From 06b5409ff01f58cee38862f3f0eeb2442d369384 Mon Sep 17 00:00:00 2001 From: Manu NALEPA Date: Tue, 7 Oct 2025 21:13:08 +0200 Subject: [PATCH 001/103] When looking for peers, skip peers with error instead of aborting the whole function (#15815) * `findPeersWithSubnets`: If the `filter` function returns an error for a given peer, log an error and skip the peer instead of aborting the whole function. * `computeIndicesByRootByPeer`: If the loop returns an error for a given peer, log an error and skip the peer instead of aborting the whole function. * Add changelog. --------- Co-authored-by: james-prysm <90280386+james-prysm@users.noreply.github.com> --- beacon-chain/p2p/subnets.go | 9 ++++++++- beacon-chain/sync/data_column_sidecars.go | 15 ++++++++++----- changelog/manu-skip-bad-peers.md | 3 +++ 3 files changed, 21 insertions(+), 6 deletions(-) create mode 100644 changelog/manu-skip-bad-peers.md diff --git a/beacon-chain/p2p/subnets.go b/beacon-chain/p2p/subnets.go index 62196d206c..3742b36ae9 100644 --- a/beacon-chain/p2p/subnets.go +++ b/beacon-chain/p2p/subnets.go @@ -25,6 +25,7 @@ import ( "github.com/holiman/uint256" "github.com/pkg/errors" "github.com/prysmaticlabs/go-bitfield" + "github.com/sirupsen/logrus" ) var ( @@ -223,8 +224,14 @@ func (s *Service) findPeersWithSubnets( // Skip nodes that are not subscribed to any of the defective subnets. nodeSubnets, err := filter(node) if err != nil { - return nil, errors.Wrap(err, "filter node") + log.WithError(err).WithFields(logrus.Fields{ + "nodeID": node.ID(), + "topicFormat": topicFormat, + }).Debug("Could not get needed subnets from peer") + + continue } + if len(nodeSubnets) == 0 { continue } diff --git a/beacon-chain/sync/data_column_sidecars.go b/beacon-chain/sync/data_column_sidecars.go index e4d25217b1..d5fba889f7 100644 --- a/beacon-chain/sync/data_column_sidecars.go +++ b/beacon-chain/sync/data_column_sidecars.go @@ -1022,17 +1022,20 @@ func computeIndicesByRootByPeer( peersByIndex := make(map[uint64]map[goPeer.ID]bool) headSlotByPeer := make(map[goPeer.ID]primitives.Slot) for peer := range peers { + log := log.WithField("peerID", peer) + // Computes the custody columns for each peer nodeID, err := prysmP2P.ConvertPeerIDToNodeID(peer) if err != nil { - return nil, errors.Wrapf(err, "convert peer ID to node ID for peer %s", peer) + log.WithError(err).Debug("Failed to convert peer ID to node ID") + continue } custodyGroupCount := p2p.CustodyGroupCountFromPeer(peer) - dasInfo, _, err := peerdas.Info(nodeID, custodyGroupCount) if err != nil { - return nil, errors.Wrapf(err, "peerdas info for peer %s", peer) + log.WithError(err).Debug("Failed to get peer DAS info") + continue } for column := range dasInfo.CustodyColumns { @@ -1045,11 +1048,13 @@ func computeIndicesByRootByPeer( // Compute the head slot for each peer peerChainState, err := p2p.Peers().ChainState(peer) if err != nil { - return nil, errors.Wrapf(err, "get chain state for peer %s", peer) + log.WithError(err).Debug("Failed to get peer chain state") + continue } if peerChainState == nil { - return nil, errors.Errorf("chain state is nil for peer %s", peer) + log.Debug("Peer chain state is nil") + continue } // Our view of the head slot of a peer is not updated in real time. 
diff --git a/changelog/manu-skip-bad-peers.md b/changelog/manu-skip-bad-peers.md new file mode 100644 index 0000000000..b9e7cc2a51 --- /dev/null +++ b/changelog/manu-skip-bad-peers.md @@ -0,0 +1,3 @@ +### Fixed +- `findPeersWithSubnets`: If the filter function returns an error for a given peer, log an error and skip the peer instead of aborting the whole function. +- `computeIndicesByRootByPeer`: If the loop returns an error for a given peer, log an error and skip the peer instead of aborting the whole function. \ No newline at end of file From 0d742c6f884a1e85e213278fae0e7be2cf01ceec Mon Sep 17 00:00:00 2001 From: kasey <489222+kasey@users.noreply.github.com> Date: Tue, 7 Oct 2025 20:09:22 -0500 Subject: [PATCH 002/103] make registerSubscribers idempotent (#15779) * make registerSubscribers idempotent * clean up debugging changes * test fix * rm unused var * sobbing noises * naming feedback and separate test for digestActionDone * gazelle * manu's feedback * refactor to enable immediate sub after init sync * preston comment re panic causing db corruption risk * ensure we check that we're 1 epoch past the fork * manu feedback --------- Co-authored-by: Kasey Kirkham --- beacon-chain/sync/BUILD.bazel | 2 + beacon-chain/sync/error.go | 6 +- beacon-chain/sync/fork_watcher.go | 137 ++++++------ beacon-chain/sync/fork_watcher_test.go | 35 ++- beacon-chain/sync/once.go | 40 ++++ beacon-chain/sync/once_test.go | 40 ++++ beacon-chain/sync/rpc.go | 47 +--- .../sync/rpc_beacon_blocks_by_range_test.go | 2 +- beacon-chain/sync/service.go | 38 ++-- beacon-chain/sync/subscriber.go | 211 ++++++++++-------- beacon-chain/sync/subscriber_test.go | 79 +++---- changelog/kasey_idempotent-registration.md | 2 + 12 files changed, 363 insertions(+), 276 deletions(-) create mode 100644 beacon-chain/sync/once.go create mode 100644 beacon-chain/sync/once_test.go create mode 100644 changelog/kasey_idempotent-registration.md diff --git a/beacon-chain/sync/BUILD.bazel b/beacon-chain/sync/BUILD.bazel index 44112d8e4f..00b9354aa1 100644 --- a/beacon-chain/sync/BUILD.bazel +++ b/beacon-chain/sync/BUILD.bazel @@ -17,6 +17,7 @@ go_library( "fuzz_exports.go", # keep "log.go", "metrics.go", + "once.go", "options.go", "pending_attestations_queue.go", "pending_blocks_queue.go", @@ -172,6 +173,7 @@ go_test( "error_test.go", "fork_watcher_test.go", "kzg_batch_verifier_test.go", + "once_test.go", "pending_attestations_queue_test.go", "pending_blocks_queue_test.go", "rate_limiter_test.go", diff --git a/beacon-chain/sync/error.go b/beacon-chain/sync/error.go index c67af0417f..594820f4ad 100644 --- a/beacon-chain/sync/error.go +++ b/beacon-chain/sync/error.go @@ -2,7 +2,6 @@ package sync import ( "bytes" - "errors" "io" "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p" @@ -12,6 +11,7 @@ import ( libp2pcore "github.com/libp2p/go-libp2p/core" "github.com/libp2p/go-libp2p/core/network" multiplex "github.com/libp2p/go-mplex" + "github.com/pkg/errors" "github.com/sirupsen/logrus" ) @@ -38,7 +38,7 @@ func ReadStatusCode(stream network.Stream, encoding encoder.NetworkEncoding) (ui b := make([]byte, 1) _, err := stream.Read(b) if err != nil { - return 0, "", err + return 0, "", errors.Wrap(err, "stream read") } if b[0] == responseCodeSuccess { @@ -52,7 +52,7 @@ func ReadStatusCode(stream network.Stream, encoding encoder.NetworkEncoding) (ui SetStreamReadDeadline(stream, params.BeaconConfig().RespTimeoutDuration()) msg := &types.ErrorMessage{} if err := encoding.DecodeWithMaxLength(stream, msg); err != nil { - return 0, "", err + return 0, 
"", errors.Wrap(err, "decode error message") } return b[0], string(*msg), nil diff --git a/beacon-chain/sync/fork_watcher.go b/beacon-chain/sync/fork_watcher.go index 7b6663dd70..a4791f3c40 100644 --- a/beacon-chain/sync/fork_watcher.go +++ b/beacon-chain/sync/fork_watcher.go @@ -9,23 +9,28 @@ import ( "github.com/pkg/errors" ) -// Is a background routine that observes for new incoming forks. Depending on the epoch -// it will be in charge of subscribing/unsubscribing the relevant topics at the fork boundaries. -func (s *Service) forkWatcher() { - <-s.initialSyncComplete +// p2pHandlerControlLoop runs in a continuous loop to ensure that: +// - We are subscribed to the correct gossipsub topics (for the current and upcoming epoch). +// - We have registered the correct RPC stream handlers (for the current and upcoming epoch). +// - We have cleaned up gossipsub topics and RPC stream handlers that are no longer needed. +func (s *Service) p2pHandlerControlLoop() { + // At startup, launch registration and peer discovery loops, and register rpc stream handlers. + startEntry := params.GetNetworkScheduleEntry(s.cfg.clock.CurrentEpoch()) + s.registerSubscribers(startEntry) + slotTicker := slots.NewSlotTicker(s.cfg.clock.GenesisTime(), params.BeaconConfig().SecondsPerSlot) for { select { // In the event of a node restart, we will still end up subscribing to the correct // topics during/after the fork epoch. This routine is to ensure correct // subscriptions for nodes running before a fork epoch. - case currSlot := <-slotTicker.C(): - currEpoch := slots.ToEpoch(currSlot) - if err := s.registerForUpcomingFork(currEpoch); err != nil { + case <-slotTicker.C(): + current := s.cfg.clock.CurrentEpoch() + if err := s.ensureRegistrationsForEpoch(current); err != nil { log.WithError(err).Error("Unable to check for fork in the next epoch") continue } - if err := s.deregisterFromPastFork(currEpoch); err != nil { + if err := s.ensureDeregistrationForEpoch(current); err != nil { log.WithError(err).Error("Unable to check for fork in the previous epoch") continue } @@ -37,102 +42,90 @@ func (s *Service) forkWatcher() { } } -// registerForUpcomingFork registers appropriate gossip and RPC topic if there is a fork in the next epoch. -func (s *Service) registerForUpcomingFork(currentEpoch primitives.Epoch) error { - nextEntry := params.GetNetworkScheduleEntry(currentEpoch + 1) - // Check if there is a fork in the next epoch. - if nextEntry.ForkDigest == s.registeredNetworkEntry.ForkDigest { - return nil - } +// ensureRegistrationsForEpoch ensures that gossip topic and RPC stream handler +// registrations are in place for the current and subsequent epoch. +func (s *Service) ensureRegistrationsForEpoch(epoch primitives.Epoch) error { + current := params.GetNetworkScheduleEntry(epoch) + s.registerSubscribers(current) - if s.subHandler.digestExists(nextEntry.ForkDigest) { - return nil - } - - // Register the subscribers (gossipsub) for the next epoch. - s.registerSubscribers(nextEntry.Epoch, nextEntry.ForkDigest) - - // Get the handlers for the current and next fork. 
- currentHandler, err := s.rpcHandlerByTopicFromEpoch(currentEpoch) + currentHandler, err := s.rpcHandlerByTopicFromFork(current.VersionEnum) if err != nil { return errors.Wrap(err, "RPC handler by topic from before fork epoch") } + if !s.digestActionDone(current.ForkDigest, registerRpcOnce) { + for topic, handler := range currentHandler { + s.registerRPC(topic, handler) + } + } - nextHandler, err := s.rpcHandlerByTopicFromEpoch(nextEntry.Epoch) + next := params.GetNetworkScheduleEntry(epoch + 1) + if current.Epoch == next.Epoch { + return nil // no fork in the next epoch + } + s.registerSubscribers(next) + + if s.digestActionDone(next.ForkDigest, registerRpcOnce) { + return nil + } + + nextHandler, err := s.rpcHandlerByTopicFromFork(next.VersionEnum) if err != nil { return errors.Wrap(err, "RPC handler by topic from fork epoch") } - // Compute newly added topics. newHandlersByTopic := addedRPCHandlerByTopic(currentHandler, nextHandler) - // Register the new RPC handlers. + // We deregister the old topics later, at least one epoch after the fork. for topic, handler := range newHandlersByTopic { s.registerRPC(topic, handler) } - s.registeredNetworkEntry = nextEntry return nil } -// deregisterFromPastFork deregisters appropriate gossip and RPC topic if there is a fork in the current epoch. -func (s *Service) deregisterFromPastFork(currentEpoch primitives.Epoch) error { - // Get the fork. - currentFork, err := params.Fork(currentEpoch) - if err != nil { - return errors.Wrap(err, "genesis validators root") - } +// ensureDeregistrationForEpoch deregisters appropriate gossip and RPC topic if there is a fork in the current epoch. +func (s *Service) ensureDeregistrationForEpoch(currentEpoch primitives.Epoch) error { + current := params.GetNetworkScheduleEntry(currentEpoch) // If we are still in our genesis fork version then exit early. - if currentFork.Epoch == params.BeaconConfig().GenesisEpoch { + if current.Epoch == params.BeaconConfig().GenesisEpoch { return nil } + if currentEpoch < current.Epoch+1 { + return nil // wait until we are 1 epoch into the fork + } - // Get the epoch after the fork epoch. - afterForkEpoch := currentFork.Epoch + 1 + previous := params.GetNetworkScheduleEntry(current.Epoch - 1) + // Remove stream handlers for all topics that are in the set of + // currentTopics-previousTopics + if !s.digestActionDone(previous.ForkDigest, unregisterRpcOnce) { + previousTopics, err := s.rpcHandlerByTopicFromFork(previous.VersionEnum) + if err != nil { + return errors.Wrap(err, "RPC handler by topic from before fork epoch") + } + currentTopics, err := s.rpcHandlerByTopicFromFork(current.VersionEnum) + if err != nil { + return errors.Wrap(err, "RPC handler by topic from fork epoch") + } + topicsToRemove := removedRPCTopics(previousTopics, currentTopics) + for topic := range topicsToRemove { + fullTopic := topic + s.cfg.p2p.Encoding().ProtocolSuffix() + s.cfg.p2p.Host().RemoveStreamHandler(protocol.ID(fullTopic)) + log.WithField("topic", fullTopic).Debug("Removed RPC handler") + } + } - // Start de-registering if the current epoch is after the fork epoch. - if currentEpoch != afterForkEpoch { + // Unsubscribe from all gossip topics with the previous fork digest. + if s.digestActionDone(previous.ForkDigest, unregisterGossipOnce) { return nil } - - // Look at the previous fork's digest. - beforeForkEpoch := currentFork.Epoch - 1 - - beforeForkDigest := params.ForkDigest(beforeForkEpoch) - - // Exit early if there are no topics with that particular digest. 
- if !s.subHandler.digestExists(beforeForkDigest) { - return nil - } - - // Compute the RPC handlers that are no longer needed. - beforeForkHandlerByTopic, err := s.rpcHandlerByTopicFromEpoch(beforeForkEpoch) - if err != nil { - return errors.Wrap(err, "RPC handler by topic from before fork epoch") - } - - forkHandlerByTopic, err := s.rpcHandlerByTopicFromEpoch(currentFork.Epoch) - if err != nil { - return errors.Wrap(err, "RPC handler by topic from fork epoch") - } - - topicsToRemove := removedRPCTopics(beforeForkHandlerByTopic, forkHandlerByTopic) - for topic := range topicsToRemove { - fullTopic := topic + s.cfg.p2p.Encoding().ProtocolSuffix() - s.cfg.p2p.Host().RemoveStreamHandler(protocol.ID(fullTopic)) - log.WithField("topic", fullTopic).Debug("Removed RPC handler") - } - - // Run through all our current active topics and see - // if there are any subscriptions to be removed. for _, t := range s.subHandler.allTopics() { retDigest, err := p2p.ExtractGossipDigest(t) if err != nil { log.WithError(err).Error("Could not retrieve digest") continue } - if retDigest == beforeForkDigest { + if retDigest == previous.ForkDigest { s.unSubscribeFromTopic(t) } } diff --git a/beacon-chain/sync/fork_watcher_test.go b/beacon-chain/sync/fork_watcher_test.go index 1375b1a51f..e6466a763e 100644 --- a/beacon-chain/sync/fork_watcher_test.go +++ b/beacon-chain/sync/fork_watcher_test.go @@ -50,12 +50,36 @@ func testForkWatcherService(t *testing.T, current primitives.Epoch) *Service { return r } +func TestRegisterSubscriptions_Idempotent(t *testing.T) { + params.SetupTestConfigCleanup(t) + genesis.StoreEmbeddedDuringTest(t, params.BeaconConfig().ConfigName) + fulu := params.BeaconConfig().ElectraForkEpoch + 4096*2 + params.BeaconConfig().FuluForkEpoch = fulu + params.BeaconConfig().InitializeForkSchedule() + + current := fulu - 1 + s := testForkWatcherService(t, current) + next := params.GetNetworkScheduleEntry(fulu) + wg := attachSpawner(s) + require.Equal(t, true, s.registerSubscribers(next)) + done := make(chan struct{}) + go func() { wg.Wait(); close(done) }() + select { + case <-time.After(5 * time.Second): + t.Fatal("timed out waiting for subscriptions to be registered") + case <-done: + } + // the goal of this callback is just to assert that spawn is never called. 
+ s.subscriptionSpawner = func(func()) { t.Error("registration routines spawned twice for the same digest") } + require.NoError(t, s.ensureRegistrationsForEpoch(fulu)) +} + func TestService_CheckForNextEpochFork(t *testing.T) { closedChan := make(chan struct{}) close(closedChan) params.SetupTestConfigCleanup(t) genesis.StoreEmbeddedDuringTest(t, params.BeaconConfig().ConfigName) - params.BeaconConfig().FuluForkEpoch = params.BeaconConfig().ElectraForkEpoch + 1096*2 + params.BeaconConfig().FuluForkEpoch = params.BeaconConfig().ElectraForkEpoch + 4096*2 params.BeaconConfig().InitializeForkSchedule() tests := []struct { @@ -171,7 +195,7 @@ func TestService_CheckForNextEpochFork(t *testing.T) { current := tt.epochAtRegistration(tt.forkEpoch) s := testForkWatcherService(t, current) wg := attachSpawner(s) - require.NoError(t, s.registerForUpcomingFork(s.cfg.clock.CurrentEpoch())) + require.NoError(t, s.ensureRegistrationsForEpoch(s.cfg.clock.CurrentEpoch())) wg.Wait() tt.checkRegistration(t, s) @@ -193,10 +217,13 @@ func TestService_CheckForNextEpochFork(t *testing.T) { // Move the clock to just before the next fork epoch and ensure deregistration is correct wg = attachSpawner(s) s.cfg.clock = defaultClockWithTimeAtEpoch(tt.nextForkEpoch - 1) - require.NoError(t, s.registerForUpcomingFork(s.cfg.clock.CurrentEpoch())) + require.NoError(t, s.ensureRegistrationsForEpoch(s.cfg.clock.CurrentEpoch())) wg.Wait() + + require.NoError(t, s.ensureDeregistrationForEpoch(tt.nextForkEpoch)) + assert.Equal(t, true, s.subHandler.digestExists(digest)) // deregister as if it is the epoch after the next fork epoch - require.NoError(t, s.deregisterFromPastFork(tt.nextForkEpoch+1)) + require.NoError(t, s.ensureDeregistrationForEpoch(tt.nextForkEpoch+1)) assert.Equal(t, false, s.subHandler.digestExists(digest)) assert.Equal(t, true, s.subHandler.digestExists(nextDigest)) }) diff --git a/beacon-chain/sync/once.go b/beacon-chain/sync/once.go new file mode 100644 index 0000000000..65f3551c19 --- /dev/null +++ b/beacon-chain/sync/once.go @@ -0,0 +1,40 @@ +package sync + +import "sync" + +// oncePerDigest represents an action that should only be performed once per fork digest. +type oncePerDigest uint8 + +const ( + doneZero oncePerDigest = 0 + registerGossipOnce oncePerDigest = 1 << 0 + unregisterGossipOnce oncePerDigest = 1 << 1 + registerRpcOnce oncePerDigest = 1 << 2 + unregisterRpcOnce oncePerDigest = 1 << 3 +) + +// perDigestSet keeps track of which oncePerDigest actions +// have been performed for each fork digest. +type perDigestSet struct { + sync.Mutex + history map[[4]byte]oncePerDigest +} + +// digestActionDone marks the action as done for the given digest, returning true if it was already done. 
+func (s *Service) digestActionDone(digest [4]byte, action oncePerDigest) bool { + s.digestActions.Lock() + defer s.digestActions.Unlock() + // lazy initialize registrationHistory; the lock is not a reference type so it is ready to go + if s.digestActions.history == nil { + s.digestActions.history = make(map[[4]byte]oncePerDigest) + } + + prev := s.digestActions.history[digest] + // Return true if the bit was already set + if prev&action != 0 { + return true + } + + s.digestActions.history[digest] = prev | action + return false +} diff --git a/beacon-chain/sync/once_test.go b/beacon-chain/sync/once_test.go new file mode 100644 index 0000000000..82729b8788 --- /dev/null +++ b/beacon-chain/sync/once_test.go @@ -0,0 +1,40 @@ +package sync + +import ( + "fmt" + "slices" + "testing" +) + +func TestDigestActionDone(t *testing.T) { + digests := [][4]byte{ + {0, 0, 0, 0}, + {1, 2, 3, 4}, + {4, 3, 2, 1}, + } + actions := []oncePerDigest{ + registerGossipOnce, + unregisterGossipOnce, + registerRpcOnce, + unregisterRpcOnce, + } + testCombos := func(d [][4]byte, a []oncePerDigest) { + s := &Service{} + for _, digest := range d { + for _, action := range a { + t.Run(fmt.Sprintf("digest=%#x/action=%d", digest, action), func(t *testing.T) { + if s.digestActionDone(digest, action) { + t.Fatal("expected first call to return false") + } + if !s.digestActionDone(digest, action) { + t.Fatal("expected second call to return true") + } + }) + } + } + } + testCombos(digests, actions) + slices.Reverse(digests) + slices.Reverse(actions) + testCombos(digests, actions) +} diff --git a/beacon-chain/sync/rpc.go b/beacon-chain/sync/rpc.go index 6122748cb3..edfa79c241 100644 --- a/beacon-chain/sync/rpc.go +++ b/beacon-chain/sync/rpc.go @@ -11,11 +11,9 @@ import ( p2ptypes "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/types" "github.com/OffchainLabs/prysm/v6/config/features" "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" "github.com/OffchainLabs/prysm/v6/monitoring/tracing" "github.com/OffchainLabs/prysm/v6/monitoring/tracing/trace" "github.com/OffchainLabs/prysm/v6/runtime/version" - "github.com/OffchainLabs/prysm/v6/time/slots" libp2pcore "github.com/libp2p/go-libp2p/core" "github.com/libp2p/go-libp2p/core/network" "github.com/pkg/errors" @@ -122,41 +120,9 @@ func (s *Service) rpcHandlerByTopicFromFork(forkIndex int) (map[string]rpcHandle return nil, errors.Errorf("RPC handler not found for fork index %d", forkIndex) } -// rpcHandlerByTopic returns the RPC handlers for a given epoch. -func (s *Service) rpcHandlerByTopicFromEpoch(epoch primitives.Epoch) (map[string]rpcHandler, error) { - // Get the beacon config. - beaconConfig := params.BeaconConfig() - - if epoch >= beaconConfig.FuluForkEpoch { - return s.rpcHandlerByTopicFromFork(version.Fulu) - } - - if epoch >= beaconConfig.ElectraForkEpoch { - return s.rpcHandlerByTopicFromFork(version.Electra) - } - - if epoch >= beaconConfig.DenebForkEpoch { - return s.rpcHandlerByTopicFromFork(version.Deneb) - } - - if epoch >= beaconConfig.CapellaForkEpoch { - return s.rpcHandlerByTopicFromFork(version.Capella) - } - - if epoch >= beaconConfig.BellatrixForkEpoch { - return s.rpcHandlerByTopicFromFork(version.Bellatrix) - } - - if epoch >= beaconConfig.AltairForkEpoch { - return s.rpcHandlerByTopicFromFork(version.Altair) - } - - return s.rpcHandlerByTopicFromFork(version.Phase0) -} - // addedRPCHandlerByTopic returns the RPC handlers that are added in the new map that are not present in the old map. 
func addedRPCHandlerByTopic(previous, next map[string]rpcHandler) map[string]rpcHandler { - added := make(map[string]rpcHandler) + added := make(map[string]rpcHandler, len(next)) for topic, handler := range next { if _, ok := previous[topic]; !ok { @@ -181,13 +147,12 @@ func removedRPCTopics(previous, next map[string]rpcHandler) map[string]bool { } // registerRPCHandlers for p2p RPC. -func (s *Service) registerRPCHandlers() error { - // Get the current epoch. - currentSlot := s.cfg.clock.CurrentSlot() - currentEpoch := slots.ToEpoch(currentSlot) - +func (s *Service) registerRPCHandlers(nse params.NetworkScheduleEntry) error { + if s.digestActionDone(nse.ForkDigest, registerRpcOnce) { + return nil + } // Get the RPC handlers for the current epoch. - handlerByTopic, err := s.rpcHandlerByTopicFromEpoch(currentEpoch) + handlerByTopic, err := s.rpcHandlerByTopicFromFork(nse.VersionEnum) if err != nil { return errors.Wrap(err, "rpc handler by topic from epoch") } diff --git a/beacon-chain/sync/rpc_beacon_blocks_by_range_test.go b/beacon-chain/sync/rpc_beacon_blocks_by_range_test.go index 1e58011511..0b893b50aa 100644 --- a/beacon-chain/sync/rpc_beacon_blocks_by_range_test.go +++ b/beacon-chain/sync/rpc_beacon_blocks_by_range_test.go @@ -854,7 +854,7 @@ func TestRPCBeaconBlocksByRange_FilterBlocks(t *testing.T) { blocks := make([]*ethpb.SignedBeaconBlock, 0, req.Count) for i := req.StartSlot; i < req.StartSlot.Add(req.Count*req.Step); i += primitives.Slot(req.Step) { code, _, err := ReadStatusCode(stream, &encoder.SszNetworkEncoder{}) - if err != nil && err != io.EOF { + if err != nil && !errors.Is(err, io.EOF) { t.Fatal(err) } if code != 0 || errors.Is(err, io.EOF) { diff --git a/beacon-chain/sync/service.go b/beacon-chain/sync/service.go index 22edbe23dc..02ccffd9f3 100644 --- a/beacon-chain/sync/service.go +++ b/beacon-chain/sync/service.go @@ -180,7 +180,7 @@ type Service struct { slasherEnabled bool lcStore *lightClient.Store dataColumnLogCh chan dataColumnLogEntry - registeredNetworkEntry params.NetworkScheduleEntry + digestActions perDigestSet subscriptionSpawner func(func()) // see Service.spawn for details } @@ -377,10 +377,13 @@ func (s *Service) waitForChainStart() { } s.ctxMap = ctxMap - // Register respective rpc handlers at state initialized event. - err = s.registerRPCHandlers() - if err != nil { - log.WithError(err).Error("Could not register rpc handlers") + // We need to register RPC handlers ASAP so that we can handle incoming status message + // requests from peers. + nse := params.GetNetworkScheduleEntry(clock.CurrentEpoch()) + if err := s.registerRPCHandlers(nse); err != nil { + // If we fail here, we won't be able to peer with anyone because we can't handle their status messages. + log.WithError(err).Error("Failed to register RPC handlers") + // TODO: need ability to bubble the error up to the top of the node init tree and exit safely. return } @@ -401,22 +404,8 @@ func (s *Service) startDiscoveryAndSubscriptions() { return } - // Compute the current epoch. - currentSlot := slots.CurrentSlot(s.cfg.clock.GenesisTime()) - currentEpoch := slots.ToEpoch(currentSlot) - - // Compute the current fork forkDigest. - forkDigest, err := s.currentForkDigest() - if err != nil { - log.WithError(err).Error("Could not retrieve current fork digest") - return - } - - // Register respective pubsub handlers at state synced event. - s.registerSubscribers(currentEpoch, forkDigest) - // Start the fork watcher. 
- go s.forkWatcher() + go s.p2pHandlerControlLoop() } func (s *Service) writeErrorResponseToStream(responseCode byte, reason string, stream libp2pcore.Stream) { @@ -454,6 +443,15 @@ func (s *Service) chainIsStarted() bool { return s.chainStarted.IsSet() } +func (s *Service) waitForInitialSync(ctx context.Context) error { + select { + case <-s.initialSyncComplete: + return nil + case <-ctx.Done(): + return ctx.Err() + } +} + // Checker defines a struct which can verify whether a node is currently // synchronizing a chain with the rest of peers in the network. type Checker interface { diff --git a/beacon-chain/sync/subscriber.go b/beacon-chain/sync/subscriber.go index 4d32c10b65..103f3e3825 100644 --- a/beacon-chain/sync/subscriber.go +++ b/beacon-chain/sync/subscriber.go @@ -55,7 +55,7 @@ type subscribeParameters struct { topicFormat string validate wrappedVal handle subHandler - digest [4]byte + nse params.NetworkScheduleEntry // getSubnetsToJoin is a function that returns all subnets the node should join. getSubnetsToJoin func(currentSlot primitives.Slot) map[uint64]bool // getSubnetsRequiringPeers is a function that returns all subnets that require peers to be found @@ -70,7 +70,7 @@ func (p subscribeParameters) shortTopic() string { if fmtLen >= 3 && short[fmtLen-3:] == "_%d" { short = short[:fmtLen-3] } - return fmt.Sprintf(short, p.digest) + return fmt.Sprintf(short, p.nse.ForkDigest) } func (p subscribeParameters) logFields() logrus.Fields { @@ -81,7 +81,7 @@ func (p subscribeParameters) logFields() logrus.Fields { // fullTopic is the fully qualified topic string, given to gossipsub. func (p subscribeParameters) fullTopic(subnet uint64, suffix string) string { - return fmt.Sprintf(p.topicFormat, p.digest, subnet) + suffix + return fmt.Sprintf(p.topicFormat, p.nse.ForkDigest, subnet) + suffix } // subnetTracker keeps track of which subnets we are subscribed to, out of the set of @@ -204,41 +204,45 @@ func (s *Service) spawn(f func()) { } // Register PubSub subscribers -func (s *Service) registerSubscribers(epoch primitives.Epoch, digest [4]byte) { +func (s *Service) registerSubscribers(nse params.NetworkScheduleEntry) bool { + // If we have already registered for this fork digest, exit early. 
+ if s.digestActionDone(nse.ForkDigest, registerGossipOnce) { + return false + } s.spawn(func() { - s.subscribe(p2p.BlockSubnetTopicFormat, s.validateBeaconBlockPubSub, s.beaconBlockSubscriber, digest) + s.subscribe(p2p.BlockSubnetTopicFormat, s.validateBeaconBlockPubSub, s.beaconBlockSubscriber, nse) }) s.spawn(func() { - s.subscribe(p2p.AggregateAndProofSubnetTopicFormat, s.validateAggregateAndProof, s.beaconAggregateProofSubscriber, digest) + s.subscribe(p2p.AggregateAndProofSubnetTopicFormat, s.validateAggregateAndProof, s.beaconAggregateProofSubscriber, nse) }) s.spawn(func() { - s.subscribe(p2p.ExitSubnetTopicFormat, s.validateVoluntaryExit, s.voluntaryExitSubscriber, digest) + s.subscribe(p2p.ExitSubnetTopicFormat, s.validateVoluntaryExit, s.voluntaryExitSubscriber, nse) }) s.spawn(func() { - s.subscribe(p2p.ProposerSlashingSubnetTopicFormat, s.validateProposerSlashing, s.proposerSlashingSubscriber, digest) + s.subscribe(p2p.ProposerSlashingSubnetTopicFormat, s.validateProposerSlashing, s.proposerSlashingSubscriber, nse) }) s.spawn(func() { - s.subscribe(p2p.AttesterSlashingSubnetTopicFormat, s.validateAttesterSlashing, s.attesterSlashingSubscriber, digest) + s.subscribe(p2p.AttesterSlashingSubnetTopicFormat, s.validateAttesterSlashing, s.attesterSlashingSubscriber, nse) }) s.spawn(func() { s.subscribeWithParameters(subscribeParameters{ topicFormat: p2p.AttestationSubnetTopicFormat, validate: s.validateCommitteeIndexBeaconAttestation, handle: s.committeeIndexBeaconAttestationSubscriber, - digest: digest, getSubnetsToJoin: s.persistentAndAggregatorSubnetIndices, getSubnetsRequiringPeers: attesterSubnetIndices, + nse: nse, }) }) // New gossip topic in Altair - if params.BeaconConfig().AltairForkEpoch <= epoch { + if params.BeaconConfig().AltairForkEpoch <= nse.Epoch { s.spawn(func() { s.subscribe( p2p.SyncContributionAndProofSubnetTopicFormat, s.validateSyncContributionAndProof, s.syncContributionAndProofSubscriber, - digest, + nse, ) }) s.spawn(func() { @@ -246,8 +250,8 @@ func (s *Service) registerSubscribers(epoch primitives.Epoch, digest [4]byte) { topicFormat: p2p.SyncCommitteeSubnetTopicFormat, validate: s.validateSyncCommitteeMessage, handle: s.syncCommitteeMessageSubscriber, - digest: digest, getSubnetsToJoin: s.activeSyncSubnetIndices, + nse: nse, }) }) @@ -257,7 +261,7 @@ func (s *Service) registerSubscribers(epoch primitives.Epoch, digest [4]byte) { p2p.LightClientOptimisticUpdateTopicFormat, s.validateLightClientOptimisticUpdate, noopHandler, - digest, + nse, ) }) s.spawn(func() { @@ -265,32 +269,32 @@ func (s *Service) registerSubscribers(epoch primitives.Epoch, digest [4]byte) { p2p.LightClientFinalityUpdateTopicFormat, s.validateLightClientFinalityUpdate, noopHandler, - digest, + nse, ) }) } } // New gossip topic in Capella - if params.BeaconConfig().CapellaForkEpoch <= epoch { + if params.BeaconConfig().CapellaForkEpoch <= nse.Epoch { s.spawn(func() { s.subscribe( p2p.BlsToExecutionChangeSubnetTopicFormat, s.validateBlsToExecutionChange, s.blsToExecutionChangeSubscriber, - digest, + nse, ) }) } // New gossip topic in Deneb, removed in Electra - if params.BeaconConfig().DenebForkEpoch <= epoch && epoch < params.BeaconConfig().ElectraForkEpoch { + if params.BeaconConfig().DenebForkEpoch <= nse.Epoch && nse.Epoch < params.BeaconConfig().ElectraForkEpoch { s.spawn(func() { s.subscribeWithParameters(subscribeParameters{ topicFormat: p2p.BlobSubnetTopicFormat, validate: s.validateBlob, handle: s.blobSubscriber, - digest: digest, + nse: nse, getSubnetsToJoin: 
func(primitives.Slot) map[uint64]bool { return mapFromCount(params.BeaconConfig().BlobsidecarSubnetCount) }, @@ -299,13 +303,13 @@ func (s *Service) registerSubscribers(epoch primitives.Epoch, digest [4]byte) { } // New gossip topic in Electra, removed in Fulu - if params.BeaconConfig().ElectraForkEpoch <= epoch && epoch < params.BeaconConfig().FuluForkEpoch { + if params.BeaconConfig().ElectraForkEpoch <= nse.Epoch && nse.Epoch < params.BeaconConfig().FuluForkEpoch { s.spawn(func() { s.subscribeWithParameters(subscribeParameters{ topicFormat: p2p.BlobSubnetTopicFormat, validate: s.validateBlob, handle: s.blobSubscriber, - digest: digest, + nse: nse, getSubnetsToJoin: func(currentSlot primitives.Slot) map[uint64]bool { return mapFromCount(params.BeaconConfig().BlobsidecarSubnetCountElectra) }, @@ -314,35 +318,54 @@ func (s *Service) registerSubscribers(epoch primitives.Epoch, digest [4]byte) { } // New gossip topic in Fulu. - if params.BeaconConfig().FuluForkEpoch <= epoch { + if params.BeaconConfig().FuluForkEpoch <= nse.Epoch { s.spawn(func() { s.subscribeWithParameters(subscribeParameters{ topicFormat: p2p.DataColumnSubnetTopicFormat, validate: s.validateDataColumn, handle: s.dataColumnSubscriber, - digest: digest, + nse: nse, getSubnetsToJoin: s.dataColumnSubnetIndices, getSubnetsRequiringPeers: s.allDataColumnSubnets, }) }) } + return true +} + +func (s *Service) subscriptionRequestExpired(nse params.NetworkScheduleEntry) bool { + next := params.NextNetworkScheduleEntry(nse.Epoch) + return next.Epoch != nse.Epoch && s.cfg.clock.CurrentEpoch() > next.Epoch +} + +func (s *Service) subscribeLogFields(topic string, nse params.NetworkScheduleEntry) logrus.Fields { + return logrus.Fields{ + "topic": topic, + "digest": nse.ForkDigest, + "forkEpoch": nse.Epoch, + "currentEpoch": s.cfg.clock.CurrentEpoch(), + } } // subscribe to a given topic with a given validator and subscription handler. // The base protobuf message is used to initialize new messages for decoding. -func (s *Service) subscribe(topic string, validator wrappedVal, handle subHandler, digest [4]byte) { - <-s.initialSyncComplete - _, e, err := params.ForkDataFromDigest(digest) - if err != nil { - // Impossible condition as it would mean digest does not exist. - panic(err) // lint:nopanic -- Impossible condition. +func (s *Service) subscribe(topic string, validator wrappedVal, handle subHandler, nse params.NetworkScheduleEntry) { + if err := s.waitForInitialSync(s.ctx); err != nil { + log.WithFields(s.subscribeLogFields(topic, nse)).WithError(err).Debug("Context cancelled while waiting for initial sync, not subscribing to topic") + return } - base := p2p.GossipTopicMappings(topic, e) + // Check if this subscribe request is still valid - we may have crossed another fork epoch while waiting for initial sync. + if s.subscriptionRequestExpired(nse) { + // If we are already past the next fork epoch, do not subscribe to this topic. + log.WithFields(s.subscribeLogFields(topic, nse)).Debug("Not subscribing to topic as we are already past the next fork epoch") + return + } + base := p2p.GossipTopicMappings(topic, nse.Epoch) if base == nil { // Impossible condition as it would mean topic does not exist. panic(fmt.Sprintf("%s is not mapped to any message in GossipTopicMappings", topic)) // lint:nopanic -- Impossible condition. 
} - s.subscribeWithBase(s.addDigestToTopic(topic, digest), validator, handle) + s.subscribeWithBase(s.addDigestToTopic(topic, nse.ForkDigest), validator, handle) } func (s *Service) subscribeWithBase(topic string, validator wrappedVal, handle subHandler) *pubsub.Subscription { @@ -352,7 +375,7 @@ func (s *Service) subscribeWithBase(topic string, validator wrappedVal, handle s // Do not resubscribe already seen subscriptions. ok := s.subHandler.topicExists(topic) if ok { - log.WithField("topic", topic).Debug("Provided topic already has an active subscription running") + log.WithField("topic", topic).Error("Provided topic already has an active subscription running") return nil } @@ -504,89 +527,93 @@ func (s *Service) wrapAndReportValidation(topic string, v wrappedVal) (string, p } } -// pruneSubscriptions unsubscribes from topics we are currently subscribed to but that are +// pruneNotWanted unsubscribes from topics we are currently subscribed to but that are // not in the list of wanted subnets. -// This function mutates the `subscriptionBySubnet` map, which is used to keep track of the current subscriptions. -func (s *Service) pruneSubscriptions(t *subnetTracker, wantedSubnets map[uint64]bool) { +func (s *Service) pruneNotWanted(t *subnetTracker, wantedSubnets map[uint64]bool) { for _, subnet := range t.unwanted(wantedSubnets) { t.cancelSubscription(subnet) s.unSubscribeFromTopic(t.fullTopic(subnet, s.cfg.p2p.Encoding().ProtocolSuffix())) } } -// subscribeToSubnets subscribes to needed subnets and unsubscribe from unneeded ones. -// This functions mutates the `subscriptionBySubnet` map, which is used to keep track of the current subscriptions. -func (s *Service) subscribeToSubnets(t *subnetTracker) error { - // Do not subscribe if not synced. - if s.chainStarted.IsSet() && s.cfg.initialSync.Syncing() { - return nil - } - - valid, err := isDigestValid(t.digest, s.cfg.clock) - if err != nil { - return errors.Wrap(err, "is digest valid") - } - - // Unsubscribe from all subnets if digest is not valid. It's likely to be the case after a hard fork. - if !valid { - s.pruneSubscriptions(t, nil) - return errInvalidDigest - } - - subnetsToJoin := t.getSubnetsToJoin(s.cfg.clock.CurrentSlot()) - s.pruneSubscriptions(t, subnetsToJoin) - for _, subnet := range t.missing(subnetsToJoin) { - // TODO: subscribeWithBase appends the protocol suffix, other methods don't. Make this consistent. - topic := t.fullTopic(subnet, "") - t.track(subnet, s.subscribeWithBase(topic, t.validate, t.handle)) - } - - return nil -} - // subscribeWithParameters subscribes to a list of subnets. func (s *Service) subscribeWithParameters(p subscribeParameters) { + ctx, cancel := context.WithCancel(s.ctx) + defer cancel() + tracker := newSubnetTracker(p) - // Try once immediately so we don't have to wait until the next slot. - s.ensureSubnetPeersAndSubscribe(tracker) - - go s.logMinimumPeersPerSubnet(p) + go s.ensurePeers(ctx, tracker) + go s.logMinimumPeersPerSubnet(ctx, p) + if err := s.waitForInitialSync(ctx); err != nil { + log.WithFields(p.logFields()).WithError(err).Debug("Could not subscribe to subnets as initial sync failed") + return + } + s.trySubscribeSubnets(tracker) slotTicker := slots.NewSlotTicker(s.cfg.clock.GenesisTime(), params.BeaconConfig().SecondsPerSlot) defer slotTicker.Done() for { select { case <-slotTicker.C(): - s.ensureSubnetPeersAndSubscribe(tracker) + // Check if this subscribe request is still valid - we may have crossed another fork epoch while waiting for initial sync. 
+ if s.subscriptionRequestExpired(p.nse) { + // If we are already past the next fork epoch, do not subscribe to this topic. + log.WithFields(logrus.Fields{ + "topic": p.shortTopic(), + "digest": p.nse.ForkDigest, + "epoch": p.nse.Epoch, + "currentEpoch": s.cfg.clock.CurrentEpoch(), + }).Debug("Exiting topic subnet subscription loop") + return + } + s.trySubscribeSubnets(tracker) case <-s.ctx.Done(): return } } } -func (s *Service) ensureSubnetPeersAndSubscribe(tracker *subnetTracker) { - timeout := time.Duration(params.BeaconConfig().SecondsPerSlot) * time.Second - minPeers := flags.Get().MinimumPeersPerSubnet - logFields := tracker.logFields() - neededSubnets := computeAllNeededSubnets(s.cfg.clock.CurrentSlot(), tracker.getSubnetsToJoin, tracker.getSubnetsRequiringPeers) - - if err := s.subscribeToSubnets(tracker); err != nil { - if errors.Is(err, errInvalidDigest) { - log.WithFields(logFields).Debug("Digest is invalid, stopping subscription") - return - } - log.WithFields(logFields).WithError(err).Error("Could not subscribe to subnets") - return - } - - ctx, cancel := context.WithTimeout(s.ctx, timeout) - defer cancel() - if err := s.cfg.p2p.FindAndDialPeersWithSubnets(ctx, tracker.topicFormat, tracker.digest, minPeers, neededSubnets); err != nil && !errors.Is(err, context.DeadlineExceeded) { - log.WithFields(logFields).WithError(err).Debug("Could not find peers with subnets") +// trySubscribeSubnets attempts to subscribe to any missing subnets that we should be subscribed to. +// Only if initial sync is complete. +func (s *Service) trySubscribeSubnets(t *subnetTracker) { + subnetsToJoin := t.getSubnetsToJoin(s.cfg.clock.CurrentSlot()) + s.pruneNotWanted(t, subnetsToJoin) + for _, subnet := range t.missing(subnetsToJoin) { + // TODO: subscribeWithBase appends the protocol suffix, other methods don't. Make this consistent. + topic := t.fullTopic(subnet, "") + t.track(subnet, s.subscribeWithBase(topic, t.validate, t.handle)) } } -func (s *Service) logMinimumPeersPerSubnet(p subscribeParameters) { +func (s *Service) ensurePeers(ctx context.Context, tracker *subnetTracker) { + // Try once immediately so we don't have to wait until the next slot. + s.tryEnsurePeers(ctx, tracker) + + oncePerSlot := slots.NewSlotTicker(s.cfg.clock.GenesisTime(), params.BeaconConfig().SecondsPerSlot) + defer oncePerSlot.Done() + for { + select { + case <-oncePerSlot.C(): + s.tryEnsurePeers(ctx, tracker) + case <-ctx.Done(): + return + } + } +} + +func (s *Service) tryEnsurePeers(ctx context.Context, tracker *subnetTracker) { + timeout := (time.Duration(params.BeaconConfig().SecondsPerSlot) * time.Second) - 100*time.Millisecond + minPeers := flags.Get().MinimumPeersPerSubnet + neededSubnets := computeAllNeededSubnets(s.cfg.clock.CurrentSlot(), tracker.getSubnetsToJoin, tracker.getSubnetsRequiringPeers) + ctx, cancel := context.WithTimeout(ctx, timeout) + defer cancel() + err := s.cfg.p2p.FindAndDialPeersWithSubnets(ctx, tracker.topicFormat, tracker.nse.ForkDigest, minPeers, neededSubnets) + if err != nil && !errors.Is(err, context.DeadlineExceeded) { + log.WithFields(tracker.logFields()).WithError(err).Debug("Could not find peers with subnets") + } +} + +func (s *Service) logMinimumPeersPerSubnet(ctx context.Context, p subscribeParameters) { logFields := p.logFields() minimumPeersPerSubnet := flags.Get().MinimumPeersPerSubnet // Warn the user if we are not subscribed to enough peers in the subnets. 
@@ -603,7 +630,7 @@ func (s *Service) logMinimumPeersPerSubnet(p subscribeParameters) { isSubnetWithMissingPeers := false // Find new peers for wanted subnets if needed. for index := range subnetsToFindPeersIndex { - topic := fmt.Sprintf(p.topicFormat, p.digest, index) + topic := fmt.Sprintf(p.topicFormat, p.nse.ForkDigest, index) // Check if we have enough peers in the subnet. Skip if we do. if count := s.connectedPeersCount(topic); count < minimumPeersPerSubnet { @@ -617,7 +644,7 @@ func (s *Service) logMinimumPeersPerSubnet(p subscribeParameters) { if !isSubnetWithMissingPeers { log.WithFields(logFields).Debug("All subnets have enough connected peers") } - case <-s.ctx.Done(): + case <-ctx.Done(): return } } diff --git a/beacon-chain/sync/subscriber_test.go b/beacon-chain/sync/subscriber_test.go index 3f9f9c4c9f..fd6ac72e85 100644 --- a/beacon-chain/sync/subscriber_test.go +++ b/beacon-chain/sync/subscriber_test.go @@ -57,8 +57,9 @@ func TestSubscribe_ReceivesValidMessage(t *testing.T) { } markInitSyncComplete(t, &r) var err error - p2pService.Digest, err = r.currentForkDigest() require.NoError(t, err) + nse := params.GetNetworkScheduleEntry(r.cfg.clock.CurrentEpoch()) + p2pService.Digest = nse.ForkDigest topic := "/eth2/%x/voluntary_exit" var wg sync.WaitGroup wg.Add(1) @@ -71,7 +72,7 @@ func TestSubscribe_ReceivesValidMessage(t *testing.T) { } wg.Done() return nil - }, p2pService.Digest) + }, nse) r.markForChainStart() p2pService.ReceivePubSub(topic, &pb.SignedVoluntaryExit{Exit: &pb.VoluntaryExit{Epoch: 55}, Signature: make([]byte, fieldparams.BLSSignatureLength)}) @@ -105,14 +106,13 @@ func TestSubscribe_UnsubscribeTopic(t *testing.T) { subHandler: newSubTopicHandler(), } markInitSyncComplete(t, &r) - var err error - p2pService.Digest, err = r.currentForkDigest() - require.NoError(t, err) + nse := params.GetNetworkScheduleEntry(r.cfg.clock.CurrentEpoch()) + p2pService.Digest = nse.ForkDigest topic := "/eth2/%x/voluntary_exit" r.subscribe(topic, r.noopValidator, func(_ context.Context, msg proto.Message) error { return nil - }, p2pService.Digest) + }, nse) r.markForChainStart() fullTopic := fmt.Sprintf(topic, p2pService.Digest) + p2pService.Encoding().ProtocolSuffix() @@ -160,14 +160,13 @@ func TestSubscribe_ReceivesAttesterSlashing(t *testing.T) { topic := "/eth2/%x/attester_slashing" var wg sync.WaitGroup wg.Add(1) - var err error - p2pService.Digest, err = r.currentForkDigest() - require.NoError(t, err) + nse := params.GetNetworkScheduleEntry(r.cfg.clock.CurrentEpoch()) + p2pService.Digest = nse.ForkDigest r.subscribe(topic, r.noopValidator, func(ctx context.Context, msg proto.Message) error { require.NoError(t, r.attesterSlashingSubscriber(ctx, msg)) wg.Done() return nil - }, p2pService.Digest) + }, nse) beaconState, privKeys := util.DeterministicGenesisState(t, 64) chainService.State = beaconState r.markForChainStart() @@ -216,14 +215,13 @@ func TestSubscribe_ReceivesProposerSlashing(t *testing.T) { wg.Add(1) params.SetupTestConfigCleanup(t) params.OverrideBeaconConfig(params.MainnetConfig()) - var err error - p2pService.Digest, err = r.currentForkDigest() - require.NoError(t, err) + nse := params.GetNetworkScheduleEntry(r.cfg.clock.CurrentEpoch()) + p2pService.Digest = nse.ForkDigest r.subscribe(topic, r.noopValidator, func(ctx context.Context, msg proto.Message) error { require.NoError(t, r.proposerSlashingSubscriber(ctx, msg)) wg.Done() return nil - }, p2pService.Digest) + }, nse) beaconState, privKeys := util.DeterministicGenesisState(t, 64) chainService.State = beaconState 
r.markForChainStart() @@ -261,9 +259,8 @@ func TestSubscribe_HandlesPanic(t *testing.T) { } markInitSyncComplete(t, &r) - var err error - p.Digest, err = r.currentForkDigest() - require.NoError(t, err) + nse := params.GetNetworkScheduleEntry(r.cfg.clock.CurrentEpoch()) + p.Digest = nse.ForkDigest topic := p2p.GossipTypeMapping[reflect.TypeOf(&pb.SignedVoluntaryExit{})] var wg sync.WaitGroup @@ -272,7 +269,7 @@ func TestSubscribe_HandlesPanic(t *testing.T) { r.subscribe(topic, r.noopValidator, func(_ context.Context, msg proto.Message) error { defer wg.Done() panic("bad") - }, p.Digest) + }, nse) r.markForChainStart() p.ReceivePubSub(topic, &pb.SignedVoluntaryExit{Exit: &pb.VoluntaryExit{Epoch: 55}, Signature: make([]byte, fieldparams.BLSSignatureLength)}) @@ -298,12 +295,11 @@ func TestRevalidateSubscription_CorrectlyFormatsTopic(t *testing.T) { chainStarted: abool.New(), subHandler: newSubTopicHandler(), } - digest, err := r.currentForkDigest() - require.NoError(t, err) + nse := params.GetNetworkScheduleEntry(r.cfg.clock.CurrentEpoch()) params := subscribeParameters{ topicFormat: "/eth2/testing/%#x/committee%d", - digest: digest, + nse: nse, } tracker := newSubnetTracker(params) @@ -326,7 +322,7 @@ func TestRevalidateSubscription_CorrectlyFormatsTopic(t *testing.T) { require.NoError(t, err) tracker.track(c2, sub2) - r.pruneSubscriptions(tracker, map[uint64]bool{c2: true}) + r.pruneNotWanted(tracker, map[uint64]bool{c2: true}) require.LogsDoNotContain(t, hook, "Could not unregister topic validator") } @@ -483,6 +479,7 @@ func TestFilterSubnetPeers(t *testing.T) { chainStarted: abool.New(), subHandler: newSubTopicHandler(), } + markInitSyncComplete(t, &r) // Empty cache at the end of the test. defer cache.SubnetIDs.EmptyAllCaches() digest, err := r.currentForkDigest() @@ -548,16 +545,16 @@ func TestSubscribeWithSyncSubnets_DynamicOK(t *testing.T) { chainStarted: abool.New(), subHandler: newSubTopicHandler(), } + markInitSyncComplete(t, &r) // Empty cache at the end of the test. defer cache.SyncSubnetIDs.EmptyAllCaches() slot := r.cfg.clock.CurrentSlot() currEpoch := slots.ToEpoch(slot) cache.SyncSubnetIDs.AddSyncCommitteeSubnets([]byte("pubkey"), currEpoch, []uint64{0, 1}, 10*time.Second) - digest, err := r.currentForkDigest() - assert.NoError(t, err) + nse := params.GetNetworkScheduleEntry(r.cfg.clock.CurrentEpoch()) go r.subscribeWithParameters(subscribeParameters{ topicFormat: p2p.SyncCommitteeSubnetTopicFormat, - digest: digest, + nse: nse, getSubnetsToJoin: r.activeSyncSubnetIndices, }) time.Sleep(2 * time.Second) @@ -566,10 +563,10 @@ func TestSubscribeWithSyncSubnets_DynamicOK(t *testing.T) { for _, t := range r.cfg.p2p.PubSub().GetTopics() { topicMap[t] = true } - firstSub := fmt.Sprintf(p2p.SyncCommitteeSubnetTopicFormat, digest, 0) + r.cfg.p2p.Encoding().ProtocolSuffix() + firstSub := fmt.Sprintf(p2p.SyncCommitteeSubnetTopicFormat, nse.ForkDigest, 0) + r.cfg.p2p.Encoding().ProtocolSuffix() assert.Equal(t, true, topicMap[firstSub]) - secondSub := fmt.Sprintf(p2p.SyncCommitteeSubnetTopicFormat, digest, 1) + r.cfg.p2p.Encoding().ProtocolSuffix() + secondSub := fmt.Sprintf(p2p.SyncCommitteeSubnetTopicFormat, nse.ForkDigest, 1) + r.cfg.p2p.Encoding().ProtocolSuffix() assert.Equal(t, true, topicMap[secondSub]) cancel() } @@ -600,43 +597,39 @@ func TestSubscribeWithSyncSubnets_DynamicSwitchFork(t *testing.T) { // Empty cache at the end of the test. 
defer cache.SyncSubnetIDs.EmptyAllCaches() cache.SyncSubnetIDs.AddSyncCommitteeSubnets([]byte("pubkey"), 0, []uint64{0, 1}, 10*time.Second) - digest := params.ForkDigest(r.cfg.clock.CurrentEpoch()) - version, e, err := params.ForkDataFromDigest(digest) - require.NoError(t, err) - require.Equal(t, [4]byte(params.BeaconConfig().DenebForkVersion), version) - require.Equal(t, params.BeaconConfig().DenebForkEpoch, e) + nse := params.GetNetworkScheduleEntry(r.cfg.clock.CurrentEpoch()) + require.Equal(t, [4]byte(params.BeaconConfig().DenebForkVersion), nse.ForkVersion) + require.Equal(t, params.BeaconConfig().DenebForkEpoch, nse.Epoch) sp := newSubnetTracker(subscribeParameters{ topicFormat: p2p.SyncCommitteeSubnetTopicFormat, - digest: digest, + nse: nse, getSubnetsToJoin: r.activeSyncSubnetIndices, }) - require.NoError(t, r.subscribeToSubnets(sp)) + r.trySubscribeSubnets(sp) assert.Equal(t, 2, len(r.cfg.p2p.PubSub().GetTopics())) topicMap := map[string]bool{} for _, t := range r.cfg.p2p.PubSub().GetTopics() { topicMap[t] = true } - firstSub := fmt.Sprintf(p2p.SyncCommitteeSubnetTopicFormat, digest, 0) + r.cfg.p2p.Encoding().ProtocolSuffix() + firstSub := fmt.Sprintf(p2p.SyncCommitteeSubnetTopicFormat, nse.ForkDigest, 0) + r.cfg.p2p.Encoding().ProtocolSuffix() assert.Equal(t, true, topicMap[firstSub]) - secondSub := fmt.Sprintf(p2p.SyncCommitteeSubnetTopicFormat, digest, 1) + r.cfg.p2p.Encoding().ProtocolSuffix() + secondSub := fmt.Sprintf(p2p.SyncCommitteeSubnetTopicFormat, nse.ForkDigest, 1) + r.cfg.p2p.Encoding().ProtocolSuffix() assert.Equal(t, true, topicMap[secondSub]) electraSlot, err := slots.EpochStart(params.BeaconConfig().ElectraForkEpoch) require.NoError(t, err) mockNow.SetSlot(t, clock, electraSlot) - digest = params.ForkDigest(r.cfg.clock.CurrentEpoch()) - version, e, err = params.ForkDataFromDigest(digest) - require.NoError(t, err) - require.Equal(t, [4]byte(params.BeaconConfig().ElectraForkVersion), version) - require.Equal(t, params.BeaconConfig().ElectraForkEpoch, e) + nse = params.GetNetworkScheduleEntry(r.cfg.clock.CurrentEpoch()) + require.Equal(t, [4]byte(params.BeaconConfig().ElectraForkVersion), nse.ForkVersion) + require.Equal(t, params.BeaconConfig().ElectraForkEpoch, nse.Epoch) - sp.digest = digest + sp.nse = nse // clear the cache and re-subscribe to subnets. // this should result in the subscriptions being removed cache.SyncSubnetIDs.EmptyAllCaches() - require.NoError(t, r.subscribeToSubnets(sp)) + r.trySubscribeSubnets(sp) assert.Equal(t, 0, len(r.cfg.p2p.PubSub().GetTopics())) } diff --git a/changelog/kasey_idempotent-registration.md b/changelog/kasey_idempotent-registration.md new file mode 100644 index 0000000000..afd2085f95 --- /dev/null +++ b/changelog/kasey_idempotent-registration.md @@ -0,0 +1,2 @@ +### Fixed +- Fixed issue #15738 where separate goroutines assume sole responsibility for topic registration. 
From 71f05b597f337009010e9e3279c0fdcd6c0c58bf Mon Sep 17 00:00:00 2001 From: kasey <489222+kasey@users.noreply.github.com> Date: Tue, 7 Oct 2025 23:02:05 -0500 Subject: [PATCH 003/103] Use NetworkSchedule config to determine max blobs at epoch (#15714) Co-authored-by: Kasey Kirkham --- api/client/builder/BUILD.bazel | 1 + api/client/builder/client_test.go | 10 +- beacon-chain/blockchain/process_block_test.go | 40 ++--- beacon-chain/blockchain/service_test.go | 5 +- .../core/peerdas/reconstruction_test.go | 13 +- .../core/peerdas/verification_test.go | 31 ++-- beacon-chain/das/availability_blobs_test.go | 78 ++++----- beacon-chain/das/blob_cache_test.go | 2 +- beacon-chain/db/filesystem/blob_test.go | 14 +- beacon-chain/db/filesystem/cache_test.go | 24 +-- beacon-chain/db/filesystem/iteration_test.go | 40 ++--- beacon-chain/db/filesystem/migration_test.go | 29 ++-- beacon-chain/db/filesystem/mock.go | 4 +- beacon-chain/db/filesystem/pruner_test.go | 25 +-- beacon-chain/p2p/BUILD.bazel | 1 + beacon-chain/p2p/pubsub.go | 4 +- beacon-chain/p2p/topics.go | 136 ++++++++++++++++ beacon-chain/p2p/topics_test.go | 70 ++++++++ beacon-chain/rpc/eth/beacon/handlers_test.go | 145 ++++++----------- beacon-chain/rpc/eth/blob/BUILD.bazel | 1 - beacon-chain/rpc/eth/blob/handlers_test.go | 58 ++++--- beacon-chain/rpc/lookup/blocker.go | 12 +- beacon-chain/rpc/lookup/blocker_test.go | 79 ++------- beacon-chain/startup/clock.go | 12 ++ beacon-chain/sync/backfill/blobs_test.go | 11 +- beacon-chain/sync/blobs_test.go | 63 +++----- .../sync/initial-sync/blocks_fetcher_test.go | 94 ++++++----- .../sync/initial-sync/service_test.go | 5 +- .../sync/rpc_beacon_blocks_by_root_test.go | 2 + .../sync/rpc_blob_sidecars_by_range.go | 4 +- .../sync/rpc_blob_sidecars_by_range_test.go | 64 +++----- .../sync/rpc_blob_sidecars_by_root.go | 2 +- .../sync/rpc_blob_sidecars_by_root_test.go | 36 +++-- beacon-chain/sync/rpc_send_request_test.go | 22 +-- beacon-chain/sync/subscriber.go | 2 +- beacon-chain/sync/verify/BUILD.bazel | 1 + beacon-chain/sync/verify/blob_test.go | 8 +- beacon-chain/verification/blob_test.go | 6 +- .../kasey_max-blobs-use-network-schedule.md | 2 + config/params/BUILD.bazel | 1 + config/params/config.go | 111 ++++--------- config/params/config_test.go | 150 ++++++++++-------- config/params/fork.go | 25 +-- config/params/fork_test.go | 14 +- config/params/testutils.go | 29 ++++ .../shared/common/forkchoice/runner.go | 2 + .../merkle_proof/single_merkle_proof.go | 3 +- .../shared/common/operations/BUILD.bazel | 2 + .../common/operations/execution_payload.go | 6 + .../spectest/shared/deneb/sanity/BUILD.bazel | 2 + .../shared/deneb/sanity/block_processing.go | 4 + testing/util/BUILD.bazel | 1 + testing/util/deneb.go | 10 +- testing/util/deneb_test.go | 3 +- testing/util/electra_state.go | 24 ++- testing/util/slot.go | 15 ++ 56 files changed, 874 insertions(+), 684 deletions(-) create mode 100644 beacon-chain/p2p/topics_test.go create mode 100644 changelog/kasey_max-blobs-use-network-schedule.md create mode 100644 testing/util/slot.go diff --git a/api/client/builder/BUILD.bazel b/api/client/builder/BUILD.bazel index 4f5d156b43..c4dd47b4fe 100644 --- a/api/client/builder/BUILD.bazel +++ b/api/client/builder/BUILD.bazel @@ -59,6 +59,7 @@ go_test( "//runtime/version:go_default_library", "//testing/assert:go_default_library", "//testing/require:go_default_library", + "//testing/util:go_default_library", "@com_github_ethereum_go_ethereum//common/hexutil:go_default_library", 
"@com_github_pkg_errors//:go_default_library", "@com_github_prysmaticlabs_go_bitfield//:go_default_library", diff --git a/api/client/builder/client_test.go b/api/client/builder/client_test.go index d6ad8f9ff8..13f3d65483 100644 --- a/api/client/builder/client_test.go +++ b/api/client/builder/client_test.go @@ -12,6 +12,7 @@ import ( "github.com/OffchainLabs/prysm/v6/api" "github.com/OffchainLabs/prysm/v6/api/server/structs" + "github.com/OffchainLabs/prysm/v6/testing/util" "github.com/OffchainLabs/prysm/v6/config/params" "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" @@ -170,8 +171,11 @@ func TestClient_RegisterValidator(t *testing.T) { func TestClient_GetHeader(t *testing.T) { ctx := t.Context() - expectedPath := "/eth/v1/builder/header/23/0xcf8e0d4e9587369b2301d0790347320302cc0943d5a1884560367e8208d920f2/0x93247f2209abcacf57b75a51dafae777f9dd38bc7053d1af526f220a7489a6d3a2753e5f3e8b1cfe39b56f43611df74a" - var slot primitives.Slot = 23 + ds := util.SlotAtEpoch(t, params.BeaconConfig().DenebForkEpoch) + es := util.SlotAtEpoch(t, params.BeaconConfig().ElectraForkEpoch) + expectedPath := "/eth/v1/builder/header/%d/0xcf8e0d4e9587369b2301d0790347320302cc0943d5a1884560367e8208d920f2/0x93247f2209abcacf57b75a51dafae777f9dd38bc7053d1af526f220a7489a6d3a2753e5f3e8b1cfe39b56f43611df74a" + expectedPath = fmt.Sprintf(expectedPath, ds) + var slot primitives.Slot = ds parentHash := ezDecode(t, "0xcf8e0d4e9587369b2301d0790347320302cc0943d5a1884560367e8208d920f2") pubkey := ezDecode(t, "0x93247f2209abcacf57b75a51dafae777f9dd38bc7053d1af526f220a7489a6d3a2753e5f3e8b1cfe39b56f43611df74a") t.Run("server error", func(t *testing.T) { @@ -533,7 +537,7 @@ func TestClient_GetHeader(t *testing.T) { require.Equal(t, expectedPath, r.URL.Path) epr := &ExecHeaderResponseElectra{} require.NoError(t, json.Unmarshal([]byte(testExampleHeaderResponseElectra), epr)) - pro, err := epr.ToProto(100) + pro, err := epr.ToProto(es) require.NoError(t, err) ssz, err := pro.MarshalSSZ() require.NoError(t, err) diff --git a/beacon-chain/blockchain/process_block_test.go b/beacon-chain/blockchain/process_block_test.go index 8a82476385..40ad3b9198 100644 --- a/beacon-chain/blockchain/process_block_test.go +++ b/beacon-chain/blockchain/process_block_test.go @@ -2413,6 +2413,8 @@ func driftGenesisTime(s *Service, slot primitives.Slot, delay time.Duration) { } func TestMissingBlobIndices(t *testing.T) { + ds := util.SlotAtEpoch(t, params.BeaconConfig().DenebForkEpoch) + maxBlobs := params.BeaconConfig().MaxBlobsPerBlock(ds) cases := []struct { name string expected [][]byte @@ -2426,23 +2428,23 @@ func TestMissingBlobIndices(t *testing.T) { }, { name: "expected exceeds max", - expected: fakeCommitments(params.BeaconConfig().MaxBlobsPerBlock(0) + 1), + expected: fakeCommitments(maxBlobs + 1), err: errMaxBlobsExceeded, }, { name: "first missing", - expected: fakeCommitments(params.BeaconConfig().MaxBlobsPerBlock(0)), + expected: fakeCommitments(maxBlobs), present: []uint64{1, 2, 3, 4, 5}, result: fakeResult([]uint64{0}), }, { name: "all missing", - expected: fakeCommitments(params.BeaconConfig().MaxBlobsPerBlock(0)), + expected: fakeCommitments(maxBlobs), result: fakeResult([]uint64{0, 1, 2, 3, 4, 5}), }, { name: "none missing", - expected: fakeCommitments(params.BeaconConfig().MaxBlobsPerBlock(0)), + expected: fakeCommitments(maxBlobs), present: []uint64{0, 1, 2, 3, 4, 5}, result: fakeResult([]uint64{}), }, @@ -2475,8 +2477,8 @@ func TestMissingBlobIndices(t *testing.T) { 
for _, c := range cases { bm, bs := filesystem.NewEphemeralBlobStorageWithMocker(t) t.Run(c.name, func(t *testing.T) { - require.NoError(t, bm.CreateFakeIndices(c.root, 0, c.present...)) - missing, err := missingBlobIndices(bs, c.root, c.expected, 0) + require.NoError(t, bm.CreateFakeIndices(c.root, ds, c.present...)) + missing, err := missingBlobIndices(bs, c.root, c.expected, ds) if c.err != nil { require.ErrorIs(t, err, c.err) return @@ -2904,22 +2906,21 @@ type testIsAvailableParams struct { columnsToSave []uint64 } -func testIsAvailableSetup(t *testing.T, params testIsAvailableParams) (context.Context, context.CancelFunc, *Service, [fieldparams.RootLength]byte, interfaces.SignedBeaconBlock) { +func testIsAvailableSetup(t *testing.T, p testIsAvailableParams) (context.Context, context.CancelFunc, *Service, [fieldparams.RootLength]byte, interfaces.SignedBeaconBlock) { ctx, cancel := context.WithCancel(t.Context()) dataColumnStorage := filesystem.NewEphemeralDataColumnStorage(t) - options := append(params.options, WithDataColumnStorage(dataColumnStorage)) + options := append(p.options, WithDataColumnStorage(dataColumnStorage)) service, _ := minimalTestService(t, options...) + fs := util.SlotAtEpoch(t, params.BeaconConfig().FuluForkEpoch) - genesisState, secretKeys := util.DeterministicGenesisStateElectra(t, 32 /*validator count*/) - - err := service.saveGenesisData(ctx, genesisState) - require.NoError(t, err) + genesisState, secretKeys := util.DeterministicGenesisStateElectra(t, 32, util.WithElectraStateSlot(fs)) + require.NoError(t, service.saveGenesisData(ctx, genesisState)) conf := util.DefaultBlockGenConfig() - conf.NumBlobKzgCommitments = params.blobKzgCommitmentsCount + conf.NumBlobKzgCommitments = p.blobKzgCommitmentsCount - signedBeaconBlock, err := util.GenerateFullBlockFulu(genesisState, secretKeys, conf, 10 /*block slot*/) + signedBeaconBlock, err := util.GenerateFullBlockFulu(genesisState, secretKeys, conf, fs+1) require.NoError(t, err) block := signedBeaconBlock.Block @@ -2929,8 +2930,8 @@ func testIsAvailableSetup(t *testing.T, params testIsAvailableParams) (context.C root, err := block.HashTreeRoot() require.NoError(t, err) - dataColumnsParams := make([]util.DataColumnParam, 0, len(params.columnsToSave)) - for _, i := range params.columnsToSave { + dataColumnsParams := make([]util.DataColumnParam, 0, len(p.columnsToSave)) + for _, i := range p.columnsToSave { dataColumnParam := util.DataColumnParam{ Index: i, Slot: block.Slot, @@ -2954,8 +2955,12 @@ func testIsAvailableSetup(t *testing.T, params testIsAvailableParams) (context.C } func TestIsDataAvailable(t *testing.T) { + params.SetupTestConfigCleanup(t) + cfg := params.BeaconConfig() + cfg.AltairForkEpoch, cfg.BellatrixForkEpoch, cfg.CapellaForkEpoch, cfg.DenebForkEpoch, cfg.ElectraForkEpoch, cfg.FuluForkEpoch = 0, 0, 0, 0, 0, 0 + params.OverrideBeaconConfig(cfg) t.Run("Fulu - out of retention window", func(t *testing.T) { - params := testIsAvailableParams{options: []Option{WithGenesisTime(time.Unix(0, 0))}} + params := testIsAvailableParams{} ctx, _, service, root, signed := testIsAvailableSetup(t, params) roBlock, err := consensusblocks.NewROBlockWithRoot(signed, root) @@ -2972,7 +2977,6 @@ func TestIsDataAvailable(t *testing.T) { err = service.isDataAvailable(ctx, roBlock) require.NoError(t, err) }) - t.Run("Fulu - more than half of the columns in custody", func(t *testing.T) { minimumColumnsCountToReconstruct := peerdas.MinimumColumnCountToReconstruct() indices := make([]uint64, 0, minimumColumnsCountToReconstruct) 
diff --git a/beacon-chain/blockchain/service_test.go b/beacon-chain/blockchain/service_test.go index 19c951e1ab..9526e30d32 100644 --- a/beacon-chain/blockchain/service_test.go +++ b/beacon-chain/blockchain/service_test.go @@ -562,8 +562,9 @@ func TestNotifyIndex(t *testing.T) { var root [32]byte copy(root[:], "exampleRoot") + ds := util.SlotAtEpoch(t, params.BeaconConfig().DenebForkEpoch) // Test notifying a new index - bn.notifyIndex(root, 1, 1) + bn.notifyIndex(root, 1, ds) if !bn.seenIndex[root][1] { t.Errorf("Index was not marked as seen") } @@ -580,7 +581,7 @@ func TestNotifyIndex(t *testing.T) { } // Test notifying a new index again - bn.notifyIndex(root, 2, 1) + bn.notifyIndex(root, 2, ds) if !bn.seenIndex[root][2] { t.Errorf("Index was not marked as seen") } diff --git a/beacon-chain/core/peerdas/reconstruction_test.go b/beacon-chain/core/peerdas/reconstruction_test.go index 80108fd4a9..de2791f9e8 100644 --- a/beacon-chain/core/peerdas/reconstruction_test.go +++ b/beacon-chain/core/peerdas/reconstruction_test.go @@ -125,11 +125,12 @@ func TestReconstructDataColumnSidecars(t *testing.T) { } func TestReconstructBlobs(t *testing.T) { - // Start the trusted setup. - err := kzg.Start() - require.NoError(t, err) + params.SetupTestConfigCleanup(t) + params.BeaconConfig().FuluForkEpoch = params.BeaconConfig().ElectraForkEpoch + 4096*2 + require.NoError(t, kzg.Start()) var emptyBlock blocks.ROBlock + fs := util.SlotAtEpoch(t, params.BeaconConfig().FuluForkEpoch) t.Run("no index", func(t *testing.T) { actual, err := peerdas.ReconstructBlobs(emptyBlock, nil, nil) @@ -190,10 +191,10 @@ func TestReconstructBlobs(t *testing.T) { }) t.Run("not committed to the same block", func(t *testing.T) { - _, _, verifiedRoSidecars := util.GenerateTestFuluBlockWithSidecars(t, 3, util.WithParentRoot([fieldparams.RootLength]byte{1})) - roBlock, _, _ := util.GenerateTestFuluBlockWithSidecars(t, 3, util.WithParentRoot([fieldparams.RootLength]byte{2})) + _, _, verifiedRoSidecars := util.GenerateTestFuluBlockWithSidecars(t, 3, util.WithParentRoot([fieldparams.RootLength]byte{1}), util.WithSlot(fs)) + roBlock, _, _ := util.GenerateTestFuluBlockWithSidecars(t, 3, util.WithParentRoot([fieldparams.RootLength]byte{2}), util.WithSlot(fs)) - _, err = peerdas.ReconstructBlobs(roBlock, verifiedRoSidecars, []int{0}) + _, err := peerdas.ReconstructBlobs(roBlock, verifiedRoSidecars, []int{0}) require.ErrorContains(t, peerdas.ErrRootMismatch.Error(), err) }) diff --git a/beacon-chain/core/peerdas/verification_test.go b/beacon-chain/core/peerdas/verification_test.go index 5f3714aafc..8f9d89859a 100644 --- a/beacon-chain/core/peerdas/verification_test.go +++ b/beacon-chain/core/peerdas/verification_test.go @@ -16,61 +16,60 @@ func TestDataColumnsAlignWithBlock(t *testing.T) { err := kzg.Start() require.NoError(t, err) + params.BeaconConfig().FuluForkEpoch = params.BeaconConfig().ElectraForkEpoch + 4096*2 + fs := util.SlotAtEpoch(t, params.BeaconConfig().ElectraForkEpoch) + require.NoError(t, err) + fuluMax := params.BeaconConfig().MaxBlobsPerBlock(fs) t.Run("pre fulu", func(t *testing.T) { - block, _ := util.GenerateTestElectraBlockWithSidecar(t, [fieldparams.RootLength]byte{}, 0, 0) + block, _ := util.GenerateTestElectraBlockWithSidecar(t, [fieldparams.RootLength]byte{}, fs, 0) err := peerdas.DataColumnsAlignWithBlock(block, nil) require.NoError(t, err) }) - t.Run("too many commitmnets", func(t *testing.T) { - params.SetupTestConfigCleanup(t) - config := params.BeaconConfig() - config.BlobSchedule = 
[]params.BlobScheduleEntry{{}} - params.OverrideBeaconConfig(config) - - block, _, _ := util.GenerateTestFuluBlockWithSidecars(t, 3) + t.Run("too many commitments", func(t *testing.T) { + block, _, _ := util.GenerateTestFuluBlockWithSidecars(t, fuluMax+1, util.WithSlot(fs)) err := peerdas.DataColumnsAlignWithBlock(block, nil) require.ErrorIs(t, err, peerdas.ErrTooManyCommitments) }) t.Run("root mismatch", func(t *testing.T) { - _, sidecars, _ := util.GenerateTestFuluBlockWithSidecars(t, 2) - block, _, _ := util.GenerateTestFuluBlockWithSidecars(t, 0) + _, sidecars, _ := util.GenerateTestFuluBlockWithSidecars(t, 2, util.WithSlot(fs)) + block, _, _ := util.GenerateTestFuluBlockWithSidecars(t, 0, util.WithSlot(fs)) err := peerdas.DataColumnsAlignWithBlock(block, sidecars) require.ErrorIs(t, err, peerdas.ErrRootMismatch) }) t.Run("column size mismatch", func(t *testing.T) { - block, sidecars, _ := util.GenerateTestFuluBlockWithSidecars(t, 2) + block, sidecars, _ := util.GenerateTestFuluBlockWithSidecars(t, 2, util.WithSlot(fs)) sidecars[0].Column = [][]byte{} err := peerdas.DataColumnsAlignWithBlock(block, sidecars) require.ErrorIs(t, err, peerdas.ErrBlockColumnSizeMismatch) }) t.Run("KZG commitments size mismatch", func(t *testing.T) { - block, sidecars, _ := util.GenerateTestFuluBlockWithSidecars(t, 2) + block, sidecars, _ := util.GenerateTestFuluBlockWithSidecars(t, 2, util.WithSlot(fs)) sidecars[0].KzgCommitments = [][]byte{} err := peerdas.DataColumnsAlignWithBlock(block, sidecars) require.ErrorIs(t, err, peerdas.ErrBlockColumnSizeMismatch) }) t.Run("KZG proofs mismatch", func(t *testing.T) { - block, sidecars, _ := util.GenerateTestFuluBlockWithSidecars(t, 2) + block, sidecars, _ := util.GenerateTestFuluBlockWithSidecars(t, 2, util.WithSlot(fs)) sidecars[0].KzgProofs = [][]byte{} err := peerdas.DataColumnsAlignWithBlock(block, sidecars) require.ErrorIs(t, err, peerdas.ErrBlockColumnSizeMismatch) }) t.Run("commitment mismatch", func(t *testing.T) { - block, _, _ := util.GenerateTestFuluBlockWithSidecars(t, 2) - _, alteredSidecars, _ := util.GenerateTestFuluBlockWithSidecars(t, 2) + block, _, _ := util.GenerateTestFuluBlockWithSidecars(t, 2, util.WithSlot(fs)) + _, alteredSidecars, _ := util.GenerateTestFuluBlockWithSidecars(t, 2, util.WithSlot(fs)) alteredSidecars[1].KzgCommitments[0][0]++ // Overflow is OK err := peerdas.DataColumnsAlignWithBlock(block, alteredSidecars) require.ErrorIs(t, err, peerdas.ErrCommitmentMismatch) }) t.Run("nominal", func(t *testing.T) { - block, sidecars, _ := util.GenerateTestFuluBlockWithSidecars(t, 2) + block, sidecars, _ := util.GenerateTestFuluBlockWithSidecars(t, 2, util.WithSlot(fs)) err := peerdas.DataColumnsAlignWithBlock(block, sidecars) require.NoError(t, err) }) diff --git a/beacon-chain/das/availability_blobs_test.go b/beacon-chain/das/availability_blobs_test.go index 32baf54c9d..ab49e74489 100644 --- a/beacon-chain/das/availability_blobs_test.go +++ b/beacon-chain/das/availability_blobs_test.go @@ -18,13 +18,16 @@ import ( ) func Test_commitmentsToCheck(t *testing.T) { + params.SetupTestConfigCleanup(t) + params.BeaconConfig().FuluForkEpoch = params.BeaconConfig().ElectraForkEpoch + 4096*2 + fulu := primitives.Slot(params.BeaconConfig().FuluForkEpoch) * params.BeaconConfig().SlotsPerEpoch windowSlots, err := slots.EpochEnd(params.BeaconConfig().MinEpochsForBlobsSidecarsRequest) require.NoError(t, err) - commits := [][]byte{ - bytesutil.PadTo([]byte("a"), 48), - bytesutil.PadTo([]byte("b"), 48), - bytesutil.PadTo([]byte("c"), 48), - 
bytesutil.PadTo([]byte("d"), 48), + windowSlots = windowSlots + primitives.Slot(params.BeaconConfig().FuluForkEpoch) + maxBlobs := params.LastNetworkScheduleEntry().MaxBlobsPerBlock + commits := make([][]byte, maxBlobs+1) + for i := 0; i < len(commits); i++ { + commits[i] = bytesutil.PadTo([]byte{byte(i)}, 48) } cases := []struct { name string @@ -47,41 +50,40 @@ func Test_commitmentsToCheck(t *testing.T) { { name: "commitments within da", block: func(t *testing.T) blocks.ROBlock { - d := util.NewBeaconBlockDeneb() - d.Block.Body.BlobKzgCommitments = commits - d.Block.Slot = 100 + d := util.NewBeaconBlockFulu() + d.Block.Body.BlobKzgCommitments = commits[:maxBlobs] + d.Block.Slot = fulu + 100 sb, err := blocks.NewSignedBeaconBlock(d) require.NoError(t, err) rb, err := blocks.NewROBlock(sb) require.NoError(t, err) return rb }, - commits: commits, - slot: 100, + commits: commits[:maxBlobs], + slot: fulu + 100, }, { name: "commitments outside da", block: func(t *testing.T) blocks.ROBlock { - d := util.NewBeaconBlockDeneb() + d := util.NewBeaconBlockFulu() + d.Block.Slot = fulu // block is from slot 0, "current slot" is window size +1 (so outside the window) - d.Block.Body.BlobKzgCommitments = commits + d.Block.Body.BlobKzgCommitments = commits[:maxBlobs] sb, err := blocks.NewSignedBeaconBlock(d) require.NoError(t, err) rb, err := blocks.NewROBlock(sb) require.NoError(t, err) return rb }, - slot: windowSlots + 1, + slot: fulu + windowSlots + 1, }, { name: "excessive commitments", block: func(t *testing.T) blocks.ROBlock { - d := util.NewBeaconBlockDeneb() - d.Block.Slot = 100 + d := util.NewBeaconBlockFulu() + d.Block.Slot = fulu + 100 // block is from slot 0, "current slot" is window size +1 (so outside the window) d.Block.Body.BlobKzgCommitments = commits - // Double the number of commitments, assert that this is over the limit - d.Block.Body.BlobKzgCommitments = append(commits, d.Block.Body.BlobKzgCommitments...) 
sb, err := blocks.NewSignedBeaconBlock(d) require.NoError(t, err) rb, err := blocks.NewROBlock(sb) @@ -115,67 +117,69 @@ func Test_commitmentsToCheck(t *testing.T) { func TestLazilyPersistent_Missing(t *testing.T) { ctx := t.Context() store := filesystem.NewEphemeralBlobStorage(t) + ds := util.SlotAtEpoch(t, params.BeaconConfig().DenebForkEpoch) - blk, blobSidecars := util.GenerateTestDenebBlockWithSidecar(t, [32]byte{}, 1, 3) + blk, blobSidecars := util.GenerateTestDenebBlockWithSidecar(t, [32]byte{}, ds, 3) mbv := &mockBlobBatchVerifier{t: t, scs: blobSidecars} as := NewLazilyPersistentStore(store, mbv) // Only one commitment persisted, should return error with other indices - require.NoError(t, as.Persist(1, blobSidecars[2])) - err := as.IsDataAvailable(ctx, 1, blk) + require.NoError(t, as.Persist(ds, blobSidecars[2])) + err := as.IsDataAvailable(ctx, ds, blk) require.ErrorIs(t, err, errMissingSidecar) // All but one persisted, return missing idx - require.NoError(t, as.Persist(1, blobSidecars[0])) - err = as.IsDataAvailable(ctx, 1, blk) + require.NoError(t, as.Persist(ds, blobSidecars[0])) + err = as.IsDataAvailable(ctx, ds, blk) require.ErrorIs(t, err, errMissingSidecar) // All persisted, return nil - require.NoError(t, as.Persist(1, blobSidecars...)) + require.NoError(t, as.Persist(ds, blobSidecars...)) - require.NoError(t, as.IsDataAvailable(ctx, 1, blk)) + require.NoError(t, as.IsDataAvailable(ctx, ds, blk)) } func TestLazilyPersistent_Mismatch(t *testing.T) { ctx := t.Context() store := filesystem.NewEphemeralBlobStorage(t) + ds := util.SlotAtEpoch(t, params.BeaconConfig().DenebForkEpoch) - blk, blobSidecars := util.GenerateTestDenebBlockWithSidecar(t, [32]byte{}, 1, 3) + blk, blobSidecars := util.GenerateTestDenebBlockWithSidecar(t, [32]byte{}, ds, 3) mbv := &mockBlobBatchVerifier{t: t, err: errors.New("kzg check should not run")} blobSidecars[0].KzgCommitment = bytesutil.PadTo([]byte("nope"), 48) as := NewLazilyPersistentStore(store, mbv) // Only one commitment persisted, should return error with other indices - require.NoError(t, as.Persist(1, blobSidecars[0])) - err := as.IsDataAvailable(ctx, 1, blk) + require.NoError(t, as.Persist(ds, blobSidecars[0])) + err := as.IsDataAvailable(ctx, ds, blk) require.NotNil(t, err) require.ErrorIs(t, err, errCommitmentMismatch) } func TestLazyPersistOnceCommitted(t *testing.T) { - _, blobSidecars := util.GenerateTestDenebBlockWithSidecar(t, [32]byte{}, 1, 6) + ds := util.SlotAtEpoch(t, params.BeaconConfig().DenebForkEpoch) + _, blobSidecars := util.GenerateTestDenebBlockWithSidecar(t, [32]byte{}, ds, 6) as := NewLazilyPersistentStore(filesystem.NewEphemeralBlobStorage(t), &mockBlobBatchVerifier{}) // stashes as expected - require.NoError(t, as.Persist(1, blobSidecars...)) + require.NoError(t, as.Persist(ds, blobSidecars...)) // ignores duplicates - require.ErrorIs(t, as.Persist(1, blobSidecars...), ErrDuplicateSidecar) + require.ErrorIs(t, as.Persist(ds, blobSidecars...), ErrDuplicateSidecar) // ignores index out of bound blobSidecars[0].Index = 6 - require.ErrorIs(t, as.Persist(1, blobSidecars[0]), errIndexOutOfBounds) - - _, moreBlobSidecars := util.GenerateTestDenebBlockWithSidecar(t, [32]byte{}, 1, 4) + require.ErrorIs(t, as.Persist(ds, blobSidecars[0]), errIndexOutOfBounds) + _, moreBlobSidecars := util.GenerateTestDenebBlockWithSidecar(t, [32]byte{}, ds, 4) // ignores sidecars before the retention period - slotOOB, err := slots.EpochStart(params.BeaconConfig().MinEpochsForBlobsSidecarsRequest) - require.NoError(t, err) - 
require.NoError(t, as.Persist(32+slotOOB, moreBlobSidecars[0])) + slotOOB := util.SlotAtEpoch(t, params.BeaconConfig().MinEpochsForBlobsSidecarsRequest) + slotOOB += ds + 32 + require.NoError(t, as.Persist(slotOOB, moreBlobSidecars[0])) // doesn't ignore new sidecars with a different block root - require.NoError(t, as.Persist(1, moreBlobSidecars...)) + require.NoError(t, as.Persist(ds, moreBlobSidecars...)) } type mockBlobBatchVerifier struct { diff --git a/beacon-chain/das/blob_cache_test.go b/beacon-chain/das/blob_cache_test.go index 56d2c76c6e..615f4112cf 100644 --- a/beacon-chain/das/blob_cache_test.go +++ b/beacon-chain/das/blob_cache_test.go @@ -39,7 +39,7 @@ func filterTestCaseSetup(slot primitives.Slot, nBlobs int, onDisk []int, numExpe entry := &blobCacheEntry{} if len(onDisk) > 0 { od := map[[32]byte][]int{blk.Root(): onDisk} - sumz := filesystem.NewMockBlobStorageSummarizer(t, od) + sumz := filesystem.NewMockBlobStorageSummarizer(t, slots.ToEpoch(slot), od) sum := sumz.Summary(blk.Root()) entry.setDiskSummary(sum) } diff --git a/beacon-chain/db/filesystem/blob_test.go b/beacon-chain/db/filesystem/blob_test.go index 0079bd8884..eca59408b4 100644 --- a/beacon-chain/db/filesystem/blob_test.go +++ b/beacon-chain/db/filesystem/blob_test.go @@ -21,7 +21,8 @@ import ( ) func TestBlobStorage_SaveBlobData(t *testing.T) { - _, sidecars := util.GenerateTestDenebBlockWithSidecar(t, [32]byte{}, 1, params.BeaconConfig().MaxBlobsPerBlock(1)) + ds := util.SlotAtEpoch(t, params.BeaconConfig().DenebForkEpoch) + _, sidecars := util.GenerateTestDenebBlockWithSidecar(t, [32]byte{}, ds, params.BeaconConfig().MaxBlobsPerBlock(ds)) testSidecars := verification.FakeVerifySliceForTest(t, sidecars) t.Run("no error for duplicate", func(t *testing.T) { @@ -127,21 +128,22 @@ func TestBlobStorage_SaveBlobData(t *testing.T) { } func TestBlobIndicesBounds(t *testing.T) { + es := util.SlotAtEpoch(t, params.BeaconConfig().ElectraForkEpoch) fs := afero.NewMemMapFs() root := [32]byte{} - okIdx := uint64(params.BeaconConfig().MaxBlobsPerBlock(0)) - 1 - writeFakeSSZ(t, fs, root, 0, okIdx) + okIdx := uint64(params.BeaconConfig().MaxBlobsPerBlock(es)) - 1 + writeFakeSSZ(t, fs, root, es, okIdx) bs := NewWarmedEphemeralBlobStorageUsingFs(t, fs, WithLayout(LayoutNameByEpoch)) indices := bs.Summary(root).mask - expected := make([]bool, params.BeaconConfig().MaxBlobsPerBlock(0)) + expected := make([]bool, params.BeaconConfig().MaxBlobsPerBlock(es)) expected[okIdx] = true for i := range expected { require.Equal(t, expected[i], indices[i]) } - oobIdx := uint64(params.BeaconConfig().MaxBlobsPerBlock(0)) - writeFakeSSZ(t, fs, root, 0, oobIdx) + oobIdx := uint64(params.BeaconConfig().MaxBlobsPerBlock(es)) + writeFakeSSZ(t, fs, root, es, oobIdx) // This now fails at cache warmup time. 
require.ErrorIs(t, warmCache(bs.layout, bs.cache), errIndexOutOfBounds) } diff --git a/beacon-chain/db/filesystem/cache_test.go b/beacon-chain/db/filesystem/cache_test.go index 0d45890cce..d5da4a1ec7 100644 --- a/beacon-chain/db/filesystem/cache_test.go +++ b/beacon-chain/db/filesystem/cache_test.go @@ -6,14 +6,17 @@ import ( "github.com/OffchainLabs/prysm/v6/config/params" "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v6/testing/util" ) func TestSlotByRoot_Summary(t *testing.T) { - noneSet := make([]bool, params.BeaconConfig().MaxBlobsPerBlock(0)) - allSet := make([]bool, params.BeaconConfig().MaxBlobsPerBlock(0)) - firstSet := make([]bool, params.BeaconConfig().MaxBlobsPerBlock(0)) - lastSet := make([]bool, params.BeaconConfig().MaxBlobsPerBlock(0)) - oneSet := make([]bool, params.BeaconConfig().MaxBlobsPerBlock(0)) + ee := params.BeaconConfig().ElectraForkEpoch + es := util.SlotAtEpoch(t, ee) + noneSet := make([]bool, params.BeaconConfig().MaxBlobsPerBlock(es)) + allSet := make([]bool, params.BeaconConfig().MaxBlobsPerBlock(es)) + firstSet := make([]bool, params.BeaconConfig().MaxBlobsPerBlock(es)) + lastSet := make([]bool, params.BeaconConfig().MaxBlobsPerBlock(es)) + oneSet := make([]bool, params.BeaconConfig().MaxBlobsPerBlock(es)) firstSet[0] = true lastSet[len(lastSet)-1] = true oneSet[1] = true @@ -53,7 +56,7 @@ func TestSlotByRoot_Summary(t *testing.T) { for _, c := range cases { if c.expected != nil { key := bytesutil.ToBytes32([]byte(c.name)) - sc.cache[key] = BlobStorageSummary{epoch: 0, mask: c.expected} + sc.cache[key] = BlobStorageSummary{epoch: ee, mask: c.expected} } } for _, c := range cases { @@ -73,6 +76,7 @@ func TestSlotByRoot_Summary(t *testing.T) { } func TestAllAvailable(t *testing.T) { + es := util.SlotAtEpoch(t, params.BeaconConfig().ElectraForkEpoch) idxUpTo := func(u int) []int { r := make([]int, u) for i := range r { @@ -125,13 +129,13 @@ func TestAllAvailable(t *testing.T) { }, { name: "out of bound is safe", - count: params.BeaconConfig().MaxBlobsPerBlock(0) + 1, + count: params.BeaconConfig().MaxBlobsPerBlock(es) + 1, aa: false, }, { name: "max present", - count: params.BeaconConfig().MaxBlobsPerBlock(0), - idxSet: idxUpTo(params.BeaconConfig().MaxBlobsPerBlock(0)), + count: params.BeaconConfig().MaxBlobsPerBlock(es), + idxSet: idxUpTo(params.BeaconConfig().MaxBlobsPerBlock(es)), aa: true, }, { @@ -143,7 +147,7 @@ func TestAllAvailable(t *testing.T) { } for _, c := range cases { t.Run(c.name, func(t *testing.T) { - mask := make([]bool, params.BeaconConfig().MaxBlobsPerBlock(0)) + mask := make([]bool, params.BeaconConfig().MaxBlobsPerBlock(es)) for _, idx := range c.idxSet { mask[idx] = true } diff --git a/beacon-chain/db/filesystem/iteration_test.go b/beacon-chain/db/filesystem/iteration_test.go index da01f9d23b..25acd897a3 100644 --- a/beacon-chain/db/filesystem/iteration_test.go +++ b/beacon-chain/db/filesystem/iteration_test.go @@ -11,6 +11,7 @@ import ( "testing" "github.com/OffchainLabs/prysm/v6/beacon-chain/verification" + "github.com/OffchainLabs/prysm/v6/config/params" "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" "github.com/OffchainLabs/prysm/v6/testing/require" "github.com/OffchainLabs/prysm/v6/testing/util" @@ -60,12 +61,13 @@ func TestRootFromDir(t *testing.T) { } func TestSlotFromFile(t *testing.T) { + es := util.SlotAtEpoch(t, params.BeaconConfig().ElectraForkEpoch) cases := []struct { slot primitives.Slot }{ - {slot: 0}, - {slot: 2}, 
- {slot: 1123581321}, + {slot: es + 0}, + {slot: es + 2}, + {slot: es + 1123581321}, {slot: math.MaxUint64}, } for _, c := range cases { @@ -243,39 +245,40 @@ func TestSlotFromBlob(t *testing.T) { } func TestIterationComplete(t *testing.T) { + de := params.BeaconConfig().DenebForkEpoch targets := []migrationTestTarget{ { - ident: ezIdent(t, "0x0125e54c64c925018c9296965a5b622d9f5ab626c10917860dcfb6aa09a0a00b", 1234, 0), - path: "by-epoch/0/1234/0x0125e54c64c925018c9296965a5b622d9f5ab626c10917860dcfb6aa09a0a00b/0.ssz", + ident: ezIdent(t, "0x0125e54c64c925018c9296965a5b622d9f5ab626c10917860dcfb6aa09a0a00b", de+1234, 0), + path: "by-epoch/%d/%d/0x0125e54c64c925018c9296965a5b622d9f5ab626c10917860dcfb6aa09a0a00b/0.ssz", }, { - ident: ezIdent(t, "0x0127dba6fd30fdbb47e73e861d5c6e602b38ac3ddc945bb6a2fc4e10761e9a86", 5330, 0), + ident: ezIdent(t, "0x0127dba6fd30fdbb47e73e861d5c6e602b38ac3ddc945bb6a2fc4e10761e9a86", de+5330, 0), slotOffset: 31, - path: "by-epoch/1/5330/0x0127dba6fd30fdbb47e73e861d5c6e602b38ac3ddc945bb6a2fc4e10761e9a86/0.ssz", + path: "by-epoch/%d/%d/0x0127dba6fd30fdbb47e73e861d5c6e602b38ac3ddc945bb6a2fc4e10761e9a86/0.ssz", }, { - ident: ezIdent(t, "0x0127dba6fd30fdbb47e73e861d5c6e602b38ac3ddc945bb6a2fc4e10761e9a86", 5330, 1), + ident: ezIdent(t, "0x0127dba6fd30fdbb47e73e861d5c6e602b38ac3ddc945bb6a2fc4e10761e9a86", de+5330, 1), slotOffset: 31, - path: "by-epoch/1/5330/0x0127dba6fd30fdbb47e73e861d5c6e602b38ac3ddc945bb6a2fc4e10761e9a86/1.ssz", + path: "by-epoch/%d/%d/0x0127dba6fd30fdbb47e73e861d5c6e602b38ac3ddc945bb6a2fc4e10761e9a86/1.ssz", }, { - ident: ezIdent(t, "0x0232521756a0b965eab2c2245d7ad85feaeaf5f427cd14d1a7531f9d555b415c", 16777216, 0), + ident: ezIdent(t, "0x0232521756a0b965eab2c2245d7ad85feaeaf5f427cd14d1a7531f9d555b415c", -1+math.MaxUint64/32, 0), slotOffset: 16, - path: "by-epoch/4096/16777216/0x0232521756a0b965eab2c2245d7ad85feaeaf5f427cd14d1a7531f9d555b415c/0.ssz", + path: "by-epoch/%d/%d/0x0232521756a0b965eab2c2245d7ad85feaeaf5f427cd14d1a7531f9d555b415c/0.ssz", }, { - ident: ezIdent(t, "0x0232521756a0b965eab2c2245d7ad85feaeaf5f427cd14d1a7531f9d555b415c", 16777216, 1), + ident: ezIdent(t, "0x0232521756a0b965eab2c2245d7ad85feaeaf5f427cd14d1a7531f9d555b415c", -1+math.MaxUint64/32, 1), slotOffset: 16, - path: "by-epoch/4096/16777216/0x0232521756a0b965eab2c2245d7ad85feaeaf5f427cd14d1a7531f9d555b415c/1.ssz", + path: "by-epoch/%d/%d/0x0232521756a0b965eab2c2245d7ad85feaeaf5f427cd14d1a7531f9d555b415c/1.ssz", }, { - ident: ezIdent(t, "0x42eabe3d2c125410cd226de6f2825fb7575ab896c3f52e43de1fa29e4c809aba", 16777217, 0), + ident: ezIdent(t, "0x42eabe3d2c125410cd226de6f2825fb7575ab896c3f52e43de1fa29e4c809aba", -1+math.MaxUint64/32, 0), slotOffset: 16, - path: "by-epoch/4096/16777217/0x42eabe3d2c125410cd226de6f2825fb7575ab896c3f52e43de1fa29e4c809aba/0.ssz", + path: "by-epoch/%d/%d/0x42eabe3d2c125410cd226de6f2825fb7575ab896c3f52e43de1fa29e4c809aba/0.ssz", }, { - ident: ezIdent(t, "0x666cea5034e22bd3b849cb33914cad59afd88ee08e4d5bc0e997411c945fbc1d", 11235, 1), - path: "by-epoch/2/11235/0x666cea5034e22bd3b849cb33914cad59afd88ee08e4d5bc0e997411c945fbc1d/1.ssz", + ident: ezIdent(t, "0x666cea5034e22bd3b849cb33914cad59afd88ee08e4d5bc0e997411c945fbc1d", de+11235, 1), + path: "by-epoch/%d/%d/0x666cea5034e22bd3b849cb33914cad59afd88ee08e4d5bc0e997411c945fbc1d/1.ssz", }, } fs := afero.NewMemMapFs() @@ -299,6 +302,7 @@ func TestIterationComplete(t *testing.T) { require.Equal(t, true, ok) require.Equal(t, tar.ident.epoch, entry.epoch) require.Equal(t, true, entry.HasIndex(tar.ident.index)) - 
require.Equal(t, tar.path, byEpoch.sszPath(tar.ident)) + path := fmt.Sprintf(tar.path, periodForEpoch(tar.ident.epoch), tar.ident.epoch) + require.Equal(t, path, byEpoch.sszPath(tar.ident)) } } diff --git a/beacon-chain/db/filesystem/migration_test.go b/beacon-chain/db/filesystem/migration_test.go index 06bcf6ca1f..fa29f9bf2a 100644 --- a/beacon-chain/db/filesystem/migration_test.go +++ b/beacon-chain/db/filesystem/migration_test.go @@ -4,10 +4,10 @@ import ( "os" "testing" + "github.com/OffchainLabs/prysm/v6/config/params" "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" "github.com/OffchainLabs/prysm/v6/testing/require" "github.com/OffchainLabs/prysm/v6/testing/util" - "github.com/OffchainLabs/prysm/v6/time/slots" "github.com/spf13/afero" ) @@ -18,9 +18,7 @@ func ezIdent(t *testing.T, rootStr string, epoch primitives.Epoch, index uint64) } func setupTestBlobFile(t *testing.T, ident blobIdent, offset primitives.Slot, fs afero.Fs, l fsLayout) { - slot, err := slots.EpochStart(ident.epoch) - require.NoError(t, err) - slot += offset + slot := util.SlotAtEpoch(t, ident.epoch) + offset _, sc := util.GenerateTestDenebBlockWithSidecar(t, [32]byte{}, slot, 1) scb, err := sc[0].MarshalSSZ() require.NoError(t, err) @@ -53,6 +51,7 @@ func testAssertFsMigrated(t *testing.T, fs afero.Fs, ident blobIdent, before, af } func TestMigrations(t *testing.T) { + de := params.BeaconConfig().DenebForkEpoch cases := []struct { name string forwardLayout string @@ -65,18 +64,18 @@ func TestMigrations(t *testing.T) { forwardLayout: LayoutNameByEpoch, targets: []migrationTestTarget{ { - ident: ezIdent(t, "0x0125e54c64c925018c9296965a5b622d9f5ab626c10917860dcfb6aa09a0a00b", 1234, 0), + ident: ezIdent(t, "0x0125e54c64c925018c9296965a5b622d9f5ab626c10917860dcfb6aa09a0a00b", de+1234, 0), }, { - ident: ezIdent(t, "0x0127dba6fd30fdbb47e73e861d5c6e602b38ac3ddc945bb6a2fc4e10761e9a86", 5330, 0), + ident: ezIdent(t, "0x0127dba6fd30fdbb47e73e861d5c6e602b38ac3ddc945bb6a2fc4e10761e9a86", de+5330, 0), slotOffset: 31, }, { - ident: ezIdent(t, "0x0127dba6fd30fdbb47e73e861d5c6e602b38ac3ddc945bb6a2fc4e10761e9a86", 5330, 1), + ident: ezIdent(t, "0x0127dba6fd30fdbb47e73e861d5c6e602b38ac3ddc945bb6a2fc4e10761e9a86", de+5330, 1), slotOffset: 31, }, { - ident: ezIdent(t, "0x0232521756a0b965eab2c2245d7ad85feaeaf5f427cd14d1a7531f9d555b415c", 16777216, 0), + ident: ezIdent(t, "0x0232521756a0b965eab2c2245d7ad85feaeaf5f427cd14d1a7531f9d555b415c", de+16777216, 0), slotOffset: 16, }, }, @@ -87,33 +86,33 @@ func TestMigrations(t *testing.T) { forwardLayout: LayoutNameByEpoch, targets: []migrationTestTarget{ { - ident: ezIdent(t, "0x0125e54c64c925018c9296965a5b622d9f5ab626c10917860dcfb6aa09a0a00b", 1234, 0), + ident: ezIdent(t, "0x0125e54c64c925018c9296965a5b622d9f5ab626c10917860dcfb6aa09a0a00b", de+1234, 0), }, { - ident: ezIdent(t, "0x0127dba6fd30fdbb47e73e861d5c6e602b38ac3ddc945bb6a2fc4e10761e9a86", 5330, 0), + ident: ezIdent(t, "0x0127dba6fd30fdbb47e73e861d5c6e602b38ac3ddc945bb6a2fc4e10761e9a86", de+5330, 0), slotOffset: 31, }, { - ident: ezIdent(t, "0x0127dba6fd30fdbb47e73e861d5c6e602b38ac3ddc945bb6a2fc4e10761e9a86", 5330, 1), + ident: ezIdent(t, "0x0127dba6fd30fdbb47e73e861d5c6e602b38ac3ddc945bb6a2fc4e10761e9a86", de+5330, 1), slotOffset: 31, }, { - ident: ezIdent(t, "0x0232521756a0b965eab2c2245d7ad85feaeaf5f427cd14d1a7531f9d555b415c", 16777216, 0), + ident: ezIdent(t, "0x0232521756a0b965eab2c2245d7ad85feaeaf5f427cd14d1a7531f9d555b415c", de+16777216, 0), slotOffset: 16, migrated: true, }, { - ident: ezIdent(t, 
"0x0232521756a0b965eab2c2245d7ad85feaeaf5f427cd14d1a7531f9d555b415c", 16777216, 1), + ident: ezIdent(t, "0x0232521756a0b965eab2c2245d7ad85feaeaf5f427cd14d1a7531f9d555b415c", de+16777216, 1), slotOffset: 16, migrated: true, }, { - ident: ezIdent(t, "0x42eabe3d2c125410cd226de6f2825fb7575ab896c3f52e43de1fa29e4c809aba", 16777217, 0), + ident: ezIdent(t, "0x42eabe3d2c125410cd226de6f2825fb7575ab896c3f52e43de1fa29e4c809aba", de+16777217, 0), slotOffset: 16, migrated: true, }, { - ident: ezIdent(t, "0x666cea5034e22bd3b849cb33914cad59afd88ee08e4d5bc0e997411c945fbc1d", 11235, 1), + ident: ezIdent(t, "0x666cea5034e22bd3b849cb33914cad59afd88ee08e4d5bc0e997411c945fbc1d", de+11235, 1), migrated: true, }, }, diff --git a/beacon-chain/db/filesystem/mock.go b/beacon-chain/db/filesystem/mock.go index 5605592e92..a7c2594fba 100644 --- a/beacon-chain/db/filesystem/mock.go +++ b/beacon-chain/db/filesystem/mock.go @@ -88,11 +88,11 @@ func NewEphemeralBlobStorageWithMocker(t testing.TB) (*BlobMocker, *BlobStorage) return &BlobMocker{fs: fs, bs: bs}, bs } -func NewMockBlobStorageSummarizer(t *testing.T, set map[[32]byte][]int) BlobStorageSummarizer { +func NewMockBlobStorageSummarizer(t *testing.T, epoch primitives.Epoch, set map[[32]byte][]int) BlobStorageSummarizer { c := newBlobStorageCache() for k, v := range set { for i := range v { - if err := c.ensure(blobIdent{root: k, epoch: 0, index: uint64(v[i])}); err != nil { + if err := c.ensure(blobIdent{root: k, epoch: epoch, index: uint64(v[i])}); err != nil { t.Fatal(err) } } diff --git a/beacon-chain/db/filesystem/pruner_test.go b/beacon-chain/db/filesystem/pruner_test.go index 52137f18c0..1c4f028bb0 100644 --- a/beacon-chain/db/filesystem/pruner_test.go +++ b/beacon-chain/db/filesystem/pruner_test.go @@ -142,6 +142,7 @@ func testRoots(n int) [][32]byte { } func TestLayoutPruneBefore(t *testing.T) { + electra := params.BeaconConfig().ElectraForkEpoch roots := testRoots(10) cases := []struct { name string @@ -153,27 +154,27 @@ func TestLayoutPruneBefore(t *testing.T) { }{ { name: "none pruned", - pruneBefore: 1, + pruneBefore: electra + 1, pruned: []testIdent{}, remain: []testIdent{ - {offset: 1, blobIdent: blobIdent{root: roots[0], epoch: 1, index: 0}}, - {offset: 1, blobIdent: blobIdent{root: roots[1], epoch: 1, index: 0}}, + {offset: 1, blobIdent: blobIdent{root: roots[0], epoch: electra + 1, index: 0}}, + {offset: 1, blobIdent: blobIdent{root: roots[1], epoch: electra + 1, index: 0}}, }, }, { name: "expected pruned before epoch", - pruneBefore: 3, + pruneBefore: electra + 3, pruned: []testIdent{ - {offset: 0, blobIdent: blobIdent{root: roots[0], epoch: 1, index: 0}}, - {offset: 31, blobIdent: blobIdent{root: roots[1], epoch: 1, index: 5}}, - {offset: 0, blobIdent: blobIdent{root: roots[2], epoch: 2, index: 0}}, - {offset: 31, blobIdent: blobIdent{root: roots[3], epoch: 2, index: 3}}, + {offset: 0, blobIdent: blobIdent{root: roots[0], epoch: electra + 1, index: 0}}, + {offset: 31, blobIdent: blobIdent{root: roots[1], epoch: electra + 1, index: 5}}, + {offset: 0, blobIdent: blobIdent{root: roots[2], epoch: electra + 2, index: 0}}, + {offset: 31, blobIdent: blobIdent{root: roots[3], epoch: electra + 2, index: 3}}, }, remain: []testIdent{ - {offset: 0, blobIdent: blobIdent{root: roots[4], epoch: 3, index: 2}}, // boundary - {offset: 31, blobIdent: blobIdent{root: roots[5], epoch: 3, index: 0}}, // boundary - {offset: 0, blobIdent: blobIdent{root: roots[6], epoch: 4, index: 1}}, - {offset: 31, blobIdent: blobIdent{root: roots[7], epoch: 4, index: 5}}, + {offset: 
0, blobIdent: blobIdent{root: roots[4], epoch: electra + 3, index: 2}}, // boundary + {offset: 31, blobIdent: blobIdent{root: roots[5], epoch: electra + 3, index: 0}}, // boundary + {offset: 0, blobIdent: blobIdent{root: roots[6], epoch: electra + 4, index: 1}}, + {offset: 31, blobIdent: blobIdent{root: roots[7], epoch: electra + 4, index: 5}}, }, sum: pruneSummary{blobsPruned: 4}, }, diff --git a/beacon-chain/p2p/BUILD.bazel b/beacon-chain/p2p/BUILD.bazel index 74dc125362..406f756b39 100644 --- a/beacon-chain/p2p/BUILD.bazel +++ b/beacon-chain/p2p/BUILD.bazel @@ -139,6 +139,7 @@ go_test( "sender_test.go", "service_test.go", "subnets_test.go", + "topics_test.go", "utils_test.go", ], embed = [":go_default_library"], diff --git a/beacon-chain/p2p/pubsub.go b/beacon-chain/p2p/pubsub.go index aed88b73e7..6877fe606f 100644 --- a/beacon-chain/p2p/pubsub.go +++ b/beacon-chain/p2p/pubsub.go @@ -134,13 +134,15 @@ func (s *Service) peerInspector(peerMap map[peer.ID]*pubsub.PeerScoreSnapshot) { // pubsubOptions creates a list of options to configure our router with. func (s *Service) pubsubOptions() []pubsub.Option { + filt := pubsub.NewAllowlistSubscriptionFilter(s.allTopicStrings()...) + filt = pubsub.WrapLimitSubscriptionFilter(filt, pubsubSubscriptionRequestLimit) psOpts := []pubsub.Option{ pubsub.WithMessageSignaturePolicy(pubsub.StrictNoSign), pubsub.WithNoAuthor(), pubsub.WithMessageIdFn(func(pmsg *pubsubpb.Message) string { return MsgID(s.genesisValidatorsRoot, pmsg) }), - pubsub.WithSubscriptionFilter(s), + pubsub.WithSubscriptionFilter(filt), pubsub.WithPeerOutboundQueueSize(int(s.cfg.QueueSize)), pubsub.WithMaxMessageSize(int(MaxMessageSize())), // lint:ignore uintcast -- Max Message Size is a config value and is naturally bounded by networking limitations. pubsub.WithValidateQueueSize(int(s.cfg.QueueSize)), diff --git a/beacon-chain/p2p/topics.go b/beacon-chain/p2p/topics.go index c84e092f24..1124809ab3 100644 --- a/beacon-chain/p2p/topics.go +++ b/beacon-chain/p2p/topics.go @@ -1,5 +1,15 @@ package p2p +import ( + "encoding/hex" + "slices" + "strconv" + + "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/encoder" + "github.com/OffchainLabs/prysm/v6/config/params" + "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" +) + const ( // GossipProtocolAndDigest represents the protocol and fork digest prefix in a gossip topic. GossipProtocolAndDigest = "/eth2/%x/" @@ -66,3 +76,129 @@ const ( // DataColumnSubnetTopicFormat is the topic format for the data column subnet. DataColumnSubnetTopicFormat = GossipProtocolAndDigest + GossipDataColumnSidecarMessage + "_%d" ) + +// topic is a struct representing a single gossipsub topic. +// It can also be used to represent a set of subnet topics: see appendSubnetsBelow(). +// topic is intended to be used as an immutable value - it is hashable so it can be used as a map key +// and it uses strings in order to leverage golangs string interning for memory efficiency. +type topic struct { + full string + digest string + message string + start primitives.Epoch + end primitives.Epoch + suffix string + subnet uint64 +} + +func (t topic) String() string { + return t.full +} + +// sszEnc is used to get the protocol suffix for topics. This value has been effectively hardcoded +// since phase0. +var sszEnc = &encoder.SszNetworkEncoder{} + +// newTopic constructs a topic value for an ordinary topic structure (without subnets). 
+func newTopic(start, end primitives.Epoch, digest [4]byte, message string) topic { + suffix := sszEnc.ProtocolSuffix() + t := topic{digest: hex.EncodeToString(digest[:]), message: message, start: start, end: end, suffix: suffix} + t.full = "/" + "eth2" + "/" + t.digest + "/" + t.message + t.suffix + return t +} + +// newSubnetTopic constructs a topic value for a topic with a subnet structure. +func newSubnetTopic(start, end primitives.Epoch, digest [4]byte, message string, subnet uint64) topic { + t := newTopic(start, end, digest, message) + t.subnet = subnet + t.full = "/" + "eth2" + "/" + t.digest + "/" + t.message + "_" + strconv.Itoa(int(t.subnet)) + t.suffix + return t +} + +// allTopicStrings returns the full topic string for all topics +// that could be derived from the current fork schedule. +func (s *Service) allTopicStrings() []string { + topics := s.allTopics() + topicStrs := make([]string, 0, len(topics)) + for _, t := range topics { + topicStrs = append(topicStrs, t.String()) + } + return topicStrs +} + +// appendSubnetsBelow uses the value of top.subnet as the subnet count +// and creates a topic value for each subnet less than the subnet count, appending them all +// to appendTo. +func appendSubnetsBelow(top topic, digest [4]byte, appendTo []topic) []topic { + for i := range top.subnet { + appendTo = append(appendTo, newSubnetTopic(top.start, top.end, digest, top.message, i)) + } + return appendTo +} + +// allTopics returns all topics that could be derived from the current fork schedule. +func (s *Service) allTopics() []topic { + cfg := params.BeaconConfig() + // bellatrix: no special topics; electra: blobs topics handled all together + genesis, altair, capella := cfg.GenesisEpoch, cfg.AltairForkEpoch, cfg.CapellaForkEpoch + deneb, fulu, future := cfg.DenebForkEpoch, cfg.FuluForkEpoch, cfg.FarFutureEpoch + // Templates are starter topics - they have a placeholder digest and the subnet is set to the maximum value + // for the subnet (see how this is used in allSubnetsBelow). These are not directly returned by the method, + // they are copied and modified for each digest where they apply based on the start and end epochs. + empty := [4]byte{0, 0, 0, 0} // empty digest for templates, replaced by real digests in per-fork copies. 
+ templates := []topic{ + newTopic(genesis, future, empty, GossipBlockMessage), + newTopic(genesis, future, empty, GossipAggregateAndProofMessage), + newTopic(genesis, future, empty, GossipExitMessage), + newTopic(genesis, future, empty, GossipProposerSlashingMessage), + newTopic(genesis, future, empty, GossipAttesterSlashingMessage), + newSubnetTopic(genesis, future, empty, GossipAttestationMessage, cfg.AttestationSubnetCount), + newSubnetTopic(altair, future, empty, GossipSyncCommitteeMessage, cfg.SyncCommitteeSubnetCount), + newTopic(altair, future, empty, GossipContributionAndProofMessage), + newTopic(altair, future, empty, GossipLightClientOptimisticUpdateMessage), + newTopic(altair, future, empty, GossipLightClientFinalityUpdateMessage), + newTopic(capella, future, empty, GossipBlsToExecutionChangeMessage), + } + last := params.GetNetworkScheduleEntry(genesis) + schedule := []params.NetworkScheduleEntry{last} + for next := params.NextNetworkScheduleEntry(last.Epoch); next.ForkDigest != last.ForkDigest; next = params.NextNetworkScheduleEntry(next.Epoch) { + schedule = append(schedule, next) + last = next + } + slices.Reverse(schedule) // reverse the fork schedule because it simplifies dealing with BPOs + fullTopics := make([]topic, 0, len(templates)) + for _, top := range templates { + for _, entry := range schedule { + if top.start <= entry.Epoch && entry.Epoch < top.end { + if top.subnet > 0 { // subnet topics in the list above should set this value to the max subnet count: see allSubnetsBelow + fullTopics = appendSubnetsBelow(top, entry.ForkDigest, fullTopics) + } else { + fullTopics = append(fullTopics, newTopic(top.start, top.end, entry.ForkDigest, top.message)) + } + } + } + } + end := future + // We're iterating from high to low per the slices.Reverse above. + // So we'll update end = n.Epoch as we go down, and use that as the end for the next entry. + // This loop either adds blob or data column sidecar topics depending on the fork. + for _, entry := range schedule { + if entry.Epoch < deneb { + break + // note: there is a special case where deneb is the genesis fork, in which case + // we'll generate blob sidecar topics for the earlier schedule, but + // this only happens in devnets where it doesn't really matter. + } + message := GossipDataColumnSidecarMessage + subnets := cfg.DataColumnSidecarSubnetCount + if entry.Epoch < fulu { + message = GossipBlobSidecarMessage + subnets = uint64(cfg.MaxBlobsPerBlockAtEpoch(entry.Epoch)) + } + // Set subnet to max value, allSubnetsBelow will iterate every index up to that value. + top := newSubnetTopic(entry.Epoch, end, entry.ForkDigest, message, subnets) + fullTopics = appendSubnetsBelow(top, entry.ForkDigest, fullTopics) + end = entry.Epoch // These topics / subnet structures are mutually exclusive, so set each end to the next highest entry. 
+ } + return fullTopics +} diff --git a/beacon-chain/p2p/topics_test.go b/beacon-chain/p2p/topics_test.go new file mode 100644 index 0000000000..75669a9006 --- /dev/null +++ b/beacon-chain/p2p/topics_test.go @@ -0,0 +1,70 @@ +package p2p + +import ( + "encoding/hex" + "testing" + + "github.com/OffchainLabs/prysm/v6/config/params" + "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v6/testing/require" +) + +func TestAllTopics(t *testing.T) { + params.SetupTestConfigCleanup(t) + cfg := params.MainnetConfig() + cfg.FuluForkEpoch = params.BeaconConfig().ElectraForkEpoch + 4096*2 + params.OverrideBeaconConfig(cfg) + s := &Service{} + all := s.allTopicStrings() + tops := map[string]struct{}{} + for _, t := range all { + tops[t] = struct{}{} + } + require.Equal(t, len(tops), len(all), "duplicate topics found") + expected := []string{ + "/eth2/ad532ceb/sync_committee_contribution_and_proof/ssz_snappy", + "/eth2/ad532ceb/beacon_aggregate_and_proof/ssz_snappy", + "/eth2/ad532ceb/beacon_block/ssz_snappy", + "/eth2/ad532ceb/bls_to_execution_change/ssz_snappy", + "/eth2/afcaaba0/beacon_attestation_19/ssz_snappy", + "/eth2/cc2c5cdb/data_column_sidecar_0/ssz_snappy", + "/eth2/cc2c5cdb/data_column_sidecar_127/ssz_snappy", + } + forks := []primitives.Epoch{cfg.GenesisEpoch, cfg.AltairForkEpoch, + cfg.BellatrixForkEpoch, cfg.CapellaForkEpoch, cfg.DenebForkEpoch, + cfg.ElectraForkEpoch, cfg.FuluForkEpoch} + // sanity check: we should always have a block topic. + // construct it by hand in case there are bugs in newTopic. + for _, f := range forks { + digest := params.ForkDigest(f) + expected = append(expected, "/eth2/"+hex.EncodeToString(digest[:])+"/beacon_block/ssz_snappy") + } + for _, e := range expected { + _, ok := tops[e] + require.Equal(t, true, ok) + } + // we should have no data column subnets before fulu + electraColumn := newSubnetTopic(cfg.ElectraForkEpoch, cfg.FuluForkEpoch, + params.ForkDigest(params.BeaconConfig().ElectraForkEpoch), + GossipDataColumnSidecarMessage, + cfg.DataColumnSidecarSubnetCount-1) + // we should have no blob sidecars before deneb or after electra + blobBeforeDeneb := newSubnetTopic(cfg.DenebForkEpoch-1, cfg.DenebForkEpoch, + params.ForkDigest(cfg.DenebForkEpoch-1), + GossipBlobSidecarMessage, + uint64(cfg.MaxBlobsPerBlockAtEpoch(cfg.DenebForkEpoch-1))-1) + blobAfterElectra := newSubnetTopic(cfg.FuluForkEpoch, cfg.FarFutureEpoch, + params.ForkDigest(cfg.FuluForkEpoch), + GossipBlobSidecarMessage, + uint64(cfg.MaxBlobsPerBlockAtEpoch(cfg.FuluForkEpoch))-1) + unexpected := []string{ + "/eth2/cc2c5cdb/data_column_sidecar_128/ssz_snappy", + electraColumn.String(), + blobBeforeDeneb.String(), + blobAfterElectra.String(), + } + for _, e := range unexpected { + _, ok := tops[e] + require.Equal(t, false, ok) + } +} diff --git a/beacon-chain/rpc/eth/beacon/handlers_test.go b/beacon-chain/rpc/eth/beacon/handlers_test.go index c13759b8f7..d39830dec9 100644 --- a/beacon-chain/rpc/eth/beacon/handlers_test.go +++ b/beacon-chain/rpc/eth/beacon/handlers_test.go @@ -4876,8 +4876,16 @@ func TestServer_broadcastBlobSidecars(t *testing.T) { } func Test_validateBlobs(t *testing.T) { + params.SetupTestConfigCleanup(t) + params.BeaconConfig().FuluForkEpoch = params.BeaconConfig().ElectraForkEpoch + 4096*2 + ds := util.SlotAtEpoch(t, params.BeaconConfig().DenebForkEpoch) + es := util.SlotAtEpoch(t, params.BeaconConfig().ElectraForkEpoch) + fe := params.BeaconConfig().FuluForkEpoch + fs := util.SlotAtEpoch(t, fe) + require.NoError(t, kzg.Start()) + 
denebMax := params.BeaconConfig().MaxBlobsPerBlock(ds) blob := util.GetRandBlob(123) // Generate proper commitment and proof for the blob var kzgBlob kzg.Blob @@ -4887,6 +4895,7 @@ func Test_validateBlobs(t *testing.T) { proof, err := kzg.ComputeBlobKZGProof(&kzgBlob, commitment) require.NoError(t, err) blk := util.NewBeaconBlockDeneb() + blk.Block.Slot = ds blk.Block.Body.BlobKzgCommitments = [][]byte{commitment[:]} b, err := blocks.NewSignedBeaconBlock(blk) require.NoError(t, err) @@ -4902,10 +4911,11 @@ func Test_validateBlobs(t *testing.T) { require.NoError(t, err) require.ErrorContains(t, "could not verify blob proofs", s.validateBlobs(b, [][]byte{blob[:]}, [][]byte{proof[:]})) + electraMax := params.BeaconConfig().MaxBlobsPerBlock(es) blobs := [][]byte{} commitments := [][]byte{} proofs := [][]byte{} - for i := 0; i < 10; i++ { + for i := 0; i < electraMax+1; i++ { blobs = append(blobs, blob[:]) commitments = append(commitments, commitment[:]) proofs = append(proofs, proof[:]) @@ -4923,6 +4933,7 @@ func Test_validateBlobs(t *testing.T) { t.Run("Deneb block with valid single blob", func(t *testing.T) { blk := util.NewBeaconBlockDeneb() + blk.Block.Slot = ds blk.Block.Body.BlobKzgCommitments = [][]byte{commitment[:]} b, err := blocks.NewSignedBeaconBlock(blk) require.NoError(t, err) @@ -4931,107 +4942,54 @@ func Test_validateBlobs(t *testing.T) { }) t.Run("Deneb block with max blobs (6)", func(t *testing.T) { - cfg := params.BeaconConfig().Copy() - defer params.OverrideBeaconConfig(cfg) - - testCfg := params.BeaconConfig().Copy() - testCfg.DenebForkEpoch = 0 - testCfg.ElectraForkEpoch = 100 - testCfg.DeprecatedMaxBlobsPerBlock = 6 - params.OverrideBeaconConfig(testCfg) - blk := util.NewBeaconBlockDeneb() - blk.Block.Slot = 10 // Deneb slot + blk.Block.Slot = ds blk.Block.Body.BlobKzgCommitments = commitments[:6] b, err := blocks.NewSignedBeaconBlock(blk) require.NoError(t, err) s := &Server{} // Should pass with exactly 6 blobs - require.NoError(t, s.validateBlobs(b, blobs[:6], proofs[:6])) + require.NoError(t, s.validateBlobs(b, blobs[:denebMax], proofs[:denebMax])) }) t.Run("Deneb block exceeding max blobs", func(t *testing.T) { - cfg := params.BeaconConfig().Copy() - defer params.OverrideBeaconConfig(cfg) - - testCfg := params.BeaconConfig().Copy() - testCfg.DenebForkEpoch = 0 - testCfg.ElectraForkEpoch = 100 - testCfg.DeprecatedMaxBlobsPerBlock = 6 - params.OverrideBeaconConfig(testCfg) - blk := util.NewBeaconBlockDeneb() - blk.Block.Slot = 10 // Deneb slot + blk.Block.Slot = ds blk.Block.Body.BlobKzgCommitments = commitments[:7] b, err := blocks.NewSignedBeaconBlock(blk) require.NoError(t, err) s := &Server{} // Should fail with 7 blobs when max is 6 - err = s.validateBlobs(b, blobs[:7], proofs[:7]) - require.ErrorContains(t, "number of blobs over max, 7 > 6", err) + err = s.validateBlobs(b, blobs[:denebMax+1], proofs[:denebMax+1]) + require.ErrorContains(t, "number of blobs over max", err) }) t.Run("Electra block with valid blobs", func(t *testing.T) { - cfg := params.BeaconConfig().Copy() - defer params.OverrideBeaconConfig(cfg) - - // Set up Electra config with max 9 blobs - testCfg := params.BeaconConfig().Copy() - testCfg.DenebForkEpoch = 0 - testCfg.ElectraForkEpoch = 5 - testCfg.DeprecatedMaxBlobsPerBlock = 6 - testCfg.DeprecatedMaxBlobsPerBlockElectra = 9 - params.OverrideBeaconConfig(testCfg) - blk := util.NewBeaconBlockElectra() - blk.Block.Slot = 160 // Electra slot (epoch 5+) + blk.Block.Slot = es blk.Block.Body.BlobKzgCommitments = commitments[:9] b, err := 
blocks.NewSignedBeaconBlock(blk) require.NoError(t, err) s := &Server{} // Should pass with 9 blobs in Electra - require.NoError(t, s.validateBlobs(b, blobs[:9], proofs[:9])) + require.NoError(t, s.validateBlobs(b, blobs[:electraMax], proofs[:electraMax])) }) t.Run("Electra block exceeding max blobs", func(t *testing.T) { - cfg := params.BeaconConfig().Copy() - defer params.OverrideBeaconConfig(cfg) - - // Set up Electra config with max 9 blobs - testCfg := params.BeaconConfig().Copy() - testCfg.DenebForkEpoch = 0 - testCfg.ElectraForkEpoch = 5 - testCfg.DeprecatedMaxBlobsPerBlock = 6 - testCfg.DeprecatedMaxBlobsPerBlockElectra = 9 - params.OverrideBeaconConfig(testCfg) - blk := util.NewBeaconBlockElectra() - blk.Block.Slot = 160 // Electra slot - blk.Block.Body.BlobKzgCommitments = commitments[:10] + blk.Block.Slot = es + blk.Block.Body.BlobKzgCommitments = commitments[:electraMax+1] b, err := blocks.NewSignedBeaconBlock(blk) require.NoError(t, err) s := &Server{} // Should fail with 10 blobs when max is 9 - err = s.validateBlobs(b, blobs[:10], proofs[:10]) - require.ErrorContains(t, "number of blobs over max, 10 > 9", err) + err = s.validateBlobs(b, blobs[:electraMax+1], proofs[:electraMax+1]) + require.ErrorContains(t, "number of blobs over max", err) }) t.Run("Fulu block with valid cell proofs", func(t *testing.T) { - cfg := params.BeaconConfig().Copy() - defer params.OverrideBeaconConfig(cfg) - - testCfg := params.BeaconConfig().Copy() - testCfg.DenebForkEpoch = 0 - testCfg.ElectraForkEpoch = 5 - testCfg.FuluForkEpoch = 10 - testCfg.DeprecatedMaxBlobsPerBlock = 6 - testCfg.DeprecatedMaxBlobsPerBlockElectra = 9 - testCfg.NumberOfColumns = 128 // Standard PeerDAS configuration - params.OverrideBeaconConfig(testCfg) - - // Create Fulu block with proper cell proofs blk := util.NewBeaconBlockFulu() - blk.Block.Slot = 320 // Epoch 10 (Fulu fork) + blk.Block.Slot = fs // Generate valid commitments and cell proofs for testing blobCount := 2 @@ -5075,18 +5033,8 @@ func Test_validateBlobs(t *testing.T) { }) t.Run("Fulu block with invalid cell proof count", func(t *testing.T) { - cfg := params.BeaconConfig().Copy() - defer params.OverrideBeaconConfig(cfg) - - testCfg := params.BeaconConfig().Copy() - testCfg.DenebForkEpoch = 0 - testCfg.ElectraForkEpoch = 5 - testCfg.FuluForkEpoch = 10 - testCfg.NumberOfColumns = 128 - params.OverrideBeaconConfig(testCfg) - blk := util.NewBeaconBlockFulu() - blk.Block.Slot = 320 // Epoch 10 (Fulu fork) + blk.Block.Slot = fs // Create valid commitments but wrong number of cell proofs blobCount := 2 @@ -5123,6 +5071,7 @@ func Test_validateBlobs(t *testing.T) { require.NoError(t, err) blk := util.NewBeaconBlockDeneb() + blk.Block.Slot = ds blk.Block.Body.BlobKzgCommitments = [][]byte{sk.PublicKey().Marshal()} b, err := blocks.NewSignedBeaconBlock(blk) require.NoError(t, err) @@ -5134,6 +5083,7 @@ func Test_validateBlobs(t *testing.T) { t.Run("empty blobs and proofs should pass", func(t *testing.T) { blk := util.NewBeaconBlockDeneb() + blk.Block.Slot = ds blk.Block.Body.BlobKzgCommitments = [][]byte{} b, err := blocks.NewSignedBeaconBlock(blk) require.NoError(t, err) @@ -5148,53 +5098,48 @@ func Test_validateBlobs(t *testing.T) { // Set up config with BlobSchedule (BPO - Blob Production Optimization) testCfg := params.BeaconConfig().Copy() - testCfg.DenebForkEpoch = 0 - testCfg.ElectraForkEpoch = 100 - testCfg.FuluForkEpoch = 200 testCfg.DeprecatedMaxBlobsPerBlock = 6 testCfg.DeprecatedMaxBlobsPerBlockElectra = 9 // Define blob schedule with progressive increases 
testCfg.BlobSchedule = []params.BlobScheduleEntry{ - {Epoch: 0, MaxBlobsPerBlock: 3}, // Start with 3 blobs - {Epoch: 10, MaxBlobsPerBlock: 5}, // Increase to 5 at epoch 10 - {Epoch: 20, MaxBlobsPerBlock: 7}, // Increase to 7 at epoch 20 - {Epoch: 30, MaxBlobsPerBlock: 9}, // Increase to 9 at epoch 30 + {Epoch: fe + 1, MaxBlobsPerBlock: 3}, // Start with 3 blobs + {Epoch: fe + 10, MaxBlobsPerBlock: 5}, // Increase to 5 at epoch 10 + {Epoch: fe + 20, MaxBlobsPerBlock: 7}, // Increase to 7 at epoch 20 + {Epoch: fe + 30, MaxBlobsPerBlock: 9}, // Increase to 9 at epoch 30 } params.OverrideBeaconConfig(testCfg) s := &Server{} - - // Test epoch 0-9: max 3 blobs - t.Run("epoch 0-9: max 3 blobs", func(t *testing.T) { + t.Run("deneb under and over max", func(t *testing.T) { blk := util.NewBeaconBlockDeneb() - blk.Block.Slot = 5 // Epoch 0 - blk.Block.Body.BlobKzgCommitments = commitments[:3] + blk.Block.Slot = ds + blk.Block.Body.BlobKzgCommitments = commitments[:denebMax] b, err := blocks.NewSignedBeaconBlock(blk) require.NoError(t, err) - require.NoError(t, s.validateBlobs(b, blobs[:3], proofs[:3])) + require.NoError(t, s.validateBlobs(b, blobs[:denebMax], proofs[:denebMax])) // Should fail with 4 blobs blk.Block.Body.BlobKzgCommitments = commitments[:4] b, err = blocks.NewSignedBeaconBlock(blk) require.NoError(t, err) - err = s.validateBlobs(b, blobs[:4], proofs[:4]) - require.ErrorContains(t, "number of blobs over max, 4 > 3", err) + err = s.validateBlobs(b, blobs[:denebMax+1], proofs[:denebMax+1]) + require.ErrorContains(t, "number of blobs over max", err) }) // Test epoch 30+: max 9 blobs - t.Run("epoch 30+: max 9 blobs", func(t *testing.T) { - blk := util.NewBeaconBlockDeneb() - blk.Block.Slot = 960 // Epoch 30 - blk.Block.Body.BlobKzgCommitments = commitments[:9] + t.Run("different max in electra", func(t *testing.T) { + blk := util.NewBeaconBlockElectra() + blk.Block.Slot = es + blk.Block.Body.BlobKzgCommitments = commitments[:electraMax] b, err := blocks.NewSignedBeaconBlock(blk) require.NoError(t, err) - require.NoError(t, s.validateBlobs(b, blobs[:9], proofs[:9])) + require.NoError(t, s.validateBlobs(b, blobs[:electraMax], proofs[:electraMax])) - // Should fail with 10 blobs - blk.Block.Body.BlobKzgCommitments = commitments[:10] + // exceed the electra max + blk.Block.Body.BlobKzgCommitments = commitments[:electraMax+1] b, err = blocks.NewSignedBeaconBlock(blk) require.NoError(t, err) - err = s.validateBlobs(b, blobs[:10], proofs[:10]) + err = s.validateBlobs(b, blobs[:electraMax+1], proofs[:electraMax+1]) require.ErrorContains(t, "number of blobs over max, 10 > 9", err) }) }) diff --git a/beacon-chain/rpc/eth/blob/BUILD.bazel b/beacon-chain/rpc/eth/blob/BUILD.bazel index fe0386a5a9..e98c2bd769 100644 --- a/beacon-chain/rpc/eth/blob/BUILD.bazel +++ b/beacon-chain/rpc/eth/blob/BUILD.bazel @@ -51,7 +51,6 @@ go_test( "//testing/assert:go_default_library", "//testing/require:go_default_library", "//testing/util:go_default_library", - "//time/slots:go_default_library", "@com_github_ethereum_go_ethereum//common/hexutil:go_default_library", ], ) diff --git a/beacon-chain/rpc/eth/blob/handlers_test.go b/beacon-chain/rpc/eth/blob/handlers_test.go index 33cac54941..eca9a16b03 100644 --- a/beacon-chain/rpc/eth/blob/handlers_test.go +++ b/beacon-chain/rpc/eth/blob/handlers_test.go @@ -30,18 +30,19 @@ import ( "github.com/OffchainLabs/prysm/v6/testing/assert" "github.com/OffchainLabs/prysm/v6/testing/require" "github.com/OffchainLabs/prysm/v6/testing/util" - 
"github.com/OffchainLabs/prysm/v6/time/slots" "github.com/ethereum/go-ethereum/common/hexutil" ) func TestBlobs(t *testing.T) { params.SetupTestConfigCleanup(t) cfg := params.BeaconConfig().Copy() - cfg.DenebForkEpoch = 1 + cfg.FuluForkEpoch = cfg.ElectraForkEpoch + 4096*2 params.OverrideBeaconConfig(cfg) + es := util.SlotAtEpoch(t, cfg.ElectraForkEpoch) + ds := util.SlotAtEpoch(t, cfg.DenebForkEpoch) db := testDB.SetupDB(t) - denebBlock, blobs := util.GenerateTestDenebBlockWithSidecar(t, [32]byte{}, 123, 4) + denebBlock, blobs := util.GenerateTestDenebBlockWithSidecar(t, [32]byte{}, es, 4) require.NoError(t, db.SaveBlock(t.Context(), denebBlock)) bs := filesystem.NewEphemeralBlobStorage(t) testSidecars := verification.FakeVerifySliceForTest(t, blobs) @@ -171,7 +172,7 @@ func TestBlobs(t *testing.T) { require.Equal(t, false, resp.Finalized) }) t.Run("slot", func(t *testing.T) { - u := "http://foo.example/123" + u := fmt.Sprintf("http://foo.example/%d", es) request := httptest.NewRequest("GET", u, nil) writer := httptest.NewRecorder() writer.Body = &bytes.Buffer{} @@ -195,7 +196,7 @@ func TestBlobs(t *testing.T) { require.Equal(t, false, resp.Finalized) }) t.Run("slot not found", func(t *testing.T) { - u := "http://foo.example/122" + u := fmt.Sprintf("http://foo.example/%d", es-1) request := httptest.NewRequest("GET", u, nil) writer := httptest.NewRecorder() writer.Body = &bytes.Buffer{} @@ -212,7 +213,7 @@ func TestBlobs(t *testing.T) { assert.Equal(t, http.StatusNotFound, writer.Code) }) t.Run("one blob only", func(t *testing.T) { - u := "http://foo.example/123?indices=2" + u := fmt.Sprintf("http://foo.example/%d?indices=2", es) request := httptest.NewRequest("GET", u, nil) writer := httptest.NewRecorder() writer.Body = &bytes.Buffer{} @@ -243,7 +244,7 @@ func TestBlobs(t *testing.T) { require.Equal(t, false, resp.Finalized) }) t.Run("no blobs returns an empty array", func(t *testing.T) { - u := "http://foo.example/123" + u := fmt.Sprintf("http://foo.example/%d", es) request := httptest.NewRequest("GET", u, nil) writer := httptest.NewRecorder() writer.Body = &bytes.Buffer{} @@ -267,10 +268,8 @@ func TestBlobs(t *testing.T) { require.Equal(t, false, resp.Finalized) }) t.Run("blob index over max", func(t *testing.T) { - forkslot, err := slots.EpochStart(params.BeaconConfig().DenebForkEpoch) - require.NoError(t, err) - overLimit := params.BeaconConfig().MaxBlobsPerBlock(forkslot) - u := fmt.Sprintf("http://foo.example/123?indices=%d", overLimit) + overLimit := params.BeaconConfig().MaxBlobsPerBlock(ds) + u := fmt.Sprintf("http://foo.example/%d?indices=%d", es, overLimit) request := httptest.NewRequest("GET", u, nil) writer := httptest.NewRecorder() writer.Body = &bytes.Buffer{} @@ -284,7 +283,7 @@ func TestBlobs(t *testing.T) { assert.Equal(t, true, strings.Contains(e.Message, fmt.Sprintf("requested blob indices [%d] are invalid", overLimit))) }) t.Run("outside retention period returns 200 with what we have", func(t *testing.T) { - u := "http://foo.example/123" + u := fmt.Sprintf("http://foo.example/%d", es) request := httptest.NewRequest("GET", u, nil) writer := httptest.NewRecorder() writer.Body = &bytes.Buffer{} @@ -308,13 +307,13 @@ func TestBlobs(t *testing.T) { require.Equal(t, false, resp.Finalized) }) t.Run("block without commitments returns 200 w/empty list ", func(t *testing.T) { - denebBlock, _ := util.GenerateTestDenebBlockWithSidecar(t, [32]byte{}, 333, 0) + denebBlock, _ := util.GenerateTestDenebBlockWithSidecar(t, [32]byte{}, es+128, 0) commitments, err := 
denebBlock.Block().Body().BlobKzgCommitments() require.NoError(t, err) require.Equal(t, len(commitments), 0) require.NoError(t, db.SaveBlock(t.Context(), denebBlock)) - u := "http://foo.example/333" + u := fmt.Sprintf("http://foo.example/%d", es+128) request := httptest.NewRequest("GET", u, nil) writer := httptest.NewRecorder() writer.Body = &bytes.Buffer{} @@ -426,19 +425,17 @@ func TestBlobs(t *testing.T) { func TestBlobs_Electra(t *testing.T) { params.SetupTestConfigCleanup(t) cfg := params.BeaconConfig().Copy() - cfg.DenebForkEpoch = 0 - cfg.ElectraForkEpoch = 1 + cfg.FuluForkEpoch = cfg.ElectraForkEpoch + 4096*2 cfg.BlobSchedule = []params.BlobScheduleEntry{ - {Epoch: 0, MaxBlobsPerBlock: 6}, - {Epoch: 1, MaxBlobsPerBlock: 9}, + {Epoch: cfg.FuluForkEpoch + 4096, MaxBlobsPerBlock: 6}, + {Epoch: cfg.FuluForkEpoch + 4096 + 128, MaxBlobsPerBlock: 9}, } params.OverrideBeaconConfig(cfg) + es := util.SlotAtEpoch(t, cfg.ElectraForkEpoch) db := testDB.SetupDB(t) - forkslot, err := slots.EpochStart(params.BeaconConfig().ElectraForkEpoch) - require.NoError(t, err) - overLimit := params.BeaconConfig().MaxBlobsPerBlock(forkslot) - electraBlock, blobs := util.GenerateTestElectraBlockWithSidecar(t, [32]byte{}, 123, overLimit) + overLimit := params.BeaconConfig().MaxBlobsPerBlock(es) + electraBlock, blobs := util.GenerateTestElectraBlockWithSidecar(t, [32]byte{}, es, overLimit) require.NoError(t, db.SaveBlock(t.Context(), electraBlock)) bs := filesystem.NewEphemeralBlobStorage(t) testSidecars := verification.FakeVerifySliceForTest(t, blobs) @@ -456,7 +453,7 @@ func TestBlobs_Electra(t *testing.T) { TimeFetcher: mockChainService, } t.Run("max blobs for electra", func(t *testing.T) { - u := "http://foo.example/123" + u := fmt.Sprintf("http://foo.example/%d", es) request := httptest.NewRequest("GET", u, nil) writer := httptest.NewRecorder() writer.Body = &bytes.Buffer{} @@ -487,8 +484,8 @@ func TestBlobs_Electra(t *testing.T) { require.Equal(t, false, resp.Finalized) }) t.Run("requested blob index at max", func(t *testing.T) { - limit := overLimit - 1 - u := fmt.Sprintf("http://foo.example/123?indices=%d", limit) + limit := params.BeaconConfig().MaxBlobsPerBlock(es) - 1 + u := fmt.Sprintf("http://foo.example/%d?indices=%d", es, limit) request := httptest.NewRequest("GET", u, nil) writer := httptest.NewRecorder() writer.Body = &bytes.Buffer{} @@ -519,7 +516,8 @@ func TestBlobs_Electra(t *testing.T) { require.Equal(t, false, resp.Finalized) }) t.Run("blob index over max", func(t *testing.T) { - u := fmt.Sprintf("http://foo.example/123?indices=%d", overLimit) + overLimit := params.BeaconConfig().MaxBlobsPerBlock(es) + u := fmt.Sprintf("http://foo.example/%d?indices=%d", es, overLimit) request := httptest.NewRequest("GET", u, nil) writer := httptest.NewRecorder() writer.Body = &bytes.Buffer{} @@ -535,6 +533,7 @@ func TestBlobs_Electra(t *testing.T) { } func Test_parseIndices(t *testing.T) { + ds := util.SlotAtEpoch(t, params.BeaconConfig().DenebForkEpoch) tests := []struct { name string query string @@ -564,7 +563,7 @@ func Test_parseIndices(t *testing.T) { } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - got, err := parseIndices(&url.URL{RawQuery: tt.query}, 0) + got, err := parseIndices(&url.URL{RawQuery: tt.query}, ds) if err != nil && tt.wantErr != "" { require.StringContains(t, tt.wantErr, err.Error()) return @@ -593,6 +592,7 @@ func TestGetBlobs(t *testing.T) { {Epoch: 20, MaxBlobsPerBlock: 12}, // Fulu } params.OverrideBeaconConfig(cfg) + es := util.SlotAtEpoch(t, 
cfg.ElectraForkEpoch) db := testDB.SetupDB(t) denebBlock, blobs := util.GenerateTestDenebBlockWithSidecar(t, [32]byte{}, 123, 4) @@ -1014,9 +1014,7 @@ func TestGetBlobs(t *testing.T) { // Test for Electra fork t.Run("electra max blobs", func(t *testing.T) { - forkslot, err := slots.EpochStart(params.BeaconConfig().ElectraForkEpoch) - require.NoError(t, err) - overLimit := params.BeaconConfig().MaxBlobsPerBlock(forkslot) + overLimit := params.BeaconConfig().MaxBlobsPerBlock(es) electraBlock, electraBlobs := util.GenerateTestElectraBlockWithSidecar(t, [32]byte{}, 323, overLimit) require.NoError(t, db.SaveBlock(t.Context(), electraBlock)) electraBs := filesystem.NewEphemeralBlobStorage(t) diff --git a/beacon-chain/rpc/lookup/blocker.go b/beacon-chain/rpc/lookup/blocker.go index bb9c99a2c3..d0f4b6d2f9 100644 --- a/beacon-chain/rpc/lookup/blocker.go +++ b/beacon-chain/rpc/lookup/blocker.go @@ -3,7 +3,6 @@ package lookup import ( "context" "fmt" - "math" "strconv" "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain" @@ -284,14 +283,9 @@ func (p *BeaconDbBlocker) Blobs(ctx context.Context, id string, opts ...options. return make([]*blocks.VerifiedROBlob, 0), nil } - // Compute the first Fulu slot. - fuluForkEpoch := params.BeaconConfig().FuluForkEpoch - fuluForkSlot := primitives.Slot(math.MaxUint64) - if fuluForkEpoch != primitives.Epoch(math.MaxUint64) { - fuluForkSlot, err = slots.EpochStart(fuluForkEpoch) - if err != nil { - return nil, &core.RpcError{Err: errors.Wrap(err, "could not calculate Fulu start slot"), Reason: core.Internal} - } + fuluForkSlot, err := slots.EpochStart(params.BeaconConfig().FuluForkEpoch) + if err != nil { + return nil, &core.RpcError{Err: errors.Wrap(err, "could not calculate Fulu start slot"), Reason: core.Internal} } // Convert versioned hashes to indices if provided diff --git a/beacon-chain/rpc/lookup/blocker_test.go b/beacon-chain/rpc/lookup/blocker_test.go index ff3bd10956..cc6d5542d7 100644 --- a/beacon-chain/rpc/lookup/blocker_test.go +++ b/beacon-chain/rpc/lookup/blocker_test.go @@ -190,7 +190,7 @@ func TestBlobsErrorHandling(t *testing.T) { t.Run("non-existent block by slot returns 404", func(t *testing.T) { blocker := &BeaconDbBlocker{ - BeaconDB: db, + BeaconDB: db, ChainInfoFetcher: &mockChain.ChainService{}, } @@ -275,39 +275,19 @@ func TestBlobsErrorHandling(t *testing.T) { } func TestGetBlob(t *testing.T) { - const ( - slot = 123 - blobCount = 4 - denebForEpoch = 1 - fuluForkEpoch = 2 - ) - - setupDeneb := func(t *testing.T) { - params.SetupTestConfigCleanup(t) - cfg := params.BeaconConfig().Copy() - cfg.DenebForkEpoch = denebForEpoch - params.OverrideBeaconConfig(cfg) - } - - setupFulu := func(t *testing.T) { - params.SetupTestConfigCleanup(t) - cfg := params.BeaconConfig().Copy() - cfg.DenebForkEpoch = denebForEpoch - cfg.FuluForkEpoch = fuluForkEpoch - params.OverrideBeaconConfig(cfg) - } - + const blobCount = 4 ctx := t.Context() - db := testDB.SetupDB(t) + params.SetupTestConfigCleanup(t) + ds := util.SlotAtEpoch(t, params.BeaconConfig().DenebForkEpoch) + params.BeaconConfig().FuluForkEpoch = params.BeaconConfig().DenebForkEpoch + 4096*2 - // Start the trusted setup. - err := kzg.Start() - require.NoError(t, err) + db := testDB.SetupDB(t) + require.NoError(t, kzg.Start()) // Create and save Deneb block and blob sidecars. 
_, blobStorage := filesystem.NewEphemeralBlobStorageAndFs(t) - denebBlock, storedBlobSidecars := util.GenerateTestDenebBlockWithSidecar(t, [fieldparams.RootLength]byte{}, slot, blobCount) + denebBlock, storedBlobSidecars := util.GenerateTestDenebBlockWithSidecar(t, [fieldparams.RootLength]byte{}, ds, blobCount, util.WithDenebSlot(ds)) denebBlockRoot := denebBlock.Root() verifiedStoredSidecars := verification.FakeVerifySliceForTest(t, storedBlobSidecars) @@ -316,13 +296,14 @@ func TestGetBlob(t *testing.T) { require.NoError(t, err) } - err = db.SaveBlock(t.Context(), denebBlock) + err := db.SaveBlock(t.Context(), denebBlock) require.NoError(t, err) // Create Electra block and blob sidecars. (Electra block = Fulu block), // save the block, convert blob sidecars to data column sidecars and save the block. - fuluForkSlot := fuluForkEpoch * params.BeaconConfig().SlotsPerEpoch - fuluBlock, fuluBlobSidecars := util.GenerateTestElectraBlockWithSidecar(t, [fieldparams.RootLength]byte{}, fuluForkSlot, blobCount) + fs := util.SlotAtEpoch(t, params.BeaconConfig().FuluForkEpoch) + dsStr := fmt.Sprintf("%d", ds) + fuluBlock, fuluBlobSidecars := util.GenerateTestElectraBlockWithSidecar(t, [fieldparams.RootLength]byte{}, fs, blobCount) fuluBlockRoot := fuluBlock.Root() cellsAndProofsList := make([]kzg.CellsAndProofs, 0, len(fuluBlobSidecars)) @@ -347,8 +328,6 @@ func TestGetBlob(t *testing.T) { require.NoError(t, err) t.Run("genesis", func(t *testing.T) { - setupDeneb(t) - blocker := &BeaconDbBlocker{} _, rpcErr := blocker.Blobs(ctx, "genesis") require.Equal(t, http.StatusBadRequest, core.ErrorReasonToHTTP(rpcErr.Reason)) @@ -356,8 +335,6 @@ func TestGetBlob(t *testing.T) { }) t.Run("head", func(t *testing.T) { - setupDeneb(t) - blocker := &BeaconDbBlocker{ ChainInfoFetcher: &mockChain.ChainService{ Root: denebBlockRoot[:], @@ -388,8 +365,6 @@ func TestGetBlob(t *testing.T) { }) t.Run("finalized", func(t *testing.T) { - setupDeneb(t) - blocker := &BeaconDbBlocker{ ChainInfoFetcher: &mockChain.ChainService{FinalizedCheckPoint: ðpb.Checkpoint{Root: denebBlockRoot[:]}}, GenesisTimeFetcher: &testutil.MockGenesisTimeFetcher{ @@ -405,8 +380,6 @@ func TestGetBlob(t *testing.T) { }) t.Run("justified", func(t *testing.T) { - setupDeneb(t) - blocker := &BeaconDbBlocker{ ChainInfoFetcher: &mockChain.ChainService{CurrentJustifiedCheckPoint: ðpb.Checkpoint{Root: denebBlockRoot[:]}}, GenesisTimeFetcher: &testutil.MockGenesisTimeFetcher{ @@ -422,8 +395,6 @@ func TestGetBlob(t *testing.T) { }) t.Run("root", func(t *testing.T) { - setupDeneb(t) - blocker := &BeaconDbBlocker{ GenesisTimeFetcher: &testutil.MockGenesisTimeFetcher{ Genesis: time.Now(), @@ -438,8 +409,6 @@ func TestGetBlob(t *testing.T) { }) t.Run("slot", func(t *testing.T) { - setupDeneb(t) - blocker := &BeaconDbBlocker{ ChainInfoFetcher: &mockChain.ChainService{}, GenesisTimeFetcher: &testutil.MockGenesisTimeFetcher{ @@ -449,7 +418,7 @@ func TestGetBlob(t *testing.T) { BlobStorage: blobStorage, } - verifiedBlobs, rpcErr := blocker.Blobs(ctx, "123") + verifiedBlobs, rpcErr := blocker.Blobs(ctx, dsStr) require.IsNil(t, rpcErr) require.Equal(t, blobCount, len(verifiedBlobs)) }) @@ -457,8 +426,6 @@ func TestGetBlob(t *testing.T) { t.Run("one blob only", func(t *testing.T) { const index = 2 - setupDeneb(t) - blocker := &BeaconDbBlocker{ ChainInfoFetcher: &mockChain.ChainService{FinalizedCheckPoint: ðpb.Checkpoint{Root: denebBlockRoot[:]}}, GenesisTimeFetcher: &testutil.MockGenesisTimeFetcher{ @@ -468,7 +435,7 @@ func TestGetBlob(t *testing.T) { BlobStorage: 
blobStorage, } - retrievedVerifiedSidecars, rpcErr := blocker.Blobs(ctx, "123", options.WithIndices([]int{index})) + retrievedVerifiedSidecars, rpcErr := blocker.Blobs(ctx, dsStr, options.WithIndices([]int{index})) require.IsNil(t, rpcErr) require.Equal(t, 1, len(retrievedVerifiedSidecars)) @@ -483,8 +450,6 @@ func TestGetBlob(t *testing.T) { }) t.Run("no blobs returns an empty array", func(t *testing.T) { - setupDeneb(t) - blocker := &BeaconDbBlocker{ ChainInfoFetcher: &mockChain.ChainService{FinalizedCheckPoint: ðpb.Checkpoint{Root: denebBlockRoot[:]}}, GenesisTimeFetcher: &testutil.MockGenesisTimeFetcher{ @@ -494,14 +459,12 @@ func TestGetBlob(t *testing.T) { BlobStorage: filesystem.NewEphemeralBlobStorage(t), } - verifiedBlobs, rpcErr := blocker.Blobs(ctx, "123") + verifiedBlobs, rpcErr := blocker.Blobs(ctx, dsStr) require.IsNil(t, rpcErr) require.Equal(t, 0, len(verifiedBlobs)) }) t.Run("no blob at index", func(t *testing.T) { - setupDeneb(t) - blocker := &BeaconDbBlocker{ ChainInfoFetcher: &mockChain.ChainService{FinalizedCheckPoint: ðpb.Checkpoint{Root: denebBlockRoot[:]}}, GenesisTimeFetcher: &testutil.MockGenesisTimeFetcher{ @@ -512,14 +475,12 @@ func TestGetBlob(t *testing.T) { } noBlobIndex := len(storedBlobSidecars) + 1 - _, rpcErr := blocker.Blobs(ctx, "123", options.WithIndices([]int{0, noBlobIndex})) + _, rpcErr := blocker.Blobs(ctx, dsStr, options.WithIndices([]int{0, noBlobIndex})) require.NotNil(t, rpcErr) require.Equal(t, core.ErrorReason(core.NotFound), rpcErr.Reason) }) t.Run("index too big", func(t *testing.T) { - setupDeneb(t) - blocker := &BeaconDbBlocker{ ChainInfoFetcher: &mockChain.ChainService{FinalizedCheckPoint: ðpb.Checkpoint{Root: denebBlockRoot[:]}}, GenesisTimeFetcher: &testutil.MockGenesisTimeFetcher{ @@ -528,14 +489,12 @@ func TestGetBlob(t *testing.T) { BeaconDB: db, BlobStorage: blobStorage, } - _, rpcErr := blocker.Blobs(ctx, "123", options.WithIndices([]int{0, math.MaxInt})) + _, rpcErr := blocker.Blobs(ctx, dsStr, options.WithIndices([]int{0, math.MaxInt})) require.NotNil(t, rpcErr) require.Equal(t, core.ErrorReason(core.BadRequest), rpcErr.Reason) }) t.Run("not enough stored data column sidecars", func(t *testing.T) { - setupFulu(t) - _, dataColumnStorage := filesystem.NewEphemeralDataColumnStorageAndFs(t) err = dataColumnStorage.Save(verifiedRoDataColumnSidecars[:fieldparams.CellsPerBlob-1]) require.NoError(t, err) @@ -555,8 +514,6 @@ func TestGetBlob(t *testing.T) { }) t.Run("reconstruction needed", func(t *testing.T) { - setupFulu(t) - _, dataColumnStorage := filesystem.NewEphemeralDataColumnStorageAndFs(t) err = dataColumnStorage.Save(verifiedRoDataColumnSidecars[1 : peerdas.MinimumColumnCountToReconstruct()+1]) require.NoError(t, err) @@ -582,8 +539,6 @@ func TestGetBlob(t *testing.T) { }) t.Run("no reconstruction needed", func(t *testing.T) { - setupFulu(t) - _, dataColumnStorage := filesystem.NewEphemeralDataColumnStorageAndFs(t) err = dataColumnStorage.Save(verifiedRoDataColumnSidecars) require.NoError(t, err) diff --git a/beacon-chain/startup/clock.go b/beacon-chain/startup/clock.go index deae7c6ce2..fb4f19be8b 100644 --- a/beacon-chain/startup/clock.go +++ b/beacon-chain/startup/clock.go @@ -74,6 +74,18 @@ func WithTimeAsNow(t time.Time) ClockOpt { } } +func WithSlotAsNow(s types.Slot) ClockOpt { + return func(g *Clock) { + g.now = func() time.Time { + t, err := slots.StartTime(g.t, s) + if err != nil { + panic(err) // lint:nopanic -- This is a programming error if genesis/slot are invalid. 
+ } + return t + } + } +} + // NewClock constructs a Clock value from a genesis timestamp (t) and a Genesis Validator Root (vr). // The WithNower ClockOpt can be used in tests to specify an alternate `time.Now` implementation, // for instance to return a value for `Now` spanning a certain number of slots from genesis time, to control the current slot. diff --git a/beacon-chain/sync/backfill/blobs_test.go b/beacon-chain/sync/backfill/blobs_test.go index a1353cc678..b891249ed8 100644 --- a/beacon-chain/sync/backfill/blobs_test.go +++ b/beacon-chain/sync/backfill/blobs_test.go @@ -5,6 +5,7 @@ import ( "github.com/OffchainLabs/prysm/v6/beacon-chain/db/filesystem" "github.com/OffchainLabs/prysm/v6/beacon-chain/verification" + "github.com/OffchainLabs/prysm/v6/config/params" "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" @@ -24,8 +25,9 @@ func testBlobGen(t *testing.T, start primitives.Slot, n int) ([]blocks.ROBlock, } func TestValidateNext_happy(t *testing.T) { - current := primitives.Slot(128) - blks, blobs := testBlobGen(t, 63, 4) + startSlot := util.SlotAtEpoch(t, params.BeaconConfig().DenebForkEpoch) + current := startSlot + 65 + blks, blobs := testBlobGen(t, startSlot, 4) cfg := &blobSyncConfig{ retentionStart: 0, nbv: testNewBlobVerifier(), @@ -74,8 +76,9 @@ func TestValidateNext_sigMatch(t *testing.T) { } func TestValidateNext_errorsFromVerifier(t *testing.T) { - current := primitives.Slot(128) - blks, blobs := testBlobGen(t, 63, 1) + ds := util.SlotAtEpoch(t, params.BeaconConfig().DenebForkEpoch) + current := primitives.Slot(ds + 96) + blks, blobs := testBlobGen(t, ds+31, 1) cases := []struct { name string err error diff --git a/beacon-chain/sync/blobs_test.go b/beacon-chain/sync/blobs_test.go index 0e0d937a5e..544a58d46f 100644 --- a/beacon-chain/sync/blobs_test.go +++ b/beacon-chain/sync/blobs_test.go @@ -2,6 +2,7 @@ package sync import ( "encoding/binary" + "io" "math" "math/big" "testing" @@ -18,9 +19,11 @@ import ( "github.com/OffchainLabs/prysm/v6/config/params" "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" + "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" types "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" leakybucket "github.com/OffchainLabs/prysm/v6/container/leaky-bucket" "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v6/genesis" enginev1 "github.com/OffchainLabs/prysm/v6/proto/engine/v1" ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" "github.com/OffchainLabs/prysm/v6/testing/require" @@ -165,23 +168,12 @@ func (r *expectedBlobChunk) requireExpected(t *testing.T, s *Service, stream net require.Equal(t, rob.Index, r.sidecar.Index) } -func (c *blobsTestCase) setup(t *testing.T) (*Service, []blocks.ROBlob, func()) { - cfg := params.BeaconConfig() - copiedCfg := cfg.Copy() - repositionFutureEpochs(copiedCfg) - copiedCfg.InitializeForkSchedule() - params.OverrideBeaconConfig(copiedCfg) - cleanup := func() { - params.OverrideBeaconConfig(cfg) - } - maxBlobs := int(params.BeaconConfig().MaxBlobsPerBlock(0)) - chain, clock := defaultMockChain(t) +func (c *blobsTestCase) setup(t *testing.T) (*Service, []blocks.ROBlob) { + maxBlobs := int(params.BeaconConfig().MaxBlobsPerBlockAtEpoch(params.BeaconConfig().DenebForkEpoch)) + chain := defaultMockChain(t, c.clock.CurrentEpoch()) if c.chain == nil { c.chain = 
chain } - if c.clock == nil { - c.clock = clock - } d := db.SetupDB(t) sidecars := make([]blocks.ROBlob, 0) @@ -208,16 +200,16 @@ func (c *blobsTestCase) setup(t *testing.T) (*Service, []blocks.ROBlob, func()) client := p2ptest.NewTestP2P(t) s := &Service{ - cfg: &config{p2p: client, chain: c.chain, clock: clock, beaconDB: d, blobStorage: filesystem.NewEphemeralBlobStorage(t)}, + cfg: &config{p2p: client, chain: c.chain, clock: c.clock, beaconDB: d, blobStorage: filesystem.NewEphemeralBlobStorage(t)}, rateLimiter: newRateLimiter(client), } - byRootRate := params.BeaconConfig().MaxRequestBlobSidecars * uint64(params.BeaconConfig().MaxBlobsPerBlock(0)) - byRangeRate := params.BeaconConfig().MaxRequestBlobSidecars * uint64(params.BeaconConfig().MaxBlobsPerBlock(0)) + byRootRate := params.BeaconConfig().MaxRequestBlobSidecars * uint64(maxBlobs) + byRangeRate := params.BeaconConfig().MaxRequestBlobSidecars * uint64(maxBlobs) s.setRateCollector(p2p.RPCBlobSidecarsByRootTopicV1, leakybucket.NewCollector(0.000001, int64(byRootRate), time.Second, false)) s.setRateCollector(p2p.RPCBlobSidecarsByRangeTopicV1, leakybucket.NewCollector(0.000001, int64(byRangeRate), time.Second, false)) - return s, sidecars, cleanup + return s, sidecars } func defaultExpectedRequirer(t *testing.T, s *Service, expect []*expectedBlobChunk) func(network.Stream) { @@ -225,12 +217,16 @@ func defaultExpectedRequirer(t *testing.T, s *Service, expect []*expectedBlobChu for _, ex := range expect { ex.requireExpected(t, s, stream) } + + encoding := s.cfg.p2p.Encoding() + _, _, err := ReadStatusCode(stream, encoding) + require.ErrorIs(t, err, io.EOF) } } func (c *blobsTestCase) run(t *testing.T) { - s, sidecars, cleanup := c.setup(t) - defer cleanup() + blobRpcThrottleInterval = time.Microsecond * 1 + s, sidecars := c.setup(t) req := c.requestFromSidecars(sidecars) expect := c.defineExpected(t, sidecars, req) m := map[types.Slot][]blocks.ROBlob{} @@ -266,41 +262,32 @@ func (c *blobsTestCase) run(t *testing.T) { // so it is helpful in tests to temporarily reposition the epochs to give room for some math. 
func repositionFutureEpochs(cfg *params.BeaconChainConfig) { if cfg.FuluForkEpoch == math.MaxUint64 { - cfg.FuluForkEpoch = cfg.ElectraForkEpoch + 100 + cfg.FuluForkEpoch = cfg.ElectraForkEpoch + 4096*2 } } -func defaultMockChain(t *testing.T) (*mock.ChainService, *startup.Clock) { - de := params.BeaconConfig().DenebForkEpoch - df, err := params.Fork(de) +func defaultMockChain(t *testing.T, current primitives.Epoch) *mock.ChainService { + fe := current - 2 + df, err := params.Fork(current) require.NoError(t, err) - denebBuffer := params.BeaconConfig().MinEpochsForBlobsSidecarsRequest + 1000 - ce := de + denebBuffer - fe := ce - 2 - cs, err := slots.EpochStart(ce) - require.NoError(t, err) - genesis := time.Now() - mockNow := startup.MockNower{} - clock := startup.NewClock(genesis, params.BeaconConfig().GenesisValidatorsRoot, startup.WithNower(mockNow.Now)) - mockNow.SetSlot(t, clock, cs) chain := &mock.ChainService{ FinalizedCheckPoint: ðpb.Checkpoint{Epoch: fe}, Fork: df, } - return chain, clock + return chain } func TestTestcaseSetup_BlocksAndBlobs(t *testing.T) { + ds := util.SlotAtEpoch(t, params.BeaconConfig().DenebForkEpoch) ctx := t.Context() nblocks := 10 - c := &blobsTestCase{nblocks: nblocks} + c := &blobsTestCase{nblocks: nblocks, clock: startup.NewClock(genesis.Time(), genesis.ValidatorsRoot(), startup.WithSlotAsNow(ds))} c.oldestSlot = c.defaultOldestSlotByRoot - s, sidecars, cleanup := c.setup(t) + s, sidecars := c.setup(t) req := blobRootRequestFromSidecars(sidecars) expect := c.filterExpectedByRoot(t, sidecars, req) - defer cleanup() - maxed := nblocks * params.BeaconConfig().MaxBlobsPerBlock(0) + maxed := nblocks * params.BeaconConfig().MaxBlobsPerBlockAtEpoch(params.BeaconConfig().DenebForkEpoch) require.Equal(t, maxed, len(sidecars)) require.Equal(t, maxed, len(expect)) for _, sc := range sidecars { diff --git a/beacon-chain/sync/initial-sync/blocks_fetcher_test.go b/beacon-chain/sync/initial-sync/blocks_fetcher_test.go index 84f71c8411..b0fcd1df7e 100644 --- a/beacon-chain/sync/initial-sync/blocks_fetcher_test.go +++ b/beacon-chain/sync/initial-sync/blocks_fetcher_test.go @@ -1017,13 +1017,13 @@ func TestBlobRangeForBlocks(t *testing.T) { for i := range blks { sbbs[i] = blks[i] } - retentionStart := primitives.Slot(5) + retentionStart := blks[len(blks)/2].Block().Slot() bwb, err := sortedBlockWithVerifiedBlobSlice(sbbs) require.NoError(t, err) bounds := countCommitments(bwb, retentionStart).blobRange(nil) require.Equal(t, retentionStart, bounds.low) - higher := primitives.Slot(len(blks) + 1) - bounds = countCommitments(bwb, higher).blobRange(nil) + maxBlkSlot := blks[len(blks)-1].Block().Slot() + bounds = countCommitments(bwb, maxBlkSlot+1).blobRange(nil) var nilBounds *blobRange require.Equal(t, nilBounds, bounds) @@ -1054,17 +1054,17 @@ func TestBlobRequest(t *testing.T) { } bwb, err := sortedBlockWithVerifiedBlobSlice(sbbs) require.NoError(t, err) - maxBlkSlot := primitives.Slot(len(blks) - 1) - tooHigh := primitives.Slot(len(blks) + 1) + maxBlkSlot := blks[len(blks)-1].Block().Slot() + tooHigh := maxBlkSlot + 1 req = countCommitments(bwb, tooHigh).blobRange(nil).Request() require.Equal(t, nilReq, req) req = countCommitments(bwb, maxBlkSlot).blobRange(nil).Request() - require.Equal(t, uint64(1), req.Count) require.Equal(t, maxBlkSlot, req.StartSlot) + require.Equal(t, uint64(1), req.Count) - halfway := primitives.Slot(5) + halfway := blks[len(blks)/2].Block().Slot() req = countCommitments(bwb, halfway).blobRange(nil).Request() require.Equal(t, halfway, req.StartSlot) 
// adding 1 to include the halfway slot itself @@ -1103,6 +1103,12 @@ func TestCountCommitments(t *testing.T) { } func TestCommitmentCountList(t *testing.T) { + de := params.BeaconConfig().DenebForkEpoch + ds := util.SlotAtEpoch(t, de) + denebRel := func(s primitives.Slot) primitives.Slot { + return ds + s + } + maxBlobs := params.BeaconConfig().MaxBlobsPerBlock(ds) cases := []struct { name string cc commitmentCountList @@ -1119,20 +1125,20 @@ func TestCommitmentCountList(t *testing.T) { { name: "nil bss, single slot", cc: []commitmentCount{ - {slot: 11235, count: 1}, + {slot: denebRel(11235), count: 1}, }, - expected: &blobRange{low: 11235, high: 11235}, - request: ðpb.BlobSidecarsByRangeRequest{StartSlot: 11235, Count: 1}, + expected: &blobRange{low: denebRel(11235), high: denebRel(11235)}, + request: ðpb.BlobSidecarsByRangeRequest{StartSlot: denebRel(11235), Count: 1}, }, { name: "nil bss, sparse slots", cc: []commitmentCount{ - {slot: 11235, count: 1}, - {slot: 11240, count: params.BeaconConfig().MaxBlobsPerBlock(0)}, - {slot: 11250, count: 3}, + {slot: denebRel(11235), count: 1}, + {slot: denebRel(11240), count: maxBlobs}, + {slot: denebRel(11250), count: 3}, }, - expected: &blobRange{low: 11235, high: 11250}, - request: ðpb.BlobSidecarsByRangeRequest{StartSlot: 11235, Count: 16}, + expected: &blobRange{low: denebRel(11235), high: denebRel(11250)}, + request: ðpb.BlobSidecarsByRangeRequest{StartSlot: denebRel(11235), Count: 16}, }, { name: "AllAvailable in middle, some avail low, none high", @@ -1141,15 +1147,15 @@ func TestCommitmentCountList(t *testing.T) { bytesutil.ToBytes32([]byte("0")): {0, 1}, bytesutil.ToBytes32([]byte("1")): {0, 1, 2, 3, 4, 5}, } - return filesystem.NewMockBlobStorageSummarizer(t, onDisk) + return filesystem.NewMockBlobStorageSummarizer(t, de, onDisk) }, cc: []commitmentCount{ - {slot: 0, count: 3, root: bytesutil.ToBytes32([]byte("0"))}, - {slot: 5, count: params.BeaconConfig().MaxBlobsPerBlock(0), root: bytesutil.ToBytes32([]byte("1"))}, - {slot: 15, count: 3}, + {slot: denebRel(0), count: 3, root: bytesutil.ToBytes32([]byte("0"))}, + {slot: denebRel(5), count: maxBlobs, root: bytesutil.ToBytes32([]byte("1"))}, + {slot: denebRel(15), count: 3}, }, - expected: &blobRange{low: 0, high: 15}, - request: ðpb.BlobSidecarsByRangeRequest{StartSlot: 0, Count: 16}, + expected: &blobRange{low: denebRel(0), high: denebRel(15)}, + request: ðpb.BlobSidecarsByRangeRequest{StartSlot: denebRel(0), Count: 16}, }, { name: "AllAvailable at high and low", @@ -1158,15 +1164,15 @@ func TestCommitmentCountList(t *testing.T) { bytesutil.ToBytes32([]byte("0")): {0, 1}, bytesutil.ToBytes32([]byte("2")): {0, 1, 2, 3, 4, 5}, } - return filesystem.NewMockBlobStorageSummarizer(t, onDisk) + return filesystem.NewMockBlobStorageSummarizer(t, de, onDisk) }, cc: []commitmentCount{ - {slot: 0, count: 2, root: bytesutil.ToBytes32([]byte("0"))}, - {slot: 5, count: 3}, - {slot: 15, count: params.BeaconConfig().MaxBlobsPerBlock(0), root: bytesutil.ToBytes32([]byte("2"))}, + {slot: denebRel(0), count: 2, root: bytesutil.ToBytes32([]byte("0"))}, + {slot: denebRel(5), count: 3}, + {slot: denebRel(15), count: maxBlobs, root: bytesutil.ToBytes32([]byte("2"))}, }, - expected: &blobRange{low: 5, high: 5}, - request: ðpb.BlobSidecarsByRangeRequest{StartSlot: 5, Count: 1}, + expected: &blobRange{low: denebRel(5), high: denebRel(5)}, + request: ðpb.BlobSidecarsByRangeRequest{StartSlot: denebRel(5), Count: 1}, }, { name: "AllAvailable at high and low, adjacent range in middle", @@ -1175,16 +1181,16 @@ func 
TestCommitmentCountList(t *testing.T) { bytesutil.ToBytes32([]byte("0")): {0, 1}, bytesutil.ToBytes32([]byte("2")): {0, 1, 2, 3, 4, 5}, } - return filesystem.NewMockBlobStorageSummarizer(t, onDisk) + return filesystem.NewMockBlobStorageSummarizer(t, de, onDisk) }, cc: []commitmentCount{ - {slot: 0, count: 2, root: bytesutil.ToBytes32([]byte("0"))}, - {slot: 5, count: 3}, - {slot: 6, count: 3}, - {slot: 15, count: params.BeaconConfig().MaxBlobsPerBlock(0), root: bytesutil.ToBytes32([]byte("2"))}, + {slot: denebRel(0), count: 2, root: bytesutil.ToBytes32([]byte("0"))}, + {slot: denebRel(5), count: 3}, + {slot: denebRel(6), count: 3}, + {slot: denebRel(15), count: maxBlobs, root: bytesutil.ToBytes32([]byte("2"))}, }, - expected: &blobRange{low: 5, high: 6}, - request: ðpb.BlobSidecarsByRangeRequest{StartSlot: 5, Count: 2}, + expected: &blobRange{low: denebRel(5), high: denebRel(6)}, + request: ðpb.BlobSidecarsByRangeRequest{StartSlot: denebRel(5), Count: 2}, }, { name: "AllAvailable at high and low, range in middle", @@ -1194,16 +1200,16 @@ func TestCommitmentCountList(t *testing.T) { bytesutil.ToBytes32([]byte("1")): {0, 1}, bytesutil.ToBytes32([]byte("2")): {0, 1, 2, 3, 4, 5}, } - return filesystem.NewMockBlobStorageSummarizer(t, onDisk) + return filesystem.NewMockBlobStorageSummarizer(t, de, onDisk) }, cc: []commitmentCount{ - {slot: 0, count: 2, root: bytesutil.ToBytes32([]byte("0"))}, - {slot: 5, count: 3, root: bytesutil.ToBytes32([]byte("1"))}, - {slot: 10, count: 3}, - {slot: 15, count: params.BeaconConfig().MaxBlobsPerBlock(0), root: bytesutil.ToBytes32([]byte("2"))}, + {slot: denebRel(0), count: 2, root: bytesutil.ToBytes32([]byte("0"))}, + {slot: denebRel(5), count: 3, root: bytesutil.ToBytes32([]byte("1"))}, + {slot: denebRel(10), count: 3}, + {slot: denebRel(15), count: maxBlobs, root: bytesutil.ToBytes32([]byte("2"))}, }, - expected: &blobRange{low: 5, high: 10}, - request: ðpb.BlobSidecarsByRangeRequest{StartSlot: 5, Count: 6}, + expected: &blobRange{low: denebRel(5), high: denebRel(10)}, + request: ðpb.BlobSidecarsByRangeRequest{StartSlot: denebRel(5), Count: 6}, }, } for _, c := range cases { @@ -1218,8 +1224,8 @@ func TestCommitmentCountList(t *testing.T) { require.IsNil(t, br.Request()) } else { req := br.Request() - require.DeepEqual(t, req.StartSlot, c.request.StartSlot) - require.DeepEqual(t, req.Count, c.request.Count) + require.Equal(t, req.StartSlot, c.request.StartSlot) + require.Equal(t, req.Count, c.request.Count) } }) } @@ -1299,7 +1305,7 @@ func TestVerifyAndPopulateBlobs(t *testing.T) { r1: {0, 1}, r7: {0, 1, 2, 3, 4, 5}, } - bss := filesystem.NewMockBlobStorageSummarizer(t, onDisk) + bss := filesystem.NewMockBlobStorageSummarizer(t, params.BeaconConfig().DenebForkEpoch, onDisk) err := verifyAndPopulateBlobs(bwb, blobs, testReqFromResp(bwb), bss) require.NoError(t, err) require.Equal(t, 6, len(bwb[i1].Blobs)) diff --git a/beacon-chain/sync/initial-sync/service_test.go b/beacon-chain/sync/initial-sync/service_test.go index 6a88ddf153..fc3c06baf1 100644 --- a/beacon-chain/sync/initial-sync/service_test.go +++ b/beacon-chain/sync/initial-sync/service_test.go @@ -439,6 +439,7 @@ func TestService_Synced(t *testing.T) { } func TestMissingBlobRequest(t *testing.T) { + ds := util.SlotAtEpoch(t, params.BeaconConfig().DenebForkEpoch) cases := []struct { name string setup func(t *testing.T) (blocks.ROBlock, *filesystem.BlobStorage) @@ -476,7 +477,7 @@ func TestMissingBlobRequest(t *testing.T) { { name: "2 commitments, 1 missing", setup: func(t *testing.T) (blocks.ROBlock, 
*filesystem.BlobStorage) { - bk, _ := util.GenerateTestDenebBlockWithSidecar(t, [32]byte{}, 0, 2) + bk, _ := util.GenerateTestDenebBlockWithSidecar(t, [32]byte{}, ds, 2) bm, fs := filesystem.NewEphemeralBlobStorageWithMocker(t) require.NoError(t, bm.CreateFakeIndices(bk.Root(), bk.Block().Slot(), 1)) return bk, fs @@ -486,7 +487,7 @@ func TestMissingBlobRequest(t *testing.T) { { name: "2 commitments, 0 missing", setup: func(t *testing.T) (blocks.ROBlock, *filesystem.BlobStorage) { - bk, _ := util.GenerateTestDenebBlockWithSidecar(t, [32]byte{}, 0, 2) + bk, _ := util.GenerateTestDenebBlockWithSidecar(t, [32]byte{}, ds, 2) bm, fs := filesystem.NewEphemeralBlobStorageWithMocker(t) require.NoError(t, bm.CreateFakeIndices(bk.Root(), bk.Block().Slot(), 0, 1)) return bk, fs diff --git a/beacon-chain/sync/rpc_beacon_blocks_by_root_test.go b/beacon-chain/sync/rpc_beacon_blocks_by_root_test.go index 23d456708e..7fe47a08d7 100644 --- a/beacon-chain/sync/rpc_beacon_blocks_by_root_test.go +++ b/beacon-chain/sync/rpc_beacon_blocks_by_root_test.go @@ -415,6 +415,7 @@ func TestRequestPendingBlobs(t *testing.T) { } func TestConstructPendingBlobsRequest(t *testing.T) { + ds := util.SlotAtEpoch(t, params.BeaconConfig().DenebForkEpoch) d := db.SetupDB(t) bs := filesystem.NewEphemeralBlobStorage(t) s := &Service{cfg: &config{beaconDB: d, blobStorage: bs}} @@ -436,6 +437,7 @@ func TestConstructPendingBlobsRequest(t *testing.T) { ParentRoot: bytesutil.PadTo([]byte{}, 32), StateRoot: bytesutil.PadTo([]byte{}, 32), BodyRoot: bytesutil.PadTo([]byte{}, 32), + Slot: ds, }, Signature: bytesutil.PadTo([]byte{}, 96), } diff --git a/beacon-chain/sync/rpc_blob_sidecars_by_range.go b/beacon-chain/sync/rpc_blob_sidecars_by_range.go index d4f35fe74a..ff3e6e28ae 100644 --- a/beacon-chain/sync/rpc_blob_sidecars_by_range.go +++ b/beacon-chain/sync/rpc_blob_sidecars_by_range.go @@ -57,6 +57,8 @@ func (s *Service) streamBlobBatch(ctx context.Context, batch blockBatch, wQuota return wQuota, nil } +var blobRpcThrottleInterval = time.Second + // blobsSidecarsByRangeRPCHandler looks up the request blobs from the database from a given start slot index func (s *Service) blobSidecarsByRangeRPCHandler(ctx context.Context, msg interface{}, stream libp2pcore.Stream) error { var err error @@ -86,7 +88,7 @@ func (s *Service) blobSidecarsByRangeRPCHandler(ctx context.Context, msg interfa } // Ticker to stagger out large requests. 
- ticker := time.NewTicker(time.Second) + ticker := time.NewTicker(blobRpcThrottleInterval) defer ticker.Stop() batcher, err := newBlockRangeBatcher(rp, s.cfg.beaconDB, s.rateLimiter, s.cfg.chain.IsCanonical, ticker) if err != nil { diff --git a/beacon-chain/sync/rpc_blob_sidecars_by_range_test.go b/beacon-chain/sync/rpc_blob_sidecars_by_range_test.go index f1e4d16200..b79c65b1bc 100644 --- a/beacon-chain/sync/rpc_blob_sidecars_by_range_test.go +++ b/beacon-chain/sync/rpc_blob_sidecars_by_range_test.go @@ -4,12 +4,14 @@ import ( "testing" "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p" + "github.com/OffchainLabs/prysm/v6/beacon-chain/startup" "github.com/OffchainLabs/prysm/v6/config/params" "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" types "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v6/genesis" ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v6/testing/util" ) func (c *blobsTestCase) defaultOldestSlotByRange(t *testing.T) types.Slot { @@ -18,8 +20,7 @@ func (c *blobsTestCase) defaultOldestSlotByRange(t *testing.T) types.Slot { if oldestEpoch < params.BeaconConfig().DenebForkEpoch { oldestEpoch = params.BeaconConfig().DenebForkEpoch } - oldestSlot, err := slots.EpochStart(oldestEpoch) - require.NoError(t, err) + oldestSlot := util.SlotAtEpoch(t, oldestEpoch) return oldestSlot } @@ -89,16 +90,11 @@ func (c *blobsTestCase) runTestBlobSidecarsByRange(t *testing.T) { } func TestBlobByRangeOK(t *testing.T) { - origNC := params.BeaconConfig() - // restore network config after test completes - defer func() { - params.OverrideBeaconConfig(origNC) - }() - // set MaxRequestBlobSidecars to a low-ish value so the test doesn't timeout. 
- nc := params.BeaconConfig().Copy() - nc.MaxRequestBlobSidecars = 100 - params.OverrideBeaconConfig(nc) - + params.SetupTestConfigCleanup(t) + ds := util.SlotAtEpoch(t, params.BeaconConfig().DenebForkEpoch) + params.BeaconConfig().InitializeForkSchedule() + retainSlots := util.SlotAtEpoch(t, params.BeaconConfig().MinEpochsForBlobsSidecarsRequest) + current := ds + retainSlots cases := []*blobsTestCase{ { name: "beginning of window + 10", @@ -134,11 +130,11 @@ func TestBlobByRangeOK(t *testing.T) { Count: 20, } }, - total: func() *int { x := params.BeaconConfig().MaxBlobsPerBlock(0) * 10; return &x }(), // 10 blocks * 4 blobs = 40 + total: func() *int { x := params.BeaconConfig().MaxBlobsPerBlock(ds) * 10; return &x }(), // 10 blocks * 4 blobs = 40 }, { name: "when request count > MAX_REQUEST_BLOCKS_DENEB, MAX_REQUEST_BLOBS_SIDECARS sidecars in response", - nblocks: int(params.BeaconConfig().MaxRequestBlocksDeneb) + 10, + nblocks: int(params.BeaconConfig().MaxRequestBlocksDeneb) + 1, requestFromSidecars: func(scs []blocks.ROBlob) interface{} { return ðpb.BlobSidecarsByRangeRequest{ StartSlot: scs[0].Slot(), @@ -148,7 +144,9 @@ func TestBlobByRangeOK(t *testing.T) { total: func() *int { x := int(params.BeaconConfig().MaxRequestBlobSidecars); return &x }(), }, } + clock := startup.NewClock(genesis.Time(), genesis.ValidatorsRoot(), startup.WithSlotAsNow(current)) for _, c := range cases { + c.clock = clock t.Run(c.name, func(t *testing.T) { c.runTestBlobSidecarsByRange(t) }) @@ -156,19 +154,12 @@ func TestBlobByRangeOK(t *testing.T) { } func TestBlobsByRangeValidation(t *testing.T) { - cfg := params.BeaconConfig() - repositionFutureEpochs(cfg) - undo, err := params.SetActiveWithUndo(cfg) - require.NoError(t, err) - defer func() { - require.NoError(t, undo()) - }() - denebSlot, err := slots.EpochStart(params.BeaconConfig().DenebForkEpoch) - require.NoError(t, err) + params.SetupTestConfigCleanup(t) + repositionFutureEpochs(params.BeaconConfig()) + denebSlot := util.SlotAtEpoch(t, params.BeaconConfig().DenebForkEpoch) minReqEpochs := params.BeaconConfig().MinEpochsForBlobsSidecarsRequest - minReqSlots, err := slots.EpochStart(minReqEpochs) - require.NoError(t, err) + minReqSlots := util.SlotAtEpoch(t, minReqEpochs) // spec criteria for mix,max bound checking /* Clients MUST keep a record of signed blobs sidecars seen on the epoch range @@ -231,7 +222,7 @@ func TestBlobsByRangeValidation(t *testing.T) { }, start: defaultMinStart, end: defaultMinStart + 9, - batch: blobBatchLimit(100), + batch: blobBatchLimit(defaultCurrent), }, { name: "count > MAX_REQUEST_BLOB_SIDECARS", @@ -243,7 +234,7 @@ func TestBlobsByRangeValidation(t *testing.T) { start: defaultMinStart, end: defaultMinStart - 10 + 999, // a large count is ok, we just limit the amount of actual responses - batch: blobBatchLimit(100), + batch: blobBatchLimit(defaultCurrent), }, { name: "start + count > current", @@ -265,7 +256,7 @@ func TestBlobsByRangeValidation(t *testing.T) { }, start: denebSlot, end: denebSlot + 89, - batch: blobBatchLimit(100), + batch: blobBatchLimit(defaultCurrent - minReqSlots + 100), }, } for _, c := range cases { @@ -285,8 +276,7 @@ func TestBlobsByRangeValidation(t *testing.T) { } func TestBlobRPCMinValidSlot(t *testing.T) { - denebSlot, err := slots.EpochStart(params.BeaconConfig().DenebForkEpoch) - require.NoError(t, err) + denebSlot := util.SlotAtEpoch(t, params.BeaconConfig().DenebForkEpoch) cases := []struct { name string current func(t *testing.T) types.Slot @@ -296,9 +286,8 @@ func 
TestBlobRPCMinValidSlot(t *testing.T) { { name: "before deneb", current: func(t *testing.T) types.Slot { - st, err := slots.EpochStart(params.BeaconConfig().DenebForkEpoch - 1) + st := util.SlotAtEpoch(t, params.BeaconConfig().DenebForkEpoch-1) // note: we no longer need to deal with deneb fork epoch being far future - require.NoError(t, err) return st }, expected: denebSlot, @@ -306,9 +295,8 @@ func TestBlobRPCMinValidSlot(t *testing.T) { { name: "equal to deneb", current: func(t *testing.T) types.Slot { - st, err := slots.EpochStart(params.BeaconConfig().DenebForkEpoch) + st := util.SlotAtEpoch(t, params.BeaconConfig().DenebForkEpoch) // note: we no longer need to deal with deneb fork epoch being far future - require.NoError(t, err) return st }, expected: denebSlot, @@ -316,9 +304,8 @@ func TestBlobRPCMinValidSlot(t *testing.T) { { name: "after deneb, before expiry starts", current: func(t *testing.T) types.Slot { - st, err := slots.EpochStart(params.BeaconConfig().DenebForkEpoch + params.BeaconConfig().MinEpochsForBlobsSidecarsRequest) + st := util.SlotAtEpoch(t, params.BeaconConfig().DenebForkEpoch+params.BeaconConfig().MinEpochsForBlobsSidecarsRequest) // note: we no longer need to deal with deneb fork epoch being far future - require.NoError(t, err) return st }, expected: denebSlot, @@ -326,9 +313,8 @@ func TestBlobRPCMinValidSlot(t *testing.T) { { name: "expiry starts one epoch after deneb + MIN_EPOCHS_FOR_BLOB_SIDECARS_REQUESTS", current: func(t *testing.T) types.Slot { - st, err := slots.EpochStart(params.BeaconConfig().DenebForkEpoch + params.BeaconConfig().MinEpochsForBlobsSidecarsRequest + 1) + st := util.SlotAtEpoch(t, params.BeaconConfig().DenebForkEpoch+params.BeaconConfig().MinEpochsForBlobsSidecarsRequest+1) // note: we no longer need to deal with deneb fork epoch being far future - require.NoError(t, err) return st }, expected: denebSlot + params.BeaconConfig().SlotsPerEpoch, diff --git a/beacon-chain/sync/rpc_blob_sidecars_by_root.go b/beacon-chain/sync/rpc_blob_sidecars_by_root.go index b1d226216d..f51ec93c3e 100644 --- a/beacon-chain/sync/rpc_blob_sidecars_by_root.go +++ b/beacon-chain/sync/rpc_blob_sidecars_by_root.go @@ -49,7 +49,7 @@ func (s *Service) blobSidecarByRootRPCHandler(ctx context.Context, msg interface batchSize := flags.Get().BlobBatchLimit var ticker *time.Ticker if len(blobIdents) > batchSize { - ticker = time.NewTicker(time.Second) + ticker = time.NewTicker(blobRpcThrottleInterval) } // Compute the oldest slot we'll allow a peer to request, based on the current slot. 
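The two sidecar handlers above now share the package-level blobRpcThrottleInterval instead of a hard-coded one-second ticker, so tests in the same package can shorten the wait between response batches. A minimal sketch of how a test might combine that with the startup.WithSlotAsNow and util.SlotAtEpoch helpers used throughout this series (the test name here is hypothetical, shown only to illustrate the setup pattern):

package sync

import (
	"testing"
	"time"

	"github.com/OffchainLabs/prysm/v6/beacon-chain/startup"
	"github.com/OffchainLabs/prysm/v6/config/params"
	"github.com/OffchainLabs/prysm/v6/genesis"
	"github.com/OffchainLabs/prysm/v6/testing/util"
)

// TestBlobHandlerSetupSketch is an illustrative example only: it shortens the shared
// throttle interval so handler tests do not sleep a full second between batches, and
// pins the clock's notion of "now" to the first Deneb slot so retention-window math
// has real slots below the current slot to work with.
func TestBlobHandlerSetupSketch(t *testing.T) {
	blobRpcThrottleInterval = time.Microsecond
	ds := util.SlotAtEpoch(t, params.BeaconConfig().DenebForkEpoch)
	clock := startup.NewClock(genesis.Time(), genesis.ValidatorsRoot(), startup.WithSlotAsNow(ds))
	_ = clock // wire this clock into the Service config under test
}

In the tests changed by this series the clock is not used directly like this; it is attached to the blobsTestCase struct (c.clock) and passed into the Service config during setup.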
diff --git a/beacon-chain/sync/rpc_blob_sidecars_by_root_test.go b/beacon-chain/sync/rpc_blob_sidecars_by_root_test.go index a291524d2b..e011455e0f 100644 --- a/beacon-chain/sync/rpc_blob_sidecars_by_root_test.go +++ b/beacon-chain/sync/rpc_blob_sidecars_by_root_test.go @@ -7,13 +7,15 @@ import ( "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p" p2pTypes "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/types" + "github.com/OffchainLabs/prysm/v6/beacon-chain/startup" "github.com/OffchainLabs/prysm/v6/config/params" "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" types "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v6/genesis" ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v6/testing/util" "github.com/libp2p/go-libp2p/core/network" ) @@ -123,6 +125,13 @@ func (c *blobsTestCase) runTestBlobSidecarsByRoot(t *testing.T) { if c.streamReader == nil { c.streamReader = defaultExpectedRequirer } + if c.clock == nil { + de := params.BeaconConfig().DenebForkEpoch + denebBuffer := params.BeaconConfig().MinEpochsForBlobsSidecarsRequest + 1000 + ce := de + denebBuffer + cs := util.SlotAtEpoch(t, ce) + c.clock = startup.NewClock(genesis.Time(), genesis.ValidatorsRoot(), startup.WithSlotAsNow(cs)) + } c.run(t) } @@ -181,18 +190,20 @@ func readChunkEncodedBlobsAsStreamReader(t *testing.T, s *Service, expect []*exp } func TestBlobsByRootValidation(t *testing.T) { - cfg := params.BeaconConfig() - repositionFutureEpochs(cfg) - undo, err := params.SetActiveWithUndo(cfg) - require.NoError(t, err) - defer func() { - require.NoError(t, undo()) - }() - capellaSlot, err := slots.EpochStart(params.BeaconConfig().CapellaForkEpoch) - require.NoError(t, err) - dmc, clock := defaultMockChain(t) + params.SetupTestConfigCleanup(t) + repositionFutureEpochs(params.BeaconConfig()) + + de := params.BeaconConfig().DenebForkEpoch + denebBuffer := params.BeaconConfig().MinEpochsForBlobsSidecarsRequest + 1000 + ce := de + denebBuffer + cs := util.SlotAtEpoch(t, ce) + clock := startup.NewClock(genesis.Time(), genesis.ValidatorsRoot(), startup.WithSlotAsNow(cs)) + + dmc := defaultMockChain(t, ce) + capellaSlot := util.SlotAtEpoch(t, params.BeaconConfig().CapellaForkEpoch) dmc.Slot = &capellaSlot dmc.FinalizedCheckPoint = ðpb.Checkpoint{Epoch: params.BeaconConfig().CapellaForkEpoch} + maxBlobs := params.BeaconConfig().MaxBlobsPerBlockAtEpoch(params.BeaconConfig().DenebForkEpoch) cases := []*blobsTestCase{ { name: "block before minimum_request_epoch", @@ -222,7 +233,7 @@ func TestBlobsByRootValidation(t *testing.T) { name: "block with all indices missing between 2 full blocks", nblocks: 3, missing: map[int]bool{1: true}, - total: func(i int) *int { return &i }(2 * int(params.BeaconConfig().MaxBlobsPerBlock(0))), + total: func(i int) *int { return &i }(2 * int(maxBlobs)), }, { name: "exceeds req max", @@ -232,6 +243,7 @@ func TestBlobsByRootValidation(t *testing.T) { } for _, c := range cases { t.Run(c.name, func(t *testing.T) { + c.clock = clock c.runTestBlobSidecarsByRoot(t) }) } diff --git a/beacon-chain/sync/rpc_send_request_test.go b/beacon-chain/sync/rpc_send_request_test.go index a75d5f1b07..8db82508cc 100644 --- a/beacon-chain/sync/rpc_send_request_test.go +++ b/beacon-chain/sync/rpc_send_request_test.go @@ -27,7 +27,6 @@ import ( 
"github.com/OffchainLabs/prysm/v6/testing/assert" "github.com/OffchainLabs/prysm/v6/testing/require" "github.com/OffchainLabs/prysm/v6/testing/util" - "github.com/OffchainLabs/prysm/v6/time/slots" "github.com/libp2p/go-libp2p/core/network" ) @@ -614,18 +613,19 @@ func TestBlobValidatorFromRangeReq(t *testing.T) { } func TestSeqBlobValid(t *testing.T) { - one, oneBlobs := generateTestBlockWithSidecars(t, [32]byte{}, 0, 3) + ds := util.SlotAtEpoch(t, params.BeaconConfig().DenebForkEpoch) + one, oneBlobs := generateTestBlockWithSidecars(t, [32]byte{}, ds, 3) r1, err := one.Block.HashTreeRoot() require.NoError(t, err) - two, twoBlobs := generateTestBlockWithSidecars(t, r1, 1, 3) + two, twoBlobs := generateTestBlockWithSidecars(t, r1, ds+1, 3) r2, err := two.Block.HashTreeRoot() require.NoError(t, err) - _, oops := generateTestBlockWithSidecars(t, r2, 0, 4) + _, oops := generateTestBlockWithSidecars(t, r2, ds, 4) oops[1].SignedBlockHeader.Header.ParentRoot = bytesutil.PadTo([]byte("derp"), 32) wrongRoot, err := blocks.NewROBlobWithRoot(oops[2].BlobSidecar, bytesutil.ToBytes32([]byte("parentderp"))) require.NoError(t, err) oob := oops[3] - oob.Index = uint64(params.BeaconConfig().MaxBlobsPerBlock(0)) + oob.Index = uint64(params.BeaconConfig().MaxBlobsPerBlock(ds)) cases := []struct { name string @@ -704,7 +704,7 @@ func TestSendBlobsByRangeRequest(t *testing.T) { t.Run("single blob - Deneb", func(t *testing.T) { // Setup genesis such that we are currently in deneb. - s := uint64(slots.UnsafeEpochStart(params.BeaconConfig().DenebForkEpoch)) * params.BeaconConfig().SecondsPerSlot + s := uint64(util.SlotAtEpoch(t, params.BeaconConfig().DenebForkEpoch)) * params.BeaconConfig().SecondsPerSlot clock := startup.NewClock(time.Now().Add(-time.Second*time.Duration(s)), [32]byte{}) ctxByte, err := ContextByteVersionsForValRoot(clock.GenesisValidatorsRoot()) require.NoError(t, err) @@ -713,7 +713,7 @@ func TestSendBlobsByRangeRequest(t *testing.T) { p2 := p2ptest.NewTestP2P(t) p1.Connect(p2) // Set current slot to a deneb slot. - slot := slots.UnsafeEpochStart(params.BeaconConfig().DenebForkEpoch + 1) + slot := util.SlotAtEpoch(t, params.BeaconConfig().DenebForkEpoch+1) // Create a simple handler that will return a valid response. p2.SetStreamHandler(topic, func(stream network.Stream) { defer func() { @@ -757,7 +757,7 @@ func TestSendBlobsByRangeRequest(t *testing.T) { require.NoError(t, undo()) }() // Setup genesis such that we are currently in deneb. - s := uint64(slots.UnsafeEpochStart(params.BeaconConfig().DenebForkEpoch)) * params.BeaconConfig().SecondsPerSlot + s := uint64(util.SlotAtEpoch(t, params.BeaconConfig().DenebForkEpoch)) * params.BeaconConfig().SecondsPerSlot clock := startup.NewClock(time.Now().Add(-time.Second*time.Duration(s)), [32]byte{}) ctxByte, err := ContextByteVersionsForValRoot(clock.GenesisValidatorsRoot()) require.NoError(t, err) @@ -766,7 +766,7 @@ func TestSendBlobsByRangeRequest(t *testing.T) { p2 := p2ptest.NewTestP2P(t) p1.Connect(p2) // Set current slot to the first slot of the last deneb epoch. - slot := slots.UnsafeEpochStart(params.BeaconConfig().DenebForkEpoch) + slot := util.SlotAtEpoch(t, params.BeaconConfig().DenebForkEpoch) // Create a simple handler that will return a valid response. 
p2.SetStreamHandler(topic, func(stream network.Stream) { defer func() { @@ -825,7 +825,7 @@ func TestSendBlobsByRangeRequest(t *testing.T) { require.NoError(t, undo()) }() - s := uint64(slots.UnsafeEpochStart(params.BeaconConfig().ElectraForkEpoch)) * params.BeaconConfig().SecondsPerSlot + s := uint64(util.SlotAtEpoch(t, params.BeaconConfig().ElectraForkEpoch)) * params.BeaconConfig().SecondsPerSlot clock := startup.NewClock(time.Now().Add(-time.Second*time.Duration(s)), [32]byte{}) ctxByte, err := ContextByteVersionsForValRoot(clock.GenesisValidatorsRoot()) require.NoError(t, err) @@ -834,7 +834,7 @@ func TestSendBlobsByRangeRequest(t *testing.T) { p2 := p2ptest.NewTestP2P(t) p1.Connect(p2) - slot := slots.UnsafeEpochStart(params.BeaconConfig().ElectraForkEpoch) + slot := util.SlotAtEpoch(t, params.BeaconConfig().ElectraForkEpoch) // Create a simple handler that will return a valid response. p2.SetStreamHandler(topic, func(stream network.Stream) { defer func() { diff --git a/beacon-chain/sync/subscriber.go b/beacon-chain/sync/subscriber.go index 103f3e3825..d509f1dbd0 100644 --- a/beacon-chain/sync/subscriber.go +++ b/beacon-chain/sync/subscriber.go @@ -812,7 +812,7 @@ func isDigestValid(digest [4]byte, clock *startup.Clock) (bool, error) { // In the event there is a fork the next epoch, // we skip the check, as we subscribe subnets an // epoch in advance. - if params.DigestChangesAfter(current) { + if params.NextNetworkScheduleEntry(current).Epoch == current+1 { return true, nil } return params.ForkDigest(current) == digest, nil diff --git a/beacon-chain/sync/verify/BUILD.bazel b/beacon-chain/sync/verify/BUILD.bazel index 6519ebe331..705ba5eb00 100644 --- a/beacon-chain/sync/verify/BUILD.bazel +++ b/beacon-chain/sync/verify/BUILD.bazel @@ -19,6 +19,7 @@ go_test( srcs = ["blob_test.go"], embed = [":go_default_library"], deps = [ + "//config/params:go_default_library", "//consensus-types/blocks:go_default_library", "//encoding/bytesutil:go_default_library", "//testing/require:go_default_library", diff --git a/beacon-chain/sync/verify/blob_test.go b/beacon-chain/sync/verify/blob_test.go index fb2f7abc5e..441b6b4c2b 100644 --- a/beacon-chain/sync/verify/blob_test.go +++ b/beacon-chain/sync/verify/blob_test.go @@ -4,6 +4,7 @@ import ( "fmt" "testing" + "github.com/OffchainLabs/prysm/v6/config/params" "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" "github.com/OffchainLabs/prysm/v6/testing/require" @@ -11,6 +12,7 @@ import ( ) func TestBlobAlignsWithBlock(t *testing.T) { + ds := util.SlotAtEpoch(t, params.BeaconConfig().DenebForkEpoch) tests := []struct { name string blockAndBlob func(t *testing.T) (blocks.ROBlock, []blocks.ROBlob) @@ -19,13 +21,13 @@ func TestBlobAlignsWithBlock(t *testing.T) { { name: "happy path", blockAndBlob: func(t *testing.T) (blocks.ROBlock, []blocks.ROBlob) { - return util.GenerateTestDenebBlockWithSidecar(t, [32]byte{}, 0, 1) + return util.GenerateTestDenebBlockWithSidecar(t, [32]byte{}, ds, 1) }, }, { name: "mismatched roots", blockAndBlob: func(t *testing.T) (blocks.ROBlock, []blocks.ROBlob) { - blk, blobs := util.GenerateTestDenebBlockWithSidecar(t, [32]byte{}, 0, 1) + blk, blobs := util.GenerateTestDenebBlockWithSidecar(t, [32]byte{}, ds, 1) tweaked := blobs[0].BlobSidecar tweaked.SignedBlockHeader.Header.Slot = tweaked.SignedBlockHeader.Header.Slot + 1 tampered, err := blocks.NewROBlob(tweaked) @@ -37,7 +39,7 @@ func TestBlobAlignsWithBlock(t *testing.T) { { name: "mismatched roots - fake", 
blockAndBlob: func(t *testing.T) (blocks.ROBlock, []blocks.ROBlob) { - blk, blobs := util.GenerateTestDenebBlockWithSidecar(t, [32]byte{}, 0, 1) + blk, blobs := util.GenerateTestDenebBlockWithSidecar(t, [32]byte{}, ds, 1) copied := blobs[0].BlobSidecar // exact same header, mess with the root fake, err := blocks.NewROBlobWithRoot(copied, bytesutil.ToBytes32([]byte("derp"))) diff --git a/beacon-chain/verification/blob_test.go b/beacon-chain/verification/blob_test.go index 25753f290f..f18111e92e 100644 --- a/beacon-chain/verification/blob_test.go +++ b/beacon-chain/verification/blob_test.go @@ -23,7 +23,8 @@ import ( func TestBlobIndexInBounds(t *testing.T) { ini := &Initializer{} - _, blobs := util.GenerateTestDenebBlockWithSidecar(t, [32]byte{}, 0, 1) + ds := util.SlotAtEpoch(t, params.BeaconConfig().DenebForkEpoch) + _, blobs := util.GenerateTestDenebBlockWithSidecar(t, [32]byte{}, ds, 1) b := blobs[0] // set Index to a value that is out of bounds v := ini.NewBlobVerifier(b, GossipBlobSidecarRequirements) @@ -31,7 +32,8 @@ func TestBlobIndexInBounds(t *testing.T) { require.Equal(t, true, v.results.executed(RequireBlobIndexInBounds)) require.NoError(t, v.results.result(RequireBlobIndexInBounds)) - b.Index = uint64(params.BeaconConfig().MaxBlobsPerBlock(0)) + maxBlobs := params.BeaconConfig().MaxBlobsPerBlock(ds) + b.Index = uint64(maxBlobs) v = ini.NewBlobVerifier(b, GossipBlobSidecarRequirements) require.ErrorIs(t, v.BlobIndexInBounds(), ErrBlobIndexInvalid) require.Equal(t, true, v.results.executed(RequireBlobIndexInBounds)) diff --git a/changelog/kasey_max-blobs-use-network-schedule.md b/changelog/kasey_max-blobs-use-network-schedule.md new file mode 100644 index 0000000000..7f3e8c1220 --- /dev/null +++ b/changelog/kasey_max-blobs-use-network-schedule.md @@ -0,0 +1,2 @@ +### Ignored +- Switch implementation of get max blobs to use network schedule entry code to be consistent with other fork-related helpers. diff --git a/config/params/BUILD.bazel b/config/params/BUILD.bazel index b1d325f443..f2bc785355 100644 --- a/config/params/BUILD.bazel +++ b/config/params/BUILD.bazel @@ -82,6 +82,7 @@ go_test( "//genesis:go_default_library", "//io/file:go_default_library", "//proto/prysm/v1alpha1:go_default_library", + "//runtime/version:go_default_library", "//testing/assert:go_default_library", "//testing/require:go_default_library", "@com_github_ethereum_go_ethereum//common/hexutil:go_default_library", diff --git a/config/params/config.go b/config/params/config.go index a6bd5991da..f36a3097d2 100644 --- a/config/params/config.go +++ b/config/params/config.go @@ -5,7 +5,6 @@ import ( "encoding/binary" "fmt" "math" - "slices" "sort" "strings" "sync" @@ -384,10 +383,12 @@ func (b *BeaconChainConfig) ApplyOptions(opts ...Option) { } } -// TODO: this needs to be able to return an error -// InitializeForkSchedule initializes the schedules forks baked into the config. +// InitializeForkSchedule initializes the scheduled forks and BPOs baked into the config. func (b *BeaconChainConfig) InitializeForkSchedule() { - // Reset Fork Version Schedule. + // TODO: this needs to be able to return an error. The network schedule code has + // to implement weird fallbacks when it is not initialized properly, it would be better + // if the beacon node could crash if there isn't a valid fork schedule + // at the return of this function. 
b.ForkVersionSchedule = configForkSchedule(b) b.ForkVersionNames = configForkNames(b) b.forkSchedule = initForkSchedule(b) @@ -439,16 +440,18 @@ func (ns *NetworkSchedule) epochIdx(epoch primitives.Epoch) int { return -1 } +func (ns *NetworkSchedule) safeIndex(idx int) NetworkScheduleEntry { + if idx < 0 || len(ns.entries) == 0 { + return genesisNetworkScheduleEntry() + } + if idx >= len(ns.entries) { + return ns.entries[len(ns.entries)-1] + } + return ns.entries[idx] +} + func (ns *NetworkSchedule) Next(epoch primitives.Epoch) NetworkScheduleEntry { - lastIdx := len(ns.entries) - 1 - idx := ns.epochIdx(epoch) - if idx < 0 { - return ns.entries[0] - } - if idx == lastIdx { - return ns.entries[lastIdx] - } - return ns.entries[idx+1] + return ns.safeIndex(ns.epochIdx(epoch) + 1) } func (ns *NetworkSchedule) LastEntry() NetworkScheduleEntry { @@ -457,38 +460,21 @@ func (ns *NetworkSchedule) LastEntry() NetworkScheduleEntry { return ns.entries[i] } } - return ns.entries[0] + return genesisNetworkScheduleEntry() } // LastFork is the last full fork (this is used by e2e testing) func (ns *NetworkSchedule) LastFork() NetworkScheduleEntry { for i := len(ns.entries) - 1; i >= 0; i-- { - if ns.entries[i].isFork { + if ns.entries[i].isFork && ns.entries[i].Epoch != BeaconConfig().FarFutureEpoch { return ns.entries[i] } } - return ns.entries[0] + return genesisNetworkScheduleEntry() } -func (ns *NetworkSchedule) ForEpoch(epoch primitives.Epoch) NetworkScheduleEntry { - idx := ns.epochIdx(epoch) - if idx < 0 { - return ns.entries[0] - } - if idx >= len(ns.entries)-1 { - return ns.entries[len(ns.entries)-1] - } - return ns.entries[idx] -} - -func (ns *NetworkSchedule) activatedAt(epoch primitives.Epoch) (*NetworkScheduleEntry, bool) { - ns.mu.RLock() - defer ns.mu.RUnlock() - if ns.byEpoch == nil { - return nil, false - } - entry, ok := ns.byEpoch[epoch] - return entry, ok +func (ns *NetworkSchedule) forEpoch(epoch primitives.Epoch) NetworkScheduleEntry { + return ns.safeIndex(ns.epochIdx(epoch)) } func (ns *NetworkSchedule) merge(other *NetworkSchedule) *NetworkSchedule { @@ -497,10 +483,15 @@ func (ns *NetworkSchedule) merge(other *NetworkSchedule) *NetworkSchedule { merged = append(merged, other.entries...) sort.Slice(merged, func(i, j int) bool { if merged[i].Epoch == merged[j].Epoch { - if merged[i].VersionEnum == merged[j].VersionEnum { - return merged[i].isFork + // This can happen for 2 reasons: + // 1) both entries are forks in a test setup (eg starting genesis at a later fork) + // - break tie by version enum + // 2) one entry is a fork, the other is a BPO change + // - break tie by putting the fork first + if merged[i].isFork && merged[j].isFork { + return merged[i].VersionEnum < merged[j].VersionEnum } - return merged[i].VersionEnum < merged[j].VersionEnum + return merged[i].isFork } return merged[i].Epoch < merged[j].Epoch }) @@ -702,52 +693,12 @@ func (b *BeaconChainConfig) TargetBlobsPerBlock(slot primitives.Slot) int { // MaxBlobsPerBlock returns the maximum number of blobs per block for the given slot. 
func (b *BeaconChainConfig) MaxBlobsPerBlock(slot primitives.Slot) int { epoch := primitives.Epoch(slot.DivSlot(b.SlotsPerEpoch)) - - if len(b.BlobSchedule) > 0 { - if !slices.IsSortedFunc(b.BlobSchedule, func(a, b BlobScheduleEntry) int { - return int(a.Epoch - b.Epoch) - }) { - slices.SortFunc(b.BlobSchedule, func(a, b BlobScheduleEntry) int { - return int(a.Epoch - b.Epoch) - }) - } - - for i := len(b.BlobSchedule) - 1; i >= 0; i-- { - if epoch >= b.BlobSchedule[i].Epoch { - return int(b.BlobSchedule[i].MaxBlobsPerBlock) - } - } - } - - if epoch >= b.ElectraForkEpoch { - return b.DeprecatedMaxBlobsPerBlockElectra - } - - return b.DeprecatedMaxBlobsPerBlock + return b.MaxBlobsPerBlockAtEpoch(epoch) } // MaxBlobsPerBlockAtEpoch returns the maximum number of blobs per block for the given epoch func (b *BeaconChainConfig) MaxBlobsPerBlockAtEpoch(epoch primitives.Epoch) int { - if len(b.BlobSchedule) > 0 { - if !slices.IsSortedFunc(b.BlobSchedule, func(a, b BlobScheduleEntry) int { - return int(a.Epoch - b.Epoch) - }) { - slices.SortFunc(b.BlobSchedule, func(a, b BlobScheduleEntry) int { - return int(a.Epoch - b.Epoch) - }) - } - - for i := len(b.BlobSchedule) - 1; i >= 0; i-- { - if epoch >= b.BlobSchedule[i].Epoch { - return int(b.BlobSchedule[i].MaxBlobsPerBlock) - } - } - } - - if epoch >= b.ElectraForkEpoch { - return b.DeprecatedMaxBlobsPerBlockElectra - } - return b.DeprecatedMaxBlobsPerBlock + return int(b.networkSchedule.forEpoch(epoch).MaxBlobsPerBlock) } // DenebEnabled centralizes the check to determine if code paths that are specific to deneb should be allowed to execute. diff --git a/config/params/config_test.go b/config/params/config_test.go index 39608bc533..81a48f4118 100644 --- a/config/params/config_test.go +++ b/config/params/config_test.go @@ -10,6 +10,7 @@ import ( "github.com/OffchainLabs/prysm/v6/config/params" "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" "github.com/OffchainLabs/prysm/v6/genesis" + "github.com/OffchainLabs/prysm/v6/runtime/version" "github.com/OffchainLabs/prysm/v6/testing/require" "github.com/ethereum/go-ethereum/common/hexutil" ) @@ -109,75 +110,80 @@ func TestConfigGenesisValidatorRoot(t *testing.T) { require.Equal(t, params.BeaconConfig().GenesisValidatorsRoot, genesis.ValidatorsRoot()) } -func TestMaxBlobsPerBlock(t *testing.T) { - t.Run("Before all forks and no BlobSchedule", func(t *testing.T) { - cfg := params.MainnetConfig() - cfg.BlobSchedule = nil - cfg.ElectraForkEpoch = 100 - cfg.FuluForkEpoch = 200 - require.Equal(t, cfg.MaxBlobsPerBlock(0), cfg.DeprecatedMaxBlobsPerBlock) - }) +func TestMaxBlobsJumbled(t *testing.T) { + params.SetActiveTestCleanup(t, params.MainnetBeaconConfig) + cfg := params.MainnetConfig() + cfg.FuluForkEpoch = cfg.ElectraForkEpoch + 4098*2 + electraMaxBlobs := uint64(cfg.DeprecatedMaxBlobsPerBlockElectra) + offsets := []primitives.Epoch{cfg.FuluForkEpoch} + for _, offset := range []primitives.Epoch{320, 640, 960, 1080} { + offsets = append(offsets, cfg.FuluForkEpoch+offset) + } + maxBlobs := map[primitives.Epoch]uint64{ + cfg.FuluForkEpoch: electraMaxBlobs, + offsets[0]: electraMaxBlobs + 3, + offsets[1]: electraMaxBlobs + 6, + offsets[2]: electraMaxBlobs + 9, + offsets[3]: electraMaxBlobs + 12, + } + schedule := make([]params.BlobScheduleEntry, 0, len(maxBlobs)) + for _, epoch := range offsets[1:] { + schedule = append(schedule, params.BlobScheduleEntry{Epoch: epoch, MaxBlobsPerBlock: maxBlobs[epoch]}) + } + cfg.BlobSchedule = schedule + cfg.InitializeForkSchedule() + for i := 1; i < 
len(cfg.BlobSchedule); i++ { + beforeEpoch, epoch := cfg.BlobSchedule[i-1].Epoch, cfg.BlobSchedule[i].Epoch + before, after := maxBlobs[beforeEpoch], maxBlobs[epoch] + require.Equal(t, before, uint64(cfg.MaxBlobsPerBlockAtEpoch(epoch-1))) + require.Equal(t, after, uint64(cfg.MaxBlobsPerBlockAtEpoch(epoch))) + beforeSlot, err := cfg.SlotsPerEpoch.SafeMul(uint64(beforeEpoch)) + require.NoError(t, err) + afterSlot, err := cfg.SlotsPerEpoch.SafeMul(uint64(epoch)) + require.NoError(t, err) + require.Equal(t, before, uint64(cfg.MaxBlobsPerBlock(beforeSlot))) + require.Equal(t, after, uint64(cfg.MaxBlobsPerBlock(afterSlot))) + } - t.Run("Uses latest matching BlobSchedule entry", func(t *testing.T) { - cfg := params.MainnetConfig() - cfg.BlobSchedule = []params.BlobScheduleEntry{ - {Epoch: 5, MaxBlobsPerBlock: 7}, - {Epoch: 10, MaxBlobsPerBlock: 11}, - } - slot := 11 * cfg.SlotsPerEpoch - require.Equal(t, cfg.MaxBlobsPerBlock(slot), 11) - }) + require.Equal(t, electraMaxBlobs, uint64(cfg.MaxBlobsPerBlockAtEpoch(cfg.FuluForkEpoch-1))) + require.Equal(t, electraMaxBlobs, uint64(cfg.MaxBlobsPerBlockAtEpoch(cfg.ElectraForkEpoch))) + require.Equal(t, cfg.DeprecatedMaxBlobsPerBlock, cfg.MaxBlobsPerBlockAtEpoch(cfg.ElectraForkEpoch-1)) + require.Equal(t, cfg.DeprecatedMaxBlobsPerBlock, cfg.MaxBlobsPerBlockAtEpoch(cfg.DenebForkEpoch)) + preBlobEpochs := []primitives.Epoch{cfg.DenebForkEpoch - 1, cfg.CapellaForkEpoch, cfg.BellatrixForkEpoch, cfg.AltairForkEpoch, 0} + for _, epoch := range preBlobEpochs { + require.Equal(t, 0, cfg.MaxBlobsPerBlockAtEpoch(epoch)) + } +} - t.Run("Uses earlier matching BlobSchedule entry", func(t *testing.T) { - cfg := params.MainnetConfig() - cfg.BlobSchedule = []params.BlobScheduleEntry{ - {Epoch: 5, MaxBlobsPerBlock: 7}, - {Epoch: 10, MaxBlobsPerBlock: 11}, - } - slot := 6 * cfg.SlotsPerEpoch - require.Equal(t, cfg.MaxBlobsPerBlock(slot), 7) - }) +func TestFirstBPOAtFork(t *testing.T) { + params.SetActiveTestCleanup(t, params.MainnetBeaconConfig) + cfg := params.MainnetConfig() + cfg.FuluForkEpoch = cfg.ElectraForkEpoch + 4096*2 + electraMaxBlobs := uint64(cfg.DeprecatedMaxBlobsPerBlockElectra) + cfg.BlobSchedule = []params.BlobScheduleEntry{ + {Epoch: cfg.FuluForkEpoch, MaxBlobsPerBlock: electraMaxBlobs + 1}, + {Epoch: cfg.FuluForkEpoch + 1, MaxBlobsPerBlock: electraMaxBlobs + 2}, + } + cfg.InitializeForkSchedule() + require.Equal(t, electraMaxBlobs, uint64(cfg.MaxBlobsPerBlockAtEpoch(cfg.FuluForkEpoch-1))) + require.Equal(t, electraMaxBlobs+1, uint64(cfg.MaxBlobsPerBlockAtEpoch(cfg.FuluForkEpoch))) + require.Equal(t, electraMaxBlobs+2, uint64(cfg.MaxBlobsPerBlockAtEpoch(cfg.FuluForkEpoch+2))) +} - t.Run("Before first BlobSchedule entry falls back to fork logic", func(t *testing.T) { - cfg := params.MainnetConfig() - cfg.FuluForkEpoch = 1 - cfg.BlobSchedule = []params.BlobScheduleEntry{ - {Epoch: 5, MaxBlobsPerBlock: 7}, - } - slot := primitives.Slot(2) // Epoch 0 - require.Equal(t, cfg.MaxBlobsPerBlock(slot), cfg.DeprecatedMaxBlobsPerBlock) - }) - - t.Run("Unsorted BlobSchedule still picks latest matching entry", func(t *testing.T) { - cfg := params.MainnetConfig() - cfg.BlobSchedule = []params.BlobScheduleEntry{ - {Epoch: 10, MaxBlobsPerBlock: 11}, - {Epoch: 5, MaxBlobsPerBlock: 7}, - } - slot := 11 * cfg.SlotsPerEpoch - require.Equal(t, cfg.MaxBlobsPerBlock(slot), 11) - }) - - t.Run("Unsorted BlobSchedule picks earlier matching entry correctly", func(t *testing.T) { - cfg := params.MainnetConfig() - cfg.BlobSchedule = []params.BlobScheduleEntry{ - {Epoch: 10, 
MaxBlobsPerBlock: 11}, - {Epoch: 5, MaxBlobsPerBlock: 7}, - } - slot := 6 * cfg.SlotsPerEpoch - require.Equal(t, cfg.MaxBlobsPerBlock(slot), 7) - }) - - t.Run("Unsorted BlobSchedule falls back to fork logic when epoch is before all entries", func(t *testing.T) { - cfg := params.MainnetConfig() - cfg.ElectraForkEpoch = 2 - cfg.BlobSchedule = []params.BlobScheduleEntry{ - {Epoch: 10, MaxBlobsPerBlock: 11}, - {Epoch: 5, MaxBlobsPerBlock: 7}, - } - slot := primitives.Slot(1) // Epoch 0 - require.Equal(t, cfg.MaxBlobsPerBlock(slot), cfg.DeprecatedMaxBlobsPerBlock) - }) +func TestMaxBlobsNoSchedule(t *testing.T) { + params.SetActiveTestCleanup(t, params.MainnetBeaconConfig) + cfg := params.MainnetConfig() + electraMaxBlobs := uint64(cfg.DeprecatedMaxBlobsPerBlockElectra) + cfg.BlobSchedule = nil + cfg.InitializeForkSchedule() + require.Equal(t, electraMaxBlobs, uint64(cfg.MaxBlobsPerBlockAtEpoch(cfg.FuluForkEpoch-1))) + require.Equal(t, electraMaxBlobs, uint64(cfg.MaxBlobsPerBlockAtEpoch(cfg.ElectraForkEpoch))) + require.Equal(t, cfg.DeprecatedMaxBlobsPerBlock, cfg.MaxBlobsPerBlockAtEpoch(cfg.ElectraForkEpoch-1)) + require.Equal(t, cfg.DeprecatedMaxBlobsPerBlock, cfg.MaxBlobsPerBlockAtEpoch(cfg.DenebForkEpoch)) + preBlobEpochs := []primitives.Epoch{cfg.DenebForkEpoch - 1, cfg.CapellaForkEpoch, cfg.BellatrixForkEpoch, cfg.AltairForkEpoch, 0} + for _, epoch := range preBlobEpochs { + require.Equal(t, 0, cfg.MaxBlobsPerBlockAtEpoch(epoch)) + } } func Test_TargetBlobCount(t *testing.T) { @@ -287,3 +293,15 @@ func TestFarFuturePrepareFilter(t *testing.T) { entry := params.GetNetworkScheduleEntry(oldElectra) require.Equal(t, [4]byte(params.BeaconConfig().DenebForkVersion), entry.ForkVersion) } + +func TestMaxBlobsOverrideEpoch(t *testing.T) { + params.SetupTestConfigCleanup(t) + cfg := params.BeaconConfig() + require.Equal(t, 0, cfg.MaxBlobsPerBlockAtEpoch(0)) + params.SetGenesisFork(t, cfg, version.Deneb) + require.Equal(t, cfg.DeprecatedMaxBlobsPerBlock, cfg.MaxBlobsPerBlockAtEpoch(0)) + params.SetGenesisFork(t, cfg, version.Electra) + require.Equal(t, cfg.DeprecatedMaxBlobsPerBlockElectra, cfg.MaxBlobsPerBlockAtEpoch(0)) + params.SetGenesisFork(t, cfg, version.Fulu) + require.Equal(t, cfg.DeprecatedMaxBlobsPerBlockElectra, cfg.MaxBlobsPerBlockAtEpoch(0)) +} diff --git a/config/params/fork.go b/config/params/fork.go index 8b4e92a7e7..5727698274 100644 --- a/config/params/fork.go +++ b/config/params/fork.go @@ -4,19 +4,14 @@ import ( fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v6/runtime/version" "github.com/pkg/errors" ) -// DigestChangesAfter checks if an allotted fork is in the following epoch. -func DigestChangesAfter(e primitives.Epoch) bool { - _, ok := BeaconConfig().networkSchedule.activatedAt(e + 1) - return ok -} - // ForkDigestUsingConfig retrieves the fork digest from the current schedule determined // by the provided epoch. 
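// The digest is read off the network schedule entry active at the epoch, so the schedule must
// have been initialized via InitializeForkSchedule; on an empty schedule forEpoch falls back
// to a genesis entry whose digest is zero. A minimal sketch (epoch chosen for illustration):
//
//	cfg := BeaconConfig()
//	digest := ForkDigestUsingConfig(cfg.ElectraForkEpoch, cfg)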
func ForkDigestUsingConfig(epoch primitives.Epoch, cfg *BeaconChainConfig) [4]byte { - entry := cfg.networkSchedule.ForEpoch(epoch) + entry := cfg.networkSchedule.forEpoch(epoch) return entry.ForkDigest } @@ -42,10 +37,10 @@ func Fork(epoch primitives.Epoch) (*ethpb.Fork, error) { } func ForkFromConfig(cfg *BeaconChainConfig, epoch primitives.Epoch) *ethpb.Fork { - current := cfg.networkSchedule.ForEpoch(epoch) + current := cfg.networkSchedule.forEpoch(epoch) previous := current if current.Epoch > 0 { - previous = cfg.networkSchedule.ForEpoch(current.Epoch - 1) + previous = cfg.networkSchedule.forEpoch(current.Epoch - 1) } return ðpb.Fork{ PreviousVersion: previous.ForkVersion[:], @@ -102,11 +97,17 @@ func LastForkEpoch() primitives.Epoch { } func LastNetworkScheduleEntry() NetworkScheduleEntry { - lastIdx := len(BeaconConfig().networkSchedule.entries) - 1 - return BeaconConfig().networkSchedule.entries[lastIdx] + return BeaconConfig().networkSchedule.LastEntry() } func GetNetworkScheduleEntry(epoch primitives.Epoch) NetworkScheduleEntry { - entry := BeaconConfig().networkSchedule.ForEpoch(epoch) + entry := BeaconConfig().networkSchedule.forEpoch(epoch) return entry } + +func genesisNetworkScheduleEntry() NetworkScheduleEntry { + b := BeaconConfig() + // TODO: note this has a zero digest, but we would never hit this fallback condition on + // a properly initialized fork schedule. + return NetworkScheduleEntry{Epoch: b.GenesisEpoch, isFork: true, ForkVersion: to4(b.GenesisForkVersion), VersionEnum: version.Phase0} +} diff --git a/config/params/fork_test.go b/config/params/fork_test.go index 76b625eb76..c31042b503 100644 --- a/config/params/fork_test.go +++ b/config/params/fork_test.go @@ -7,7 +7,6 @@ import ( "github.com/OffchainLabs/prysm/v6/config/params" "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" "github.com/OffchainLabs/prysm/v6/testing/require" ) @@ -93,15 +92,6 @@ func TestRetrieveForkDataFromDigest(t *testing.T) { require.Equal(t, params.BeaconConfig().AltairForkEpoch, epoch) } -func TestIsForkNextEpoch(t *testing.T) { - // at - assert.Equal(t, false, params.DigestChangesAfter(params.BeaconConfig().ElectraForkEpoch)) - // just before - assert.Equal(t, true, params.DigestChangesAfter(params.BeaconConfig().ElectraForkEpoch-1)) - // just after - assert.Equal(t, false, params.DigestChangesAfter(params.BeaconConfig().ElectraForkEpoch+1)) -} - func TestNextForkData(t *testing.T) { params.SetupTestConfigCleanup(t) params.BeaconConfig().InitializeForkSchedule() @@ -163,7 +153,9 @@ func TestNextForkData(t *testing.T) { func TestLastForkEpoch(t *testing.T) { params.SetupTestConfigCleanup(t) cfg := params.BeaconConfig().Copy() - require.Equal(t, cfg.ElectraForkEpoch, params.LastForkEpoch()) + if cfg.FuluForkEpoch == cfg.FarFutureEpoch { + require.Equal(t, cfg.ElectraForkEpoch, params.LastForkEpoch()) + } } func TestForkFromConfig_UsesPassedConfig(t *testing.T) { diff --git a/config/params/testutils.go b/config/params/testutils.go index 320148f944..76e9c5c7bf 100644 --- a/config/params/testutils.go +++ b/config/params/testutils.go @@ -2,12 +2,41 @@ package params import ( "testing" + + "github.com/OffchainLabs/prysm/v6/runtime/version" ) const ( EnvNameOverrideAccept = "PRYSM_API_OVERRIDE_ACCEPT" ) +func SetGenesisFork(t *testing.T, cfg *BeaconChainConfig, fork int) { + setGenesisUpdateEpochs(cfg, fork) + OverrideBeaconConfig(cfg) +} + +func 
setGenesisUpdateEpochs(b *BeaconChainConfig, fork int) { + switch fork { + case version.Fulu: + b.FuluForkEpoch = 0 + setGenesisUpdateEpochs(b, version.Electra) + case version.Electra: + b.ElectraForkEpoch = 0 + setGenesisUpdateEpochs(b, version.Deneb) + case version.Deneb: + b.DenebForkEpoch = 0 + setGenesisUpdateEpochs(b, version.Capella) + case version.Capella: + b.CapellaForkEpoch = 0 + setGenesisUpdateEpochs(b, version.Bellatrix) + case version.Bellatrix: + b.BellatrixForkEpoch = 0 + setGenesisUpdateEpochs(b, version.Altair) + case version.Altair: + b.AltairForkEpoch = 0 + } +} + // SetupTestConfigCleanup preserves configurations allowing to modify them within tests without any // restrictions, everything is restored after the test. func SetupTestConfigCleanup(t testing.TB) { diff --git a/testing/spectest/shared/common/forkchoice/runner.go b/testing/spectest/shared/common/forkchoice/runner.go index ef0f1c7110..d664c06a41 100644 --- a/testing/spectest/shared/common/forkchoice/runner.go +++ b/testing/spectest/shared/common/forkchoice/runner.go @@ -49,6 +49,8 @@ func Run(t *testing.T, config string, fork int) { func runTest(t *testing.T, config string, fork int, basePath string) { // nolint:gocognit require.NoError(t, utils.SetConfig(t, config)) + cfg := params.BeaconConfig() + params.SetGenesisFork(t, cfg, fork) testFolders, _ := utils.TestFolders(t, config, version.String(fork), basePath) if len(testFolders) == 0 { t.Fatalf("No test folders found for %s/%s/%s", config, version.String(fork), basePath) diff --git a/testing/spectest/shared/common/merkle_proof/single_merkle_proof.go b/testing/spectest/shared/common/merkle_proof/single_merkle_proof.go index 72b9a7c0fb..99280f3423 100644 --- a/testing/spectest/shared/common/merkle_proof/single_merkle_proof.go +++ b/testing/spectest/shared/common/merkle_proof/single_merkle_proof.go @@ -80,7 +80,8 @@ func runSingleMerkleProofTests(t *testing.T, config, forkOrPhase string, unmarsh if err != nil { return } - if index < consensus_blocks.KZGOffset || index > uint64(consensus_blocks.KZGOffset+params.BeaconConfig().MaxBlobsPerBlock(0)) { + maxBlobs := params.BeaconConfig().MaxBlobsPerBlockAtEpoch(params.BeaconConfig().DenebForkEpoch) + if index < consensus_blocks.KZGOffset || index > uint64(consensus_blocks.KZGOffset+maxBlobs) { return } localProof, err := consensus_blocks.MerkleProofKZGCommitment(body, int(index-consensus_blocks.KZGOffset)) diff --git a/testing/spectest/shared/common/operations/BUILD.bazel b/testing/spectest/shared/common/operations/BUILD.bazel index af04da3f5a..758f479767 100644 --- a/testing/spectest/shared/common/operations/BUILD.bazel +++ b/testing/spectest/shared/common/operations/BUILD.bazel @@ -29,10 +29,12 @@ go_library( "//beacon-chain/core/helpers:go_default_library", "//beacon-chain/core/validators:go_default_library", "//beacon-chain/state:go_default_library", + "//config/params:go_default_library", "//consensus-types/blocks:go_default_library", "//consensus-types/interfaces:go_default_library", "//proto/engine/v1:go_default_library", "//proto/prysm/v1alpha1:go_default_library", + "//runtime/version:go_default_library", "//testing/require:go_default_library", "//testing/spectest/utils:go_default_library", "//testing/util:go_default_library", diff --git a/testing/spectest/shared/common/operations/execution_payload.go b/testing/spectest/shared/common/operations/execution_payload.go index 27861761c3..f62041349f 100644 --- a/testing/spectest/shared/common/operations/execution_payload.go +++ 
b/testing/spectest/shared/common/operations/execution_payload.go @@ -7,7 +7,9 @@ import ( "github.com/OffchainLabs/prysm/v6/beacon-chain/core/blocks" "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v6/config/params" "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" + "github.com/OffchainLabs/prysm/v6/runtime/version" "github.com/OffchainLabs/prysm/v6/testing/require" "github.com/OffchainLabs/prysm/v6/testing/spectest/utils" "github.com/OffchainLabs/prysm/v6/testing/util" @@ -19,6 +21,10 @@ type SSZToBlockBody func([]byte) (interfaces.ReadOnlyBeaconBlockBody, error) func RunExecutionPayloadTest(t *testing.T, config string, fork string, sszToBlockBody SSZToBlockBody, sszToState SSZToState) { require.NoError(t, utils.SetConfig(t, config)) + cfg := params.BeaconConfig() + fv, err := version.FromString(fork) + require.NoError(t, err) + params.SetGenesisFork(t, cfg, fv) testFolders, testsFolderPath := utils.TestFolders(t, config, fork, "operations/execution_payload/pyspec_tests") if len(testFolders) == 0 { t.Fatalf("No test folders found for %s/%s/%s", config, fork, "operations/execution_payload/pyspec_tests") diff --git a/testing/spectest/shared/deneb/sanity/BUILD.bazel b/testing/spectest/shared/deneb/sanity/BUILD.bazel index ab5f28e2e0..cf137e4df1 100644 --- a/testing/spectest/shared/deneb/sanity/BUILD.bazel +++ b/testing/spectest/shared/deneb/sanity/BUILD.bazel @@ -15,8 +15,10 @@ go_library( "//beacon-chain/core/transition:go_default_library", "//beacon-chain/state:go_default_library", "//beacon-chain/state/state-native:go_default_library", + "//config/params:go_default_library", "//consensus-types/blocks:go_default_library", "//proto/prysm/v1alpha1:go_default_library", + "//runtime/version:go_default_library", "//testing/require:go_default_library", "//testing/spectest/utils:go_default_library", "//testing/util:go_default_library", diff --git a/testing/spectest/shared/deneb/sanity/block_processing.go b/testing/spectest/shared/deneb/sanity/block_processing.go index 01ef4e7d3c..c49b9a080e 100644 --- a/testing/spectest/shared/deneb/sanity/block_processing.go +++ b/testing/spectest/shared/deneb/sanity/block_processing.go @@ -12,8 +12,10 @@ import ( "github.com/OffchainLabs/prysm/v6/beacon-chain/core/transition" "github.com/OffchainLabs/prysm/v6/beacon-chain/state" state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" + "github.com/OffchainLabs/prysm/v6/config/params" "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v6/runtime/version" "github.com/OffchainLabs/prysm/v6/testing/require" "github.com/OffchainLabs/prysm/v6/testing/spectest/utils" "github.com/OffchainLabs/prysm/v6/testing/util" @@ -29,6 +31,8 @@ func init() { // RunBlockProcessingTest executes "sanity/blocks" tests. 
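// The SetGenesisFork call below pins Deneb and every earlier fork to epoch 0 on the active
// config before the vectors run (the override cascades: Deneb -> Capella -> ... -> Altair),
// so schedule-driven helpers such as MaxBlobsPerBlock resolve Deneb values from genesis.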
func RunBlockProcessingTest(t *testing.T, config, folderPath string) { require.NoError(t, utils.SetConfig(t, config)) + cfg := params.BeaconConfig() + params.SetGenesisFork(t, cfg, version.Deneb) testFolders, testsFolderPath := utils.TestFolders(t, config, "deneb", folderPath) for _, folder := range testFolders { diff --git a/testing/util/BUILD.bazel b/testing/util/BUILD.bazel index 1b49740d77..86f4c85fc6 100644 --- a/testing/util/BUILD.bazel +++ b/testing/util/BUILD.bazel @@ -27,6 +27,7 @@ go_library( "lightclient.go", "logging.go", "merge.go", + "slot.go", "state.go", "sync_aggregate.go", "sync_committee.go", diff --git a/testing/util/deneb.go b/testing/util/deneb.go index 336ddaf8c5..122f04739e 100644 --- a/testing/util/deneb.go +++ b/testing/util/deneb.go @@ -50,6 +50,12 @@ func WithPayloadSetter(p *enginev1.ExecutionPayloadDeneb) DenebBlockGeneratorOpt } } +func WithDenebSlot(slot primitives.Slot) DenebBlockGeneratorOption { + return func(g *denebBlockGenerator) { + g.slot = slot + } +} + func GenerateTestDenebBlockWithSidecar(t *testing.T, parent [32]byte, slot primitives.Slot, nblobs int, opts ...DenebBlockGeneratorOption) (blocks.ROBlock, []blocks.ROBlob) { g := &denebBlockGenerator{ parent: parent, @@ -178,9 +184,11 @@ func fakeEmptyProof(_ *testing.T, _ *ethpb.BlobSidecar) [][]byte { } func ExtendBlocksPlusBlobs(t *testing.T, blks []blocks.ROBlock, size int) ([]blocks.ROBlock, []blocks.ROBlob) { + deneb := params.BeaconConfig().DenebForkEpoch + denebSlot := SlotAtEpoch(t, deneb) blobs := make([]blocks.ROBlob, 0) if len(blks) == 0 { - blk, blb := GenerateTestDenebBlockWithSidecar(t, [32]byte{}, 0, 6) + blk, blb := GenerateTestDenebBlockWithSidecar(t, [32]byte{}, denebSlot, 6) blobs = append(blobs, blb...) blks = append(blks, blk) } diff --git a/testing/util/deneb_test.go b/testing/util/deneb_test.go index a91ab9bb6f..218dd2a351 100644 --- a/testing/util/deneb_test.go +++ b/testing/util/deneb_test.go @@ -9,7 +9,8 @@ import ( ) func TestInclusionProofs(t *testing.T) { - _, blobs := GenerateTestDenebBlockWithSidecar(t, [32]byte{}, 0, params.BeaconConfig().MaxBlobsPerBlock(0)) + ds := SlotAtEpoch(t, params.BeaconConfig().DenebForkEpoch) + _, blobs := GenerateTestDenebBlockWithSidecar(t, [32]byte{}, ds, params.BeaconConfig().MaxBlobsPerBlock(ds)) for i := range blobs { require.NoError(t, blocks.VerifyKZGInclusionProof(blobs[i])) } diff --git a/testing/util/electra_state.go b/testing/util/electra_state.go index abfa8e2317..29c6cf8930 100644 --- a/testing/util/electra_state.go +++ b/testing/util/electra_state.go @@ -18,8 +18,17 @@ import ( "github.com/pkg/errors" ) +type ElectraStateOption func(*ethpb.BeaconStateElectra) error + +func WithElectraStateSlot(slot primitives.Slot) ElectraStateOption { + return func(s *ethpb.BeaconStateElectra) error { + s.Slot = slot + return nil + } +} + // DeterministicGenesisStateElectra returns a genesis state in Electra format made using the deterministic deposits. 
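// Options are applied to the protobuf genesis state after its fields are populated, so tests
// can tweak individual fields without another constructor. A minimal usage sketch (validator
// count and slot are illustrative):
//
//	st, _ := DeterministicGenesisStateElectra(t, 64, WithElectraStateSlot(100))
//	// st.Slot() == 100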
-func DeterministicGenesisStateElectra(t testing.TB, numValidators uint64) (state.BeaconState, []bls.SecretKey) { +func DeterministicGenesisStateElectra(t testing.TB, numValidators uint64, opts ...ElectraStateOption) (state.BeaconState, []bls.SecretKey) { deposits, privKeys, err := DeterministicDepositsAndKeys(numValidators) if err != nil { t.Fatal(errors.Wrapf(err, "failed to get %d deposits", numValidators)) @@ -28,7 +37,7 @@ func DeterministicGenesisStateElectra(t testing.TB, numValidators uint64) (state if err != nil { t.Fatal(errors.Wrapf(err, "failed to get eth1data for %d deposits", numValidators)) } - beaconState, err := genesisBeaconStateElectra(t.Context(), deposits, uint64(0), eth1Data) + beaconState, err := genesisBeaconStateElectra(t.Context(), deposits, uint64(0), eth1Data, opts...) if err != nil { t.Fatal(errors.Wrapf(err, "failed to get genesis beacon state of %d validators", numValidators)) } @@ -51,7 +60,7 @@ func setKeysToActive(beaconState state.BeaconState) error { } // genesisBeaconStateElectra returns the genesis beacon state. -func genesisBeaconStateElectra(ctx context.Context, deposits []*ethpb.Deposit, genesisTime uint64, eth1Data *ethpb.Eth1Data) (state.BeaconState, error) { +func genesisBeaconStateElectra(ctx context.Context, deposits []*ethpb.Deposit, genesisTime uint64, eth1Data *ethpb.Eth1Data, opts ...ElectraStateOption) (state.BeaconState, error) { st, err := emptyGenesisStateElectra() if err != nil { return nil, err @@ -68,7 +77,7 @@ func genesisBeaconStateElectra(ctx context.Context, deposits []*ethpb.Deposit, g return nil, errors.Wrap(err, "could not process validator deposits") } - return buildGenesisBeaconStateElectra(genesisTime, st, st.Eth1Data()) + return buildGenesisBeaconStateElectra(genesisTime, st, st.Eth1Data(), opts...) } // emptyGenesisStateElectra returns an empty genesis state in Electra format. 
@@ -105,7 +114,7 @@ func emptyGenesisStateElectra() (state.BeaconState, error) { return state_native.InitializeFromProtoElectra(st) } -func buildGenesisBeaconStateElectra(genesisTime uint64, preState state.BeaconState, eth1Data *ethpb.Eth1Data) (state.BeaconState, error) { +func buildGenesisBeaconStateElectra(genesisTime uint64, preState state.BeaconState, eth1Data *ethpb.Eth1Data, opts ...ElectraStateOption) (state.BeaconState, error) { if eth1Data == nil { return nil, errors.New("no eth1data provided for genesis state") } @@ -213,6 +222,11 @@ func buildGenesisBeaconStateElectra(genesisTime uint64, preState state.BeaconSta PendingPartialWithdrawals: make([]*ethpb.PendingPartialWithdrawal, 0), PendingConsolidations: make([]*ethpb.PendingConsolidation, 0), } + for _, opt := range opts { + if err := opt(st); err != nil { + return nil, err + } + } var scBits [fieldparams.SyncAggregateSyncCommitteeBytesLength]byte bodyRoot, err := (ðpb.BeaconBlockBodyElectra{ diff --git a/testing/util/slot.go b/testing/util/slot.go new file mode 100644 index 0000000000..a010fa5dcf --- /dev/null +++ b/testing/util/slot.go @@ -0,0 +1,15 @@ +package util + +import ( + "testing" + + "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v6/time/slots" +) + +func SlotAtEpoch(t *testing.T, e primitives.Epoch) primitives.Slot { + s, err := slots.EpochStart(e) + require.NoError(t, err) + return s +} From 38955fd08c57516226a84f880e25d19283343352 Mon Sep 17 00:00:00 2001 From: terence Date: Tue, 7 Oct 2025 22:18:13 -0700 Subject: [PATCH 004/103] Optimize pending attestation processing by adding batching (#15801) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Optimize pending attestation processing by adding batching * Update beacon-chain/sync/pending_attestations_queue.go Co-authored-by: Radosław Kapka * Update beacon-chain/sync/pending_attestations_queue.go Co-authored-by: Radosław Kapka * Add root for debug * Change it to map * Dont need receiver * Use two slices --------- Co-authored-by: Radosław Kapka --- beacon-chain/sync/BUILD.bazel | 1 + .../sync/pending_attestations_queue.go | 364 ++++++++++----- .../pending_attestations_queue_bucket_test.go | 417 ++++++++++++++++++ .../ttsao_optimize-attestation-batching.md | 3 + 4 files changed, 669 insertions(+), 116 deletions(-) create mode 100644 beacon-chain/sync/pending_attestations_queue_bucket_test.go create mode 100644 changelog/ttsao_optimize-attestation-batching.md diff --git a/beacon-chain/sync/BUILD.bazel b/beacon-chain/sync/BUILD.bazel index 00b9354aa1..d151af3d3e 100644 --- a/beacon-chain/sync/BUILD.bazel +++ b/beacon-chain/sync/BUILD.bazel @@ -174,6 +174,7 @@ go_test( "fork_watcher_test.go", "kzg_batch_verifier_test.go", "once_test.go", + "pending_attestations_queue_bucket_test.go", "pending_attestations_queue_test.go", "pending_blocks_queue_test.go", "rate_limiter_test.go", diff --git a/beacon-chain/sync/pending_attestations_queue.go b/beacon-chain/sync/pending_attestations_queue.go index 1270ffac71..3153ec6ed3 100644 --- a/beacon-chain/sync/pending_attestations_queue.go +++ b/beacon-chain/sync/pending_attestations_queue.go @@ -7,9 +7,11 @@ import ( "fmt" "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain" + "github.com/OffchainLabs/prysm/v6/beacon-chain/core/blocks" "github.com/OffchainLabs/prysm/v6/beacon-chain/core/feed" "github.com/OffchainLabs/prysm/v6/beacon-chain/core/feed/operation" 
"github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v6/beacon-chain/state" "github.com/OffchainLabs/prysm/v6/config/features" "github.com/OffchainLabs/prysm/v6/config/params" "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" @@ -18,6 +20,7 @@ import ( "github.com/OffchainLabs/prysm/v6/monitoring/tracing/trace" ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" "github.com/OffchainLabs/prysm/v6/runtime/version" + "github.com/OffchainLabs/prysm/v6/time" "github.com/OffchainLabs/prysm/v6/time/slots" pubsub "github.com/libp2p/go-libp2p-pubsub" "github.com/sirupsen/logrus" @@ -47,12 +50,16 @@ func (s *Service) processPendingAttsForBlock(ctx context.Context, bRoot [32]byte s.pendingAttsLock.RUnlock() if len(attestations) > 0 { + start := time.Now() s.processAttestations(ctx, attestations) + duration := time.Since(start) log.WithFields(logrus.Fields{ "blockRoot": hex.EncodeToString(bytesutil.Trunc(bRoot[:])), "pendingAttsCount": len(attestations), + "duration": duration, }).Debug("Verified and saved pending attestations to pool") } + randGen := rand.NewGenerator() // Delete the missing block root key from pending attestation queue so a node will not request for the block again. s.pendingAttsLock.Lock() @@ -72,18 +79,224 @@ func (s *Service) processPendingAttsForBlock(ctx context.Context, bRoot [32]byte } func (s *Service) processAttestations(ctx context.Context, attestations []any) { - for _, signedAtt := range attestations { - // The pending attestations can arrive as both aggregates and attestations, - // and each form has to be processed differently. - switch t := signedAtt.(type) { + if len(attestations) == 0 { + return + } + + atts := make([]ethpb.Att, 0, len(attestations)) + for _, att := range attestations { + switch v := att.(type) { case ethpb.Att: - s.processAtt(ctx, t) + atts = append(atts, v) case ethpb.SignedAggregateAttAndProof: - s.processAggregate(ctx, t) + s.processAggregate(ctx, v) default: - log.Warnf("Unexpected item of type %T in pending attestation queue. Item will not be processed", t) + log.Warnf("Unexpected attestation type %T, skipping", v) } } + + for _, bucket := range bucketAttestationsByData(atts) { + s.processAttestationBucket(ctx, bucket) + } +} + +// attestationBucket groups attestations with the same AttestationData for batch processing. +type attestationBucket struct { + dataHash [32]byte + data *ethpb.AttestationData + attestations []ethpb.Att +} + +// processAttestationBucket processes a bucket of attestations with shared AttestationData. +func (s *Service) processAttestationBucket(ctx context.Context, bucket *attestationBucket) { + if bucket == nil || len(bucket.attestations) == 0 { + return + } + + data := bucket.data + + // Shared validations for the entire bucket. + if !s.cfg.chain.InForkchoice(bytesutil.ToBytes32(data.BeaconBlockRoot)) { + log.WithError(blockchain.ErrNotDescendantOfFinalized).WithField("root", fmt.Sprintf("%#x", data.BeaconBlockRoot)).Debug("Failed forkchoice check for bucket") + return + } + + preState, err := s.cfg.chain.AttestationTargetState(ctx, data.Target) + if err != nil { + log.WithError(err).Debug("Failed to get attestation prestate for bucket") + return + } + + if err := s.cfg.chain.VerifyLmdFfgConsistency(ctx, bucket.attestations[0]); err != nil { + log.WithError(err).Debug("Failed FFG consistency check for bucket") + return + } + + // Collect valid attestations for both single and electra formats. 
+ // Broadcast takes single format but attestation pool and batch signature verification take electra format. + forBroadcast := make([]ethpb.Att, 0, len(bucket.attestations)) + forPool := make([]ethpb.Att, 0, len(bucket.attestations)) + + for _, att := range bucket.attestations { + committee, err := helpers.BeaconCommitteeFromState(ctx, preState, data.Slot, att.GetCommitteeIndex()) + if err != nil { + log.WithError(err).Debug("Failed to get committee from state") + continue + } + + valid, err := validateAttesterData(ctx, att, committee) + if err != nil { + log.WithError(err).Debug("Failed attester data validation") + continue + } + if valid != pubsub.ValidationAccept { + log.Debug("Pending attestation rejected due to invalid data") + continue + } + + var conv ethpb.Att + if att.Version() >= version.Electra { + single, ok := att.(*ethpb.SingleAttestation) + if !ok { + log.Debugf("Wrong type: expected %T, got %T", ðpb.SingleAttestation{}, att) + continue + } + conv = single.ToAttestationElectra(committee) + } else { + conv = att + } + + forBroadcast = append(forBroadcast, att) + forPool = append(forPool, conv) + } + + if len(forPool) == 0 { + return + } + + verified := s.batchVerifyAttestationSignatures(ctx, forPool, preState) + verifiedSet := make(map[ethpb.Att]struct{}, len(verified)) + for _, att := range verified { + verifiedSet[att] = struct{}{} + } + + for i, poolAtt := range forPool { + if _, ok := verifiedSet[poolAtt]; ok { + s.processVerifiedAttestation(ctx, forBroadcast[i], poolAtt, preState) + } + } +} + +// batchVerifyAttestationSignatures attempts batch verification, with individual fallback on failure. +func (s *Service) batchVerifyAttestationSignatures( + ctx context.Context, + attestations []ethpb.Att, + preState state.ReadOnlyBeaconState, +) []ethpb.Att { + const fallbackMsg = "batch verification failed, using individual checks" + + set, err := blocks.AttestationSignatureBatch(ctx, preState, attestations) + if err != nil { + log.WithError(err).Debug(fallbackMsg) + return s.fallbackToIndividualVerification(ctx, attestations, preState) + } + + ok, err := set.Verify() + if err != nil || !ok { + if err != nil { + log.WithError(err).Debug(fallbackMsg) + } else { + log.Debug(fallbackMsg) + } + return s.fallbackToIndividualVerification(ctx, attestations, preState) + } + + return attestations +} + +// fallbackToIndividualVerification verifies each attestation individually if batch verification fails. +func (s *Service) fallbackToIndividualVerification( + ctx context.Context, + attestations []ethpb.Att, + preState state.ReadOnlyBeaconState, +) []ethpb.Att { + verified := make([]ethpb.Att, 0, len(attestations)) + + for _, att := range attestations { + res, err := s.validateUnaggregatedAttWithState(ctx, att, preState) + if err != nil { + log.WithError(err).Debug("Individual signature verification error") + continue + } + if res == pubsub.ValidationAccept { + verified = append(verified, att) + } + } + + return verified +} + +// saveAttestation saves an attestation to the appropriate pool. +func (s *Service) saveAttestation(att ethpb.Att) error { + if features.Get().EnableExperimentalAttestationPool { + return s.cfg.attestationCache.Add(att) + } + return s.cfg.attPool.SaveUnaggregatedAttestation(att) +} + +// processVerifiedAttestation handles a signature-verified attestation. 
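// The broadcastAtt/poolAtt pair mirrors the forBroadcast/forPool slices built above: the
// former keeps the format that arrived over gossip (a SingleAttestation post-Electra), the
// latter is the converted attestation that the pool and the batch signature check operate on.
// On success the attestation is saved, marked as seen, re-broadcast on its computed subnet,
// and surfaced on the operation feed (SingleAttReceived post-Electra, UnaggregatedAttReceived
// otherwise).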
+func (s *Service) processVerifiedAttestation( + ctx context.Context, + broadcastAtt ethpb.Att, + poolAtt ethpb.Att, + preState state.ReadOnlyBeaconState, +) { + data := broadcastAtt.GetData() + + if err := s.saveAttestation(poolAtt); err != nil { + log.WithError(err).Debug("Failed to save unaggregated attestation") + return + } + + if key, err := generateUnaggregatedAttCacheKey(broadcastAtt); err != nil { + log.WithError(err).Error("Failed to generate cache key for attestation tracking") + } else { + s.setSeenUnaggregatedAtt(key) + } + + valCount, err := helpers.ActiveValidatorCount(ctx, preState, slots.ToEpoch(data.Slot)) + if err != nil { + log.WithError(err).Debug("Failed to retrieve active validator count") + return + } + + if err := s.cfg.p2p.BroadcastAttestation(ctx, helpers.ComputeSubnetForAttestation(valCount, broadcastAtt), broadcastAtt); err != nil { + log.WithError(err).Debug("Failed to broadcast attestation") + } + + var ( + eventType feed.EventType + eventData any + ) + + switch { + case broadcastAtt.Version() >= version.Electra: + if sa, ok := broadcastAtt.(*ethpb.SingleAttestation); ok { + eventType = operation.SingleAttReceived + eventData = &operation.SingleAttReceivedData{Attestation: sa} + break + } + fallthrough + default: + eventType = operation.UnaggregatedAttReceived + eventData = &operation.UnAggregatedAttReceivedData{Attestation: broadcastAtt} + } + + // Send event notification + s.cfg.attestationNotifier.OperationFeed().Send(&feed.Event{ + Type: eventType, + Data: eventData, + }) } func (s *Service) processAggregate(ctx context.Context, aggregate ethpb.SignedAggregateAttAndProof) { @@ -94,9 +307,10 @@ func (s *Service) processAggregate(ctx context.Context, aggregate ethpb.SignedAg valRes, err := s.validateAggregatedAtt(ctx, aggregate) if err != nil { log.WithError(err).Debug("Pending aggregated attestation failed validation") + return } aggValid := pubsub.ValidationAccept == valRes - if s.validateBlockInAttestation(ctx, aggregate) && aggValid { + if aggValid && s.validateBlockInAttestation(ctx, aggregate) { if features.Get().EnableExperimentalAttestationPool { if err = s.cfg.attestationCache.Add(att); err != nil { log.WithError(err).Debug("Could not save aggregated attestation") @@ -123,114 +337,6 @@ func (s *Service) processAggregate(ctx context.Context, aggregate ethpb.SignedAg } } -func (s *Service) processAtt(ctx context.Context, att ethpb.Att) { - data := att.GetData() - - // This is an important validation before retrieving attestation pre state to defend against - // attestation's target intentionally referencing a checkpoint that's long ago. 
- if !s.cfg.chain.InForkchoice(bytesutil.ToBytes32(data.BeaconBlockRoot)) { - log.WithError(blockchain.ErrNotDescendantOfFinalized).Debug("Could not verify finalized consistency") - return - } - if err := s.cfg.chain.VerifyLmdFfgConsistency(ctx, att); err != nil { - log.WithError(err).Debug("Could not verify FFG consistency") - return - } - preState, err := s.cfg.chain.AttestationTargetState(ctx, data.Target) - if err != nil { - log.WithError(err).Debug("Could not retrieve attestation prestate") - return - } - committee, err := helpers.BeaconCommitteeFromState(ctx, preState, data.Slot, att.GetCommitteeIndex()) - if err != nil { - log.WithError(err).Debug("Could not retrieve committee from state") - return - } - valid, err := validateAttesterData(ctx, att, committee) - if err != nil { - log.WithError(err).Debug("Could not validate attester data") - return - } else if valid != pubsub.ValidationAccept { - log.Debug("Attestation failed attester data validation") - return - } - - // Decide if the attestation is an Electra SingleAttestation or a Phase0 unaggregated attestation - var ( - attForValidation ethpb.Att - broadcastAtt ethpb.Att - eventType feed.EventType - eventData interface{} - ) - - if att.Version() >= version.Electra { - singleAtt, ok := att.(*ethpb.SingleAttestation) - if !ok { - log.Debugf("Attestation has wrong type (expected %T, got %T)", ðpb.SingleAttestation{}, att) - return - } - // Convert Electra SingleAttestation to unaggregated ElectraAttestation. This is needed because many parts of the codebase assume that attestations have a certain structure and SingleAttestation validates these assumptions. - attForValidation = singleAtt.ToAttestationElectra(committee) - broadcastAtt = singleAtt - eventType = operation.SingleAttReceived - eventData = &operation.SingleAttReceivedData{ - Attestation: singleAtt, - } - } else { - // Phase0 attestation - attForValidation = att - broadcastAtt = att - eventType = operation.UnaggregatedAttReceived - eventData = &operation.UnAggregatedAttReceivedData{ - Attestation: att, - } - } - - valid, err = s.validateUnaggregatedAttWithState(ctx, attForValidation, preState) - if err != nil { - log.WithError(err).Debug("Pending unaggregated attestation failed validation") - return - } - - if valid == pubsub.ValidationAccept { - if features.Get().EnableExperimentalAttestationPool { - if err = s.cfg.attestationCache.Add(attForValidation); err != nil { - log.WithError(err).Debug("Could not save unaggregated attestation") - return - } - } else { - if err := s.cfg.attPool.SaveUnaggregatedAttestation(attForValidation); err != nil { - log.WithError(err).Debug("Could not save unaggregated attestation") - return - } - } - - attKey, err := generateUnaggregatedAttCacheKey(att) - if err != nil { - log.WithError(err).Error("Could not generate cache key for attestation tracking") - } else { - s.setSeenUnaggregatedAtt(attKey) - } - - valCount, err := helpers.ActiveValidatorCount(ctx, preState, slots.ToEpoch(data.Slot)) - if err != nil { - log.WithError(err).Debug("Could not retrieve active validator count") - return - } - - // Broadcast the final 'broadcastAtt' object - if err := s.cfg.p2p.BroadcastAttestation(ctx, helpers.ComputeSubnetForAttestation(valCount, broadcastAtt), broadcastAtt); err != nil { - log.WithError(err).Debug("Could not broadcast") - } - - // Feed event notification for other services - s.cfg.attestationNotifier.OperationFeed().Send(&feed.Event{ - Type: eventType, - Data: eventData, - }) - } -} - // This defines how pending aggregates are saved in 
the map. The key is the // root of the missing block. The value is the list of pending attestations/aggregates // that voted for that block root. The caller of this function is responsible @@ -372,3 +478,29 @@ func (s *Service) validatePendingAtts(ctx context.Context, slot primitives.Slot) } } } + +// bucketAttestationsByData groups attestations by their AttestationData hash. +func bucketAttestationsByData(attestations []ethpb.Att) map[[32]byte]*attestationBucket { + bucketMap := make(map[[32]byte]*attestationBucket) + + for _, att := range attestations { + data := att.GetData() + dataHash, err := data.HashTreeRoot() + if err != nil { + log.WithError(err).Debug("Failed to hash attestation data, skipping attestation") + continue + } + + if bucket, ok := bucketMap[dataHash]; ok { + bucket.attestations = append(bucket.attestations, att) + } else { + bucketMap[dataHash] = &attestationBucket{ + dataHash: dataHash, + data: data, + attestations: []ethpb.Att{att}, + } + } + } + + return bucketMap +} diff --git a/beacon-chain/sync/pending_attestations_queue_bucket_test.go b/beacon-chain/sync/pending_attestations_queue_bucket_test.go new file mode 100644 index 0000000000..59b72e71c1 --- /dev/null +++ b/beacon-chain/sync/pending_attestations_queue_bucket_test.go @@ -0,0 +1,417 @@ +package sync + +import ( + "context" + "testing" + + mockChain "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/testing" + "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" + ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v6/testing/assert" + "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v6/testing/util" + logTest "github.com/sirupsen/logrus/hooks/test" +) + +func TestProcessAttestationBucket(t *testing.T) { + t.Run("EmptyBucket", func(t *testing.T) { + hook := logTest.NewGlobal() + s := &Service{} + + s.processAttestationBucket(context.Background(), nil) + + emptyBucket := &attestationBucket{ + attestations: []ethpb.Att{}, + } + s.processAttestationBucket(context.Background(), emptyBucket) + + require.Equal(t, 0, len(hook.Entries), "Should not log any messages for empty buckets") + }) + + t.Run("ForkchoiceFailure", func(t *testing.T) { + hook := logTest.NewGlobal() + chainService := &mockChain.ChainService{ + NotFinalized: true, // This makes InForkchoice return false + } + + s := &Service{ + cfg: &config{ + chain: chainService, + }, + } + + attData := ðpb.AttestationData{ + BeaconBlockRoot: bytesutil.PadTo([]byte("blockroot"), 32), + } + + bucket := &attestationBucket{ + data: attData, + attestations: []ethpb.Att{util.NewAttestation()}, + } + + s.processAttestationBucket(context.Background(), bucket) + + require.Equal(t, 1, len(hook.Entries)) + assert.StringContains(t, "Failed forkchoice check for bucket", hook.LastEntry().Message) + require.NotNil(t, hook.LastEntry().Data["error"]) + }) + + t.Run("CommitteeFailure", func(t *testing.T) { + hook := logTest.NewGlobal() + beaconState, err := util.NewBeaconState() + require.NoError(t, err) + require.NoError(t, beaconState.SetSlot(1)) + + chainService := &mockChain.ChainService{ + State: beaconState, + ValidAttestation: true, + } + + s := &Service{ + cfg: &config{ + chain: chainService, + }, + } + + attData := ðpb.AttestationData{ + BeaconBlockRoot: bytesutil.PadTo([]byte("blockroot"), 32), + Target: ðpb.Checkpoint{ + Epoch: 1, + Root: bytesutil.PadTo([]byte("blockroot"), 32), + }, + CommitteeIndex: 999999, + } + + att := util.NewAttestation() + att.Data = attData + + bucket := 
&attestationBucket{ + data: attData, + attestations: []ethpb.Att{att}, + } + + s.processAttestationBucket(context.Background(), bucket) + + require.Equal(t, 1, len(hook.Entries)) + assert.StringContains(t, "Failed to get committee from state", hook.LastEntry().Message) + }) + + t.Run("FFGConsistencyFailure", func(t *testing.T) { + hook := logTest.NewGlobal() + + validators := make([]*ethpb.Validator, 64) + for i := range validators { + validators[i] = ðpb.Validator{ + ExitEpoch: 1000000, + EffectiveBalance: 32000000000, + } + } + + beaconState, err := util.NewBeaconState() + require.NoError(t, err) + require.NoError(t, beaconState.SetSlot(1)) + require.NoError(t, beaconState.SetValidators(validators)) + + chainService := &mockChain.ChainService{ + State: beaconState, + } + + s := &Service{ + cfg: &config{ + chain: chainService, + }, + } + + attData := ðpb.AttestationData{ + BeaconBlockRoot: bytesutil.PadTo([]byte("blockroot"), 32), + Target: ðpb.Checkpoint{ + Epoch: 1, + Root: bytesutil.PadTo([]byte("different_target"), 32), // Different from BeaconBlockRoot to trigger FFG failure + }, + } + + att := util.NewAttestation() + att.Data = attData + + bucket := &attestationBucket{ + data: attData, + attestations: []ethpb.Att{att}, + } + + s.processAttestationBucket(context.Background(), bucket) + + require.Equal(t, 1, len(hook.Entries)) + assert.StringContains(t, "Failed FFG consistency check for bucket", hook.LastEntry().Message) + }) + + t.Run("ProcessingSuccess", func(t *testing.T) { + hook := logTest.NewGlobal() + validators := make([]*ethpb.Validator, 64) + for i := range validators { + validators[i] = ðpb.Validator{ + ExitEpoch: 1000000, + EffectiveBalance: 32000000000, + } + } + + beaconState, err := util.NewBeaconState() + require.NoError(t, err) + require.NoError(t, beaconState.SetSlot(1)) + require.NoError(t, beaconState.SetValidators(validators)) + + chainService := &mockChain.ChainService{ + State: beaconState, + ValidAttestation: true, + } + + s := &Service{ + cfg: &config{ + chain: chainService, + }, + } + + // Test with Phase0 attestation + t.Run("Phase0_NoError", func(t *testing.T) { + hook.Reset() // Reset logs before test + phase0Att := util.NewAttestation() + phase0Att.Data.Slot = 1 + phase0Att.Data.CommitteeIndex = 0 + + bucket := &attestationBucket{ + data: phase0Att.GetData(), + attestations: []ethpb.Att{phase0Att}, + } + + s.processAttestationBucket(context.Background(), bucket) + }) + + // Test with SingleAttestation + t.Run("Electra_NoError", func(t *testing.T) { + hook.Reset() // Reset logs before test + attData := ðpb.AttestationData{ + Slot: 1, + CommitteeIndex: 0, + BeaconBlockRoot: bytesutil.PadTo([]byte("blockroot"), 32), + Source: ðpb.Checkpoint{ + Epoch: 0, + Root: bytesutil.PadTo([]byte("source"), 32), + }, + Target: ðpb.Checkpoint{ + Epoch: 1, + Root: bytesutil.PadTo([]byte("blockroot"), 32), // Same as BeaconBlockRoot for LMD/FFG consistency + }, + } + + singleAtt := ðpb.SingleAttestation{ + CommitteeId: 0, + AttesterIndex: 0, + Data: attData, + Signature: make([]byte, 96), + } + + bucket := &attestationBucket{ + data: singleAtt.GetData(), + attestations: []ethpb.Att{singleAtt}, + } + + s.processAttestationBucket(context.Background(), bucket) + }) + }) +} + +func TestBucketAttestationsByData(t *testing.T) { + t.Run("EmptyInput", func(t *testing.T) { + hook := logTest.NewGlobal() + buckets := bucketAttestationsByData(nil) + require.Equal(t, 0, len(buckets)) + require.Equal(t, 0, len(hook.Entries)) + + buckets = bucketAttestationsByData([]ethpb.Att{}) + 
require.Equal(t, 0, len(buckets)) + require.Equal(t, 0, len(hook.Entries)) + }) + + t.Run("SingleAttestation", func(t *testing.T) { + hook := logTest.NewGlobal() + att := util.NewAttestation() + att.Data.Slot = 1 + att.Data.CommitteeIndex = 0 + + buckets := bucketAttestationsByData([]ethpb.Att{att}) + + require.Equal(t, 1, len(buckets)) + var bucket *attestationBucket + for _, b := range buckets { + bucket = b + break + } + require.NotNil(t, bucket) + require.Equal(t, 1, len(bucket.attestations)) + require.Equal(t, att, bucket.attestations[0]) + require.Equal(t, att.GetData(), bucket.data) + require.Equal(t, 0, len(hook.Entries)) + }) + + t.Run("MultipleAttestationsSameData", func(t *testing.T) { + hook := logTest.NewGlobal() + + att1 := util.NewAttestation() + att1.Data.Slot = 1 + att1.Data.CommitteeIndex = 0 + + att2 := util.NewAttestation() + att2.Data = att1.Data // Same data + att2.Signature = make([]byte, 96) // Different signature + + buckets := bucketAttestationsByData([]ethpb.Att{att1, att2}) + + require.Equal(t, 1, len(buckets), "Should have one bucket for same data") + var bucket *attestationBucket + for _, b := range buckets { + bucket = b + break + } + require.NotNil(t, bucket) + require.Equal(t, 2, len(bucket.attestations), "Should have both attestations in one bucket") + require.Equal(t, att1.GetData(), bucket.data) + require.Equal(t, 0, len(hook.Entries)) + }) + + t.Run("MultipleAttestationsDifferentData", func(t *testing.T) { + hook := logTest.NewGlobal() + + att1 := util.NewAttestation() + att1.Data.Slot = 1 + att1.Data.CommitteeIndex = 0 + + att2 := util.NewAttestation() + att2.Data.Slot = 2 // Different slot + att2.Data.CommitteeIndex = 1 + + buckets := bucketAttestationsByData([]ethpb.Att{att1, att2}) + + require.Equal(t, 2, len(buckets), "Should have two buckets for different data") + bucketCount := 0 + for _, bucket := range buckets { + require.Equal(t, 1, len(bucket.attestations), "Each bucket should have one attestation") + bucketCount++ + } + require.Equal(t, 2, bucketCount, "Should have exactly two buckets") + require.Equal(t, 0, len(hook.Entries)) + }) + + t.Run("MixedAttestationTypes", func(t *testing.T) { + hook := logTest.NewGlobal() + + // Create Phase0 attestation + phase0Att := util.NewAttestation() + phase0Att.Data.Slot = 1 + phase0Att.Data.CommitteeIndex = 0 + + electraAtt := ðpb.SingleAttestation{ + CommitteeId: 0, + AttesterIndex: 1, + Data: phase0Att.Data, // Same data + Signature: make([]byte, 96), + } + + buckets := bucketAttestationsByData([]ethpb.Att{phase0Att, electraAtt}) + + require.Equal(t, 1, len(buckets), "Should have one bucket for same data") + var bucket *attestationBucket + for _, b := range buckets { + bucket = b + break + } + require.NotNil(t, bucket) + require.Equal(t, 2, len(bucket.attestations), "Should have both attestations in one bucket") + require.Equal(t, phase0Att.GetData(), bucket.data) + require.Equal(t, 0, len(hook.Entries)) + }) +} + +func TestBatchVerifyAttestationSignatures(t *testing.T) { + t.Run("EmptyInput", func(t *testing.T) { + s := &Service{} + + beaconState, err := util.NewBeaconState() + require.NoError(t, err) + + result := s.batchVerifyAttestationSignatures(context.Background(), []ethpb.Att{}, beaconState) + + // Empty input should return empty output + require.Equal(t, 0, len(result)) + }) + + t.Run("BatchVerificationWithState", func(t *testing.T) { + hook := logTest.NewGlobal() + validators := make([]*ethpb.Validator, 64) + for i := range validators { + validators[i] = ðpb.Validator{ + ExitEpoch: 1000000, + 
EffectiveBalance: 32000000000, + } + } + + beaconState, err := util.NewBeaconState() + require.NoError(t, err) + require.NoError(t, beaconState.SetSlot(1)) + require.NoError(t, beaconState.SetValidators(validators)) + + s := &Service{} + + att := util.NewAttestation() + att.Data.Slot = 1 + attestations := []ethpb.Att{att} + + result := s.batchVerifyAttestationSignatures(context.Background(), attestations, beaconState) + require.NotNil(t, result) + + if len(result) == 0 && len(hook.Entries) > 0 { + _ = false // Check if fallback message is logged + for _, entry := range hook.Entries { + if entry.Message == "batch verification failed, using individual checks" { + _ = true // Found the fallback message + break + } + } + // It's OK if fallback message is logged - this means the function is working correctly + } + }) + + t.Run("BatchVerificationFailureFallbackToIndividual", func(t *testing.T) { + hook := logTest.NewGlobal() + beaconState, err := util.NewBeaconState() + require.NoError(t, err) + require.NoError(t, beaconState.SetSlot(1)) + + chainService := &mockChain.ChainService{ + State: beaconState, + ValidAttestation: false, // This will cause verification to fail + } + + s := &Service{ + cfg: &config{ + chain: chainService, + }, + } + + att := util.NewAttestation() + att.Data.Slot = 1 + attestations := []ethpb.Att{att} + + result := s.batchVerifyAttestationSignatures(context.Background(), attestations, beaconState) + + require.Equal(t, 0, len(result)) + + require.NotEqual(t, 0, len(hook.Entries), "Should have log entries") + found := false + for _, entry := range hook.Entries { + if entry.Message == "batch verification failed, using individual checks" { + found = true + break + } + } + require.Equal(t, true, found, "Should log fallback message") + }) +} diff --git a/changelog/ttsao_optimize-attestation-batching.md b/changelog/ttsao_optimize-attestation-batching.md new file mode 100644 index 0000000000..a03c9039c0 --- /dev/null +++ b/changelog/ttsao_optimize-attestation-batching.md @@ -0,0 +1,3 @@ +### Changed + +- Optimize pending attestation processing by adding batching From d5ca327c304a86ecf964bfff1d68af20e155689b Mon Sep 17 00:00:00 2001 From: Galoretka Date: Wed, 8 Oct 2025 18:47:11 +0300 Subject: [PATCH 005/103] feature: Use service context and continue on slasher attestation errors (#15803) * feature: Use service context and continue on slasher attestation errors * Create Galoretka_feature-slasher-feed-use-service-ctx * Rename Galoretka_feature-slasher-feed-use-service-ctx to Galoretka_feature-slasher-feed-use-service-ctx.md --------- Co-authored-by: james-prysm <90280386+james-prysm@users.noreply.github.com> --- beacon-chain/blockchain/receive_block.go | 6 +++--- changelog/Galoretka_feature-slasher-feed-use-service-ctx.md | 3 +++ 2 files changed, 6 insertions(+), 3 deletions(-) create mode 100644 changelog/Galoretka_feature-slasher-feed-use-service-ctx.md diff --git a/beacon-chain/blockchain/receive_block.go b/beacon-chain/blockchain/receive_block.go index 5f0935822a..9326812295 100644 --- a/beacon-chain/blockchain/receive_block.go +++ b/beacon-chain/blockchain/receive_block.go @@ -585,17 +585,17 @@ func (s *Service) sendNewFinalizedEvent(ctx context.Context, postState state.Bea func (s *Service) sendBlockAttestationsToSlasher(signed interfaces.ReadOnlySignedBeaconBlock, preState state.BeaconState) { // Feed the indexed attestation to slasher if enabled. This action // is done in the background to avoid adding more load to this critical code path. 
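// The loop below is now tied to the service context rather than context.TODO(), and an error
// on one attestation only skips that attestation instead of dropping the remaining
// attestations in the block from the slasher feed.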
- ctx := context.TODO() + ctx := s.ctx for _, att := range signed.Block().Body().Attestations() { committees, err := helpers.AttestationCommitteesFromState(ctx, preState, att) if err != nil { log.WithError(err).Error("Could not get attestation committees") - return + continue } indexedAtt, err := attestation.ConvertToIndexed(ctx, att, committees...) if err != nil { log.WithError(err).Error("Could not convert to indexed attestation") - return + continue } s.cfg.SlasherAttestationsFeed.Send(&types.WrappedIndexedAtt{IndexedAtt: indexedAtt}) } diff --git a/changelog/Galoretka_feature-slasher-feed-use-service-ctx.md b/changelog/Galoretka_feature-slasher-feed-use-service-ctx.md new file mode 100644 index 0000000000..a5d0b8752c --- /dev/null +++ b/changelog/Galoretka_feature-slasher-feed-use-service-ctx.md @@ -0,0 +1,3 @@ +### Fixed + +- Use service context and continue on slasher attestation errors (#15803). From 5b20352ac6f55b985fdfe53ee05771ef5e353ce5 Mon Sep 17 00:00:00 2001 From: james-prysm <90280386+james-prysm@users.noreply.github.com> Date: Wed, 8 Oct 2025 13:34:32 -0500 Subject: [PATCH 006/103] cleaning up processAggregate (#15823) * cleaning up some code * kasey feedback * further simplifying * kasey's suggestion --- .../sync/pending_attestations_queue.go | 45 +++++++------------ .../james-prysm_cleanup-process-aggregate.md | 3 ++ 2 files changed, 20 insertions(+), 28 deletions(-) create mode 100644 changelog/james-prysm_cleanup-process-aggregate.md diff --git a/beacon-chain/sync/pending_attestations_queue.go b/beacon-chain/sync/pending_attestations_queue.go index 3153ec6ed3..7073a058aa 100644 --- a/beacon-chain/sync/pending_attestations_queue.go +++ b/beacon-chain/sync/pending_attestations_queue.go @@ -241,6 +241,9 @@ func (s *Service) saveAttestation(att ethpb.Att) error { if features.Get().EnableExperimentalAttestationPool { return s.cfg.attestationCache.Add(att) } + if att.IsAggregated() { + return s.cfg.attPool.SaveAggregatedAttestation(att) + } return s.cfg.attPool.SaveUnaggregatedAttestation(att) } @@ -300,40 +303,26 @@ func (s *Service) processVerifiedAttestation( } func (s *Service) processAggregate(ctx context.Context, aggregate ethpb.SignedAggregateAttAndProof) { - att := aggregate.AggregateAttestationAndProof().AggregateVal() - - // Save the pending aggregated attestation to the pool if it passes the aggregated - // validation steps. 
- valRes, err := s.validateAggregatedAtt(ctx, aggregate) + res, err := s.validateAggregatedAtt(ctx, aggregate) if err != nil { log.WithError(err).Debug("Pending aggregated attestation failed validation") return } - aggValid := pubsub.ValidationAccept == valRes - if aggValid && s.validateBlockInAttestation(ctx, aggregate) { - if features.Get().EnableExperimentalAttestationPool { - if err = s.cfg.attestationCache.Add(att); err != nil { - log.WithError(err).Debug("Could not save aggregated attestation") - return - } - } else { - if att.IsAggregated() { - if err = s.cfg.attPool.SaveAggregatedAttestation(att); err != nil { - log.WithError(err).Debug("Could not save aggregated attestation") - return - } - } else if err = s.cfg.attPool.SaveUnaggregatedAttestation(att); err != nil { - log.WithError(err).Debug("Could not save unaggregated attestation") - return - } - } + if res != pubsub.ValidationAccept || !s.validateBlockInAttestation(ctx, aggregate) { + log.Debug("Pending aggregated attestation failed validation") + return + } - s.setAggregatorIndexEpochSeen(att.GetData().Target.Epoch, aggregate.AggregateAttestationAndProof().GetAggregatorIndex()) + att := aggregate.AggregateAttestationAndProof().AggregateVal() + if err := s.saveAttestation(att); err != nil { + log.WithError(err).Debug("Could not save aggregated attestation") + return + } - // Broadcasting the signed attestation again once a node is able to process it. - if err := s.cfg.p2p.Broadcast(ctx, aggregate); err != nil { - log.WithError(err).Debug("Could not broadcast") - } + s.setAggregatorIndexEpochSeen(att.GetData().Target.Epoch, aggregate.AggregateAttestationAndProof().GetAggregatorIndex()) + + if err := s.cfg.p2p.Broadcast(ctx, aggregate); err != nil { + log.WithError(err).Debug("Could not broadcast aggregated attestation") } } diff --git a/changelog/james-prysm_cleanup-process-aggregate.md b/changelog/james-prysm_cleanup-process-aggregate.md new file mode 100644 index 0000000000..b41b7f4fb4 --- /dev/null +++ b/changelog/james-prysm_cleanup-process-aggregate.md @@ -0,0 +1,3 @@ +### Ignored + +- Small code changes for reusability and readability to processAggregate. \ No newline at end of file From 3f10439de1835a794700186a14003551620fbb84 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rados=C5=82aw=20Kapka?= Date: Wed, 8 Oct 2025 23:40:48 +0200 Subject: [PATCH 007/103] Do not verify block data when calculating rewards (#15819) * Do not verify block data when calculating rewards * remove `Get` from function names * changelog <3 * do not verify sync committee sig in handler * Revert "remove `Get` from function names" This reverts commit 770a89d9901492d95757b0b30abf4d8a4d1eee96. 
* typo fix --------- Co-authored-by: james-prysm <90280386+james-prysm@users.noreply.github.com> --- beacon-chain/core/altair/block.go | 11 +++- beacon-chain/core/altair/block_test.go | 10 ++++ beacon-chain/core/blocks/attester_slashing.go | 46 +++++++++++++++++ .../core/blocks/attester_slashing_test.go | 10 ++++ beacon-chain/core/blocks/proposer_slashing.go | 50 ++++++++++++++++++- .../core/blocks/proposer_slashing_test.go | 9 ++++ beacon-chain/rpc/eth/rewards/handlers.go | 2 +- beacon-chain/rpc/eth/rewards/service.go | 6 +-- changelog/radek_rewards-no-verify.md | 3 ++ 9 files changed, 140 insertions(+), 7 deletions(-) create mode 100644 changelog/radek_rewards-no-verify.md diff --git a/beacon-chain/core/altair/block.go b/beacon-chain/core/altair/block.go index bd35385684..db2385287c 100644 --- a/beacon-chain/core/altair/block.go +++ b/beacon-chain/core/altair/block.go @@ -49,13 +49,22 @@ func ProcessSyncAggregate(ctx context.Context, s state.BeaconState, sync *ethpb. if err != nil { return nil, 0, errors.Wrap(err, "could not filter sync committee votes") } - if err := VerifySyncCommitteeSig(s, votedKeys, sync.SyncCommitteeSignature); err != nil { return nil, 0, errors.Wrap(err, "could not verify sync committee signature") } return s, reward, nil } +// ProcessSyncAggregateNoVerifySig processes the sync aggregate without verifying the sync committee signature. +// This is useful in scenarios such as block reward calculation, where we can assume the data in the block is valid. +func ProcessSyncAggregateNoVerifySig(ctx context.Context, s state.BeaconState, sync *ethpb.SyncAggregate) (state.BeaconState, uint64, error) { + s, _, reward, err := processSyncAggregate(ctx, s, sync) + if err != nil { + return nil, 0, errors.Wrap(err, "could not filter sync committee votes") + } + return s, reward, nil +} + // processSyncAggregate applies all the logic in the spec function `process_sync_aggregate` except // verifying the BLS signatures. It returns the modified beacons state, the list of validators' // public keys that voted (for future signature verification) and the proposer reward for including diff --git a/beacon-chain/core/altair/block_test.go b/beacon-chain/core/altair/block_test.go index 9877c34f00..2642390f86 100644 --- a/beacon-chain/core/altair/block_test.go +++ b/beacon-chain/core/altair/block_test.go @@ -53,9 +53,19 @@ func TestProcessSyncCommittee_PerfectParticipation(t *testing.T) { SyncCommitteeSignature: aggregatedSig, } + // Verify that ProcessSyncAggregateNoVerifySig and ProcessSyncAggregate have the same outcome. + beaconStateNoVerifySig := beaconState.Copy() + beaconStateNoVerifySig, rewardNoVerifySig, err := altair.ProcessSyncAggregateNoVerifySig(t.Context(), beaconStateNoVerifySig, syncAggregate) + require.NoError(t, err) + sszNoVerifySig, err := beaconStateNoVerifySig.MarshalSSZ() + require.NoError(t, err) var reward uint64 beaconState, reward, err = altair.ProcessSyncAggregate(t.Context(), beaconState, syncAggregate) require.NoError(t, err) + ssz, err := beaconState.MarshalSSZ() + require.NoError(t, err) + assert.DeepEqual(t, sszNoVerifySig, ssz, "States resulting from ProcessSyncAggregateNoVerifySig and ProcessSyncAggregate are not equal") + assert.Equal(t, rewardNoVerifySig, reward, "Rewards resulting from ProcessSyncAggregateNoVerifySig and ProcessSyncAggregate are not equal") assert.Equal(t, uint64(72192), reward) // Use a non-sync committee index to compare profitability. 
diff --git a/beacon-chain/core/blocks/attester_slashing.go b/beacon-chain/core/blocks/attester_slashing.go index 80d95f3973..f6fe916aa1 100644 --- a/beacon-chain/core/blocks/attester_slashing.go +++ b/beacon-chain/core/blocks/attester_slashing.go @@ -55,6 +55,28 @@ func ProcessAttesterSlashings( return beaconState, nil } +// ProcessAttesterSlashingsNoVerify processes attester slashings without verifying them. +// This is useful in scenarios such as block reward calculation, where we can assume the data +// in the block is valid. +func ProcessAttesterSlashingsNoVerify( + ctx context.Context, + beaconState state.BeaconState, + slashings []ethpb.AttSlashing, + exitInfo *validators.ExitInfo, +) (state.BeaconState, error) { + if exitInfo == nil && len(slashings) > 0 { + return nil, errors.New("exit info required to process attester slashings") + } + var err error + for _, slashing := range slashings { + beaconState, err = ProcessAttesterSlashingNoVerify(ctx, beaconState, slashing, exitInfo) + if err != nil { + return nil, err + } + } + return beaconState, nil +} + // ProcessAttesterSlashing processes individual attester slashing. func ProcessAttesterSlashing( ctx context.Context, @@ -68,6 +90,30 @@ func ProcessAttesterSlashing( if err := VerifyAttesterSlashing(ctx, beaconState, slashing); err != nil { return nil, errors.Wrap(err, "could not verify attester slashing") } + return processAttesterSlashing(ctx, beaconState, slashing, exitInfo) +} + +// ProcessAttesterSlashingNoVerify processes individual attester slashing without verifying it. +// This is useful in scenarios such as block reward calculation, where we can assume the data +// in the block is valid. +func ProcessAttesterSlashingNoVerify( + ctx context.Context, + beaconState state.BeaconState, + slashing ethpb.AttSlashing, + exitInfo *validators.ExitInfo, +) (state.BeaconState, error) { + if exitInfo == nil { + return nil, errors.New("exit info is required to process attester slashing") + } + return processAttesterSlashing(ctx, beaconState, slashing, exitInfo) +} + +func processAttesterSlashing( + ctx context.Context, + beaconState state.BeaconState, + slashing ethpb.AttSlashing, + exitInfo *validators.ExitInfo, +) (state.BeaconState, error) { slashableIndices := SlashableAttesterIndices(slashing) sort.SliceStable(slashableIndices, func(i, j int) bool { return slashableIndices[i] < slashableIndices[j] diff --git a/beacon-chain/core/blocks/attester_slashing_test.go b/beacon-chain/core/blocks/attester_slashing_test.go index a71598a205..a95534c0c2 100644 --- a/beacon-chain/core/blocks/attester_slashing_test.go +++ b/beacon-chain/core/blocks/attester_slashing_test.go @@ -242,8 +242,18 @@ func TestProcessAttesterSlashings_AppliesCorrectStatus(t *testing.T) { currentSlot := 2 * params.BeaconConfig().SlotsPerEpoch require.NoError(t, tc.st.SetSlot(currentSlot)) + // Verify that ProcessAttesterSlashingsNoVerify and ProcessAttesterSlashings have the same outcome. 
+ stNoVerify := tc.st.Copy() + newStateNoVerify, err := blocks.ProcessAttesterSlashingsNoVerify(t.Context(), stNoVerify, []ethpb.AttSlashing{tc.slashing}, v.ExitInformation(stNoVerify)) + require.NoError(t, err) + sszNoVerify, err := newStateNoVerify.MarshalSSZ() + require.NoError(t, err) newState, err := blocks.ProcessAttesterSlashings(t.Context(), tc.st, []ethpb.AttSlashing{tc.slashing}, v.ExitInformation(tc.st)) require.NoError(t, err) + ssz, err := newState.MarshalSSZ() + require.NoError(t, err) + assert.DeepEqual(t, sszNoVerify, ssz, "States resulting from ProcessAttesterSlashingsNoVerify and ProcessAttesterSlashings are not equal") + newRegistry := newState.Validators() // Given the intersection of slashable indices is [1], only validator diff --git a/beacon-chain/core/blocks/proposer_slashing.go b/beacon-chain/core/blocks/proposer_slashing.go index f45ae2fc97..48ba4b0047 100644 --- a/beacon-chain/core/blocks/proposer_slashing.go +++ b/beacon-chain/core/blocks/proposer_slashing.go @@ -64,6 +64,28 @@ func ProcessProposerSlashings( return beaconState, nil } +// ProcessProposerSlashingsNoVerify processes proposer slashings without verifying them. +// This is useful in scenarios such as block reward calculation, where we can assume the data +// in the block is valid. +func ProcessProposerSlashingsNoVerify( + ctx context.Context, + beaconState state.BeaconState, + slashings []*ethpb.ProposerSlashing, + exitInfo *validators.ExitInfo, +) (state.BeaconState, error) { + if exitInfo == nil && len(slashings) > 0 { + return nil, errors.New("exit info required to process proposer slashings") + } + var err error + for _, slashing := range slashings { + beaconState, err = ProcessProposerSlashingNoVerify(ctx, beaconState, slashing, exitInfo) + if err != nil { + return nil, err + } + } + return beaconState, nil +} + // ProcessProposerSlashing processes individual proposer slashing. func ProcessProposerSlashing( ctx context.Context, @@ -71,16 +93,40 @@ func ProcessProposerSlashing( slashing *ethpb.ProposerSlashing, exitInfo *validators.ExitInfo, ) (state.BeaconState, error) { - var err error if slashing == nil { return nil, errors.New("nil proposer slashings in block body") } - if err = VerifyProposerSlashing(beaconState, slashing); err != nil { + if err := VerifyProposerSlashing(beaconState, slashing); err != nil { return nil, errors.Wrap(err, "could not verify proposer slashing") } + return processProposerSlashing(ctx, beaconState, slashing, exitInfo) +} + +// ProcessProposerSlashingNoVerify processes individual proposer slashing without verifying it. +// This is useful in scenarios such as block reward calculation, where we can assume the data +// in the block is valid. 
+func ProcessProposerSlashingNoVerify( + ctx context.Context, + beaconState state.BeaconState, + slashing *ethpb.ProposerSlashing, + exitInfo *validators.ExitInfo, +) (state.BeaconState, error) { + if slashing == nil { + return nil, errors.New("nil proposer slashings in block body") + } + return processProposerSlashing(ctx, beaconState, slashing, exitInfo) +} + +func processProposerSlashing( + ctx context.Context, + beaconState state.BeaconState, + slashing *ethpb.ProposerSlashing, + exitInfo *validators.ExitInfo, +) (state.BeaconState, error) { if exitInfo == nil { return nil, errors.New("exit info is required to process proposer slashing") } + var err error beaconState, err = validators.SlashValidator(ctx, beaconState, slashing.Header_1.Header.ProposerIndex, exitInfo) if err != nil { return nil, errors.Wrapf(err, "could not slash proposer index %d", slashing.Header_1.Header.ProposerIndex) diff --git a/beacon-chain/core/blocks/proposer_slashing_test.go b/beacon-chain/core/blocks/proposer_slashing_test.go index 0c3d231eff..f720448a30 100644 --- a/beacon-chain/core/blocks/proposer_slashing_test.go +++ b/beacon-chain/core/blocks/proposer_slashing_test.go @@ -172,8 +172,17 @@ func TestProcessProposerSlashings_AppliesCorrectStatus(t *testing.T) { block := util.NewBeaconBlock() block.Block.Body.ProposerSlashings = slashings + // Verify that ProcessProposerSlashingsNoVerify and ProcessProposerSlashings have the same outcome. + beaconStateNoVerify := beaconState.Copy() + newStateNoVerify, err := blocks.ProcessProposerSlashingsNoVerify(t.Context(), beaconStateNoVerify, block.Block.Body.ProposerSlashings, v.ExitInformation(beaconStateNoVerify)) + require.NoError(t, err) + sszNoVerify, err := newStateNoVerify.MarshalSSZ() + require.NoError(t, err) newState, err := blocks.ProcessProposerSlashings(t.Context(), beaconState, block.Block.Body.ProposerSlashings, v.ExitInformation(beaconState)) require.NoError(t, err) + ssz, err := newState.MarshalSSZ() + require.NoError(t, err) + assert.DeepEqual(t, sszNoVerify, ssz, "States resulting from ProcessProposerSlashingsNoVerify and ProcessProposerSlashings are not equal") newStateVals := newState.Validators() if newStateVals[1].ExitEpoch != beaconState.Validators()[1].ExitEpoch { diff --git a/beacon-chain/rpc/eth/rewards/handlers.go b/beacon-chain/rpc/eth/rewards/handlers.go index 7eeb7710ce..a95e7f9bdd 100644 --- a/beacon-chain/rpc/eth/rewards/handlers.go +++ b/beacon-chain/rpc/eth/rewards/handlers.go @@ -151,7 +151,7 @@ func (s *Server) SyncCommitteeRewards(w http.ResponseWriter, r *http.Request) { } } - _, proposerReward, err := altair.ProcessSyncAggregate(r.Context(), st, sa) + _, proposerReward, err := altair.ProcessSyncAggregateNoVerifySig(r.Context(), st, sa) if err != nil { httputil.HandleError(w, "Could not get sync aggregate rewards: "+err.Error(), http.StatusInternalServerError) return diff --git a/beacon-chain/rpc/eth/rewards/service.go b/beacon-chain/rpc/eth/rewards/service.go index 6e36451848..6043620e20 100644 --- a/beacon-chain/rpc/eth/rewards/service.go +++ b/beacon-chain/rpc/eth/rewards/service.go @@ -73,7 +73,7 @@ func (rs *BlockRewardService) GetBlockRewardsData(ctx context.Context, blk inter // ExitInformation is expensive to compute, only do it if we need it. 
exitInfo = validators.ExitInformation(st) } - st, err = coreblocks.ProcessAttesterSlashings(ctx, st, blk.Body().AttesterSlashings(), exitInfo) + st, err = coreblocks.ProcessAttesterSlashingsNoVerify(ctx, st, blk.Body().AttesterSlashings(), exitInfo) if err != nil { return nil, &httputil.DefaultJsonError{ Message: "Could not get attester slashing rewards: " + err.Error(), @@ -87,7 +87,7 @@ func (rs *BlockRewardService) GetBlockRewardsData(ctx context.Context, blk inter Code: http.StatusInternalServerError, } } - st, err = coreblocks.ProcessProposerSlashings(ctx, st, blk.Body().ProposerSlashings(), exitInfo) + st, err = coreblocks.ProcessProposerSlashingsNoVerify(ctx, st, blk.Body().ProposerSlashings(), exitInfo) if err != nil { return nil, &httputil.DefaultJsonError{ Message: "Could not get proposer slashing rewards: " + err.Error(), @@ -109,7 +109,7 @@ func (rs *BlockRewardService) GetBlockRewardsData(ctx context.Context, blk inter } } var syncCommitteeReward uint64 - _, syncCommitteeReward, err = altair.ProcessSyncAggregate(ctx, st, sa) + _, syncCommitteeReward, err = altair.ProcessSyncAggregateNoVerifySig(ctx, st, sa) if err != nil { return nil, &httputil.DefaultJsonError{ Message: "Could not get sync aggregate rewards: " + err.Error(), diff --git a/changelog/radek_rewards-no-verify.md b/changelog/radek_rewards-no-verify.md new file mode 100644 index 0000000000..0bf8a9bd94 --- /dev/null +++ b/changelog/radek_rewards-no-verify.md @@ -0,0 +1,3 @@ +### Changed + +- Do not verify block data when calculating rewards. \ No newline at end of file From 4946b007abc91ba6edab9be86311640db47cc724 Mon Sep 17 00:00:00 2001 From: Manu NALEPA Date: Thu, 9 Oct 2025 12:34:04 +0200 Subject: [PATCH 008/103] Data column sidecars fetch: Adjust log levels. (#15820) --- beacon-chain/sync/data_column_sidecars.go | 4 ++-- changelog/manu-adjust-log-levels.md | 2 ++ 2 files changed, 4 insertions(+), 2 deletions(-) create mode 100644 changelog/manu-adjust-log-levels.md diff --git a/beacon-chain/sync/data_column_sidecars.go b/beacon-chain/sync/data_column_sidecars.go index d5fba889f7..d9b8715b23 100644 --- a/beacon-chain/sync/data_column_sidecars.go +++ b/beacon-chain/sync/data_column_sidecars.go @@ -169,7 +169,7 @@ func FetchDataColumnSidecars( result[root] = sidecars } - log.WithField("finalMissingRootCount", len(incompleteRoots)).Debug("Failed to fetch data column sidecars from storage and peers using rescue mode") + log.WithField("finalMissingRootCount", len(incompleteRoots)).Warning("Failed to fetch data column sidecars") return result, missingByRoot, nil } @@ -738,7 +738,7 @@ func fetchDataColumnSidecarsFromPeers( roDataColumns, err := sendDataColumnSidecarsRequest(params, slotByRoot, slotsWithCommitments, peerID, indicesByRoot) if err != nil { - log.WithError(err).Warning("Failed to send data column sidecars request") + log.WithError(err).Debug("Failed to send data column sidecars request") return } diff --git a/changelog/manu-adjust-log-levels.md b/changelog/manu-adjust-log-levels.md new file mode 100644 index 0000000000..64e0826df1 --- /dev/null +++ b/changelog/manu-adjust-log-levels.md @@ -0,0 +1,2 @@ +### Ignored +- Data column sidecars fetch: Adjust log levels. 
From 83a171b4399b1bf1fa6431b11d37098c6cacc686 Mon Sep 17 00:00:00 2001 From: terence Date: Thu, 9 Oct 2025 07:27:03 -0700 Subject: [PATCH 009/103] Process pending atts after pending blocks clear (#15824) --- beacon-chain/sync/pending_blocks_queue.go | 5 +++++ changelog/ttsao_optimize-pending-attestation-processing.md | 3 +++ 2 files changed, 8 insertions(+) create mode 100644 changelog/ttsao_optimize-pending-attestation-processing.md diff --git a/beacon-chain/sync/pending_blocks_queue.go b/beacon-chain/sync/pending_blocks_queue.go index 4c8cb12c3f..3a0bacbba4 100644 --- a/beacon-chain/sync/pending_blocks_queue.go +++ b/beacon-chain/sync/pending_blocks_queue.go @@ -147,6 +147,11 @@ func (s *Service) processPendingBlocks(ctx context.Context) error { } cancelFunction() + // Process pending attestations for this block. + if err := s.processPendingAttsForBlock(ctx, blkRoot); err != nil { + log.WithError(err).Debug("Failed to process pending attestations for block") + } + // Remove the processed block from the queue. if err := s.removeBlockFromQueue(b, blkRoot); err != nil { return err diff --git a/changelog/ttsao_optimize-pending-attestation-processing.md b/changelog/ttsao_optimize-pending-attestation-processing.md new file mode 100644 index 0000000000..44e571a324 --- /dev/null +++ b/changelog/ttsao_optimize-pending-attestation-processing.md @@ -0,0 +1,3 @@ +### Changed + +- Process pending attestations after pending blocks are cleared From 515590e7feeaa0cdc3578e99966273b3c729905f Mon Sep 17 00:00:00 2001 From: james-prysm <90280386+james-prysm@users.noreply.github.com> Date: Thu, 9 Oct 2025 11:42:48 -0500 Subject: [PATCH 010/103] making block event only send on certain success (#15814) * making block event only send on certain success * potuz's comment * potuz comment * test --- beacon-chain/blockchain/process_block.go | 4 +- beacon-chain/blockchain/process_block_test.go | 156 ++++++++++++++++++ changelog/james-prysm_fix-block-event.md | 3 + 3 files changed, 162 insertions(+), 1 deletion(-) create mode 100644 changelog/james-prysm_fix-block-event.md diff --git a/beacon-chain/blockchain/process_block.go b/beacon-chain/blockchain/process_block.go index 26d5508869..3ff4d88089 100644 --- a/beacon-chain/blockchain/process_block.go +++ b/beacon-chain/blockchain/process_block.go @@ -72,7 +72,7 @@ func (s *Service) postBlockProcess(cfg *postBlockProcessConfig) error { if features.Get().EnableLightClient && slots.ToEpoch(s.CurrentSlot()) >= params.BeaconConfig().AltairForkEpoch { defer s.processLightClientUpdates(cfg) } - defer s.sendStateFeedOnBlock(cfg) + defer reportProcessingTime(startTime) defer reportAttestationInclusion(cfg.roblock.Block()) @@ -93,6 +93,8 @@ func (s *Service) postBlockProcess(cfg *postBlockProcessConfig) error { return errors.Wrap(err, "could not set optimistic block to valid") } } + + defer s.sendStateFeedOnBlock(cfg) // only send event after successful insertion start := time.Now() cfg.headRoot, err = s.cfg.ForkChoiceStore.Head(ctx) if err != nil { diff --git a/beacon-chain/blockchain/process_block_test.go b/beacon-chain/blockchain/process_block_test.go index 40ad3b9198..bebfb576bc 100644 --- a/beacon-chain/blockchain/process_block_test.go +++ b/beacon-chain/blockchain/process_block_test.go @@ -9,8 +9,10 @@ import ( "testing" "time" + mock "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/testing" "github.com/OffchainLabs/prysm/v6/beacon-chain/cache" "github.com/OffchainLabs/prysm/v6/beacon-chain/core/blocks" + statefeed 
"github.com/OffchainLabs/prysm/v6/beacon-chain/core/feed/state" "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" "github.com/OffchainLabs/prysm/v6/beacon-chain/core/peerdas" "github.com/OffchainLabs/prysm/v6/beacon-chain/core/signing" @@ -3147,6 +3149,160 @@ func TestIsDataAvailable(t *testing.T) { }) } +// Test_postBlockProcess_EventSending tests that block processed events are only sent +// when block processing succeeds according to the decision tree: +// +// Block Processing Flow: +// ├─ InsertNode FAILS (fork choice timeout) +// │ └─ blockProcessed = false ❌ NO EVENT +// │ +// ├─ InsertNode succeeds +// │ ├─ handleBlockAttestations FAILS +// │ │ └─ blockProcessed = false ❌ NO EVENT +// │ │ +// │ ├─ Block is NON-CANONICAL (not head) +// │ │ └─ blockProcessed = true ✅ SEND EVENT (Line 111) +// │ │ +// │ ├─ Block IS CANONICAL (new head) +// │ │ ├─ getFCUArgs FAILS +// │ │ │ └─ blockProcessed = true ✅ SEND EVENT (Line 117) +// │ │ │ +// │ │ ├─ sendFCU FAILS +// │ │ │ └─ blockProcessed = false ❌ NO EVENT +// │ │ │ +// │ │ └─ Full success +// │ │ └─ blockProcessed = true ✅ SEND EVENT (Line 125) +func Test_postBlockProcess_EventSending(t *testing.T) { + ctx := context.Background() + + // Helper to create a minimal valid block and state + createTestBlockAndState := func(t *testing.T, slot primitives.Slot, parentRoot [32]byte) (consensusblocks.ROBlock, state.BeaconState) { + st, _ := util.DeterministicGenesisState(t, 64) + require.NoError(t, st.SetSlot(slot)) + + stateRoot, err := st.HashTreeRoot(ctx) + require.NoError(t, err) + + blk := util.NewBeaconBlock() + blk.Block.Slot = slot + blk.Block.ProposerIndex = 0 + blk.Block.ParentRoot = parentRoot[:] + blk.Block.StateRoot = stateRoot[:] + + signed := util.HydrateSignedBeaconBlock(blk) + roBlock, err := consensusblocks.NewSignedBeaconBlock(signed) + require.NoError(t, err) + + roBlk, err := consensusblocks.NewROBlock(roBlock) + require.NoError(t, err) + return roBlk, st + } + + tests := []struct { + name string + setupService func(*Service, [32]byte) + expectEvent bool + expectError bool + errorContains string + }{ + { + name: "Block successfully processed - sends event", + setupService: func(s *Service, blockRoot [32]byte) { + // Default setup should work + }, + expectEvent: true, + expectError: false, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + // Create service with required options + opts := testServiceOptsWithDB(t) + service, err := NewService(ctx, opts...) 
+ require.NoError(t, err) + + // Initialize fork choice with genesis block + st, _ := util.DeterministicGenesisState(t, 64) + require.NoError(t, st.SetSlot(0)) + genesisBlock := util.NewBeaconBlock() + genesisBlock.Block.StateRoot = bytesutil.PadTo([]byte("genesisState"), 32) + signedGenesis := util.HydrateSignedBeaconBlock(genesisBlock) + block, err := consensusblocks.NewSignedBeaconBlock(signedGenesis) + require.NoError(t, err) + genesisRoot, err := block.Block().HashTreeRoot() + require.NoError(t, err) + require.NoError(t, service.cfg.BeaconDB.SaveBlock(ctx, block)) + require.NoError(t, service.cfg.BeaconDB.SaveGenesisBlockRoot(ctx, genesisRoot)) + require.NoError(t, service.cfg.BeaconDB.SaveState(ctx, st, genesisRoot)) + + genesisROBlock, err := consensusblocks.NewROBlock(block) + require.NoError(t, err) + require.NoError(t, service.cfg.ForkChoiceStore.InsertNode(ctx, st, genesisROBlock)) + + // Create test block and state with genesis as parent + roBlock, postSt := createTestBlockAndState(t, 100, genesisRoot) + + // Apply additional service setup if provided + if tt.setupService != nil { + tt.setupService(service, roBlock.Root()) + } + + // Create post block process config + cfg := &postBlockProcessConfig{ + ctx: ctx, + roblock: roBlock, + postState: postSt, + isValidPayload: true, + } + + // Execute postBlockProcess + err = service.postBlockProcess(cfg) + + // Check error expectation + if tt.expectError { + require.NotNil(t, err) + if tt.errorContains != "" { + require.ErrorContains(t, tt.errorContains, err) + } + } else { + require.NoError(t, err) + } + + // Give a moment for deferred functions to execute + time.Sleep(10 * time.Millisecond) + + // Check event expectation + notifier := service.cfg.StateNotifier.(*mock.MockStateNotifier) + events := notifier.ReceivedEvents() + + if tt.expectEvent { + require.NotEqual(t, 0, len(events), "Expected event to be sent but none were received") + + // Verify it's a BlockProcessed event + foundBlockProcessed := false + for _, evt := range events { + if evt.Type == statefeed.BlockProcessed { + foundBlockProcessed = true + data, ok := evt.Data.(*statefeed.BlockProcessedData) + require.Equal(t, true, ok, "Event data should be BlockProcessedData") + require.Equal(t, roBlock.Root(), data.BlockRoot, "Event should contain correct block root") + break + } + } + require.Equal(t, true, foundBlockProcessed, "Expected BlockProcessed event type") + } else { + // For no-event cases, verify no BlockProcessed events were sent + for _, evt := range events { + require.NotEqual(t, statefeed.BlockProcessed, evt.Type, + "Expected no BlockProcessed event but one was sent") + } + } + }) + } +} + + func setupLightClientTestRequirements(ctx context.Context, t *testing.T, s *Service, v int, options ...util.LightClientOption) (*util.TestLightClient, *postBlockProcessConfig) { var l *util.TestLightClient switch v { diff --git a/changelog/james-prysm_fix-block-event.md b/changelog/james-prysm_fix-block-event.md new file mode 100644 index 0000000000..6b290e1563 --- /dev/null +++ b/changelog/james-prysm_fix-block-event.md @@ -0,0 +1,3 @@ +### Fixed + +- block event probably shouldn't be sent on certain block processing failures, now sends only on successing processing Block is NON-CANONICAL, Block IS CANONICAL but getFCUArgs FAILS, and Full success \ No newline at end of file From c0ad87df4b83e4fefff40f9bf48f987805a55ace Mon Sep 17 00:00:00 2001 From: james-prysm <90280386+james-prysm@users.noreply.github.com> Date: Thu, 9 Oct 2025 14:21:56 -0500 Subject: [PATCH 011/103] 
fixing web3signer for e2e (#15832) * fixing web3signer for e2e * fixing tests * gaz * reverting fix * extra space --- changelog/james-prysm_fix-web3signer-e2e.md | 7 +++ .../endtoend/components/web3remotesigner.go | 11 +--- testing/endtoend/deps.bzl | 6 +- .../remote-web3signer/types/mock/BUILD.bazel | 2 + .../remote-web3signer/types/mock/mocks.go | 60 ++++++++++++++++--- .../remote-web3signer/types/requests.go | 48 +++++++++++---- .../remote-web3signer/types/requests_test.go | 26 ++++++-- .../types/web3signer_types.go | 6 +- 8 files changed, 131 insertions(+), 35 deletions(-) create mode 100644 changelog/james-prysm_fix-web3signer-e2e.md diff --git a/changelog/james-prysm_fix-web3signer-e2e.md b/changelog/james-prysm_fix-web3signer-e2e.md new file mode 100644 index 0000000000..87f95f2d3f --- /dev/null +++ b/changelog/james-prysm_fix-web3signer-e2e.md @@ -0,0 +1,7 @@ +### Fixed + +- Fixed web3signer e2e, issues caused due to a regression on old fork support + +### Changed + +- updated web3signer to 25.9.1 diff --git a/testing/endtoend/components/web3remotesigner.go b/testing/endtoend/components/web3remotesigner.go index 10619a66c5..f84d8bfc1a 100644 --- a/testing/endtoend/components/web3remotesigner.go +++ b/testing/endtoend/components/web3remotesigner.go @@ -256,17 +256,12 @@ func (w *Web3RemoteSigner) UnderlyingProcess() *os.Process { func createTestnetDir() (string, error) { testNetDir := e2e.TestParams.TestPath + "/web3signer-testnet" configPath := filepath.Join(testNetDir, "config.yaml") - - // TODO: add blob schedule back in as soon as web3signer supports it! configCopy := params.BeaconConfig().Copy() - configCopy.BlobSchedule = nil - // --- - rawYaml := params.ConfigToYaml(configCopy) - // Add in deposit contract in yaml - depContractStr := fmt.Sprintf("\nDEPOSIT_CONTRACT_ADDRESS: %s\n", params.BeaconConfig().DepositContractAddress) - rawYaml = append(rawYaml, []byte(depContractStr)...) + // TODO: remove this when it's removed from web3signer + maxBlobsStr := fmt.Sprintf("\nMAX_BLOBS_PER_BLOCK_ELECTRA: %s\n", fmt.Sprintf("%d", params.BeaconConfig().DeprecatedMaxBlobsPerBlockElectra)) + rawYaml = append(rawYaml, []byte(maxBlobsStr)...) 
if err := file.MkdirAll(testNetDir); err != nil { return "", err diff --git a/testing/endtoend/deps.bzl b/testing/endtoend/deps.bzl index 33f424a369..574a1b6d54 100644 --- a/testing/endtoend/deps.bzl +++ b/testing/endtoend/deps.bzl @@ -6,10 +6,10 @@ lighthouse_archive_name = "lighthouse-%s-x86_64-unknown-linux-gnu.tar.gz" % ligh def e2e_deps(): http_archive( name = "web3signer", - urls = ["https://github.com/Consensys/web3signer/releases/download/25.9.0/web3signer-25.9.0.tar.gz"], - sha256 = "4bc95a86e232050ff071279043e1d04616572d551f6f72aee31108f96dc77bd8", + urls = ["https://github.com/Consensys/web3signer/releases/download/25.9.1/web3signer-25.9.1.tar.gz"], + sha256 = "d84498abbe46fcf10ca44f930eafcd80d7339cbf3f7f7f42a77eb1763ab209cf", build_file = "@prysm//testing/endtoend:web3signer.BUILD", - strip_prefix = "web3signer-25.9.0", + strip_prefix = "web3signer-25.9.1", ) http_archive( diff --git a/validator/keymanager/remote-web3signer/types/mock/BUILD.bazel b/validator/keymanager/remote-web3signer/types/mock/BUILD.bazel index a9a32929c2..40df946d36 100644 --- a/validator/keymanager/remote-web3signer/types/mock/BUILD.bazel +++ b/validator/keymanager/remote-web3signer/types/mock/BUILD.bazel @@ -8,6 +8,8 @@ go_library( visibility = ["//visibility:public"], deps = [ "//config/fieldparams:go_default_library", + "//config/params:go_default_library", + "//consensus-types/primitives:go_default_library", "//proto/prysm/v1alpha1:go_default_library", "//proto/prysm/v1alpha1/validator-client:go_default_library", "//runtime/version:go_default_library", diff --git a/validator/keymanager/remote-web3signer/types/mock/mocks.go b/validator/keymanager/remote-web3signer/types/mock/mocks.go index 5d4d4b164b..765980508a 100644 --- a/validator/keymanager/remote-web3signer/types/mock/mocks.go +++ b/validator/keymanager/remote-web3signer/types/mock/mocks.go @@ -1,10 +1,13 @@ package mock import ( + "encoding/json" "fmt" "strings" fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" + "github.com/OffchainLabs/prysm/v6/config/params" + "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" eth "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" validatorpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1/validator-client" "github.com/OffchainLabs/prysm/v6/runtime/version" @@ -55,6 +58,32 @@ func GetMockSignRequest(t string) *validatorpb.SignRequest { }, SigningSlot: 0, } + case "AGGREGATE_AND_PROOF": + return &validatorpb.SignRequest{ + PublicKey: make([]byte, fieldparams.BLSPubkeyLength), + SigningRoot: make([]byte, fieldparams.RootLength), + SignatureDomain: make([]byte, 4), + Object: &validatorpb.SignRequest_AggregateAttestationAndProof{ + AggregateAttestationAndProof: ð.AggregateAttestationAndProof{ + AggregatorIndex: 0, + Aggregate: ð.Attestation{ + AggregationBits: bitfield.Bitlist{0b1101}, + Data: ð.AttestationData{ + BeaconBlockRoot: make([]byte, fieldparams.RootLength), + Source: ð.Checkpoint{ + Root: make([]byte, fieldparams.RootLength), + }, + Target: ð.Checkpoint{ + Root: make([]byte, fieldparams.RootLength), + }, + }, + Signature: make([]byte, 96), + }, + SelectionProof: make([]byte, fieldparams.BLSSignatureLength), + }, + }, + SigningSlot: 0, + } case "AGGREGATE_AND_PROOF_V2": committeeBits := bitfield.NewBitvector64() committeeBits.SetBitAt(0, true) @@ -82,7 +111,7 @@ func GetMockSignRequest(t string) *validatorpb.SignRequest { SelectionProof: make([]byte, fieldparams.BLSSignatureLength), }, }, - SigningSlot: 0, + SigningSlot: 
primitives.Slot(params.BeaconConfig().ElectraForkEpoch) * primitives.Slot(params.BeaconConfig().SlotsPerEpoch), } case "ATTESTATION": return &validatorpb.SignRequest{ @@ -521,17 +550,34 @@ func AggregationSlotSignRequest() *types.AggregationSlotSignRequest { // AggregateAndProofV2SignRequest is a mock implementation of the AggregateAndProofV2SignRequest. func AggregateAndProofV2SignRequest(ver int) *types.AggregateAndProofV2SignRequest { + var aggregateAndProofJSON []byte + var slot primitives.Slot + if ver < version.Electra { + aggregateAndProofData := &types.AggregateAndProof{ + AggregatorIndex: "0", + Aggregate: Attestation(), + SelectionProof: make([]byte, fieldparams.BLSSignatureLength), + } + aggregateAndProofJSON, _ = json.Marshal(aggregateAndProofData) + slot = 0 // Pre-Electra slot + } else { + aggregateAndProofData := &types.AggregateAndProofElectra{ + AggregatorIndex: "0", + Aggregate: AttestationElectra(), + SelectionProof: make([]byte, fieldparams.BLSSignatureLength), + } + aggregateAndProofJSON, _ = json.Marshal(aggregateAndProofData) + slot = primitives.Slot(params.BeaconConfig().ElectraForkEpoch) * primitives.Slot(params.BeaconConfig().SlotsPerEpoch) + } + // Generate ForkInfo dynamically based on the slot + forkInfo, _ := types.MapForkInfo(slot, make([]byte, fieldparams.RootLength)) return &types.AggregateAndProofV2SignRequest{ Type: "AGGREGATE_AND_PROOF_V2", - ForkInfo: ForkInfo(), + ForkInfo: forkInfo, SigningRoot: make([]byte, fieldparams.RootLength), AggregateAndProof: &types.AggregateAndProofV2{ Version: strings.ToUpper(version.String(ver)), - Data: &types.AggregateAndProofElectra{ - AggregatorIndex: "0", - Aggregate: AttestationElectra(), - SelectionProof: make([]byte, fieldparams.BLSSignatureLength), - }, + Data: aggregateAndProofJSON, }, } } diff --git a/validator/keymanager/remote-web3signer/types/requests.go b/validator/keymanager/remote-web3signer/types/requests.go index c458c6c7f0..a3f99babd2 100644 --- a/validator/keymanager/remote-web3signer/types/requests.go +++ b/validator/keymanager/remote-web3signer/types/requests.go @@ -1,6 +1,7 @@ package types import ( + "encoding/json" "fmt" "strings" @@ -67,28 +68,53 @@ func GetAggregationSlotSignRequest(request *validatorpb.SignRequest, genesisVali // GetAggregateAndProofV2SignRequest maps the request for signing type AGGREGATE_AND_PROOF_V2 on Electra changes. 
func GetAggregateAndProofV2SignRequest(v int, request *validatorpb.SignRequest, genesisValidatorsRoot []byte) (*AggregateAndProofV2SignRequest, error) { - aggregateAttestationAndProof, ok := request.Object.(*validatorpb.SignRequest_AggregateAttestationAndProofElectra) - if !ok { - return nil, errors.New("failed to cast request object to aggregate attestation and proof") - } - if aggregateAttestationAndProof == nil { - return nil, errors.New("invalid sign request: AggregateAndProof is nil") - } fork, err := MapForkInfo(request.SigningSlot, genesisValidatorsRoot) if err != nil { return nil, err } - aggregateAndProof, err := MapAggregateAndProofElectra(aggregateAttestationAndProof.AggregateAttestationAndProofElectra) - if err != nil { - return nil, err + + var aggregateAndProofJSON []byte + if v < version.Electra { + aggregateAttestationAndProof, ok := request.Object.(*validatorpb.SignRequest_AggregateAttestationAndProof) + if !ok { + return nil, errors.New("failed to cast request object to aggregate attestation and proof") + } + if aggregateAttestationAndProof == nil { + return nil, errors.New("invalid sign request: AggregateAndProof is nil") + } + aggregateAndProof, err := MapAggregateAndProof(aggregateAttestationAndProof.AggregateAttestationAndProof) + if err != nil { + return nil, err + } + aggregateAndProofJSON, err = json.Marshal(aggregateAndProof) + if err != nil { + return nil, err + } + } else { + aggregateAttestationAndProof, ok := request.Object.(*validatorpb.SignRequest_AggregateAttestationAndProofElectra) + if !ok { + return nil, errors.New("failed to cast request object to aggregate attestation and proof Electra") + } + if aggregateAttestationAndProof == nil { + return nil, errors.New("invalid sign request: AggregateAndProof is nil") + } + aggregateAndProof, err := MapAggregateAndProofElectra(aggregateAttestationAndProof.AggregateAttestationAndProofElectra) + if err != nil { + return nil, err + } + aggregateAndProofJSON, err = json.Marshal(aggregateAndProof) + if err != nil { + return nil, err + } } + return &AggregateAndProofV2SignRequest{ Type: "AGGREGATE_AND_PROOF_V2", ForkInfo: fork, SigningRoot: request.SigningRoot, AggregateAndProof: &AggregateAndProofV2{ Version: strings.ToUpper(version.String(v)), - Data: aggregateAndProof, + Data: aggregateAndProofJSON, }, }, nil } diff --git a/validator/keymanager/remote-web3signer/types/requests_test.go b/validator/keymanager/remote-web3signer/types/requests_test.go index 32a678bf00..a12619146e 100644 --- a/validator/keymanager/remote-web3signer/types/requests_test.go +++ b/validator/keymanager/remote-web3signer/types/requests_test.go @@ -1,6 +1,7 @@ package types_test import ( + "encoding/json" "reflect" "testing" @@ -15,6 +16,7 @@ import ( func TestGetAggregateAndProofV2SignRequest(t *testing.T) { type args struct { + version int request *validatorpb.SignRequest genesisValidatorsRoot []byte } @@ -25,24 +27,40 @@ func TestGetAggregateAndProofV2SignRequest(t *testing.T) { wantErr bool }{ { - name: "Happy Path Test", + name: "Happy Path Test Electra", args: args{ + version: version.Electra, request: mock.GetMockSignRequest("AGGREGATE_AND_PROOF_V2"), genesisValidatorsRoot: make([]byte, fieldparams.RootLength), }, want: mock.AggregateAndProofV2SignRequest(version.Electra), wantErr: false, }, + { + name: "Happy Path Test Pre-Electra", + args: args{ + version: version.Deneb, + request: mock.GetMockSignRequest("AGGREGATE_AND_PROOF"), + genesisValidatorsRoot: make([]byte, fieldparams.RootLength), + }, + want: 
mock.AggregateAndProofV2SignRequest(version.Deneb), + wantErr: false, + }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - got, err := types.GetAggregateAndProofV2SignRequest(version.Electra, tt.args.request, tt.args.genesisValidatorsRoot) + got, err := types.GetAggregateAndProofV2SignRequest(tt.args.version, tt.args.request, tt.args.genesisValidatorsRoot) if (err != nil) != tt.wantErr { t.Errorf("GetAggregateAndProofV2SignRequest() error = %v, wantErr %v", err, tt.wantErr) return } - if !reflect.DeepEqual(got, tt.want) { - t.Errorf("GetAggregateAndProofV2SignRequest() got = %v, want %v", got, tt.want) + // Marshal to JSON for comparison since ForkInfo is generated dynamically + gotJSON, err := json.Marshal(got) + require.NoError(t, err) + wantJSON, err := json.Marshal(tt.want) + require.NoError(t, err) + if string(gotJSON) != string(wantJSON) { + t.Errorf("JSON mismatch:\ngot: %s\nwant: %s", string(gotJSON), string(wantJSON)) } }) } diff --git a/validator/keymanager/remote-web3signer/types/web3signer_types.go b/validator/keymanager/remote-web3signer/types/web3signer_types.go index 49a7028320..682bb23da8 100644 --- a/validator/keymanager/remote-web3signer/types/web3signer_types.go +++ b/validator/keymanager/remote-web3signer/types/web3signer_types.go @@ -3,6 +3,8 @@ package types import ( + "encoding/json" + "github.com/ethereum/go-ethereum/common/hexutil" ) @@ -32,8 +34,8 @@ type AggregateAndProofV2SignRequest struct { // AggregateAndProofV2 is a wrapper object for AggregateAndProofV2SignRequest type AggregateAndProofV2 struct { - Version string `json:"version" validate:"required"` - Data *AggregateAndProofElectra `json:"data" validate:"required"` // specifies Electra for now + Version string `json:"version" validate:"required"` + Data json.RawMessage `json:"data" validate:"required"` } // AttestationSignRequest is a request object for web3signer sign api. From a94ea1e5f5c99651cc12d58253045db5769b9ace Mon Sep 17 00:00:00 2001 From: Manu NALEPA Date: Thu, 9 Oct 2025 21:30:26 +0200 Subject: [PATCH 012/103] Add grandine in known agents (#15829) * `knownAgentVersions`: Sort. * `knownAgentVersions`: Add Grandine. * Add changelog. --- beacon-chain/p2p/monitoring.go | 7 ++++--- changelog/manu-grandine-known-agents.md | 2 ++ 2 files changed, 6 insertions(+), 3 deletions(-) create mode 100644 changelog/manu-grandine-known-agents.md diff --git a/beacon-chain/p2p/monitoring.go b/beacon-chain/p2p/monitoring.go index 31890211b1..35cb266b47 100644 --- a/beacon-chain/p2p/monitoring.go +++ b/beacon-chain/p2p/monitoring.go @@ -11,14 +11,15 @@ import ( var ( knownAgentVersions = []string{ + "erigon/caplin", + "grandine", + "js-libp2p", "lighthouse", + "lodestar", "nimbus", "prysm", "teku", - "lodestar", - "js-libp2p", "rust-libp2p", - "erigon/caplin", } p2pPeerCount = promauto.NewGaugeVec(prometheus.GaugeOpts{ Name: "p2p_peer_count", diff --git a/changelog/manu-grandine-known-agents.md b/changelog/manu-grandine-known-agents.md new file mode 100644 index 0000000000..849526554d --- /dev/null +++ b/changelog/manu-grandine-known-agents.md @@ -0,0 +1,2 @@ +### Added +- Add Grandine to P2P known agents. 
(Useful for metrics) \ No newline at end of file From 4e47905884bda2d13678d6704887a7196aa33839 Mon Sep 17 00:00:00 2001 From: Potuz Date: Fri, 10 Oct 2025 17:55:29 -0300 Subject: [PATCH 013/103] Do not mark blocks as invalid unnecessarily (#15846) --- beacon-chain/blockchain/error.go | 2 +- beacon-chain/blockchain/receive_block.go | 3 +++ changelog/potuz_invalid_not_descendant.md | 3 +++ 3 files changed, 7 insertions(+), 1 deletion(-) create mode 100644 changelog/potuz_invalid_not_descendant.md diff --git a/beacon-chain/blockchain/error.go b/beacon-chain/blockchain/error.go index 50f535f387..35630eb139 100644 --- a/beacon-chain/blockchain/error.go +++ b/beacon-chain/blockchain/error.go @@ -30,7 +30,7 @@ var ( // errWSBlockNotFoundInEpoch is returned when a block is not found in the WS cache or DB within epoch. errWSBlockNotFoundInEpoch = errors.New("weak subjectivity root not found in db within epoch") // ErrNotDescendantOfFinalized is returned when a block is not a descendant of the finalized checkpoint - ErrNotDescendantOfFinalized = invalidBlock{error: errors.New("not descendant of finalized checkpoint")} + ErrNotDescendantOfFinalized = errors.New("not descendant of finalized checkpoint") // ErrNotCheckpoint is returned when a given checkpoint is not a // checkpoint in any chain known to forkchoice ErrNotCheckpoint = errors.New("not a checkpoint in forkchoice") diff --git a/beacon-chain/blockchain/receive_block.go b/beacon-chain/blockchain/receive_block.go index 9326812295..00064234b7 100644 --- a/beacon-chain/blockchain/receive_block.go +++ b/beacon-chain/blockchain/receive_block.go @@ -219,6 +219,9 @@ func (s *Service) validateExecutionAndConsensus( eg.Go(func() error { var err error postState, err = s.validateStateTransition(ctx, preState, block) + if errors.Is(err, ErrNotDescendantOfFinalized) { + return invalidBlock{error: err, root: block.Root()} + } if err != nil { return errors.Wrap(err, "failed to validate consensus state transition function") } diff --git a/changelog/potuz_invalid_not_descendant.md b/changelog/potuz_invalid_not_descendant.md new file mode 100644 index 0000000000..5a647d2f48 --- /dev/null +++ b/changelog/potuz_invalid_not_descendant.md @@ -0,0 +1,3 @@ +### Fixed + +- Do not mark blocks as invalid from ErrNotDescendantOfFinalized From 6973cd2c5fe9639d75d866c5cd56e642740eeb6d Mon Sep 17 00:00:00 2001 From: Preston Van Loon Date: Fri, 10 Oct 2025 16:04:38 -0500 Subject: [PATCH 014/103] Changelog v6.1.2 (#15843) * Changelog for v6.0.5 * Changelog for v6.1.0 * Changelog for v6.1.2 * Removing old fragments --- CHANGELOG.md | 301 ++++++++++++++++++ changelog/2025-08-25-docs-links.md | 2 - .../Alleysira-fix-meta-in-getPeers-resp.md | 3 - ...Mark_fix-web3signer-test-error-handling.md | 3 - ...aloretka_fix-leakybucket-test-duplicate.md | 2 - changelog/James-prysm_parallel-goodbyes.md | 3 - .../James-prysm_persistent-seq-number.md | 3 - ...d-blob-schedule-to-config-spec-endpoint.md | 3 - changelog/bastin_abstract-save-update.md | 3 - .../bastin_abstract-update-validation.md | 3 - changelog/bastin_attestation-api-ssz.md | 4 - .../bastin_canonical-light-client-updates.md | 3 - .../bastin_checkpoints-only-bootstraps.md | 3 - changelog/bastin_delayed-lc-broadcast.md | 3 - changelog/bastin_fix-lc-versioning.md | 3 - changelog/bastin_lc-fulu-spectest.md | 3 - changelog/bastin_lc-p2p-validation.md | 4 - .../bastin_move-lc-package-out-of-core.md | 3 - changelog/bastin_move_broadcast_to_store.md | 5 - changelog/bastin_put-lc-store-behind-flag.md | 3 - 
.../bastin_read-updates-from-store-in-rpc.md | 3 - .../bastin_refactor-lc-bootstrap-tests.md | 3 - changelog/bastin_refactor-lc-kv-tests.md | 3 - changelog/bastin_refactor-lc-testutils.md | 3 - changelog/bastin_refine-lc-rpc-server.md | 3 - changelog/bastin_remove-unused.md | 3 - changelog/bastin_save-updates-in-store.md | 3 - changelog/bastin_unify-lc-api-bootstrap.md | 3 - changelog/bastin_unify-lc-api-updates.md | 3 - changelog/bastin_version-to-fork-epoch.md | 3 - ...populate-variable-length-analyze-object.md | 3 - changelog/fix-fulu-bid-compatibility.md | 3 - changelog/fix-genesis-block-eip6110.md | 3 - changelog/hyunchel_fix-misleading-log-msg.md | 3 - changelog/james-prysm_alpaha6-spec-tests.md | 3 - changelog/james-prysm_block-proposal-fulu.md | 3 - changelog/james-prysm_config-cleanup.md | 3 - changelog/james-prysm_debug-data-columns.md | 3 - changelog/james-prysm_default-duties-v2.md | 3 - .../james-prysm_deprecate-publish-blos.md | 3 - .../james-prysm_fix-builder-version-check.md | 3 - changelog/james-prysm_fix-config-parsing.md | 3 - changelog/james-prysm_fix-da-metric.md | 3 - .../james-prysm_fix-duties-v2-assignment.md | 3 - changelog/james-prysm_fix-find-peers.md | 3 - .../james-prysm_fix-justified-blocker.md | 3 - changelog/james-prysm_fix-origin-block-log.md | 3 - changelog/james-prysm_fulu-web3signer.md | 7 - changelog/james-prysm_get-blob-fulu.md | 3 - changelog/james-prysm_get-duties-v2.md | 3 - changelog/james-prysm_improve-duties-v2.md | 3 - changelog/james-prysm_move-ticker.md | 3 - changelog/james-prysm_move-web-flag.md | 3 - changelog/james-prysm_post-block-ssz.md | 3 - .../james-prysm_proposer-lookahead-api.md | 3 - .../james-prysm_remove-cononical-head.md | 3 - changelog/james-prysm_remove-ssz-only-flag.md | 7 - .../james-prysm_safe-validator-shutdown.md | 11 - .../james-prysm_skip-omit-config-values.md | 3 - changelog/james-prysm_ssz-validator-block.md | 7 - changelog/james-prysm_validator-duties-v2.md | 3 - changelog/james_prysm-fix-blocker-notfound.md | 3 - .../james_prysm-fulu-general-spectests.md | 3 - changelog/jihoonsong_support-fulu-genesis.md | 3 - changelog/jtraglia_das-core-fixes.md | 3 - changelog/jtraglia_dev-to-master.md | 3 - ...jtraglia_fix-blobs-bundle-v2-max-proofs.md | 3 - .../jtraglia_move-reconstruction-lock.md | 3 - changelog/jtraglia_nits-dcsc-verification.md | 3 - changelog/jtraglia_specrefs.md | 3 - .../jtraglia_specrefs_compute_fork_digest.md | 3 - changelog/jtraglia_update-within-da-period.md | 3 - changelog/jtraglia_various-renaming.md | 3 - ...kaloyantanev_rework-dv-selection-proofs.md | 3 - changelog/kasey_decouple-proto-params.md | 2 - changelog/kasey_exclude-far-future-fork.md | 2 - changelog/kasey_fix-15607.md | 2 - changelog/kasey_fix-cache-panic.md | 2 - changelog/kasey_fix-e2e-panic.md | 2 - changelog/kasey_fusaka-nfd.md | 2 - changelog/kasey_gzip-skip-sse.md | 2 - changelog/kasey_idempotent-registration.md | 2 - changelog/kasey_ignore-far-future-mismatch.md | 2 - changelog/kasey_init-genesis-asap.md | 3 - changelog/kasey_invalid-digest-log.md | 2 - changelog/kasey_isolate-committee-cache.md | 2 - changelog/kasey_log-invalid-ee-root.md | 2 - .../kasey_max-blobs-use-network-schedule.md | 2 - changelog/kasey_omit-non-blob-fields.md | 2 - changelog/kasey_refactor-fork-schedules.md | 2 - .../kasey_reject-mismatched-schedules.md | 2 - changelog/kasey_rename-backfill-flag.md | 2 - .../kasey_start-discovery-immediately.md | 2 - changelog/kasey_unwedge-ethspecify.md | 2 - changelog/kasey_update-gossipsub.md | 2 - 
changelog/manu-TestHostIsResolved.md | 2 - changelog/manu-bootnodes.md | 2 - changelog/manu-broadcast.md | 4 - changelog/manu-cgc.md | 3 - changelog/manu-eas.md | 3 - changelog/manu-fix-log.md | 3 - changelog/manu-from-disk-test.md | 3 - changelog/manu-go-1.25.1.md | 3 - changelog/manu-inclusion-cache-key.md | 3 - changelog/manu-log-ms.md | 3 - changelog/manu-logs-datacolumns.md | 2 - changelog/manu-logs-to-trace.md | 4 - changelog/manu-peer-ban-at-restart.md | 2 - changelog/manu-peerdas-beacon-api.md | 2 - changelog/manu-peerdas-builder.md | 2 - changelog/manu-peerdas-c-kzg-update.md | 2 - .../manu-peerdas-columns-by-range-handler.md | 2 - .../manu-peerdas-columns-by-root-handler.md | 2 - changelog/manu-peerdas-columns-logs.md | 2 - changelog/manu-peerdas-das.md | 2 - ...erdas-dataColumnSidecarByRootRPCHandler.md | 2 - changelog/manu-peerdas-get-blobs-V2.md | 3 - changelog/manu-peerdas-metadata.md | 2 - changelog/manu-peerdas-node.md | 2 - changelog/manu-peerdas-reconstruct.md | 4 - .../manu-peerdas-reconstruction-delay.md | 2 - .../manu-peerdas-reconstruction-from-el.md | 2 - changelog/manu-peerdas-reconstruction.md | 2 - .../manu-peerdas-send-data-column-requests.md | 3 - changelog/manu-peerdas-small.md | 12 - changelog/manu-peerdas-sync.md | 2 - .../manu-peerdas-syncing-disjoint-network.md | 10 - changelog/manu-peerdas-variou.md | 9 - changelog/manu-peerdas-various.md | 3 - changelog/manu-remove-clock.md | 2 - changelog/manu-retry-fetch-origin-columns.md | 2 - changelog/manu-skip-bad-peers.md | 3 - changelog/manu-subscriptions.md | 2 - changelog/manu-wait-init-custody-info.md | 2 - .../muzry_fix_get_block_attestation_v2.md | 3 - changelog/muzry_fix_produce_sync_committee.md | 4 - changelog/muzry_fix_prysmctl_panics.md | 3 - changelog/muzry_fix_state_randao.md | 3 - ...x_submit_pool_sync_committee_signatures.md | 3 - changelog/muzry_fix_unremove_empty_dir.md | 3 - ...ry_fix_validator_client_committee_index.md | 3 - changelog/muzry_update_not_found_status.md | 2 - changelog/nisdas_mv_slice_permanent.md | 3 - changelog/pop_fix-bug.md | 2 - changelog/pop_fix-subnet.md | 2 - changelog/potuz_add_publishv2_metric.md | 3 - changelog/potuz_avoid_exit_info.md | 3 - changelog/potuz_change_error_message.md | 2 - changelog/potuz_change_insertchain.md | 3 - changelog/potuz_double_receive_block.md | 3 - changelog/potuz_eip_7917.md | 3 - changelog/potuz_event_happy_path.md | 2 - changelog/potuz_fix_forkchoice_startup.md | 3 - changelog/potuz_fix_initialize_lookahead.md | 2 - changelog/potuz_fix_races_in_tests.md | 3 - changelog/potuz_fix_stater.md | 2 - changelog/potuz_next_epoch_proposer_duties.md | 3 - changelog/potuz_process_pending_atts.md | 3 - changelog/potuz_remove_bls_broadcast.md | 3 - changelog/potuz_reorg_safe_vc.md | 6 - changelog/potuz_start_from_justified.md | 2 - changelog/potuz_update_gohashtree.md | 3 - changelog/potuz_update_quic_go.md | 3 - changelog/potuz_update_spectest.md | 3 - changelog/pvl-blob-cache-panic.md | 3 - changelog/pvl-blst-go-repo.md | 3 - changelog/pvl-debug-log.md | 3 - changelog/pvl-downscore-init-sync.md | 3 - changelog/pvl-erigon-agent.md | 3 - changelog/pvl-fulu-StreamBlocksAltair.md | 3 - changelog/pvl-fulu-prevent-datacolumns-oob.md | 3 - changelog/pvl-gh-runners.md | 3 - changelog/pvl-go-1.24.5.md | 3 - changelog/pvl-go-1.24.6.md | 3 - changelog/pvl-go-1.25.md | 5 - changelog/pvl-p2p-colocation-whitelist.md | 3 - changelog/pvl-peer-metrics-fix.md | 3 - changelog/pvl-peerdas-peer-fanout.md | 3 - changelog/pvl-regression-15369.md | 3 - 
changelog/pvl-rm-eth1voting-tool.md | 3 - changelog/pvl-slasherkv-timeout.md | 2 - changelog/pvl-spectest-size.md | 3 - changelog/pvl-strip.md | 3 - changelog/pvl-testing-context.md | 3 - changelog/pvl-time.md | 3 - changelog/pvl-update-cc-debian11.md | 3 - changelog/pvl-update-eth-clients.md | 6 - changelog/pvl-v6.0.4.md | 3 - changelog/pvl-v6.1.2.md | 3 + changelog/pvl_gcr_offchain.md | 3 - changelog/radek_agg-sc-messages.md | 3 - .../radek_consensus-value-unavailable.md | 3 - changelog/radek_do-not-compare-liveness.md | 3 - changelog/radek_duplicate-sync-aggregate.md | 3 - .../radek_electra-committee-assertion.md | 3 - changelog/radek_fix-gzip.md | 3 - .../radek_fix-max-epoch-calculation-once.md | 3 - changelog/radek_redesign-pending-att-queue.md | 3 - changelog/radek_reorganize-lc-processing.md | 3 - changelog/radek_ssz-pending.md | 3 - changelog/radek_state-fuzz-gc.md | 3 - changelog/radek_update-github-bug-template.md | 3 - changelog/raulk_beacon-api-metadata.md | 3 - changelog/rose2221-develop.md | 5 - .../sahil-4555-refactor-to-reflect-typefor.md | 3 - ...sahil-4555-refactor-to-use-atomic-types.md | 3 - changelog/sahil-4555-use-inbuilt-max-min.md | 3 - ...ss1315_fix-priority-queue-pop-lock-race.md | 2 - changelog/satushh-getblobsv2-retry.md | 3 - changelog/syjn99_initialize-ssz-ql.md | 3 - changelog/syjn99_persist-p2p-seqnum.md | 7 - changelog/syjn99_refactor-htrutil.md | 3 - changelog/syjn99_save-state-efficient-fulu.md | 3 - changelog/syjn99_ssz-ql-bitlist-bitvector.md | 3 - changelog/syjn99_ssz-ql-list.md | 3 - changelog/syjn99_ssz-ql-nested-list.md | 3 - changelog/syjn99_ssz-ql-tag-parser.md | 4 - ...sandroil_replace_grpc_gateway_flag_name.md | 2 - changelog/tt_45.md | 3 - changelog/tt_beans.md | 3 - changelog/tt_check_pending_att.md | 3 - changelog/tt_chicken.md | 3 - changelog/tt_duty.md | 3 - changelog/tt_egg.md | 3 - changelog/tt_fish.md | 3 - changelog/tt_formula_mlk.md | 7 - changelog/tt_fugu_.md | 3 - changelog/tt_milk.md | 3 - changelog/tt_noodles.md | 7 - changelog/tt_onion.md | 3 - changelog/tt_opt-val-lookup.md | 3 - changelog/tt_post-fulu-mev-boost-protocol.md | 3 - changelog/tt_state_root_debug.md | 3 - changelog/tt_steak.md | 3 - changelog/tt_sushi.md | 3 - .../ttsao_add-fulu-fork-transition-tests.md | 3 - ...ttsao_add-fulu-proposer-lookahead-tests.md | 3 - changelog/ttsao_fix-attestation-cache-key.md | 3 - .../ttsao_fix-blinded-block-v2-endpoint.md | 3 - .../ttsao_fix-equivocation-block-field.md | 3 - changelog/ttsao_fix-gofmt-formatting.md | 3 - .../ttsao_implement-kzg-batch-verification.md | 3 - .../ttsao_move-cache-key-outside-locks.md | 3 - .../ttsao_optimize-attestation-batching.md | 3 - changelog/ttsao_refactor-beacon-core-types.md | 3 - .../ttsao_refactor-proto-beacon-blocks.md | 3 - changelog/ttsao_return-early-req-column.md | 3 - changelog/ttsao_set-fulu-fork-epochs.md | 3 - changelog/ttsao_simplify-golangci-config.md | 3 - .../ttsao_update-consensus-spec-alpha5.md | 3 - ...ttsao_update-consensus-spec-v160-alpha4.md | 3 - changelog/ttsao_update-ssz-generated.md | 3 - .../user-agent-addition-validator-outbound.md | 4 - 253 files changed, 304 insertions(+), 772 deletions(-) delete mode 100644 changelog/2025-08-25-docs-links.md delete mode 100644 changelog/Alleysira-fix-meta-in-getPeers-resp.md delete mode 100644 changelog/DeVikingMark_fix-web3signer-test-error-handling.md delete mode 100644 changelog/Galoretka_fix-leakybucket-test-duplicate.md delete mode 100644 changelog/James-prysm_parallel-goodbyes.md delete mode 100644 
changelog/James-prysm_persistent-seq-number.md delete mode 100644 changelog/add-blob-schedule-to-config-spec-endpoint.md delete mode 100644 changelog/bastin_abstract-save-update.md delete mode 100644 changelog/bastin_abstract-update-validation.md delete mode 100644 changelog/bastin_attestation-api-ssz.md delete mode 100644 changelog/bastin_canonical-light-client-updates.md delete mode 100644 changelog/bastin_checkpoints-only-bootstraps.md delete mode 100644 changelog/bastin_delayed-lc-broadcast.md delete mode 100644 changelog/bastin_fix-lc-versioning.md delete mode 100644 changelog/bastin_lc-fulu-spectest.md delete mode 100644 changelog/bastin_lc-p2p-validation.md delete mode 100644 changelog/bastin_move-lc-package-out-of-core.md delete mode 100644 changelog/bastin_move_broadcast_to_store.md delete mode 100644 changelog/bastin_put-lc-store-behind-flag.md delete mode 100644 changelog/bastin_read-updates-from-store-in-rpc.md delete mode 100644 changelog/bastin_refactor-lc-bootstrap-tests.md delete mode 100644 changelog/bastin_refactor-lc-kv-tests.md delete mode 100644 changelog/bastin_refactor-lc-testutils.md delete mode 100644 changelog/bastin_refine-lc-rpc-server.md delete mode 100644 changelog/bastin_remove-unused.md delete mode 100644 changelog/bastin_save-updates-in-store.md delete mode 100644 changelog/bastin_unify-lc-api-bootstrap.md delete mode 100644 changelog/bastin_unify-lc-api-updates.md delete mode 100644 changelog/bastin_version-to-fork-epoch.md delete mode 100644 changelog/fernantho_populate-variable-length-analyze-object.md delete mode 100644 changelog/fix-fulu-bid-compatibility.md delete mode 100644 changelog/fix-genesis-block-eip6110.md delete mode 100644 changelog/hyunchel_fix-misleading-log-msg.md delete mode 100644 changelog/james-prysm_alpaha6-spec-tests.md delete mode 100644 changelog/james-prysm_block-proposal-fulu.md delete mode 100644 changelog/james-prysm_config-cleanup.md delete mode 100644 changelog/james-prysm_debug-data-columns.md delete mode 100644 changelog/james-prysm_default-duties-v2.md delete mode 100644 changelog/james-prysm_deprecate-publish-blos.md delete mode 100644 changelog/james-prysm_fix-builder-version-check.md delete mode 100644 changelog/james-prysm_fix-config-parsing.md delete mode 100644 changelog/james-prysm_fix-da-metric.md delete mode 100644 changelog/james-prysm_fix-duties-v2-assignment.md delete mode 100644 changelog/james-prysm_fix-find-peers.md delete mode 100644 changelog/james-prysm_fix-justified-blocker.md delete mode 100644 changelog/james-prysm_fix-origin-block-log.md delete mode 100644 changelog/james-prysm_fulu-web3signer.md delete mode 100644 changelog/james-prysm_get-blob-fulu.md delete mode 100644 changelog/james-prysm_get-duties-v2.md delete mode 100644 changelog/james-prysm_improve-duties-v2.md delete mode 100644 changelog/james-prysm_move-ticker.md delete mode 100644 changelog/james-prysm_move-web-flag.md delete mode 100644 changelog/james-prysm_post-block-ssz.md delete mode 100644 changelog/james-prysm_proposer-lookahead-api.md delete mode 100644 changelog/james-prysm_remove-cononical-head.md delete mode 100644 changelog/james-prysm_remove-ssz-only-flag.md delete mode 100644 changelog/james-prysm_safe-validator-shutdown.md delete mode 100644 changelog/james-prysm_skip-omit-config-values.md delete mode 100644 changelog/james-prysm_ssz-validator-block.md delete mode 100644 changelog/james-prysm_validator-duties-v2.md delete mode 100644 changelog/james_prysm-fix-blocker-notfound.md delete mode 100644 
changelog/james_prysm-fulu-general-spectests.md delete mode 100644 changelog/jihoonsong_support-fulu-genesis.md delete mode 100644 changelog/jtraglia_das-core-fixes.md delete mode 100644 changelog/jtraglia_dev-to-master.md delete mode 100644 changelog/jtraglia_fix-blobs-bundle-v2-max-proofs.md delete mode 100644 changelog/jtraglia_move-reconstruction-lock.md delete mode 100644 changelog/jtraglia_nits-dcsc-verification.md delete mode 100644 changelog/jtraglia_specrefs.md delete mode 100644 changelog/jtraglia_specrefs_compute_fork_digest.md delete mode 100644 changelog/jtraglia_update-within-da-period.md delete mode 100644 changelog/jtraglia_various-renaming.md delete mode 100644 changelog/kaloyantanev_rework-dv-selection-proofs.md delete mode 100644 changelog/kasey_decouple-proto-params.md delete mode 100644 changelog/kasey_exclude-far-future-fork.md delete mode 100644 changelog/kasey_fix-15607.md delete mode 100644 changelog/kasey_fix-cache-panic.md delete mode 100644 changelog/kasey_fix-e2e-panic.md delete mode 100644 changelog/kasey_fusaka-nfd.md delete mode 100644 changelog/kasey_gzip-skip-sse.md delete mode 100644 changelog/kasey_idempotent-registration.md delete mode 100644 changelog/kasey_ignore-far-future-mismatch.md delete mode 100644 changelog/kasey_init-genesis-asap.md delete mode 100644 changelog/kasey_invalid-digest-log.md delete mode 100644 changelog/kasey_isolate-committee-cache.md delete mode 100644 changelog/kasey_log-invalid-ee-root.md delete mode 100644 changelog/kasey_max-blobs-use-network-schedule.md delete mode 100644 changelog/kasey_omit-non-blob-fields.md delete mode 100644 changelog/kasey_refactor-fork-schedules.md delete mode 100644 changelog/kasey_reject-mismatched-schedules.md delete mode 100644 changelog/kasey_rename-backfill-flag.md delete mode 100644 changelog/kasey_start-discovery-immediately.md delete mode 100644 changelog/kasey_unwedge-ethspecify.md delete mode 100644 changelog/kasey_update-gossipsub.md delete mode 100644 changelog/manu-TestHostIsResolved.md delete mode 100644 changelog/manu-bootnodes.md delete mode 100644 changelog/manu-broadcast.md delete mode 100644 changelog/manu-cgc.md delete mode 100644 changelog/manu-eas.md delete mode 100644 changelog/manu-fix-log.md delete mode 100644 changelog/manu-from-disk-test.md delete mode 100644 changelog/manu-go-1.25.1.md delete mode 100644 changelog/manu-inclusion-cache-key.md delete mode 100644 changelog/manu-log-ms.md delete mode 100644 changelog/manu-logs-datacolumns.md delete mode 100644 changelog/manu-logs-to-trace.md delete mode 100644 changelog/manu-peer-ban-at-restart.md delete mode 100644 changelog/manu-peerdas-beacon-api.md delete mode 100644 changelog/manu-peerdas-builder.md delete mode 100644 changelog/manu-peerdas-c-kzg-update.md delete mode 100644 changelog/manu-peerdas-columns-by-range-handler.md delete mode 100644 changelog/manu-peerdas-columns-by-root-handler.md delete mode 100644 changelog/manu-peerdas-columns-logs.md delete mode 100644 changelog/manu-peerdas-das.md delete mode 100644 changelog/manu-peerdas-dataColumnSidecarByRootRPCHandler.md delete mode 100644 changelog/manu-peerdas-get-blobs-V2.md delete mode 100644 changelog/manu-peerdas-metadata.md delete mode 100644 changelog/manu-peerdas-node.md delete mode 100644 changelog/manu-peerdas-reconstruct.md delete mode 100644 changelog/manu-peerdas-reconstruction-delay.md delete mode 100644 changelog/manu-peerdas-reconstruction-from-el.md delete mode 100644 changelog/manu-peerdas-reconstruction.md delete mode 100644 
changelog/manu-peerdas-send-data-column-requests.md delete mode 100644 changelog/manu-peerdas-small.md delete mode 100644 changelog/manu-peerdas-sync.md delete mode 100644 changelog/manu-peerdas-syncing-disjoint-network.md delete mode 100644 changelog/manu-peerdas-variou.md delete mode 100644 changelog/manu-peerdas-various.md delete mode 100644 changelog/manu-remove-clock.md delete mode 100644 changelog/manu-retry-fetch-origin-columns.md delete mode 100644 changelog/manu-skip-bad-peers.md delete mode 100644 changelog/manu-subscriptions.md delete mode 100644 changelog/manu-wait-init-custody-info.md delete mode 100644 changelog/muzry_fix_get_block_attestation_v2.md delete mode 100644 changelog/muzry_fix_produce_sync_committee.md delete mode 100644 changelog/muzry_fix_prysmctl_panics.md delete mode 100644 changelog/muzry_fix_state_randao.md delete mode 100644 changelog/muzry_fix_submit_pool_sync_committee_signatures.md delete mode 100644 changelog/muzry_fix_unremove_empty_dir.md delete mode 100644 changelog/muzry_fix_validator_client_committee_index.md delete mode 100644 changelog/muzry_update_not_found_status.md delete mode 100644 changelog/nisdas_mv_slice_permanent.md delete mode 100644 changelog/pop_fix-bug.md delete mode 100644 changelog/pop_fix-subnet.md delete mode 100644 changelog/potuz_add_publishv2_metric.md delete mode 100644 changelog/potuz_avoid_exit_info.md delete mode 100644 changelog/potuz_change_error_message.md delete mode 100644 changelog/potuz_change_insertchain.md delete mode 100644 changelog/potuz_double_receive_block.md delete mode 100644 changelog/potuz_eip_7917.md delete mode 100644 changelog/potuz_event_happy_path.md delete mode 100644 changelog/potuz_fix_forkchoice_startup.md delete mode 100644 changelog/potuz_fix_initialize_lookahead.md delete mode 100644 changelog/potuz_fix_races_in_tests.md delete mode 100644 changelog/potuz_fix_stater.md delete mode 100644 changelog/potuz_next_epoch_proposer_duties.md delete mode 100644 changelog/potuz_process_pending_atts.md delete mode 100644 changelog/potuz_remove_bls_broadcast.md delete mode 100644 changelog/potuz_reorg_safe_vc.md delete mode 100644 changelog/potuz_start_from_justified.md delete mode 100644 changelog/potuz_update_gohashtree.md delete mode 100644 changelog/potuz_update_quic_go.md delete mode 100644 changelog/potuz_update_spectest.md delete mode 100644 changelog/pvl-blob-cache-panic.md delete mode 100644 changelog/pvl-blst-go-repo.md delete mode 100644 changelog/pvl-debug-log.md delete mode 100644 changelog/pvl-downscore-init-sync.md delete mode 100644 changelog/pvl-erigon-agent.md delete mode 100644 changelog/pvl-fulu-StreamBlocksAltair.md delete mode 100644 changelog/pvl-fulu-prevent-datacolumns-oob.md delete mode 100644 changelog/pvl-gh-runners.md delete mode 100644 changelog/pvl-go-1.24.5.md delete mode 100644 changelog/pvl-go-1.24.6.md delete mode 100644 changelog/pvl-go-1.25.md delete mode 100644 changelog/pvl-p2p-colocation-whitelist.md delete mode 100644 changelog/pvl-peer-metrics-fix.md delete mode 100644 changelog/pvl-peerdas-peer-fanout.md delete mode 100644 changelog/pvl-regression-15369.md delete mode 100644 changelog/pvl-rm-eth1voting-tool.md delete mode 100644 changelog/pvl-slasherkv-timeout.md delete mode 100644 changelog/pvl-spectest-size.md delete mode 100644 changelog/pvl-strip.md delete mode 100644 changelog/pvl-testing-context.md delete mode 100644 changelog/pvl-time.md delete mode 100644 changelog/pvl-update-cc-debian11.md delete mode 100644 changelog/pvl-update-eth-clients.md delete mode 
100644 changelog/pvl-v6.0.4.md create mode 100644 changelog/pvl-v6.1.2.md delete mode 100644 changelog/pvl_gcr_offchain.md delete mode 100644 changelog/radek_agg-sc-messages.md delete mode 100644 changelog/radek_consensus-value-unavailable.md delete mode 100644 changelog/radek_do-not-compare-liveness.md delete mode 100644 changelog/radek_duplicate-sync-aggregate.md delete mode 100644 changelog/radek_electra-committee-assertion.md delete mode 100644 changelog/radek_fix-gzip.md delete mode 100644 changelog/radek_fix-max-epoch-calculation-once.md delete mode 100644 changelog/radek_redesign-pending-att-queue.md delete mode 100644 changelog/radek_reorganize-lc-processing.md delete mode 100644 changelog/radek_ssz-pending.md delete mode 100644 changelog/radek_state-fuzz-gc.md delete mode 100644 changelog/radek_update-github-bug-template.md delete mode 100644 changelog/raulk_beacon-api-metadata.md delete mode 100644 changelog/rose2221-develop.md delete mode 100644 changelog/sahil-4555-refactor-to-reflect-typefor.md delete mode 100644 changelog/sahil-4555-refactor-to-use-atomic-types.md delete mode 100644 changelog/sahil-4555-use-inbuilt-max-min.md delete mode 100644 changelog/sashass1315_fix-priority-queue-pop-lock-race.md delete mode 100644 changelog/satushh-getblobsv2-retry.md delete mode 100644 changelog/syjn99_initialize-ssz-ql.md delete mode 100644 changelog/syjn99_persist-p2p-seqnum.md delete mode 100644 changelog/syjn99_refactor-htrutil.md delete mode 100644 changelog/syjn99_save-state-efficient-fulu.md delete mode 100644 changelog/syjn99_ssz-ql-bitlist-bitvector.md delete mode 100644 changelog/syjn99_ssz-ql-list.md delete mode 100644 changelog/syjn99_ssz-ql-nested-list.md delete mode 100644 changelog/syjn99_ssz-ql-tag-parser.md delete mode 100644 changelog/tomasandroil_replace_grpc_gateway_flag_name.md delete mode 100644 changelog/tt_45.md delete mode 100644 changelog/tt_beans.md delete mode 100644 changelog/tt_check_pending_att.md delete mode 100644 changelog/tt_chicken.md delete mode 100644 changelog/tt_duty.md delete mode 100644 changelog/tt_egg.md delete mode 100644 changelog/tt_fish.md delete mode 100644 changelog/tt_formula_mlk.md delete mode 100644 changelog/tt_fugu_.md delete mode 100644 changelog/tt_milk.md delete mode 100644 changelog/tt_noodles.md delete mode 100644 changelog/tt_onion.md delete mode 100644 changelog/tt_opt-val-lookup.md delete mode 100644 changelog/tt_post-fulu-mev-boost-protocol.md delete mode 100644 changelog/tt_state_root_debug.md delete mode 100644 changelog/tt_steak.md delete mode 100644 changelog/tt_sushi.md delete mode 100644 changelog/ttsao_add-fulu-fork-transition-tests.md delete mode 100644 changelog/ttsao_add-fulu-proposer-lookahead-tests.md delete mode 100644 changelog/ttsao_fix-attestation-cache-key.md delete mode 100644 changelog/ttsao_fix-blinded-block-v2-endpoint.md delete mode 100644 changelog/ttsao_fix-equivocation-block-field.md delete mode 100644 changelog/ttsao_fix-gofmt-formatting.md delete mode 100644 changelog/ttsao_implement-kzg-batch-verification.md delete mode 100644 changelog/ttsao_move-cache-key-outside-locks.md delete mode 100644 changelog/ttsao_optimize-attestation-batching.md delete mode 100644 changelog/ttsao_refactor-beacon-core-types.md delete mode 100644 changelog/ttsao_refactor-proto-beacon-blocks.md delete mode 100644 changelog/ttsao_return-early-req-column.md delete mode 100644 changelog/ttsao_set-fulu-fork-epochs.md delete mode 100644 changelog/ttsao_simplify-golangci-config.md delete mode 100644 
changelog/ttsao_update-consensus-spec-alpha5.md delete mode 100644 changelog/ttsao_update-consensus-spec-v160-alpha4.md delete mode 100644 changelog/ttsao_update-ssz-generated.md delete mode 100644 changelog/user-agent-addition-validator-outbound.md diff --git a/CHANGELOG.md b/CHANGELOG.md index ff8645ca5c..eefbd31817 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,6 +4,307 @@ All notable changes to this project will be documented in this file. The format is based on Keep a Changelog, and this project adheres to Semantic Versioning. +## [v6.1.2](https://github.com/prysmaticlabs/prysm/compare/v6.1.1...v6.1.2) - 2025-10-10 + +This release has several important fixes to improve Prysm's peering, stability, and attestation inclusion on mainnet and all testnets. All node operators are encouraged to update to this release as soon as practical for the best mainnet performance. + +### Added + +- Added a 1 minute timeout on PruneAttestationOnEpoch operations to prevent very large bolt transactions. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15746) +- Added expected delay before broadcasting light client p2p messages. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15776) + +### Changed + +- Replaced reflect.TypeOf with reflect.TypeFor. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15627) +- Bazel builds with `--config=release` now properly apply `--strip=always` to strip debug symbols from the release assets. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15774) +- Add sources for compute_fork_digest to specrefs. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15699) +- Aggregate logs when broadcasting data column sidecars (one per root instead of one per sidecar). [[PR]](https://github.com/prysmaticlabs/prysm/pull/15748) +- `c-kzg-4844`: Update from `v2.1.1` to `v2.1.5`. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15708) +- Process pending attestations as soon as the block arrives. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15791) +- Compare received LC messages over gossipsub with locally computed ones before forwarding. Also no longer save updates. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15783) +- Optimize pending attestation processing by adding batching. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15801) + +### Removed + +- Removed unused configs and hid Prysm-specific configs from the `/eth/v1/config/spec` endpoint. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15797) + +### Fixed + +- SSZ-QL: Support nested `List` type (e.g., `ExecutionPayload.Transactions`). [[PR]](https://github.com/prysmaticlabs/prysm/pull/15725) +- Fixed "Unsupported config field kind; value forwarded verbatim" errors for the string type. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15773) +- Fix the /eth/v1/config/spec endpoint to properly skip omitted values. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15777) +- Fix ProduceSyncCommitteeContribution not returning an error when the committee index is out of range. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15770) +- Improved getduties v2 by replacing the expensive helpers.PrecomputeCommittees() with CommitteeAssignments. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15784) +- Avoid unnecessary calls to `ExitInformation()`. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15764) +- `inclusionProofKey`: Include the commitments in the key. 
[[PR]](https://github.com/prysmaticlabs/prysm/pull/15795) +- Do not reject peers if they have a mismatched version|digest when the next fork epoch is FAR_FUTURE_EPOCH. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15798) +- Don't include entries in the fork schedule if their epoch is set to far future epoch. Avoids reporting next_fork_version == . [[PR]](https://github.com/prysmaticlabs/prysm/pull/15799) +- Wait for custody info to be initialized before querying it. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15804) +- Fixes level=error msg="Could not clean up dirty states" error="OriginBlockRoot: not found in db" prefix=state-gen error when starting in kurtosis. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15808) +- Correctly clear disconnected peers from `connected_libp2p_peers` and `connected_libp2p_peers_average_scores`. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15807) +- `buildStatusFromStream`: Respond `statusV2` only if Fulu is enabled. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15818) +- Send our real earliest available slot when sending a Status request post Fulu instead of `0`. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15818) +- Switch to built-in min/max. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15817) +- `findPeersWithSubnets`: If the filter function returns an error for a given peer, log an error and skip the peer instead of aborting the whole function. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15815) +- `computeIndicesByRootByPeer`: If the loop returns an error for a given peer, log an error and skip the peer instead of aborting the whole function. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15815) +- Fixed issue #15738 where separate goroutines assumed sole responsibility for topic registration. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15779) + +## [v6.1.0](https://github.com/prysmaticlabs/prysm/compare/v6.0.5...v6.1.0) and [v6.1.1](https://github.com/prysmaticlabs/prysm/compare/v6.1.0...v6.1.1) - 2025-09-26 + +This release has support for Fusaka testnets as well as many mainnet improvements. Testnet operators are required to update prior to the testnet fork date. See [PR #15721](https://github.com/OffchainLabs/prysm/pull/15721). + +Mainnet operators are encouraged to update per their regular update cadence. + +Note: This release was re-issued as v6.1.1 to distribute release assets without debug symbols. See issue [#15760](https://github.com/OffchainLabs/prysm/issues/15760). + +#### Noteworthy improvements, changes and bugfixes: +- The `--disable-experimental-state` beacon-node flag has been removed, marking the full graduation of the [Copy-on-write design](https://hackmd.io/zlTJ6Qe_RiueT3y2R77BvA) for BeaconState fields, which reduces the memory overhead of keeping multiple BeaconStates in RAM for block processing. Congrats @rkapka! +- The behavior set by the `--attest_timely` flag is now on by default, with the flag itself deprecated. +- GetDutiesV2 introduced, lowering duty request latency and beacon-node load. Multiple other improvements and bugfixes have been made to harden the validator run loop. +- New validator flag `--max-health-checks` configures a validator to switch to a fallback beacon node after the given number of health check failures. +- Improvements to rest-mode validator, defaulting to SSZ where available and adding SSZ support to more Beacon API endpoints. +- Beacon API now honors the gzip content-encoding header. +- Log timestamps now include milliseconds. 
+- Full fusaka support for testnets! + +**Special thanks to external contributors!**: @Alleysira, @KaloyanTanev, @rose2221 + +[1] To override this limit, use the validator flag `--suggested-gas-limit` or set the `builder.gas_limit` setting in your [proposer settings file](https://prysm.offchainlabs.com/docs/configure-prysm/fee-recipient/#advanced-configure-mev-builder-and-gas-limit). + + +### Added + +- PeerDAS: Add `CustodyInfo` in `BeaconNode`. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15378) +- GetDutiesV2 gRPC function, removes committee list from duties, replaced with committee length and validator committee index. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15273) +- Add SSZ support for two attestation APIs: `/eth/v1/validator/attestation_data` and `/eth/v2/validator/aggregate_attestation`. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15377) +- Added feature flag for validator client to use get duties v2. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15380) +- PeerDAS: Implement DAS. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15367) +- `verifyBlobCommitmentCount`: Print max allowed blob count in error message. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15386) +- Data column support for beacon api event endpoint. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15387) +- Implement EIP-7917: Stable proposer lookahead. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15129) +- Implement `dataColumnSidecarByRootRPCHandler`. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15405) +- New ssz-only flag for validator client to enable calling rest apis in SSZ, starting with get block endpoint. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15390) +- Implement `dataColumnSidecarsByRangeRPCHandler`. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15421) +- Add SSZ support for `submitPoolAttestationsV2` beacon API. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15422) +- New `StatusV2` proto message. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15423) +- Implement `SendDataColumnSidecarsByRangeRequest`. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15430) +- Implement `SendDataColumnSidecarsByRootRequest`. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15430) +- Implement beacon API blob sidecar endpoint for Fulu. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15436) +- PeerDAS: Implement the new Fulu Metadata. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15440) +- PeerDAS: Implement reconstruction. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15454) +- Implement engine method `GetBlobsV2`. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15469) +- Implement execution `ReconstructDataColumnSidecars`, which reconstructs data column sidecars from data fetched from the execution layer. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15469) +- New `--batch-verifier-limit` flag to configure the max number of signatures to batch verify on gossip. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15467) +- `disable-attest-timely` flag to disable attest timely. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15410) +- Added `max-health-checks` flag that sets the maximum number of times the validator tries to check the health of the beacon node before timing out. 0 or a negative number is indefinite (the default is 0). [[PR]](https://github.com/prysmaticlabs/prysm/pull/15401) +- Add method `VersionToForkEpochMap()` to the `BeaconChainConfig` in the `params` package. 
[[PR]](https://github.com/prysmaticlabs/prysm/pull/15482) +- Add log capitalization analyzer and apply changes across codebase. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15452) +- Slot aware cache for seen data column gossip p2p to reduce memory usages. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15477) +- **Gzip Compression for Beacon API:**. [[PR]](https://github.com/prysmaticlabs/prysm/pull/14982) +- Implement data column sidecars reconstruction with data retrieved from the execution client when receiving a block via gossip. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15483) +- Add support for parsing and handling `ExecutionPayloadAndBlobsBundleV2`. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15503) +- Added new PRYSM_API_OVERRIDE_ACCEPT environment variable to override ssz accept header as a replacement to flag. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15433) +- Implements the `/eth/v1/beacon/states/{state_id}/proposer_lookahead` beacon api endpoint. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15525) +- Added new metadata fields (attnets,syncnets,custody_group_count) to `/eth/v1/node/identity`. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15506) +- Add BLOB_SCHEDULE field to `/eth/v1/config/spec` endpoint response to expose blob scheduling configuration for networks. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15485) +- Add timing metric `publish_block_v2_duration_milliseconds` to measure processing duration of the `PublishBlockV2` beacon API endpoint. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15539) +- Add Fulu case for `saveStatesEfficientInternal`. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15553) +- Support for fusaka `nfd` enr field, and changes to the semantics of the eth2 field. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15501) +- Implement post-Fulu MEV-boost protocol changes where relays only return status codes for blinded block submissions. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15486) +- Added fulu block support to StreamBlocksAltair. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15583) +- All outbound HTTP requests from the validator client now include a custom `User-Agent` header in the format `Prysm//`. This enhances observability and enables upstream systems to correctly identify Prysm validator clients by their name and version. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15574) +- Fixes [#15435](https://github.com/OffchainLabs/prysm/issues/15435). [[PR]](https://github.com/prysmaticlabs/prysm/pull/15574) +- Data columns syncing for Fusaka. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15564) +- Added specification references which map spec to implementation. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15592) +- Warm data columns storage cache at start. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15629) +- Add `--data-column-path` flag. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15629) +- Initialize package for SSZ Query Language. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15588) +- In FetchDataColumnSidecars, after retrieving sidecars from peers, if still some sidecars are missing for a given root and if a reconstruction is possible (combining sidecars already retrieved from peers and sidecars in the storage), then reconstruct missing sidecars instead of trying to fetch the missing ones from peers. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15593) +- Fulu block proposal changes for beacon api and gRPC. 
[[PR]](https://github.com/prysmaticlabs/prysm/pull/15628) +- Retry to fetch origin data column sidecars when starting from a checkpoint. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15634) +- Aggregate and pack sync committee messages into blocks. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15608) +- Support `List` type for SSZ-QL. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15637) +- Configured the beacon node to seek peers when we have validator custody requirements. If one or more validators are connected to the beacon node, then the beacon node should seek a diverse set of peers such that broadcasting to all data column subnets for a block proposal is more efficient. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15654) +- SSZ-QL: Add element information for `Vector` type. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15668) +- SSZ-QL: Support multi-dimensional tag parsing. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15668) +- Added more metadata for debug logs when initial sync requests fail for "invalid data returned from peer" errors. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15674) +- Adding Fulu types for web3signer. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15498) +- Added erigon/caplin to known p2p agent strings. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15678) +- Add Fulu fork transition tests for mainnet and minimal configurations. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15666) +- Fulu proposer lookahead epoch processing tests for mainnet and minimal configurations. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15667) +- Populate sszInfo of variable-length fields in AnalyzeObjects. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15676) +- KZG proof batch verification for data column gossip validation. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15617) +- Added flag `--p2p-colocation-whitelist` to accept CIDRs which will bypass the p2p colocation restrictions. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15685) +- Fulu spec tests coverage for covering the general package. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15682) +- Implemented syncing in a disjoint network with respect to data column sidecars subscribed by peers. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15644) +- Add retry logic when GetBlobsV2 is called. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15520) +- Call GetBlobsV2 as soon as we receive the first data column sidecar or block. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15520) +- Added new post fulu /eth/v1/beacon/blobs/{block_id} endpoint. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15610) +- SSZ-QL: Handle `Bitlist` and `Bitvector` types. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15704) +- Adding `/eth/v1/debug/beacon/data_column_sidecars/{block_id}` endpoint. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15701) +- Support Fulu genesis block. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15652) +- Update spectests to 1.6.0-beta.0. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15741) + +### Changed + +- `parseIndices`: Return `[]int` instead of `[]uint64`. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15386) +- Reclaim memory manually in some tests that fuzz the beacon state. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15395) +- when REST api is enabled the get Block api defaults to requesting and receiving SSZ instead of JSON, JSON is the fallback. 
[[PR]](https://github.com/prysmaticlabs/prysm/pull/15390) +- Remove "invalid" from logs for incoming blob sidecar that is missing parent or out of range slot. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15428) +- In `TopicFromMessage`: No longer assume that all Fulu-specific topics are V3 only. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15423) +- `readChunkedDataColumnSidecar`: Add `validationFunctions` parameter and add tests. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15423) +- Put the initiation of LC Store behind the `enable-light-client` flag. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15464) +- Default batch signature verification limit increased from 50 to 1000. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15467) +- Increase mainnet DefaultBuilderGasLimit from 36M to 45M. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15455) +- Attest timely is now default. `attest-timely` flag is now deprecated. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15410) +- Move data col reconstruction log to a more accurate place in the code. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15475) +- Makes the multivalue slice permanent in the state and removes old paths. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15414) +- Previously, we optimistically believed the beacon node was healthy and tried to get chain start, but now we do a health check at the start. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15401) +- Optimize proposer inclusion proof calculation by pre-caching subtries. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15473) +- Move setter/getter functions for LC Bootstrap into LcStore for a unified interface. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15476) +- Changed `enable-duties-v2` to `disable-duties-v2` to default to using duties v2. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15445) +- Changed `uint64` genesis time to use `time.Time`. Also did some refactoring and cleanup that was enabled by these changes. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15419) +- Add milliseconds to log timestamps. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15496) +- Move setter/getter functions for LC Updates into LcStore for a unified interface. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15488) +- Change LC Bootstrap logic to only save bootstraps on finalized checkpoints instead of every block. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15497) +- Update links to consensus-specs to point to `master` branch. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15523) +- Changed from an in-memory to a persistent discv5 db so that local node information for the key persists, keeping the ENR sequence number deterministic when restarting. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15519) +- Fix some nits associated with data column sidecar verification. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15521) +- Include state root in StateNotFoundError for better debugging of consensus validation failures. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15533) +- When shutting down the sync service we now send p2p goodbye messages in parallel to maximize the chances of propagating goodbyes to all peers before an unsafe shutdown. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15542) +- Do not compare liveness response with LH in e2e Beacon API evaluator. 
[[PR]](https://github.com/prysmaticlabs/prysm/pull/15556) +- Moved the broadcast and event notifier logic for saving LC updates to the store function. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15540) +- Fixed the issue with broadcasting more than twice per LC Finality update, and the if-case bug. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15540) +- Separated the finality update validation rules for saving and broadcasting. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15540) +- Update validator custody to the latest specification, including the new status message. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15532) +- Beacon api optimize validator lookup for large batch request size. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15558) +- Check pending block is in forkchoice before importing pending attestation. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15547) +- Redesign the pending attestation queue. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15024) +- Replaced hardcoded `grpc-gateway-port` with `flags.HTTPServerPort.Name` in `testing/endtoend/components/validator.go`, resolving an inline TODO for improved flag consistency. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15236) +- Refactor `htrutil.go` by removing redundant codes. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15453) +- Improved sync unaggregated attestation cache key outside of lock path. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15572) +- Move aggregated attestation cache key generation outside of critical locks to improve performance. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15579) +- Renamed various variables/functions to be more clear. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15529) +- Update consensus spec to v1.6.0-alpha.4 and implement data column support for forkchoice spectests. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15590) +- Reject incoming connections when the fork schedule of the connecting peer (parsed from their ENR) has a matching next_fork_epoch, but mismatched next_fork_version or nfd (next fork digest). [[PR]](https://github.com/prysmaticlabs/prysm/pull/15604) +- Update gohashtree to v0.0.5-beta. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15619) +- Updated consensus spec from v1.6.0-alpha.4 to v1.6.0-alpha.5 with adjusted minimal config parameters. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15621) +- Changed old atomic functions to new atomic.Int for safer and clearer code. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15625) +- Start from justified checkpoint by default. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15636) +- Updated consensus spec from v1.6.0-alpha.5 to v1.6.0-alpha.6. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15658) +- Updated outdated documentation links for Web3Signer and Why Bazel. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15631) +- changed validatorpb.SignRequest_AggregateAttestationAndProof signing type to use AggregateAttestationAndProofV2 on web3signer. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15498) +- Pre-calculate exit epoch, churn and active balance before processing slashings to reduce CPU load. [[PR]](https://github.com/prysmaticlabs/prysm/pull/14990) +- Switching default of validator client rest call for submit block from JSON to SSZ. Fallback json will be attempted. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15645) +- Deprecated and added error to /prysm/v1/beacon/blobs endpoint for post Fulu fork. 
[[PR]](https://github.com/prysmaticlabs/prysm/pull/15643) +- Upgraded gossipsub to v0.14.2 and libp2p to v0.39.1. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15677) +- Prysm will now downscore peers that return invalid block_by_range responses. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15686) +- Filtering peers for data column subnets: Added a one-epoch slack to the peer’s head slot view. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15644) +- Fetching data column sidecars: If not all requested sidecars are available for a given root, return the successfully retrieved ones along with a map indicating which could not be fetched. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15644) +- Fetching origin data column sidecars: If only some sidecars are fetched, save the retrieved ones and retry fetching the missing ones on the next attempt. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15644) +- Renamed the `--enable-experimental-backfill` flag to `--enable-backfill` to signal that it is more mature. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15690) +- Restrict best LC update collection to canonical blocks. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15585) +- PeerDAS: Wait for a random delay, then reconstruct data column sidecars and immediately reseed instead of immediately reconstructing, waiting and then reseeding. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15705) +- Clarified misleading log messages in beacon-chain/rpc/service gRPC module. [[PR]](https://github.com/prysmaticlabs/prysm/pull/13063) +- Broadcast the block then the sidecars, instead of the block and sidecars concurrently. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15720) +- Broadcast and receive sidecars concurrently, instead of sequentially. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15720) +- Changed blst dependency from `http_archive` to `go_repository` so that gazelle can keep it in sync with go.mod. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15709) +- Updated go to v1.25.1. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15641) +- Updated rules_go to v0.57.0. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15641) +- Updated protobuf to 28.3. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15641) +- Set Fulu fork epochs for Holesky, Hoodi, and Sepolia testnets. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15721) +- Improve logging of data column sidecars. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15728) +- Updated go.mod to v1.25.1. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15740) + +### Deprecated + +- Deprecated `p2p-metadata` flag. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15554) + +### Removed + +- Removed //tools/eth1voting tool. This is no longer needed as the beacon chain no longer uses eth1data voting since Electra. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15415) +- Remove deposit count from sync new block log. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15420) +- Unused `DataColumnIdentifier` proto message. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15423) +- Validator client will no longer need to call the canonical head api. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15480) +- Partially reverting PR #15390, removing the `ssz-only` debug flag until there is a real use case for the flag. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15433) + +### Fixed + +- Added regression test for [PR 15369](https://github.com/OffchainLabs/prysm/pull/15369). 
[[PR]](https://github.com/prysmaticlabs/prysm/pull/15379) +- Added missing `meta` field to the response of the endpoint `/eth/v1/node/peers` to align with the Beacon API spec (#15370). [[PR]](https://github.com/prysmaticlabs/prysm/pull/15371) +- Fix blob metric name for peer count. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15412) +- Non deterministic output order of `dataColumnSidecarByRootRPCHandler`. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15441) +- Fixed the versioning bug for light client data types in the Beacon API. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15400) +- `--chain-config-file`: Do not use mainnet boot nodes anymore. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15460) +- Fix panic on dutiesv2 when there is no committee assignment on the epoch. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15466) +- Allow SSZ requests for pending deposits, partial withdrawals and consolidations. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15474) +- Validator client shuts down cleanly on error instead of fatal error. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15401) +- Fixes edge case where starting the validator client with new validator keys starts the slot ticker too early, resulting in replayed slots in the main runner loop. Fixes edge case of replayed slots when waiting for account activations. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15479) +- DV aggregations failing first slot of the epoch. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15156) +- Skip genesis block retrieval when EIP-6110 deposit requests have started to prevent "pruned history unavailable" errors with execution clients that have pruned pre-merge data. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15494) +- Fixed lookahead initialization at the fulu fork. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15450) +- Write `Content-Encoding` header in the response properly when gzip encoding is requested. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15499) +- Subnets subscription: Avoid dynamic subscribing blocking in case not enough peers per subnet are found. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15471) +- Do not apply the gzip middleware to the event stream API. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15517) +- Fixed various reasons why a node is banned by its peers when it stops. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15505) +- Use `MinEpochsForDataColumnSidecarsRequest` in `WithinDAPeriod` when in Fulu. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15522) +- Return zero value for `Eth-Consensus-Block-Value` on error to avoid missed block proposals. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15526) +- Moved reconstruction lock to prevent unnecessary work. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15528) +- Fixed variable names, links, and typos in das core code. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15524) +- Fix builder bid version compatibility to support Electra bids with Fulu blocks. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15536) +- Aligned the submitPoolSyncCommitteeSignatures response with the Beacon API specification. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15516) +- Trigger payload attribute event as soon as an early block is processed. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15541) +- Beacon-api proposer duty fulu computation. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15534) +- Fixed the max proofs in `BlobsBundleV2`. 
[[PR]](https://github.com/prysmaticlabs/prysm/pull/15530) +- Prevent a race on double `ReceiveBlock`. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15565) +- Fixed [#15544](https://github.com/OffchainLabs/prysm/issues/15544): Persist metadata sequence number if it is needed (e.g., when using the static peer ID option or when Fulu is enabled). [[PR]](https://github.com/prysmaticlabs/prysm/pull/15554) +- Fix the validateConsensus endpoint handler. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15548) +- Builder version check was using the head block version instead of the current fork's version based on slot, fixes e2e from https://github.com/OffchainLabs/prysm/commit/57e27199bdb9b3ef1af14c3374999aba5e0788a3. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15568) +- Don't submit duplicate `SignedContributionAndProof` messages. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15571) +- Genesis state, timestamp and validators root now ubiquitously available at node startup, supporting tech debt cleanup. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15470) +- Fixed a condition where the blob cache could panic when there were fewer sidecars than expected, or none, in the cache entry. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15581) +- Fixed endpoint response to return 404 or 400 after isOptimistic check. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15559) +- Safeguard against accidental out of bounds array access in dataColumnSidecars method. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15586) +- Fixed NewSignedBeaconBlock calls to use Block field for proper equivocation handling. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15595) +- Fixed regression in find peer functions introduced in PR#15471, where nodes with equal sequence numbers were incorrectly skipped and the peer count was incorrectly reduced when replacing nodes with higher sequence numbers. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15578) +- Fix bug where stale computed value in closure excludes newly required (e.g. attestation) subscriptions. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15603) +- Fix bug where arguments of fillInForkChoiceMissingBlocks were incorrectly placed. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15639) +- Fix next epoch proposer duties in Fulu by advancing the state to the beginning of the current epoch. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15642) +- Fix getBlockAttestationsV2 to return [] instead of null when data is empty. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15651) +- Fixed the issue of empty dirs not being deleted when using `--blob-storage-layout=by-epoch`. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15573) +- Start topic-based peer discovery before initial sync completes so that we have coverage of needed columns when range syncing. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15660) +- Fixed an off-by-one in forkchoice startup. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15684) +- Mitigate potential supernode clustering due to libp2p ConnManager pruning of non-supernodes, see https://github.com/OffchainLabs/prysm/issues/15607. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15681) +- Initial sync: Do not request data column sidecars for blocks before the retention period. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15644) +- Fixed incorrect attestation data request where the assigned committee index was used after Electra, instead of 0. 
[[PR]](https://github.com/prysmaticlabs/prysm/pull/15696) +- Use v2 endpoint for blinded block submission post-Fulu. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15716) +- Fixed 'justified' block support missing on blocker.Block and optimized logic between blocker.Block and blocker.Blob. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15715) +- Fix prysmctl panic when baseFee is not set in genesis.json. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15687) +- Fix getStateRandao not returning historic RANDAO mix values. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15653) +- Fix race in PriorityQueue.Pop by checking emptiness under the write lock (#15726). [[PR]](https://github.com/prysmaticlabs/prysm/pull/15726) +- In P2P service start, wait for the custody info to be correctly initialized. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15732) +- `createLocalNode`: Wait before retrying to retrieve the custody group count if not present. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15735) +- Replace fmt.Printf with proper test error handling in web3signer keymanager tests, using require.NoError(t, err) instead of t.Fatalf for better error handling and debugging. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15723) +- Fixed regression introduced in PR #15715: blocker now returns an error for not found, and error handling correctly handles the error and returns 404 instead of 500. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15742) +- DA metric was not writing correctly because the if statement on err was accidentally flipped. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15743) + +### Security + +- Updated go to version 1.24.5. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15561) +- Updated distroless/cc-debian11 to latest to resolve CVE-2024-2961. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15562) +- Updated go to version 1.24.6. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15566) +- Updated quic-go to latest version. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15749) + + +## [v6.0.5](https://github.com/prysmaticlabs/prysm/compare/v6.0.4...v6.0.5) - 2025-09-26 + +We are releasing a patch update on top of v6.0.4 to address a stability issue with quic-go. +All operators should update as soon as possible to v6.0.5 or later. + +### Security + +- Updated quic-go to latest version. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15749) + ## [v6.0.4](https://github.com/prysmaticlabs/prysm/compare/v6.0.3...v6.0.4) - 2025-06-05 This release has more work on PeerDAS, and light client support. Additionally, we have a few bug fixes: diff --git a/changelog/2025-08-25-docs-links.md b/changelog/2025-08-25-docs-links.md deleted file mode 100644 index fde4586300..0000000000 --- a/changelog/2025-08-25-docs-links.md +++ /dev/null @@ -1,2 +0,0 @@ -### Changed -- Updated outdated documentation links for Web3Signer and Why Bazel. 
diff --git a/changelog/Alleysira-fix-meta-in-getPeers-resp.md b/changelog/Alleysira-fix-meta-in-getPeers-resp.md deleted file mode 100644 index fe2cba98b9..0000000000 --- a/changelog/Alleysira-fix-meta-in-getPeers-resp.md +++ /dev/null @@ -1,3 +0,0 @@ -### Fixed - -- Added missing `meta` field to the response of the endpoint `/eth/v1/node/peers` to align with the Beacon API spec (#15370) \ No newline at end of file diff --git a/changelog/DeVikingMark_fix-web3signer-test-error-handling.md b/changelog/DeVikingMark_fix-web3signer-test-error-handling.md deleted file mode 100644 index 3f3457949d..0000000000 --- a/changelog/DeVikingMark_fix-web3signer-test-error-handling.md +++ /dev/null @@ -1,3 +0,0 @@ -### Fixed - -- Replace fmt.Printf with proper test error handling in web3signer keymanager tests, using require.NoError(t, err) instead of t.Fatalf for better error handling and debugging. diff --git a/changelog/Galoretka_fix-leakybucket-test-duplicate.md b/changelog/Galoretka_fix-leakybucket-test-duplicate.md deleted file mode 100644 index 57e70c5505..0000000000 --- a/changelog/Galoretka_fix-leakybucket-test-duplicate.md +++ /dev/null @@ -1,2 +0,0 @@ -## Ignored -- Remove duplicate test case in `container/leaky-bucket/collector_test.go` to reduce redundancy. (#15672) diff --git a/changelog/James-prysm_parallel-goodbyes.md b/changelog/James-prysm_parallel-goodbyes.md deleted file mode 100644 index ae45a85a78..0000000000 --- a/changelog/James-prysm_parallel-goodbyes.md +++ /dev/null @@ -1,3 +0,0 @@ -### Changed - -- when shutting down the sync service we now send p2p goodbye messages in parallel to maxmimize changes of propogating goodbyes to all peers before an unsafe shutdown. \ No newline at end of file diff --git a/changelog/James-prysm_persistent-seq-number.md b/changelog/James-prysm_persistent-seq-number.md deleted file mode 100644 index 4c4f193392..0000000000 --- a/changelog/James-prysm_persistent-seq-number.md +++ /dev/null @@ -1,3 +0,0 @@ -### Changed - -- changed from in-memory to persistent discv5 db to keep local node information persistent for the key to keep the ENR sequence number deterministic when restarting. \ No newline at end of file diff --git a/changelog/add-blob-schedule-to-config-spec-endpoint.md b/changelog/add-blob-schedule-to-config-spec-endpoint.md deleted file mode 100644 index 3f5aa09e50..0000000000 --- a/changelog/add-blob-schedule-to-config-spec-endpoint.md +++ /dev/null @@ -1,3 +0,0 @@ -### Added - -- Add BLOB_SCHEDULE field to `/eth/v1/config/spec` endpoint response to expose blob scheduling configuration for networks. \ No newline at end of file diff --git a/changelog/bastin_abstract-save-update.md b/changelog/bastin_abstract-save-update.md deleted file mode 100644 index ae9233b6af..0000000000 --- a/changelog/bastin_abstract-save-update.md +++ /dev/null @@ -1,3 +0,0 @@ -### Ignored - -- Moved the validation logic for saving LC updates to the store function. \ No newline at end of file diff --git a/changelog/bastin_abstract-update-validation.md b/changelog/bastin_abstract-update-validation.md deleted file mode 100644 index abc158546c..0000000000 --- a/changelog/bastin_abstract-update-validation.md +++ /dev/null @@ -1,3 +0,0 @@ -### Ignored - -- Moved the validation logic for saving LC finality/optimistic updates to the store. 
\ No newline at end of file diff --git a/changelog/bastin_attestation-api-ssz.md b/changelog/bastin_attestation-api-ssz.md deleted file mode 100644 index 451cf0d78c..0000000000 --- a/changelog/bastin_attestation-api-ssz.md +++ /dev/null @@ -1,4 +0,0 @@ -### Added - -- Add SSZ support for two attestation APIs: `/eth/v1/validator/attestation_data` and - `/eth/v2/validator/aggregate_attestation`. \ No newline at end of file diff --git a/changelog/bastin_canonical-light-client-updates.md b/changelog/bastin_canonical-light-client-updates.md deleted file mode 100644 index 8c33072598..0000000000 --- a/changelog/bastin_canonical-light-client-updates.md +++ /dev/null @@ -1,3 +0,0 @@ -### Changed - -- Restrict best LC update collection to canonical blocks. \ No newline at end of file diff --git a/changelog/bastin_checkpoints-only-bootstraps.md b/changelog/bastin_checkpoints-only-bootstraps.md deleted file mode 100644 index 75f1e90b9a..0000000000 --- a/changelog/bastin_checkpoints-only-bootstraps.md +++ /dev/null @@ -1,3 +0,0 @@ -### Changed - -- Change LC Bootstrap logic to only save bootstraps on finalized checkpoints instead of every block. \ No newline at end of file diff --git a/changelog/bastin_delayed-lc-broadcast.md b/changelog/bastin_delayed-lc-broadcast.md deleted file mode 100644 index 71e5864c99..0000000000 --- a/changelog/bastin_delayed-lc-broadcast.md +++ /dev/null @@ -1,3 +0,0 @@ -### Added - -- Added expected delay before broadcasting light client p2p messages. \ No newline at end of file diff --git a/changelog/bastin_fix-lc-versioning.md b/changelog/bastin_fix-lc-versioning.md deleted file mode 100644 index 51579b6936..0000000000 --- a/changelog/bastin_fix-lc-versioning.md +++ /dev/null @@ -1,3 +0,0 @@ -### Fixed - -- Fixed the versioning bug for light client data types in the Beacon API. \ No newline at end of file diff --git a/changelog/bastin_lc-fulu-spectest.md b/changelog/bastin_lc-fulu-spectest.md deleted file mode 100644 index 86ffc1a873..0000000000 --- a/changelog/bastin_lc-fulu-spectest.md +++ /dev/null @@ -1,3 +0,0 @@ -### Ignored - -- Fulu spec tests coverage for light client tests. \ No newline at end of file diff --git a/changelog/bastin_lc-p2p-validation.md b/changelog/bastin_lc-p2p-validation.md deleted file mode 100644 index 354e3d6d4c..0000000000 --- a/changelog/bastin_lc-p2p-validation.md +++ /dev/null @@ -1,4 +0,0 @@ -### Changed - -- Compare received LC messages over gossipsub with locally computed ones before forwarding. Also no longer save updates - from gossipsub, just validate and forward. \ No newline at end of file diff --git a/changelog/bastin_move-lc-package-out-of-core.md b/changelog/bastin_move-lc-package-out-of-core.md deleted file mode 100644 index 985b018161..0000000000 --- a/changelog/bastin_move-lc-package-out-of-core.md +++ /dev/null @@ -1,3 +0,0 @@ -### Ignored - -- Moved light-client package out of `beacon-chain/core/` and into `beacon-chain/`. \ No newline at end of file diff --git a/changelog/bastin_move_broadcast_to_store.md b/changelog/bastin_move_broadcast_to_store.md deleted file mode 100644 index 5bae68b2a9..0000000000 --- a/changelog/bastin_move_broadcast_to_store.md +++ /dev/null @@ -1,5 +0,0 @@ -### Changed - -- Moved the broadcast and event notifier logic for saving LC updates to the store function. -- Fixed the issue with broadcasting more than twice per LC Finality update, and the if-case bug. -- Separated the finality update validation rules for saving and broadcasting. 
\ No newline at end of file diff --git a/changelog/bastin_put-lc-store-behind-flag.md b/changelog/bastin_put-lc-store-behind-flag.md deleted file mode 100644 index 3dcacbd9fb..0000000000 --- a/changelog/bastin_put-lc-store-behind-flag.md +++ /dev/null @@ -1,3 +0,0 @@ -### Changed - -- Put the initiation of LC Store behind the `enable-light-client` flag. \ No newline at end of file diff --git a/changelog/bastin_read-updates-from-store-in-rpc.md b/changelog/bastin_read-updates-from-store-in-rpc.md deleted file mode 100644 index 4d8155c802..0000000000 --- a/changelog/bastin_read-updates-from-store-in-rpc.md +++ /dev/null @@ -1,3 +0,0 @@ -### Added - -- Read light client optimistic and finality updates from LCStore instead of computing them in the beacon API handlers. \ No newline at end of file diff --git a/changelog/bastin_refactor-lc-bootstrap-tests.md b/changelog/bastin_refactor-lc-bootstrap-tests.md deleted file mode 100644 index ef44046d55..0000000000 --- a/changelog/bastin_refactor-lc-bootstrap-tests.md +++ /dev/null @@ -1,3 +0,0 @@ -### Ignored - -- Refactor light client bootstrap tests in the RPC package. \ No newline at end of file diff --git a/changelog/bastin_refactor-lc-kv-tests.md b/changelog/bastin_refactor-lc-kv-tests.md deleted file mode 100644 index 7d456d574a..0000000000 --- a/changelog/bastin_refactor-lc-kv-tests.md +++ /dev/null @@ -1,3 +0,0 @@ -### Ignored - -- Refactor light client kv tests. \ No newline at end of file diff --git a/changelog/bastin_refactor-lc-testutils.md b/changelog/bastin_refactor-lc-testutils.md deleted file mode 100644 index ddc8fcb97b..0000000000 --- a/changelog/bastin_refactor-lc-testutils.md +++ /dev/null @@ -1,3 +0,0 @@ -### Ignored - -- Refactor light client testing utils to use functional options. \ No newline at end of file diff --git a/changelog/bastin_refine-lc-rpc-server.md b/changelog/bastin_refine-lc-rpc-server.md deleted file mode 100644 index aa34a6d61c..0000000000 --- a/changelog/bastin_refine-lc-rpc-server.md +++ /dev/null @@ -1,3 +0,0 @@ -### Ignored - -- Removed extra/unused fields of the LC Beacon API server. \ No newline at end of file diff --git a/changelog/bastin_remove-unused.md b/changelog/bastin_remove-unused.md deleted file mode 100644 index e3a671b5d5..0000000000 --- a/changelog/bastin_remove-unused.md +++ /dev/null @@ -1,3 +0,0 @@ -### Ignored - -- Remove unused parameter `currentSlot` from LC functions. \ No newline at end of file diff --git a/changelog/bastin_save-updates-in-store.md b/changelog/bastin_save-updates-in-store.md deleted file mode 100644 index 58a537074e..0000000000 --- a/changelog/bastin_save-updates-in-store.md +++ /dev/null @@ -1,3 +0,0 @@ -### Added - -- Save light client finality and optimistic updates to the light client store. \ No newline at end of file diff --git a/changelog/bastin_unify-lc-api-bootstrap.md b/changelog/bastin_unify-lc-api-bootstrap.md deleted file mode 100644 index 45653278f2..0000000000 --- a/changelog/bastin_unify-lc-api-bootstrap.md +++ /dev/null @@ -1,3 +0,0 @@ -### Changed - -- Move setter/getter functions for LC Bootstrap into LcStore for a unified interface. \ No newline at end of file diff --git a/changelog/bastin_unify-lc-api-updates.md b/changelog/bastin_unify-lc-api-updates.md deleted file mode 100644 index af0350d3c5..0000000000 --- a/changelog/bastin_unify-lc-api-updates.md +++ /dev/null @@ -1,3 +0,0 @@ -### Changed - -- Move setter/getter functions for LC Updates into LcStore for a unified interface. 
\ No newline at end of file diff --git a/changelog/bastin_version-to-fork-epoch.md b/changelog/bastin_version-to-fork-epoch.md deleted file mode 100644 index 05bbd20afd..0000000000 --- a/changelog/bastin_version-to-fork-epoch.md +++ /dev/null @@ -1,3 +0,0 @@ -### Added - -- Add method `VersionToForkEpochMap()` to the `BeaconChainConfig` in the `params` package. \ No newline at end of file diff --git a/changelog/fernantho_populate-variable-length-analyze-object.md b/changelog/fernantho_populate-variable-length-analyze-object.md deleted file mode 100644 index d96e38fe1c..0000000000 --- a/changelog/fernantho_populate-variable-length-analyze-object.md +++ /dev/null @@ -1,3 +0,0 @@ -### Added - -- Populate sszInfo of variable-length fields in AnalyzeObjects diff --git a/changelog/fix-fulu-bid-compatibility.md b/changelog/fix-fulu-bid-compatibility.md deleted file mode 100644 index 34f7b7d10c..0000000000 --- a/changelog/fix-fulu-bid-compatibility.md +++ /dev/null @@ -1,3 +0,0 @@ -### Fixed - -- Fix builder bid version compatibility to support Electra bids with Fulu blocks \ No newline at end of file diff --git a/changelog/fix-genesis-block-eip6110.md b/changelog/fix-genesis-block-eip6110.md deleted file mode 100644 index 46277c2a04..0000000000 --- a/changelog/fix-genesis-block-eip6110.md +++ /dev/null @@ -1,3 +0,0 @@ -### Fixed - -- Skip genesis block retrieval when EIP-6110 deposit requests have started to prevent "pruned history unavailable" errors with execution clients that have pruned pre-merge data \ No newline at end of file diff --git a/changelog/hyunchel_fix-misleading-log-msg.md b/changelog/hyunchel_fix-misleading-log-msg.md deleted file mode 100644 index 925267097c..0000000000 --- a/changelog/hyunchel_fix-misleading-log-msg.md +++ /dev/null @@ -1,3 +0,0 @@ -### Changed - -- Clarified misleading log messages in beacon-chain/rpc/service gRPC module. diff --git a/changelog/james-prysm_alpaha6-spec-tests.md b/changelog/james-prysm_alpaha6-spec-tests.md deleted file mode 100644 index be0f6a6daf..0000000000 --- a/changelog/james-prysm_alpaha6-spec-tests.md +++ /dev/null @@ -1,3 +0,0 @@ -### Changed - -- Updated consensus spec from v1.6.0-alpha.5 to v1.6.0-alpha.6 \ No newline at end of file diff --git a/changelog/james-prysm_block-proposal-fulu.md b/changelog/james-prysm_block-proposal-fulu.md deleted file mode 100644 index 5a23e97fc2..0000000000 --- a/changelog/james-prysm_block-proposal-fulu.md +++ /dev/null @@ -1,3 +0,0 @@ -### Added - -- Fulu block proposal changes for beacon api and gRPC. \ No newline at end of file diff --git a/changelog/james-prysm_config-cleanup.md b/changelog/james-prysm_config-cleanup.md deleted file mode 100644 index b686b28e0e..0000000000 --- a/changelog/james-prysm_config-cleanup.md +++ /dev/null @@ -1,3 +0,0 @@ -### Removed - -- removed unused configs and hides prysm specific configs from `/eth/v1/config/spec` endpoint \ No newline at end of file diff --git a/changelog/james-prysm_debug-data-columns.md b/changelog/james-prysm_debug-data-columns.md deleted file mode 100644 index 24da7794dd..0000000000 --- a/changelog/james-prysm_debug-data-columns.md +++ /dev/null @@ -1,3 +0,0 @@ -### Added - -- Adding `/eth/v1/debug/beacon/data_column_sidecars/{block_id}` endpoint. 
\ No newline at end of file diff --git a/changelog/james-prysm_default-duties-v2.md b/changelog/james-prysm_default-duties-v2.md deleted file mode 100644 index 68353d2c63..0000000000 --- a/changelog/james-prysm_default-duties-v2.md +++ /dev/null @@ -1,3 +0,0 @@ -### Changed - -- Changed `enable-duties-v2` to `disable-duties-v2` to default to using duties v2. \ No newline at end of file diff --git a/changelog/james-prysm_deprecate-publish-blos.md b/changelog/james-prysm_deprecate-publish-blos.md deleted file mode 100644 index 836457f4f5..0000000000 --- a/changelog/james-prysm_deprecate-publish-blos.md +++ /dev/null @@ -1,3 +0,0 @@ -### Changed - -- Deprecated and added error to /prysm/v1/beacon/blobs endpoint for post Fulu fork. \ No newline at end of file diff --git a/changelog/james-prysm_fix-builder-version-check.md b/changelog/james-prysm_fix-builder-version-check.md deleted file mode 100644 index 1ca5a30636..0000000000 --- a/changelog/james-prysm_fix-builder-version-check.md +++ /dev/null @@ -1,3 +0,0 @@ -### Fixed - -- builder version check was using head block version instead of current fork's version based on slot, fixes e2e from https://github.com/OffchainLabs/prysm/commit/57e27199bdb9b3ef1af14c3374999aba5e0788a3. \ No newline at end of file diff --git a/changelog/james-prysm_fix-config-parsing.md b/changelog/james-prysm_fix-config-parsing.md deleted file mode 100644 index c5556b7723..0000000000 --- a/changelog/james-prysm_fix-config-parsing.md +++ /dev/null @@ -1,3 +0,0 @@ -### Fixed - -- Fixing Unsupported config field kind; value forwarded verbatim errors for type string. \ No newline at end of file diff --git a/changelog/james-prysm_fix-da-metric.md b/changelog/james-prysm_fix-da-metric.md deleted file mode 100644 index 165b6e4624..0000000000 --- a/changelog/james-prysm_fix-da-metric.md +++ /dev/null @@ -1,3 +0,0 @@ -### Fixed - -- da metric was not writing correctly because if statement on err was accidently flipped \ No newline at end of file diff --git a/changelog/james-prysm_fix-duties-v2-assignment.md b/changelog/james-prysm_fix-duties-v2-assignment.md deleted file mode 100644 index d5b989a48b..0000000000 --- a/changelog/james-prysm_fix-duties-v2-assignment.md +++ /dev/null @@ -1,3 +0,0 @@ -### Fixed - -- Fix panic on dutiesv2 when there is no committee assignment on the epoch \ No newline at end of file diff --git a/changelog/james-prysm_fix-find-peers.md b/changelog/james-prysm_fix-find-peers.md deleted file mode 100644 index 5dbca701cf..0000000000 --- a/changelog/james-prysm_fix-find-peers.md +++ /dev/null @@ -1,3 +0,0 @@ -### Fixed - -- Fixed regression in find peer functions introduced in PR#15471, where nodes with equal sequence numbers were incorrectly skipped and the peer count was incorrectly reduced when replacing nodes with higher sequence numbers. \ No newline at end of file diff --git a/changelog/james-prysm_fix-justified-blocker.md b/changelog/james-prysm_fix-justified-blocker.md deleted file mode 100644 index 97d81fe521..0000000000 --- a/changelog/james-prysm_fix-justified-blocker.md +++ /dev/null @@ -1,3 +0,0 @@ -### Fixed - -- Fixed 'justified' block support missing on blocker.Block and optimized logic between blocker.Block and blocker.Blob. 
\ No newline at end of file diff --git a/changelog/james-prysm_fix-origin-block-log.md b/changelog/james-prysm_fix-origin-block-log.md deleted file mode 100644 index ec8f59d493..0000000000 --- a/changelog/james-prysm_fix-origin-block-log.md +++ /dev/null @@ -1,3 +0,0 @@ -### Fixed - -- fixes level=error msg="Could not clean up dirty states" error="OriginBlockRoot: not found in db" prefix=state-gen error when starting in kurtosis \ No newline at end of file diff --git a/changelog/james-prysm_fulu-web3signer.md b/changelog/james-prysm_fulu-web3signer.md deleted file mode 100644 index 699abb4150..0000000000 --- a/changelog/james-prysm_fulu-web3signer.md +++ /dev/null @@ -1,7 +0,0 @@ -### Added - -- Adding Fulu types for web3signer. - -### Changed - -- changed validatorpb.SignRequest_AggregateAttestationAndProof signing type to use AggregateAttestationAndProofV2 on web3signer. \ No newline at end of file diff --git a/changelog/james-prysm_get-blob-fulu.md b/changelog/james-prysm_get-blob-fulu.md deleted file mode 100644 index 860cc23577..0000000000 --- a/changelog/james-prysm_get-blob-fulu.md +++ /dev/null @@ -1,3 +0,0 @@ -### Added - -- Added new post fulu /eth/v1/beacon/blobs/{block_id} endpoint \ No newline at end of file diff --git a/changelog/james-prysm_get-duties-v2.md b/changelog/james-prysm_get-duties-v2.md deleted file mode 100644 index 92926f6eb3..0000000000 --- a/changelog/james-prysm_get-duties-v2.md +++ /dev/null @@ -1,3 +0,0 @@ -### Added - -- GetDutiesV2 gRPC function, removes committee list from duties, replaced with committee length, validator committee index. \ No newline at end of file diff --git a/changelog/james-prysm_improve-duties-v2.md b/changelog/james-prysm_improve-duties-v2.md deleted file mode 100644 index aa83ea1a7e..0000000000 --- a/changelog/james-prysm_improve-duties-v2.md +++ /dev/null @@ -1,3 +0,0 @@ -### Fixed - -- adding in improvements to getduties v2, replaces helpers.PrecomputeCommittees() ( exepensive ) with CommitteeAssignments \ No newline at end of file diff --git a/changelog/james-prysm_move-ticker.md b/changelog/james-prysm_move-ticker.md deleted file mode 100644 index a5c6982ace..0000000000 --- a/changelog/james-prysm_move-ticker.md +++ /dev/null @@ -1,3 +0,0 @@ -### Fixed - -- Fixes edge case starting validator client with new validator keys starts the slot ticker too early resulting in replayed slots in the main runner loop. Fixes edge case of replayed slots when waiting for account acivations. \ No newline at end of file diff --git a/changelog/james-prysm_move-web-flag.md b/changelog/james-prysm_move-web-flag.md deleted file mode 100644 index a046ee223f..0000000000 --- a/changelog/james-prysm_move-web-flag.md +++ /dev/null @@ -1,3 +0,0 @@ -### Ignored - -- Code cleanup by moving the web flag as a feature flag so that we don't need to pass a variable throughout the code base. \ No newline at end of file diff --git a/changelog/james-prysm_post-block-ssz.md b/changelog/james-prysm_post-block-ssz.md deleted file mode 100644 index 8b79e087e2..0000000000 --- a/changelog/james-prysm_post-block-ssz.md +++ /dev/null @@ -1,3 +0,0 @@ -### Changed - -- Switching default of validator client rest call for submit block from JSON to SSZ. Fallback json will be attempted. 
\ No newline at end of file diff --git a/changelog/james-prysm_proposer-lookahead-api.md b/changelog/james-prysm_proposer-lookahead-api.md deleted file mode 100644 index fadad42a7d..0000000000 --- a/changelog/james-prysm_proposer-lookahead-api.md +++ /dev/null @@ -1,3 +0,0 @@ -### Added - -- Implements the `/eth/v1/beacon/states/{state_id}/proposer_lookahead` beacon api endpoint. \ No newline at end of file diff --git a/changelog/james-prysm_remove-cononical-head.md b/changelog/james-prysm_remove-cononical-head.md deleted file mode 100644 index 0028bac841..0000000000 --- a/changelog/james-prysm_remove-cononical-head.md +++ /dev/null @@ -1,3 +0,0 @@ -### Removed - -- Validator client will no longer need to call the canonical head api. \ No newline at end of file diff --git a/changelog/james-prysm_remove-ssz-only-flag.md b/changelog/james-prysm_remove-ssz-only-flag.md deleted file mode 100644 index 7325cc60fa..0000000000 --- a/changelog/james-prysm_remove-ssz-only-flag.md +++ /dev/null @@ -1,7 +0,0 @@ -### Removed - -- Partially reverting pr #15390 removing the `ssz-only` debug flag until there is a real usecase for the flag - -### Added - -- Added new PRYSM_API_OVERRIDE_ACCEPT environment variable to override ssz accept header as a replacement to flag \ No newline at end of file diff --git a/changelog/james-prysm_safe-validator-shutdown.md b/changelog/james-prysm_safe-validator-shutdown.md deleted file mode 100644 index 74d9a4eb63..0000000000 --- a/changelog/james-prysm_safe-validator-shutdown.md +++ /dev/null @@ -1,11 +0,0 @@ -## Added - -- Added `max-health-checks` flag that sets the maximum times the validator tries to check the health of the beacon node before timing out. 0 or a negative number is indefinite. (the default is 0) - -## Fixed - -- Validator client shuts down cleanly on error instead of fatal error. - -## Changed - -- Previously, we optimistically believed the beacon node was healthy and tried to get chain start, but now we do a health check at the start. \ No newline at end of file diff --git a/changelog/james-prysm_skip-omit-config-values.md b/changelog/james-prysm_skip-omit-config-values.md deleted file mode 100644 index 654d3449cd..0000000000 --- a/changelog/james-prysm_skip-omit-config-values.md +++ /dev/null @@ -1,3 +0,0 @@ -### Fixed - -- fix /eth/v1/config/spec endpoint to properly skip omitted values. \ No newline at end of file diff --git a/changelog/james-prysm_ssz-validator-block.md b/changelog/james-prysm_ssz-validator-block.md deleted file mode 100644 index 6965120693..0000000000 --- a/changelog/james-prysm_ssz-validator-block.md +++ /dev/null @@ -1,7 +0,0 @@ -### Added - -- New ssz-only flag for validator client to enable calling rest apis in SSZ, starting with get block endpoint. - -### Changed - -- when REST api is enabled the get Block api defaults to requesting and receiving SSZ instead of JSON, JSON is the fallback. \ No newline at end of file diff --git a/changelog/james-prysm_validator-duties-v2.md b/changelog/james-prysm_validator-duties-v2.md deleted file mode 100644 index 900fe5b814..0000000000 --- a/changelog/james-prysm_validator-duties-v2.md +++ /dev/null @@ -1,3 +0,0 @@ -### Added - -- Added feature flag for validator client to use get duties v2. 
\ No newline at end of file diff --git a/changelog/james_prysm-fix-blocker-notfound.md b/changelog/james_prysm-fix-blocker-notfound.md deleted file mode 100644 index d286d5c11c..0000000000 --- a/changelog/james_prysm-fix-blocker-notfound.md +++ /dev/null @@ -1,3 +0,0 @@ -### Fixed - -- fixed regression introduced in PR #15715 , blocker now returns an error for not found, and error handling correctly handles error and returns 404 instead of 500 \ No newline at end of file diff --git a/changelog/james_prysm-fulu-general-spectests.md b/changelog/james_prysm-fulu-general-spectests.md deleted file mode 100644 index 521cc38063..0000000000 --- a/changelog/james_prysm-fulu-general-spectests.md +++ /dev/null @@ -1,3 +0,0 @@ -### Added - -- Fulu spec tests coverage for covering the general package \ No newline at end of file diff --git a/changelog/jihoonsong_support-fulu-genesis.md b/changelog/jihoonsong_support-fulu-genesis.md deleted file mode 100644 index 41dca9f6e8..0000000000 --- a/changelog/jihoonsong_support-fulu-genesis.md +++ /dev/null @@ -1,3 +0,0 @@ -### Added - -- Support Fulu genesis block. diff --git a/changelog/jtraglia_das-core-fixes.md b/changelog/jtraglia_das-core-fixes.md deleted file mode 100644 index 338d154516..0000000000 --- a/changelog/jtraglia_das-core-fixes.md +++ /dev/null @@ -1,3 +0,0 @@ -### Fixed - -- Fixed variable names, links, and typos in das core code. diff --git a/changelog/jtraglia_dev-to-master.md b/changelog/jtraglia_dev-to-master.md deleted file mode 100644 index 97e073b2a3..0000000000 --- a/changelog/jtraglia_dev-to-master.md +++ /dev/null @@ -1,3 +0,0 @@ -### Changed - -- Update links to consensus-specs to point to `master` branch diff --git a/changelog/jtraglia_fix-blobs-bundle-v2-max-proofs.md b/changelog/jtraglia_fix-blobs-bundle-v2-max-proofs.md deleted file mode 100644 index d98ba47775..0000000000 --- a/changelog/jtraglia_fix-blobs-bundle-v2-max-proofs.md +++ /dev/null @@ -1,3 +0,0 @@ -### Fixed - -- Fixed the max proofs in `BlobsBundleV2`. diff --git a/changelog/jtraglia_move-reconstruction-lock.md b/changelog/jtraglia_move-reconstruction-lock.md deleted file mode 100644 index 687edf0e81..0000000000 --- a/changelog/jtraglia_move-reconstruction-lock.md +++ /dev/null @@ -1,3 +0,0 @@ -### Fixed - -- Moved reconstruction lock to prevent unnecessary work. 
diff --git a/changelog/jtraglia_nits-dcsc-verification.md b/changelog/jtraglia_nits-dcsc-verification.md deleted file mode 100644 index a9dc73d1cb..0000000000 --- a/changelog/jtraglia_nits-dcsc-verification.md +++ /dev/null @@ -1,3 +0,0 @@ -### Changed - -- Fix some nits associated with data column sidecar verification diff --git a/changelog/jtraglia_specrefs.md b/changelog/jtraglia_specrefs.md deleted file mode 100644 index cb4ead972f..0000000000 --- a/changelog/jtraglia_specrefs.md +++ /dev/null @@ -1,3 +0,0 @@ -### Added - -- Added specification references which map spec to implementation diff --git a/changelog/jtraglia_specrefs_compute_fork_digest.md b/changelog/jtraglia_specrefs_compute_fork_digest.md deleted file mode 100644 index 040deebf31..0000000000 --- a/changelog/jtraglia_specrefs_compute_fork_digest.md +++ /dev/null @@ -1,3 +0,0 @@ -### Changed - -- Add sources for compute_fork_digest to specrefs diff --git a/changelog/jtraglia_update-within-da-period.md b/changelog/jtraglia_update-within-da-period.md deleted file mode 100644 index 2879a0aa25..0000000000 --- a/changelog/jtraglia_update-within-da-period.md +++ /dev/null @@ -1,3 +0,0 @@ -### Fixed - -- Use `MinEpochsForDataColumnSidecarsRequest` in `WithinDAPeriod` when in Fulu diff --git a/changelog/jtraglia_various-renaming.md b/changelog/jtraglia_various-renaming.md deleted file mode 100644 index 1285ad889b..0000000000 --- a/changelog/jtraglia_various-renaming.md +++ /dev/null @@ -1,3 +0,0 @@ -### Changed - -- Renamed various variables/functions to be more clear. diff --git a/changelog/kaloyantanev_rework-dv-selection-proofs.md b/changelog/kaloyantanev_rework-dv-selection-proofs.md deleted file mode 100644 index 6294f6d1d8..0000000000 --- a/changelog/kaloyantanev_rework-dv-selection-proofs.md +++ /dev/null @@ -1,3 +0,0 @@ -### Fixed - -- DV aggregations failing first slot of the epoch. diff --git a/changelog/kasey_decouple-proto-params.md b/changelog/kasey_decouple-proto-params.md deleted file mode 100644 index 3720f25f2f..0000000000 --- a/changelog/kasey_decouple-proto-params.md +++ /dev/null @@ -1,2 +0,0 @@ -### Ignored -- remove usages of params from the proto package so that the params package can access proto types. diff --git a/changelog/kasey_exclude-far-future-fork.md b/changelog/kasey_exclude-far-future-fork.md deleted file mode 100644 index 06668cdfa9..0000000000 --- a/changelog/kasey_exclude-far-future-fork.md +++ /dev/null @@ -1,2 +0,0 @@ -### Fixed -- Don't include entries in the fork schedule if their epoch is set to far future epoch. Avoids reporting next_fork_version == . diff --git a/changelog/kasey_fix-15607.md b/changelog/kasey_fix-15607.md deleted file mode 100644 index 8dbd2cbc1e..0000000000 --- a/changelog/kasey_fix-15607.md +++ /dev/null @@ -1,2 +0,0 @@ -### Fixed -- mitigate potential supernode clustering due to libp2p ConnManager pruning of non-supernodes, see https://github.com/OffchainLabs/prysm/issues/15607. diff --git a/changelog/kasey_fix-cache-panic.md b/changelog/kasey_fix-cache-panic.md deleted file mode 100644 index b8f0c9dac5..0000000000 --- a/changelog/kasey_fix-cache-panic.md +++ /dev/null @@ -1,2 +0,0 @@ -### Ignored -- initialize data column storage in rpc handlers. 
diff --git a/changelog/kasey_fix-e2e-panic.md b/changelog/kasey_fix-e2e-panic.md deleted file mode 100644 index 7d8f43a824..0000000000 --- a/changelog/kasey_fix-e2e-panic.md +++ /dev/null @@ -1,2 +0,0 @@ -### Ignored -- Fix a panic in e2e that is caused by poor context management leading to error assertions being called after test cleanup. diff --git a/changelog/kasey_fusaka-nfd.md b/changelog/kasey_fusaka-nfd.md deleted file mode 100644 index 3193f63a93..0000000000 --- a/changelog/kasey_fusaka-nfd.md +++ /dev/null @@ -1,2 +0,0 @@ -### Added -- Support for fusaka `nfd` enr field, and changes to the semantics of the eth2 field. diff --git a/changelog/kasey_gzip-skip-sse.md b/changelog/kasey_gzip-skip-sse.md deleted file mode 100644 index f805609f6b..0000000000 --- a/changelog/kasey_gzip-skip-sse.md +++ /dev/null @@ -1,2 +0,0 @@ -### Fixed -- Do not apply the gzip middleware to the event stream API. diff --git a/changelog/kasey_idempotent-registration.md b/changelog/kasey_idempotent-registration.md deleted file mode 100644 index afd2085f95..0000000000 --- a/changelog/kasey_idempotent-registration.md +++ /dev/null @@ -1,2 +0,0 @@ -### Fixed -- Fixed issue #15738 where separate goroutines assume sole responsibility for topic registration. diff --git a/changelog/kasey_ignore-far-future-mismatch.md b/changelog/kasey_ignore-far-future-mismatch.md deleted file mode 100644 index 8428bfcee8..0000000000 --- a/changelog/kasey_ignore-far-future-mismatch.md +++ /dev/null @@ -1,2 +0,0 @@ -### Fixed -- Do not reject peers if they have a mismatched version|digest when the next for epoch is FAR_FUTURE_EPOCH. diff --git a/changelog/kasey_init-genesis-asap.md b/changelog/kasey_init-genesis-asap.md deleted file mode 100644 index a0d44edfc5..0000000000 --- a/changelog/kasey_init-genesis-asap.md +++ /dev/null @@ -1,3 +0,0 @@ -## Fixed - -- Genesis state, timestamp and validators root now ubiquitously available at node startup, supporting tech debt cleanup. diff --git a/changelog/kasey_invalid-digest-log.md b/changelog/kasey_invalid-digest-log.md deleted file mode 100644 index 175a42249b..0000000000 --- a/changelog/kasey_invalid-digest-log.md +++ /dev/null @@ -1,2 +0,0 @@ -### Ignored -- decrease log level for peer subscription failures using invalid digests. diff --git a/changelog/kasey_isolate-committee-cache.md b/changelog/kasey_isolate-committee-cache.md deleted file mode 100644 index 2b7296474a..0000000000 --- a/changelog/kasey_isolate-committee-cache.md +++ /dev/null @@ -1,2 +0,0 @@ -### Ignored -- Committee cache moved from a package var to a service struct. diff --git a/changelog/kasey_log-invalid-ee-root.md b/changelog/kasey_log-invalid-ee-root.md deleted file mode 100644 index a43b28359b..0000000000 --- a/changelog/kasey_log-invalid-ee-root.md +++ /dev/null @@ -1,2 +0,0 @@ -### Ignored -- Adding context to logs around execution engine notification failures. diff --git a/changelog/kasey_max-blobs-use-network-schedule.md b/changelog/kasey_max-blobs-use-network-schedule.md deleted file mode 100644 index 7f3e8c1220..0000000000 --- a/changelog/kasey_max-blobs-use-network-schedule.md +++ /dev/null @@ -1,2 +0,0 @@ -### Ignored -- Switch implementation of get max blobs to use network schedule entry code to be consistent with other fork-related helpers. 
diff --git a/changelog/kasey_omit-non-blob-fields.md b/changelog/kasey_omit-non-blob-fields.md deleted file mode 100644 index 2c04b22de9..0000000000 --- a/changelog/kasey_omit-non-blob-fields.md +++ /dev/null @@ -1,2 +0,0 @@ -### Ignored -- omits non-standard blob schedule entry struct fields from marshaling. diff --git a/changelog/kasey_refactor-fork-schedules.md b/changelog/kasey_refactor-fork-schedules.md deleted file mode 100644 index e277de81eb..0000000000 --- a/changelog/kasey_refactor-fork-schedules.md +++ /dev/null @@ -1,2 +0,0 @@ -### Ignored -- Refactor of fork schedule code to remove alternate methods of doing the same thing and support BPO digests. diff --git a/changelog/kasey_reject-mismatched-schedules.md b/changelog/kasey_reject-mismatched-schedules.md deleted file mode 100644 index dd7ff36e0e..0000000000 --- a/changelog/kasey_reject-mismatched-schedules.md +++ /dev/null @@ -1,2 +0,0 @@ -### Changed -- Reject incoming connections when the fork schedule of the connecting peer (parsed from their ENR) has a matching next_fork_epoch, but mismatched next_fork_version or nfd (next fork digest). diff --git a/changelog/kasey_rename-backfill-flag.md b/changelog/kasey_rename-backfill-flag.md deleted file mode 100644 index 38624db056..0000000000 --- a/changelog/kasey_rename-backfill-flag.md +++ /dev/null @@ -1,2 +0,0 @@ -### Changed -- Renamed the `--enable-experimental-backfill` flag to `--enable-backfill` to signal that it is more mature. diff --git a/changelog/kasey_start-discovery-immediately.md b/changelog/kasey_start-discovery-immediately.md deleted file mode 100644 index 85f9f235b8..0000000000 --- a/changelog/kasey_start-discovery-immediately.md +++ /dev/null @@ -1,2 +0,0 @@ -### Fixed -- Start topic-based peer discovery before initial sync completes so that we have coverage of needed columns when range syncing. diff --git a/changelog/kasey_unwedge-ethspecify.md b/changelog/kasey_unwedge-ethspecify.md deleted file mode 100644 index 09ae76a83f..0000000000 --- a/changelog/kasey_unwedge-ethspecify.md +++ /dev/null @@ -1,2 +0,0 @@ -### Ignored -- Unwedge ethspecify. diff --git a/changelog/kasey_update-gossipsub.md b/changelog/kasey_update-gossipsub.md deleted file mode 100644 index fb3adb8d54..0000000000 --- a/changelog/kasey_update-gossipsub.md +++ /dev/null @@ -1,2 +0,0 @@ -### Changed -- Upgraded gossipsub to v0.14.2 and libp2p to v0.39.1. diff --git a/changelog/manu-TestHostIsResolved.md b/changelog/manu-TestHostIsResolved.md deleted file mode 100644 index 075a5f1fea..0000000000 --- a/changelog/manu-TestHostIsResolved.md +++ /dev/null @@ -1,2 +0,0 @@ -### Ignored -- Workaround `TestHostIsResolved` by using "more reliable" DNS resolver. diff --git a/changelog/manu-bootnodes.md b/changelog/manu-bootnodes.md deleted file mode 100644 index 5e52d52d58..0000000000 --- a/changelog/manu-bootnodes.md +++ /dev/null @@ -1,2 +0,0 @@ -### Fixed -- `--chain-config-file`: Do not use any more mainnet boot nodes. 
diff --git a/changelog/manu-broadcast.md b/changelog/manu-broadcast.md deleted file mode 100644 index 9f3f1878a4..0000000000 --- a/changelog/manu-broadcast.md +++ /dev/null @@ -1,4 +0,0 @@ -### Changed -- Broadcast block then sidecars, instead block and sidecars concurrently -- Broadcast and receive sidecars in concurrently, instead sequentially - diff --git a/changelog/manu-cgc.md b/changelog/manu-cgc.md deleted file mode 100644 index cd8fc24247..0000000000 --- a/changelog/manu-cgc.md +++ /dev/null @@ -1,3 +0,0 @@ -### Changed -- Update validator custody to the latest specification, including the new status message. - diff --git a/changelog/manu-eas.md b/changelog/manu-eas.md deleted file mode 100644 index 43ab206cac..0000000000 --- a/changelog/manu-eas.md +++ /dev/null @@ -1,3 +0,0 @@ -### Fixed -- `buildStatusFromStream`: Respond `statusV2` only if Fulu is enabled. -- Send our real earliest available slot when sending a Status request post Fulu instead of `0`. \ No newline at end of file diff --git a/changelog/manu-fix-log.md b/changelog/manu-fix-log.md deleted file mode 100644 index 25937d5544..0000000000 --- a/changelog/manu-fix-log.md +++ /dev/null @@ -1,3 +0,0 @@ -### Ignored -- `requestAndSaveMissingDataColumnSidecars`: Fix log - diff --git a/changelog/manu-from-disk-test.md b/changelog/manu-from-disk-test.md deleted file mode 100644 index 8511a6a5b9..0000000000 --- a/changelog/manu-from-disk-test.md +++ /dev/null @@ -1,3 +0,0 @@ -### Ignored -- Add tests for `VerifiedROBlobFromDisk`. -- Add tests for `VerifiedRODataColumnFromDisk`. \ No newline at end of file diff --git a/changelog/manu-go-1.25.1.md b/changelog/manu-go-1.25.1.md deleted file mode 100644 index 776c60795d..0000000000 --- a/changelog/manu-go-1.25.1.md +++ /dev/null @@ -1,3 +0,0 @@ -### Changed - -- Updated go.mod to v1.25.1 \ No newline at end of file diff --git a/changelog/manu-inclusion-cache-key.md b/changelog/manu-inclusion-cache-key.md deleted file mode 100644 index e541de0cdb..0000000000 --- a/changelog/manu-inclusion-cache-key.md +++ /dev/null @@ -1,3 +0,0 @@ -### Fixed -- `inclusionProofKey`: Include the commitments in the key. - diff --git a/changelog/manu-log-ms.md b/changelog/manu-log-ms.md deleted file mode 100644 index 83cee9a180..0000000000 --- a/changelog/manu-log-ms.md +++ /dev/null @@ -1,3 +0,0 @@ -### Changed -- Add milliseconds to log timestamps. - diff --git a/changelog/manu-logs-datacolumns.md b/changelog/manu-logs-datacolumns.md deleted file mode 100644 index 5378b3810a..0000000000 --- a/changelog/manu-logs-datacolumns.md +++ /dev/null @@ -1,2 +0,0 @@ -## Changed -- Improve logging of data column sidecars \ No newline at end of file diff --git a/changelog/manu-logs-to-trace.md b/changelog/manu-logs-to-trace.md deleted file mode 100644 index 53afd4a44c..0000000000 --- a/changelog/manu-logs-to-trace.md +++ /dev/null @@ -1,4 +0,0 @@ -### Ignored -- Prettify logs for byRange/byRoot data column sidecar requests. -- Moving byRoot/byRange data column sidecars requests from peers to TRACE level. -- Move "Peer requested blob sidecar by root not found in db" in TRACE. \ No newline at end of file diff --git a/changelog/manu-peer-ban-at-restart.md b/changelog/manu-peer-ban-at-restart.md deleted file mode 100644 index 43dc7a36dc..0000000000 --- a/changelog/manu-peer-ban-at-restart.md +++ /dev/null @@ -1,2 +0,0 @@ -### Fixed -- Fixed various reasons why a node is banned by its peers when it stops. 
\ No newline at end of file diff --git a/changelog/manu-peerdas-beacon-api.md b/changelog/manu-peerdas-beacon-api.md deleted file mode 100644 index ccd0e56228..0000000000 --- a/changelog/manu-peerdas-beacon-api.md +++ /dev/null @@ -1,2 +0,0 @@ -### Added -- Implement beacon API blob sidecar enpoint for Fulu. diff --git a/changelog/manu-peerdas-builder.md b/changelog/manu-peerdas-builder.md deleted file mode 100644 index 3183261be5..0000000000 --- a/changelog/manu-peerdas-builder.md +++ /dev/null @@ -1,2 +0,0 @@ -### Added -- Add support for parsing and handling `ExecutionPayloadAndBlobsBundleV2`. diff --git a/changelog/manu-peerdas-c-kzg-update.md b/changelog/manu-peerdas-c-kzg-update.md deleted file mode 100644 index d9dea64650..0000000000 --- a/changelog/manu-peerdas-c-kzg-update.md +++ /dev/null @@ -1,2 +0,0 @@ -### Changed -- `c-kzg-4844`: Update from `v2.1.1` to `v2.1.5` diff --git a/changelog/manu-peerdas-columns-by-range-handler.md b/changelog/manu-peerdas-columns-by-range-handler.md deleted file mode 100644 index b59cde58cd..0000000000 --- a/changelog/manu-peerdas-columns-by-range-handler.md +++ /dev/null @@ -1,2 +0,0 @@ -### Added -- Implement `dataColumnSidecarsByRangeRPCHandler`. diff --git a/changelog/manu-peerdas-columns-by-root-handler.md b/changelog/manu-peerdas-columns-by-root-handler.md deleted file mode 100644 index e8055305b9..0000000000 --- a/changelog/manu-peerdas-columns-by-root-handler.md +++ /dev/null @@ -1,2 +0,0 @@ -### Added -- Implement `dataColumnSidecarByRootRPCHandler`. diff --git a/changelog/manu-peerdas-columns-logs.md b/changelog/manu-peerdas-columns-logs.md deleted file mode 100644 index b452522714..0000000000 --- a/changelog/manu-peerdas-columns-logs.md +++ /dev/null @@ -1,2 +0,0 @@ -### Changed -- Aggregate logs when broadcasting data column sidecars (one per root instead of one per sidecar) diff --git a/changelog/manu-peerdas-das.md b/changelog/manu-peerdas-das.md deleted file mode 100644 index 6ec100083b..0000000000 --- a/changelog/manu-peerdas-das.md +++ /dev/null @@ -1,2 +0,0 @@ -### Added -- PeerDAS: Implement DAS. diff --git a/changelog/manu-peerdas-dataColumnSidecarByRootRPCHandler.md b/changelog/manu-peerdas-dataColumnSidecarByRootRPCHandler.md deleted file mode 100644 index 80c3fdf9d9..0000000000 --- a/changelog/manu-peerdas-dataColumnSidecarByRootRPCHandler.md +++ /dev/null @@ -1,2 +0,0 @@ -### Fixed -- Non deterministic output order of `dataColumnSidecarByRootRPCHandler`. diff --git a/changelog/manu-peerdas-get-blobs-V2.md b/changelog/manu-peerdas-get-blobs-V2.md deleted file mode 100644 index 6fb1d55383..0000000000 --- a/changelog/manu-peerdas-get-blobs-V2.md +++ /dev/null @@ -1,3 +0,0 @@ -### Added -- Implement engine method `GetBlobsV2` -- Implement execution `ReconstructDataColumnSidecars`, which reconstruct data column sidecars from data fetched from the execution layer. diff --git a/changelog/manu-peerdas-metadata.md b/changelog/manu-peerdas-metadata.md deleted file mode 100644 index 937f84288b..0000000000 --- a/changelog/manu-peerdas-metadata.md +++ /dev/null @@ -1,2 +0,0 @@ -### Added -- PeerDAS: Implement the new Fulu Metadata. diff --git a/changelog/manu-peerdas-node.md b/changelog/manu-peerdas-node.md deleted file mode 100644 index 9d837628a5..0000000000 --- a/changelog/manu-peerdas-node.md +++ /dev/null @@ -1,2 +0,0 @@ -### Added -- PeerDAS: Add `CustodyInfo` in `BeaconNode`. 
diff --git a/changelog/manu-peerdas-reconstruct.md b/changelog/manu-peerdas-reconstruct.md deleted file mode 100644 index e5f3ffa8a8..0000000000 --- a/changelog/manu-peerdas-reconstruct.md +++ /dev/null @@ -1,4 +0,0 @@ -### Added -- In FetchDataColumnSidecars, after retrieving sidecars from peers, if still some sidecars are missing for a given root and if a reconstruction is possible (combining sidecars already retrieved from peers and sidecars in the storage), then reconstruct missing sidecars instead of trying to fetch the missing ones from peers. - - diff --git a/changelog/manu-peerdas-reconstruction-delay.md b/changelog/manu-peerdas-reconstruction-delay.md deleted file mode 100644 index 113511af5c..0000000000 --- a/changelog/manu-peerdas-reconstruction-delay.md +++ /dev/null @@ -1,2 +0,0 @@ -### Changed -- PeerDAS: Wait for a random delay, then reconstruct data column sidecars and immediately reseed instead of immediately reconstructing, waiting and then reseeding. \ No newline at end of file diff --git a/changelog/manu-peerdas-reconstruction-from-el.md b/changelog/manu-peerdas-reconstruction-from-el.md deleted file mode 100644 index 047871d68a..0000000000 --- a/changelog/manu-peerdas-reconstruction-from-el.md +++ /dev/null @@ -1,2 +0,0 @@ -### Added -- Implement data column sidecars reconstruction with data retrieved from the execution client when receiving a block via gossip. \ No newline at end of file diff --git a/changelog/manu-peerdas-reconstruction.md b/changelog/manu-peerdas-reconstruction.md deleted file mode 100644 index d62d9c28e9..0000000000 --- a/changelog/manu-peerdas-reconstruction.md +++ /dev/null @@ -1,2 +0,0 @@ -### Added -- PeerDAS: Implement reconstruction. diff --git a/changelog/manu-peerdas-send-data-column-requests.md b/changelog/manu-peerdas-send-data-column-requests.md deleted file mode 100644 index 5801f82c56..0000000000 --- a/changelog/manu-peerdas-send-data-column-requests.md +++ /dev/null @@ -1,3 +0,0 @@ -### Added -- Implement `SendDataColumnSidecarsByRangeRequest`. -- Implement `SendDataColumnSidecarsByRootRequest`. diff --git a/changelog/manu-peerdas-small.md b/changelog/manu-peerdas-small.md deleted file mode 100644 index a8d364f8a0..0000000000 --- a/changelog/manu-peerdas-small.md +++ /dev/null @@ -1,12 +0,0 @@ -### Added -- `verifyBlobCommitmentCount`: Print max allowed blob count in error message. - - -### Ignored -- `TestPersist`: Use `fieldparams.RootLength` instead of `32`. -- `TestDataColumnSidecarsByRootReq_Marshal`: Remove blank line. -- `ConvertPeerIDToNodeID`: Improve readability by using one line per field. - -### Changed -- `parseIndices`: Return `[]int` instead of `[]uint64`. - diff --git a/changelog/manu-peerdas-sync.md b/changelog/manu-peerdas-sync.md deleted file mode 100644 index ddcd634065..0000000000 --- a/changelog/manu-peerdas-sync.md +++ /dev/null @@ -1,2 +0,0 @@ -### Added -- Data columns syncing for Fusaka. \ No newline at end of file diff --git a/changelog/manu-peerdas-syncing-disjoint-network.md b/changelog/manu-peerdas-syncing-disjoint-network.md deleted file mode 100644 index 68c7e459a8..0000000000 --- a/changelog/manu-peerdas-syncing-disjoint-network.md +++ /dev/null @@ -1,10 +0,0 @@ -### Changed -- Filtering peers for data column subnets: Added a one-epoch slack to the peer’s head slot view. -- Fetching data column sidecars: If not all requested sidecars are available for a given root, return the successfully retrieved ones along with a map indicating which could not be fetched. 
-- Fetching origin data column sidecars: If only some sidecars are fetched, save the retrieved ones and retry fetching the missing ones on the next attempt. - -### Added -- Implemented syncing in a disjoint network with respect to data column sidecars subscribed by peers. - -## Fixed -- Initial sync: Do not request data column sidecars for blocks before the retention period. \ No newline at end of file diff --git a/changelog/manu-peerdas-variou.md b/changelog/manu-peerdas-variou.md deleted file mode 100644 index fa50d01c52..0000000000 --- a/changelog/manu-peerdas-variou.md +++ /dev/null @@ -1,9 +0,0 @@ -### Changed -- In `TopicFromMessage`: Do not assume anymore that all Fulu specific topic are V3 only. -- `readChunkedDataColumnSidecar`: Add `validationFunctions` parameter and add tests. - -### Added -- New `StatusV2` proto message. - -### Removed -- Unused `DataColumnIdentifier` proto message. diff --git a/changelog/manu-peerdas-various.md b/changelog/manu-peerdas-various.md deleted file mode 100644 index 140445c778..0000000000 --- a/changelog/manu-peerdas-various.md +++ /dev/null @@ -1,3 +0,0 @@ -### Added -- Warm data columns storage cache at start. -- Add `--data-column-path` flag. diff --git a/changelog/manu-remove-clock.md b/changelog/manu-remove-clock.md deleted file mode 100644 index bb9177b480..0000000000 --- a/changelog/manu-remove-clock.md +++ /dev/null @@ -1,2 +0,0 @@ -### Ignored -- P2P service: Remove unused clock. diff --git a/changelog/manu-retry-fetch-origin-columns.md b/changelog/manu-retry-fetch-origin-columns.md deleted file mode 100644 index 1b60f09569..0000000000 --- a/changelog/manu-retry-fetch-origin-columns.md +++ /dev/null @@ -1,2 +0,0 @@ -### Added -- Retry to fetch origin data column sidecars when starting from a checkpoint. diff --git a/changelog/manu-skip-bad-peers.md b/changelog/manu-skip-bad-peers.md deleted file mode 100644 index b9e7cc2a51..0000000000 --- a/changelog/manu-skip-bad-peers.md +++ /dev/null @@ -1,3 +0,0 @@ -### Fixed -- `findPeersWithSubnets`: If the filter function returns an error for a given peer, log an error and skip the peer instead of aborting the whole function. -- `computeIndicesByRootByPeer`: If the loop returns an error for a given peer, log an error and skip the peer instead of aborting the whole function. \ No newline at end of file diff --git a/changelog/manu-subscriptions.md b/changelog/manu-subscriptions.md deleted file mode 100644 index 45931a0065..0000000000 --- a/changelog/manu-subscriptions.md +++ /dev/null @@ -1,2 +0,0 @@ -### Fixed -- Subnets subscription: Avoid dynamic subscribing blocking in case not enough peers per subnets are found. \ No newline at end of file diff --git a/changelog/manu-wait-init-custody-info.md b/changelog/manu-wait-init-custody-info.md deleted file mode 100644 index 28650d4d3e..0000000000 --- a/changelog/manu-wait-init-custody-info.md +++ /dev/null @@ -1,2 +0,0 @@ -### Fixed -- Wait for custody info to be initialized before querying them. 
\ No newline at end of file diff --git a/changelog/muzry_fix_get_block_attestation_v2.md b/changelog/muzry_fix_get_block_attestation_v2.md deleted file mode 100644 index e5e2046279..0000000000 --- a/changelog/muzry_fix_get_block_attestation_v2.md +++ /dev/null @@ -1,3 +0,0 @@ -### Fixed - -- Fix getBlockAttestationsV2 to return [] instead of null when data is empty diff --git a/changelog/muzry_fix_produce_sync_committee.md b/changelog/muzry_fix_produce_sync_committee.md deleted file mode 100644 index e6ff7c707d..0000000000 --- a/changelog/muzry_fix_produce_sync_committee.md +++ /dev/null @@ -1,4 +0,0 @@ -### Fixed - -- Fix ProduceSyncCommitteeContribution not returning error when committee index is out of range - diff --git a/changelog/muzry_fix_prysmctl_panics.md b/changelog/muzry_fix_prysmctl_panics.md deleted file mode 100644 index 1fdb66009d..0000000000 --- a/changelog/muzry_fix_prysmctl_panics.md +++ /dev/null @@ -1,3 +0,0 @@ -### Fixed - -- Fix prysmctl panic when baseFee is not set in genesis.json diff --git a/changelog/muzry_fix_state_randao.md b/changelog/muzry_fix_state_randao.md deleted file mode 100644 index e69a7d92b1..0000000000 --- a/changelog/muzry_fix_state_randao.md +++ /dev/null @@ -1,3 +0,0 @@ -### Fixed - -- Fix getStateRandao not returning historic RANDAO mix values diff --git a/changelog/muzry_fix_submit_pool_sync_committee_signatures.md b/changelog/muzry_fix_submit_pool_sync_committee_signatures.md deleted file mode 100644 index a112f2d3eb..0000000000 --- a/changelog/muzry_fix_submit_pool_sync_committee_signatures.md +++ /dev/null @@ -1,3 +0,0 @@ -### Fixed - -- Fixed align submitPoolSyncCommitteeSignatures response with Beacon API specification diff --git a/changelog/muzry_fix_unremove_empty_dir.md b/changelog/muzry_fix_unremove_empty_dir.md deleted file mode 100644 index ad667819ef..0000000000 --- a/changelog/muzry_fix_unremove_empty_dir.md +++ /dev/null @@ -1,3 +0,0 @@ -### Fixed - -- Fixed the issue of empty dirs not being deleted when using –blob-storage-layout=by-epoch diff --git a/changelog/muzry_fix_validator_client_committee_index.md b/changelog/muzry_fix_validator_client_committee_index.md deleted file mode 100644 index e9b29335ce..0000000000 --- a/changelog/muzry_fix_validator_client_committee_index.md +++ /dev/null @@ -1,3 +0,0 @@ -### Fixed - -- Fixed incorrect attestation data request where the assigned committee index was used after Electra, instead of 0. diff --git a/changelog/muzry_update_not_found_status.md b/changelog/muzry_update_not_found_status.md deleted file mode 100644 index 73b414add8..0000000000 --- a/changelog/muzry_update_not_found_status.md +++ /dev/null @@ -1,2 +0,0 @@ -### Fixed -- Fixed endpoint response to return 404 or 400 after isOptimistic check \ No newline at end of file diff --git a/changelog/nisdas_mv_slice_permanent.md b/changelog/nisdas_mv_slice_permanent.md deleted file mode 100644 index ebe7937f76..0000000000 --- a/changelog/nisdas_mv_slice_permanent.md +++ /dev/null @@ -1,3 +0,0 @@ -### Changed - -- Makes the multivalue slice permanent in the state and removes old paths. 
\ No newline at end of file diff --git a/changelog/pop_fix-bug.md b/changelog/pop_fix-bug.md deleted file mode 100644 index b1bf9ef0fe..0000000000 --- a/changelog/pop_fix-bug.md +++ /dev/null @@ -1,2 +0,0 @@ -### Fixed -- Fix bug where arguments of fillInForkChoiceMissingBlocks were incorrectly placed diff --git a/changelog/pop_fix-subnet.md b/changelog/pop_fix-subnet.md deleted file mode 100644 index 062eeab94c..0000000000 --- a/changelog/pop_fix-subnet.md +++ /dev/null @@ -1,2 +0,0 @@ -### Fixed -- Fix bug where stale computed value in closure excludes newly required (eg attestation) subscriptions. diff --git a/changelog/potuz_add_publishv2_metric.md b/changelog/potuz_add_publishv2_metric.md deleted file mode 100644 index 02667e08ca..0000000000 --- a/changelog/potuz_add_publishv2_metric.md +++ /dev/null @@ -1,3 +0,0 @@ -### Added - -- Add timing metric `publish_block_v2_duration_milliseconds` to measure processing duration of the `PublishBlockV2` beacon API endpoint. \ No newline at end of file diff --git a/changelog/potuz_avoid_exit_info.md b/changelog/potuz_avoid_exit_info.md deleted file mode 100644 index b21477b397..0000000000 --- a/changelog/potuz_avoid_exit_info.md +++ /dev/null @@ -1,3 +0,0 @@ -### Fixed - -- Avoid unnecessary calls to `ExitInformation()`. diff --git a/changelog/potuz_change_error_message.md b/changelog/potuz_change_error_message.md deleted file mode 100644 index 47981fec71..0000000000 --- a/changelog/potuz_change_error_message.md +++ /dev/null @@ -1,2 +0,0 @@ -### Ignored -- Fix error message. diff --git a/changelog/potuz_change_insertchain.md b/changelog/potuz_change_insertchain.md deleted file mode 100644 index 3c082a6f97..0000000000 --- a/changelog/potuz_change_insertchain.md +++ /dev/null @@ -1,3 +0,0 @@ -### Ignored - -- Change InsertChain to use be increasingly ordered and insert the last block of the chain. diff --git a/changelog/potuz_double_receive_block.md b/changelog/potuz_double_receive_block.md deleted file mode 100644 index d55f5f5e19..0000000000 --- a/changelog/potuz_double_receive_block.md +++ /dev/null @@ -1,3 +0,0 @@ -### Fixed - -- Prevent a race on double `ReceiveBlock`. diff --git a/changelog/potuz_eip_7917.md b/changelog/potuz_eip_7917.md deleted file mode 100644 index 9764e0c8c1..0000000000 --- a/changelog/potuz_eip_7917.md +++ /dev/null @@ -1,3 +0,0 @@ -### Added - -- Implement EIP-7917: Stable proposer lookahead. diff --git a/changelog/potuz_event_happy_path.md b/changelog/potuz_event_happy_path.md deleted file mode 100644 index 24958b3065..0000000000 --- a/changelog/potuz_event_happy_path.md +++ /dev/null @@ -1,2 +0,0 @@ -### Fixed -- Trigger payload attribute event as soon as an early block is processed. diff --git a/changelog/potuz_fix_forkchoice_startup.md b/changelog/potuz_fix_forkchoice_startup.md deleted file mode 100644 index eace33d3ec..0000000000 --- a/changelog/potuz_fix_forkchoice_startup.md +++ /dev/null @@ -1,3 +0,0 @@ -### Fixed - -- Fixed an off-by-one in forkchoice startup. diff --git a/changelog/potuz_fix_initialize_lookahead.md b/changelog/potuz_fix_initialize_lookahead.md deleted file mode 100644 index 17e67b7ada..0000000000 --- a/changelog/potuz_fix_initialize_lookahead.md +++ /dev/null @@ -1,2 +0,0 @@ -### Fixed -- Fixed lookahead initialization at the fulu fork. 
diff --git a/changelog/potuz_fix_races_in_tests.md b/changelog/potuz_fix_races_in_tests.md deleted file mode 100644 index 4f143e791b..0000000000 --- a/changelog/potuz_fix_races_in_tests.md +++ /dev/null @@ -1,3 +0,0 @@ -### Ignored - -- Fix races in tests that cause nil panics. diff --git a/changelog/potuz_fix_stater.md b/changelog/potuz_fix_stater.md deleted file mode 100644 index ffb8f718a8..0000000000 --- a/changelog/potuz_fix_stater.md +++ /dev/null @@ -1,2 +0,0 @@ -### Fixed -- Fix the validateConsensus endpoint handler. diff --git a/changelog/potuz_next_epoch_proposer_duties.md b/changelog/potuz_next_epoch_proposer_duties.md deleted file mode 100644 index 7b7e17460e..0000000000 --- a/changelog/potuz_next_epoch_proposer_duties.md +++ /dev/null @@ -1,3 +0,0 @@ -### Fixed - -- Fix next epoch proposer duties in Fulu by advancing the state to the beginning of the current epoch. diff --git a/changelog/potuz_process_pending_atts.md b/changelog/potuz_process_pending_atts.md deleted file mode 100644 index 961591e121..0000000000 --- a/changelog/potuz_process_pending_atts.md +++ /dev/null @@ -1,3 +0,0 @@ -### Changed - -- Process pending attestations as soon as the block arrives. diff --git a/changelog/potuz_remove_bls_broadcast.md b/changelog/potuz_remove_bls_broadcast.md deleted file mode 100644 index 6926cd6f10..0000000000 --- a/changelog/potuz_remove_bls_broadcast.md +++ /dev/null @@ -1,3 +0,0 @@ -### Ignored - -- Remove broadcast of BLS changes at the Capella fork. diff --git a/changelog/potuz_reorg_safe_vc.md b/changelog/potuz_reorg_safe_vc.md deleted file mode 100644 index 3f44edd6f9..0000000000 --- a/changelog/potuz_reorg_safe_vc.md +++ /dev/null @@ -1,6 +0,0 @@ -### Added - -- Add a feature flag `--disable-duties-polling` to disable duties polling according to the dependent root. -### Fixed - -- Make the validator reorg resistant using dependent root on epoch boundaries. diff --git a/changelog/potuz_start_from_justified.md b/changelog/potuz_start_from_justified.md deleted file mode 100644 index 93c0c2029e..0000000000 --- a/changelog/potuz_start_from_justified.md +++ /dev/null @@ -1,2 +0,0 @@ -### Changed -- Start from justified checkpoint by default. diff --git a/changelog/potuz_update_gohashtree.md b/changelog/potuz_update_gohashtree.md deleted file mode 100644 index 52b01b44f5..0000000000 --- a/changelog/potuz_update_gohashtree.md +++ /dev/null @@ -1,3 +0,0 @@ -### Changed - -- Update gohashtree to v0.0.5-beta. diff --git a/changelog/potuz_update_quic_go.md b/changelog/potuz_update_quic_go.md deleted file mode 100644 index cf304f3826..0000000000 --- a/changelog/potuz_update_quic_go.md +++ /dev/null @@ -1,3 +0,0 @@ -### Changed - -- Updated quick-go to latest version. diff --git a/changelog/potuz_update_spectest.md b/changelog/potuz_update_spectest.md deleted file mode 100644 index 8ee456f73f..0000000000 --- a/changelog/potuz_update_spectest.md +++ /dev/null @@ -1,3 +0,0 @@ -### Added - -- Update spectests to 1.6.0-beta.0 diff --git a/changelog/pvl-blob-cache-panic.md b/changelog/pvl-blob-cache-panic.md deleted file mode 100644 index 97542a264b..0000000000 --- a/changelog/pvl-blob-cache-panic.md +++ /dev/null @@ -1,3 +0,0 @@ -### Fixed - -- Fixed a condition where the blob cache could panic when there were less than or no sidecars in the cache entry. 
diff --git a/changelog/pvl-blst-go-repo.md b/changelog/pvl-blst-go-repo.md deleted file mode 100644 index b32e3a5efc..0000000000 --- a/changelog/pvl-blst-go-repo.md +++ /dev/null @@ -1,3 +0,0 @@ -### Changed - -- Changed blst dependency from `http_archive` to `go_repository` so that gazelle can keep it in sync with go.mod. diff --git a/changelog/pvl-debug-log.md b/changelog/pvl-debug-log.md deleted file mode 100644 index c92fda5708..0000000000 --- a/changelog/pvl-debug-log.md +++ /dev/null @@ -1,3 +0,0 @@ -### Added - -- Added more metadata for debug logs when initial sync requests fail for "invalid data returned from peer" errors diff --git a/changelog/pvl-downscore-init-sync.md b/changelog/pvl-downscore-init-sync.md deleted file mode 100644 index fafd3ddac0..0000000000 --- a/changelog/pvl-downscore-init-sync.md +++ /dev/null @@ -1,3 +0,0 @@ -### Changed - -- Prysm will now downscore peers that return invalid block_by_range responses. diff --git a/changelog/pvl-erigon-agent.md b/changelog/pvl-erigon-agent.md deleted file mode 100644 index 5a4980cbb5..0000000000 --- a/changelog/pvl-erigon-agent.md +++ /dev/null @@ -1,3 +0,0 @@ -### Added - -- Added erigon/caplin to known p2p agent strings. diff --git a/changelog/pvl-fulu-StreamBlocksAltair.md b/changelog/pvl-fulu-StreamBlocksAltair.md deleted file mode 100644 index 094e75b5bb..0000000000 --- a/changelog/pvl-fulu-StreamBlocksAltair.md +++ /dev/null @@ -1,3 +0,0 @@ -### Added - -- Added fulu block support to StreamBlocksAltair diff --git a/changelog/pvl-fulu-prevent-datacolumns-oob.md b/changelog/pvl-fulu-prevent-datacolumns-oob.md deleted file mode 100644 index 44d1c94a9f..0000000000 --- a/changelog/pvl-fulu-prevent-datacolumns-oob.md +++ /dev/null @@ -1,3 +0,0 @@ -### Fixed - -- Safeguard against accidental out of bounds array access in dataColumnSidecars method. diff --git a/changelog/pvl-gh-runners.md b/changelog/pvl-gh-runners.md deleted file mode 100644 index c27411c813..0000000000 --- a/changelog/pvl-gh-runners.md +++ /dev/null @@ -1,3 +0,0 @@ -### Ignored - -- Changed github action runners from `ubuntu-latest` to `ubuntu-4` diff --git a/changelog/pvl-go-1.24.5.md b/changelog/pvl-go-1.24.5.md deleted file mode 100644 index 5f6a982643..0000000000 --- a/changelog/pvl-go-1.24.5.md +++ /dev/null @@ -1,3 +0,0 @@ -### Security - -- Updated go to version 1.24.5 diff --git a/changelog/pvl-go-1.24.6.md b/changelog/pvl-go-1.24.6.md deleted file mode 100644 index 106058ff90..0000000000 --- a/changelog/pvl-go-1.24.6.md +++ /dev/null @@ -1,3 +0,0 @@ -### Security - -- Updated go to version 1.24.6 diff --git a/changelog/pvl-go-1.25.md b/changelog/pvl-go-1.25.md deleted file mode 100644 index b3348dd207..0000000000 --- a/changelog/pvl-go-1.25.md +++ /dev/null @@ -1,5 +0,0 @@ -### Changed - -- Updated go to v1.25.1 -- Updated rules_go to v0.57.0 -- Updated protobuf to 28.3 diff --git a/changelog/pvl-p2p-colocation-whitelist.md b/changelog/pvl-p2p-colocation-whitelist.md deleted file mode 100644 index b6a97afd29..0000000000 --- a/changelog/pvl-p2p-colocation-whitelist.md +++ /dev/null @@ -1,3 +0,0 @@ -### Added - -- Added flag `--p2p-colocation-whitelist` to accept CIDRs which will bypass the p2p colocation restrictions. diff --git a/changelog/pvl-peer-metrics-fix.md b/changelog/pvl-peer-metrics-fix.md deleted file mode 100644 index f217431929..0000000000 --- a/changelog/pvl-peer-metrics-fix.md +++ /dev/null @@ -1,3 +0,0 @@ -### Fixed - -- Correctly clear disconnected peers from `connected_libp2p_peers` and `connected_libp2p_peers_average_scores`. 
diff --git a/changelog/pvl-peerdas-peer-fanout.md b/changelog/pvl-peerdas-peer-fanout.md deleted file mode 100644 index a4e03fdbb1..0000000000 --- a/changelog/pvl-peerdas-peer-fanout.md +++ /dev/null @@ -1,3 +0,0 @@ -### Added - -- Configured the beacon node to seek peers when we have validator custody requirements. If one or more validators are connected to the beacon node, then the beacon node should seek a diverse set of peers such that broadcasting to all data column subnets for a block proposal is more efficient. diff --git a/changelog/pvl-regression-15369.md b/changelog/pvl-regression-15369.md deleted file mode 100644 index 31095dff55..0000000000 --- a/changelog/pvl-regression-15369.md +++ /dev/null @@ -1,3 +0,0 @@ -### Fixed - -- Added regression test for [PR 15369](https://github.com/OffchainLabs/prysm/pull/15369) diff --git a/changelog/pvl-rm-eth1voting-tool.md b/changelog/pvl-rm-eth1voting-tool.md deleted file mode 100644 index 76d4d0d72a..0000000000 --- a/changelog/pvl-rm-eth1voting-tool.md +++ /dev/null @@ -1,3 +0,0 @@ -### Removed - -- Removed //tools/eth1voting tool. This is no longer needed as the beacon chain no longer uses eth1data voting since Electra. diff --git a/changelog/pvl-slasherkv-timeout.md b/changelog/pvl-slasherkv-timeout.md deleted file mode 100644 index 7f632f8f4f..0000000000 --- a/changelog/pvl-slasherkv-timeout.md +++ /dev/null @@ -1,2 +0,0 @@ -### Added -- Added a 1 minute timeout on PruneAttestationOnEpoch operations to prevent very large bolt transactions diff --git a/changelog/pvl-spectest-size.md b/changelog/pvl-spectest-size.md deleted file mode 100644 index de28e6b473..0000000000 --- a/changelog/pvl-spectest-size.md +++ /dev/null @@ -1,3 +0,0 @@ -### Ignored - -- CI: Changed test size for //testing/spectest/mainnet:go_default_test diff --git a/changelog/pvl-strip.md b/changelog/pvl-strip.md deleted file mode 100644 index 5f9cd58bfd..0000000000 --- a/changelog/pvl-strip.md +++ /dev/null @@ -1,3 +0,0 @@ -### Changed - -- Bazel builds with `--config=release` now properly apply `--strip=always` to strip debug symbols from the release assets. diff --git a/changelog/pvl-testing-context.md b/changelog/pvl-testing-context.md deleted file mode 100644 index 67e1d73a88..0000000000 --- a/changelog/pvl-testing-context.md +++ /dev/null @@ -1,3 +0,0 @@ -### Ignored - -- Replaced context.Background, etc to use t.Context. This is only in test code and does not change much. diff --git a/changelog/pvl-time.md b/changelog/pvl-time.md deleted file mode 100644 index 28917e32d6..0000000000 --- a/changelog/pvl-time.md +++ /dev/null @@ -1,3 +0,0 @@ -### Changed - -- Changed `uint64` genesis time to use `time.Time`. Also did some refactoring and cleanup that was enabled by these changes. diff --git a/changelog/pvl-update-cc-debian11.md b/changelog/pvl-update-cc-debian11.md deleted file mode 100644 index e0a17c3a9a..0000000000 --- a/changelog/pvl-update-cc-debian11.md +++ /dev/null @@ -1,3 +0,0 @@ -### Security - -- Updated distroless/cc-debian11 to latest to resolve CVE-2024-2961. 
diff --git a/changelog/pvl-update-eth-clients.md b/changelog/pvl-update-eth-clients.md deleted file mode 100644 index 3f85574cce..0000000000 --- a/changelog/pvl-update-eth-clients.md +++ /dev/null @@ -1,6 +0,0 @@ -### Ignored - -- Updated pinned commit for eth-clients/holesky -- Updated pinned commit for eth-clients/hoodi -- Updated pinned commit for eth-clients/sepolia -- Removed deprecated dependency for eth-clients/eth2-networks diff --git a/changelog/pvl-v6.0.4.md b/changelog/pvl-v6.0.4.md deleted file mode 100644 index 1985d30ec3..0000000000 --- a/changelog/pvl-v6.0.4.md +++ /dev/null @@ -1,3 +0,0 @@ -### Ignored - -- Added changelog for v6.0.4 diff --git a/changelog/pvl-v6.1.2.md b/changelog/pvl-v6.1.2.md new file mode 100644 index 0000000000..6d2726fa8c --- /dev/null +++ b/changelog/pvl-v6.1.2.md @@ -0,0 +1,3 @@ +### Ignored + +- Changelog entries for v6.1.2 through v6.0.5 diff --git a/changelog/pvl_gcr_offchain.md b/changelog/pvl_gcr_offchain.md deleted file mode 100644 index 31e8f473c5..0000000000 --- a/changelog/pvl_gcr_offchain.md +++ /dev/null @@ -1,3 +0,0 @@ -### Changed - -- Updated docker images URL to gcr.io/offchainlabs/prysm diff --git a/changelog/radek_agg-sc-messages.md b/changelog/radek_agg-sc-messages.md deleted file mode 100644 index eb27548f89..0000000000 --- a/changelog/radek_agg-sc-messages.md +++ /dev/null @@ -1,3 +0,0 @@ -### Added - -- Aggregate and pack sync committee messages into blocks. \ No newline at end of file diff --git a/changelog/radek_consensus-value-unavailable.md b/changelog/radek_consensus-value-unavailable.md deleted file mode 100644 index a93b28e7d8..0000000000 --- a/changelog/radek_consensus-value-unavailable.md +++ /dev/null @@ -1,3 +0,0 @@ -### Fixed - -- Return zero value for `Eth-Consensus-Block-Value` on error to avoid missed block proposals. \ No newline at end of file diff --git a/changelog/radek_do-not-compare-liveness.md b/changelog/radek_do-not-compare-liveness.md deleted file mode 100644 index 228b3059c6..0000000000 --- a/changelog/radek_do-not-compare-liveness.md +++ /dev/null @@ -1,3 +0,0 @@ -### Changed - -- Do not compare liveness response with LH in e2e Beacon API evaluator. \ No newline at end of file diff --git a/changelog/radek_duplicate-sync-aggregate.md b/changelog/radek_duplicate-sync-aggregate.md deleted file mode 100644 index 1b7d35a5ad..0000000000 --- a/changelog/radek_duplicate-sync-aggregate.md +++ /dev/null @@ -1,3 +0,0 @@ -### Fixed - -- Don't submit duplicate `SignedContributionAndProof` messages. \ No newline at end of file diff --git a/changelog/radek_electra-committee-assertion.md b/changelog/radek_electra-committee-assertion.md deleted file mode 100644 index 355e2e20a2..0000000000 --- a/changelog/radek_electra-committee-assertion.md +++ /dev/null @@ -1,3 +0,0 @@ -### Fixed - -- Guard against no attesters within committee in `VerifyAttestationNoVerifySignature`. \ No newline at end of file diff --git a/changelog/radek_fix-gzip.md b/changelog/radek_fix-gzip.md deleted file mode 100644 index 9cbc37d3c1..0000000000 --- a/changelog/radek_fix-gzip.md +++ /dev/null @@ -1,3 +0,0 @@ -### Fixed - -- Write `Content-Encoding` header in the response properly when gzip encoding is requested. 
\ No newline at end of file diff --git a/changelog/radek_fix-max-epoch-calculation-once.md b/changelog/radek_fix-max-epoch-calculation-once.md deleted file mode 100644 index e3c1575651..0000000000 --- a/changelog/radek_fix-max-epoch-calculation-once.md +++ /dev/null @@ -1,3 +0,0 @@ -### Changed - -- Pre-calculate exit epoch, churn and active balance before processing slashings to reduce CPU load. \ No newline at end of file diff --git a/changelog/radek_redesign-pending-att-queue.md b/changelog/radek_redesign-pending-att-queue.md deleted file mode 100644 index 0e90e953dc..0000000000 --- a/changelog/radek_redesign-pending-att-queue.md +++ /dev/null @@ -1,3 +0,0 @@ -### Changed - -- Redesign the pending attestation queue. \ No newline at end of file diff --git a/changelog/radek_reorganize-lc-processing.md b/changelog/radek_reorganize-lc-processing.md deleted file mode 100644 index bd484657fb..0000000000 --- a/changelog/radek_reorganize-lc-processing.md +++ /dev/null @@ -1,3 +0,0 @@ -### Ignored - -- Reorganize processing of light client updates. \ No newline at end of file diff --git a/changelog/radek_ssz-pending.md b/changelog/radek_ssz-pending.md deleted file mode 100644 index ccfb98b7eb..0000000000 --- a/changelog/radek_ssz-pending.md +++ /dev/null @@ -1,3 +0,0 @@ -### Fixed - -- Allow SSZ requests for pending deposits, partial withdrawals and consolidations. \ No newline at end of file diff --git a/changelog/radek_state-fuzz-gc.md b/changelog/radek_state-fuzz-gc.md deleted file mode 100644 index b8a5c73ce5..0000000000 --- a/changelog/radek_state-fuzz-gc.md +++ /dev/null @@ -1,3 +0,0 @@ -### Changed - -- Reclaim memory manually in some tests that fuzz the beacon state. \ No newline at end of file diff --git a/changelog/radek_update-github-bug-template.md b/changelog/radek_update-github-bug-template.md deleted file mode 100644 index 071d5aa46c..0000000000 --- a/changelog/radek_update-github-bug-template.md +++ /dev/null @@ -1,3 +0,0 @@ -### Ignored - -- Update Github bug template. \ No newline at end of file diff --git a/changelog/raulk_beacon-api-metadata.md b/changelog/raulk_beacon-api-metadata.md deleted file mode 100644 index 7f52416f2d..0000000000 --- a/changelog/raulk_beacon-api-metadata.md +++ /dev/null @@ -1,3 +0,0 @@ -### Added - -- Added new metadata fields (attnets,syncnets,custody_group_count) to `/eth/v1/node/identity`. \ No newline at end of file diff --git a/changelog/rose2221-develop.md b/changelog/rose2221-develop.md deleted file mode 100644 index c8cca2f58c..0000000000 --- a/changelog/rose2221-develop.md +++ /dev/null @@ -1,5 +0,0 @@ -### Added - -- **Gzip Compression for Beacon API:** - Fixed an issue where the beacon chain server ignored the `Accept-Encoding: gzip` header and returned uncompressed JSON responses. With this change, endpoints that use the `AcceptHeaderHandler` now also compress responses when a client requests gzip encoding. - Fixes [#14593](https://github.com/prysmaticlabs/prysm/issues/14593). 
diff --git a/changelog/sahil-4555-refactor-to-reflect-typefor.md b/changelog/sahil-4555-refactor-to-reflect-typefor.md deleted file mode 100644 index 9a59e0a0eb..0000000000 --- a/changelog/sahil-4555-refactor-to-reflect-typefor.md +++ /dev/null @@ -1,3 +0,0 @@ -### Changed - -- Replaced reflect.TypeOf with reflect.TypeFor diff --git a/changelog/sahil-4555-refactor-to-use-atomic-types.md b/changelog/sahil-4555-refactor-to-use-atomic-types.md deleted file mode 100644 index c2bb2b3c64..0000000000 --- a/changelog/sahil-4555-refactor-to-use-atomic-types.md +++ /dev/null @@ -1,3 +0,0 @@ -### Changed - -- Changed old atomic functions to new atomic.Int for safer and clearer code. diff --git a/changelog/sahil-4555-use-inbuilt-max-min.md b/changelog/sahil-4555-use-inbuilt-max-min.md deleted file mode 100644 index cf20495845..0000000000 --- a/changelog/sahil-4555-use-inbuilt-max-min.md +++ /dev/null @@ -1,3 +0,0 @@ -### Fixed - -- switch to built-in min/max diff --git a/changelog/sashass1315_fix-priority-queue-pop-lock-race.md b/changelog/sashass1315_fix-priority-queue-pop-lock-race.md deleted file mode 100644 index 237d576b54..0000000000 --- a/changelog/sashass1315_fix-priority-queue-pop-lock-race.md +++ /dev/null @@ -1,2 +0,0 @@ -### Fixed -- fix race in PriorityQueue.Pop by checking emptiness under write lock. (#15726) diff --git a/changelog/satushh-getblobsv2-retry.md b/changelog/satushh-getblobsv2-retry.md deleted file mode 100644 index 741efeae6e..0000000000 --- a/changelog/satushh-getblobsv2-retry.md +++ /dev/null @@ -1,3 +0,0 @@ -### Added -- Add retry logic when GetBlobsV2 is called. -- Call GetBlobsV2 as soon as we receive the first data column sidecar or block diff --git a/changelog/syjn99_initialize-ssz-ql.md b/changelog/syjn99_initialize-ssz-ql.md deleted file mode 100644 index 2d3448b448..0000000000 --- a/changelog/syjn99_initialize-ssz-ql.md +++ /dev/null @@ -1,3 +0,0 @@ -### Added - -- Initialize package for SSZ Query Language. diff --git a/changelog/syjn99_persist-p2p-seqnum.md b/changelog/syjn99_persist-p2p-seqnum.md deleted file mode 100644 index 9f6a583ee7..0000000000 --- a/changelog/syjn99_persist-p2p-seqnum.md +++ /dev/null @@ -1,7 +0,0 @@ -### Fixed - -- Fixed [#15544](https://github.com/OffchainLabs/prysm/issues/15544): Persist metadata sequence number if it is needed (e.g., use static peer ID option or Fulu enabled). - -### Deprecated - -- Deprecated `p2p-metadata` flag. \ No newline at end of file diff --git a/changelog/syjn99_refactor-htrutil.md b/changelog/syjn99_refactor-htrutil.md deleted file mode 100644 index e3ae3ebfcd..0000000000 --- a/changelog/syjn99_refactor-htrutil.md +++ /dev/null @@ -1,3 +0,0 @@ -### Changed - -- Refactor `htrutil.go` by removing redundant codes. diff --git a/changelog/syjn99_save-state-efficient-fulu.md b/changelog/syjn99_save-state-efficient-fulu.md deleted file mode 100644 index 97788a0ce3..0000000000 --- a/changelog/syjn99_save-state-efficient-fulu.md +++ /dev/null @@ -1,3 +0,0 @@ -### Added - -- Add Fulu case for `saveStatesEfficientInternal` \ No newline at end of file diff --git a/changelog/syjn99_ssz-ql-bitlist-bitvector.md b/changelog/syjn99_ssz-ql-bitlist-bitvector.md deleted file mode 100644 index 956e18b0cb..0000000000 --- a/changelog/syjn99_ssz-ql-bitlist-bitvector.md +++ /dev/null @@ -1,3 +0,0 @@ -### Added - -- SSZ-QL: Handle `Bitlist` and `Bitvector` types. 
diff --git a/changelog/syjn99_ssz-ql-list.md b/changelog/syjn99_ssz-ql-list.md deleted file mode 100644 index baf304da2e..0000000000 --- a/changelog/syjn99_ssz-ql-list.md +++ /dev/null @@ -1,3 +0,0 @@ -### Added - -- Support `List` type for SSZ-QL. diff --git a/changelog/syjn99_ssz-ql-nested-list.md b/changelog/syjn99_ssz-ql-nested-list.md deleted file mode 100644 index b45b0393f4..0000000000 --- a/changelog/syjn99_ssz-ql-nested-list.md +++ /dev/null @@ -1,3 +0,0 @@ -### Fixed - -- SSZ-QL: Support nested `List` type (e.g., `ExecutionPayload.Transactions`) diff --git a/changelog/syjn99_ssz-ql-tag-parser.md b/changelog/syjn99_ssz-ql-tag-parser.md deleted file mode 100644 index acc2c63ae6..0000000000 --- a/changelog/syjn99_ssz-ql-tag-parser.md +++ /dev/null @@ -1,4 +0,0 @@ -### Added - -- SSZ-QL: Add element information for `Vector` type. -- SSZ-QL: Support multi-dimensional tag parsing. diff --git a/changelog/tomasandroil_replace_grpc_gateway_flag_name.md b/changelog/tomasandroil_replace_grpc_gateway_flag_name.md deleted file mode 100644 index 81ebdaacdd..0000000000 --- a/changelog/tomasandroil_replace_grpc_gateway_flag_name.md +++ /dev/null @@ -1,2 +0,0 @@ -### Changed -- Replaced hardcoded `grpc-gateway-port` with `flags.HTTPServerPort.Name` in `testing/endtoend/components/validator.go`, resolving an inline TODO for improved flag consistency. diff --git a/changelog/tt_45.md b/changelog/tt_45.md deleted file mode 100644 index 3aba907c36..0000000000 --- a/changelog/tt_45.md +++ /dev/null @@ -1,3 +0,0 @@ -### Changed - -- Increase mainnet DefaultBuilderGasLimit from 36M to 45M \ No newline at end of file diff --git a/changelog/tt_beans.md b/changelog/tt_beans.md deleted file mode 100644 index 6af86cd185..0000000000 --- a/changelog/tt_beans.md +++ /dev/null @@ -1,3 +0,0 @@ -### Added - -- Slot aware cache for seen data column gossip p2p to reduce memory usages. \ No newline at end of file diff --git a/changelog/tt_check_pending_att.md b/changelog/tt_check_pending_att.md deleted file mode 100644 index c8224a7544..0000000000 --- a/changelog/tt_check_pending_att.md +++ /dev/null @@ -1,3 +0,0 @@ -### Changed - -- Check pending block is in forkchoice before importing pending attestation. diff --git a/changelog/tt_chicken.md b/changelog/tt_chicken.md deleted file mode 100644 index 48a1a87e09..0000000000 --- a/changelog/tt_chicken.md +++ /dev/null @@ -1,3 +0,0 @@ -### Added - -- Add log capitalization analyzer and apply changes across codebase. \ No newline at end of file diff --git a/changelog/tt_duty.md b/changelog/tt_duty.md deleted file mode 100644 index ffa0e00d6c..0000000000 --- a/changelog/tt_duty.md +++ /dev/null @@ -1,3 +0,0 @@ -### Fixed - -- Beacon-api proposer duty fulu computation \ No newline at end of file diff --git a/changelog/tt_egg.md b/changelog/tt_egg.md deleted file mode 100644 index fd0786c21e..0000000000 --- a/changelog/tt_egg.md +++ /dev/null @@ -1,3 +0,0 @@ -### Fixed - -- Fix blob metric name for peer count \ No newline at end of file diff --git a/changelog/tt_fish.md b/changelog/tt_fish.md deleted file mode 100644 index db25bbc80c..0000000000 --- a/changelog/tt_fish.md +++ /dev/null @@ -1,3 +0,0 @@ -### Added - -- Data column support for beacon api event end point \ No newline at end of file diff --git a/changelog/tt_formula_mlk.md b/changelog/tt_formula_mlk.md deleted file mode 100644 index b0b0890671..0000000000 --- a/changelog/tt_formula_mlk.md +++ /dev/null @@ -1,7 +0,0 @@ -### Changed - -- Attest timely is now default. `attest-timely` flag is now deprecated. 
- -### Added - -- `disable-attest-timely` flag to disable attest timely. \ No newline at end of file diff --git a/changelog/tt_fugu_.md b/changelog/tt_fugu_.md deleted file mode 100644 index 76f1b51705..0000000000 --- a/changelog/tt_fugu_.md +++ /dev/null @@ -1,3 +0,0 @@ -### Removed - -- Deneb and electra entries from blob schedule. \ No newline at end of file diff --git a/changelog/tt_milk.md b/changelog/tt_milk.md deleted file mode 100644 index c581106290..0000000000 --- a/changelog/tt_milk.md +++ /dev/null @@ -1,3 +0,0 @@ -### Removed - -- Remove deposit count from sync new block log \ No newline at end of file diff --git a/changelog/tt_noodles.md b/changelog/tt_noodles.md deleted file mode 100644 index 74b3c0b3d3..0000000000 --- a/changelog/tt_noodles.md +++ /dev/null @@ -1,7 +0,0 @@ -### Added - -- new `--batch-verifier-limit` flag to configure max number of signatures to batch verify on gossip - -### Changed - -- default batch signature verification limit increased from 50 to 1000 \ No newline at end of file diff --git a/changelog/tt_onion.md b/changelog/tt_onion.md deleted file mode 100644 index c9b666998c..0000000000 --- a/changelog/tt_onion.md +++ /dev/null @@ -1,3 +0,0 @@ -### Changed - -- Move data col reconstruction log to a more accurate place in the code. \ No newline at end of file diff --git a/changelog/tt_opt-val-lookup.md b/changelog/tt_opt-val-lookup.md deleted file mode 100644 index a48d4cefd7..0000000000 --- a/changelog/tt_opt-val-lookup.md +++ /dev/null @@ -1,3 +0,0 @@ -### Changed - -- Beacon api optimize validator lookup for large batch request size. diff --git a/changelog/tt_post-fulu-mev-boost-protocol.md b/changelog/tt_post-fulu-mev-boost-protocol.md deleted file mode 100644 index 363daa931b..0000000000 --- a/changelog/tt_post-fulu-mev-boost-protocol.md +++ /dev/null @@ -1,3 +0,0 @@ -### Added - -- Implement post-Fulu MEV-boost protocol changes where relays only return status codes for blinded block submissions. 
\ No newline at end of file diff --git a/changelog/tt_state_root_debug.md b/changelog/tt_state_root_debug.md deleted file mode 100644 index b203ba4b2d..0000000000 --- a/changelog/tt_state_root_debug.md +++ /dev/null @@ -1,3 +0,0 @@ -### Changed - -- Include state root in StateNotFoundError for better debugging of consensus validation failures \ No newline at end of file diff --git a/changelog/tt_steak.md b/changelog/tt_steak.md deleted file mode 100644 index cc849d0905..0000000000 --- a/changelog/tt_steak.md +++ /dev/null @@ -1,3 +0,0 @@ -### Changed - -- Remove "invalid" from logs for incoming blob sidecar that is missing parent or out of range slot \ No newline at end of file diff --git a/changelog/tt_sushi.md b/changelog/tt_sushi.md deleted file mode 100644 index 8c437153de..0000000000 --- a/changelog/tt_sushi.md +++ /dev/null @@ -1,3 +0,0 @@ -### Changed - -- Optimize proposer inclusion proof calcuation by pre caching subtries diff --git a/changelog/ttsao_add-fulu-fork-transition-tests.md b/changelog/ttsao_add-fulu-fork-transition-tests.md deleted file mode 100644 index a4d4c4ba85..0000000000 --- a/changelog/ttsao_add-fulu-fork-transition-tests.md +++ /dev/null @@ -1,3 +0,0 @@ -### Added - -- Add Fulu fork transition tests for mainnet and minimal configurations diff --git a/changelog/ttsao_add-fulu-proposer-lookahead-tests.md b/changelog/ttsao_add-fulu-proposer-lookahead-tests.md deleted file mode 100644 index 4f6a77e5d3..0000000000 --- a/changelog/ttsao_add-fulu-proposer-lookahead-tests.md +++ /dev/null @@ -1,3 +0,0 @@ -### Added - -- Fulu proposer lookahead epoch processing tests for mainnet and minimal configurations diff --git a/changelog/ttsao_fix-attestation-cache-key.md b/changelog/ttsao_fix-attestation-cache-key.md deleted file mode 100644 index efe96135ee..0000000000 --- a/changelog/ttsao_fix-attestation-cache-key.md +++ /dev/null @@ -1,3 +0,0 @@ -### Changed - -- Improved sync unaggregated attestation cache key outside of lock path diff --git a/changelog/ttsao_fix-blinded-block-v2-endpoint.md b/changelog/ttsao_fix-blinded-block-v2-endpoint.md deleted file mode 100644 index d17c7f5b32..0000000000 --- a/changelog/ttsao_fix-blinded-block-v2-endpoint.md +++ /dev/null @@ -1,3 +0,0 @@ -### Fixed - -- Use v2 endpoint for blinded block submission post-Fulu diff --git a/changelog/ttsao_fix-equivocation-block-field.md b/changelog/ttsao_fix-equivocation-block-field.md deleted file mode 100644 index 4a0630b6c8..0000000000 --- a/changelog/ttsao_fix-equivocation-block-field.md +++ /dev/null @@ -1,3 +0,0 @@ -### Fixed - -- Fixed NewSignedBeaconBlock calls to use Block field for proper equivocation handling diff --git a/changelog/ttsao_fix-gofmt-formatting.md b/changelog/ttsao_fix-gofmt-formatting.md deleted file mode 100644 index 0d8911c7fe..0000000000 --- a/changelog/ttsao_fix-gofmt-formatting.md +++ /dev/null @@ -1,3 +0,0 @@ -### Ignored - -- Fix Go formatting issues in test files diff --git a/changelog/ttsao_implement-kzg-batch-verification.md b/changelog/ttsao_implement-kzg-batch-verification.md deleted file mode 100644 index db6a119b88..0000000000 --- a/changelog/ttsao_implement-kzg-batch-verification.md +++ /dev/null @@ -1,3 +0,0 @@ -### Added - -- KZG proof batch verification for data column gossip validation diff --git a/changelog/ttsao_move-cache-key-outside-locks.md b/changelog/ttsao_move-cache-key-outside-locks.md deleted file mode 100644 index f3c5df9c71..0000000000 --- a/changelog/ttsao_move-cache-key-outside-locks.md +++ /dev/null @@ -1,3 +0,0 @@ -### Changed - -- Move 
aggregated attestation cache key generation outside of critical locks to improve performance diff --git a/changelog/ttsao_optimize-attestation-batching.md b/changelog/ttsao_optimize-attestation-batching.md deleted file mode 100644 index a03c9039c0..0000000000 --- a/changelog/ttsao_optimize-attestation-batching.md +++ /dev/null @@ -1,3 +0,0 @@ -### Changed - -- Optimize pending attestation processing by adding batching diff --git a/changelog/ttsao_refactor-beacon-core-types.md b/changelog/ttsao_refactor-beacon-core-types.md deleted file mode 100644 index 3bfd0dea4d..0000000000 --- a/changelog/ttsao_refactor-beacon-core-types.md +++ /dev/null @@ -1,3 +0,0 @@ -### Ignored - -- Refactored beacon core types to in beacon_core_types.proto diff --git a/changelog/ttsao_refactor-proto-beacon-blocks.md b/changelog/ttsao_refactor-proto-beacon-blocks.md deleted file mode 100644 index f4353389b8..0000000000 --- a/changelog/ttsao_refactor-proto-beacon-blocks.md +++ /dev/null @@ -1,3 +0,0 @@ -### Ignored - -- Refactored proto definitions by extracting common beacon block types and components into separate files diff --git a/changelog/ttsao_return-early-req-column.md b/changelog/ttsao_return-early-req-column.md deleted file mode 100644 index c24226dc8c..0000000000 --- a/changelog/ttsao_return-early-req-column.md +++ /dev/null @@ -1,3 +0,0 @@ -### Ignored - -- Return early if there's no block for `requestAndSaveMissingDataColumnSidecars` \ No newline at end of file diff --git a/changelog/ttsao_set-fulu-fork-epochs.md b/changelog/ttsao_set-fulu-fork-epochs.md deleted file mode 100644 index ebbbd83b33..0000000000 --- a/changelog/ttsao_set-fulu-fork-epochs.md +++ /dev/null @@ -1,3 +0,0 @@ -### Changed - -- Set Fulu fork epochs for Holesky, Hoodi, and Sepolia testnets diff --git a/changelog/ttsao_simplify-golangci-config.md b/changelog/ttsao_simplify-golangci-config.md deleted file mode 100644 index 515f3a7a79..0000000000 --- a/changelog/ttsao_simplify-golangci-config.md +++ /dev/null @@ -1,3 +0,0 @@ -### Ignored - -- Update golangci-lint config to enable only basic linters that currently pass diff --git a/changelog/ttsao_update-consensus-spec-alpha5.md b/changelog/ttsao_update-consensus-spec-alpha5.md deleted file mode 100644 index 35210417b7..0000000000 --- a/changelog/ttsao_update-consensus-spec-alpha5.md +++ /dev/null @@ -1,3 +0,0 @@ -### Changed - -- Updated consensus spec from v1.6.0-alpha.4 to v1.6.0-alpha.5 with adjusted minimal config parameters diff --git a/changelog/ttsao_update-consensus-spec-v160-alpha4.md b/changelog/ttsao_update-consensus-spec-v160-alpha4.md deleted file mode 100644 index 6f2e3bf968..0000000000 --- a/changelog/ttsao_update-consensus-spec-v160-alpha4.md +++ /dev/null @@ -1,3 +0,0 @@ -### Changed - -- Update consensus spec to v1.6.0-alpha.4 and implement data column support for forkchoice spectests diff --git a/changelog/ttsao_update-ssz-generated.md b/changelog/ttsao_update-ssz-generated.md deleted file mode 100644 index af6c74825c..0000000000 --- a/changelog/ttsao_update-ssz-generated.md +++ /dev/null @@ -1,3 +0,0 @@ -### Ignored - -- Updated SSZ generated files \ No newline at end of file diff --git a/changelog/user-agent-addition-validator-outbound.md b/changelog/user-agent-addition-validator-outbound.md deleted file mode 100644 index a9d204b720..0000000000 --- a/changelog/user-agent-addition-validator-outbound.md +++ /dev/null @@ -1,4 +0,0 @@ -### Added - -- All outbound HTTP requests from the validator client now include a custom `User-Agent` header in the format `Prysm//`. 
This enhances observability and enables upstream systems to correctly identify Prysm validator clients by their name and version. -- Fixes [#15435](https://github.com/OffchainLabs/prysm/issues/15435). From 0aa248e6631912142690929984ac924f31c08ae1 Mon Sep 17 00:00:00 2001 From: satushh Date: Sun, 12 Oct 2025 20:19:45 +0100 Subject: [PATCH 015/103] Fix ignored gossip attestation validation for early arriving attestations (#15840) * debug log * undo debug logs * fix: return if block not available * changelog --- beacon-chain/sync/validate_beacon_attestation.go | 4 ++++ changelog/satushh-gossip.md | 3 +++ 2 files changed, 7 insertions(+) create mode 100644 changelog/satushh-gossip.md diff --git a/beacon-chain/sync/validate_beacon_attestation.go b/beacon-chain/sync/validate_beacon_attestation.go index 4d351f09ce..6deb020078 100644 --- a/beacon-chain/sync/validate_beacon_attestation.go +++ b/beacon-chain/sync/validate_beacon_attestation.go @@ -112,8 +112,12 @@ func (s *Service) validateCommitteeIndexBeaconAttestation( // Verify the block being voted and the processed state is in beaconDB and the block has passed validation if it's in the beaconDB. blockRoot := bytesutil.ToBytes32(data.BeaconBlockRoot) if !s.hasBlockAndState(ctx, blockRoot) { + // Block not yet available - save attestation to pending queue for later processing + // when the block arrives. Return ValidationIgnore so gossip doesn't potentially penalize the peer. s.savePendingAtt(att) + return pubsub.ValidationIgnore, nil } + // Block exists - verify it's in forkchoice (i.e., it's a descendant of the finalized checkpoint) if !s.cfg.chain.InForkchoice(blockRoot) { tracing.AnnotateError(span, blockchain.ErrNotDescendantOfFinalized) return pubsub.ValidationIgnore, blockchain.ErrNotDescendantOfFinalized diff --git a/changelog/satushh-gossip.md b/changelog/satushh-gossip.md new file mode 100644 index 0000000000..36deb8e6ea --- /dev/null +++ b/changelog/satushh-gossip.md @@ -0,0 +1,3 @@ +### Fixed + +- Fixed [#15812](https://github.com/OffchainLabs/prysm/issues/15812): Gossip attestation validation incorrectly rejecting attestations that arrive before their referenced blocks. Previously, attestations were saved to the pending queue but immediately rejected by forkchoice validation, causing "not descendant of finalized checkpoint" errors. Now attestations for missing blocks return `ValidationIgnore` without error, allowing them to be properly processed when their blocks arrive. This eliminates false positive rejections and prevents potential incorrect peer downscoring during network congestion. \ No newline at end of file From 4b984516497c034284e702745731774891b6c9ea Mon Sep 17 00:00:00 2001 From: Marco Munizaga Date: Mon, 13 Oct 2025 10:16:02 -0700 Subject: [PATCH 016/103] fix allocation size of proofs in ComputeCellsAndProofsFromStructured (#15809) * fix allocation size of proofs in ComputeCellsAndProofsFromStructured the preallocated slice for KZG Proofs was 48x bigger than it needed to be. 
* changelog --------- Co-authored-by: Kasey Kirkham Co-authored-by: james-prysm <90280386+james-prysm@users.noreply.github.com> --- beacon-chain/core/peerdas/reconstruction.go | 2 +- beacon-chain/core/peerdas/reconstruction_test.go | 1 + changelog/marcopolo_push-nxynxywxtlpo.md | 2 ++ 3 files changed, 4 insertions(+), 1 deletion(-) create mode 100644 changelog/marcopolo_push-nxynxywxtlpo.md diff --git a/beacon-chain/core/peerdas/reconstruction.go b/beacon-chain/core/peerdas/reconstruction.go index b27247bc4c..5a9663bace 100644 --- a/beacon-chain/core/peerdas/reconstruction.go +++ b/beacon-chain/core/peerdas/reconstruction.go @@ -257,7 +257,7 @@ func ComputeCellsAndProofsFromStructured(blobsAndProofs []*pb.BlobAndProofV2) ([ return nil, errors.Wrap(err, "compute cells") } - kzgProofs := make([]kzg.Proof, 0, numberOfColumns*kzg.BytesPerProof) + kzgProofs := make([]kzg.Proof, 0, numberOfColumns) for _, kzgProofBytes := range blobAndProof.KzgProofs { if len(kzgProofBytes) != kzg.BytesPerProof { return nil, errors.New("wrong KZG proof size - should never happen") diff --git a/beacon-chain/core/peerdas/reconstruction_test.go b/beacon-chain/core/peerdas/reconstruction_test.go index de2791f9e8..88fe79d178 100644 --- a/beacon-chain/core/peerdas/reconstruction_test.go +++ b/beacon-chain/core/peerdas/reconstruction_test.go @@ -441,6 +441,7 @@ func TestComputeCellsAndProofsFromStructured(t *testing.T) { for i := range blobCount { require.Equal(t, len(expectedCellsAndProofs[i].Cells), len(actualCellsAndProofs[i].Cells)) require.Equal(t, len(expectedCellsAndProofs[i].Proofs), len(actualCellsAndProofs[i].Proofs)) + require.Equal(t, len(expectedCellsAndProofs[i].Proofs), cap(actualCellsAndProofs[i].Proofs)) // Compare cells for j, expectedCell := range expectedCellsAndProofs[i].Cells { diff --git a/changelog/marcopolo_push-nxynxywxtlpo.md b/changelog/marcopolo_push-nxynxywxtlpo.md new file mode 100644 index 0000000000..f450494aa7 --- /dev/null +++ b/changelog/marcopolo_push-nxynxywxtlpo.md @@ -0,0 +1,2 @@ +### Ignored +- Fix (unreleased) bug where the preallocated slice for KZG Proofs was 48x bigger than it needed to be. 
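The patch above fixes a capacity bug: `make([]kzg.Proof, 0, n)` reserves room for n proof elements, not n bytes, so multiplying the capacity by `kzg.BytesPerProof` (48) reserved 48x more memory than needed. A minimal standalone sketch of the difference — the `Proof` type, the 48-byte proof size and the column count are assumptions used only for illustration:

package main

import "fmt"

const bytesPerProof = 48 // assumed size of one KZG proof, mirroring kzg.BytesPerProof

// Proof stands in for kzg.Proof: a fixed-size 48-byte array.
type Proof [bytesPerProof]byte

func main() {
	const numberOfColumns = 128 // assumed number of data columns, for illustration only

	// Buggy preallocation: capacity expressed in bytes, but each slot holds a whole Proof,
	// so this reserves numberOfColumns*48 elements (48x the memory actually required).
	oversized := make([]Proof, 0, numberOfColumns*bytesPerProof)

	// Fixed preallocation: capacity expressed in elements, one slot per expected proof.
	rightSized := make([]Proof, 0, numberOfColumns)

	fmt.Printf("reserved %d bytes instead of %d bytes\n",
		cap(oversized)*bytesPerProof, cap(rightSized)*bytesPerProof)
}

The accompanying test change asserts on `cap(...)` in addition to `len(...)`, which is what catches this class of silent over-allocation.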
From 5f8eb69201d2c1a7bb7724bef2a86fff85365df0 Mon Sep 17 00:00:00 2001 From: terence Date: Mon, 13 Oct 2025 11:36:06 -0700 Subject: [PATCH 017/103] Add proper handling for submit blind block 502 error (#15848) * Add proper handling for builder relay 502 BadGateway errors * James feedback * Change wording --- api/client/builder/client.go | 6 +++ api/client/builder/client_test.go | 2 +- api/client/builder/errors.go | 1 + beacon-chain/blockchain/process_block_test.go | 1 - .../rpc/prysm/v1alpha1/validator/proposer.go | 4 ++ .../prysm/v1alpha1/validator/proposer_test.go | 49 +++++++++++++++++++ changelog/ttsao_handle-relay-502-errors.md | 3 ++ .../beacon_api_validator_client_test.go | 10 ++-- .../beacon-api/mock/json_rest_handler_mock.go | 2 +- 9 files changed, 70 insertions(+), 8 deletions(-) create mode 100644 changelog/ttsao_handle-relay-502-errors.md diff --git a/api/client/builder/client.go b/api/client/builder/client.go index f1ad36221d..b50dcf1891 100644 --- a/api/client/builder/client.go +++ b/api/client/builder/client.go @@ -726,6 +726,12 @@ func unexpectedStatusErr(response *http.Response, expected int) error { return errors.Wrap(jsonErr, "unable to read response body") } return errors.Wrap(ErrNotOK, errMessage.Message) + case http.StatusBadGateway: + log.WithError(ErrBadGateway).Debug(msg) + if jsonErr := json.Unmarshal(bodyBytes, &errMessage); jsonErr != nil { + return errors.Wrap(jsonErr, "unable to read response body") + } + return errors.Wrap(ErrBadGateway, errMessage.Message) default: log.WithError(ErrNotOK).Debug(msg) return errors.Wrap(ErrNotOK, fmt.Sprintf("unsupported error code: %d", response.StatusCode)) diff --git a/api/client/builder/client_test.go b/api/client/builder/client_test.go index 13f3d65483..79be7ae79d 100644 --- a/api/client/builder/client_test.go +++ b/api/client/builder/client_test.go @@ -12,7 +12,6 @@ import ( "github.com/OffchainLabs/prysm/v6/api" "github.com/OffchainLabs/prysm/v6/api/server/structs" - "github.com/OffchainLabs/prysm/v6/testing/util" "github.com/OffchainLabs/prysm/v6/config/params" "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" @@ -22,6 +21,7 @@ import ( eth "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" "github.com/OffchainLabs/prysm/v6/testing/assert" "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v6/testing/util" "github.com/prysmaticlabs/go-bitfield" log "github.com/sirupsen/logrus" ) diff --git a/api/client/builder/errors.go b/api/client/builder/errors.go index 92b3a71baf..5b3d88af0a 100644 --- a/api/client/builder/errors.go +++ b/api/client/builder/errors.go @@ -21,3 +21,4 @@ var ErrUnsupportedMediaType = errors.Wrap(ErrNotOK, "The media type in \"Content // ErrNotAcceptable specifically means that a '406 - Not Acceptable' was received from the API. 
var ErrNotAcceptable = errors.Wrap(ErrNotOK, "The accept header value is not acceptable") +var ErrBadGateway = errors.Wrap(ErrNotOK, "recv 502 BadGateway response from API") diff --git a/beacon-chain/blockchain/process_block_test.go b/beacon-chain/blockchain/process_block_test.go index bebfb576bc..1494d3da8e 100644 --- a/beacon-chain/blockchain/process_block_test.go +++ b/beacon-chain/blockchain/process_block_test.go @@ -3302,7 +3302,6 @@ func Test_postBlockProcess_EventSending(t *testing.T) { } } - func setupLightClientTestRequirements(ctx context.Context, t *testing.T, s *Service, v int, options ...util.LightClientOption) (*util.TestLightClient, *postBlockProcessConfig) { var l *util.TestLightClient switch v { diff --git a/beacon-chain/rpc/prysm/v1alpha1/validator/proposer.go b/beacon-chain/rpc/prysm/v1alpha1/validator/proposer.go index 868ce62e9f..ad326a8af1 100644 --- a/beacon-chain/rpc/prysm/v1alpha1/validator/proposer.go +++ b/beacon-chain/rpc/prysm/v1alpha1/validator/proposer.go @@ -316,6 +316,10 @@ func (vs *Server) ProposeBeaconBlock(ctx context.Context, req *ethpb.GenericSign blobSidecars, dataColumnSidecars, err = vs.handleUnblindedBlock(rob, req) } if err != nil { + if errors.Is(err, builderapi.ErrBadGateway) && block.IsBlinded() { + log.WithError(err).Info("Optimistically proposed block - builder relay temporarily unavailable, block may arrive over P2P") + return ðpb.ProposeResponse{BlockRoot: root[:]}, nil + } return nil, status.Errorf(codes.Internal, "%s: %v", "handle block failed", err) } diff --git a/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_test.go b/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_test.go index 20c7fe5f11..f3773085d8 100644 --- a/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_test.go +++ b/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_test.go @@ -6,6 +6,7 @@ import ( "testing" "time" + builderapi "github.com/OffchainLabs/prysm/v6/api/client/builder" "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/kzg" mock "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/testing" "github.com/OffchainLabs/prysm/v6/beacon-chain/builder" @@ -3634,4 +3635,52 @@ func TestServer_ProposeBeaconBlock_PostFuluBlindedBlock(t *testing.T) { require.NotNil(t, res) require.NotEmpty(t, res.BlockRoot) }) + + t.Run("blinded block - 502 error handling", func(t *testing.T) { + params.SetupTestConfigCleanup(t) + cfg := params.BeaconConfig().Copy() + cfg.FuluForkEpoch = 10 + params.OverrideBeaconConfig(cfg) + + mockBuilder := &builderTest.MockBuilderService{ + HasConfigured: true, + Cfg: &builderTest.Config{BeaconDB: db}, + PayloadDeneb: &enginev1.ExecutionPayloadDeneb{}, + ErrSubmitBlindedBlock: builderapi.ErrBadGateway, + } + + c := &mock.ChainService{State: beaconState, Root: parentRoot[:]} + proposerServer := &Server{ + ChainStartFetcher: &mockExecution.Chain{}, + Eth1InfoFetcher: &mockExecution.Chain{}, + Eth1BlockFetcher: &mockExecution.Chain{}, + BlockReceiver: c, + BlobReceiver: c, + HeadFetcher: c, + BlockNotifier: c.BlockNotifier(), + OperationNotifier: c.OperationNotifier(), + StateGen: stategen.New(db, doublylinkedtree.New()), + TimeFetcher: c, + SyncChecker: &mockSync.Sync{IsSyncing: false}, + BeaconDB: db, + BlockBuilder: mockBuilder, + P2P: &mockp2p.MockBroadcaster{}, + } + + blindedBlock := util.NewBlindedBeaconBlockDeneb() + blindedBlock.Message.Slot = 160 // This puts us at epoch 5 (160/32 = 5) + blindedBlock.Message.ProposerIndex = 0 + blindedBlock.Message.ParentRoot = parentRoot[:] + blindedBlock.Message.StateRoot = make([]byte, 32) + 
+ req := ðpb.GenericSignedBeaconBlock{ + Block: ðpb.GenericSignedBeaconBlock_BlindedDeneb{BlindedDeneb: blindedBlock}, + } + + // Should handle 502 error gracefully and continue with original blinded block + res, err := proposerServer.ProposeBeaconBlock(ctx, req) + require.NoError(t, err) + require.NotNil(t, res) + require.NotEmpty(t, res.BlockRoot) + }) } diff --git a/changelog/ttsao_handle-relay-502-errors.md b/changelog/ttsao_handle-relay-502-errors.md new file mode 100644 index 0000000000..e4fb005bd9 --- /dev/null +++ b/changelog/ttsao_handle-relay-502-errors.md @@ -0,0 +1,3 @@ +### Changed + +- Gracefully handle submit blind block returning 502 errors. diff --git a/validator/client/beacon-api/beacon_api_validator_client_test.go b/validator/client/beacon-api/beacon_api_validator_client_test.go index d20e98a3cf..d86414fe1c 100644 --- a/validator/client/beacon-api/beacon_api_validator_client_test.go +++ b/validator/client/beacon-api/beacon_api_validator_client_test.go @@ -202,10 +202,10 @@ func TestBeaconApiValidatorClient_ProposeBeaconBlockError_ThenPass(t *testing.T) func TestBeaconApiValidatorClient_ProposeBeaconBlockAllTypes(t *testing.T) { tests := []struct { - name string - block *ethpb.GenericSignedBeaconBlock - expectedPath string - wantErr bool + name string + block *ethpb.GenericSignedBeaconBlock + expectedPath string + wantErr bool errorMessage string }{ { @@ -374,7 +374,7 @@ func TestBeaconApiValidatorClient_ProposeBeaconBlockHTTPErrors(t *testing.T) { gomock.Any(), gomock.Any(), ).Return(nil, nil, tt.sszError).Times(1) - + if tt.expectJSON { // When SSZ fails, it falls back to JSON jsonRestHandler.EXPECT().Post( diff --git a/validator/client/beacon-api/mock/json_rest_handler_mock.go b/validator/client/beacon-api/mock/json_rest_handler_mock.go index a604da109d..52cca1b971 100644 --- a/validator/client/beacon-api/mock/json_rest_handler_mock.go +++ b/validator/client/beacon-api/mock/json_rest_handler_mock.go @@ -121,7 +121,7 @@ func (m *MockJsonRestHandler) PostSSZ(ctx context.Context, endpoint string, head ret0, _ := ret[0].([]byte) ret1, _ := ret[1].(http.Header) ret2, _ := ret[2].(error) - return ret0,ret1,ret2 + return ret0, ret1, ret2 } // Post indicates an expected call of Post. 
From e463bcd1e154f6ace89d086bf1a38d828f0e7a1b Mon Sep 17 00:00:00 2001 From: Potuz Date: Mon, 13 Oct 2025 17:29:27 -0300 Subject: [PATCH 018/103] Mark block as invalid in gossip if it fails signature check (#15847) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Mark block as invalid in gossip if it fails signature check * Add tests 🤖 Generated with [Claude Code](https://claude.com/claude-code) Co-Authored-By: Claude --------- Co-authored-by: Claude --- beacon-chain/core/blocks/error.go | 1 + beacon-chain/core/blocks/signature.go | 7 +- beacon-chain/core/blocks/signature_test.go | 33 ++++++++ beacon-chain/sync/validate_beacon_blocks.go | 3 + .../sync/validate_beacon_blocks_test.go | 77 ++++++++++++++++++- changelog/potuz_invalid_sig.md | 3 + 6 files changed, 120 insertions(+), 4 deletions(-) create mode 100644 changelog/potuz_invalid_sig.md diff --git a/beacon-chain/core/blocks/error.go b/beacon-chain/core/blocks/error.go index 7d968e7d5e..87218529c6 100644 --- a/beacon-chain/core/blocks/error.go +++ b/beacon-chain/core/blocks/error.go @@ -6,3 +6,4 @@ var errNilSignedWithdrawalMessage = errors.New("nil SignedBLSToExecutionChange m var errNilWithdrawalMessage = errors.New("nil BLSToExecutionChange message") var errInvalidBLSPrefix = errors.New("withdrawal credential prefix is not a BLS prefix") var errInvalidWithdrawalCredentials = errors.New("withdrawal credentials do not match") +var ErrInvalidSignature = errors.New("invalid signature") diff --git a/beacon-chain/core/blocks/signature.go b/beacon-chain/core/blocks/signature.go index 72a542a8db..1d8ec1dcc1 100644 --- a/beacon-chain/core/blocks/signature.go +++ b/beacon-chain/core/blocks/signature.go @@ -114,9 +114,12 @@ func VerifyBlockSignatureUsingCurrentFork(beaconState state.ReadOnlyBeaconState, } proposerPubKey := proposer.PublicKey sig := blk.Signature() - return signing.VerifyBlockSigningRoot(proposerPubKey, sig[:], domain, func() ([32]byte, error) { + if err := signing.VerifyBlockSigningRoot(proposerPubKey, sig[:], domain, func() ([32]byte, error) { return blkRoot, nil - }) + }); err != nil { + return ErrInvalidSignature + } + return nil } // BlockSignatureBatch retrieves the block signature batch from the provided block and its corresponding state. 
diff --git a/beacon-chain/core/blocks/signature_test.go b/beacon-chain/core/blocks/signature_test.go index 896e531cea..aa86bb8b3c 100644 --- a/beacon-chain/core/blocks/signature_test.go +++ b/beacon-chain/core/blocks/signature_test.go @@ -89,3 +89,36 @@ func TestVerifyBlockSignatureUsingCurrentFork(t *testing.T) { require.NoError(t, err) assert.NoError(t, blocks.VerifyBlockSignatureUsingCurrentFork(bState, wsb, blkRoot)) } + +func TestVerifyBlockSignatureUsingCurrentFork_InvalidSignature(t *testing.T) { + params.SetupTestConfigCleanup(t) + bCfg := params.BeaconConfig() + bCfg.AltairForkEpoch = 100 + bCfg.ForkVersionSchedule[bytesutil.ToBytes4(bCfg.AltairForkVersion)] = 100 + params.OverrideBeaconConfig(bCfg) + bState, keys := util.DeterministicGenesisState(t, 100) + altairBlk := util.NewBeaconBlockAltair() + altairBlk.Block.ProposerIndex = 0 + altairBlk.Block.Slot = params.BeaconConfig().SlotsPerEpoch * 100 + blkRoot, err := altairBlk.Block.HashTreeRoot() + assert.NoError(t, err) + + // Sign with wrong key (proposer index 0, but using key 1) + fData := ðpb.Fork{ + Epoch: 100, + CurrentVersion: params.BeaconConfig().AltairForkVersion, + PreviousVersion: params.BeaconConfig().GenesisForkVersion, + } + domain, err := signing.Domain(fData, 100, params.BeaconConfig().DomainBeaconProposer, bState.GenesisValidatorsRoot()) + assert.NoError(t, err) + rt, err := signing.ComputeSigningRoot(altairBlk.Block, domain) + assert.NoError(t, err) + wrongSig := keys[1].Sign(rt[:]).Marshal() + altairBlk.Signature = wrongSig + + wsb, err := consensusblocks.NewSignedBeaconBlock(altairBlk) + require.NoError(t, err) + + err = blocks.VerifyBlockSignatureUsingCurrentFork(bState, wsb, blkRoot) + require.ErrorIs(t, err, blocks.ErrInvalidSignature, "Expected ErrInvalidSignature for invalid signature") +} diff --git a/beacon-chain/sync/validate_beacon_blocks.go b/beacon-chain/sync/validate_beacon_blocks.go index 49d101cb17..8418ff9819 100644 --- a/beacon-chain/sync/validate_beacon_blocks.go +++ b/beacon-chain/sync/validate_beacon_blocks.go @@ -294,6 +294,9 @@ func (s *Service) validatePhase0Block(ctx context.Context, blk interfaces.ReadOn } if err := blocks.VerifyBlockSignatureUsingCurrentFork(parentState, blk, blockRoot); err != nil { + if errors.Is(err, blocks.ErrInvalidSignature) { + s.setBadBlock(ctx, blockRoot) + } return nil, err } // In the event the block is more than an epoch ahead from its diff --git a/beacon-chain/sync/validate_beacon_blocks_test.go b/beacon-chain/sync/validate_beacon_blocks_test.go index 6f5acb6915..a64aea417e 100644 --- a/beacon-chain/sync/validate_beacon_blocks_test.go +++ b/beacon-chain/sync/validate_beacon_blocks_test.go @@ -103,11 +103,84 @@ func TestValidateBeaconBlockPubSub_InvalidSignature(t *testing.T) { }, } res, err := r.validateBeaconBlockPubSub(ctx, "", m) - require.ErrorIs(t, err, signing.ErrSigFailedToVerify) + require.ErrorContains(t, "invalid signature", err) result := res == pubsub.ValidationReject assert.Equal(t, true, result) } +func TestValidateBeaconBlockPubSub_InvalidSignature_MarksBlockAsBad(t *testing.T) { + db := dbtest.SetupDB(t) + p := p2ptest.NewTestP2P(t) + ctx := t.Context() + beaconState, privKeys := util.DeterministicGenesisState(t, 100) + parentBlock := util.NewBeaconBlock() + util.SaveBlock(t, ctx, db, parentBlock) + bRoot, err := parentBlock.Block.HashTreeRoot() + require.NoError(t, err) + require.NoError(t, db.SaveState(ctx, beaconState, bRoot)) + require.NoError(t, db.SaveStateSummary(ctx, ðpb.StateSummary{Root: bRoot[:]})) + copied := beaconState.Copy() 
+ require.NoError(t, copied.SetSlot(1)) + proposerIdx, err := helpers.BeaconProposerIndex(ctx, copied) + require.NoError(t, err) + msg := util.NewBeaconBlock() + msg.Block.ParentRoot = bRoot[:] + msg.Block.Slot = 1 + msg.Block.ProposerIndex = proposerIdx + badPrivKeyIdx := proposerIdx + 1 // We generate a valid signature from a wrong private key which fails to verify + msg.Signature, err = signing.ComputeDomainAndSign(beaconState, 0, msg.Block, params.BeaconConfig().DomainBeaconProposer, privKeys[badPrivKeyIdx]) + require.NoError(t, err) + + stateGen := stategen.New(db, doublylinkedtree.New()) + chainService := &mock.ChainService{Genesis: time.Unix(time.Now().Unix()-int64(params.BeaconConfig().SecondsPerSlot), 0), + FinalizedCheckPoint: ðpb.Checkpoint{ + Epoch: 0, + Root: make([]byte, 32), + }, + DB: db, + } + r := &Service{ + cfg: &config{ + beaconDB: db, + p2p: p, + initialSync: &mockSync.Sync{IsSyncing: false}, + chain: chainService, + clock: startup.NewClock(chainService.Genesis, chainService.ValidatorsRoot), + blockNotifier: chainService.BlockNotifier(), + stateGen: stateGen, + }, + seenBlockCache: lruwrpr.New(10), + badBlockCache: lruwrpr.New(10), + } + + blockRoot, err := msg.Block.HashTreeRoot() + require.NoError(t, err) + + // Verify block is not marked as bad initially + assert.Equal(t, false, r.hasBadBlock(blockRoot), "block should not be marked as bad initially") + + buf := new(bytes.Buffer) + _, err = p.Encoding().EncodeGossip(buf, msg) + require.NoError(t, err) + topic := p2p.GossipTypeMapping[reflect.TypeOf(msg)] + digest, err := r.currentForkDigest() + assert.NoError(t, err) + topic = r.addDigestToTopic(topic, digest) + m := &pubsub.Message{ + Message: &pubsubpb.Message{ + Data: buf.Bytes(), + Topic: &topic, + }, + } + res, err := r.validateBeaconBlockPubSub(ctx, "", m) + require.ErrorContains(t, "invalid signature", err) + result := res == pubsub.ValidationReject + assert.Equal(t, true, result) + + // Verify block is now marked as bad after invalid signature + assert.Equal(t, true, r.hasBadBlock(blockRoot), "block should be marked as bad after invalid signature") +} + func TestValidateBeaconBlockPubSub_BlockAlreadyPresentInDB(t *testing.T) { db := dbtest.SetupDB(t) ctx := t.Context() @@ -976,7 +1049,7 @@ func TestValidateBeaconBlockPubSub_InvalidParentBlock(t *testing.T) { }, } res, err := r.validateBeaconBlockPubSub(ctx, "", m) - require.ErrorContains(t, "could not unmarshal bytes into signature", err) + require.ErrorContains(t, "invalid signature", err) assert.Equal(t, res, pubsub.ValidationReject, "block with invalid signature should be rejected") require.NoError(t, copied.SetSlot(2)) diff --git a/changelog/potuz_invalid_sig.md b/changelog/potuz_invalid_sig.md new file mode 100644 index 0000000000..756ed7748f --- /dev/null +++ b/changelog/potuz_invalid_sig.md @@ -0,0 +1,3 @@ +### Fixed + +- Mark the block as invalid if it has an invalid signature. From 0568bec935062ce0c417fc693071cd2df0762558 Mon Sep 17 00:00:00 2001 From: fernantho Date: Mon, 13 Oct 2025 23:39:15 +0200 Subject: [PATCH 019/103] SSZ-QL: use FastSSZ-generated HashTreeRoot through SSZObject in sszInfo (#15805) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * stored CL object to enable the usage Fastssz's HashTreeRoot(). 
added basic test * refactorization - using interfaces instead of storing original object * added tests covering ssz custom types * renamed hash_tree_root to ssz_interface as it contains MarshalSSZ and UnmarshalSSZ functions * run gazelle * renamed test and improved comments * refactored test and extend to marshalSSZ and UnmarshalSSZ * added changelog * updated comment * Changed SSZIface name to SSZObject. Removed MarshalSSZ and UnmarshalSSZ function signatures from interface as they are not used still. Refactored tests. * renamed file ssz_interface.go to ssz_object.go. merge test from ssz_interface_test.go into query_test.go. reordered source SSZObject field from sszInfo struct * sticked SSZObject interface to HashTreeRoot() function, the only one needed so far * run gazelle :) --------- Co-authored-by: Radosław Kapka --- ..._ssz-ql-use-fastssz-generated-functions.md | 3 + encoding/ssz/query/BUILD.bazel | 1 + encoding/ssz/query/analyzer.go | 5 +- encoding/ssz/query/query_test.go | 60 ++++++++++++++++++- encoding/ssz/query/ssz_info.go | 2 + encoding/ssz/query/ssz_object.go | 22 +++++++ encoding/ssz/query/testutil/runner.go | 5 +- encoding/ssz/query/testutil/type.go | 6 +- 8 files changed, 98 insertions(+), 6 deletions(-) create mode 100644 changelog/fernantho_ssz-ql-use-fastssz-generated-functions.md create mode 100644 encoding/ssz/query/ssz_object.go diff --git a/changelog/fernantho_ssz-ql-use-fastssz-generated-functions.md b/changelog/fernantho_ssz-ql-use-fastssz-generated-functions.md new file mode 100644 index 0000000000..1743dca5cd --- /dev/null +++ b/changelog/fernantho_ssz-ql-use-fastssz-generated-functions.md @@ -0,0 +1,3 @@ +### Added + +- Delegate sszInfo HashTreeRoot to FastSSZ-generated implementations via SSZObject, enabling roots calculation for generated types while avoiding duplicate logic. diff --git a/encoding/ssz/query/BUILD.bazel b/encoding/ssz/query/BUILD.bazel index 93caf3b170..837d17054c 100644 --- a/encoding/ssz/query/BUILD.bazel +++ b/encoding/ssz/query/BUILD.bazel @@ -11,6 +11,7 @@ go_library( "path.go", "query.go", "ssz_info.go", + "ssz_object.go", "ssz_type.go", "tag_parser.go", "vector.go", diff --git a/encoding/ssz/query/analyzer.go b/encoding/ssz/query/analyzer.go index 5369d11561..52ba85ad6d 100644 --- a/encoding/ssz/query/analyzer.go +++ b/encoding/ssz/query/analyzer.go @@ -10,7 +10,7 @@ import ( const offsetBytes = 4 // AnalyzeObject analyzes given object and returns its SSZ information. -func AnalyzeObject(obj any) (*sszInfo, error) { +func AnalyzeObject(obj SSZObject) (*sszInfo, error) { value := dereferencePointer(obj) info, err := analyzeType(value.Type(), nil) @@ -18,6 +18,9 @@ func AnalyzeObject(obj any) (*sszInfo, error) { return nil, fmt.Errorf("could not analyze type %s: %w", value.Type().Name(), err) } + // Store the original object interface + info.source = obj + // Populate variable-length information using the actual value. 
err = PopulateVariableLengthInfo(info, value.Interface()) if err != nil { diff --git a/encoding/ssz/query/query_test.go b/encoding/ssz/query/query_test.go index d67ab37bde..934f28a5f9 100644 --- a/encoding/ssz/query/query_test.go +++ b/encoding/ssz/query/query_test.go @@ -302,7 +302,7 @@ func getFixedTestContainerSpec() testutil.TestSpec { return testutil.TestSpec{ Name: "FixedTestContainer", - Type: sszquerypb.FixedTestContainer{}, + Type: &sszquerypb.FixedTestContainer{}, Instance: testContainer, PathTests: []testutil.PathTest{ // Basic types @@ -364,6 +364,62 @@ func getFixedTestContainerSpec() testutil.TestSpec { } } +func TestSSZObject_batch(t *testing.T) { + tests := []struct { + name string + obj any + }{ + { + name: "FixedNestedContainer", + obj: &sszquerypb.FixedNestedContainer{ + Value1: 42, + Value2: []byte{0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08}, + }, + }, + { + name: "FixedTestContainer", + obj: createFixedTestContainer(), + }, + { + name: "VariableNestedContainer", + obj: &sszquerypb.VariableNestedContainer{ + Value1: 84, + FieldListUint64: []uint64{1, 2, 3, 4, 5}, + NestedListField: [][]byte{ + {0x0a, 0x0b, 0x0c}, + {0x1a, 0x1b, 0x1c, 0x1d}, + }, + }, + }, + { + name: "VariableTestContainer", + obj: createVariableTestContainer(), + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + // Analyze the object to get its sszInfo + object, ok := tt.obj.(query.SSZObject) + require.Equal(t, true, ok, "Expected object to implement SSZObject") + info, err := query.AnalyzeObject(object) + require.NoError(t, err) + require.NotNil(t, info, "Expected non-nil SSZ info") + + // Ensure the original object implements SSZObject + originalFunctions, ok := tt.obj.(query.SSZObject) + require.Equal(t, ok, true, "Original object does not implement SSZObject") + + // Call HashTreeRoot on the sszInfo and compare results + hashTreeRoot, err := info.HashTreeRoot() + require.NoError(t, err, "HashTreeRoot should not return an error") + expectedHashTreeRoot, err := originalFunctions.HashTreeRoot() + require.NoError(t, err, "HashTreeRoot on original object should not return an error") + require.Equal(t, expectedHashTreeRoot, hashTreeRoot, "HashTreeRoot from sszInfo should match original object's HashTreeRoot") + }) + } +} + func createVariableTestContainer() *sszquerypb.VariableTestContainer { leadingField := make([]byte, 32) for i := range leadingField { @@ -439,7 +495,7 @@ func getVariableTestContainerSpec() testutil.TestSpec { return testutil.TestSpec{ Name: "VariableTestContainer", - Type: sszquerypb.VariableTestContainer{}, + Type: &sszquerypb.VariableTestContainer{}, Instance: testContainer, PathTests: []testutil.PathTest{ // Fixed leading field diff --git a/encoding/ssz/query/ssz_info.go b/encoding/ssz/query/ssz_info.go index e360013443..0ba3369196 100644 --- a/encoding/ssz/query/ssz_info.go +++ b/encoding/ssz/query/ssz_info.go @@ -13,6 +13,8 @@ type sszInfo struct { sszType SSZType // Type in Go. Need this for unmarshaling. typ reflect.Type + // Original object being analyzed + source SSZObject // isVariable is true if the struct contains any variable-size fields. 
isVariable bool diff --git a/encoding/ssz/query/ssz_object.go b/encoding/ssz/query/ssz_object.go new file mode 100644 index 0000000000..a56b15983d --- /dev/null +++ b/encoding/ssz/query/ssz_object.go @@ -0,0 +1,22 @@ +package query + +import "errors" + +type SSZObject interface { + HashTreeRoot() ([32]byte, error) +} + +// HashTreeRoot calls the HashTreeRoot method on the stored interface if it implements SSZObject. +// Returns the 32-byte hash tree root or an error if the interface doesn't support hashing. +func (info *sszInfo) HashTreeRoot() ([32]byte, error) { + if info == nil { + return [32]byte{}, errors.New("sszInfo is nil") + } + + if info.source == nil { + return [32]byte{}, errors.New("sszInfo.source is nil") + } + + // Check if the value implements the Hashable interface + return info.source.HashTreeRoot() +} diff --git a/encoding/ssz/query/testutil/runner.go b/encoding/ssz/query/testutil/runner.go index e639dd3fc7..610e2d6be0 100644 --- a/encoding/ssz/query/testutil/runner.go +++ b/encoding/ssz/query/testutil/runner.go @@ -10,7 +10,10 @@ import ( func RunStructTest(t *testing.T, spec TestSpec) { t.Run(spec.Name, func(t *testing.T) { - info, err := query.AnalyzeObject(spec.Type) + object, ok := spec.Type.(query.SSZObject) + require.Equal(t, true, ok, "spec.Type must implement SSZObject interface") + require.NotNil(t, object, "spec.Type must not be nil") + info, err := query.AnalyzeObject(object) require.NoError(t, err) testInstance := spec.Instance diff --git a/encoding/ssz/query/testutil/type.go b/encoding/ssz/query/testutil/type.go index 645fd7d24c..6e300adc20 100644 --- a/encoding/ssz/query/testutil/type.go +++ b/encoding/ssz/query/testutil/type.go @@ -1,5 +1,7 @@ package testutil +import "github.com/OffchainLabs/prysm/v6/encoding/ssz/query" + type PathTest struct { Path string Expected any @@ -7,7 +9,7 @@ type PathTest struct { type TestSpec struct { Name string - Type any - Instance any + Type query.SSZObject + Instance query.SSZObject PathTests []PathTest } From c88aa77ac188bf77266def34423d757305aaf51e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rados=C5=82aw=20Kapka?= Date: Tue, 14 Oct 2025 14:08:21 +0200 Subject: [PATCH 020/103] Display non-JSON error messages (#15860) * Display non-JSON error messages * changelog <3 --- changelog/radek_read-non-json-error.md | 3 +++ validator/client/beacon-api/rest_handler_client.go | 4 ++-- .../client/beacon-api/rest_handler_client_test.go | 14 ++++++++++++++ 3 files changed, 19 insertions(+), 2 deletions(-) create mode 100644 changelog/radek_read-non-json-error.md diff --git a/changelog/radek_read-non-json-error.md b/changelog/radek_read-non-json-error.md new file mode 100644 index 0000000000..11be946ad4 --- /dev/null +++ b/changelog/radek_read-non-json-error.md @@ -0,0 +1,3 @@ +### Fixed + +- Display error messages from the server verbatim when they are not encoded as `application/json`. 
\ No newline at end of file diff --git a/validator/client/beacon-api/rest_handler_client.go b/validator/client/beacon-api/rest_handler_client.go index ab8378d875..7f48af862a 100644 --- a/validator/client/beacon-api/rest_handler_client.go +++ b/validator/client/beacon-api/rest_handler_client.go @@ -135,7 +135,7 @@ func (c *BeaconApiRestHandler) GetSSZ(ctx context.Context, endpoint string) ([]b decoder := json.NewDecoder(bytes.NewBuffer(body)) errorJson := &httputil.DefaultJsonError{} if err = decoder.Decode(errorJson); err != nil { - return nil, nil, errors.Wrapf(err, "failed to decode response body into error json for %s", httpResp.Request.URL) + return nil, nil, fmt.Errorf("HTTP request for %s unsuccessful (%d: %s)", httpResp.Request.URL, httpResp.StatusCode, string(body)) } return nil, nil, errorJson } @@ -241,7 +241,7 @@ func (c *BeaconApiRestHandler) PostSSZ( decoder := json.NewDecoder(bytes.NewBuffer(body)) errorJson := &httputil.DefaultJsonError{} if err = decoder.Decode(errorJson); err != nil { - return nil, nil, errors.Wrapf(err, "failed to decode response body into error json for %s", httpResp.Request.URL) + return nil, nil, fmt.Errorf("HTTP request for %s unsuccessful (%d: %s)", httpResp.Request.URL, httpResp.StatusCode, string(body)) } return nil, nil, errorJson } diff --git a/validator/client/beacon-api/rest_handler_client_test.go b/validator/client/beacon-api/rest_handler_client_test.go index af3d928440..476cacff29 100644 --- a/validator/client/beacon-api/rest_handler_client_test.go +++ b/validator/client/beacon-api/rest_handler_client_test.go @@ -344,4 +344,18 @@ func Test_decodeResp(t *testing.T) { err = decodeResp(r, nil) assert.ErrorContains(t, "failed to decode response body into error json", err) }) + t.Run("500 not JSON", func(t *testing.T) { + body := bytes.Buffer{} + _, err := body.WriteString("foo") + require.NoError(t, err) + r := &http.Response{ + Status: "500", + StatusCode: http.StatusInternalServerError, + Body: io.NopCloser(&body), + Header: map[string][]string{"Content-Type": {"text/plain"}}, + Request: &http.Request{}, + } + err = decodeResp(r, nil) + assert.ErrorContains(t, "HTTP request unsuccessful (500: foo)", err) + }) } From 82f556c50fb50fdcfb2d0fbb5b7f2dd62ffa0b9f Mon Sep 17 00:00:00 2001 From: Potuz Date: Tue, 14 Oct 2025 09:39:19 -0300 Subject: [PATCH 021/103] Remove redundant check (#15844) * Remove redundant check * changelog * fix gazelle --- beacon-chain/node/BUILD.bazel | 1 - beacon-chain/node/node.go | 18 +----------------- changelog/potuz_redundant_check.md | 3 +++ 3 files changed, 4 insertions(+), 18 deletions(-) create mode 100644 changelog/potuz_redundant_check.md diff --git a/beacon-chain/node/BUILD.bazel b/beacon-chain/node/BUILD.bazel index 3a5ad5a7ae..560969063b 100644 --- a/beacon-chain/node/BUILD.bazel +++ b/beacon-chain/node/BUILD.bazel @@ -58,7 +58,6 @@ go_library( "//config/params:go_default_library", "//consensus-types/primitives:go_default_library", "//container/slice:go_default_library", - "//encoding/bytesutil:go_default_library", "//genesis:go_default_library", "//monitoring/prometheus:go_default_library", "//monitoring/tracing:go_default_library", diff --git a/beacon-chain/node/node.go b/beacon-chain/node/node.go index 6a6146e4be..129695ae3a 100644 --- a/beacon-chain/node/node.go +++ b/beacon-chain/node/node.go @@ -60,7 +60,6 @@ import ( "github.com/OffchainLabs/prysm/v6/config/params" "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" "github.com/OffchainLabs/prysm/v6/container/slice" - 
"github.com/OffchainLabs/prysm/v6/encoding/bytesutil" "github.com/OffchainLabs/prysm/v6/genesis" "github.com/OffchainLabs/prysm/v6/monitoring/prometheus" "github.com/OffchainLabs/prysm/v6/runtime" @@ -598,22 +597,7 @@ func (b *BeaconNode) startStateGen(ctx context.Context, bfs coverage.AvailableBl return err } - r := bytesutil.ToBytes32(cp.Root) - // Consider edge case where finalized root are zeros instead of genesis root hash. - if r == params.BeaconConfig().ZeroHash { - genesisBlock, err := b.db.GenesisBlock(ctx) - if err != nil { - return err - } - if genesisBlock != nil && !genesisBlock.IsNil() { - r, err = genesisBlock.Block().HashTreeRoot() - if err != nil { - return err - } - } - } - - b.finalizedStateAtStartUp, err = sg.StateByRoot(ctx, r) + b.finalizedStateAtStartUp, err = sg.StateByRoot(ctx, [32]byte(cp.Root)) if err != nil { return err } diff --git a/changelog/potuz_redundant_check.md b/changelog/potuz_redundant_check.md new file mode 100644 index 0000000000..6f4531bd01 --- /dev/null +++ b/changelog/potuz_redundant_check.md @@ -0,0 +1,3 @@ +### Ignored + +- Remove redundant check for genesis root at startup. From fbbf2a140451c3b9b667abd1261dce59f1984f94 Mon Sep 17 00:00:00 2001 From: Manu NALEPA Date: Tue, 14 Oct 2025 16:39:38 +0200 Subject: [PATCH 022/103] `HasAtLeastOneIndex`: Check the index is not too high. (#15865) --- beacon-chain/db/filesystem/data_column_cache.go | 3 ++- beacon-chain/db/filesystem/data_column_cache_test.go | 8 ++++---- changelog/manu-has-at-least-one-index.md | 2 ++ 3 files changed, 8 insertions(+), 5 deletions(-) create mode 100644 changelog/manu-has-at-least-one-index.md diff --git a/beacon-chain/db/filesystem/data_column_cache.go b/beacon-chain/db/filesystem/data_column_cache.go index 7a6ee1d53f..52684b683b 100644 --- a/beacon-chain/db/filesystem/data_column_cache.go +++ b/beacon-chain/db/filesystem/data_column_cache.go @@ -35,8 +35,9 @@ func (s DataColumnStorageSummary) HasIndex(index uint64) bool { // HasAtLeastOneIndex returns true if at least one of the DataColumnSidecars at the given indices is available in the filesystem. 
func (s DataColumnStorageSummary) HasAtLeastOneIndex(indices []uint64) bool { + size := uint64(len(s.mask)) for _, index := range indices { - if s.mask[index] { + if index < size && s.mask[index] { return true } } diff --git a/beacon-chain/db/filesystem/data_column_cache_test.go b/beacon-chain/db/filesystem/data_column_cache_test.go index bbaa1cf71f..fc89a93e10 100644 --- a/beacon-chain/db/filesystem/data_column_cache_test.go +++ b/beacon-chain/db/filesystem/data_column_cache_test.go @@ -25,11 +25,11 @@ func TestHasIndex(t *testing.T) { func TestHasAtLeastOneIndex(t *testing.T) { summary := NewDataColumnStorageSummary(0, [fieldparams.NumberOfColumns]bool{false, true}) - hasAtLeastOneIndex := summary.HasAtLeastOneIndex([]uint64{3, 1, 2}) - require.Equal(t, true, hasAtLeastOneIndex) + actual := summary.HasAtLeastOneIndex([]uint64{3, 1, fieldparams.NumberOfColumns, 2}) + require.Equal(t, true, actual) - hasAtLeastOneIndex = summary.HasAtLeastOneIndex([]uint64{3, 4, 2}) - require.Equal(t, false, hasAtLeastOneIndex) + actual = summary.HasAtLeastOneIndex([]uint64{3, 4, fieldparams.NumberOfColumns, 2}) + require.Equal(t, false, actual) } func TestCount(t *testing.T) { diff --git a/changelog/manu-has-at-least-one-index.md b/changelog/manu-has-at-least-one-index.md new file mode 100644 index 0000000000..a742f2a2a8 --- /dev/null +++ b/changelog/manu-has-at-least-one-index.md @@ -0,0 +1,2 @@ +### Fixed +- `HasAtLeastOneIndex`: Check the index is not too high. From 683608e34a7780c1e98f80988a9e2ff9f09a640c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rados=C5=82aw=20Kapka?= Date: Tue, 14 Oct 2025 17:22:00 +0200 Subject: [PATCH 023/103] Improve returning individual message errors from Beacon API (#15835) * Improve returning individual message errors from Beacon API * changelog <3 * fix test * add debug logs * batch broadcast errors * use logrus fields * capitalize log messages --- api/client/beacon/client.go | 2 +- api/server/error.go | 40 ++++- beacon-chain/rpc/core/BUILD.bazel | 1 + beacon-chain/rpc/core/validator.go | 25 +-- beacon-chain/rpc/eth/beacon/handlers_pool.go | 154 +++++++++++------- .../rpc/eth/beacon/handlers_pool_test.go | 8 +- beacon-chain/rpc/eth/validator/BUILD.bazel | 1 + beacon-chain/rpc/eth/validator/handlers.go | 124 +++++++++++--- changelog/radek_api-individual-failure.md | 3 + cmd/prysmctl/validator/withdraw_test.go | 4 +- 10 files changed, 245 insertions(+), 117 deletions(-) create mode 100644 changelog/radek_api-individual-failure.md diff --git a/api/client/beacon/client.go b/api/client/beacon/client.go index e5ec0ea243..46fac052c7 100644 --- a/api/client/beacon/client.go +++ b/api/client/beacon/client.go @@ -284,7 +284,7 @@ func (c *Client) SubmitChangeBLStoExecution(ctx context.Context, request []*stru if resp.StatusCode != http.StatusOK { decoder := json.NewDecoder(resp.Body) decoder.DisallowUnknownFields() - errorJson := &server.IndexedVerificationFailureError{} + errorJson := &server.IndexedErrorContainer{} if err := decoder.Decode(errorJson); err != nil { return errors.Wrapf(err, "failed to decode error JSON for %s", resp.Request.URL) } diff --git a/api/server/error.go b/api/server/error.go index feb09facb8..f1e398b40c 100644 --- a/api/server/error.go +++ b/api/server/error.go @@ -6,6 +6,11 @@ import ( "strings" ) +var ( + ErrIndexedValidationFail = "One or more messages failed validation" + ErrIndexedBroadcastFail = "One or more messages failed broadcast" +) + // DecodeError represents an error resulting from trying to decode an HTTP request. 
// It tracks the full field name for which decoding failed. type DecodeError struct { @@ -29,19 +34,38 @@ func (e *DecodeError) Error() string { return fmt.Sprintf("could not decode %s: %s", strings.Join(e.path, "."), e.err.Error()) } -// IndexedVerificationFailureError wraps a collection of verification failures. -type IndexedVerificationFailureError struct { - Message string `json:"message"` - Code int `json:"code"` - Failures []*IndexedVerificationFailure `json:"failures"` +// IndexedErrorContainer wraps a collection of indexed errors. +type IndexedErrorContainer struct { + Message string `json:"message"` + Code int `json:"code"` + Failures []*IndexedError `json:"failures"` } -func (e *IndexedVerificationFailureError) StatusCode() int { +func (e *IndexedErrorContainer) StatusCode() int { return e.Code } -// IndexedVerificationFailure represents an issue when verifying a single indexed object e.g. an item in an array. -type IndexedVerificationFailure struct { +// IndexedError represents an issue when processing a single indexed object e.g. an item in an array. +type IndexedError struct { Index int `json:"index"` Message string `json:"message"` } + +// BroadcastFailedError represents an error scenario where broadcasting a published message failed. +type BroadcastFailedError struct { + msg string + err error +} + +// NewBroadcastFailedError creates a new instance of BroadcastFailedError. +func NewBroadcastFailedError(msg string, err error) *BroadcastFailedError { + return &BroadcastFailedError{ + msg: msg, + err: err, + } +} + +// Error returns the underlying error message. +func (e *BroadcastFailedError) Error() string { + return fmt.Sprintf("could not broadcast %s: %s", e.msg, e.err.Error()) +} diff --git a/beacon-chain/rpc/core/BUILD.bazel b/beacon-chain/rpc/core/BUILD.bazel index 899f8d89b8..47e85fd9ce 100644 --- a/beacon-chain/rpc/core/BUILD.bazel +++ b/beacon-chain/rpc/core/BUILD.bazel @@ -12,6 +12,7 @@ go_library( importpath = "github.com/OffchainLabs/prysm/v6/beacon-chain/rpc/core", visibility = ["//visibility:public"], deps = [ + "//api/server:go_default_library", "//beacon-chain/blockchain:go_default_library", "//beacon-chain/cache:go_default_library", "//beacon-chain/core/altair:go_default_library", diff --git a/beacon-chain/rpc/core/validator.go b/beacon-chain/rpc/core/validator.go index e513fed97a..761e2478f1 100644 --- a/beacon-chain/rpc/core/validator.go +++ b/beacon-chain/rpc/core/validator.go @@ -7,6 +7,7 @@ import ( "sort" "time" + "github.com/OffchainLabs/prysm/v6/api/server" "github.com/OffchainLabs/prysm/v6/beacon-chain/cache" "github.com/OffchainLabs/prysm/v6/beacon-chain/core/altair" "github.com/OffchainLabs/prysm/v6/beacon-chain/core/epoch/precompute" @@ -36,24 +37,6 @@ import ( var errOptimisticMode = errors.New("the node is currently optimistic and cannot serve validators") -// AggregateBroadcastFailedError represents an error scenario where -// broadcasting an aggregate selection proof failed. -type AggregateBroadcastFailedError struct { - err error -} - -// NewAggregateBroadcastFailedError creates a new error instance. -func NewAggregateBroadcastFailedError(err error) AggregateBroadcastFailedError { - return AggregateBroadcastFailedError{ - err: err, - } -} - -// Error returns the underlying error message. 
-func (e *AggregateBroadcastFailedError) Error() string { - return fmt.Sprintf("could not broadcast signed aggregated attestation: %s", e.err.Error()) -} - // ComputeValidatorPerformance reports the validator's latest balance along with other important metrics on // rewards and penalties throughout its lifecycle in the beacon chain. func (s *Service) ComputeValidatorPerformance( @@ -360,7 +343,8 @@ func (s *Service) SubmitSignedContributionAndProof( // Wait for p2p broadcast to complete and return the first error (if any) err := errs.Wait() if err != nil { - return &RpcError{Err: err, Reason: Internal} + log.WithError(err).Debug("Could not broadcast signed contribution and proof") + return &RpcError{Err: server.NewBroadcastFailedError("SignedContributionAndProof", err), Reason: Internal} } s.OperationNotifier.OperationFeed().Send(&feed.Event{ @@ -411,7 +395,8 @@ func (s *Service) SubmitSignedAggregateSelectionProof( } if err := s.Broadcaster.Broadcast(ctx, agg); err != nil { - return &RpcError{Err: &AggregateBroadcastFailedError{err: err}, Reason: Internal} + log.WithError(err).Debug("Could not broadcast signed aggregate att and proof") + return &RpcError{Err: server.NewBroadcastFailedError("SignedAggregateAttAndProof", err), Reason: Internal} } if logrus.GetLevel() >= logrus.DebugLevel { diff --git a/beacon-chain/rpc/eth/beacon/handlers_pool.go b/beacon-chain/rpc/eth/beacon/handlers_pool.go index 168519c307..f8ca39e2ea 100644 --- a/beacon-chain/rpc/eth/beacon/handlers_pool.go +++ b/beacon-chain/rpc/eth/beacon/handlers_pool.go @@ -6,8 +6,6 @@ import ( "fmt" "io" "net/http" - "strconv" - "strings" "time" "github.com/OffchainLabs/prysm/v6/api" @@ -31,6 +29,7 @@ import ( "github.com/OffchainLabs/prysm/v6/runtime/version" "github.com/OffchainLabs/prysm/v6/time/slots" "github.com/pkg/errors" + "github.com/sirupsen/logrus" ) const broadcastBLSChangesRateLimit = 128 @@ -200,22 +199,23 @@ func (s *Server) SubmitAttestations(w http.ResponseWriter, r *http.Request) { return } - if len(failedBroadcasts) > 0 { - httputil.HandleError( - w, - fmt.Sprintf("Attestations at index %s could not be broadcasted", strings.Join(failedBroadcasts, ", ")), - http.StatusInternalServerError, - ) - return - } - if len(attFailures) > 0 { - failuresErr := &server.IndexedVerificationFailureError{ + failuresErr := &server.IndexedErrorContainer{ Code: http.StatusBadRequest, - Message: "One or more attestations failed validation", + Message: server.ErrIndexedValidationFail, Failures: attFailures, } httputil.WriteError(w, failuresErr) + return + } + if len(failedBroadcasts) > 0 { + failuresErr := &server.IndexedErrorContainer{ + Code: http.StatusInternalServerError, + Message: server.ErrIndexedBroadcastFail, + Failures: failedBroadcasts, + } + httputil.WriteError(w, failuresErr) + return } } @@ -247,8 +247,8 @@ func (s *Server) SubmitAttestationsV2(w http.ResponseWriter, r *http.Request) { return } - var attFailures []*server.IndexedVerificationFailure - var failedBroadcasts []string + var attFailures []*server.IndexedError + var failedBroadcasts []*server.IndexedError if v >= version.Electra { attFailures, failedBroadcasts, err = s.handleAttestationsElectra(ctx, req.Data) @@ -260,29 +260,30 @@ func (s *Server) SubmitAttestationsV2(w http.ResponseWriter, r *http.Request) { return } - if len(failedBroadcasts) > 0 { - httputil.HandleError( - w, - fmt.Sprintf("Attestations at index %s could not be broadcasted", strings.Join(failedBroadcasts, ", ")), - http.StatusInternalServerError, - ) - return - } - if len(attFailures) > 0 { 
- failuresErr := &server.IndexedVerificationFailureError{ + failuresErr := &server.IndexedErrorContainer{ Code: http.StatusBadRequest, - Message: "One or more attestations failed validation", + Message: server.ErrIndexedValidationFail, Failures: attFailures, } httputil.WriteError(w, failuresErr) + return + } + if len(failedBroadcasts) > 0 { + failuresErr := &server.IndexedErrorContainer{ + Code: http.StatusInternalServerError, + Message: server.ErrIndexedBroadcastFail, + Failures: failedBroadcasts, + } + httputil.WriteError(w, failuresErr) + return } } func (s *Server) handleAttestationsElectra( ctx context.Context, data json.RawMessage, -) (attFailures []*server.IndexedVerificationFailure, failedBroadcasts []string, err error) { +) (attFailures []*server.IndexedError, failedBroadcasts []*server.IndexedError, err error) { var sourceAttestations []*structs.SingleAttestation currentEpoch := slots.ToEpoch(s.TimeFetcher.CurrentSlot()) if currentEpoch < params.BeaconConfig().ElectraForkEpoch { @@ -301,14 +302,14 @@ func (s *Server) handleAttestationsElectra( for i, sourceAtt := range sourceAttestations { att, err := sourceAtt.ToConsensus() if err != nil { - attFailures = append(attFailures, &server.IndexedVerificationFailure{ + attFailures = append(attFailures, &server.IndexedError{ Index: i, Message: "Could not convert request attestation to consensus attestation: " + err.Error(), }) continue } if _, err = bls.SignatureFromBytes(att.Signature); err != nil { - attFailures = append(attFailures, &server.IndexedVerificationFailure{ + attFailures = append(attFailures, &server.IndexedError{ Index: i, Message: "Incorrect attestation signature: " + err.Error(), }) @@ -317,6 +318,13 @@ func (s *Server) handleAttestationsElectra( validAttestations = append(validAttestations, att) } + // We store the error for the first failed broadcast and use it in the log message in case + // there are broadcast issues. Having a single log at the end instead of logging + // for every failed broadcast prevents log noise in case there are many failures. + // Even though we only retain the first error, there is a very good chance that all + // broadcasts fail for the same reason, so this should be sufficient in most cases. 
+ var broadcastErr error + for i, singleAtt := range validAttestations { s.OperationNotifier.OperationFeed().Send(&feed.Event{ Type: operation.SingleAttReceived, @@ -338,31 +346,45 @@ func (s *Server) handleAttestationsElectra( wantedEpoch := slots.ToEpoch(att.Data.Slot) vals, err := s.HeadFetcher.HeadValidatorsIndices(ctx, wantedEpoch) if err != nil { - failedBroadcasts = append(failedBroadcasts, strconv.Itoa(i)) - continue + return nil, nil, errors.Wrap(err, "could not get head validator indices") } subnet := corehelpers.ComputeSubnetFromCommitteeAndSlot(uint64(len(vals)), att.GetCommitteeIndex(), att.Data.Slot) if err = s.Broadcaster.BroadcastAttestation(ctx, subnet, singleAtt); err != nil { - log.WithError(err).Errorf("could not broadcast attestation at index %d", i) - failedBroadcasts = append(failedBroadcasts, strconv.Itoa(i)) + failedBroadcasts = append(failedBroadcasts, &server.IndexedError{ + Index: i, + Message: server.NewBroadcastFailedError("SingleAttestation", err).Error(), + }) + if broadcastErr == nil { + broadcastErr = err + } continue } if features.Get().EnableExperimentalAttestationPool { if err = s.AttestationCache.Add(att); err != nil { - log.WithError(err).Error("could not save attestation") + log.WithError(err).Error("Could not save attestation") } } else { if err = s.AttestationsPool.SaveUnaggregatedAttestation(att); err != nil { - log.WithError(err).Error("could not save attestation") + log.WithError(err).Error("Could not save attestation") } } } + if len(failedBroadcasts) > 0 { + log.WithFields(logrus.Fields{ + "failedCount": len(failedBroadcasts), + "totalCount": len(validAttestations), + }).WithError(broadcastErr).Error("Some attestations failed to be broadcast") + } + return attFailures, failedBroadcasts, nil } -func (s *Server) handleAttestations(ctx context.Context, data json.RawMessage) (attFailures []*server.IndexedVerificationFailure, failedBroadcasts []string, err error) { +func (s *Server) handleAttestations( + ctx context.Context, + data json.RawMessage, +) (attFailures []*server.IndexedError, failedBroadcasts []*server.IndexedError, err error) { var sourceAttestations []*structs.Attestation if slots.ToEpoch(s.TimeFetcher.CurrentSlot()) >= params.BeaconConfig().ElectraForkEpoch { @@ -381,14 +403,14 @@ func (s *Server) handleAttestations(ctx context.Context, data json.RawMessage) ( for i, sourceAtt := range sourceAttestations { att, err := sourceAtt.ToConsensus() if err != nil { - attFailures = append(attFailures, &server.IndexedVerificationFailure{ + attFailures = append(attFailures, &server.IndexedError{ Index: i, Message: "Could not convert request attestation to consensus attestation: " + err.Error(), }) continue } if _, err = bls.SignatureFromBytes(att.Signature); err != nil { - attFailures = append(attFailures, &server.IndexedVerificationFailure{ + attFailures = append(attFailures, &server.IndexedError{ Index: i, Message: "Incorrect attestation signature: " + err.Error(), }) @@ -397,6 +419,13 @@ func (s *Server) handleAttestations(ctx context.Context, data json.RawMessage) ( validAttestations = append(validAttestations, att) } + // We store the error for the first failed broadcast and use it in the log message in case + // there are broadcast issues. Having a single log at the end instead of logging + // for every failed broadcast prevents log noise in case there are many failures. 
+ // Even though we only retain the first error, there is a very good chance that all + // broadcasts fail for the same reason, so this should be sufficient in most cases. + var broadcastErr error + for i, att := range validAttestations { // Broadcast the unaggregated attestation on a feed to notify other services in the beacon node // of a received unaggregated attestation. @@ -413,32 +442,43 @@ func (s *Server) handleAttestations(ctx context.Context, data json.RawMessage) ( wantedEpoch := slots.ToEpoch(att.Data.Slot) vals, err := s.HeadFetcher.HeadValidatorsIndices(ctx, wantedEpoch) if err != nil { - failedBroadcasts = append(failedBroadcasts, strconv.Itoa(i)) - continue + return nil, nil, errors.Wrap(err, "could not get head validator indices") } subnet := corehelpers.ComputeSubnetFromCommitteeAndSlot(uint64(len(vals)), att.Data.CommitteeIndex, att.Data.Slot) if err = s.Broadcaster.BroadcastAttestation(ctx, subnet, att); err != nil { - log.WithError(err).Errorf("could not broadcast attestation at index %d", i) - failedBroadcasts = append(failedBroadcasts, strconv.Itoa(i)) + failedBroadcasts = append(failedBroadcasts, &server.IndexedError{ + Index: i, + Message: server.NewBroadcastFailedError("Attestation", err).Error(), + }) + if broadcastErr == nil { + broadcastErr = err + } continue } if features.Get().EnableExperimentalAttestationPool { if err = s.AttestationCache.Add(att); err != nil { - log.WithError(err).Error("could not save attestation") + log.WithError(err).Error("Could not save attestation") } } else if att.IsAggregated() { if err = s.AttestationsPool.SaveAggregatedAttestation(att); err != nil { - log.WithError(err).Error("could not save aggregated attestation") + log.WithError(err).Error("Could not save aggregated attestation") } } else { if err = s.AttestationsPool.SaveUnaggregatedAttestation(att); err != nil { - log.WithError(err).Error("could not save unaggregated attestation") + log.WithError(err).Error("Could not save unaggregated attestation") } } } + if len(failedBroadcasts) > 0 { + log.WithFields(logrus.Fields{ + "failedCount": len(failedBroadcasts), + "totalCount": len(validAttestations), + }).WithError(broadcastErr).Error("Some attestations failed to be broadcast") + } + return attFailures, failedBroadcasts, nil } @@ -541,11 +581,11 @@ func (s *Server) SubmitSyncCommitteeSignatures(w http.ResponseWriter, r *http.Re } var validMessages []*eth.SyncCommitteeMessage - var msgFailures []*server.IndexedVerificationFailure + var msgFailures []*server.IndexedError for i, sourceMsg := range req.Data { msg, err := sourceMsg.ToConsensus() if err != nil { - msgFailures = append(msgFailures, &server.IndexedVerificationFailure{ + msgFailures = append(msgFailures, &server.IndexedError{ Index: i, Message: "Could not convert request message to consensus message: " + err.Error(), }) @@ -562,7 +602,7 @@ func (s *Server) SubmitSyncCommitteeSignatures(w http.ResponseWriter, r *http.Re } if len(msgFailures) > 0 { - failuresErr := &server.IndexedVerificationFailureError{ + failuresErr := &server.IndexedErrorContainer{ Code: http.StatusBadRequest, Message: "One or more messages failed validation", Failures: msgFailures, @@ -581,7 +621,7 @@ func (s *Server) SubmitBLSToExecutionChanges(w http.ResponseWriter, r *http.Requ httputil.HandleError(w, fmt.Sprintf("Could not get head state: %v", err), http.StatusInternalServerError) return } - var failures []*server.IndexedVerificationFailure + var failures []*server.IndexedError var toBroadcast []*eth.SignedBLSToExecutionChange var req 
[]*structs.SignedBLSToExecutionChange @@ -602,7 +642,7 @@ func (s *Server) SubmitBLSToExecutionChanges(w http.ResponseWriter, r *http.Requ for i, change := range req { sbls, err := change.ToConsensus() if err != nil { - failures = append(failures, &server.IndexedVerificationFailure{ + failures = append(failures, &server.IndexedError{ Index: i, Message: "Unable to decode SignedBLSToExecutionChange: " + err.Error(), }) @@ -610,14 +650,14 @@ func (s *Server) SubmitBLSToExecutionChanges(w http.ResponseWriter, r *http.Requ } _, err = blocks.ValidateBLSToExecutionChange(st, sbls) if err != nil { - failures = append(failures, &server.IndexedVerificationFailure{ + failures = append(failures, &server.IndexedError{ Index: i, Message: "Could not validate SignedBLSToExecutionChange: " + err.Error(), }) continue } if err := blocks.VerifyBLSChangeSignature(st, sbls); err != nil { - failures = append(failures, &server.IndexedVerificationFailure{ + failures = append(failures, &server.IndexedError{ Index: i, Message: "Could not validate signature: " + err.Error(), }) @@ -636,9 +676,9 @@ func (s *Server) SubmitBLSToExecutionChanges(w http.ResponseWriter, r *http.Requ } go s.broadcastBLSChanges(context.Background(), toBroadcast) if len(failures) > 0 { - failuresErr := &server.IndexedVerificationFailureError{ + failuresErr := &server.IndexedErrorContainer{ Code: http.StatusBadRequest, - Message: "One or more BLSToExecutionChange failed validation", + Message: server.ErrIndexedValidationFail, Failures: failures, } httputil.WriteError(w, failuresErr) @@ -655,18 +695,18 @@ func (s *Server) broadcastBLSBatch(ctx context.Context, ptr *[]*eth.SignedBLSToE } st, err := s.ChainInfoFetcher.HeadStateReadOnly(ctx) if err != nil { - log.WithError(err).Error("could not get head state") + log.WithError(err).Error("Could not get head state") return } for _, ch := range (*ptr)[:limit] { if ch != nil { _, err := blocks.ValidateBLSToExecutionChange(st, ch) if err != nil { - log.WithError(err).Error("could not validate BLS to execution change") + log.WithError(err).Error("Could not validate BLS to execution change") continue } if err := s.Broadcaster.Broadcast(ctx, ch); err != nil { - log.WithError(err).Error("could not broadcast BLS to execution changes.") + log.WithError(err).Error("Could not broadcast BLS to execution changes.") } } } diff --git a/beacon-chain/rpc/eth/beacon/handlers_pool_test.go b/beacon-chain/rpc/eth/beacon/handlers_pool_test.go index d3b1a07672..4fb0c7d238 100644 --- a/beacon-chain/rpc/eth/beacon/handlers_pool_test.go +++ b/beacon-chain/rpc/eth/beacon/handlers_pool_test.go @@ -638,7 +638,7 @@ func TestSubmitAttestations(t *testing.T) { s.SubmitAttestations(writer, request) assert.Equal(t, http.StatusBadRequest, writer.Code) - e := &server.IndexedVerificationFailureError{} + e := &server.IndexedErrorContainer{} require.NoError(t, json.Unmarshal(writer.Body.Bytes(), e)) assert.Equal(t, http.StatusBadRequest, e.Code) require.Equal(t, 1, len(e.Failures)) @@ -772,7 +772,7 @@ func TestSubmitAttestations(t *testing.T) { s.SubmitAttestationsV2(writer, request) assert.Equal(t, http.StatusBadRequest, writer.Code) - e := &server.IndexedVerificationFailureError{} + e := &server.IndexedErrorContainer{} require.NoError(t, json.Unmarshal(writer.Body.Bytes(), e)) assert.Equal(t, http.StatusBadRequest, e.Code) require.Equal(t, 1, len(e.Failures)) @@ -873,7 +873,7 @@ func TestSubmitAttestations(t *testing.T) { s.SubmitAttestationsV2(writer, request) assert.Equal(t, http.StatusBadRequest, writer.Code) - e := 
&server.IndexedVerificationFailureError{} + e := &server.IndexedErrorContainer{} require.NoError(t, json.Unmarshal(writer.Body.Bytes(), e)) assert.Equal(t, http.StatusBadRequest, e.Code) require.Equal(t, 1, len(e.Failures)) @@ -1538,7 +1538,7 @@ func TestSubmitSignedBLSToExecutionChanges_Failures(t *testing.T) { s.SubmitBLSToExecutionChanges(writer, request) assert.Equal(t, http.StatusBadRequest, writer.Code) time.Sleep(10 * time.Millisecond) // Delay to allow the routine to start - require.StringContains(t, "One or more BLSToExecutionChange failed validation", writer.Body.String()) + require.StringContains(t, "One or more messages failed validation", writer.Body.String()) assert.Equal(t, true, broadcaster.BroadcastCalled.Load()) assert.Equal(t, numValidators, len(broadcaster.BroadcastMessages)+1) diff --git a/beacon-chain/rpc/eth/validator/BUILD.bazel b/beacon-chain/rpc/eth/validator/BUILD.bazel index f4d7f416a7..a965ef4aee 100644 --- a/beacon-chain/rpc/eth/validator/BUILD.bazel +++ b/beacon-chain/rpc/eth/validator/BUILD.bazel @@ -12,6 +12,7 @@ go_library( visibility = ["//visibility:public"], deps = [ "//api:go_default_library", + "//api/server:go_default_library", "//api/server/structs:go_default_library", "//beacon-chain/blockchain:go_default_library", "//beacon-chain/builder:go_default_library", diff --git a/beacon-chain/rpc/eth/validator/handlers.go b/beacon-chain/rpc/eth/validator/handlers.go index 7be36698d7..043ad17bb8 100644 --- a/beacon-chain/rpc/eth/validator/handlers.go +++ b/beacon-chain/rpc/eth/validator/handlers.go @@ -14,6 +14,7 @@ import ( "time" "github.com/OffchainLabs/prysm/v6/api" + "github.com/OffchainLabs/prysm/v6/api/server" "github.com/OffchainLabs/prysm/v6/api/server/structs" "github.com/OffchainLabs/prysm/v6/beacon-chain/builder" "github.com/OffchainLabs/prysm/v6/beacon-chain/cache" @@ -268,22 +269,61 @@ func (s *Server) SubmitContributionAndProofs(w http.ResponseWriter, r *http.Requ return } - for _, item := range reqData { + var failures []*server.IndexedError + var failedBroadcasts []*server.IndexedError + + for i, item := range reqData { var contribution structs.SignedContributionAndProof if err := json.Unmarshal(item, &contribution); err != nil { - httputil.HandleError(w, "Could not decode item: "+err.Error(), http.StatusBadRequest) - return + failures = append(failures, &server.IndexedError{ + Index: i, + Message: "Could not unmarshal message: " + err.Error(), + }) + continue } consensusItem, err := contribution.ToConsensus() if err != nil { - httputil.HandleError(w, "Could not convert contribution to consensus format: "+err.Error(), http.StatusBadRequest) - return + failures = append(failures, &server.IndexedError{ + Index: i, + Message: "Could not convert request contribution to consensus contribution: " + err.Error(), + }) + continue } - if rpcError := s.CoreService.SubmitSignedContributionAndProof(ctx, consensusItem); rpcError != nil { - httputil.HandleError(w, rpcError.Err.Error(), core.ErrorReasonToHTTP(rpcError.Reason)) - return + + rpcError := s.CoreService.SubmitSignedContributionAndProof(ctx, consensusItem) + if rpcError != nil { + var broadcastFailedErr *server.BroadcastFailedError + if errors.As(rpcError.Err, &broadcastFailedErr) { + failedBroadcasts = append(failedBroadcasts, &server.IndexedError{ + Index: i, + Message: rpcError.Err.Error(), + }) + continue + } else { + httputil.HandleError(w, rpcError.Err.Error(), core.ErrorReasonToHTTP(rpcError.Reason)) + return + } } } + + if len(failures) > 0 { + failuresErr := 
&server.IndexedErrorContainer{ + Code: http.StatusBadRequest, + Message: server.ErrIndexedValidationFail, + Failures: failures, + } + httputil.WriteError(w, failuresErr) + return + } + if len(failedBroadcasts) > 0 { + failuresErr := &server.IndexedErrorContainer{ + Code: http.StatusInternalServerError, + Message: server.ErrIndexedBroadcastFail, + Failures: failedBroadcasts, + } + httputil.WriteError(w, failuresErr) + return + } } // Deprecated: use SubmitAggregateAndProofsV2 instead @@ -322,8 +362,8 @@ func (s *Server) SubmitAggregateAndProofs(w http.ResponseWriter, r *http.Request } rpcError := s.CoreService.SubmitSignedAggregateSelectionProof(ctx, consensusItem) if rpcError != nil { - var aggregateBroadcastFailedError *core.AggregateBroadcastFailedError - ok := errors.As(rpcError.Err, &aggregateBroadcastFailedError) + var broadcastFailedErr *server.BroadcastFailedError + ok := errors.As(rpcError.Err, &broadcastFailedErr) if ok { broadcastFailed = true } else { @@ -368,49 +408,83 @@ func (s *Server) SubmitAggregateAndProofsV2(w http.ResponseWriter, r *http.Reque return } - broadcastFailed := false + var failures []*server.IndexedError + var failedBroadcasts []*server.IndexedError + var rpcError *core.RpcError - for _, raw := range reqData { + for i, raw := range reqData { if v >= version.Electra { var signedAggregate structs.SignedAggregateAttestationAndProofElectra err = json.Unmarshal(raw, &signedAggregate) if err != nil { - httputil.HandleError(w, "Failed to parse aggregate attestation and proof: "+err.Error(), http.StatusBadRequest) - return + failures = append(failures, &server.IndexedError{ + Index: i, + Message: "Could not parse message: " + err.Error(), + }) + continue } consensusItem, err := signedAggregate.ToConsensus() if err != nil { - httputil.HandleError(w, "Could not convert request aggregate to consensus aggregate: "+err.Error(), http.StatusBadRequest) - return + failures = append(failures, &server.IndexedError{ + Index: i, + Message: "Could not convert request aggregate to consensus aggregate: " + err.Error(), + }) + continue } rpcError = s.CoreService.SubmitSignedAggregateSelectionProof(ctx, consensusItem) } else { var signedAggregate structs.SignedAggregateAttestationAndProof err = json.Unmarshal(raw, &signedAggregate) if err != nil { - httputil.HandleError(w, "Failed to parse aggregate attestation and proof: "+err.Error(), http.StatusBadRequest) - return + failures = append(failures, &server.IndexedError{ + Index: i, + Message: "Could not parse message: " + err.Error(), + }) + continue } consensusItem, err := signedAggregate.ToConsensus() if err != nil { - httputil.HandleError(w, "Could not convert request aggregate to consensus aggregate: "+err.Error(), http.StatusBadRequest) - return + failures = append(failures, &server.IndexedError{ + Index: i, + Message: "Could not convert request aggregate to consensus aggregate: " + err.Error(), + }) + continue } rpcError = s.CoreService.SubmitSignedAggregateSelectionProof(ctx, consensusItem) } if rpcError != nil { - var aggregateBroadcastFailedError *core.AggregateBroadcastFailedError - if errors.As(rpcError.Err, &aggregateBroadcastFailedError) { - broadcastFailed = true + var broadcastFailedErr *server.BroadcastFailedError + if errors.As(rpcError.Err, &broadcastFailedErr) { + failedBroadcasts = append(failedBroadcasts, &server.IndexedError{ + Index: i, + Message: rpcError.Err.Error(), + }) + continue } else { httputil.HandleError(w, rpcError.Err.Error(), core.ErrorReasonToHTTP(rpcError.Reason)) return } } } - if 
broadcastFailed { - httputil.HandleError(w, "Could not broadcast one or more signed aggregated attestations", http.StatusInternalServerError) + + if len(failures) > 0 { + failuresErr := &server.IndexedErrorContainer{ + Code: http.StatusBadRequest, + Message: server.ErrIndexedValidationFail, + Failures: failures, + } + httputil.WriteError(w, failuresErr) + return + } + if len(failedBroadcasts) > 0 { + failuresErr := &server.IndexedErrorContainer{ + Code: http.StatusInternalServerError, + Message: server.ErrIndexedBroadcastFail, + Failures: failedBroadcasts, + } + httputil.WriteError(w, failuresErr) + return } } diff --git a/changelog/radek_api-individual-failure.md b/changelog/radek_api-individual-failure.md new file mode 100644 index 0000000000..26480c06fc --- /dev/null +++ b/changelog/radek_api-individual-failure.md @@ -0,0 +1,3 @@ +### Changed + +- Improve returning individual message errors from Beacon API. \ No newline at end of file diff --git a/cmd/prysmctl/validator/withdraw_test.go b/cmd/prysmctl/validator/withdraw_test.go index 1b8d040c49..dcc4a61339 100644 --- a/cmd/prysmctl/validator/withdraw_test.go +++ b/cmd/prysmctl/validator/withdraw_test.go @@ -219,8 +219,8 @@ func TestCallWithdrawalEndpoint_Errors(t *testing.T) { if r.Method == http.MethodPost && r.RequestURI == "/eth/v1/beacon/pool/bls_to_execution_changes" { w.WriteHeader(400) w.Header().Set("Content-Type", "application/json") - err = json.NewEncoder(w).Encode(&server.IndexedVerificationFailureError{ - Failures: []*server.IndexedVerificationFailure{ + err = json.NewEncoder(w).Encode(&server.IndexedErrorContainer{ + Failures: []*server.IndexedError{ {Index: 0, Message: "Could not validate SignedBLSToExecutionChange"}, }, }) From 4eab41ea4ca770840bf2a00c7613a5d67c7a54cc Mon Sep 17 00:00:00 2001 From: Jun Song <87601811+syjn99@users.noreply.github.com> Date: Tue, 14 Oct 2025 18:33:52 +0100 Subject: [PATCH 024/103] SSZ-QL: use `fastssz`-generated `SizeSSZ` method & clarify `Size` method (#15864) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Add SizeSSZ as a member of SSZObject * Temporarily rename dereferencePointer function * Fix analyzeType: use reflect.Value for analyzing * Fix PopulateVariableLengthInfo: change function signature & reset pointer * Remove Container arm for Size function as it'll be handled in the previous branch * Remove OffsetBytes function in listInfo * Refactor and document codes * Remove misleading "fixedSize" concept & Add Uint8...64 SSZTypes * Add size testing * Move TestSSZObject_Batch and rename it as TestHashTreeRoot * Changelog :) * Rename endOffset to fixedOffset --------- Co-authored-by: Radosław Kapka --- changelog/syjn99_ssz-ql-fastssz-sizessz.md | 7 + encoding/ssz/query/BUILD.bazel | 1 - encoding/ssz/query/analyzer.go | 272 ++++++++++++--------- encoding/ssz/query/analyzer_test.go | 17 -- encoding/ssz/query/bitvector.go | 9 + encoding/ssz/query/container.go | 6 +- encoding/ssz/query/list.go | 14 -- encoding/ssz/query/query_test.go | 134 +++++----- encoding/ssz/query/ssz_info.go | 54 ++-- encoding/ssz/query/ssz_object.go | 1 + encoding/ssz/query/ssz_type.go | 18 +- encoding/ssz/query/testutil/runner.go | 8 +- encoding/ssz/query/vector.go | 12 + 13 files changed, 309 insertions(+), 244 deletions(-) create mode 100644 changelog/syjn99_ssz-ql-fastssz-sizessz.md delete mode 100644 encoding/ssz/query/analyzer_test.go diff --git a/changelog/syjn99_ssz-ql-fastssz-sizessz.md b/changelog/syjn99_ssz-ql-fastssz-sizessz.md new file mode 100644 index 
0000000000..7a3a539c2a --- /dev/null +++ b/changelog/syjn99_ssz-ql-fastssz-sizessz.md @@ -0,0 +1,7 @@ +### Added + +- SSZ-QL: Use `fastssz`'s `SizeSSZ` method for calculating the size of `Container` type. + +### Changed + +- SSZ-QL: Clarify `Size` method with more sophisticated `SSZType`s. diff --git a/encoding/ssz/query/BUILD.bazel b/encoding/ssz/query/BUILD.bazel index 837d17054c..79b8ad84c1 100644 --- a/encoding/ssz/query/BUILD.bazel +++ b/encoding/ssz/query/BUILD.bazel @@ -24,7 +24,6 @@ go_library( go_test( name = "go_default_test", srcs = [ - "analyzer_test.go", "path_test.go", "query_test.go", "tag_parser_test.go", diff --git a/encoding/ssz/query/analyzer.go b/encoding/ssz/query/analyzer.go index 52ba85ad6d..83a5b964cc 100644 --- a/encoding/ssz/query/analyzer.go +++ b/encoding/ssz/query/analyzer.go @@ -11,20 +11,17 @@ const offsetBytes = 4 // AnalyzeObject analyzes given object and returns its SSZ information. func AnalyzeObject(obj SSZObject) (*sszInfo, error) { - value := dereferencePointer(obj) + value := reflect.ValueOf(obj) - info, err := analyzeType(value.Type(), nil) + info, err := analyzeType(value, nil) if err != nil { return nil, fmt.Errorf("could not analyze type %s: %w", value.Type().Name(), err) } - // Store the original object interface - info.source = obj - // Populate variable-length information using the actual value. - err = PopulateVariableLengthInfo(info, value.Interface()) + err = PopulateVariableLengthInfo(info, value) if err != nil { - return nil, fmt.Errorf("could not populate variable length info: %w", err) + return nil, fmt.Errorf("could not populate variable length info for type %s: %w", value.Type().Name(), err) } return info, nil @@ -33,13 +30,13 @@ func AnalyzeObject(obj SSZObject) (*sszInfo, error) { // PopulateVariableLengthInfo populates runtime information for SSZ fields of variable-sized types. // This function updates the sszInfo structure with actual lengths and offsets that can only // be determined at runtime for variable-sized items like Lists and variable-sized Container fields. -func PopulateVariableLengthInfo(sszInfo *sszInfo, value any) error { +func PopulateVariableLengthInfo(sszInfo *sszInfo, value reflect.Value) error { if sszInfo == nil { return errors.New("sszInfo is nil") } - if value == nil { - return errors.New("value is nil") + if !value.IsValid() { + return errors.New("value is invalid") } // Short circuit: If the type is fixed-sized, we don't need to fill in the info. @@ -59,18 +56,18 @@ func PopulateVariableLengthInfo(sszInfo *sszInfo, value any) error { return errors.New("listInfo is nil") } - val := reflect.ValueOf(value) - if val.Kind() != reflect.Slice { - return fmt.Errorf("expected slice for List type, got %v", val.Kind()) + if value.Kind() != reflect.Slice { + return fmt.Errorf("expected slice for List type, got %v", value.Kind()) } - length := val.Len() + + length := value.Len() if listInfo.element.isVariable { listInfo.elementSizes = make([]uint64, 0, length) // Populate nested variable-sized type element lengths recursively. 
for i := range length { - if err := PopulateVariableLengthInfo(listInfo.element, val.Index(i).Interface()); err != nil { + if err := PopulateVariableLengthInfo(listInfo.element, value.Index(i)); err != nil { return fmt.Errorf("could not populate nested list element at index %d: %w", i, err) } listInfo.elementSizes = append(listInfo.elementSizes, listInfo.element.Size()) @@ -94,8 +91,7 @@ func PopulateVariableLengthInfo(sszInfo *sszInfo, value any) error { return errors.New("bitlistInfo is nil") } - val := reflect.ValueOf(value) - if err := bitlistInfo.SetLengthFromBytes(val.Bytes()); err != nil { + if err := bitlistInfo.SetLengthFromBytes(value.Bytes()); err != nil { return fmt.Errorf("could not set bitlist length from bytes: %w", err) } @@ -108,11 +104,21 @@ func PopulateVariableLengthInfo(sszInfo *sszInfo, value any) error { return fmt.Errorf("could not get container info: %w", err) } + if containerInfo == nil { + return errors.New("containerInfo is nil") + } + // Dereference first in case value is a pointer. derefValue := dereferencePointer(value) + if derefValue.Kind() != reflect.Struct { + return fmt.Errorf("expected struct for Container type, got %v", derefValue.Kind()) + } - // Start with the fixed size of this Container. - currentOffset := sszInfo.FixedSize() + // Reset the pointer to the new value. + sszInfo.source = castToSSZObject(derefValue) + + // Start with the end offset of this Container. + currentOffset := containerInfo.fixedOffset for _, fieldName := range containerInfo.order { fieldInfo := containerInfo.fields[fieldName] @@ -128,13 +134,15 @@ func PopulateVariableLengthInfo(sszInfo *sszInfo, value any) error { // Recursively populate variable-sized fields. fieldValue := derefValue.FieldByName(fieldInfo.goFieldName) - if err := PopulateVariableLengthInfo(childSszInfo, fieldValue.Interface()); err != nil { + if err := PopulateVariableLengthInfo(childSszInfo, fieldValue); err != nil { return fmt.Errorf("could not populate from value for field %s: %w", fieldName, err) } // Each variable-sized element needs an offset entry. - if childSszInfo.sszType == List { - currentOffset += childSszInfo.listInfo.OffsetBytes() + if listInfo, err := childSszInfo.ListInfo(); err == nil && listInfo != nil { + if listInfo.element.isVariable { + currentOffset += listInfo.Length() * offsetBytes + } } // Set the actual offset for variable-sized fields. @@ -149,66 +157,64 @@ func PopulateVariableLengthInfo(sszInfo *sszInfo, value any) error { } } -// analyzeType is an entry point that inspects a reflect.Type and computes its SSZ layout information. -func analyzeType(typ reflect.Type, tag *reflect.StructTag) (*sszInfo, error) { - switch typ.Kind() { +// analyzeType is an entry point that inspects a reflect.Value and computes its SSZ layout information. +func analyzeType(value reflect.Value, tag *reflect.StructTag) (*sszInfo, error) { + switch value.Kind() { // Basic types (e.g., uintN where N is 8, 16, 32, 64) // NOTE: uint128 and uint256 are represented as []byte in Go, // so we handle them as slices. See `analyzeHomogeneousColType`. case reflect.Uint64, reflect.Uint32, reflect.Uint16, reflect.Uint8, reflect.Bool: - return analyzeBasicType(typ) + return analyzeBasicType(value) case reflect.Slice: - return analyzeHomogeneousColType(typ, tag) + return analyzeHomogeneousColType(value, tag) case reflect.Struct: - return analyzeContainerType(typ) + return analyzeContainerType(value) - case reflect.Ptr: - // Dereference pointer types. 
- return analyzeType(typ.Elem(), tag) + case reflect.Pointer: + derefValue := dereferencePointer(value) + return analyzeType(derefValue, tag) default: - return nil, fmt.Errorf("unsupported type %v for SSZ calculation", typ.Kind()) + return nil, fmt.Errorf("unsupported type %v for SSZ calculation", value.Kind()) } } // analyzeBasicType analyzes SSZ basic types (uintN, bool) and returns its info. -func analyzeBasicType(typ reflect.Type) (*sszInfo, error) { +func analyzeBasicType(value reflect.Value) (*sszInfo, error) { + var sszType SSZType + + switch value.Kind() { + case reflect.Uint64: + sszType = Uint64 + case reflect.Uint32: + sszType = Uint32 + case reflect.Uint16: + sszType = Uint16 + case reflect.Uint8: + sszType = Uint8 + case reflect.Bool: + sszType = Boolean + default: + return nil, fmt.Errorf("unsupported basic type %v for SSZ calculation", value.Kind()) + } + sszInfo := &sszInfo{ - typ: typ, + sszType: sszType, + typ: value.Type(), // Every basic type is fixed-size and not variable. isVariable: false, } - switch typ.Kind() { - case reflect.Uint64: - sszInfo.sszType = UintN - sszInfo.fixedSize = 8 - case reflect.Uint32: - sszInfo.sszType = UintN - sszInfo.fixedSize = 4 - case reflect.Uint16: - sszInfo.sszType = UintN - sszInfo.fixedSize = 2 - case reflect.Uint8: - sszInfo.sszType = UintN - sszInfo.fixedSize = 1 - case reflect.Bool: - sszInfo.sszType = Boolean - sszInfo.fixedSize = 1 - default: - return nil, fmt.Errorf("unsupported basic type %v for SSZ calculation", typ.Kind()) - } - return sszInfo, nil } // analyzeHomogeneousColType analyzes homogeneous collection types (e.g., List, Vector, Bitlist, Bitvector) and returns its SSZ info. -func analyzeHomogeneousColType(typ reflect.Type, tag *reflect.StructTag) (*sszInfo, error) { - if typ.Kind() != reflect.Slice { - return nil, fmt.Errorf("can only analyze slice types, got %v", typ.Kind()) +func analyzeHomogeneousColType(value reflect.Value, tag *reflect.StructTag) (*sszInfo, error) { + if value.Kind() != reflect.Slice { + return nil, fmt.Errorf("can only analyze slice types, got %v", value.Kind()) } // Parse the first dimension from the tag and get remaining tag for element @@ -220,8 +226,12 @@ func analyzeHomogeneousColType(typ reflect.Type, tag *reflect.StructTag) (*sszIn return nil, errors.New("ssz tag is required for slice types") } + // NOTE: Elem() won't panic because value is guaranteed to be a slice here. + elementType := value.Type().Elem() // Analyze element type with remaining dimensions - elementInfo, err := analyzeType(typ.Elem(), remainingTag) + // Note that it is enough to analyze by a zero value, + // as the actual value with variable-sized type will be populated later. + elementInfo, err := analyzeType(reflect.New(elementType), remainingTag) if err != nil { return nil, fmt.Errorf("could not analyze element type for homogeneous collection: %w", err) } @@ -233,7 +243,7 @@ func analyzeHomogeneousColType(typ reflect.Type, tag *reflect.StructTag) (*sszIn return nil, fmt.Errorf("could not get list limit: %w", err) } - return analyzeListType(typ, elementInfo, limit, sszDimension.isBitfield) + return analyzeListType(value, elementInfo, limit, sszDimension.isBitfield) } // 2. 
Handle Vector/Bitvector type @@ -243,7 +253,7 @@ func analyzeHomogeneousColType(typ reflect.Type, tag *reflect.StructTag) (*sszIn return nil, fmt.Errorf("could not get vector length: %w", err) } - return analyzeVectorType(typ, elementInfo, length, sszDimension.isBitfield) + return analyzeVectorType(value, elementInfo, length, sszDimension.isBitfield) } // Parsing ssz tag doesn't provide enough information to determine the collection type, @@ -252,13 +262,12 @@ func analyzeHomogeneousColType(typ reflect.Type, tag *reflect.StructTag) (*sszIn } // analyzeListType analyzes SSZ List/Bitlist type and returns its SSZ info. -func analyzeListType(typ reflect.Type, elementInfo *sszInfo, limit uint64, isBitfield bool) (*sszInfo, error) { +func analyzeListType(value reflect.Value, elementInfo *sszInfo, limit uint64, isBitfield bool) (*sszInfo, error) { if isBitfield { return &sszInfo{ sszType: Bitlist, - typ: typ, + typ: value.Type(), - fixedSize: offsetBytes, isVariable: true, bitlistInfo: &bitlistInfo{ @@ -273,9 +282,8 @@ func analyzeListType(typ reflect.Type, elementInfo *sszInfo, limit uint64, isBit return &sszInfo{ sszType: List, - typ: typ, + typ: value.Type(), - fixedSize: offsetBytes, isVariable: true, listInfo: &listInfo{ @@ -286,14 +294,12 @@ func analyzeListType(typ reflect.Type, elementInfo *sszInfo, limit uint64, isBit } // analyzeVectorType analyzes SSZ Vector/Bitvector type and returns its SSZ info. -func analyzeVectorType(typ reflect.Type, elementInfo *sszInfo, length uint64, isBitfield bool) (*sszInfo, error) { +func analyzeVectorType(value reflect.Value, elementInfo *sszInfo, length uint64, isBitfield bool) (*sszInfo, error) { if isBitfield { return &sszInfo{ sszType: Bitvector, - typ: typ, + typ: value.Type(), - // Size in bytes - fixedSize: length, isVariable: false, bitvectorInfo: &bitvectorInfo{ @@ -314,9 +320,8 @@ func analyzeVectorType(typ reflect.Type, elementInfo *sszInfo, length uint64, is return &sszInfo{ sszType: Vector, - typ: typ, + typ: value.Type(), - fixedSize: length * elementInfo.Size(), isVariable: false, vectorInfo: &vectorInfo{ @@ -327,44 +332,36 @@ func analyzeVectorType(typ reflect.Type, elementInfo *sszInfo, length uint64, is } // analyzeContainerType analyzes SSZ Container type and returns its SSZ info. -func analyzeContainerType(typ reflect.Type) (*sszInfo, error) { - if typ.Kind() != reflect.Struct { - return nil, fmt.Errorf("can only analyze struct types, got %v", typ.Kind()) +func analyzeContainerType(value reflect.Value) (*sszInfo, error) { + if value.Kind() != reflect.Struct { + return nil, fmt.Errorf("can only analyze struct types, got %v", value.Kind()) } + containerTyp := value.Type() fields := make(map[string]*fieldInfo) - order := make([]string, 0, typ.NumField()) + order := make([]string, 0) - sszInfo := &sszInfo{ - sszType: Container, - typ: typ, - } + isVariable := false var currentOffset uint64 - for i := 0; i < typ.NumField(); i++ { - field := typ.Field(i) + for i := 0; i < value.NumField(); i++ { + structFieldInfo := containerTyp.Field(i) // Protobuf-generated structs contain private fields we must skip. // e.g., state, sizeCache, unknownFields, etc. - if !field.IsExported() { + if !structFieldInfo.IsExported() { continue } - // The JSON tag contains the field name in the first part. - // e.g., "attesting_indices,omitempty" -> "attesting_indices". 
- jsonTag := field.Tag.Get("json") - if jsonTag == "" { - return nil, fmt.Errorf("field %s has no JSON tag", field.Name) - } - - // NOTE: `fieldName` is a string with `snake_case` format (following consensus specs). - fieldName := strings.Split(jsonTag, ",")[0] - if fieldName == "" { - return nil, fmt.Errorf("field %s has an empty JSON tag", field.Name) + tag := structFieldInfo.Tag + goFieldName := structFieldInfo.Name + fieldName, err := parseFieldNameFromTag(tag) + if err != nil { + return nil, fmt.Errorf("could not parse field name from tag for field %s: %w", goFieldName, err) } // Analyze each field so that we can complete full SSZ information. - info, err := analyzeType(field.Type, &field.Tag) + info, err := analyzeType(value.Field(i), &tag) if err != nil { return nil, fmt.Errorf("could not analyze type for field %s: %w", fieldName, err) } @@ -373,7 +370,7 @@ func analyzeContainerType(typ reflect.Type) (*sszInfo, error) { fields[fieldName] = &fieldInfo{ sszInfo: info, offset: currentOffset, - goFieldName: field.Name, + goFieldName: goFieldName, } // Persist order order = append(order, fieldName) @@ -382,34 +379,87 @@ func analyzeContainerType(typ reflect.Type) (*sszInfo, error) { if info.isVariable { // If one of the fields is variable-sized, // the entire struct is considered variable-sized. - sszInfo.isVariable = true + isVariable = true currentOffset += offsetBytes } else { - currentOffset += info.fixedSize + currentOffset += info.Size() } } - sszInfo.fixedSize = currentOffset - sszInfo.containerInfo = &containerInfo{ - fields: fields, - order: order, - } + return &sszInfo{ + sszType: Container, + typ: containerTyp, + source: castToSSZObject(value), - return sszInfo, nil + isVariable: isVariable, + + containerInfo: &containerInfo{ + fields: fields, + order: order, + fixedOffset: currentOffset, + }, + }, nil } // dereferencePointer dereferences a pointer to get the underlying value using reflection. -func dereferencePointer(obj any) reflect.Value { - value := reflect.ValueOf(obj) - if value.Kind() == reflect.Ptr { +func dereferencePointer(value reflect.Value) reflect.Value { + derefValue := value + + if value.IsValid() && value.Kind() == reflect.Pointer { if value.IsNil() { - // If we encounter a nil pointer before the end of the path, we can still proceed - // by analyzing the type, not the value. - value = reflect.New(value.Type().Elem()).Elem() + // Create a zero value if the pointer is nil. + derefValue = reflect.New(value.Type().Elem()).Elem() } else { - value = value.Elem() + derefValue = value.Elem() } } - return value + return derefValue +} + +// castToSSZObject attempts to cast a reflect.Value to the SSZObject interface. +// If failed, it returns nil. +func castToSSZObject(value reflect.Value) SSZObject { + if !value.IsValid() { + return nil + } + + // SSZObject is only implemented by struct types. + if value.Kind() != reflect.Struct { + return nil + } + + // To cast to SSZObject, we need the addressable value. + if !value.CanAddr() { + return nil + } + + if sszObj, ok := value.Addr().Interface().(SSZObject); ok { + return sszObj + } + + return nil +} + +// parseFieldNameFromTag extracts the field name (`snake_case` format) +// from a struct tag by looking for the json tag. +// The JSON tag contains the field name in the first part. +// e.g., "attesting_indices,omitempty" -> "attesting_indices". 
+func parseFieldNameFromTag(tag reflect.StructTag) (string, error) { + jsonTag := tag.Get("json") + if jsonTag == "" { + return "", errors.New("no JSON tag found") + } + + substrings := strings.Split(jsonTag, ",") + if len(substrings) == 0 { + return "", errors.New("invalid JSON tag format") + } + + fieldName := strings.TrimSpace(substrings[0]) + if fieldName == "" { + return "", errors.New("empty field name") + } + + return fieldName, nil } diff --git a/encoding/ssz/query/analyzer_test.go b/encoding/ssz/query/analyzer_test.go deleted file mode 100644 index 7c8d4666fa..0000000000 --- a/encoding/ssz/query/analyzer_test.go +++ /dev/null @@ -1,17 +0,0 @@ -package query_test - -import ( - "testing" - - "github.com/OffchainLabs/prysm/v6/encoding/ssz/query" - sszquerypb "github.com/OffchainLabs/prysm/v6/proto/ssz_query" - "github.com/OffchainLabs/prysm/v6/testing/require" -) - -func TestAnalyzeSSZInfo(t *testing.T) { - info, err := query.AnalyzeObject(&sszquerypb.FixedTestContainer{}) - require.NoError(t, err) - - require.NotNil(t, info, "Expected non-nil SSZ info") - require.Equal(t, uint64(565), info.FixedSize()) -} diff --git a/encoding/ssz/query/bitvector.go b/encoding/ssz/query/bitvector.go index 046189d232..7ef42686d7 100644 --- a/encoding/ssz/query/bitvector.go +++ b/encoding/ssz/query/bitvector.go @@ -13,3 +13,12 @@ func (v *bitvectorInfo) Length() uint64 { return v.length } + +func (v *bitvectorInfo) Size() uint64 { + if v == nil { + return 0 + } + + // Size in bytes. + return v.length / 8 +} diff --git a/encoding/ssz/query/container.go b/encoding/ssz/query/container.go index 374bd7c4fb..f0d2fc9088 100644 --- a/encoding/ssz/query/container.go +++ b/encoding/ssz/query/container.go @@ -3,9 +3,11 @@ package query // containerInfo has // 1. fields: a field map that maps a field's JSON name to its sszInfo for nested Containers // 2. order: a list of field names in the order they should be serialized +// 3. fixedOffset: the total size of the fixed part of the container type containerInfo struct { - fields map[string]*fieldInfo - order []string + fields map[string]*fieldInfo + order []string + fixedOffset uint64 } type fieldInfo struct { diff --git a/encoding/ssz/query/list.go b/encoding/ssz/query/list.go index 5b797a422f..d09a5fd821 100644 --- a/encoding/ssz/query/list.go +++ b/encoding/ssz/query/list.go @@ -71,17 +71,3 @@ func (l *listInfo) Size() uint64 { } return totalSize } - -// OffsetBytes returns the total number of offset bytes used for the list elements. -// Each variable-sized element uses 4 bytes to store its offset. 
-func (l *listInfo) OffsetBytes() uint64 { - if l == nil { - return 0 - } - - if !l.element.isVariable { - return 0 - } - - return offsetBytes * l.length -} diff --git a/encoding/ssz/query/query_test.go b/encoding/ssz/query/query_test.go index 934f28a5f9..0b11a57556 100644 --- a/encoding/ssz/query/query_test.go +++ b/encoding/ssz/query/query_test.go @@ -11,6 +11,34 @@ import ( "github.com/prysmaticlabs/go-bitfield" ) +func TestSize(t *testing.T) { + tests := []struct { + name string + obj query.SSZObject + expectedSize uint64 + }{ + { + name: "FixedTestContainer", + obj: &sszquerypb.FixedTestContainer{}, + expectedSize: 565, + }, + { + name: "VariableTestContainer", + obj: &sszquerypb.VariableTestContainer{}, + expectedSize: 128, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + info, err := query.AnalyzeObject(tt.obj) + require.NoError(t, err) + require.NotNil(t, info) + require.Equal(t, tt.expectedSize, info.Size()) + }) + } +} + func TestCalculateOffsetAndLength(t *testing.T) { type testCase struct { name string @@ -224,6 +252,56 @@ func TestCalculateOffsetAndLength(t *testing.T) { }) } +func TestHashTreeRoot(t *testing.T) { + tests := []struct { + name string + obj query.SSZObject + }{ + { + name: "FixedNestedContainer", + obj: &sszquerypb.FixedNestedContainer{ + Value1: 42, + Value2: []byte{0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08}, + }, + }, + { + name: "FixedTestContainer", + obj: createFixedTestContainer(), + }, + { + name: "VariableNestedContainer", + obj: &sszquerypb.VariableNestedContainer{ + Value1: 84, + FieldListUint64: []uint64{1, 2, 3, 4, 5}, + NestedListField: [][]byte{ + {0x0a, 0x0b, 0x0c}, + {0x1a, 0x1b, 0x1c, 0x1d}, + }, + }, + }, + { + name: "VariableTestContainer", + obj: createVariableTestContainer(), + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + // Analyze the object to get its sszInfo + info, err := query.AnalyzeObject(tt.obj) + require.NoError(t, err) + require.NotNil(t, info, "Expected non-nil SSZ info") + + // Call HashTreeRoot on the sszInfo and compare results + hashTreeRoot, err := info.HashTreeRoot() + require.NoError(t, err, "HashTreeRoot should not return an error") + expectedHashTreeRoot, err := tt.obj.HashTreeRoot() + require.NoError(t, err, "HashTreeRoot on original object should not return an error") + require.Equal(t, expectedHashTreeRoot, hashTreeRoot, "HashTreeRoot from sszInfo should match original object's HashTreeRoot") + }) + } +} + func TestRoundTripSszInfo(t *testing.T) { specs := []testutil.TestSpec{ getFixedTestContainerSpec(), @@ -364,62 +442,6 @@ func getFixedTestContainerSpec() testutil.TestSpec { } } -func TestSSZObject_batch(t *testing.T) { - tests := []struct { - name string - obj any - }{ - { - name: "FixedNestedContainer", - obj: &sszquerypb.FixedNestedContainer{ - Value1: 42, - Value2: []byte{0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08}, - }, - }, - { - name: "FixedTestContainer", - obj: createFixedTestContainer(), - }, - { - name: "VariableNestedContainer", - obj: &sszquerypb.VariableNestedContainer{ - Value1: 84, - FieldListUint64: []uint64{1, 2, 3, 4, 5}, - NestedListField: [][]byte{ - {0x0a, 0x0b, 0x0c}, - {0x1a, 0x1b, 0x1c, 0x1d}, - }, - }, - }, - { - name: 
"VariableTestContainer", - obj: createVariableTestContainer(), - }, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - // Analyze the object to get its sszInfo - object, ok := tt.obj.(query.SSZObject) - require.Equal(t, true, ok, "Expected object to implement SSZObject") - info, err := query.AnalyzeObject(object) - require.NoError(t, err) - require.NotNil(t, info, "Expected non-nil SSZ info") - - // Ensure the original object implements SSZObject - originalFunctions, ok := tt.obj.(query.SSZObject) - require.Equal(t, ok, true, "Original object does not implement SSZObject") - - // Call HashTreeRoot on the sszInfo and compare results - hashTreeRoot, err := info.HashTreeRoot() - require.NoError(t, err, "HashTreeRoot should not return an error") - expectedHashTreeRoot, err := originalFunctions.HashTreeRoot() - require.NoError(t, err, "HashTreeRoot on original object should not return an error") - require.Equal(t, expectedHashTreeRoot, hashTreeRoot, "HashTreeRoot from sszInfo should match original object's HashTreeRoot") - }) - } -} - func createVariableTestContainer() *sszquerypb.VariableTestContainer { leadingField := make([]byte, 32) for i := range leadingField { diff --git a/encoding/ssz/query/ssz_info.go b/encoding/ssz/query/ssz_info.go index 0ba3369196..37fe9507c5 100644 --- a/encoding/ssz/query/ssz_info.go +++ b/encoding/ssz/query/ssz_info.go @@ -18,8 +18,6 @@ type sszInfo struct { // isVariable is true if the struct contains any variable-size fields. isVariable bool - // fixedSize is the total size of the struct's fixed part. - fixedSize uint64 // For Container types. containerInfo *containerInfo @@ -37,46 +35,38 @@ type sszInfo struct { bitvectorInfo *bitvectorInfo } -func (info *sszInfo) FixedSize() uint64 { - if info == nil { - return 0 - } - return info.fixedSize -} - func (info *sszInfo) Size() uint64 { if info == nil { return 0 } - // Easy case: if the type is not variable, we can return the fixed size. - if !info.isVariable { - return info.fixedSize - } - switch info.sszType { + case Uint8: + return 1 + case Uint16: + return 2 + case Uint32: + return 4 + case Uint64: + return 8 + case Boolean: + return 1 + case Container: + // Using existing API if the pointer is available. + if info.source != nil { + return uint64(info.source.SizeSSZ()) + } + + return 0 + case Vector: + return info.vectorInfo.Size() case List: return info.listInfo.Size() - + case Bitvector: + return info.bitvectorInfo.Size() case Bitlist: return info.bitlistInfo.Size() - case Container: - size := info.fixedSize - for _, fieldInfo := range info.containerInfo.fields { - if !fieldInfo.sszInfo.isVariable { - continue - } - - // Include offset bytes inside nested lists. 
- if fieldInfo.sszInfo.sszType == List { - size += fieldInfo.sszInfo.listInfo.OffsetBytes() - } - - size += fieldInfo.sszInfo.Size() - } - return size - default: return 0 } @@ -193,7 +183,7 @@ func printRecursive(info *sszInfo, builder *strings.Builder, prefix string) { switch info.sszType { case Container: - builder.WriteString(fmt.Sprintf("%s (%s / fixed size: %d, total size: %d)\n", info, sizeDesc, info.FixedSize(), info.Size())) + builder.WriteString(fmt.Sprintf("%s (%s / size: %d)\n", info, sizeDesc, info.Size())) for i, key := range info.containerInfo.order { connector := "├─" diff --git a/encoding/ssz/query/ssz_object.go b/encoding/ssz/query/ssz_object.go index a56b15983d..ae60613696 100644 --- a/encoding/ssz/query/ssz_object.go +++ b/encoding/ssz/query/ssz_object.go @@ -4,6 +4,7 @@ import "errors" type SSZObject interface { HashTreeRoot() ([32]byte, error) + SizeSSZ() int } // HashTreeRoot calls the HashTreeRoot method on the stored interface if it implements SSZObject. diff --git a/encoding/ssz/query/ssz_type.go b/encoding/ssz/query/ssz_type.go index fe6195cc8f..a31e5e1b73 100644 --- a/encoding/ssz/query/ssz_type.go +++ b/encoding/ssz/query/ssz_type.go @@ -9,8 +9,10 @@ type SSZType int // SSZ type constants. const ( // Basic types - UintN SSZType = iota - Byte + Uint8 SSZType = iota + Uint16 + Uint32 + Uint64 Boolean // Composite types @@ -27,10 +29,14 @@ const ( func (t SSZType) String() string { switch t { - case UintN: - return "UintN" - case Byte: - return "Byte" + case Uint8: + return "Uint8" + case Uint16: + return "Uint16" + case Uint32: + return "Uint32" + case Uint64: + return "Uint64" case Boolean: return "Boolean" case Container: diff --git a/encoding/ssz/query/testutil/runner.go b/encoding/ssz/query/testutil/runner.go index 610e2d6be0..066d1d4464 100644 --- a/encoding/ssz/query/testutil/runner.go +++ b/encoding/ssz/query/testutil/runner.go @@ -1,6 +1,7 @@ package testutil import ( + "reflect" "testing" "github.com/OffchainLabs/prysm/v6/encoding/ssz/query" @@ -10,14 +11,11 @@ import ( func RunStructTest(t *testing.T, spec TestSpec) { t.Run(spec.Name, func(t *testing.T) { - object, ok := spec.Type.(query.SSZObject) - require.Equal(t, true, ok, "spec.Type must implement SSZObject interface") - require.NotNil(t, object, "spec.Type must not be nil") - info, err := query.AnalyzeObject(object) + info, err := query.AnalyzeObject(spec.Type) require.NoError(t, err) testInstance := spec.Instance - err = query.PopulateVariableLengthInfo(info, testInstance) + err = query.PopulateVariableLengthInfo(info, reflect.ValueOf(testInstance)) require.NoError(t, err) marshaller, ok := testInstance.(ssz.Marshaler) diff --git a/encoding/ssz/query/vector.go b/encoding/ssz/query/vector.go index c0c0f70d38..8e90856952 100644 --- a/encoding/ssz/query/vector.go +++ b/encoding/ssz/query/vector.go @@ -25,3 +25,15 @@ func (v *vectorInfo) Element() (*sszInfo, error) { return v.element, nil } + +func (v *vectorInfo) Size() uint64 { + if v == nil { + return 0 + } + + if v.element == nil { + return 0 + } + + return v.length * v.element.Size() +} From 10f8d8c26e6372d775e6da8bd35acf4d3d0828db Mon Sep 17 00:00:00 2001 From: Manu NALEPA Date: Tue, 14 Oct 2025 23:38:12 +0200 Subject: [PATCH 025/103] Fix `/eth/v1/beacon/blob_sidecars/` beacon API if the fulu fork epoch is set to the far future epoch. (#15867) * Fix `/eth/v1/beacon/blob_sidecars/` beacon API if the fulu fork epoch is set to the far future epoch. * Fix Terence's comment.
* adding a test --------- Co-authored-by: james-prysm --- beacon-chain/rpc/lookup/blocker.go | 11 ++++-- beacon-chain/rpc/lookup/blocker_test.go | 45 ++++++++++++++++++++++ changelog/manu-blob-sidecars-beacon-api.md | 2 + 3 files changed, 55 insertions(+), 3 deletions(-) create mode 100644 changelog/manu-blob-sidecars-beacon-api.md diff --git a/beacon-chain/rpc/lookup/blocker.go b/beacon-chain/rpc/lookup/blocker.go index d0f4b6d2f9..f618808dce 100644 --- a/beacon-chain/rpc/lookup/blocker.go +++ b/beacon-chain/rpc/lookup/blocker.go @@ -3,6 +3,7 @@ package lookup import ( "context" "fmt" + "math" "strconv" "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain" @@ -283,9 +284,13 @@ func (p *BeaconDbBlocker) Blobs(ctx context.Context, id string, opts ...options. return make([]*blocks.VerifiedROBlob, 0), nil } - fuluForkSlot, err := slots.EpochStart(params.BeaconConfig().FuluForkEpoch) - if err != nil { - return nil, &core.RpcError{Err: errors.Wrap(err, "could not calculate Fulu start slot"), Reason: core.Internal} + // Compute the first Fulu slot. + fuluForkSlot := primitives.Slot(math.MaxUint64) + if fuluForkEpoch := params.BeaconConfig().FuluForkEpoch; fuluForkEpoch != primitives.Epoch(math.MaxUint64) { + fuluForkSlot, err = slots.EpochStart(fuluForkEpoch) + if err != nil { + return nil, &core.RpcError{Err: errors.Wrap(err, "could not calculate Fulu start slot"), Reason: core.Internal} + } } // Convert versioned hashes to indices if provided diff --git a/beacon-chain/rpc/lookup/blocker_test.go b/beacon-chain/rpc/lookup/blocker_test.go index cc6d5542d7..bf8f967c64 100644 --- a/beacon-chain/rpc/lookup/blocker_test.go +++ b/beacon-chain/rpc/lookup/blocker_test.go @@ -587,6 +587,51 @@ func TestGetBlob(t *testing.T) { require.Equal(t, http.StatusBadRequest, core.ErrorReasonToHTTP(rpcErr.Reason)) require.StringContains(t, "not supported before", rpcErr.Err.Error()) }) + + t.Run("fulu fork epoch not set (MaxUint64)", func(t *testing.T) { + // Setup with Deneb fork enabled but Fulu fork epoch set to MaxUint64 (not set/far future) + params.SetupTestConfigCleanup(t) + cfg := params.BeaconConfig().Copy() + cfg.DenebForkEpoch = 1 + cfg.FuluForkEpoch = primitives.Epoch(math.MaxUint64) // Not set / far future + params.OverrideBeaconConfig(cfg) + + // Create and save Deneb block and blob sidecars + denebSlot := util.SlotAtEpoch(t, cfg.DenebForkEpoch) + _, tempBlobStorage := filesystem.NewEphemeralBlobStorageAndFs(t) + + denebBlockWithBlobs, denebBlobSidecars := util.GenerateTestDenebBlockWithSidecar(t, [fieldparams.RootLength]byte{}, denebSlot, 2, util.WithDenebSlot(denebSlot)) + denebBlockRoot := denebBlockWithBlobs.Root() + + verifiedDenebBlobs := verification.FakeVerifySliceForTest(t, denebBlobSidecars) + for i := range verifiedDenebBlobs { + err := tempBlobStorage.Save(verifiedDenebBlobs[i]) + require.NoError(t, err) + } + + err := db.SaveBlock(t.Context(), denebBlockWithBlobs) + require.NoError(t, err) + + blocker := &BeaconDbBlocker{ + GenesisTimeFetcher: &testutil.MockGenesisTimeFetcher{ + Genesis: time.Now(), + }, + BeaconDB: db, + BlobStorage: tempBlobStorage, + } + + // Should successfully retrieve blobs even when FuluForkEpoch is not set + retrievedBlobs, rpcErr := blocker.Blobs(ctx, hexutil.Encode(denebBlockRoot[:])) + require.IsNil(t, rpcErr) + require.Equal(t, 2, len(retrievedBlobs)) + + // Verify blob content matches + for i, retrievedBlob := range retrievedBlobs { + require.NotNil(t, retrievedBlob.BlobSidecar) + require.DeepEqual(t, denebBlobSidecars[i].Blob, retrievedBlob.Blob) + 
require.DeepEqual(t, denebBlobSidecars[i].KzgCommitment, retrievedBlob.KzgCommitment) + } + }) } func TestBlobs_CommitmentOrdering(t *testing.T) { diff --git a/changelog/manu-blob-sidecars-beacon-api.md b/changelog/manu-blob-sidecars-beacon-api.md new file mode 100644 index 0000000000..c5360e80e9 --- /dev/null +++ b/changelog/manu-blob-sidecars-beacon-api.md @@ -0,0 +1,2 @@ +### Fixed +- Fix `/eth/v1/beacon/blob_sidecars/` beacon API is the fulu fork epoch is set to the far future epoch. From 55b9448d416c04d8b52637a4d06f7236ba2f1731 Mon Sep 17 00:00:00 2001 From: Manu NALEPA Date: Wed, 15 Oct 2025 14:16:05 +0200 Subject: [PATCH 026/103] `dataColumnSidecarsByRangeRPCHandler`: Gracefully close the stream if no data to return. (#15866) * `TestDataColumnSidecarsByRangeRPCHandler`: Remove commented code. * Remove double import * `dataColumnSidecarsByRangeRPCHandler`: Gracefully close the stream if no data to return. * Tests: Change `require` to `assert` in goroutines in tests. https://pkg.go.dev/github.com/stretchr/testify/require#hdr-Assertions * Add changelog. --- .../sync/rpc_data_column_sidecars_by_range.go | 1 + .../rpc_data_column_sidecars_by_range_test.go | 91 ++++++++++++++----- changelog/manu-gracefully-close-stream.md | 2 + 3 files changed, 69 insertions(+), 25 deletions(-) create mode 100644 changelog/manu-gracefully-close-stream.md diff --git a/beacon-chain/sync/rpc_data_column_sidecars_by_range.go b/beacon-chain/sync/rpc_data_column_sidecars_by_range.go index e64ef8ef64..a2be4b4587 100644 --- a/beacon-chain/sync/rpc_data_column_sidecars_by_range.go +++ b/beacon-chain/sync/rpc_data_column_sidecars_by_range.go @@ -70,6 +70,7 @@ func (s *Service) dataColumnSidecarsByRangeRPCHandler(ctx context.Context, msg i log.Trace("Serving data column sidecars by range") if rangeParameters == nil { + closeStream(stream, log) return nil } diff --git a/beacon-chain/sync/rpc_data_column_sidecars_by_range_test.go b/beacon-chain/sync/rpc_data_column_sidecars_by_range_test.go index b0620918cc..07d0f2a66a 100644 --- a/beacon-chain/sync/rpc_data_column_sidecars_by_range_test.go +++ b/beacon-chain/sync/rpc_data_column_sidecars_by_range_test.go @@ -23,16 +23,15 @@ import ( fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" "github.com/OffchainLabs/prysm/v6/config/params" "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - consensusblocks "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" pb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v6/testing/assert" "github.com/OffchainLabs/prysm/v6/testing/util" ) func TestDataColumnSidecarsByRangeRPCHandler(t *testing.T) { params.SetupTestConfigCleanup(t) beaconConfig := params.BeaconConfig() - //beaconConfig.FuluForkEpoch = beaconConfig.ElectraForkEpoch + 100 beaconConfig.FuluForkEpoch = 0 params.OverrideBeaconConfig(beaconConfig) params.BeaconConfig().InitializeForkSchedule() @@ -47,6 +46,7 @@ func TestDataColumnSidecarsByRangeRPCHandler(t *testing.T) { ctxMap, err := ContextByteVersionsForValRoot(params.BeaconConfig().GenesisValidatorsRoot) require.NoError(t, err) + t.Run("invalid request", func(t *testing.T) { slot := primitives.Slot(400) mockNower.SetSlot(t, clock, slot) @@ -72,8 +72,8 @@ func TestDataColumnSidecarsByRangeRPCHandler(t *testing.T) { remoteP2P.BHost.SetStreamHandler(protocolID, func(stream network.Stream) { defer wg.Done() code, _, err := readStatusCodeNoDeadline(stream, localP2P.Encoding()) - require.NoError(t, 
err) - require.Equal(t, responseCodeInvalidRequest, code) + assert.NoError(t, err) + assert.Equal(t, responseCodeInvalidRequest, code) }) localP2P.Connect(remoteP2P) @@ -94,6 +94,48 @@ func TestDataColumnSidecarsByRangeRPCHandler(t *testing.T) { } }) + t.Run("in the future", func(t *testing.T) { + slot := primitives.Slot(400) + mockNower.SetSlot(t, clock, slot) + + localP2P, remoteP2P := p2ptest.NewTestP2P(t), p2ptest.NewTestP2P(t) + protocolID := protocol.ID(fmt.Sprintf("%s/ssz_snappy", p2p.RPCDataColumnSidecarsByRangeTopicV1)) + + service := &Service{ + cfg: &config{ + p2p: localP2P, + chain: &chainMock.ChainService{ + Slot: &slot, + }, + clock: clock, + }, + rateLimiter: newRateLimiter(localP2P), + } + + var wg sync.WaitGroup + wg.Add(1) + + remoteP2P.BHost.SetStreamHandler(protocolID, func(stream network.Stream) { + defer wg.Done() + + _, err := readChunkedDataColumnSidecar(stream, remoteP2P, ctxMap) + assert.Equal(t, true, errors.Is(err, io.EOF)) + }) + + localP2P.Connect(remoteP2P) + stream, err := localP2P.BHost.NewStream(ctx, remoteP2P.BHost.ID(), protocolID) + require.NoError(t, err) + + msg := &pb.DataColumnSidecarsByRangeRequest{ + StartSlot: slot + 1, + Count: 50, + Columns: []uint64{1, 2, 3, 4, 6, 7, 8, 9, 10}, + } + + err = service.dataColumnSidecarsByRangeRPCHandler(ctx, msg, stream) + require.NoError(t, err) + }) + t.Run("nominal", func(t *testing.T) { slot := primitives.Slot(400) @@ -133,12 +175,12 @@ func TestDataColumnSidecarsByRangeRPCHandler(t *testing.T) { signedBeaconBlockPb.Block.ParentRoot = roots[i-1][:] } - signedBeaconBlock, err := consensusblocks.NewSignedBeaconBlock(signedBeaconBlockPb) + signedBeaconBlock, err := blocks.NewSignedBeaconBlock(signedBeaconBlockPb) require.NoError(t, err) // There is a discrepancy between the root of the beacon block and the rodata column root, // but for the sake of this test, we actually don't care. 
- roblock, err := consensusblocks.NewROBlockWithRoot(signedBeaconBlock, roots[i]) + roblock, err := blocks.NewROBlockWithRoot(signedBeaconBlock, roots[i]) require.NoError(t, err) roBlocks = append(roBlocks, roblock) @@ -178,28 +220,28 @@ func TestDataColumnSidecarsByRangeRPCHandler(t *testing.T) { break } - require.NoError(t, err) + assert.NoError(t, err) sidecars = append(sidecars, sidecar) } - require.Equal(t, 8, len(sidecars)) - require.Equal(t, root0, sidecars[0].BlockRoot()) - require.Equal(t, root0, sidecars[1].BlockRoot()) - require.Equal(t, root0, sidecars[2].BlockRoot()) - require.Equal(t, root3, sidecars[3].BlockRoot()) - require.Equal(t, root3, sidecars[4].BlockRoot()) - require.Equal(t, root5, sidecars[5].BlockRoot()) - require.Equal(t, root5, sidecars[6].BlockRoot()) - require.Equal(t, root5, sidecars[7].BlockRoot()) + assert.Equal(t, 8, len(sidecars)) + assert.Equal(t, root0, sidecars[0].BlockRoot()) + assert.Equal(t, root0, sidecars[1].BlockRoot()) + assert.Equal(t, root0, sidecars[2].BlockRoot()) + assert.Equal(t, root3, sidecars[3].BlockRoot()) + assert.Equal(t, root3, sidecars[4].BlockRoot()) + assert.Equal(t, root5, sidecars[5].BlockRoot()) + assert.Equal(t, root5, sidecars[6].BlockRoot()) + assert.Equal(t, root5, sidecars[7].BlockRoot()) - require.Equal(t, uint64(1), sidecars[0].Index) - require.Equal(t, uint64(2), sidecars[1].Index) - require.Equal(t, uint64(3), sidecars[2].Index) - require.Equal(t, uint64(4), sidecars[3].Index) - require.Equal(t, uint64(6), sidecars[4].Index) - require.Equal(t, uint64(7), sidecars[5].Index) - require.Equal(t, uint64(8), sidecars[6].Index) - require.Equal(t, uint64(9), sidecars[7].Index) + assert.Equal(t, uint64(1), sidecars[0].Index) + assert.Equal(t, uint64(2), sidecars[1].Index) + assert.Equal(t, uint64(3), sidecars[2].Index) + assert.Equal(t, uint64(4), sidecars[3].Index) + assert.Equal(t, uint64(6), sidecars[4].Index) + assert.Equal(t, uint64(7), sidecars[5].Index) + assert.Equal(t, uint64(8), sidecars[6].Index) + assert.Equal(t, uint64(9), sidecars[7].Index) }) localP2P.Connect(remoteP2P) @@ -215,7 +257,6 @@ func TestDataColumnSidecarsByRangeRPCHandler(t *testing.T) { err = service.dataColumnSidecarsByRangeRPCHandler(ctx, msg, stream) require.NoError(t, err) }) - } func TestValidateDataColumnsByRange(t *testing.T) { diff --git a/changelog/manu-gracefully-close-stream.md b/changelog/manu-gracefully-close-stream.md new file mode 100644 index 0000000000..236706058e --- /dev/null +++ b/changelog/manu-gracefully-close-stream.md @@ -0,0 +1,2 @@ +### Fixed +- `dataColumnSidecarsByRangeRPCHandler`: Gracefully close the stream if no data to return. From c811fadf338fd672b019292c5e60fff8e2c75bca Mon Sep 17 00:00:00 2001 From: Manu NALEPA Date: Wed, 15 Oct 2025 14:18:04 +0200 Subject: [PATCH 027/103] `VerifyDataColumnSidecar`: Check if there is no too many commitments. (#15859) * `VerifyDataColumnSidecar`: Check if there is no too many commitments. * `TestVerifyDataColumnSidecar`: Refactor using test cases. * Add changelog. 
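For illustration, a minimal sketch of the new behaviour, reusing the test helpers that appear in the diff below (the limit itself comes from the configured blob schedule): with MaxBlobsPerBlock set to 2 for the sidecar's slot, a sidecar carrying 10 commitments is rejected before any further validation.

    // Hypothetical long-hand version of the "too many commitments" table case below.
    column := make([][]byte, 10)
    kzgCommitments := make([][]byte, 10)
    kzgProofs := make([][]byte, 10)
    roSidecar := createTestSidecar(t, 0, column, kzgCommitments, kzgProofs)
    err := peerdas.VerifyDataColumnSidecar(roSidecar)
    require.ErrorIs(t, err, peerdas.ErrTooManyCommitments)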
--- beacon-chain/core/peerdas/p2p_interface.go | 7 ++ .../core/peerdas/p2p_interface_test.go | 64 +++++++++++-------- .../sync/data_column_sidecars_test.go | 7 ++ .../sync/initial-sync/service_test.go | 1 + beacon-chain/verification/data_column_test.go | 9 ++- changelog/manu-check-commitment-count.md | 2 + 6 files changed, 61 insertions(+), 29 deletions(-) create mode 100644 changelog/manu-check-commitment-count.md diff --git a/beacon-chain/core/peerdas/p2p_interface.go b/beacon-chain/core/peerdas/p2p_interface.go index 8295d3d8c9..46b49fdd9f 100644 --- a/beacon-chain/core/peerdas/p2p_interface.go +++ b/beacon-chain/core/peerdas/p2p_interface.go @@ -43,6 +43,13 @@ func VerifyDataColumnSidecar(sidecar blocks.RODataColumn) error { return ErrNoKzgCommitments } + // A sidecar with more commitments than the max blob count for this block is invalid. + slot := sidecar.Slot() + maxBlobsPerBlock := params.BeaconConfig().MaxBlobsPerBlock(slot) + if len(sidecar.KzgCommitments) > maxBlobsPerBlock { + return ErrTooManyCommitments + } + // The column length must be equal to the number of commitments/proofs. if len(sidecar.Column) != len(sidecar.KzgCommitments) || len(sidecar.Column) != len(sidecar.KzgProofs) { return ErrMismatchLength diff --git a/beacon-chain/core/peerdas/p2p_interface_test.go b/beacon-chain/core/peerdas/p2p_interface_test.go index f02a5becb4..3b4c20e35c 100644 --- a/beacon-chain/core/peerdas/p2p_interface_test.go +++ b/beacon-chain/core/peerdas/p2p_interface_test.go @@ -18,38 +18,46 @@ import ( ) func TestVerifyDataColumnSidecar(t *testing.T) { - t.Run("index too large", func(t *testing.T) { - roSidecar := createTestSidecar(t, 1_000_000, nil, nil, nil) - err := peerdas.VerifyDataColumnSidecar(roSidecar) - require.ErrorIs(t, err, peerdas.ErrIndexTooLarge) - }) + testCases := []struct { + name string + index uint64 + blobCount int + commitmentCount int + proofCount int + maxBlobsPerBlock uint64 + expectedError error + }{ + {name: "index too large", index: 1_000_000, expectedError: peerdas.ErrIndexTooLarge}, + {name: "no commitments", expectedError: peerdas.ErrNoKzgCommitments}, + {name: "too many commitments", blobCount: 10, commitmentCount: 10, proofCount: 10, maxBlobsPerBlock: 2, expectedError: peerdas.ErrTooManyCommitments}, + {name: "commitments size mismatch", commitmentCount: 1, maxBlobsPerBlock: 1, expectedError: peerdas.ErrMismatchLength}, + {name: "proofs size mismatch", blobCount: 1, commitmentCount: 1, maxBlobsPerBlock: 1, expectedError: peerdas.ErrMismatchLength}, + {name: "nominal", blobCount: 1, commitmentCount: 1, proofCount: 1, maxBlobsPerBlock: 1, expectedError: nil}, + } - t.Run("no commitments", func(t *testing.T) { - roSidecar := createTestSidecar(t, 0, nil, nil, nil) - err := peerdas.VerifyDataColumnSidecar(roSidecar) - require.ErrorIs(t, err, peerdas.ErrNoKzgCommitments) - }) + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + params.SetupTestConfigCleanup(t) + cfg := params.BeaconConfig() + cfg.FuluForkEpoch = 0 + cfg.BlobSchedule = []params.BlobScheduleEntry{{Epoch: 0, MaxBlobsPerBlock: tc.maxBlobsPerBlock}} + params.OverrideBeaconConfig(cfg) - t.Run("KZG commitments size mismatch", func(t *testing.T) { - kzgCommitments := make([][]byte, 1) - roSidecar := createTestSidecar(t, 0, nil, kzgCommitments, nil) - err := peerdas.VerifyDataColumnSidecar(roSidecar) - require.ErrorIs(t, err, peerdas.ErrMismatchLength) - }) + column := make([][]byte, tc.blobCount) + kzgCommitments := make([][]byte, tc.commitmentCount) + kzgProof := make([][]byte, 
tc.proofCount) - t.Run("KZG proofs size mismatch", func(t *testing.T) { - column, kzgCommitments := make([][]byte, 1), make([][]byte, 1) - roSidecar := createTestSidecar(t, 0, column, kzgCommitments, nil) - err := peerdas.VerifyDataColumnSidecar(roSidecar) - require.ErrorIs(t, err, peerdas.ErrMismatchLength) - }) + roSidecar := createTestSidecar(t, tc.index, column, kzgCommitments, kzgProof) + err := peerdas.VerifyDataColumnSidecar(roSidecar) - t.Run("nominal", func(t *testing.T) { - column, kzgCommitments, kzgProofs := make([][]byte, 1), make([][]byte, 1), make([][]byte, 1) - roSidecar := createTestSidecar(t, 0, column, kzgCommitments, kzgProofs) - err := peerdas.VerifyDataColumnSidecar(roSidecar) - require.NoError(t, err) - }) + if tc.expectedError != nil { + require.ErrorIs(t, err, tc.expectedError) + return + } + + require.NoError(t, err) + }) + } } func TestVerifyDataColumnSidecarKZGProofs(t *testing.T) { diff --git a/beacon-chain/sync/data_column_sidecars_test.go b/beacon-chain/sync/data_column_sidecars_test.go index c7e886d348..4c533ccc31 100644 --- a/beacon-chain/sync/data_column_sidecars_test.go +++ b/beacon-chain/sync/data_column_sidecars_test.go @@ -45,6 +45,7 @@ func TestFetchDataColumnSidecars(t *testing.T) { params.SetupTestConfigCleanup(t) cfg := params.BeaconConfig().Copy() cfg.FuluForkEpoch = 0 + cfg.BlobSchedule = []params.BlobScheduleEntry{{Epoch: 0, MaxBlobsPerBlock: 10}} params.OverrideBeaconConfig(cfg) // Start the trusted setup. @@ -760,6 +761,12 @@ func TestVerifyDataColumnSidecarsByPeer(t *testing.T) { err := kzg.Start() require.NoError(t, err) + params.SetupTestConfigCleanup(t) + cfg := params.BeaconConfig() + cfg.FuluForkEpoch = 0 + cfg.BlobSchedule = []params.BlobScheduleEntry{{Epoch: 0, MaxBlobsPerBlock: 2}} + params.OverrideBeaconConfig(cfg) + t.Run("nominal", func(t *testing.T) { const ( start, stop = 0, 15 diff --git a/beacon-chain/sync/initial-sync/service_test.go b/beacon-chain/sync/initial-sync/service_test.go index fc3c06baf1..e2b4bccbb0 100644 --- a/beacon-chain/sync/initial-sync/service_test.go +++ b/beacon-chain/sync/initial-sync/service_test.go @@ -683,6 +683,7 @@ func TestFetchOriginColumns(t *testing.T) { params.SetupTestConfigCleanup(t) cfg := params.BeaconConfig().Copy() cfg.FuluForkEpoch = 0 + cfg.BlobSchedule = []params.BlobScheduleEntry{{Epoch: 0, MaxBlobsPerBlock: 10}} params.OverrideBeaconConfig(cfg) const ( diff --git a/beacon-chain/verification/data_column_test.go b/beacon-chain/verification/data_column_test.go index 8646d195ea..e4a92d41bf 100644 --- a/beacon-chain/verification/data_column_test.go +++ b/beacon-chain/verification/data_column_test.go @@ -58,7 +58,6 @@ func TestValid(t *testing.T) { t.Run("one invalid column", func(t *testing.T) { columns := GenerateTestDataColumns(t, [fieldparams.RootLength]byte{}, 1, 1) - columns[0].KzgCommitments = [][]byte{} verifier := initializer.NewDataColumnsVerifier(columns, GossipDataColumnSidecarRequirements) err := verifier.ValidFields() @@ -67,6 +66,14 @@ func TestValid(t *testing.T) { }) t.Run("nominal", func(t *testing.T) { + const maxBlobsPerBlock = 2 + + params.SetupTestConfigCleanup(t) + cfg := params.BeaconConfig() + cfg.FuluForkEpoch = 0 + cfg.BlobSchedule = []params.BlobScheduleEntry{{Epoch: 0, MaxBlobsPerBlock: maxBlobsPerBlock}} + params.OverrideBeaconConfig(cfg) + columns := GenerateTestDataColumns(t, [fieldparams.RootLength]byte{}, 1, 1) verifier := initializer.NewDataColumnsVerifier(columns, GossipDataColumnSidecarRequirements) diff --git a/changelog/manu-check-commitment-count.md 
b/changelog/manu-check-commitment-count.md new file mode 100644 index 0000000000..404d96eb8b --- /dev/null +++ b/changelog/manu-check-commitment-count.md @@ -0,0 +1,2 @@ +### Fixed +- `VerifyDataColumnSidecar`: Check if there is no too many commitments. From 9742333f680e392edfd3928e4f334e3a80a3aa0c Mon Sep 17 00:00:00 2001 From: Manu NALEPA Date: Wed, 15 Oct 2025 16:44:49 +0200 Subject: [PATCH 028/103] `WithDataColumnRetentionEpochs`: Use `dataColumnRetentionEpoch` instead of `blobColumnRetentionEpoch`. (#15872) --- beacon-chain/db/filesystem/data_column.go | 2 +- beacon-chain/db/filesystem/mock.go | 2 +- .../manu-data-column-retention-period.md | 2 + cmd/beacon-chain/storage/options.go | 27 ++++++++++++- cmd/beacon-chain/storage/options_test.go | 39 +++++++++++++++++++ 5 files changed, 69 insertions(+), 3 deletions(-) create mode 100644 changelog/manu-data-column-retention-period.md diff --git a/beacon-chain/db/filesystem/data_column.go b/beacon-chain/db/filesystem/data_column.go index 73d1edd65b..49331b4a01 100644 --- a/beacon-chain/db/filesystem/data_column.go +++ b/beacon-chain/db/filesystem/data_column.go @@ -1032,5 +1032,5 @@ func extractFileMetadata(path string) (*fileMetadata, error) { // period computes the period of a given epoch. func period(epoch primitives.Epoch) uint64 { - return uint64(epoch / params.BeaconConfig().MinEpochsForBlobsSidecarsRequest) + return uint64(epoch / params.BeaconConfig().MinEpochsForDataColumnSidecarsRequest) } diff --git a/beacon-chain/db/filesystem/mock.go b/beacon-chain/db/filesystem/mock.go index a7c2594fba..1a5eb2f40c 100644 --- a/beacon-chain/db/filesystem/mock.go +++ b/beacon-chain/db/filesystem/mock.go @@ -126,7 +126,7 @@ func NewWarmedEphemeralDataColumnStorageUsingFs(t testing.TB, fs afero.Fs, opts func NewEphemeralDataColumnStorageUsingFs(t testing.TB, fs afero.Fs, opts ...DataColumnStorageOption) *DataColumnStorage { opts = append(opts, - WithDataColumnRetentionEpochs(params.BeaconConfig().MinEpochsForBlobsSidecarsRequest), + WithDataColumnRetentionEpochs(params.BeaconConfig().MinEpochsForDataColumnSidecarsRequest), WithDataColumnFs(fs), ) diff --git a/changelog/manu-data-column-retention-period.md b/changelog/manu-data-column-retention-period.md new file mode 100644 index 0000000000..cec4f0b2c8 --- /dev/null +++ b/changelog/manu-data-column-retention-period.md @@ -0,0 +1,2 @@ +### Fixed +- `WithDataColumnRetentionEpochs`: Use `dataColumnRetentionEpoch` instead of `blobColumnRetentionEpoch`. \ No newline at end of file diff --git a/cmd/beacon-chain/storage/options.go b/cmd/beacon-chain/storage/options.go index bb2b96473a..598ef99a86 100644 --- a/cmd/beacon-chain/storage/options.go +++ b/cmd/beacon-chain/storage/options.go @@ -68,8 +68,13 @@ func BeaconNodeOptions(c *cli.Context) ([]node.Option, error) { filesystem.WithLayout(c.String(BlobStorageLayout.Name)), // This is validated in the Action func for BlobStorageLayout. 
) + dataColumnRetentionEpoch, err := dataColumnRetentionEpoch(c) + if err != nil { + return nil, errors.Wrap(err, "data column retention epoch") + } + dataColumnStorageOption := node.WithDataColumnStorageOptions( - filesystem.WithDataColumnRetentionEpochs(blobRetentionEpoch), + filesystem.WithDataColumnRetentionEpochs(dataColumnRetentionEpoch), filesystem.WithDataColumnBasePath(dataColumnStoragePath(c)), ) @@ -116,6 +121,26 @@ func blobRetentionEpoch(cliCtx *cli.Context) (primitives.Epoch, error) { return re, nil } +// dataColumnRetentionEpoch returns the spec default MIN_EPOCHS_FOR_DATA_COLUMN_SIDECARS_REQUEST +// or a user-specified flag overriding this value. If a user-specified override is +// smaller than the spec default, an error will be returned. +func dataColumnRetentionEpoch(cliCtx *cli.Context) (primitives.Epoch, error) { + defaultValue := params.BeaconConfig().MinEpochsForDataColumnSidecarsRequest + if !cliCtx.IsSet(BlobRetentionEpochFlag.Name) { + return defaultValue, nil + } + + // We use on purpose the same retention flag for both blobs and data columns. + customValue := primitives.Epoch(cliCtx.Uint64(BlobRetentionEpochFlag.Name)) + + // Validate the epoch value against the spec default. + if customValue < defaultValue { + return defaultValue, errors.Wrapf(errInvalidBlobRetentionEpochs, "%s=%d, spec=%d", BlobRetentionEpochFlag.Name, customValue, defaultValue) + } + + return customValue, nil +} + func init() { BlobStorageLayout.Action = validateLayoutFlag } diff --git a/cmd/beacon-chain/storage/options_test.go b/cmd/beacon-chain/storage/options_test.go index b4bdcd79a1..7136a8911a 100644 --- a/cmd/beacon-chain/storage/options_test.go +++ b/cmd/beacon-chain/storage/options_test.go @@ -61,6 +61,45 @@ func TestConfigureBlobRetentionEpoch(t *testing.T) { _, err = blobRetentionEpoch(cliCtx) require.ErrorIs(t, err, errInvalidBlobRetentionEpochs) } + +func TestConfigureDataColumnRetentionEpoch(t *testing.T) { + specValue := params.BeaconConfig().MinEpochsForDataColumnSidecarsRequest + + app := cli.App{} + set := flag.NewFlagSet("test", 0) + cliCtx := cli.NewContext(&app, set, nil) + + // Test case: Specification value + expected := specValue + + actual, err := dataColumnRetentionEpoch(cliCtx) + require.NoError(t, err) + require.Equal(t, expected, actual) + + // Manually define the flag in the set, so the following code can use set.Set + set.Uint64(BlobRetentionEpochFlag.Name, 0, "") + + // Test case: Input epoch is greater than or equal to specification value. + expected = specValue + 1 + + err = set.Set(BlobRetentionEpochFlag.Name, fmt.Sprintf("%d", expected)) + require.NoError(t, err) + + actual, err = dataColumnRetentionEpoch(cliCtx) + require.NoError(t, err) + require.Equal(t, primitives.Epoch(expected), actual) + + // Test case: Input epoch is less than specification value. + expected = specValue - 1 + + err = set.Set(BlobRetentionEpochFlag.Name, fmt.Sprintf("%d", expected)) + require.NoError(t, err) + + actual, err = dataColumnRetentionEpoch(cliCtx) + require.ErrorIs(t, err, errInvalidBlobRetentionEpochs) + require.Equal(t, specValue, actual) +} + func TestDataColumnStoragePath_FlagSpecified(t *testing.T) { app := cli.App{} set := flag.NewFlagSet("test", 0) From f67ca6ae5ebbd4e3e500734407494e359c62c17e Mon Sep 17 00:00:00 2001 From: Potuz Date: Wed, 15 Oct 2025 12:13:49 -0300 Subject: [PATCH 029/103] Fix epoch transition on head event (#15871) h/t to the NuConstruct team for reporting this. 
The event feed incorrectly sends epoch transition flag on head events when the first slot of the epoch is missing (or reorgs across epoch transition). Co-authored-by: james-prysm <90280386+james-prysm@users.noreply.github.com> --- beacon-chain/blockchain/head.go | 13 +++++++++- beacon-chain/blockchain/head_test.go | 37 ++++++++++++++++++++++++++++ changelog/potuz_fix_head_event.md | 3 +++ 3 files changed, 52 insertions(+), 1 deletion(-) create mode 100644 changelog/potuz_fix_head_event.md diff --git a/beacon-chain/blockchain/head.go b/beacon-chain/blockchain/head.go index c0a843e647..d4a73dcd96 100644 --- a/beacon-chain/blockchain/head.go +++ b/beacon-chain/blockchain/head.go @@ -346,13 +346,24 @@ func (s *Service) notifyNewHeadEvent( if err != nil { return errors.Wrap(err, "could not check if node is optimistically synced") } + + parentRoot, err := s.ParentRoot([32]byte(newHeadRoot)) + if err != nil { + return errors.Wrap(err, "could not obtain parent root in forkchoice") + } + parentSlot, err := s.RecentBlockSlot(parentRoot) + if err != nil { + return errors.Wrap(err, "could not obtain parent slot in forkchoice") + } + epochTransition := slots.ToEpoch(newHeadSlot) > slots.ToEpoch(parentSlot) + s.cfg.StateNotifier.StateFeed().Send(&feed.Event{ Type: statefeed.NewHead, Data: ðpbv1.EventHead{ Slot: newHeadSlot, Block: newHeadRoot, State: newHeadStateRoot, - EpochTransition: slots.IsEpochStart(newHeadSlot), + EpochTransition: epochTransition, PreviousDutyDependentRoot: previousDutyDependentRoot[:], CurrentDutyDependentRoot: currentDutyDependentRoot[:], ExecutionOptimistic: isOptimistic, diff --git a/beacon-chain/blockchain/head_test.go b/beacon-chain/blockchain/head_test.go index 86753e0259..a7221ec1e6 100644 --- a/beacon-chain/blockchain/head_test.go +++ b/beacon-chain/blockchain/head_test.go @@ -162,6 +162,9 @@ func Test_notifyNewHeadEvent(t *testing.T) { require.NoError(t, srv.cfg.ForkChoiceStore.InsertNode(t.Context(), st, blk)) newHeadStateRoot := [32]byte{2} newHeadRoot := [32]byte{3} + st, blk, err = prepareForkchoiceState(t.Context(), 1, newHeadRoot, [32]byte{}, [32]byte{}, ðpb.Checkpoint{}, ðpb.Checkpoint{}) + require.NoError(t, err) + require.NoError(t, srv.cfg.ForkChoiceStore.InsertNode(t.Context(), st, blk)) require.NoError(t, srv.notifyNewHeadEvent(t.Context(), 1, bState, newHeadStateRoot[:], newHeadRoot[:])) events := notifier.ReceivedEvents() require.Equal(t, 1, len(events)) @@ -196,6 +199,9 @@ func Test_notifyNewHeadEvent(t *testing.T) { newHeadStateRoot := [32]byte{2} newHeadRoot := [32]byte{3} + st, blk, err = prepareForkchoiceState(t.Context(), 0, newHeadRoot, [32]byte{}, [32]byte{}, ðpb.Checkpoint{}, ðpb.Checkpoint{}) + require.NoError(t, err) + require.NoError(t, srv.cfg.ForkChoiceStore.InsertNode(t.Context(), st, blk)) err = srv.notifyNewHeadEvent(t.Context(), epoch2Start, bState, newHeadStateRoot[:], newHeadRoot[:]) require.NoError(t, err) events := notifier.ReceivedEvents() @@ -213,6 +219,37 @@ func Test_notifyNewHeadEvent(t *testing.T) { } require.DeepSSZEqual(t, wanted, eventHead) }) + t.Run("epoch transition", func(t *testing.T) { + bState, _ := util.DeterministicGenesisState(t, 10) + srv := testServiceWithDB(t) + srv.SetGenesisTime(time.Now()) + notifier := srv.cfg.StateNotifier.(*mock.MockStateNotifier) + srv.originBlockRoot = [32]byte{1} + st, blk, err := prepareForkchoiceState(t.Context(), 0, [32]byte{}, [32]byte{}, [32]byte{}, ðpb.Checkpoint{}, ðpb.Checkpoint{}) + require.NoError(t, err) + require.NoError(t, srv.cfg.ForkChoiceStore.InsertNode(t.Context(), st, 
blk)) + newHeadStateRoot := [32]byte{2} + newHeadRoot := [32]byte{3} + st, blk, err = prepareForkchoiceState(t.Context(), 32, newHeadRoot, [32]byte{}, [32]byte{}, ðpb.Checkpoint{}, ðpb.Checkpoint{}) + require.NoError(t, err) + require.NoError(t, srv.cfg.ForkChoiceStore.InsertNode(t.Context(), st, blk)) + newHeadSlot := params.BeaconConfig().SlotsPerEpoch + require.NoError(t, srv.notifyNewHeadEvent(t.Context(), newHeadSlot, bState, newHeadStateRoot[:], newHeadRoot[:])) + events := notifier.ReceivedEvents() + require.Equal(t, 1, len(events)) + + eventHead, ok := events[0].Data.(*ethpbv1.EventHead) + require.Equal(t, true, ok) + wanted := ðpbv1.EventHead{ + Slot: newHeadSlot, + Block: newHeadRoot[:], + State: newHeadStateRoot[:], + EpochTransition: true, + PreviousDutyDependentRoot: params.BeaconConfig().ZeroHash[:], + CurrentDutyDependentRoot: srv.originBlockRoot[:], + } + require.DeepSSZEqual(t, wanted, eventHead) + }) } func TestRetrieveHead_ReadOnly(t *testing.T) { diff --git a/changelog/potuz_fix_head_event.md b/changelog/potuz_fix_head_event.md new file mode 100644 index 0000000000..7d68a89d7f --- /dev/null +++ b/changelog/potuz_fix_head_event.md @@ -0,0 +1,3 @@ +### Fixed + +- Mark epoch transition correctly on new head events From 5ced1125f259a530eaae18f64dc5d13474ff87df Mon Sep 17 00:00:00 2001 From: phrwlk Date: Wed, 15 Oct 2025 19:02:08 +0300 Subject: [PATCH 030/103] fix: reject out-of-range attestation committee index (#15855) * reject committee index >= committees_per_slot in unaggregated attestation validation * Create phrwlk_fix-attestation-committee-index-bound.md * add a unit test * fix test * fixing test --------- Co-authored-by: james-prysm <90280386+james-prysm@users.noreply.github.com> Co-authored-by: james-prysm --- .../sync/validate_aggregate_proof_test.go | 6 ++- .../sync/validate_beacon_attestation.go | 4 +- .../sync/validate_beacon_attestation_test.go | 38 +++++++++++++++++++ ...k_fix-attestation-committee-index-bound.md | 3 ++ 4 files changed, 47 insertions(+), 4 deletions(-) create mode 100644 changelog/phrwlk_fix-attestation-committee-index-bound.md diff --git a/beacon-chain/sync/validate_aggregate_proof_test.go b/beacon-chain/sync/validate_aggregate_proof_test.go index bf1f5d23bf..035f1d81e2 100644 --- a/beacon-chain/sync/validate_aggregate_proof_test.go +++ b/beacon-chain/sync/validate_aggregate_proof_test.go @@ -94,9 +94,11 @@ func TestVerifyIndexInCommittee_ExistsInBeaconCommittee(t *testing.T) { assert.ErrorContains(t, wanted, err) assert.Equal(t, pubsub.ValidationReject, result) - att.Data.CommitteeIndex = 10000 + // Test the edge case where committee index equals count (should be rejected) + // With 64 validators and minimal config, count = 2, so valid indices are 0 and 1 + att.Data.CommitteeIndex = 2 _, _, result, err = service.validateCommitteeIndexAndCount(ctx, att, s) - require.ErrorContains(t, "committee index 10000 > 2", err) + require.ErrorContains(t, "committee index 2 >= 2", err) assert.Equal(t, pubsub.ValidationReject, result) } diff --git a/beacon-chain/sync/validate_beacon_attestation.go b/beacon-chain/sync/validate_beacon_attestation.go index 6deb020078..6a603aabd2 100644 --- a/beacon-chain/sync/validate_beacon_attestation.go +++ b/beacon-chain/sync/validate_beacon_attestation.go @@ -278,8 +278,8 @@ func (s *Service) validateCommitteeIndexAndCount( } else { ci = a.GetCommitteeIndex() } - if uint64(ci) > count { - return 0, 0, pubsub.ValidationReject, fmt.Errorf("committee index %d > %d", ci, count) + if uint64(ci) >= count { + return 0, 0, 
pubsub.ValidationReject, fmt.Errorf("committee index %d >= %d", ci, count) } return ci, valCount, pubsub.ValidationAccept, nil } diff --git a/beacon-chain/sync/validate_beacon_attestation_test.go b/beacon-chain/sync/validate_beacon_attestation_test.go index 70ca20f868..991010dfe1 100644 --- a/beacon-chain/sync/validate_beacon_attestation_test.go +++ b/beacon-chain/sync/validate_beacon_attestation_test.go @@ -611,3 +611,41 @@ func TestService_setSeenUnaggregatedAtt(t *testing.T) { }) }) } + +func Test_validateCommitteeIndexAndCount_Boundary(t *testing.T) { + ctx := t.Context() + + // Create a minimal state with a known number of validators. + validators := uint64(64) + bs, _ := util.DeterministicGenesisState(t, validators) + require.NoError(t, bs.SetSlot(1)) + + s := &Service{} + + // Build a minimal Phase0 attestation (unaggregated path). + att := ðpb.Attestation{ + Data: ðpb.AttestationData{ + Slot: 1, + CommitteeIndex: 0, + }, + } + + // First call to obtain the active validator count used to derive committees per slot. + _, valCount, res, err := s.validateCommitteeIndexAndCount(ctx, att, bs) + require.NoError(t, err) + require.Equal(t, pubsub.ValidationAccept, res) + + count := helpers.SlotCommitteeCount(valCount) + + // committee_index == count - 1 should be accepted. + att.Data.CommitteeIndex = primitives.CommitteeIndex(count - 1) + _, _, res, err = s.validateCommitteeIndexAndCount(ctx, att, bs) + require.NoError(t, err) + require.Equal(t, pubsub.ValidationAccept, res) + + // committee_index == count should be rejected (out of range). + att.Data.CommitteeIndex = primitives.CommitteeIndex(count) + _, _, res, err = s.validateCommitteeIndexAndCount(ctx, att, bs) + require.ErrorContains(t, "committee index", err) + require.Equal(t, pubsub.ValidationReject, res) +} diff --git a/changelog/phrwlk_fix-attestation-committee-index-bound.md b/changelog/phrwlk_fix-attestation-committee-index-bound.md new file mode 100644 index 0000000000..86ac126bfd --- /dev/null +++ b/changelog/phrwlk_fix-attestation-committee-index-bound.md @@ -0,0 +1,3 @@ +### Fixed + +- reject committee index >= committees_per_slot in unaggregated attestation validation From cd429dc253392ddb7f972c4eb75431b735722198 Mon Sep 17 00:00:00 2001 From: Jun Song <87601811+syjn99@users.noreply.github.com> Date: Wed, 15 Oct 2025 17:11:12 +0100 Subject: [PATCH 031/103] SSZ-QL: Access n-th element in `List`/`Vector`. 
(#15767) * Add basic parsing feature for accessing by index * Add more tests for 2d byte vector * Add List case for access indexing * Handle 2D bytes List example * Fix misleading cases for CalculateOffsetAndLength * Use elementSizes[index] if it is the last path element * Add variable_container_list field for mocking attester_slashings in BeaconBlockBody * Remove redundant protobuf message * Better documentation * Changelog * Fix `expectedSize` of `VariableTestContainer`: as we added `variable_container_list` here * Apply reviews from Radek --- changelog/syjn99_ssz-ql-index-accessing.md | 3 + encoding/ssz/query/path.go | 33 ++- encoding/ssz/query/query.go | 47 +++- encoding/ssz/query/query_test.go | 225 ++++++++++++++++++- proto/ssz_query/ssz_query.pb.go | 205 +++++++++++------ proto/ssz_query/ssz_query.proto | 17 +- proto/ssz_query/ssz_query.ssz.go | 250 +++++++++++++++++++-- 7 files changed, 677 insertions(+), 103 deletions(-) create mode 100644 changelog/syjn99_ssz-ql-index-accessing.md diff --git a/changelog/syjn99_ssz-ql-index-accessing.md b/changelog/syjn99_ssz-ql-index-accessing.md new file mode 100644 index 0000000000..1ebefa4965 --- /dev/null +++ b/changelog/syjn99_ssz-ql-index-accessing.md @@ -0,0 +1,3 @@ +### Added + +- SSZ-QL: Access n-th element in `List`/`Vector`. diff --git a/encoding/ssz/query/path.go b/encoding/ssz/query/path.go index 4fd976828d..a45ed171d3 100644 --- a/encoding/ssz/query/path.go +++ b/encoding/ssz/query/path.go @@ -2,12 +2,16 @@ package query import ( "errors" + "fmt" + "strconv" "strings" ) // PathElement represents a single element in a path. type PathElement struct { Name string + // [Optional] Index for List/Vector elements + Index *uint64 } func ParsePath(rawPath string) ([]PathElement, error) { @@ -24,7 +28,34 @@ func ParsePath(rawPath string) ([]PathElement, error) { var path []PathElement for _, elem := range rawElements { - path = append(path, PathElement{Name: elem}) + if elem == "" { + return nil, errors.New("invalid path: consecutive dots or trailing dot") + } + + fieldName := elem + var index *uint64 + + // Check for index notation, e.g., "field[0]" + if strings.Contains(elem, "[") { + parts := strings.SplitN(elem, "[", 2) + if len(parts) != 2 { + return nil, fmt.Errorf("invalid index notation in path element %s", elem) + } + + fieldName = parts[0] + indexPart := strings.TrimSuffix(parts[1], "]") + if indexPart == "" { + return nil, errors.New("index cannot be empty") + } + + indexValue, err := strconv.ParseUint(indexPart, 10, 64) + if err != nil { + return nil, fmt.Errorf("invalid index in path element %s: %w", elem, err) + } + index = &indexValue + } + + path = append(path, PathElement{Name: fieldName, Index: index}) } return path, nil diff --git a/encoding/ssz/query/query.go b/encoding/ssz/query/query.go index c19ec94b20..62eb6810ee 100644 --- a/encoding/ssz/query/query.go +++ b/encoding/ssz/query/query.go @@ -19,7 +19,7 @@ func CalculateOffsetAndLength(sszInfo *sszInfo, path []PathElement) (*sszInfo, u walk := sszInfo offset := uint64(0) - for _, elem := range path { + for pathIndex, elem := range path { containerInfo, err := walk.ContainerInfo() if err != nil { return nil, 0, 0, fmt.Errorf("could not get field infos: %w", err) @@ -32,6 +32,51 @@ func CalculateOffsetAndLength(sszInfo *sszInfo, path []PathElement) (*sszInfo, u offset += fieldInfo.offset walk = fieldInfo.sszInfo + + // Check for accessing List/Vector elements by index + if elem.Index != nil { + switch walk.sszType { + case List: + index := *elem.Index + listInfo := 
walk.listInfo + if index >= listInfo.length { + return nil, 0, 0, fmt.Errorf("index %d out of bounds for field %s with size %d", index, elem.Name, listInfo.length) + } + + walk = listInfo.element + if walk.isVariable { + // Cumulative sum of sizes of previous elements to get the offset. + for i := range index { + offset += listInfo.elementSizes[i] + } + + // NOTE: When populating recursively, the shared element template is updated for each + // list item, causing it to retain the size information of the last processed element. + // This wouldn't be an issue if this is in the middle of the path, as the walk would be updated + // to the next field's sszInfo, which would have the correct size information. + // However, if this is the last element in the path, we need to ensure we return the correct size + // for the indexed element. Hence, we return the size from elementSizes. + if pathIndex == len(path)-1 { + return walk, offset, listInfo.elementSizes[index], nil + } + } else { + offset += index * listInfo.element.Size() + } + + case Vector: + index := *elem.Index + vectorInfo := walk.vectorInfo + if index >= vectorInfo.length { + return nil, 0, 0, fmt.Errorf("index %d out of bounds for field %s with size %d", index, elem.Name, vectorInfo.length) + } + + offset += index * vectorInfo.element.Size() + walk = vectorInfo.element + + default: + return nil, 0, 0, fmt.Errorf("field %s of type %s does not support index access", elem.Name, walk.sszType) + } + } } return walk, offset, walk.Size(), nil diff --git a/encoding/ssz/query/query_test.go b/encoding/ssz/query/query_test.go index 0b11a57556..a84046b597 100644 --- a/encoding/ssz/query/query_test.go +++ b/encoding/ssz/query/query_test.go @@ -25,7 +25,7 @@ func TestSize(t *testing.T) { { name: "VariableTestContainer", obj: &sszquerypb.VariableTestContainer{}, - expectedSize: 128, + expectedSize: 132, }, } @@ -102,6 +102,19 @@ func TestCalculateOffsetAndLength(t *testing.T) { expectedOffset: 85, expectedLength: 192, // 24 * 8 bytes }, + // Accessing an element in the vector + { + name: "vector field (0th element)", + path: ".vector_field[0]", + expectedOffset: 85, + expectedLength: 8, + }, + { + name: "vector field (10th element)", + path: ".vector_field[10]", + expectedOffset: 165, + expectedLength: 8, + }, // 2D bytes field { name: "two_dimension_bytes_field", @@ -109,6 +122,13 @@ func TestCalculateOffsetAndLength(t *testing.T) { expectedOffset: 277, expectedLength: 160, // 5 * 32 bytes }, + // Accessing an element in the 2D bytes field + { + name: "two_dimension_bytes_field (1st element)", + path: ".two_dimension_bytes_field[1]", + expectedOffset: 309, + expectedLength: 32, + }, // Bitvector fields { name: "bitvector64_field", @@ -161,26 +181,53 @@ func TestCalculateOffsetAndLength(t *testing.T) { { name: "field_list_uint64", path: ".field_list_uint64", - expectedOffset: 112, // First part of variable-sized type. + expectedOffset: 116, // First part of variable-sized type. expectedLength: 40, // 5 elements * uint64 (8 bytes each) }, + // Accessing an element in the list + { + name: "field_list_uint64 (2nd element)", + path: ".field_list_uint64[2]", + expectedOffset: 132, + expectedLength: 8, + }, { name: "field_list_container", path: ".field_list_container", - expectedOffset: 152, // Second part of variable-sized type. + expectedOffset: 156, // Second part of variable-sized type. 
expectedLength: 120, // 3 elements * FixedNestedContainer (40 bytes each) }, + // Accessing an element in the list of containers + { + name: "field_list_container (1st element)", + path: ".field_list_container[1]", + expectedOffset: 196, + expectedLength: 40, + }, { name: "field_list_bytes32", path: ".field_list_bytes32", - expectedOffset: 272, + expectedOffset: 276, expectedLength: 96, // 3 elements * 32 bytes each }, + // Accessing an element in the list of bytes32 + { + name: "field_list_bytes32 (0th element)", + path: ".field_list_bytes32[0]", + expectedOffset: 276, + expectedLength: 32, + }, + { + name: "field_list_bytes32 (2nd element)", + path: ".field_list_bytes32[2]", + expectedOffset: 340, + expectedLength: 32, + }, // Nested paths { name: "nested", path: ".nested", - expectedOffset: 368, + expectedOffset: 372, // Calculated with: // - Value1: 8 bytes // - field_list_uint64 offset: 4 bytes @@ -194,40 +241,85 @@ func TestCalculateOffsetAndLength(t *testing.T) { { name: "nested.value1", path: ".nested.value1", - expectedOffset: 368, + expectedOffset: 372, expectedLength: 8, }, { name: "nested.field_list_uint64", path: ".nested.field_list_uint64", - expectedOffset: 384, + expectedOffset: 388, expectedLength: 40, }, + { + name: "nested.field_list_uint64 (3rd element)", + path: ".nested.field_list_uint64[3]", + expectedOffset: 412, + expectedLength: 8, + }, { name: "nested.nested_list_field", path: ".nested.nested_list_field", - expectedOffset: 436, + expectedOffset: 440, expectedLength: 99, }, + // Accessing an element in the nested list of bytes + { + name: "nested.nested_list_field (1st element)", + path: ".nested.nested_list_field[1]", + expectedOffset: 472, + expectedLength: 33, + }, + { + name: "nested.nested_list_field (2nd element)", + path: ".nested.nested_list_field[2]", + expectedOffset: 505, + expectedLength: 34, + }, + // Variable list of variable-sized containers + { + name: "variable_container_list", + path: ".variable_container_list", + expectedOffset: 547, + expectedLength: 604, + }, // Bitlist field { name: "bitlist_field", path: ".bitlist_field", - expectedOffset: 535, + expectedOffset: 1151, expectedLength: 33, // 32 bytes + 1 byte for length delimiter }, // 2D bytes field { name: "nested_list_field", path: ".nested_list_field", - expectedOffset: 580, + expectedOffset: 1196, expectedLength: 99, }, + // Accessing an element in the list of nested bytes + { + name: "nested_list_field (0th element)", + path: ".nested_list_field[0]", + expectedOffset: 1196, + expectedLength: 32, + }, + { + name: "nested_list_field (1st element)", + path: ".nested_list_field[1]", + expectedOffset: 1228, + expectedLength: 33, + }, + { + name: "nested_list_field (2nd element)", + path: ".nested_list_field[2]", + expectedOffset: 1261, + expectedLength: 34, + }, // Fixed trailing field { name: "trailing_field", path: ".trailing_field", - expectedOffset: 56, // After leading_field + 6 offset pointers + expectedOffset: 60, // After leading_field + 7 offset pointers expectedLength: 56, }, } @@ -419,11 +511,27 @@ func getFixedTestContainerSpec() testutil.TestSpec { Path: ".vector_field", Expected: testContainer.VectorField, }, + { + Path: ".vector_field[0]", + Expected: testContainer.VectorField[0], + }, + { + Path: ".vector_field[10]", + Expected: testContainer.VectorField[10], + }, // 2D bytes field { Path: ".two_dimension_bytes_field", Expected: testContainer.TwoDimensionBytesField, }, + { + Path: ".two_dimension_bytes_field[0]", + Expected: testContainer.TwoDimensionBytesField[0], + }, + { + 
Path: ".two_dimension_bytes_field[1]", + Expected: testContainer.TwoDimensionBytesField[1], + }, // Bitvector fields { Path: ".bitvector64_field", @@ -481,6 +589,28 @@ func createVariableTestContainer() *sszquerypb.VariableTestContainer { } } + // Two VariableOuterContainer elements, each with two VariableInnerContainer elements + variableContainerList := make([]*sszquerypb.VariableOuterContainer, 2) + for i := range variableContainerList { + // Inner1: 8 + 4 + 4 + (8*3) + (4*3) + 99 = 151 bytes + inner1 := &sszquerypb.VariableNestedContainer{ + Value1: 42, + FieldListUint64: []uint64{uint64(i), uint64(i + 1), uint64(i + 2)}, + NestedListField: nestedListField, + } + // Inner2: 8 + 4 + 4 + (8*2) + (4*3) + 99 = 143 bytes + inner2 := &sszquerypb.VariableNestedContainer{ + Value1: 84, + FieldListUint64: []uint64{uint64(i + 3), uint64(i + 4)}, + NestedListField: nestedListField, + } + // (4*2) + 151 + 143 = 302 bytes per VariableOuterContainer + variableContainerList[i] = &sszquerypb.VariableOuterContainer{ + Inner_1: inner1, + Inner_2: inner2, + } + } + return &sszquerypb.VariableTestContainer{ // Fixed leading field LeadingField: leadingField, @@ -501,6 +631,9 @@ func createVariableTestContainer() *sszquerypb.VariableTestContainer { NestedListField: nestedListField, }, + // Variable list of variable-sized containers + VariableContainerList: variableContainerList, + // Bitlist field BitlistField: bitlistField, @@ -530,11 +663,24 @@ func getVariableTestContainerSpec() testutil.TestSpec { Path: ".field_list_uint64", Expected: testContainer.FieldListUint64, }, + { + Path: ".field_list_uint64[2]", + Expected: testContainer.FieldListUint64[2], + }, // Variable-size list of (fixed-size) containers { Path: ".field_list_container", Expected: testContainer.FieldListContainer, }, + // Accessing an element in the list of containers + { + Path: ".field_list_container[0]", + Expected: testContainer.FieldListContainer[0], + }, + { + Path: ".field_list_container[1]", + Expected: testContainer.FieldListContainer[1], + }, // Variable-size list of bytes32 { Path: ".field_list_bytes32", @@ -553,10 +699,55 @@ func getVariableTestContainerSpec() testutil.TestSpec { Path: ".nested.field_list_uint64", Expected: testContainer.Nested.FieldListUint64, }, + { + Path: ".nested.field_list_uint64[3]", + Expected: testContainer.Nested.FieldListUint64[3], + }, { Path: ".nested.nested_list_field", Expected: testContainer.Nested.NestedListField, }, + { + Path: ".nested.nested_list_field[0]", + Expected: testContainer.Nested.NestedListField[0], + }, + { + Path: ".nested.nested_list_field[1]", + Expected: testContainer.Nested.NestedListField[1], + }, + { + Path: ".nested.nested_list_field[2]", + Expected: testContainer.Nested.NestedListField[2], + }, + // Variable list of variable-sized containers + { + Path: ".variable_container_list", + Expected: testContainer.VariableContainerList, + }, + { + Path: ".variable_container_list[0]", + Expected: testContainer.VariableContainerList[0], + }, + { + Path: ".variable_container_list[0].inner_1.field_list_uint64[1]", + Expected: testContainer.VariableContainerList[0].Inner_1.FieldListUint64[1], + }, + { + Path: ".variable_container_list[0].inner_2.field_list_uint64[1]", + Expected: testContainer.VariableContainerList[0].Inner_2.FieldListUint64[1], + }, + { + Path: ".variable_container_list[1]", + Expected: testContainer.VariableContainerList[1], + }, + { + Path: ".variable_container_list[1].inner_1.field_list_uint64[1]", + Expected: 
testContainer.VariableContainerList[1].Inner_1.FieldListUint64[1], + }, + { + Path: ".variable_container_list[1].inner_2.field_list_uint64[1]", + Expected: testContainer.VariableContainerList[1].Inner_2.FieldListUint64[1], + }, // Bitlist field { Path: ".bitlist_field", @@ -567,6 +758,18 @@ func getVariableTestContainerSpec() testutil.TestSpec { Path: ".nested_list_field", Expected: testContainer.NestedListField, }, + { + Path: ".nested_list_field[0]", + Expected: testContainer.NestedListField[0], + }, + { + Path: ".nested_list_field[1]", + Expected: testContainer.NestedListField[1], + }, + { + Path: ".nested_list_field[2]", + Expected: testContainer.NestedListField[2], + }, // Fixed trailing field { Path: ".trailing_field", diff --git a/proto/ssz_query/ssz_query.pb.go b/proto/ssz_query/ssz_query.pb.go index 3d273eeeb1..6b9d5bd7d5 100755 --- a/proto/ssz_query/ssz_query.pb.go +++ b/proto/ssz_query/ssz_query.pb.go @@ -251,23 +251,76 @@ func (x *VariableNestedContainer) GetNestedListField() [][]byte { return nil } +type VariableOuterContainer struct { + state protoimpl.MessageState `protogen:"open.v1"` + Inner_1 *VariableNestedContainer `protobuf:"bytes,1,opt,name=inner_1,json=inner1,proto3" json:"inner_1,omitempty"` + Inner_2 *VariableNestedContainer `protobuf:"bytes,2,opt,name=inner_2,json=inner2,proto3" json:"inner_2,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *VariableOuterContainer) Reset() { + *x = VariableOuterContainer{} + mi := &file_proto_ssz_query_ssz_query_proto_msgTypes[3] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *VariableOuterContainer) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*VariableOuterContainer) ProtoMessage() {} + +func (x *VariableOuterContainer) ProtoReflect() protoreflect.Message { + mi := &file_proto_ssz_query_ssz_query_proto_msgTypes[3] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use VariableOuterContainer.ProtoReflect.Descriptor instead. 
+func (*VariableOuterContainer) Descriptor() ([]byte, []int) { + return file_proto_ssz_query_ssz_query_proto_rawDescGZIP(), []int{3} +} + +func (x *VariableOuterContainer) GetInner_1() *VariableNestedContainer { + if x != nil { + return x.Inner_1 + } + return nil +} + +func (x *VariableOuterContainer) GetInner_2() *VariableNestedContainer { + if x != nil { + return x.Inner_2 + } + return nil +} + type VariableTestContainer struct { - state protoimpl.MessageState `protogen:"open.v1"` - LeadingField []byte `protobuf:"bytes,1,opt,name=leading_field,json=leadingField,proto3" json:"leading_field,omitempty" ssz-size:"32"` - FieldListUint64 []uint64 `protobuf:"varint,2,rep,packed,name=field_list_uint64,json=fieldListUint64,proto3" json:"field_list_uint64,omitempty" ssz-max:"2048"` - FieldListContainer []*FixedNestedContainer `protobuf:"bytes,3,rep,name=field_list_container,json=fieldListContainer,proto3" json:"field_list_container,omitempty" ssz-max:"128"` - FieldListBytes32 [][]byte `protobuf:"bytes,4,rep,name=field_list_bytes32,json=fieldListBytes32,proto3" json:"field_list_bytes32,omitempty" ssz-max:"100" ssz-size:"?,32"` - Nested *VariableNestedContainer `protobuf:"bytes,5,opt,name=nested,proto3" json:"nested,omitempty"` - BitlistField github_com_prysmaticlabs_go_bitfield.Bitlist `protobuf:"bytes,6,opt,name=bitlist_field,json=bitlistField,proto3" json:"bitlist_field,omitempty" cast-type:"github.com/prysmaticlabs/go-bitfield.Bitlist" ssz-max:"2048"` - NestedListField [][]byte `protobuf:"bytes,7,rep,name=nested_list_field,json=nestedListField,proto3" json:"nested_list_field,omitempty" ssz-max:"100,50" ssz-size:"?,?"` - TrailingField []byte `protobuf:"bytes,8,opt,name=trailing_field,json=trailingField,proto3" json:"trailing_field,omitempty" ssz-size:"56"` - unknownFields protoimpl.UnknownFields - sizeCache protoimpl.SizeCache + state protoimpl.MessageState `protogen:"open.v1"` + LeadingField []byte `protobuf:"bytes,1,opt,name=leading_field,json=leadingField,proto3" json:"leading_field,omitempty" ssz-size:"32"` + FieldListUint64 []uint64 `protobuf:"varint,2,rep,packed,name=field_list_uint64,json=fieldListUint64,proto3" json:"field_list_uint64,omitempty" ssz-max:"2048"` + FieldListContainer []*FixedNestedContainer `protobuf:"bytes,3,rep,name=field_list_container,json=fieldListContainer,proto3" json:"field_list_container,omitempty" ssz-max:"128"` + FieldListBytes32 [][]byte `protobuf:"bytes,4,rep,name=field_list_bytes32,json=fieldListBytes32,proto3" json:"field_list_bytes32,omitempty" ssz-max:"100" ssz-size:"?,32"` + Nested *VariableNestedContainer `protobuf:"bytes,5,opt,name=nested,proto3" json:"nested,omitempty"` + VariableContainerList []*VariableOuterContainer `protobuf:"bytes,6,rep,name=variable_container_list,json=variableContainerList,proto3" json:"variable_container_list,omitempty" ssz-max:"10"` + BitlistField github_com_prysmaticlabs_go_bitfield.Bitlist `protobuf:"bytes,7,opt,name=bitlist_field,json=bitlistField,proto3" json:"bitlist_field,omitempty" cast-type:"github.com/prysmaticlabs/go-bitfield.Bitlist" ssz-max:"2048"` + NestedListField [][]byte `protobuf:"bytes,8,rep,name=nested_list_field,json=nestedListField,proto3" json:"nested_list_field,omitempty" ssz-max:"100,50" ssz-size:"?,?"` + TrailingField []byte `protobuf:"bytes,9,opt,name=trailing_field,json=trailingField,proto3" json:"trailing_field,omitempty" ssz-size:"56"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *VariableTestContainer) Reset() { *x = VariableTestContainer{} - mi := 
&file_proto_ssz_query_ssz_query_proto_msgTypes[3] + mi := &file_proto_ssz_query_ssz_query_proto_msgTypes[4] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -279,7 +332,7 @@ func (x *VariableTestContainer) String() string { func (*VariableTestContainer) ProtoMessage() {} func (x *VariableTestContainer) ProtoReflect() protoreflect.Message { - mi := &file_proto_ssz_query_ssz_query_proto_msgTypes[3] + mi := &file_proto_ssz_query_ssz_query_proto_msgTypes[4] if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -292,7 +345,7 @@ func (x *VariableTestContainer) ProtoReflect() protoreflect.Message { // Deprecated: Use VariableTestContainer.ProtoReflect.Descriptor instead. func (*VariableTestContainer) Descriptor() ([]byte, []int) { - return file_proto_ssz_query_ssz_query_proto_rawDescGZIP(), []int{3} + return file_proto_ssz_query_ssz_query_proto_rawDescGZIP(), []int{4} } func (x *VariableTestContainer) GetLeadingField() []byte { @@ -330,6 +383,13 @@ func (x *VariableTestContainer) GetNested() *VariableNestedContainer { return nil } +func (x *VariableTestContainer) GetVariableContainerList() []*VariableOuterContainer { + if x != nil { + return x.VariableContainerList + } + return nil +} + func (x *VariableTestContainer) GetBitlistField() github_com_prysmaticlabs_go_bitfield.Bitlist { if x != nil { return x.BitlistField @@ -411,45 +471,60 @@ var file_proto_ssz_query_ssz_query_proto_rawDesc = []byte{ 0x64, 0x5f, 0x6c, 0x69, 0x73, 0x74, 0x5f, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x18, 0x03, 0x20, 0x03, 0x28, 0x0c, 0x42, 0x11, 0x8a, 0xb5, 0x18, 0x03, 0x3f, 0x2c, 0x3f, 0x92, 0xb5, 0x18, 0x06, 0x31, 0x30, 0x30, 0x2c, 0x35, 0x30, 0x52, 0x0f, 0x6e, 0x65, 0x73, 0x74, 0x65, 0x64, 0x4c, 0x69, 0x73, - 0x74, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x22, 0x9e, 0x04, 0x0a, 0x15, 0x56, 0x61, 0x72, 0x69, 0x61, - 0x62, 0x6c, 0x65, 0x54, 0x65, 0x73, 0x74, 0x43, 0x6f, 0x6e, 0x74, 0x61, 0x69, 0x6e, 0x65, 0x72, - 0x12, 0x2b, 0x0a, 0x0d, 0x6c, 0x65, 0x61, 0x64, 0x69, 0x6e, 0x67, 0x5f, 0x66, 0x69, 0x65, 0x6c, - 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x06, 0x8a, 0xb5, 0x18, 0x02, 0x33, 0x32, 0x52, - 0x0c, 0x6c, 0x65, 0x61, 0x64, 0x69, 0x6e, 0x67, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x12, 0x34, 0x0a, - 0x11, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x5f, 0x6c, 0x69, 0x73, 0x74, 0x5f, 0x75, 0x69, 0x6e, 0x74, - 0x36, 0x34, 0x18, 0x02, 0x20, 0x03, 0x28, 0x04, 0x42, 0x08, 0x92, 0xb5, 0x18, 0x04, 0x32, 0x30, - 0x34, 0x38, 0x52, 0x0f, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x4c, 0x69, 0x73, 0x74, 0x55, 0x69, 0x6e, - 0x74, 0x36, 0x34, 0x12, 0x5a, 0x0a, 0x14, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x5f, 0x6c, 0x69, 0x73, - 0x74, 0x5f, 0x63, 0x6f, 0x6e, 0x74, 0x61, 0x69, 0x6e, 0x65, 0x72, 0x18, 0x03, 0x20, 0x03, 0x28, - 0x0b, 0x32, 0x1f, 0x2e, 0x73, 0x73, 0x7a, 0x5f, 0x71, 0x75, 0x65, 0x72, 0x79, 0x2e, 0x46, 0x69, - 0x78, 0x65, 0x64, 0x4e, 0x65, 0x73, 0x74, 0x65, 0x64, 0x43, 0x6f, 0x6e, 0x74, 0x61, 0x69, 0x6e, - 0x65, 0x72, 0x42, 0x07, 0x92, 0xb5, 0x18, 0x03, 0x31, 0x32, 0x38, 0x52, 0x12, 0x66, 0x69, 0x65, - 0x6c, 0x64, 0x4c, 0x69, 0x73, 0x74, 0x43, 0x6f, 0x6e, 0x74, 0x61, 0x69, 0x6e, 0x65, 0x72, 0x12, - 0x3d, 0x0a, 0x12, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x5f, 0x6c, 0x69, 0x73, 0x74, 0x5f, 0x62, 0x79, - 0x74, 0x65, 0x73, 0x33, 0x32, 0x18, 0x04, 0x20, 0x03, 0x28, 0x0c, 0x42, 0x0f, 0x8a, 0xb5, 0x18, - 0x04, 0x3f, 0x2c, 0x33, 0x32, 0x92, 0xb5, 0x18, 0x03, 0x31, 0x30, 0x30, 0x52, 0x10, 0x66, 0x69, - 0x65, 0x6c, 0x64, 0x4c, 0x69, 0x73, 0x74, 0x42, 0x79, 0x74, 0x65, 0x73, 0x33, 0x32, 0x12, 0x3a, - 
0x0a, 0x06, 0x6e, 0x65, 0x73, 0x74, 0x65, 0x64, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x22, - 0x2e, 0x73, 0x73, 0x7a, 0x5f, 0x71, 0x75, 0x65, 0x72, 0x79, 0x2e, 0x56, 0x61, 0x72, 0x69, 0x61, - 0x62, 0x6c, 0x65, 0x4e, 0x65, 0x73, 0x74, 0x65, 0x64, 0x43, 0x6f, 0x6e, 0x74, 0x61, 0x69, 0x6e, - 0x65, 0x72, 0x52, 0x06, 0x6e, 0x65, 0x73, 0x74, 0x65, 0x64, 0x12, 0x5d, 0x0a, 0x0d, 0x62, 0x69, - 0x74, 0x6c, 0x69, 0x73, 0x74, 0x5f, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x18, 0x06, 0x20, 0x01, 0x28, - 0x0c, 0x42, 0x38, 0x82, 0xb5, 0x18, 0x2c, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, - 0x6d, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x61, 0x74, 0x69, 0x63, 0x6c, 0x61, 0x62, 0x73, 0x2f, - 0x67, 0x6f, 0x2d, 0x62, 0x69, 0x74, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x2e, 0x42, 0x69, 0x74, 0x6c, - 0x69, 0x73, 0x74, 0x92, 0xb5, 0x18, 0x04, 0x32, 0x30, 0x34, 0x38, 0x52, 0x0c, 0x62, 0x69, 0x74, - 0x6c, 0x69, 0x73, 0x74, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x12, 0x3d, 0x0a, 0x11, 0x6e, 0x65, 0x73, - 0x74, 0x65, 0x64, 0x5f, 0x6c, 0x69, 0x73, 0x74, 0x5f, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x18, 0x07, - 0x20, 0x03, 0x28, 0x0c, 0x42, 0x11, 0x8a, 0xb5, 0x18, 0x03, 0x3f, 0x2c, 0x3f, 0x92, 0xb5, 0x18, - 0x06, 0x31, 0x30, 0x30, 0x2c, 0x35, 0x30, 0x52, 0x0f, 0x6e, 0x65, 0x73, 0x74, 0x65, 0x64, 0x4c, - 0x69, 0x73, 0x74, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x12, 0x2d, 0x0a, 0x0e, 0x74, 0x72, 0x61, 0x69, - 0x6c, 0x69, 0x6e, 0x67, 0x5f, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x18, 0x08, 0x20, 0x01, 0x28, 0x0c, - 0x42, 0x06, 0x8a, 0xb5, 0x18, 0x02, 0x35, 0x36, 0x52, 0x0d, 0x74, 0x72, 0x61, 0x69, 0x6c, 0x69, - 0x6e, 0x67, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x42, 0x32, 0x5a, 0x30, 0x67, 0x69, 0x74, 0x68, 0x75, - 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, - 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x70, 0x72, 0x6f, 0x74, - 0x6f, 0x2f, 0x73, 0x73, 0x7a, 0x5f, 0x71, 0x75, 0x65, 0x72, 0x79, 0x62, 0x06, 0x70, 0x72, 0x6f, - 0x74, 0x6f, 0x33, + 0x74, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x22, 0x92, 0x01, 0x0a, 0x16, 0x56, 0x61, 0x72, 0x69, 0x61, + 0x62, 0x6c, 0x65, 0x4f, 0x75, 0x74, 0x65, 0x72, 0x43, 0x6f, 0x6e, 0x74, 0x61, 0x69, 0x6e, 0x65, + 0x72, 0x12, 0x3b, 0x0a, 0x07, 0x69, 0x6e, 0x6e, 0x65, 0x72, 0x5f, 0x31, 0x18, 0x01, 0x20, 0x01, + 0x28, 0x0b, 0x32, 0x22, 0x2e, 0x73, 0x73, 0x7a, 0x5f, 0x71, 0x75, 0x65, 0x72, 0x79, 0x2e, 0x56, + 0x61, 0x72, 0x69, 0x61, 0x62, 0x6c, 0x65, 0x4e, 0x65, 0x73, 0x74, 0x65, 0x64, 0x43, 0x6f, 0x6e, + 0x74, 0x61, 0x69, 0x6e, 0x65, 0x72, 0x52, 0x06, 0x69, 0x6e, 0x6e, 0x65, 0x72, 0x31, 0x12, 0x3b, + 0x0a, 0x07, 0x69, 0x6e, 0x6e, 0x65, 0x72, 0x5f, 0x32, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, + 0x22, 0x2e, 0x73, 0x73, 0x7a, 0x5f, 0x71, 0x75, 0x65, 0x72, 0x79, 0x2e, 0x56, 0x61, 0x72, 0x69, + 0x61, 0x62, 0x6c, 0x65, 0x4e, 0x65, 0x73, 0x74, 0x65, 0x64, 0x43, 0x6f, 0x6e, 0x74, 0x61, 0x69, + 0x6e, 0x65, 0x72, 0x52, 0x06, 0x69, 0x6e, 0x6e, 0x65, 0x72, 0x32, 0x22, 0x81, 0x05, 0x0a, 0x15, + 0x56, 0x61, 0x72, 0x69, 0x61, 0x62, 0x6c, 0x65, 0x54, 0x65, 0x73, 0x74, 0x43, 0x6f, 0x6e, 0x74, + 0x61, 0x69, 0x6e, 0x65, 0x72, 0x12, 0x2b, 0x0a, 0x0d, 0x6c, 0x65, 0x61, 0x64, 0x69, 0x6e, 0x67, + 0x5f, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x06, 0x8a, 0xb5, + 0x18, 0x02, 0x33, 0x32, 0x52, 0x0c, 0x6c, 0x65, 0x61, 0x64, 0x69, 0x6e, 0x67, 0x46, 0x69, 0x65, + 0x6c, 0x64, 0x12, 0x34, 0x0a, 0x11, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x5f, 0x6c, 0x69, 0x73, 0x74, + 0x5f, 0x75, 0x69, 0x6e, 0x74, 0x36, 0x34, 0x18, 0x02, 0x20, 0x03, 0x28, 0x04, 0x42, 0x08, 0x92, + 0xb5, 
0x18, 0x04, 0x32, 0x30, 0x34, 0x38, 0x52, 0x0f, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x4c, 0x69, + 0x73, 0x74, 0x55, 0x69, 0x6e, 0x74, 0x36, 0x34, 0x12, 0x5a, 0x0a, 0x14, 0x66, 0x69, 0x65, 0x6c, + 0x64, 0x5f, 0x6c, 0x69, 0x73, 0x74, 0x5f, 0x63, 0x6f, 0x6e, 0x74, 0x61, 0x69, 0x6e, 0x65, 0x72, + 0x18, 0x03, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1f, 0x2e, 0x73, 0x73, 0x7a, 0x5f, 0x71, 0x75, 0x65, + 0x72, 0x79, 0x2e, 0x46, 0x69, 0x78, 0x65, 0x64, 0x4e, 0x65, 0x73, 0x74, 0x65, 0x64, 0x43, 0x6f, + 0x6e, 0x74, 0x61, 0x69, 0x6e, 0x65, 0x72, 0x42, 0x07, 0x92, 0xb5, 0x18, 0x03, 0x31, 0x32, 0x38, + 0x52, 0x12, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x4c, 0x69, 0x73, 0x74, 0x43, 0x6f, 0x6e, 0x74, 0x61, + 0x69, 0x6e, 0x65, 0x72, 0x12, 0x3d, 0x0a, 0x12, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x5f, 0x6c, 0x69, + 0x73, 0x74, 0x5f, 0x62, 0x79, 0x74, 0x65, 0x73, 0x33, 0x32, 0x18, 0x04, 0x20, 0x03, 0x28, 0x0c, + 0x42, 0x0f, 0x8a, 0xb5, 0x18, 0x04, 0x3f, 0x2c, 0x33, 0x32, 0x92, 0xb5, 0x18, 0x03, 0x31, 0x30, + 0x30, 0x52, 0x10, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x4c, 0x69, 0x73, 0x74, 0x42, 0x79, 0x74, 0x65, + 0x73, 0x33, 0x32, 0x12, 0x3a, 0x0a, 0x06, 0x6e, 0x65, 0x73, 0x74, 0x65, 0x64, 0x18, 0x05, 0x20, + 0x01, 0x28, 0x0b, 0x32, 0x22, 0x2e, 0x73, 0x73, 0x7a, 0x5f, 0x71, 0x75, 0x65, 0x72, 0x79, 0x2e, + 0x56, 0x61, 0x72, 0x69, 0x61, 0x62, 0x6c, 0x65, 0x4e, 0x65, 0x73, 0x74, 0x65, 0x64, 0x43, 0x6f, + 0x6e, 0x74, 0x61, 0x69, 0x6e, 0x65, 0x72, 0x52, 0x06, 0x6e, 0x65, 0x73, 0x74, 0x65, 0x64, 0x12, + 0x61, 0x0a, 0x17, 0x76, 0x61, 0x72, 0x69, 0x61, 0x62, 0x6c, 0x65, 0x5f, 0x63, 0x6f, 0x6e, 0x74, + 0x61, 0x69, 0x6e, 0x65, 0x72, 0x5f, 0x6c, 0x69, 0x73, 0x74, 0x18, 0x06, 0x20, 0x03, 0x28, 0x0b, + 0x32, 0x21, 0x2e, 0x73, 0x73, 0x7a, 0x5f, 0x71, 0x75, 0x65, 0x72, 0x79, 0x2e, 0x56, 0x61, 0x72, + 0x69, 0x61, 0x62, 0x6c, 0x65, 0x4f, 0x75, 0x74, 0x65, 0x72, 0x43, 0x6f, 0x6e, 0x74, 0x61, 0x69, + 0x6e, 0x65, 0x72, 0x42, 0x06, 0x92, 0xb5, 0x18, 0x02, 0x31, 0x30, 0x52, 0x15, 0x76, 0x61, 0x72, + 0x69, 0x61, 0x62, 0x6c, 0x65, 0x43, 0x6f, 0x6e, 0x74, 0x61, 0x69, 0x6e, 0x65, 0x72, 0x4c, 0x69, + 0x73, 0x74, 0x12, 0x5d, 0x0a, 0x0d, 0x62, 0x69, 0x74, 0x6c, 0x69, 0x73, 0x74, 0x5f, 0x66, 0x69, + 0x65, 0x6c, 0x64, 0x18, 0x07, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x38, 0x82, 0xb5, 0x18, 0x2c, 0x67, + 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x61, + 0x74, 0x69, 0x63, 0x6c, 0x61, 0x62, 0x73, 0x2f, 0x67, 0x6f, 0x2d, 0x62, 0x69, 0x74, 0x66, 0x69, + 0x65, 0x6c, 0x64, 0x2e, 0x42, 0x69, 0x74, 0x6c, 0x69, 0x73, 0x74, 0x92, 0xb5, 0x18, 0x04, 0x32, + 0x30, 0x34, 0x38, 0x52, 0x0c, 0x62, 0x69, 0x74, 0x6c, 0x69, 0x73, 0x74, 0x46, 0x69, 0x65, 0x6c, + 0x64, 0x12, 0x3d, 0x0a, 0x11, 0x6e, 0x65, 0x73, 0x74, 0x65, 0x64, 0x5f, 0x6c, 0x69, 0x73, 0x74, + 0x5f, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x18, 0x08, 0x20, 0x03, 0x28, 0x0c, 0x42, 0x11, 0x8a, 0xb5, + 0x18, 0x03, 0x3f, 0x2c, 0x3f, 0x92, 0xb5, 0x18, 0x06, 0x31, 0x30, 0x30, 0x2c, 0x35, 0x30, 0x52, + 0x0f, 0x6e, 0x65, 0x73, 0x74, 0x65, 0x64, 0x4c, 0x69, 0x73, 0x74, 0x46, 0x69, 0x65, 0x6c, 0x64, + 0x12, 0x2d, 0x0a, 0x0e, 0x74, 0x72, 0x61, 0x69, 0x6c, 0x69, 0x6e, 0x67, 0x5f, 0x66, 0x69, 0x65, + 0x6c, 0x64, 0x18, 0x09, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x06, 0x8a, 0xb5, 0x18, 0x02, 0x35, 0x36, + 0x52, 0x0d, 0x74, 0x72, 0x61, 0x69, 0x6c, 0x69, 0x6e, 0x67, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x42, + 0x32, 0x5a, 0x30, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, + 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, + 0x2f, 0x76, 0x36, 0x2f, 0x70, 
0x72, 0x6f, 0x74, 0x6f, 0x2f, 0x73, 0x73, 0x7a, 0x5f, 0x71, 0x75, + 0x65, 0x72, 0x79, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, } var ( @@ -464,22 +539,26 @@ func file_proto_ssz_query_ssz_query_proto_rawDescGZIP() []byte { return file_proto_ssz_query_ssz_query_proto_rawDescData } -var file_proto_ssz_query_ssz_query_proto_msgTypes = make([]protoimpl.MessageInfo, 4) +var file_proto_ssz_query_ssz_query_proto_msgTypes = make([]protoimpl.MessageInfo, 5) var file_proto_ssz_query_ssz_query_proto_goTypes = []any{ (*FixedNestedContainer)(nil), // 0: ssz_query.FixedNestedContainer (*FixedTestContainer)(nil), // 1: ssz_query.FixedTestContainer (*VariableNestedContainer)(nil), // 2: ssz_query.VariableNestedContainer - (*VariableTestContainer)(nil), // 3: ssz_query.VariableTestContainer + (*VariableOuterContainer)(nil), // 3: ssz_query.VariableOuterContainer + (*VariableTestContainer)(nil), // 4: ssz_query.VariableTestContainer } var file_proto_ssz_query_ssz_query_proto_depIdxs = []int32{ 0, // 0: ssz_query.FixedTestContainer.nested:type_name -> ssz_query.FixedNestedContainer - 0, // 1: ssz_query.VariableTestContainer.field_list_container:type_name -> ssz_query.FixedNestedContainer - 2, // 2: ssz_query.VariableTestContainer.nested:type_name -> ssz_query.VariableNestedContainer - 3, // [3:3] is the sub-list for method output_type - 3, // [3:3] is the sub-list for method input_type - 3, // [3:3] is the sub-list for extension type_name - 3, // [3:3] is the sub-list for extension extendee - 0, // [0:3] is the sub-list for field type_name + 2, // 1: ssz_query.VariableOuterContainer.inner_1:type_name -> ssz_query.VariableNestedContainer + 2, // 2: ssz_query.VariableOuterContainer.inner_2:type_name -> ssz_query.VariableNestedContainer + 0, // 3: ssz_query.VariableTestContainer.field_list_container:type_name -> ssz_query.FixedNestedContainer + 2, // 4: ssz_query.VariableTestContainer.nested:type_name -> ssz_query.VariableNestedContainer + 3, // 5: ssz_query.VariableTestContainer.variable_container_list:type_name -> ssz_query.VariableOuterContainer + 6, // [6:6] is the sub-list for method output_type + 6, // [6:6] is the sub-list for method input_type + 6, // [6:6] is the sub-list for extension type_name + 6, // [6:6] is the sub-list for extension extendee + 0, // [0:6] is the sub-list for field type_name } func init() { file_proto_ssz_query_ssz_query_proto_init() } @@ -493,7 +572,7 @@ func file_proto_ssz_query_ssz_query_proto_init() { GoPackagePath: reflect.TypeOf(x{}).PkgPath(), RawDescriptor: file_proto_ssz_query_ssz_query_proto_rawDesc, NumEnums: 0, - NumMessages: 4, + NumMessages: 5, NumExtensions: 0, NumServices: 0, }, diff --git a/proto/ssz_query/ssz_query.proto b/proto/ssz_query/ssz_query.proto index 4fe88515d7..4898eab1cb 100644 --- a/proto/ssz_query/ssz_query.proto +++ b/proto/ssz_query/ssz_query.proto @@ -71,6 +71,13 @@ message VariableNestedContainer { ]; } + +// Mock of AttesterSlashingElectra +message VariableOuterContainer { + VariableNestedContainer inner_1 = 1; + VariableNestedContainer inner_2 = 2; +} + // VariableTestContainer - comprehensive variable-size container for SSZ query testing // Tests: Variable-size lists, offsets in variable containers, mixed fixed/variable fields message VariableTestContainer { @@ -93,8 +100,12 @@ message VariableTestContainer { // Variable nested container - test nested container access within variable container VariableNestedContainer nested = 5; + // List of variable-sized containers + // e.g., BeaconBlockBody.attester_slashings + repeated 
VariableOuterContainer variable_container_list = 6 [ (ethereum.eth.ext.ssz_max) = "10" ]; // Test: List[VariableOuterContainer, 10] + // Bitlist type - test bitlist serialization - bytes bitlist_field = 6 [ + bytes bitlist_field = 7 [ (ethereum.eth.ext.ssz_max) = "2048", (ethereum.eth.ext.cast_type) = "github.com/prysmaticlabs/go-bitfield.Bitlist" @@ -102,12 +113,12 @@ message VariableTestContainer { // 2D bytes list - test list of bytelists. // e.g., ExecutionPayload.transactions - repeated bytes nested_list_field = 7 [ + repeated bytes nested_list_field = 8 [ (ethereum.eth.ext.ssz_size) = "?,?", (ethereum.eth.ext.ssz_max) = "100,50" ]; // Fixed-size trailing field - test fixed field after variable fields // Verifies correct offset calculation after variable-size fields - bytes trailing_field = 8 [ (ethereum.eth.ext.ssz_size) = "56" ]; // Test: fixed 56-byte field at end, offset: 32 + 4 + 4 + 4 + 4 + 4 + 4 = 56 + bytes trailing_field = 9 [ (ethereum.eth.ext.ssz_size) = "56" ]; // Test: fixed 56-byte field at end, offset: 32 + 4 + 4 + 4 + 4 + 4 + 4 + 4 = 60 } diff --git a/proto/ssz_query/ssz_query.ssz.go b/proto/ssz_query/ssz_query.ssz.go index 677d3db7a9..0a0f65fbc3 100644 --- a/proto/ssz_query/ssz_query.ssz.go +++ b/proto/ssz_query/ssz_query.ssz.go @@ -509,6 +509,134 @@ func (v *VariableNestedContainer) HashTreeRootWith(hh *ssz.Hasher) (err error) { return } +// MarshalSSZ ssz marshals the VariableOuterContainer object +func (v *VariableOuterContainer) MarshalSSZ() ([]byte, error) { + return ssz.MarshalSSZ(v) +} + +// MarshalSSZTo ssz marshals the VariableOuterContainer object to a target array +func (v *VariableOuterContainer) MarshalSSZTo(buf []byte) (dst []byte, err error) { + dst = buf + offset := int(8) + + // Offset (0) 'Inner_1' + dst = ssz.WriteOffset(dst, offset) + if v.Inner_1 == nil { + v.Inner_1 = new(VariableNestedContainer) + } + offset += v.Inner_1.SizeSSZ() + + // Offset (1) 'Inner_2' + dst = ssz.WriteOffset(dst, offset) + if v.Inner_2 == nil { + v.Inner_2 = new(VariableNestedContainer) + } + offset += v.Inner_2.SizeSSZ() + + // Field (0) 'Inner_1' + if dst, err = v.Inner_1.MarshalSSZTo(dst); err != nil { + return + } + + // Field (1) 'Inner_2' + if dst, err = v.Inner_2.MarshalSSZTo(dst); err != nil { + return + } + + return +} + +// UnmarshalSSZ ssz unmarshals the VariableOuterContainer object +func (v *VariableOuterContainer) UnmarshalSSZ(buf []byte) error { + var err error + size := uint64(len(buf)) + if size < 8 { + return ssz.ErrSize + } + + tail := buf + var o0, o1 uint64 + + // Offset (0) 'Inner_1' + if o0 = ssz.ReadOffset(buf[0:4]); o0 > size { + return ssz.ErrOffset + } + + if o0 != 8 { + return ssz.ErrInvalidVariableOffset + } + + // Offset (1) 'Inner_2' + if o1 = ssz.ReadOffset(buf[4:8]); o1 > size || o0 > o1 { + return ssz.ErrOffset + } + + // Field (0) 'Inner_1' + { + buf = tail[o0:o1] + if v.Inner_1 == nil { + v.Inner_1 = new(VariableNestedContainer) + } + if err = v.Inner_1.UnmarshalSSZ(buf); err != nil { + return err + } + } + + // Field (1) 'Inner_2' + { + buf = tail[o1:] + if v.Inner_2 == nil { + v.Inner_2 = new(VariableNestedContainer) + } + if err = v.Inner_2.UnmarshalSSZ(buf); err != nil { + return err + } + } + return err +} + +// SizeSSZ returns the ssz encoded size in bytes for the VariableOuterContainer object +func (v *VariableOuterContainer) SizeSSZ() (size int) { + size = 8 + + // Field (0) 'Inner_1' + if v.Inner_1 == nil { + v.Inner_1 = new(VariableNestedContainer) + } + size += v.Inner_1.SizeSSZ() + + // Field (1) 'Inner_2' + if v.Inner_2 == 
nil { + v.Inner_2 = new(VariableNestedContainer) + } + size += v.Inner_2.SizeSSZ() + + return +} + +// HashTreeRoot ssz hashes the VariableOuterContainer object +func (v *VariableOuterContainer) HashTreeRoot() ([32]byte, error) { + return ssz.HashWithDefaultHasher(v) +} + +// HashTreeRootWith ssz hashes the VariableOuterContainer object with a hasher +func (v *VariableOuterContainer) HashTreeRootWith(hh *ssz.Hasher) (err error) { + indx := hh.Index() + + // Field (0) 'Inner_1' + if err = v.Inner_1.HashTreeRootWith(hh); err != nil { + return + } + + // Field (1) 'Inner_2' + if err = v.Inner_2.HashTreeRootWith(hh); err != nil { + return + } + + hh.Merkleize(indx) + return +} + // MarshalSSZ ssz marshals the VariableTestContainer object func (v *VariableTestContainer) MarshalSSZ() ([]byte, error) { return ssz.MarshalSSZ(v) @@ -517,7 +645,7 @@ func (v *VariableTestContainer) MarshalSSZ() ([]byte, error) { // MarshalSSZTo ssz marshals the VariableTestContainer object to a target array func (v *VariableTestContainer) MarshalSSZTo(buf []byte) (dst []byte, err error) { dst = buf - offset := int(112) + offset := int(116) // Field (0) 'LeadingField' if size := len(v.LeadingField); size != 32 { @@ -545,18 +673,25 @@ func (v *VariableTestContainer) MarshalSSZTo(buf []byte) (dst []byte, err error) } offset += v.Nested.SizeSSZ() - // Offset (5) 'BitlistField' + // Offset (5) 'VariableContainerList' + dst = ssz.WriteOffset(dst, offset) + for ii := 0; ii < len(v.VariableContainerList); ii++ { + offset += 4 + offset += v.VariableContainerList[ii].SizeSSZ() + } + + // Offset (6) 'BitlistField' dst = ssz.WriteOffset(dst, offset) offset += len(v.BitlistField) - // Offset (6) 'NestedListField' + // Offset (7) 'NestedListField' dst = ssz.WriteOffset(dst, offset) for ii := 0; ii < len(v.NestedListField); ii++ { offset += 4 offset += len(v.NestedListField[ii]) } - // Field (7) 'TrailingField' + // Field (8) 'TrailingField' if size := len(v.TrailingField); size != 56 { err = ssz.ErrBytesLengthFn("--.TrailingField", size, 56) return @@ -601,14 +736,32 @@ func (v *VariableTestContainer) MarshalSSZTo(buf []byte) (dst []byte, err error) return } - // Field (5) 'BitlistField' + // Field (5) 'VariableContainerList' + if size := len(v.VariableContainerList); size > 10 { + err = ssz.ErrListTooBigFn("--.VariableContainerList", size, 10) + return + } + { + offset = 4 * len(v.VariableContainerList) + for ii := 0; ii < len(v.VariableContainerList); ii++ { + dst = ssz.WriteOffset(dst, offset) + offset += v.VariableContainerList[ii].SizeSSZ() + } + } + for ii := 0; ii < len(v.VariableContainerList); ii++ { + if dst, err = v.VariableContainerList[ii].MarshalSSZTo(dst); err != nil { + return + } + } + + // Field (6) 'BitlistField' if size := len(v.BitlistField); size > 2048 { err = ssz.ErrBytesLengthFn("--.BitlistField", size, 2048) return } dst = append(dst, v.BitlistField...) 
- // Field (6) 'NestedListField' + // Field (7) 'NestedListField' if size := len(v.NestedListField); size > 100 { err = ssz.ErrListTooBigFn("--.NestedListField", size, 100) return @@ -635,12 +788,12 @@ func (v *VariableTestContainer) MarshalSSZTo(buf []byte) (dst []byte, err error) func (v *VariableTestContainer) UnmarshalSSZ(buf []byte) error { var err error size := uint64(len(buf)) - if size < 112 { + if size < 116 { return ssz.ErrSize } tail := buf - var o1, o2, o3, o4, o5, o6 uint64 + var o1, o2, o3, o4, o5, o6, o7 uint64 // Field (0) 'LeadingField' if cap(v.LeadingField) == 0 { @@ -653,7 +806,7 @@ func (v *VariableTestContainer) UnmarshalSSZ(buf []byte) error { return ssz.ErrOffset } - if o1 != 112 { + if o1 != 116 { return ssz.ErrInvalidVariableOffset } @@ -672,21 +825,26 @@ func (v *VariableTestContainer) UnmarshalSSZ(buf []byte) error { return ssz.ErrOffset } - // Offset (5) 'BitlistField' + // Offset (5) 'VariableContainerList' if o5 = ssz.ReadOffset(buf[48:52]); o5 > size || o4 > o5 { return ssz.ErrOffset } - // Offset (6) 'NestedListField' + // Offset (6) 'BitlistField' if o6 = ssz.ReadOffset(buf[52:56]); o6 > size || o5 > o6 { return ssz.ErrOffset } - // Field (7) 'TrailingField' - if cap(v.TrailingField) == 0 { - v.TrailingField = make([]byte, 0, len(buf[56:112])) + // Offset (7) 'NestedListField' + if o7 = ssz.ReadOffset(buf[56:60]); o7 > size || o6 > o7 { + return ssz.ErrOffset } - v.TrailingField = append(v.TrailingField, buf[56:112]...) + + // Field (8) 'TrailingField' + if cap(v.TrailingField) == 0 { + v.TrailingField = make([]byte, 0, len(buf[60:116])) + } + v.TrailingField = append(v.TrailingField, buf[60:116]...) // Field (1) 'FieldListUint64' { @@ -746,9 +904,31 @@ func (v *VariableTestContainer) UnmarshalSSZ(buf []byte) error { } } - // Field (5) 'BitlistField' + // Field (5) 'VariableContainerList' { buf = tail[o5:o6] + num, err := ssz.DecodeDynamicLength(buf, 10) + if err != nil { + return err + } + v.VariableContainerList = make([]*VariableOuterContainer, num) + err = ssz.UnmarshalDynamic(buf, num, func(indx int, buf []byte) (err error) { + if v.VariableContainerList[indx] == nil { + v.VariableContainerList[indx] = new(VariableOuterContainer) + } + if err = v.VariableContainerList[indx].UnmarshalSSZ(buf); err != nil { + return err + } + return nil + }) + if err != nil { + return err + } + } + + // Field (6) 'BitlistField' + { + buf = tail[o6:o7] if err = ssz.ValidateBitlist(buf, 2048); err != nil { return err } @@ -758,9 +938,9 @@ func (v *VariableTestContainer) UnmarshalSSZ(buf []byte) error { v.BitlistField = append(v.BitlistField, buf...) 
} - // Field (6) 'NestedListField' + // Field (7) 'NestedListField' { - buf = tail[o6:] + buf = tail[o7:] num, err := ssz.DecodeDynamicLength(buf, 100) if err != nil { return err @@ -785,7 +965,7 @@ func (v *VariableTestContainer) UnmarshalSSZ(buf []byte) error { // SizeSSZ returns the ssz encoded size in bytes for the VariableTestContainer object func (v *VariableTestContainer) SizeSSZ() (size int) { - size = 112 + size = 116 // Field (1) 'FieldListUint64' size += len(v.FieldListUint64) * 8 @@ -802,10 +982,16 @@ func (v *VariableTestContainer) SizeSSZ() (size int) { } size += v.Nested.SizeSSZ() - // Field (5) 'BitlistField' + // Field (5) 'VariableContainerList' + for ii := 0; ii < len(v.VariableContainerList); ii++ { + size += 4 + size += v.VariableContainerList[ii].SizeSSZ() + } + + // Field (6) 'BitlistField' size += len(v.BitlistField) - // Field (6) 'NestedListField' + // Field (7) 'NestedListField' for ii := 0; ii < len(v.NestedListField); ii++ { size += 4 size += len(v.NestedListField[ii]) @@ -886,14 +1072,30 @@ func (v *VariableTestContainer) HashTreeRootWith(hh *ssz.Hasher) (err error) { return } - // Field (5) 'BitlistField' + // Field (5) 'VariableContainerList' + { + subIndx := hh.Index() + num := uint64(len(v.VariableContainerList)) + if num > 10 { + err = ssz.ErrIncorrectListSize + return + } + for _, elem := range v.VariableContainerList { + if err = elem.HashTreeRootWith(hh); err != nil { + return + } + } + hh.MerkleizeWithMixin(subIndx, num, 10) + } + + // Field (6) 'BitlistField' if len(v.BitlistField) == 0 { err = ssz.ErrEmptyBitlist return } hh.PutBitlist(v.BitlistField, 2048) - // Field (6) 'NestedListField' + // Field (7) 'NestedListField' { subIndx := hh.Index() num := uint64(len(v.NestedListField)) @@ -916,7 +1118,7 @@ func (v *VariableTestContainer) HashTreeRootWith(hh *ssz.Hasher) (err error) { hh.MerkleizeWithMixin(subIndx, num, 100) } - // Field (7) 'TrailingField' + // Field (8) 'TrailingField' if size := len(v.TrailingField); size != 56 { err = ssz.ErrBytesLengthFn("--.TrailingField", size, 56) return From 41e76070922fbfec7fe6bfff739d573ee1a6b77c Mon Sep 17 00:00:00 2001 From: kasey <489222+kasey@users.noreply.github.com> Date: Thu, 16 Oct 2025 12:30:59 -0500 Subject: [PATCH 032/103] Decrease att batch deadline to 5ms for faster net prop (#15882) Co-authored-by: Kasey Kirkham --- beacon-chain/sync/batch_verifier.go | 2 +- changelog/kasey_att-batch-5ms-deadline.md | 2 ++ 2 files changed, 3 insertions(+), 1 deletion(-) create mode 100644 changelog/kasey_att-batch-5ms-deadline.md diff --git a/beacon-chain/sync/batch_verifier.go b/beacon-chain/sync/batch_verifier.go index 3383f780ec..4968ac8805 100644 --- a/beacon-chain/sync/batch_verifier.go +++ b/beacon-chain/sync/batch_verifier.go @@ -14,7 +14,7 @@ import ( "github.com/pkg/errors" ) -const signatureVerificationInterval = 50 * time.Millisecond +const signatureVerificationInterval = 5 * time.Millisecond type signatureVerifier struct { set *bls.SignatureBatch diff --git a/changelog/kasey_att-batch-5ms-deadline.md b/changelog/kasey_att-batch-5ms-deadline.md new file mode 100644 index 0000000000..8afb133ac5 --- /dev/null +++ b/changelog/kasey_att-batch-5ms-deadline.md @@ -0,0 +1,2 @@ +### Fixed +- Decreased attestation gossip validation batch deadline to 5ms. From b2d350b9880be3c3d7725bf400eaa8123e8d4781 Mon Sep 17 00:00:00 2001 From: Manu NALEPA Date: Thu, 16 Oct 2025 23:12:00 +0200 Subject: [PATCH 033/103] Correctly advertise (in ENR and metadata) attestation subnets when using `--subscribe-all-subnets`. 
(#15880) --- beacon-chain/p2p/subnets.go | 21 ++++++++++++----- beacon-chain/p2p/subnets_test.go | 40 +++++++++++++++++++++++++------- beacon-chain/sync/subscriber.go | 4 ---- changelog/manu-advertise-atts.md | 2 ++ 4 files changed, 48 insertions(+), 19 deletions(-) create mode 100644 changelog/manu-advertise-atts.md diff --git a/beacon-chain/p2p/subnets.go b/beacon-chain/p2p/subnets.go index 3742b36ae9..9ba953017c 100644 --- a/beacon-chain/p2p/subnets.go +++ b/beacon-chain/p2p/subnets.go @@ -514,17 +514,26 @@ func initializePersistentSubnets(id enode.ID, epoch primitives.Epoch) error { // // return [compute_subscribed_subnet(node_id, epoch, index) for index in range(SUBNETS_PER_NODE)] func computeSubscribedSubnets(nodeID enode.ID, epoch primitives.Epoch) ([]uint64, error) { - subnetsPerNode := params.BeaconConfig().SubnetsPerNode - subs := make([]uint64, 0, subnetsPerNode) + beaconConfig := params.BeaconConfig() - for i := uint64(0); i < subnetsPerNode; i++ { + if flags.Get().SubscribeToAllSubnets { + subnets := make([]uint64, 0, beaconConfig.AttestationSubnetCount) + for i := range beaconConfig.AttestationSubnetCount { + subnets = append(subnets, i) + } + return subnets, nil + } + + subnets := make([]uint64, 0, beaconConfig.SubnetsPerNode) + for i := range beaconConfig.SubnetsPerNode { sub, err := computeSubscribedSubnet(nodeID, epoch, i) if err != nil { - return nil, err + return nil, errors.Wrap(err, "compute subscribed subnet") } - subs = append(subs, sub) + subnets = append(subnets, sub) } - return subs, nil + + return subnets, nil } // Spec pseudocode definition: diff --git a/beacon-chain/p2p/subnets_test.go b/beacon-chain/p2p/subnets_test.go index 1a72cf4349..3ae4b52f7c 100644 --- a/beacon-chain/p2p/subnets_test.go +++ b/beacon-chain/p2p/subnets_test.go @@ -514,17 +514,39 @@ func TestDataColumnSubnets(t *testing.T) { func TestSubnetComputation(t *testing.T) { db, err := enode.OpenDB("") - assert.NoError(t, err) + require.NoError(t, err) defer db.Close() - priv, _, err := crypto.GenerateSecp256k1Key(rand.Reader) - assert.NoError(t, err) - convertedKey, err := ecdsaprysm.ConvertFromInterfacePrivKey(priv) - assert.NoError(t, err) - localNode := enode.NewLocalNode(db, convertedKey) - retrievedSubnets, err := computeSubscribedSubnets(localNode.ID(), 1000) - assert.NoError(t, err) - assert.Equal(t, retrievedSubnets[0]+1, retrievedSubnets[1]) + priv, _, err := crypto.GenerateSecp256k1Key(rand.Reader) + require.NoError(t, err) + + convertedKey, err := ecdsaprysm.ConvertFromInterfacePrivKey(priv) + require.NoError(t, err) + + localNode := enode.NewLocalNode(db, convertedKey) + beaconConfig := params.BeaconConfig() + + t.Run("standard", func(t *testing.T) { + retrievedSubnets, err := computeSubscribedSubnets(localNode.ID(), 1000) + require.NoError(t, err) + require.Equal(t, beaconConfig.SubnetsPerNode, uint64(len(retrievedSubnets))) + require.Equal(t, retrievedSubnets[0]+1, retrievedSubnets[1]) + }) + + t.Run("subscribed to all", func(t *testing.T) { + gFlags := new(flags.GlobalFlags) + gFlags.SubscribeToAllSubnets = true + flags.Init(gFlags) + defer flags.Init(new(flags.GlobalFlags)) + + retrievedSubnets, err := computeSubscribedSubnets(localNode.ID(), 1000) + require.NoError(t, err) + require.Equal(t, beaconConfig.AttestationSubnetCount, uint64(len(retrievedSubnets))) + for i := range beaconConfig.AttestationSubnetCount { + require.Equal(t, i, retrievedSubnets[i]) + } + }) + } func TestInitializePersistentSubnets(t *testing.T) { diff --git a/beacon-chain/sync/subscriber.go 
b/beacon-chain/sync/subscriber.go index d509f1dbd0..e63c554171 100644 --- a/beacon-chain/sync/subscriber.go +++ b/beacon-chain/sync/subscriber.go @@ -716,10 +716,6 @@ func (s *Service) samplingSize() (uint64, error) { } func (s *Service) persistentAndAggregatorSubnetIndices(currentSlot primitives.Slot) map[uint64]bool { - if flags.Get().SubscribeToAllSubnets { - return mapFromCount(params.BeaconConfig().AttestationSubnetCount) - } - persistentSubnetIndices := persistentSubnetIndices() aggregatorSubnetIndices := aggregatorSubnetIndices(currentSlot) diff --git a/changelog/manu-advertise-atts.md b/changelog/manu-advertise-atts.md new file mode 100644 index 0000000000..dc95b4fe4d --- /dev/null +++ b/changelog/manu-advertise-atts.md @@ -0,0 +1,2 @@ +### Fixed +- Correctly advertise (in ENR and beacon API) attestation subnets when using `--subscribe-all-subnets`. From 47764696ce183390a425841ffd4f8c84f05321de Mon Sep 17 00:00:00 2001 From: Manu NALEPA Date: Thu, 16 Oct 2025 23:13:11 +0200 Subject: [PATCH 034/103] `randomPeer`: Return if the context is cancelled when waiting for peers. (#15876) * `randomPeer`: Return if the context is cancelled when waiting for peers. * `randomPeer`: Refactor to reduce indentation. --- beacon-chain/sync/data_column_sidecars.go | 24 ++++++++++++----------- changelog/manu-random-peer.md | 2 ++ 2 files changed, 15 insertions(+), 11 deletions(-) create mode 100644 changelog/manu-random-peer.md diff --git a/beacon-chain/sync/data_column_sidecars.go b/beacon-chain/sync/data_column_sidecars.go index d9b8715b23..1cdebb65e3 100644 --- a/beacon-chain/sync/data_column_sidecars.go +++ b/beacon-chain/sync/data_column_sidecars.go @@ -1122,19 +1122,21 @@ func randomPeer( } } - slices.Sort(nonRateLimitedPeers) - - if len(nonRateLimitedPeers) == 0 { - log.WithFields(logrus.Fields{ - "peerCount": peerCount, - "delay": waitPeriod, - }).Debug("Waiting for a peer with enough bandwidth for data column sidecars") - time.Sleep(waitPeriod) - continue + if len(nonRateLimitedPeers) > 0 { + slices.Sort(nonRateLimitedPeers) + randomIndex := randomSource.Intn(len(nonRateLimitedPeers)) + return nonRateLimitedPeers[randomIndex], nil } - randomIndex := randomSource.Intn(len(nonRateLimitedPeers)) - return nonRateLimitedPeers[randomIndex], nil + log.WithFields(logrus.Fields{ + "peerCount": peerCount, + "delay": waitPeriod, + }).Debug("Waiting for a peer with enough bandwidth for data column sidecars") + + select { + case <-time.After(waitPeriod): + case <-ctx.Done(): + } } return "", ctx.Err() diff --git a/changelog/manu-random-peer.md b/changelog/manu-random-peer.md new file mode 100644 index 0000000000..eb37eed1b3 --- /dev/null +++ b/changelog/manu-random-peer.md @@ -0,0 +1,2 @@ +### Fixed +- `randomPeer`: Return if the context is cancelled when waiting for peers. \ No newline at end of file From 0486631d731eae4fcf7872752e6981c8f97d50c1 Mon Sep 17 00:00:00 2001 From: Manu NALEPA Date: Thu, 16 Oct 2025 23:49:11 +0200 Subject: [PATCH 035/103] Improve error message when the byte count read from disk when reading a data column sidecar is lower than expected. (Mostly, because the file is truncated.) (#15881) * `VerifiedRODataColumnError`: Don't reuse Blob error. * `VerifiedRODataColumnFromDisk`: Use a specific error when the count of read bytes is lower than expected. * Add changelog. 
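
For context, a minimal sketch (not part of this patch) of why the explicit byte-count check matters: under the `io.Reader` contract, reading from a truncated file can return fewer bytes than requested together with a nil error, so wrapping that nil error (the previous behaviour) produced an error carrying no information. The helper names, `expectedSize`, and the wrap messages below are hypothetical; only `io.ReadFull` and `github.com/pkg/errors` are real APIs, and the patch itself operates on an `afero.File` rather than `*os.File`.

package example

import (
	"io"
	"os"

	"github.com/pkg/errors"
)

// readExactly follows the pattern used in this patch: read once, then check the
// byte count explicitly, because a short read may arrive with err == nil.
func readExactly(f *os.File, expectedSize int) ([]byte, error) {
	buf := make([]byte, expectedSize)
	n, err := f.Read(buf)
	if err != nil {
		return nil, errors.Wrap(err, "read sidecar")
	}
	if n != expectedSize {
		// err is nil here, so an explicit error must be built instead of wrapping it.
		return nil, errors.Errorf("read %d bytes while expecting %d", n, expectedSize)
	}
	return buf, nil
}

// readFull shows an alternative: io.ReadFull turns a short read into
// io.ErrUnexpectedEOF, so truncation surfaces through the error path directly.
func readFull(f *os.File, expectedSize int) ([]byte, error) {
	buf := make([]byte, expectedSize)
	if _, err := io.ReadFull(f, buf); err != nil {
		return nil, errors.Wrap(err, "read sidecar")
	}
	return buf, nil
}
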
--- beacon-chain/verification/error.go | 16 +++++++++++----- beacon-chain/verification/filesystem.go | 6 ++++-- changelog/manu-read-columns-from-disk-error.md | 2 ++ 3 files changed, 17 insertions(+), 7 deletions(-) create mode 100644 changelog/manu-read-columns-from-disk-error.md diff --git a/beacon-chain/verification/error.go b/beacon-chain/verification/error.go index f76688c9a8..51507efaaa 100644 --- a/beacon-chain/verification/error.go +++ b/beacon-chain/verification/error.go @@ -71,9 +71,15 @@ var ( errBatchBlockRootMismatch = errors.Join(ErrBlobInvalid, errors.New("sidecar block header root does not match signed block")) ) -// errVerificationImplementationFault indicates that a code path yielding VerifiedROBlobs has an implementation -// error, leading it to call VerifiedROBlobError with a nil error. -var errVerificationImplementationFault = errors.New("could not verify blob data or create a valid VerifiedROBlob") +var ( + // errBlobVerificationImplementationFault indicates that a code path yielding VerifiedROBlobs has an implementation + // error, leading it to call VerifiedROBlobError with a nil error. + errBlobVerificationImplementationFault = errors.New("could not verify blob data or create a valid VerifiedROBlob") + + // errDataColumnVerificationImplementationFault indicates that a code path yielding VerifiedRODataColumns has an implementation + // error, leading it to call VerifiedRODataColumnError with a nil error. + errDataColumnVerificationImplementationFault = errors.New("could not verify blob data or create a valid VerifiedROBlob") +) // VerificationMultiError is a custom error that can be used to access individual verification failures. type VerificationMultiError struct { @@ -111,7 +117,7 @@ func newVerificationMultiError(r *results, err error) VerificationMultiError { // create a value of that type in order to generate an error return value. func VerifiedROBlobError(err error) (blocks.VerifiedROBlob, error) { if err == nil { - return blocks.VerifiedROBlob{}, errVerificationImplementationFault + return blocks.VerifiedROBlob{}, errBlobVerificationImplementationFault } return blocks.VerifiedROBlob{}, err } @@ -120,7 +126,7 @@ func VerifiedROBlobError(err error) (blocks.VerifiedROBlob, error) { // create a value of that type in order to generate an error return value. func VerifiedRODataColumnError(err error) (blocks.VerifiedRODataColumn, error) { if err == nil { - return blocks.VerifiedRODataColumn{}, errVerificationImplementationFault + return blocks.VerifiedRODataColumn{}, errDataColumnVerificationImplementationFault } return blocks.VerifiedRODataColumn{}, err } diff --git a/beacon-chain/verification/filesystem.go b/beacon-chain/verification/filesystem.go index 27fd0b791b..2441f3565d 100644 --- a/beacon-chain/verification/filesystem.go +++ b/beacon-chain/verification/filesystem.go @@ -4,6 +4,7 @@ import ( fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + "github.com/pkg/errors" "github.com/spf13/afero" ) @@ -25,7 +26,8 @@ func VerifiedROBlobFromDisk(fs afero.Fs, root [32]byte, path string) (blocks.Ver return blocks.NewVerifiedROBlob(ro), nil } -// VerifiedRODataColumnFromDisk created a verified read-only data column sidecar from disk. +// VerifiedRODataColumnFromDisk creates a verified read-only data column sidecar from disk. +// The file cursor must be positioned at the start of the data column sidecar SSZ data. 
func VerifiedRODataColumnFromDisk(file afero.File, root [fieldparams.RootLength]byte, sszEncodedDataColumnSidecarSize uint32) (blocks.VerifiedRODataColumn, error) { // Read the ssz encoded data column sidecar from the file sszEncodedDataColumnSidecar := make([]byte, sszEncodedDataColumnSidecarSize) @@ -34,7 +36,7 @@ func VerifiedRODataColumnFromDisk(file afero.File, root [fieldparams.RootLength] return VerifiedRODataColumnError(err) } if uint32(count) != sszEncodedDataColumnSidecarSize { - return VerifiedRODataColumnError(err) + return VerifiedRODataColumnError(errors.Errorf("read %d bytes while expecting %d", count, sszEncodedDataColumnSidecarSize)) } // Unmarshal the SSZ encoded data column sidecar. diff --git a/changelog/manu-read-columns-from-disk-error.md b/changelog/manu-read-columns-from-disk-error.md new file mode 100644 index 0000000000..60a895b328 --- /dev/null +++ b/changelog/manu-read-columns-from-disk-error.md @@ -0,0 +1,2 @@ +### Fixed +- Improve error message when the byte count read from disk when reading a data column sidecars is lower than expected. (Mostly, because the file is truncated.) \ No newline at end of file From fdb06ea4617eb807ac963b43936c37a691008446 Mon Sep 17 00:00:00 2001 From: kasey <489222+kasey@users.noreply.github.com> Date: Fri, 17 Oct 2025 09:03:15 -0500 Subject: [PATCH 036/103] clear genesis state file when --(force-)clear-db is specified (#15883) Co-authored-by: Kasey Kirkham --- beacon-chain/node/clear_db.go | 18 ++++++++++++++++++ beacon-chain/node/node.go | 3 +++ changelog/kasey_clear-db-rm-genesis.md | 2 ++ genesis/initialize.go | 5 +++-- genesis/storage.go | 7 ++++--- 5 files changed, 30 insertions(+), 5 deletions(-) create mode 100644 changelog/kasey_clear-db-rm-genesis.md diff --git a/beacon-chain/node/clear_db.go b/beacon-chain/node/clear_db.go index ce9e8a24a7..9476e113c2 100644 --- a/beacon-chain/node/clear_db.go +++ b/beacon-chain/node/clear_db.go @@ -2,11 +2,13 @@ package node import ( "context" + "os" "github.com/OffchainLabs/prysm/v6/beacon-chain/db/filesystem" "github.com/OffchainLabs/prysm/v6/beacon-chain/db/kv" "github.com/OffchainLabs/prysm/v6/beacon-chain/db/slasherkv" "github.com/OffchainLabs/prysm/v6/cmd" + "github.com/OffchainLabs/prysm/v6/genesis" "github.com/pkg/errors" "github.com/urfave/cli/v2" ) @@ -36,6 +38,22 @@ func (c *dbClearer) clearKV(ctx context.Context, db *kv.Store) (*kv.Store, error return kv.NewKVStore(ctx, db.DatabasePath()) } +func (c *dbClearer) clearGenesis(dir string) error { + if !c.shouldProceed() { + return nil + } + + gfile, err := genesis.FindStateFile(dir) + if err != nil { + return nil + } + + if err := os.Remove(gfile.FilePath()); err != nil { + return errors.Wrapf(err, "genesis state file not removed: %s", gfile.FilePath()) + } + return nil +} + func (c *dbClearer) clearBlobs(bs *filesystem.BlobStorage) error { if !c.shouldProceed() { return nil diff --git a/beacon-chain/node/node.go b/beacon-chain/node/node.go index 129695ae3a..36678afb70 100644 --- a/beacon-chain/node/node.go +++ b/beacon-chain/node/node.go @@ -177,6 +177,9 @@ func New(cliCtx *cli.Context, cancel context.CancelFunc, opts ...Option) (*Beaco } beacon.db = kvdb + if err := dbClearer.clearGenesis(dataDir); err != nil { + return nil, errors.Wrap(err, "could not clear genesis state") + } providers := append(beacon.GenesisProviders, kv.NewLegacyGenesisProvider(kvdb)) if err := genesis.Initialize(ctx, dataDir, providers...); err != nil { return nil, errors.Wrap(err, "could not initialize genesis state") diff --git 
a/changelog/kasey_clear-db-rm-genesis.md b/changelog/kasey_clear-db-rm-genesis.md new file mode 100644 index 0000000000..2cc3153512 --- /dev/null +++ b/changelog/kasey_clear-db-rm-genesis.md @@ -0,0 +1,2 @@ +### Fixed +- Delete the genesis state file when --clear-db / --force-clear-db is specified. diff --git a/genesis/initialize.go b/genesis/initialize.go index e5690387de..c70367a964 100644 --- a/genesis/initialize.go +++ b/genesis/initialize.go @@ -23,7 +23,7 @@ func Initialize(ctx context.Context, dir string, providers ...Provider) error { setPkgVar(emb, true) return nil } - gd, err := findGenesisFile(dir) + gd, err := FindStateFile(dir) if err == nil { setPkgVar(gd, true) return nil @@ -65,7 +65,8 @@ func newGenesisData(st state.BeaconState, dir string) (GenesisData, error) { }, nil } -func findGenesisFile(dir string) (GenesisData, error) { +// FindStateFile searches for a valid genesis state file in the specified directory. +func FindStateFile(dir string) (GenesisData, error) { if dir == "" { return GenesisData{}, ErrFilePathUnset } diff --git a/genesis/storage.go b/genesis/storage.go index 8879b1077a..b0c0298a99 100644 --- a/genesis/storage.go +++ b/genesis/storage.go @@ -100,7 +100,8 @@ type GenesisData struct { initialized bool } -func (d GenesisData) filePath() string { +// FilePath returns the full path to the genesis state file. +func (d GenesisData) FilePath() string { parts := [3]string{} parts[genesisPart] = "genesis" parts[timePart] = strconv.FormatInt(d.Time.Unix(), 10) @@ -115,7 +116,7 @@ func persist(d GenesisData) error { if d.FileDir == "" { return ErrFilePathUnset } - fpath := d.filePath() + fpath := d.FilePath() sb, err := d.State.MarshalSSZ() if err != nil { return errors.Wrap(err, "marshal ssz") @@ -144,7 +145,7 @@ func loadState() (state.BeaconState, error) { stateMu.Lock() defer stateMu.Unlock() - s, err := stateFromFile(data.filePath()) + s, err := stateFromFile(data.FilePath()) if err != nil { return nil, errors.Wrapf(err, "InitializeFromProtoUnsafePhase0") } From 64ec6658903eb7089b0ed1aa5c592620c4b97f79 Mon Sep 17 00:00:00 2001 From: terence Date: Fri, 17 Oct 2025 12:03:53 -0700 Subject: [PATCH 037/103] Fix sync committee subscription to use subnet indices instead of committee indices (#15885) Co-authored-by: james-prysm <90280386+james-prysm@users.noreply.github.com> --- beacon-chain/rpc/eth/validator/handlers.go | 13 ++++++++++++- beacon-chain/rpc/eth/validator/handlers_test.go | 5 ++--- .../ttsao_fix-sync-committee-subnet-indices.md | 3 +++ 3 files changed, 17 insertions(+), 4 deletions(-) create mode 100644 changelog/ttsao_fix-sync-committee-subnet-indices.md diff --git a/beacon-chain/rpc/eth/validator/handlers.go b/beacon-chain/rpc/eth/validator/handlers.go index 043ad17bb8..16df7d5448 100644 --- a/beacon-chain/rpc/eth/validator/handlers.go +++ b/beacon-chain/rpc/eth/validator/handlers.go @@ -597,7 +597,18 @@ func (s *Server) SubmitSyncCommitteeSubscription(w http.ResponseWriter, r *http. 
epochDuration := time.Duration(params.BeaconConfig().SlotsPerEpoch.Mul(params.BeaconConfig().SecondsPerSlot)) * time.Second totalDuration := epochDuration * time.Duration(epochsToWatch) - cache.SyncSubnetIDs.AddSyncCommitteeSubnets(pubkey48[:], startEpoch, sub.SyncCommitteeIndices, totalDuration) + subcommitteeSize := params.BeaconConfig().SyncCommitteeSize / params.BeaconConfig().SyncCommitteeSubnetCount + seen := make(map[uint64]bool) + var subnetIndices []uint64 + + for _, idx := range sub.SyncCommitteeIndices { + subnetIdx := idx / subcommitteeSize + if !seen[subnetIdx] { + seen[subnetIdx] = true + subnetIndices = append(subnetIndices, subnetIdx) + } + } + cache.SyncSubnetIDs.AddSyncCommitteeSubnets(pubkey48[:], startEpoch, subnetIndices, totalDuration) } } diff --git a/beacon-chain/rpc/eth/validator/handlers_test.go b/beacon-chain/rpc/eth/validator/handlers_test.go index 3baba284b8..d0f0d90747 100644 --- a/beacon-chain/rpc/eth/validator/handlers_test.go +++ b/beacon-chain/rpc/eth/validator/handlers_test.go @@ -1049,9 +1049,8 @@ func TestSubmitSyncCommitteeSubscription(t *testing.T) { s.SubmitSyncCommitteeSubscription(writer, request) assert.Equal(t, http.StatusOK, writer.Code) subnets, _, _, _ := cache.SyncSubnetIDs.GetSyncCommitteeSubnets(pubkeys[1], 0) - require.Equal(t, 2, len(subnets)) + require.Equal(t, 1, len(subnets)) assert.Equal(t, uint64(0), subnets[0]) - assert.Equal(t, uint64(2), subnets[1]) }) t.Run("multiple", func(t *testing.T) { cache.SyncSubnetIDs.EmptyAllCaches() @@ -1070,7 +1069,7 @@ func TestSubmitSyncCommitteeSubscription(t *testing.T) { assert.Equal(t, uint64(0), subnets[0]) subnets, _, _, _ = cache.SyncSubnetIDs.GetSyncCommitteeSubnets(pubkeys[1], 0) require.Equal(t, 1, len(subnets)) - assert.Equal(t, uint64(2), subnets[0]) + assert.Equal(t, uint64(0), subnets[0]) }) t.Run("no body", func(t *testing.T) { request := httptest.NewRequest(http.MethodPost, "http://example.com", nil) diff --git a/changelog/ttsao_fix-sync-committee-subnet-indices.md b/changelog/ttsao_fix-sync-committee-subnet-indices.md new file mode 100644 index 0000000000..e2793410b2 --- /dev/null +++ b/changelog/ttsao_fix-sync-committee-subnet-indices.md @@ -0,0 +1,3 @@ +### Fixed + +- Fix sync committee subscription to use subnet indices instead of committee indices From 90190883bce326496e72c3580133aff789dc28cb Mon Sep 17 00:00:00 2001 From: Muzry Date: Mon, 20 Oct 2025 22:17:32 +0800 Subject: [PATCH 038/103] Fixed metadata extraction on Windows by correctly splitting file paths (#15899) * Fixed metadata extraction on Windows by correctly splitting file paths * `TestExtractFileMetadata`: Refactor a bit. --------- Co-authored-by: Manu NALEPA --- beacon-chain/db/filesystem/data_column.go | 5 +-- .../db/filesystem/data_column_test.go | 35 +++++++++++++++++++ changelog/muzry_fix_extract_metadata_file.md | 2 ++ 3 files changed, 40 insertions(+), 2 deletions(-) create mode 100644 changelog/muzry_fix_extract_metadata_file.md diff --git a/beacon-chain/db/filesystem/data_column.go b/beacon-chain/db/filesystem/data_column.go index 49331b4a01..ac33e8870f 100644 --- a/beacon-chain/db/filesystem/data_column.go +++ b/beacon-chain/db/filesystem/data_column.go @@ -200,6 +200,7 @@ func (dcs *DataColumnStorage) WarmCache() { fileMetadata, err := extractFileMetadata(path) if err != nil { log.WithError(err).Error("Error encountered while extracting file metadata") + return nil } // Open the data column filesystem file. 
@@ -988,8 +989,8 @@ func filePath(root [fieldparams.RootLength]byte, epoch primitives.Epoch) string // extractFileMetadata extracts the metadata from a file path. // If the path is not a leaf, it returns nil. func extractFileMetadata(path string) (*fileMetadata, error) { - // Is this Windows friendly? - parts := strings.Split(path, "/") + // Use filepath.Separator to handle both Windows (\) and Unix (/) path separators + parts := strings.Split(path, string(filepath.Separator)) if len(parts) != 3 { return nil, errors.Errorf("unexpected file %s", path) } diff --git a/beacon-chain/db/filesystem/data_column_test.go b/beacon-chain/db/filesystem/data_column_test.go index 86989408f4..23dc6d5c72 100644 --- a/beacon-chain/db/filesystem/data_column_test.go +++ b/beacon-chain/db/filesystem/data_column_test.go @@ -3,6 +3,7 @@ package filesystem import ( "encoding/binary" "os" + "path/filepath" "testing" fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" @@ -725,3 +726,37 @@ func TestPrune(t *testing.T) { require.Equal(t, true, compareSlices([]string{"0x0de28a18cae63cbc6f0b20dc1afb0b1df38da40824a5f09f92d485ade04de97f.sszs"}, dirs)) }) } + +func TestExtractFileMetadata(t *testing.T) { + t.Run("Unix", func(t *testing.T) { + // Test with Unix-style path separators (/) + path := "12/1234/0x8bb2f09de48c102635622dc27e6de03ae2b22639df7c33edbc8222b2ec423746.sszs" + metadata, err := extractFileMetadata(path) + if filepath.Separator == '/' { + // On Unix systems, this should succeed + require.NoError(t, err) + require.Equal(t, uint64(12), metadata.period) + require.Equal(t, primitives.Epoch(1234), metadata.epoch) + return + } + + // On Windows systems, this should fail because it uses the wrong separator + require.NotNil(t, err) + }) + + t.Run("Windows", func(t *testing.T) { + // Test with Windows-style path separators (\) + path := "12\\1234\\0x8bb2f09de48c102635622dc27e6de03ae2b22639df7c33edbc8222b2ec423746.sszs" + metadata, err := extractFileMetadata(path) + if filepath.Separator == '\\' { + // On Windows systems, this should succeed + require.NoError(t, err) + require.Equal(t, uint64(12), metadata.period) + require.Equal(t, primitives.Epoch(1234), metadata.epoch) + return + } + + // On Unix systems, this should fail because it uses the wrong separator + require.NotNil(t, err) + }) +} diff --git a/changelog/muzry_fix_extract_metadata_file.md b/changelog/muzry_fix_extract_metadata_file.md new file mode 100644 index 0000000000..445f15f94f --- /dev/null +++ b/changelog/muzry_fix_extract_metadata_file.md @@ -0,0 +1,2 @@ +### Fixed +- Fixed metadata extraction on Windows by correctly splitting file paths \ No newline at end of file From 5a897dfa6b6dda19fc23040d01119dd8163fdda2 Mon Sep 17 00:00:00 2001 From: Jun Song <87601811+syjn99@users.noreply.github.com> Date: Mon, 20 Oct 2025 17:24:06 +0100 Subject: [PATCH 039/103] SSZ-QL: Add endpoints (`BeaconState`/`BeaconBlock`) (#15888) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Move ssz_query objects into testing folder (ensuring test objects only used in test environment) * Add containers for response * Export sszInfo * Add QueryBeaconState/Block * Add comments and few refactor * Fix merge conflict issues * Return 500 when calculate offset fails * Add test for QueryBeaconState * Add test for QueryBeaconBlock * Changelog :) * Rename `QuerySSZRequest` to `SSZQueryRequest` * Fix middleware hooks for RPC to accept JSON from client and return SSZ * Convert to `SSZObject` directly from proto * Move 
marshalling/calculating hash tree root part after `CalculateOffsetAndLength` * Make nogo happy * Add informing comment for using proto unsafe conversion --------- Co-authored-by: Radosław Kapka --- api/server/structs/endpoints_beacon.go | 5 + beacon-chain/rpc/endpoints.go | 26 +- beacon-chain/rpc/endpoints_test.go | 2 + beacon-chain/rpc/prysm/beacon/BUILD.bazel | 10 + beacon-chain/rpc/prysm/beacon/server.go | 1 + beacon-chain/rpc/prysm/beacon/ssz_query.go | 202 +++++++++ .../rpc/prysm/beacon/ssz_query_test.go | 335 ++++++++++++++ changelog/syjn99_ssz-ql-endpoints.md | 3 + encoding/ssz/query/BUILD.bazel | 2 +- encoding/ssz/query/analyzer.go | 32 +- encoding/ssz/query/container.go | 4 +- encoding/ssz/query/list.go | 4 +- encoding/ssz/query/query.go | 4 +- encoding/ssz/query/query_test.go | 2 +- encoding/ssz/query/ssz_info.go | 48 +- encoding/ssz/query/ssz_object.go | 6 +- encoding/ssz/query/vector.go | 4 +- proto/ssz_query/BUILD.bazel | 25 +- proto/ssz_query/response.pb.go | 282 ++++++++++++ proto/ssz_query/response.proto | 27 ++ proto/ssz_query/response.ssz.go | 410 ++++++++++++++++++ proto/ssz_query/testing/BUILD.bazel | 70 +++ .../test_containers.pb.go} | 287 ++++++------ .../test_containers.proto} | 4 +- .../test_containers.ssz.go} | 2 +- proto/testing/test.pb.go | 227 +++------- 26 files changed, 1638 insertions(+), 386 deletions(-) create mode 100644 beacon-chain/rpc/prysm/beacon/ssz_query.go create mode 100644 beacon-chain/rpc/prysm/beacon/ssz_query_test.go create mode 100644 changelog/syjn99_ssz-ql-endpoints.md create mode 100755 proto/ssz_query/response.pb.go create mode 100644 proto/ssz_query/response.proto create mode 100644 proto/ssz_query/response.ssz.go create mode 100644 proto/ssz_query/testing/BUILD.bazel rename proto/ssz_query/{ssz_query.pb.go => testing/test_containers.pb.go} (55%) rename proto/ssz_query/{ssz_query.proto => testing/test_containers.proto} (99%) rename proto/ssz_query/{ssz_query.ssz.go => testing/test_containers.ssz.go} (99%) diff --git a/api/server/structs/endpoints_beacon.go b/api/server/structs/endpoints_beacon.go index f9c0a2a380..3a2788c141 100644 --- a/api/server/structs/endpoints_beacon.go +++ b/api/server/structs/endpoints_beacon.go @@ -296,3 +296,8 @@ type GetBlobsResponse struct { Finalized bool `json:"finalized"` Data []string `json:"data"` //blobs } + +type SSZQueryRequest struct { + Query string `json:"query"` + IncludeProof bool `json:"include_proof,omitempty"` +} diff --git a/beacon-chain/rpc/endpoints.go b/beacon-chain/rpc/endpoints.go index 3092955b54..b2d4de50df 100644 --- a/beacon-chain/rpc/endpoints.go +++ b/beacon-chain/rpc/endpoints.go @@ -97,7 +97,7 @@ func (s *Service) endpoints( endpoints = append(endpoints, s.beaconEndpoints(ch, stater, blocker, validatorServer, coreService)...) endpoints = append(endpoints, s.configEndpoints()...) endpoints = append(endpoints, s.eventsEndpoints()...) - endpoints = append(endpoints, s.prysmBeaconEndpoints(ch, stater, coreService)...) + endpoints = append(endpoints, s.prysmBeaconEndpoints(ch, stater, blocker, coreService)...) endpoints = append(endpoints, s.prysmNodeEndpoints()...) endpoints = append(endpoints, s.prysmValidatorEndpoints(stater, coreService)...) 
@@ -1184,6 +1184,7 @@ func (s *Service) eventsEndpoints() []endpoint { func (s *Service) prysmBeaconEndpoints( ch *stategen.CanonicalHistory, stater lookup.Stater, + blocker lookup.Blocker, coreService *core.Service, ) []endpoint { server := &beaconprysm.Server{ @@ -1194,6 +1195,7 @@ func (s *Service) prysmBeaconEndpoints( CanonicalHistory: ch, BeaconDB: s.cfg.BeaconDB, Stater: stater, + Blocker: blocker, ChainInfoFetcher: s.cfg.ChainInfoFetcher, FinalizationFetcher: s.cfg.FinalizationFetcher, CoreService: coreService, @@ -1266,6 +1268,28 @@ func (s *Service) prysmBeaconEndpoints( handler: server.PublishBlobs, methods: []string{http.MethodPost}, }, + { + template: "/prysm/v1/beacon/states/{state_id}/query", + name: namespace + ".QueryBeaconState", + middleware: []middleware.Middleware{ + middleware.ContentTypeHandler([]string{api.JsonMediaType}), + middleware.AcceptHeaderHandler([]string{api.OctetStreamMediaType}), + middleware.AcceptEncodingHeaderHandler(), + }, + handler: server.QueryBeaconState, + methods: []string{http.MethodPost}, + }, + { + template: "/prysm/v1/beacon/blocks/{block_id}/query", + name: namespace + ".QueryBeaconBlock", + middleware: []middleware.Middleware{ + middleware.ContentTypeHandler([]string{api.JsonMediaType}), + middleware.AcceptHeaderHandler([]string{api.OctetStreamMediaType}), + middleware.AcceptEncodingHeaderHandler(), + }, + handler: server.QueryBeaconBlock, + methods: []string{http.MethodPost}, + }, } } diff --git a/beacon-chain/rpc/endpoints_test.go b/beacon-chain/rpc/endpoints_test.go index 27d62a97ee..e2a579cdd7 100644 --- a/beacon-chain/rpc/endpoints_test.go +++ b/beacon-chain/rpc/endpoints_test.go @@ -127,6 +127,8 @@ func Test_endpoints(t *testing.T) { "/prysm/v1/beacon/states/{state_id}/validator_count": {http.MethodGet}, "/prysm/v1/beacon/chain_head": {http.MethodGet}, "/prysm/v1/beacon/blobs": {http.MethodPost}, + "/prysm/v1/beacon/states/{state_id}/query": {http.MethodPost}, + "/prysm/v1/beacon/blocks/{block_id}/query": {http.MethodPost}, } prysmNodeRoutes := map[string][]string{ diff --git a/beacon-chain/rpc/prysm/beacon/BUILD.bazel b/beacon-chain/rpc/prysm/beacon/BUILD.bazel index e4a36ab4ce..6315d6ef7c 100644 --- a/beacon-chain/rpc/prysm/beacon/BUILD.bazel +++ b/beacon-chain/rpc/prysm/beacon/BUILD.bazel @@ -5,11 +5,13 @@ go_library( srcs = [ "handlers.go", "server.go", + "ssz_query.go", "validator_count.go", ], importpath = "github.com/OffchainLabs/prysm/v6/beacon-chain/rpc/prysm/beacon", visibility = ["//visibility:public"], deps = [ + "//api:go_default_library", "//api/server/structs:go_default_library", "//beacon-chain/blockchain:go_default_library", "//beacon-chain/core/helpers:go_default_library", @@ -27,10 +29,13 @@ go_library( "//consensus-types/primitives:go_default_library", "//consensus-types/validator:go_default_library", "//encoding/bytesutil:go_default_library", + "//encoding/ssz/query:go_default_library", "//monitoring/tracing/trace:go_default_library", "//network/httputil:go_default_library", "//proto/eth/v1:go_default_library", "//proto/prysm/v1alpha1:go_default_library", + "//proto/ssz_query:go_default_library", + "//runtime/version:go_default_library", "//time/slots:go_default_library", "@com_github_ethereum_go_ethereum//common/hexutil:go_default_library", "@com_github_pkg_errors//:go_default_library", @@ -41,10 +46,12 @@ go_test( name = "go_default_test", srcs = [ "handlers_test.go", + "ssz_query_test.go", "validator_count_test.go", ], embed = [":go_default_library"], deps = [ + "//api:go_default_library", 
"//api/server/structs:go_default_library", "//beacon-chain/blockchain/testing:go_default_library", "//beacon-chain/core/helpers:go_default_library", @@ -63,10 +70,13 @@ go_test( "//config/fieldparams:go_default_library", "//config/params:go_default_library", "//consensus-types/blocks:go_default_library", + "//consensus-types/interfaces:go_default_library", "//consensus-types/primitives:go_default_library", "//encoding/bytesutil:go_default_library", "//network/httputil:go_default_library", "//proto/prysm/v1alpha1:go_default_library", + "//proto/ssz_query:go_default_library", + "//runtime/version:go_default_library", "//testing/assert:go_default_library", "//testing/require:go_default_library", "//testing/util:go_default_library", diff --git a/beacon-chain/rpc/prysm/beacon/server.go b/beacon-chain/rpc/prysm/beacon/server.go index 8789552a54..efb9714b04 100644 --- a/beacon-chain/rpc/prysm/beacon/server.go +++ b/beacon-chain/rpc/prysm/beacon/server.go @@ -18,6 +18,7 @@ type Server struct { CanonicalHistory *stategen.CanonicalHistory BeaconDB beacondb.ReadOnlyDatabase Stater lookup.Stater + Blocker lookup.Blocker ChainInfoFetcher blockchain.ChainInfoFetcher FinalizationFetcher blockchain.FinalizationFetcher CoreService *core.Service diff --git a/beacon-chain/rpc/prysm/beacon/ssz_query.go b/beacon-chain/rpc/prysm/beacon/ssz_query.go new file mode 100644 index 0000000000..063570d91d --- /dev/null +++ b/beacon-chain/rpc/prysm/beacon/ssz_query.go @@ -0,0 +1,202 @@ +package beacon + +import ( + "encoding/json" + "errors" + "io" + "net/http" + + "github.com/OffchainLabs/prysm/v6/api" + "github.com/OffchainLabs/prysm/v6/api/server/structs" + "github.com/OffchainLabs/prysm/v6/beacon-chain/rpc/eth/shared" + "github.com/OffchainLabs/prysm/v6/beacon-chain/rpc/lookup" + "github.com/OffchainLabs/prysm/v6/encoding/ssz/query" + "github.com/OffchainLabs/prysm/v6/monitoring/tracing/trace" + "github.com/OffchainLabs/prysm/v6/network/httputil" + sszquerypb "github.com/OffchainLabs/prysm/v6/proto/ssz_query" + "github.com/OffchainLabs/prysm/v6/runtime/version" +) + +// QueryBeaconState handles SSZ Query request for BeaconState. +// Returns as bytes serialized SSZQueryResponse. +func (s *Server) QueryBeaconState(w http.ResponseWriter, r *http.Request) { + ctx, span := trace.StartSpan(r.Context(), "beacon.QueryBeaconState") + defer span.End() + + stateID := r.PathValue("state_id") + if stateID == "" { + httputil.HandleError(w, "state_id is required in URL params", http.StatusBadRequest) + return + } + + // Validate path before lookup: it might be expensive. 
+ var req structs.SSZQueryRequest + err := json.NewDecoder(r.Body).Decode(&req) + switch { + case errors.Is(err, io.EOF): + httputil.HandleError(w, "No data submitted", http.StatusBadRequest) + return + case err != nil: + httputil.HandleError(w, "Could not decode request body: "+err.Error(), http.StatusBadRequest) + return + } + + if len(req.Query) == 0 { + httputil.HandleError(w, "Empty query submitted", http.StatusBadRequest) + return + } + + path, err := query.ParsePath(req.Query) + if err != nil { + httputil.HandleError(w, "Could not parse path '"+req.Query+"': "+err.Error(), http.StatusBadRequest) + return + } + + stateRoot, err := s.Stater.StateRoot(ctx, []byte(stateID)) + if err != nil { + var rootNotFoundErr *lookup.StateRootNotFoundError + if errors.As(err, &rootNotFoundErr) { + httputil.HandleError(w, "State root not found: "+rootNotFoundErr.Error(), http.StatusNotFound) + return + } + httputil.HandleError(w, "Could not get state root: "+err.Error(), http.StatusInternalServerError) + return + } + + st, err := s.Stater.State(ctx, []byte(stateID)) + if err != nil { + shared.WriteStateFetchError(w, err) + return + } + + // NOTE: Using unsafe conversion to proto is acceptable here, + // as we play with a copy of the state returned by Stater. + sszObject, ok := st.ToProtoUnsafe().(query.SSZObject) + if !ok { + httputil.HandleError(w, "Unsupported state version for querying: "+version.String(st.Version()), http.StatusBadRequest) + return + } + + info, err := query.AnalyzeObject(sszObject) + if err != nil { + httputil.HandleError(w, "Could not analyze state object: "+err.Error(), http.StatusInternalServerError) + return + } + + _, offset, length, err := query.CalculateOffsetAndLength(info, path) + if err != nil { + httputil.HandleError(w, "Could not calculate offset and length for path '"+req.Query+"': "+err.Error(), http.StatusInternalServerError) + return + } + + encodedState, err := st.MarshalSSZ() + if err != nil { + httputil.HandleError(w, "Could not marshal state to SSZ: "+err.Error(), http.StatusInternalServerError) + return + } + + response := &sszquerypb.SSZQueryResponse{ + Root: stateRoot, + Result: encodedState[offset : offset+length], + } + + responseSsz, err := response.MarshalSSZ() + if err != nil { + httputil.HandleError(w, "Could not marshal response to SSZ: "+err.Error(), http.StatusInternalServerError) + return + } + + w.Header().Set(api.VersionHeader, version.String(st.Version())) + httputil.WriteSsz(w, responseSsz) +} + +// QueryBeaconState handles SSZ Query request for BeaconState. +// Returns as bytes serialized SSZQueryResponse. +func (s *Server) QueryBeaconBlock(w http.ResponseWriter, r *http.Request) { + ctx, span := trace.StartSpan(r.Context(), "beacon.QueryBeaconBlock") + defer span.End() + + blockId := r.PathValue("block_id") + if blockId == "" { + httputil.HandleError(w, "block_id is required in URL params", http.StatusBadRequest) + return + } + + // Validate path before lookup: it might be expensive. 
+ var req structs.SSZQueryRequest + err := json.NewDecoder(r.Body).Decode(&req) + switch { + case errors.Is(err, io.EOF): + httputil.HandleError(w, "No data submitted", http.StatusBadRequest) + return + case err != nil: + httputil.HandleError(w, "Could not decode request body: "+err.Error(), http.StatusBadRequest) + return + } + + if len(req.Query) == 0 { + httputil.HandleError(w, "Empty query submitted", http.StatusBadRequest) + return + } + + path, err := query.ParsePath(req.Query) + if err != nil { + httputil.HandleError(w, "Could not parse path '"+req.Query+"': "+err.Error(), http.StatusBadRequest) + return + } + + signedBlock, err := s.Blocker.Block(ctx, []byte(blockId)) + if !shared.WriteBlockFetchError(w, signedBlock, err) { + return + } + + protoBlock, err := signedBlock.Block().Proto() + if err != nil { + httputil.HandleError(w, "Could not convert block to proto: "+err.Error(), http.StatusInternalServerError) + return + } + + block, ok := protoBlock.(query.SSZObject) + if !ok { + httputil.HandleError(w, "Unsupported block version for querying: "+version.String(signedBlock.Version()), http.StatusBadRequest) + return + } + + info, err := query.AnalyzeObject(block) + if err != nil { + httputil.HandleError(w, "Could not analyze block object: "+err.Error(), http.StatusInternalServerError) + return + } + + _, offset, length, err := query.CalculateOffsetAndLength(info, path) + if err != nil { + httputil.HandleError(w, "Could not calculate offset and length for path '"+req.Query+"': "+err.Error(), http.StatusInternalServerError) + return + } + + encodedBlock, err := signedBlock.Block().MarshalSSZ() + if err != nil { + httputil.HandleError(w, "Could not marshal block to SSZ: "+err.Error(), http.StatusInternalServerError) + return + } + + blockRoot, err := block.HashTreeRoot() + if err != nil { + httputil.HandleError(w, "Could not compute block root: "+err.Error(), http.StatusInternalServerError) + return + } + + response := &sszquerypb.SSZQueryResponse{ + Root: blockRoot[:], + Result: encodedBlock[offset : offset+length], + } + + responseSsz, err := response.MarshalSSZ() + if err != nil { + httputil.HandleError(w, "Could not marshal response to SSZ: "+err.Error(), http.StatusInternalServerError) + return + } + + w.Header().Set(api.VersionHeader, version.String(signedBlock.Version())) + httputil.WriteSsz(w, responseSsz) +} diff --git a/beacon-chain/rpc/prysm/beacon/ssz_query_test.go b/beacon-chain/rpc/prysm/beacon/ssz_query_test.go new file mode 100644 index 0000000000..15fdc7fe3b --- /dev/null +++ b/beacon-chain/rpc/prysm/beacon/ssz_query_test.go @@ -0,0 +1,335 @@ +package beacon + +import ( + "bytes" + "context" + "encoding/binary" + "encoding/json" + "net/http" + "net/http/httptest" + "strings" + "testing" + + "github.com/OffchainLabs/prysm/v6/api" + "github.com/OffchainLabs/prysm/v6/api/server/structs" + chainMock "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/testing" + "github.com/OffchainLabs/prysm/v6/beacon-chain/rpc/testutil" + "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" + "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" + eth "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + sszquerypb "github.com/OffchainLabs/prysm/v6/proto/ssz_query" + "github.com/OffchainLabs/prysm/v6/runtime/version" + "github.com/OffchainLabs/prysm/v6/testing/assert" + "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v6/testing/util" + 
"github.com/ethereum/go-ethereum/common/hexutil" + "github.com/prysmaticlabs/go-bitfield" +) + +func TestQueryBeaconState(t *testing.T) { + ctx := context.Background() + + st, _ := util.DeterministicGenesisState(t, 16) + require.NoError(t, st.SetSlot(primitives.Slot(42))) + stateRoot, err := st.HashTreeRoot(ctx) + require.NoError(t, err) + require.NoError(t, st.UpdateBalancesAtIndex(0, 42000000000)) + + tests := []struct { + path string + expectedValue []byte + }{ + { + path: ".slot", + expectedValue: func() []byte { + slot := st.Slot() + result, _ := slot.MarshalSSZ() + return result + }(), + }, + { + path: ".latest_block_header", + expectedValue: func() []byte { + header := st.LatestBlockHeader() + result, _ := header.MarshalSSZ() + return result + }(), + }, + { + path: ".validators", + expectedValue: func() []byte { + b := make([]byte, 0) + validators := st.Validators() + for _, v := range validators { + vBytes, _ := v.MarshalSSZ() + b = append(b, vBytes...) + } + return b + + }(), + }, + { + path: ".validators[0]", + expectedValue: func() []byte { + v, _ := st.ValidatorAtIndex(0) + result, _ := v.MarshalSSZ() + return result + }(), + }, + { + path: ".validators[0].withdrawal_credentials", + expectedValue: func() []byte { + v, _ := st.ValidatorAtIndex(0) + return v.WithdrawalCredentials + }(), + }, + { + path: ".validators[0].effective_balance", + expectedValue: func() []byte { + v, _ := st.ValidatorAtIndex(0) + b := make([]byte, 8) + binary.LittleEndian.PutUint64(b, uint64(v.EffectiveBalance)) + return b + }(), + }, + } + + for _, tt := range tests { + t.Run(tt.path, func(t *testing.T) { + chainService := &chainMock.ChainService{Optimistic: false, FinalizedRoots: make(map[[32]byte]bool)} + s := &Server{ + OptimisticModeFetcher: chainService, + FinalizationFetcher: chainService, + Stater: &testutil.MockStater{ + BeaconStateRoot: stateRoot[:], + BeaconState: st, + }, + } + + requestBody := &structs.SSZQueryRequest{ + Query: tt.path, + } + var buf bytes.Buffer + require.NoError(t, json.NewEncoder(&buf).Encode(requestBody)) + + request := httptest.NewRequest(http.MethodPost, "http://example.com/prysm/v1/beacon/states/{state_id}/query", &buf) + request.SetPathValue("state_id", "head") + writer := httptest.NewRecorder() + writer.Body = &bytes.Buffer{} + + s.QueryBeaconState(writer, request) + require.Equal(t, http.StatusOK, writer.Code) + assert.Equal(t, version.String(version.Phase0), writer.Header().Get(api.VersionHeader)) + + expectedResponse := &sszquerypb.SSZQueryResponse{ + Root: stateRoot[:], + Result: tt.expectedValue, + } + sszExpectedResponse, err := expectedResponse.MarshalSSZ() + require.NoError(t, err) + assert.DeepEqual(t, sszExpectedResponse, writer.Body.Bytes()) + }) + } +} + +func TestQueryBeaconStateInvalidRequest(t *testing.T) { + ctx := context.Background() + + st, _ := util.DeterministicGenesisState(t, 16) + require.NoError(t, st.SetSlot(primitives.Slot(42))) + stateRoot, err := st.HashTreeRoot(ctx) + require.NoError(t, err) + + tests := []struct { + name string + stateId string + path string + code int + errorString string + }{ + { + name: "empty query submitted", + stateId: "head", + path: "", + errorString: "Empty query submitted", + }, + { + name: "invalid path", + stateId: "head", + path: ".invalid[]]", + errorString: "Could not parse path", + }, + { + name: "non-existent field", + stateId: "head", + path: ".non_existent_field", + code: http.StatusInternalServerError, + errorString: "Could not calculate offset and length for path", + }, + { + name: "empty state ID", 
+			stateId: "",
+			path:    "",
+		},
+		{
+			name:    "far future slot",
+			stateId: "1000000000000",
+			path:    "",
+		},
+	}
+
+	for _, tt := range tests {
+		t.Run(tt.name, func(t *testing.T) {
+			chainService := &chainMock.ChainService{Optimistic: false, FinalizedRoots: make(map[[32]byte]bool)}
+			s := &Server{
+				OptimisticModeFetcher: chainService,
+				FinalizationFetcher:   chainService,
+				Stater: &testutil.MockStater{
+					BeaconStateRoot: stateRoot[:],
+					BeaconState:     st,
+				},
+			}
+
+			requestBody := &structs.SSZQueryRequest{
+				Query: tt.path,
+			}
+			var buf bytes.Buffer
+			require.NoError(t, json.NewEncoder(&buf).Encode(requestBody))
+
+			request := httptest.NewRequest(http.MethodPost, "http://example.com/prysm/v1/beacon/states/{state_id}/query", &buf)
+			request.SetPathValue("state_id", tt.stateId)
+			writer := httptest.NewRecorder()
+			writer.Body = &bytes.Buffer{}
+
+			s.QueryBeaconState(writer, request)
+
+			if tt.code == 0 {
+				tt.code = http.StatusBadRequest
+			}
+			require.Equal(t, tt.code, writer.Code)
+			if tt.errorString != "" {
+				errorString := writer.Body.String()
+				require.Equal(t, true, strings.Contains(errorString, tt.errorString))
+			}
+		})
+	}
+}
+
+func TestQueryBeaconBlock(t *testing.T) {
+	randaoReveal, err := hexutil.Decode("0x1b66ac1fb663c9bc59509846d6ec05345bd908eda73e670af888da41af171505cc411d61252fb6cb3fa0017b679f8bb2305b26a285fa2737f175668d0dff91cc1b66ac1fb663c9bc59509846d6ec05345bd908eda73e670af888da41af171505")
+	require.NoError(t, err)
+	root, err := hexutil.Decode("0xcf8e0d4e9587369b2301d0790347320302cc0943d5a1884560367e8208d920f2")
+	require.NoError(t, err)
+	signature, err := hexutil.Decode("0x1b66ac1fb663c9bc59509846d6ec05345bd908eda73e670af888da41af171505cc411d61252fb6cb3fa0017b679f8bb2305b26a285fa2737f175668d0dff91cc1b66ac1fb663c9bc59509846d6ec05345bd908eda73e670af888da41af171505")
+	require.NoError(t, err)
+	att := &eth.Attestation{
+		AggregationBits: bitfield.Bitlist{0x01},
+		Data: &eth.AttestationData{
+			Slot:            1,
+			CommitteeIndex:  1,
+			BeaconBlockRoot: root,
+			Source: &eth.Checkpoint{
+				Epoch: 1,
+				Root:  root,
+			},
+			Target: &eth.Checkpoint{
+				Epoch: 1,
+				Root:  root,
+			},
+		},
+		Signature: signature,
+	}
+
+	tests := []struct {
+		name          string
+		path          string
+		block         interfaces.ReadOnlySignedBeaconBlock
+		expectedValue []byte
+	}{
+		{
+			name: "slot",
+			path: ".slot",
+			block: func() interfaces.ReadOnlySignedBeaconBlock {
+				b := util.NewBeaconBlock()
+				b.Block.Slot = 123
+				sb, err := blocks.NewSignedBeaconBlock(b)
+				require.NoError(t, err)
+				return sb
+			}(),
+			expectedValue: func() []byte {
+				b := make([]byte, 8)
+				binary.LittleEndian.PutUint64(b, 123)
+				return b
+			}(),
+		},
+		{
+			name: "randao_reveal",
+			path: ".body.randao_reveal",
+			block: func() interfaces.ReadOnlySignedBeaconBlock {
+				b := util.NewBeaconBlock()
+				b.Block.Body.RandaoReveal = randaoReveal
+				sb, err := blocks.NewSignedBeaconBlock(b)
+				require.NoError(t, err)
+				return sb
+			}(),
+			expectedValue: randaoReveal,
+		},
+		{
+			name: "attestations",
+			path: ".body.attestations",
+			block: func() interfaces.ReadOnlySignedBeaconBlock {
+				b := util.NewBeaconBlock()
+				b.Block.Body.Attestations = []*eth.Attestation{
+					att,
+				}
+				sb, err := blocks.NewSignedBeaconBlock(b)
+				require.NoError(t, err)
+				return sb
+			}(),
+			expectedValue: func() []byte {
+				b, err := att.MarshalSSZ()
+				require.NoError(t, err)
+				return b
+			}(),
+		},
+	}
+
+	for _, tt := range tests {
+		t.Run(tt.name, func(t *testing.T) {
+			mockBlockFetcher := &testutil.MockBlocker{BlockToReturn: tt.block}
+			mockChainService := &chainMock.ChainService{
+				FinalizedRoots: map[[32]byte]bool{},
+ } + s := &Server{ + FinalizationFetcher: mockChainService, + Blocker: mockBlockFetcher, + } + requestBody := &structs.SSZQueryRequest{ + Query: tt.path, + } + var buf bytes.Buffer + require.NoError(t, json.NewEncoder(&buf).Encode(requestBody)) + + request := httptest.NewRequest(http.MethodPost, "http://example.com/prysm/v1/beacon/blocks/{block_id}/query", &buf) + request.SetPathValue("block_id", "head") + writer := httptest.NewRecorder() + writer.Body = &bytes.Buffer{} + + s.QueryBeaconBlock(writer, request) + require.Equal(t, http.StatusOK, writer.Code) + assert.Equal(t, version.String(version.Phase0), writer.Header().Get(api.VersionHeader)) + + blockRoot, err := tt.block.Block().HashTreeRoot() + require.NoError(t, err) + + expectedResponse := &sszquerypb.SSZQueryResponse{ + Root: blockRoot[:], + Result: tt.expectedValue, + } + sszExpectedResponse, err := expectedResponse.MarshalSSZ() + require.NoError(t, err) + assert.DeepEqual(t, sszExpectedResponse, writer.Body.Bytes()) + }) + } +} diff --git a/changelog/syjn99_ssz-ql-endpoints.md b/changelog/syjn99_ssz-ql-endpoints.md new file mode 100644 index 0000000000..b5fccec18b --- /dev/null +++ b/changelog/syjn99_ssz-ql-endpoints.md @@ -0,0 +1,3 @@ +### Added + +- SSZ-QL: Add endpoints for `BeaconState`/`BeaconBlock`. diff --git a/encoding/ssz/query/BUILD.bazel b/encoding/ssz/query/BUILD.bazel index 79b8ad84c1..e6a21f6aa3 100644 --- a/encoding/ssz/query/BUILD.bazel +++ b/encoding/ssz/query/BUILD.bazel @@ -31,7 +31,7 @@ go_test( deps = [ ":go_default_library", "//encoding/ssz/query/testutil:go_default_library", - "//proto/ssz_query:go_default_library", + "//proto/ssz_query/testing:go_default_library", "//testing/require:go_default_library", "@com_github_prysmaticlabs_go_bitfield//:go_default_library", ], diff --git a/encoding/ssz/query/analyzer.go b/encoding/ssz/query/analyzer.go index 83a5b964cc..305e934ee8 100644 --- a/encoding/ssz/query/analyzer.go +++ b/encoding/ssz/query/analyzer.go @@ -10,7 +10,7 @@ import ( const offsetBytes = 4 // AnalyzeObject analyzes given object and returns its SSZ information. -func AnalyzeObject(obj SSZObject) (*sszInfo, error) { +func AnalyzeObject(obj SSZObject) (*SszInfo, error) { value := reflect.ValueOf(obj) info, err := analyzeType(value, nil) @@ -28,9 +28,9 @@ func AnalyzeObject(obj SSZObject) (*sszInfo, error) { } // PopulateVariableLengthInfo populates runtime information for SSZ fields of variable-sized types. -// This function updates the sszInfo structure with actual lengths and offsets that can only +// This function updates the SszInfo structure with actual lengths and offsets that can only // be determined at runtime for variable-sized items like Lists and variable-sized Container fields. -func PopulateVariableLengthInfo(sszInfo *sszInfo, value reflect.Value) error { +func PopulateVariableLengthInfo(sszInfo *SszInfo, value reflect.Value) error { if sszInfo == nil { return errors.New("sszInfo is nil") } @@ -124,7 +124,7 @@ func PopulateVariableLengthInfo(sszInfo *sszInfo, value reflect.Value) error { fieldInfo := containerInfo.fields[fieldName] childSszInfo := fieldInfo.sszInfo if childSszInfo == nil { - return fmt.Errorf("sszInfo is nil for field %s", fieldName) + return fmt.Errorf("SszInfo is nil for field %s", fieldName) } // Skip fixed-size fields. @@ -158,7 +158,7 @@ func PopulateVariableLengthInfo(sszInfo *sszInfo, value reflect.Value) error { } // analyzeType is an entry point that inspects a reflect.Value and computes its SSZ layout information. 
-func analyzeType(value reflect.Value, tag *reflect.StructTag) (*sszInfo, error) { +func analyzeType(value reflect.Value, tag *reflect.StructTag) (*SszInfo, error) { switch value.Kind() { // Basic types (e.g., uintN where N is 8, 16, 32, 64) // NOTE: uint128 and uint256 are represented as []byte in Go, @@ -182,7 +182,7 @@ func analyzeType(value reflect.Value, tag *reflect.StructTag) (*sszInfo, error) } // analyzeBasicType analyzes SSZ basic types (uintN, bool) and returns its info. -func analyzeBasicType(value reflect.Value) (*sszInfo, error) { +func analyzeBasicType(value reflect.Value) (*SszInfo, error) { var sszType SSZType switch value.Kind() { @@ -200,7 +200,7 @@ func analyzeBasicType(value reflect.Value) (*sszInfo, error) { return nil, fmt.Errorf("unsupported basic type %v for SSZ calculation", value.Kind()) } - sszInfo := &sszInfo{ + sszInfo := &SszInfo{ sszType: sszType, typ: value.Type(), @@ -212,7 +212,7 @@ func analyzeBasicType(value reflect.Value) (*sszInfo, error) { } // analyzeHomogeneousColType analyzes homogeneous collection types (e.g., List, Vector, Bitlist, Bitvector) and returns its SSZ info. -func analyzeHomogeneousColType(value reflect.Value, tag *reflect.StructTag) (*sszInfo, error) { +func analyzeHomogeneousColType(value reflect.Value, tag *reflect.StructTag) (*SszInfo, error) { if value.Kind() != reflect.Slice { return nil, fmt.Errorf("can only analyze slice types, got %v", value.Kind()) } @@ -262,9 +262,9 @@ func analyzeHomogeneousColType(value reflect.Value, tag *reflect.StructTag) (*ss } // analyzeListType analyzes SSZ List/Bitlist type and returns its SSZ info. -func analyzeListType(value reflect.Value, elementInfo *sszInfo, limit uint64, isBitfield bool) (*sszInfo, error) { +func analyzeListType(value reflect.Value, elementInfo *SszInfo, limit uint64, isBitfield bool) (*SszInfo, error) { if isBitfield { - return &sszInfo{ + return &SszInfo{ sszType: Bitlist, typ: value.Type(), @@ -280,7 +280,7 @@ func analyzeListType(value reflect.Value, elementInfo *sszInfo, limit uint64, is return nil, errors.New("element info is required for List") } - return &sszInfo{ + return &SszInfo{ sszType: List, typ: value.Type(), @@ -294,9 +294,9 @@ func analyzeListType(value reflect.Value, elementInfo *sszInfo, limit uint64, is } // analyzeVectorType analyzes SSZ Vector/Bitvector type and returns its SSZ info. -func analyzeVectorType(value reflect.Value, elementInfo *sszInfo, length uint64, isBitfield bool) (*sszInfo, error) { +func analyzeVectorType(value reflect.Value, elementInfo *SszInfo, length uint64, isBitfield bool) (*SszInfo, error) { if isBitfield { - return &sszInfo{ + return &SszInfo{ sszType: Bitvector, typ: value.Type(), @@ -318,7 +318,7 @@ func analyzeVectorType(value reflect.Value, elementInfo *sszInfo, length uint64, return nil, fmt.Errorf("vector length must be greater than 0, got %d", length) } - return &sszInfo{ + return &SszInfo{ sszType: Vector, typ: value.Type(), @@ -332,7 +332,7 @@ func analyzeVectorType(value reflect.Value, elementInfo *sszInfo, length uint64, } // analyzeContainerType analyzes SSZ Container type and returns its SSZ info. 
-func analyzeContainerType(value reflect.Value) (*sszInfo, error) { +func analyzeContainerType(value reflect.Value) (*SszInfo, error) { if value.Kind() != reflect.Struct { return nil, fmt.Errorf("can only analyze struct types, got %v", value.Kind()) } @@ -386,7 +386,7 @@ func analyzeContainerType(value reflect.Value) (*sszInfo, error) { } } - return &sszInfo{ + return &SszInfo{ sszType: Container, typ: containerTyp, source: castToSSZObject(value), diff --git a/encoding/ssz/query/container.go b/encoding/ssz/query/container.go index f0d2fc9088..a373ab916b 100644 --- a/encoding/ssz/query/container.go +++ b/encoding/ssz/query/container.go @@ -1,7 +1,7 @@ package query // containerInfo has -// 1. fields: a field map that maps a field's JSON name to its sszInfo for nested Containers +// 1. fields: a field map that maps a field's JSON name to its SszInfo for nested Containers // 2. order: a list of field names in the order they should be serialized // 3. fixedOffset: the total size of the fixed part of the container type containerInfo struct { @@ -12,7 +12,7 @@ type containerInfo struct { type fieldInfo struct { // sszInfo contains the SSZ information of the field. - sszInfo *sszInfo + sszInfo *SszInfo // offset is the offset of the field within the parent struct. offset uint64 // goFieldName is the name of the field in Go struct. diff --git a/encoding/ssz/query/list.go b/encoding/ssz/query/list.go index d09a5fd821..3188801071 100644 --- a/encoding/ssz/query/list.go +++ b/encoding/ssz/query/list.go @@ -13,7 +13,7 @@ type listInfo struct { // limit is the maximum number of elements in the list. limit uint64 // element is the SSZ info of the list's element type. - element *sszInfo + element *SszInfo // length is the actual number of elements at runtime (0 if not set). length uint64 // elementSizes caches each element's byte size for variable-sized type elements @@ -27,7 +27,7 @@ func (l *listInfo) Limit() uint64 { return l.limit } -func (l *listInfo) Element() (*sszInfo, error) { +func (l *listInfo) Element() (*SszInfo, error) { if l == nil { return nil, errors.New("listInfo is nil") } diff --git a/encoding/ssz/query/query.go b/encoding/ssz/query/query.go index 62eb6810ee..8b90788eed 100644 --- a/encoding/ssz/query/query.go +++ b/encoding/ssz/query/query.go @@ -6,8 +6,8 @@ import ( ) // CalculateOffsetAndLength calculates the offset and length of a given path within the SSZ object. -// By walking the given path, it accumulates the offsets based on sszInfo. -func CalculateOffsetAndLength(sszInfo *sszInfo, path []PathElement) (*sszInfo, uint64, uint64, error) { +// By walking the given path, it accumulates the offsets based on SszInfo. 
+func CalculateOffsetAndLength(sszInfo *SszInfo, path []PathElement) (*SszInfo, uint64, uint64, error) { if sszInfo == nil { return nil, 0, 0, errors.New("sszInfo is nil") } diff --git a/encoding/ssz/query/query_test.go b/encoding/ssz/query/query_test.go index a84046b597..3935bd9ea0 100644 --- a/encoding/ssz/query/query_test.go +++ b/encoding/ssz/query/query_test.go @@ -6,7 +6,7 @@ import ( "github.com/OffchainLabs/prysm/v6/encoding/ssz/query" "github.com/OffchainLabs/prysm/v6/encoding/ssz/query/testutil" - sszquerypb "github.com/OffchainLabs/prysm/v6/proto/ssz_query" + sszquerypb "github.com/OffchainLabs/prysm/v6/proto/ssz_query/testing" "github.com/OffchainLabs/prysm/v6/testing/require" "github.com/prysmaticlabs/go-bitfield" ) diff --git a/encoding/ssz/query/ssz_info.go b/encoding/ssz/query/ssz_info.go index 37fe9507c5..6a06d3b867 100644 --- a/encoding/ssz/query/ssz_info.go +++ b/encoding/ssz/query/ssz_info.go @@ -7,8 +7,8 @@ import ( "strings" ) -// sszInfo holds the all necessary data for analyzing SSZ data types. -type sszInfo struct { +// SszInfo holds the all necessary data for analyzing SSZ data types. +type SszInfo struct { // Type of the SSZ structure (Basic, Container, List, etc.). sszType SSZType // Type in Go. Need this for unmarshaling. @@ -35,7 +35,7 @@ type sszInfo struct { bitvectorInfo *bitvectorInfo } -func (info *sszInfo) Size() uint64 { +func (info *SszInfo) Size() uint64 { if info == nil { return 0 } @@ -72,73 +72,73 @@ func (info *sszInfo) Size() uint64 { } } -func (info *sszInfo) ContainerInfo() (*containerInfo, error) { +func (info *SszInfo) ContainerInfo() (*containerInfo, error) { if info == nil { - return nil, errors.New("sszInfo is nil") + return nil, errors.New("SszInfo is nil") } if info.sszType != Container { - return nil, fmt.Errorf("sszInfo is not a Container type, got %s", info.sszType) + return nil, fmt.Errorf("SszInfo is not a Container type, got %s", info.sszType) } if info.containerInfo == nil { - return nil, errors.New("sszInfo.containerInfo is nil") + return nil, errors.New("SszInfo.containerInfo is nil") } return info.containerInfo, nil } -func (info *sszInfo) ListInfo() (*listInfo, error) { +func (info *SszInfo) ListInfo() (*listInfo, error) { if info == nil { - return nil, errors.New("sszInfo is nil") + return nil, errors.New("SszInfo is nil") } if info.sszType != List { - return nil, fmt.Errorf("sszInfo is not a List type, got %s", info.sszType) + return nil, fmt.Errorf("SszInfo is not a List type, got %s", info.sszType) } return info.listInfo, nil } -func (info *sszInfo) VectorInfo() (*vectorInfo, error) { +func (info *SszInfo) VectorInfo() (*vectorInfo, error) { if info == nil { - return nil, errors.New("sszInfo is nil") + return nil, errors.New("SszInfo is nil") } if info.sszType != Vector { - return nil, fmt.Errorf("sszInfo is not a Vector type, got %s", info.sszType) + return nil, fmt.Errorf("SszInfo is not a Vector type, got %s", info.sszType) } return info.vectorInfo, nil } -func (info *sszInfo) BitlistInfo() (*bitlistInfo, error) { +func (info *SszInfo) BitlistInfo() (*bitlistInfo, error) { if info == nil { - return nil, errors.New("sszInfo is nil") + return nil, errors.New("SszInfo is nil") } if info.sszType != Bitlist { - return nil, fmt.Errorf("sszInfo is not a Bitlist type, got %s", info.sszType) + return nil, fmt.Errorf("SszInfo is not a Bitlist type, got %s", info.sszType) } return info.bitlistInfo, nil } -func (info *sszInfo) BitvectorInfo() (*bitvectorInfo, error) { +func (info *SszInfo) BitvectorInfo() (*bitvectorInfo, error) { 
if info == nil { - return nil, errors.New("sszInfo is nil") + return nil, errors.New("SszInfo is nil") } if info.sszType != Bitvector { - return nil, fmt.Errorf("sszInfo is not a Bitvector type, got %s", info.sszType) + return nil, fmt.Errorf("SszInfo is not a Bitvector type, got %s", info.sszType) } return info.bitvectorInfo, nil } -// String implements the Stringer interface for sszInfo. +// String implements the Stringer interface for SszInfo. // This follows the notation used in the consensus specs. -func (info *sszInfo) String() string { +func (info *SszInfo) String() string { if info == nil { return "" } @@ -163,8 +163,8 @@ func (info *sszInfo) String() string { } } -// Print returns a string representation of the sszInfo, which is useful for debugging. -func (info *sszInfo) Print() string { +// Print returns a string representation of the SszInfo, which is useful for debugging. +func (info *SszInfo) Print() string { if info == nil { return "" } @@ -173,7 +173,7 @@ func (info *sszInfo) Print() string { return builder.String() } -func printRecursive(info *sszInfo, builder *strings.Builder, prefix string) { +func printRecursive(info *SszInfo, builder *strings.Builder, prefix string) { var sizeDesc string if info.isVariable { sizeDesc = "Variable-size" diff --git a/encoding/ssz/query/ssz_object.go b/encoding/ssz/query/ssz_object.go index ae60613696..eae56a8e3c 100644 --- a/encoding/ssz/query/ssz_object.go +++ b/encoding/ssz/query/ssz_object.go @@ -9,13 +9,13 @@ type SSZObject interface { // HashTreeRoot calls the HashTreeRoot method on the stored interface if it implements SSZObject. // Returns the 32-byte hash tree root or an error if the interface doesn't support hashing. -func (info *sszInfo) HashTreeRoot() ([32]byte, error) { +func (info *SszInfo) HashTreeRoot() ([32]byte, error) { if info == nil { - return [32]byte{}, errors.New("sszInfo is nil") + return [32]byte{}, errors.New("SszInfo is nil") } if info.source == nil { - return [32]byte{}, errors.New("sszInfo.source is nil") + return [32]byte{}, errors.New("SszInfo.source is nil") } // Check if the value implements the Hashable interface diff --git a/encoding/ssz/query/vector.go b/encoding/ssz/query/vector.go index 8e90856952..638b0ea2dc 100644 --- a/encoding/ssz/query/vector.go +++ b/encoding/ssz/query/vector.go @@ -5,7 +5,7 @@ import "errors" // vectorInfo holds information about a SSZ Vector type. type vectorInfo struct { // element is the SSZ info of the vector's element type. - element *sszInfo + element *SszInfo // length is the fixed length of the vector. 
length uint64 } @@ -18,7 +18,7 @@ func (v *vectorInfo) Length() uint64 { return v.length } -func (v *vectorInfo) Element() (*sszInfo, error) { +func (v *vectorInfo) Element() (*SszInfo, error) { if v == nil { return nil, errors.New("vectorInfo is nil") } diff --git a/proto/ssz_query/BUILD.bazel b/proto/ssz_query/BUILD.bazel index 16991bf95c..b4357eec32 100644 --- a/proto/ssz_query/BUILD.bazel +++ b/proto/ssz_query/BUILD.bazel @@ -1,14 +1,13 @@ load("@rules_proto//proto:defs.bzl", "proto_library") load("@io_bazel_rules_go//go:def.bzl", "go_library") load("@io_bazel_rules_go//proto:def.bzl", "go_proto_library") -load("//proto:ssz_proto_library.bzl", "ssz_proto_files") load("//tools:ssz.bzl", "SSZ_DEPS", "ssz_gen_marshal") # gazelle:ignore proto_library( name = "proto", - srcs = ["ssz_query.proto"], + srcs = ["response.proto"], visibility = ["//visibility:public"], deps = [ "//proto/eth/ext:proto", @@ -25,22 +24,21 @@ go_proto_library( visibility = ["//visibility:public"], deps = [ "//proto/eth/ext:go_default_library", - "@com_github_prysmaticlabs_go_bitfield//:go_default_library", "@com_github_golang_protobuf//proto:go_default_library", "@org_golang_google_protobuf//reflect/protoreflect:go_default_library", "@org_golang_google_protobuf//runtime/protoimpl:go_default_library", ], ) -# SSZ generation for test proto messages +# SSZ generation for proto messages ssz_gen_marshal( name = "ssz_generated", - out = "ssz_query.ssz.go", + out = "response.ssz.go", go_proto = ":go_proto", objs = [ - "FixedTestContainer", - "FixedNestedContainer", - "VariableTestContainer", + "SSZQueryProof", + "SSZQueryResponse", + "SSZQueryResponseWithProof", ], ) @@ -55,16 +53,5 @@ go_library( deps = SSZ_DEPS + [ "//proto/eth/ext:go_default_library", "@com_github_golang_protobuf//proto:go_default_library", - "@com_github_prysmaticlabs_go_bitfield//:go_default_library", ], ) - -ssz_proto_files( - name = "ssz_proto_files", - srcs = ["ssz_query.proto"], - config = select({ - "//conditions:default": "mainnet", - "//proto:ssz_mainnet": "mainnet", - "//proto:ssz_minimal": "minimal", - }), -) diff --git a/proto/ssz_query/response.pb.go b/proto/ssz_query/response.pb.go new file mode 100755 index 0000000000..f6ef39d3f0 --- /dev/null +++ b/proto/ssz_query/response.pb.go @@ -0,0 +1,282 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// versions: +// protoc-gen-go v1.36.3 +// protoc v3.21.7 +// source: proto/ssz_query/response.proto + +package ssz_query + +import ( + reflect "reflect" + sync "sync" + + _ "github.com/OffchainLabs/prysm/v6/proto/eth/ext" + protoreflect "google.golang.org/protobuf/reflect/protoreflect" + protoimpl "google.golang.org/protobuf/runtime/protoimpl" +) + +const ( + // Verify that this generated code is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion) + // Verify that runtime/protoimpl is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20) +) + +type SSZQueryProof struct { + state protoimpl.MessageState `protogen:"open.v1"` + Leaf []byte `protobuf:"bytes,1,opt,name=leaf,proto3" json:"leaf,omitempty" ssz-size:"32"` + Gindex uint64 `protobuf:"varint,2,opt,name=gindex,proto3" json:"gindex,omitempty"` + Proofs [][]byte `protobuf:"bytes,3,rep,name=proofs,proto3" json:"proofs,omitempty" ssz-max:"64,?" 
ssz-size:"?,32"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *SSZQueryProof) Reset() { + *x = SSZQueryProof{} + mi := &file_proto_ssz_query_response_proto_msgTypes[0] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *SSZQueryProof) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*SSZQueryProof) ProtoMessage() {} + +func (x *SSZQueryProof) ProtoReflect() protoreflect.Message { + mi := &file_proto_ssz_query_response_proto_msgTypes[0] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use SSZQueryProof.ProtoReflect.Descriptor instead. +func (*SSZQueryProof) Descriptor() ([]byte, []int) { + return file_proto_ssz_query_response_proto_rawDescGZIP(), []int{0} +} + +func (x *SSZQueryProof) GetLeaf() []byte { + if x != nil { + return x.Leaf + } + return nil +} + +func (x *SSZQueryProof) GetGindex() uint64 { + if x != nil { + return x.Gindex + } + return 0 +} + +func (x *SSZQueryProof) GetProofs() [][]byte { + if x != nil { + return x.Proofs + } + return nil +} + +type SSZQueryResponse struct { + state protoimpl.MessageState `protogen:"open.v1"` + Root []byte `protobuf:"bytes,1,opt,name=root,proto3" json:"root,omitempty" ssz-size:"32"` + Result []byte `protobuf:"bytes,2,opt,name=result,proto3" json:"result,omitempty" ssz-max:"1073741824"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *SSZQueryResponse) Reset() { + *x = SSZQueryResponse{} + mi := &file_proto_ssz_query_response_proto_msgTypes[1] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *SSZQueryResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*SSZQueryResponse) ProtoMessage() {} + +func (x *SSZQueryResponse) ProtoReflect() protoreflect.Message { + mi := &file_proto_ssz_query_response_proto_msgTypes[1] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use SSZQueryResponse.ProtoReflect.Descriptor instead. 
+func (*SSZQueryResponse) Descriptor() ([]byte, []int) { + return file_proto_ssz_query_response_proto_rawDescGZIP(), []int{1} +} + +func (x *SSZQueryResponse) GetRoot() []byte { + if x != nil { + return x.Root + } + return nil +} + +func (x *SSZQueryResponse) GetResult() []byte { + if x != nil { + return x.Result + } + return nil +} + +type SSZQueryResponseWithProof struct { + state protoimpl.MessageState `protogen:"open.v1"` + Root []byte `protobuf:"bytes,1,opt,name=root,proto3" json:"root,omitempty" ssz-size:"32"` + Result []byte `protobuf:"bytes,2,opt,name=result,proto3" json:"result,omitempty" ssz-max:"1073741824"` + Proof *SSZQueryProof `protobuf:"bytes,3,opt,name=proof,proto3" json:"proof,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *SSZQueryResponseWithProof) Reset() { + *x = SSZQueryResponseWithProof{} + mi := &file_proto_ssz_query_response_proto_msgTypes[2] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *SSZQueryResponseWithProof) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*SSZQueryResponseWithProof) ProtoMessage() {} + +func (x *SSZQueryResponseWithProof) ProtoReflect() protoreflect.Message { + mi := &file_proto_ssz_query_response_proto_msgTypes[2] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use SSZQueryResponseWithProof.ProtoReflect.Descriptor instead. +func (*SSZQueryResponseWithProof) Descriptor() ([]byte, []int) { + return file_proto_ssz_query_response_proto_rawDescGZIP(), []int{2} +} + +func (x *SSZQueryResponseWithProof) GetRoot() []byte { + if x != nil { + return x.Root + } + return nil +} + +func (x *SSZQueryResponseWithProof) GetResult() []byte { + if x != nil { + return x.Result + } + return nil +} + +func (x *SSZQueryResponseWithProof) GetProof() *SSZQueryProof { + if x != nil { + return x.Proof + } + return nil +} + +var File_proto_ssz_query_response_proto protoreflect.FileDescriptor + +var file_proto_ssz_query_response_proto_rawDesc = []byte{ + 0x0a, 0x1e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2f, 0x73, 0x73, 0x7a, 0x5f, 0x71, 0x75, 0x65, 0x72, + 0x79, 0x2f, 0x72, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, + 0x12, 0x07, 0x74, 0x65, 0x73, 0x74, 0x69, 0x6e, 0x67, 0x1a, 0x1b, 0x70, 0x72, 0x6f, 0x74, 0x6f, + 0x2f, 0x65, 0x74, 0x68, 0x2f, 0x65, 0x78, 0x74, 0x2f, 0x6f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, + 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x22, 0x6d, 0x0a, 0x0d, 0x53, 0x53, 0x5a, 0x51, 0x75, 0x65, + 0x72, 0x79, 0x50, 0x72, 0x6f, 0x6f, 0x66, 0x12, 0x1a, 0x0a, 0x04, 0x6c, 0x65, 0x61, 0x66, 0x18, + 0x01, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x06, 0x8a, 0xb5, 0x18, 0x02, 0x33, 0x32, 0x52, 0x04, 0x6c, + 0x65, 0x61, 0x66, 0x12, 0x16, 0x0a, 0x06, 0x67, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x18, 0x02, 0x20, + 0x01, 0x28, 0x04, 0x52, 0x06, 0x67, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x12, 0x28, 0x0a, 0x06, 0x70, + 0x72, 0x6f, 0x6f, 0x66, 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, 0x0c, 0x42, 0x10, 0x8a, 0xb5, 0x18, + 0x04, 0x3f, 0x2c, 0x33, 0x32, 0x92, 0xb5, 0x18, 0x04, 0x36, 0x34, 0x2c, 0x3f, 0x52, 0x06, 0x70, + 0x72, 0x6f, 0x6f, 0x66, 0x73, 0x22, 0x56, 0x0a, 0x10, 0x53, 0x53, 0x5a, 0x51, 0x75, 0x65, 0x72, + 0x79, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x1a, 0x0a, 0x04, 0x72, 0x6f, 0x6f, + 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x06, 0x8a, 0xb5, 0x18, 0x02, 0x33, 0x32, 
0x52, + 0x04, 0x72, 0x6f, 0x6f, 0x74, 0x12, 0x26, 0x0a, 0x06, 0x72, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x18, + 0x02, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x0e, 0x92, 0xb5, 0x18, 0x0a, 0x31, 0x30, 0x37, 0x33, 0x37, + 0x34, 0x31, 0x38, 0x32, 0x34, 0x52, 0x06, 0x72, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x22, 0x8d, 0x01, + 0x0a, 0x19, 0x53, 0x53, 0x5a, 0x51, 0x75, 0x65, 0x72, 0x79, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, + 0x73, 0x65, 0x57, 0x69, 0x74, 0x68, 0x50, 0x72, 0x6f, 0x6f, 0x66, 0x12, 0x1a, 0x0a, 0x04, 0x72, + 0x6f, 0x6f, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x06, 0x8a, 0xb5, 0x18, 0x02, 0x33, + 0x32, 0x52, 0x04, 0x72, 0x6f, 0x6f, 0x74, 0x12, 0x26, 0x0a, 0x06, 0x72, 0x65, 0x73, 0x75, 0x6c, + 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x0e, 0x92, 0xb5, 0x18, 0x0a, 0x31, 0x30, 0x37, + 0x33, 0x37, 0x34, 0x31, 0x38, 0x32, 0x34, 0x52, 0x06, 0x72, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x12, + 0x2c, 0x0a, 0x05, 0x70, 0x72, 0x6f, 0x6f, 0x66, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x16, + 0x2e, 0x74, 0x65, 0x73, 0x74, 0x69, 0x6e, 0x67, 0x2e, 0x53, 0x53, 0x5a, 0x51, 0x75, 0x65, 0x72, + 0x79, 0x50, 0x72, 0x6f, 0x6f, 0x66, 0x52, 0x05, 0x70, 0x72, 0x6f, 0x6f, 0x66, 0x42, 0x32, 0x5a, + 0x30, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, + 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, + 0x36, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2f, 0x73, 0x73, 0x7a, 0x5f, 0x71, 0x75, 0x65, 0x72, + 0x79, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, +} + +var ( + file_proto_ssz_query_response_proto_rawDescOnce sync.Once + file_proto_ssz_query_response_proto_rawDescData = file_proto_ssz_query_response_proto_rawDesc +) + +func file_proto_ssz_query_response_proto_rawDescGZIP() []byte { + file_proto_ssz_query_response_proto_rawDescOnce.Do(func() { + file_proto_ssz_query_response_proto_rawDescData = protoimpl.X.CompressGZIP(file_proto_ssz_query_response_proto_rawDescData) + }) + return file_proto_ssz_query_response_proto_rawDescData +} + +var file_proto_ssz_query_response_proto_msgTypes = make([]protoimpl.MessageInfo, 3) +var file_proto_ssz_query_response_proto_goTypes = []any{ + (*SSZQueryProof)(nil), // 0: testing.SSZQueryProof + (*SSZQueryResponse)(nil), // 1: testing.SSZQueryResponse + (*SSZQueryResponseWithProof)(nil), // 2: testing.SSZQueryResponseWithProof +} +var file_proto_ssz_query_response_proto_depIdxs = []int32{ + 0, // 0: testing.SSZQueryResponseWithProof.proof:type_name -> testing.SSZQueryProof + 1, // [1:1] is the sub-list for method output_type + 1, // [1:1] is the sub-list for method input_type + 1, // [1:1] is the sub-list for extension type_name + 1, // [1:1] is the sub-list for extension extendee + 0, // [0:1] is the sub-list for field type_name +} + +func init() { file_proto_ssz_query_response_proto_init() } +func file_proto_ssz_query_response_proto_init() { + if File_proto_ssz_query_response_proto != nil { + return + } + type x struct{} + out := protoimpl.TypeBuilder{ + File: protoimpl.DescBuilder{ + GoPackagePath: reflect.TypeOf(x{}).PkgPath(), + RawDescriptor: file_proto_ssz_query_response_proto_rawDesc, + NumEnums: 0, + NumMessages: 3, + NumExtensions: 0, + NumServices: 0, + }, + GoTypes: file_proto_ssz_query_response_proto_goTypes, + DependencyIndexes: file_proto_ssz_query_response_proto_depIdxs, + MessageInfos: file_proto_ssz_query_response_proto_msgTypes, + }.Build() + File_proto_ssz_query_response_proto = out.File + file_proto_ssz_query_response_proto_rawDesc = nil + 
file_proto_ssz_query_response_proto_goTypes = nil + file_proto_ssz_query_response_proto_depIdxs = nil +} diff --git a/proto/ssz_query/response.proto b/proto/ssz_query/response.proto new file mode 100644 index 0000000000..299204af29 --- /dev/null +++ b/proto/ssz_query/response.proto @@ -0,0 +1,27 @@ +syntax = "proto3"; + +package testing; + +import "proto/eth/ext/options.proto"; + +option go_package = "github.com/OffchainLabs/prysm/v6/proto/ssz_query"; + +message SSZQueryProof { + bytes leaf = 1 [ (ethereum.eth.ext.ssz_size) = "32" ]; + uint64 gindex = 2; + repeated bytes proofs = 3 [ + (ethereum.eth.ext.ssz_size) = "?,32", + (ethereum.eth.ext.ssz_max) = "64,?" + ]; +} + +message SSZQueryResponse { + bytes root = 1 [ (ethereum.eth.ext.ssz_size) = "32" ]; + bytes result = 2 [ (ethereum.eth.ext.ssz_max) = "1073741824" ]; +} + +message SSZQueryResponseWithProof { + bytes root = 1 [ (ethereum.eth.ext.ssz_size) = "32" ]; + bytes result = 2 [ (ethereum.eth.ext.ssz_max) = "1073741824" ]; + SSZQueryProof proof = 3; +} diff --git a/proto/ssz_query/response.ssz.go b/proto/ssz_query/response.ssz.go new file mode 100644 index 0000000000..d7d082318f --- /dev/null +++ b/proto/ssz_query/response.ssz.go @@ -0,0 +1,410 @@ +// Code generated by fastssz. DO NOT EDIT. +package ssz_query + +import ( + ssz "github.com/prysmaticlabs/fastssz" +) + +// MarshalSSZ ssz marshals the SSZQueryProof object +func (s *SSZQueryProof) MarshalSSZ() ([]byte, error) { + return ssz.MarshalSSZ(s) +} + +// MarshalSSZTo ssz marshals the SSZQueryProof object to a target array +func (s *SSZQueryProof) MarshalSSZTo(buf []byte) (dst []byte, err error) { + dst = buf + offset := int(44) + + // Field (0) 'Leaf' + if size := len(s.Leaf); size != 32 { + err = ssz.ErrBytesLengthFn("--.Leaf", size, 32) + return + } + dst = append(dst, s.Leaf...) + + // Field (1) 'Gindex' + dst = ssz.MarshalUint64(dst, s.Gindex) + + // Offset (2) 'Proofs' + dst = ssz.WriteOffset(dst, offset) + offset += len(s.Proofs) * 32 + + // Field (2) 'Proofs' + if size := len(s.Proofs); size > 64 { + err = ssz.ErrListTooBigFn("--.Proofs", size, 64) + return + } + for ii := 0; ii < len(s.Proofs); ii++ { + if size := len(s.Proofs[ii]); size != 32 { + err = ssz.ErrBytesLengthFn("--.Proofs[ii]", size, 32) + return + } + dst = append(dst, s.Proofs[ii]...) + } + + return +} + +// UnmarshalSSZ ssz unmarshals the SSZQueryProof object +func (s *SSZQueryProof) UnmarshalSSZ(buf []byte) error { + var err error + size := uint64(len(buf)) + if size < 44 { + return ssz.ErrSize + } + + tail := buf + var o2 uint64 + + // Field (0) 'Leaf' + if cap(s.Leaf) == 0 { + s.Leaf = make([]byte, 0, len(buf[0:32])) + } + s.Leaf = append(s.Leaf, buf[0:32]...) + + // Field (1) 'Gindex' + s.Gindex = ssz.UnmarshallUint64(buf[32:40]) + + // Offset (2) 'Proofs' + if o2 = ssz.ReadOffset(buf[40:44]); o2 > size { + return ssz.ErrOffset + } + + if o2 != 44 { + return ssz.ErrInvalidVariableOffset + } + + // Field (2) 'Proofs' + { + buf = tail[o2:] + num, err := ssz.DivideInt2(len(buf), 32, 64) + if err != nil { + return err + } + s.Proofs = make([][]byte, num) + for ii := 0; ii < num; ii++ { + if cap(s.Proofs[ii]) == 0 { + s.Proofs[ii] = make([]byte, 0, len(buf[ii*32:(ii+1)*32])) + } + s.Proofs[ii] = append(s.Proofs[ii], buf[ii*32:(ii+1)*32]...) 
+ } + } + return err +} + +// SizeSSZ returns the ssz encoded size in bytes for the SSZQueryProof object +func (s *SSZQueryProof) SizeSSZ() (size int) { + size = 44 + + // Field (2) 'Proofs' + size += len(s.Proofs) * 32 + + return +} + +// HashTreeRoot ssz hashes the SSZQueryProof object +func (s *SSZQueryProof) HashTreeRoot() ([32]byte, error) { + return ssz.HashWithDefaultHasher(s) +} + +// HashTreeRootWith ssz hashes the SSZQueryProof object with a hasher +func (s *SSZQueryProof) HashTreeRootWith(hh *ssz.Hasher) (err error) { + indx := hh.Index() + + // Field (0) 'Leaf' + if size := len(s.Leaf); size != 32 { + err = ssz.ErrBytesLengthFn("--.Leaf", size, 32) + return + } + hh.PutBytes(s.Leaf) + + // Field (1) 'Gindex' + hh.PutUint64(s.Gindex) + + // Field (2) 'Proofs' + { + if size := len(s.Proofs); size > 64 { + err = ssz.ErrListTooBigFn("--.Proofs", size, 64) + return + } + subIndx := hh.Index() + for _, i := range s.Proofs { + if len(i) != 32 { + err = ssz.ErrBytesLength + return + } + hh.Append(i) + } + + numItems := uint64(len(s.Proofs)) + hh.MerkleizeWithMixin(subIndx, numItems, 64) + } + + hh.Merkleize(indx) + return +} + +// MarshalSSZ ssz marshals the SSZQueryResponse object +func (s *SSZQueryResponse) MarshalSSZ() ([]byte, error) { + return ssz.MarshalSSZ(s) +} + +// MarshalSSZTo ssz marshals the SSZQueryResponse object to a target array +func (s *SSZQueryResponse) MarshalSSZTo(buf []byte) (dst []byte, err error) { + dst = buf + offset := int(36) + + // Field (0) 'Root' + if size := len(s.Root); size != 32 { + err = ssz.ErrBytesLengthFn("--.Root", size, 32) + return + } + dst = append(dst, s.Root...) + + // Offset (1) 'Result' + dst = ssz.WriteOffset(dst, offset) + offset += len(s.Result) + + // Field (1) 'Result' + if size := len(s.Result); size > 1073741824 { + err = ssz.ErrBytesLengthFn("--.Result", size, 1073741824) + return + } + dst = append(dst, s.Result...) + + return +} + +// UnmarshalSSZ ssz unmarshals the SSZQueryResponse object +func (s *SSZQueryResponse) UnmarshalSSZ(buf []byte) error { + var err error + size := uint64(len(buf)) + if size < 36 { + return ssz.ErrSize + } + + tail := buf + var o1 uint64 + + // Field (0) 'Root' + if cap(s.Root) == 0 { + s.Root = make([]byte, 0, len(buf[0:32])) + } + s.Root = append(s.Root, buf[0:32]...) + + // Offset (1) 'Result' + if o1 = ssz.ReadOffset(buf[32:36]); o1 > size { + return ssz.ErrOffset + } + + if o1 != 36 { + return ssz.ErrInvalidVariableOffset + } + + // Field (1) 'Result' + { + buf = tail[o1:] + if len(buf) > 1073741824 { + return ssz.ErrBytesLength + } + if cap(s.Result) == 0 { + s.Result = make([]byte, 0, len(buf)) + } + s.Result = append(s.Result, buf...) 
+ } + return err +} + +// SizeSSZ returns the ssz encoded size in bytes for the SSZQueryResponse object +func (s *SSZQueryResponse) SizeSSZ() (size int) { + size = 36 + + // Field (1) 'Result' + size += len(s.Result) + + return +} + +// HashTreeRoot ssz hashes the SSZQueryResponse object +func (s *SSZQueryResponse) HashTreeRoot() ([32]byte, error) { + return ssz.HashWithDefaultHasher(s) +} + +// HashTreeRootWith ssz hashes the SSZQueryResponse object with a hasher +func (s *SSZQueryResponse) HashTreeRootWith(hh *ssz.Hasher) (err error) { + indx := hh.Index() + + // Field (0) 'Root' + if size := len(s.Root); size != 32 { + err = ssz.ErrBytesLengthFn("--.Root", size, 32) + return + } + hh.PutBytes(s.Root) + + // Field (1) 'Result' + { + elemIndx := hh.Index() + byteLen := uint64(len(s.Result)) + if byteLen > 1073741824 { + err = ssz.ErrIncorrectListSize + return + } + hh.PutBytes(s.Result) + hh.MerkleizeWithMixin(elemIndx, byteLen, (1073741824+31)/32) + } + + hh.Merkleize(indx) + return +} + +// MarshalSSZ ssz marshals the SSZQueryResponseWithProof object +func (s *SSZQueryResponseWithProof) MarshalSSZ() ([]byte, error) { + return ssz.MarshalSSZ(s) +} + +// MarshalSSZTo ssz marshals the SSZQueryResponseWithProof object to a target array +func (s *SSZQueryResponseWithProof) MarshalSSZTo(buf []byte) (dst []byte, err error) { + dst = buf + offset := int(40) + + // Field (0) 'Root' + if size := len(s.Root); size != 32 { + err = ssz.ErrBytesLengthFn("--.Root", size, 32) + return + } + dst = append(dst, s.Root...) + + // Offset (1) 'Result' + dst = ssz.WriteOffset(dst, offset) + offset += len(s.Result) + + // Offset (2) 'Proof' + dst = ssz.WriteOffset(dst, offset) + if s.Proof == nil { + s.Proof = new(SSZQueryProof) + } + offset += s.Proof.SizeSSZ() + + // Field (1) 'Result' + if size := len(s.Result); size > 1073741824 { + err = ssz.ErrBytesLengthFn("--.Result", size, 1073741824) + return + } + dst = append(dst, s.Result...) + + // Field (2) 'Proof' + if dst, err = s.Proof.MarshalSSZTo(dst); err != nil { + return + } + + return +} + +// UnmarshalSSZ ssz unmarshals the SSZQueryResponseWithProof object +func (s *SSZQueryResponseWithProof) UnmarshalSSZ(buf []byte) error { + var err error + size := uint64(len(buf)) + if size < 40 { + return ssz.ErrSize + } + + tail := buf + var o1, o2 uint64 + + // Field (0) 'Root' + if cap(s.Root) == 0 { + s.Root = make([]byte, 0, len(buf[0:32])) + } + s.Root = append(s.Root, buf[0:32]...) + + // Offset (1) 'Result' + if o1 = ssz.ReadOffset(buf[32:36]); o1 > size { + return ssz.ErrOffset + } + + if o1 != 40 { + return ssz.ErrInvalidVariableOffset + } + + // Offset (2) 'Proof' + if o2 = ssz.ReadOffset(buf[36:40]); o2 > size || o1 > o2 { + return ssz.ErrOffset + } + + // Field (1) 'Result' + { + buf = tail[o1:o2] + if len(buf) > 1073741824 { + return ssz.ErrBytesLength + } + if cap(s.Result) == 0 { + s.Result = make([]byte, 0, len(buf)) + } + s.Result = append(s.Result, buf...) 
+ } + + // Field (2) 'Proof' + { + buf = tail[o2:] + if s.Proof == nil { + s.Proof = new(SSZQueryProof) + } + if err = s.Proof.UnmarshalSSZ(buf); err != nil { + return err + } + } + return err +} + +// SizeSSZ returns the ssz encoded size in bytes for the SSZQueryResponseWithProof object +func (s *SSZQueryResponseWithProof) SizeSSZ() (size int) { + size = 40 + + // Field (1) 'Result' + size += len(s.Result) + + // Field (2) 'Proof' + if s.Proof == nil { + s.Proof = new(SSZQueryProof) + } + size += s.Proof.SizeSSZ() + + return +} + +// HashTreeRoot ssz hashes the SSZQueryResponseWithProof object +func (s *SSZQueryResponseWithProof) HashTreeRoot() ([32]byte, error) { + return ssz.HashWithDefaultHasher(s) +} + +// HashTreeRootWith ssz hashes the SSZQueryResponseWithProof object with a hasher +func (s *SSZQueryResponseWithProof) HashTreeRootWith(hh *ssz.Hasher) (err error) { + indx := hh.Index() + + // Field (0) 'Root' + if size := len(s.Root); size != 32 { + err = ssz.ErrBytesLengthFn("--.Root", size, 32) + return + } + hh.PutBytes(s.Root) + + // Field (1) 'Result' + { + elemIndx := hh.Index() + byteLen := uint64(len(s.Result)) + if byteLen > 1073741824 { + err = ssz.ErrIncorrectListSize + return + } + hh.PutBytes(s.Result) + hh.MerkleizeWithMixin(elemIndx, byteLen, (1073741824+31)/32) + } + + // Field (2) 'Proof' + if err = s.Proof.HashTreeRootWith(hh); err != nil { + return + } + + hh.Merkleize(indx) + return +} diff --git a/proto/ssz_query/testing/BUILD.bazel b/proto/ssz_query/testing/BUILD.bazel new file mode 100644 index 0000000000..929ce56d34 --- /dev/null +++ b/proto/ssz_query/testing/BUILD.bazel @@ -0,0 +1,70 @@ +load("@rules_proto//proto:defs.bzl", "proto_library") +load("@io_bazel_rules_go//go:def.bzl", "go_library") +load("@io_bazel_rules_go//proto:def.bzl", "go_proto_library") +load("//proto:ssz_proto_library.bzl", "ssz_proto_files") +load("//tools:ssz.bzl", "SSZ_DEPS", "ssz_gen_marshal") + +# gazelle:ignore + +proto_library( + name = "proto", + srcs = ["test_containers.proto"], + visibility = ["//visibility:public"], + deps = [ + "//proto/eth/ext:proto", + ], +) + +go_proto_library( + name = "go_proto", + compilers = [ + "@com_github_prysmaticlabs_protoc_gen_go_cast//:go_cast_grpc", + ], + importpath = "github.com/OffchainLabs/prysm/v6/proto/ssz_query/testing", + proto = ":proto", + visibility = ["//visibility:public"], + deps = [ + "//proto/eth/ext:go_default_library", + "@com_github_prysmaticlabs_go_bitfield//:go_default_library", + "@com_github_golang_protobuf//proto:go_default_library", + "@org_golang_google_protobuf//reflect/protoreflect:go_default_library", + "@org_golang_google_protobuf//runtime/protoimpl:go_default_library", + ], +) + +# SSZ generation for test proto messages +ssz_gen_marshal( + name = "ssz_generated", + out = "test_containers.ssz.go", + go_proto = ":go_proto", + objs = [ + "FixedTestContainer", + "FixedNestedContainer", + "VariableTestContainer", + ], +) + +go_library( + name = "go_default_library", + srcs = [ + ":ssz_generated", # keep + ], + embed = [":go_proto"], + importpath = "github.com/OffchainLabs/prysm/v6/proto/ssz_query/testing", + visibility = ["//visibility:public"], + deps = SSZ_DEPS + [ + "//proto/eth/ext:go_default_library", + "@com_github_golang_protobuf//proto:go_default_library", + "@com_github_prysmaticlabs_go_bitfield//:go_default_library", + ], +) + +ssz_proto_files( + name = "ssz_proto_files", + srcs = ["test_containers.proto"], + config = select({ + "//conditions:default": "mainnet", + "//proto:ssz_mainnet": "mainnet", + 
"//proto:ssz_minimal": "minimal", + }), +) diff --git a/proto/ssz_query/ssz_query.pb.go b/proto/ssz_query/testing/test_containers.pb.go similarity index 55% rename from proto/ssz_query/ssz_query.pb.go rename to proto/ssz_query/testing/test_containers.pb.go index 6b9d5bd7d5..695e68103c 100755 --- a/proto/ssz_query/ssz_query.pb.go +++ b/proto/ssz_query/testing/test_containers.pb.go @@ -2,9 +2,9 @@ // versions: // protoc-gen-go v1.36.3 // protoc v3.21.7 -// source: proto/ssz_query/ssz_query.proto +// source: proto/ssz_query/testing/test_containers.proto -package ssz_query +package testing import ( reflect "reflect" @@ -33,7 +33,7 @@ type FixedNestedContainer struct { func (x *FixedNestedContainer) Reset() { *x = FixedNestedContainer{} - mi := &file_proto_ssz_query_ssz_query_proto_msgTypes[0] + mi := &file_proto_ssz_query_testing_test_containers_proto_msgTypes[0] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -45,7 +45,7 @@ func (x *FixedNestedContainer) String() string { func (*FixedNestedContainer) ProtoMessage() {} func (x *FixedNestedContainer) ProtoReflect() protoreflect.Message { - mi := &file_proto_ssz_query_ssz_query_proto_msgTypes[0] + mi := &file_proto_ssz_query_testing_test_containers_proto_msgTypes[0] if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -58,7 +58,7 @@ func (x *FixedNestedContainer) ProtoReflect() protoreflect.Message { // Deprecated: Use FixedNestedContainer.ProtoReflect.Descriptor instead. func (*FixedNestedContainer) Descriptor() ([]byte, []int) { - return file_proto_ssz_query_ssz_query_proto_rawDescGZIP(), []int{0} + return file_proto_ssz_query_testing_test_containers_proto_rawDescGZIP(), []int{0} } func (x *FixedNestedContainer) GetValue1() uint64 { @@ -93,7 +93,7 @@ type FixedTestContainer struct { func (x *FixedTestContainer) Reset() { *x = FixedTestContainer{} - mi := &file_proto_ssz_query_ssz_query_proto_msgTypes[1] + mi := &file_proto_ssz_query_testing_test_containers_proto_msgTypes[1] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -105,7 +105,7 @@ func (x *FixedTestContainer) String() string { func (*FixedTestContainer) ProtoMessage() {} func (x *FixedTestContainer) ProtoReflect() protoreflect.Message { - mi := &file_proto_ssz_query_ssz_query_proto_msgTypes[1] + mi := &file_proto_ssz_query_testing_test_containers_proto_msgTypes[1] if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -118,7 +118,7 @@ func (x *FixedTestContainer) ProtoReflect() protoreflect.Message { // Deprecated: Use FixedTestContainer.ProtoReflect.Descriptor instead. 
func (*FixedTestContainer) Descriptor() ([]byte, []int) { - return file_proto_ssz_query_ssz_query_proto_rawDescGZIP(), []int{1} + return file_proto_ssz_query_testing_test_containers_proto_rawDescGZIP(), []int{1} } func (x *FixedTestContainer) GetFieldUint32() uint32 { @@ -202,7 +202,7 @@ type VariableNestedContainer struct { func (x *VariableNestedContainer) Reset() { *x = VariableNestedContainer{} - mi := &file_proto_ssz_query_ssz_query_proto_msgTypes[2] + mi := &file_proto_ssz_query_testing_test_containers_proto_msgTypes[2] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -214,7 +214,7 @@ func (x *VariableNestedContainer) String() string { func (*VariableNestedContainer) ProtoMessage() {} func (x *VariableNestedContainer) ProtoReflect() protoreflect.Message { - mi := &file_proto_ssz_query_ssz_query_proto_msgTypes[2] + mi := &file_proto_ssz_query_testing_test_containers_proto_msgTypes[2] if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -227,7 +227,7 @@ func (x *VariableNestedContainer) ProtoReflect() protoreflect.Message { // Deprecated: Use VariableNestedContainer.ProtoReflect.Descriptor instead. func (*VariableNestedContainer) Descriptor() ([]byte, []int) { - return file_proto_ssz_query_ssz_query_proto_rawDescGZIP(), []int{2} + return file_proto_ssz_query_testing_test_containers_proto_rawDescGZIP(), []int{2} } func (x *VariableNestedContainer) GetValue1() uint64 { @@ -261,7 +261,7 @@ type VariableOuterContainer struct { func (x *VariableOuterContainer) Reset() { *x = VariableOuterContainer{} - mi := &file_proto_ssz_query_ssz_query_proto_msgTypes[3] + mi := &file_proto_ssz_query_testing_test_containers_proto_msgTypes[3] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -273,7 +273,7 @@ func (x *VariableOuterContainer) String() string { func (*VariableOuterContainer) ProtoMessage() {} func (x *VariableOuterContainer) ProtoReflect() protoreflect.Message { - mi := &file_proto_ssz_query_ssz_query_proto_msgTypes[3] + mi := &file_proto_ssz_query_testing_test_containers_proto_msgTypes[3] if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -286,7 +286,7 @@ func (x *VariableOuterContainer) ProtoReflect() protoreflect.Message { // Deprecated: Use VariableOuterContainer.ProtoReflect.Descriptor instead. 
func (*VariableOuterContainer) Descriptor() ([]byte, []int) { - return file_proto_ssz_query_ssz_query_proto_rawDescGZIP(), []int{3} + return file_proto_ssz_query_testing_test_containers_proto_rawDescGZIP(), []int{3} } func (x *VariableOuterContainer) GetInner_1() *VariableNestedContainer { @@ -320,7 +320,7 @@ type VariableTestContainer struct { func (x *VariableTestContainer) Reset() { *x = VariableTestContainer{} - mi := &file_proto_ssz_query_ssz_query_proto_msgTypes[4] + mi := &file_proto_ssz_query_testing_test_containers_proto_msgTypes[4] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -332,7 +332,7 @@ func (x *VariableTestContainer) String() string { func (*VariableTestContainer) ProtoMessage() {} func (x *VariableTestContainer) ProtoReflect() protoreflect.Message { - mi := &file_proto_ssz_query_ssz_query_proto_msgTypes[4] + mi := &file_proto_ssz_query_testing_test_containers_proto_msgTypes[4] if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -345,7 +345,7 @@ func (x *VariableTestContainer) ProtoReflect() protoreflect.Message { // Deprecated: Use VariableTestContainer.ProtoReflect.Descriptor instead. func (*VariableTestContainer) Descriptor() ([]byte, []int) { - return file_proto_ssz_query_ssz_query_proto_rawDescGZIP(), []int{4} + return file_proto_ssz_query_testing_test_containers_proto_rawDescGZIP(), []int{4} } func (x *VariableTestContainer) GetLeadingField() []byte { @@ -411,100 +411,100 @@ func (x *VariableTestContainer) GetTrailingField() []byte { return nil } -var File_proto_ssz_query_ssz_query_proto protoreflect.FileDescriptor +var File_proto_ssz_query_testing_test_containers_proto protoreflect.FileDescriptor -var file_proto_ssz_query_ssz_query_proto_rawDesc = []byte{ - 0x0a, 0x1f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2f, 0x73, 0x73, 0x7a, 0x5f, 0x71, 0x75, 0x65, 0x72, - 0x79, 0x2f, 0x73, 0x73, 0x7a, 0x5f, 0x71, 0x75, 0x65, 0x72, 0x79, 0x2e, 0x70, 0x72, 0x6f, 0x74, - 0x6f, 0x12, 0x09, 0x73, 0x73, 0x7a, 0x5f, 0x71, 0x75, 0x65, 0x72, 0x79, 0x1a, 0x1b, 0x70, 0x72, - 0x6f, 0x74, 0x6f, 0x2f, 0x65, 0x74, 0x68, 0x2f, 0x65, 0x78, 0x74, 0x2f, 0x6f, 0x70, 0x74, 0x69, - 0x6f, 0x6e, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x22, 0x4e, 0x0a, 0x14, 0x46, 0x69, 0x78, - 0x65, 0x64, 0x4e, 0x65, 0x73, 0x74, 0x65, 0x64, 0x43, 0x6f, 0x6e, 0x74, 0x61, 0x69, 0x6e, 0x65, - 0x72, 0x12, 0x16, 0x0a, 0x06, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x31, 0x18, 0x01, 0x20, 0x01, 0x28, - 0x04, 0x52, 0x06, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x31, 0x12, 0x1e, 0x0a, 0x06, 0x76, 0x61, 0x6c, - 0x75, 0x65, 0x32, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x06, 0x8a, 0xb5, 0x18, 0x02, 0x33, - 0x32, 0x52, 0x06, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x32, 0x22, 0xd2, 0x04, 0x0a, 0x12, 0x46, 0x69, - 0x78, 0x65, 0x64, 0x54, 0x65, 0x73, 0x74, 0x43, 0x6f, 0x6e, 0x74, 0x61, 0x69, 0x6e, 0x65, 0x72, - 0x12, 0x21, 0x0a, 0x0c, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x5f, 0x75, 0x69, 0x6e, 0x74, 0x33, 0x32, - 0x18, 0x01, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x0b, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x55, 0x69, 0x6e, - 0x74, 0x33, 0x32, 0x12, 0x21, 0x0a, 0x0c, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x5f, 0x75, 0x69, 0x6e, - 0x74, 0x36, 0x34, 0x18, 0x02, 0x20, 0x01, 0x28, 0x04, 0x52, 0x0b, 0x66, 0x69, 0x65, 0x6c, 0x64, - 0x55, 0x69, 0x6e, 0x74, 0x36, 0x34, 0x12, 0x1d, 0x0a, 0x0a, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x5f, - 0x62, 0x6f, 0x6f, 0x6c, 0x18, 0x03, 0x20, 0x01, 0x28, 0x08, 0x52, 0x09, 0x66, 0x69, 0x65, 0x6c, - 0x64, 0x42, 0x6f, 0x6f, 0x6c, 0x12, 0x2b, 0x0a, 0x0d, 0x66, 0x69, 0x65, 0x6c, 0x64, 
0x5f, 0x62, - 0x79, 0x74, 0x65, 0x73, 0x33, 0x32, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x06, 0x8a, 0xb5, - 0x18, 0x02, 0x33, 0x32, 0x52, 0x0c, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x42, 0x79, 0x74, 0x65, 0x73, - 0x33, 0x32, 0x12, 0x37, 0x0a, 0x06, 0x6e, 0x65, 0x73, 0x74, 0x65, 0x64, 0x18, 0x05, 0x20, 0x01, - 0x28, 0x0b, 0x32, 0x1f, 0x2e, 0x73, 0x73, 0x7a, 0x5f, 0x71, 0x75, 0x65, 0x72, 0x79, 0x2e, 0x46, - 0x69, 0x78, 0x65, 0x64, 0x4e, 0x65, 0x73, 0x74, 0x65, 0x64, 0x43, 0x6f, 0x6e, 0x74, 0x61, 0x69, - 0x6e, 0x65, 0x72, 0x52, 0x06, 0x6e, 0x65, 0x73, 0x74, 0x65, 0x64, 0x12, 0x29, 0x0a, 0x0c, 0x76, - 0x65, 0x63, 0x74, 0x6f, 0x72, 0x5f, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x18, 0x06, 0x20, 0x03, 0x28, - 0x04, 0x42, 0x06, 0x8a, 0xb5, 0x18, 0x02, 0x32, 0x34, 0x52, 0x0b, 0x76, 0x65, 0x63, 0x74, 0x6f, - 0x72, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x12, 0x43, 0x0a, 0x19, 0x74, 0x77, 0x6f, 0x5f, 0x64, 0x69, - 0x6d, 0x65, 0x6e, 0x73, 0x69, 0x6f, 0x6e, 0x5f, 0x62, 0x79, 0x74, 0x65, 0x73, 0x5f, 0x66, 0x69, - 0x65, 0x6c, 0x64, 0x18, 0x07, 0x20, 0x03, 0x28, 0x0c, 0x42, 0x08, 0x8a, 0xb5, 0x18, 0x04, 0x35, - 0x2c, 0x33, 0x32, 0x52, 0x16, 0x74, 0x77, 0x6f, 0x44, 0x69, 0x6d, 0x65, 0x6e, 0x73, 0x69, 0x6f, - 0x6e, 0x42, 0x79, 0x74, 0x65, 0x73, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x12, 0x66, 0x0a, 0x11, 0x62, - 0x69, 0x74, 0x76, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x36, 0x34, 0x5f, 0x66, 0x69, 0x65, 0x6c, 0x64, - 0x18, 0x08, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x39, 0x82, 0xb5, 0x18, 0x30, 0x67, 0x69, 0x74, 0x68, - 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x61, 0x74, 0x69, 0x63, - 0x6c, 0x61, 0x62, 0x73, 0x2f, 0x67, 0x6f, 0x2d, 0x62, 0x69, 0x74, 0x66, 0x69, 0x65, 0x6c, 0x64, - 0x2e, 0x42, 0x69, 0x74, 0x76, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x36, 0x34, 0x8a, 0xb5, 0x18, 0x01, - 0x38, 0x52, 0x10, 0x62, 0x69, 0x74, 0x76, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x36, 0x34, 0x46, 0x69, - 0x65, 0x6c, 0x64, 0x12, 0x6a, 0x0a, 0x12, 0x62, 0x69, 0x74, 0x76, 0x65, 0x63, 0x74, 0x6f, 0x72, - 0x35, 0x31, 0x32, 0x5f, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x18, 0x09, 0x20, 0x01, 0x28, 0x0c, 0x42, - 0x3b, 0x82, 0xb5, 0x18, 0x31, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, - 0x70, 0x72, 0x79, 0x73, 0x6d, 0x61, 0x74, 0x69, 0x63, 0x6c, 0x61, 0x62, 0x73, 0x2f, 0x67, 0x6f, - 0x2d, 0x62, 0x69, 0x74, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x2e, 0x42, 0x69, 0x74, 0x76, 0x65, 0x63, - 0x74, 0x6f, 0x72, 0x35, 0x31, 0x32, 0x8a, 0xb5, 0x18, 0x02, 0x36, 0x34, 0x52, 0x11, 0x62, 0x69, - 0x74, 0x76, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x35, 0x31, 0x32, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x12, - 0x2d, 0x0a, 0x0e, 0x74, 0x72, 0x61, 0x69, 0x6c, 0x69, 0x6e, 0x67, 0x5f, 0x66, 0x69, 0x65, 0x6c, - 0x64, 0x18, 0x0a, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x06, 0x8a, 0xb5, 0x18, 0x02, 0x35, 0x36, 0x52, - 0x0d, 0x74, 0x72, 0x61, 0x69, 0x6c, 0x69, 0x6e, 0x67, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x22, 0xa5, - 0x01, 0x0a, 0x17, 0x56, 0x61, 0x72, 0x69, 0x61, 0x62, 0x6c, 0x65, 0x4e, 0x65, 0x73, 0x74, 0x65, - 0x64, 0x43, 0x6f, 0x6e, 0x74, 0x61, 0x69, 0x6e, 0x65, 0x72, 0x12, 0x16, 0x0a, 0x06, 0x76, 0x61, - 0x6c, 0x75, 0x65, 0x31, 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, 0x52, 0x06, 0x76, 0x61, 0x6c, 0x75, - 0x65, 0x31, 0x12, 0x33, 0x0a, 0x11, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x5f, 0x6c, 0x69, 0x73, 0x74, - 0x5f, 0x75, 0x69, 0x6e, 0x74, 0x36, 0x34, 0x18, 0x02, 0x20, 0x03, 0x28, 0x04, 0x42, 0x07, 0x92, - 0xb5, 0x18, 0x03, 0x31, 0x30, 0x30, 0x52, 0x0f, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x4c, 0x69, 0x73, - 0x74, 0x55, 0x69, 0x6e, 0x74, 0x36, 0x34, 0x12, 0x3d, 0x0a, 0x11, 0x6e, 0x65, 0x73, 0x74, 0x65, - 0x64, 0x5f, 
0x6c, 0x69, 0x73, 0x74, 0x5f, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x18, 0x03, 0x20, 0x03, - 0x28, 0x0c, 0x42, 0x11, 0x8a, 0xb5, 0x18, 0x03, 0x3f, 0x2c, 0x3f, 0x92, 0xb5, 0x18, 0x06, 0x31, - 0x30, 0x30, 0x2c, 0x35, 0x30, 0x52, 0x0f, 0x6e, 0x65, 0x73, 0x74, 0x65, 0x64, 0x4c, 0x69, 0x73, - 0x74, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x22, 0x92, 0x01, 0x0a, 0x16, 0x56, 0x61, 0x72, 0x69, 0x61, - 0x62, 0x6c, 0x65, 0x4f, 0x75, 0x74, 0x65, 0x72, 0x43, 0x6f, 0x6e, 0x74, 0x61, 0x69, 0x6e, 0x65, - 0x72, 0x12, 0x3b, 0x0a, 0x07, 0x69, 0x6e, 0x6e, 0x65, 0x72, 0x5f, 0x31, 0x18, 0x01, 0x20, 0x01, - 0x28, 0x0b, 0x32, 0x22, 0x2e, 0x73, 0x73, 0x7a, 0x5f, 0x71, 0x75, 0x65, 0x72, 0x79, 0x2e, 0x56, - 0x61, 0x72, 0x69, 0x61, 0x62, 0x6c, 0x65, 0x4e, 0x65, 0x73, 0x74, 0x65, 0x64, 0x43, 0x6f, 0x6e, - 0x74, 0x61, 0x69, 0x6e, 0x65, 0x72, 0x52, 0x06, 0x69, 0x6e, 0x6e, 0x65, 0x72, 0x31, 0x12, 0x3b, - 0x0a, 0x07, 0x69, 0x6e, 0x6e, 0x65, 0x72, 0x5f, 0x32, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, - 0x22, 0x2e, 0x73, 0x73, 0x7a, 0x5f, 0x71, 0x75, 0x65, 0x72, 0x79, 0x2e, 0x56, 0x61, 0x72, 0x69, - 0x61, 0x62, 0x6c, 0x65, 0x4e, 0x65, 0x73, 0x74, 0x65, 0x64, 0x43, 0x6f, 0x6e, 0x74, 0x61, 0x69, - 0x6e, 0x65, 0x72, 0x52, 0x06, 0x69, 0x6e, 0x6e, 0x65, 0x72, 0x32, 0x22, 0x81, 0x05, 0x0a, 0x15, - 0x56, 0x61, 0x72, 0x69, 0x61, 0x62, 0x6c, 0x65, 0x54, 0x65, 0x73, 0x74, 0x43, 0x6f, 0x6e, 0x74, - 0x61, 0x69, 0x6e, 0x65, 0x72, 0x12, 0x2b, 0x0a, 0x0d, 0x6c, 0x65, 0x61, 0x64, 0x69, 0x6e, 0x67, - 0x5f, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x06, 0x8a, 0xb5, - 0x18, 0x02, 0x33, 0x32, 0x52, 0x0c, 0x6c, 0x65, 0x61, 0x64, 0x69, 0x6e, 0x67, 0x46, 0x69, 0x65, - 0x6c, 0x64, 0x12, 0x34, 0x0a, 0x11, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x5f, 0x6c, 0x69, 0x73, 0x74, - 0x5f, 0x75, 0x69, 0x6e, 0x74, 0x36, 0x34, 0x18, 0x02, 0x20, 0x03, 0x28, 0x04, 0x42, 0x08, 0x92, - 0xb5, 0x18, 0x04, 0x32, 0x30, 0x34, 0x38, 0x52, 0x0f, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x4c, 0x69, - 0x73, 0x74, 0x55, 0x69, 0x6e, 0x74, 0x36, 0x34, 0x12, 0x5a, 0x0a, 0x14, 0x66, 0x69, 0x65, 0x6c, - 0x64, 0x5f, 0x6c, 0x69, 0x73, 0x74, 0x5f, 0x63, 0x6f, 0x6e, 0x74, 0x61, 0x69, 0x6e, 0x65, 0x72, - 0x18, 0x03, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1f, 0x2e, 0x73, 0x73, 0x7a, 0x5f, 0x71, 0x75, 0x65, - 0x72, 0x79, 0x2e, 0x46, 0x69, 0x78, 0x65, 0x64, 0x4e, 0x65, 0x73, 0x74, 0x65, 0x64, 0x43, 0x6f, - 0x6e, 0x74, 0x61, 0x69, 0x6e, 0x65, 0x72, 0x42, 0x07, 0x92, 0xb5, 0x18, 0x03, 0x31, 0x32, 0x38, - 0x52, 0x12, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x4c, 0x69, 0x73, 0x74, 0x43, 0x6f, 0x6e, 0x74, 0x61, - 0x69, 0x6e, 0x65, 0x72, 0x12, 0x3d, 0x0a, 0x12, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x5f, 0x6c, 0x69, - 0x73, 0x74, 0x5f, 0x62, 0x79, 0x74, 0x65, 0x73, 0x33, 0x32, 0x18, 0x04, 0x20, 0x03, 0x28, 0x0c, - 0x42, 0x0f, 0x8a, 0xb5, 0x18, 0x04, 0x3f, 0x2c, 0x33, 0x32, 0x92, 0xb5, 0x18, 0x03, 0x31, 0x30, - 0x30, 0x52, 0x10, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x4c, 0x69, 0x73, 0x74, 0x42, 0x79, 0x74, 0x65, - 0x73, 0x33, 0x32, 0x12, 0x3a, 0x0a, 0x06, 0x6e, 0x65, 0x73, 0x74, 0x65, 0x64, 0x18, 0x05, 0x20, - 0x01, 0x28, 0x0b, 0x32, 0x22, 0x2e, 0x73, 0x73, 0x7a, 0x5f, 0x71, 0x75, 0x65, 0x72, 0x79, 0x2e, - 0x56, 0x61, 0x72, 0x69, 0x61, 0x62, 0x6c, 0x65, 0x4e, 0x65, 0x73, 0x74, 0x65, 0x64, 0x43, 0x6f, - 0x6e, 0x74, 0x61, 0x69, 0x6e, 0x65, 0x72, 0x52, 0x06, 0x6e, 0x65, 0x73, 0x74, 0x65, 0x64, 0x12, - 0x61, 0x0a, 0x17, 0x76, 0x61, 0x72, 0x69, 0x61, 0x62, 0x6c, 0x65, 0x5f, 0x63, 0x6f, 0x6e, 0x74, - 0x61, 0x69, 0x6e, 0x65, 0x72, 0x5f, 0x6c, 0x69, 0x73, 0x74, 0x18, 0x06, 0x20, 0x03, 0x28, 0x0b, - 0x32, 0x21, 0x2e, 0x73, 0x73, 0x7a, 
0x5f, 0x71, 0x75, 0x65, 0x72, 0x79, 0x2e, 0x56, 0x61, 0x72, +var file_proto_ssz_query_testing_test_containers_proto_rawDesc = []byte{ + 0x0a, 0x2d, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2f, 0x73, 0x73, 0x7a, 0x5f, 0x71, 0x75, 0x65, 0x72, + 0x79, 0x2f, 0x74, 0x65, 0x73, 0x74, 0x69, 0x6e, 0x67, 0x2f, 0x74, 0x65, 0x73, 0x74, 0x5f, 0x63, + 0x6f, 0x6e, 0x74, 0x61, 0x69, 0x6e, 0x65, 0x72, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, + 0x07, 0x74, 0x65, 0x73, 0x74, 0x69, 0x6e, 0x67, 0x1a, 0x1b, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2f, + 0x65, 0x74, 0x68, 0x2f, 0x65, 0x78, 0x74, 0x2f, 0x6f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x2e, + 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x22, 0x4e, 0x0a, 0x14, 0x46, 0x69, 0x78, 0x65, 0x64, 0x4e, 0x65, + 0x73, 0x74, 0x65, 0x64, 0x43, 0x6f, 0x6e, 0x74, 0x61, 0x69, 0x6e, 0x65, 0x72, 0x12, 0x16, 0x0a, + 0x06, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x31, 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, 0x52, 0x06, 0x76, + 0x61, 0x6c, 0x75, 0x65, 0x31, 0x12, 0x1e, 0x0a, 0x06, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x32, 0x18, + 0x02, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x06, 0x8a, 0xb5, 0x18, 0x02, 0x33, 0x32, 0x52, 0x06, 0x76, + 0x61, 0x6c, 0x75, 0x65, 0x32, 0x22, 0xd0, 0x04, 0x0a, 0x12, 0x46, 0x69, 0x78, 0x65, 0x64, 0x54, + 0x65, 0x73, 0x74, 0x43, 0x6f, 0x6e, 0x74, 0x61, 0x69, 0x6e, 0x65, 0x72, 0x12, 0x21, 0x0a, 0x0c, + 0x66, 0x69, 0x65, 0x6c, 0x64, 0x5f, 0x75, 0x69, 0x6e, 0x74, 0x33, 0x32, 0x18, 0x01, 0x20, 0x01, + 0x28, 0x0d, 0x52, 0x0b, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x55, 0x69, 0x6e, 0x74, 0x33, 0x32, 0x12, + 0x21, 0x0a, 0x0c, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x5f, 0x75, 0x69, 0x6e, 0x74, 0x36, 0x34, 0x18, + 0x02, 0x20, 0x01, 0x28, 0x04, 0x52, 0x0b, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x55, 0x69, 0x6e, 0x74, + 0x36, 0x34, 0x12, 0x1d, 0x0a, 0x0a, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x5f, 0x62, 0x6f, 0x6f, 0x6c, + 0x18, 0x03, 0x20, 0x01, 0x28, 0x08, 0x52, 0x09, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x42, 0x6f, 0x6f, + 0x6c, 0x12, 0x2b, 0x0a, 0x0d, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x5f, 0x62, 0x79, 0x74, 0x65, 0x73, + 0x33, 0x32, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x06, 0x8a, 0xb5, 0x18, 0x02, 0x33, 0x32, + 0x52, 0x0c, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x42, 0x79, 0x74, 0x65, 0x73, 0x33, 0x32, 0x12, 0x35, + 0x0a, 0x06, 0x6e, 0x65, 0x73, 0x74, 0x65, 0x64, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1d, + 0x2e, 0x74, 0x65, 0x73, 0x74, 0x69, 0x6e, 0x67, 0x2e, 0x46, 0x69, 0x78, 0x65, 0x64, 0x4e, 0x65, + 0x73, 0x74, 0x65, 0x64, 0x43, 0x6f, 0x6e, 0x74, 0x61, 0x69, 0x6e, 0x65, 0x72, 0x52, 0x06, 0x6e, + 0x65, 0x73, 0x74, 0x65, 0x64, 0x12, 0x29, 0x0a, 0x0c, 0x76, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x5f, + 0x66, 0x69, 0x65, 0x6c, 0x64, 0x18, 0x06, 0x20, 0x03, 0x28, 0x04, 0x42, 0x06, 0x8a, 0xb5, 0x18, + 0x02, 0x32, 0x34, 0x52, 0x0b, 0x76, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x46, 0x69, 0x65, 0x6c, 0x64, + 0x12, 0x43, 0x0a, 0x19, 0x74, 0x77, 0x6f, 0x5f, 0x64, 0x69, 0x6d, 0x65, 0x6e, 0x73, 0x69, 0x6f, + 0x6e, 0x5f, 0x62, 0x79, 0x74, 0x65, 0x73, 0x5f, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x18, 0x07, 0x20, + 0x03, 0x28, 0x0c, 0x42, 0x08, 0x8a, 0xb5, 0x18, 0x04, 0x35, 0x2c, 0x33, 0x32, 0x52, 0x16, 0x74, + 0x77, 0x6f, 0x44, 0x69, 0x6d, 0x65, 0x6e, 0x73, 0x69, 0x6f, 0x6e, 0x42, 0x79, 0x74, 0x65, 0x73, + 0x46, 0x69, 0x65, 0x6c, 0x64, 0x12, 0x66, 0x0a, 0x11, 0x62, 0x69, 0x74, 0x76, 0x65, 0x63, 0x74, + 0x6f, 0x72, 0x36, 0x34, 0x5f, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x18, 0x08, 0x20, 0x01, 0x28, 0x0c, + 0x42, 0x39, 0x82, 0xb5, 0x18, 0x30, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, + 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x61, 0x74, 0x69, 0x63, 0x6c, 0x61, 0x62, 0x73, 
0x2f, 0x67, + 0x6f, 0x2d, 0x62, 0x69, 0x74, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x2e, 0x42, 0x69, 0x74, 0x76, 0x65, + 0x63, 0x74, 0x6f, 0x72, 0x36, 0x34, 0x8a, 0xb5, 0x18, 0x01, 0x38, 0x52, 0x10, 0x62, 0x69, 0x74, + 0x76, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x36, 0x34, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x12, 0x6a, 0x0a, + 0x12, 0x62, 0x69, 0x74, 0x76, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x35, 0x31, 0x32, 0x5f, 0x66, 0x69, + 0x65, 0x6c, 0x64, 0x18, 0x09, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x3b, 0x82, 0xb5, 0x18, 0x31, 0x67, + 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x61, + 0x74, 0x69, 0x63, 0x6c, 0x61, 0x62, 0x73, 0x2f, 0x67, 0x6f, 0x2d, 0x62, 0x69, 0x74, 0x66, 0x69, + 0x65, 0x6c, 0x64, 0x2e, 0x42, 0x69, 0x74, 0x76, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x35, 0x31, 0x32, + 0x8a, 0xb5, 0x18, 0x02, 0x36, 0x34, 0x52, 0x11, 0x62, 0x69, 0x74, 0x76, 0x65, 0x63, 0x74, 0x6f, + 0x72, 0x35, 0x31, 0x32, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x12, 0x2d, 0x0a, 0x0e, 0x74, 0x72, 0x61, + 0x69, 0x6c, 0x69, 0x6e, 0x67, 0x5f, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x18, 0x0a, 0x20, 0x01, 0x28, + 0x0c, 0x42, 0x06, 0x8a, 0xb5, 0x18, 0x02, 0x35, 0x36, 0x52, 0x0d, 0x74, 0x72, 0x61, 0x69, 0x6c, + 0x69, 0x6e, 0x67, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x22, 0xa5, 0x01, 0x0a, 0x17, 0x56, 0x61, 0x72, + 0x69, 0x61, 0x62, 0x6c, 0x65, 0x4e, 0x65, 0x73, 0x74, 0x65, 0x64, 0x43, 0x6f, 0x6e, 0x74, 0x61, + 0x69, 0x6e, 0x65, 0x72, 0x12, 0x16, 0x0a, 0x06, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x31, 0x18, 0x01, + 0x20, 0x01, 0x28, 0x04, 0x52, 0x06, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x31, 0x12, 0x33, 0x0a, 0x11, + 0x66, 0x69, 0x65, 0x6c, 0x64, 0x5f, 0x6c, 0x69, 0x73, 0x74, 0x5f, 0x75, 0x69, 0x6e, 0x74, 0x36, + 0x34, 0x18, 0x02, 0x20, 0x03, 0x28, 0x04, 0x42, 0x07, 0x92, 0xb5, 0x18, 0x03, 0x31, 0x30, 0x30, + 0x52, 0x0f, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x4c, 0x69, 0x73, 0x74, 0x55, 0x69, 0x6e, 0x74, 0x36, + 0x34, 0x12, 0x3d, 0x0a, 0x11, 0x6e, 0x65, 0x73, 0x74, 0x65, 0x64, 0x5f, 0x6c, 0x69, 0x73, 0x74, + 0x5f, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x18, 0x03, 0x20, 0x03, 0x28, 0x0c, 0x42, 0x11, 0x8a, 0xb5, + 0x18, 0x03, 0x3f, 0x2c, 0x3f, 0x92, 0xb5, 0x18, 0x06, 0x31, 0x30, 0x30, 0x2c, 0x35, 0x30, 0x52, + 0x0f, 0x6e, 0x65, 0x73, 0x74, 0x65, 0x64, 0x4c, 0x69, 0x73, 0x74, 0x46, 0x69, 0x65, 0x6c, 0x64, + 0x22, 0x8e, 0x01, 0x0a, 0x16, 0x56, 0x61, 0x72, 0x69, 0x61, 0x62, 0x6c, 0x65, 0x4f, 0x75, 0x74, + 0x65, 0x72, 0x43, 0x6f, 0x6e, 0x74, 0x61, 0x69, 0x6e, 0x65, 0x72, 0x12, 0x39, 0x0a, 0x07, 0x69, + 0x6e, 0x6e, 0x65, 0x72, 0x5f, 0x31, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x20, 0x2e, 0x74, + 0x65, 0x73, 0x74, 0x69, 0x6e, 0x67, 0x2e, 0x56, 0x61, 0x72, 0x69, 0x61, 0x62, 0x6c, 0x65, 0x4e, + 0x65, 0x73, 0x74, 0x65, 0x64, 0x43, 0x6f, 0x6e, 0x74, 0x61, 0x69, 0x6e, 0x65, 0x72, 0x52, 0x06, + 0x69, 0x6e, 0x6e, 0x65, 0x72, 0x31, 0x12, 0x39, 0x0a, 0x07, 0x69, 0x6e, 0x6e, 0x65, 0x72, 0x5f, + 0x32, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x20, 0x2e, 0x74, 0x65, 0x73, 0x74, 0x69, 0x6e, + 0x67, 0x2e, 0x56, 0x61, 0x72, 0x69, 0x61, 0x62, 0x6c, 0x65, 0x4e, 0x65, 0x73, 0x74, 0x65, 0x64, + 0x43, 0x6f, 0x6e, 0x74, 0x61, 0x69, 0x6e, 0x65, 0x72, 0x52, 0x06, 0x69, 0x6e, 0x6e, 0x65, 0x72, + 0x32, 0x22, 0xfb, 0x04, 0x0a, 0x15, 0x56, 0x61, 0x72, 0x69, 0x61, 0x62, 0x6c, 0x65, 0x54, 0x65, + 0x73, 0x74, 0x43, 0x6f, 0x6e, 0x74, 0x61, 0x69, 0x6e, 0x65, 0x72, 0x12, 0x2b, 0x0a, 0x0d, 0x6c, + 0x65, 0x61, 0x64, 0x69, 0x6e, 0x67, 0x5f, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x18, 0x01, 0x20, 0x01, + 0x28, 0x0c, 0x42, 0x06, 0x8a, 0xb5, 0x18, 0x02, 0x33, 0x32, 0x52, 0x0c, 0x6c, 0x65, 0x61, 0x64, + 0x69, 0x6e, 
0x67, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x12, 0x34, 0x0a, 0x11, 0x66, 0x69, 0x65, 0x6c, + 0x64, 0x5f, 0x6c, 0x69, 0x73, 0x74, 0x5f, 0x75, 0x69, 0x6e, 0x74, 0x36, 0x34, 0x18, 0x02, 0x20, + 0x03, 0x28, 0x04, 0x42, 0x08, 0x92, 0xb5, 0x18, 0x04, 0x32, 0x30, 0x34, 0x38, 0x52, 0x0f, 0x66, + 0x69, 0x65, 0x6c, 0x64, 0x4c, 0x69, 0x73, 0x74, 0x55, 0x69, 0x6e, 0x74, 0x36, 0x34, 0x12, 0x58, + 0x0a, 0x14, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x5f, 0x6c, 0x69, 0x73, 0x74, 0x5f, 0x63, 0x6f, 0x6e, + 0x74, 0x61, 0x69, 0x6e, 0x65, 0x72, 0x18, 0x03, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1d, 0x2e, 0x74, + 0x65, 0x73, 0x74, 0x69, 0x6e, 0x67, 0x2e, 0x46, 0x69, 0x78, 0x65, 0x64, 0x4e, 0x65, 0x73, 0x74, + 0x65, 0x64, 0x43, 0x6f, 0x6e, 0x74, 0x61, 0x69, 0x6e, 0x65, 0x72, 0x42, 0x07, 0x92, 0xb5, 0x18, + 0x03, 0x31, 0x32, 0x38, 0x52, 0x12, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x4c, 0x69, 0x73, 0x74, 0x43, + 0x6f, 0x6e, 0x74, 0x61, 0x69, 0x6e, 0x65, 0x72, 0x12, 0x3d, 0x0a, 0x12, 0x66, 0x69, 0x65, 0x6c, + 0x64, 0x5f, 0x6c, 0x69, 0x73, 0x74, 0x5f, 0x62, 0x79, 0x74, 0x65, 0x73, 0x33, 0x32, 0x18, 0x04, + 0x20, 0x03, 0x28, 0x0c, 0x42, 0x0f, 0x8a, 0xb5, 0x18, 0x04, 0x3f, 0x2c, 0x33, 0x32, 0x92, 0xb5, + 0x18, 0x03, 0x31, 0x30, 0x30, 0x52, 0x10, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x4c, 0x69, 0x73, 0x74, + 0x42, 0x79, 0x74, 0x65, 0x73, 0x33, 0x32, 0x12, 0x38, 0x0a, 0x06, 0x6e, 0x65, 0x73, 0x74, 0x65, + 0x64, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x20, 0x2e, 0x74, 0x65, 0x73, 0x74, 0x69, 0x6e, + 0x67, 0x2e, 0x56, 0x61, 0x72, 0x69, 0x61, 0x62, 0x6c, 0x65, 0x4e, 0x65, 0x73, 0x74, 0x65, 0x64, + 0x43, 0x6f, 0x6e, 0x74, 0x61, 0x69, 0x6e, 0x65, 0x72, 0x52, 0x06, 0x6e, 0x65, 0x73, 0x74, 0x65, + 0x64, 0x12, 0x5f, 0x0a, 0x17, 0x76, 0x61, 0x72, 0x69, 0x61, 0x62, 0x6c, 0x65, 0x5f, 0x63, 0x6f, + 0x6e, 0x74, 0x61, 0x69, 0x6e, 0x65, 0x72, 0x5f, 0x6c, 0x69, 0x73, 0x74, 0x18, 0x06, 0x20, 0x03, + 0x28, 0x0b, 0x32, 0x1f, 0x2e, 0x74, 0x65, 0x73, 0x74, 0x69, 0x6e, 0x67, 0x2e, 0x56, 0x61, 0x72, 0x69, 0x61, 0x62, 0x6c, 0x65, 0x4f, 0x75, 0x74, 0x65, 0x72, 0x43, 0x6f, 0x6e, 0x74, 0x61, 0x69, 0x6e, 0x65, 0x72, 0x42, 0x06, 0x92, 0xb5, 0x18, 0x02, 0x31, 0x30, 0x52, 0x15, 0x76, 0x61, 0x72, 0x69, 0x61, 0x62, 0x6c, 0x65, 0x43, 0x6f, 0x6e, 0x74, 0x61, 0x69, 0x6e, 0x65, 0x72, 0x4c, 0x69, @@ -521,39 +521,40 @@ var file_proto_ssz_query_ssz_query_proto_rawDesc = []byte{ 0x12, 0x2d, 0x0a, 0x0e, 0x74, 0x72, 0x61, 0x69, 0x6c, 0x69, 0x6e, 0x67, 0x5f, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x18, 0x09, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x06, 0x8a, 0xb5, 0x18, 0x02, 0x35, 0x36, 0x52, 0x0d, 0x74, 0x72, 0x61, 0x69, 0x6c, 0x69, 0x6e, 0x67, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x42, - 0x32, 0x5a, 0x30, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, + 0x42, 0x5a, 0x40, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2f, 0x73, 0x73, 0x7a, 0x5f, 0x71, 0x75, - 0x65, 0x72, 0x79, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, + 0x65, 0x72, 0x79, 0x2f, 0x74, 0x65, 0x73, 0x74, 0x69, 0x6e, 0x67, 0x3b, 0x74, 0x65, 0x73, 0x74, + 0x69, 0x6e, 0x67, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, } var ( - file_proto_ssz_query_ssz_query_proto_rawDescOnce sync.Once - file_proto_ssz_query_ssz_query_proto_rawDescData = file_proto_ssz_query_ssz_query_proto_rawDesc + file_proto_ssz_query_testing_test_containers_proto_rawDescOnce sync.Once + file_proto_ssz_query_testing_test_containers_proto_rawDescData = 
file_proto_ssz_query_testing_test_containers_proto_rawDesc ) -func file_proto_ssz_query_ssz_query_proto_rawDescGZIP() []byte { - file_proto_ssz_query_ssz_query_proto_rawDescOnce.Do(func() { - file_proto_ssz_query_ssz_query_proto_rawDescData = protoimpl.X.CompressGZIP(file_proto_ssz_query_ssz_query_proto_rawDescData) +func file_proto_ssz_query_testing_test_containers_proto_rawDescGZIP() []byte { + file_proto_ssz_query_testing_test_containers_proto_rawDescOnce.Do(func() { + file_proto_ssz_query_testing_test_containers_proto_rawDescData = protoimpl.X.CompressGZIP(file_proto_ssz_query_testing_test_containers_proto_rawDescData) }) - return file_proto_ssz_query_ssz_query_proto_rawDescData + return file_proto_ssz_query_testing_test_containers_proto_rawDescData } -var file_proto_ssz_query_ssz_query_proto_msgTypes = make([]protoimpl.MessageInfo, 5) -var file_proto_ssz_query_ssz_query_proto_goTypes = []any{ - (*FixedNestedContainer)(nil), // 0: ssz_query.FixedNestedContainer - (*FixedTestContainer)(nil), // 1: ssz_query.FixedTestContainer - (*VariableNestedContainer)(nil), // 2: ssz_query.VariableNestedContainer - (*VariableOuterContainer)(nil), // 3: ssz_query.VariableOuterContainer - (*VariableTestContainer)(nil), // 4: ssz_query.VariableTestContainer +var file_proto_ssz_query_testing_test_containers_proto_msgTypes = make([]protoimpl.MessageInfo, 5) +var file_proto_ssz_query_testing_test_containers_proto_goTypes = []any{ + (*FixedNestedContainer)(nil), // 0: testing.FixedNestedContainer + (*FixedTestContainer)(nil), // 1: testing.FixedTestContainer + (*VariableNestedContainer)(nil), // 2: testing.VariableNestedContainer + (*VariableOuterContainer)(nil), // 3: testing.VariableOuterContainer + (*VariableTestContainer)(nil), // 4: testing.VariableTestContainer } -var file_proto_ssz_query_ssz_query_proto_depIdxs = []int32{ - 0, // 0: ssz_query.FixedTestContainer.nested:type_name -> ssz_query.FixedNestedContainer - 2, // 1: ssz_query.VariableOuterContainer.inner_1:type_name -> ssz_query.VariableNestedContainer - 2, // 2: ssz_query.VariableOuterContainer.inner_2:type_name -> ssz_query.VariableNestedContainer - 0, // 3: ssz_query.VariableTestContainer.field_list_container:type_name -> ssz_query.FixedNestedContainer - 2, // 4: ssz_query.VariableTestContainer.nested:type_name -> ssz_query.VariableNestedContainer - 3, // 5: ssz_query.VariableTestContainer.variable_container_list:type_name -> ssz_query.VariableOuterContainer +var file_proto_ssz_query_testing_test_containers_proto_depIdxs = []int32{ + 0, // 0: testing.FixedTestContainer.nested:type_name -> testing.FixedNestedContainer + 2, // 1: testing.VariableOuterContainer.inner_1:type_name -> testing.VariableNestedContainer + 2, // 2: testing.VariableOuterContainer.inner_2:type_name -> testing.VariableNestedContainer + 0, // 3: testing.VariableTestContainer.field_list_container:type_name -> testing.FixedNestedContainer + 2, // 4: testing.VariableTestContainer.nested:type_name -> testing.VariableNestedContainer + 3, // 5: testing.VariableTestContainer.variable_container_list:type_name -> testing.VariableOuterContainer 6, // [6:6] is the sub-list for method output_type 6, // [6:6] is the sub-list for method input_type 6, // [6:6] is the sub-list for extension type_name @@ -561,27 +562,27 @@ var file_proto_ssz_query_ssz_query_proto_depIdxs = []int32{ 0, // [0:6] is the sub-list for field type_name } -func init() { file_proto_ssz_query_ssz_query_proto_init() } -func file_proto_ssz_query_ssz_query_proto_init() { - if File_proto_ssz_query_ssz_query_proto != nil 
{ +func init() { file_proto_ssz_query_testing_test_containers_proto_init() } +func file_proto_ssz_query_testing_test_containers_proto_init() { + if File_proto_ssz_query_testing_test_containers_proto != nil { return } type x struct{} out := protoimpl.TypeBuilder{ File: protoimpl.DescBuilder{ GoPackagePath: reflect.TypeOf(x{}).PkgPath(), - RawDescriptor: file_proto_ssz_query_ssz_query_proto_rawDesc, + RawDescriptor: file_proto_ssz_query_testing_test_containers_proto_rawDesc, NumEnums: 0, NumMessages: 5, NumExtensions: 0, NumServices: 0, }, - GoTypes: file_proto_ssz_query_ssz_query_proto_goTypes, - DependencyIndexes: file_proto_ssz_query_ssz_query_proto_depIdxs, - MessageInfos: file_proto_ssz_query_ssz_query_proto_msgTypes, + GoTypes: file_proto_ssz_query_testing_test_containers_proto_goTypes, + DependencyIndexes: file_proto_ssz_query_testing_test_containers_proto_depIdxs, + MessageInfos: file_proto_ssz_query_testing_test_containers_proto_msgTypes, }.Build() - File_proto_ssz_query_ssz_query_proto = out.File - file_proto_ssz_query_ssz_query_proto_rawDesc = nil - file_proto_ssz_query_ssz_query_proto_goTypes = nil - file_proto_ssz_query_ssz_query_proto_depIdxs = nil + File_proto_ssz_query_testing_test_containers_proto = out.File + file_proto_ssz_query_testing_test_containers_proto_rawDesc = nil + file_proto_ssz_query_testing_test_containers_proto_goTypes = nil + file_proto_ssz_query_testing_test_containers_proto_depIdxs = nil } diff --git a/proto/ssz_query/ssz_query.proto b/proto/ssz_query/testing/test_containers.proto similarity index 99% rename from proto/ssz_query/ssz_query.proto rename to proto/ssz_query/testing/test_containers.proto index 4898eab1cb..a3a5656c22 100644 --- a/proto/ssz_query/ssz_query.proto +++ b/proto/ssz_query/testing/test_containers.proto @@ -1,10 +1,10 @@ syntax = "proto3"; -package ssz_query; +package testing; import "proto/eth/ext/options.proto"; -option go_package = "github.com/OffchainLabs/prysm/v6/proto/ssz_query"; +option go_package = "github.com/OffchainLabs/prysm/v6/proto/ssz_query/testing;testing"; // ===== FIXED-SIZE TEST CONTAINERS ===== diff --git a/proto/ssz_query/ssz_query.ssz.go b/proto/ssz_query/testing/test_containers.ssz.go similarity index 99% rename from proto/ssz_query/ssz_query.ssz.go rename to proto/ssz_query/testing/test_containers.ssz.go index 0a0f65fbc3..0e0c9c90fe 100644 --- a/proto/ssz_query/ssz_query.ssz.go +++ b/proto/ssz_query/testing/test_containers.ssz.go @@ -1,5 +1,5 @@ // Code generated by fastssz. DO NOT EDIT. -package ssz_query +package testing import ( ssz "github.com/prysmaticlabs/fastssz" diff --git a/proto/testing/test.pb.go b/proto/testing/test.pb.go index b5891dc142..7bb1f9c379 100755 --- a/proto/testing/test.pb.go +++ b/proto/testing/test.pb.go @@ -1,6 +1,6 @@ // Code generated by protoc-gen-go. DO NOT EDIT. 
// versions: -// protoc-gen-go v1.33.0 +// protoc-gen-go v1.36.3 // protoc v3.21.7 // source: proto/testing/test.proto @@ -73,21 +73,18 @@ func (Person_PhoneType) EnumDescriptor() ([]byte, []int) { } type TestMessage struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache + state protoimpl.MessageState `protogen:"open.v1"` + Foo string `protobuf:"bytes,1,opt,name=foo,proto3" json:"foo,omitempty"` + Bar string `protobuf:"bytes,2,opt,name=bar,proto3" json:"bar,omitempty" spec-name:"foo" ssz-size:"32"` unknownFields protoimpl.UnknownFields - - Foo string `protobuf:"bytes,1,opt,name=foo,proto3" json:"foo,omitempty"` - Bar string `protobuf:"bytes,2,opt,name=bar,proto3" json:"bar,omitempty" spec-name:"foo" ssz-size:"32"` + sizeCache protoimpl.SizeCache } func (x *TestMessage) Reset() { *x = TestMessage{} - if protoimpl.UnsafeEnabled { - mi := &file_proto_testing_test_proto_msgTypes[0] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_proto_testing_test_proto_msgTypes[0] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *TestMessage) String() string { @@ -98,7 +95,7 @@ func (*TestMessage) ProtoMessage() {} func (x *TestMessage) ProtoReflect() protoreflect.Message { mi := &file_proto_testing_test_proto_msgTypes[0] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -128,21 +125,18 @@ func (x *TestMessage) GetBar() string { } type TestNestedMessage struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache + state protoimpl.MessageState `protogen:"open.v1"` + Fuzz string `protobuf:"bytes,1,opt,name=fuzz,proto3" json:"fuzz,omitempty"` + Msg *TestMessage `protobuf:"bytes,2,opt,name=msg,proto3" json:"msg,omitempty"` unknownFields protoimpl.UnknownFields - - Fuzz string `protobuf:"bytes,1,opt,name=fuzz,proto3" json:"fuzz,omitempty"` - Msg *TestMessage `protobuf:"bytes,2,opt,name=msg,proto3" json:"msg,omitempty"` + sizeCache protoimpl.SizeCache } func (x *TestNestedMessage) Reset() { *x = TestNestedMessage{} - if protoimpl.UnsafeEnabled { - mi := &file_proto_testing_test_proto_msgTypes[1] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_proto_testing_test_proto_msgTypes[1] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *TestNestedMessage) String() string { @@ -153,7 +147,7 @@ func (*TestNestedMessage) ProtoMessage() {} func (x *TestNestedMessage) ProtoReflect() protoreflect.Message { mi := &file_proto_testing_test_proto_msgTypes[1] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -183,21 +177,18 @@ func (x *TestNestedMessage) GetMsg() *TestMessage { } type Puzzle struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache + state protoimpl.MessageState `protogen:"open.v1"` + Challenge string `protobuf:"bytes,1,opt,name=challenge,proto3" json:"challenge,omitempty"` + Answer string `protobuf:"bytes,2,opt,name=answer,proto3" json:"answer,omitempty"` unknownFields protoimpl.UnknownFields - - Challenge string `protobuf:"bytes,1,opt,name=challenge,proto3" json:"challenge,omitempty"` - Answer string `protobuf:"bytes,2,opt,name=answer,proto3" json:"answer,omitempty"` + sizeCache protoimpl.SizeCache } func (x *Puzzle) Reset() { *x = Puzzle{} 
- if protoimpl.UnsafeEnabled { - mi := &file_proto_testing_test_proto_msgTypes[2] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_proto_testing_test_proto_msgTypes[2] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *Puzzle) String() string { @@ -208,7 +199,7 @@ func (*Puzzle) ProtoMessage() {} func (x *Puzzle) ProtoReflect() protoreflect.Message { mi := &file_proto_testing_test_proto_msgTypes[2] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -238,24 +229,21 @@ func (x *Puzzle) GetAnswer() string { } type Person struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache + state protoimpl.MessageState `protogen:"open.v1"` + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + Id int32 `protobuf:"varint,2,opt,name=id,proto3" json:"id,omitempty"` + Email string `protobuf:"bytes,3,opt,name=email,proto3" json:"email,omitempty"` + Phones []*Person_PhoneNumber `protobuf:"bytes,4,rep,name=phones,proto3" json:"phones,omitempty"` + LastUpdated *timestamppb.Timestamp `protobuf:"bytes,5,opt,name=last_updated,json=lastUpdated,proto3" json:"last_updated,omitempty"` unknownFields protoimpl.UnknownFields - - Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` - Id int32 `protobuf:"varint,2,opt,name=id,proto3" json:"id,omitempty"` - Email string `protobuf:"bytes,3,opt,name=email,proto3" json:"email,omitempty"` - Phones []*Person_PhoneNumber `protobuf:"bytes,4,rep,name=phones,proto3" json:"phones,omitempty"` - LastUpdated *timestamppb.Timestamp `protobuf:"bytes,5,opt,name=last_updated,json=lastUpdated,proto3" json:"last_updated,omitempty"` + sizeCache protoimpl.SizeCache } func (x *Person) Reset() { *x = Person{} - if protoimpl.UnsafeEnabled { - mi := &file_proto_testing_test_proto_msgTypes[3] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_proto_testing_test_proto_msgTypes[3] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *Person) String() string { @@ -266,7 +254,7 @@ func (*Person) ProtoMessage() {} func (x *Person) ProtoReflect() protoreflect.Message { mi := &file_proto_testing_test_proto_msgTypes[3] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -317,20 +305,17 @@ func (x *Person) GetLastUpdated() *timestamppb.Timestamp { } type AddressBook struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache + state protoimpl.MessageState `protogen:"open.v1"` + People []*Person `protobuf:"bytes,1,rep,name=people,proto3" json:"people,omitempty"` unknownFields protoimpl.UnknownFields - - People []*Person `protobuf:"bytes,1,rep,name=people,proto3" json:"people,omitempty"` + sizeCache protoimpl.SizeCache } func (x *AddressBook) Reset() { *x = AddressBook{} - if protoimpl.UnsafeEnabled { - mi := &file_proto_testing_test_proto_msgTypes[4] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_proto_testing_test_proto_msgTypes[4] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *AddressBook) String() string { @@ -341,7 +326,7 @@ func (*AddressBook) ProtoMessage() {} func (x *AddressBook) ProtoReflect() 
protoreflect.Message { mi := &file_proto_testing_test_proto_msgTypes[4] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -364,21 +349,18 @@ func (x *AddressBook) GetPeople() []*Person { } type TestSimpleMessage struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache + state protoimpl.MessageState `protogen:"open.v1"` + Foo []byte `protobuf:"bytes,1,opt,name=foo,proto3" json:"foo,omitempty"` + Bar uint64 `protobuf:"varint,2,opt,name=bar,proto3" json:"bar,omitempty"` unknownFields protoimpl.UnknownFields - - Foo []byte `protobuf:"bytes,1,opt,name=foo,proto3" json:"foo,omitempty"` - Bar uint64 `protobuf:"varint,2,opt,name=bar,proto3" json:"bar,omitempty"` + sizeCache protoimpl.SizeCache } func (x *TestSimpleMessage) Reset() { *x = TestSimpleMessage{} - if protoimpl.UnsafeEnabled { - mi := &file_proto_testing_test_proto_msgTypes[5] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_proto_testing_test_proto_msgTypes[5] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *TestSimpleMessage) String() string { @@ -389,7 +371,7 @@ func (*TestSimpleMessage) ProtoMessage() {} func (x *TestSimpleMessage) ProtoReflect() protoreflect.Message { mi := &file_proto_testing_test_proto_msgTypes[5] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -419,21 +401,18 @@ func (x *TestSimpleMessage) GetBar() uint64 { } type Person_PhoneNumber struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache + state protoimpl.MessageState `protogen:"open.v1"` + Number string `protobuf:"bytes,1,opt,name=number,proto3" json:"number,omitempty"` + Type Person_PhoneType `protobuf:"varint,2,opt,name=type,proto3,enum=testing.Person_PhoneType" json:"type,omitempty"` unknownFields protoimpl.UnknownFields - - Number string `protobuf:"bytes,1,opt,name=number,proto3" json:"number,omitempty"` - Type Person_PhoneType `protobuf:"varint,2,opt,name=type,proto3,enum=testing.Person_PhoneType" json:"type,omitempty"` + sizeCache protoimpl.SizeCache } func (x *Person_PhoneNumber) Reset() { *x = Person_PhoneNumber{} - if protoimpl.UnsafeEnabled { - mi := &file_proto_testing_test_proto_msgTypes[6] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_proto_testing_test_proto_msgTypes[6] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *Person_PhoneNumber) String() string { @@ -444,7 +423,7 @@ func (*Person_PhoneNumber) ProtoMessage() {} func (x *Person_PhoneNumber) ProtoReflect() protoreflect.Message { mi := &file_proto_testing_test_proto_msgTypes[6] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -574,7 +553,7 @@ func file_proto_testing_test_proto_rawDescGZIP() []byte { var file_proto_testing_test_proto_enumTypes = make([]protoimpl.EnumInfo, 1) var file_proto_testing_test_proto_msgTypes = make([]protoimpl.MessageInfo, 7) -var file_proto_testing_test_proto_goTypes = []interface{}{ +var file_proto_testing_test_proto_goTypes = []any{ (Person_PhoneType)(0), // 0: testing.Person.PhoneType (*TestMessage)(nil), // 1: testing.TestMessage (*TestNestedMessage)(nil), // 2: 
testing.TestNestedMessage @@ -606,92 +585,6 @@ func file_proto_testing_test_proto_init() { if File_proto_testing_test_proto != nil { return } - if !protoimpl.UnsafeEnabled { - file_proto_testing_test_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*TestMessage); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_proto_testing_test_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*TestNestedMessage); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_proto_testing_test_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*Puzzle); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_proto_testing_test_proto_msgTypes[3].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*Person); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_proto_testing_test_proto_msgTypes[4].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*AddressBook); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_proto_testing_test_proto_msgTypes[5].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*TestSimpleMessage); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_proto_testing_test_proto_msgTypes[6].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*Person_PhoneNumber); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - } type x struct{} out := protoimpl.TypeBuilder{ File: protoimpl.DescBuilder{ From a3baf98b05eef512ededf127fd53eb0c7439cd14 Mon Sep 17 00:00:00 2001 From: Manu NALEPA Date: Mon, 20 Oct 2025 19:06:13 +0200 Subject: [PATCH 040/103] `VerifyDataColumnsSidecarKZGProofs`: Check if sizes match. 
(#15892) --- beacon-chain/core/peerdas/p2p_interface.go | 26 ++++++++++++++++--- .../core/peerdas/p2p_interface_test.go | 8 ++++++ ...u-verify-data-column-sidecar-kzg-proofs.md | 2 ++ 3 files changed, 33 insertions(+), 3 deletions(-) create mode 100644 changelog/manu-verify-data-column-sidecar-kzg-proofs.md diff --git a/beacon-chain/core/peerdas/p2p_interface.go b/beacon-chain/core/peerdas/p2p_interface.go index 46b49fdd9f..942c278de1 100644 --- a/beacon-chain/core/peerdas/p2p_interface.go +++ b/beacon-chain/core/peerdas/p2p_interface.go @@ -79,10 +79,30 @@ func VerifyDataColumnsSidecarKZGProofs(sidecars []blocks.RODataColumn) error { for _, sidecar := range sidecars { for i := range sidecar.Column { - commitments = append(commitments, kzg.Bytes48(sidecar.KzgCommitments[i])) + var ( + commitment kzg.Bytes48 + cell kzg.Cell + proof kzg.Bytes48 + ) + + commitmentBytes := sidecar.KzgCommitments[i] + cellBytes := sidecar.Column[i] + proofBytes := sidecar.KzgProofs[i] + + if len(commitmentBytes) != len(commitment) || + len(cellBytes) != len(cell) || + len(proofBytes) != len(proof) { + return ErrMismatchLength + } + + copy(commitment[:], commitmentBytes) + copy(cell[:], cellBytes) + copy(proof[:], proofBytes) + + commitments = append(commitments, commitment) indices = append(indices, sidecar.Index) - cells = append(cells, kzg.Cell(sidecar.Column[i])) - proofs = append(proofs, kzg.Bytes48(sidecar.KzgProofs[i])) + cells = append(cells, cell) + proofs = append(proofs, proof) } } diff --git a/beacon-chain/core/peerdas/p2p_interface_test.go b/beacon-chain/core/peerdas/p2p_interface_test.go index 3b4c20e35c..882690af71 100644 --- a/beacon-chain/core/peerdas/p2p_interface_test.go +++ b/beacon-chain/core/peerdas/p2p_interface_test.go @@ -68,6 +68,14 @@ func TestVerifyDataColumnSidecarKZGProofs(t *testing.T) { err := kzg.Start() require.NoError(t, err) + t.Run("size mismatch", func(t *testing.T) { + sidecars := generateRandomSidecars(t, seed, blobCount) + sidecars[0].Column[0] = sidecars[0].Column[0][:len(sidecars[0].Column[0])-1] // Remove one byte to create size mismatch + + err := peerdas.VerifyDataColumnsSidecarKZGProofs(sidecars) + require.ErrorIs(t, err, peerdas.ErrMismatchLength) + }) + t.Run("invalid proof", func(t *testing.T) { sidecars := generateRandomSidecars(t, seed, blobCount) sidecars[0].Column[0][0]++ // It is OK to overflow diff --git a/changelog/manu-verify-data-column-sidecar-kzg-proofs.md b/changelog/manu-verify-data-column-sidecar-kzg-proofs.md new file mode 100644 index 0000000000..9f54a3f97a --- /dev/null +++ b/changelog/manu-verify-data-column-sidecar-kzg-proofs.md @@ -0,0 +1,2 @@ +### Fixed +- `VerifyDataColumnsSidecarKZGProofs`: Check if sizes match. From 426fbcc3b05c82ebce5c72ee263a6e6079b76825 Mon Sep 17 00:00:00 2001 From: Potuz Date: Mon, 20 Oct 2025 18:52:32 -0300 Subject: [PATCH 041/103] Add state diff serialization (#15250) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Add serialization code for state diffs Adds serialization code for state diffs. 
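Background on the size check introduced in the `VerifyDataColumnsSidecarKZGProofs` change above: in Go, converting a byte slice directly to a fixed-size array type (as the earlier kzg.Bytes48(...) and kzg.Cell(...) conversions did) panics at run time when the slice is shorter than the array, so a malformed sidecar could crash proof verification instead of failing it with an error. The sketch below is illustrative only and does not use the Prysm kzg package; Bytes48, toBytes48, and errMismatchLength are stand-ins, with Bytes48 assumed to be a plain 48-byte array as the copy-based replacement in the patch suggests.

package main

import (
	"errors"
	"fmt"
)

// Bytes48 is a stand-in for a fixed-size KZG commitment/proof type,
// assumed here to be a plain 48-byte array.
type Bytes48 [48]byte

var errMismatchLength = errors.New("length mismatch")

// toBytes48 copies a variable-length slice into the fixed-size type and
// returns an error instead of panicking when the size is wrong.
func toBytes48(b []byte) (Bytes48, error) {
	var out Bytes48
	if len(b) != len(out) {
		return out, errMismatchLength
	}
	copy(out[:], b)
	return out, nil
}

func main() {
	short := make([]byte, 47) // one byte short, like the "size mismatch" unit test above

	// An unchecked slice-to-array conversion would panic at run time:
	//   _ = Bytes48(short) // panic: cannot convert slice with length 47 to array with length 48

	if _, err := toBytes48(short); err != nil {
		fmt.Println("rejected malformed input:", err) // graceful error instead of a panic
	}
}

The same guarded-copy pattern applies to the cell and proof buffers, which is why the patch checks all three lengths before appending to the batch passed to the KZG verifier.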
Adds code to create and apply state diffs Adds fuzz tests and benchmarks for serialization/deserialization Co-authored-by: Claude * Add Fulu support * Review #1 * gazelle * Fix some fuzzers * Failing cases from the fuzzers in consensus-types/hdiff * Fix more fuzz tests 🤖 Generated with [Claude Code](https://claude.ai/code) Co-Authored-By: Claude * add comparison tests 🤖 Generated with [Claude Code](https://claude.com/claude-code) Co-Authored-By: Claude * Use ConvertToElectra in UpgradeToElectra * Add comments on constants * Fix readEth1Data * remove colons from error messages * Add design doc * Apply suggestions from code review Bast Co-authored-by: Bastin <43618253+Inspector-Butters@users.noreply.github.com> --------- Co-authored-by: Claude Co-authored-by: Preston Van Loon Co-authored-by: Bastin <43618253+Inspector-Butters@users.noreply.github.com> --- beacon-chain/core/altair/upgrade.go | 74 +- beacon-chain/core/electra/upgrade.go | 260 +- beacon-chain/core/execution/BUILD.bazel | 1 + beacon-chain/core/fulu/BUILD.bazel | 1 + beacon-chain/core/fulu/upgrade.go | 35 +- beacon-chain/state/interfaces.go | 3 + .../state/state-native/setters_churn.go | 30 + .../state/state-native/setters_withdrawal.go | 21 + beacon-chain/state/state-native/state_trie.go | 5 + changelog/potuz_hdiff_diff_type.md | 3 + consensus-types/blocks/execution.go | 6 + consensus-types/hdiff/BUILD.bazel | 57 + consensus-types/hdiff/db_layout.png | Bin 0 -> 392317 bytes consensus-types/hdiff/fuzz_test.go | 636 +++++ consensus-types/hdiff/property_test.go | 403 ++++ consensus-types/hdiff/security_test.go | 392 +++ consensus-types/hdiff/state_diff.go | 2145 +++++++++++++++++ consensus-types/hdiff/state_diff.md | 399 +++ consensus-types/hdiff/state_diff_test.go | 1286 ++++++++++ .../fuzz/FuzzNewStateDiff/d5bce2d6a168dcf4 | 5 + .../582528ddfad69eb5 | 2 + .../a40f5c684fca518d | 2 + consensus-types/helpers/BUILD.bazel | 16 + consensus-types/helpers/comparisons.go | 109 + consensus-types/helpers/comparisons_test.go | 637 +++++ 25 files changed, 6359 insertions(+), 169 deletions(-) create mode 100644 changelog/potuz_hdiff_diff_type.md create mode 100644 consensus-types/hdiff/BUILD.bazel create mode 100644 consensus-types/hdiff/db_layout.png create mode 100644 consensus-types/hdiff/fuzz_test.go create mode 100644 consensus-types/hdiff/property_test.go create mode 100644 consensus-types/hdiff/security_test.go create mode 100644 consensus-types/hdiff/state_diff.go create mode 100644 consensus-types/hdiff/state_diff.md create mode 100644 consensus-types/hdiff/state_diff_test.go create mode 100644 consensus-types/hdiff/testdata/fuzz/FuzzNewStateDiff/d5bce2d6a168dcf4 create mode 100644 consensus-types/hdiff/testdata/fuzz/FuzzPropertyValidatorIndices/582528ddfad69eb5 create mode 100644 consensus-types/hdiff/testdata/fuzz/FuzzReadPendingAttestation/a40f5c684fca518d create mode 100644 consensus-types/helpers/BUILD.bazel create mode 100644 consensus-types/helpers/comparisons.go create mode 100644 consensus-types/helpers/comparisons_test.go diff --git a/beacon-chain/core/altair/upgrade.go b/beacon-chain/core/altair/upgrade.go index e8bad8ce76..1c2b135acc 100644 --- a/beacon-chain/core/altair/upgrade.go +++ b/beacon-chain/core/altair/upgrade.go @@ -12,6 +12,46 @@ import ( "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1/attestation" ) +// ConvertToAltair converts a Phase 0 beacon state to an Altair beacon state. 
+func ConvertToAltair(state state.BeaconState) (state.BeaconState, error) { + epoch := time.CurrentEpoch(state) + + numValidators := state.NumValidators() + s := ðpb.BeaconStateAltair{ + GenesisTime: uint64(state.GenesisTime().Unix()), + GenesisValidatorsRoot: state.GenesisValidatorsRoot(), + Slot: state.Slot(), + Fork: ðpb.Fork{ + PreviousVersion: state.Fork().CurrentVersion, + CurrentVersion: params.BeaconConfig().AltairForkVersion, + Epoch: epoch, + }, + LatestBlockHeader: state.LatestBlockHeader(), + BlockRoots: state.BlockRoots(), + StateRoots: state.StateRoots(), + HistoricalRoots: state.HistoricalRoots(), + Eth1Data: state.Eth1Data(), + Eth1DataVotes: state.Eth1DataVotes(), + Eth1DepositIndex: state.Eth1DepositIndex(), + Validators: state.Validators(), + Balances: state.Balances(), + RandaoMixes: state.RandaoMixes(), + Slashings: state.Slashings(), + PreviousEpochParticipation: make([]byte, numValidators), + CurrentEpochParticipation: make([]byte, numValidators), + JustificationBits: state.JustificationBits(), + PreviousJustifiedCheckpoint: state.PreviousJustifiedCheckpoint(), + CurrentJustifiedCheckpoint: state.CurrentJustifiedCheckpoint(), + FinalizedCheckpoint: state.FinalizedCheckpoint(), + InactivityScores: make([]uint64, numValidators), + } + newState, err := state_native.InitializeFromProtoUnsafeAltair(s) + if err != nil { + return nil, err + } + return newState, nil +} + // UpgradeToAltair updates input state to return the version Altair state. // // Spec code: @@ -64,39 +104,7 @@ import ( // post.next_sync_committee = get_next_sync_committee(post) // return post func UpgradeToAltair(ctx context.Context, state state.BeaconState) (state.BeaconState, error) { - epoch := time.CurrentEpoch(state) - - numValidators := state.NumValidators() - s := ðpb.BeaconStateAltair{ - GenesisTime: uint64(state.GenesisTime().Unix()), - GenesisValidatorsRoot: state.GenesisValidatorsRoot(), - Slot: state.Slot(), - Fork: ðpb.Fork{ - PreviousVersion: state.Fork().CurrentVersion, - CurrentVersion: params.BeaconConfig().AltairForkVersion, - Epoch: epoch, - }, - LatestBlockHeader: state.LatestBlockHeader(), - BlockRoots: state.BlockRoots(), - StateRoots: state.StateRoots(), - HistoricalRoots: state.HistoricalRoots(), - Eth1Data: state.Eth1Data(), - Eth1DataVotes: state.Eth1DataVotes(), - Eth1DepositIndex: state.Eth1DepositIndex(), - Validators: state.Validators(), - Balances: state.Balances(), - RandaoMixes: state.RandaoMixes(), - Slashings: state.Slashings(), - PreviousEpochParticipation: make([]byte, numValidators), - CurrentEpochParticipation: make([]byte, numValidators), - JustificationBits: state.JustificationBits(), - PreviousJustifiedCheckpoint: state.PreviousJustifiedCheckpoint(), - CurrentJustifiedCheckpoint: state.CurrentJustifiedCheckpoint(), - FinalizedCheckpoint: state.FinalizedCheckpoint(), - InactivityScores: make([]uint64, numValidators), - } - - newState, err := state_native.InitializeFromProtoUnsafeAltair(s) + newState, err := ConvertToAltair(state) if err != nil { return nil, err } diff --git a/beacon-chain/core/electra/upgrade.go b/beacon-chain/core/electra/upgrade.go index c4e303ddd3..88b00a7718 100644 --- a/beacon-chain/core/electra/upgrade.go +++ b/beacon-chain/core/electra/upgrade.go @@ -15,6 +15,129 @@ import ( "github.com/pkg/errors" ) +// ConvertToElectra converts a Deneb beacon state to an Electra beacon state. It does not perform any fork logic. 
+func ConvertToElectra(beaconState state.BeaconState) (state.BeaconState, error) { + currentSyncCommittee, err := beaconState.CurrentSyncCommittee() + if err != nil { + return nil, err + } + nextSyncCommittee, err := beaconState.NextSyncCommittee() + if err != nil { + return nil, err + } + prevEpochParticipation, err := beaconState.PreviousEpochParticipation() + if err != nil { + return nil, err + } + currentEpochParticipation, err := beaconState.CurrentEpochParticipation() + if err != nil { + return nil, err + } + inactivityScores, err := beaconState.InactivityScores() + if err != nil { + return nil, err + } + payloadHeader, err := beaconState.LatestExecutionPayloadHeader() + if err != nil { + return nil, err + } + txRoot, err := payloadHeader.TransactionsRoot() + if err != nil { + return nil, err + } + wdRoot, err := payloadHeader.WithdrawalsRoot() + if err != nil { + return nil, err + } + wi, err := beaconState.NextWithdrawalIndex() + if err != nil { + return nil, err + } + vi, err := beaconState.NextWithdrawalValidatorIndex() + if err != nil { + return nil, err + } + summaries, err := beaconState.HistoricalSummaries() + if err != nil { + return nil, err + } + excessBlobGas, err := payloadHeader.ExcessBlobGas() + if err != nil { + return nil, err + } + blobGasUsed, err := payloadHeader.BlobGasUsed() + if err != nil { + return nil, err + } + + s := ðpb.BeaconStateElectra{ + GenesisTime: uint64(beaconState.GenesisTime().Unix()), + GenesisValidatorsRoot: beaconState.GenesisValidatorsRoot(), + Slot: beaconState.Slot(), + Fork: ðpb.Fork{ + PreviousVersion: beaconState.Fork().CurrentVersion, + CurrentVersion: params.BeaconConfig().ElectraForkVersion, + Epoch: time.CurrentEpoch(beaconState), + }, + LatestBlockHeader: beaconState.LatestBlockHeader(), + BlockRoots: beaconState.BlockRoots(), + StateRoots: beaconState.StateRoots(), + HistoricalRoots: beaconState.HistoricalRoots(), + Eth1Data: beaconState.Eth1Data(), + Eth1DataVotes: beaconState.Eth1DataVotes(), + Eth1DepositIndex: beaconState.Eth1DepositIndex(), + Validators: beaconState.Validators(), + Balances: beaconState.Balances(), + RandaoMixes: beaconState.RandaoMixes(), + Slashings: beaconState.Slashings(), + PreviousEpochParticipation: prevEpochParticipation, + CurrentEpochParticipation: currentEpochParticipation, + JustificationBits: beaconState.JustificationBits(), + PreviousJustifiedCheckpoint: beaconState.PreviousJustifiedCheckpoint(), + CurrentJustifiedCheckpoint: beaconState.CurrentJustifiedCheckpoint(), + FinalizedCheckpoint: beaconState.FinalizedCheckpoint(), + InactivityScores: inactivityScores, + CurrentSyncCommittee: currentSyncCommittee, + NextSyncCommittee: nextSyncCommittee, + LatestExecutionPayloadHeader: &enginev1.ExecutionPayloadHeaderDeneb{ + ParentHash: payloadHeader.ParentHash(), + FeeRecipient: payloadHeader.FeeRecipient(), + StateRoot: payloadHeader.StateRoot(), + ReceiptsRoot: payloadHeader.ReceiptsRoot(), + LogsBloom: payloadHeader.LogsBloom(), + PrevRandao: payloadHeader.PrevRandao(), + BlockNumber: payloadHeader.BlockNumber(), + GasLimit: payloadHeader.GasLimit(), + GasUsed: payloadHeader.GasUsed(), + Timestamp: payloadHeader.Timestamp(), + ExtraData: payloadHeader.ExtraData(), + BaseFeePerGas: payloadHeader.BaseFeePerGas(), + BlockHash: payloadHeader.BlockHash(), + TransactionsRoot: txRoot, + WithdrawalsRoot: wdRoot, + ExcessBlobGas: excessBlobGas, + BlobGasUsed: blobGasUsed, + }, + NextWithdrawalIndex: wi, + NextWithdrawalValidatorIndex: vi, + HistoricalSummaries: summaries, + + DepositRequestsStartIndex: 
params.BeaconConfig().UnsetDepositRequestsStartIndex, + DepositBalanceToConsume: 0, + EarliestConsolidationEpoch: helpers.ActivationExitEpoch(slots.ToEpoch(beaconState.Slot())), + PendingDeposits: make([]*ethpb.PendingDeposit, 0), + PendingPartialWithdrawals: make([]*ethpb.PendingPartialWithdrawal, 0), + PendingConsolidations: make([]*ethpb.PendingConsolidation, 0), + } + + // need to cast the beaconState to use in helper functions + post, err := state_native.InitializeFromProtoUnsafeElectra(s) + if err != nil { + return nil, errors.Wrap(err, "failed to initialize post electra beaconState") + } + return post, nil +} + // UpgradeToElectra updates inputs a generic state to return the version Electra state. // // nolint:dupword @@ -126,55 +249,7 @@ import ( // // return post func UpgradeToElectra(beaconState state.BeaconState) (state.BeaconState, error) { - currentSyncCommittee, err := beaconState.CurrentSyncCommittee() - if err != nil { - return nil, err - } - nextSyncCommittee, err := beaconState.NextSyncCommittee() - if err != nil { - return nil, err - } - prevEpochParticipation, err := beaconState.PreviousEpochParticipation() - if err != nil { - return nil, err - } - currentEpochParticipation, err := beaconState.CurrentEpochParticipation() - if err != nil { - return nil, err - } - inactivityScores, err := beaconState.InactivityScores() - if err != nil { - return nil, err - } - payloadHeader, err := beaconState.LatestExecutionPayloadHeader() - if err != nil { - return nil, err - } - txRoot, err := payloadHeader.TransactionsRoot() - if err != nil { - return nil, err - } - wdRoot, err := payloadHeader.WithdrawalsRoot() - if err != nil { - return nil, err - } - wi, err := beaconState.NextWithdrawalIndex() - if err != nil { - return nil, err - } - vi, err := beaconState.NextWithdrawalValidatorIndex() - if err != nil { - return nil, err - } - summaries, err := beaconState.HistoricalSummaries() - if err != nil { - return nil, err - } - excessBlobGas, err := payloadHeader.ExcessBlobGas() - if err != nil { - return nil, err - } - blobGasUsed, err := payloadHeader.BlobGasUsed() + s, err := ConvertToElectra(beaconState) if err != nil { return nil, err } @@ -206,97 +281,38 @@ func UpgradeToElectra(beaconState state.BeaconState) (state.BeaconState, error) if err != nil { return nil, errors.Wrap(err, "failed to get total active balance") } - - s := ðpb.BeaconStateElectra{ - GenesisTime: uint64(beaconState.GenesisTime().Unix()), - GenesisValidatorsRoot: beaconState.GenesisValidatorsRoot(), - Slot: beaconState.Slot(), - Fork: ðpb.Fork{ - PreviousVersion: beaconState.Fork().CurrentVersion, - CurrentVersion: params.BeaconConfig().ElectraForkVersion, - Epoch: time.CurrentEpoch(beaconState), - }, - LatestBlockHeader: beaconState.LatestBlockHeader(), - BlockRoots: beaconState.BlockRoots(), - StateRoots: beaconState.StateRoots(), - HistoricalRoots: beaconState.HistoricalRoots(), - Eth1Data: beaconState.Eth1Data(), - Eth1DataVotes: beaconState.Eth1DataVotes(), - Eth1DepositIndex: beaconState.Eth1DepositIndex(), - Validators: beaconState.Validators(), - Balances: beaconState.Balances(), - RandaoMixes: beaconState.RandaoMixes(), - Slashings: beaconState.Slashings(), - PreviousEpochParticipation: prevEpochParticipation, - CurrentEpochParticipation: currentEpochParticipation, - JustificationBits: beaconState.JustificationBits(), - PreviousJustifiedCheckpoint: beaconState.PreviousJustifiedCheckpoint(), - CurrentJustifiedCheckpoint: beaconState.CurrentJustifiedCheckpoint(), - FinalizedCheckpoint: 
beaconState.FinalizedCheckpoint(), - InactivityScores: inactivityScores, - CurrentSyncCommittee: currentSyncCommittee, - NextSyncCommittee: nextSyncCommittee, - LatestExecutionPayloadHeader: &enginev1.ExecutionPayloadHeaderDeneb{ - ParentHash: payloadHeader.ParentHash(), - FeeRecipient: payloadHeader.FeeRecipient(), - StateRoot: payloadHeader.StateRoot(), - ReceiptsRoot: payloadHeader.ReceiptsRoot(), - LogsBloom: payloadHeader.LogsBloom(), - PrevRandao: payloadHeader.PrevRandao(), - BlockNumber: payloadHeader.BlockNumber(), - GasLimit: payloadHeader.GasLimit(), - GasUsed: payloadHeader.GasUsed(), - Timestamp: payloadHeader.Timestamp(), - ExtraData: payloadHeader.ExtraData(), - BaseFeePerGas: payloadHeader.BaseFeePerGas(), - BlockHash: payloadHeader.BlockHash(), - TransactionsRoot: txRoot, - WithdrawalsRoot: wdRoot, - ExcessBlobGas: excessBlobGas, - BlobGasUsed: blobGasUsed, - }, - NextWithdrawalIndex: wi, - NextWithdrawalValidatorIndex: vi, - HistoricalSummaries: summaries, - - DepositRequestsStartIndex: params.BeaconConfig().UnsetDepositRequestsStartIndex, - DepositBalanceToConsume: 0, - ExitBalanceToConsume: helpers.ActivationExitChurnLimit(primitives.Gwei(tab)), - EarliestExitEpoch: earliestExitEpoch, - ConsolidationBalanceToConsume: helpers.ConsolidationChurnLimit(primitives.Gwei(tab)), - EarliestConsolidationEpoch: helpers.ActivationExitEpoch(slots.ToEpoch(beaconState.Slot())), - PendingDeposits: make([]*ethpb.PendingDeposit, 0), - PendingPartialWithdrawals: make([]*ethpb.PendingPartialWithdrawal, 0), - PendingConsolidations: make([]*ethpb.PendingConsolidation, 0), + if err := s.SetExitBalanceToConsume(helpers.ActivationExitChurnLimit(primitives.Gwei(tab))); err != nil { + return nil, errors.Wrap(err, "failed to set exit balance to consume") + } + if err := s.SetEarliestExitEpoch(earliestExitEpoch); err != nil { + return nil, errors.Wrap(err, "failed to set earliest exit epoch") + } + if err := s.SetConsolidationBalanceToConsume(helpers.ConsolidationChurnLimit(primitives.Gwei(tab))); err != nil { + return nil, errors.Wrap(err, "failed to set consolidation balance to consume") } // Sorting preActivationIndices based on a custom criteria + vals := s.Validators() sort.Slice(preActivationIndices, func(i, j int) bool { // Comparing based on ActivationEligibilityEpoch and then by index if the epochs are the same - if s.Validators[preActivationIndices[i]].ActivationEligibilityEpoch == s.Validators[preActivationIndices[j]].ActivationEligibilityEpoch { + if vals[preActivationIndices[i]].ActivationEligibilityEpoch == vals[preActivationIndices[j]].ActivationEligibilityEpoch { return preActivationIndices[i] < preActivationIndices[j] } - return s.Validators[preActivationIndices[i]].ActivationEligibilityEpoch < s.Validators[preActivationIndices[j]].ActivationEligibilityEpoch + return vals[preActivationIndices[i]].ActivationEligibilityEpoch < vals[preActivationIndices[j]].ActivationEligibilityEpoch }) - // need to cast the beaconState to use in helper functions - post, err := state_native.InitializeFromProtoUnsafeElectra(s) - if err != nil { - return nil, errors.Wrap(err, "failed to initialize post electra beaconState") - } - for _, index := range preActivationIndices { - if err := QueueEntireBalanceAndResetValidator(post, index); err != nil { + if err := QueueEntireBalanceAndResetValidator(s, index); err != nil { return nil, errors.Wrap(err, "failed to queue entire balance and reset validator") } } // Ensure early adopters of compounding credentials go through the activation churn for _, index := 
range compoundWithdrawalIndices { - if err := QueueExcessActiveBalance(post, index); err != nil { + if err := QueueExcessActiveBalance(s, index); err != nil { return nil, errors.Wrap(err, "failed to queue excess active balance") } } - return post, nil + return s, nil } diff --git a/beacon-chain/core/execution/BUILD.bazel b/beacon-chain/core/execution/BUILD.bazel index a9f5ef787a..46cb39bd84 100644 --- a/beacon-chain/core/execution/BUILD.bazel +++ b/beacon-chain/core/execution/BUILD.bazel @@ -7,6 +7,7 @@ go_library( visibility = [ "//beacon-chain:__subpackages__", "//cmd/prysmctl/testnet:__pkg__", + "//consensus-types/hdiff:__subpackages__", "//testing/spectest:__subpackages__", "//validator/client:__pkg__", ], diff --git a/beacon-chain/core/fulu/BUILD.bazel b/beacon-chain/core/fulu/BUILD.bazel index 9b5bf02f7a..40bd2392ea 100644 --- a/beacon-chain/core/fulu/BUILD.bazel +++ b/beacon-chain/core/fulu/BUILD.bazel @@ -15,6 +15,7 @@ go_library( "//beacon-chain/state:go_default_library", "//beacon-chain/state/state-native:go_default_library", "//config/params:go_default_library", + "//consensus-types/primitives:go_default_library", "//monitoring/tracing/trace:go_default_library", "//proto/engine/v1:go_default_library", "//proto/prysm/v1alpha1:go_default_library", diff --git a/beacon-chain/core/fulu/upgrade.go b/beacon-chain/core/fulu/upgrade.go index f48e15a77e..ce1abff635 100644 --- a/beacon-chain/core/fulu/upgrade.go +++ b/beacon-chain/core/fulu/upgrade.go @@ -8,6 +8,7 @@ import ( "github.com/OffchainLabs/prysm/v6/beacon-chain/state" state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" "github.com/OffchainLabs/prysm/v6/config/params" + "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" enginev1 "github.com/OffchainLabs/prysm/v6/proto/engine/v1" ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" "github.com/OffchainLabs/prysm/v6/time/slots" @@ -17,6 +18,25 @@ import ( // UpgradeToFulu updates inputs a generic state to return the version Fulu state. 
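The Electra hunk above splits the fork transition into a pure version conversion (ConvertToElectra) followed by explicit setter calls on the returned state, instead of assembling the whole BeaconStateElectra proto in one literal. Below is a minimal sketch of the resulting call pattern, not part of the patch itself: it assumes the hunk lives in the beacon-chain/core/electra package with the v6 import paths used elsewhere in this series, and it takes the churn inputs (totalActiveBalance, earliestExitEpoch) as parameters rather than recomputing them from the elided epoch bookkeeping.

package electrasketch // hypothetical package, for illustration only

import (
	"github.com/OffchainLabs/prysm/v6/beacon-chain/core/electra"
	"github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers"
	"github.com/OffchainLabs/prysm/v6/beacon-chain/state"
	"github.com/OffchainLabs/prysm/v6/consensus-types/primitives"
	"github.com/pkg/errors"
)

// upgradeToElectraSketch converts the pre-state to an Electra-versioned state
// and then fills the churn fields through the setters added by this patch.
func upgradeToElectraSketch(pre state.BeaconState, totalActiveBalance primitives.Gwei, earliestExitEpoch primitives.Epoch) (state.BeaconState, error) {
	post, err := electra.ConvertToElectra(pre)
	if err != nil {
		return nil, errors.Wrap(err, "convert to electra")
	}
	if err := post.SetExitBalanceToConsume(helpers.ActivationExitChurnLimit(totalActiveBalance)); err != nil {
		return nil, errors.Wrap(err, "set exit balance to consume")
	}
	if err := post.SetEarliestExitEpoch(earliestExitEpoch); err != nil {
		return nil, errors.Wrap(err, "set earliest exit epoch")
	}
	if err := post.SetConsolidationBalanceToConsume(helpers.ConsolidationChurnLimit(totalActiveBalance)); err != nil {
		return nil, errors.Wrap(err, "set consolidation balance to consume")
	}
	return post, nil
}

The same convert-then-set shape is used for Fulu in the hunk that follows, with ConvertToFulu and SetProposerLookahead playing the corresponding roles.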
// https://github.com/ethereum/consensus-specs/blob/master/specs/fulu/fork.md#upgrading-the-state func UpgradeToFulu(ctx context.Context, beaconState state.BeaconState) (state.BeaconState, error) { + s, err := ConvertToFulu(beaconState) + if err != nil { + return nil, errors.Wrap(err, "could not convert to fulu") + } + proposerLookahead, err := helpers.InitializeProposerLookahead(ctx, beaconState, slots.ToEpoch(beaconState.Slot())) + if err != nil { + return nil, err + } + pl := make([]primitives.ValidatorIndex, len(proposerLookahead)) + for i, v := range proposerLookahead { + pl[i] = primitives.ValidatorIndex(v) + } + if err := s.SetProposerLookahead(pl); err != nil { + return nil, errors.Wrap(err, "failed to set proposer lookahead") + } + return s, nil +} + +func ConvertToFulu(beaconState state.BeaconState) (state.BeaconState, error) { currentSyncCommittee, err := beaconState.CurrentSyncCommittee() if err != nil { return nil, err @@ -105,11 +125,6 @@ func UpgradeToFulu(ctx context.Context, beaconState state.BeaconState) (state.Be if err != nil { return nil, err } - proposerLookahead, err := helpers.InitializeProposerLookahead(ctx, beaconState, slots.ToEpoch(beaconState.Slot())) - if err != nil { - return nil, err - } - s := &ethpb.BeaconStateFulu{ GenesisTime: uint64(beaconState.GenesisTime().Unix()), GenesisValidatorsRoot: beaconState.GenesisValidatorsRoot(), @@ -171,14 +186,6 @@ func UpgradeToFulu(ctx context.Context, beaconState state.BeaconState) (state.Be PendingDeposits: pendingDeposits, PendingPartialWithdrawals: pendingPartialWithdrawals, PendingConsolidations: pendingConsolidations, - ProposerLookahead: proposerLookahead, } - - // Need to cast the beaconState to use in helper functions - post, err := state_native.InitializeFromProtoUnsafeFulu(s) - if err != nil { - return nil, errors.Wrap(err, "failed to initialize post fulu beaconState") - } - - return post, nil + return state_native.InitializeFromProtoUnsafeFulu(s) } diff --git a/beacon-chain/state/interfaces.go b/beacon-chain/state/interfaces.go index 428e548ca7..54f1aceae6 100644 --- a/beacon-chain/state/interfaces.go +++ b/beacon-chain/state/interfaces.go @@ -266,6 +266,8 @@ type WriteOnlyEth1Data interface { SetEth1DepositIndex(val uint64) error ExitEpochAndUpdateChurn(exitBalance primitives.Gwei) (primitives.Epoch, error) ExitEpochAndUpdateChurnForTotalBal(totalActiveBalance primitives.Gwei, exitBalance primitives.Gwei) (primitives.Epoch, error) + SetExitBalanceToConsume(val primitives.Gwei) error + SetEarliestExitEpoch(val primitives.Epoch) error } // WriteOnlyValidators defines a struct which only has write access to validators methods. @@ -333,6 +335,7 @@ type WriteOnlyWithdrawals interface { DequeuePendingPartialWithdrawals(num uint64) error SetNextWithdrawalIndex(i uint64) error SetNextWithdrawalValidatorIndex(i primitives.ValidatorIndex) error + SetPendingPartialWithdrawals(val []*ethpb.PendingPartialWithdrawal) error } type WriteOnlyConsolidations interface { diff --git a/beacon-chain/state/state-native/setters_churn.go b/beacon-chain/state/state-native/setters_churn.go index c4ed930ba9..b2073b8f80 100644 --- a/beacon-chain/state/state-native/setters_churn.go +++ b/beacon-chain/state/state-native/setters_churn.go @@ -91,3 +91,33 @@ func (b *BeaconState) exitEpochAndUpdateChurn(totalActiveBalance primitives.Gwei return b.earliestExitEpoch, nil } + +// SetExitBalanceToConsume sets the exit balance to consume. This method mutates the state.
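The Fulu hunk above mirrors the Electra refactor: ConvertToFulu returns a Fulu-versioned state without the proposer lookahead, and UpgradeToFulu computes the lookahead and writes it back through SetProposerLookahead. One plausible use of that split, an assumption here rather than something the patch states, is letting a consumer such as the state-diff code added in this series rebuild the field from stored indices instead of recomputing it. A minimal sketch under that assumption, using the v6 import paths used elsewhere in this series:

package fulusketch // hypothetical package, for illustration only

import (
	"github.com/OffchainLabs/prysm/v6/beacon-chain/core/fulu"
	"github.com/OffchainLabs/prysm/v6/beacon-chain/state"
	"github.com/OffchainLabs/prysm/v6/consensus-types/primitives"
	"github.com/pkg/errors"
)

// rebuildFuluSketch converts a state to the Fulu version and restores a
// previously stored proposer lookahead through the exported setter.
func rebuildFuluSketch(pre state.BeaconState, storedLookahead []uint64) (state.BeaconState, error) {
	post, err := fulu.ConvertToFulu(pre)
	if err != nil {
		return nil, errors.Wrap(err, "convert to fulu")
	}
	pl := make([]primitives.ValidatorIndex, len(storedLookahead))
	for i, v := range storedLookahead {
		pl[i] = primitives.ValidatorIndex(v)
	}
	if err := post.SetProposerLookahead(pl); err != nil {
		return nil, errors.Wrap(err, "set proposer lookahead")
	}
	return post, nil
}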
+func (b *BeaconState) SetExitBalanceToConsume(exitBalanceToConsume primitives.Gwei) error { + if b.version < version.Electra { + return errNotSupported("SetExitBalanceToConsume", b.version) + } + + b.lock.Lock() + defer b.lock.Unlock() + + b.exitBalanceToConsume = exitBalanceToConsume + b.markFieldAsDirty(types.ExitBalanceToConsume) + + return nil +} + +// SetEarliestExitEpoch sets the earliest exit epoch. This method mutates the state. +func (b *BeaconState) SetEarliestExitEpoch(earliestExitEpoch primitives.Epoch) error { + if b.version < version.Electra { + return errNotSupported("SetEarliestExitEpoch", b.version) + } + + b.lock.Lock() + defer b.lock.Unlock() + + b.earliestExitEpoch = earliestExitEpoch + b.markFieldAsDirty(types.EarliestExitEpoch) + + return nil +} diff --git a/beacon-chain/state/state-native/setters_withdrawal.go b/beacon-chain/state/state-native/setters_withdrawal.go index 08c2b19a31..e0fc5a42e1 100644 --- a/beacon-chain/state/state-native/setters_withdrawal.go +++ b/beacon-chain/state/state-native/setters_withdrawal.go @@ -100,3 +100,24 @@ func (b *BeaconState) DequeuePendingPartialWithdrawals(n uint64) error { return nil } + +// SetPendingPartialWithdrawals sets the pending partial withdrawals. This method mutates the state. +func (b *BeaconState) SetPendingPartialWithdrawals(pendingPartialWithdrawals []*eth.PendingPartialWithdrawal) error { + if b.version < version.Electra { + return errNotSupported("SetPendingPartialWithdrawals", b.version) + } + + b.lock.Lock() + defer b.lock.Unlock() + + if pendingPartialWithdrawals == nil { + return errors.New("cannot set nil pending partial withdrawals") + } + b.sharedFieldReferences[types.PendingPartialWithdrawals].MinusRef() + b.sharedFieldReferences[types.PendingPartialWithdrawals] = stateutil.NewRef(1) + + b.pendingPartialWithdrawals = pendingPartialWithdrawals + b.markFieldAsDirty(types.PendingPartialWithdrawals) + + return nil +} diff --git a/beacon-chain/state/state-native/state_trie.go b/beacon-chain/state/state-native/state_trie.go index 977f947a76..722edcd5e0 100644 --- a/beacon-chain/state/state-native/state_trie.go +++ b/beacon-chain/state/state-native/state_trie.go @@ -650,6 +650,11 @@ func InitializeFromProtoUnsafeFulu(st *ethpb.BeaconStateFulu) (state.BeaconState for i, v := range st.ProposerLookahead { proposerLookahead[i] = primitives.ValidatorIndex(v) } + // Proposer lookahead must be exactly 2 * SLOTS_PER_EPOCH in length. 
We fill in with zeroes instead of erroring out here + for i := len(proposerLookahead); i < 2*fieldparams.SlotsPerEpoch; i++ { + proposerLookahead = append(proposerLookahead, 0) + } + fieldCount := params.BeaconConfig().BeaconStateFuluFieldCount b := &BeaconState{ version: version.Fulu, diff --git a/changelog/potuz_hdiff_diff_type.md b/changelog/potuz_hdiff_diff_type.md new file mode 100644 index 0000000000..ee26b598e2 --- /dev/null +++ b/changelog/potuz_hdiff_diff_type.md @@ -0,0 +1,3 @@ +### Added + +- Add native state diff type and marshalling functions diff --git a/consensus-types/blocks/execution.go b/consensus-types/blocks/execution.go index 7e4156d386..0129404cab 100644 --- a/consensus-types/blocks/execution.go +++ b/consensus-types/blocks/execution.go @@ -42,6 +42,12 @@ func NewWrappedExecutionData(v proto.Message) (interfaces.ExecutionData, error) return WrappedExecutionPayloadDeneb(pbStruct.Payload) case *enginev1.ExecutionBundleFulu: return WrappedExecutionPayloadDeneb(pbStruct.Payload) + case *enginev1.ExecutionPayloadHeader: + return WrappedExecutionPayloadHeader(pbStruct) + case *enginev1.ExecutionPayloadHeaderCapella: + return WrappedExecutionPayloadHeaderCapella(pbStruct) + case *enginev1.ExecutionPayloadHeaderDeneb: + return WrappedExecutionPayloadHeaderDeneb(pbStruct) default: return nil, errors.Wrapf(ErrUnsupportedVersion, "type %T", pbStruct) } diff --git a/consensus-types/hdiff/BUILD.bazel b/consensus-types/hdiff/BUILD.bazel new file mode 100644 index 0000000000..e19531167d --- /dev/null +++ b/consensus-types/hdiff/BUILD.bazel @@ -0,0 +1,57 @@ +load("@prysm//tools/go:def.bzl", "go_library", "go_test") + +go_library( + name = "go_default_library", + srcs = ["state_diff.go"], + importpath = "github.com/OffchainLabs/prysm/v6/consensus-types/hdiff", + visibility = ["//visibility:public"], + deps = [ + "//beacon-chain/core/altair:go_default_library", + "//beacon-chain/core/capella:go_default_library", + "//beacon-chain/core/deneb:go_default_library", + "//beacon-chain/core/electra:go_default_library", + "//beacon-chain/core/execution:go_default_library", + "//beacon-chain/core/fulu:go_default_library", + "//beacon-chain/state:go_default_library", + "//config/fieldparams:go_default_library", + "//consensus-types/blocks:go_default_library", + "//consensus-types/helpers:go_default_library", + "//consensus-types/interfaces:go_default_library", + "//consensus-types/primitives:go_default_library", + "//proto/engine/v1:go_default_library", + "//proto/prysm/v1alpha1:go_default_library", + "//runtime/version:go_default_library", + "@com_github_golang_snappy//:go_default_library", + "@com_github_pkg_errors//:go_default_library", + "@com_github_prysmaticlabs_fastssz//:go_default_library", + "@com_github_prysmaticlabs_go_bitfield//:go_default_library", + "@com_github_sirupsen_logrus//:go_default_library", + "@org_golang_google_protobuf//proto:go_default_library", + ], +) + +go_test( + name = "go_default_test", + srcs = [ + "fuzz_test.go", + "property_test.go", + "security_test.go", + "state_diff_test.go", + ], + data = glob(["testdata/**"]), + embed = [":go_default_library"], + deps = [ + "//beacon-chain/core/transition:go_default_library", + "//beacon-chain/state:go_default_library", + "//beacon-chain/state/state-native:go_default_library", + "//config/fieldparams:go_default_library", + "//consensus-types/blocks:go_default_library", + "//consensus-types/primitives:go_default_library", + "//proto/prysm/v1alpha1:go_default_library", + "//runtime/version:go_default_library", + 
"//testing/require:go_default_library", + "//testing/util:go_default_library", + "@com_github_golang_snappy//:go_default_library", + "@com_github_pkg_errors//:go_default_library", + ], +) diff --git a/consensus-types/hdiff/db_layout.png b/consensus-types/hdiff/db_layout.png new file mode 100644 index 0000000000000000000000000000000000000000..6835f1d40f741011159873be22a9cf099b315f6c GIT binary patch literal 392317 zcmeFZbx>RF*ENh4EzscZrBI5ymja;_E0p4{#jQ95*C0iTI|WK{C@#T6(c%^e9*PAi zZg1{yzIW#RJ@Y;9-2Z+v&&-)gCUcUx&N=&9d+oK?PRx6CB|>}}d^9vPLglv#T4-oE zAT%^A3|wr~C-TkuMW`2a4=trPXw~C%f6&mF(3BPAw0%qta&aIR}ZmhN)#zqM&BqDbu6YiS_{d07rUZ{qp@cQ2!W-}zNY#^n>h4OV4 zCX9b|nBl&?vuBA8#{E}EH^&R66Ukwm_hXdkfM@^eh%p`eSLglnb5z?{sO4Xy{kZ;D zYyamGPb$R!<&`pF6z2r@BRLpf3!DDC+xZ%`8eyD&ee+XEzg*F8V}9QO|I51>++VZv zZ%@UIbH^Zo6bGQq2K>t#5BPF5OZV@d5#U*fruyp*?^NP~EZ%$HX^` z_x3K~6kYpF@n44W>kVCxe|stxP#ir5t|=32-}UdpIQXBSC;C?rk9t1;6ZHT6b}{>( zp#N|8!2bmOe-HfsgY^GB@c$3e|M$TE|2w3=O2lhn9shC%wVlDE$O|sv-6NPdfS6Ah zR7JiiM4LE!2IWsyV7-F#@z{r4EyC^IzThPh=H-ZEs)s>x1^`$m5iAT5c z=>QltsKb+?>^!jl!NuOo0YX-F)&%lSPO`X>J#gK3r5y8me&OfhvE+O2Z(BWamQ=F5 zk^J+jPLZI2FcE5TMqQXg>6LSEOq}R_7V`XgE!^&ipwG+vqVRakrlGGlT@zv>)A?}= z$#QC(Au_75%24NehNwD{jqA{$Y+7MO!snhun(|U~$uE7tQdq-$_j=7R?~ zpbxq9i`a&=gR)9pbJo4xUFON9|Gzj30o&_eJ55-xBGwJd^CA{q3j1qPm^?{XUk3WG zn9t5@gB&p@gdw}iXk6h-t3qxu z`Nn&VZV~o`k5I6v^0J~hRKQK@m|rr@gl4m<+Dxxmp<{cyB)CsxfL{t4GEZ_bxG!}ck> zd8{)Ep>b=GutR!wxf%<9!dGWGD^s@cc%io8ePY7+ITbhH8H%=no+%jzQlOwbB!X@2 zR;HH^YrpE)wiH(A8YudWYrus}v0#fZwEJfU#Dn}(@uZLLQtg+FpUWIf-M*u~%d@lA zJ7kD2#)?w-SvBmaa_IkLU9XriQfokh-OuWlZ)li1seJceye@Tu{63Hj+HyY7I&KUv zC+%}kd}Yuh^6J^j&=6~0Vo@3A0j!iQ9Q?ba@^p!fo;T+yL8+A<)APQmwna>?getx) zO3C5;4lB^Fu?Kg84d2NDP7H+vVjw|wv}F|c4~%iC#ZT2U?0_mg+O&keUI#0-hOhH@ z9K0TGAMQk8Pm2J_4uz5a3RYHvZ~O*_ zIX>pmtl(y7cxSa5(6DK@ywoNMe5J5f{Mh<*d;pL((jsL zvHv2+8o$OWr0Rs#fm0)lMC6-ls#J<@^Q^HSltpF1wL7=QZpSsmDGe+qEDbhY65eIg zje_X%Qa5R`PYCgdU(dmK$KctOJ8qv=QM-K+_cLoSXJQnk^TNGh4dW-15tS^kwAgqK zX#o8v$~gj0jZZKBft)pX@X^sxlc$UY%l}(N@L9xhdRD$F_Hm-@Qjyr0%ge7{1U{Vd zJCk{(LQ=bz6wJ)=-k(z7KlvaUE$cz#rmp`cr%m zmVcVQNn3cFwvm3jbZrBLj*HTIE;*WdXucM)TjR>V;&6AvRA&bxY)1#-`ByK7LlPr# z6ms4XJcAriBE+GsoC*j$^BdJzQ)ji4g^ek8t&ll20eJ37J{bN6)*Z{>`Y@?verGlo z4R1K=!l7IdCzo%f`{5Sd{VMg730R7ATYLiHus?)r!#eNl;u}eT5QK(Zew&E?qeJE* zsI??o|MfD@`1fxf(0c#QYOx+-jtp`}5Ckp}$5B&3BncFCE`n(3VLV{M$F$?-sf@FV z#UoKAdx)%Q%q$lQHoTKjWw)gA2L#DKAaY5K9;-XdkH40XK*VayX~Y@9X_Y=R=4$O`XF?pzF7d$ucr)PasE`zM{B74GJ2@3mXylpF z0e~FZ`8g@DB--|y-_EJt?{_LHuGoAoTG>hO6%m*Pa%!nJu`oyNFHg}4?CYFKLt>=9 zt$AfT@!ft1Xw194JpY_cGCI7l;7~g1382AK%Y#vf&U*OA8V{Rtft#=S_ zJPDWOXZgvwgizeHF(&>#$Hz0awk!!km#nUng0c0Fm;_^LT=V-GGYaPyt^Vv9XeSR` zk-@oYh}pQvwgczdESWp^o!Oyt##J3}#C0Np0wfp zaMMmelTs>W-_%My@M(X9T^$zt+8Ov=YfC_&p3~gPc-@zv(-*%KztpT)U8NXT&M%VW3kIRAUB`+C^@CR5%*i{YH;+sD61rgtMB4YGt{zT5T~eCDsQ9PW>)6GD zS_W&RKdnmHS6nXZuwJM+Ih}hNRfWSD{rK2DCgrHscgpexHANrE8}K96Gi^USB`%Y4 zjwcSXJ+x@}w65mqjL;x(Mzk?ck@Txy0WPJIrXko{}Sdw+;x_Vd@eNe`2FvdYZ@oeJ~O41V0=XUZgnI-JZ1$z$UJ3Z)&vXK=f@FWYIQL^N?maT)qteDjK2 zeQ96iN(#|GUw&py@EzYlFePO7I#qxjC>z<$E1bVu?icvEW}Ys1PU1RzpHms~vN0@= zp%&g_L3kDkXe+IW?|2Y&X`F`Hm@IQVRf80X0eRBpr5%~hsh?-EU{ZGN69(~E5`9u^ zUD6-;4&!r<-F@>VVpE6`66IzE@-m*&HhxdD9+91%mZWApwogn*SXkDc?YiDi^rMkR z)mX3I$l#;?i$%GfT`Oh}3EC_W0h{u3(u`L}+=TQnI7PP;j;Z?2#ygKSD&r zazb}^^9$;F!5uW1BlL#bb4^lw>+X#Q1Mq2q>zLHo&Db@r-}xNOGFwn?{FwG7Ayu9o zP4)c1}TM#B^Mj%h*{4hycOOA9N9+^n~q2k!VXY?Rv zZ2%Uvg67I7FQ`ekLOAOZYca#kd6m^BGaBA;S=RN3+$>r`|wXm(KIKoiPH&X?6?(6*FY#ukkUSwtPvRVZ7{#^(>=j9rF^+g$7R2(#P6V_%~t*B1+Z$5Poyg%q3+sQ(v7t_>h(uopdf%snSaUeOIz-0AqxGyUE5^0Y&soav8vL_0RoS4p~wj`lN+KJB= 
zmwJw$Vd?jO#lPcw)1$KH&rc%yWb}sfuOI8vEsN_ciC! z$>S^l7_`TPcrx^jvj~gB_q8SZWOJXeOUg5Ss9b3t3&$2;v6kHX1HLy|MFDPb_yky} zsqrf*Yk%PIEHJugzlV@i=%qDlxinmmBw^Dsl(2~EqZsa)@63<@(PZXC0TmjkBs7mo zY)eJbyka}OZ^7b49{9t7#|H6xt~j|d%X`b=Nvv()6Ob+dQ!|<-g0p7C;3s=>R%1@e zj@~To5Z_IDHZp4fOZ{1a_b1%UHvk5~9N{dB+#K=v42F*GO}m5@GRuA8yOVxPO{#;i ze8S_kiU!x+=Mcr9QoevQ?UZMPb0Q4WF+F}y%0+p9cPms3R(cZn82x+(J)Zg`7RVoA z8+IN877yFh`3yh7;)*uEO|CW zo?!rs*tB`Cp_NL*53`8MY@X$N)AU8uV{T^8Gcy?`XOB+>buE6T2v?%}`hL`AO>!)P z#yqDWxj))N|EiTGIZOGeQ-Y}7&?b!pCNZeh#>!FHb5v@=HK2%0T_&vid~`vf@#WMv zA{!hHqq#J%$zx2RwLR+ZP<6Mf6M|7H%Ppr_wURH4dMH>LGrZ6_;N5@Sbv@Vh3C3L zm$!d7P=joxm#2ng8Z1Q9Gv=BT;((l0;yk3p+khi z`Q75ibWO<}cxL0z*=vHg({qDYLqMZEgd3BTrai^HkdXAq;f$5&_ZLPV4AZ8{SusR1 zf3nDkTjRHp*q38lh`!<6+*qHt-KbV#0G0yQE%#l5$L8`vzrJ{78NNL-UAB4g;vohi zeJy5##g8hnnwwXtHF1Doy z^=D7dLR5BY)KYBK!WJAEyPNO5U^EIOd{^zjc2C)S7o;a{+P%P`$ZT(R=2>b+r|3vx z$;YvFRF>CcrjRtz1RV3-wE7)IH4l>{PgD;g#A`?suLh(UpW`l9wTm>Z`dE^!6&vtm zUr&mP1<(82uAkde2l=1crFNj1u=6GTHey@jldjD!sF>^qEIY>Y$49o>@Hm@g6V6BK zaQ>{!P#@>6$^*nwh*zyBtEj4VnR?^R%B}k3O}Wp*~kz%*M#~RP#e5^L=Q70{`;=?N#JX zQHsWrqoSrZb|8=Ua=>c?m|>h=jRI-SrWVK?s_N+m$^H47gPv(<LxX<_h!SqWBVB73QLyDO)jG_Y`A=ICmKi}<^Dy5RtI=dN)GKd@o=szj zDjD?LY}K#XTwD}9nO9;M#N<^|YW$)!a0YNs$XuE&KKJ~1h*G_LYEZqxF5N9pW0b(b z`ubXt+3#Y}77~N|h>m>defZyA0MS1Y0YPFLDT$t5URj%K{(g5dwsZN}p1JPMaZ?rRn^d zp#y~=BcE=NUPU{OD@MorYKwf+2dZgl90tmK0aS2vt*_QkHrgTAM$(ZL!wl*a9}h)y1-EEo0H9H^*;3Wql9x^Q$q3i2;B;r zll>?9qj9Wv=Zj^>XAl@J4mzj}(WB=-D)!kIZ}oe6-J_!Idgi-d(XZb*rHr=<3lD}1 z)BW5oJ04IIkuOa$W7^zY*bfS`y~}!yIiVO-9n72inZ+g?F!;zddv*zFViegmp-IsfY!(Vu zEhb9Wtah{5yNS%icr#yi8u9CXglh7)k|}%l#N%he2iHgAX#6Mr35iS>lg2gcwG)B( zlFoO4Z(rM}3C>#IWfiTL0PE_oS+&S()DCFuN1a6sU0Jh$*cPI#kip}FrYR1>D z3^GDRy%qqZFkEV&pHd6}CR_?Y@~nMz=XLfu1{r-mI|N#Z7%}oAfDW#$-`8Z-&PStB zdPn-p%WRHQ2V_p<*Lgk63Z8bUDL+E`)~7Ua<6amu?!Ho|gS1zFKI(3i%=8`Dljo`` z9sGV|=XAyhQhLup+GKXiJg z-bp2+y{(YX*z&vj^?`rSm`iI!#j}iU~2qpY_Cqz$1kRtOPeMAd3 z?`FmT=5*3Vl&(>1yJ>ESdmZgL_7|s__upr0?+0nuvn3r9PZGk6IwMu9u-OQ;bDw3F z_#L}j)Hf5J5j?BDh*;o*1U(=N^y6F)eTi5Zcp!9bH zqv{#iZ%B+&FS+X6h~-kX;$@WGPW^D44Zj=wn;B@uxFEAkl)bSe(C{a*WkO4Z+ghiz zNlD?%)+B1O_TYBtAucCSig>x_;Y{qv<-^N{u`SPsYlP+XcDm@T)aekb*wz>Mw6|*7 zBMF;w{LcE#Whv~o^{gKCYdN3b3pV3~BV%)ic8sPEqYia&DV)m}Tp-ir@`^*fFXGGH zO^Y|<0dY=<%}Q(7{Jf|X&)v$nBJGKD5aMLZe6dk*OWfDg)ntk0!-Y{Awf-6Zs?uGi zlYk3}A^4diEw_ivC6eOJu~<{mrUlY^l~*eA44gQTE9vbMpfnD(q0iZo?)Dw>5_UND z=@YIygw<{?fF_#rC+0frDMt@4@h`0nB_$*x_bnIOmM^559y=zk1Z$iS6#JfayoWV_ zCN(dU#rfD~zuBM{lwWsw!T-UZqPsUp&H798$1?qDl*x)K3d>DT&B+DB z(EM{i5_!IXB3=)>k9{)v*H788nSYqXnZmrd9(*3r&^c&i^+P)kZow(|gpe`f@AMyg;MRtJ;Yhxo*Hm(H6yDhf`hSB#I zFs{FcxVh0wy3})CJS@!V@o*H0EJV5MJsJF5IL=RC)B+0CYNeD{kG-`W8`R)=Bs57h z7J@xJebxE3En2T(;d;XUPV253rQaa|dZjaoZ|E$&f(X@_iOZ2^#gPue|GeACTKj7MuMV63S~VzS+Y^rg_$JU-<^pGyf@ae(Y;QepoB8L{Vpufe8>or05`>uvnf49AxJgl_Ytf>+${T;2?AcnsN znrb^Y2jc~A-{*9Gw0eHoxRDOklT%_I2I=7qM#I1Ank?34ia0F=Y+UA2=;|0O8%VM$ z=2(x3xYPOuGCd&P&7Fb`9a(Rui19@uiPYLpn_U(UpFOEGwV<6|J!ALteWj{yyLPyRS9M=BoC&*LJ#?a~l_uf-FU9VU*(94-jwjTrfe%ll zF84-or)YGt`Ox!CTu56=NbtoeC+6QRY#uZm(@|bNaD~$YCzTu{^<#J*tbXeOyn*9$r@+RV8|jtFyl+XHZpaa?rgk zdXFbqEMYIhT1g8$qDbNSJ4c%#f%w>A#8o`5h?PJfdj5M6_hjIFZgAL`es^f=H-|2N zb^G$nQxlJK;b(j|B93U(l%4dPAfIdNt7U>3%ij|;C(DbU%n+V)Z*;@>2W3t*!)sjb zsmV5}7F7Yf_Idx@t5z_ zNB7_BJA4@mKBeB+4>v>dR6joQTYvppJF6rbD%nh#*%76$9-6{)EBr7i8E#)4{e5`K za7<_esojsfR$>e(QMoTQ-Cj_d(d<_vUX4*Q4Fm+vVr5r1^EVlJ;1510k}$>zYsHZ9 z9Ddwb1men?IXm+FP2^uxb2|aSyWh#7-^(*kSupStcO=(?o;o65yu zg)!=8DK#%i=`C~2@#AZovV7;PlI+uF2qOamoi69|$@3PEBc|bNb?uLP2lsa`-UgC} zJb#07-^Q5DDx>VN6p`I;Pey_cIDNcUJas!zyt2>JzpB5`F;l=gHs(!<-ui=Wt|tZP 
ze&3X~&h`l@W0^v6u!YbGROW)8m6!VCEzDzLJp_k=56%d>GG_%lvccch4ep<2kvV3& zbcxRyIB-`&9&R$k+=>8^s^WKdXA{|<>vU9`Fa3MN@fNJpjpJTzwsM9yt@`b+za3R# z^d;l%yi5Kg2YiV9jBYFxu}wUlYSd?Go3Fo*`gXYimd6pps|_Ai-$4>(FHx6 z<&Rxg)*619v4Ld1%M7$+6J|~Rb@b~+e*e$%Y1ie$=JjM19%}XT0F*M!JE=WES4{7< zV6G?O{&ZP1fgE}?E{PN(abxq~nrec%t5i0x`q>=h1zzZ<5apcNEITz1pxQ4MgSY4N zrlKBeOAa*yD!iRmN%s+wqIu77FJlpfS<-zNs$VFo?k!qsRt)I?Wie%_at z<@KW8Sv`CII^lqK08u7$&MD~ZP+5m9gDRY}KQGY9f*?Ct4yup^)A9Kvi{&*KlO<OZ?cjr6k7IG2?_+~n2i!+CrQG;ax7X$6W%fgTTJzYOW1X4^ z5(vSW?wi~H(^?bVo&2JErm=9p$4OzAL-hD-yhy*YWFY$apBmxIb6zulG>-+rQWGT-57_$pw-d!_mb*c4xI82%&{4u4>1;AA9a zB7HF=YJc@d4#zwtCvn^nd;0OK^VO|wZUo|r;hVvmC}a^AcGI$z$3fF<89#G%G4GG{ z(OYEC^T#X6$hDp^t4WUe-}GDWRo^(aF1-}9x#cZr0QBI+`mn~j>B8ymuIg1(4VGIp zdzLH**Ji?qd+^EU(#JO1(&63Zx<=;M)^Of~;75J`k7Sr@p6{QUgtq%3%y^_y&@(yx z!OpRVbB2sjPS^UM2VXC(M3C~fW6ew5KfR$E*ewNW3w(9=r=L~?K;ophcVeRv7Zzbh zuheC?%qpLi9>ZQ8`8Y|acU%RGzM=m(k4`G6#MvO?j~ig)e5R>pkqW~qB+b6*R)i`l zW%W}3FoQv`_5kgNkrS#$lM10agWM6)36l68p=k2@8AE$EYP)FF`2%Ka!S96P>CVKK zDfnWg@RQEyu@8#!23<)~9BFbh>0~;`Zx)c#`H_YUZ){Z2i7ziK9zDhd%e$!SXukm@ zGS=hrxl7{E5?seLp=NLPr~v^;hnltDD&qq%BSHVH_#5kfFOhtCZtF)aHnZ3r8~c-# zuAi(;dcNdX@nq+!6qM?nq@vQUvNwIWt*R8Ds@Pdc=-Q}DAT7Oec*wWdsGFTNT1u0+ zE{jQpFQN_;TF#HQ0Ei_GU@jPZmLP$#`SRX1mn>|NWgK#iZ6tjN(moA_1G=g4-KI=# zEvA*Z%rZF?T{hOfR0JhP-QrB@QlsrldW+%7G~WCWvo51w7aU%kSg)qcgZ@f4jnty} z#MHf@$}n=zB?+JOKT7y`x6`rD?PUp1Q6D5w4{a$Xh&PBULa@>b*Q!>%Whb$k3PpD^xwgAivildRa4=Q(ppWh?%!TE>v zq%y5j_{YU$B{@+IM{VRmKF20cBZ%%)(e=gjIvb)rh+&NMjrjJOaX2=Y8?=-@r-yiG*9#~Vv~+~rUjRRUXwy`0D)R%C4a}licfqJHJ<_JZSrwlT!15pdzW9Jh_vi!m z8UP(7uLV9{QFa|a5ftI=IL6@V+O?qbzH{pXWQv^g72E%c6b?tWBV`01ke~CHpotwC zQYQ+6gI=#3zj55&q~zaWIaIfnCbi}?2VIYts%ncpVQWT#>1N-qcV1YaT|Q<`@qr`) zEuxbiej)C9d)S=)kV1_>%G@4U4rU!TaAu2Bwtf$tx@d>rSFcZx4*_rL1C4@R`vFR3 znbv94>bXwE2~J!0^xiP@8)apC#isbZ#-x+f4OIT=umg8$EOFzzsg;CxhTl_RIMM0 zsxL+bPui41`Xj~(UVy;n;fb$g0WS_ZDRmTMLki{Y?DvF>JSU8)T+Q}oZIZDT{lZIf z&}5e$(H!~B?VipIUrqnjbvr0YFVG$>-t+sey~lCGGkDF@WZ+4j)3N!;B&!#RpyZRx z?@JFlA&?T7NmNBpwP?~?zPlGg>=xImH<4=M!i38hU*w&0`3y+0TL&6_K4u=$zSSHP zYq|dO;y6s!B+=dNX|FqGZbDSvw}kuVW$fjeE{YBgMeA~ggOmHsV}W31_rFxzl90cR z$W01A=h89li3oeVd}BL(y4zXfI&Eo%RKFINaXLERIcln*TlS5{_13pxXyy5GtXMRS z&+2}iA7U^R14&8I52FalrmzNrN*<*_iGj6;{mO5Dd>rNYY?f}Bpy2B$irv8ThBsuh z6t{Qg+t(eVBGuBr=W8sntX4Ut9kw)Xa~3viLdi34@2dmQ2eh6<1)2NbK^o_PdgTQY zUay)Yx5-9FZL|NB=VC4P7pg1mw&|5lb)QEC5Oc9H?7el{_t4nfrLFFv;{wrVT-J4{ zC*}LI0Z4cw5I(MZ{OmI{SA75_i+FzHjFtIB+eqwMuvn5THn6ic2fGsMH14u-#K$Wxk$Ew;A28IPMpJ znMg(WzKmJslO{k%s0$`W(9{{kqco=p9h-Uuu1=gKEsaFFY2&e!@;r!2&(8~GQ{(+VXoQi z=@#AiFSAS25!(}YUxMW9Oess*%f2#SwBHvz9PaVjcce>&ptkL4L{dlP_U$lN%~Pdw z_q+w65m?klI_>ijUqmiWDXTMFWYLN=$F&!ay-{%A)u4RZWd3V%BJ=}M-5d|ABd2f6 zqC{TDP7Uasi!@g`)3G?t>7AV<+{uc&WLNg_dSd-=L)@;^W}IMLPW>5dRq`%(-X(G9 z=Xvlo)xaTx{{g=##E18Nx0a5e)6dQq2Z4Q?oC=&n9L2s5E}*k)_LrH6En!Z8Z;eyk z00r>i)H^RKu({E8(d9@37#C-q`U<&^!x-QiGkayNB<@T$c@XvWSmaRfwm1LrI7*i4 zq`hDslVHCfjHnkyi5Fj6Gqusx!9aig%#=HRNTY+^UEvDR>R_b^SnSN z!}76x4*=Q|W+eKpLH`q6BdKl<&!)h=nQ}3dwj0H|xF~Wo!eIdH6fDYtF5irHP(0y- zEYEBhsdf+PzEjTzX2QjlT0Vu&GrN?YG|e|6>S{XUjJEkZkS$+&fHTY58;+zXzfnzX zyw2{@TkmD(BdE-2fa$|&`WRiwLF1nkab9utIDzKPvA{#vmhVB!G6e5#yl&H1=|Q^X zuxU9mD*9}Z!Qn&6S=U0~h&1PH1!+#7PoQoL>(PZ3GDWo^XpCuR zB5QTN^pxDvN5@pY&vtVQ13-ePm_e698|fQ6nfBzS;`^^yLDF+?b2aW5!pS?x?AW`< zRL}1WW2NSp#+98kFBY7@8XMV$ zpx$pcf$W>+m!v_#UE>%(UKM}68PWvKab1mOIjmjfdI@YSxiz$$w<3-YxoQ+z{q2P4 zh?R>I>P*5DE1OtEGCJt&8?p0_=zDXCWiUjuuYNV@;hdzpS&!vx)}Dt6Jbz3!GDYN^BiS{(Pw`z?1iL%_(4haC-TX~Lzk zP%jO}H{jikdAR!3F%7?aLX_hdyR_1GtO1}ZPBGVX<7YFp8HYFv`$cERL%-2By5-0Q zIldWjtQ8)ik7)n6+Bb(o>*Jr1zv!W_frQHkd6U~vW&t2pf3n)z)n0F9?++In{9r1d 
zs{_sJ8MPrn8M|or=9p#1fzKaKhbyQ?x1Ncs$`VcQM5&_G^quLtfi6wSB5a-ir?Fkk2SQDF^f5kOlYt&?;l;d2v*5}3R=U$h zT*lJ|G358+-~7L$z7OM6z^@r}F(@Nt{7fns=xU7r!9p z(-!J+RjlenKWiSHUu`WHS4dXUS$;`%*I$Rto2NL8uT4uy3sVFquS~McGVSrw8EYx$ z_7AXL;@PkatV1C9iA02Ez*fg8w_D{<@;0b>Hle8E>N$VvV8-C9Ai*7r_-YoOn;n^q zT{bF5HnHFfU5tD<8FBXKTg8e02(xR##4k1rmH zZ=$CzZuV0ycfAe<__<7qg2iT@Cykjg$Iz zTzIga({Vm$c!J{4wB@6S>y@T#i`2=|9PbXEi3x{A4x{1C`%5_dXNpttf~8PtmG!V? zB~z)Zb*lC4;F;EzR+i>#H&dCx&y8f;9SqZdwZsfL2R_dz(RU&&{adM8;2Hx1GL$4DDXb4L>V63U?0CPHYB7?v0Z_EV$&D@;oPK&^gMPP~h`0)d? zihttb0H!1HCF0D`sqw4w6X$*Glx^qsLmR54pLqQ~-=`rB9?o;G5S%l!4fdcW3vE)6^0s1Q4uQsI9;RJ>vROp8IYa=^MWhyCiF z89v}+gE?a3Iz8>e4GQb-o1Pu6z4FXtxZ&GJ%61+im=`VC zKKb8yhnAalCDnerjrZCZzBQPc{qneNA_Wan`-Z%=e4*{=68uuEH`VkPulC)JT!I`&g?D}xtHIFy6PVy zuU2^;F>4-$!v`?bTaGA<9(PlsL!Y|G)5wT4k(M1mcrjwzs-5q42bW!^Sv9iW&JAI{ zG?^iTC<_+3o%Gj$fzRbAjmY*I+&1Pf1}G}3!>idaim+)kV;_JR5L?c_Ku;-Y59}kq zJ2%+<)nMGA!(B5? zBog@M2!ge!Jp@p#{`G60Pv04p(Pn4?7+XD3lZ3K8SMSK1=|OC1%d*ie|}~1BTpeftmSba{dnR5J%vKtIR-9N zY4OKsaA;VgvQwnKgvO7+mlcGh-c=ms_s|P!;{+`}4y+60N@*@%D@z@q%Zi2Z=Z?RC`XJT}LdK7&=jT_BO;ZAmtEP%lV z(0@FG|NOZCC>Z-TclSq?_b~DAbVFzHGxPQ;hfn*3Jj0$>#JOtR%O(>k=fKRilqB1` zS{TrT`^Qz|Mjuc}tFo5I%3ttONXd4a)a@Lp_{`4BS zdLju;E!vTUUnc;pM8=R%ky|SDhxK2;mJ@#98j64WQSyN+nZ&fbAVxAvjMFxq1jQJhA>KqwJN_IkpdQc=_gEXH?JyZ3> z{~zP<2hADH*IMq$j1Z36jJp*=;N)8v!s>A)k&~XP=kkdQouC!x*_xkb#P9Sys80cgv2NQ?EXAM$ie(M#e_xr4a zVOzG?{CY40B73u23@ljowe}S}7kkb}`NTCxED_aT94`d(v<-D!xXbc9Gi#?_boe0S zp-L=yz{eVVEy!5*$|l!%BrU0_Qq75yW?}c5tGcBZSLb2ecj#J8CkLEk+Tra3S^T*r z#rWZaM}mHg3g@stN_7CyDUIvqf!!B)7%Zf11?nIvIMyxkZ+1LxkJ@YWXUvdD6wV8( zEqtCm)LhePYl2jt)d@~WP0#$U#;SEXvw{{y7%ZRE z2Ob*a_!e9Km;W4D4aA^-j5W*Ec3F$->k6ASnzJ0+*n{Q`+KHM(wfeSEGpgsvzphHt z<}Wnp<%ZHnC3MO8EWUE~+vB9Wfy!EVB_X#-Rq-uzAW#OqvWgJ}V%ZNB8zZ*4F`_WE zo@_1Y3GfxF9h6=@=lZ)e+w78C2D?VeY6_=pMH-kVfagpDeVc+NWBoC=Jc5+0o@j@u zhX6$3pZ}S}51eUp`+;&%>`k>!PuaW?CKP0@^x2U;+kP_=#5B z2@bjjN_Sl?M&AWEdQaKyo)e%ITdefY3<>{9nto*a>SQs6X-jXWdQkkH(=y+wX@J6^ zCc)Zg>9ioGTFsKl%-D$6lI?1=HpHQ96{WBG)A7y+lL!K)EF=K<31ACp1>NWU{Oy5H z@=fhn?9J(?{r!%=T59wkFuC;ePSbg)(9N}3XYKx`W3>LjC+gLt1C4zghMkYxUqJB} z=`hlk1PE|QtnSd~_SeDrO)E0`&LA|Jaba}uF9w*Z{i!m(e(#J{P3_^a_f^<cG2cIFHiO@x@uqJ9V<`P)-^*S`N_>Yw~+3k$frYGSDCJ?#uLbTS0a| z(a!{qBoKqH?-gZSlXOT&c|KdzjPMNH2X^9a4n>v$R)xo&Cd*?bRQAaGrmPu!psU5OsdAQBRg=@pWzHi44ycB3Z1Vvf*b;dCJWb7v!W zM6S(2hO@srMvnhXl3teykU93fhQM2WkumVIQhpMf77yxa`W~=+oH<()shLg&&51wf zJ{lIJR0DG{RsFN4K%(UH{Ji!sQGEc$Zwau)V4WomA8L`2u%HCUI(TSV z3CI)~(C6!W2;U*q^MyNdNAh@4Ma1$#_Z#B6dWU9WAkV7!fevjhGj% z-RL-@x_xP$ft*E-28iNBnI#1Vt#>DPIZ9HD#aOLAJwTqLpD1sl=3vge2zhn9mz5RI zHZ?21ud_*hZ>(lSsz;p27Lu$_^`nZi0{`f>)3mr?XxhZ0L}yu^-;bCXD@M-vN;FH; z*4-Z9JYiuC6s2oF+5`}JAGw*9~J7W=67zreWuul5nEga zgC~sm%T~~!c+jo0Yl85X^u3x=1Z~U2_B%NQ+9Mb|vfw*58v>{a6OV+?VX%skw84EOkWPAhV6ZefmX;9$5G#jpx>{T533J?_Tlq(KYuo9W2$fIMdiye=@| zaH@SmR#o($$@Fe)%pEpj&Z!-Vox=l56`vPZkN8{86^sLh_MoX+nueP!BN=ohIpkJO zw+u_ZBizG?3_|(IpJZLLk7ALVdG}DkHr^GBvQ!DVfP%A=VYl`cp$ak+wTu>anRmw% zXZ}FK;ZO88Cvwp=h!X#HpSWm?pN%NJ>TS*KAiYJ?I7orNir(J%7>7+WTU7WM^7W09 zs$IIT8K^0ke=u%ObnX1Dcx+&T?A^_T3N_tvz87ToPU+;Xzlj}&?|DB@+_9*_zdHO9 zD+rnSaB@NW<}Y{eTxhP;FdpsGAP2eFhmKEhz%yIN?;kzxiKWBr-|j{URLVSLd>ImI zOiXk$Pum=9&fn@G7j)VvTjM7btI5$F=rYzXj_APo^D+9tsL~6@BIB2uC+~Hy_^Yns zUniCiqeP;3goNWCFlxs&4hclOs_M88GINowsG0iZTK?GD@!@eoWYodp@!k8!M9zL2 zLRT1X@jL;e=WGY8ab;XSOfm8IObYKO{I=3}YFki#^GpdU<}BkSyCtECpwK?`aHWNh zeHclZje2bKtTjegl_Ua6!j3tb?TWsgl^txb2Ums`rWrALT;7kv*iZ=JRnkju^HPz{ zj6#z~S9ef3kw7rU*wV1yr@JRdMm|JuHShkgiNRNbW5%J#_7G_nh=*+5G8}s*sv9mc zGFz-!gn`WWe)bIF7B-=VX=Q!X^yGmuz_83j*3KIZrzN7OOHf-E1{o-rRnPgJHJXXO`wQAHQ*_e9Ir@eH8Vj|Aljq 
z&||-)sMm)9qOACD#k(n+mScDhaz6#xa-cH|VPvws7Qjrsas zxa|6Cb@G$+$j=AEUxx=6Qh!Xj6itcUee4z&pSIs)FYS`3#>*Fp^F{P|%AF^cqG5D0 zNTsyW_I!%c^gJr|!*HA}TeQSVSpDJlu7RyZXomH|zud(931EQbf6BEt^YrZeYV&J%Vtf=z`Hi|lk` zTHID8;4c!fy@h^CPa4QVw}Un@MKEUvYdSVC_g~)JGxU+vF1`4Odr^_8&YHJ^$OKqZ zSJY5)n9Io>>-{rD@5<0%=*G=SRb3*1+;_omQxkIa5`2@)EFNPxgDIrhP`tx${_Zo$ z#tleC@Vqb%m4|coTYU=9q08qyXHCWr+VL^cDBg40i_7nR-%~2Rp4`cAhcMbr7{yq? zY_kY)p|-vo7>^}d8_1+(eG!P7($&dn;i?`XMXx)PVY#53F^2b8Cc)CrTHZJKv-rVd zehG6ryPQ~b$~iCFTFdj^o4{2|)`VL?fVE@&Pqn5eKj%L&g(?$qn8tn z-Y7~VebWCM{S3!`I-La#>-B~(2HDEls*{| zt$jH-WJpULEQK+8*J`XV{qXQy-)>cU);Mxr;r@$}?W&ZT^^HxpH950zeOl#<+j?)N z!z4W$Y$sA!41)dTuiC9heoy%`5kb(iuUxDpgalzBFKTt2Lb$?8xK5LcaDtzg)zq=* z+$+e((Mh)vZ&NGkIKBOC=ia;Pl=1j`+S79^c8?>&cT{p#^5Ch+tZxpd+F^N{^T5tD zNh+PkzU}HI5v#buhN|m3moyy`xz?nCaKtU4wh)rp00wO>e1N;#`EwAnEQhuHvSX39 zzPi}eMstqvB}BLLvc@1kl710_K1s)!D`?)&#kslNd)1RV_ypwaI7F$+9GlQ-`etQq zi!-M?(YcMhyi$~sn#oUakX=Z9;0SXHXw5g0Q-ex@7y(-ng@a zy>$B1%WSL{VjuL;Ef|FHJks&z{g9xb(t;P?HpO&?`JmM!>!jdv1HJ~i*2wH_$N!`_TV4o5u+P^p` z$Mo9huf}WMg_vYVCEv^0nPCg&PSs_3i95wbCrj;E{Loo`-zN?xLLVd-iEnl#Q00H( zcCr;)^2T8+P=;Fkhsca06RxS4Le}_tX!>n<4j1U>c-G^TK_$wr0f}zIoY>e`Qh1IN z+%Ufmlf2byn+uXM;}R0R6~8-HpR_{4Nl*D5>|@+Pv0a^q3|rv41VMw!4+Uz} z?k7%|ssBxV@CX-Z#aV<%q`g!~>ZYgzHQKD>FI}-*XL(ErktV{aY|4q@kT(>Y_t{1> z#f|b$TX(AZ{CUhDVNO!Ccj1tlNWJK~ zyx@j9*Bn@CcI)JQdF5_)77Bs?8Ko^8dv9x+eEzb2_i70xC(e@OON>5`FnQ2}J~~|h zuEhB4T{%RF^@L54;Z^_(J&Y)4h|ft*basxh0@LUYQw%_5doz`w93%L1?W{$1-pe0$ z-fcet{Z!KBJnFhxonK&_6dEU6cc@nlSL;svtZ zs7y75AJqG91cI?eToem}rN+I}LyYl9h0m@u?PS-^KLs5Tnm4ixM&w@c!gGXaGZbU= zN!IF7S>dQ~k_@%-l~YNyVAbcLndG6>*+_=ChpBaHTG&~A+tuOQ4ubl{g%$t19LjIo zgN->Su6|MfoS6*|P^IT@IbbplS~UAXP4yZb!^QS|oBc({v(Nopf`;kBjg^EQ+Hucz z!f(vp2yD_s=#g>E1@=95+miE#(RH~<$aQ*rXNSt!QEmCkEg6~&-_Pid8h2XbB}v?A zJ^i9((!p;0p+9LZ;L3-hgTkeVTkfKR*#cGx?=AH|2y3AGYIp1)ak*Q6beNT{J<)tf z+w)%M>Fkgl7vKcP^1pug?2u40C7kYys90ic5oDQ<1xA$=Yf15p{lm1M+fpa<01FADgE4F}_~szT zwJ=Y&(2q;&59Xg*vHD(T;a2(nR8+@7}tL=7H&T-AI3b< z^A1_}TB5{u_!ClG4@V&pIl*!{yWDKw8^2L;y0-gS3)%1w92^ughCT-L1>sxCCStR8 zOz=?vN#71v=1`f%59SLEi`;Iv9qp30;saCewBES^1bW=5j>84 zOMJ_iz9f$;TN*xx9`~UrQ!u&8IrfYbIA`IvJ*xLV=qsE&P=&gvR#p=yPd@9sZb6u% zO3PXHV3F23)Ct$C#76=1Dw4=hmgpqmcxjE+Bu9$bdbL)509VAdlPNmxDv8W)#qg!O zU8Hw@jF*xk@%5FF!fG*i9tj+lp?bf=pNw0Ra6DlJD=g-XXomh~&L3ZQk-1T>NVQH< zrj;>!wtkp<`=DNicv?d7+gyUfr|(%i>o6Ul9$T^``mI2FI_E%O6OEjy-da$)r`$~j zKy;01C_;gv*M;SBOr7#3te|FP*2uYKVAw3^If9F8q-g*mlOUXM9QLzioOv0YNB|}i zDHLGWs^5mN2O0DwCwb|yEH-qU;rgi46j|8#Lr#vrhP+2TA$?EPMWMpr0sf`^dSR-u z!e3q6e&;giybUS#2PT;lk5070=+B&O?}&)0&~gupB9|g@J_XMmWm2e7bQupnZ&6WG zc{so&H$e%0AxsB7)s#eB%r)3g#=VD^nvBzv7fBK*k>CC93x3!SL|88o2xNIV8Ux8% z*Tfz3D+jTAczl#vw91bgXWl6MV)4RabbvBt$%$=KB7*5a9DF2vT4V6j{^nh<|9YbV zdU{5(XvI>?2A0+|eJx(_{pV}8i|iOP`tNPCrvBrc)R4Bh16(4B?l{V!40jg^aqQ*b znMhtLJ>DvJah5O%-=dG~vE8M8(N9#f&~%_xycs;{=VF#)yS~#251Tfv+7fRN_t_rK z$4KU&xwv}DB#P2FoF?wkjLWOzQGwiQY!-7Yd6&#AonwVXo!FdTo6KE={k-@D_p*LP z(^z#t4tgFuxlm%V+=kEi;d2X;<7L+SAbD4-U`U{?`=&2#;24r|w@pjs_grXC407vw zp2InS?_1m!4vz5FsV{R<;-)>jput?j-*M8pagXxp-jda4KzU3@zAw3D-V8L8gZelK6JlHx5gfUry~46s_|Vk^(AAMm>=;X7t4eUE)PLaW+G% zh*$EF)h5Aw%g?9)R(RRj|_WMQ$7txQdZDSyUsUtMVEHS(xi`?JJ0l;3tTKUztWW$y8VUY$^iMdNsxU=%d5`Nha+`2GYBVw8(@YSyncu%yn zbL@mf_ZUmB81Yi1yc{}(w5Tzq(aog5Ac$$5RxMLl%)?MYU$UDRaB&CtjC&^i*#}Sk zj3y4g{m{OUChq>by{qEHBHxKg{(6e^UDJ>I7_qTumlA)yy9V<;P~WyTURMnifofJi z-b?1O$+|vzWghufU$r>BC#%0lyQ_mCIQN=kL=s=^Nh|>&oyeMKm7Q!H*FtuGK~T=? 
zzXN}mrSD*!j`^+2pKPgcR|Q`^{3j9lJigodn?xbBw9tf2ZoW7ySEbNc1z!0s@ruYd zh}x!3SS&Nc>ngirqy~N_6_9uvuUq5(Dp&0^V3{p=i2Zzezy%Xb+XpplefRi~Up(AQ zXsu}}zJxE)|PX9~cpY&M!jp zvb28SGf?Od=cEgr)){DI416#kmvFDY_JoCUr;_pKdffxr3-r1X`dFXsNbKgJfBZIh z&SCVNrnrPXDO%n6CM)iH3#VQ=X2KM^5^@gdi~Y)Z{|56zsd=rcO=uMHRrzj~-cHg% z(HEC6MIjfRShNS;jIOQYPVMafrx1Jx+F<;&oO{!+LbUbZY255&@&!%8(=%%YB`f&1 zDcqHR>5u!|Z@yVMLO{g}1uQH0y9haKMUv~d5zkToR8a-3lcU11kN=};t%v_C^8sIp zFIj@-U!K|?IqEu#;G`y%fau&(-|F4fz0$FE7rKkWjgZ}}p%em9^4SP=$B|8+t@S6SrpGuFejMhhW-dlgU`0_iXZ z=eyK>=h$kGDCW_3bXT8ZZB}2l}e0#I_AWusVpM&nK+BV~d>&0$&OH z%@>uz3HgS@W5t|y=u!>-j~8J4SH$n?ir@c~2)dVmMYcbRyemNK;zy*MHB=0V)G)d_ zUf6iG4Dw4QEl)w|1%Z~`7+#I&kG$#Jck9uxDzn=k-JT>4Si#*oO+~Hb-IGRSn#u3U zSah|#kM!s+dR~OF>2I#QJ4P|=WVb$}er0&{eSiv7Sznp&tWuM$d)5ky5JB-Av z!PC#59heZ<@lVXz!XTQU2g_X)OI}C#*b#WWsdOQ4^=$i`WpjVEiE`0 zx?^sESwYu~(4*?$e7ZB19n73-e52cLXm0=3JcX$X^Jn@p*|dmgyOmmTvVF})#i`8t8)R5@z`-;)9+rhpr=+~#N&j%$CO5SVhW3j=p_;wvJzB#e# zILTDaX-AiAEIX&h)bK`5?wN7wqx$uPp-eY|Cc4!lYSSKI7$w4TjxdfnQpBZ`va1cn zpCZhkWl3Zo-0D+=zs7t8GoYfltbq5Z}5>idoQf;S=|MH zKX@5TqhG$Z%y38BrSQlcf1GMWcK8`L`K9A?+{USaT?qxg zxFnUZGeb(Fmkg2hMS*z9;WQSK-R+uGh;dkWIq;2#OfQK2CcC4+;IFOGiE2ERF~MC| znWg1MkJ+rVTk|#;gnHc{tTnO4)yNqGWi~f-YBrK3x#xoTnlT+2%cBV zdC@~1gAyV zd)9un@dN6W7UqfUVlVN*ydF;(H!1gh1G_Ku5d-FqSAWB?-+oh#uch1NSA27|*|V95 z00yi^#~R6B$^2CQWf(dn)d>2Vj*bUF2Y{8_)Pa>la!V6$)Wc1#_J(7UWs$B~b5qlI zEPmbR>Sze|X2OXa#gs44Z76BxV+y9NXX36#_zrU^<)q7dtm5Qba(BJcI2&e56!j?e z?bO*e`S4Sv2okUlSfoUMhJwEQjB%k?KpGdPct*=(=Mi)dN8{JNpz63IU~IdRL%uJC zpAs%tSLFNcx1i`s2jvbW!QvkBOfHVa$Nui@h<~HGFW^bgzZqFT18^vUU)f+`Exlb5 z3H3Fv9|wt`|7H~D_UI?v#P}{1^`m@}#cc%gU5j64J8a>z1<~uudc;XIaY!tIZvmY6!JX z&Av3XT}|NF4_AN-1I!(w7eE~ivV%8FD>y{_DK%j;BSsT0wjt(b7+GxXe(Mh1Ck74M zJ1dP4DSO~evIsk9+M2b4{msxu2UN8Ty3}*Q*OfSS@x}d3md`6F$AT;7$KD)(tnMCM z_Yp}y0(yRH&yiGT>ZM3#CG7RL{QuZu%_SbbzVdtI5YGcOE?+%KWRA+7HKHD9z9*lq zB5piZuD8-k`=KFGiNR(ywH&%J^I12GZIzLzXk`L^!Dt!KMaCLEoZn`st@+01klSg#WU-er@>aH6L{q=!*zuSnFwA= z7ftD8VS%cyJe4DV<=%GoY&e&`a;5-Fi5(6YWNV_MzU$Ra2gb}9AXO2~L}QQDp%nL< zj~a9ZTXZ7u5e0#^*+$?{;dtHw)=5GA0BZCSg?!7g`_*Zez}G~{c!xxJHpnRJ#$VjWn}Y3D%B zw$66OsC(O#>FIj0=e=_Lhd6J@>bGfv_eV{$$GMB1)B#>F#-ttYZyo{oBh$~^BV=&? 
zqC|zXds9AtOIG>*r?)A{O^ykMedZU4&%h5Wg00Uy6SQ+XchD!XAQuSF4cxmIXE(l> zKy-DLL)*MoeV!)}PwLyLwwk@iXR)ryWEZ^217$e-wp)Z5w|cdf?!6Q*7j%EGb(mFI z_1yrrNVE4m0mG`N8uk}fugxG%6q?NPK_~fsmgnc}rxEm=&>d>mo|CTC%V3$X=Fi>S zrz?{OzVe;T2dXP?FMp)oiBP9CxIf}Siih&zkImsy-u3lypD>cx67N#<23fTlAYNj; zo_Q?WRlEu~Jd<9Hg2&&TPEcPhqU zN)deR@GpJ&pI94meYS{D{yN_&h$vX9_h&V z>@@zlh7=hg(#3+bYiTmZEQU^atINA%$HX=?1i-sAi~?0}vt+Hw({5Dr^APu_pH&R{ zMaP9p1p#$`hI~!Ii^o}4IS5Ag;U_)-X%zpLqDVGB+aToQlN|+-x^z@c@Nn&Q+{xy8 zR(XnbT$L;Ui;GKf$YD+?*2Fqe(+D;;RF*sF{$9A8kM~sMlO^CFaM)MUenC5dwX`_3 z-0QHnfu_1xE3UQxJV+Y3c79szIk);XSfVbG@LIY~?5uVF?TfM_M-?elU$64jFRCuk zvsA0Wh}Ycxz$DBlP|OzJIkl1D6U!wB~vl zumH5L%h)VBIXJ*a`9o^7y8{xi0hm5!(4V^R@u-zRYsW_M&R>t-UM5o1go1yG&)yz4smiS{~ng zoqL`1)GI$;k@tb-<;s+!(l&fm9w^@##SQV*8u1m3n8anV6bZMJxq`(%K9ywUtzh22 z!VR!R5w8trdp)<U7lDocGPJizL=*1B@N$DY30{o88@%mcV2I9>W(B+FLVOH&^W{p;Yl-K6dTuU)pY^PUAGAF*@l%7E1N(tXVpQ`ZKl4o)5c7sebK7| zA7nH?SW{RnPS5ry63a4}baV_uyNRe3dImf!9_LD+RkwNIJ?H@C1S?kB(z>OfKhhLO zj&!L{p}V%4mupsMNM^>4K(79`1utnIP9D}(=im>G_juPmxU@W(VLjQD$-=^vvUh1}hG@GfjNsKjHKuWo@P-&&K^8 zc*yy+KM3~OCcU~scw6QCF}s6L+UpSYR@j%}aQ}Smu;WbSNpw=YWimk)_vgpIL{-Ld z%S@Tjd8|ManNBnJLSRgBe5wx;cfuG0R9pw(d4>teRUgeGGB&+n&)JSe}k z_4l3kUEBRN*x#(tB#z}ouiu;tkA3GezH)aSvUsekLyg~s3Y54EN$e>d6ZtuO@id}O z4lJfDgy+gcDW5n$&k%&cBG)@-O?!Ca4iRpv5uktk{&)X&jA=}$`=1rp7dTlxGm60v z!i>uY0Klouf$f2#^&uGXI>h8R*p+ZfYCGLoW{6Y%LXEA!=_x~E{yPj%r91svHH;R9 zhORJ61MrsOB%72DIEcHDs*uUCUf9IJDtoXeCs+{ch`e~%K-RUi2u}5o)2?xDoYn(p zy6tOA2sPGg*=raKrmW;%jM-_X2<8X40AO#nxXP$CNhEgpJk!e8nSfJHHKYGPQXHeo zlK+YJ`{dx7xhMx7}+9;We%T{4~*`7 z=9#>4m#JW}#kQEQm6cb~y(b(Hn56KKhx^u$V#2d;W#ospvu7|6sxGgbIj`4W7wEc{ z($}0N#DQ_DSOQF|y$Hi$V`S1)KxdM;nXUnPhTpGNEU1NeD9LJM*lwzl*0x>{9hI5Q>lZj+D++uod32h}JA=l;;YVZh;G4SWb z2E;h&?wOq&QUa8P3>f3TtVtS>5tkISOxGUL(7l>LKA54-p6e7Znu6=h^FLjaVa0{5 zWv`v*{5j|KlCod@;kpd@f7H%)51f?TIOt~ERRB~$=w6)#o~{TJ(fm%|oPF&0kMHkC zMTND4T=4D!r8WDK@AYZ(ZgcgwNwR`vLNbII|JtGRcf(glH%Mrbf8!zx66tU z&%d*drrr2*uVqf@m+qjgV^w(K?xlrO$Nb=7S>Q0oTOWIChmV8i@xNG2ab0)us8>qG zQLJu+(B0PNQ}Dz!JK2*&TeVyrXOTOR8F3TCXJ=)(0U`!?^Ly71td0BURmYx z!NZx~lC-eDsbey&l!AIKc>hf1`MQP6RK}qwy-u}yu6KCzR=+Q_Z9ndG_8obPsLBMu z@r-35Slz0J>i+4&73lgnS=~t#NK5deJI)`pXyHJbzXSkXJKP6*BJ2XFw{z?r zl+D~`k%z@?c31Uh|V)K1494W}b-iH!p$wOpk zuG9p|6L3CGy7y#oQ|hO5we??HR16+w{?Rjqab7N;%gPLi{jl6TPF>U%@!hHD61tK_8G?#)L^jOfw*du;S zul=MakVi2#cp1#Blcm#d_}%_r8SA3^sMb`ZkQVO|9>-+A&Y;c%H>V2;8}^-<1umtS zzJERPf*+e0Va4=6ZY|G4f5p*HE*ZAG-{ii1ZDnG{?`|IR-ZT+qU9bxH)UEny*_I4~ zQ^?Xi&H%ni{|)v)o&`xobu=1fy)5k{l56+dT%8x}?e45|zYdZztVs!=I-<1O>$G+S z7W=RX-2U9nkf|!!Po-`dYZ>Px)dQVLr20uZCx${pZkV&CE{Ki^nxS1zQd{rX)DAhE zPzo^Ugc)}`>Y6jm%?LBkX@Rcx&&9Zn+kQ?t3rvkmMfB(yzvGccEn=3h^Tstm0_g<8 z`;XO1SuQE(plK02%o^zWhGyY_n33;4K-NB$?kF>PA#Q)P z!o;vz|C3e%+6{+5SO3v>-ha!6o0(d}<=SPuiQ3i3Nhh)*q+iKCUUz)kk4G&okEVFp zd;?|+iVeblrtm<3c!O(UL`=8O`**Yn^fgznw}}jTWORGaX=tgFK+hf)awh7S$LBaL zCt*+d+bt|(9%s#os-+?Bt6u0%Q$Exl_t_JIIOj*@@BIHNYjRy4P7bqJ#4PKhKCH*9 zN9p%IaQjA{gZyMnlV4v=pV9nj*r(uLMg!9YRFIIIO@GqwY0aWu_pZZ7^{S}3QPx7i zEP;-L#e=4$sfct8jaj8-_|x8@rr%`D^M9_n3ZM9KzU)m8!YIj5vn?VsqHH~*btD_I{$qyTQt?To?-2ueqjb}ng((gj zB{?EAU2j#ApLJtZPw4~RTY!^A9t+ua;4#-h`Fv<-%s8L*?L_2A;|qL12x)J>CBL8( zOrfqgqZjF^(N$tMUIFf6ieVLPobJ&wyiG$@wz=hRdc+Fdu;LD@hlf_Y^Y_qh;zHO%|& z0j&_ClLfa}XP_V}vBaBSYEgLmSOoe7qN)l?Oqh5%R-SeQ_T{&Yd}5obGgcWfoXFFi z=(zoel{|hFAxm>F2XE)5^>8VU^fIf8;+%1h5o+*PrBpUH<~yZ)7pAfc+)|?FzyAnxo?+|lv#t93kbg6Sf;2#6 z4F6Ubm>w0P6>5KT=f&i}jPLu?D>9SU((&(b^iFg>F1)GClBeiODY?Qe7gEncnz?^x zH?K72r&>W&^!WqDCT*#$id_B>?L$W(u(fvTy*YA=VUJ;98rl&8yqsl#6{jqX6bkB{ z4udxa`VzrIgG1&Gr=II2 z3QpH=3cYc&QQ=!`2e>dW(IGE3d@RDfqB^tUb{*WJ#y|F;JL)YxxBry3YDVh-$iEv% 
zpyv6D%H_AehHJtg%1z^G&pT6gvXoNt0!EMOxG9L^Z%tWFY7_27LOw+9`iB#eqoCKq^?@MEjKI~vK0Fd*r=B5*;fE13+xV6C z8qy}v!VFlrK5@eVC1P+SSL;4+ib{~)f5*bwaS`McS7aAFoEj@dr#sw*l06Wzd9Z8T zCD9GE{INVXlK5cB?F^z|HG&1rr~(*n+koPM=;j3 zGcN3xcKZ7Flq%h#Xw_XEPNCB`#cz0;Hw%&lYspw9sIUVE(@+!ZPwipR_a9x;Wr>!m zlGYdTJH{TX$9?f!YQp0x)cM7H;jo}nT}$T^6wJ5OI@RduHQxDRMMxv*LVf)N9=F-z z8?YhVD4g#oTmK3~G=6h>S#kP)aWEc{VF#tl=p8?zcW9YOWUDTr$~<>BPhn%`ji536 z+qbOCvDSbHzRx(%yR}u2Fz$IHm zJO`%T2Hr;dc-X+;ZF0eMi?hS!IXKz=Q=2?CaIM(oVW&=^4RWpZdKNRnj-Wv4bUtQy z2CL}H3);`(uaPtOY!xl$-Ep<9dFJ) z&T7yAj}55}y$VDA`*|WBeOOOjM~V_>Za`f$1zbD49mL#p-MCi>El{>JqkTNaWksL> zk3g9hh%$33F)g;^e1=@icHQ6sSdQAHpS$vj7T1nOuxa%-rm^eKzYPv+5Qxnm~?Z%@o;8aSnxD*91T^o#DCHwVuv<&96y3l0r;K zx?9AujI6Q^V@$xYE`SB6DBFox+!W@XwvNfYy&6)6>WsKEkBkCB6@&%ivmO>ct#9`G zh0ly?Qw9y!ORe!zE@6)x)K?aphG+TK^pnpQRUSLxRwJVPi%y-FBoOmdMVw9N_`31B zWJV8vLy0c_HEn4i<8rS23lXFs-SCUocLW1Bp+XSR#SsrTs}I73!fd)c39E>X7@NjIRC2}o^#j<*Im(}SP&LAB3)m^ zsk3VioL=QGL7XG|5B~pEXl(euVhGd%bt)` z<2FH~kjV8)I0IQEX~6T1qYkYwP2*~LshUO#E3IpLc4eciY9YP@^A{bJsLMoQRuP39 zUaq^OeKt?sGW)RlX5__zAIvKB#=aHn^%)I-7%uAx4>esLa(76M8lH-n2bwt(KIqfC zZiqLt4yLwogPqKinMSUO|4CHONnFI4HH(u4;dj>WAjHissqalOJGynKz!C7i_u8M} z=Vp1gM^wgXK1_{SNW!rSKk55b$hnF8N$t$v$|9c}q zymN}kIP0g`e3^B5u+wn>g^!Fp9@}{_jg9}i+cZ?MphLy0g-6g|Q4pZD$M^<5UIc1N zY6E3{PHJsJ2DM4z8O(31(ce}7rq(ik(hXmBK%a6o$)D9GW;iZvTk;(Fyi`ByIA30D z2!&C8+3*15h|Jm=`{WF`H}D872(5Z%^rm~d;NRSTxpQ!zo_(=*3pe9+tB4^Ksk@vF zP~-Lr+R}JpI7Iko=7s883!1|Q67 zq}CJ`n0T<;BHrBoX9fNU5?>Ub5K@29Hf#||yc-fO?^&~IH~GQ?IVd`&>4@0gAKW%K z&7ECqUX((BzRU`*HFCMp3XS=PoKu~z%JT?SVj5#vn{WS3Oj8lMI(yW!-;qELhqPtC%Z03K^mI0!pZ`Be z{Q>mOfJ5QorWxIEHd<_B%@0-$Oy3iSD=ZtNY zI_d&gIv)G2xb@IyO|Kh{nzZzTA{9Rv#Ln89j$$t>2dizcz!X1y{-BYGWi6s9E0G6q zkF1<3sst0#=tYSioJ|b4@La~=I;*@u#CE>}8qv}-b|II*G0q5~1f|F>aW!&zaTQg3 zY|v^rj+WP@TWGDMc&3+jED1YKN1|z*>78mqFVyc2zZpx>rg*K#v{>>T%n*>Xx6(-= zlMhhc)bm)tD$x7C%##gxg(v1`|z9E zy`9&^M2psJarF6(^aEL&wwt-x0`X3j6fOZ11U}LNP83f00rg4!XPDK5?u|Sg`gjP6 zbWy1Yo+e@-(g8yC0W^(!MLO_P*9^Z5;UzH0W65bs9BV~QdgwM`#)Lpg>R&Jn<*Xqm zy!{b#hI0WqShq(`e@_O58uQX-o5&X7x#*9)5Qkga?+`q(eD7k-49h&9@9UfCW+3z; z7#AIIwv0M&XZDqMotRgl(t{tti+-71$;*!2jHSgZoH&O8*h#f43ix#)iaFwD^^6JXQSLU$QBq zS&`0=n{^{>J}tF#{gulVUy77jB z866vImtA8T0AjBWCHaDs=HK6@h#{n@-(A$z&9kv*)-oDCu+3Z44C7bX|HVoDfD zs($*7@tYW9|E1R|rCKeJoks#Oqia3_&X1@;xfLbGUoIwNUX`PK`9{1(lwx$$)#PmR z73e3EJ>sCOfPsye@pCBvMswgFGOTh!Bn4%3-H#E0w=B>=(#3bOy_jvH z8T0oWse#~H`kxp^&h8EOkkJIGyE`_7Tw4Z_V$T_7LJ~mjB>2tOe~R5!bE{M{GG>jD zQ~6qPNRzBANRoq3v<|gE)zHCnLBx5Fg2y6^O)ruw$B6G9O#{YdI>&;lt8$@hxR!aX znfJKwqe^_btT_}bKL@L{QT8e)f40^{`2<*{ZIumdkb9SV_ixvOis$zXqmux~>^HB< z)L{p9zszc+dI3>mM`Z$YSqVf;uCfYwb(M!90yhZkXU4Am9<#6@^QZ~5Uq+gSYs2*# zL}I3AJc~{?Js~E`eDFCORF41t^F0SS@8Ktr(1s0=Vit~%y>oarLPIJi))C!%Z_o9# zc3ScCYHNb*E1safz}rXOSWmhC>mvW_+eVOoRkVyZb&l?fGNaBgzI(Cn+q_=E%wo2$ z!P z1{kR80jP9#dJ&5shU5Q?MS{qwfBoNP4*uCKB5Ys_sedq8n ztl9cQ1`gnN>pvg^E5gZ2$FI#6X3e(Nnrg!zkKG>~$SHb@)o+eMZrybzb(32X2;>_$ zVO4v}12XQjM6R+Vt_B05KESdGt?6?l{CoS!bl%lKX7|m?l zxcNs3U399CzsNbBo!Nv`n2Zg*bIcI4B9_`_(cM{?DS?XxMlKW$FbW(=i?#x@|_RVuCtdADt!?3Uw3)dA>F=h!&p zZ0#4wsL|gWAhOoMFI0cbF8&FIUQtf+?ShSeAwFMU<{i=vX68Ryl-I!LSu=V#uMn&3 z-^BaP$Hmh_?F!B+dW#Ncs%AI-A=s?<-UT>^+L}P7M?qZG$^o2;Zgbry#BZ(n9p%FJ zkaf;T^XjIC3jv6!M%y)LB>h?fKnT|Svl(y(T}b5na6h^zg4%y&4!&|rKE8;cHWf@< zT=+rJRoTC0KJ*GJVDv9orZrV@{3n&@Q~#>g?rTrwN>=dFx|4$f2Hp}JnHKx=m0Qvz z|HV#kvzy4DMC$tK2>bP!hLDy&0Zx79AWnm;Q|i--_yG8*l95B>F9e4NPgtM1(viLr^@&K zhk|1Mi0nOuRjDJ@A3DjoTTIx20CwGy`pFcy3}f;usTc#i=BxW%lA$i#Y3V5^f_t1t z(wZ!hJECM=eJVOA($HFU;7(N}v|sY!@UQ?Q0G)+=`=984nq$S6W|{ZVe2^Or*z7N7 z`TP&uF2EK)Ek5Mg+1W+E3&}kn^tiZ|i5VCzj3>I-zikG?$t3Mr0OJFv{HIXD--!R5 
z#n0Vjw}D+_01quLHt}6xF=>~E)TQ1P(J)ff@R!mKwvi$~mc%NY5*q6;{i`!*6irMt z`}LgZS#jE4dNacbK(qT7>1e>f&F_1lTMX>WrlI>k-*yIum&oLF`w4s57uZ=i9&C>` zT-ZZhaJ$l8jCQ`hhZaJfYmKw1TVR_jFGi~=`mfYUD4YO(|4Wso(8201e=)LQ~ny-{fjCvOt$@A z8eD?$JKDm+i7S-8X)Cj%GKYEj=j42vSSb6A`FjI#&DsC>f`RWZ8rsuorj^NRYE$ zBr6nTk@#fc80O#@OGQ88u^MC6}aeTnFvT|&nHX?oZ?ct-d_(_7_8A3lK zqLBl{?jpH*mQB-WEA#)w*H=JA)o*Xh(B0jQAT8b9(p}P^bV&{%AdN^jh$zx6%^(b* zgoLCZF$gFiI3O_ecLv`Z@BQxouEoM})(mszcXm9xo~@a(!>4(fy0dP){V$o4M;oGw zdJhIO11Ktf)sCXRc5AnEaEod1NJ{2LzKb1fiT|>g66tRmk$QgbdG9$NhzEE}uaCw1 ze7UWBRyYq_YK!BFNb(+B3N1*|=-&iZ9?e7m6n%B62s4E}>Al-smT0?FXF~if+B4z^ zzb1Z_e2-mc92k`NE#vwVciLv-<1pRp8P16w$q{U{f%342zAvp77QI)Pu`+KJP%ml8x>U~qcu?! zGf^6X9wbfA{WqiXAB1i^Xec}<6hwYJ%Dc}RiA|LG>OV}q4!<&~tlj0>Z4HpJD6O9gjt6xkQU0%jQ7Dd(H6J^A32_# z+-{#Ie+pO^96`KCF@ICse`O|br3xN(vz$s{~@JzC^D|9`GeA)YG@H-Xdz|m+VEfZd0=fdVCb^M+a`uFQalxfB%fu zGn%kF;)#3rdy@}{4I-0p$EM&J?ynydKLUe*Z7ngP0g zpPg_mLoX(r1qB`06YBAM)~ZN}eHIOTS#P}=iGNp(|MK|Z2Q&)X4Y7icuPA2LoP^?8 z--LJ$?{|Soy7v6Nt>;2yf=)Vy1;fw3Vm&2VOintUH>XtKeIFONLP}jvTDQCY`bIWR zZuJCH`sg5T&57rOqeF}jwp+irAf@`@mCQ>7e%vxo5x^~hq?cF(B-L89Fo6937LQx8 zaaDK-FzrQep93Q4dhiG{oh9(gdPKw3y9=h0q8QGS5WLf|b^=|w3ns}+rvYGNn<@9>uX4+r+sw51#8^b znb_TLYa!@0LQZ~R>ywP7X%nzf+g02=MKOcS?Ic8ifyzZ)#ln?#-H4_)aY_w3vS0g7M0hDgS9u5wu|56MPYqj1FL1Mr9m6wL&rP_WFt z82`;N{Qs(zO%de`FStL4&=HOYVKOe=;4b(q>27QlctUvrwtRaR^i~0R*%CF9!6PwV z46C-f&ZiBZ!Mb*9rPwA!>0QIi1iNcP>*mB`SM?97OUx z04=aS?w+yupLQimU$Zm|F&?C%ebHZO7O-<74$Pn0Z594d?-6?;NkG@e7w(Z_o497h z0il9h-YLUIz}AQDdX869R&9J`i^KL$uHyl}LrL%mVA%DqtF#1U;jg{yrpKE9Qy3n` z5WBL2UpO{i)M45U|CEac=G1W8t~M*Yg_>WS%)-T@<%m)~X`XxWe<D;ZX?eE_a^M(AQ-k|v-!*;kt6Zl8Q1@S zmw#Pwl)hFUep=^d$Z7gX=VfeMtH~0{a2OnPB=NDO{^wJsVEnp*{-eD8JdxntvIQL> zZ)=?8&fhwp@6=_mq$;&MX=SZS9*%o=fpfCFac5a(1LyMWgh#*jQyq}9me4$wezct< z2}EK7+<-q$9kDa6xOGzgM8CorORn}@e1PrU8(t9?v=3y@9DfPE;hg?^*!E9^^(wzl z5$ct$93PD>{-c`GAuDUXPv?nE3`843(Aq$oN0)Y@cFf|GV#DZ`-uHyQ9IMZvL&D1C zz;TVu(0fSi<_;#c_RZ2}``J6KHG^{+_^9g!+8w#*b<^7lJCK_|;udu(_EvqQkPR_K zS@Fg7_R5BxjoOOBa)^7S_8Owi$49>0*ETE%T*n`0`}f~K{LZN`64F(PTORy*T=O-h z$6!{&{*Kf}`~)Me-0oUeMM?b12xQ7K9~)e3FE;G+-tp1c4lp3Yi!JSVN#NLnz3})s zYUAeTZaJKib}U6SJG3#W8$nL% z)r1dhio&+?hZFATL&h-1JG-y+yDA;DoY=k1PQ|$Uo(yk9ONEhz?p^EuK0aBW8a^1f z={mMohD!(WC7}y{5i=_H+FhSwKz>?7Kj2)g*PkilBi#xLaRVCFY}c%b%u4@_^ynX|M0JH$(|)|;mUB12kkRc623wK%UK%O9PNz*31gs7} z3t0}FKUhcfd~ifn_QA)di?K`gvCf!WvQ|<9v+s2tp&-^UI5CQ(8|(HdwGazn#%TUMW=pS}rWFvgmGT^=wtbW$W`49hRNe z|7L^x7MLFn@`t+b?h6Gp6S8?=ReOl}F{kvMB=Nf`RsCzc*269c+qM^*Of)qwJu!TDjMZ-;wXc+wz@inVP2Z(Mx;KrzYGpf^Kx zUlHMuJECqSQ8Q?;PVXX6$oA`n@HSMRK5*R2`eEV9(&jwrGI8i`vkf$}GGkrB9O@Pc zEWU`a2_b9ydKM~uQLm;~Jtp#^m#intH?mu<7p$MKDkwZVqYrq&DHQ;~_@`8a+(uik zp51vBCNm*C{RG&&&6TCeG2K>Cfmu{k#DQaWyT0O|(CVKLk~Kj9`H%jcuB&6Pb$Rko zfSoLlLcUsUer(*`h436*yYX4k0m;Ypgp7A3eF>^&NdQNM&IXQ=s#uNWEbhi!yrgsV zWuCsTTKPpSD_QX+s@n&P{>)K*8CXtb@rBdT+ix`wZZ(FB6+jjv7)D=_A{RBK%%o#D zn@+8RTGji{Yx(yFc&UA1FCf30Gn+2w`D5;a&xCicIjGt6hr$kAd_NdD>~;R22-sLx08fruG9AerX5gbp(x}C`Ent0moP(J^0rJn9*&y>C zl)=`w9#i>YBN|2amU(4~h}*xu(Z5^F(kS^1#Q3>v*y`}!Vsqmt#_q;4>S^3T`@+F* zVfZs!v~&;)WqeOMG2{WPFK^;X^YRh+_7pIxMz>diamF_fY=)g2}(g28alv2RsgfBM^+o&4cZVNIstaDQwuvY z6^!Q_SBH^qh>vOgUp``pFG>|YI-!Z+*C07+6|P!umhz0Q061iBa+l&d+$h|>u?9v? 
z)-wgLgNOIW^e2Fur^wD&;6djivxx z9nO%cHE=8b{~sV5wiUl>^)8Z4M-NwtEsJExDXjf2kz?~)H0H2jwH>zjWh`$JrhxS~ zo;~{q>F$XLYZA>bYPV3B7Px@TXEv07V2&9b3XY8`Vjrl9=ZbpJjBE-80)!Z(D9dwm z&Sl`gEb70j5b`k+q}~I+;!?0|#P=b2HDjtMTN1Y&>YYwyN7k>C(fLcoAg8C&>z4)q zIL*s%0VfAwlV7>rpQQx8&2Jwz&_8+sh^bW2a;G~iYGJ@&EEHMkKe_@stX!wbeGF4p z_3tn2B>q=hEkOE^jX&*;+f6WOxs4oLFV`@G;IWkOLH_V5>7g$W;DvZrf4%QNjvcsAnn5pM z4=0SndwD_mVy>+dA<{IjjG}d!T83R(2OLbYKJZBn{%f-yw;D8s?XALXC}Vgi1Um)R z@+j--KCk?#^<(t^l4oG$rB0vnrDQ_FwGLh4Dp@iLgZj_H^BIV@`@;qnut*V7^ByWA zA;Z)_#Snnw1VBn zBo|Yu(^D=|c^mH7B-oDAAjOAD2bmH5>{`W-e5=3+aQ<7q62Fg+nM)>KnN;VM+$v{Z z6IvHeq;sboPJJjTnuBW}N} z*AS2ShEeN#5?8R)*9SBz!>BJGU)z3rbILd%#vqx2)VzDRz!x5U+En)^MJK=o3Nw81 z=C1Svt$6BmP8mBENT9z?g8X+WU+Ev`M1)o~TlG=451lIvzie-_pR89EzT33&`~7GW zN8Rq`dJl<22&3)_hF@q+s2Ecppd1{ZSfLvIVk-_Kxs*PE$_3iX9aV^H@rf zarP_+ysJiiSMqOnM4$<~MGb{uDHzked#Y3XEC-?}QJ_R~roPVNZ};dz=<5+|rhX+N zc~7Er4jDJgAQjweY}VoS4iTeqnNe8}{{)7=umRXdY!!Yyi|WgsSVlZWC}IHU>NnET zWv6m}2$G9+E7X|@v9x+EnsylMcATE{F#mOvY>>G{OTfyef_y+z4rYM0;XkAlSi<=$ zgUCal+B3g2eF6oH5^n{qjLsjnfJEj1XOeYR3PuiD;3F@`p>S5~_HumCxB~A=_5Oa% zzYbLQMf3wL-5b%rEG_BO0v$t>taW~m8UTxdqtxe2_3$^JtqYfN8?aQCbUYNguEvCg z+I-xCCU8N2Fh`ahbU&5i>8H9{D97c6)6L$`o10B~xdM<@b@ zH9IN8O?3FyT4<_bD2em-jjm8)%B(_QFfRyT8S^L${>hR#NZgR5-!3;X7(v!IOy@Ty zFS9mCuiF>~kQo%r0~^WgK3C`tCvXSpnP(^>Chu(<27h z74+f-@k!EBZer0X-;F3&&M4}lB1OPh|D(Vo*^O2D^9%c$aoOX)Ak`i*RVWaR_Pp4p ztt`-B338L^&k^i>F(3)Kn$95voM(@vq72}ssj`rkV6&mh@YWJYV*Nt31^VQOUS=)= zJd4`OL~>}Jm^jZ=ItlMSLy)t{FzAp@Co1IPT1!~V&a7PmJos1d7v3PU$8 z2U2V0RdqLzgYmJG*AD!G6lHIfjgrZ`hS>7oVBEJSqkV`gc=B+@bq^iAalRyh=JWLl zgO+@Br2PQHE3?ShpHiDWH`}2u6TeObyS!Xn0s}`nN+z22#zIevFM6)R4kY3r(dp6v z0}-M9$=;%j?Edpb8M}To;S=!{gsjtA?mVuu!rR@`MEM`glR3E?;lUNZ(>~MO1zknU zuMzYS@dzhMi(CAR0OCVW$vi4jm#P8gkmJ0&!{i*75z#fM zdrbqIPw69wCGve$6repf8ko)yhxaV-bie-ia!M}T%Fh^$3`M0BLZk<4gQU%dk)x={ z6YHThDavQnnZK{#=K&?kV_;2An=XskMqC|pyy52sX2A9hihQC#^#&T@rt< zYt0k4+0pKv1WK9w{QNO=!t5U@2R+{FigXc|aA#?t!#{Fpf$w;w;+Tg6Er>cPVV95J z-TTrxy}j4Bhmw$%ZljPDI@>MBZ?jjn1ejvyBZ7IqbVia8)s>Bd_tBUIqn7!|Z|8*t zmHeV-Z5Fumq=>!W)x7K)^@I+mD|t?X~$ zetgy_?leRTFXp=fb!?aTRb{%YZZjLhB&QU-i!}MzSr8AN@_@*eOs6tOUh;$V4zE_) z4{hmGgCu-akE*5^JcP4*M6DL;Qq-TW#;tv+BY~hUYFoTxDaGOfNvO&vtkR)YE#3ts zM&3Si`wtl<2n@@ChsXXF8DX0jK_iM#Hs9lnRu`Ds6xLwpjy+SdPtYKneX7X;JphOY z(?y6)tZH9QXVZe{csL8aK?HJ_-hRRTjGrY9rCr@77&~oT(iaxf36HkT0GjKZ@dcJB zsILagi*FqVy%*l2ylv(2wjBuOgX%%8H@syNJfIGxNM(M0r(e@#^Xm?&5^uD&F1stB z$GMMXtq1tc8t&Bho>!T7X|SbG3EHQOS}Wemi1`ArZFwVyKRPxyp34n=eX{DI&b7$s z?Ww=Jy4>2jWdz2`6>ssOBsb2Amy*A?)sGWPj1xP4;2IY_GV(|q+oe^k_<@k9=O9s= zLj#ct?^i0drSxnF7PFl(_4HhmTTcNScpRO8(s_dTX_6}_{uRc{h7^S?5AQv38cEm? ziTL!HJDddavG<#}d}s@*rxVsYCuYT6+qg!shr{gNGxgFZs|Wr!hF}MS$2Gvw52i$t zKnA~pxqJR~?p%PBXJa7}R)K{Jd@Ap7evG<6i)zd+)EkX}-DPt$=mJ>-MjWewOCAsR zY`8P8pf5-u=Aya1HU?PU$>YT$v`eXrb42xvBoP1l2e9n3X^+lT&615w$6v_B?L!h8 zY^iP}eV2tBIq;4%Ye{n>xVgbsT^>V6thDw`s`83tU=DG)rXw98G2C-wpWT*7_c<)? 
z8*j-fFxY5N_w{o}R?eu+E4AmMPC{OFKMHxMo^3%%b?@hNsS^o%f$RO0Uj%TIw=ofw zI@Quv-eECzP2pHm=6(?mdf93BmQfUB9H8{@70q#UTtTZ0h1Ss9*Mr=o*tSa^gS++8 z(cU#ES#T5OSEJY2$FfMc|K;=-75 znMukAA6y0w{nTNze_Kb}--wpHMB&~utE=$e`DW{>u{ zs>CV10Y3CS8{P@@uuS1-53oQv3lcgLTv`VrWHb!Cn-nFA$Ce#Mo*gNM)|@FCi9X=Y}Yrs|iy zwOE;)AfGN09S^=Q%u^6pf5IOgI`i7z5?s`a)n(UJUs4(dUYNK z0*k%L?DjZQvk>?(ri^-M=lCd}h;yt1^US64gpP^&f}g&mZol?*hXCtq#DV21omMl4 zIQ%QJ4B@}^mG-8S;>6%rxP0w>R`=JJ*C1TeUh=L(iSh}|lb-XhYVE@>{ipAmgoGU)`-#Hf zUrKxL2E?s=C&Cf)7mQ36hWNw257q6W6Flr`p5y>FWlToi$rOz&%2ydM=f3U%watIG zDL7GKq!1$_L%4Jp!kZ32XIsMs(EjhpspHCu;zya|`D9TBDt%g)G=p^$ia8rcMNd{V zI-HM5dFZsX0d9s!pZXPA7<)Y8t5c4(w(O6ZSYx<(m5lzbF&>tq*5wFdbKwmw0*Tl- z?xXhL3w&ye=-Fhi1^x>dyJ-GjjnlM^i@Kvn5pR4$Z z#Q7;z?~l~CHFXMpk7@V3V;c{2%wI}kJJgOUm(3SY4bNBN3?z!#nU$eLKz`EHghj~i z6q+a(<_6#QRFOp(&y%^!<46Uj6c&@dxBpcGSKSB#%@+Ppcr7QiSy=ZJpFN z?As9Ziz|t{oOI84MK3A#gWY7WFGsBI)yFf@JRpIv&81vCZbkg8=c_y8Mk;*T6Xa1! zeX}V2bCmL;Sn)6p^~wSr>hn6_sZ2`H83(x1>dkcC$QE!G1Q{GUJ0S6Gu^~!!cqp*4 zzM7?@yVJpZE^uosZ)jFHqXbbTSV1-CQ^D^5>?6YRh z9T44WjxAUs25~%rsj!V|Q*O_YM8PXWS#1-3Y%E;w4PM{AK8xEu23%`IUVeL(tY=!@lka-uqUOJPt~KH8kmd>&rfn0^V&4+m%=Kk9T7dM(g&u2Bw;L z9DXXr@{x-gO}@2f7nVMDL%KqSs9q9Cy2G1BaCE%wmR4R2V)Mp$u}r4jh?m)BQV-;F~JQASBs>8 zgwlsHKl73$Xi!pb=PPu@bZ1#A1-%sLf7vjFZuSm+Ni+SuKg!Sh^@?m? z;-73mk||3rX{)AUs+4hA)Stz~M+tU%#H3VjKf3=#3BIf4*>7rws}snp_)?t2lZ<(E zuoNM*>@%l4!(sCBO%+aUt;bueC%TCzJzHnN!Z7g$OBz8S27`a%yitkUEfZ0A{Kzv5 zR;2jbO782;w~E>pvfE(9Hx@u_9f}4=`hW1l2MLhTmq`*&7&L*yb5&xyFvH;6w{C%P zB)}M&fm{;dxlUr^cvy@wt<=B<=N zz03pTYTb!hej%6g!g8R`OX9^hGY#?JJd|&Qdbp~fUc`Y|Ld_KD%gBuMoxE) zGFf0Zel#m~*ya1Bw{;1nuU}*H(9%pUPfrix0OYTUy0Rb(nnvF{nCKA z8K88hMd$Te-A(!Y)LoQZlbA~rum`*&&W10w&`@j+C4pt><0^pe?A6ln%YU1OnZ_5X zF$AMUW8hmj$i0MmCHUW~RsF~Uq*<3cFD6hcO2Cp^6D3|-B;cGM!S8i1ryu3ktEl_Het!OZ|&m{YAsyzOvmf9rP(nrDb`_bsof zlQSu-*gIk`)H@3Kh&~8WL?2l`eq5a=*D(+8dPg`S@cHaMm)w=jA2lqEAO_G+1blY+ z9|5g=WZPK;&?sQvAA`+A9JUsPNn<*pZ3J~SKnfKTCDykBMAlgnPNu&gcorr`Sz8rM zMhfCA4q^dT7THqcGFohe7}h1&rq)+gaZBG^9;yEL(I#L5Ys(Eu9kEEo`mKDKILM!1 zYeP0Z2Wx8sRuSKme&wiHp6PJmRzx1@dY8S+-^!^p$S0}$zaimXeiO%^V zd0)$P0(Mwb?x|Bw^YiD=cy^M80cIw5YSW?A%|6xicKnV9)3d^lP!m=u588=rAyN}W zAIBI|+m-n~iGE{9ve8v8Nq6G~j6IoP`0W8iMX3OQBx=xB3qU8gARGgQTQ8`cO5_ol z^F(n<*M8^drJe%M>fNAQN%{st;Z_JjEyh{N6Lx_C8NuQ+xrn#W*CYue8_3O`?L~wJ zY?$HvQ2vhSPz?@{u)v1Wl5Y4*3&7C%??VPQSWcco#b%6X;%np@LF@8zDAGYdmr#&q; z7mM(~#~&tqevOae)ksV8vr6P1xFpYyfkrm$>lnKyi^*>a2=s1&gyyN-=6#mdI^ewY zaIm=GmN>O<1O7xZXv9Q0pOPVJrpZRjX<;u}vN5x@$tBpaUYKYA(&!s=CxPCAI{i+5 zz{WlIj@j&IL{+u5DoRSx332TETZ4&jE6SoXx$h`u>>QfB5pdGH>;-H>u8@=FNT=C$oHp_NHhB~qhz&f=clfhr=I0AW*_XT^GIgChRYsRQd2tM2gq;JvcK_S~%^bgy zZX)?7+rkMbNdklrNO(+^#ZxF1ZmWa80?WQQ>h5aTL(m=hc4~sTK(SBn@JnkKh~krS zKDe)3y^3zv3uy2Ju*u;Lk7?cgjjsot#^fcj(|$&&zDOAE+Z{s6ukFKIUtPURDt&!i zuuulCSmZ>jYtVcyXi#6srKH8>3<)Ip#A)ZOeeT1p`sm!kehqa;XmrMcCOVht8bCqAkrTW&f0L- z-u~Aq`VKjyrOBa;Xr?Y624%*=| zRi^TAS>ET2&M}JC|CJ42^DnWI*RcFey71pq$bpc?Ah`W2WeT1(aCNtH4HnrQ(B93) zP|5%>zhBh(eUqGL6?*U#9XEN`q{DxmGaJZe+$mC3;d+b~!U8^B_S5wXj@cEP-_!oM zpq;P^jCE52)WvZfL_;0SpASbmnOhGNBQa4(AZj<)f%jGC`VWn3-&WNPrG4RE01Ra3 zcn~7>@#OW#$8Y27H7)}~LPgPdNqIx_)Sc7OC}26BB`8uq2xw-%V-XGzK9{6dQcY1~ zt-{yXTllz*Ic)lHebuW^?Wh>tUVp~uv#_ouu(qffrialm2d9xj2~@b zm=#HakH~}=c;2{=@Z7I4W>D#SekXy%;tO))eb7-_ZYLd2_Z@}N(~AR$Tz~}Yk{%&l zXiFIc;71k+`R#fXL88*qzHP>upwyICVu+FOk=J)OXhyaM3y%&&mx0VTbWjbB8q&2@ zo74&&Vce_$gX4H-+?1U{_&I=|eXRzvxb&5iSIk?9`!-Fnomk-6i5-neD7xxmvC_v} z$<70cjlGkwdN*e^u6{!)O&*_R9VdntGvOVEGK>fQTS?4eFqFTPyO#vp=Qvq!{ufaZ zw0>r6pDj|uJAEeLXZx9lmAoand=S^r{+Xe>#I-t86%z#RtjZ$O%ju`xI1 zR>=&X7bHLS%8N!nfS^`%;!JsSIl-%;UO%1-*S`D{-2p+FY3O5*zYx%0FtJmiZ`Uaj 
z$BH|9@LV8=q8(`1p)^7>VU=geR>mQJhXg#mS3kXINn+9Xt!pxVvA_R3J#CLwq&=)F zX&=W%JS?iQ%9<(;0!H0fJv%ljj;O6=j>Hn^v#Ti}Uo6Q1d%p%WML>c0&ZWjnW6U>o zE$n?jeqC`MBG`5-(I3ihJ(YoQy7r|`_QTwM23UD||IH-li2TIV~Ts%^RdNpxqoX=zYSS=s)n%)sheiuZw z3T%25T|6>tF|I@9*~@pir>vjjsyrO$2cHRVd+D$!WVDEF_JWFzM^xaBlljh?E8TF) zDlXZ8F@sI1Y`P8(ub3U0HQ4fgRAp_#)vle3>2~Q#*z(LDNVZK&GBhjLt{S5? zsXe?zz}xqlM#s!HEqHTUzSBRPt1+(}r3q(#_?eiVEsIkpKZfeAu3MmA;uZg<^Yg^5QY4pq-Wnu|281SU1*AFW>YILD5phfi@CY zirRjED8J16?+`3YL1j|jXZQJr?z7PhVoMz)E1dvnwZ#UO$CU%>Y7VV( z5!YeeN1vpI0=mquPXdb7#jedU_>;7Yr5DpWf&8;p3VLGPVE#L$uTV~_sB~%S{N-$ zL}_wrB~^i#ay^^>*M9TqMo+MAS*6|GXZX}H?_9@RNDIPyjUT*){Onki2f+|;D6 zhj!a{U#Lyd5xg3xi~KnW6U%fQ4rO{5aFbC#H|=|?v>))1IR*^1ah1O$aTqJUz}Vt) z57<%|1eA3Q+kOOhLF#de=#nb`6qM2gK?=}xIVIy3N4&;fJ!LF3^GpD;DyLMIaf7bn zBZIEjH+A#G-mQc@?gp~iK-jO_0sf?lb`u6|YRAaX^L+G3$c5NOnS26OIs+LYMePy0 z;?6oCzRUQ0fL6M?;SRJay=}Nt(SzazxxQ}`dd>LO&xDE2>t{1E2J(e)4Atx8Ovr1^ z41JgiT_0A{MqyFXr>;|GTS*Ppy(HJN386B%$cbmSu#0tPBEzsspeqb7I?7YhkxqR% zu7g;7PS9Uq3yz9q43B>k6Wva}&j!v|_R`guU_EmevXOLoXS-y;A^f38YzGyxjm29zv-&&N-HXJ&M`&d)HCt3L>~ea*@p^+q z?h+w*vKQaO?}Lx1w|aj)DRp4SFjkQ_T#Hf4fkwv8=Ex(S=t9|kSkSag5Bgb(gE+sH z_3y~{2TW1mqP}>)5CBnXxb7@iJEq)Tk48Td;F^l8(xYo^h|S$)GSDw%7`95914{02 zw0oRU>fti^OB&8<27+m!pDD8tKk+rslW*Z)Y_z;NF=RzYn)d_Sk7qLBalbny?iKP? zrz0Uu2b28M9L?-#LRl(+6C&NGF%YP)=eAoXzpoJ68-2~`DnlMMt;9q>wRw~5SG2FM zRsXY9^)X8+wEhGK2_JbuYz&Sbj#N~g7F(;*lxb+6PQunoo~sLKjd|6LUcOl9x5r+# zTs)RQzL+5@#^Y6EE$WL-yZBZ~d1L&lerBeXr{4DDkE;5;H#B_P4xs$JRvMQkG)cBn zQ%|cJVpC73xT`T4INkZXQ86KfC z2OpfRnv8Z07Ey(OUpW~+jym3EY)#34rXjB*&Pt5X-<=d|6FrQ8ElW6!kkN(iHE!Ji zztkQTPf`I{wn`w&mMG86oR<1+vv3%fZ#l=umh+<7{q#+&kyn-qv3i_*8hTjo=|wEZ z^Aj52=*&_gO;j|yeTPa9_5E?_ z1E~j=6f$3WDZM-EHY45jG7ogIw)1Af&<1mNNx95PMvy<=tzJe3w{=*V^l%=0UJ8g& zQ}9PFKF3RqwhqD_HfjH$TE{%X1Z-}&I!4ACo6if4GWX0Z6E8(jGafhUW*Tdo zy=%5j1mEWRjY{Kp&gacC$_ZuZ;zTalD%4j#o!yuf+M_cmYI={i4?>$!4qD1xAURqX zHrPP!!ZtKhQDaZHutfDaLkhs@hgTAIiTywv9z%Aw4C-~arD;xqm`fL}`-0w_wnI1l zD!r5?l!HBDw7Dcxt2X-N=4|DFGGBUgny{rfQQnpai>5yOqv47^OpNyTZ)+7MKeP~< ziH}#l2Bu4tu=(%wQgK!)^GJzVLJWT}?c#M|<(FOUju zw!R&22|nY)Htt0I0xU{Mu{5ywCX3`YY;x2XqNpwV&U#lXXe7Qi&sobc==&>7rdKZt|0!5q5jvy0po zT*40isf=z)HcEPr%E1E zpDa`&>WGmSd~r<(c=ixDu@IWe79m zuXk;49*rDr4rA zt+4d<4oJjbjY)m#u|b^y)wQ|q_yNi=NeT*wAExt`!I)L&H1?(1mOQ1JJr7TpJN(Ak zr5AG!S&BGRN{xuISUk@iAQL_lekPsGpX5InR(Vt(8=#%jMwPW#K|XD%{%a+@_g;nEi9=78ZHeJu$!)r*CN{7J!uNd$Ku(5d-;zx)>Yl6r zP9Iwyb_zr>1DK3qk-%P#h@h=JmtQNP2QTaD_%=cLiQ?%U*RjuW(9&~|jpu+1bhy?% zd`Q<;59dI>Ido-4nGbX>f zH;;Rdi_Dkb3p2NEyhFi5x}`NLIrnw9)AVrc_d^x?XW~~Ij5mF0%u30|{6}lsY#4)8 zh-BeU0-6sZaJY5F81e|uwO5Fev`$Fhp zEV2YHCO3L)!S5nmT)@Wmyxn3Ov2iLBn=yH$CL0o){SxapLDaPkIFxu0}7NNgf+<)2pkG_!*7p-}MG+<6H7tNjdzs zo><=5yE+Po=UswFrCEpAoNODu27y}|THk>?^5w(m1i+Y4Am){#Ph@1%$XJNcKf;w! 
z`RuUqrkOt2C(84EWlSglI(-JoO{q3p%$&~?R*?diQ3jYBChtqii=cT9z!XIFE6ekJ z_R;-t<2SycB9RWU!t99K*n~vfT&yC-y}%h?q)u8zg>p2cZTFuSgU{UzF*LZqe4!17 z49HqKf2+fy@6mG*lIW4m|s@d z3jJC2#f!4&)Lvec^+Y4geRXlAm950=ag7${eGSw)z>{ zD|5c1va-nnDE&dUrDp=rqdFX>lAcpy8@T8+w38R$VpG#>Msy^$djPRxWUFs;?;Z8c zB|M_swdM9#mv0VZ21fv*K?4&rER$eNq@3%3j1UhRL}b4hM}vX~uipoj^;<6Q9~uk* zQ#vV|^(2r*rkaD3n8=9UAN4+OYmph1#5?oC`^`WvS@Z~h#F>B9m8ns`krU>O+LSbnoAHk`O<3Q8iGZE=d*QC3k*D-fa^O0F5he(HLmwKQH z69d@})L$!+LH$R*{U+IFEw6$)nD3c}HNHJn_ z=xG5a!=YQF`fJp$vxmQKz8-)n3%q)RM{0$#Nc?A|AJ5hB!M!?u5P>rAVeI^s_e!09 z&f7+KsLyH>60kWB+UNR&MVx;w$rp988MCz7*OWGfRJ$xv=>Q33xtp_z@zBguV14hf z!*iMJ5v6!rZW`*-+_Q6s$N9C92OMTzlBMlVPK&;y#R!{_AFsDeWHy=MK&1z~nFx{A zpH%PY`5F|oL7r5g3fWV95c+Z7Ur_bxAR+X_$_UKZD&a!4(&bHXc?NsV{x2x`3UaOm zxcEgY`-hBnOH=5QH@b^wN=ZTW)h-s7TL#%HzeU2|v=9*u)&xnX!CzFv4J=&UJdhzR zEgEn)L(_rgJcc;>z}!U$ys0ooV(kflPK1zqpK20p>u1;3;}|{paMSnf)5F>4^=uNQ zu6}>MXt^1|ApCFRabC|w*-oX(Os`e+^% zY8~Ph0u*zS@odC?^x}s3GuxR8r(CdS8&0 zr`51IlNLeUKM-Yp>=82LfYvjI$0Az4(@1JuzhMQRQA66kD}G@Yw6Gg#wytpfxZ0T; z{qf?42s3q+R!jDVpD#~_R3{Lmzohzfh_B&&M5rbJX^+1ftRq{Y@QArWVWwo5!Y-fg zm)E&7mGkSt<(=qijo4Ry=zABDPLR3ltRroF%#x28%bXwD^;=4oXAaZg%?Bu$ zXVs}1@2E{=RB|m6Rls2g&*5rxlo+{a@E3qO%kAk&D*Wq;qX}T0K4Y^VxW9%$zse5t z4bu<5yLqbhW6tLB8^AK0DfZDCL|^o3WN_q_4dR&cl>w^;={Zt+h>XLto?j3yzM7K4 z`{>NE81WITmay8(Qgc8~3Zf%iW)!Xyb+8^4Pm>X}E*2icul?q9P^@zWZ3^0%sT4o| z{%XGc?T04t)vj$pxM%02*kWew0WvfS#PYUT3sWF;m5w;FTI)gMUy*F$aXMZq#8js2 z3qbash%r;WHGEl-rajS|CtFe-;wlT|<5}gKS}S`{jbGr*m>QKzT1B}_NKYz->pqDY#-mYhH z4H891^gv{BlIHSS^zwFzBN+A}LumOFMoWJm?t zja+x2N(O1t9Srn~L=YBSjUlh_T}uwnhMR|{KxfF>tLL&pbK=0bJ~P8ekT{uQriTK| zHyU45Jsc>f!=@K(T^iS9f3|!!!0ZBu3}%Rm6#pMnZy6Qk_lJ8cqJSu!Qi61cga}B7 zG$`F6C|%MZl0!)j9nwlkHzP1~3QEJkP%1fu)BtDC?|;sEo_D@*E#RI#`~Jq~x~{*` zR#DS3+>!DekjpsHd8(cWLLnU8p+19=>h{Cvn#Weg1B|J*2U|tS8h>@#4Y0MPlGZU( zUIJiO_vHCDYR)c?nsWV<)CIdeac<@1^WY_gAL_lcVR)n?GZow5I&g8dHI5pVFhUd1 zh>RR_{1Y`oM|W4X1r^csn|%5KWtClsY04d>ZKFCXhto)epj>J6Q z0}{^4z~1@g%UqN6)SCG$i4$tfC?Xi!XE4+55>QQJ*-ibi)S=HjHUV-kb!#hn&&uQU zwcr@xPf{&J&xw(z?9ZbBP?L;{-D}AWzMLnyFdKX#z4S(5z+iSBLPUgvgov38%P`4y zIAM`9yXKe?Rd+B4?YdGq{h->>dWejVY2MDSEqS!7M_W^}QC3~DA5hZOLpiPne96X*kd5|Gl4o%O+Q(8X%FFd}7&lu%#J% zK}toV2`U=26+PKo%N}?SB~Jl$&osjm!zfeEaCuQW!HUZZrFN|6ibQ^i2+ z-6&&9ZIr**&=b*cYMO&2Ro<>>a<16yfV2~frQbmdS@Nq2qD!E{VN?EIyHunR5E;06 zb&3$(W9vO}ubPib7Z?Y-G;+_=k!uaCtZ6MZ8Mw||6Z7h zCAj6`_qw1M3rR34gy6>$0MBut;0fAq*~+cp39^k}>x^3Ea|XYa@N}@($sz+!r7r~& zmOCWD(){;p74CzM%S(WxFZpbv`Yv}JOl|hTJ595+BIatVoI|%{m#x2r`xn zs`y=xV)FzxW)r!tFv*0Pg{>Ne-YzF;?ET%UEnlH%fMTa5fqfC{kj!5LuP|m)$w(5F z81oqoBXpy}bD}ERFul_I8a6}`j?Drh=VMU&efz!zpc+&j)-Ut4ia0LbeoNEqfHF>w zvI1G?eH^+P-{r<7!(kFv+(xwccwDgwQ-AImKZu!JQCjr8<}4-t;sw+kAv)6eI}r1? 
z@ICGz-w!K&W{cOTgYlkwDsp|h-<4<4U13o0ms4 zFEgDPNPP>y@9>W&V(STVBaFlQ?-HF9Xrl!3SO#C#zEUqg$dfsLOpA1v7_sV&>`P3qC5&=# z_(w-if1aIC%3MbeBqF@uqM0%`gzjZDmlU?yG6wrHhFrx@uYw$Md5y{-LmX(&^%IKp zb}?y^Y?Z}bUsFkco}kc+C}-MGBF_DNTd9q7tD90RC~DT$d1(6AJKw@`;Cf1Sn-9yx zQR%L?ZL( zHmZ)E&RLtf%9`Wgyp_-K4&vp8nxVOT%S7Hc7oESDzya(s4z-JE3 zDq;&#PHY_Mtoji3nT8IP4LWW zAw~DgL?wEGjp%SUZ+JB<_^x5u)=THn(x*f#&BzVh<=>?a-kkdOs2(>SX7#r}UpagR z1=*cAX(E*BdGlW4SqGSidAP5c{~8p#g@#v?Xx>?pt4CkUOOMGhTTKyH(h?pA z^jry@4hIoRW(@VA;2qeKOm5Yyh`d?}Gg@0(&AC$lVJ#%9F8st7LtHt62KCKoibt)_ zZ)9_E!#-fqGuyhwXM*@p+c1004lP!_KJno7df%sYB(nLEuj2OU4}@ap)`;qjdIW6F z{0v&qs$uh;@Z{jm+j%fy!cbX$xA!w@Qm5mxCVmy6)Sc4?jgud`-alxeZj}A4nH3lL z|5yNN$;KrYlE%d=CHv@CjBk{P-dmT&h*}ML5F!zpQel=}#PuQ~k4aue+~;mgVXnw2 z%6iv2*61k4NQ2ie0-f-*Q+Ll|Ov!j~o<*>++&srC;`++$zt;UF6{dB!U0VjT>*(+5 zB6D!|OL-LQbSgT_SGnDUlPN5$Sb?ol5fCbKO=przkgMNHF9cuWx-qC7ev*$&_<@cu zs(R4@bP0VQ)366GwyRx~HER59mznQ=Vg8(rR5YqkxWb_{f+k6?KQHh+hKeS@t5ei7 zII=9p6+I_0=~#nXy- zQroR;2Ygy3(1XRdkaFEnUu|BaTT{DJ3^!*=M-O=L*eQ9c|Kw&yzDAC3%_>ptr{PE^ zhVtecZX&M#HNB!qaC{@h{GNjbvGtre3&=Jdrp&lwiVS#3j~0PSAlAXvtyqD})986j z*1KtS2*RYG$`=o7$9Tt%mW?@Rj^P>fJZp^&s0kGQH>&DV=Rr%-_+ z7+$~NdFi-+2W%a&twxKKi4I6$vdFW3IbMi4-Va%PYgQPS9b*tBuSs*n!dt0oxO$=| zH+da1hPh}&S> zqSGIlHUbmZJ?Neo+<$dloP3udj%^I)nLieaGd4GD5*DnNuro(6l55A3l)3^5!d1V7sr1# z`@S}F{v++Vqk`5kV+`ocNoH5%C?)z1Pn-TB^k#%Vok+L!?e%7sODszRe9Cr-!S6St zwW%uB+w;Q>v7K){4}LI%z={{9o}`pDJ1nTd^i~}aHk5BTBdHOd&tj(Rl>i^oc zsOT(H2;*?LPKEp1&zntWC#0|dD7+7Z-^G?&c4}g2S;1-pI3KkcTby&JQUK9X{GTr7 zGL<(&{<3>Rm)tSY1kfia>kiXAn-lO*4H76|DY^q*e7>V1L^@f#d1<_+?-nIrfNc@S zVJgKBO{w#ux2jgdTT!=F76D82lW5&BMrcf|2Y!!f?&ZkYC8nIF_usJ`@J#A)WP_?C z+`Kp=jIFp`rxw%moyt&{*FRW z1$ty1^GchHv7sG*Lq7MAFm7_b+kLfm?ZY;c6$m( zZi2kdkevRw6{`F)ZbjrL8(ByIV?NxaGhy1DECkK;jZ=^|nYV@J;WVy+-b1F(xU#7C zuUPaf?d_?na)W5!$vGL4!kC{wM5z`CQOG5CP{g;dj4MC5rkJ;SP`UMZCrmrbzVF`Z zJ(H8RWG$zr?-1cGbqA+x^Xv;fBfG$*g#8nZra7o}nK@}h1gwoi9te1`lX3(pKTymL5#{OABM;?S!2btS-zF7F8>C`9$t(=e}*!cKeH2tVXr-xErK{$ zZ4Ng>nORsgjS4TU(T9?ZP(q`Kr&P5`HL5&zgpn0H@zoeb-OQGDzY|T)8Z;WP)pS7Q*l$S*1>$!YaWBouckg_ z$S@ki#m#`@T-ra z0XAeEyj?KX}yYkXR%-jFQhAr@yzG_cekp;;?XIG6e)nE1VdrHB@F~*Gly^sc-+NLYLG?y z+1i*=$E5A5x+kyie+ouhKYjW%T_nGFpIqO>GRn;8Zrzef_oRE)cpWXt2!Lllb)`+W zFy|Ta){xKqG+TsoX1gb!k$+5chqS$8G&z?Vv`MOFF6wYv(0zgXE^;|&e0G}IwRb3e zuTI51raSgz>+JE|cEefmuV*|$5K>YY<>_h%kMaN8E^1*PV&LI(S)*ZX>}h$6e26(r zfp6;$ABv07LjLf&)8|VdW-imJ^cVr>WBDxgA#Kd}nsQu@=`)a;0K9MU@dOn;#QbX2 ztim%gf4)k7_I_c5WOP@m+t1#cNQXDsA2Z>HQlzjHou}WJgGixqW#ruhqkWbLbJH;U zUUAn2(r=44v@4t>uz#Y>hv>IuEGQb6kc~;%=`B5R(O>(eCVJc1$&KxuKdBeeoU1dq zhef>hc{xxEnxjQ?;8=p4tmgl|&(Uq>Mhm&#%f1mKYj5(3c9K|Brysk7atBf0{Bjg) zWeogc@VJG|cA~OMLKArk+y;J6k~5ESzaX2OQN2EN?LHn68ZTNT1uY?{>!cQ@tfVHu zYw!{*3G8qUhctYZFLY#{Fc-piiiN)t63z2%lKLM}4*;#8k13cZhdO05_)-$F2=g73XKO`?P75$W8%ZvQm-@)?K zro9@{#D%#Pv%4!ZQU$O&Sc6Pkt@qSqP5zOHE;PjTRy#b43je^)BuN5uSU{}&Enp3j z{O+05X1a!cPtxNocwO!DaW5W2?xoSGGyJz?$(P0mj43H#CX1jfg=P3uy;5_n3&sd* zwvAs9XY!zigH{{PY#$h@Fz2Q57(Cy<+abD|od51p;Jhl`IFCC(S4{pVpa-@mMwR?rPA&uUd#^ zktIfp_PTFcw4_hR;knG>h$*VKEsk`f|FLLb%c76#>cVu%vDeXj?VG%#0D>S0*6^NC zQdTzWxic`nbae4`uW?y!_Z0&(sF`p4L$5o888EjKyA4vGSNx*ij&Y?#kR59yN?IJL zKWzi_oK_wgKG#9u?LKO2$9m(dhbiSNeN%<7t~Aj}ix0YLNFvVEXH}4CRk=W-U^;v; zPXzNkaqQ2uoT6{n(GUGxSk6CMOSAjOOI7WJo z6lV2Vd04e?hkRI-a}D0@22^zxCVF6~P0|Tmf-ov=otH0+ppy^z3Gd1lP5wQ`1j$3Y zUQzLDY=bqvld=@+v+SZLI^YQswMNpqN4d#ltB?pL!>beAnB@U}a3N)A@kSM`Q}dFR zI_(r#yYn{_iMARCAt7wBlc@+@JrW4>gI-g?yaiBC$My0ZG2;2|)bt~d#$Sjwxn!il z*_HuKECa?!%1I-hX(Ok<$1-YCSj>cmvjlCUAzwyn({UE!o2QIkr)Z}n^48r|>Qve` z?+XTXdhkP|JF3CR-dl~rdBxcklt-2~t1d&mowdO1#eDj%`zB+XV+uC}qIjRMt 
zib^;^YjE2s#9z02^@~rnWqPRfIN-pVS&$DL0_B%o<8TbPub(Q${!Dg`Cw0n zp4BXqQFb$*`{1tb18Rc=Ams(x76<#G1mXkj<0%}dVfUcJvwDH@p#=M+Dlfcjio zbPjmZG}Qcj6JP>Wq784id5-s^eN23t$|Q`v#EWMk+IN8QQ%$hSVN8J@8Js&AQB$o2 zk#7eR8`DC5q!6>@IS6-#iuEH243#Y*p{liMUUpeE9qc$Nj;-p@I0{=LlI9nc_K+VF zpXGETO`sc3yce&f1It{L?;cwsWT9Ui?% z#`kPT!{HrRkXaK0w4cBn*sG12g>S>yQ(0)kXBLz`uazBgUi({Yjv$gXR&pGrf>F2ljR9fiUJ%4#EGzm^`CYws>O`VDD->)fW6Dof zr42KEbGjv1&Qxh*t;E$qC(n$SR>@HKcq(P^Y61YWf?D7u7G|{aDQSP1nAXj+zXCmY~hJ=~UiKoT@x~`w$8gtO&B>Vs(Oz4z z7uFd%-bN-9_j8J#FMa+*`)rbEiFuBiYtDgX)mz<+M5s(p7Cb>r9r~MiMM`r(&(Pbq zX6JX|ClP}%lxQWAg%YS&;%5~0` zfKjJM-XN?4vbGBrblqTsk~)vDivfw_>fX8YVDK|SttGK*W1QPr4X&gXnGbe>e%5PL zo8fQGlAuC=K~%do5xjbaa|~v#71OeKd}p-yn1~4JO&;-?#4@bXbqsOZ8BN5K0~=oM&xJB>7f%{1bE+5YSA_5Ii)kNhtBU7|9UQ z9sf7;wAb(c7*pvCGA%I2-JLcuzjOzl8zyQhfp2EkYgkoTbWy&S>9Wf7_`cDuYWRX8 zX9km&Bj~=&TYmR9IA%FD-j-8xtQ};1Lnuv|WO8bomoO~;auype?)#x_+y?z)$|@%h zj_S=~+;x)M@m%q@%_^uwolWLr2%hh^dkkIq@eXZ@bpoPP;Qfw)N5lbZt~!-j#HC04 zposz1mh78R4Bu)YE2y3$F6@yZO+})hCc^%8P%_` zKN9oj?lC#6&c^I3gbI%&bVA8x+n+?syaok-=XWpI^Si>3jk1AmWLgq`Cm+??43<@t z1^i80Iu(qLeET~KYT;b1p^l`7RMqX*eo4WQ^KE2hSv?OKC_^^n1-Pdp4AabS4?m>Q zA0OuGu_p|-Y*W!62Q6E3-Ct60*#YSaF_x_o`u?+F;|;#{1KFYIP$xzAm$>G2jIO3f zf!5YfsyhA|SqluQcd~E6S&oHgOsp$}Y>vhy;FHiih{Fny94$e8N%OWohu=>~)|8rS zPJLrVUw>8{ZqKTM`>KznWHKT62H468I^WL_Y@iywi{gSa@_bdXxZg6^&eF(dOS-`+ za0YIAoxP;s>!ZBTs_fqIT2a<-27f0;4w`T z;}#<6aKcJQj0rF`zNuR^x6_LEoVqPJVQ}mcSKv!-Ulp?$By?9zmEMhIFhE5-xoq=h zL-6CVs2i7tp>oya#cAV$^KkQ(;GgFmzR<%7WX>7{5cKtP^c6GeqDv`ELB)CJsK3J- zq00ls<0<#B+$}#ee5Q;5C2;9t%kZMnBjI00pg#F7%>Rd8I|({D{*FyPdK%YiCt#6M6ZA=n(;%V^1wlvy=9NYEOaL^ZWYQ z9whHbxmrnJW*?MIGJpQ+@JLe{_&fQ!j$G8uc`eFfsq!D3p49HKcFS!mx(6()-x~|c z-1G_>ckh^ZqdZ?l6ht*^hb_tjv7F_MjFSQ`IqvELVi~)d*8sP?ge2Y;9KOGI)VE8) zEoBR8U}D%Ss;Grd@S^RbJ+__F=#Co-$E<)1!X~3rVCKNN#M+DTrBwB>QIc!D2}WkD zo1^Uf@n!o;Zj`yveScCyZlp#!HX9)c3B$ij#7!sTXCW1d6fH&ij|QUj3BP62rwd5h zM3T`T$r3vTRjuYpO5;SAsVGfcL)^BxQEtT#IE`oT)rVbyk+2TRheH%k&r0asPjR!P~Vwjnl zf#}e|RNnPZ1EQ(oy5$$fCNZs|0|53=FJWq9#!F%a_Be>*L*sTNIk|?@(C}=P^o6V{EF6 zKR{Oa)K25CdISMo=49f;Hre<WcW zL?QN6%%wCb$IOtFdgOmasX{~J+sNorX&4{X#RV~XngazZugah zDW8$qX|UDCFe0eQ!c5kHTNt!)wOmVBei1T?M%9gLD>cJ+zkW(1qIvyZO0YTDvz`S5`9KC54J|*cT9Gwm zS?0I3r4jr`L|Ekb;_Wb~d7D(vWu;XSJ7dVS6DERJJ}TUWY69aYN6WKz@YFLumF?Ui z{J*dk=<6dcUTm=lAOBp6Yv!P5V?KisYdLGumzr)3r7z2I$po=7_FOLfukn&fVZ(gu zjXHBL32#@O?o74g4k8w}ChrQJ9@iHodk~-gYH@8ClaxHAM5;tGR}Xye;C``1wVn$- zJP=aHRNGXz2ib$FyJkzc<74Qow1%zmygXf*j8lw|0JI_!8{}a}cen8TaPrxN8ACgk z*W{zFj|q~dqTjuV{(kH6w|l7P=PxT_W2K*}ePZCPFnGSRS#t5YKD5a9cca=6bheYj1KWaqWBa(B9?ZJ~=ez zuWL~m#QX<~hk0)Y^hVKdDP6_}OU>;KdCFgF`%)Lp}Rv z+3@F^F23d_2}izCJ+&N>$E#}m<0o}GF4Jkvb#X*!C=>lAVPMyj%p$u3- z`s2xbXd%6b8CfOvw>eq%Z{Z2bG>={c;eERC73t=A2sCw#G#{;5627TivYtej7o-Ay zYwma1{H@B>t|(*6u(zn~itkC34Bs6zUqsm(-}zh4-ZvRpV!Q>rR?B9kg9-c$mc~-}oQ4(`ubHrHv?iYvCHjUfC+so5k2L znhNbMXFzk_USEeAi6bXsR9sNdhw%w0Zly&L-9h+`3n&lM#l5~lW=6Kk)-4xJm)SQN zQ{yJ#4lx;H zM;n!eVQw#A;nxcLvwy;+oO{g+qncN<<)*_tW#B5X4NEW<$8}IA3z?Mp5XOaPW`><= z(1unQ6w49)kblmiYh%#atU^9+BKb#1aMV0cAA5*Aq)|&!(fK)j+m!`V#n;uSez`LI zUj@@|zmFgo;=aN~%W6#xB3jjx{p8T%mSPkTb0e~TVm!8q>BL*_W_ru^_o5FH63?bf z3cr1=xm^uAduk+Q8pc^_>y=Rwi&U4n7|uV}8E5ES_{Z4-t}@_y3G4;%P+8?Y%HSGW zu>1-31siLh570;B{arW=j?QhUl|FYB!NG@C%a5eMIP?IxI%myNBCRsU{d6b#IE*Fk zUj?!R;dg{b>r2&4mb!

GD=%^8?{*-vY zJ6Ipt^z<@u`l|%dO0gpeOrl8?tW>o4(P}|BrV?WmED*Mlm@g(VS5-M)fnyYwfiM$d zWdmY3lb5~ft%cnNQIR|bea;z$~L?6$A;db5hw9;8G~L|z>2)hP0+0swkJb^{G73}JtTwq`zXGt9` z6(ZnBJv->)4{JPc-d2~670#s#F&Aq4-|V^!&c5TNb>o#fg3l4#M(2zp0KcU}TX<2)sK2vXt0(cN*} z<9l#dHRcb#94Wy^J9~MWA2-uFA9rDnWXVeXxwmIdl+94d{y-D zzBbz9r< zBq?hA>_3{UkY_;H)YCPQ=?8zLkJT7%uW;AuJ#IX?F0$#ju>seA;+*f9V66rGI~3QL zbbFN*GfI8Mep`^qGnXW%`8wDCP^1#~WCzVKnsWF#oW!USG{Wn$I}Z22faG= zkx4oKX-+6NzY*=~V5P&a*$E?H}X zZD((vm%}!Gp~*u6n24U5)v*Lo_fKarAItr%BVi3HCvp4P@N{j=p}OmFnA@L`44*$o zd_P_>x&`J4n`~Szm~B-D#NYe>8595nCDu=SLXo?0#1G{<(Z>X~UBQ|PJgH^%xu0zx ziu%=mwJfljap>yVpPt42C%YLSMG~edW&20n5d4u@62}>bijQ;fn+_G_9sV2aNq>h) zpehpz9H~DFP^V50xc2|(ST>bY12qcF6_eCY}6d1IG0&_4G@YZMRlg= zlL|3s0Ji;vf8cWIBp(kv7tUlN|DqQ9b4@(>gf2Kx7c<}t36TbOU4E+5VAW<76Z=y zquKq3XS$}QP6G&xt`1#E0aIA$cy&Nelu*+zoO5$pwcBpd)yaoY5t*yHSyXbBaMSFn z89}s@ac*J7yX`$Q_y^#OiP_oN;(K$@YC6ow_ApZVbI!Kc^?><;gGz6t^n}l-FskjI ztW*pl1^@TY(BAF8Eklrk&W5|e=|T4!P3)dXJoBcn@?Ce6LmTlDeSA2x*AfIINQiVD z2&UR-2h|4VEGSC zMX#A=syX;=?$O^i%nLK=09tU&R8J1~#}nMjfb%6_aRD)QZej7d&7z+)o*{&TI3M$V z(A9frz`G@Fbh!tNe;vY#2VS>ieqz#M$8~++-eCvZdNe*vL5q*hLREEe823KmGvL>R zs&uE>;^B?Bhkam0sTz9T7_oQAZ)gGacmBjN<=mKg#*=&(;GG_dc@UXrEM!vNe1#R>J;=d?2d0y_*m-O;0=EnbEcnG@p zzfd7E8JZZKe%7h*wpo-8(%Ztno^$LT#f=qLj-FV3?U(ZM9(=y`4;Wjk(`A1?;HMLp zBrZMHRy^M}~wug~HgN^8qj@TGiO>OM^>cO9nlpyb) zR_bYBd9a^Yz%+@)odP)$E$`LeCZEl-OhXIa?LLjGgu9h_$&p;HZf3yNqQ zSpU1l$R2=t3d{@?a|G<4K(*i8IdGK%ib5xkU98&YWzoAUW$^u5o)=8KN$WT~?tuFf zqxJf!9brEQ8gunHv@=6Yb0%jixKe>xn;FvkXPNe90Upnl{oMiP8#X+zX+ygV16YgN zxWtj#P}c21_AxhE3C%nyI*+Zf*8uza;9zzaIEK9@sabHWd&mpE?`vj;6;2%NYvw|I zjYW>9uzHDSf1>+Ak_2F|EfKaE-Gr#5#quug6j@KoaJ|H^EQ6tB#OxM`|v z@zH(%-vgw?veiUW=zBbDuRj+T+bAztY)&X6pK*Uxt24-jBg*Ng@XV$$;Wp)6pV>me z^ynv?(Pt7b3P~WrM@P9Nd6XDda3Lu(qkmD z=6-Jj9l2vbuDgJFYsXZaNvB^fbczOB?0VK)b;l@=xc;KZwk*t@Ll-8R;zv~TL4vhoHV zbcM%-rM6Fyzh4NE$1!=1J^Qn6&OYXSerqR1x)Grk9eCv z*&OWmi}6%Ui%eIlN&XK5?ex(=v(o?Li`RdpG6rn1a_+dDAh{4abt`JoI`I`PWWj(l zRb3s=aC|x5!oB}fS5tVyjgL-# z{rELOAcnjaRQh0N<99n9sK^cLd~>&`N<`y<6?V@E+j2_f^~e1q3C(dDZjis{hr42U zv7W5i*By>OO53%?PMC)p=P>h45gTkDJ&BuC`aTg;6&D@N2`9CDD%`KFs~?))fThuX z$5IrVEmvAWLEqG<1YYg79gB;6pLEu+#uIVya3)q-PM52*qn91+{bwuc8J%mEo|Upf zan1erJ5(maf9h}X7W_LPhC$_!ehp{>zWL-cukf>nmz<@*54oW+`oHV^`y))vn#1%2 zyH--n_LTuW{brW~a5m|&j07%fgc1{r*K9H`tspjB*8Yi41J@IkTo;_0GUUu$_*WjT zb5%8WFUpo+#o+YloB7^_0oOTy=Ad)h;nJkHX9ufis87MM3;@&T_!twZ(nl!);5h${ zm(fMM8+MYu$|Zunb)zCZ6vmDr0GJ>I;TRgd{QelGWCWP%%gTO;CW~j}eDlTNGYM2h zijUdwH3isavpZw_9{2v~{)wTyT~l^51G~1JKJ}D$1Z4Xj4Yx7t5mAXlU``KD{pD4OZT2}Q?^Rb0rHC{*9t$SEu6u2Fy1S+`9pHE31 zP+8w$l(uJlwZ^pY)w`+fW-DhZTXQQ}V{%Ewd-A!2?BUX03K-aqjXg)41|FT`P|=}& zNZlC2|9llQ9x-8Nb#V@)xSvKASSTZ&>(Z@yQ$qCehxbS=jYS8KH$2@IDq}1NF|3p7 ziKS%dY1(g(5D&g>Qo7U}M?kcVq-gc5s z1;oorckkz0Fr>@2*e5#No;Mxrh)XlviAm;xHEpD3kyYXZA7vqI-X9+ICR%-Ih-Q)7 zF3I5YBeRP0Cz~1F-c02CSl`!yjOL1s5o5iy8c35dqG-RlzFdlijd+85Q}6|du*v=s zq#N7T6AR8vKr*J=B#RSflZ_^8Oj$WCmdbLqL{UAs=^n~ND3uc-S5AK>3C z_Jx*p>;DzsmDnG3JYDY?Q^NttYf;j-!5!)aiTb`mv~rZI=ecMY^UQl(-g{fh3W!B@Aa##b?30yyL%^h8H>SbXN(bD{yh^#Z|$6x)Nr zcHnO{F*H;)H+rah&g-|o&ccl!aSdwLdnwIFjJbkfy%fea-BkyY1?e`XqPl0r4Q5jK)_#97M4XJ4 zt!!%a9|Y!bVf7NPl*EDpovZHO_eIA51O_Ja`BVFaENl8ae(coa`QG~GVB9ZS;7qCT z8TEPMwD-r4IVLSh?47z9qoHxazu&e5wip|KzB_mXRTSL#Zu?*vmB*8{YV$H=b1H zKiWq$YgF3$k#%yHPX0-dl3SAf!x;#9v`D3cr5% z5QZS}PHIpryvxGIA5gY=KYK1YkZaL>()#92Dm$4@Q9ZJvlC}56=Aipd!QGq2joI`jJ%dK8FJ8KiM+HPiJ24~+g8}K78fOKQcIdPjv$&NP>}=9`1wJ|v z#F0>Ik~lyp)%^`L8{IJ4J7iGjnPdgSr#$c?wd|6(xju!_$fRDWg!lp=q#vm36Cvde zh6~$aylGAn0{>PqiA)v5XVku@HmEF3Dr@wU*1?7uN>SWz2x^aXXOEjx*v%@C4GRl| 
z0(Nnm1VKEcf&EfRy4&TC0Vf+cB?@@TXTM%EVui{*Ha;8O;!Hi=)H8On@GlJwIkqU$oQ?tY(BRhSj$q(#0g-mT!?Zs9IJ}RVjEl(k999 zh|dScR>k!EIlGStIA*4AIeqt&ZJy|;tq--=Vrm+0ZnQgz6x}ExV!~pZk4l}HBc|E$ z#M*pTnBXZ+^k`L7+KHdWMZ)(^$<2|a*k{5jrats7Gjq?au83R~v<}+q?T6(16I!}4 zHdGd!TRq;1Z#QEjvQtIsMd5+6b|SJY zZvS0nkVm-^aq4*;Exon%$n2m_<7-|860Vd39~%F@``vR`wO+|KJi5ES(?iqOB?tDV zI7dz7slE~mmOQU3y{f%JL=Ue3%WC27XWo25n|zmS#%Dao$mS8j;SYL! zJCv6sFT2uCR>#)o>g=*4>Mjeka(fSEz-#`&Z9G`-#8eA-IN`X;XFT%Ch!|;K%)bcr zH#o865~W?j5jqB!^TN@x6flyYNMve>NNT9GhO%KD#jpA4H`64Asudseasnhf|CY(s zN7$i9W%{edMC`#3SmV$JEa``rR< z)bttB9-pL-DZSgpRE+e-@7hl-@S0DVWqy@4@TvT}@=4@Og~Ek&=)T3(uRiNIa=l(t z!mGQ8pJ(=;u(a>b+gMqldC7*`uHy@Ugx|7Ev1fsH@{?V;5WQw88I2oXK6@okw&M2jA~R2+s)x@hwZJko77|HVTge-a#<%6fY3K>2)k@a!uWJGDpU z^NB)CR@1h%c`7U4RK8n3+_v79yRir}FG`{^)Wv}n(+Tj8?W_76hd2vRZ?lYMQr)+i&WheU5|8m-bL~<@{^KIK-i!*kwI( zNp0{8uf7g_&q*O)O9SV~18a-cn?%nnfCIf7Ug|cg`pHTryTG-SrLIRSlq3kJD)dzn zCL1^<4ZH=I&UJ8$3{*aUnohOLc8egsU#V)tyZD1g(0W-%Yu#DP2s)Pc0)gu+60r|F zkUn9oiE#bN!2VXSk&N<|Q|(@&;imX@MJNzn%tnQhJ$Os(2bkD&n3JPLu4>IGh_8cx zEw9RS!%?qEJh7IlSV-Z}bsc-Qm3~y=RqHjV*3={Nf=lr5d0+n$U}oKb2Ot$?pTzk6 z&n9I>Woiya_aWsKwb8^3MZ@??YM!7Gz3eq*d?^a#sS!1$zDDzR~bkM1J@`Onqfklx-I-r4k~|kV+^a z4N6EOU?8DLH%Li$44vk`pcd{q&~+=96xt_n)3<&wgyg9 zmxH~y7-UOPoo$76o}T@(TfRK&@~QH6n2b5;PBc=_N;!v*XIDAONC$leD~@4Q!E=VK zEYOj|sNrMA2;F}p`8wfoacsz}<4V#WK&WGciij5(Zt znwGnPP`s*-gr9A5eF~SPvssB7wq2ysd6?}*<6@)lX+n6HrH}9l4|)sIKvecwlbKpW z2eWTN9)W{a!M(z*suk$*5fe&$kAu>pU)XG{F3gq9MDz1ValV`QDXjOc72uxHo80VD zl=QUh!s2~=L)Ce=ZuEjGaGUnL4U=XVRcza<(|25$cz6S%Z;YqJb>L*utLr8xO*lbm z%XxF6P$0gHPeE6=UXX95yRT`p3TVC9c!8WSo{#6PUAk9$-B(G_UmgK(ZEuD@Xi`l; zyxpUIp-~w1S%7-x>2j zYhX*@KsX(vhPrIrj=!~We*CBDbVjcVq<+(VPrl$6J_2Z_MdL}Du@4xODtOSmU#A~t z64wx=0tqxEtDF_?rxv%@(K=@^b2j^Y{!pFqKIUeZ1m=>Xp%G_5mj%wt_Y!j!zWUv6 zDuQxa+7L=R;kh0mr2vsGiky+^hAe8Y@$REF-+B}avZ#H>{L%Hi*qLP&xw!EVF}$Zw@; zBG7fXuy`-P0)F?kh=smb&B|khZ|en$(KED*kJ^=xIKHmze_8-Ibo$Iu0-yikZ)8h8 z6qdMt>iH)T;?%38Y7#>R#chn?LNvVh(V~K2E-^Ite$-y(YGH*~vL^9)Jb$;=ZC`93tw2%zeQKcD+hPT1vFkWP<(PRw?evYdx--uyX|3&s{{YhIOtIEnhw_8<_m_d` zo9eDV0IMcUQ5 zg(MKDj`Z1$J(;apLl+)RLv!=)EYl}2uVa(D=qx#You?J z78Ye5+1@`n)ujnXTt=tl+pGa3S>HL_D`a7tB|F)-eUga4fX~JDq51kv zA#ax|W*Xa_J5m*5$(YEumpU28qCkF-Df;JYBd`34BBB3ICPZ1*I+>}tQ)%Uqb`jd6Xr+qjoMDWSGEq>qd z;k-Da;rE3N-82|ZA|;W(l=#-SlCu<*rW6ux%22in&9qme7%dvUrUBf6UmjEBRGW#( z=9&Zc*roHMH07g;rY*YR46(^D8%jCw@}10>l}fZC+G^=?vCj7!R?`#|+trXMz`#9+ z-$)s1y)4TW`>O}m0c4t@W@JN$_V6or;2M=wB&~`}plC=9s~MHzI&mQMcO_PiOTKV1 ziJNKHpDWNPcH?;-n5LPdiLRrrmN)~spEiAowRKWbXk>SE;|r&_ZS5h$>vU<#NVDrJ6GRdFtj73q9IZ4`8y;zfWCh{>C1Wy#EgV8moc2zgE*CJ+-vt z-E|v4HH>!)lZ9DOU^pSh%SwAa{Gm(iGFfx(mjm<~4DU=M#+kPpsH@+PL=HL6H7r@l zG>p4`8QF`Pr8f%T92A-nyo9+`3S`dTlZf?`Q*th{9YV4=kY)1XT(Ncd#AyC)t>=tz zeD#CvhPa_sz8w7fPkFHIB=Ee+q5G|2Cow>KJSEYulq%}*LnzJV-Y6{!y>*g0_6v9{ zWa(>O66kqpFkp{*c*Qz(+q5eiw2JKN^m}VQEG*dukxTw$cZai?Jw!!Dq0g#ro%MfE z>y_1l$>+QzxLu1>diA@}T(V@dy#nKg={upLn0-rVcY6}`x)74}b(s$*kq509-Fx^` z?`ldLU5`iHx9(9)2zSImnGnXkBQ%STNmYM5-T2&-mnDbFRTpW{8+ejuOMMY#t(Iw{6cmJ0mt?0;B+C&m<#1dV?swttEL zLgs=-TtBN0^nc};^Y5v=PD$zpnUVfcD4QFQq)Z<4f?y-lC!0XI63Us>aIcB3L^+N< zl}HokJnio4`J1}Tda{Y9roQh4>BQYjFEXkE!DT1yafvgng=i329%9Ti3j z5$=>yz>J)~#dHq0y0FU)4>hZeYPr`_xl~cZh56bYCr(9qe03bo7%LQ-Q|nFn_PXE$ z8CUkwmcLBf#Z?c#0XBm~?OhK5N6_1d z>3&{-QO%4rzcIfSJ_GypguWG^42l^R%Vm|OFr%#gP)T?`%;q4*hcL!f<2f%SrFExm z*$0sYD^@eN;rgnBAHV=BVot4&5>s$#m#E4rT?P3<*6WC^MLP#x#}%Frzc7J^7o`*GI9+ttR!MZk8D`x+JfA zu7PymRidW0$qi(0D%|VNWu)}dbD7zNw(#7q;YM+|VJdvj17>M@4Vx9W>xKnFHuTRF zlY>%surPaT=SH9{A{imlXdOepgYqpLG7oXespH#rDWrz#2t$%~Efz-2MM?4!^OyN& zBiVLH$%T9(?ZSo*4dQ$a()bUur$v^Ie{c(l0=_XcP!L{8T9t5VNJS7lKu 
zJw9zy=RW>>Whdah8Lt?fFWB&%*w&wy8*CWke|^*9y(NCDKVk(Re#TfhehL5c8ZU$> z@FfJQmnUG`(vP628^jZJ+aSH)jqY-o3rd*`)hoJXf`0E4fROSR3s9G z>Nn=p;rG5wvgT1MLk>yomZ#$I?maU>g4a3_Hr>?4cSAL)m+_+Z&ge4sm|&ymV5_Z! zgl^`sMAAsIfbL0kOzp!W8|x%xO^Gc`uz}_`U+X-2+rEPRfv*))T@_`JrnpNC^G8!{ z@GBXZYNc6G$3o4OqbgTq(3&y6zUdk&=ta8b(_0LV0Rw4&i;zk-EN3vP?*lP@FPwY| z6Ey|xez4FO6#;j&or^N0x_GLm;v2U7=kwE1s%9n{iPyC~8>0} zwE}zih?}iJC1LJA)ZhbFc4kJuQ)`vW11ZSG5?3KH6rcapRsn|lXgaT-1 zkdS-QHvi9eu&x`WPp=ikMO|v#?>kS3}7kd(99FiEY z#4KnT%ivC=QGL$G63af^hCQxjuZK1Fe_!kWv5^zgU@(5xtd4R&LdXT+v)R*I-H zqAjyNGm(w5?R$!4>2i-=xe~&ciSl7zAJ47QSHN=I9Psczg?Wb8BKKzLl#B@AAC{j! zG{RgWFO|SjiIceaB-`)~ZRZiIS2e|}Qi(5RY{$I__=DlFP~X5DtBiCSo1Xyo5cE3k zHO7e47ZSksUIzV2YOjoLCIgt0mpWP2!pfT=agExb#hxbgrPONrXbR!kXSum&ZlE+7 zlv1AS#o*5|BdDV_HZurY=U;xb&8xlD{_RIQs&cs>^AkGlBGJib48 zyw_dt+-VLJ|9<4q%Z1Eetb~O#i9)^GU^*uSkPj7pp`h3x)hn?Y%CFe`eTFjSfzIEh zFYy9p=p(;xXBhA$Rdjsb&E)TRM8<3$)g-FR7hr!W-Gn@@yS;S{k|i5|lsddQze&O= ziQrdd@uL)=(1PL-j(9{o_>>9vtGDpk;Sl!E1m)YSfLUn;iqRLlSN-jwNWZ{za!e4e z#Y){=jOliE#%lGe(Kysx`@9jqY|eW0%LAfmWxR)(SR!UF-8T=l z%Gn(=D(Md$+D5&4V#a*=#(fHE+{1F}x>Q5W{Q4j8HhYr#Wif9ne6={*p z%&VaHM#|lL!5(wAATVeBmtu{#=B{prcTc{xbboC`8A%#)cd}tUn*Hyqcy0N$ZUPF& zuJ=?>1H9bOu#H?PIt80WiQsYv(?1jI$$6p@t6>Mi*64@ljJaq989e!%R!sddkjzI9 zq)rv#xI>}qV4{U>sYMyAj4HTE#+b*y>6^@+aWip-kflm89g-%Z#1&f^d$w7ls6)x) z17_p_*$#({vz7#Ga;c}9cF_XU9hg2cYc}-1I^BW=UsS2gJbpa3DL>R+Fy33?c4@mo zc|ya5)-Bn*CWLc`$_bmzytXyh5&u>B!7DuPv5*V&y$${bp~-vI07_Ky1^lB`?RRRNBeI=%|pGsxm|L>lim7npvvMJo&8 zb*t33Ko%rW1LV-a+jjXG!6m^2nlx_w1Rq2`J0dwCGV}0uq{Q%CbseYKl!s$t{Sq)|9}oFY`7VcPL*1_^()%*!zbH*pe_e8k zNV)jI(lY@mwEU?)ZDRzad02Ka&%04b#a~=ZeNlJ%{r#C`)_3=s=VwAKODCb9gf&yc zWl{-#5W;6_lpj%dn{wGjjhtUgrsd^FA@^>`NYWvAQ(!X{hMi9vjDoECGgtr>^Sc=t zg!Mus5d1{ix?7o&I;5?i|98X>=DBNN*BM^;qB2%LJQz z)s($V-t0; zW+fU7STOt9Sx7;L@EdpN03lo=mQUh}3OX;vyM-BEpy+6B zNJ(d%YujsI545wF=@+ePx7$o1a%C{=`D2ARTWjSJ?M7gPOe71k&K&WsJlKpm(kMkL z6_sxMEUAbAliX*1Uw~;g_DiACr&H>Rwqq*XvEoHdE@|=h^_f9O7}8CCO^(P0*3Nf) zHnEFY0^p-YhBw&e2(TTNW}QSDJe&-r!)2*rq#4f(p#^(4ayGkZ&c89J>79NBL{A1zS1?8_+3B7 zG?qwa$3-#%+@3{)DfV>!y$_biIb1E)wD&=fEvM5Zo5)qrtClPm(906NulgSA-aD9n z5d3z3eFvX4$F3r>10O!N>?=%-KIf_B)ed;>`Z{th@ST>ejw0%m1=&TZ_yqgSzMbN4 zWihhgFlI+ldl+a+Z@cuS4STs@y~7q@UhJtCr8q1 zPK}h~KeICCt(T%897Qvz1#GgQiljJmuzVInB5VGKHAfN@#PT7m_o;0YIrae+GaL8q6%bF2S+_zNR1K6ro9W8#uT3Kx0rC`frya#t1ZB>eAAEr)lDLW>cC&r{MK-zhR>>M^xAxdn3#C$iO=7wbDMW{xju? 
zs?V6t>r|I_Ndsa6dpQ>lb^4NDw;fqKe3B35LGX%~8Ehz;78v&FRG#b>M1|J85v-Eq z8=42pc350g64$XT$V}74#bsq~gB+?fPc&##ObV8Tm;aHJZeMcEvmf6ShwZI@t`jyb{CS{B@yw3Yc>ge_JIY#U{JUDH zdvYFMuTcm1)$I(v{`sd&iT&5620G@+ReiGmmZbZhla24XfPgoEUOEeBm3A&2JR>jMvXBqlvxyUF=;OW6m>XVu{>w#KpdxI3ucH>{F<>chbJq$82GwYk~Fy zL+fZ}=OtpJFkbA9kPN@Eo;@U>kRHvZEC=(O{khF>T9Pm5m%7;Q%;yUk0vj9jYnYZz z3_s!WLhArM)G6;3A@c?Qhwjvt{V(p*tM8NOWpCAoutxodQiIRHE$mEi`!`CX>Hb?G zGL9IT7IAVL+a~coha-yN$SJcQ#ShkM>zRcA`Zpd-Eej0VglJKW99lKacJUcJ_Q9fA zoWad=vr-9}vl_Z*QWGwozf2o=W1k?(uDH8R#4x5P_H*ux1C0K@a)!|=vCL63-8O!0 z&Yd*K+L{40s14e9BMW+@ppWh`vx=DQAl7emy|~@v&3o{0X8zwPse^g-^Y!0e@Nmc3PUqzee+_MqcGd~lvkHiUc zYO#t)UkTKGmM8mkT6FsGhUVV;RP%+#Z>6|AMFZs`rp`vqmF$RFsO8DVyBO+QN$k?> zLNnoZh2oquO$pY3?d!piG1Uci*tB!k%zDivmXiB<32nS!3UYZAskCViJ}dR4X<&9N ztq_~Kf(`aCiLJDds_vgR+$(^}A1T7CkL%xZAd|9hz&NG7%S2+m~P zrjvVj+X^%_xdH>JRR{^-KT-@nn@Zl+uz?q^faeDL_eL8PpUB~TjSo2N`Tp~f@l~0B zNEKIE^kG@+lXldPr>)1|uAW1WRi{FDc8opo)pCYkv{6EL59W%--LU(GsM~WXWN}`X z!Z+o8&)^lv3!g)56&%(!$~+Ssd|y_yZsg{B{acYPev{`_tMz(jhr}X%pI3)_R%VT2 z=N~@~W5Qwo_3+Q9_!ZT!Cx&mzFvoibInZ4Fs555&LpdT0Kplz?=a|HR@HVB+7@`}R z#`$4=8yB?8dxl^zP-Ma=L2$IgWBVmhvs{~H-jm!RIBm%(5U8|~Vb{RKS~xldOE)p% z^%%@}EbZ$zJ+;&=^G>z4zFdUglMTJUkU3n;A41Q?DfCjh9^cRtZ>lmabE6$Oq*V=d zpTep?sOds`RJK)(vY{jUbD zFnEmS7;Is2wpmD%tk>K58}+ZUAxC<%FtqLJ$)7yrsFbnauPy9rLlYQPRM@D^ry%iO zV?E2QvHH0YqFH?x;=5iy$Xuf5Y`A9Y>e!%u`>z@QsZdz>DgW;6+=a=pz(P#RNXq4)c$MuvY zAHeG!koZE65E*7Cx6Cx0oeLbw8D+DYNZCMCky2%Rre0J$?{x3R1W0b23wj2 z(aXt$Phc)G+joB0Yvj=+@k}p*f^DrPwWjj$w823TzvPcAlcC^1+x&} za{Qw&qw0RtC1uxUEP+V-DsU9joe!Y1q|sUHoIC=YU!}kh6t*I}^2$Dcz0MRI#$E)m z@LWe&FFGS=?e#N87_E@SyV8J~bYnBT6Omx|lW51|Gmw4N zWfOWOo11af_vE;_@$#U>X<9=o)%!$0qN$X9%*@A1vK=rGDcaBHailIk&5~Cr=>7p% zl-bxw=J@+{EnAX3^c&Jjvt(|^m$K?!;5q#ck!i~IpJ#rceeV)om;UR~%4t!u>#I6) z8Y={-;_D1Kzc%2%y&D*K7yd9d;QgZ|{DLyW$v0&muqd_s*!Ky>^3~E=@1BLk4vY|x z;t|}(z4twh%uxbTzMc2Q`i`P-OhESYE@A`L4&G0HIMEXk4P{MK`HGL2@cr(`?^`U- zM9YsaE!^ynq$e`-$@Y;wC4`d|knUP_Ta=WBsk^T-s=|M(>Yx4^vWl2iLhX+zd^H25 zxO@)bKNe}Sl;5&uz~H&Id7?&h0urN>jMTERPStCTrp;*EnN5C3?R}JE^(`Are@tDa z)C4$BWU7!Q-D?3o%3A;J<|ljBRrig2oafb^*R4OivE`56xJR2Ce-MvJ^i_u>4Z+k3 zuPMImbLpZ5i-aZtH5GVqvTZ5YFf)qI5jQDYB3iw2_%g~6fmaRtc(l>KfpMwOGyX!2 zCs$c<9KTKn-*xa=hqn&GcO;}W2lamYLE*}-Lz|=HWY%d<=LLCozx$;Ag>FfPLQXY7 zg7>wK)wSBs%a-chukPTC+`O;$^Qa`lO=E>cEi541VGpbQDfUb#gl$dn{l~BvML}Ao zg9fH9&&Dg|OdNZUq-HOLRbO*Ac0Bs;-X_jg8pBgwzE4K&p1eky>v5h=&|?8H!7|4YzwV`LY(r^tffv8{HHK_QOA-C`%I->s$lCbHN!GH9Gda_)tB+07Z%kCn>Y^^ zBN8HuYs)2f3*w{PYU>0gfgS7!r0iOjYH}hu5<>#o?hd34=sGRhd?$oU_CA{Isd(g< ztM9EEFuMn4=0$?&Uz5QEPEpXajCjWF@#LP1{d|tWvN>J_;BdFScLZn_n-BH2Tw#5O zxfplsg|m7F@nT1H&9({5rkqvYc!!MR6kRj!6BWFg>p}cQfrVci@1-P^SX2CMIeq~_ zBim@%VUYojI21?477yy)A>jGvi>1K!Mgu9)!p(bttX+K--O@)=FW?6u9G%`?7c;VS zl&o)SV~mLxuC3ZnK#HT1%isU&mJ08KXO-~i-~ zGV#{R4qi2J;e4z7loLA6D7L()j@o?u%45g(^#GOW*P_>7i$Nu&LAur_6ag15xW$#} z(EZ{Ip7Cg>6=Fyr=I$NsYv6ykks3PXRj0zzctOU&9QX!vcq`3`4ZZ|B=^0MpU3jn_ z-pxp?W$qScnA|;1k$^*-7)USqP-z4sJ7e@L2b-$G|Fi%`epHiL?U2WvL7iXMyPR9d zppY-xa~eC!;?AQgGq}0YJo=km71`zfK7#wOs*nB8qm4fJbC%rvS1SFd-P=PfE!}!| z&}DppTmM#kh=>5bJl(CEq+K$;R5X9`=OKVT-W(ghTEL|60I>(-49F3T(zE7u7hE-P zpYPRf87EJXux(v`!}B!>n9=Wh~FNVEGD~X@#kG~4z%$$0kU1|s%s*poV_6gPe6mkR!=EiSJPOEgiFh9$D zKkb1-32n=nl$+X@nQ;65^I7G0O>wgi1TBa$lBEx+hmx^r#)hjxe5bVJ&=c~)(tS~0 z)5lH}(8?A!qsykgiZ@0ngR1|~h9rPC2%hlPR;vA6kJQ04G5GV40rLJ{Z|}1EA~9xE zoX)obl+qp*uZ5saa!Bp9-)5p#lpC>8j;CSyoc?>wZXWo7zUa9*nKA_&;XZxa-iPy# z^DDE`-!|nQ|c&a1!M@BG{LZ9U}%aLS);S zzLu2-qnCn~`(Nx%w#P)y>4t{f3rBsj%Smb^?>~P0IMu8sUbs+mFfrd7qjX<5s*wsD zgFMc5dM?~fN^EFqQyl~94nxb^W)vl2T--(;Wm71P;=dRk0@MS-pdK(k$d>D}`Vx6K 
z(%te|P>BcaeB=xgRY6NGG`!A9F~kJnEeA9AMEcr<%B)a0iS(0NL|lb!a)eEH`CPB8 z*0mO{)$AoHDtR-TIs`$5EEM7>n-t02O1+=L67z3`b7S)^IiJvOvn<(ShE38p9M4Qw zGkRcB&@d%Pn=bdRQYM&^`lPh^=T|l(dXLCF%D*FC3Ea?2IrV-`R_utf)-%r9iOmZP zasyN>0%q9R`>929=eR3-?yL|07MV+nd{IdiynzYS+cJP1K5%FFW}$0)F+C^!wFVpo zPi(0V{iV_`qPAL|ADrlPRg@>>DtEvlvF=L#79$&eD#^y^N|}T@ICnbovdmq{R^Orl zD%ndPzqpLq-N7f06Hb10*Rw`xPyQ{#uOBeW@O;t{0=;mq-7QL|1FQw73LQ4e}5O*;xFMt<09uKh7e=knhO|{G#;AU~*?Wv00 zeC@daAQS3&8m+b{xO=gKKm}?CK$Eit@P|cpjV=y!I^m)K;>7~5Og46Q>D2eD#`Dc z|7K7%KrZ+2rlTCw0j{ieq(#LY_6%X#zn6a~mYjy%tnhsSVc`KkE+8j>NA{f6)s1As z>5Jf>jTatk#P&MqqU!b?qmg?a^+S8MiXY85%Z^GUw5pt`F#4!K%%wV95ZZzqQb-jr z>!aaH0wu{hCJD3E>tr~H4dPZ>a=)EhXV!k+;LUKJN@`2}!Iw__V6=0M8rz;?Vz$%Mn}_|F8FCvJA6!0|u8^g$ zi^Ms_-!f=k9I==={#%%A*hRpeVcfH$FH)Y^qwGcoo}eerMt`hGy6P@-fSHCnK9@cD zfWhB5M?s`t0r}9=06CKhA{nwrTP?_L?^;h*$B9(SwF}7IzM(Oxkei}msNTq2?h>V+ zl=m|Oj?JZ<>LKM>paB`%_x7FJ#Sz;{P&C*WrBnQ2;Zp-!Enjyyvg1*8;7Kc?f`-o! z>$94&KjHD`L{~?nx%JS~sK;t;`Og0eL$d*+~Ez{ci6B{6ZCGh>j=W%9}( zt};&!JWR@D-{2TwB|4pD3Op46)imb+kmD}9lUhm@r(^5Pz$&VzHCFv(>W0Ky0_1gm z9iNE7Tw}|;9Y!_PanA;(lv_xHNcK8exdz6wfVvr7u8qWRNdt#FRo|Lj&94%`cXl-7_F;BpySfI z)9;&~xDqJEH8pO!P0Qg;lm}WL*=&_S@d!SXDMhaW>=g}3Ia`SL+}wpTXT1-F@KQc? zsqkmtYw>+$9Y}}A+^YK|;pUqG(oKe|jS+?qo21+MddS8L=*fkTJdTp$v@2 zl89)*#%X+hpOJ}9XkUmP5QC`w7pxTOl&4+4tJ;VDYMD6W$Y<2%Te8Q=`K{5~tR@vl z7Tl8e8w*=%-}BIY{8^6^(Li{qt3i!f;oKOPmKX-%`QPWZ4bDaIsx6bg)mEOp7|e(_ zrs^(b%0&ZJX8p3e-bpH`aQwpW?Dm~2qS`LD(ur|xV0uw0rW;n$0IXCj9w27ygpFeO z1#Zvd_IRXt0iX2aazcV}A}kRrfq6^T{$R}16fp1h{R=($*U9P1*FejYS6EmWDUt^e zMq1EA3$Ms(w+F9&%G0u-Qtapn-mh&v4Z>f@jMFw-T&^0>)Z568U5*wpg5o@u#W747 zoNDFk;w!T^sJL^o7?1zcRNEbvh};eRi~FgpIJS<7oNWyi#}d3MN;;L=+@!*VBNCnf zkzJe4c|hCUXbz8kki;13DG|XrCKdhqu@kCnBPH)& zC{#Mr9pE4Ivi->S4e`h(IYktmAyTwM=qPa9izWDvTxqvwhee5>hIHi)r(t}3$Y>A0;hdZf`i`c<7yW;I12HH93K7Nn|2@i^0a%a^ zbpe7{IU2$Ex1Zurx>OrTTC&n`L>k#_az(D1cw*n#U~@5X0s+JVT@LM*5-_7?r%k6q zQYhyre+5RROra`D4+qQFSK#X|m8vm-)gIYrrJ`)NQjD`RYX>Da6VzWW+}s+A=B(wf z--so?A4jc#WdZ{0_+^5Utj+3dE z`JL8*8|*?yjwc3xC7Zle#G>bH7hlX7UoIcP?)9QUg9UkN(9bR5w5Wsn*{1cKG+$}m z6}?>kMMMC~ir?+MF(Dd6U7(-y_z6ylT{ElZ!aJ?T{C;eeTY2RqpWA5dR(+#xRfye+ z{&Ep^gHaJ#?v9u}-07em;)#?#sJqo_MY)(QMY}pPL+dfSi|RKJdx>J~p;uABK)5ml z7%l``{=Y+&Ma>)MWNSp|0AklF-FLE{7va{#5t%+Aq!q)$uTS7uH50>gi_!ICxJX(i ziZK?k&TBC0tpC&cb~Sa6M2Hy#R`lx~4}Me5#LP?x0LKP=?Kb{^|EyeV9cd?`V08V1 zLd6-j>Z|GI$J~Dza}thGsNm)QI&p!mF2Tmy6AH}$fcH|#IMZ#W6LQEVtR8wFGpO&^^OlV&c{UN zuy*WN-CAh(Wb`m{qo!NFBi~hiu1)viFxV*A$;B1VPO*ph8E+&3TogyCyb0IBO&%fs zLi9Nk9UH&k<69ybPE3jgTTFEB9mKeJrIW^IvSjA;ohwe2h=~oy&H@9naK)|bB#jNaNEO6*@qB51kMM-Zia3e68`MZIMA8# z@qnTtwi-t}n2}y_Fqm^zFj}IB(|V8Ns~T$4FQsLi1JOX|qv1v!c#L6Y#GuGfdO2Vk z;9+nX7I?rTs)ZUE$#4k*`s_*kvHku1$S9-#?`gjY7nV15n(BMTTHP}^ckof@2>$m9 zqPpr$&B9~EayQ)k{TxtL&g+ri%7BWbu_!U^-sr2VPDctnObV)5!(DP6urA(DiPj!Z z3YBvJUpzfCmpqa7ZS@qOM!YX63;CDe31#T!xGmHMdN5BguBOG857F?Dk5!{N8;@gw z!E-Ipfcs~4OKlp+jMHE6IQoCTkF@Y`s*=E$|pyZ5V(#_n_psY=HDZFPj0 z@~kqM;`JODWB>=I62b#6bof;LofGP=@*>8H|Iai5fE6MHEr1Da~r4 zZb3gm{hul5-!I2xYbUF0AW1DM6G|_;<9en?LlbFda|%vi!WIGzM$C%dVFf3*A6`*6 zW(5N-J_HNmsH>+J?enBuB^2XVt+a*5l=#{}2^DyX zmB0u!0ZPdSb{5QtbI_f1V@l?q6H80B>mioxKxkhpo-ofu&Yx%UNM4WJ>iC&){5~}1 zvG6Lbx}n#dLuvneri?W{t|!A^&+ns0&y#xEEmuEyt*qlOCx{6jh*`Pnt_k}E+wh6j zsiQ_LK?*M$W0iZHXC39EI6;=rWYA^rTK1nudqR94v>F&J8k&$c@8evt`-JTA!EZEq z26RU-V>S`jmPWa|JqJ5_-)pmkc{6b3S$KoSWgq6?{7S38sq&=dw*cr0Z)!dJchYZS zc#$T*{4|WzvZIpC*zEnA--QoIVX0;H3xkcm^-0d%bRX#eS)e9}Luo8luBO%u>@gj% z&0p`9pJU=zK#osHI8j<=P6!pWjL37@0oReN7@utC0rpDC?@yhVLOvTI#3hwL`Sn}y z^9p0%QNSU~QpzVjOzpY06!2mt%)9^XGYxj^h%dR>a^fub&!7vD+M ztl2AQD!5@w*nutA_k=B7r75G#h-RR%1Xk16__NhR!v>u62%v2{MDF|4>0>s=!z`>` 
z8bW@+77GJK*#{8ZynxRHu4TP<&-{8t6sah|RiTXo1IJ_+Wje^P!GD$c#4XO=H=lN4j4tkITdZtHR4gq%>ZW2WGRj?a{U(kf+xD(W(I9Va;s9`E7XP{m@D=0I;I`+&94*GMRRBWr zRUj`*jIIWUu|1{L^arSXO6%IAPg3^?)6Us@>Fh_>Jm~Febo$jqaoGByuV3?+{}f{tbVQyO1d$`m=VG`7VjmuGnX@ z+Q|^lxtN)=to6gQUq4g34%@d^A}`_u!hoF8{(i*suT-auLOztxNx4EdOIriT_FytL z4j5yEi&9ybc9~#=b&s}^E2}K5i+t~#>(c2uw9XWOir<(5qPlsuh?W_XII4m8%nG5` z*?#f$=>4>JM~mr~a{ZWQ5V-5XIr2p^-}~P=!q48(t9`J(Eyq*lr>CnMr9fh4;*3ak z5=g|<=Q)&kjUUZ>3w+D7tT&%ldGs@SJffCt67Uie);VgVEYIRJ{_Y)TU$`sx$?M;9 z1ZF15iy!evKoygIft7vLBv9}5_w~2ULojuK0#Id#EXAlM$rw3-L&TDH#@YRqy>#2^ z@TS?a%emY^!+r9=_pBY~xXsqvSE(%ok#@kKF`x8zUbEA$?X#Ga9dJT4g0K_8ySUY< zFHmLmT+dH=La<<+?|dJbuIdo^9QcrK`JFIdJI|gvrdhC1vyn9$-aT?O35Rh*U0_E2 zkxV}usK8T=iMWW*;$vUN&!6w|n4o8G<0lZgjm2)hKL1Ch2Or)pL>`*q$GO#H7AOi^ zbu|zIFZwxZ-&XsNRtj~$AD$xvB+eDzZ1TALMVEuXdF1FnP^puR3E@%cBeVIRwBiS5 zO%271Prh9hCG4_K#5SG*XyjktQ>e-G$1UZzXso+5ovFYrvlNXcA9-SC&Z8K4KfDTl&& zUsyvHntMk)D2t!{5ppUagIZ4XyU?Ys-gExosiG2PMEJvZb}eNKkC_F%--r}S0!PZN zs)o0<7@#B}0|2d`ST#a0Ffw>!OdsiMREfAYb4n!?j8Wv@Vvf9aazjlq)+HQ1NX)9| z2J!fa5RX%nw!Re_kw?gLo3;is;?eMj=T>K{@QSC|NqV@KGr|kR^;^8&0+mcRKnl2Q zRuMig?xZKFYh$JF4oVFSLOlwFOxhzb1Xt{ z=+RXn$)+W`egwF0c^8+O@cm35IHdV+sB-_8To!(OJkgt&#ibZL0-?w-6fAm`YWm)y zclpJXl&HaXpY-)A~-SP@c~q;tovtjsE1gotru1pmqv z&nUR|W(=Z)58f#o?;jeo;4M!_-5qp8g`~?tW|T-kSr%a0ZCr<+GbS0W3cFq z5YuY>gnWCzC3`_~AA98%#K4sd@L+x}s>nY@#bzQp;}bFGSiQz(g{B{-+%!M+zwSVg z94;@>WcYzQOmH%2Y$d&^e8%lB@sH`;YWW;4>=;}IJ`gm>cw$>OX5sUe8IgQ`h|vq1 zp1nbC!Y{ggMF9h6D1OHYUAgYy(RGONE%n{Cu`ZRO2+VyR5M(%?qFX*^#S=vnZyJ<6mCEQtML1L0DouOV6!s?kf9NE(*_PAvM ziCl6=qDh@&AOe99E*w)y)!2x7^*uRqfHI&aKKS$3ri{1W4N2GBMx#lha;-&lkcJ!E z=d1qki;Q!oLt2~gWS;Be8Zbo-ALG;!W1ppR1zum>1tTEIEZTC<4?Tbzx)$6a12t_& z)f%VxB>DCVI2Xa{9Sw!wITUV$oU55RNLN62xGAJB7}zQ%TvK!R(=8KC2-RjFzDTL z$6i+|jfgvS!sA@EEeaDSn5y-Ugs*thrb&4bKVHU%GpevE@-|?zF<$Ep6Ys@^J<5?p zlK!ED>a7I_>`iyN8-+;)FnR(ZOLvJXwWu#7aO!Hg7TVj58~ z6#e_pvZF$@Z?|MYakl!cv3U?4GWIM~ESPP9`wG;kSh!kFm@UGD)5h8XqdfTed9AaT zX74Nxe$wRp%b@%TV59#p!U=pRUaHtzAlkDV+$D)sJ6FK@7L}7e79;|)5Y~UGt4f-0 z5zW&Jm2$i@Xj(|gmIz^q$<+IJ>%@fn5Cbzz^(W;?&TUco@Xx@wc*LJM-*+{iPH0rAH`S{+UbHq}mE9{4DPkxmO zLgb}eol$hgJ=#c~J1J3r4rBUFS^M(bWQFqXkls%dsFvz^>R}iocGw=)Y;3rK*2}EY zHi{lXYz74}hSkm}a zu6E>F*+_p-f>GdiQ!+Grm9uG$=@eKU?R9r*4pjsUL94h}WCP3&8j1P*&ImHJDX|S! zZkZ&XSzk|9DCw|c3Ne{pQc#u&801F$huFWPSi^}lur(RxPHz&NauQFgQ!s>+3_Kx5ykN(S)*|R6iKQ;{+BNVW_?it zRE_)fi;m!0bN}+H962ap4)P0y(j`itk?g%-x|E0ZRT*PCz|0S3{Yy+C04Qbvoe(y9lx;pmF z0G2fR`rHvwE^hYDk2t3FNb^;?7{!VB!pGZBr~38)YjrOC<^Z!%_;UbAitXRk zGOmZCpMo$YGHNd!QqLMPI3F1qv8a0X;{WZMW&&-k5=tG8%5mpWwmscC_M>2Gf7hD# z+xVDnyBVu0=n%xXM7c4UomX?)aDIcQ`aga8;euDB2!vw&Y;S*inXAyzw^nH5(QH$X zvkz>5=G+c0heq{MKIk=6ml?*>@C&FzEf{u2*JophHHQn;ULlgh)8G6(PKLkJ|GwzN z;nDgLDPCI+M=a5;hUeoxp7U%mGOpwC4}oO);oXz;qo)%fkzHeTl?XQOm^(0#JnyOK zwdskyBS9Nk(Mq<}&?if@u}~aoH6Ens!OJUsL^`FIPDXOlkFxVnH2oE!x7Odn7&KE! zO&rM+?FIZGdtKQ6w*r(XR?(;VTvqov4*nE}hob&$%p!YuEb~XR%}0U80`JO3&s<-G zcWJ!)xpW}VIZ*AFoj9Q`l8x*4a#-=nD#`-0ku9~aPqGJh!~o9*j-fvo$C&@XuEc*0 zUNw1TB#$p*ZT$|!fW{-XrMHd|BQZ5(&oyB%lxFCT4E+G*-PDV%QvW8XTPcTI1Z`zHu>i}G7frd@$-g*BrpHT z3I9L8cNN1nx1xQW{7;%3x3=EclOt5?Z9Me6_I~~!y1oJ`%5QsHMM0!QL>fe-yL&)E zx{)sF?g0czX({O(Lb^-3I|b=xfMGyNU`Ap9;d{~h`@Z|X_y4W+E*30cy=V5>d!K!F zJWuLJwt-K@>V;|5UHzmtw*-ZQ+FdU173ybzJ*+Xn9e(eRsd5g<3Nt;VQR*#%&vna> zi+u7Ea~0kVY_d)s%F##cfQ-xdvN|N5V|EO&KJT==hVLy%=oL_<_<;@+of6SAC7x8CZ{^D9lf-pXV} z>P?0-K5MY9tm!bRB zGuQdbdOJsSG3@eZD?SqR{;u1#2u(pqbd=vxn}(f7HzRBuJpn9%x3vFDzO4PaUOW$X zs9tmQau;pcujg<*7;Ei~=|iN>zL2%8K7Zs3@SaM``B~L&bNahp6WAoD4+(V68DG*? 
zW$OF8)=$pr7zBqa&#~8E2876n#!;}ohed54?_%N6EMD`P7&9*`$wR+uV`~|G`C%Ks z{h7x=3k`E1`0%@&FzfP?*`+KpYj@9fM*CJLYk<1n%*me<^o2_7{lWq$y6do~hgNPo5V$GcR_e9;4%ut1RPucKPB@bXPwav=;F8iJ?Io z{>MFATYS9S^DX2}j^{OhQTW~CDni2)uI=@J}oKrU}BlXYNcj zOK%zfuRQ`G;FqHvGkoG(5CPnspBgaq^gAom+&Y|9Na)v^1E;+S0SC8zx9^psAI_=9 zFIhsB9BHG8W)oCQE3}N5Y)D}Y=lw&Dh`BL=m~(YitAQ~BU8^Ro?`P#!r-3o z=hsY;#3KfDFhimEd+(_q9{@>8zD3Jc<2mjx_B=8vikq)^?ulV@YqlE`cRK4!ir@i_ zvAJ${^-FTv1H%Yj1>NH`z2uUriswSX9lPFn=pPO@!01kTF?iiH0PO9JlFsK`8Ia(lDsoHs3*nDy{iW~Vsxu&q+(FnaoYLRd-2#vhYdl|V)f z4}4EjIC$U&$53)v?)}U znH2sx%>M?$w^^{a_tAA9PIg-sdnuv9#G%00#P8KJ(o^q{{ung^4PRR7wfnyuBFz@f zo*gT#&$;S1;xGOx;9$4v?6O>qYF@-oIc)%7=Yl&(=Z2r+#yeOC87(CR->W42c)!x51~~e&F{X2zLI~&rT7*3~c1rF3pS1o)4vzzT35czzWw5e-|73zN z+W((cL+9mQ`>Gkb^o@)bhXLCx-%mP2&z?V#;2}M1@{6=}r2Vtj02^wcX-(Hl?gXeG z{Aqf zW^F80f(B^jvZDK)_+Ee>nC$H?Jdn8WK`2domDKEak(CvZM`cny%O?ULx)77ZY?NJI z2Nhl;9l!R9W>-8bHoI$V*zI_8y!R&=4@{;4J8`%9(|@mx0q_D?ubw!3I^>1}$004p z_gj3Dg%wXyF*(JzsKhvbc`UuqZpW6P*{C^V!%r#R(en^ z9%Yojj|cR{0>?0%9uVGDq{hcTv5Pn+W{|O!iz|6yP!kjLKhg}adCBqp_2*}x(qkr7 zNGOoiZ->f%Q-nzuhck6d;ybN}P_Dh4>SkY|livOQY&wTTeq(-!$TUDb)VdNBV;!>` zb}?-?-Pn%*L0##74y+6FcgF6f?iFJ1XSp1Fw+Y#tB59^4&AzI(I!uNj9|EYBpQ zr9)Azi$uP1T{5?GB>(w{JKqO@SOVpl4Pm>2{&*O!x~AvX&BDfo26*CYQqG5vIg>8S zO6sJPz{Yqkpd!=g{Mf#CgL0snR8XNe~UUDc_}O4{N=24N6|$?Yg)q>gyLetKf8-J@5%Rh7b&GWTRmo#4wJU z)gzr#ZH&#lDIm5JEjuZATi)?2BrY0g+Dn*B2ex&`SRFzlbQ|lX&6LUfyG&1b-TF<9j^HcNQko9)+ zy{9%1Z8?GzIiKtYyPW8kWHLyDu=8i;=febDw^H^Q@0`)fo*!Vb0&w4ep#&-eX`;2W zfWH$JFFuFf@yJqJGXQWBoPm!0HDJ7__ZIY%#Rt^{17Ey+BKnW@{JXFKUz#082Bb{l zKOS;eIO6EG^*NvGG~}+F9WN5_ua_s^1PJjz0Dq<43VyiW?P2iz*IKZ@Wrjbm*nKdj}`>0)YYjJ+XJtJlU;^LhaJEpLc+Bx+Xb<6u6TAL9(OxLQ!h* zCx=o;uoi*gk%M?(3P2wZ{Mwm{nyIWiO`LJ81MUWYr|ABM?*9tlG4CrxYSD9SRO0)Z zMrqsULaHu$(G=|F`LmkQr?PICzr6A%z%&nfRA#wBQ4mB-|eAwzkOT~ z^wCc`4aM8TPEHEY_8}D}nm^JAP(< zK9%llH&8!UY{D0)R@JtzHK@8O_(1&x^SJxBK-`x44BLkd{)9ByRQJn@ z_=OVc}#sDWbI*LP&u zE2VGb!IXmQ@6zdL6x=j8 zWM%-^{>CYtH|RiT*1N6aBuKH?LHl5=wUUrnv=HRD?Y@mK2x$ zavxR`!u#2xpim-M?`FGA)X{ZIO{2k4D|Gkg8E9NVW3KHiU8%jY7@4B?fasAru2DcOtf50A zY@4I9Y|d$8J#^z3m@BKaoA-Cr_oV%4rI5|R;jfun7lmA9_PrDqV};zl-VDDA#vk$d zFdbNlKPC%tfWw&4xPU>GopUeUyLf!xozX3#&t@j?gZ#!WIT3rdoKGDyUppB| zJGPf+)M{TuLRnr<}ljCKC5py1(Pm%QcS(GiD~l+=>}c8urEmiqr8xW7NP;TQ4R zTJ=3BBiD@(rVBYnsDq-U+#X6a5?Hpu>WHnDg9F~%F+CCX{Qwh%I%>&lyydg+HpYYE z4^X;dUmpxJAD@h{4HTF#>l}}8`}T;`!93bs2ai*4m1zG7phWJTLOg#oC_v{$XJR|; z7>I(RZ^1Gv3c#0mMx^i5)rR)1+ovHpEx(8&@{jWU+4Wm3{JZ^F_m&Mx3F{v7>oN2O z7U75xkNYm^5Zm4@J*Ln2u$#`I4UM!BaXx(CnkWS$?D$>Aa-vHR=u991GbJw5r~C{>cshgeXS)yVg6m*gXiC^c;f6c0rL3wB8Q&i^7USz zAK8I%gdHPD zXuFNrAu5-A?%>=YZ^5t}_X~*c(_ICJ9T8pI9F0%@Md2yYP|bvSUGTzjiwyUo$*sx7B?0P%t<-2Ep+M%uM?Z*9&HvLDZWs6+E#P(JMenl& z49#3lC$W9-+Eg`C-6+VSHUz4F>ZyOzR+$61fNHlx;w!^}wM{wbhw)AO4g^1L=8sou z9YPZKRC5wqb;Ki0uoKepNz3VM-c+3TP4kVrpN~DRGU5i@K>-GF13c~~FkALo$r zQhdOnTD$jlW^_tPm{o8tIT{)e5MXv`xr+Q>7s=sz55IAvHd@o1_GbysMiFc^dPbNk z=$i*L0BU&pVEO&=9Oq+RRlAkFX+WX2T8TCkg?wT69ut`)=!)la?v!OQIIO@0Ggr!+ ziC#oSPyI21HN#u6k^LjK&hOtJ@16{m=QYj%BW2J?pcQ{$=#!CQ z9Z?Uh2BV^3@mx zmy)+pi1?fSfq{5#-?j%6SMX3%uK#egcL}u9?*BNq=Orsj)-pMvIqOkn|3Je>H1|%U zDcE(@8ykJ#I?RN8>A2-#tr4I`0hZ@dz$Y|RwVhm$C&OY_E;9;BwT-=9Nj4Us?d6nz zHP}bmKibIfYFb?Kwb#2zj|KC9|KYd(_^@Z30X9uxQO9@OMK^Wkkwbo=Qr5(NS2N>I z>z;>Q-+LiNlEJc;>$A2tFHSY3H`+YB`yGu+?$S@96+M$5GP`iekK3=3N|*P{7OGp) zH5zuv`TGu_EDz)7=j!}db{@+5RX3dvF+A;MchRBZxIvt_ep8uPye|3l)QDFN@(IS?@hXj?L2PV}Me zTv^|E#RX->O~2=S1!=BMrH9*$0W9$#V1;WeSO9hNM3oXc%yfLrEK3R3FP^+=|3R;6di5c;D`TrH<^RVuYUV^L>ZQHk;EwK-$oxfq^)tq=S82~A1KHm2 zoDxMd`!+&6*L=VcFOmqMA>^BmUsa;nQ;O&LB(wF<)OnA>uajik?-5xpbtI6^Kmfus zWBVIEr5FIB9Ai@s{g 
z*_>tS!DQf6I=r72%+wEiweP-$KaU(FqAzuCxEh~1-=!ZH&#%TTMIW|`=5H4A)$3Yl z2S1l0VfJpj*Ldcg?(lGQ+$2A;t1;Z?X7^g)=e()Yah=vW7WAAwlCRV%k;(l^iKrm! zkNbY}8i@7Y#Hl{}<2RGY?pEx>Pha!wwr`LKYKSt-rwdnUej(Im?*u2#eZDFJH9WboC#Te%W|zlJGq&eaTF(!B->6dur&$ zeSFh+nl-v9&auE=Tc~48uf6PQuvA%R{oCnprB-cFqqrI0r4dAXG-yJ9Fge+tzsWP^ zN?vYRFX$p-mAJCKB+Im?ek23!01%L`Rg&8}|VfrEt zk4%;TShRx8cJdnUkR2FwiW9^^iJ%)P+a{r!8r?pJund{Jf8WT8Rg>uL9eRAkPkNN1 z6f$w}qB1r%_N9*k(Nr%`n3foVT1z~i>V$e`xp0A;?HYtJ%+-X)8dE3@)AkeLof_WL zG?c%yy=M;(CW(gWSK#ze)hm;3C?nNLv~euOjdKHj|G>CIG9qawPbsVrk-gI5y# zz5B#1OJR!0+^E{*joMjk570W9B{I@D*^iJBzvPTiqPf@oK9lE27Tt7=P zDoEt2PtTZCnj;|^xz~fw-97?M83TKodG%Z=X2_S%PCJN4(v(Ro9TV_}#Mhasy#E?G zDGAZ3wNO-3nj}JUB*YwY$MkU+Q01Q}quU0@^H#f6z~vhp6)L0Wj485a4D;dY7bzVq zmd4j5WwJ!)Y)fp3L_0o3htqA~ODT&-xdxQMp7q}mv3I8wqc`Q)kc?DQ$`zwTE;RDP zpMTiv_dT3lJNL*8Fk05=>AAkBbUN_!dBSgSY-FWq*Im+e7ieIx;teo)syHK|`c_iZ z;iVsQWx?2al%5q|nk=O2gY#2*co;OELyh_4(uvWu$ENVEsp_hG^m7*tA3B}s&_{3r z*Ik^R1n(;YMBScn!Jgp>>su`u%N`f1BoYeazM5XkdBbB6^0gQB0&&NNdfUJeioKLl zoj9&?4D)1SbG;32a`)Q=enJGkO)xReQxzr^JbH?YlF%8CLc4u}g3AkQR0?ig&aQ{W zNe7Bt=SUP@{GO32@maOZh%_YD`(Q&|zy{-OA?GUiLwp64&~lOF+u0=I*Z@Q|JkT*? zsoFfr6@uWz=9;MRI9U9C#2=zaq2|}Ng#5(+EykW~iS}e=>NoSTd0KI9oMSVLqAq;p zqBD|vg|A3V-m{O>j6Rd8V?VvLqxjlu_C2R)<{GptIdPUpxxcTPXrMiT(QXc`09j4N zLN#+HE0v71Y1+8*_+szTT=!opU`#3cm^8hkR2~Qro`Y5M+>~8QYkKNboB^SIh2SCh zHgXrwdZI!fNP+ME+UR03c>~Vw=Q-H~0?3!*mEDyrY&CVWOsceR{V9jzeY2-9e`U0N zhuB)mq~m4?b@__8;uw~ojZHIIE2Ia)OKUab#UMCIuu6vSC5%U{@PPY!Vw3i+Mb!+Su9m z?=&=3k)N*gSQe5W$Md+7q%u(T&<}9czI>emdKHsI?lhGjSj7IG!K{&RE)x zv-har)rB-9!L$RSs^72SvbA}4w|O`DD=2RSW8s)zNk1DYI^x7It1top38g9s?ee9* zDEEsDL<}{pu(enD8jD;>ql&gyhA~pV)p_rnMxpR`Z@&Hf0MQ6wpx1?u4bFK;3jx^lC<|I@T_-Y zpzVRjv^+5z{}BXvL*j9T5aQXTA!gC=r~95_^+i8ZSIjGrAvVh!#Gi@jZh_2t%uxQCo(wfGs%cklj6aC$pd#tKk4R>$uQDqzdic^#g6OKv zc3Li;*v`g6Wx88+H=K1iKjpt54bN>2e=U-VXM>gED?~tuNAk&@A2ldGmb)W>&RZM{ z*HSwA3S-(F*dC%|%_3LXxzP0S5^R{JpM6>APEglCGodqAXOZMF<&OmyHAgm|tT0r5GIqhMK6Atzd zvu5xeS@4XZaxiu0WZ&>|P>&aNX`7mu0t_=sf;egSL0{o@93tgV1*$Qtp%va|H$tSu zWSu-dw5YLdVUjp27HY@(WQFudqIKkoS;D$RX)AF^2E8uD(_v=+i9P3t_YAkVxUTmQ7(i#v$9Llhya@@sbAp~ z6N_n%sZ@VUD}uANjEi`9x2(#$4nr=S&@fH8Os=8v&G*F6pe-554L4zEFz(bdE{a zCG6vgeI@Yf`DS5F?p2~`@?mlNLzvxwV~psjOrzgPsgZrlCy;lR3osnV_S~Z&{O0{M zHq5cX^tJ8{zyi?llGW(En<(zxRCMas>)57W9}(sU3>amYwl{gV(_?eN-3rnK%x1>YdgDs<}v5A2f;t}9Iicr;Aolt_H9{19)93O-}A^` zO%ca`P-ak#URSNl#h4x~G&^E+J*-vbJVl3%t@LyB^H&KVppD-9HZ&6!C3L!Ms=VJm z;2@jUaRp~LO=!uKT>2>zV2k>8GNavc*a-23_T5e+NVUWyq)SzkPLj8(|DN&VYwZE9U} z{ILoV<7Es0^j6HrbzrAFQh411(B6oh_%_^=)k)|W9S6Ab1c!|J%ZKEQQH9>qElP+w z*C5*$1Ugmi1(a3gCL~t;p3hm?;p#27eAJ2uvpGWa-t7=?vm8twV{e;K2~^$`Fr~5d z)&21o{b{U%2sc*&*)obVCDh1SclZ~tWE6X$SW~s5ltF^GweOAq3e?1Hl+sW_B_wE; zdT57+YIR4cQ>0QxSjWkx71K?Xh%MW@1kq(3tK;f#h4%Lud&RjB*hP>>SN6!JF&(;6 zXZ2GjFpEA@g=U=f3T9-pP+-%$1e?33)Wz2%WB9NHZbGV?BwX?`gjx0Phlz1l29Vi zbGCqjJ>L&z_ons*)XG25)BZq!0(PVSd2}R)hy=kginKFK6$++s2O&cic05TkESOuH z9$JFj-mz^ehYBV)bCV$&rox-Piw+Cvc?xSOSiXRQ&2m%&ji(xE2DBSmO*6l%6cGw? z-S=>{;T*Rrilx~C8nqWJKEU0Nt=E9@l@H40BKhmjZPyALt+2Q5oB~M32PL1lIF39? z8O&JD0S+Fh^P3{_J7&@Yv6Cmo%05i4%;va*p6+ehQ;(Mq;;6UR@%-L*tDwUG7usU) zJLE4RaUUmcfZSCfPHU8&bgdSfm*<7(w&1wzxMi*St^dhXPw2%WgdWZisw$Z0-KMpk`;c|Hyj@VwEPsYK7-*g3 ztuV(9aON_iCgUl3LoXCiR^k@Wz&WzjqmW+I#DP~gLFQ+LxA6f<))%)Ph?p5Jjt8Zu zxxVbg0Db{+l;~2(Hlm_ zjdCtbwiMmdQP)eYCFfFHe-y$aRsaLgp&d;Urjy129E80>^3HW+4t{J++rBxwJR*cZ zVD(o4KkXRQK}#1jT<81yqF9#OI(k}Nh@BxT)u4Ic=D=vK&@^Jnd z{{zhL<~{wa#@=}YH`Ko_UzO*PNdG-zCavRkb)`OEASE59r^%l#OXjj#D3)lqqrX8? 
zGuqQ_29jEqYJ4l_eq?g3iOw89pr0T&bpP=xUM2V@_@WTPKuo!pY;=uC9%4SYz>JJ= zW4+{L(n^Rqd9zqua_!7Ob)sJC&SQM49CVkSiIRv_`AF zmHg^fdXR$BO93N90ZuGsguF0|zH=Yp`vY5}^sqz+yh%j4UIUwFbVseh=&mD|UHKL^q!LxIJZ#5mQsBQk!1 zCx^8>&>F>qiaBX^_f@?fHkL}Ig`!a&`M+cf83El}f;-GDPzjR>AJ!lfOw-)x_Qo9G zG@ApBf;s_iU^T;Bd#yF3qoDVK^h~vITntOIft_EmJQM#kP4pX~D8*Ex+fl?!)TpJB zz2GS4Bm-g-^gEV-|8<^4W)7D}P}b;SG0lMl1+%k+HBqi$L(PPy8x2M)Q$yy<<` zF{>tLSR}??D|qBhJ}EKnJJXikV|k)kLhe*3ETo&$=_KGPcgA}e))7kR*G$74$lPO` zOD|zc@4|JyJIa9*jVeZ7b$gIhjPVUE`8YSz7=0NI2g)8K_) zIe*(d=arBXIWI#`+3dgk4c>Y@HOQmlUSObSVr@}C9E6e=#T=KMEMcyhTZ=y&dQ8Dr zHXQMtm~@v&IOt$3XDZ40kyc2MJNI+~-EbKzp_?x?Qi4Jf4zXBaET1fN`oQsR%+A9h zf0!WksEl{#IV=x16do`x*P-D{%tvCgN3}X9UJX%~gNsWUL8x9n`$VIF*MSto{qhx_ z@JmfW!pD+$l&0Vnw#FEVA3y4=hGL3kTHAay6-wT{ym#G`KJ8$x7$l!;>#T_pYg`a@ zI{blTJ5{P*7BW>IvlBq;>FmS;A&B|@d9Iu=`%2%f)5)qe{*^3b|0{Kju_}7GP`3W< z6huM2i2Ph7*EJ#@Vi7Wa=n|4Im~U>b&$H=LFOcfoS%8>yQ2R72hLg#Y;4CGAv%^)8 z*U%j25Kk0^lcu96y3UkT7R^7^dqzxzffHGp?!EaYP!!``N#Tyumz=;QIJ<&mB3?KF~~q) zqZ!YSw^D}3pR^Q6tVnQ?;n-6r$=6%lJT~90#B&Aqthbn>ULWk%aFCM*PaCS@F;I<| zwQv=kr6`*Xw&t%+IK4JSgU&sg=dd58$K6yGKKNQ(pCtsIHx8w}S|q6`s-4yldicX? z@-)Tl6^&^_giu#a4q-Qsi!(>1MnQrwD-;+X#wEo{bwK3lm!U1%hYo$v&Hn)O>L}Px z$giw1;qOW{Z~+$~xcUd_o^@Gz2sq1ixce&6`%$u|J21|wv{SZA3nznH{Fb0k9~PNX zM0teF)Bipmw z%^vGB6WS@$r21k&_BxfV{d}X?UVA9^O1q6hrYewn8I0yRBu>CKNyrjBJW1*SGIyKN{KdJS0$(HSRKTTG-u}G(?uh^Hgo$PWQ zMP`Kp)V+j72`}`2*Dg8F9PsL&_vjrZNW}C;CExHnpvLG`q7KE&iE~t~a^|hp=ZV?` zb}IRI-Sm;UwnrzGH)Ot5#%Qx;1`!j-_8uIne!)JLNgs9vksK?I`w5n1$(sTuMvQ=f zyq#-CnT=4x#m$kUsEP}JYNR22O7i*exUUe!W$R)&@#djjVJh8h`m`HpfRlXbr~9Fm z{urhUOX$8llD{DIz$D4^1fC&tGHOZ7rP>~ob?JO{c+epMv`{QZsI=j3(&lXmN(46mTRNiHC*Pa20bDv>{ zXGXY1hpOpkCC@V17t{Pn%v*$)+Bu4R-pT7V*SV_*k}cXx3*`@sQw8<*b4YJZQ`{ux zBprk)X|Xa3l}@zJ>9*jt`byvRlRa9)gZ-PGW8#yK*223*x%CTjz!3R}mZs;yzu#Ag zGW~_p$P+@9M&09vIENnT*H)hwm-BDhVL~qH_e$A>j7rtu17TZ~{Hl_>}${Ax89t9t9QrZW1S1s;@ zSj-O358)P(AGqYnVVpBszVMU>xj%8Uz=dk1P4#mrhGd&YKuJIl<`r^UA@$D`&+O@Y zaB&wnGmqjpV~vr*mirSjU6#E5wCqaV*#ytO1@2f#r=uAOV*lVV5s4HQ&Ha2>iX6Xt zUpOJHb7HKTR^lO~*E_jZkc5gI7uWiHmyRmSv@jF$CZtb;!8@k*(Gt88JB(+|*`Hy6 zkAjP5*%C+6IsqFF6OL4N(_MW0WnrERq-uz(b{+$9<-%jfac9`}yBhF&8HgiDB!E@} zspj?0Lf0i#Hs8$6`K+{X%v~f z%G30d2u+@P?x4^3Cjo(;q}lJQar4-USL6z%e?Oe{12dJ_e_=DX_ZTJ&iFw~k^TbyH z1BbUv*!;f29{R}rJ$E4;Vj@rLKBgX-xd!WjNQ;!(OtZ_b=K7_z)aHSU7$oR9=4s6*GbB_{m~hE}0QDQotT^LlyKru(xnI!X zP*tfA$n3M>%xeR+-wM7udJ8tRIFS5=V7P3TbLv{?P|W@)$}8J}GWX;0qCt9auS zelDw$@q7ck=9zJ8RFQlMeXkO_b#Sh8&_z>gQD(ED9dT#piU{hTvf86Cc z^9?w}=m&$jKx-ByK@rRGg41aKHj2MzR@66{t%S`6$kQamtjmBx3f*6^yd7ZBkJ12V zc@5t399Kw&1GEZT+N+1b`s!;+~+sEHfn@-BUQUZiP@#Bq{OT7 zbG5_b65FFJ2zo?1+6~i(zX%MnBzkT!-xP~6=>_jbJDimNdaWe0(7V`{dm48VzG|wY z?g=9{;3L?{TwF^XEId!3OpF4j)99GvVeP#GF}FEtK@pjBg7HR!(Qvgx_YKuBy&;lO zoA@VNvK{Gef+Od%gqh%rdJ~SD?rL2x?9vOCGTS~SKDq^7hOAxpA)=aCLk>`#+YUHr zywtJC{S}|&%!8d%n(5Fgyc|#s-KEE*STIpES{zsHN#y#kdBJJM*_^bez2FdgwIaL{ zR}FRsMH^%dxj`BGJiGc*4=9q3eOQyAKpBSJE}c%Yq#7*?_g6X#<&9PR{U*9)VcVob z+H0Zwifbk9hn%OCrMjOC?$WQzgn)WM#SK%V6cLoSRTkEzdX1mcRrmXImVu{`;V&Bs zBhuT27xWhjbL!afqp2NNV;~+kCQ(YP%Ro8i1N01)K2E9wNu2{mWL$iCDyRnb!Asg) zOd)qyT50+7@+fn((d+bvUvi$W+P^9njk;jR@oK)LDu^K9G{eYid+ny!a-m>q88t8% z36oz;WH4&>vE$F&D{yR|%LJNcX0Zvh#qX6p*IAgT8j-8g>&XkD0e>!WHqK{~)=J5! 
zsx5!jhW?vp9xSZRwvwv8otqx1+trUQfo8@RcBM1`G1DrPp}+!F;-*K!U9f|WhXM+@ zPg1UrMsvynYJ^?^1#%8@KCT2k^pvWcZRC0?JC@IuimpaJcS1x|A-cf+H!oATk2^Yo2Hh*6!%EZ1exQ} z&%d%+f9^V)kKN>R65lF8wGy<>X5933iRro9%)?H;#Cg8K`b;!%R?g$yLS zT5JU$h9ni4d?k)53RJ)t;jnbEkymgh>zw2Hrx!qfGUWZWVj%`R-c*M>9A$RWEv3#4zO)Ii`>1PWq+TCGEPq!qsmMfEa@;g?u3^x`a3wyMI zZC`-0k`6WO(hIgmF9M5}cX&rfyKYr)fPn$h{&;TNeshr zRWXW*Y+SV~YS5Wjw5HuiY>M_)Hfit#Q9oX;c>u?R$W(&iP@#8DrrJ${ML`7GJPS&D zz$t{MCuh}JzIm{GE@5G2jU?P!fkl zPAMNc`r-0JQvZ6gAIBv2_9gabKUhq>7}6uXAiKma$(&jtFMJU`NqgU&KD%=iWm-}W z9aHubkY1R1Pp;|JPDn`jY4UUPoDV&m9UIKP4v!4t`lxcPLiN&57%gux%v~uJd=q>8 znR^Lcuz5Nedqk^9w?O2)n419=_DLWxc0s55EmyR3H2id^d6qU!AgOl zf|dEO!=hz5HV}Po^F_!C;(Z0&R8M{-X+qNtUyRpcoQA4rN;6}p9jiyxFWg+~HzR1g zUA|e2S)#*NtGx(1HAWei#QV*W4{oYoqaH?Ojs>)NbR1k*=3IYE^41?JF6}(m;Y#^C`=Sr1RHPVkYQPr(m1qkX?-b6jEJVA5u4CXj=TemrKyWA7tA0?RzJvj zG_Q>i)YvAZ?Y1CIAMSHHEv@TH%A=2WA|~-g>gHs7@%Dwo72#J+vK> z$mCC|GYJ@b!gq)K{V@xn&_f{g$5LS$V5_)aB~7bH{->l?ewh6m&%``}6H z{DY4v=v!o#f%#-EiD>T6eq?W)1_P&-NUh|ytt$OgElaWK;W;rzVNezV2D9MeC=aY<`_h!sY{qkcurZ2FwBF#I$Bh6lH4}&+SMtyOb;e!XiDmEwl)dHuS zs@O>2L2=N7LxosqTG11CE(`)y^25(~9~b=Oiad%#ezcCoSMtic z>nM!Y(4Cz*(n;Wgr9~J-u3=R4Q|(5{I1e`I57|*$VO%k7t{JY(V2o7m$L{ZI_Sb60 zmLKnF<^>~y_szW3vw~5|o|~KMD|7Zv>;{wWH)cwxLD>u9{vnROk3M8@`jI;g+Dc>_ z33!`j2S0UZpb1{&r9M9Y`+@MGX*lS?~V+m6!^w~<;b&LKkohqAX0 ziz-?hzL7>sq+3Kny1N7crBPD4l^QySp}UkuS|pSP=@=RbN$D712x*3HzKwd$bIx+ZeQ?=EO~#5NY8IP@+wT&;|VV~joY>!=L*7$MUbTi%uezH7ja zAI_(Zn^Y=+glDeReSUp%OmK$tiWuW5T|D;QY2_BT9aEa~1b2$DFZd-bHGL4lYsLPa zZ}ANMpQt=GPCgECshsq$3^S>YQ<}_@&k!-wW{n7sTdQ8^wz!hKtVhsym>7vRnkXo0QK09tFfi5?|aD`K2=tCQx)Vo}z zTv&f(BjMT8l&IcXdj84&qNk1_i}BvOhnfWyS&SqI;p)JTO2h0N3-Mr9^iamyvGvX& z(;)v>_Y#rgpj$`Rauz=W50h9?#y|UZLgKp562#|W8maVKz!pu8cQC$+;`z>ew-Z|B z7;C|4o@?-w*1Kf>UeXiXPFahP%D1Lzj7fc^WIRgA^2d^AIIad4#4*)JE!~!1Z^s3P z$)FkHvaXLU;F6mJoIL8(DVkZ_hR5=J{t7*fd3H((vZB>NjTTSw&DYh^DXI|SG|}Cv z(X)IczkaTT`p8V56Va@iHB!{*DwTCteI@-_t8GMYx0fUKCb3jN&rGB%QM;v__F;|q zOP~7cA(@P^I=`jRNVN8=SGS6THYd~kU&;)`Hk!0(l~xVhp4Ry6FrxISH#+xn|>K0Azf(Qc?JZR4#pVqT5?$^W0A{XthqcGD+05-9sJg z#O^6HxD>AW_(NX^MFrTzDCxJBS-!0(eJm-8tN#==%fn+&PDKq9<=-SN1t0W2ZC$T_sC@o& z*MNMW_dTxY(3kA7%J^K_47HHn#P>XMqn<^0x&<);9iJtFSsNJAM5Nxqb3{&D;`+k+ z$Ky>x=^|fh-Mj#ax=ObX+Hg8rY&bSvt`Kym3f!2JQ&>J+*KeYiaVj%e-(hy6u&s@! 
z=aXJb6X=Tx_}8?W#Vg;*sI zIx}|Ht9dC7Ys>)6jJ`xF&{(fvqUx~>P($zSh#bmK z&BL|Py%^brh!(sstQQf_+Eoqm8FCRHfX?R0$1@#lhT!CSac&SzORZi6|)?>1*dvEwWNP z?Tc~F7ow2HXwHIoZC&sfd|Bl)N@*jBvleaPpFiUNP(FRY)3quJs)pTJ)lR znO#vrVn{PODvkB5E|`(?N5t}JxpTHAj?2V5slubrt79bQn;r)Sc2K41%9>r%xg}d( z{-2v_uS;F5jH>Q;45DVet95nws9YDp*`G>GQ9~9pI{1v{+*u$&WF}4>H%E)JV93|x zqQTX^+8|C#$b$b!A(mJSQg|LqlnXX)6y{>$|b9fL)%X7c&=+?6f?>CRnc@@9f2>jmO5f z&&S-6@>&q3P)$kkF&nQ+4~sm0(0Lz^*bmpsl&+%5OWme}g}u(&*WU3x!ztP%AsKm` zx~ElVYl%-;n_QZcj=>YxO$V#&v>6wq85e`jt7jQ1B36wYSt{we$G%!O7V}x*I7_0s zOY6OHcD!8&VcG7tmcQ}X#v$J-XA-y>aNng-rJbr7U@lN7fZrpDgs-~ z59#@?_(-tcxOg2*`09#P9?=zvxw3MOz4JZHmsDI>^@0JJ#>YdI(|`xYtNA-bPyAS_ z94PbM+7ZJ1QBHy`3G6SoH!ZT*b&SXNiO_7rWP2lW0G%*=pIR zwoBv701^x~zT8x3|M_Ovho$e9=}K}6MhUYE7|_dFWrymD zi5$Z$-*8ke_b;a=wg6fhoXns)arRm7%h4Q;weVNf-^ z?*E7h2UakHQadb~5<^vl zqpTdR==Z2YHWUYi?WozAJ`GOvBfk_Tq8;lMs;m4qpDGI08hny*Q_8)<>Z62cd8Th2Br||Kd>KQA*m|UrB+Tct z?0h!(o4z%9V%+Eg4TN%GyQd#B4Ud^X=bPq2>7{{H+BbJX1uFt?oEG%w5l&9CfyTaM zRUSQ!1Q|j+JNmsOwjhS*2T8HTS9>dOvs8?HL@KSS_cAVCj$wIoWcL1%tf+rVmTCcT zR`7=7k64`#BPes@(h+*yE2%Tpg#i@@BHNDN594?tzU52SRBrn%br3aT% z$2B4lGZBcQ+m+kXC%QXE3s*xdP|tpwY7IF@Fiz||!rx<_)hKDo+VH$V(9Gp6+U^%Y zt~yhXv`2>%R=OL2kPo@&i_Lo+9t`Q-9E3<3)EA@;>7FNQN~#Ldh$r)B{@Ce5v*{^>pw>rE4Nm)j8UpcpldFh|ws3`ePScjddLQQ*Iq=`X9%u8X!<@e3m7MH%ia)& zfAQ7F`Kg1@Bu&`rcmYHe1d(oT{*Y{>W$Sr9=ys)hV1Y|iwP5tt(>qa;)yLUKGSbNK zI@QLLROE`yu!$jMitOk%KB z;fcZA5t5YT~#g&rQHP6 z_*(U*Y5P{5u}xkATZkvq-5V|gq$GtQ>&Pz_Lij-P97Q=V3ZCxWudg?bV;U$vT3tsxsQ6Zn-(W7&suxM4G(Ii)|oL z`Q38dU3b+cM|4wnL+{t6=eY&&hqEk2Z`jVg#zTXXYPHV~4^ns5tIk%(v!;{EVD0_O z{Idrf-)V=hxf2k3GZx7;)kLVyAgBNKQOvS= z9*AfbG0U#HrsrEwMEjA==sBHdUU)CoLAv=6k5s6G2HPio&qh#LF~S*kf1x1Y_k7a- z-_B>NtuQ1p#Zc>N5bb>v?z@E;eJz;}D)H3Ns9(OCo#Y1FnTNJ57XPFQ>mC91B$B!F?}ojo|#^U!Uw)LRG^pkk}J8tThB zS5Zw8(*@842bR6Olc(N5!{L_IZb8nh)f=#(PddnF)BW7?wJ#$zlwcWom~BFhvpX!8 z=S?7fC${PpQfd8$?|$~>Jibi?{nYTmVhZDY@bSpc)r*Cf8m50DR zldRC<)5Cno1VTL# zh^rey;&MCi_g>kPQX{AYdG;^xn*Gzh2q09N9uI|JmrTZ7=@&2obH`$YJ+Gc+Bheg^ z1?WBCpWoHMhvLe#eRO_nJ!jT5MB=yB+nryqH{EId-v6uC_i7Ef{OEowTW#(Q<4&l! 
zHrfkcZ(7dU&rtLYDiGT+*7q6lT?_&FQf#3!4!<4uL)N}NU^kr2M5MXh%plV0&yR~O z-(|LXXpnHuNb&hgl-;a{gE%sOP)H8RrYi~$)DPY^UGEug-8OBc?wuWMz5-mx|4dmE zIJy52(Cg%MHxXsydzdSBg==q~yA!Q{+^<=(;$P7nb*SddsBPS?R9rq=O_c$8bWm~` zG;i|X1n**6AQ0Di%8$x!RBH9v?5t1oIXIzg9BXN^-%sxj_y6N{@ewHl>as*aQAnyV z74U4^=lchbfqU|cU$-D3J$tu8S4Us>mh$(mSwr;FM#A9cVk$kH)FJGU4X3>M%;2A2 z7a=f(y0Sxik|6k5nAyiCoO*UiwnNfXcoL%MbMKTsqrg zG0ZaYa~FN5b7GJ0GB>m#ecU(K0nX}|(An~uH&Pm30sK%Ixd=-ATbZSmizAY`9}_Di z!){1JNMDTrNK6VTa+Mt{l{Dw_5r7i9d_>3=@CV`GjqI}P%rs15U2 z`{4OHAmAO;O~n@fB`0ikeN`%GQ{8g1 zb`gI)c5>-Alo8Ki7^?LTbo4%Ajb&s7^`JK;AqX$rVE_9RC1k6GDMEgt1~t$l_nS8& zcN6dez>`dhnEW2ca5Cz*Pm?jQmH`^<|D@QIV`2^?W|?Kfel13tMG;Ld(qT|X_lU1D zgU_9_I*KxVpFo^O(CXIWK-on10EW=<+Dd8*pxpsDnIU2N(|Y#RTywT21=O^6UEPU| zF4qLP4cT)vzhu9645^}qj`?lN-)U`j>>b%jKGj71j}GtN6(OtJ{zs$#soGxjT=mQD zJUF`rS@#p<|FIW~m3~r@I1S)cNj7>_C++L@iF8hL7&^rQUn293`d*ZDw zoYLMJmL#><;aZJyZrgl#5Z1>~{Pekwcnz9W)8UG_RrO)ZfdzQaO34|Oni11?l1#IBsd+af9i%(B36hKayWflXUOD^K7h`yL zG#MG0RMm86Um|O~^V?rnC6imz_s;%6p#B}dqq7FEccS|UHh+*sOOvl^;&=9|h~UEJ z9jGVlZ>6@OvoeS_K?&NT`)TSN$s=!-MDB?ryf&CsqDVq0`NhuZ@Vkm)ARKoL5a07( z({;0yYq4g(aUs);fcv&3boKaS)vBbWRA2~%BY4}4wgYIR7IZi$%ISD@pn(6}lHnDG z>3apGE|D|Gjfe|z~u9;t;91J|JA>_c-Cc z4MS*QLC<$Gg!bRl?@v2RM$POXm-fLfGK(1k#gGf&G`)$+Vl9dLOKp4M#Is$-!tbmOE)sy}6Istr9w%e)W3@AMdtt{pGN=JZbe`ZEZ#B?(W`?OKw7yyxven04L1I0BDx(j!h59 z$TBVXKZ!)Yc<*>kd?7U*W_aElCwjLU`upL|j&l_~#vOnT6meXGirJ!a=Qf*)lBl6R zNjbuuIfuzHMv$6McGOVW=gi;U7bEYk99d`#-cq$*7v9O$(H)}ce|%MI4QkztHYs(} zRhreM6B?H!Kx*L@6SKAb=d}Fmg#p&{LJf?HFI+(x_&y{jdcI_X*9Gp&KlZ@b92eba zbvq%n*&4L+SkW;#Q;1p*&4CzLSDJg$+?`>!1gt46HSW;+&ks7Q_^l5U-JczNk@$KF zWk|SZ5A&Z!|DP8m!v0!x(iX=2H%*Sn^3A*EbK>f)`I7U&fhTk1u=C|m$H_hWXZl5+ zUF(mB*4>tEiq;RLhzSV^d!JwUE4VuGT=ylutv3B>?Mk#Plqx6IIrE|R8$jA ze*%azgEBBEhKy#F^t-uXGemZ|U_`+Od;o~?jnluY)t&UCF$}XLjIiMuJDXsZL54yd zdNlEYYeQ=gnvZ6pO6d=#OGO1{BN4A~g69eeMCUN+o~kjQ_d)#o168c}O7%{d3kV*ko_LRFBP>KYkcC39R^v@Ekr2%gbmFIqw5C7fW zvGF5E7e*o8Vs7EzdT&?VIHaHKjyW8OE(A7$pybJrBlal?iZ0ZQ4!~bk7iTG`e$kMw z(CUczAhO^xhwwPcb!)pE5~rlRIbBa-CXFYCz5(=LFHRGMX#j`^$pKiVJGn(oy!D;o zjSNSsjr*#>LPklcXqkqGhxbXVhyjWi1JF|cKB&95QtrUOA_#Cyp@H~4bgDk+D%Z(l z`=jnR*btYET148OfuBD@bc7|TtqO>e7B)Q{ZFF--THRK8v|?;K3sfYjQF3wh!8@)( zJ5XpiLbD^R#|9vNjmg`y@BY;Dx)TZ=$epo)cC^n6`l@A>Qxp! zd)wJ=%y+^vObU!|C(^e3!5`6ihkWcs4FvhfR`~~_)U9(jsdy$L0sX6$Ise^01HiyN z5*#=`Q&vR3z}17UgGV&Coq>oCwM`^>0LhAlLOy582W;1~x2#psW=&jz;4W?zS~$O$ zTqKy4d;mM!WjNcmNQMu3;i9ZY-1)S~KR(S?1_-EFgMLrP9W|=Y(4MnX6Uyn?s~ESF z0X^&DWXy6w#mB)p%9|tU`!CVpMR2Q!Bbg0N1!)Xqx~RgW17Dqmq~g@jdy$?AN*;(X z&@r(0>HAyPwI*5{THgBQnsjn#j?V- z>y&fFq`zwSG*ea-9Cq$DcN_{^jLbUYLZ_r0`AF8bxVR|b?||&);n}sfJW*-t1aXeS z#3g!=JNMiv|ImX@n#$3=TIi*5&SeZNk}@Dq@Xq8|JO?b4MZ&bo*Sl$BV?<`vDsG*c zA_I=8tEPLdrS|s0Q1HdLrgs5^rv1I0{{J#-O4+>kL?EYP%fVCL*Goc|8GEf*AYb}Y z==N;g#WKkB5e)`~S?`F#5+FjOqlGVfILpY?v6+#wV+A>$za{p|r-mxBc4Sc8jph!h zYf*-b{+ph5{n9VSAalL(_T~}}9A$qocr1SKot|4lB6C1)Eota4pYWg7{O-bdnvl6u z%4RgYKr^kYGY7-=X=yh%NR&`p-;{)#{{y(A+9ax{1UieIP(1d8W93Zm zc&?k6nm#_yutK{0PnUali^hEj%ADclq!#Y`!4KSW;b_ z27x(uujY5^j(GRFHYXVQ^n%T+LH7s&JyK`*08frt@FL0~pgZp=QtAQW0LAsU3!=J! zG3rkS97@07UdDeq*ME+~UslC$_SE_Z;O0;gA9lkZ?}lR_n#^K5V(@*)7EUwq7m1mC zk&+>&dUe+kWu~$wsK=Hz2iw|!*RDr*A(3>snutJ5uxBz21;o5bX9mLjxF6u(US;|Y z-8*gBOlbdK!uda@lCgXW%OS6fBLg9-UTFHc9Ixzl{dDOg?bQS5%?|DUfSF-TQHw8u zef^nYp@3lSGz%lEPxJVp>7?K80Vy_t0)ZMRDu;+z%^vw@D#8@YvC}6@5AZSXiu_&! 
z^v3dmJ2>Y6&s;D6KX3PcUHIp{Fm_tq^5HDC*8fUM?ZN%46)mE~Yq`}%;A2W*Y5P_nZw(@%EUx!D)ZqUt8G>MjX(K@ZJI4pJ?W&q_(dJF^|$)=?u%75qLx03(c z_3s-{>;XlY)<-l_Xo{`vo9(AuPr&tz&XH*@vZ)=0?_n&)(@GF`{ea5X3&)W3g~J6o z_F|A%4v zS6=;Z=(3~3;KC&D+NhwxB~NYU8&?mgm|R&rO3&;_`g&nq6R7vYMk2853aXJsX>T4= zIqZ+^ZwNw3KwhxM{l+X<*zMJ*6_?Iz5OY4fHmI&u6ADCQ+7R~VV(RrlqrWhBVPqYl z3l{?0ItGDzhMvR{4hY%0Dl-4mMxcJh@w`-6D;8XN@u@$v(7{#r%B(m5Rx z2F_$#>>d9TEBl6oi_z*1r>IskQ+q_P1B?b=9AxQh1!#0 z)W&8iB!`sRQ9*&@8nPv1U0UhiM=%Fhl+Ay&nSjgS$xD;Z>VFlo7W-gkIvi0}bi_0+ zkL^K37Yk0)bW1vxGxByO4`xkQ9ozaSgPI^7I;#|TT`-NCPLKyAgB4*UNIgYz@LKqWjj zifHtRF4F^5lp(L?7@z`WT`fYYF8aH4@^5zyFtWKeB%=JK*&WZiOZ?5fmkSTywB#MUap^CqYi!GnS?f&4!9pwbfp@vwP(zay3!5uIw=P zF1w!*YEhBQ-BJXE;bPU&_l`+cBM1^Bm@RExDZA8{o9syZ81IJ@x&s9&LVkx)r}#lF z5#L!MhcxvdFOBEq>>7-6eqMxx>S)})MM&xzQgj@TGV}38GWcDtc)*6e}0xwqNmf@*L1)uYUa>JzFT(Ax`)L~WF_GYl?D|CG+r#8`g0?sNfc z(*MB|QPNxT-i(i%pjM4FiOaC~5l4dGV_2F?W=$g*IyF>^oEa%m0Sj?viPO!`XOml) zgdJZL3Pi+kQ>7waQfoq*|I7#$B4u2hq=w21g z@hjrfIUoF+vjRzz`G#t;d9eWR-FghOw7qB55)rR#wl+0H1hIq7gZBY&jSUVy65`jM zdb`f@k&=HF_mVUbcA-8ZTd_eI5TnDzIou@ONiSz!h@FcI*|JZG1K4gH(r zH=X*Gi42R(R;B2zi7R-`#6^2>6J^-Gdyq3R7fTOcS zs}rRizntFih)(OpQ*UZHnvOL+1eM7VQ;Ld0^tx7>dk-BHr^FUo32&ElwvRfYJvyH3 z-Ao_tw6U>CYQxc~CxB8+^@qj^donWHh3MO>E8VmHAzwd#4`=zB`X znF0tZr{sp+86pwZ7ES6)0%ZKm42oiA-)iiS2Tp;kF#A&l(nwzgt%%Fm;Efs^5R^QM zr^>sF-H)Vz*qLVW{C6Rj%fFsJyCf69lqYv0ygP6Oe{*#oQ<0Iq(E3MQsw07FKBpP6 zdbZN}$R6XRnP%0?TsvO_+p@SFmfB1M*pI>q6(<^y6kKp@=p^=?d5SF)xuwVtH_TJ* z8$BRezjHnhhXwk?v}*+FEu1X8#ddp#+B;~R$v$?R_N=|4o5mAfmgsA9`yrcIkS4@O z*=ywlu`B0ERK@RdU#cw2h|nv$rqL=hj0X7G$^gtRICGCE-Y$WjPrWN?nPnUCbKN-h zSl+Z)6YXnY%G;Wn>0JR^)kw!6Yv<7~ee8*PVQMV$%st$jX#%EWAW)~fq&p8fTvXK3 zUE^!E;ru;LNdI(QV>upt1C zK(1E@d5q{>A&DMqBb+^fv@N0%9v8Yo2b+(}?rHs1wlLR~Yzx|6qcrn6Fy zhXasleM4}DV^m!f0OA~O>-P9A&LOyqbJQ(GzucLG@)XRVkX?AJBn@zMxRrlWKOQIG z-8Ky+rJho6Me!e_QT5!fQ37a)z|<(Y|9Vi3;_bA=6S;vBRzJ(9eVGAEyTp9?>Jx2k zZF8)y8&mnt?j9aO!t0qP!)M)lO5PU(-aw;?J`U)gQa6`8YTc-Suy@V4RIZw=&Ek#F zfhUvcS=(i{Ksj~I^Jw#N>l}wNlsLTEas$Qm7XQ~CPI~K@tZ+%V6{!!Apgtn&Wtv{m zq?*DYJfkVjp{^k@1}06c_sMnY@Qb_|OeQOMM2-L+O;RW7es+fuUu-5}T}U02M1d1#hxlNQ?hA zN)PHIV85`x^BKQ`&s=!9J&X+R_G!s+`vbXUO`(mIQ~1{+Q0wy|zaE-$8l%g0Ca!}m zR`tv}N_D49Zfz+hdw2=nskaOO!;iTyorRv32Lc}dz3bLx`yE&SI$>1wr#%Z$5qSSB z3(uIMSq4QAqoZl>kLxo&Cs?%B77?2k*pJ^q8DcP-ZH47Kt4xDm<&J3;xz;-g+Ctx& z_w#&403-n{RsFWl-*|A-1nd;Zf|7yaRcd3*-`{CKt!v=dhWf~1=Z%;Yc1orL@m9%nT&7oV+_f&$VI8~XQ9yX z$JlF-%)BU0HUaPZlf6q&i+IEQq$q%?Oq%HXNJN}R{j1d3=gwV@^*^Oez=R=O{x+QF zED*O1QZ6#sJPAg|k6{05kE3TptrGH0524Ud`1IptX%xqwt$l0MXFnL$;Y`Ul@;o__ zJ{T%T9~Wc`I4v3he;*lA&d@*`ToU|6;(W!Z35^DI=FIwvev$N#YOujW#p< z>*)joCgc3Z$g*nObdMtSPgBW5cxi2vhd2w@%LK||HP+KX70PBZs*8fjl^7b z3z5J2u?#yBEyBQFE&KHjfUhzxDyiW~^ZSM9Q+_rboGBsZw)ykRS|UxG5f~B7ZAm}` z1K7viTUuhl+WDWvY&;CSW=2s8RI?<601S(gPm@5F;uUzY)&J27F&XO+XJ*G1;`ne# zvPYiQCx|K2$PE_XVIKgmK1;|;N?mvGfzglc3*ajI2w?_AsOi7)XZNxX zedbLSmZBYyPz_f8fD0MB$ah}q8>&fct*x>$o=JFlW-&O)tfKjNv1^MuB8V`=s7ByK z{7S=R)*7GfWzeFxRtW}o+a4(8@FL@EbDmTwDE8pMsY=C!U{P^(w4`uH1|6xOjk80w ziYwVM+8~CektRCBoifC`JE|=;IjXy74;rS*q>A)OQJt!MsbRwvZK+J1ky=o+DAu*{ z6AY?%ZBR3c&NJ*;K^UM(h_?JPAt`}=lYsym>1j{CqWkP_c8%fR$%+A1+g1AaqGsR! zrhwM@zGSgh6E9Xk;qC+eC3f;%OP z>6k!g;@WLg)_{wBeg9`RW+^=NLEk<-rZ>TNG}3?W1xS%Ne_Cv$9u&7+`l5j(-B`%S z73IBnC*e3Iy1yVS-EXQ1L{NH;OHjr*)($4%Ld6rEOAp30)k1p(DIYv{ZC99FYhOJ! 
z;6$T+iAvV;5rYAa>G@I!x0N>lwrTt{M)HP6;LOltDL6&K$7?}t2-!wHip(Eqbo9ex zq0)YB!b1Ei)gfi)eIe1bVVzjaWD-^L6_AL%yL+6fOzYIf8=H7_0_?VA--X^n;L`)` z&Yt2ET_POyD{)cs!`8H9&DnpJ6EGe+Xqd|E?XT9p8xUM|Z<{{(a3X~3C8cZ4O>=%q z-Wr$nz`=)zIQbLL2E9dp$WzjRi7ixH)IhvB58Qw+%v6#)U44~2`!bHlpO`n?VNPhO>cz7u#q zmiKvn0Y({kz}1}wl)Uo4Xa%`t(p@i4vxF*B=G}crs*RWneO#)QOo|hF<#e<)$w|1N z+fEzlpFm2gN-n{KI(TqiVcz>NAu*A>u)$^Hl^CGF=wH{o=GWZ()(=F2T8@NDzkK1I zB;_=RFqYTExU1O7ip9`TVu7adCr-PLIJqvaM$i}6Laf)g!LdE~6nvOKJM?DZ&`E+a zi<_&Bf~;kr#?R&R^V6R(1J=VDjRFa3W#Gw55}1=YwwSC|^&LZj5DMSLj(v`n^!U9} zs3{+a=i*3y#6!&Pugat3|f zi4Woe4d-Yzzf6rQC}NPfury`@+eet=-9BYoxb}RshdDi430^ky17z@IP#uS}Zx4zY zo=k9c=eJNBvh&@o3mo`%Yt;r~n{F}B*c%HKjD?`$LL#sECT=V(p5=rnN#R<>4w8{2 zn=8PuZ`{^!i9QR{)S{y@&oo1WmRw{$HHWj|hif3`Ej@UQAX1sK0a1+?2)10lqZx#( zyc~e*3tm2=aK9z0eD@~#kAzG0GiNW>TDXkA(Y*ItKR*+(DHw^<0 zL-^uH?-3M$W{T(nVF+r(2`M8>SWhmM`zu z*);pJ7V!&0N1qSNhqr=0zqS>7YsSyd%*pcXofG$N!;w%z12%z!-BeJzfAMgSTvo%z zLpCFcGD+=s-MRdSRApPKHsfAMkw-~CL7x52*kkczG(q@Qk{v&sVWwwm#EL(kKC3mn zE?*}opx-Qx4I+KUEgrE%_v)dvQ=6d2uQ)TDUxPj|hxHldk>%%ZjVa0|lD>!J3by$7 zBsJ4!1#uyP&#*_HKh$~@a4ZO;xukJrTsSvIZvfSiE0mz#INuV7)hR018HE&_C_%-9 zGFNa)?cBQ?>+L07u7^WF3Nfz3a-SL~45Uq{n)*4&rlGVgw=n(m+cWHxJ+3)Vk)Q^W zzt;66$1ebO!W}b%{rewgGLZS=#)eg71eZ8{*L}GwZGPpr39ii0{r=xg%}1nH_5hrF zO~^A#@=&+!7Ht3E>VW*&3zoFdGny|b{?=4i#RWrQ)h4hG6P=)myBpnb{!+3lE*Y0GTC4>c z)QkShoz(y|P~=VnL({qLN2-~$f?iex>a6mhJhp*kL|g3%(OT?2JQTf`xA$aeHt6*L zijq#f7%k`JEYW@%B&0yxE4tQJJSV5*8}?^}v4*!($p9s5JKzB0 z*xlP9^n*cq->?=FI$n0ZkI-(fGSe|3gF9^$3aoy&|HQqhs0X8e;5!>sQozT5~>G1^BH^7*_+(o z%48#V4-IM~=idDFuWr$xpcY#A9&opK}w28P9I z5P-^e#zh<~A(~o##&025{Vw8FCAfFUMBUgHfF{T2;?7v_v5KcjZ^S$gRzy&GFIJtA zS1#HW%WPN1%?fcc-S3j>wihC`XaahCg z;4nJbmdi&tFmOY*--Zp^vVyTjFz(OevMY?_P3?)kJlNpS%%y{XQLwBeI6#o* z45JTx)6(>vS9CMMByApFCX3p=y~qsI0fH&ZzvRmB6M7e{4C_+}3}bq&qqAeOFVRsk z&vCif)^i&u6AOR>n+5Oj!YCp?tvp0+OKrn^)!!j=-G4xH0DV@ag~{*6 z3X^3eZ~@NRVIM+22872Ng!8z6z^YPuu^~L+v~SY{Ij3?>>VSzY zCX4HG&mA7za9=sP_4*n4w>(6iUeD*zBJs;oDNTc9rX`28?a86ww=SZ@_Ox$ueJzTg z%2s6Z=p|zl4QIH$Br2Qpd53& zv6kz!{o_{#@io8o%NOS~3Xu^6vAp6DW=;l|PlH=*tZk1P{#c+OAfM2a)+~AbcUq?J z55HF#Vy2h`4#y~%y`>d$$mVVsyHsdu_~=7da+h%4rEHq4QV%F%dvJ@J&)3qF&*0l! 
zL-mWbE${nXaqERSB%^=%oE%d)dgS80V|EJmvy{NV@&q*>YmHewMeQDW`_pfQcTlLK zbvK^~4XUiJ7^`{rqf*4oLCBrqjH+d_`;=1A^W?L;oGcct{b)d0_kZPIxX~^ZB2;^B(0Z5v!bRI;%ar)?i9#W5LTXp-ppsgN)r&}Qw_Gp7bd$ea~hpB}znr2eB)DDNlq>r{a zWjkX#5DeDg9y|r&N$1Zrm5F3OaJR_ufyd)U*11T_yKWj>kraP(DRE+@X-M5noKco_D+b zFlGPUU#Dpx*g{@SUETlVW*4T8I#>W93>IJFPH5=Y@@pyPH6hJm)bMxasKbE{T$981 zpoFUnJgC=iMU*(zGk0=X$Xn%n=ZG)yBuPNZUif;aQ$%#W+?lk#w8#hx%#p1z!uDib zi2tTeBBIIdu??Luikp)}00g-vZSc#sZhsZE;H!t?w#qESNN4<~A^50^l#6M7+o1#X zW;|(>6Xr}uGk@duqOU;MpsA@c^xH-Od6_!e+mw8F(&mqHF;wB?yq3m5{*+xn*zALa z^z9!RxGx%}+<9#l(izZ2j@#3%s(*9;%uYS$HFA zrgH2=!*KF(F(zb7z{NEXme|{=_9G26LG^M1juIP)owqa+N2x^ZX3xVNg=1SNj%_abC9 z^Co3-7Lmqp4Eu_)^ASI4I?BxkpFjS_@o+nSuS11hW>SsVzHwRXxiv&~xa5nrT>wnu zGa;VDnnr><40TB229w3gHz;hRM<6q8s%SLwGe8 z+_Rg8GgD8=^t(4^MeLR93EpZz3}N=I9X^5R3_fSTgG8WA$mr{X(1=Cx`4% zI(c_cGZ{I}9r1mqTitn#0`k6~PHIN$9LaRBSgW&}+oIXf1jjz^^Uch(F}NmJAsAor zAx;^&S@1@yS7~;r;z;2(h>40C!?@!8fKjuEtZX(cdvml_D0X!A-CEn|wcI!xJQ*=q z>5*dFT1Pg3+^L$vt(2-^$)1fh8`1$SdR$}i)2aMRYwc9|*@T|@zc+B97^r=V+s(EW zf4*RIf%Be8Znmp>wP~5K-BasNCnJgT?j6i;{R783;0;rjrdQ^t5`IWz^QDM47j*S- zOkyXCeU$3|M&#GLoccDbf=Mc7;~C}n?rV!3rt2>E00jI%4}N;1SL5}RC_3nEL){9ZYoC0) z+ST#P9T+rs-CTdvyYDQp_sL#sn^!<9YjxIxx%3J8Qxg~5Z61v`E935yfzUJl+pBtF zHC}}*iKN(_cZMZ44H$8x@~*DLj}nm`Iv1JdKwb3k)!sokyz(xey9J9_%qpl>0TQx= zR?(Ee4YL=zYe)@AbtA?JCsMhDl^1aG^7Ju_MTA*0r6oRB?LPcP$J?Iqr+$RLlJ9oT zBma5zuWI4s1Nk&$pfveUuAJB=-`!pH%5-e(1_mheHUYQ;D-r;5Zme=n^r1_1Lk2gfGo$M3;N41xk6Fp@MWQ2I=%Vez zv8gzD@u0UuAqPo$?7zBe>B2(j?aD!lN*I?RU9I%V4n6|-H3V>OV3{`*(N?%dh)j|P)T8bpK@P_2z&StG|jh3|)pUyx{S(mlJj%tSCV5NK_~=*NSvJ`t`f3$GHEF5smi|cn-IP{*Az<`&8MaSn;n*9>h`6 zm)wRQ*yQD0pC3fOc53NEf=o^6B<`3f%82>k>|y__T=x3Ny@WRnJ6wtnFag{&5(O4& z{({3OtVgM|y!>gGdN4thhWPuWQU3l@@9hzmr?g+Z;{s;?W?H)M`q?_FXI9Q@}mV)3Tzcab+Q8DH0L>M z4qrdfgss^V7R*&B@TW(#+rlylh?fmTOm!`m%d0aFj-Wr*#6DZ( zw;!X;x`xTXN1x|&5``p-T$3Rhe@*`E|1(K{)C0ODAGRsfe~~u>jAyVnRLl_#g+Pwf zCMNk1<`Z9S`0u-t5h(r`6O2P{H)pdJ10R{TCA4&PEpiBf5wWzYs(&I+F_we-WYsFw zNNZ`~#qj7+=Avxl9nesr$pl_y%*RTl24n3MMmi zg1pmTXXJ(`byc6ah_qNcHiINFYLR2MBZVW}4jKp@FiNeuA;}_@B9EUpE|-ti+WW2x zMMYX8eSyQR{5ED5`DPXKZ5;P~mcsR0uO+?VzuYGtYK3O(zR@S>HTP1;&BM&!Sw$7t z+u)n%gv7yDzj7R%M2;g*Lexww#|gR>kqA`Y+1E4(BEOqQ*)al%L?sTpY(d5!@6-{k z{~RX5LBrcUb9d`&WH-hY7Fi4wNr_b9N8QZI< z<%{6WtvN7dILQ0+OCD$ekNdzy>#OSD83GfX7A$}f@SxFlg0fdU_RiAMvVXPu|10Uv zrahXsH;0f;x%*>vsG!TNRV&7hJ6JCv4c1UDI!UykBOQW*lG37nRP^4x^|nj&Ouz0e zb`I#u_8tf2Iu(P&Xu&cynQn~!O(1M!jBxc{^$4Eg3!CN7{Uk@vst&rIMGCOxxpmlef)mUKVwf}9V zLb({tb1pA(wHY;JFW+W)F4(5y%g4(Bx3xR2olqm)WBZe;zS2}3`yiss)2dmNCpu-}6+Cs^awAFbW4P@0m$ z!e+KA7G(|V`OM-2zg)M6cI6dE(;|V2Eqb=HVym+(+#@K#5}!NLd-F=FHPE>UQ!JR0 zpXUpc^ouUf_Mt1&MU^pt!WK0k>U_IP2H*m*hx7gJW0YBPK#&F(Np8AJ)Pu!9-&VBjv<%yD# zlGXCCbQ5r;8MRn5*C~e_k!}8Q4|{lGe7>kM)Kr#mZ*N##b01eG$d=YR*C68?ak&75 zZsG3B6n*(W>g{>hj}QV<*BxS1YxycEg@=3o>I2V&haCSuggT zMI=rw%#KU0{HqFFdcflfw#LHeLn9HMm?)ikp{N!xy0)$-O$ToY`?{f+mgB-Dv{FMS z9CDOwu1!s!JQ222OH>Mzg9<}cW-B)yPEKRWwDVKB#XIH)Kf3TYX>DjAd-?KZx;=$A zR)Zu^!fZHjV9u0xuK=ieIP9#@HE_CAo4wjx_OR5GIoMP2-@EF0@H&@+ukVwGi(>9c zT5*-NUeoSnj_swqW5fS(%{mM4t4r_2WUo+AJ%5LeL;P`s}$q4Sa&|5J>;X)Qnfml9*Mc+PJAx)kOl< zrR@Ne`OUGc>NCGvJMF(?h7A;WQ+Z%m&o!Puc!9;8VPQjpni2%L!qd|URGei3njy|S zy&!o#dQGem&tq%sSTLksCNy&abe=n#Z;f$>Yetcdj&nk#;3^_b?u}ySQ7$i~14QdL z&jY*T2PBB&LvRo>>IfwHN;IXRpvqqFU#Y4i(@)QP(R~;tfqJ4&ZydOzt0JfO?<2+H zyz_=Z7hOUvUA{R5TL3oTXbMD>@?j_OGIKKA@mA+c{ZD#vINwfQKd*wpT(7&AafGED z=u>dV<3&Z`bcL5wk*}xdtc~2|bOu>}UtK@LVol8#Ckr|dlRgG8p$+3to$B+eV)fKL zNM@a$Z9jaP))rxEDjw*amoI;^{7qmt_X#XmfVq^7pZpN=vONsRv}0;D89&G!-|P(8 z@MxX9+N@EzEzE$j+d&kT>{)H+=aUnbUoh()KtFy8w_L<_{Lb|a;9lK<2I%w6qpnV4 
zyq%?1FX#J%rtHt}VSK5T1))F9(gPj&?!zH`7xgnn9azQ1-=4F)D?>{jDG7ZXi6g$I z47;d3)YNBTwj#=zW$RJ}{rN{MVCDiF;S9UN4g+JJTKmcBI3cxrF??7FCFoJE@91rO z{Us8UPmb7fg?Zygx?3VvILhgsQZhq70dW5KWBYmUH7TS&vRXEnmzbu^TT;+WQK{RR zl--_7ditVL(hTxg($#b5)x;uBr0QQXP`;jA3$}=cO@sQh)$0&opD)KTt~wCM!*D<% zoZ%8qPH4$n56CMh?@x-(ympp(2c5xJP37%FT%M>C)X$puRgOIqMyNDf-Kvcn-i{fW z8Gn1%4g6p%P#4LdU+UwIGA!s$wnuvbZ>+tEkH0(4cnOCJ=iuDhJ;}c2i#k%R zUwjceY(0yZTK{PQRxf3x!NJDU=xj7j(QA=(AFAapkr+BoO!aHST@Ykb+rR<7wh)Lv z%j`F~YFv<`e%m28Vb$oz@!{Ew%DoQT>{*%JbT)O+VLXo0>EuspVn1?!^F0{P*h4Tf1K={>faKBxrQ& z#I1lP%Hni)au=A!*!B3U6@!L7Niyhtck?{T6San|T3$if)Z$x!+o3eQ5%@xmtm0!% zxQm`!&|F(^1P#PIQ@WN#k#+!qBf8(%ft# z;xY|;7Qgd%7fB&mIr6?88g8iYBjla7yssqDQ zS}LJ0ZDbl`|NIeO1gGc)t^W9mtiTom2|CPUrS0ztk6sIt>Tcrc(x6hICU~l4_#(>Y zNza_F5nuS!dV(Qi2eUsL zJeDlgO#3Ctpo?`($EMwrO8$Ns7pZ-Stny0_oOVwc;(!POMgCQbDd@Oazu!3XKF^bh z_#S1F?tC5YQtb7h4Zc1MsB;M4iZ@QZtPf3X24oG*0dC?r0Kq5@4S}R|Y8sy7%%2Pe z+lw;0)Q97?&e)bzF6r`AB+gX*VK}d~KKBMgydVKhV*}5sR1^mLqA53q`8vN7${~V) zw_&mE`Iw3C97(g)PN$_(0ywqC9k^fly_ZnVfic}169U{)_68I&qkcNGL&XG8u7?l1 zA7j#m0egOikt}_obpdE2XvN&ZMCw`M#5>tuCfUEx5F|54-o9A(dZ=B1leI-1FHm*@ z+FOh_?zPXbq%8Vl$ggH!C?+Afg+TO7eEJ$IZ1X<%q>Ca5&a%f3CLQ&nWu8>US?72!F*0@olIK9r3^6YRGXk zu2z8No&84{wxOVaVgsvF&(C=tJyH_1WZ9-HNXQ%vedb*z9#Y22OhHupUik-qLQXd} z7rGc1x8v*J7igVf4kr8|-a|d?nHT-5Os#9zhdXA+tpFgN4>aN2%S9 zV~?8S^3kLYZ>>-1Bx!Y5(!<70Xd<=j1el8`r1`?%hG2t!ZZvHjMgVt3-<6(jZ4Ad! zaid+eF)R+mexR=j`z#!UWyZtDCuRmUy}P0`@jp^R z{RWpX*FO(@^+ZLLQx&OkErRC30GE$D_G7+pyUcB8Gp{ikNwOwey)DBda)ArZ48VnL z{VQY#gYB!+a>RC~b4)5*Fir4pJ=1{j>%-1Y4G-yDSX@xt4S?gg^S|5G+SnWRJT~%W z47}Pa!Fc3c(~V&k2#%q)xSB9h{D|aa5C2e9uVP^OUIN9YGf`v(3ii%R!H%@kM^U=T zoW~n3ZSOd36dqAx!@qh^`B6()(mVvziA~KyjSH@Xc+{dKoxC6N3Y*c2y2*0B9X>#u z9|$LTYopyfX_sM!SAJj%!g5D=b`R8p87FK5{4aj)cyFw55j`};jJ&%+hUblOo}{jA zft|r^28T@he4&DS1}2RN~m$wBY+MSrK4@_Sm?T6o{fZoJ5z{x;F39g{;?s z3+DXt(Lr3Z8>wLaELY(NB3&_gshZ+zXeTSpt{Xrlzgv{oHwgt3k!50*>9wwYh5jxs zSc@d`9GAHe1|LaBC<3P&qwy!$&?ubWfKrSp15;=_(W>x9heh9eu`5cO0*7nUL>)8A zA|2Pb7Qq(!fu8T7FI!}u=#* z^&+*8Nbrg4iAVNjX3{J8!vLH?JYSI#*Bhb%>~m`8^qBfz_H3 z;`GQq?R=r0(<7B>RjAFqE6n@+G3SD)`zQ8&5`=3PXwLjXk(h6p>{;uQ$~KsiSw{p@ zbIF~&W*0N;=1S~G$&W)x74%&?x*iZXD=M~eFP^~);Lt9N@%7DVK}Reu%*I-4W8m!B z7zJ@j!pGB8&Kq_mv>e7D4x{nzCS8;7%GGkAynV>i+^=!Ok`=@g;(j;Q<*$lCxSf}C ze`GGX(X#413*-6Bn$Dlekspke@JjLaHod$4V-g;ke~`2VDIYFz*SI zOdS#$w^jJy-MMq`g%9PHAxC#P#8Kh~vqd9v zq)fu5X*uYXt8EngO_Wyy&X3ySyoH6@M7*BqI1hSp$<>dY5)28$#iGj$;p3jqUl%eQ z=Zw6h=bN?m+q}5CnwZP%?AUlq+GQR|7jaK?8YKo;cWhIdcq7Z*A?lB4?|{`IHliGO z46;i$eI5P+H@I1zSGJ=qZ8`s&%I|{*2WPWNlP!v1_MuVgNwPh$f!yHsKqK?k&@| zG7wVm(d}Elq%93i0p)8P*6oeUNGEy!-yEFhDI)r|(tAC;Rb6#&(XMk{2);Qq@=)AJ z67sFMN{BABQDVe2W4X>g_8)uECj0wUm~J!?j(90SQvU4c_Sz%Q)}H8>>zA2#aqF8X zVJMz-)wV}ij-|-zlO0DxyPql)k=^;e+YS>jr@SxF75vaTj-cs4{IXvHl#BXii&zo6 zMh)zT2|x3~O#Y0T!Epuhk(iz1(kC&&PQv9A`cD*xT)}ja1#yx?BPbo?OGT&aHwR6S z81oNz)W1`RG^H8QKU!+iVdJWAJm>vP%A&#B%=Qih+@KxtFt`U)NXOD=*A-Hq<}TU4 zz2luoI500=VN#5Ey8!sgZfwI(UQ5Broh{Fed+byLFLS)jqGv_HQQ%3Wr}Bde8;r3b z(@BTK1Sd;N0s^M2cHpdUFxn$Ka{a7U)CBhk4;+`0+f#&<-`g*5xr-{_EG1qtfon&n zcsq_&k??(&@Vc&iWb^ot>%2dyGT@$ZM4uhY0aoTnhB#dfiN4Li+s#DHn8(SEA7RUg z?7Y!E1E-mfN6qu%dH-;?znHqhqmQ^xTeeph)Mj9H5{Xdus@{bMb0bYh3HD0#EKq&W z-KsB-UuAlA-2Rb`{S}!TudVTapz_mT2N)Di|0sKm%58*AzXHto{fiD9i~6-9XDu3e z*`@b(hkfm4{5pFo;~!G{?wiiYRq z$P8G$A*s*Pr01n7TakZDoO`47vcZQ@30DhD10VNPK(!%tJW~t*cpyW=U;;u=Zos*L zg)q}QmQ(UjVj1c^TU8|RcpCJgyrN1_K87)FBF;P;n9dGj#7ez$t!Q+Ae}nQ`6cUHV zb}3QM8NsAnR^^R+wvsVuNd$4;k=A4pBA&Mk-|}nnx2LtFAyz{P`2pz)xx&jPPZ-xm zH^}+hUNIbKYz%jD;5Qc#bJ?wQ%J6*HKG>kj*u={tS(0m(*vL1l9_g+X8@rqp};(aJw3^NM?Md@t$Mx8z4g0%Md>NDXa#Sdj 
z!%e2{qxG}QhDv_l{A+IQ6Ac0!<6XG{XZz*9L5%EEk2YONy~>JT1R~u5z^ghu-G-rp ztV#uv^|cM?HY`ymWlL(_B?-2DVYKalP@WhOKiqlqfI!2=gN{~lXnL}2LODVCSk&DmxD9v z(qN)Cy%qbRWW}O(?VT>=xqVTSD5{R6@l+EJ#b__pxrTO!Hv`pVd~SHZVC&^MoI%oP zn@LV@i@Oe*H*?BpHhk+k$M-z1o1$pdi;qDAKAdP_X5)c59d^tC>2Wzb$lcoUen2>_ z)kH2vN5tF_?e0DYgIaWE-}I=t&{zgMWK|4B#^e*47igd*@+&O2@anH|1D*A=aw70I zcDYbI2vihTf$gPDjBpl-3TC!q{5NM&f0=@TL1sk25&TcvioH-fjs&(j*8Wk%@>f@x z<1Br~Ri9Cg&wAsom`tBvGDB&Bs(u{EuwFn-^ahiy+nldib6S@g8^uR6F7IXq-f#KC zJdKp-6l^_wk1hu(w=Fs3>7!r#r#uic_Vfc$R|wHHEjKS(njv$Iv1GMVHyXQJnr`pv zJh^oA^4OoEEa`s&tDm7jm6aIGQ&6cUi<*$9vj8-A*7~BIcC{=aZr0t~u250bVGm1U z>}2wh%dgImTOeOj;1`X6^<;e=7n33ReO~Ktj~-(TD+tPc<1tE2XlkaGT_RBz{q#8m ztO($o*;@hKzkH@w^x{u`@-HiN+Wb^-iZMw?3&i;VAPnhD>J^I4@cW&2#66rLDoXl5 zDpdAN)c4SwUr&yw_LLK&L2d4~H|@?jxChWI{A$)ys838*lj{A1467uPU@>L3H?OH& zqnsA$79+??)}KP*80Z6dbG zwiEl6XMWO&FF>jIy}je-LJqhwMS=iy^<1XoEjTOrVXo#LaB3gV#Kin>gtMeP@psKO zCQ+HUbtQ#V#YH_eNzpoWWG{AWBpT+SbZK&=o`A`Ra3O$tRgP-#BCIG188N_y9unF) z5=my}3RT|s&xcEZpz_^_JyC>luHp%brKGA1?l~Bn$*A7rC}@893ArwNs)3T5Al-?d z;<(|RglAfGeSX)n(#c0FW2xDj(cUK!9Dr8R7Pg&rVR_}~BT4WNIUyuRIS(IE`nxi* z-NF>&^-*(aRIVZcEc@LPXXx8qBxD+O`^pQWrBn-h3N)q?5?c&Sc~jn|c7K#O4xZ7u zn(eqQU;x{IK!JX2PRBe)$y%XwL${dBWa9Mk>LqrN5cu>5K_4?n!&6X6d&u7OSbugI z?hx*0QsF$YmFwi>gaEP>GudVjn(a`VM@vd05fJf;DPRL;t!yP0eQ!0f6$bV_7h7i@ zA3vBQoBw1Wx?9zw^RA{s3Od=iP?8@tBw6u(s_kjc!B8v_T$JrhYpF@CUnWw>j z42Sj4)&Ui4bcS~f!m-W-9gN^fMN4&xL-{_qFUtiQ2M?vN8^c^_el2L80v&6xxs0)- zMSOgG$F^54f0~C($D8p0@hq}K5&%|&nN78xzv-Yp4@jKDHVeM;O5V@e$r#vJcAw~Z zHFWs@L&p$e9E~!+^mrP92W*sIsUx4+oOO2%WQX4mosnKJeQ(GM=~5n-2z@v#*#ZTp zVR8>i`L-%J`eUo`vIfr|UwIg>3T_LmuDrO8fN`1uAcs|vKSe!$+>2u}sFsH4FOOwE z8b|U%&-B>{Fz;1gzrlK}XlA%#wX#d;drFu3Y0MNobO3`%lvfbPPyR&b=Ra-rA+GXI zIZuw2h&q5K;Py+y9MiYNX_=|u5KaHXJv-1&8M32e~nI7(4V z3$dT$f>x_rm^juME;|Ll${tP)jxUG3E7~s{A0tXZV$F1Y561`jW{glg9;w9{=c4PM zQWl*vH?LO0>86`si&!iY4w*LeoPuL1%L6B5$&0N3q&|imss_JC1Q15y6wO|Gb)4gU ze}AgzkH&hoyx!76t&(?il0yPN^?w239q%kvY(+qVpgeX|`v8Q5_r+$?9uRfj-b$A8&4Bc}@s(`13$cU?3Bm}4W!E+-Yj+@we9e3JuD^Exy&^!T z=?+PhslZhb{Y5Hr2e-U$mMBkv zNU+V_FdAX}uqE)m;>&L4v6EQ{WUvh=LNY53dwa33EIiZRM-YmEJnY7E$}f^wW6n(g zeLATMPUO9dm7jcGtA<2ng z8HO;L9Upw)Bz3V0})l1kNd)(XzEKU(zlS35Wa}3}WA^50(Yi zmlN^Oq*z#iyVZqF_O-A+%JGx@Tvs%%2(70pxi8Jm@HKKkLT@!eDseS#FUP$b$53(D zQ|oD?&RSxZIR=Z_iRgMefo+wya@(;#(j}-0Jix@$@Dy|T z$;Vja^>A6~XZbuIr8uDK*i2P&rw8f&1G9sC24`+U5avY}58 z#VzOZYo`E0Ku&sS^e6D;oq_f)_9O`HaPo$A;M~JjccFKus~A! 
zs$$EKLAyD|8aH#wfP3t3L1*UuKvrP*H{EDGiHxO!FH=@sXvxs6+kDz&)bt#i507 zw#D-cDTQzL>7|oh?{3MT?M|>13uBXI?*q%u-Hu+^?sAO$R%G0^RisbrP$KKtjxw*~ z07ZhAnOVT{;A3D3d2&sj9Sw83XZ;0r@Fi>1z0R+g@++`F{SCs?%I1 zt7knj2;c}MqkfMUL_nU5>r4RT;kYP;93bOV;tm41*zeq)@~mEl5{heBzK}@vdZwvk zFG5MAe6C0q<6Uq_E8uM4?d+Vp3aI36db^Zc%I`%k(oh~C=lU;uMX#rSBMy5AON*RN z_rl1{G&FE*qN@oMAt{FA|vRVKcMiQ+oe`s`E+2IEnZF}m*CGqnaa_ZgJT;2ni^}m>3_c9i zFYM)%C`A_AGD5A)NSL9iK^LDRmU)7}hiENMbK6oa4#Y2c zFna!K5XGZ~U{O>UjWcofr@F66rduYs{yT>1`J-Q7K|yOD2qGel1hQ0YrENL5RUV{J z4w-RsO)s;KV=wDh|9X?qJW)>4!diIe(ZL4dl?Ws|(NhWMTge!6XI2Y_h~7#D?=ahg z@jM$(W{lMfeiTekdy#nL&Y~Rvdu`t>-|-!dHohA$?;}kjxc$OC7#MnGK9mGziApUF z^b~yf;z`fnvD-Syrk$e|ETtV-q6i>gs;H6xnNX?@ebYNtoMjn%9Jx@~2EYJBw3Tvw zS0k0Ac5nBiJ56J>J3=ifjaT``D?U@jb3;_C(F+40~rZ8ezid7|t^F5#q0mM^F z`=Rz*_2o`tzIU5h`ymp^*-{4piDOpETy_PM?7gcfO=kQO<<X}i3X1)(K z8}FygzI({~_6>g^Q50nP_qsOSlZD5YT;(GeT8r^0L$E69v~Rq!m0ZHN?NRAA#sIGQ z#sA~$EyJq%gRWsoX*h&*hje#0N{57WcY}0Gr$`Huf~0hJNJvYEz@fXF13Vk_e?Rwi zz486T-oKbxvu4c<-8(!h$yPIu)y(vY^9fB1bOIecl>7_>4J|7mLt@5IiabmRiy2`~ zW=yHA4!00VVX4^Z*3GaNXjO!KnAlc|I7DmIeYMf$Z8bqA^9%^QsG#m2`UWMB34X*i z-~{e?UGI>G8SZbZG~Bf{pZsce$gMLFnPjmE?c2*V_b}zmzD~-(5^_H2&jx_)_Gdt| zQP*8l*?Sbjvs9R?Mu&J~q&@Q{zi>*WUHdmX4#!W{s{vfnM0e~@0C;TqoTu%x>(vBF zv&y8C`T*-Bhl}+z_KW(_)!_3kvkr*3+tO*!Z+*m*=d{*|2)?#v+r*2e*go26)=@ zN1*e1VWYAC+%!mrD3CHvQ1xJ>?_Ar+yOA#chL=wb9_Y^MuW{B@o6pQ?s;aw{sn+9= zgF`clE0H}p6Q|q27+SZ_@RtD+&-TW$d|R_yxZH8Bo){JBW~G?ZRDc$P_!4nOTzLYW z(tU~00l+WF_nbJ-->TYN zzoElbNH1+mb=*A~v-zl^HQn=u2&*n$9FAb@_3r=?eNK@21H0tC+M-fEMMeaCqtkfYZu#4{kJ(dEdEjk;ERdt~%0>+`wtGq2Q&0N4vBVLD^F zMZEYy?3Vl63pMeIA3xBda9KvCM>ec#dY%ZLq|&uymP-%DJiRCB0)mgiH!A$6+(^xwHC+Y}=fb$Ka?AOh-1Ir>Awqd`gzQu-#m5N(KE)@?9Su;9v zT$Xd~z(60=Cwi*7sK_OLGs=L;GtJB6sTV@?E}NRD{!@>-hnp>RSewJf59#@l6q!*b zTaah5#oc1~@?zUw;)r(g?s;EgDfXYTsT(HqxAAma>2?P|DL?&}lyrNY!7W{=uL4^{ z=xP=&XtN?qJ{^Rpw25=CHiYt0@N3J|G|&9&avYFW9)YwKwuglHp_i?d50C&)olEjX z2GL$T`3wvrPyK&(0+61K83&hvEmVc?5KyFLzZ6!iy(xg6ZG{Kl<)fkTZepV>o1+l? zMtkMYK^S+x=3dB65dVg1%6sjNsg%UXX z^H$%%K>9#~F7rmqUP%A2FUu3{GR%&(>#8s>woBjp0ImRJ4m1c&PK`b{eZ<@GyuX+! z*`if@3MrGE-+yNxc?>AdCu0d^E#u4!@jQe~_T8L*y7)jxL+AUXmYGtzjNdXf(8|*K z&SY+I+NZhyas#rn(J&{2^c!eRXHR{S?%kG5;Wy_#ZLmqSDeY2z$gcXKk)3p@e!*Tq zJ&MPDu$_%8>$ShRujwz#OQ9BCoPi%P0DHZbnitnu0q&kWYx+?W%WWZKARYRo9i6-Q z<)ED~?+mv5V4)3=j-o~77J%yE$NyD5Y`|)~sws#(GxOSHW#XWupP++LEJgQ1CyauL z%?Ot?%vMsz2If$}>SsVmB{R$lV|9*Oj$IPgzR3nJ`hnVisN~%G9jkoLGyhX&*i#HH zAXKrNT&wi<3p?5ZL^X0@>uhT@A_|CrhXlsxa zcE9padZ4YNlU{~DI63LEHKKkRKYGTCWRe*cl-Rdr&xC0BOBqR?>6OdXC~ml^)ZE~M zGBk%7yA)Skez@7B)nT$ouc`>Gi@ac#d2xuZu*+o*^~vB`f6_gRI=@v>$sWoct?s?Y z;Ug2G!EnU>he^&G>&DSaXX4$&`BI5tuaOLw>D!G*qIUf-ul72Mw2dwm0Z7*;nkraX zU!jy+g2bY~Fz3l}1EpTm`@_WOHYeuHs;5C_9p|BJ*Og1N=8LelwG&GS6iKNf4$S)g zU^>Q|ebm_z_8v-jf^>pO3-~_pR<0_3ZPqnvG=eTiXm9U}g#%yOP}>UpTsD|;!ADwx zt@b#wMy)AcZpg+~;?+_9h`}8R3$R5g-)sJ9h(i!-5_jjU6mvDrxK`hizKmH$PVz&F zQ}l#ByXS1NBmz+u$jUY_x)^D(LbJGF>4DD%Nd7{Oip}R+h*rZmHH;lOHguo;-R2i? 
z-+*Y>q_HST2ea_zw?vcx@y!7DX`Ic@#{&DX@$CI}MWIDZ0$-np#)V?3L1ATa>zh(F z1q0V{sac?RTax8wqY~c+_7IRE{mPvp5uuhS)RO;hD!=Xz$xi0Nu0#$&dm~+rrc6+o z^<7gOVB{uoDEqcBXCCsIMKj z)Oy=>qNM@%rkUjT0974cGw$@9xe|)5JIr~^P3jF~&0{bBI|%u61+?ar%Ed4%f9jE1 z*VPQ)2cId0MuvwBow!=F+x*DojXsy?Ors(MB2pEy{(gH()Ko+~NA;fapFkx?nUUwC zzJ7Ux_>n*Ih1&<=GaPHR%;yIF2!V%60Y@~~XwvNltKK2Bj2$i*C%%$Cnp39=^Dnx; z*mg^IWY9^c@<{!sPSt)=V4wQnoQah)oh&4GFunU;n@En!;74cMaKbB{eERs*#|6k@ zY8HL_APMO7)YN0vjjw^pE{BYMQD!+;xF^j`>+21dWIzNlk}lp*Nptl?ar=p6IQ8NK za=vLKtMi3CmfxX&18v|Aj@2S=Bc~}0xNeG=|EC3jHS7bGm5K%B+Yi%s{OC|zyX*-k zz7qYYj?DA5XAu2?SWegxz;3KHwL9F}^Imv0Xm1tAg?H>MxPO3u&52}xi_`%gf<@HehDT>W3!igRF^qST>xsM zyop+|<8Op3iVL(FtxtGel8Wy2M-0M#z`DVrh5}IJo3lW-xQew{0eY+MK0fJ!jsgTA zh*RWda+8q<0CQ3i(D*$(iN@{+eV}+ddYZ9{1%d~lZG9&V4Qelu)x7-6MgXsWhHxBY zPDLWd&N}vqHNeB5w6=2^uW?NuuVN%LTw0zn^`61Hd^j6%Jfq8iZ&WFM;Y^?v@T)tH!tRepQHt7Tk01>aIPQn zI*V<=e)I@>|F#Ps-B>|xCoe$98n5iI@IaD*J z2>Ya&ogCC2q7fsw91oOjw9)ydQDn|Rr{V-G%N1|MW`L}oE589oQS=7Ou$9mL7CPS{ zxYkFVvs8@KI8Yt6o!qgBLm&PI(@_tk#fIklrM*uCecRXvr<{i7n8Bpm)$i>cpn_d~r9Z>5GuG zqs25&*W?<1Tck@VH}i+N|6Ccim}r#Fg&&2?{;99h4-E5D4Cx2wEF3D9W%qSi18%^D zgyVnGdnUcV0yD-PUURTe@t-y)Wc?}{J0bphTQp0c;=Nz_@`>+d)N6W&Xh5O_9whh* z2v~@Jw4BPe0BQQ&ligiw>fnWWq?qQn!kFnIi4a(5*_bDl> zQ~hf6#X=88SAUdPK;(aluLGSaMd>!HNr)Y2FXU4Liv+cPzs)KB9RR8vvy>dSon8%~ zRP39V;0fzv_)_F0cy;_W_3@^;LUquvsBnq3NvgHHf3m`)Ia$?i@TY^HakskfVu>UuYTa)IPB}b6PMK3|HGQ|4oY4Gb@U3{ zM?ba>orxQU&jyO(>~_Wq_XOdEM$~*Q5;_Rs5&EAZ5<_(aM*pme(b~{Wy%z{_iYdJD5AII- zNKJrYsKCgZtqQ|V=x?fY)ZRBlEKTbUH*q+FrQk9q<`iM7}(Dv1Psc5eoHl z_?b2&^CJAEYRwCyhIO@>G|Uu}^7gx;~ir z9#?!I0Lb;9s`uY6S#lOze^9;MJrWvKi`$6O2MUl{M3FNTnTe8Sd|WSCJ`v6c;6pnS)ldA)xXQ40`n!2F1Wa2XM(cszlMxbBL9{y!_)we-*p zK>4~%xll-b)-4bHc($sz%Nf;ty~nn5@YCOChhdEsWuE;k|J*v1|2g8+?ADy;%$?pR zmYp}jO43X({$IwS2??O)z6z?9(DS?21Ft^S6}t14X;9TUg)Zwx=qF1EOZ!&u`Dp1z z<7!AM4$BbsAdA136kbx@3g7{Hm16bdMcK+NJP&^D{oTWorOw5e*M}Nga+Rk z801f%Yw7S;R7@<1{LBWu>?nVa8{P{pP-AU{-3`k3I8HXmxoK8X?%d}I1Zs@W3S>u~ z(@onNur-hi_9A($Q66Y4Y5tg0Fy(6VF$z{pK}1Jn7lM4nuYjb+lYTZJ;aUPm{t;*vCv@oEi8@;`~{-gG8ClbU3XjwW!>YUd!hwzYN$`}NJzhD?ro!K!dD&NYyyxrsH zU=aEbn~br+dJM1~>%s;G>et#Bz5UhIyAaohwuUbLs}Afv>~T?acQRh02HKF4dtH|6 zqo7~I`8xm{N{5oh&Vx|y^CQuzu>ajCKaQ&(w!F;b^@c)C(`bv8hJUE>Tn4pVa==`2 zsu7*LO;Lto-%xB#6Fuym#!jY5);J^0hR>ebQ{NGv*0nbcX9OAEwe?BYCEEyLpUe{h-%fc^pq5|=q zVTVxZjjF+@(80w_cXAlYv)ca214Y5^TcSjm26U8m8okblT_UtcZZ5zjAixFxNgD;< zYn}wtC6J| zc_G=D3kPnr)5Pu`cqcZu#qg{XX?-8fuDA33*ZvftlYiEQpHQHD72ztawDGJ218ny& zLU0KD3m!Fam#fdaZfU9ND8dl|!gLL|Yqt~e49JzhaVt)s)w>Rq7YuL6Sv|47hA-RK zQJL~SWdL2C*Cz-$B3AKGF)Zsx>reT{6Pu%x57)to9so$AhED9(e1frztD3K(|Fe^a zQe-Wrc>03ht}ouZ^1L%-g--Szc(WoeNd=7+Xw=0<=@?D>Tbf!rcH!m10fe%r_^#VY zw}s>2{gB5cNU|lrkTW4Xjd@WLI@wx4kW%W436y-It?6y{fE)FR+^d0_z{p+xcNh5K zkzWW<@K`Y%+Js>o(^ag}k0)Xn`dvZoX6FPz&eldZYMIvXong-pkb~c$ncmI=nk>HKIQ$;ijyG9BCBDOKaj+pQ}B)rUW1YI~b^DG5I?fObogNn9$%4 zpuSIUs3spvb^4G~`D?)h`)wJ|rNz(bVcO54g+_m(ug$(Jq!#1%g@1nYDHOy@U)+;@ z?X35W2-my?e%OuWeBO}Iws}o>)^OIG^`4VH8kVHR0`t|7AG(SNaRH?%XL;7UCQTfb z4ReBQ(ajJ=VN-L8Xu5!@=@4dPFKM{wHwMv*AE4{z@5!i!@KhBE#EMBsM^b~{Ph^U7 zp1|yr;p>-BznS|L6)^DTs$!(Ziwn=mw)&2I))_MJ=8JE+@t5{x%7H$jtTFM+^2Sac2;L8`;8Mo#=}*UlNA)-SE;lNGWO zpq$~2{U~tMqL$D7?M3XH;^xd+L(`hSk?+_BB4gY&oFFCAXn0{_U1Fu%r+}m)p{-je zLv7J-@QS58+<29gy^w7Tyh`hiwY%p7F8;5s%&f#W zC3%s}xz`_J=sCYxJC3OP_vZG$;7oPWibu$mhQ=9iI63#NUkOt(kXdwiwWE_<{tAmG z7c7J*t^Fr@Y#o2ruRX8w;WDhd6dyVa)C$E?!?9XVA}8#N7`N;~X$=8tu>bSa!5#D< zOEJi4!@jSRPl=hf(G9YQvhmD?Al@s(K88);5z_5AhzuPm0zF(7jn>q zNr3_TL;sadJT zv8sudT}pftNTjTd$GBE28NT$FU!kszM0DCQ2MS3OU4|q~t{A36t00_%qCZk{Gi)sm z)Nj-aR=Qt7zkq{UvlscnN4;CNpr|CB{6~iCZ@oa$yKMrl?fM7W{j&+R_(Hvx!{*c| 
z(;SM7(hkiabcLo@!v?K+yF`N;v9RJNB^o(t8asW26Hs+?2d`(H%dz6C?n-Ji82r-P zy-?$pJcV)xq(qw#D^n-0nflU_grDa56*Vbm$QYvdc+(ED7bP~U_m z(IJ#RI95ux+WtEa>HztCK+i#a|IAJTDu$3AV03e%$}unp{%(WCKjLcHpHwgz)KT>} zCHADw)C&#(mn4pOO;~H}7)XEaka{CZ$g@wo2ZiIlKmWR1*})O#nUyf+TSksXr~`W!zeZq7(A}z zG*uM<)%@wogq&Tc=_$(%V-sTo9)D#-*}Tvtw0?868|6oEjzpSfC=tD$dH1I^W4rfi z0Q_x0{M7pVL7QYipv5(4F!Edqx?TBH$9@6sx>9#N0`g-jYHA-NuwENam%f+1wPss1 zf%?_)_xAzouL5n9!9wYvfMd1_AAl2)vs4TI32!_q5!d%=Sh}chcZjM${xSoc&5&xo^oBv zSe$exHg5udo@Qu24?_~qUPQnq_v`)2hP2HY6}Sm-44v=6IYkDhGt zxag?On10_d2UUf=P?un3GJMc5rBNz9xPsnMF8PDmT23G)tP_J6^@5TsMTL5Or=8p| zAG>hFuW3+3Tq3G)b_UPf%udo-y-hmI*D9}!u*PjQ$+UIC%wkZ(rBJl|pqKpxVOSP@ zafdfM2HX>hRQ0IVT{G1DyRlFFKDYDdTQY`lKMc1$rc21n7&@f0yNHaWpBj*JOM= zq+t;$(fWS;-1bH5+{|+eRabw>q7rx8ND3YfWl?5sG$(U30TK1dp=u22u(QYBjDh;=#3@W zUMUy9+hIi^4qY-boM-xeaOuBPJL(QdhSUzv`L6u#JP#KexAsrVmW%JaXX-T!8Vt*D zBTFmVdr`KlB;9RaGw+Uvdo(+XcYAy58n^sboq&_=>F1%SS?3T^Gn?uBVq_9Hi9RmNF2LbgYPDI>ktqc&nLXB4|oeSkZC3qg8V4UFGv?PdbEBJ00)=>q6lTnxT=^yVN*)-qo#A=tELx;rnaBXQ?NrP zy2_)8X(84%N0W@QU#zLKVZ9OpklI_*9IR`BFUe+aA<9c$Z#a(#DpC4)3paB?{aa2c zgdPnF-lkrGZ9`L2SP71T91CDhkE0fpeaWt){MQa^ka z5w4et%2Ea+VtE=0uskG{sDsX*AKCeTu42(zLMMZ!Y3h%4s_!ayeuS+QstHG`*udv5 zlF=uDW-}EqEL$FU{T^dnSUd7FP2vQ8c2)Hnx)`+4z7%jvaXKuBqRRV@)dOJ?>4j?< z;#z$%V$a18U$^6ZN|fo48)b}iF;tzexM@}_d3LiV7kJiWo*1t?qvEkKJk*+yPs>;N zf~GarvkfbZMvw7z*{foCifr0fTYYxKxYeF9>JG^)pv082F}aE5MU#2nQBQkg3DXc7 zk;Q`R5%0S?{Tf*;SBg(t+wd2l{x4B?#!48EM^W1$ONiyFmDOL z5I4%Qqq~sQq=fjgXXK7FA}#$Wpkp8}b#N`;Gj6qBd-TXJ9C zv@#OL2I*tD;N}4)2=_jP`GFT%tndZV&=pTdp^|h&wzVg>v&rb%??-npwd>*66yKS; z9CGNy>zEa-z?u&tew)`-KNUbcwVG$Y1GV>f$v}}(*JhITdm9Z2T6s0c>5}_2xjVsb zIP!??837#vs{4wt+vbx;&hd?Jk&?0e(&eOIFNX$9SxQ4xmGyhP%7G144-ab``xD;cCz>#ABFEmDPK0ET&Nl~ z_7T8yX+GJ2Y}jdvK;Zvh`3|qr2|_UP#i5MK0(FT@w#w?eD3t_by@ccXFEL&!7)0;z znMyi>lE86K7FZ zS|G&Hia(FadcK%i+*>yYhwXMgEaZjanZB6Xb#05FG;4r~$T|MGT@{wG&6mdZj|9KQ zp#{&kCbCJ)haIO_=@XP8lmbPGV#G+Gj)->IAbL3}D}|cH-|u zZ6Xi%I?EkjO-xdcbixVt-5dF2{*+F4BFa?}xz1SZA^!c5iV?)Z>S}z;*zG#f@{`E8Pb()Un09yP>Z|)X6gy~Rl`+tZ)+TejyP7`wO7y%5ZbWF zDqOo!NK@{%q;{+0npw}c9h zN)7pz>Nw~L@e$%EB>g+=f`N(03=EaZ=XO*A<{ehoc+o`i$htnqg_MqvC4tqnoAQ+* z(noatbK*qjkB$xM(8UGlD7J(dHQ&Pi{=zvta9zA9vnFMqt=BioF}pYe>D%M)}jvtd`;X=fXezT3k(*3(a+h)*cy1el*sp4r1dhAD2F+=^Td2 zzTmwu67-+JX#n$xbQ)$^z;ctknyT6S+=T60J>J%`*|PgGtFlm)l33W*^EgFYMsVXURkxZvi@IT7w(N`0&2@aFclsN9|t$;YQsEp3M6 zpE25@69NQGOd+?M}muHKlzzk{7mZeM{Zn#O< zX+C)<+CVPR?5JZK^}SScPw8u+dt!&aJM>wYxRztik%r?iE!7NofnO!Z&G1!H4n@yK zPzUziwVuwmi)T;=MO}dsXy3&WqKDKX824z<;7AVOvS>WrGL4uQrTFA?Yms^!K6y!_-lF=3U z+cv>fMa&T67g^W0QXYSn1EQb^!6~ZQN0N9*n|e805=vkz}`zk5x&~`2!Ggi<2=;5f!;5Lvu=zmSpG-M z?)N|vE5U3(s(m=+vM;8&T|LJubxIU8y8tr7ZHGNJqBYw*-j{>m^+1e5R7wM#U_!q; zj+V>*KeWxL(IK_5T@IZtDu{NBMeCJ!B5;k%O1~;f%H)VKe&9KXw_@@2S}8|YRvboTsHRT78ABt z-oC}h)=~fNdO56yW6_rTK`Y0P_Jdctj%Nm}t4XfF3<@rTDdy+f3kw)q0WoE-FaN%w z5B(>2nG9jYlf-Aa52!M*@_QPAK55NG347)u^o%y2PtTN8wmPke8dy#My^+Wvy_*qz zKe?je&^XWTs@ERCA`10OP=0vX8PyfXNuCW?q06g3zmtfvwjlV15D5xMSM}Ie4Y5d7 zYQ(pl_S3OI_$M5DJq@I4E2s%HNCm^3iYXv%fja)jQjC7PY|~g7;=NYMb{|88uCXK%am0O}GI`Bbd znK8xeKP|vHt`V5S1<3gM6H|NRspy(m>m-X-L!7b6`W3ok!!o0Z(FEZy7E!eH@$K=x zoUAzxJ2cDWL}bSLNXEyFohh$mPM9DcUlVJt9uE2k7;_>me5S6(!rFh=)A)W~5M^b{ ze6@5`FZ{+xBqN9XdU1>Y*f5N(rNJn9Q<1`t@?1c8pH;LLOt`S`j2N3Dqg-N4M)LFX zYlqGw7Gcs%bqU=k2+CAHbiQj1buRqZjk0eZXZaWMPtQa>{SwRp(&VCN6}%s6o&lq z7|ikicoQQCF(wgqZ;OG(Mv@9P3K$<(Ilnr)U0-1zw(l2IL~Jjln9&NDA#KLjgh0G$ zd-9HRy#_xkf;n7ugY6M(S>xjTPkf^1CRBcs5gfkgEf)R-7dcoa5$f4`z^}#rJAl-a zcI%9GGK{SozjsRH5xs@|h*?V`KaY!Q1)@$!ShI(d7JCtMt-;vVpSTCy2SGrD{ZVjV zx09l^Zy&6(5K0Q;Hp~^M650jNylM;le2_z$H{0(A 
z^LVil*KN%xK8_e5T{TnnKd_7+^aiHZC~aH#U=ARnFUb?kTSTw{#33^{mEI#6987h0I3->$r(_E=4dgl0!_L5)RctG zgl8oFqVFe^{1sMmNH#fJk6>2}oZ@{cEq$)n(h%C#DTck0r-IMr-K!x%Xk`E5L!u)@ z1cDrDZWkphBu%%~QN-9a)!u|rE_{xQjmNfQn3nI{m#54xiCL~qUmfIj)Wj@N63)s) ze8jsWG3)VN;@GQO0wyzJ+bgq>r4Iq2rwl zQ%A$pN3$}57g_wUxV$MR5AX{G?vEvqMsc(jVVZuD%h9^1zGxBbsw9ACM4qTb04mo~ zPJUz6cZ!vLJeGT1i@Z$SaT(Tz>%ezP#Tff?+>zEb%#o-Mq5N&IX9=Y^5nLhHu4RY# zQZ4Ink;%wZ3b;sz`J=ig(_C270jP6W2d_5Fu~*~~!{mmkg7)~upp*eoM+Bv0odu0- z>b8gECdcN>gA#a<^hdmp68DY}l|%^jG5zS8x4|8eGZ=v&O21p#pG0G^+00{Vw@UvX zMZh2@dOLe!KcvqujFg^E+uzU;Y0R69%2QyNk%2p(!JGddxI=mZchuh}6`$P$73G-1 z^@i$*?rs{|PoQJst9(^NV6yD1&5^G*!h3G_RNeUlD$jTVaP^^xuDE_5X9FFQR%S^S z4N+pWaTLsPNG2vKnB5l|8$oPxj~AFxAH*38o=#4@_1u~7)M zp+J*rv<8vaM-H;IyeRtqz>)$^xYu|2yfn(8CC5R15 zwJhl$NekaS2RFpNUuu~K+T)UnZqEe%52=|-e@g@ynPo&iB>@5PWU(+T2E721zT}5!5qX?PXkluF6Qjdq zhIp5^qSW6%Y|-+eChqq$l4>z0DI+(?*z78U-Qn2BLd|f8m)e?1of(&$`vurG1!%Ed zJa%*Vt|wTSdz2ykMA4~2UN)CxOa7bfojX}@&AI_cUdLrbjB$69%>Jy!eX$K!j44a8 z(H(n}GOT~fR&^dBD%X@Oh806X=4yBF+*dETao>ZiFN_jjemCh1k9ogv5R)KsPWO&A ze7q1v#2F*krN?$;t?DzB?Q78wU?1B=cgJN^&6Un{zc}IM$SS%~mQixYQ{M`^(fzMP_*0&9O`H6&a}JG%Gs=t`DkafI zv_~tAGKYN|DG%5uvlNN=yhPI&y;F5N27y7}zW*g0IXpyI#nAemBWgIXXMkY z-4+&OJEPGHczG0_S)y!Bgu>$m2EKf^=kL(qKY?*dQV+)gi-&gAA}TrjrXNludnD}% zrJ|P3e_gbp5LtzfE92JyibSWPt#Nn^Ek5jm;hq|Wx(>fE;+UBFMKq1J=+L9J+O4eK zjP&xZuGdFWbXq~W?<5uryO|Q%{Kt(G?;-~a)IydKw**|iYuI%Rppwx$1dLk;57#Y0 z#oDaJq-4r)a+8`d2H9{$AMfP_H*sNcaL>V~+h3Ru&0E{R>Lb>vSvF2`P^u=-X01D} zzHnc5CO&;&)z5V25iAMi-DDp=2wr|+^11FLp-DvszkU18Xa#*#<$C^iBXpJ^;#hLck+>tC2FQizfDD0gOsmqPPgQFeAaUIr^(JKpu8bhiZgWs~e z_2NiD1s5q5h6AkP-=noNWD<}1hTd0$n)w29y|rm^M}C;~Heve>-&2y752wGpMom29 zcE&CxR1a2p(Pw$N>z3u1KaQ33MA6D-QwP=nJ)>Bxz%!*m@r|n9q)hV9_xAsd^6o9e zk)p{Bzso%NgbP{=t}9I*seTLh7Ax;B-z#fKjAfXWK8njGD2Q^{YK?QJas^*;2R`bo zYd#2T!{<;bk0vLL1!tz0cP^&?2qM%3OQT}3eUY$)l?0Ul_&;fI>|`T8Z+5?SsQ=F= zk>OfGA!q3T-F-^&@cy^SQ)nvEmW@TNH(%sPETA3CMn#xIY`-n@MnKG#oKDOi>ej|2 zq0{d(7Z_8b!%gbez2*nrEQGhG-_4(1RW-vFNUkSt+n%dL-L0NBijD|=lB1i!m@B(N zNIqK)aGR(3)3$-HxNerLWq+vohaT6`B0R&>oV=I}R~?*Ih3vv&LN~F@y_)K7DXkda zxRGlr^%JKFwU^^b2qkUeM1jfcW6!yueU?9?urF{N)Y;B zWkK-*HwI(mye2NS7WC7TQsH&3LpP%hv04k4xVIXw{h4jDOUj^Gb%t91hW-FIoF!q7 zLIL;USSTUaSy`H2rVZ=`jb>%sYWb8d)OS6 zwW@G^SUYV?R+pQ0cX>EIZJ9s*ZU>Mj1#&{k(m@HQx9%n#9Jh-6_3r5CWrg_d2@Cx4&6bxX^eUIc{w!{YMwW z*u2pa)DN0lJdG4yNI#BzL8HMK^)zJbIS|emD zVph(=74Qrd#Sw-vn{7SP=SiGQIZ`Bb`M zDeO;)w6ntNBkUpqmQKA!VpwhIu4zW|v*ju?X7Ky?8@aDkl3i#^FOeX1j>cuM$~YLEa7!m`(jFf5&d5(1wsA0~zee-aJOS)EvI?LGda(7%8wi zW?2Ei9b`HzQq4#{a`P3yit$@Y^s(KfAhcr3r^~&J%Bzd%cC&hi-r5qqt_bUmtGmkv z^4rFD53?`+D@IOBr{(l z%c(UCQ;*k62iy7LjPQgc+?mzOk;SdTH(i9oQUD0;ok z$urX@w(6=0b{Ddin!^4U)ATE{8vCznapV0YgoBE#3}NwEJz@i7tB`G;lpIz&guIJy zkBI3D41j0V@%%CU>{;JkPq9BI0hFQlf;%D++IxRa>+afr#VbL7j*WnQ8e0S8yY#k5 zZ>!GBi9KmT>blAFUaE^ox3UI9Hk(&E^5XhR%jzU*e~Svcp3J)_t&ZsU#c5`>q5rhR zZY&J-Ne%Z2{24wb$;fekb**19uxvhOh=oWmN{z`y*>4v7!wF)S%d}uV3M*BYIk>d> zd(^6VVsWB{h9IZ9Ci+{lf1>3dn!f$hk+Q~GTulMz5y|xQ)2c;iHoe0?i|M?emh|z) zVYA8w>FI)KYd$*i~hx+zT?XycNk|As6`Y3s= ztrDHF*I7Q}veAbFJBHF~utQB>rv{Y>(D zgRb|j%K_ai`OEVi)R&8<37C-a^pAdzpq3di1T5+D9;A-6|}fL_xD&v8#jK%gpwv{Kq0=abNSH{kb)*vZyFfdU633$q4UCfI3|% zNu%IEX&6%fnQ;%GX2WTZq1-V3Aje3<2HewODxQ_L@;zZ|TJ><-3gECUc6h@NQ7d!c!!+J*3w+YJ?RD*6~l4Xcq19cLn!m2qI8A&d+Ltn`cN{~R%(mEeV9h+YYa-;vnM z{0M3&e2Hd$;7JxM07jW?wbD`^G_f?qIQhJF{9UdsCVuNkDoanQOhataG8=oYf59`@ zu%|+R`BvI3U{(@Ba~a+IvU;2xxD0~b=kgp66DD-P>96Jdd(k^0F)U}ILY<;b9}N`e zVVt3)^vPCanP2GgZaOG#u6ed4?4OtXM<@J0az(@OnzY!WCn+2%4h>-VGo6s-x0)!N zK<~-t^?HTK`|7I~7bgV4L_-A9<+|^({?=oVywA`nRRT}vSh=%*P-*1~*ALEDA1CG) z#jJg>c79~1v&iWMp5iuz^mxzJZw9oQNr?1HPibPhO2~3;YInks%Aek-1eTv) 
z)xLEdy&H5=D1TnyDTMy;XbR@I!bX9YAqIzFo68rmcD&N-A@8R`!tDBn2Yezhh~|ja zc5?orm{xJ%^yv@K)v7%`F6#gw;ayibXG$3?oU(pum@@nbgf``8f=Ve~nWh_J3I70l zNkqVVXFjJ@YUt9Bvq>2=G5XPN#uI<`WD_KO`Eg#KaP zKblcOR!|!{XJuh{<;QiYD(Wxpz*Zh=rYhu9|6*`P@901Nb`KW0ju93iI}=^ zOWQGDv5INDKJ*A9AZ~brw{~ggMCvAZ+B$nR=HA7C0PgK6f;Y`r)^^B=PN`Ch1FH}O$=Hj0ZmNnc! zsL|P-+ds2O_Oj0e2@JS)(xx%uv3Jctix1lSwYdt!B~?q7t3H!bfwgae{i=Pdll3ns zSc7efsK4+czVY$XUEgSH+k~S+0rSRwes)>U$GZv^@4yC*W_~8tIsB7MJ_x#zXfcpo zmZQE$SSB;5bGR-rh2{0#u?8-9yxG2s*#Fi58)Af=fuOh_x6Bv&#Ri63q|#Q!J;m39 z8%;9AfGP9u$$ox(t$J8(#CCRb9kaUw(AAy|>qWd|VL$$zK^7B_~~e|u=+FXup_{Nvd>1G^wUNzK5A~)5fwf8 zmuHal9IA>Uena5w^Kp~UnNg(h8P;!)K93nX9i=&)*_>zo&)b(jd3#M~*@M5{p8bg- zrqS%tmwseltyiW6km10uKpL5+YmyDCK?;S`p0k11_my}Kfe}~3ia%EH;T<6BU{1YS zC7iWx)-wu0ZMofs@z_7yJKn`|?8xwJal1Ktr-1}(Hf%!*7G1fT9xB@eG_6jB!dm4 zuYPq3!FwLpo4wzZL4SfTPj9aF^+mmuR*MH)XlUq)40*8%apS71)-;CmeL6TCOHpdM zH+>D9lGW$7U?1i==+LmR0VsEL~@YqH7(?;iSg~f z_~-c+a>)l^?eT@KQ{A<7^rO;m5L- zjX{r{#_tvPuLxHQ82ln-j+&tC^MX1B4Y_tJC`p~x)3<^hhcU;|zA#vSyS(}O<}=r- zJ4NC-(f@vYZ|Fwdk!WI}(Qc#a`d)t`?;5tr84bo+eeCg0e3 z4#qyMaP~_eeY@~6t$^m4zMz45Vgd6~8d^9~eyq#C4t$wN#}+LNav`DFeF8~hU5_G0iz_P z8#cy>5hLE)C*Jq>{>{y?`?#<3IV*HJx$h-2lV^2u@f3|Gk}G;t-C}{! zL?_SULub^>hnPp*|Ksh}I47G`BxK(9N)vQt$7qmL1T60@?NY_=@JpA3AI^ela^r zSWH~^=w;!n(KvuN_uud8NnWH4tZQRCcX{zSZyT8Gt-)9n0%$m%x?BiOcaD3B0nvE+ zlVw8Az`Zjb6kgVpTTQ2azrB{hI%pCvAtO`iEa;?SWFFzp-6~kcqxD>Cf)ve^`F-o zhz0D(NaEkUnsIZD_r#6HoC+1a)dXM!qsRZiJ*@gaOV4>vmzwiM42AuD1Oty7s9iEa z8dB3uN3{snIt~xohRjg}CI2*7<0RU)97K)J4_H^se(9V)8vFR-+EYi=QOBD`DTV`+ zKxsC}(#55Wo6;lmS(aO!q*kw9Yr!ZDS@8vZK-$(T*7UDM|NTnxs_#FD&_p)d=G7Qo z9>&UL-&4AJvC)l^!%9i7Qs#PG{DKQ6@70aJmqOF@&lZ_(983DAV0Is{I3e?=z1LJ2 zndeij@Dp~37*%%2ZEI`muGQVm5h&#QzaK8>m)bp{6Wt|GK*0HZO(QHl>Z55}_;45# zApiLpdv@WYt#eG+iu`5FZh8BPc=e1&i?(J7c)1FxBR(@^4RJ3!(sa7es_$W>p5C`0_1!S9;dl4;s<68LQG7a z^)4S-$xbv#A~{;!I4|g&JUmci#ir{ob#1XUTIrW{!S+o4$06_sMcm9%=!jl-Opj$q zBN!!Jzs0nSjyQ08C)l-S&iYd;q&V~MFw;N*h~OHaOTk8e2dlsy0>slMD|LVIGYa!% z`fyT9&0PpsphG-qGLsbHzeOvz++zRX-+Ln2KwB~<*hUYllETwV8$L}AL3wxD@IG-V((X{DFa`;TDnpXzrlMeeES7e^8;&}z+VEu(5~N-lU?^2appX*~hLB$|JAO1Unc9An#t%15`&R5##1S!0L#_wRHi_ zGEr^6l>gb?exXk$*`Jw5G2vev#z;Yz$XY}%U87(rRk`f*V&u0(uDexJpe!lY>IdQlO$K{%@8Byn5CJG|#@$q3MIJ9aszT z=8wnOB`$Z$fjw+lR?8YJjcJVCX(3B%I?DIis_=L1LbFM&-F9U2N%v0 z-L$+G2SbUIhb;y-?@2;rQVyvl;57n7(00bSPm$^NNzt_zlw6JtM;{rX$HFIP3=A!? 
zo8>t&CLxQQVx%4^>P|XmPKoHXhNMvJena!%3;%yM;@{^lAmXJ4!nzdna=g2#lBEsr z&6J(1KF(siF{~J+i{xI}L|$w4*a!Q7)jXlxqrX^%IJzwd926hmrE32i6f{?4C{aTI zC@}e1=DU_2Qu&Q@Y^`qB%74{&K&NrX)&T=wd5(9)0mU$geE)BW z%)ddiCfOw*vw%P#PZ+;cT5w3*U!pw(F^9@+AfvjxV9SWQ$m^Wq-~`}m1dQY0@eOs0 zw}8mP`|WaVzTQ86Y*ZAmr!iNuwZ(sTWyVd5T#E`a`Qm=85oUB5F~;p4g~{$1D|9y6D)HGXv_s z7BS#F{&nf!Bv{=-=$65qdHqzVSrk)V+`p;TFYWhe*{O=)7B^9{mUPi;ve;d1tmoW| z?Hn;Pe+Woo@;i-kZ7j*Tf!W){`>I;GFJo1IxVsX0Qs;;at6WqjFf;Tw4kRN1O<@5P=w5RS zv;kf3KUwwbzI`lSyZB}5Jp3v~fkN_a3w7NL z+rd@mH1L6q?3KGGpEacT9|KnT!G}oVqhMXnK7w78o!H&~|L^!;LNvG8I$5+al z`EMwm9Lt(NnJCE$q4K_@!YFGCZ@L*og_2Ljzx}W2^(WODWxvE3GJvY|EDXnIK(21S|CvO+-BA<#`n2xvE0DF5*&oYnvD^eTFumAN+ z@S6^RIo|yn<@`kV*qV}p%$-L zYuPl*f%enepSXc9z$+b8xS@c|*Gi|?*=hW>^pBpo#qT*FigRK7$p5b>jtX?Tu44_{ zl48a+u&oq)%Kei-q)}brKi&yD*FB7h53js!P?Gl4k+Ww=v4bb2ZdN$MR!&lhw)u5S zaY^mZEfR`V+!h~dyyeR{EcyB4M7m4lJVRz%j<;DD1A9Gnu-W^LH>|$0lESlOjTkB_ zl-1O$uDe<}|DS2KQM}U-axQ7F_+~xD&v5)#(QDd|Ah*y<%!+S1%1u__VL2p{_VwaS zpcc~NrwVM5ePNA}d;e^awBA3_!bzqG*KhZgPY$J`w9SZJh+JKTQYkA@lFD~-oifcjRwx9(W1mtTdE$@D^${}Hp@=Nkn~wdqo`@q+uGWs3Rf z#?Wgq;8^f^O8D|VW7VgB%N+k2%14TUTCwI4ciZ1@I1C4Qk#s*&xy;#QpeE1YUuuD# zj71EPf_lerjgtRYE-+9B><|s125NJGf2QGFr>0)`gO14Xp0Nj|e`Bpp;rQn(7i0qSAfcQaF%;UqG7^5f&f_aWs@97W4U66SW}&$H zagXB*AI<$Pxw|y7h=4r!B!t^3-f~d@qOZo=P~V4c<5Isr#RA)U%=By%_K$P_`@kF6 zywa9>t5fvziqA_4VOO9|v5$RC0v_nPnfC3>+H}LP=)*rt3}n(kfyLFWD!3U)88r9s zoeF2J0Q$m?iTqGT)a*as{xbH7#1O|LjTxR7H?>)QL^0b&*wy}}^tiG-myJ`nzlF>g z8$qX|z@QO8uG4fVMiqJq`P+xtED$SfLfE}#5z`d()$F8g)@8SRZIy!z zGvQ&l+YebL-z<^JhoPz#=d~zy5kbah?Pb=xl1?8ISuOhe^yjyVpfPTbu9t~Y3B(SR zv+V8DWb3k;2`}jHbd8jZ=wf)8r3A2EhCqSw8H;K;lOapH*1BKJP2;@u${30d7h+-9 zoW^F>>bT{1rtC$-)Wp}*9d?^CO|bo=l4H%`OhJ9b80cb3l!tthTiG&r!FSMEa|A~{ zL9njfDbHR-`$Q8x8@wfr8z894JljUXUr?_SYl7XsTwT=P&#E40@z2lD4OQVUg zQjFomi79-q7(eD=H6lzCV*cq!X=AC!%{;ZWOs>K7*r>u_uu^tVj1!uFr=%fc3*%qf zU$NPJ;tP=Rqou7I}Nr zxuAUN>X?7J&OyJ_EwV+#Zu<=T4{XcGeBg6Cef#)(r*lUt!`Ztyo1LOxZ-Kl{< zRmUByEM0Y*%lbZr=057NA)A~i3DEF6WI01%7Uapk7reO*M-8hMzG?+3o!#s+=2!0A zXV1XG8dK|^k2W^L-l4^Q?zS2BlElk;mfg^e*{0Px*wjbikm(+FCrkL;DI7HU0GvI| zP20^5)tP!zmBdF|5Nesijwbm%3w;XO$!1Bk3s~;Z^uS-+9M=aKq z*jOb2^Ivw<{8ha$sO?K+1vfY%pSnZxbd@=3u}Tjr!83^hyMkyDV>81PUzc@}Xst|1 zDC_ywXg(oR@TXvKulap}lF4V2?J_@zA7*#MoMa|!HcRK@?*~>VlRGWAFX6p_@q4mU zqWMu6Ub<0bXlhBE!xU(pJl843}z!NKirSrfOldkW7upkpA^LInW234RUkU%8f7OH;Eu@4CC zHuKo+j^=8t;zopMmzHXy@k9F$oY*ne%xOA;-o>hNl6^uD=qRYd{8-3rcI&2UUbW>; zZIXRmDGU+-DuW$omatEpBzwVD)u8!!42I{1n@EMujB5z%uCT0G>RH^ulBZlMQf+qB zNtd;ZgX95+xd}KaPnuQ4fb}})dKK2mE0;~N3rFh&{`1*yB0$<#L4(mL2*nt(61&qP zG0DWwCpLoPC1r_zs=}H%(I_5>u1Kz9L|>x`w^VhitS7U@2GkLtF1^QJ4hroQPQ&-Yzp zH_^N&NT#$U(9z#qP&34a9f^2PPSm=&3QT2I=0hK235h z8!-l{PNE%8aj~gc?yT#9IX?F`cD| zQj^LeG{s=9_Yr7^iP7h*_U#$S3e1A^(CkBE?aqY&l z$9gbovxAn)G-(p=9gS-2cBCK|WgF0v;m;snFrEVbrE|7@tLeb(B^kIEj=Q#O7BC7hLoFw{cK#5_ zXZKALBv=Ese~kOCX7q_e0h)bm9Gy|qd5s@peKbtF1ONIkBf9RJ{oh&F-i4ru=DBLO zp1mhKqjeEt;YmT2QYy>s--e^eV;ELn8a>%0W(#80c&Q>^Dw%aHUsYu(Trk1!$ua%z z7H??!x_wd&nHKv>r!YTUczxGUliYVV&xUB=wILQqYi(4Z3?Qqn=?pZIU!oiuhd`jqA0e-6Q*{D^Ta5{iPNoJ z)Uro6i9%<|jzzgIeAcVAESOvDF$G=11qm*D*qZPZ@@5!1(w_5E!Zmo*Wi^%uB^%|M zw=B+vEL+3`k4Cy*!017h+6{n3apT+Sgf8joEor76IG*iDS0}(|+U5sJxAM{T)p?Bw zDQ08J;YM4h3DtC^_cm-ukU}WtYIw=sm`xmXm4qC7aCH%kTw^sddZ}n{>vU-- zG0toJ2+MN`Y5h3PhkJm5 zb;~b2r(boJq469gg&}VjtG&=p}hT zcu2*6ea6Ls1MV&Bv|V4vSDjWQZF=~(_^Sh6(LArSqt#&R$E@LuAylmN4rxi=)^j42 z%{mdUXY2&tSTXgUehh+td+GmU@6SN&d`*BX2Kg8~&c9eyY|)3he>40|qF25hf@eAG zNNE*Gu?9fo!|ros9KW22I#RtD>{U^6iZ(wJ+p11&RG)qJeF$Wa$@E#Ee5r4cQi)?y zSQTHAcGH>UgFTidgE1ap?p= 
zv9?&PIF4aSt50XgW#@j4pLM0`4;P|ft`4?Vwb5$!T*Ga0w#@9lOSA!-#EEo%?-f7rwz1D}M4h3NjA-zvnF>P9yzpMn<&JVO7O z$lJm91p+MXW3Z&`kC0W3K>roE-KS##dxh1{?wZnAkI#M^g>#;bmS$=~mS)o7e}X^`oTsWPm^FRQ~EZ5FnsW{5~a zg!}|iH*_{6p9*E#AS|!=ZxQiIkl9UWdmWN@2qm!v&wRlb41$-8X|2)meWb7{G5c6) ze{)b!wxN2uN@gMzGU)KytZi^g?Y8C=|A!Q*rutlTll)iQL-Hjq3L&1wQO5MR>$TlS+}{25s%K=2yRqGL$ zcd~FkhJ;i{u;fAqv@7F1z@hMBh=-;a&H9{eQ@2b<<%ti7HCpZffCbjLnt93`p0Ik2 z|B#w%|FMe#{Kg{f#r4Y0bF>XzH&E`G9+T7fOq>u^DFS|zjfP5ank94^iNmahDdlx0vL) z!owk)-xc;pUM}{ZVGrN0QYg6UMRCDm{b>U_r?#WpJ+2I^(1wBt5ju?IZUXZr$0jLU zRIHkpJ2e%~2yrn6iG8zJgBWU~%DErMhj2_lM|nIYbYd5-P#r_9pCY|-$__C1Txt2I zpkgQ=w%ru4UYnB4EWmcJT*jhDqCPCwqE2yO z-U-dNW&kPgEH@oMjRtLsA_S}_Jp2qYwSj5#KmTgB^X2hgF+`#gMde91MroMPqKNM7 z^zDwX;#8zYNwg^De2GtC!fT%egs`IA)!=>X7Tx0aOI_3Ej37s)a!z1B7iBxrY@m@| zieJ|I(KV{NQWlQaq$4>+@|>VF6E$}iSBo2fTz^5@W+&FW(Rw{i%rhtePwx5KYm;@w zM4Z0nJs{Q8H90(uk{P9;3SvR4af@^s1fCxRx|Aj$Ii{fM#vC|y_{$|2s)~mD8IfRq zJd~(lKe+!O}(dnB}wMToZcl_Aaiy~lc8+r3!TD(8ge$js)51a<;zE$ zj)ca$E{(1MiI4GtN6V>qU2BGaNJN13huR3&(20%Y;Lc&2HkrN5gGRf6eZaKAY8!4y zP=(N(>;%BkQpu-K|=PefopjwLShzoGcp5&Fujwi)PkGUFkdv~ z#SCgHoH=Q7tV6$S{(G3`&6DYEN)DIQS9YbV;zM1-sDys@n58QzeCj|p>8>Lc1YmC{a zCtoh>*sgiIv%H(@tXJ)%BZuLXK;0sl=u`vO{NcJTYcS4%Cp%98#xx|RQDe+Tm;~|< z7TKbnxL`wH%jNfVcU>~kjSG$j)Z6PPSdXr#3z$_$g9?4ye7n%sv@URkh4<#Pq^>v& zT;3n)OV0|=pr&PBle4T}YCrjKO_))HVSUrLxpxgbk@HOIR2P0nxDsP~1mM6#UTq6YC=)2q7i`R>#%U3od9wn-7HO3yC|0;w2s%aEAojqBR zj1iB(@XE@NrnH_XFj!2g5_dQcVTlmW4O#n|i)>ivvbF1-Zyl#15M*NE>+-28@!cJR0h5SydzHKtEdw(=p}tMp?%% zhoAV|fbW8ClQl@!8`#428&y~uJidv}D*N=0ilhXW1uP4UdrdjTybPDnU9y7oj>b72 z<?hH zY(7aTKIJvCDYH9Xzp#9u%f~-du5)k7(KnTF6Y(=#$e!kY%3mXWnIhNW4ApVk3imHJ zGHmVwDZgD$A_euj*wggp?NE`qtYB)fu|aJahACwwwO7|dS?mQ49c50sveO}0JDb{C-ll4<4udyc!9_| zf(u5I<>WZ!Yt!j&bpb7Nl~2r$tz6!UTNBVEEZ(7Q^mQ@&S+5L=-nOr?!FxCFvWg?) zyLlW1#>vrALN}B9W?8vf)%Z@XeRVq{!M7I~YAt=AW+-mFISzGHWUU1?+X+Jxe$XId zSiOMgGKFDLcJ@>_GvK5|ATcbc0nmym z5;8yA_^}s!YsRT|+IF&{cpLUOWj_+1Cw4K`SD=}S$t9mjJrE#x@n;$9}i{B7sctQjfuk7TAE+=@P6Dc2Il;gZPLm@?rp*CpbIwZx@D#?* z(^xliSuHy)FNT)gLDsyeF-qg~o-EO+gm(dqvo^FkPIETg&oX9%QErmsa!^kv=~E6I zNy36QR@aZv=XtZ5akOo_}ckx9#lhPn>;CN0l4T>~9=AI3d36F9bUN?hpyZ(ikNqvwm0qtZ0X6wAW(2J7Zebgjqc+BkeaDhL|I(Yq77^8V)eG|X? 
z+z)oId)TjQK5uGN6Xjq5QfZv-5o??Z)bz&El*>nARms8hE3{?nNbFUF8g)RO<=GB% zvs@RORY_*&qj%frpyKONN{4S=v)Sl~fuj2L)MUw3J+`XdTdvViTOAGkSao}-(|H-e zcloQ{W#tsE=J$Xf;O!2gPS(>90^=6nM}j~2qJyj?zr+KFQ-T7Tj;FbuiP#vQC`G~%qp{YA9>KsxK$dvuR|9H zzR7i?FWYU31s)0d{;-95x{a2z3=&#B!~zWi!5_a}pBuK%fjAB`!MRQua7)F$qvwui z`JozSTpB+jAk#z$hl0#opro`cKyBnYX*)3tlKGyeV;%UMl#A)fR}HbB>;x&Xzyk&s zJu|2$Q;=~iaZ(H6$owh>%v3Id4ThcZy;llu2v%Wf7)AT4t1_|{tA~H= zm7m`ZyK3IC&cDE6rGM-<+2(G@aO60_5Gx+9d?i)lubmLjp!3u>{H}W8Rpl@UR+XnG z3L=2ONW^DL#9^bpO11Dy5b88(GE_ahUcBiarJ3ZUHC#oU&yIT@$j^t-9`{_%FGFpU z7_v-yxv1Y#1T$8ohMBIZ=8Zm?qh3ftkyzA+ku7t3Hxfkl=i5`wpZp*Y(%A#at;^&n zYeDRFUoX+d2J?rohkoBW2Cj5;%0poCA|glaJcsi%!eVIo0-D_qca{0+seEQh+CRt@ zd}K^ihT>x8E6q>+m@HAR9$5I|6Up8PDC{dyJ$q2 zIuz^nyA>?YqaI;jup7~;S$nK#Cqt9MH|i_XKEYdkOV6Gra9mhTiL2}s=Mv~JZgW)f z4Wi4=FZhsVoP02%Cnp2O=fo~u9$kU4Wwl?HO38cE9fzxsLP)Kw(<`>@RWuKQRxXkx zsh&EbjUpHqPApMeI9@atnhlY>rO#1>}8ypc#@{W0^5e>Sy8edt*Ksb z@Uj*(`w2?e4?ed$UAAEoF8buDX}if?hE^F7D{FI|d@J{q8p{9{Z13U)RlQV_>%3k3 z(jVJQE;uE&D3;>gJ?jXH%4m%JAWE}ByV>+E{!A`|cZED-N=`~UFNS6JAp(2}jD#c* zcsr$s5QOO@>jgcW`>|i_stxD$S;cx~k1EG^y?qf5a^tGIr;|bOa^D2^Mu%NaA3W!PQOa54&W1 ze!F(UDGe^vUxYKP)Pf;__d z3jE!`+s`7_l#_f#pvIA7@Bj@#!Z@Ueq4N|nS`90D@9pK?9?X2Xp`gDb?I|}x8 z`n={7<`tP0rsMumlJj>9G9o{kcls*ST{)0D=WHFpu(bJAn^t8umA;-NA>0rCv{0H*EM0j55|ur-@oC#J8R)(-H5C^q+2QXs&Ju zn>W2v03-(-sUs>p1Kce^oUFqE6<^+Qi|__qlp>B>8w;x@eB=`!KFWToD$H6PeIZx; zRa@`gQ2=29UDNw}(K`;b!{0A`z_K{fC5-FXomdzD7E8_A+^E|-?^4<{>qN$8?xCD! zF}W5FYr3|SC+oyc{uK|w`^UU|uH-0lea$3$mrwRxzEgnf!nND1KDYAP7^Joq<00_g z=B~)`wp3m-jRLZE#YzXh&+hkWRG!A)Fi%;(khR$SE2^&113mP(mk%!5Ww*9ls^aH1u@7x$SloL>SHKO!ilUz| zx$irw2NOo@?K#DA;`>hSh^A1pP5yq1G#>>`SZi`>bTtnvLOtj!O{$7@@8!9}z)|Zf z_&vphMC!j}I(!ER?G6UQMwG#V{*98msmnTM%|}43jgMrYU^6E|trFqmP*Y8ZZ?Eri zyLVz!m##+~q@Gk&jZ#YKf*9I1;0&J@u(NJ2U=x{Es&jtrJPcs^LI?Y8PlLehsEQ(! z7bEOfAA1moeKq+?a)ViHB6_^8ua6%c1fF}t;!frtx5gcGH)BN=+ClJ(I%FQY`5AmW za&e^>*vp$)*jv@^hJQN18Li>t7&=TaI_Ti0^5Gf7^xGTc7Dh-z4@xK+_Era`Ys&gI50*mZr zo_@$8w)z8O;}(^hU~j?R(#;k?hO<7sN$%uVnM$jxo(FDYPElmTGJFrPl~m--I)7z6g7XZLq)v6dT?Rv-LPk13!3J4P~vOl!EZk! 
zU|)@720rjh(Eh9j1Kj`>;ng2rEOI9UKl}3NdAuY!`XSIoskg#R48W1WN9rOR`^I6O zHT6);FTHuKV}h1wC2TcRoz;KGpVcj9zb#t3Gm~2Nlc}jyMq~lBDmmm49!734qWLd4QD5LIUyb?DLA*A+TDCfQaYchF>{L@#QaOg@+-w@@QF zUsQSA={dHEGL8E+{D2fEBrf2vo}8DSpPrFIMjH>Iy`rdU$I_{Ta&5cu1Md3s2kAb- z9VII+)1Tpw7aRlMZl8Yfibemg2^*)ZiFI3Zuv6CE`i2w}>!ZvArOa<%zxGB)_Y{BY zEe@2^@qP_f`MjGsEA7hJe}p)YA0&x7Bh2`3?!U1gm5hoxmRFxD2o)6s zAUT^V2eb1Kwykzkmy>LU3+oaBIDriwhCZD*>Ah@0HT`RO$93i|iF#9o2-Z*LTtNn_ zOgQ}hC1$l}*kSA5uMpihH};5(<5QzqV?RNrCced!Z2#5`Q5;aIHLg(Y>ZGCrS zZ#{qZB+fyA@&@6nRjq&U(Qo!|;PA6ySW2;ECn^38y-@IgukX-7O=UM)gO0%7b zGfwrjx1%s(RH&%3e!~Se-kS{2{j6uacJ1Pvg}cegGh(h~eE!+37EJ>biHLA%F?cEY z`ocuvfnTJ~m9-li;Qa<-_O0Jy(gH*tEo`I(hgHPqgC&^GgCF1E+XBmXzHIFx&QEgr z3!a^~@fEgH!+UQHzUUDt!h!EpCMl-A)R|M_6U**X<{bimctPgGnMkDrA4pR4mY+9C z>rr=wXWisAvKaJNZ+9Aj>2aoY<$l+E{}$RRZ*Qg5C<_j4&Mdx3UrAM$P+k@aqEz4s z)4Z^jyt~hm!c1snyNKw{A!T}nI^-% zpb9O#9XA+umDjJeFq2&6Got!hw}#GEI0g4nRMVt5hhGelVgOeBDRoPAgFx4?1R8~H zxRny+#ynq@-PmXrl?f{1YqYTv2GNSxKcCH#C(owKcPn)c9&F4#3#lMBCphfq7B7|Y zRotEC@*5~=rxx;XAjnTDtnYL|?x8j#9^kr7<3h)UK17;jec`*lZ>I(`Z8g!+-U#4v zX`M9{9_2vuEG~>=GzSf!))86!&s6E+D_X>+8*|P^M|sb!3Lu_Se~<+}UA&F^3;I zQH=YO;hDK9m0g}z#wSMto}33tu2w8a`_bd#2V^@Mf4Ry_OdUFOlxnU2Fl1jf zBO2E4Cy?n+;E_AuTlASGl!b;;NlXXaC?)lYSgc$q5Tnj9QtH@F_QCe#*5WU-CEZMS zRRRVcsI$eE$hn1)k=r$j6#DVsekHFN9cNvRJTEa_6!yQMQO7*ZRi{bc;Du{zdBWyg zLuD|(AS`dlmJvR--eqUBG=h0*f9)6)O9X#HmBxL2n057=Ed&w3M_gM)o|Ie_ZFF&y!2}3+7W^H1s8?f{QkWF7enOPfkVC2%vztG#iC|6ylxM^KQ?*?gTBnXc091pYu}EDaMNx)+h`)K6Z2<7d9+MB{0@@GIX%lr zkZIoP8w9xph-qVqa=Uq*#4%*=RJnOL^J_gOW-J-07?o5n$Z4fP8phl?Ee@ajhKp`4 zdvt4=*>&U^X0IW_8Bwo!oH>9W@UzTr_Rfn_+esS*$ANJ?1_gg{ozv`@w9Tn*_h6fQ=ei2u6t% zzIJFfOB(8^F({i|XO?&I_;hSoHXF1&VOl2Nt}wi+>iftay5=_s0VEjK=BBZS0m^bh zkg_$uVSH%JPPB0Az+$A|^3a&-R1g%j9;&G3q;_Cg?|3|2OiQvoeTd=;@^dm*xR!fE zgq8KDC*oZ7@D9WI;t>1d=xS{?%eeaC;}?1_9BiWKjyi(@A|c>HB`N+o#Krh!Cv!iP zro72o-3_`T7Cw||qt^PZaUjC(3*b%g^(vMLkKLFC)_&M{Ty<(vOuDXK{Vtb}5}d_o z{`&V7KhP-kG zWgaCjaRk3Cl%+hP%P<(etdo(SnKXE*A~V3=t0>a69vTvqc_m5AUT<)-W}C|^Ld!4l zd4Fp40^Pxm?+7RZL684b0Snka@_Q;$k5A1!44>pf*mG#=T-Y2~A5;^E#Lz1(%hutS zT;SECBzc|*K5R&>bD-aL8Q*@koQIL$)dn*|VWK^)ILQ8GI~( zHV|(88y>C#6LyOk&q3$vw?#P-B3$js-7=Gu9dA1y8_5&kk6g>HKAcY3%XNRnII=vl zP#I7lPraw)`|3$7G1-p?Mc=S2E3Y`BnKT%@^>r@0T}=Ln1&^s#{YreFh|fFA4Rl!I z;BYb?LnSGmm!N++a^Bwn=OHNlDs>s#al@IQdQDxnK9rgN04DW~>6%2I^06&(U0Kem z;J3No#h_|S)7e3ZVm{#-$e{9Vqw7(KGLqZ=?mQ^Dw&-63Ia9o!LnlbMd(X0y&zDSp$3T}VO$V#{hwiCW+gSpDw& z{%J*sCiFg)*#$n-bpfKxg);elp>g-wXN0*bkvT#qTPp%}q$G#*<=SdomXRZ@(c1{J z)AukYKpPC+vtQ|vBkUyK^sVc?V1qZMg+%4uQfj$VW=0#RJm-m>oN1_es&@A-Gn-1q zU$6Y@yN~8imkQS z<*%Nvq|&tya6cU1C&c)@{X+cxZjFfFbadY^amZ&Gsh@L5iqfj3$vbP_Ek6E3M76sE zr?91j5lIahKL=V|9UDXFhAV?Cw@&vnlySG+@iZS)2WgEEE}|FQ^M`&mK4Dcsb#@)S zxcEBCim&$tRc>mblZOkF;YZ7m;Kac&-Yv4*d>R(j4ZRj{Mq4 zvHJd|5}W28=LMKSOtcTEK69T`S$*wDQ)Dj@p*!kl<@hKNmmq*p#zhg~gS$Dz_0>{g z>!t&SrbB_Z>GYqnzkGr9Fw}}n_AkhJDsY-DR#Ir6jYg3#9=#z|dm~r!X0I;de-cO< z{jdX8rlK{8U&YWI%)}bw%UZ=ciBAP)UA(`SGt{xkm^~i}H4nND8O;hPQL;*BGKLMN zR<|@0iKJtY~^S zf{9!E5G&>HRKPa8hMbMAYv!wl9K*j%nGTWSGEwmoS*xPVzf!lxY9Pn*xrV3eQwgD~ zMtd<*@VWyLa)TQhrOEUVM zCpz& z$$6$G_JxB6=ezbjv(LMn^*Sc@L`tfsR1Q>|jw8jK!|ZfTB^EO}W^zH2fHA#n&W#w# z72|^_j2-CclAQK^2iJVZi_f(m_&2Q@8nF?e5{0L{`@=CIaVpm!i9J&32T^Yn65I3- zu1@CUn?eV7M(ApdDhdWq`92g|8kAqBQ>l6H)=o~AYFUu_(?Od=$KpSkKEL$w-TuC$ z<$k@Vl+EnxakrnC^5vAGa%+;i0VpX*pDpYn`$bL06mgytvgTV*J_t|aJCRu?&bd7y zsGS~yovQn|JbL>Q>pdgKIJR57GJfG*#k}Xra;bkI6HfNpkeQ!TceAbDHhJGTg*(%! 
z;bhIS{BVjWol*bT>f`2@WwD#nvVF%0Tg+wCncTCYnNyY;u6~jJ0w#{=Z$?m*;iVb$W2W; z@1=fo%-46k>xWNQ!wET&H2u%D@z?CUknY`F5#YuQNxy?wVw4)5!QOXNRsodnL_BeM zrBY$&Zy;$^+zGXpvgeSUEK|a1Frz%On~m**C!4jGQa{pQcmNBWcOQF?Yhe6VA8%H{ zoh>5h`?T=+qDR#2 zDVc=lBlkB}-^{r8Nfv&#$78ttbB_8{!lS3s+hj{l)#Qet2#<4aEyuihxXqNK4Hc>;d*_zeFD_5D;0 zf3c5VrEMxM{40IyDXSLkEAgKOb$GJIjlhlY0JGT-!T|+h#>7WSO@NotE!gHuxcC-z{9Px(!{Vb2eJh;yO`bQVE@1TYW*( z#_d`#<9S?_d&ZAD9p50u_AA^!)#Js<);%nkCW*Xt5x}EyL^aoUq8-WRh=YB;&!8l+UM~N%pk?`&)5Gx1lhs3-K($!>I`m+e)3#eQ9w2KP^CRx5M)%GcT6o+vDbG zn+=Vihr@L9scC;=srOg>VlRZwW)_JtH>Oec?zcG>Od!Q?I)XVhrqUnH7hxQ{vyvYD zrA>l-_HH27$Pkhr1tq0yI!xzCTtoW>zG}Dg1rz(pr=CezP&`CtcOi{~_xw zprYKq|6vK~5Rgu3X=!NzK^l}sN5<27{1BBn0V{n1Mk;Q9zN98iwu}x(0Zk zLGS(D@BdxvS*#fr%$&2&j?a#>H%Lt5PG7LP`2mtW9gC+o^z4wJi$ub%Gb_-*DQPbu3*b{|ZEUZ1Y{MdbvX ze>*tOe;&%yn9*>^wKTqYQIE~4k{nT6M;`i20?&9W#XlHD#hcX)^pnXj@Ewpo?v&qI z?2tmVYC*wVyzgZ0M86^`&Q}@y(z!q2d4A1Etm05vEb*G;WBch>e9rfkH@=9uBm)h< z?ny1*iHlq*c3=2Wb9fU}I&Cqzl|R*S`6`cfXMbu-_~)T@()LQjIYSgYbwI9)CmRXKMi@mTiOV8e#2(B5$(q=8u}v)|e>41TerYa{ z8!x@O_g-IaRJZO4$Mof5Lcm2&hS%xszV5PZSbCes=kHLoLUb+h?4TPR0~46%*1PWl z0;UA$_|qvdrsJ;*;}KLXp6#;~z=3D30$&y3gzPhh`!^(Mtd&wHzuJb2Z!S(Z3<0f4 zMhHvY$Tbc|+=JYIu?{scmxQnYjkp%pxN`INgEja}CXYHAR!TcUM-&uaea-)s_tQ#o ze1%7W)ef&ADD=B;;LllBjjf82L!tfiv9(>14iHxX?XXsWW9Om#!J|kk-(Qi|f&L{c z9?&M>wj5w;SqTAC+h^|Z_CbLbfxy11a)wQ&l-N%)fvMcGw;SP3UMAd3gR)4uwA{^| zildyp8s#7ktX2p~OIWnktbRt22N#)_U1D@stlG2_xCe@K&BQ}B7TtqPaAtOs0 z`IlqY!VWPBcQ>{61RA*)uQN+e6h443w*2DL$QsYm`1YQh+o{G0*N)z!jQYD$f|r~@ zQ#!(12TmJBBWtBP)g$Pqk5)RK>rhs|KdZ?2Dnr{;!fZVlMe6i(^XOF90wTT{uszRp zgKi|sx`G>Beq(5T#yAKk2HC8EEKbPa)Y8S~RKlLQ9|rUAAH}b{1%|Mv`mu9teI~|V zA0d3toLR~&-DHvP#OPmZsw$Zs6`A!*z4)QQt^Cn2l|!pUCXK^lK@anwk-U6+kIA!J zZZDih^ei9^D?GpR4MR1?OB+k!ds+iUQ$zX4Q_X=Q%d=NidM-o3`S9)Yw@qjbSam52 z2W0ZKNjs==nvGiyhs%ec`o-1<=`A;8-@tVoDYzfcp;Cp3Pn?z{% zy{Sky>~uE{f?&EvLNPdjJZHH1GdbwtqeGH(mCx2`KHmodenz~z7o~q(NzT1>rrWk9 z|KVKl`I&BA^~^4hWj^rThfUd5lN%!ta$lMAhBexbuur$-v?^GjM5tQ~_qUoK9z zJzj5WoI>CmvVc_!*a@N}fLln!Ov-7w`4gWjFqWz!GPa_xGNC_{%ecCd%?Fdx43`$uO^+FRz@tGGNGknkn})y(t!%5Gd#Ge0%Gk z2TogCN9dostA~3lZIGY4&;Dh{=Ah6L!B8K>xT=vL8zrI{_jkznz-IEduOXiMjF%)q z%nexN@@P)0fHmqI&&@k@;seJA%@@qV!1T;+Gh&bzb`!F@ED%XFpa!ds{G0%)KGII` z_%v0fFi8z!j9SP>+?swG@t{24Z$r`DYVs&oG9ja-)W2;>Yo4}QG0%E(V`}N8z23u9 zz>+WhUOOCiI+K|JtxQ7RpKdfFUk5Z!04U zSRT-ojCA$pd`0*{TDp200POO|@=g?>GiPFMCmTyVQ@H(NpJU?6UmDDkiUt}`?~?59wFIA>p-&2_{@=V<|ID!8v1 zpwrrU4Msd^FCjDNDL1%NHFE#_|Coxl6*4k8-);6qpTHWZmdCkHdf_G$jH1~7sCon@844iJUdnRT17}A z)VF;G!SlG-BQJ6#wOQDbvi+VlDjl#T;~KP0-m?64pyqrhD76BTSWKqx=Mr8$Sv3AJ zs-*}uHMAA&zyZ(*Lne2Q&bUkWikBC)j)9*dFHfNtdZr?%H7%j8TwzFCJtAMC|L?M0}PY?5SpG;)t#S3xI78pyzkD zAxlSeB19q`*wLbR@peTuE`y|bwiWt63Y)6~E`BaFXHOBj{j+!wJFs|A`SCx;r2jpETJ!5p<%^X{QUjln47RBpdh=G$6sOg0;MdYT@KJZ zCYARlmHs*~Zc>=8gPTrDOLi!4BQMDm!eCsbNZ~(yM8MtaH}~EyxpS1sn@XT1?hf!Q94#ull%dzB)Pu`J4NZ?CA>^EdhyOC zMbA{H?h*H&aGBow)Rgg7`+1y+@D1ENN>Ex2V|P4cW+^STaInlgZU{qp>~h+pNtJhCGM% z9^LsCh55%xP9;RMPFQ=+)u-6_;lAR&@cA_TzCQ>k_METjChL}h?2yJ@n_kn{`<)hM zg5gErsG~9a<^BEwhI~I)(H<0n3$(9Ueo1diG%jB2UF|#Jxw$rRiK6nld?Z`}R8=(N zHc7?jTkkTX5nVwqiR1f^xuoyWKV`%A<~@~^!KU8+{T?No{dGkG01y8@mMTiZY)pgX zz{sca>nUKTLWY3#5L4lr25dmNKXMX}#4OnyV7lh(59F>k^Ppf4vwr@x_}$Gk&s?e5 z01kUSIM7*bp=GOS>do=MKhb4E&Vx%*E$U~rMkQsi89yvqywBIWNbg(N&rJ=8Y%kZI zc5v72y8>yq5>8oXI^b!?)@j++x$U<=lmDQ)PM2gN=&%^n&m+t0t`u-fb zrLZx=ja~8m#is`C-??}{3v=dQ`twZv$opzYMo9j35m`dx> zNG|5aXpn7gU?XiCF=JQK4ySyObzE&+(f_obH%)V*#jA*DD;N<)`(z+pFf%h#|Lk%X zqjvyZE{|Qm4dn>0SPAN%q>)p+8SH?(^ETEyiKG`zk+a|HKVM@v8>{F#Lpap`WT@k? 
zQ>nM-`#73*yJNY}G=0y2kNV|HaXG!MaYh!29pk_oQqq75vMMU-`S%!r7rumLrjjMa z5UQPK!`nW!@>4`tic}h~vhr47@qEldyhu~}`5784e0H+D7x>ucyTtm+RXaYGK-_s! zfp;S0e-z~`N9wOjDM14p*cXR8GBU~NVYcwjFee$_vqvq-*#mzMkMTxH0Zv=!ua6^X}8pPvc`{Q$ni$$w6=fM(eYLX zjZy+8eD=@Nnyche>(<(FvmpiZOBx;UVY3l zDZz^z&ezxH<_ai712HEESPT~M`wOU9+-KS5`PML}Td!HwYKd@{*=)$?K0g}lEbPl8 z5wJ+gxAx<*^8ZO_`s&WkFf*gq)>6h3f+9K*K2$~2u$C#d>^MxO;(VZiM;L&1E{6Xj zw`>w%PjdL%n9Vi*7_yy@@VAeuLbct6(7rSN0!Gq@y*Qf|R7q7YKoX-yygb7rj>O5Ut2+W<3izsn=2!lFx#@EVVF6z;L@ z%L8-hN)z`B_V`Ts!TkB9J>k>=hc9KlPW+*`$8D)%75(gCCWNV9$Y2Mfx=;ChIZ!=F zshi+~3&?B54YBvAsSdY<^gr6`-(7UZE^x+2$Nh&h*X9}iO4X3jz?eU{#0IKO5X zFvUX8Nku}h_~boqmJT{$l^>okF||6(j$-fWdxDX1r6I~^Lg-Tw{k_gKN<2wz*#zZJ zxZQ^aq5^L!xxzl-BGDC>;2C4qurt;W20!kE5HizridTfrP z_Os@^jw+9%IbsZ^R{S-ON3Fd9K;&jG{zonUVP>|Mo$pZ_FMwu8A&rT_lK3N}D^G%% zDbzjUUS+cD{bdzA0NS+`(mcb#Fs^_yHsBIvA<4NdAy03-L$whwk|t;aK?qvhS5ffz z-9Cw6Ml9EfPxlgO5E?R@*l$fCGP{JNONTZ09;QPQ{T>g(ds;i^XvS5^|Z};<2H{i)$O; zI8s&yi^+Z$X;F;&OI#3!xX|Qhx307T7uYz`TA~wm?nHISfF}<+t23+72R=L@3_uGm zv2%QgMYLSG8X2{0LfRQ~B-P*x+4M^D7)JM2YTz<2<;Hk*&t7N%u9S5My;^=K_FEXQ zcS(wJ;N!TKWV`M<*JCRY!EoFt`2FF2Q`IZw`A6u$=>(g`d~>@)!Fc>JF*~e`9$n+1 zK3tl{G+rI!FKDitSfJ%83zr>kU(N*?(+k*I8PK?3x1ze2 zH9wHs`$8J1Rq%d%Hh{G2{|MW$%mk@*p%PzFf_fIl#Ra+8@w?s}RZ&dc2Z-J=9{ubN zW;uT!B}al-RM05=Qr>l_+r(ow5@{KP69peEaNxU=J{t@((7kvH@G3yhY^D0R(4FP9 zP*EpjAdA4jL&c?GWz>90-_I)-|yMK@Bv8WFnV4VG75Z z3p+pR-6QwtUCyrR967Q|S-MN!WNlbjXsM#Cl1=bgtjn|XTHKN6BH)a*xy7XapX~;y zmmU`R;$l8VA+qD>aTF^&PzrLE4S(l+m*imD?3;EHhNjDDfUgqR^ZnZ&?JN#{uRp|o z8h_0Ht~$2E`@zJDHg^Qj8QWD-EAtQM%{=HN=y1V_3|e8J@US>(GrJTn%vJBe`e+s^ zaB+5~0M6)>M6kog+=p&$1}uZntMixSW*#J3qLSFcHv{gYDzhZ9e?z1QRDQ-q zGBGG4ni;bR-Pr%p$^g3p$Tt~jSL|jBT(RoWIdZ!W2Ne4K0`amgaQZXm+I(7LktJ7WAv? zg(!88K2aC!+0R1Khu4DP`X>S0a|~sK&?+KYK}6mtFuPMQ{mO>@W55|GQ~iB1@q!2R zeS~$lBqqhWtw};pl{Mc1jx4axRd=Np&;DAk1vM`0P@v(gKCGH) z{`?>0H$V_O!DpN9U~Oq1#>N-7?1NCiei;k|L)^CVHZl_7U&7MhnLpN+qn2O5}sbJ+uD#O~2 z;I_n@-*5frDA>SNYy~Thu%+xr$R95XxmT=P-(@|+#jLkM8wivQlm)O~35F66Atxbp z$(ufTp-jM|eMgN8ofmp>0{9$};N`!v>43{E59%3_jJQU2V*=G<(aBqm8c28S_Q7ds z!m_WYW93J@Z*C`!$vy7#+^z6}pX=FfwMv*aipC6t9tr|Cf~3OEh)C?PBa#X0DLb9# zeG7QX05xnjRdDwY5B&LQslle8SOhP+jrMc!{lSZ&^aH?CkfNaYYiKfwF{Wkl_KG1N zHBbi5>}+klwZqTu$v-|6HKYCd>Ie(4u$axwLYXr~Tx`=vL2c2S9mJh1FjK;s=;)Vl zA8RJx<$T+rbE_oByx(Ndhg}t8o8T3e&};s#tymWMFK$H3M&}qs)89m7XnAVK%(lBm z{&$I-!?&3HWJ=HK6QV#r7^3R`*5Q;OJ_7^B$#FJ}p)3WZCI{rr^ns+xey`*X+h~ zX#RtyWB3+0EW-&F}rl{vOc3{>$)P>>nreZ+%|&9R7%a zd90X9-Gts53dM=t%1=DtHXu4ML0Le6eemgx}@NY;FsTQ zl(2HmCUoPhpFF8kM8H+}Z5hc{loVux>7!)_jYV@uI1$rk?UUQwRsnQU);n6Rq06@> z?p=S%)`oLdL(xfcP0`afZl5tS=(v3UFP#0Wg8}Zl-?=9zu~PSZ?V+}|w*IkO0=Mv$ zFA~M0gz!gfgwbIBwDTS3a6D>?gycT_{%=>)XMnH5!X|PXjg8V~^W3H5Z=qwDWga;C z9X0|#&p?TFA9t&y*mR24AZLT-nl^wQ$koRbu5IC1R5i}?LUXrkO<3#TbU<|H8Ls%t z|44cP4?+KQVl0IH;;Ukx*Dv_v=Wwb~b&ji4xDZB1(fv79VrAfv!)+W(FmHz?v6x|> z^7<+tG?cIsH9AbG_jv2?GO^W@FeV4$SY0mr+w6@D6+Avbt)G)Al{65WTpmA>JLwec zH+3)t^Jvz-e+N_PPOZ6oRJh!*;xQFv^MCH=pRd!L8#Nenor>Iuo^t<^lN4XgVc>^0 zf1hjPe2@FbA2|737-M2Bq#6=2+d0hw%mJjziY6I2!4$%02ooLbQL+<35m+4C&e#D7 zKG%TSmv44QA!J=%rRRi)ralCDp*s$p0w2zUtzVpy16tCfi#~{qgROiI{~_lgDun=PJG%!~Azp9YNmA z+)?n83c6)g%*Nb+kk}NSoR#L)E=7U>R&*RnHo>U%P8fxVEnfPh&p1)}SDd+G%eETV z9-^Drod+TIOr6bm`T{Fns~R4?8-u|-j~922A6e86l^4?;T7OQrsOs%ytd6mmjk+}z zsGaKs;h%0#r!n~VpEp)g#`b;Im;{!wK0XQt@fpOgIm#M6UOp$Evj(B9`xi5K z5wy-;C8~3^fchko9>HjQa3Bzk@MmEYBgV)!8~81*IR&VvVxUf-5w49c=%clu#(elU zDXfmtjN-$c$}hDjG~c(x2<683c=hN0D&cJlZioknn7~1CQRH*8S72dca(= zi>;QlV{>FzpmBxqImVa|JWX&_hlQk*75S) z6&rZR`24%J?at=v6Z!bO!boc>YP#KRH*-|ic}P%my3<{N1DJ21W~LwoeVA@7#U)-lP!zae=OJ~sbg9qv9y!Tw)F7Wa&s$n2@_}j 
z=UKf8FbC1aGO43202=4R6QI$#znLCKBA25l2#%qxH4 z_5I`T_M*~9(>}Gf-FT0KcX|SV<`g25lF*LE0B zQS0&{jyHdl_93wiW$XW7kw3dwmBNlD@AJQ7>O0yN(lyvAvGt{95Gb<9@PEFIobr$u z+67o(|1O|2Lae$3STGFnoFB}I7~hlQ5{T_QtFS&N0tRj!|0-38N@A4a6Rrh=!R~?# zTNro;hH$YJ9#ueMu1Nx(g%QbILvshYdfx602=m%MwE#4(%u+l1W0e#BTblMAcOIWF zc-Wm$S=hX>ii{!*NZ+L3&V${}gjm|ykAcROYXWFuYxslkr3mJM*sjMEEisU#{@03$ zq~JOe3y+2mqI4_?JJobnf%fsa>OPrQe4Glvct9da)Jv4F{w4Jl;P?Qrmw<$F{iK)C z!*CW#``8o99eVD!xK}4^f5##DZ-7FH0JnGl{L?RprE?Y!`nT(4Rp+(f?$6J@bh^jn zg^%a$_%j{0FTOn8d_f5i72Gge^pID0h#M039~i2&xMlG(g@q=gl*y$-&8#RKl+v|caz9v(ur3-=Gj0|{_yK9(C-`+azWg}W9xY*Aj zt_j|{#mhlF)i6iv*A;SdcRKhtdF|GkpvZSGI^J?7d+ghC&S!_Z%Yx=UtAQVn&e8~r z*YMAY%&RT)H2%X|O4yY^0<^n`rNcIMWq8A+7w-{rC(QBTv0IwJ5GFSPcE<#F8J)qB zS2%>7GnEsq$?HfF#KX<~3j~R_Rf?R`<}Olr-SJPPu0hvE^Ehd{RMztN0GpV_Tg1UD zNfG>{m}G=rwTGb-62p`r1zC`5iP%8RCwmEem!(stKY|BecW*`W(Czlwv=*Vnx*E;{ zi(acahihoS9=$`ok!TeVQ*`QhBlic?{^OTCD7XtQLv8SI%c5@qg(cc{eIY4&X7Baug9kABD>u>6>bC9HMNGGFqTT$5Ag`lQ#qNO=IF82? zd~bTj62gFF)tJ2Xz6DtIBwWf{e>t~X)A8s3NihZ~MTWIE_bqHbP-W4Q3w5D*VLwIF zJ8vs?rBr5b0d)3Xk59kK&RA1C*gdAUH9;=%Bl3E1OjT`8I?OBi$Zc5ud}1Zla8ST~ z6~2LT(C?vfNz&1k7ts+0Lpa!s5n9H!ODr3n-+)!GCw!A(aG%`Bj zy(L`pjkohQQmL2V#)Hev>t(QF)YOYcl=gpk9PnM~01YaJvc0Z=-5ukS)wn8G@^LY8 zDf=sNCJYHPB=_OMFvoAo0+g(UcRPTyFko2_@JBr4yj~nPMP<`oNwy_6J03qm>a|Mo z!+RimdcRLtnpQM`t~6%yg{^twWy}Gh-Pwg--L;o86LaR#N@>CRi8b1!*G4iyGf)}3 z^TWPQcO<^axw|-ucsS^EaCOCwbydFaX#h#R(0@bBz}L$tvu(7|?eYbTcHuwSd!5of zL~7Gu2qvchxdHWuyR3hM-+351JC?^sU(q=fsIZ%|KKSwz!zFlFV%)v03Ul<5Ih$d- zW6b_KAI&crnD$N;yW%DAyXon=7E|C zL?v!s{K5xf%g?A9Ui}AH+w4%%dGjB4(w8oLd!*Ov&BtXG?q{qEW(0jM@Rla;Js}1jm(1d^XN8iUvh{70J zFbjr^>U{A}ugD0ccy|9dE#xYlf2O7)KhQ{WKJ6hGz63ynOf78ZuiU6K5&jr7vOC(* z-as@)pRg`q#X2V`?W}{Lr>nyt5}b1vvYIALlQ4oK@OKaGydV>9 zos=hA=!bpAz!S9rHH5#AOP14r;cTkX25x!=|M7Z^LcF|MG%EYMKLUUJF?j1*d~zZ+ ztOEpqr%IeZQRU(5n2_f^u3=mZzndoFzKXP_2zHwSdfVjiLuHdA;^GJoM)qLmoPS-l zUj)BYEjy&Dw14*7;+E`gHL8|L1miYcAMw*K$3=+_)Y|9Up?WV56>tYdf(ODEOhwMw zw?AzWP5XTxvN7?yg-lP~dz_}$vp>q2T6l<`9O+j@IAA_;;Bxq_EI-TgqtyS%9UC}` zOT_TNYkd-D2jMf=Ees014)Quj-f<4({muPYj7hJbj_r2&5G+0X(;pJ30>;xJ=#PCe zy@8QHp^Wb1;}*)`6uI38OfOaoTY&D3qwsq-c<*H5Us-0QNF~*0M)Sz`ozCTP46-N$ zqtaaw18_wAok+2Z{SIkVqoXa1IqdZp4peXL-bsw-zyY*(Owkpu*BYkZt3Kv$dlt5q zT}Bs1yB1!`4OUKh0{Dh$|Eud5aqC!WCh1{VLC6R}a>ZEZ-p|rs%d(UjC6Q0?0Nq?9 z>BNOuOeAm|bzHy;;1M7IEPvC(58CnVK;E#ZZpV)GjLyAnOA>&p&(Fhe#$QFhIHjETuyTy3r zJ-MC(cKyHcK6aYCtKQsMx~}i!JT&hFtCr!ATit03O9GqyFH>QIJ!R=SKc4o>n%x}W z%KSu94lbQN>X?*P<^jAF$=jHNTP4L9nxedCSDV>q#%#d6l0EzTJ3~I(h2A8-#l0w6 ztJv?^QWc&<^Cy2}U^ek#l#g*JUQ|xTAm+bHYRTRF(!ipB-_OkHs=Qc$23R?*mQH;R=HP}MFoJMmbyd`a zNgdKLFu?wq<<;}VAnqT`RYFBTf}h7KlxQZJ+0|rXrR=w+c{52MeY*`yJrAvJ8r10} zBF0gc9^cP+-Su?T-wR#|jX@fx`ahN)WvuPge=$vBF+2i^@t4 zy4JVxz#8s6)5DY5-VcFQmUQj?-+r$4_LMU9DEPOo3!ml<tw zZwS-;jp*Is=ua5eYkNY};YuFC+!%VC!-O^3FtAHBC=z+$bVW z3n1*u$gFR~aRFR=itILm7EL6;6a~$Rc$PguLye31;2NJ5T?Yad2Pbb$+D13UWbfbYJ8 zYBf<>n)8cB^KqKg=$A^0H_8G%<58u_$MAFgUIl@OwF!b8Ijav(v4RK(<<6>Wrdmx3 zo$W>BQlsu%5N$)3h^AlkaP`yQ$1P)fJ@BN#6uX^IO_YX74;M~CVsxZn(}GYGM3<-j zye0PnsKe!W^76kYO+$*+Ouhi^%mjPlzp z3%~TX3TFjwe=1$HIvezdP$jSR4CK!v1y+v+xd1sXt$KF*3NTUwa%>WbH{d-tVW$1& zD+=3;)cFmiPwI24J6)(_geW(UTf-%objV^d+M!6@0$P=%`IYF2E;oj z7L0i21w*}9Qmx}2_NWgw%^CX5t6QIif^+q3J43FI&hxe>;)uQO8EEsfUHj?aNz7#m z`PD`WRF!1iK$yT32W-a=DakJ1p{68s!lVD_)W1KK_^_R|5d(CPrk(`XqbE_KHMi`p z-8IB57z9Gb*lz-kAV4>kK77UFd-kIAOP_dWq3jRhj%C+;9~0{^QYSs4_N3uS2zdo1 zaAnUlV9UP#`Fg8r(CML1?w1huC)LuQ`$JxG&VN1UbThANySzBtNvjUNo+5uZk(jzp zZ0PXKFB$Msb2)0X|0EUw+`bH}CYN>jP}}!8l9)#<(c7lWyROWbSVRy9Ouy|?n4*Cw zJ5%x_!@8K>0bExPSh@#RkTMq?6WrE4#Q%f`aEg-89yv5528)59lIJCA9r 
zR=Vy8O4H+{Bf8XEV)_kdo>$q!Dt0K&buX>-{E$MoXODsd2yPtH*gUmi7<{kpAm;59 zX&o~3Idn1^=Yz^X(;c=WE3cc$q=xb)QF{`p#5~xE@skJC9nIVCe3Gh(AN~{^e*@yU z*50XXw9e&XW5^KMNU09p$(7z+Y%U1@$UtH~N+=fu7d+l7hO3TcD{*IrQMaUubLP}* zFGjlEeqkMZ%qXOkdL3(ir-J4>u5Fp)UqJt_@$Ar1>h&M>rl=yf-|Z$y)DJcy4!^Y! z^+C`|#^|Ajb!0_FRjcL_HC)9Xq9WTjkapAg1!OB5DW5EBG^FhaB6#nX;y)W$I;p8g z^Efu(oBYR>(!6C^S|aG4PR_}nU}+?K_;2m!dj^N`BAZx=Nt69T77@Pq?F=0JetoD1 zG(|~-V>WI24)?(*+;O(r!luH2!XyP{BCla5|NQ%*sW>R ziqxLc#`m%1E&1`|j_VYJ+Pzq0?Ld~6Yj1-H+Ir!lnElJz6w>10W9yW_Ss^5PA{U*J z$d=6-?qL7MWXV2`ou7W4a7|{>J>ior(1$wTubiaSfw~MC)Uq@Qw+-x#v6vw1{t$SE z2k0mHTP*nsBv6{eicyk4D|NzH+NS-WCJ6Z^6E;swMg_HirO%CI>8+ufZKG%4@>{xM-38Yuia0G4#4@F5cq?oFvdL) zTX&A#gl`^Y0>$%}+m#NMji?i*V$U<@E$<~HUD17N5=@abdp3JCFOLi)kB*5+YxdZG zz;FM3FuvEs;3}pu?uI+4cp2^NYIqX0uU{n-)1cwPHHnzBsj%F`pv%s^Wpb40B-_1;$aGzSV-$x!} z-@9~$S?PHqYIp9$O8L06Rmf$o?3O|28~vlKt?|Z6D59H0RnnxM6)a2c73l6hANQp! z1EYW4NoDOk+?u*0E*?qC0uRUcIHqTTZpa8&|@RADkE%( zCbS@iM#rE1s#Spv<^dB@HILH*DRyKMS%3>8MJ0V6eOWA2nhkTf3M&JieEX}8ZY_?cBXLdd-LOH8GAJ>0k4exhV2BZu()FFi3v_M zob!iqsN!v!zIM=5hr;|@SBs`yRN63va@nkY6}Wuw)=V-u-KkQ%c`s7+E=-2#evxjx zN|ISmM+jr&i@aL(E!YbY&A`ZfTb1)SO%&p>r8%FyBnunse~U2HIH&Zu(S>euqvvHW zle=80)2+eQWb&c%T9In3C3*<|LYc`S}8!(=%kU>`He``n~dfa1zh;$lK89|Z(CIuWH1 z>J2?b^4VdAH$+?kl+OLNA|h|pZ6D*7#hOa3b&sAu>xuG74&69P_tQ->QQszoYVru6@;^qOe|R}^ zbW>0CA>I>N|LxB|i?tAeSVTo!bU59-Pw{Kk=Qwtn>M!vY(&R*cJ9t(UOiM)n?0{C~XZH-Ln~^$^Mqv^n(T3cp-L+q;+y7eCD`z zJ4|Enft3TYlgCw~*!7z(Sq-I5(=3fEUf&13Kgroy@#t`*HcU41=z!$>Ww0!pbF&^L zG--X&828{`tLV9G4E+f)&t-k%rzkC5HVNF+Yw4?+mfFo#rFnD6kRO_cQ~-Kh+X2;! zt}G##q&KUHP{LK9c}5$^GYDppB`a(|?Ji#r^_FFKdyYlAG4j&mX}&wbyG!_7#z>U{ z#4+2duk4BL3Duk<_5Io#gpPh^yizrO_n;bO@d(S204m{O-JMCp*EZ8+cElxPe#sZn zdqBI)T5(Aq?%8JCbc(-X=hL{K+FZedK|>fZ;P2gLI8 zpf4`^YWjXI9j^&C&oM$FB(rmDa@{#VU$Cl3c>R!q?DRHorb3I{(wj$f(qyXd1=BRe z4%yON>=T~@ZJI02!AsLmV+7u(Jx`Y1mOc=xMDZ_(n8mi)f1zaZ?8N$7<(KTY<;3G* zMLZ33S7+|=1ZZjhJHHAs9fe#dYeDRupbr!cJOLR8%8XKxN_NDu8d36NFly zMT>llvaXd)%KebI_=Q*(lX^A5BE{?h+@n$O7G)vfnwU*vwb$B=s2PXi`#q)4nb&PG zqFwM~=}%@8sUxt_>q;Pm)(Ja_A%t)CGY~_4k4W+;C>swpe5Kc0P%iOoG6~%&q=`>o zs;Ubc2sgjHi6gduJeu5T`$?8o_bx=e@~eOlTJrb8UGYZQNNb0&1X29pFYEy zcx*Q;J}A-Q|5uMx0WO`nsmHo6j%2~OJ>spHkx&X;iKpY6UTe1ZCjNq%iwzdScTKkl1|B zY7CfryJ8So*h^BkXwpl$J`vpa z=96I1A?}EPGvO1D-ylac-latd%tV=Nj}jY z)xmL&(C!JT%3HN+r{MEM&mjpfdr@(u{rAs@u*9ghc^BasY>8^PneIV5N6sHhBdvm3 zp_RMOwx)m7phrH>K1Do}S=VBe;rTInucJJuY z$WD^!PB6Je99UN$JKn-;shHMweaNfW-LDqOz?-j_XUv zS5MK*GHkF|q)JK%PO{uqu4g61qL|rYID6IQOo|u0g#V?lKWv|S&Xn6bnRF;O3~3Cj z`jHy+@K8evaVrYNPuuqyxPa&;b&dO9oN)DG=AKe-avm{{RM-#Y>mVDp*ItI1jufD7 zgpwmiz?%M0Ql$0oP{VeL`|XpBF5}|l7J^M047(k%rrzRIK1+`&+z(De%a+ zK1dV;B5#tjeyl8Tiq1R1!AGo$Bz^F^U!L@`2)quu==F*c9s$}B3!&M$pB+=!8mm6$ zvR51Ykq3jU*s&>wAzo4}?sdPOEt*UaT2K0*kg!Dc`LO@vi#$ygo z_MY2L^o$J2VbK+uGAeTKHFL@t;jNr^U#5iDGM1}ZHTv_H*%Y+mK-(4vCU)k0O`lR5 z_!eu2>Aqxve#lZ^TwD|6+T;o5YH>kYpM3t9Mb}SKAbp3-(T0HPDNP%504JI3(iYvU zo1mIn^c;=f+!8HocM|lBUaeUnKF36Pu%dI{fj&Cm;1qBbA*^FrxAGv37nMIgcP+w_ zj+K*-oujT3B`8>||4mB#8x||L1HP7*RUhjS=fHN;h^KFlnE^jGV+%ZG&0 z!5^@yvAgfsb1!CSZF{H;ei&AW`qJ6abLtR5B~s?UYyjr>k!HNPoAX`g;=K1Orca8@ z22?)g$aa}LGovsI$KgcTOz@&3s{G?f%3{^_S3Sd&<)41PMR29~;9ww)n@CH+KJgH(@ z=X{r^T>hV00Qp(3?I>v!s;Kt415NYn9$VfN;o>wQdPNaJ-aUKxCABFj%9^z0Z2-eg zuVLv%TGi=fK+98{66%stTpK~y{b`}1yzceScq~wJ0dYaCIEX0QPDP;&PyY8y3fjo5 z7?QlZokaI_jTU2hf3MN@jmd$E_O%15D%N#i^Zg*1kz}%ukH>u1x+9!$`zMmiADQW_ zojihhPX~$uj#RVW&S6>)ak|robJOQ7Y?kg@Iy12mn~2HP;n`wp{+M{oX&&-tiX19` zp)+7_h>As>b^6lPHK6#dpf^ENZ9I;LDwKoto<^pQOXtLl4*u5`mPm4D#u3cOb2>be zO>(it9mTsADSwJjN8>2g_)m z{9^fB8B^$yLSFqpiFY8C;xN0AX_wrki0)S8HIPzJeBO1PcCKm4J^S%B 
zOQDQxcA(T8E*O2{D+}gKfolnU!o>{P`1LON_#D1=)-h#vP!R&$zHEGIc6CW@)fN?dg4msIJJ1pOjLGBw_VnmW~bz7qi>3T-oK-tgXSWN+M2=*}&h zUlV@bnAZOKteYE2B|Eaj3j*SbZrgc)ui zf7a!yUk^kdtUwaI>^$Dw!5zxkJ?@x~u{J2oQ~yS2h<-(MR;;C&0)KD(WA$t;B#Qol zeBkSM)gQ~M8E`T?MGoawpP_%BDTa&U8I)(zeZ>tp?aKCC8iGX4C-dA9Wjj+gnlr6q zeRI8$w!w)_KQ~2yJx=ngY3v#|G0tLn8LHxl(cOHWOduyy(BS`(M9yIr@Cglw>uPSs zB`}=hWA8Vwlx~mEv#A@l<@&s3QBynm!#h8LDun8bK;KJ>VHgsKOm;a%jrhD}KeYmi ziqp5H)Rso89Qw7juwkNIKJ0(XThU<>FV6*UmAI1?Bi!-^$ zh!r@w<0k#VtNAnc&uwlGvw%Vdc>}TSwTjoVf#|Wt>Mz3(_WI8=(nbXEEECjwu1E6; zET%fq#z~Q`l31bG&NZS~Be!;dRJs8;@+3i+IQAmw4y*sU)W z#$;R^DbVBq^twFgh>ifYaCyvQnBPju>uzKli}R@6pmZ~f&qpgY;9+FvYO;ZVSsh79 zZr5j3iu5(!rk`YA2MLmc2F^Su;vBIDZ}5$>dryG#L(VEP19ok3six-axorxtM{2k? za9@Xn|N61tbi>0V2{}}A?u;#J{}poBFm@DEFXMU-vQ(xhF&;)xXM9n5H@h5S-?{5 zm!a=$grlsfKoM*yFV}U^+wX`)4mW9*u*^OfzWnZ-^3@L*hK;R5;KoF8%-t?yH|eO8IkIOZHvCouQ8D`MO#l0QFc6~I=x1Skn%QJ4S6dYSVTv0$5>oiFtIlZ z=tlI|KE*mD$tg4TS)bT9k|#k8FMiZhMOu$Oc1W@2ua1PL1Rny%tXqF*_Yim#xfq9@&C@uL%=#CcB&U$x`Xf!4F3}LHMqn(F?k;cpP$k zJawDw&iWLOUI)cv%A8%@#f0moqlCDA2KVtrUK$k^E~TE{;bGL#L02|j2cYj5t?+*< z^4P!%+-%Ap14iTf`E1i1+6qyobciE<>k}eX^yrjrF?HyQMQkn-ecE!n?in*1o7Fz$ zUEN4H6g>bnoOj<(jUtAhZ~LdADwZjq>}fHJfHj_D0YMx~eL`_CCJIRdw8lb=i$vGh z=W}@Wo?X)RZgPcj!KiFb>ABFe-d|Q~4!W`pKD_xAZ#Wf4{=l5B;MdNx(I9i~agCPo z4-7;f@+oYkMA9m~BUw7cy0?3yWGPNzDlz*>?U|#s^$WmF7?z-GCNMTOfV<6-SPSQaarW-7{Ub zRq@#6rA}aU5L%KJv^B3Q*S_OD?fbh&^Y}oYP(|y}vCfR~0_}tElO+`RCk5cOD!1e-CXM$q@a zPb9}N$~1OEuCw8k4Y?fIxl&`vgQlv~{vTf?L`A80oc&_GGMcDoXLA0KX4&ZJy<1-v zjsQgko(whIeEF%UVUKlB@bjQQs9tGMi-sNyTtX2JMi76kR{&iCodB4Uz%b-|nPd7@ zrATYC(mRyPGH?V%o|9*nSaye^bWe@ay7t`?TdH{xN59Y9m9Np(g}T&=7jh*}4n<;p zO$>4E-eUyZdDS5sO>{2ehitc{PwTOT^>Hp`P(;CD5RQT9Rws`y&Vx^vKz9r1|3}t$ zhr`)zZ^!7pMTuyM1krnMDTwI3_voGIjNYP#Xi?vY-g`HLAkjM^%piIjkzp|S9`8Bl zcb)INe;NPGHP3$bUi)76eXq5&E)Cj_6!<@CM-cGFvWdP#r4QT#5ZuP_!&wtyAQ=`M zeP8`87s&K^zY49<_y-*{nh){-=Kl140u!KA2$l%>YAtF4QgN1Nbz)D}a@c-+SH(UM z{rT8DSoS+CtQ^Zk^xNZ3D+UEUhS-JfU4}YOPB>8l^gGJ$<_CA%O@%W5<=1X^v_N!j z-EA2ihRX5__yt}?1Dh|7f<+%u6T%pOSt1HX%^6$z-$jBz{VppUB}4Kf(2YI_8s@it zVds7vLHf8(wXH$SG|~DO3>_V?+T=iabLMsns24>B9q-ENWQ)bO{yr4bb@2B1tz|B(AI6G;qBiF_#Dl5vgb^T4%PYh!Ff>mRj!TkN}d5}NH^`v zK{YBpu`bBoAZ z0Y|*ezbw1|$Advr-(WLIToR9Gdev8kS`V&>7OM`2v(7YDZDF;)S|0 z-dq^7 z*&ggI6yZ0l;_oRcOGQDRye_i$KO(2tdoX}6quV(DKXLBvLtPtw6_+mJWJA5rctVWW zw;TOfCh%kd@fh^+?LmWF-!U+CHcB1*JB?%Zv<9&7zx|%7lm2Q=l!_<+XAIVppiVr< z5YV|lL1Ut!@Wt6wMlg7jF$YMlv7WEB%F+SFhBsx38Gly_1|-QT_tip(n9v96{7F>b zu-2V+`kRS~E|)zHMQ`*P~PDb`62EsHP;ij#{OuWe3EK;0QUtDUPSlRZpK$O;d0tp=?}zYn@5C6lYW)F z`R%5G02!F!kKm}7TvrxYjBqbJa{Fl6CQs}5mvY-1eG&qQ((J{*n$-Ot{S9@-n}O#N)0Ry$>!wX2|yJ4CR6-ar5L$b zjL6Z!MEAgeN{!b1q1sVFG$KY;XT3HuDhb950QDko03##^ynXgJM%c!C618^%VzQoh z6+1tM#N%^m_tn0J=yJwmgs|0I>#3sB$tK^#S40JrAeOyc!f+;V!?OF_259BJ)7EgJ zja)ITCtL?@U?_810v`>Os-gVauUPCE(Y0(=g0SSDzl9A#4h+TCe+lyA;9`nDG-m1{ z%yH`g@k$4fCajYXiG8NSb@Y_)YOsM)t;l|2ggoO6Fa0;~2)(sER)HVn9^KyhXLOFK zu*QGYDt4?tBk}O#Vk2i$ z@t(JQe`|ZxJ#iTlJl?%MF6hhZb^P||0CpNM=45;1mERaN?w`LcvmM+6t7T^aNksLo zhzR%Q`w;((6W*GuneYg~EY`1?2g=TDd#X9`TJRy1)-ByH=(f>2D?o|*mx2rMaG zp#{CCS9l+_c`^`B!--hdqW~U^HTi!YY%>&w!%_}CMzJJCXX8SST`ClQj;w!|H3{=} z!1$hWyshpjiWtO>xMJ)* z-tD|WtSuUM#EJX#`)GBws@ZY%>C1#TtDD8;^Tb8!$u7$9#Z!F`>6caF2FRz zV6cs*Yr+#OO2FYH3~C;?I+0AV%l*?`NfjFod-WJ-JT8)qk2>5V-(lD!L>)40Fjte^ zt)d0=nFawm0BBAugSBlI%;NJPn5<>l7HbRg8)vmv^aDYo-@$n%C&DR`gvhV%Bd<0F z1k?4gQjBDk+%tqz3NIdXfVC$KX5@IDJo zawf^PDGSL^S?_a7_1-q6A(px{d&dezU8#7*n+vqryQd^(Esbi&iwW)L~y$=S2p@--f*8lhB@1B6OP&5zcTf5d)pgfNXk?6dab}*R5 zdECo+K?dnX=GOn2eac>Ev4sh$(Z${b&CQ zKpH^CWB${0L6KF4ejAPOU9?@m)P!XXb|8sgcTz_iIB*61q2>agfem^H1>GN0L&V)v 
z8z-Gy#PvIDQeWQRff0NJK{&?K)~r{SPft+CJu!}M#8w%3$tO88gKYGS_mm=F^eMuY zSp1P)wX}>~le!&GwCC)HCB9`T`IQYS*#+aij7fNLSZImJ^+qAOpQ5%IAkX{UUJ1Bc z;F5qiM={O1>ryGTjd!S(jp>U-KdG;ulZ3{xo8JGQL%~rY=UV+`@ukM>y6CoQwH=gq zSplJZPX)ECxzGp#b4nO~sgo(WTLcjc(^(T>5uQ}t=lpjR?g1T!!zN$BvF+`^=Bjas8vXh z*pFgdCq7;~;)F!5eJ3J|#H2v)4UpMvs=yx_;f0fhkwcqYXrrT4Ec%Gk7Qv&yc%#|c z-uI->eUSJq}bTbqm*$r|uvv9bQnPWuRkb^>$#{$H8 z{`Hgp>oj~2B}0}j2^GL*mpzxeaQdY&!7j^#sFRKKc%v8QBg2MIGIM}HQu*=Melh_b za32>foyY)(qs`Vc`d_hC4*NbkE?Q(z0iIiVEcY@C=Ie2OdafxgN~9~mFNeYT(6n-h5t`aWa(m;F`f=1g zN)Jaga}hxnu@UV~7LiH&-gJ;DzCY=P>&1w|s8W%`fWXhdZEn5YhWt1e)#+zOV1jZd~=Is8*`>v6@$&1*emLiO5KPP6n?VPR$E4K&Ou4I~c z;%|-T5x%eh!IPccfY#jWt5yfBu&&*M&A`@5DA%N##1ocNd%zd7Ux9n6;?ls#;>PyKRWY*M;cO2>e z{3F@wDC9QbxdL4V6?2+v(2b9ty6qzk85`+x2|xmu0T{Di)H)YV|( zp8c%7m}R|Pmqd8xF=qg#d-0M90ot?MelX#IA7H9UQDJ>L=vEw=>4_5w0+#P9U*^G= zP*^I363)LLyG8@JBZ#lbno!ce6FBRTN4SqD4fwbu?N0V<2AtG?u7c1;3gg!mo=6ob z`j#~U_J*|d&3A4bHMQ1~dY!G+TE-!I%(!5O>QA9g!~-XGX0g3RtjyN)Wtjo2OfptE zdh4~aGPp1Mi3vglb`AIY_gg76nOx#>5IHOCosTT^^q+CUnhKy8l*vPklxlLc%8x8N z?9uMnwz7Vt^F3D_sg?`}d=X?VOI7Qf*P}nfm9pY#MkSh7m#kWKiK=&`IV=2dkIg;L zcUxz4C0PIGM7(A=ks;HQqQVrqwkGQ`8mzVc1~nm-KOiD+Fa?XXfP9? z+>yF=BlT>?jGd>H&&O%Z{2pyh6x8me(n9hwObBS@faAg-7qOzIkBuId`uhTryT3zz zxA}Kb^I3o9MhM}VooW};npCrvCmXHaL}+Tvk0jw~880n0fdrFngQK_ooN0 zi1XrPMYa@pN}lE!Vg%K*zSo}n;iJ@NIW-InhydlR#LVPhFHe{oEf7Ln<7wz)RzkGx8bVyxR_t}5v`AX{aNU$Q;-2wk?_&G z!LD9xZUjKgrIQjJ`~U2iKQ`Dnk|g8OK_FJfF46w>*JnA+JI8W zD2crep25V2F~D?owRR64oX#dZYJ~Bl`r<5%Qs7#_5dG!iV*srk#r*dfW7GuB7?OeM z$81a1yFr`9fL4!hSU8|XJ|eaPQ;oWf88J9wDcxtw=^%p^o%jNgOxPQ#-dj+89rurtf=e5;7BP4 zx0%cuf1FXbW`xyM0)i;m_v3}<>ek{woZZ0W^13Y4kYJ=%s!#k~s`6hqPOwqfqofEo z)lcuAQIhm>GW8^Ly0$T9S8#}BaL=AczvP8 zWs|OhrQEN(yokD=pQzSV zi4IV+WHR=SV>5rmr#&MgfoX$Ql#&!W^Py$uKDjlQ>dZZz1Xym`*4n>2%2p=-0 zp3v^??eY12b8iFVu@Ozu(z@r0`N+JbE6IIF0ul_1%o_}J>k1d$l1_{!)%$!@lpdR% z4;tUQ>8GB)l6!=sM}nX}Qd|lbt+g({N|!N{@_w@l3z#xVEBRm8`&sT-w{Vcy9Ys>) zzG}D)zbUm{msOXIQs4}=&PEVvSJ&%YUPtg{>Os7=Mm3KeT@2A?p5LGP0iKqR?pVF4 z0RAnM0xrUgQtgn5r|?!ta@`;Gzt`HV;%==~B@tT77JCak13UdCk2Fo=Z>vp+Ct59S zL?4~bOGyQtyIFKP<@DCBYl(}!{RrwTPCD0G`C|B4?&*DdY_P+L(=RJ@1#>_A zZ16=pL)-8Rv6WS<)o&Erw z-iYj0AzQ=e-xAsUGf({=d7ZNetaTAOVL;NedbIwHZ~klZ>bwTXHD^0r>sgY5zDhTq z2pZ8TEX6Q$i2tp#{F)mO?~tE?vEM*E+)94}go#Z~W_W8*^`(~@2ISiRU9sEWC*hA3 z_M0#mln_1`ZLzcq>r*lpr?Gq-?N4|cBb>;}Rf@lR+g%6+X4K;Fk7&*Z6LkEl1_24< zdT(Y4vJHfl`h$7Z`K0p!Lh`g_MDzCO1ud8a?HKw4*op%b!sg3Oi(}CuASc>ERWb67 zL5A?CQ@izS!`tVyLXRz{iaZyIj9K9NLubVUweNM)0i^x%OWT6OQ(~-F=_$>BCv4T^uSc#Ibibb z1-uvU-wKphMLdlIeU|eFZcc9X(wlr`s2kIRwHDCIe#t2<4QJ6jSMV3(X%!$w(n9Z2 zhoBMnF-Lme%Zsd&mD~OfRcP*o8@%Wp{hSD5iZglbL#G=BLNataB0;1k-y zk|z!0o!jtB$otNS7J*^nmy)SC5R4{yk8z~n0T!Kfz=s+Drm_gYH{YY0AoA7Jp0b)6o*%g1?_R>#-J<+@{cm!D zAm>NXCBF3@Q=#4eDGLz%R3|EA{GBq@^&Jpg5rm)gI^II*T_ zN@k;4lgtr2MadN+r;Y0jYDgrKR^naRaNwe4%p=D5R{Q)7iM^jX98sdMAj;#eQ?;i# zyf#Py>^5tl+rU|W;q!=DnwG&q1upX*ld+X|5-Ua3bEFL8iY;lR4{l!5wcY*1cgS24 zO0kQBV|W4~kCw*Pfc!{($JDEq9+IO#-i;4|t}mty375YCsUz{uBJTjA?6s2zQp%mJ z;E##nU?4z(IN_D#`|z}^IKB5oR+%5K@IjfarEPE4hFx1wF4AI`X1#nJSdbrlB(q1M zZa0wQ$NR!-(dIYzm3)glFOG~D`z95oge*4aS#>ykkfXF(aj!tTgE(JPo9*#7P+yO5 zEegJ~S#{J&DF3HoyJM{ z6mxa%>F}7>tft}`Kqf6O;ox04K*v{x&Lg^A`Wj?15pn83C@seP#c0@>T&@_fMQsjc^v?W zBH*j3c6yQ`;#7Z5&Sq8cthDHRRa81|7LO-CIKG#6HX5Ju`8Q(ffhe8ZR@LnHb_E@zr}@FO_@(4}JG~P_vrn@Ud32iYK`ilj@0q|t zAJXEb35%^SPFNQ7m-ML;Jzx61-ad3Z)IgKp$4ANcNu)R8rKCM=Bm@f*Rt>Gi>FH<( za(X`8FL?iWM>`?_Bj<8rWJ@be0KT~K8Dj7SlHL|WW-25b5Vg$(O8rl_*1`#SQWB}I z9pLeo0&u9CiwpM+Fe-xc!SIhUIgL?y1v4^Ey(;JfyIjqwU3Ww;?uMf5<%jT&NS`h~ zLz)oEB;p4GSd?sPI`f~<&v52Df+GWf;vF_0pqm% z&6%ikf$ZFL%4lKV2U|85n$TTm~5< 
z`6<<*tJ>ol)DWybmEk!rgIcKD%9}xyYQ#446U2cUuo1El(wOCMg{M_{NU{YOa8<6Q z^1+u5Tz+Hvm8~=!rlb8gE>SshRei4IpcbE?M3}f=kn~=b0QgaN5G#~kwVQ>+EPDYYaX&By3 zK9_c7pLct89L4a!9iFKd{Vq=hu0eve5fKYb2P(mPE;)K`khN|;w9E1Ro$YCZNeuD3 zGo_XBDxK@a`*t7`qx0X-ef#j)3TtX=#>9Fe`<+=r)Fcmu4m$R0xGO$4BLIl6sjFLT z@)l9b%eq=8u{Zw`_Urw`jC(idPa-eD=be0&@dMjZcRi8ofFoCV@e&dE!zS|?&3Y~; z!(5%)As7 zJb6_l^VdAp#kxNaq^efDl!U!1}QMA7=XhVr8KgL76CP z>T02VX@vu{h|vDg)Gp+{|gEs4Sl zampNy?8WkllJBUya44~cPK})|yFn7~B>1STj;>hy4O6UR{JwLbCz$i{6eCi=g?O^b8zN4l{)iMT3M90j69;BKT)Z=+b38pyQO-2H-E^1Q+Gg^e1h}UhjQ|i5aZU?jG?iuyj*FwtooBU4n1l9 z!>A<9#qVDDa)z=KcI^X~rzS77Z%Ur}1jcRFgITw1a{TidCFdCusZ0+lI@_G{ALeCt z@OBVolp@#KM=M3?cf_wVA!vCQdW&UhE`vfe(#txGCnwlea?w?qRCGac@2hx~uw`&S z5yr$6T&=ady8f-SRLnx}0F8Nqb+WYH+(bJRQWWPE;gxsM>?182R31-$8y3u&o~1w)w}Kf6gx(INquTkDOdi7K2`|eU>BT zkVEW0if%GZtfy4(+j#~sQJ)YXO5I)@F8@oPuFF5cuTiSQ=>pu~?1V1mo%gw%LX)-;j+g@W^4}HmMv6@C|u^YY(7pamT{_`P$zo-bxOT#ulFI?#JP2=mR z7~Q97K9`4j92kqNbp0H;sLlBLLy|yd>P9{>oAp}X)db(n9z2#tl9yU70r`qoW!5GR z`r`v;HO2#5W0xc zToGDpSchAaKm=SCMMWPU6EqbAp`_9fU)sx&hxN^wEH;i5s<_gy13W*UGT@Ez(V(>EN(~hH602vja-*mLKi!2JFi)Ir7v_n9jE-wR$y>;A(&6<%uv%~tg)oV%7Wuuk7?mFsQ zGkiu7#@Dx7uAf~JeU0^+r-BX*R~%DDU;(c6+_OrpN|*?%_^lS`F=uU8`02Q$`f~>t z-SPGzuX598gcgpfPN$ar(dFVJ9m0`zR|ci?uQA=K%Xtbm%DMll?bxdTp=^Hc0YX?) zO2*;r(TIPc^z{|&&<~C*vxc8DZ)RiwF(pVB91#i&6Ses=M5PtVd)}1no6yqB+p6x@ zllT$BSS#y=OD%n^t}`{~MaInMne)v4dg&=N6`slSF;NOdd@uP>C{=77HB)zblXlYA zwOwyN9~|qCtRs7D8O0Zb``12n9Mge~$YRL@3mnTj6!W1foRW6YX0(Ua_p1l3RFLS` zgZryjFbee5amcHml@Zzyx`?z#TKI%X+Ha?FeV>}shCiNOet;Me=zKLoFh=uC;h%54 z6cp4MkBvcLNb1U(IGggwp1?%RWI_aGma*Ro$X?<%)0H>ncQ1Vbfe&scF(1~b4yl;E z4HSm=9wQgblLL>t^?P5v7;KSa#K^#jZ@TwLZe~*1_|NWi>5u_#mB8QrC+>b=}_guWf- ztaZi!Crci+QES!lxO`wZG`@vUk&ZF~FfMC@r~Aks+b9giqV{!%W#UDe?#NEhUULsN zD$kp@B7E=&19D*#HCcE6Uds!bB*R42H478>0pU7griJTY9L(A=6(6JC{)~heSRaWC-LKOe%=}lK1Lod4IPrEUY!Nn_$M>@=?T>T^3vN zzRPM;Lz*yHA#uE2H@nt6o+1}G+Z^g~O$4@FTc!B{1cWp^&e$8`u92C}3$-EFPQpo3e~vnj*MaGA+BRhGsF`@@lWN|d zFItl#Pk<^&U9|n#AIQvb&=b^tS*<-Q-adlGL>BE% z1Z0Q8bHcO&?_xdEM{nv)oBB3Qy0<<#841N*1F4hB`EU%QCp#;XxZyx3tI)R87nu;C z8p5c#v|4sGh05#H+h!wZ0-(}uXq-wt1o$zxLKPh9)XznwjL{V&6q|!OrdaK&w#0)f66BO@ssmlwAwy_8rU-J| z4;62q4bE?YoIt3Wx+Z%@jm3nvjIM<^+O<`XMBGZEWyOQE4NOtF8o-de>W!Id8*!Ob zKZdAI8563wiaDkTI+dYliiA9cy2TXMUTXcACJv5T(B$A({X+DQ{YZO{fXIVBH{|Op zddXe9uHS1MCSJ=-bv#5Lgg5viau=HUXz<#s9Pv=qSs%4h*eBoIG33wg=RDYfpGQA* zg#wg%p@sG{yuapm`!q0=UsOJH>ms)vO48lFYXh>K>!ns{IM8(t3xT|s9(hG}1^BRD zVGOkFvD|wro^Y~Ejp7z|X_mKl13@DFB5+9uNx=wAY?AhG6&q`g%8o0%ha^|P{?qQ@ z*L2$R`%11@CLo3*b$n+qb?v#Z#inYmblIAlFZG25x4w5!(HQ+YAwFIeA?Lp}nvI<~ zp;@VOh;vat#Hj7=e|?F>R$jBxxV^oB-S+?4ozV?33%T~LTfrK3j`1-Z^?Dal+j%4* zjBCKe4Qbwg+mh_UvY5|r9qk!7cG2tkZqz)ZbYxZnn6g*R_q54I;7axNxCAw~No4Uz z#(L9?8mF_sWPQmG^}*_rE1Q-)7p|6jw!th43D8X#gx>o6fwB9KQLi6nTigB#PgU@) zfxa!3XYMLu-FOhe;$G!u5=)*0V7V7E8t{C4BHTH0nw=a$%SR=j42 z9kTjmqLj?98MT&=vHDvq3|NC? 
zHC+EnZ;jPS4;U%*R^^+hyL#)X#7|BZ6+UUK%>{0U{o?B4_Q-RORxFVUK_8 z#hnQPGBo(GFv5NKkM+s)6{kD0y48ZR+msqw8wEvh5`gCd4QQ>-4t~5@vT}<@-vULg z&|~mmr&1d0vy`$GXIX&e zSOj(%A$8ZNVxTZ}stqEGl%|*czIn z*J-HGPrFJ6T!f|)1-O&M$1C-!we;G!j9(fP2Q@3ZzZjE6uaR0fJGc;!c^RHZ_Orr= zD{Hu%2`1-jn_ewsY}1)EniwsEulLQ}rWi9Qu_0KUEQ(1wDdGgnMIIj=;?Rk1K`$M2 za_#F!Tc!!J1MKyuoF$trL*Ah+=RGcWOF65v3WjRC&x~v?s=VhX^C8)#i+yID>9)pr zUkU)*`;Gs0y4kZkzPF7^(K?8e{xa&gFkon^PCLSKAQkZv)5lhNFH5pG*&Ih2<;Sc^%?Z*_%DWaZ`w`JHEv#QfHU5*B^*i5>_L9diXT z+y;8{tQw(+)>jptIOBdBqO?6g(4%aAS@X#8fjta;%YlL>iN6j(I4^hu<$mB`BI0cHNTkJ`=i}2Z*z$i=SpKbiDsNB}|r>O4By~^~b&%Ec{ zKh$*YI%e_2?gkj1=V_#F7I+LkAP5p5FQl-bP?#~wxTVCKd68b{yPfAd2s9fj2`x!F zSFZeV$}48x+)bMV-j;-6^h%vNWzM2ieMf-pN5VDJ*JNF?7K%y2{*|Ld`PW47krzRN zqq5D7>bLJbg7w|bY(s9yy}9Ak4DZH?C3%tOg4L<<+nV>$du>CrpQy)Ytyyy94Gec;omzKWxWYT8<&N*StFvIj-aC;+yxe-MppNjorF++wJGc@~%rH zU0EIf6!Qivh1uVXdExyoQo?kdf9bmt`7QOb_Reg^p&8XthohJae%A6Z7;kTyhd#hg zl$)r99vi&7C%#toa@2i|gAWkpu6+lA%d=Q+sF&ANT2yCtuCc|!W!ZBs(vo#DlSiaB1V??p*DV|2RLuFAfxAfF}s!U$_ZXaPBTYYtF6%N^J@|EWyzYKOW# zuVyr7> z<{^N(=!nhrgC|@Oq_$ULeezo><0V4*9I~VL}_g5dSp3;7XS@N&KQFB5Wersw*R)vbZgUK`cvj)x<>kG^`dY?PuTt# z#uR^IxOq#ST=slpRm|ooeV^Z1T!H3x=TV1pd%5jZQ>S~Md&jcvwF){Hnl+huT?C$u zJ*q6qSdKhiO3g&>%V?6eL_qU8C|w2Jj2DRZt+=d-`#QbG-KLyfeAM|me?pKvTT zQv|OpqOp2J5>jloOtSD%E5Ga01?B(0g#9iCxv>|n9`*eU)Et*IrvhQUmJB*CiYYdc zl{h_G$e=KB+YETL@c4nk2dC(%ug%}?u)y$KEr^>MJ)0W9BE&OVmPUXn>!*K z3$BS`=sjb@qaurDPZLE~HA43N8fR58&2Ey6emd?bA+AmfZo4eJCEZq~u+xj;Hpv?p zga1rb1c5#NruK=9U9N{=60SA#$_u^zg)50afvkXo>G@$T7|DtKF`nBBV9B{FZZ8)g z)WdM}W9U!t)!_*|f|S|{+e`6bZ%vY{3a2&kra#GwNh8y&Jgn-6-uQEn;aU7-%i>a| zzk`d4lgVL5Hau{p!@Gh9WSFl$;yA9nRchcO%7oNu!mwVDG#_s-??@5P09^lEcVQ~u>2%c{GhO7J+MP;Kqu1>xoP^%P zM*VJw0nOfq)%!DrH7v!`NML)H$GqzeyL!gT9nM!Ol)fs zAe`XGZ5H6eY8H6F^CXdVmW2j4U#&bxM7TG30V?c>h?~M)Luo_8{RF~Pt|hu8>{zMC z0_ma`B45$tHHc3zsU++!=h|N=SHeDCe!E9Q4~n!^HhxRkuBstH6|=YalZ%6=6cI%D z>qZb!Beh%q-?jySd7kXNKk4?=a?(@3c8O~_3a%e$*mV;9u>Oj%lO{o}NkRzd>?Kvx z=s1zott{@4$UYfJMTS@o94e7C7J>KjV%%Sg0K>~-J~g1eJ!Q<1@UHOmuJolv+9F0o zoU*>d-bMUOtI03tTw4S1t9j_Q^8}Czg2IhE>{;)9X-PON+}LYiqI>__4Wu6qV?x7m zhY3Dj+ySL^^V>`P`2DEOJIA+SV}q+BpYys;moF5FcUfQKco^?cP_KM=4) zDOqQPT}}?-!_Xu>zZ2bhzYAIZFlJh7J`aveijNjGyEBmUqpTjoThEibn(9ia0Q;f9 zpqp+Tx!d@Sq}eBb(u(lmF!Ly#MCR5&sV~LcaB+txPL6AA(SI?{EncT=xbLhfTCI7h zTldb8eqOq`XPYvp(64v28uK;HodHu-k63}pT`Wm_S4hZK%4SpYHv9JT&EW-~V0+f7 z%H>4`gl$tK8Y^_SOhhisoRSYO+!T-+q?L-n{{-EhfSQD4aatQYRx!0ZF$R2g(jEZB z9)2xftLxKFs};sR33wTn7EzO z;kZ7qa>AkajsC;{bv%8+85elDVbPg)(2bwnp;G2L)OOVhvAhCoAnsmfQ!o3(=4S4{ z*us9@P1Ug*r^gO64?JuJwUuloV2-?seT`6F7^kNMjow~CZuO_fe%qaOaxA<-B9pjTw$i_3rHu zEku;s3Jrh+JIYrbGd6c_vzLpnv55`35s#$KT5~ibqTd^!E7YFtfUxvSL(T?1^_$Z=EJI~Y51=CxZSVkfHVAyClv#` zvblLPFP~>pbvB@Ljg%DK=lUO$Su9ApO3pVB&WQujjNn4v*ccjg`PKi<_|`%dXmMcK z{v|-OQ2)+tqED*cD(ni4Fz*|bkEecCC$IlCGGywN9}-{_*zHTgvT*#HoZr%iv!h&` zehHyA{ne#Tuz<3qg0cpY?ZLCE#__)0(gHIvFd%cW6Hc{EZ$WE~^o5BwJppXf13=Vc z0;rGOdc>#T;z~QCH36L3==R8r$WIc~Jf{?{K+=yiXmQRCkEUyLLIeWdA&#BzU04u$ zh`m)dR>eE~vHFb*U^2n;-Z{ZryS#*vz^!aM4*T+<@X$1*`-d!Vr-hl(m+|I{T1W&CuUfKJBK4~jVXVnu+~8wev- zi;V`_sCo@dgz+UsO~^)x+TLPX_Q%8yxx5XgH1uLOsifXuRqY3FutHUc`*1n+ERHrv zEs(d=)~~%mY5~#}E>Ho?X3Mw<{cewPVbqu*1LX|``jQTZQZ45%Akz{HDOUQ2NUC9o z{ds?6h1{naDDGI|T@b2mTPLVTRUc?5J7CHIX&I(P%E-=oC@v&2;f_uU*OwS~s&}O; zJ2_=9L(LT2d@;eGyblJH>|tS7kM)RyDMcoERn6EJ#%iIc3%dQ4%$@&ZVw z^20-<#S!&+J|jXixCqe(Mi3&>=V?#=pFCi6rH&?sjP&8Z2Vq(ucAniGgAm zd}yOdnQGuA(J>OMyep2B!_#{iDOolewJdB*aZIMCEXA1KHV0-M&y!e_*1pD3rVlPP zAdO*{86b#CWV?2WO0TZIX)&DZFCL7i5DhZ9M%TVKF4vVSab$}#>#btsGL(-`P0k|f zquSd>e;e>vvX58<{$MlmIu#o!C838|q2$GCgqMI2q$_3CqPtzh%9S(!6YFPH&Ss<9 
zIDgV9iR3|{jB%Ys80g4|(4~1?#N%c!2n#}E`cBg_?cVc00NEN3@O2vL6KDw$Hyz9t zryQ4--aF}DS2m=K%G6Vk>UeRZ1a};Rq15m9ukTUfh1|W8(z{+K_lFDC-@O1=c~y1=5hsyF6eC#&-x^fFQ?kE^kS5?X?YOhz21;x( zL{Wz_jo?2F=H2H1I_y$d@0zJNxnH?;?3OU*O$519KB0(BqU8r&9YbVEWKb5^;ohJ1fJ9J>y2Ye!Q~aP` zQ;{L6vbLuK-tZx#YXF`Z-$co(q5;>3{lbgI2e6clgN6D%<=8iPqo!}2X->&-wD$ow zZw;KF-2|ar_FY`A(yA|jhtF#iPr|#(`Yg_yl9r~E!?HWFJXaflQ`)DtK+8AXqXq|6 z6|zw;Z?uUkfkC5fqGKyglcxRzI`vg6yU|Nxuwe~gpamc|L}gd}yN~&u@wK}p`j{=> z@AICvP`z5mu&vZvS67Qp`$QF|wX)_0nHaqRdPNroDKOfvIFoUkF)vrkzm-!1$aRiU z_RJ{heKuMd&k$@6P=BP;;%gk$#kM~V7?2eiM=i?`jl0?tY{0#nlQA)f>3c(```k3D z*A0XtwP+t{2SPrlpiH`c>?6A)hUH2Zf@nvPYTepA1tlwovRGqP5O!<|RCHKiA}x3c)F;e}AV9s7jEmF$|G<33bEm z<)tCUufW@;9r_4JlF4_2w*l~63)L6mkKS`>T`4J~XZkcylV7CAE;WwJ^_I@RAsYlPo=`H zTrGhKHssyr8*VW?X{ksMwIv z!kxjv4`?w+1-A`}x28T{e4GBhQ$8#ezA*OrQ42R~{xu;HlYb!yk}$MuBsc5^9bDLk z*9W?0&_Y%OjI@+Ovz($|uU7c+sFYAn>!w^-C;U|QE}$Js$5 zS~KblRZgpihJxSmx`)?ko}c;(T(3O$AF87$J53AC>-(mj_@WOF(YqW^_Ez+?7CJkBCDH2x?LpJq2TGmhr|2FYZH=%=2GF*lCZF%$b={f+bp zWxv;O1@^uI^Xol*j&Z)~*YUlc<+wl>)YVE0mVNVf&!;^A3+LjqwJ$$a(C>j9TSaN{o~ED04{feD zOMsZsIt|c9nl)L?NYN|5$>+FXE(<)Y4wj>St@JeBkLVQQnZOF%e}ZOt^wp%1vmicO zhP$!qi~yhpJt-*xVn!3-KCnCF$=Pg>&S^x=c_jT{9JDFH2^fa}6O)Dy{e-55GpT!w zU7B?&@c5fSkD=K-(=!Adll+J)dd9Ym1qNKmz~NYg#r(=a*+}FABYUA3p!my8 zA;QjLsw>;?qCPOpf^yV=Nb<0WMokQ`Si(=h*9Ex-17+jv$`U98_wWC3+}-vt5gtRLO32< zuxR74$_OiAj-RwY0_tuF)a0JM`G>e4kO-=04qf>;cn&$2?m9VIMoo-n+rOIzyzbrV z-ts_gD^(^yqcqhLf!eVng%Of^^1*Nx6@yO!KzRj8S9f{0c zujVY`PXWIzitR=7fd`)jYY#m|<@tRrwMXhVE9D$X69 z=ezpc)HLstva?el;3$9SN{Uvf#W?f-_3c9xKl3c$WS2kV={)!>!ajK|G%+PXF4UMv z_gE1(HW;4!i@zo6A^TC!m8=Lu1rXI_SGVsB~dFpwTODw1} zQQDLy4{wyARN#t&s(qePdBj)A@4V0Ofv$<_Om>GX$a6byjWf`KhMxthc-&3st1V4E zk&~FbYWUeIrc$1i()~p(Hi|o+28?B{43q-8&%I{)AI>uDrW^{~f}$h^k!eZdbny#S zgTCNPO0hqCc1wFJ+;$hnKg2pg;HzzuR5K*WUDaXI|0C-wl;VoEZE=U-u0eyly9N#J z?(P=cT>=CN?(Xi5Ly$mlZzQ-waJxIN>Q=pT&lhxc_ugxbIpz?T!y`=;?@Reu02;`P z8KI@YS?OCB-_n_7VXkA0VVa_kVVq(-T~SCg=;^X0kP7Jr>z_%de~Tq=V!D*4liq3t zq=vZFbUHi}3(V1oc>oE(g#&8oasT^UhG9b0L8!7udF4ZZbl*rTdIDwut{%3RXGmku zl2KsKv51%{F2 z*+@#@9KF{+aJ$^cuX+7X)DINDD({(8N2e(9%f3#%H3$yK|DqN*KAxEVZYR5>abtco zTVlK3?7?=!@r#XVOs9>MXt8X=tjP~xD$UQNGk2wyro)C9W6v8`Xi)=xBtq!wz1W_Z z091SyGQ#&*vlr>nsI+mOb?4BKnbi=jHX!J@V~TJ3Nr#Nd2XSS^u{Cvn7pVNB|?rf8za=v^&%Ua!r z&`EEY<&Z3XI*Mui&O-S;Ca?Fb>~i1^r+GC@Zq_Ruu81qVbo>f5U{oJ2HyGVI<4HH| z&$g=fdHjgd$j+L@-FI)m+dws`&efOz53W!0kic)?MPUg3a?!%#x#DJY%I_n>YlyIf zsG^}3TdF`P3q62IrIHe0N zzR?aHfav^tDk~%zMG;&rVQ0Nib1n5DV*-VK^mcKVO(&G4>7z;xm-#|%Jt97^o3E&r z`=K>DMvZNc{4yKDpII)?Il{7SGryB=f%!Bm)tC#ds|Pg+B)(R<=(gCVLN&ClyT;Pz z5+1owelK0%91Y##vAueJxtV6b`&k|NE&Y{}c%kp@tPQNm+^-}2JT&COx1V7qTIINJ z*42DsWh0^an8DkHZ#%E(5#SH2kkeF$~g3Vt?oZiHi*4nSfN zdL*Oz7JkwG8J|?43^FtVZE$o2LZ=F;7XFmiN2J>6n$_gk-TuBp1;H#7~8`_!KFKV z$7Z{6KU9mR&MzPTbOg%q7$|>rU;^}a-=bf)2ZUu09eW{0E!j4i#|4A2QT5-LeWr!( z*P)83%8OcPYdqg?>jFRJ%S`Ce4Dg8yLN))>D?&IBhM?o$@YX2WT#ZE*MEE}SkL|i6 zE0nJonu3-|TZ5gG&{ab}L-7x^0Yrz94)b&-SJlo3wY&7I=oMO$QH{3rrv}t!?UI&b zi4Q`>`7Hh2o)*<&=AUB0bfSfJHsX3|WU!#>^fU6qVqa4zU)|Nl`1xQE;{K8F6QHHi zy_>_wQ;-pN2dZ7wh;p z*(QD)xgKU_)(+_`%8>c18?-W`^`zK^qPIUi6Thn{OP*q~Owfx^w4Q)T)>lj9LJ7;c7-fI>!-E$DpQ z>CM?AD`OK&H=_dEiTkoIx73hdJu{^Pkwqg2?TmsfoLs+4$@==e&g8P}ZlD{MWxGZz zZxjP-)GVFN*X$^HSo>)QS^IUrsvL>{67~iuq5Q6XC1p`JxUSSF1)eas!J4#0|~t$WbUx>YBp6J z$mzYP!q)=0dN3ZpXX#Kgf(~%Ic(s~Jq;8fMHOpcjG9g))iY^X%JRI&L*3R9?L{9v9 z3MdikyTc`c^AP}4=MHv#d@BsM5Cj~SagCG3T+D$VhyV$5YI8gF`s`xvZy&DgInY;? 
z*@2X;wEQ^G*X>#(kf9iNNaK!rC0QPzkkQK?@N85JOHvr_Dqn@>*wYt;2@dOO#-*Mq z(s1Ye?V8Gu(QcdZYnKrB_^IfUmEUmqbz!w-p|v%k!C*d>E#on(!LO*Iy=Pu-(5`PAJP1(as`Lj7`k?<3TzuMAP z8&ka86xlDVUlhmE#$QhKQSI00>*i|(#lhuolhb@f&k9rjkQqSOP`b~Uf9U@+$I`Dt zo0CxaJ$l%wJN{M)C$2g)UOtZ@$cTu!8{t9YMx-7&Ez+S%dj+5C)vLJJ>Qyng76x=o z;5dXH=3>ET7p|iXhpr7b%ep;WmE=+7eArxPywtF3f=U>}=ax6z>w6xD5ef{L4}lGr zk7Xv7$0{VT{ZVCG{R6aKgwfR+@djuaac45;(cY=3Fld0ATLr7_8>=6qK{rG!Zgs%7*X=4$h99aCIS=$Wh3L2L9{OF!NC3<;y8O)S*)UOV8X1vO8Pzs?rRGF5*3 zyO(ivy>?>MEX1g%=XLThDX7&IkPfRzglT~u{}*C*@s9|7YXJrcm;mxmPkcpX21@A>)RnukTyY><%A{{XsLKMz75 zg!51`%yNwjI`#p!dIE5|y1!4b4}C5ts6)e!y*_?sm{?&{;3PowuGq%+9dlVe_?EIg z3fC_ecKM>{9XR}6@4VSmuJ=a2#R;{-Ida}5B7NS$Qe8W@Y`AQ}1bK*hXg?LGrBngoOOCc~?&7cbyqeCjMDTc^UZMiby2bxulnO2EW zr5=C4{Uj{4r7PY3A?)O~Cu72XbL__`0}Zy`c=EO)t0p`=x@(0MWiWBsz%cL-iR~|P zjAWJvBNWvOB1P*cu{{XwL{TEK+kUR?&P2$$cBwBT#=zGbH)cFXp!?$YC;^|duCg{~_4I3(MuSW_UjT@e&}QAD-1e2jSW?KRsE)uRN9=Qw`ZMyTPn zuO4)p7LM7w+0TDU$qT~C;57!n)~`AG4&1t#aM@$ALgV$$1wAEhj&>j9O4Ia^lKHnI zMO^eD2=1X}ejVrw-tI6S+QjGaposY9yAOS2bIh<4 zS;q3L!ISdt0=+fceslg3*c1P-cI`-L+?c$t8F0C-KK?mic2N}4E)dA+uyMy zAzO4p?9M zw9Gqh)s9lEE&;M;7p*NF)YR$WPJ9?4rT0hr^}WqPj5H^2FHYWl6yxT^8z%MKjL#uS zaN{5MFQ3(KUyUo{ceTd*^oM0TC673yO{=DVo%#fWhY>cw>h0E&3V)Ypa`*vN(Wt{o zLqH8cioS5FwnuJA?m`dYP&Muq#2*AL6y*ihOY`B(%KQ<1zwnY8+5^Fyp;|_M*aG$f z2we&57%poe>8QrqY`nrer5)N`Pte_<9fxW^U%o}gA0NVOP-@w?edFs?e^)5Ye7+mHzAqH4NyVc2f_BajVT$S6&uc4YJdl=D|XH_VB4n z`voogk$lT*E3RdA}JsA0J6*VPg;gp`%aHsZU^0hl~va^ z4z=o{PWMvgTs^s^oxAVj;FY`Y9MU;862C6^{t;V$c$hO!O)@+;zp(kNtHU@3B`#JE zcE-i7N(QymP4^Fi-eGM`34jKYzGlEUx9%)A<^MStAmVNN$tOGDnzs76FtraxSdWUE z*lY))$3`OhIL*em?Il}s!uJ*diCq14pu9;t!DQ|&zG-VB>WQ#4&01Ky#h@2K;!-Gy z&y0)L)Qf4DPXG??$4sx{wm9kGckVVlA;QvpJJ06(f+Gq=oc{pQhfuIFJeZ!%F>;}8 z35zU`&sk6%UgSb7w=XfeFSAJPReda|Vb+)8m%bweR?WJJ*q3YI{j9%b0yr}#H83DR zzHda4UF--w`5S4+n(t?)R_@laY-+l8- zZ7aROsUv0opEaON6Y8mw#rUiIIXhML=QPz$Po9UG&t=8m46FVSQ0eHhD%edw?%71R?g`e2mVWG~~UQUp3iIS!L5`v_W zWcTPdCn_lcWIP794%+c#9~~d-RW*Q}1v^BtS0BCG8wmSPmJvm$WXAO^+zHp(gT zqE+bhY%T?i%R=4t|Kmo>C;F!=LH#I?qgN_w-ehl*yU#OD7>?SMx^SNRnxK>ice z=yT&g!RcHn9OrQB2p{M{V153sTyH6=KNNnjk`1GDiT=`ecMp#)7F^IG%lWl82;3`3 zQGB*hD(V$vwA3uBHR-w4hoX;@$kS~4*%#M3olGJMf1~M{nRrRe1IT$qz(^cb1}`IU zSW@JE?(=}2=dmBiKR*8G5A))=7M2wApSvRd8bHUVBu@wH9u6f9;iK2o{&Nvwt4gtZ zy>ntlhz22`1CeJro3@0>J|zgugsr561v{>suYc)#SP)6iVrNd6{FDc9AUCV6ZiW#oq6z-Rr&F+ zI382rkAWGjv_AgxS`+PKx3uZ*5^-eC3qcl5QA6!kbEPr9|2$R=s(^1=yPzcpc7-)9 zh!pD$Z7`GGE#XP~Xr}A@T`?AMVj2XJ3VnC{su9isX!J!TKZ#>6$3X#!jer)!1PJ&V zFIQB3R|igjcVew0jc{x_j~gr?hNu9DA*$$03o=65(?Q$WrkU^I`_gw2l+8DL^y?q% zFF^ER-NhiXzs}L@hCx!ET>g>>e@f`**D>n^4ji}${@NwH7$IxGD1XZM?dCs6uui?; zUwz;^ZUav*k>cLB64BI)tr;L%E3PgtzU_E%8{kM_G!=3D25~RfKoPn=9KaeF?TDwHe}c-vX8~=H}=kN z6i$y4P+{Lb&LQhV&&^Uf((zi$1C!xUjQB=2i4#O9=*Gq8bq1m)#DiTd_EnG=&8#Zx2N<)YEGuVVjf?**>db|?B39V zYkzI#tsZXzJaf2O+16ondOA5U)}AE7<4f6INZ{k?b}_sakuIGFif|JbYK3;i4H zLNXk84M3AOVdL3wBjCAFz)S z2m&7PLEK=Yp5;P7B8b&|IK`A}O^#JrN`F61Q3QR{4qytAP`y5y&P(om@<0aBg#%w* zPxw`R_e4fpT}y=y{!6 z-nYgTv5OB?@TmE-y2<7}8(G@L*%FG>3BW$N(A)5;o{>O_N21>o*Z%_={ZohNVPQR!4nEa{@UjR2>j&x@F-$?Q;-#cqd_LAi zJWTeuxoocLXStAhK|3IH!J+&f?IK}T03B)~{MVn)m1XhXJpq~LE<6I(@I3-Y0=<#A zYm1u3rx)@=G&$Y{DIn@y$*?e|V z&o3x|R1r>nd5aS7RTPAc?PnZ9>pA>RwpOH27_uKo;% zN?81M*S)@@#F?r)`TMp(=yzvJS7a?5c&~!C<0%UY$cln6`3B1r0D9UDH0?2}O4Amf z3$lW$V2yv`F&df#?VG-g4C=>$HMiP?evj)rA31f(2TBG5l1G!)`kdN+E zhtpyc!n?B?{*Lc48)iEYq5P4qU+3jaHJnL#oa=?^aFL}lwYDyoS)Bfu`=<*bhnDT#?f_1N?qYwH7! 
zjuaU);qP72s=>r>br7H5-i%$?v^KI3u&M=}^Rpg%eT}vTnkMWJL>ZmMhKg{Dp!?jQ zy?Z0cQA}27FdOg@0uZ_VaO{MAC+Z8yn~{^xog$ z15*mJz$ZJbg$#3RceJ02ZBhIU-PiLc0uM3{hSfY_k;kGFfYqi6*I+P(jdkF}*Hryo zDC-1iI0y_m)W=8TScHnGWCB(43I1^FA=>84SILENL2+|v#z!ci&0qHna5HKbd?5NU zAQYB~uDkEdS{J_#PEd(25I7C55$S-}pAI`)+O2{8@acgl4AJ8t`ni^Wg2ydO0V?I5 zDkqbt8%J*+H?YdYpU^zK{(7;X?gt5&rpSc|2L29f^oH0=0AUeTy;@QYbROO_@ zY@ih|6v*0(SPJfh%8;W#&DZIK$2;{Pt=WWC-rwMgL|_=y?l1ssXHdzEry5tHB8gJbkb2=)`5P=w3TK!*ze+oBF$TTw zyS0vq>u=1t-fQ=mkh8w5>$5^FLvC3SvXxu163z<4`I~B~JBh$~c1!oH_1^6;7SBL1 zm|v_ZrIN5LCe3L;ueE}VWO9j4i+mzF-}8EIAexF8*WQKvl~ytpzkL2o?j993lT|?X z@LR7O30JaG?f?rN=8=y6qZDj~s#GUFKf2FL^5BzDD(?b<& zi%RmW1S=6bc~J;dWPeRW!ZFPufJ#Lc*NizXpDsfX6136|ETo%~T9|lCH2t`bnD7k+ z{yMKb+_%0np5&_pm&x6`um-y)*EhVJa0r{xz_sW2_dLJa*xV;>QgfR3pm)`@Q3PDS zlI;}naR6q>T8S0)KdU!iLD+kLJN-@7aX=^BxeGlSlrut58kc%!AJ`pjt*y03+sjH zo;>GN{`upyo-o?o8naQkaI00t68QUDOH`X2!)`o!JmA!NZ6Dt(RrNsn2XAwA1vE?k5BM_0{ zJ}Sl3Io{N4mj(XT`a%OCUjl`)7BT|Vj~r-HnYAie;Awn`Y5YaI+Q)OD zROQC!U$Q7`Y?!>9RKDvSPqmfq-L}DIvUD3-g)fRP>L$-x#2z;jT-gq#x%_SlFd}mO zNH0tb9?Q>T95aCr)+%qMA-L5yUd~w%nV=X__T{mYWIN*U)mga8X8bp%TNWzx7gXj?oX2eN2G2PZ zYJcB#s9jF$)rJ{r!>H|uq7x1pZBpVF{dvUeK*C;-ge2_szhuMb@&OPHBQcjS%T%KA zF`Pa)I5{sAWav+*`g?~93M3e!T`&bxh(Avo&4hS)hVx*7EZ>DWKRQh)Zj_r<7co+xlYiN$^I5MwUV zg9d^O*5?T&TaOShbJRL4lGXyqpIYe``*Myz3gJ#KX6U}-1s1Tsz^Y{Lp|sHoF$%B@ zPB5tu#;-qWUEU2UN~$=cWnH$~!QarxM&L?A8g?30)G~r8@#mgojTqTTKi~3Fe3}NP zj`GO`{H+n>8otSKk1qO4y>kU;`SULgX@UvP2d^b~a-K9=3mx=m;CE5MzkhbEneZ@5 z)!TLbL=@6IR^Y3)?%jGQw7lN95=C)d0;U8+_x@IGwXJ@r#LyOlNzS5zWD2HF?Hx8% z5oJo>`(&quGYK=~8n{8>3g!K%6MdZitV{5aia0=rX*_AB2*KJW3vrn#euVQWzA6T+ z(PDr5-V@zdS)J=JD5__?Era(8sdvta=0QMI2jU=@)6*zM#hD@#N-R6?MQdGKg?u%+ep#FOTRxeG2A)@|MI%p%XECkRL@?O!ADf8gRn1*_g-Nwqwkv5q8vfN6p%cFm%7BC%oAp@xs$UpI%UVlELTQk6 z0^J$SI*;xQmFIaSM}Ch1je!l0B!p|oX`w6cWLj|9Sk<>R>U2n9PNkdK^_49A7QbCr zRH`EImuT6z%0LB$soW6bLeQUV7BXe6y%)M1hM|E!W4e;Jg*+lbssz%P*n|7Ysh-PS zC|tLBdjvUAQgKFK<&;`;8gV&}NxMs8h?%v?J*q2|t#@$f04rur#npdEbL^$Y4rSir zeU)qTI7Y17;rfT&es$x%L6D3TtcTtA{sz4DpnA{pNiFYKk-<=q5VDRJ8z;4rLXt1v zv_yQ}<4{PKzNb{`*&*E9!6g=}X_X6Ir4FwT%GOk0NQGX`)_v$ z9HW8P=;#gHQ?hblCMrA3G74D<;i)xY{D3Q9_FfbiN1dCX{3W_C&8%i@X?sua_bBsr zs~wBjyda76HzP-QUE0^kJpu%a$L?cNP?YCBLj(Oqufg77=lK`G1DKbQcYQV9tZt&& zwxC^V5u|PQ=vc&T>;bB@S)Bat(r2UJ8^uTgOE1ED8-;|y3-Q1kSnF8ayLzJ0f1%ao zUuCrvi$H~BBwF8ZuH_M z2P?2dRkqhF@+VAtRpo2f)d*2hMFpaVhsU)l*Vk+F=H}-7jt-qxgVbTS=}Tv$4ZL$Z zi;2vTJwr|zkCZxR7)_)A37k(H;htXxW$}#Soyc5Sp}?c?YWv%vE216Bq5-QD-pA*5 zW|vjG$vqR3wn|UT$N^frdp-PO4>;!z3u-5%!3&U}d5e+(G?f=&_lbouX*5nM&FP=! zwrxGkZm;V77fWqRE`{r&;QqmHB)m|aHUkq}W>0@UpkuH8R!))r{mH+kzs}*<+F_bv zZ8_|gXA?!P67B5lY-}nkT-8^87%5BW@FZ?j%MaFr-bwsJZf?J{nOEcoJ!5gGu-&{0 z(rdQIsq}(a4@;X+oc zj289|Yv%$@Y>iyo{<)@Qk2#& zEuap9lyw(OiW|!>c<^7B$ii3OLQRStu#vxF3*b_Ww`~<^8KP^LH&X5PlOu_-)ngqY zxM$^FDaT`TFT2YZH{F)kIEgFrhx*l$)FAI0_fP(`)?>xGyS8Qbth#-qO9M@mC461%}-mqyhJ@1)Dt8jOwEMpwM9(Iq{5(*m1{ui$-f(B z-kl!lQ&}5mRJB%iYrpE2_;#D-9+J+yw(~$Ae<8Uc>LBI6Mv_hSbGuPf-GyOmT75`! 
zJ^_j8*I-o0i&BeUvUSU`mDDtwjuu#3t*@ie5GRo(IC%l@IpuNHO zIcQNBnh=Y>lYyip!7Jp>9YzdBWEOoDmwVM!Aa30cPuA9i9ns-65`_qQaBQH8F5sf) zn$tWq4M|e(wkPKXulryKBN|H^6Jp8WY=W<#oocXAmYyL-Ov}y-^0(v>Q(#i&smrw< z!6*C1SK;Ny8MPVBOBJF&&J$dCP~imi1I)uLDK_|n=10{Hzo-NhPckEQ?Jm1ocTqK- z%CbxQUTRC40<|`A(kgGdm7cs_baHOnsrx^{fig8h#f=L~-&yV3p1shQ6)$TD`xdGD z^J{H*Ir)G8IoP_hW0CIUgf9j8U9RgVNYfwiR{B8)7q1|t_ z37A&G0i9f@8u{X*pbB>P{ig53&o2S93;~y5fu>{HI$4-X;Ce6AFt}$R3ogE6*62~c z=jP-z2Erx!gW*Y|Afe%+2zgXqFp+L(o-P94iGWz0JT|AzZjH(yKJSl$Z)@N;ntYyA zISrozGg9c7RWOmRsva3IJWF4;}b~R6&7C z9SaBNoEH}Y2bMQR`&&7?Ic(rIf9&^r3AS?>4U1e*JUHNXNoXBIv$?D%Ri9bd=lX_J z@x*fKO zXL(BctB|SV@dtW1q^h`K+5|lKZOSXeQkzwd>EX355VB-GF0O(cCC13E{AWxeqlaU3Ap0Nqnq`0tW%XofKz^n~AQATBAB(2(% z;FuCYh?9Gs+&(AKxAxTt=IjPWn;DE@j+1o07xr|p5RjCqfKQ90k0aHBHk$20-Q3A5 z#s#|M5Om!g1{$f3b{PYAJ+rI1tOH8Ip=OYx}_MBkXwb(LT&u#~z0~rW~gpV^>j={MuX|bSgW2XS70YRkd_vlLV0LL9gy}S z&Vd6lBIe*2E`!H#_3T;$%aU#R;+6daAG>|_0|P+@2@nR0{bJ|?&1y58B;hk7`(0$g zXQLz$``s}CHd~u^O447qX<3c?JSIdlMWZZI#M}2iDx^3XUA3evJ(;E#{TiH6^a`#e zV2!zPS+f`|pwWh+o~|R+H`1t3^%}~}er1ECCNv=N!PdhHl8kjs$2izTDKQBfGWlIrxBUpp{y@p2myqX%rSI1kr$B?nkmYZra9lJd*;hG6gDy!J zLdWExC$S)tzqnhUXNcTn;xGFbF{^BMvr0*#223oW@epO|^ltH`!IkVlz?>#0+>($k zUEhV@n8}ro6r4i1x6(|qMBtTeK{M`q&MP~gIKee!R%+f`(nZpCsLaq3_gH=Xtv^Eu zZG^^`7*?dwI9=CeS}}dlK#)N5d(|oJrV;wX4C>t%JURU7ZB|430~lsz*g4;ozMp<7 zOTrX+{f{gGq$2495Nk>kU?`E~N6JO#^aro5JTLhDS&#;_Wz1`r`VrWq?wKbsd%u#BJo#mu zaTvpqFr12{DLLVyo;?zKZ(L<#Auv$$FT)SCW9zYh!Q-*i%9||am~9gTht?jsbkQT7 ze&6^cC@V+de#cVzqo9vdp}h0cQq%7DeA%;;+GpghK&Jxx(@qSHY!y?P#Obcnu}W-E z+%Dk5iTEdsH*KPc#_#Dysm#)#yiARTsAJh|3j_?urjr?Lt|Aw`mp;d60U&l-L!qRS zwGQ3gwpGeXjKdfR_=KgOuIdfX+bQfp4&ya z13l&mYYFG4t0`IVKTmI2hzzNCA-&*!m#d`{8u+ruNV^Np|R z{kng>;vNFz)y{DBSFELv=e4|~A_0P)V1MA=A?9@x@k89T((HG-t$g?*X{@0+osEBl zz?6M`lBFVIPSv_wswqtOUmF1C1QzO4 z1TOc(p1)n|I5;R`gBU(Y0*J4&s9HOxsgh~w?0t#&<*72f zQ8l*b!!rh){hOk-G*ZPZ)XSUJK_5zH(yUM)izcS~;@R&2ox2gXlB!Y@PRFQ=*HhI$ zal%pEP?o)S=!SXDESz=9upkC|1)ShASs}d-go|K5y;F~QS-MItsDKJyjk%VwRC(50 z1OgDjy}k?FjIjLi_t9J9E^N1|1*_iAKw%FLjPo3RcG`4IF!8a=G9L*3ICA_`Dz0Bl z;JPzf_0C60^;7kEsqC=@$&}I0_+vKlynFBGl6LCIBFafB{96kCxF6p32+4OOMZjFV z!K8YdNW5r|3l=Q~VZr1Jcx(PA%J!U|#7N~5eDlVKNmOSzNM|#DKW}oZS}G3g^5{n? 
z%&ZbT{do<)P&ZvOXd_K{`q|;!+)Echqt1Z3rf$L@B?-e(UjWPZaw&jDXvrH}j)t=h2g|VwC{e)~ zh8S6b{6Oli5iAxXi|k$rE<%TG0jkd^vd-WPn)1{4clToxrMd0HD|Mr=GpiXcp~x~Z ze+rYwxG$n5Fdgc^@&xm5$n#wBqlzz*D)w7BxYpK5DG$2plVu4;i&mH_PWq#lep0`= zLGOq!=)Xb$p?tkR=$3BKMvqbyH9yYG{(|in5G|!n)AlKz_xLocVBRUnwIeQnPfekH z&)$0YD*`1^FsC$v1bnu5*E|G|qCvHn&A}wKDXKb6R#?LzBsTolnh$9FC(|>8+;Jya zJYPm%wQ^}R*BVSX`&qF3(m?JbdK@LB{2wH2`QX`3leu>>W`aARiuQ`M@}Cs<9oo@O zGH)h|bP0UQa#*IN$<2pj3=`Pd(GLmy%+@KE?blVDO%NV>E=DG{ncm8=UPkh2#Fw}r z!(Hp*5JGnO3QJP7(~NX26q#?9)qgB0E-kZQz+LKyE{~}~^pc4ML(VmuT@t_|h{LOF zeYOcd7Y{AwB&b6T@e_9xsm9O0FAP*;pPGe{)nmu~s6T^ZT}k6{_INs*A*lTIoLbH} zH!A@3GG#GBuZ|zj$8qEu2bAgpk-{!V4c}=a!G^+XD0tBK zjCO%xJC^yoo80&B;P{<@y@sWhZ}IE4#?8xP(df-FcG&aWUv;vL8POEJtoVp9Rdeh z3)bN^bu<>$3z;Y17d%M)C2fK6j{kJ@uxq$?#+|4oT&MlKF;C({W)e4X zSb%vT~O6&N2s!RMiTZvs}jWv4rHQzr1b}7snt2aT`Q4FSYEg!Q- zc~54zyig7*x$uZiiXd)vSJ}SfRrvGTtFg4yy31gPGoBw3gED$2n%^+i*7*H5R%c^d zBou1R_VjtUQ}#EY`L5DY>=Vn#S^7Id8W?j>u-8s9f=WaFBYjG?azp{Y@{lRX46OM` z5R=Ny)oa$p-{7vj-y}G(SD1so+fl`jPfI4u1vL|Z!EV%9?*Ox7y!7&n(xHeDctA&(q9A;A&EFA zS3NEkvIddCQJ1SAs;EO07e_&rm|B2s+DJ-weiiVJ9r-(N=>&!?@~ZaLHJbCEcU^PN zKjHx0ex;h&vC%coC3ZhoDt%C_LY2v>5Q4anD;=X=8wvUAL!ht|YpEJmTW*9~vJbSp zK9BHGw&oT6jZQ7GcY2JOW@6?c_f5?_slJjQz8C=uvcrM)@L!%{MZ!o%VvP9ASNUj7 zA$|#5eI0%zPqh6)B3F{{HXATOrCHlwvL_KKZ($GHOPfWrr89RlgN|q(FHQp+Uf4|C zz7(gtIf@DnCG4~VgD9cC8h$Z&?IMP8#*sQ6X$o{JD9m*smqhJhpUkJbOq~|GD{D{W zVEde#8}yG}Cb<{El^7Y>^2+Z-Z`1cDZC=+u3`k$_#0-^OIN>EdRcAs9K;Tg}E^TCE zO#hCC=cygmFPVt+Q?b_JJo4Fj0HO0o#6c4YlWeW2PB=p95NBl|1MZ(f(ppiA^HD92 zPfi7yG=+UKL&AM*s3(?9M(8gzw)ckoKU7q5GqOhcT;*VGN0ScKdG4K=v_lAhDO&=O>6fR*e_sf~vv`E^hAs=eSo>;kQPA{p@_n{#TsB{>?;z zKyNb+h^eLPORqG0i{~+`1s&x2jvu%fyeTcq$J*3RPg~^}(|i9Ls$*l)pAQcY`v8AA z%V_qp+|8cdnnPRajqIuFL(UK37hh8%^%ft)EK;2fPmhoF^p`{r)Vz1(#2(ulV>XJ( zXTR_$jpmXx$F1KFr35|hQ2~6y-(Op+7BAZN{|4S2yhqrF!|<-lpH=VW^Z{B$@YE%k zFy>TEG#jTRe4nsca2WcK9!@K>ao)9-6xst|C!;#+CTeTlK&e91DH73pX6#pZ47u90 ztdjcZ-{jIB@-f#}{f&fhpy-+0(xEMP9q4_vgN544iY2eQmI`Q1`!t`$7Z~TU_qRTP zp7`RNDP}v{l=_|6U2-{9z8vO=c0G(OXcNrZAWrhm0h%P!;dZ*#sgqpM?=LKyUMSsZ zZ+gQng{ITlLoAQ>A@IzFV_UQjNcszKPYFY8R{9}#^XQNuXEP(cu6^a4-DWfv+ZvVqKJzf%>%A;&{H3Vhw zH>nDLL`#K7gcNWfmd~F7BogdYxKkgh{tIU17Hp_8oHipLq{TI7^43jDnYeXhwLkph z*WEc!D>B~iHR<){=TsSE68Ew7<(hxXBN82?Z5lON-VCSXT@)ldhUw;CE`RW z*fHzx?#SSBKq?JnN1SS+rL>S{9qJ(0gjLPcz2>xUxlO=t?YH)hNq}{Ty<9zUGU>UE zFaye1B-PYJI_~&bW*V)9H@$73T$yfQ+c9U@Fr+b!=ws&3acAvT$-~YH9LW=Dh&>h+ zXHJ$v_2|gKf+UIHt3Z4XA2$@D4jecXLPjcq|dmx`2`Zfi7Kd*~#!WZh%go#gdLUTIv1Dzt@K zF>r7wtLvc3A&)go+-fjXKtH#te)sd;gf^!aA@1~}WU{*ZUDM~tx&`ACp${X{y9q(a zx(!~L)Z#aGg~t6=5Z6~uRRi;IXXr|92$4x!r;GUU$wj5da0KgoK4061^xfL>V3 z4jdaMU6*%9{J4P7$lBt-^qsexe%w~Z0pANy;_9ljpWAxw=MuH@std|j zU;erd9z)53uN3MaDstUEYxdld5dEfa_v#N7KBwP;jHYD*Jt!P|js>m5VHp4@QJ$ki z)Q(@V{B^w-zwvG4QhhZg3+id@KrxB%FNf8XA zZ^&H5O*)e~H;6R7Z%d=4Bxn-C#2S`iBM|3q| zZwX@4WEWYlmdngL2!cE~l>>Z9%t>LZ%Pd}!fcIx``qznE zynvuiS^tDVF7UAZArN5zoGTAZ=>f!`NjuH{i-UC^7_S%8cpT<17ge4D(=W;9x-`y- zpc|94<>%{*b!m2l(0mTJxDyXcn?^?ki)|l2UYfhXF#i|6+F>!hEBB5r|40<83sLgNBqj`JB%4Mh75?-qT(b zsE@C4bIl4z6lrv^qMErqvM}qX{Lv3LT_>1K8C(G~gh_u0Y|hz;*^Y~2eLoz)iwICT z3f_&91rmc^&zwqLc_~uo#kb$I6Vu<<%u^JpaGiP|Y5s+zMnmyNKLc6qQU7`)R%49T zt-N@pv#fCq1Az2TNMye%&o6uV32<7)W+*-dFAF zwimRFhUHm$!a6(oD~=}2_Bsr_Haxt63@I(2@M^_skpua<&Sw@e7)`_gA@~F#7#Rb1 zJXc$+X1UUqvdUz{oILU23y=C7&BGYluZ^w_G%;X9%gz8qaV?=CTbM<9s4D0H$_&;G zT))2qIT;Tx5})CRQyBt$Nq^JVfpCr*d=y6O%I_}d??IXry1Vgw(mPFg_gs;Pt`*-E zR$T_7u_~Ln+(QvOE8%)&vWArz@jO=V8YaUE$kQ)G`SmDqgAB0<;p{OH(|sIzix6-B~rQz^2_J)>*1&#LtAJZB}7iTYf!MV|8y5Qla-b zbV_x3t@{;1z$hlo5G!ZmfDMv`j&^VwhD%C|FAT4Ar_kl5hpd73hn)J6$Q>oLHuBVpYW<`Xg)tIqu--gx6rmD1Rptl 
zor{P9_o1r!(2~x$Qh_>iH0sA10y9{vbk|VL;^tNqK^n|`3lzd42)9o-)pFt3CV9Jk ztm^agrSP!PK0gwQxyu9?c@f``w(ZB zW3)98BOY~f=P%^T8XJL6!p@)Yrw+p2*bI-c6>8nQpv7;PB2_+PjGM(2e96j#I-46t zQ=u`Y6oISayjAx`^>KY+WOOy*#}G*J3EdN*@Q7Kdy%#qWqFEgg>62wI-u zA~I3NwOiIMnPcj$T1#$rEJ<}muYyFjlnOsc`^L^RbDWe`hJ@b#g`dH1Fvdd zg1MW-(T7~s62Yk-{^5Y^SFnaIITwZ5LS2k%MNp=0mD+|(34z9}u<0%K23yToim&{* zvdW~_P+Fn-^(SemhQnc2YAF2@ILFt|+1{I}P% z3~4DL7^%Pl<2j*y8^DWo+_Wi^kF#?`aT1Ar&RYMzK8?eb*nD0TaewBDn5&ec{lR~h zo~D-*l0_(hY*x&0zF)z1^(YD%czg}=t8j_i!UBb#Sx4g6VBl#3m((1B+kxe3$WeIA)o4mG-ll2b-1la?`oYo$=I8?pV~2V(3F`@wN}UkXrg z%Ft_S<$0#KfaMo3@ui3H){Bq94qO;fSro;-Y9JS+u-`GKV8AB3!_(5}=vaW~pM3@g z9&|XOP(?Odi+sK$Ni3O^38%CK|C(?=Ccpj<)F>sWtP{$6qJTp_Q;kk5>NRawsS&Ch zXjhtW#Mw8)tXmIe=wjZCPm#+nmU$3Yp4kTAJ17;}kxE-gDIV_oe1HX z!^jT8ZijvsgE!h7P;BUyfnq5K)nH9xO~zu9aO1V&y=VT05C8QftROFAxX`Q<@Vnu- z!#lz6eK%eKER4Z#qIjk?u%8-;W6!w`g)og83s^Mu9XxUCpxVf&nvCV_lmaK$f^zF(JbdSk z@E1%*9~*F~P_B^$b2Bs=aKb6)BGoifo}pzmEihYg zm%ubaWb9hZpYa)9{^wnoGwE$)wJ5eH=Y7TpQZ@BB{D-H(X&MDzw~wd>z(Ad4i%Dvui!Na%z4)Z^IJC0ZPA$u?1k(g9gYot&r z77-X)Jl5i%pxg3!%>QxTm{zoezdaYeS5X}E{`>ED&WW$-eW|*BH3DDP2qesfeBkJ# zk6xy1K~L~ot)czZ>?u|7`#HBO0UzNUCwTbKqH2>i!dq;yg?wKye*vaWn~FJe=7>_1 zDo;XEesR);+xPF^U;Nm)J@-SU=e_sdD;F()@4j#%hwBj#_U+qO*5U70Uww7wYKfIE z!6f(3V`4p99?wP_Z6xZ@k3arctcwXHSu2*%k`R(WmJfTvL*6(B4jj1D=kxOjLJ3I; zMUOr9SZuiAhN3{7J9n;FbUg9I6QEr(kA1!M*25u(9MTDUX-!P{$q9x8V0Zrjp34q9 z>>$TWh|J$_zx{TB*2($wwn-BRB{nL)FR|gv3FE-u*(M1PPuAr*{OxalLt|s3ti$so zBu*?9xG%zMey6?it+(DP=gM|Yz$!s5 zs}t^7joPc=cXb0@U9TE}9uZg_@Jq$7<+WD9?`rm*#4&`=a;v5e zZILOEgOjP8Hu2puqI?nQ`dWcmWha1UXJQyjoJ5;8S{{Z}!}>BQt%w>igcrJS2`nrV zYPtsHfVKWCxE&pUSAu0*P#g_uTSuhjk?UB3OooYjoc?B=If&ALDuaRiS}Y%peJaBs2DLIjW|XO0IeS5#{UR?M{I|> z+6HJHb8+LZ&Vk`AMn|&!HgqosTMq>ma=a>I z(kO&ZKrC})8Z^U1DeuCnYrv9X9!O z1$;(oUZGOLP}Qx7)m7D`;m`8RM#=cqsgLtk03QTNALd=tUq=yz^;YmwBYYo zpAMyMdR#MeIpCN3@ihxYB_j$vOL3*VL~S+$uT+E~^*w0~ZOLZ@P7#^fG;)E1a-hM~ z>!F4`-<T^ZpgLcmqfFbt?0)voNj$M7GF_nUo zYnQ&@l#4K^&aQx8*(zdE3zbwrS*d}&-WVMD?2JZ@0%c3S|!F8AZ6sc$tEVMxbZF57YJ^^%> zqBmt4qT($%9@@(;jG9K7n5WIJDDomx5?fe(m46|4@WV1JgZEu0PS8=Pa;o%uvC*zB zU&aU|;Fs;^YC5QB-)@hiu;G|p5vJ0J>?}Oi(G5&M&7fQ^pf(c$UJhyAPl6Byg^phV zYK;IhC%%rS?!OhOAP)l-@H;-P@vEt<6JSkc7d=>JEj(Y9`()bokP7DD zFIS!ibKz`cBM%<$gO-WNG_lB2ap>t6Av0hjIBp7-W+9`O;f4+ZJB6a7Af-151ajJ@ z;gdI?!&?&`h8^XhDrNAUs1Rkb*D*gupH26Ir`EtLEwz`V)aaGeBGp-G3}rg zl${Rru^rrb?YYR!{t$IMo^=R*6-`4c!S6YLfnGlxdZ_~zldK4lR9`G@!;tB^e72eb zJr%Ev1{Cq|o!4Q(W3odsYqpQ8I$8l+>A(J(i#)cVyj>Qzr6&11iwfA?+x%%10-?FFw@Ap3eu*5$Nzc@ju?L!+I&sw z(`GsJi(KTGRHj6dknq;i4`IrS51>|e5g8^HyDAPn<07O7ZUo0k!P1SD_wTo!!CMpl zUg_TjQLq<~sr2tX5h%4Vg97|Q8?tF$za=P|0V96rm9+E^Kk7iHrUqq)79?dfm}NY1*G-uE=8H%xaqVA@Q#o>3Z}hGhbNJ~fL>97Y z8E-s&A11wUAL`WtQlTf~R>Q1;gA7{SG>$&wQh5DG$-G{JGaI*FeGbqu8)jJObhua> z#lFSlkTPwf19+ARM`gm6ijawpm1SLnU(QSNU4x3m!wx%anfdK1`28|IiL1x_MTtPd zC92@}i_#+ho#Uv2-_JSj1b2Lp4H`5Er=NbhSn2ZN#)mb*8x=+5u|(Ln|Ni^SL{m>N$kS2g)Q=^^WtJ2#dV)Txc)1^b<~iq_gUvSE zY^j>FiCUPSJ#XGTu_mVKm7kTUkNNX$x7{XixBJ2gPg)oAJ7FWiW1{x0t*sUC$p2C4 zI%Ucfi33%#2>{~XPCM;10iy(zT(|qeV`q%0#$_D&|AP-cSmL;J#&+)-Hi;yjY|U;8XL+IUJUsN!Ljvcyy#D&@OM&0+7SvSe?z-!)vL63_zySwHJz&=6z5DLF zJ3o(bn6P$vHYSW$0-$+pJ(r|!{o^0B;)##DJNRY0CAEH&zQo`8Sp>$9Jn~2t{PwQv zRJZCy1lAt>zKo~tzJ581-`>^4?8PZmH>pP8OBsPG_+9tSMmH6XdH6MI>z&ZF`8X7` zTA7$6XtI?6Q{H_QGv9j!wS5|~#cum!#1>nlw!RVVOBSPT(OmrdsYl?<{{&H?1u2`3 zCo$Nd;=xbVz%gpD=}vnf(=Z4djNDlMpKD)?=J|6mZ^~zw{^7r2=?h_pd6`62BORXD z0;&SwN=gO?oqP%MoEVNmIKBtRa}jz)%>LvZG|!xb!rV{9memZY#xzB(5>&GN8v`8`R2jsH+Uo1nI<&W*P^3&Ha?y-5g)$z9JW9F7;L!JUIMpM&Jt1lTK*zr zHCOC3*GBQ1Kq^jF6DAdu4E8+uIB5Mhf(sqC7NVGML$0MolqmD2eT+pPzJ+WpYcl4M 
zp&g|X%A`3b>TJzG+1Jr;_&2c0*xlipSriIwc=_SGF=+5WY`fdHk?l7eT5XdkcIQob z2cLfUCT4x|5mJ#OHQhKdA1?t^latOS@?j$yhi`@fn{9zjHr*2aY8ue7cp=`O_y#6_ z_9+TJaN-#kAg|T|Zae0@_JGv*)dEK>ID4~*BBfHpm6o7fXc5@qYDo1RhP@9w28EzT zCdGOFRBtygnufLopJB<&58$;ghF0=nh*~`cwp~kzvX;TvJ&r(4|Dn)K4HMse8#AX) z!KS0P!^m&!irPMdk+KymoH-SsS&gn0u>D-$6%vvcZF)z zA=loDS020@>AnN7{cihWO@X%oxG;AYL7|oqI1JmDl7SpHC!FNvnCA8X#scmBb03ZNKL_t&mFj0cLU=p6Z z{(J$ymPu8s4>trxZ8r`Lo9u{^mZiyc+&@@XlkNkGB7J$vD^H+p&J-XhGYe1F5%614 zfRhnRLJ1Q+lq?31-W%f%{2r8aUqo&T9=_!wcsh?6hnkk2iETbzv#-+T^>r%#3%c(D99A^{U%*0kZO z>wh64&>j5Jx|d41rlFflEpR=ZMNvzmX~bv@8NnLTDWuc@livIXN{eSgcX)3C@_{Qg zsd>L>UpN=DKb<5sJB`qn=lV`ZoQXl3?TSdRh2s`bmvIr~=3~;!&!A)B5@b?M*l~}8 z;M5LADWn2$5!$D{jTdhF9Ws6%s-~h4Mkv`?4BBdEjQyV*&)v!LJ>b<-Vntmtgj^k1%iE zTnyb{L(~t6m4!iMphj~r{jDeP+~5C#6dhe6w^L!tN~Li<)HPptUroU8_;YTCQ8!#5 zYDx@0%ckt+qiv7sKPTiE^R)3L;5a$AJZX~%-;A7JtPnaI@D zW1}s$MWhT8#|g!6gI-*KJFol&EWZ^&Sd#mXk*Pyl$-qI!oQ?h?w?fX;;YSWqK|5OJ zO~vAcGoffI8V8O<-w_*2pD7nh$OLU@@Z0g{i_d`WIS8yeY_s>FSbz6J1%4C!vc|8v z@I3*)@o2(?Vm4y;1HO;O&Ax@es71=ialgC&!n6$dOj^vL~+_P}cie#@$j;bV4& zT000@su7!P{Y_{M8^G}t%$PJ0V>cTC)meg9p8qGDayzyfyCdvABjKyIxNnu_e~c&o za4}T39cA|KEqBJ){eA!=+ZT0~2B&pC7JvE{=FM7B|6YtoZoC+N^E7BE$TOGVSNiv6 zd%{!d;dZp4f4we!&;0pLH zIjaW0LC-!iQ1UT)7Kfs4`XXB1XA4QN( z!(nagO5zs6WURa9_lK2jvDH>vi7=+;QU$;3HqO<(eZ3-31;1af_FmE1R>ALopdS!; z66O)eCicV$L`>?yCg1t@?|%2YqL`(Mm5`SZm@xFnBaalA$4SLA&pZmd^`q1 z!+Y+zN1$c*g~#@nzx+k&$!xv#)}nS!l({@Uf?Gn`XP$XRtcrO|C!c(>*b^uA%_p9C zB0l}}Q#qcV;FmFc^2sL!RC0g+`q#e%v@%9qH@2qh{gQyF1b!1#v(0!8%gb(Kj0xEZ z(RoZ%GhcJfHKL5>xhL&9eE4wOdFP$7A08`}wNzpA^Z50LKl}lkZ@zh@jz<|s9C5@l zU_XJ#j7!o!iK06>W1c^i#g|`xd8cALsXNSLN)*D0TAceN4CcA9X7T;^-!H#ct)6?| zhpJnyegxJP_%+Kdu(SZ>inJ!4q^?fct$s^X*ZjIgpbCE1eT(yc#^)!;3?|G3~=Q@cPpaOIADj3_-i6z$Z+li-Dn|26;Sx*Y7doy;opXtZ04J6u46} zbR&HGh!apU>X2%Plg+5&VU5$QVW8aFhT`JS@WcbZh2Q!aGC>h~NKlm#<*vaJA?X1) zqXJkJ)vvDkP@GnAQkXaSO}zKw1a!=sfv8**OFFQ=7Zc)$L_!~BEd{53 z5WaWX1&HhcCJjSdmnW&{HBdy)b|x|N+mU|gbf<{x;Zq@`4s z0XqDq?G)O}CWe1wZ+vtA<4`c`U|Je(zwQd0cFM^JwKQ5wA=)b}Hlw+su$VRJ9lZSb z{V)r2VEIK+o>0%^sxHDzUu-?@C~P=(Pej>Tl$-#jOU*Fn^71g#25!Ccb{u)ak5LM< z61y31+>bXW+#^a4E$BV?r6n)Hy3~S$hJ#0DFqrvEU3 z*}7o}2(Jx%WWxfUy8kxJ{P+#{FXNeq?}Son6%ZrI15_v@>a4rGtm3x~4_49z~CJ!UH(-KgN?c#8%IA1K>NF6RFtspzmVv4hNy2)mPN%#5{WE zL~$l;!}IrEg9Vdcmq}+Tx};3e3cxR|Z#UlM5NxyOA<%021H~n{|92-t%}p1zus~(L zKZ=T0729cElZm37&m*e6TgoB)+V$|`^R9uL87ZM#@bU9_{O12d)+h;(CKM;oDQWRI zPxm~SlIR+s6a>)gYhm{rfx~`u9`Z^9%E-W1Q_yI^sw!eRtI?+03y@7kFoHRF@Xp_3 z$+QoU@f_H`3N_T=X;i0I$C171zyI}ZD$4)(3RlGnIaE>z`CZfCI0i=TdmJ|3es9=z z1N__~JbT~o&^qgVsD*-DE3~0CEfsFLB<@4mbV`64uSs4vJ01Qr4BmWKfpDIagYC}5 z-M_sET5%Dwpxu2IeK*(!J0JZsw8KJUeH!gE-o<0TI}2IALrOoCR0}1$7T-PlGPv1I zpehC$wKm*)(-p|go(#A}xMBxfi}knM8UJ(CkFdzmU`GxAde-#@cY5TIg;YK78#Ny!zf?LL|;Rp9k~fBYrN zt<&LnIlyeh5vN{`Y`^v3dPPyH{^{HwN`J~*HTd3Hm%*#qL|kN=v^Lyx;}z(bvy%Re z5Qf}>x1)fK5OqX&W0uBK&n4Rp9RGH@~N)d$7yo9c0R<@h;n^}*N!PQs`$dmsud7-0bq-f<}wPWd+) z)iN~CgBQ__qz)VJv_E#(XFNQ!7BzN=r~Y~+=DhzR^m4l-sjLg|Tj#p?!EQU_z`gdw zlEBB$FS{Iz119{L0<~q#B;a@5Js*>DfCRt4{`IdV#@&}H_+68`Z*|>j1eT6K75r9v z1S3!dzn^nIAmlsgq>}_}Cg6^Mk_u1S9TN)E(qhPvA@RhYZHwje1s7b<2{(C&BS(%D zSV-Go4meqhmEbS|vGQ^ghN6@u=w%JpWCHT=!w<)zMT-yfXD=<(terBUrrh%HF(F28H1*#CfSxCns&!L2dYi?+G{U~EP*#Ip1Cg1 zllw~)rc}K0XR3F9^{ZbAgeEMd>h$-&|9$6rRMGG_35@sKZ$F%W{`oSo%GeS5KK=C5 zolus4Ul$9{BZ0QvfiI7bkeA28IF21V7FS$xMW+(_h8u3^R9h2d6Ld3XtXDC9{CJ5E zw>$UTb3t3{#5#K1xN#C+eiq>{>+iBI@A6c?-Oq1%3S=e-uud?cHMb{7r(&DO($lgr znW(OU-*wSzs{2^e2&@YH7IN)~YKP<4OYTF5(npj>K^Q_yrO;q|Sn~eUc5_>X|!o+r3W_W2st`0cs<+D^*%G8=wH z?Z*2V#|U&Q*?s@x(0{A_kkcBWnQ0WtanfB|%M#3;J{4ofjz-Zb!px*(;)oW!tSuVk 
z7h^!U5Vv0W3n;mH$d?KTGW{@Q)9rEKu_vP=w8f6LBcFq5#p5?lGEr@qQZfiitw3=m z?)~E>s42}vD&j;?Mhs3h-$!=9DE!Z<*PuPDMTXYDek8EO3j?IHDHIFsXsWdYcs=x| zOHo)fO(wmW42CGX9%_edj-!A6do+8dfIv?6>1rqdO7&fMP66p!3$qurAl0-X zyh0xRwH$8z)lZS~V(=S~w)T2i7W{TZDV%W5uYrM^qt)RVS8B$xR-qEcHr65Swwv+P z{kLGj`_CbxI-*RXjcpXD$oUy;F!o@KJNS4k@_Z;(8uRAP$B6X?BXTTaa{cIaW;Z`O2)Cm)vvzrJRR(rx6J-LdQ z+I>q@$#Z7S!1{v*A;@*WHfS?wA|LqhBte3JUdQ5T6Y<2KFGWq1m-@|+I)rfzUQMCZ zb7K#y=XU(<|4v&9{0iXUm)r+bnFww}8hf8`4jM;{K@g_UXoR@;%AcXU@MBmSQ7>&Y zefV_iHZln7N8-qz{RXa94_lAWI_(WSdM*2SGmKPSufZ=hr%YPlxrxoY-hhFd?16ob z{t4O?YI#*qiDa7wES^K)OWkTeRN({$pqa324W$L2;2$@f1G~6D>UtLPMJTq7ZTCC? zBX&Lr#fl>V;X>%^(9JYVBMr4UAFn+0JIs9lUoaF8k>^6!Rk*bNRxGJ$OzTvWPDqi8 zl3I&x4>$?ykJ%F)K?Z~CG+cM~{-_BTb%J%iuLOaIvQvPq02#_}L!d~2rn5_ncHcvW7To0c**fIZ49uZ~EI)&u zULOV5MA7x6rnIK{FttGJK3T)i^DLzF0BLOzuDk3^XyxWA_+8T*%@=X~%D5o5+mDkv z;@YPbFcul2zT9B<@sg+jwMHzQAH4odxbr`OMYza>Al9H({4Sj-YqeIYrWUjY>~#1~ zF?jRc;2HHmxfvfmcPl2p@Bpl0GwN*%x|%_Y+JtYPdIj_W8^a4iG^ri9`MkrC_M2fC zS>yu)jT>%-{f<2au09ZvlgIpb9>eQT-HSBsRoQn#6@igKL94-WXIu!segtY#Hd-b= ziwFMnYh*1CuJ6FrYVe(7PDZ~?_CTPfP_GAg;o;jc>G=oHXu9x22W35lQf3g2Kl@h* zt$tF{p-;2~*I#xv)be~7iX-R9*y((iT8l?C!EZYnapXCFf?7X9?q^}X84vyW3bf6B zABIiKc2^X;u2PSFo9vG79QGr)p()D#+3!AtXCJ*C2HVs3V5zK)$~%QG@vHhNhqe!)FP(b_C*YVgrH=xOmP%7n7(9_uW=+n_KY7h8&3R<)n zZ@=(&eEj^qKw$||nM~)^!7&;WqBs}!#tZ*Mkk3J9ZD&tGlxo13y$-?I&O;ZHQC{Wu*>#9(g7< z-ewoHv@b!O=HZVQ{|u4qNdMmF$REQ`ttZKkA3t{+K70FVs7?Wa=fe*a1ZGwe8jm^c z0w{eqM%vWS^8N%oaO363ro*^@>lu9K=(vA-S_-x|{APlftEsRD zY=nKk_Y3HBz6gtGoB1vt{o^G_hpq9wfPGvwP;%8(T0Qqv{4&N;C?JZUs+@NSp=+#% zZ9>y*sH|JfM5ahXOWs9P*9V9G( z{d19bYtUGq#fR@aisv8v6B@NVlu{9%ucM^*#kgaCiUAvphH2VR$_w$QU!R1!r~{Tm zCkr3G&g2cqbfbOowmb4d+&`FggEV&zeEd5I&^|xCWWlq1beblhkeBB~Y1;1an7GM7PR>AM*+z)7_mM` zP~O_w+6gPUKOP%Z!i1!W%`&G5@tP}DwhJ@QY@4T}>W$tV0)Twyyz4zq2R<`s6zx>P_ zZ@f`zNHa#<_dy38Bz2nEH+q8PD)?R3y`{RpHHpBgz%Q-)SljoA|Gf)EqaQ3Yu5(VK zM%tGx`QT|xxc7Rb!w#fWzzwu2_+66*sIL39jX-a~uO2z^WSSB~mNi-<3M|bkgK zn6B!oQ&p!7hq3fvq!#`*BJZR{)P}d(T*`RXYlF4dn8hQ|`s>O__Q0#kRp`+ zj)yNjj-=bBCKOAd7+ym^4mj~VnhSB28SLffD(mCvA5F#7P363^=DI`4mlAbm@Vs#P zC(QeHrc#*n=s$pds|-@=s6t>-7cMgCx|3-7>Rkon1^ia@A)*M&>UFHO;}I-3xRUXk z;^B*qS`7T!D%V&kqyi}-C_5rK;aVC84rSZDk0eJulA1XGgXekq(TT)@3}IiOp+FX$ ztW`y?H8*3U-NxdFs__atfHvlcJD#A4VK{A(5Ud z(B_EkBeX2|o^PhTPpM@Ab@kHEY(TuOFU26D3KLArpg8|Cp1t7|qJ_Cc!%}SED)8HD zz^cT0mu;dwv8Np7RRhs1r@iRy+2{EP34J8ey}Kne90r%9gI>|*`v!Oc`8V%tZT{R zHmV!&h;jItmyvc7giM34K6#0MJ$5_zX>^L}Ztz8FrTN4ypSaV8ohuNk=|~ zvdT}uuYed&fxz;cwecK} z2Ct3lRpG-NOa*>ryZ|92<8#T2W6R*ThP_U{8LehH0yiM;rMcytBe0x#XhyM|wJvtK zdKGqJSq?dFJXW$BUZIWopS{4758OmZK)hEZ+@3)!D$=ykz|JR4AgKRk@EhnqC=#Wg z3h5yQG8VX`2+EpyFW9X(&(~LKG zeBz}vm^MYbL^fQ_E{B~;sK zOe;a&(c8iA?)2SgXQi+;vQ0cP@oKzYe8b~lf% zJIZZE6$TuACDCpxVwoO6>rC#s@JQlbYkRzd6p<|%_?6>zRiuZna)d=i9~jZ{b5!KS zR-9Ngq3+S<19gk6YCNwmoXR6NTtKqW(r!I%mw{@cH+vj(DqeIsRV-Rl zmE*>XPQ;xvjk>Uf zvP?e?AG1HruFKdn&!7|uV=rd)yfl5oFFh$_4q3qOl~-Q*laI)|&5$8O)ZC`LXUSYp z*V5GjT`lnYTA&O3{=Rnlzy7H%@cS2^VNrRC;#VdV1n!CYa>|q`3XsY}tD*|6uCC^R z2Od!WCpV|$*j50dOghQB0(!+BP=KziCqVbDx8C{@_}zNztvU0|Gu4Ap0PQ80T*C9u zKd+8b;GY0X0f7Q>1p*3KJnXQ;lp<7qmWe&teg!x`_~3&|St|hX(n~MprI%jn0KXN8 zDEln1ag|k8>Bs>VMY4RpQo$t?qN}dDs@fl=95GGCjvcEqaiuLJvsje8vWQ|v6u+Y6 zm2Jy+SD?1+kEm&6zu$P{4Rx&YyGqt^#X9-VI<1}UoyBTe+EKn!@{J`wS3V08R{{GK1+lcRoKN|k9Ea?;d|rN*W0iK1zeTP4dyb_u_}y&N%{c#p^A&KG zfzd0kyrR^@m2BqD$I}IV|Ew#eYdcHQ0>1@*BbGsvTg5)-Jb+QR0-01Zu1tg)2GwSf z8UK2ie?5IWNv)_d)v>{rOe|{O0E?+v_wRxOs8_03ZNKL_t*9SW))=8!LY0nOhm}sQ3geswOiX zs~bah+Lt~92NQ|cD21A+LO+`PG#|Y9Fkahy63GPJR$7Zq_BnxqnN)D;o#!85)?1G% zrP_*{@5OpsjiTh5gxwO(oX>dR<|}dSR$(t_;RITwo;?pbnM99em|tk4Thiyg^A01D 
zZ=zIk@NAcc>KZcX0wu{F4aF%q67f-1_J{>$ZLbyBe$>HOJyxVxDiJCz;`xVerg_Fk zL@1C=r!W(ZP`4bTk3XAIqOTgO2&jJIj;m>%^%=U8M>9hN@jBMsX%CiPdvgkY4RIZ! z>2JAy{Bb0lIb~}?v9N4cQ2mk1=qH~Q>pijX)GeIAAY=QOR!FHORTf($iDxC)Vb25T zw%lqM)jeogFq1DPKgj3rOhPNA(QJo=871e&S$^0UvGS1Z(9AeNejy7#d!9F* zdk{BQR1^BLp-8s@>~zSvXjOeE+AgM@V#eFgDZ9v+w+K`00GD|A@%+ zL_cDIhLKN^pZ^WdJa{u+W)4m%g&s}f$9phh?1d1n!_a+tgo`|O-O04f_)w|01pM~h zU}rWNF$N;FgbW=&^*t{PsR_k_t9q&>Ax=dl14JfaoeSTC*1liNjFPK zlyinfbGVAF_B@=1mH$Ra^1UDBCe%$fKN192=do3*j$f6lWvoJ6!RzulTq zSJvi%spFa&z7|pWykZU8XNC3Ie%H}BdXk{n%zIB>%giYi@SDdlGy^!S&h`{s7CY*kC#pT4DlNbi+xGU!cBN*L>HS=aNe@q?<+GQuf+8h7LP~Go@Jk;e zZ*?c1%~K!Mx&87pD9xFMrHfA5R#u-f=`Z%vlGW{d1#t7kBjqenDUy+8$&uF*obIkw z{@fPmuzHr>;0N%l?#CcRUaw`PO?G9St#-#T>XpLu$!m|roQC89GZeS>yA7|Wy%(dCbedUn@-=2 zn7a@&Fvxq=ti8o(25qrBg?Jr>LYi40zod#}3{w-Abv)NXvqJb0i<%z&Xy`oYxml5VSRtn93l!6L3bYb+7F6{D0P7E6*RqW1l7yNqG^^@pkF zP=lA@y7R{oD$Pdh{-r;Z`#`|2;V=1JApM0Xe)l-#CbXIXc&>((nadq#A4rYBQGru9 zDzjU_VcP7@=#wraTGt2P$&;V+5%=D7IoLVn4q*8C_Kam)6?p7;(k%q_EB{3CE2i~j zb+37ZJ_QLo11rGg$`cLAh8( za+M8nlPhJZO2)|+9I|#m5UpqM*1OYdgPqYrRZ6}3;Tx~hyIUXn_3B4Fnn1z{a%LPO zYEa0{qIu>=ym;r8M4hyX`H-robo=*&rN~O^V%oDaNP(mUCxlnzHyGvinI47`!DC)E%|wbMq;ClJJ^UF_t_- zOwIURir=4$c?)!3<$vc&Hbp-~b+46JcC`&zcHp{Hbz6plr%_eipzfnmE~Un%Uc3>v zl%Y01n~CS1LRBe8*w5jbS!@$BuEAPc?a7dB4khavc+NtmzWEf>Uw)h_&86s?bX#c% zn~y$?LJ(2c;VVzv%=e!@rCiTUw;Xk+z^^YoD6CW~RluddQ?Y@O--{KrST;*_SOqqA>()&vV=M4btb!#6VR6Op z$tRzTSSZVbQDCxU?FxJp6|MkR0e}LJ1^&wK1^7ySuk3^TPJpFSm}Il+-vVkk*kA)S zp(#1RS6y|L0>QEma!hgzm5kbzR$8eeA6BfSzyA7bwI3BbW3h4;8{&%HvshY-4YVAu z?2A}LR~7-n0)i`9zOpYpd-haz*RsBR?-f^E!NU(f+>uRO$-ezFPw6j#Qu%(#&J~rm zEOMN3E+$Qyq*U7icjfO2T(2mjWuPMMQTctPZ7K?3`F#Z%SJcJ7c08TI@8gd@t}M4J z`z;D<$!4x7q&q8!yTI?Cd9`$H>yNg;uYq4R@#fmJ`c)i##bY!szYhI+8GP``TxQO1 zp)qbSb<(YT^zx%5^dh(=bWQMYIR~n|Ecv9xA3c_?&v&)Je^Lwl9`Gv__%XMQ&)dPT z%2y0ZWqVYWb@#sve*cXcT*hHCj;a)F1)7Cn#%L~?Y<18n^jU2Kik^kxF;m~;=JU#OuPPQr*h$fGl@+Wb<2Oyil&mh@ z!nLaLl6~3t_)8URGJ*{6zVt9tU%Xd2EGX;bXahah+L~?lI*f&mp{$cqyn~XP@K7<5@ik$2n zt~MNslF#Ol$xF+M;e_I3NwWXAbJ4o3NU6|9_S?63?!N1=ifzPY_R^Ng^Ek`4D}Gxe zaSXjg)6_})^Zpx&czFy_EE)lBxPf&?98SMAH)D27mWGJWqZc1VI6nusm?P^a+5e(P z6oArfo1W1k*PlL;YT77d^VBxfk;&yq`$+~4JB&da?@VBZ${D|JJ;%F~9w6c_x#CxD z4Z#$$R8(pLHcd?@5|2@EG;)$bt7}9JDf-m0%><<&PQP`wV(Xpu!*N4IgADgya}-|N zOiWu05?wqqudJST95(@@dO4DsK`6h7=Wf4*MKeAmVmU-58`E~khwA9J!6*i8zB6Vd zt`wk6pZtT@AHSWDlPBX_^!nS*Y_{8hw3IXwmPYI6Z}7rHw_z6A6!?|$03(zjT{IYT z+U2-C*1>a%tP=g63r{*wDZw{$kb=^2x*;z*^>|CF~iqU`Y z=4`mx+(E(N<#&eMxFu;p%t(75vU1V)^=k>VfsT|>*4Z(|e}U^+#0Kc{Vr{_AeT`Xi22 z_V9sUR2jejxbZ|~^{lX!ZsHmyMY}*OS_WB;7NbZFJyzI&ZATsg;YJK6!<&y@RaX3( zu2RK|;#U;9(mtE+wm;nlZB0pw6Y+CMVs9P+6q0usCU9I#ub@=R0)EAYThzW)RzSXK z4tHF0K5;Ubi5m7g;bQz`cX9=Jwitw>F=gwW5@oZdku<W>G0hOhTFSE!=hG8IYSp*l$saJ~=0%R?=Lb+GK)KK8xeZ zd!^`qE09k!U;nYiUbjipibbq2q3FE@SlJ}=&M261Nrx{Br! 
zrv_JnU%O@*b{ca$e!MU7h)yK)6}Mb=7QT}s88%gMhNyi7&Ss28b~@okw1!nwuBuo) zKYH20B)k^Tr9YQ9L0gSgmF2g%Is_V}aCcVU?qF8icxPIRMp@~7?ePT7UlDj^HvqYA zBn#LvYT1A6IhcLdCuVs3y&r4HsfhJ#bh6jIOhfy=Bx2-mq^PrWle0f&89XU z#g@Vq8J-1F7~|vWAxs(T*(FMLL0Lho{CWYuTa99wp}Uj!4C)MrKrFtsDgxgm9*wAs z+m!7Rk9#bb`6kcadNDOx0ke=+N@|Jf@Fmegiex%Kclmn#S@7F#fBxh9v_1MjdcgoJ z%~a!oT(T#Jopvd0RI6uS;oJ9l;;yUEofKA3Qkm4usQx~1i{yhUHrn-g zmRol_Qf5-c7F3a^DjMKQ5u+moPL`TebR|+iH_+sM^ow-Y7jmKG1p65kGmx_lw%_|O zx~;OA%IK}vo4NX;Q}OIpqK1#=l+Yy$8j~Eg4BY;3mRW04%vgddzm?lh--m>^sG~h( z%u{s5X1bP*ci4|UgNIY}e7aj{CR}0IDwgq)c!ZX$L(di0WBZW@ zp~ZUOSxIu9sdBj`$wXo&l7^|yj~h;aou#MI#uX9-Js+z$pU1B|i+E5XRq)to`@I-2bT2iQO6KQr*VSj>r9a2dw-DA6QTjPptU}Ysy!mhQ}T1piw@KG{v#Wq?NSyvRE@-UQb z3K$j;y!ay97A3CilYm9}BNoyEmou4+g2NRxvTSGAuwnG=+gAn*q3QrTlyzI*Q6xyrh_qJpld zf8_U_bC?(3ug>7>=PC<-?KlK3%f89=BWiMKcWFD>@7G^{U0F}dIj$@}mzkl|NxrYn zvlJG{&$7?gUVAN%KKkgdHsc@N-#_D=bZz7RP7C}R_?0PZ$*~M~TZS>GUB>zw52IH@ zm?_id^YOPif>NHZCf~zHuRKiH&6TtFq>@)&INM)#)CAQ~Pzq|#0 z5BL?aM~|4#)aNED1!_p;Tgqfw8L<4MuHt{?ep<4LjX%36WbyGw<(V&_Hl@YcVDDoX zu+COW@ndA0nfvx*yz|0Cgf*F*EVre=xsA5kfpEN9O+3z;Ii2ZKUZZGLbHM3$DeF4n zQ(u1aCht9VJ6fTMh+YPJwidxNVi4)Zp2wbwQPYTb;_0WCA%WzwzZkPzU7PsvEK>gIc~&amA8 zEJ#u5eWBD*qTaDQlM+?5X$_1x<{Uyj1`-Y%B<)#TKmJ$}_Cf`I z3%1y4F6sFz_w8ak?(1UiC!5mobr};G%2v}NjPC1bx~AZsqbJyKr~O!Q$S|_GHX5}# z+<5-+#B5PZE1#)(vy!S6edE5)znWYt9L5-l?EC)ZXWHl96BX6i76B9Iag!j9S>l+&Bmx{{RZ!QZn^kW?53G2zuA@XcDv$NGJevkz@Z(b-#_sq>9uyP26<;@q~j` z1*`;u$|81?#b_@^oqVmbn3wM_&i{gEZ#;=;ey*hGst_|#H*6*deAr`)je=rM`Z|?0uv)vuk7f3$rAQk z@bb;Xtn&Gl^CkD8pKs-bNA9F~`a2{-0j?gV$**G9V=qBZ_9Gz^;f{l2w=nbDkC^h_ zi=>*r#xFEc9WwDUB_aW;@FAN?VMXKWI&&mQTDnAaE&cM+_5QO&J=6c(N{i)p|9HS6 znbg&+v)uu#ve9r-MjXpc@%o+T(K7ub)o)cL;)xD@dno0V=TP}Zip5nsMB-Ya{36W+y!czZGN0Jb~v0> zHyMd%HYlaAVKYQ_})j-FmMyl zHR`klTz=-!MBNk#T>zV>vQdkndN$d43_S;JiQ@z`crDy`{t1Li^VGd8SG~ISwWV`S zlKv;9)iLVii7I|UH(heyf5J1D97mO(QmzAK>un2Lk5G)%Fy^@PDTW6qYv)kY8{Bi_ zxvCG1O5fo+s?V06B?4&I6K)`=UcTIaw|vc$7avSj(2Aplm6A9VNTH&rcg54BT-4F* zCRl6dLs@;Zk>tE2F+a=XJ5M7$>wQX2S``opFbjxLAi*IgU4-9bb#jFw4bFV7J$Eco z7N~8NV*NStv@2L-S7GW9&wkIXmz_?$l)?;(=(;?m%0-fDtkS*@(zZ&GX`$Q^D&TkJ zVf)dPa_AljC>8U{R#y+l)czC;uF77I8%4f&?=dDndpl-+5eY*_^Tch&R1Oici1ogD z9<*PtmExlx%Ah*~!SZj)nfL7$#sYrTh(Am&*Z^=*iJRzYfc1nXE%hHf-* z`_<#gwk}lXP~1Vb+viB?2X3q!KI&=WiZhM`H$yUMK_Mq)M}&Nfg4w`2BaWiq+MAFq z`gD)FOgLo}Rc=ei{D3cxBaRKpIyTvHG<^r{g5w%=kEFTs??;db7O62vxjA)pzsml} zzPMO8nod=(l^A)**?6HwRYW0?7NL;Xic2na38!RZhLaSzuZw?W1y~An6>uv+R}`QB`q#e{wC((0+jiS+IsNq0%Y!uESC-8mee{v~S-wZU zPu7u}`>em8#VfDA@}nb^Ngsi<@>x-#s^N@O>d6a%yl@yshuv@m;>tr-KKt2cpQ+qm z*@pa|98)Fh_V(LvR~feQJAu_=OD+GF?f%wM$(|Lpvw%vKQKxC#cH3=gU*$Mun_`8% z{PN4I@0M&}$=3a=mr7d-NS9-g{TelDl(MOorIN8M6*RME&FZkhuDqmmhS5K#AYS|! 
z{^&%4KwjAgvAmWdfJ%FdlKYrrj!`hba;|<3vUMqb|LQBKYx}>}0>1@*OPWW?N^lnJlaFv_&+4R|bHdkIMOH7}9Hy=-a6tk2^E<(xtdUe%C&Q)LBsfKYl+g1vD&=qW;t~ z*!g*{6fatw_4huG-GL7=ZdL9j)|SPUT1$l#54WSPFJuuQN1XugyIX=<{rDQiQ#8&0edOLIurGr8eh z0nv+;RjjC~+WaW}HyF-lyBd?y@%kirN7SWa ziyfz>wLnd`zGOU&u#@5Qb`Tr#Z3QC)X=IIBMxS{brDzWYT!V~c9FHaGwY1v|g8Bao zI_0{|#`>`LF=tb%>WvrpWWM`^Nq5~ywbMq)3Ciug+Za-An7HQf!DBb@^?NU%2RSqx zrC<@2XVKRfwAlz&-}XqQEUk8D^Wcpal9@G)q$#<3<@-TYY6ZQ%6Zmc8p^L_%w|=XR zL7=#RPgw-^soqZPKy`_VFiBfAY<}?R#QLm4U_?j+DW1CW1ZbIwX&5wTEY{lX1Xf&c zn6jx%1g*Sy^J%2No2u;DRsNrvVqWq-CKQd(W<$Y@GwRsC6K-4+uiy|%eakJEok}Fg zU{O>Sm+H6;i$Xj26;+>MgeWffoPXSKB9X#;RRp1q*D8Zk2;JX z+NhLHZ$5N2-zmkfjbmpC1b~VCr>|jEHQ=A;Hh8Y3RYb9nCXq)#1 z-+wWU=^uPbv3U_%sRc_fLoR6-SB8)1WsBHf>7f4T``QxsbzLhiSquCk_=SQ|%_#N^MNK@$Y26okBxY3KU$5fmc?}sXj&CFUnVgT!UgYyxtyV@N0?_hXTKc z5GgJ|_oaYdRHGibNDV{wK8D`y;I}~&@H>)(yFjVgGg^$E>+itOT@FSwd*PN`K74Z$ zsd-;24ZY{&@dZYzE1r%RFNsPJ_kpSJP9|&`7||+P{Se#jGnU3xHxWrZwcY}*IP+-Y zl1;AJsq>%*JI!2u$ z;I|K&RU|w66P~(c9Cf5{^m5-D5T&HWutW76bLKV7E7TFyiljchpO>GQpzg_#Y2k|F zfIw4pvPJ_t9d!jp;|lm@SoOe(%YU9O&>;Kn(8{Eeg|z3_@Qp{8lvf8AL`ScI>e*x`Qc zdD!WA)&12oLHgJ`EVRw6G<^ z3M&pGQwR|aL0zbcyKg*?)T~cQsG{0eLi2O&yY zm_njaf!|iIQC<7ry#0g%zoN=5KYIk=mqiX0$Sq?tJt&eQOy3Q6Vd#zrf!PDMNcp+-5I&t+owD zq!Dr{zWDfE-g@mHc-bZ>WgwaatA>rX9m#+-h7wRB=FR2C3&vr1t!415t~sB;b4cq+ zYF68d9riz-ycZ#1c+C3nDPDf~E~5QbWUnz}v5h)vLq5|cP2`d`D|zKyLi^^@5x_~@2X_o%D*R0oXCnRuBeVreiqwmQ2_Vq z)r%*ed{PYxMBOTY`;kW;`CVXnX-=`UksPBOi?opdZGpbBZxxU)?f&x1FC$jg6*#O0 z+fwq|Y5B9kuM`VNA;3l(ZPWqU+uGVV`skyXKYxBlZu2hzwl46yG*?I0x=Y&vzY2ck zvy%llC{7ugWswS6{-|;$LwZ4d;V5FfBQKMQ88#`rJR}s>HLWMqxVl|HQXOve+g~O zerT~JhcZ9c>c0hkol=CPo#EMQ$B~)+X}i_o4{DyuHa*X!NU`J*tFECK1Vo+IAHc8VF}5pZvqla3opI~0fM1!2 zE-1xNHqwuMjyad0e#J7xXr95nSD!@u~ozRSWqNO$-y5tC=_9B%dEowt~pNd7JY8@#q ziiU)&YBU!;d_9Eky2NW5Nd*y(JaIg(-iT`#`DW@%y!p>ZvAk4S-5i9<@TaYfNW=m= zkEMwXZit+t;dmzN4jaL`o9~Ki)!`ZmS~Gd-8tW+88639-(=X6HlH{vT-s2w+-9gw- zVY+E`eJd5B+&#YD1%8*}`u%rawP?iYxyGi-X+i9NOPMyNK63|Oz4Z)@aUZ*o!nY#j z(hZfDSq8pkmjJy3{PtdLLo7Yq3H(~^ir*i=Z_j}nVgv~k_&se8WjQPTt!+fHx-ZYD zLr%dq`>3q-X>UEpNB?>n%Uy);X7DAmRiIj+W0(e>>k%>yis=-(TT*%2qIPffB5b?w zSn5|3#cx2ZKaZ==7)#X864A@xS6n{qNE!V0Uw3;kCQ#t_rtv2z)w1Ci%ivdmJQ?6E z=@?1J)WW1_VC1Pc5!Cm=u(D*npUUHxok*3I!ZVAiaKs^r7t}Feo#AXV@-XJ*WMqKKW+QAfhErMysc(61;)P1}Ysh?* z#JTCJ#H|d+r3hN`grzOo$Jswt{FXoSl166LN3-0P5tpeGFX+zf3|sS747rPobpumgsCged{?seDfK?UaI5z zwxkHMef%%uXy0|v99!8p$1RB`h=@M~+q7TbQsfgaHZkGiQ;55gbKinrOkv9XVI)ZF z-Pq}*>j*V0N8l9j7k|ivbiV$r)cGqRJe*3zL z2k6{JN{LU1g3-W%$B)NP48SttG<`LVr|-Xwkkd>)zmQOMR5@a}b`>KJKNYLjN{D>F z&YsH+=Z(V(TCvEr7ZZv(MM>q)Y2jCDOrY$jf3!%f!_tgCe!KVnvkRYOd*Wia`s%BzSi#QQ?*hNSeNMVQ(bWRK z)B=_JNnB&EzyA84++P*&E26r;y6@;ZOaFT<&;@?~;8Craq6G-c=Q>w( z2+);GU$IH9WaWy*vKl70+jdm!r)59H*0^H*EBo~2mtQIC{g-;>aU9 zK=!XSu^Qm{zWN^dTdbZ1qzden;}bh&Q8Y`I?kT67q9Cm7w`2t`Sg_zP>6~OgDxIDj zyPO+oN3l|_sA>gP%efFM@8_O-t^>ALV0r}<|CZu;X|0|G;tv`$NVSRVr)*Cwr$zBA z`&dzJ|I#^FQt7eiswjRt*Qx$WWBg(8^l#d*UAxuQ0>9A$ox$(wo9@cMEyqxhiLrJl z;@B?TLM0|mJeSsQKOpcjWu;sf`2E?|?F@cJJ($uG4B7iQmK(eko*kvy%W>yf`(x$i zc7R{mSj8S%z?d&8IK8Y=DVp^ha>fK@nW#mR*mjYymLX<(Dz7(?AhGuRLFFt%fT3Fi z#YKGZ%Dqf`_XWaUUggUT9J&LmZZe#_Rj;z-B%4*>shscfB(IL|hl$5~lP}u1mP7N* z_juvfzmxP9VQB(>lMFxPETX+uCrg5)QQ-Ddb|;2FnMm2{IS}<|n6%!5t@jv9zqPie z=oE=MGnp{{IHILi1%5r51zf^KCQwaJfRRPkNK!CsIPmXRlhcxfVo7j3!hZXtuhhV zx13G$j1RF)AG3COhL61l+vZ#>P%|G1s7+g!HsZqKz{;;Gl5 zoHs1W=3=CtLr=YgHnRs-Sf}Z`*Lm)ii?KY1$nOTfb5+5DERG@SV1b(EuYlitS@El^ zo^2c_jb$0Mrc+ckbW`u9A;}hQhw>S-hV^$of)&@@p0=z(H_PMx^Y0>Eynh`XaABdk=c48B6sUBFUA= zA6>okWYcZLVsZ5y>YR#{E>c5UuV%S{Ls)gt5W4kQUIC(lE4I{PGioZ^(TMazsX06{ 
z@e1^|ZwLk1cF-qK{MXN3*L%QWNFcL(9G?N~?o$tPQ4V*N%;x<+;&5|5@&vlgoRxIVix<*KYtOIRcgkk$0 zrL3Ms5h>vJ`ZJFrgDFiI>y*-E;+h<*Ao{i)gLNn(ndQ zaLjluky0D?Tyri}*>4Go&3J}|9q-GCmfZZHCBn&9=Mj-pS-Tz6@o&BxG9R? zcAIB;{{JHQl~!!G+3sl3_Vp^on1RekN@jahMS2`{+IeJxq*A4R_0A+FKmVwTeUZFn zQ~eRhUWUKQ1wyV>-BEVlB_qn=r(Q&h+XKy%OnAAz?_lSu>%Nkqni(|wv?KICk^#seUwl*uQv?`uFD+>#G_=<7F z+*a^@D2P}b052`~U3@O|=eq1a`}rqxm@;=N^V56nwU-(zEWUJs-+zM#XV)(L$F@Kv zCR4z#2nIX9|8#-h|JYOehjzRR{QjZl`r{uSK76=R$%!Sh5hFPK@WWLb zRQ6}aj2RtP+_E1sfRb}1*M?*`%RbBh*VNRgT#0RxcpD2ufJO*h@7?6oV4 ztgl|c+HJne1LvQVjQ;VPq-%};+7|dNt7ief8EQD-?0d-tHN+Iz)v@)EnxOma<=dI^ z>Z53OMomgfrqoZt?_$l{c_MGA6^H+|r>^V3|HE6L41OgaR`RgoEDnCl3IQMA^{FAt zJn@`<9G{|7+5lY3fMX5{#*VI;#Zf#R= zSTZjcd_R?RY7tHlPzcpC=B(?<7&U5QPHg(#c=Tr4W=tpB+)6lN5kxeKMlHMScOK!| z-k33u=2`DEY2ukA{U$WGL{YD1gI$lJ|JqwAYsr{i;=aF+!3r`&U8Af3(euiFI_=f6 z^&ZF2d(al-OL?lbx!gSdI3n%fSFHAyW)a(9D^W|zG1z>U(ezzs7+FsvX}L^!`&B;p z=ks`(HX;TTiw7o~*2=WwK9fjurS_e>*nYO{|`yYSQ?% zd&krA^?T^9SR4K__i<$lD8*0K+l-h@$_aLoqQYnBxdgCcRed!jw)>$O28bZ23 zpxa6<>{YGEp(k8G&Zz!D@jK~3rREeHz$ISVpKoP_a-3Md)fqnKXzbd41ipdZJe$c+ z+{2u2-oba;u&fA9$)LK=n(ROJG}?@~%4q$3(j9#I+B2A5=||vq-_s|yD}Fr%ejmT` zSR$nb;wh$VEUf0K>63YM;-y6GECo-a|By2HEtpB7L5WxH zxJb35X$ZKN31wF)Y{Xu*_nJf5Y}BEovKf}Gak=BFvq*hAm88`k@z7!QT+85{j$(!N zw;}JF)MzE1xMCd6!s*0A8jh1!0Hq_3Tho5z(xY-@;|;8|!7%!5up^G)!Pcsq3{YD-K$p9?Psweg9RMllBoiZOdt4$}5lX_2m1A z$@x>SHC^C$iTj=ZfE5M&7OX}NI{h+8EK53TQ)Rk*^T`{0{poAWpY@r_>6UllXrhK> zSSR)EC#p}DzM%vBu09k?3ll2LUp7x0#Kk*bK9++kE7?njnLfakHD{3vtx#F z4U@WM2C?N{roj;Fj&pe8-pHcbru3OIdwIlUP@w?36?K`ZV1^h-!i&WPAp9_BL*yGge zl-09m*ev>P8vmSlHHM$Zljnat$+9b~$*}GB!SQM_%ov_u;LR8Bi17gK_JbvE<(k%;+*o^ANUaZu=56xe_&&)4ADaUA(Jgn;O z4BBKzR@r6B9{xXa6IP#ST>| zNICuOmw9vYbNEgX)AW?+x_#Cj%-^=&6{Bu|a(qcRt-SQWO)UK6WujiTtb$h0j{x1Y)IAYuT!CE=zkndp z9n&(&HqGXpe?F?Vn{Hc3k3K!A>(K+8R=)e_CGbmRLye3$sPDHtZpp!P+IatkhnfA(v&8(gvZYrdnojQzOACH04u+L^&5rq+ z_V>J{9lv*eOjPDKM~@!Oe*5j$W%c|Y@Eq;h#s8cZko!y6gn(b+DnDoSeAiufar4bL ztKa<=zxdBN@~%Byyal?z?_Yd9ti1Bd++!GioH?<4^w%B3|^>^nRTeh|AvdbzQ|6=bt-&mH+#r`z!ES+E%Pk zq+Mlw`98TG#*ZJb+N^T^q^;y<*+)6A(&@|jkam=ToLnoFVguQq-o1Ns_uY4Q?EiJw zUB}~(Ki;wX6*W_56}o(&3jqJc9m)UE&Hp<1)v+}VC)t<1Pra2?P(`eLLe3*X%x`1b z%eU~!VZy$pYmxd{KCMKBnQT^CgqWq7?S)`m^(KmtjY{VHvubIQr(LJE^i; z(HtGu4`?g2LHCszee47ZRvm;(i&;H4l`}QXS_W)9igmW$lSMYvCBnRV@3nmY?kgnB z0L!=WjUuiVBCmC0tKANxah0LUda_EJ^QT!o%jBIexxSuG5N_b06V4+Q>4$GwxJ_U2 z#BEoiw=E#%mM6xuE|emT>~iop%wB7#F-v!?nQP7*OWa;WRIH~lRL1XeL$+nJ-3}rb zBnUYfUcdi*n!bD&!z(E8OJFI>$mRdG6Mt*Z_!X^{T1ZT=m1|>!O$A14mL0;#15fId@q0g(C$^JISe($Ffm~Kcip6C$ z12*27!P|}^WhP04;2#q%rDghCMEr#qOy#DN+ScQdzgF-o*Se%*K!LeA~ z%Y-?aff2IsoRTVp5!e!lO{`>X8YTrP9w=xK?afLCXFMLg6reF^%b^(Y-9msJNR2IDtmQ&2ZZ} zV-+~^be%S{p{)2_4E)}A{=vjbi+&OO26mW6J+G{uMe(cD#A3Ow;8;Dw_d7!YU?&{I zwew0DDc1jTe}s*&f|<50_mYKGYU8$xk0<73)p*S%^kb`MO`|pttLGz?)w6(KQN-K2 zWc=2$@y=uDyY{v}Wc;2xj=0nGr-EMr+bL?;>y#Uyt{;RA3ayKX=S8{iqDSNEPp;?^ z_!iaCYCJoy^2M*a_++BaY{J2h=U6HiPDx7=X;_(2hn`A0R7=)zsE!2GB`oY*vvN%l zRcH*G&t86j*PnlkWHL@Jn@0k_joLnpIQl%S+GQyf0*aX|HI`0DIwZ@$H4L)yOz>@@ zxfJid@F-ur`!rEAj~h7nu1B?5O{-=k5GwMH#y&|4|t{>D4^OS z7nT4qJAu!hFavEHe+&gb1H()Kb`m*;MuS=CgyxixXiaf$?AM=p5KlgM7YxpaOKBN; zKmSPeO96#PpL_|AZ^tHKMH#lc5}{k<@l8a!6c*4kIOb;Yn=8*l%#YGFs0QV?KC*9uHoe;t@H8*l@7-JO^auVBgR&)}io{|u%_ zfe;jYM&xhE&PxZLa1N$yz7ur21>MB}TA~%9S44;2jX(YJ3b>12MU05?ND1F;;#U$s zv`$EChE?H?B7Rr-^Gj7gAuwD7YKhz>D62_j5RrG*S!c1`u_WV2 zJxoeYD*yMt|2^oUmWtcjCQ4$H>O>aNQn^dy?;UsC!MSRQ93)bch)c>_J89A+e&8=$ zxRAe-If01?m3eMSZM@Z1TXEgyzBU&#|7T{E_@!g%yi%PySQH5@4t(^ZAH`KyUB&05 zbNuaZe`5=1IybfV)KgDo%V|l(_MX>{W~rse?waaBs%naiA&dwM!jo*LDH?|GW=k7d zS*h8mPJ{v3V#|K5{l17WGY}r|p14 
zXI_nLn1Ml&@T6J^(@6PUc9Sy!T*B;wb^a}WL*uipDJw1!#E4eT0m zRO~di*m*kkIqYk!L=9sBmRGHwm$U6&5Ss{l+8+4yi8G$Y3R` zu*ix+SBt|lC*#W({1Exr`mo{_^!)3>>N(2sJK7gZB7buw4Q6sZoPFuf@QynXHZA(S z{~Z4G<8L7uW|7nax5iTnux{<|uS`A(dZ%8!o?qbNrIUay;F_IpsGd z;`iuNQ6%Em%j1dLFT;vAXCo|SVN&0QI*NLGKg92qSoG>sY=`K_C*sI6uYlgMIs7n* z_PCF`fB6k`z4IR=gFG^+I0_XH-A)YKeDbr{^Pn&GPyCKh70tO@iGB>p(vcLIt=MY+ zFJPwwK7$!~6Yu+5B^c*kf8g?5h=2RT;xv#;VyeZ5nlVEt=c;)_^@$wUY zMO+UA@k^ws-hrdez7ApQhA^xctny;~;fk+vKm+B0)h&}3GLe{!-3~Yf8*RN8f>a0G z;s%hA~TsFOh?Pu%IkD}VhZ-hKWbs1&*o z`V|iDq&YBI5tD5&=!cKzf00U0zz4Yyhn{i)CT+eW{8$EsVjg$h{4;#{t23~|Pa|oD zc=zf5!>l`RK)acTMamsrzy0DIBub09K6Hnm`jO(-(5zut zJ@;1pQr(G=C#)&|a^7smb_bt`t@k_p=);gGR>Cce!+u4Ggh~FpgyA@XXeZ&I_28hNsFL;YKL9^hJ9k?H| zr8B9y(g>~e4wKc+GyY6WBAkw=c9-?QdYpk?~}I(;+KdEs$ZkGNu`Jw z`m=I%+cTtm^j0hU#AJN_tP2pdtb?+iW(6*#5O6C6#4~ABT!%@IcyTH2`r%n;(f5zdFKzEN?yJW3AZZ_;XnCTTlFQa0%UWNSSG*U?eugE_)x2%|5mtxrL!eadPvKeUgx;XfP9TCDL4m$oUk>j_H_>F_y%KQ(H_$6{t z2iqzif59&iwr_?ooMb19Jz7{7CkG-al&1O&iQlk&-?B|VfjMgFoXUb zF9O668tvHc=#w#Vn@_??bRb`@Akz|u9#FuW4>Oj5SzeB%&)$bw_uPVH+(t29fag01 z>{hgFygfd3%o)()8vr_`tq_6q1)A3xK1_N}BZoJi{41WDbstd5Aq>i>Xcb9qVng2R?S>=}g!qGij7u2So}}frXfsM%*`GxCIzt7uqvrTyen+B)k$#FMuCw z#m)yDjrBhMDVQlDepld+*Ix!}!CWT33pP;EW6;7BHr)D?*zcroqT8|9>Objp;-Oow z#`1Zy*&>{XU%DUDGoNzA_uBdpzcp!)LLG&28L4Cj#bO1YKlL0;-SKcN=+e=anFPlV zP%_9$+C#itK`Le=@I#PM`Ro6_3lHCO15$Po(97cdltzG43+S8!J~WbuC8uH6eU88; zyYGiA-?~Y0D+ph*@irhF5IMzKc^UqA)&C$}S;@};B7U=4Mi9Rr-5Z_}L&oy)#!D|@ z^NqJ+yY))NMTJ~EEDed20jJoF(z18(_+7t7cG2rd=pN72>HbauMTS-oxuEqH6s&ek z+-^D!`0STa(OM9AHWFqW@QScdLbjBH(VBsqU4TDbb~4ghjuvh?&+qZ){S;cuG$!#$ zGOZgfU!$&PA(b$oRTkl%zub!hjy?s+wsq0nRYKsEk%*O%oBuricEcr@U{+9}08XDX zQ=_EWn7YM|IPBQ3!cMP`l9ocw(_v6x*^)W9^M)U>tA^#10|ups=^G$8TE(wS^C#Dh zMR^oRD4vX~Lh(zYCVK8t-PHQy>h@Gs^;w+}7%K65Na!)6!ANB ztVXmaseEYyKq^(TQSRvIs9FfqLrX4vNvmi2pA@ARUwm&h^3zFRG|iM$G5)qEq+<7pO}oczC^^* zxv6aG7ujD#PSaST^HaYOR)i1XM4#Vs%PrjBQUy*{Dx?sXs&PpG)87(vIq9pd^LM`e z9sK*Le^*IoHSrqJZ9yFrA<&5UEx8t^Zo4=3JMFv3BE>na1o4|>#V--RFFyWfMf|D} zr4Sea0_2W~j(ISH%DSxY{UW=@r zS#ipFJcXqlZKT2oNEfS_tSX>SJuQu$jy(e(*?C_S@&Ve-5Vw5)Oz5Q*oMViRC9+fx z%Mmd{>ne0)!0MME-Ug>-Bb;*f_ks9^oZB>J6;N6EE?$1@KFoXZIdrY;Lb|O3rLu#T z_72$b7=ls(<>m8XxJ6hrS7_S^N1pv7wqzxRT*}Ym?(4sW{Ni`u2O;c^$=GbWow4sB z#~_SPMOPV!wIs20{>!-krYn%~m!p(j2s_z|a%?@E@b&Ma+irtpC(s^hnET8uJpR|; z!9B)Qr=Jb0Wj(l=4UsQ&p~LQE;x}1Y!a06PWjvbAtyEC)4FvXNoOI3)Q0my0 z7f!A60^I+HpJUk@FCby*$a^{_Z@dK#`TR*BRcYC3<9qr_!JbwF?Sn%po z2uO(->%ioX?v4G9Ivt@k3BFrG%KrfO-gZ5f%zc*CjYgscJM4As8l zX0YF3pU0*fi-DUwurXePPhx6 z!?XW<0I$xS3((@ZW#eO?I2fNi;8=KChV$>m^={mD{aHZQ0vHvb;3lgIYbQPxrQ8a1 zI1BN@lYhsPPyUlFfo+ejqa;o`=ljq)rlM?HtWbX8AAi6LkNpK^aRrh_(2Mvbh4X}u z?1O{9I0Ih11AZ}!4(9_r`M{sB=-s&}7Ymp=WgV=y-a07dSK`$d=P((}3RGI&42Zoo zV5TOaWs~i3$d}GSIX#8dY45%HGB(^`1B9_QgoUMe=++;gvglRVUN;Qg6XU-L#IF_v z2u%ZJ-G@DS6MXUOSEI{sMQEjAI(a;K&o8j}jX7{jd44C@Y>O>%;Ac*RH(@Juc`=wV zlNG;z|M|JX>N(WmYB3bdB%dC1z;L_p(yTl2&aB&z2)Z~Q zw#e%`_@;>8(cHaP^|3lM-#}T5A#9tB?LKh;w%PLlw)Q1;EoJg`A;vd3aZoPJTQ8YW z%@IlgavwkDENry(9x#m<^x`u7=JK-;ce9+&l~j9S=%Hk1u+JA~V8f5@3g5HQu6y{| zw@yJU=;ATPfnszH%|brwLrYD<`rGV*ZTI{nHrZxt6iOnf!!#|J23$F!v>{G>BJ4!Tm~b%0y^L0li2OR!?DR8`-}OD+lkwLas{-7^t>&hVCx`G(I(Yv zIfi4;znW8V_+@XU$q z7*cjUgs~9hLKEil(7b@hui@tK?wp75?BjRCtt^d*UvW=1d~z(LF;4rMDCQ|BXA0Ka zes^rU=U&+8BOgJbRE9@^SEd2q6Y1^9YC7q4 zd?w7|msekj{ilBl+i$xIT+a~CT_ZruaPjV&|Ha?$z76P_4-`5PC(D0Ah}FeC#4j|u zCR$JkTCmaX)3N`jj)7sfppta|-GcktJT9z=nhMfj;G$lqoYztY8VFso-p{I2TfraJ$4ML-e1}DHW$x6 zr4i{y1RdERpMCb(*kg}9cpoC{NzMA@FMqjT;*c&}dN`5=^4)je&9=?b z@|TD{B1Vrm;)trPFOieAo6Mm=iv-d(m`G$vAkybVP)iGEB3$YB7hil47hG@w6P?r^ zvR|fr#Zpx&31QAN5d{1>{Z51;xmyC8>#$HfBMrn|NQg0>O{hl 
z0+s4P7S%W3d^3~XlI$E!7uKI&T5ePM*IaWAcHMQ?DmH9K7UcnxIehESO~*g;%ri{A zWmKC_v^|U#T8e9NcXxM(V#O(1N^y4y?q0OGQ`}vGYjJlgE+Ig0`KP~o?|R=)S!At* z%sg|>?AiOAJ>(tM3MJ^;z#XhoRbjAn6TcpQSx;(6hvv&{T#+&@)_tki3&TYRNOUOn zF_5_uYP^15?uSA`H4H;ScgniPtg84eJ;F|b;LC&c7vvZmDCf$y;@NC}%i(*%q*Gwt zDCH8mnq^lV#QMkYOnVYsAta!^I*o#1Cp|KaGG@8RBD+fKmL+M@NiL4UsLveBI@O2v zL>63iE0pt|L;_b~w*@?}hRP z^I)p=!b$|>`B^bm>DQ7tzBd>;&MO=`{q^5OS?*i;sAzkve9a=o$8Y?J$?zlm8{l%v zb8lu1%OsoQfBJt3nVH}w!s*)}(VeJxbco<>qcoD)0zSFIX7Zk-7jOL@(e z$=nb=72SNDQ=`{e_~q!c=Z;u>ADi{Ecf6|UQ@^T=f`g0_^N{tnnLEwLY-Jr(0`slm z>N!C(k}qnkhnOz-dqZS?@GSdxrmZUx%jg8-?q`E^U?u6f|*uwd4 zgo9E{b)p=v5tqwwD$28_`+Y!c{b6&>KMdA=oftGy!;7>nIB!9~Cx$~5L;o~*@Mv(n z+@GA7R-L{|F{9|>>zpthBP^E6j7rb!=F=aSk*XvpMYHm!GQ!W9AhXx?((UPjf(^Kd ztbjcgAPl(7z*J^5{ClSSARP5G<+|eg6mqn51taM~nNv81FI}viZ#7~jll?6&0K*VU zw~@=1)zjpNE}Wal%$?+GKmcxfz)v!{)hG(;j!{L@ZXI!Ei%74g>b;2vC3e#n7OzA$ zykvo;x(ntZzm_-R=mSw%by$9g4^U<$V{ETRV}#1e#@B&f0QYY+V&msuebH?emK4RO zwHv|~V(#cUfFteZr3HDzeHsqCc&S>@jIr!sPLyO@NTe0n;4`PWC8n5x2^?>tt;Fck z&QAiSYKlnv=hv%-9p-E#rNX=9T72-SH3R>x=AYK1@FU)}08bRgV&OwsYG)D(buQZN zw8W=vw{^f=Q^eEY$e6*Q%!{;T-tOrLAww2m2iK9-VGJ*+xvWrggaR|VLGbi&OB55k zk$F4W6HOyBM7zj@aOLRTlwrg@jZ9vldNl~=_G$govh5-KG)Z&eSF{=Xz#3yTvpiS9 z_OOCIuE^8lfN0J!C8OScv8Q)G_i5zmW=Bpw+17(Nmc3hErZbMV-LJZ$`@NZ)V~Stt zLQNw+94_w@@0$L?-y`cd^w7PZ1)69(bN6Ej`2V^;??KBIBoR7;Qi8bnU$AM`0V@jA z_16`OTdNPG*-L1)re7+A&alKKXawlOvXXk2y6=GWP0HWgHiLeEhLherxOVsVBan(_ zDa|(ah*uh?Rm;SDC(I7v+I0XA9YE<(OBwOrA#Us<4yi!o_8~@BK7=f_V2dJC1YGm6 zXs5eaoj$KOne>q*3}CQWKJ+m7+%bFN7E;?^oV9S{TYWFixe*rE%u79IWfVPvd(~+q zwe)NM;8()mJmv{$1*O{A)FCQFX|*^lM^c-z#v3mcwOJW3szxgdgErDP zQi>YEF-roaLi@)hS?9sAcsqJhcPw&rhQLS&?>fDp_`Fc&L_fpfPMQlGl74@F8w-%` z!*`m*0M|Ei;t`1H>;QwWs$R40`@Wt?xa+a6bnR1*(#P1^jd(cHc#selWz8GQ965== z>AIaptHd23^wwuLx4tr*Ru$q)xgrOG+LHx}itif-F3}+2Wrlkka9T=Yi+>e_epgCpD1|FP$C-D1$EI(9(iM(~r1)5kY0X+1aoLLi&j+ z#kIC8oXIG9no&&rB(m*xAL4bwxTZa;t5X_AJoN~YNe56oiKmJC`IsjSa#8n|!L`_R zqiDGyWYp=%z&9QI-akO3+yR>PxXsC6sV)GLfxlH5|NRNw?GVM>oeW1(kJI5F>B)L^ z4g_A})`j>nPwqR9w$#t3Nz}?^Tk_Jb7j1`Eg0vz&;T-_MjP|l`xNv(Uurkkacciu| zFSy_}Vqzp_87wspbOW2ZDh77eZiiv75{ZZH(& z{n-rLlUY)&D5c!;9x;Edk%5(-l!AVB(?P&X-M2i|@BECO>c75imt4;bL;~dV6{9~j zDiowlD(ovr`Zcm`LJrdwmozWf0AQ3M>01dFFcU`&$dmG~vG*~}MOr?!9NERd_r*%R zg)fS`I&macD6CrR!V7W8hSn(irX~2yxWO$N!A5A_U`x z#t4ih%kOyz0`oC|=b0Bl!73fZaU!to2HW^N`oAd*D%u(V(B8WaIZU9Qwd6-a{XSgJ=esS;0SC*QW!pHx=;CS5*rh7PXN}DU zHRkCQUwD0I%Dyyg3@JXkGrK>j?I-mGV$p4M9B^cPLEFf@5hxb|VoksNBnMC@{+d=GU+(+HruGM(*9{r(hiVKVasGFkkFzT1keDzVw5mx6!iS`>tv z{&Ixs`A~*rnj-SIliU5}&#z_MCBKzVVF#_L-3e%{~9luE93V;=ateS$+?HQa0_xXQkxwq1qMeEIbPXbu+gUV}~c)zUQa( zgvde-M)JeqI`BofDX>sgs$~ssBF?n;Szy_G4|3lbSR=7ymbEnN#K_JshXO^10&k42+lt1WAo>3wRqB$y`sS zFxAbJoaP_7=66233A^^+B~(@hd~t7!VG;A=Y~Fk^(Ki1CXvFwDib&-g@mlJV%8-q^ z=KkSI?7(#w++p8Wuk(u7Fpt<@8?vhd9}gly7bWR9bD$=0o;8YFSuZ{z$7Oio3T7vdfKX7|XTEtcUh`l3I70p7$9H$O-q=#14WU>7I;3?^ z^-LeQtVf_k?CpySLW_vo2muB8b@XhPQgvx9VioAl`DNF`?ATL-F5+CzATg4g-r>Gy zj9;M>f}f{vOx8{72lh>|!4ym!m424yV$c{_?1%#Md_JyWAmA^}0677vJxKUw8GG2AUobY1lD}kzHd~LNbR=KKXsL-{q&pS%##QWmN-NG`TtCD` zx)QCJ^kwM_!a7mdVyPHmQ^l=MWD>RcubcfiG820qa~Ld}?xli7C)vzm5T{)!h|BEf`2u^ZKk@+^&aQL26he&K4^;Sf5Zse1fpQGHEx#-)>KA#TG<-)h#PjW3 zYXC$L51*ktXPylxdSPYxIdhmSlm@U&`b(17EnTl@R+W06!bZh88%c|K(?NZ7?SL;j~tG^7%x6{GnaFW53u#o+|Qn5}1cdt)tv~!dj0ntgQ zg25QE$AW(de$j=E2+%hEX~P*(3AhT7iS8dy^$!?+d+=cPb-is>|=~jp{?=Gbe)kf`+h?^5Wa=! 
zWnaC36(x!u>3U&;h`P~e?{exW6@W^KT_Mx=fc^(RfHCMbFNFSu{Qi;e@)noY<)!<& z5Y)!?{Y9(9xjlw}Je>D3AHf1yVEw!mlHVYdx}F#gE)%7Z`E!5eZ)X3cp})TxhR|-= z12RE*e`a=@r^gctLYL7EN~_w$qj?4TWZ0$DM43-OQPJ8Bqov+@z(vYPLKX*p;`P^j z#K?W{AU@&SoR`2*m6qaN1Zs-Of07GLIFm^+k-Sg{n)91WJrdKUvugv5xI71AZ9w(+1VOaW@Xrdf&^^l93OO}`nP|v%5Q@Jt{fDxs{uP*9Uy{B+oSeAc z>F*D0R+nxMNwh8($n76v2Rgi&Ks?-%$8%r%Z>Rki%4TORR~w^TX3X)6GDmMA@{3@X z-fk`93Ok63=E{q#wNLVuVHKDpQ=!2{Q{D7?Ou$^rWmHAllh2{eAN^3d`s4zk^+S-> zuGe?(90)uv3MevA>%`sv!GvC5>jYYe1v+iMVj`{|m!F*ysW;Ra7pI~Ip!(sG{+pmT z`7k|U7v&<&(;~0aSo}(7TzSc5j^}sG(mCvCC?Hhi_KNSjpvE6j{vV|kc%1(#H`$t< z%pPB_Tyn`q_>sm?zp8DItctP`FbUy&J-2S}pvFU)d>dWW;h3t0$G!;2aWgVH;LX1B z$`o4*;NufH7L0%kl>$M!c4&Uq;^%hMFVC!jwd4R=!B}hN>*NV2PP&=7QF@DXFxjp> ziYBW4Uz@h~Z*TiiPYmIC2i$>Dc684Y)8u<+v%T4I$1c8&ooPFy33Qa3H4so6sHs3k z{Qq<(etFn`45RUOz&G2s_NT}N&@yo0ZL1Jv{p{4fqa6xHsFihtxepzgi+G}g)HA*C zyow5;%f#>#iwL_$W8De4`aSi4LJ8A#>2Tp23zu#P>tB#eaaVaP!_W6LSm1qK>w%<>wc|dsb@=&)+Oa%4)K+voVaY@fLe$Pb*O~t|O@wslk14$wCsPR$UIJZ~ zF0k-jAfpV}hd?;Qw`b{3rEs}9Z9(Hj#!^fXL&yl*EZ^@b|X*=j~H_^LUNMT)k4ghWXmW_$ zd|UHORb?g1>qF1jT8hK#pv6057dSXWf$W_Mq80xcaz}n0xcw<~mh;d9`$(q72N}wX z>5!tiOI3rY`kepbm*};HPd0|J) zVOt)I)$}Vf-?yV`Hh5^vxkLOyOlOqoC)1viswSk%*;G~cA}9o8@!eWEOP{|#rYuX(w4YhLGowbsDpIci zGTLQWUeo=ajJJo^CZrRX0nr#skUV;Ope`lmJB`u{qa5G@3Fzi@{VYby#$)SXq!x(n z0~`2kiA5Q9!5e+O6=Gy`y{s^8i@RwloNC}XzHjC~aghpA_SRDDMbI}ZCmB$TJDy=< za8zp(fMRS;ri$Y?*#{tc_+A!j3Y{W`O>1mkt@3}Hk!3)ED-9}HsAg*AP>Vc!1|u#Ejq73z2p%FK>5!qudn*7I#Gptv@mZNn9kSKzx{kY4!`|z>%;^W8wMw|K-K1pMe`gMX4(A`eL@DO!n}0OnN$qbv zblJ>h74nT# zTgmJD>%eTagbxM2*@T4*>GP4y=<_=+8Ej*Z#@OO~FXM}L#@IRd7=RT%E?OXI%Q$!B zf8?Wny0W4gVhu!-O^Enz6;>m} zZe;p2i`VT~0V=K1D^>BqF;Mt$!g8>l;_oGDBf>!pA|bxM?!nV)VjnZRq&3f+1jQ7JyS{2cIc!*ZRy&Hn1_Uz=ymaYJV0EYHN(% zcmhP2deb9|tbomO0{!!ul>bwd{qz-3j;PM2yc4H#i70biyOmI>u#odpc7LRC2Pnf0HzB`u$>P=m( zeDVc|uik*IN-x)0e*b-Zf6e;EI0An!`3|1-{@WmXQ=@0Km3evmi-O$?ot0<7M@zDZ zBCpaT{Meiz=XYjoe=Go0({P2N!m$&-cSq!@KjLld{Y)9Q_y#}hXmg4Ty`y%}tt;_f2(qGeVXb9BWT`E7CD*u>Ufd)gl6FO+@Nsl?l z2A><1+!*Y-X`DLum3?-Hr#lQuvz?1TW5)NX>8SAUgCLhKe>uN&JaYn*@A`5y?+$yY z>Po{kEty=PN+ezYszkG%n;pMQZTzB*+nF!Am*vZMZjU68-M`-f>;1@uQoyv-@++C+ z$NmRs$`zWZ=HCH!znUC>xZ-=Wm!3pxl@HSvpd7eInSM`k>OFqQ=UXESMoo1A5y6Hd z3w6(DZr#K~w1v`+*ZPcK;y!i;5Z}lJKp!UG@40aPLhmmq<@J@m<4}W7Hbtuxei^0$h6g!p@BFPfqdcMYk=u zfD+BIEw<zthFfRXlLejLSm%A{uZiA-JTwy?;dUc~` ze=i*_+O&;j8pC~><&?w&dN5Y40Nv-YC4&PU(Z7iGA7W;zIdJ2F%sVIkk68ZuYfS?{j|YlQ2KsxN{}S2A z2B6_o3aiWi==1zVO8FQKsgc-PR{vpb;urI1DWI{mQTXQ2PdxuOku+Tp#R2CSLmY_u zeRl(pDD5^)x8{HUsE-YdIf H5%m87S#Q7x literal 0 HcmV?d00001 diff --git a/consensus-types/hdiff/fuzz_test.go b/consensus-types/hdiff/fuzz_test.go new file mode 100644 index 0000000000..6ca31f5192 --- /dev/null +++ b/consensus-types/hdiff/fuzz_test.go @@ -0,0 +1,636 @@ +package hdiff + +import ( + "context" + "encoding/binary" + "strconv" + "strings" + "testing" + + "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v6/testing/util" +) + +const maxFuzzValidators = 10000 +const maxFuzzStateDiffSize = 1000 +const maxFuzzHistoricalRoots = 10000 +const maxFuzzDecodedSize = maxFuzzStateDiffSize * 10 +const maxFuzzScanRange = 200 +const fuzzRootsLengthOffset = 16 +const maxFuzzInputSize = 10 +const oneEthInGwei = 1000000000 + +// FuzzNewHdiff tests parsing variations of realistic diffs +func FuzzNewHdiff(f *testing.F) { + // Add seed corpus with various valid diffs from realistic scenarios + sizes := []uint64{8, 16, 32} + for _, size := range sizes { + source, _ := util.DeterministicGenesisStateElectra(f, size) + + // Create various realistic target states + 
scenarios := []string{"slot_change", "balance_change", "validator_change", "multiple_changes"} + for _, scenario := range scenarios { + target := source.Copy() + + switch scenario { + case "slot_change": + _ = target.SetSlot(source.Slot() + 1) + case "balance_change": + balances := target.Balances() + if len(balances) > 0 { + balances[0] += 1000000000 + _ = target.SetBalances(balances) + } + case "validator_change": + validators := target.Validators() + if len(validators) > 0 { + validators[0].EffectiveBalance += 1000000000 + _ = target.SetValidators(validators) + } + case "multiple_changes": + _ = target.SetSlot(source.Slot() + 5) + balances := target.Balances() + validators := target.Validators() + if len(balances) > 0 && len(validators) > 0 { + balances[0] += 2000000000 + validators[0].EffectiveBalance += 1000000000 + _ = target.SetBalances(balances) + _ = target.SetValidators(validators) + } + } + + validDiff, err := Diff(source, target) + if err == nil { + f.Add(validDiff.StateDiff, validDiff.ValidatorDiffs, validDiff.BalancesDiff) + } + } + } + + f.Fuzz(func(t *testing.T, stateDiff, validatorDiffs, balancesDiff []byte) { + // Limit input sizes to reasonable bounds + if len(stateDiff) > 5000 || len(validatorDiffs) > 5000 || len(balancesDiff) > 5000 { + return + } + + // Bound historical roots length in stateDiff (if it contains snappy-compressed data) + // The historicalRootsLength is read after snappy decompression, but we can still + // limit the compressed input size to prevent extreme decompression ratios + if len(stateDiff) > maxFuzzStateDiffSize { + // Limit stateDiff to prevent potential memory bombs from snappy decompression + stateDiff = stateDiff[:maxFuzzStateDiffSize] + } + + // Bound validator count in validatorDiffs + if len(validatorDiffs) >= 8 { + count := binary.LittleEndian.Uint64(validatorDiffs[0:8]) + if count >= maxFuzzValidators { + boundedCount := count % maxFuzzValidators + binary.LittleEndian.PutUint64(validatorDiffs[0:8], boundedCount) + } + } + + // Bound balance count in balancesDiff + if len(balancesDiff) >= 8 { + count := binary.LittleEndian.Uint64(balancesDiff[0:8]) + if count >= maxFuzzValidators { + boundedCount := count % maxFuzzValidators + binary.LittleEndian.PutUint64(balancesDiff[0:8], boundedCount) + } + } + + input := HdiffBytes{ + StateDiff: stateDiff, + ValidatorDiffs: validatorDiffs, + BalancesDiff: balancesDiff, + } + + // Test parsing - should not panic even with corrupted but bounded data + _, err := newHdiff(input) + _ = err // Expected to fail with corrupted data + }) +} + +// FuzzNewStateDiff tests the newStateDiff function with valid random state diffs +func FuzzNewStateDiff(f *testing.F) { + f.Fuzz(func(t *testing.T, validatorCount uint8, slotDelta uint64, balanceData []byte, validatorData []byte) { + defer func() { + if r := recover(); r != nil { + t.Errorf("newStateDiff panicked: %v", r) + } + }() + + // Bound validator count to reasonable range + validators := uint64(validatorCount%32 + 8) // 8-39 validators + if slotDelta > 100 { + slotDelta = slotDelta % 100 + } + + // Generate random source state + source, _ := util.DeterministicGenesisStateElectra(t, validators) + target := source.Copy() + + // Apply random slot change + _ = target.SetSlot(source.Slot() + primitives.Slot(slotDelta)) + + // Apply random balance changes + if len(balanceData) >= 8 { + balances := target.Balances() + numChanges := int(binary.LittleEndian.Uint64(balanceData[:8])) % len(balances) + for i := 0; i < numChanges && i*8+8 < len(balanceData); i++ { + idx := 
i % len(balances) + delta := int64(binary.LittleEndian.Uint64(balanceData[i*8+8:(i+1)*8+8])) + // Keep delta reasonable + delta = delta % oneEthInGwei // Max 1 ETH change + + if delta < 0 && uint64(-delta) > balances[idx] { + balances[idx] = 0 + } else if delta < 0 { + balances[idx] -= uint64(-delta) + } else { + balances[idx] += uint64(delta) + } + } + _ = target.SetBalances(balances) + } + + // Apply random validator changes + if len(validatorData) > 0 { + validators := target.Validators() + numChanges := int(validatorData[0]) % len(validators) + for i := 0; i < numChanges && i < len(validatorData)-1; i++ { + idx := i % len(validators) + if validatorData[i+1]%2 == 0 { + validators[idx].EffectiveBalance += oneEthInGwei // 1 ETH + } + } + _ = target.SetValidators(validators) + } + + // Create diff between source and target + diff, err := Diff(source, target) + if err != nil { + return // Skip if diff creation fails + } + + // Test newStateDiff with the valid serialized diff from StateDiff field + reconstructed, err := newStateDiff(diff.StateDiff) + if err != nil { + t.Errorf("newStateDiff failed on valid diff: %v", err) + return + } + + // Basic validation that reconstruction worked + if reconstructed == nil { + t.Error("newStateDiff returned nil without error") + } + }) +} + +// FuzzNewValidatorDiffs tests validator diff deserialization with valid diffs +func FuzzNewValidatorDiffs(f *testing.F) { + f.Fuzz(func(t *testing.T, validatorCount uint8, changeData []byte) { + defer func() { + if r := recover(); r != nil { + t.Errorf("newValidatorDiffs panicked: %v", r) + } + }() + + // Bound validator count to reasonable range + validators := uint64(validatorCount%16 + 4) // 4-19 validators + + // Generate random source state + source, _ := util.DeterministicGenesisStateElectra(t, validators) + target := source.Copy() + + // Apply random validator changes based on changeData + if len(changeData) > 0 { + vals := target.Validators() + numChanges := int(changeData[0]) % len(vals) + + for i := 0; i < numChanges && i < len(changeData)-1; i++ { + idx := i % len(vals) + changeType := changeData[i+1] % 4 + + switch changeType { + case 0: // Change effective balance + vals[idx].EffectiveBalance += oneEthInGwei + case 1: // Toggle slashed status + vals[idx].Slashed = !vals[idx].Slashed + case 2: // Change activation epoch + vals[idx].ActivationEpoch++ + case 3: // Change exit epoch + vals[idx].ExitEpoch++ + } + } + _ = target.SetValidators(vals) + } + + // Create diff between source and target + diff, err := Diff(source, target) + if err != nil { + return // Skip if diff creation fails + } + + // Test newValidatorDiffs with the valid serialized diff + reconstructed, err := newValidatorDiffs(diff.ValidatorDiffs) + if err != nil { + t.Errorf("newValidatorDiffs failed on valid diff: %v", err) + return + } + + // Basic validation that reconstruction worked + if reconstructed == nil { + t.Error("newValidatorDiffs returned nil without error") + } + }) +} + +// FuzzNewBalancesDiff tests balance diff deserialization with valid diffs +func FuzzNewBalancesDiff(f *testing.F) { + f.Fuzz(func(t *testing.T, balanceCount uint8, balanceData []byte) { + defer func() { + if r := recover(); r != nil { + t.Errorf("newBalancesDiff panicked: %v", r) + } + }() + + // Bound balance count to reasonable range + numBalances := int(balanceCount%32 + 8) // 8-39 balances + + // Generate simple source state + source, _ := util.DeterministicGenesisStateElectra(t, uint64(numBalances)) + target := source.Copy() + + // Apply random balance 
changes based on balanceData + if len(balanceData) >= 8 { + balances := target.Balances() + numChanges := int(binary.LittleEndian.Uint64(balanceData[:8])) % numBalances + + for i := 0; i < numChanges && i*8+8 < len(balanceData); i++ { + idx := i % numBalances + delta := int64(binary.LittleEndian.Uint64(balanceData[i*8+8:(i+1)*8+8])) + // Keep delta reasonable + delta = delta % oneEthInGwei // Max 1 ETH change + + if delta < 0 && uint64(-delta) > balances[idx] { + balances[idx] = 0 + } else if delta < 0 { + balances[idx] -= uint64(-delta) + } else { + balances[idx] += uint64(delta) + } + } + _ = target.SetBalances(balances) + } + + // Create diff between source and target to get BalancesDiff + diff, err := Diff(source, target) + if err != nil { + return // Skip if diff creation fails + } + + // Test newBalancesDiff with the valid serialized diff + reconstructed, err := newBalancesDiff(diff.BalancesDiff) + if err != nil { + t.Errorf("newBalancesDiff failed on valid diff: %v", err) + return + } + + // Basic validation that reconstruction worked + if reconstructed == nil { + t.Error("newBalancesDiff returned nil without error") + } + }) +} + +// FuzzApplyDiff tests applying variations of valid diffs +func FuzzApplyDiff(f *testing.F) { + // Test with realistic state variations, not random data + ctx := context.Background() + + // Add seed corpus with various valid scenarios + sizes := []uint64{8, 16, 32, 64} + for _, size := range sizes { + source, _ := util.DeterministicGenesisStateElectra(f, size) + target := source.Copy() + + // Different types of realistic changes + scenarios := []func(){ + func() { _ = target.SetSlot(source.Slot() + 1) }, // Slot change + func() { // Balance change + balances := target.Balances() + if len(balances) > 0 { + balances[0] += 1000000000 // 1 ETH + _ = target.SetBalances(balances) + } + }, + func() { // Validator change + validators := target.Validators() + if len(validators) > 0 { + validators[0].EffectiveBalance += 1000000000 + _ = target.SetValidators(validators) + } + }, + } + + for _, scenario := range scenarios { + testTarget := source.Copy() + scenario() + + validDiff, err := Diff(source, testTarget) + if err == nil { + f.Add(validDiff.StateDiff, validDiff.ValidatorDiffs, validDiff.BalancesDiff) + } + } + } + + f.Fuzz(func(t *testing.T, stateDiff, validatorDiffs, balancesDiff []byte) { + // Only test with reasonable sized inputs + if len(stateDiff) > 10000 || len(validatorDiffs) > 10000 || len(balancesDiff) > 10000 { + return + } + + // Bound historical roots length in stateDiff (same as FuzzNewHdiff) + if len(stateDiff) > maxFuzzStateDiffSize { + stateDiff = stateDiff[:maxFuzzStateDiffSize] + } + + // Bound validator count in validatorDiffs + if len(validatorDiffs) >= 8 { + count := binary.LittleEndian.Uint64(validatorDiffs[0:8]) + if count >= maxFuzzValidators { + boundedCount := count % maxFuzzValidators + binary.LittleEndian.PutUint64(validatorDiffs[0:8], boundedCount) + } + } + + // Bound balance count in balancesDiff + if len(balancesDiff) >= 8 { + count := binary.LittleEndian.Uint64(balancesDiff[0:8]) + if count >= maxFuzzValidators { + boundedCount := count % maxFuzzValidators + binary.LittleEndian.PutUint64(balancesDiff[0:8], boundedCount) + } + } + + // Create fresh source state for each test + source, _ := util.DeterministicGenesisStateElectra(t, 8) + + diff := HdiffBytes{ + StateDiff: stateDiff, + ValidatorDiffs: validatorDiffs, + BalancesDiff: balancesDiff, + } + + // Apply diff - errors are expected for fuzzed data + _, err := ApplyDiff(ctx, 
source, diff) + _ = err // Expected to fail with invalid data + }) +} + +// FuzzReadPendingAttestation tests the pending attestation deserialization +func FuzzReadPendingAttestation(f *testing.F) { + // Add edge cases - this function is particularly vulnerable + f.Add([]byte{}) + f.Add([]byte{0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08}) // 8 bytes + f.Add(make([]byte, 200)) // Larger than expected + + // Add a case with large reported length + largeLength := make([]byte, 8) + binary.LittleEndian.PutUint64(largeLength, 0xFFFFFFFF) // Large bits length + f.Add(largeLength) + + f.Fuzz(func(t *testing.T, data []byte) { + defer func() { + if r := recover(); r != nil { + t.Errorf("readPendingAttestation panicked: %v", r) + } + }() + + // Make a copy since the function modifies the slice + dataCopy := make([]byte, len(data)) + copy(dataCopy, data) + + // Bound the bits length by modifying the first 8 bytes if they exist + if len(dataCopy) >= 8 { + // Read the bits length and bound it to maxFuzzValidators + bitsLength := binary.LittleEndian.Uint64(dataCopy[0:8]) + if bitsLength >= maxFuzzValidators { + boundedLength := bitsLength % maxFuzzValidators + binary.LittleEndian.PutUint64(dataCopy[0:8], boundedLength) + } + } + + _, err := readPendingAttestation(&dataCopy) + _ = err + }) +} + +// FuzzKmpIndex tests the KMP algorithm implementation +func FuzzKmpIndex(f *testing.F) { + // Test with integer pointers to match the actual usage + f.Add("1,2,3", "4,5,6") + f.Add("1,2,3", "1,2,3") + f.Add("", "1,2,3") + f.Add("1,1,1", "2,2,2") + + f.Fuzz(func(t *testing.T, sourceStr string, targetStr string) { + defer func() { + if r := recover(); r != nil { + t.Errorf("kmpIndex panicked: %v", r) + } + }() + + // Parse comma-separated strings into int slices + var source, target []int + if sourceStr != "" { + for _, s := range strings.Split(sourceStr, ",") { + if val, err := strconv.Atoi(strings.TrimSpace(s)); err == nil { + source = append(source, val) + } + } + } + if targetStr != "" { + for _, s := range strings.Split(targetStr, ",") { + if val, err := strconv.Atoi(strings.TrimSpace(s)); err == nil { + target = append(target, val) + } + } + } + + // Maintain the precondition: concatenate target with source + // This matches how kmpIndex is actually called in production + combined := make([]int, len(target)+len(source)) + copy(combined, target) + copy(combined[len(target):], source) + + // Convert to pointer slices as used in actual code + combinedPtrs := make([]*int, len(combined)) + for i := range combined { + val := combined[i] + combinedPtrs[i] = &val + } + + integerEquals := func(a, b *int) bool { + if a == nil && b == nil { + return true + } + if a == nil || b == nil { + return false + } + return *a == *b + } + + result := kmpIndex(len(source), combinedPtrs, integerEquals) + + // Basic sanity check: result should be in [0, len(source)] + if result < 0 || result > len(source) { + t.Errorf("kmpIndex returned invalid result: %d for source length=%d", result, len(source)) + } + }) +} + +// FuzzComputeLPS tests the LPS computation for KMP +func FuzzComputeLPS(f *testing.F) { + // Add seed cases + f.Add("1,2,1") + f.Add("1,1,1") + f.Add("1,2,3,4") + f.Add("") + + f.Fuzz(func(t *testing.T, patternStr string) { + defer func() { + if r := recover(); r != nil { + t.Errorf("computeLPS panicked: %v", r) + } + }() + + // Parse comma-separated string into int slice + var pattern []int + if patternStr != "" { + for _, s := range strings.Split(patternStr, ",") { + if val, err := strconv.Atoi(strings.TrimSpace(s)); 
err == nil { + pattern = append(pattern, val) + } + } + } + + // Convert to pointer slice + patternPtrs := make([]*int, len(pattern)) + for i := range pattern { + val := pattern[i] + patternPtrs[i] = &val + } + + integerEquals := func(a, b *int) bool { + if a == nil && b == nil { + return true + } + if a == nil || b == nil { + return false + } + return *a == *b + } + + result := computeLPS(patternPtrs, integerEquals) + + // Verify result length matches input + if len(result) != len(pattern) { + t.Errorf("computeLPS returned wrong length: got %d, expected %d", len(result), len(pattern)) + } + + // Verify all LPS values are non-negative and within bounds + for i, lps := range result { + if lps < 0 || lps > i { + t.Errorf("Invalid LPS value at index %d: %d", i, lps) + } + } + }) +} + +// FuzzDiffToBalances tests balance diff computation +func FuzzDiffToBalances(f *testing.F) { + f.Fuzz(func(t *testing.T, sourceData, targetData []byte) { + defer func() { + if r := recover(); r != nil { + t.Errorf("diffToBalances panicked: %v", r) + } + }() + + // Convert byte data to balance arrays + var sourceBalances, targetBalances []uint64 + + // Parse source balances (8 bytes per uint64) + for i := 0; i+7 < len(sourceData) && len(sourceBalances) < 100; i += 8 { + balance := binary.LittleEndian.Uint64(sourceData[i : i+8]) + sourceBalances = append(sourceBalances, balance) + } + + // Parse target balances + for i := 0; i+7 < len(targetData) && len(targetBalances) < 100; i += 8 { + balance := binary.LittleEndian.Uint64(targetData[i : i+8]) + targetBalances = append(targetBalances, balance) + } + + // Create states with the provided balances + source, _ := util.DeterministicGenesisStateElectra(t, 1) + target, _ := util.DeterministicGenesisStateElectra(t, 1) + + if len(sourceBalances) > 0 { + _ = source.SetBalances(sourceBalances) + } + if len(targetBalances) > 0 { + _ = target.SetBalances(targetBalances) + } + + result, err := diffToBalances(source, target) + + // If no error, verify result consistency + if err == nil && len(result) > 0 { + // Result length should match target length + if len(result) != len(target.Balances()) { + t.Errorf("diffToBalances result length mismatch: got %d, expected %d", + len(result), len(target.Balances())) + } + } + }) +} + +// FuzzValidatorsEqual tests validator comparison +func FuzzValidatorsEqual(f *testing.F) { + f.Fuzz(func(t *testing.T, data []byte) { + defer func() { + if r := recover(); r != nil { + t.Errorf("validatorsEqual panicked: %v", r) + } + }() + + // Create two validators and fuzz their fields + if len(data) < 16 { + return + } + + source, _ := util.DeterministicGenesisStateElectra(t, 2) + validators := source.Validators() + if len(validators) < 2 { + return + } + + val1 := validators[0] + val2 := validators[1] + + // Modify validator fields based on fuzz data + if len(data) > 0 && data[0]%2 == 0 { + val2.EffectiveBalance = val1.EffectiveBalance + uint64(data[0]) + } + if len(data) > 1 && data[1]%2 == 0 { + val2.Slashed = !val1.Slashed + } + + // Create ReadOnlyValidator wrappers if needed + // Since validatorsEqual expects ReadOnlyValidator interface, + // we'll skip this test for now as it requires state wrapper implementation + _ = val1 + _ = val2 + }) +} \ No newline at end of file diff --git a/consensus-types/hdiff/property_test.go b/consensus-types/hdiff/property_test.go new file mode 100644 index 0000000000..058c62c8b6 --- /dev/null +++ b/consensus-types/hdiff/property_test.go @@ -0,0 +1,403 @@ +package hdiff + +import ( + "encoding/binary" + "math" + 
"testing" + "time" + + "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v6/testing/util" +) + +// maxSafeBalance ensures balances can be safely cast to int64 for diff computation +const maxSafeBalance = 1<<52 - 1 + +// PropertyTestRoundTrip verifies that diff->apply is idempotent with realistic data +func FuzzPropertyRoundTrip(f *testing.F) { + f.Fuzz(func(t *testing.T, slotDelta uint64, balanceData []byte, validatorData []byte) { + // Limit to realistic ranges + if slotDelta > 32 { // Max one epoch + slotDelta = slotDelta % 32 + } + + // Convert byte data to realistic deltas and changes + var balanceDeltas []int64 + var validatorChanges []bool + + // Parse balance deltas - limit to realistic amounts (8 bytes per int64) + for i := 0; i+7 < len(balanceData) && len(balanceDeltas) < 20; i += 8 { + delta := int64(binary.LittleEndian.Uint64(balanceData[i : i+8])) + // Keep deltas realistic (max 10 ETH change) + if delta > 10000000000 { + delta = delta % 10000000000 + } + if delta < -10000000000 { + delta = -((-delta) % 10000000000) + } + balanceDeltas = append(balanceDeltas, delta) + } + + // Parse validator changes (1 byte per bool) - limit to small number + for i := 0; i < len(validatorData) && len(validatorChanges) < 10; i++ { + validatorChanges = append(validatorChanges, validatorData[i]%2 == 0) + } + + ctx := t.Context() + + // Create source state with reasonable size + validatorCount := uint64(len(validatorChanges) + 8) // Minimum 8 validators + if validatorCount > 64 { + validatorCount = 64 // Cap at 64 for performance + } + source, _ := util.DeterministicGenesisStateElectra(t, validatorCount) + + // Create target state with modifications + target := source.Copy() + + // Apply slot change + _ = target.SetSlot(source.Slot() + primitives.Slot(slotDelta)) + + // Apply realistic balance changes + if len(balanceDeltas) > 0 { + balances := target.Balances() + for i, delta := range balanceDeltas { + if i >= len(balances) { + break + } + // Apply realistic balance changes with safe bounds + if delta < 0 { + if uint64(-delta) > balances[i] { + balances[i] = 0 // Can't go below 0 + } else { + balances[i] -= uint64(-delta) + } + } else { + // Cap at reasonable maximum (1000 ETH) + maxBalance := uint64(1000000000000) // 1000 ETH in Gwei + if balances[i]+uint64(delta) > maxBalance { + balances[i] = maxBalance + } else { + balances[i] += uint64(delta) + } + } + } + _ = target.SetBalances(balances) + } + + // Apply realistic validator changes + if len(validatorChanges) > 0 { + validators := target.Validators() + for i, shouldChange := range validatorChanges { + if i >= len(validators) { + break + } + if shouldChange { + // Make realistic changes - small effective balance adjustments + validators[i].EffectiveBalance += 1000000000 // 1 ETH + } + } + _ = target.SetValidators(validators) + } + + // Create diff + diff, err := Diff(source, target) + if err != nil { + // If diff creation fails, that's acceptable for malformed inputs + return + } + + // Apply diff + result, err := ApplyDiff(ctx, source, diff) + if err != nil { + // If diff application fails, that's acceptable + return + } + + // Verify round-trip property: source + diff = target + require.Equal(t, target.Slot(), result.Slot()) + + // Verify balance consistency + targetBalances := target.Balances() + resultBalances := result.Balances() + require.Equal(t, len(targetBalances), len(resultBalances)) + for i := range targetBalances { + require.Equal(t, 
targetBalances[i], resultBalances[i], "Balance mismatch at index %d", i) + } + + // Verify validator consistency + targetVals := target.Validators() + resultVals := result.Validators() + require.Equal(t, len(targetVals), len(resultVals)) + for i := range targetVals { + require.Equal(t, targetVals[i].Slashed, resultVals[i].Slashed, "Validator slashing mismatch at index %d", i) + require.Equal(t, targetVals[i].EffectiveBalance, resultVals[i].EffectiveBalance, "Validator balance mismatch at index %d", i) + } + }) +} + +// PropertyTestReasonablePerformance verifies operations complete quickly with realistic data +func FuzzPropertyResourceBounds(f *testing.F) { + f.Fuzz(func(t *testing.T, validatorCount uint8, slotDelta uint8, changeCount uint8) { + // Use realistic parameters + validators := uint64(validatorCount%64 + 8) // 8-71 validators + slots := uint64(slotDelta % 32) // 0-31 slots + changes := int(changeCount % 10) // 0-9 changes + + // Create realistic states + source, _ := util.DeterministicGenesisStateElectra(t, validators) + target := source.Copy() + + // Apply realistic changes + _ = target.SetSlot(source.Slot() + primitives.Slot(slots)) + + if changes > 0 { + validatorList := target.Validators() + for i := 0; i < changes && i < len(validatorList); i++ { + validatorList[i].EffectiveBalance += 1000000000 // 1 ETH + } + _ = target.SetValidators(validatorList) + } + + // Operations should complete quickly + start := time.Now() + diff, err := Diff(source, target) + duration := time.Since(start) + + if err == nil { + // Should be fast + require.Equal(t, true, duration < time.Second, "Diff creation too slow: %v", duration) + + // Apply should also be fast + start = time.Now() + _, err = ApplyDiff(t.Context(), source, diff) + duration = time.Since(start) + + if err == nil { + require.Equal(t, true, duration < time.Second, "Diff application too slow: %v", duration) + } + } + }) +} + +// PropertyTestDiffSize verifies that diffs are smaller than full states for typical cases +func FuzzPropertyDiffEfficiency(f *testing.F) { + f.Fuzz(func(t *testing.T, slotDelta uint64, numChanges uint8) { + if slotDelta > 100 { + slotDelta = slotDelta % 100 + } + if numChanges > 10 { + numChanges = numChanges % 10 + } + + // Create states with small differences + source, _ := util.DeterministicGenesisStateElectra(t, 64) + target := source.Copy() + + _ = target.SetSlot(source.Slot() + primitives.Slot(slotDelta)) + + // Make a few small changes + if numChanges > 0 { + validators := target.Validators() + for i := uint8(0); i < numChanges && int(i) < len(validators); i++ { + validators[i].EffectiveBalance += 1000 + } + _ = target.SetValidators(validators) + } + + // Create diff + diff, err := Diff(source, target) + if err != nil { + return + } + + // For small changes, diff should be much smaller than full state + sourceSSZ, err := source.MarshalSSZ() + if err != nil { + return + } + + diffSize := len(diff.StateDiff) + len(diff.ValidatorDiffs) + len(diff.BalancesDiff) + + // Diff should be smaller than full state for small changes + if numChanges <= 5 && slotDelta <= 10 { + require.Equal(t, true, diffSize < len(sourceSSZ)/2, + "Diff size %d should be less than half of state size %d", diffSize, len(sourceSSZ)) + } + }) +} + +// PropertyTestBalanceConservation verifies that balance operations don't create/destroy value unexpectedly +func FuzzPropertyBalanceConservation(f *testing.F) { + f.Fuzz(func(t *testing.T, balanceData []byte) { + // Convert byte data to balance changes, bounded to safe range + var 
balanceChanges []int64 + for i := 0; i+7 < len(balanceData) && len(balanceChanges) < 50; i += 8 { + rawChange := int64(binary.LittleEndian.Uint64(balanceData[i : i+8])) + // Bound the change to ensure resulting balances stay within safe range + change := rawChange % (maxSafeBalance / 2) // Divide by 2 to allow for addition/subtraction + balanceChanges = append(balanceChanges, change) + } + + source, _ := util.DeterministicGenesisStateElectra(t, uint64(len(balanceChanges)+10)) + originalBalances := source.Balances() + + // Ensure initial balances are within safe range for int64 casting + for i, balance := range originalBalances { + if balance > maxSafeBalance { + originalBalances[i] = balance % maxSafeBalance + } + } + _ = source.SetBalances(originalBalances) + + // Calculate total before + var totalBefore uint64 + for _, balance := range originalBalances { + totalBefore += balance + } + + // Apply balance changes via diff system + target := source.Copy() + targetBalances := target.Balances() + + var totalDelta int64 + for i, delta := range balanceChanges { + if i >= len(targetBalances) { + break + } + + // Prevent underflow + if delta < 0 && uint64(-delta) > targetBalances[i] { + totalDelta -= int64(targetBalances[i]) // Actually lost amount (negative) + targetBalances[i] = 0 + } else if delta < 0 { + targetBalances[i] -= uint64(-delta) + totalDelta += delta + } else { + // Prevent overflow + if uint64(delta) > math.MaxUint64-targetBalances[i] { + gained := math.MaxUint64 - targetBalances[i] + totalDelta += int64(gained) + targetBalances[i] = math.MaxUint64 + } else { + targetBalances[i] += uint64(delta) + totalDelta += delta + } + } + } + _ = target.SetBalances(targetBalances) + + // Apply through diff system + diff, err := Diff(source, target) + if err != nil { + return + } + + result, err := ApplyDiff(t.Context(), source, diff) + if err != nil { + return + } + + // Calculate total after + resultBalances := result.Balances() + var totalAfter uint64 + for _, balance := range resultBalances { + totalAfter += balance + } + + // Verify conservation (accounting for intended changes) + expectedTotal := totalBefore + if totalDelta >= 0 { + expectedTotal += uint64(totalDelta) + } else { + if uint64(-totalDelta) <= expectedTotal { + expectedTotal -= uint64(-totalDelta) + } else { + expectedTotal = 0 + } + } + + require.Equal(t, expectedTotal, totalAfter, + "Balance conservation violated: before=%d, delta=%d, expected=%d, actual=%d", + totalBefore, totalDelta, expectedTotal, totalAfter) + }) +} + +// PropertyTestMonotonicSlot verifies slot only increases +func FuzzPropertyMonotonicSlot(f *testing.F) { + f.Fuzz(func(t *testing.T, slotDelta uint64) { + source, _ := util.DeterministicGenesisStateElectra(t, 16) + target := source.Copy() + + targetSlot := source.Slot() + primitives.Slot(slotDelta) + _ = target.SetSlot(targetSlot) + + diff, err := Diff(source, target) + if err != nil { + return + } + + result, err := ApplyDiff(t.Context(), source, diff) + if err != nil { + return + } + + // Slot should never decrease + require.Equal(t, true, result.Slot() >= source.Slot(), + "Slot decreased from %d to %d", source.Slot(), result.Slot()) + + // Slot should match target + require.Equal(t, targetSlot, result.Slot()) + }) +} + +// PropertyTestValidatorIndexIntegrity verifies validator indices remain consistent +func FuzzPropertyValidatorIndices(f *testing.F) { + f.Fuzz(func(t *testing.T, changeData []byte) { + // Convert byte data to boolean changes + var changes []bool + for i := 0; i < len(changeData) && 
len(changes) < 20; i++ { + changes = append(changes, changeData[i]%2 == 0) + } + + source, _ := util.DeterministicGenesisStateElectra(t, uint64(len(changes)+5)) + target := source.Copy() + + // Apply changes + validators := target.Validators() + for i, shouldChange := range changes { + if i >= len(validators) { + break + } + if shouldChange { + validators[i].EffectiveBalance += 1000 + } + } + _ = target.SetValidators(validators) + + diff, err := Diff(source, target) + if err != nil { + return + } + + result, err := ApplyDiff(t.Context(), source, diff) + if err != nil { + return + } + + // Validator count should not decrease + require.Equal(t, true, len(result.Validators()) >= len(source.Validators()), + "Validator count decreased from %d to %d", len(source.Validators()), len(result.Validators())) + + // Public keys should be preserved for existing validators + sourceVals := source.Validators() + resultVals := result.Validators() + for i := range sourceVals { + if i < len(resultVals) { + require.DeepEqual(t, sourceVals[i].PublicKey, resultVals[i].PublicKey, + "Public key changed at validator index %d", i) + } + } + }) +} \ No newline at end of file diff --git a/consensus-types/hdiff/security_test.go b/consensus-types/hdiff/security_test.go new file mode 100644 index 0000000000..697fb25daf --- /dev/null +++ b/consensus-types/hdiff/security_test.go @@ -0,0 +1,392 @@ +package hdiff + +import ( + "fmt" + "sync" + "testing" + "time" + + "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v6/testing/util" +) + +// TestIntegerOverflowProtection tests protection against balance overflow attacks +func TestIntegerOverflowProtection(t *testing.T) { + source, _ := util.DeterministicGenesisStateElectra(t, 8) + + // Test balance overflow in diffToBalances - use realistic values + t.Run("balance_diff_overflow", func(t *testing.T) { + target := source.Copy() + balances := target.Balances() + + // Set high but realistic balance values (32 ETH in Gwei = 32e9) + balances[0] = 32000000000 // 32 ETH + balances[1] = 64000000000 // 64 ETH + _ = target.SetBalances(balances) + + // This should work fine with realistic values + diffs, err := diffToBalances(source, target) + require.NoError(t, err) + + // Verify the diffs are reasonable + require.Equal(t, true, len(diffs) > 0, "Should have balance diffs") + }) + + // Test reasonable balance changes + t.Run("realistic_balance_changes", func(t *testing.T) { + // Create realistic balance changes (slashing, rewards) + balancesDiff := []int64{1000000000, -500000000, 2000000000} // 1 ETH gain, 0.5 ETH loss, 2 ETH gain + + // Apply to state with normal balances + testSource := source.Copy() + normalBalances := []uint64{32000000000, 32000000000, 32000000000} // 32 ETH each + _ = testSource.SetBalances(normalBalances) + + // This should work fine + result, err := applyBalancesDiff(testSource, balancesDiff) + require.NoError(t, err) + + resultBalances := result.Balances() + require.Equal(t, uint64(33000000000), resultBalances[0]) // 33 ETH + require.Equal(t, uint64(31500000000), resultBalances[1]) // 31.5 ETH + require.Equal(t, uint64(34000000000), resultBalances[2]) // 34 ETH + }) +} + +// TestReasonablePerformance tests that operations complete in reasonable time +func TestReasonablePerformance(t *testing.T) { + t.Run("large_state_performance", func(t *testing.T) { + // Test with a large but realistic validator set + source, _ := util.DeterministicGenesisStateElectra(t, 1000) // 1000 validators + target := source.Copy() + + // Make realistic 
changes + _ = target.SetSlot(source.Slot() + 32) // One epoch + validators := target.Validators() + for i := 0; i < 100; i++ { // 10% of validators changed + validators[i].EffectiveBalance += 1000000000 // 1 ETH change + } + _ = target.SetValidators(validators) + + // Should complete quickly + start := time.Now() + diff, err := Diff(source, target) + duration := time.Since(start) + + require.NoError(t, err) + require.Equal(t, true, duration < time.Second, "Diff creation took too long: %v", duration) + require.Equal(t, true, len(diff.StateDiff) > 0, "Should have state diff") + }) + + t.Run("realistic_diff_application", func(t *testing.T) { + // Test applying diffs to large states + source, _ := util.DeterministicGenesisStateElectra(t, 500) + target := source.Copy() + _ = target.SetSlot(source.Slot() + 1) + + // Create and apply diff + diff, err := Diff(source, target) + require.NoError(t, err) + + start := time.Now() + result, err := ApplyDiff(t.Context(), source, diff) + duration := time.Since(start) + + require.NoError(t, err) + require.Equal(t, target.Slot(), result.Slot()) + require.Equal(t, true, duration < time.Second, "Diff application took too long: %v", duration) + }) +} + +// TestStateTransitionValidation tests realistic state transition scenarios +func TestStateTransitionValidation(t *testing.T) { + t.Run("validator_slashing_scenario", func(t *testing.T) { + source, _ := util.DeterministicGenesisStateElectra(t, 10) + target := source.Copy() + + // Simulate validator slashing (realistic scenario) + validators := target.Validators() + validators[0].Slashed = true + validators[0].EffectiveBalance = 0 // Slashed validator loses balance + _ = target.SetValidators(validators) + + // This should work fine + diff, err := Diff(source, target) + require.NoError(t, err) + + result, err := ApplyDiff(t.Context(), source, diff) + require.NoError(t, err) + require.Equal(t, true, result.Validators()[0].Slashed) + require.Equal(t, uint64(0), result.Validators()[0].EffectiveBalance) + }) + + t.Run("epoch_transition_scenario", func(t *testing.T) { + source, _ := util.DeterministicGenesisStateElectra(t, 64) + target := source.Copy() + + // Simulate epoch transition with multiple changes + _ = target.SetSlot(source.Slot() + 32) // One epoch + + // Some validators get rewards, others get penalties + balances := target.Balances() + for i := 0; i < len(balances); i++ { + if i%2 == 0 { + balances[i] += 100000000 // 0.1 ETH reward + } else { + if balances[i] > 50000000 { + balances[i] -= 50000000 // 0.05 ETH penalty + } + } + } + _ = target.SetBalances(balances) + + // This should work smoothly + diff, err := Diff(source, target) + require.NoError(t, err) + + result, err := ApplyDiff(t.Context(), source, diff) + require.NoError(t, err) + require.Equal(t, target.Slot(), result.Slot()) + }) + + t.Run("consistent_state_root", func(t *testing.T) { + // Test that diffs preserve state consistency + source, _ := util.DeterministicGenesisStateElectra(t, 32) + target := source.Copy() + + // Make minimal changes + _ = target.SetSlot(source.Slot() + 1) + + // Diff and apply should be consistent + diff, err := Diff(source, target) + require.NoError(t, err) + + result, err := ApplyDiff(t.Context(), source, diff) + require.NoError(t, err) + + // Result should match target + require.Equal(t, target.Slot(), result.Slot()) + require.Equal(t, len(target.Validators()), len(result.Validators())) + require.Equal(t, len(target.Balances()), len(result.Balances())) + }) +} + +// TestSerializationRoundTrip tests serialization 
consistency +func TestSerializationRoundTrip(t *testing.T) { + t.Run("diff_serialization_consistency", func(t *testing.T) { + // Test that serialization and deserialization are consistent + source, _ := util.DeterministicGenesisStateElectra(t, 16) + target := source.Copy() + + // Make changes + _ = target.SetSlot(source.Slot() + 5) + validators := target.Validators() + validators[0].EffectiveBalance += 1000000000 + _ = target.SetValidators(validators) + + // Create diff + diff1, err := Diff(source, target) + require.NoError(t, err) + + // Deserialize and re-serialize + hdiff, err := newHdiff(diff1) + require.NoError(t, err) + + diff2 := hdiff.serialize() + + // Apply both diffs - should get same result + result1, err := ApplyDiff(t.Context(), source, diff1) + require.NoError(t, err) + + result2, err := ApplyDiff(t.Context(), source, diff2) + require.NoError(t, err) + + require.Equal(t, result1.Slot(), result2.Slot()) + require.Equal(t, result1.Validators()[0].EffectiveBalance, result2.Validators()[0].EffectiveBalance) + }) + + t.Run("empty_diff_handling", func(t *testing.T) { + // Test that empty diffs are handled correctly + source, _ := util.DeterministicGenesisStateElectra(t, 8) + target := source.Copy() // No changes + + // Should create minimal diff + diff, err := Diff(source, target) + require.NoError(t, err) + + // Apply should work and return equivalent state + result, err := ApplyDiff(t.Context(), source, diff) + require.NoError(t, err) + + require.Equal(t, source.Slot(), result.Slot()) + require.Equal(t, len(source.Validators()), len(result.Validators())) + }) + + t.Run("compression_efficiency", func(t *testing.T) { + // Test that compression is working effectively + source, _ := util.DeterministicGenesisStateElectra(t, 100) + target := source.Copy() + + // Make small changes + _ = target.SetSlot(source.Slot() + 1) + validators := target.Validators() + validators[0].EffectiveBalance += 1000000000 + _ = target.SetValidators(validators) + + // Create diff + diff, err := Diff(source, target) + require.NoError(t, err) + + // Get full state size + fullStateSSZ, err := target.MarshalSSZ() + require.NoError(t, err) + + // Diff should be much smaller than full state + diffSize := len(diff.StateDiff) + len(diff.ValidatorDiffs) + len(diff.BalancesDiff) + require.Equal(t, true, diffSize < len(fullStateSSZ)/2, + "Diff should be smaller than full state: diff=%d, full=%d", diffSize, len(fullStateSSZ)) + }) +} + +// TestKMPSecurity tests the KMP algorithm for security issues +func TestKMPSecurity(t *testing.T) { + t.Run("nil_pointer_handling", func(t *testing.T) { + // Test with nil pointers in the pattern/text + pattern := []*int{nil, nil, nil} + text := []*int{nil, nil, nil, nil, nil} + + equals := func(a, b *int) bool { + if a == nil && b == nil { + return true + } + if a == nil || b == nil { + return false + } + return *a == *b + } + + // Should not panic - result can be any integer + result := kmpIndex(len(pattern), text, equals) + _ = result // Any result is valid, just ensure no panic + }) + + t.Run("empty_pattern_edge_case", func(t *testing.T) { + var pattern []*int + text := []*int{new(int), new(int)} + + equals := func(a, b *int) bool { return a == b } + + result := kmpIndex(0, text, equals) + require.Equal(t, 0, result, "Empty pattern should return 0") + _ = pattern // Silence unused variable warning + }) + + t.Run("realistic_pattern_performance", func(t *testing.T) { + // Test with realistic sizes to ensure good performance + realisticSize := 100 // More realistic for validator 
arrays + pattern := make([]*int, realisticSize) + text := make([]*int, realisticSize*2) + + // Create realistic pattern + for i := range pattern { + val := i % 10 // More variation + pattern[i] = &val + } + for i := range text { + val := i % 10 + text[i] = &val + } + + equals := func(a, b *int) bool { + if a == nil && b == nil { + return true + } + if a == nil || b == nil { + return false + } + return *a == *b + } + + start := time.Now() + result := kmpIndex(len(pattern), text, equals) + duration := time.Since(start) + + // Should complete quickly with realistic inputs + require.Equal(t, true, duration < time.Second, + "KMP took too long: %v", duration) + _ = result // Any result is valid, just ensure performance is good + }) +} + +// TestConcurrencySafety tests thread safety of the hdiff operations +func TestConcurrencySafety(t *testing.T) { + t.Run("concurrent_diff_creation", func(t *testing.T) { + source, _ := util.DeterministicGenesisStateElectra(t, 32) + target := source.Copy() + _ = target.SetSlot(source.Slot() + 1) + + const numGoroutines = 10 + const iterations = 100 + + var wg sync.WaitGroup + errors := make(chan error, numGoroutines*iterations) + + for i := 0; i < numGoroutines; i++ { + wg.Add(1) + go func(workerID int) { + defer wg.Done() + + for j := 0; j < iterations; j++ { + _, err := Diff(source, target) + if err != nil { + errors <- fmt.Errorf("worker %d iteration %d: %v", workerID, j, err) + } + } + }(i) + } + + wg.Wait() + close(errors) + + // Check for any errors + for err := range errors { + t.Error(err) + } + }) + + t.Run("concurrent_diff_application", func(t *testing.T) { + ctx := t.Context() + source, _ := util.DeterministicGenesisStateElectra(t, 16) + target := source.Copy() + _ = target.SetSlot(source.Slot() + 5) + + diff, err := Diff(source, target) + require.NoError(t, err) + + const numGoroutines = 10 + var wg sync.WaitGroup + errors := make(chan error, numGoroutines) + + for i := 0; i < numGoroutines; i++ { + wg.Add(1) + go func(workerID int) { + defer wg.Done() + + // Each goroutine needs its own copy of the source state + localSource := source.Copy() + _, err := ApplyDiff(ctx, localSource, diff) + if err != nil { + errors <- fmt.Errorf("worker %d: %v", workerID, err) + } + }(i) + } + + wg.Wait() + close(errors) + + // Check for any errors + for err := range errors { + t.Error(err) + } + }) +} \ No newline at end of file diff --git a/consensus-types/hdiff/state_diff.go b/consensus-types/hdiff/state_diff.go new file mode 100644 index 0000000000..608b37e115 --- /dev/null +++ b/consensus-types/hdiff/state_diff.go @@ -0,0 +1,2145 @@ +package hdiff + +import ( + "bytes" + "context" + "encoding/binary" + "slices" + + "github.com/OffchainLabs/prysm/v6/beacon-chain/core/altair" + "github.com/OffchainLabs/prysm/v6/beacon-chain/core/capella" + "github.com/OffchainLabs/prysm/v6/beacon-chain/core/deneb" + "github.com/OffchainLabs/prysm/v6/beacon-chain/core/electra" + "github.com/OffchainLabs/prysm/v6/beacon-chain/core/execution" + "github.com/OffchainLabs/prysm/v6/beacon-chain/core/fulu" + "github.com/OffchainLabs/prysm/v6/beacon-chain/state" + fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" + "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v6/consensus-types/helpers" + "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" + "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" + enginev1 "github.com/OffchainLabs/prysm/v6/proto/engine/v1" + ethpb 
"github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v6/runtime/version" + "github.com/golang/snappy" + "github.com/pkg/errors" + ssz "github.com/prysmaticlabs/fastssz" + "github.com/prysmaticlabs/go-bitfield" + "github.com/sirupsen/logrus" + "google.golang.org/protobuf/proto" +) + +// HdiffBytes represents the serialized difference between two beacon states. +type HdiffBytes struct { + StateDiff []byte + ValidatorDiffs []byte + BalancesDiff []byte +} + +// Diff computes the difference between two beacon states and returns it as a serialized HdiffBytes object. +func Diff(source, target state.ReadOnlyBeaconState) (HdiffBytes, error) { + h, err := diffInternal(source, target) + if err != nil { + return HdiffBytes{}, err + } + return h.serialize(), nil +} + +// ApplyDiff appplies the given serialized diff to the source beacon state and returns the resulting state. +func ApplyDiff(ctx context.Context, source state.BeaconState, diff HdiffBytes) (state.BeaconState, error) { + hdiff, err := newHdiff(diff) + if err != nil { + return nil, errors.Wrap(err, "failed to create Hdiff") + } + if source, err = applyStateDiff(ctx, source, hdiff.stateDiff); err != nil { + return nil, errors.Wrap(err, "failed to apply state diff") + } + if source, err = applyBalancesDiff(source, hdiff.balancesDiff); err != nil { + return nil, errors.Wrap(err, "failed to apply balances diff") + } + if source, err = applyValidatorDiff(source, hdiff.validatorDiffs); err != nil { + return nil, errors.Wrap(err, "failed to apply validator diff") + } + return source, nil +} + +// stateDiff is a type that represents a difference between two different beacon states. Except from the validator registry and the balances. +// Fields marked as "override" are either zeroed out or nil when there is no diff or the full new value when there is a diff. +// Except when zero may be a valid value, in which case override means the new value (eg. justificationBits). +// Fields marked as "append only" consist of a list of items that are appended to the existing list. +type stateDiff struct { + // genesis_time does not change. + // genesis_validators_root does not change. + targetVersion int + eth1VotesAppend bool // Positioned here because of alignement. + justificationBits byte // override. + slot primitives.Slot // override. + fork *ethpb.Fork // override. + latestBlockHeader *ethpb.BeaconBlockHeader // override. + blockRoots [fieldparams.BlockRootsLength][fieldparams.RootLength]byte // zero or override. + stateRoots [fieldparams.StateRootsLength][fieldparams.RootLength]byte // zero or override. + historicalRoots [][fieldparams.RootLength]byte // append only. + eth1Data *ethpb.Eth1Data // override. + eth1DataVotes []*ethpb.Eth1Data // append only or override. + eth1DepositIndex uint64 // override. + randaoMixes [fieldparams.RandaoMixesLength][fieldparams.RootLength]byte // zero or override. + slashings [fieldparams.SlashingsLength]int64 // algebraic diff. + previousEpochAttestations []*ethpb.PendingAttestation // override. + currentEpochAttestations []*ethpb.PendingAttestation // override. + previousJustifiedCheckpoint *ethpb.Checkpoint // override. + currentJustifiedCheckpoint *ethpb.Checkpoint // override. + finalizedCheckpoint *ethpb.Checkpoint // override. + // Altair Fields + previousEpochParticipation []byte // override. + currentEpochParticipation []byte // override. + inactivityScores []uint64 // override. + currentSyncCommittee *ethpb.SyncCommittee // override. 
+ nextSyncCommittee *ethpb.SyncCommittee // override.
+ // Bellatrix
+ executionPayloadHeader interfaces.ExecutionData // override.
+ // Capella
+ nextWithdrawalIndex uint64 // override.
+ nextWithdrawalValidatorIndex primitives.ValidatorIndex // override.
+ historicalSummaries []*ethpb.HistoricalSummary // append only.
+ // Electra
+ depositRequestsStartIndex uint64 // override.
+ depositBalanceToConsume primitives.Gwei // override.
+ exitBalanceToConsume primitives.Gwei // override.
+ earliestExitEpoch primitives.Epoch // override.
+ consolidationBalanceToConsume primitives.Gwei // override.
+ earliestConsolidationEpoch primitives.Epoch // override.
+
+ pendingDepositIndex uint64 // override.
+ pendingPartialWithdrawalsIndex uint64 // override.
+ pendingConsolidationsIndex uint64 // override.
+ pendingDepositDiff []*ethpb.PendingDeposit // override.
+ pendingPartialWithdrawalsDiff []*ethpb.PendingPartialWithdrawal // override.
+ pendingConsolidationsDiffs []*ethpb.PendingConsolidation // override.
+ // Fulu
+ proposerLookahead []uint64 // override
+}
+
+type hdiff struct {
+ stateDiff *stateDiff
+ validatorDiffs []validatorDiff
+ balancesDiff []int64
+}
+
+// validatorDiff is a type that represents a difference between two validators.
+type validatorDiff struct {
+ Slashed bool // new value (here because of alignment)
+ index uint32 // index of the validator this diff applies to.
+ PublicKey []byte // override.
+ WithdrawalCredentials []byte // override.
+ EffectiveBalance uint64 // override.
+ ActivationEligibilityEpoch primitives.Epoch // override
+ ActivationEpoch primitives.Epoch // override
+ ExitEpoch primitives.Epoch // override
+ WithdrawableEpoch primitives.Epoch // override
+}
+
+var (
+ errDataSmall = errors.New("data is too small")
+)
+
+const (
+ nilMarker = byte(0)
+ notNilMarker = byte(1)
+ forkLength = 2*fieldparams.VersionLength + 8 // previous_version + current_version + epoch
+ blockHeaderLength = 8 + 8 + 3*fieldparams.RootLength // slot + proposer_index + parent_root + state_root + body_root
+ blockRootsLength = fieldparams.BlockRootsLength * fieldparams.RootLength
+ stateRootsLength = fieldparams.StateRootsLength * fieldparams.RootLength
+ eth1DataLength = 8 + 2*fieldparams.RootLength // deposit_root + deposit_count + block_hash
+ randaoMixesLength = fieldparams.RandaoMixesLength * fieldparams.RootLength
+ checkpointLength = 8 + fieldparams.RootLength // epoch + root
+ syncCommitteeLength = (fieldparams.SyncCommitteeLength + 1) * fieldparams.BLSPubkeyLength
+ pendingDepositLength = fieldparams.BLSPubkeyLength + fieldparams.RootLength + 8 + fieldparams.BLSSignatureLength + 8 // pubkey + withdrawal_credentials + amount + signature + slot
+ pendingPartialWithdrawalLength = 8 + 8 + 8 // validator_index + amount + withdrawable_epoch
+ pendingConsolidationLength = 8 + 8 // source and target index
+ proposerLookaheadLength = 8 * 2 * fieldparams.SlotsPerEpoch
+)
+
+// newHdiff deserializes a new Hdiff object from the given serialized data.
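+// Each of the three sections (state, validators, balances) is snappy-compressed
+// independently and is decoded by its own reader below.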
+func newHdiff(data HdiffBytes) (*hdiff, error) { + stateDiff, err := newStateDiff(data.StateDiff) + if err != nil { + return nil, errors.Wrap(err, "failed to create state diff") + } + + validatorDiffs, err := newValidatorDiffs(data.ValidatorDiffs) + if err != nil { + return nil, errors.Wrap(err, "failed to create validator diffs") + } + + balancesDiff, err := newBalancesDiff(data.BalancesDiff) + if err != nil { + return nil, errors.Wrap(err, "failed to create balances diff") + } + + return &hdiff{ + stateDiff: stateDiff, + validatorDiffs: validatorDiffs, + balancesDiff: balancesDiff, + }, nil +} + +func (ret *stateDiff) readTargetVersion(data *[]byte) error { + if len(*data) < 8 { + return errors.Wrap(errDataSmall, "targetVersion") + } + ret.targetVersion = int(binary.LittleEndian.Uint64((*data)[:8])) // lint:ignore uintcast + *data = (*data)[8:] + return nil +} + +func (ret *stateDiff) readSlot(data *[]byte) error { + if len(*data) < 8 { + return errors.Wrap(errDataSmall, "slot") + } + ret.slot = primitives.Slot(binary.LittleEndian.Uint64((*data)[:8])) + *data = (*data)[8:] + return nil +} + +func (ret *stateDiff) readFork(data *[]byte) error { + if len(*data) < 1 { + return errors.Wrap(errDataSmall, "fork") + } + if (*data)[0] == nilMarker { + *data = (*data)[1:] + return nil + } + *data = (*data)[1:] + if len(*data) < forkLength { + return errors.Wrap(errDataSmall, "fork") + } + ret.fork = ðpb.Fork{ + PreviousVersion: slices.Clone((*data)[:fieldparams.VersionLength]), + CurrentVersion: slices.Clone((*data)[fieldparams.VersionLength : fieldparams.VersionLength*2]), + Epoch: primitives.Epoch(binary.LittleEndian.Uint64((*data)[2*fieldparams.VersionLength : 2*fieldparams.VersionLength+8])), + } + *data = (*data)[forkLength:] + return nil +} + +func (ret *stateDiff) readLatestBlockHeader(data *[]byte) error { + // Read latestBlockHeader. 
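+ // Wire format: a single nil/not-nil marker byte; when not nil, the slot (8 bytes),
+ // proposer index (8 bytes), then parent, state and body roots (32 bytes each) follow.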
+ if len((*data)) < 1 { + return errors.Wrap(errDataSmall, "latestBlockHeader") + } + if (*data)[0] == nilMarker { + *data = (*data)[1:] + return nil + } + *data = (*data)[1:] + if len(*data) < blockHeaderLength { + return errors.Wrap(errDataSmall, "latestBlockHeader") + } + ret.latestBlockHeader = ðpb.BeaconBlockHeader{ + Slot: primitives.Slot(binary.LittleEndian.Uint64((*data)[:8])), + ProposerIndex: primitives.ValidatorIndex(binary.LittleEndian.Uint64((*data)[8:16])), + ParentRoot: slices.Clone((*data)[16 : 16+fieldparams.RootLength]), + StateRoot: slices.Clone((*data)[16+fieldparams.RootLength : 16+2*fieldparams.RootLength]), + BodyRoot: slices.Clone((*data)[16+2*fieldparams.RootLength : 16+3*fieldparams.RootLength]), + } + *data = (*data)[blockHeaderLength:] + return nil +} + +func (ret *stateDiff) readBlockRoots(data *[]byte) error { + if len(*data) < blockRootsLength { + return errors.Wrap(errDataSmall, "blockRoots") + } + for i := range fieldparams.BlockRootsLength { + copy(ret.blockRoots[i][:], (*data)[i*fieldparams.RootLength:(i+1)*fieldparams.RootLength]) + } + *data = (*data)[blockRootsLength:] + return nil +} + +func (ret *stateDiff) readStateRoots(data *[]byte) error { + if len(*data) < stateRootsLength { + return errors.Wrap(errDataSmall, "stateRoots") + } + for i := range fieldparams.StateRootsLength { + copy(ret.stateRoots[i][:], (*data)[i*fieldparams.RootLength:(i+1)*fieldparams.RootLength]) + } + *data = (*data)[stateRootsLength:] + return nil +} + +func (ret *stateDiff) readHistoricalRoots(data *[]byte) error { + if len(*data) < 8 { + return errors.Wrap(errDataSmall, "historicalRoots") + } + historicalRootsLength := int(binary.LittleEndian.Uint64((*data)[:8])) // lint:ignore uintcast + (*data) = (*data)[8:] + if len(*data) < historicalRootsLength*fieldparams.RootLength { + return errors.Wrap(errDataSmall, "historicalRoots") + } + ret.historicalRoots = make([][fieldparams.RootLength]byte, historicalRootsLength) + for i := range historicalRootsLength { + copy(ret.historicalRoots[i][:], (*data)[i*fieldparams.RootLength:(i+1)*fieldparams.RootLength]) + } + *data = (*data)[historicalRootsLength*fieldparams.RootLength:] + return nil +} + +func (ret *stateDiff) readEth1Data(data *[]byte) error { + if len(*data) < 1 { + return errors.Wrap(errDataSmall, "eth1Data") + } + if (*data)[0] == nilMarker { + *data = (*data)[1:] + return nil + } + *data = (*data)[1:] + if len(*data) < eth1DataLength { + return errors.Wrap(errDataSmall, "eth1Data") + } + ret.eth1Data = ðpb.Eth1Data{ + DepositRoot: slices.Clone((*data)[:fieldparams.RootLength]), + DepositCount: binary.LittleEndian.Uint64((*data)[fieldparams.RootLength : fieldparams.RootLength+8]), + BlockHash: slices.Clone((*data)[fieldparams.RootLength+8 : 2*fieldparams.RootLength+8]), + } + *data = (*data)[eth1DataLength:] + return nil +} + +func (ret *stateDiff) readEth1DataVotes(data *[]byte) error { + // Read eth1DataVotes. 
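+ // Wire format: a 1-byte marker (nil marker means the votes are appended to the
+ // existing list, otherwise they override it), an 8-byte count, then 72 bytes per
+ // vote (deposit root, deposit count, block hash).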
+ if len(*data) < 9 { + return errors.Wrap(errDataSmall, "eth1DataVotes") + } + ret.eth1VotesAppend = ((*data)[0] == nilMarker) + eth1DataVotesLength := int(binary.LittleEndian.Uint64((*data)[1 : 1+8])) // lint:ignore uintcast + if len(*data) < 1+8+eth1DataVotesLength*eth1DataLength { + return errors.Wrap(errDataSmall, "eth1DataVotes") + } + ret.eth1DataVotes = make([]*ethpb.Eth1Data, eth1DataVotesLength) + cursor := 9 + for i := range eth1DataVotesLength { + ret.eth1DataVotes[i] = ðpb.Eth1Data{ + DepositRoot: slices.Clone((*data)[cursor : cursor+fieldparams.RootLength]), + DepositCount: binary.LittleEndian.Uint64((*data)[cursor+fieldparams.RootLength : cursor+fieldparams.RootLength+8]), + BlockHash: slices.Clone((*data)[cursor+fieldparams.RootLength+8 : cursor+2*fieldparams.RootLength+8]), + } + cursor += eth1DataLength + } + *data = (*data)[1+8+eth1DataVotesLength*eth1DataLength:] + return nil +} + +func (ret *stateDiff) readEth1DepositIndex(data *[]byte) error { + if len(*data) < 8 { + return errors.Wrap(errDataSmall, "eth1DepositIndex") + } + ret.eth1DepositIndex = binary.LittleEndian.Uint64((*data)[:8]) + *data = (*data)[8:] + return nil +} + +func (ret *stateDiff) readRandaoMixes(data *[]byte) error { + if len(*data) < randaoMixesLength { + return errors.Wrap(errDataSmall, "randaoMixes") + } + cursor := 0 + for i := range fieldparams.RandaoMixesLength { + copy(ret.randaoMixes[i][:], (*data)[cursor:cursor+fieldparams.RootLength]) + cursor += fieldparams.RootLength + } + *data = (*data)[randaoMixesLength:] + return nil +} + +func (ret *stateDiff) readSlashings(data *[]byte) error { + if len(*data) < fieldparams.SlashingsLength*8 { + return errors.Wrap(errDataSmall, "slashings") + } + cursor := 0 + for i := range fieldparams.SlashingsLength { + ret.slashings[i] = int64(binary.LittleEndian.Uint64((*data)[cursor : cursor+8])) // lint:ignore uintcast + cursor += 8 + } + *data = (*data)[fieldparams.SlashingsLength*8:] + return nil +} + +func readPendingAttestation(data *[]byte) (*ethpb.PendingAttestation, error) { + if len(*data) < 8 { + return nil, errors.Wrap(errDataSmall, "pendingAttestation") + } + bitsLength := int(binary.LittleEndian.Uint64((*data)[:8])) // lint:ignore uintcast + if bitsLength < 0 { + return nil, errors.Wrap(errDataSmall, "pendingAttestation: negative bitsLength") + } + // Check for integer overflow: 8 + bitsLength + 144 + const fixedSize = 152 // 8 (length field) + 144 (fixed fields) + if bitsLength > len(*data)-fixedSize { + return nil, errors.Wrap(errDataSmall, "pendingAttestation") + } + pending := ðpb.PendingAttestation{} + pending.AggregationBits = bitfield.Bitlist(slices.Clone((*data)[8 : 8+bitsLength])) + *data = (*data)[8+bitsLength:] + pending.Data = ðpb.AttestationData{} + if err := pending.Data.UnmarshalSSZ((*data)[:128]); err != nil { // pending.Data is 128 bytes + return nil, errors.Wrap(err, "failed to unmarshal pendingAttestation") + } + pending.InclusionDelay = primitives.Slot(binary.LittleEndian.Uint64((*data)[128:136])) + pending.ProposerIndex = primitives.ValidatorIndex(binary.LittleEndian.Uint64((*data)[136:144])) + *data = (*data)[144:] + return pending, nil +} + +func (ret *stateDiff) readPreviousEpochAttestations(data *[]byte) error { + if len(*data) < 8 { + return errors.Wrap(errDataSmall, "previousEpochAttestations") + } + previousEpochAttestationsLength := int(binary.LittleEndian.Uint64((*data)[:8])) // lint:ignore uintcast + if previousEpochAttestationsLength < 0 { + return errors.Wrap(errDataSmall, "previousEpochAttestations: negative 
length") + } + ret.previousEpochAttestations = make([]*ethpb.PendingAttestation, previousEpochAttestationsLength) + (*data) = (*data)[8:] + var err error + for i := range previousEpochAttestationsLength { + ret.previousEpochAttestations[i], err = readPendingAttestation(data) + if err != nil { + return errors.Wrap(err, "failed to read previousEpochAttestation") + } + } + return nil +} + +func (ret *stateDiff) readCurrentEpochAttestations(data *[]byte) error { + if len(*data) < 8 { + return errors.Wrap(errDataSmall, "currentEpochAttestations") + } + currentEpochAttestationsLength := int(binary.LittleEndian.Uint64((*data)[:8])) // lint:ignore uintcast + if currentEpochAttestationsLength < 0 { + return errors.Wrap(errDataSmall, "currentEpochAttestations: negative length") + } + ret.currentEpochAttestations = make([]*ethpb.PendingAttestation, currentEpochAttestationsLength) + (*data) = (*data)[8:] + var err error + for i := range currentEpochAttestationsLength { + ret.currentEpochAttestations[i], err = readPendingAttestation(data) + if err != nil { + return errors.Wrap(err, "failed to read currentEpochAttestation") + } + } + return nil +} + +func (ret *stateDiff) readPreviousEpochParticipation(data *[]byte) error { + if len(*data) < 8 { + return errors.Wrap(errDataSmall, "previousEpochParticipation") + } + previousEpochParticipationLength := int(binary.LittleEndian.Uint64((*data)[:8])) // lint:ignore uintcast + if previousEpochParticipationLength < 0 { + return errors.Wrap(errDataSmall, "previousEpochParticipation: negative length") + } + if len(*data)-8 < previousEpochParticipationLength { + return errors.Wrap(errDataSmall, "previousEpochParticipation") + } + ret.previousEpochParticipation = make([]byte, previousEpochParticipationLength) + copy(ret.previousEpochParticipation, (*data)[8:8+previousEpochParticipationLength]) + *data = (*data)[8+previousEpochParticipationLength:] + return nil +} + +func (ret *stateDiff) readCurrentEpochParticipation(data *[]byte) error { + if len(*data) < 8 { + return errors.Wrap(errDataSmall, "currentEpochParticipation") + } + currentEpochParticipationLength := int(binary.LittleEndian.Uint64((*data)[:8])) // lint:ignore uintcast + if currentEpochParticipationLength < 0 { + return errors.Wrap(errDataSmall, "currentEpochParticipation: negative length") + } + if len(*data)-8 < currentEpochParticipationLength { + return errors.Wrap(errDataSmall, "currentEpochParticipation") + } + ret.currentEpochParticipation = make([]byte, currentEpochParticipationLength) + copy(ret.currentEpochParticipation, (*data)[8:8+currentEpochParticipationLength]) + *data = (*data)[8+currentEpochParticipationLength:] + return nil +} + +func (ret *stateDiff) readJustificationBits(data *[]byte) error { + if len(*data) < 1 { + return errors.Wrap(errDataSmall, "justificationBits") + } + ret.justificationBits = (*data)[0] + *data = (*data)[1:] + return nil +} + +func (ret *stateDiff) readPreviousJustifiedCheckpoint(data *[]byte) error { + if len(*data) < checkpointLength { + return errors.Wrap(errDataSmall, "previousJustifiedCheckpoint") + } + ret.previousJustifiedCheckpoint = ðpb.Checkpoint{ + Epoch: primitives.Epoch(binary.LittleEndian.Uint64((*data)[:8])), + Root: slices.Clone((*data)[8 : 8+fieldparams.RootLength]), + } + *data = (*data)[checkpointLength:] + return nil +} + +func (ret *stateDiff) readCurrentJustifiedCheckpoint(data *[]byte) error { + if len(*data) < checkpointLength { + return errors.Wrap(errDataSmall, "currentJustifiedCheckpoint") + } + ret.currentJustifiedCheckpoint = 
ðpb.Checkpoint{ + Epoch: primitives.Epoch(binary.LittleEndian.Uint64((*data)[:8])), + Root: slices.Clone((*data)[8 : 8+fieldparams.RootLength]), + } + *data = (*data)[checkpointLength:] + return nil +} + +func (ret *stateDiff) readFinalizedCheckpoint(data *[]byte) error { + if len(*data) < checkpointLength { + return errors.Wrap(errDataSmall, "finalizedCheckpoint") + } + ret.finalizedCheckpoint = ðpb.Checkpoint{ + Epoch: primitives.Epoch(binary.LittleEndian.Uint64((*data)[:8])), + Root: slices.Clone((*data)[8 : 8+fieldparams.RootLength]), + } + *data = (*data)[checkpointLength:] + return nil +} + +func (ret *stateDiff) readInactivityScores(data *[]byte) error { + if len(*data) < 8 { + return errors.Wrap(errDataSmall, "inactivityScores") + } + inactivityScoresLength := int(binary.LittleEndian.Uint64((*data)[:8])) // lint:ignore uintcast + if inactivityScoresLength < 0 { + return errors.Wrap(errDataSmall, "inactivityScores: negative length") + } + if len(*data)-8 < inactivityScoresLength*8 { + return errors.Wrap(errDataSmall, "inactivityScores") + } + ret.inactivityScores = make([]uint64, inactivityScoresLength) + cursor := 8 + for i := range inactivityScoresLength { + ret.inactivityScores[i] = binary.LittleEndian.Uint64((*data)[cursor : cursor+8]) + cursor += 8 + } + *data = (*data)[cursor:] + return nil +} + +func (ret *stateDiff) readCurrentSyncCommittee(data *[]byte) error { + if len(*data) < 1 { + return errors.Wrap(errDataSmall, "currentSyncCommittee") + } + if (*data)[0] == nilMarker { + *data = (*data)[1:] + return nil + } + *data = (*data)[1:] + if len(*data) < syncCommitteeLength { + return errors.Wrap(errDataSmall, "currentSyncCommittee") + } + ret.currentSyncCommittee = ðpb.SyncCommittee{} + if err := ret.currentSyncCommittee.UnmarshalSSZ((*data)[:syncCommitteeLength]); err != nil { + return errors.Wrap(err, "failed to unmarshal currentSyncCommittee") + } + *data = (*data)[syncCommitteeLength:] + return nil +} + +func (ret *stateDiff) readNextSyncCommittee(data *[]byte) error { + if len(*data) < 1 { + return errors.Wrap(errDataSmall, "nextSyncCommittee") + } + if (*data)[0] == nilMarker { + *data = (*data)[1:] + return nil + } + *data = (*data)[1:] + if len(*data) < syncCommitteeLength { + return errors.Wrap(errDataSmall, "nextSyncCommittee") + } + ret.nextSyncCommittee = ðpb.SyncCommittee{} + if err := ret.nextSyncCommittee.UnmarshalSSZ((*data)[:syncCommitteeLength]); err != nil { + return errors.Wrap(err, "failed to unmarshal nextSyncCommittee") + } + *data = (*data)[syncCommitteeLength:] + return nil +} + +func (ret *stateDiff) readExecutionPayloadHeader(data *[]byte) error { + if len(*data) < 1 { + return errors.Wrap(errDataSmall, "executionPayloadHeader") + } + if (*data)[0] == nilMarker { + *data = (*data)[1:] + return nil + } + if len(*data) < 9 { + return errors.Wrap(errDataSmall, "executionPayloadHeader") + } + headerLength := int(binary.LittleEndian.Uint64((*data)[1:9])) // lint:ignore uintcast + if headerLength < 0 { + return errors.Wrap(errDataSmall, "executionPayloadHeader: negative length") + } + *data = (*data)[9:] + type sszSizeUnmarshaler interface { + ssz.Unmarshaler + ssz.Marshaler + proto.Message + } + var header sszSizeUnmarshaler + switch ret.targetVersion { + case version.Bellatrix: + header = &enginev1.ExecutionPayloadHeader{} + case version.Capella: + header = &enginev1.ExecutionPayloadHeaderCapella{} + case version.Deneb, version.Electra, version.Fulu: + header = &enginev1.ExecutionPayloadHeaderDeneb{} + default: + return errors.Errorf("unknown target 
version %d", ret.targetVersion) + } + if len(*data) < headerLength { + return errors.Wrap(errDataSmall, "executionPayloadHeader") + } + if err := header.UnmarshalSSZ((*data)[:headerLength]); err != nil { + return errors.Wrap(err, "failed to unmarshal executionPayloadHeader") + } + var err error + ret.executionPayloadHeader, err = blocks.NewWrappedExecutionData(header) + if err != nil { + return err + } + *data = (*data)[headerLength:] + return nil +} + +func (ret *stateDiff) readWithdrawalIndices(data *[]byte) error { + if len(*data) < 16 { + return errors.Wrap(errDataSmall, "withdrawalIndices") + } + ret.nextWithdrawalIndex = binary.LittleEndian.Uint64((*data)[:8]) + ret.nextWithdrawalValidatorIndex = primitives.ValidatorIndex(binary.LittleEndian.Uint64((*data)[8:16])) + *data = (*data)[16:] + return nil +} + +func (ret *stateDiff) readHistoricalSummaries(data *[]byte) error { + if len(*data) < 8 { + return errors.Wrap(errDataSmall, "historicalSummaries") + } + historicalSummariesLength := int(binary.LittleEndian.Uint64((*data)[:8])) // lint:ignore uintcast + if historicalSummariesLength < 0 { + return errors.Wrap(errDataSmall, "historicalSummaries: negative length") + } + if len(*data) < 8+historicalSummariesLength*fieldparams.RootLength*2 { + return errors.Wrap(errDataSmall, "historicalSummaries") + } + ret.historicalSummaries = make([]*ethpb.HistoricalSummary, historicalSummariesLength) + cursor := 8 + for i := range historicalSummariesLength { + ret.historicalSummaries[i] = ðpb.HistoricalSummary{ + BlockSummaryRoot: slices.Clone((*data)[cursor : cursor+fieldparams.RootLength]), + StateSummaryRoot: slices.Clone((*data)[cursor+fieldparams.RootLength : cursor+2*fieldparams.RootLength]), + } + cursor += 2 * fieldparams.RootLength + } + *data = (*data)[cursor:] + return nil +} + +func (ret *stateDiff) readElectraPendingIndices(data *[]byte) error { + if len(*data) < 8*6 { + return errors.Wrap(errDataSmall, "electraPendingIndices") + } + ret.depositRequestsStartIndex = binary.LittleEndian.Uint64((*data)[:8]) + ret.depositBalanceToConsume = primitives.Gwei(binary.LittleEndian.Uint64((*data)[8:16])) + ret.exitBalanceToConsume = primitives.Gwei(binary.LittleEndian.Uint64((*data)[16:24])) + ret.earliestExitEpoch = primitives.Epoch(binary.LittleEndian.Uint64((*data)[24:32])) + ret.consolidationBalanceToConsume = primitives.Gwei(binary.LittleEndian.Uint64((*data)[32:40])) + ret.earliestConsolidationEpoch = primitives.Epoch(binary.LittleEndian.Uint64((*data)[40:48])) + *data = (*data)[48:] + return nil +} + +func (ret *stateDiff) readPendingDeposits(data *[]byte) error { + if len(*data) < 16 { + return errors.Wrap(errDataSmall, "pendingDeposits") + } + ret.pendingDepositIndex = binary.LittleEndian.Uint64((*data)[:8]) + pendingDepositDiffLength := int(binary.LittleEndian.Uint64((*data)[8:16])) // lint:ignore uintcast + if pendingDepositDiffLength < 0 { + return errors.Wrap(errDataSmall, "pendingDeposits: negative length") + } + if len(*data) < 16+pendingDepositDiffLength*pendingDepositLength { + return errors.Wrap(errDataSmall, "pendingDepositDiff") + } + ret.pendingDepositDiff = make([]*ethpb.PendingDeposit, pendingDepositDiffLength) + cursor := 16 + for i := range pendingDepositDiffLength { + ret.pendingDepositDiff[i] = ðpb.PendingDeposit{ + PublicKey: slices.Clone((*data)[cursor : cursor+fieldparams.BLSPubkeyLength]), + WithdrawalCredentials: slices.Clone((*data)[cursor+fieldparams.BLSPubkeyLength : cursor+fieldparams.BLSPubkeyLength+fieldparams.RootLength]), + Amount: 
binary.LittleEndian.Uint64((*data)[cursor+fieldparams.BLSPubkeyLength+fieldparams.RootLength : cursor+fieldparams.BLSPubkeyLength+fieldparams.RootLength+8]), + Signature: slices.Clone((*data)[cursor+fieldparams.BLSPubkeyLength+fieldparams.RootLength+8 : cursor+fieldparams.BLSPubkeyLength+fieldparams.RootLength+8+fieldparams.BLSSignatureLength]), + Slot: primitives.Slot(binary.LittleEndian.Uint64((*data)[cursor+fieldparams.BLSPubkeyLength+fieldparams.RootLength+8+fieldparams.BLSSignatureLength : cursor+fieldparams.BLSPubkeyLength+fieldparams.RootLength+8+fieldparams.BLSSignatureLength+8])), + } + cursor += pendingDepositLength + } + *data = (*data)[cursor:] + return nil +} + +func (ret *stateDiff) readPendingPartialWithdrawals(data *[]byte) error { + if len(*data) < 16 { + return errors.Wrap(errDataSmall, "pendingPartialWithdrawals") + } + ret.pendingPartialWithdrawalsIndex = binary.LittleEndian.Uint64((*data)[:8]) + pendingPartialWithdrawalsDiffLength := int(binary.LittleEndian.Uint64((*data)[8:16])) // lint:ignore uintcast + if pendingPartialWithdrawalsDiffLength < 0 { + return errors.Wrap(errDataSmall, "pendingPartialWithdrawals: negative length") + } + if len(*data) < 16+pendingPartialWithdrawalsDiffLength*pendingPartialWithdrawalLength { + return errors.Wrap(errDataSmall, "pendingPartialWithdrawalsDiff") + } + ret.pendingPartialWithdrawalsDiff = make([]*ethpb.PendingPartialWithdrawal, pendingPartialWithdrawalsDiffLength) + cursor := 16 + for i := range pendingPartialWithdrawalsDiffLength { + ret.pendingPartialWithdrawalsDiff[i] = ðpb.PendingPartialWithdrawal{ + Index: primitives.ValidatorIndex(binary.LittleEndian.Uint64((*data)[cursor : cursor+8])), + Amount: binary.LittleEndian.Uint64((*data)[cursor+8 : cursor+16]), + WithdrawableEpoch: primitives.Epoch(binary.LittleEndian.Uint64((*data)[cursor+16 : cursor+24])), + } + cursor += pendingPartialWithdrawalLength + } + *data = (*data)[cursor:] + return nil +} + +func (ret *stateDiff) readPendingConsolidations(data *[]byte) error { + if len(*data) < 16 { + return errors.Wrap(errDataSmall, "pendingConsolidations") + } + ret.pendingConsolidationsIndex = binary.LittleEndian.Uint64((*data)[:8]) + pendingConsolidationsDiffsLength := int(binary.LittleEndian.Uint64((*data)[8:16])) // lint:ignore uintcast + if pendingConsolidationsDiffsLength < 0 { + return errors.Wrap(errDataSmall, "pendingConsolidations: negative length") + } + if len(*data) < 16+pendingConsolidationsDiffsLength*pendingConsolidationLength { + return errors.Wrap(errDataSmall, "pendingConsolidationsDiffs") + } + ret.pendingConsolidationsDiffs = make([]*ethpb.PendingConsolidation, pendingConsolidationsDiffsLength) + cursor := 16 + for i := range pendingConsolidationsDiffsLength { + ret.pendingConsolidationsDiffs[i] = ðpb.PendingConsolidation{ + SourceIndex: primitives.ValidatorIndex(binary.LittleEndian.Uint64((*data)[cursor : cursor+8])), + TargetIndex: primitives.ValidatorIndex(binary.LittleEndian.Uint64((*data)[cursor+8 : cursor+16])), + } + cursor += pendingConsolidationLength + } + *data = (*data)[cursor:] + return nil +} + +func (ret *stateDiff) readProposerLookahead(data *[]byte) error { + if len(*data) < proposerLookaheadLength { + return errors.Wrap(errDataSmall, "proposerLookahead data") + } + // Read the proposer lookahead (2 * SlotsPerEpoch uint64 values) + numProposers := 2 * fieldparams.SlotsPerEpoch + ret.proposerLookahead = make([]uint64, numProposers) + for i := 0; i < numProposers; i++ { + ret.proposerLookahead[i] = binary.LittleEndian.Uint64((*data)[i*8 : 
(i+1)*8]) + } + *data = (*data)[proposerLookaheadLength:] + return nil +} + +// newStateDiff deserializes a new stateDiff object from the given data. +func newStateDiff(input []byte) (*stateDiff, error) { + data, err := snappy.Decode(nil, input) + if err != nil { + return nil, errors.Wrap(err, "failed to decode snappy") + } + ret := &stateDiff{} + if err := ret.readTargetVersion(&data); err != nil { + return nil, err + } + if err := ret.readSlot(&data); err != nil { + return nil, err + } + if err := ret.readFork(&data); err != nil { + return nil, err + } + if err := ret.readLatestBlockHeader(&data); err != nil { + return nil, err + } + if err := ret.readBlockRoots(&data); err != nil { + return nil, err + } + if err := ret.readStateRoots(&data); err != nil { + return nil, err + } + if err := ret.readHistoricalRoots(&data); err != nil { + return nil, err + } + if err := ret.readEth1Data(&data); err != nil { + return nil, err + } + if err := ret.readEth1DataVotes(&data); err != nil { + return nil, err + } + if err := ret.readEth1DepositIndex(&data); err != nil { + return nil, err + } + if err := ret.readRandaoMixes(&data); err != nil { + return nil, err + } + if err := ret.readSlashings(&data); err != nil { + return nil, err + } + if ret.targetVersion == version.Phase0 { + if err := ret.readPreviousEpochAttestations(&data); err != nil { + return nil, err + } + if err := ret.readCurrentEpochAttestations(&data); err != nil { + return nil, err + } + } else { + if err := ret.readPreviousEpochParticipation(&data); err != nil { + return nil, err + } + if err := ret.readCurrentEpochParticipation(&data); err != nil { + return nil, err + } + } + if err := ret.readJustificationBits(&data); err != nil { + return nil, err + } + if err := ret.readPreviousJustifiedCheckpoint(&data); err != nil { + return nil, err + } + if err := ret.readCurrentJustifiedCheckpoint(&data); err != nil { + return nil, err + } + if err := ret.readFinalizedCheckpoint(&data); err != nil { + return nil, err + } + if err := ret.readInactivityScores(&data); err != nil { + return nil, err + } + if err := ret.readCurrentSyncCommittee(&data); err != nil { + return nil, err + } + if err := ret.readNextSyncCommittee(&data); err != nil { + return nil, err + } + if err := ret.readExecutionPayloadHeader(&data); err != nil { + return nil, err + } + if err := ret.readWithdrawalIndices(&data); err != nil { + return nil, err + } + if err := ret.readHistoricalSummaries(&data); err != nil { + return nil, err + } + if err := ret.readElectraPendingIndices(&data); err != nil { + return nil, err + } + if err := ret.readPendingDeposits(&data); err != nil { + return nil, err + } + if err := ret.readPendingPartialWithdrawals(&data); err != nil { + return nil, err + } + if err := ret.readPendingConsolidations(&data); err != nil { + return nil, err + } + if ret.targetVersion >= version.Fulu { + // Proposer lookahead has fixed size and it is not added for forks previous to Fulu. + if err := ret.readProposerLookahead(&data); err != nil { + return nil, err + } + } + if len(data) > 0 { + return nil, errors.Errorf("data is too large, exceeded by %d bytes", len(data)) + } + return ret, nil +} + +// newValidatorDiffs deserializes a new validator diffs from the given data. 
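+// The snappy-decoded payload starts with an 8-byte entry count; each entry holds the
+// 4-byte validator index, an optional public key and optional withdrawal credentials
+// (each preceded by a 1-byte marker), the 8-byte effective balance, a 1-byte slashed
+// flag and four 8-byte epochs.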
+func newValidatorDiffs(input []byte) ([]validatorDiff, error) { + data, err := snappy.Decode(nil, input) + if err != nil { + return nil, errors.Wrap(err, "failed to decode snappy") + } + cursor := 0 + if len(data[cursor:]) < 8 { + return nil, errors.Wrap(errDataSmall, "validatorDiffs") + } + validatorDiffsLength := binary.LittleEndian.Uint64(data[cursor : cursor+8]) + cursor += 8 + validatorDiffs := make([]validatorDiff, validatorDiffsLength) + for i := range validatorDiffsLength { + if len(data[cursor:]) < 4 { + return nil, errors.Wrap(errDataSmall, "validatorDiffs: index") + } + validatorDiffs[i].index = binary.LittleEndian.Uint32(data[cursor : cursor+4]) + cursor += 4 + if len(data[cursor:]) < 1 { + return nil, errors.Wrap(errDataSmall, "validatorDiffs: PublicKey") + } + cursor++ + if data[cursor-1] != nilMarker { + if len(data[cursor:]) < fieldparams.BLSPubkeyLength { + return nil, errors.Wrap(errDataSmall, "validatorDiffs: PublicKey") + } + validatorDiffs[i].PublicKey = data[cursor : cursor+fieldparams.BLSPubkeyLength] + cursor += fieldparams.BLSPubkeyLength + } + if len(data[cursor:]) < 1 { + return nil, errors.Wrap(errDataSmall, "validatorDiffs: WithdrawalCredentials") + } + cursor++ + if data[cursor-1] != nilMarker { + if len(data[cursor:]) < fieldparams.RootLength { + return nil, errors.Wrap(errDataSmall, "validatorDiffs: WithdrawalCredentials") + } + validatorDiffs[i].WithdrawalCredentials = data[cursor : cursor+fieldparams.RootLength] + cursor += fieldparams.RootLength + } + if len(data[cursor:]) < 8 { + return nil, errors.Wrap(errDataSmall, "validatorDiffs: EffectiveBalance") + } + validatorDiffs[i].EffectiveBalance = binary.LittleEndian.Uint64(data[cursor : cursor+8]) + cursor += 8 + if len(data[cursor:]) < 1 { + return nil, errors.Wrap(errDataSmall, "validatorDiffs: Slashed") + } + validatorDiffs[i].Slashed = data[cursor] != nilMarker + cursor++ + if len(data[cursor:]) < 8 { + return nil, errors.Wrap(errDataSmall, "validatorDiffs: ActivationEligibilityEpoch") + } + validatorDiffs[i].ActivationEligibilityEpoch = primitives.Epoch(binary.LittleEndian.Uint64(data[cursor : cursor+8])) + cursor += 8 + if len(data[cursor:]) < 8 { + return nil, errors.Wrap(errDataSmall, "validatorDiffs: ActivationEpoch") + } + validatorDiffs[i].ActivationEpoch = primitives.Epoch(binary.LittleEndian.Uint64(data[cursor : cursor+8])) + cursor += 8 + if len(data[cursor:]) < 8 { + return nil, errors.Wrap(errDataSmall, "validatorDiffs: ExitEpoch") + } + validatorDiffs[i].ExitEpoch = primitives.Epoch(binary.LittleEndian.Uint64(data[cursor : cursor+8])) + cursor += 8 + if len(data[cursor:]) < 8 { + return nil, errors.Wrap(errDataSmall, "validatorDiffs: WithdrawableEpoch") + } + validatorDiffs[i].WithdrawableEpoch = primitives.Epoch(binary.LittleEndian.Uint64(data[cursor : cursor+8])) + cursor += 8 + } + if cursor != len(data) { + return nil, errors.Errorf("data is too large, expected %d bytes, got %d", len(data), cursor) + } + return validatorDiffs, nil +} + +// newBalancesDiff deserializes a new balances diff from the given data. 
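+// The snappy-decoded payload is an 8-byte count followed by exactly that many
+// little-endian signed 64-bit balance deltas.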
+func newBalancesDiff(input []byte) ([]int64, error) { + data, err := snappy.Decode(nil, input) + if err != nil { + return nil, errors.Wrap(err, "failed to decode snappy") + } + if len(data) < 8 { + return nil, errors.Wrap(errDataSmall, "balancesDiff") + } + balancesLength := int(binary.LittleEndian.Uint64(data[:8])) // lint:ignore uintcast + if balancesLength < 0 { + return nil, errors.Wrap(errDataSmall, "balancesDiff: negative length") + } + if len(data) != 8+balancesLength*8 { + return nil, errors.Errorf("incorrect length of balancesDiff, expected %d, got %d", 8+balancesLength*8, len(data)) + } + balances := make([]int64, balancesLength) + for i := range balancesLength { + balances[i] = int64(binary.LittleEndian.Uint64(data[8*(i+1) : 8*(i+2)])) // lint:ignore uintcast + } + return balances, nil +} + +func (s *stateDiff) serialize() []byte { + ret := make([]byte, 0) + ret = binary.LittleEndian.AppendUint64(ret, uint64(s.targetVersion)) + ret = binary.LittleEndian.AppendUint64(ret, uint64(s.slot)) + if s.fork == nil { + ret = append(ret, nilMarker) + } else { + ret = append(ret, notNilMarker) + ret = append(ret, s.fork.PreviousVersion...) + ret = append(ret, s.fork.CurrentVersion...) + ret = binary.LittleEndian.AppendUint64(ret, uint64(s.fork.Epoch)) + } + + if s.latestBlockHeader == nil { + ret = append(ret, nilMarker) + } else { + ret = append(ret, notNilMarker) + ret = binary.LittleEndian.AppendUint64(ret, uint64(s.latestBlockHeader.Slot)) + ret = binary.LittleEndian.AppendUint64(ret, uint64(s.latestBlockHeader.ProposerIndex)) + ret = append(ret, s.latestBlockHeader.ParentRoot...) + ret = append(ret, s.latestBlockHeader.StateRoot...) + ret = append(ret, s.latestBlockHeader.BodyRoot...) + } + + for _, r := range s.blockRoots { + ret = append(ret, r[:]...) + } + + for _, r := range s.stateRoots { + ret = append(ret, r[:]...) + } + + ret = binary.LittleEndian.AppendUint64(ret, uint64(len(s.historicalRoots))) + for _, r := range s.historicalRoots { + ret = append(ret, r[:]...) + } + + if s.eth1Data == nil { + ret = append(ret, nilMarker) + } else { + ret = append(ret, notNilMarker) + ret = append(ret, s.eth1Data.DepositRoot...) + ret = binary.LittleEndian.AppendUint64(ret, s.eth1Data.DepositCount) + ret = append(ret, s.eth1Data.BlockHash...) + } + + if s.eth1VotesAppend { + ret = append(ret, nilMarker) + } else { + ret = append(ret, notNilMarker) + } + ret = binary.LittleEndian.AppendUint64(ret, uint64(len(s.eth1DataVotes))) + for _, v := range s.eth1DataVotes { + ret = append(ret, v.DepositRoot...) + ret = binary.LittleEndian.AppendUint64(ret, v.DepositCount) + ret = append(ret, v.BlockHash...) + } + ret = binary.LittleEndian.AppendUint64(ret, s.eth1DepositIndex) + + for _, r := range s.randaoMixes { + ret = append(ret, r[:]...) + } + + for _, s := range s.slashings { + ret = binary.LittleEndian.AppendUint64(ret, uint64(s)) + } + + if s.targetVersion == version.Phase0 { + ret = binary.LittleEndian.AppendUint64(ret, uint64(len(s.previousEpochAttestations))) + for _, a := range s.previousEpochAttestations { + ret = binary.LittleEndian.AppendUint64(ret, uint64(len(a.AggregationBits))) + ret = append(ret, a.AggregationBits...) + var err error + ret, err = a.Data.MarshalSSZTo(ret) + if err != nil { + // this is impossible to happen. 
+ logrus.WithError(err).Error("Failed to marshal previousEpochAttestation") + return nil + } + ret = binary.LittleEndian.AppendUint64(ret, uint64(a.InclusionDelay)) + ret = binary.LittleEndian.AppendUint64(ret, uint64(a.ProposerIndex)) + } + ret = binary.LittleEndian.AppendUint64(ret, uint64(len(s.currentEpochAttestations))) + for _, a := range s.currentEpochAttestations { + ret = binary.LittleEndian.AppendUint64(ret, uint64(len(a.AggregationBits))) + ret = append(ret, a.AggregationBits...) + var err error + ret, err = a.Data.MarshalSSZTo(ret) + if err != nil { + // this is impossible to happen. + logrus.WithError(err).Error("Failed to marshal currentEpochAttestation") + return nil + } + ret = binary.LittleEndian.AppendUint64(ret, uint64(a.InclusionDelay)) + ret = binary.LittleEndian.AppendUint64(ret, uint64(a.ProposerIndex)) + } + } else { + ret = binary.LittleEndian.AppendUint64(ret, uint64(len(s.previousEpochParticipation))) + ret = append(ret, s.previousEpochParticipation...) + ret = binary.LittleEndian.AppendUint64(ret, uint64(len(s.currentEpochParticipation))) + ret = append(ret, s.currentEpochParticipation...) + } + + ret = append(ret, s.justificationBits) + ret = binary.LittleEndian.AppendUint64(ret, uint64(s.previousJustifiedCheckpoint.Epoch)) + ret = append(ret, s.previousJustifiedCheckpoint.Root...) + ret = binary.LittleEndian.AppendUint64(ret, uint64(s.currentJustifiedCheckpoint.Epoch)) + ret = append(ret, s.currentJustifiedCheckpoint.Root...) + ret = binary.LittleEndian.AppendUint64(ret, uint64(s.finalizedCheckpoint.Epoch)) + ret = append(ret, s.finalizedCheckpoint.Root...) + + ret = binary.LittleEndian.AppendUint64(ret, uint64(len(s.inactivityScores))) + for _, s := range s.inactivityScores { + ret = binary.LittleEndian.AppendUint64(ret, s) + } + + if s.currentSyncCommittee == nil { + ret = append(ret, nilMarker) + } else { + ret = append(ret, notNilMarker) + for _, pubkey := range s.currentSyncCommittee.Pubkeys { + ret = append(ret, pubkey...) + } + ret = append(ret, s.currentSyncCommittee.AggregatePubkey...) + } + + if s.nextSyncCommittee == nil { + ret = append(ret, nilMarker) + } else { + ret = append(ret, notNilMarker) + for _, pubkey := range s.nextSyncCommittee.Pubkeys { + ret = append(ret, pubkey...) + } + ret = append(ret, s.nextSyncCommittee.AggregatePubkey...) + } + + if s.executionPayloadHeader == nil { + ret = append(ret, nilMarker) + } else { + ret = append(ret, notNilMarker) + ret = binary.LittleEndian.AppendUint64(ret, uint64(s.executionPayloadHeader.SizeSSZ())) + var err error + ret, err = s.executionPayloadHeader.MarshalSSZTo(ret) + if err != nil { + // this is impossible to happen. + logrus.WithError(err).Error("Failed to marshal executionPayloadHeader") + return nil + } + } + + ret = binary.LittleEndian.AppendUint64(ret, s.nextWithdrawalIndex) + ret = binary.LittleEndian.AppendUint64(ret, uint64(s.nextWithdrawalValidatorIndex)) + + ret = binary.LittleEndian.AppendUint64(ret, uint64(len(s.historicalSummaries))) + for i := range s.historicalSummaries { + ret = append(ret, s.historicalSummaries[i].BlockSummaryRoot...) + ret = append(ret, s.historicalSummaries[i].StateSummaryRoot...) 
+ } + + ret = binary.LittleEndian.AppendUint64(ret, s.depositRequestsStartIndex) + ret = binary.LittleEndian.AppendUint64(ret, uint64(s.depositBalanceToConsume)) + ret = binary.LittleEndian.AppendUint64(ret, uint64(s.exitBalanceToConsume)) + ret = binary.LittleEndian.AppendUint64(ret, uint64(s.earliestExitEpoch)) + ret = binary.LittleEndian.AppendUint64(ret, uint64(s.consolidationBalanceToConsume)) + ret = binary.LittleEndian.AppendUint64(ret, uint64(s.earliestConsolidationEpoch)) + + ret = binary.LittleEndian.AppendUint64(ret, s.pendingDepositIndex) + ret = binary.LittleEndian.AppendUint64(ret, uint64(len(s.pendingDepositDiff))) + for _, d := range s.pendingDepositDiff { + ret = append(ret, d.PublicKey...) + ret = append(ret, d.WithdrawalCredentials...) + ret = binary.LittleEndian.AppendUint64(ret, d.Amount) + ret = append(ret, d.Signature...) + ret = binary.LittleEndian.AppendUint64(ret, uint64(d.Slot)) + } + ret = binary.LittleEndian.AppendUint64(ret, s.pendingPartialWithdrawalsIndex) + ret = binary.LittleEndian.AppendUint64(ret, uint64(len(s.pendingPartialWithdrawalsDiff))) + for _, d := range s.pendingPartialWithdrawalsDiff { + ret = binary.LittleEndian.AppendUint64(ret, uint64(d.Index)) + ret = binary.LittleEndian.AppendUint64(ret, d.Amount) + ret = binary.LittleEndian.AppendUint64(ret, uint64(d.WithdrawableEpoch)) + } + ret = binary.LittleEndian.AppendUint64(ret, s.pendingConsolidationsIndex) + ret = binary.LittleEndian.AppendUint64(ret, uint64(len(s.pendingConsolidationsDiffs))) + for _, d := range s.pendingConsolidationsDiffs { + ret = binary.LittleEndian.AppendUint64(ret, uint64(d.SourceIndex)) + ret = binary.LittleEndian.AppendUint64(ret, uint64(d.TargetIndex)) + } + // Fulu: Proposer lookahead (override strategy - always fixed size) + if s.targetVersion >= version.Fulu { + for _, proposer := range s.proposerLookahead { + ret = binary.LittleEndian.AppendUint64(ret, proposer) + } + } + return ret +} + +func (h *hdiff) serialize() HdiffBytes { + vals := make([]byte, 0) + vals = binary.LittleEndian.AppendUint64(vals, uint64(len(h.validatorDiffs))) + for _, v := range h.validatorDiffs { + vals = binary.LittleEndian.AppendUint32(vals, v.index) + if v.PublicKey == nil { + vals = append(vals, nilMarker) + } else { + vals = append(vals, notNilMarker) + vals = append(vals, v.PublicKey...) + } + if v.WithdrawalCredentials == nil { + vals = append(vals, nilMarker) + } else { + vals = append(vals, notNilMarker) + vals = append(vals, v.WithdrawalCredentials...) + } + vals = binary.LittleEndian.AppendUint64(vals, v.EffectiveBalance) + if v.Slashed { + vals = append(vals, notNilMarker) + } else { + vals = append(vals, nilMarker) + } + vals = binary.LittleEndian.AppendUint64(vals, uint64(v.ActivationEligibilityEpoch)) + vals = binary.LittleEndian.AppendUint64(vals, uint64(v.ActivationEpoch)) + vals = binary.LittleEndian.AppendUint64(vals, uint64(v.ExitEpoch)) + vals = binary.LittleEndian.AppendUint64(vals, uint64(v.WithdrawableEpoch)) + } + + bals := make([]byte, 0, 8+len(h.balancesDiff)*8) + bals = binary.LittleEndian.AppendUint64(bals, uint64(len(h.balancesDiff))) + for _, b := range h.balancesDiff { + bals = binary.LittleEndian.AppendUint64(bals, uint64(b)) + } + return HdiffBytes{ + StateDiff: snappy.Encode(nil, h.stateDiff.serialize()), + ValidatorDiffs: snappy.Encode(nil, vals), + BalancesDiff: snappy.Encode(nil, bals), + } +} + +// diffToVals computes the difference between two BeaconStates and returns a slice of validatorDiffs. 
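+// Only validators that changed or were appended produce an entry; the public key is
+// recorded only for appended validators, since it cannot change for an existing index.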
+func diffToVals(source, target state.ReadOnlyBeaconState) ([]validatorDiff, error) { + sVals := source.ValidatorsReadOnly() + tVals := target.ValidatorsReadOnly() + if len(tVals) < len(sVals) { + return nil, errors.Errorf("target validators length %d is less than source %d", len(tVals), len(sVals)) + } + diffs := make([]validatorDiff, 0) + for i, s := range sVals { + ti := tVals[i] + if validatorsEqual(s, ti) { + continue + } + d := validatorDiff{ + Slashed: ti.Slashed(), + index: uint32(i), + EffectiveBalance: ti.EffectiveBalance(), + ActivationEligibilityEpoch: ti.ActivationEligibilityEpoch(), + ActivationEpoch: ti.ActivationEpoch(), + ExitEpoch: ti.ExitEpoch(), + WithdrawableEpoch: ti.WithdrawableEpoch(), + } + if !bytes.Equal(s.GetWithdrawalCredentials(), tVals[i].GetWithdrawalCredentials()) { + d.WithdrawalCredentials = slices.Clone(tVals[i].GetWithdrawalCredentials()) + } + diffs = append(diffs, d) + } + for i, ti := range tVals[len(sVals):] { + pubkey := ti.PublicKey() + diffs = append(diffs, validatorDiff{ + Slashed: ti.Slashed(), + index: uint32(i + len(sVals)), + PublicKey: pubkey[:], + WithdrawalCredentials: slices.Clone(ti.GetWithdrawalCredentials()), + EffectiveBalance: ti.EffectiveBalance(), + ActivationEligibilityEpoch: ti.ActivationEligibilityEpoch(), + ActivationEpoch: ti.ActivationEpoch(), + ExitEpoch: ti.ExitEpoch(), + WithdrawableEpoch: ti.WithdrawableEpoch(), + }) + } + return diffs, nil +} + +// validatorsEqual compares two ReadOnlyValidator objects for equality. This function makes extra assumptions that the validators +// are of the same index and thus does not check for certain fields that cannot change, like the PublicKey. +func validatorsEqual(s, t state.ReadOnlyValidator) bool { + if s == nil && t == nil { + return true + } + if s == nil || t == nil { + return false + } + if !bytes.Equal(s.GetWithdrawalCredentials(), t.GetWithdrawalCredentials()) { + return false + } + if s.EffectiveBalance() != t.EffectiveBalance() { + return false + } + if s.Slashed() != t.Slashed() { + return false + } + if s.ActivationEligibilityEpoch() != t.ActivationEligibilityEpoch() { + return false + } + if s.ActivationEpoch() != t.ActivationEpoch() { + return false + } + if s.ExitEpoch() != t.ExitEpoch() { + return false + } + return s.WithdrawableEpoch() == t.WithdrawableEpoch() +} + +// diffToBalances computes the difference between two BeaconStates' balances. 
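+// The result holds one signed delta per target balance; indices beyond the source
+// length carry the full target balance as their delta.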
+func diffToBalances(source, target state.ReadOnlyBeaconState) ([]int64, error) { + sBalances := source.Balances() + tBalances := target.Balances() + if len(tBalances) < len(sBalances) { + return nil, errors.Errorf("target balances length %d is less than source %d", len(tBalances), len(sBalances)) + } + diffs := make([]int64, len(tBalances)) + for i, s := range sBalances { + if tBalances[i] >= s { + diffs[i] = int64(tBalances[i] - s) + } else { + diffs[i] = -int64(s - tBalances[i]) + } + } + for i, t := range tBalances[len(sBalances):] { + diffs[i+len(sBalances)] = int64(t) // lint:ignore uintcast + } + return diffs, nil +} + +func diffInternal(source, target state.ReadOnlyBeaconState) (*hdiff, error) { + stateDiff, err := diffToState(source, target) + if err != nil { + return nil, err + } + validatorDiffs, err := diffToVals(source, target) + if err != nil { + return nil, err + } + balancesDiffs, err := diffToBalances(source, target) + if err != nil { + return nil, err + } + return &hdiff{ + stateDiff: stateDiff, + validatorDiffs: validatorDiffs, + balancesDiff: balancesDiffs, + }, nil +} + +// diffToState computes the difference between two BeaconStates and returns a stateDiff object. +func diffToState(source, target state.ReadOnlyBeaconState) (*stateDiff, error) { + ret := &stateDiff{} + ret.targetVersion = target.Version() + ret.slot = target.Slot() + if !helpers.ForksEqual(source.Fork(), target.Fork()) { + ret.fork = target.Fork() + } + if !helpers.BlockHeadersEqual(source.LatestBlockHeader(), target.LatestBlockHeader()) { + ret.latestBlockHeader = target.LatestBlockHeader() + } + diffBlockRoots(ret, source, target) + diffStateRoots(ret, source, target) + var err error + ret.historicalRoots, err = diffHistoricalRoots(source, target) + if err != nil { + return nil, err + } + if !helpers.Eth1DataEqual(source.Eth1Data(), target.Eth1Data()) { + ret.eth1Data = target.Eth1Data() + } + diffEth1DataVotes(ret, source, target) + ret.eth1DepositIndex = target.Eth1DepositIndex() + diffRandaoMixes(ret, source, target) + diffSlashings(ret, source, target) + if target.Version() < version.Altair { + ret.previousEpochAttestations, err = target.PreviousEpochAttestations() + if err != nil { + return nil, err + } + ret.currentEpochAttestations, err = target.CurrentEpochAttestations() + if err != nil { + return nil, err + } + } else { + ret.previousEpochParticipation, err = target.PreviousEpochParticipation() + if err != nil { + return nil, err + } + ret.currentEpochParticipation, err = target.CurrentEpochParticipation() + if err != nil { + return nil, err + } + } + ret.justificationBits = diffJustificationBits(target) + ret.previousJustifiedCheckpoint = target.PreviousJustifiedCheckpoint() + ret.currentJustifiedCheckpoint = target.CurrentJustifiedCheckpoint() + ret.finalizedCheckpoint = target.FinalizedCheckpoint() + if target.Version() < version.Altair { + return ret, nil + } + ret.inactivityScores, err = target.InactivityScores() + if err != nil { + return nil, err + } + ret.currentSyncCommittee, err = target.CurrentSyncCommittee() + if err != nil { + return nil, err + } + ret.nextSyncCommittee, err = target.NextSyncCommittee() + if err != nil { + return nil, err + } + if target.Version() < version.Bellatrix { + return ret, nil + } + ret.executionPayloadHeader, err = target.LatestExecutionPayloadHeader() + if err != nil { + return nil, err + } + if target.Version() < version.Capella { + return ret, nil + } + ret.nextWithdrawalIndex, err = target.NextWithdrawalIndex() + if err != nil { + return nil, err 
+ } + ret.nextWithdrawalValidatorIndex, err = target.NextWithdrawalValidatorIndex() + if err != nil { + return nil, err + } + if err := diffHistoricalSummaries(ret, source, target); err != nil { + return nil, err + } + if target.Version() < version.Electra { + return ret, nil + } + + if err := diffElectraFields(ret, source, target); err != nil { + return nil, err + } + if target.Version() < version.Fulu { + return ret, nil + } + + // Fulu: Proposer lookahead (override strategy - always use target's lookahead) + proposerLookahead, err := target.ProposerLookahead() + if err != nil { + return nil, errors.Wrap(err, "failed to get proposer lookahead from Fulu target state") + } + // Convert []primitives.ValidatorIndex to []uint64 + ret.proposerLookahead = make([]uint64, len(proposerLookahead)) + for i, idx := range proposerLookahead { + ret.proposerLookahead[i] = uint64(idx) + } + + return ret, nil +} + +func diffJustificationBits(target state.ReadOnlyBeaconState) byte { + j := target.JustificationBits().Bytes() + if len(j) != 0 { + return j[0] + } + return 0 +} + +// diffBlockRoots computes the difference between two BeaconStates' block roots. +func diffBlockRoots(diff *stateDiff, source, target state.ReadOnlyBeaconState) { + sRoots := source.BlockRoots() + tRoots := target.BlockRoots() + if len(sRoots) != len(tRoots) { + logrus.Errorf("Block roots length mismatch: source %d, target %d", len(sRoots), len(tRoots)) + return + } + if len(sRoots) != fieldparams.BlockRootsLength { + logrus.Errorf("Block roots length mismatch: expected: %d, source %d", fieldparams.BlockRootsLength, len(sRoots)) + return + } + for i := range fieldparams.BlockRootsLength { + if !bytes.Equal(sRoots[i], tRoots[i]) { + // This copy can be avoided if we use [][]byte instead of [][32]byte. + copy(diff.blockRoots[i][:], tRoots[i]) + } + } +} + +// diffStateRoots computes the difference between two BeaconStates' state roots. +func diffStateRoots(diff *stateDiff, source, target state.ReadOnlyBeaconState) { + sRoots := source.StateRoots() + tRoots := target.StateRoots() + if len(sRoots) != len(tRoots) { + logrus.Errorf("State roots length mismatch: source %d, target %d", len(sRoots), len(tRoots)) + return + } + if len(sRoots) != fieldparams.StateRootsLength { + logrus.Errorf("State roots length mismatch: expected %d, source %d", fieldparams.StateRootsLength, len(sRoots)) + return + } + for i := range fieldparams.StateRootsLength { + if !bytes.Equal(sRoots[i], tRoots[i]) { + // This copy can be avoided if we use [][]byte instead of [][32]byte. + copy(diff.stateRoots[i][:], tRoots[i]) + } + } +} + +func diffHistoricalRoots(source, target state.ReadOnlyBeaconState) ([][fieldparams.RootLength]byte, error) { + sRoots := source.HistoricalRoots() + tRoots := target.HistoricalRoots() + if len(tRoots) < len(sRoots) { + return nil, errors.New("target historical roots length is less than source") + } + ret := make([][fieldparams.RootLength]byte, len(tRoots)-len(sRoots)) + // We assume the states are consistent. + for i, root := range tRoots[len(sRoots):] { + // This copy can be avoided if we use [][]byte instead of [][32]byte. 
+ copy(ret[i][:], root) + } + return ret, nil +} + +func shouldAppendEth1DataVotes(sVotes, tVotes []*ethpb.Eth1Data) bool { + if len(tVotes) < len(sVotes) { + return false + } + for i, v := range sVotes { + if !helpers.Eth1DataEqual(v, tVotes[i]) { + return false + } + } + return true +} + +func diffEth1DataVotes(diff *stateDiff, source, target state.ReadOnlyBeaconState) { + sVotes := source.Eth1DataVotes() + tVotes := target.Eth1DataVotes() + if shouldAppendEth1DataVotes(sVotes, tVotes) { + diff.eth1VotesAppend = true + diff.eth1DataVotes = tVotes[len(sVotes):] + return + } + diff.eth1VotesAppend = false + diff.eth1DataVotes = tVotes +} + +func diffRandaoMixes(diff *stateDiff, source, target state.ReadOnlyBeaconState) { + sMixes := source.RandaoMixes() + tMixes := target.RandaoMixes() + if len(sMixes) != len(tMixes) { + logrus.Errorf("Randao mixes length mismatch: source %d, target %d", len(sMixes), len(tMixes)) + return + } + if len(sMixes) != fieldparams.RandaoMixesLength { + logrus.Errorf("Randao mixes length mismatch: expected %d, source %d", fieldparams.RandaoMixesLength, len(sMixes)) + return + } + for i := range fieldparams.RandaoMixesLength { + if !bytes.Equal(sMixes[i], tMixes[i]) { + // This copy can be avoided if we use [][]byte instead of [][32]byte. + copy(diff.randaoMixes[i][:], tMixes[i]) + } + } +} + +func diffSlashings(diff *stateDiff, source, target state.ReadOnlyBeaconState) { + sSlashings := source.Slashings() + tSlashings := target.Slashings() + for i := range fieldparams.SlashingsLength { + if tSlashings[i] < sSlashings[i] { + diff.slashings[i] = -int64(sSlashings[i] - tSlashings[i]) // lint:ignore uintcast + } else { + diff.slashings[i] = int64(tSlashings[i] - sSlashings[i]) // lint:ignore uintcast + } + } +} + +func diffHistoricalSummaries(diff *stateDiff, source, target state.ReadOnlyBeaconState) error { + tSummaries, err := target.HistoricalSummaries() + if err != nil { + return err + } + start := 0 + if source.Version() >= version.Capella { + sSummaries, err := source.HistoricalSummaries() + if err != nil { + return err + } + start = len(sSummaries) + } + if len(tSummaries) < start { + return errors.New("target historical summaries length is less than source") + } + diff.historicalSummaries = make([]*ethpb.HistoricalSummary, len(tSummaries)-start) + for i, summary := range tSummaries[start:] { + diff.historicalSummaries[i] = ðpb.HistoricalSummary{ + BlockSummaryRoot: slices.Clone(summary.BlockSummaryRoot), + StateSummaryRoot: slices.Clone(summary.StateSummaryRoot), + } + } + return nil +} + +func diffElectraFields(diff *stateDiff, source, target state.ReadOnlyBeaconState) (err error) { + diff.depositRequestsStartIndex, err = target.DepositRequestsStartIndex() + if err != nil { + return + } + diff.depositBalanceToConsume, err = target.DepositBalanceToConsume() + if err != nil { + return + } + diff.exitBalanceToConsume, err = target.ExitBalanceToConsume() + if err != nil { + return + } + diff.earliestExitEpoch, err = target.EarliestExitEpoch() + if err != nil { + return + } + diff.consolidationBalanceToConsume, err = target.ConsolidationBalanceToConsume() + if err != nil { + return + } + diff.earliestConsolidationEpoch, err = target.EarliestConsolidationEpoch() + if err != nil { + return + } + if err := diffPendingDeposits(diff, source, target); err != nil { + return err + } + if err := diffPendingPartialWithdrawals(diff, source, target); err != nil { + return err + } + return diffPendingConsolidations(diff, source, target) +} + +// kmpIndex returns the index of 
the first occurrence of the pattern in the slice using the Knuth-Morris-Pratt algorithm. +func kmpIndex[T any](lens int, t []*T, equals func(a, b *T) bool) int { + if lens == 0 || len(t) <= 1 { + return lens + } + + lps := computeLPS(t, equals) + result := lens - lps[len(lps)-1] + // Clamp result to valid range [0, lens] to handle cases where + // the LPS value exceeds lens due to repetitive patterns + if result < 0 { + return 0 + } + return result +} + +// computeLPS computes the longest prefix-suffix (LPS) array for the given pattern. +func computeLPS[T any](combined []*T, equals func(a, b *T) bool) []int { + lps := make([]int, len(combined)) + length := 0 + i := 1 + + for i < len(combined) { + if equals(combined[i], combined[length]) { + length++ + lps[i] = length + i++ + } else { + if length != 0 { + length = lps[length-1] + } else { + lps[i] = 0 + i++ + } + } + } + return lps +} + +func diffPendingDeposits(diff *stateDiff, source, target state.ReadOnlyBeaconState) error { + tPendingDeposits, err := target.PendingDeposits() + if err != nil { + return err + } + tlen := len(tPendingDeposits) + tPendingDeposits = append(tPendingDeposits, nil) + var sPendingDeposits []*ethpb.PendingDeposit + if source.Version() >= version.Electra { + sPendingDeposits, err = source.PendingDeposits() + if err != nil { + return err + } + } + tPendingDeposits = append(tPendingDeposits, sPendingDeposits...) + index := kmpIndex(len(sPendingDeposits), tPendingDeposits, helpers.PendingDepositsEqual) + + diff.pendingDepositIndex = uint64(index) + diff.pendingDepositDiff = make([]*ethpb.PendingDeposit, tlen+index-len(sPendingDeposits)) + for i, d := range tPendingDeposits[len(sPendingDeposits)-index : tlen] { + diff.pendingDepositDiff[i] = ðpb.PendingDeposit{ + PublicKey: slices.Clone(d.PublicKey), + WithdrawalCredentials: slices.Clone(d.WithdrawalCredentials), + Amount: d.Amount, + Signature: slices.Clone(d.Signature), + Slot: d.Slot, + } + } + return nil +} + +func diffPendingPartialWithdrawals(diff *stateDiff, source, target state.ReadOnlyBeaconState) error { + tPendingPartialWithdrawals, err := target.PendingPartialWithdrawals() + if err != nil { + return err + } + tlen := len(tPendingPartialWithdrawals) + tPendingPartialWithdrawals = append(tPendingPartialWithdrawals, nil) + var sPendingPartialWithdrawals []*ethpb.PendingPartialWithdrawal + if source.Version() >= version.Electra { + sPendingPartialWithdrawals, err = source.PendingPartialWithdrawals() + if err != nil { + return err + } + } + tPendingPartialWithdrawals = append(tPendingPartialWithdrawals, sPendingPartialWithdrawals...) 
+ index := kmpIndex(len(sPendingPartialWithdrawals), tPendingPartialWithdrawals, helpers.PendingPartialWithdrawalsEqual) + diff.pendingPartialWithdrawalsIndex = uint64(index) + diff.pendingPartialWithdrawalsDiff = make([]*ethpb.PendingPartialWithdrawal, tlen+index-len(sPendingPartialWithdrawals)) + for i, d := range tPendingPartialWithdrawals[len(sPendingPartialWithdrawals)-index : tlen] { + diff.pendingPartialWithdrawalsDiff[i] = ðpb.PendingPartialWithdrawal{ + Index: d.Index, + Amount: d.Amount, + WithdrawableEpoch: d.WithdrawableEpoch, + } + } + return nil +} + +func diffPendingConsolidations(diff *stateDiff, source, target state.ReadOnlyBeaconState) error { + tPendingConsolidations, err := target.PendingConsolidations() + if err != nil { + return err + } + tlen := len(tPendingConsolidations) + tPendingConsolidations = append(tPendingConsolidations, nil) + var sPendingConsolidations []*ethpb.PendingConsolidation + if source.Version() >= version.Electra { + sPendingConsolidations, err = source.PendingConsolidations() + if err != nil { + return err + } + } + tPendingConsolidations = append(tPendingConsolidations, sPendingConsolidations...) + index := kmpIndex(len(sPendingConsolidations), tPendingConsolidations, helpers.PendingConsolidationsEqual) + diff.pendingConsolidationsIndex = uint64(index) + diff.pendingConsolidationsDiffs = make([]*ethpb.PendingConsolidation, tlen+index-len(sPendingConsolidations)) + for i, d := range tPendingConsolidations[len(sPendingConsolidations)-index : tlen] { + diff.pendingConsolidationsDiffs[i] = ðpb.PendingConsolidation{ + SourceIndex: d.SourceIndex, + TargetIndex: d.TargetIndex, + } + } + return nil +} + +// applyValidatorDiff applies the validator diff to the source state in place. +func applyValidatorDiff(source state.BeaconState, diff []validatorDiff) (state.BeaconState, error) { + sVals := source.Validators() + if len(sVals) < len(diff) { + return nil, errors.Errorf("target validators length %d is less than source %d", len(diff), len(sVals)) + } + for _, d := range diff { + if d.index > uint32(len(sVals)) { + return nil, errors.Errorf("validator index %d is greater than length %d", d.index, len(sVals)) + } + if d.index == uint32(len(sVals)) { + // A valid diff should never have an index greater than the length of the source validators. + sVals = append(sVals, ðpb.Validator{}) + } + if d.PublicKey != nil { + sVals[d.index].PublicKey = slices.Clone(d.PublicKey) + } + if d.WithdrawalCredentials != nil { + sVals[d.index].WithdrawalCredentials = slices.Clone(d.WithdrawalCredentials) + } + sVals[d.index].EffectiveBalance = d.EffectiveBalance + sVals[d.index].Slashed = d.Slashed + sVals[d.index].ActivationEligibilityEpoch = d.ActivationEligibilityEpoch + sVals[d.index].ActivationEpoch = d.ActivationEpoch + sVals[d.index].ExitEpoch = d.ExitEpoch + sVals[d.index].WithdrawableEpoch = d.WithdrawableEpoch + } + if err := source.SetValidators(sVals); err != nil { + return nil, errors.Wrap(err, "failed to set validators") + } + return source, nil +} + +// applyBalancesDiff applies the balances diff to the source state in place. +func applyBalancesDiff(source state.BeaconState, diff []int64) (state.BeaconState, error) { + sBalances := source.Balances() + if len(diff) < len(sBalances) { + return nil, errors.Errorf("target balances length %d is less than source %d", len(diff), len(sBalances)) + } + sBalances = append(sBalances, make([]uint64, len(diff)-len(sBalances))...) 
+ for i, t := range diff { + if t >= 0 { + sBalances[i] += uint64(t) + } else { + sBalances[i] -= uint64(-t) + } + } + if err := source.SetBalances(sBalances); err != nil { + return nil, errors.Wrap(err, "failed to set balances") + } + return source, nil +} + +// applyStateDiff applies the given diff to the source state in place. +func applyStateDiff(ctx context.Context, source state.BeaconState, diff *stateDiff) (state.BeaconState, error) { + var err error + if source, err = updateToVersion(ctx, source, diff.targetVersion); err != nil { + return nil, errors.Wrap(err, "failed to update state to target version") + } + if err := source.SetSlot(diff.slot); err != nil { + return nil, errors.Wrap(err, "failed to set slot") + } + if diff.fork != nil { + if err := source.SetFork(diff.fork); err != nil { + return nil, errors.Wrap(err, "failed to set fork") + } + } + if diff.latestBlockHeader != nil { + if err := source.SetLatestBlockHeader(diff.latestBlockHeader); err != nil { + return nil, errors.Wrap(err, "failed to set latest block header") + } + } + if err := applyBlockRootsDiff(source, diff); err != nil { + return nil, errors.Wrap(err, "failed to apply block roots diff") + } + if err := applyStateRootsDiff(source, diff); err != nil { + return nil, errors.Wrap(err, "failed to apply state roots diff") + } + if err := applyHistoricalRootsDiff(source, diff); err != nil { + return nil, errors.Wrap(err, "failed to apply historical roots diff") + } + if diff.eth1Data != nil { + if err := source.SetEth1Data(diff.eth1Data); err != nil { + return nil, errors.Wrap(err, "failed to set eth1 data") + } + } + if err := applyEth1DataVotesDiff(source, diff); err != nil { + return nil, errors.Wrap(err, "failed to apply eth1 data votes diff") + } + if err := source.SetEth1DepositIndex(diff.eth1DepositIndex); err != nil { + return nil, errors.Wrap(err, "failed to set eth1 deposit index") + } + if err := applyRandaoMixesDiff(source, diff); err != nil { + return nil, errors.Wrap(err, "failed to apply randao mixes diff") + } + if err := applySlashingsDiff(source, diff); err != nil { + return nil, errors.Wrap(err, "failed to apply slashings diff") + } + if diff.targetVersion == version.Phase0 { + if err := source.SetPreviousEpochAttestations(diff.previousEpochAttestations); err != nil { + return nil, errors.Wrap(err, "failed to set previous epoch attestations") + } + if err := source.SetCurrentEpochAttestations(diff.currentEpochAttestations); err != nil { + return nil, errors.Wrap(err, "failed to set current epoch attestations") + } + } else { + if err := source.SetPreviousParticipationBits(diff.previousEpochParticipation); err != nil { + return nil, errors.Wrap(err, "failed to set previous epoch participation") + } + if err := source.SetCurrentParticipationBits(diff.currentEpochParticipation); err != nil { + return nil, errors.Wrap(err, "failed to set current epoch participation") + } + } + if err := source.SetJustificationBits([]byte{diff.justificationBits}); err != nil { + return nil, errors.Wrap(err, "failed to set justification bits") + } + if diff.previousJustifiedCheckpoint != nil { + if err := source.SetPreviousJustifiedCheckpoint(diff.previousJustifiedCheckpoint); err != nil { + return nil, errors.Wrap(err, "failed to set previous justified checkpoint") + } + } + if diff.currentJustifiedCheckpoint != nil { + if err := source.SetCurrentJustifiedCheckpoint(diff.currentJustifiedCheckpoint); err != nil { + return nil, errors.Wrap(err, "failed to set current justified checkpoint") + } + } + if 
diff.finalizedCheckpoint != nil { + if err := source.SetFinalizedCheckpoint(diff.finalizedCheckpoint); err != nil { + return nil, errors.Wrap(err, "failed to set finalized checkpoint") + } + } + if diff.targetVersion < version.Altair { + return source, nil + } + if err := source.SetInactivityScores(diff.inactivityScores); err != nil { + return nil, errors.Wrap(err, "failed to set inactivity scores") + } + if diff.currentSyncCommittee != nil { + if err := source.SetCurrentSyncCommittee(diff.currentSyncCommittee); err != nil { + return nil, errors.Wrap(err, "failed to set current sync committee") + } + } + if diff.nextSyncCommittee != nil { + if err := source.SetNextSyncCommittee(diff.nextSyncCommittee); err != nil { + return nil, errors.Wrap(err, "failed to set next sync committee") + } + } + if diff.targetVersion < version.Bellatrix { + return source, nil + } + if diff.executionPayloadHeader != nil { + if err := source.SetLatestExecutionPayloadHeader(diff.executionPayloadHeader); err != nil { + return nil, errors.Wrap(err, "failed to set latest execution payload header") + } + } + if diff.targetVersion < version.Capella { + return source, nil + } + if err := source.SetNextWithdrawalIndex(diff.nextWithdrawalIndex); err != nil { + return nil, errors.Wrap(err, "failed to set next withdrawal index") + } + if err := source.SetNextWithdrawalValidatorIndex(diff.nextWithdrawalValidatorIndex); err != nil { + return nil, errors.Wrap(err, "failed to set next withdrawal validator index") + } + if err := applyHistoricalSummariesDiff(source, diff); err != nil { + return nil, errors.Wrap(err, "failed to apply historical summaries diff") + } + if diff.targetVersion < version.Electra { + return source, nil + } + if err := source.SetDepositRequestsStartIndex(diff.depositRequestsStartIndex); err != nil { + return nil, errors.Wrap(err, "failed to set deposit requests start index") + } + if err := source.SetDepositBalanceToConsume(diff.depositBalanceToConsume); err != nil { + return nil, errors.Wrap(err, "failed to set deposit balance to consume") + } + if err := source.SetExitBalanceToConsume(diff.exitBalanceToConsume); err != nil { + return nil, errors.Wrap(err, "failed to set exit balance to consume") + } + if err := source.SetEarliestExitEpoch(diff.earliestExitEpoch); err != nil { + return nil, errors.Wrap(err, "failed to set earliest exit epoch") + } + if err := source.SetConsolidationBalanceToConsume(diff.consolidationBalanceToConsume); err != nil { + return nil, errors.Wrap(err, "failed to set consolidation balance to consume") + } + if err := source.SetEarliestConsolidationEpoch(diff.earliestConsolidationEpoch); err != nil { + return nil, errors.Wrap(err, "failed to set earliest consolidation epoch") + } + if err := applyPendingDepositsDiff(source, diff); err != nil { + return nil, errors.Wrap(err, "failed to apply pending deposits diff") + } + if err := applyPendingPartialWithdrawalsDiff(source, diff); err != nil { + return nil, errors.Wrap(err, "failed to apply pending partial withdrawals diff") + } + if err := applyPendingConsolidationsDiff(source, diff); err != nil { + return nil, errors.Wrap(err, "failed to apply pending consolidations diff") + } + if diff.targetVersion < version.Fulu { + return source, nil + } + if err := applyProposerLookaheadDiff(source, diff); err != nil { + return nil, errors.Wrap(err, "failed to apply proposer lookahead diff") + } + return source, nil +} + +// applyPendingDepositsDiff applies the pending deposits diff to the source state in place. 
+func applyPendingDepositsDiff(source state.BeaconState, diff *stateDiff) error { + sPendingDeposits, err := source.PendingDeposits() + if err != nil { + return errors.Wrap(err, "failed to get pending deposits") + } + sPendingDeposits = sPendingDeposits[int(diff.pendingDepositIndex):] + for _, t := range diff.pendingDepositDiff { + sPendingDeposits = append(sPendingDeposits, ðpb.PendingDeposit{ + PublicKey: slices.Clone(t.PublicKey), + WithdrawalCredentials: slices.Clone(t.WithdrawalCredentials), + Amount: t.Amount, + Signature: slices.Clone(t.Signature), + Slot: t.Slot, + }) + } + return source.SetPendingDeposits(sPendingDeposits) +} + +// applyPendingPartialWithdrawalsDiff applies the pending partial withdrawals diff to the source state in place. +func applyPendingPartialWithdrawalsDiff(source state.BeaconState, diff *stateDiff) error { + sPendingPartialWithdrawals, err := source.PendingPartialWithdrawals() + if err != nil { + return errors.Wrap(err, "failed to get pending partial withdrawals") + } + sPendingPartialWithdrawals = sPendingPartialWithdrawals[int(diff.pendingPartialWithdrawalsIndex):] + for _, t := range diff.pendingPartialWithdrawalsDiff { + sPendingPartialWithdrawals = append(sPendingPartialWithdrawals, ðpb.PendingPartialWithdrawal{ + Index: t.Index, + Amount: t.Amount, + WithdrawableEpoch: t.WithdrawableEpoch, + }) + } + return source.SetPendingPartialWithdrawals(sPendingPartialWithdrawals) +} + +// applyPendingConsolidationsDiff applies the pending consolidations diff to the source state in place. +func applyPendingConsolidationsDiff(source state.BeaconState, diff *stateDiff) error { + sPendingConsolidations, err := source.PendingConsolidations() + if err != nil { + return errors.Wrap(err, "failed to get pending consolidations") + } + sPendingConsolidations = sPendingConsolidations[int(diff.pendingConsolidationsIndex):] + for _, t := range diff.pendingConsolidationsDiffs { + sPendingConsolidations = append(sPendingConsolidations, ðpb.PendingConsolidation{ + SourceIndex: t.SourceIndex, + TargetIndex: t.TargetIndex, + }) + } + return source.SetPendingConsolidations(sPendingConsolidations) +} + +// applyHistoricalSummariesDiff applies the historical summaries diff to the source state in place. +func applyHistoricalSummariesDiff(source state.BeaconState, diff *stateDiff) error { + tSummaries := diff.historicalSummaries + for _, t := range tSummaries { + if err := source.AppendHistoricalSummaries(ðpb.HistoricalSummary{ + BlockSummaryRoot: slices.Clone(t.BlockSummaryRoot), + StateSummaryRoot: slices.Clone(t.StateSummaryRoot), + }); err != nil { + return errors.Wrap(err, "failed to append historical summary") + } + } + return nil +} + +// applySlashingsDiff applies the slashings diff to the source state in place. +func applySlashingsDiff(source state.BeaconState, diff *stateDiff) error { + sSlashings := source.Slashings() + tSlashings := diff.slashings + if len(sSlashings) != len(tSlashings) { + return errors.Errorf("slashings length mismatch source %d, target %d", len(sSlashings), len(tSlashings)) + } + if len(sSlashings) != fieldparams.SlashingsLength { + return errors.Errorf("slashings length mismatch expected %d, source %d", fieldparams.SlashingsLength, len(sSlashings)) + } + for i, t := range tSlashings { + if t > 0 { + sSlashings[i] += uint64(t) + } else { + sSlashings[i] -= uint64(-t) + } + } + return source.SetSlashings(sSlashings) +} + +// applyRandaoMixesDiff applies the randao mixes diff to the source state in place. 
+func applyRandaoMixesDiff(source state.BeaconState, diff *stateDiff) error { + sMixes := source.RandaoMixes() + tMixes := diff.randaoMixes + if len(sMixes) != len(tMixes) { + return errors.Errorf("randao mixes length mismatch, source %d, target %d", len(sMixes), len(tMixes)) + } + if len(sMixes) != fieldparams.RandaoMixesLength { + return errors.Errorf("randao mixes length mismatch, expected %d, source %d", fieldparams.RandaoMixesLength, len(sMixes)) + } + for i := range fieldparams.RandaoMixesLength { + if tMixes[i] != [fieldparams.RootLength]byte{} { + sMixes[i] = slices.Clone(tMixes[i][:]) + } + } + return source.SetRandaoMixes(sMixes) +} + +// applyEth1DataVotesDiff applies the eth1 data votes diff to the source state in place. +func applyEth1DataVotesDiff(source state.BeaconState, diff *stateDiff) error { + sVotes := source.Eth1DataVotes() + tVotes := diff.eth1DataVotes + if diff.eth1VotesAppend { + sVotes = append(sVotes, tVotes...) + return source.SetEth1DataVotes(sVotes) + } + return source.SetEth1DataVotes(tVotes) +} + +// applyHistoricalRootsDiff applies the historical roots diff to the source state in place. +func applyHistoricalRootsDiff(source state.BeaconState, diff *stateDiff) error { + sRoots := source.HistoricalRoots() + tRoots := diff.historicalRoots + for _, t := range tRoots { + sRoots = append(sRoots, t[:]) + } + return source.SetHistoricalRoots(sRoots) +} + +// applyStateRootsDiff applies the state roots diff to the source state in place. +func applyStateRootsDiff(source state.BeaconState, diff *stateDiff) error { + sRoots := source.StateRoots() + tRoots := diff.stateRoots + if len(sRoots) != len(tRoots) { + return errors.Errorf("state roots length mismatch, source %d, target %d", len(sRoots), len(tRoots)) + } + if len(sRoots) != fieldparams.StateRootsLength { + return errors.Errorf("state roots length mismatch, expected %d, source %d", fieldparams.StateRootsLength, len(sRoots)) + } + for i := range fieldparams.StateRootsLength { + if tRoots[i] != [fieldparams.RootLength]byte{} { + sRoots[i] = slices.Clone(tRoots[i][:]) + } + } + return source.SetStateRoots(sRoots) +} + +// applyBlockRootsDiff applies the block roots diff to the source state in place. +func applyBlockRootsDiff(source state.BeaconState, diff *stateDiff) error { + sRoots := source.BlockRoots() + tRoots := diff.blockRoots + if len(sRoots) != len(tRoots) { + return errors.Errorf("block roots length mismatch, source %d, target %d", len(sRoots), len(tRoots)) + } + if len(sRoots) != fieldparams.BlockRootsLength { + return errors.Errorf("block roots length mismatch, expected %d, source %d", fieldparams.BlockRootsLength, len(sRoots)) + } + for i := range fieldparams.BlockRootsLength { + if tRoots[i] != [fieldparams.RootLength]byte{} { + sRoots[i] = slices.Clone(tRoots[i][:]) + } + } + return source.SetBlockRoots(sRoots) +} + +// applyProposerLookaheadDiff applies the proposer lookahead diff to the source state in place. +func applyProposerLookaheadDiff(source state.BeaconState, diff *stateDiff) error { + // Fulu: Proposer lookahead (override strategy - always use target's lookahead) + proposerIndices := make([]primitives.ValidatorIndex, len(diff.proposerLookahead)) + for i, idx := range diff.proposerLookahead { + proposerIndices[i] = primitives.ValidatorIndex(idx) + } + return source.SetProposerLookahead(proposerIndices) +} + +// updateToVersion updates the state to the given version in place. 
+func updateToVersion(ctx context.Context, source state.BeaconState, target int) (ret state.BeaconState, err error) {
+	if source.Version() == target {
+		return source, nil
+	}
+	if source.Version() > target {
+		return nil, errors.Errorf("cannot downgrade state from %s to %s", version.String(source.Version()), version.String(target))
+	}
+	switch source.Version() {
+	case version.Phase0:
+		ret, err = altair.ConvertToAltair(source)
+	case version.Altair:
+		ret, err = execution.UpgradeToBellatrix(source)
+	case version.Bellatrix:
+		ret, err = capella.UpgradeToCapella(source)
+	case version.Capella:
+		ret, err = deneb.UpgradeToDeneb(source)
+	case version.Deneb:
+		ret, err = electra.ConvertToElectra(source)
+	case version.Electra:
+		ret, err = fulu.ConvertToFulu(source)
+	default:
+		return nil, errors.Errorf("unsupported version %s", version.String(source.Version()))
+	}
+	if err != nil {
+		return nil, errors.Wrap(err, "failed to upgrade state")
+	}
+	return updateToVersion(ctx, ret, target)
+}
diff --git a/consensus-types/hdiff/state_diff.md b/consensus-types/hdiff/state_diff.md
new file mode 100644
index 0000000000..29977478c2
--- /dev/null
+++ b/consensus-types/hdiff/state_diff.md
@@ -0,0 +1,399 @@
+# State diffs in Prysm
+
+This document describes the design and implementation details of hierarchical state diffs in Prysm. They follow the same design as [Lighthouse](https://github.com/dapplion/tree-states-review-guide/blob/main/persisted_hdiff.md), which in turn is an implementation of A. Nashatyrev's [design](https://hackmd.io/G82DNSdvR5Osw2kg565lBA).
+
+Incremental state diffs can be used both for databases and for in-memory representations of states. This document focuses on the state diffs needed for the first use case. Prysm already handles memory deduplication of states with multi-value slices, so a diff mechanism would have less impact on the in-memory side.
+
+## The basic design.
+
+The idea is to organize the cold-state database as a forest:
+- Each tree in the forest is rooted by a full state snapshot, saved every λ_0 slots (think once a year).
+- Each tree has the same height h. The root is unique and corresponds to the full snapshot, but on each level *1 ≤ i ≤ h* there are β_i bifurcation nodes, which are stored every λ_i slots. Thus, for example, if we had *h = 2*, *λ_0 = 2^21*, *λ_1 = 2^18*, *λ_2 = 2^5*, we would have *β_1 = 7* and *β_2 = 8191* (notice that we subtract 1 since the first bifurcation node is just the state of the upper level). On the first level we would have 7 nodes written every ~36 days, and on the second level we would have 8191 nodes written once every epoch.
+- At each level *1 ≤ i ≤ h*, in the *β_i* nodes that are stored, instead of writing a full state snapshot we store the diff between the state at that given slot and the state corresponding to the parent node in level *i-1*.
+
+![database layout](./db_layout.png)
+
+### Saving state diffs.
+
+Let us assume that we have a running node that already has an hdiff-compatible database. That is, a snapshot with a full state is saved at some slot `o` (for *offset*). Suppose that we have just updated finalization, so we have some blocks that we may need to save a state diff (or even a snapshot) for. Suppose we try for a block with slot `c`. Then at each of the slots
+
+o, o + λ_0, o + 2 λ_0, ..., o + k_0 λ_0
+
+we have a full snapshot state saved. We assume that o + (k_0+1) λ_0 > c, so that our latest snapshot is in fact at slot o + k_0 λ_0. Let us call this state *s_0*. At each of the slots
+
+o + k_0 λ_0 + λ_1, o + k_0 λ_0 + 2 λ_1, ..., o + k_0 λ_0 + k_1 λ_1
+
+we have stored a state diff between the state at that slot and *s_0*. We assume that
+
+o + k_0 λ_0 + (k_1+1) λ_1 > c
+
+so that the latest diff at level one is in fact at slot o + k_0 λ_0 + k_1 λ_1. Let us call the state at that slot *s_1*. It is obtained by applying the state diff saved at that slot to the state *s_0*. Similarly, at the next level, for each slot
+
+o + k_0 λ_0 + k_1 λ_1 + λ_2, o + k_0 λ_0 + k_1 λ_1 + 2 λ_2, ..., o + k_0 λ_0 + k_1 λ_1 + k_2 λ_2
+
+we have stored a state diff to the state *s_1*. We assume that
+
+o + k_0 λ_0 + k_1 λ_1 + (k_2+1) λ_2 > c
+
+so that the latest diff at level two is indeed at slot o + k_0 λ_0 + k_1 λ_1 + k_2 λ_2. Let us call the corresponding state *s_2*. It is obtained by applying the last diff at level 2 to the state *s_1*, which in turn was obtained by applying a diff to the state *s_0*.
+
+We continue until we have covered all of our levels up to level h. That is, we have states *s_0*, *s_1*, ..., *s_{h}*, and the last one is the state at slot
+
+o + k_0 λ_0 + k_1 λ_1 + ... + k_h λ_h
+
+Now we want to decide what to do with our state *t* at slot c. We act as follows. If o + k_0 λ_0 + k_1 λ_1 + ... + (k_h+1) λ_h > c, we don't store anything. If, on the other hand, o + k_0 λ_0 + k_1 λ_1 + ... + (k_h+1) λ_h = c, we will store either a state diff or an entire new snapshot. We proceed as follows.
+
+If k_h < β_h, we store a new state diff `Diff(s_{h-1},t)` at the slot c in level `h`.
+
+If k_h = β_h, we check the next level. If k_{h-1} < β_{h-1}, then we store a new state diff `Diff(s_{h-2},t)` at level `h-1` at the slot `c`.
+
+If k_{h-1} = β_{h-1}, then we check the next level: if k_{h-2} < β_{h-2}, then we store a new state diff `Diff(s_{h-3}, t)` at level `h-2` at the slot `c`.
+
+We continue like this; if we reach the point in which all k_i = β_i for i=1,...,h, then we store a new full snapshot with the state `t` at the slot `c`.
+
+### Triggering storage
+
+When we update finalization, we call `MigrateToCold`. Instead of calling the database to store a full state every few epochs (as we do today), this function will send the state `t` at slot `c`, as in the previous section, so that the corresponding diff is saved. The package that handles state saving internally is the `database` package. However, the function `MigrateToCold` is aware of the values of the offset *o* and the configuration constants λ_1, ..., λ_h, so as to only send the states `t` for which `c` is of the form `o + k λ_h`.
+
+### Database changes
+
+The database exposes the following API to save states:
+
+```go
+SaveState(ctx context.Context, state state.ReadOnlyBeaconState, blockRoot [32]byte) error
+```
+
+This function will change internally to save just the diff, or a snapshot if appropriate. On the other hand, the following is the API to recover a state:
+
+```go
+HasState(ctx context.Context, blockRoot [32]byte) bool
+State(ctx context.Context, blockRoot [32]byte) (state.BeaconState, error)
+```
+
+The first function can now return true in a static manner according to the slot of the corresponding `blockRoot`: it simply checks that the slot is of the form o + k λ_h. The second function can recover those states by applying the corresponding diffs.
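+
+As a rough illustration of this slot rule, here is a minimal sketch of how one could decide whether anything is stored for a given slot and, if so, at which level of the hierarchy. The names `storageLevel`, `offset` and `lambdas` are hypothetical and not part of the actual implementation:
+
+```go
+// storageLevel returns (level, true) if a snapshot (level 0) or a diff
+// (level 1..h) is stored for the given slot, and (0, false) otherwise.
+// offset is o and lambdas holds λ_0, ..., λ_h.
+func storageLevel(slot, offset uint64, lambdas []uint64) (int, bool) {
+	if slot < offset {
+		return 0, false
+	}
+	d := slot - offset
+	for level, lambda := range lambdas {
+		if d%lambda == 0 {
+			return level, true
+		}
+	}
+	return 0, false
+}
+```
+
+With the example parameters above (λ_0 = 2^21, λ_1 = 2^18, λ_2 = 2^5), slot o + 2^18 would correspond to a level-1 diff, while a slot that is not a multiple of 32 past the offset would not be stored at all.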
+
+Summarizing, the database has no changes in the exposed API, minimizing changes in the overall Prysm implementation, while the database internally changes the functions `State` and `SaveState` to use the `consensus-types/hdiff` package. This makes the serialization package fairly contained and only accessible from within the database package.
+
+### Stategen changes
+
+The `stategen` package is responsible for the migration to the cold database. It exposes the function
+
+```go
+func (s *State) MigrateToCold(ctx context.Context, fRoot [32]byte) error
+```
+that takes the finalized root and decides which states to save. This function now saves based only on the slot of the state, for those slots that have the form o + k λ_h. A **warning** about missing blocks: since the database now has to keep states by slot, a good approach in this function, when there is a missing block at the corresponding slot, is to process the state forward to the right slot and save it already processed.
+
+Another function that needs to change minimally is the function
+```go
+func (s *State) StateByRoot(ctx context.Context, blockRoot [32]byte) (state.BeaconState, error)
+```
+This function will get the ancestor from the database simply by slot rather than by root.
+
+### Longer term changes
+
+We could change the database API to include getters and setters by slot in the cold database; since the cold database only keeps canonical states anyway, this would make things easier at the stategen level.
+
+### Configuration
+
+We can make the constants h and λ_0, ... , λ_h user-configurable. Thus, someone who is less storage constrained and wants to run an archive RPC node will set h higher and λ_h smaller (say 32, to save one diff every epoch), while a user who doesn't care about past states may even set `h=0` and not save anything.
+
+### Database migration
+
+No migration support is expected.
+
+### Startup from clean database
+
+Starting up from a clean database with checkpoint sync will download the checkpoint state at slot o, set that slot as the offset in the database, and save the first full snapshot with the checkpoint state.
+
+Starting up from a clean database and from genesis will set o = 0 and start syncing from genesis as usual.
+
+### Backfill
+
+The following is added as a configurable option: when passing the flag `--backfill-origin-state ssz`, the node will download the state `ssz` and set this state's slot as the offset. It will download the checkpoint state and start syncing forward as usual, but will not call `MigrateToCold` until the backfill service is finished. In the background the node will download all blocks all the way to the origin state `ssz`, then start forward syncing those blocks, regenerating the finalized states and saving those whose slots are of the form o + k λ_h. Once the forward syncing has caught up with the finalized checkpoint, we can start calling `MigrateToCold` again. This backfill mechanism is much faster than the current forward syncing to regenerate the states: we do not need to do any checks on the EL since the blocks are already finalized and trusted, and the hashes are already confirmed.
+
+### Database Pruning
+
+Currently we have a flag `--pruner-retention-epochs`, which will be deprecated. Instead, the pruning mechanism is simply the following: the user specifies how many snapshots they want to keep (by default 0, which means keep all snapshots).
+If the user specifies, say, `--pruner-retention-snapshots 1`, then the node will delete everything in the database every time we save a new snapshot, every λ_0 slots. In particular, a user who wants to keep the database to a minimum will set h=0, set λ_0 to a very large value, and pass 1 to this flag; the node will then only keep one state at any time and will not update it.
+
+## Implementation details.
+
+This section contains actual implementation details of the feature. It will be populated as pull requests are opened with the final details of the implementation. For a high-level design document, please refer to [the previous section](#the-basic-design).
+
+### Serialization
+
+The package `hdiff` located in `consensus-types/hdiff` is responsible for computing and applying state diffs between two different beacon states and serializing/deserializing them to/from a byte sequence.
+
+#### Exported API
+
+The only exported API consists of:
+
+```go
+type HdiffBytes struct {
+	StateDiff      []byte
+	ValidatorDiffs []byte
+	BalancesDiff   []byte
+}
+
+func Diff(source, target state.ReadOnlyBeaconState) (HdiffBytes, error)
+
+func ApplyDiff(ctx context.Context, source state.BeaconState, diff HdiffBytes) (state.BeaconState, error)
+```
+
+The structure `HdiffBytes` contains three different slices that can be handled independently by the caller (typically this will be database methods). These three slices are the serialized and Snappy-compressed form of a state diff between two different states.
+
+The function `Diff` takes two states and returns the serialized diff between them. The function `ApplyDiff` takes a state and a diff and returns the target state after having applied the diff to the source state.
+
+#### The `hdiff` structure
+
+When comparing a source state *s* and a target state *t*, before serializing, their difference is kept in a native structure `hdiff` which itself consists of three separate diffs.
+
+```go
+type hdiff struct {
+	stateDiff      *stateDiff
+	validatorDiffs []validatorDiff
+	balancesDiff   []int64
+}
+```
+
+The `stateDiff` entry contains the bulk of the state diff, except the validator registry diff and the balance slice diff. These last two are kept separate so that they can be stored separately. Oftentimes, local RPC requests are for balances or validator status, and with the hierarchical structure we can reproduce them without regenerating the full state.
+
+#### The `stateDiff` structure
+
+This structure encodes the possible differences between two beacon states.
+
+```go
+type stateDiff struct {
+	targetVersion               int
+	eth1VotesAppend             bool
+	justificationBits           byte
+	slot                        primitives.Slot
+	fork                        *ethpb.Fork
+	latestBlockHeader           *ethpb.BeaconBlockHeader
+	blockRoots                  [fieldparams.BlockRootsLength][fieldparams.RootLength]byte
+	stateRoots                  [fieldparams.StateRootsLength][fieldparams.RootLength]byte
+	historicalRoots             [][fieldparams.RootLength]byte
+	eth1Data                    *ethpb.Eth1Data
+	eth1DataVotes               []*ethpb.Eth1Data
+	eth1DepositIndex            uint64
+	randaoMixes                 [fieldparams.RandaoMixesLength][fieldparams.RootLength]byte
+	slashings                   [fieldparams.SlashingsLength]int64
+	previousEpochAttestations   []*ethpb.PendingAttestation
+	currentEpochAttestations    []*ethpb.PendingAttestation
+	previousJustifiedCheckpoint *ethpb.Checkpoint
+	currentJustifiedCheckpoint  *ethpb.Checkpoint
+	finalizedCheckpoint         *ethpb.Checkpoint
+
+	previousEpochParticipation []byte
+	currentEpochParticipation  []byte
+	inactivityScores           []uint64
+	currentSyncCommittee       *ethpb.SyncCommittee
+	nextSyncCommittee          *ethpb.SyncCommittee
+
+	executionPayloadHeader interfaces.ExecutionData
+
+	nextWithdrawalIndex          uint64
+	nextWithdrawalValidatorIndex primitives.ValidatorIndex
+	historicalSummaries          []*ethpb.HistoricalSummary
+
+	depositRequestsStartIndex     uint64
+	depositBalanceToConsume      primitives.Gwei
+	exitBalanceToConsume         primitives.Gwei
+	earliestExitEpoch            primitives.Epoch
+	consolidationBalanceToConsume primitives.Gwei
+	earliestConsolidationEpoch    primitives.Epoch
+
+	pendingDepositIndex            uint64
+	pendingPartialWithdrawalsIndex uint64
+	pendingConsolidationsIndex     uint64
+	pendingDepositDiff             []*ethpb.PendingDeposit
+	pendingPartialWithdrawalsDiff  []*ethpb.PendingPartialWithdrawal
+	pendingConsolidationsDiffs     []*ethpb.PendingConsolidation
+
+	proposerLookahead []uint64
+}
+```
+
+This type is only used internally when serializing/deserializing and applying state diffs. We could in principle avoid double allocations and increase performance by not having a native type at all and working directly with the serialized bytes. The tradeoff is the readability of the serialization functions.
+
+#### The `validatorDiff` structure
+
+This structure is similar to the `stateDiff` one; it is only used internally in the `hdiff` package in `consensus-types`.
+
+```go
+type validatorDiff struct {
+	Slashed                    bool
+	index                      uint32
+	PublicKey                  []byte
+	WithdrawalCredentials      []byte
+	EffectiveBalance           uint64
+	ActivationEligibilityEpoch primitives.Epoch
+	ActivationEpoch            primitives.Epoch
+	ExitEpoch                  primitives.Epoch
+	WithdrawableEpoch          primitives.Epoch
+}
+```
+
+#### The `balancesDiff` slice
+
+Consider a source state `s` and a target state `t` assumed to be newer than `s`, so that the length of `t.balances` is greater than or equal to that of `s.balances`. The `balancesDiff` slice inside the `hdiff` structure is computed simply as the algebraic difference: its *i-th* entry is given by `t.balances[i] - s.balances[i]`, where the second term is taken to be zero if `i ≥ len(s.balances)`.
+
+#### Deserializing with `newHdiff`
+
+The function
+```go
+func newHdiff(data HdiffBytes) (*hdiff, error)
+```
+takes a serialized diff and produces the native internal type `hdiff`. This function encodes the internal logic for deserialization. It internally calls the functions `newStateDiff`, `newValidatorDiffs` and `newBalancesDiff` to obtain the three inner structures.
+
+The main deserialization routines take the byte slices and first decompress them with `snappy.Decode`.
+They create an empty `stateDiff`, `validatorDiff` or `balancesDiff` object `ret`, and after that they pass a pointer to the decompressed byte slice `data` to helper functions `ret.readXXX(&data)` that populate each of the entries of `ret`. Here `XXX` corresponds to each of the entries in the beacon state, like `fork`, `slot`, etc. Each one of the helpers receives a pointer to the `data` slice that contains the byte slice of the diff that **is still yet to be deserialized**. The helper populates the corresponding entry in the hdiff structure and then modifies the `data` slice to drop the deserialized bytes. That is, each helper receives a slice that needs to be deserialized starting from its first byte.
+
+The following list documents the method that is used for serialization/deserialization of each entry:
+
+##### Version
+
+The version is stored as a little-endian `uint64` in a fixed 8 bytes of `data`. This version is the target version; that is, we override whatever the source state version is with this target version.
+
+##### Slot
+
+The slot is treated exactly the same as the version entry.
+
+##### Fork
+
+The fork is deserialized as follows. If the first byte of `data` is zero (a constant called `nilMarker` in the package), then the fork pointer is `nil` in the `hdiff` structure. If the first byte of `data` is not zero, then the remaining bytes deserialize to a full `Fork` object.
+
+When applying the diff, if the fork pointer is `nil` then the source's Fork is not changed, while if it is non-nil, then the source's Fork is changed to whatever the `hdiff` pointer is.
+
+##### Latest Block Header
+
+The latest block header is treated exactly like the Fork pointer.
+
+##### Block Roots
+
+The block roots slice is deserialized literally as a full slice of beacon block roots. This may seem like a large waste of memory and space, since this slice is 8192 roots of 32 bytes each. However, the serialization process is as follows: if a block root has not changed between the source and the target state, we store a full zero root `0x00...`. For states that are *close by*, the block roots slice will not have changed much; this produces a slice that is mostly zeroes, which occupies minimal space after Snappy compression. When two states are more than 8192 slots apart, the target block roots slice has to be saved in its entirety, which is what this method achieves.
+
+We could get a little more performance here if, instead of keeping a full zeroed-out root in the internal `hdiff` structure, we stored an empty slice, but then the length checks would become slightly more complicated.
+
+##### State Roots
+
+The state roots slice is treated exactly like the block roots one.
+
+##### Historical Roots
+
+The historical roots slice diff is stored as follows: the first 8 bytes store a little-endian `uint64` that determines the length of the slice. After this, the following bytes contain as many 32-byte roots as this length indicates. These are the roots that need to be appended to the source state's historical roots.
+
+##### Eth1 Data
+
+The Eth1 Data diff object is treated exactly like the fork object.
+
+##### Eth1 Data Votes
+
+The `stateDiff` structure has two fields related to Eth1 data votes: the boolean entry `eth1VotesAppend` and the slice `eth1DataVotes`. The boolean indicates whether the slice is to be *appended* to the source state's votes or whether the eth1 data votes slice needs to be completely replaced with the slice in the diff.
+
+Deserialization then goes as follows: if the first byte is `nilMarker`, then `eth1VotesAppend` is set to `true`, and to `false` otherwise. The following 8 bytes contain a `uint64` serialization of the length of the slice. The remaining bytes contain the serialized slice.
+
+##### Eth1 Deposit Index
+
+This field always overrides the source's value. It is stored as an 8-byte serialized `uint64`.
+
+##### Randao Mixes
+
+This field is treated exactly like the block roots slice.
+
+##### Slashings
+
+The slashings slice is stored as the algebraic difference between the target and the source state, `t.slashings - s.slashings`. Thus the data is read as a sequence of 8-byte, little-endian serialized `int64` values. When applying this diff to a source state, we add these numbers to the source state's slashings. This way, the numbers are kept small and they compress better with Snappy.
+
+##### Pending Attestations
+
+Pending attestations are only present in Phase 0 states. So the paths to deserialize them (both for *previous and current epoch attestations*) are only executed when the target state is a Phase 0 state (notice that this implies that the source state must have been a Phase 0 state as well).
+
+For both of these slices we first store the length in the first 8 bytes. Then we loop over the remaining bytes, deserializing each pending attestation. Each of them is of variable size and is deserialized as follows: the first 8 bytes contain the attestation aggregation bits length. The next bytes (how many is determined by the aggregation bits length) encode the aggregation bits. The next 128 bytes are the SSZ-encoded attestation data. Finally, the inclusion delay and the proposer index are serialized as 8-byte `uint64` values.
+
+##### Previous and Current epoch participation
+
+These slices exist post-Altair. They are serialized as follows: the first 8 bytes contain the length, and the remaining bytes (as indicated by the length) are stored directly as a byte slice.
+
+##### Justification Bits
+
+These are stored as a single byte; the source state's value is always overridden with this byte stored in the `hdiff` structure.
+
+##### Finalized and Previous/Current justified Checkpoints
+
+These are stored as SSZ-serialized checkpoints.
+
+##### Inactivity Scores
+
+The first 8 bytes contain the little-endian encoded length, and the remaining bytes contain the `uint64`-serialized slice.
+
+##### Current and Next Sync committees
+
+If the first byte is 0, then the sync committee is set to be nil (and therefore the source's sync committee is not changed). Otherwise the remaining bytes contain the SSZ-serialized sync committee.
+
+##### Execution Payload Header
+
+This is serialized exactly like the sync committees. Notice that the implementation of `readExecutionPayloadHeader` is more involved because the SSZ serialization of the header depends on the state's version.
+
+##### Withdrawal Indices
+
+The fields `nextWithdrawalIndex` and `nextWithdrawalValidatorIndex` are treated just like the `Slot` field.
+
+##### Historical Summaries
+
+The first 8 bytes store the length of the list, and the remaining bytes are stored as SSZ serializations of the summary entries. This slice is **appended** to the source state's historical summaries.
+
+##### Electra requests indices
+
+The fields `depositRequestsStartIndex`, `depositBalanceToConsume`, `exitBalanceToConsume`, `earliestExitEpoch`, `consolidationBalanceToConsume` and `earliestConsolidationEpoch` are stored like the `Slot` field.
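+
+As an illustration of the fixed-size read pattern used for these `uint64`-like fields (and of the pointer-advance convention described in the deserialization section above), here is a minimal sketch using `encoding/binary` and `errors`; `readUint64` is a hypothetical helper name, not the one used in the package:
+
+```go
+// readUint64 consumes the first 8 bytes of *data as a little-endian uint64
+// and advances *data past them, mirroring how the version, the slot and the
+// Electra indices are read.
+func readUint64(data *[]byte) (uint64, error) {
+	if len(*data) < 8 {
+		return 0, errors.New("not enough bytes")
+	}
+	v := binary.LittleEndian.Uint64((*data)[:8])
+	*data = (*data)[8:]
+	return v, nil
+}
+```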
+
+##### Pending Deposits
+
+The first 8 bytes store the `pendingDepositIndex`, and the next 8 bytes store the length of the pending deposit diff slice. The remaining bytes store a slice of SSZ-serialized `PendingDeposit` objects.
+
+This diff slice is different from the others: we store the extra index `pendingDepositIndex` in the `hdiff` structure, which is used as follows. This index indicates how many pending deposits need to be dropped from the source state. The diff slice is then added to the end of the source state's pending deposits. The rationale for this serialization algorithm is that, when taking the diff of two close-enough states, the pending deposit queue may be very large. Between the source and the target, the first few deposits may have already been consumed, but the remaining large majority would still be there in the target. The target state may also have some extra deposits added at the end.
+
+Similarly, when computing the diff between the source and the target state, we need to find the index of the first deposit in common. We use the [Knuth-Morris-Pratt](https://en.wikipedia.org/wiki/Knuth%E2%80%93Morris%E2%80%93Pratt_algorithm) algorithm to find it.
+
+Suppose that the source pending deposits are
+
+```
+[A, B, C, D, E, F, G, H]
+```
+
+And the target pending deposits are
+```
+[C, D, E, F, G, H, I, J, K]
+```
+
+Then we will store `pendingDepositIndex = 2` and the diff slice will be
+```
+[I, J, K]
+```
+
+##### Pending Partial Withdrawals
+
+This field is treated exactly like the pending deposits.
+
+##### Pending Consolidations
+
+This field is treated exactly like the pending deposits.
+
+##### Proposer Lookahead
+
+The proposer lookahead is stored as the SSZ-serialized version of the field. It always overrides the source's field.
+
+#### Applying a diff
+
+The exported function
+
+```go
+func ApplyDiff(ctx context.Context, source state.BeaconState, diff HdiffBytes) (state.BeaconState, error)
+```
+
+takes care of applying the diff. It first calls `newHdiff` to convert the raw bytes in `diff` into an internal `hdiff` structure, and then it modifies the `source` state as explained above, returning the modified state.
+
+#### Computing a Diff
+
+The exported function
+```go
+func Diff(source, target state.ReadOnlyBeaconState) (HdiffBytes, error)
+```
+takes two states and returns the corresponding diff bytes. This function calls `diffInternal`, which in turn calls `diffToState`, `diffToVals` and `diffToBalances`, each returning the corresponding component of an internal `hdiff` structure. Then we call `serialize()` on the corresponding `hdiff` structure. The function `serialize` constructs the `data` byte slice as described above in the [Deserializing with `newHdiff`](#deserializing-with-newhdiff) section, and finally it calls `snappy.Encode()` on each of the three slices.
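+
+As a usage sketch of this exported API (assuming the import path `github.com/OffchainLabs/prysm/v6/consensus-types/hdiff`, and with error handling kept minimal), a caller such as the database layer could round-trip a diff as follows:
+
+```go
+import (
+	"context"
+
+	"github.com/OffchainLabs/prysm/v6/beacon-chain/state"
+	"github.com/OffchainLabs/prysm/v6/consensus-types/hdiff"
+)
+
+// roundTrip serializes the difference between source and target, then
+// rebuilds the target by applying the diff back onto the source.
+func roundTrip(ctx context.Context, source state.BeaconState, target state.ReadOnlyBeaconState) (state.BeaconState, error) {
+	diffBytes, err := hdiff.Diff(source, target)
+	if err != nil {
+		return nil, err
+	}
+	// The three slices in diffBytes (state, validators, balances) can be
+	// persisted independently and fetched back later by the database.
+	return hdiff.ApplyDiff(ctx, source, diffBytes)
+}
+```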
diff --git a/consensus-types/hdiff/state_diff_test.go b/consensus-types/hdiff/state_diff_test.go new file mode 100644 index 0000000000..c556354d73 --- /dev/null +++ b/consensus-types/hdiff/state_diff_test.go @@ -0,0 +1,1286 @@ +package hdiff + +import ( + "bytes" + "encoding/binary" + "flag" + "fmt" + "os" + "testing" + + "github.com/OffchainLabs/prysm/v6/beacon-chain/core/transition" + "github.com/OffchainLabs/prysm/v6/beacon-chain/state" + state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" + fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" + "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" + ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v6/runtime/version" + "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/golang/snappy" + "github.com/pkg/errors" +) + +var sourceFile = flag.String("source", "", "Path to the source file") +var targetFile = flag.String("target", "", "Path to the target file") + +func TestMain(m *testing.M) { + flag.Parse() + os.Exit(m.Run()) +} + +func Test_diffToState(t *testing.T) { + source, _ := util.DeterministicGenesisStateElectra(t, 256) + target := source.Copy() + require.NoError(t, target.SetSlot(source.Slot()+1)) + hdiff, err := diffToState(source, target) + require.NoError(t, err) + require.Equal(t, hdiff.slot, target.Slot()) + require.Equal(t, hdiff.targetVersion, target.Version()) +} + +func Test_kmpIndex(t *testing.T) { + intSlice := make([]*int, 10) + for i := 0; i < len(intSlice); i++ { + intSlice[i] = new(int) + *intSlice[i] = i + } + integerEquals := func(a, b *int) bool { + if a == nil && b == nil { + return true + } + if a == nil || b == nil { + return false + } + return *a == *b + } + t.Run("integer entries match", func(t *testing.T) { + source := []*int{intSlice[0], intSlice[1], intSlice[2], intSlice[3], intSlice[4]} + target := []*int{intSlice[2], intSlice[3], intSlice[4], intSlice[5], intSlice[6], intSlice[7], nil} + target = append(target, source...) + require.Equal(t, 2, kmpIndex(len(source), target, integerEquals)) + }) + t.Run("integer entries skipped", func(t *testing.T) { + source := []*int{intSlice[0], intSlice[1], intSlice[2], intSlice[3], intSlice[4]} + target := []*int{intSlice[2], intSlice[3], intSlice[4], intSlice[0], intSlice[5], nil} + target = append(target, source...) + require.Equal(t, 2, kmpIndex(len(source), target, integerEquals)) + }) + t.Run("integer entries repetitions", func(t *testing.T) { + source := []*int{intSlice[0], intSlice[1], intSlice[0], intSlice[0], intSlice[0]} + target := []*int{intSlice[0], intSlice[0], intSlice[1], intSlice[2], intSlice[5], nil} + target = append(target, source...) + require.Equal(t, 3, kmpIndex(len(source), target, integerEquals)) + }) + t.Run("integer entries no match", func(t *testing.T) { + source := []*int{intSlice[0], intSlice[1], intSlice[2], intSlice[3]} + target := []*int{intSlice[4], intSlice[5], intSlice[6], nil} + target = append(target, source...) 
+ require.Equal(t, len(source), kmpIndex(len(source), target, integerEquals)) + }) + +} + +func TestApplyDiff(t *testing.T) { + source, keys := util.DeterministicGenesisStateElectra(t, 256) + blk, err := util.GenerateFullBlockElectra(source, keys, util.DefaultBlockGenConfig(), 1) + require.NoError(t, err) + wsb, err := blocks.NewSignedBeaconBlock(blk) + require.NoError(t, err) + ctx := t.Context() + target, err := transition.ExecuteStateTransition(ctx, source, wsb) + require.NoError(t, err) + + // Add non-trivial eth1Data, regression check + depositRoot := make([]byte, fieldparams.RootLength) + for i := range depositRoot { + depositRoot[i] = byte(i + 42) + } + blockHash := make([]byte, fieldparams.RootLength) + for i := range blockHash { + blockHash[i] = byte(i + 100) + } + require.NoError(t, target.SetEth1Data(ðpb.Eth1Data{ + DepositRoot: depositRoot, + DepositCount: 99999, + BlockHash: blockHash, + })) + + hdiff, err := Diff(source, target) + require.NoError(t, err) + source, err = ApplyDiff(ctx, source, hdiff) + require.NoError(t, err) + require.DeepEqual(t, source, target) +} + +func getMainnetStates() (state.BeaconState, state.BeaconState, error) { + sourceBytes, err := os.ReadFile(*sourceFile) + if err != nil { + return nil, nil, errors.Wrap(err, "failed to read source file") + } + targetBytes, err := os.ReadFile(*targetFile) + if err != nil { + return nil, nil, errors.Wrap(err, "failed to read target file") + } + sourceProto := ðpb.BeaconStateDeneb{} + if err := sourceProto.UnmarshalSSZ(sourceBytes); err != nil { + return nil, nil, errors.Wrap(err, "failed to unmarshal source proto") + } + source, err := state_native.InitializeFromProtoDeneb(sourceProto) + if err != nil { + return nil, nil, errors.Wrap(err, "failed to initialize source state") + } + targetProto := ðpb.BeaconStateElectra{} + if err := targetProto.UnmarshalSSZ(targetBytes); err != nil { + return nil, nil, errors.Wrap(err, "failed to unmarshal target proto") + } + target, err := state_native.InitializeFromProtoElectra(targetProto) + if err != nil { + return nil, nil, errors.Wrap(err, "failed to initialize target state") + } + return source, target, nil +} + +func TestApplyDiffMainnet(t *testing.T) { + if *sourceFile == "" || *targetFile == "" { + t.Skip("source and target files not provided") + } + source, target, err := getMainnetStates() + require.NoError(t, err) + hdiff, err := Diff(source, target) + require.NoError(t, err) + source, err = ApplyDiff(t.Context(), source, hdiff) + require.NoError(t, err) + sourceSSZ, err := source.MarshalSSZ() + require.NoError(t, err) + targetSSZ, err := target.MarshalSSZ() + require.NoError(t, err) + require.DeepEqual(t, sourceSSZ, targetSSZ) + sVals := source.Validators() + tVals := target.Validators() + require.Equal(t, len(sVals), len(tVals)) + for i, v := range sVals { + require.Equal(t, true, bytes.Equal(v.PublicKey, tVals[i].PublicKey)) + require.Equal(t, true, bytes.Equal(v.WithdrawalCredentials, tVals[i].WithdrawalCredentials)) + require.Equal(t, v.EffectiveBalance, tVals[i].EffectiveBalance) + require.Equal(t, v.Slashed, tVals[i].Slashed) + require.Equal(t, v.ActivationEligibilityEpoch, tVals[i].ActivationEligibilityEpoch) + require.Equal(t, v.ActivationEpoch, tVals[i].ActivationEpoch) + require.Equal(t, v.ExitEpoch, tVals[i].ExitEpoch) + require.Equal(t, v.WithdrawableEpoch, tVals[i].WithdrawableEpoch) + } +} + +// Test_newHdiff tests the newHdiff function that deserializes HdiffBytes into hdiff struct +func Test_newHdiff(t *testing.T) { + source, _ := 
util.DeterministicGenesisStateElectra(t, 32) + target := source.Copy() + require.NoError(t, target.SetSlot(source.Slot()+1)) + + // Create a valid diff + diffBytes, err := Diff(source, target) + require.NoError(t, err) + + // Test successful deserialization + hdiff, err := newHdiff(diffBytes) + require.NoError(t, err) + require.NotNil(t, hdiff) + require.NotNil(t, hdiff.stateDiff) + require.NotNil(t, hdiff.validatorDiffs) + require.NotNil(t, hdiff.balancesDiff) + require.Equal(t, target.Slot(), hdiff.stateDiff.slot) + + // Test with invalid state diff data + invalidDiff := HdiffBytes{ + StateDiff: []byte{0x01, 0x02}, // too small + ValidatorDiffs: diffBytes.ValidatorDiffs, + BalancesDiff: diffBytes.BalancesDiff, + } + _, err = newHdiff(invalidDiff) + require.ErrorContains(t, "failed to create state diff", err) + + // Test with invalid validator diff data + invalidDiff = HdiffBytes{ + StateDiff: diffBytes.StateDiff, + ValidatorDiffs: []byte{0x01, 0x02}, // too small + BalancesDiff: diffBytes.BalancesDiff, + } + _, err = newHdiff(invalidDiff) + require.ErrorContains(t, "failed to create validator diffs", err) + + // Test with invalid balances diff data + invalidDiff = HdiffBytes{ + StateDiff: diffBytes.StateDiff, + ValidatorDiffs: diffBytes.ValidatorDiffs, + BalancesDiff: []byte{0x01, 0x02}, // too small + } + _, err = newHdiff(invalidDiff) + require.ErrorContains(t, "failed to create balances diff", err) +} + +// Test_diffInternal tests the internal diff computation logic +func Test_diffInternal(t *testing.T) { + source, keys := util.DeterministicGenesisStateFulu(t, 32) + target := source.Copy() + + t.Run("same state", func(t *testing.T) { + hdiff, err := diffInternal(source, source) + require.NoError(t, err) + require.NotNil(t, hdiff) + require.Equal(t, 0, len(hdiff.validatorDiffs)) + // Balance diff should have same length as validators but all zeros + require.Equal(t, len(source.Balances()), len(hdiff.balancesDiff)) + for _, diff := range hdiff.balancesDiff { + require.Equal(t, int64(0), diff) + } + }) + + t.Run("slot change", func(t *testing.T) { + require.NoError(t, target.SetSlot(source.Slot()+5)) + hdiff, err := diffInternal(source, target) + require.NoError(t, err) + require.NotNil(t, hdiff) + require.Equal(t, target.Slot(), hdiff.stateDiff.slot) + require.Equal(t, target.Version(), hdiff.stateDiff.targetVersion) + }) + + t.Run("lookahead change", func(t *testing.T) { + proposerLookahead, err := source.ProposerLookahead() + require.NoError(t, err) + proposerLookahead[0] = proposerLookahead[0] + 1 + require.NoError(t, target.SetProposerLookahead(proposerLookahead)) + hdiff, err := diffInternal(source, target) + require.NoError(t, err) + require.NotNil(t, hdiff) + require.Equal(t, len(proposerLookahead), len(hdiff.stateDiff.proposerLookahead)) + for i, v := range proposerLookahead { + require.Equal(t, uint64(v), hdiff.stateDiff.proposerLookahead[i]) + } + }) + + t.Run("with block transition", func(t *testing.T) { + blk, err := util.GenerateFullBlockFulu(source, keys, util.DefaultBlockGenConfig(), 1) + require.NoError(t, err) + wsb, err := blocks.NewSignedBeaconBlock(blk) + require.NoError(t, err) + ctx := t.Context() + target, err := transition.ExecuteStateTransition(ctx, source, wsb) + require.NoError(t, err) + + hdiff, err := diffInternal(source, target) + require.NoError(t, err) + require.NotNil(t, hdiff) + require.Equal(t, target.Slot(), hdiff.stateDiff.slot) + require.Equal(t, target.Version(), hdiff.stateDiff.targetVersion) + }) +} + +// Test_validatorsEqual tests the validator 
comparison function +func Test_validatorsEqual(t *testing.T) { + source, _ := util.DeterministicGenesisStateElectra(t, 32) + + t.Run("nil validators", func(t *testing.T) { + require.Equal(t, true, validatorsEqual(nil, nil)) + }) + + // Create two different states to test validator comparison + target := source.Copy() + targetVals := target.Validators() + modifiedVal := &ethpb.Validator{ + PublicKey: targetVals[0].PublicKey, + WithdrawalCredentials: targetVals[0].WithdrawalCredentials, + EffectiveBalance: targetVals[0].EffectiveBalance, + Slashed: targetVals[0].Slashed, + ActivationEligibilityEpoch: targetVals[0].ActivationEligibilityEpoch, + ActivationEpoch: targetVals[0].ActivationEpoch, + ExitEpoch: targetVals[0].ExitEpoch, + WithdrawableEpoch: targetVals[0].WithdrawableEpoch, + } + modifiedVal.Slashed = !targetVals[0].Slashed + targetVals[0] = modifiedVal + require.NoError(t, target.SetValidators(targetVals)) + + // Test that different validators are detected as different + sourceDiffs, err := diffToVals(source, target) + require.NoError(t, err) + require.NotEqual(t, 0, len(sourceDiffs), "Should detect validator differences") +} + +// Test_updateToVersion tests the version upgrade functionality +func Test_updateToVersion(t *testing.T) { + ctx := t.Context() + + t.Run("no upgrade needed", func(t *testing.T) { + source, _ := util.DeterministicGenesisStateFulu(t, 32) + targetVersion := source.Version() + + result, err := updateToVersion(ctx, source, targetVersion) + require.NoError(t, err) + require.Equal(t, targetVersion, result.Version()) + require.Equal(t, source.Slot(), result.Slot()) + }) + t.Run("upgrade to Fulu", func(t *testing.T) { + source, _ := util.DeterministicGenesisStateElectra(t, 32) + targetVersion := version.Fulu + + result, err := updateToVersion(ctx, source, targetVersion) + require.NoError(t, err) + require.Equal(t, targetVersion, result.Version()) + require.Equal(t, source.Slot(), result.Slot()) + lookahead, err := result.ProposerLookahead() + require.NoError(t, err) + require.Equal(t, 2*fieldparams.SlotsPerEpoch, len(lookahead)) + }) +} + +func TestApplyDiffMainnetComplete(t *testing.T) { + if *sourceFile == "" || *targetFile == "" { + t.Skip("source and target files not provided") + } + source, target, err := getMainnetStates() + require.NoError(t, err) + hdiff, err := Diff(source, target) + require.NoError(t, err) + source, err = ApplyDiff(t.Context(), source, hdiff) + require.NoError(t, err) + + sBals := source.Balances() + tBals := target.Balances() + require.Equal(t, len(sBals), len(tBals)) + for i, v := range sBals { + require.Equal(t, v, tBals[i], "i: %d", i) + } + + sourceSSZ, err := source.MarshalSSZ() + require.NoError(t, err) + targetSSZ, err := target.MarshalSSZ() + require.NoError(t, err) + require.Equal(t, true, bytes.Equal(sourceSSZ, targetSSZ)) +} + +// Test_diffToVals tests validator diff computation +func Test_diffToVals(t *testing.T) { + source, _ := util.DeterministicGenesisStateElectra(t, 32) + target := source.Copy() + + t.Run("no validator changes", func(t *testing.T) { + diffs, err := diffToVals(source, target) + require.NoError(t, err) + require.Equal(t, 0, len(diffs)) + }) + + t.Run("validator slashed", func(t *testing.T) { + vals := target.Validators() + modifiedVal := &ethpb.Validator{ + PublicKey: vals[0].PublicKey, + WithdrawalCredentials: vals[0].WithdrawalCredentials, + EffectiveBalance: vals[0].EffectiveBalance, + Slashed: vals[0].Slashed, + ActivationEligibilityEpoch: vals[0].ActivationEligibilityEpoch, + ActivationEpoch:
vals[0].ActivationEpoch, + ExitEpoch: vals[0].ExitEpoch, + WithdrawableEpoch: vals[0].WithdrawableEpoch, + } + modifiedVal.Slashed = true + vals[0] = modifiedVal + require.NoError(t, target.SetValidators(vals)) + + diffs, err := diffToVals(source, target) + require.NoError(t, err) + require.Equal(t, 1, len(diffs)) + require.Equal(t, uint32(0), diffs[0].index) + require.Equal(t, true, diffs[0].Slashed) + }) + + t.Run("validator effective balance changed", func(t *testing.T) { + vals := target.Validators() + modifiedVal := &ethpb.Validator{ + PublicKey: vals[1].PublicKey, + WithdrawalCredentials: vals[1].WithdrawalCredentials, + EffectiveBalance: vals[1].EffectiveBalance, + Slashed: vals[1].Slashed, + ActivationEligibilityEpoch: vals[1].ActivationEligibilityEpoch, + ActivationEpoch: vals[1].ActivationEpoch, + ExitEpoch: vals[1].ExitEpoch, + WithdrawableEpoch: vals[1].WithdrawableEpoch, + } + modifiedVal.EffectiveBalance = vals[1].EffectiveBalance + 1000 + vals[1] = modifiedVal + require.NoError(t, target.SetValidators(vals)) + + diffs, err := diffToVals(source, target) + require.NoError(t, err) + found := false + for _, diff := range diffs { + if diff.index == 1 { + require.Equal(t, modifiedVal.EffectiveBalance, diff.EffectiveBalance) + found = true + break + } + } + require.Equal(t, true, found) + }) +} + +// Test_newValidatorDiffs tests validator diff deserialization +func Test_newValidatorDiffs(t *testing.T) { + source, _ := util.DeterministicGenesisStateElectra(t, 32) + target := source.Copy() + + // Modify a validator to create diffs + vals := target.Validators() + modifiedVal := &ethpb.Validator{ + PublicKey: vals[0].PublicKey, + WithdrawalCredentials: vals[0].WithdrawalCredentials, + EffectiveBalance: vals[0].EffectiveBalance, + Slashed: vals[0].Slashed, + ActivationEligibilityEpoch: vals[0].ActivationEligibilityEpoch, + ActivationEpoch: vals[0].ActivationEpoch, + ExitEpoch: vals[0].ExitEpoch, + WithdrawableEpoch: vals[0].WithdrawableEpoch, + } + modifiedVal.Slashed = true + vals[0] = modifiedVal + require.NoError(t, target.SetValidators(vals)) + + // Create diff and serialize + originalDiffs, err := diffToVals(source, target) + require.NoError(t, err) + + hdiffBytes, err := Diff(source, target) + require.NoError(t, err) + + // Test deserialization + deserializedDiffs, err := newValidatorDiffs(hdiffBytes.ValidatorDiffs) + require.NoError(t, err) + require.Equal(t, len(originalDiffs), len(deserializedDiffs)) + + if len(originalDiffs) > 0 { + require.Equal(t, originalDiffs[0].index, deserializedDiffs[0].index) + require.Equal(t, originalDiffs[0].Slashed, deserializedDiffs[0].Slashed) + } + + // Test with invalid data + _, err = newValidatorDiffs([]byte{0x01, 0x02}) + require.NotNil(t, err) +} + +// Test_applyValidatorDiff tests applying validator changes to state +func Test_applyValidatorDiff(t *testing.T) { + source, _ := util.DeterministicGenesisStateElectra(t, 32) + target := source.Copy() + + // Modify validators in target + vals := target.Validators() + modifiedVal := &ethpb.Validator{ + PublicKey: vals[0].PublicKey, + WithdrawalCredentials: vals[0].WithdrawalCredentials, + EffectiveBalance: vals[0].EffectiveBalance, + Slashed: vals[0].Slashed, + ActivationEligibilityEpoch: vals[0].ActivationEligibilityEpoch, + ActivationEpoch: vals[0].ActivationEpoch, + ExitEpoch: vals[0].ExitEpoch, + WithdrawableEpoch: vals[0].WithdrawableEpoch, + } + modifiedVal.Slashed = true + modifiedVal.EffectiveBalance = vals[0].EffectiveBalance + 1000 + vals[0] = modifiedVal + require.NoError(t,
target.SetValidators(vals)) + + // Create validator diffs + diffs, err := diffToVals(source, target) + require.NoError(t, err) + + // Apply diffs to source + result, err := applyValidatorDiff(source, diffs) + require.NoError(t, err) + + // Verify result matches target + resultVals := result.Validators() + targetVals := target.Validators() + require.Equal(t, len(targetVals), len(resultVals)) + + for i, val := range resultVals { + require.Equal(t, targetVals[i].Slashed, val.Slashed) + require.Equal(t, targetVals[i].EffectiveBalance, val.EffectiveBalance) + } +} + +// Test_diffToBalances tests balance diff computation +func Test_diffToBalances(t *testing.T) { + source, _ := util.DeterministicGenesisStateElectra(t, 32) + target := source.Copy() + + t.Run("no balance changes", func(t *testing.T) { + diffs, err := diffToBalances(source, target) + require.NoError(t, err) + // Balance diff should have same length as validators but all zeros + require.Equal(t, len(source.Balances()), len(diffs)) + for _, diff := range diffs { + require.Equal(t, int64(0), diff) + } + }) + + t.Run("balance changes", func(t *testing.T) { + bals := target.Balances() + bals[0] += 1000 + bals[1] -= 500 + bals[5] += 2000 + require.NoError(t, target.SetBalances(bals)) + + diffs, err := diffToBalances(source, target) + require.NoError(t, err) + + // Should have diffs for changed balances only + require.NotEqual(t, 0, len(diffs)) + + // Apply diffs to verify correctness + sourceBals := source.Balances() + for i, diff := range diffs { + if diff != 0 { + sourceBals[i] += uint64(diff) + } + } + + targetBals := target.Balances() + for i := 0; i < len(sourceBals); i++ { + require.Equal(t, targetBals[i], sourceBals[i], "balance mismatch at index %d", i) + } + }) +} + +// Test_newBalancesDiff tests balance diff deserialization +func Test_newBalancesDiff(t *testing.T) { + source, _ := util.DeterministicGenesisStateElectra(t, 32) + target := source.Copy() + + // Modify balances to create diffs + bals := target.Balances() + bals[0] += 1000 + bals[1] -= 500 + require.NoError(t, target.SetBalances(bals)) + + // Create diff and serialize + originalDiffs, err := diffToBalances(source, target) + require.NoError(t, err) + + hdiffBytes, err := Diff(source, target) + require.NoError(t, err) + + // Test deserialization + deserializedDiffs, err := newBalancesDiff(hdiffBytes.BalancesDiff) + require.NoError(t, err) + require.Equal(t, len(originalDiffs), len(deserializedDiffs)) + + for i, diff := range originalDiffs { + require.Equal(t, diff, deserializedDiffs[i]) + } + + // Test with invalid data + _, err = newBalancesDiff([]byte{0x01, 0x02}) + require.NotNil(t, err) +} + +// Test_applyBalancesDiff tests applying balance changes to state +func Test_applyBalancesDiff(t *testing.T) { + source, _ := util.DeterministicGenesisStateElectra(t, 32) + target := source.Copy() + + // Modify balances in target + bals := target.Balances() + bals[0] += 1000 + bals[1] -= 500 + bals[5] += 2000 + require.NoError(t, target.SetBalances(bals)) + + // Create balance diffs + diffs, err := diffToBalances(source, target) + require.NoError(t, err) + + // Apply diffs to source + result, err := applyBalancesDiff(source, diffs) + require.NoError(t, err) + + // Verify result matches target + resultBals := result.Balances() + targetBals := target.Balances() + require.Equal(t, len(targetBals), len(resultBals)) + + for i, bal := range resultBals { + require.Equal(t, targetBals[i], bal, "balance mismatch at index %d", i) + } +} + +// Test_newStateDiff tests state diff 
deserialization +func Test_newStateDiff(t *testing.T) { + source, _ := util.DeterministicGenesisStateElectra(t, 32) + target := source.Copy() + require.NoError(t, target.SetSlot(source.Slot()+5)) + + // Create diff and serialize + hdiffBytes, err := Diff(source, target) + require.NoError(t, err) + + // Test successful deserialization + stateDiff, err := newStateDiff(hdiffBytes.StateDiff) + require.NoError(t, err) + require.NotNil(t, stateDiff) + require.Equal(t, target.Slot(), stateDiff.slot) + require.Equal(t, target.Version(), stateDiff.targetVersion) + + // Test with invalid data (too small) + _, err = newStateDiff([]byte{0x01, 0x02}) + require.ErrorContains(t, "failed to decode snappy", err) + + // Test with valid snappy data but insufficient content (need 8 bytes for targetVersion) + insuffData := []byte{0x01, 0x02, 0x03, 0x04} // only 4 bytes + validSnappyButInsufficientData := snappy.Encode(nil, insuffData) + _, err = newStateDiff(validSnappyButInsufficientData) + require.ErrorContains(t, "data is too small", err) +} + +// Test_applyStateDiff tests applying state changes +func Test_applyStateDiff(t *testing.T) { + ctx := t.Context() + source, _ := util.DeterministicGenesisStateElectra(t, 32) + target := source.Copy() + + // Modify target state + require.NoError(t, target.SetSlot(source.Slot()+5)) + + // Create state diff + stateDiff, err := diffToState(source, target) + require.NoError(t, err) + + // Apply diff to source + result, err := applyStateDiff(ctx, source, stateDiff) + require.NoError(t, err) + + // Verify result matches target + require.Equal(t, target.Slot(), result.Slot()) + require.Equal(t, target.Version(), result.Version()) +} + +// Test_computeLPS tests the LPS array computation for KMP algorithm +func Test_computeLPS(t *testing.T) { + intSlice := make([]*int, 10) + for i := 0; i < len(intSlice); i++ { + intSlice[i] = new(int) + *intSlice[i] = i + } + integerEquals := func(a, b *int) bool { + if a == nil && b == nil { + return true + } + if a == nil || b == nil { + return false + } + return *a == *b + } + + t.Run("simple pattern", func(t *testing.T) { + pattern := []*int{intSlice[0], intSlice[1], intSlice[0]} + lps := computeLPS(pattern, integerEquals) + expected := []int{0, 0, 1} + require.Equal(t, len(expected), len(lps)) + for i, exp := range expected { + require.Equal(t, exp, lps[i]) + } + }) + + t.Run("repeating pattern", func(t *testing.T) { + pattern := []*int{intSlice[0], intSlice[0], intSlice[0]} + lps := computeLPS(pattern, integerEquals) + expected := []int{0, 1, 2} + require.Equal(t, len(expected), len(lps)) + for i, exp := range expected { + require.Equal(t, exp, lps[i]) + } + }) + + t.Run("complex pattern", func(t *testing.T) { + pattern := []*int{intSlice[0], intSlice[1], intSlice[0], intSlice[1], intSlice[0]} + lps := computeLPS(pattern, integerEquals) + expected := []int{0, 0, 1, 2, 3} + require.Equal(t, len(expected), len(lps)) + for i, exp := range expected { + require.Equal(t, exp, lps[i]) + } + }) + + t.Run("no repetition", func(t *testing.T) { + pattern := []*int{intSlice[0], intSlice[1], intSlice[2], intSlice[3]} + lps := computeLPS(pattern, integerEquals) + expected := []int{0, 0, 0, 0} + require.Equal(t, len(expected), len(lps)) + for i, exp := range expected { + require.Equal(t, exp, lps[i]) + } + }) +} + +// Test field-specific diff functions +func Test_diffJustificationBits(t *testing.T) { + source, _ := util.DeterministicGenesisStateElectra(t, 32) + + // Test justification bits extraction + bits := diffJustificationBits(source) + 
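// Illustrative sketch (editor's addition, not part of this patch): Test_computeLPS above exercises
// the classic KMP "failure function", where lps[i] is the length of the longest proper prefix of
// pattern[:i+1] that is also a suffix of it. A minimal version, assuming only an equals callback
// like the one shown in the test:
//
//	func computeLPSSketch[T any](pattern []T, equals func(a, b T) bool) []int {
//		lps := make([]int, len(pattern))
//		length := 0 // length of the current matched prefix-suffix
//		for i := 1; i < len(pattern); {
//			switch {
//			case equals(pattern[i], pattern[length]):
//				length++
//				lps[i] = length
//				i++
//			case length != 0:
//				length = lps[length-1] // fall back to the next shorter border
//			default:
//				lps[i] = 0
//				i++
//			}
//		}
//		return lps
//	}
//
// For the "complex pattern" case {0, 1, 0, 1, 0} this yields {0, 0, 1, 2, 3}, matching the expected
// values asserted above.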
sourceBits := source.JustificationBits() + require.Equal(t, sourceBits[0], bits) +} + +func Test_diffBlockRoots(t *testing.T) { + source, _ := util.DeterministicGenesisStateElectra(t, 32) + target := source.Copy() + + // Modify block roots in target + blockRoots := target.BlockRoots() + copy(blockRoots[0], []byte{0x01, 0x02, 0x03}) + copy(blockRoots[1], []byte{0x04, 0x05, 0x06}) + require.NoError(t, target.SetBlockRoots(blockRoots)) + + // Create diff + diff := &stateDiff{} + diffBlockRoots(diff, source, target) + + // Verify diff contains changes + require.NotEqual(t, [32]byte{}, diff.blockRoots[0]) + require.NotEqual(t, [32]byte{}, diff.blockRoots[1]) +} + +func Test_diffStateRoots(t *testing.T) { + source, _ := util.DeterministicGenesisStateElectra(t, 32) + target := source.Copy() + + // Modify state roots in target + stateRoots := target.StateRoots() + copy(stateRoots[0], []byte{0x01, 0x02, 0x03}) + copy(stateRoots[1], []byte{0x04, 0x05, 0x06}) + require.NoError(t, target.SetStateRoots(stateRoots)) + + // Create diff + diff := &stateDiff{} + diffStateRoots(diff, source, target) + + // Verify diff contains changes + require.NotEqual(t, [32]byte{}, diff.stateRoots[0]) + require.NotEqual(t, [32]byte{}, diff.stateRoots[1]) +} + +func Test_shouldAppendEth1DataVotes(t *testing.T) { + // Test empty votes + root1 := make([]byte, 32) + root1[0] = 0x01 + require.Equal(t, true, shouldAppendEth1DataVotes([]*ethpb.Eth1Data{}, []*ethpb.Eth1Data{{BlockHash: root1}})) + + // Test appending to existing votes + root2 := make([]byte, 32) + root2[0] = 0x02 + sourceVotes := []*ethpb.Eth1Data{{BlockHash: root1}} + targetVotes := []*ethpb.Eth1Data{{BlockHash: root1}, {BlockHash: root2}} + require.Equal(t, true, shouldAppendEth1DataVotes(sourceVotes, targetVotes)) + + // Test complete replacement + root3 := make([]byte, 32) + root3[0] = 0x03 + sourceVotes = []*ethpb.Eth1Data{{BlockHash: root1}, {BlockHash: root2}} + targetVotes = []*ethpb.Eth1Data{{BlockHash: root3}} + require.Equal(t, false, shouldAppendEth1DataVotes(sourceVotes, targetVotes)) +} + +// Test key serialization methods +func Test_stateDiff_serialize(t *testing.T) { + source, _ := util.DeterministicGenesisStateElectra(t, 32) + target := source.Copy() + require.NoError(t, target.SetSlot(source.Slot()+5)) + + // Create state diff + stateDiff, err := diffToState(source, target) + require.NoError(t, err) + + // Serialize + serialized := stateDiff.serialize() + require.Equal(t, true, len(serialized) > 0) + + // Verify it can be deserialized back (need to compress with snappy first) + compressed := snappy.Encode(nil, serialized) + deserializedDiff, err := newStateDiff(compressed) + require.NoError(t, err) + require.Equal(t, stateDiff.slot, deserializedDiff.slot) + require.Equal(t, stateDiff.targetVersion, deserializedDiff.targetVersion) +} + +func Test_hdiff_serialize(t *testing.T) { + source, _ := util.DeterministicGenesisStateElectra(t, 32) + target := source.Copy() + require.NoError(t, target.SetSlot(source.Slot()+5)) + + // Create hdiff + hdiff, err := diffInternal(source, target) + require.NoError(t, err) + + // Serialize + serialized := hdiff.serialize() + require.Equal(t, true, len(serialized.StateDiff) > 0) + require.Equal(t, true, len(serialized.ValidatorDiffs) >= 0) + require.Equal(t, true, len(serialized.BalancesDiff) >= 0) + + // Verify it can be deserialized back + deserializedHdiff, err := newHdiff(serialized) + require.NoError(t, err) + require.Equal(t, hdiff.stateDiff.slot, deserializedHdiff.stateDiff.slot) + require.Equal(t, 
hdiff.stateDiff.targetVersion, deserializedHdiff.stateDiff.targetVersion) +} + +// Test some key read methods +func Test_readTargetVersion(t *testing.T) { + diff := &stateDiff{} + + // Test successful read + data := make([]byte, 8) + binary.LittleEndian.PutUint64(data, 5) + err := diff.readTargetVersion(&data) + require.NoError(t, err) + require.Equal(t, 5, diff.targetVersion) + require.Equal(t, 0, len(data)) + + // Test insufficient data + data = []byte{0x01, 0x02} + err = diff.readTargetVersion(&data) + require.ErrorContains(t, "targetVersion", err) +} + +func Test_readSlot(t *testing.T) { + diff := &stateDiff{} + + // Test successful read + data := make([]byte, 8) + binary.LittleEndian.PutUint64(data, 100) + err := diff.readSlot(&data) + require.NoError(t, err) + require.Equal(t, primitives.Slot(100), diff.slot) + require.Equal(t, 0, len(data)) + + // Test insufficient data + data = []byte{0x01, 0x02} + err = diff.readSlot(&data) + require.ErrorContains(t, "slot", err) +} + +// Test a sample apply method +func Test_applySlashingsDiff(t *testing.T) { + source, _ := util.DeterministicGenesisStateElectra(t, 32) + + // Create a diff with slashing changes + diff := &stateDiff{} + originalSlashings := source.Slashings() + diff.slashings[0] = 1000 // Algebraic diff + diff.slashings[1] = 500 // Algebraic diff (positive to avoid underflow) + + // Apply the diff + err := applySlashingsDiff(source, diff) + require.NoError(t, err) + + // Verify the changes were applied + resultSlashings := source.Slashings() + require.Equal(t, originalSlashings[0]+1000, resultSlashings[0]) + require.Equal(t, originalSlashings[1]+500, resultSlashings[1]) +} + +// Test readPendingAttestation utility +func Test_readPendingAttestation(t *testing.T) { + // Test insufficient data + data := []byte{0x01, 0x02} + _, err := readPendingAttestation(&data) + require.ErrorContains(t, "data is too small", err) +} + +// Test readEth1Data - regression test for bug where indices were off by 1 +func Test_readEth1Data(t *testing.T) { + diff := &stateDiff{} + + // Test nil marker + data := []byte{nilMarker} + err := diff.readEth1Data(&data) + require.NoError(t, err) + require.IsNil(t, diff.eth1Data) + require.Equal(t, 0, len(data)) + + // Test successful read with actual data + // Create test data: marker + depositRoot + depositCount + blockHash + depositRoot := make([]byte, fieldparams.RootLength) + for i := range depositRoot { + depositRoot[i] = byte(i % 256) + } + blockHash := make([]byte, fieldparams.RootLength) + for i := range blockHash { + blockHash[i] = byte((i + 100) % 256) + } + depositCount := uint64(12345) + + data = []byte{notNilMarker} + data = append(data, depositRoot...) + countBytes := make([]byte, 8) + binary.LittleEndian.PutUint64(countBytes, depositCount) + data = append(data, countBytes...) + data = append(data, blockHash...) 
+ + diff = &stateDiff{} + err = diff.readEth1Data(&data) + require.NoError(t, err) + require.NotNil(t, diff.eth1Data) + require.DeepEqual(t, depositRoot, diff.eth1Data.DepositRoot) + require.Equal(t, depositCount, diff.eth1Data.DepositCount) + require.DeepEqual(t, blockHash, diff.eth1Data.BlockHash) + require.Equal(t, 0, len(data)) + + // Test insufficient data for marker + data = []byte{} + diff = &stateDiff{} + err = diff.readEth1Data(&data) + require.ErrorContains(t, "eth1Data", err) + + // Test insufficient data after marker + data = []byte{notNilMarker} + diff = &stateDiff{} + err = diff.readEth1Data(&data) + require.ErrorContains(t, "eth1Data", err) +} + +func BenchmarkGetDiff(b *testing.B) { + if *sourceFile == "" || *targetFile == "" { + b.Skip("source and target files not provided") + } + source, target, err := getMainnetStates() + require.NoError(b, err) + + b.ResetTimer() + for i := 0; i < b.N; i++ { + hdiff, err := Diff(source, target) + b.Log("Diff size:", len(hdiff.StateDiff)+len(hdiff.BalancesDiff)+len(hdiff.ValidatorDiffs)) + require.NoError(b, err) + } +} + +func BenchmarkApplyDiff(b *testing.B) { + if *sourceFile == "" || *targetFile == "" { + b.Skip("source and target files not provided") + } + source, target, err := getMainnetStates() + require.NoError(b, err) + hdiff, err := Diff(source, target) + require.NoError(b, err) + b.ResetTimer() + for i := 0; i < b.N; i++ { + source, err = ApplyDiff(b.Context(), source, hdiff) + require.NoError(b, err) + } +} + +// BenchmarkDiffCreation measures the time to create diffs of various sizes +func BenchmarkDiffCreation(b *testing.B) { + sizes := []uint64{32, 64, 128, 256, 512, 1024} + + for _, size := range sizes { + b.Run(fmt.Sprintf("validators_%d", size), func(b *testing.B) { + source, _ := util.DeterministicGenesisStateElectra(b, size) + target := source.Copy() + _ = target.SetSlot(source.Slot() + 1) + + // Modify some validators + validators := target.Validators() + for i := 0; i < int(size/10); i++ { + if i < len(validators) { + validators[i].EffectiveBalance += 1000 + } + } + _ = target.SetValidators(validators) + + b.ResetTimer() + for i := 0; i < b.N; i++ { + _, err := Diff(source, target) + if err != nil { + b.Fatal(err) + } + } + }) + } +} + +// BenchmarkDiffApplication measures the time to apply diffs +func BenchmarkDiffApplication(b *testing.B) { + sizes := []uint64{32, 64, 128, 256, 512} + ctx := b.Context() + + for _, size := range sizes { + b.Run(fmt.Sprintf("validators_%d", size), func(b *testing.B) { + source, _ := util.DeterministicGenesisStateElectra(b, size) + target := source.Copy() + _ = target.SetSlot(source.Slot() + 10) + + // Create diff once + diff, err := Diff(source, target) + if err != nil { + b.Fatal(err) + } + + b.ResetTimer() + for i := 0; i < b.N; i++ { + // Need fresh source for each iteration + freshSource := source.Copy() + _, err := ApplyDiff(ctx, freshSource, diff) + if err != nil { + b.Fatal(err) + } + } + }) + } +} + +// BenchmarkSerialization measures serialization performance +func BenchmarkSerialization(b *testing.B) { + source, _ := util.DeterministicGenesisStateElectra(b, 256) + target := source.Copy() + _ = target.SetSlot(source.Slot() + 5) + + hdiff, err := diffInternal(source, target) + if err != nil { + b.Fatal(err) + } + + b.ResetTimer() + for i := 0; i < b.N; i++ { + _ = hdiff.serialize() + } +} + +// BenchmarkDeserialization measures deserialization performance +func BenchmarkDeserialization(b *testing.B) { + source, _ := util.DeterministicGenesisStateElectra(b, 256) + target := 
source.Copy() + _ = target.SetSlot(source.Slot() + 5) + + // Create serialized diff + diff, err := Diff(source, target) + if err != nil { + b.Fatal(err) + } + + b.ResetTimer() + for i := 0; i < b.N; i++ { + _, err := newHdiff(diff) + if err != nil { + b.Fatal(err) + } + } +} + +// BenchmarkBalanceDiff measures balance diff computation +func BenchmarkBalanceDiff(b *testing.B) { + sizes := []uint64{100, 500, 1000, 5000, 10000} + + for _, size := range sizes { + b.Run(fmt.Sprintf("balances_%d", size), func(b *testing.B) { + source, _ := util.DeterministicGenesisStateElectra(b, size) + target := source.Copy() + + // Modify all balances + balances := target.Balances() + for i := range balances { + balances[i] += uint64(i % 1000) + } + _ = target.SetBalances(balances) + + b.ResetTimer() + for i := 0; i < b.N; i++ { + _, err := diffToBalances(source, target) + if err != nil { + b.Fatal(err) + } + } + }) + } +} + +// BenchmarkValidatorDiff measures validator diff computation +func BenchmarkValidatorDiff(b *testing.B) { + sizes := []uint64{100, 500, 1000, 2000} + + for _, size := range sizes { + b.Run(fmt.Sprintf("validators_%d", size), func(b *testing.B) { + source, _ := util.DeterministicGenesisStateElectra(b, size) + target := source.Copy() + + // Modify some validators + validators := target.Validators() + for i := 0; i < int(size/10); i++ { + if i < len(validators) { + validators[i].EffectiveBalance += 1000 + validators[i].Slashed = true + } + } + _ = target.SetValidators(validators) + + b.ResetTimer() + for i := 0; i < b.N; i++ { + _, err := diffToVals(source, target) + if err != nil { + b.Fatal(err) + } + } + }) + } +} + +// BenchmarkKMPAlgorithm measures KMP performance with different pattern sizes +func BenchmarkKMPAlgorithm(b *testing.B) { + patternSizes := []int{10, 50, 100, 500} + textSizes := []int{100, 500, 1000, 5000} + + for _, pSize := range patternSizes { + for _, tSize := range textSizes { + if pSize > tSize { + continue + } + + b.Run(fmt.Sprintf("pattern_%d_text_%d", pSize, tSize), func(b *testing.B) { + // Create pattern and text + pattern := make([]*int, pSize) + for i := range pattern { + val := i % 10 + pattern[i] = &val + } + + text := make([]*int, tSize) + for i := range text { + val := i % 10 + text[i] = &val + } + + // Add pattern to end of text + text = append(text, pattern...) 
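// Illustrative sketch (editor's addition, not part of this patch): the benchmark above appends the
// pattern to the end of the text, so the kmpIndex call below is guaranteed to find a match; as used
// here, kmpIndex takes the pattern length and a single slice with the pattern appended at the end.
// A generic KMP search built on the failure function from the earlier sketch would look roughly like:
//
//	func kmpSearchSketch[T any](pattern, text []T, equals func(a, b T) bool) int {
//		if len(pattern) == 0 {
//			return 0
//		}
//		lps := computeLPSSketch(pattern, equals)
//		j := 0
//		for i := range text {
//			for j > 0 && !equals(text[i], pattern[j]) {
//				j = lps[j-1] // reuse the longest border instead of rescanning
//			}
//			if equals(text[i], pattern[j]) {
//				j++
//			}
//			if j == len(pattern) {
//				return i - j + 1 // start index of the first full match
//			}
//		}
//		return -1
//	}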
+ + intEquals := func(a, b *int) bool { + if a == nil && b == nil { + return true + } + if a == nil || b == nil { + return false + } + return *a == *b + } + + b.ResetTimer() + for i := 0; i < b.N; i++ { + _ = kmpIndex(len(pattern), text, intEquals) + } + }) + } + } +} + +// BenchmarkCompressionRatio measures compression effectiveness +func BenchmarkCompressionRatio(b *testing.B) { + source, _ := util.DeterministicGenesisStateElectra(b, 512) + target := source.Copy() + _ = target.SetSlot(source.Slot() + 1) + + // Create different types of changes + testCases := []struct { + name string + modifier func(target state.BeaconState) + }{ + { + name: "minimal_change", + modifier: func(target state.BeaconState) { + // Just slot change, already done + }, + }, + { + name: "balance_changes", + modifier: func(target state.BeaconState) { + balances := target.Balances() + for i := 0; i < 10; i++ { + if i < len(balances) { + balances[i] += 1000 + } + } + _ = target.SetBalances(balances) + }, + }, + { + name: "validator_changes", + modifier: func(target state.BeaconState) { + validators := target.Validators() + for i := 0; i < 10; i++ { + if i < len(validators) { + validators[i].EffectiveBalance += 1000 + } + } + _ = target.SetValidators(validators) + }, + }, + } + + for _, tc := range testCases { + b.Run(tc.name, func(b *testing.B) { + testTarget := target.Copy() + tc.modifier(testTarget) + + // Get full state size + fullStateSSZ, err := testTarget.MarshalSSZ() + if err != nil { + b.Fatal(err) + } + + b.ResetTimer() + for i := 0; i < b.N; i++ { + diff, err := Diff(source, testTarget) + if err != nil { + b.Fatal(err) + } + + diffSize := len(diff.StateDiff) + len(diff.ValidatorDiffs) + len(diff.BalancesDiff) + + // Report compression ratio in the first iteration + if i == 0 { + ratio := float64(len(fullStateSSZ)) / float64(diffSize) + b.Logf("Compression ratio: %.2fx (full: %d bytes, diff: %d bytes)", + ratio, len(fullStateSSZ), diffSize) + } + } + }) + } +} + +// BenchmarkMemoryUsage measures memory allocations +func BenchmarkMemoryUsage(b *testing.B) { + source, _ := util.DeterministicGenesisStateElectra(b, 256) + target := source.Copy() + _ = target.SetSlot(source.Slot() + 10) + + // Modify some data + validators := target.Validators() + for i := 0; i < 25; i++ { + if i < len(validators) { + validators[i].EffectiveBalance += 1000 + } + } + _ = target.SetValidators(validators) + + b.ReportAllocs() + b.ResetTimer() + + for i := 0; i < b.N; i++ { + diff, err := Diff(source, target) + if err != nil { + b.Fatal(err) + } + + _, err = ApplyDiff(b.Context(), source.Copy(), diff) + if err != nil { + b.Fatal(err) + } + } +} diff --git a/consensus-types/hdiff/testdata/fuzz/FuzzNewStateDiff/d5bce2d6a168dcf4 b/consensus-types/hdiff/testdata/fuzz/FuzzNewStateDiff/d5bce2d6a168dcf4 new file mode 100644 index 0000000000..5a0290d0f1 --- /dev/null +++ b/consensus-types/hdiff/testdata/fuzz/FuzzNewStateDiff/d5bce2d6a168dcf4 @@ -0,0 +1,5 @@ +go test fuzz v1 +byte('\x00') +uint64(0) +[]byte("0") +[]byte("") diff --git a/consensus-types/hdiff/testdata/fuzz/FuzzPropertyValidatorIndices/582528ddfad69eb5 b/consensus-types/hdiff/testdata/fuzz/FuzzPropertyValidatorIndices/582528ddfad69eb5 new file mode 100644 index 0000000000..a96f5599e6 --- /dev/null +++ b/consensus-types/hdiff/testdata/fuzz/FuzzPropertyValidatorIndices/582528ddfad69eb5 @@ -0,0 +1,2 @@ +go test fuzz v1 +[]byte("0") diff --git a/consensus-types/hdiff/testdata/fuzz/FuzzReadPendingAttestation/a40f5c684fca518d 
b/consensus-types/hdiff/testdata/fuzz/FuzzReadPendingAttestation/a40f5c684fca518d new file mode 100644 index 0000000000..8e6a5d2872 --- /dev/null +++ b/consensus-types/hdiff/testdata/fuzz/FuzzReadPendingAttestation/a40f5c684fca518d @@ -0,0 +1,2 @@ +go test fuzz v1 +[]byte("0000000\xff") diff --git a/consensus-types/helpers/BUILD.bazel b/consensus-types/helpers/BUILD.bazel new file mode 100644 index 0000000000..aac519dd5a --- /dev/null +++ b/consensus-types/helpers/BUILD.bazel @@ -0,0 +1,16 @@ +load("@prysm//tools/go:def.bzl", "go_library", "go_test") + +go_library( + name = "go_default_library", + srcs = ["comparisons.go"], + importpath = "github.com/OffchainLabs/prysm/v6/consensus-types/helpers", + visibility = ["//visibility:public"], + deps = ["//proto/prysm/v1alpha1:go_default_library"], +) + +go_test( + name = "go_default_test", + srcs = ["comparisons_test.go"], + embed = [":go_default_library"], + deps = ["//proto/prysm/v1alpha1:go_default_library"], +) diff --git a/consensus-types/helpers/comparisons.go b/consensus-types/helpers/comparisons.go new file mode 100644 index 0000000000..49861b2a73 --- /dev/null +++ b/consensus-types/helpers/comparisons.go @@ -0,0 +1,109 @@ +package helpers + +import ( + "bytes" + + ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" +) + +func ForksEqual(s, t *ethpb.Fork) bool { + if s == nil && t == nil { + return true + } + if s == nil || t == nil { + return false + } + if s.Epoch != t.Epoch { + return false + } + if !bytes.Equal(s.PreviousVersion, t.PreviousVersion) { + return false + } + return bytes.Equal(s.CurrentVersion, t.CurrentVersion) +} + +func BlockHeadersEqual(s, t *ethpb.BeaconBlockHeader) bool { + if s == nil && t == nil { + return true + } + if s == nil || t == nil { + return false + } + if s.Slot != t.Slot { + return false + } + if s.ProposerIndex != t.ProposerIndex { + return false + } + if !bytes.Equal(s.ParentRoot, t.ParentRoot) { + return false + } + if !bytes.Equal(s.StateRoot, t.StateRoot) { + return false + } + return bytes.Equal(s.BodyRoot, t.BodyRoot) +} + +func Eth1DataEqual(s, t *ethpb.Eth1Data) bool { + if s == nil && t == nil { + return true + } + if s == nil || t == nil { + return false + } + if !bytes.Equal(s.DepositRoot, t.DepositRoot) { + return false + } + if s.DepositCount != t.DepositCount { + return false + } + return bytes.Equal(s.BlockHash, t.BlockHash) +} + +func PendingDepositsEqual(s, t *ethpb.PendingDeposit) bool { + if s == nil && t == nil { + return true + } + if s == nil || t == nil { + return false + } + if !bytes.Equal(s.PublicKey, t.PublicKey) { + return false + } + if !bytes.Equal(s.WithdrawalCredentials, t.WithdrawalCredentials) { + return false + } + if s.Amount != t.Amount { + return false + } + if !bytes.Equal(s.Signature, t.Signature) { + return false + } + return s.Slot == t.Slot +} + +func PendingPartialWithdrawalsEqual(s, t *ethpb.PendingPartialWithdrawal) bool { + if s == nil && t == nil { + return true + } + if s == nil || t == nil { + return false + } + if s.Index != t.Index { + return false + } + if s.Amount != t.Amount { + return false + } + return s.WithdrawableEpoch == t.WithdrawableEpoch +} + +func PendingConsolidationsEqual(s, t *ethpb.PendingConsolidation) bool { + if s == nil && t == nil { + return true + } + if s == nil || t == nil { + return false + } + return s.SourceIndex == t.SourceIndex && s.TargetIndex == t.TargetIndex +} diff --git a/consensus-types/helpers/comparisons_test.go b/consensus-types/helpers/comparisons_test.go new file mode 100644 index 
0000000000..e4d3486fe9 --- /dev/null +++ b/consensus-types/helpers/comparisons_test.go @@ -0,0 +1,637 @@ +package helpers + +import ( + "testing" + + ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" +) + +func TestForksEqual(t *testing.T) { + tests := []struct { + name string + s *ethpb.Fork + t *ethpb.Fork + want bool + }{ + { + name: "both nil", + s: nil, + t: nil, + want: true, + }, + { + name: "first nil", + s: nil, + t: ðpb.Fork{Epoch: 1}, + want: false, + }, + { + name: "second nil", + s: ðpb.Fork{Epoch: 1}, + t: nil, + want: false, + }, + { + name: "equal forks", + s: ðpb.Fork{ + Epoch: 100, + PreviousVersion: []byte{1, 2, 3, 4}, + CurrentVersion: []byte{5, 6, 7, 8}, + }, + t: ðpb.Fork{ + Epoch: 100, + PreviousVersion: []byte{1, 2, 3, 4}, + CurrentVersion: []byte{5, 6, 7, 8}, + }, + want: true, + }, + { + name: "different epoch", + s: ðpb.Fork{ + Epoch: 100, + PreviousVersion: []byte{1, 2, 3, 4}, + CurrentVersion: []byte{5, 6, 7, 8}, + }, + t: ðpb.Fork{ + Epoch: 200, + PreviousVersion: []byte{1, 2, 3, 4}, + CurrentVersion: []byte{5, 6, 7, 8}, + }, + want: false, + }, + { + name: "different previous version", + s: ðpb.Fork{ + Epoch: 100, + PreviousVersion: []byte{1, 2, 3, 4}, + CurrentVersion: []byte{5, 6, 7, 8}, + }, + t: ðpb.Fork{ + Epoch: 100, + PreviousVersion: []byte{9, 10, 11, 12}, + CurrentVersion: []byte{5, 6, 7, 8}, + }, + want: false, + }, + { + name: "different current version", + s: ðpb.Fork{ + Epoch: 100, + PreviousVersion: []byte{1, 2, 3, 4}, + CurrentVersion: []byte{5, 6, 7, 8}, + }, + t: ðpb.Fork{ + Epoch: 100, + PreviousVersion: []byte{1, 2, 3, 4}, + CurrentVersion: []byte{9, 10, 11, 12}, + }, + want: false, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + if got := ForksEqual(tt.s, tt.t); got != tt.want { + t.Errorf("ForksEqual() = %v, want %v", got, tt.want) + } + }) + } +} + +func TestBlockHeadersEqual(t *testing.T) { + tests := []struct { + name string + s *ethpb.BeaconBlockHeader + t *ethpb.BeaconBlockHeader + want bool + }{ + { + name: "both nil", + s: nil, + t: nil, + want: true, + }, + { + name: "first nil", + s: nil, + t: ðpb.BeaconBlockHeader{Slot: 1}, + want: false, + }, + { + name: "second nil", + s: ðpb.BeaconBlockHeader{Slot: 1}, + t: nil, + want: false, + }, + { + name: "equal headers", + s: ðpb.BeaconBlockHeader{ + Slot: 100, + ProposerIndex: 50, + ParentRoot: []byte{1, 2, 3, 4}, + StateRoot: []byte{5, 6, 7, 8}, + BodyRoot: []byte{9, 10, 11, 12}, + }, + t: ðpb.BeaconBlockHeader{ + Slot: 100, + ProposerIndex: 50, + ParentRoot: []byte{1, 2, 3, 4}, + StateRoot: []byte{5, 6, 7, 8}, + BodyRoot: []byte{9, 10, 11, 12}, + }, + want: true, + }, + { + name: "different slot", + s: ðpb.BeaconBlockHeader{ + Slot: 100, + ProposerIndex: 50, + ParentRoot: []byte{1, 2, 3, 4}, + StateRoot: []byte{5, 6, 7, 8}, + BodyRoot: []byte{9, 10, 11, 12}, + }, + t: ðpb.BeaconBlockHeader{ + Slot: 200, + ProposerIndex: 50, + ParentRoot: []byte{1, 2, 3, 4}, + StateRoot: []byte{5, 6, 7, 8}, + BodyRoot: []byte{9, 10, 11, 12}, + }, + want: false, + }, + { + name: "different proposer index", + s: ðpb.BeaconBlockHeader{ + Slot: 100, + ProposerIndex: 50, + ParentRoot: []byte{1, 2, 3, 4}, + StateRoot: []byte{5, 6, 7, 8}, + BodyRoot: []byte{9, 10, 11, 12}, + }, + t: ðpb.BeaconBlockHeader{ + Slot: 100, + ProposerIndex: 75, + ParentRoot: []byte{1, 2, 3, 4}, + StateRoot: []byte{5, 6, 7, 8}, + BodyRoot: []byte{9, 10, 11, 12}, + }, + want: false, + }, + { + name: "different parent root", + s: ðpb.BeaconBlockHeader{ + Slot: 100, + ProposerIndex: 50, + 
ParentRoot: []byte{1, 2, 3, 4}, + StateRoot: []byte{5, 6, 7, 8}, + BodyRoot: []byte{9, 10, 11, 12}, + }, + t: ðpb.BeaconBlockHeader{ + Slot: 100, + ProposerIndex: 50, + ParentRoot: []byte{13, 14, 15, 16}, + StateRoot: []byte{5, 6, 7, 8}, + BodyRoot: []byte{9, 10, 11, 12}, + }, + want: false, + }, + { + name: "different state root", + s: ðpb.BeaconBlockHeader{ + Slot: 100, + ProposerIndex: 50, + ParentRoot: []byte{1, 2, 3, 4}, + StateRoot: []byte{5, 6, 7, 8}, + BodyRoot: []byte{9, 10, 11, 12}, + }, + t: ðpb.BeaconBlockHeader{ + Slot: 100, + ProposerIndex: 50, + ParentRoot: []byte{1, 2, 3, 4}, + StateRoot: []byte{13, 14, 15, 16}, + BodyRoot: []byte{9, 10, 11, 12}, + }, + want: false, + }, + { + name: "different body root", + s: ðpb.BeaconBlockHeader{ + Slot: 100, + ProposerIndex: 50, + ParentRoot: []byte{1, 2, 3, 4}, + StateRoot: []byte{5, 6, 7, 8}, + BodyRoot: []byte{9, 10, 11, 12}, + }, + t: ðpb.BeaconBlockHeader{ + Slot: 100, + ProposerIndex: 50, + ParentRoot: []byte{1, 2, 3, 4}, + StateRoot: []byte{5, 6, 7, 8}, + BodyRoot: []byte{13, 14, 15, 16}, + }, + want: false, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + if got := BlockHeadersEqual(tt.s, tt.t); got != tt.want { + t.Errorf("BlockHeadersEqual() = %v, want %v", got, tt.want) + } + }) + } +} + +func TestEth1DataEqual(t *testing.T) { + tests := []struct { + name string + s *ethpb.Eth1Data + t *ethpb.Eth1Data + want bool + }{ + { + name: "both nil", + s: nil, + t: nil, + want: true, + }, + { + name: "first nil", + s: nil, + t: ðpb.Eth1Data{DepositCount: 1}, + want: false, + }, + { + name: "second nil", + s: ðpb.Eth1Data{DepositCount: 1}, + t: nil, + want: false, + }, + { + name: "equal eth1 data", + s: ðpb.Eth1Data{ + DepositRoot: []byte{1, 2, 3, 4}, + DepositCount: 100, + BlockHash: []byte{5, 6, 7, 8}, + }, + t: ðpb.Eth1Data{ + DepositRoot: []byte{1, 2, 3, 4}, + DepositCount: 100, + BlockHash: []byte{5, 6, 7, 8}, + }, + want: true, + }, + { + name: "different deposit root", + s: ðpb.Eth1Data{ + DepositRoot: []byte{1, 2, 3, 4}, + DepositCount: 100, + BlockHash: []byte{5, 6, 7, 8}, + }, + t: ðpb.Eth1Data{ + DepositRoot: []byte{9, 10, 11, 12}, + DepositCount: 100, + BlockHash: []byte{5, 6, 7, 8}, + }, + want: false, + }, + { + name: "different deposit count", + s: ðpb.Eth1Data{ + DepositRoot: []byte{1, 2, 3, 4}, + DepositCount: 100, + BlockHash: []byte{5, 6, 7, 8}, + }, + t: ðpb.Eth1Data{ + DepositRoot: []byte{1, 2, 3, 4}, + DepositCount: 200, + BlockHash: []byte{5, 6, 7, 8}, + }, + want: false, + }, + { + name: "different block hash", + s: ðpb.Eth1Data{ + DepositRoot: []byte{1, 2, 3, 4}, + DepositCount: 100, + BlockHash: []byte{5, 6, 7, 8}, + }, + t: ðpb.Eth1Data{ + DepositRoot: []byte{1, 2, 3, 4}, + DepositCount: 100, + BlockHash: []byte{9, 10, 11, 12}, + }, + want: false, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + if got := Eth1DataEqual(tt.s, tt.t); got != tt.want { + t.Errorf("Eth1DataEqual() = %v, want %v", got, tt.want) + } + }) + } +} + +func TestPendingDepositsEqual(t *testing.T) { + tests := []struct { + name string + s *ethpb.PendingDeposit + t *ethpb.PendingDeposit + want bool + }{ + { + name: "both nil", + s: nil, + t: nil, + want: true, + }, + { + name: "first nil", + s: nil, + t: ðpb.PendingDeposit{Amount: 1}, + want: false, + }, + { + name: "second nil", + s: ðpb.PendingDeposit{Amount: 1}, + t: nil, + want: false, + }, + { + name: "equal pending deposits", + s: ðpb.PendingDeposit{ + PublicKey: []byte{1, 2, 3, 4}, + WithdrawalCredentials: []byte{5, 6, 7, 8}, 
+ Amount: 32000000000, + Signature: []byte{9, 10, 11, 12}, + Slot: 100, + }, + t: ðpb.PendingDeposit{ + PublicKey: []byte{1, 2, 3, 4}, + WithdrawalCredentials: []byte{5, 6, 7, 8}, + Amount: 32000000000, + Signature: []byte{9, 10, 11, 12}, + Slot: 100, + }, + want: true, + }, + { + name: "different public key", + s: ðpb.PendingDeposit{ + PublicKey: []byte{1, 2, 3, 4}, + WithdrawalCredentials: []byte{5, 6, 7, 8}, + Amount: 32000000000, + Signature: []byte{9, 10, 11, 12}, + Slot: 100, + }, + t: ðpb.PendingDeposit{ + PublicKey: []byte{13, 14, 15, 16}, + WithdrawalCredentials: []byte{5, 6, 7, 8}, + Amount: 32000000000, + Signature: []byte{9, 10, 11, 12}, + Slot: 100, + }, + want: false, + }, + { + name: "different withdrawal credentials", + s: ðpb.PendingDeposit{ + PublicKey: []byte{1, 2, 3, 4}, + WithdrawalCredentials: []byte{5, 6, 7, 8}, + Amount: 32000000000, + Signature: []byte{9, 10, 11, 12}, + Slot: 100, + }, + t: ðpb.PendingDeposit{ + PublicKey: []byte{1, 2, 3, 4}, + WithdrawalCredentials: []byte{13, 14, 15, 16}, + Amount: 32000000000, + Signature: []byte{9, 10, 11, 12}, + Slot: 100, + }, + want: false, + }, + { + name: "different amount", + s: ðpb.PendingDeposit{ + PublicKey: []byte{1, 2, 3, 4}, + WithdrawalCredentials: []byte{5, 6, 7, 8}, + Amount: 32000000000, + Signature: []byte{9, 10, 11, 12}, + Slot: 100, + }, + t: ðpb.PendingDeposit{ + PublicKey: []byte{1, 2, 3, 4}, + WithdrawalCredentials: []byte{5, 6, 7, 8}, + Amount: 16000000000, + Signature: []byte{9, 10, 11, 12}, + Slot: 100, + }, + want: false, + }, + { + name: "different signature", + s: ðpb.PendingDeposit{ + PublicKey: []byte{1, 2, 3, 4}, + WithdrawalCredentials: []byte{5, 6, 7, 8}, + Amount: 32000000000, + Signature: []byte{9, 10, 11, 12}, + Slot: 100, + }, + t: ðpb.PendingDeposit{ + PublicKey: []byte{1, 2, 3, 4}, + WithdrawalCredentials: []byte{5, 6, 7, 8}, + Amount: 32000000000, + Signature: []byte{13, 14, 15, 16}, + Slot: 100, + }, + want: false, + }, + { + name: "different slot", + s: ðpb.PendingDeposit{ + PublicKey: []byte{1, 2, 3, 4}, + WithdrawalCredentials: []byte{5, 6, 7, 8}, + Amount: 32000000000, + Signature: []byte{9, 10, 11, 12}, + Slot: 100, + }, + t: ðpb.PendingDeposit{ + PublicKey: []byte{1, 2, 3, 4}, + WithdrawalCredentials: []byte{5, 6, 7, 8}, + Amount: 32000000000, + Signature: []byte{9, 10, 11, 12}, + Slot: 200, + }, + want: false, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + if got := PendingDepositsEqual(tt.s, tt.t); got != tt.want { + t.Errorf("PendingDepositsEqual() = %v, want %v", got, tt.want) + } + }) + } +} + +func TestPendingPartialWithdrawalsEqual(t *testing.T) { + tests := []struct { + name string + s *ethpb.PendingPartialWithdrawal + t *ethpb.PendingPartialWithdrawal + want bool + }{ + { + name: "both nil", + s: nil, + t: nil, + want: true, + }, + { + name: "first nil", + s: nil, + t: ðpb.PendingPartialWithdrawal{Index: 1}, + want: false, + }, + { + name: "second nil", + s: ðpb.PendingPartialWithdrawal{Index: 1}, + t: nil, + want: false, + }, + { + name: "equal pending partial withdrawals", + s: ðpb.PendingPartialWithdrawal{ + Index: 50, + Amount: 1000000000, + WithdrawableEpoch: 200, + }, + t: ðpb.PendingPartialWithdrawal{ + Index: 50, + Amount: 1000000000, + WithdrawableEpoch: 200, + }, + want: true, + }, + { + name: "different index", + s: ðpb.PendingPartialWithdrawal{ + Index: 50, + Amount: 1000000000, + WithdrawableEpoch: 200, + }, + t: ðpb.PendingPartialWithdrawal{ + Index: 75, + Amount: 1000000000, + WithdrawableEpoch: 200, + }, + want: false, + }, + 
{ + name: "different amount", + s: ðpb.PendingPartialWithdrawal{ + Index: 50, + Amount: 1000000000, + WithdrawableEpoch: 200, + }, + t: ðpb.PendingPartialWithdrawal{ + Index: 50, + Amount: 2000000000, + WithdrawableEpoch: 200, + }, + want: false, + }, + { + name: "different withdrawable epoch", + s: ðpb.PendingPartialWithdrawal{ + Index: 50, + Amount: 1000000000, + WithdrawableEpoch: 200, + }, + t: ðpb.PendingPartialWithdrawal{ + Index: 50, + Amount: 1000000000, + WithdrawableEpoch: 300, + }, + want: false, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + if got := PendingPartialWithdrawalsEqual(tt.s, tt.t); got != tt.want { + t.Errorf("PendingPartialWithdrawalsEqual() = %v, want %v", got, tt.want) + } + }) + } +} + +func TestPendingConsolidationsEqual(t *testing.T) { + tests := []struct { + name string + s *ethpb.PendingConsolidation + t *ethpb.PendingConsolidation + want bool + }{ + { + name: "both nil", + s: nil, + t: nil, + want: true, + }, + { + name: "first nil", + s: nil, + t: ðpb.PendingConsolidation{SourceIndex: 1}, + want: false, + }, + { + name: "second nil", + s: ðpb.PendingConsolidation{SourceIndex: 1}, + t: nil, + want: false, + }, + { + name: "equal pending consolidations", + s: ðpb.PendingConsolidation{ + SourceIndex: 10, + TargetIndex: 20, + }, + t: ðpb.PendingConsolidation{ + SourceIndex: 10, + TargetIndex: 20, + }, + want: true, + }, + { + name: "different source index", + s: ðpb.PendingConsolidation{ + SourceIndex: 10, + TargetIndex: 20, + }, + t: ðpb.PendingConsolidation{ + SourceIndex: 15, + TargetIndex: 20, + }, + want: false, + }, + { + name: "different target index", + s: ðpb.PendingConsolidation{ + SourceIndex: 10, + TargetIndex: 20, + }, + t: ðpb.PendingConsolidation{ + SourceIndex: 10, + TargetIndex: 25, + }, + want: false, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + if got := PendingConsolidationsEqual(tt.s, tt.t); got != tt.want { + t.Errorf("PendingConsolidationsEqual() = %v, want %v", got, tt.want) + } + }) + } +} From 5751dbf134608fab9065b25a45364713f9c95af4 Mon Sep 17 00:00:00 2001 From: MozirDmitriy Date: Tue, 21 Oct 2025 14:21:10 +0300 Subject: [PATCH 042/103] kv: write recovered state summaries to stateSummaryBucket (#15896) * kv: write recovered state summaries to stateSummaryBucket * Create MozirDmitriy_fix_kv-recover-state-summurt-bucket.md * add a test --- beacon-chain/db/kv/checkpoint.go | 2 +- beacon-chain/db/kv/checkpoint_test.go | 29 +++++++++++++++++++ ...riy_fix_kv-recover-state-summurt-bucket.md | 3 ++ 3 files changed, 33 insertions(+), 1 deletion(-) create mode 100644 changelog/MozirDmitriy_fix_kv-recover-state-summurt-bucket.md diff --git a/beacon-chain/db/kv/checkpoint.go b/beacon-chain/db/kv/checkpoint.go index 55d8ba2586..90435ae737 100644 --- a/beacon-chain/db/kv/checkpoint.go +++ b/beacon-chain/db/kv/checkpoint.go @@ -132,6 +132,6 @@ func recoverStateSummary(ctx context.Context, tx *bolt.Tx, root []byte) error { if err != nil { return err } - summaryBucket := tx.Bucket(stateBucket) + summaryBucket := tx.Bucket(stateSummaryBucket) return summaryBucket.Put(root, summaryEnc) } diff --git a/beacon-chain/db/kv/checkpoint_test.go b/beacon-chain/db/kv/checkpoint_test.go index 48c87142b5..1b4389b45c 100644 --- a/beacon-chain/db/kv/checkpoint_test.go +++ b/beacon-chain/db/kv/checkpoint_test.go @@ -137,3 +137,32 @@ func TestStore_FinalizedCheckpoint_StateMustExist(t *testing.T) { require.ErrorContains(t, errMissingStateForCheckpoint.Error(), db.SaveFinalizedCheckpoint(ctx, cp)) } + 
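// Editor's note (illustrative sketch, not part of this patch): the recovery path exercised by the
// regression test below has roughly this shape, reconstructed from the checkpoint.go hunk above;
// the block lookup and encoding helpers are assumptions:
//
//	func recoverStateSummarySketch(ctx context.Context, tx *bolt.Tx, root []byte) error {
//		blk, err := getBlockByRoot(ctx, tx, root) // assumed helper: load the block stored under root
//		if err != nil {
//			return err
//		}
//		summary := &ethpb.StateSummary{Slot: blk.Block().Slot(), Root: root}
//		summaryEnc, err := encode(ctx, summary) // assumed serialization helper
//		if err != nil {
//			return err
//		}
//		// The fix in this commit: write to stateSummaryBucket, not stateBucket,
//		// so HasStateSummary/StateSummary can see the recovered summary.
//		return tx.Bucket(stateSummaryBucket).Put(root, summaryEnc)
//	}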
+// Regression test: verify that saving a checkpoint triggers recovery which writes +// the state summary into the correct stateSummaryBucket so that HasStateSummary/StateSummary see it. +func TestRecoverStateSummary_WritesToStateSummaryBucket(t *testing.T) { + db := setupDB(t) + ctx := t.Context() + + // Create a block without saving a state or summary, so recovery is needed. + blk := util.HydrateSignedBeaconBlock(&ethpb.SignedBeaconBlock{}) + root, err := blk.Block.HashTreeRoot() + require.NoError(t, err) + wsb, err := blocks.NewSignedBeaconBlock(blk) + require.NoError(t, err) + require.NoError(t, db.SaveBlock(ctx, wsb)) + + // Precondition: summary not present yet. + require.Equal(t, false, db.HasStateSummary(ctx, root)) + + // Saving justified checkpoint should trigger recovery path calling recoverStateSummary. + cp := &ethpb.Checkpoint{Epoch: 2, Root: root[:]} + require.NoError(t, db.SaveJustifiedCheckpoint(ctx, cp)) + + // Postcondition: summary is visible via the public summary APIs (which read stateSummaryBucket). + require.Equal(t, true, db.HasStateSummary(ctx, root)) + summary, err := db.StateSummary(ctx, root) + require.NoError(t, err) + require.NotNil(t, summary) + assert.DeepEqual(t, &ethpb.StateSummary{Slot: blk.Block.Slot, Root: root[:]}, summary) +} diff --git a/changelog/MozirDmitriy_fix_kv-recover-state-summurt-bucket.md b/changelog/MozirDmitriy_fix_kv-recover-state-summurt-bucket.md new file mode 100644 index 0000000000..019e0aba32 --- /dev/null +++ b/changelog/MozirDmitriy_fix_kv-recover-state-summurt-bucket.md @@ -0,0 +1,3 @@ +### Fixed + +- Fix recoverStateSummary to persist state summaries in stateSummaryBucket instead of stateBucket (#15896). From d613f3a2621e0d16472f582581fc5678ae2f433f Mon Sep 17 00:00:00 2001 From: satushh Date: Tue, 21 Oct 2025 14:54:52 +0100 Subject: [PATCH 043/103] Update Earliest available slot when pruning (#15694) * Update Earliest available slot when pruning * bazel run //:gazelle -- fix * custodyUpdater interface to avoid import cycle * bazel run //:gazelle -- fix * simplify test * separation of concerns * debug log for updating eas * UpdateEarliestAvailableSlot function in CustodyManager * fix test * UpdateEarliestAvailableSlot function for FakeP2P * lint * UpdateEarliestAvailableSlot instead of UpdateCustodyInfo + check for Fulu * fix test and lint * bugfix: enforce minimum retention period in pruner * remove MinEpochsForBlockRequests function and use from config * remove modifying earliest_available_slot after data column pruning * correct earliestAvailableSlot validation: allow backfill decrease but prevent increase within MIN_EPOCHS_FOR_BLOCK_REQUESTS * lint * bazel run //:gazelle -- fix * lint and remove unwanted debug logs * Return a wrapped error, and let the caller decide what to do * fix tests because updateEarliestSlot returns error now * avoid re-doing computation in the test function * lint and correct changelog * custody updater should be a mandatory part of the pruner service * ensure never increase eas if we are in the block requests window * slot level granularity edge case * update the value stored in the DB * log tidy up * use errNoCustodyInfo * allow earliestAvailableSlot edit when custodyGroupCount doesnt change * undo the minimal config change * add context to CustodyGroupCount after merging from develop * cosmetic change * shift responsibility from caller to callee, protection for updateEarliestSlot.
UpdateEarliestAvailableSlot returns cgc * allow increase in earliestAvailableSlot only when custodyGroupCount also increases * remove CustodyGroupCount as it is no longer needed as UpdateEarliestAvailableSlot returns cgc now * proper place for log and name refactor * test for Nil custody info * allow decreasing earliest slot in DB (just like in memory) * invert if statement to make more readable * UpdateEarliestAvailableSlot for DB (equivalent of p2p's UpdateEarliestAvailableSlot) & undo changes made to UpdateCustodyInfo * in UpdateEarliestAvailableSlot, no need to return unused values * no need to log stored group count * log.WithField instead of log.WithFields --- beacon-chain/blockchain/setup_test.go | 8 + .../core/helpers/weak_subjectivity.go | 11 - .../core/helpers/weak_subjectivity_test.go | 17 -- beacon-chain/db/iface/interface.go | 1 + beacon-chain/db/kv/custody.go | 78 +++++- beacon-chain/db/kv/custody_test.go | 128 +++++++++ beacon-chain/db/pruner/BUILD.bazel | 2 +- beacon-chain/db/pruner/pruner.go | 64 ++++- beacon-chain/db/pruner/pruner_test.go | 247 +++++++++++++++++- beacon-chain/node/node.go | 1 + beacon-chain/p2p/custody.go | 51 ++++ beacon-chain/p2p/custody_test.go | 143 ++++++++++ beacon-chain/p2p/interfaces.go | 1 + beacon-chain/p2p/testing/fuzz_p2p.go | 5 + beacon-chain/p2p/testing/p2p.go | 9 + beacon-chain/sync/backfill/BUILD.bazel | 2 - beacon-chain/sync/backfill/service.go | 4 +- beacon-chain/sync/backfill/service_test.go | 5 +- changelog/satushh-update-easlot-pruning.md | 3 + 19 files changed, 730 insertions(+), 50 deletions(-) create mode 100644 changelog/satushh-update-easlot-pruning.md diff --git a/beacon-chain/blockchain/setup_test.go b/beacon-chain/blockchain/setup_test.go index b18640f327..7c453cf2d7 100644 --- a/beacon-chain/blockchain/setup_test.go +++ b/beacon-chain/blockchain/setup_test.go @@ -130,6 +130,14 @@ func (dch *mockCustodyManager) UpdateCustodyInfo(earliestAvailableSlot primitive return earliestAvailableSlot, custodyGroupCount, nil } +func (dch *mockCustodyManager) UpdateEarliestAvailableSlot(earliestAvailableSlot primitives.Slot) error { + dch.mut.Lock() + defer dch.mut.Unlock() + + dch.earliestAvailableSlot = earliestAvailableSlot + return nil +} + func (dch *mockCustodyManager) CustodyGroupCountFromPeer(peer.ID) uint64 { return 0 } diff --git a/beacon-chain/core/helpers/weak_subjectivity.go b/beacon-chain/core/helpers/weak_subjectivity.go index 4260bea2d8..1280f89f6c 100644 --- a/beacon-chain/core/helpers/weak_subjectivity.go +++ b/beacon-chain/core/helpers/weak_subjectivity.go @@ -201,14 +201,3 @@ func ParseWeakSubjectivityInputString(wsCheckpointString string) (*v1alpha1.Chec Root: bRoot, }, nil } - -// MinEpochsForBlockRequests computes the number of epochs of block history that we need to maintain, -// relative to the current epoch, per the p2p specs. This is used to compute the slot where backfill is complete. 
-// value defined: -// https://github.com/ethereum/consensus-specs/blob/master/specs/phase0/p2p-interface.md#configuration -// MIN_VALIDATOR_WITHDRAWABILITY_DELAY + CHURN_LIMIT_QUOTIENT // 2 (= 33024, ~5 months) -// detailed rationale: https://github.com/ethereum/consensus-specs/blob/master/specs/phase0/p2p-interface.md#why-are-blocksbyrange-requests-only-required-to-be-served-for-the-latest-min_epochs_for_block_requests-epochs -func MinEpochsForBlockRequests() primitives.Epoch { - return params.BeaconConfig().MinValidatorWithdrawabilityDelay + - primitives.Epoch(params.BeaconConfig().ChurnLimitQuotient/2) -} diff --git a/beacon-chain/core/helpers/weak_subjectivity_test.go b/beacon-chain/core/helpers/weak_subjectivity_test.go index b488008365..8cd74e7819 100644 --- a/beacon-chain/core/helpers/weak_subjectivity_test.go +++ b/beacon-chain/core/helpers/weak_subjectivity_test.go @@ -286,20 +286,3 @@ func genState(t *testing.T, valCount, avgBalance uint64) state.BeaconState { return beaconState } -func TestMinEpochsForBlockRequests(t *testing.T) { - helpers.ClearCache() - - params.SetActiveTestCleanup(t, params.MainnetConfig()) - var expected primitives.Epoch = 33024 - // expected value of 33024 via spec commentary: - // https://github.com/ethereum/consensus-specs/blob/master/specs/phase0/p2p-interface.md#why-are-blocksbyrange-requests-only-required-to-be-served-for-the-latest-min_epochs_for_block_requests-epochs - // MIN_EPOCHS_FOR_BLOCK_REQUESTS is calculated using the arithmetic from compute_weak_subjectivity_period found in the weak subjectivity guide. Specifically to find this max epoch range, we use the worst case event of a very large validator size (>= MIN_PER_EPOCH_CHURN_LIMIT * CHURN_LIMIT_QUOTIENT). - // - // MIN_EPOCHS_FOR_BLOCK_REQUESTS = ( - // MIN_VALIDATOR_WITHDRAWABILITY_DELAY - // + MAX_SAFETY_DECAY * CHURN_LIMIT_QUOTIENT // (2 * 100) - // ) - // - // Where MAX_SAFETY_DECAY = 100 and thus MIN_EPOCHS_FOR_BLOCK_REQUESTS = 33024 (~5 months). - require.Equal(t, expected, helpers.MinEpochsForBlockRequests()) -} diff --git a/beacon-chain/db/iface/interface.go b/beacon-chain/db/iface/interface.go index 7595c93a86..58233ffe1b 100644 --- a/beacon-chain/db/iface/interface.go +++ b/beacon-chain/db/iface/interface.go @@ -129,6 +129,7 @@ type NoHeadAccessDatabase interface { // Custody operations. UpdateSubscribedToAllDataSubnets(ctx context.Context, subscribed bool) (bool, error) UpdateCustodyInfo(ctx context.Context, earliestAvailableSlot primitives.Slot, custodyGroupCount uint64) (primitives.Slot, uint64, error) + UpdateEarliestAvailableSlot(ctx context.Context, earliestAvailableSlot primitives.Slot) error // P2P Metadata operations. SaveMetadataSeqNum(ctx context.Context, seqNum uint64) error diff --git a/beacon-chain/db/kv/custody.go b/beacon-chain/db/kv/custody.go index 9d7fa60234..63df838bed 100644 --- a/beacon-chain/db/kv/custody.go +++ b/beacon-chain/db/kv/custody.go @@ -2,16 +2,19 @@ package kv import ( "context" + "time" + "github.com/OffchainLabs/prysm/v6/config/params" "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" "github.com/OffchainLabs/prysm/v6/monitoring/tracing/trace" + "github.com/OffchainLabs/prysm/v6/time/slots" "github.com/pkg/errors" "github.com/sirupsen/logrus" bolt "go.etcd.io/bbolt" ) -// UpdateCustodyInfo atomically updates the custody group count only it is greater than the stored one. +// UpdateCustodyInfo atomically updates the custody group count only if it is greater than the stored one. 
// In this case, it also updates the earliest available slot with the provided value. // It returns the (potentially updated) custody group count and earliest available slot. func (s *Store) UpdateCustodyInfo(ctx context.Context, earliestAvailableSlot primitives.Slot, custodyGroupCount uint64) (primitives.Slot, uint64, error) { @@ -70,6 +73,79 @@ func (s *Store) UpdateCustodyInfo(ctx context.Context, earliestAvailableSlot pri return storedEarliestAvailableSlot, storedGroupCount, nil } +// UpdateEarliestAvailableSlot updates the earliest available slot. +func (s *Store) UpdateEarliestAvailableSlot(ctx context.Context, earliestAvailableSlot primitives.Slot) error { + _, span := trace.StartSpan(ctx, "BeaconDB.UpdateEarliestAvailableSlot") + defer span.End() + + storedEarliestAvailableSlot := primitives.Slot(0) + if err := s.db.Update(func(tx *bolt.Tx) error { + // Retrieve the custody bucket. + bucket, err := tx.CreateBucketIfNotExists(custodyBucket) + if err != nil { + return errors.Wrap(err, "create custody bucket") + } + + // Retrieve the stored earliest available slot. + storedEarliestAvailableSlotBytes := bucket.Get(earliestAvailableSlotKey) + if len(storedEarliestAvailableSlotBytes) != 0 { + storedEarliestAvailableSlot = primitives.Slot(bytesutil.BytesToUint64BigEndian(storedEarliestAvailableSlotBytes)) + } + + // Allow decrease (for backfill scenarios) + if earliestAvailableSlot <= storedEarliestAvailableSlot { + storedEarliestAvailableSlot = earliestAvailableSlot + bytes := bytesutil.Uint64ToBytesBigEndian(uint64(earliestAvailableSlot)) + if err := bucket.Put(earliestAvailableSlotKey, bytes); err != nil { + return errors.Wrap(err, "put earliest available slot") + } + return nil + } + + // Prevent increase within the MIN_EPOCHS_FOR_BLOCK_REQUESTS period + // This ensures we don't voluntarily refuse to serve mandatory block data + genesisTime := time.Unix(int64(params.BeaconConfig().MinGenesisTime+params.BeaconConfig().GenesisDelay), 0) + currentSlot := slots.CurrentSlot(genesisTime) + currentEpoch := slots.ToEpoch(currentSlot) + minEpochsForBlocks := primitives.Epoch(params.BeaconConfig().MinEpochsForBlockRequests) + + // Calculate the minimum required epoch (or 0 if we're early in the chain) + minRequiredEpoch := primitives.Epoch(0) + if currentEpoch > minEpochsForBlocks { + minRequiredEpoch = currentEpoch - minEpochsForBlocks + } + + // Convert to slot to ensure we compare at slot-level granularity + minRequiredSlot, err := slots.EpochStart(minRequiredEpoch) + if err != nil { + return errors.Wrap(err, "calculate minimum required slot") + } + + // Prevent any increase that would put earliest available slot beyond the minimum required slot + if earliestAvailableSlot > minRequiredSlot { + return errors.Errorf( + "cannot increase earliest available slot to %d (epoch %d) as it exceeds minimum required slot %d (epoch %d)", + earliestAvailableSlot, slots.ToEpoch(earliestAvailableSlot), + minRequiredSlot, minRequiredEpoch, + ) + } + + storedEarliestAvailableSlot = earliestAvailableSlot + bytes := bytesutil.Uint64ToBytesBigEndian(uint64(earliestAvailableSlot)) + if err := bucket.Put(earliestAvailableSlotKey, bytes); err != nil { + return errors.Wrap(err, "put earliest available slot") + } + + return nil + }); err != nil { + return err + } + + log.WithField("earliestAvailableSlot", storedEarliestAvailableSlot).Debug("Updated earliest available slot") + + return nil +} + // UpdateSubscribedToAllDataSubnets updates the "subscribed to all data subnets" status in the database // only if 
`subscribed` is `true`. // It returns the previous subscription status. diff --git a/beacon-chain/db/kv/custody_test.go b/beacon-chain/db/kv/custody_test.go index 3c1c371731..6db0148109 100644 --- a/beacon-chain/db/kv/custody_test.go +++ b/beacon-chain/db/kv/custody_test.go @@ -3,10 +3,13 @@ package kv import ( "context" "testing" + "time" + "github.com/OffchainLabs/prysm/v6/config/params" "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v6/time/slots" bolt "go.etcd.io/bbolt" ) @@ -132,6 +135,131 @@ func TestUpdateCustodyInfo(t *testing.T) { }) } +func TestUpdateEarliestAvailableSlot(t *testing.T) { + ctx := t.Context() + + t.Run("allow decreasing earliest slot (backfill scenario)", func(t *testing.T) { + const ( + initialSlot = primitives.Slot(300) + initialCount = uint64(10) + earliestSlot = primitives.Slot(200) // Lower than initial (backfill discovered earlier blocks) + ) + + db := setupDB(t) + + // Initialize custody info + _, _, err := db.UpdateCustodyInfo(ctx, initialSlot, initialCount) + require.NoError(t, err) + + // Update with a lower slot (should update for backfill) + err = db.UpdateEarliestAvailableSlot(ctx, earliestSlot) + require.NoError(t, err) + + storedSlot, storedCount := getCustodyInfoFromDB(t, db) + require.Equal(t, earliestSlot, storedSlot) + require.Equal(t, initialCount, storedCount) + }) + + t.Run("allow increasing slot within MIN_EPOCHS_FOR_BLOCK_REQUESTS (pruning scenario)", func(t *testing.T) { + db := setupDB(t) + + // Calculate the current slot and minimum required slot based on actual current time + genesisTime := time.Unix(int64(params.BeaconConfig().MinGenesisTime+params.BeaconConfig().GenesisDelay), 0) + currentSlot := slots.CurrentSlot(genesisTime) + currentEpoch := slots.ToEpoch(currentSlot) + minEpochsForBlocks := primitives.Epoch(params.BeaconConfig().MinEpochsForBlockRequests) + + var minRequiredEpoch primitives.Epoch + if currentEpoch > minEpochsForBlocks { + minRequiredEpoch = currentEpoch - minEpochsForBlocks + } else { + minRequiredEpoch = 0 + } + + minRequiredSlot, err := slots.EpochStart(minRequiredEpoch) + require.NoError(t, err) + + // Initial setup: set earliest slot well before minRequiredSlot + const groupCount = uint64(5) + initialSlot := primitives.Slot(1000) + + _, _, err = db.UpdateCustodyInfo(ctx, initialSlot, groupCount) + require.NoError(t, err) + + // Try to increase to a slot that's still BEFORE minRequiredSlot (should succeed) + validSlot := minRequiredSlot - 100 + + err = db.UpdateEarliestAvailableSlot(ctx, validSlot) + require.NoError(t, err) + + // Verify the database was updated + storedSlot, storedCount := getCustodyInfoFromDB(t, db) + require.Equal(t, validSlot, storedSlot) + require.Equal(t, groupCount, storedCount) + }) + + t.Run("prevent increasing slot beyond MIN_EPOCHS_FOR_BLOCK_REQUESTS", func(t *testing.T) { + db := setupDB(t) + + // Calculate the current slot and minimum required slot based on actual current time + genesisTime := time.Unix(int64(params.BeaconConfig().MinGenesisTime+params.BeaconConfig().GenesisDelay), 0) + currentSlot := slots.CurrentSlot(genesisTime) + currentEpoch := slots.ToEpoch(currentSlot) + minEpochsForBlocks := primitives.Epoch(params.BeaconConfig().MinEpochsForBlockRequests) + + var minRequiredEpoch primitives.Epoch + if currentEpoch > minEpochsForBlocks { + minRequiredEpoch = currentEpoch - minEpochsForBlocks + } else { + minRequiredEpoch = 
0 + } + + minRequiredSlot, err := slots.EpochStart(minRequiredEpoch) + require.NoError(t, err) + + // Initial setup: set a valid earliest slot (well before minRequiredSlot) + const initialCount = uint64(5) + initialSlot := primitives.Slot(1000) + + _, _, err = db.UpdateCustodyInfo(ctx, initialSlot, initialCount) + require.NoError(t, err) + + // Try to set earliest slot beyond the minimum required slot + invalidSlot := minRequiredSlot + 100 + + // This should fail + err = db.UpdateEarliestAvailableSlot(ctx, invalidSlot) + require.ErrorContains(t, "cannot increase earliest available slot", err) + require.ErrorContains(t, "exceeds minimum required slot", err) + + // Verify the database wasn't updated + storedSlot, storedCount := getCustodyInfoFromDB(t, db) + require.Equal(t, initialSlot, storedSlot) + require.Equal(t, initialCount, storedCount) + }) + + t.Run("no change when slot equals current slot", func(t *testing.T) { + const ( + initialSlot = primitives.Slot(100) + initialCount = uint64(5) + ) + + db := setupDB(t) + + // Initialize custody info + _, _, err := db.UpdateCustodyInfo(ctx, initialSlot, initialCount) + require.NoError(t, err) + + // Update with the same slot + err = db.UpdateEarliestAvailableSlot(ctx, initialSlot) + require.NoError(t, err) + + storedSlot, storedCount := getCustodyInfoFromDB(t, db) + require.Equal(t, initialSlot, storedSlot) + require.Equal(t, initialCount, storedCount) + }) +} + func TestUpdateSubscribedToAllDataSubnets(t *testing.T) { ctx := context.Background() diff --git a/beacon-chain/db/pruner/BUILD.bazel b/beacon-chain/db/pruner/BUILD.bazel index 21e31bb7d0..aea71592ef 100644 --- a/beacon-chain/db/pruner/BUILD.bazel +++ b/beacon-chain/db/pruner/BUILD.bazel @@ -8,7 +8,6 @@ go_library( "//beacon-chain:__subpackages__", ], deps = [ - "//beacon-chain/core/helpers:go_default_library", "//beacon-chain/db:go_default_library", "//beacon-chain/db/iface:go_default_library", "//config/params:go_default_library", @@ -29,6 +28,7 @@ go_test( "//consensus-types/blocks:go_default_library", "//consensus-types/primitives:go_default_library", "//proto/prysm/v1alpha1:go_default_library", + "//testing/assert:go_default_library", "//testing/require:go_default_library", "//testing/util:go_default_library", "//time/slots/testing:go_default_library", diff --git a/beacon-chain/db/pruner/pruner.go b/beacon-chain/db/pruner/pruner.go index 2b5cb02d59..6e3699a9e6 100644 --- a/beacon-chain/db/pruner/pruner.go +++ b/beacon-chain/db/pruner/pruner.go @@ -4,7 +4,6 @@ import ( "context" "time" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" "github.com/OffchainLabs/prysm/v6/beacon-chain/db" "github.com/OffchainLabs/prysm/v6/beacon-chain/db/iface" "github.com/OffchainLabs/prysm/v6/config/params" @@ -25,17 +24,24 @@ const ( defaultNumBatchesToPrune = 15 ) +// custodyUpdater is a tiny interface that p2p service implements; kept here to avoid +// importing the p2p package and creating a cycle. +type custodyUpdater interface { + UpdateEarliestAvailableSlot(earliestAvailableSlot primitives.Slot) error +} + type ServiceOption func(*Service) // WithRetentionPeriod allows the user to specify a different data retention period than the spec default. // The retention period is specified in epochs, and must be >= MIN_EPOCHS_FOR_BLOCK_REQUESTS. 
func WithRetentionPeriod(retentionEpochs primitives.Epoch) ServiceOption { return func(s *Service) { - defaultRetentionEpochs := helpers.MinEpochsForBlockRequests() + 1 + defaultRetentionEpochs := primitives.Epoch(params.BeaconConfig().MinEpochsForBlockRequests) + 1 if retentionEpochs < defaultRetentionEpochs { log.WithField("userEpochs", retentionEpochs). WithField("minRequired", defaultRetentionEpochs). - Warn("Retention period too low, using minimum required value") + Warn("Retention period too low, ignoring and using minimum required value") + retentionEpochs = defaultRetentionEpochs } s.ps = pruneStartSlotFunc(retentionEpochs) @@ -58,17 +64,23 @@ type Service struct { slotTicker slots.Ticker backfillWaiter func() error initSyncWaiter func() error + custody custodyUpdater } -func New(ctx context.Context, db iface.Database, genesisTime time.Time, initSyncWaiter, backfillWaiter func() error, opts ...ServiceOption) (*Service, error) { +func New(ctx context.Context, db iface.Database, genesisTime time.Time, initSyncWaiter, backfillWaiter func() error, custody custodyUpdater, opts ...ServiceOption) (*Service, error) { + if custody == nil { + return nil, errors.New("custody updater is required for pruner but was not provided") + } + p := &Service{ ctx: ctx, db: db, - ps: pruneStartSlotFunc(helpers.MinEpochsForBlockRequests() + 1), // Default retention epochs is MIN_EPOCHS_FOR_BLOCK_REQUESTS + 1 from the current slot. + ps: pruneStartSlotFunc(primitives.Epoch(params.BeaconConfig().MinEpochsForBlockRequests) + 1), // Default retention epochs is MIN_EPOCHS_FOR_BLOCK_REQUESTS + 1 from the current slot. done: make(chan struct{}), slotTicker: slots.NewSlotTicker(slots.UnsafeStartTime(genesisTime, 0), params.BeaconConfig().SecondsPerSlot), initSyncWaiter: initSyncWaiter, backfillWaiter: backfillWaiter, + custody: custody, } for _, o := range opts { @@ -157,17 +169,45 @@ func (p *Service) prune(slot primitives.Slot) error { return errors.Wrap(err, "failed to prune batches") } - log.WithFields(logrus.Fields{ - "prunedUpto": pruneUpto, - "duration": time.Since(tt), - "currentSlot": slot, - "batchSize": defaultPrunableBatchSize, - "numBatches": numBatches, - }).Debug("Successfully pruned chain data") + earliestAvailableSlot := pruneUpto + 1 // Update pruning checkpoint. p.prunedUpto = pruneUpto + // Update the earliest available slot after pruning + if err := p.updateEarliestAvailableSlot(earliestAvailableSlot); err != nil { + return errors.Wrap(err, "update earliest available slot") + } + + log.WithFields(logrus.Fields{ + "prunedUpto": pruneUpto, + "earliestAvailableSlot": earliestAvailableSlot, + "duration": time.Since(tt), + "currentSlot": slot, + "batchSize": defaultPrunableBatchSize, + "numBatches": numBatches, + }).Debug("Successfully pruned chain data") + + return nil +} + +// updateEarliestAvailableSlot updates the earliest available slot via the injected custody updater +// and also persists it to the database. 
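The value handed to the custody updater is always one slot past the last pruned slot. With the numbers used by the test further down (slot ticker fires at slot 80, retention of 2 epochs, 32 slots per epoch), the arithmetic works out as in this small standalone sketch, which is illustrative only:

package main

import "fmt"

// Worked example of the earliest-available-slot arithmetic in prune():
// pruneUpto = currentSlot - retentionEpochs*slotsPerEpoch, earliest = pruneUpto + 1.
func main() {
	const (
		currentSlot     = 80
		retentionEpochs = 2
		slotsPerEpoch   = 32
	)
	pruneUpto := currentSlot - retentionEpochs*slotsPerEpoch // 80 - 64 = 16
	earliest := pruneUpto + 1                                // 17, as asserted in TestPruner_UpdatesEarliestAvailableSlot
	fmt.Println("pruneUpto:", pruneUpto, "earliestAvailableSlot:", earliest)
}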
+func (p *Service) updateEarliestAvailableSlot(earliestAvailableSlot primitives.Slot) error { + if !params.FuluEnabled() { + return nil + } + + // Update the p2p in-memory state + if err := p.custody.UpdateEarliestAvailableSlot(earliestAvailableSlot); err != nil { + return errors.Wrapf(err, "update earliest available slot after pruning to %d", earliestAvailableSlot) + } + + // Persist to database to ensure it survives restarts + if err := p.db.UpdateEarliestAvailableSlot(p.ctx, earliestAvailableSlot); err != nil { + return errors.Wrapf(err, "update earliest available slot in database for slot %d", earliestAvailableSlot) + } + return nil } diff --git a/beacon-chain/db/pruner/pruner_test.go b/beacon-chain/db/pruner/pruner_test.go index e0cc1d254d..55e3c109a2 100644 --- a/beacon-chain/db/pruner/pruner_test.go +++ b/beacon-chain/db/pruner/pruner_test.go @@ -2,6 +2,7 @@ package pruner import ( "context" + "errors" "testing" "time" @@ -15,6 +16,7 @@ import ( dbtest "github.com/OffchainLabs/prysm/v6/beacon-chain/db/testing" "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v6/testing/assert" "github.com/OffchainLabs/prysm/v6/testing/require" logTest "github.com/sirupsen/logrus/hooks/test" ) @@ -62,7 +64,9 @@ func TestPruner_PruningConditions(t *testing.T) { if !tt.backfillCompleted { backfillWaiter = waiter } - p, err := New(ctx, beaconDB, time.Now(), initSyncWaiter, backfillWaiter, WithSlotTicker(slotTicker)) + + mockCustody := &mockCustodyUpdater{} + p, err := New(ctx, beaconDB, time.Now(), initSyncWaiter, backfillWaiter, mockCustody, WithSlotTicker(slotTicker)) require.NoError(t, err) go p.Start() @@ -97,12 +101,14 @@ func TestPruner_PruneSuccess(t *testing.T) { retentionEpochs := primitives.Epoch(2) slotTicker := &slottest.MockTicker{Channel: make(chan primitives.Slot)} + mockCustody := &mockCustodyUpdater{} p, err := New( ctx, beaconDB, time.Now(), nil, nil, + mockCustody, WithSlotTicker(slotTicker), ) require.NoError(t, err) @@ -133,3 +139,242 @@ func TestPruner_PruneSuccess(t *testing.T) { require.NoError(t, p.Stop()) } + +// Mock custody updater for testing +type mockCustodyUpdater struct { + custodyGroupCount uint64 + earliestAvailableSlot primitives.Slot + updateCallCount int +} + +func (m *mockCustodyUpdater) UpdateEarliestAvailableSlot(earliestAvailableSlot primitives.Slot) error { + m.updateCallCount++ + m.earliestAvailableSlot = earliestAvailableSlot + return nil +} + +func TestPruner_UpdatesEarliestAvailableSlot(t *testing.T) { + params.SetupTestConfigCleanup(t) + config := params.BeaconConfig() + config.FuluForkEpoch = 0 // Enable Fulu from epoch 0 + params.OverrideBeaconConfig(config) + + logrus.SetLevel(logrus.DebugLevel) + hook := logTest.NewGlobal() + ctx, cancel := context.WithCancel(t.Context()) + defer cancel() + + beaconDB := dbtest.SetupDB(t) + retentionEpochs := primitives.Epoch(2) + + slotTicker := &slottest.MockTicker{Channel: make(chan primitives.Slot)} + + // Create mock custody updater + mockCustody := &mockCustodyUpdater{ + custodyGroupCount: 4, + earliestAvailableSlot: 0, + } + + // Create pruner with mock custody updater + p, err := New( + ctx, + beaconDB, + time.Now(), + nil, + nil, + mockCustody, + WithSlotTicker(slotTicker), + ) + require.NoError(t, err) + + p.ps = func(current primitives.Slot) primitives.Slot { + return current - primitives.Slot(retentionEpochs)*params.BeaconConfig().SlotsPerEpoch + } + + // Save some blocks to be pruned + for i := primitives.Slot(1); i <= 32; i++ { + blk := util.NewBeaconBlock() + 
blk.Block.Slot = i + wsb, err := blocks.NewSignedBeaconBlock(blk) + require.NoError(t, err) + require.NoError(t, beaconDB.SaveBlock(ctx, wsb)) + } + + // Start pruner and trigger at slot 80 (middle of 3rd epoch) + go p.Start() + currentSlot := primitives.Slot(80) + slotTicker.Channel <- currentSlot + + // Wait for pruning to complete + time.Sleep(100 * time.Millisecond) + + // Check that UpdateEarliestAvailableSlot was called + assert.Equal(t, true, mockCustody.updateCallCount > 0, "UpdateEarliestAvailableSlot should have been called") + + // The earliest available slot should be pruneUpto + 1 + // pruneUpto = currentSlot - retentionEpochs*slotsPerEpoch = 80 - 2*32 = 16 + // So earliest available slot should be 16 + 1 = 17 + expectedEarliestSlot := primitives.Slot(17) + require.Equal(t, expectedEarliestSlot, mockCustody.earliestAvailableSlot, "Earliest available slot should be updated correctly") + require.Equal(t, uint64(4), mockCustody.custodyGroupCount, "Custody group count should be preserved") + + // Verify that no error was logged + for _, entry := range hook.AllEntries() { + if entry.Level == logrus.ErrorLevel { + t.Errorf("Unexpected error log: %s", entry.Message) + } + } + + require.NoError(t, p.Stop()) +} + +// Mock custody updater that returns an error for UpdateEarliestAvailableSlot +type mockCustodyUpdaterWithUpdateError struct { + updateCallCount int +} + +func (m *mockCustodyUpdaterWithUpdateError) UpdateEarliestAvailableSlot(earliestAvailableSlot primitives.Slot) error { + m.updateCallCount++ + return errors.New("failed to update earliest available slot") +} + +func TestWithRetentionPeriod_EnforcesMinimum(t *testing.T) { + // Use minimal config for testing + params.SetupTestConfigCleanup(t) + config := params.MinimalSpecConfig() + params.OverrideBeaconConfig(config) + + ctx := t.Context() + beaconDB := dbtest.SetupDB(t) + + // Get the minimum required epochs (272 + 1 = 273 for minimal) + minRequiredEpochs := primitives.Epoch(params.BeaconConfig().MinEpochsForBlockRequests + 1) + + // Use a slot that's guaranteed to be after the minimum retention period + currentSlot := primitives.Slot(minRequiredEpochs+100) * (params.BeaconConfig().SlotsPerEpoch) + + tests := []struct { + name string + userRetentionEpochs primitives.Epoch + expectedPruneSlot primitives.Slot + description string + }{ + { + name: "User value below minimum - should use minimum", + userRetentionEpochs: 2, // Way below minimum + expectedPruneSlot: currentSlot - primitives.Slot(minRequiredEpochs)*params.BeaconConfig().SlotsPerEpoch, + description: "Should use minimum when user value is too low", + }, + { + name: "User value at minimum", + userRetentionEpochs: minRequiredEpochs, + expectedPruneSlot: currentSlot - primitives.Slot(minRequiredEpochs)*params.BeaconConfig().SlotsPerEpoch, + description: "Should use user value when at minimum", + }, + { + name: "User value above minimum", + userRetentionEpochs: minRequiredEpochs + 10, + expectedPruneSlot: currentSlot - primitives.Slot(minRequiredEpochs+10)*params.BeaconConfig().SlotsPerEpoch, + description: "Should use user value when above minimum", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + hook := logTest.NewGlobal() + logrus.SetLevel(logrus.WarnLevel) + + mockCustody := &mockCustodyUpdater{} + // Create pruner with retention period + p, err := New( + ctx, + beaconDB, + time.Now(), + nil, + nil, + mockCustody, + WithRetentionPeriod(tt.userRetentionEpochs), + ) + require.NoError(t, err) + + // Test the pruning calculation + 
pruneUptoSlot := p.ps(currentSlot) + + // Verify the pruning slot + assert.Equal(t, tt.expectedPruneSlot, pruneUptoSlot, tt.description) + + // Check if warning was logged when value was too low + if tt.userRetentionEpochs < minRequiredEpochs { + assert.LogsContain(t, hook, "Retention period too low, ignoring and using minimum required value") + } + }) + } +} + +func TestPruner_UpdateEarliestSlotError(t *testing.T) { + params.SetupTestConfigCleanup(t) + config := params.BeaconConfig() + config.FuluForkEpoch = 0 // Enable Fulu from epoch 0 + params.OverrideBeaconConfig(config) + + logrus.SetLevel(logrus.DebugLevel) + hook := logTest.NewGlobal() + ctx, cancel := context.WithCancel(t.Context()) + defer cancel() + + beaconDB := dbtest.SetupDB(t) + retentionEpochs := primitives.Epoch(2) + + slotTicker := &slottest.MockTicker{Channel: make(chan primitives.Slot)} + + // Create mock custody updater that returns an error for UpdateEarliestAvailableSlot + mockCustody := &mockCustodyUpdaterWithUpdateError{} + + // Create pruner with mock custody updater + p, err := New( + ctx, + beaconDB, + time.Now(), + nil, + nil, + mockCustody, + WithSlotTicker(slotTicker), + ) + require.NoError(t, err) + + p.ps = func(current primitives.Slot) primitives.Slot { + return current - primitives.Slot(retentionEpochs)*params.BeaconConfig().SlotsPerEpoch + } + + // Save some blocks to be pruned + for i := primitives.Slot(1); i <= 32; i++ { + blk := util.NewBeaconBlock() + blk.Block.Slot = i + wsb, err := blocks.NewSignedBeaconBlock(blk) + require.NoError(t, err) + require.NoError(t, beaconDB.SaveBlock(ctx, wsb)) + } + + // Start pruner and trigger at slot 80 + go p.Start() + currentSlot := primitives.Slot(80) + slotTicker.Channel <- currentSlot + + // Wait for pruning to complete + time.Sleep(100 * time.Millisecond) + + // Should have called UpdateEarliestAvailableSlot + assert.Equal(t, 1, mockCustody.updateCallCount, "UpdateEarliestAvailableSlot should be called") + + // Check that error was logged by the prune function + found := false + for _, entry := range hook.AllEntries() { + if entry.Level == logrus.ErrorLevel && entry.Message == "Failed to prune database" { + found = true + break + } + } + assert.Equal(t, true, found, "Should log error when UpdateEarliestAvailableSlot fails") + + require.NoError(t, p.Stop()) +} diff --git a/beacon-chain/node/node.go b/beacon-chain/node/node.go index 36678afb70..6b305d8d49 100644 --- a/beacon-chain/node/node.go +++ b/beacon-chain/node/node.go @@ -1108,6 +1108,7 @@ func (b *BeaconNode) registerPrunerService(cliCtx *cli.Context) error { genesis, initSyncWaiter(cliCtx.Context, b.initialSyncComplete), backfillService.WaitForCompletion, + b.fetchP2P(), opts..., ) if err != nil { diff --git a/beacon-chain/p2p/custody.go b/beacon-chain/p2p/custody.go index 0ca21da26d..2318b9aaad 100644 --- a/beacon-chain/p2p/custody.go +++ b/beacon-chain/p2p/custody.go @@ -115,6 +115,57 @@ func (s *Service) UpdateCustodyInfo(earliestAvailableSlot primitives.Slot, custo return earliestAvailableSlot, custodyGroupCount, nil } +// UpdateEarliestAvailableSlot updates the earliest available slot. +// +// IMPORTANT: This function should only be called when Fulu is enabled. The caller is responsible +// for checking params.FuluEnabled() before calling this function. 
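The rule enforced below condenses to: any decrease is accepted (backfill may discover older data), and an increase is accepted only while the new value stays at or below the first slot of `currentEpoch - MIN_EPOCHS_FOR_BLOCK_REQUESTS`. The helper below is a sketch of that decision only; its name is invented, but the `slots` and `params` calls are the same ones the implementation uses.

// mayUpdateEarliestSlot is an illustrative condensation of the acceptance rule,
// not a function from this patch.
func mayUpdateEarliestSlot(stored, proposed primitives.Slot, genesisTime time.Time) (bool, error) {
	if proposed <= stored {
		return true, nil // decreases (and no-ops) are always allowed
	}
	currentEpoch := slots.ToEpoch(slots.CurrentSlot(genesisTime))
	minEpochs := primitives.Epoch(params.BeaconConfig().MinEpochsForBlockRequests)
	minRequiredEpoch := primitives.Epoch(0)
	if currentEpoch > minEpochs {
		minRequiredEpoch = currentEpoch - minEpochs
	}
	minRequiredSlot, err := slots.EpochStart(minRequiredEpoch)
	if err != nil {
		return false, err
	}
	// Increases are capped at the first slot of the oldest epoch we are still
	// required to serve, so the node never advertises less than the mandatory window.
	return proposed <= minRequiredSlot, nil
}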
+func (s *Service) UpdateEarliestAvailableSlot(earliestAvailableSlot primitives.Slot) error { + s.custodyInfoLock.Lock() + defer s.custodyInfoLock.Unlock() + + if s.custodyInfo == nil { + return errors.New("no custody info available") + } + + currentSlot := slots.CurrentSlot(s.genesisTime) + currentEpoch := slots.ToEpoch(currentSlot) + + // Allow decrease (for backfill scenarios) + if earliestAvailableSlot < s.custodyInfo.earliestAvailableSlot { + s.custodyInfo.earliestAvailableSlot = earliestAvailableSlot + return nil + } + + // Prevent increase within the MIN_EPOCHS_FOR_BLOCK_REQUESTS period + // This ensures we don't voluntarily refuse to serve mandatory block data + // This check applies regardless of whether we're early or late in the chain + minEpochsForBlocks := primitives.Epoch(params.BeaconConfig().MinEpochsForBlockRequests) + + // Calculate the minimum required epoch (or 0 if we're early in the chain) + minRequiredEpoch := primitives.Epoch(0) + if currentEpoch > minEpochsForBlocks { + minRequiredEpoch = currentEpoch - minEpochsForBlocks + } + + // Convert to slot to ensure we compare at slot-level granularity, not epoch-level + // This prevents allowing increases to slots within minRequiredEpoch that are after its first slot + minRequiredSlot, err := slots.EpochStart(minRequiredEpoch) + if err != nil { + return errors.Wrap(err, "epoch start") + } + + // Prevent any increase that would put earliest slot beyond the minimum required slot + if earliestAvailableSlot > s.custodyInfo.earliestAvailableSlot && earliestAvailableSlot > minRequiredSlot { + return errors.Errorf( + "cannot increase earliest available slot to %d (epoch %d) as it exceeds minimum required slot %d (epoch %d)", + earliestAvailableSlot, slots.ToEpoch(earliestAvailableSlot), minRequiredSlot, minRequiredEpoch, + ) + } + + s.custodyInfo.earliestAvailableSlot = earliestAvailableSlot + return nil +} + // CustodyGroupCountFromPeer retrieves custody group count from a peer. 
// It first tries to get the custody group count from the peer's metadata, // then falls back to the ENR value if the metadata is not available, then diff --git a/beacon-chain/p2p/custody_test.go b/beacon-chain/p2p/custody_test.go index 0a875e1127..5c6b3b29e7 100644 --- a/beacon-chain/p2p/custody_test.go +++ b/beacon-chain/p2p/custody_test.go @@ -4,6 +4,7 @@ import ( "context" "strings" "testing" + "time" "github.com/OffchainLabs/prysm/v6/beacon-chain/core/peerdas" "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/peers" @@ -167,6 +168,148 @@ func TestUpdateCustodyInfo(t *testing.T) { } } +func TestUpdateEarliestAvailableSlot(t *testing.T) { + params.SetupTestConfigCleanup(t) + config := params.BeaconConfig() + config.FuluForkEpoch = 0 // Enable Fulu from epoch 0 + params.OverrideBeaconConfig(config) + + t.Run("Valid update", func(t *testing.T) { + const ( + initialSlot primitives.Slot = 50 + newSlot primitives.Slot = 100 + groupCount uint64 = 5 + ) + + // Set up a scenario where we're far enough in the chain that increasing to newSlot is valid + minEpochsForBlocks := primitives.Epoch(params.BeaconConfig().MinEpochsForBlockRequests) + currentEpoch := minEpochsForBlocks + 100 // Well beyond MIN_EPOCHS_FOR_BLOCK_REQUESTS + currentSlot := primitives.Slot(currentEpoch) * primitives.Slot(params.BeaconConfig().SlotsPerEpoch) + + service := &Service{ + // Set genesis time in the past so currentSlot is the "current" slot + genesisTime: time.Now().Add(-time.Duration(currentSlot) * time.Duration(params.BeaconConfig().SecondsPerSlot) * time.Second), + custodyInfo: &custodyInfo{ + earliestAvailableSlot: initialSlot, + groupCount: groupCount, + }, + } + + err := service.UpdateEarliestAvailableSlot(newSlot) + + require.NoError(t, err) + require.Equal(t, newSlot, service.custodyInfo.earliestAvailableSlot) + require.Equal(t, groupCount, service.custodyInfo.groupCount) // Should preserve group count + }) + + t.Run("Earlier slot - allowed for backfill", func(t *testing.T) { + const initialSlot primitives.Slot = 100 + const earlierSlot primitives.Slot = 50 + + service := &Service{ + genesisTime: time.Now(), + custodyInfo: &custodyInfo{ + earliestAvailableSlot: initialSlot, + groupCount: 5, + }, + } + + err := service.UpdateEarliestAvailableSlot(earlierSlot) + + require.NoError(t, err) + require.Equal(t, earlierSlot, service.custodyInfo.earliestAvailableSlot) // Should decrease for backfill + }) + + t.Run("Prevent increase within MIN_EPOCHS_FOR_BLOCK_REQUESTS - late in chain", func(t *testing.T) { + // Set current time far enough in the future to have a meaningful MIN_EPOCHS_FOR_BLOCK_REQUESTS period + minEpochsForBlocks := primitives.Epoch(params.BeaconConfig().MinEpochsForBlockRequests) + currentEpoch := minEpochsForBlocks + 100 // Well beyond the minimum + currentSlot := primitives.Slot(currentEpoch) * primitives.Slot(params.BeaconConfig().SlotsPerEpoch) + + // Calculate the minimum allowed epoch + minRequiredEpoch := currentEpoch - minEpochsForBlocks + minRequiredSlot := primitives.Slot(minRequiredEpoch) * primitives.Slot(params.BeaconConfig().SlotsPerEpoch) + + // Try to set earliest slot to a value within the MIN_EPOCHS_FOR_BLOCK_REQUESTS period (should fail) + attemptedSlot := minRequiredSlot + 1000 // Within the mandatory retention period + + service := &Service{ + genesisTime: time.Now().Add(-time.Duration(currentSlot) * time.Duration(params.BeaconConfig().SecondsPerSlot) * time.Second), + custodyInfo: &custodyInfo{ + earliestAvailableSlot: minRequiredSlot - 100, // Current value is before the min 
required + groupCount: 5, + }, + } + + err := service.UpdateEarliestAvailableSlot(attemptedSlot) + + require.NotNil(t, err) + require.Equal(t, true, strings.Contains(err.Error(), "cannot increase earliest available slot")) + }) + + t.Run("Prevent increase at epoch boundary - slot precision matters", func(t *testing.T) { + minEpochsForBlocks := primitives.Epoch(params.BeaconConfig().MinEpochsForBlockRequests) + currentEpoch := minEpochsForBlocks + 976 // Current epoch + currentSlot := primitives.Slot(currentEpoch) * primitives.Slot(params.BeaconConfig().SlotsPerEpoch) + + minRequiredEpoch := currentEpoch - minEpochsForBlocks // = 976 + storedEarliestSlot := primitives.Slot(minRequiredEpoch)*primitives.Slot(params.BeaconConfig().SlotsPerEpoch) - 232 // Before minRequired + + // Try to set earliest to slot 8 of the minRequiredEpoch (should fail with slot comparison) + attemptedSlot := primitives.Slot(minRequiredEpoch)*primitives.Slot(params.BeaconConfig().SlotsPerEpoch) + 8 + + service := &Service{ + genesisTime: time.Now().Add(-time.Duration(currentSlot) * time.Duration(params.BeaconConfig().SecondsPerSlot) * time.Second), + custodyInfo: &custodyInfo{ + earliestAvailableSlot: storedEarliestSlot, + groupCount: 5, + }, + } + + err := service.UpdateEarliestAvailableSlot(attemptedSlot) + + require.NotNil(t, err, "Should prevent increasing earliest slot beyond the minimum required SLOT (not just epoch)") + require.Equal(t, true, strings.Contains(err.Error(), "cannot increase earliest available slot")) + }) + + t.Run("Prevent increase within MIN_EPOCHS_FOR_BLOCK_REQUESTS - early in chain", func(t *testing.T) { + minEpochsForBlocks := primitives.Epoch(params.BeaconConfig().MinEpochsForBlockRequests) + currentEpoch := minEpochsForBlocks - 10 // Early in chain, BEFORE we have MIN_EPOCHS_FOR_BLOCK_REQUESTS of history + currentSlot := primitives.Slot(currentEpoch) * primitives.Slot(params.BeaconConfig().SlotsPerEpoch) + + // Current earliest slot is at slot 100 + currentEarliestSlot := primitives.Slot(100) + + // Try to increase earliest slot to slot 1000 (which would be within the mandatory window from currentSlot) + attemptedSlot := primitives.Slot(1000) + + service := &Service{ + genesisTime: time.Now().Add(-time.Duration(currentSlot) * time.Duration(params.BeaconConfig().SecondsPerSlot) * time.Second), + custodyInfo: &custodyInfo{ + earliestAvailableSlot: currentEarliestSlot, + groupCount: 5, + }, + } + + err := service.UpdateEarliestAvailableSlot(attemptedSlot) + + require.NotNil(t, err, "Should prevent increasing earliest slot within the mandatory retention window, even early in chain") + require.Equal(t, true, strings.Contains(err.Error(), "cannot increase earliest available slot")) + }) + + t.Run("Nil custody info - should return error", func(t *testing.T) { + service := &Service{ + genesisTime: time.Now(), + custodyInfo: nil, // No custody info set + } + + err := service.UpdateEarliestAvailableSlot(100) + + require.NotNil(t, err) + require.Equal(t, true, strings.Contains(err.Error(), "no custody info available")) + }) +} + func TestCustodyGroupCountFromPeer(t *testing.T) { const ( expectedENR uint64 = 7 diff --git a/beacon-chain/p2p/interfaces.go b/beacon-chain/p2p/interfaces.go index f648bb53cc..2d2d0993d3 100644 --- a/beacon-chain/p2p/interfaces.go +++ b/beacon-chain/p2p/interfaces.go @@ -126,6 +126,7 @@ type ( EarliestAvailableSlot(ctx context.Context) (primitives.Slot, error) CustodyGroupCount(ctx context.Context) (uint64, error) UpdateCustodyInfo(earliestAvailableSlot primitives.Slot, 
custodyGroupCount uint64) (primitives.Slot, uint64, error) + UpdateEarliestAvailableSlot(earliestAvailableSlot primitives.Slot) error CustodyGroupCountFromPeer(peer.ID) uint64 } ) diff --git a/beacon-chain/p2p/testing/fuzz_p2p.go b/beacon-chain/p2p/testing/fuzz_p2p.go index 3bfea3c5ce..b9dbb71e41 100644 --- a/beacon-chain/p2p/testing/fuzz_p2p.go +++ b/beacon-chain/p2p/testing/fuzz_p2p.go @@ -213,6 +213,11 @@ func (s *FakeP2P) UpdateCustodyInfo(earliestAvailableSlot primitives.Slot, custo return earliestAvailableSlot, custodyGroupCount, nil } +// UpdateEarliestAvailableSlot -- fake. +func (*FakeP2P) UpdateEarliestAvailableSlot(earliestAvailableSlot primitives.Slot) error { + return nil +} + // CustodyGroupCountFromPeer -- fake. func (*FakeP2P) CustodyGroupCountFromPeer(peer.ID) uint64 { return 0 diff --git a/beacon-chain/p2p/testing/p2p.go b/beacon-chain/p2p/testing/p2p.go index b4beaf7633..641172ea07 100644 --- a/beacon-chain/p2p/testing/p2p.go +++ b/beacon-chain/p2p/testing/p2p.go @@ -499,6 +499,15 @@ func (s *TestP2P) UpdateCustodyInfo(earliestAvailableSlot primitives.Slot, custo return s.earliestAvailableSlot, s.custodyGroupCount, nil } +// UpdateEarliestAvailableSlot . +func (s *TestP2P) UpdateEarliestAvailableSlot(earliestAvailableSlot primitives.Slot) error { + s.custodyInfoMut.Lock() + defer s.custodyInfoMut.Unlock() + + s.earliestAvailableSlot = earliestAvailableSlot + return nil +} + // CustodyGroupCountFromPeer retrieves custody group count from a peer. // It first tries to get the custody group count from the peer's metadata, // then falls back to the ENR value if the metadata is not available, then diff --git a/beacon-chain/sync/backfill/BUILD.bazel b/beacon-chain/sync/backfill/BUILD.bazel index 053ee571c9..bbd2691dc4 100644 --- a/beacon-chain/sync/backfill/BUILD.bazel +++ b/beacon-chain/sync/backfill/BUILD.bazel @@ -17,7 +17,6 @@ go_library( importpath = "github.com/OffchainLabs/prysm/v6/beacon-chain/sync/backfill", visibility = ["//visibility:public"], deps = [ - "//beacon-chain/core/helpers:go_default_library", "//beacon-chain/core/signing:go_default_library", "//beacon-chain/das:go_default_library", "//beacon-chain/db:go_default_library", @@ -61,7 +60,6 @@ go_test( ], embed = [":go_default_library"], deps = [ - "//beacon-chain/core/helpers:go_default_library", "//beacon-chain/core/signing:go_default_library", "//beacon-chain/das:go_default_library", "//beacon-chain/db:go_default_library", diff --git a/beacon-chain/sync/backfill/service.go b/beacon-chain/sync/backfill/service.go index 2bf7d4f1f3..fcbd0086fe 100644 --- a/beacon-chain/sync/backfill/service.go +++ b/beacon-chain/sync/backfill/service.go @@ -3,12 +3,12 @@ package backfill import ( "context" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" "github.com/OffchainLabs/prysm/v6/beacon-chain/db/filesystem" "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p" "github.com/OffchainLabs/prysm/v6/beacon-chain/startup" "github.com/OffchainLabs/prysm/v6/beacon-chain/sync" "github.com/OffchainLabs/prysm/v6/beacon-chain/verification" + "github.com/OffchainLabs/prysm/v6/config/params" "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" @@ -348,7 +348,7 @@ func (*Service) Status() error { // minimumBackfillSlot determines the lowest slot that backfill needs to download based on looking back // MIN_EPOCHS_FOR_BLOCK_REQUESTS from the current slot. 
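For a sense of scale, the mainnet value of MIN_EPOCHS_FOR_BLOCK_REQUESTS is 33024 epochs (the "~5 months" figure from the helper removed earlier in this series), so the window backfill must cover is roughly a million slots. A quick standalone check of that arithmetic, included here only as a sketch:

package main

import "fmt"

// Back-of-the-envelope size of the mandatory block-serving window on mainnet.
// 33024 is MIN_EPOCHS_FOR_BLOCK_REQUESTS; 32 and 12 are SLOTS_PER_EPOCH and SECONDS_PER_SLOT.
func main() {
	const (
		minEpochsForBlockRequests = 33024
		slotsPerEpoch             = 32
		secondsPerSlot            = 12
	)
	windowSlots := minEpochsForBlockRequests * slotsPerEpoch
	windowDays := windowSlots * secondsPerSlot / (60 * 60 * 24)
	fmt.Println(windowSlots, "slots ≈", windowDays, "days") // 1056768 slots ≈ 146 days (~5 months)
}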
func minimumBackfillSlot(current primitives.Slot) primitives.Slot { - oe := helpers.MinEpochsForBlockRequests() + oe := primitives.Epoch(params.BeaconConfig().MinEpochsForBlockRequests) if oe > slots.MaxSafeEpoch() { oe = slots.MaxSafeEpoch() } diff --git a/beacon-chain/sync/backfill/service_test.go b/beacon-chain/sync/backfill/service_test.go index d6ba959837..59efb21f0e 100644 --- a/beacon-chain/sync/backfill/service_test.go +++ b/beacon-chain/sync/backfill/service_test.go @@ -5,7 +5,6 @@ import ( "testing" "time" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" "github.com/OffchainLabs/prysm/v6/beacon-chain/db/filesystem" p2ptest "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/testing" "github.com/OffchainLabs/prysm/v6/beacon-chain/startup" @@ -84,7 +83,7 @@ func TestServiceInit(t *testing.T) { } func TestMinimumBackfillSlot(t *testing.T) { - oe := helpers.MinEpochsForBlockRequests() + oe := primitives.Epoch(params.BeaconConfig().MinEpochsForBlockRequests) currSlot := (oe + 100).Mul(uint64(params.BeaconConfig().SlotsPerEpoch)) minSlot := minimumBackfillSlot(primitives.Slot(currSlot)) @@ -109,7 +108,7 @@ func testReadN(ctx context.Context, t *testing.T, c chan batch, n int, into []ba } func TestBackfillMinSlotDefault(t *testing.T) { - oe := helpers.MinEpochsForBlockRequests() + oe := primitives.Epoch(params.BeaconConfig().MinEpochsForBlockRequests) current := primitives.Slot((oe + 100).Mul(uint64(params.BeaconConfig().SlotsPerEpoch))) s := &Service{} specMin := minimumBackfillSlot(current) diff --git a/changelog/satushh-update-easlot-pruning.md b/changelog/satushh-update-easlot-pruning.md new file mode 100644 index 0000000000..2944788dfc --- /dev/null +++ b/changelog/satushh-update-easlot-pruning.md @@ -0,0 +1,3 @@ +### Added + +- Update the earliest available slot after pruning operations in beacon chain database pruner. This ensures the P2P layer accurately knows which historical data is available after pruning, preventing nodes from advertising or attempting to serve data that has been pruned. \ No newline at end of file From 96429c5089c918b935920fddeb3b0174b30cf32a Mon Sep 17 00:00:00 2001 From: Manu NALEPA Date: Tue, 21 Oct 2025 16:37:04 +0200 Subject: [PATCH 044/103] `updateCustodyInfoInDB`: Use `NumberOfCustodyGroups` instead of `NumberOfColumns`. (#15908) * `updateCustodyInfoInDB`: Add tests. * `updateCustodyInfoInDB`: Use `NumberOfCustodyGroups` instead of `NumberOfColumns`. * Add changelog. * Fix Potuz's comment. 
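The one-line fix in this commit matters because the custody group count a node advertises is denominated in custody groups, not in column indices; they are distinct config values, and the test added here sets them to 64 and 128 respectively so a mix-up cannot go unnoticed. A condensed sketch of the corrected selection follows; the config field names are real, but the helper itself is illustrative.

// computeCGC mirrors the corrected branch of updateCustodyInfoInDB (sketch only).
func computeCGC(subscribedToAllDataSubnets bool) uint64 {
	cfg := params.BeaconConfig()
	if subscribedToAllDataSubnets {
		// Custody everything: advertise the number of custody groups (64 in the
		// test below), not the number of columns (128 in the test below).
		return cfg.NumberOfCustodyGroups
	}
	return cfg.CustodyRequirement
}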
--- beacon-chain/blockchain/BUILD.bazel | 1 + beacon-chain/blockchain/service.go | 2 +- beacon-chain/blockchain/service_test.go | 102 ++++++++++++++++++++++++ changelog/manu-number-custody-groups.md | 2 + 4 files changed, 106 insertions(+), 1 deletion(-) create mode 100644 changelog/manu-number-custody-groups.md diff --git a/beacon-chain/blockchain/BUILD.bazel b/beacon-chain/blockchain/BUILD.bazel index 86f0718b9b..c64c8edd7c 100644 --- a/beacon-chain/blockchain/BUILD.bazel +++ b/beacon-chain/blockchain/BUILD.bazel @@ -173,6 +173,7 @@ go_test( "//beacon-chain/state/state-native:go_default_library", "//beacon-chain/state/stategen:go_default_library", "//beacon-chain/verification:go_default_library", + "//cmd/beacon-chain/flags:go_default_library", "//config/features:go_default_library", "//config/fieldparams:go_default_library", "//config/params:go_default_library", diff --git a/beacon-chain/blockchain/service.go b/beacon-chain/blockchain/service.go index dd1ba13d3f..66b68f9114 100644 --- a/beacon-chain/blockchain/service.go +++ b/beacon-chain/blockchain/service.go @@ -493,7 +493,7 @@ func (s *Service) updateCustodyInfoInDB(slot primitives.Slot) (primitives.Slot, // Compute the custody group count. custodyGroupCount := custodyRequirement if isSubscribedToAllDataSubnets { - custodyGroupCount = beaconConfig.NumberOfColumns + custodyGroupCount = beaconConfig.NumberOfCustodyGroups } // Safely compute the fulu fork slot. diff --git a/beacon-chain/blockchain/service_test.go b/beacon-chain/blockchain/service_test.go index 9526e30d32..5ad85bebc3 100644 --- a/beacon-chain/blockchain/service_test.go +++ b/beacon-chain/blockchain/service_test.go @@ -23,9 +23,11 @@ import ( "github.com/OffchainLabs/prysm/v6/beacon-chain/startup" state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" "github.com/OffchainLabs/prysm/v6/beacon-chain/state/stategen" + "github.com/OffchainLabs/prysm/v6/cmd/beacon-chain/flags" "github.com/OffchainLabs/prysm/v6/config/features" fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" "github.com/OffchainLabs/prysm/v6/config/params" + "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" consensusblocks "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" @@ -596,3 +598,103 @@ func TestNotifyIndex(t *testing.T) { t.Errorf("Notifier channel did not receive the index") } } + +func TestUpdateCustodyInfoInDB(t *testing.T) { + const ( + fuluForkEpoch = 10 + custodyRequirement = uint64(4) + earliestStoredSlot = primitives.Slot(12) + numberOfCustodyGroups = uint64(64) + numberOfColumns = uint64(128) + ) + + params.SetupTestConfigCleanup(t) + cfg := params.BeaconConfig() + cfg.FuluForkEpoch = fuluForkEpoch + cfg.CustodyRequirement = custodyRequirement + cfg.NumberOfCustodyGroups = numberOfCustodyGroups + cfg.NumberOfColumns = numberOfColumns + params.OverrideBeaconConfig(cfg) + + ctx := t.Context() + pbBlock := util.NewBeaconBlock() + pbBlock.Block.Slot = 12 + signedBeaconBlock, err := blocks.NewSignedBeaconBlock(pbBlock) + require.NoError(t, err) + + roBlock, err := blocks.NewROBlock(signedBeaconBlock) + require.NoError(t, err) + + t.Run("CGC increases before fulu", func(t *testing.T) { + service, requirements := minimalTestService(t) + err = requirements.db.SaveBlock(ctx, roBlock) + require.NoError(t, err) + + // Before Fulu + // ----------- + actualEas, actualCgc, err := service.updateCustodyInfoInDB(15) + 
require.NoError(t, err) + require.Equal(t, earliestStoredSlot, actualEas) + require.Equal(t, custodyRequirement, actualCgc) + + actualEas, actualCgc, err = service.updateCustodyInfoInDB(17) + require.NoError(t, err) + require.Equal(t, earliestStoredSlot, actualEas) + require.Equal(t, custodyRequirement, actualCgc) + + resetFlags := flags.Get() + gFlags := new(flags.GlobalFlags) + gFlags.SubscribeAllDataSubnets = true + flags.Init(gFlags) + defer flags.Init(resetFlags) + + actualEas, actualCgc, err = service.updateCustodyInfoInDB(19) + require.NoError(t, err) + require.Equal(t, earliestStoredSlot, actualEas) + require.Equal(t, numberOfCustodyGroups, actualCgc) + + // After Fulu + // ---------- + actualEas, actualCgc, err = service.updateCustodyInfoInDB(fuluForkEpoch*primitives.Slot(cfg.SlotsPerEpoch) + 1) + require.NoError(t, err) + require.Equal(t, earliestStoredSlot, actualEas) + require.Equal(t, numberOfCustodyGroups, actualCgc) + }) + + t.Run("CGC increases after fulu", func(t *testing.T) { + service, requirements := minimalTestService(t) + err = requirements.db.SaveBlock(ctx, roBlock) + require.NoError(t, err) + + // Before Fulu + // ----------- + actualEas, actualCgc, err := service.updateCustodyInfoInDB(15) + require.NoError(t, err) + require.Equal(t, earliestStoredSlot, actualEas) + require.Equal(t, custodyRequirement, actualCgc) + + actualEas, actualCgc, err = service.updateCustodyInfoInDB(17) + require.NoError(t, err) + require.Equal(t, earliestStoredSlot, actualEas) + require.Equal(t, custodyRequirement, actualCgc) + + // After Fulu + // ---------- + resetFlags := flags.Get() + gFlags := new(flags.GlobalFlags) + gFlags.SubscribeAllDataSubnets = true + flags.Init(gFlags) + defer flags.Init(resetFlags) + + slot := fuluForkEpoch*primitives.Slot(cfg.SlotsPerEpoch) + 1 + actualEas, actualCgc, err = service.updateCustodyInfoInDB(slot) + require.NoError(t, err) + require.Equal(t, slot, actualEas) + require.Equal(t, numberOfCustodyGroups, actualCgc) + + actualEas, actualCgc, err = service.updateCustodyInfoInDB(slot + 2) + require.NoError(t, err) + require.Equal(t, slot, actualEas) + require.Equal(t, numberOfCustodyGroups, actualCgc) + }) +} diff --git a/changelog/manu-number-custody-groups.md b/changelog/manu-number-custody-groups.md new file mode 100644 index 0000000000..1d3710dc95 --- /dev/null +++ b/changelog/manu-number-custody-groups.md @@ -0,0 +1,2 @@ +### Fixed +- `updateCustodyInfoInDB`: Use `NumberOfCustodyGroups` instead of `NumberOfColumns`. 
\ No newline at end of file From 7c3e45637f1a4c890ea972d2e781cf38b9734d0f Mon Sep 17 00:00:00 2001 From: terence Date: Tue, 21 Oct 2025 08:29:46 -0700 Subject: [PATCH 045/103] Fix proposer to use advanced state for sync committee position calculation (#15905) * Sync committee use correct state to calculate position * Unit test --- .../rpc/prysm/v1alpha1/validator/proposer.go | 2 +- .../v1alpha1/validator/proposer_altair.go | 15 ++- .../validator/proposer_altair_test.go | 104 +++++++++++++++++- changelog/ttsao_fix-sync-aggregate-state.md | 3 + 4 files changed, 109 insertions(+), 15 deletions(-) create mode 100644 changelog/ttsao_fix-sync-aggregate-state.md diff --git a/beacon-chain/rpc/prysm/v1alpha1/validator/proposer.go b/beacon-chain/rpc/prysm/v1alpha1/validator/proposer.go index ad326a8af1..0d97b31afa 100644 --- a/beacon-chain/rpc/prysm/v1alpha1/validator/proposer.go +++ b/beacon-chain/rpc/prysm/v1alpha1/validator/proposer.go @@ -229,7 +229,7 @@ func (vs *Server) BuildBlockParallel(ctx context.Context, sBlk interfaces.Signed sBlk.SetVoluntaryExits(vs.getExits(head, sBlk.Block().Slot())) // Set sync aggregate. New in Altair. - vs.setSyncAggregate(ctx, sBlk) + vs.setSyncAggregate(ctx, sBlk, head) // Set bls to execution change. New in Capella. vs.setBlsToExecData(sBlk, head) diff --git a/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_altair.go b/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_altair.go index ad6c50ba19..74de8a4030 100644 --- a/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_altair.go +++ b/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_altair.go @@ -5,6 +5,7 @@ import ( "context" "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v6/beacon-chain/state" "github.com/OffchainLabs/prysm/v6/config/params" "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" @@ -20,12 +21,12 @@ import ( "github.com/prysmaticlabs/go-bitfield" ) -func (vs *Server) setSyncAggregate(ctx context.Context, blk interfaces.SignedBeaconBlock) { +func (vs *Server) setSyncAggregate(ctx context.Context, blk interfaces.SignedBeaconBlock, headState state.BeaconState) { if blk.Version() < version.Altair { return } - syncAggregate, err := vs.getSyncAggregate(ctx, slots.PrevSlot(blk.Block().Slot()), blk.Block().ParentRoot()) + syncAggregate, err := vs.getSyncAggregate(ctx, slots.PrevSlot(blk.Block().Slot()), blk.Block().ParentRoot(), headState) if err != nil { log.WithError(err).Error("Could not get sync aggregate") emptySig := [96]byte{0xC0} @@ -47,7 +48,7 @@ func (vs *Server) setSyncAggregate(ctx context.Context, blk interfaces.SignedBea // getSyncAggregate retrieves the sync contributions from the pool to construct the sync aggregate object. // The contributions are filtered based on matching of the input root and slot then profitability. 
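The substance of this change is that the state used to resolve sync committee positions is the same prepared head state the block is being built on, threaded down from `BuildBlockParallel`, rather than a head state re-fetched inside aggregation that may already have crossed into the next sync committee period. The annotation below sketches the call chain after the patch; only the signatures visible in this diff are assumed, and it is not compilable on its own.

// BuildBlockParallel now passes the prepared head state down the chain:
//
//   vs.setSyncAggregate(ctx, sBlk, head)
//     -> vs.getSyncAggregate(ctx, slots.PrevSlot(slot), parentRoot, head)
//       -> vs.aggregatedSyncCommitteeMessages(ctx, slot, root, poolContributions, head)
//         -> helpers.CurrentPeriodPositions(head, messageIndices)
//
// Previously the last step used vs.HeadFetcher.HeadState(ctx), which at a sync
// committee period boundary could return a state with the rotated committee and
// therefore wrong (or missing) positions for the messages in the pool.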
-func (vs *Server) getSyncAggregate(ctx context.Context, slot primitives.Slot, root [32]byte) (*ethpb.SyncAggregate, error) { +func (vs *Server) getSyncAggregate(ctx context.Context, slot primitives.Slot, root [32]byte, headState state.BeaconState) (*ethpb.SyncAggregate, error) { _, span := trace.StartSpan(ctx, "ProposerServer.getSyncAggregate") defer span.End() @@ -62,7 +63,7 @@ func (vs *Server) getSyncAggregate(ctx context.Context, slot primitives.Slot, ro // Contributions have to match the input root proposerContributions := proposerSyncContributions(poolContributions).filterByBlockRoot(root) - aggregatedContributions, err := vs.aggregatedSyncCommitteeMessages(ctx, slot, root, poolContributions) + aggregatedContributions, err := vs.aggregatedSyncCommitteeMessages(ctx, slot, root, poolContributions, headState) if err != nil { return nil, errors.Wrap(err, "could not get aggregated sync committee messages") } @@ -123,6 +124,7 @@ func (vs *Server) aggregatedSyncCommitteeMessages( slot primitives.Slot, root [32]byte, poolContributions []*ethpb.SyncCommitteeContribution, + st state.BeaconState, ) ([]*ethpb.SyncCommitteeContribution, error) { subcommitteeCount := params.BeaconConfig().SyncCommitteeSubnetCount subcommitteeSize := params.BeaconConfig().SyncCommitteeSize / subcommitteeCount @@ -146,10 +148,7 @@ func (vs *Server) aggregatedSyncCommitteeMessages( messageSigs = append(messageSigs, msg.Signature) } } - st, err := vs.HeadFetcher.HeadState(ctx) - if err != nil { - return nil, errors.Wrap(err, "could not get head state") - } + positions, err := helpers.CurrentPeriodPositions(st, messageIndices) if err != nil { return nil, errors.Wrap(err, "could not get sync committee positions") diff --git a/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_altair_test.go b/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_altair_test.go index 63123eb70e..440d29826e 100644 --- a/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_altair_test.go +++ b/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_altair_test.go @@ -9,6 +9,7 @@ import ( mockSync "github.com/OffchainLabs/prysm/v6/beacon-chain/sync/initial-sync/testing" "github.com/OffchainLabs/prysm/v6/config/params" "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" "github.com/OffchainLabs/prysm/v6/crypto/bls" "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" @@ -51,15 +52,15 @@ func TestProposer_GetSyncAggregate_OK(t *testing.T) { require.NoError(t, proposerServer.SyncCommitteePool.SaveSyncCommitteeContribution(cont)) } - aggregate, err := proposerServer.getSyncAggregate(t.Context(), 1, bytesutil.ToBytes32(conts[0].BlockRoot)) + aggregate, err := proposerServer.getSyncAggregate(t.Context(), 1, bytesutil.ToBytes32(conts[0].BlockRoot), st) require.NoError(t, err) require.DeepEqual(t, bitfield.Bitvector32{0xf, 0xf, 0xf, 0xf}, aggregate.SyncCommitteeBits) - aggregate, err = proposerServer.getSyncAggregate(t.Context(), 2, bytesutil.ToBytes32(conts[0].BlockRoot)) + aggregate, err = proposerServer.getSyncAggregate(t.Context(), 2, bytesutil.ToBytes32(conts[0].BlockRoot), st) require.NoError(t, err) require.DeepEqual(t, bitfield.Bitvector32{0xaa, 0xaa, 0xaa, 0xaa}, aggregate.SyncCommitteeBits) - aggregate, err = proposerServer.getSyncAggregate(t.Context(), 3, bytesutil.ToBytes32(conts[0].BlockRoot)) + aggregate, err = proposerServer.getSyncAggregate(t.Context(), 3, bytesutil.ToBytes32(conts[0].BlockRoot), st) 
require.NoError(t, err) require.DeepEqual(t, bitfield.NewBitvector32(), aggregate.SyncCommitteeBits) } @@ -68,7 +69,7 @@ func TestServer_SetSyncAggregate_EmptyCase(t *testing.T) { b, err := blocks.NewSignedBeaconBlock(util.NewBeaconBlockAltair()) require.NoError(t, err) s := &Server{} // Sever is not initialized with sync committee pool. - s.setSyncAggregate(t.Context(), b) + s.setSyncAggregate(t.Context(), b, nil) agg, err := b.Block().Body().SyncAggregate() require.NoError(t, err) @@ -138,7 +139,7 @@ func TestProposer_GetSyncAggregate_IncludesSyncCommitteeMessages(t *testing.T) { } // The final sync aggregates must have indexes [0,1,2,3] set for both subcommittees - sa, err := proposerServer.getSyncAggregate(t.Context(), 1, r) + sa, err := proposerServer.getSyncAggregate(t.Context(), 1, r, st) require.NoError(t, err) assert.Equal(t, true, sa.SyncCommitteeBits.BitAt(0)) assert.Equal(t, true, sa.SyncCommitteeBits.BitAt(1)) @@ -194,8 +195,99 @@ func Test_aggregatedSyncCommitteeMessages_NoIntersectionWithPoolContributions(t BlockRoot: r[:], } - aggregated, err := proposerServer.aggregatedSyncCommitteeMessages(t.Context(), 1, r, []*ethpb.SyncCommitteeContribution{cont}) + aggregated, err := proposerServer.aggregatedSyncCommitteeMessages(t.Context(), 1, r, []*ethpb.SyncCommitteeContribution{cont}, st) require.NoError(t, err) require.Equal(t, 1, len(aggregated)) assert.Equal(t, false, aggregated[0].AggregationBits.BitAt(3)) } + +func TestGetSyncAggregate_CorrectStateAtSyncCommitteePeriodBoundary(t *testing.T) { + helpers.ClearCache() + syncPeriodBoundaryEpoch := primitives.Epoch(274176) // Real epoch from the bug report + slotsPerEpoch := params.BeaconConfig().SlotsPerEpoch + + preEpochState, keys := util.DeterministicGenesisStateAltair(t, 100) + require.NoError(t, preEpochState.SetSlot(primitives.Slot(syncPeriodBoundaryEpoch)*slotsPerEpoch-1)) // Last slot of previous epoch + + postEpochState := preEpochState.Copy() + require.NoError(t, postEpochState.SetSlot(primitives.Slot(syncPeriodBoundaryEpoch)*slotsPerEpoch+2)) // After 2 missed slots + + oldCommittee := ðpb.SyncCommittee{ + Pubkeys: make([][]byte, params.BeaconConfig().SyncCommitteeSize), + } + newCommittee := ðpb.SyncCommittee{ + Pubkeys: make([][]byte, params.BeaconConfig().SyncCommitteeSize), + } + + for i := 0; i < int(params.BeaconConfig().SyncCommitteeSize); i++ { + if i < len(keys) { + oldCommittee.Pubkeys[i] = keys[i%len(keys)].PublicKey().Marshal() + // Use different keys for new committee to simulate rotation + newCommittee.Pubkeys[i] = keys[(i+10)%len(keys)].PublicKey().Marshal() + } + } + + require.NoError(t, preEpochState.SetCurrentSyncCommittee(oldCommittee)) + require.NoError(t, postEpochState.SetCurrentSyncCommittee(newCommittee)) + + mockChainService := &chainmock.ChainService{ + State: postEpochState, + } + + proposerServer := &Server{ + HeadFetcher: mockChainService, + SyncChecker: &mockSync.Sync{IsSyncing: false}, + SyncCommitteePool: synccommittee.NewStore(), + } + + slot := primitives.Slot(syncPeriodBoundaryEpoch)*slotsPerEpoch + 1 // First slot of new epoch + blockRoot := [32]byte{0x01, 0x02, 0x03} + + msg1 := ðpb.SyncCommitteeMessage{ + Slot: slot, + BlockRoot: blockRoot[:], + ValidatorIndex: 0, // This validator is in position 0 of OLD committee + Signature: bls.NewAggregateSignature().Marshal(), + } + msg2 := ðpb.SyncCommitteeMessage{ + Slot: slot, + BlockRoot: blockRoot[:], + ValidatorIndex: 1, // This validator is in position 1 of OLD committee + Signature: bls.NewAggregateSignature().Marshal(), + } + + 
require.NoError(t, proposerServer.SyncCommitteePool.SaveSyncCommitteeMessage(msg1)) + require.NoError(t, proposerServer.SyncCommitteePool.SaveSyncCommitteeMessage(msg2)) + + aggregateWrongState, err := proposerServer.getSyncAggregate(t.Context(), slot, blockRoot, postEpochState) + require.NoError(t, err) + + aggregateCorrectState, err := proposerServer.getSyncAggregate(t.Context(), slot, blockRoot, preEpochState) + require.NoError(t, err) + + wrongStateBits := bitfield.Bitlist(aggregateWrongState.SyncCommitteeBits) + correctStateBits := bitfield.Bitlist(aggregateCorrectState.SyncCommitteeBits) + + wrongStateHasValidators := false + correctStateHasValidators := false + + for i := 0; i < len(wrongStateBits); i++ { + if wrongStateBits[i] != 0 { + wrongStateHasValidators = true + break + } + } + + for i := 0; i < len(correctStateBits); i++ { + if correctStateBits[i] != 0 { + correctStateHasValidators = true + break + } + } + + assert.Equal(t, true, correctStateHasValidators, "Correct state should include validators that sent messages") + assert.Equal(t, false, wrongStateHasValidators, "Wrong state should not find validators in incorrect sync committee") + + t.Logf("Wrong state aggregate bits: %x (has validators: %v)", wrongStateBits, wrongStateHasValidators) + t.Logf("Correct state aggregate bits: %x (has validators: %v)", correctStateBits, correctStateHasValidators) +} diff --git a/changelog/ttsao_fix-sync-aggregate-state.md b/changelog/ttsao_fix-sync-aggregate-state.md new file mode 100644 index 0000000000..d4401c58db --- /dev/null +++ b/changelog/ttsao_fix-sync-aggregate-state.md @@ -0,0 +1,3 @@ +### Fixed + +- Sync committee uses correct state to calculate position \ No newline at end of file From 253f91930ab5f9506f7133c89dcfd1b3463001fc Mon Sep 17 00:00:00 2001 From: james-prysm <90280386+james-prysm@users.noreply.github.com> Date: Tue, 21 Oct 2025 11:46:44 -0500 Subject: [PATCH 046/103] changelog v6.1.3 (#15901) * updating changelog * adding changelog * kasey's comment --- CHANGELOG.md | 40 ++++++++++++++++++- ...ka_feature-slasher-feed-use-service-ctx.md | 3 -- ..._ssz-ql-use-fastssz-generated-functions.md | 3 -- .../james-prysm_cleanup-process-aggregate.md | 3 -- changelog/james-prysm_fix-block-event.md | 3 -- changelog/james-prysm_fix-web3signer-e2e.md | 7 ---- changelog/james-prysm_v6.1.3.md | 3 ++ changelog/kasey_att-batch-5ms-deadline.md | 2 - changelog/manu-adjust-log-levels.md | 2 - changelog/manu-blob-sidecars-beacon-api.md | 2 - changelog/manu-check-commitment-count.md | 2 - .../manu-data-column-retention-period.md | 2 - changelog/manu-gracefully-close-stream.md | 2 - changelog/manu-grandine-known-agents.md | 2 - changelog/manu-has-at-least-one-index.md | 2 - changelog/marcopolo_push-nxynxywxtlpo.md | 2 - ...k_fix-attestation-committee-index-bound.md | 3 -- changelog/potuz_fix_head_event.md | 3 -- changelog/potuz_invalid_not_descendant.md | 3 -- changelog/potuz_invalid_sig.md | 3 -- changelog/potuz_redundant_check.md | 3 -- changelog/pvl-v6.1.2.md | 3 -- changelog/radek_api-individual-failure.md | 3 -- changelog/radek_read-non-json-error.md | 3 -- changelog/radek_rewards-no-verify.md | 3 -- changelog/satushh-gossip.md | 3 -- changelog/syjn99_ssz-ql-fastssz-sizessz.md | 7 ---- changelog/syjn99_ssz-ql-index-accessing.md | 3 -- changelog/ttsao_handle-relay-502-errors.md | 3 -- ...optimize-pending-attestation-processing.md | 3 -- 30 files changed, 42 insertions(+), 84 deletions(-) delete mode 100644 changelog/Galoretka_feature-slasher-feed-use-service-ctx.md delete mode 100644 
changelog/fernantho_ssz-ql-use-fastssz-generated-functions.md delete mode 100644 changelog/james-prysm_cleanup-process-aggregate.md delete mode 100644 changelog/james-prysm_fix-block-event.md delete mode 100644 changelog/james-prysm_fix-web3signer-e2e.md create mode 100644 changelog/james-prysm_v6.1.3.md delete mode 100644 changelog/kasey_att-batch-5ms-deadline.md delete mode 100644 changelog/manu-adjust-log-levels.md delete mode 100644 changelog/manu-blob-sidecars-beacon-api.md delete mode 100644 changelog/manu-check-commitment-count.md delete mode 100644 changelog/manu-data-column-retention-period.md delete mode 100644 changelog/manu-gracefully-close-stream.md delete mode 100644 changelog/manu-grandine-known-agents.md delete mode 100644 changelog/manu-has-at-least-one-index.md delete mode 100644 changelog/marcopolo_push-nxynxywxtlpo.md delete mode 100644 changelog/phrwlk_fix-attestation-committee-index-bound.md delete mode 100644 changelog/potuz_fix_head_event.md delete mode 100644 changelog/potuz_invalid_not_descendant.md delete mode 100644 changelog/potuz_invalid_sig.md delete mode 100644 changelog/potuz_redundant_check.md delete mode 100644 changelog/pvl-v6.1.2.md delete mode 100644 changelog/radek_api-individual-failure.md delete mode 100644 changelog/radek_read-non-json-error.md delete mode 100644 changelog/radek_rewards-no-verify.md delete mode 100644 changelog/satushh-gossip.md delete mode 100644 changelog/syjn99_ssz-ql-fastssz-sizessz.md delete mode 100644 changelog/syjn99_ssz-ql-index-accessing.md delete mode 100644 changelog/ttsao_handle-relay-502-errors.md delete mode 100644 changelog/ttsao_optimize-pending-attestation-processing.md diff --git a/CHANGELOG.md b/CHANGELOG.md index eefbd31817..95754b1503 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,6 +4,44 @@ All notable changes to this project will be documented in this file. The format is based on Keep a Changelog, and this project adheres to Semantic Versioning. +## [v6.1.3](https://github.com/prysmaticlabs/prysm/compare/v6.1.2...v6.1.3) - 2025-10-20 + +This release has several important beacon API and p2p fixes. + +### Added + +- Add Grandine to P2P known agents. (Useful for metrics). [[PR]](https://github.com/prysmaticlabs/prysm/pull/15829) +- Delegate sszInfo HashTreeRoot to FastSSZ-generated implementations via SSZObject, enabling roots calculation for generated types while avoiding duplicate logic. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15805) +- SSZ-QL: Use `fastssz`'s `SizeSSZ` method for calculating the size of `Container` type. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15864) +- SSZ-QL: Access n-th element in `List`/`Vector`. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15767) + +### Changed + +- Do not verify block data when calculating rewards. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15819) +- Process pending attestations after pending blocks are cleared. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15824) +- updated web3signer to 25.9.1. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15832) +- Gracefully handle submit blind block returning 502 errors. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15848) +- Improve returning individual message errors from Beacon API. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15835) +- SSZ-QL: Clarify `Size` method with more sophisticated `SSZType`s. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15864) + +### Fixed + +- Use service context and continue on slasher attestation errors (#15803). 
[[PR]](https://github.com/prysmaticlabs/prysm/pull/15803) +- Block events are no longer sent on certain block processing failures; they are now sent only when the block is non-canonical, when the block is canonical but getFCUArgs fails, or when processing fully succeeds. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15814) +- Fixed web3signer e2e issues caused by a regression in old fork support. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15832) +- Do not mark blocks as invalid from ErrNotDescendantOfFinalized. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15846) +- Fixed [#15812](https://github.com/OffchainLabs/prysm/issues/15812): Gossip attestation validation incorrectly rejecting attestations that arrive before their referenced blocks. Previously, attestations were saved to the pending queue but immediately rejected by forkchoice validation, causing "not descendant of finalized checkpoint" errors. Now attestations for missing blocks return `ValidationIgnore` without error, allowing them to be properly processed when their blocks arrive. This eliminates false positive rejections and prevents potential incorrect peer downscoring during network congestion. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15840) +- Mark the block as invalid if it has an invalid signature. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15847) +- Display error messages from the server verbatim when they are not encoded as `application/json`. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15860) +- `HasAtLeastOneIndex`: Check that the index is not too high. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15865) +- Fix the `/eth/v1/beacon/blob_sidecars/` beacon API when the Fulu fork epoch is set to the far future epoch. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15867) +- `dataColumnSidecarsByRangeRPCHandler`: Gracefully close the stream if there is no data to return. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15866) +- `VerifyDataColumnSidecar`: Check that there are not too many commitments. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15859) +- `WithDataColumnRetentionEpochs`: Use `dataColumnRetentionEpoch` instead of `blobColumnRetentionEpoch`. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15872) +- Mark epoch transition correctly on new head events. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15871) +- Reject committee index >= committees_per_slot in unaggregated attestation validation. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15855) +- Decreased attestation gossip validation batch deadline to 5ms. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15882) + ## [v6.1.2](https://github.com/prysmaticlabs/prysm/compare/v6.1.1...v6.1.2) - 2025-10-10 This release has several important fixes to improve Prysm's peering, stability, and attestation inclusion on mainnet and all testnets. All node operators are encouraged to update to this release as soon as practical for the best mainnet performance. @@ -3759,4 +3797,4 @@ There are no security updates in this release. 
# Older than v2.0.0 -For changelog history for releases older than v2.0.0, please refer to https://github.com/prysmaticlabs/prysm/releases +For changelog history for releases older than v2.0.0, please refer to https://github.com/prysmaticlabs/prysm/releases \ No newline at end of file diff --git a/changelog/Galoretka_feature-slasher-feed-use-service-ctx.md b/changelog/Galoretka_feature-slasher-feed-use-service-ctx.md deleted file mode 100644 index a5d0b8752c..0000000000 --- a/changelog/Galoretka_feature-slasher-feed-use-service-ctx.md +++ /dev/null @@ -1,3 +0,0 @@ -### Fixed - -- Use service context and continue on slasher attestation errors (#15803). diff --git a/changelog/fernantho_ssz-ql-use-fastssz-generated-functions.md b/changelog/fernantho_ssz-ql-use-fastssz-generated-functions.md deleted file mode 100644 index 1743dca5cd..0000000000 --- a/changelog/fernantho_ssz-ql-use-fastssz-generated-functions.md +++ /dev/null @@ -1,3 +0,0 @@ -### Added - -- Delegate sszInfo HashTreeRoot to FastSSZ-generated implementations via SSZObject, enabling roots calculation for generated types while avoiding duplicate logic. diff --git a/changelog/james-prysm_cleanup-process-aggregate.md b/changelog/james-prysm_cleanup-process-aggregate.md deleted file mode 100644 index b41b7f4fb4..0000000000 --- a/changelog/james-prysm_cleanup-process-aggregate.md +++ /dev/null @@ -1,3 +0,0 @@ -### Ignored - -- Small code changes for reusability and readability to processAggregate. \ No newline at end of file diff --git a/changelog/james-prysm_fix-block-event.md b/changelog/james-prysm_fix-block-event.md deleted file mode 100644 index 6b290e1563..0000000000 --- a/changelog/james-prysm_fix-block-event.md +++ /dev/null @@ -1,3 +0,0 @@ -### Fixed - -- block event probably shouldn't be sent on certain block processing failures, now sends only on successing processing Block is NON-CANONICAL, Block IS CANONICAL but getFCUArgs FAILS, and Full success \ No newline at end of file diff --git a/changelog/james-prysm_fix-web3signer-e2e.md b/changelog/james-prysm_fix-web3signer-e2e.md deleted file mode 100644 index 87f95f2d3f..0000000000 --- a/changelog/james-prysm_fix-web3signer-e2e.md +++ /dev/null @@ -1,7 +0,0 @@ -### Fixed - -- Fixed web3signer e2e, issues caused due to a regression on old fork support - -### Changed - -- updated web3signer to 25.9.1 diff --git a/changelog/james-prysm_v6.1.3.md b/changelog/james-prysm_v6.1.3.md new file mode 100644 index 0000000000..a49eaf09db --- /dev/null +++ b/changelog/james-prysm_v6.1.3.md @@ -0,0 +1,3 @@ +### Ignored + +- Changelog entries for v6.1.3 through v6.1.2 \ No newline at end of file diff --git a/changelog/kasey_att-batch-5ms-deadline.md b/changelog/kasey_att-batch-5ms-deadline.md deleted file mode 100644 index 8afb133ac5..0000000000 --- a/changelog/kasey_att-batch-5ms-deadline.md +++ /dev/null @@ -1,2 +0,0 @@ -### Fixed -- Decreased attestation gossip validation batch deadline to 5ms. diff --git a/changelog/manu-adjust-log-levels.md b/changelog/manu-adjust-log-levels.md deleted file mode 100644 index 64e0826df1..0000000000 --- a/changelog/manu-adjust-log-levels.md +++ /dev/null @@ -1,2 +0,0 @@ -### Ignored -- Data column sidecars fetch: Adjust log levels. 
diff --git a/changelog/manu-blob-sidecars-beacon-api.md b/changelog/manu-blob-sidecars-beacon-api.md deleted file mode 100644 index c5360e80e9..0000000000 --- a/changelog/manu-blob-sidecars-beacon-api.md +++ /dev/null @@ -1,2 +0,0 @@ -### Fixed -- Fix `/eth/v1/beacon/blob_sidecars/` beacon API is the fulu fork epoch is set to the far future epoch. diff --git a/changelog/manu-check-commitment-count.md b/changelog/manu-check-commitment-count.md deleted file mode 100644 index 404d96eb8b..0000000000 --- a/changelog/manu-check-commitment-count.md +++ /dev/null @@ -1,2 +0,0 @@ -### Fixed -- `VerifyDataColumnSidecar`: Check if there is no too many commitments. diff --git a/changelog/manu-data-column-retention-period.md b/changelog/manu-data-column-retention-period.md deleted file mode 100644 index cec4f0b2c8..0000000000 --- a/changelog/manu-data-column-retention-period.md +++ /dev/null @@ -1,2 +0,0 @@ -### Fixed -- `WithDataColumnRetentionEpochs`: Use `dataColumnRetentionEpoch` instead of `blobColumnRetentionEpoch`. \ No newline at end of file diff --git a/changelog/manu-gracefully-close-stream.md b/changelog/manu-gracefully-close-stream.md deleted file mode 100644 index 236706058e..0000000000 --- a/changelog/manu-gracefully-close-stream.md +++ /dev/null @@ -1,2 +0,0 @@ -### Fixed -- `dataColumnSidecarsByRangeRPCHandler`: Gracefully close the stream if no data to return. diff --git a/changelog/manu-grandine-known-agents.md b/changelog/manu-grandine-known-agents.md deleted file mode 100644 index 849526554d..0000000000 --- a/changelog/manu-grandine-known-agents.md +++ /dev/null @@ -1,2 +0,0 @@ -### Added -- Add Grandine to P2P known agents. (Useful for metrics) \ No newline at end of file diff --git a/changelog/manu-has-at-least-one-index.md b/changelog/manu-has-at-least-one-index.md deleted file mode 100644 index a742f2a2a8..0000000000 --- a/changelog/manu-has-at-least-one-index.md +++ /dev/null @@ -1,2 +0,0 @@ -### Fixed -- `HasAtLeastOneIndex`: Check the index is not too high. diff --git a/changelog/marcopolo_push-nxynxywxtlpo.md b/changelog/marcopolo_push-nxynxywxtlpo.md deleted file mode 100644 index f450494aa7..0000000000 --- a/changelog/marcopolo_push-nxynxywxtlpo.md +++ /dev/null @@ -1,2 +0,0 @@ -### Ignored -- Fix (unreleased) bug where the preallocated slice for KZG Proofs was 48x bigger than it needed to be. 
diff --git a/changelog/phrwlk_fix-attestation-committee-index-bound.md b/changelog/phrwlk_fix-attestation-committee-index-bound.md deleted file mode 100644 index 86ac126bfd..0000000000 --- a/changelog/phrwlk_fix-attestation-committee-index-bound.md +++ /dev/null @@ -1,3 +0,0 @@ -### Fixed - -- reject committee index >= committees_per_slot in unaggregated attestation validation diff --git a/changelog/potuz_fix_head_event.md b/changelog/potuz_fix_head_event.md deleted file mode 100644 index 7d68a89d7f..0000000000 --- a/changelog/potuz_fix_head_event.md +++ /dev/null @@ -1,3 +0,0 @@ -### Fixed - -- Mark epoch transition correctly on new head events diff --git a/changelog/potuz_invalid_not_descendant.md b/changelog/potuz_invalid_not_descendant.md deleted file mode 100644 index 5a647d2f48..0000000000 --- a/changelog/potuz_invalid_not_descendant.md +++ /dev/null @@ -1,3 +0,0 @@ -### Fixed - -- Do not mark blocks as invalid from ErrNotDescendantOfFinalized diff --git a/changelog/potuz_invalid_sig.md b/changelog/potuz_invalid_sig.md deleted file mode 100644 index 756ed7748f..0000000000 --- a/changelog/potuz_invalid_sig.md +++ /dev/null @@ -1,3 +0,0 @@ -### Fixed - -- Mark the block as invalid if it has an invalid signature. diff --git a/changelog/potuz_redundant_check.md b/changelog/potuz_redundant_check.md deleted file mode 100644 index 6f4531bd01..0000000000 --- a/changelog/potuz_redundant_check.md +++ /dev/null @@ -1,3 +0,0 @@ -### Ignored - -- Remove redundant check for genesis root at startup. diff --git a/changelog/pvl-v6.1.2.md b/changelog/pvl-v6.1.2.md deleted file mode 100644 index 6d2726fa8c..0000000000 --- a/changelog/pvl-v6.1.2.md +++ /dev/null @@ -1,3 +0,0 @@ -### Ignored - -- Changelog entries for v6.1.2 through v6.0.5 diff --git a/changelog/radek_api-individual-failure.md b/changelog/radek_api-individual-failure.md deleted file mode 100644 index 26480c06fc..0000000000 --- a/changelog/radek_api-individual-failure.md +++ /dev/null @@ -1,3 +0,0 @@ -### Changed - -- Improve returning individual message errors from Beacon API. \ No newline at end of file diff --git a/changelog/radek_read-non-json-error.md b/changelog/radek_read-non-json-error.md deleted file mode 100644 index 11be946ad4..0000000000 --- a/changelog/radek_read-non-json-error.md +++ /dev/null @@ -1,3 +0,0 @@ -### Fixed - -- Display error messages from the server verbatim when they are not encoded as `application/json`. \ No newline at end of file diff --git a/changelog/radek_rewards-no-verify.md b/changelog/radek_rewards-no-verify.md deleted file mode 100644 index 0bf8a9bd94..0000000000 --- a/changelog/radek_rewards-no-verify.md +++ /dev/null @@ -1,3 +0,0 @@ -### Changed - -- Do not verify block data when calculating rewards. \ No newline at end of file diff --git a/changelog/satushh-gossip.md b/changelog/satushh-gossip.md deleted file mode 100644 index 36deb8e6ea..0000000000 --- a/changelog/satushh-gossip.md +++ /dev/null @@ -1,3 +0,0 @@ -### Fixed - -- Fixed [#15812](https://github.com/OffchainLabs/prysm/issues/15812): Gossip attestation validation incorrectly rejecting attestations that arrive before their referenced blocks. Previously, attestations were saved to the pending queue but immediately rejected by forkchoice validation, causing "not descendant of finalized checkpoint" errors. Now attestations for missing blocks return `ValidationIgnore` without error, allowing them to be properly processed when their blocks arrive. 
This eliminates false positive rejections and prevents potential incorrect peer downscoring during network congestion. \ No newline at end of file diff --git a/changelog/syjn99_ssz-ql-fastssz-sizessz.md b/changelog/syjn99_ssz-ql-fastssz-sizessz.md deleted file mode 100644 index 7a3a539c2a..0000000000 --- a/changelog/syjn99_ssz-ql-fastssz-sizessz.md +++ /dev/null @@ -1,7 +0,0 @@ -### Added - -- SSZ-QL: Use `fastssz`'s `SizeSSZ` method for calculating the size of `Container` type. - -### Changed - -- SSZ-QL: Clarify `Size` method with more sophisticated `SSZType`s. diff --git a/changelog/syjn99_ssz-ql-index-accessing.md b/changelog/syjn99_ssz-ql-index-accessing.md deleted file mode 100644 index 1ebefa4965..0000000000 --- a/changelog/syjn99_ssz-ql-index-accessing.md +++ /dev/null @@ -1,3 +0,0 @@ -### Added - -- SSZ-QL: Access n-th element in `List`/`Vector`. diff --git a/changelog/ttsao_handle-relay-502-errors.md b/changelog/ttsao_handle-relay-502-errors.md deleted file mode 100644 index e4fb005bd9..0000000000 --- a/changelog/ttsao_handle-relay-502-errors.md +++ /dev/null @@ -1,3 +0,0 @@ -### Changed - -- Gracefully handle submit blind block returning 502 errors. diff --git a/changelog/ttsao_optimize-pending-attestation-processing.md b/changelog/ttsao_optimize-pending-attestation-processing.md deleted file mode 100644 index 44e571a324..0000000000 --- a/changelog/ttsao_optimize-pending-attestation-processing.md +++ /dev/null @@ -1,3 +0,0 @@ -### Changed - -- Process pending attestations after pending blocks are cleared From 3ecb5d0b676651119c2f57d02bbb88cc68b4f5cf Mon Sep 17 00:00:00 2001 From: Manu NALEPA Date: Wed, 22 Oct 2025 13:52:31 +0200 Subject: [PATCH 047/103] Remove `Reading static P2P private key from a file.` log if Fulu is enabled. (#15913) --- beacon-chain/p2p/utils.go | 5 ++++- changelog/manu-fulu-log.md | 2 ++ 2 files changed, 6 insertions(+), 1 deletion(-) create mode 100644 changelog/manu-fulu-log.md diff --git a/beacon-chain/p2p/utils.go b/beacon-chain/p2p/utils.go index 57fe239d9b..ffc9852b60 100644 --- a/beacon-chain/p2p/utils.go +++ b/beacon-chain/p2p/utils.go @@ -68,7 +68,10 @@ func privKey(cfg *Config) (*ecdsa.PrivateKey, error) { } if defaultKeysExist { - log.WithField("filePath", defaultKeyPath).Info("Reading static P2P private key from a file. To generate a new random private key at every start, please remove this file.") + if !params.FuluEnabled() { + log.WithField("filePath", defaultKeyPath).Info("Reading static P2P private key from a file. To generate a new random private key at every start, please remove this file.") + } + return privKeyFromFile(defaultKeyPath) } diff --git a/changelog/manu-fulu-log.md b/changelog/manu-fulu-log.md new file mode 100644 index 0000000000..eb64046805 --- /dev/null +++ b/changelog/manu-fulu-log.md @@ -0,0 +1,2 @@ +### Fixed +- Remove `Reading static P2P private key from a file.` log if Fulu is enabled. 
\ No newline at end of file From 2f090c52d9220bbf6cb91f7eeb192dcfe88a3bc0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rados=C5=82aw=20Kapka?= Date: Wed, 22 Oct 2025 15:47:35 +0200 Subject: [PATCH 048/103] Allow custom headers in validator client HTTP requests (#15884) * Allow custom headers in validator client HTTP requests * changelog <3 * improve flag description * Bastin's review * James' review * add godoc for NodeConnectionOption --- api/client/BUILD.bazel | 11 ++++++-- api/client/transport.go | 25 +++++++++++++++++ api/client/transport_test.go | 25 +++++++++++++++++ changelog/radek_rest-custom-headers.md | 3 ++ cmd/validator/flags/flags.go | 7 +++++ cmd/validator/main.go | 1 + cmd/validator/usage.go | 1 + validator/accounts/cli_manager.go | 2 +- validator/client/service.go | 8 ++++-- validator/helpers/node_connection.go | 39 ++++++++++++++++++++++++-- validator/node/node.go | 22 +++++++++++++++ validator/node/node_test.go | 14 +++++++++ validator/rpc/BUILD.bazel | 1 + validator/rpc/beacon.go | 7 +++-- validator/rpc/server.go | 3 ++ 15 files changed, 160 insertions(+), 9 deletions(-) create mode 100644 api/client/transport.go create mode 100644 api/client/transport_test.go create mode 100644 changelog/radek_rest-custom-headers.md diff --git a/api/client/BUILD.bazel b/api/client/BUILD.bazel index f5ddc9bb6c..131d74c4f1 100644 --- a/api/client/BUILD.bazel +++ b/api/client/BUILD.bazel @@ -6,6 +6,7 @@ go_library( "client.go", "errors.go", "options.go", + "transport.go", ], importpath = "github.com/OffchainLabs/prysm/v6/api/client", visibility = ["//visibility:public"], @@ -14,7 +15,13 @@ go_library( go_test( name = "go_default_test", - srcs = ["client_test.go"], + srcs = [ + "client_test.go", + "transport_test.go", + ], embed = [":go_default_library"], - deps = ["//testing/require:go_default_library"], + deps = [ + "//testing/assert:go_default_library", + "//testing/require:go_default_library", + ], ) diff --git a/api/client/transport.go b/api/client/transport.go new file mode 100644 index 0000000000..af29e1168c --- /dev/null +++ b/api/client/transport.go @@ -0,0 +1,25 @@ +package client + +import "net/http" + +// CustomHeadersTransport adds custom headers to each request +type CustomHeadersTransport struct { + base http.RoundTripper + headers map[string][]string +} + +func NewCustomHeadersTransport(base http.RoundTripper, headers map[string][]string) *CustomHeadersTransport { + return &CustomHeadersTransport{ + base: base, + headers: headers, + } +} + +func (t *CustomHeadersTransport) RoundTrip(req *http.Request) (*http.Response, error) { + for header, values := range t.headers { + for _, value := range values { + req.Header.Add(header, value) + } + } + return t.base.RoundTrip(req) +} diff --git a/api/client/transport_test.go b/api/client/transport_test.go new file mode 100644 index 0000000000..0a2eca3103 --- /dev/null +++ b/api/client/transport_test.go @@ -0,0 +1,25 @@ +package client + +import ( + "net/http" + "net/http/httptest" + "testing" + + "github.com/OffchainLabs/prysm/v6/testing/assert" + "github.com/OffchainLabs/prysm/v6/testing/require" +) + +type noopTransport struct{} + +func (*noopTransport) RoundTrip(*http.Request) (*http.Response, error) { + return nil, nil +} + +func TestRoundTrip(t *testing.T) { + tr := &CustomHeadersTransport{base: &noopTransport{}, headers: map[string][]string{"key1": []string{"value1", "value2"}, "key2": []string{"value3"}}} + req := httptest.NewRequest("GET", "http://foo", nil) + _, err := tr.RoundTrip(req) + require.NoError(t, err) + 
assert.DeepEqual(t, []string{"value1", "value2"}, req.Header.Values("key1")) + assert.DeepEqual(t, []string{"value3"}, req.Header.Values("key2")) +} diff --git a/changelog/radek_rest-custom-headers.md b/changelog/radek_rest-custom-headers.md new file mode 100644 index 0000000000..e0ca1b18da --- /dev/null +++ b/changelog/radek_rest-custom-headers.md @@ -0,0 +1,3 @@ +### Added + +- Allow custom headers in validator client HTTP requests. \ No newline at end of file diff --git a/cmd/validator/flags/flags.go b/cmd/validator/flags/flags.go index 51414da2c0..003ad0d517 100644 --- a/cmd/validator/flags/flags.go +++ b/cmd/validator/flags/flags.go @@ -45,6 +45,13 @@ var ( Usage: "Beacon node REST API provider endpoint.", Value: "http://127.0.0.1:3500", } + // BeaconRESTApiHeaders defines a list of headers to send with all HTTP requests to the beacon node. + BeaconRESTApiHeaders = &cli.StringFlag{ + Name: "beacon-rest-api-headers", + Usage: `Comma-separated list of key value pairs to pass as headers for all HTTP calls to the beacon node. + To provide multiple values for the same key, specify the same key for each value. + Example: --beacon-rest-api-headers=key1=value1,key1=value2,key2=value3`, + } // CertFlag defines a flag for the node's TLS certificate. CertFlag = &cli.StringFlag{ Name: "tls-cert", diff --git a/cmd/validator/main.go b/cmd/validator/main.go index b7617eb7b9..3a212cfe25 100644 --- a/cmd/validator/main.go +++ b/cmd/validator/main.go @@ -51,6 +51,7 @@ func startNode(ctx *cli.Context) error { var appFlags = []cli.Flag{ flags.BeaconRPCProviderFlag, flags.BeaconRESTApiProviderFlag, + flags.BeaconRESTApiHeaders, flags.CertFlag, flags.GraffitiFlag, flags.DisablePenaltyRewardLogFlag, diff --git a/cmd/validator/usage.go b/cmd/validator/usage.go index 00c6c13a68..4030c02210 100644 --- a/cmd/validator/usage.go +++ b/cmd/validator/usage.go @@ -93,6 +93,7 @@ var appHelpFlagGroups = []flagGroup{ Flags: []cli.Flag{ flags.CertFlag, flags.BeaconRPCProviderFlag, + flags.BeaconRESTApiHeaders, flags.EnableRPCFlag, flags.RPCHost, flags.RPCPort, diff --git a/validator/accounts/cli_manager.go b/validator/accounts/cli_manager.go index ed4851d303..48f4dc5b3f 100644 --- a/validator/accounts/cli_manager.go +++ b/validator/accounts/cli_manager.go @@ -84,7 +84,7 @@ func (acm *CLIManager) prepareBeaconClients(ctx context.Context) (*iface.Validat conn := validatorHelpers.NewNodeConnection( grpcConn, acm.beaconApiEndpoint, - acm.beaconApiTimeout, + validatorHelpers.WithBeaconApiTimeout(acm.beaconApiTimeout), ) restHandler := beaconApi.NewBeaconApiRestHandler( diff --git a/validator/client/service.go b/validator/client/service.go index 22487b386b..c3d40cd886 100644 --- a/validator/client/service.go +++ b/validator/client/service.go @@ -6,6 +6,7 @@ import ( "strings" "time" + api "github.com/OffchainLabs/prysm/v6/api/client" eventClient "github.com/OffchainLabs/prysm/v6/api/client/event" grpcutil "github.com/OffchainLabs/prysm/v6/api/grpc" "github.com/OffchainLabs/prysm/v6/async/event" @@ -79,6 +80,7 @@ type Config struct { BeaconNodeGRPCEndpoint string BeaconNodeCert string BeaconApiEndpoint string + BeaconApiHeaders map[string][]string BeaconApiTimeout time.Duration Graffiti string GraffitiStruct *graffiti.Graffiti @@ -142,7 +144,8 @@ func NewValidatorService(ctx context.Context, cfg *Config) (*ValidatorService, e s.conn = validatorHelpers.NewNodeConnection( grpcConn, cfg.BeaconApiEndpoint, - cfg.BeaconApiTimeout, + validatorHelpers.WithBeaconApiHeaders(cfg.BeaconApiHeaders), + 
validatorHelpers.WithBeaconApiTimeout(cfg.BeaconApiTimeout), ) return s, nil @@ -185,8 +188,9 @@ func (v *ValidatorService) Start() { return } + headersTransport := api.NewCustomHeadersTransport(http.DefaultTransport, v.conn.GetBeaconApiHeaders()) restHandler := beaconApi.NewBeaconApiRestHandler( - http.Client{Timeout: v.conn.GetBeaconApiTimeout(), Transport: otelhttp.NewTransport(http.DefaultTransport)}, + http.Client{Timeout: v.conn.GetBeaconApiTimeout(), Transport: otelhttp.NewTransport(headersTransport)}, hosts[0], ) diff --git a/validator/helpers/node_connection.go b/validator/helpers/node_connection.go index 8f2de45947..e5a7e7970a 100644 --- a/validator/helpers/node_connection.go +++ b/validator/helpers/node_connection.go @@ -10,16 +10,37 @@ import ( type NodeConnection interface { GetGrpcClientConn() *grpc.ClientConn GetBeaconApiUrl() string + GetBeaconApiHeaders() map[string][]string + setBeaconApiHeaders(map[string][]string) GetBeaconApiTimeout() time.Duration + setBeaconApiTimeout(time.Duration) dummy() } type nodeConnection struct { grpcClientConn *grpc.ClientConn beaconApiUrl string + beaconApiHeaders map[string][]string beaconApiTimeout time.Duration } +// NodeConnectionOption is a functional option for configuring the node connection. +type NodeConnectionOption func(nc NodeConnection) + +// WithBeaconApiHeaders sets the HTTP headers that should be sent to the server along with each request. +func WithBeaconApiHeaders(headers map[string][]string) NodeConnectionOption { + return func(nc NodeConnection) { + nc.setBeaconApiHeaders(headers) + } +} + +// WithBeaconApiTimeout sets the HTTP request timeout. +func WithBeaconApiTimeout(timeout time.Duration) NodeConnectionOption { + return func(nc NodeConnection) { + nc.setBeaconApiTimeout(timeout) + } +} + func (c *nodeConnection) GetGrpcClientConn() *grpc.ClientConn { return c.grpcClientConn } @@ -28,16 +49,30 @@ func (c *nodeConnection) GetBeaconApiUrl() string { return c.beaconApiUrl } +func (c *nodeConnection) GetBeaconApiHeaders() map[string][]string { + return c.beaconApiHeaders +} + +func (c *nodeConnection) setBeaconApiHeaders(headers map[string][]string) { + c.beaconApiHeaders = headers +} + func (c *nodeConnection) GetBeaconApiTimeout() time.Duration { return c.beaconApiTimeout } +func (c *nodeConnection) setBeaconApiTimeout(timeout time.Duration) { + c.beaconApiTimeout = timeout +} + func (*nodeConnection) dummy() {} -func NewNodeConnection(grpcConn *grpc.ClientConn, beaconApiUrl string, beaconApiTimeout time.Duration) NodeConnection { +func NewNodeConnection(grpcConn *grpc.ClientConn, beaconApiUrl string, opts ...NodeConnectionOption) NodeConnection { conn := &nodeConnection{} conn.grpcClientConn = grpcConn conn.beaconApiUrl = beaconApiUrl - conn.beaconApiTimeout = beaconApiTimeout + for _, opt := range opts { + opt(conn) + } return conn } diff --git a/validator/node/node.go b/validator/node/node.go index 48f811d65e..84c1e2bc90 100644 --- a/validator/node/node.go +++ b/validator/node/node.go @@ -433,6 +433,7 @@ func (c *ValidatorClient) registerValidatorService(cliCtx *cli.Context) error { BeaconNodeGRPCEndpoint: cliCtx.String(flags.BeaconRPCProviderFlag.Name), BeaconNodeCert: cliCtx.String(flags.CertFlag.Name), BeaconApiEndpoint: cliCtx.String(flags.BeaconRESTApiProviderFlag.Name), + BeaconApiHeaders: parseBeaconApiHeaders(cliCtx.String(flags.BeaconRESTApiHeaders.Name)), BeaconApiTimeout: time.Second * 30, Graffiti: g.ParseHexGraffiti(cliCtx.String(flags.GraffitiFlag.Name)), GraffitiStruct: graffitiStruct, @@ -552,6 
+553,7 @@ func (c *ValidatorClient) registerRPCService(cliCtx *cli.Context) error { GRPCHeaders: strings.Split(cliCtx.String(flags.GRPCHeadersFlag.Name), ","), BeaconNodeGRPCEndpoint: cliCtx.String(flags.BeaconRPCProviderFlag.Name), BeaconApiEndpoint: cliCtx.String(flags.BeaconRESTApiProviderFlag.Name), + BeaconAPIHeaders: parseBeaconApiHeaders(cliCtx.String(flags.BeaconRESTApiHeaders.Name)), BeaconApiTimeout: time.Second * 30, BeaconNodeCert: cliCtx.String(flags.CertFlag.Name), DB: c.db, @@ -636,3 +638,23 @@ func clearDB(ctx context.Context, dataDir string, force bool, isDatabaseMinimal return nil } + +func parseBeaconApiHeaders(rawHeaders string) map[string][]string { + result := make(map[string][]string) + pairs := strings.Split(rawHeaders, ",") + for _, pair := range pairs { + key, value, found := strings.Cut(pair, "=") + if !found { + // Skip malformed pairs + continue + } + key = strings.TrimSpace(key) + value = strings.TrimSpace(value) + if key == "" || value == "" { + // Skip malformed pairs + continue + } + result[key] = append(result[key], value) + } + return result +} diff --git a/validator/node/node_test.go b/validator/node/node_test.go index 02f2657793..c41b7a1091 100644 --- a/validator/node/node_test.go +++ b/validator/node/node_test.go @@ -308,3 +308,17 @@ func TestWeb3SignerConfig(t *testing.T) { }) } } + +func Test_parseBeaconApiHeaders(t *testing.T) { + t.Run("ok", func(t *testing.T) { + h := parseBeaconApiHeaders("key1=value1,key1=value2,key2=value3") + assert.Equal(t, 2, len(h)) + assert.DeepEqual(t, []string{"value1", "value2"}, h["key1"]) + assert.DeepEqual(t, []string{"value3"}, h["key2"]) + }) + t.Run("ignores malformed", func(t *testing.T) { + h := parseBeaconApiHeaders("key1=value1,key2value2,key3=,=key4") + assert.Equal(t, 1, len(h)) + assert.DeepEqual(t, []string{"value1"}, h["key1"]) + }) +} diff --git a/validator/rpc/BUILD.bazel b/validator/rpc/BUILD.bazel index 64a690d69d..f7e4b41fe1 100644 --- a/validator/rpc/BUILD.bazel +++ b/validator/rpc/BUILD.bazel @@ -23,6 +23,7 @@ go_library( ], deps = [ "//api:go_default_library", + "//api/client:go_default_library", "//api/grpc:go_default_library", "//api/pagination:go_default_library", "//api/server:go_default_library", diff --git a/validator/rpc/beacon.go b/validator/rpc/beacon.go index 596eef0ae9..e18988e20c 100644 --- a/validator/rpc/beacon.go +++ b/validator/rpc/beacon.go @@ -3,6 +3,7 @@ package rpc import ( "net/http" + api "github.com/OffchainLabs/prysm/v6/api/client" grpcutil "github.com/OffchainLabs/prysm/v6/api/grpc" ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" "github.com/OffchainLabs/prysm/v6/validator/client" @@ -52,11 +53,13 @@ func (s *Server) registerBeaconClient() error { conn := validatorHelpers.NewNodeConnection( grpcConn, s.beaconApiEndpoint, - s.beaconApiTimeout, + validatorHelpers.WithBeaconApiHeaders(s.beaconApiHeaders), + validatorHelpers.WithBeaconApiTimeout(s.beaconApiTimeout), ) + headersTransport := api.NewCustomHeadersTransport(http.DefaultTransport, conn.GetBeaconApiHeaders()) restHandler := beaconApi.NewBeaconApiRestHandler( - http.Client{Timeout: s.beaconApiTimeout, Transport: otelhttp.NewTransport(http.DefaultTransport)}, + http.Client{Timeout: s.beaconApiTimeout, Transport: otelhttp.NewTransport(headersTransport)}, s.beaconApiEndpoint, ) diff --git a/validator/rpc/server.go b/validator/rpc/server.go index 5e9b4da703..85322a910f 100644 --- a/validator/rpc/server.go +++ b/validator/rpc/server.go @@ -34,6 +34,7 @@ type Config struct { GRPCHeaders []string 
BeaconNodeGRPCEndpoint string BeaconApiEndpoint string + BeaconAPIHeaders map[string][]string BeaconApiTimeout time.Duration BeaconNodeCert string DB db.Database @@ -64,6 +65,7 @@ type Server struct { authTokenPath string beaconNodeCert string beaconApiEndpoint string + beaconApiHeaders map[string][]string beaconNodeEndpoint string healthClient ethpb.HealthClient nodeClient iface.NodeClient @@ -103,6 +105,7 @@ func NewServer(ctx context.Context, cfg *Config) *Server { wallet: cfg.Wallet, beaconApiTimeout: cfg.BeaconApiTimeout, beaconApiEndpoint: cfg.BeaconApiEndpoint, + beaconApiHeaders: cfg.BeaconAPIHeaders, beaconNodeEndpoint: cfg.BeaconNodeGRPCEndpoint, router: cfg.Router, } From 7dd4f5948cdba1a03476abd29a504043a4eed026 Mon Sep 17 00:00:00 2001 From: terence Date: Wed, 22 Oct 2025 11:22:19 -0700 Subject: [PATCH 049/103] Update consensus spec tests to v1.6.0-beta.1 with new hashes and URL template (#15918) --- WORKSPACE | 10 ++--- changelog/ttsao_update-spec-tests.md | 3 ++ specrefs/.ethspecify.yml | 15 ++++++- specrefs/containers.yml | 6 +-- specrefs/functions.yml | 60 +++++++++++++++++++--------- tools/download_spectests.bzl | 2 +- 6 files changed, 66 insertions(+), 30 deletions(-) create mode 100644 changelog/ttsao_update-spec-tests.md diff --git a/WORKSPACE b/WORKSPACE index 4dc493b5ed..39d0904098 100644 --- a/WORKSPACE +++ b/WORKSPACE @@ -253,16 +253,16 @@ filegroup( url = "https://github.com/ethereum/EIPs/archive/5480440fe51742ed23342b68cf106cefd427e39d.tar.gz", ) -consensus_spec_version = "v1.6.0-beta.0" +consensus_spec_version = "v1.6.0-beta.1" load("@prysm//tools:download_spectests.bzl", "consensus_spec_tests") consensus_spec_tests( name = "consensus_spec_tests", flavors = { - "general": "sha256-rT3jQp2+ZaDiO66gIQggetzqr+kGeexaLqEhbx4HDMY=", - "minimal": "sha256-wowwwyvd0KJLsE+oDOtPkrhZyJndJpJ0lbXYsLH6XBw=", - "mainnet": "sha256-4ZLrLNeO7NihZ4TuWH5V5fUhvW9Y3mAPBQDCqrfShps=", + "general": "sha256-oEj0MTViJHjZo32nABK36gfvSXpbwkBk/jt6Mj7pWFI=", + "minimal": "sha256-cS4NPv6IRBoCSmWomQ8OEo8IsVNW9YawUFqoRZQBUj4=", + "mainnet": "sha256-BYuLndMPAh4p13IRJgNfVakrCVL69KRrNw2tdc3ETbE=", }, version = consensus_spec_version, ) @@ -278,7 +278,7 @@ filegroup( visibility = ["//visibility:public"], ) """, - integrity = "sha256-sBe3Rx8zGq9IrvfgIhZQpYidGjy3mE1SiCb6/+pjLdY=", + integrity = "sha256-yrq3tdwPS8Ri+ueeLAHssIT3ssMrX7zvHiJ8Xf9GVYs=", strip_prefix = "consensus-specs-" + consensus_spec_version[1:], url = "https://github.com/ethereum/consensus-specs/archive/refs/tags/%s.tar.gz" % consensus_spec_version, ) diff --git a/changelog/ttsao_update-spec-tests.md b/changelog/ttsao_update-spec-tests.md new file mode 100644 index 0000000000..ca6b5849fa --- /dev/null +++ b/changelog/ttsao_update-spec-tests.md @@ -0,0 +1,3 @@ +### Changed + +- Updated consensus spec tests to v1.6.0-beta.1 with new hashes and URL template diff --git a/specrefs/.ethspecify.yml b/specrefs/.ethspecify.yml index c775c4ab13..5e8446447f 100644 --- a/specrefs/.ethspecify.yml +++ b/specrefs/.ethspecify.yml @@ -1,4 +1,4 @@ -version: v1.6.0-beta.0 +version: v1.6.0-beta.1 style: full specrefs: @@ -18,6 +18,7 @@ exceptions: - UPDATE_TIMEOUT#altair # Not implemented: gloas (future fork) + - BUILDER_PENDING_WITHDRAWALS_LIMIT#gloas - MAX_PAYLOAD_ATTESTATIONS#gloas - PTC_SIZE#gloas @@ -50,7 +51,6 @@ exceptions: # Not implemented: gloas (future fork) - BUILDER_PAYMENT_THRESHOLD_DENOMINATOR#gloas - BUILDER_PAYMENT_THRESHOLD_NUMERATOR#gloas - - BUILDER_PENDING_WITHDRAWALS_LIMIT#gloas - BUILDER_WITHDRAWAL_PREFIX#gloas - 
DOMAIN_BEACON_BUILDER#gloas - DOMAIN_PTC_ATTESTER#gloas @@ -82,6 +82,12 @@ exceptions: - Eth1Block#phase0 - MatrixEntry#fulu + # Not implemented: capella + - LightClientBootstrap#capella + - LightClientFinalityUpdate#capella + - LightClientOptimisticUpdate#capella + - LightClientUpdate#capella + # Not implemented: gloas (future fork) - BeaconBlockBody#gloas - BeaconState#gloas @@ -106,6 +112,9 @@ exceptions: - OptimisticStore#bellatrix - Store#phase0 + # Not implemented: capella + - LightClientStore#capella + # Not implemented: gloas (future fork) - LatestMessage#gloas - Store#gloas @@ -213,6 +222,7 @@ exceptions: - xor#phase0 # Not implemented: altair + - compute_merkle_proof#altair - compute_sync_committee_period_at_slot#altair - get_contribution_and_proof#altair - get_contribution_due_ms#altair @@ -354,6 +364,7 @@ exceptions: - upgrade_to_gloas#gloas - validate_merge_block#gloas - validate_on_attestation#gloas + - verify_data_column_sidecar#gloas - verify_data_column_sidecar_inclusion_proof#gloas - verify_execution_payload_envelope_signature#gloas - verify_execution_payload_bid_signature#gloas diff --git a/specrefs/containers.yml b/specrefs/containers.yml index dccf8dfbe9..dddd7d1d47 100644 --- a/specrefs/containers.yml +++ b/specrefs/containers.yml @@ -971,12 +971,12 @@ - file: proto/prysm/v1alpha1/light_client.proto search: message LightClientHeaderCapella { spec: | - + class LightClientHeader(Container): - # Beacon block header beacon: BeaconBlockHeader - # Execution payload header corresponding to `beacon.body_root` (from Capella onward) + # [New in Capella] execution: ExecutionPayloadHeader + # [New in Capella] execution_branch: ExecutionBranch diff --git a/specrefs/functions.yml b/specrefs/functions.yml index 7f8e9c3ff5..0dc9e4799b 100644 --- a/specrefs/functions.yml +++ b/specrefs/functions.yml @@ -1303,9 +1303,24 @@ - file: crypto/bls/bls.go search: func AggregatePublicKeys( spec: | - + def eth_aggregate_pubkeys(pubkeys: Sequence[BLSPubkey]) -> BLSPubkey: - return bls.AggregatePKs(pubkeys) + """ + Return the aggregate public key for the public keys in ``pubkeys``. + + Note: the ``+`` operation should be interpreted as elliptic curve point addition, which takes as input + elliptic curve points that must be decoded from the input ``BLSPubkey``s. + This implementation is for demonstrative purposes only and ignores encoding/decoding concerns. + Refer to the BLS signature draft standard for more information. 
+ """ + assert len(pubkeys) > 0 + # Ensure that the given inputs are valid pubkeys + assert all(bls.KeyValidate(pubkey) for pubkey in pubkeys) + + result = copy(pubkeys[0]) + for pubkey in pubkeys[1:]: + result += pubkey + return result - name: eth_fast_aggregate_verify @@ -4494,12 +4509,12 @@ - file: beacon-chain/core/helpers/weak_subjectivity.go search: func IsWithinWeakSubjectivityPeriod( spec: | - + def is_within_weak_subjectivity_period( store: Store, ws_state: BeaconState, ws_checkpoint: Checkpoint ) -> bool: # Clients may choose to validate the input state against the input Weak Subjectivity Checkpoint - assert ws_state.latest_block_header.state_root == ws_checkpoint.root + assert get_block_root(ws_state, ws_checkpoint.epoch) == ws_checkpoint.root assert compute_epoch_at_slot(ws_state.slot) == ws_checkpoint.epoch ws_period = compute_weak_subjectivity_period(ws_state) @@ -4511,12 +4526,12 @@ - name: is_within_weak_subjectivity_period#electra sources: [] spec: | - + def is_within_weak_subjectivity_period( store: Store, ws_state: BeaconState, ws_checkpoint: Checkpoint ) -> bool: # Clients may choose to validate the input state against the input Weak Subjectivity Checkpoint - assert ws_state.latest_block_header.state_root == ws_checkpoint.root + assert get_block_root(ws_state, ws_checkpoint.epoch) == ws_checkpoint.root assert compute_epoch_at_slot(ws_state.slot) == ws_checkpoint.epoch # [Modified in Electra] @@ -7649,8 +7664,8 @@ - name: upgrade_lc_bootstrap_to_capella sources: [] spec: | - - def upgrade_lc_bootstrap_to_capella(pre: bellatrix.LightClientBootstrap) -> LightClientBootstrap: + + def upgrade_lc_bootstrap_to_capella(pre: altair.LightClientBootstrap) -> LightClientBootstrap: return LightClientBootstrap( header=upgrade_lc_header_to_capella(pre.header), current_sync_committee=pre.current_sync_committee, @@ -7687,9 +7702,9 @@ - name: upgrade_lc_finality_update_to_capella sources: [] spec: | - + def upgrade_lc_finality_update_to_capella( - pre: bellatrix.LightClientFinalityUpdate, + pre: altair.LightClientFinalityUpdate, ) -> LightClientFinalityUpdate: return LightClientFinalityUpdate( attested_header=upgrade_lc_header_to_capella(pre.attested_header), @@ -7735,10 +7750,12 @@ - name: upgrade_lc_header_to_capella sources: [] spec: | - - def upgrade_lc_header_to_capella(pre: bellatrix.LightClientHeader) -> LightClientHeader: + + def upgrade_lc_header_to_capella(pre: altair.LightClientHeader) -> LightClientHeader: return LightClientHeader( beacon=pre.beacon, + execution=ExecutionPayloadHeader(), + execution_branch=ExecutionBranch(), ) @@ -7789,9 +7806,9 @@ - name: upgrade_lc_optimistic_update_to_capella sources: [] spec: | - + def upgrade_lc_optimistic_update_to_capella( - pre: bellatrix.LightClientOptimisticUpdate, + pre: altair.LightClientOptimisticUpdate, ) -> LightClientOptimisticUpdate: return LightClientOptimisticUpdate( attested_header=upgrade_lc_header_to_capella(pre.attested_header), @@ -7831,8 +7848,8 @@ - name: upgrade_lc_store_to_capella sources: [] spec: | - - def upgrade_lc_store_to_capella(pre: bellatrix.LightClientStore) -> LightClientStore: + + def upgrade_lc_store_to_capella(pre: altair.LightClientStore) -> LightClientStore: if pre.best_valid_update is None: best_valid_update = None else: @@ -7891,8 +7908,8 @@ - name: upgrade_lc_update_to_capella sources: [] spec: | - - def upgrade_lc_update_to_capella(pre: bellatrix.LightClientUpdate) -> LightClientUpdate: + + def upgrade_lc_update_to_capella(pre: altair.LightClientUpdate) -> LightClientUpdate: return 
LightClientUpdate( attested_header=upgrade_lc_header_to_capella(pre.attested_header), next_sync_committee=pre.next_sync_committee, @@ -8539,7 +8556,7 @@ - file: beacon-chain/core/peerdas/p2p_interface.go search: func VerifyDataColumnSidecar( spec: | - + def verify_data_column_sidecar(sidecar: DataColumnSidecar) -> bool: """ Verify if the data column sidecar is valid. @@ -8552,6 +8569,11 @@ if len(sidecar.kzg_commitments) == 0: return False + # Check that the sidecar respects the blob limit + epoch = compute_epoch_at_slot(sidecar.signed_block_header.message.slot) + if len(sidecar.kzg_commitments) > get_blob_parameters(epoch).max_blobs_per_block: + return False + # The column length must be equal to the number of commitments/proofs if len(sidecar.column) != len(sidecar.kzg_commitments) or len(sidecar.column) != len( sidecar.kzg_proofs diff --git a/tools/download_spectests.bzl b/tools/download_spectests.bzl index b9518daf66..c09c6c805d 100644 --- a/tools/download_spectests.bzl +++ b/tools/download_spectests.bzl @@ -110,6 +110,6 @@ consensus_spec_tests = repository_rule( "repo": attr.string(default = "ethereum/consensus-specs"), "workflow": attr.string(default = "generate_vectors.yml"), "branch": attr.string(default = "dev"), - "release_url_template": attr.string(default = "https://github.com/ethereum/consensus-spec-tests/releases/download/%s"), + "release_url_template": attr.string(default = "https://github.com/ethereum/consensus-specs/releases/download/%s"), }, ) From 9c4774b82ede85c184cbbbc235b000fd9dd1be12 Mon Sep 17 00:00:00 2001 From: kasey <489222+kasey@users.noreply.github.com> Date: Wed, 22 Oct 2025 15:09:18 -0500 Subject: [PATCH 050/103] default new blob storage layouts to by-epoch (#15904) * default new blob storage layouts to by-epoch also, do not log migration message until we see a directory that needs to be migrated Co-authored-by: Manu NALEPA * manu feedback --------- Co-authored-by: Kasey Kirkham Co-authored-by: Manu NALEPA --- beacon-chain/db/filesystem/iteration.go | 3 +- beacon-chain/db/filesystem/iteration_test.go | 2 +- beacon-chain/db/filesystem/layout.go | 17 ++- beacon-chain/db/filesystem/layout_by_epoch.go | 18 +-- beacon-chain/db/filesystem/layout_flat.go | 6 +- changelog/kasey_default-layout-by-epoch.md | 2 + cmd/beacon-chain/storage/BUILD.bazel | 3 + cmd/beacon-chain/storage/options.go | 78 ++++++++++++- cmd/beacon-chain/storage/options_test.go | 108 ++++++++++++++++++ 9 files changed, 213 insertions(+), 24 deletions(-) create mode 100644 changelog/kasey_default-layout-by-epoch.md diff --git a/beacon-chain/db/filesystem/iteration.go b/beacon-chain/db/filesystem/iteration.go index fed8f187e6..3cadf8d8ae 100644 --- a/beacon-chain/db/filesystem/iteration.go +++ b/beacon-chain/db/filesystem/iteration.go @@ -212,7 +212,8 @@ func filterNoop(_ string) bool { return true } -func isRootDir(p string) bool { +// IsBlockRootDir returns true if the path segment looks like a block root directory. 
+func IsBlockRootDir(p string) bool { dir := filepath.Base(p) return len(dir) == rootStringLen && strings.HasPrefix(dir, "0x") } diff --git a/beacon-chain/db/filesystem/iteration_test.go b/beacon-chain/db/filesystem/iteration_test.go index 25acd897a3..e2a44a17d0 100644 --- a/beacon-chain/db/filesystem/iteration_test.go +++ b/beacon-chain/db/filesystem/iteration_test.go @@ -188,7 +188,7 @@ func TestListDir(t *testing.T) { name: "root filter", dirPath: ".", expected: []string{childlessBlob.name, blobWithSsz.name, blobWithSszAndTmp.name}, - filter: isRootDir, + filter: IsBlockRootDir, }, { name: "ssz filter", diff --git a/beacon-chain/db/filesystem/layout.go b/beacon-chain/db/filesystem/layout.go index ab25e125b9..b3f3782637 100644 --- a/beacon-chain/db/filesystem/layout.go +++ b/beacon-chain/db/filesystem/layout.go @@ -19,12 +19,14 @@ import ( const ( // Full root in directory will be 66 chars, eg: // >>> len('0x0002fb4db510b8618b04dc82d023793739c26346a8b02eb73482e24b0fec0555') == 66 - rootStringLen = 66 - sszExt = "ssz" - partExt = "part" - periodicEpochBaseDir = "by-epoch" + rootStringLen = 66 + sszExt = "ssz" + partExt = "part" ) +// PeriodicEpochBaseDir is the name of the base directory for the by-epoch layout. +const PeriodicEpochBaseDir = "by-epoch" + const ( LayoutNameFlat = "flat" LayoutNameByEpoch = "by-epoch" @@ -130,11 +132,11 @@ func migrateLayout(fs afero.Fs, from, to fsLayout, cache *blobStorageSummaryCach if iter.atEOF() { return errLayoutNotDetected } - log.WithField("fromLayout", from.name()).WithField("toLayout", to.name()).Info("Migrating blob filesystem layout. This one-time operation can take extra time (up to a few minutes for systems with extended blob storage and a cold disk cache).") lastMoved := "" parentDirs := make(map[string]bool) // this map should have < 65k keys by design moved := 0 dc := newDirCleaner() + migrationLogged := false for ident, err := iter.next(); !errors.Is(err, io.EOF); ident, err = iter.next() { if err != nil { if errors.Is(err, errIdentFailure) { @@ -146,6 +148,11 @@ func migrateLayout(fs afero.Fs, from, to fsLayout, cache *blobStorageSummaryCach } return errors.Wrapf(errMigrationFailure, "failed to iterate previous layout structure while migrating blobs, err=%s", err.Error()) } + if !migrationLogged { + log.WithField("fromLayout", from.name()).WithField("toLayout", to.name()). + Info("Migrating blob filesystem layout. This one-time operation can take extra time (up to a few minutes for systems with extended blob storage and a cold disk cache).") + migrationLogged = true + } src := from.dir(ident) target := to.dir(ident) if src != lastMoved { diff --git a/beacon-chain/db/filesystem/layout_by_epoch.go b/beacon-chain/db/filesystem/layout_by_epoch.go index 08f28cb1dd..3e6adb4c93 100644 --- a/beacon-chain/db/filesystem/layout_by_epoch.go +++ b/beacon-chain/db/filesystem/layout_by_epoch.go @@ -34,7 +34,7 @@ func (l *periodicEpochLayout) name() string { func (l *periodicEpochLayout) blockParentDirs(ident blobIdent) []string { return []string{ - periodicEpochBaseDir, + PeriodicEpochBaseDir, l.periodDir(ident.epoch), l.epochDir(ident.epoch), } @@ -50,28 +50,28 @@ func (l *periodicEpochLayout) notify(ident blobIdent) error { // If before == 0, it won't be used as a filter and all idents will be returned. 
func (l *periodicEpochLayout) iterateIdents(before primitives.Epoch) (*identIterator, error) { - _, err := l.fs.Stat(periodicEpochBaseDir) + _, err := l.fs.Stat(PeriodicEpochBaseDir) if err != nil { if os.IsNotExist(err) { return &identIterator{eof: true}, nil // The directory is non-existent, which is fine; stop iteration. } - return nil, errors.Wrapf(err, "error reading path %s", periodicEpochBaseDir) + return nil, errors.Wrapf(err, "error reading path %s", PeriodicEpochBaseDir) } // iterate root, which should have directories named by "period" - entries, err := listDir(l.fs, periodicEpochBaseDir) + entries, err := listDir(l.fs, PeriodicEpochBaseDir) if err != nil { - return nil, errors.Wrapf(err, "failed to list %s", periodicEpochBaseDir) + return nil, errors.Wrapf(err, "failed to list %s", PeriodicEpochBaseDir) } return &identIterator{ fs: l.fs, - path: periodicEpochBaseDir, + path: PeriodicEpochBaseDir, // Please see comments on the `layers` field in `identIterator`` if the role of the layers is unclear. layers: []layoutLayer{ {populateIdent: populateNoop, filter: isBeforePeriod(before)}, {populateIdent: populateEpoch, filter: isBeforeEpoch(before)}, - {populateIdent: populateRoot, filter: isRootDir}, // extract root from path - {populateIdent: populateIndex, filter: isSszFile}, // extract index from filename + {populateIdent: populateRoot, filter: IsBlockRootDir}, // extract root from path + {populateIdent: populateIndex, filter: isSszFile}, // extract index from filename }, entries: entries, }, nil @@ -98,7 +98,7 @@ func (l *periodicEpochLayout) epochDir(epoch primitives.Epoch) string { } func (l *periodicEpochLayout) periodDir(epoch primitives.Epoch) string { - return filepath.Join(periodicEpochBaseDir, fmt.Sprintf("%d", periodForEpoch(epoch))) + return filepath.Join(PeriodicEpochBaseDir, fmt.Sprintf("%d", periodForEpoch(epoch))) } func (l *periodicEpochLayout) sszPath(n blobIdent) string { diff --git a/beacon-chain/db/filesystem/layout_flat.go b/beacon-chain/db/filesystem/layout_flat.go index 5da28711d7..3206082ddf 100644 --- a/beacon-chain/db/filesystem/layout_flat.go +++ b/beacon-chain/db/filesystem/layout_flat.go @@ -30,7 +30,7 @@ func (l *flatLayout) iterateIdents(before primitives.Epoch) (*identIterator, err if os.IsNotExist(err) { return &identIterator{eof: true}, nil // The directory is non-existent, which is fine; stop iteration. } - return nil, errors.Wrapf(err, "error reading path %s", periodicEpochBaseDir) + return nil, errors.Wrap(err, "error reading blob base dir") } entries, err := listDir(l.fs, ".") if err != nil { @@ -199,10 +199,10 @@ func (l *flatSlotReader) isSSZAndBefore(fname string) bool { // the epoch can be determined. func isFlatCachedAndBefore(cache *blobStorageSummaryCache, before primitives.Epoch) func(string) bool { if before == 0 { - return isRootDir + return IsBlockRootDir } return func(p string) bool { - if !isRootDir(p) { + if !IsBlockRootDir(p) { return false } root, err := rootFromPath(p) diff --git a/changelog/kasey_default-layout-by-epoch.md b/changelog/kasey_default-layout-by-epoch.md new file mode 100644 index 0000000000..f467dbf4f0 --- /dev/null +++ b/changelog/kasey_default-layout-by-epoch.md @@ -0,0 +1,2 @@ +### Changed +- Use the `by-epoch' blob storage layout by default and log a warning to users who continue to use the flat layout, encouraging them to switch. 
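The layout change above is easiest to picture from the path shapes the two layouts produce. The sketch below is a minimal, standalone illustration assuming the structure implied by the helpers in this patch (flat: one top-level directory per block root; by-epoch: period and epoch directories above the root directory); the <period> and <epoch> segments are placeholders for values computed by the layout's period/epoch helpers, not literal output.

package main

import (
	"fmt"
	"path/filepath"
)

func main() {
	// A 66-character block root directory name, per the convention noted in layout.go.
	const root = "0x0002fb4db510b8618b04dc82d023793739c26346a8b02eb73482e24b0fec0555"

	// Flat layout (previous default): <root>/<index>.ssz at the top of the blob path.
	flat := filepath.Join(root, "3.ssz")

	// By-epoch layout (new default): by-epoch/<period>/<epoch>/<root>/<index>.ssz.
	// Grouping roots under epoch directories lets pruning and startup walk a bounded
	// set of directories instead of every root directory, per the changelog above.
	byEpoch := filepath.Join("by-epoch", "<period>", "<epoch>", root, "3.ssz")

	fmt.Println(flat)
	fmt.Println(byEpoch)
}
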
diff --git a/cmd/beacon-chain/storage/BUILD.bazel b/cmd/beacon-chain/storage/BUILD.bazel index 63d0d03ef2..c966205507 100644 --- a/cmd/beacon-chain/storage/BUILD.bazel +++ b/cmd/beacon-chain/storage/BUILD.bazel @@ -12,6 +12,7 @@ go_library( "//config/params:go_default_library", "//consensus-types/primitives:go_default_library", "@com_github_pkg_errors//:go_default_library", + "@com_github_sirupsen_logrus//:go_default_library", "@com_github_urfave_cli_v2//:go_default_library", ], ) @@ -19,8 +20,10 @@ go_library( go_test( name = "go_default_test", srcs = ["options_test.go"], + data = glob(["testdata/**"]), embed = [":go_default_library"], deps = [ + "//beacon-chain/db/filesystem:go_default_library", "//cmd:go_default_library", "//config/params:go_default_library", "//consensus-types/primitives:go_default_library", diff --git a/cmd/beacon-chain/storage/options.go b/cmd/beacon-chain/storage/options.go index 598ef99a86..e930c30aae 100644 --- a/cmd/beacon-chain/storage/options.go +++ b/cmd/beacon-chain/storage/options.go @@ -1,6 +1,8 @@ package storage import ( + "fmt" + "os" "path" "strings" @@ -10,6 +12,7 @@ import ( "github.com/OffchainLabs/prysm/v6/config/params" "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" "github.com/pkg/errors" + log "github.com/sirupsen/logrus" "github.com/urfave/cli/v2" ) @@ -25,9 +28,9 @@ var ( Aliases: []string{"extend-blob-retention-epoch"}, } BlobStorageLayout = &cli.StringFlag{ - Name: "blob-storage-layout", - Usage: layoutFlagUsage(), - Value: filesystem.LayoutNameFlat, + Name: "blob-storage-layout", + Usage: layoutFlagUsage(), + DefaultText: fmt.Sprintf("\"%s\", unless a different existing layout is detected", filesystem.LayoutNameByEpoch), } DataColumnStoragePathFlag = &cli.PathFlag{ Name: "data-column-path", @@ -35,6 +38,14 @@ var ( } ) +// Flags is the list of CLI flags for configuring blob storage. +var Flags = []cli.Flag{ + BlobStoragePathFlag, + BlobRetentionEpochFlag, + BlobStorageLayout, + DataColumnStoragePathFlag, +} + func layoutOptions() string { return "available options are: " + strings.Join(filesystem.LayoutNames, ", ") + "." } @@ -62,10 +73,20 @@ func BeaconNodeOptions(c *cli.Context) ([]node.Option, error) { return nil, errors.Wrap(err, "blob retention epoch") } + blobPath := blobStoragePath(c) + layout, err := detectLayout(blobPath, c) + if err != nil { + return nil, errors.Wrap(err, "detecting blob storage layout") + } + if layout == filesystem.LayoutNameFlat { + log.Warnf("Existing '%s' blob storage layout detected. Consider setting the flag --%s=%s for faster startup and more reliable pruning. Setting this flag will automatically migrate your existing blob storage to the newer layout on the next restart.", + + filesystem.LayoutNameFlat, BlobStorageLayout.Name, filesystem.LayoutNameByEpoch) + } blobStorageOptions := node.WithBlobStorageOptions( filesystem.WithBlobRetentionEpochs(blobRetentionEpoch), - filesystem.WithBasePath(blobStoragePath(c)), - filesystem.WithLayout(c.String(BlobStorageLayout.Name)), // This is validated in the Action func for BlobStorageLayout. + filesystem.WithBasePath(blobPath), + filesystem.WithLayout(layout), // This is validated in the Action func for BlobStorageLayout. ) dataColumnRetentionEpoch, err := dataColumnRetentionEpoch(c) @@ -82,6 +103,53 @@ func BeaconNodeOptions(c *cli.Context) ([]node.Option, error) { return opts, nil } +// stringFlagGetter makes testing detectLayout easier +// because we don't need to mess with FlagSets and cli types. 
+type stringFlagGetter interface { + String(name string) string +} + +// detectLayout determines which layout to use based on explicit user flags or by probing the +// blob directory to determine the previously used layout. +// - explicit: If the user has specified a layout flag, that layout is returned. +// - flat: If directories that look like flat layout's block root paths are present. +// - by-epoch: default if neither of the above is true. +func detectLayout(dir string, c stringFlagGetter) (string, error) { + explicit := c.String(BlobStorageLayout.Name) + if explicit != "" { + return explicit, nil + } + + dir = path.Clean(dir) + // nosec: this path is provided by the node operator via flag + base, err := os.Open(dir) // #nosec G304 + if err != nil { + // 'blobs' directory does not exist yet, so default to by-epoch. + return filesystem.LayoutNameByEpoch, nil + } + defer func() { + if err := base.Close(); err != nil { + log.WithError(err).Errorf("Could not close blob storage directory") + } + }() + + // When we go looking for existing by-root directories, we only need to find one directory + // but one of those directories could be the `by-epoch` layout's top-level directory, + // and it seems possible that on some platforms we could get extra system directories that I don't + // know how to anticipate (looking at you, Windows), so I picked 16 as a small number with a generous + // amount of wiggle room to be confident that we'll likely see a by-root director if one exists. + entries, err := base.Readdirnames(16) + if err != nil { + return "", errors.Wrap(err, "reading blob storage directory") + } + for _, entry := range entries { + if filesystem.IsBlockRootDir(entry) { + return filesystem.LayoutNameFlat, nil + } + } + return filesystem.LayoutNameByEpoch, nil +} + func blobStoragePath(c *cli.Context) string { blobsPath := c.Path(BlobStoragePathFlag.Name) if blobsPath == "" { diff --git a/cmd/beacon-chain/storage/options_test.go b/cmd/beacon-chain/storage/options_test.go index 7136a8911a..a8ecd23c56 100644 --- a/cmd/beacon-chain/storage/options_test.go +++ b/cmd/beacon-chain/storage/options_test.go @@ -3,8 +3,14 @@ package storage import ( "flag" "fmt" + "os" + "path" + "path/filepath" + "strings" + "syscall" "testing" + "github.com/OffchainLabs/prysm/v6/beacon-chain/db/filesystem" "github.com/OffchainLabs/prysm/v6/cmd" "github.com/OffchainLabs/prysm/v6/config/params" "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" @@ -109,3 +115,105 @@ func TestDataColumnStoragePath_FlagSpecified(t *testing.T) { assert.Equal(t, "/blah/blah", storagePath) } + +type mockStringFlagGetter struct { + v string +} + +func (m mockStringFlagGetter) String(name string) string { + return m.v +} + +func TestDetectLayout(t *testing.T) { + fakeRoot := "0xabcdef1234567890abcdef1234567890abcdef1234567890abcdef1234567890" + require.Equal(t, true, filesystem.IsBlockRootDir(fakeRoot)) + withFlatRoot := func(t *testing.T, dir string) { + require.NoError(t, os.MkdirAll(path.Join(dir, fakeRoot), 0o755)) + } + withByEpoch := func(t *testing.T, dir string) { + require.NoError(t, os.MkdirAll(path.Join(dir, filesystem.PeriodicEpochBaseDir), 0o755)) + } + + cases := []struct { + name string + expected string + expectedErr error + setup func(t *testing.T, dir string) + getter mockStringFlagGetter + }{ + { + name: "no blobs dir", + expected: filesystem.LayoutNameByEpoch, + }, + { + name: "blobs dir without root dirs", + expected: filesystem.LayoutNameByEpoch, + // empty subdirectory under blobs which doesn't match the 
block root pattern + setup: func(t *testing.T, dir string) { + require.NoError(t, os.MkdirAll(path.Join(dir, "some-dir"), 0o755)) + }, + }, + { + name: "blobs dir with root dir", + setup: withFlatRoot, + expected: filesystem.LayoutNameFlat, + }, + { + name: "blobs dir with root dir overridden by flag", + setup: withFlatRoot, + expected: filesystem.LayoutNameByEpoch, + getter: mockStringFlagGetter{v: filesystem.LayoutNameByEpoch}, + }, + { + name: "only has by-epoch dir", + setup: withByEpoch, + expected: filesystem.LayoutNameByEpoch, + }, + { + name: "contains by-epoch dir and root dirs", + setup: func(t *testing.T, dir string) { + withFlatRoot(t, dir) + withByEpoch(t, dir) + }, + expected: filesystem.LayoutNameFlat, + }, + { + name: "unreadable dir", + // It isn't detectLayout's job to detect any errors reading the directory, + // so it ignores errors from the os.Open call. But we can also get errors + // from readdirnames, but this is hard to simulate in a test. So in the test + // write a file in place of the dir, which will succeed in the Open call, but + // fail when read as a directory. This is why the expected error is syscall.ENOTDIR + // (syscall error code from using readdirnames syscall on an ordinary file). + setup: func(t *testing.T, dir string) { + parent := filepath.Dir(dir) + require.NoError(t, os.MkdirAll(parent, 0o755)) + require.NoError(t, os.WriteFile(dir, []byte{}, 0o755)) + }, + expectedErr: syscall.ENOTDIR, + }, + } + + for _, tc := range cases { + t.Run(tc.name, func(t *testing.T) { + dir := strings.Replace(t.Name(), " ", "_", -1) + dir = path.Join(os.TempDir(), dir) + if tc.setup != nil { + tc.setup(t, dir) + } + if tc.expectedErr != nil { + t.Log("hi") + } + layout, err := detectLayout(dir, tc.getter) + if tc.expectedErr != nil { + require.ErrorIs(t, err, tc.expectedErr) + return + } + require.NoError(t, err) + require.Equal(t, tc.expected, layout) + + assert.Equal(t, tc.expectedErr, err) + assert.Equal(t, tc.expected, layout) + }) + } +} From 46bc81b4c82bb47626b1155b8db543a4850ae833 Mon Sep 17 00:00:00 2001 From: terence Date: Thu, 23 Oct 2025 08:50:25 -0700 Subject: [PATCH 051/103] Add metric to track data columns recovered from execution layer (#15924) --- beacon-chain/sync/metrics.go | 7 +++++++ beacon-chain/sync/subscriber_beacon_blocks.go | 2 ++ changelog/ttsao_add-columns-recovery-metric.md | 3 +++ 3 files changed, 12 insertions(+) create mode 100644 changelog/ttsao_add-columns-recovery-metric.md diff --git a/beacon-chain/sync/metrics.go b/beacon-chain/sync/metrics.go index 64b4c2bb9d..8e107273d6 100644 --- a/beacon-chain/sync/metrics.go +++ b/beacon-chain/sync/metrics.go @@ -192,6 +192,13 @@ var ( }, ) + dataColumnsRecoveredFromELTotal = promauto.NewCounter( + prometheus.CounterOpts{ + Name: "data_columns_recovered_from_el_total", + Help: "Count the number of times data columns have been recovered from the execution layer.", + }, + ) + // Data column sidecar validation, beacon metrics specs dataColumnSidecarVerificationRequestsCounter = promauto.NewCounter(prometheus.CounterOpts{ Name: "beacon_data_column_sidecar_processing_requests_total", diff --git a/beacon-chain/sync/subscriber_beacon_blocks.go b/beacon-chain/sync/subscriber_beacon_blocks.go index 8278f67ed7..92017f56b6 100644 --- a/beacon-chain/sync/subscriber_beacon_blocks.go +++ b/beacon-chain/sync/subscriber_beacon_blocks.go @@ -224,6 +224,8 @@ func (s *Service) processDataColumnSidecarsFromExecution(ctx context.Context, so } if len(unseenIndices) > 0 { + dataColumnsRecoveredFromELTotal.Inc() + 
log.WithFields(logrus.Fields{ "root": fmt.Sprintf("%#x", source.Root()), "slot": source.Slot(), diff --git a/changelog/ttsao_add-columns-recovery-metric.md b/changelog/ttsao_add-columns-recovery-metric.md new file mode 100644 index 0000000000..b4e4212e3a --- /dev/null +++ b/changelog/ttsao_add-columns-recovery-metric.md @@ -0,0 +1,3 @@ +### Added + +- Metric to track data columns recovered from execution layer From 255ea2fac1b8af9d9b7207ac75ea2371d98a1d7d Mon Sep 17 00:00:00 2001 From: terence Date: Thu, 23 Oct 2025 20:37:32 -0700 Subject: [PATCH 052/103] Return optimistic response only when handling blinded blocks (#15925) * Return optimistic response only when handling blinded blocks in proposer * Remove blind condition --- beacon-chain/rpc/prysm/v1alpha1/validator/proposer.go | 8 ++++---- changelog/ttsao_fix-optimistic-blinded-blocks.md | 3 +++ 2 files changed, 7 insertions(+), 4 deletions(-) create mode 100644 changelog/ttsao_fix-optimistic-blinded-blocks.md diff --git a/beacon-chain/rpc/prysm/v1alpha1/validator/proposer.go b/beacon-chain/rpc/prysm/v1alpha1/validator/proposer.go index 0d97b31afa..091a745737 100644 --- a/beacon-chain/rpc/prysm/v1alpha1/validator/proposer.go +++ b/beacon-chain/rpc/prysm/v1alpha1/validator/proposer.go @@ -312,14 +312,14 @@ func (vs *Server) ProposeBeaconBlock(ctx context.Context, req *ethpb.GenericSign rob, err := blocks.NewROBlockWithRoot(block, root) if block.IsBlinded() { block, blobSidecars, err = vs.handleBlindedBlock(ctx, block) + if errors.Is(err, builderapi.ErrBadGateway) { + log.WithError(err).Info("Optimistically proposed block - builder relay temporarily unavailable, block may arrive over P2P") + return ðpb.ProposeResponse{BlockRoot: root[:]}, nil + } } else if block.Version() >= version.Deneb { blobSidecars, dataColumnSidecars, err = vs.handleUnblindedBlock(rob, req) } if err != nil { - if errors.Is(err, builderapi.ErrBadGateway) && block.IsBlinded() { - log.WithError(err).Info("Optimistically proposed block - builder relay temporarily unavailable, block may arrive over P2P") - return ðpb.ProposeResponse{BlockRoot: root[:]}, nil - } return nil, status.Errorf(codes.Internal, "%s: %v", "handle block failed", err) } diff --git a/changelog/ttsao_fix-optimistic-blinded-blocks.md b/changelog/ttsao_fix-optimistic-blinded-blocks.md new file mode 100644 index 0000000000..e125004b9d --- /dev/null +++ b/changelog/ttsao_fix-optimistic-blinded-blocks.md @@ -0,0 +1,3 @@ +### Ignored + +- Return optimistic response only when handling blinded blocks in proposer From 26ce94e224eb0b5cb4a6aefed5526aa36ae7ca0a Mon Sep 17 00:00:00 2001 From: james-prysm <90280386+james-prysm@users.noreply.github.com> Date: Fri, 24 Oct 2025 09:28:30 -0500 Subject: [PATCH 053/103] removes misleading keymanager info log (#15926) * simple change * fixing test" " --- changelog/james-prysm_remove-deposit-keymanager-log.md | 3 +++ validator/accounts/accounts_list_test.go | 10 +++++----- validator/keymanager/local/keymanager.go | 4 ---- 3 files changed, 8 insertions(+), 9 deletions(-) create mode 100644 changelog/james-prysm_remove-deposit-keymanager-log.md diff --git a/changelog/james-prysm_remove-deposit-keymanager-log.md b/changelog/james-prysm_remove-deposit-keymanager-log.md new file mode 100644 index 0000000000..ff2662c82e --- /dev/null +++ b/changelog/james-prysm_remove-deposit-keymanager-log.md @@ -0,0 +1,3 @@ +### Removed + +- log mentioning removed flag `--show-deposit-data` \ No newline at end of file diff --git a/validator/accounts/accounts_list_test.go 
b/validator/accounts/accounts_list_test.go index 1dd089a9f3..a1798a2134 100644 --- a/validator/accounts/accounts_list_test.go +++ b/validator/accounts/accounts_list_test.go @@ -221,10 +221,10 @@ func TestListAccounts_LocalKeymanager(t *testing.T) { // Expected output format definition const prologLength = 4 const accountLength = 4 - const epilogLength = 2 - const nameOffset = 1 - const keyOffset = 2 - const privkeyOffset = 3 + const epilogLength = 1 + + const keyOffset = 1 + const privkeyOffset = 2 // Require the output has correct number of lines lineCount := prologLength + accountLength*numAccounts + epilogLength @@ -242,7 +242,7 @@ func TestListAccounts_LocalKeymanager(t *testing.T) { // Assert that account names are printed on the correct lines for i, accountName := range accountNames { - lineNumber := prologLength + accountLength*i + nameOffset + lineNumber := prologLength + accountLength*i accountNameFound := strings.Contains(lines[lineNumber], accountName) assert.Equal(t, true, accountNameFound, "Account Name %s not found on line number %d", accountName, lineNumber) } diff --git a/validator/keymanager/local/keymanager.go b/validator/keymanager/local/keymanager.go index 27cb8adcbf..8c6ea90cbf 100644 --- a/validator/keymanager/local/keymanager.go +++ b/validator/keymanager/local/keymanager.go @@ -402,10 +402,6 @@ func (km *Keymanager) ListKeymanagerAccounts(ctx context.Context, cfg keymanager } else { fmt.Printf("Showing %d validator accounts\n", numAccounts) } - fmt.Println( - au.BrightRed("View the eth1 deposit transaction data for your accounts " + - "by running `validator accounts list --show-deposit-data`"), - ) pubKeys, err := km.FetchValidatingPublicKeys(ctx) if err != nil { From 9153c5a202d0d191111d3dd6e03f296f8ca5a04e Mon Sep 17 00:00:00 2001 From: Bastin <43618253+Inspector-Butters@users.noreply.github.com> Date: Fri, 24 Oct 2025 16:42:27 +0200 Subject: [PATCH 054/103] light client logging (#15927) --- beacon-chain/light-client/BUILD.bazel | 1 + beacon-chain/light-client/log.go | 5 +++++ beacon-chain/light-client/store.go | 1 - changelog/bastin_lc-prefix.md | 3 +++ 4 files changed, 9 insertions(+), 1 deletion(-) create mode 100644 beacon-chain/light-client/log.go create mode 100644 changelog/bastin_lc-prefix.md diff --git a/beacon-chain/light-client/BUILD.bazel b/beacon-chain/light-client/BUILD.bazel index 39d8392fb1..06d1da2643 100644 --- a/beacon-chain/light-client/BUILD.bazel +++ b/beacon-chain/light-client/BUILD.bazel @@ -6,6 +6,7 @@ go_library( "cache.go", "helpers.go", "lightclient.go", + "log.go", "store.go", ], importpath = "github.com/OffchainLabs/prysm/v6/beacon-chain/light-client", diff --git a/beacon-chain/light-client/log.go b/beacon-chain/light-client/log.go new file mode 100644 index 0000000000..755e9b685a --- /dev/null +++ b/beacon-chain/light-client/log.go @@ -0,0 +1,5 @@ +package light_client + +import "github.com/sirupsen/logrus" + +var log = logrus.WithField("prefix", "light-client") diff --git a/beacon-chain/light-client/store.go b/beacon-chain/light-client/store.go index 177ec9b6e4..fecce9913d 100644 --- a/beacon-chain/light-client/store.go +++ b/beacon-chain/light-client/store.go @@ -14,7 +14,6 @@ import ( "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" "github.com/OffchainLabs/prysm/v6/time/slots" "github.com/pkg/errors" - log "github.com/sirupsen/logrus" ) var ErrLightClientBootstrapNotFound = errors.New("light client bootstrap not found") diff --git a/changelog/bastin_lc-prefix.md b/changelog/bastin_lc-prefix.md new file mode 100644 
index 0000000000..3ee434cbdc --- /dev/null +++ b/changelog/bastin_lc-prefix.md @@ -0,0 +1,3 @@ +### Ignored + +- Add log prefix to the light-client package. \ No newline at end of file From 6d596edea2dc4df1283634469242708bb24e7e14 Mon Sep 17 00:00:00 2001 From: terence Date: Fri, 24 Oct 2025 08:35:26 -0700 Subject: [PATCH 055/103] Use `SlotTicker` instead of `time.Ticker` for attestation pool pruning (#15917) * Use SlotTicker instead of time.Ticker for attestation pool pruning * Offset one second before slot start --- beacon-chain/operations/attestations/prune_expired.go | 8 +++++--- .../operations/attestations/prune_expired_test.go | 4 +++- changelog/ttsao_use-slot-ticker-pruning.md | 3 +++ 3 files changed, 11 insertions(+), 4 deletions(-) create mode 100644 changelog/ttsao_use-slot-ticker-pruning.md diff --git a/beacon-chain/operations/attestations/prune_expired.go b/beacon-chain/operations/attestations/prune_expired.go index 30200c26b4..6bd66ebaef 100644 --- a/beacon-chain/operations/attestations/prune_expired.go +++ b/beacon-chain/operations/attestations/prune_expired.go @@ -10,11 +10,13 @@ import ( // pruneExpired prunes attestations pool on every slot interval. func (s *Service) pruneExpired() { - ticker := time.NewTicker(s.cfg.pruneInterval) - defer ticker.Stop() + secondsPerSlot := params.BeaconConfig().SecondsPerSlot + offset := time.Duration(secondsPerSlot-1) * time.Second + slotTicker := slots.NewSlotTickerWithOffset(s.genesisTime, offset, secondsPerSlot) + defer slotTicker.Done() for { select { - case <-ticker.C: + case <-slotTicker.C(): s.pruneExpiredAtts() s.updateMetrics() case <-s.ctx.Done(): diff --git a/beacon-chain/operations/attestations/prune_expired_test.go b/beacon-chain/operations/attestations/prune_expired_test.go index f7410c9f98..70533fa8ae 100644 --- a/beacon-chain/operations/attestations/prune_expired_test.go +++ b/beacon-chain/operations/attestations/prune_expired_test.go @@ -17,7 +17,9 @@ import ( ) func TestPruneExpired_Ticker(t *testing.T) { - ctx, cancel := context.WithTimeout(t.Context(), 3*time.Second) + // Need timeout longer than the offset (secondsPerSlot - 1) + some buffer + timeout := time.Duration(params.BeaconConfig().SecondsPerSlot+5) * time.Second + ctx, cancel := context.WithTimeout(t.Context(), timeout) defer cancel() s, err := NewService(ctx, &Config{ diff --git a/changelog/ttsao_use-slot-ticker-pruning.md b/changelog/ttsao_use-slot-ticker-pruning.md new file mode 100644 index 0000000000..80a6d2caf0 --- /dev/null +++ b/changelog/ttsao_use-slot-ticker-pruning.md @@ -0,0 +1,3 @@ +### Ignored + +- Use SlotTicker with offset instead of time.Ticker for attestation pool pruning to avoid conflicts with slot boundary operations From 4fb75d6d0b1fc01344c21a7ac030eda7001d9e76 Mon Sep 17 00:00:00 2001 From: Manu NALEPA Date: Sun, 26 Oct 2025 16:16:05 +0100 Subject: [PATCH 056/103] Add some metrics improvements (#15922) * Define TCP and QUIC as `InternetProtocol` (no functional change). * Group types. (No functional changes) * Rename variables and use range syntax. * Add `p2pMaxPeers` and `p2pPeerCountDirectionType` metrics * `p2p_subscribed_topic_peer_total`: Reset to avoid dangling values. * `validateConfig`: - Use `Warning` with fields instead of `Warnf`. - Avoid to both modify in place the input value and return it. * Add `p2p_minimum_peers_per_subnet` metric. * `beaconConfig` => `cfg`. 
https://github.com/OffchainLabs/prysm/pull/15880#discussion_r2436826215 * Add changelog --------- Co-authored-by: james-prysm <90280386+james-prysm@users.noreply.github.com> --- beacon-chain/blockchain/service.go | 14 +++---- beacon-chain/core/helpers/validators.go | 6 +-- beacon-chain/core/peerdas/das_core.go | 12 +++--- beacon-chain/core/peerdas/validator.go | 8 ++-- beacon-chain/core/time/slot_epoch_test.go | 16 +++---- beacon-chain/p2p/config.go | 20 +++++---- beacon-chain/p2p/custody.go | 8 ++-- beacon-chain/p2p/monitoring.go | 36 +++++++++++++--- beacon-chain/p2p/peers/status.go | 42 ++++++++++--------- beacon-chain/p2p/rpc_topic_mappings.go | 6 +-- beacon-chain/p2p/service.go | 7 +++- beacon-chain/p2p/subnets.go | 10 ++--- beacon-chain/p2p/subnets_test.go | 8 ++-- beacon-chain/sync/custody.go | 6 +-- beacon-chain/sync/custody_test.go | 12 +++--- .../sync/initial-sync/blocks_fetcher_test.go | 12 +++--- .../sync/initial-sync/service_test.go | 8 ++-- beacon-chain/sync/metrics.go | 1 + .../sync/rpc_blob_sidecars_by_root.go | 8 ++-- .../sync/rpc_data_column_sidecars_by_range.go | 6 +-- .../rpc_data_column_sidecars_by_range_test.go | 6 +-- .../sync/rpc_data_column_sidecars_by_root.go | 6 +-- .../rpc_data_column_sidecars_by_root_test.go | 18 ++++---- beacon-chain/sync/rpc_send_request.go | 4 +- beacon-chain/sync/rpc_send_request_test.go | 24 +++++------ beacon-chain/sync/rpc_status_test.go | 4 +- beacon-chain/sync/subscriber.go | 6 +-- changelog/manu-metrics.md | 4 ++ 28 files changed, 180 insertions(+), 138 deletions(-) create mode 100644 changelog/manu-metrics.md diff --git a/beacon-chain/blockchain/service.go b/beacon-chain/blockchain/service.go index 66b68f9114..bb152b5339 100644 --- a/beacon-chain/blockchain/service.go +++ b/beacon-chain/blockchain/service.go @@ -472,8 +472,8 @@ func (s *Service) removeStartupState() { func (s *Service) updateCustodyInfoInDB(slot primitives.Slot) (primitives.Slot, uint64, error) { isSubscribedToAllDataSubnets := flags.Get().SubscribeAllDataSubnets - beaconConfig := params.BeaconConfig() - custodyRequirement := beaconConfig.CustodyRequirement + cfg := params.BeaconConfig() + custodyRequirement := cfg.CustodyRequirement // Check if the node was previously subscribed to all data subnets, and if so, // store the new status accordingly. @@ -493,7 +493,7 @@ func (s *Service) updateCustodyInfoInDB(slot primitives.Slot) (primitives.Slot, // Compute the custody group count. custodyGroupCount := custodyRequirement if isSubscribedToAllDataSubnets { - custodyGroupCount = beaconConfig.NumberOfCustodyGroups + custodyGroupCount = cfg.NumberOfCustodyGroups } // Safely compute the fulu fork slot. 
@@ -536,11 +536,11 @@ func spawnCountdownIfPreGenesis(ctx context.Context, genesisTime time.Time, db d } func fuluForkSlot() (primitives.Slot, error) { - beaconConfig := params.BeaconConfig() + cfg := params.BeaconConfig() - fuluForkEpoch := beaconConfig.FuluForkEpoch - if fuluForkEpoch == beaconConfig.FarFutureEpoch { - return beaconConfig.FarFutureSlot, nil + fuluForkEpoch := cfg.FuluForkEpoch + if fuluForkEpoch == cfg.FarFutureEpoch { + return cfg.FarFutureSlot, nil } forkFuluSlot, err := slots.EpochStart(fuluForkEpoch) diff --git a/beacon-chain/core/helpers/validators.go b/beacon-chain/core/helpers/validators.go index 0675ffc1d2..0bfe30c4e4 100644 --- a/beacon-chain/core/helpers/validators.go +++ b/beacon-chain/core/helpers/validators.go @@ -401,7 +401,7 @@ func ComputeProposerIndex(bState state.ReadOnlyBeaconState, activeIndices []prim return 0, errors.New("empty active indices list") } hashFunc := hash.CustomSHA256Hasher() - beaconConfig := params.BeaconConfig() + cfg := params.BeaconConfig() seedBuffer := make([]byte, len(seed)+8) copy(seedBuffer, seed[:]) @@ -426,14 +426,14 @@ func ComputeProposerIndex(bState state.ReadOnlyBeaconState, activeIndices []prim offset := (i % 16) * 2 randomValue := uint64(randomBytes[offset]) | uint64(randomBytes[offset+1])<<8 - if effectiveBal*fieldparams.MaxRandomValueElectra >= beaconConfig.MaxEffectiveBalanceElectra*randomValue { + if effectiveBal*fieldparams.MaxRandomValueElectra >= cfg.MaxEffectiveBalanceElectra*randomValue { return candidateIndex, nil } } else { binary.LittleEndian.PutUint64(seedBuffer[len(seed):], i/32) randomByte := hashFunc(seedBuffer)[i%32] - if effectiveBal*fieldparams.MaxRandomByte >= beaconConfig.MaxEffectiveBalance*uint64(randomByte) { + if effectiveBal*fieldparams.MaxRandomByte >= cfg.MaxEffectiveBalance*uint64(randomByte) { return candidateIndex, nil } } diff --git a/beacon-chain/core/peerdas/das_core.go b/beacon-chain/core/peerdas/das_core.go index 6cb0ad3ffd..0d8a357b49 100644 --- a/beacon-chain/core/peerdas/das_core.go +++ b/beacon-chain/core/peerdas/das_core.go @@ -89,14 +89,14 @@ func CustodyGroups(nodeId enode.ID, custodyGroupCount uint64) ([]uint64, error) // ComputeColumnsForCustodyGroup computes the columns for a given custody group. // https://github.com/ethereum/consensus-specs/blob/master/specs/fulu/das-core.md#compute_columns_for_custody_group func ComputeColumnsForCustodyGroup(custodyGroup uint64) ([]uint64, error) { - beaconConfig := params.BeaconConfig() - numberOfCustodyGroups := beaconConfig.NumberOfCustodyGroups + cfg := params.BeaconConfig() + numberOfCustodyGroups := cfg.NumberOfCustodyGroups if custodyGroup >= numberOfCustodyGroups { return nil, ErrCustodyGroupTooLarge } - numberOfColumns := beaconConfig.NumberOfColumns + numberOfColumns := cfg.NumberOfColumns columnsPerGroup := numberOfColumns / numberOfCustodyGroups @@ -112,9 +112,9 @@ func ComputeColumnsForCustodyGroup(custodyGroup uint64) ([]uint64, error) { // ComputeCustodyGroupForColumn computes the custody group for a given column. // It is the reciprocal function of ComputeColumnsForCustodyGroup. 
func ComputeCustodyGroupForColumn(columnIndex uint64) (uint64, error) { - beaconConfig := params.BeaconConfig() - numberOfColumns := beaconConfig.NumberOfColumns - numberOfCustodyGroups := beaconConfig.NumberOfCustodyGroups + cfg := params.BeaconConfig() + numberOfColumns := cfg.NumberOfColumns + numberOfCustodyGroups := cfg.NumberOfCustodyGroups if columnIndex >= numberOfColumns { return 0, ErrIndexTooLarge diff --git a/beacon-chain/core/peerdas/validator.go b/beacon-chain/core/peerdas/validator.go index 8e448aa001..65575aef46 100644 --- a/beacon-chain/core/peerdas/validator.go +++ b/beacon-chain/core/peerdas/validator.go @@ -84,10 +84,10 @@ func ValidatorsCustodyRequirement(state beaconState.ReadOnlyBeaconState, validat totalNodeBalance += validator.EffectiveBalance() } - beaconConfig := params.BeaconConfig() - numberOfCustodyGroups := beaconConfig.NumberOfCustodyGroups - validatorCustodyRequirement := beaconConfig.ValidatorCustodyRequirement - balancePerAdditionalCustodyGroup := beaconConfig.BalancePerAdditionalCustodyGroup + cfg := params.BeaconConfig() + numberOfCustodyGroups := cfg.NumberOfCustodyGroups + validatorCustodyRequirement := cfg.ValidatorCustodyRequirement + balancePerAdditionalCustodyGroup := cfg.BalancePerAdditionalCustodyGroup count := totalNodeBalance / balancePerAdditionalCustodyGroup return min(max(count, validatorCustodyRequirement), numberOfCustodyGroups), nil diff --git a/beacon-chain/core/time/slot_epoch_test.go b/beacon-chain/core/time/slot_epoch_test.go index 4e30399f5e..ae8b16cb13 100644 --- a/beacon-chain/core/time/slot_epoch_test.go +++ b/beacon-chain/core/time/slot_epoch_test.go @@ -196,7 +196,7 @@ func TestAltairCompatible(t *testing.T) { } func TestCanUpgradeTo(t *testing.T) { - beaconConfig := params.BeaconConfig() + cfg := params.BeaconConfig() outerTestCases := []struct { name string @@ -205,32 +205,32 @@ func TestCanUpgradeTo(t *testing.T) { }{ { name: "Altair", - forkEpoch: &beaconConfig.AltairForkEpoch, + forkEpoch: &cfg.AltairForkEpoch, upgradeFunc: time.CanUpgradeToAltair, }, { name: "Bellatrix", - forkEpoch: &beaconConfig.BellatrixForkEpoch, + forkEpoch: &cfg.BellatrixForkEpoch, upgradeFunc: time.CanUpgradeToBellatrix, }, { name: "Capella", - forkEpoch: &beaconConfig.CapellaForkEpoch, + forkEpoch: &cfg.CapellaForkEpoch, upgradeFunc: time.CanUpgradeToCapella, }, { name: "Deneb", - forkEpoch: &beaconConfig.DenebForkEpoch, + forkEpoch: &cfg.DenebForkEpoch, upgradeFunc: time.CanUpgradeToDeneb, }, { name: "Electra", - forkEpoch: &beaconConfig.ElectraForkEpoch, + forkEpoch: &cfg.ElectraForkEpoch, upgradeFunc: time.CanUpgradeToElectra, }, { name: "Fulu", - forkEpoch: &beaconConfig.FuluForkEpoch, + forkEpoch: &cfg.FuluForkEpoch, upgradeFunc: time.CanUpgradeToFulu, }, } @@ -238,7 +238,7 @@ func TestCanUpgradeTo(t *testing.T) { for _, otc := range outerTestCases { params.SetupTestConfigCleanup(t) *otc.forkEpoch = 5 - params.OverrideBeaconConfig(beaconConfig) + params.OverrideBeaconConfig(cfg) innerTestCases := []struct { name string diff --git a/beacon-chain/p2p/config.go b/beacon-chain/p2p/config.go index 7b2e486245..15267532ea 100644 --- a/beacon-chain/p2p/config.go +++ b/beacon-chain/p2p/config.go @@ -7,6 +7,7 @@ import ( statefeed "github.com/OffchainLabs/prysm/v6/beacon-chain/core/feed/state" "github.com/OffchainLabs/prysm/v6/beacon-chain/db" "github.com/OffchainLabs/prysm/v6/beacon-chain/startup" + "github.com/sirupsen/logrus" ) // This is the default queue size used if we have specified an invalid one. 
@@ -63,12 +64,17 @@ func (cfg *Config) connManagerLowHigh() (int, int) { return low, high } -// validateConfig validates whether the values provided are accurate and will set -// the appropriate values for those that are invalid. -func validateConfig(cfg *Config) *Config { - if cfg.QueueSize == 0 { - log.Warnf("Invalid pubsub queue size of %d initialized, setting the quese size as %d instead", cfg.QueueSize, defaultPubsubQueueSize) - cfg.QueueSize = defaultPubsubQueueSize +// validateConfig validates whether the provided config has valid values and sets +// the invalid ones to default. +func validateConfig(cfg *Config) { + if cfg.QueueSize > 0 { + return } - return cfg + + log.WithFields(logrus.Fields{ + "queueSize": cfg.QueueSize, + "default": defaultPubsubQueueSize, + }).Warning("Invalid pubsub queue size, setting the queue size to the default value") + + cfg.QueueSize = defaultPubsubQueueSize } diff --git a/beacon-chain/p2p/custody.go b/beacon-chain/p2p/custody.go index 2318b9aaad..01ff28b3da 100644 --- a/beacon-chain/p2p/custody.go +++ b/beacon-chain/p2p/custody.go @@ -259,11 +259,11 @@ func (s *Service) custodyGroupCountFromPeerENR(pid peer.ID) uint64 { } func fuluForkSlot() (primitives.Slot, error) { - beaconConfig := params.BeaconConfig() + cfg := params.BeaconConfig() - fuluForkEpoch := beaconConfig.FuluForkEpoch - if fuluForkEpoch == beaconConfig.FarFutureEpoch { - return beaconConfig.FarFutureSlot, nil + fuluForkEpoch := cfg.FuluForkEpoch + if fuluForkEpoch == cfg.FarFutureEpoch { + return cfg.FarFutureSlot, nil } forkFuluSlot, err := slots.EpochStart(fuluForkEpoch) diff --git a/beacon-chain/p2p/monitoring.go b/beacon-chain/p2p/monitoring.go index 35cb266b47..d6bc829a80 100644 --- a/beacon-chain/p2p/monitoring.go +++ b/beacon-chain/p2p/monitoring.go @@ -3,6 +3,7 @@ package p2p import ( "strings" + "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/peers" "github.com/libp2p/go-libp2p/core/peer" "github.com/libp2p/go-libp2p/core/peerstore" "github.com/prometheus/client_golang/prometheus" @@ -26,12 +27,25 @@ var ( Help: "The number of peers in a given state.", }, []string{"state"}) + p2pMaxPeers = promauto.NewGauge(prometheus.GaugeOpts{ + Name: "p2p_max_peers", + Help: "The target maximum number of peers.", + }) + p2pPeerCountDirectionType = promauto.NewGaugeVec(prometheus.GaugeOpts{ + Name: "p2p_peer_count_direction_type", + Help: "The number of peers in a given direction and type.", + }, + []string{"direction", "type"}) connectedPeersCount = promauto.NewGaugeVec(prometheus.GaugeOpts{ Name: "connected_libp2p_peers", Help: "Tracks the total number of connected libp2p peers by agent string", }, []string{"agent"}, ) + minimumPeersPerSubnet = promauto.NewGauge(prometheus.GaugeOpts{ + Name: "p2p_minimum_peers_per_subnet", + Help: "The minimum number of peers to connect to per subnet", + }) avgScoreConnectedClients = promauto.NewGaugeVec(prometheus.GaugeOpts{ Name: "connected_libp2p_peers_average_scores", Help: "Tracks the overall p2p scores of connected libp2p peers by agent string", @@ -174,18 +188,26 @@ var ( ) func (s *Service) updateMetrics() { + store := s.Host().Peerstore() connectedPeers := s.peers.Connected() + p2pPeerCount.WithLabelValues("Connected").Set(float64(len(connectedPeers))) p2pPeerCount.WithLabelValues("Disconnected").Set(float64(len(s.peers.Disconnected()))) p2pPeerCount.WithLabelValues("Connecting").Set(float64(len(s.peers.Connecting()))) p2pPeerCount.WithLabelValues("Disconnecting").Set(float64(len(s.peers.Disconnecting()))) 
p2pPeerCount.WithLabelValues("Bad").Set(float64(len(s.peers.Bad()))) - store := s.Host().Peerstore() - numConnectedPeersByClient := make(map[string]float64) + upperTCP := strings.ToUpper(string(peers.TCP)) + upperQUIC := strings.ToUpper(string(peers.QUIC)) + + p2pPeerCountDirectionType.WithLabelValues("inbound", upperTCP).Set(float64(len(s.peers.InboundConnectedWithProtocol(peers.TCP)))) + p2pPeerCountDirectionType.WithLabelValues("inbound", upperQUIC).Set(float64(len(s.peers.InboundConnectedWithProtocol(peers.QUIC)))) + p2pPeerCountDirectionType.WithLabelValues("outbound", upperTCP).Set(float64(len(s.peers.OutboundConnectedWithProtocol(peers.TCP)))) + p2pPeerCountDirectionType.WithLabelValues("outbound", upperQUIC).Set(float64(len(s.peers.OutboundConnectedWithProtocol(peers.QUIC)))) + + connectedPeersCountByClient := make(map[string]float64) peerScoresByClient := make(map[string][]float64) - for i := 0; i < len(connectedPeers); i++ { - p := connectedPeers[i] + for _, p := range connectedPeers { pid, err := peer.Decode(p.String()) if err != nil { log.WithError(err).Debug("Could not decode peer string") @@ -193,16 +215,18 @@ func (s *Service) updateMetrics() { } foundName := agentFromPid(pid, store) - numConnectedPeersByClient[foundName] += 1 + connectedPeersCountByClient[foundName] += 1 // Get peer scoring data. overallScore := s.peers.Scorers().Score(pid) peerScoresByClient[foundName] = append(peerScoresByClient[foundName], overallScore) } + connectedPeersCount.Reset() // Clear out previous results. - for agent, total := range numConnectedPeersByClient { + for agent, total := range connectedPeersCountByClient { connectedPeersCount.WithLabelValues(agent).Set(total) } + avgScoreConnectedClients.Reset() // Clear out previous results. for agent, scoringData := range peerScoresByClient { avgScore := average(scoringData) diff --git a/beacon-chain/p2p/peers/status.go b/beacon-chain/p2p/peers/status.go index 4c9928d4cb..5b0e9977f9 100644 --- a/beacon-chain/p2p/peers/status.go +++ b/beacon-chain/p2p/peers/status.go @@ -81,29 +81,31 @@ const ( type InternetProtocol string const ( - TCP = "tcp" - QUIC = "quic" + TCP = InternetProtocol("tcp") + QUIC = InternetProtocol("quic") ) -// Status is the structure holding the peer status information. -type Status struct { - ctx context.Context - scorers *scorers.Service - store *peerdata.Store - ipTracker map[string]uint64 - rand *rand.Rand - ipColocationWhitelist []*net.IPNet -} +type ( + // Status is the structure holding the peer status information. + Status struct { + ctx context.Context + scorers *scorers.Service + store *peerdata.Store + ipTracker map[string]uint64 + rand *rand.Rand + ipColocationWhitelist []*net.IPNet + } -// StatusConfig represents peer status service params. -type StatusConfig struct { - // PeerLimit specifies maximum amount of concurrent peers that are expected to be connect to the node. - PeerLimit int - // ScorerParams holds peer scorer configuration params. - ScorerParams *scorers.Config - // IPColocationWhitelist contains CIDR ranges that are exempt from IP colocation limits. - IPColocationWhitelist []*net.IPNet -} + // StatusConfig represents peer status service params. + StatusConfig struct { + // PeerLimit specifies maximum amount of concurrent peers that are expected to be connect to the node. + PeerLimit int + // ScorerParams holds peer scorer configuration params. + ScorerParams *scorers.Config + // IPColocationWhitelist contains CIDR ranges that are exempt from IP colocation limits. 
+ IPColocationWhitelist []*net.IPNet + } +) // NewStatus creates a new status entity. func NewStatus(ctx context.Context, config *StatusConfig) *Status { diff --git a/beacon-chain/p2p/rpc_topic_mappings.go b/beacon-chain/p2p/rpc_topic_mappings.go index c73395352c..68a965553a 100644 --- a/beacon-chain/p2p/rpc_topic_mappings.go +++ b/beacon-chain/p2p/rpc_topic_mappings.go @@ -345,17 +345,17 @@ func TopicFromMessage(msg string, epoch primitives.Epoch) (string, error) { return "", errors.Errorf("%s: %s", invalidRPCMessageType, msg) } - beaconConfig := params.BeaconConfig() + cfg := params.BeaconConfig() // Check if the message is to be updated in fulu. - if epoch >= beaconConfig.FuluForkEpoch { + if epoch >= cfg.FuluForkEpoch { if version, ok := fuluMapping[msg]; ok { return protocolPrefix + msg + version, nil } } // Check if the message is to be updated in altair. - if epoch >= beaconConfig.AltairForkEpoch { + if epoch >= cfg.AltairForkEpoch { if version, ok := altairMapping[msg]; ok { return protocolPrefix + msg + version, nil } diff --git a/beacon-chain/p2p/service.go b/beacon-chain/p2p/service.go index d003ba0176..000f65ce88 100644 --- a/beacon-chain/p2p/service.go +++ b/beacon-chain/p2p/service.go @@ -14,6 +14,7 @@ import ( "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/peers" "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/peers/scorers" "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/types" + "github.com/OffchainLabs/prysm/v6/cmd/beacon-chain/flags" "github.com/OffchainLabs/prysm/v6/config/features" "github.com/OffchainLabs/prysm/v6/config/params" "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" @@ -106,12 +107,16 @@ func NewService(ctx context.Context, cfg *Config) (*Service, error) { ctx, cancel := context.WithCancel(ctx) _ = cancel // govet fix for lost cancel. Cancel is handled in service.Stop(). 
- cfg = validateConfig(cfg) + validateConfig(cfg) + privKey, err := privKey(cfg) if err != nil { return nil, errors.Wrapf(err, "failed to generate p2p private key") } + p2pMaxPeers.Set(float64(cfg.MaxPeers)) + minimumPeersPerSubnet.Set(float64(flags.Get().MinimumPeersPerSubnet)) + metaData, err := metaDataFromDB(ctx, cfg.DB) if err != nil { log.WithError(err).Error("Failed to create peer metadata") diff --git a/beacon-chain/p2p/subnets.go b/beacon-chain/p2p/subnets.go index 9ba953017c..13638dd3bd 100644 --- a/beacon-chain/p2p/subnets.go +++ b/beacon-chain/p2p/subnets.go @@ -514,18 +514,18 @@ func initializePersistentSubnets(id enode.ID, epoch primitives.Epoch) error { // // return [compute_subscribed_subnet(node_id, epoch, index) for index in range(SUBNETS_PER_NODE)] func computeSubscribedSubnets(nodeID enode.ID, epoch primitives.Epoch) ([]uint64, error) { - beaconConfig := params.BeaconConfig() + cfg := params.BeaconConfig() if flags.Get().SubscribeToAllSubnets { - subnets := make([]uint64, 0, beaconConfig.AttestationSubnetCount) - for i := range beaconConfig.AttestationSubnetCount { + subnets := make([]uint64, 0, cfg.AttestationSubnetCount) + for i := range cfg.AttestationSubnetCount { subnets = append(subnets, i) } return subnets, nil } - subnets := make([]uint64, 0, beaconConfig.SubnetsPerNode) - for i := range beaconConfig.SubnetsPerNode { + subnets := make([]uint64, 0, cfg.SubnetsPerNode) + for i := range cfg.SubnetsPerNode { sub, err := computeSubscribedSubnet(nodeID, epoch, i) if err != nil { return nil, errors.Wrap(err, "compute subscribed subnet") diff --git a/beacon-chain/p2p/subnets_test.go b/beacon-chain/p2p/subnets_test.go index 3ae4b52f7c..ad2855af6b 100644 --- a/beacon-chain/p2p/subnets_test.go +++ b/beacon-chain/p2p/subnets_test.go @@ -524,12 +524,12 @@ func TestSubnetComputation(t *testing.T) { require.NoError(t, err) localNode := enode.NewLocalNode(db, convertedKey) - beaconConfig := params.BeaconConfig() + cfg := params.BeaconConfig() t.Run("standard", func(t *testing.T) { retrievedSubnets, err := computeSubscribedSubnets(localNode.ID(), 1000) require.NoError(t, err) - require.Equal(t, beaconConfig.SubnetsPerNode, uint64(len(retrievedSubnets))) + require.Equal(t, cfg.SubnetsPerNode, uint64(len(retrievedSubnets))) require.Equal(t, retrievedSubnets[0]+1, retrievedSubnets[1]) }) @@ -541,8 +541,8 @@ func TestSubnetComputation(t *testing.T) { retrievedSubnets, err := computeSubscribedSubnets(localNode.ID(), 1000) require.NoError(t, err) - require.Equal(t, beaconConfig.AttestationSubnetCount, uint64(len(retrievedSubnets))) - for i := range beaconConfig.AttestationSubnetCount { + require.Equal(t, cfg.AttestationSubnetCount, uint64(len(retrievedSubnets))) + for i := range cfg.AttestationSubnetCount { require.Equal(t, i, retrievedSubnets[i]) } }) diff --git a/beacon-chain/sync/custody.go b/beacon-chain/sync/custody.go index 0e9bc3507f..e97a57a472 100644 --- a/beacon-chain/sync/custody.go +++ b/beacon-chain/sync/custody.go @@ -90,10 +90,10 @@ func (s *Service) updateCustodyInfoIfNeeded() error { // custodyGroupCount computes the custody group count based on the custody requirement, // the validators custody requirement, and whether the node is subscribed to all data subnets. 
func (s *Service) custodyGroupCount(context.Context) (uint64, error) { - beaconConfig := params.BeaconConfig() + cfg := params.BeaconConfig() if flags.Get().SubscribeAllDataSubnets { - return beaconConfig.NumberOfCustodyGroups, nil + return cfg.NumberOfCustodyGroups, nil } validatorsCustodyRequirement, err := s.validatorsCustodyRequirement() @@ -101,7 +101,7 @@ func (s *Service) custodyGroupCount(context.Context) (uint64, error) { return 0, errors.Wrap(err, "validators custody requirement") } - return max(beaconConfig.CustodyRequirement, validatorsCustodyRequirement), nil + return max(cfg.CustodyRequirement, validatorsCustodyRequirement), nil } // validatorsCustodyRequirements computes the custody requirements based on the diff --git a/beacon-chain/sync/custody_test.go b/beacon-chain/sync/custody_test.go index 4690d58bbc..5408c1a81f 100644 --- a/beacon-chain/sync/custody_test.go +++ b/beacon-chain/sync/custody_test.go @@ -116,11 +116,11 @@ func withSubscribeAllDataSubnets(t *testing.T, fn func()) { func TestUpdateCustodyInfoIfNeeded(t *testing.T) { params.SetupTestConfigCleanup(t) - beaconConfig := params.BeaconConfig() - beaconConfig.NumberOfCustodyGroups = 128 - beaconConfig.CustodyRequirement = 4 - beaconConfig.SamplesPerSlot = 8 - params.OverrideBeaconConfig(beaconConfig) + cfg := params.BeaconConfig() + cfg.NumberOfCustodyGroups = 128 + cfg.CustodyRequirement = 4 + cfg.SamplesPerSlot = 8 + params.OverrideBeaconConfig(cfg) t.Run("Skip update when actual custody count >= target", func(t *testing.T) { setup := setupCustodyTest(t, false) @@ -159,7 +159,7 @@ func TestUpdateCustodyInfoIfNeeded(t *testing.T) { require.NoError(t, err) const expectedSlot = primitives.Slot(100) - setup.assertCustodyInfo(t, expectedSlot, beaconConfig.NumberOfCustodyGroups) + setup.assertCustodyInfo(t, expectedSlot, cfg.NumberOfCustodyGroups) }) }) } diff --git a/beacon-chain/sync/initial-sync/blocks_fetcher_test.go b/beacon-chain/sync/initial-sync/blocks_fetcher_test.go index b0fcd1df7e..5e377c9f7d 100644 --- a/beacon-chain/sync/initial-sync/blocks_fetcher_test.go +++ b/beacon-chain/sync/initial-sync/blocks_fetcher_test.go @@ -1366,16 +1366,16 @@ func TestFetchSidecars(t *testing.T) { }) t.Run("Nominal", func(t *testing.T) { - beaconConfig := params.BeaconConfig() - numberOfColumns := beaconConfig.NumberOfColumns - samplesPerSlot := beaconConfig.SamplesPerSlot + cfg := params.BeaconConfig() + numberOfColumns := cfg.NumberOfColumns + samplesPerSlot := cfg.SamplesPerSlot // Define "now" to be one epoch after genesis time + retention period. 
genesisTime := time.Date(2025, time.August, 10, 0, 0, 0, 0, time.UTC) - secondsPerSlot := beaconConfig.SecondsPerSlot - slotsPerEpoch := beaconConfig.SlotsPerEpoch + secondsPerSlot := cfg.SecondsPerSlot + slotsPerEpoch := cfg.SlotsPerEpoch secondsPerEpoch := uint64(slotsPerEpoch.Mul(secondsPerSlot)) - retentionEpochs := beaconConfig.MinEpochsForDataColumnSidecarsRequest + retentionEpochs := cfg.MinEpochsForDataColumnSidecarsRequest nowWrtGenesisSecs := retentionEpochs.Add(1).Mul(secondsPerEpoch) now := genesisTime.Add(time.Duration(nowWrtGenesisSecs) * time.Second) diff --git a/beacon-chain/sync/initial-sync/service_test.go b/beacon-chain/sync/initial-sync/service_test.go index e2b4bccbb0..6dc7cce17a 100644 --- a/beacon-chain/sync/initial-sync/service_test.go +++ b/beacon-chain/sync/initial-sync/service_test.go @@ -530,12 +530,12 @@ func TestOriginOutsideRetention(t *testing.T) { func TestFetchOriginSidecars(t *testing.T) { ctx := t.Context() - beaconConfig := params.BeaconConfig() + cfg := params.BeaconConfig() genesisTime := time.Date(2025, time.August, 10, 0, 0, 0, 0, time.UTC) - secondsPerSlot := beaconConfig.SecondsPerSlot - slotsPerEpoch := beaconConfig.SlotsPerEpoch + secondsPerSlot := cfg.SecondsPerSlot + slotsPerEpoch := cfg.SlotsPerEpoch secondsPerEpoch := uint64(slotsPerEpoch.Mul(secondsPerSlot)) - retentionEpochs := beaconConfig.MinEpochsForDataColumnSidecarsRequest + retentionEpochs := cfg.MinEpochsForDataColumnSidecarsRequest genesisValidatorRoot := [fieldparams.RootLength]byte{} diff --git a/beacon-chain/sync/metrics.go b/beacon-chain/sync/metrics.go index 8e107273d6..90735c030a 100644 --- a/beacon-chain/sync/metrics.go +++ b/beacon-chain/sync/metrics.go @@ -286,6 +286,7 @@ func (s *Service) updateMetrics() { topicPeerCount.WithLabelValues(formattedTopic).Set(float64(len(s.cfg.p2p.PubSub().ListPeers(formattedTopic)))) } + subscribedTopicPeerCount.Reset() for _, topic := range s.cfg.p2p.PubSub().GetTopics() { subscribedTopicPeerCount.WithLabelValues(topic).Set(float64(len(s.cfg.p2p.PubSub().ListPeers(topic)))) } diff --git a/beacon-chain/sync/rpc_blob_sidecars_by_root.go b/beacon-chain/sync/rpc_blob_sidecars_by_root.go index f51ec93c3e..c1e9e64555 100644 --- a/beacon-chain/sync/rpc_blob_sidecars_by_root.go +++ b/beacon-chain/sync/rpc_blob_sidecars_by_root.go @@ -113,19 +113,19 @@ func (s *Service) blobSidecarByRootRPCHandler(ctx context.Context, msg interface } func validateBlobByRootRequest(blobIdents types.BlobSidecarsByRootReq, slot primitives.Slot) error { - beaconConfig := params.BeaconConfig() + cfg := params.BeaconConfig() epoch := slots.ToEpoch(slot) blobIdentCount := uint64(len(blobIdents)) - if epoch >= beaconConfig.ElectraForkEpoch { - if blobIdentCount > beaconConfig.MaxRequestBlobSidecarsElectra { + if epoch >= cfg.ElectraForkEpoch { + if blobIdentCount > cfg.MaxRequestBlobSidecarsElectra { return types.ErrMaxBlobReqExceeded } return nil } - if blobIdentCount > beaconConfig.MaxRequestBlobSidecars { + if blobIdentCount > cfg.MaxRequestBlobSidecars { return types.ErrMaxBlobReqExceeded } diff --git a/beacon-chain/sync/rpc_data_column_sidecars_by_range.go b/beacon-chain/sync/rpc_data_column_sidecars_by_range.go index a2be4b4587..9c9fb3c8f6 100644 --- a/beacon-chain/sync/rpc_data_column_sidecars_by_range.go +++ b/beacon-chain/sync/rpc_data_column_sidecars_by_range.go @@ -38,8 +38,8 @@ func (s *Service) dataColumnSidecarsByRangeRPCHandler(ctx context.Context, msg i defer cancel() SetRPCStreamDeadlines(stream) - beaconConfig := params.BeaconConfig() - 
maxRequestDataColumnSidecars := beaconConfig.MaxRequestDataColumnSidecars + cfg := params.BeaconConfig() + maxRequestDataColumnSidecars := cfg.MaxRequestDataColumnSidecars remotePeer := stream.Conn().RemotePeer() log := log.WithFields(logrus.Fields{ @@ -102,7 +102,7 @@ func (s *Service) dataColumnSidecarsByRangeRPCHandler(ctx context.Context, msg i // Once the quota is reached, we're done serving the request. if maxRequestDataColumnSidecars == 0 { - log.WithField("initialQuota", beaconConfig.MaxRequestDataColumnSidecars).Trace("Reached quota for data column sidecars by range request") + log.WithField("initialQuota", cfg.MaxRequestDataColumnSidecars).Trace("Reached quota for data column sidecars by range request") break } } diff --git a/beacon-chain/sync/rpc_data_column_sidecars_by_range_test.go b/beacon-chain/sync/rpc_data_column_sidecars_by_range_test.go index 07d0f2a66a..8b7d212668 100644 --- a/beacon-chain/sync/rpc_data_column_sidecars_by_range_test.go +++ b/beacon-chain/sync/rpc_data_column_sidecars_by_range_test.go @@ -31,9 +31,9 @@ import ( func TestDataColumnSidecarsByRangeRPCHandler(t *testing.T) { params.SetupTestConfigCleanup(t) - beaconConfig := params.BeaconConfig() - beaconConfig.FuluForkEpoch = 0 - params.OverrideBeaconConfig(beaconConfig) + cfg := params.BeaconConfig() + cfg.FuluForkEpoch = 0 + params.OverrideBeaconConfig(cfg) params.BeaconConfig().InitializeForkSchedule() ctx := context.Background() t.Run("wrong message type", func(t *testing.T) { diff --git a/beacon-chain/sync/rpc_data_column_sidecars_by_root.go b/beacon-chain/sync/rpc_data_column_sidecars_by_root.go index 1de8a21bfb..d1b673de0f 100644 --- a/beacon-chain/sync/rpc_data_column_sidecars_by_root.go +++ b/beacon-chain/sync/rpc_data_column_sidecars_by_root.go @@ -163,9 +163,9 @@ func dataColumnsRPCMinValidSlot(currentSlot primitives.Slot) (primitives.Slot, e return primitives.Slot(math.MaxUint64), nil } - beaconConfig := params.BeaconConfig() - minReqEpochs := beaconConfig.MinEpochsForDataColumnSidecarsRequest - minStartEpoch := beaconConfig.FuluForkEpoch + cfg := params.BeaconConfig() + minReqEpochs := cfg.MinEpochsForDataColumnSidecarsRequest + minStartEpoch := cfg.FuluForkEpoch currEpoch := slots.ToEpoch(currentSlot) if currEpoch > minReqEpochs && currEpoch-minReqEpochs > minStartEpoch { diff --git a/beacon-chain/sync/rpc_data_column_sidecars_by_root_test.go b/beacon-chain/sync/rpc_data_column_sidecars_by_root_test.go index 61bd069faf..9de0b5886a 100644 --- a/beacon-chain/sync/rpc_data_column_sidecars_by_root_test.go +++ b/beacon-chain/sync/rpc_data_column_sidecars_by_root_test.go @@ -28,9 +28,9 @@ import ( func TestDataColumnSidecarsByRootRPCHandler(t *testing.T) { params.SetupTestConfigCleanup(t) - beaconConfig := params.BeaconConfig() - beaconConfig.FuluForkEpoch = 0 - params.OverrideBeaconConfig(beaconConfig) + cfg := params.BeaconConfig() + cfg.FuluForkEpoch = 0 + params.OverrideBeaconConfig(cfg) params.BeaconConfig().InitializeForkSchedule() ctxMap, err := ContextByteVersionsForValRoot(params.BeaconConfig().GenesisValidatorsRoot) require.NoError(t, err) @@ -43,9 +43,9 @@ func TestDataColumnSidecarsByRootRPCHandler(t *testing.T) { t.Run("invalid request", func(t *testing.T) { params.SetupTestConfigCleanup(t) - beaconConfig := params.BeaconConfig() - beaconConfig.MaxRequestDataColumnSidecars = 1 - params.OverrideBeaconConfig(beaconConfig) + cfg := params.BeaconConfig() + cfg.MaxRequestDataColumnSidecars = 1 + params.OverrideBeaconConfig(cfg) localP2P := p2ptest.NewTestP2P(t) service := &Service{cfg: 
&config{p2p: localP2P}} @@ -96,9 +96,9 @@ func TestDataColumnSidecarsByRootRPCHandler(t *testing.T) { }() params.SetupTestConfigCleanup(t) - beaconConfig := params.BeaconConfig() - beaconConfig.FuluForkEpoch = 1 - params.OverrideBeaconConfig(beaconConfig) + cfg := params.BeaconConfig() + cfg.FuluForkEpoch = 1 + params.OverrideBeaconConfig(cfg) localP2P := p2ptest.NewTestP2P(t) clock := startup.NewClock(time.Now(), [fieldparams.RootLength]byte{}) diff --git a/beacon-chain/sync/rpc_send_request.go b/beacon-chain/sync/rpc_send_request.go index b2a3a5045b..64096e4a51 100644 --- a/beacon-chain/sync/rpc_send_request.go +++ b/beacon-chain/sync/rpc_send_request.go @@ -465,8 +465,8 @@ func SendDataColumnSidecarsByRangeRequest( return nil, nil } - beaconConfig := params.BeaconConfig() - numberOfColumns := beaconConfig.NumberOfColumns + cfg := params.BeaconConfig() + numberOfColumns := cfg.NumberOfColumns maxRequestDataColumnSidecars := params.BeaconConfig().MaxRequestDataColumnSidecars // Check if we do not request too many sidecars. diff --git a/beacon-chain/sync/rpc_send_request_test.go b/beacon-chain/sync/rpc_send_request_test.go index 8db82508cc..8c7af5e03f 100644 --- a/beacon-chain/sync/rpc_send_request_test.go +++ b/beacon-chain/sync/rpc_send_request_test.go @@ -889,9 +889,9 @@ func TestErrInvalidFetchedDataDistinction(t *testing.T) { func TestSendDataColumnSidecarsByRangeRequest(t *testing.T) { params.SetupTestConfigCleanup(t) - beaconConfig := params.BeaconConfig() - beaconConfig.FuluForkEpoch = 0 - params.OverrideBeaconConfig(beaconConfig) + cfg := params.BeaconConfig() + cfg.FuluForkEpoch = 0 + params.OverrideBeaconConfig(cfg) params.BeaconConfig().InitializeForkSchedule() ctxMap, err := ContextByteVersionsForValRoot(params.BeaconConfig().GenesisValidatorsRoot) require.NoError(t, err) @@ -923,9 +923,9 @@ func TestSendDataColumnSidecarsByRangeRequest(t *testing.T) { t.Run("too many columns in request", func(t *testing.T) { params.SetupTestConfigCleanup(t) - beaconConfig := params.BeaconConfig() - beaconConfig.MaxRequestDataColumnSidecars = 0 - params.OverrideBeaconConfig(beaconConfig) + cfg := params.BeaconConfig() + cfg.MaxRequestDataColumnSidecars = 0 + params.OverrideBeaconConfig(cfg) request := ðpb.DataColumnSidecarsByRangeRequest{Count: 1, Columns: []uint64{1, 2, 3}} _, err := SendDataColumnSidecarsByRangeRequest(DataColumnSidecarsParams{Ctx: t.Context()}, "", request) @@ -1193,9 +1193,9 @@ func TestIsSidecarIndexRequested(t *testing.T) { func TestSendDataColumnSidecarsByRootRequest(t *testing.T) { params.SetupTestConfigCleanup(t) - beaconConfig := params.BeaconConfig() - beaconConfig.FuluForkEpoch = 0 - params.OverrideBeaconConfig(beaconConfig) + cfg := params.BeaconConfig() + cfg.FuluForkEpoch = 0 + params.OverrideBeaconConfig(cfg) params.BeaconConfig().InitializeForkSchedule() ctxMap, err := ContextByteVersionsForValRoot(params.BeaconConfig().GenesisValidatorsRoot) require.NoError(t, err) @@ -1223,9 +1223,9 @@ func TestSendDataColumnSidecarsByRootRequest(t *testing.T) { t.Run("too many columns in request", func(t *testing.T) { params.SetupTestConfigCleanup(t) - beaconConfig := params.BeaconConfig() - beaconConfig.MaxRequestDataColumnSidecars = 4 - params.OverrideBeaconConfig(beaconConfig) + cfg := params.BeaconConfig() + cfg.MaxRequestDataColumnSidecars = 4 + params.OverrideBeaconConfig(cfg) request := p2ptypes.DataColumnsByRootIdentifiers{ {Columns: []uint64{1, 2, 3}}, diff --git a/beacon-chain/sync/rpc_status_test.go b/beacon-chain/sync/rpc_status_test.go index 
c9ce176f62..c19a70a4a5 100644 --- a/beacon-chain/sync/rpc_status_test.go +++ b/beacon-chain/sync/rpc_status_test.go @@ -445,7 +445,7 @@ func TestStatusRPCRequest_RequestSent(t *testing.T) { custodyGroupCount = uint64(4) ) - beaconConfig := params.BeaconConfig() + cfg := params.BeaconConfig() ctx := t.Context() testCases := []struct { @@ -456,7 +456,7 @@ func TestStatusRPCRequest_RequestSent(t *testing.T) { }{ { name: "before fulu", - fuluForkEpoch: beaconConfig.FarFutureEpoch, + fuluForkEpoch: cfg.FarFutureEpoch, topic: "/eth2/beacon_chain/req/status/1/ssz_snappy", streamHandler: func(service *Service, stream network.Stream, genesisState beaconState.BeaconState, beaconRoot, headRoot, finalizedRoot []byte) { out := ðpb.Status{} diff --git a/beacon-chain/sync/subscriber.go b/beacon-chain/sync/subscriber.go index e63c554171..babe32eb9f 100644 --- a/beacon-chain/sync/subscriber.go +++ b/beacon-chain/sync/subscriber.go @@ -695,10 +695,10 @@ func (s *Service) dataColumnSubnetIndices(primitives.Slot) map[uint64]bool { // the validators custody requirement, and whether the node is subscribed to all data subnets. // https://github.com/ethereum/consensus-specs/blob/master/specs/fulu/das-core.md#custody-sampling func (s *Service) samplingSize() (uint64, error) { - beaconConfig := params.BeaconConfig() + cfg := params.BeaconConfig() if flags.Get().SubscribeAllDataSubnets { - return beaconConfig.DataColumnSidecarSubnetCount, nil + return cfg.DataColumnSidecarSubnetCount, nil } // Compute the validators custody requirement. @@ -712,7 +712,7 @@ func (s *Service) samplingSize() (uint64, error) { return 0, errors.Wrap(err, "custody group count") } - return max(beaconConfig.SamplesPerSlot, validatorsCustodyRequirement, custodyGroupCount), nil + return max(cfg.SamplesPerSlot, validatorsCustodyRequirement, custodyGroupCount), nil } func (s *Service) persistentAndAggregatorSubnetIndices(currentSlot primitives.Slot) map[uint64]bool { diff --git a/changelog/manu-metrics.md b/changelog/manu-metrics.md new file mode 100644 index 0000000000..edd98236e5 --- /dev/null +++ b/changelog/manu-metrics.md @@ -0,0 +1,4 @@ +### Added +- Metrics: Add count of peers per direction and type (inbound/outbound), (TCP/QUIC). +- `p2p_subscribed_topic_peer_total`: Reset to avoid dangling values. +- Add `p2p_minimum_peers_per_subnet` metric. \ No newline at end of file From 10a2f0687b67987ff0fae746015fe773ddd1a4f2 Mon Sep 17 00:00:00 2001 From: fernantho Date: Tue, 28 Oct 2025 00:27:34 +0100 Subject: [PATCH 057/103] SSZ-QL: calculate generalized indices for elements (#15873) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * added tests for calculating generalized indices * added first version of GI calculation walking the specified path with no recursion. Extended test coverage for bitlist and bitvectors. vectors need more testing * refactored code. Detached PathElement processing, currently done at the beginning. Swap to regex to gain flexibility. * added an updateRoot function with the GI formula. more refactoring * added changelog * replaced TODO tag * udpated some comments * simplified code - removed duplicated code in processingLengthField function * run gazelle * merging all input path processing into path.go * reviewed Jun's feedback * removed unnecessary idx pointer var + fixed error with length data type (uint64 instead of uint8) * refactored path.go after merging path elements from generalized_indices.go * re-computed GIs for tests as VariableTestContainer added a new field. 
* added minor comment - rawPath MUST be snake case removed extractFieldName func. * fixed vector GI calculation - updated tests GIs * removed updateRoot function in favor of inline code * path input data enforced to be snake case * added sanity checks for accessing outbound element indices - checked against vector.length/list.limit * fixed issues triggered after merging develop * Removed redundant comment Co-authored-by: Jun Song <87601811+syjn99@users.noreply.github.com> * removed unreachable condition as `strings.Split` always return a slice with length >= 1 If s does not contain sep and sep is not empty, Split returns a slice of length 1 whose only element is s. * added tests to cover edge cases + cleaned code (toLower is no longer needed in extractFieldName function * added Jun's feedback + more testing * postponed snake case conversion to do it on a per-element-basis. Added more testing focused mainly in snake case conversion * addressed several Jun's comments. * added sanity check to prevent length of a multi-dimensional array. added more tests with extended paths * Update encoding/ssz/query/generalized_index.go Co-authored-by: Radosław Kapka * Update encoding/ssz/query/generalized_index.go Co-authored-by: Radosław Kapka * Update encoding/ssz/query/generalized_index.go Co-authored-by: Radosław Kapka * placed constant bitsPerChunk in the right place. Exported BitsPerChunk and BytesPerChunk and updated code that use them * added helpers for computing GI of each data type * changed %q in favor of %s * Update encoding/ssz/query/path.go Co-authored-by: Jun Song <87601811+syjn99@users.noreply.github.com> * removed the least restrictive condition isBasicType * replaced length of containerInfo.order for containerInfo.fields for clarity * removed outdated comment * removed toSnakeCase conversion. * moved isBasicType func to its natural place, SSZType * cosmetic refactor - renamed itemLengthFromInfo to itemLength (same name is in spec). - arranged all SSZ helpers. * cleaned tests * renamed "root" to "index" * removed unnecessary check for negative integers. Replaced %q for %s. * refactored regex variables and prevented re-assignation * added length regex explanation * added more testing for stressing regex for path processing * renamed currentIndex to parentIndex for clarity and documented the returns from calculateGeneralizedIndex functions * Update encoding/ssz/query/generalized_index.go Co-authored-by: Radosław Kapka * run gazelle * fixed never asserted error. 
Updated error message --------- Co-authored-by: Jun Song <87601811+syjn99@users.noreply.github.com> Co-authored-by: Radosław Kapka Co-authored-by: Radosław Kapka --- ...ho_ssz-ql-calculate-generalized-indices.md | 3 + encoding/ssz/helpers.go | 25 +- encoding/ssz/query/BUILD.bazel | 7 +- encoding/ssz/query/generalized_index.go | 321 +++++++++++++++ encoding/ssz/query/generalized_index_test.go | 370 ++++++++++++++++++ encoding/ssz/query/path.go | 103 +++-- encoding/ssz/query/path_test.go | 176 ++++++++- encoding/ssz/query/ssz_type.go | 5 + 8 files changed, 971 insertions(+), 39 deletions(-) create mode 100644 changelog/fernantho_ssz-ql-calculate-generalized-indices.md create mode 100644 encoding/ssz/query/generalized_index.go create mode 100644 encoding/ssz/query/generalized_index_test.go diff --git a/changelog/fernantho_ssz-ql-calculate-generalized-indices.md b/changelog/fernantho_ssz-ql-calculate-generalized-indices.md new file mode 100644 index 0000000000..737e26a617 --- /dev/null +++ b/changelog/fernantho_ssz-ql-calculate-generalized-indices.md @@ -0,0 +1,3 @@ +### Added + +- Added GeneralizedIndicesFromPath function to calculate the GIs for a given sszInfo object and a PathElement diff --git a/encoding/ssz/helpers.go b/encoding/ssz/helpers.go index 91b3f2e4dd..fa46ec927c 100644 --- a/encoding/ssz/helpers.go +++ b/encoding/ssz/helpers.go @@ -11,7 +11,10 @@ import ( "github.com/prysmaticlabs/go-bitfield" ) -const bytesPerChunk = 32 +const ( + BitsPerChunk = 256 + BytesPerChunk = 32 +) // BitlistRoot returns the mix in length of a bitwise Merkleized bitfield. func BitlistRoot(bfield bitfield.Bitfield, maxCapacity uint64) ([32]byte, error) { @@ -54,14 +57,14 @@ func BitwiseMerkleize(chunks [][32]byte, count, limit uint64) ([32]byte, error) } // PackByChunk a given byte array's final chunk with zeroes if needed. -func PackByChunk(serializedItems [][]byte) ([][bytesPerChunk]byte, error) { - var emptyChunk [bytesPerChunk]byte +func PackByChunk(serializedItems [][]byte) ([][BytesPerChunk]byte, error) { + var emptyChunk [BytesPerChunk]byte // If there are no items, we return an empty chunk. if len(serializedItems) == 0 { - return [][bytesPerChunk]byte{emptyChunk}, nil - } else if len(serializedItems[0]) == bytesPerChunk { + return [][BytesPerChunk]byte{emptyChunk}, nil + } else if len(serializedItems[0]) == BytesPerChunk { // If each item has exactly BYTES_PER_CHUNK length, we return the list of serialized items. - chunks := make([][bytesPerChunk]byte, 0, len(serializedItems)) + chunks := make([][BytesPerChunk]byte, 0, len(serializedItems)) for _, c := range serializedItems { chunks = append(chunks, bytesutil.ToBytes32(c)) } @@ -75,12 +78,12 @@ func PackByChunk(serializedItems [][]byte) ([][bytesPerChunk]byte, error) { // If all our serialized item slices are length zero, we // exit early. if len(orderedItems) == 0 { - return [][bytesPerChunk]byte{emptyChunk}, nil + return [][BytesPerChunk]byte{emptyChunk}, nil } numItems := len(orderedItems) - var chunks [][bytesPerChunk]byte - for i := 0; i < numItems; i += bytesPerChunk { - j := i + bytesPerChunk + var chunks [][BytesPerChunk]byte + for i := 0; i < numItems; i += BytesPerChunk { + j := i + BytesPerChunk // We create our upper bound index of the chunk, if it is greater than numItems, // we set it as numItems itself. if j > numItems { @@ -89,7 +92,7 @@ func PackByChunk(serializedItems [][]byte) ([][bytesPerChunk]byte, error) { // We create chunks from the list of items based on the // indices determined above. 
// Right-pad the last chunk with zero bytes if it does not - // have length bytesPerChunk from the helper. + // have length BytesPerChunk from the helper. // The ToBytes32 helper allocates a 32-byte array, before // copying the ordered items in. This ensures that even if // the last chunk is != 32 in length, we will right-pad it with diff --git a/encoding/ssz/query/BUILD.bazel b/encoding/ssz/query/BUILD.bazel index e6a21f6aa3..5963ad69c3 100644 --- a/encoding/ssz/query/BUILD.bazel +++ b/encoding/ssz/query/BUILD.bazel @@ -7,6 +7,7 @@ go_library( "bitlist.go", "bitvector.go", "container.go", + "generalized_index.go", "list.go", "path.go", "query.go", @@ -18,12 +19,16 @@ go_library( ], importpath = "github.com/OffchainLabs/prysm/v6/encoding/ssz/query", visibility = ["//visibility:public"], - deps = ["@com_github_prysmaticlabs_go_bitfield//:go_default_library"], + deps = [ + "//encoding/ssz:go_default_library", + "@com_github_prysmaticlabs_go_bitfield//:go_default_library", + ], ) go_test( name = "go_default_test", srcs = [ + "generalized_index_test.go", "path_test.go", "query_test.go", "tag_parser_test.go", diff --git a/encoding/ssz/query/generalized_index.go b/encoding/ssz/query/generalized_index.go new file mode 100644 index 0000000000..8c8f894266 --- /dev/null +++ b/encoding/ssz/query/generalized_index.go @@ -0,0 +1,321 @@ +package query + +import ( + "errors" + "fmt" + + "github.com/OffchainLabs/prysm/v6/encoding/ssz" +) + +const listBaseIndex = 2 + +// GetGeneralizedIndexFromPath calculates the generalized index for a given path. +// To calculate the generalized index, two inputs are needed: +// 1. The sszInfo of the root object, to be able to navigate the SSZ structure +// 2. The path to the field (e.g., "field_a.field_b[3].field_c") +// It walks the path step by step, updating the generalized index at each step. +func GetGeneralizedIndexFromPath(info *SszInfo, path []PathElement) (uint64, error) { + if info == nil { + return 0, errors.New("SszInfo is nil") + } + + // If path is empty, no generalized index can be computed. 
+ if len(path) == 0 { + return 0, errors.New("cannot compute generalized index for an empty path") + } + + // Starting from the root generalized index + currentIndex := uint64(1) + currentInfo := info + + for _, pathElement := range path { + element := pathElement + + // Check that we are in a container to access fields + if currentInfo.sszType != Container { + return 0, fmt.Errorf("indexing requires a container field step first, got %s", currentInfo.sszType) + } + + // Retrieve the field position and SSZInfo for the field in the current container + fieldPos, fieldSsz, err := getContainerFieldByName(currentInfo, element.Name) + if err != nil { + return 0, fmt.Errorf("container field %s not found: %w", element.Name, err) + } + + // Get the chunk count for the current container + chunkCount, err := getChunkCount(currentInfo) + if err != nil { + return 0, fmt.Errorf("chunk count error: %w", err) + } + + // Update the generalized index to point to the specified field + currentIndex = currentIndex*nextPowerOfTwo(chunkCount) + fieldPos + currentInfo = fieldSsz + + // Check if a path element is a length field + if element.Length { + currentInfo, currentIndex, err = calculateLengthGeneralizedIndex(fieldSsz, element, currentIndex) + if err != nil { + return 0, fmt.Errorf("length calculation error: %w", err) + } + continue + } + + if element.Index == nil { + continue + } + + switch fieldSsz.sszType { + case List: + currentInfo, currentIndex, err = calculateListGeneralizedIndex(fieldSsz, element, currentIndex) + if err != nil { + return 0, fmt.Errorf("list calculation error: %w", err) + } + + case Vector: + currentInfo, currentIndex, err = calculateVectorGeneralizedIndex(fieldSsz, element, currentIndex) + if err != nil { + return 0, fmt.Errorf("vector calculation error: %w", err) + } + + case Bitlist: + currentInfo, currentIndex, err = calculateBitlistGeneralizedIndex(fieldSsz, element, currentIndex) + if err != nil { + return 0, fmt.Errorf("bitlist calculation error: %w", err) + } + + case Bitvector: + currentInfo, currentIndex, err = calculateBitvectorGeneralizedIndex(fieldSsz, element, currentIndex) + if err != nil { + return 0, fmt.Errorf("bitvector calculation error: %w", err) + } + + default: + return 0, fmt.Errorf("indexing not supported for type %s", fieldSsz.sszType) + } + + } + + return currentIndex, nil +} + +// getContainerFieldByName finds a container field by its name +// and returns its index and SSZInfo. +func getContainerFieldByName(info *SszInfo, fieldName string) (uint64, *SszInfo, error) { + containerInfo, err := info.ContainerInfo() + if err != nil { + return 0, nil, err + } + + for index, name := range containerInfo.order { + if name == fieldName { + fieldInfo := containerInfo.fields[name] + if fieldInfo == nil || fieldInfo.sszInfo == nil { + return 0, nil, fmt.Errorf("field %s has no ssz info", name) + } + return uint64(index), fieldInfo.sszInfo, nil + } + } + + return 0, nil, fmt.Errorf("field %s not found", fieldName) +} + +// Helpers for Generalized Index calculation per type + +// calculateLengthGeneralizedIndex calculates the generalized index for a length field. +// note: length fields are only valid for List and Bitlist types. Multi-dimensional arrays are not supported. +// Returns: +// - its descendant SSZInfo (length field i.e. uint64) +// - its generalized index. 
+func calculateLengthGeneralizedIndex(fieldSsz *SszInfo, element PathElement, parentIndex uint64) (*SszInfo, uint64, error) { + if element.Index != nil { + return nil, 0, fmt.Errorf("len() is not supported for multi-dimensional arrays") + } + // Length field is only valid for List and Bitlist types + if fieldSsz.sszType != List && fieldSsz.sszType != Bitlist { + return nil, 0, fmt.Errorf("len() is only supported for List and Bitlist types, got %s", fieldSsz.sszType) + } + // Length is a uint64 per SSZ spec + currentInfo := &SszInfo{sszType: Uint64} + lengthIndex := parentIndex*2 + 1 + return currentInfo, lengthIndex, nil +} + +// calculateListGeneralizedIndex calculates the generalized index for a list element. +// Returns: +// - its descendant SSZInfo (list element) +// - its generalized index. +func calculateListGeneralizedIndex(fieldSsz *SszInfo, element PathElement, parentIndex uint64) (*SszInfo, uint64, error) { + li, err := fieldSsz.ListInfo() + if err != nil { + return nil, 0, fmt.Errorf("list info error: %w", err) + } + elem, err := li.Element() + if err != nil { + return nil, 0, fmt.Errorf("list element error: %w", err) + } + if *element.Index >= li.Limit() { + return nil, 0, fmt.Errorf("index %d out of bounds for list with limit %d", *element.Index, li.Limit()) + } + // Compute chunk position for the element + var chunkPos uint64 + if elem.sszType.isBasic() { + start := *element.Index * itemLength(elem) + chunkPos = start / ssz.BytesPerChunk + } else { + chunkPos = *element.Index + } + innerChunkCount, err := getChunkCount(fieldSsz) + if err != nil { + return nil, 0, fmt.Errorf("chunk count error: %w", err) + } + // root = root * base_index * pow2ceil(chunk_count(container)) + fieldPos + listIndex := parentIndex*listBaseIndex*nextPowerOfTwo(innerChunkCount) + chunkPos + currentInfo := elem + + return currentInfo, listIndex, nil +} + +// calculateVectorGeneralizedIndex calculates the generalized index for a vector element. +// Returns: +// - its descendant SSZInfo (vector element) +// - its generalized index. +func calculateVectorGeneralizedIndex(fieldSsz *SszInfo, element PathElement, parentIndex uint64) (*SszInfo, uint64, error) { + vi, err := fieldSsz.VectorInfo() + if err != nil { + return nil, 0, fmt.Errorf("vector info error: %w", err) + } + elem, err := vi.Element() + if err != nil { + return nil, 0, fmt.Errorf("vector element error: %w", err) + } + if *element.Index >= vi.Length() { + return nil, 0, fmt.Errorf("index %d out of bounds for vector with length %d", *element.Index, vi.Length()) + } + var chunkPos uint64 + if elem.sszType.isBasic() { + start := *element.Index * itemLength(elem) + chunkPos = start / ssz.BytesPerChunk + } else { + chunkPos = *element.Index + } + innerChunkCount, err := getChunkCount(fieldSsz) + if err != nil { + return nil, 0, fmt.Errorf("chunk count error: %w", err) + } + vectorIndex := parentIndex*nextPowerOfTwo(innerChunkCount) + chunkPos + + currentInfo := elem + return currentInfo, vectorIndex, nil +} + +// calculateBitlistGeneralizedIndex calculates the generalized index for a bitlist element. +// Returns: +// - its descendant SSZInfo (bitlist element i.e. a boolean) +// - its generalized index. 
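+// Bits are packed 256 per chunk, so bit i lives in chunk i/256 of the data subtree, which sits
+// under the left child of the bitlist root (the right child holds the mixed-in length). The
+// resulting index is therefore parent*2*pow2ceil(chunk_count) + i/256, mirroring the list case.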
+func calculateBitlistGeneralizedIndex(fieldSsz *SszInfo, element PathElement, parentIndex uint64) (*SszInfo, uint64, error) { + // Bits packed into 256-bit chunks; select the chunk containing the bit + chunkPos := *element.Index / ssz.BitsPerChunk + innerChunkCount, err := getChunkCount(fieldSsz) + if err != nil { + return nil, 0, fmt.Errorf("chunk count error: %w", err) + } + bitlistIndex := parentIndex*listBaseIndex*nextPowerOfTwo(innerChunkCount) + chunkPos + + // Bits element is not further descendable; set to basic to guard further steps + currentInfo := &SszInfo{sszType: Boolean} + return currentInfo, bitlistIndex, nil +} + +// calculateBitvectorGeneralizedIndex calculates the generalized index for a bitvector element. +// Returns: +// - its descendant SSZInfo (bitvector element i.e. a boolean) +// - its generalized index. +func calculateBitvectorGeneralizedIndex(fieldSsz *SszInfo, element PathElement, parentIndex uint64) (*SszInfo, uint64, error) { + chunkPos := *element.Index / ssz.BitsPerChunk + innerChunkCount, err := getChunkCount(fieldSsz) + if err != nil { + return nil, 0, fmt.Errorf("chunk count error: %w", err) + } + bitvectorIndex := parentIndex*nextPowerOfTwo(innerChunkCount) + chunkPos + + // Bits element is not further descendable; set to basic to guard further steps + currentInfo := &SszInfo{sszType: Boolean} + return currentInfo, bitvectorIndex, nil +} + +// Helper functions from SSZ spec + +// itemLength calculates the byte length of an SSZ item based on its type information. +// For basic SSZ types (uint8, uint16, uint32, uint64, bool, etc.), it returns the actual +// size of the type in bytes. For compound types (containers, lists, vectors), it returns +// BytesPerChunk which represents the standard SSZ chunk size (32 bytes) used for +// Merkle tree operations in the SSZ serialization format. +func itemLength(info *SszInfo) uint64 { + if info.sszType.isBasic() { + return info.Size() + } + return ssz.BytesPerChunk +} + +// nextPowerOfTwo computes the next power of two greater than or equal to v. 
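+// This is the pow2ceil helper referenced in the comments above: a node's chunk tree is padded
+// to a power-of-two width, e.g. nextPowerOfTwo(5) == 8 and nextPowerOfTwo(8) == 8.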
+func nextPowerOfTwo(v uint64) uint64 { + v-- + v |= v >> 1 + v |= v >> 2 + v |= v >> 4 + v |= v >> 8 + v |= v >> 16 + v++ + return uint64(v) +} + +// getChunkCount returns the number of chunks for the given SSZInfo (equivalent to chunk_count in the spec) +func getChunkCount(info *SszInfo) (uint64, error) { + switch info.sszType { + case Uint8, Uint16, Uint32, Uint64, Boolean: + return 1, nil + case Container: + containerInfo, err := info.ContainerInfo() + if err != nil { + return 0, err + } + return uint64(len(containerInfo.fields)), nil + case List: + listInfo, err := info.ListInfo() + if err != nil { + return 0, err + } + elementInfo, err := listInfo.Element() + if err != nil { + return 0, err + } + elemLength := itemLength(elementInfo) + return (listInfo.Limit()*elemLength + 31) / ssz.BytesPerChunk, nil + case Vector: + vectorInfo, err := info.VectorInfo() + if err != nil { + return 0, err + } + elementInfo, err := vectorInfo.Element() + if err != nil { + return 0, err + } + elemLength := itemLength(elementInfo) + return (vectorInfo.Length()*elemLength + 31) / ssz.BytesPerChunk, nil + case Bitlist: + bitlistInfo, err := info.BitlistInfo() + if err != nil { + return 0, err + } + return (bitlistInfo.Limit() + 255) / ssz.BitsPerChunk, nil // Bits are packed into 256-bit chunks + case Bitvector: + bitvectorInfo, err := info.BitvectorInfo() + if err != nil { + return 0, err + } + return (bitvectorInfo.Length() + 255) / ssz.BitsPerChunk, nil // Bits are packed into 256-bit chunks + default: + return 0, errors.New("unsupported SSZ type for chunk count calculation") + } +} diff --git a/encoding/ssz/query/generalized_index_test.go b/encoding/ssz/query/generalized_index_test.go new file mode 100644 index 0000000000..080cf4a6f9 --- /dev/null +++ b/encoding/ssz/query/generalized_index_test.go @@ -0,0 +1,370 @@ +package query_test + +import ( + "strings" + "testing" + + "github.com/OffchainLabs/prysm/v6/encoding/ssz/query" + sszquerypb "github.com/OffchainLabs/prysm/v6/proto/ssz_query/testing" + "github.com/OffchainLabs/prysm/v6/testing/require" +) + +func TestGetIndicesFromPath_FixedNestedContainer(t *testing.T) { + fixedNestedContainer := &sszquerypb.FixedNestedContainer{} + + info, err := query.AnalyzeObject(fixedNestedContainer) + require.NoError(t, err) + require.NotNil(t, info, "Expected non-nil SSZ info") + + testCases := []struct { + name string + path string + expectedIndex uint64 + expectError bool + errorMessage string + }{ + { + name: "Value1 field", + path: ".value1", + expectedIndex: 2, + expectError: false, + }, + { + name: "Value3 field", + path: ".value3", + expectError: true, + errorMessage: "field value3 not found", + }, + { + name: "Basic field cannot descend", + path: "value1.value1", + expectError: true, + errorMessage: "indexing requires a container field step first, got Uint64", + }, + { + name: "Indexing without container step", + path: "value2.value2[0]", + expectError: true, + errorMessage: "indexing requires a container field step first", + }, + { + name: "Value2 field", + path: "value2", + expectedIndex: 3, + expectError: false, + }, + { + name: "Value2 -> element[0]", + path: "value2[0]", + expectedIndex: 3, + expectError: false, + }, + { + name: "Value2 -> element[31]", + path: "value2[31]", + expectedIndex: 3, + expectError: false, + }, + { + name: "Empty path error", + path: "", + expectError: true, + errorMessage: "empty path", + }, + } + + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + provingFields, err := query.ParsePath(tc.path) + 
require.NoError(t, err) + + actualIndex, err := query.GetGeneralizedIndexFromPath(info, provingFields) + if tc.expectError { + require.NotNil(t, err) + if tc.errorMessage != "" { + if !strings.Contains(err.Error(), tc.errorMessage) { + t.Errorf("Expected error message to contain '%s', but got: %s", tc.errorMessage, err.Error()) + } + } + } else { + require.NoError(t, err) + require.Equal(t, tc.expectedIndex, actualIndex, "Generalized index mismatch for path: %s", tc.path) + t.Logf("Path: %s -> Generalized Index: %v", tc.path, actualIndex) + } + }) + } +} + +func TestGetIndicesFromPath_VariableTestContainer(t *testing.T) { + testSpec := &sszquerypb.VariableTestContainer{} + info, err := query.AnalyzeObject(testSpec) + require.NoError(t, err) + require.NotNil(t, info, "Expected non-nil SSZ info") + + testCases := []struct { + name string + path string + expectedIndex uint64 + expectError bool + errorMessage string + }{ + { + name: "leading_field", + path: "leading_field", + expectedIndex: 16, + expectError: false, + }, + { + name: "field_list_uint64", + path: "field_list_uint64", + expectedIndex: 17, + expectError: false, + }, + { + name: "len(field_list_uint64)", + path: "len(field_list_uint64)", + expectedIndex: 35, + expectError: false, + }, + { + name: "field_list_uint64[0]", + path: "field_list_uint64[0]", + expectedIndex: 17408, + expectError: false, + }, + { + name: "field_list_uint64[2047]", + path: "field_list_uint64[2047]", + expectedIndex: 17919, + expectError: false, + }, + { + name: "bitlist_field", + path: "bitlist_field", + expectedIndex: 22, + expectError: false, + }, + { + name: "bitlist_field[0]", + path: "bitlist_field[0]", + expectedIndex: 352, + expectError: false, + }, + { + name: "bitlist_field[1]", + path: "bitlist_field[1]", + expectedIndex: 352, + expectError: false, + }, + { + name: "len(bitlist_field)", + path: "len(bitlist_field)", + expectedIndex: 45, + expectError: false, + }, + { + name: "len(trailing_field)", + path: "len(trailing_field)", + expectError: true, + errorMessage: "len() is only supported for List and Bitlist types, got Vector", + }, + { + name: "field_list_container[0]", + path: "field_list_container[0]", + expectedIndex: 4608, + expectError: false, + }, + { + name: "nested", + path: "nested", + expectedIndex: 20, + expectError: false, + }, + { + name: "nested.field_list_uint64[10]", + path: "nested.field_list_uint64[10]", + expectedIndex: 5186, + expectError: false, + }, + { + name: "variable_container_list", + path: "variable_container_list", + expectedIndex: 21, + expectError: false, + }, + { + name: "len(variable_container_list)", + path: "len(variable_container_list)", + expectedIndex: 43, + expectError: false, + }, + { + name: "variable_container_list[0]", + path: "variable_container_list[0]", + expectedIndex: 672, + expectError: false, + }, + { + name: "variable_container_list[0].inner_1", + path: "variable_container_list[0].inner_1", + expectedIndex: 1344, + expectError: false, + }, + { + name: "variable_container_list[0].inner_1.field_list_uint64[1]", + path: "variable_container_list[0].inner_1.field_list_uint64[1]", + expectedIndex: 344128, + expectError: false, + }, + { + name: "variable_container_list[0].inner_1.len(nested_list_field[3])", + path: "variable_container_list[0].inner_1.len(nested_list_field[3])", + expectError: true, + errorMessage: "length calculation error: len() is not supported for multi-dimensional arrays", + }, + } + + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + provingFields, err := 
query.ParsePath(tc.path) + require.NoError(t, err) + + actualIndex, err := query.GetGeneralizedIndexFromPath(info, provingFields) + + if tc.expectError { + require.NotNil(t, err) + if tc.errorMessage != "" { + if !strings.Contains(err.Error(), tc.errorMessage) { + t.Errorf("Expected error message to contain '%s', but got: %s", tc.errorMessage, err.Error()) + } + } + } else { + require.NoError(t, err) + require.Equal(t, tc.expectedIndex, actualIndex, "Generalized index mismatch for path: %s", tc.path) + t.Logf("Path: %s -> Generalized Index: %v", tc.path, actualIndex) + } + }) + } +} + +func TestGetIndicesFromPath_FixedTestContainer(t *testing.T) { + testSpec := &sszquerypb.FixedTestContainer{} + info, err := query.AnalyzeObject(testSpec) + require.NoError(t, err) + require.NotNil(t, info, "Expected non-nil SSZ info") + + testCases := []struct { + name string + path string + expectedIndex uint64 + expectError bool + errorMessage string + }{ + { + name: "field_uint32", + path: "field_uint32", + expectedIndex: 16, + expectError: false, + }, + { + name: ".field_uint64", + path: ".field_uint64", + expectedIndex: 17, + expectError: false, + }, + { + name: "field_bool", + path: "field_bool", + expectedIndex: 18, + expectError: false, + }, + { + name: "field_bytes32", + path: "field_bytes32", + expectedIndex: 19, + expectError: false, + }, + { + name: "nested", + path: "nested", + expectedIndex: 20, + expectError: false, + }, + { + name: "vector_field", + path: "vector_field", + expectedIndex: 21, + expectError: false, + }, + { + name: "two_dimension_bytes_field", + path: "two_dimension_bytes_field", + expectedIndex: 22, + expectError: false, + }, + { + name: "bitvector64_field", + path: "bitvector64_field", + expectedIndex: 23, + expectError: false, + }, + { + name: "bitvector512_field", + path: "bitvector512_field", + expectedIndex: 24, + expectError: false, + }, + { + name: "bitvector64_field[0]", + path: "bitvector64_field[0]", + expectedIndex: 23, + expectError: false, + }, + { + name: "bitvector64_field[63]", + path: "bitvector64_field[63]", + expectedIndex: 23, + expectError: false, + }, + { + name: "bitvector512_field[0]", + path: "bitvector512_field[0]", + expectedIndex: 48, + expectError: false, + }, + { + name: "bitvector512_field[511]", + path: "bitvector512_field[511]", + expectedIndex: 49, + expectError: false, + }, + { + name: "trailing_field", + path: "trailing_field", + expectedIndex: 25, + expectError: false, + }, + } + + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + provingFields, err := query.ParsePath(tc.path) + require.NoError(t, err) + + actualIndex, err := query.GetGeneralizedIndexFromPath(info, provingFields) + + if tc.expectError { + require.NotNil(t, err) + if tc.errorMessage != "" { + if !strings.Contains(err.Error(), tc.errorMessage) { + t.Errorf("Expected error message to contain '%s', but got: %s", tc.errorMessage, err.Error()) + } + } + } else { + require.NoError(t, err) + require.Equal(t, tc.expectedIndex, actualIndex, "Generalized index mismatch for path: %s", tc.path) + t.Logf("Path: %s -> Generalized Index: %v", tc.path, actualIndex) + } + }) + } +} diff --git a/encoding/ssz/query/path.go b/encoding/ssz/query/path.go index a45ed171d3..569c96fdb5 100644 --- a/encoding/ssz/query/path.go +++ b/encoding/ssz/query/path.go @@ -3,23 +3,31 @@ package query import ( "errors" "fmt" + "regexp" "strconv" "strings" ) // PathElement represents a single element in a path. 
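+// For example, "attestations[3]" parses to Name "attestations" with Index 3, and
+// "len(attestations)" parses to Name "attestations" with Length set to true.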
type PathElement struct { - Name string + Length bool + Name string // [Optional] Index for List/Vector elements Index *uint64 } +var arrayIndexRegex = regexp.MustCompile(`\[\s*([^\]]+)\s*\]`) + +var lengthRegex = regexp.MustCompile(`^\s*len\s*\(\s*([^)]+?)\s*\)\s*$`) + +// ParsePath parses a raw path string into a slice of PathElements. +// note: field names are stored in snake case format. rawPath has to be provided in snake case. +// 1. Supports dot notation for field access (e.g., "field1.field2"). +// 2. Supports array indexing using square brackets (e.g., "array_field[0]"). +// 3. Supports length access using len() notation (e.g., "len(array_field)"). +// 4. Handles leading dots and validates path format. func ParsePath(rawPath string) ([]PathElement, error) { - // We use dot notation, so we split the path by '.'. rawElements := strings.Split(rawPath, ".") - if len(rawElements) == 0 { - return nil, errors.New("empty path provided") - } if rawElements[0] == "" { // Remove leading dot if present @@ -32,31 +40,74 @@ func ParsePath(rawPath string) ([]PathElement, error) { return nil, errors.New("invalid path: consecutive dots or trailing dot") } - fieldName := elem - var index *uint64 + // Processing element string + processingField := elem + var pathElement PathElement - // Check for index notation, e.g., "field[0]" - if strings.Contains(elem, "[") { - parts := strings.SplitN(elem, "[", 2) - if len(parts) != 2 { - return nil, fmt.Errorf("invalid index notation in path element %s", elem) - } - - fieldName = parts[0] - indexPart := strings.TrimSuffix(parts[1], "]") - if indexPart == "" { - return nil, errors.New("index cannot be empty") - } - - indexValue, err := strconv.ParseUint(indexPart, 10, 64) - if err != nil { - return nil, fmt.Errorf("invalid index in path element %s: %w", elem, err) - } - index = &indexValue + matches := lengthRegex.FindStringSubmatch(processingField) + // FindStringSubmatch matches a whole string like "len(field_name)" and its inner expression. + // For a path element to be a length query, len(matches) should be 2: + // 1. Full match: "len(field_name)" + // 2. Inner expression: "field_name" + if len(matches) == 2 { + pathElement.Length = true + // Extract the inner expression between len( and ) and continue parsing on that + processingField = matches[1] } - path = append(path, PathElement{Name: fieldName, Index: index}) + // Default name is the full working string (may be updated below if it contains indices) + pathElement.Name = processingField + + if strings.Contains(processingField, "[") { + // Split into field and indices, e.g., "array[0][1]" -> name:"array", indices:{0,1} + pathElement.Name = extractFieldName(processingField) + indices, err := extractArrayIndices(processingField) + if err != nil { + return nil, err + } + // Although extractArrayIndices supports multiple indices, + // only a single index is supported per PathElement, e.g., "transactions[0]" is valid + // while "transactions[0][0]" is rejected explicitly. 
+ if len(indices) != 1 { + return nil, fmt.Errorf("multiple indices not supported in token %s", processingField) + } + pathElement.Index = &indices[0] + + } + + path = append(path, pathElement) } return path, nil } + +// extractFieldName extracts the field name from a path element name (removes array indices) +// For example: "field_name[5]" returns "field_name" +func extractFieldName(name string) string { + if idx := strings.Index(name, "["); idx != -1 { + return name[:idx] + } + return name +} + +// extractArrayIndices returns every bracketed, non-negative index in the name, +// e.g. "array[0][1]" -> []uint64{0, 1}. Errors if none are found or if any index is invalid. +func extractArrayIndices(name string) ([]uint64, error) { + // Match all bracketed content, then we'll parse as unsigned to catch negatives explicitly + matches := arrayIndexRegex.FindAllStringSubmatch(name, -1) + + if len(matches) == 0 { + return nil, errors.New("no array indices found") + } + + indices := make([]uint64, 0, len(matches)) + for _, m := range matches { + raw := strings.TrimSpace(m[1]) + idx, err := strconv.ParseUint(raw, 10, 64) + if err != nil { + return nil, fmt.Errorf("invalid array index: %w", err) + } + indices = append(indices, idx) + } + return indices, nil +} diff --git a/encoding/ssz/query/path_test.go b/encoding/ssz/query/path_test.go index 0594459d38..62f4a86c43 100644 --- a/encoding/ssz/query/path_test.go +++ b/encoding/ssz/query/path_test.go @@ -7,6 +7,9 @@ import ( "github.com/OffchainLabs/prysm/v6/testing/require" ) +// Helper to get pointer to uint64 +func u64(v uint64) *uint64 { return &v } + func TestParsePath(t *testing.T) { tests := []struct { name string @@ -34,6 +37,177 @@ func TestParsePath(t *testing.T) { }, wantErr: false, }, + { + name: "simple length path with length field", + path: "data.target.len(root)", + expected: []query.PathElement{ + {Name: "data"}, + {Name: "target"}, + {Name: "root", Length: true}, + }, + wantErr: false, + }, + { + name: "len with top-level identifier", + path: "len(data)", + expected: []query.PathElement{{Name: "data", Length: true}}, + wantErr: false, + }, + { + name: "length with messy whitespace", + path: "data.target. 
\tlen ( root ) ", + expected: []query.PathElement{ + {Name: "data"}, + {Name: "target"}, + {Name: "root", Length: true}, + }, + wantErr: false, + }, + { + name: "len with numeric index inside argument", + path: "data.len(a[10])", + expected: []query.PathElement{ + {Name: "data"}, + {Name: "a", Length: true, Index: u64(10)}, + }, + wantErr: false, + }, + { + name: "array index with spaces", + path: "arr[ 42 ]", + expected: []query.PathElement{{Name: "arr", Index: u64(42)}}, + wantErr: false, + }, + { + name: "array leading zeros", + path: "arr[001]", + expected: []query.PathElement{{Name: "arr", Index: u64(1)}}, + wantErr: false, + }, + { + name: "array max uint64", + path: "arr[18446744073709551615]", + expected: []query.PathElement{{Name: "arr", Index: u64(18446744073709551615)}}, + wantErr: false, + }, + { + name: "len with dotted path inside - no input validation - reverts at a later stage", + path: "len(data.target.root)", + expected: []query.PathElement{{Name: "len(data", Length: false}, {Name: "target", Length: false}, {Name: "root)", Length: false}}, + wantErr: false, + }, + { + name: "len with dotted path then more - no input validation - reverts at a later stage", + path: "len(data.target.root).foo", + expected: []query.PathElement{{Name: "len(data", Length: false}, {Name: "target", Length: false}, {Name: "root)", Length: false}, {Name: "foo", Length: false}}, + wantErr: false, + }, + { + name: "len without closing paren - no input validation - reverts at a later stage", + path: "len(root", + expected: []query.PathElement{{Name: "len(root"}}, + wantErr: false, + }, + { + name: "len with extra closing paren - no input validation - reverts at a later stage", + path: "len(root))", + expected: []query.PathElement{{Name: "len(root))"}}, + wantErr: false, + }, + { + name: "empty len argument - no input validation - reverts at a later stage", + path: "len()", + expected: []query.PathElement{{Name: "len()"}}, + wantErr: false, + }, + { + name: "len with comma-separated args - no input validation - reverts at a later stage", + path: "len(a,b)", + expected: []query.PathElement{{Name: "a,b", Length: true}}, + wantErr: false, + }, + { + name: "len call followed by index (outer) - no input validation - reverts at a later stage", + path: "data.len(root)[0]", + expected: []query.PathElement{ + {Name: "data"}, + {Name: "len(root)", Index: u64(0)}, + }, + wantErr: false, + }, + { + name: "cannot provide consecutive dots in raw path", + path: "data..target.root", + wantErr: true, + }, + { + name: "cannot provide a negative index in array path", + path: ".data.target.root[-1]", + wantErr: true, + }, + { + name: "invalid index in array path", + path: ".data.target.root[a]", + wantErr: true, + }, + { + name: "multidimensional array index in path", + path: ".data.target.root[0][1]", + wantErr: true, + }, + { + name: "leading double dot", + path: "..data", + expected: nil, + wantErr: true, + }, + { + name: "trailing dot", + path: "data.target.", + expected: nil, + wantErr: true, + }, + { + name: "len with inner bracket non-numeric index", + path: "data.len(a[b])", + wantErr: true, + }, + { + name: "array empty index", + path: "arr[]", + wantErr: true, + }, + { + name: "array hex index", + path: "arr[0x10]", + wantErr: true, + }, + { + name: "array missing closing bracket", + path: "arr[12", + wantErr: true, + }, + { + name: "array plus sign index", + path: "arr[+3]", + wantErr: true, + }, + { + name: "array unicode digits", + path: "arr[12]", + wantErr: true, + }, + { + name: "array overflow uint64", + 
path: "arr[18446744073709551616]", + wantErr: true, + }, + { + name: "array index then suffix", + path: "field[1]suffix", + expected: []query.PathElement{{Name: "field", Index: u64(1)}}, + wantErr: false, + }, } for _, tt := range tests { @@ -41,7 +215,7 @@ func TestParsePath(t *testing.T) { parsedPath, err := query.ParsePath(tt.path) if tt.wantErr { - require.NotNil(t, err, "Expected error but got none") + require.NotNil(t, err, "Expected error did not occur") return } diff --git a/encoding/ssz/query/ssz_type.go b/encoding/ssz/query/ssz_type.go index a31e5e1b73..671203505d 100644 --- a/encoding/ssz/query/ssz_type.go +++ b/encoding/ssz/query/ssz_type.go @@ -57,3 +57,8 @@ func (t SSZType) String() string { return fmt.Sprintf("Unknown(%d)", t) } } + +// isBasic returns true if the SSZType is a basic type. +func (t SSZType) isBasic() bool { + return t == Uint8 || t == Uint16 || t == Uint32 || t == Uint64 || t == Boolean +} From c9b34d556d4955569d1c16c73afb8a255811fe9f Mon Sep 17 00:00:00 2001 From: Manu NALEPA Date: Tue, 28 Oct 2025 13:57:14 +0100 Subject: [PATCH 058/103] Update go-netroute to `v0.3.0` (#15934) --- changelog/manu-go-netroute.md | 2 ++ deps.bzl | 4 ++-- go.mod | 2 +- go.sum | 4 ++-- 4 files changed, 7 insertions(+), 5 deletions(-) create mode 100644 changelog/manu-go-netroute.md diff --git a/changelog/manu-go-netroute.md b/changelog/manu-go-netroute.md new file mode 100644 index 0000000000..45ec7db1da --- /dev/null +++ b/changelog/manu-go-netroute.md @@ -0,0 +1,2 @@ +### Changed +- Update go-netroute to `v0.3.0` \ No newline at end of file diff --git a/deps.bzl b/deps.bzl index 64e6d1983a..869f799d77 100644 --- a/deps.bzl +++ b/deps.bzl @@ -2023,8 +2023,8 @@ def prysm_deps(): go_repository( name = "com_github_libp2p_go_netroute", importpath = "github.com/libp2p/go-netroute", - sum = "h1:Dejd8cQ47Qx2kRABg6lPwknU7+nBnFRpko45/fFPuZ8=", - version = "v0.2.2", + sum = "h1:nqPCXHmeNmgTJnktosJ/sIef9hvwYCrsLxXmfNks/oc=", + version = "v0.3.0", ) go_repository( name = "com_github_libp2p_go_reuseport", diff --git a/go.mod b/go.mod index 32759e63b7..5a81a8ec4f 100644 --- a/go.mod +++ b/go.mod @@ -177,7 +177,7 @@ require ( github.com/libp2p/go-libp2p-asn-util v0.4.1 // indirect github.com/libp2p/go-msgio v0.3.0 // indirect github.com/libp2p/go-nat v0.2.0 // indirect - github.com/libp2p/go-netroute v0.2.2 // indirect + github.com/libp2p/go-netroute v0.3.0 // indirect github.com/libp2p/go-reuseport v0.4.0 // indirect github.com/libp2p/go-yamux/v4 v4.0.2 // indirect github.com/lunixbochs/vtclean v1.0.0 // indirect diff --git a/go.sum b/go.sum index e5ba50cf80..c72f739e4b 100644 --- a/go.sum +++ b/go.sum @@ -599,8 +599,8 @@ github.com/libp2p/go-msgio v0.3.0 h1:mf3Z8B1xcFN314sWX+2vOTShIE0Mmn2TXn3YCUQGNj0 github.com/libp2p/go-msgio v0.3.0/go.mod h1:nyRM819GmVaF9LX3l03RMh10QdOroF++NBbxAb0mmDM= github.com/libp2p/go-nat v0.2.0 h1:Tyz+bUFAYqGyJ/ppPPymMGbIgNRH+WqC5QrT5fKrrGk= github.com/libp2p/go-nat v0.2.0/go.mod h1:3MJr+GRpRkyT65EpVPBstXLvOlAPzUVlG6Pwg9ohLJk= -github.com/libp2p/go-netroute v0.2.2 h1:Dejd8cQ47Qx2kRABg6lPwknU7+nBnFRpko45/fFPuZ8= -github.com/libp2p/go-netroute v0.2.2/go.mod h1:Rntq6jUAH0l9Gg17w5bFGhcC9a+vk4KNXs6s7IljKYE= +github.com/libp2p/go-netroute v0.3.0 h1:nqPCXHmeNmgTJnktosJ/sIef9hvwYCrsLxXmfNks/oc= +github.com/libp2p/go-netroute v0.3.0/go.mod h1:Nkd5ShYgSMS5MUKy/MU2T57xFoOKvvLR92Lic48LEyA= github.com/libp2p/go-reuseport v0.4.0 h1:nR5KU7hD0WxXCJbmw7r2rhRYruNRl2koHw8fQscQm2s= github.com/libp2p/go-reuseport v0.4.0/go.mod h1:ZtI03j/wO5hZVDFo2jKywN6bYKWLOy8Se6DrI2E1cLU= 
github.com/libp2p/go-yamux/v4 v4.0.2 h1:nrLh89LN/LEiqcFiqdKDRHjGstN300C1269K/EX0CPU= From 7b3c11c8180de1ac959f16c4b86db1fe56add800 Mon Sep 17 00:00:00 2001 From: Manu NALEPA Date: Tue, 28 Oct 2025 16:39:35 +0100 Subject: [PATCH 059/103] Do not serve sidecars if corresponding block is not available in the database (#15933) * Implement `AvailableBlocks`. * `blobSidecarByRootRPCHandler`: Do not serve a sidecar if the corresponding block is not available. * `dataColumnSidecarByRootRPCHandler`: Do not do extra work if only needed for TRACE logging. * `TestDataColumnSidecarsByRootRPCHandler`: Re-arrange (no functional change). * `TestDataColumnSidecarsByRootRPCHandler`: Save blocks corresponding to sidecars into DB. * `dataColumnSidecarByRootRPCHandler`: Do not serve a sidecar if the corresponding block is not available. * Add changelog * `TestDataColumnSidecarsByRootRPCHandler`: Use `assert` instead of `require` in goroutines. https://github.com/stretchr/testify?tab=readme-ov-file#require-package --- beacon-chain/blockchain/testing/mock.go | 30 ++++++++ beacon-chain/db/iface/interface.go | 1 + beacon-chain/db/kv/blocks.go | 36 +++++++++ beacon-chain/db/kv/blocks_test.go | 38 ++++++++++ .../sync/rpc_blob_sidecars_by_root.go | 20 +++++ .../sync/rpc_data_column_sidecars_by_root.go | 35 ++++++--- .../rpc_data_column_sidecars_by_root_test.go | 73 +++++++++++++------ changelog/manu-by-root-sidecars.md | 3 + 8 files changed, 203 insertions(+), 33 deletions(-) create mode 100644 changelog/manu-by-root-sidecars.md diff --git a/beacon-chain/blockchain/testing/mock.go b/beacon-chain/blockchain/testing/mock.go index 61066ce9f0..433a8dc759 100644 --- a/beacon-chain/blockchain/testing/mock.go +++ b/beacon-chain/blockchain/testing/mock.go @@ -472,6 +472,36 @@ func (s *ChainService) HasBlock(ctx context.Context, rt [32]byte) bool { return s.InitSyncBlockRoots[rt] } +func (s *ChainService) AvailableBlocks(ctx context.Context, blockRoots [][32]byte) map[[32]byte]bool { + if s.DB == nil { + return nil + } + + count := len(blockRoots) + availableRoots := make(map[[32]byte]bool, count) + notInDBRoots := make([][32]byte, 0, count) + for _, root := range blockRoots { + if s.DB.HasBlock(ctx, root) { + availableRoots[root] = true + continue + } + + notInDBRoots = append(notInDBRoots, root) + } + + if s.InitSyncBlockRoots == nil { + return availableRoots + } + + for _, root := range notInDBRoots { + if s.InitSyncBlockRoots[root] { + availableRoots[root] = true + } + } + + return availableRoots +} + // RecentBlockSlot mocks the same method in the chain service. 
func (s *ChainService) RecentBlockSlot([32]byte) (primitives.Slot, error) { return s.BlockSlot, nil diff --git a/beacon-chain/db/iface/interface.go b/beacon-chain/db/iface/interface.go index 58233ffe1b..191a807a87 100644 --- a/beacon-chain/db/iface/interface.go +++ b/beacon-chain/db/iface/interface.go @@ -28,6 +28,7 @@ type ReadOnlyDatabase interface { BlocksBySlot(ctx context.Context, slot primitives.Slot) ([]interfaces.ReadOnlySignedBeaconBlock, error) BlockRootsBySlot(ctx context.Context, slot primitives.Slot) (bool, [][32]byte, error) HasBlock(ctx context.Context, blockRoot [32]byte) bool + AvailableBlocks(ctx context.Context, blockRoots [][32]byte) map[[32]byte]bool GenesisBlock(ctx context.Context) (interfaces.ReadOnlySignedBeaconBlock, error) GenesisBlockRoot(ctx context.Context) ([32]byte, error) IsFinalizedBlock(ctx context.Context, blockRoot [32]byte) bool diff --git a/beacon-chain/db/kv/blocks.go b/beacon-chain/db/kv/blocks.go index afc7ef19e0..aeaa314567 100644 --- a/beacon-chain/db/kv/blocks.go +++ b/beacon-chain/db/kv/blocks.go @@ -336,6 +336,42 @@ func (s *Store) HasBlock(ctx context.Context, blockRoot [32]byte) bool { return exists } +// AvailableBlocks returns a set of roots indicating which blocks corresponding to `blockRoots` are available in the storage. +func (s *Store) AvailableBlocks(ctx context.Context, blockRoots [][32]byte) map[[32]byte]bool { + _, span := trace.StartSpan(ctx, "BeaconDB.AvailableBlocks") + defer span.End() + + count := len(blockRoots) + availableRoots := make(map[[32]byte]bool, count) + + // First, check the cache for each block root. + notInCacheRoots := make([][32]byte, 0, count) + for _, root := range blockRoots { + if v, ok := s.blockCache.Get(string(root[:])); v != nil && ok { + availableRoots[root] = true + continue + } + + notInCacheRoots = append(notInCacheRoots, root) + } + + // Next, check the database for the remaining block roots. + if err := s.db.View(func(tx *bolt.Tx) error { + bkt := tx.Bucket(blocksBucket) + for _, root := range notInCacheRoots { + if bkt.Get(root[:]) != nil { + availableRoots[root] = true + } + } + + return nil + }); err != nil { + panic(err) // lint:nopanic -- View never returns an error. + } + + return availableRoots +} + // BlocksBySlot retrieves a list of beacon blocks and its respective roots by slot. func (s *Store) BlocksBySlot(ctx context.Context, slot primitives.Slot) ([]interfaces.ReadOnlySignedBeaconBlock, error) { ctx, span := trace.StartSpan(ctx, "BeaconDB.BlocksBySlot") diff --git a/beacon-chain/db/kv/blocks_test.go b/beacon-chain/db/kv/blocks_test.go index 0cbb15dd9a..a35708d3c1 100644 --- a/beacon-chain/db/kv/blocks_test.go +++ b/beacon-chain/db/kv/blocks_test.go @@ -656,6 +656,44 @@ func TestStore_BlocksCRUD_NoCache(t *testing.T) { } } +func TestAvailableBlocks(t *testing.T) { + ctx := t.Context() + db := setupDB(t) + + b0, b1, b2 := util.NewBeaconBlock(), util.NewBeaconBlock(), util.NewBeaconBlock() + b0.Block.Slot, b1.Block.Slot, b2.Block.Slot = 10, 20, 30 + + sb0, err := blocks.NewSignedBeaconBlock(b0) + require.NoError(t, err) + r0, err := b0.Block.HashTreeRoot() + require.NoError(t, err) + + // Save b0 but remove it from cache. + err = db.SaveBlock(ctx, sb0) + require.NoError(t, err) + db.blockCache.Del(string(r0[:])) + + // b1 is not saved at all. + r1, err := b1.Block.HashTreeRoot() + require.NoError(t, err) + + // Save b2 in cache and DB. 
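+	// (b0 above remains only in the bolt bucket after its cache entry is deleted, so the lookup
+	// below exercises both the cache path and the database fallback.)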
+ sb2, err := blocks.NewSignedBeaconBlock(b2) + require.NoError(t, err) + r2, err := b2.Block.HashTreeRoot() + require.NoError(t, err) + require.NoError(t, db.SaveBlock(ctx, sb2)) + require.NoError(t, err) + + expected := map[[32]byte]bool{r0: true, r2: true} + actual := db.AvailableBlocks(ctx, [][32]byte{r0, r1, r2}) + + require.Equal(t, len(expected), len(actual)) + for i := range expected { + require.Equal(t, true, actual[i]) + } +} + func TestStore_Blocks_FiltersCorrectly(t *testing.T) { for _, tt := range blockTests { t.Run(tt.name, func(t *testing.T) { diff --git a/beacon-chain/sync/rpc_blob_sidecars_by_root.go b/beacon-chain/sync/rpc_blob_sidecars_by_root.go index c1e9e64555..63b31f113c 100644 --- a/beacon-chain/sync/rpc_blob_sidecars_by_root.go +++ b/beacon-chain/sync/rpc_blob_sidecars_by_root.go @@ -10,6 +10,7 @@ import ( "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p" "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/types" "github.com/OffchainLabs/prysm/v6/cmd/beacon-chain/flags" + fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" "github.com/OffchainLabs/prysm/v6/config/params" "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" @@ -58,6 +59,17 @@ func (s *Service) blobSidecarByRootRPCHandler(ctx context.Context, msg interface return errors.Wrapf(err, "unexpected error computing min valid blob request slot, current_slot=%d", cs) } + // Extract all needed roots. + roots := make([][fieldparams.RootLength]byte, 0, len(blobIdents)) + for _, ident := range blobIdents { + root := bytesutil.ToBytes32(ident.BlockRoot) + roots = append(roots, root) + } + + // Filter all available roots in block storage. + availableRoots := s.cfg.beaconDB.AvailableBlocks(ctx, roots) + + // Serve each requested blob sidecar. for i := range blobIdents { if err := ctx.Err(); err != nil { closeStream(stream, log) @@ -69,7 +81,15 @@ func (s *Service) blobSidecarByRootRPCHandler(ctx context.Context, msg interface <-ticker.C } s.rateLimiter.add(stream, 1) + root, idx := bytesutil.ToBytes32(blobIdents[i].BlockRoot), blobIdents[i].Index + + // Do not serve a blob sidecar if the corresponding block is not available. + if !availableRoots[root] { + log.Trace("Peer requested blob sidecar by root but corresponding block not found in db") + continue + } + sc, err := s.cfg.blobStorage.Get(root, idx) if err != nil { log := log.WithFields(logrus.Fields{ diff --git a/beacon-chain/sync/rpc_data_column_sidecars_by_root.go b/beacon-chain/sync/rpc_data_column_sidecars_by_root.go index d1b673de0f..2c0f86ac20 100644 --- a/beacon-chain/sync/rpc_data_column_sidecars_by_root.go +++ b/beacon-chain/sync/rpc_data_column_sidecars_by_root.go @@ -56,18 +56,6 @@ func (s *Service) dataColumnSidecarByRootRPCHandler(ctx context.Context, msg int return errors.Wrap(err, "validate data columns by root request") } - requestedColumnsByRoot := make(map[[fieldparams.RootLength]byte][]uint64) - for _, columnIdent := range requestedColumnIdents { - var root [fieldparams.RootLength]byte - copy(root[:], columnIdent.BlockRoot) - requestedColumnsByRoot[root] = append(requestedColumnsByRoot[root], columnIdent.Columns...) - } - - // Sort by column index for each root. - for _, columns := range requestedColumnsByRoot { - slices.Sort(columns) - } - // Compute the oldest slot we'll allow a peer to request, based on the current slot. 
	minReqSlot, err := dataColumnsRPCMinValidSlot(s.cfg.clock.CurrentSlot())
 	if err != nil {
@@ -84,6 +72,12 @@ func (s *Service) dataColumnSidecarByRootRPCHandler(ctx context.Context, msg int
 	}
 
 	if log.Logger.Level >= logrus.TraceLevel {
+		requestedColumnsByRoot := make(map[[fieldparams.RootLength]byte][]uint64)
+		for _, ident := range requestedColumnIdents {
+			root := bytesutil.ToBytes32(ident.BlockRoot)
+			requestedColumnsByRoot[root] = append(requestedColumnsByRoot[root], ident.Columns...)
+		}
+
 		// We optimistically assume the peer requests the same set of columns for all roots,
 		// pre-sizing the map accordingly.
 		requestedRootsByColumnSet := make(map[string][]string, 1)
@@ -96,6 +90,17 @@ func (s *Service) dataColumnSidecarByRootRPCHandler(ctx context.Context, msg int
 		}
 		log.WithField("requested", requestedRootsByColumnSet).Trace("Serving data column sidecars by root")
 	}
 
+	// Extract all requested roots.
+	roots := make([][fieldparams.RootLength]byte, 0, len(requestedColumnIdents))
+	for _, ident := range requestedColumnIdents {
+		root := bytesutil.ToBytes32(ident.BlockRoot)
+		roots = append(roots, root)
+	}
+
+	// Filter all available roots in block storage.
+	availableRoots := s.cfg.beaconDB.AvailableBlocks(ctx, roots)
+
+	// Serve each requested data column sidecar.
 	count := 0
 	for _, ident := range requestedColumnIdents {
 		if err := ctx.Err(); err != nil {
@@ -117,6 +122,12 @@ func (s *Service) dataColumnSidecarByRootRPCHandler(ctx context.Context, msg int
 		s.rateLimiter.add(stream, int64(len(columns)))
 
+		// Do not serve a data column sidecar if the corresponding block is not available.
+		if !availableRoots[root] {
+			log.Trace("Peer requested data column sidecar by root but corresponding block not found in db")
+			continue
+		}
+
 		// Retrieve the requested sidecars from the store.
verifiedRODataColumns, err := s.cfg.dataColumnStorage.Get(root, columns) if err != nil { diff --git a/beacon-chain/sync/rpc_data_column_sidecars_by_root_test.go b/beacon-chain/sync/rpc_data_column_sidecars_by_root_test.go index 9de0b5886a..a0ba567985 100644 --- a/beacon-chain/sync/rpc_data_column_sidecars_by_root_test.go +++ b/beacon-chain/sync/rpc_data_column_sidecars_by_root_test.go @@ -10,6 +10,7 @@ import ( chainMock "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/testing" "github.com/OffchainLabs/prysm/v6/beacon-chain/db/filesystem" + testDB "github.com/OffchainLabs/prysm/v6/beacon-chain/db/testing" "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p" p2ptest "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/testing" "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/types" @@ -19,6 +20,7 @@ import ( "github.com/OffchainLabs/prysm/v6/config/params" "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v6/testing/assert" "github.com/OffchainLabs/prysm/v6/testing/require" "github.com/OffchainLabs/prysm/v6/testing/util" "github.com/libp2p/go-libp2p/core/network" @@ -103,23 +105,47 @@ func TestDataColumnSidecarsByRootRPCHandler(t *testing.T) { localP2P := p2ptest.NewTestP2P(t) clock := startup.NewClock(time.Now(), [fieldparams.RootLength]byte{}) - params := []util.DataColumnParam{ - {Slot: 10, Index: 1}, {Slot: 10, Index: 2}, {Slot: 10, Index: 3}, - {Slot: 40, Index: 4}, {Slot: 40, Index: 6}, - {Slot: 45, Index: 7}, {Slot: 45, Index: 8}, {Slot: 45, Index: 9}, + _, verifiedRODataColumns := util.CreateTestVerifiedRoDataColumnSidecars( + t, + []util.DataColumnParam{ + {Slot: 10, Index: 1}, {Slot: 10, Index: 2}, {Slot: 10, Index: 3}, + {Slot: 40, Index: 4}, {Slot: 40, Index: 6}, + {Slot: 45, Index: 7}, {Slot: 45, Index: 8}, {Slot: 45, Index: 9}, + {Slot: 46, Index: 10}, // Corresponding block won't be saved in DB + }, + ) + + dataColumnStorage := filesystem.NewEphemeralDataColumnStorage(t) + err := dataColumnStorage.Save(verifiedRODataColumns) + require.NoError(t, err) + + beaconDB := testDB.SetupDB(t) + indices := [...]int{0, 3, 5} + + roBlocks := make([]blocks.ROBlock, 0, len(indices)) + for _, i := range indices { + blockPb := util.NewBeaconBlock() + + signedBeaconBlock, err := blocks.NewSignedBeaconBlock(blockPb) + require.NoError(t, err) + + // Here the block root has to match the sidecar's block root. + // (However, the block root does not match the actual root of the block, but we don't care for this test.) 
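+		// (AvailableBlocks only checks that a block is stored under the root key, so the handler
+		// will treat these roots as available even though the stored block bodies are unrelated.)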
+ roBlock, err := blocks.NewROBlockWithRoot(signedBeaconBlock, verifiedRODataColumns[i].BlockRoot()) + require.NoError(t, err) + + roBlocks = append(roBlocks, roBlock) } - _, verifiedRODataColumns := util.CreateTestVerifiedRoDataColumnSidecars(t, params) - - storage := filesystem.NewEphemeralDataColumnStorage(t) - err := storage.Save(verifiedRODataColumns) + err = beaconDB.SaveROBlocks(ctx, roBlocks, false /*cache*/) require.NoError(t, err) service := &Service{ cfg: &config{ p2p: localP2P, + beaconDB: beaconDB, clock: clock, - dataColumnStorage: storage, + dataColumnStorage: dataColumnStorage, chain: &chainMock.ChainService{}, }, rateLimiter: newRateLimiter(localP2P), @@ -134,6 +160,7 @@ func TestDataColumnSidecarsByRootRPCHandler(t *testing.T) { root0 := verifiedRODataColumns[0].BlockRoot() root3 := verifiedRODataColumns[3].BlockRoot() root5 := verifiedRODataColumns[5].BlockRoot() + root8 := verifiedRODataColumns[8].BlockRoot() remoteP2P.BHost.SetStreamHandler(protocolID, func(stream network.Stream) { defer wg.Done() @@ -147,22 +174,22 @@ func TestDataColumnSidecarsByRootRPCHandler(t *testing.T) { break } - require.NoError(t, err) + assert.NoError(t, err) sidecars = append(sidecars, sidecar) } - require.Equal(t, 5, len(sidecars)) - require.Equal(t, root3, sidecars[0].BlockRoot()) - require.Equal(t, root3, sidecars[1].BlockRoot()) - require.Equal(t, root5, sidecars[2].BlockRoot()) - require.Equal(t, root5, sidecars[3].BlockRoot()) - require.Equal(t, root5, sidecars[4].BlockRoot()) + assert.Equal(t, 5, len(sidecars)) + assert.Equal(t, root3, sidecars[0].BlockRoot()) + assert.Equal(t, root3, sidecars[1].BlockRoot()) + assert.Equal(t, root5, sidecars[2].BlockRoot()) + assert.Equal(t, root5, sidecars[3].BlockRoot()) + assert.Equal(t, root5, sidecars[4].BlockRoot()) - require.Equal(t, uint64(4), sidecars[0].Index) - require.Equal(t, uint64(6), sidecars[1].Index) - require.Equal(t, uint64(7), sidecars[2].Index) - require.Equal(t, uint64(8), sidecars[3].Index) - require.Equal(t, uint64(9), sidecars[4].Index) + assert.Equal(t, uint64(4), sidecars[0].Index) + assert.Equal(t, uint64(6), sidecars[1].Index) + assert.Equal(t, uint64(7), sidecars[2].Index) + assert.Equal(t, uint64(8), sidecars[3].Index) + assert.Equal(t, uint64(9), sidecars[4].Index) }) localP2P.Connect(remoteP2P) @@ -182,6 +209,10 @@ func TestDataColumnSidecarsByRootRPCHandler(t *testing.T) { BlockRoot: root5[:], Columns: []uint64{7, 8, 9}, }, + { + BlockRoot: root8[:], + Columns: []uint64{10}, + }, } err = service.dataColumnSidecarByRootRPCHandler(ctx, msg, stream) diff --git a/changelog/manu-by-root-sidecars.md b/changelog/manu-by-root-sidecars.md new file mode 100644 index 0000000000..390077824f --- /dev/null +++ b/changelog/manu-by-root-sidecars.md @@ -0,0 +1,3 @@ +### Fixed +- `blobSidecarByRootRPCHandler`: Do not serve a sidecar if the corresponding block is not available. +- `dataColumnSidecarByRootRPCHandler`: Do not serve a sidecar if the corresponding block is not available. 
\ No newline at end of file From dbb2f0b047462a2c0b21d88e82baa20866e75487 Mon Sep 17 00:00:00 2001 From: james-prysm <90280386+james-prysm@users.noreply.github.com> Date: Tue, 28 Oct 2025 08:42:05 -0700 Subject: [PATCH 060/103] changelog (#15929) --- CHANGELOG.md | 23 +++++++++++++++++++ ...riy_fix_kv-recover-state-summurt-bucket.md | 3 --- changelog/james-prysm_v6.1.3.md | 3 --- changelog/james-prysm_v6.1.4.md | 3 +++ changelog/kasey_clear-db-rm-genesis.md | 2 -- changelog/manu-advertise-atts.md | 2 -- changelog/manu-number-custody-groups.md | 2 -- changelog/manu-random-peer.md | 2 -- .../manu-read-columns-from-disk-error.md | 2 -- ...u-verify-data-column-sidecar-kzg-proofs.md | 2 -- changelog/muzry_fix_extract_metadata_file.md | 2 -- changelog/potuz_hdiff_diff_type.md | 3 --- changelog/satushh-update-easlot-pruning.md | 3 --- changelog/syjn99_ssz-ql-endpoints.md | 3 --- changelog/ttsao_fix-sync-aggregate-state.md | 3 --- ...ttsao_fix-sync-committee-subnet-indices.md | 3 --- 16 files changed, 26 insertions(+), 35 deletions(-) delete mode 100644 changelog/MozirDmitriy_fix_kv-recover-state-summurt-bucket.md delete mode 100644 changelog/james-prysm_v6.1.3.md create mode 100644 changelog/james-prysm_v6.1.4.md delete mode 100644 changelog/kasey_clear-db-rm-genesis.md delete mode 100644 changelog/manu-advertise-atts.md delete mode 100644 changelog/manu-number-custody-groups.md delete mode 100644 changelog/manu-random-peer.md delete mode 100644 changelog/manu-read-columns-from-disk-error.md delete mode 100644 changelog/manu-verify-data-column-sidecar-kzg-proofs.md delete mode 100644 changelog/muzry_fix_extract_metadata_file.md delete mode 100644 changelog/potuz_hdiff_diff_type.md delete mode 100644 changelog/satushh-update-easlot-pruning.md delete mode 100644 changelog/syjn99_ssz-ql-endpoints.md delete mode 100644 changelog/ttsao_fix-sync-aggregate-state.md delete mode 100644 changelog/ttsao_fix-sync-committee-subnet-indices.md diff --git a/CHANGELOG.md b/CHANGELOG.md index 95754b1503..4687007fb4 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,6 +4,29 @@ All notable changes to this project will be documented in this file. The format is based on Keep a Changelog, and this project adheres to Semantic Versioning. +## [v6.1.4](https://github.com/prysmaticlabs/prysm/compare/v6.1.3...v6.1.4) - 2025-10-24 + +This release includes a bug fix affecting block proposals in rare cases, along with an important update for Windows users running post-Fusaka fork. + +### Added + +- SSZ-QL: Add endpoints for `BeaconState`/`BeaconBlock`. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15888) +- Add native state diff type and marshalling functions. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15250) +- Update the earliest available slot after pruning operations in beacon chain database pruner. This ensures the P2P layer accurately knows which historical data is available after pruning, preventing nodes from advertising or attempting to serve data that has been pruned. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15694) + +### Fixed + +- Correctly advertise (in ENR and beacon API) attestation subnets when using `--subscribe-all-subnets`. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15880) +- `randomPeer`: Return if the context is cancelled when waiting for peers. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15876) +- Improve error message when the byte count read from disk when reading a data column sidecars is lower than expected. (Mostly, because the file is truncated.). 
[[PR]](https://github.com/prysmaticlabs/prysm/pull/15881) +- Delete the genesis state file when --clear-db / --force-clear-db is specified. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15883) +- Fix sync committee subscription to use subnet indices instead of committee indices. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15885) +- Fixed metadata extraction on Windows by correctly splitting file paths. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15899) +- `VerifyDataColumnsSidecarKZGProofs`: Check if sizes match. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15892) +- Fix recoverStateSummary to persist state summaries in stateSummaryBucket instead of stateBucket (#15896). [[PR]](https://github.com/prysmaticlabs/prysm/pull/15896) +- `updateCustodyInfoInDB`: Use `NumberOfCustodyGroups` instead of `NumberOfColumns`. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15908) +- Sync committee uses correct state to calculate position. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15905) + ## [v6.1.3](https://github.com/prysmaticlabs/prysm/compare/v6.1.2...v6.1.3) - 2025-10-20 This release has several important beacon API and p2p fixes. diff --git a/changelog/MozirDmitriy_fix_kv-recover-state-summurt-bucket.md b/changelog/MozirDmitriy_fix_kv-recover-state-summurt-bucket.md deleted file mode 100644 index 019e0aba32..0000000000 --- a/changelog/MozirDmitriy_fix_kv-recover-state-summurt-bucket.md +++ /dev/null @@ -1,3 +0,0 @@ -### Fixed - -- Fix recoverStateSummary to persist state summaries in stateSummaryBucket instead of stateBucket (#15896). diff --git a/changelog/james-prysm_v6.1.3.md b/changelog/james-prysm_v6.1.3.md deleted file mode 100644 index a49eaf09db..0000000000 --- a/changelog/james-prysm_v6.1.3.md +++ /dev/null @@ -1,3 +0,0 @@ -### Ignored - -- Changelog entries for v6.1.3 through v6.1.2 \ No newline at end of file diff --git a/changelog/james-prysm_v6.1.4.md b/changelog/james-prysm_v6.1.4.md new file mode 100644 index 0000000000..8a66bd68f1 --- /dev/null +++ b/changelog/james-prysm_v6.1.4.md @@ -0,0 +1,3 @@ +### Ignored + +- Changelog entries for v6.1.4 through v6.1.3 \ No newline at end of file diff --git a/changelog/kasey_clear-db-rm-genesis.md b/changelog/kasey_clear-db-rm-genesis.md deleted file mode 100644 index 2cc3153512..0000000000 --- a/changelog/kasey_clear-db-rm-genesis.md +++ /dev/null @@ -1,2 +0,0 @@ -### Fixed -- Delete the genesis state file when --clear-db / --force-clear-db is specified. diff --git a/changelog/manu-advertise-atts.md b/changelog/manu-advertise-atts.md deleted file mode 100644 index dc95b4fe4d..0000000000 --- a/changelog/manu-advertise-atts.md +++ /dev/null @@ -1,2 +0,0 @@ -### Fixed -- Correctly advertise (in ENR and beacon API) attestation subnets when using `--subscribe-all-subnets`. diff --git a/changelog/manu-number-custody-groups.md b/changelog/manu-number-custody-groups.md deleted file mode 100644 index 1d3710dc95..0000000000 --- a/changelog/manu-number-custody-groups.md +++ /dev/null @@ -1,2 +0,0 @@ -### Fixed -- `updateCustodyInfoInDB`: Use `NumberOfCustodyGroups` instead of `NumberOfColumns`. \ No newline at end of file diff --git a/changelog/manu-random-peer.md b/changelog/manu-random-peer.md deleted file mode 100644 index eb37eed1b3..0000000000 --- a/changelog/manu-random-peer.md +++ /dev/null @@ -1,2 +0,0 @@ -### Fixed -- `randomPeer`: Return if the context is cancelled when waiting for peers. 
\ No newline at end of file diff --git a/changelog/manu-read-columns-from-disk-error.md b/changelog/manu-read-columns-from-disk-error.md deleted file mode 100644 index 60a895b328..0000000000 --- a/changelog/manu-read-columns-from-disk-error.md +++ /dev/null @@ -1,2 +0,0 @@ -### Fixed -- Improve error message when the byte count read from disk when reading a data column sidecars is lower than expected. (Mostly, because the file is truncated.) \ No newline at end of file diff --git a/changelog/manu-verify-data-column-sidecar-kzg-proofs.md b/changelog/manu-verify-data-column-sidecar-kzg-proofs.md deleted file mode 100644 index 9f54a3f97a..0000000000 --- a/changelog/manu-verify-data-column-sidecar-kzg-proofs.md +++ /dev/null @@ -1,2 +0,0 @@ -### Fixed -- `VerifyDataColumnsSidecarKZGProofs`: Check if sizes match. diff --git a/changelog/muzry_fix_extract_metadata_file.md b/changelog/muzry_fix_extract_metadata_file.md deleted file mode 100644 index 445f15f94f..0000000000 --- a/changelog/muzry_fix_extract_metadata_file.md +++ /dev/null @@ -1,2 +0,0 @@ -### Fixed -- Fixed metadata extraction on Windows by correctly splitting file paths \ No newline at end of file diff --git a/changelog/potuz_hdiff_diff_type.md b/changelog/potuz_hdiff_diff_type.md deleted file mode 100644 index ee26b598e2..0000000000 --- a/changelog/potuz_hdiff_diff_type.md +++ /dev/null @@ -1,3 +0,0 @@ -### Added - -- Add native state diff type and marshalling functions diff --git a/changelog/satushh-update-easlot-pruning.md b/changelog/satushh-update-easlot-pruning.md deleted file mode 100644 index 2944788dfc..0000000000 --- a/changelog/satushh-update-easlot-pruning.md +++ /dev/null @@ -1,3 +0,0 @@ -### Added - -- Update the earliest available slot after pruning operations in beacon chain database pruner. This ensures the P2P layer accurately knows which historical data is available after pruning, preventing nodes from advertising or attempting to serve data that has been pruned. \ No newline at end of file diff --git a/changelog/syjn99_ssz-ql-endpoints.md b/changelog/syjn99_ssz-ql-endpoints.md deleted file mode 100644 index b5fccec18b..0000000000 --- a/changelog/syjn99_ssz-ql-endpoints.md +++ /dev/null @@ -1,3 +0,0 @@ -### Added - -- SSZ-QL: Add endpoints for `BeaconState`/`BeaconBlock`. 
diff --git a/changelog/ttsao_fix-sync-aggregate-state.md b/changelog/ttsao_fix-sync-aggregate-state.md deleted file mode 100644 index d4401c58db..0000000000 --- a/changelog/ttsao_fix-sync-aggregate-state.md +++ /dev/null @@ -1,3 +0,0 @@ -### Fixed - -- Sync committee uses correct state to calculate position \ No newline at end of file diff --git a/changelog/ttsao_fix-sync-committee-subnet-indices.md b/changelog/ttsao_fix-sync-committee-subnet-indices.md deleted file mode 100644 index e2793410b2..0000000000 --- a/changelog/ttsao_fix-sync-committee-subnet-indices.md +++ /dev/null @@ -1,3 +0,0 @@ -### Fixed - -- Fix sync committee subscription to use subnet indices instead of committee indices From 5a1a5b5ae5749cbc5c4ea1886f9a9c4724b1fea2 Mon Sep 17 00:00:00 2001 From: rocksload Date: Wed, 29 Oct 2025 22:40:33 +0800 Subject: [PATCH 061/103] refactor: use slices.Contains to simplify code (#15646) Signed-off-by: rocksload --- .../blockchain/weak_subjectivity_checks.go | 11 +++++------ beacon-chain/node/node.go | 7 +++---- beacon-chain/p2p/peers/status.go | 8 +++----- beacon-chain/rpc/eth/rewards/handlers.go | 10 ++++------ beacon-chain/sync/pending_attestations_queue.go | 7 +++---- beacon-chain/sync/validate_aggregate_proof.go | 8 +++----- beacon-chain/sync/validate_beacon_attestation.go | 9 ++------- changelog/rocksload_use_slices_contains.md | 3 +++ cmd/beacon-chain/storage/options.go | 7 +++---- cmd/flags/enum.go | 9 ++++----- container/multi-value-slice/multi_value_slice.go | 12 ++++-------- testing/util/logging_test.go | 9 ++------- tools/analyzers/logcapitalization/analyzer.go | 14 ++++---------- 13 files changed, 43 insertions(+), 71 deletions(-) create mode 100644 changelog/rocksload_use_slices_contains.md diff --git a/beacon-chain/blockchain/weak_subjectivity_checks.go b/beacon-chain/blockchain/weak_subjectivity_checks.go index 20df2ead67..dd55279771 100644 --- a/beacon-chain/blockchain/weak_subjectivity_checks.go +++ b/beacon-chain/blockchain/weak_subjectivity_checks.go @@ -3,6 +3,7 @@ package blockchain import ( "context" "fmt" + "slices" "github.com/OffchainLabs/prysm/v6/beacon-chain/db/filters" "github.com/OffchainLabs/prysm/v6/config/params" @@ -81,12 +82,10 @@ func (v *WeakSubjectivityVerifier) VerifyWeakSubjectivity(ctx context.Context, f if err != nil { return errors.Wrap(err, "error while retrieving block roots to verify weak subjectivity") } - for _, root := range roots { - if v.root == root { - log.Info("Weak subjectivity check has passed!!") - v.verified = true - return nil - } + if slices.Contains(roots, v.root) { + log.Info("Weak subjectivity check has passed!!") + v.verified = true + return nil } return errors.Wrap(errWSBlockNotFoundInEpoch, fmt.Sprintf("root=%#x, epoch=%d", v.root, v.epoch)) } diff --git a/beacon-chain/node/node.go b/beacon-chain/node/node.go index 6b305d8d49..045c61e9dd 100644 --- a/beacon-chain/node/node.go +++ b/beacon-chain/node/node.go @@ -12,6 +12,7 @@ import ( "os" "os/signal" "path/filepath" + "slices" "strconv" "strings" "sync" @@ -1135,10 +1136,8 @@ func (b *BeaconNode) registerLightClientStore() { func hasNetworkFlag(cliCtx *cli.Context) bool { for _, flag := range features.NetworkFlags { - for _, name := range flag.Names() { - if cliCtx.IsSet(name) { - return true - } + if slices.ContainsFunc(flag.Names(), cliCtx.IsSet) { + return true } } return false diff --git a/beacon-chain/p2p/peers/status.go b/beacon-chain/p2p/peers/status.go index 5b0e9977f9..048f8e3251 100644 --- a/beacon-chain/p2p/peers/status.go +++ 
b/beacon-chain/p2p/peers/status.go @@ -25,6 +25,7 @@ package peers import ( "context" "net" + "slices" "sort" "strings" "time" @@ -306,11 +307,8 @@ func (p *Status) SubscribedToSubnet(index uint64) []peer.ID { connectedStatus := peerData.ConnState == Connecting || peerData.ConnState == Connected if connectedStatus && peerData.MetaData != nil && !peerData.MetaData.IsNil() && peerData.MetaData.AttnetsBitfield() != nil { indices := indicesFromBitfield(peerData.MetaData.AttnetsBitfield()) - for _, idx := range indices { - if idx == index { - peers = append(peers, pid) - break - } + if slices.Contains(indices, index) { + peers = append(peers, pid) } } } diff --git a/beacon-chain/rpc/eth/rewards/handlers.go b/beacon-chain/rpc/eth/rewards/handlers.go index a95e7f9bdd..56a87c5b6f 100644 --- a/beacon-chain/rpc/eth/rewards/handlers.go +++ b/beacon-chain/rpc/eth/rewards/handlers.go @@ -4,6 +4,7 @@ import ( "encoding/json" "fmt" "net/http" + "slices" "strconv" "strings" @@ -388,12 +389,9 @@ func syncRewardsVals( scIndices := make([]primitives.ValidatorIndex, 0, len(allScIndices)) scVals := make([]*precompute.Validator, 0, len(allScIndices)) for _, valIdx := range valIndices { - for _, scIdx := range allScIndices { - if valIdx == scIdx { - scVals = append(scVals, allVals[valIdx]) - scIndices = append(scIndices, valIdx) - break - } + if slices.Contains(allScIndices, valIdx) { + scVals = append(scVals, allVals[valIdx]) + scIndices = append(scIndices, valIdx) } } diff --git a/beacon-chain/sync/pending_attestations_queue.go b/beacon-chain/sync/pending_attestations_queue.go index 7073a058aa..64c0033ad1 100644 --- a/beacon-chain/sync/pending_attestations_queue.go +++ b/beacon-chain/sync/pending_attestations_queue.go @@ -5,6 +5,7 @@ import ( "context" "encoding/hex" "fmt" + "slices" "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain" "github.com/OffchainLabs/prysm/v6/beacon-chain/core/blocks" @@ -382,10 +383,8 @@ func (s *Service) savePending(root [32]byte, pending any, isEqual func(other any // Skip if the attestation/aggregate from the same validator already exists in // the pending queue. 
- for _, a := range s.blkRootToPendingAtts[root] { - if isEqual(a) { - return - } + if slices.ContainsFunc(s.blkRootToPendingAtts[root], isEqual) { + return } pendingAttCount.Inc() diff --git a/beacon-chain/sync/validate_aggregate_proof.go b/beacon-chain/sync/validate_aggregate_proof.go index 4ecfd9d123..0f64ab462d 100644 --- a/beacon-chain/sync/validate_aggregate_proof.go +++ b/beacon-chain/sync/validate_aggregate_proof.go @@ -3,6 +3,7 @@ package sync import ( "context" "fmt" + "slices" "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain" "github.com/OffchainLabs/prysm/v6/beacon-chain/core/blocks" @@ -290,11 +291,8 @@ func (s *Service) validateIndexInCommittee(ctx context.Context, a ethpb.Att, val } var withinCommittee bool - for _, i := range committee { - if validatorIndex == i { - withinCommittee = true - break - } + if slices.Contains(committee, validatorIndex) { + withinCommittee = true } if !withinCommittee { return pubsub.ValidationReject, fmt.Errorf("validator index %d is not within the committee: %v", diff --git a/beacon-chain/sync/validate_beacon_attestation.go b/beacon-chain/sync/validate_beacon_attestation.go index 6a603aabd2..0d6237d81d 100644 --- a/beacon-chain/sync/validate_beacon_attestation.go +++ b/beacon-chain/sync/validate_beacon_attestation.go @@ -5,6 +5,7 @@ import ( "encoding/binary" "fmt" "reflect" + "slices" "strings" "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain" @@ -336,13 +337,7 @@ func validateAttestingIndex( // _[REJECT]_ The attester is a member of the committee -- i.e. // `attestation.attester_index in get_beacon_committee(state, attestation.data.slot, index)`. - inCommittee := false - for _, ix := range committee { - if attestingIndex == ix { - inCommittee = true - break - } - } + inCommittee := slices.Contains(committee, attestingIndex) if !inCommittee { return pubsub.ValidationReject, errors.New("attester is not a member of the committee") } diff --git a/changelog/rocksload_use_slices_contains.md b/changelog/rocksload_use_slices_contains.md new file mode 100644 index 0000000000..74c4d3210b --- /dev/null +++ b/changelog/rocksload_use_slices_contains.md @@ -0,0 +1,3 @@ +### Ignored + +- Use slices.Contains to simplify code \ No newline at end of file diff --git a/cmd/beacon-chain/storage/options.go b/cmd/beacon-chain/storage/options.go index e930c30aae..af2071d178 100644 --- a/cmd/beacon-chain/storage/options.go +++ b/cmd/beacon-chain/storage/options.go @@ -4,6 +4,7 @@ import ( "fmt" "os" "path" + "slices" "strings" "github.com/OffchainLabs/prysm/v6/beacon-chain/db/filesystem" @@ -55,10 +56,8 @@ func layoutFlagUsage() string { } func validateLayoutFlag(_ *cli.Context, v string) error { - for _, l := range filesystem.LayoutNames { - if v == l { - return nil - } + if slices.Contains(filesystem.LayoutNames, v) { + return nil } return errors.Errorf("invalid value '%s' for flag --%s, %s", v, BlobStorageLayout.Name, layoutOptions()) } diff --git a/cmd/flags/enum.go b/cmd/flags/enum.go index c7706c43fb..bf17670f0f 100644 --- a/cmd/flags/enum.go +++ b/cmd/flags/enum.go @@ -4,6 +4,7 @@ package flags import ( "fmt" + "slices" "strings" "github.com/urfave/cli/v2" @@ -19,11 +20,9 @@ type EnumValue struct { } func (e *EnumValue) Set(value string) error { - for _, enum := range e.Enum { - if enum == value { - *e.Destination = value - return nil - } + if slices.Contains(e.Enum, value) { + *e.Destination = value + return nil } return fmt.Errorf("allowed values are %s", strings.Join(e.Enum, ", ")) diff --git 
a/container/multi-value-slice/multi_value_slice.go b/container/multi-value-slice/multi_value_slice.go index 5c33535713..43df61ad1d 100644 --- a/container/multi-value-slice/multi_value_slice.go +++ b/container/multi-value-slice/multi_value_slice.go @@ -268,20 +268,16 @@ func (s *Slice[V]) At(obj Identifiable, index uint64) (V, error) { return s.sharedItems[index], nil } for _, v := range ind.Values { - for _, id := range v.ids { - if id == obj.Id() { - return v.val, nil - } + if slices.Contains(v.ids, obj.Id()) { + return v.val, nil } } return s.sharedItems[index], nil } else { item := s.appendedItems[index-uint64(len(s.sharedItems))] for _, v := range item.Values { - for _, id := range v.ids { - if id == obj.Id() { - return v.val, nil - } + if slices.Contains(v.ids, obj.Id()) { + return v.val, nil } } var def V diff --git a/testing/util/logging_test.go b/testing/util/logging_test.go index fe13716295..53894487cd 100644 --- a/testing/util/logging_test.go +++ b/testing/util/logging_test.go @@ -1,6 +1,7 @@ package util import ( + "slices" "testing" "time" @@ -66,13 +67,7 @@ func assertNoHooks(t *testing.T, logger *logrus.Logger) { func assertRegistered(t *testing.T, logger *logrus.Logger, hook ComparableHook) { for _, lvl := range hook.Levels() { registered := logger.Hooks[lvl] - found := false - for _, h := range registered { - if hook.Equal(h) { - found = true - break - } - } + found := slices.ContainsFunc(registered, hook.Equal) require.Equal(t, true, found, "Expected hook %v to be registered at level %s, but it was not", hook, lvl.String()) } } diff --git a/tools/analyzers/logcapitalization/analyzer.go b/tools/analyzers/logcapitalization/analyzer.go index 6f128a2255..f344296eea 100644 --- a/tools/analyzers/logcapitalization/analyzer.go +++ b/tools/analyzers/logcapitalization/analyzer.go @@ -6,6 +6,7 @@ import ( "errors" "go/ast" "go/token" + "slices" "strconv" "strings" "unicode" @@ -178,12 +179,7 @@ func isLoggingCall(call *ast.CallExpr, logFunctions []string, aliases map[string // isCommonLogPackage checks for common logging package names func isCommonLogPackage(pkg string) bool { common := []string{"log", "logrus", "zerolog", "zap", "glog", "klog"} - for _, c := range common { - if pkg == c { - return true - } - } - return false + return slices.Contains(common, pkg) } // isFormatFunction checks if this is a format function (ending with 'f') @@ -274,10 +270,8 @@ func isAcceptableStart(firstRune rune, s string) bool { // Special characters that are OK to start with acceptableChars := []rune{'%', '$', '/', '\\', '[', '(', '{', '"', '\'', '`', '-'} - for _, char := range acceptableChars { - if firstRune == char { - return true - } + if slices.Contains(acceptableChars, firstRune) { + return true } // URLs/paths are OK From 3e0492a6367df9c33048e618fc3b6b278ac27b3c Mon Sep 17 00:00:00 2001 From: kasey <489222+kasey@users.noreply.github.com> Date: Thu, 30 Oct 2025 14:02:25 -0500 Subject: [PATCH 062/103] also ignore errors from readdirnames (#15947) * also ignore errors from readdirnames * test case for empty blobs dir --------- Co-authored-by: Kasey Kirkham --- changelog/kasey_ignore-readdir-errors.md | 2 ++ cmd/beacon-chain/storage/options.go | 5 +++++ cmd/beacon-chain/storage/options_test.go | 7 +++++++ 3 files changed, 14 insertions(+) create mode 100644 changelog/kasey_ignore-readdir-errors.md diff --git a/changelog/kasey_ignore-readdir-errors.md b/changelog/kasey_ignore-readdir-errors.md new file mode 100644 index 0000000000..882415cd82 --- /dev/null +++ 
b/changelog/kasey_ignore-readdir-errors.md @@ -0,0 +1,2 @@ +### Ignored +- Fix bug with layout detection when readdirnames returns io.EOF. diff --git a/cmd/beacon-chain/storage/options.go b/cmd/beacon-chain/storage/options.go index af2071d178..18dc9581f8 100644 --- a/cmd/beacon-chain/storage/options.go +++ b/cmd/beacon-chain/storage/options.go @@ -2,6 +2,7 @@ package storage import ( "fmt" + "io" "os" "path" "slices" @@ -139,6 +140,10 @@ func detectLayout(dir string, c stringFlagGetter) (string, error) { // amount of wiggle room to be confident that we'll likely see a by-root director if one exists. entries, err := base.Readdirnames(16) if err != nil { + // We can get this error if the directory exists and is empty + if errors.Is(err, io.EOF) { + return filesystem.LayoutNameByEpoch, nil + } return "", errors.Wrap(err, "reading blob storage directory") } for _, entry := range entries { diff --git a/cmd/beacon-chain/storage/options_test.go b/cmd/beacon-chain/storage/options_test.go index a8ecd23c56..0af3307256 100644 --- a/cmd/beacon-chain/storage/options_test.go +++ b/cmd/beacon-chain/storage/options_test.go @@ -192,6 +192,13 @@ func TestDetectLayout(t *testing.T) { }, expectedErr: syscall.ENOTDIR, }, + { + name: "empty blobs dir", + setup: func(t *testing.T, dir string) { + require.NoError(t, os.MkdirAll(dir, 0o755)) + }, + expected: filesystem.LayoutNameByEpoch, + }, } for _, tc := range cases { From 374bae9c813ba89385f36aa73f6a5c46fade9f10 Mon Sep 17 00:00:00 2001 From: Muzry Date: Fri, 31 Oct 2025 21:17:44 +0800 Subject: [PATCH 063/103] Fix incorrect version used when sending attestation version in Fulu (#15950) * Fix incorrect version used when sending attestation version in Fulu * update typo * fix Eth-Consensus-Version in submit_signed_aggregate_proof.go --- .../muzry_fix_attestation_send_on_fulu.md | 2 + .../client/beacon-api/propose_attestation.go | 4 +- .../beacon-api/propose_attestation_test.go | 89 +++++++++++++------ .../submit_signed_aggregate_proof.go | 5 +- .../submit_signed_aggregate_proof_test.go | 53 ++++++++++- 5 files changed, 120 insertions(+), 33 deletions(-) create mode 100644 changelog/muzry_fix_attestation_send_on_fulu.md diff --git a/changelog/muzry_fix_attestation_send_on_fulu.md b/changelog/muzry_fix_attestation_send_on_fulu.md new file mode 100644 index 0000000000..035e24b761 --- /dev/null +++ b/changelog/muzry_fix_attestation_send_on_fulu.md @@ -0,0 +1,2 @@ +### Fixed +- Fix incorrect version used when sending attestation version in Fulu diff --git a/validator/client/beacon-api/propose_attestation.go b/validator/client/beacon-api/propose_attestation.go index 6127603db8..5069ce4812 100644 --- a/validator/client/beacon-api/propose_attestation.go +++ b/validator/client/beacon-api/propose_attestation.go @@ -10,6 +10,7 @@ import ( "github.com/OffchainLabs/prysm/v6/network/httputil" ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" "github.com/OffchainLabs/prysm/v6/runtime/version" + "github.com/OffchainLabs/prysm/v6/time/slots" "github.com/pkg/errors" ) @@ -67,7 +68,8 @@ func (c *beaconApiValidatorClient) proposeAttestationElectra(ctx context.Context if err != nil { return nil, err } - headers := map[string]string{"Eth-Consensus-Version": version.String(attestation.Version())} + consensusVersion := version.String(slots.ToForkVersion(attestation.Data.Slot)) + headers := map[string]string{"Eth-Consensus-Version": consensusVersion} if err = c.jsonRestHandler.Post( ctx, "/eth/v2/beacon/pool/attestations", diff --git 
a/validator/client/beacon-api/propose_attestation_test.go b/validator/client/beacon-api/propose_attestation_test.go index 4b2a5c86e5..3dc9742b8c 100644 --- a/validator/client/beacon-api/propose_attestation_test.go +++ b/validator/client/beacon-api/propose_attestation_test.go @@ -8,11 +8,14 @@ import ( "testing" "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v6/config/params" + "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" "github.com/OffchainLabs/prysm/v6/network/httputil" ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" "github.com/OffchainLabs/prysm/v6/runtime/version" "github.com/OffchainLabs/prysm/v6/testing/assert" "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v6/time/slots" "github.com/OffchainLabs/prysm/v6/validator/client/beacon-api/mock" testhelpers "github.com/OffchainLabs/prysm/v6/validator/client/beacon-api/test-helpers" "go.uber.org/mock/gomock" @@ -214,36 +217,58 @@ func TestProposeAttestationFallBack(t *testing.T) { } func TestProposeAttestationElectra(t *testing.T) { - attestation := ðpb.SingleAttestation{ - AttesterIndex: 74, - Data: ðpb.AttestationData{ - Slot: 75, - CommitteeIndex: 76, - BeaconBlockRoot: testhelpers.FillByteSlice(32, 38), - Source: ðpb.Checkpoint{ - Epoch: 78, - Root: testhelpers.FillByteSlice(32, 79), + params.SetupTestConfigCleanup(t) + params.BeaconConfig().ElectraForkEpoch = 0 + params.BeaconConfig().FuluForkEpoch = 1 + + buildSingleAttestation := func(slot primitives.Slot) *ethpb.SingleAttestation { + targetEpoch := slots.ToEpoch(slot) + sourceEpoch := targetEpoch + if targetEpoch > 0 { + sourceEpoch = targetEpoch - 1 + } + return ðpb.SingleAttestation{ + AttesterIndex: 74, + Data: ðpb.AttestationData{ + Slot: slot, + CommitteeIndex: 76, + BeaconBlockRoot: testhelpers.FillByteSlice(32, 38), + Source: ðpb.Checkpoint{ + Epoch: sourceEpoch, + Root: testhelpers.FillByteSlice(32, 79), + }, + Target: ðpb.Checkpoint{ + Epoch: targetEpoch, + Root: testhelpers.FillByteSlice(32, 81), + }, }, - Target: ðpb.Checkpoint{ - Epoch: 80, - Root: testhelpers.FillByteSlice(32, 81), - }, - }, - Signature: testhelpers.FillByteSlice(96, 82), - CommitteeId: 83, + Signature: testhelpers.FillByteSlice(96, 82), + CommitteeId: 83, + } } + attestationElectra := buildSingleAttestation(0) + attestationFulu := buildSingleAttestation(params.BeaconConfig().SlotsPerEpoch) + tests := []struct { - name string - attestation *ethpb.SingleAttestation - expectedErrorMessage string - endpointError error - endpointCall int + name string + attestation *ethpb.SingleAttestation + expectedConsensusVersion string + expectedErrorMessage string + endpointError error + endpointCall int }{ { - name: "valid", - attestation: attestation, - endpointCall: 1, + name: "valid electra", + attestation: attestationElectra, + expectedConsensusVersion: version.String(slots.ToForkVersion(attestationElectra.GetData().GetSlot())), + endpointCall: 1, + }, + { + name: "valid fulu consensus version", + attestation: attestationFulu, + expectedConsensusVersion: version.String(slots.ToForkVersion(attestationFulu.GetData().GetSlot())), + endpointCall: 1, }, { name: "nil attestation", @@ -283,8 +308,11 @@ func TestProposeAttestationElectra(t *testing.T) { expectedErrorMessage: "attestation's target can't be nil", }, { - name: "bad request", - attestation: attestation, + name: "bad request", + attestation: attestationElectra, + expectedConsensusVersion: version.String( + 
slots.ToForkVersion(attestationElectra.GetData().GetSlot()), + ), expectedErrorMessage: "bad request", endpointError: errors.New("bad request"), endpointCall: 1, @@ -304,11 +332,14 @@ func TestProposeAttestationElectra(t *testing.T) { } ctx := t.Context() - headers := map[string]string{"Eth-Consensus-Version": version.String(test.attestation.Version())} + headerMatcher := gomock.Any() + if test.expectedConsensusVersion != "" { + headerMatcher = gomock.Eq(map[string]string{"Eth-Consensus-Version": test.expectedConsensusVersion}) + } jsonRestHandler.EXPECT().Post( gomock.Any(), "/eth/v2/beacon/pool/attestations", - headers, + headerMatcher, bytes.NewBuffer(marshalledAttestations), nil, ).Return( @@ -325,7 +356,7 @@ func TestProposeAttestationElectra(t *testing.T) { require.NoError(t, err) require.NotNil(t, proposeResponse) - expectedAttestationDataRoot, err := attestation.Data.HashTreeRoot() + expectedAttestationDataRoot, err := test.attestation.Data.HashTreeRoot() require.NoError(t, err) // Make sure that the attestation data root is set diff --git a/validator/client/beacon-api/submit_signed_aggregate_proof.go b/validator/client/beacon-api/submit_signed_aggregate_proof.go index a31086deed..224b5c36d4 100644 --- a/validator/client/beacon-api/submit_signed_aggregate_proof.go +++ b/validator/client/beacon-api/submit_signed_aggregate_proof.go @@ -10,6 +10,7 @@ import ( "github.com/OffchainLabs/prysm/v6/network/httputil" ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" "github.com/OffchainLabs/prysm/v6/runtime/version" + "github.com/OffchainLabs/prysm/v6/time/slots" "github.com/pkg/errors" ) @@ -54,7 +55,9 @@ func (c *beaconApiValidatorClient) submitSignedAggregateSelectionProofElectra(ct if err != nil { return nil, errors.Wrap(err, "failed to marshal SignedAggregateAttestationAndProofElectra") } - headers := map[string]string{"Eth-Consensus-Version": version.String(in.SignedAggregateAndProof.Version())} + dataSlot := in.SignedAggregateAndProof.Message.Aggregate.Data.Slot + consensusVersion := version.String(slots.ToForkVersion(dataSlot)) + headers := map[string]string{"Eth-Consensus-Version": consensusVersion} if err = c.jsonRestHandler.Post(ctx, "/eth/v2/validator/aggregate_and_proofs", headers, bytes.NewBuffer(body), nil); err != nil { return nil, err } diff --git a/validator/client/beacon-api/submit_signed_aggregate_proof_test.go b/validator/client/beacon-api/submit_signed_aggregate_proof_test.go index d41b9a89ab..26306c1c20 100644 --- a/validator/client/beacon-api/submit_signed_aggregate_proof_test.go +++ b/validator/client/beacon-api/submit_signed_aggregate_proof_test.go @@ -7,11 +7,13 @@ import ( "testing" "github.com/OffchainLabs/prysm/v6/api/server/structs" + "github.com/OffchainLabs/prysm/v6/config/params" "github.com/OffchainLabs/prysm/v6/network/httputil" ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" "github.com/OffchainLabs/prysm/v6/runtime/version" "github.com/OffchainLabs/prysm/v6/testing/assert" "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v6/time/slots" "github.com/OffchainLabs/prysm/v6/validator/client/beacon-api/mock" testhelpers "github.com/OffchainLabs/prysm/v6/validator/client/beacon-api/test-helpers" "github.com/pkg/errors" @@ -123,6 +125,10 @@ func TestSubmitSignedAggregateSelectionProof_Fallback(t *testing.T) { } func TestSubmitSignedAggregateSelectionProofElectra_Valid(t *testing.T) { + params.SetupTestConfigCleanup(t) + params.BeaconConfig().ElectraForkEpoch = 0 + params.BeaconConfig().FuluForkEpoch = 
100 + ctrl := gomock.NewController(t) defer ctrl.Finish() @@ -131,7 +137,8 @@ func TestSubmitSignedAggregateSelectionProofElectra_Valid(t *testing.T) { require.NoError(t, err) ctx := t.Context() - headers := map[string]string{"Eth-Consensus-Version": version.String(signedAggregateAndProofElectra.Message.Version())} + expectedVersion := version.String(slots.ToForkVersion(signedAggregateAndProofElectra.Message.Aggregate.Data.Slot)) + headers := map[string]string{"Eth-Consensus-Version": expectedVersion} jsonRestHandler := mock.NewMockJsonRestHandler(ctrl) jsonRestHandler.EXPECT().Post( gomock.Any(), @@ -155,6 +162,10 @@ func TestSubmitSignedAggregateSelectionProofElectra_Valid(t *testing.T) { } func TestSubmitSignedAggregateSelectionProofElectra_BadRequest(t *testing.T) { + params.SetupTestConfigCleanup(t) + params.BeaconConfig().ElectraForkEpoch = 0 + params.BeaconConfig().FuluForkEpoch = 100 + ctrl := gomock.NewController(t) defer ctrl.Finish() @@ -163,7 +174,8 @@ func TestSubmitSignedAggregateSelectionProofElectra_BadRequest(t *testing.T) { require.NoError(t, err) ctx := t.Context() - headers := map[string]string{"Eth-Consensus-Version": version.String(signedAggregateAndProofElectra.Message.Version())} + expectedVersion := version.String(slots.ToForkVersion(signedAggregateAndProofElectra.Message.Aggregate.Data.Slot)) + headers := map[string]string{"Eth-Consensus-Version": expectedVersion} jsonRestHandler := mock.NewMockJsonRestHandler(ctrl) jsonRestHandler.EXPECT().Post( gomock.Any(), @@ -182,6 +194,43 @@ func TestSubmitSignedAggregateSelectionProofElectra_BadRequest(t *testing.T) { assert.ErrorContains(t, "bad request", err) } +func TestSubmitSignedAggregateSelectionProofElectra_FuluVersion(t *testing.T) { + params.SetupTestConfigCleanup(t) + params.BeaconConfig().ElectraForkEpoch = 0 + params.BeaconConfig().FuluForkEpoch = 1 + + ctrl := gomock.NewController(t) + defer ctrl.Finish() + + signedAggregateAndProofElectra := generateSignedAggregateAndProofElectraJson() + marshalledSignedAggregateSignedAndProofElectra, err := json.Marshal([]*structs.SignedAggregateAttestationAndProofElectra{jsonifySignedAggregateAndProofElectra(signedAggregateAndProofElectra)}) + require.NoError(t, err) + + ctx := t.Context() + expectedVersion := version.String(slots.ToForkVersion(signedAggregateAndProofElectra.Message.Aggregate.Data.Slot)) + headers := map[string]string{"Eth-Consensus-Version": expectedVersion} + jsonRestHandler := mock.NewMockJsonRestHandler(ctrl) + jsonRestHandler.EXPECT().Post( + gomock.Any(), + "/eth/v2/validator/aggregate_and_proofs", + headers, + bytes.NewBuffer(marshalledSignedAggregateSignedAndProofElectra), + nil, + ).Return( + nil, + ).Times(1) + + attestationDataRoot, err := signedAggregateAndProofElectra.Message.Aggregate.Data.HashTreeRoot() + require.NoError(t, err) + + validatorClient := &beaconApiValidatorClient{jsonRestHandler: jsonRestHandler} + resp, err := validatorClient.submitSignedAggregateSelectionProofElectra(ctx, ðpb.SignedAggregateSubmitElectraRequest{ + SignedAggregateAndProof: signedAggregateAndProofElectra, + }) + require.NoError(t, err) + assert.DeepEqual(t, attestationDataRoot[:], resp.AttestationDataRoot) +} + func generateSignedAggregateAndProofJson() *ethpb.SignedAggregateAttestationAndProof { return ðpb.SignedAggregateAttestationAndProof{ Message: ðpb.AggregateAttestationAndProof{ From 577899bfec0ff52d4db9fdb564140770435553fc Mon Sep 17 00:00:00 2001 From: Preston Van Loon Date: Fri, 31 Oct 2025 10:25:18 -0500 Subject: [PATCH 064/103] P2p active val count 
lock (#15955) * Add a lock for p2p computation of active validator count and limit only to topics that need it. * Changelog fragment * Update gossip_scoring_params.go Wrap errors --- beacon-chain/p2p/gossip_scoring_params.go | 19 ++++++-- beacon-chain/p2p/service.go | 59 ++++++++++++----------- changelog/pvl-active-val-count-lock.md | 4 ++ 3 files changed, 49 insertions(+), 33 deletions(-) create mode 100644 changelog/pvl-active-val-count-lock.md diff --git a/beacon-chain/p2p/gossip_scoring_params.go b/beacon-chain/p2p/gossip_scoring_params.go index 84c80d4f1a..69a853566a 100644 --- a/beacon-chain/p2p/gossip_scoring_params.go +++ b/beacon-chain/p2p/gossip_scoring_params.go @@ -2,6 +2,7 @@ package p2p import ( "context" + "fmt" "math" "net" "reflect" @@ -106,18 +107,26 @@ func peerScoringParams(colocationWhitelist []*net.IPNet) (*pubsub.PeerScoreParam } func (s *Service) topicScoreParams(topic string) (*pubsub.TopicScoreParams, error) { - activeValidators, err := s.retrieveActiveValidators() - if err != nil { - return nil, err - } switch { case strings.Contains(topic, GossipBlockMessage): return defaultBlockTopicParams(), nil case strings.Contains(topic, GossipAggregateAndProofMessage): + activeValidators, err := s.retrieveActiveValidators() + if err != nil { + return nil, fmt.Errorf("failed to compute active validator count for topic %s: %w", GossipAggregateAndProofMessage, err) + } return defaultAggregateTopicParams(activeValidators), nil case strings.Contains(topic, GossipAttestationMessage): + activeValidators, err := s.retrieveActiveValidators() + if err != nil { + return nil, fmt.Errorf("failed to compute active validator count for topic %s: %w", GossipAttestationMessage, err) + } return defaultAggregateSubnetTopicParams(activeValidators), nil case strings.Contains(topic, GossipSyncCommitteeMessage): + activeValidators, err := s.retrieveActiveValidators() + if err != nil { + return nil, fmt.Errorf("failed to compute active validator count for topic %s: %w", GossipSyncCommitteeMessage, err) + } return defaultSyncSubnetTopicParams(activeValidators), nil case strings.Contains(topic, GossipContributionAndProofMessage): return defaultSyncContributionTopicParams(), nil @@ -142,6 +151,8 @@ func (s *Service) topicScoreParams(topic string) (*pubsub.TopicScoreParams, erro } func (s *Service) retrieveActiveValidators() (uint64, error) { + s.activeValidatorCountLock.Lock() + defer s.activeValidatorCountLock.Unlock() if s.activeValidatorCount != 0 { return s.activeValidatorCount, nil } diff --git a/beacon-chain/p2p/service.go b/beacon-chain/p2p/service.go index 000f65ce88..40cff9e35f 100644 --- a/beacon-chain/p2p/service.go +++ b/beacon-chain/p2p/service.go @@ -65,35 +65,36 @@ var ( // Service for managing peer to peer (p2p) networking. 
type Service struct { - started bool - isPreGenesis bool - pingMethod func(ctx context.Context, id peer.ID) error - pingMethodLock sync.RWMutex - cancel context.CancelFunc - cfg *Config - peers *peers.Status - addrFilter *multiaddr.Filters - ipLimiter *leakybucket.Collector - privKey *ecdsa.PrivateKey - metaData metadata.Metadata - pubsub *pubsub.PubSub - joinedTopics map[string]*pubsub.Topic - joinedTopicsLock sync.RWMutex - subnetsLock map[uint64]*sync.RWMutex - subnetsLockLock sync.Mutex // Lock access to subnetsLock - initializationLock sync.Mutex - dv5Listener ListenerRebooter - startupErr error - ctx context.Context - host host.Host - genesisTime time.Time - genesisValidatorsRoot []byte - activeValidatorCount uint64 - peerDisconnectionTime *cache.Cache - custodyInfo *custodyInfo - custodyInfoLock sync.RWMutex // Lock access to custodyInfo - custodyInfoSet chan struct{} - allForkDigests map[[4]byte]struct{} + started bool + isPreGenesis bool + pingMethod func(ctx context.Context, id peer.ID) error + pingMethodLock sync.RWMutex + cancel context.CancelFunc + cfg *Config + peers *peers.Status + addrFilter *multiaddr.Filters + ipLimiter *leakybucket.Collector + privKey *ecdsa.PrivateKey + metaData metadata.Metadata + pubsub *pubsub.PubSub + joinedTopics map[string]*pubsub.Topic + joinedTopicsLock sync.RWMutex + subnetsLock map[uint64]*sync.RWMutex + subnetsLockLock sync.Mutex // Lock access to subnetsLock + initializationLock sync.Mutex + dv5Listener ListenerRebooter + startupErr error + ctx context.Context + host host.Host + genesisTime time.Time + genesisValidatorsRoot []byte + activeValidatorCount uint64 + activeValidatorCountLock sync.Mutex + peerDisconnectionTime *cache.Cache + custodyInfo *custodyInfo + custodyInfoLock sync.RWMutex // Lock access to custodyInfo + custodyInfoSet chan struct{} + allForkDigests map[[4]byte]struct{} } type custodyInfo struct { diff --git a/changelog/pvl-active-val-count-lock.md b/changelog/pvl-active-val-count-lock.md new file mode 100644 index 0000000000..68a9b3b546 --- /dev/null +++ b/changelog/pvl-active-val-count-lock.md @@ -0,0 +1,4 @@ +### Fixed + +- Changed the behavior of topic subscriptions such that only topics that require the active validator count will compute that value. +- Added a Mutex to the computation of active validator count during topic subscription to avoid a race condition where multiple goroutines are computing the same work. From d3bd0eaa30bbd06a46bb8356c45cd63b2c65da07 Mon Sep 17 00:00:00 2001 From: fernantho Date: Fri, 31 Oct 2025 18:37:59 +0100 Subject: [PATCH 065/103] SSZ-QL: update "path parsing" data types (#15935) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * updated path processing data types, refactored ParsePath and fixed tests * updated generalized index accordingly, changed input parameter path type from []PathElemen to Path * updated query.go accordingly, changed input parameter path type from []PathElemen to Path * added descriptive changelog * Update encoding/ssz/query/path.go Co-authored-by: Jun Song <87601811+syjn99@users.noreply.github.com> * Added documentation for Path struct and renamed to for clarity * Update encoding/ssz/query/path.go Co-authored-by: Radosław Kapka * updated changelog to its correct type: Changed * updated outdated comment in generalized_index.go and removed test in generalized_index_test.go as this one belongs in path_test.go * Added validateRawPath with strict raw-path validation only - no raw-path fixing is added. 
Added test suite covering * added extra tests for wrongly formatted paths --------- Co-authored-by: Jun Song <87601811+syjn99@users.noreply.github.com> Co-authored-by: Radosław Kapka Co-authored-by: Radosław Kapka --- ...fernantho_ssz-ql-update-path-processing.md | 3 + encoding/ssz/query/generalized_index.go | 10 +- encoding/ssz/query/generalized_index_test.go | 10 +- encoding/ssz/query/path.go | 119 ++++-- encoding/ssz/query/path_test.go | 346 +++++++++++------- encoding/ssz/query/query.go | 8 +- 6 files changed, 327 insertions(+), 169 deletions(-) create mode 100644 changelog/fernantho_ssz-ql-update-path-processing.md diff --git a/changelog/fernantho_ssz-ql-update-path-processing.md b/changelog/fernantho_ssz-ql-update-path-processing.md new file mode 100644 index 0000000000..de626041a8 --- /dev/null +++ b/changelog/fernantho_ssz-ql-update-path-processing.md @@ -0,0 +1,3 @@ +### Changed +- Introduced Path type for SSZ-QL queries and updated PathElement (removed Length field, kept Index) enforcing that len queries are terminal (at most one per path). +- Changed length query syntax from `block.payload.len(transactions)` to `len(block.payload.transactions)` diff --git a/encoding/ssz/query/generalized_index.go b/encoding/ssz/query/generalized_index.go index 8c8f894266..9a581c8287 100644 --- a/encoding/ssz/query/generalized_index.go +++ b/encoding/ssz/query/generalized_index.go @@ -14,13 +14,13 @@ const listBaseIndex = 2 // 1. The sszInfo of the root object, to be able to navigate the SSZ structure // 2. The path to the field (e.g., "field_a.field_b[3].field_c") // It walks the path step by step, updating the generalized index at each step. -func GetGeneralizedIndexFromPath(info *SszInfo, path []PathElement) (uint64, error) { +func GetGeneralizedIndexFromPath(info *SszInfo, path Path) (uint64, error) { if info == nil { return 0, errors.New("SszInfo is nil") } // If path is empty, no generalized index can be computed.
- if len(path) == 0 { + if len(path.Elements) == 0 { return 0, errors.New("cannot compute generalized index for an empty path") } @@ -28,7 +28,7 @@ func GetGeneralizedIndexFromPath(info *SszInfo, path []PathElement) (uint64, err currentIndex := uint64(1) currentInfo := info - for _, pathElement := range path { + for index, pathElement := range path.Elements { element := pathElement // Check that we are in a container to access fields @@ -52,8 +52,8 @@ func GetGeneralizedIndexFromPath(info *SszInfo, path []PathElement) (uint64, err currentIndex = currentIndex*nextPowerOfTwo(chunkCount) + fieldPos currentInfo = fieldSsz - // Check if a path element is a length field - if element.Length { + // Check for length access: element is the last in the path and requests length + if path.Length && index == len(path.Elements)-1 { currentInfo, currentIndex, err = calculateLengthGeneralizedIndex(fieldSsz, element, currentIndex) if err != nil { return 0, fmt.Errorf("length calculation error: %w", err) diff --git a/encoding/ssz/query/generalized_index_test.go b/encoding/ssz/query/generalized_index_test.go index 080cf4a6f9..25792807d4 100644 --- a/encoding/ssz/query/generalized_index_test.go +++ b/encoding/ssz/query/generalized_index_test.go @@ -65,12 +65,6 @@ func TestGetIndicesFromPath_FixedNestedContainer(t *testing.T) { expectedIndex: 3, expectError: false, }, - { - name: "Empty path error", - path: "", - expectError: true, - errorMessage: "empty path", - }, } for _, tc := range testCases { @@ -217,8 +211,8 @@ func TestGetIndicesFromPath_VariableTestContainer(t *testing.T) { expectError: false, }, { - name: "variable_container_list[0].inner_1.len(nested_list_field[3])", - path: "variable_container_list[0].inner_1.len(nested_list_field[3])", + name: "len(variable_container_list[0].inner_1.nested_list_field[3])", + path: "len(variable_container_list[0].inner_1.nested_list_field[3])", expectError: true, errorMessage: "length calculation error: len() is not supported for multi-dimensional arrays", }, diff --git a/encoding/ssz/query/path.go b/encoding/ssz/query/path.go index 569c96fdb5..6edd529649 100644 --- a/encoding/ssz/query/path.go +++ b/encoding/ssz/query/path.go @@ -10,51 +10,77 @@ import ( // PathElement represents a single element in a path. type PathElement struct { - Length bool - Name string + Name string // [Optional] Index for List/Vector elements Index *uint64 } -var arrayIndexRegex = regexp.MustCompile(`\[\s*([^\]]+)\s*\]`) +// Path represents the entire path structure for SSZ-QL queries. It consists of multiple PathElements +// and a flag indicating if the path is querying for length. +type Path struct { + // If true, the path is querying for the length of the final element in Elements field + Length bool + // Sequence of path elements representing the navigation through the SSZ structure + Elements []PathElement +} +// Matches an array index expression like [123] and captures the numeric index without the brackets. +var arrayIndexRegex = regexp.MustCompile(`\[(\d+)\]`) +// Matches an entire string that’s a len() call (whitespace flexible), capturing the inner expression and disallowing any trailing characters. var lengthRegex = regexp.MustCompile(`^\s*len\s*\(\s*([^)]+?)\s*\)\s*$`) +// Valid path characters: letters, digits, dot, underscore, square brackets and parentheses only. +// Any other character will render the path invalid.
+var validPathChars = regexp.MustCompile(`^[A-Za-z0-9._\[\]\(\)]*$`) + +// Invalid patterns: a closing bracket followed directly by a letter or underscore +var invalidBracketPattern = regexp.MustCompile(`\][^.\[\)]|\).`) + // ParsePath parses a raw path string into a slice of PathElements. // note: field names are stored in snake case format. rawPath has to be provided in snake case. // 1. Supports dot notation for field access (e.g., "field1.field2"). // 2. Supports array indexing using square brackets (e.g., "array_field[0]"). // 3. Supports length access using len() notation (e.g., "len(array_field)"). // 4. Handles leading dots and validates path format. -func ParsePath(rawPath string) ([]PathElement, error) { - rawElements := strings.Split(rawPath, ".") +func ParsePath(rawPath string) (Path, error) { + if err := validateRawPath(rawPath); err != nil { + return Path{}, err + } + + var rawElements []string + var processedPath Path + + matches := lengthRegex.FindStringSubmatch(rawPath) + + // FindStringSubmatch matches a whole string like "len(field_name)" and its inner expression. + // For a path element to be a length query, len(matches) should be 2: + // 1. Full match: "len(field_name)" + // 2. Inner expression: "field_name" + if len(matches) == 2 { + processedPath.Length = true + // If we have found a len() expression, we only want to parse its inner expression. + rawElements = strings.Split(matches[1], ".") + } else { + // Normal path parsing + rawElements = strings.Split(rawPath, ".") + } if rawElements[0] == "" { // Remove leading dot if present rawElements = rawElements[1:] } - var path []PathElement + var pathElements []PathElement for _, elem := range rawElements { if elem == "" { - return nil, errors.New("invalid path: consecutive dots or trailing dot") + return Path{}, errors.New("invalid path: consecutive dots or trailing dot") } // Processing element string processingField := elem var pathElement PathElement - matches := lengthRegex.FindStringSubmatch(processingField) - // FindStringSubmatch matches a whole string like "len(field_name)" and its inner expression. - // For a path element to be a length query, len(matches) should be 2: - // 1. Full match: "len(field_name)" - // 2. Inner expression: "field_name" - if len(matches) == 2 { - pathElement.Length = true - // Extract the inner expression between len( and ) and continue parsing on that - processingField = matches[1] - } - // Default name is the full working string (may be updated below if it contains indices) pathElement.Name = processingField @@ -63,22 +89,71 @@ func ParsePath(rawPath string) ([]PathElement, error) { pathElement.Name = extractFieldName(processingField) indices, err := extractArrayIndices(processingField) if err != nil { - return nil, err + return Path{}, err } // Although extractArrayIndices supports multiple indices, // only a single index is supported per PathElement, e.g., "transactions[0]" is valid // while "transactions[0][0]" is rejected explicitly. if len(indices) != 1 { - return nil, fmt.Errorf("multiple indices not supported in token %s", processingField) + return Path{}, fmt.Errorf("multiple indices not supported in token %s", processingField) } pathElement.Index = &indices[0] } - path = append(path, pathElement) + pathElements = append(pathElements, pathElement) } - return path, nil + processedPath.Elements = pathElements + return processedPath, nil +} + +// validateRawPath performs initial validation of the raw path string: +// 1. 
Rejects invalid characters (only letters, digits, '.', '[]', and '()' are allowed). +// 2. Validates balanced parentheses +// 3. Validates balanced brackets. +// 4. Ensures len() calls are only at the start of the path. +// 5. Rejects empty len() calls. +// 6. Rejects invalid patterns like "][a" or "][_" which indicate malformed paths. +func validateRawPath(rawPath string) error { + // 1. Reject any path containing invalid characters (this includes spaces). + if !validPathChars.MatchString(rawPath) { + return fmt.Errorf("invalid character in path: only letters, digits, '.', '[]' and '()' are allowed") + } + + // 2. Basic validation for balanced parentheses: wrongly formatted paths like "test))((" are not rejected in this condition but later. + if strings.Count(rawPath, "(") != strings.Count(rawPath, ")") { + return fmt.Errorf("unmatched parentheses in path: %s", rawPath) + } + + // 3. Basic validation for balanced brackets: + // wrongly formatted paths like "array][0][" are rejected by checking bracket counts and format. + matches := arrayIndexRegex.FindAllStringSubmatch(rawPath, -1) + openBracketsCount := strings.Count(rawPath, "[") + closeBracketsCount := strings.Count(rawPath, "]") + if openBracketsCount != closeBracketsCount { + return fmt.Errorf("unmatched brackets in path: %s", rawPath) + } + if len(matches) != openBracketsCount || len(matches) != closeBracketsCount { + return fmt.Errorf("invalid bracket format in path: %s", rawPath) + } + + // 4. Reject len() calls not at the start of the path + if strings.Index(rawPath, "len(") > 0 { + return fmt.Errorf("len() call must be at the start of the path: %s", rawPath) + } + + // 5. Reject empty len() calls + if strings.Contains(rawPath, "len()") { + return fmt.Errorf("len() call must not be empty: %s", rawPath) + } + + // 6. 
Reject invalid patterns like "][a" or "][_" which indicate malformed paths + if invalidBracketPattern.MatchString(rawPath) { + return fmt.Errorf("invalid path format near brackets in path: %s", rawPath) + } + + return nil } // extractFieldName extracts the field name from a path element name (removes array indices) diff --git a/encoding/ssz/query/path_test.go b/encoding/ssz/query/path_test.go index 62f4a86c43..363aacfa44 100644 --- a/encoding/ssz/query/path_test.go +++ b/encoding/ssz/query/path_test.go @@ -14,199 +14,285 @@ func TestParsePath(t *testing.T) { tests := []struct { name string path string - expected []query.PathElement + expected query.Path wantErr bool }{ + { + name: "simple path", + path: "data", + expected: query.Path{ + Length: false, + Elements: []query.PathElement{ + {Name: "data"}, + }, + }, + wantErr: false, + }, + { + name: "simple path beginning with dot", + path: ".data", + expected: query.Path{ + Length: false, + Elements: []query.PathElement{ + {Name: "data"}, + }, + }, + wantErr: false, + }, + { + name: "simple path trailing dot", + path: "data.", + wantErr: true, + }, + { + name: "simple path surrounded by dot", + path: ".data.", + wantErr: true, + }, + { + name: "simple path beginning with two dots", + path: "..data", + wantErr: true, + }, { name: "simple nested path", path: "data.target.root", - expected: []query.PathElement{ - {Name: "data"}, - {Name: "target"}, - {Name: "root"}, + expected: query.Path{ + Length: false, + Elements: []query.PathElement{ + {Name: "data"}, + {Name: "target"}, + {Name: "root"}, + }, }, wantErr: false, }, { - name: "simple nested path with leading dot", - path: ".data.target.root", - expected: []query.PathElement{ - {Name: "data"}, - {Name: "target"}, - {Name: "root"}, + name: "len with top-level identifier", + path: "len(data)", + expected: query.Path{ + Length: true, + Elements: []query.PathElement{ + {Name: "data"}, + }, }, wantErr: false, }, { - name: "simple length path with length field", - path: "data.target.len(root)", - expected: []query.PathElement{ - {Name: "data"}, - {Name: "target"}, - {Name: "root", Length: true}, + name: "len with top-level identifier and leading dot", + path: "len(.data)", + expected: query.Path{ + Length: true, + Elements: []query.PathElement{ + {Name: "data"}, + }, }, wantErr: false, }, { - name: "len with top-level identifier", - path: "len(data)", - expected: []query.PathElement{{Name: "data", Length: true}}, - wantErr: false, + name: "len with top-level identifier and trailing dot", + path: "len(data.)", + wantErr: true, }, { - name: "length with messy whitespace", - path: "data.target. 
\tlen ( root ) ", - expected: []query.PathElement{ - {Name: "data"}, - {Name: "target"}, - {Name: "root", Length: true}, + name: "len with top-level identifier beginning dot", + path: ".len(data)", + wantErr: true, + }, + { + name: "len with dotted path inside", + path: "len(data.target.root)", + expected: query.Path{ + Length: true, + Elements: []query.PathElement{ + {Name: "data"}, + {Name: "target"}, + {Name: "root"}, + }, }, wantErr: false, }, { - name: "len with numeric index inside argument", - path: "data.len(a[10])", - expected: []query.PathElement{ - {Name: "data"}, - {Name: "a", Length: true, Index: u64(10)}, + name: "simple length path with non-outer length field", + path: "data.target.len(root)", + wantErr: true, + }, + { + name: "simple path with `len` used as a field name", + path: "data.len", + expected: query.Path{ + Length: false, + Elements: []query.PathElement{ + {Name: "data"}, + {Name: "len"}, + }, }, wantErr: false, }, { - name: "array index with spaces", - path: "arr[ 42 ]", - expected: []query.PathElement{{Name: "arr", Index: u64(42)}}, - wantErr: false, - }, - { - name: "array leading zeros", - path: "arr[001]", - expected: []query.PathElement{{Name: "arr", Index: u64(1)}}, - wantErr: false, - }, - { - name: "array max uint64", - path: "arr[18446744073709551615]", - expected: []query.PathElement{{Name: "arr", Index: u64(18446744073709551615)}}, - wantErr: false, - }, - { - name: "len with dotted path inside - no input validation - reverts at a later stage", - path: "len(data.target.root)", - expected: []query.PathElement{{Name: "len(data", Length: false}, {Name: "target", Length: false}, {Name: "root)", Length: false}}, - wantErr: false, - }, - { - name: "len with dotted path then more - no input validation - reverts at a later stage", - path: "len(data.target.root).foo", - expected: []query.PathElement{{Name: "len(data", Length: false}, {Name: "target", Length: false}, {Name: "root)", Length: false}, {Name: "foo", Length: false}}, - wantErr: false, - }, - { - name: "len without closing paren - no input validation - reverts at a later stage", - path: "len(root", - expected: []query.PathElement{{Name: "len(root"}}, - wantErr: false, - }, - { - name: "len with extra closing paren - no input validation - reverts at a later stage", - path: "len(root))", - expected: []query.PathElement{{Name: "len(root))"}}, - wantErr: false, - }, - { - name: "empty len argument - no input validation - reverts at a later stage", - path: "len()", - expected: []query.PathElement{{Name: "len()"}}, - wantErr: false, - }, - { - name: "len with comma-separated args - no input validation - reverts at a later stage", - path: "len(a,b)", - expected: []query.PathElement{{Name: "a,b", Length: true}}, - wantErr: false, - }, - { - name: "len call followed by index (outer) - no input validation - reverts at a later stage", - path: "data.len(root)[0]", - expected: []query.PathElement{ - {Name: "data"}, - {Name: "len(root)", Index: u64(0)}, + name: "simple path with `len` used as a field name + trailing field", + path: "data.len.value", + expected: query.Path{ + Length: false, + Elements: []query.PathElement{ + {Name: "data"}, + {Name: "len"}, + {Name: "value"}, + }, }, wantErr: false, }, { - name: "cannot provide consecutive dots in raw path", - path: "data..target.root", + name: "simple path with `len`", + path: "len.len", + expected: query.Path{ + Length: false, + Elements: []query.PathElement{ + {Name: "len"}, + {Name: "len"}, + }, + }, + wantErr: false, + }, + { + name: "simple length path with 
length field", + path: "len.len(root)", wantErr: true, }, { - name: "cannot provide a negative index in array path", - path: ".data.target.root[-1]", + name: "empty length field", + path: "len()", wantErr: true, }, { - name: "invalid index in array path", - path: ".data.target.root[a]", + name: "length field not terminal", + path: "len(data).foo", wantErr: true, }, { - name: "multidimensional array index in path", - path: ".data.target.root[0][1]", + name: "length field with missing closing paren", + path: "len(data", wantErr: true, }, { - name: "leading double dot", - path: "..data", - expected: nil, - wantErr: true, - }, - { - name: "trailing dot", - path: "data.target.", - expected: nil, - wantErr: true, - }, - { - name: "len with inner bracket non-numeric index", - path: "data.len(a[b])", + name: "length field with two closing paren", + path: "len(data))", wantErr: true, }, { - name: "array empty index", - path: "arr[]", + name: "len with comma-separated args", + path: "len(a,b)", wantErr: true, }, { - name: "array hex index", - path: "arr[0x10]", + name: "array index path", + path: "arr[42]", + expected: query.Path{ + Length: false, + Elements: []query.PathElement{ + {Name: "arr", Index: u64(42)}, + }, + }, + wantErr: false, + }, + { + name: "array index path with max uint64", + path: "arr[18446744073709551615]", + expected: query.Path{ + Length: false, + Elements: []query.PathElement{ + {Name: "arr", Index: u64(18446744073709551615)}, + }, + }, + wantErr: false, + }, + { + name: "array element in wrong nested path", + path: "arr[42]foo", wantErr: true, }, { - name: "array missing closing bracket", - path: "arr[12", + name: "array index in nested path", + path: "arr[42].foo", + expected: query.Path{ + Length: false, + Elements: []query.PathElement{ + {Name: "arr", Index: u64(42)}, + {Name: "foo"}, + }, + }, + wantErr: false, + }, + { + name: "array index in deeper nested path", + path: "arr[42].foo.bar[10]", + expected: query.Path{ + Length: false, + Elements: []query.PathElement{ + {Name: "arr", Index: u64(42)}, + {Name: "foo"}, + {Name: "bar", Index: u64(10)}, + }, + }, + wantErr: false, + }, + { + name: "length of array element", + path: "len(arr[42])", + expected: query.Path{ + Length: true, + Elements: []query.PathElement{ + {Name: "arr", Index: u64(42)}, + }, + }, + wantErr: false, + }, + { + name: "length of array + trailing item", + path: "len(arr)[0]", wantErr: true, }, { - name: "array plus sign index", - path: "arr[+3]", + name: "length of nested path within array element", + path: "len(arr[42].foo)", + expected: query.Path{ + Length: true, + Elements: []query.PathElement{ + {Name: "arr", Index: u64(42)}, + {Name: "foo"}, + }, + }, + wantErr: false, + }, + { + name: "empty spaces in path", + path: "data . target", wantErr: true, }, { - name: "array unicode digits", - path: "arr[12]", + name: "leading dot + empty spaces", + path: ". data", wantErr: true, }, { - name: "array overflow uint64", - path: "arr[18446744073709551616]", + name: "length with leading dot + empty spaces", + path: "len(. 
data)", wantErr: true, }, { - name: "array index then suffix", - path: "field[1]suffix", - expected: []query.PathElement{{Name: "field", Index: u64(1)}}, - wantErr: false, + name: "Empty path error", + path: "", + expected: query.Path{}, + }, + { + name: "length with leading dot + empty spaces", + path: "test))((", + wantErr: true, + }, + { + name: "length with leading dot + empty spaces", + path: "array][0][", + wantErr: true, }, } @@ -220,7 +306,7 @@ func TestParsePath(t *testing.T) { } require.NoError(t, err) - require.Equal(t, len(tt.expected), len(parsedPath), "Expected %d path elements, got %d", len(tt.expected), len(parsedPath)) + require.Equal(t, len(tt.expected.Elements), len(parsedPath.Elements), "Expected %d path elements, got %d", len(tt.expected.Elements), len(parsedPath.Elements)) require.DeepEqual(t, tt.expected, parsedPath, "Parsed path does not match expected path") }) } diff --git a/encoding/ssz/query/query.go b/encoding/ssz/query/query.go index 8b90788eed..44467b5e63 100644 --- a/encoding/ssz/query/query.go +++ b/encoding/ssz/query/query.go @@ -7,19 +7,19 @@ import ( // CalculateOffsetAndLength calculates the offset and length of a given path within the SSZ object. // By walking the given path, it accumulates the offsets based on SszInfo. -func CalculateOffsetAndLength(sszInfo *SszInfo, path []PathElement) (*SszInfo, uint64, uint64, error) { +func CalculateOffsetAndLength(sszInfo *SszInfo, path Path) (*SszInfo, uint64, uint64, error) { if sszInfo == nil { return nil, 0, 0, errors.New("sszInfo is nil") } - if len(path) == 0 { + if len(path.Elements) == 0 { return nil, 0, 0, errors.New("path is empty") } walk := sszInfo offset := uint64(0) - for pathIndex, elem := range path { + for pathIndex, elem := range path.Elements { containerInfo, err := walk.ContainerInfo() if err != nil { return nil, 0, 0, fmt.Errorf("could not get field infos: %w", err) @@ -56,7 +56,7 @@ func CalculateOffsetAndLength(sszInfo *SszInfo, path []PathElement) (*SszInfo, u // to the next field's sszInfo, which would have the correct size information. // However, if this is the last element in the path, we need to ensure we return the correct size // for the indexed element. Hence, we return the size from elementSizes. - if pathIndex == len(path)-1 { + if pathIndex == len(path.Elements)-1 { return walk, offset, listInfo.elementSizes[index], nil } } else { From 040661bd68f7ba0d3b23c0fb350131aa5e7c355b Mon Sep 17 00:00:00 2001 From: Manu NALEPA Date: Fri, 31 Oct 2025 21:46:06 +0100 Subject: [PATCH 066/103] Update `go-netroute` to `v0.4.0`. 
(#15949) --- changelog/manu-go-netroute-2.md | 2 ++ deps.bzl | 4 ++-- go.mod | 2 +- go.sum | 4 ++-- 4 files changed, 7 insertions(+), 5 deletions(-) create mode 100644 changelog/manu-go-netroute-2.md diff --git a/changelog/manu-go-netroute-2.md b/changelog/manu-go-netroute-2.md new file mode 100644 index 0000000000..e1f10e0466 --- /dev/null +++ b/changelog/manu-go-netroute-2.md @@ -0,0 +1,2 @@ +### Changed +- Update `go-netroute` to `v0.4.0` \ No newline at end of file diff --git a/deps.bzl b/deps.bzl index 869f799d77..fda38f0761 100644 --- a/deps.bzl +++ b/deps.bzl @@ -2023,8 +2023,8 @@ def prysm_deps(): go_repository( name = "com_github_libp2p_go_netroute", importpath = "github.com/libp2p/go-netroute", - sum = "h1:nqPCXHmeNmgTJnktosJ/sIef9hvwYCrsLxXmfNks/oc=", - version = "v0.3.0", + sum = "h1:sZZx9hyANYUx9PZyqcgE/E1GUG3iEtTZHUEvdtXT7/Q=", + version = "v0.4.0", ) go_repository( name = "com_github_libp2p_go_reuseport", diff --git a/go.mod b/go.mod index 5a81a8ec4f..8bb6cdf86a 100644 --- a/go.mod +++ b/go.mod @@ -177,7 +177,7 @@ require ( github.com/libp2p/go-libp2p-asn-util v0.4.1 // indirect github.com/libp2p/go-msgio v0.3.0 // indirect github.com/libp2p/go-nat v0.2.0 // indirect - github.com/libp2p/go-netroute v0.3.0 // indirect + github.com/libp2p/go-netroute v0.4.0 // indirect github.com/libp2p/go-reuseport v0.4.0 // indirect github.com/libp2p/go-yamux/v4 v4.0.2 // indirect github.com/lunixbochs/vtclean v1.0.0 // indirect diff --git a/go.sum b/go.sum index c72f739e4b..ed1ef641d7 100644 --- a/go.sum +++ b/go.sum @@ -599,8 +599,8 @@ github.com/libp2p/go-msgio v0.3.0 h1:mf3Z8B1xcFN314sWX+2vOTShIE0Mmn2TXn3YCUQGNj0 github.com/libp2p/go-msgio v0.3.0/go.mod h1:nyRM819GmVaF9LX3l03RMh10QdOroF++NBbxAb0mmDM= github.com/libp2p/go-nat v0.2.0 h1:Tyz+bUFAYqGyJ/ppPPymMGbIgNRH+WqC5QrT5fKrrGk= github.com/libp2p/go-nat v0.2.0/go.mod h1:3MJr+GRpRkyT65EpVPBstXLvOlAPzUVlG6Pwg9ohLJk= -github.com/libp2p/go-netroute v0.3.0 h1:nqPCXHmeNmgTJnktosJ/sIef9hvwYCrsLxXmfNks/oc= -github.com/libp2p/go-netroute v0.3.0/go.mod h1:Nkd5ShYgSMS5MUKy/MU2T57xFoOKvvLR92Lic48LEyA= +github.com/libp2p/go-netroute v0.4.0 h1:sZZx9hyANYUx9PZyqcgE/E1GUG3iEtTZHUEvdtXT7/Q= +github.com/libp2p/go-netroute v0.4.0/go.mod h1:Nkd5ShYgSMS5MUKy/MU2T57xFoOKvvLR92Lic48LEyA= github.com/libp2p/go-reuseport v0.4.0 h1:nR5KU7hD0WxXCJbmw7r2rhRYruNRl2koHw8fQscQm2s= github.com/libp2p/go-reuseport v0.4.0/go.mod h1:ZtI03j/wO5hZVDFo2jKywN6bYKWLOy8Se6DrI2E1cLU= github.com/libp2p/go-yamux/v4 v4.0.2 h1:nrLh89LN/LEiqcFiqdKDRHjGstN300C1269K/EX0CPU= From b2a9db0826ffba64193bdc3bdc97c4603f276931 Mon Sep 17 00:00:00 2001 From: Manu NALEPA Date: Sun, 2 Nov 2025 00:18:42 +0100 Subject: [PATCH 067/103] `BeaconBlockContainerToSignedBeaconBlock`: Add Fulu. (#15940) --- changelog/manu-factory.md | 2 ++ consensus-types/blocks/factory.go | 4 ++++ 2 files changed, 6 insertions(+) create mode 100644 changelog/manu-factory.md diff --git a/changelog/manu-factory.md b/changelog/manu-factory.md new file mode 100644 index 0000000000..765a26c571 --- /dev/null +++ b/changelog/manu-factory.md @@ -0,0 +1,2 @@ +### Ignored +- `BeaconBlockContainerToSignedBeaconBlock`: Add Fulu. \ No newline at end of file diff --git a/consensus-types/blocks/factory.go b/consensus-types/blocks/factory.go index 2eb413ccf5..54c34e7271 100644 --- a/consensus-types/blocks/factory.go +++ b/consensus-types/blocks/factory.go @@ -640,6 +640,10 @@ func BuildSignedBeaconBlockFromExecutionPayload(blk interfaces.ReadOnlySignedBea // This is particularly useful for using the values from API calls. 
func BeaconBlockContainerToSignedBeaconBlock(obj *eth.BeaconBlockContainer) (interfaces.ReadOnlySignedBeaconBlock, error) { switch obj.Block.(type) { + case *eth.BeaconBlockContainer_BlindedFuluBlock: + return NewSignedBeaconBlock(obj.GetBlindedFuluBlock()) + case *eth.BeaconBlockContainer_FuluBlock: + return NewSignedBeaconBlock(obj.GetFuluBlock()) case *eth.BeaconBlockContainer_BlindedElectraBlock: return NewSignedBeaconBlock(obj.GetBlindedElectraBlock()) case *eth.BeaconBlockContainer_ElectraBlock: From ec524ce99c56b37e237d6187942a59d71c8a4360 Mon Sep 17 00:00:00 2001 From: Manu NALEPA Date: Mon, 3 Nov 2025 15:48:41 +0100 Subject: [PATCH 068/103] `RODataColumnsVerifier.ValidProposerSignature`: Ensure the expensive signature verification is only performed once for concurrent requests for the same signature data. (#15954) * `signatureData`: Add `string` function. * `RODataColumnsVerifier.ValidProposerSignature`: Ensure the expensive signature verification is only performed once for concurrent requests for the same signature data. Share flight group * `parentState` ==> `state`. * `RODataColumnsVerifier.SidecarProposerExpected: Ensure the expensive index computation is only performed once for concurrent requests.` * Add `wrapAttestationError` * Fix Kasey's comment. * Fix Terence's comment. --- .../sync/validate_beacon_attestation.go | 38 ++++++++--- beacon-chain/verification/BUILD.bazel | 1 + beacon-chain/verification/cache.go | 4 ++ beacon-chain/verification/cache_test.go | 12 ++++ beacon-chain/verification/data_column.go | 64 ++++++++++++------- beacon-chain/verification/initializer.go | 2 + changelog/manu-singleflight.md | 2 + 7 files changed, 91 insertions(+), 32 deletions(-) create mode 100644 changelog/manu-singleflight.md diff --git a/beacon-chain/sync/validate_beacon_attestation.go b/beacon-chain/sync/validate_beacon_attestation.go index 0d6237d81d..aee654cf0f 100644 --- a/beacon-chain/sync/validate_beacon_attestation.go +++ b/beacon-chain/sync/validate_beacon_attestation.go @@ -16,6 +16,7 @@ import ( "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p" "github.com/OffchainLabs/prysm/v6/beacon-chain/slasher/types" "github.com/OffchainLabs/prysm/v6/beacon-chain/state" + "github.com/OffchainLabs/prysm/v6/config/params" "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" "github.com/OffchainLabs/prysm/v6/monitoring/tracing" @@ -67,7 +68,7 @@ func (s *Service) validateCommitteeIndexBeaconAttestation( return pubsub.ValidationReject, errWrongMessage } if err := helpers.ValidateNilAttestation(att); err != nil { - return pubsub.ValidationReject, err + return pubsub.ValidationReject, wrapAttestationError(err, att) } data := att.GetData() @@ -84,7 +85,7 @@ func (s *Service) validateCommitteeIndexBeaconAttestation( return pubsub.ValidationIgnore, err } if err := helpers.ValidateSlotTargetEpoch(data); err != nil { - return pubsub.ValidationReject, err + return pubsub.ValidationReject, wrapAttestationError(err, att) } committeeIndex := att.GetCommitteeIndex() @@ -106,7 +107,7 @@ func (s *Service) validateCommitteeIndexBeaconAttestation( s.hasBadBlock(bytesutil.ToBytes32(data.Target.Root)) || s.hasBadBlock(bytesutil.ToBytes32(data.Source.Root)) { attBadBlockCount.Inc() - return pubsub.ValidationReject, errors.New("attestation data references bad block root") + return pubsub.ValidationReject, wrapAttestationError(errors.New("attestation data references bad block root"), att) } } @@ -126,7 +127,7 @@ func (s *Service) 
validateCommitteeIndexBeaconAttestation( if err = s.cfg.chain.VerifyLmdFfgConsistency(ctx, att); err != nil { tracing.AnnotateError(span, err) attBadLmdConsistencyCount.Inc() - return pubsub.ValidationReject, err + return pubsub.ValidationReject, wrapAttestationError(err, att) } preState, err := s.cfg.chain.AttestationTargetState(ctx, data.Target) @@ -137,7 +138,7 @@ func (s *Service) validateCommitteeIndexBeaconAttestation( validationRes, err := s.validateUnaggregatedAttTopic(ctx, att, preState, *msg.Topic) if validationRes != pubsub.ValidationAccept { - return validationRes, err + return validationRes, wrapAttestationError(err, att) } committee, err := helpers.BeaconCommitteeFromState(ctx, preState, data.Slot, committeeIndex) @@ -148,7 +149,7 @@ func (s *Service) validateCommitteeIndexBeaconAttestation( validationRes, err = validateAttesterData(ctx, att, committee) if validationRes != pubsub.ValidationAccept { - return validationRes, err + return validationRes, wrapAttestationError(err, att) } // Consolidated handling of Electra SingleAttestation vs Phase0 unaggregated attestation @@ -183,7 +184,7 @@ func (s *Service) validateCommitteeIndexBeaconAttestation( validationRes, err = s.validateUnaggregatedAttWithState(ctx, attForValidation, preState) if validationRes != pubsub.ValidationAccept { - return validationRes, err + return validationRes, wrapAttestationError(err, att) } if s.slasherEnabled { @@ -339,7 +340,7 @@ func validateAttestingIndex( // `attestation.attester_index in get_beacon_committee(state, attestation.data.slot, index)`. inCommittee := slices.Contains(committee, attestingIndex) if !inCommittee { - return pubsub.ValidationReject, errors.New("attester is not a member of the committee") + return pubsub.ValidationReject, errors.Errorf("attester %d is not a member of the committee", attestingIndex) } return pubsub.ValidationAccept, nil @@ -392,3 +393,24 @@ func (s *Service) hasBlockAndState(ctx context.Context, blockRoot [32]byte) bool hasState := hasStateSummary || s.cfg.beaconDB.HasState(ctx, blockRoot) return hasState && s.cfg.chain.HasBlock(ctx, blockRoot) } + +func wrapAttestationError(err error, att eth.Att) error { + slotsPerEpoch := params.BeaconConfig().SlotsPerEpoch + committeeIndex := att.GetCommitteeIndex() + + attData := att.GetData() + slot := attData.Slot + slotInEpoch := slot % slotsPerEpoch + oldCommitteeIndex := attData.CommitteeIndex + blockRoot := fmt.Sprintf("%#x", attData.BeaconBlockRoot) + sourceRoot := fmt.Sprintf("%#x", attData.Source.Root) + sourceEpoch := attData.Source.Epoch + targetEpoch := attData.Target.Epoch + targetRoot := fmt.Sprintf("%#x", attData.Target.Root) + + return errors.Wrapf( + err, + "attSlot: %d, attSlotInEpoch: %d, attOldCommitteeIndex: %d, attCommitteeIndex: %d, attBlockRoot: %s, attSource: {root: %s, epoch: %d}, attTarget: {root: %s, epoch: %d}", + slot, slotInEpoch, oldCommitteeIndex, committeeIndex, blockRoot, sourceRoot, sourceEpoch, targetRoot, targetEpoch, + ) +} diff --git a/beacon-chain/verification/BUILD.bazel b/beacon-chain/verification/BUILD.bazel index fc48a5f9bb..d2d6621da6 100644 --- a/beacon-chain/verification/BUILD.bazel +++ b/beacon-chain/verification/BUILD.bazel @@ -44,6 +44,7 @@ go_library( "@com_github_prometheus_client_golang//prometheus/promauto:go_default_library", "@com_github_sirupsen_logrus//:go_default_library", "@com_github_spf13_afero//:go_default_library", + "@org_golang_x_sync//singleflight:go_default_library", ], ) diff --git a/beacon-chain/verification/cache.go b/beacon-chain/verification/cache.go 
index c8801c9e29..71931d002c 100644 --- a/beacon-chain/verification/cache.go +++ b/beacon-chain/verification/cache.go @@ -50,6 +50,10 @@ type signatureData struct { Slot primitives.Slot } +func (d signatureData) concat() string { + return string(d.Root[:]) + string(d.Signature[:]) +} + func (d signatureData) logFields() logrus.Fields { return logrus.Fields{ "root": fmt.Sprintf("%#x", d.Root), diff --git a/beacon-chain/verification/cache_test.go b/beacon-chain/verification/cache_test.go index 06fba601dc..91dd4fc658 100644 --- a/beacon-chain/verification/cache_test.go +++ b/beacon-chain/verification/cache_test.go @@ -21,6 +21,18 @@ func testSignedBlockBlobKeys(t *testing.T, valRoot []byte, slot primitives.Slot, return block, blobs, sks[0], pks[0] } +func TestSignatureDataString(t *testing.T) { + const expected = "\x01\x02\x03\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x05\x06\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + + sigData := signatureData{ + Root: [32]byte{1, 2, 3}, + Signature: [96]byte{4, 5, 6}, + } + + actual := sigData.concat() + require.Equal(t, expected, actual) +} + func TestVerifySignature(t *testing.T) { valRoot := [32]byte{} _, blobs, _, pk := testSignedBlockBlobKeys(t, valRoot[:], 0, 1) diff --git a/beacon-chain/verification/data_column.go b/beacon-chain/verification/data_column.go index c6ae4891c3..e094a7365b 100644 --- a/beacon-chain/verification/data_column.go +++ b/beacon-chain/verification/data_column.go @@ -257,17 +257,25 @@ func (dv *RODataColumnsVerifier) ValidProposerSignature(ctx context.Context) (er continue } - columnVerificationProposerSignatureCache.WithLabelValues("miss").Inc() + // Ensure the expensive signature verification is only performed once for + // concurrent requests for the same signature data. + if _, err, _ = dv.sg.Do(signatureData.concat(), func() (any, error) { + columnVerificationProposerSignatureCache.WithLabelValues("miss").Inc() - // Retrieve the parent state. - parentState, err := dv.parentState(ctx, dataColumn) - if err != nil { - return columnErrBuilder(errors.Wrap(err, "parent state")) - } + // Retrieve the parent state. + parentState, err := dv.state(ctx, dataColumn.ParentRoot()) + if err != nil { + return nil, columnErrBuilder(errors.Wrap(err, "parent state")) + } - // Full verification, which will subsequently be cached for anything sharing the signature cache. - if err = dv.sc.VerifySignature(signatureData, parentState); err != nil { - return columnErrBuilder(errors.Wrap(err, "verify signature")) + // Full verification, which will subsequently be cached for anything sharing the signature cache. + if err = dv.sc.VerifySignature(signatureData, parentState); err != nil { + return nil, columnErrBuilder(errors.Wrap(err, "verify signature")) + } + + return nil, nil + }); err != nil { + return err } } @@ -470,15 +478,25 @@ func (dv *RODataColumnsVerifier) SidecarProposerExpected(ctx context.Context) (e idx, cached := dv.pc.Proposer(checkpoint, dataColumnSlot) if !cached { - // Retrieve the parent state. 
- parentState, err := dv.parentState(ctx, dataColumn) - if err != nil { - return columnErrBuilder(errors.Wrap(err, "parent state")) - } + parentRoot := dataColumn.ParentRoot() + // Ensure the expensive index computation is only performed once for + // concurrent requests for the same signature data. + if _, err, _ := dv.sg.Do(fmt.Sprintf("%#x", parentRoot), func() (any, error) { + // Retrieve the parent state. + parentState, err := dv.state(ctx, parentRoot) + if err != nil { + return nil, columnErrBuilder(errors.Wrap(err, "parent state")) + } - idx, err = dv.pc.ComputeProposer(ctx, parentRoot, dataColumnSlot, parentState) - if err != nil { - return columnErrBuilder(errors.Wrap(err, "compute proposer")) + // Compute the proposer index. + idx, err = dv.pc.ComputeProposer(ctx, parentRoot, dataColumnSlot, parentState) + if err != nil { + return nil, columnErrBuilder(errors.Wrap(err, "compute proposer")) + } + + return nil, nil + }); err != nil { + return err } } @@ -490,23 +508,21 @@ func (dv *RODataColumnsVerifier) SidecarProposerExpected(ctx context.Context) (e return nil } -// parentState retrieves the parent state of the data column from the cache if possible, else retrieves it from the state by rooter. -func (dv *RODataColumnsVerifier) parentState(ctx context.Context, dataColumn blocks.RODataColumn) (state.BeaconState, error) { - parentRoot := dataColumn.ParentRoot() - +// state retrieves the state of the corresponding root from the cache if possible, else retrieves it from the state by rooter. +func (dv *RODataColumnsVerifier) state(ctx context.Context, root [fieldparams.RootLength]byte) (state.BeaconState, error) { // If the parent root is already in the cache, return it. - if st, ok := dv.stateByRoot[parentRoot]; ok { + if st, ok := dv.stateByRoot[root]; ok { return st, nil } // Retrieve the parent state from the state by rooter. - st, err := dv.sr.StateByRoot(ctx, parentRoot) + st, err := dv.sr.StateByRoot(ctx, root) if err != nil { return nil, errors.Wrap(err, "state by root") } // Store the parent state in the cache. - dv.stateByRoot[parentRoot] = st + dv.stateByRoot[root] = st return st, nil } diff --git a/beacon-chain/verification/initializer.go b/beacon-chain/verification/initializer.go index 79511e4a22..a949690055 100644 --- a/beacon-chain/verification/initializer.go +++ b/beacon-chain/verification/initializer.go @@ -14,6 +14,7 @@ import ( "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + "golang.org/x/sync/singleflight" ) // Forkchoicer represents the forkchoice methods that the verifiers need. @@ -41,6 +42,7 @@ type sharedResources struct { pc proposerCache sr StateByRooter ic *inclusionProofCache + sg singleflight.Group } // Initializer is used to create different Verifiers. diff --git a/changelog/manu-singleflight.md b/changelog/manu-singleflight.md new file mode 100644 index 0000000000..0b0e7174e1 --- /dev/null +++ b/changelog/manu-singleflight.md @@ -0,0 +1,2 @@ +### Fixed +- `RODataColumnsVerifier.ValidProposerSignature`: Ensure the expensive signature verification is only performed once for concurrent requests for the same signature data. 
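[Editor's note] For readers unfamiliar with the deduplication pattern introduced in the patch above (#15954), here is a minimal, self-contained sketch of how `golang.org/x/sync/singleflight` coalesces concurrent calls that share a key. The key string, the `expensiveVerify` helper, and the timing below are illustrative stand-ins, not the patch's actual code.

```go
package main

import (
	"fmt"
	"sync"
	"time"

	"golang.org/x/sync/singleflight"
)

// expensiveVerify stands in for a costly signature check (illustrative only).
func expensiveVerify(key string) error {
	time.Sleep(100 * time.Millisecond) // simulate BLS verification cost
	fmt.Println("verified", key)
	return nil
}

func main() {
	var g singleflight.Group
	var wg sync.WaitGroup

	// Ten goroutines ask to verify the same signature data concurrently;
	// singleflight runs expensiveVerify once and shares the result with all callers.
	for i := 0; i < 10; i++ {
		wg.Add(1)
		go func() {
			defer wg.Done()
			_, err, shared := g.Do("block-root|signature", func() (any, error) {
				return nil, expensiveVerify("block-root|signature")
			})
			_ = shared // true for callers that reused the in-flight result
			if err != nil {
				fmt.Println("verification failed:", err)
			}
		}()
	}
	wg.Wait()
}
```

In the patch itself the key is derived from the signature data (block root concatenated with the signature bytes), so only requests for identical signature data are coalesced.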
\ No newline at end of file From 11c6325b54dfc708213639c9e534b8fab23bac72 Mon Sep 17 00:00:00 2001 From: Muzry Date: Mon, 3 Nov 2025 23:14:08 +0800 Subject: [PATCH 069/103] use filepath to perform path operations correctly on Windows (#15953) --- changelog/muzry_fix_filepath_on_windows.md | 3 +++ cmd/beacon-chain/storage/options.go | 8 ++++---- 2 files changed, 7 insertions(+), 4 deletions(-) create mode 100644 changelog/muzry_fix_filepath_on_windows.md diff --git a/changelog/muzry_fix_filepath_on_windows.md b/changelog/muzry_fix_filepath_on_windows.md new file mode 100644 index 0000000000..a15408aa6d --- /dev/null +++ b/changelog/muzry_fix_filepath_on_windows.md @@ -0,0 +1,3 @@ +### Fixed + +- use filepath for path operations (clean, join, etc.) to ensure correct behavior on Windows diff --git a/cmd/beacon-chain/storage/options.go b/cmd/beacon-chain/storage/options.go index 18dc9581f8..a6bf4aa8dc 100644 --- a/cmd/beacon-chain/storage/options.go +++ b/cmd/beacon-chain/storage/options.go @@ -4,7 +4,7 @@ import ( "fmt" "io" "os" - "path" + "path/filepath" "slices" "strings" @@ -120,7 +120,7 @@ func detectLayout(dir string, c stringFlagGetter) (string, error) { return explicit, nil } - dir = path.Clean(dir) + dir = filepath.Clean(dir) // nosec: this path is provided by the node operator via flag base, err := os.Open(dir) // #nosec G304 if err != nil { @@ -158,7 +158,7 @@ func blobStoragePath(c *cli.Context) string { blobsPath := c.Path(BlobStoragePathFlag.Name) if blobsPath == "" { // append a "blobs" subdir to the end of the data dir path - blobsPath = path.Join(c.String(cmd.DataDirFlag.Name), "blobs") + blobsPath = filepath.Join(c.String(cmd.DataDirFlag.Name), "blobs") } return blobsPath } @@ -167,7 +167,7 @@ func dataColumnStoragePath(c *cli.Context) string { dataColumnsPath := c.Path(DataColumnStoragePathFlag.Name) if dataColumnsPath == "" { // append a "data-columns" subdir to the end of the data dir path - dataColumnsPath = path.Join(c.String(cmd.DataDirFlag.Name), "data-columns") + dataColumnsPath = filepath.Join(c.String(cmd.DataDirFlag.Name), "data-columns") } return dataColumnsPath From d394f00e9f3dde049f44651b6e56c2023e334290 Mon Sep 17 00:00:00 2001 From: Manu NALEPA Date: Mon, 3 Nov 2025 18:00:37 +0100 Subject: [PATCH 070/103] `beacon_data_column_sidecar_gossip_verification_milliseconds`: Adjust buckets. (#15964) * `beacon_data_column_sidecar_gossip_verification_milliseconds`: Divide by 10. * Fix Kasey's comment. --- beacon-chain/sync/metrics.go | 2 +- changelog/manu-metric.md | 2 ++ 2 files changed, 3 insertions(+), 1 deletion(-) create mode 100644 changelog/manu-metric.md diff --git a/beacon-chain/sync/metrics.go b/beacon-chain/sync/metrics.go index 90735c030a..96601bac06 100644 --- a/beacon-chain/sync/metrics.go +++ b/beacon-chain/sync/metrics.go @@ -214,7 +214,7 @@ var ( prometheus.HistogramOpts{ Name: "beacon_data_column_sidecar_gossip_verification_milliseconds", Help: "Captures the time taken to verify data column sidecars.", - Buckets: []float64{100, 250, 500, 750, 1000, 1500, 2000, 4000, 8000, 12000, 16000}, + Buckets: []float64{2, 5, 10, 25, 50, 75, 100, 250, 500, 1000, 2000}, }, ) diff --git a/changelog/manu-metric.md b/changelog/manu-metric.md new file mode 100644 index 0000000000..afa1fd2245 --- /dev/null +++ b/changelog/manu-metric.md @@ -0,0 +1,2 @@ +### Ignored +- `beacon_data_column_sidecar_gossip_verification_milliseconds`: Divide by 10. 
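[Editor's note] As background for the `path` to `path/filepath` change in the Windows fix above (#15953): the `path` package always joins with forward slashes, while `path/filepath` uses the operating system's separator. A minimal sketch follows; the directory value is a made-up example, not taken from the patch.

```go
package main

import (
	"fmt"
	"path"
	"path/filepath"
)

func main() {
	dataDir := `C:\prysm\data` // hypothetical Windows data directory

	// path always joins with "/", regardless of OS.
	fmt.Println(path.Join(dataDir, "blobs")) // C:\prysm\data/blobs

	// filepath uses the OS-specific separator ("\" on Windows, "/" elsewhere).
	fmt.Println(filepath.Join(dataDir, "blobs")) // C:\prysm\data\blobs on Windows
}
```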
\ No newline at end of file From 1f926142b8492e67c5c1cb9efafdcb85d4f144a3 Mon Sep 17 00:00:00 2001 From: terence Date: Mon, 3 Nov 2025 12:54:02 -0500 Subject: [PATCH 071/103] Update spec test to v1.6.0-beta.2 (#15960) --- WORKSPACE | 4 +- changelog/ttsao-v1.6.0-beta.2.md | 3 + specrefs/.ethspecify.yml | 2 +- specrefs/functions.yml | 115 +++++++++++++++++++++---------- 4 files changed, 86 insertions(+), 38 deletions(-) create mode 100644 changelog/ttsao-v1.6.0-beta.2.md diff --git a/WORKSPACE b/WORKSPACE index 39d0904098..651d8177c3 100644 --- a/WORKSPACE +++ b/WORKSPACE @@ -253,7 +253,7 @@ filegroup( url = "https://github.com/ethereum/EIPs/archive/5480440fe51742ed23342b68cf106cefd427e39d.tar.gz", ) -consensus_spec_version = "v1.6.0-beta.1" +consensus_spec_version = "v1.6.0-beta.2" load("@prysm//tools:download_spectests.bzl", "consensus_spec_tests") @@ -278,7 +278,7 @@ filegroup( visibility = ["//visibility:public"], ) """, - integrity = "sha256-yrq3tdwPS8Ri+ueeLAHssIT3ssMrX7zvHiJ8Xf9GVYs=", + integrity = "sha256-MForEP9dTe0z3ZkTHjX4H6waSkSTghf3gQHPwrSCCro=", strip_prefix = "consensus-specs-" + consensus_spec_version[1:], url = "https://github.com/ethereum/consensus-specs/archive/refs/tags/%s.tar.gz" % consensus_spec_version, ) diff --git a/changelog/ttsao-v1.6.0-beta.2.md b/changelog/ttsao-v1.6.0-beta.2.md new file mode 100644 index 0000000000..94b777e0f4 --- /dev/null +++ b/changelog/ttsao-v1.6.0-beta.2.md @@ -0,0 +1,3 @@ +### Changed + +- Updated consensus spec tests to v1.6.0-beta.2 \ No newline at end of file diff --git a/specrefs/.ethspecify.yml b/specrefs/.ethspecify.yml index 5e8446447f..2fd5381136 100644 --- a/specrefs/.ethspecify.yml +++ b/specrefs/.ethspecify.yml @@ -1,4 +1,4 @@ -version: v1.6.0-beta.1 +version: v1.6.0-beta.2 style: full specrefs: diff --git a/specrefs/functions.yml b/specrefs/functions.yml index 0dc9e4799b..843c822400 100644 --- a/specrefs/functions.yml +++ b/specrefs/functions.yml @@ -1560,26 +1560,30 @@ - file: beacon-chain/core/altair/attestation.go search: func AttestationParticipationFlagIndices( spec: | - + def get_attestation_participation_flag_indices( state: BeaconState, data: AttestationData, inclusion_delay: uint64 ) -> Sequence[int]: """ Return the flag indices that are satisfied by an attestation. 
""" + # Matching source if data.target.epoch == get_current_epoch(state): justified_checkpoint = state.current_justified_checkpoint else: justified_checkpoint = state.previous_justified_checkpoint - - # Matching roots is_matching_source = data.source == justified_checkpoint - is_matching_target = is_matching_source and data.target.root == get_block_root( - state, data.target.epoch - ) - is_matching_head = is_matching_target and data.beacon_block_root == get_block_root_at_slot( - state, data.slot - ) + + # Matching target + target_root = get_block_root(state, data.target.epoch) + target_root_matches = data.target.root == target_root + is_matching_target = is_matching_source and target_root_matches + + # Matching head + head_root = get_block_root_at_slot(state, data.slot) + head_root_matches = data.beacon_block_root == head_root + is_matching_head = is_matching_target and head_root_matches + assert is_matching_source participation_flag_indices = [] @@ -1598,26 +1602,30 @@ - file: beacon-chain/core/altair/attestation.go search: func AttestationParticipationFlagIndices( spec: | - + def get_attestation_participation_flag_indices( state: BeaconState, data: AttestationData, inclusion_delay: uint64 ) -> Sequence[int]: """ Return the flag indices that are satisfied by an attestation. """ + # Matching source if data.target.epoch == get_current_epoch(state): justified_checkpoint = state.current_justified_checkpoint else: justified_checkpoint = state.previous_justified_checkpoint - - # Matching roots is_matching_source = data.source == justified_checkpoint - is_matching_target = is_matching_source and data.target.root == get_block_root( - state, data.target.epoch - ) - is_matching_head = is_matching_target and data.beacon_block_root == get_block_root_at_slot( - state, data.slot - ) + + # Matching target + target_root = get_block_root(state, data.target.epoch) + target_root_matches = data.target.root == target_root + is_matching_target = is_matching_source and target_root_matches + + # Matching head + head_root = get_block_root_at_slot(state, data.slot) + head_root_matches = data.beacon_block_root == head_root + is_matching_head = is_matching_target and head_root_matches + assert is_matching_source participation_flag_indices = [] @@ -5949,7 +5957,7 @@ - file: beacon-chain/core/blocks/payload.go search: func ProcessPayload( spec: | - + def process_execution_payload( state: BeaconState, body: BeaconBlockBody, execution_engine: ExecutionEngine ) -> None: @@ -5961,20 +5969,23 @@ assert payload.prev_randao == get_randao_mix(state, get_current_epoch(state)) # Verify timestamp assert payload.timestamp == compute_time_at_slot(state, state.slot) - - # [New in Deneb:EIP4844] Verify commitments are under limit + # [New in Deneb:EIP4844] + # Verify commitments are under limit assert len(body.blob_kzg_commitments) <= MAX_BLOBS_PER_BLOCK - # Verify the execution payload is valid - # [Modified in Deneb:EIP4844] Pass `versioned_hashes` to Execution Engine - # [Modified in Deneb:EIP4788] Pass `parent_beacon_block_root` to Execution Engine + # [New in Deneb:EIP4844] + # Compute list of versioned hashes versioned_hashes = [ kzg_commitment_to_versioned_hash(commitment) for commitment in body.blob_kzg_commitments ] + + # Verify the execution payload is valid assert execution_engine.verify_and_notify_new_payload( NewPayloadRequest( execution_payload=payload, + # [New in Deneb:EIP4844] versioned_hashes=versioned_hashes, + # [New in Deneb:EIP4788] parent_beacon_block_root=state.latest_block_header.parent_root, ) ) @@ -6008,7 
+6019,7 @@ - file: beacon-chain/core/blocks/payload.go search: func ProcessPayload( spec: | - + def process_execution_payload( state: BeaconState, body: BeaconBlockBody, execution_engine: ExecutionEngine ) -> None: @@ -6020,12 +6031,16 @@ assert payload.prev_randao == get_randao_mix(state, get_current_epoch(state)) # Verify timestamp assert payload.timestamp == compute_time_at_slot(state, state.slot) - # [Modified in Electra:EIP7691] Verify commitments are under limit + # [Modified in Electra:EIP7691] + # Verify commitments are under limit assert len(body.blob_kzg_commitments) <= MAX_BLOBS_PER_BLOCK_ELECTRA - # Verify the execution payload is valid + + # Compute list of versioned hashes versioned_hashes = [ kzg_commitment_to_versioned_hash(commitment) for commitment in body.blob_kzg_commitments ] + + # Verify the execution payload is valid assert execution_engine.verify_and_notify_new_payload( NewPayloadRequest( execution_payload=payload, @@ -6035,6 +6050,7 @@ execution_requests=body.execution_requests, ) ) + # Cache execution payload header state.latest_execution_payload_header = ExecutionPayloadHeader( parent_hash=payload.parent_hash, @@ -6062,7 +6078,7 @@ - file: beacon-chain/core/blocks/payload.go search: func ProcessPayload( spec: | - + def process_execution_payload( state: BeaconState, body: BeaconBlockBody, execution_engine: ExecutionEngine ) -> None: @@ -6080,10 +6096,13 @@ len(body.blob_kzg_commitments) <= get_blob_parameters(get_current_epoch(state)).max_blobs_per_block ) - # Verify the execution payload is valid + + # Compute list of versioned hashes versioned_hashes = [ kzg_commitment_to_versioned_hash(commitment) for commitment in body.blob_kzg_commitments ] + + # Verify the execution payload is valid assert execution_engine.verify_and_notify_new_payload( NewPayloadRequest( execution_payload=payload, @@ -6092,6 +6111,7 @@ execution_requests=body.execution_requests, ) ) + # Cache execution payload header state.latest_execution_payload_header = ExecutionPayloadHeader( parent_hash=payload.parent_hash, @@ -6924,16 +6944,40 @@ - file: beacon-chain/core/altair/block.go search: func ProcessSyncAggregate( spec: | - + def process_sync_aggregate(state: BeaconState, sync_aggregate: SyncAggregate) -> None: # Verify sync committee aggregate signature signing over the previous slot block root committee_pubkeys = state.current_sync_committee.pubkeys - participant_pubkeys = [ - pubkey for pubkey, bit in zip(committee_pubkeys, sync_aggregate.sync_committee_bits) if bit - ] + committee_bits = sync_aggregate.sync_committee_bits + if sum(committee_bits) == SYNC_COMMITTEE_SIZE: + # All members participated - use precomputed aggregate key + participant_pubkeys = [state.current_sync_committee.aggregate_pubkey] + elif sum(committee_bits) > SYNC_COMMITTEE_SIZE // 2: + # More than half participated - subtract non-participant keys. 
+ # First determine nonparticipating members + non_participant_pubkeys = [ + pubkey for pubkey, bit in zip(committee_pubkeys, committee_bits) if not bit + ] + # Compute aggregate of non-participants + non_participant_aggregate = eth_aggregate_pubkeys(non_participant_pubkeys) + # Subtract non-participants from the full aggregate + # This is equivalent to: aggregate_pubkey + (-non_participant_aggregate) + participant_pubkey = bls.add( + bls.bytes48_to_G1(state.current_sync_committee.aggregate_pubkey), + bls.neg(bls.bytes48_to_G1(non_participant_aggregate)), + ) + participant_pubkeys = [BLSPubkey(bls.G1_to_bytes48(participant_pubkey))] + else: + # Less than half participated - aggregate participant keys + participant_pubkeys = [ + pubkey + for pubkey, bit in zip(committee_pubkeys, sync_aggregate.sync_committee_bits) + if bit + ] previous_slot = max(state.slot, Slot(1)) - Slot(1) domain = get_domain(state, DOMAIN_SYNC_COMMITTEE, compute_epoch_at_slot(previous_slot)) signing_root = compute_signing_root(get_block_root_at_slot(state, previous_slot), domain) + # Note: eth_fast_aggregate_verify works with a singleton list containing an aggregated key assert eth_fast_aggregate_verify( participant_pubkeys, signing_root, sync_aggregate.sync_committee_signature ) @@ -7196,7 +7240,7 @@ - file: beacon-chain/core/blocks/withdrawals.go search: func ProcessWithdrawals( spec: | - + def process_withdrawals(state: BeaconState, payload: ExecutionPayload) -> None: # [Modified in Electra:EIP7251] expected_withdrawals, processed_partial_withdrawals_count = get_expected_withdrawals(state) @@ -7206,7 +7250,8 @@ for withdrawal in expected_withdrawals: decrease_balance(state, withdrawal.validator_index, withdrawal.amount) - # [New in Electra:EIP7251] Update pending partial withdrawals + # [New in Electra:EIP7251] + # Update pending partial withdrawals state.pending_partial_withdrawals = state.pending_partial_withdrawals[ processed_partial_withdrawals_count: ] From d0f5253b8d22357e6cbba477fbd9f94cde51c0e6 Mon Sep 17 00:00:00 2001 From: terence Date: Mon, 3 Nov 2025 18:59:03 -0500 Subject: [PATCH 072/103] Update go-bitfield from prysmaticlabs to OffchainLabs (#15968) --- api/client/builder/client_test.go | 2 +- api/client/builder/types_test.go | 2 +- beacon-chain/blockchain/process_block.go | 2 +- beacon-chain/blockchain/process_block_test.go | 2 +- beacon-chain/cache/attestation_test.go | 2 +- beacon-chain/core/altair/attestation_test.go | 2 +- beacon-chain/core/altair/block_test.go | 2 +- beacon-chain/core/altair/upgrade_test.go | 2 +- .../core/blocks/attestation_regression_test.go | 2 +- beacon-chain/core/blocks/attestation_test.go | 2 +- .../core/epoch/precompute/attestation_test.go | 2 +- .../precompute/justification_finalization.go | 2 +- .../justification_finalization_test.go | 2 +- .../epoch/precompute/reward_penalty_test.go | 2 +- beacon-chain/core/helpers/beacon_committee.go | 2 +- .../core/helpers/beacon_committee_test.go | 2 +- .../core/helpers/weak_subjectivity_test.go | 1 - .../altair_transition_no_verify_sig_test.go | 2 +- .../bellatrix_transition_no_verify_sig_test.go | 2 +- .../core/transition/transition_test.go | 2 +- .../monitor/process_attestation_test.go | 2 +- .../monitor/process_sync_committee_test.go | 2 +- .../attestations/kv/aggregated_test.go | 2 +- .../operations/attestations/kv/block_test.go | 2 +- .../attestations/kv/forkchoice_test.go | 2 +- .../operations/attestations/kv/seen_bits.go | 2 +- .../attestations/kv/seen_bits_test.go | 2 +- .../attestations/kv/unaggregated_test.go | 2 +- 
.../attestations/prepare_forkchoice.go | 2 +- .../attestations/prepare_forkchoice_test.go | 2 +- .../attestations/prune_expired_test.go | 2 +- beacon-chain/p2p/broadcaster_test.go | 2 +- beacon-chain/p2p/discovery.go | 2 +- beacon-chain/p2p/discovery_test.go | 2 +- beacon-chain/p2p/peers/benchmark_test.go | 2 +- beacon-chain/p2p/peers/status.go | 2 +- beacon-chain/p2p/peers/status_test.go | 2 +- beacon-chain/p2p/subnets.go | 2 +- beacon-chain/p2p/subnets_test.go | 2 +- beacon-chain/p2p/utils.go | 2 +- .../rpc/eth/beacon/handlers_pool_test.go | 2 +- beacon-chain/rpc/eth/beacon/handlers_test.go | 2 +- beacon-chain/rpc/eth/node/handlers_test.go | 2 +- beacon-chain/rpc/eth/rewards/handlers_test.go | 2 +- .../rpc/eth/validator/handlers_test.go | 2 +- beacon-chain/rpc/prysm/beacon/handlers_test.go | 2 +- .../rpc/prysm/beacon/ssz_query_test.go | 2 +- .../prysm/v1alpha1/beacon/attestations_test.go | 2 +- .../prysm/v1alpha1/beacon/validators_test.go | 2 +- .../rpc/prysm/v1alpha1/debug/block_test.go | 2 +- .../v1alpha1/validator/aggregator_test.go | 2 +- .../v1alpha1/validator/proposer_altair.go | 2 +- .../v1alpha1/validator/proposer_altair_test.go | 2 +- .../validator/proposer_attestations.go | 2 +- .../validator/proposer_attestations_electra.go | 2 +- .../proposer_attestations_electra_test.go | 2 +- .../validator/proposer_attestations_test.go | 2 +- .../prysm/v1alpha1/validator/proposer_test.go | 2 +- .../validator/proposer_utils_bench_test.go | 2 +- .../rpc/prysm/validator/handlers_test.go | 2 +- .../validator/validator_performance_test.go | 2 +- beacon-chain/state/interfaces.go | 2 +- .../state/state-native/beacon_state.go | 2 +- .../state/state-native/getters_checkpoint.go | 2 +- .../state-native/getters_checkpoint_test.go | 2 +- beacon-chain/state/state-native/hasher_test.go | 2 +- .../state/state-native/references_test.go | 2 +- .../state/state-native/setters_checkpoint.go | 2 +- beacon-chain/state/state-native/state_test.go | 2 +- .../state/testing/getters_checkpoint.go | 2 +- .../sync/pending_attestations_queue_test.go | 2 +- beacon-chain/sync/rpc_metadata.go | 2 +- beacon-chain/sync/rpc_metadata_test.go | 2 +- .../subscriber_beacon_aggregate_proof_test.go | 2 +- .../sync/subscriber_beacon_blocks_test.go | 2 +- .../sync/validate_aggregate_proof_test.go | 2 +- .../sync/validate_beacon_attestation_test.go | 2 +- .../sync/validate_proposer_slashing_test.go | 2 +- .../validate_sync_contribution_proof_test.go | 2 +- changelog/ttsao-go_bitfield.md | 3 +++ cmd/prysmctl/p2p/client.go | 2 +- consensus-types/blocks/proto_test.go | 2 +- consensus-types/hdiff/state_diff.go | 2 +- .../primitives/committee_bits_mainnet.go | 2 +- .../primitives/committee_bits_minimal.go | 2 +- consensus-types/wrapper/metadata.go | 2 +- deps.bzl | 6 +++--- encoding/ssz/helpers.go | 2 +- encoding/ssz/helpers_test.go | 2 +- encoding/ssz/merkleize_test.go | 2 +- encoding/ssz/query/bitlist.go | 2 +- encoding/ssz/query/query_test.go | 2 +- encoding/ssz/query/tag_parser.go | 2 +- encoding/ssz/query/testutil/util.go | 2 +- go.mod | 2 +- go.sum | 4 ++-- proto/eth/v1/attestation.pb.go | 4 ++-- proto/eth/v1/attestation.proto | 2 +- proto/eth/v1/beacon_block.pb.go | 4 ++-- proto/prysm/v1alpha1/attestation.go | 2 +- proto/prysm/v1alpha1/attestation.pb.go | 8 ++++---- proto/prysm/v1alpha1/attestation.proto | 4 ++-- .../attestation/aggregation/aggregation.go | 2 +- .../attestations/attestations_test.go | 2 +- .../aggregation/attestations/maxcover.go | 2 +- .../aggregation/attestations/maxcover_test.go | 2 +- 
.../attestation/aggregation/maxcover.go | 2 +- .../aggregation/maxcover_bench_test.go | 2 +- .../attestation/aggregation/maxcover_test.go | 2 +- .../sync_contribution/naive_test.go | 2 +- .../aggregation/testing/bitlistutils.go | 2 +- .../v1alpha1/attestation/attestation_utils.go | 2 +- .../attestation/attestation_utils_test.go | 2 +- proto/prysm/v1alpha1/beacon_core_types.pb.go | 4 ++-- proto/prysm/v1alpha1/beacon_state.pb.go | 18 +++++++++--------- proto/prysm/v1alpha1/beacon_state.proto | 16 ++++++++-------- .../v1alpha1/metadata/metadata_interfaces.go | 2 +- proto/prysm/v1alpha1/p2p_messages.pb.go | 12 ++++++------ proto/prysm/v1alpha1/p2p_messages.proto | 10 +++++----- proto/prysm/v1alpha1/sync_committee.pb.go | 4 ++-- proto/prysm/v1alpha1/sync_committee_mainnet.go | 2 +- proto/prysm/v1alpha1/sync_committee_minimal.go | 2 +- proto/ssz_proto_library.bzl | 12 ++++++------ proto/ssz_query/testing/test_containers.pb.go | 8 ++++---- proto/ssz_query/testing/test_containers.proto | 6 +++--- testing/endtoend/evaluators/slashing_helper.go | 2 +- testing/util/altair.go | 2 +- testing/util/attestation.go | 2 +- testing/util/bellatrix.go | 2 +- testing/util/capella_block.go | 2 +- testing/util/electra_block.go | 2 +- testing/util/fulu_block.go | 2 +- testing/util/state.go | 2 +- testing/util/sync_aggregate.go | 2 +- tools/bootnode/bootnode.go | 2 +- validator/client/aggregate_test.go | 2 +- validator/client/attest.go | 2 +- validator/client/attest_test.go | 2 +- validator/client/runner_test.go | 2 +- validator/client/sync_committee_test.go | 2 +- .../types/custom_mappers_test.go | 2 +- .../remote-web3signer/types/mock/mocks.go | 2 +- 142 files changed, 188 insertions(+), 186 deletions(-) create mode 100644 changelog/ttsao-go_bitfield.md diff --git a/api/client/builder/client_test.go b/api/client/builder/client_test.go index 79be7ae79d..9f058ac0f3 100644 --- a/api/client/builder/client_test.go +++ b/api/client/builder/client_test.go @@ -10,6 +10,7 @@ import ( "net/url" "testing" + "github.com/OffchainLabs/go-bitfield" "github.com/OffchainLabs/prysm/v6/api" "github.com/OffchainLabs/prysm/v6/api/server/structs" "github.com/OffchainLabs/prysm/v6/config/params" @@ -22,7 +23,6 @@ import ( "github.com/OffchainLabs/prysm/v6/testing/assert" "github.com/OffchainLabs/prysm/v6/testing/require" "github.com/OffchainLabs/prysm/v6/testing/util" - "github.com/prysmaticlabs/go-bitfield" log "github.com/sirupsen/logrus" ) diff --git a/api/client/builder/types_test.go b/api/client/builder/types_test.go index 9dff0c049e..016bf9678b 100644 --- a/api/client/builder/types_test.go +++ b/api/client/builder/types_test.go @@ -11,6 +11,7 @@ import ( "os" "testing" + "github.com/OffchainLabs/go-bitfield" "github.com/OffchainLabs/prysm/v6/api/server/structs" fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" consensusblocks "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" @@ -22,7 +23,6 @@ import ( "github.com/OffchainLabs/prysm/v6/testing/require" "github.com/ethereum/go-ethereum/common/hexutil" "github.com/pkg/errors" - "github.com/prysmaticlabs/go-bitfield" ) func ezDecode(t *testing.T, s string) []byte { diff --git a/beacon-chain/blockchain/process_block.go b/beacon-chain/blockchain/process_block.go index 3ff4d88089..bb7169c2f2 100644 --- a/beacon-chain/blockchain/process_block.go +++ b/beacon-chain/blockchain/process_block.go @@ -5,6 +5,7 @@ import ( "fmt" "time" + "github.com/OffchainLabs/go-bitfield" "github.com/OffchainLabs/prysm/v6/beacon-chain/core/blocks" 
"github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" "github.com/OffchainLabs/prysm/v6/beacon-chain/core/peerdas" @@ -29,7 +30,6 @@ import ( "github.com/OffchainLabs/prysm/v6/runtime/version" "github.com/OffchainLabs/prysm/v6/time/slots" "github.com/pkg/errors" - "github.com/prysmaticlabs/go-bitfield" "github.com/sirupsen/logrus" ) diff --git a/beacon-chain/blockchain/process_block_test.go b/beacon-chain/blockchain/process_block_test.go index 1494d3da8e..f1daba177e 100644 --- a/beacon-chain/blockchain/process_block_test.go +++ b/beacon-chain/blockchain/process_block_test.go @@ -9,6 +9,7 @@ import ( "testing" "time" + "github.com/OffchainLabs/go-bitfield" mock "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/testing" "github.com/OffchainLabs/prysm/v6/beacon-chain/cache" "github.com/OffchainLabs/prysm/v6/beacon-chain/core/blocks" @@ -49,7 +50,6 @@ import ( "github.com/ethereum/go-ethereum/common" gethtypes "github.com/ethereum/go-ethereum/core/types" "github.com/pkg/errors" - "github.com/prysmaticlabs/go-bitfield" logTest "github.com/sirupsen/logrus/hooks/test" ) diff --git a/beacon-chain/cache/attestation_test.go b/beacon-chain/cache/attestation_test.go index a4d5aa548d..82ab952f15 100644 --- a/beacon-chain/cache/attestation_test.go +++ b/beacon-chain/cache/attestation_test.go @@ -3,6 +3,7 @@ package cache import ( "testing" + "github.com/OffchainLabs/go-bitfield" "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" "github.com/OffchainLabs/prysm/v6/crypto/bls/blst" "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" @@ -10,7 +11,6 @@ import ( "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1/attestation" "github.com/OffchainLabs/prysm/v6/testing/assert" "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/prysmaticlabs/go-bitfield" ) func TestAdd(t *testing.T) { diff --git a/beacon-chain/core/altair/attestation_test.go b/beacon-chain/core/altair/attestation_test.go index 7aa93e0142..d7af583c23 100644 --- a/beacon-chain/core/altair/attestation_test.go +++ b/beacon-chain/core/altair/attestation_test.go @@ -4,6 +4,7 @@ import ( "fmt" "testing" + "github.com/OffchainLabs/go-bitfield" "github.com/OffchainLabs/prysm/v6/beacon-chain/core/altair" "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" "github.com/OffchainLabs/prysm/v6/beacon-chain/core/signing" @@ -22,7 +23,6 @@ import ( "github.com/OffchainLabs/prysm/v6/testing/require" "github.com/OffchainLabs/prysm/v6/testing/util" gofuzz "github.com/google/gofuzz" - "github.com/prysmaticlabs/go-bitfield" ) func TestProcessAttestations_InclusionDelayFailure(t *testing.T) { diff --git a/beacon-chain/core/altair/block_test.go b/beacon-chain/core/altair/block_test.go index 2642390f86..be8041d242 100644 --- a/beacon-chain/core/altair/block_test.go +++ b/beacon-chain/core/altair/block_test.go @@ -4,6 +4,7 @@ import ( "math" "testing" + "github.com/OffchainLabs/go-bitfield" "github.com/OffchainLabs/prysm/v6/beacon-chain/core/altair" "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" "github.com/OffchainLabs/prysm/v6/beacon-chain/core/signing" @@ -19,7 +20,6 @@ import ( "github.com/OffchainLabs/prysm/v6/testing/require" "github.com/OffchainLabs/prysm/v6/testing/util" "github.com/OffchainLabs/prysm/v6/time/slots" - "github.com/prysmaticlabs/go-bitfield" ) func TestProcessSyncCommittee_PerfectParticipation(t *testing.T) { diff --git a/beacon-chain/core/altair/upgrade_test.go b/beacon-chain/core/altair/upgrade_test.go index 15117ac696..715f57b0c6 100644 --- 
a/beacon-chain/core/altair/upgrade_test.go +++ b/beacon-chain/core/altair/upgrade_test.go @@ -3,6 +3,7 @@ package altair_test import ( "testing" + "github.com/OffchainLabs/go-bitfield" "github.com/OffchainLabs/prysm/v6/beacon-chain/core/altair" "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" "github.com/OffchainLabs/prysm/v6/beacon-chain/core/time" @@ -12,7 +13,6 @@ import ( "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1/attestation" "github.com/OffchainLabs/prysm/v6/testing/require" "github.com/OffchainLabs/prysm/v6/testing/util" - "github.com/prysmaticlabs/go-bitfield" ) func TestTranslateParticipation(t *testing.T) { diff --git a/beacon-chain/core/blocks/attestation_regression_test.go b/beacon-chain/core/blocks/attestation_regression_test.go index cc3c367beb..c23a110806 100644 --- a/beacon-chain/core/blocks/attestation_regression_test.go +++ b/beacon-chain/core/blocks/attestation_regression_test.go @@ -5,6 +5,7 @@ import ( "os" "testing" + "github.com/OffchainLabs/go-bitfield" "github.com/OffchainLabs/prysm/v6/beacon-chain/core/blocks" state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" "github.com/OffchainLabs/prysm/v6/config/params" @@ -12,7 +13,6 @@ import ( "github.com/OffchainLabs/prysm/v6/testing/assert" "github.com/OffchainLabs/prysm/v6/testing/require" "github.com/OffchainLabs/prysm/v6/testing/util" - "github.com/prysmaticlabs/go-bitfield" ) // Beaconfuzz discovered an off by one issue where an attestation could be produced which would pass diff --git a/beacon-chain/core/blocks/attestation_test.go b/beacon-chain/core/blocks/attestation_test.go index 916c31c256..9f9135a7b4 100644 --- a/beacon-chain/core/blocks/attestation_test.go +++ b/beacon-chain/core/blocks/attestation_test.go @@ -4,6 +4,7 @@ import ( "context" "testing" + "github.com/OffchainLabs/go-bitfield" "github.com/OffchainLabs/prysm/v6/beacon-chain/core/blocks" "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" "github.com/OffchainLabs/prysm/v6/beacon-chain/core/signing" @@ -20,7 +21,6 @@ import ( "github.com/OffchainLabs/prysm/v6/testing/assert" "github.com/OffchainLabs/prysm/v6/testing/require" "github.com/OffchainLabs/prysm/v6/testing/util" - "github.com/prysmaticlabs/go-bitfield" ) func TestProcessAggregatedAttestation_OverlappingBits(t *testing.T) { diff --git a/beacon-chain/core/epoch/precompute/attestation_test.go b/beacon-chain/core/epoch/precompute/attestation_test.go index 7a45fb7181..099fe7b749 100644 --- a/beacon-chain/core/epoch/precompute/attestation_test.go +++ b/beacon-chain/core/epoch/precompute/attestation_test.go @@ -3,6 +3,7 @@ package precompute_test import ( "testing" + "github.com/OffchainLabs/go-bitfield" "github.com/OffchainLabs/prysm/v6/beacon-chain/core/epoch/precompute" "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" "github.com/OffchainLabs/prysm/v6/config/params" @@ -12,7 +13,6 @@ import ( "github.com/OffchainLabs/prysm/v6/testing/assert" "github.com/OffchainLabs/prysm/v6/testing/require" "github.com/OffchainLabs/prysm/v6/testing/util" - "github.com/prysmaticlabs/go-bitfield" ) func TestUpdateValidator_Works(t *testing.T) { diff --git a/beacon-chain/core/epoch/precompute/justification_finalization.go b/beacon-chain/core/epoch/precompute/justification_finalization.go index 2a0583ef2b..d219192cab 100644 --- a/beacon-chain/core/epoch/precompute/justification_finalization.go +++ b/beacon-chain/core/epoch/precompute/justification_finalization.go @@ -1,6 +1,7 @@ package precompute import ( + 
"github.com/OffchainLabs/go-bitfield" "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" "github.com/OffchainLabs/prysm/v6/beacon-chain/core/time" "github.com/OffchainLabs/prysm/v6/beacon-chain/state" @@ -8,7 +9,6 @@ import ( ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" "github.com/OffchainLabs/prysm/v6/time/slots" "github.com/pkg/errors" - "github.com/prysmaticlabs/go-bitfield" ) var errNilState = errors.New("nil state") diff --git a/beacon-chain/core/epoch/precompute/justification_finalization_test.go b/beacon-chain/core/epoch/precompute/justification_finalization_test.go index 95d7d92d2b..1eaed13fb5 100644 --- a/beacon-chain/core/epoch/precompute/justification_finalization_test.go +++ b/beacon-chain/core/epoch/precompute/justification_finalization_test.go @@ -3,6 +3,7 @@ package precompute_test import ( "testing" + "github.com/OffchainLabs/go-bitfield" "github.com/OffchainLabs/prysm/v6/beacon-chain/core/altair" "github.com/OffchainLabs/prysm/v6/beacon-chain/core/epoch/precompute" state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" @@ -12,7 +13,6 @@ import ( ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" "github.com/OffchainLabs/prysm/v6/testing/assert" "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/prysmaticlabs/go-bitfield" ) func TestProcessJustificationAndFinalizationPreCompute_ConsecutiveEpochs(t *testing.T) { diff --git a/beacon-chain/core/epoch/precompute/reward_penalty_test.go b/beacon-chain/core/epoch/precompute/reward_penalty_test.go index 279f819970..a9a9084ebb 100644 --- a/beacon-chain/core/epoch/precompute/reward_penalty_test.go +++ b/beacon-chain/core/epoch/precompute/reward_penalty_test.go @@ -3,6 +3,7 @@ package precompute import ( "testing" + "github.com/OffchainLabs/go-bitfield" "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" "github.com/OffchainLabs/prysm/v6/beacon-chain/core/time" "github.com/OffchainLabs/prysm/v6/beacon-chain/state" @@ -16,7 +17,6 @@ import ( "github.com/OffchainLabs/prysm/v6/testing/assert" "github.com/OffchainLabs/prysm/v6/testing/require" "github.com/pkg/errors" - "github.com/prysmaticlabs/go-bitfield" ) func TestProcessRewardsAndPenaltiesPrecompute(t *testing.T) { diff --git a/beacon-chain/core/helpers/beacon_committee.go b/beacon-chain/core/helpers/beacon_committee.go index 8048b1feac..198832d681 100644 --- a/beacon-chain/core/helpers/beacon_committee.go +++ b/beacon-chain/core/helpers/beacon_committee.go @@ -7,6 +7,7 @@ import ( "fmt" "sort" + "github.com/OffchainLabs/go-bitfield" "github.com/OffchainLabs/prysm/v6/beacon-chain/cache" "github.com/OffchainLabs/prysm/v6/beacon-chain/core/time" forkchoicetypes "github.com/OffchainLabs/prysm/v6/beacon-chain/forkchoice/types" @@ -23,7 +24,6 @@ import ( "github.com/OffchainLabs/prysm/v6/runtime/version" "github.com/OffchainLabs/prysm/v6/time/slots" "github.com/pkg/errors" - "github.com/prysmaticlabs/go-bitfield" ) var ( diff --git a/beacon-chain/core/helpers/beacon_committee_test.go b/beacon-chain/core/helpers/beacon_committee_test.go index d168e8e8ab..efb3214aa0 100644 --- a/beacon-chain/core/helpers/beacon_committee_test.go +++ b/beacon-chain/core/helpers/beacon_committee_test.go @@ -5,6 +5,7 @@ import ( "strconv" "testing" + "github.com/OffchainLabs/go-bitfield" "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" "github.com/OffchainLabs/prysm/v6/beacon-chain/core/time" state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" @@ -18,7 +19,6 @@ import ( 
"github.com/OffchainLabs/prysm/v6/testing/require" "github.com/OffchainLabs/prysm/v6/testing/util" "github.com/OffchainLabs/prysm/v6/time/slots" - "github.com/prysmaticlabs/go-bitfield" ) func TestComputeCommittee_WithoutCache(t *testing.T) { diff --git a/beacon-chain/core/helpers/weak_subjectivity_test.go b/beacon-chain/core/helpers/weak_subjectivity_test.go index 8cd74e7819..cbe788541a 100644 --- a/beacon-chain/core/helpers/weak_subjectivity_test.go +++ b/beacon-chain/core/helpers/weak_subjectivity_test.go @@ -285,4 +285,3 @@ func genState(t *testing.T, valCount, avgBalance uint64) state.BeaconState { return beaconState } - diff --git a/beacon-chain/core/transition/altair_transition_no_verify_sig_test.go b/beacon-chain/core/transition/altair_transition_no_verify_sig_test.go index 5e6ec67d0d..4eab874cc5 100644 --- a/beacon-chain/core/transition/altair_transition_no_verify_sig_test.go +++ b/beacon-chain/core/transition/altair_transition_no_verify_sig_test.go @@ -4,6 +4,7 @@ import ( "math" "testing" + "github.com/OffchainLabs/go-bitfield" "github.com/OffchainLabs/prysm/v6/beacon-chain/core/altair" "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" "github.com/OffchainLabs/prysm/v6/beacon-chain/core/signing" @@ -20,7 +21,6 @@ import ( "github.com/OffchainLabs/prysm/v6/testing/require" "github.com/OffchainLabs/prysm/v6/testing/util" "github.com/OffchainLabs/prysm/v6/time/slots" - "github.com/prysmaticlabs/go-bitfield" ) func TestExecuteAltairStateTransitionNoVerify_FullProcess(t *testing.T) { diff --git a/beacon-chain/core/transition/bellatrix_transition_no_verify_sig_test.go b/beacon-chain/core/transition/bellatrix_transition_no_verify_sig_test.go index 0f9e47d4dd..4fb6e13495 100644 --- a/beacon-chain/core/transition/bellatrix_transition_no_verify_sig_test.go +++ b/beacon-chain/core/transition/bellatrix_transition_no_verify_sig_test.go @@ -4,6 +4,7 @@ import ( "math" "testing" + "github.com/OffchainLabs/go-bitfield" "github.com/OffchainLabs/prysm/v6/beacon-chain/core/altair" "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" "github.com/OffchainLabs/prysm/v6/beacon-chain/core/signing" @@ -22,7 +23,6 @@ import ( "github.com/OffchainLabs/prysm/v6/testing/require" "github.com/OffchainLabs/prysm/v6/testing/util" "github.com/OffchainLabs/prysm/v6/time/slots" - "github.com/prysmaticlabs/go-bitfield" ) func TestExecuteBellatrixStateTransitionNoVerify_FullProcess(t *testing.T) { diff --git a/beacon-chain/core/transition/transition_test.go b/beacon-chain/core/transition/transition_test.go index 42a24d5e98..bee546fd13 100644 --- a/beacon-chain/core/transition/transition_test.go +++ b/beacon-chain/core/transition/transition_test.go @@ -4,6 +4,7 @@ import ( "fmt" "testing" + "github.com/OffchainLabs/go-bitfield" "github.com/OffchainLabs/prysm/v6/beacon-chain/core/blocks" "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" "github.com/OffchainLabs/prysm/v6/beacon-chain/core/signing" @@ -23,7 +24,6 @@ import ( "github.com/OffchainLabs/prysm/v6/testing/assert" "github.com/OffchainLabs/prysm/v6/testing/require" "github.com/OffchainLabs/prysm/v6/testing/util" - "github.com/prysmaticlabs/go-bitfield" ) func init() { diff --git a/beacon-chain/monitor/process_attestation_test.go b/beacon-chain/monitor/process_attestation_test.go index 820d0871b5..03bdad84ed 100644 --- a/beacon-chain/monitor/process_attestation_test.go +++ b/beacon-chain/monitor/process_attestation_test.go @@ -4,12 +4,12 @@ import ( "bytes" "testing" + "github.com/OffchainLabs/go-bitfield" 
"github.com/OffchainLabs/prysm/v6/consensus-types/blocks" "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" "github.com/OffchainLabs/prysm/v6/testing/require" "github.com/OffchainLabs/prysm/v6/testing/util" - "github.com/prysmaticlabs/go-bitfield" "github.com/sirupsen/logrus" logTest "github.com/sirupsen/logrus/hooks/test" ) diff --git a/beacon-chain/monitor/process_sync_committee_test.go b/beacon-chain/monitor/process_sync_committee_test.go index bf0b59dc4b..35848ed0ad 100644 --- a/beacon-chain/monitor/process_sync_committee_test.go +++ b/beacon-chain/monitor/process_sync_committee_test.go @@ -3,11 +3,11 @@ package monitor import ( "testing" + "github.com/OffchainLabs/go-bitfield" "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" "github.com/OffchainLabs/prysm/v6/testing/require" "github.com/OffchainLabs/prysm/v6/testing/util" - "github.com/prysmaticlabs/go-bitfield" logTest "github.com/sirupsen/logrus/hooks/test" ) diff --git a/beacon-chain/operations/attestations/kv/aggregated_test.go b/beacon-chain/operations/attestations/kv/aggregated_test.go index e91b589d30..c285fab015 100644 --- a/beacon-chain/operations/attestations/kv/aggregated_test.go +++ b/beacon-chain/operations/attestations/kv/aggregated_test.go @@ -4,6 +4,7 @@ import ( "sort" "testing" + "github.com/OffchainLabs/go-bitfield" "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" "github.com/OffchainLabs/prysm/v6/crypto/bls" ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" @@ -13,7 +14,6 @@ import ( "github.com/OffchainLabs/prysm/v6/testing/util" c "github.com/patrickmn/go-cache" "github.com/pkg/errors" - "github.com/prysmaticlabs/go-bitfield" ) func TestKV_Aggregated_AggregateUnaggregatedAttestations(t *testing.T) { diff --git a/beacon-chain/operations/attestations/kv/block_test.go b/beacon-chain/operations/attestations/kv/block_test.go index 7d683c5f4c..492c5ffeee 100644 --- a/beacon-chain/operations/attestations/kv/block_test.go +++ b/beacon-chain/operations/attestations/kv/block_test.go @@ -4,11 +4,11 @@ import ( "sort" "testing" + "github.com/OffchainLabs/go-bitfield" ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" "github.com/OffchainLabs/prysm/v6/testing/assert" "github.com/OffchainLabs/prysm/v6/testing/require" "github.com/OffchainLabs/prysm/v6/testing/util" - "github.com/prysmaticlabs/go-bitfield" ) func TestKV_BlockAttestation_CanSaveRetrieve(t *testing.T) { diff --git a/beacon-chain/operations/attestations/kv/forkchoice_test.go b/beacon-chain/operations/attestations/kv/forkchoice_test.go index c56b4fd776..9f7fe460d3 100644 --- a/beacon-chain/operations/attestations/kv/forkchoice_test.go +++ b/beacon-chain/operations/attestations/kv/forkchoice_test.go @@ -4,11 +4,11 @@ import ( "sort" "testing" + "github.com/OffchainLabs/go-bitfield" ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" "github.com/OffchainLabs/prysm/v6/testing/assert" "github.com/OffchainLabs/prysm/v6/testing/require" "github.com/OffchainLabs/prysm/v6/testing/util" - "github.com/prysmaticlabs/go-bitfield" ) func TestKV_Forkchoice_CanSaveRetrieve(t *testing.T) { diff --git a/beacon-chain/operations/attestations/kv/seen_bits.go b/beacon-chain/operations/attestations/kv/seen_bits.go index c1c710ae8e..5dfe00cd69 100644 --- a/beacon-chain/operations/attestations/kv/seen_bits.go +++ b/beacon-chain/operations/attestations/kv/seen_bits.go @@ -1,11 +1,11 @@ package kv import ( + 
"github.com/OffchainLabs/go-bitfield" ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1/attestation" "github.com/patrickmn/go-cache" "github.com/pkg/errors" - "github.com/prysmaticlabs/go-bitfield" ) func (c *AttCaches) insertSeenBit(att ethpb.Att) error { diff --git a/beacon-chain/operations/attestations/kv/seen_bits_test.go b/beacon-chain/operations/attestations/kv/seen_bits_test.go index ec8611fdc8..e8a7037307 100644 --- a/beacon-chain/operations/attestations/kv/seen_bits_test.go +++ b/beacon-chain/operations/attestations/kv/seen_bits_test.go @@ -3,11 +3,11 @@ package kv import ( "testing" + "github.com/OffchainLabs/go-bitfield" ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1/attestation" "github.com/OffchainLabs/prysm/v6/testing/require" "github.com/OffchainLabs/prysm/v6/testing/util" - "github.com/prysmaticlabs/go-bitfield" ) func TestAttCaches_hasSeenBit(t *testing.T) { diff --git a/beacon-chain/operations/attestations/kv/unaggregated_test.go b/beacon-chain/operations/attestations/kv/unaggregated_test.go index 56750dc44c..3d53c392db 100644 --- a/beacon-chain/operations/attestations/kv/unaggregated_test.go +++ b/beacon-chain/operations/attestations/kv/unaggregated_test.go @@ -5,6 +5,7 @@ import ( "sort" "testing" + "github.com/OffchainLabs/go-bitfield" fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" @@ -13,7 +14,6 @@ import ( "github.com/OffchainLabs/prysm/v6/testing/require" "github.com/OffchainLabs/prysm/v6/testing/util" c "github.com/patrickmn/go-cache" - "github.com/prysmaticlabs/go-bitfield" ) func TestKV_Unaggregated_UnaggregatedAttestations(t *testing.T) { diff --git a/beacon-chain/operations/attestations/prepare_forkchoice.go b/beacon-chain/operations/attestations/prepare_forkchoice.go index bf399546c3..99a9b3f90e 100644 --- a/beacon-chain/operations/attestations/prepare_forkchoice.go +++ b/beacon-chain/operations/attestations/prepare_forkchoice.go @@ -5,6 +5,7 @@ import ( "context" "time" + "github.com/OffchainLabs/go-bitfield" "github.com/OffchainLabs/prysm/v6/config/features" "github.com/OffchainLabs/prysm/v6/config/params" "github.com/OffchainLabs/prysm/v6/monitoring/tracing/trace" @@ -13,7 +14,6 @@ import ( attaggregation "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1/attestation/aggregation/attestations" "github.com/OffchainLabs/prysm/v6/time/slots" "github.com/pkg/errors" - "github.com/prysmaticlabs/go-bitfield" ) // This prepares fork choice attestations by running batchForkChoiceAtts diff --git a/beacon-chain/operations/attestations/prepare_forkchoice_test.go b/beacon-chain/operations/attestations/prepare_forkchoice_test.go index 108ebb4db5..598630c428 100644 --- a/beacon-chain/operations/attestations/prepare_forkchoice_test.go +++ b/beacon-chain/operations/attestations/prepare_forkchoice_test.go @@ -5,13 +5,13 @@ import ( "sort" "testing" + "github.com/OffchainLabs/go-bitfield" "github.com/OffchainLabs/prysm/v6/crypto/bls" ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" attaggregation "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1/attestation/aggregation/attestations" "github.com/OffchainLabs/prysm/v6/testing/assert" "github.com/OffchainLabs/prysm/v6/testing/require" "github.com/OffchainLabs/prysm/v6/testing/util" - "github.com/prysmaticlabs/go-bitfield" 
"google.golang.org/protobuf/proto" ) diff --git a/beacon-chain/operations/attestations/prune_expired_test.go b/beacon-chain/operations/attestations/prune_expired_test.go index 70533fa8ae..d5ef9b0c29 100644 --- a/beacon-chain/operations/attestations/prune_expired_test.go +++ b/beacon-chain/operations/attestations/prune_expired_test.go @@ -5,6 +5,7 @@ import ( "testing" "time" + "github.com/OffchainLabs/go-bitfield" "github.com/OffchainLabs/prysm/v6/async" fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" "github.com/OffchainLabs/prysm/v6/config/params" @@ -13,7 +14,6 @@ import ( "github.com/OffchainLabs/prysm/v6/testing/assert" "github.com/OffchainLabs/prysm/v6/testing/require" "github.com/OffchainLabs/prysm/v6/testing/util" - "github.com/prysmaticlabs/go-bitfield" ) func TestPruneExpired_Ticker(t *testing.T) { diff --git a/beacon-chain/p2p/broadcaster_test.go b/beacon-chain/p2p/broadcaster_test.go index 36696b85e3..9565008318 100644 --- a/beacon-chain/p2p/broadcaster_test.go +++ b/beacon-chain/p2p/broadcaster_test.go @@ -9,6 +9,7 @@ import ( "testing" "time" + "github.com/OffchainLabs/go-bitfield" "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/kzg" "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" "github.com/OffchainLabs/prysm/v6/beacon-chain/core/peerdas" @@ -31,7 +32,6 @@ import ( "github.com/OffchainLabs/prysm/v6/time/slots" pubsub "github.com/libp2p/go-libp2p-pubsub" "github.com/libp2p/go-libp2p/core/host" - "github.com/prysmaticlabs/go-bitfield" "google.golang.org/protobuf/proto" ) diff --git a/beacon-chain/p2p/discovery.go b/beacon-chain/p2p/discovery.go index a8b0f59d6b..a5f324ad5c 100644 --- a/beacon-chain/p2p/discovery.go +++ b/beacon-chain/p2p/discovery.go @@ -9,6 +9,7 @@ import ( "sync" "time" + "github.com/OffchainLabs/go-bitfield" "github.com/OffchainLabs/prysm/v6/beacon-chain/cache" "github.com/OffchainLabs/prysm/v6/beacon-chain/core/peerdas" "github.com/OffchainLabs/prysm/v6/cmd/beacon-chain/flags" @@ -24,7 +25,6 @@ import ( "github.com/libp2p/go-libp2p/core/peer" ma "github.com/multiformats/go-multiaddr" "github.com/pkg/errors" - "github.com/prysmaticlabs/go-bitfield" "github.com/sirupsen/logrus" ) diff --git a/beacon-chain/p2p/discovery_test.go b/beacon-chain/p2p/discovery_test.go index b5cb188b4f..cd48877690 100644 --- a/beacon-chain/p2p/discovery_test.go +++ b/beacon-chain/p2p/discovery_test.go @@ -16,6 +16,7 @@ import ( "testing" "time" + "github.com/OffchainLabs/go-bitfield" mock "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/testing" "github.com/OffchainLabs/prysm/v6/beacon-chain/cache" testDB "github.com/OffchainLabs/prysm/v6/beacon-chain/db/testing" @@ -41,7 +42,6 @@ import ( "github.com/libp2p/go-libp2p/core/host" "github.com/libp2p/go-libp2p/core/network" "github.com/libp2p/go-libp2p/core/peer" - "github.com/prysmaticlabs/go-bitfield" logTest "github.com/sirupsen/logrus/hooks/test" ) diff --git a/beacon-chain/p2p/peers/benchmark_test.go b/beacon-chain/p2p/peers/benchmark_test.go index 2a9d0079ac..e756a65baa 100644 --- a/beacon-chain/p2p/peers/benchmark_test.go +++ b/beacon-chain/p2p/peers/benchmark_test.go @@ -3,7 +3,7 @@ package peers import ( "testing" - "github.com/prysmaticlabs/go-bitfield" + "github.com/OffchainLabs/go-bitfield" ) func Benchmark_retrieveIndicesFromBitfield(b *testing.B) { diff --git a/beacon-chain/p2p/peers/status.go b/beacon-chain/p2p/peers/status.go index 048f8e3251..f533cf31e3 100644 --- a/beacon-chain/p2p/peers/status.go +++ b/beacon-chain/p2p/peers/status.go @@ -30,6 +30,7 @@ import ( 
"strings" "time" + "github.com/OffchainLabs/go-bitfield" "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/peers/peerdata" "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/peers/scorers" "github.com/OffchainLabs/prysm/v6/config/features" @@ -47,7 +48,6 @@ import ( ma "github.com/multiformats/go-multiaddr" manet "github.com/multiformats/go-multiaddr/net" "github.com/pkg/errors" - "github.com/prysmaticlabs/go-bitfield" ) const ( diff --git a/beacon-chain/p2p/peers/status_test.go b/beacon-chain/p2p/peers/status_test.go index c57344c041..d5d45cbeef 100644 --- a/beacon-chain/p2p/peers/status_test.go +++ b/beacon-chain/p2p/peers/status_test.go @@ -6,6 +6,7 @@ import ( "testing" "time" + "github.com/OffchainLabs/go-bitfield" "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/peers" "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/peers/peerdata" "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/peers/scorers" @@ -21,7 +22,6 @@ import ( "github.com/libp2p/go-libp2p/core/network" "github.com/libp2p/go-libp2p/core/peer" ma "github.com/multiformats/go-multiaddr" - "github.com/prysmaticlabs/go-bitfield" ) func TestStatus(t *testing.T) { diff --git a/beacon-chain/p2p/subnets.go b/beacon-chain/p2p/subnets.go index 13638dd3bd..0c7d3a1f54 100644 --- a/beacon-chain/p2p/subnets.go +++ b/beacon-chain/p2p/subnets.go @@ -8,6 +8,7 @@ import ( "sync" "time" + "github.com/OffchainLabs/go-bitfield" "github.com/OffchainLabs/prysm/v6/beacon-chain/cache" "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" "github.com/OffchainLabs/prysm/v6/beacon-chain/core/peerdas" @@ -24,7 +25,6 @@ import ( "github.com/ethereum/go-ethereum/p2p/enr" "github.com/holiman/uint256" "github.com/pkg/errors" - "github.com/prysmaticlabs/go-bitfield" "github.com/sirupsen/logrus" ) diff --git a/beacon-chain/p2p/subnets_test.go b/beacon-chain/p2p/subnets_test.go index ad2855af6b..357784421f 100644 --- a/beacon-chain/p2p/subnets_test.go +++ b/beacon-chain/p2p/subnets_test.go @@ -7,6 +7,7 @@ import ( "testing" "time" + "github.com/OffchainLabs/go-bitfield" "github.com/OffchainLabs/prysm/v6/beacon-chain/cache" "github.com/OffchainLabs/prysm/v6/beacon-chain/core/peerdas" testDB "github.com/OffchainLabs/prysm/v6/beacon-chain/db/testing" @@ -23,7 +24,6 @@ import ( "github.com/ethereum/go-ethereum/p2p/enr" "github.com/libp2p/go-libp2p/core/crypto" "github.com/libp2p/go-libp2p/core/network" - "github.com/prysmaticlabs/go-bitfield" ) func TestStartDiscV5_FindAndDialPeersWithSubnet(t *testing.T) { diff --git a/beacon-chain/p2p/utils.go b/beacon-chain/p2p/utils.go index ffc9852b60..f65b4d5e17 100644 --- a/beacon-chain/p2p/utils.go +++ b/beacon-chain/p2p/utils.go @@ -13,6 +13,7 @@ import ( "path" "time" + "github.com/OffchainLabs/go-bitfield" "github.com/OffchainLabs/prysm/v6/beacon-chain/db" "github.com/OffchainLabs/prysm/v6/beacon-chain/db/kv" "github.com/OffchainLabs/prysm/v6/config/params" @@ -28,7 +29,6 @@ import ( "github.com/libp2p/go-libp2p/core/crypto" "github.com/libp2p/go-libp2p/core/peer" "github.com/pkg/errors" - "github.com/prysmaticlabs/go-bitfield" "github.com/sirupsen/logrus" ) diff --git a/beacon-chain/rpc/eth/beacon/handlers_pool_test.go b/beacon-chain/rpc/eth/beacon/handlers_pool_test.go index 4fb0c7d238..855a79b400 100644 --- a/beacon-chain/rpc/eth/beacon/handlers_pool_test.go +++ b/beacon-chain/rpc/eth/beacon/handlers_pool_test.go @@ -10,6 +10,7 @@ import ( "testing" "time" + "github.com/OffchainLabs/go-bitfield" "github.com/OffchainLabs/prysm/v6/api" "github.com/OffchainLabs/prysm/v6/api/server" 
"github.com/OffchainLabs/prysm/v6/api/server/structs" @@ -42,7 +43,6 @@ import ( "github.com/OffchainLabs/prysm/v6/time/slots" "github.com/ethereum/go-ethereum/common/hexutil" "github.com/pkg/errors" - "github.com/prysmaticlabs/go-bitfield" ) func TestListAttestations(t *testing.T) { diff --git a/beacon-chain/rpc/eth/beacon/handlers_test.go b/beacon-chain/rpc/eth/beacon/handlers_test.go index d39830dec9..86e24e33b4 100644 --- a/beacon-chain/rpc/eth/beacon/handlers_test.go +++ b/beacon-chain/rpc/eth/beacon/handlers_test.go @@ -12,6 +12,7 @@ import ( "testing" "time" + "github.com/OffchainLabs/go-bitfield" "github.com/OffchainLabs/prysm/v6/api" "github.com/OffchainLabs/prysm/v6/api/server/structs" "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/kzg" @@ -44,7 +45,6 @@ import ( "github.com/ethereum/go-ethereum/common/hexutil" "github.com/pkg/errors" ssz "github.com/prysmaticlabs/fastssz" - "github.com/prysmaticlabs/go-bitfield" logTest "github.com/sirupsen/logrus/hooks/test" "github.com/stretchr/testify/mock" "go.uber.org/mock/gomock" diff --git a/beacon-chain/rpc/eth/node/handlers_test.go b/beacon-chain/rpc/eth/node/handlers_test.go index 483b2908e9..b9cf95b11d 100644 --- a/beacon-chain/rpc/eth/node/handlers_test.go +++ b/beacon-chain/rpc/eth/node/handlers_test.go @@ -9,6 +9,7 @@ import ( "runtime" "testing" + "github.com/OffchainLabs/go-bitfield" "github.com/OffchainLabs/prysm/v6/api/server/structs" mock "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/testing" "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p" @@ -28,7 +29,6 @@ import ( "github.com/ethereum/go-ethereum/p2p/enr" "github.com/libp2p/go-libp2p/core/peer" ma "github.com/multiformats/go-multiaddr" - "github.com/prysmaticlabs/go-bitfield" ) type dummyIdentity enode.ID diff --git a/beacon-chain/rpc/eth/rewards/handlers_test.go b/beacon-chain/rpc/eth/rewards/handlers_test.go index b3d5f2c9ae..4e596804a8 100644 --- a/beacon-chain/rpc/eth/rewards/handlers_test.go +++ b/beacon-chain/rpc/eth/rewards/handlers_test.go @@ -10,6 +10,7 @@ import ( "strings" "testing" + "github.com/OffchainLabs/go-bitfield" "github.com/OffchainLabs/prysm/v6/api/server/structs" mock "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/testing" "github.com/OffchainLabs/prysm/v6/beacon-chain/core/altair" @@ -34,7 +35,6 @@ import ( "github.com/OffchainLabs/prysm/v6/testing/require" "github.com/OffchainLabs/prysm/v6/testing/util" "github.com/pkg/errors" - "github.com/prysmaticlabs/go-bitfield" ) func BlockRewardTestSetup(t *testing.T, ver int) (state.BeaconState, interfaces.SignedBeaconBlock, error) { diff --git a/beacon-chain/rpc/eth/validator/handlers_test.go b/beacon-chain/rpc/eth/validator/handlers_test.go index d0f0d90747..88661139c0 100644 --- a/beacon-chain/rpc/eth/validator/handlers_test.go +++ b/beacon-chain/rpc/eth/validator/handlers_test.go @@ -11,6 +11,7 @@ import ( "testing" "time" + "github.com/OffchainLabs/go-bitfield" "github.com/OffchainLabs/prysm/v6/api" "github.com/OffchainLabs/prysm/v6/api/server/structs" mockChain "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/testing" @@ -43,7 +44,6 @@ import ( "github.com/OffchainLabs/prysm/v6/time/slots" "github.com/ethereum/go-ethereum/common/hexutil" "github.com/pkg/errors" - "github.com/prysmaticlabs/go-bitfield" logTest "github.com/sirupsen/logrus/hooks/test" ) diff --git a/beacon-chain/rpc/prysm/beacon/handlers_test.go b/beacon-chain/rpc/prysm/beacon/handlers_test.go index 6c10f02258..2b40edba71 100644 --- a/beacon-chain/rpc/prysm/beacon/handlers_test.go +++ 
b/beacon-chain/rpc/prysm/beacon/handlers_test.go @@ -10,6 +10,7 @@ import ( "net/http/httptest" "testing" + "github.com/OffchainLabs/go-bitfield" "github.com/OffchainLabs/prysm/v6/api/server/structs" chainMock "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/testing" "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" @@ -33,7 +34,6 @@ import ( "github.com/OffchainLabs/prysm/v6/testing/util" "github.com/OffchainLabs/prysm/v6/time/slots" "github.com/ethereum/go-ethereum/common/hexutil" - "github.com/prysmaticlabs/go-bitfield" ) func individualVotesHelper(t *testing.T, request *structs.GetIndividualVotesRequest, s *Server) (string, *structs.GetIndividualVotesResponse) { diff --git a/beacon-chain/rpc/prysm/beacon/ssz_query_test.go b/beacon-chain/rpc/prysm/beacon/ssz_query_test.go index 15fdc7fe3b..099faef713 100644 --- a/beacon-chain/rpc/prysm/beacon/ssz_query_test.go +++ b/beacon-chain/rpc/prysm/beacon/ssz_query_test.go @@ -10,6 +10,7 @@ import ( "strings" "testing" + "github.com/OffchainLabs/go-bitfield" "github.com/OffchainLabs/prysm/v6/api" "github.com/OffchainLabs/prysm/v6/api/server/structs" chainMock "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/testing" @@ -24,7 +25,6 @@ import ( "github.com/OffchainLabs/prysm/v6/testing/require" "github.com/OffchainLabs/prysm/v6/testing/util" "github.com/ethereum/go-ethereum/common/hexutil" - "github.com/prysmaticlabs/go-bitfield" ) func TestQueryBeaconState(t *testing.T) { diff --git a/beacon-chain/rpc/prysm/v1alpha1/beacon/attestations_test.go b/beacon-chain/rpc/prysm/v1alpha1/beacon/attestations_test.go index bb8df746d8..16d5ed9392 100644 --- a/beacon-chain/rpc/prysm/v1alpha1/beacon/attestations_test.go +++ b/beacon-chain/rpc/prysm/v1alpha1/beacon/attestations_test.go @@ -7,6 +7,7 @@ import ( "testing" "time" + "github.com/OffchainLabs/go-bitfield" chainMock "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/testing" "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" dbTest "github.com/OffchainLabs/prysm/v6/beacon-chain/db/testing" @@ -27,7 +28,6 @@ import ( "github.com/OffchainLabs/prysm/v6/testing/require" "github.com/OffchainLabs/prysm/v6/testing/util" "github.com/OffchainLabs/prysm/v6/time/slots" - "github.com/prysmaticlabs/go-bitfield" "google.golang.org/protobuf/proto" ) diff --git a/beacon-chain/rpc/prysm/v1alpha1/beacon/validators_test.go b/beacon-chain/rpc/prysm/v1alpha1/beacon/validators_test.go index eaf217b013..b3f1370785 100644 --- a/beacon-chain/rpc/prysm/v1alpha1/beacon/validators_test.go +++ b/beacon-chain/rpc/prysm/v1alpha1/beacon/validators_test.go @@ -8,6 +8,7 @@ import ( "testing" "time" + "github.com/OffchainLabs/go-bitfield" mock "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/testing" "github.com/OffchainLabs/prysm/v6/beacon-chain/core/altair" "github.com/OffchainLabs/prysm/v6/beacon-chain/core/epoch/precompute" @@ -37,7 +38,6 @@ import ( "github.com/OffchainLabs/prysm/v6/testing/util" prysmTime "github.com/OffchainLabs/prysm/v6/time" "github.com/OffchainLabs/prysm/v6/time/slots" - "github.com/prysmaticlabs/go-bitfield" "google.golang.org/protobuf/proto" "google.golang.org/protobuf/types/known/emptypb" ) diff --git a/beacon-chain/rpc/prysm/v1alpha1/debug/block_test.go b/beacon-chain/rpc/prysm/v1alpha1/debug/block_test.go index a4e4af1a2b..1613732094 100644 --- a/beacon-chain/rpc/prysm/v1alpha1/debug/block_test.go +++ b/beacon-chain/rpc/prysm/v1alpha1/debug/block_test.go @@ -4,6 +4,7 @@ import ( "testing" "time" + "github.com/OffchainLabs/go-bitfield" mock 
"github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/testing" "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" dbTest "github.com/OffchainLabs/prysm/v6/beacon-chain/db/testing" @@ -15,7 +16,6 @@ import ( "github.com/OffchainLabs/prysm/v6/testing/assert" "github.com/OffchainLabs/prysm/v6/testing/require" "github.com/OffchainLabs/prysm/v6/testing/util" - "github.com/prysmaticlabs/go-bitfield" ) func TestServer_GetBlock(t *testing.T) { diff --git a/beacon-chain/rpc/prysm/v1alpha1/validator/aggregator_test.go b/beacon-chain/rpc/prysm/v1alpha1/validator/aggregator_test.go index 6bb62c397e..9fe8fa4713 100644 --- a/beacon-chain/rpc/prysm/v1alpha1/validator/aggregator_test.go +++ b/beacon-chain/rpc/prysm/v1alpha1/validator/aggregator_test.go @@ -6,6 +6,7 @@ import ( "testing" "time" + "github.com/OffchainLabs/go-bitfield" mock "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/testing" "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" "github.com/OffchainLabs/prysm/v6/beacon-chain/core/signing" @@ -25,7 +26,6 @@ import ( "github.com/OffchainLabs/prysm/v6/testing/assert" "github.com/OffchainLabs/prysm/v6/testing/require" "github.com/OffchainLabs/prysm/v6/testing/util" - "github.com/prysmaticlabs/go-bitfield" ) func TestSubmitAggregateAndProof_Syncing(t *testing.T) { diff --git a/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_altair.go b/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_altair.go index 74de8a4030..5a7ae057ac 100644 --- a/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_altair.go +++ b/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_altair.go @@ -4,6 +4,7 @@ import ( "bytes" "context" + "github.com/OffchainLabs/go-bitfield" "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" "github.com/OffchainLabs/prysm/v6/beacon-chain/state" "github.com/OffchainLabs/prysm/v6/config/params" @@ -18,7 +19,6 @@ import ( "github.com/OffchainLabs/prysm/v6/runtime/version" "github.com/OffchainLabs/prysm/v6/time/slots" "github.com/pkg/errors" - "github.com/prysmaticlabs/go-bitfield" ) func (vs *Server) setSyncAggregate(ctx context.Context, blk interfaces.SignedBeaconBlock, headState state.BeaconState) { diff --git a/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_altair_test.go b/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_altair_test.go index 440d29826e..04f086079a 100644 --- a/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_altair_test.go +++ b/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_altair_test.go @@ -3,6 +3,7 @@ package validator import ( "testing" + "github.com/OffchainLabs/go-bitfield" chainmock "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/testing" "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" "github.com/OffchainLabs/prysm/v6/beacon-chain/operations/synccommittee" @@ -16,7 +17,6 @@ import ( "github.com/OffchainLabs/prysm/v6/testing/assert" "github.com/OffchainLabs/prysm/v6/testing/require" "github.com/OffchainLabs/prysm/v6/testing/util" - "github.com/prysmaticlabs/go-bitfield" ) func TestProposer_GetSyncAggregate_OK(t *testing.T) { diff --git a/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_attestations.go b/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_attestations.go index 642e3b81e9..2e84393854 100644 --- a/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_attestations.go +++ b/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_attestations.go @@ -8,6 +8,7 @@ import ( "slices" "sort" + "github.com/OffchainLabs/go-bitfield" "github.com/OffchainLabs/prysm/v6/beacon-chain/core/blocks" 
"github.com/OffchainLabs/prysm/v6/beacon-chain/core/electra" "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" @@ -23,7 +24,6 @@ import ( "github.com/OffchainLabs/prysm/v6/runtime/version" "github.com/OffchainLabs/prysm/v6/time/slots" "github.com/pkg/errors" - "github.com/prysmaticlabs/go-bitfield" "github.com/sirupsen/logrus" ) diff --git a/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_attestations_electra.go b/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_attestations_electra.go index c6547ecddb..7ca2a62f5c 100644 --- a/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_attestations_electra.go +++ b/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_attestations_electra.go @@ -4,10 +4,10 @@ import ( "cmp" "slices" + "github.com/OffchainLabs/go-bitfield" "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" "github.com/OffchainLabs/prysm/v6/crypto/bls" ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/prysmaticlabs/go-bitfield" ) // computeOnChainAggregate constructs a final aggregate form a list of network aggregates with equal attestation data. diff --git a/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_attestations_electra_test.go b/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_attestations_electra_test.go index 3f86c54098..ddda9b4f19 100644 --- a/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_attestations_electra_test.go +++ b/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_attestations_electra_test.go @@ -4,6 +4,7 @@ import ( "reflect" "testing" + "github.com/OffchainLabs/go-bitfield" "github.com/OffchainLabs/prysm/v6/config/params" "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" "github.com/OffchainLabs/prysm/v6/crypto/bls/blst" @@ -11,7 +12,6 @@ import ( ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" "github.com/OffchainLabs/prysm/v6/testing/assert" "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/prysmaticlabs/go-bitfield" ) func Test_computeOnChainAggregate(t *testing.T) { diff --git a/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_attestations_test.go b/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_attestations_test.go index d6269acf06..93056e4bea 100644 --- a/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_attestations_test.go +++ b/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_attestations_test.go @@ -7,6 +7,7 @@ import ( "strconv" "testing" + "github.com/OffchainLabs/go-bitfield" chainMock "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/testing" "github.com/OffchainLabs/prysm/v6/beacon-chain/core/electra" "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" @@ -21,7 +22,6 @@ import ( "github.com/OffchainLabs/prysm/v6/testing/require" "github.com/OffchainLabs/prysm/v6/testing/util" "github.com/OffchainLabs/prysm/v6/time/slots" - "github.com/prysmaticlabs/go-bitfield" ) func TestProposer_ProposerAtts_committeeAwareSort(t *testing.T) { diff --git a/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_test.go b/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_test.go index f3773085d8..447d30fa02 100644 --- a/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_test.go +++ b/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_test.go @@ -6,6 +6,7 @@ import ( "testing" "time" + "github.com/OffchainLabs/go-bitfield" builderapi "github.com/OffchainLabs/prysm/v6/api/client/builder" "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/kzg" mock "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/testing" @@ -52,7 +53,6 @@ import 
( "github.com/ethereum/go-ethereum/common" "github.com/ethereum/go-ethereum/common/hexutil" "github.com/pkg/errors" - "github.com/prysmaticlabs/go-bitfield" logTest "github.com/sirupsen/logrus/hooks/test" "google.golang.org/grpc/codes" "google.golang.org/grpc/status" diff --git a/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_utils_bench_test.go b/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_utils_bench_test.go index 3f6b2c352f..9eb09e077f 100644 --- a/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_utils_bench_test.go +++ b/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_utils_bench_test.go @@ -4,11 +4,11 @@ import ( "fmt" "testing" + "github.com/OffchainLabs/go-bitfield" "github.com/OffchainLabs/prysm/v6/config/params" ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" aggtesting "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1/attestation/aggregation/testing" "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/prysmaticlabs/go-bitfield" ) func BenchmarkProposerAtts_sortByProfitability(b *testing.B) { diff --git a/beacon-chain/rpc/prysm/validator/handlers_test.go b/beacon-chain/rpc/prysm/validator/handlers_test.go index dcc4f8ef13..d38ac2ee1f 100644 --- a/beacon-chain/rpc/prysm/validator/handlers_test.go +++ b/beacon-chain/rpc/prysm/validator/handlers_test.go @@ -11,6 +11,7 @@ import ( "testing" "time" + "github.com/OffchainLabs/go-bitfield" "github.com/OffchainLabs/prysm/v6/api/server/structs" mock "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/testing" "github.com/OffchainLabs/prysm/v6/beacon-chain/core/altair" @@ -37,7 +38,6 @@ import ( prysmTime "github.com/OffchainLabs/prysm/v6/time" "github.com/OffchainLabs/prysm/v6/time/slots" "github.com/ethereum/go-ethereum/common/hexutil" - "github.com/prysmaticlabs/go-bitfield" ) func addDefaultReplayerBuilder(s *Server, h stategen.HistoryAccessor) { diff --git a/beacon-chain/rpc/prysm/validator/validator_performance_test.go b/beacon-chain/rpc/prysm/validator/validator_performance_test.go index 0420132692..e85668f04b 100644 --- a/beacon-chain/rpc/prysm/validator/validator_performance_test.go +++ b/beacon-chain/rpc/prysm/validator/validator_performance_test.go @@ -9,6 +9,7 @@ import ( "testing" "time" + "github.com/OffchainLabs/go-bitfield" "github.com/OffchainLabs/prysm/v6/api/server/structs" mock "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/testing" "github.com/OffchainLabs/prysm/v6/beacon-chain/core/epoch/precompute" @@ -23,7 +24,6 @@ import ( "github.com/OffchainLabs/prysm/v6/runtime/version" "github.com/OffchainLabs/prysm/v6/testing/require" "github.com/OffchainLabs/prysm/v6/testing/util" - "github.com/prysmaticlabs/go-bitfield" ) func TestServer_GetValidatorPerformance(t *testing.T) { diff --git a/beacon-chain/state/interfaces.go b/beacon-chain/state/interfaces.go index 54f1aceae6..38b1421e95 100644 --- a/beacon-chain/state/interfaces.go +++ b/beacon-chain/state/interfaces.go @@ -8,6 +8,7 @@ import ( "encoding/json" "time" + "github.com/OffchainLabs/go-bitfield" customtypes "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native/custom-types" fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" @@ -15,7 +16,6 @@ import ( "github.com/OffchainLabs/prysm/v6/crypto/bls" enginev1 "github.com/OffchainLabs/prysm/v6/proto/engine/v1" ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/prysmaticlabs/go-bitfield" ) // BeaconState has read and write access to beacon state 
methods. diff --git a/beacon-chain/state/state-native/beacon_state.go b/beacon-chain/state/state-native/beacon_state.go index 8dd7285e47..e44579978d 100644 --- a/beacon-chain/state/state-native/beacon_state.go +++ b/beacon-chain/state/state-native/beacon_state.go @@ -4,6 +4,7 @@ import ( "encoding/json" "sync" + "github.com/OffchainLabs/go-bitfield" "github.com/OffchainLabs/prysm/v6/beacon-chain/state/fieldtrie" customtypes "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native/custom-types" "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native/types" @@ -11,7 +12,6 @@ import ( "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" enginev1 "github.com/OffchainLabs/prysm/v6/proto/engine/v1" ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/prysmaticlabs/go-bitfield" ) // BeaconState defines a struct containing utilities for the Ethereum Beacon Chain state, defining diff --git a/beacon-chain/state/state-native/getters_checkpoint.go b/beacon-chain/state/state-native/getters_checkpoint.go index 103057067e..76b760e170 100644 --- a/beacon-chain/state/state-native/getters_checkpoint.go +++ b/beacon-chain/state/state-native/getters_checkpoint.go @@ -3,9 +3,9 @@ package state_native import ( "bytes" + "github.com/OffchainLabs/go-bitfield" "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/prysmaticlabs/go-bitfield" ) // JustificationBits marking which epochs have been justified in the beacon chain. diff --git a/beacon-chain/state/state-native/getters_checkpoint_test.go b/beacon-chain/state/state-native/getters_checkpoint_test.go index 79d1b5fcfc..852d48cf71 100644 --- a/beacon-chain/state/state-native/getters_checkpoint_test.go +++ b/beacon-chain/state/state-native/getters_checkpoint_test.go @@ -3,10 +3,10 @@ package state_native import ( "testing" + "github.com/OffchainLabs/go-bitfield" "github.com/OffchainLabs/prysm/v6/beacon-chain/state" testtmpl "github.com/OffchainLabs/prysm/v6/beacon-chain/state/testing" ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/prysmaticlabs/go-bitfield" ) func TestBeaconState_PreviousJustifiedCheckpointNil_Phase0(t *testing.T) { diff --git a/beacon-chain/state/state-native/hasher_test.go b/beacon-chain/state/state-native/hasher_test.go index e73f4fbaf6..2b7407d8e9 100644 --- a/beacon-chain/state/state-native/hasher_test.go +++ b/beacon-chain/state/state-native/hasher_test.go @@ -4,6 +4,7 @@ import ( "testing" "time" + "github.com/OffchainLabs/go-bitfield" statenative "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" "github.com/OffchainLabs/prysm/v6/config/params" "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" @@ -13,7 +14,6 @@ import ( "github.com/OffchainLabs/prysm/v6/testing/assert" "github.com/OffchainLabs/prysm/v6/testing/require" "github.com/OffchainLabs/prysm/v6/testing/util" - "github.com/prysmaticlabs/go-bitfield" ) func TestComputeFieldRootsWithHasher_Phase0(t *testing.T) { diff --git a/beacon-chain/state/state-native/references_test.go b/beacon-chain/state/state-native/references_test.go index 67b9b406c3..04d44c74c9 100644 --- a/beacon-chain/state/state-native/references_test.go +++ b/beacon-chain/state/state-native/references_test.go @@ -6,13 +6,13 @@ import ( "runtime/debug" "testing" + "github.com/OffchainLabs/go-bitfield" "github.com/OffchainLabs/prysm/v6/beacon-chain/state" "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native/types" 
"github.com/OffchainLabs/prysm/v6/encoding/bytesutil" ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" "github.com/OffchainLabs/prysm/v6/testing/assert" "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/prysmaticlabs/go-bitfield" ) func TestStateReferenceSharing_Finalizer_Phase0(t *testing.T) { diff --git a/beacon-chain/state/state-native/setters_checkpoint.go b/beacon-chain/state/state-native/setters_checkpoint.go index 16f20d58f5..f405f3a66d 100644 --- a/beacon-chain/state/state-native/setters_checkpoint.go +++ b/beacon-chain/state/state-native/setters_checkpoint.go @@ -1,9 +1,9 @@ package state_native import ( + "github.com/OffchainLabs/go-bitfield" "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native/types" ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/prysmaticlabs/go-bitfield" ) // SetJustificationBits for the beacon state. diff --git a/beacon-chain/state/state-native/state_test.go b/beacon-chain/state/state-native/state_test.go index 57f04eef63..980db230b1 100644 --- a/beacon-chain/state/state-native/state_test.go +++ b/beacon-chain/state/state-native/state_test.go @@ -6,6 +6,7 @@ import ( "sync" "testing" + "github.com/OffchainLabs/go-bitfield" "github.com/OffchainLabs/prysm/v6/beacon-chain/state" "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native/types" "github.com/OffchainLabs/prysm/v6/beacon-chain/state/stateutil" @@ -16,7 +17,6 @@ import ( ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" "github.com/OffchainLabs/prysm/v6/testing/assert" "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/prysmaticlabs/go-bitfield" ) func TestBeaconState_NoDeadlock_Phase0(t *testing.T) { diff --git a/beacon-chain/state/testing/getters_checkpoint.go b/beacon-chain/state/testing/getters_checkpoint.go index c295b50ef8..a458ae1804 100644 --- a/beacon-chain/state/testing/getters_checkpoint.go +++ b/beacon-chain/state/testing/getters_checkpoint.go @@ -3,12 +3,12 @@ package testing import ( "testing" + "github.com/OffchainLabs/go-bitfield" "github.com/OffchainLabs/prysm/v6/beacon-chain/state" fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/prysmaticlabs/go-bitfield" ) func VerifyBeaconStateJustificationBitsNil(t *testing.T, factory getState) { diff --git a/beacon-chain/sync/pending_attestations_queue_test.go b/beacon-chain/sync/pending_attestations_queue_test.go index 146f75ac18..fbe2113ef8 100644 --- a/beacon-chain/sync/pending_attestations_queue_test.go +++ b/beacon-chain/sync/pending_attestations_queue_test.go @@ -8,6 +8,7 @@ import ( "testing" "time" + "github.com/OffchainLabs/go-bitfield" "github.com/OffchainLabs/prysm/v6/async/abool" mock "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/testing" "github.com/OffchainLabs/prysm/v6/beacon-chain/core/feed" @@ -36,7 +37,6 @@ import ( pubsub "github.com/libp2p/go-libp2p-pubsub" pubsubpb "github.com/libp2p/go-libp2p-pubsub/pb" "github.com/libp2p/go-libp2p/core/network" - "github.com/prysmaticlabs/go-bitfield" logTest "github.com/sirupsen/logrus/hooks/test" ) diff --git a/beacon-chain/sync/rpc_metadata.go b/beacon-chain/sync/rpc_metadata.go index 17111b7e6a..5fe051741a 100644 --- a/beacon-chain/sync/rpc_metadata.go +++ b/beacon-chain/sync/rpc_metadata.go @@ -3,6 +3,7 @@ package sync import ( "context" + 
"github.com/OffchainLabs/go-bitfield" "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p" "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/types" "github.com/OffchainLabs/prysm/v6/config/params" @@ -14,7 +15,6 @@ import ( libp2pcore "github.com/libp2p/go-libp2p/core" "github.com/libp2p/go-libp2p/core/peer" "github.com/pkg/errors" - "github.com/prysmaticlabs/go-bitfield" ) // metaDataHandler reads the incoming metadata RPC request from the peer. diff --git a/beacon-chain/sync/rpc_metadata_test.go b/beacon-chain/sync/rpc_metadata_test.go index e0288b9111..cba6f95f97 100644 --- a/beacon-chain/sync/rpc_metadata_test.go +++ b/beacon-chain/sync/rpc_metadata_test.go @@ -6,6 +6,7 @@ import ( "testing" "time" + "github.com/OffchainLabs/go-bitfield" mock "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/testing" db "github.com/OffchainLabs/prysm/v6/beacon-chain/db/testing" "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p" @@ -24,7 +25,6 @@ import ( "github.com/libp2p/go-libp2p/core/network" "github.com/libp2p/go-libp2p/core/protocol" libp2pquic "github.com/libp2p/go-libp2p/p2p/transport/quic" - "github.com/prysmaticlabs/go-bitfield" ) func TestMetaDataRPCHandler_ReceivesMetadata(t *testing.T) { diff --git a/beacon-chain/sync/subscriber_beacon_aggregate_proof_test.go b/beacon-chain/sync/subscriber_beacon_aggregate_proof_test.go index 8e1f9199dc..f4767a416e 100644 --- a/beacon-chain/sync/subscriber_beacon_aggregate_proof_test.go +++ b/beacon-chain/sync/subscriber_beacon_aggregate_proof_test.go @@ -3,6 +3,7 @@ package sync import ( "testing" + "github.com/OffchainLabs/go-bitfield" mock "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/testing" "github.com/OffchainLabs/prysm/v6/beacon-chain/operations/attestations" lruwrpr "github.com/OffchainLabs/prysm/v6/cache/lru" @@ -11,7 +12,6 @@ import ( "github.com/OffchainLabs/prysm/v6/testing/assert" "github.com/OffchainLabs/prysm/v6/testing/require" "github.com/OffchainLabs/prysm/v6/testing/util" - "github.com/prysmaticlabs/go-bitfield" ) func TestBeaconAggregateProofSubscriber_CanSaveAggregatedAttestation(t *testing.T) { diff --git a/beacon-chain/sync/subscriber_beacon_blocks_test.go b/beacon-chain/sync/subscriber_beacon_blocks_test.go index 4652542baf..ac0bc8ea76 100644 --- a/beacon-chain/sync/subscriber_beacon_blocks_test.go +++ b/beacon-chain/sync/subscriber_beacon_blocks_test.go @@ -3,6 +3,7 @@ package sync import ( "testing" + "github.com/OffchainLabs/go-bitfield" "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain" "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/kzg" chainMock "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/testing" @@ -23,7 +24,6 @@ import ( "github.com/OffchainLabs/prysm/v6/testing/util" "github.com/OffchainLabs/prysm/v6/time" "github.com/pkg/errors" - "github.com/prysmaticlabs/go-bitfield" "google.golang.org/protobuf/proto" ) diff --git a/beacon-chain/sync/validate_aggregate_proof_test.go b/beacon-chain/sync/validate_aggregate_proof_test.go index 035f1d81e2..1244ed4888 100644 --- a/beacon-chain/sync/validate_aggregate_proof_test.go +++ b/beacon-chain/sync/validate_aggregate_proof_test.go @@ -7,6 +7,7 @@ import ( "testing" "time" + "github.com/OffchainLabs/go-bitfield" mock "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/testing" "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" "github.com/OffchainLabs/prysm/v6/beacon-chain/core/signing" @@ -29,7 +30,6 @@ import ( "github.com/OffchainLabs/prysm/v6/testing/util" pubsub "github.com/libp2p/go-libp2p-pubsub" pubsubpb 
"github.com/libp2p/go-libp2p-pubsub/pb" - "github.com/prysmaticlabs/go-bitfield" ) func TestVerifyIndexInCommittee_CanVerify(t *testing.T) { diff --git a/beacon-chain/sync/validate_beacon_attestation_test.go b/beacon-chain/sync/validate_beacon_attestation_test.go index 991010dfe1..dc47650c03 100644 --- a/beacon-chain/sync/validate_beacon_attestation_test.go +++ b/beacon-chain/sync/validate_beacon_attestation_test.go @@ -7,6 +7,7 @@ import ( "testing" "time" + "github.com/OffchainLabs/go-bitfield" mockChain "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/testing" "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" "github.com/OffchainLabs/prysm/v6/beacon-chain/core/signing" @@ -25,7 +26,6 @@ import ( "github.com/OffchainLabs/prysm/v6/testing/util" pubsub "github.com/libp2p/go-libp2p-pubsub" pubsubpb "github.com/libp2p/go-libp2p-pubsub/pb" - "github.com/prysmaticlabs/go-bitfield" ) func TestService_validateCommitteeIndexBeaconAttestation(t *testing.T) { diff --git a/beacon-chain/sync/validate_proposer_slashing_test.go b/beacon-chain/sync/validate_proposer_slashing_test.go index 7ca4ea5725..2e6a76b609 100644 --- a/beacon-chain/sync/validate_proposer_slashing_test.go +++ b/beacon-chain/sync/validate_proposer_slashing_test.go @@ -8,6 +8,7 @@ import ( "testing" "time" + "github.com/OffchainLabs/go-bitfield" mock "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/testing" "github.com/OffchainLabs/prysm/v6/beacon-chain/core/signing" coreTime "github.com/OffchainLabs/prysm/v6/beacon-chain/core/time" @@ -26,7 +27,6 @@ import ( "github.com/OffchainLabs/prysm/v6/testing/require" pubsub "github.com/libp2p/go-libp2p-pubsub" pubsubpb "github.com/libp2p/go-libp2p-pubsub/pb" - "github.com/prysmaticlabs/go-bitfield" ) func setupValidProposerSlashing(t *testing.T) (*ethpb.ProposerSlashing, state.BeaconState) { diff --git a/beacon-chain/sync/validate_sync_contribution_proof_test.go b/beacon-chain/sync/validate_sync_contribution_proof_test.go index 7b99ad7a6f..1fd598e062 100644 --- a/beacon-chain/sync/validate_sync_contribution_proof_test.go +++ b/beacon-chain/sync/validate_sync_contribution_proof_test.go @@ -6,6 +6,7 @@ import ( "testing" "time" + "github.com/OffchainLabs/go-bitfield" mockChain "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/testing" "github.com/OffchainLabs/prysm/v6/beacon-chain/core/altair" "github.com/OffchainLabs/prysm/v6/beacon-chain/core/feed" @@ -38,7 +39,6 @@ import ( pubsub "github.com/libp2p/go-libp2p-pubsub" pubsubpb "github.com/libp2p/go-libp2p-pubsub/pb" "github.com/libp2p/go-libp2p/core/peer" - "github.com/prysmaticlabs/go-bitfield" ) func TestService_ValidateSyncContributionAndProof(t *testing.T) { diff --git a/changelog/ttsao-go_bitfield.md b/changelog/ttsao-go_bitfield.md new file mode 100644 index 0000000000..391e6a4b77 --- /dev/null +++ b/changelog/ttsao-go_bitfield.md @@ -0,0 +1,3 @@ +### Changed + +- Updated go bitfield from prysmaticlabs to offchainlabs diff --git a/cmd/prysmctl/p2p/client.go b/cmd/prysmctl/p2p/client.go index 23ba11242e..5a660d9132 100644 --- a/cmd/prysmctl/p2p/client.go +++ b/cmd/prysmctl/p2p/client.go @@ -7,6 +7,7 @@ import ( "net" "time" + "github.com/OffchainLabs/go-bitfield" "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p" "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/encoder" "github.com/OffchainLabs/prysm/v6/config/params" @@ -31,7 +32,6 @@ import ( libp2ptcp "github.com/libp2p/go-libp2p/p2p/transport/tcp" "github.com/pkg/errors" ssz "github.com/prysmaticlabs/fastssz" - 
"github.com/prysmaticlabs/go-bitfield" "google.golang.org/grpc" "google.golang.org/protobuf/types/known/emptypb" ) diff --git a/consensus-types/blocks/proto_test.go b/consensus-types/blocks/proto_test.go index af513ee979..1f64ec639b 100644 --- a/consensus-types/blocks/proto_test.go +++ b/consensus-types/blocks/proto_test.go @@ -3,6 +3,7 @@ package blocks import ( "testing" + "github.com/OffchainLabs/go-bitfield" "github.com/OffchainLabs/prysm/v6/config/params" "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" @@ -11,7 +12,6 @@ import ( "github.com/OffchainLabs/prysm/v6/runtime/version" "github.com/OffchainLabs/prysm/v6/testing/assert" "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/prysmaticlabs/go-bitfield" ) type fields struct { diff --git a/consensus-types/hdiff/state_diff.go b/consensus-types/hdiff/state_diff.go index 608b37e115..eb009808a3 100644 --- a/consensus-types/hdiff/state_diff.go +++ b/consensus-types/hdiff/state_diff.go @@ -6,6 +6,7 @@ import ( "encoding/binary" "slices" + "github.com/OffchainLabs/go-bitfield" "github.com/OffchainLabs/prysm/v6/beacon-chain/core/altair" "github.com/OffchainLabs/prysm/v6/beacon-chain/core/capella" "github.com/OffchainLabs/prysm/v6/beacon-chain/core/deneb" @@ -24,7 +25,6 @@ import ( "github.com/golang/snappy" "github.com/pkg/errors" ssz "github.com/prysmaticlabs/fastssz" - "github.com/prysmaticlabs/go-bitfield" "github.com/sirupsen/logrus" "google.golang.org/protobuf/proto" ) diff --git a/consensus-types/primitives/committee_bits_mainnet.go b/consensus-types/primitives/committee_bits_mainnet.go index b815dd12c3..d507d7fd79 100644 --- a/consensus-types/primitives/committee_bits_mainnet.go +++ b/consensus-types/primitives/committee_bits_mainnet.go @@ -2,7 +2,7 @@ package primitives -import "github.com/prysmaticlabs/go-bitfield" +import "github.com/OffchainLabs/go-bitfield" func NewAttestationCommitteeBits() bitfield.Bitvector64 { return bitfield.NewBitvector64() diff --git a/consensus-types/primitives/committee_bits_minimal.go b/consensus-types/primitives/committee_bits_minimal.go index caec3090a8..d9ad25eca1 100644 --- a/consensus-types/primitives/committee_bits_minimal.go +++ b/consensus-types/primitives/committee_bits_minimal.go @@ -2,7 +2,7 @@ package primitives -import "github.com/prysmaticlabs/go-bitfield" +import "github.com/OffchainLabs/go-bitfield" func NewAttestationCommitteeBits() bitfield.Bitvector4 { return bitfield.NewBitvector4() diff --git a/consensus-types/wrapper/metadata.go b/consensus-types/wrapper/metadata.go index e0728f5bb8..1fb14668ab 100644 --- a/consensus-types/wrapper/metadata.go +++ b/consensus-types/wrapper/metadata.go @@ -1,10 +1,10 @@ package wrapper import ( + "github.com/OffchainLabs/go-bitfield" pb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1/metadata" "github.com/OffchainLabs/prysm/v6/runtime/version" - "github.com/prysmaticlabs/go-bitfield" "google.golang.org/protobuf/proto" ) diff --git a/deps.bzl b/deps.bzl index fda38f0761..fe710020b4 100644 --- a/deps.bzl +++ b/deps.bzl @@ -2877,9 +2877,9 @@ def prysm_deps(): ) go_repository( name = "com_github_prysmaticlabs_go_bitfield", - importpath = "github.com/prysmaticlabs/go-bitfield", - sum = "h1:ATgOe+abbzfx9kCPeXIW4fiWyDdxlwHw07j8UGhdTd4=", - version = "v0.0.0-20240328144219-a1caa50c3a1e", + importpath = "github.com/OffchainLabs/go-bitfield", + sum = "h1:d/SJkN8/9Ca+1YmuDiUJxAiV4w/a9S8NcsG7GMQSrVI=", + version = 
"v0.0.0-20251031151322-f427d04d8506", ) go_repository( name = "com_github_prysmaticlabs_gohashtree", diff --git a/encoding/ssz/helpers.go b/encoding/ssz/helpers.go index fa46ec927c..f2c6d4ca4e 100644 --- a/encoding/ssz/helpers.go +++ b/encoding/ssz/helpers.go @@ -5,10 +5,10 @@ import ( "bytes" "encoding/binary" + "github.com/OffchainLabs/go-bitfield" "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" "github.com/minio/sha256-simd" "github.com/pkg/errors" - "github.com/prysmaticlabs/go-bitfield" ) const ( diff --git a/encoding/ssz/helpers_test.go b/encoding/ssz/helpers_test.go index 651d9077e3..db66f467ff 100644 --- a/encoding/ssz/helpers_test.go +++ b/encoding/ssz/helpers_test.go @@ -3,10 +3,10 @@ package ssz_test import ( "testing" + "github.com/OffchainLabs/go-bitfield" "github.com/OffchainLabs/prysm/v6/encoding/ssz" "github.com/OffchainLabs/prysm/v6/testing/assert" "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/prysmaticlabs/go-bitfield" ) const merkleizingListLimitError = "merkleizing list that is too large, over limit" diff --git a/encoding/ssz/merkleize_test.go b/encoding/ssz/merkleize_test.go index f0ff1b29d8..c908b644c7 100644 --- a/encoding/ssz/merkleize_test.go +++ b/encoding/ssz/merkleize_test.go @@ -3,12 +3,12 @@ package ssz_test import ( "testing" + "github.com/OffchainLabs/go-bitfield" "github.com/OffchainLabs/prysm/v6/crypto/hash" "github.com/OffchainLabs/prysm/v6/encoding/ssz" ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" "github.com/OffchainLabs/prysm/v6/testing/assert" "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/prysmaticlabs/go-bitfield" ) func TestGetDepth(t *testing.T) { diff --git a/encoding/ssz/query/bitlist.go b/encoding/ssz/query/bitlist.go index 91e39690c9..b96321d841 100644 --- a/encoding/ssz/query/bitlist.go +++ b/encoding/ssz/query/bitlist.go @@ -4,7 +4,7 @@ import ( "errors" "fmt" - "github.com/prysmaticlabs/go-bitfield" + "github.com/OffchainLabs/go-bitfield" ) // bitlistInfo holds information about a SSZ Bitlist type. diff --git a/encoding/ssz/query/query_test.go b/encoding/ssz/query/query_test.go index 3935bd9ea0..3c8de53cc1 100644 --- a/encoding/ssz/query/query_test.go +++ b/encoding/ssz/query/query_test.go @@ -4,11 +4,11 @@ import ( "math" "testing" + "github.com/OffchainLabs/go-bitfield" "github.com/OffchainLabs/prysm/v6/encoding/ssz/query" "github.com/OffchainLabs/prysm/v6/encoding/ssz/query/testutil" sszquerypb "github.com/OffchainLabs/prysm/v6/proto/ssz_query/testing" "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/prysmaticlabs/go-bitfield" ) func TestSize(t *testing.T) { diff --git a/encoding/ssz/query/tag_parser.go b/encoding/ssz/query/tag_parser.go index 0dd9e26735..e60733676d 100644 --- a/encoding/ssz/query/tag_parser.go +++ b/encoding/ssz/query/tag_parser.go @@ -17,7 +17,7 @@ const ( sszSizeTag = "ssz-size" // castTypeTag specifies special custom casting instructions. - // e.g., "github.com/prysmaticlabs/go-bitfield.Bitlist". + // e.g., "github.com/OffchainLabs/go-bitfield.Bitlist". castTypeTag = "cast-type" ) diff --git a/encoding/ssz/query/testutil/util.go b/encoding/ssz/query/testutil/util.go index f9ec4bd34f..c85058a314 100644 --- a/encoding/ssz/query/testutil/util.go +++ b/encoding/ssz/query/testutil/util.go @@ -5,8 +5,8 @@ import ( "reflect" "strings" + "github.com/OffchainLabs/go-bitfield" ssz "github.com/prysmaticlabs/fastssz" - "github.com/prysmaticlabs/go-bitfield" ) // marshalAny marshals any value into SSZ format. 
diff --git a/go.mod b/go.mod index 8bb6cdf86a..36f4d14f33 100644 --- a/go.mod +++ b/go.mod @@ -5,6 +5,7 @@ go 1.25.1 require ( github.com/MariusVanDerWijden/FuzzyVM v0.0.0-20240516070431-7828990cad7d github.com/MariusVanDerWijden/tx-fuzz v1.4.0 + github.com/OffchainLabs/go-bitfield v0.0.0-20251031151322-f427d04d8506 github.com/aristanetworks/goarista v0.0.0-20200805130819-fd197cf57d96 github.com/bazelbuild/rules_go v0.23.2 github.com/btcsuite/btcd/btcec/v2 v2.3.4 @@ -60,7 +61,6 @@ require ( github.com/prometheus/client_model v0.6.1 github.com/prometheus/prom2json v1.3.0 github.com/prysmaticlabs/fastssz v0.0.0-20241008181541-518c4ce73516 - github.com/prysmaticlabs/go-bitfield v0.0.0-20240328144219-a1caa50c3a1e github.com/prysmaticlabs/prombbolt v0.0.0-20210126082820-9b7adba6db7c github.com/prysmaticlabs/protoc-gen-go-cast v0.0.0-20230228205207-28762a7b9294 github.com/r3labs/sse/v2 v2.10.0 diff --git a/go.sum b/go.sum index ed1ef641d7..b8dda499b2 100644 --- a/go.sum +++ b/go.sum @@ -57,6 +57,8 @@ github.com/MariusVanDerWijden/tx-fuzz v1.4.0 h1:Tq4lXivsR8mtoP4RpasUDIUpDLHfN1Yh github.com/MariusVanDerWijden/tx-fuzz v1.4.0/go.mod h1:gmOVECg7o5FY5VU3DQ/fY0zTk/ExBdMkUGz0vA8qqms= github.com/Microsoft/go-winio v0.6.2 h1:F2VQgta7ecxGYO8k3ZZz3RS8fVIXVxONVUPlNERoyfY= github.com/Microsoft/go-winio v0.6.2/go.mod h1:yd8OoFMLzJbo9gZq8j5qaps8bJ9aShtEA8Ipt1oGCvU= +github.com/OffchainLabs/go-bitfield v0.0.0-20251031151322-f427d04d8506 h1:d/SJkN8/9Ca+1YmuDiUJxAiV4w/a9S8NcsG7GMQSrVI= +github.com/OffchainLabs/go-bitfield v0.0.0-20251031151322-f427d04d8506/go.mod h1:6TZI4FU6zT8x6ZfWa1J8YQ2NgW0wLV/W3fHRca8ISBo= github.com/Shopify/sarama v1.19.0/go.mod h1:FVkBWblsNy7DGZRfXLU0O9RCGt5g3g3yEuWXgklEdEo= github.com/Shopify/sarama v1.26.1/go.mod h1:NbSGBSSndYaIhRcBtY9V0U7AyH+x71bG668AuWys/yU= github.com/Shopify/toxiproxy v2.1.4+incompatible/go.mod h1:OXgGpZ6Cli1/URJOF1DMxUHB2q5Ap20/P/eIdh4G0pI= @@ -906,8 +908,6 @@ github.com/prometheus/prom2json v1.3.0/go.mod h1:rMN7m0ApCowcoDlypBHlkNbp5eJQf/+ github.com/prysmaticlabs/fastssz v0.0.0-20241008181541-518c4ce73516 h1:xuVAdtz5ShYblG2sPyb4gw01DF8InbOI/kBCQjk7NiM= github.com/prysmaticlabs/fastssz v0.0.0-20241008181541-518c4ce73516/go.mod h1:h2OlIZD/M6wFvV3YMZbW16lFgh3Rsye00G44J2cwLyU= github.com/prysmaticlabs/go-bitfield v0.0.0-20210108222456-8e92c3709aa0/go.mod h1:hCwmef+4qXWjv0jLDbQdWnL0Ol7cS7/lCSS26WR+u6s= -github.com/prysmaticlabs/go-bitfield v0.0.0-20240328144219-a1caa50c3a1e h1:ATgOe+abbzfx9kCPeXIW4fiWyDdxlwHw07j8UGhdTd4= -github.com/prysmaticlabs/go-bitfield v0.0.0-20240328144219-a1caa50c3a1e/go.mod h1:wmuf/mdK4VMD+jA9ThwcUKjg3a2XWM9cVfFYjDyY4j4= github.com/prysmaticlabs/gohashtree v0.0.5-beta h1:ct41mg7HyIZd7uoSM/ud23f+3DxQG9tlMlQG+BVX23c= github.com/prysmaticlabs/gohashtree v0.0.5-beta/go.mod h1:HRuvtXLZ4WkaB1MItToVH2e8ZwKwZPY5/Rcby+CvvLY= github.com/prysmaticlabs/prombbolt v0.0.0-20210126082820-9b7adba6db7c h1:9PHRCuO/VN0s9k+RmLykho7AjDxblNYI5bYKed16NPU= diff --git a/proto/eth/v1/attestation.pb.go b/proto/eth/v1/attestation.pb.go index 1945918134..598cb7d6e0 100755 --- a/proto/eth/v1/attestation.pb.go +++ b/proto/eth/v1/attestation.pb.go @@ -10,9 +10,9 @@ import ( reflect "reflect" sync "sync" + github_com_prysmaticlabs_go_bitfield "github.com/OffchainLabs/go-bitfield" github_com_OffchainLabs_prysm_v6_consensus_types_primitives "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" _ "github.com/OffchainLabs/prysm/v6/proto/eth/ext" - github_com_prysmaticlabs_go_bitfield "github.com/prysmaticlabs/go-bitfield" protoreflect 
"google.golang.org/protobuf/reflect/protoreflect" protoimpl "google.golang.org/protobuf/runtime/protoimpl" _ "google.golang.org/protobuf/types/descriptorpb" @@ -27,7 +27,7 @@ const ( type Attestation struct { state protoimpl.MessageState `protogen:"open.v1"` - AggregationBits github_com_prysmaticlabs_go_bitfield.Bitlist `protobuf:"bytes,1,opt,name=aggregation_bits,json=aggregationBits,proto3" json:"aggregation_bits,omitempty" cast-type:"github.com/prysmaticlabs/go-bitfield.Bitlist" ssz-max:"2048"` + AggregationBits github_com_prysmaticlabs_go_bitfield.Bitlist `protobuf:"bytes,1,opt,name=aggregation_bits,json=aggregationBits,proto3" json:"aggregation_bits,omitempty" cast-type:"github.com/OffchainLabs/go-bitfield.Bitlist" ssz-max:"2048"` Data *AttestationData `protobuf:"bytes,2,opt,name=data,proto3" json:"data,omitempty"` Signature []byte `protobuf:"bytes,3,opt,name=signature,proto3" json:"signature,omitempty" ssz-size:"96"` unknownFields protoimpl.UnknownFields diff --git a/proto/eth/v1/attestation.proto b/proto/eth/v1/attestation.proto index e844b839c7..f86a0fb14c 100644 --- a/proto/eth/v1/attestation.proto +++ b/proto/eth/v1/attestation.proto @@ -31,7 +31,7 @@ message Attestation { bytes aggregation_bits = 1 [ (ethereum.eth.ext.ssz_max) = "2048", (ethereum.eth.ext.cast_type) = - "github.com/prysmaticlabs/go-bitfield.Bitlist" + "github.com/OffchainLabs/go-bitfield.Bitlist" ]; AttestationData data = 2; diff --git a/proto/eth/v1/beacon_block.pb.go b/proto/eth/v1/beacon_block.pb.go index 962741fd1f..35aaa2e3db 100755 --- a/proto/eth/v1/beacon_block.pb.go +++ b/proto/eth/v1/beacon_block.pb.go @@ -10,9 +10,9 @@ import ( reflect "reflect" sync "sync" + github_com_prysmaticlabs_go_bitfield "github.com/OffchainLabs/go-bitfield" github_com_OffchainLabs_prysm_v6_consensus_types_primitives "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" _ "github.com/OffchainLabs/prysm/v6/proto/eth/ext" - github_com_prysmaticlabs_go_bitfield "github.com/prysmaticlabs/go-bitfield" protoreflect "google.golang.org/protobuf/reflect/protoreflect" protoimpl "google.golang.org/protobuf/runtime/protoimpl" _ "google.golang.org/protobuf/types/descriptorpb" @@ -763,7 +763,7 @@ func (x *IndexedAttestation) GetSignature() []byte { type SyncAggregate struct { state protoimpl.MessageState `protogen:"open.v1"` - SyncCommitteeBits github_com_prysmaticlabs_go_bitfield.Bitvector512 `protobuf:"bytes,1,opt,name=sync_committee_bits,json=syncCommitteeBits,proto3" json:"sync_committee_bits,omitempty" cast-type:"github.com/prysmaticlabs/go-bitfield.Bitvector512" ssz-size:"64"` + SyncCommitteeBits github_com_prysmaticlabs_go_bitfield.Bitvector512 `protobuf:"bytes,1,opt,name=sync_committee_bits,json=syncCommitteeBits,proto3" json:"sync_committee_bits,omitempty" cast-type:"github.com/OffchainLabs/go-bitfield.Bitvector512" ssz-size:"64"` SyncCommitteeSignature []byte `protobuf:"bytes,2,opt,name=sync_committee_signature,json=syncCommitteeSignature,proto3" json:"sync_committee_signature,omitempty" ssz-size:"96"` unknownFields protoimpl.UnknownFields sizeCache protoimpl.SizeCache diff --git a/proto/prysm/v1alpha1/attestation.go b/proto/prysm/v1alpha1/attestation.go index 5c76c74683..8c185940b0 100644 --- a/proto/prysm/v1alpha1/attestation.go +++ b/proto/prysm/v1alpha1/attestation.go @@ -1,11 +1,11 @@ package eth import ( + "github.com/OffchainLabs/go-bitfield" "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" "github.com/OffchainLabs/prysm/v6/runtime/version" ssz 
"github.com/prysmaticlabs/fastssz" - "github.com/prysmaticlabs/go-bitfield" "google.golang.org/protobuf/proto" ) diff --git a/proto/prysm/v1alpha1/attestation.pb.go b/proto/prysm/v1alpha1/attestation.pb.go index 6868bd4ed5..e05f9187a2 100755 --- a/proto/prysm/v1alpha1/attestation.pb.go +++ b/proto/prysm/v1alpha1/attestation.pb.go @@ -10,9 +10,9 @@ import ( reflect "reflect" sync "sync" + github_com_prysmaticlabs_go_bitfield "github.com/OffchainLabs/go-bitfield" github_com_OffchainLabs_prysm_v6_consensus_types_primitives "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" _ "github.com/OffchainLabs/prysm/v6/proto/eth/ext" - github_com_prysmaticlabs_go_bitfield "github.com/prysmaticlabs/go-bitfield" protoreflect "google.golang.org/protobuf/reflect/protoreflect" protoimpl "google.golang.org/protobuf/runtime/protoimpl" ) @@ -138,7 +138,7 @@ func (x *AggregateAttestationAndProof) GetSelectionProof() []byte { type Attestation struct { state protoimpl.MessageState `protogen:"open.v1"` - AggregationBits github_com_prysmaticlabs_go_bitfield.Bitlist `protobuf:"bytes,1,opt,name=aggregation_bits,json=aggregationBits,proto3" json:"aggregation_bits,omitempty" cast-type:"github.com/prysmaticlabs/go-bitfield.Bitlist" ssz-max:"2048"` + AggregationBits github_com_prysmaticlabs_go_bitfield.Bitlist `protobuf:"bytes,1,opt,name=aggregation_bits,json=aggregationBits,proto3" json:"aggregation_bits,omitempty" cast-type:"github.com/OffchainLabs/go-bitfield.Bitlist" ssz-max:"2048"` Data *AttestationData `protobuf:"bytes,2,opt,name=data,proto3" json:"data,omitempty"` Signature []byte `protobuf:"bytes,3,opt,name=signature,proto3" json:"signature,omitempty" ssz-size:"96"` unknownFields protoimpl.UnknownFields @@ -438,10 +438,10 @@ func (x *AggregateAttestationAndProofElectra) GetSelectionProof() []byte { type AttestationElectra struct { state protoimpl.MessageState `protogen:"open.v1"` - AggregationBits github_com_prysmaticlabs_go_bitfield.Bitlist `protobuf:"bytes,1,opt,name=aggregation_bits,json=aggregationBits,proto3" json:"aggregation_bits,omitempty" cast-type:"github.com/prysmaticlabs/go-bitfield.Bitlist" ssz-max:"131072"` + AggregationBits github_com_prysmaticlabs_go_bitfield.Bitlist `protobuf:"bytes,1,opt,name=aggregation_bits,json=aggregationBits,proto3" json:"aggregation_bits,omitempty" cast-type:"github.com/OffchainLabs/go-bitfield.Bitlist" ssz-max:"131072"` Data *AttestationData `protobuf:"bytes,2,opt,name=data,proto3" json:"data,omitempty"` Signature []byte `protobuf:"bytes,3,opt,name=signature,proto3" json:"signature,omitempty" ssz-size:"96"` - CommitteeBits github_com_prysmaticlabs_go_bitfield.Bitvector64 `protobuf:"bytes,4,opt,name=committee_bits,json=committeeBits,proto3" json:"committee_bits,omitempty" cast-type:"github.com/prysmaticlabs/go-bitfield.Bitvector64" ssz-size:"8"` + CommitteeBits github_com_prysmaticlabs_go_bitfield.Bitvector64 `protobuf:"bytes,4,opt,name=committee_bits,json=committeeBits,proto3" json:"committee_bits,omitempty" cast-type:"github.com/OffchainLabs/go-bitfield.Bitvector64" ssz-size:"8"` unknownFields protoimpl.UnknownFields sizeCache protoimpl.SizeCache } diff --git a/proto/prysm/v1alpha1/attestation.proto b/proto/prysm/v1alpha1/attestation.proto index 93f7ee4ea4..b3069fd6cf 100644 --- a/proto/prysm/v1alpha1/attestation.proto +++ b/proto/prysm/v1alpha1/attestation.proto @@ -57,7 +57,7 @@ message Attestation { bytes aggregation_bits = 1 [ (ethereum.eth.ext.ssz_max) = "2048", (ethereum.eth.ext.cast_type) = - "github.com/prysmaticlabs/go-bitfield.Bitlist" + 
"github.com/OffchainLabs/go-bitfield.Bitlist" ]; AttestationData data = 2; @@ -139,7 +139,7 @@ message AttestationElectra { bytes aggregation_bits = 1 [ (ethereum.eth.ext.ssz_max) = "max_attesting_indices.size", (ethereum.eth.ext.cast_type) = - "github.com/prysmaticlabs/go-bitfield.Bitlist" + "github.com/OffchainLabs/go-bitfield.Bitlist" ]; AttestationData data = 2; diff --git a/proto/prysm/v1alpha1/attestation/aggregation/aggregation.go b/proto/prysm/v1alpha1/attestation/aggregation/aggregation.go index 16e97170bb..ad161f2e8d 100644 --- a/proto/prysm/v1alpha1/attestation/aggregation/aggregation.go +++ b/proto/prysm/v1alpha1/attestation/aggregation/aggregation.go @@ -4,7 +4,7 @@ package aggregation import ( "errors" - "github.com/prysmaticlabs/go-bitfield" + "github.com/OffchainLabs/go-bitfield" "github.com/sirupsen/logrus" ) diff --git a/proto/prysm/v1alpha1/attestation/aggregation/attestations/attestations_test.go b/proto/prysm/v1alpha1/attestation/aggregation/attestations/attestations_test.go index 910cf72039..0d2cd0e85a 100644 --- a/proto/prysm/v1alpha1/attestation/aggregation/attestations/attestations_test.go +++ b/proto/prysm/v1alpha1/attestation/aggregation/attestations/attestations_test.go @@ -6,6 +6,7 @@ import ( "sort" "testing" + "github.com/OffchainLabs/go-bitfield" "github.com/OffchainLabs/prysm/v6/config/params" "github.com/OffchainLabs/prysm/v6/encoding/ssz/equality" ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" @@ -14,7 +15,6 @@ import ( aggtesting "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1/attestation/aggregation/testing" "github.com/OffchainLabs/prysm/v6/testing/assert" "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/prysmaticlabs/go-bitfield" "github.com/sirupsen/logrus" ) diff --git a/proto/prysm/v1alpha1/attestation/aggregation/attestations/maxcover.go b/proto/prysm/v1alpha1/attestation/aggregation/attestations/maxcover.go index 698d03b68a..0b91af8c59 100644 --- a/proto/prysm/v1alpha1/attestation/aggregation/attestations/maxcover.go +++ b/proto/prysm/v1alpha1/attestation/aggregation/attestations/maxcover.go @@ -3,12 +3,12 @@ package attestations import ( "sort" + "github.com/OffchainLabs/go-bitfield" "github.com/OffchainLabs/prysm/v6/crypto/bls" ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1/attestation/aggregation" "github.com/OffchainLabs/prysm/v6/runtime/version" "github.com/pkg/errors" - "github.com/prysmaticlabs/go-bitfield" ) // MaxCoverAttestationAggregation relies on Maximum Coverage greedy algorithm for aggregation. 
diff --git a/proto/prysm/v1alpha1/attestation/aggregation/attestations/maxcover_test.go b/proto/prysm/v1alpha1/attestation/aggregation/attestations/maxcover_test.go index cc03137b9f..a7d03a8b7a 100644 --- a/proto/prysm/v1alpha1/attestation/aggregation/attestations/maxcover_test.go +++ b/proto/prysm/v1alpha1/attestation/aggregation/attestations/maxcover_test.go @@ -3,12 +3,12 @@ package attestations_test import ( "testing" + "github.com/OffchainLabs/go-bitfield" "github.com/OffchainLabs/prysm/v6/crypto/bls" ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1/attestation/aggregation" "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1/attestation/aggregation/attestations" "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/prysmaticlabs/go-bitfield" ) func TestAggregateAttestations_MaxCover_NewMaxCover(t *testing.T) { diff --git a/proto/prysm/v1alpha1/attestation/aggregation/maxcover.go b/proto/prysm/v1alpha1/attestation/aggregation/maxcover.go index e4d7dd6893..1bf68af446 100644 --- a/proto/prysm/v1alpha1/attestation/aggregation/maxcover.go +++ b/proto/prysm/v1alpha1/attestation/aggregation/maxcover.go @@ -3,8 +3,8 @@ package aggregation import ( "sort" + "github.com/OffchainLabs/go-bitfield" "github.com/pkg/errors" - "github.com/prysmaticlabs/go-bitfield" ) // ErrInvalidMaxCoverProblem is returned when Maximum Coverage problem was initialized incorrectly. diff --git a/proto/prysm/v1alpha1/attestation/aggregation/maxcover_bench_test.go b/proto/prysm/v1alpha1/attestation/aggregation/maxcover_bench_test.go index 250551cff0..61dea9be3e 100644 --- a/proto/prysm/v1alpha1/attestation/aggregation/maxcover_bench_test.go +++ b/proto/prysm/v1alpha1/attestation/aggregation/maxcover_bench_test.go @@ -4,10 +4,10 @@ import ( "fmt" "testing" + "github.com/OffchainLabs/go-bitfield" "github.com/OffchainLabs/prysm/v6/config/params" "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1/attestation/aggregation" aggtesting "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1/attestation/aggregation/testing" - "github.com/prysmaticlabs/go-bitfield" ) func BenchmarkMaxCoverProblem_MaxCover(b *testing.B) { diff --git a/proto/prysm/v1alpha1/attestation/aggregation/maxcover_test.go b/proto/prysm/v1alpha1/attestation/aggregation/maxcover_test.go index 120c968379..7c385062b2 100644 --- a/proto/prysm/v1alpha1/attestation/aggregation/maxcover_test.go +++ b/proto/prysm/v1alpha1/attestation/aggregation/maxcover_test.go @@ -5,9 +5,9 @@ import ( "sort" "testing" + "github.com/OffchainLabs/go-bitfield" aggtesting "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1/attestation/aggregation/testing" "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/prysmaticlabs/go-bitfield" ) func TestMaxCover_MaxCoverCandidates_filter(t *testing.T) { diff --git a/proto/prysm/v1alpha1/attestation/aggregation/sync_contribution/naive_test.go b/proto/prysm/v1alpha1/attestation/aggregation/sync_contribution/naive_test.go index ad5c9f4da3..5c9c91af54 100644 --- a/proto/prysm/v1alpha1/attestation/aggregation/sync_contribution/naive_test.go +++ b/proto/prysm/v1alpha1/attestation/aggregation/sync_contribution/naive_test.go @@ -5,13 +5,13 @@ import ( "sort" "testing" + "github.com/OffchainLabs/go-bitfield" "github.com/OffchainLabs/prysm/v6/crypto/bls" ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1/attestation/aggregation" aggtesting 
"github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1/attestation/aggregation/testing" "github.com/OffchainLabs/prysm/v6/testing/assert" "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/prysmaticlabs/go-bitfield" ) func TestAggregateAttestations_aggregate(t *testing.T) { diff --git a/proto/prysm/v1alpha1/attestation/aggregation/testing/bitlistutils.go b/proto/prysm/v1alpha1/attestation/aggregation/testing/bitlistutils.go index 2c2ede37c4..6593084efc 100644 --- a/proto/prysm/v1alpha1/attestation/aggregation/testing/bitlistutils.go +++ b/proto/prysm/v1alpha1/attestation/aggregation/testing/bitlistutils.go @@ -4,11 +4,11 @@ import ( "math/rand" "testing" + "github.com/OffchainLabs/go-bitfield" "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" "github.com/OffchainLabs/prysm/v6/crypto/bls" ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" "github.com/OffchainLabs/prysm/v6/time" - "github.com/prysmaticlabs/go-bitfield" ) // BitlistWithAllBitsSet creates list of bitlists with all bits set. diff --git a/proto/prysm/v1alpha1/attestation/attestation_utils.go b/proto/prysm/v1alpha1/attestation/attestation_utils.go index 94c93d9fae..19525cef86 100644 --- a/proto/prysm/v1alpha1/attestation/attestation_utils.go +++ b/proto/prysm/v1alpha1/attestation/attestation_utils.go @@ -10,6 +10,7 @@ import ( "slices" "sort" + "github.com/OffchainLabs/go-bitfield" "github.com/OffchainLabs/prysm/v6/beacon-chain/core/signing" "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" "github.com/OffchainLabs/prysm/v6/crypto/bls" @@ -17,7 +18,6 @@ import ( ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" "github.com/OffchainLabs/prysm/v6/runtime/version" "github.com/pkg/errors" - "github.com/prysmaticlabs/go-bitfield" ) // ConvertToIndexed converts attestation to (almost) indexed-verifiable form. 
diff --git a/proto/prysm/v1alpha1/attestation/attestation_utils_test.go b/proto/prysm/v1alpha1/attestation/attestation_utils_test.go index 44a19d3d21..d4b33bdb8a 100644 --- a/proto/prysm/v1alpha1/attestation/attestation_utils_test.go +++ b/proto/prysm/v1alpha1/attestation/attestation_utils_test.go @@ -3,6 +3,7 @@ package attestation_test import ( "testing" + "github.com/OffchainLabs/go-bitfield" fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" "github.com/OffchainLabs/prysm/v6/config/params" "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" @@ -10,7 +11,6 @@ import ( "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1/attestation" "github.com/OffchainLabs/prysm/v6/testing/assert" "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/prysmaticlabs/go-bitfield" ) func TestAttestingIndices(t *testing.T) { diff --git a/proto/prysm/v1alpha1/beacon_core_types.pb.go b/proto/prysm/v1alpha1/beacon_core_types.pb.go index 405fac3219..28077d3fb6 100755 --- a/proto/prysm/v1alpha1/beacon_core_types.pb.go +++ b/proto/prysm/v1alpha1/beacon_core_types.pb.go @@ -10,9 +10,9 @@ import ( reflect "reflect" sync "sync" + github_com_prysmaticlabs_go_bitfield "github.com/OffchainLabs/go-bitfield" github_com_OffchainLabs_prysm_v6_consensus_types_primitives "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" _ "github.com/OffchainLabs/prysm/v6/proto/eth/ext" - github_com_prysmaticlabs_go_bitfield "github.com/prysmaticlabs/go-bitfield" protoreflect "google.golang.org/protobuf/reflect/protoreflect" protoimpl "google.golang.org/protobuf/runtime/protoimpl" ) @@ -534,7 +534,7 @@ func (x *IndexedAttestationElectra) GetSignature() []byte { type SyncAggregate struct { state protoimpl.MessageState `protogen:"open.v1"` - SyncCommitteeBits github_com_prysmaticlabs_go_bitfield.Bitvector512 `protobuf:"bytes,1,opt,name=sync_committee_bits,json=syncCommitteeBits,proto3" json:"sync_committee_bits,omitempty" cast-type:"github.com/prysmaticlabs/go-bitfield.Bitvector512" ssz-size:"64"` + SyncCommitteeBits github_com_prysmaticlabs_go_bitfield.Bitvector512 `protobuf:"bytes,1,opt,name=sync_committee_bits,json=syncCommitteeBits,proto3" json:"sync_committee_bits,omitempty" cast-type:"github.com/OffchainLabs/go-bitfield.Bitvector512" ssz-size:"64"` SyncCommitteeSignature []byte `protobuf:"bytes,2,opt,name=sync_committee_signature,json=syncCommitteeSignature,proto3" json:"sync_committee_signature,omitempty" ssz-size:"96"` unknownFields protoimpl.UnknownFields sizeCache protoimpl.SizeCache diff --git a/proto/prysm/v1alpha1/beacon_state.pb.go b/proto/prysm/v1alpha1/beacon_state.pb.go index ceeaaa2a4e..7fbe790984 100755 --- a/proto/prysm/v1alpha1/beacon_state.pb.go +++ b/proto/prysm/v1alpha1/beacon_state.pb.go @@ -10,10 +10,10 @@ import ( reflect "reflect" sync "sync" + github_com_prysmaticlabs_go_bitfield "github.com/OffchainLabs/go-bitfield" github_com_OffchainLabs_prysm_v6_consensus_types_primitives "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" v1 "github.com/OffchainLabs/prysm/v6/proto/engine/v1" _ "github.com/OffchainLabs/prysm/v6/proto/eth/ext" - github_com_prysmaticlabs_go_bitfield "github.com/prysmaticlabs/go-bitfield" protoreflect "google.golang.org/protobuf/reflect/protoreflect" protoimpl "google.golang.org/protobuf/runtime/protoimpl" ) @@ -44,7 +44,7 @@ type BeaconState struct { Slashings []uint64 `protobuf:"varint,6001,rep,packed,name=slashings,proto3" json:"slashings,omitempty" ssz-size:"8192"` PreviousEpochAttestations []*PendingAttestation 
`protobuf:"bytes,7001,rep,name=previous_epoch_attestations,json=previousEpochAttestations,proto3" json:"previous_epoch_attestations,omitempty" ssz-max:"4096"` CurrentEpochAttestations []*PendingAttestation `protobuf:"bytes,7002,rep,name=current_epoch_attestations,json=currentEpochAttestations,proto3" json:"current_epoch_attestations,omitempty" ssz-max:"4096"` - JustificationBits github_com_prysmaticlabs_go_bitfield.Bitvector4 `protobuf:"bytes,8001,opt,name=justification_bits,json=justificationBits,proto3" json:"justification_bits,omitempty" cast-type:"github.com/prysmaticlabs/go-bitfield.Bitvector4" ssz-size:"1"` + JustificationBits github_com_prysmaticlabs_go_bitfield.Bitvector4 `protobuf:"bytes,8001,opt,name=justification_bits,json=justificationBits,proto3" json:"justification_bits,omitempty" cast-type:"github.com/OffchainLabs/go-bitfield.Bitvector4" ssz-size:"1"` PreviousJustifiedCheckpoint *Checkpoint `protobuf:"bytes,8002,opt,name=previous_justified_checkpoint,json=previousJustifiedCheckpoint,proto3" json:"previous_justified_checkpoint,omitempty"` CurrentJustifiedCheckpoint *Checkpoint `protobuf:"bytes,8003,opt,name=current_justified_checkpoint,json=currentJustifiedCheckpoint,proto3" json:"current_justified_checkpoint,omitempty"` FinalizedCheckpoint *Checkpoint `protobuf:"bytes,8004,opt,name=finalized_checkpoint,json=finalizedCheckpoint,proto3" json:"finalized_checkpoint,omitempty"` @@ -231,7 +231,7 @@ func (x *BeaconState) GetFinalizedCheckpoint() *Checkpoint { type PendingAttestation struct { state protoimpl.MessageState `protogen:"open.v1"` - AggregationBits github_com_prysmaticlabs_go_bitfield.Bitlist `protobuf:"bytes,1,opt,name=aggregation_bits,json=aggregationBits,proto3" json:"aggregation_bits,omitempty" cast-type:"github.com/prysmaticlabs/go-bitfield.Bitlist" ssz-max:"2048"` + AggregationBits github_com_prysmaticlabs_go_bitfield.Bitlist `protobuf:"bytes,1,opt,name=aggregation_bits,json=aggregationBits,proto3" json:"aggregation_bits,omitempty" cast-type:"github.com/OffchainLabs/go-bitfield.Bitlist" ssz-max:"2048"` Data *AttestationData `protobuf:"bytes,2,opt,name=data,proto3" json:"data,omitempty"` InclusionDelay github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot `protobuf:"varint,3,opt,name=inclusion_delay,json=inclusionDelay,proto3" json:"inclusion_delay,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot"` ProposerIndex github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex `protobuf:"varint,4,opt,name=proposer_index,json=proposerIndex,proto3" json:"proposer_index,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.ValidatorIndex"` @@ -720,7 +720,7 @@ type BeaconStateAltair struct { Slashings []uint64 `protobuf:"varint,6001,rep,packed,name=slashings,proto3" json:"slashings,omitempty" ssz-size:"8192"` PreviousEpochParticipation []byte `protobuf:"bytes,7001,opt,name=previous_epoch_participation,json=previousEpochParticipation,proto3" json:"previous_epoch_participation,omitempty" ssz-max:"1099511627776"` CurrentEpochParticipation []byte `protobuf:"bytes,7002,opt,name=current_epoch_participation,json=currentEpochParticipation,proto3" json:"current_epoch_participation,omitempty" ssz-max:"1099511627776"` - JustificationBits github_com_prysmaticlabs_go_bitfield.Bitvector4 `protobuf:"bytes,8001,opt,name=justification_bits,json=justificationBits,proto3" json:"justification_bits,omitempty" cast-type:"github.com/prysmaticlabs/go-bitfield.Bitvector4" ssz-size:"1"` + JustificationBits 
github_com_prysmaticlabs_go_bitfield.Bitvector4 `protobuf:"bytes,8001,opt,name=justification_bits,json=justificationBits,proto3" json:"justification_bits,omitempty" cast-type:"github.com/OffchainLabs/go-bitfield.Bitvector4" ssz-size:"1"` PreviousJustifiedCheckpoint *Checkpoint `protobuf:"bytes,8002,opt,name=previous_justified_checkpoint,json=previousJustifiedCheckpoint,proto3" json:"previous_justified_checkpoint,omitempty"` CurrentJustifiedCheckpoint *Checkpoint `protobuf:"bytes,8003,opt,name=current_justified_checkpoint,json=currentJustifiedCheckpoint,proto3" json:"current_justified_checkpoint,omitempty"` FinalizedCheckpoint *Checkpoint `protobuf:"bytes,8004,opt,name=finalized_checkpoint,json=finalizedCheckpoint,proto3" json:"finalized_checkpoint,omitempty"` @@ -1000,7 +1000,7 @@ type BeaconStateBellatrix struct { Slashings []uint64 `protobuf:"varint,6001,rep,packed,name=slashings,proto3" json:"slashings,omitempty" ssz-size:"8192"` PreviousEpochParticipation []byte `protobuf:"bytes,7001,opt,name=previous_epoch_participation,json=previousEpochParticipation,proto3" json:"previous_epoch_participation,omitempty" ssz-max:"1099511627776"` CurrentEpochParticipation []byte `protobuf:"bytes,7002,opt,name=current_epoch_participation,json=currentEpochParticipation,proto3" json:"current_epoch_participation,omitempty" ssz-max:"1099511627776"` - JustificationBits github_com_prysmaticlabs_go_bitfield.Bitvector4 `protobuf:"bytes,8001,opt,name=justification_bits,json=justificationBits,proto3" json:"justification_bits,omitempty" cast-type:"github.com/prysmaticlabs/go-bitfield.Bitvector4" ssz-size:"1"` + JustificationBits github_com_prysmaticlabs_go_bitfield.Bitvector4 `protobuf:"bytes,8001,opt,name=justification_bits,json=justificationBits,proto3" json:"justification_bits,omitempty" cast-type:"github.com/OffchainLabs/go-bitfield.Bitvector4" ssz-size:"1"` PreviousJustifiedCheckpoint *Checkpoint `protobuf:"bytes,8002,opt,name=previous_justified_checkpoint,json=previousJustifiedCheckpoint,proto3" json:"previous_justified_checkpoint,omitempty"` CurrentJustifiedCheckpoint *Checkpoint `protobuf:"bytes,8003,opt,name=current_justified_checkpoint,json=currentJustifiedCheckpoint,proto3" json:"current_justified_checkpoint,omitempty"` FinalizedCheckpoint *Checkpoint `protobuf:"bytes,8004,opt,name=finalized_checkpoint,json=finalizedCheckpoint,proto3" json:"finalized_checkpoint,omitempty"` @@ -1236,7 +1236,7 @@ type BeaconStateCapella struct { Slashings []uint64 `protobuf:"varint,6001,rep,packed,name=slashings,proto3" json:"slashings,omitempty" ssz-size:"8192"` PreviousEpochParticipation []byte `protobuf:"bytes,7001,opt,name=previous_epoch_participation,json=previousEpochParticipation,proto3" json:"previous_epoch_participation,omitempty" ssz-max:"1099511627776"` CurrentEpochParticipation []byte `protobuf:"bytes,7002,opt,name=current_epoch_participation,json=currentEpochParticipation,proto3" json:"current_epoch_participation,omitempty" ssz-max:"1099511627776"` - JustificationBits github_com_prysmaticlabs_go_bitfield.Bitvector4 `protobuf:"bytes,8001,opt,name=justification_bits,json=justificationBits,proto3" json:"justification_bits,omitempty" cast-type:"github.com/prysmaticlabs/go-bitfield.Bitvector4" ssz-size:"1"` + JustificationBits github_com_prysmaticlabs_go_bitfield.Bitvector4 `protobuf:"bytes,8001,opt,name=justification_bits,json=justificationBits,proto3" json:"justification_bits,omitempty" cast-type:"github.com/OffchainLabs/go-bitfield.Bitvector4" ssz-size:"1"` PreviousJustifiedCheckpoint *Checkpoint 
`protobuf:"bytes,8002,opt,name=previous_justified_checkpoint,json=previousJustifiedCheckpoint,proto3" json:"previous_justified_checkpoint,omitempty"` CurrentJustifiedCheckpoint *Checkpoint `protobuf:"bytes,8003,opt,name=current_justified_checkpoint,json=currentJustifiedCheckpoint,proto3" json:"current_justified_checkpoint,omitempty"` FinalizedCheckpoint *Checkpoint `protobuf:"bytes,8004,opt,name=finalized_checkpoint,json=finalizedCheckpoint,proto3" json:"finalized_checkpoint,omitempty"` @@ -1496,7 +1496,7 @@ type BeaconStateDeneb struct { Slashings []uint64 `protobuf:"varint,6001,rep,packed,name=slashings,proto3" json:"slashings,omitempty" ssz-size:"8192"` PreviousEpochParticipation []byte `protobuf:"bytes,7001,opt,name=previous_epoch_participation,json=previousEpochParticipation,proto3" json:"previous_epoch_participation,omitempty" ssz-max:"1099511627776"` CurrentEpochParticipation []byte `protobuf:"bytes,7002,opt,name=current_epoch_participation,json=currentEpochParticipation,proto3" json:"current_epoch_participation,omitempty" ssz-max:"1099511627776"` - JustificationBits github_com_prysmaticlabs_go_bitfield.Bitvector4 `protobuf:"bytes,8001,opt,name=justification_bits,json=justificationBits,proto3" json:"justification_bits,omitempty" cast-type:"github.com/prysmaticlabs/go-bitfield.Bitvector4" ssz-size:"1"` + JustificationBits github_com_prysmaticlabs_go_bitfield.Bitvector4 `protobuf:"bytes,8001,opt,name=justification_bits,json=justificationBits,proto3" json:"justification_bits,omitempty" cast-type:"github.com/OffchainLabs/go-bitfield.Bitvector4" ssz-size:"1"` PreviousJustifiedCheckpoint *Checkpoint `protobuf:"bytes,8002,opt,name=previous_justified_checkpoint,json=previousJustifiedCheckpoint,proto3" json:"previous_justified_checkpoint,omitempty"` CurrentJustifiedCheckpoint *Checkpoint `protobuf:"bytes,8003,opt,name=current_justified_checkpoint,json=currentJustifiedCheckpoint,proto3" json:"current_justified_checkpoint,omitempty"` FinalizedCheckpoint *Checkpoint `protobuf:"bytes,8004,opt,name=finalized_checkpoint,json=finalizedCheckpoint,proto3" json:"finalized_checkpoint,omitempty"` @@ -1756,7 +1756,7 @@ type BeaconStateElectra struct { Slashings []uint64 `protobuf:"varint,6001,rep,packed,name=slashings,proto3" json:"slashings,omitempty" ssz-size:"8192"` PreviousEpochParticipation []byte `protobuf:"bytes,7001,opt,name=previous_epoch_participation,json=previousEpochParticipation,proto3" json:"previous_epoch_participation,omitempty" ssz-max:"1099511627776"` CurrentEpochParticipation []byte `protobuf:"bytes,7002,opt,name=current_epoch_participation,json=currentEpochParticipation,proto3" json:"current_epoch_participation,omitempty" ssz-max:"1099511627776"` - JustificationBits github_com_prysmaticlabs_go_bitfield.Bitvector4 `protobuf:"bytes,8001,opt,name=justification_bits,json=justificationBits,proto3" json:"justification_bits,omitempty" cast-type:"github.com/prysmaticlabs/go-bitfield.Bitvector4" ssz-size:"1"` + JustificationBits github_com_prysmaticlabs_go_bitfield.Bitvector4 `protobuf:"bytes,8001,opt,name=justification_bits,json=justificationBits,proto3" json:"justification_bits,omitempty" cast-type:"github.com/OffchainLabs/go-bitfield.Bitvector4" ssz-size:"1"` PreviousJustifiedCheckpoint *Checkpoint `protobuf:"bytes,8002,opt,name=previous_justified_checkpoint,json=previousJustifiedCheckpoint,proto3" json:"previous_justified_checkpoint,omitempty"` CurrentJustifiedCheckpoint *Checkpoint `protobuf:"bytes,8003,opt,name=current_justified_checkpoint,json=currentJustifiedCheckpoint,proto3" 
json:"current_justified_checkpoint,omitempty"` FinalizedCheckpoint *Checkpoint `protobuf:"bytes,8004,opt,name=finalized_checkpoint,json=finalizedCheckpoint,proto3" json:"finalized_checkpoint,omitempty"` @@ -2088,7 +2088,7 @@ type BeaconStateFulu struct { Slashings []uint64 `protobuf:"varint,6001,rep,packed,name=slashings,proto3" json:"slashings,omitempty" ssz-size:"8192"` PreviousEpochParticipation []byte `protobuf:"bytes,7001,opt,name=previous_epoch_participation,json=previousEpochParticipation,proto3" json:"previous_epoch_participation,omitempty" ssz-max:"1099511627776"` CurrentEpochParticipation []byte `protobuf:"bytes,7002,opt,name=current_epoch_participation,json=currentEpochParticipation,proto3" json:"current_epoch_participation,omitempty" ssz-max:"1099511627776"` - JustificationBits github_com_prysmaticlabs_go_bitfield.Bitvector4 `protobuf:"bytes,8001,opt,name=justification_bits,json=justificationBits,proto3" json:"justification_bits,omitempty" cast-type:"github.com/prysmaticlabs/go-bitfield.Bitvector4" ssz-size:"1"` + JustificationBits github_com_prysmaticlabs_go_bitfield.Bitvector4 `protobuf:"bytes,8001,opt,name=justification_bits,json=justificationBits,proto3" json:"justification_bits,omitempty" cast-type:"github.com/OffchainLabs/go-bitfield.Bitvector4" ssz-size:"1"` PreviousJustifiedCheckpoint *Checkpoint `protobuf:"bytes,8002,opt,name=previous_justified_checkpoint,json=previousJustifiedCheckpoint,proto3" json:"previous_justified_checkpoint,omitempty"` CurrentJustifiedCheckpoint *Checkpoint `protobuf:"bytes,8003,opt,name=current_justified_checkpoint,json=currentJustifiedCheckpoint,proto3" json:"current_justified_checkpoint,omitempty"` FinalizedCheckpoint *Checkpoint `protobuf:"bytes,8004,opt,name=finalized_checkpoint,json=finalizedCheckpoint,proto3" json:"finalized_checkpoint,omitempty"` diff --git a/proto/prysm/v1alpha1/beacon_state.proto b/proto/prysm/v1alpha1/beacon_state.proto index 16f0d864d4..59ea245fc5 100644 --- a/proto/prysm/v1alpha1/beacon_state.proto +++ b/proto/prysm/v1alpha1/beacon_state.proto @@ -71,7 +71,7 @@ message BeaconState { bytes justification_bits = 8001 [ (ethereum.eth.ext.ssz_size) = "1", (ethereum.eth.ext.cast_type) = - "github.com/prysmaticlabs/go-bitfield.Bitvector4" + "github.com/OffchainLabs/go-bitfield.Bitvector4" ]; Checkpoint previous_justified_checkpoint = 8002; Checkpoint current_justified_checkpoint = 8003; @@ -84,7 +84,7 @@ message PendingAttestation { bytes aggregation_bits = 1 [ (ethereum.eth.ext.ssz_max) = "2048", (ethereum.eth.ext.cast_type) = - "github.com/prysmaticlabs/go-bitfield.Bitlist" + "github.com/OffchainLabs/go-bitfield.Bitlist" ]; AttestationData data = 2; // The difference of when attestation gets created and get included on chain. 
@@ -236,7 +236,7 @@ message BeaconStateAltair { bytes justification_bits = 8001 [ (ethereum.eth.ext.ssz_size) = "1", (ethereum.eth.ext.cast_type) = - "github.com/prysmaticlabs/go-bitfield.Bitvector4" + "github.com/OffchainLabs/go-bitfield.Bitvector4" ]; Checkpoint previous_justified_checkpoint = 8002; Checkpoint current_justified_checkpoint = 8003; @@ -317,7 +317,7 @@ message BeaconStateBellatrix { bytes justification_bits = 8001 [ (ethereum.eth.ext.ssz_size) = "1", (ethereum.eth.ext.cast_type) = - "github.com/prysmaticlabs/go-bitfield.Bitvector4" + "github.com/OffchainLabs/go-bitfield.Bitvector4" ]; Checkpoint previous_justified_checkpoint = 8002; Checkpoint current_justified_checkpoint = 8003; @@ -390,7 +390,7 @@ message BeaconStateCapella { bytes justification_bits = 8001 [ (ethereum.eth.ext.ssz_size) = "1", (ethereum.eth.ext.cast_type) = - "github.com/prysmaticlabs/go-bitfield.Bitvector4" + "github.com/OffchainLabs/go-bitfield.Bitvector4" ]; Checkpoint previous_justified_checkpoint = 8002; Checkpoint current_justified_checkpoint = 8003; @@ -472,7 +472,7 @@ message BeaconStateDeneb { bytes justification_bits = 8001 [ (ethereum.eth.ext.ssz_size) = "1", (ethereum.eth.ext.cast_type) = - "github.com/prysmaticlabs/go-bitfield.Bitvector4" + "github.com/OffchainLabs/go-bitfield.Bitvector4" ]; Checkpoint previous_justified_checkpoint = 8002; Checkpoint current_justified_checkpoint = 8003; @@ -554,7 +554,7 @@ message BeaconStateElectra { bytes justification_bits = 8001 [ (ethereum.eth.ext.ssz_size) = "1", (ethereum.eth.ext.cast_type) = - "github.com/prysmaticlabs/go-bitfield.Bitvector4" + "github.com/OffchainLabs/go-bitfield.Bitvector4" ]; Checkpoint previous_justified_checkpoint = 8002; Checkpoint current_justified_checkpoint = 8003; @@ -665,7 +665,7 @@ message BeaconStateFulu { bytes justification_bits = 8001 [ (ethereum.eth.ext.ssz_size) = "1", (ethereum.eth.ext.cast_type) = - "github.com/prysmaticlabs/go-bitfield.Bitvector4" + "github.com/OffchainLabs/go-bitfield.Bitvector4" ]; Checkpoint previous_justified_checkpoint = 8002; Checkpoint current_justified_checkpoint = 8003; diff --git a/proto/prysm/v1alpha1/metadata/metadata_interfaces.go b/proto/prysm/v1alpha1/metadata/metadata_interfaces.go index 032fd26e3a..60d6d892d9 100644 --- a/proto/prysm/v1alpha1/metadata/metadata_interfaces.go +++ b/proto/prysm/v1alpha1/metadata/metadata_interfaces.go @@ -1,9 +1,9 @@ package metadata import ( + "github.com/OffchainLabs/go-bitfield" pb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" ssz "github.com/prysmaticlabs/fastssz" - "github.com/prysmaticlabs/go-bitfield" ) // Metadata returns the interface of a p2p metadata type. 
diff --git a/proto/prysm/v1alpha1/p2p_messages.pb.go b/proto/prysm/v1alpha1/p2p_messages.pb.go index 732890f35d..82f7a3ce95 100755 --- a/proto/prysm/v1alpha1/p2p_messages.pb.go +++ b/proto/prysm/v1alpha1/p2p_messages.pb.go @@ -10,9 +10,9 @@ import ( reflect "reflect" sync "sync" + github_com_prysmaticlabs_go_bitfield "github.com/OffchainLabs/go-bitfield" github_com_OffchainLabs_prysm_v6_consensus_types_primitives "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" _ "github.com/OffchainLabs/prysm/v6/proto/eth/ext" - github_com_prysmaticlabs_go_bitfield "github.com/prysmaticlabs/go-bitfield" protoreflect "google.golang.org/protobuf/reflect/protoreflect" protoimpl "google.golang.org/protobuf/runtime/protoimpl" _ "google.golang.org/protobuf/types/descriptorpb" @@ -308,7 +308,7 @@ func (x *ENRForkID) GetNextForkEpoch() github_com_OffchainLabs_prysm_v6_consensu type MetaDataV0 struct { state protoimpl.MessageState `protogen:"open.v1"` SeqNumber uint64 `protobuf:"varint,1,opt,name=seq_number,json=seqNumber,proto3" json:"seq_number,omitempty"` - Attnets github_com_prysmaticlabs_go_bitfield.Bitvector64 `protobuf:"bytes,2,opt,name=attnets,proto3" json:"attnets,omitempty" cast-type:"github.com/prysmaticlabs/go-bitfield.Bitvector64" ssz-size:"8"` + Attnets github_com_prysmaticlabs_go_bitfield.Bitvector64 `protobuf:"bytes,2,opt,name=attnets,proto3" json:"attnets,omitempty" cast-type:"github.com/OffchainLabs/go-bitfield.Bitvector64" ssz-size:"8"` unknownFields protoimpl.UnknownFields sizeCache protoimpl.SizeCache } @@ -360,8 +360,8 @@ func (x *MetaDataV0) GetAttnets() github_com_prysmaticlabs_go_bitfield.Bitvector type MetaDataV1 struct { state protoimpl.MessageState `protogen:"open.v1"` SeqNumber uint64 `protobuf:"varint,1,opt,name=seq_number,json=seqNumber,proto3" json:"seq_number,omitempty"` - Attnets github_com_prysmaticlabs_go_bitfield.Bitvector64 `protobuf:"bytes,2,opt,name=attnets,proto3" json:"attnets,omitempty" cast-type:"github.com/prysmaticlabs/go-bitfield.Bitvector64" ssz-size:"8"` - Syncnets github_com_prysmaticlabs_go_bitfield.Bitvector4 `protobuf:"bytes,3,opt,name=syncnets,proto3" json:"syncnets,omitempty" cast-type:"github.com/prysmaticlabs/go-bitfield.Bitvector4" ssz-size:"1"` + Attnets github_com_prysmaticlabs_go_bitfield.Bitvector64 `protobuf:"bytes,2,opt,name=attnets,proto3" json:"attnets,omitempty" cast-type:"github.com/OffchainLabs/go-bitfield.Bitvector64" ssz-size:"8"` + Syncnets github_com_prysmaticlabs_go_bitfield.Bitvector4 `protobuf:"bytes,3,opt,name=syncnets,proto3" json:"syncnets,omitempty" cast-type:"github.com/OffchainLabs/go-bitfield.Bitvector4" ssz-size:"1"` unknownFields protoimpl.UnknownFields sizeCache protoimpl.SizeCache } @@ -420,8 +420,8 @@ func (x *MetaDataV1) GetSyncnets() github_com_prysmaticlabs_go_bitfield.Bitvecto type MetaDataV2 struct { state protoimpl.MessageState `protogen:"open.v1"` SeqNumber uint64 `protobuf:"varint,1,opt,name=seq_number,json=seqNumber,proto3" json:"seq_number,omitempty"` - Attnets github_com_prysmaticlabs_go_bitfield.Bitvector64 `protobuf:"bytes,2,opt,name=attnets,proto3" json:"attnets,omitempty" cast-type:"github.com/prysmaticlabs/go-bitfield.Bitvector64" ssz-size:"8"` - Syncnets github_com_prysmaticlabs_go_bitfield.Bitvector4 `protobuf:"bytes,3,opt,name=syncnets,proto3" json:"syncnets,omitempty" cast-type:"github.com/prysmaticlabs/go-bitfield.Bitvector4" ssz-size:"1"` + Attnets github_com_prysmaticlabs_go_bitfield.Bitvector64 `protobuf:"bytes,2,opt,name=attnets,proto3" json:"attnets,omitempty" 
cast-type:"github.com/OffchainLabs/go-bitfield.Bitvector64" ssz-size:"8"` + Syncnets github_com_prysmaticlabs_go_bitfield.Bitvector4 `protobuf:"bytes,3,opt,name=syncnets,proto3" json:"syncnets,omitempty" cast-type:"github.com/OffchainLabs/go-bitfield.Bitvector4" ssz-size:"1"` CustodyGroupCount uint64 `protobuf:"varint,4,opt,name=custody_group_count,json=custodyGroupCount,proto3" json:"custody_group_count,omitempty"` unknownFields protoimpl.UnknownFields sizeCache protoimpl.SizeCache diff --git a/proto/prysm/v1alpha1/p2p_messages.proto b/proto/prysm/v1alpha1/p2p_messages.proto index 7fbe1428b9..620e07b403 100644 --- a/proto/prysm/v1alpha1/p2p_messages.proto +++ b/proto/prysm/v1alpha1/p2p_messages.proto @@ -75,7 +75,7 @@ message MetaDataV0 { bytes attnets = 2 [ (ethereum.eth.ext.ssz_size) = "8", (ethereum.eth.ext.cast_type) = - "github.com/prysmaticlabs/go-bitfield.Bitvector64" + "github.com/OffchainLabs/go-bitfield.Bitvector64" ]; } @@ -93,12 +93,12 @@ message MetaDataV1 { bytes attnets = 2 [ (ethereum.eth.ext.ssz_size) = "8", (ethereum.eth.ext.cast_type) = - "github.com/prysmaticlabs/go-bitfield.Bitvector64" + "github.com/OffchainLabs/go-bitfield.Bitvector64" ]; bytes syncnets = 3 [ (ethereum.eth.ext.ssz_size) = "1", (ethereum.eth.ext.cast_type) = - "github.com/prysmaticlabs/go-bitfield.Bitvector4" + "github.com/OffchainLabs/go-bitfield.Bitvector4" ]; } @@ -117,12 +117,12 @@ message MetaDataV2 { bytes attnets = 2 [ (ethereum.eth.ext.ssz_size) = "8", (ethereum.eth.ext.cast_type) = - "github.com/prysmaticlabs/go-bitfield.Bitvector64" + "github.com/OffchainLabs/go-bitfield.Bitvector64" ]; bytes syncnets = 3 [ (ethereum.eth.ext.ssz_size) = "1", (ethereum.eth.ext.cast_type) = - "github.com/prysmaticlabs/go-bitfield.Bitvector4" + "github.com/OffchainLabs/go-bitfield.Bitvector4" ]; uint64 custody_group_count = 4; } diff --git a/proto/prysm/v1alpha1/sync_committee.pb.go b/proto/prysm/v1alpha1/sync_committee.pb.go index 7ba0085fe5..84628f75b6 100755 --- a/proto/prysm/v1alpha1/sync_committee.pb.go +++ b/proto/prysm/v1alpha1/sync_committee.pb.go @@ -10,9 +10,9 @@ import ( reflect "reflect" sync "sync" + github_com_prysmaticlabs_go_bitfield "github.com/OffchainLabs/go-bitfield" github_com_OffchainLabs_prysm_v6_consensus_types_primitives "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" _ "github.com/OffchainLabs/prysm/v6/proto/eth/ext" - github_com_prysmaticlabs_go_bitfield "github.com/prysmaticlabs/go-bitfield" protoreflect "google.golang.org/protobuf/reflect/protoreflect" protoimpl "google.golang.org/protobuf/runtime/protoimpl" ) @@ -210,7 +210,7 @@ type SyncCommitteeContribution struct { Slot github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot `protobuf:"varint,1,opt,name=slot,proto3" json:"slot,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot"` BlockRoot []byte `protobuf:"bytes,2,opt,name=block_root,json=blockRoot,proto3" json:"block_root,omitempty" ssz-size:"32"` SubcommitteeIndex uint64 `protobuf:"varint,3,opt,name=subcommittee_index,json=subcommitteeIndex,proto3" json:"subcommittee_index,omitempty"` - AggregationBits github_com_prysmaticlabs_go_bitfield.Bitvector128 `protobuf:"bytes,4,opt,name=aggregation_bits,json=aggregationBits,proto3" json:"aggregation_bits,omitempty" cast-type:"github.com/prysmaticlabs/go-bitfield.Bitvector128" ssz-size:"16"` + AggregationBits github_com_prysmaticlabs_go_bitfield.Bitvector128 `protobuf:"bytes,4,opt,name=aggregation_bits,json=aggregationBits,proto3" json:"aggregation_bits,omitempty" 
cast-type:"github.com/OffchainLabs/go-bitfield.Bitvector128" ssz-size:"16"` Signature []byte `protobuf:"bytes,5,opt,name=signature,proto3" json:"signature,omitempty" ssz-size:"96"` unknownFields protoimpl.UnknownFields sizeCache protoimpl.SizeCache diff --git a/proto/prysm/v1alpha1/sync_committee_mainnet.go b/proto/prysm/v1alpha1/sync_committee_mainnet.go index 4dd2d62e8b..278128a416 100644 --- a/proto/prysm/v1alpha1/sync_committee_mainnet.go +++ b/proto/prysm/v1alpha1/sync_committee_mainnet.go @@ -3,7 +3,7 @@ package eth import ( - "github.com/prysmaticlabs/go-bitfield" + "github.com/OffchainLabs/go-bitfield" ) func NewSyncCommitteeAggregationBits() bitfield.Bitvector128 { diff --git a/proto/prysm/v1alpha1/sync_committee_minimal.go b/proto/prysm/v1alpha1/sync_committee_minimal.go index 40beedfab7..4cfbb889e1 100644 --- a/proto/prysm/v1alpha1/sync_committee_minimal.go +++ b/proto/prysm/v1alpha1/sync_committee_minimal.go @@ -3,7 +3,7 @@ package eth import ( - "github.com/prysmaticlabs/go-bitfield" + "github.com/OffchainLabs/go-bitfield" ) func NewSyncCommitteeAggregationBits() bitfield.Bitvector8 { diff --git a/proto/ssz_proto_library.bzl b/proto/ssz_proto_library.bzl index 47033b5aa2..1b677a977c 100644 --- a/proto/ssz_proto_library.bzl +++ b/proto/ssz_proto_library.bzl @@ -16,9 +16,9 @@ mainnet = { "slashings.size": "8192", # EPOCHS_PER_SLASHINGS_VECTOR "sync_committee_bits.size": "512", # SYNC_COMMITTEE_SIZE "sync_committee_bytes.size": "64", - "sync_committee_bits.type": "github.com/prysmaticlabs/go-bitfield.Bitvector512", + "sync_committee_bits.type": "github.com/OffchainLabs/go-bitfield.Bitvector512", "sync_committee_aggregate_bytes.size": "16", - "sync_committee_aggregate_bits.type": "github.com/prysmaticlabs/go-bitfield.Bitvector128", + "sync_committee_aggregate_bits.type": "github.com/OffchainLabs/go-bitfield.Bitvector128", "withdrawal.size": "16", "blob.size": "131072", # BYTES_PER_FIELD_ELEMENT * FIELD_ELEMENTS_PER_BLOB "logs_bloom.size": "256", @@ -32,7 +32,7 @@ mainnet = { "max_attesting_indices.size": "131072", "max_committees_per_slot.size": "64", "committee_bits.size": "8", - "committee_bits.type": "github.com/prysmaticlabs/go-bitfield.Bitvector64", + "committee_bits.type": "github.com/OffchainLabs/go-bitfield.Bitvector64", "pending_deposits_limit": "134217728", "pending_partial_withdrawals_limit": "134217728", "pending_consolidations_limit": "262144", @@ -55,9 +55,9 @@ minimal = { "slashings.size": "64", "sync_committee_bits.size": "32", "sync_committee_bytes.size": "4", - "sync_committee_bits.type": "github.com/prysmaticlabs/go-bitfield.Bitvector32", + "sync_committee_bits.type": "github.com/OffchainLabs/go-bitfield.Bitvector32", "sync_committee_aggregate_bytes.size": "1", - "sync_committee_aggregate_bits.type": "github.com/prysmaticlabs/go-bitfield.Bitvector8", + "sync_committee_aggregate_bits.type": "github.com/OffchainLabs/go-bitfield.Bitvector8", "withdrawal.size": "4", "blob.size": "131072", "logs_bloom.size": "256", @@ -71,7 +71,7 @@ minimal = { "max_attesting_indices.size": "8192", "max_committees_per_slot.size": "4", "committee_bits.size": "1", - "committee_bits.type": "github.com/prysmaticlabs/go-bitfield.Bitvector4", + "committee_bits.type": "github.com/OffchainLabs/go-bitfield.Bitvector4", "pending_deposits_limit": "134217728", "pending_partial_withdrawals_limit": "64", "pending_consolidations_limit": "64", diff --git a/proto/ssz_query/testing/test_containers.pb.go b/proto/ssz_query/testing/test_containers.pb.go index 695e68103c..141885b2ce 100755 --- 
a/proto/ssz_query/testing/test_containers.pb.go +++ b/proto/ssz_query/testing/test_containers.pb.go @@ -10,8 +10,8 @@ import ( reflect "reflect" sync "sync" + github_com_prysmaticlabs_go_bitfield "github.com/OffchainLabs/go-bitfield" _ "github.com/OffchainLabs/prysm/v6/proto/eth/ext" - github_com_prysmaticlabs_go_bitfield "github.com/prysmaticlabs/go-bitfield" protoreflect "google.golang.org/protobuf/reflect/protoreflect" protoimpl "google.golang.org/protobuf/runtime/protoimpl" ) @@ -84,8 +84,8 @@ type FixedTestContainer struct { Nested *FixedNestedContainer `protobuf:"bytes,5,opt,name=nested,proto3" json:"nested,omitempty"` VectorField []uint64 `protobuf:"varint,6,rep,packed,name=vector_field,json=vectorField,proto3" json:"vector_field,omitempty" ssz-size:"24"` TwoDimensionBytesField [][]byte `protobuf:"bytes,7,rep,name=two_dimension_bytes_field,json=twoDimensionBytesField,proto3" json:"two_dimension_bytes_field,omitempty" ssz-size:"5,32"` - Bitvector64Field github_com_prysmaticlabs_go_bitfield.Bitvector64 `protobuf:"bytes,8,opt,name=bitvector64_field,json=bitvector64Field,proto3" json:"bitvector64_field,omitempty" cast-type:"github.com/prysmaticlabs/go-bitfield.Bitvector64" ssz-size:"8"` - Bitvector512Field github_com_prysmaticlabs_go_bitfield.Bitvector512 `protobuf:"bytes,9,opt,name=bitvector512_field,json=bitvector512Field,proto3" json:"bitvector512_field,omitempty" cast-type:"github.com/prysmaticlabs/go-bitfield.Bitvector512" ssz-size:"64"` + Bitvector64Field github_com_prysmaticlabs_go_bitfield.Bitvector64 `protobuf:"bytes,8,opt,name=bitvector64_field,json=bitvector64Field,proto3" json:"bitvector64_field,omitempty" cast-type:"github.com/OffchainLabs/go-bitfield.Bitvector64" ssz-size:"8"` + Bitvector512Field github_com_prysmaticlabs_go_bitfield.Bitvector512 `protobuf:"bytes,9,opt,name=bitvector512_field,json=bitvector512Field,proto3" json:"bitvector512_field,omitempty" cast-type:"github.com/OffchainLabs/go-bitfield.Bitvector512" ssz-size:"64"` TrailingField []byte `protobuf:"bytes,10,opt,name=trailing_field,json=trailingField,proto3" json:"trailing_field,omitempty" ssz-size:"56"` unknownFields protoimpl.UnknownFields sizeCache protoimpl.SizeCache @@ -311,7 +311,7 @@ type VariableTestContainer struct { FieldListBytes32 [][]byte `protobuf:"bytes,4,rep,name=field_list_bytes32,json=fieldListBytes32,proto3" json:"field_list_bytes32,omitempty" ssz-max:"100" ssz-size:"?,32"` Nested *VariableNestedContainer `protobuf:"bytes,5,opt,name=nested,proto3" json:"nested,omitempty"` VariableContainerList []*VariableOuterContainer `protobuf:"bytes,6,rep,name=variable_container_list,json=variableContainerList,proto3" json:"variable_container_list,omitempty" ssz-max:"10"` - BitlistField github_com_prysmaticlabs_go_bitfield.Bitlist `protobuf:"bytes,7,opt,name=bitlist_field,json=bitlistField,proto3" json:"bitlist_field,omitempty" cast-type:"github.com/prysmaticlabs/go-bitfield.Bitlist" ssz-max:"2048"` + BitlistField github_com_prysmaticlabs_go_bitfield.Bitlist `protobuf:"bytes,7,opt,name=bitlist_field,json=bitlistField,proto3" json:"bitlist_field,omitempty" cast-type:"github.com/OffchainLabs/go-bitfield.Bitlist" ssz-max:"2048"` NestedListField [][]byte `protobuf:"bytes,8,rep,name=nested_list_field,json=nestedListField,proto3" json:"nested_list_field,omitempty" ssz-max:"100,50" ssz-size:"?,?"` TrailingField []byte `protobuf:"bytes,9,opt,name=trailing_field,json=trailingField,proto3" json:"trailing_field,omitempty" ssz-size:"56"` unknownFields protoimpl.UnknownFields diff --git 
a/proto/ssz_query/testing/test_containers.proto b/proto/ssz_query/testing/test_containers.proto index a3a5656c22..506a187d27 100644 --- a/proto/ssz_query/testing/test_containers.proto +++ b/proto/ssz_query/testing/test_containers.proto @@ -45,13 +45,13 @@ message FixedTestContainer { bytes bitvector64_field = 8 [ (ethereum.eth.ext.ssz_size) = "8", (ethereum.eth.ext.cast_type) = - "github.com/prysmaticlabs/go-bitfield.Bitvector64" + "github.com/OffchainLabs/go-bitfield.Bitvector64" ]; // Test: Bitvector64 (8 bytes), offset: 437 bytes bitvector512_field = 9 [ (ethereum.eth.ext.ssz_size) = "64", (ethereum.eth.ext.cast_type) = - "github.com/prysmaticlabs/go-bitfield.Bitvector512" + "github.com/OffchainLabs/go-bitfield.Bitvector512" ]; // Test: Bitvector512 (64 bytes), offset: 445 // Additional bytes field - test field ordering and offset calculation @@ -108,7 +108,7 @@ message VariableTestContainer { bytes bitlist_field = 7 [ (ethereum.eth.ext.ssz_max) = "2048", (ethereum.eth.ext.cast_type) = - "github.com/prysmaticlabs/go-bitfield.Bitlist" + "github.com/OffchainLabs/go-bitfield.Bitlist" ]; // 2D bytes list - test list of bytelists. diff --git a/testing/endtoend/evaluators/slashing_helper.go b/testing/endtoend/evaluators/slashing_helper.go index 46367c230c..30d9112cd6 100644 --- a/testing/endtoend/evaluators/slashing_helper.go +++ b/testing/endtoend/evaluators/slashing_helper.go @@ -4,6 +4,7 @@ import ( "context" "crypto/rand" + "github.com/OffchainLabs/go-bitfield" "github.com/OffchainLabs/prysm/v6/beacon-chain/core/signing" "github.com/OffchainLabs/prysm/v6/config/params" "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" @@ -12,7 +13,6 @@ import ( eth "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" "github.com/OffchainLabs/prysm/v6/testing/util" "github.com/pkg/errors" - "github.com/prysmaticlabs/go-bitfield" "google.golang.org/protobuf/types/known/emptypb" ) diff --git a/testing/util/altair.go b/testing/util/altair.go index dba6279944..e7a9c09d67 100644 --- a/testing/util/altair.go +++ b/testing/util/altair.go @@ -5,6 +5,7 @@ import ( "fmt" "testing" + "github.com/OffchainLabs/go-bitfield" "github.com/OffchainLabs/prysm/v6/beacon-chain/core/altair" "github.com/OffchainLabs/prysm/v6/beacon-chain/core/blocks" "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" @@ -22,7 +23,6 @@ import ( "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" "github.com/pkg/errors" - "github.com/prysmaticlabs/go-bitfield" ) // DeterministicGenesisStateAltair returns a genesis state in hard fork 1 format made using the deterministic deposits. 
diff --git a/testing/util/attestation.go b/testing/util/attestation.go index 7c2cf7c9f8..96208edd5b 100644 --- a/testing/util/attestation.go +++ b/testing/util/attestation.go @@ -5,6 +5,7 @@ import ( "fmt" "math" + "github.com/OffchainLabs/go-bitfield" "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" "github.com/OffchainLabs/prysm/v6/beacon-chain/core/signing" "github.com/OffchainLabs/prysm/v6/beacon-chain/core/transition" @@ -19,7 +20,6 @@ import ( ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" "github.com/OffchainLabs/prysm/v6/runtime/version" "github.com/OffchainLabs/prysm/v6/time/slots" - "github.com/prysmaticlabs/go-bitfield" log "github.com/sirupsen/logrus" ) diff --git a/testing/util/bellatrix.go b/testing/util/bellatrix.go index 905fd43edd..4b38f36240 100644 --- a/testing/util/bellatrix.go +++ b/testing/util/bellatrix.go @@ -5,6 +5,7 @@ import ( "encoding/binary" "fmt" + "github.com/OffchainLabs/go-bitfield" "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" "github.com/OffchainLabs/prysm/v6/beacon-chain/core/time" "github.com/OffchainLabs/prysm/v6/beacon-chain/core/transition" @@ -19,7 +20,6 @@ import ( ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" "github.com/OffchainLabs/prysm/v6/time/slots" "github.com/pkg/errors" - "github.com/prysmaticlabs/go-bitfield" ) // GenerateFullBlockBellatrix generates a fully valid Bellatrix block with the requested parameters. diff --git a/testing/util/capella_block.go b/testing/util/capella_block.go index b3c0548f24..68cfe01e6e 100644 --- a/testing/util/capella_block.go +++ b/testing/util/capella_block.go @@ -4,6 +4,7 @@ import ( "context" "fmt" + "github.com/OffchainLabs/go-bitfield" "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" "github.com/OffchainLabs/prysm/v6/beacon-chain/core/signing" "github.com/OffchainLabs/prysm/v6/beacon-chain/core/time" @@ -18,7 +19,6 @@ import ( ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" "github.com/OffchainLabs/prysm/v6/time/slots" "github.com/pkg/errors" - "github.com/prysmaticlabs/go-bitfield" ) // GenerateFullBlockCapella generates a fully valid Capella block with the requested parameters. diff --git a/testing/util/electra_block.go b/testing/util/electra_block.go index db434466cc..d61d8b32b7 100644 --- a/testing/util/electra_block.go +++ b/testing/util/electra_block.go @@ -6,6 +6,7 @@ import ( "fmt" "math/big" + "github.com/OffchainLabs/go-bitfield" "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" "github.com/OffchainLabs/prysm/v6/beacon-chain/core/signing" "github.com/OffchainLabs/prysm/v6/beacon-chain/core/time" @@ -21,7 +22,6 @@ import ( "github.com/OffchainLabs/prysm/v6/time/slots" "github.com/ethereum/go-ethereum/common" "github.com/pkg/errors" - "github.com/prysmaticlabs/go-bitfield" ) // GenerateFullBlockElectra generates a fully valid Electra block with the requested parameters. 
diff --git a/testing/util/fulu_block.go b/testing/util/fulu_block.go index 059c0961b5..c9befda671 100644 --- a/testing/util/fulu_block.go +++ b/testing/util/fulu_block.go @@ -4,6 +4,7 @@ import ( "context" "fmt" + "github.com/OffchainLabs/go-bitfield" "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" "github.com/OffchainLabs/prysm/v6/beacon-chain/core/time" "github.com/OffchainLabs/prysm/v6/beacon-chain/core/transition" @@ -17,7 +18,6 @@ import ( ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" "github.com/OffchainLabs/prysm/v6/time/slots" "github.com/pkg/errors" - "github.com/prysmaticlabs/go-bitfield" ) // GenerateFullBlockFulu generates a fully valid Fulu block with the requested parameters. diff --git a/testing/util/state.go b/testing/util/state.go index a0cae966ac..319cb6e5af 100644 --- a/testing/util/state.go +++ b/testing/util/state.go @@ -5,6 +5,7 @@ import ( "fmt" "testing" + "github.com/OffchainLabs/go-bitfield" b "github.com/OffchainLabs/prysm/v6/beacon-chain/core/blocks" "github.com/OffchainLabs/prysm/v6/beacon-chain/db/iface" "github.com/OffchainLabs/prysm/v6/beacon-chain/state" @@ -16,7 +17,6 @@ import ( ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" "github.com/OffchainLabs/prysm/v6/testing/require" "github.com/ethereum/go-ethereum/common/hexutil" - "github.com/prysmaticlabs/go-bitfield" ) // FillRootsNaturalOpt is meant to be used as an option when calling NewBeaconState. diff --git a/testing/util/sync_aggregate.go b/testing/util/sync_aggregate.go index ff868032a6..3f2552e025 100644 --- a/testing/util/sync_aggregate.go +++ b/testing/util/sync_aggregate.go @@ -1,6 +1,7 @@ package util import ( + "github.com/OffchainLabs/go-bitfield" "github.com/OffchainLabs/prysm/v6/beacon-chain/core/signing" p2pType "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/types" "github.com/OffchainLabs/prysm/v6/beacon-chain/state" @@ -10,7 +11,6 @@ import ( ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" "github.com/OffchainLabs/prysm/v6/time/slots" "github.com/pkg/errors" - "github.com/prysmaticlabs/go-bitfield" ) func generateSyncAggregate(st state.BeaconState, privs []bls.SecretKey, parentRoot [32]byte) (*ethpb.SyncAggregate, error) { diff --git a/tools/bootnode/bootnode.go b/tools/bootnode/bootnode.go index 3ed313ccaa..c0f955e873 100644 --- a/tools/bootnode/bootnode.go +++ b/tools/bootnode/bootnode.go @@ -23,6 +23,7 @@ import ( "os" "time" + "github.com/OffchainLabs/go-bitfield" "github.com/OffchainLabs/prysm/v6/async" "github.com/OffchainLabs/prysm/v6/beacon-chain/core/signing" "github.com/OffchainLabs/prysm/v6/config/params" @@ -42,7 +43,6 @@ import ( "github.com/pkg/errors" "github.com/prometheus/client_golang/prometheus" "github.com/prometheus/client_golang/prometheus/promauto" - "github.com/prysmaticlabs/go-bitfield" "github.com/sirupsen/logrus" ) diff --git a/validator/client/aggregate_test.go b/validator/client/aggregate_test.go index 4741ba5739..dd29748fef 100644 --- a/validator/client/aggregate_test.go +++ b/validator/client/aggregate_test.go @@ -7,6 +7,7 @@ import ( "testing" "time" + "github.com/OffchainLabs/go-bitfield" fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" "github.com/OffchainLabs/prysm/v6/config/params" "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" @@ -17,7 +18,6 @@ import ( "github.com/OffchainLabs/prysm/v6/testing/util" "github.com/OffchainLabs/prysm/v6/time/slots" "github.com/OffchainLabs/prysm/v6/validator/client/iface" - "github.com/prysmaticlabs/go-bitfield" logTest 
"github.com/sirupsen/logrus/hooks/test" "go.uber.org/mock/gomock" ) diff --git a/validator/client/attest.go b/validator/client/attest.go index 3e877084ca..87f99e9ab8 100644 --- a/validator/client/attest.go +++ b/validator/client/attest.go @@ -7,6 +7,7 @@ import ( "strings" "time" + "github.com/OffchainLabs/go-bitfield" "github.com/OffchainLabs/prysm/v6/async" "github.com/OffchainLabs/prysm/v6/beacon-chain/core/signing" "github.com/OffchainLabs/prysm/v6/config/features" @@ -22,7 +23,6 @@ import ( "github.com/OffchainLabs/prysm/v6/time/slots" "github.com/OffchainLabs/prysm/v6/validator/client/iface" "github.com/pkg/errors" - "github.com/prysmaticlabs/go-bitfield" "github.com/sirupsen/logrus" ) diff --git a/validator/client/attest_test.go b/validator/client/attest_test.go index 91faa36296..0d62ae7be1 100644 --- a/validator/client/attest_test.go +++ b/validator/client/attest_test.go @@ -10,6 +10,7 @@ import ( "testing" "time" + "github.com/OffchainLabs/go-bitfield" "github.com/OffchainLabs/prysm/v6/async/event" "github.com/OffchainLabs/prysm/v6/beacon-chain/core/signing" "github.com/OffchainLabs/prysm/v6/config/features" @@ -22,7 +23,6 @@ import ( "github.com/OffchainLabs/prysm/v6/testing/assert" "github.com/OffchainLabs/prysm/v6/testing/require" "github.com/OffchainLabs/prysm/v6/testing/util" - "github.com/prysmaticlabs/go-bitfield" logTest "github.com/sirupsen/logrus/hooks/test" "go.uber.org/mock/gomock" "gopkg.in/d4l3k/messagediff.v1" diff --git a/validator/client/runner_test.go b/validator/client/runner_test.go index c126f570af..9d332472fd 100644 --- a/validator/client/runner_test.go +++ b/validator/client/runner_test.go @@ -10,6 +10,7 @@ import ( "testing" "time" + "github.com/OffchainLabs/go-bitfield" "github.com/OffchainLabs/prysm/v6/async/event" "github.com/OffchainLabs/prysm/v6/cache/lru" fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" @@ -29,7 +30,6 @@ import ( "github.com/OffchainLabs/prysm/v6/validator/keymanager/local" "github.com/ethereum/go-ethereum/common" "github.com/pkg/errors" - "github.com/prysmaticlabs/go-bitfield" "github.com/sirupsen/logrus" logTest "github.com/sirupsen/logrus/hooks/test" "go.uber.org/mock/gomock" diff --git a/validator/client/sync_committee_test.go b/validator/client/sync_committee_test.go index 6c49d86b2d..156500c046 100644 --- a/validator/client/sync_committee_test.go +++ b/validator/client/sync_committee_test.go @@ -6,6 +6,7 @@ import ( "fmt" "testing" + "github.com/OffchainLabs/go-bitfield" fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" "github.com/OffchainLabs/prysm/v6/crypto/bls" @@ -14,7 +15,6 @@ import ( "github.com/OffchainLabs/prysm/v6/testing/assert" "github.com/OffchainLabs/prysm/v6/testing/require" "github.com/pkg/errors" - "github.com/prysmaticlabs/go-bitfield" logTest "github.com/sirupsen/logrus/hooks/test" "go.uber.org/mock/gomock" "google.golang.org/protobuf/types/known/emptypb" diff --git a/validator/keymanager/remote-web3signer/types/custom_mappers_test.go b/validator/keymanager/remote-web3signer/types/custom_mappers_test.go index cda1c99830..2e1d230bcb 100644 --- a/validator/keymanager/remote-web3signer/types/custom_mappers_test.go +++ b/validator/keymanager/remote-web3signer/types/custom_mappers_test.go @@ -4,12 +4,12 @@ import ( "reflect" "testing" + "github.com/OffchainLabs/go-bitfield" fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" ethpb 
"github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" "github.com/OffchainLabs/prysm/v6/validator/keymanager/remote-web3signer/types" "github.com/OffchainLabs/prysm/v6/validator/keymanager/remote-web3signer/types/mock" - "github.com/prysmaticlabs/go-bitfield" ) func TestMapAggregateAndProof(t *testing.T) { diff --git a/validator/keymanager/remote-web3signer/types/mock/mocks.go b/validator/keymanager/remote-web3signer/types/mock/mocks.go index 765980508a..4c70815f53 100644 --- a/validator/keymanager/remote-web3signer/types/mock/mocks.go +++ b/validator/keymanager/remote-web3signer/types/mock/mocks.go @@ -5,6 +5,7 @@ import ( "fmt" "strings" + "github.com/OffchainLabs/go-bitfield" fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" "github.com/OffchainLabs/prysm/v6/config/params" "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" @@ -14,7 +15,6 @@ import ( "github.com/OffchainLabs/prysm/v6/testing/util" "github.com/OffchainLabs/prysm/v6/validator/keymanager/remote-web3signer/types" "github.com/ethereum/go-ethereum/common/hexutil" - "github.com/prysmaticlabs/go-bitfield" ) ///////////////////////////////////////////////////////////////////////////////////////////////// From 165c4b0af16a05ab4e926b543c4c1602c9f5d0fe Mon Sep 17 00:00:00 2001 From: Jun Song <87601811+syjn99@users.noreply.github.com> Date: Tue, 4 Nov 2025 16:14:47 +0000 Subject: [PATCH 073/103] Handle addition overflow in `/eth/v1/beacon/rewards/attestations/{epoch}` (#15970) * Handle addition overflow in `/eth/v1/beacon/rewards/attestations/{epoch}` * Changelog --- beacon-chain/rpc/eth/rewards/handlers.go | 9 +++++++-- beacon-chain/rpc/eth/rewards/handlers_test.go | 13 +++++++++++++ changelog/syjn99-attestation-epoch-overflow.md | 2 ++ 3 files changed, 22 insertions(+), 2 deletions(-) create mode 100644 changelog/syjn99-attestation-epoch-overflow.md diff --git a/beacon-chain/rpc/eth/rewards/handlers.go b/beacon-chain/rpc/eth/rewards/handlers.go index 56a87c5b6f..1d392196a0 100644 --- a/beacon-chain/rpc/eth/rewards/handlers.go +++ b/beacon-chain/rpc/eth/rewards/handlers.go @@ -209,8 +209,13 @@ func (s *Server) attRewardsState(w http.ResponseWriter, r *http.Request) (state. 
httputil.HandleError(w, "Attestation rewards are not supported for Phase 0", http.StatusNotFound) return nil, false } - currentEpoch := uint64(slots.ToEpoch(s.TimeFetcher.CurrentSlot())) - if requestedEpoch+1 >= currentEpoch { + currentEpoch := slots.ToEpoch(s.TimeFetcher.CurrentSlot()) + bufferedEpoch, err := primitives.Epoch(requestedEpoch).SafeAdd(1) + if err != nil { + httputil.HandleError(w, "Could not increment epoch: "+err.Error(), http.StatusNotFound) + return nil, false + } + if bufferedEpoch >= currentEpoch { httputil.HandleError(w, "Attestation rewards are available after two epoch transitions to ensure all attestations have a chance of inclusion", http.StatusNotFound) diff --git a/beacon-chain/rpc/eth/rewards/handlers_test.go b/beacon-chain/rpc/eth/rewards/handlers_test.go index 4e596804a8..f0604ffb9a 100644 --- a/beacon-chain/rpc/eth/rewards/handlers_test.go +++ b/beacon-chain/rpc/eth/rewards/handlers_test.go @@ -4,6 +4,7 @@ import ( "bytes" "encoding/json" "fmt" + "math" "net/http" "net/http/httptest" "strconv" @@ -747,6 +748,18 @@ func TestAttestationRewards(t *testing.T) { assert.Equal(t, http.StatusNotFound, e.Code) assert.Equal(t, "Attestation rewards are available after two epoch transitions to ensure all attestations have a chance of inclusion", e.Message) }) + t.Run("epoch overflow", func(t *testing.T) { + url := "http://only.the.epoch.number.at.the.end.is.important/" + strconv.FormatUint(math.MaxUint64, 10) + request := httptest.NewRequest("POST", url, nil) + writer := httptest.NewRecorder() + writer.Body = &bytes.Buffer{} + + s.AttestationRewards(writer, request) + assert.Equal(t, http.StatusNotFound, writer.Code) + e := &httputil.DefaultJsonError{} + require.NoError(t, json.Unmarshal(writer.Body.Bytes(), e)) + assert.Equal(t, http.StatusNotFound, e.Code) + }) } func TestSyncCommiteeRewards(t *testing.T) { diff --git a/changelog/syjn99-attestation-epoch-overflow.md b/changelog/syjn99-attestation-epoch-overflow.md new file mode 100644 index 0000000000..90f9b08ed3 --- /dev/null +++ b/changelog/syjn99-attestation-epoch-overflow.md @@ -0,0 +1,2 @@ +### Fixed +- Fix #15969: Handle addition overflow in `/eth/v1/beacon/rewards/attestations/{epoch}`. From 7df60e8c9b75314dc39f19aedaac890e39298ea8 Mon Sep 17 00:00:00 2001 From: Manu NALEPA Date: Tue, 4 Nov 2025 20:56:38 +0100 Subject: [PATCH 074/103] `SidecarProposerExpected`: Add the slot in the single flight key. (#15976) * `SidecarProposerExpected`: Add the slot in the single flight key. * Fix Kasey's comment. * Revert "Fix Kasey's comment." This reverts commit 9e3b4b7acf373bd355e05249c82a9bc6bbb78b53. --- beacon-chain/verification/data_column.go | 6 +++++- beacon-chain/verification/data_column_test.go | 10 ++++++++++ changelog/manu-column-slot.md | 2 ++ 3 files changed, 17 insertions(+), 1 deletion(-) create mode 100644 changelog/manu-column-slot.md diff --git a/beacon-chain/verification/data_column.go b/beacon-chain/verification/data_column.go index e094a7365b..dc35ae3f1b 100644 --- a/beacon-chain/verification/data_column.go +++ b/beacon-chain/verification/data_column.go @@ -481,7 +481,7 @@ func (dv *RODataColumnsVerifier) SidecarProposerExpected(ctx context.Context) (e parentRoot := dataColumn.ParentRoot() // Ensure the expensive index computation is only performed once for // concurrent requests for the same signature data. 
- if _, err, _ := dv.sg.Do(fmt.Sprintf("%#x", parentRoot), func() (any, error) { + if _, err, _ := dv.sg.Do(concatRootSlot(parentRoot, dataColumnSlot), func() (any, error) { // Retrieve the parent state. parentState, err := dv.state(ctx, parentRoot) if err != nil { @@ -577,3 +577,7 @@ func inclusionProofKey(c blocks.RODataColumn) ([32]byte, error) { return sha256.Sum256(unhashedKey), nil } + +func concatRootSlot(root [fieldparams.RootLength]byte, slot primitives.Slot) string { + return string(root[:]) + fmt.Sprintf("%d", slot) +} diff --git a/beacon-chain/verification/data_column_test.go b/beacon-chain/verification/data_column_test.go index e4a92d41bf..8e6f0b5675 100644 --- a/beacon-chain/verification/data_column_test.go +++ b/beacon-chain/verification/data_column_test.go @@ -976,3 +976,13 @@ func TestColumnRequirementSatisfaction(t *testing.T) { _, err = verifier.VerifiedRODataColumns() require.NoError(t, err) } + +func TestConcatRootSlot(t *testing.T) { + root := [fieldparams.RootLength]byte{1, 2, 3} + const slot = primitives.Slot(3210) + + const expected = "\x01\x02\x03\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x003210" + + actual := concatRootSlot(root, slot) + require.Equal(t, expected, actual) +} diff --git a/changelog/manu-column-slot.md b/changelog/manu-column-slot.md new file mode 100644 index 0000000000..a7e1a484d8 --- /dev/null +++ b/changelog/manu-column-slot.md @@ -0,0 +1,2 @@ +### Fixed +- `SidecarProposerExpected`: Add the slot in the single flight key. \ No newline at end of file From d945b1d9051f8a00a50dad6f5c7e62d050995ec5 Mon Sep 17 00:00:00 2001 From: terence Date: Wed, 5 Nov 2025 08:37:16 -0500 Subject: [PATCH 075/103] Add Gloas protobuf definitions with spec tests (#15601) * Add Gloas protobuf definitions with spec tests * Potuz's feedback * Update comment * Update final commits for fastssz and go-bitfield * Sync with develop offchain labs go bitfield changes * Update deps.bzl Co-authored-by: Preston Van Loon * Update deps.bzl Co-authored-by: Preston Van Loon * Gazelle fix build --------- Co-authored-by: Preston Van Loon --- changelog/ttsao_add-gloas-protobufs.md | 3 + deps.bzl | 4 +- go.mod | 2 +- go.sum | 4 +- proto/eth/v1/attestation.pb.go | 164 +- proto/eth/v1/beacon_block.pb.go | 52 +- proto/prysm/v1alpha1/BUILD.bazel | 31 + proto/prysm/v1alpha1/attestation.pb.go | 280 +- proto/prysm/v1alpha1/beacon_block.pb.go | 2842 +++++++------- proto/prysm/v1alpha1/beacon_block.proto | 5 + proto/prysm/v1alpha1/beacon_core_types.pb.go | 196 +- proto/prysm/v1alpha1/beacon_state.pb.go | 1920 ++++----- proto/prysm/v1alpha1/gloas.pb.go | 2037 ++++++++++ proto/prysm/v1alpha1/gloas.proto | 423 ++ proto/prysm/v1alpha1/gloas.ssz.go | 3474 +++++++++++++++++ proto/prysm/v1alpha1/p2p_messages.pb.go | 196 +- proto/prysm/v1alpha1/sync_committee.pb.go | 48 +- proto/ssz_proto_library.bzl | 10 + proto/ssz_query/testing/test_containers.pb.go | 218 +- testing/spectest/mainnet/BUILD.bazel | 2 + .../gloas__ssz_static__ssz_static_test.go | 11 + testing/spectest/minimal/BUILD.bazel | 2 + .../gloas__ssz_static__ssz_static_test.go | 11 + .../shared/gloas/ssz_static/BUILD.bazel | 15 + .../shared/gloas/ssz_static/ssz_static.go | 184 + 25 files changed, 9196 insertions(+), 2938 deletions(-) create mode 100644 changelog/ttsao_add-gloas-protobufs.md create mode 100755 proto/prysm/v1alpha1/gloas.pb.go create mode 100644 proto/prysm/v1alpha1/gloas.proto create mode 100644 proto/prysm/v1alpha1/gloas.ssz.go create mode 100644 
testing/spectest/mainnet/gloas__ssz_static__ssz_static_test.go create mode 100644 testing/spectest/minimal/gloas__ssz_static__ssz_static_test.go create mode 100644 testing/spectest/shared/gloas/ssz_static/BUILD.bazel create mode 100644 testing/spectest/shared/gloas/ssz_static/ssz_static.go diff --git a/changelog/ttsao_add-gloas-protobufs.md b/changelog/ttsao_add-gloas-protobufs.md new file mode 100644 index 0000000000..85a288ef91 --- /dev/null +++ b/changelog/ttsao_add-gloas-protobufs.md @@ -0,0 +1,3 @@ +### Added + +- Add Gloas protobuf definitions with spec tests and SSZ serialization support diff --git a/deps.bzl b/deps.bzl index fe710020b4..8082c17f83 100644 --- a/deps.bzl +++ b/deps.bzl @@ -2872,8 +2872,8 @@ def prysm_deps(): go_repository( name = "com_github_prysmaticlabs_fastssz", importpath = "github.com/prysmaticlabs/fastssz", - sum = "h1:xuVAdtz5ShYblG2sPyb4gw01DF8InbOI/kBCQjk7NiM=", - version = "v0.0.0-20241008181541-518c4ce73516", + sum = "h1:ASmh3y4ALne2OoabF5pPL8OcIpBko8gFMg5018MxkBI=", + version = "v0.0.0-20251103153600-259302269bfc", ) go_repository( name = "com_github_prysmaticlabs_go_bitfield", diff --git a/go.mod b/go.mod index 36f4d14f33..5e32370d4b 100644 --- a/go.mod +++ b/go.mod @@ -60,7 +60,7 @@ require ( github.com/prometheus/client_golang v1.20.5 github.com/prometheus/client_model v0.6.1 github.com/prometheus/prom2json v1.3.0 - github.com/prysmaticlabs/fastssz v0.0.0-20241008181541-518c4ce73516 + github.com/prysmaticlabs/fastssz v0.0.0-20251103153600-259302269bfc github.com/prysmaticlabs/prombbolt v0.0.0-20210126082820-9b7adba6db7c github.com/prysmaticlabs/protoc-gen-go-cast v0.0.0-20230228205207-28762a7b9294 github.com/r3labs/sse/v2 v2.10.0 diff --git a/go.sum b/go.sum index b8dda499b2..46d405baa0 100644 --- a/go.sum +++ b/go.sum @@ -905,8 +905,8 @@ github.com/prometheus/procfs v0.15.1 h1:YagwOFzUgYfKKHX6Dr+sHT7km/hxC76UB0leargg github.com/prometheus/procfs v0.15.1/go.mod h1:fB45yRUv8NstnjriLhBQLuOUt+WW4BsoGhij/e3PBqk= github.com/prometheus/prom2json v1.3.0 h1:BlqrtbT9lLH3ZsOVhXPsHzFrApCTKRifB7gjJuypu6Y= github.com/prometheus/prom2json v1.3.0/go.mod h1:rMN7m0ApCowcoDlypBHlkNbp5eJQf/+1isKykIP5ZnM= -github.com/prysmaticlabs/fastssz v0.0.0-20241008181541-518c4ce73516 h1:xuVAdtz5ShYblG2sPyb4gw01DF8InbOI/kBCQjk7NiM= -github.com/prysmaticlabs/fastssz v0.0.0-20241008181541-518c4ce73516/go.mod h1:h2OlIZD/M6wFvV3YMZbW16lFgh3Rsye00G44J2cwLyU= +github.com/prysmaticlabs/fastssz v0.0.0-20251103153600-259302269bfc h1:ASmh3y4ALne2OoabF5pPL8OcIpBko8gFMg5018MxkBI= +github.com/prysmaticlabs/fastssz v0.0.0-20251103153600-259302269bfc/go.mod h1:h2OlIZD/M6wFvV3YMZbW16lFgh3Rsye00G44J2cwLyU= github.com/prysmaticlabs/go-bitfield v0.0.0-20210108222456-8e92c3709aa0/go.mod h1:hCwmef+4qXWjv0jLDbQdWnL0Ol7cS7/lCSS26WR+u6s= github.com/prysmaticlabs/gohashtree v0.0.5-beta h1:ct41mg7HyIZd7uoSM/ud23f+3DxQG9tlMlQG+BVX23c= github.com/prysmaticlabs/gohashtree v0.0.5-beta/go.mod h1:HRuvtXLZ4WkaB1MItToVH2e8ZwKwZPY5/Rcby+CvvLY= diff --git a/proto/eth/v1/attestation.pb.go b/proto/eth/v1/attestation.pb.go index 598cb7d6e0..dd317f0428 100755 --- a/proto/eth/v1/attestation.pb.go +++ b/proto/eth/v1/attestation.pb.go @@ -10,7 +10,7 @@ import ( reflect "reflect" sync "sync" - github_com_prysmaticlabs_go_bitfield "github.com/OffchainLabs/go-bitfield" + github_com_OffchainLabs_go_bitfield "github.com/OffchainLabs/go-bitfield" github_com_OffchainLabs_prysm_v6_consensus_types_primitives "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" _ "github.com/OffchainLabs/prysm/v6/proto/eth/ext" protoreflect 
"google.golang.org/protobuf/reflect/protoreflect" @@ -26,10 +26,10 @@ const ( ) type Attestation struct { - state protoimpl.MessageState `protogen:"open.v1"` - AggregationBits github_com_prysmaticlabs_go_bitfield.Bitlist `protobuf:"bytes,1,opt,name=aggregation_bits,json=aggregationBits,proto3" json:"aggregation_bits,omitempty" cast-type:"github.com/OffchainLabs/go-bitfield.Bitlist" ssz-max:"2048"` - Data *AttestationData `protobuf:"bytes,2,opt,name=data,proto3" json:"data,omitempty"` - Signature []byte `protobuf:"bytes,3,opt,name=signature,proto3" json:"signature,omitempty" ssz-size:"96"` + state protoimpl.MessageState `protogen:"open.v1"` + AggregationBits github_com_OffchainLabs_go_bitfield.Bitlist `protobuf:"bytes,1,opt,name=aggregation_bits,json=aggregationBits,proto3" json:"aggregation_bits,omitempty" cast-type:"github.com/OffchainLabs/go-bitfield.Bitlist" ssz-max:"2048"` + Data *AttestationData `protobuf:"bytes,2,opt,name=data,proto3" json:"data,omitempty"` + Signature []byte `protobuf:"bytes,3,opt,name=signature,proto3" json:"signature,omitempty" ssz-size:"96"` unknownFields protoimpl.UnknownFields sizeCache protoimpl.SizeCache } @@ -64,11 +64,11 @@ func (*Attestation) Descriptor() ([]byte, []int) { return file_proto_eth_v1_attestation_proto_rawDescGZIP(), []int{0} } -func (x *Attestation) GetAggregationBits() github_com_prysmaticlabs_go_bitfield.Bitlist { +func (x *Attestation) GetAggregationBits() github_com_OffchainLabs_go_bitfield.Bitlist { if x != nil { return x.AggregationBits } - return github_com_prysmaticlabs_go_bitfield.Bitlist(nil) + return github_com_OffchainLabs_go_bitfield.Bitlist(nil) } func (x *Attestation) GetData() *AttestationData { @@ -335,85 +335,85 @@ var file_proto_eth_v1_attestation_proto_rawDesc = []byte{ 0x75, 0x66, 0x2f, 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x6f, 0x72, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x1b, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2f, 0x65, 0x74, 0x68, 0x2f, 0x65, 0x78, 0x74, 0x2f, 0x6f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, - 0x22, 0xce, 0x01, 0x0a, 0x0b, 0x41, 0x74, 0x74, 0x65, 0x73, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, - 0x12, 0x63, 0x0a, 0x10, 0x61, 0x67, 0x67, 0x72, 0x65, 0x67, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, - 0x62, 0x69, 0x74, 0x73, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x38, 0x82, 0xb5, 0x18, 0x2c, - 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, - 0x61, 0x74, 0x69, 0x63, 0x6c, 0x61, 0x62, 0x73, 0x2f, 0x67, 0x6f, 0x2d, 0x62, 0x69, 0x74, 0x66, - 0x69, 0x65, 0x6c, 0x64, 0x2e, 0x42, 0x69, 0x74, 0x6c, 0x69, 0x73, 0x74, 0x92, 0xb5, 0x18, 0x04, - 0x32, 0x30, 0x34, 0x38, 0x52, 0x0f, 0x61, 0x67, 0x67, 0x72, 0x65, 0x67, 0x61, 0x74, 0x69, 0x6f, - 0x6e, 0x42, 0x69, 0x74, 0x73, 0x12, 0x34, 0x0a, 0x04, 0x64, 0x61, 0x74, 0x61, 0x18, 0x02, 0x20, - 0x01, 0x28, 0x0b, 0x32, 0x20, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, - 0x74, 0x68, 0x2e, 0x76, 0x31, 0x2e, 0x41, 0x74, 0x74, 0x65, 0x73, 0x74, 0x61, 0x74, 0x69, 0x6f, - 0x6e, 0x44, 0x61, 0x74, 0x61, 0x52, 0x04, 0x64, 0x61, 0x74, 0x61, 0x12, 0x24, 0x0a, 0x09, 0x73, - 0x69, 0x67, 0x6e, 0x61, 0x74, 0x75, 0x72, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x06, - 0x8a, 0xb5, 0x18, 0x02, 0x39, 0x36, 0x52, 0x09, 0x73, 0x69, 0x67, 0x6e, 0x61, 0x74, 0x75, 0x72, - 0x65, 0x22, 0x86, 0x02, 0x0a, 0x1c, 0x41, 0x67, 0x67, 0x72, 0x65, 0x67, 0x61, 0x74, 0x65, 0x41, - 0x74, 0x74, 0x65, 0x73, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x41, 0x6e, 0x64, 0x50, 0x72, 0x6f, - 0x6f, 0x66, 0x12, 0x79, 
0x0a, 0x10, 0x61, 0x67, 0x67, 0x72, 0x65, 0x67, 0x61, 0x74, 0x6f, 0x72, - 0x5f, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, 0x42, 0x4e, 0x82, 0xb5, + 0x22, 0xcd, 0x01, 0x0a, 0x0b, 0x41, 0x74, 0x74, 0x65, 0x73, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, + 0x12, 0x62, 0x0a, 0x10, 0x61, 0x67, 0x67, 0x72, 0x65, 0x67, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, + 0x62, 0x69, 0x74, 0x73, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x37, 0x82, 0xb5, 0x18, 0x2b, + 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, + 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x67, 0x6f, 0x2d, 0x62, 0x69, 0x74, 0x66, 0x69, + 0x65, 0x6c, 0x64, 0x2e, 0x42, 0x69, 0x74, 0x6c, 0x69, 0x73, 0x74, 0x92, 0xb5, 0x18, 0x04, 0x32, + 0x30, 0x34, 0x38, 0x52, 0x0f, 0x61, 0x67, 0x67, 0x72, 0x65, 0x67, 0x61, 0x74, 0x69, 0x6f, 0x6e, + 0x42, 0x69, 0x74, 0x73, 0x12, 0x34, 0x0a, 0x04, 0x64, 0x61, 0x74, 0x61, 0x18, 0x02, 0x20, 0x01, + 0x28, 0x0b, 0x32, 0x20, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, + 0x68, 0x2e, 0x76, 0x31, 0x2e, 0x41, 0x74, 0x74, 0x65, 0x73, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, + 0x44, 0x61, 0x74, 0x61, 0x52, 0x04, 0x64, 0x61, 0x74, 0x61, 0x12, 0x24, 0x0a, 0x09, 0x73, 0x69, + 0x67, 0x6e, 0x61, 0x74, 0x75, 0x72, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x06, 0x8a, + 0xb5, 0x18, 0x02, 0x39, 0x36, 0x52, 0x09, 0x73, 0x69, 0x67, 0x6e, 0x61, 0x74, 0x75, 0x72, 0x65, + 0x22, 0x86, 0x02, 0x0a, 0x1c, 0x41, 0x67, 0x67, 0x72, 0x65, 0x67, 0x61, 0x74, 0x65, 0x41, 0x74, + 0x74, 0x65, 0x73, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x41, 0x6e, 0x64, 0x50, 0x72, 0x6f, 0x6f, + 0x66, 0x12, 0x79, 0x0a, 0x10, 0x61, 0x67, 0x67, 0x72, 0x65, 0x67, 0x61, 0x74, 0x6f, 0x72, 0x5f, + 0x69, 0x6e, 0x64, 0x65, 0x78, 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, 0x42, 0x4e, 0x82, 0xb5, 0x18, + 0x4a, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, + 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, + 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, + 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x56, 0x61, 0x6c, + 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x52, 0x0f, 0x61, 0x67, 0x67, + 0x72, 0x65, 0x67, 0x61, 0x74, 0x6f, 0x72, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x12, 0x3a, 0x0a, 0x09, + 0x61, 0x67, 0x67, 0x72, 0x65, 0x67, 0x61, 0x74, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, + 0x1c, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, + 0x31, 0x2e, 0x41, 0x74, 0x74, 0x65, 0x73, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x09, 0x61, + 0x67, 0x67, 0x72, 0x65, 0x67, 0x61, 0x74, 0x65, 0x12, 0x2f, 0x0a, 0x0f, 0x73, 0x65, 0x6c, 0x65, + 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x70, 0x72, 0x6f, 0x6f, 0x66, 0x18, 0x02, 0x20, 0x01, 0x28, + 0x0c, 0x42, 0x06, 0x8a, 0xb5, 0x18, 0x02, 0x39, 0x36, 0x52, 0x0e, 0x73, 0x65, 0x6c, 0x65, 0x63, + 0x74, 0x69, 0x6f, 0x6e, 0x50, 0x72, 0x6f, 0x6f, 0x66, 0x22, 0x93, 0x01, 0x0a, 0x22, 0x53, 0x69, + 0x67, 0x6e, 0x65, 0x64, 0x41, 0x67, 0x67, 0x72, 0x65, 0x67, 0x61, 0x74, 0x65, 0x41, 0x74, 0x74, + 0x65, 0x73, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x41, 0x6e, 0x64, 0x50, 0x72, 0x6f, 0x6f, 0x66, + 0x12, 0x47, 0x0a, 0x07, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, + 0x0b, 0x32, 0x2d, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, + 0x2e, 0x76, 0x31, 0x2e, 0x41, 0x67, 0x67, 0x72, 
0x65, 0x67, 0x61, 0x74, 0x65, 0x41, 0x74, 0x74, + 0x65, 0x73, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x41, 0x6e, 0x64, 0x50, 0x72, 0x6f, 0x6f, 0x66, + 0x52, 0x07, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x12, 0x24, 0x0a, 0x09, 0x73, 0x69, 0x67, + 0x6e, 0x61, 0x74, 0x75, 0x72, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x06, 0x8a, 0xb5, + 0x18, 0x02, 0x39, 0x36, 0x52, 0x09, 0x73, 0x69, 0x67, 0x6e, 0x61, 0x74, 0x75, 0x72, 0x65, 0x22, + 0xef, 0x02, 0x0a, 0x0f, 0x41, 0x74, 0x74, 0x65, 0x73, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x44, + 0x61, 0x74, 0x61, 0x12, 0x58, 0x0a, 0x04, 0x73, 0x6c, 0x6f, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, + 0x04, 0x42, 0x44, 0x82, 0xb5, 0x18, 0x40, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, + 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, + 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, + 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, + 0x65, 0x73, 0x2e, 0x53, 0x6c, 0x6f, 0x74, 0x52, 0x04, 0x73, 0x6c, 0x6f, 0x74, 0x12, 0x64, 0x0a, + 0x05, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x18, 0x02, 0x20, 0x01, 0x28, 0x04, 0x42, 0x4e, 0x82, 0xb5, 0x18, 0x4a, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, - 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x56, 0x61, - 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x52, 0x0f, 0x61, 0x67, - 0x67, 0x72, 0x65, 0x67, 0x61, 0x74, 0x6f, 0x72, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x12, 0x3a, 0x0a, - 0x09, 0x61, 0x67, 0x67, 0x72, 0x65, 0x67, 0x61, 0x74, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, - 0x32, 0x1c, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, - 0x76, 0x31, 0x2e, 0x41, 0x74, 0x74, 0x65, 0x73, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x09, - 0x61, 0x67, 0x67, 0x72, 0x65, 0x67, 0x61, 0x74, 0x65, 0x12, 0x2f, 0x0a, 0x0f, 0x73, 0x65, 0x6c, - 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x70, 0x72, 0x6f, 0x6f, 0x66, 0x18, 0x02, 0x20, 0x01, - 0x28, 0x0c, 0x42, 0x06, 0x8a, 0xb5, 0x18, 0x02, 0x39, 0x36, 0x52, 0x0e, 0x73, 0x65, 0x6c, 0x65, - 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x50, 0x72, 0x6f, 0x6f, 0x66, 0x22, 0x93, 0x01, 0x0a, 0x22, 0x53, - 0x69, 0x67, 0x6e, 0x65, 0x64, 0x41, 0x67, 0x67, 0x72, 0x65, 0x67, 0x61, 0x74, 0x65, 0x41, 0x74, - 0x74, 0x65, 0x73, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x41, 0x6e, 0x64, 0x50, 0x72, 0x6f, 0x6f, - 0x66, 0x12, 0x47, 0x0a, 0x07, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x18, 0x01, 0x20, 0x01, - 0x28, 0x0b, 0x32, 0x2d, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, - 0x68, 0x2e, 0x76, 0x31, 0x2e, 0x41, 0x67, 0x67, 0x72, 0x65, 0x67, 0x61, 0x74, 0x65, 0x41, 0x74, - 0x74, 0x65, 0x73, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x41, 0x6e, 0x64, 0x50, 0x72, 0x6f, 0x6f, - 0x66, 0x52, 0x07, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x12, 0x24, 0x0a, 0x09, 0x73, 0x69, - 0x67, 0x6e, 0x61, 0x74, 0x75, 0x72, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x06, 0x8a, - 0xb5, 0x18, 0x02, 0x39, 0x36, 0x52, 0x09, 0x73, 0x69, 0x67, 0x6e, 0x61, 0x74, 0x75, 0x72, 0x65, - 0x22, 0xef, 0x02, 0x0a, 0x0f, 0x41, 0x74, 0x74, 0x65, 0x73, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, - 0x44, 0x61, 0x74, 0x61, 0x12, 0x58, 0x0a, 0x04, 0x73, 0x6c, 0x6f, 0x74, 0x18, 
0x01, 0x20, 0x01, - 0x28, 0x04, 0x42, 0x44, 0x82, 0xb5, 0x18, 0x40, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, - 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, - 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, - 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, - 0x76, 0x65, 0x73, 0x2e, 0x53, 0x6c, 0x6f, 0x74, 0x52, 0x04, 0x73, 0x6c, 0x6f, 0x74, 0x12, 0x64, - 0x0a, 0x05, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x18, 0x02, 0x20, 0x01, 0x28, 0x04, 0x42, 0x4e, 0x82, - 0xb5, 0x18, 0x4a, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, - 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, - 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, - 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x43, - 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x74, 0x65, 0x65, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x52, 0x05, 0x69, - 0x6e, 0x64, 0x65, 0x78, 0x12, 0x32, 0x0a, 0x11, 0x62, 0x65, 0x61, 0x63, 0x6f, 0x6e, 0x5f, 0x62, - 0x6c, 0x6f, 0x63, 0x6b, 0x5f, 0x72, 0x6f, 0x6f, 0x74, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0c, 0x42, - 0x06, 0x8a, 0xb5, 0x18, 0x02, 0x33, 0x32, 0x52, 0x0f, 0x62, 0x65, 0x61, 0x63, 0x6f, 0x6e, 0x42, - 0x6c, 0x6f, 0x63, 0x6b, 0x52, 0x6f, 0x6f, 0x74, 0x12, 0x33, 0x0a, 0x06, 0x73, 0x6f, 0x75, 0x72, - 0x63, 0x65, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1b, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, - 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x2e, 0x43, 0x68, 0x65, 0x63, 0x6b, - 0x70, 0x6f, 0x69, 0x6e, 0x74, 0x52, 0x06, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x12, 0x33, 0x0a, - 0x06, 0x74, 0x61, 0x72, 0x67, 0x65, 0x74, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1b, 0x2e, - 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x2e, - 0x43, 0x68, 0x65, 0x63, 0x6b, 0x70, 0x6f, 0x69, 0x6e, 0x74, 0x52, 0x06, 0x74, 0x61, 0x72, 0x67, - 0x65, 0x74, 0x22, 0x85, 0x01, 0x0a, 0x0a, 0x43, 0x68, 0x65, 0x63, 0x6b, 0x70, 0x6f, 0x69, 0x6e, - 0x74, 0x12, 0x5b, 0x0a, 0x05, 0x65, 0x70, 0x6f, 0x63, 0x68, 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, - 0x42, 0x45, 0x82, 0xb5, 0x18, 0x41, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, + 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x43, 0x6f, + 0x6d, 0x6d, 0x69, 0x74, 0x74, 0x65, 0x65, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x52, 0x05, 0x69, 0x6e, + 0x64, 0x65, 0x78, 0x12, 0x32, 0x0a, 0x11, 0x62, 0x65, 0x61, 0x63, 0x6f, 0x6e, 0x5f, 0x62, 0x6c, + 0x6f, 0x63, 0x6b, 0x5f, 0x72, 0x6f, 0x6f, 0x74, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x06, + 0x8a, 0xb5, 0x18, 0x02, 0x33, 0x32, 0x52, 0x0f, 0x62, 0x65, 0x61, 0x63, 0x6f, 0x6e, 0x42, 0x6c, + 0x6f, 0x63, 0x6b, 0x52, 0x6f, 0x6f, 0x74, 0x12, 0x33, 0x0a, 0x06, 0x73, 0x6f, 0x75, 0x72, 0x63, + 0x65, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1b, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, + 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x2e, 0x43, 0x68, 0x65, 0x63, 0x6b, 0x70, + 0x6f, 0x69, 0x6e, 0x74, 0x52, 0x06, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x12, 0x33, 0x0a, 0x06, + 0x74, 0x61, 0x72, 0x67, 0x65, 0x74, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1b, 0x2e, 0x65, + 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x2e, 0x43, + 0x68, 0x65, 0x63, 0x6b, 0x70, 0x6f, 0x69, 0x6e, 0x74, 0x52, 0x06, 0x74, 0x61, 0x72, 0x67, 0x65, + 0x74, 
0x22, 0x85, 0x01, 0x0a, 0x0a, 0x43, 0x68, 0x65, 0x63, 0x6b, 0x70, 0x6f, 0x69, 0x6e, 0x74, + 0x12, 0x5b, 0x0a, 0x05, 0x65, 0x70, 0x6f, 0x63, 0x68, 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, 0x42, + 0x45, 0x82, 0xb5, 0x18, 0x41, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, + 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, + 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, + 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, + 0x2e, 0x45, 0x70, 0x6f, 0x63, 0x68, 0x52, 0x05, 0x65, 0x70, 0x6f, 0x63, 0x68, 0x12, 0x1a, 0x0a, + 0x04, 0x72, 0x6f, 0x6f, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x06, 0x8a, 0xb5, 0x18, + 0x02, 0x33, 0x32, 0x52, 0x04, 0x72, 0x6f, 0x6f, 0x74, 0x42, 0x7c, 0x0a, 0x13, 0x6f, 0x72, 0x67, + 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, + 0x42, 0x10, 0x41, 0x74, 0x74, 0x65, 0x73, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x50, 0x72, 0x6f, + 0x74, 0x6f, 0x50, 0x01, 0x5a, 0x2d, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, - 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, - 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, - 0x73, 0x2e, 0x45, 0x70, 0x6f, 0x63, 0x68, 0x52, 0x05, 0x65, 0x70, 0x6f, 0x63, 0x68, 0x12, 0x1a, - 0x0a, 0x04, 0x72, 0x6f, 0x6f, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x06, 0x8a, 0xb5, - 0x18, 0x02, 0x33, 0x32, 0x52, 0x04, 0x72, 0x6f, 0x6f, 0x74, 0x42, 0x7c, 0x0a, 0x13, 0x6f, 0x72, - 0x67, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, - 0x31, 0x42, 0x10, 0x41, 0x74, 0x74, 0x65, 0x73, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x50, 0x72, - 0x6f, 0x74, 0x6f, 0x50, 0x01, 0x5a, 0x2d, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, - 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, - 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2f, 0x65, 0x74, - 0x68, 0x2f, 0x76, 0x31, 0xaa, 0x02, 0x0f, 0x45, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, - 0x45, 0x74, 0x68, 0x2e, 0x56, 0x31, 0xca, 0x02, 0x0f, 0x45, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, - 0x6d, 0x5c, 0x45, 0x74, 0x68, 0x5c, 0x76, 0x31, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, + 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2f, 0x65, 0x74, 0x68, + 0x2f, 0x76, 0x31, 0xaa, 0x02, 0x0f, 0x45, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x45, + 0x74, 0x68, 0x2e, 0x56, 0x31, 0xca, 0x02, 0x0f, 0x45, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, + 0x5c, 0x45, 0x74, 0x68, 0x5c, 0x76, 0x31, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, } var ( diff --git a/proto/eth/v1/beacon_block.pb.go b/proto/eth/v1/beacon_block.pb.go index 35aaa2e3db..c8357bb234 100755 --- a/proto/eth/v1/beacon_block.pb.go +++ b/proto/eth/v1/beacon_block.pb.go @@ -10,7 +10,7 @@ import ( reflect "reflect" sync "sync" - github_com_prysmaticlabs_go_bitfield "github.com/OffchainLabs/go-bitfield" + github_com_OffchainLabs_go_bitfield "github.com/OffchainLabs/go-bitfield" github_com_OffchainLabs_prysm_v6_consensus_types_primitives "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" _ "github.com/OffchainLabs/prysm/v6/proto/eth/ext" protoreflect 
"google.golang.org/protobuf/reflect/protoreflect" @@ -762,9 +762,9 @@ func (x *IndexedAttestation) GetSignature() []byte { } type SyncAggregate struct { - state protoimpl.MessageState `protogen:"open.v1"` - SyncCommitteeBits github_com_prysmaticlabs_go_bitfield.Bitvector512 `protobuf:"bytes,1,opt,name=sync_committee_bits,json=syncCommitteeBits,proto3" json:"sync_committee_bits,omitempty" cast-type:"github.com/OffchainLabs/go-bitfield.Bitvector512" ssz-size:"64"` - SyncCommitteeSignature []byte `protobuf:"bytes,2,opt,name=sync_committee_signature,json=syncCommitteeSignature,proto3" json:"sync_committee_signature,omitempty" ssz-size:"96"` + state protoimpl.MessageState `protogen:"open.v1"` + SyncCommitteeBits github_com_OffchainLabs_go_bitfield.Bitvector512 `protobuf:"bytes,1,opt,name=sync_committee_bits,json=syncCommitteeBits,proto3" json:"sync_committee_bits,omitempty" cast-type:"github.com/OffchainLabs/go-bitfield.Bitvector512" ssz-size:"64"` + SyncCommitteeSignature []byte `protobuf:"bytes,2,opt,name=sync_committee_signature,json=syncCommitteeSignature,proto3" json:"sync_committee_signature,omitempty" ssz-size:"96"` unknownFields protoimpl.UnknownFields sizeCache protoimpl.SizeCache } @@ -799,11 +799,11 @@ func (*SyncAggregate) Descriptor() ([]byte, []int) { return file_proto_eth_v1_beacon_block_proto_rawDescGZIP(), []int{12} } -func (x *SyncAggregate) GetSyncCommitteeBits() github_com_prysmaticlabs_go_bitfield.Bitvector512 { +func (x *SyncAggregate) GetSyncCommitteeBits() github_com_OffchainLabs_go_bitfield.Bitvector512 { if x != nil { return x.SyncCommitteeBits } - return github_com_prysmaticlabs_go_bitfield.Bitvector512(nil) + return github_com_OffchainLabs_go_bitfield.Bitvector512(nil) } func (x *SyncAggregate) GetSyncCommitteeSignature() []byte { @@ -1067,27 +1067,27 @@ var file_proto_eth_v1_beacon_block_proto_rawDesc = []byte{ 0x6e, 0x44, 0x61, 0x74, 0x61, 0x52, 0x04, 0x64, 0x61, 0x74, 0x61, 0x12, 0x24, 0x0a, 0x09, 0x73, 0x69, 0x67, 0x6e, 0x61, 0x74, 0x75, 0x72, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x06, 0x8a, 0xb5, 0x18, 0x02, 0x39, 0x36, 0x52, 0x09, 0x73, 0x69, 0x67, 0x6e, 0x61, 0x74, 0x75, 0x72, - 0x65, 0x22, 0xbe, 0x01, 0x0a, 0x0d, 0x53, 0x79, 0x6e, 0x63, 0x41, 0x67, 0x67, 0x72, 0x65, 0x67, - 0x61, 0x74, 0x65, 0x12, 0x6b, 0x0a, 0x13, 0x73, 0x79, 0x6e, 0x63, 0x5f, 0x63, 0x6f, 0x6d, 0x6d, + 0x65, 0x22, 0xbd, 0x01, 0x0a, 0x0d, 0x53, 0x79, 0x6e, 0x63, 0x41, 0x67, 0x67, 0x72, 0x65, 0x67, + 0x61, 0x74, 0x65, 0x12, 0x6a, 0x0a, 0x13, 0x73, 0x79, 0x6e, 0x63, 0x5f, 0x63, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x74, 0x65, 0x65, 0x5f, 0x62, 0x69, 0x74, 0x73, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, - 0x42, 0x3b, 0x82, 0xb5, 0x18, 0x31, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, - 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x61, 0x74, 0x69, 0x63, 0x6c, 0x61, 0x62, 0x73, 0x2f, 0x67, - 0x6f, 0x2d, 0x62, 0x69, 0x74, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x2e, 0x42, 0x69, 0x74, 0x76, 0x65, - 0x63, 0x74, 0x6f, 0x72, 0x35, 0x31, 0x32, 0x8a, 0xb5, 0x18, 0x02, 0x36, 0x34, 0x52, 0x11, 0x73, - 0x79, 0x6e, 0x63, 0x43, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x74, 0x65, 0x65, 0x42, 0x69, 0x74, 0x73, - 0x12, 0x40, 0x0a, 0x18, 0x73, 0x79, 0x6e, 0x63, 0x5f, 0x63, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x74, - 0x65, 0x65, 0x5f, 0x73, 0x69, 0x67, 0x6e, 0x61, 0x74, 0x75, 0x72, 0x65, 0x18, 0x02, 0x20, 0x01, - 0x28, 0x0c, 0x42, 0x06, 0x8a, 0xb5, 0x18, 0x02, 0x39, 0x36, 0x52, 0x16, 0x73, 0x79, 0x6e, 0x63, - 0x43, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x74, 0x65, 0x65, 0x53, 0x69, 0x67, 0x6e, 0x61, 0x74, 0x75, - 0x72, 0x65, 0x42, 0x7c, 
0x0a, 0x13, 0x6f, 0x72, 0x67, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, - 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x42, 0x10, 0x42, 0x65, 0x61, 0x63, 0x6f, - 0x6e, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x50, 0x72, 0x6f, 0x74, 0x6f, 0x50, 0x01, 0x5a, 0x2d, 0x67, - 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, - 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, - 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2f, 0x65, 0x74, 0x68, 0x2f, 0x76, 0x31, 0xaa, 0x02, 0x0f, 0x45, - 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x45, 0x74, 0x68, 0x2e, 0x56, 0x31, 0xca, 0x02, - 0x0f, 0x45, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x5c, 0x45, 0x74, 0x68, 0x5c, 0x76, 0x31, - 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, + 0x42, 0x3a, 0x82, 0xb5, 0x18, 0x30, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, + 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x67, 0x6f, + 0x2d, 0x62, 0x69, 0x74, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x2e, 0x42, 0x69, 0x74, 0x76, 0x65, 0x63, + 0x74, 0x6f, 0x72, 0x35, 0x31, 0x32, 0x8a, 0xb5, 0x18, 0x02, 0x36, 0x34, 0x52, 0x11, 0x73, 0x79, + 0x6e, 0x63, 0x43, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x74, 0x65, 0x65, 0x42, 0x69, 0x74, 0x73, 0x12, + 0x40, 0x0a, 0x18, 0x73, 0x79, 0x6e, 0x63, 0x5f, 0x63, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x74, 0x65, + 0x65, 0x5f, 0x73, 0x69, 0x67, 0x6e, 0x61, 0x74, 0x75, 0x72, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, + 0x0c, 0x42, 0x06, 0x8a, 0xb5, 0x18, 0x02, 0x39, 0x36, 0x52, 0x16, 0x73, 0x79, 0x6e, 0x63, 0x43, + 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x74, 0x65, 0x65, 0x53, 0x69, 0x67, 0x6e, 0x61, 0x74, 0x75, 0x72, + 0x65, 0x42, 0x7c, 0x0a, 0x13, 0x6f, 0x72, 0x67, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, + 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x42, 0x10, 0x42, 0x65, 0x61, 0x63, 0x6f, 0x6e, + 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x50, 0x72, 0x6f, 0x74, 0x6f, 0x50, 0x01, 0x5a, 0x2d, 0x67, 0x69, + 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, + 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x70, + 0x72, 0x6f, 0x74, 0x6f, 0x2f, 0x65, 0x74, 0x68, 0x2f, 0x76, 0x31, 0xaa, 0x02, 0x0f, 0x45, 0x74, + 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x45, 0x74, 0x68, 0x2e, 0x56, 0x31, 0xca, 0x02, 0x0f, + 0x45, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x5c, 0x45, 0x74, 0x68, 0x5c, 0x76, 0x31, 0x62, + 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, } var ( diff --git a/proto/prysm/v1alpha1/BUILD.bazel b/proto/prysm/v1alpha1/BUILD.bazel index 2c6405763f..40a383981c 100644 --- a/proto/prysm/v1alpha1/BUILD.bazel +++ b/proto/prysm/v1alpha1/BUILD.bazel @@ -194,6 +194,22 @@ ssz_fulu_objs = [ "SignedBlindedBeaconBlockFulu", ] +ssz_gloas_objs = [ + "BuilderPendingPayment", + "BuilderPendingWithdrawal", + "DataColumnSidecarGloas", + "ExecutionPayloadEnvelope", + "PayloadAttestation", + "PayloadAttestationData", + "PayloadAttestationMessage", + "ExecutionPayloadBid", + "SignedExecutionPayloadBid", + "SignedExecutionPayloadEnvelope", + "BeaconBlockGloas", + "SignedBeaconBlockGloas", + "BeaconStateGloas", +] + ssz_gen_marshal( name = "ssz_generated_phase0", out = "phase0.ssz.go", @@ -284,6 +300,19 @@ ssz_gen_marshal( objs = ssz_fulu_objs, ) +ssz_gen_marshal( + name = "ssz_generated_gloas", + out = "gloas.ssz.go", + exclude_objs = ssz_phase0_objs + ssz_altair_objs + ssz_bellatrix_objs + ssz_capella_objs + ssz_deneb_objs + ssz_electra_objs + ssz_fulu_objs, + go_proto = 
":go_proto", + includes = [ + "//consensus-types/primitives:go_default_library", + "//math:go_default_library", + "//proto/engine/v1:go_default_library", + ], + objs = ssz_gloas_objs, +) + ssz_gen_marshal( name = "ssz_generated_non_core", out = "non-core.ssz.go", @@ -352,6 +381,7 @@ go_library( ":ssz_generated_deneb", # keep ":ssz_generated_electra", # keep ":ssz_generated_fulu", # keep + ":ssz_generated_gloas", # keep ":ssz_generated_non_core", # keep ":ssz_generated_phase0", # keep ], @@ -396,6 +426,7 @@ ssz_proto_files( "beacon_state.proto", "blobs.proto", "data_columns.proto", + "gloas.proto", "light_client.proto", "sync_committee.proto", "withdrawals.proto", diff --git a/proto/prysm/v1alpha1/attestation.pb.go b/proto/prysm/v1alpha1/attestation.pb.go index e05f9187a2..ea687d12f1 100755 --- a/proto/prysm/v1alpha1/attestation.pb.go +++ b/proto/prysm/v1alpha1/attestation.pb.go @@ -10,7 +10,7 @@ import ( reflect "reflect" sync "sync" - github_com_prysmaticlabs_go_bitfield "github.com/OffchainLabs/go-bitfield" + github_com_OffchainLabs_go_bitfield "github.com/OffchainLabs/go-bitfield" github_com_OffchainLabs_prysm_v6_consensus_types_primitives "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" _ "github.com/OffchainLabs/prysm/v6/proto/eth/ext" protoreflect "google.golang.org/protobuf/reflect/protoreflect" @@ -137,10 +137,10 @@ func (x *AggregateAttestationAndProof) GetSelectionProof() []byte { } type Attestation struct { - state protoimpl.MessageState `protogen:"open.v1"` - AggregationBits github_com_prysmaticlabs_go_bitfield.Bitlist `protobuf:"bytes,1,opt,name=aggregation_bits,json=aggregationBits,proto3" json:"aggregation_bits,omitempty" cast-type:"github.com/OffchainLabs/go-bitfield.Bitlist" ssz-max:"2048"` - Data *AttestationData `protobuf:"bytes,2,opt,name=data,proto3" json:"data,omitempty"` - Signature []byte `protobuf:"bytes,3,opt,name=signature,proto3" json:"signature,omitempty" ssz-size:"96"` + state protoimpl.MessageState `protogen:"open.v1"` + AggregationBits github_com_OffchainLabs_go_bitfield.Bitlist `protobuf:"bytes,1,opt,name=aggregation_bits,json=aggregationBits,proto3" json:"aggregation_bits,omitempty" cast-type:"github.com/OffchainLabs/go-bitfield.Bitlist" ssz-max:"2048"` + Data *AttestationData `protobuf:"bytes,2,opt,name=data,proto3" json:"data,omitempty"` + Signature []byte `protobuf:"bytes,3,opt,name=signature,proto3" json:"signature,omitempty" ssz-size:"96"` unknownFields protoimpl.UnknownFields sizeCache protoimpl.SizeCache } @@ -175,11 +175,11 @@ func (*Attestation) Descriptor() ([]byte, []int) { return file_proto_prysm_v1alpha1_attestation_proto_rawDescGZIP(), []int{2} } -func (x *Attestation) GetAggregationBits() github_com_prysmaticlabs_go_bitfield.Bitlist { +func (x *Attestation) GetAggregationBits() github_com_OffchainLabs_go_bitfield.Bitlist { if x != nil { return x.AggregationBits } - return github_com_prysmaticlabs_go_bitfield.Bitlist(nil) + return github_com_OffchainLabs_go_bitfield.Bitlist(nil) } func (x *Attestation) GetData() *AttestationData { @@ -437,11 +437,11 @@ func (x *AggregateAttestationAndProofElectra) GetSelectionProof() []byte { } type AttestationElectra struct { - state protoimpl.MessageState `protogen:"open.v1"` - AggregationBits github_com_prysmaticlabs_go_bitfield.Bitlist `protobuf:"bytes,1,opt,name=aggregation_bits,json=aggregationBits,proto3" json:"aggregation_bits,omitempty" cast-type:"github.com/OffchainLabs/go-bitfield.Bitlist" ssz-max:"131072"` - Data *AttestationData `protobuf:"bytes,2,opt,name=data,proto3" 
json:"data,omitempty"` - Signature []byte `protobuf:"bytes,3,opt,name=signature,proto3" json:"signature,omitempty" ssz-size:"96"` - CommitteeBits github_com_prysmaticlabs_go_bitfield.Bitvector64 `protobuf:"bytes,4,opt,name=committee_bits,json=committeeBits,proto3" json:"committee_bits,omitempty" cast-type:"github.com/OffchainLabs/go-bitfield.Bitvector64" ssz-size:"8"` + state protoimpl.MessageState `protogen:"open.v1"` + AggregationBits github_com_OffchainLabs_go_bitfield.Bitlist `protobuf:"bytes,1,opt,name=aggregation_bits,json=aggregationBits,proto3" json:"aggregation_bits,omitempty" cast-type:"github.com/OffchainLabs/go-bitfield.Bitlist" ssz-max:"131072"` + Data *AttestationData `protobuf:"bytes,2,opt,name=data,proto3" json:"data,omitempty"` + Signature []byte `protobuf:"bytes,3,opt,name=signature,proto3" json:"signature,omitempty" ssz-size:"96"` + CommitteeBits github_com_OffchainLabs_go_bitfield.Bitvector64 `protobuf:"bytes,4,opt,name=committee_bits,json=committeeBits,proto3" json:"committee_bits,omitempty" cast-type:"github.com/OffchainLabs/go-bitfield.Bitvector64" ssz-size:"8"` unknownFields protoimpl.UnknownFields sizeCache protoimpl.SizeCache } @@ -476,11 +476,11 @@ func (*AttestationElectra) Descriptor() ([]byte, []int) { return file_proto_prysm_v1alpha1_attestation_proto_rawDescGZIP(), []int{7} } -func (x *AttestationElectra) GetAggregationBits() github_com_prysmaticlabs_go_bitfield.Bitlist { +func (x *AttestationElectra) GetAggregationBits() github_com_OffchainLabs_go_bitfield.Bitlist { if x != nil { return x.AggregationBits } - return github_com_prysmaticlabs_go_bitfield.Bitlist(nil) + return github_com_OffchainLabs_go_bitfield.Bitlist(nil) } func (x *AttestationElectra) GetData() *AttestationData { @@ -497,11 +497,11 @@ func (x *AttestationElectra) GetSignature() []byte { return nil } -func (x *AttestationElectra) GetCommitteeBits() github_com_prysmaticlabs_go_bitfield.Bitvector64 { +func (x *AttestationElectra) GetCommitteeBits() github_com_OffchainLabs_go_bitfield.Bitvector64 { if x != nil { return x.CommitteeBits } - return github_com_prysmaticlabs_go_bitfield.Bitvector64(nil) + return github_com_OffchainLabs_go_bitfield.Bitvector64(nil) } type SingleAttestation struct { @@ -607,135 +607,135 @@ var file_proto_prysm_v1alpha1_attestation_proto_rawDesc = []byte{ 0x72, 0x65, 0x67, 0x61, 0x74, 0x65, 0x12, 0x2f, 0x0a, 0x0f, 0x73, 0x65, 0x6c, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x70, 0x72, 0x6f, 0x6f, 0x66, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x06, 0x8a, 0xb5, 0x18, 0x02, 0x39, 0x36, 0x52, 0x0e, 0x73, 0x65, 0x6c, 0x65, 0x63, 0x74, 0x69, - 0x6f, 0x6e, 0x50, 0x72, 0x6f, 0x6f, 0x66, 0x22, 0xd4, 0x01, 0x0a, 0x0b, 0x41, 0x74, 0x74, 0x65, - 0x73, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x63, 0x0a, 0x10, 0x61, 0x67, 0x67, 0x72, 0x65, + 0x6f, 0x6e, 0x50, 0x72, 0x6f, 0x6f, 0x66, 0x22, 0xd3, 0x01, 0x0a, 0x0b, 0x41, 0x74, 0x74, 0x65, + 0x73, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x62, 0x0a, 0x10, 0x61, 0x67, 0x67, 0x72, 0x65, 0x67, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x62, 0x69, 0x74, 0x73, 0x18, 0x01, 0x20, 0x01, 0x28, - 0x0c, 0x42, 0x38, 0x82, 0xb5, 0x18, 0x2c, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, - 0x6d, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x61, 0x74, 0x69, 0x63, 0x6c, 0x61, 0x62, 0x73, 0x2f, - 0x67, 0x6f, 0x2d, 0x62, 0x69, 0x74, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x2e, 0x42, 0x69, 0x74, 0x6c, - 0x69, 0x73, 0x74, 0x92, 0xb5, 0x18, 0x04, 0x32, 0x30, 0x34, 0x38, 0x52, 0x0f, 0x61, 0x67, 0x67, - 0x72, 0x65, 0x67, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x42, 0x69, 0x74, 
0x73, 0x12, 0x3a, 0x0a, 0x04, - 0x64, 0x61, 0x74, 0x61, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x26, 0x2e, 0x65, 0x74, 0x68, - 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, - 0x61, 0x31, 0x2e, 0x41, 0x74, 0x74, 0x65, 0x73, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x44, 0x61, - 0x74, 0x61, 0x52, 0x04, 0x64, 0x61, 0x74, 0x61, 0x12, 0x24, 0x0a, 0x09, 0x73, 0x69, 0x67, 0x6e, - 0x61, 0x74, 0x75, 0x72, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x06, 0x8a, 0xb5, 0x18, - 0x02, 0x39, 0x36, 0x52, 0x09, 0x73, 0x69, 0x67, 0x6e, 0x61, 0x74, 0x75, 0x72, 0x65, 0x22, 0x8e, - 0x03, 0x0a, 0x0f, 0x41, 0x74, 0x74, 0x65, 0x73, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x44, 0x61, - 0x74, 0x61, 0x12, 0x58, 0x0a, 0x04, 0x73, 0x6c, 0x6f, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, - 0x42, 0x44, 0x82, 0xb5, 0x18, 0x40, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, - 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, - 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, - 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, - 0x73, 0x2e, 0x53, 0x6c, 0x6f, 0x74, 0x52, 0x04, 0x73, 0x6c, 0x6f, 0x74, 0x12, 0x77, 0x0a, 0x0f, - 0x63, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x74, 0x65, 0x65, 0x5f, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x18, - 0x02, 0x20, 0x01, 0x28, 0x04, 0x42, 0x4e, 0x82, 0xb5, 0x18, 0x4a, 0x67, 0x69, 0x74, 0x68, 0x75, - 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, - 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, - 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, - 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x43, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x74, 0x65, 0x65, - 0x49, 0x6e, 0x64, 0x65, 0x78, 0x52, 0x0e, 0x63, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x74, 0x65, 0x65, - 0x49, 0x6e, 0x64, 0x65, 0x78, 0x12, 0x32, 0x0a, 0x11, 0x62, 0x65, 0x61, 0x63, 0x6f, 0x6e, 0x5f, - 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x5f, 0x72, 0x6f, 0x6f, 0x74, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0c, - 0x42, 0x06, 0x8a, 0xb5, 0x18, 0x02, 0x33, 0x32, 0x52, 0x0f, 0x62, 0x65, 0x61, 0x63, 0x6f, 0x6e, - 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x52, 0x6f, 0x6f, 0x74, 0x12, 0x39, 0x0a, 0x06, 0x73, 0x6f, 0x75, - 0x72, 0x63, 0x65, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x21, 0x2e, 0x65, 0x74, 0x68, 0x65, + 0x0c, 0x42, 0x37, 0x82, 0xb5, 0x18, 0x2b, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, + 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x67, + 0x6f, 0x2d, 0x62, 0x69, 0x74, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x2e, 0x42, 0x69, 0x74, 0x6c, 0x69, + 0x73, 0x74, 0x92, 0xb5, 0x18, 0x04, 0x32, 0x30, 0x34, 0x38, 0x52, 0x0f, 0x61, 0x67, 0x67, 0x72, + 0x65, 0x67, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x42, 0x69, 0x74, 0x73, 0x12, 0x3a, 0x0a, 0x04, 0x64, + 0x61, 0x74, 0x61, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x26, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, - 0x31, 0x2e, 0x43, 0x68, 0x65, 0x63, 0x6b, 0x70, 0x6f, 0x69, 0x6e, 0x74, 0x52, 0x06, 0x73, 0x6f, - 0x75, 0x72, 0x63, 0x65, 0x12, 0x39, 0x0a, 0x06, 0x74, 0x61, 0x72, 0x67, 0x65, 0x74, 0x18, 0x05, - 0x20, 0x01, 0x28, 0x0b, 0x32, 0x21, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, - 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x43, 0x68, 
0x65, - 0x63, 0x6b, 0x70, 0x6f, 0x69, 0x6e, 0x74, 0x52, 0x06, 0x74, 0x61, 0x72, 0x67, 0x65, 0x74, 0x22, - 0x85, 0x01, 0x0a, 0x0a, 0x43, 0x68, 0x65, 0x63, 0x6b, 0x70, 0x6f, 0x69, 0x6e, 0x74, 0x12, 0x5b, - 0x0a, 0x05, 0x65, 0x70, 0x6f, 0x63, 0x68, 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, 0x42, 0x45, 0x82, - 0xb5, 0x18, 0x41, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, - 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, - 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, - 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x45, - 0x70, 0x6f, 0x63, 0x68, 0x52, 0x05, 0x65, 0x70, 0x6f, 0x63, 0x68, 0x12, 0x1a, 0x0a, 0x04, 0x72, - 0x6f, 0x6f, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x06, 0x8a, 0xb5, 0x18, 0x02, 0x33, - 0x32, 0x52, 0x04, 0x72, 0x6f, 0x6f, 0x74, 0x22, 0xa7, 0x01, 0x0a, 0x29, 0x53, 0x69, 0x67, 0x6e, - 0x65, 0x64, 0x41, 0x67, 0x67, 0x72, 0x65, 0x67, 0x61, 0x74, 0x65, 0x41, 0x74, 0x74, 0x65, 0x73, - 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x41, 0x6e, 0x64, 0x50, 0x72, 0x6f, 0x6f, 0x66, 0x45, 0x6c, - 0x65, 0x63, 0x74, 0x72, 0x61, 0x12, 0x54, 0x0a, 0x07, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, - 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x3a, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, - 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x41, - 0x67, 0x67, 0x72, 0x65, 0x67, 0x61, 0x74, 0x65, 0x41, 0x74, 0x74, 0x65, 0x73, 0x74, 0x61, 0x74, - 0x69, 0x6f, 0x6e, 0x41, 0x6e, 0x64, 0x50, 0x72, 0x6f, 0x6f, 0x66, 0x45, 0x6c, 0x65, 0x63, 0x74, - 0x72, 0x61, 0x52, 0x07, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x12, 0x24, 0x0a, 0x09, 0x73, - 0x69, 0x67, 0x6e, 0x61, 0x74, 0x75, 0x72, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x06, - 0x8a, 0xb5, 0x18, 0x02, 0x39, 0x36, 0x52, 0x09, 0x73, 0x69, 0x67, 0x6e, 0x61, 0x74, 0x75, 0x72, - 0x65, 0x22, 0x9a, 0x02, 0x0a, 0x23, 0x41, 0x67, 0x67, 0x72, 0x65, 0x67, 0x61, 0x74, 0x65, 0x41, - 0x74, 0x74, 0x65, 0x73, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x41, 0x6e, 0x64, 0x50, 0x72, 0x6f, - 0x6f, 0x66, 0x45, 0x6c, 0x65, 0x63, 0x74, 0x72, 0x61, 0x12, 0x79, 0x0a, 0x10, 0x61, 0x67, 0x67, - 0x72, 0x65, 0x67, 0x61, 0x74, 0x6f, 0x72, 0x5f, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x18, 0x01, 0x20, - 0x01, 0x28, 0x04, 0x42, 0x4e, 0x82, 0xb5, 0x18, 0x4a, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, - 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, - 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, - 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, - 0x69, 0x76, 0x65, 0x73, 0x2e, 0x56, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, 0x49, 0x6e, - 0x64, 0x65, 0x78, 0x52, 0x0f, 0x61, 0x67, 0x67, 0x72, 0x65, 0x67, 0x61, 0x74, 0x6f, 0x72, 0x49, - 0x6e, 0x64, 0x65, 0x78, 0x12, 0x47, 0x0a, 0x09, 0x61, 0x67, 0x67, 0x72, 0x65, 0x67, 0x61, 0x74, - 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x29, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, - 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, - 0x41, 0x74, 0x74, 0x65, 0x73, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x45, 0x6c, 0x65, 0x63, 0x74, - 0x72, 0x61, 0x52, 0x09, 0x61, 0x67, 0x67, 0x72, 0x65, 0x67, 0x61, 0x74, 0x65, 0x12, 0x2f, 0x0a, - 0x0f, 0x73, 0x65, 0x6c, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x70, 0x72, 0x6f, 0x6f, 0x66, - 0x18, 0x03, 0x20, 
0x01, 0x28, 0x0c, 0x42, 0x06, 0x8a, 0xb5, 0x18, 0x02, 0x39, 0x36, 0x52, 0x0e, - 0x73, 0x65, 0x6c, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x50, 0x72, 0x6f, 0x6f, 0x66, 0x22, 0xbf, - 0x02, 0x0a, 0x12, 0x41, 0x74, 0x74, 0x65, 0x73, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x45, 0x6c, - 0x65, 0x63, 0x74, 0x72, 0x61, 0x12, 0x65, 0x0a, 0x10, 0x61, 0x67, 0x67, 0x72, 0x65, 0x67, 0x61, - 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x62, 0x69, 0x74, 0x73, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x42, - 0x3a, 0x82, 0xb5, 0x18, 0x2c, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, - 0x70, 0x72, 0x79, 0x73, 0x6d, 0x61, 0x74, 0x69, 0x63, 0x6c, 0x61, 0x62, 0x73, 0x2f, 0x67, 0x6f, - 0x2d, 0x62, 0x69, 0x74, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x2e, 0x42, 0x69, 0x74, 0x6c, 0x69, 0x73, - 0x74, 0x92, 0xb5, 0x18, 0x06, 0x31, 0x33, 0x31, 0x30, 0x37, 0x32, 0x52, 0x0f, 0x61, 0x67, 0x67, - 0x72, 0x65, 0x67, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x42, 0x69, 0x74, 0x73, 0x12, 0x3a, 0x0a, 0x04, - 0x64, 0x61, 0x74, 0x61, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x26, 0x2e, 0x65, 0x74, 0x68, - 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, - 0x61, 0x31, 0x2e, 0x41, 0x74, 0x74, 0x65, 0x73, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x44, 0x61, - 0x74, 0x61, 0x52, 0x04, 0x64, 0x61, 0x74, 0x61, 0x12, 0x24, 0x0a, 0x09, 0x73, 0x69, 0x67, 0x6e, - 0x61, 0x74, 0x75, 0x72, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x06, 0x8a, 0xb5, 0x18, - 0x02, 0x39, 0x36, 0x52, 0x09, 0x73, 0x69, 0x67, 0x6e, 0x61, 0x74, 0x75, 0x72, 0x65, 0x12, 0x60, - 0x0a, 0x0e, 0x63, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x74, 0x65, 0x65, 0x5f, 0x62, 0x69, 0x74, 0x73, - 0x18, 0x04, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x39, 0x82, 0xb5, 0x18, 0x30, 0x67, 0x69, 0x74, 0x68, - 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x61, 0x74, 0x69, 0x63, - 0x6c, 0x61, 0x62, 0x73, 0x2f, 0x67, 0x6f, 0x2d, 0x62, 0x69, 0x74, 0x66, 0x69, 0x65, 0x6c, 0x64, - 0x2e, 0x42, 0x69, 0x74, 0x76, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x36, 0x34, 0x8a, 0xb5, 0x18, 0x01, - 0x38, 0x52, 0x0d, 0x63, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x74, 0x65, 0x65, 0x42, 0x69, 0x74, 0x73, - 0x22, 0xdf, 0x02, 0x0a, 0x11, 0x53, 0x69, 0x6e, 0x67, 0x6c, 0x65, 0x41, 0x74, 0x74, 0x65, 0x73, - 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x71, 0x0a, 0x0c, 0x63, 0x6f, 0x6d, 0x6d, 0x69, 0x74, - 0x74, 0x65, 0x65, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, 0x42, 0x4e, 0x82, 0xb5, - 0x18, 0x4a, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, + 0x31, 0x2e, 0x41, 0x74, 0x74, 0x65, 0x73, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x44, 0x61, 0x74, + 0x61, 0x52, 0x04, 0x64, 0x61, 0x74, 0x61, 0x12, 0x24, 0x0a, 0x09, 0x73, 0x69, 0x67, 0x6e, 0x61, + 0x74, 0x75, 0x72, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x06, 0x8a, 0xb5, 0x18, 0x02, + 0x39, 0x36, 0x52, 0x09, 0x73, 0x69, 0x67, 0x6e, 0x61, 0x74, 0x75, 0x72, 0x65, 0x22, 0x8e, 0x03, + 0x0a, 0x0f, 0x41, 0x74, 0x74, 0x65, 0x73, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x44, 0x61, 0x74, + 0x61, 0x12, 0x58, 0x0a, 0x04, 0x73, 0x6c, 0x6f, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, 0x42, + 0x44, 0x82, 0xb5, 0x18, 0x40, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, + 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, + 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, + 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, + 0x2e, 0x53, 0x6c, 0x6f, 0x74, 0x52, 0x04, 
0x73, 0x6c, 0x6f, 0x74, 0x12, 0x77, 0x0a, 0x0f, 0x63, + 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x74, 0x65, 0x65, 0x5f, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x18, 0x02, + 0x20, 0x01, 0x28, 0x04, 0x42, 0x4e, 0x82, 0xb5, 0x18, 0x4a, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, + 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, + 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, + 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, + 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x43, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x74, 0x65, 0x65, 0x49, + 0x6e, 0x64, 0x65, 0x78, 0x52, 0x0e, 0x63, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x74, 0x65, 0x65, 0x49, + 0x6e, 0x64, 0x65, 0x78, 0x12, 0x32, 0x0a, 0x11, 0x62, 0x65, 0x61, 0x63, 0x6f, 0x6e, 0x5f, 0x62, + 0x6c, 0x6f, 0x63, 0x6b, 0x5f, 0x72, 0x6f, 0x6f, 0x74, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0c, 0x42, + 0x06, 0x8a, 0xb5, 0x18, 0x02, 0x33, 0x32, 0x52, 0x0f, 0x62, 0x65, 0x61, 0x63, 0x6f, 0x6e, 0x42, + 0x6c, 0x6f, 0x63, 0x6b, 0x52, 0x6f, 0x6f, 0x74, 0x12, 0x39, 0x0a, 0x06, 0x73, 0x6f, 0x75, 0x72, + 0x63, 0x65, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x21, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, + 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, + 0x2e, 0x43, 0x68, 0x65, 0x63, 0x6b, 0x70, 0x6f, 0x69, 0x6e, 0x74, 0x52, 0x06, 0x73, 0x6f, 0x75, + 0x72, 0x63, 0x65, 0x12, 0x39, 0x0a, 0x06, 0x74, 0x61, 0x72, 0x67, 0x65, 0x74, 0x18, 0x05, 0x20, + 0x01, 0x28, 0x0b, 0x32, 0x21, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, + 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x43, 0x68, 0x65, 0x63, + 0x6b, 0x70, 0x6f, 0x69, 0x6e, 0x74, 0x52, 0x06, 0x74, 0x61, 0x72, 0x67, 0x65, 0x74, 0x22, 0x85, + 0x01, 0x0a, 0x0a, 0x43, 0x68, 0x65, 0x63, 0x6b, 0x70, 0x6f, 0x69, 0x6e, 0x74, 0x12, 0x5b, 0x0a, + 0x05, 0x65, 0x70, 0x6f, 0x63, 0x68, 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, 0x42, 0x45, 0x82, 0xb5, + 0x18, 0x41, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, - 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x43, 0x6f, - 0x6d, 0x6d, 0x69, 0x74, 0x74, 0x65, 0x65, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x52, 0x0b, 0x63, 0x6f, - 0x6d, 0x6d, 0x69, 0x74, 0x74, 0x65, 0x65, 0x49, 0x64, 0x12, 0x75, 0x0a, 0x0e, 0x61, 0x74, 0x74, - 0x65, 0x73, 0x74, 0x65, 0x72, 0x5f, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x18, 0x02, 0x20, 0x01, 0x28, - 0x04, 0x42, 0x4e, 0x82, 0xb5, 0x18, 0x4a, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, - 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, - 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, - 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, - 0x65, 0x73, 0x2e, 0x56, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, 0x49, 0x6e, 0x64, 0x65, - 0x78, 0x52, 0x0d, 0x61, 0x74, 0x74, 0x65, 0x73, 0x74, 0x65, 0x72, 0x49, 0x6e, 0x64, 0x65, 0x78, - 0x12, 0x3a, 0x0a, 0x04, 0x64, 0x61, 0x74, 0x61, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x26, - 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, - 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x41, 0x74, 0x74, 0x65, 0x73, 
0x74, 0x61, 0x74, 0x69, - 0x6f, 0x6e, 0x44, 0x61, 0x74, 0x61, 0x52, 0x04, 0x64, 0x61, 0x74, 0x61, 0x12, 0x24, 0x0a, 0x09, - 0x73, 0x69, 0x67, 0x6e, 0x61, 0x74, 0x75, 0x72, 0x65, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0c, 0x42, - 0x06, 0x8a, 0xb5, 0x18, 0x02, 0x39, 0x36, 0x52, 0x09, 0x73, 0x69, 0x67, 0x6e, 0x61, 0x74, 0x75, - 0x72, 0x65, 0x42, 0x9a, 0x01, 0x0a, 0x19, 0x6f, 0x72, 0x67, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, + 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x45, 0x70, + 0x6f, 0x63, 0x68, 0x52, 0x05, 0x65, 0x70, 0x6f, 0x63, 0x68, 0x12, 0x1a, 0x0a, 0x04, 0x72, 0x6f, + 0x6f, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x06, 0x8a, 0xb5, 0x18, 0x02, 0x33, 0x32, + 0x52, 0x04, 0x72, 0x6f, 0x6f, 0x74, 0x22, 0xa7, 0x01, 0x0a, 0x29, 0x53, 0x69, 0x67, 0x6e, 0x65, + 0x64, 0x41, 0x67, 0x67, 0x72, 0x65, 0x67, 0x61, 0x74, 0x65, 0x41, 0x74, 0x74, 0x65, 0x73, 0x74, + 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x41, 0x6e, 0x64, 0x50, 0x72, 0x6f, 0x6f, 0x66, 0x45, 0x6c, 0x65, + 0x63, 0x74, 0x72, 0x61, 0x12, 0x54, 0x0a, 0x07, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x18, + 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x3a, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, + 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x41, 0x67, + 0x67, 0x72, 0x65, 0x67, 0x61, 0x74, 0x65, 0x41, 0x74, 0x74, 0x65, 0x73, 0x74, 0x61, 0x74, 0x69, + 0x6f, 0x6e, 0x41, 0x6e, 0x64, 0x50, 0x72, 0x6f, 0x6f, 0x66, 0x45, 0x6c, 0x65, 0x63, 0x74, 0x72, + 0x61, 0x52, 0x07, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x12, 0x24, 0x0a, 0x09, 0x73, 0x69, + 0x67, 0x6e, 0x61, 0x74, 0x75, 0x72, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x06, 0x8a, + 0xb5, 0x18, 0x02, 0x39, 0x36, 0x52, 0x09, 0x73, 0x69, 0x67, 0x6e, 0x61, 0x74, 0x75, 0x72, 0x65, + 0x22, 0x9a, 0x02, 0x0a, 0x23, 0x41, 0x67, 0x67, 0x72, 0x65, 0x67, 0x61, 0x74, 0x65, 0x41, 0x74, + 0x74, 0x65, 0x73, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x41, 0x6e, 0x64, 0x50, 0x72, 0x6f, 0x6f, + 0x66, 0x45, 0x6c, 0x65, 0x63, 0x74, 0x72, 0x61, 0x12, 0x79, 0x0a, 0x10, 0x61, 0x67, 0x67, 0x72, + 0x65, 0x67, 0x61, 0x74, 0x6f, 0x72, 0x5f, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x18, 0x01, 0x20, 0x01, + 0x28, 0x04, 0x42, 0x4e, 0x82, 0xb5, 0x18, 0x4a, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, + 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, + 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, + 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, + 0x76, 0x65, 0x73, 0x2e, 0x56, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, 0x49, 0x6e, 0x64, + 0x65, 0x78, 0x52, 0x0f, 0x61, 0x67, 0x67, 0x72, 0x65, 0x67, 0x61, 0x74, 0x6f, 0x72, 0x49, 0x6e, + 0x64, 0x65, 0x78, 0x12, 0x47, 0x0a, 0x09, 0x61, 0x67, 0x67, 0x72, 0x65, 0x67, 0x61, 0x74, 0x65, + 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x29, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, + 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x41, + 0x74, 0x74, 0x65, 0x73, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x45, 0x6c, 0x65, 0x63, 0x74, 0x72, + 0x61, 0x52, 0x09, 0x61, 0x67, 0x67, 0x72, 0x65, 0x67, 0x61, 0x74, 0x65, 0x12, 0x2f, 0x0a, 0x0f, + 0x73, 0x65, 0x6c, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x70, 0x72, 0x6f, 0x6f, 0x66, 0x18, + 0x03, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x06, 0x8a, 0xb5, 0x18, 0x02, 0x39, 0x36, 0x52, 0x0e, 0x73, + 0x65, 0x6c, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x50, 0x72, 0x6f, 0x6f, 0x66, 0x22, 0xbd, 0x02, + 
0x0a, 0x12, 0x41, 0x74, 0x74, 0x65, 0x73, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x45, 0x6c, 0x65, + 0x63, 0x74, 0x72, 0x61, 0x12, 0x64, 0x0a, 0x10, 0x61, 0x67, 0x67, 0x72, 0x65, 0x67, 0x61, 0x74, + 0x69, 0x6f, 0x6e, 0x5f, 0x62, 0x69, 0x74, 0x73, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x39, + 0x82, 0xb5, 0x18, 0x2b, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, + 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x67, 0x6f, 0x2d, 0x62, + 0x69, 0x74, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x2e, 0x42, 0x69, 0x74, 0x6c, 0x69, 0x73, 0x74, 0x92, + 0xb5, 0x18, 0x06, 0x31, 0x33, 0x31, 0x30, 0x37, 0x32, 0x52, 0x0f, 0x61, 0x67, 0x67, 0x72, 0x65, + 0x67, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x42, 0x69, 0x74, 0x73, 0x12, 0x3a, 0x0a, 0x04, 0x64, 0x61, + 0x74, 0x61, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x26, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, - 0x42, 0x10, 0x41, 0x74, 0x74, 0x65, 0x73, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x50, 0x72, 0x6f, - 0x74, 0x6f, 0x50, 0x01, 0x5a, 0x39, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, - 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, - 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2f, 0x70, 0x72, 0x79, - 0x73, 0x6d, 0x2f, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x3b, 0x65, 0x74, 0x68, 0xaa, - 0x02, 0x15, 0x45, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x45, 0x74, 0x68, 0x2e, 0x76, - 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0xca, 0x02, 0x15, 0x45, 0x74, 0x68, 0x65, 0x72, 0x65, - 0x75, 0x6d, 0x5c, 0x45, 0x74, 0x68, 0x5c, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x62, - 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, + 0x2e, 0x41, 0x74, 0x74, 0x65, 0x73, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x44, 0x61, 0x74, 0x61, + 0x52, 0x04, 0x64, 0x61, 0x74, 0x61, 0x12, 0x24, 0x0a, 0x09, 0x73, 0x69, 0x67, 0x6e, 0x61, 0x74, + 0x75, 0x72, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x06, 0x8a, 0xb5, 0x18, 0x02, 0x39, + 0x36, 0x52, 0x09, 0x73, 0x69, 0x67, 0x6e, 0x61, 0x74, 0x75, 0x72, 0x65, 0x12, 0x5f, 0x0a, 0x0e, + 0x63, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x74, 0x65, 0x65, 0x5f, 0x62, 0x69, 0x74, 0x73, 0x18, 0x04, + 0x20, 0x01, 0x28, 0x0c, 0x42, 0x38, 0x82, 0xb5, 0x18, 0x2f, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, + 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, + 0x73, 0x2f, 0x67, 0x6f, 0x2d, 0x62, 0x69, 0x74, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x2e, 0x42, 0x69, + 0x74, 0x76, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x36, 0x34, 0x8a, 0xb5, 0x18, 0x01, 0x38, 0x52, 0x0d, + 0x63, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x74, 0x65, 0x65, 0x42, 0x69, 0x74, 0x73, 0x22, 0xdf, 0x02, + 0x0a, 0x11, 0x53, 0x69, 0x6e, 0x67, 0x6c, 0x65, 0x41, 0x74, 0x74, 0x65, 0x73, 0x74, 0x61, 0x74, + 0x69, 0x6f, 0x6e, 0x12, 0x71, 0x0a, 0x0c, 0x63, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x74, 0x65, 0x65, + 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, 0x42, 0x4e, 0x82, 0xb5, 0x18, 0x4a, 0x67, + 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, + 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, + 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, + 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x43, 0x6f, 0x6d, 0x6d, 0x69, + 0x74, 0x74, 0x65, 0x65, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x52, 0x0b, 0x63, 0x6f, 
0x6d, 0x6d, 0x69, + 0x74, 0x74, 0x65, 0x65, 0x49, 0x64, 0x12, 0x75, 0x0a, 0x0e, 0x61, 0x74, 0x74, 0x65, 0x73, 0x74, + 0x65, 0x72, 0x5f, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x18, 0x02, 0x20, 0x01, 0x28, 0x04, 0x42, 0x4e, + 0x82, 0xb5, 0x18, 0x4a, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, + 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, + 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, + 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, + 0x56, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x52, 0x0d, + 0x61, 0x74, 0x74, 0x65, 0x73, 0x74, 0x65, 0x72, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x12, 0x3a, 0x0a, + 0x04, 0x64, 0x61, 0x74, 0x61, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x26, 0x2e, 0x65, 0x74, + 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, + 0x68, 0x61, 0x31, 0x2e, 0x41, 0x74, 0x74, 0x65, 0x73, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x44, + 0x61, 0x74, 0x61, 0x52, 0x04, 0x64, 0x61, 0x74, 0x61, 0x12, 0x24, 0x0a, 0x09, 0x73, 0x69, 0x67, + 0x6e, 0x61, 0x74, 0x75, 0x72, 0x65, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x06, 0x8a, 0xb5, + 0x18, 0x02, 0x39, 0x36, 0x52, 0x09, 0x73, 0x69, 0x67, 0x6e, 0x61, 0x74, 0x75, 0x72, 0x65, 0x42, + 0x9a, 0x01, 0x0a, 0x19, 0x6f, 0x72, 0x67, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, + 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x42, 0x10, 0x41, + 0x74, 0x74, 0x65, 0x73, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x50, 0x72, 0x6f, 0x74, 0x6f, 0x50, + 0x01, 0x5a, 0x39, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, + 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, + 0x2f, 0x76, 0x36, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, + 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x3b, 0x65, 0x74, 0x68, 0xaa, 0x02, 0x15, 0x45, + 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x45, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, + 0x70, 0x68, 0x61, 0x31, 0xca, 0x02, 0x15, 0x45, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x5c, + 0x45, 0x74, 0x68, 0x5c, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x62, 0x06, 0x70, 0x72, + 0x6f, 0x74, 0x6f, 0x33, } var ( diff --git a/proto/prysm/v1alpha1/beacon_block.pb.go b/proto/prysm/v1alpha1/beacon_block.pb.go index 8223f7e1b2..760bdb149b 100755 --- a/proto/prysm/v1alpha1/beacon_block.pb.go +++ b/proto/prysm/v1alpha1/beacon_block.pb.go @@ -41,6 +41,7 @@ type GenericSignedBeaconBlock struct { // *GenericSignedBeaconBlock_BlindedElectra // *GenericSignedBeaconBlock_Fulu // *GenericSignedBeaconBlock_BlindedFulu + // *GenericSignedBeaconBlock_Gloas Block isGenericSignedBeaconBlock_Block `protobuf_oneof:"block"` IsBlinded bool `protobuf:"varint,100,opt,name=is_blinded,json=isBlinded,proto3" json:"is_blinded,omitempty"` unknownFields protoimpl.UnknownFields @@ -192,6 +193,15 @@ func (x *GenericSignedBeaconBlock) GetBlindedFulu() *SignedBlindedBeaconBlockFul return nil } +func (x *GenericSignedBeaconBlock) GetGloas() *SignedBeaconBlockGloas { + if x != nil { + if x, ok := x.Block.(*GenericSignedBeaconBlock_Gloas); ok { + return x.Gloas + } + } + return nil +} + func (x *GenericSignedBeaconBlock) GetIsBlinded() bool { if x != nil { return x.IsBlinded @@ -251,6 +261,10 @@ type GenericSignedBeaconBlock_BlindedFulu struct { BlindedFulu 
*SignedBlindedBeaconBlockFulu `protobuf:"bytes,12,opt,name=blinded_fulu,json=blindedFulu,proto3,oneof"` } +type GenericSignedBeaconBlock_Gloas struct { + Gloas *SignedBeaconBlockGloas `protobuf:"bytes,13,opt,name=gloas,proto3,oneof"` +} + func (*GenericSignedBeaconBlock_Phase0) isGenericSignedBeaconBlock_Block() {} func (*GenericSignedBeaconBlock_Altair) isGenericSignedBeaconBlock_Block() {} @@ -275,6 +289,8 @@ func (*GenericSignedBeaconBlock_Fulu) isGenericSignedBeaconBlock_Block() {} func (*GenericSignedBeaconBlock_BlindedFulu) isGenericSignedBeaconBlock_Block() {} +func (*GenericSignedBeaconBlock_Gloas) isGenericSignedBeaconBlock_Block() {} + // Deprecated: Marked as deprecated in proto/prysm/v1alpha1/beacon_block.proto. type GenericBeaconBlock struct { state protoimpl.MessageState `protogen:"open.v1"` @@ -292,6 +308,7 @@ type GenericBeaconBlock struct { // *GenericBeaconBlock_BlindedElectra // *GenericBeaconBlock_Fulu // *GenericBeaconBlock_BlindedFulu + // *GenericBeaconBlock_Gloas Block isGenericBeaconBlock_Block `protobuf_oneof:"block"` IsBlinded bool `protobuf:"varint,100,opt,name=is_blinded,json=isBlinded,proto3" json:"is_blinded,omitempty"` PayloadValue string `protobuf:"bytes,101,opt,name=payload_value,json=payloadValue,proto3" json:"payload_value,omitempty"` @@ -444,6 +461,15 @@ func (x *GenericBeaconBlock) GetBlindedFulu() *BlindedBeaconBlockFulu { return nil } +func (x *GenericBeaconBlock) GetGloas() *BeaconBlockGloas { + if x != nil { + if x, ok := x.Block.(*GenericBeaconBlock_Gloas); ok { + return x.Gloas + } + } + return nil +} + func (x *GenericBeaconBlock) GetIsBlinded() bool { if x != nil { return x.IsBlinded @@ -510,6 +536,10 @@ type GenericBeaconBlock_BlindedFulu struct { BlindedFulu *BlindedBeaconBlockFulu `protobuf:"bytes,12,opt,name=blinded_fulu,json=blindedFulu,proto3,oneof"` } +type GenericBeaconBlock_Gloas struct { + Gloas *BeaconBlockGloas `protobuf:"bytes,13,opt,name=gloas,proto3,oneof"` +} + func (*GenericBeaconBlock_Phase0) isGenericBeaconBlock_Block() {} func (*GenericBeaconBlock_Altair) isGenericBeaconBlock_Block() {} @@ -534,6 +564,8 @@ func (*GenericBeaconBlock_Fulu) isGenericBeaconBlock_Block() {} func (*GenericBeaconBlock_BlindedFulu) isGenericBeaconBlock_Block() {} +func (*GenericBeaconBlock_Gloas) isGenericBeaconBlock_Block() {} + type SignedBeaconBlock struct { state protoimpl.MessageState `protogen:"open.v1"` Block *BeaconBlock `protobuf:"bytes,1,opt,name=block,proto3" json:"block,omitempty"` @@ -4479,615 +4511,537 @@ var file_proto_prysm_v1alpha1_beacon_block_proto_rawDesc = []byte{ 0x64, 0x72, 0x61, 0x77, 0x61, 0x6c, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x2c, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2f, 0x62, 0x65, 0x61, 0x63, 0x6f, 0x6e, 0x5f, 0x63, 0x6f, 0x72, 0x65, 0x5f, - 0x74, 0x79, 0x70, 0x65, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x26, 0x70, 0x72, 0x6f, - 0x74, 0x6f, 0x2f, 0x65, 0x6e, 0x67, 0x69, 0x6e, 0x65, 0x2f, 0x76, 0x31, 0x2f, 0x65, 0x78, 0x65, - 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x65, 0x6e, 0x67, 0x69, 0x6e, 0x65, 0x2e, 0x70, 0x72, - 0x6f, 0x74, 0x6f, 0x1a, 0x1d, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2f, 0x65, 0x6e, 0x67, 0x69, 0x6e, - 0x65, 0x2f, 0x76, 0x31, 0x2f, 0x65, 0x6c, 0x65, 0x63, 0x74, 0x72, 0x61, 0x2e, 0x70, 0x72, 0x6f, - 0x74, 0x6f, 0x22, 0xd0, 0x08, 0x0a, 0x18, 0x47, 0x65, 0x6e, 0x65, 0x72, 0x69, 0x63, 0x53, 0x69, - 0x67, 0x6e, 0x65, 0x64, 0x42, 0x65, 0x61, 0x63, 0x6f, 0x6e, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x12, - 0x42, 
0x0a, 0x06, 0x70, 0x68, 0x61, 0x73, 0x65, 0x30, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, - 0x28, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, - 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x53, 0x69, 0x67, 0x6e, 0x65, 0x64, 0x42, 0x65, - 0x61, 0x63, 0x6f, 0x6e, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x48, 0x00, 0x52, 0x06, 0x70, 0x68, 0x61, - 0x73, 0x65, 0x30, 0x12, 0x48, 0x0a, 0x06, 0x61, 0x6c, 0x74, 0x61, 0x69, 0x72, 0x18, 0x02, 0x20, - 0x01, 0x28, 0x0b, 0x32, 0x2e, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, - 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x53, 0x69, 0x67, 0x6e, - 0x65, 0x64, 0x42, 0x65, 0x61, 0x63, 0x6f, 0x6e, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x41, 0x6c, 0x74, - 0x61, 0x69, 0x72, 0x48, 0x00, 0x52, 0x06, 0x61, 0x6c, 0x74, 0x61, 0x69, 0x72, 0x12, 0x51, 0x0a, - 0x09, 0x62, 0x65, 0x6c, 0x6c, 0x61, 0x74, 0x72, 0x69, 0x78, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, - 0x32, 0x31, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, - 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x53, 0x69, 0x67, 0x6e, 0x65, 0x64, 0x42, - 0x65, 0x61, 0x63, 0x6f, 0x6e, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x42, 0x65, 0x6c, 0x6c, 0x61, 0x74, - 0x72, 0x69, 0x78, 0x48, 0x00, 0x52, 0x09, 0x62, 0x65, 0x6c, 0x6c, 0x61, 0x74, 0x72, 0x69, 0x78, - 0x12, 0x67, 0x0a, 0x11, 0x62, 0x6c, 0x69, 0x6e, 0x64, 0x65, 0x64, 0x5f, 0x62, 0x65, 0x6c, 0x6c, - 0x61, 0x74, 0x72, 0x69, 0x78, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x38, 0x2e, 0x65, 0x74, - 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, - 0x68, 0x61, 0x31, 0x2e, 0x53, 0x69, 0x67, 0x6e, 0x65, 0x64, 0x42, 0x6c, 0x69, 0x6e, 0x64, 0x65, + 0x74, 0x79, 0x70, 0x65, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x20, 0x70, 0x72, 0x6f, + 0x74, 0x6f, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, + 0x31, 0x2f, 0x67, 0x6c, 0x6f, 0x61, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x26, 0x70, + 0x72, 0x6f, 0x74, 0x6f, 0x2f, 0x65, 0x6e, 0x67, 0x69, 0x6e, 0x65, 0x2f, 0x76, 0x31, 0x2f, 0x65, + 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x65, 0x6e, 0x67, 0x69, 0x6e, 0x65, 0x2e, + 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x1d, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2f, 0x65, 0x6e, 0x67, + 0x69, 0x6e, 0x65, 0x2f, 0x76, 0x31, 0x2f, 0x65, 0x6c, 0x65, 0x63, 0x74, 0x72, 0x61, 0x2e, 0x70, + 0x72, 0x6f, 0x74, 0x6f, 0x22, 0x97, 0x09, 0x0a, 0x18, 0x47, 0x65, 0x6e, 0x65, 0x72, 0x69, 0x63, + 0x53, 0x69, 0x67, 0x6e, 0x65, 0x64, 0x42, 0x65, 0x61, 0x63, 0x6f, 0x6e, 0x42, 0x6c, 0x6f, 0x63, + 0x6b, 0x12, 0x42, 0x0a, 0x06, 0x70, 0x68, 0x61, 0x73, 0x65, 0x30, 0x18, 0x01, 0x20, 0x01, 0x28, + 0x0b, 0x32, 0x28, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, + 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x53, 0x69, 0x67, 0x6e, 0x65, 0x64, + 0x42, 0x65, 0x61, 0x63, 0x6f, 0x6e, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x48, 0x00, 0x52, 0x06, 0x70, + 0x68, 0x61, 0x73, 0x65, 0x30, 0x12, 0x48, 0x0a, 0x06, 0x61, 0x6c, 0x74, 0x61, 0x69, 0x72, 0x18, + 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x2e, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, + 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x53, 0x69, + 0x67, 0x6e, 0x65, 0x64, 0x42, 0x65, 0x61, 0x63, 0x6f, 0x6e, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x41, + 0x6c, 0x74, 0x61, 0x69, 0x72, 0x48, 0x00, 0x52, 0x06, 0x61, 0x6c, 0x74, 0x61, 0x69, 0x72, 0x12, + 0x51, 0x0a, 0x09, 0x62, 0x65, 
0x6c, 0x6c, 0x61, 0x74, 0x72, 0x69, 0x78, 0x18, 0x03, 0x20, 0x01, + 0x28, 0x0b, 0x32, 0x31, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, + 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x53, 0x69, 0x67, 0x6e, 0x65, 0x64, 0x42, 0x65, 0x61, 0x63, 0x6f, 0x6e, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x42, 0x65, 0x6c, 0x6c, - 0x61, 0x74, 0x72, 0x69, 0x78, 0x48, 0x00, 0x52, 0x10, 0x62, 0x6c, 0x69, 0x6e, 0x64, 0x65, 0x64, - 0x42, 0x65, 0x6c, 0x6c, 0x61, 0x74, 0x72, 0x69, 0x78, 0x12, 0x4b, 0x0a, 0x07, 0x63, 0x61, 0x70, - 0x65, 0x6c, 0x6c, 0x61, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x2f, 0x2e, 0x65, 0x74, 0x68, + 0x61, 0x74, 0x72, 0x69, 0x78, 0x48, 0x00, 0x52, 0x09, 0x62, 0x65, 0x6c, 0x6c, 0x61, 0x74, 0x72, + 0x69, 0x78, 0x12, 0x67, 0x0a, 0x11, 0x62, 0x6c, 0x69, 0x6e, 0x64, 0x65, 0x64, 0x5f, 0x62, 0x65, + 0x6c, 0x6c, 0x61, 0x74, 0x72, 0x69, 0x78, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x38, 0x2e, + 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, + 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x53, 0x69, 0x67, 0x6e, 0x65, 0x64, 0x42, 0x6c, 0x69, 0x6e, + 0x64, 0x65, 0x64, 0x42, 0x65, 0x61, 0x63, 0x6f, 0x6e, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x42, 0x65, + 0x6c, 0x6c, 0x61, 0x74, 0x72, 0x69, 0x78, 0x48, 0x00, 0x52, 0x10, 0x62, 0x6c, 0x69, 0x6e, 0x64, + 0x65, 0x64, 0x42, 0x65, 0x6c, 0x6c, 0x61, 0x74, 0x72, 0x69, 0x78, 0x12, 0x4b, 0x0a, 0x07, 0x63, + 0x61, 0x70, 0x65, 0x6c, 0x6c, 0x61, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x2f, 0x2e, 0x65, + 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, + 0x70, 0x68, 0x61, 0x31, 0x2e, 0x53, 0x69, 0x67, 0x6e, 0x65, 0x64, 0x42, 0x65, 0x61, 0x63, 0x6f, + 0x6e, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x43, 0x61, 0x70, 0x65, 0x6c, 0x6c, 0x61, 0x48, 0x00, 0x52, + 0x07, 0x63, 0x61, 0x70, 0x65, 0x6c, 0x6c, 0x61, 0x12, 0x61, 0x0a, 0x0f, 0x62, 0x6c, 0x69, 0x6e, + 0x64, 0x65, 0x64, 0x5f, 0x63, 0x61, 0x70, 0x65, 0x6c, 0x6c, 0x61, 0x18, 0x06, 0x20, 0x01, 0x28, + 0x0b, 0x32, 0x36, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, + 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x53, 0x69, 0x67, 0x6e, 0x65, 0x64, + 0x42, 0x6c, 0x69, 0x6e, 0x64, 0x65, 0x64, 0x42, 0x65, 0x61, 0x63, 0x6f, 0x6e, 0x42, 0x6c, 0x6f, + 0x63, 0x6b, 0x43, 0x61, 0x70, 0x65, 0x6c, 0x6c, 0x61, 0x48, 0x00, 0x52, 0x0e, 0x62, 0x6c, 0x69, + 0x6e, 0x64, 0x65, 0x64, 0x43, 0x61, 0x70, 0x65, 0x6c, 0x6c, 0x61, 0x12, 0x4d, 0x0a, 0x05, 0x64, + 0x65, 0x6e, 0x65, 0x62, 0x18, 0x07, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x35, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x53, 0x69, 0x67, 0x6e, 0x65, 0x64, 0x42, 0x65, 0x61, 0x63, 0x6f, 0x6e, 0x42, - 0x6c, 0x6f, 0x63, 0x6b, 0x43, 0x61, 0x70, 0x65, 0x6c, 0x6c, 0x61, 0x48, 0x00, 0x52, 0x07, 0x63, - 0x61, 0x70, 0x65, 0x6c, 0x6c, 0x61, 0x12, 0x61, 0x0a, 0x0f, 0x62, 0x6c, 0x69, 0x6e, 0x64, 0x65, - 0x64, 0x5f, 0x63, 0x61, 0x70, 0x65, 0x6c, 0x6c, 0x61, 0x18, 0x06, 0x20, 0x01, 0x28, 0x0b, 0x32, - 0x36, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, - 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x53, 0x69, 0x67, 0x6e, 0x65, 0x64, 0x42, 0x6c, - 0x69, 0x6e, 0x64, 0x65, 0x64, 0x42, 0x65, 0x61, 0x63, 0x6f, 0x6e, 0x42, 0x6c, 0x6f, 0x63, 0x6b, - 0x43, 0x61, 0x70, 0x65, 0x6c, 0x6c, 0x61, 0x48, 0x00, 0x52, 0x0e, 0x62, 0x6c, 0x69, 0x6e, 0x64, - 0x65, 0x64, 0x43, 0x61, 0x70, 0x65, 0x6c, 0x6c, 0x61, 0x12, 
0x4d, 0x0a, 0x05, 0x64, 0x65, 0x6e, - 0x65, 0x62, 0x18, 0x07, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x35, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, + 0x6c, 0x6f, 0x63, 0x6b, 0x43, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x73, 0x44, 0x65, 0x6e, 0x65, + 0x62, 0x48, 0x00, 0x52, 0x05, 0x64, 0x65, 0x6e, 0x65, 0x62, 0x12, 0x5b, 0x0a, 0x0d, 0x62, 0x6c, + 0x69, 0x6e, 0x64, 0x65, 0x64, 0x5f, 0x64, 0x65, 0x6e, 0x65, 0x62, 0x18, 0x08, 0x20, 0x01, 0x28, + 0x0b, 0x32, 0x34, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, + 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x53, 0x69, 0x67, 0x6e, 0x65, 0x64, + 0x42, 0x6c, 0x69, 0x6e, 0x64, 0x65, 0x64, 0x42, 0x65, 0x61, 0x63, 0x6f, 0x6e, 0x42, 0x6c, 0x6f, + 0x63, 0x6b, 0x44, 0x65, 0x6e, 0x65, 0x62, 0x48, 0x00, 0x52, 0x0c, 0x62, 0x6c, 0x69, 0x6e, 0x64, + 0x65, 0x64, 0x44, 0x65, 0x6e, 0x65, 0x62, 0x12, 0x53, 0x0a, 0x07, 0x65, 0x6c, 0x65, 0x63, 0x74, + 0x72, 0x61, 0x18, 0x09, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x37, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x53, 0x69, 0x67, 0x6e, 0x65, 0x64, 0x42, 0x65, 0x61, 0x63, 0x6f, 0x6e, 0x42, 0x6c, 0x6f, - 0x63, 0x6b, 0x43, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x73, 0x44, 0x65, 0x6e, 0x65, 0x62, 0x48, - 0x00, 0x52, 0x05, 0x64, 0x65, 0x6e, 0x65, 0x62, 0x12, 0x5b, 0x0a, 0x0d, 0x62, 0x6c, 0x69, 0x6e, - 0x64, 0x65, 0x64, 0x5f, 0x64, 0x65, 0x6e, 0x65, 0x62, 0x18, 0x08, 0x20, 0x01, 0x28, 0x0b, 0x32, - 0x34, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, - 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x53, 0x69, 0x67, 0x6e, 0x65, 0x64, 0x42, 0x6c, - 0x69, 0x6e, 0x64, 0x65, 0x64, 0x42, 0x65, 0x61, 0x63, 0x6f, 0x6e, 0x42, 0x6c, 0x6f, 0x63, 0x6b, - 0x44, 0x65, 0x6e, 0x65, 0x62, 0x48, 0x00, 0x52, 0x0c, 0x62, 0x6c, 0x69, 0x6e, 0x64, 0x65, 0x64, - 0x44, 0x65, 0x6e, 0x65, 0x62, 0x12, 0x53, 0x0a, 0x07, 0x65, 0x6c, 0x65, 0x63, 0x74, 0x72, 0x61, - 0x18, 0x09, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x37, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, - 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x53, - 0x69, 0x67, 0x6e, 0x65, 0x64, 0x42, 0x65, 0x61, 0x63, 0x6f, 0x6e, 0x42, 0x6c, 0x6f, 0x63, 0x6b, - 0x43, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x73, 0x45, 0x6c, 0x65, 0x63, 0x74, 0x72, 0x61, 0x48, - 0x00, 0x52, 0x07, 0x65, 0x6c, 0x65, 0x63, 0x74, 0x72, 0x61, 0x12, 0x61, 0x0a, 0x0f, 0x62, 0x6c, - 0x69, 0x6e, 0x64, 0x65, 0x64, 0x5f, 0x65, 0x6c, 0x65, 0x63, 0x74, 0x72, 0x61, 0x18, 0x0a, 0x20, - 0x01, 0x28, 0x0b, 0x32, 0x36, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, - 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x53, 0x69, 0x67, 0x6e, - 0x65, 0x64, 0x42, 0x6c, 0x69, 0x6e, 0x64, 0x65, 0x64, 0x42, 0x65, 0x61, 0x63, 0x6f, 0x6e, 0x42, - 0x6c, 0x6f, 0x63, 0x6b, 0x45, 0x6c, 0x65, 0x63, 0x74, 0x72, 0x61, 0x48, 0x00, 0x52, 0x0e, 0x62, - 0x6c, 0x69, 0x6e, 0x64, 0x65, 0x64, 0x45, 0x6c, 0x65, 0x63, 0x74, 0x72, 0x61, 0x12, 0x4a, 0x0a, - 0x04, 0x66, 0x75, 0x6c, 0x75, 0x18, 0x0b, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x34, 0x2e, 0x65, 0x74, - 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, - 0x68, 0x61, 0x31, 0x2e, 0x53, 0x69, 0x67, 0x6e, 0x65, 0x64, 0x42, 0x65, 0x61, 0x63, 0x6f, 0x6e, - 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x43, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x73, 0x46, 0x75, 0x6c, - 0x75, 0x48, 0x00, 0x52, 0x04, 0x66, 0x75, 0x6c, 0x75, 0x12, 0x58, 0x0a, 0x0c, 0x62, 0x6c, 
0x69, - 0x6e, 0x64, 0x65, 0x64, 0x5f, 0x66, 0x75, 0x6c, 0x75, 0x18, 0x0c, 0x20, 0x01, 0x28, 0x0b, 0x32, - 0x33, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, - 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x53, 0x69, 0x67, 0x6e, 0x65, 0x64, 0x42, 0x6c, - 0x69, 0x6e, 0x64, 0x65, 0x64, 0x42, 0x65, 0x61, 0x63, 0x6f, 0x6e, 0x42, 0x6c, 0x6f, 0x63, 0x6b, - 0x46, 0x75, 0x6c, 0x75, 0x48, 0x00, 0x52, 0x0b, 0x62, 0x6c, 0x69, 0x6e, 0x64, 0x65, 0x64, 0x46, - 0x75, 0x6c, 0x75, 0x12, 0x1d, 0x0a, 0x0a, 0x69, 0x73, 0x5f, 0x62, 0x6c, 0x69, 0x6e, 0x64, 0x65, - 0x64, 0x18, 0x64, 0x20, 0x01, 0x28, 0x08, 0x52, 0x09, 0x69, 0x73, 0x42, 0x6c, 0x69, 0x6e, 0x64, - 0x65, 0x64, 0x3a, 0x02, 0x18, 0x01, 0x42, 0x07, 0x0a, 0x05, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x4a, - 0x04, 0x08, 0x65, 0x10, 0x66, 0x22, 0xa1, 0x08, 0x0a, 0x12, 0x47, 0x65, 0x6e, 0x65, 0x72, 0x69, - 0x63, 0x42, 0x65, 0x61, 0x63, 0x6f, 0x6e, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x12, 0x3c, 0x0a, 0x06, - 0x70, 0x68, 0x61, 0x73, 0x65, 0x30, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x22, 0x2e, 0x65, - 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, - 0x70, 0x68, 0x61, 0x31, 0x2e, 0x42, 0x65, 0x61, 0x63, 0x6f, 0x6e, 0x42, 0x6c, 0x6f, 0x63, 0x6b, - 0x48, 0x00, 0x52, 0x06, 0x70, 0x68, 0x61, 0x73, 0x65, 0x30, 0x12, 0x42, 0x0a, 0x06, 0x61, 0x6c, - 0x74, 0x61, 0x69, 0x72, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x28, 0x2e, 0x65, 0x74, 0x68, - 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, - 0x61, 0x31, 0x2e, 0x42, 0x65, 0x61, 0x63, 0x6f, 0x6e, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x41, 0x6c, - 0x74, 0x61, 0x69, 0x72, 0x48, 0x00, 0x52, 0x06, 0x61, 0x6c, 0x74, 0x61, 0x69, 0x72, 0x12, 0x4b, - 0x0a, 0x09, 0x62, 0x65, 0x6c, 0x6c, 0x61, 0x74, 0x72, 0x69, 0x78, 0x18, 0x03, 0x20, 0x01, 0x28, - 0x0b, 0x32, 0x2b, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, - 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x42, 0x65, 0x61, 0x63, 0x6f, 0x6e, - 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x42, 0x65, 0x6c, 0x6c, 0x61, 0x74, 0x72, 0x69, 0x78, 0x48, 0x00, - 0x52, 0x09, 0x62, 0x65, 0x6c, 0x6c, 0x61, 0x74, 0x72, 0x69, 0x78, 0x12, 0x61, 0x0a, 0x11, 0x62, - 0x6c, 0x69, 0x6e, 0x64, 0x65, 0x64, 0x5f, 0x62, 0x65, 0x6c, 0x6c, 0x61, 0x74, 0x72, 0x69, 0x78, - 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x32, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, - 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x42, - 0x6c, 0x69, 0x6e, 0x64, 0x65, 0x64, 0x42, 0x65, 0x61, 0x63, 0x6f, 0x6e, 0x42, 0x6c, 0x6f, 0x63, - 0x6b, 0x42, 0x65, 0x6c, 0x6c, 0x61, 0x74, 0x72, 0x69, 0x78, 0x48, 0x00, 0x52, 0x10, 0x62, 0x6c, - 0x69, 0x6e, 0x64, 0x65, 0x64, 0x42, 0x65, 0x6c, 0x6c, 0x61, 0x74, 0x72, 0x69, 0x78, 0x12, 0x45, - 0x0a, 0x07, 0x63, 0x61, 0x70, 0x65, 0x6c, 0x6c, 0x61, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, - 0x29, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, - 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x42, 0x65, 0x61, 0x63, 0x6f, 0x6e, 0x42, 0x6c, - 0x6f, 0x63, 0x6b, 0x43, 0x61, 0x70, 0x65, 0x6c, 0x6c, 0x61, 0x48, 0x00, 0x52, 0x07, 0x63, 0x61, - 0x70, 0x65, 0x6c, 0x6c, 0x61, 0x12, 0x5b, 0x0a, 0x0f, 0x62, 0x6c, 0x69, 0x6e, 0x64, 0x65, 0x64, - 0x5f, 0x63, 0x61, 0x70, 0x65, 0x6c, 0x6c, 0x61, 0x18, 0x06, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x30, - 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, - 0x61, 0x6c, 0x70, 
0x68, 0x61, 0x31, 0x2e, 0x42, 0x6c, 0x69, 0x6e, 0x64, 0x65, 0x64, 0x42, 0x65, - 0x61, 0x63, 0x6f, 0x6e, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x43, 0x61, 0x70, 0x65, 0x6c, 0x6c, 0x61, - 0x48, 0x00, 0x52, 0x0e, 0x62, 0x6c, 0x69, 0x6e, 0x64, 0x65, 0x64, 0x43, 0x61, 0x70, 0x65, 0x6c, - 0x6c, 0x61, 0x12, 0x47, 0x0a, 0x05, 0x64, 0x65, 0x6e, 0x65, 0x62, 0x18, 0x07, 0x20, 0x01, 0x28, - 0x0b, 0x32, 0x2f, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, - 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x42, 0x65, 0x61, 0x63, 0x6f, 0x6e, - 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x43, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x73, 0x44, 0x65, 0x6e, - 0x65, 0x62, 0x48, 0x00, 0x52, 0x05, 0x64, 0x65, 0x6e, 0x65, 0x62, 0x12, 0x55, 0x0a, 0x0d, 0x62, - 0x6c, 0x69, 0x6e, 0x64, 0x65, 0x64, 0x5f, 0x64, 0x65, 0x6e, 0x65, 0x62, 0x18, 0x08, 0x20, 0x01, - 0x28, 0x0b, 0x32, 0x2e, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, - 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x42, 0x6c, 0x69, 0x6e, 0x64, - 0x65, 0x64, 0x42, 0x65, 0x61, 0x63, 0x6f, 0x6e, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x44, 0x65, 0x6e, - 0x65, 0x62, 0x48, 0x00, 0x52, 0x0c, 0x62, 0x6c, 0x69, 0x6e, 0x64, 0x65, 0x64, 0x44, 0x65, 0x6e, - 0x65, 0x62, 0x12, 0x4d, 0x0a, 0x07, 0x65, 0x6c, 0x65, 0x63, 0x74, 0x72, 0x61, 0x18, 0x09, 0x20, - 0x01, 0x28, 0x0b, 0x32, 0x31, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, - 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x42, 0x65, 0x61, 0x63, - 0x6f, 0x6e, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x43, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x73, 0x45, - 0x6c, 0x65, 0x63, 0x74, 0x72, 0x61, 0x48, 0x00, 0x52, 0x07, 0x65, 0x6c, 0x65, 0x63, 0x74, 0x72, - 0x61, 0x12, 0x5b, 0x0a, 0x0f, 0x62, 0x6c, 0x69, 0x6e, 0x64, 0x65, 0x64, 0x5f, 0x65, 0x6c, 0x65, - 0x63, 0x74, 0x72, 0x61, 0x18, 0x0a, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x30, 0x2e, 0x65, 0x74, 0x68, - 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, - 0x61, 0x31, 0x2e, 0x42, 0x6c, 0x69, 0x6e, 0x64, 0x65, 0x64, 0x42, 0x65, 0x61, 0x63, 0x6f, 0x6e, - 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x45, 0x6c, 0x65, 0x63, 0x74, 0x72, 0x61, 0x48, 0x00, 0x52, 0x0e, - 0x62, 0x6c, 0x69, 0x6e, 0x64, 0x65, 0x64, 0x45, 0x6c, 0x65, 0x63, 0x74, 0x72, 0x61, 0x12, 0x44, - 0x0a, 0x04, 0x66, 0x75, 0x6c, 0x75, 0x18, 0x0b, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x2e, 0x2e, 0x65, - 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, - 0x70, 0x68, 0x61, 0x31, 0x2e, 0x42, 0x65, 0x61, 0x63, 0x6f, 0x6e, 0x42, 0x6c, 0x6f, 0x63, 0x6b, - 0x43, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x73, 0x46, 0x75, 0x6c, 0x75, 0x48, 0x00, 0x52, 0x04, - 0x66, 0x75, 0x6c, 0x75, 0x12, 0x52, 0x0a, 0x0c, 0x62, 0x6c, 0x69, 0x6e, 0x64, 0x65, 0x64, 0x5f, - 0x66, 0x75, 0x6c, 0x75, 0x18, 0x0c, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x2d, 0x2e, 0x65, 0x74, 0x68, - 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, - 0x61, 0x31, 0x2e, 0x42, 0x6c, 0x69, 0x6e, 0x64, 0x65, 0x64, 0x42, 0x65, 0x61, 0x63, 0x6f, 0x6e, - 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x46, 0x75, 0x6c, 0x75, 0x48, 0x00, 0x52, 0x0b, 0x62, 0x6c, 0x69, - 0x6e, 0x64, 0x65, 0x64, 0x46, 0x75, 0x6c, 0x75, 0x12, 0x1d, 0x0a, 0x0a, 0x69, 0x73, 0x5f, 0x62, - 0x6c, 0x69, 0x6e, 0x64, 0x65, 0x64, 0x18, 0x64, 0x20, 0x01, 0x28, 0x08, 0x52, 0x09, 0x69, 0x73, - 0x42, 0x6c, 0x69, 0x6e, 0x64, 0x65, 0x64, 0x12, 0x23, 0x0a, 0x0d, 0x70, 0x61, 0x79, 0x6c, 0x6f, - 0x61, 0x64, 0x5f, 0x76, 0x61, 0x6c, 0x75, 
0x65, 0x18, 0x65, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0c, - 0x70, 0x61, 0x79, 0x6c, 0x6f, 0x61, 0x64, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x18, 0x01, - 0x42, 0x07, 0x0a, 0x05, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x22, 0x73, 0x0a, 0x11, 0x53, 0x69, 0x67, - 0x6e, 0x65, 0x64, 0x42, 0x65, 0x61, 0x63, 0x6f, 0x6e, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x12, 0x38, - 0x0a, 0x05, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x22, 0x2e, + 0x63, 0x6b, 0x43, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x73, 0x45, 0x6c, 0x65, 0x63, 0x74, 0x72, + 0x61, 0x48, 0x00, 0x52, 0x07, 0x65, 0x6c, 0x65, 0x63, 0x74, 0x72, 0x61, 0x12, 0x61, 0x0a, 0x0f, + 0x62, 0x6c, 0x69, 0x6e, 0x64, 0x65, 0x64, 0x5f, 0x65, 0x6c, 0x65, 0x63, 0x74, 0x72, 0x61, 0x18, + 0x0a, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x36, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, + 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x53, 0x69, + 0x67, 0x6e, 0x65, 0x64, 0x42, 0x6c, 0x69, 0x6e, 0x64, 0x65, 0x64, 0x42, 0x65, 0x61, 0x63, 0x6f, + 0x6e, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x45, 0x6c, 0x65, 0x63, 0x74, 0x72, 0x61, 0x48, 0x00, 0x52, + 0x0e, 0x62, 0x6c, 0x69, 0x6e, 0x64, 0x65, 0x64, 0x45, 0x6c, 0x65, 0x63, 0x74, 0x72, 0x61, 0x12, + 0x4a, 0x0a, 0x04, 0x66, 0x75, 0x6c, 0x75, 0x18, 0x0b, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x34, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, - 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x42, 0x65, 0x61, 0x63, 0x6f, 0x6e, 0x42, 0x6c, 0x6f, 0x63, - 0x6b, 0x52, 0x05, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x12, 0x24, 0x0a, 0x09, 0x73, 0x69, 0x67, 0x6e, - 0x61, 0x74, 0x75, 0x72, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x06, 0x8a, 0xb5, 0x18, - 0x02, 0x39, 0x36, 0x52, 0x09, 0x73, 0x69, 0x67, 0x6e, 0x61, 0x74, 0x75, 0x72, 0x65, 0x22, 0xea, - 0x02, 0x0a, 0x0b, 0x42, 0x65, 0x61, 0x63, 0x6f, 0x6e, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x12, 0x58, - 0x0a, 0x04, 0x73, 0x6c, 0x6f, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, 0x42, 0x44, 0x82, 0xb5, - 0x18, 0x40, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, - 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, - 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, - 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x53, 0x6c, - 0x6f, 0x74, 0x52, 0x04, 0x73, 0x6c, 0x6f, 0x74, 0x12, 0x75, 0x0a, 0x0e, 0x70, 0x72, 0x6f, 0x70, - 0x6f, 0x73, 0x65, 0x72, 0x5f, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x18, 0x02, 0x20, 0x01, 0x28, 0x04, - 0x42, 0x4e, 0x82, 0xb5, 0x18, 0x4a, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, - 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, - 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, - 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, - 0x73, 0x2e, 0x56, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, 0x49, 0x6e, 0x64, 0x65, 0x78, - 0x52, 0x0d, 0x70, 0x72, 0x6f, 0x70, 0x6f, 0x73, 0x65, 0x72, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x12, - 0x27, 0x0a, 0x0b, 0x70, 0x61, 0x72, 0x65, 0x6e, 0x74, 0x5f, 0x72, 0x6f, 0x6f, 0x74, 0x18, 0x03, - 0x20, 0x01, 0x28, 0x0c, 0x42, 0x06, 0x8a, 0xb5, 0x18, 0x02, 0x33, 0x32, 0x52, 0x0a, 0x70, 0x61, - 0x72, 0x65, 0x6e, 0x74, 0x52, 0x6f, 0x6f, 0x74, 0x12, 0x25, 0x0a, 0x0a, 0x73, 0x74, 0x61, 0x74, - 0x65, 0x5f, 0x72, 0x6f, 0x6f, 0x74, 0x18, 0x04, 0x20, 0x01, 0x28, 
0x0c, 0x42, 0x06, 0x8a, 0xb5, - 0x18, 0x02, 0x33, 0x32, 0x52, 0x09, 0x73, 0x74, 0x61, 0x74, 0x65, 0x52, 0x6f, 0x6f, 0x74, 0x12, - 0x3a, 0x0a, 0x04, 0x62, 0x6f, 0x64, 0x79, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x26, 0x2e, - 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, - 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x42, 0x65, 0x61, 0x63, 0x6f, 0x6e, 0x42, 0x6c, 0x6f, 0x63, - 0x6b, 0x42, 0x6f, 0x64, 0x79, 0x52, 0x04, 0x62, 0x6f, 0x64, 0x79, 0x22, 0xd1, 0x04, 0x0a, 0x0f, - 0x42, 0x65, 0x61, 0x63, 0x6f, 0x6e, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x42, 0x6f, 0x64, 0x79, 0x12, - 0x2b, 0x0a, 0x0d, 0x72, 0x61, 0x6e, 0x64, 0x61, 0x6f, 0x5f, 0x72, 0x65, 0x76, 0x65, 0x61, 0x6c, - 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x06, 0x8a, 0xb5, 0x18, 0x02, 0x39, 0x36, 0x52, 0x0c, - 0x72, 0x61, 0x6e, 0x64, 0x61, 0x6f, 0x52, 0x65, 0x76, 0x65, 0x61, 0x6c, 0x12, 0x3c, 0x0a, 0x09, - 0x65, 0x74, 0x68, 0x31, 0x5f, 0x64, 0x61, 0x74, 0x61, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, - 0x1f, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, - 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x45, 0x74, 0x68, 0x31, 0x44, 0x61, 0x74, 0x61, - 0x52, 0x08, 0x65, 0x74, 0x68, 0x31, 0x44, 0x61, 0x74, 0x61, 0x12, 0x22, 0x0a, 0x08, 0x67, 0x72, - 0x61, 0x66, 0x66, 0x69, 0x74, 0x69, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x06, 0x8a, 0xb5, - 0x18, 0x02, 0x33, 0x32, 0x52, 0x08, 0x67, 0x72, 0x61, 0x66, 0x66, 0x69, 0x74, 0x69, 0x12, 0x5e, - 0x0a, 0x12, 0x70, 0x72, 0x6f, 0x70, 0x6f, 0x73, 0x65, 0x72, 0x5f, 0x73, 0x6c, 0x61, 0x73, 0x68, - 0x69, 0x6e, 0x67, 0x73, 0x18, 0x04, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x27, 0x2e, 0x65, 0x74, 0x68, - 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, - 0x61, 0x31, 0x2e, 0x50, 0x72, 0x6f, 0x70, 0x6f, 0x73, 0x65, 0x72, 0x53, 0x6c, 0x61, 0x73, 0x68, - 0x69, 0x6e, 0x67, 0x42, 0x06, 0x92, 0xb5, 0x18, 0x02, 0x31, 0x36, 0x52, 0x11, 0x70, 0x72, 0x6f, - 0x70, 0x6f, 0x73, 0x65, 0x72, 0x53, 0x6c, 0x61, 0x73, 0x68, 0x69, 0x6e, 0x67, 0x73, 0x12, 0x5d, - 0x0a, 0x12, 0x61, 0x74, 0x74, 0x65, 0x73, 0x74, 0x65, 0x72, 0x5f, 0x73, 0x6c, 0x61, 0x73, 0x68, - 0x69, 0x6e, 0x67, 0x73, 0x18, 0x05, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x27, 0x2e, 0x65, 0x74, 0x68, - 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, - 0x61, 0x31, 0x2e, 0x41, 0x74, 0x74, 0x65, 0x73, 0x74, 0x65, 0x72, 0x53, 0x6c, 0x61, 0x73, 0x68, - 0x69, 0x6e, 0x67, 0x42, 0x05, 0x92, 0xb5, 0x18, 0x01, 0x32, 0x52, 0x11, 0x61, 0x74, 0x74, 0x65, - 0x73, 0x74, 0x65, 0x72, 0x53, 0x6c, 0x61, 0x73, 0x68, 0x69, 0x6e, 0x67, 0x73, 0x12, 0x4f, 0x0a, - 0x0c, 0x61, 0x74, 0x74, 0x65, 0x73, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x18, 0x06, 0x20, - 0x03, 0x28, 0x0b, 0x32, 0x22, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, - 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x41, 0x74, 0x74, 0x65, - 0x73, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x42, 0x07, 0x92, 0xb5, 0x18, 0x03, 0x31, 0x32, 0x38, - 0x52, 0x0c, 0x61, 0x74, 0x74, 0x65, 0x73, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x42, - 0x0a, 0x08, 0x64, 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x73, 0x18, 0x07, 0x20, 0x03, 0x28, 0x0b, - 0x32, 0x1e, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, - 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x44, 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, - 0x42, 0x06, 0x92, 0xb5, 0x18, 0x02, 0x31, 0x36, 0x52, 0x08, 0x64, 0x65, 0x70, 0x6f, 0x73, 
0x69, - 0x74, 0x73, 0x12, 0x5b, 0x0a, 0x0f, 0x76, 0x6f, 0x6c, 0x75, 0x6e, 0x74, 0x61, 0x72, 0x79, 0x5f, - 0x65, 0x78, 0x69, 0x74, 0x73, 0x18, 0x08, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x2a, 0x2e, 0x65, 0x74, - 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, - 0x68, 0x61, 0x31, 0x2e, 0x53, 0x69, 0x67, 0x6e, 0x65, 0x64, 0x56, 0x6f, 0x6c, 0x75, 0x6e, 0x74, - 0x61, 0x72, 0x79, 0x45, 0x78, 0x69, 0x74, 0x42, 0x06, 0x92, 0xb5, 0x18, 0x02, 0x31, 0x36, 0x52, - 0x0e, 0x76, 0x6f, 0x6c, 0x75, 0x6e, 0x74, 0x61, 0x72, 0x79, 0x45, 0x78, 0x69, 0x74, 0x73, 0x22, - 0xb2, 0x01, 0x0a, 0x10, 0x41, 0x74, 0x74, 0x65, 0x73, 0x74, 0x65, 0x72, 0x53, 0x6c, 0x61, 0x73, - 0x68, 0x69, 0x6e, 0x67, 0x12, 0x4e, 0x0a, 0x0d, 0x61, 0x74, 0x74, 0x65, 0x73, 0x74, 0x61, 0x74, - 0x69, 0x6f, 0x6e, 0x5f, 0x31, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x29, 0x2e, 0x65, 0x74, - 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, - 0x68, 0x61, 0x31, 0x2e, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x65, 0x64, 0x41, 0x74, 0x74, 0x65, 0x73, - 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x0c, 0x61, 0x74, 0x74, 0x65, 0x73, 0x74, 0x61, 0x74, - 0x69, 0x6f, 0x6e, 0x31, 0x12, 0x4e, 0x0a, 0x0d, 0x61, 0x74, 0x74, 0x65, 0x73, 0x74, 0x61, 0x74, - 0x69, 0x6f, 0x6e, 0x5f, 0x32, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x29, 0x2e, 0x65, 0x74, - 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, - 0x68, 0x61, 0x31, 0x2e, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x65, 0x64, 0x41, 0x74, 0x74, 0x65, 0x73, - 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x0c, 0x61, 0x74, 0x74, 0x65, 0x73, 0x74, 0x61, 0x74, - 0x69, 0x6f, 0x6e, 0x32, 0x22, 0xad, 0x01, 0x0a, 0x12, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x65, 0x64, - 0x41, 0x74, 0x74, 0x65, 0x73, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x35, 0x0a, 0x11, 0x61, - 0x74, 0x74, 0x65, 0x73, 0x74, 0x69, 0x6e, 0x67, 0x5f, 0x69, 0x6e, 0x64, 0x69, 0x63, 0x65, 0x73, - 0x18, 0x01, 0x20, 0x03, 0x28, 0x04, 0x42, 0x08, 0x92, 0xb5, 0x18, 0x04, 0x32, 0x30, 0x34, 0x38, - 0x52, 0x10, 0x61, 0x74, 0x74, 0x65, 0x73, 0x74, 0x69, 0x6e, 0x67, 0x49, 0x6e, 0x64, 0x69, 0x63, - 0x65, 0x73, 0x12, 0x3a, 0x0a, 0x04, 0x64, 0x61, 0x74, 0x61, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, - 0x32, 0x26, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, - 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x41, 0x74, 0x74, 0x65, 0x73, 0x74, 0x61, - 0x74, 0x69, 0x6f, 0x6e, 0x44, 0x61, 0x74, 0x61, 0x52, 0x04, 0x64, 0x61, 0x74, 0x61, 0x12, 0x24, - 0x0a, 0x09, 0x73, 0x69, 0x67, 0x6e, 0x61, 0x74, 0x75, 0x72, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, - 0x0c, 0x42, 0x06, 0x8a, 0xb5, 0x18, 0x02, 0x39, 0x36, 0x52, 0x09, 0x73, 0x69, 0x67, 0x6e, 0x61, - 0x74, 0x75, 0x72, 0x65, 0x22, 0x76, 0x0a, 0x1e, 0x53, 0x69, 0x67, 0x6e, 0x65, 0x64, 0x56, 0x61, - 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, 0x52, 0x65, 0x67, 0x69, 0x73, 0x74, 0x72, 0x61, 0x74, - 0x69, 0x6f, 0x6e, 0x73, 0x56, 0x31, 0x12, 0x50, 0x0a, 0x08, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, - 0x65, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x34, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, - 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, - 0x2e, 0x53, 0x69, 0x67, 0x6e, 0x65, 0x64, 0x56, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, - 0x52, 0x65, 0x67, 0x69, 0x73, 0x74, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x56, 0x31, 0x52, 0x08, - 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x73, 0x3a, 0x02, 0x18, 0x01, 0x22, 0x8f, 0x01, 0x0a, - 0x1d, 0x53, 0x69, 
0x67, 0x6e, 0x65, 0x64, 0x56, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, - 0x52, 0x65, 0x67, 0x69, 0x73, 0x74, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x56, 0x31, 0x12, 0x48, - 0x0a, 0x07, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, - 0x2e, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, - 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x56, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, - 0x72, 0x52, 0x65, 0x67, 0x69, 0x73, 0x74, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x56, 0x31, 0x52, - 0x07, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x12, 0x24, 0x0a, 0x09, 0x73, 0x69, 0x67, 0x6e, - 0x61, 0x74, 0x75, 0x72, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x06, 0x8a, 0xb5, 0x18, - 0x02, 0x39, 0x36, 0x52, 0x09, 0x73, 0x69, 0x67, 0x6e, 0x61, 0x74, 0x75, 0x72, 0x65, 0x22, 0xa1, - 0x01, 0x0a, 0x17, 0x56, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, 0x52, 0x65, 0x67, 0x69, - 0x73, 0x74, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x56, 0x31, 0x12, 0x2b, 0x0a, 0x0d, 0x66, 0x65, - 0x65, 0x5f, 0x72, 0x65, 0x63, 0x69, 0x70, 0x69, 0x65, 0x6e, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, - 0x0c, 0x42, 0x06, 0x8a, 0xb5, 0x18, 0x02, 0x32, 0x30, 0x52, 0x0c, 0x66, 0x65, 0x65, 0x52, 0x65, - 0x63, 0x69, 0x70, 0x69, 0x65, 0x6e, 0x74, 0x12, 0x1b, 0x0a, 0x09, 0x67, 0x61, 0x73, 0x5f, 0x6c, - 0x69, 0x6d, 0x69, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x04, 0x52, 0x08, 0x67, 0x61, 0x73, 0x4c, - 0x69, 0x6d, 0x69, 0x74, 0x12, 0x1c, 0x0a, 0x09, 0x74, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, - 0x70, 0x18, 0x03, 0x20, 0x01, 0x28, 0x04, 0x52, 0x09, 0x74, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, - 0x6d, 0x70, 0x12, 0x1e, 0x0a, 0x06, 0x70, 0x75, 0x62, 0x6b, 0x65, 0x79, 0x18, 0x04, 0x20, 0x01, - 0x28, 0x0c, 0x42, 0x06, 0x8a, 0xb5, 0x18, 0x02, 0x34, 0x38, 0x52, 0x06, 0x70, 0x75, 0x62, 0x6b, - 0x65, 0x79, 0x22, 0x75, 0x0a, 0x10, 0x53, 0x69, 0x67, 0x6e, 0x65, 0x64, 0x42, 0x75, 0x69, 0x6c, - 0x64, 0x65, 0x72, 0x42, 0x69, 0x64, 0x12, 0x3b, 0x0a, 0x07, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, - 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x21, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, - 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, - 0x42, 0x75, 0x69, 0x6c, 0x64, 0x65, 0x72, 0x42, 0x69, 0x64, 0x52, 0x07, 0x6d, 0x65, 0x73, 0x73, - 0x61, 0x67, 0x65, 0x12, 0x24, 0x0a, 0x09, 0x73, 0x69, 0x67, 0x6e, 0x61, 0x74, 0x75, 0x72, 0x65, - 0x18, 0x02, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x06, 0x8a, 0xb5, 0x18, 0x02, 0x39, 0x36, 0x52, 0x09, - 0x73, 0x69, 0x67, 0x6e, 0x61, 0x74, 0x75, 0x72, 0x65, 0x22, 0x8e, 0x01, 0x0a, 0x0a, 0x42, 0x75, - 0x69, 0x6c, 0x64, 0x65, 0x72, 0x42, 0x69, 0x64, 0x12, 0x42, 0x0a, 0x06, 0x68, 0x65, 0x61, 0x64, - 0x65, 0x72, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x2a, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, - 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x6e, 0x67, 0x69, 0x6e, 0x65, 0x2e, 0x76, 0x31, 0x2e, 0x45, 0x78, - 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x50, 0x61, 0x79, 0x6c, 0x6f, 0x61, 0x64, 0x48, 0x65, - 0x61, 0x64, 0x65, 0x72, 0x52, 0x06, 0x68, 0x65, 0x61, 0x64, 0x65, 0x72, 0x12, 0x1c, 0x0a, 0x05, - 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x06, 0x8a, 0xb5, 0x18, - 0x02, 0x33, 0x32, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x12, 0x1e, 0x0a, 0x06, 0x70, 0x75, - 0x62, 0x6b, 0x65, 0x79, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x06, 0x8a, 0xb5, 0x18, 0x02, - 0x34, 0x38, 0x52, 0x06, 0x70, 0x75, 0x62, 0x6b, 0x65, 0x79, 0x22, 0x7f, 0x0a, 0x17, 0x53, 0x69, - 0x67, 0x6e, 0x65, 0x64, 0x42, 0x65, 0x61, 
0x63, 0x6f, 0x6e, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x41, - 0x6c, 0x74, 0x61, 0x69, 0x72, 0x12, 0x3e, 0x0a, 0x05, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x18, 0x01, - 0x20, 0x01, 0x28, 0x0b, 0x32, 0x28, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, - 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x42, 0x65, 0x61, - 0x63, 0x6f, 0x6e, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x41, 0x6c, 0x74, 0x61, 0x69, 0x72, 0x52, 0x05, - 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x12, 0x24, 0x0a, 0x09, 0x73, 0x69, 0x67, 0x6e, 0x61, 0x74, 0x75, - 0x72, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x06, 0x8a, 0xb5, 0x18, 0x02, 0x39, 0x36, - 0x52, 0x09, 0x73, 0x69, 0x67, 0x6e, 0x61, 0x74, 0x75, 0x72, 0x65, 0x22, 0xf6, 0x02, 0x0a, 0x11, - 0x42, 0x65, 0x61, 0x63, 0x6f, 0x6e, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x41, 0x6c, 0x74, 0x61, 0x69, - 0x72, 0x12, 0x58, 0x0a, 0x04, 0x73, 0x6c, 0x6f, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, 0x42, - 0x44, 0x82, 0xb5, 0x18, 0x40, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, - 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, - 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, - 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, - 0x2e, 0x53, 0x6c, 0x6f, 0x74, 0x52, 0x04, 0x73, 0x6c, 0x6f, 0x74, 0x12, 0x75, 0x0a, 0x0e, 0x70, - 0x72, 0x6f, 0x70, 0x6f, 0x73, 0x65, 0x72, 0x5f, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x18, 0x02, 0x20, - 0x01, 0x28, 0x04, 0x42, 0x4e, 0x82, 0xb5, 0x18, 0x4a, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, - 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, - 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, - 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, - 0x69, 0x76, 0x65, 0x73, 0x2e, 0x56, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, 0x49, 0x6e, - 0x64, 0x65, 0x78, 0x52, 0x0d, 0x70, 0x72, 0x6f, 0x70, 0x6f, 0x73, 0x65, 0x72, 0x49, 0x6e, 0x64, - 0x65, 0x78, 0x12, 0x27, 0x0a, 0x0b, 0x70, 0x61, 0x72, 0x65, 0x6e, 0x74, 0x5f, 0x72, 0x6f, 0x6f, - 0x74, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x06, 0x8a, 0xb5, 0x18, 0x02, 0x33, 0x32, 0x52, - 0x0a, 0x70, 0x61, 0x72, 0x65, 0x6e, 0x74, 0x52, 0x6f, 0x6f, 0x74, 0x12, 0x25, 0x0a, 0x0a, 0x73, - 0x74, 0x61, 0x74, 0x65, 0x5f, 0x72, 0x6f, 0x6f, 0x74, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0c, 0x42, - 0x06, 0x8a, 0xb5, 0x18, 0x02, 0x33, 0x32, 0x52, 0x09, 0x73, 0x74, 0x61, 0x74, 0x65, 0x52, 0x6f, - 0x6f, 0x74, 0x12, 0x40, 0x0a, 0x04, 0x62, 0x6f, 0x64, 0x79, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, - 0x32, 0x2c, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, - 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x42, 0x65, 0x61, 0x63, 0x6f, 0x6e, 0x42, - 0x6c, 0x6f, 0x63, 0x6b, 0x42, 0x6f, 0x64, 0x79, 0x41, 0x6c, 0x74, 0x61, 0x69, 0x72, 0x52, 0x04, - 0x62, 0x6f, 0x64, 0x79, 0x22, 0xa4, 0x05, 0x0a, 0x15, 0x42, 0x65, 0x61, 0x63, 0x6f, 0x6e, 0x42, - 0x6c, 0x6f, 0x63, 0x6b, 0x42, 0x6f, 0x64, 0x79, 0x41, 0x6c, 0x74, 0x61, 0x69, 0x72, 0x12, 0x2b, - 0x0a, 0x0d, 0x72, 0x61, 0x6e, 0x64, 0x61, 0x6f, 0x5f, 0x72, 0x65, 0x76, 0x65, 0x61, 0x6c, 0x18, - 0x01, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x06, 0x8a, 0xb5, 0x18, 0x02, 0x39, 0x36, 0x52, 0x0c, 0x72, - 0x61, 0x6e, 0x64, 0x61, 0x6f, 0x52, 0x65, 0x76, 0x65, 0x61, 0x6c, 0x12, 0x3c, 0x0a, 0x09, 0x65, - 0x74, 0x68, 0x31, 0x5f, 0x64, 0x61, 0x74, 0x61, 0x18, 0x02, 0x20, 
0x01, 0x28, 0x0b, 0x32, 0x1f, - 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, - 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x45, 0x74, 0x68, 0x31, 0x44, 0x61, 0x74, 0x61, 0x52, - 0x08, 0x65, 0x74, 0x68, 0x31, 0x44, 0x61, 0x74, 0x61, 0x12, 0x22, 0x0a, 0x08, 0x67, 0x72, 0x61, - 0x66, 0x66, 0x69, 0x74, 0x69, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x06, 0x8a, 0xb5, 0x18, - 0x02, 0x33, 0x32, 0x52, 0x08, 0x67, 0x72, 0x61, 0x66, 0x66, 0x69, 0x74, 0x69, 0x12, 0x5e, 0x0a, - 0x12, 0x70, 0x72, 0x6f, 0x70, 0x6f, 0x73, 0x65, 0x72, 0x5f, 0x73, 0x6c, 0x61, 0x73, 0x68, 0x69, - 0x6e, 0x67, 0x73, 0x18, 0x04, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x27, 0x2e, 0x65, 0x74, 0x68, 0x65, - 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, - 0x31, 0x2e, 0x50, 0x72, 0x6f, 0x70, 0x6f, 0x73, 0x65, 0x72, 0x53, 0x6c, 0x61, 0x73, 0x68, 0x69, - 0x6e, 0x67, 0x42, 0x06, 0x92, 0xb5, 0x18, 0x02, 0x31, 0x36, 0x52, 0x11, 0x70, 0x72, 0x6f, 0x70, - 0x6f, 0x73, 0x65, 0x72, 0x53, 0x6c, 0x61, 0x73, 0x68, 0x69, 0x6e, 0x67, 0x73, 0x12, 0x5d, 0x0a, - 0x12, 0x61, 0x74, 0x74, 0x65, 0x73, 0x74, 0x65, 0x72, 0x5f, 0x73, 0x6c, 0x61, 0x73, 0x68, 0x69, - 0x6e, 0x67, 0x73, 0x18, 0x05, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x27, 0x2e, 0x65, 0x74, 0x68, 0x65, - 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, - 0x31, 0x2e, 0x41, 0x74, 0x74, 0x65, 0x73, 0x74, 0x65, 0x72, 0x53, 0x6c, 0x61, 0x73, 0x68, 0x69, - 0x6e, 0x67, 0x42, 0x05, 0x92, 0xb5, 0x18, 0x01, 0x32, 0x52, 0x11, 0x61, 0x74, 0x74, 0x65, 0x73, - 0x74, 0x65, 0x72, 0x53, 0x6c, 0x61, 0x73, 0x68, 0x69, 0x6e, 0x67, 0x73, 0x12, 0x4f, 0x0a, 0x0c, - 0x61, 0x74, 0x74, 0x65, 0x73, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x18, 0x06, 0x20, 0x03, - 0x28, 0x0b, 0x32, 0x22, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, - 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x41, 0x74, 0x74, 0x65, 0x73, - 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x42, 0x07, 0x92, 0xb5, 0x18, 0x03, 0x31, 0x32, 0x38, 0x52, - 0x0c, 0x61, 0x74, 0x74, 0x65, 0x73, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x42, 0x0a, - 0x08, 0x64, 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x73, 0x18, 0x07, 0x20, 0x03, 0x28, 0x0b, 0x32, - 0x1e, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, - 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x44, 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x42, - 0x06, 0x92, 0xb5, 0x18, 0x02, 0x31, 0x36, 0x52, 0x08, 0x64, 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, - 0x73, 0x12, 0x5b, 0x0a, 0x0f, 0x76, 0x6f, 0x6c, 0x75, 0x6e, 0x74, 0x61, 0x72, 0x79, 0x5f, 0x65, - 0x78, 0x69, 0x74, 0x73, 0x18, 0x08, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x2a, 0x2e, 0x65, 0x74, 0x68, - 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, - 0x61, 0x31, 0x2e, 0x53, 0x69, 0x67, 0x6e, 0x65, 0x64, 0x56, 0x6f, 0x6c, 0x75, 0x6e, 0x74, 0x61, - 0x72, 0x79, 0x45, 0x78, 0x69, 0x74, 0x42, 0x06, 0x92, 0xb5, 0x18, 0x02, 0x31, 0x36, 0x52, 0x0e, - 0x76, 0x6f, 0x6c, 0x75, 0x6e, 0x74, 0x61, 0x72, 0x79, 0x45, 0x78, 0x69, 0x74, 0x73, 0x12, 0x4b, - 0x0a, 0x0e, 0x73, 0x79, 0x6e, 0x63, 0x5f, 0x61, 0x67, 0x67, 0x72, 0x65, 0x67, 0x61, 0x74, 0x65, - 0x18, 0x09, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x24, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, - 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x53, - 0x79, 0x6e, 0x63, 0x41, 0x67, 0x67, 0x72, 0x65, 0x67, 0x61, 0x74, 0x65, 0x52, 0x0d, 0x73, 
0x79, - 0x6e, 0x63, 0x41, 0x67, 0x67, 0x72, 0x65, 0x67, 0x61, 0x74, 0x65, 0x22, 0x85, 0x01, 0x0a, 0x1a, - 0x53, 0x69, 0x67, 0x6e, 0x65, 0x64, 0x42, 0x65, 0x61, 0x63, 0x6f, 0x6e, 0x42, 0x6c, 0x6f, 0x63, - 0x6b, 0x42, 0x65, 0x6c, 0x6c, 0x61, 0x74, 0x72, 0x69, 0x78, 0x12, 0x41, 0x0a, 0x05, 0x62, 0x6c, - 0x6f, 0x63, 0x6b, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x2b, 0x2e, 0x65, 0x74, 0x68, 0x65, - 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, - 0x31, 0x2e, 0x42, 0x65, 0x61, 0x63, 0x6f, 0x6e, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x42, 0x65, 0x6c, - 0x6c, 0x61, 0x74, 0x72, 0x69, 0x78, 0x52, 0x05, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x12, 0x24, 0x0a, - 0x09, 0x73, 0x69, 0x67, 0x6e, 0x61, 0x74, 0x75, 0x72, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0c, - 0x42, 0x06, 0x8a, 0xb5, 0x18, 0x02, 0x39, 0x36, 0x52, 0x09, 0x73, 0x69, 0x67, 0x6e, 0x61, 0x74, - 0x75, 0x72, 0x65, 0x22, 0xfc, 0x02, 0x0a, 0x14, 0x42, 0x65, 0x61, 0x63, 0x6f, 0x6e, 0x42, 0x6c, - 0x6f, 0x63, 0x6b, 0x42, 0x65, 0x6c, 0x6c, 0x61, 0x74, 0x72, 0x69, 0x78, 0x12, 0x58, 0x0a, 0x04, - 0x73, 0x6c, 0x6f, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, 0x42, 0x44, 0x82, 0xb5, 0x18, 0x40, - 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, - 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, - 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, - 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x53, 0x6c, 0x6f, 0x74, - 0x52, 0x04, 0x73, 0x6c, 0x6f, 0x74, 0x12, 0x75, 0x0a, 0x0e, 0x70, 0x72, 0x6f, 0x70, 0x6f, 0x73, - 0x65, 0x72, 0x5f, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x18, 0x02, 0x20, 0x01, 0x28, 0x04, 0x42, 0x4e, - 0x82, 0xb5, 0x18, 0x4a, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, - 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, - 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, - 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, - 0x56, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x52, 0x0d, - 0x70, 0x72, 0x6f, 0x70, 0x6f, 0x73, 0x65, 0x72, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x12, 0x27, 0x0a, - 0x0b, 0x70, 0x61, 0x72, 0x65, 0x6e, 0x74, 0x5f, 0x72, 0x6f, 0x6f, 0x74, 0x18, 0x03, 0x20, 0x01, - 0x28, 0x0c, 0x42, 0x06, 0x8a, 0xb5, 0x18, 0x02, 0x33, 0x32, 0x52, 0x0a, 0x70, 0x61, 0x72, 0x65, - 0x6e, 0x74, 0x52, 0x6f, 0x6f, 0x74, 0x12, 0x25, 0x0a, 0x0a, 0x73, 0x74, 0x61, 0x74, 0x65, 0x5f, - 0x72, 0x6f, 0x6f, 0x74, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x06, 0x8a, 0xb5, 0x18, 0x02, - 0x33, 0x32, 0x52, 0x09, 0x73, 0x74, 0x61, 0x74, 0x65, 0x52, 0x6f, 0x6f, 0x74, 0x12, 0x43, 0x0a, - 0x04, 0x62, 0x6f, 0x64, 0x79, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x2f, 0x2e, 0x65, 0x74, - 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, - 0x68, 0x61, 0x31, 0x2e, 0x42, 0x65, 0x61, 0x63, 0x6f, 0x6e, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x42, - 0x6f, 0x64, 0x79, 0x42, 0x65, 0x6c, 0x6c, 0x61, 0x74, 0x72, 0x69, 0x78, 0x52, 0x04, 0x62, 0x6f, - 0x64, 0x79, 0x22, 0xfa, 0x05, 0x0a, 0x18, 0x42, 0x65, 0x61, 0x63, 0x6f, 0x6e, 0x42, 0x6c, 0x6f, - 0x63, 0x6b, 0x42, 0x6f, 0x64, 0x79, 0x42, 0x65, 0x6c, 0x6c, 0x61, 0x74, 0x72, 0x69, 0x78, 0x12, - 0x2b, 0x0a, 0x0d, 0x72, 0x61, 0x6e, 0x64, 0x61, 0x6f, 0x5f, 0x72, 0x65, 0x76, 0x65, 0x61, 0x6c, - 0x18, 0x01, 0x20, 
0x01, 0x28, 0x0c, 0x42, 0x06, 0x8a, 0xb5, 0x18, 0x02, 0x39, 0x36, 0x52, 0x0c, - 0x72, 0x61, 0x6e, 0x64, 0x61, 0x6f, 0x52, 0x65, 0x76, 0x65, 0x61, 0x6c, 0x12, 0x3c, 0x0a, 0x09, - 0x65, 0x74, 0x68, 0x31, 0x5f, 0x64, 0x61, 0x74, 0x61, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, - 0x1f, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, - 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x45, 0x74, 0x68, 0x31, 0x44, 0x61, 0x74, 0x61, - 0x52, 0x08, 0x65, 0x74, 0x68, 0x31, 0x44, 0x61, 0x74, 0x61, 0x12, 0x22, 0x0a, 0x08, 0x67, 0x72, - 0x61, 0x66, 0x66, 0x69, 0x74, 0x69, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x06, 0x8a, 0xb5, - 0x18, 0x02, 0x33, 0x32, 0x52, 0x08, 0x67, 0x72, 0x61, 0x66, 0x66, 0x69, 0x74, 0x69, 0x12, 0x5e, - 0x0a, 0x12, 0x70, 0x72, 0x6f, 0x70, 0x6f, 0x73, 0x65, 0x72, 0x5f, 0x73, 0x6c, 0x61, 0x73, 0x68, - 0x69, 0x6e, 0x67, 0x73, 0x18, 0x04, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x27, 0x2e, 0x65, 0x74, 0x68, - 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, - 0x61, 0x31, 0x2e, 0x50, 0x72, 0x6f, 0x70, 0x6f, 0x73, 0x65, 0x72, 0x53, 0x6c, 0x61, 0x73, 0x68, - 0x69, 0x6e, 0x67, 0x42, 0x06, 0x92, 0xb5, 0x18, 0x02, 0x31, 0x36, 0x52, 0x11, 0x70, 0x72, 0x6f, - 0x70, 0x6f, 0x73, 0x65, 0x72, 0x53, 0x6c, 0x61, 0x73, 0x68, 0x69, 0x6e, 0x67, 0x73, 0x12, 0x5d, - 0x0a, 0x12, 0x61, 0x74, 0x74, 0x65, 0x73, 0x74, 0x65, 0x72, 0x5f, 0x73, 0x6c, 0x61, 0x73, 0x68, - 0x69, 0x6e, 0x67, 0x73, 0x18, 0x05, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x27, 0x2e, 0x65, 0x74, 0x68, - 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, - 0x61, 0x31, 0x2e, 0x41, 0x74, 0x74, 0x65, 0x73, 0x74, 0x65, 0x72, 0x53, 0x6c, 0x61, 0x73, 0x68, - 0x69, 0x6e, 0x67, 0x42, 0x05, 0x92, 0xb5, 0x18, 0x01, 0x32, 0x52, 0x11, 0x61, 0x74, 0x74, 0x65, - 0x73, 0x74, 0x65, 0x72, 0x53, 0x6c, 0x61, 0x73, 0x68, 0x69, 0x6e, 0x67, 0x73, 0x12, 0x4f, 0x0a, - 0x0c, 0x61, 0x74, 0x74, 0x65, 0x73, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x18, 0x06, 0x20, - 0x03, 0x28, 0x0b, 0x32, 0x22, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, - 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x41, 0x74, 0x74, 0x65, - 0x73, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x42, 0x07, 0x92, 0xb5, 0x18, 0x03, 0x31, 0x32, 0x38, - 0x52, 0x0c, 0x61, 0x74, 0x74, 0x65, 0x73, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x42, - 0x0a, 0x08, 0x64, 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x73, 0x18, 0x07, 0x20, 0x03, 0x28, 0x0b, - 0x32, 0x1e, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, - 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x44, 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, - 0x42, 0x06, 0x92, 0xb5, 0x18, 0x02, 0x31, 0x36, 0x52, 0x08, 0x64, 0x65, 0x70, 0x6f, 0x73, 0x69, - 0x74, 0x73, 0x12, 0x5b, 0x0a, 0x0f, 0x76, 0x6f, 0x6c, 0x75, 0x6e, 0x74, 0x61, 0x72, 0x79, 0x5f, - 0x65, 0x78, 0x69, 0x74, 0x73, 0x18, 0x08, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x2a, 0x2e, 0x65, 0x74, - 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, - 0x68, 0x61, 0x31, 0x2e, 0x53, 0x69, 0x67, 0x6e, 0x65, 0x64, 0x56, 0x6f, 0x6c, 0x75, 0x6e, 0x74, - 0x61, 0x72, 0x79, 0x45, 0x78, 0x69, 0x74, 0x42, 0x06, 0x92, 0xb5, 0x18, 0x02, 0x31, 0x36, 0x52, - 0x0e, 0x76, 0x6f, 0x6c, 0x75, 0x6e, 0x74, 0x61, 0x72, 0x79, 0x45, 0x78, 0x69, 0x74, 0x73, 0x12, - 0x4b, 0x0a, 0x0e, 0x73, 0x79, 0x6e, 0x63, 0x5f, 0x61, 0x67, 0x67, 0x72, 0x65, 0x67, 0x61, 0x74, - 0x65, 0x18, 0x09, 0x20, 0x01, 0x28, 0x0b, 
0x32, 0x24, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, - 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, - 0x53, 0x79, 0x6e, 0x63, 0x41, 0x67, 0x67, 0x72, 0x65, 0x67, 0x61, 0x74, 0x65, 0x52, 0x0d, 0x73, - 0x79, 0x6e, 0x63, 0x41, 0x67, 0x67, 0x72, 0x65, 0x67, 0x61, 0x74, 0x65, 0x12, 0x51, 0x0a, 0x11, - 0x65, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x70, 0x61, 0x79, 0x6c, 0x6f, 0x61, - 0x64, 0x18, 0x0a, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x24, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, - 0x75, 0x6d, 0x2e, 0x65, 0x6e, 0x67, 0x69, 0x6e, 0x65, 0x2e, 0x76, 0x31, 0x2e, 0x45, 0x78, 0x65, - 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x50, 0x61, 0x79, 0x6c, 0x6f, 0x61, 0x64, 0x52, 0x10, 0x65, - 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x50, 0x61, 0x79, 0x6c, 0x6f, 0x61, 0x64, 0x22, - 0x93, 0x01, 0x0a, 0x21, 0x53, 0x69, 0x67, 0x6e, 0x65, 0x64, 0x42, 0x6c, 0x69, 0x6e, 0x64, 0x65, - 0x64, 0x42, 0x65, 0x61, 0x63, 0x6f, 0x6e, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x42, 0x65, 0x6c, 0x6c, - 0x61, 0x74, 0x72, 0x69, 0x78, 0x12, 0x48, 0x0a, 0x05, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x18, 0x01, - 0x20, 0x01, 0x28, 0x0b, 0x32, 0x32, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, - 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x42, 0x6c, 0x69, - 0x6e, 0x64, 0x65, 0x64, 0x42, 0x65, 0x61, 0x63, 0x6f, 0x6e, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x42, - 0x65, 0x6c, 0x6c, 0x61, 0x74, 0x72, 0x69, 0x78, 0x52, 0x05, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x12, - 0x24, 0x0a, 0x09, 0x73, 0x69, 0x67, 0x6e, 0x61, 0x74, 0x75, 0x72, 0x65, 0x18, 0x02, 0x20, 0x01, - 0x28, 0x0c, 0x42, 0x06, 0x8a, 0xb5, 0x18, 0x02, 0x39, 0x36, 0x52, 0x09, 0x73, 0x69, 0x67, 0x6e, - 0x61, 0x74, 0x75, 0x72, 0x65, 0x22, 0x8a, 0x03, 0x0a, 0x1b, 0x42, 0x6c, 0x69, 0x6e, 0x64, 0x65, - 0x64, 0x42, 0x65, 0x61, 0x63, 0x6f, 0x6e, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x42, 0x65, 0x6c, 0x6c, - 0x61, 0x74, 0x72, 0x69, 0x78, 0x12, 0x58, 0x0a, 0x04, 0x73, 0x6c, 0x6f, 0x74, 0x18, 0x01, 0x20, - 0x01, 0x28, 0x04, 0x42, 0x44, 0x82, 0xb5, 0x18, 0x40, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, - 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, - 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, - 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, - 0x69, 0x76, 0x65, 0x73, 0x2e, 0x53, 0x6c, 0x6f, 0x74, 0x52, 0x04, 0x73, 0x6c, 0x6f, 0x74, 0x12, - 0x75, 0x0a, 0x0e, 0x70, 0x72, 0x6f, 0x70, 0x6f, 0x73, 0x65, 0x72, 0x5f, 0x69, 0x6e, 0x64, 0x65, - 0x78, 0x18, 0x02, 0x20, 0x01, 0x28, 0x04, 0x42, 0x4e, 0x82, 0xb5, 0x18, 0x4a, 0x67, 0x69, 0x74, - 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, - 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, - 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, - 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x56, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, - 0x6f, 0x72, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x52, 0x0d, 0x70, 0x72, 0x6f, 0x70, 0x6f, 0x73, 0x65, - 0x72, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x12, 0x27, 0x0a, 0x0b, 0x70, 0x61, 0x72, 0x65, 0x6e, 0x74, - 0x5f, 0x72, 0x6f, 0x6f, 0x74, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x06, 0x8a, 0xb5, 0x18, - 0x02, 0x33, 0x32, 0x52, 0x0a, 0x70, 0x61, 0x72, 0x65, 0x6e, 0x74, 0x52, 0x6f, 0x6f, 0x74, 0x12, - 0x25, 0x0a, 0x0a, 0x73, 0x74, 0x61, 0x74, 0x65, 0x5f, 0x72, 0x6f, 
0x6f, 0x74, 0x18, 0x04, 0x20, - 0x01, 0x28, 0x0c, 0x42, 0x06, 0x8a, 0xb5, 0x18, 0x02, 0x33, 0x32, 0x52, 0x09, 0x73, 0x74, 0x61, - 0x74, 0x65, 0x52, 0x6f, 0x6f, 0x74, 0x12, 0x4a, 0x0a, 0x04, 0x62, 0x6f, 0x64, 0x79, 0x18, 0x05, - 0x20, 0x01, 0x28, 0x0b, 0x32, 0x36, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, - 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x42, 0x6c, 0x69, - 0x6e, 0x64, 0x65, 0x64, 0x42, 0x65, 0x61, 0x63, 0x6f, 0x6e, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x42, - 0x6f, 0x64, 0x79, 0x42, 0x65, 0x6c, 0x6c, 0x61, 0x74, 0x72, 0x69, 0x78, 0x52, 0x04, 0x62, 0x6f, - 0x64, 0x79, 0x22, 0x94, 0x06, 0x0a, 0x1f, 0x42, 0x6c, 0x69, 0x6e, 0x64, 0x65, 0x64, 0x42, 0x65, - 0x61, 0x63, 0x6f, 0x6e, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x42, 0x6f, 0x64, 0x79, 0x42, 0x65, 0x6c, - 0x6c, 0x61, 0x74, 0x72, 0x69, 0x78, 0x12, 0x2b, 0x0a, 0x0d, 0x72, 0x61, 0x6e, 0x64, 0x61, 0x6f, - 0x5f, 0x72, 0x65, 0x76, 0x65, 0x61, 0x6c, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x06, 0x8a, - 0xb5, 0x18, 0x02, 0x39, 0x36, 0x52, 0x0c, 0x72, 0x61, 0x6e, 0x64, 0x61, 0x6f, 0x52, 0x65, 0x76, - 0x65, 0x61, 0x6c, 0x12, 0x3c, 0x0a, 0x09, 0x65, 0x74, 0x68, 0x31, 0x5f, 0x64, 0x61, 0x74, 0x61, - 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1f, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, - 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x45, - 0x74, 0x68, 0x31, 0x44, 0x61, 0x74, 0x61, 0x52, 0x08, 0x65, 0x74, 0x68, 0x31, 0x44, 0x61, 0x74, - 0x61, 0x12, 0x22, 0x0a, 0x08, 0x67, 0x72, 0x61, 0x66, 0x66, 0x69, 0x74, 0x69, 0x18, 0x03, 0x20, - 0x01, 0x28, 0x0c, 0x42, 0x06, 0x8a, 0xb5, 0x18, 0x02, 0x33, 0x32, 0x52, 0x08, 0x67, 0x72, 0x61, - 0x66, 0x66, 0x69, 0x74, 0x69, 0x12, 0x5e, 0x0a, 0x12, 0x70, 0x72, 0x6f, 0x70, 0x6f, 0x73, 0x65, - 0x72, 0x5f, 0x73, 0x6c, 0x61, 0x73, 0x68, 0x69, 0x6e, 0x67, 0x73, 0x18, 0x04, 0x20, 0x03, 0x28, - 0x0b, 0x32, 0x27, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, - 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x50, 0x72, 0x6f, 0x70, 0x6f, 0x73, - 0x65, 0x72, 0x53, 0x6c, 0x61, 0x73, 0x68, 0x69, 0x6e, 0x67, 0x42, 0x06, 0x92, 0xb5, 0x18, 0x02, - 0x31, 0x36, 0x52, 0x11, 0x70, 0x72, 0x6f, 0x70, 0x6f, 0x73, 0x65, 0x72, 0x53, 0x6c, 0x61, 0x73, - 0x68, 0x69, 0x6e, 0x67, 0x73, 0x12, 0x5d, 0x0a, 0x12, 0x61, 0x74, 0x74, 0x65, 0x73, 0x74, 0x65, - 0x72, 0x5f, 0x73, 0x6c, 0x61, 0x73, 0x68, 0x69, 0x6e, 0x67, 0x73, 0x18, 0x05, 0x20, 0x03, 0x28, - 0x0b, 0x32, 0x27, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, - 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x41, 0x74, 0x74, 0x65, 0x73, 0x74, - 0x65, 0x72, 0x53, 0x6c, 0x61, 0x73, 0x68, 0x69, 0x6e, 0x67, 0x42, 0x05, 0x92, 0xb5, 0x18, 0x01, - 0x32, 0x52, 0x11, 0x61, 0x74, 0x74, 0x65, 0x73, 0x74, 0x65, 0x72, 0x53, 0x6c, 0x61, 0x73, 0x68, - 0x69, 0x6e, 0x67, 0x73, 0x12, 0x4f, 0x0a, 0x0c, 0x61, 0x74, 0x74, 0x65, 0x73, 0x74, 0x61, 0x74, - 0x69, 0x6f, 0x6e, 0x73, 0x18, 0x06, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x22, 0x2e, 0x65, 0x74, 0x68, - 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, - 0x61, 0x31, 0x2e, 0x41, 0x74, 0x74, 0x65, 0x73, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x42, 0x07, - 0x92, 0xb5, 0x18, 0x03, 0x31, 0x32, 0x38, 0x52, 0x0c, 0x61, 0x74, 0x74, 0x65, 0x73, 0x74, 0x61, - 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x42, 0x0a, 0x08, 0x64, 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, - 0x73, 0x18, 0x07, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1e, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 
0x65, - 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, - 0x44, 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x42, 0x06, 0x92, 0xb5, 0x18, 0x02, 0x31, 0x36, 0x52, - 0x08, 0x64, 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x73, 0x12, 0x5b, 0x0a, 0x0f, 0x76, 0x6f, 0x6c, - 0x75, 0x6e, 0x74, 0x61, 0x72, 0x79, 0x5f, 0x65, 0x78, 0x69, 0x74, 0x73, 0x18, 0x08, 0x20, 0x03, - 0x28, 0x0b, 0x32, 0x2a, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, - 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x53, 0x69, 0x67, 0x6e, 0x65, - 0x64, 0x56, 0x6f, 0x6c, 0x75, 0x6e, 0x74, 0x61, 0x72, 0x79, 0x45, 0x78, 0x69, 0x74, 0x42, 0x06, - 0x92, 0xb5, 0x18, 0x02, 0x31, 0x36, 0x52, 0x0e, 0x76, 0x6f, 0x6c, 0x75, 0x6e, 0x74, 0x61, 0x72, - 0x79, 0x45, 0x78, 0x69, 0x74, 0x73, 0x12, 0x4b, 0x0a, 0x0e, 0x73, 0x79, 0x6e, 0x63, 0x5f, 0x61, - 0x67, 0x67, 0x72, 0x65, 0x67, 0x61, 0x74, 0x65, 0x18, 0x09, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x24, - 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, - 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x53, 0x79, 0x6e, 0x63, 0x41, 0x67, 0x67, 0x72, 0x65, - 0x67, 0x61, 0x74, 0x65, 0x52, 0x0d, 0x73, 0x79, 0x6e, 0x63, 0x41, 0x67, 0x67, 0x72, 0x65, 0x67, - 0x61, 0x74, 0x65, 0x12, 0x64, 0x0a, 0x18, 0x65, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, - 0x5f, 0x70, 0x61, 0x79, 0x6c, 0x6f, 0x61, 0x64, 0x5f, 0x68, 0x65, 0x61, 0x64, 0x65, 0x72, 0x18, - 0x0a, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x2a, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, - 0x2e, 0x65, 0x6e, 0x67, 0x69, 0x6e, 0x65, 0x2e, 0x76, 0x31, 0x2e, 0x45, 0x78, 0x65, 0x63, 0x75, - 0x74, 0x69, 0x6f, 0x6e, 0x50, 0x61, 0x79, 0x6c, 0x6f, 0x61, 0x64, 0x48, 0x65, 0x61, 0x64, 0x65, - 0x72, 0x52, 0x16, 0x65, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x50, 0x61, 0x79, 0x6c, - 0x6f, 0x61, 0x64, 0x48, 0x65, 0x61, 0x64, 0x65, 0x72, 0x22, 0x81, 0x01, 0x0a, 0x18, 0x53, 0x69, - 0x67, 0x6e, 0x65, 0x64, 0x42, 0x65, 0x61, 0x63, 0x6f, 0x6e, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x43, - 0x61, 0x70, 0x65, 0x6c, 0x6c, 0x61, 0x12, 0x3f, 0x0a, 0x05, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x18, - 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x29, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, + 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x53, 0x69, 0x67, 0x6e, 0x65, 0x64, 0x42, 0x65, 0x61, 0x63, + 0x6f, 0x6e, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x43, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x73, 0x46, + 0x75, 0x6c, 0x75, 0x48, 0x00, 0x52, 0x04, 0x66, 0x75, 0x6c, 0x75, 0x12, 0x58, 0x0a, 0x0c, 0x62, + 0x6c, 0x69, 0x6e, 0x64, 0x65, 0x64, 0x5f, 0x66, 0x75, 0x6c, 0x75, 0x18, 0x0c, 0x20, 0x01, 0x28, + 0x0b, 0x32, 0x33, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, + 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x53, 0x69, 0x67, 0x6e, 0x65, 0x64, + 0x42, 0x6c, 0x69, 0x6e, 0x64, 0x65, 0x64, 0x42, 0x65, 0x61, 0x63, 0x6f, 0x6e, 0x42, 0x6c, 0x6f, + 0x63, 0x6b, 0x46, 0x75, 0x6c, 0x75, 0x48, 0x00, 0x52, 0x0b, 0x62, 0x6c, 0x69, 0x6e, 0x64, 0x65, + 0x64, 0x46, 0x75, 0x6c, 0x75, 0x12, 0x45, 0x0a, 0x05, 0x67, 0x6c, 0x6f, 0x61, 0x73, 0x18, 0x0d, + 0x20, 0x01, 0x28, 0x0b, 0x32, 0x2d, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, + 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x53, 0x69, 0x67, + 0x6e, 0x65, 0x64, 0x42, 0x65, 0x61, 0x63, 0x6f, 0x6e, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x47, 0x6c, + 0x6f, 0x61, 0x73, 0x48, 0x00, 0x52, 0x05, 0x67, 0x6c, 0x6f, 0x61, 0x73, 0x12, 0x1d, 0x0a, 0x0a, + 0x69, 0x73, 0x5f, 
0x62, 0x6c, 0x69, 0x6e, 0x64, 0x65, 0x64, 0x18, 0x64, 0x20, 0x01, 0x28, 0x08, + 0x52, 0x09, 0x69, 0x73, 0x42, 0x6c, 0x69, 0x6e, 0x64, 0x65, 0x64, 0x3a, 0x02, 0x18, 0x01, 0x42, + 0x07, 0x0a, 0x05, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x4a, 0x04, 0x08, 0x65, 0x10, 0x66, 0x22, 0xe2, + 0x08, 0x0a, 0x12, 0x47, 0x65, 0x6e, 0x65, 0x72, 0x69, 0x63, 0x42, 0x65, 0x61, 0x63, 0x6f, 0x6e, + 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x12, 0x3c, 0x0a, 0x06, 0x70, 0x68, 0x61, 0x73, 0x65, 0x30, 0x18, + 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x22, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x42, 0x65, - 0x61, 0x63, 0x6f, 0x6e, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x43, 0x61, 0x70, 0x65, 0x6c, 0x6c, 0x61, - 0x52, 0x05, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x12, 0x24, 0x0a, 0x09, 0x73, 0x69, 0x67, 0x6e, 0x61, - 0x74, 0x75, 0x72, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x06, 0x8a, 0xb5, 0x18, 0x02, - 0x39, 0x36, 0x52, 0x09, 0x73, 0x69, 0x67, 0x6e, 0x61, 0x74, 0x75, 0x72, 0x65, 0x22, 0xf8, 0x02, - 0x0a, 0x12, 0x42, 0x65, 0x61, 0x63, 0x6f, 0x6e, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x43, 0x61, 0x70, - 0x65, 0x6c, 0x6c, 0x61, 0x12, 0x58, 0x0a, 0x04, 0x73, 0x6c, 0x6f, 0x74, 0x18, 0x01, 0x20, 0x01, - 0x28, 0x04, 0x42, 0x44, 0x82, 0xb5, 0x18, 0x40, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, - 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, - 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, - 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, - 0x76, 0x65, 0x73, 0x2e, 0x53, 0x6c, 0x6f, 0x74, 0x52, 0x04, 0x73, 0x6c, 0x6f, 0x74, 0x12, 0x75, - 0x0a, 0x0e, 0x70, 0x72, 0x6f, 0x70, 0x6f, 0x73, 0x65, 0x72, 0x5f, 0x69, 0x6e, 0x64, 0x65, 0x78, - 0x18, 0x02, 0x20, 0x01, 0x28, 0x04, 0x42, 0x4e, 0x82, 0xb5, 0x18, 0x4a, 0x67, 0x69, 0x74, 0x68, + 0x61, 0x63, 0x6f, 0x6e, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x48, 0x00, 0x52, 0x06, 0x70, 0x68, 0x61, + 0x73, 0x65, 0x30, 0x12, 0x42, 0x0a, 0x06, 0x61, 0x6c, 0x74, 0x61, 0x69, 0x72, 0x18, 0x02, 0x20, + 0x01, 0x28, 0x0b, 0x32, 0x28, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, + 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x42, 0x65, 0x61, 0x63, + 0x6f, 0x6e, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x41, 0x6c, 0x74, 0x61, 0x69, 0x72, 0x48, 0x00, 0x52, + 0x06, 0x61, 0x6c, 0x74, 0x61, 0x69, 0x72, 0x12, 0x4b, 0x0a, 0x09, 0x62, 0x65, 0x6c, 0x6c, 0x61, + 0x74, 0x72, 0x69, 0x78, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x2b, 0x2e, 0x65, 0x74, 0x68, + 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, + 0x61, 0x31, 0x2e, 0x42, 0x65, 0x61, 0x63, 0x6f, 0x6e, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x42, 0x65, + 0x6c, 0x6c, 0x61, 0x74, 0x72, 0x69, 0x78, 0x48, 0x00, 0x52, 0x09, 0x62, 0x65, 0x6c, 0x6c, 0x61, + 0x74, 0x72, 0x69, 0x78, 0x12, 0x61, 0x0a, 0x11, 0x62, 0x6c, 0x69, 0x6e, 0x64, 0x65, 0x64, 0x5f, + 0x62, 0x65, 0x6c, 0x6c, 0x61, 0x74, 0x72, 0x69, 0x78, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, + 0x32, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, + 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x42, 0x6c, 0x69, 0x6e, 0x64, 0x65, 0x64, 0x42, + 0x65, 0x61, 0x63, 0x6f, 0x6e, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x42, 0x65, 0x6c, 0x6c, 0x61, 0x74, + 0x72, 0x69, 0x78, 0x48, 0x00, 0x52, 0x10, 0x62, 0x6c, 0x69, 0x6e, 0x64, 0x65, 0x64, 0x42, 0x65, + 0x6c, 0x6c, 0x61, 0x74, 0x72, 0x69, 0x78, 
0x12, 0x45, 0x0a, 0x07, 0x63, 0x61, 0x70, 0x65, 0x6c, + 0x6c, 0x61, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x29, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, + 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, + 0x2e, 0x42, 0x65, 0x61, 0x63, 0x6f, 0x6e, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x43, 0x61, 0x70, 0x65, + 0x6c, 0x6c, 0x61, 0x48, 0x00, 0x52, 0x07, 0x63, 0x61, 0x70, 0x65, 0x6c, 0x6c, 0x61, 0x12, 0x5b, + 0x0a, 0x0f, 0x62, 0x6c, 0x69, 0x6e, 0x64, 0x65, 0x64, 0x5f, 0x63, 0x61, 0x70, 0x65, 0x6c, 0x6c, + 0x61, 0x18, 0x06, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x30, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, + 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, + 0x42, 0x6c, 0x69, 0x6e, 0x64, 0x65, 0x64, 0x42, 0x65, 0x61, 0x63, 0x6f, 0x6e, 0x42, 0x6c, 0x6f, + 0x63, 0x6b, 0x43, 0x61, 0x70, 0x65, 0x6c, 0x6c, 0x61, 0x48, 0x00, 0x52, 0x0e, 0x62, 0x6c, 0x69, + 0x6e, 0x64, 0x65, 0x64, 0x43, 0x61, 0x70, 0x65, 0x6c, 0x6c, 0x61, 0x12, 0x47, 0x0a, 0x05, 0x64, + 0x65, 0x6e, 0x65, 0x62, 0x18, 0x07, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x2f, 0x2e, 0x65, 0x74, 0x68, + 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, + 0x61, 0x31, 0x2e, 0x42, 0x65, 0x61, 0x63, 0x6f, 0x6e, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x43, 0x6f, + 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x73, 0x44, 0x65, 0x6e, 0x65, 0x62, 0x48, 0x00, 0x52, 0x05, 0x64, + 0x65, 0x6e, 0x65, 0x62, 0x12, 0x55, 0x0a, 0x0d, 0x62, 0x6c, 0x69, 0x6e, 0x64, 0x65, 0x64, 0x5f, + 0x64, 0x65, 0x6e, 0x65, 0x62, 0x18, 0x08, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x2e, 0x2e, 0x65, 0x74, + 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, + 0x68, 0x61, 0x31, 0x2e, 0x42, 0x6c, 0x69, 0x6e, 0x64, 0x65, 0x64, 0x42, 0x65, 0x61, 0x63, 0x6f, + 0x6e, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x44, 0x65, 0x6e, 0x65, 0x62, 0x48, 0x00, 0x52, 0x0c, 0x62, + 0x6c, 0x69, 0x6e, 0x64, 0x65, 0x64, 0x44, 0x65, 0x6e, 0x65, 0x62, 0x12, 0x4d, 0x0a, 0x07, 0x65, + 0x6c, 0x65, 0x63, 0x74, 0x72, 0x61, 0x18, 0x09, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x31, 0x2e, 0x65, + 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, + 0x70, 0x68, 0x61, 0x31, 0x2e, 0x42, 0x65, 0x61, 0x63, 0x6f, 0x6e, 0x42, 0x6c, 0x6f, 0x63, 0x6b, + 0x43, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x73, 0x45, 0x6c, 0x65, 0x63, 0x74, 0x72, 0x61, 0x48, + 0x00, 0x52, 0x07, 0x65, 0x6c, 0x65, 0x63, 0x74, 0x72, 0x61, 0x12, 0x5b, 0x0a, 0x0f, 0x62, 0x6c, + 0x69, 0x6e, 0x64, 0x65, 0x64, 0x5f, 0x65, 0x6c, 0x65, 0x63, 0x74, 0x72, 0x61, 0x18, 0x0a, 0x20, + 0x01, 0x28, 0x0b, 0x32, 0x30, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, + 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x42, 0x6c, 0x69, 0x6e, + 0x64, 0x65, 0x64, 0x42, 0x65, 0x61, 0x63, 0x6f, 0x6e, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x45, 0x6c, + 0x65, 0x63, 0x74, 0x72, 0x61, 0x48, 0x00, 0x52, 0x0e, 0x62, 0x6c, 0x69, 0x6e, 0x64, 0x65, 0x64, + 0x45, 0x6c, 0x65, 0x63, 0x74, 0x72, 0x61, 0x12, 0x44, 0x0a, 0x04, 0x66, 0x75, 0x6c, 0x75, 0x18, + 0x0b, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x2e, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, + 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x42, 0x65, + 0x61, 0x63, 0x6f, 0x6e, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x43, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, + 0x73, 0x46, 0x75, 0x6c, 0x75, 0x48, 0x00, 0x52, 0x04, 0x66, 0x75, 0x6c, 0x75, 0x12, 0x52, 0x0a, + 0x0c, 0x62, 0x6c, 0x69, 0x6e, 0x64, 0x65, 0x64, 0x5f, 0x66, 0x75, 
0x6c, 0x75, 0x18, 0x0c, 0x20, + 0x01, 0x28, 0x0b, 0x32, 0x2d, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, + 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x42, 0x6c, 0x69, 0x6e, + 0x64, 0x65, 0x64, 0x42, 0x65, 0x61, 0x63, 0x6f, 0x6e, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x46, 0x75, + 0x6c, 0x75, 0x48, 0x00, 0x52, 0x0b, 0x62, 0x6c, 0x69, 0x6e, 0x64, 0x65, 0x64, 0x46, 0x75, 0x6c, + 0x75, 0x12, 0x3f, 0x0a, 0x05, 0x67, 0x6c, 0x6f, 0x61, 0x73, 0x18, 0x0d, 0x20, 0x01, 0x28, 0x0b, + 0x32, 0x27, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, + 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x42, 0x65, 0x61, 0x63, 0x6f, 0x6e, 0x42, + 0x6c, 0x6f, 0x63, 0x6b, 0x47, 0x6c, 0x6f, 0x61, 0x73, 0x48, 0x00, 0x52, 0x05, 0x67, 0x6c, 0x6f, + 0x61, 0x73, 0x12, 0x1d, 0x0a, 0x0a, 0x69, 0x73, 0x5f, 0x62, 0x6c, 0x69, 0x6e, 0x64, 0x65, 0x64, + 0x18, 0x64, 0x20, 0x01, 0x28, 0x08, 0x52, 0x09, 0x69, 0x73, 0x42, 0x6c, 0x69, 0x6e, 0x64, 0x65, + 0x64, 0x12, 0x23, 0x0a, 0x0d, 0x70, 0x61, 0x79, 0x6c, 0x6f, 0x61, 0x64, 0x5f, 0x76, 0x61, 0x6c, + 0x75, 0x65, 0x18, 0x65, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0c, 0x70, 0x61, 0x79, 0x6c, 0x6f, 0x61, + 0x64, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x18, 0x01, 0x42, 0x07, 0x0a, 0x05, 0x62, 0x6c, + 0x6f, 0x63, 0x6b, 0x22, 0x73, 0x0a, 0x11, 0x53, 0x69, 0x67, 0x6e, 0x65, 0x64, 0x42, 0x65, 0x61, + 0x63, 0x6f, 0x6e, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x12, 0x38, 0x0a, 0x05, 0x62, 0x6c, 0x6f, 0x63, + 0x6b, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x22, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, + 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, + 0x42, 0x65, 0x61, 0x63, 0x6f, 0x6e, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x52, 0x05, 0x62, 0x6c, 0x6f, + 0x63, 0x6b, 0x12, 0x24, 0x0a, 0x09, 0x73, 0x69, 0x67, 0x6e, 0x61, 0x74, 0x75, 0x72, 0x65, 0x18, + 0x02, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x06, 0x8a, 0xb5, 0x18, 0x02, 0x39, 0x36, 0x52, 0x09, 0x73, + 0x69, 0x67, 0x6e, 0x61, 0x74, 0x75, 0x72, 0x65, 0x22, 0xea, 0x02, 0x0a, 0x0b, 0x42, 0x65, 0x61, + 0x63, 0x6f, 0x6e, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x12, 0x58, 0x0a, 0x04, 0x73, 0x6c, 0x6f, 0x74, + 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, 0x42, 0x44, 0x82, 0xb5, 0x18, 0x40, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, - 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x56, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, - 0x72, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x52, 0x0d, 0x70, 0x72, 0x6f, 0x70, 0x6f, 0x73, 0x65, 0x72, - 0x49, 0x6e, 0x64, 0x65, 0x78, 0x12, 0x27, 0x0a, 0x0b, 0x70, 0x61, 0x72, 0x65, 0x6e, 0x74, 0x5f, - 0x72, 0x6f, 0x6f, 0x74, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x06, 0x8a, 0xb5, 0x18, 0x02, - 0x33, 0x32, 0x52, 0x0a, 0x70, 0x61, 0x72, 0x65, 0x6e, 0x74, 0x52, 0x6f, 0x6f, 0x74, 0x12, 0x25, - 0x0a, 0x0a, 0x73, 0x74, 0x61, 0x74, 0x65, 0x5f, 0x72, 0x6f, 0x6f, 0x74, 0x18, 0x04, 0x20, 0x01, - 0x28, 0x0c, 0x42, 0x06, 0x8a, 0xb5, 0x18, 0x02, 0x33, 0x32, 0x52, 0x09, 0x73, 0x74, 0x61, 0x74, - 0x65, 0x52, 0x6f, 0x6f, 0x74, 0x12, 0x41, 0x0a, 0x04, 0x62, 0x6f, 0x64, 0x79, 0x18, 0x05, 0x20, - 0x01, 0x28, 0x0b, 0x32, 0x2d, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, - 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x42, 0x65, 0x61, 0x63, - 
0x6f, 0x6e, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x42, 0x6f, 0x64, 0x79, 0x43, 0x61, 0x70, 0x65, 0x6c, - 0x6c, 0x61, 0x52, 0x04, 0x62, 0x6f, 0x64, 0x79, 0x22, 0xf3, 0x06, 0x0a, 0x16, 0x42, 0x65, 0x61, - 0x63, 0x6f, 0x6e, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x42, 0x6f, 0x64, 0x79, 0x43, 0x61, 0x70, 0x65, - 0x6c, 0x6c, 0x61, 0x12, 0x2b, 0x0a, 0x0d, 0x72, 0x61, 0x6e, 0x64, 0x61, 0x6f, 0x5f, 0x72, 0x65, - 0x76, 0x65, 0x61, 0x6c, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x06, 0x8a, 0xb5, 0x18, 0x02, - 0x39, 0x36, 0x52, 0x0c, 0x72, 0x61, 0x6e, 0x64, 0x61, 0x6f, 0x52, 0x65, 0x76, 0x65, 0x61, 0x6c, - 0x12, 0x3c, 0x0a, 0x09, 0x65, 0x74, 0x68, 0x31, 0x5f, 0x64, 0x61, 0x74, 0x61, 0x18, 0x02, 0x20, - 0x01, 0x28, 0x0b, 0x32, 0x1f, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, - 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x45, 0x74, 0x68, 0x31, - 0x44, 0x61, 0x74, 0x61, 0x52, 0x08, 0x65, 0x74, 0x68, 0x31, 0x44, 0x61, 0x74, 0x61, 0x12, 0x22, - 0x0a, 0x08, 0x67, 0x72, 0x61, 0x66, 0x66, 0x69, 0x74, 0x69, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0c, - 0x42, 0x06, 0x8a, 0xb5, 0x18, 0x02, 0x33, 0x32, 0x52, 0x08, 0x67, 0x72, 0x61, 0x66, 0x66, 0x69, - 0x74, 0x69, 0x12, 0x5e, 0x0a, 0x12, 0x70, 0x72, 0x6f, 0x70, 0x6f, 0x73, 0x65, 0x72, 0x5f, 0x73, - 0x6c, 0x61, 0x73, 0x68, 0x69, 0x6e, 0x67, 0x73, 0x18, 0x04, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x27, - 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, - 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x50, 0x72, 0x6f, 0x70, 0x6f, 0x73, 0x65, 0x72, 0x53, - 0x6c, 0x61, 0x73, 0x68, 0x69, 0x6e, 0x67, 0x42, 0x06, 0x92, 0xb5, 0x18, 0x02, 0x31, 0x36, 0x52, - 0x11, 0x70, 0x72, 0x6f, 0x70, 0x6f, 0x73, 0x65, 0x72, 0x53, 0x6c, 0x61, 0x73, 0x68, 0x69, 0x6e, - 0x67, 0x73, 0x12, 0x5d, 0x0a, 0x12, 0x61, 0x74, 0x74, 0x65, 0x73, 0x74, 0x65, 0x72, 0x5f, 0x73, - 0x6c, 0x61, 0x73, 0x68, 0x69, 0x6e, 0x67, 0x73, 0x18, 0x05, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x27, - 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, - 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x41, 0x74, 0x74, 0x65, 0x73, 0x74, 0x65, 0x72, 0x53, - 0x6c, 0x61, 0x73, 0x68, 0x69, 0x6e, 0x67, 0x42, 0x05, 0x92, 0xb5, 0x18, 0x01, 0x32, 0x52, 0x11, - 0x61, 0x74, 0x74, 0x65, 0x73, 0x74, 0x65, 0x72, 0x53, 0x6c, 0x61, 0x73, 0x68, 0x69, 0x6e, 0x67, - 0x73, 0x12, 0x4f, 0x0a, 0x0c, 0x61, 0x74, 0x74, 0x65, 0x73, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, - 0x73, 0x18, 0x06, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x22, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, + 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x53, 0x6c, 0x6f, 0x74, 0x52, 0x04, 0x73, 0x6c, + 0x6f, 0x74, 0x12, 0x75, 0x0a, 0x0e, 0x70, 0x72, 0x6f, 0x70, 0x6f, 0x73, 0x65, 0x72, 0x5f, 0x69, + 0x6e, 0x64, 0x65, 0x78, 0x18, 0x02, 0x20, 0x01, 0x28, 0x04, 0x42, 0x4e, 0x82, 0xb5, 0x18, 0x4a, + 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, + 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, + 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, + 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x56, 0x61, 0x6c, 0x69, + 0x64, 0x61, 0x74, 0x6f, 0x72, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x52, 0x0d, 0x70, 0x72, 0x6f, 0x70, + 0x6f, 0x73, 0x65, 0x72, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x12, 0x27, 0x0a, 0x0b, 0x70, 0x61, 0x72, + 0x65, 0x6e, 0x74, 0x5f, 0x72, 0x6f, 0x6f, 0x74, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x06, + 0x8a, 0xb5, 0x18, 0x02, 
0x33, 0x32, 0x52, 0x0a, 0x70, 0x61, 0x72, 0x65, 0x6e, 0x74, 0x52, 0x6f, + 0x6f, 0x74, 0x12, 0x25, 0x0a, 0x0a, 0x73, 0x74, 0x61, 0x74, 0x65, 0x5f, 0x72, 0x6f, 0x6f, 0x74, + 0x18, 0x04, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x06, 0x8a, 0xb5, 0x18, 0x02, 0x33, 0x32, 0x52, 0x09, + 0x73, 0x74, 0x61, 0x74, 0x65, 0x52, 0x6f, 0x6f, 0x74, 0x12, 0x3a, 0x0a, 0x04, 0x62, 0x6f, 0x64, + 0x79, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x26, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, - 0x41, 0x74, 0x74, 0x65, 0x73, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x42, 0x07, 0x92, 0xb5, 0x18, - 0x03, 0x31, 0x32, 0x38, 0x52, 0x0c, 0x61, 0x74, 0x74, 0x65, 0x73, 0x74, 0x61, 0x74, 0x69, 0x6f, - 0x6e, 0x73, 0x12, 0x42, 0x0a, 0x08, 0x64, 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x73, 0x18, 0x07, - 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1e, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, - 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x44, 0x65, 0x70, - 0x6f, 0x73, 0x69, 0x74, 0x42, 0x06, 0x92, 0xb5, 0x18, 0x02, 0x31, 0x36, 0x52, 0x08, 0x64, 0x65, - 0x70, 0x6f, 0x73, 0x69, 0x74, 0x73, 0x12, 0x5b, 0x0a, 0x0f, 0x76, 0x6f, 0x6c, 0x75, 0x6e, 0x74, - 0x61, 0x72, 0x79, 0x5f, 0x65, 0x78, 0x69, 0x74, 0x73, 0x18, 0x08, 0x20, 0x03, 0x28, 0x0b, 0x32, - 0x2a, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, - 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x53, 0x69, 0x67, 0x6e, 0x65, 0x64, 0x56, 0x6f, - 0x6c, 0x75, 0x6e, 0x74, 0x61, 0x72, 0x79, 0x45, 0x78, 0x69, 0x74, 0x42, 0x06, 0x92, 0xb5, 0x18, - 0x02, 0x31, 0x36, 0x52, 0x0e, 0x76, 0x6f, 0x6c, 0x75, 0x6e, 0x74, 0x61, 0x72, 0x79, 0x45, 0x78, - 0x69, 0x74, 0x73, 0x12, 0x4b, 0x0a, 0x0e, 0x73, 0x79, 0x6e, 0x63, 0x5f, 0x61, 0x67, 0x67, 0x72, - 0x65, 0x67, 0x61, 0x74, 0x65, 0x18, 0x09, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x24, 0x2e, 0x65, 0x74, - 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, - 0x68, 0x61, 0x31, 0x2e, 0x53, 0x79, 0x6e, 0x63, 0x41, 0x67, 0x67, 0x72, 0x65, 0x67, 0x61, 0x74, - 0x65, 0x52, 0x0d, 0x73, 0x79, 0x6e, 0x63, 0x41, 0x67, 0x67, 0x72, 0x65, 0x67, 0x61, 0x74, 0x65, - 0x12, 0x58, 0x0a, 0x11, 0x65, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x70, 0x61, - 0x79, 0x6c, 0x6f, 0x61, 0x64, 0x18, 0x0a, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x2b, 0x2e, 0x65, 0x74, - 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x6e, 0x67, 0x69, 0x6e, 0x65, 0x2e, 0x76, 0x31, - 0x2e, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x50, 0x61, 0x79, 0x6c, 0x6f, 0x61, - 0x64, 0x43, 0x61, 0x70, 0x65, 0x6c, 0x6c, 0x61, 0x52, 0x10, 0x65, 0x78, 0x65, 0x63, 0x75, 0x74, - 0x69, 0x6f, 0x6e, 0x50, 0x61, 0x79, 0x6c, 0x6f, 0x61, 0x64, 0x12, 0x72, 0x0a, 0x18, 0x62, 0x6c, - 0x73, 0x5f, 0x74, 0x6f, 0x5f, 0x65, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x63, - 0x68, 0x61, 0x6e, 0x67, 0x65, 0x73, 0x18, 0x0b, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x31, 0x2e, 0x65, + 0x42, 0x65, 0x61, 0x63, 0x6f, 0x6e, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x42, 0x6f, 0x64, 0x79, 0x52, + 0x04, 0x62, 0x6f, 0x64, 0x79, 0x22, 0xd1, 0x04, 0x0a, 0x0f, 0x42, 0x65, 0x61, 0x63, 0x6f, 0x6e, + 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x42, 0x6f, 0x64, 0x79, 0x12, 0x2b, 0x0a, 0x0d, 0x72, 0x61, 0x6e, + 0x64, 0x61, 0x6f, 0x5f, 0x72, 0x65, 0x76, 0x65, 0x61, 0x6c, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, + 0x42, 0x06, 0x8a, 0xb5, 0x18, 0x02, 0x39, 0x36, 0x52, 0x0c, 0x72, 0x61, 0x6e, 0x64, 0x61, 0x6f, + 0x52, 0x65, 0x76, 0x65, 0x61, 0x6c, 0x12, 0x3c, 
0x0a, 0x09, 0x65, 0x74, 0x68, 0x31, 0x5f, 0x64, + 0x61, 0x74, 0x61, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1f, 0x2e, 0x65, 0x74, 0x68, 0x65, + 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, + 0x31, 0x2e, 0x45, 0x74, 0x68, 0x31, 0x44, 0x61, 0x74, 0x61, 0x52, 0x08, 0x65, 0x74, 0x68, 0x31, + 0x44, 0x61, 0x74, 0x61, 0x12, 0x22, 0x0a, 0x08, 0x67, 0x72, 0x61, 0x66, 0x66, 0x69, 0x74, 0x69, + 0x18, 0x03, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x06, 0x8a, 0xb5, 0x18, 0x02, 0x33, 0x32, 0x52, 0x08, + 0x67, 0x72, 0x61, 0x66, 0x66, 0x69, 0x74, 0x69, 0x12, 0x5e, 0x0a, 0x12, 0x70, 0x72, 0x6f, 0x70, + 0x6f, 0x73, 0x65, 0x72, 0x5f, 0x73, 0x6c, 0x61, 0x73, 0x68, 0x69, 0x6e, 0x67, 0x73, 0x18, 0x04, + 0x20, 0x03, 0x28, 0x0b, 0x32, 0x27, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, + 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x50, 0x72, 0x6f, + 0x70, 0x6f, 0x73, 0x65, 0x72, 0x53, 0x6c, 0x61, 0x73, 0x68, 0x69, 0x6e, 0x67, 0x42, 0x06, 0x92, + 0xb5, 0x18, 0x02, 0x31, 0x36, 0x52, 0x11, 0x70, 0x72, 0x6f, 0x70, 0x6f, 0x73, 0x65, 0x72, 0x53, + 0x6c, 0x61, 0x73, 0x68, 0x69, 0x6e, 0x67, 0x73, 0x12, 0x5d, 0x0a, 0x12, 0x61, 0x74, 0x74, 0x65, + 0x73, 0x74, 0x65, 0x72, 0x5f, 0x73, 0x6c, 0x61, 0x73, 0x68, 0x69, 0x6e, 0x67, 0x73, 0x18, 0x05, + 0x20, 0x03, 0x28, 0x0b, 0x32, 0x27, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, + 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x41, 0x74, 0x74, + 0x65, 0x73, 0x74, 0x65, 0x72, 0x53, 0x6c, 0x61, 0x73, 0x68, 0x69, 0x6e, 0x67, 0x42, 0x05, 0x92, + 0xb5, 0x18, 0x01, 0x32, 0x52, 0x11, 0x61, 0x74, 0x74, 0x65, 0x73, 0x74, 0x65, 0x72, 0x53, 0x6c, + 0x61, 0x73, 0x68, 0x69, 0x6e, 0x67, 0x73, 0x12, 0x4f, 0x0a, 0x0c, 0x61, 0x74, 0x74, 0x65, 0x73, + 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x18, 0x06, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x22, 0x2e, + 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, + 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x41, 0x74, 0x74, 0x65, 0x73, 0x74, 0x61, 0x74, 0x69, 0x6f, + 0x6e, 0x42, 0x07, 0x92, 0xb5, 0x18, 0x03, 0x31, 0x32, 0x38, 0x52, 0x0c, 0x61, 0x74, 0x74, 0x65, + 0x73, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x42, 0x0a, 0x08, 0x64, 0x65, 0x70, 0x6f, + 0x73, 0x69, 0x74, 0x73, 0x18, 0x07, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1e, 0x2e, 0x65, 0x74, 0x68, + 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, + 0x61, 0x31, 0x2e, 0x44, 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x42, 0x06, 0x92, 0xb5, 0x18, 0x02, + 0x31, 0x36, 0x52, 0x08, 0x64, 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x73, 0x12, 0x5b, 0x0a, 0x0f, + 0x76, 0x6f, 0x6c, 0x75, 0x6e, 0x74, 0x61, 0x72, 0x79, 0x5f, 0x65, 0x78, 0x69, 0x74, 0x73, 0x18, + 0x08, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x2a, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, + 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x53, 0x69, + 0x67, 0x6e, 0x65, 0x64, 0x56, 0x6f, 0x6c, 0x75, 0x6e, 0x74, 0x61, 0x72, 0x79, 0x45, 0x78, 0x69, + 0x74, 0x42, 0x06, 0x92, 0xb5, 0x18, 0x02, 0x31, 0x36, 0x52, 0x0e, 0x76, 0x6f, 0x6c, 0x75, 0x6e, + 0x74, 0x61, 0x72, 0x79, 0x45, 0x78, 0x69, 0x74, 0x73, 0x22, 0xb2, 0x01, 0x0a, 0x10, 0x41, 0x74, + 0x74, 0x65, 0x73, 0x74, 0x65, 0x72, 0x53, 0x6c, 0x61, 0x73, 0x68, 0x69, 0x6e, 0x67, 0x12, 0x4e, + 0x0a, 0x0d, 0x61, 0x74, 0x74, 0x65, 0x73, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x31, 0x18, + 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x29, 0x2e, 0x65, 0x74, 0x68, 0x65, 
0x72, 0x65, 0x75, 0x6d, + 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x49, 0x6e, + 0x64, 0x65, 0x78, 0x65, 0x64, 0x41, 0x74, 0x74, 0x65, 0x73, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, + 0x52, 0x0c, 0x61, 0x74, 0x74, 0x65, 0x73, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x31, 0x12, 0x4e, + 0x0a, 0x0d, 0x61, 0x74, 0x74, 0x65, 0x73, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x32, 0x18, + 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x29, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, + 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x49, 0x6e, + 0x64, 0x65, 0x78, 0x65, 0x64, 0x41, 0x74, 0x74, 0x65, 0x73, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, + 0x52, 0x0c, 0x61, 0x74, 0x74, 0x65, 0x73, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x32, 0x22, 0xad, + 0x01, 0x0a, 0x12, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x65, 0x64, 0x41, 0x74, 0x74, 0x65, 0x73, 0x74, + 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x35, 0x0a, 0x11, 0x61, 0x74, 0x74, 0x65, 0x73, 0x74, 0x69, + 0x6e, 0x67, 0x5f, 0x69, 0x6e, 0x64, 0x69, 0x63, 0x65, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x04, + 0x42, 0x08, 0x92, 0xb5, 0x18, 0x04, 0x32, 0x30, 0x34, 0x38, 0x52, 0x10, 0x61, 0x74, 0x74, 0x65, + 0x73, 0x74, 0x69, 0x6e, 0x67, 0x49, 0x6e, 0x64, 0x69, 0x63, 0x65, 0x73, 0x12, 0x3a, 0x0a, 0x04, + 0x64, 0x61, 0x74, 0x61, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x26, 0x2e, 0x65, 0x74, 0x68, + 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, + 0x61, 0x31, 0x2e, 0x41, 0x74, 0x74, 0x65, 0x73, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x44, 0x61, + 0x74, 0x61, 0x52, 0x04, 0x64, 0x61, 0x74, 0x61, 0x12, 0x24, 0x0a, 0x09, 0x73, 0x69, 0x67, 0x6e, + 0x61, 0x74, 0x75, 0x72, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x06, 0x8a, 0xb5, 0x18, + 0x02, 0x39, 0x36, 0x52, 0x09, 0x73, 0x69, 0x67, 0x6e, 0x61, 0x74, 0x75, 0x72, 0x65, 0x22, 0x76, + 0x0a, 0x1e, 0x53, 0x69, 0x67, 0x6e, 0x65, 0x64, 0x56, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, + 0x72, 0x52, 0x65, 0x67, 0x69, 0x73, 0x74, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x56, 0x31, + 0x12, 0x50, 0x0a, 0x08, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x73, 0x18, 0x01, 0x20, 0x03, + 0x28, 0x0b, 0x32, 0x34, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, + 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x53, 0x69, 0x67, 0x6e, 0x65, + 0x64, 0x56, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, 0x52, 0x65, 0x67, 0x69, 0x73, 0x74, + 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x56, 0x31, 0x52, 0x08, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, + 0x65, 0x73, 0x3a, 0x02, 0x18, 0x01, 0x22, 0x8f, 0x01, 0x0a, 0x1d, 0x53, 0x69, 0x67, 0x6e, 0x65, + 0x64, 0x56, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, 0x52, 0x65, 0x67, 0x69, 0x73, 0x74, + 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x56, 0x31, 0x12, 0x48, 0x0a, 0x07, 0x6d, 0x65, 0x73, 0x73, + 0x61, 0x67, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x2e, 0x2e, 0x65, 0x74, 0x68, 0x65, + 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, + 0x31, 0x2e, 0x56, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, 0x52, 0x65, 0x67, 0x69, 0x73, + 0x74, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x56, 0x31, 0x52, 0x07, 0x6d, 0x65, 0x73, 0x73, 0x61, + 0x67, 0x65, 0x12, 0x24, 0x0a, 0x09, 0x73, 0x69, 0x67, 0x6e, 0x61, 0x74, 0x75, 0x72, 0x65, 0x18, + 0x02, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x06, 0x8a, 0xb5, 0x18, 0x02, 0x39, 0x36, 0x52, 0x09, 0x73, + 0x69, 0x67, 0x6e, 0x61, 0x74, 0x75, 0x72, 0x65, 0x22, 0xa1, 0x01, 0x0a, 0x17, 0x56, 0x61, 0x6c, + 
0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, 0x52, 0x65, 0x67, 0x69, 0x73, 0x74, 0x72, 0x61, 0x74, 0x69, + 0x6f, 0x6e, 0x56, 0x31, 0x12, 0x2b, 0x0a, 0x0d, 0x66, 0x65, 0x65, 0x5f, 0x72, 0x65, 0x63, 0x69, + 0x70, 0x69, 0x65, 0x6e, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x06, 0x8a, 0xb5, 0x18, + 0x02, 0x32, 0x30, 0x52, 0x0c, 0x66, 0x65, 0x65, 0x52, 0x65, 0x63, 0x69, 0x70, 0x69, 0x65, 0x6e, + 0x74, 0x12, 0x1b, 0x0a, 0x09, 0x67, 0x61, 0x73, 0x5f, 0x6c, 0x69, 0x6d, 0x69, 0x74, 0x18, 0x02, + 0x20, 0x01, 0x28, 0x04, 0x52, 0x08, 0x67, 0x61, 0x73, 0x4c, 0x69, 0x6d, 0x69, 0x74, 0x12, 0x1c, + 0x0a, 0x09, 0x74, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x18, 0x03, 0x20, 0x01, 0x28, + 0x04, 0x52, 0x09, 0x74, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x12, 0x1e, 0x0a, 0x06, + 0x70, 0x75, 0x62, 0x6b, 0x65, 0x79, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x06, 0x8a, 0xb5, + 0x18, 0x02, 0x34, 0x38, 0x52, 0x06, 0x70, 0x75, 0x62, 0x6b, 0x65, 0x79, 0x22, 0x75, 0x0a, 0x10, + 0x53, 0x69, 0x67, 0x6e, 0x65, 0x64, 0x42, 0x75, 0x69, 0x6c, 0x64, 0x65, 0x72, 0x42, 0x69, 0x64, + 0x12, 0x3b, 0x0a, 0x07, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, + 0x0b, 0x32, 0x21, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, + 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x42, 0x75, 0x69, 0x6c, 0x64, 0x65, + 0x72, 0x42, 0x69, 0x64, 0x52, 0x07, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x12, 0x24, 0x0a, + 0x09, 0x73, 0x69, 0x67, 0x6e, 0x61, 0x74, 0x75, 0x72, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0c, + 0x42, 0x06, 0x8a, 0xb5, 0x18, 0x02, 0x39, 0x36, 0x52, 0x09, 0x73, 0x69, 0x67, 0x6e, 0x61, 0x74, + 0x75, 0x72, 0x65, 0x22, 0x8e, 0x01, 0x0a, 0x0a, 0x42, 0x75, 0x69, 0x6c, 0x64, 0x65, 0x72, 0x42, + 0x69, 0x64, 0x12, 0x42, 0x0a, 0x06, 0x68, 0x65, 0x61, 0x64, 0x65, 0x72, 0x18, 0x01, 0x20, 0x01, + 0x28, 0x0b, 0x32, 0x2a, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x6e, + 0x67, 0x69, 0x6e, 0x65, 0x2e, 0x76, 0x31, 0x2e, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, + 0x6e, 0x50, 0x61, 0x79, 0x6c, 0x6f, 0x61, 0x64, 0x48, 0x65, 0x61, 0x64, 0x65, 0x72, 0x52, 0x06, + 0x68, 0x65, 0x61, 0x64, 0x65, 0x72, 0x12, 0x1c, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, + 0x02, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x06, 0x8a, 0xb5, 0x18, 0x02, 0x33, 0x32, 0x52, 0x05, 0x76, + 0x61, 0x6c, 0x75, 0x65, 0x12, 0x1e, 0x0a, 0x06, 0x70, 0x75, 0x62, 0x6b, 0x65, 0x79, 0x18, 0x03, + 0x20, 0x01, 0x28, 0x0c, 0x42, 0x06, 0x8a, 0xb5, 0x18, 0x02, 0x34, 0x38, 0x52, 0x06, 0x70, 0x75, + 0x62, 0x6b, 0x65, 0x79, 0x22, 0x7f, 0x0a, 0x17, 0x53, 0x69, 0x67, 0x6e, 0x65, 0x64, 0x42, 0x65, + 0x61, 0x63, 0x6f, 0x6e, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x41, 0x6c, 0x74, 0x61, 0x69, 0x72, 0x12, + 0x3e, 0x0a, 0x05, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x28, + 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, + 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x42, 0x65, 0x61, 0x63, 0x6f, 0x6e, 0x42, 0x6c, 0x6f, + 0x63, 0x6b, 0x41, 0x6c, 0x74, 0x61, 0x69, 0x72, 0x52, 0x05, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x12, + 0x24, 0x0a, 0x09, 0x73, 0x69, 0x67, 0x6e, 0x61, 0x74, 0x75, 0x72, 0x65, 0x18, 0x02, 0x20, 0x01, + 0x28, 0x0c, 0x42, 0x06, 0x8a, 0xb5, 0x18, 0x02, 0x39, 0x36, 0x52, 0x09, 0x73, 0x69, 0x67, 0x6e, + 0x61, 0x74, 0x75, 0x72, 0x65, 0x22, 0xf6, 0x02, 0x0a, 0x11, 0x42, 0x65, 0x61, 0x63, 0x6f, 0x6e, + 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x41, 0x6c, 0x74, 0x61, 0x69, 0x72, 0x12, 0x58, 0x0a, 0x04, 0x73, + 0x6c, 0x6f, 0x74, 0x18, 
0x01, 0x20, 0x01, 0x28, 0x04, 0x42, 0x44, 0x82, 0xb5, 0x18, 0x40, 0x67, + 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, + 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, + 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, + 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x53, 0x6c, 0x6f, 0x74, 0x52, + 0x04, 0x73, 0x6c, 0x6f, 0x74, 0x12, 0x75, 0x0a, 0x0e, 0x70, 0x72, 0x6f, 0x70, 0x6f, 0x73, 0x65, + 0x72, 0x5f, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x18, 0x02, 0x20, 0x01, 0x28, 0x04, 0x42, 0x4e, 0x82, + 0xb5, 0x18, 0x4a, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, + 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, + 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, + 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x56, + 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x52, 0x0d, 0x70, + 0x72, 0x6f, 0x70, 0x6f, 0x73, 0x65, 0x72, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x12, 0x27, 0x0a, 0x0b, + 0x70, 0x61, 0x72, 0x65, 0x6e, 0x74, 0x5f, 0x72, 0x6f, 0x6f, 0x74, 0x18, 0x03, 0x20, 0x01, 0x28, + 0x0c, 0x42, 0x06, 0x8a, 0xb5, 0x18, 0x02, 0x33, 0x32, 0x52, 0x0a, 0x70, 0x61, 0x72, 0x65, 0x6e, + 0x74, 0x52, 0x6f, 0x6f, 0x74, 0x12, 0x25, 0x0a, 0x0a, 0x73, 0x74, 0x61, 0x74, 0x65, 0x5f, 0x72, + 0x6f, 0x6f, 0x74, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x06, 0x8a, 0xb5, 0x18, 0x02, 0x33, + 0x32, 0x52, 0x09, 0x73, 0x74, 0x61, 0x74, 0x65, 0x52, 0x6f, 0x6f, 0x74, 0x12, 0x40, 0x0a, 0x04, + 0x62, 0x6f, 0x64, 0x79, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x2c, 0x2e, 0x65, 0x74, 0x68, + 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, + 0x61, 0x31, 0x2e, 0x42, 0x65, 0x61, 0x63, 0x6f, 0x6e, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x42, 0x6f, + 0x64, 0x79, 0x41, 0x6c, 0x74, 0x61, 0x69, 0x72, 0x52, 0x04, 0x62, 0x6f, 0x64, 0x79, 0x22, 0xa4, + 0x05, 0x0a, 0x15, 0x42, 0x65, 0x61, 0x63, 0x6f, 0x6e, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x42, 0x6f, + 0x64, 0x79, 0x41, 0x6c, 0x74, 0x61, 0x69, 0x72, 0x12, 0x2b, 0x0a, 0x0d, 0x72, 0x61, 0x6e, 0x64, + 0x61, 0x6f, 0x5f, 0x72, 0x65, 0x76, 0x65, 0x61, 0x6c, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x42, + 0x06, 0x8a, 0xb5, 0x18, 0x02, 0x39, 0x36, 0x52, 0x0c, 0x72, 0x61, 0x6e, 0x64, 0x61, 0x6f, 0x52, + 0x65, 0x76, 0x65, 0x61, 0x6c, 0x12, 0x3c, 0x0a, 0x09, 0x65, 0x74, 0x68, 0x31, 0x5f, 0x64, 0x61, + 0x74, 0x61, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1f, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, + 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, + 0x2e, 0x45, 0x74, 0x68, 0x31, 0x44, 0x61, 0x74, 0x61, 0x52, 0x08, 0x65, 0x74, 0x68, 0x31, 0x44, + 0x61, 0x74, 0x61, 0x12, 0x22, 0x0a, 0x08, 0x67, 0x72, 0x61, 0x66, 0x66, 0x69, 0x74, 0x69, 0x18, + 0x03, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x06, 0x8a, 0xb5, 0x18, 0x02, 0x33, 0x32, 0x52, 0x08, 0x67, + 0x72, 0x61, 0x66, 0x66, 0x69, 0x74, 0x69, 0x12, 0x5e, 0x0a, 0x12, 0x70, 0x72, 0x6f, 0x70, 0x6f, + 0x73, 0x65, 0x72, 0x5f, 0x73, 0x6c, 0x61, 0x73, 0x68, 0x69, 0x6e, 0x67, 0x73, 0x18, 0x04, 0x20, + 0x03, 0x28, 0x0b, 0x32, 0x27, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, + 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x50, 0x72, 0x6f, 0x70, + 0x6f, 0x73, 0x65, 0x72, 0x53, 0x6c, 0x61, 0x73, 
0x68, 0x69, 0x6e, 0x67, 0x42, 0x06, 0x92, 0xb5, + 0x18, 0x02, 0x31, 0x36, 0x52, 0x11, 0x70, 0x72, 0x6f, 0x70, 0x6f, 0x73, 0x65, 0x72, 0x53, 0x6c, + 0x61, 0x73, 0x68, 0x69, 0x6e, 0x67, 0x73, 0x12, 0x5d, 0x0a, 0x12, 0x61, 0x74, 0x74, 0x65, 0x73, + 0x74, 0x65, 0x72, 0x5f, 0x73, 0x6c, 0x61, 0x73, 0x68, 0x69, 0x6e, 0x67, 0x73, 0x18, 0x05, 0x20, + 0x03, 0x28, 0x0b, 0x32, 0x27, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, + 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x41, 0x74, 0x74, 0x65, + 0x73, 0x74, 0x65, 0x72, 0x53, 0x6c, 0x61, 0x73, 0x68, 0x69, 0x6e, 0x67, 0x42, 0x05, 0x92, 0xb5, + 0x18, 0x01, 0x32, 0x52, 0x11, 0x61, 0x74, 0x74, 0x65, 0x73, 0x74, 0x65, 0x72, 0x53, 0x6c, 0x61, + 0x73, 0x68, 0x69, 0x6e, 0x67, 0x73, 0x12, 0x4f, 0x0a, 0x0c, 0x61, 0x74, 0x74, 0x65, 0x73, 0x74, + 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x18, 0x06, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x22, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, - 0x70, 0x68, 0x61, 0x31, 0x2e, 0x53, 0x69, 0x67, 0x6e, 0x65, 0x64, 0x42, 0x4c, 0x53, 0x54, 0x6f, - 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x43, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x42, - 0x06, 0x92, 0xb5, 0x18, 0x02, 0x31, 0x36, 0x52, 0x15, 0x62, 0x6c, 0x73, 0x54, 0x6f, 0x45, 0x78, - 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x43, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x73, 0x22, 0x8f, - 0x01, 0x0a, 0x1f, 0x53, 0x69, 0x67, 0x6e, 0x65, 0x64, 0x42, 0x6c, 0x69, 0x6e, 0x64, 0x65, 0x64, - 0x42, 0x65, 0x61, 0x63, 0x6f, 0x6e, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x43, 0x61, 0x70, 0x65, 0x6c, - 0x6c, 0x61, 0x12, 0x46, 0x0a, 0x05, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x18, 0x01, 0x20, 0x01, 0x28, - 0x0b, 0x32, 0x30, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, - 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x42, 0x6c, 0x69, 0x6e, 0x64, 0x65, - 0x64, 0x42, 0x65, 0x61, 0x63, 0x6f, 0x6e, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x43, 0x61, 0x70, 0x65, - 0x6c, 0x6c, 0x61, 0x52, 0x05, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x12, 0x24, 0x0a, 0x09, 0x73, 0x69, - 0x67, 0x6e, 0x61, 0x74, 0x75, 0x72, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x06, 0x8a, - 0xb5, 0x18, 0x02, 0x39, 0x36, 0x52, 0x09, 0x73, 0x69, 0x67, 0x6e, 0x61, 0x74, 0x75, 0x72, 0x65, - 0x22, 0x86, 0x03, 0x0a, 0x19, 0x42, 0x6c, 0x69, 0x6e, 0x64, 0x65, 0x64, 0x42, 0x65, 0x61, 0x63, + 0x70, 0x68, 0x61, 0x31, 0x2e, 0x41, 0x74, 0x74, 0x65, 0x73, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, + 0x42, 0x07, 0x92, 0xb5, 0x18, 0x03, 0x31, 0x32, 0x38, 0x52, 0x0c, 0x61, 0x74, 0x74, 0x65, 0x73, + 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x42, 0x0a, 0x08, 0x64, 0x65, 0x70, 0x6f, 0x73, + 0x69, 0x74, 0x73, 0x18, 0x07, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1e, 0x2e, 0x65, 0x74, 0x68, 0x65, + 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, + 0x31, 0x2e, 0x44, 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x42, 0x06, 0x92, 0xb5, 0x18, 0x02, 0x31, + 0x36, 0x52, 0x08, 0x64, 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x73, 0x12, 0x5b, 0x0a, 0x0f, 0x76, + 0x6f, 0x6c, 0x75, 0x6e, 0x74, 0x61, 0x72, 0x79, 0x5f, 0x65, 0x78, 0x69, 0x74, 0x73, 0x18, 0x08, + 0x20, 0x03, 0x28, 0x0b, 0x32, 0x2a, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, + 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x53, 0x69, 0x67, + 0x6e, 0x65, 0x64, 0x56, 0x6f, 0x6c, 0x75, 0x6e, 0x74, 0x61, 0x72, 0x79, 0x45, 0x78, 0x69, 0x74, + 0x42, 0x06, 0x92, 0xb5, 0x18, 0x02, 0x31, 0x36, 0x52, 0x0e, 0x76, 0x6f, 
0x6c, 0x75, 0x6e, 0x74, + 0x61, 0x72, 0x79, 0x45, 0x78, 0x69, 0x74, 0x73, 0x12, 0x4b, 0x0a, 0x0e, 0x73, 0x79, 0x6e, 0x63, + 0x5f, 0x61, 0x67, 0x67, 0x72, 0x65, 0x67, 0x61, 0x74, 0x65, 0x18, 0x09, 0x20, 0x01, 0x28, 0x0b, + 0x32, 0x24, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, + 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x53, 0x79, 0x6e, 0x63, 0x41, 0x67, 0x67, + 0x72, 0x65, 0x67, 0x61, 0x74, 0x65, 0x52, 0x0d, 0x73, 0x79, 0x6e, 0x63, 0x41, 0x67, 0x67, 0x72, + 0x65, 0x67, 0x61, 0x74, 0x65, 0x22, 0x85, 0x01, 0x0a, 0x1a, 0x53, 0x69, 0x67, 0x6e, 0x65, 0x64, + 0x42, 0x65, 0x61, 0x63, 0x6f, 0x6e, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x42, 0x65, 0x6c, 0x6c, 0x61, + 0x74, 0x72, 0x69, 0x78, 0x12, 0x41, 0x0a, 0x05, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x18, 0x01, 0x20, + 0x01, 0x28, 0x0b, 0x32, 0x2b, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, + 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x42, 0x65, 0x61, 0x63, + 0x6f, 0x6e, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x42, 0x65, 0x6c, 0x6c, 0x61, 0x74, 0x72, 0x69, 0x78, + 0x52, 0x05, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x12, 0x24, 0x0a, 0x09, 0x73, 0x69, 0x67, 0x6e, 0x61, + 0x74, 0x75, 0x72, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x06, 0x8a, 0xb5, 0x18, 0x02, + 0x39, 0x36, 0x52, 0x09, 0x73, 0x69, 0x67, 0x6e, 0x61, 0x74, 0x75, 0x72, 0x65, 0x22, 0xfc, 0x02, + 0x0a, 0x14, 0x42, 0x65, 0x61, 0x63, 0x6f, 0x6e, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x42, 0x65, 0x6c, + 0x6c, 0x61, 0x74, 0x72, 0x69, 0x78, 0x12, 0x58, 0x0a, 0x04, 0x73, 0x6c, 0x6f, 0x74, 0x18, 0x01, + 0x20, 0x01, 0x28, 0x04, 0x42, 0x44, 0x82, 0xb5, 0x18, 0x40, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, + 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, + 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, + 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, + 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x53, 0x6c, 0x6f, 0x74, 0x52, 0x04, 0x73, 0x6c, 0x6f, 0x74, + 0x12, 0x75, 0x0a, 0x0e, 0x70, 0x72, 0x6f, 0x70, 0x6f, 0x73, 0x65, 0x72, 0x5f, 0x69, 0x6e, 0x64, + 0x65, 0x78, 0x18, 0x02, 0x20, 0x01, 0x28, 0x04, 0x42, 0x4e, 0x82, 0xb5, 0x18, 0x4a, 0x67, 0x69, + 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, + 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, + 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, + 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x56, 0x61, 0x6c, 0x69, 0x64, 0x61, + 0x74, 0x6f, 0x72, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x52, 0x0d, 0x70, 0x72, 0x6f, 0x70, 0x6f, 0x73, + 0x65, 0x72, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x12, 0x27, 0x0a, 0x0b, 0x70, 0x61, 0x72, 0x65, 0x6e, + 0x74, 0x5f, 0x72, 0x6f, 0x6f, 0x74, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x06, 0x8a, 0xb5, + 0x18, 0x02, 0x33, 0x32, 0x52, 0x0a, 0x70, 0x61, 0x72, 0x65, 0x6e, 0x74, 0x52, 0x6f, 0x6f, 0x74, + 0x12, 0x25, 0x0a, 0x0a, 0x73, 0x74, 0x61, 0x74, 0x65, 0x5f, 0x72, 0x6f, 0x6f, 0x74, 0x18, 0x04, + 0x20, 0x01, 0x28, 0x0c, 0x42, 0x06, 0x8a, 0xb5, 0x18, 0x02, 0x33, 0x32, 0x52, 0x09, 0x73, 0x74, + 0x61, 0x74, 0x65, 0x52, 0x6f, 0x6f, 0x74, 0x12, 0x43, 0x0a, 0x04, 0x62, 0x6f, 0x64, 0x79, 0x18, + 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x2f, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, + 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x42, 0x65, + 
0x61, 0x63, 0x6f, 0x6e, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x42, 0x6f, 0x64, 0x79, 0x42, 0x65, 0x6c, + 0x6c, 0x61, 0x74, 0x72, 0x69, 0x78, 0x52, 0x04, 0x62, 0x6f, 0x64, 0x79, 0x22, 0xfa, 0x05, 0x0a, + 0x18, 0x42, 0x65, 0x61, 0x63, 0x6f, 0x6e, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x42, 0x6f, 0x64, 0x79, + 0x42, 0x65, 0x6c, 0x6c, 0x61, 0x74, 0x72, 0x69, 0x78, 0x12, 0x2b, 0x0a, 0x0d, 0x72, 0x61, 0x6e, + 0x64, 0x61, 0x6f, 0x5f, 0x72, 0x65, 0x76, 0x65, 0x61, 0x6c, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, + 0x42, 0x06, 0x8a, 0xb5, 0x18, 0x02, 0x39, 0x36, 0x52, 0x0c, 0x72, 0x61, 0x6e, 0x64, 0x61, 0x6f, + 0x52, 0x65, 0x76, 0x65, 0x61, 0x6c, 0x12, 0x3c, 0x0a, 0x09, 0x65, 0x74, 0x68, 0x31, 0x5f, 0x64, + 0x61, 0x74, 0x61, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1f, 0x2e, 0x65, 0x74, 0x68, 0x65, + 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, + 0x31, 0x2e, 0x45, 0x74, 0x68, 0x31, 0x44, 0x61, 0x74, 0x61, 0x52, 0x08, 0x65, 0x74, 0x68, 0x31, + 0x44, 0x61, 0x74, 0x61, 0x12, 0x22, 0x0a, 0x08, 0x67, 0x72, 0x61, 0x66, 0x66, 0x69, 0x74, 0x69, + 0x18, 0x03, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x06, 0x8a, 0xb5, 0x18, 0x02, 0x33, 0x32, 0x52, 0x08, + 0x67, 0x72, 0x61, 0x66, 0x66, 0x69, 0x74, 0x69, 0x12, 0x5e, 0x0a, 0x12, 0x70, 0x72, 0x6f, 0x70, + 0x6f, 0x73, 0x65, 0x72, 0x5f, 0x73, 0x6c, 0x61, 0x73, 0x68, 0x69, 0x6e, 0x67, 0x73, 0x18, 0x04, + 0x20, 0x03, 0x28, 0x0b, 0x32, 0x27, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, + 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x50, 0x72, 0x6f, + 0x70, 0x6f, 0x73, 0x65, 0x72, 0x53, 0x6c, 0x61, 0x73, 0x68, 0x69, 0x6e, 0x67, 0x42, 0x06, 0x92, + 0xb5, 0x18, 0x02, 0x31, 0x36, 0x52, 0x11, 0x70, 0x72, 0x6f, 0x70, 0x6f, 0x73, 0x65, 0x72, 0x53, + 0x6c, 0x61, 0x73, 0x68, 0x69, 0x6e, 0x67, 0x73, 0x12, 0x5d, 0x0a, 0x12, 0x61, 0x74, 0x74, 0x65, + 0x73, 0x74, 0x65, 0x72, 0x5f, 0x73, 0x6c, 0x61, 0x73, 0x68, 0x69, 0x6e, 0x67, 0x73, 0x18, 0x05, + 0x20, 0x03, 0x28, 0x0b, 0x32, 0x27, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, + 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x41, 0x74, 0x74, + 0x65, 0x73, 0x74, 0x65, 0x72, 0x53, 0x6c, 0x61, 0x73, 0x68, 0x69, 0x6e, 0x67, 0x42, 0x05, 0x92, + 0xb5, 0x18, 0x01, 0x32, 0x52, 0x11, 0x61, 0x74, 0x74, 0x65, 0x73, 0x74, 0x65, 0x72, 0x53, 0x6c, + 0x61, 0x73, 0x68, 0x69, 0x6e, 0x67, 0x73, 0x12, 0x4f, 0x0a, 0x0c, 0x61, 0x74, 0x74, 0x65, 0x73, + 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x18, 0x06, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x22, 0x2e, + 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, + 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x41, 0x74, 0x74, 0x65, 0x73, 0x74, 0x61, 0x74, 0x69, 0x6f, + 0x6e, 0x42, 0x07, 0x92, 0xb5, 0x18, 0x03, 0x31, 0x32, 0x38, 0x52, 0x0c, 0x61, 0x74, 0x74, 0x65, + 0x73, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x42, 0x0a, 0x08, 0x64, 0x65, 0x70, 0x6f, + 0x73, 0x69, 0x74, 0x73, 0x18, 0x07, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1e, 0x2e, 0x65, 0x74, 0x68, + 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, + 0x61, 0x31, 0x2e, 0x44, 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x42, 0x06, 0x92, 0xb5, 0x18, 0x02, + 0x31, 0x36, 0x52, 0x08, 0x64, 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x73, 0x12, 0x5b, 0x0a, 0x0f, + 0x76, 0x6f, 0x6c, 0x75, 0x6e, 0x74, 0x61, 0x72, 0x79, 0x5f, 0x65, 0x78, 0x69, 0x74, 0x73, 0x18, + 0x08, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x2a, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, + 0x2e, 0x65, 0x74, 0x68, 
0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x53, 0x69, + 0x67, 0x6e, 0x65, 0x64, 0x56, 0x6f, 0x6c, 0x75, 0x6e, 0x74, 0x61, 0x72, 0x79, 0x45, 0x78, 0x69, + 0x74, 0x42, 0x06, 0x92, 0xb5, 0x18, 0x02, 0x31, 0x36, 0x52, 0x0e, 0x76, 0x6f, 0x6c, 0x75, 0x6e, + 0x74, 0x61, 0x72, 0x79, 0x45, 0x78, 0x69, 0x74, 0x73, 0x12, 0x4b, 0x0a, 0x0e, 0x73, 0x79, 0x6e, + 0x63, 0x5f, 0x61, 0x67, 0x67, 0x72, 0x65, 0x67, 0x61, 0x74, 0x65, 0x18, 0x09, 0x20, 0x01, 0x28, + 0x0b, 0x32, 0x24, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, + 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x53, 0x79, 0x6e, 0x63, 0x41, 0x67, + 0x67, 0x72, 0x65, 0x67, 0x61, 0x74, 0x65, 0x52, 0x0d, 0x73, 0x79, 0x6e, 0x63, 0x41, 0x67, 0x67, + 0x72, 0x65, 0x67, 0x61, 0x74, 0x65, 0x12, 0x51, 0x0a, 0x11, 0x65, 0x78, 0x65, 0x63, 0x75, 0x74, + 0x69, 0x6f, 0x6e, 0x5f, 0x70, 0x61, 0x79, 0x6c, 0x6f, 0x61, 0x64, 0x18, 0x0a, 0x20, 0x01, 0x28, + 0x0b, 0x32, 0x24, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x6e, 0x67, + 0x69, 0x6e, 0x65, 0x2e, 0x76, 0x31, 0x2e, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, + 0x50, 0x61, 0x79, 0x6c, 0x6f, 0x61, 0x64, 0x52, 0x10, 0x65, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, + 0x6f, 0x6e, 0x50, 0x61, 0x79, 0x6c, 0x6f, 0x61, 0x64, 0x22, 0x93, 0x01, 0x0a, 0x21, 0x53, 0x69, + 0x67, 0x6e, 0x65, 0x64, 0x42, 0x6c, 0x69, 0x6e, 0x64, 0x65, 0x64, 0x42, 0x65, 0x61, 0x63, 0x6f, + 0x6e, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x42, 0x65, 0x6c, 0x6c, 0x61, 0x74, 0x72, 0x69, 0x78, 0x12, + 0x48, 0x0a, 0x05, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x32, + 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, + 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x42, 0x6c, 0x69, 0x6e, 0x64, 0x65, 0x64, 0x42, 0x65, + 0x61, 0x63, 0x6f, 0x6e, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x42, 0x65, 0x6c, 0x6c, 0x61, 0x74, 0x72, + 0x69, 0x78, 0x52, 0x05, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x12, 0x24, 0x0a, 0x09, 0x73, 0x69, 0x67, + 0x6e, 0x61, 0x74, 0x75, 0x72, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x06, 0x8a, 0xb5, + 0x18, 0x02, 0x39, 0x36, 0x52, 0x09, 0x73, 0x69, 0x67, 0x6e, 0x61, 0x74, 0x75, 0x72, 0x65, 0x22, + 0x8a, 0x03, 0x0a, 0x1b, 0x42, 0x6c, 0x69, 0x6e, 0x64, 0x65, 0x64, 0x42, 0x65, 0x61, 0x63, 0x6f, + 0x6e, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x42, 0x65, 0x6c, 0x6c, 0x61, 0x74, 0x72, 0x69, 0x78, 0x12, + 0x58, 0x0a, 0x04, 0x73, 0x6c, 0x6f, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, 0x42, 0x44, 0x82, + 0xb5, 0x18, 0x40, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, + 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, + 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, + 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x53, + 0x6c, 0x6f, 0x74, 0x52, 0x04, 0x73, 0x6c, 0x6f, 0x74, 0x12, 0x75, 0x0a, 0x0e, 0x70, 0x72, 0x6f, + 0x70, 0x6f, 0x73, 0x65, 0x72, 0x5f, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x18, 0x02, 0x20, 0x01, 0x28, + 0x04, 0x42, 0x4e, 0x82, 0xb5, 0x18, 0x4a, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, + 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, + 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, + 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, + 0x65, 0x73, 0x2e, 0x56, 0x61, 0x6c, 0x69, 0x64, 
0x61, 0x74, 0x6f, 0x72, 0x49, 0x6e, 0x64, 0x65, + 0x78, 0x52, 0x0d, 0x70, 0x72, 0x6f, 0x70, 0x6f, 0x73, 0x65, 0x72, 0x49, 0x6e, 0x64, 0x65, 0x78, + 0x12, 0x27, 0x0a, 0x0b, 0x70, 0x61, 0x72, 0x65, 0x6e, 0x74, 0x5f, 0x72, 0x6f, 0x6f, 0x74, 0x18, + 0x03, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x06, 0x8a, 0xb5, 0x18, 0x02, 0x33, 0x32, 0x52, 0x0a, 0x70, + 0x61, 0x72, 0x65, 0x6e, 0x74, 0x52, 0x6f, 0x6f, 0x74, 0x12, 0x25, 0x0a, 0x0a, 0x73, 0x74, 0x61, + 0x74, 0x65, 0x5f, 0x72, 0x6f, 0x6f, 0x74, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x06, 0x8a, + 0xb5, 0x18, 0x02, 0x33, 0x32, 0x52, 0x09, 0x73, 0x74, 0x61, 0x74, 0x65, 0x52, 0x6f, 0x6f, 0x74, + 0x12, 0x4a, 0x0a, 0x04, 0x62, 0x6f, 0x64, 0x79, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x36, + 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, + 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x42, 0x6c, 0x69, 0x6e, 0x64, 0x65, 0x64, 0x42, 0x65, + 0x61, 0x63, 0x6f, 0x6e, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x42, 0x6f, 0x64, 0x79, 0x42, 0x65, 0x6c, + 0x6c, 0x61, 0x74, 0x72, 0x69, 0x78, 0x52, 0x04, 0x62, 0x6f, 0x64, 0x79, 0x22, 0x94, 0x06, 0x0a, + 0x1f, 0x42, 0x6c, 0x69, 0x6e, 0x64, 0x65, 0x64, 0x42, 0x65, 0x61, 0x63, 0x6f, 0x6e, 0x42, 0x6c, + 0x6f, 0x63, 0x6b, 0x42, 0x6f, 0x64, 0x79, 0x42, 0x65, 0x6c, 0x6c, 0x61, 0x74, 0x72, 0x69, 0x78, + 0x12, 0x2b, 0x0a, 0x0d, 0x72, 0x61, 0x6e, 0x64, 0x61, 0x6f, 0x5f, 0x72, 0x65, 0x76, 0x65, 0x61, + 0x6c, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x06, 0x8a, 0xb5, 0x18, 0x02, 0x39, 0x36, 0x52, + 0x0c, 0x72, 0x61, 0x6e, 0x64, 0x61, 0x6f, 0x52, 0x65, 0x76, 0x65, 0x61, 0x6c, 0x12, 0x3c, 0x0a, + 0x09, 0x65, 0x74, 0x68, 0x31, 0x5f, 0x64, 0x61, 0x74, 0x61, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, + 0x32, 0x1f, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, + 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x45, 0x74, 0x68, 0x31, 0x44, 0x61, 0x74, + 0x61, 0x52, 0x08, 0x65, 0x74, 0x68, 0x31, 0x44, 0x61, 0x74, 0x61, 0x12, 0x22, 0x0a, 0x08, 0x67, + 0x72, 0x61, 0x66, 0x66, 0x69, 0x74, 0x69, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x06, 0x8a, + 0xb5, 0x18, 0x02, 0x33, 0x32, 0x52, 0x08, 0x67, 0x72, 0x61, 0x66, 0x66, 0x69, 0x74, 0x69, 0x12, + 0x5e, 0x0a, 0x12, 0x70, 0x72, 0x6f, 0x70, 0x6f, 0x73, 0x65, 0x72, 0x5f, 0x73, 0x6c, 0x61, 0x73, + 0x68, 0x69, 0x6e, 0x67, 0x73, 0x18, 0x04, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x27, 0x2e, 0x65, 0x74, + 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, + 0x68, 0x61, 0x31, 0x2e, 0x50, 0x72, 0x6f, 0x70, 0x6f, 0x73, 0x65, 0x72, 0x53, 0x6c, 0x61, 0x73, + 0x68, 0x69, 0x6e, 0x67, 0x42, 0x06, 0x92, 0xb5, 0x18, 0x02, 0x31, 0x36, 0x52, 0x11, 0x70, 0x72, + 0x6f, 0x70, 0x6f, 0x73, 0x65, 0x72, 0x53, 0x6c, 0x61, 0x73, 0x68, 0x69, 0x6e, 0x67, 0x73, 0x12, + 0x5d, 0x0a, 0x12, 0x61, 0x74, 0x74, 0x65, 0x73, 0x74, 0x65, 0x72, 0x5f, 0x73, 0x6c, 0x61, 0x73, + 0x68, 0x69, 0x6e, 0x67, 0x73, 0x18, 0x05, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x27, 0x2e, 0x65, 0x74, + 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, + 0x68, 0x61, 0x31, 0x2e, 0x41, 0x74, 0x74, 0x65, 0x73, 0x74, 0x65, 0x72, 0x53, 0x6c, 0x61, 0x73, + 0x68, 0x69, 0x6e, 0x67, 0x42, 0x05, 0x92, 0xb5, 0x18, 0x01, 0x32, 0x52, 0x11, 0x61, 0x74, 0x74, + 0x65, 0x73, 0x74, 0x65, 0x72, 0x53, 0x6c, 0x61, 0x73, 0x68, 0x69, 0x6e, 0x67, 0x73, 0x12, 0x4f, + 0x0a, 0x0c, 0x61, 0x74, 0x74, 0x65, 0x73, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x18, 0x06, + 0x20, 0x03, 0x28, 0x0b, 0x32, 0x22, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 
0x65, 0x75, 0x6d, 0x2e, + 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x41, 0x74, 0x74, + 0x65, 0x73, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x42, 0x07, 0x92, 0xb5, 0x18, 0x03, 0x31, 0x32, + 0x38, 0x52, 0x0c, 0x61, 0x74, 0x74, 0x65, 0x73, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x12, + 0x42, 0x0a, 0x08, 0x64, 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x73, 0x18, 0x07, 0x20, 0x03, 0x28, + 0x0b, 0x32, 0x1e, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, + 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x44, 0x65, 0x70, 0x6f, 0x73, 0x69, + 0x74, 0x42, 0x06, 0x92, 0xb5, 0x18, 0x02, 0x31, 0x36, 0x52, 0x08, 0x64, 0x65, 0x70, 0x6f, 0x73, + 0x69, 0x74, 0x73, 0x12, 0x5b, 0x0a, 0x0f, 0x76, 0x6f, 0x6c, 0x75, 0x6e, 0x74, 0x61, 0x72, 0x79, + 0x5f, 0x65, 0x78, 0x69, 0x74, 0x73, 0x18, 0x08, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x2a, 0x2e, 0x65, + 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, + 0x70, 0x68, 0x61, 0x31, 0x2e, 0x53, 0x69, 0x67, 0x6e, 0x65, 0x64, 0x56, 0x6f, 0x6c, 0x75, 0x6e, + 0x74, 0x61, 0x72, 0x79, 0x45, 0x78, 0x69, 0x74, 0x42, 0x06, 0x92, 0xb5, 0x18, 0x02, 0x31, 0x36, + 0x52, 0x0e, 0x76, 0x6f, 0x6c, 0x75, 0x6e, 0x74, 0x61, 0x72, 0x79, 0x45, 0x78, 0x69, 0x74, 0x73, + 0x12, 0x4b, 0x0a, 0x0e, 0x73, 0x79, 0x6e, 0x63, 0x5f, 0x61, 0x67, 0x67, 0x72, 0x65, 0x67, 0x61, + 0x74, 0x65, 0x18, 0x09, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x24, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, + 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, + 0x2e, 0x53, 0x79, 0x6e, 0x63, 0x41, 0x67, 0x67, 0x72, 0x65, 0x67, 0x61, 0x74, 0x65, 0x52, 0x0d, + 0x73, 0x79, 0x6e, 0x63, 0x41, 0x67, 0x67, 0x72, 0x65, 0x67, 0x61, 0x74, 0x65, 0x12, 0x64, 0x0a, + 0x18, 0x65, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x70, 0x61, 0x79, 0x6c, 0x6f, + 0x61, 0x64, 0x5f, 0x68, 0x65, 0x61, 0x64, 0x65, 0x72, 0x18, 0x0a, 0x20, 0x01, 0x28, 0x0b, 0x32, + 0x2a, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x6e, 0x67, 0x69, 0x6e, + 0x65, 0x2e, 0x76, 0x31, 0x2e, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x50, 0x61, + 0x79, 0x6c, 0x6f, 0x61, 0x64, 0x48, 0x65, 0x61, 0x64, 0x65, 0x72, 0x52, 0x16, 0x65, 0x78, 0x65, + 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x50, 0x61, 0x79, 0x6c, 0x6f, 0x61, 0x64, 0x48, 0x65, 0x61, + 0x64, 0x65, 0x72, 0x22, 0x81, 0x01, 0x0a, 0x18, 0x53, 0x69, 0x67, 0x6e, 0x65, 0x64, 0x42, 0x65, + 0x61, 0x63, 0x6f, 0x6e, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x43, 0x61, 0x70, 0x65, 0x6c, 0x6c, 0x61, + 0x12, 0x3f, 0x0a, 0x05, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, + 0x29, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, + 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x42, 0x65, 0x61, 0x63, 0x6f, 0x6e, 0x42, 0x6c, + 0x6f, 0x63, 0x6b, 0x43, 0x61, 0x70, 0x65, 0x6c, 0x6c, 0x61, 0x52, 0x05, 0x62, 0x6c, 0x6f, 0x63, + 0x6b, 0x12, 0x24, 0x0a, 0x09, 0x73, 0x69, 0x67, 0x6e, 0x61, 0x74, 0x75, 0x72, 0x65, 0x18, 0x02, + 0x20, 0x01, 0x28, 0x0c, 0x42, 0x06, 0x8a, 0xb5, 0x18, 0x02, 0x39, 0x36, 0x52, 0x09, 0x73, 0x69, + 0x67, 0x6e, 0x61, 0x74, 0x75, 0x72, 0x65, 0x22, 0xf8, 0x02, 0x0a, 0x12, 0x42, 0x65, 0x61, 0x63, 0x6f, 0x6e, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x43, 0x61, 0x70, 0x65, 0x6c, 0x6c, 0x61, 0x12, 0x58, 0x0a, 0x04, 0x73, 0x6c, 0x6f, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, 0x42, 0x44, 0x82, 0xb5, 0x18, 0x40, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, @@ 
@@ -5107,637 +5061,77 @@ var file_proto_prysm_v1alpha1_beacon_block_proto_rawDesc = []byte{
 [machine-generated protobuf raw descriptor bytes: regenerated descriptors for the Capella, Deneb, Electra, and Fulu beacon block, blinded block, builder bid, and blob sidecar messages]
@@ -5759,18 +5153,667 @@ var file_proto_prysm_v1alpha1_beacon_block_proto_rawDesc = []byte{
 [machine-generated protobuf raw descriptor bytes continue]
0x78, 0x65, 0x63, 0x75, 0x74, + 0x69, 0x6f, 0x6e, 0x5f, 0x63, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x73, 0x18, 0x0b, 0x20, 0x03, 0x28, + 0x0b, 0x32, 0x31, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, + 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x53, 0x69, 0x67, 0x6e, 0x65, 0x64, + 0x42, 0x4c, 0x53, 0x54, 0x6f, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x43, 0x68, + 0x61, 0x6e, 0x67, 0x65, 0x42, 0x06, 0x92, 0xb5, 0x18, 0x02, 0x31, 0x36, 0x52, 0x15, 0x62, 0x6c, + 0x73, 0x54, 0x6f, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x43, 0x68, 0x61, 0x6e, + 0x67, 0x65, 0x73, 0x22, 0x83, 0x01, 0x0a, 0x17, 0x53, 0x69, 0x67, 0x6e, 0x65, 0x64, 0x42, 0x75, + 0x69, 0x6c, 0x64, 0x65, 0x72, 0x42, 0x69, 0x64, 0x43, 0x61, 0x70, 0x65, 0x6c, 0x6c, 0x61, 0x12, + 0x42, 0x0a, 0x07, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, + 0x32, 0x28, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, + 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x42, 0x75, 0x69, 0x6c, 0x64, 0x65, 0x72, + 0x42, 0x69, 0x64, 0x43, 0x61, 0x70, 0x65, 0x6c, 0x6c, 0x61, 0x52, 0x07, 0x6d, 0x65, 0x73, 0x73, + 0x61, 0x67, 0x65, 0x12, 0x24, 0x0a, 0x09, 0x73, 0x69, 0x67, 0x6e, 0x61, 0x74, 0x75, 0x72, 0x65, + 0x18, 0x02, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x06, 0x8a, 0xb5, 0x18, 0x02, 0x39, 0x36, 0x52, 0x09, + 0x73, 0x69, 0x67, 0x6e, 0x61, 0x74, 0x75, 0x72, 0x65, 0x22, 0x9c, 0x01, 0x0a, 0x11, 0x42, 0x75, + 0x69, 0x6c, 0x64, 0x65, 0x72, 0x42, 0x69, 0x64, 0x43, 0x61, 0x70, 0x65, 0x6c, 0x6c, 0x61, 0x12, + 0x49, 0x0a, 0x06, 0x68, 0x65, 0x61, 0x64, 0x65, 0x72, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, + 0x31, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x6e, 0x67, 0x69, 0x6e, + 0x65, 0x2e, 0x76, 0x31, 0x2e, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x50, 0x61, + 0x79, 0x6c, 0x6f, 0x61, 0x64, 0x48, 0x65, 0x61, 0x64, 0x65, 0x72, 0x43, 0x61, 0x70, 0x65, 0x6c, + 0x6c, 0x61, 0x52, 0x06, 0x68, 0x65, 0x61, 0x64, 0x65, 0x72, 0x12, 0x1c, 0x0a, 0x05, 0x76, 0x61, + 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x06, 0x8a, 0xb5, 0x18, 0x02, 0x33, + 0x32, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x12, 0x1e, 0x0a, 0x06, 0x70, 0x75, 0x62, 0x6b, + 0x65, 0x79, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x06, 0x8a, 0xb5, 0x18, 0x02, 0x34, 0x38, + 0x52, 0x06, 0x70, 0x75, 0x62, 0x6b, 0x65, 0x79, 0x22, 0xc2, 0x01, 0x0a, 0x1e, 0x53, 0x69, 0x67, + 0x6e, 0x65, 0x64, 0x42, 0x65, 0x61, 0x63, 0x6f, 0x6e, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x43, 0x6f, + 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x73, 0x44, 0x65, 0x6e, 0x65, 0x62, 0x12, 0x43, 0x0a, 0x05, 0x62, + 0x6c, 0x6f, 0x63, 0x6b, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x2d, 0x2e, 0x65, 0x74, 0x68, + 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, + 0x61, 0x31, 0x2e, 0x53, 0x69, 0x67, 0x6e, 0x65, 0x64, 0x42, 0x65, 0x61, 0x63, 0x6f, 0x6e, 0x42, + 0x6c, 0x6f, 0x63, 0x6b, 0x44, 0x65, 0x6e, 0x65, 0x62, 0x52, 0x05, 0x62, 0x6c, 0x6f, 0x63, 0x6b, + 0x12, 0x2f, 0x0a, 0x0a, 0x6b, 0x7a, 0x67, 0x5f, 0x70, 0x72, 0x6f, 0x6f, 0x66, 0x73, 0x18, 0x02, + 0x20, 0x03, 0x28, 0x0c, 0x42, 0x10, 0x8a, 0xb5, 0x18, 0x04, 0x3f, 0x2c, 0x34, 0x38, 0x92, 0xb5, + 0x18, 0x04, 0x34, 0x30, 0x39, 0x36, 0x52, 0x09, 0x6b, 0x7a, 0x67, 0x50, 0x72, 0x6f, 0x6f, 0x66, + 0x73, 0x12, 0x2a, 0x0a, 0x05, 0x62, 0x6c, 0x6f, 0x62, 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, 0x0c, + 0x42, 0x14, 0x8a, 0xb5, 0x18, 0x08, 0x3f, 0x2c, 0x31, 0x33, 0x31, 0x30, 0x37, 0x32, 0x92, 
0xb5, + 0x18, 0x04, 0x34, 0x30, 0x39, 0x36, 0x52, 0x05, 0x62, 0x6c, 0x6f, 0x62, 0x73, 0x22, 0x7d, 0x0a, + 0x16, 0x53, 0x69, 0x67, 0x6e, 0x65, 0x64, 0x42, 0x65, 0x61, 0x63, 0x6f, 0x6e, 0x42, 0x6c, 0x6f, + 0x63, 0x6b, 0x44, 0x65, 0x6e, 0x65, 0x62, 0x12, 0x3d, 0x0a, 0x05, 0x62, 0x6c, 0x6f, 0x63, 0x6b, + 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x27, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, + 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x42, + 0x65, 0x61, 0x63, 0x6f, 0x6e, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x44, 0x65, 0x6e, 0x65, 0x62, 0x52, + 0x05, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x12, 0x24, 0x0a, 0x09, 0x73, 0x69, 0x67, 0x6e, 0x61, 0x74, + 0x75, 0x72, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x06, 0x8a, 0xb5, 0x18, 0x02, 0x39, + 0x36, 0x52, 0x09, 0x73, 0x69, 0x67, 0x6e, 0x61, 0x74, 0x75, 0x72, 0x65, 0x22, 0xb6, 0x01, 0x0a, + 0x18, 0x42, 0x65, 0x61, 0x63, 0x6f, 0x6e, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x43, 0x6f, 0x6e, 0x74, + 0x65, 0x6e, 0x74, 0x73, 0x44, 0x65, 0x6e, 0x65, 0x62, 0x12, 0x3d, 0x0a, 0x05, 0x62, 0x6c, 0x6f, + 0x63, 0x6b, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x27, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, + 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, + 0x2e, 0x42, 0x65, 0x61, 0x63, 0x6f, 0x6e, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x44, 0x65, 0x6e, 0x65, + 0x62, 0x52, 0x05, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x12, 0x2f, 0x0a, 0x0a, 0x6b, 0x7a, 0x67, 0x5f, + 0x70, 0x72, 0x6f, 0x6f, 0x66, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0c, 0x42, 0x10, 0x8a, 0xb5, + 0x18, 0x04, 0x3f, 0x2c, 0x34, 0x38, 0x92, 0xb5, 0x18, 0x04, 0x34, 0x30, 0x39, 0x36, 0x52, 0x09, + 0x6b, 0x7a, 0x67, 0x50, 0x72, 0x6f, 0x6f, 0x66, 0x73, 0x12, 0x2a, 0x0a, 0x05, 0x62, 0x6c, 0x6f, + 0x62, 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, 0x0c, 0x42, 0x14, 0x8a, 0xb5, 0x18, 0x08, 0x3f, 0x2c, + 0x31, 0x33, 0x31, 0x30, 0x37, 0x32, 0x92, 0xb5, 0x18, 0x04, 0x34, 0x30, 0x39, 0x36, 0x52, 0x05, + 0x62, 0x6c, 0x6f, 0x62, 0x73, 0x22, 0xf4, 0x02, 0x0a, 0x10, 0x42, 0x65, 0x61, 0x63, 0x6f, 0x6e, + 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x44, 0x65, 0x6e, 0x65, 0x62, 0x12, 0x58, 0x0a, 0x04, 0x73, 0x6c, + 0x6f, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, 0x42, 0x44, 0x82, 0xb5, 0x18, 0x40, 0x67, 0x69, + 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, + 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, + 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, + 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x53, 0x6c, 0x6f, 0x74, 0x52, 0x04, + 0x73, 0x6c, 0x6f, 0x74, 0x12, 0x75, 0x0a, 0x0e, 0x70, 0x72, 0x6f, 0x70, 0x6f, 0x73, 0x65, 0x72, + 0x5f, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x18, 0x02, 0x20, 0x01, 0x28, 0x04, 0x42, 0x4e, 0x82, 0xb5, + 0x18, 0x4a, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, + 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, + 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, + 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x56, 0x61, + 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x52, 0x0d, 0x70, 0x72, + 0x6f, 0x70, 0x6f, 0x73, 0x65, 0x72, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x12, 0x27, 0x0a, 0x0b, 0x70, + 0x61, 0x72, 0x65, 0x6e, 0x74, 0x5f, 0x72, 0x6f, 0x6f, 0x74, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0c, + 0x42, 0x06, 0x8a, 
0xb5, 0x18, 0x02, 0x33, 0x32, 0x52, 0x0a, 0x70, 0x61, 0x72, 0x65, 0x6e, 0x74, + 0x52, 0x6f, 0x6f, 0x74, 0x12, 0x25, 0x0a, 0x0a, 0x73, 0x74, 0x61, 0x74, 0x65, 0x5f, 0x72, 0x6f, + 0x6f, 0x74, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x06, 0x8a, 0xb5, 0x18, 0x02, 0x33, 0x32, + 0x52, 0x09, 0x73, 0x74, 0x61, 0x74, 0x65, 0x52, 0x6f, 0x6f, 0x74, 0x12, 0x3f, 0x0a, 0x04, 0x62, + 0x6f, 0x64, 0x79, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x2b, 0x2e, 0x65, 0x74, 0x68, 0x65, + 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, + 0x31, 0x2e, 0x42, 0x65, 0x61, 0x63, 0x6f, 0x6e, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x42, 0x6f, 0x64, + 0x79, 0x44, 0x65, 0x6e, 0x65, 0x62, 0x52, 0x04, 0x62, 0x6f, 0x64, 0x79, 0x22, 0xb3, 0x07, 0x0a, + 0x14, 0x42, 0x65, 0x61, 0x63, 0x6f, 0x6e, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x42, 0x6f, 0x64, 0x79, + 0x44, 0x65, 0x6e, 0x65, 0x62, 0x12, 0x2b, 0x0a, 0x0d, 0x72, 0x61, 0x6e, 0x64, 0x61, 0x6f, 0x5f, + 0x72, 0x65, 0x76, 0x65, 0x61, 0x6c, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x06, 0x8a, 0xb5, + 0x18, 0x02, 0x39, 0x36, 0x52, 0x0c, 0x72, 0x61, 0x6e, 0x64, 0x61, 0x6f, 0x52, 0x65, 0x76, 0x65, + 0x61, 0x6c, 0x12, 0x3c, 0x0a, 0x09, 0x65, 0x74, 0x68, 0x31, 0x5f, 0x64, 0x61, 0x74, 0x61, 0x18, + 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1f, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, + 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x45, 0x74, + 0x68, 0x31, 0x44, 0x61, 0x74, 0x61, 0x52, 0x08, 0x65, 0x74, 0x68, 0x31, 0x44, 0x61, 0x74, 0x61, + 0x12, 0x22, 0x0a, 0x08, 0x67, 0x72, 0x61, 0x66, 0x66, 0x69, 0x74, 0x69, 0x18, 0x03, 0x20, 0x01, + 0x28, 0x0c, 0x42, 0x06, 0x8a, 0xb5, 0x18, 0x02, 0x33, 0x32, 0x52, 0x08, 0x67, 0x72, 0x61, 0x66, + 0x66, 0x69, 0x74, 0x69, 0x12, 0x5e, 0x0a, 0x12, 0x70, 0x72, 0x6f, 0x70, 0x6f, 0x73, 0x65, 0x72, + 0x5f, 0x73, 0x6c, 0x61, 0x73, 0x68, 0x69, 0x6e, 0x67, 0x73, 0x18, 0x04, 0x20, 0x03, 0x28, 0x0b, + 0x32, 0x27, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, + 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x50, 0x72, 0x6f, 0x70, 0x6f, 0x73, 0x65, + 0x72, 0x53, 0x6c, 0x61, 0x73, 0x68, 0x69, 0x6e, 0x67, 0x42, 0x06, 0x92, 0xb5, 0x18, 0x02, 0x31, + 0x36, 0x52, 0x11, 0x70, 0x72, 0x6f, 0x70, 0x6f, 0x73, 0x65, 0x72, 0x53, 0x6c, 0x61, 0x73, 0x68, + 0x69, 0x6e, 0x67, 0x73, 0x12, 0x5d, 0x0a, 0x12, 0x61, 0x74, 0x74, 0x65, 0x73, 0x74, 0x65, 0x72, + 0x5f, 0x73, 0x6c, 0x61, 0x73, 0x68, 0x69, 0x6e, 0x67, 0x73, 0x18, 0x05, 0x20, 0x03, 0x28, 0x0b, + 0x32, 0x27, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, + 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x41, 0x74, 0x74, 0x65, 0x73, 0x74, 0x65, + 0x72, 0x53, 0x6c, 0x61, 0x73, 0x68, 0x69, 0x6e, 0x67, 0x42, 0x05, 0x92, 0xb5, 0x18, 0x01, 0x32, + 0x52, 0x11, 0x61, 0x74, 0x74, 0x65, 0x73, 0x74, 0x65, 0x72, 0x53, 0x6c, 0x61, 0x73, 0x68, 0x69, + 0x6e, 0x67, 0x73, 0x12, 0x4f, 0x0a, 0x0c, 0x61, 0x74, 0x74, 0x65, 0x73, 0x74, 0x61, 0x74, 0x69, + 0x6f, 0x6e, 0x73, 0x18, 0x06, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x22, 0x2e, 0x65, 0x74, 0x68, 0x65, + 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, + 0x31, 0x2e, 0x41, 0x74, 0x74, 0x65, 0x73, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x42, 0x07, 0x92, + 0xb5, 0x18, 0x03, 0x31, 0x32, 0x38, 0x52, 0x0c, 0x61, 0x74, 0x74, 0x65, 0x73, 0x74, 0x61, 0x74, + 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x42, 0x0a, 0x08, 0x64, 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x73, + 0x18, 0x07, 0x20, 0x03, 0x28, 0x0b, 0x32, 
0x1e, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, + 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x44, + 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x42, 0x06, 0x92, 0xb5, 0x18, 0x02, 0x31, 0x36, 0x52, 0x08, + 0x64, 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x73, 0x12, 0x5b, 0x0a, 0x0f, 0x76, 0x6f, 0x6c, 0x75, + 0x6e, 0x74, 0x61, 0x72, 0x79, 0x5f, 0x65, 0x78, 0x69, 0x74, 0x73, 0x18, 0x08, 0x20, 0x03, 0x28, + 0x0b, 0x32, 0x2a, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, + 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x53, 0x69, 0x67, 0x6e, 0x65, 0x64, + 0x56, 0x6f, 0x6c, 0x75, 0x6e, 0x74, 0x61, 0x72, 0x79, 0x45, 0x78, 0x69, 0x74, 0x42, 0x06, 0x92, + 0xb5, 0x18, 0x02, 0x31, 0x36, 0x52, 0x0e, 0x76, 0x6f, 0x6c, 0x75, 0x6e, 0x74, 0x61, 0x72, 0x79, + 0x45, 0x78, 0x69, 0x74, 0x73, 0x12, 0x4b, 0x0a, 0x0e, 0x73, 0x79, 0x6e, 0x63, 0x5f, 0x61, 0x67, + 0x67, 0x72, 0x65, 0x67, 0x61, 0x74, 0x65, 0x18, 0x09, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x24, 0x2e, + 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, + 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x53, 0x79, 0x6e, 0x63, 0x41, 0x67, 0x67, 0x72, 0x65, 0x67, + 0x61, 0x74, 0x65, 0x52, 0x0d, 0x73, 0x79, 0x6e, 0x63, 0x41, 0x67, 0x67, 0x72, 0x65, 0x67, 0x61, + 0x74, 0x65, 0x12, 0x56, 0x0a, 0x11, 0x65, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x5f, + 0x70, 0x61, 0x79, 0x6c, 0x6f, 0x61, 0x64, 0x18, 0x0a, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x29, 0x2e, + 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x6e, 0x67, 0x69, 0x6e, 0x65, 0x2e, + 0x76, 0x31, 0x2e, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x50, 0x61, 0x79, 0x6c, + 0x6f, 0x61, 0x64, 0x44, 0x65, 0x6e, 0x65, 0x62, 0x52, 0x10, 0x65, 0x78, 0x65, 0x63, 0x75, 0x74, + 0x69, 0x6f, 0x6e, 0x50, 0x61, 0x79, 0x6c, 0x6f, 0x61, 0x64, 0x12, 0x72, 0x0a, 0x18, 0x62, 0x6c, + 0x73, 0x5f, 0x74, 0x6f, 0x5f, 0x65, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x63, + 0x68, 0x61, 0x6e, 0x67, 0x65, 0x73, 0x18, 0x0b, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x31, 0x2e, 0x65, + 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, + 0x70, 0x68, 0x61, 0x31, 0x2e, 0x53, 0x69, 0x67, 0x6e, 0x65, 0x64, 0x42, 0x4c, 0x53, 0x54, 0x6f, + 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x43, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x42, + 0x06, 0x92, 0xb5, 0x18, 0x02, 0x31, 0x36, 0x52, 0x15, 0x62, 0x6c, 0x73, 0x54, 0x6f, 0x45, 0x78, + 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x43, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x73, 0x12, 0x42, + 0x0a, 0x14, 0x62, 0x6c, 0x6f, 0x62, 0x5f, 0x6b, 0x7a, 0x67, 0x5f, 0x63, 0x6f, 0x6d, 0x6d, 0x69, + 0x74, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x18, 0x0c, 0x20, 0x03, 0x28, 0x0c, 0x42, 0x10, 0x8a, 0xb5, + 0x18, 0x04, 0x3f, 0x2c, 0x34, 0x38, 0x92, 0xb5, 0x18, 0x04, 0x34, 0x30, 0x39, 0x36, 0x52, 0x12, + 0x62, 0x6c, 0x6f, 0x62, 0x4b, 0x7a, 0x67, 0x43, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x6d, 0x65, 0x6e, + 0x74, 0x73, 0x22, 0x8f, 0x01, 0x0a, 0x1d, 0x53, 0x69, 0x67, 0x6e, 0x65, 0x64, 0x42, 0x6c, 0x69, + 0x6e, 0x64, 0x65, 0x64, 0x42, 0x65, 0x61, 0x63, 0x6f, 0x6e, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x44, + 0x65, 0x6e, 0x65, 0x62, 0x12, 0x48, 0x0a, 0x07, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x18, + 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x2e, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, + 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x42, 0x6c, + 0x69, 0x6e, 0x64, 0x65, 0x64, 0x42, 0x65, 0x61, 0x63, 0x6f, 0x6e, 
0x42, 0x6c, 0x6f, 0x63, 0x6b, + 0x44, 0x65, 0x6e, 0x65, 0x62, 0x52, 0x07, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x12, 0x24, + 0x0a, 0x09, 0x73, 0x69, 0x67, 0x6e, 0x61, 0x74, 0x75, 0x72, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, + 0x0c, 0x42, 0x06, 0x8a, 0xb5, 0x18, 0x02, 0x39, 0x36, 0x52, 0x09, 0x73, 0x69, 0x67, 0x6e, 0x61, + 0x74, 0x75, 0x72, 0x65, 0x22, 0x82, 0x03, 0x0a, 0x17, 0x42, 0x6c, 0x69, 0x6e, 0x64, 0x65, 0x64, + 0x42, 0x65, 0x61, 0x63, 0x6f, 0x6e, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x44, 0x65, 0x6e, 0x65, 0x62, + 0x12, 0x58, 0x0a, 0x04, 0x73, 0x6c, 0x6f, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, 0x42, 0x44, + 0x82, 0xb5, 0x18, 0x40, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, + 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, + 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, + 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, + 0x53, 0x6c, 0x6f, 0x74, 0x52, 0x04, 0x73, 0x6c, 0x6f, 0x74, 0x12, 0x75, 0x0a, 0x0e, 0x70, 0x72, + 0x6f, 0x70, 0x6f, 0x73, 0x65, 0x72, 0x5f, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x18, 0x02, 0x20, 0x01, + 0x28, 0x04, 0x42, 0x4e, 0x82, 0xb5, 0x18, 0x4a, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, + 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, + 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, + 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, + 0x76, 0x65, 0x73, 0x2e, 0x56, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, 0x49, 0x6e, 0x64, + 0x65, 0x78, 0x52, 0x0d, 0x70, 0x72, 0x6f, 0x70, 0x6f, 0x73, 0x65, 0x72, 0x49, 0x6e, 0x64, 0x65, + 0x78, 0x12, 0x27, 0x0a, 0x0b, 0x70, 0x61, 0x72, 0x65, 0x6e, 0x74, 0x5f, 0x72, 0x6f, 0x6f, 0x74, + 0x18, 0x03, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x06, 0x8a, 0xb5, 0x18, 0x02, 0x33, 0x32, 0x52, 0x0a, + 0x70, 0x61, 0x72, 0x65, 0x6e, 0x74, 0x52, 0x6f, 0x6f, 0x74, 0x12, 0x25, 0x0a, 0x0a, 0x73, 0x74, + 0x61, 0x74, 0x65, 0x5f, 0x72, 0x6f, 0x6f, 0x74, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x06, + 0x8a, 0xb5, 0x18, 0x02, 0x33, 0x32, 0x52, 0x09, 0x73, 0x74, 0x61, 0x74, 0x65, 0x52, 0x6f, 0x6f, + 0x74, 0x12, 0x46, 0x0a, 0x04, 0x62, 0x6f, 0x64, 0x79, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, + 0x32, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, + 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x42, 0x6c, 0x69, 0x6e, 0x64, 0x65, 0x64, 0x42, + 0x65, 0x61, 0x63, 0x6f, 0x6e, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x42, 0x6f, 0x64, 0x79, 0x44, 0x65, + 0x6e, 0x65, 0x62, 0x52, 0x04, 0x62, 0x6f, 0x64, 0x79, 0x22, 0xcd, 0x07, 0x0a, 0x1b, 0x42, 0x6c, + 0x69, 0x6e, 0x64, 0x65, 0x64, 0x42, 0x65, 0x61, 0x63, 0x6f, 0x6e, 0x42, 0x6c, 0x6f, 0x63, 0x6b, + 0x42, 0x6f, 0x64, 0x79, 0x44, 0x65, 0x6e, 0x65, 0x62, 0x12, 0x2b, 0x0a, 0x0d, 0x72, 0x61, 0x6e, + 0x64, 0x61, 0x6f, 0x5f, 0x72, 0x65, 0x76, 0x65, 0x61, 0x6c, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, + 0x42, 0x06, 0x8a, 0xb5, 0x18, 0x02, 0x39, 0x36, 0x52, 0x0c, 0x72, 0x61, 0x6e, 0x64, 0x61, 0x6f, + 0x52, 0x65, 0x76, 0x65, 0x61, 0x6c, 0x12, 0x3c, 0x0a, 0x09, 0x65, 0x74, 0x68, 0x31, 0x5f, 0x64, + 0x61, 0x74, 0x61, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1f, 0x2e, 0x65, 0x74, 0x68, 0x65, + 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, + 0x31, 0x2e, 0x45, 0x74, 0x68, 0x31, 0x44, 0x61, 0x74, 0x61, 0x52, 0x08, 0x65, 0x74, 0x68, 
0x31, + 0x44, 0x61, 0x74, 0x61, 0x12, 0x22, 0x0a, 0x08, 0x67, 0x72, 0x61, 0x66, 0x66, 0x69, 0x74, 0x69, + 0x18, 0x03, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x06, 0x8a, 0xb5, 0x18, 0x02, 0x33, 0x32, 0x52, 0x08, + 0x67, 0x72, 0x61, 0x66, 0x66, 0x69, 0x74, 0x69, 0x12, 0x5e, 0x0a, 0x12, 0x70, 0x72, 0x6f, 0x70, + 0x6f, 0x73, 0x65, 0x72, 0x5f, 0x73, 0x6c, 0x61, 0x73, 0x68, 0x69, 0x6e, 0x67, 0x73, 0x18, 0x04, + 0x20, 0x03, 0x28, 0x0b, 0x32, 0x27, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, + 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x50, 0x72, 0x6f, + 0x70, 0x6f, 0x73, 0x65, 0x72, 0x53, 0x6c, 0x61, 0x73, 0x68, 0x69, 0x6e, 0x67, 0x42, 0x06, 0x92, + 0xb5, 0x18, 0x02, 0x31, 0x36, 0x52, 0x11, 0x70, 0x72, 0x6f, 0x70, 0x6f, 0x73, 0x65, 0x72, 0x53, + 0x6c, 0x61, 0x73, 0x68, 0x69, 0x6e, 0x67, 0x73, 0x12, 0x5d, 0x0a, 0x12, 0x61, 0x74, 0x74, 0x65, + 0x73, 0x74, 0x65, 0x72, 0x5f, 0x73, 0x6c, 0x61, 0x73, 0x68, 0x69, 0x6e, 0x67, 0x73, 0x18, 0x05, + 0x20, 0x03, 0x28, 0x0b, 0x32, 0x27, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, + 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x41, 0x74, 0x74, + 0x65, 0x73, 0x74, 0x65, 0x72, 0x53, 0x6c, 0x61, 0x73, 0x68, 0x69, 0x6e, 0x67, 0x42, 0x05, 0x92, + 0xb5, 0x18, 0x01, 0x32, 0x52, 0x11, 0x61, 0x74, 0x74, 0x65, 0x73, 0x74, 0x65, 0x72, 0x53, 0x6c, + 0x61, 0x73, 0x68, 0x69, 0x6e, 0x67, 0x73, 0x12, 0x4f, 0x0a, 0x0c, 0x61, 0x74, 0x74, 0x65, 0x73, + 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x18, 0x06, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x22, 0x2e, + 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, + 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x41, 0x74, 0x74, 0x65, 0x73, 0x74, 0x61, 0x74, 0x69, 0x6f, + 0x6e, 0x42, 0x07, 0x92, 0xb5, 0x18, 0x03, 0x31, 0x32, 0x38, 0x52, 0x0c, 0x61, 0x74, 0x74, 0x65, + 0x73, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x42, 0x0a, 0x08, 0x64, 0x65, 0x70, 0x6f, + 0x73, 0x69, 0x74, 0x73, 0x18, 0x07, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1e, 0x2e, 0x65, 0x74, 0x68, + 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, + 0x61, 0x31, 0x2e, 0x44, 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x42, 0x06, 0x92, 0xb5, 0x18, 0x02, + 0x31, 0x36, 0x52, 0x08, 0x64, 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x73, 0x12, 0x5b, 0x0a, 0x0f, + 0x76, 0x6f, 0x6c, 0x75, 0x6e, 0x74, 0x61, 0x72, 0x79, 0x5f, 0x65, 0x78, 0x69, 0x74, 0x73, 0x18, + 0x08, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x2a, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, + 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x53, 0x69, + 0x67, 0x6e, 0x65, 0x64, 0x56, 0x6f, 0x6c, 0x75, 0x6e, 0x74, 0x61, 0x72, 0x79, 0x45, 0x78, 0x69, + 0x74, 0x42, 0x06, 0x92, 0xb5, 0x18, 0x02, 0x31, 0x36, 0x52, 0x0e, 0x76, 0x6f, 0x6c, 0x75, 0x6e, + 0x74, 0x61, 0x72, 0x79, 0x45, 0x78, 0x69, 0x74, 0x73, 0x12, 0x4b, 0x0a, 0x0e, 0x73, 0x79, 0x6e, + 0x63, 0x5f, 0x61, 0x67, 0x67, 0x72, 0x65, 0x67, 0x61, 0x74, 0x65, 0x18, 0x09, 0x20, 0x01, 0x28, + 0x0b, 0x32, 0x24, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, + 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x53, 0x79, 0x6e, 0x63, 0x41, 0x67, + 0x67, 0x72, 0x65, 0x67, 0x61, 0x74, 0x65, 0x52, 0x0d, 0x73, 0x79, 0x6e, 0x63, 0x41, 0x67, 0x67, + 0x72, 0x65, 0x67, 0x61, 0x74, 0x65, 0x12, 0x69, 0x0a, 0x18, 0x65, 0x78, 0x65, 0x63, 0x75, 0x74, + 0x69, 0x6f, 0x6e, 0x5f, 0x70, 0x61, 0x79, 0x6c, 0x6f, 0x61, 0x64, 0x5f, 0x68, 0x65, 0x61, 0x64, + 0x65, 0x72, 0x18, 
0x0a, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x2f, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, + 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x6e, 0x67, 0x69, 0x6e, 0x65, 0x2e, 0x76, 0x31, 0x2e, 0x45, 0x78, + 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x50, 0x61, 0x79, 0x6c, 0x6f, 0x61, 0x64, 0x48, 0x65, + 0x61, 0x64, 0x65, 0x72, 0x44, 0x65, 0x6e, 0x65, 0x62, 0x52, 0x16, 0x65, 0x78, 0x65, 0x63, 0x75, + 0x74, 0x69, 0x6f, 0x6e, 0x50, 0x61, 0x79, 0x6c, 0x6f, 0x61, 0x64, 0x48, 0x65, 0x61, 0x64, 0x65, + 0x72, 0x12, 0x72, 0x0a, 0x18, 0x62, 0x6c, 0x73, 0x5f, 0x74, 0x6f, 0x5f, 0x65, 0x78, 0x65, 0x63, + 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x63, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x73, 0x18, 0x0b, 0x20, + 0x03, 0x28, 0x0b, 0x32, 0x31, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, + 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x53, 0x69, 0x67, 0x6e, + 0x65, 0x64, 0x42, 0x4c, 0x53, 0x54, 0x6f, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, + 0x43, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x42, 0x06, 0x92, 0xb5, 0x18, 0x02, 0x31, 0x36, 0x52, 0x15, + 0x62, 0x6c, 0x73, 0x54, 0x6f, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x43, 0x68, + 0x61, 0x6e, 0x67, 0x65, 0x73, 0x12, 0x42, 0x0a, 0x14, 0x62, 0x6c, 0x6f, 0x62, 0x5f, 0x6b, 0x7a, + 0x67, 0x5f, 0x63, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x18, 0x0c, 0x20, + 0x03, 0x28, 0x0c, 0x42, 0x10, 0x8a, 0xb5, 0x18, 0x04, 0x3f, 0x2c, 0x34, 0x38, 0x92, 0xb5, 0x18, + 0x04, 0x34, 0x30, 0x39, 0x36, 0x52, 0x12, 0x62, 0x6c, 0x6f, 0x62, 0x4b, 0x7a, 0x67, 0x43, 0x6f, + 0x6d, 0x6d, 0x69, 0x74, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x22, 0x7f, 0x0a, 0x15, 0x53, 0x69, 0x67, + 0x6e, 0x65, 0x64, 0x42, 0x75, 0x69, 0x6c, 0x64, 0x65, 0x72, 0x42, 0x69, 0x64, 0x44, 0x65, 0x6e, + 0x65, 0x62, 0x12, 0x40, 0x0a, 0x07, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x18, 0x01, 0x20, + 0x01, 0x28, 0x0b, 0x32, 0x26, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, + 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x42, 0x75, 0x69, 0x6c, + 0x64, 0x65, 0x72, 0x42, 0x69, 0x64, 0x44, 0x65, 0x6e, 0x65, 0x62, 0x52, 0x07, 0x6d, 0x65, 0x73, + 0x73, 0x61, 0x67, 0x65, 0x12, 0x24, 0x0a, 0x09, 0x73, 0x69, 0x67, 0x6e, 0x61, 0x74, 0x75, 0x72, + 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x06, 0x8a, 0xb5, 0x18, 0x02, 0x39, 0x36, 0x52, + 0x09, 0x73, 0x69, 0x67, 0x6e, 0x61, 0x74, 0x75, 0x72, 0x65, 0x22, 0xdc, 0x01, 0x0a, 0x0f, 0x42, + 0x75, 0x69, 0x6c, 0x64, 0x65, 0x72, 0x42, 0x69, 0x64, 0x44, 0x65, 0x6e, 0x65, 0x62, 0x12, 0x47, + 0x0a, 0x06, 0x68, 0x65, 0x61, 0x64, 0x65, 0x72, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x2f, + 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x6e, 0x67, 0x69, 0x6e, 0x65, + 0x2e, 0x76, 0x31, 0x2e, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x50, 0x61, 0x79, + 0x6c, 0x6f, 0x61, 0x64, 0x48, 0x65, 0x61, 0x64, 0x65, 0x72, 0x44, 0x65, 0x6e, 0x65, 0x62, 0x52, + 0x06, 0x68, 0x65, 0x61, 0x64, 0x65, 0x72, 0x12, 0x42, 0x0a, 0x14, 0x62, 0x6c, 0x6f, 0x62, 0x5f, + 0x6b, 0x7a, 0x67, 0x5f, 0x63, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x18, + 0x02, 0x20, 0x03, 0x28, 0x0c, 0x42, 0x10, 0x8a, 0xb5, 0x18, 0x04, 0x3f, 0x2c, 0x34, 0x38, 0x92, + 0xb5, 0x18, 0x04, 0x34, 0x30, 0x39, 0x36, 0x52, 0x12, 0x62, 0x6c, 0x6f, 0x62, 0x4b, 0x7a, 0x67, + 0x43, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x12, 0x1c, 0x0a, 0x05, 0x76, + 0x61, 0x6c, 0x75, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x06, 0x8a, 0xb5, 0x18, 0x02, + 0x33, 0x32, 0x52, 0x05, 0x76, 0x61, 0x6c, 
0x75, 0x65, 0x12, 0x1e, 0x0a, 0x06, 0x70, 0x75, 0x62, + 0x6b, 0x65, 0x79, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x06, 0x8a, 0xb5, 0x18, 0x02, 0x34, + 0x38, 0x52, 0x06, 0x70, 0x75, 0x62, 0x6b, 0x65, 0x79, 0x22, 0xb4, 0x02, 0x0a, 0x11, 0x42, 0x75, + 0x69, 0x6c, 0x64, 0x65, 0x72, 0x42, 0x69, 0x64, 0x45, 0x6c, 0x65, 0x63, 0x74, 0x72, 0x61, 0x12, + 0x47, 0x0a, 0x06, 0x68, 0x65, 0x61, 0x64, 0x65, 0x72, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, + 0x2f, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x6e, 0x67, 0x69, 0x6e, + 0x65, 0x2e, 0x76, 0x31, 0x2e, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x50, 0x61, + 0x79, 0x6c, 0x6f, 0x61, 0x64, 0x48, 0x65, 0x61, 0x64, 0x65, 0x72, 0x44, 0x65, 0x6e, 0x65, 0x62, + 0x52, 0x06, 0x68, 0x65, 0x61, 0x64, 0x65, 0x72, 0x12, 0x42, 0x0a, 0x14, 0x62, 0x6c, 0x6f, 0x62, + 0x5f, 0x6b, 0x7a, 0x67, 0x5f, 0x63, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x6d, 0x65, 0x6e, 0x74, 0x73, + 0x18, 0x02, 0x20, 0x03, 0x28, 0x0c, 0x42, 0x10, 0x8a, 0xb5, 0x18, 0x04, 0x3f, 0x2c, 0x34, 0x38, + 0x92, 0xb5, 0x18, 0x04, 0x34, 0x30, 0x39, 0x36, 0x52, 0x12, 0x62, 0x6c, 0x6f, 0x62, 0x4b, 0x7a, + 0x67, 0x43, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x12, 0x54, 0x0a, 0x12, + 0x65, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x72, 0x65, 0x71, 0x75, 0x65, 0x73, + 0x74, 0x73, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x25, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, + 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x6e, 0x67, 0x69, 0x6e, 0x65, 0x2e, 0x76, 0x31, 0x2e, 0x45, 0x78, + 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x73, 0x52, + 0x11, 0x65, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, + 0x74, 0x73, 0x12, 0x1c, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x04, 0x20, 0x01, 0x28, + 0x0c, 0x42, 0x06, 0x8a, 0xb5, 0x18, 0x02, 0x33, 0x32, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, + 0x12, 0x1e, 0x0a, 0x06, 0x70, 0x75, 0x62, 0x6b, 0x65, 0x79, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0c, + 0x42, 0x06, 0x8a, 0xb5, 0x18, 0x02, 0x34, 0x38, 0x52, 0x06, 0x70, 0x75, 0x62, 0x6b, 0x65, 0x79, + 0x22, 0x83, 0x01, 0x0a, 0x17, 0x53, 0x69, 0x67, 0x6e, 0x65, 0x64, 0x42, 0x75, 0x69, 0x6c, 0x64, + 0x65, 0x72, 0x42, 0x69, 0x64, 0x45, 0x6c, 0x65, 0x63, 0x74, 0x72, 0x61, 0x12, 0x42, 0x0a, 0x07, + 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x28, 0x2e, + 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, + 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x42, 0x75, 0x69, 0x6c, 0x64, 0x65, 0x72, 0x42, 0x69, 0x64, + 0x45, 0x6c, 0x65, 0x63, 0x74, 0x72, 0x61, 0x52, 0x07, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, + 0x12, 0x24, 0x0a, 0x09, 0x73, 0x69, 0x67, 0x6e, 0x61, 0x74, 0x75, 0x72, 0x65, 0x18, 0x02, 0x20, + 0x01, 0x28, 0x0c, 0x42, 0x06, 0x8a, 0xb5, 0x18, 0x02, 0x39, 0x36, 0x52, 0x09, 0x73, 0x69, 0x67, + 0x6e, 0x61, 0x74, 0x75, 0x72, 0x65, 0x22, 0x55, 0x0a, 0x0c, 0x42, 0x6c, 0x6f, 0x62, 0x53, 0x69, + 0x64, 0x65, 0x63, 0x61, 0x72, 0x73, 0x12, 0x45, 0x0a, 0x08, 0x73, 0x69, 0x64, 0x65, 0x63, 0x61, + 0x72, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x22, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, + 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, + 0x2e, 0x42, 0x6c, 0x6f, 0x62, 0x53, 0x69, 0x64, 0x65, 0x63, 0x61, 0x72, 0x42, 0x05, 0x92, 0xb5, + 0x18, 0x01, 0x36, 0x52, 0x08, 0x73, 0x69, 0x64, 0x65, 0x63, 0x61, 0x72, 0x73, 0x22, 0xc0, 0x02, + 0x0a, 0x0b, 0x42, 0x6c, 0x6f, 0x62, 0x53, 0x69, 0x64, 0x65, 0x63, 
0x61, 0x72, 0x12, 0x14, 0x0a, + 0x05, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, 0x52, 0x05, 0x69, 0x6e, + 0x64, 0x65, 0x78, 0x12, 0x1e, 0x0a, 0x04, 0x62, 0x6c, 0x6f, 0x62, 0x18, 0x02, 0x20, 0x01, 0x28, + 0x0c, 0x42, 0x0a, 0x8a, 0xb5, 0x18, 0x06, 0x31, 0x33, 0x31, 0x30, 0x37, 0x32, 0x52, 0x04, 0x62, + 0x6c, 0x6f, 0x62, 0x12, 0x2d, 0x0a, 0x0e, 0x6b, 0x7a, 0x67, 0x5f, 0x63, 0x6f, 0x6d, 0x6d, 0x69, + 0x74, 0x6d, 0x65, 0x6e, 0x74, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x06, 0x8a, 0xb5, 0x18, + 0x02, 0x34, 0x38, 0x52, 0x0d, 0x6b, 0x7a, 0x67, 0x43, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x6d, 0x65, + 0x6e, 0x74, 0x12, 0x23, 0x0a, 0x09, 0x6b, 0x7a, 0x67, 0x5f, 0x70, 0x72, 0x6f, 0x6f, 0x66, 0x18, + 0x04, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x06, 0x8a, 0xb5, 0x18, 0x02, 0x34, 0x38, 0x52, 0x08, 0x6b, + 0x7a, 0x67, 0x50, 0x72, 0x6f, 0x6f, 0x66, 0x12, 0x5e, 0x0a, 0x13, 0x73, 0x69, 0x67, 0x6e, 0x65, + 0x64, 0x5f, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x5f, 0x68, 0x65, 0x61, 0x64, 0x65, 0x72, 0x18, 0x05, + 0x20, 0x01, 0x28, 0x0b, 0x32, 0x2e, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, + 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x53, 0x69, 0x67, + 0x6e, 0x65, 0x64, 0x42, 0x65, 0x61, 0x63, 0x6f, 0x6e, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x48, 0x65, + 0x61, 0x64, 0x65, 0x72, 0x52, 0x11, 0x73, 0x69, 0x67, 0x6e, 0x65, 0x64, 0x42, 0x6c, 0x6f, 0x63, + 0x6b, 0x48, 0x65, 0x61, 0x64, 0x65, 0x72, 0x12, 0x47, 0x0a, 0x1a, 0x63, 0x6f, 0x6d, 0x6d, 0x69, + 0x74, 0x6d, 0x65, 0x6e, 0x74, 0x5f, 0x69, 0x6e, 0x63, 0x6c, 0x75, 0x73, 0x69, 0x6f, 0x6e, 0x5f, + 0x70, 0x72, 0x6f, 0x6f, 0x66, 0x18, 0x06, 0x20, 0x03, 0x28, 0x0c, 0x42, 0x09, 0x8a, 0xb5, 0x18, + 0x05, 0x31, 0x37, 0x2c, 0x33, 0x32, 0x52, 0x18, 0x63, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x6d, 0x65, + 0x6e, 0x74, 0x49, 0x6e, 0x63, 0x6c, 0x75, 0x73, 0x69, 0x6f, 0x6e, 0x50, 0x72, 0x6f, 0x6f, 0x66, + 0x22, 0xc6, 0x01, 0x0a, 0x20, 0x53, 0x69, 0x67, 0x6e, 0x65, 0x64, 0x42, 0x65, 0x61, 0x63, 0x6f, + 0x6e, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x43, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x73, 0x45, 0x6c, + 0x65, 0x63, 0x74, 0x72, 0x61, 0x12, 0x45, 0x0a, 0x05, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x18, 0x01, + 0x20, 0x01, 0x28, 0x0b, 0x32, 0x2f, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, + 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x53, 0x69, 0x67, + 0x6e, 0x65, 0x64, 0x42, 0x65, 0x61, 0x63, 0x6f, 0x6e, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x45, 0x6c, + 0x65, 0x63, 0x74, 0x72, 0x61, 0x52, 0x05, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x12, 0x2f, 0x0a, 0x0a, + 0x6b, 0x7a, 0x67, 0x5f, 0x70, 0x72, 0x6f, 0x6f, 0x66, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0c, + 0x42, 0x10, 0x8a, 0xb5, 0x18, 0x04, 0x3f, 0x2c, 0x34, 0x38, 0x92, 0xb5, 0x18, 0x04, 0x34, 0x30, + 0x39, 0x36, 0x52, 0x09, 0x6b, 0x7a, 0x67, 0x50, 0x72, 0x6f, 0x6f, 0x66, 0x73, 0x12, 0x2a, 0x0a, + 0x05, 0x62, 0x6c, 0x6f, 0x62, 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, 0x0c, 0x42, 0x14, 0x8a, 0xb5, + 0x18, 0x08, 0x3f, 0x2c, 0x31, 0x33, 0x31, 0x30, 0x37, 0x32, 0x92, 0xb5, 0x18, 0x04, 0x34, 0x30, + 0x39, 0x36, 0x52, 0x05, 0x62, 0x6c, 0x6f, 0x62, 0x73, 0x22, 0x81, 0x01, 0x0a, 0x18, 0x53, 0x69, + 0x67, 0x6e, 0x65, 0x64, 0x42, 0x65, 0x61, 0x63, 0x6f, 0x6e, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x45, + 0x6c, 0x65, 0x63, 0x74, 0x72, 0x61, 0x12, 0x3f, 0x0a, 0x05, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x18, + 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x29, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, + 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x42, 
0x65, + 0x61, 0x63, 0x6f, 0x6e, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x45, 0x6c, 0x65, 0x63, 0x74, 0x72, 0x61, + 0x52, 0x05, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x12, 0x24, 0x0a, 0x09, 0x73, 0x69, 0x67, 0x6e, 0x61, + 0x74, 0x75, 0x72, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x06, 0x8a, 0xb5, 0x18, 0x02, + 0x39, 0x36, 0x52, 0x09, 0x73, 0x69, 0x67, 0x6e, 0x61, 0x74, 0x75, 0x72, 0x65, 0x22, 0xba, 0x01, + 0x0a, 0x1a, 0x42, 0x65, 0x61, 0x63, 0x6f, 0x6e, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x43, 0x6f, 0x6e, + 0x74, 0x65, 0x6e, 0x74, 0x73, 0x45, 0x6c, 0x65, 0x63, 0x74, 0x72, 0x61, 0x12, 0x3f, 0x0a, 0x05, + 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x29, 0x2e, 0x65, 0x74, + 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, + 0x68, 0x61, 0x31, 0x2e, 0x42, 0x65, 0x61, 0x63, 0x6f, 0x6e, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x45, + 0x6c, 0x65, 0x63, 0x74, 0x72, 0x61, 0x52, 0x05, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x12, 0x2f, 0x0a, + 0x0a, 0x6b, 0x7a, 0x67, 0x5f, 0x70, 0x72, 0x6f, 0x6f, 0x66, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, + 0x0c, 0x42, 0x10, 0x8a, 0xb5, 0x18, 0x04, 0x3f, 0x2c, 0x34, 0x38, 0x92, 0xb5, 0x18, 0x04, 0x34, + 0x30, 0x39, 0x36, 0x52, 0x09, 0x6b, 0x7a, 0x67, 0x50, 0x72, 0x6f, 0x6f, 0x66, 0x73, 0x12, 0x2a, + 0x0a, 0x05, 0x62, 0x6c, 0x6f, 0x62, 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, 0x0c, 0x42, 0x14, 0x8a, + 0xb5, 0x18, 0x08, 0x3f, 0x2c, 0x31, 0x33, 0x31, 0x30, 0x37, 0x32, 0x92, 0xb5, 0x18, 0x04, 0x34, + 0x30, 0x39, 0x36, 0x52, 0x05, 0x62, 0x6c, 0x6f, 0x62, 0x73, 0x22, 0xf8, 0x02, 0x0a, 0x12, 0x42, + 0x65, 0x61, 0x63, 0x6f, 0x6e, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x45, 0x6c, 0x65, 0x63, 0x74, 0x72, + 0x61, 0x12, 0x58, 0x0a, 0x04, 0x73, 0x6c, 0x6f, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, 0x42, + 0x44, 0x82, 0xb5, 0x18, 0x40, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, + 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, + 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, + 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, + 0x2e, 0x53, 0x6c, 0x6f, 0x74, 0x52, 0x04, 0x73, 0x6c, 0x6f, 0x74, 0x12, 0x75, 0x0a, 0x0e, 0x70, + 0x72, 0x6f, 0x70, 0x6f, 0x73, 0x65, 0x72, 0x5f, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x18, 0x02, 0x20, + 0x01, 0x28, 0x04, 0x42, 0x4e, 0x82, 0xb5, 0x18, 0x4a, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, + 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, + 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, + 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, + 0x69, 0x76, 0x65, 0x73, 0x2e, 0x56, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, 0x49, 0x6e, + 0x64, 0x65, 0x78, 0x52, 0x0d, 0x70, 0x72, 0x6f, 0x70, 0x6f, 0x73, 0x65, 0x72, 0x49, 0x6e, 0x64, + 0x65, 0x78, 0x12, 0x27, 0x0a, 0x0b, 0x70, 0x61, 0x72, 0x65, 0x6e, 0x74, 0x5f, 0x72, 0x6f, 0x6f, + 0x74, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x06, 0x8a, 0xb5, 0x18, 0x02, 0x33, 0x32, 0x52, + 0x0a, 0x70, 0x61, 0x72, 0x65, 0x6e, 0x74, 0x52, 0x6f, 0x6f, 0x74, 0x12, 0x25, 0x0a, 0x0a, 0x73, + 0x74, 0x61, 0x74, 0x65, 0x5f, 0x72, 0x6f, 0x6f, 0x74, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0c, 0x42, + 0x06, 0x8a, 0xb5, 0x18, 0x02, 0x33, 0x32, 0x52, 0x09, 0x73, 0x74, 0x61, 0x74, 0x65, 0x52, 0x6f, + 0x6f, 0x74, 0x12, 0x41, 0x0a, 0x04, 0x62, 0x6f, 0x64, 0x79, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, + 0x32, 0x2d, 0x2e, 
0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, + 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x42, 0x65, 0x61, 0x63, 0x6f, 0x6e, 0x42, + 0x6c, 0x6f, 0x63, 0x6b, 0x42, 0x6f, 0x64, 0x79, 0x45, 0x6c, 0x65, 0x63, 0x74, 0x72, 0x61, 0x52, + 0x04, 0x62, 0x6f, 0x64, 0x79, 0x22, 0x97, 0x08, 0x0a, 0x16, 0x42, 0x65, 0x61, 0x63, 0x6f, 0x6e, + 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x42, 0x6f, 0x64, 0x79, 0x45, 0x6c, 0x65, 0x63, 0x74, 0x72, 0x61, + 0x12, 0x2b, 0x0a, 0x0d, 0x72, 0x61, 0x6e, 0x64, 0x61, 0x6f, 0x5f, 0x72, 0x65, 0x76, 0x65, 0x61, + 0x6c, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x06, 0x8a, 0xb5, 0x18, 0x02, 0x39, 0x36, 0x52, + 0x0c, 0x72, 0x61, 0x6e, 0x64, 0x61, 0x6f, 0x52, 0x65, 0x76, 0x65, 0x61, 0x6c, 0x12, 0x3c, 0x0a, + 0x09, 0x65, 0x74, 0x68, 0x31, 0x5f, 0x64, 0x61, 0x74, 0x61, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, + 0x32, 0x1f, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, + 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x45, 0x74, 0x68, 0x31, 0x44, 0x61, 0x74, + 0x61, 0x52, 0x08, 0x65, 0x74, 0x68, 0x31, 0x44, 0x61, 0x74, 0x61, 0x12, 0x22, 0x0a, 0x08, 0x67, + 0x72, 0x61, 0x66, 0x66, 0x69, 0x74, 0x69, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x06, 0x8a, + 0xb5, 0x18, 0x02, 0x33, 0x32, 0x52, 0x08, 0x67, 0x72, 0x61, 0x66, 0x66, 0x69, 0x74, 0x69, 0x12, + 0x5e, 0x0a, 0x12, 0x70, 0x72, 0x6f, 0x70, 0x6f, 0x73, 0x65, 0x72, 0x5f, 0x73, 0x6c, 0x61, 0x73, + 0x68, 0x69, 0x6e, 0x67, 0x73, 0x18, 0x04, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x27, 0x2e, 0x65, 0x74, + 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, + 0x68, 0x61, 0x31, 0x2e, 0x50, 0x72, 0x6f, 0x70, 0x6f, 0x73, 0x65, 0x72, 0x53, 0x6c, 0x61, 0x73, + 0x68, 0x69, 0x6e, 0x67, 0x42, 0x06, 0x92, 0xb5, 0x18, 0x02, 0x31, 0x36, 0x52, 0x11, 0x70, 0x72, + 0x6f, 0x70, 0x6f, 0x73, 0x65, 0x72, 0x53, 0x6c, 0x61, 0x73, 0x68, 0x69, 0x6e, 0x67, 0x73, 0x12, + 0x64, 0x0a, 0x12, 0x61, 0x74, 0x74, 0x65, 0x73, 0x74, 0x65, 0x72, 0x5f, 0x73, 0x6c, 0x61, 0x73, + 0x68, 0x69, 0x6e, 0x67, 0x73, 0x18, 0x05, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x2e, 0x2e, 0x65, 0x74, + 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, + 0x68, 0x61, 0x31, 0x2e, 0x41, 0x74, 0x74, 0x65, 0x73, 0x74, 0x65, 0x72, 0x53, 0x6c, 0x61, 0x73, + 0x68, 0x69, 0x6e, 0x67, 0x45, 0x6c, 0x65, 0x63, 0x74, 0x72, 0x61, 0x42, 0x05, 0x92, 0xb5, 0x18, + 0x01, 0x31, 0x52, 0x11, 0x61, 0x74, 0x74, 0x65, 0x73, 0x74, 0x65, 0x72, 0x53, 0x6c, 0x61, 0x73, + 0x68, 0x69, 0x6e, 0x67, 0x73, 0x12, 0x54, 0x0a, 0x0c, 0x61, 0x74, 0x74, 0x65, 0x73, 0x74, 0x61, + 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x18, 0x06, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x29, 0x2e, 0x65, 0x74, + 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, + 0x68, 0x61, 0x31, 0x2e, 0x41, 0x74, 0x74, 0x65, 0x73, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x45, + 0x6c, 0x65, 0x63, 0x74, 0x72, 0x61, 0x42, 0x05, 0x92, 0xb5, 0x18, 0x01, 0x38, 0x52, 0x0c, 0x61, + 0x74, 0x74, 0x65, 0x73, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x42, 0x0a, 0x08, 0x64, + 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x73, 0x18, 0x07, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1e, 0x2e, + 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, + 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x44, 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x42, 0x06, 0x92, + 0xb5, 0x18, 0x02, 0x31, 0x36, 0x52, 0x08, 0x64, 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x73, 0x12, + 0x5b, 0x0a, 0x0f, 0x76, 0x6f, 0x6c, 0x75, 
0x6e, 0x74, 0x61, 0x72, 0x79, 0x5f, 0x65, 0x78, 0x69, + 0x74, 0x73, 0x18, 0x08, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x2a, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, + 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, + 0x2e, 0x53, 0x69, 0x67, 0x6e, 0x65, 0x64, 0x56, 0x6f, 0x6c, 0x75, 0x6e, 0x74, 0x61, 0x72, 0x79, + 0x45, 0x78, 0x69, 0x74, 0x42, 0x06, 0x92, 0xb5, 0x18, 0x02, 0x31, 0x36, 0x52, 0x0e, 0x76, 0x6f, + 0x6c, 0x75, 0x6e, 0x74, 0x61, 0x72, 0x79, 0x45, 0x78, 0x69, 0x74, 0x73, 0x12, 0x4b, 0x0a, 0x0e, + 0x73, 0x79, 0x6e, 0x63, 0x5f, 0x61, 0x67, 0x67, 0x72, 0x65, 0x67, 0x61, 0x74, 0x65, 0x18, 0x09, + 0x20, 0x01, 0x28, 0x0b, 0x32, 0x24, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, + 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x53, 0x79, 0x6e, + 0x63, 0x41, 0x67, 0x67, 0x72, 0x65, 0x67, 0x61, 0x74, 0x65, 0x52, 0x0d, 0x73, 0x79, 0x6e, 0x63, + 0x41, 0x67, 0x67, 0x72, 0x65, 0x67, 0x61, 0x74, 0x65, 0x12, 0x56, 0x0a, 0x11, 0x65, 0x78, 0x65, + 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x70, 0x61, 0x79, 0x6c, 0x6f, 0x61, 0x64, 0x18, 0x0a, + 0x20, 0x01, 0x28, 0x0b, 0x32, 0x29, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, + 0x65, 0x6e, 0x67, 0x69, 0x6e, 0x65, 0x2e, 0x76, 0x31, 0x2e, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, + 0x69, 0x6f, 0x6e, 0x50, 0x61, 0x79, 0x6c, 0x6f, 0x61, 0x64, 0x44, 0x65, 0x6e, 0x65, 0x62, 0x52, + 0x10, 0x65, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x50, 0x61, 0x79, 0x6c, 0x6f, 0x61, + 0x64, 0x12, 0x72, 0x0a, 0x18, 0x62, 0x6c, 0x73, 0x5f, 0x74, 0x6f, 0x5f, 0x65, 0x78, 0x65, 0x63, + 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x63, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x73, 0x18, 0x0b, 0x20, + 0x03, 0x28, 0x0b, 0x32, 0x31, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, + 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x53, 0x69, 0x67, 0x6e, + 0x65, 0x64, 0x42, 0x4c, 0x53, 0x54, 0x6f, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, + 0x43, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x42, 0x06, 0x92, 0xb5, 0x18, 0x02, 0x31, 0x36, 0x52, 0x15, + 0x62, 0x6c, 0x73, 0x54, 0x6f, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x43, 0x68, + 0x61, 0x6e, 0x67, 0x65, 0x73, 0x12, 0x42, 0x0a, 0x14, 0x62, 0x6c, 0x6f, 0x62, 0x5f, 0x6b, 0x7a, + 0x67, 0x5f, 0x63, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x18, 0x0c, 0x20, + 0x03, 0x28, 0x0c, 0x42, 0x10, 0x8a, 0xb5, 0x18, 0x04, 0x3f, 0x2c, 0x34, 0x38, 0x92, 0xb5, 0x18, + 0x04, 0x34, 0x30, 0x39, 0x36, 0x52, 0x12, 0x62, 0x6c, 0x6f, 0x62, 0x4b, 0x7a, 0x67, 0x43, 0x6f, + 0x6d, 0x6d, 0x69, 0x74, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x12, 0x54, 0x0a, 0x12, 0x65, 0x78, 0x65, + 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x72, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x73, 0x18, + 0x0d, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x25, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, + 0x2e, 0x65, 0x6e, 0x67, 0x69, 0x6e, 0x65, 0x2e, 0x76, 0x31, 0x2e, 0x45, 0x78, 0x65, 0x63, 0x75, + 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x73, 0x52, 0x11, 0x65, 0x78, + 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x73, 0x22, + 0x93, 0x01, 0x0a, 0x1f, 0x53, 0x69, 0x67, 0x6e, 0x65, 0x64, 0x42, 0x6c, 0x69, 0x6e, 0x64, 0x65, + 0x64, 0x42, 0x65, 0x61, 0x63, 0x6f, 0x6e, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x45, 0x6c, 0x65, 0x63, + 0x74, 0x72, 0x61, 0x12, 0x4a, 0x0a, 0x07, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x18, 0x01, + 0x20, 0x01, 0x28, 0x0b, 0x32, 0x30, 0x2e, 0x65, 0x74, 0x68, 0x65, 
0x72, 0x65, 0x75, 0x6d, 0x2e, + 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x42, 0x6c, 0x69, + 0x6e, 0x64, 0x65, 0x64, 0x42, 0x65, 0x61, 0x63, 0x6f, 0x6e, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x45, + 0x6c, 0x65, 0x63, 0x74, 0x72, 0x61, 0x52, 0x07, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x12, + 0x24, 0x0a, 0x09, 0x73, 0x69, 0x67, 0x6e, 0x61, 0x74, 0x75, 0x72, 0x65, 0x18, 0x02, 0x20, 0x01, + 0x28, 0x0c, 0x42, 0x06, 0x8a, 0xb5, 0x18, 0x02, 0x39, 0x36, 0x52, 0x09, 0x73, 0x69, 0x67, 0x6e, + 0x61, 0x74, 0x75, 0x72, 0x65, 0x22, 0x86, 0x03, 0x0a, 0x19, 0x42, 0x6c, 0x69, 0x6e, 0x64, 0x65, + 0x64, 0x42, 0x65, 0x61, 0x63, 0x6f, 0x6e, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x45, 0x6c, 0x65, 0x63, + 0x74, 0x72, 0x61, 0x12, 0x58, 0x0a, 0x04, 0x73, 0x6c, 0x6f, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, + 0x04, 0x42, 0x44, 0x82, 0xb5, 0x18, 0x40, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, - 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2f, 0x70, 0x72, - 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x3b, 0x65, 0x74, 0x68, - 0xaa, 0x02, 0x15, 0x45, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x45, 0x74, 0x68, 0x2e, - 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0xca, 0x02, 0x15, 0x45, 0x74, 0x68, 0x65, 0x72, - 0x65, 0x75, 0x6d, 0x5c, 0x45, 0x74, 0x68, 0x5c, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, - 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, + 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, + 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, + 0x65, 0x73, 0x2e, 0x53, 0x6c, 0x6f, 0x74, 0x52, 0x04, 0x73, 0x6c, 0x6f, 0x74, 0x12, 0x75, 0x0a, + 0x0e, 0x70, 0x72, 0x6f, 0x70, 0x6f, 0x73, 0x65, 0x72, 0x5f, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x18, + 0x02, 0x20, 0x01, 0x28, 0x04, 0x42, 0x4e, 0x82, 0xb5, 0x18, 0x4a, 0x67, 0x69, 0x74, 0x68, 0x75, + 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, + 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, + 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, + 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x56, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, + 0x49, 0x6e, 0x64, 0x65, 0x78, 0x52, 0x0d, 0x70, 0x72, 0x6f, 0x70, 0x6f, 0x73, 0x65, 0x72, 0x49, + 0x6e, 0x64, 0x65, 0x78, 0x12, 0x27, 0x0a, 0x0b, 0x70, 0x61, 0x72, 0x65, 0x6e, 0x74, 0x5f, 0x72, + 0x6f, 0x6f, 0x74, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x06, 0x8a, 0xb5, 0x18, 0x02, 0x33, + 0x32, 0x52, 0x0a, 0x70, 0x61, 0x72, 0x65, 0x6e, 0x74, 0x52, 0x6f, 0x6f, 0x74, 0x12, 0x25, 0x0a, + 0x0a, 0x73, 0x74, 0x61, 0x74, 0x65, 0x5f, 0x72, 0x6f, 0x6f, 0x74, 0x18, 0x04, 0x20, 0x01, 0x28, + 0x0c, 0x42, 0x06, 0x8a, 0xb5, 0x18, 0x02, 0x33, 0x32, 0x52, 0x09, 0x73, 0x74, 0x61, 0x74, 0x65, + 0x52, 0x6f, 0x6f, 0x74, 0x12, 0x48, 0x0a, 0x04, 0x62, 0x6f, 0x64, 0x79, 0x18, 0x05, 0x20, 0x01, + 0x28, 0x0b, 0x32, 0x34, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, + 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x42, 0x6c, 0x69, 0x6e, 0x64, + 0x65, 0x64, 0x42, 0x65, 0x61, 0x63, 0x6f, 0x6e, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x42, 0x6f, 0x64, + 0x79, 0x45, 0x6c, 0x65, 0x63, 0x74, 0x72, 0x61, 0x52, 0x04, 0x62, 0x6f, 0x64, 0x79, 0x22, 0xb1, + 0x08, 0x0a, 0x1d, 0x42, 0x6c, 0x69, 0x6e, 
0x64, 0x65, 0x64, 0x42, 0x65, 0x61, 0x63, 0x6f, 0x6e, + 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x42, 0x6f, 0x64, 0x79, 0x45, 0x6c, 0x65, 0x63, 0x74, 0x72, 0x61, + 0x12, 0x2b, 0x0a, 0x0d, 0x72, 0x61, 0x6e, 0x64, 0x61, 0x6f, 0x5f, 0x72, 0x65, 0x76, 0x65, 0x61, + 0x6c, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x06, 0x8a, 0xb5, 0x18, 0x02, 0x39, 0x36, 0x52, + 0x0c, 0x72, 0x61, 0x6e, 0x64, 0x61, 0x6f, 0x52, 0x65, 0x76, 0x65, 0x61, 0x6c, 0x12, 0x3c, 0x0a, + 0x09, 0x65, 0x74, 0x68, 0x31, 0x5f, 0x64, 0x61, 0x74, 0x61, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, + 0x32, 0x1f, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, + 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x45, 0x74, 0x68, 0x31, 0x44, 0x61, 0x74, + 0x61, 0x52, 0x08, 0x65, 0x74, 0x68, 0x31, 0x44, 0x61, 0x74, 0x61, 0x12, 0x22, 0x0a, 0x08, 0x67, + 0x72, 0x61, 0x66, 0x66, 0x69, 0x74, 0x69, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x06, 0x8a, + 0xb5, 0x18, 0x02, 0x33, 0x32, 0x52, 0x08, 0x67, 0x72, 0x61, 0x66, 0x66, 0x69, 0x74, 0x69, 0x12, + 0x5e, 0x0a, 0x12, 0x70, 0x72, 0x6f, 0x70, 0x6f, 0x73, 0x65, 0x72, 0x5f, 0x73, 0x6c, 0x61, 0x73, + 0x68, 0x69, 0x6e, 0x67, 0x73, 0x18, 0x04, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x27, 0x2e, 0x65, 0x74, + 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, + 0x68, 0x61, 0x31, 0x2e, 0x50, 0x72, 0x6f, 0x70, 0x6f, 0x73, 0x65, 0x72, 0x53, 0x6c, 0x61, 0x73, + 0x68, 0x69, 0x6e, 0x67, 0x42, 0x06, 0x92, 0xb5, 0x18, 0x02, 0x31, 0x36, 0x52, 0x11, 0x70, 0x72, + 0x6f, 0x70, 0x6f, 0x73, 0x65, 0x72, 0x53, 0x6c, 0x61, 0x73, 0x68, 0x69, 0x6e, 0x67, 0x73, 0x12, + 0x64, 0x0a, 0x12, 0x61, 0x74, 0x74, 0x65, 0x73, 0x74, 0x65, 0x72, 0x5f, 0x73, 0x6c, 0x61, 0x73, + 0x68, 0x69, 0x6e, 0x67, 0x73, 0x18, 0x05, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x2e, 0x2e, 0x65, 0x74, + 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, + 0x68, 0x61, 0x31, 0x2e, 0x41, 0x74, 0x74, 0x65, 0x73, 0x74, 0x65, 0x72, 0x53, 0x6c, 0x61, 0x73, + 0x68, 0x69, 0x6e, 0x67, 0x45, 0x6c, 0x65, 0x63, 0x74, 0x72, 0x61, 0x42, 0x05, 0x92, 0xb5, 0x18, + 0x01, 0x31, 0x52, 0x11, 0x61, 0x74, 0x74, 0x65, 0x73, 0x74, 0x65, 0x72, 0x53, 0x6c, 0x61, 0x73, + 0x68, 0x69, 0x6e, 0x67, 0x73, 0x12, 0x54, 0x0a, 0x0c, 0x61, 0x74, 0x74, 0x65, 0x73, 0x74, 0x61, + 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x18, 0x06, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x29, 0x2e, 0x65, 0x74, + 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, + 0x68, 0x61, 0x31, 0x2e, 0x41, 0x74, 0x74, 0x65, 0x73, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x45, + 0x6c, 0x65, 0x63, 0x74, 0x72, 0x61, 0x42, 0x05, 0x92, 0xb5, 0x18, 0x01, 0x38, 0x52, 0x0c, 0x61, + 0x74, 0x74, 0x65, 0x73, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x42, 0x0a, 0x08, 0x64, + 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x73, 0x18, 0x07, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1e, 0x2e, + 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, + 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x44, 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x42, 0x06, 0x92, + 0xb5, 0x18, 0x02, 0x31, 0x36, 0x52, 0x08, 0x64, 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x73, 0x12, + 0x5b, 0x0a, 0x0f, 0x76, 0x6f, 0x6c, 0x75, 0x6e, 0x74, 0x61, 0x72, 0x79, 0x5f, 0x65, 0x78, 0x69, + 0x74, 0x73, 0x18, 0x08, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x2a, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, + 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, + 0x2e, 0x53, 0x69, 0x67, 0x6e, 0x65, 0x64, 0x56, 0x6f, 0x6c, 0x75, 
0x6e, 0x74, 0x61, 0x72, 0x79, + 0x45, 0x78, 0x69, 0x74, 0x42, 0x06, 0x92, 0xb5, 0x18, 0x02, 0x31, 0x36, 0x52, 0x0e, 0x76, 0x6f, + 0x6c, 0x75, 0x6e, 0x74, 0x61, 0x72, 0x79, 0x45, 0x78, 0x69, 0x74, 0x73, 0x12, 0x4b, 0x0a, 0x0e, + 0x73, 0x79, 0x6e, 0x63, 0x5f, 0x61, 0x67, 0x67, 0x72, 0x65, 0x67, 0x61, 0x74, 0x65, 0x18, 0x09, + 0x20, 0x01, 0x28, 0x0b, 0x32, 0x24, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, + 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x53, 0x79, 0x6e, + 0x63, 0x41, 0x67, 0x67, 0x72, 0x65, 0x67, 0x61, 0x74, 0x65, 0x52, 0x0d, 0x73, 0x79, 0x6e, 0x63, + 0x41, 0x67, 0x67, 0x72, 0x65, 0x67, 0x61, 0x74, 0x65, 0x12, 0x69, 0x0a, 0x18, 0x65, 0x78, 0x65, + 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x70, 0x61, 0x79, 0x6c, 0x6f, 0x61, 0x64, 0x5f, 0x68, + 0x65, 0x61, 0x64, 0x65, 0x72, 0x18, 0x0a, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x2f, 0x2e, 0x65, 0x74, + 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x6e, 0x67, 0x69, 0x6e, 0x65, 0x2e, 0x76, 0x31, + 0x2e, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x50, 0x61, 0x79, 0x6c, 0x6f, 0x61, + 0x64, 0x48, 0x65, 0x61, 0x64, 0x65, 0x72, 0x44, 0x65, 0x6e, 0x65, 0x62, 0x52, 0x16, 0x65, 0x78, + 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x50, 0x61, 0x79, 0x6c, 0x6f, 0x61, 0x64, 0x48, 0x65, + 0x61, 0x64, 0x65, 0x72, 0x12, 0x72, 0x0a, 0x18, 0x62, 0x6c, 0x73, 0x5f, 0x74, 0x6f, 0x5f, 0x65, + 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x63, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x73, + 0x18, 0x0b, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x31, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, + 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x53, + 0x69, 0x67, 0x6e, 0x65, 0x64, 0x42, 0x4c, 0x53, 0x54, 0x6f, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, + 0x69, 0x6f, 0x6e, 0x43, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x42, 0x06, 0x92, 0xb5, 0x18, 0x02, 0x31, + 0x36, 0x52, 0x15, 0x62, 0x6c, 0x73, 0x54, 0x6f, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, + 0x6e, 0x43, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x73, 0x12, 0x42, 0x0a, 0x14, 0x62, 0x6c, 0x6f, 0x62, + 0x5f, 0x6b, 0x7a, 0x67, 0x5f, 0x63, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x6d, 0x65, 0x6e, 0x74, 0x73, + 0x18, 0x0c, 0x20, 0x03, 0x28, 0x0c, 0x42, 0x10, 0x8a, 0xb5, 0x18, 0x04, 0x3f, 0x2c, 0x34, 0x38, + 0x92, 0xb5, 0x18, 0x04, 0x34, 0x30, 0x39, 0x36, 0x52, 0x12, 0x62, 0x6c, 0x6f, 0x62, 0x4b, 0x7a, + 0x67, 0x43, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x12, 0x54, 0x0a, 0x12, + 0x65, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x72, 0x65, 0x71, 0x75, 0x65, 0x73, + 0x74, 0x73, 0x18, 0x0d, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x25, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, + 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x6e, 0x67, 0x69, 0x6e, 0x65, 0x2e, 0x76, 0x31, 0x2e, 0x45, 0x78, + 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x73, 0x52, + 0x11, 0x65, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, + 0x74, 0x73, 0x22, 0xc4, 0x01, 0x0a, 0x1d, 0x53, 0x69, 0x67, 0x6e, 0x65, 0x64, 0x42, 0x65, 0x61, + 0x63, 0x6f, 0x6e, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x43, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x73, + 0x46, 0x75, 0x6c, 0x75, 0x12, 0x42, 0x0a, 0x05, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x18, 0x01, 0x20, + 0x01, 0x28, 0x0b, 0x32, 0x2c, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, + 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x53, 0x69, 0x67, 0x6e, + 0x65, 0x64, 0x42, 0x65, 0x61, 0x63, 0x6f, 0x6e, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x46, 0x75, 
0x6c, + 0x75, 0x52, 0x05, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x12, 0x33, 0x0a, 0x0a, 0x6b, 0x7a, 0x67, 0x5f, + 0x70, 0x72, 0x6f, 0x6f, 0x66, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0c, 0x42, 0x14, 0x8a, 0xb5, + 0x18, 0x04, 0x3f, 0x2c, 0x34, 0x38, 0x92, 0xb5, 0x18, 0x08, 0x33, 0x33, 0x35, 0x35, 0x34, 0x34, + 0x33, 0x32, 0x52, 0x09, 0x6b, 0x7a, 0x67, 0x50, 0x72, 0x6f, 0x6f, 0x66, 0x73, 0x12, 0x2a, 0x0a, + 0x05, 0x62, 0x6c, 0x6f, 0x62, 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, 0x0c, 0x42, 0x14, 0x8a, 0xb5, + 0x18, 0x08, 0x3f, 0x2c, 0x31, 0x33, 0x31, 0x30, 0x37, 0x32, 0x92, 0xb5, 0x18, 0x04, 0x34, 0x30, + 0x39, 0x36, 0x52, 0x05, 0x62, 0x6c, 0x6f, 0x62, 0x73, 0x22, 0x7e, 0x0a, 0x15, 0x53, 0x69, 0x67, + 0x6e, 0x65, 0x64, 0x42, 0x65, 0x61, 0x63, 0x6f, 0x6e, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x46, 0x75, + 0x6c, 0x75, 0x12, 0x3f, 0x0a, 0x05, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x18, 0x01, 0x20, 0x01, 0x28, + 0x0b, 0x32, 0x29, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, + 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x42, 0x65, 0x61, 0x63, 0x6f, 0x6e, + 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x45, 0x6c, 0x65, 0x63, 0x74, 0x72, 0x61, 0x52, 0x05, 0x62, 0x6c, + 0x6f, 0x63, 0x6b, 0x12, 0x24, 0x0a, 0x09, 0x73, 0x69, 0x67, 0x6e, 0x61, 0x74, 0x75, 0x72, 0x65, + 0x18, 0x02, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x06, 0x8a, 0xb5, 0x18, 0x02, 0x39, 0x36, 0x52, 0x09, + 0x73, 0x69, 0x67, 0x6e, 0x61, 0x74, 0x75, 0x72, 0x65, 0x22, 0xbb, 0x01, 0x0a, 0x17, 0x42, 0x65, + 0x61, 0x63, 0x6f, 0x6e, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x43, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, + 0x73, 0x46, 0x75, 0x6c, 0x75, 0x12, 0x3f, 0x0a, 0x05, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x18, 0x01, + 0x20, 0x01, 0x28, 0x0b, 0x32, 0x29, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, + 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x42, 0x65, 0x61, + 0x63, 0x6f, 0x6e, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x45, 0x6c, 0x65, 0x63, 0x74, 0x72, 0x61, 0x52, + 0x05, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x12, 0x33, 0x0a, 0x0a, 0x6b, 0x7a, 0x67, 0x5f, 0x70, 0x72, + 0x6f, 0x6f, 0x66, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0c, 0x42, 0x14, 0x8a, 0xb5, 0x18, 0x04, + 0x3f, 0x2c, 0x34, 0x38, 0x92, 0xb5, 0x18, 0x08, 0x33, 0x33, 0x35, 0x35, 0x34, 0x34, 0x33, 0x32, + 0x52, 0x09, 0x6b, 0x7a, 0x67, 0x50, 0x72, 0x6f, 0x6f, 0x66, 0x73, 0x12, 0x2a, 0x0a, 0x05, 0x62, + 0x6c, 0x6f, 0x62, 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, 0x0c, 0x42, 0x14, 0x8a, 0xb5, 0x18, 0x08, + 0x3f, 0x2c, 0x31, 0x33, 0x31, 0x30, 0x37, 0x32, 0x92, 0xb5, 0x18, 0x04, 0x34, 0x30, 0x39, 0x36, + 0x52, 0x05, 0x62, 0x6c, 0x6f, 0x62, 0x73, 0x22, 0x8d, 0x01, 0x0a, 0x1c, 0x53, 0x69, 0x67, 0x6e, + 0x65, 0x64, 0x42, 0x6c, 0x69, 0x6e, 0x64, 0x65, 0x64, 0x42, 0x65, 0x61, 0x63, 0x6f, 0x6e, 0x42, + 0x6c, 0x6f, 0x63, 0x6b, 0x46, 0x75, 0x6c, 0x75, 0x12, 0x47, 0x0a, 0x07, 0x6d, 0x65, 0x73, 0x73, + 0x61, 0x67, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x2d, 0x2e, 0x65, 0x74, 0x68, 0x65, + 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, + 0x31, 0x2e, 0x42, 0x6c, 0x69, 0x6e, 0x64, 0x65, 0x64, 0x42, 0x65, 0x61, 0x63, 0x6f, 0x6e, 0x42, + 0x6c, 0x6f, 0x63, 0x6b, 0x46, 0x75, 0x6c, 0x75, 0x52, 0x07, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, + 0x65, 0x12, 0x24, 0x0a, 0x09, 0x73, 0x69, 0x67, 0x6e, 0x61, 0x74, 0x75, 0x72, 0x65, 0x18, 0x02, + 0x20, 0x01, 0x28, 0x0c, 0x42, 0x06, 0x8a, 0xb5, 0x18, 0x02, 0x39, 0x36, 0x52, 0x09, 0x73, 0x69, + 0x67, 0x6e, 0x61, 0x74, 0x75, 0x72, 0x65, 0x22, 0x83, 0x03, 0x0a, 0x16, 0x42, 0x6c, 0x69, 0x6e, + 0x64, 0x65, 0x64, 
0x42, 0x65, 0x61, 0x63, 0x6f, 0x6e, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x46, 0x75, + 0x6c, 0x75, 0x12, 0x58, 0x0a, 0x04, 0x73, 0x6c, 0x6f, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, + 0x42, 0x44, 0x82, 0xb5, 0x18, 0x40, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, + 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, + 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, + 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, + 0x73, 0x2e, 0x53, 0x6c, 0x6f, 0x74, 0x52, 0x04, 0x73, 0x6c, 0x6f, 0x74, 0x12, 0x75, 0x0a, 0x0e, + 0x70, 0x72, 0x6f, 0x70, 0x6f, 0x73, 0x65, 0x72, 0x5f, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x18, 0x02, + 0x20, 0x01, 0x28, 0x04, 0x42, 0x4e, 0x82, 0xb5, 0x18, 0x4a, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, + 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, + 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, + 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, + 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x56, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, 0x49, + 0x6e, 0x64, 0x65, 0x78, 0x52, 0x0d, 0x70, 0x72, 0x6f, 0x70, 0x6f, 0x73, 0x65, 0x72, 0x49, 0x6e, + 0x64, 0x65, 0x78, 0x12, 0x27, 0x0a, 0x0b, 0x70, 0x61, 0x72, 0x65, 0x6e, 0x74, 0x5f, 0x72, 0x6f, + 0x6f, 0x74, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x06, 0x8a, 0xb5, 0x18, 0x02, 0x33, 0x32, + 0x52, 0x0a, 0x70, 0x61, 0x72, 0x65, 0x6e, 0x74, 0x52, 0x6f, 0x6f, 0x74, 0x12, 0x25, 0x0a, 0x0a, + 0x73, 0x74, 0x61, 0x74, 0x65, 0x5f, 0x72, 0x6f, 0x6f, 0x74, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0c, + 0x42, 0x06, 0x8a, 0xb5, 0x18, 0x02, 0x33, 0x32, 0x52, 0x09, 0x73, 0x74, 0x61, 0x74, 0x65, 0x52, + 0x6f, 0x6f, 0x74, 0x12, 0x48, 0x0a, 0x04, 0x62, 0x6f, 0x64, 0x79, 0x18, 0x05, 0x20, 0x01, 0x28, + 0x0b, 0x32, 0x34, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, + 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x42, 0x6c, 0x69, 0x6e, 0x64, 0x65, + 0x64, 0x42, 0x65, 0x61, 0x63, 0x6f, 0x6e, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x42, 0x6f, 0x64, 0x79, + 0x45, 0x6c, 0x65, 0x63, 0x74, 0x72, 0x61, 0x52, 0x04, 0x62, 0x6f, 0x64, 0x79, 0x42, 0x9a, 0x01, + 0x0a, 0x19, 0x6f, 0x72, 0x67, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, + 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x42, 0x10, 0x42, 0x65, 0x61, + 0x63, 0x6f, 0x6e, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x50, 0x72, 0x6f, 0x74, 0x6f, 0x50, 0x01, 0x5a, + 0x39, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, + 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, + 0x36, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x31, + 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x3b, 0x65, 0x74, 0x68, 0xaa, 0x02, 0x15, 0x45, 0x74, 0x68, + 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x45, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, + 0x61, 0x31, 0xca, 0x02, 0x15, 0x45, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x5c, 0x45, 0x74, + 0x68, 0x5c, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, + 0x6f, 0x33, } var ( @@ -5843,24 +5886,26 @@ var file_proto_prysm_v1alpha1_beacon_block_proto_goTypes = []any{ (*BeaconBlockContentsFulu)(nil), // 53: ethereum.eth.v1alpha1.BeaconBlockContentsFulu (*SignedBlindedBeaconBlockFulu)(nil), // 54: 
ethereum.eth.v1alpha1.SignedBlindedBeaconBlockFulu (*BlindedBeaconBlockFulu)(nil), // 55: ethereum.eth.v1alpha1.BlindedBeaconBlockFulu - (*Eth1Data)(nil), // 56: ethereum.eth.v1alpha1.Eth1Data - (*ProposerSlashing)(nil), // 57: ethereum.eth.v1alpha1.ProposerSlashing - (*Attestation)(nil), // 58: ethereum.eth.v1alpha1.Attestation - (*Deposit)(nil), // 59: ethereum.eth.v1alpha1.Deposit - (*SignedVoluntaryExit)(nil), // 60: ethereum.eth.v1alpha1.SignedVoluntaryExit - (*AttestationData)(nil), // 61: ethereum.eth.v1alpha1.AttestationData - (*v1.ExecutionPayloadHeader)(nil), // 62: ethereum.engine.v1.ExecutionPayloadHeader - (*SyncAggregate)(nil), // 63: ethereum.eth.v1alpha1.SyncAggregate - (*v1.ExecutionPayload)(nil), // 64: ethereum.engine.v1.ExecutionPayload - (*v1.ExecutionPayloadCapella)(nil), // 65: ethereum.engine.v1.ExecutionPayloadCapella - (*SignedBLSToExecutionChange)(nil), // 66: ethereum.eth.v1alpha1.SignedBLSToExecutionChange - (*v1.ExecutionPayloadHeaderCapella)(nil), // 67: ethereum.engine.v1.ExecutionPayloadHeaderCapella - (*v1.ExecutionPayloadDeneb)(nil), // 68: ethereum.engine.v1.ExecutionPayloadDeneb - (*v1.ExecutionPayloadHeaderDeneb)(nil), // 69: ethereum.engine.v1.ExecutionPayloadHeaderDeneb - (*v1.ExecutionRequests)(nil), // 70: ethereum.engine.v1.ExecutionRequests - (*SignedBeaconBlockHeader)(nil), // 71: ethereum.eth.v1alpha1.SignedBeaconBlockHeader - (*AttesterSlashingElectra)(nil), // 72: ethereum.eth.v1alpha1.AttesterSlashingElectra - (*AttestationElectra)(nil), // 73: ethereum.eth.v1alpha1.AttestationElectra + (*SignedBeaconBlockGloas)(nil), // 56: ethereum.eth.v1alpha1.SignedBeaconBlockGloas + (*BeaconBlockGloas)(nil), // 57: ethereum.eth.v1alpha1.BeaconBlockGloas + (*Eth1Data)(nil), // 58: ethereum.eth.v1alpha1.Eth1Data + (*ProposerSlashing)(nil), // 59: ethereum.eth.v1alpha1.ProposerSlashing + (*Attestation)(nil), // 60: ethereum.eth.v1alpha1.Attestation + (*Deposit)(nil), // 61: ethereum.eth.v1alpha1.Deposit + (*SignedVoluntaryExit)(nil), // 62: ethereum.eth.v1alpha1.SignedVoluntaryExit + (*AttestationData)(nil), // 63: ethereum.eth.v1alpha1.AttestationData + (*v1.ExecutionPayloadHeader)(nil), // 64: ethereum.engine.v1.ExecutionPayloadHeader + (*SyncAggregate)(nil), // 65: ethereum.eth.v1alpha1.SyncAggregate + (*v1.ExecutionPayload)(nil), // 66: ethereum.engine.v1.ExecutionPayload + (*v1.ExecutionPayloadCapella)(nil), // 67: ethereum.engine.v1.ExecutionPayloadCapella + (*SignedBLSToExecutionChange)(nil), // 68: ethereum.eth.v1alpha1.SignedBLSToExecutionChange + (*v1.ExecutionPayloadHeaderCapella)(nil), // 69: ethereum.engine.v1.ExecutionPayloadHeaderCapella + (*v1.ExecutionPayloadDeneb)(nil), // 70: ethereum.engine.v1.ExecutionPayloadDeneb + (*v1.ExecutionPayloadHeaderDeneb)(nil), // 71: ethereum.engine.v1.ExecutionPayloadHeaderDeneb + (*v1.ExecutionRequests)(nil), // 72: ethereum.engine.v1.ExecutionRequests + (*SignedBeaconBlockHeader)(nil), // 73: ethereum.eth.v1alpha1.SignedBeaconBlockHeader + (*AttesterSlashingElectra)(nil), // 74: ethereum.eth.v1alpha1.AttesterSlashingElectra + (*AttestationElectra)(nil), // 75: ethereum.eth.v1alpha1.AttestationElectra } var file_proto_prysm_v1alpha1_beacon_block_proto_depIdxs = []int32{ 2, // 0: ethereum.eth.v1alpha1.GenericSignedBeaconBlock.phase0:type_name -> ethereum.eth.v1alpha1.SignedBeaconBlock @@ -5875,153 +5920,155 @@ var file_proto_prysm_v1alpha1_beacon_block_proto_depIdxs = []int32{ 48, // 9: ethereum.eth.v1alpha1.GenericSignedBeaconBlock.blinded_electra:type_name -> 
ethereum.eth.v1alpha1.SignedBlindedBeaconBlockElectra 51, // 10: ethereum.eth.v1alpha1.GenericSignedBeaconBlock.fulu:type_name -> ethereum.eth.v1alpha1.SignedBeaconBlockContentsFulu 54, // 11: ethereum.eth.v1alpha1.GenericSignedBeaconBlock.blinded_fulu:type_name -> ethereum.eth.v1alpha1.SignedBlindedBeaconBlockFulu - 3, // 12: ethereum.eth.v1alpha1.GenericBeaconBlock.phase0:type_name -> ethereum.eth.v1alpha1.BeaconBlock - 13, // 13: ethereum.eth.v1alpha1.GenericBeaconBlock.altair:type_name -> ethereum.eth.v1alpha1.BeaconBlockAltair - 16, // 14: ethereum.eth.v1alpha1.GenericBeaconBlock.bellatrix:type_name -> ethereum.eth.v1alpha1.BeaconBlockBellatrix - 19, // 15: ethereum.eth.v1alpha1.GenericBeaconBlock.blinded_bellatrix:type_name -> ethereum.eth.v1alpha1.BlindedBeaconBlockBellatrix - 22, // 16: ethereum.eth.v1alpha1.GenericBeaconBlock.capella:type_name -> ethereum.eth.v1alpha1.BeaconBlockCapella - 25, // 17: ethereum.eth.v1alpha1.GenericBeaconBlock.blinded_capella:type_name -> ethereum.eth.v1alpha1.BlindedBeaconBlockCapella - 31, // 18: ethereum.eth.v1alpha1.GenericBeaconBlock.deneb:type_name -> ethereum.eth.v1alpha1.BeaconBlockContentsDeneb - 35, // 19: ethereum.eth.v1alpha1.GenericBeaconBlock.blinded_deneb:type_name -> ethereum.eth.v1alpha1.BlindedBeaconBlockDeneb - 45, // 20: ethereum.eth.v1alpha1.GenericBeaconBlock.electra:type_name -> ethereum.eth.v1alpha1.BeaconBlockContentsElectra - 49, // 21: ethereum.eth.v1alpha1.GenericBeaconBlock.blinded_electra:type_name -> ethereum.eth.v1alpha1.BlindedBeaconBlockElectra - 53, // 22: ethereum.eth.v1alpha1.GenericBeaconBlock.fulu:type_name -> ethereum.eth.v1alpha1.BeaconBlockContentsFulu - 55, // 23: ethereum.eth.v1alpha1.GenericBeaconBlock.blinded_fulu:type_name -> ethereum.eth.v1alpha1.BlindedBeaconBlockFulu - 3, // 24: ethereum.eth.v1alpha1.SignedBeaconBlock.block:type_name -> ethereum.eth.v1alpha1.BeaconBlock - 4, // 25: ethereum.eth.v1alpha1.BeaconBlock.body:type_name -> ethereum.eth.v1alpha1.BeaconBlockBody - 56, // 26: ethereum.eth.v1alpha1.BeaconBlockBody.eth1_data:type_name -> ethereum.eth.v1alpha1.Eth1Data - 57, // 27: ethereum.eth.v1alpha1.BeaconBlockBody.proposer_slashings:type_name -> ethereum.eth.v1alpha1.ProposerSlashing - 5, // 28: ethereum.eth.v1alpha1.BeaconBlockBody.attester_slashings:type_name -> ethereum.eth.v1alpha1.AttesterSlashing - 58, // 29: ethereum.eth.v1alpha1.BeaconBlockBody.attestations:type_name -> ethereum.eth.v1alpha1.Attestation - 59, // 30: ethereum.eth.v1alpha1.BeaconBlockBody.deposits:type_name -> ethereum.eth.v1alpha1.Deposit - 60, // 31: ethereum.eth.v1alpha1.BeaconBlockBody.voluntary_exits:type_name -> ethereum.eth.v1alpha1.SignedVoluntaryExit - 6, // 32: ethereum.eth.v1alpha1.AttesterSlashing.attestation_1:type_name -> ethereum.eth.v1alpha1.IndexedAttestation - 6, // 33: ethereum.eth.v1alpha1.AttesterSlashing.attestation_2:type_name -> ethereum.eth.v1alpha1.IndexedAttestation - 61, // 34: ethereum.eth.v1alpha1.IndexedAttestation.data:type_name -> ethereum.eth.v1alpha1.AttestationData - 8, // 35: ethereum.eth.v1alpha1.SignedValidatorRegistrationsV1.messages:type_name -> ethereum.eth.v1alpha1.SignedValidatorRegistrationV1 - 9, // 36: ethereum.eth.v1alpha1.SignedValidatorRegistrationV1.message:type_name -> ethereum.eth.v1alpha1.ValidatorRegistrationV1 - 11, // 37: ethereum.eth.v1alpha1.SignedBuilderBid.message:type_name -> ethereum.eth.v1alpha1.BuilderBid - 62, // 38: ethereum.eth.v1alpha1.BuilderBid.header:type_name -> ethereum.engine.v1.ExecutionPayloadHeader - 13, // 39: 
ethereum.eth.v1alpha1.SignedBeaconBlockAltair.block:type_name -> ethereum.eth.v1alpha1.BeaconBlockAltair - 14, // 40: ethereum.eth.v1alpha1.BeaconBlockAltair.body:type_name -> ethereum.eth.v1alpha1.BeaconBlockBodyAltair - 56, // 41: ethereum.eth.v1alpha1.BeaconBlockBodyAltair.eth1_data:type_name -> ethereum.eth.v1alpha1.Eth1Data - 57, // 42: ethereum.eth.v1alpha1.BeaconBlockBodyAltair.proposer_slashings:type_name -> ethereum.eth.v1alpha1.ProposerSlashing - 5, // 43: ethereum.eth.v1alpha1.BeaconBlockBodyAltair.attester_slashings:type_name -> ethereum.eth.v1alpha1.AttesterSlashing - 58, // 44: ethereum.eth.v1alpha1.BeaconBlockBodyAltair.attestations:type_name -> ethereum.eth.v1alpha1.Attestation - 59, // 45: ethereum.eth.v1alpha1.BeaconBlockBodyAltair.deposits:type_name -> ethereum.eth.v1alpha1.Deposit - 60, // 46: ethereum.eth.v1alpha1.BeaconBlockBodyAltair.voluntary_exits:type_name -> ethereum.eth.v1alpha1.SignedVoluntaryExit - 63, // 47: ethereum.eth.v1alpha1.BeaconBlockBodyAltair.sync_aggregate:type_name -> ethereum.eth.v1alpha1.SyncAggregate - 16, // 48: ethereum.eth.v1alpha1.SignedBeaconBlockBellatrix.block:type_name -> ethereum.eth.v1alpha1.BeaconBlockBellatrix - 17, // 49: ethereum.eth.v1alpha1.BeaconBlockBellatrix.body:type_name -> ethereum.eth.v1alpha1.BeaconBlockBodyBellatrix - 56, // 50: ethereum.eth.v1alpha1.BeaconBlockBodyBellatrix.eth1_data:type_name -> ethereum.eth.v1alpha1.Eth1Data - 57, // 51: ethereum.eth.v1alpha1.BeaconBlockBodyBellatrix.proposer_slashings:type_name -> ethereum.eth.v1alpha1.ProposerSlashing - 5, // 52: ethereum.eth.v1alpha1.BeaconBlockBodyBellatrix.attester_slashings:type_name -> ethereum.eth.v1alpha1.AttesterSlashing - 58, // 53: ethereum.eth.v1alpha1.BeaconBlockBodyBellatrix.attestations:type_name -> ethereum.eth.v1alpha1.Attestation - 59, // 54: ethereum.eth.v1alpha1.BeaconBlockBodyBellatrix.deposits:type_name -> ethereum.eth.v1alpha1.Deposit - 60, // 55: ethereum.eth.v1alpha1.BeaconBlockBodyBellatrix.voluntary_exits:type_name -> ethereum.eth.v1alpha1.SignedVoluntaryExit - 63, // 56: ethereum.eth.v1alpha1.BeaconBlockBodyBellatrix.sync_aggregate:type_name -> ethereum.eth.v1alpha1.SyncAggregate - 64, // 57: ethereum.eth.v1alpha1.BeaconBlockBodyBellatrix.execution_payload:type_name -> ethereum.engine.v1.ExecutionPayload - 19, // 58: ethereum.eth.v1alpha1.SignedBlindedBeaconBlockBellatrix.block:type_name -> ethereum.eth.v1alpha1.BlindedBeaconBlockBellatrix - 20, // 59: ethereum.eth.v1alpha1.BlindedBeaconBlockBellatrix.body:type_name -> ethereum.eth.v1alpha1.BlindedBeaconBlockBodyBellatrix - 56, // 60: ethereum.eth.v1alpha1.BlindedBeaconBlockBodyBellatrix.eth1_data:type_name -> ethereum.eth.v1alpha1.Eth1Data - 57, // 61: ethereum.eth.v1alpha1.BlindedBeaconBlockBodyBellatrix.proposer_slashings:type_name -> ethereum.eth.v1alpha1.ProposerSlashing - 5, // 62: ethereum.eth.v1alpha1.BlindedBeaconBlockBodyBellatrix.attester_slashings:type_name -> ethereum.eth.v1alpha1.AttesterSlashing - 58, // 63: ethereum.eth.v1alpha1.BlindedBeaconBlockBodyBellatrix.attestations:type_name -> ethereum.eth.v1alpha1.Attestation - 59, // 64: ethereum.eth.v1alpha1.BlindedBeaconBlockBodyBellatrix.deposits:type_name -> ethereum.eth.v1alpha1.Deposit - 60, // 65: ethereum.eth.v1alpha1.BlindedBeaconBlockBodyBellatrix.voluntary_exits:type_name -> ethereum.eth.v1alpha1.SignedVoluntaryExit - 63, // 66: ethereum.eth.v1alpha1.BlindedBeaconBlockBodyBellatrix.sync_aggregate:type_name -> ethereum.eth.v1alpha1.SyncAggregate - 62, // 67: 
ethereum.eth.v1alpha1.BlindedBeaconBlockBodyBellatrix.execution_payload_header:type_name -> ethereum.engine.v1.ExecutionPayloadHeader - 22, // 68: ethereum.eth.v1alpha1.SignedBeaconBlockCapella.block:type_name -> ethereum.eth.v1alpha1.BeaconBlockCapella - 23, // 69: ethereum.eth.v1alpha1.BeaconBlockCapella.body:type_name -> ethereum.eth.v1alpha1.BeaconBlockBodyCapella - 56, // 70: ethereum.eth.v1alpha1.BeaconBlockBodyCapella.eth1_data:type_name -> ethereum.eth.v1alpha1.Eth1Data - 57, // 71: ethereum.eth.v1alpha1.BeaconBlockBodyCapella.proposer_slashings:type_name -> ethereum.eth.v1alpha1.ProposerSlashing - 5, // 72: ethereum.eth.v1alpha1.BeaconBlockBodyCapella.attester_slashings:type_name -> ethereum.eth.v1alpha1.AttesterSlashing - 58, // 73: ethereum.eth.v1alpha1.BeaconBlockBodyCapella.attestations:type_name -> ethereum.eth.v1alpha1.Attestation - 59, // 74: ethereum.eth.v1alpha1.BeaconBlockBodyCapella.deposits:type_name -> ethereum.eth.v1alpha1.Deposit - 60, // 75: ethereum.eth.v1alpha1.BeaconBlockBodyCapella.voluntary_exits:type_name -> ethereum.eth.v1alpha1.SignedVoluntaryExit - 63, // 76: ethereum.eth.v1alpha1.BeaconBlockBodyCapella.sync_aggregate:type_name -> ethereum.eth.v1alpha1.SyncAggregate - 65, // 77: ethereum.eth.v1alpha1.BeaconBlockBodyCapella.execution_payload:type_name -> ethereum.engine.v1.ExecutionPayloadCapella - 66, // 78: ethereum.eth.v1alpha1.BeaconBlockBodyCapella.bls_to_execution_changes:type_name -> ethereum.eth.v1alpha1.SignedBLSToExecutionChange - 25, // 79: ethereum.eth.v1alpha1.SignedBlindedBeaconBlockCapella.block:type_name -> ethereum.eth.v1alpha1.BlindedBeaconBlockCapella - 26, // 80: ethereum.eth.v1alpha1.BlindedBeaconBlockCapella.body:type_name -> ethereum.eth.v1alpha1.BlindedBeaconBlockBodyCapella - 56, // 81: ethereum.eth.v1alpha1.BlindedBeaconBlockBodyCapella.eth1_data:type_name -> ethereum.eth.v1alpha1.Eth1Data - 57, // 82: ethereum.eth.v1alpha1.BlindedBeaconBlockBodyCapella.proposer_slashings:type_name -> ethereum.eth.v1alpha1.ProposerSlashing - 5, // 83: ethereum.eth.v1alpha1.BlindedBeaconBlockBodyCapella.attester_slashings:type_name -> ethereum.eth.v1alpha1.AttesterSlashing - 58, // 84: ethereum.eth.v1alpha1.BlindedBeaconBlockBodyCapella.attestations:type_name -> ethereum.eth.v1alpha1.Attestation - 59, // 85: ethereum.eth.v1alpha1.BlindedBeaconBlockBodyCapella.deposits:type_name -> ethereum.eth.v1alpha1.Deposit - 60, // 86: ethereum.eth.v1alpha1.BlindedBeaconBlockBodyCapella.voluntary_exits:type_name -> ethereum.eth.v1alpha1.SignedVoluntaryExit - 63, // 87: ethereum.eth.v1alpha1.BlindedBeaconBlockBodyCapella.sync_aggregate:type_name -> ethereum.eth.v1alpha1.SyncAggregate - 67, // 88: ethereum.eth.v1alpha1.BlindedBeaconBlockBodyCapella.execution_payload_header:type_name -> ethereum.engine.v1.ExecutionPayloadHeaderCapella - 66, // 89: ethereum.eth.v1alpha1.BlindedBeaconBlockBodyCapella.bls_to_execution_changes:type_name -> ethereum.eth.v1alpha1.SignedBLSToExecutionChange - 28, // 90: ethereum.eth.v1alpha1.SignedBuilderBidCapella.message:type_name -> ethereum.eth.v1alpha1.BuilderBidCapella - 67, // 91: ethereum.eth.v1alpha1.BuilderBidCapella.header:type_name -> ethereum.engine.v1.ExecutionPayloadHeaderCapella - 30, // 92: ethereum.eth.v1alpha1.SignedBeaconBlockContentsDeneb.block:type_name -> ethereum.eth.v1alpha1.SignedBeaconBlockDeneb - 32, // 93: ethereum.eth.v1alpha1.SignedBeaconBlockDeneb.block:type_name -> ethereum.eth.v1alpha1.BeaconBlockDeneb - 32, // 94: ethereum.eth.v1alpha1.BeaconBlockContentsDeneb.block:type_name -> 
ethereum.eth.v1alpha1.BeaconBlockDeneb - 33, // 95: ethereum.eth.v1alpha1.BeaconBlockDeneb.body:type_name -> ethereum.eth.v1alpha1.BeaconBlockBodyDeneb - 56, // 96: ethereum.eth.v1alpha1.BeaconBlockBodyDeneb.eth1_data:type_name -> ethereum.eth.v1alpha1.Eth1Data - 57, // 97: ethereum.eth.v1alpha1.BeaconBlockBodyDeneb.proposer_slashings:type_name -> ethereum.eth.v1alpha1.ProposerSlashing - 5, // 98: ethereum.eth.v1alpha1.BeaconBlockBodyDeneb.attester_slashings:type_name -> ethereum.eth.v1alpha1.AttesterSlashing - 58, // 99: ethereum.eth.v1alpha1.BeaconBlockBodyDeneb.attestations:type_name -> ethereum.eth.v1alpha1.Attestation - 59, // 100: ethereum.eth.v1alpha1.BeaconBlockBodyDeneb.deposits:type_name -> ethereum.eth.v1alpha1.Deposit - 60, // 101: ethereum.eth.v1alpha1.BeaconBlockBodyDeneb.voluntary_exits:type_name -> ethereum.eth.v1alpha1.SignedVoluntaryExit - 63, // 102: ethereum.eth.v1alpha1.BeaconBlockBodyDeneb.sync_aggregate:type_name -> ethereum.eth.v1alpha1.SyncAggregate - 68, // 103: ethereum.eth.v1alpha1.BeaconBlockBodyDeneb.execution_payload:type_name -> ethereum.engine.v1.ExecutionPayloadDeneb - 66, // 104: ethereum.eth.v1alpha1.BeaconBlockBodyDeneb.bls_to_execution_changes:type_name -> ethereum.eth.v1alpha1.SignedBLSToExecutionChange - 35, // 105: ethereum.eth.v1alpha1.SignedBlindedBeaconBlockDeneb.message:type_name -> ethereum.eth.v1alpha1.BlindedBeaconBlockDeneb - 36, // 106: ethereum.eth.v1alpha1.BlindedBeaconBlockDeneb.body:type_name -> ethereum.eth.v1alpha1.BlindedBeaconBlockBodyDeneb - 56, // 107: ethereum.eth.v1alpha1.BlindedBeaconBlockBodyDeneb.eth1_data:type_name -> ethereum.eth.v1alpha1.Eth1Data - 57, // 108: ethereum.eth.v1alpha1.BlindedBeaconBlockBodyDeneb.proposer_slashings:type_name -> ethereum.eth.v1alpha1.ProposerSlashing - 5, // 109: ethereum.eth.v1alpha1.BlindedBeaconBlockBodyDeneb.attester_slashings:type_name -> ethereum.eth.v1alpha1.AttesterSlashing - 58, // 110: ethereum.eth.v1alpha1.BlindedBeaconBlockBodyDeneb.attestations:type_name -> ethereum.eth.v1alpha1.Attestation - 59, // 111: ethereum.eth.v1alpha1.BlindedBeaconBlockBodyDeneb.deposits:type_name -> ethereum.eth.v1alpha1.Deposit - 60, // 112: ethereum.eth.v1alpha1.BlindedBeaconBlockBodyDeneb.voluntary_exits:type_name -> ethereum.eth.v1alpha1.SignedVoluntaryExit - 63, // 113: ethereum.eth.v1alpha1.BlindedBeaconBlockBodyDeneb.sync_aggregate:type_name -> ethereum.eth.v1alpha1.SyncAggregate - 69, // 114: ethereum.eth.v1alpha1.BlindedBeaconBlockBodyDeneb.execution_payload_header:type_name -> ethereum.engine.v1.ExecutionPayloadHeaderDeneb - 66, // 115: ethereum.eth.v1alpha1.BlindedBeaconBlockBodyDeneb.bls_to_execution_changes:type_name -> ethereum.eth.v1alpha1.SignedBLSToExecutionChange - 38, // 116: ethereum.eth.v1alpha1.SignedBuilderBidDeneb.message:type_name -> ethereum.eth.v1alpha1.BuilderBidDeneb - 69, // 117: ethereum.eth.v1alpha1.BuilderBidDeneb.header:type_name -> ethereum.engine.v1.ExecutionPayloadHeaderDeneb - 69, // 118: ethereum.eth.v1alpha1.BuilderBidElectra.header:type_name -> ethereum.engine.v1.ExecutionPayloadHeaderDeneb - 70, // 119: ethereum.eth.v1alpha1.BuilderBidElectra.execution_requests:type_name -> ethereum.engine.v1.ExecutionRequests - 39, // 120: ethereum.eth.v1alpha1.SignedBuilderBidElectra.message:type_name -> ethereum.eth.v1alpha1.BuilderBidElectra - 42, // 121: ethereum.eth.v1alpha1.BlobSidecars.sidecars:type_name -> ethereum.eth.v1alpha1.BlobSidecar - 71, // 122: ethereum.eth.v1alpha1.BlobSidecar.signed_block_header:type_name -> ethereum.eth.v1alpha1.SignedBeaconBlockHeader - 44, // 
123: ethereum.eth.v1alpha1.SignedBeaconBlockContentsElectra.block:type_name -> ethereum.eth.v1alpha1.SignedBeaconBlockElectra - 46, // 124: ethereum.eth.v1alpha1.SignedBeaconBlockElectra.block:type_name -> ethereum.eth.v1alpha1.BeaconBlockElectra - 46, // 125: ethereum.eth.v1alpha1.BeaconBlockContentsElectra.block:type_name -> ethereum.eth.v1alpha1.BeaconBlockElectra - 47, // 126: ethereum.eth.v1alpha1.BeaconBlockElectra.body:type_name -> ethereum.eth.v1alpha1.BeaconBlockBodyElectra - 56, // 127: ethereum.eth.v1alpha1.BeaconBlockBodyElectra.eth1_data:type_name -> ethereum.eth.v1alpha1.Eth1Data - 57, // 128: ethereum.eth.v1alpha1.BeaconBlockBodyElectra.proposer_slashings:type_name -> ethereum.eth.v1alpha1.ProposerSlashing - 72, // 129: ethereum.eth.v1alpha1.BeaconBlockBodyElectra.attester_slashings:type_name -> ethereum.eth.v1alpha1.AttesterSlashingElectra - 73, // 130: ethereum.eth.v1alpha1.BeaconBlockBodyElectra.attestations:type_name -> ethereum.eth.v1alpha1.AttestationElectra - 59, // 131: ethereum.eth.v1alpha1.BeaconBlockBodyElectra.deposits:type_name -> ethereum.eth.v1alpha1.Deposit - 60, // 132: ethereum.eth.v1alpha1.BeaconBlockBodyElectra.voluntary_exits:type_name -> ethereum.eth.v1alpha1.SignedVoluntaryExit - 63, // 133: ethereum.eth.v1alpha1.BeaconBlockBodyElectra.sync_aggregate:type_name -> ethereum.eth.v1alpha1.SyncAggregate - 68, // 134: ethereum.eth.v1alpha1.BeaconBlockBodyElectra.execution_payload:type_name -> ethereum.engine.v1.ExecutionPayloadDeneb - 66, // 135: ethereum.eth.v1alpha1.BeaconBlockBodyElectra.bls_to_execution_changes:type_name -> ethereum.eth.v1alpha1.SignedBLSToExecutionChange - 70, // 136: ethereum.eth.v1alpha1.BeaconBlockBodyElectra.execution_requests:type_name -> ethereum.engine.v1.ExecutionRequests - 49, // 137: ethereum.eth.v1alpha1.SignedBlindedBeaconBlockElectra.message:type_name -> ethereum.eth.v1alpha1.BlindedBeaconBlockElectra - 50, // 138: ethereum.eth.v1alpha1.BlindedBeaconBlockElectra.body:type_name -> ethereum.eth.v1alpha1.BlindedBeaconBlockBodyElectra - 56, // 139: ethereum.eth.v1alpha1.BlindedBeaconBlockBodyElectra.eth1_data:type_name -> ethereum.eth.v1alpha1.Eth1Data - 57, // 140: ethereum.eth.v1alpha1.BlindedBeaconBlockBodyElectra.proposer_slashings:type_name -> ethereum.eth.v1alpha1.ProposerSlashing - 72, // 141: ethereum.eth.v1alpha1.BlindedBeaconBlockBodyElectra.attester_slashings:type_name -> ethereum.eth.v1alpha1.AttesterSlashingElectra - 73, // 142: ethereum.eth.v1alpha1.BlindedBeaconBlockBodyElectra.attestations:type_name -> ethereum.eth.v1alpha1.AttestationElectra - 59, // 143: ethereum.eth.v1alpha1.BlindedBeaconBlockBodyElectra.deposits:type_name -> ethereum.eth.v1alpha1.Deposit - 60, // 144: ethereum.eth.v1alpha1.BlindedBeaconBlockBodyElectra.voluntary_exits:type_name -> ethereum.eth.v1alpha1.SignedVoluntaryExit - 63, // 145: ethereum.eth.v1alpha1.BlindedBeaconBlockBodyElectra.sync_aggregate:type_name -> ethereum.eth.v1alpha1.SyncAggregate - 69, // 146: ethereum.eth.v1alpha1.BlindedBeaconBlockBodyElectra.execution_payload_header:type_name -> ethereum.engine.v1.ExecutionPayloadHeaderDeneb - 66, // 147: ethereum.eth.v1alpha1.BlindedBeaconBlockBodyElectra.bls_to_execution_changes:type_name -> ethereum.eth.v1alpha1.SignedBLSToExecutionChange - 70, // 148: ethereum.eth.v1alpha1.BlindedBeaconBlockBodyElectra.execution_requests:type_name -> ethereum.engine.v1.ExecutionRequests - 52, // 149: ethereum.eth.v1alpha1.SignedBeaconBlockContentsFulu.block:type_name -> ethereum.eth.v1alpha1.SignedBeaconBlockFulu - 46, // 150: 
ethereum.eth.v1alpha1.SignedBeaconBlockFulu.block:type_name -> ethereum.eth.v1alpha1.BeaconBlockElectra - 46, // 151: ethereum.eth.v1alpha1.BeaconBlockContentsFulu.block:type_name -> ethereum.eth.v1alpha1.BeaconBlockElectra - 55, // 152: ethereum.eth.v1alpha1.SignedBlindedBeaconBlockFulu.message:type_name -> ethereum.eth.v1alpha1.BlindedBeaconBlockFulu - 50, // 153: ethereum.eth.v1alpha1.BlindedBeaconBlockFulu.body:type_name -> ethereum.eth.v1alpha1.BlindedBeaconBlockBodyElectra - 154, // [154:154] is the sub-list for method output_type - 154, // [154:154] is the sub-list for method input_type - 154, // [154:154] is the sub-list for extension type_name - 154, // [154:154] is the sub-list for extension extendee - 0, // [0:154] is the sub-list for field type_name + 56, // 12: ethereum.eth.v1alpha1.GenericSignedBeaconBlock.gloas:type_name -> ethereum.eth.v1alpha1.SignedBeaconBlockGloas + 3, // 13: ethereum.eth.v1alpha1.GenericBeaconBlock.phase0:type_name -> ethereum.eth.v1alpha1.BeaconBlock + 13, // 14: ethereum.eth.v1alpha1.GenericBeaconBlock.altair:type_name -> ethereum.eth.v1alpha1.BeaconBlockAltair + 16, // 15: ethereum.eth.v1alpha1.GenericBeaconBlock.bellatrix:type_name -> ethereum.eth.v1alpha1.BeaconBlockBellatrix + 19, // 16: ethereum.eth.v1alpha1.GenericBeaconBlock.blinded_bellatrix:type_name -> ethereum.eth.v1alpha1.BlindedBeaconBlockBellatrix + 22, // 17: ethereum.eth.v1alpha1.GenericBeaconBlock.capella:type_name -> ethereum.eth.v1alpha1.BeaconBlockCapella + 25, // 18: ethereum.eth.v1alpha1.GenericBeaconBlock.blinded_capella:type_name -> ethereum.eth.v1alpha1.BlindedBeaconBlockCapella + 31, // 19: ethereum.eth.v1alpha1.GenericBeaconBlock.deneb:type_name -> ethereum.eth.v1alpha1.BeaconBlockContentsDeneb + 35, // 20: ethereum.eth.v1alpha1.GenericBeaconBlock.blinded_deneb:type_name -> ethereum.eth.v1alpha1.BlindedBeaconBlockDeneb + 45, // 21: ethereum.eth.v1alpha1.GenericBeaconBlock.electra:type_name -> ethereum.eth.v1alpha1.BeaconBlockContentsElectra + 49, // 22: ethereum.eth.v1alpha1.GenericBeaconBlock.blinded_electra:type_name -> ethereum.eth.v1alpha1.BlindedBeaconBlockElectra + 53, // 23: ethereum.eth.v1alpha1.GenericBeaconBlock.fulu:type_name -> ethereum.eth.v1alpha1.BeaconBlockContentsFulu + 55, // 24: ethereum.eth.v1alpha1.GenericBeaconBlock.blinded_fulu:type_name -> ethereum.eth.v1alpha1.BlindedBeaconBlockFulu + 57, // 25: ethereum.eth.v1alpha1.GenericBeaconBlock.gloas:type_name -> ethereum.eth.v1alpha1.BeaconBlockGloas + 3, // 26: ethereum.eth.v1alpha1.SignedBeaconBlock.block:type_name -> ethereum.eth.v1alpha1.BeaconBlock + 4, // 27: ethereum.eth.v1alpha1.BeaconBlock.body:type_name -> ethereum.eth.v1alpha1.BeaconBlockBody + 58, // 28: ethereum.eth.v1alpha1.BeaconBlockBody.eth1_data:type_name -> ethereum.eth.v1alpha1.Eth1Data + 59, // 29: ethereum.eth.v1alpha1.BeaconBlockBody.proposer_slashings:type_name -> ethereum.eth.v1alpha1.ProposerSlashing + 5, // 30: ethereum.eth.v1alpha1.BeaconBlockBody.attester_slashings:type_name -> ethereum.eth.v1alpha1.AttesterSlashing + 60, // 31: ethereum.eth.v1alpha1.BeaconBlockBody.attestations:type_name -> ethereum.eth.v1alpha1.Attestation + 61, // 32: ethereum.eth.v1alpha1.BeaconBlockBody.deposits:type_name -> ethereum.eth.v1alpha1.Deposit + 62, // 33: ethereum.eth.v1alpha1.BeaconBlockBody.voluntary_exits:type_name -> ethereum.eth.v1alpha1.SignedVoluntaryExit + 6, // 34: ethereum.eth.v1alpha1.AttesterSlashing.attestation_1:type_name -> ethereum.eth.v1alpha1.IndexedAttestation + 6, // 35: 
ethereum.eth.v1alpha1.AttesterSlashing.attestation_2:type_name -> ethereum.eth.v1alpha1.IndexedAttestation + 63, // 36: ethereum.eth.v1alpha1.IndexedAttestation.data:type_name -> ethereum.eth.v1alpha1.AttestationData + 8, // 37: ethereum.eth.v1alpha1.SignedValidatorRegistrationsV1.messages:type_name -> ethereum.eth.v1alpha1.SignedValidatorRegistrationV1 + 9, // 38: ethereum.eth.v1alpha1.SignedValidatorRegistrationV1.message:type_name -> ethereum.eth.v1alpha1.ValidatorRegistrationV1 + 11, // 39: ethereum.eth.v1alpha1.SignedBuilderBid.message:type_name -> ethereum.eth.v1alpha1.BuilderBid + 64, // 40: ethereum.eth.v1alpha1.BuilderBid.header:type_name -> ethereum.engine.v1.ExecutionPayloadHeader + 13, // 41: ethereum.eth.v1alpha1.SignedBeaconBlockAltair.block:type_name -> ethereum.eth.v1alpha1.BeaconBlockAltair + 14, // 42: ethereum.eth.v1alpha1.BeaconBlockAltair.body:type_name -> ethereum.eth.v1alpha1.BeaconBlockBodyAltair + 58, // 43: ethereum.eth.v1alpha1.BeaconBlockBodyAltair.eth1_data:type_name -> ethereum.eth.v1alpha1.Eth1Data + 59, // 44: ethereum.eth.v1alpha1.BeaconBlockBodyAltair.proposer_slashings:type_name -> ethereum.eth.v1alpha1.ProposerSlashing + 5, // 45: ethereum.eth.v1alpha1.BeaconBlockBodyAltair.attester_slashings:type_name -> ethereum.eth.v1alpha1.AttesterSlashing + 60, // 46: ethereum.eth.v1alpha1.BeaconBlockBodyAltair.attestations:type_name -> ethereum.eth.v1alpha1.Attestation + 61, // 47: ethereum.eth.v1alpha1.BeaconBlockBodyAltair.deposits:type_name -> ethereum.eth.v1alpha1.Deposit + 62, // 48: ethereum.eth.v1alpha1.BeaconBlockBodyAltair.voluntary_exits:type_name -> ethereum.eth.v1alpha1.SignedVoluntaryExit + 65, // 49: ethereum.eth.v1alpha1.BeaconBlockBodyAltair.sync_aggregate:type_name -> ethereum.eth.v1alpha1.SyncAggregate + 16, // 50: ethereum.eth.v1alpha1.SignedBeaconBlockBellatrix.block:type_name -> ethereum.eth.v1alpha1.BeaconBlockBellatrix + 17, // 51: ethereum.eth.v1alpha1.BeaconBlockBellatrix.body:type_name -> ethereum.eth.v1alpha1.BeaconBlockBodyBellatrix + 58, // 52: ethereum.eth.v1alpha1.BeaconBlockBodyBellatrix.eth1_data:type_name -> ethereum.eth.v1alpha1.Eth1Data + 59, // 53: ethereum.eth.v1alpha1.BeaconBlockBodyBellatrix.proposer_slashings:type_name -> ethereum.eth.v1alpha1.ProposerSlashing + 5, // 54: ethereum.eth.v1alpha1.BeaconBlockBodyBellatrix.attester_slashings:type_name -> ethereum.eth.v1alpha1.AttesterSlashing + 60, // 55: ethereum.eth.v1alpha1.BeaconBlockBodyBellatrix.attestations:type_name -> ethereum.eth.v1alpha1.Attestation + 61, // 56: ethereum.eth.v1alpha1.BeaconBlockBodyBellatrix.deposits:type_name -> ethereum.eth.v1alpha1.Deposit + 62, // 57: ethereum.eth.v1alpha1.BeaconBlockBodyBellatrix.voluntary_exits:type_name -> ethereum.eth.v1alpha1.SignedVoluntaryExit + 65, // 58: ethereum.eth.v1alpha1.BeaconBlockBodyBellatrix.sync_aggregate:type_name -> ethereum.eth.v1alpha1.SyncAggregate + 66, // 59: ethereum.eth.v1alpha1.BeaconBlockBodyBellatrix.execution_payload:type_name -> ethereum.engine.v1.ExecutionPayload + 19, // 60: ethereum.eth.v1alpha1.SignedBlindedBeaconBlockBellatrix.block:type_name -> ethereum.eth.v1alpha1.BlindedBeaconBlockBellatrix + 20, // 61: ethereum.eth.v1alpha1.BlindedBeaconBlockBellatrix.body:type_name -> ethereum.eth.v1alpha1.BlindedBeaconBlockBodyBellatrix + 58, // 62: ethereum.eth.v1alpha1.BlindedBeaconBlockBodyBellatrix.eth1_data:type_name -> ethereum.eth.v1alpha1.Eth1Data + 59, // 63: ethereum.eth.v1alpha1.BlindedBeaconBlockBodyBellatrix.proposer_slashings:type_name -> ethereum.eth.v1alpha1.ProposerSlashing + 5, // 64: 
ethereum.eth.v1alpha1.BlindedBeaconBlockBodyBellatrix.attester_slashings:type_name -> ethereum.eth.v1alpha1.AttesterSlashing + 60, // 65: ethereum.eth.v1alpha1.BlindedBeaconBlockBodyBellatrix.attestations:type_name -> ethereum.eth.v1alpha1.Attestation + 61, // 66: ethereum.eth.v1alpha1.BlindedBeaconBlockBodyBellatrix.deposits:type_name -> ethereum.eth.v1alpha1.Deposit + 62, // 67: ethereum.eth.v1alpha1.BlindedBeaconBlockBodyBellatrix.voluntary_exits:type_name -> ethereum.eth.v1alpha1.SignedVoluntaryExit + 65, // 68: ethereum.eth.v1alpha1.BlindedBeaconBlockBodyBellatrix.sync_aggregate:type_name -> ethereum.eth.v1alpha1.SyncAggregate + 64, // 69: ethereum.eth.v1alpha1.BlindedBeaconBlockBodyBellatrix.execution_payload_header:type_name -> ethereum.engine.v1.ExecutionPayloadHeader + 22, // 70: ethereum.eth.v1alpha1.SignedBeaconBlockCapella.block:type_name -> ethereum.eth.v1alpha1.BeaconBlockCapella + 23, // 71: ethereum.eth.v1alpha1.BeaconBlockCapella.body:type_name -> ethereum.eth.v1alpha1.BeaconBlockBodyCapella + 58, // 72: ethereum.eth.v1alpha1.BeaconBlockBodyCapella.eth1_data:type_name -> ethereum.eth.v1alpha1.Eth1Data + 59, // 73: ethereum.eth.v1alpha1.BeaconBlockBodyCapella.proposer_slashings:type_name -> ethereum.eth.v1alpha1.ProposerSlashing + 5, // 74: ethereum.eth.v1alpha1.BeaconBlockBodyCapella.attester_slashings:type_name -> ethereum.eth.v1alpha1.AttesterSlashing + 60, // 75: ethereum.eth.v1alpha1.BeaconBlockBodyCapella.attestations:type_name -> ethereum.eth.v1alpha1.Attestation + 61, // 76: ethereum.eth.v1alpha1.BeaconBlockBodyCapella.deposits:type_name -> ethereum.eth.v1alpha1.Deposit + 62, // 77: ethereum.eth.v1alpha1.BeaconBlockBodyCapella.voluntary_exits:type_name -> ethereum.eth.v1alpha1.SignedVoluntaryExit + 65, // 78: ethereum.eth.v1alpha1.BeaconBlockBodyCapella.sync_aggregate:type_name -> ethereum.eth.v1alpha1.SyncAggregate + 67, // 79: ethereum.eth.v1alpha1.BeaconBlockBodyCapella.execution_payload:type_name -> ethereum.engine.v1.ExecutionPayloadCapella + 68, // 80: ethereum.eth.v1alpha1.BeaconBlockBodyCapella.bls_to_execution_changes:type_name -> ethereum.eth.v1alpha1.SignedBLSToExecutionChange + 25, // 81: ethereum.eth.v1alpha1.SignedBlindedBeaconBlockCapella.block:type_name -> ethereum.eth.v1alpha1.BlindedBeaconBlockCapella + 26, // 82: ethereum.eth.v1alpha1.BlindedBeaconBlockCapella.body:type_name -> ethereum.eth.v1alpha1.BlindedBeaconBlockBodyCapella + 58, // 83: ethereum.eth.v1alpha1.BlindedBeaconBlockBodyCapella.eth1_data:type_name -> ethereum.eth.v1alpha1.Eth1Data + 59, // 84: ethereum.eth.v1alpha1.BlindedBeaconBlockBodyCapella.proposer_slashings:type_name -> ethereum.eth.v1alpha1.ProposerSlashing + 5, // 85: ethereum.eth.v1alpha1.BlindedBeaconBlockBodyCapella.attester_slashings:type_name -> ethereum.eth.v1alpha1.AttesterSlashing + 60, // 86: ethereum.eth.v1alpha1.BlindedBeaconBlockBodyCapella.attestations:type_name -> ethereum.eth.v1alpha1.Attestation + 61, // 87: ethereum.eth.v1alpha1.BlindedBeaconBlockBodyCapella.deposits:type_name -> ethereum.eth.v1alpha1.Deposit + 62, // 88: ethereum.eth.v1alpha1.BlindedBeaconBlockBodyCapella.voluntary_exits:type_name -> ethereum.eth.v1alpha1.SignedVoluntaryExit + 65, // 89: ethereum.eth.v1alpha1.BlindedBeaconBlockBodyCapella.sync_aggregate:type_name -> ethereum.eth.v1alpha1.SyncAggregate + 69, // 90: ethereum.eth.v1alpha1.BlindedBeaconBlockBodyCapella.execution_payload_header:type_name -> ethereum.engine.v1.ExecutionPayloadHeaderCapella + 68, // 91: 
ethereum.eth.v1alpha1.BlindedBeaconBlockBodyCapella.bls_to_execution_changes:type_name -> ethereum.eth.v1alpha1.SignedBLSToExecutionChange + 28, // 92: ethereum.eth.v1alpha1.SignedBuilderBidCapella.message:type_name -> ethereum.eth.v1alpha1.BuilderBidCapella + 69, // 93: ethereum.eth.v1alpha1.BuilderBidCapella.header:type_name -> ethereum.engine.v1.ExecutionPayloadHeaderCapella + 30, // 94: ethereum.eth.v1alpha1.SignedBeaconBlockContentsDeneb.block:type_name -> ethereum.eth.v1alpha1.SignedBeaconBlockDeneb + 32, // 95: ethereum.eth.v1alpha1.SignedBeaconBlockDeneb.block:type_name -> ethereum.eth.v1alpha1.BeaconBlockDeneb + 32, // 96: ethereum.eth.v1alpha1.BeaconBlockContentsDeneb.block:type_name -> ethereum.eth.v1alpha1.BeaconBlockDeneb + 33, // 97: ethereum.eth.v1alpha1.BeaconBlockDeneb.body:type_name -> ethereum.eth.v1alpha1.BeaconBlockBodyDeneb + 58, // 98: ethereum.eth.v1alpha1.BeaconBlockBodyDeneb.eth1_data:type_name -> ethereum.eth.v1alpha1.Eth1Data + 59, // 99: ethereum.eth.v1alpha1.BeaconBlockBodyDeneb.proposer_slashings:type_name -> ethereum.eth.v1alpha1.ProposerSlashing + 5, // 100: ethereum.eth.v1alpha1.BeaconBlockBodyDeneb.attester_slashings:type_name -> ethereum.eth.v1alpha1.AttesterSlashing + 60, // 101: ethereum.eth.v1alpha1.BeaconBlockBodyDeneb.attestations:type_name -> ethereum.eth.v1alpha1.Attestation + 61, // 102: ethereum.eth.v1alpha1.BeaconBlockBodyDeneb.deposits:type_name -> ethereum.eth.v1alpha1.Deposit + 62, // 103: ethereum.eth.v1alpha1.BeaconBlockBodyDeneb.voluntary_exits:type_name -> ethereum.eth.v1alpha1.SignedVoluntaryExit + 65, // 104: ethereum.eth.v1alpha1.BeaconBlockBodyDeneb.sync_aggregate:type_name -> ethereum.eth.v1alpha1.SyncAggregate + 70, // 105: ethereum.eth.v1alpha1.BeaconBlockBodyDeneb.execution_payload:type_name -> ethereum.engine.v1.ExecutionPayloadDeneb + 68, // 106: ethereum.eth.v1alpha1.BeaconBlockBodyDeneb.bls_to_execution_changes:type_name -> ethereum.eth.v1alpha1.SignedBLSToExecutionChange + 35, // 107: ethereum.eth.v1alpha1.SignedBlindedBeaconBlockDeneb.message:type_name -> ethereum.eth.v1alpha1.BlindedBeaconBlockDeneb + 36, // 108: ethereum.eth.v1alpha1.BlindedBeaconBlockDeneb.body:type_name -> ethereum.eth.v1alpha1.BlindedBeaconBlockBodyDeneb + 58, // 109: ethereum.eth.v1alpha1.BlindedBeaconBlockBodyDeneb.eth1_data:type_name -> ethereum.eth.v1alpha1.Eth1Data + 59, // 110: ethereum.eth.v1alpha1.BlindedBeaconBlockBodyDeneb.proposer_slashings:type_name -> ethereum.eth.v1alpha1.ProposerSlashing + 5, // 111: ethereum.eth.v1alpha1.BlindedBeaconBlockBodyDeneb.attester_slashings:type_name -> ethereum.eth.v1alpha1.AttesterSlashing + 60, // 112: ethereum.eth.v1alpha1.BlindedBeaconBlockBodyDeneb.attestations:type_name -> ethereum.eth.v1alpha1.Attestation + 61, // 113: ethereum.eth.v1alpha1.BlindedBeaconBlockBodyDeneb.deposits:type_name -> ethereum.eth.v1alpha1.Deposit + 62, // 114: ethereum.eth.v1alpha1.BlindedBeaconBlockBodyDeneb.voluntary_exits:type_name -> ethereum.eth.v1alpha1.SignedVoluntaryExit + 65, // 115: ethereum.eth.v1alpha1.BlindedBeaconBlockBodyDeneb.sync_aggregate:type_name -> ethereum.eth.v1alpha1.SyncAggregate + 71, // 116: ethereum.eth.v1alpha1.BlindedBeaconBlockBodyDeneb.execution_payload_header:type_name -> ethereum.engine.v1.ExecutionPayloadHeaderDeneb + 68, // 117: ethereum.eth.v1alpha1.BlindedBeaconBlockBodyDeneb.bls_to_execution_changes:type_name -> ethereum.eth.v1alpha1.SignedBLSToExecutionChange + 38, // 118: ethereum.eth.v1alpha1.SignedBuilderBidDeneb.message:type_name -> ethereum.eth.v1alpha1.BuilderBidDeneb + 71, // 119: 
ethereum.eth.v1alpha1.BuilderBidDeneb.header:type_name -> ethereum.engine.v1.ExecutionPayloadHeaderDeneb + 71, // 120: ethereum.eth.v1alpha1.BuilderBidElectra.header:type_name -> ethereum.engine.v1.ExecutionPayloadHeaderDeneb + 72, // 121: ethereum.eth.v1alpha1.BuilderBidElectra.execution_requests:type_name -> ethereum.engine.v1.ExecutionRequests + 39, // 122: ethereum.eth.v1alpha1.SignedBuilderBidElectra.message:type_name -> ethereum.eth.v1alpha1.BuilderBidElectra + 42, // 123: ethereum.eth.v1alpha1.BlobSidecars.sidecars:type_name -> ethereum.eth.v1alpha1.BlobSidecar + 73, // 124: ethereum.eth.v1alpha1.BlobSidecar.signed_block_header:type_name -> ethereum.eth.v1alpha1.SignedBeaconBlockHeader + 44, // 125: ethereum.eth.v1alpha1.SignedBeaconBlockContentsElectra.block:type_name -> ethereum.eth.v1alpha1.SignedBeaconBlockElectra + 46, // 126: ethereum.eth.v1alpha1.SignedBeaconBlockElectra.block:type_name -> ethereum.eth.v1alpha1.BeaconBlockElectra + 46, // 127: ethereum.eth.v1alpha1.BeaconBlockContentsElectra.block:type_name -> ethereum.eth.v1alpha1.BeaconBlockElectra + 47, // 128: ethereum.eth.v1alpha1.BeaconBlockElectra.body:type_name -> ethereum.eth.v1alpha1.BeaconBlockBodyElectra + 58, // 129: ethereum.eth.v1alpha1.BeaconBlockBodyElectra.eth1_data:type_name -> ethereum.eth.v1alpha1.Eth1Data + 59, // 130: ethereum.eth.v1alpha1.BeaconBlockBodyElectra.proposer_slashings:type_name -> ethereum.eth.v1alpha1.ProposerSlashing + 74, // 131: ethereum.eth.v1alpha1.BeaconBlockBodyElectra.attester_slashings:type_name -> ethereum.eth.v1alpha1.AttesterSlashingElectra + 75, // 132: ethereum.eth.v1alpha1.BeaconBlockBodyElectra.attestations:type_name -> ethereum.eth.v1alpha1.AttestationElectra + 61, // 133: ethereum.eth.v1alpha1.BeaconBlockBodyElectra.deposits:type_name -> ethereum.eth.v1alpha1.Deposit + 62, // 134: ethereum.eth.v1alpha1.BeaconBlockBodyElectra.voluntary_exits:type_name -> ethereum.eth.v1alpha1.SignedVoluntaryExit + 65, // 135: ethereum.eth.v1alpha1.BeaconBlockBodyElectra.sync_aggregate:type_name -> ethereum.eth.v1alpha1.SyncAggregate + 70, // 136: ethereum.eth.v1alpha1.BeaconBlockBodyElectra.execution_payload:type_name -> ethereum.engine.v1.ExecutionPayloadDeneb + 68, // 137: ethereum.eth.v1alpha1.BeaconBlockBodyElectra.bls_to_execution_changes:type_name -> ethereum.eth.v1alpha1.SignedBLSToExecutionChange + 72, // 138: ethereum.eth.v1alpha1.BeaconBlockBodyElectra.execution_requests:type_name -> ethereum.engine.v1.ExecutionRequests + 49, // 139: ethereum.eth.v1alpha1.SignedBlindedBeaconBlockElectra.message:type_name -> ethereum.eth.v1alpha1.BlindedBeaconBlockElectra + 50, // 140: ethereum.eth.v1alpha1.BlindedBeaconBlockElectra.body:type_name -> ethereum.eth.v1alpha1.BlindedBeaconBlockBodyElectra + 58, // 141: ethereum.eth.v1alpha1.BlindedBeaconBlockBodyElectra.eth1_data:type_name -> ethereum.eth.v1alpha1.Eth1Data + 59, // 142: ethereum.eth.v1alpha1.BlindedBeaconBlockBodyElectra.proposer_slashings:type_name -> ethereum.eth.v1alpha1.ProposerSlashing + 74, // 143: ethereum.eth.v1alpha1.BlindedBeaconBlockBodyElectra.attester_slashings:type_name -> ethereum.eth.v1alpha1.AttesterSlashingElectra + 75, // 144: ethereum.eth.v1alpha1.BlindedBeaconBlockBodyElectra.attestations:type_name -> ethereum.eth.v1alpha1.AttestationElectra + 61, // 145: ethereum.eth.v1alpha1.BlindedBeaconBlockBodyElectra.deposits:type_name -> ethereum.eth.v1alpha1.Deposit + 62, // 146: ethereum.eth.v1alpha1.BlindedBeaconBlockBodyElectra.voluntary_exits:type_name -> ethereum.eth.v1alpha1.SignedVoluntaryExit + 65, // 147: 
ethereum.eth.v1alpha1.BlindedBeaconBlockBodyElectra.sync_aggregate:type_name -> ethereum.eth.v1alpha1.SyncAggregate + 71, // 148: ethereum.eth.v1alpha1.BlindedBeaconBlockBodyElectra.execution_payload_header:type_name -> ethereum.engine.v1.ExecutionPayloadHeaderDeneb + 68, // 149: ethereum.eth.v1alpha1.BlindedBeaconBlockBodyElectra.bls_to_execution_changes:type_name -> ethereum.eth.v1alpha1.SignedBLSToExecutionChange + 72, // 150: ethereum.eth.v1alpha1.BlindedBeaconBlockBodyElectra.execution_requests:type_name -> ethereum.engine.v1.ExecutionRequests + 52, // 151: ethereum.eth.v1alpha1.SignedBeaconBlockContentsFulu.block:type_name -> ethereum.eth.v1alpha1.SignedBeaconBlockFulu + 46, // 152: ethereum.eth.v1alpha1.SignedBeaconBlockFulu.block:type_name -> ethereum.eth.v1alpha1.BeaconBlockElectra + 46, // 153: ethereum.eth.v1alpha1.BeaconBlockContentsFulu.block:type_name -> ethereum.eth.v1alpha1.BeaconBlockElectra + 55, // 154: ethereum.eth.v1alpha1.SignedBlindedBeaconBlockFulu.message:type_name -> ethereum.eth.v1alpha1.BlindedBeaconBlockFulu + 50, // 155: ethereum.eth.v1alpha1.BlindedBeaconBlockFulu.body:type_name -> ethereum.eth.v1alpha1.BlindedBeaconBlockBodyElectra + 156, // [156:156] is the sub-list for method output_type + 156, // [156:156] is the sub-list for method input_type + 156, // [156:156] is the sub-list for extension type_name + 156, // [156:156] is the sub-list for extension extendee + 0, // [0:156] is the sub-list for field type_name } func init() { file_proto_prysm_v1alpha1_beacon_block_proto_init() } @@ -6032,6 +6079,7 @@ func file_proto_prysm_v1alpha1_beacon_block_proto_init() { file_proto_prysm_v1alpha1_attestation_proto_init() file_proto_prysm_v1alpha1_withdrawals_proto_init() file_proto_prysm_v1alpha1_beacon_core_types_proto_init() + file_proto_prysm_v1alpha1_gloas_proto_init() file_proto_prysm_v1alpha1_beacon_block_proto_msgTypes[0].OneofWrappers = []any{ (*GenericSignedBeaconBlock_Phase0)(nil), (*GenericSignedBeaconBlock_Altair)(nil), @@ -6045,6 +6093,7 @@ func file_proto_prysm_v1alpha1_beacon_block_proto_init() { (*GenericSignedBeaconBlock_BlindedElectra)(nil), (*GenericSignedBeaconBlock_Fulu)(nil), (*GenericSignedBeaconBlock_BlindedFulu)(nil), + (*GenericSignedBeaconBlock_Gloas)(nil), } file_proto_prysm_v1alpha1_beacon_block_proto_msgTypes[1].OneofWrappers = []any{ (*GenericBeaconBlock_Phase0)(nil), @@ -6059,6 +6108,7 @@ func file_proto_prysm_v1alpha1_beacon_block_proto_init() { (*GenericBeaconBlock_BlindedElectra)(nil), (*GenericBeaconBlock_Fulu)(nil), (*GenericBeaconBlock_BlindedFulu)(nil), + (*GenericBeaconBlock_Gloas)(nil), } type x struct{} out := protoimpl.TypeBuilder{ diff --git a/proto/prysm/v1alpha1/beacon_block.proto b/proto/prysm/v1alpha1/beacon_block.proto index 6eedf8138a..82dbcdcfea 100644 --- a/proto/prysm/v1alpha1/beacon_block.proto +++ b/proto/prysm/v1alpha1/beacon_block.proto @@ -19,6 +19,7 @@ import "proto/eth/ext/options.proto"; import "proto/prysm/v1alpha1/attestation.proto"; import "proto/prysm/v1alpha1/withdrawals.proto"; import "proto/prysm/v1alpha1/beacon_core_types.proto"; +import "proto/prysm/v1alpha1/gloas.proto"; import "proto/engine/v1/execution_engine.proto"; import "proto/engine/v1/electra.proto"; @@ -72,6 +73,8 @@ message GenericSignedBeaconBlock { // Representing a signed, post-Fulu fork blinded beacon block. 
SignedBlindedBeaconBlockFulu blinded_fulu = 12; + + SignedBeaconBlockGloas gloas = 13; } bool is_blinded = 100; reserved 101; // Deprecated fields @@ -116,6 +119,8 @@ message GenericBeaconBlock { // Representing a post-Fulu fork blinded beacon block. BlindedBeaconBlockFulu blinded_fulu = 12; + + BeaconBlockGloas gloas = 13; } bool is_blinded = 100; string payload_value = 101; diff --git a/proto/prysm/v1alpha1/beacon_core_types.pb.go b/proto/prysm/v1alpha1/beacon_core_types.pb.go index 28077d3fb6..d63fec3a30 100755 --- a/proto/prysm/v1alpha1/beacon_core_types.pb.go +++ b/proto/prysm/v1alpha1/beacon_core_types.pb.go @@ -10,7 +10,7 @@ import ( reflect "reflect" sync "sync" - github_com_prysmaticlabs_go_bitfield "github.com/OffchainLabs/go-bitfield" + github_com_OffchainLabs_go_bitfield "github.com/OffchainLabs/go-bitfield" github_com_OffchainLabs_prysm_v6_consensus_types_primitives "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" _ "github.com/OffchainLabs/prysm/v6/proto/eth/ext" protoreflect "google.golang.org/protobuf/reflect/protoreflect" @@ -533,9 +533,9 @@ func (x *IndexedAttestationElectra) GetSignature() []byte { } type SyncAggregate struct { - state protoimpl.MessageState `protogen:"open.v1"` - SyncCommitteeBits github_com_prysmaticlabs_go_bitfield.Bitvector512 `protobuf:"bytes,1,opt,name=sync_committee_bits,json=syncCommitteeBits,proto3" json:"sync_committee_bits,omitempty" cast-type:"github.com/OffchainLabs/go-bitfield.Bitvector512" ssz-size:"64"` - SyncCommitteeSignature []byte `protobuf:"bytes,2,opt,name=sync_committee_signature,json=syncCommitteeSignature,proto3" json:"sync_committee_signature,omitempty" ssz-size:"96"` + state protoimpl.MessageState `protogen:"open.v1"` + SyncCommitteeBits github_com_OffchainLabs_go_bitfield.Bitvector512 `protobuf:"bytes,1,opt,name=sync_committee_bits,json=syncCommitteeBits,proto3" json:"sync_committee_bits,omitempty" cast-type:"github.com/OffchainLabs/go-bitfield.Bitvector512" ssz-size:"64"` + SyncCommitteeSignature []byte `protobuf:"bytes,2,opt,name=sync_committee_signature,json=syncCommitteeSignature,proto3" json:"sync_committee_signature,omitempty" ssz-size:"96"` unknownFields protoimpl.UnknownFields sizeCache protoimpl.SizeCache } @@ -570,11 +570,11 @@ func (*SyncAggregate) Descriptor() ([]byte, []int) { return file_proto_prysm_v1alpha1_beacon_core_types_proto_rawDescGZIP(), []int{9} } -func (x *SyncAggregate) GetSyncCommitteeBits() github_com_prysmaticlabs_go_bitfield.Bitvector512 { +func (x *SyncAggregate) GetSyncCommitteeBits() github_com_OffchainLabs_go_bitfield.Bitvector512 { if x != nil { return x.SyncCommitteeBits } - return github_com_prysmaticlabs_go_bitfield.Bitvector512(nil) + return github_com_OffchainLabs_go_bitfield.Bitvector512(nil) } func (x *SyncAggregate) GetSyncCommitteeSignature() []byte { @@ -1040,99 +1040,99 @@ var file_proto_prysm_v1alpha1_beacon_core_types_proto_rawDesc = []byte{ 0x65, 0x73, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x44, 0x61, 0x74, 0x61, 0x52, 0x04, 0x64, 0x61, 0x74, 0x61, 0x12, 0x24, 0x0a, 0x09, 0x73, 0x69, 0x67, 0x6e, 0x61, 0x74, 0x75, 0x72, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x06, 0x8a, 0xb5, 0x18, 0x02, 0x39, 0x36, 0x52, 0x09, 0x73, - 0x69, 0x67, 0x6e, 0x61, 0x74, 0x75, 0x72, 0x65, 0x22, 0xbe, 0x01, 0x0a, 0x0d, 0x53, 0x79, 0x6e, - 0x63, 0x41, 0x67, 0x67, 0x72, 0x65, 0x67, 0x61, 0x74, 0x65, 0x12, 0x6b, 0x0a, 0x13, 0x73, 0x79, + 0x69, 0x67, 0x6e, 0x61, 0x74, 0x75, 0x72, 0x65, 0x22, 0xbd, 0x01, 0x0a, 0x0d, 0x53, 0x79, 0x6e, + 0x63, 0x41, 0x67, 0x67, 0x72, 0x65, 0x67, 0x61, 0x74, 
0x65, 0x12, 0x6a, 0x0a, 0x13, 0x73, 0x79, 0x6e, 0x63, 0x5f, 0x63, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x74, 0x65, 0x65, 0x5f, 0x62, 0x69, 0x74, - 0x73, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x3b, 0x82, 0xb5, 0x18, 0x31, 0x67, 0x69, 0x74, - 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x61, 0x74, 0x69, - 0x63, 0x6c, 0x61, 0x62, 0x73, 0x2f, 0x67, 0x6f, 0x2d, 0x62, 0x69, 0x74, 0x66, 0x69, 0x65, 0x6c, - 0x64, 0x2e, 0x42, 0x69, 0x74, 0x76, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x35, 0x31, 0x32, 0x8a, 0xb5, - 0x18, 0x02, 0x36, 0x34, 0x52, 0x11, 0x73, 0x79, 0x6e, 0x63, 0x43, 0x6f, 0x6d, 0x6d, 0x69, 0x74, - 0x74, 0x65, 0x65, 0x42, 0x69, 0x74, 0x73, 0x12, 0x40, 0x0a, 0x18, 0x73, 0x79, 0x6e, 0x63, 0x5f, - 0x63, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x74, 0x65, 0x65, 0x5f, 0x73, 0x69, 0x67, 0x6e, 0x61, 0x74, - 0x75, 0x72, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x06, 0x8a, 0xb5, 0x18, 0x02, 0x39, - 0x36, 0x52, 0x16, 0x73, 0x79, 0x6e, 0x63, 0x43, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x74, 0x65, 0x65, - 0x53, 0x69, 0x67, 0x6e, 0x61, 0x74, 0x75, 0x72, 0x65, 0x22, 0xc5, 0x01, 0x0a, 0x04, 0x46, 0x6f, - 0x72, 0x6b, 0x12, 0x30, 0x0a, 0x10, 0x70, 0x72, 0x65, 0x76, 0x69, 0x6f, 0x75, 0x73, 0x5f, 0x76, - 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x05, 0x8a, 0xb5, - 0x18, 0x01, 0x34, 0x52, 0x0f, 0x70, 0x72, 0x65, 0x76, 0x69, 0x6f, 0x75, 0x73, 0x56, 0x65, 0x72, - 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x2e, 0x0a, 0x0f, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x5f, - 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x05, 0x8a, - 0xb5, 0x18, 0x01, 0x34, 0x52, 0x0e, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x56, 0x65, 0x72, - 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x5b, 0x0a, 0x05, 0x65, 0x70, 0x6f, 0x63, 0x68, 0x18, 0x03, 0x20, - 0x01, 0x28, 0x04, 0x42, 0x45, 0x82, 0xb5, 0x18, 0x41, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, - 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, - 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, - 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, - 0x69, 0x76, 0x65, 0x73, 0x2e, 0x45, 0x70, 0x6f, 0x63, 0x68, 0x52, 0x05, 0x65, 0x70, 0x6f, 0x63, - 0x68, 0x22, 0x68, 0x0a, 0x0d, 0x53, 0x79, 0x6e, 0x63, 0x43, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x74, - 0x65, 0x65, 0x12, 0x24, 0x0a, 0x07, 0x70, 0x75, 0x62, 0x6b, 0x65, 0x79, 0x73, 0x18, 0x01, 0x20, - 0x03, 0x28, 0x0c, 0x42, 0x0a, 0x8a, 0xb5, 0x18, 0x06, 0x35, 0x31, 0x32, 0x2c, 0x34, 0x38, 0x52, - 0x07, 0x70, 0x75, 0x62, 0x6b, 0x65, 0x79, 0x73, 0x12, 0x31, 0x0a, 0x10, 0x61, 0x67, 0x67, 0x72, - 0x65, 0x67, 0x61, 0x74, 0x65, 0x5f, 0x70, 0x75, 0x62, 0x6b, 0x65, 0x79, 0x18, 0x02, 0x20, 0x01, - 0x28, 0x0c, 0x42, 0x06, 0x8a, 0xb5, 0x18, 0x02, 0x34, 0x38, 0x52, 0x0f, 0x61, 0x67, 0x67, 0x72, - 0x65, 0x67, 0x61, 0x74, 0x65, 0x50, 0x75, 0x62, 0x6b, 0x65, 0x79, 0x22, 0x7f, 0x0a, 0x11, 0x48, - 0x69, 0x73, 0x74, 0x6f, 0x72, 0x69, 0x63, 0x61, 0x6c, 0x53, 0x75, 0x6d, 0x6d, 0x61, 0x72, 0x79, - 0x12, 0x34, 0x0a, 0x12, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x5f, 0x73, 0x75, 0x6d, 0x6d, 0x61, 0x72, - 0x79, 0x5f, 0x72, 0x6f, 0x6f, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x06, 0x8a, 0xb5, - 0x18, 0x02, 0x33, 0x32, 0x52, 0x10, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x53, 0x75, 0x6d, 0x6d, 0x61, - 0x72, 0x79, 0x52, 0x6f, 0x6f, 0x74, 0x12, 0x34, 0x0a, 0x12, 0x73, 0x74, 0x61, 0x74, 0x65, 0x5f, - 0x73, 0x75, 0x6d, 0x6d, 0x61, 0x72, 0x79, 0x5f, 0x72, 0x6f, 0x6f, 0x74, 0x18, 
0x02, 0x20, 0x01, - 0x28, 0x0c, 0x42, 0x06, 0x8a, 0xb5, 0x18, 0x02, 0x33, 0x32, 0x52, 0x10, 0x73, 0x74, 0x61, 0x74, - 0x65, 0x53, 0x75, 0x6d, 0x6d, 0x61, 0x72, 0x79, 0x52, 0x6f, 0x6f, 0x74, 0x22, 0x9a, 0x05, 0x0a, - 0x09, 0x56, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, 0x12, 0x2f, 0x0a, 0x0a, 0x70, 0x75, - 0x62, 0x6c, 0x69, 0x63, 0x5f, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x10, - 0x8a, 0xb5, 0x18, 0x02, 0x34, 0x38, 0x9a, 0xb5, 0x18, 0x06, 0x70, 0x75, 0x62, 0x6b, 0x65, 0x79, - 0x52, 0x09, 0x70, 0x75, 0x62, 0x6c, 0x69, 0x63, 0x4b, 0x65, 0x79, 0x12, 0x3d, 0x0a, 0x16, 0x77, - 0x69, 0x74, 0x68, 0x64, 0x72, 0x61, 0x77, 0x61, 0x6c, 0x5f, 0x63, 0x72, 0x65, 0x64, 0x65, 0x6e, - 0x74, 0x69, 0x61, 0x6c, 0x73, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x06, 0x8a, 0xb5, 0x18, - 0x02, 0x33, 0x32, 0x52, 0x15, 0x77, 0x69, 0x74, 0x68, 0x64, 0x72, 0x61, 0x77, 0x61, 0x6c, 0x43, - 0x72, 0x65, 0x64, 0x65, 0x6e, 0x74, 0x69, 0x61, 0x6c, 0x73, 0x12, 0x2b, 0x0a, 0x11, 0x65, 0x66, - 0x66, 0x65, 0x63, 0x74, 0x69, 0x76, 0x65, 0x5f, 0x62, 0x61, 0x6c, 0x61, 0x6e, 0x63, 0x65, 0x18, - 0x03, 0x20, 0x01, 0x28, 0x04, 0x52, 0x10, 0x65, 0x66, 0x66, 0x65, 0x63, 0x74, 0x69, 0x76, 0x65, - 0x42, 0x61, 0x6c, 0x61, 0x6e, 0x63, 0x65, 0x12, 0x18, 0x0a, 0x07, 0x73, 0x6c, 0x61, 0x73, 0x68, - 0x65, 0x64, 0x18, 0x04, 0x20, 0x01, 0x28, 0x08, 0x52, 0x07, 0x73, 0x6c, 0x61, 0x73, 0x68, 0x65, - 0x64, 0x12, 0x87, 0x01, 0x0a, 0x1c, 0x61, 0x63, 0x74, 0x69, 0x76, 0x61, 0x74, 0x69, 0x6f, 0x6e, - 0x5f, 0x65, 0x6c, 0x69, 0x67, 0x69, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x79, 0x5f, 0x65, 0x70, 0x6f, - 0x63, 0x68, 0x18, 0x05, 0x20, 0x01, 0x28, 0x04, 0x42, 0x45, 0x82, 0xb5, 0x18, 0x41, 0x67, 0x69, - 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, - 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, - 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, - 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x45, 0x70, 0x6f, 0x63, 0x68, 0x52, - 0x1a, 0x61, 0x63, 0x74, 0x69, 0x76, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x45, 0x6c, 0x69, 0x67, 0x69, - 0x62, 0x69, 0x6c, 0x69, 0x74, 0x79, 0x45, 0x70, 0x6f, 0x63, 0x68, 0x12, 0x70, 0x0a, 0x10, 0x61, - 0x63, 0x74, 0x69, 0x76, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x65, 0x70, 0x6f, 0x63, 0x68, 0x18, - 0x06, 0x20, 0x01, 0x28, 0x04, 0x42, 0x45, 0x82, 0xb5, 0x18, 0x41, 0x67, 0x69, 0x74, 0x68, 0x75, - 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, - 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, - 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, - 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x45, 0x70, 0x6f, 0x63, 0x68, 0x52, 0x0f, 0x61, 0x63, - 0x74, 0x69, 0x76, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x45, 0x70, 0x6f, 0x63, 0x68, 0x12, 0x64, 0x0a, - 0x0a, 0x65, 0x78, 0x69, 0x74, 0x5f, 0x65, 0x70, 0x6f, 0x63, 0x68, 0x18, 0x07, 0x20, 0x01, 0x28, - 0x04, 0x42, 0x45, 0x82, 0xb5, 0x18, 0x41, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, - 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, - 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, - 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, - 0x65, 0x73, 0x2e, 0x45, 0x70, 0x6f, 0x63, 0x68, 0x52, 0x09, 0x65, 0x78, 0x69, 0x74, 0x45, 0x70, - 0x6f, 
0x63, 0x68, 0x12, 0x74, 0x0a, 0x12, 0x77, 0x69, 0x74, 0x68, 0x64, 0x72, 0x61, 0x77, 0x61, - 0x62, 0x6c, 0x65, 0x5f, 0x65, 0x70, 0x6f, 0x63, 0x68, 0x18, 0x08, 0x20, 0x01, 0x28, 0x04, 0x42, - 0x45, 0x82, 0xb5, 0x18, 0x41, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, - 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, - 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, - 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, - 0x2e, 0x45, 0x70, 0x6f, 0x63, 0x68, 0x52, 0x11, 0x77, 0x69, 0x74, 0x68, 0x64, 0x72, 0x61, 0x77, - 0x61, 0x62, 0x6c, 0x65, 0x45, 0x70, 0x6f, 0x63, 0x68, 0x42, 0x9e, 0x01, 0x0a, 0x19, 0x6f, 0x72, - 0x67, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, - 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x42, 0x14, 0x42, 0x65, 0x61, 0x63, 0x6f, 0x6e, 0x43, - 0x6f, 0x72, 0x65, 0x54, 0x79, 0x70, 0x65, 0x73, 0x50, 0x72, 0x6f, 0x74, 0x6f, 0x50, 0x01, 0x5a, - 0x39, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, - 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, - 0x36, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x31, - 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x3b, 0x65, 0x74, 0x68, 0xaa, 0x02, 0x15, 0x45, 0x74, 0x68, - 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x45, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, - 0x61, 0x31, 0xca, 0x02, 0x15, 0x45, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x5c, 0x45, 0x74, - 0x68, 0x5c, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, - 0x6f, 0x33, + 0x73, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x3a, 0x82, 0xb5, 0x18, 0x30, 0x67, 0x69, 0x74, + 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, + 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x67, 0x6f, 0x2d, 0x62, 0x69, 0x74, 0x66, 0x69, 0x65, 0x6c, 0x64, + 0x2e, 0x42, 0x69, 0x74, 0x76, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x35, 0x31, 0x32, 0x8a, 0xb5, 0x18, + 0x02, 0x36, 0x34, 0x52, 0x11, 0x73, 0x79, 0x6e, 0x63, 0x43, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x74, + 0x65, 0x65, 0x42, 0x69, 0x74, 0x73, 0x12, 0x40, 0x0a, 0x18, 0x73, 0x79, 0x6e, 0x63, 0x5f, 0x63, + 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x74, 0x65, 0x65, 0x5f, 0x73, 0x69, 0x67, 0x6e, 0x61, 0x74, 0x75, + 0x72, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x06, 0x8a, 0xb5, 0x18, 0x02, 0x39, 0x36, + 0x52, 0x16, 0x73, 0x79, 0x6e, 0x63, 0x43, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x74, 0x65, 0x65, 0x53, + 0x69, 0x67, 0x6e, 0x61, 0x74, 0x75, 0x72, 0x65, 0x22, 0xc5, 0x01, 0x0a, 0x04, 0x46, 0x6f, 0x72, + 0x6b, 0x12, 0x30, 0x0a, 0x10, 0x70, 0x72, 0x65, 0x76, 0x69, 0x6f, 0x75, 0x73, 0x5f, 0x76, 0x65, + 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x05, 0x8a, 0xb5, 0x18, + 0x01, 0x34, 0x52, 0x0f, 0x70, 0x72, 0x65, 0x76, 0x69, 0x6f, 0x75, 0x73, 0x56, 0x65, 0x72, 0x73, + 0x69, 0x6f, 0x6e, 0x12, 0x2e, 0x0a, 0x0f, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x5f, 0x76, + 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x05, 0x8a, 0xb5, + 0x18, 0x01, 0x34, 0x52, 0x0e, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x56, 0x65, 0x72, 0x73, + 0x69, 0x6f, 0x6e, 0x12, 0x5b, 0x0a, 0x05, 0x65, 0x70, 0x6f, 0x63, 0x68, 0x18, 0x03, 0x20, 0x01, + 0x28, 0x04, 0x42, 0x45, 0x82, 0xb5, 0x18, 0x41, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, + 0x6f, 0x6d, 0x2f, 
0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, + 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, + 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, + 0x76, 0x65, 0x73, 0x2e, 0x45, 0x70, 0x6f, 0x63, 0x68, 0x52, 0x05, 0x65, 0x70, 0x6f, 0x63, 0x68, + 0x22, 0x68, 0x0a, 0x0d, 0x53, 0x79, 0x6e, 0x63, 0x43, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x74, 0x65, + 0x65, 0x12, 0x24, 0x0a, 0x07, 0x70, 0x75, 0x62, 0x6b, 0x65, 0x79, 0x73, 0x18, 0x01, 0x20, 0x03, + 0x28, 0x0c, 0x42, 0x0a, 0x8a, 0xb5, 0x18, 0x06, 0x35, 0x31, 0x32, 0x2c, 0x34, 0x38, 0x52, 0x07, + 0x70, 0x75, 0x62, 0x6b, 0x65, 0x79, 0x73, 0x12, 0x31, 0x0a, 0x10, 0x61, 0x67, 0x67, 0x72, 0x65, + 0x67, 0x61, 0x74, 0x65, 0x5f, 0x70, 0x75, 0x62, 0x6b, 0x65, 0x79, 0x18, 0x02, 0x20, 0x01, 0x28, + 0x0c, 0x42, 0x06, 0x8a, 0xb5, 0x18, 0x02, 0x34, 0x38, 0x52, 0x0f, 0x61, 0x67, 0x67, 0x72, 0x65, + 0x67, 0x61, 0x74, 0x65, 0x50, 0x75, 0x62, 0x6b, 0x65, 0x79, 0x22, 0x7f, 0x0a, 0x11, 0x48, 0x69, + 0x73, 0x74, 0x6f, 0x72, 0x69, 0x63, 0x61, 0x6c, 0x53, 0x75, 0x6d, 0x6d, 0x61, 0x72, 0x79, 0x12, + 0x34, 0x0a, 0x12, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x5f, 0x73, 0x75, 0x6d, 0x6d, 0x61, 0x72, 0x79, + 0x5f, 0x72, 0x6f, 0x6f, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x06, 0x8a, 0xb5, 0x18, + 0x02, 0x33, 0x32, 0x52, 0x10, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x53, 0x75, 0x6d, 0x6d, 0x61, 0x72, + 0x79, 0x52, 0x6f, 0x6f, 0x74, 0x12, 0x34, 0x0a, 0x12, 0x73, 0x74, 0x61, 0x74, 0x65, 0x5f, 0x73, + 0x75, 0x6d, 0x6d, 0x61, 0x72, 0x79, 0x5f, 0x72, 0x6f, 0x6f, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, + 0x0c, 0x42, 0x06, 0x8a, 0xb5, 0x18, 0x02, 0x33, 0x32, 0x52, 0x10, 0x73, 0x74, 0x61, 0x74, 0x65, + 0x53, 0x75, 0x6d, 0x6d, 0x61, 0x72, 0x79, 0x52, 0x6f, 0x6f, 0x74, 0x22, 0x9a, 0x05, 0x0a, 0x09, + 0x56, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, 0x12, 0x2f, 0x0a, 0x0a, 0x70, 0x75, 0x62, + 0x6c, 0x69, 0x63, 0x5f, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x10, 0x8a, + 0xb5, 0x18, 0x02, 0x34, 0x38, 0x9a, 0xb5, 0x18, 0x06, 0x70, 0x75, 0x62, 0x6b, 0x65, 0x79, 0x52, + 0x09, 0x70, 0x75, 0x62, 0x6c, 0x69, 0x63, 0x4b, 0x65, 0x79, 0x12, 0x3d, 0x0a, 0x16, 0x77, 0x69, + 0x74, 0x68, 0x64, 0x72, 0x61, 0x77, 0x61, 0x6c, 0x5f, 0x63, 0x72, 0x65, 0x64, 0x65, 0x6e, 0x74, + 0x69, 0x61, 0x6c, 0x73, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x06, 0x8a, 0xb5, 0x18, 0x02, + 0x33, 0x32, 0x52, 0x15, 0x77, 0x69, 0x74, 0x68, 0x64, 0x72, 0x61, 0x77, 0x61, 0x6c, 0x43, 0x72, + 0x65, 0x64, 0x65, 0x6e, 0x74, 0x69, 0x61, 0x6c, 0x73, 0x12, 0x2b, 0x0a, 0x11, 0x65, 0x66, 0x66, + 0x65, 0x63, 0x74, 0x69, 0x76, 0x65, 0x5f, 0x62, 0x61, 0x6c, 0x61, 0x6e, 0x63, 0x65, 0x18, 0x03, + 0x20, 0x01, 0x28, 0x04, 0x52, 0x10, 0x65, 0x66, 0x66, 0x65, 0x63, 0x74, 0x69, 0x76, 0x65, 0x42, + 0x61, 0x6c, 0x61, 0x6e, 0x63, 0x65, 0x12, 0x18, 0x0a, 0x07, 0x73, 0x6c, 0x61, 0x73, 0x68, 0x65, + 0x64, 0x18, 0x04, 0x20, 0x01, 0x28, 0x08, 0x52, 0x07, 0x73, 0x6c, 0x61, 0x73, 0x68, 0x65, 0x64, + 0x12, 0x87, 0x01, 0x0a, 0x1c, 0x61, 0x63, 0x74, 0x69, 0x76, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, + 0x65, 0x6c, 0x69, 0x67, 0x69, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x79, 0x5f, 0x65, 0x70, 0x6f, 0x63, + 0x68, 0x18, 0x05, 0x20, 0x01, 0x28, 0x04, 0x42, 0x45, 0x82, 0xb5, 0x18, 0x41, 0x67, 0x69, 0x74, + 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, + 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, + 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 
0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, + 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x45, 0x70, 0x6f, 0x63, 0x68, 0x52, 0x1a, + 0x61, 0x63, 0x74, 0x69, 0x76, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x45, 0x6c, 0x69, 0x67, 0x69, 0x62, + 0x69, 0x6c, 0x69, 0x74, 0x79, 0x45, 0x70, 0x6f, 0x63, 0x68, 0x12, 0x70, 0x0a, 0x10, 0x61, 0x63, + 0x74, 0x69, 0x76, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x65, 0x70, 0x6f, 0x63, 0x68, 0x18, 0x06, + 0x20, 0x01, 0x28, 0x04, 0x42, 0x45, 0x82, 0xb5, 0x18, 0x41, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, + 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, + 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, + 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, + 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x45, 0x70, 0x6f, 0x63, 0x68, 0x52, 0x0f, 0x61, 0x63, 0x74, + 0x69, 0x76, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x45, 0x70, 0x6f, 0x63, 0x68, 0x12, 0x64, 0x0a, 0x0a, + 0x65, 0x78, 0x69, 0x74, 0x5f, 0x65, 0x70, 0x6f, 0x63, 0x68, 0x18, 0x07, 0x20, 0x01, 0x28, 0x04, + 0x42, 0x45, 0x82, 0xb5, 0x18, 0x41, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, + 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, + 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, + 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, + 0x73, 0x2e, 0x45, 0x70, 0x6f, 0x63, 0x68, 0x52, 0x09, 0x65, 0x78, 0x69, 0x74, 0x45, 0x70, 0x6f, + 0x63, 0x68, 0x12, 0x74, 0x0a, 0x12, 0x77, 0x69, 0x74, 0x68, 0x64, 0x72, 0x61, 0x77, 0x61, 0x62, + 0x6c, 0x65, 0x5f, 0x65, 0x70, 0x6f, 0x63, 0x68, 0x18, 0x08, 0x20, 0x01, 0x28, 0x04, 0x42, 0x45, + 0x82, 0xb5, 0x18, 0x41, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, + 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, + 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, + 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, + 0x45, 0x70, 0x6f, 0x63, 0x68, 0x52, 0x11, 0x77, 0x69, 0x74, 0x68, 0x64, 0x72, 0x61, 0x77, 0x61, + 0x62, 0x6c, 0x65, 0x45, 0x70, 0x6f, 0x63, 0x68, 0x42, 0x9e, 0x01, 0x0a, 0x19, 0x6f, 0x72, 0x67, + 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, + 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x42, 0x14, 0x42, 0x65, 0x61, 0x63, 0x6f, 0x6e, 0x43, 0x6f, + 0x72, 0x65, 0x54, 0x79, 0x70, 0x65, 0x73, 0x50, 0x72, 0x6f, 0x74, 0x6f, 0x50, 0x01, 0x5a, 0x39, + 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, + 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, + 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x31, 0x61, + 0x6c, 0x70, 0x68, 0x61, 0x31, 0x3b, 0x65, 0x74, 0x68, 0xaa, 0x02, 0x15, 0x45, 0x74, 0x68, 0x65, + 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x45, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, + 0x31, 0xca, 0x02, 0x15, 0x45, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x5c, 0x45, 0x74, 0x68, + 0x5c, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, + 0x33, } var ( diff --git a/proto/prysm/v1alpha1/beacon_state.pb.go b/proto/prysm/v1alpha1/beacon_state.pb.go index 7fbe790984..51274ecf97 100755 --- 
a/proto/prysm/v1alpha1/beacon_state.pb.go +++ b/proto/prysm/v1alpha1/beacon_state.pb.go @@ -10,7 +10,7 @@ import ( reflect "reflect" sync "sync" - github_com_prysmaticlabs_go_bitfield "github.com/OffchainLabs/go-bitfield" + github_com_OffchainLabs_go_bitfield "github.com/OffchainLabs/go-bitfield" github_com_OffchainLabs_prysm_v6_consensus_types_primitives "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" v1 "github.com/OffchainLabs/prysm/v6/proto/engine/v1" _ "github.com/OffchainLabs/prysm/v6/proto/eth/ext" @@ -44,7 +44,7 @@ type BeaconState struct { Slashings []uint64 `protobuf:"varint,6001,rep,packed,name=slashings,proto3" json:"slashings,omitempty" ssz-size:"8192"` PreviousEpochAttestations []*PendingAttestation `protobuf:"bytes,7001,rep,name=previous_epoch_attestations,json=previousEpochAttestations,proto3" json:"previous_epoch_attestations,omitempty" ssz-max:"4096"` CurrentEpochAttestations []*PendingAttestation `protobuf:"bytes,7002,rep,name=current_epoch_attestations,json=currentEpochAttestations,proto3" json:"current_epoch_attestations,omitempty" ssz-max:"4096"` - JustificationBits github_com_prysmaticlabs_go_bitfield.Bitvector4 `protobuf:"bytes,8001,opt,name=justification_bits,json=justificationBits,proto3" json:"justification_bits,omitempty" cast-type:"github.com/OffchainLabs/go-bitfield.Bitvector4" ssz-size:"1"` + JustificationBits github_com_OffchainLabs_go_bitfield.Bitvector4 `protobuf:"bytes,8001,opt,name=justification_bits,json=justificationBits,proto3" json:"justification_bits,omitempty" cast-type:"github.com/OffchainLabs/go-bitfield.Bitvector4" ssz-size:"1"` PreviousJustifiedCheckpoint *Checkpoint `protobuf:"bytes,8002,opt,name=previous_justified_checkpoint,json=previousJustifiedCheckpoint,proto3" json:"previous_justified_checkpoint,omitempty"` CurrentJustifiedCheckpoint *Checkpoint `protobuf:"bytes,8003,opt,name=current_justified_checkpoint,json=currentJustifiedCheckpoint,proto3" json:"current_justified_checkpoint,omitempty"` FinalizedCheckpoint *Checkpoint `protobuf:"bytes,8004,opt,name=finalized_checkpoint,json=finalizedCheckpoint,proto3" json:"finalized_checkpoint,omitempty"` @@ -201,11 +201,11 @@ func (x *BeaconState) GetCurrentEpochAttestations() []*PendingAttestation { return nil } -func (x *BeaconState) GetJustificationBits() github_com_prysmaticlabs_go_bitfield.Bitvector4 { +func (x *BeaconState) GetJustificationBits() github_com_OffchainLabs_go_bitfield.Bitvector4 { if x != nil { return x.JustificationBits } - return github_com_prysmaticlabs_go_bitfield.Bitvector4(nil) + return github_com_OffchainLabs_go_bitfield.Bitvector4(nil) } func (x *BeaconState) GetPreviousJustifiedCheckpoint() *Checkpoint { @@ -231,7 +231,7 @@ func (x *BeaconState) GetFinalizedCheckpoint() *Checkpoint { type PendingAttestation struct { state protoimpl.MessageState `protogen:"open.v1"` - AggregationBits github_com_prysmaticlabs_go_bitfield.Bitlist `protobuf:"bytes,1,opt,name=aggregation_bits,json=aggregationBits,proto3" json:"aggregation_bits,omitempty" cast-type:"github.com/OffchainLabs/go-bitfield.Bitlist" ssz-max:"2048"` + AggregationBits github_com_OffchainLabs_go_bitfield.Bitlist `protobuf:"bytes,1,opt,name=aggregation_bits,json=aggregationBits,proto3" json:"aggregation_bits,omitempty" cast-type:"github.com/OffchainLabs/go-bitfield.Bitlist" ssz-max:"2048"` Data *AttestationData `protobuf:"bytes,2,opt,name=data,proto3" json:"data,omitempty"` InclusionDelay github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot 
`protobuf:"varint,3,opt,name=inclusion_delay,json=inclusionDelay,proto3" json:"inclusion_delay,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot"` ProposerIndex github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex `protobuf:"varint,4,opt,name=proposer_index,json=proposerIndex,proto3" json:"proposer_index,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.ValidatorIndex"` @@ -269,11 +269,11 @@ func (*PendingAttestation) Descriptor() ([]byte, []int) { return file_proto_prysm_v1alpha1_beacon_state_proto_rawDescGZIP(), []int{1} } -func (x *PendingAttestation) GetAggregationBits() github_com_prysmaticlabs_go_bitfield.Bitlist { +func (x *PendingAttestation) GetAggregationBits() github_com_OffchainLabs_go_bitfield.Bitlist { if x != nil { return x.AggregationBits } - return github_com_prysmaticlabs_go_bitfield.Bitlist(nil) + return github_com_OffchainLabs_go_bitfield.Bitlist(nil) } func (x *PendingAttestation) GetData() *AttestationData { @@ -720,7 +720,7 @@ type BeaconStateAltair struct { Slashings []uint64 `protobuf:"varint,6001,rep,packed,name=slashings,proto3" json:"slashings,omitempty" ssz-size:"8192"` PreviousEpochParticipation []byte `protobuf:"bytes,7001,opt,name=previous_epoch_participation,json=previousEpochParticipation,proto3" json:"previous_epoch_participation,omitempty" ssz-max:"1099511627776"` CurrentEpochParticipation []byte `protobuf:"bytes,7002,opt,name=current_epoch_participation,json=currentEpochParticipation,proto3" json:"current_epoch_participation,omitempty" ssz-max:"1099511627776"` - JustificationBits github_com_prysmaticlabs_go_bitfield.Bitvector4 `protobuf:"bytes,8001,opt,name=justification_bits,json=justificationBits,proto3" json:"justification_bits,omitempty" cast-type:"github.com/OffchainLabs/go-bitfield.Bitvector4" ssz-size:"1"` + JustificationBits github_com_OffchainLabs_go_bitfield.Bitvector4 `protobuf:"bytes,8001,opt,name=justification_bits,json=justificationBits,proto3" json:"justification_bits,omitempty" cast-type:"github.com/OffchainLabs/go-bitfield.Bitvector4" ssz-size:"1"` PreviousJustifiedCheckpoint *Checkpoint `protobuf:"bytes,8002,opt,name=previous_justified_checkpoint,json=previousJustifiedCheckpoint,proto3" json:"previous_justified_checkpoint,omitempty"` CurrentJustifiedCheckpoint *Checkpoint `protobuf:"bytes,8003,opt,name=current_justified_checkpoint,json=currentJustifiedCheckpoint,proto3" json:"current_justified_checkpoint,omitempty"` FinalizedCheckpoint *Checkpoint `protobuf:"bytes,8004,opt,name=finalized_checkpoint,json=finalizedCheckpoint,proto3" json:"finalized_checkpoint,omitempty"` @@ -880,11 +880,11 @@ func (x *BeaconStateAltair) GetCurrentEpochParticipation() []byte { return nil } -func (x *BeaconStateAltair) GetJustificationBits() github_com_prysmaticlabs_go_bitfield.Bitvector4 { +func (x *BeaconStateAltair) GetJustificationBits() github_com_OffchainLabs_go_bitfield.Bitvector4 { if x != nil { return x.JustificationBits } - return github_com_prysmaticlabs_go_bitfield.Bitvector4(nil) + return github_com_OffchainLabs_go_bitfield.Bitvector4(nil) } func (x *BeaconStateAltair) GetPreviousJustifiedCheckpoint() *Checkpoint { @@ -1000,7 +1000,7 @@ type BeaconStateBellatrix struct { Slashings []uint64 `protobuf:"varint,6001,rep,packed,name=slashings,proto3" json:"slashings,omitempty" ssz-size:"8192"` PreviousEpochParticipation []byte `protobuf:"bytes,7001,opt,name=previous_epoch_participation,json=previousEpochParticipation,proto3" 
json:"previous_epoch_participation,omitempty" ssz-max:"1099511627776"` CurrentEpochParticipation []byte `protobuf:"bytes,7002,opt,name=current_epoch_participation,json=currentEpochParticipation,proto3" json:"current_epoch_participation,omitempty" ssz-max:"1099511627776"` - JustificationBits github_com_prysmaticlabs_go_bitfield.Bitvector4 `protobuf:"bytes,8001,opt,name=justification_bits,json=justificationBits,proto3" json:"justification_bits,omitempty" cast-type:"github.com/OffchainLabs/go-bitfield.Bitvector4" ssz-size:"1"` + JustificationBits github_com_OffchainLabs_go_bitfield.Bitvector4 `protobuf:"bytes,8001,opt,name=justification_bits,json=justificationBits,proto3" json:"justification_bits,omitempty" cast-type:"github.com/OffchainLabs/go-bitfield.Bitvector4" ssz-size:"1"` PreviousJustifiedCheckpoint *Checkpoint `protobuf:"bytes,8002,opt,name=previous_justified_checkpoint,json=previousJustifiedCheckpoint,proto3" json:"previous_justified_checkpoint,omitempty"` CurrentJustifiedCheckpoint *Checkpoint `protobuf:"bytes,8003,opt,name=current_justified_checkpoint,json=currentJustifiedCheckpoint,proto3" json:"current_justified_checkpoint,omitempty"` FinalizedCheckpoint *Checkpoint `protobuf:"bytes,8004,opt,name=finalized_checkpoint,json=finalizedCheckpoint,proto3" json:"finalized_checkpoint,omitempty"` @@ -1161,11 +1161,11 @@ func (x *BeaconStateBellatrix) GetCurrentEpochParticipation() []byte { return nil } -func (x *BeaconStateBellatrix) GetJustificationBits() github_com_prysmaticlabs_go_bitfield.Bitvector4 { +func (x *BeaconStateBellatrix) GetJustificationBits() github_com_OffchainLabs_go_bitfield.Bitvector4 { if x != nil { return x.JustificationBits } - return github_com_prysmaticlabs_go_bitfield.Bitvector4(nil) + return github_com_OffchainLabs_go_bitfield.Bitvector4(nil) } func (x *BeaconStateBellatrix) GetPreviousJustifiedCheckpoint() *Checkpoint { @@ -1236,7 +1236,7 @@ type BeaconStateCapella struct { Slashings []uint64 `protobuf:"varint,6001,rep,packed,name=slashings,proto3" json:"slashings,omitempty" ssz-size:"8192"` PreviousEpochParticipation []byte `protobuf:"bytes,7001,opt,name=previous_epoch_participation,json=previousEpochParticipation,proto3" json:"previous_epoch_participation,omitempty" ssz-max:"1099511627776"` CurrentEpochParticipation []byte `protobuf:"bytes,7002,opt,name=current_epoch_participation,json=currentEpochParticipation,proto3" json:"current_epoch_participation,omitempty" ssz-max:"1099511627776"` - JustificationBits github_com_prysmaticlabs_go_bitfield.Bitvector4 `protobuf:"bytes,8001,opt,name=justification_bits,json=justificationBits,proto3" json:"justification_bits,omitempty" cast-type:"github.com/OffchainLabs/go-bitfield.Bitvector4" ssz-size:"1"` + JustificationBits github_com_OffchainLabs_go_bitfield.Bitvector4 `protobuf:"bytes,8001,opt,name=justification_bits,json=justificationBits,proto3" json:"justification_bits,omitempty" cast-type:"github.com/OffchainLabs/go-bitfield.Bitvector4" ssz-size:"1"` PreviousJustifiedCheckpoint *Checkpoint `protobuf:"bytes,8002,opt,name=previous_justified_checkpoint,json=previousJustifiedCheckpoint,proto3" json:"previous_justified_checkpoint,omitempty"` CurrentJustifiedCheckpoint *Checkpoint `protobuf:"bytes,8003,opt,name=current_justified_checkpoint,json=currentJustifiedCheckpoint,proto3" json:"current_justified_checkpoint,omitempty"` FinalizedCheckpoint *Checkpoint `protobuf:"bytes,8004,opt,name=finalized_checkpoint,json=finalizedCheckpoint,proto3" json:"finalized_checkpoint,omitempty"` @@ -1400,11 +1400,11 @@ func (x 
*BeaconStateCapella) GetCurrentEpochParticipation() []byte { return nil } -func (x *BeaconStateCapella) GetJustificationBits() github_com_prysmaticlabs_go_bitfield.Bitvector4 { +func (x *BeaconStateCapella) GetJustificationBits() github_com_OffchainLabs_go_bitfield.Bitvector4 { if x != nil { return x.JustificationBits } - return github_com_prysmaticlabs_go_bitfield.Bitvector4(nil) + return github_com_OffchainLabs_go_bitfield.Bitvector4(nil) } func (x *BeaconStateCapella) GetPreviousJustifiedCheckpoint() *Checkpoint { @@ -1496,7 +1496,7 @@ type BeaconStateDeneb struct { Slashings []uint64 `protobuf:"varint,6001,rep,packed,name=slashings,proto3" json:"slashings,omitempty" ssz-size:"8192"` PreviousEpochParticipation []byte `protobuf:"bytes,7001,opt,name=previous_epoch_participation,json=previousEpochParticipation,proto3" json:"previous_epoch_participation,omitempty" ssz-max:"1099511627776"` CurrentEpochParticipation []byte `protobuf:"bytes,7002,opt,name=current_epoch_participation,json=currentEpochParticipation,proto3" json:"current_epoch_participation,omitempty" ssz-max:"1099511627776"` - JustificationBits github_com_prysmaticlabs_go_bitfield.Bitvector4 `protobuf:"bytes,8001,opt,name=justification_bits,json=justificationBits,proto3" json:"justification_bits,omitempty" cast-type:"github.com/OffchainLabs/go-bitfield.Bitvector4" ssz-size:"1"` + JustificationBits github_com_OffchainLabs_go_bitfield.Bitvector4 `protobuf:"bytes,8001,opt,name=justification_bits,json=justificationBits,proto3" json:"justification_bits,omitempty" cast-type:"github.com/OffchainLabs/go-bitfield.Bitvector4" ssz-size:"1"` PreviousJustifiedCheckpoint *Checkpoint `protobuf:"bytes,8002,opt,name=previous_justified_checkpoint,json=previousJustifiedCheckpoint,proto3" json:"previous_justified_checkpoint,omitempty"` CurrentJustifiedCheckpoint *Checkpoint `protobuf:"bytes,8003,opt,name=current_justified_checkpoint,json=currentJustifiedCheckpoint,proto3" json:"current_justified_checkpoint,omitempty"` FinalizedCheckpoint *Checkpoint `protobuf:"bytes,8004,opt,name=finalized_checkpoint,json=finalizedCheckpoint,proto3" json:"finalized_checkpoint,omitempty"` @@ -1660,11 +1660,11 @@ func (x *BeaconStateDeneb) GetCurrentEpochParticipation() []byte { return nil } -func (x *BeaconStateDeneb) GetJustificationBits() github_com_prysmaticlabs_go_bitfield.Bitvector4 { +func (x *BeaconStateDeneb) GetJustificationBits() github_com_OffchainLabs_go_bitfield.Bitvector4 { if x != nil { return x.JustificationBits } - return github_com_prysmaticlabs_go_bitfield.Bitvector4(nil) + return github_com_OffchainLabs_go_bitfield.Bitvector4(nil) } func (x *BeaconStateDeneb) GetPreviousJustifiedCheckpoint() *Checkpoint { @@ -1756,7 +1756,7 @@ type BeaconStateElectra struct { Slashings []uint64 `protobuf:"varint,6001,rep,packed,name=slashings,proto3" json:"slashings,omitempty" ssz-size:"8192"` PreviousEpochParticipation []byte `protobuf:"bytes,7001,opt,name=previous_epoch_participation,json=previousEpochParticipation,proto3" json:"previous_epoch_participation,omitempty" ssz-max:"1099511627776"` CurrentEpochParticipation []byte `protobuf:"bytes,7002,opt,name=current_epoch_participation,json=currentEpochParticipation,proto3" json:"current_epoch_participation,omitempty" ssz-max:"1099511627776"` - JustificationBits github_com_prysmaticlabs_go_bitfield.Bitvector4 `protobuf:"bytes,8001,opt,name=justification_bits,json=justificationBits,proto3" json:"justification_bits,omitempty" cast-type:"github.com/OffchainLabs/go-bitfield.Bitvector4" ssz-size:"1"` + JustificationBits 
github_com_OffchainLabs_go_bitfield.Bitvector4 `protobuf:"bytes,8001,opt,name=justification_bits,json=justificationBits,proto3" json:"justification_bits,omitempty" cast-type:"github.com/OffchainLabs/go-bitfield.Bitvector4" ssz-size:"1"` PreviousJustifiedCheckpoint *Checkpoint `protobuf:"bytes,8002,opt,name=previous_justified_checkpoint,json=previousJustifiedCheckpoint,proto3" json:"previous_justified_checkpoint,omitempty"` CurrentJustifiedCheckpoint *Checkpoint `protobuf:"bytes,8003,opt,name=current_justified_checkpoint,json=currentJustifiedCheckpoint,proto3" json:"current_justified_checkpoint,omitempty"` FinalizedCheckpoint *Checkpoint `protobuf:"bytes,8004,opt,name=finalized_checkpoint,json=finalizedCheckpoint,proto3" json:"finalized_checkpoint,omitempty"` @@ -1929,11 +1929,11 @@ func (x *BeaconStateElectra) GetCurrentEpochParticipation() []byte { return nil } -func (x *BeaconStateElectra) GetJustificationBits() github_com_prysmaticlabs_go_bitfield.Bitvector4 { +func (x *BeaconStateElectra) GetJustificationBits() github_com_OffchainLabs_go_bitfield.Bitvector4 { if x != nil { return x.JustificationBits } - return github_com_prysmaticlabs_go_bitfield.Bitvector4(nil) + return github_com_OffchainLabs_go_bitfield.Bitvector4(nil) } func (x *BeaconStateElectra) GetPreviousJustifiedCheckpoint() *Checkpoint { @@ -2088,7 +2088,7 @@ type BeaconStateFulu struct { Slashings []uint64 `protobuf:"varint,6001,rep,packed,name=slashings,proto3" json:"slashings,omitempty" ssz-size:"8192"` PreviousEpochParticipation []byte `protobuf:"bytes,7001,opt,name=previous_epoch_participation,json=previousEpochParticipation,proto3" json:"previous_epoch_participation,omitempty" ssz-max:"1099511627776"` CurrentEpochParticipation []byte `protobuf:"bytes,7002,opt,name=current_epoch_participation,json=currentEpochParticipation,proto3" json:"current_epoch_participation,omitempty" ssz-max:"1099511627776"` - JustificationBits github_com_prysmaticlabs_go_bitfield.Bitvector4 `protobuf:"bytes,8001,opt,name=justification_bits,json=justificationBits,proto3" json:"justification_bits,omitempty" cast-type:"github.com/OffchainLabs/go-bitfield.Bitvector4" ssz-size:"1"` + JustificationBits github_com_OffchainLabs_go_bitfield.Bitvector4 `protobuf:"bytes,8001,opt,name=justification_bits,json=justificationBits,proto3" json:"justification_bits,omitempty" cast-type:"github.com/OffchainLabs/go-bitfield.Bitvector4" ssz-size:"1"` PreviousJustifiedCheckpoint *Checkpoint `protobuf:"bytes,8002,opt,name=previous_justified_checkpoint,json=previousJustifiedCheckpoint,proto3" json:"previous_justified_checkpoint,omitempty"` CurrentJustifiedCheckpoint *Checkpoint `protobuf:"bytes,8003,opt,name=current_justified_checkpoint,json=currentJustifiedCheckpoint,proto3" json:"current_justified_checkpoint,omitempty"` FinalizedCheckpoint *Checkpoint `protobuf:"bytes,8004,opt,name=finalized_checkpoint,json=finalizedCheckpoint,proto3" json:"finalized_checkpoint,omitempty"` @@ -2262,11 +2262,11 @@ func (x *BeaconStateFulu) GetCurrentEpochParticipation() []byte { return nil } -func (x *BeaconStateFulu) GetJustificationBits() github_com_prysmaticlabs_go_bitfield.Bitvector4 { +func (x *BeaconStateFulu) GetJustificationBits() github_com_OffchainLabs_go_bitfield.Bitvector4 { if x != nil { return x.JustificationBits } - return github_com_prysmaticlabs_go_bitfield.Bitvector4(nil) + return github_com_OffchainLabs_go_bitfield.Bitvector4(nil) } func (x *BeaconStateFulu) GetPreviousJustifiedCheckpoint() *Checkpoint { @@ -2428,7 +2428,7 @@ var 
file_proto_prysm_v1alpha1_beacon_state_proto_rawDesc = []byte{ 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x23, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2f, 0x65, 0x69, 0x70, 0x5f, 0x37, 0x32, 0x35, 0x31, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, - 0x22, 0x96, 0x0c, 0x0a, 0x0b, 0x42, 0x65, 0x61, 0x63, 0x6f, 0x6e, 0x53, 0x74, 0x61, 0x74, 0x65, + 0x22, 0x95, 0x0c, 0x0a, 0x0b, 0x42, 0x65, 0x61, 0x63, 0x6f, 0x6e, 0x53, 0x74, 0x61, 0x74, 0x65, 0x12, 0x22, 0x0a, 0x0c, 0x67, 0x65, 0x6e, 0x65, 0x73, 0x69, 0x73, 0x5f, 0x74, 0x69, 0x6d, 0x65, 0x18, 0xe9, 0x07, 0x20, 0x01, 0x28, 0x04, 0x52, 0x0b, 0x67, 0x65, 0x6e, 0x65, 0x73, 0x69, 0x73, 0x54, 0x69, 0x6d, 0x65, 0x12, 0x3f, 0x0a, 0x17, 0x67, 0x65, 0x6e, 0x65, 0x73, 0x69, 0x73, 0x5f, @@ -2500,11 +2500,189 @@ var file_proto_prysm_v1alpha1_beacon_state_proto_rawDesc = []byte{ 0x68, 0x61, 0x31, 0x2e, 0x50, 0x65, 0x6e, 0x64, 0x69, 0x6e, 0x67, 0x41, 0x74, 0x74, 0x65, 0x73, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x42, 0x08, 0x92, 0xb5, 0x18, 0x04, 0x34, 0x30, 0x39, 0x36, 0x52, 0x18, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x45, 0x70, 0x6f, 0x63, 0x68, 0x41, 0x74, - 0x74, 0x65, 0x73, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x68, 0x0a, 0x12, 0x6a, 0x75, + 0x74, 0x65, 0x73, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x67, 0x0a, 0x12, 0x6a, 0x75, 0x73, 0x74, 0x69, 0x66, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x62, 0x69, 0x74, 0x73, - 0x18, 0xc1, 0x3e, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x38, 0x82, 0xb5, 0x18, 0x2f, 0x67, 0x69, 0x74, - 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x61, 0x74, 0x69, - 0x63, 0x6c, 0x61, 0x62, 0x73, 0x2f, 0x67, 0x6f, 0x2d, 0x62, 0x69, 0x74, 0x66, 0x69, 0x65, 0x6c, + 0x18, 0xc1, 0x3e, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x37, 0x82, 0xb5, 0x18, 0x2e, 0x67, 0x69, 0x74, + 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, + 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x67, 0x6f, 0x2d, 0x62, 0x69, 0x74, 0x66, 0x69, 0x65, 0x6c, 0x64, + 0x2e, 0x42, 0x69, 0x74, 0x76, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x34, 0x8a, 0xb5, 0x18, 0x01, 0x31, + 0x52, 0x11, 0x6a, 0x75, 0x73, 0x74, 0x69, 0x66, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x42, + 0x69, 0x74, 0x73, 0x12, 0x66, 0x0a, 0x1d, 0x70, 0x72, 0x65, 0x76, 0x69, 0x6f, 0x75, 0x73, 0x5f, + 0x6a, 0x75, 0x73, 0x74, 0x69, 0x66, 0x69, 0x65, 0x64, 0x5f, 0x63, 0x68, 0x65, 0x63, 0x6b, 0x70, + 0x6f, 0x69, 0x6e, 0x74, 0x18, 0xc2, 0x3e, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x21, 0x2e, 0x65, 0x74, + 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, + 0x68, 0x61, 0x31, 0x2e, 0x43, 0x68, 0x65, 0x63, 0x6b, 0x70, 0x6f, 0x69, 0x6e, 0x74, 0x52, 0x1b, + 0x70, 0x72, 0x65, 0x76, 0x69, 0x6f, 0x75, 0x73, 0x4a, 0x75, 0x73, 0x74, 0x69, 0x66, 0x69, 0x65, + 0x64, 0x43, 0x68, 0x65, 0x63, 0x6b, 0x70, 0x6f, 0x69, 0x6e, 0x74, 0x12, 0x64, 0x0a, 0x1c, 0x63, + 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x5f, 0x6a, 0x75, 0x73, 0x74, 0x69, 0x66, 0x69, 0x65, 0x64, + 0x5f, 0x63, 0x68, 0x65, 0x63, 0x6b, 0x70, 0x6f, 0x69, 0x6e, 0x74, 0x18, 0xc3, 0x3e, 0x20, 0x01, + 0x28, 0x0b, 0x32, 0x21, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, + 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x43, 0x68, 0x65, 0x63, 0x6b, + 0x70, 0x6f, 0x69, 0x6e, 0x74, 0x52, 0x1a, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x4a, 0x75, + 0x73, 0x74, 0x69, 0x66, 0x69, 0x65, 0x64, 0x43, 0x68, 0x65, 0x63, 0x6b, 0x70, 0x6f, 
0x69, 0x6e, + 0x74, 0x12, 0x55, 0x0a, 0x14, 0x66, 0x69, 0x6e, 0x61, 0x6c, 0x69, 0x7a, 0x65, 0x64, 0x5f, 0x63, + 0x68, 0x65, 0x63, 0x6b, 0x70, 0x6f, 0x69, 0x6e, 0x74, 0x18, 0xc4, 0x3e, 0x20, 0x01, 0x28, 0x0b, + 0x32, 0x21, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, + 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x43, 0x68, 0x65, 0x63, 0x6b, 0x70, 0x6f, + 0x69, 0x6e, 0x74, 0x52, 0x13, 0x66, 0x69, 0x6e, 0x61, 0x6c, 0x69, 0x7a, 0x65, 0x64, 0x43, 0x68, + 0x65, 0x63, 0x6b, 0x70, 0x6f, 0x69, 0x6e, 0x74, 0x22, 0x9a, 0x03, 0x0a, 0x12, 0x50, 0x65, 0x6e, + 0x64, 0x69, 0x6e, 0x67, 0x41, 0x74, 0x74, 0x65, 0x73, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x12, + 0x62, 0x0a, 0x10, 0x61, 0x67, 0x67, 0x72, 0x65, 0x67, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x62, + 0x69, 0x74, 0x73, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x37, 0x82, 0xb5, 0x18, 0x2b, 0x67, + 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, + 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x67, 0x6f, 0x2d, 0x62, 0x69, 0x74, 0x66, 0x69, 0x65, + 0x6c, 0x64, 0x2e, 0x42, 0x69, 0x74, 0x6c, 0x69, 0x73, 0x74, 0x92, 0xb5, 0x18, 0x04, 0x32, 0x30, + 0x34, 0x38, 0x52, 0x0f, 0x61, 0x67, 0x67, 0x72, 0x65, 0x67, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x42, + 0x69, 0x74, 0x73, 0x12, 0x3a, 0x0a, 0x04, 0x64, 0x61, 0x74, 0x61, 0x18, 0x02, 0x20, 0x01, 0x28, + 0x0b, 0x32, 0x26, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, + 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x41, 0x74, 0x74, 0x65, 0x73, 0x74, + 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x44, 0x61, 0x74, 0x61, 0x52, 0x04, 0x64, 0x61, 0x74, 0x61, 0x12, + 0x6d, 0x0a, 0x0f, 0x69, 0x6e, 0x63, 0x6c, 0x75, 0x73, 0x69, 0x6f, 0x6e, 0x5f, 0x64, 0x65, 0x6c, + 0x61, 0x79, 0x18, 0x03, 0x20, 0x01, 0x28, 0x04, 0x42, 0x44, 0x82, 0xb5, 0x18, 0x40, 0x67, 0x69, + 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, + 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, + 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, + 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x53, 0x6c, 0x6f, 0x74, 0x52, 0x0e, + 0x69, 0x6e, 0x63, 0x6c, 0x75, 0x73, 0x69, 0x6f, 0x6e, 0x44, 0x65, 0x6c, 0x61, 0x79, 0x12, 0x75, + 0x0a, 0x0e, 0x70, 0x72, 0x6f, 0x70, 0x6f, 0x73, 0x65, 0x72, 0x5f, 0x69, 0x6e, 0x64, 0x65, 0x78, + 0x18, 0x04, 0x20, 0x01, 0x28, 0x04, 0x42, 0x4e, 0x82, 0xb5, 0x18, 0x4a, 0x67, 0x69, 0x74, 0x68, + 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, + 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, + 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, + 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x56, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, + 0x72, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x52, 0x0d, 0x70, 0x72, 0x6f, 0x70, 0x6f, 0x73, 0x65, 0x72, + 0x49, 0x6e, 0x64, 0x65, 0x78, 0x22, 0x6d, 0x0a, 0x0f, 0x48, 0x69, 0x73, 0x74, 0x6f, 0x72, 0x69, + 0x63, 0x61, 0x6c, 0x42, 0x61, 0x74, 0x63, 0x68, 0x12, 0x2c, 0x0a, 0x0b, 0x62, 0x6c, 0x6f, 0x63, + 0x6b, 0x5f, 0x72, 0x6f, 0x6f, 0x74, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0c, 0x42, 0x0b, 0x8a, + 0xb5, 0x18, 0x07, 0x38, 0x31, 0x39, 0x32, 0x2c, 0x33, 0x32, 0x52, 0x0a, 0x62, 0x6c, 0x6f, 0x63, + 0x6b, 0x52, 0x6f, 0x6f, 0x74, 0x73, 0x12, 0x2c, 0x0a, 0x0b, 0x73, 0x74, 0x61, 0x74, 0x65, 0x5f, + 0x72, 0x6f, 
0x6f, 0x74, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0c, 0x42, 0x0b, 0x8a, 0xb5, 0x18, + 0x07, 0x38, 0x31, 0x39, 0x32, 0x2c, 0x33, 0x32, 0x52, 0x0a, 0x73, 0x74, 0x61, 0x74, 0x65, 0x52, + 0x6f, 0x6f, 0x74, 0x73, 0x22, 0x7c, 0x0a, 0x0c, 0x53, 0x74, 0x61, 0x74, 0x65, 0x53, 0x75, 0x6d, + 0x6d, 0x61, 0x72, 0x79, 0x12, 0x58, 0x0a, 0x04, 0x73, 0x6c, 0x6f, 0x74, 0x18, 0x01, 0x20, 0x01, + 0x28, 0x04, 0x42, 0x44, 0x82, 0xb5, 0x18, 0x40, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, + 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, + 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, + 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, + 0x76, 0x65, 0x73, 0x2e, 0x53, 0x6c, 0x6f, 0x74, 0x52, 0x04, 0x73, 0x6c, 0x6f, 0x74, 0x12, 0x12, + 0x0a, 0x04, 0x72, 0x6f, 0x6f, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x04, 0x72, 0x6f, + 0x6f, 0x74, 0x22, 0x56, 0x0a, 0x0b, 0x53, 0x69, 0x67, 0x6e, 0x69, 0x6e, 0x67, 0x44, 0x61, 0x74, + 0x61, 0x12, 0x27, 0x0a, 0x0b, 0x6f, 0x62, 0x6a, 0x65, 0x63, 0x74, 0x5f, 0x72, 0x6f, 0x6f, 0x74, + 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x06, 0x8a, 0xb5, 0x18, 0x02, 0x33, 0x32, 0x52, 0x0a, + 0x6f, 0x62, 0x6a, 0x65, 0x63, 0x74, 0x52, 0x6f, 0x6f, 0x74, 0x12, 0x1e, 0x0a, 0x06, 0x64, 0x6f, + 0x6d, 0x61, 0x69, 0x6e, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x06, 0x8a, 0xb5, 0x18, 0x02, + 0x33, 0x32, 0x52, 0x06, 0x64, 0x6f, 0x6d, 0x61, 0x69, 0x6e, 0x22, 0x7a, 0x0a, 0x08, 0x46, 0x6f, + 0x72, 0x6b, 0x44, 0x61, 0x74, 0x61, 0x12, 0x2e, 0x0a, 0x0f, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, + 0x74, 0x5f, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0c, 0x42, + 0x05, 0x8a, 0xb5, 0x18, 0x01, 0x34, 0x52, 0x0e, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x56, + 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x3e, 0x0a, 0x17, 0x67, 0x65, 0x6e, 0x65, 0x73, 0x69, + 0x73, 0x5f, 0x76, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, 0x73, 0x5f, 0x72, 0x6f, 0x6f, + 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x06, 0x8a, 0xb5, 0x18, 0x02, 0x33, 0x32, 0x52, + 0x15, 0x67, 0x65, 0x6e, 0x65, 0x73, 0x69, 0x73, 0x56, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, + 0x72, 0x73, 0x52, 0x6f, 0x6f, 0x74, 0x22, 0xb7, 0x01, 0x0a, 0x0b, 0x43, 0x68, 0x65, 0x63, 0x6b, + 0x50, 0x74, 0x49, 0x6e, 0x66, 0x6f, 0x12, 0x12, 0x0a, 0x04, 0x73, 0x65, 0x65, 0x64, 0x18, 0x01, + 0x20, 0x01, 0x28, 0x0c, 0x52, 0x04, 0x73, 0x65, 0x65, 0x64, 0x12, 0x21, 0x0a, 0x0c, 0x67, 0x65, + 0x6e, 0x65, 0x73, 0x69, 0x73, 0x5f, 0x72, 0x6f, 0x6f, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0c, + 0x52, 0x0b, 0x67, 0x65, 0x6e, 0x65, 0x73, 0x69, 0x73, 0x52, 0x6f, 0x6f, 0x74, 0x12, 0x25, 0x0a, + 0x0e, 0x61, 0x63, 0x74, 0x69, 0x76, 0x65, 0x5f, 0x69, 0x6e, 0x64, 0x69, 0x63, 0x65, 0x73, 0x18, + 0x03, 0x20, 0x03, 0x28, 0x04, 0x52, 0x0d, 0x61, 0x63, 0x74, 0x69, 0x76, 0x65, 0x49, 0x6e, 0x64, + 0x69, 0x63, 0x65, 0x73, 0x12, 0x19, 0x0a, 0x08, 0x70, 0x75, 0x62, 0x5f, 0x6b, 0x65, 0x79, 0x73, + 0x18, 0x04, 0x20, 0x03, 0x28, 0x0c, 0x52, 0x07, 0x70, 0x75, 0x62, 0x4b, 0x65, 0x79, 0x73, 0x12, + 0x2f, 0x0a, 0x04, 0x66, 0x6f, 0x72, 0x6b, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1b, 0x2e, + 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, + 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x46, 0x6f, 0x72, 0x6b, 0x52, 0x04, 0x66, 0x6f, 0x72, 0x6b, + 0x22, 0x98, 0x01, 0x0a, 0x0e, 0x44, 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x4d, 0x65, 0x73, 0x73, + 0x61, 0x67, 0x65, 0x12, 0x2f, 0x0a, 
0x0a, 0x70, 0x75, 0x62, 0x6c, 0x69, 0x63, 0x5f, 0x6b, 0x65, + 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x10, 0x8a, 0xb5, 0x18, 0x02, 0x34, 0x38, 0x9a, + 0xb5, 0x18, 0x06, 0x70, 0x75, 0x62, 0x6b, 0x65, 0x79, 0x52, 0x09, 0x70, 0x75, 0x62, 0x6c, 0x69, + 0x63, 0x4b, 0x65, 0x79, 0x12, 0x3d, 0x0a, 0x16, 0x77, 0x69, 0x74, 0x68, 0x64, 0x72, 0x61, 0x77, + 0x61, 0x6c, 0x5f, 0x63, 0x72, 0x65, 0x64, 0x65, 0x6e, 0x74, 0x69, 0x61, 0x6c, 0x73, 0x18, 0x02, + 0x20, 0x01, 0x28, 0x0c, 0x42, 0x06, 0x8a, 0xb5, 0x18, 0x02, 0x33, 0x32, 0x52, 0x15, 0x77, 0x69, + 0x74, 0x68, 0x64, 0x72, 0x61, 0x77, 0x61, 0x6c, 0x43, 0x72, 0x65, 0x64, 0x65, 0x6e, 0x74, 0x69, + 0x61, 0x6c, 0x73, 0x12, 0x16, 0x0a, 0x06, 0x61, 0x6d, 0x6f, 0x75, 0x6e, 0x74, 0x18, 0x03, 0x20, + 0x01, 0x28, 0x04, 0x52, 0x06, 0x61, 0x6d, 0x6f, 0x75, 0x6e, 0x74, 0x22, 0x8d, 0x01, 0x0a, 0x08, + 0x50, 0x6f, 0x77, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x12, 0x25, 0x0a, 0x0a, 0x62, 0x6c, 0x6f, 0x63, + 0x6b, 0x5f, 0x68, 0x61, 0x73, 0x68, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x06, 0x8a, 0xb5, + 0x18, 0x02, 0x33, 0x32, 0x52, 0x09, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x48, 0x61, 0x73, 0x68, 0x12, + 0x27, 0x0a, 0x0b, 0x70, 0x61, 0x72, 0x65, 0x6e, 0x74, 0x5f, 0x68, 0x61, 0x73, 0x68, 0x18, 0x02, + 0x20, 0x01, 0x28, 0x0c, 0x42, 0x06, 0x8a, 0xb5, 0x18, 0x02, 0x33, 0x32, 0x52, 0x0a, 0x70, 0x61, + 0x72, 0x65, 0x6e, 0x74, 0x48, 0x61, 0x73, 0x68, 0x12, 0x31, 0x0a, 0x10, 0x74, 0x6f, 0x74, 0x61, + 0x6c, 0x5f, 0x64, 0x69, 0x66, 0x66, 0x69, 0x63, 0x75, 0x6c, 0x74, 0x79, 0x18, 0x03, 0x20, 0x01, + 0x28, 0x0c, 0x42, 0x06, 0x8a, 0xb5, 0x18, 0x02, 0x33, 0x32, 0x52, 0x0f, 0x74, 0x6f, 0x74, 0x61, + 0x6c, 0x44, 0x69, 0x66, 0x66, 0x69, 0x63, 0x75, 0x6c, 0x74, 0x79, 0x22, 0xd0, 0x0d, 0x0a, 0x11, + 0x42, 0x65, 0x61, 0x63, 0x6f, 0x6e, 0x53, 0x74, 0x61, 0x74, 0x65, 0x41, 0x6c, 0x74, 0x61, 0x69, + 0x72, 0x12, 0x22, 0x0a, 0x0c, 0x67, 0x65, 0x6e, 0x65, 0x73, 0x69, 0x73, 0x5f, 0x74, 0x69, 0x6d, + 0x65, 0x18, 0xe9, 0x07, 0x20, 0x01, 0x28, 0x04, 0x52, 0x0b, 0x67, 0x65, 0x6e, 0x65, 0x73, 0x69, + 0x73, 0x54, 0x69, 0x6d, 0x65, 0x12, 0x3f, 0x0a, 0x17, 0x67, 0x65, 0x6e, 0x65, 0x73, 0x69, 0x73, + 0x5f, 0x76, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, 0x73, 0x5f, 0x72, 0x6f, 0x6f, 0x74, + 0x18, 0xea, 0x07, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x06, 0x8a, 0xb5, 0x18, 0x02, 0x33, 0x32, 0x52, + 0x15, 0x67, 0x65, 0x6e, 0x65, 0x73, 0x69, 0x73, 0x56, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, + 0x72, 0x73, 0x52, 0x6f, 0x6f, 0x74, 0x12, 0x59, 0x0a, 0x04, 0x73, 0x6c, 0x6f, 0x74, 0x18, 0xeb, + 0x07, 0x20, 0x01, 0x28, 0x04, 0x42, 0x44, 0x82, 0xb5, 0x18, 0x40, 0x67, 0x69, 0x74, 0x68, 0x75, + 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, + 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, + 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, + 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x53, 0x6c, 0x6f, 0x74, 0x52, 0x04, 0x73, 0x6c, 0x6f, + 0x74, 0x12, 0x30, 0x0a, 0x04, 0x66, 0x6f, 0x72, 0x6b, 0x18, 0xec, 0x07, 0x20, 0x01, 0x28, 0x0b, + 0x32, 0x1b, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, + 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x46, 0x6f, 0x72, 0x6b, 0x52, 0x04, 0x66, + 0x6f, 0x72, 0x6b, 0x12, 0x59, 0x0a, 0x13, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x74, 0x5f, 0x62, 0x6c, + 0x6f, 0x63, 0x6b, 0x5f, 0x68, 0x65, 0x61, 0x64, 0x65, 0x72, 0x18, 0xd1, 0x0f, 0x20, 0x01, 0x28, + 0x0b, 0x32, 0x28, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 
0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, + 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x42, 0x65, 0x61, 0x63, 0x6f, 0x6e, + 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x48, 0x65, 0x61, 0x64, 0x65, 0x72, 0x52, 0x11, 0x6c, 0x61, 0x74, + 0x65, 0x73, 0x74, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x48, 0x65, 0x61, 0x64, 0x65, 0x72, 0x12, 0x2d, + 0x0a, 0x0b, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x5f, 0x72, 0x6f, 0x6f, 0x74, 0x73, 0x18, 0xd2, 0x0f, + 0x20, 0x03, 0x28, 0x0c, 0x42, 0x0b, 0x8a, 0xb5, 0x18, 0x07, 0x38, 0x31, 0x39, 0x32, 0x2c, 0x33, + 0x32, 0x52, 0x0a, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x52, 0x6f, 0x6f, 0x74, 0x73, 0x12, 0x2d, 0x0a, + 0x0b, 0x73, 0x74, 0x61, 0x74, 0x65, 0x5f, 0x72, 0x6f, 0x6f, 0x74, 0x73, 0x18, 0xd3, 0x0f, 0x20, + 0x03, 0x28, 0x0c, 0x42, 0x0b, 0x8a, 0xb5, 0x18, 0x07, 0x38, 0x31, 0x39, 0x32, 0x2c, 0x33, 0x32, + 0x52, 0x0a, 0x73, 0x74, 0x61, 0x74, 0x65, 0x52, 0x6f, 0x6f, 0x74, 0x73, 0x12, 0x40, 0x0a, 0x10, + 0x68, 0x69, 0x73, 0x74, 0x6f, 0x72, 0x69, 0x63, 0x61, 0x6c, 0x5f, 0x72, 0x6f, 0x6f, 0x74, 0x73, + 0x18, 0xd4, 0x0f, 0x20, 0x03, 0x28, 0x0c, 0x42, 0x14, 0x8a, 0xb5, 0x18, 0x04, 0x3f, 0x2c, 0x33, + 0x32, 0x92, 0xb5, 0x18, 0x08, 0x31, 0x36, 0x37, 0x37, 0x37, 0x32, 0x31, 0x36, 0x52, 0x0f, 0x68, + 0x69, 0x73, 0x74, 0x6f, 0x72, 0x69, 0x63, 0x61, 0x6c, 0x52, 0x6f, 0x6f, 0x74, 0x73, 0x12, 0x3d, + 0x0a, 0x09, 0x65, 0x74, 0x68, 0x31, 0x5f, 0x64, 0x61, 0x74, 0x61, 0x18, 0xb9, 0x17, 0x20, 0x01, + 0x28, 0x0b, 0x32, 0x1f, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, + 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x45, 0x74, 0x68, 0x31, 0x44, + 0x61, 0x74, 0x61, 0x52, 0x08, 0x65, 0x74, 0x68, 0x31, 0x44, 0x61, 0x74, 0x61, 0x12, 0x52, 0x0a, + 0x0f, 0x65, 0x74, 0x68, 0x31, 0x5f, 0x64, 0x61, 0x74, 0x61, 0x5f, 0x76, 0x6f, 0x74, 0x65, 0x73, + 0x18, 0xba, 0x17, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1f, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, + 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, + 0x45, 0x74, 0x68, 0x31, 0x44, 0x61, 0x74, 0x61, 0x42, 0x08, 0x92, 0xb5, 0x18, 0x04, 0x32, 0x30, + 0x34, 0x38, 0x52, 0x0d, 0x65, 0x74, 0x68, 0x31, 0x44, 0x61, 0x74, 0x61, 0x56, 0x6f, 0x74, 0x65, + 0x73, 0x12, 0x2d, 0x0a, 0x12, 0x65, 0x74, 0x68, 0x31, 0x5f, 0x64, 0x65, 0x70, 0x6f, 0x73, 0x69, + 0x74, 0x5f, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x18, 0xbb, 0x17, 0x20, 0x01, 0x28, 0x04, 0x52, 0x10, + 0x65, 0x74, 0x68, 0x31, 0x44, 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x49, 0x6e, 0x64, 0x65, 0x78, + 0x12, 0x54, 0x0a, 0x0a, 0x76, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, 0x73, 0x18, 0xa1, + 0x1f, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x20, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, + 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x56, 0x61, + 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, 0x42, 0x11, 0x92, 0xb5, 0x18, 0x0d, 0x31, 0x30, 0x39, + 0x39, 0x35, 0x31, 0x31, 0x36, 0x32, 0x37, 0x37, 0x37, 0x36, 0x52, 0x0a, 0x76, 0x61, 0x6c, 0x69, + 0x64, 0x61, 0x74, 0x6f, 0x72, 0x73, 0x12, 0x2e, 0x0a, 0x08, 0x62, 0x61, 0x6c, 0x61, 0x6e, 0x63, + 0x65, 0x73, 0x18, 0xa2, 0x1f, 0x20, 0x03, 0x28, 0x04, 0x42, 0x11, 0x92, 0xb5, 0x18, 0x0d, 0x31, + 0x30, 0x39, 0x39, 0x35, 0x31, 0x31, 0x36, 0x32, 0x37, 0x37, 0x37, 0x36, 0x52, 0x08, 0x62, 0x61, + 0x6c, 0x61, 0x6e, 0x63, 0x65, 0x73, 0x12, 0x30, 0x0a, 0x0c, 0x72, 0x61, 0x6e, 0x64, 0x61, 0x6f, + 0x5f, 0x6d, 0x69, 0x78, 0x65, 0x73, 0x18, 0x89, 0x27, 0x20, 0x03, 0x28, 0x0c, 0x42, 0x0c, 0x8a, + 0xb5, 0x18, 0x08, 0x36, 0x35, 0x35, 0x33, 0x36, 0x2c, 0x33, 0x32, 0x52, 0x0b, 0x72, 
0x61, 0x6e, + 0x64, 0x61, 0x6f, 0x4d, 0x69, 0x78, 0x65, 0x73, 0x12, 0x27, 0x0a, 0x09, 0x73, 0x6c, 0x61, 0x73, + 0x68, 0x69, 0x6e, 0x67, 0x73, 0x18, 0xf1, 0x2e, 0x20, 0x03, 0x28, 0x04, 0x42, 0x08, 0x8a, 0xb5, + 0x18, 0x04, 0x38, 0x31, 0x39, 0x32, 0x52, 0x09, 0x73, 0x6c, 0x61, 0x73, 0x68, 0x69, 0x6e, 0x67, + 0x73, 0x12, 0x54, 0x0a, 0x1c, 0x70, 0x72, 0x65, 0x76, 0x69, 0x6f, 0x75, 0x73, 0x5f, 0x65, 0x70, + 0x6f, 0x63, 0x68, 0x5f, 0x70, 0x61, 0x72, 0x74, 0x69, 0x63, 0x69, 0x70, 0x61, 0x74, 0x69, 0x6f, + 0x6e, 0x18, 0xd9, 0x36, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x11, 0x92, 0xb5, 0x18, 0x0d, 0x31, 0x30, + 0x39, 0x39, 0x35, 0x31, 0x31, 0x36, 0x32, 0x37, 0x37, 0x37, 0x36, 0x52, 0x1a, 0x70, 0x72, 0x65, + 0x76, 0x69, 0x6f, 0x75, 0x73, 0x45, 0x70, 0x6f, 0x63, 0x68, 0x50, 0x61, 0x72, 0x74, 0x69, 0x63, + 0x69, 0x70, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x52, 0x0a, 0x1b, 0x63, 0x75, 0x72, 0x72, 0x65, + 0x6e, 0x74, 0x5f, 0x65, 0x70, 0x6f, 0x63, 0x68, 0x5f, 0x70, 0x61, 0x72, 0x74, 0x69, 0x63, 0x69, + 0x70, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0xda, 0x36, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x11, 0x92, + 0xb5, 0x18, 0x0d, 0x31, 0x30, 0x39, 0x39, 0x35, 0x31, 0x31, 0x36, 0x32, 0x37, 0x37, 0x37, 0x36, + 0x52, 0x19, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x45, 0x70, 0x6f, 0x63, 0x68, 0x50, 0x61, + 0x72, 0x74, 0x69, 0x63, 0x69, 0x70, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x67, 0x0a, 0x12, 0x6a, + 0x75, 0x73, 0x74, 0x69, 0x66, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x62, 0x69, 0x74, + 0x73, 0x18, 0xc1, 0x3e, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x37, 0x82, 0xb5, 0x18, 0x2e, 0x67, 0x69, + 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, + 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x67, 0x6f, 0x2d, 0x62, 0x69, 0x74, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x2e, 0x42, 0x69, 0x74, 0x76, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x34, 0x8a, 0xb5, 0x18, 0x01, 0x31, 0x52, 0x11, 0x6a, 0x75, 0x73, 0x74, 0x69, 0x66, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x42, 0x69, 0x74, 0x73, 0x12, 0x66, 0x0a, 0x1d, 0x70, 0x72, 0x65, 0x76, 0x69, 0x6f, 0x75, 0x73, @@ -2525,876 +2703,358 @@ var file_proto_prysm_v1alpha1_beacon_state_proto_rawDesc = []byte{ 0x0b, 0x32, 0x21, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x43, 0x68, 0x65, 0x63, 0x6b, 0x70, 0x6f, 0x69, 0x6e, 0x74, 0x52, 0x13, 0x66, 0x69, 0x6e, 0x61, 0x6c, 0x69, 0x7a, 0x65, 0x64, 0x43, - 0x68, 0x65, 0x63, 0x6b, 0x70, 0x6f, 0x69, 0x6e, 0x74, 0x22, 0x9b, 0x03, 0x0a, 0x12, 0x50, 0x65, - 0x6e, 0x64, 0x69, 0x6e, 0x67, 0x41, 0x74, 0x74, 0x65, 0x73, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, - 0x12, 0x63, 0x0a, 0x10, 0x61, 0x67, 0x67, 0x72, 0x65, 0x67, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, - 0x62, 0x69, 0x74, 0x73, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x38, 0x82, 0xb5, 0x18, 0x2c, - 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, - 0x61, 0x74, 0x69, 0x63, 0x6c, 0x61, 0x62, 0x73, 0x2f, 0x67, 0x6f, 0x2d, 0x62, 0x69, 0x74, 0x66, - 0x69, 0x65, 0x6c, 0x64, 0x2e, 0x42, 0x69, 0x74, 0x6c, 0x69, 0x73, 0x74, 0x92, 0xb5, 0x18, 0x04, - 0x32, 0x30, 0x34, 0x38, 0x52, 0x0f, 0x61, 0x67, 0x67, 0x72, 0x65, 0x67, 0x61, 0x74, 0x69, 0x6f, - 0x6e, 0x42, 0x69, 0x74, 0x73, 0x12, 0x3a, 0x0a, 0x04, 0x64, 0x61, 0x74, 0x61, 0x18, 0x02, 0x20, - 0x01, 0x28, 0x0b, 0x32, 0x26, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, - 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x41, 0x74, 0x74, 0x65, - 0x73, 0x74, 0x61, 0x74, 0x69, 
0x6f, 0x6e, 0x44, 0x61, 0x74, 0x61, 0x52, 0x04, 0x64, 0x61, 0x74, - 0x61, 0x12, 0x6d, 0x0a, 0x0f, 0x69, 0x6e, 0x63, 0x6c, 0x75, 0x73, 0x69, 0x6f, 0x6e, 0x5f, 0x64, - 0x65, 0x6c, 0x61, 0x79, 0x18, 0x03, 0x20, 0x01, 0x28, 0x04, 0x42, 0x44, 0x82, 0xb5, 0x18, 0x40, - 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, - 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, - 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, - 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x53, 0x6c, 0x6f, 0x74, - 0x52, 0x0e, 0x69, 0x6e, 0x63, 0x6c, 0x75, 0x73, 0x69, 0x6f, 0x6e, 0x44, 0x65, 0x6c, 0x61, 0x79, - 0x12, 0x75, 0x0a, 0x0e, 0x70, 0x72, 0x6f, 0x70, 0x6f, 0x73, 0x65, 0x72, 0x5f, 0x69, 0x6e, 0x64, - 0x65, 0x78, 0x18, 0x04, 0x20, 0x01, 0x28, 0x04, 0x42, 0x4e, 0x82, 0xb5, 0x18, 0x4a, 0x67, 0x69, - 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, - 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, - 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, - 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x56, 0x61, 0x6c, 0x69, 0x64, 0x61, - 0x74, 0x6f, 0x72, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x52, 0x0d, 0x70, 0x72, 0x6f, 0x70, 0x6f, 0x73, - 0x65, 0x72, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x22, 0x6d, 0x0a, 0x0f, 0x48, 0x69, 0x73, 0x74, 0x6f, - 0x72, 0x69, 0x63, 0x61, 0x6c, 0x42, 0x61, 0x74, 0x63, 0x68, 0x12, 0x2c, 0x0a, 0x0b, 0x62, 0x6c, - 0x6f, 0x63, 0x6b, 0x5f, 0x72, 0x6f, 0x6f, 0x74, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0c, 0x42, - 0x0b, 0x8a, 0xb5, 0x18, 0x07, 0x38, 0x31, 0x39, 0x32, 0x2c, 0x33, 0x32, 0x52, 0x0a, 0x62, 0x6c, - 0x6f, 0x63, 0x6b, 0x52, 0x6f, 0x6f, 0x74, 0x73, 0x12, 0x2c, 0x0a, 0x0b, 0x73, 0x74, 0x61, 0x74, - 0x65, 0x5f, 0x72, 0x6f, 0x6f, 0x74, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0c, 0x42, 0x0b, 0x8a, - 0xb5, 0x18, 0x07, 0x38, 0x31, 0x39, 0x32, 0x2c, 0x33, 0x32, 0x52, 0x0a, 0x73, 0x74, 0x61, 0x74, - 0x65, 0x52, 0x6f, 0x6f, 0x74, 0x73, 0x22, 0x7c, 0x0a, 0x0c, 0x53, 0x74, 0x61, 0x74, 0x65, 0x53, - 0x75, 0x6d, 0x6d, 0x61, 0x72, 0x79, 0x12, 0x58, 0x0a, 0x04, 0x73, 0x6c, 0x6f, 0x74, 0x18, 0x01, + 0x68, 0x65, 0x63, 0x6b, 0x70, 0x6f, 0x69, 0x6e, 0x74, 0x12, 0x3f, 0x0a, 0x11, 0x69, 0x6e, 0x61, + 0x63, 0x74, 0x69, 0x76, 0x69, 0x74, 0x79, 0x5f, 0x73, 0x63, 0x6f, 0x72, 0x65, 0x73, 0x18, 0xa9, + 0x46, 0x20, 0x03, 0x28, 0x04, 0x42, 0x11, 0x92, 0xb5, 0x18, 0x0d, 0x31, 0x30, 0x39, 0x39, 0x35, + 0x31, 0x31, 0x36, 0x32, 0x37, 0x37, 0x37, 0x36, 0x52, 0x10, 0x69, 0x6e, 0x61, 0x63, 0x74, 0x69, + 0x76, 0x69, 0x74, 0x79, 0x53, 0x63, 0x6f, 0x72, 0x65, 0x73, 0x12, 0x5b, 0x0a, 0x16, 0x63, 0x75, + 0x72, 0x72, 0x65, 0x6e, 0x74, 0x5f, 0x73, 0x79, 0x6e, 0x63, 0x5f, 0x63, 0x6f, 0x6d, 0x6d, 0x69, + 0x74, 0x74, 0x65, 0x65, 0x18, 0xaa, 0x46, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x24, 0x2e, 0x65, 0x74, + 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, + 0x68, 0x61, 0x31, 0x2e, 0x53, 0x79, 0x6e, 0x63, 0x43, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x74, 0x65, + 0x65, 0x52, 0x14, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x53, 0x79, 0x6e, 0x63, 0x43, 0x6f, + 0x6d, 0x6d, 0x69, 0x74, 0x74, 0x65, 0x65, 0x12, 0x55, 0x0a, 0x13, 0x6e, 0x65, 0x78, 0x74, 0x5f, + 0x73, 0x79, 0x6e, 0x63, 0x5f, 0x63, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x74, 0x65, 0x65, 0x18, 0xab, + 0x46, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x24, 0x2e, 0x65, 
0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, + 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x53, 0x79, + 0x6e, 0x63, 0x43, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x74, 0x65, 0x65, 0x52, 0x11, 0x6e, 0x65, 0x78, + 0x74, 0x53, 0x79, 0x6e, 0x63, 0x43, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x74, 0x65, 0x65, 0x22, 0xa6, + 0x01, 0x0a, 0x1b, 0x53, 0x79, 0x6e, 0x63, 0x41, 0x67, 0x67, 0x72, 0x65, 0x67, 0x61, 0x74, 0x6f, + 0x72, 0x53, 0x65, 0x6c, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x44, 0x61, 0x74, 0x61, 0x12, 0x58, + 0x0a, 0x04, 0x73, 0x6c, 0x6f, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, 0x42, 0x44, 0x82, 0xb5, + 0x18, 0x40, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, + 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, + 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, + 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x53, 0x6c, + 0x6f, 0x74, 0x52, 0x04, 0x73, 0x6c, 0x6f, 0x74, 0x12, 0x2d, 0x0a, 0x12, 0x73, 0x75, 0x62, 0x63, + 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x74, 0x65, 0x65, 0x5f, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x18, 0x02, + 0x20, 0x01, 0x28, 0x04, 0x52, 0x11, 0x73, 0x75, 0x62, 0x63, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x74, + 0x65, 0x65, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x22, 0xc7, 0x0e, 0x0a, 0x14, 0x42, 0x65, 0x61, 0x63, + 0x6f, 0x6e, 0x53, 0x74, 0x61, 0x74, 0x65, 0x42, 0x65, 0x6c, 0x6c, 0x61, 0x74, 0x72, 0x69, 0x78, + 0x12, 0x22, 0x0a, 0x0c, 0x67, 0x65, 0x6e, 0x65, 0x73, 0x69, 0x73, 0x5f, 0x74, 0x69, 0x6d, 0x65, + 0x18, 0xe9, 0x07, 0x20, 0x01, 0x28, 0x04, 0x52, 0x0b, 0x67, 0x65, 0x6e, 0x65, 0x73, 0x69, 0x73, + 0x54, 0x69, 0x6d, 0x65, 0x12, 0x3f, 0x0a, 0x17, 0x67, 0x65, 0x6e, 0x65, 0x73, 0x69, 0x73, 0x5f, + 0x76, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, 0x73, 0x5f, 0x72, 0x6f, 0x6f, 0x74, 0x18, + 0xea, 0x07, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x06, 0x8a, 0xb5, 0x18, 0x02, 0x33, 0x32, 0x52, 0x15, + 0x67, 0x65, 0x6e, 0x65, 0x73, 0x69, 0x73, 0x56, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, + 0x73, 0x52, 0x6f, 0x6f, 0x74, 0x12, 0x59, 0x0a, 0x04, 0x73, 0x6c, 0x6f, 0x74, 0x18, 0xeb, 0x07, 0x20, 0x01, 0x28, 0x04, 0x42, 0x44, 0x82, 0xb5, 0x18, 0x40, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x53, 0x6c, 0x6f, 0x74, 0x52, 0x04, 0x73, 0x6c, 0x6f, 0x74, - 0x12, 0x12, 0x0a, 0x04, 0x72, 0x6f, 0x6f, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x04, - 0x72, 0x6f, 0x6f, 0x74, 0x22, 0x56, 0x0a, 0x0b, 0x53, 0x69, 0x67, 0x6e, 0x69, 0x6e, 0x67, 0x44, - 0x61, 0x74, 0x61, 0x12, 0x27, 0x0a, 0x0b, 0x6f, 0x62, 0x6a, 0x65, 0x63, 0x74, 0x5f, 0x72, 0x6f, - 0x6f, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x06, 0x8a, 0xb5, 0x18, 0x02, 0x33, 0x32, - 0x52, 0x0a, 0x6f, 0x62, 0x6a, 0x65, 0x63, 0x74, 0x52, 0x6f, 0x6f, 0x74, 0x12, 0x1e, 0x0a, 0x06, - 0x64, 0x6f, 0x6d, 0x61, 0x69, 0x6e, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x06, 0x8a, 0xb5, - 0x18, 0x02, 0x33, 0x32, 0x52, 0x06, 0x64, 0x6f, 0x6d, 0x61, 0x69, 0x6e, 0x22, 0x7a, 0x0a, 0x08, - 0x46, 0x6f, 0x72, 0x6b, 0x44, 0x61, 0x74, 0x61, 0x12, 0x2e, 0x0a, 0x0f, 0x63, 0x75, 0x72, 0x72, - 0x65, 0x6e, 0x74, 0x5f, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x04, 0x20, 0x01, 
0x28, - 0x0c, 0x42, 0x05, 0x8a, 0xb5, 0x18, 0x01, 0x34, 0x52, 0x0e, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, - 0x74, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x3e, 0x0a, 0x17, 0x67, 0x65, 0x6e, 0x65, - 0x73, 0x69, 0x73, 0x5f, 0x76, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, 0x73, 0x5f, 0x72, - 0x6f, 0x6f, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x06, 0x8a, 0xb5, 0x18, 0x02, 0x33, - 0x32, 0x52, 0x15, 0x67, 0x65, 0x6e, 0x65, 0x73, 0x69, 0x73, 0x56, 0x61, 0x6c, 0x69, 0x64, 0x61, - 0x74, 0x6f, 0x72, 0x73, 0x52, 0x6f, 0x6f, 0x74, 0x22, 0xb7, 0x01, 0x0a, 0x0b, 0x43, 0x68, 0x65, - 0x63, 0x6b, 0x50, 0x74, 0x49, 0x6e, 0x66, 0x6f, 0x12, 0x12, 0x0a, 0x04, 0x73, 0x65, 0x65, 0x64, - 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x04, 0x73, 0x65, 0x65, 0x64, 0x12, 0x21, 0x0a, 0x0c, - 0x67, 0x65, 0x6e, 0x65, 0x73, 0x69, 0x73, 0x5f, 0x72, 0x6f, 0x6f, 0x74, 0x18, 0x02, 0x20, 0x01, - 0x28, 0x0c, 0x52, 0x0b, 0x67, 0x65, 0x6e, 0x65, 0x73, 0x69, 0x73, 0x52, 0x6f, 0x6f, 0x74, 0x12, - 0x25, 0x0a, 0x0e, 0x61, 0x63, 0x74, 0x69, 0x76, 0x65, 0x5f, 0x69, 0x6e, 0x64, 0x69, 0x63, 0x65, - 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, 0x04, 0x52, 0x0d, 0x61, 0x63, 0x74, 0x69, 0x76, 0x65, 0x49, - 0x6e, 0x64, 0x69, 0x63, 0x65, 0x73, 0x12, 0x19, 0x0a, 0x08, 0x70, 0x75, 0x62, 0x5f, 0x6b, 0x65, - 0x79, 0x73, 0x18, 0x04, 0x20, 0x03, 0x28, 0x0c, 0x52, 0x07, 0x70, 0x75, 0x62, 0x4b, 0x65, 0x79, - 0x73, 0x12, 0x2f, 0x0a, 0x04, 0x66, 0x6f, 0x72, 0x6b, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, + 0x12, 0x30, 0x0a, 0x04, 0x66, 0x6f, 0x72, 0x6b, 0x18, 0xec, 0x07, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1b, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x46, 0x6f, 0x72, 0x6b, 0x52, 0x04, 0x66, 0x6f, - 0x72, 0x6b, 0x22, 0x98, 0x01, 0x0a, 0x0e, 0x44, 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x4d, 0x65, - 0x73, 0x73, 0x61, 0x67, 0x65, 0x12, 0x2f, 0x0a, 0x0a, 0x70, 0x75, 0x62, 0x6c, 0x69, 0x63, 0x5f, - 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x10, 0x8a, 0xb5, 0x18, 0x02, 0x34, - 0x38, 0x9a, 0xb5, 0x18, 0x06, 0x70, 0x75, 0x62, 0x6b, 0x65, 0x79, 0x52, 0x09, 0x70, 0x75, 0x62, - 0x6c, 0x69, 0x63, 0x4b, 0x65, 0x79, 0x12, 0x3d, 0x0a, 0x16, 0x77, 0x69, 0x74, 0x68, 0x64, 0x72, - 0x61, 0x77, 0x61, 0x6c, 0x5f, 0x63, 0x72, 0x65, 0x64, 0x65, 0x6e, 0x74, 0x69, 0x61, 0x6c, 0x73, - 0x18, 0x02, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x06, 0x8a, 0xb5, 0x18, 0x02, 0x33, 0x32, 0x52, 0x15, - 0x77, 0x69, 0x74, 0x68, 0x64, 0x72, 0x61, 0x77, 0x61, 0x6c, 0x43, 0x72, 0x65, 0x64, 0x65, 0x6e, - 0x74, 0x69, 0x61, 0x6c, 0x73, 0x12, 0x16, 0x0a, 0x06, 0x61, 0x6d, 0x6f, 0x75, 0x6e, 0x74, 0x18, - 0x03, 0x20, 0x01, 0x28, 0x04, 0x52, 0x06, 0x61, 0x6d, 0x6f, 0x75, 0x6e, 0x74, 0x22, 0x8d, 0x01, - 0x0a, 0x08, 0x50, 0x6f, 0x77, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x12, 0x25, 0x0a, 0x0a, 0x62, 0x6c, - 0x6f, 0x63, 0x6b, 0x5f, 0x68, 0x61, 0x73, 0x68, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x06, - 0x8a, 0xb5, 0x18, 0x02, 0x33, 0x32, 0x52, 0x09, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x48, 0x61, 0x73, - 0x68, 0x12, 0x27, 0x0a, 0x0b, 0x70, 0x61, 0x72, 0x65, 0x6e, 0x74, 0x5f, 0x68, 0x61, 0x73, 0x68, - 0x18, 0x02, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x06, 0x8a, 0xb5, 0x18, 0x02, 0x33, 0x32, 0x52, 0x0a, - 0x70, 0x61, 0x72, 0x65, 0x6e, 0x74, 0x48, 0x61, 0x73, 0x68, 0x12, 0x31, 0x0a, 0x10, 0x74, 0x6f, - 0x74, 0x61, 0x6c, 0x5f, 0x64, 0x69, 0x66, 0x66, 0x69, 0x63, 0x75, 0x6c, 0x74, 0x79, 0x18, 0x03, - 0x20, 0x01, 0x28, 0x0c, 0x42, 0x06, 0x8a, 0xb5, 0x18, 0x02, 0x33, 0x32, 0x52, 0x0f, 0x74, 0x6f, - 0x74, 0x61, 0x6c, 
0x44, 0x69, 0x66, 0x66, 0x69, 0x63, 0x75, 0x6c, 0x74, 0x79, 0x22, 0xd1, 0x0d, - 0x0a, 0x11, 0x42, 0x65, 0x61, 0x63, 0x6f, 0x6e, 0x53, 0x74, 0x61, 0x74, 0x65, 0x41, 0x6c, 0x74, - 0x61, 0x69, 0x72, 0x12, 0x22, 0x0a, 0x0c, 0x67, 0x65, 0x6e, 0x65, 0x73, 0x69, 0x73, 0x5f, 0x74, - 0x69, 0x6d, 0x65, 0x18, 0xe9, 0x07, 0x20, 0x01, 0x28, 0x04, 0x52, 0x0b, 0x67, 0x65, 0x6e, 0x65, - 0x73, 0x69, 0x73, 0x54, 0x69, 0x6d, 0x65, 0x12, 0x3f, 0x0a, 0x17, 0x67, 0x65, 0x6e, 0x65, 0x73, - 0x69, 0x73, 0x5f, 0x76, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, 0x73, 0x5f, 0x72, 0x6f, - 0x6f, 0x74, 0x18, 0xea, 0x07, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x06, 0x8a, 0xb5, 0x18, 0x02, 0x33, - 0x32, 0x52, 0x15, 0x67, 0x65, 0x6e, 0x65, 0x73, 0x69, 0x73, 0x56, 0x61, 0x6c, 0x69, 0x64, 0x61, - 0x74, 0x6f, 0x72, 0x73, 0x52, 0x6f, 0x6f, 0x74, 0x12, 0x59, 0x0a, 0x04, 0x73, 0x6c, 0x6f, 0x74, - 0x18, 0xeb, 0x07, 0x20, 0x01, 0x28, 0x04, 0x42, 0x44, 0x82, 0xb5, 0x18, 0x40, 0x67, 0x69, 0x74, - 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, - 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, - 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, - 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x53, 0x6c, 0x6f, 0x74, 0x52, 0x04, 0x73, - 0x6c, 0x6f, 0x74, 0x12, 0x30, 0x0a, 0x04, 0x66, 0x6f, 0x72, 0x6b, 0x18, 0xec, 0x07, 0x20, 0x01, - 0x28, 0x0b, 0x32, 0x1b, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, - 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x46, 0x6f, 0x72, 0x6b, 0x52, - 0x04, 0x66, 0x6f, 0x72, 0x6b, 0x12, 0x59, 0x0a, 0x13, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x74, 0x5f, - 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x5f, 0x68, 0x65, 0x61, 0x64, 0x65, 0x72, 0x18, 0xd1, 0x0f, 0x20, - 0x01, 0x28, 0x0b, 0x32, 0x28, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, - 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x42, 0x65, 0x61, 0x63, - 0x6f, 0x6e, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x48, 0x65, 0x61, 0x64, 0x65, 0x72, 0x52, 0x11, 0x6c, - 0x61, 0x74, 0x65, 0x73, 0x74, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x48, 0x65, 0x61, 0x64, 0x65, 0x72, - 0x12, 0x2d, 0x0a, 0x0b, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x5f, 0x72, 0x6f, 0x6f, 0x74, 0x73, 0x18, - 0xd2, 0x0f, 0x20, 0x03, 0x28, 0x0c, 0x42, 0x0b, 0x8a, 0xb5, 0x18, 0x07, 0x38, 0x31, 0x39, 0x32, - 0x2c, 0x33, 0x32, 0x52, 0x0a, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x52, 0x6f, 0x6f, 0x74, 0x73, 0x12, - 0x2d, 0x0a, 0x0b, 0x73, 0x74, 0x61, 0x74, 0x65, 0x5f, 0x72, 0x6f, 0x6f, 0x74, 0x73, 0x18, 0xd3, - 0x0f, 0x20, 0x03, 0x28, 0x0c, 0x42, 0x0b, 0x8a, 0xb5, 0x18, 0x07, 0x38, 0x31, 0x39, 0x32, 0x2c, - 0x33, 0x32, 0x52, 0x0a, 0x73, 0x74, 0x61, 0x74, 0x65, 0x52, 0x6f, 0x6f, 0x74, 0x73, 0x12, 0x40, - 0x0a, 0x10, 0x68, 0x69, 0x73, 0x74, 0x6f, 0x72, 0x69, 0x63, 0x61, 0x6c, 0x5f, 0x72, 0x6f, 0x6f, - 0x74, 0x73, 0x18, 0xd4, 0x0f, 0x20, 0x03, 0x28, 0x0c, 0x42, 0x14, 0x8a, 0xb5, 0x18, 0x04, 0x3f, - 0x2c, 0x33, 0x32, 0x92, 0xb5, 0x18, 0x08, 0x31, 0x36, 0x37, 0x37, 0x37, 0x32, 0x31, 0x36, 0x52, - 0x0f, 0x68, 0x69, 0x73, 0x74, 0x6f, 0x72, 0x69, 0x63, 0x61, 0x6c, 0x52, 0x6f, 0x6f, 0x74, 0x73, - 0x12, 0x3d, 0x0a, 0x09, 0x65, 0x74, 0x68, 0x31, 0x5f, 0x64, 0x61, 0x74, 0x61, 0x18, 0xb9, 0x17, - 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1f, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, - 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x45, 0x74, 0x68, - 0x31, 0x44, 0x61, 0x74, 0x61, 0x52, 0x08, 
0x65, 0x74, 0x68, 0x31, 0x44, 0x61, 0x74, 0x61, 0x12, - 0x52, 0x0a, 0x0f, 0x65, 0x74, 0x68, 0x31, 0x5f, 0x64, 0x61, 0x74, 0x61, 0x5f, 0x76, 0x6f, 0x74, - 0x65, 0x73, 0x18, 0xba, 0x17, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1f, 0x2e, 0x65, 0x74, 0x68, 0x65, - 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, - 0x31, 0x2e, 0x45, 0x74, 0x68, 0x31, 0x44, 0x61, 0x74, 0x61, 0x42, 0x08, 0x92, 0xb5, 0x18, 0x04, - 0x32, 0x30, 0x34, 0x38, 0x52, 0x0d, 0x65, 0x74, 0x68, 0x31, 0x44, 0x61, 0x74, 0x61, 0x56, 0x6f, - 0x74, 0x65, 0x73, 0x12, 0x2d, 0x0a, 0x12, 0x65, 0x74, 0x68, 0x31, 0x5f, 0x64, 0x65, 0x70, 0x6f, - 0x73, 0x69, 0x74, 0x5f, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x18, 0xbb, 0x17, 0x20, 0x01, 0x28, 0x04, - 0x52, 0x10, 0x65, 0x74, 0x68, 0x31, 0x44, 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x49, 0x6e, 0x64, - 0x65, 0x78, 0x12, 0x54, 0x0a, 0x0a, 0x76, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, 0x73, - 0x18, 0xa1, 0x1f, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x20, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, - 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, - 0x56, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, 0x42, 0x11, 0x92, 0xb5, 0x18, 0x0d, 0x31, - 0x30, 0x39, 0x39, 0x35, 0x31, 0x31, 0x36, 0x32, 0x37, 0x37, 0x37, 0x36, 0x52, 0x0a, 0x76, 0x61, - 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, 0x73, 0x12, 0x2e, 0x0a, 0x08, 0x62, 0x61, 0x6c, 0x61, - 0x6e, 0x63, 0x65, 0x73, 0x18, 0xa2, 0x1f, 0x20, 0x03, 0x28, 0x04, 0x42, 0x11, 0x92, 0xb5, 0x18, - 0x0d, 0x31, 0x30, 0x39, 0x39, 0x35, 0x31, 0x31, 0x36, 0x32, 0x37, 0x37, 0x37, 0x36, 0x52, 0x08, - 0x62, 0x61, 0x6c, 0x61, 0x6e, 0x63, 0x65, 0x73, 0x12, 0x30, 0x0a, 0x0c, 0x72, 0x61, 0x6e, 0x64, - 0x61, 0x6f, 0x5f, 0x6d, 0x69, 0x78, 0x65, 0x73, 0x18, 0x89, 0x27, 0x20, 0x03, 0x28, 0x0c, 0x42, - 0x0c, 0x8a, 0xb5, 0x18, 0x08, 0x36, 0x35, 0x35, 0x33, 0x36, 0x2c, 0x33, 0x32, 0x52, 0x0b, 0x72, - 0x61, 0x6e, 0x64, 0x61, 0x6f, 0x4d, 0x69, 0x78, 0x65, 0x73, 0x12, 0x27, 0x0a, 0x09, 0x73, 0x6c, - 0x61, 0x73, 0x68, 0x69, 0x6e, 0x67, 0x73, 0x18, 0xf1, 0x2e, 0x20, 0x03, 0x28, 0x04, 0x42, 0x08, - 0x8a, 0xb5, 0x18, 0x04, 0x38, 0x31, 0x39, 0x32, 0x52, 0x09, 0x73, 0x6c, 0x61, 0x73, 0x68, 0x69, - 0x6e, 0x67, 0x73, 0x12, 0x54, 0x0a, 0x1c, 0x70, 0x72, 0x65, 0x76, 0x69, 0x6f, 0x75, 0x73, 0x5f, - 0x65, 0x70, 0x6f, 0x63, 0x68, 0x5f, 0x70, 0x61, 0x72, 0x74, 0x69, 0x63, 0x69, 0x70, 0x61, 0x74, - 0x69, 0x6f, 0x6e, 0x18, 0xd9, 0x36, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x11, 0x92, 0xb5, 0x18, 0x0d, - 0x31, 0x30, 0x39, 0x39, 0x35, 0x31, 0x31, 0x36, 0x32, 0x37, 0x37, 0x37, 0x36, 0x52, 0x1a, 0x70, - 0x72, 0x65, 0x76, 0x69, 0x6f, 0x75, 0x73, 0x45, 0x70, 0x6f, 0x63, 0x68, 0x50, 0x61, 0x72, 0x74, - 0x69, 0x63, 0x69, 0x70, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x52, 0x0a, 0x1b, 0x63, 0x75, 0x72, - 0x72, 0x65, 0x6e, 0x74, 0x5f, 0x65, 0x70, 0x6f, 0x63, 0x68, 0x5f, 0x70, 0x61, 0x72, 0x74, 0x69, - 0x63, 0x69, 0x70, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0xda, 0x36, 0x20, 0x01, 0x28, 0x0c, 0x42, - 0x11, 0x92, 0xb5, 0x18, 0x0d, 0x31, 0x30, 0x39, 0x39, 0x35, 0x31, 0x31, 0x36, 0x32, 0x37, 0x37, - 0x37, 0x36, 0x52, 0x19, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x45, 0x70, 0x6f, 0x63, 0x68, - 0x50, 0x61, 0x72, 0x74, 0x69, 0x63, 0x69, 0x70, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x68, 0x0a, - 0x12, 0x6a, 0x75, 0x73, 0x74, 0x69, 0x66, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x62, - 0x69, 0x74, 0x73, 0x18, 0xc1, 0x3e, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x38, 0x82, 0xb5, 0x18, 0x2f, - 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 
0x70, 0x72, 0x79, 0x73, 0x6d, - 0x61, 0x74, 0x69, 0x63, 0x6c, 0x61, 0x62, 0x73, 0x2f, 0x67, 0x6f, 0x2d, 0x62, 0x69, 0x74, 0x66, - 0x69, 0x65, 0x6c, 0x64, 0x2e, 0x42, 0x69, 0x74, 0x76, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x34, 0x8a, - 0xb5, 0x18, 0x01, 0x31, 0x52, 0x11, 0x6a, 0x75, 0x73, 0x74, 0x69, 0x66, 0x69, 0x63, 0x61, 0x74, - 0x69, 0x6f, 0x6e, 0x42, 0x69, 0x74, 0x73, 0x12, 0x66, 0x0a, 0x1d, 0x70, 0x72, 0x65, 0x76, 0x69, - 0x6f, 0x75, 0x73, 0x5f, 0x6a, 0x75, 0x73, 0x74, 0x69, 0x66, 0x69, 0x65, 0x64, 0x5f, 0x63, 0x68, - 0x65, 0x63, 0x6b, 0x70, 0x6f, 0x69, 0x6e, 0x74, 0x18, 0xc2, 0x3e, 0x20, 0x01, 0x28, 0x0b, 0x32, - 0x21, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, - 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x43, 0x68, 0x65, 0x63, 0x6b, 0x70, 0x6f, 0x69, - 0x6e, 0x74, 0x52, 0x1b, 0x70, 0x72, 0x65, 0x76, 0x69, 0x6f, 0x75, 0x73, 0x4a, 0x75, 0x73, 0x74, - 0x69, 0x66, 0x69, 0x65, 0x64, 0x43, 0x68, 0x65, 0x63, 0x6b, 0x70, 0x6f, 0x69, 0x6e, 0x74, 0x12, - 0x64, 0x0a, 0x1c, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x5f, 0x6a, 0x75, 0x73, 0x74, 0x69, - 0x66, 0x69, 0x65, 0x64, 0x5f, 0x63, 0x68, 0x65, 0x63, 0x6b, 0x70, 0x6f, 0x69, 0x6e, 0x74, 0x18, - 0xc3, 0x3e, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x21, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, - 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x43, - 0x68, 0x65, 0x63, 0x6b, 0x70, 0x6f, 0x69, 0x6e, 0x74, 0x52, 0x1a, 0x63, 0x75, 0x72, 0x72, 0x65, - 0x6e, 0x74, 0x4a, 0x75, 0x73, 0x74, 0x69, 0x66, 0x69, 0x65, 0x64, 0x43, 0x68, 0x65, 0x63, 0x6b, - 0x70, 0x6f, 0x69, 0x6e, 0x74, 0x12, 0x55, 0x0a, 0x14, 0x66, 0x69, 0x6e, 0x61, 0x6c, 0x69, 0x7a, - 0x65, 0x64, 0x5f, 0x63, 0x68, 0x65, 0x63, 0x6b, 0x70, 0x6f, 0x69, 0x6e, 0x74, 0x18, 0xc4, 0x3e, - 0x20, 0x01, 0x28, 0x0b, 0x32, 0x21, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, - 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x43, 0x68, 0x65, - 0x63, 0x6b, 0x70, 0x6f, 0x69, 0x6e, 0x74, 0x52, 0x13, 0x66, 0x69, 0x6e, 0x61, 0x6c, 0x69, 0x7a, - 0x65, 0x64, 0x43, 0x68, 0x65, 0x63, 0x6b, 0x70, 0x6f, 0x69, 0x6e, 0x74, 0x12, 0x3f, 0x0a, 0x11, - 0x69, 0x6e, 0x61, 0x63, 0x74, 0x69, 0x76, 0x69, 0x74, 0x79, 0x5f, 0x73, 0x63, 0x6f, 0x72, 0x65, - 0x73, 0x18, 0xa9, 0x46, 0x20, 0x03, 0x28, 0x04, 0x42, 0x11, 0x92, 0xb5, 0x18, 0x0d, 0x31, 0x30, - 0x39, 0x39, 0x35, 0x31, 0x31, 0x36, 0x32, 0x37, 0x37, 0x37, 0x36, 0x52, 0x10, 0x69, 0x6e, 0x61, - 0x63, 0x74, 0x69, 0x76, 0x69, 0x74, 0x79, 0x53, 0x63, 0x6f, 0x72, 0x65, 0x73, 0x12, 0x5b, 0x0a, - 0x16, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x5f, 0x73, 0x79, 0x6e, 0x63, 0x5f, 0x63, 0x6f, - 0x6d, 0x6d, 0x69, 0x74, 0x74, 0x65, 0x65, 0x18, 0xaa, 0x46, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x24, - 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, - 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x53, 0x79, 0x6e, 0x63, 0x43, 0x6f, 0x6d, 0x6d, 0x69, - 0x74, 0x74, 0x65, 0x65, 0x52, 0x14, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x53, 0x79, 0x6e, - 0x63, 0x43, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x74, 0x65, 0x65, 0x12, 0x55, 0x0a, 0x13, 0x6e, 0x65, - 0x78, 0x74, 0x5f, 0x73, 0x79, 0x6e, 0x63, 0x5f, 0x63, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x74, 0x65, - 0x65, 0x18, 0xab, 0x46, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x24, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, - 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, - 0x2e, 0x53, 0x79, 0x6e, 0x63, 0x43, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x74, 0x65, 0x65, 0x52, 
0x11, - 0x6e, 0x65, 0x78, 0x74, 0x53, 0x79, 0x6e, 0x63, 0x43, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x74, 0x65, - 0x65, 0x22, 0xa6, 0x01, 0x0a, 0x1b, 0x53, 0x79, 0x6e, 0x63, 0x41, 0x67, 0x67, 0x72, 0x65, 0x67, - 0x61, 0x74, 0x6f, 0x72, 0x53, 0x65, 0x6c, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x44, 0x61, 0x74, - 0x61, 0x12, 0x58, 0x0a, 0x04, 0x73, 0x6c, 0x6f, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, 0x42, - 0x44, 0x82, 0xb5, 0x18, 0x40, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, - 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, - 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, - 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, - 0x2e, 0x53, 0x6c, 0x6f, 0x74, 0x52, 0x04, 0x73, 0x6c, 0x6f, 0x74, 0x12, 0x2d, 0x0a, 0x12, 0x73, - 0x75, 0x62, 0x63, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x74, 0x65, 0x65, 0x5f, 0x69, 0x6e, 0x64, 0x65, - 0x78, 0x18, 0x02, 0x20, 0x01, 0x28, 0x04, 0x52, 0x11, 0x73, 0x75, 0x62, 0x63, 0x6f, 0x6d, 0x6d, - 0x69, 0x74, 0x74, 0x65, 0x65, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x22, 0xc8, 0x0e, 0x0a, 0x14, 0x42, - 0x65, 0x61, 0x63, 0x6f, 0x6e, 0x53, 0x74, 0x61, 0x74, 0x65, 0x42, 0x65, 0x6c, 0x6c, 0x61, 0x74, - 0x72, 0x69, 0x78, 0x12, 0x22, 0x0a, 0x0c, 0x67, 0x65, 0x6e, 0x65, 0x73, 0x69, 0x73, 0x5f, 0x74, - 0x69, 0x6d, 0x65, 0x18, 0xe9, 0x07, 0x20, 0x01, 0x28, 0x04, 0x52, 0x0b, 0x67, 0x65, 0x6e, 0x65, - 0x73, 0x69, 0x73, 0x54, 0x69, 0x6d, 0x65, 0x12, 0x3f, 0x0a, 0x17, 0x67, 0x65, 0x6e, 0x65, 0x73, - 0x69, 0x73, 0x5f, 0x76, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, 0x73, 0x5f, 0x72, 0x6f, - 0x6f, 0x74, 0x18, 0xea, 0x07, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x06, 0x8a, 0xb5, 0x18, 0x02, 0x33, - 0x32, 0x52, 0x15, 0x67, 0x65, 0x6e, 0x65, 0x73, 0x69, 0x73, 0x56, 0x61, 0x6c, 0x69, 0x64, 0x61, - 0x74, 0x6f, 0x72, 0x73, 0x52, 0x6f, 0x6f, 0x74, 0x12, 0x59, 0x0a, 0x04, 0x73, 0x6c, 0x6f, 0x74, - 0x18, 0xeb, 0x07, 0x20, 0x01, 0x28, 0x04, 0x42, 0x44, 0x82, 0xb5, 0x18, 0x40, 0x67, 0x69, 0x74, - 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, - 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, - 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, - 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x53, 0x6c, 0x6f, 0x74, 0x52, 0x04, 0x73, - 0x6c, 0x6f, 0x74, 0x12, 0x30, 0x0a, 0x04, 0x66, 0x6f, 0x72, 0x6b, 0x18, 0xec, 0x07, 0x20, 0x01, - 0x28, 0x0b, 0x32, 0x1b, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, - 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x46, 0x6f, 0x72, 0x6b, 0x52, - 0x04, 0x66, 0x6f, 0x72, 0x6b, 0x12, 0x59, 0x0a, 0x13, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x74, 0x5f, - 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x5f, 0x68, 0x65, 0x61, 0x64, 0x65, 0x72, 0x18, 0xd1, 0x0f, 0x20, - 0x01, 0x28, 0x0b, 0x32, 0x28, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, - 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x42, 0x65, 0x61, 0x63, - 0x6f, 0x6e, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x48, 0x65, 0x61, 0x64, 0x65, 0x72, 0x52, 0x11, 0x6c, - 0x61, 0x74, 0x65, 0x73, 0x74, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x48, 0x65, 0x61, 0x64, 0x65, 0x72, - 0x12, 0x2d, 0x0a, 0x0b, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x5f, 0x72, 0x6f, 0x6f, 0x74, 0x73, 0x18, - 0xd2, 0x0f, 0x20, 0x03, 0x28, 0x0c, 0x42, 0x0b, 0x8a, 0xb5, 0x18, 0x07, 0x38, 0x31, 0x39, 0x32, - 0x2c, 0x33, 0x32, 
0x52, 0x0a, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x52, 0x6f, 0x6f, 0x74, 0x73, 0x12, - 0x2d, 0x0a, 0x0b, 0x73, 0x74, 0x61, 0x74, 0x65, 0x5f, 0x72, 0x6f, 0x6f, 0x74, 0x73, 0x18, 0xd3, - 0x0f, 0x20, 0x03, 0x28, 0x0c, 0x42, 0x0b, 0x8a, 0xb5, 0x18, 0x07, 0x38, 0x31, 0x39, 0x32, 0x2c, - 0x33, 0x32, 0x52, 0x0a, 0x73, 0x74, 0x61, 0x74, 0x65, 0x52, 0x6f, 0x6f, 0x74, 0x73, 0x12, 0x40, - 0x0a, 0x10, 0x68, 0x69, 0x73, 0x74, 0x6f, 0x72, 0x69, 0x63, 0x61, 0x6c, 0x5f, 0x72, 0x6f, 0x6f, - 0x74, 0x73, 0x18, 0xd4, 0x0f, 0x20, 0x03, 0x28, 0x0c, 0x42, 0x14, 0x8a, 0xb5, 0x18, 0x04, 0x3f, - 0x2c, 0x33, 0x32, 0x92, 0xb5, 0x18, 0x08, 0x31, 0x36, 0x37, 0x37, 0x37, 0x32, 0x31, 0x36, 0x52, - 0x0f, 0x68, 0x69, 0x73, 0x74, 0x6f, 0x72, 0x69, 0x63, 0x61, 0x6c, 0x52, 0x6f, 0x6f, 0x74, 0x73, - 0x12, 0x3d, 0x0a, 0x09, 0x65, 0x74, 0x68, 0x31, 0x5f, 0x64, 0x61, 0x74, 0x61, 0x18, 0xb9, 0x17, - 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1f, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, - 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x45, 0x74, 0x68, - 0x31, 0x44, 0x61, 0x74, 0x61, 0x52, 0x08, 0x65, 0x74, 0x68, 0x31, 0x44, 0x61, 0x74, 0x61, 0x12, - 0x52, 0x0a, 0x0f, 0x65, 0x74, 0x68, 0x31, 0x5f, 0x64, 0x61, 0x74, 0x61, 0x5f, 0x76, 0x6f, 0x74, - 0x65, 0x73, 0x18, 0xba, 0x17, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1f, 0x2e, 0x65, 0x74, 0x68, 0x65, - 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, - 0x31, 0x2e, 0x45, 0x74, 0x68, 0x31, 0x44, 0x61, 0x74, 0x61, 0x42, 0x08, 0x92, 0xb5, 0x18, 0x04, - 0x32, 0x30, 0x34, 0x38, 0x52, 0x0d, 0x65, 0x74, 0x68, 0x31, 0x44, 0x61, 0x74, 0x61, 0x56, 0x6f, - 0x74, 0x65, 0x73, 0x12, 0x2d, 0x0a, 0x12, 0x65, 0x74, 0x68, 0x31, 0x5f, 0x64, 0x65, 0x70, 0x6f, - 0x73, 0x69, 0x74, 0x5f, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x18, 0xbb, 0x17, 0x20, 0x01, 0x28, 0x04, - 0x52, 0x10, 0x65, 0x74, 0x68, 0x31, 0x44, 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x49, 0x6e, 0x64, - 0x65, 0x78, 0x12, 0x54, 0x0a, 0x0a, 0x76, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, 0x73, - 0x18, 0xa1, 0x1f, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x20, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, - 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, - 0x56, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, 0x42, 0x11, 0x92, 0xb5, 0x18, 0x0d, 0x31, - 0x30, 0x39, 0x39, 0x35, 0x31, 0x31, 0x36, 0x32, 0x37, 0x37, 0x37, 0x36, 0x52, 0x0a, 0x76, 0x61, - 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, 0x73, 0x12, 0x2e, 0x0a, 0x08, 0x62, 0x61, 0x6c, 0x61, - 0x6e, 0x63, 0x65, 0x73, 0x18, 0xa2, 0x1f, 0x20, 0x03, 0x28, 0x04, 0x42, 0x11, 0x92, 0xb5, 0x18, - 0x0d, 0x31, 0x30, 0x39, 0x39, 0x35, 0x31, 0x31, 0x36, 0x32, 0x37, 0x37, 0x37, 0x36, 0x52, 0x08, - 0x62, 0x61, 0x6c, 0x61, 0x6e, 0x63, 0x65, 0x73, 0x12, 0x30, 0x0a, 0x0c, 0x72, 0x61, 0x6e, 0x64, - 0x61, 0x6f, 0x5f, 0x6d, 0x69, 0x78, 0x65, 0x73, 0x18, 0x89, 0x27, 0x20, 0x03, 0x28, 0x0c, 0x42, - 0x0c, 0x8a, 0xb5, 0x18, 0x08, 0x36, 0x35, 0x35, 0x33, 0x36, 0x2c, 0x33, 0x32, 0x52, 0x0b, 0x72, - 0x61, 0x6e, 0x64, 0x61, 0x6f, 0x4d, 0x69, 0x78, 0x65, 0x73, 0x12, 0x27, 0x0a, 0x09, 0x73, 0x6c, - 0x61, 0x73, 0x68, 0x69, 0x6e, 0x67, 0x73, 0x18, 0xf1, 0x2e, 0x20, 0x03, 0x28, 0x04, 0x42, 0x08, - 0x8a, 0xb5, 0x18, 0x04, 0x38, 0x31, 0x39, 0x32, 0x52, 0x09, 0x73, 0x6c, 0x61, 0x73, 0x68, 0x69, - 0x6e, 0x67, 0x73, 0x12, 0x54, 0x0a, 0x1c, 0x70, 0x72, 0x65, 0x76, 0x69, 0x6f, 0x75, 0x73, 0x5f, - 0x65, 0x70, 0x6f, 0x63, 0x68, 0x5f, 0x70, 0x61, 0x72, 0x74, 0x69, 0x63, 0x69, 0x70, 0x61, 0x74, - 0x69, 0x6f, 0x6e, 0x18, 0xd9, 0x36, 0x20, 
0x01, 0x28, 0x0c, 0x42, 0x11, 0x92, 0xb5, 0x18, 0x0d, - 0x31, 0x30, 0x39, 0x39, 0x35, 0x31, 0x31, 0x36, 0x32, 0x37, 0x37, 0x37, 0x36, 0x52, 0x1a, 0x70, - 0x72, 0x65, 0x76, 0x69, 0x6f, 0x75, 0x73, 0x45, 0x70, 0x6f, 0x63, 0x68, 0x50, 0x61, 0x72, 0x74, - 0x69, 0x63, 0x69, 0x70, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x52, 0x0a, 0x1b, 0x63, 0x75, 0x72, - 0x72, 0x65, 0x6e, 0x74, 0x5f, 0x65, 0x70, 0x6f, 0x63, 0x68, 0x5f, 0x70, 0x61, 0x72, 0x74, 0x69, - 0x63, 0x69, 0x70, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0xda, 0x36, 0x20, 0x01, 0x28, 0x0c, 0x42, - 0x11, 0x92, 0xb5, 0x18, 0x0d, 0x31, 0x30, 0x39, 0x39, 0x35, 0x31, 0x31, 0x36, 0x32, 0x37, 0x37, - 0x37, 0x36, 0x52, 0x19, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x45, 0x70, 0x6f, 0x63, 0x68, - 0x50, 0x61, 0x72, 0x74, 0x69, 0x63, 0x69, 0x70, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x68, 0x0a, - 0x12, 0x6a, 0x75, 0x73, 0x74, 0x69, 0x66, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x62, - 0x69, 0x74, 0x73, 0x18, 0xc1, 0x3e, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x38, 0x82, 0xb5, 0x18, 0x2f, - 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, - 0x61, 0x74, 0x69, 0x63, 0x6c, 0x61, 0x62, 0x73, 0x2f, 0x67, 0x6f, 0x2d, 0x62, 0x69, 0x74, 0x66, - 0x69, 0x65, 0x6c, 0x64, 0x2e, 0x42, 0x69, 0x74, 0x76, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x34, 0x8a, - 0xb5, 0x18, 0x01, 0x31, 0x52, 0x11, 0x6a, 0x75, 0x73, 0x74, 0x69, 0x66, 0x69, 0x63, 0x61, 0x74, - 0x69, 0x6f, 0x6e, 0x42, 0x69, 0x74, 0x73, 0x12, 0x66, 0x0a, 0x1d, 0x70, 0x72, 0x65, 0x76, 0x69, - 0x6f, 0x75, 0x73, 0x5f, 0x6a, 0x75, 0x73, 0x74, 0x69, 0x66, 0x69, 0x65, 0x64, 0x5f, 0x63, 0x68, - 0x65, 0x63, 0x6b, 0x70, 0x6f, 0x69, 0x6e, 0x74, 0x18, 0xc2, 0x3e, 0x20, 0x01, 0x28, 0x0b, 0x32, - 0x21, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, - 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x43, 0x68, 0x65, 0x63, 0x6b, 0x70, 0x6f, 0x69, - 0x6e, 0x74, 0x52, 0x1b, 0x70, 0x72, 0x65, 0x76, 0x69, 0x6f, 0x75, 0x73, 0x4a, 0x75, 0x73, 0x74, - 0x69, 0x66, 0x69, 0x65, 0x64, 0x43, 0x68, 0x65, 0x63, 0x6b, 0x70, 0x6f, 0x69, 0x6e, 0x74, 0x12, - 0x64, 0x0a, 0x1c, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x5f, 0x6a, 0x75, 0x73, 0x74, 0x69, - 0x66, 0x69, 0x65, 0x64, 0x5f, 0x63, 0x68, 0x65, 0x63, 0x6b, 0x70, 0x6f, 0x69, 0x6e, 0x74, 0x18, - 0xc3, 0x3e, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x21, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, - 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x43, - 0x68, 0x65, 0x63, 0x6b, 0x70, 0x6f, 0x69, 0x6e, 0x74, 0x52, 0x1a, 0x63, 0x75, 0x72, 0x72, 0x65, - 0x6e, 0x74, 0x4a, 0x75, 0x73, 0x74, 0x69, 0x66, 0x69, 0x65, 0x64, 0x43, 0x68, 0x65, 0x63, 0x6b, - 0x70, 0x6f, 0x69, 0x6e, 0x74, 0x12, 0x55, 0x0a, 0x14, 0x66, 0x69, 0x6e, 0x61, 0x6c, 0x69, 0x7a, - 0x65, 0x64, 0x5f, 0x63, 0x68, 0x65, 0x63, 0x6b, 0x70, 0x6f, 0x69, 0x6e, 0x74, 0x18, 0xc4, 0x3e, - 0x20, 0x01, 0x28, 0x0b, 0x32, 0x21, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, - 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x43, 0x68, 0x65, - 0x63, 0x6b, 0x70, 0x6f, 0x69, 0x6e, 0x74, 0x52, 0x13, 0x66, 0x69, 0x6e, 0x61, 0x6c, 0x69, 0x7a, - 0x65, 0x64, 0x43, 0x68, 0x65, 0x63, 0x6b, 0x70, 0x6f, 0x69, 0x6e, 0x74, 0x12, 0x3f, 0x0a, 0x11, - 0x69, 0x6e, 0x61, 0x63, 0x74, 0x69, 0x76, 0x69, 0x74, 0x79, 0x5f, 0x73, 0x63, 0x6f, 0x72, 0x65, - 0x73, 0x18, 0xa9, 0x46, 0x20, 0x03, 0x28, 0x04, 0x42, 0x11, 0x92, 0xb5, 0x18, 0x0d, 0x31, 0x30, - 0x39, 0x39, 0x35, 0x31, 0x31, 0x36, 0x32, 0x37, 0x37, 0x37, 0x36, 
0x52, 0x10, 0x69, 0x6e, 0x61, - 0x63, 0x74, 0x69, 0x76, 0x69, 0x74, 0x79, 0x53, 0x63, 0x6f, 0x72, 0x65, 0x73, 0x12, 0x5b, 0x0a, - 0x16, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x5f, 0x73, 0x79, 0x6e, 0x63, 0x5f, 0x63, 0x6f, - 0x6d, 0x6d, 0x69, 0x74, 0x74, 0x65, 0x65, 0x18, 0xaa, 0x46, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x24, - 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, - 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x53, 0x79, 0x6e, 0x63, 0x43, 0x6f, 0x6d, 0x6d, 0x69, - 0x74, 0x74, 0x65, 0x65, 0x52, 0x14, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x53, 0x79, 0x6e, - 0x63, 0x43, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x74, 0x65, 0x65, 0x12, 0x55, 0x0a, 0x13, 0x6e, 0x65, - 0x78, 0x74, 0x5f, 0x73, 0x79, 0x6e, 0x63, 0x5f, 0x63, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x74, 0x65, - 0x65, 0x18, 0xab, 0x46, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x24, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, - 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, - 0x2e, 0x53, 0x79, 0x6e, 0x63, 0x43, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x74, 0x65, 0x65, 0x52, 0x11, - 0x6e, 0x65, 0x78, 0x74, 0x53, 0x79, 0x6e, 0x63, 0x43, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x74, 0x65, - 0x65, 0x12, 0x72, 0x0a, 0x1f, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x74, 0x5f, 0x65, 0x78, 0x65, 0x63, - 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x70, 0x61, 0x79, 0x6c, 0x6f, 0x61, 0x64, 0x5f, 0x68, 0x65, - 0x61, 0x64, 0x65, 0x72, 0x18, 0x91, 0x4e, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x2a, 0x2e, 0x65, 0x74, - 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x6e, 0x67, 0x69, 0x6e, 0x65, 0x2e, 0x76, 0x31, - 0x2e, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x50, 0x61, 0x79, 0x6c, 0x6f, 0x61, - 0x64, 0x48, 0x65, 0x61, 0x64, 0x65, 0x72, 0x52, 0x1c, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x74, 0x45, - 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x50, 0x61, 0x79, 0x6c, 0x6f, 0x61, 0x64, 0x48, - 0x65, 0x61, 0x64, 0x65, 0x72, 0x22, 0x87, 0x11, 0x0a, 0x12, 0x42, 0x65, 0x61, 0x63, 0x6f, 0x6e, - 0x53, 0x74, 0x61, 0x74, 0x65, 0x43, 0x61, 0x70, 0x65, 0x6c, 0x6c, 0x61, 0x12, 0x22, 0x0a, 0x0c, - 0x67, 0x65, 0x6e, 0x65, 0x73, 0x69, 0x73, 0x5f, 0x74, 0x69, 0x6d, 0x65, 0x18, 0xe9, 0x07, 0x20, - 0x01, 0x28, 0x04, 0x52, 0x0b, 0x67, 0x65, 0x6e, 0x65, 0x73, 0x69, 0x73, 0x54, 0x69, 0x6d, 0x65, - 0x12, 0x3f, 0x0a, 0x17, 0x67, 0x65, 0x6e, 0x65, 0x73, 0x69, 0x73, 0x5f, 0x76, 0x61, 0x6c, 0x69, - 0x64, 0x61, 0x74, 0x6f, 0x72, 0x73, 0x5f, 0x72, 0x6f, 0x6f, 0x74, 0x18, 0xea, 0x07, 0x20, 0x01, - 0x28, 0x0c, 0x42, 0x06, 0x8a, 0xb5, 0x18, 0x02, 0x33, 0x32, 0x52, 0x15, 0x67, 0x65, 0x6e, 0x65, - 0x73, 0x69, 0x73, 0x56, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, 0x73, 0x52, 0x6f, 0x6f, - 0x74, 0x12, 0x59, 0x0a, 0x04, 0x73, 0x6c, 0x6f, 0x74, 0x18, 0xeb, 0x07, 0x20, 0x01, 0x28, 0x04, - 0x42, 0x44, 0x82, 0xb5, 0x18, 0x40, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, - 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, - 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, - 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, - 0x73, 0x2e, 0x53, 0x6c, 0x6f, 0x74, 0x52, 0x04, 0x73, 0x6c, 0x6f, 0x74, 0x12, 0x30, 0x0a, 0x04, - 0x66, 0x6f, 0x72, 0x6b, 0x18, 0xec, 0x07, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1b, 0x2e, 0x65, 0x74, - 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, - 0x68, 0x61, 0x31, 0x2e, 0x46, 0x6f, 0x72, 0x6b, 0x52, 0x04, 0x66, 0x6f, 0x72, 0x6b, 0x12, 
0x59, - 0x0a, 0x13, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x74, 0x5f, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x5f, 0x68, - 0x65, 0x61, 0x64, 0x65, 0x72, 0x18, 0xd1, 0x0f, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x28, 0x2e, 0x65, - 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, - 0x70, 0x68, 0x61, 0x31, 0x2e, 0x42, 0x65, 0x61, 0x63, 0x6f, 0x6e, 0x42, 0x6c, 0x6f, 0x63, 0x6b, - 0x48, 0x65, 0x61, 0x64, 0x65, 0x72, 0x52, 0x11, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x74, 0x42, 0x6c, - 0x6f, 0x63, 0x6b, 0x48, 0x65, 0x61, 0x64, 0x65, 0x72, 0x12, 0x2d, 0x0a, 0x0b, 0x62, 0x6c, 0x6f, - 0x63, 0x6b, 0x5f, 0x72, 0x6f, 0x6f, 0x74, 0x73, 0x18, 0xd2, 0x0f, 0x20, 0x03, 0x28, 0x0c, 0x42, - 0x0b, 0x8a, 0xb5, 0x18, 0x07, 0x38, 0x31, 0x39, 0x32, 0x2c, 0x33, 0x32, 0x52, 0x0a, 0x62, 0x6c, - 0x6f, 0x63, 0x6b, 0x52, 0x6f, 0x6f, 0x74, 0x73, 0x12, 0x2d, 0x0a, 0x0b, 0x73, 0x74, 0x61, 0x74, - 0x65, 0x5f, 0x72, 0x6f, 0x6f, 0x74, 0x73, 0x18, 0xd3, 0x0f, 0x20, 0x03, 0x28, 0x0c, 0x42, 0x0b, - 0x8a, 0xb5, 0x18, 0x07, 0x38, 0x31, 0x39, 0x32, 0x2c, 0x33, 0x32, 0x52, 0x0a, 0x73, 0x74, 0x61, - 0x74, 0x65, 0x52, 0x6f, 0x6f, 0x74, 0x73, 0x12, 0x40, 0x0a, 0x10, 0x68, 0x69, 0x73, 0x74, 0x6f, - 0x72, 0x69, 0x63, 0x61, 0x6c, 0x5f, 0x72, 0x6f, 0x6f, 0x74, 0x73, 0x18, 0xd4, 0x0f, 0x20, 0x03, - 0x28, 0x0c, 0x42, 0x14, 0x8a, 0xb5, 0x18, 0x04, 0x3f, 0x2c, 0x33, 0x32, 0x92, 0xb5, 0x18, 0x08, - 0x31, 0x36, 0x37, 0x37, 0x37, 0x32, 0x31, 0x36, 0x52, 0x0f, 0x68, 0x69, 0x73, 0x74, 0x6f, 0x72, - 0x69, 0x63, 0x61, 0x6c, 0x52, 0x6f, 0x6f, 0x74, 0x73, 0x12, 0x3d, 0x0a, 0x09, 0x65, 0x74, 0x68, - 0x31, 0x5f, 0x64, 0x61, 0x74, 0x61, 0x18, 0xb9, 0x17, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1f, 0x2e, - 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, - 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x45, 0x74, 0x68, 0x31, 0x44, 0x61, 0x74, 0x61, 0x52, 0x08, - 0x65, 0x74, 0x68, 0x31, 0x44, 0x61, 0x74, 0x61, 0x12, 0x52, 0x0a, 0x0f, 0x65, 0x74, 0x68, 0x31, - 0x5f, 0x64, 0x61, 0x74, 0x61, 0x5f, 0x76, 0x6f, 0x74, 0x65, 0x73, 0x18, 0xba, 0x17, 0x20, 0x03, - 0x28, 0x0b, 0x32, 0x1f, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, - 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x45, 0x74, 0x68, 0x31, 0x44, - 0x61, 0x74, 0x61, 0x42, 0x08, 0x92, 0xb5, 0x18, 0x04, 0x32, 0x30, 0x34, 0x38, 0x52, 0x0d, 0x65, - 0x74, 0x68, 0x31, 0x44, 0x61, 0x74, 0x61, 0x56, 0x6f, 0x74, 0x65, 0x73, 0x12, 0x2d, 0x0a, 0x12, - 0x65, 0x74, 0x68, 0x31, 0x5f, 0x64, 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x5f, 0x69, 0x6e, 0x64, - 0x65, 0x78, 0x18, 0xbb, 0x17, 0x20, 0x01, 0x28, 0x04, 0x52, 0x10, 0x65, 0x74, 0x68, 0x31, 0x44, - 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x12, 0x54, 0x0a, 0x0a, 0x76, - 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, 0x73, 0x18, 0xa1, 0x1f, 0x20, 0x03, 0x28, 0x0b, - 0x32, 0x20, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, - 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x56, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, - 0x6f, 0x72, 0x42, 0x11, 0x92, 0xb5, 0x18, 0x0d, 0x31, 0x30, 0x39, 0x39, 0x35, 0x31, 0x31, 0x36, - 0x32, 0x37, 0x37, 0x37, 0x36, 0x52, 0x0a, 0x76, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, - 0x73, 0x12, 0x2e, 0x0a, 0x08, 0x62, 0x61, 0x6c, 0x61, 0x6e, 0x63, 0x65, 0x73, 0x18, 0xa2, 0x1f, - 0x20, 0x03, 0x28, 0x04, 0x42, 0x11, 0x92, 0xb5, 0x18, 0x0d, 0x31, 0x30, 0x39, 0x39, 0x35, 0x31, - 0x31, 0x36, 0x32, 0x37, 0x37, 0x37, 0x36, 0x52, 0x08, 0x62, 0x61, 0x6c, 0x61, 0x6e, 0x63, 0x65, - 0x73, 0x12, 0x30, 
0x0a, 0x0c, 0x72, 0x61, 0x6e, 0x64, 0x61, 0x6f, 0x5f, 0x6d, 0x69, 0x78, 0x65, - 0x73, 0x18, 0x89, 0x27, 0x20, 0x03, 0x28, 0x0c, 0x42, 0x0c, 0x8a, 0xb5, 0x18, 0x08, 0x36, 0x35, - 0x35, 0x33, 0x36, 0x2c, 0x33, 0x32, 0x52, 0x0b, 0x72, 0x61, 0x6e, 0x64, 0x61, 0x6f, 0x4d, 0x69, - 0x78, 0x65, 0x73, 0x12, 0x27, 0x0a, 0x09, 0x73, 0x6c, 0x61, 0x73, 0x68, 0x69, 0x6e, 0x67, 0x73, - 0x18, 0xf1, 0x2e, 0x20, 0x03, 0x28, 0x04, 0x42, 0x08, 0x8a, 0xb5, 0x18, 0x04, 0x38, 0x31, 0x39, - 0x32, 0x52, 0x09, 0x73, 0x6c, 0x61, 0x73, 0x68, 0x69, 0x6e, 0x67, 0x73, 0x12, 0x54, 0x0a, 0x1c, - 0x70, 0x72, 0x65, 0x76, 0x69, 0x6f, 0x75, 0x73, 0x5f, 0x65, 0x70, 0x6f, 0x63, 0x68, 0x5f, 0x70, - 0x61, 0x72, 0x74, 0x69, 0x63, 0x69, 0x70, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0xd9, 0x36, 0x20, - 0x01, 0x28, 0x0c, 0x42, 0x11, 0x92, 0xb5, 0x18, 0x0d, 0x31, 0x30, 0x39, 0x39, 0x35, 0x31, 0x31, - 0x36, 0x32, 0x37, 0x37, 0x37, 0x36, 0x52, 0x1a, 0x70, 0x72, 0x65, 0x76, 0x69, 0x6f, 0x75, 0x73, - 0x45, 0x70, 0x6f, 0x63, 0x68, 0x50, 0x61, 0x72, 0x74, 0x69, 0x63, 0x69, 0x70, 0x61, 0x74, 0x69, - 0x6f, 0x6e, 0x12, 0x52, 0x0a, 0x1b, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x5f, 0x65, 0x70, - 0x6f, 0x63, 0x68, 0x5f, 0x70, 0x61, 0x72, 0x74, 0x69, 0x63, 0x69, 0x70, 0x61, 0x74, 0x69, 0x6f, - 0x6e, 0x18, 0xda, 0x36, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x11, 0x92, 0xb5, 0x18, 0x0d, 0x31, 0x30, - 0x39, 0x39, 0x35, 0x31, 0x31, 0x36, 0x32, 0x37, 0x37, 0x37, 0x36, 0x52, 0x19, 0x63, 0x75, 0x72, - 0x72, 0x65, 0x6e, 0x74, 0x45, 0x70, 0x6f, 0x63, 0x68, 0x50, 0x61, 0x72, 0x74, 0x69, 0x63, 0x69, - 0x70, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x68, 0x0a, 0x12, 0x6a, 0x75, 0x73, 0x74, 0x69, 0x66, - 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x62, 0x69, 0x74, 0x73, 0x18, 0xc1, 0x3e, 0x20, - 0x01, 0x28, 0x0c, 0x42, 0x38, 0x82, 0xb5, 0x18, 0x2f, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, - 0x63, 0x6f, 0x6d, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x61, 0x74, 0x69, 0x63, 0x6c, 0x61, 0x62, - 0x73, 0x2f, 0x67, 0x6f, 0x2d, 0x62, 0x69, 0x74, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x2e, 0x42, 0x69, - 0x74, 0x76, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x34, 0x8a, 0xb5, 0x18, 0x01, 0x31, 0x52, 0x11, 0x6a, - 0x75, 0x73, 0x74, 0x69, 0x66, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x42, 0x69, 0x74, 0x73, - 0x12, 0x66, 0x0a, 0x1d, 0x70, 0x72, 0x65, 0x76, 0x69, 0x6f, 0x75, 0x73, 0x5f, 0x6a, 0x75, 0x73, - 0x74, 0x69, 0x66, 0x69, 0x65, 0x64, 0x5f, 0x63, 0x68, 0x65, 0x63, 0x6b, 0x70, 0x6f, 0x69, 0x6e, - 0x74, 0x18, 0xc2, 0x3e, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x21, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, - 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, - 0x2e, 0x43, 0x68, 0x65, 0x63, 0x6b, 0x70, 0x6f, 0x69, 0x6e, 0x74, 0x52, 0x1b, 0x70, 0x72, 0x65, - 0x76, 0x69, 0x6f, 0x75, 0x73, 0x4a, 0x75, 0x73, 0x74, 0x69, 0x66, 0x69, 0x65, 0x64, 0x43, 0x68, - 0x65, 0x63, 0x6b, 0x70, 0x6f, 0x69, 0x6e, 0x74, 0x12, 0x64, 0x0a, 0x1c, 0x63, 0x75, 0x72, 0x72, - 0x65, 0x6e, 0x74, 0x5f, 0x6a, 0x75, 0x73, 0x74, 0x69, 0x66, 0x69, 0x65, 0x64, 0x5f, 0x63, 0x68, - 0x65, 0x63, 0x6b, 0x70, 0x6f, 0x69, 0x6e, 0x74, 0x18, 0xc3, 0x3e, 0x20, 0x01, 0x28, 0x0b, 0x32, - 0x21, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, - 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x43, 0x68, 0x65, 0x63, 0x6b, 0x70, 0x6f, 0x69, - 0x6e, 0x74, 0x52, 0x1a, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x4a, 0x75, 0x73, 0x74, 0x69, - 0x66, 0x69, 0x65, 0x64, 0x43, 0x68, 0x65, 0x63, 0x6b, 0x70, 0x6f, 0x69, 0x6e, 0x74, 0x12, 0x55, - 0x0a, 0x14, 0x66, 0x69, 0x6e, 0x61, 0x6c, 
0x69, 0x7a, 0x65, 0x64, 0x5f, 0x63, 0x68, 0x65, 0x63, - 0x6b, 0x70, 0x6f, 0x69, 0x6e, 0x74, 0x18, 0xc4, 0x3e, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x21, 0x2e, - 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, - 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x43, 0x68, 0x65, 0x63, 0x6b, 0x70, 0x6f, 0x69, 0x6e, 0x74, - 0x52, 0x13, 0x66, 0x69, 0x6e, 0x61, 0x6c, 0x69, 0x7a, 0x65, 0x64, 0x43, 0x68, 0x65, 0x63, 0x6b, - 0x70, 0x6f, 0x69, 0x6e, 0x74, 0x12, 0x3f, 0x0a, 0x11, 0x69, 0x6e, 0x61, 0x63, 0x74, 0x69, 0x76, - 0x69, 0x74, 0x79, 0x5f, 0x73, 0x63, 0x6f, 0x72, 0x65, 0x73, 0x18, 0xa9, 0x46, 0x20, 0x03, 0x28, - 0x04, 0x42, 0x11, 0x92, 0xb5, 0x18, 0x0d, 0x31, 0x30, 0x39, 0x39, 0x35, 0x31, 0x31, 0x36, 0x32, - 0x37, 0x37, 0x37, 0x36, 0x52, 0x10, 0x69, 0x6e, 0x61, 0x63, 0x74, 0x69, 0x76, 0x69, 0x74, 0x79, - 0x53, 0x63, 0x6f, 0x72, 0x65, 0x73, 0x12, 0x5b, 0x0a, 0x16, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, - 0x74, 0x5f, 0x73, 0x79, 0x6e, 0x63, 0x5f, 0x63, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x74, 0x65, 0x65, - 0x18, 0xaa, 0x46, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x24, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, - 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, - 0x53, 0x79, 0x6e, 0x63, 0x43, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x74, 0x65, 0x65, 0x52, 0x14, 0x63, - 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x53, 0x79, 0x6e, 0x63, 0x43, 0x6f, 0x6d, 0x6d, 0x69, 0x74, - 0x74, 0x65, 0x65, 0x12, 0x55, 0x0a, 0x13, 0x6e, 0x65, 0x78, 0x74, 0x5f, 0x73, 0x79, 0x6e, 0x63, - 0x5f, 0x63, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x74, 0x65, 0x65, 0x18, 0xab, 0x46, 0x20, 0x01, 0x28, - 0x0b, 0x32, 0x24, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, - 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x53, 0x79, 0x6e, 0x63, 0x43, 0x6f, - 0x6d, 0x6d, 0x69, 0x74, 0x74, 0x65, 0x65, 0x52, 0x11, 0x6e, 0x65, 0x78, 0x74, 0x53, 0x79, 0x6e, - 0x63, 0x43, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x74, 0x65, 0x65, 0x12, 0x79, 0x0a, 0x1f, 0x6c, 0x61, - 0x74, 0x65, 0x73, 0x74, 0x5f, 0x65, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x70, - 0x61, 0x79, 0x6c, 0x6f, 0x61, 0x64, 0x5f, 0x68, 0x65, 0x61, 0x64, 0x65, 0x72, 0x18, 0x91, 0x4e, - 0x20, 0x01, 0x28, 0x0b, 0x32, 0x31, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, - 0x65, 0x6e, 0x67, 0x69, 0x6e, 0x65, 0x2e, 0x76, 0x31, 0x2e, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, - 0x69, 0x6f, 0x6e, 0x50, 0x61, 0x79, 0x6c, 0x6f, 0x61, 0x64, 0x48, 0x65, 0x61, 0x64, 0x65, 0x72, - 0x43, 0x61, 0x70, 0x65, 0x6c, 0x6c, 0x61, 0x52, 0x1c, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x74, 0x45, - 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x50, 0x61, 0x79, 0x6c, 0x6f, 0x61, 0x64, 0x48, - 0x65, 0x61, 0x64, 0x65, 0x72, 0x12, 0x33, 0x0a, 0x15, 0x6e, 0x65, 0x78, 0x74, 0x5f, 0x77, 0x69, - 0x74, 0x68, 0x64, 0x72, 0x61, 0x77, 0x61, 0x6c, 0x5f, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x18, 0xf9, - 0x55, 0x20, 0x01, 0x28, 0x04, 0x52, 0x13, 0x6e, 0x65, 0x78, 0x74, 0x57, 0x69, 0x74, 0x68, 0x64, - 0x72, 0x61, 0x77, 0x61, 0x6c, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x12, 0x96, 0x01, 0x0a, 0x1f, 0x6e, - 0x65, 0x78, 0x74, 0x5f, 0x77, 0x69, 0x74, 0x68, 0x64, 0x72, 0x61, 0x77, 0x61, 0x6c, 0x5f, 0x76, - 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, 0x5f, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x18, 0xfa, - 0x55, 0x20, 0x01, 0x28, 0x04, 0x42, 0x4e, 0x82, 0xb5, 0x18, 0x4a, 0x67, 0x69, 0x74, 0x68, 0x75, - 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, - 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 
0x2f, 0x63, 0x6f, 0x6e, 0x73, - 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, - 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x56, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, - 0x49, 0x6e, 0x64, 0x65, 0x78, 0x52, 0x1c, 0x6e, 0x65, 0x78, 0x74, 0x57, 0x69, 0x74, 0x68, 0x64, - 0x72, 0x61, 0x77, 0x61, 0x6c, 0x56, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, 0x49, 0x6e, - 0x64, 0x65, 0x78, 0x12, 0x6a, 0x0a, 0x14, 0x68, 0x69, 0x73, 0x74, 0x6f, 0x72, 0x69, 0x63, 0x61, - 0x6c, 0x5f, 0x73, 0x75, 0x6d, 0x6d, 0x61, 0x72, 0x69, 0x65, 0x73, 0x18, 0xfb, 0x55, 0x20, 0x03, - 0x28, 0x0b, 0x32, 0x28, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, - 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x48, 0x69, 0x73, 0x74, 0x6f, - 0x72, 0x69, 0x63, 0x61, 0x6c, 0x53, 0x75, 0x6d, 0x6d, 0x61, 0x72, 0x79, 0x42, 0x0c, 0x92, 0xb5, - 0x18, 0x08, 0x31, 0x36, 0x37, 0x37, 0x37, 0x32, 0x31, 0x36, 0x52, 0x13, 0x68, 0x69, 0x73, 0x74, - 0x6f, 0x72, 0x69, 0x63, 0x61, 0x6c, 0x53, 0x75, 0x6d, 0x6d, 0x61, 0x72, 0x69, 0x65, 0x73, 0x22, - 0x83, 0x11, 0x0a, 0x10, 0x42, 0x65, 0x61, 0x63, 0x6f, 0x6e, 0x53, 0x74, 0x61, 0x74, 0x65, 0x44, - 0x65, 0x6e, 0x65, 0x62, 0x12, 0x22, 0x0a, 0x0c, 0x67, 0x65, 0x6e, 0x65, 0x73, 0x69, 0x73, 0x5f, - 0x74, 0x69, 0x6d, 0x65, 0x18, 0xe9, 0x07, 0x20, 0x01, 0x28, 0x04, 0x52, 0x0b, 0x67, 0x65, 0x6e, - 0x65, 0x73, 0x69, 0x73, 0x54, 0x69, 0x6d, 0x65, 0x12, 0x3f, 0x0a, 0x17, 0x67, 0x65, 0x6e, 0x65, - 0x73, 0x69, 0x73, 0x5f, 0x76, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, 0x73, 0x5f, 0x72, - 0x6f, 0x6f, 0x74, 0x18, 0xea, 0x07, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x06, 0x8a, 0xb5, 0x18, 0x02, - 0x33, 0x32, 0x52, 0x15, 0x67, 0x65, 0x6e, 0x65, 0x73, 0x69, 0x73, 0x56, 0x61, 0x6c, 0x69, 0x64, - 0x61, 0x74, 0x6f, 0x72, 0x73, 0x52, 0x6f, 0x6f, 0x74, 0x12, 0x59, 0x0a, 0x04, 0x73, 0x6c, 0x6f, - 0x74, 0x18, 0xeb, 0x07, 0x20, 0x01, 0x28, 0x04, 0x42, 0x44, 0x82, 0xb5, 0x18, 0x40, 0x67, 0x69, - 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, - 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, - 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, - 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x53, 0x6c, 0x6f, 0x74, 0x52, 0x04, - 0x73, 0x6c, 0x6f, 0x74, 0x12, 0x30, 0x0a, 0x04, 0x66, 0x6f, 0x72, 0x6b, 0x18, 0xec, 0x07, 0x20, - 0x01, 0x28, 0x0b, 0x32, 0x1b, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, - 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x46, 0x6f, 0x72, 0x6b, - 0x52, 0x04, 0x66, 0x6f, 0x72, 0x6b, 0x12, 0x59, 0x0a, 0x13, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x74, - 0x5f, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x5f, 0x68, 0x65, 0x61, 0x64, 0x65, 0x72, 0x18, 0xd1, 0x0f, - 0x20, 0x01, 0x28, 0x0b, 0x32, 0x28, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, - 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x42, 0x65, 0x61, - 0x63, 0x6f, 0x6e, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x48, 0x65, 0x61, 0x64, 0x65, 0x72, 0x52, 0x11, - 0x6c, 0x61, 0x74, 0x65, 0x73, 0x74, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x48, 0x65, 0x61, 0x64, 0x65, - 0x72, 0x12, 0x2d, 0x0a, 0x0b, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x5f, 0x72, 0x6f, 0x6f, 0x74, 0x73, - 0x18, 0xd2, 0x0f, 0x20, 0x03, 0x28, 0x0c, 0x42, 0x0b, 0x8a, 0xb5, 0x18, 0x07, 0x38, 0x31, 0x39, - 0x32, 0x2c, 0x33, 0x32, 0x52, 0x0a, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x52, 0x6f, 0x6f, 0x74, 
0x73, - 0x12, 0x2d, 0x0a, 0x0b, 0x73, 0x74, 0x61, 0x74, 0x65, 0x5f, 0x72, 0x6f, 0x6f, 0x74, 0x73, 0x18, - 0xd3, 0x0f, 0x20, 0x03, 0x28, 0x0c, 0x42, 0x0b, 0x8a, 0xb5, 0x18, 0x07, 0x38, 0x31, 0x39, 0x32, - 0x2c, 0x33, 0x32, 0x52, 0x0a, 0x73, 0x74, 0x61, 0x74, 0x65, 0x52, 0x6f, 0x6f, 0x74, 0x73, 0x12, - 0x40, 0x0a, 0x10, 0x68, 0x69, 0x73, 0x74, 0x6f, 0x72, 0x69, 0x63, 0x61, 0x6c, 0x5f, 0x72, 0x6f, - 0x6f, 0x74, 0x73, 0x18, 0xd4, 0x0f, 0x20, 0x03, 0x28, 0x0c, 0x42, 0x14, 0x8a, 0xb5, 0x18, 0x04, - 0x3f, 0x2c, 0x33, 0x32, 0x92, 0xb5, 0x18, 0x08, 0x31, 0x36, 0x37, 0x37, 0x37, 0x32, 0x31, 0x36, - 0x52, 0x0f, 0x68, 0x69, 0x73, 0x74, 0x6f, 0x72, 0x69, 0x63, 0x61, 0x6c, 0x52, 0x6f, 0x6f, 0x74, - 0x73, 0x12, 0x3d, 0x0a, 0x09, 0x65, 0x74, 0x68, 0x31, 0x5f, 0x64, 0x61, 0x74, 0x61, 0x18, 0xb9, - 0x17, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1f, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, - 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x45, 0x74, - 0x68, 0x31, 0x44, 0x61, 0x74, 0x61, 0x52, 0x08, 0x65, 0x74, 0x68, 0x31, 0x44, 0x61, 0x74, 0x61, - 0x12, 0x52, 0x0a, 0x0f, 0x65, 0x74, 0x68, 0x31, 0x5f, 0x64, 0x61, 0x74, 0x61, 0x5f, 0x76, 0x6f, - 0x74, 0x65, 0x73, 0x18, 0xba, 0x17, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1f, 0x2e, 0x65, 0x74, 0x68, - 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, - 0x61, 0x31, 0x2e, 0x45, 0x74, 0x68, 0x31, 0x44, 0x61, 0x74, 0x61, 0x42, 0x08, 0x92, 0xb5, 0x18, - 0x04, 0x32, 0x30, 0x34, 0x38, 0x52, 0x0d, 0x65, 0x74, 0x68, 0x31, 0x44, 0x61, 0x74, 0x61, 0x56, - 0x6f, 0x74, 0x65, 0x73, 0x12, 0x2d, 0x0a, 0x12, 0x65, 0x74, 0x68, 0x31, 0x5f, 0x64, 0x65, 0x70, - 0x6f, 0x73, 0x69, 0x74, 0x5f, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x18, 0xbb, 0x17, 0x20, 0x01, 0x28, - 0x04, 0x52, 0x10, 0x65, 0x74, 0x68, 0x31, 0x44, 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x49, 0x6e, - 0x64, 0x65, 0x78, 0x12, 0x54, 0x0a, 0x0a, 0x76, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, - 0x73, 0x18, 0xa1, 0x1f, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x20, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, - 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, - 0x2e, 0x56, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, 0x42, 0x11, 0x92, 0xb5, 0x18, 0x0d, - 0x31, 0x30, 0x39, 0x39, 0x35, 0x31, 0x31, 0x36, 0x32, 0x37, 0x37, 0x37, 0x36, 0x52, 0x0a, 0x76, - 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, 0x73, 0x12, 0x2e, 0x0a, 0x08, 0x62, 0x61, 0x6c, - 0x61, 0x6e, 0x63, 0x65, 0x73, 0x18, 0xa2, 0x1f, 0x20, 0x03, 0x28, 0x04, 0x42, 0x11, 0x92, 0xb5, + 0x72, 0x6b, 0x12, 0x59, 0x0a, 0x13, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x74, 0x5f, 0x62, 0x6c, 0x6f, + 0x63, 0x6b, 0x5f, 0x68, 0x65, 0x61, 0x64, 0x65, 0x72, 0x18, 0xd1, 0x0f, 0x20, 0x01, 0x28, 0x0b, + 0x32, 0x28, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, + 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x42, 0x65, 0x61, 0x63, 0x6f, 0x6e, 0x42, + 0x6c, 0x6f, 0x63, 0x6b, 0x48, 0x65, 0x61, 0x64, 0x65, 0x72, 0x52, 0x11, 0x6c, 0x61, 0x74, 0x65, + 0x73, 0x74, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x48, 0x65, 0x61, 0x64, 0x65, 0x72, 0x12, 0x2d, 0x0a, + 0x0b, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x5f, 0x72, 0x6f, 0x6f, 0x74, 0x73, 0x18, 0xd2, 0x0f, 0x20, + 0x03, 0x28, 0x0c, 0x42, 0x0b, 0x8a, 0xb5, 0x18, 0x07, 0x38, 0x31, 0x39, 0x32, 0x2c, 0x33, 0x32, + 0x52, 0x0a, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x52, 0x6f, 0x6f, 0x74, 0x73, 0x12, 0x2d, 0x0a, 0x0b, + 0x73, 0x74, 0x61, 0x74, 0x65, 0x5f, 0x72, 0x6f, 0x6f, 0x74, 0x73, 0x18, 0xd3, 0x0f, 0x20, 0x03, + 0x28, 0x0c, 0x42, 
0x0b, 0x8a, 0xb5, 0x18, 0x07, 0x38, 0x31, 0x39, 0x32, 0x2c, 0x33, 0x32, 0x52, + 0x0a, 0x73, 0x74, 0x61, 0x74, 0x65, 0x52, 0x6f, 0x6f, 0x74, 0x73, 0x12, 0x40, 0x0a, 0x10, 0x68, + 0x69, 0x73, 0x74, 0x6f, 0x72, 0x69, 0x63, 0x61, 0x6c, 0x5f, 0x72, 0x6f, 0x6f, 0x74, 0x73, 0x18, + 0xd4, 0x0f, 0x20, 0x03, 0x28, 0x0c, 0x42, 0x14, 0x8a, 0xb5, 0x18, 0x04, 0x3f, 0x2c, 0x33, 0x32, + 0x92, 0xb5, 0x18, 0x08, 0x31, 0x36, 0x37, 0x37, 0x37, 0x32, 0x31, 0x36, 0x52, 0x0f, 0x68, 0x69, + 0x73, 0x74, 0x6f, 0x72, 0x69, 0x63, 0x61, 0x6c, 0x52, 0x6f, 0x6f, 0x74, 0x73, 0x12, 0x3d, 0x0a, + 0x09, 0x65, 0x74, 0x68, 0x31, 0x5f, 0x64, 0x61, 0x74, 0x61, 0x18, 0xb9, 0x17, 0x20, 0x01, 0x28, + 0x0b, 0x32, 0x1f, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, + 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x45, 0x74, 0x68, 0x31, 0x44, 0x61, + 0x74, 0x61, 0x52, 0x08, 0x65, 0x74, 0x68, 0x31, 0x44, 0x61, 0x74, 0x61, 0x12, 0x52, 0x0a, 0x0f, + 0x65, 0x74, 0x68, 0x31, 0x5f, 0x64, 0x61, 0x74, 0x61, 0x5f, 0x76, 0x6f, 0x74, 0x65, 0x73, 0x18, + 0xba, 0x17, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1f, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, + 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x45, + 0x74, 0x68, 0x31, 0x44, 0x61, 0x74, 0x61, 0x42, 0x08, 0x92, 0xb5, 0x18, 0x04, 0x32, 0x30, 0x34, + 0x38, 0x52, 0x0d, 0x65, 0x74, 0x68, 0x31, 0x44, 0x61, 0x74, 0x61, 0x56, 0x6f, 0x74, 0x65, 0x73, + 0x12, 0x2d, 0x0a, 0x12, 0x65, 0x74, 0x68, 0x31, 0x5f, 0x64, 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, + 0x5f, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x18, 0xbb, 0x17, 0x20, 0x01, 0x28, 0x04, 0x52, 0x10, 0x65, + 0x74, 0x68, 0x31, 0x44, 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x12, + 0x54, 0x0a, 0x0a, 0x76, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, 0x73, 0x18, 0xa1, 0x1f, + 0x20, 0x03, 0x28, 0x0b, 0x32, 0x20, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, + 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x56, 0x61, 0x6c, + 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, 0x42, 0x11, 0x92, 0xb5, 0x18, 0x0d, 0x31, 0x30, 0x39, 0x39, + 0x35, 0x31, 0x31, 0x36, 0x32, 0x37, 0x37, 0x37, 0x36, 0x52, 0x0a, 0x76, 0x61, 0x6c, 0x69, 0x64, + 0x61, 0x74, 0x6f, 0x72, 0x73, 0x12, 0x2e, 0x0a, 0x08, 0x62, 0x61, 0x6c, 0x61, 0x6e, 0x63, 0x65, + 0x73, 0x18, 0xa2, 0x1f, 0x20, 0x03, 0x28, 0x04, 0x42, 0x11, 0x92, 0xb5, 0x18, 0x0d, 0x31, 0x30, + 0x39, 0x39, 0x35, 0x31, 0x31, 0x36, 0x32, 0x37, 0x37, 0x37, 0x36, 0x52, 0x08, 0x62, 0x61, 0x6c, + 0x61, 0x6e, 0x63, 0x65, 0x73, 0x12, 0x30, 0x0a, 0x0c, 0x72, 0x61, 0x6e, 0x64, 0x61, 0x6f, 0x5f, + 0x6d, 0x69, 0x78, 0x65, 0x73, 0x18, 0x89, 0x27, 0x20, 0x03, 0x28, 0x0c, 0x42, 0x0c, 0x8a, 0xb5, + 0x18, 0x08, 0x36, 0x35, 0x35, 0x33, 0x36, 0x2c, 0x33, 0x32, 0x52, 0x0b, 0x72, 0x61, 0x6e, 0x64, + 0x61, 0x6f, 0x4d, 0x69, 0x78, 0x65, 0x73, 0x12, 0x27, 0x0a, 0x09, 0x73, 0x6c, 0x61, 0x73, 0x68, + 0x69, 0x6e, 0x67, 0x73, 0x18, 0xf1, 0x2e, 0x20, 0x03, 0x28, 0x04, 0x42, 0x08, 0x8a, 0xb5, 0x18, + 0x04, 0x38, 0x31, 0x39, 0x32, 0x52, 0x09, 0x73, 0x6c, 0x61, 0x73, 0x68, 0x69, 0x6e, 0x67, 0x73, + 0x12, 0x54, 0x0a, 0x1c, 0x70, 0x72, 0x65, 0x76, 0x69, 0x6f, 0x75, 0x73, 0x5f, 0x65, 0x70, 0x6f, + 0x63, 0x68, 0x5f, 0x70, 0x61, 0x72, 0x74, 0x69, 0x63, 0x69, 0x70, 0x61, 0x74, 0x69, 0x6f, 0x6e, + 0x18, 0xd9, 0x36, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x11, 0x92, 0xb5, 0x18, 0x0d, 0x31, 0x30, 0x39, + 0x39, 0x35, 0x31, 0x31, 0x36, 0x32, 0x37, 0x37, 0x37, 0x36, 0x52, 0x1a, 0x70, 0x72, 0x65, 0x76, + 0x69, 0x6f, 0x75, 0x73, 0x45, 0x70, 0x6f, 
0x63, 0x68, 0x50, 0x61, 0x72, 0x74, 0x69, 0x63, 0x69, + 0x70, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x52, 0x0a, 0x1b, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, + 0x74, 0x5f, 0x65, 0x70, 0x6f, 0x63, 0x68, 0x5f, 0x70, 0x61, 0x72, 0x74, 0x69, 0x63, 0x69, 0x70, + 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0xda, 0x36, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x11, 0x92, 0xb5, 0x18, 0x0d, 0x31, 0x30, 0x39, 0x39, 0x35, 0x31, 0x31, 0x36, 0x32, 0x37, 0x37, 0x37, 0x36, 0x52, - 0x08, 0x62, 0x61, 0x6c, 0x61, 0x6e, 0x63, 0x65, 0x73, 0x12, 0x30, 0x0a, 0x0c, 0x72, 0x61, 0x6e, - 0x64, 0x61, 0x6f, 0x5f, 0x6d, 0x69, 0x78, 0x65, 0x73, 0x18, 0x89, 0x27, 0x20, 0x03, 0x28, 0x0c, - 0x42, 0x0c, 0x8a, 0xb5, 0x18, 0x08, 0x36, 0x35, 0x35, 0x33, 0x36, 0x2c, 0x33, 0x32, 0x52, 0x0b, - 0x72, 0x61, 0x6e, 0x64, 0x61, 0x6f, 0x4d, 0x69, 0x78, 0x65, 0x73, 0x12, 0x27, 0x0a, 0x09, 0x73, - 0x6c, 0x61, 0x73, 0x68, 0x69, 0x6e, 0x67, 0x73, 0x18, 0xf1, 0x2e, 0x20, 0x03, 0x28, 0x04, 0x42, - 0x08, 0x8a, 0xb5, 0x18, 0x04, 0x38, 0x31, 0x39, 0x32, 0x52, 0x09, 0x73, 0x6c, 0x61, 0x73, 0x68, - 0x69, 0x6e, 0x67, 0x73, 0x12, 0x54, 0x0a, 0x1c, 0x70, 0x72, 0x65, 0x76, 0x69, 0x6f, 0x75, 0x73, - 0x5f, 0x65, 0x70, 0x6f, 0x63, 0x68, 0x5f, 0x70, 0x61, 0x72, 0x74, 0x69, 0x63, 0x69, 0x70, 0x61, - 0x74, 0x69, 0x6f, 0x6e, 0x18, 0xd9, 0x36, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x11, 0x92, 0xb5, 0x18, - 0x0d, 0x31, 0x30, 0x39, 0x39, 0x35, 0x31, 0x31, 0x36, 0x32, 0x37, 0x37, 0x37, 0x36, 0x52, 0x1a, - 0x70, 0x72, 0x65, 0x76, 0x69, 0x6f, 0x75, 0x73, 0x45, 0x70, 0x6f, 0x63, 0x68, 0x50, 0x61, 0x72, - 0x74, 0x69, 0x63, 0x69, 0x70, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x52, 0x0a, 0x1b, 0x63, 0x75, - 0x72, 0x72, 0x65, 0x6e, 0x74, 0x5f, 0x65, 0x70, 0x6f, 0x63, 0x68, 0x5f, 0x70, 0x61, 0x72, 0x74, - 0x69, 0x63, 0x69, 0x70, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0xda, 0x36, 0x20, 0x01, 0x28, 0x0c, - 0x42, 0x11, 0x92, 0xb5, 0x18, 0x0d, 0x31, 0x30, 0x39, 0x39, 0x35, 0x31, 0x31, 0x36, 0x32, 0x37, - 0x37, 0x37, 0x36, 0x52, 0x19, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x45, 0x70, 0x6f, 0x63, - 0x68, 0x50, 0x61, 0x72, 0x74, 0x69, 0x63, 0x69, 0x70, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x68, - 0x0a, 0x12, 0x6a, 0x75, 0x73, 0x74, 0x69, 0x66, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, - 0x62, 0x69, 0x74, 0x73, 0x18, 0xc1, 0x3e, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x38, 0x82, 0xb5, 0x18, - 0x2f, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x70, 0x72, 0x79, 0x73, - 0x6d, 0x61, 0x74, 0x69, 0x63, 0x6c, 0x61, 0x62, 0x73, 0x2f, 0x67, 0x6f, 0x2d, 0x62, 0x69, 0x74, - 0x66, 0x69, 0x65, 0x6c, 0x64, 0x2e, 0x42, 0x69, 0x74, 0x76, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x34, - 0x8a, 0xb5, 0x18, 0x01, 0x31, 0x52, 0x11, 0x6a, 0x75, 0x73, 0x74, 0x69, 0x66, 0x69, 0x63, 0x61, - 0x74, 0x69, 0x6f, 0x6e, 0x42, 0x69, 0x74, 0x73, 0x12, 0x66, 0x0a, 0x1d, 0x70, 0x72, 0x65, 0x76, - 0x69, 0x6f, 0x75, 0x73, 0x5f, 0x6a, 0x75, 0x73, 0x74, 0x69, 0x66, 0x69, 0x65, 0x64, 0x5f, 0x63, - 0x68, 0x65, 0x63, 0x6b, 0x70, 0x6f, 0x69, 0x6e, 0x74, 0x18, 0xc2, 0x3e, 0x20, 0x01, 0x28, 0x0b, + 0x19, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x45, 0x70, 0x6f, 0x63, 0x68, 0x50, 0x61, 0x72, + 0x74, 0x69, 0x63, 0x69, 0x70, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x67, 0x0a, 0x12, 0x6a, 0x75, + 0x73, 0x74, 0x69, 0x66, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x62, 0x69, 0x74, 0x73, + 0x18, 0xc1, 0x3e, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x37, 0x82, 0xb5, 0x18, 0x2e, 0x67, 0x69, 0x74, + 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, + 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x67, 0x6f, 0x2d, 0x62, 0x69, 0x74, 
0x66, 0x69, 0x65, 0x6c, 0x64, + 0x2e, 0x42, 0x69, 0x74, 0x76, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x34, 0x8a, 0xb5, 0x18, 0x01, 0x31, + 0x52, 0x11, 0x6a, 0x75, 0x73, 0x74, 0x69, 0x66, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x42, + 0x69, 0x74, 0x73, 0x12, 0x66, 0x0a, 0x1d, 0x70, 0x72, 0x65, 0x76, 0x69, 0x6f, 0x75, 0x73, 0x5f, + 0x6a, 0x75, 0x73, 0x74, 0x69, 0x66, 0x69, 0x65, 0x64, 0x5f, 0x63, 0x68, 0x65, 0x63, 0x6b, 0x70, + 0x6f, 0x69, 0x6e, 0x74, 0x18, 0xc2, 0x3e, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x21, 0x2e, 0x65, 0x74, + 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, + 0x68, 0x61, 0x31, 0x2e, 0x43, 0x68, 0x65, 0x63, 0x6b, 0x70, 0x6f, 0x69, 0x6e, 0x74, 0x52, 0x1b, + 0x70, 0x72, 0x65, 0x76, 0x69, 0x6f, 0x75, 0x73, 0x4a, 0x75, 0x73, 0x74, 0x69, 0x66, 0x69, 0x65, + 0x64, 0x43, 0x68, 0x65, 0x63, 0x6b, 0x70, 0x6f, 0x69, 0x6e, 0x74, 0x12, 0x64, 0x0a, 0x1c, 0x63, + 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x5f, 0x6a, 0x75, 0x73, 0x74, 0x69, 0x66, 0x69, 0x65, 0x64, + 0x5f, 0x63, 0x68, 0x65, 0x63, 0x6b, 0x70, 0x6f, 0x69, 0x6e, 0x74, 0x18, 0xc3, 0x3e, 0x20, 0x01, + 0x28, 0x0b, 0x32, 0x21, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, + 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x43, 0x68, 0x65, 0x63, 0x6b, + 0x70, 0x6f, 0x69, 0x6e, 0x74, 0x52, 0x1a, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x4a, 0x75, + 0x73, 0x74, 0x69, 0x66, 0x69, 0x65, 0x64, 0x43, 0x68, 0x65, 0x63, 0x6b, 0x70, 0x6f, 0x69, 0x6e, + 0x74, 0x12, 0x55, 0x0a, 0x14, 0x66, 0x69, 0x6e, 0x61, 0x6c, 0x69, 0x7a, 0x65, 0x64, 0x5f, 0x63, + 0x68, 0x65, 0x63, 0x6b, 0x70, 0x6f, 0x69, 0x6e, 0x74, 0x18, 0xc4, 0x3e, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x21, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x43, 0x68, 0x65, 0x63, 0x6b, 0x70, 0x6f, - 0x69, 0x6e, 0x74, 0x52, 0x1b, 0x70, 0x72, 0x65, 0x76, 0x69, 0x6f, 0x75, 0x73, 0x4a, 0x75, 0x73, - 0x74, 0x69, 0x66, 0x69, 0x65, 0x64, 0x43, 0x68, 0x65, 0x63, 0x6b, 0x70, 0x6f, 0x69, 0x6e, 0x74, - 0x12, 0x64, 0x0a, 0x1c, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x5f, 0x6a, 0x75, 0x73, 0x74, - 0x69, 0x66, 0x69, 0x65, 0x64, 0x5f, 0x63, 0x68, 0x65, 0x63, 0x6b, 0x70, 0x6f, 0x69, 0x6e, 0x74, - 0x18, 0xc3, 0x3e, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x21, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, - 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, - 0x43, 0x68, 0x65, 0x63, 0x6b, 0x70, 0x6f, 0x69, 0x6e, 0x74, 0x52, 0x1a, 0x63, 0x75, 0x72, 0x72, - 0x65, 0x6e, 0x74, 0x4a, 0x75, 0x73, 0x74, 0x69, 0x66, 0x69, 0x65, 0x64, 0x43, 0x68, 0x65, 0x63, - 0x6b, 0x70, 0x6f, 0x69, 0x6e, 0x74, 0x12, 0x55, 0x0a, 0x14, 0x66, 0x69, 0x6e, 0x61, 0x6c, 0x69, - 0x7a, 0x65, 0x64, 0x5f, 0x63, 0x68, 0x65, 0x63, 0x6b, 0x70, 0x6f, 0x69, 0x6e, 0x74, 0x18, 0xc4, - 0x3e, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x21, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, - 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x43, 0x68, - 0x65, 0x63, 0x6b, 0x70, 0x6f, 0x69, 0x6e, 0x74, 0x52, 0x13, 0x66, 0x69, 0x6e, 0x61, 0x6c, 0x69, - 0x7a, 0x65, 0x64, 0x43, 0x68, 0x65, 0x63, 0x6b, 0x70, 0x6f, 0x69, 0x6e, 0x74, 0x12, 0x3f, 0x0a, - 0x11, 0x69, 0x6e, 0x61, 0x63, 0x74, 0x69, 0x76, 0x69, 0x74, 0x79, 0x5f, 0x73, 0x63, 0x6f, 0x72, - 0x65, 0x73, 0x18, 0xa9, 0x46, 0x20, 0x03, 0x28, 0x04, 0x42, 0x11, 0x92, 0xb5, 0x18, 0x0d, 0x31, - 0x30, 0x39, 0x39, 0x35, 0x31, 0x31, 0x36, 0x32, 0x37, 0x37, 0x37, 0x36, 0x52, 0x10, 0x69, 0x6e, 
- 0x61, 0x63, 0x74, 0x69, 0x76, 0x69, 0x74, 0x79, 0x53, 0x63, 0x6f, 0x72, 0x65, 0x73, 0x12, 0x5b, - 0x0a, 0x16, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x5f, 0x73, 0x79, 0x6e, 0x63, 0x5f, 0x63, - 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x74, 0x65, 0x65, 0x18, 0xaa, 0x46, 0x20, 0x01, 0x28, 0x0b, 0x32, - 0x24, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, - 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x53, 0x79, 0x6e, 0x63, 0x43, 0x6f, 0x6d, 0x6d, - 0x69, 0x74, 0x74, 0x65, 0x65, 0x52, 0x14, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x53, 0x79, - 0x6e, 0x63, 0x43, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x74, 0x65, 0x65, 0x12, 0x55, 0x0a, 0x13, 0x6e, - 0x65, 0x78, 0x74, 0x5f, 0x73, 0x79, 0x6e, 0x63, 0x5f, 0x63, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x74, - 0x65, 0x65, 0x18, 0xab, 0x46, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x24, 0x2e, 0x65, 0x74, 0x68, 0x65, - 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, - 0x31, 0x2e, 0x53, 0x79, 0x6e, 0x63, 0x43, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x74, 0x65, 0x65, 0x52, - 0x11, 0x6e, 0x65, 0x78, 0x74, 0x53, 0x79, 0x6e, 0x63, 0x43, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x74, - 0x65, 0x65, 0x12, 0x77, 0x0a, 0x1f, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x74, 0x5f, 0x65, 0x78, 0x65, - 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x70, 0x61, 0x79, 0x6c, 0x6f, 0x61, 0x64, 0x5f, 0x68, - 0x65, 0x61, 0x64, 0x65, 0x72, 0x18, 0x91, 0x4e, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x2f, 0x2e, 0x65, - 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x6e, 0x67, 0x69, 0x6e, 0x65, 0x2e, 0x76, - 0x31, 0x2e, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x50, 0x61, 0x79, 0x6c, 0x6f, - 0x61, 0x64, 0x48, 0x65, 0x61, 0x64, 0x65, 0x72, 0x44, 0x65, 0x6e, 0x65, 0x62, 0x52, 0x1c, 0x6c, - 0x61, 0x74, 0x65, 0x73, 0x74, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x50, 0x61, - 0x79, 0x6c, 0x6f, 0x61, 0x64, 0x48, 0x65, 0x61, 0x64, 0x65, 0x72, 0x12, 0x33, 0x0a, 0x15, 0x6e, - 0x65, 0x78, 0x74, 0x5f, 0x77, 0x69, 0x74, 0x68, 0x64, 0x72, 0x61, 0x77, 0x61, 0x6c, 0x5f, 0x69, - 0x6e, 0x64, 0x65, 0x78, 0x18, 0xf9, 0x55, 0x20, 0x01, 0x28, 0x04, 0x52, 0x13, 0x6e, 0x65, 0x78, - 0x74, 0x57, 0x69, 0x74, 0x68, 0x64, 0x72, 0x61, 0x77, 0x61, 0x6c, 0x49, 0x6e, 0x64, 0x65, 0x78, - 0x12, 0x96, 0x01, 0x0a, 0x1f, 0x6e, 0x65, 0x78, 0x74, 0x5f, 0x77, 0x69, 0x74, 0x68, 0x64, 0x72, - 0x61, 0x77, 0x61, 0x6c, 0x5f, 0x76, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, 0x5f, 0x69, - 0x6e, 0x64, 0x65, 0x78, 0x18, 0xfa, 0x55, 0x20, 0x01, 0x28, 0x04, 0x42, 0x4e, 0x82, 0xb5, 0x18, - 0x4a, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, - 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, - 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, - 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x56, 0x61, 0x6c, - 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x52, 0x1c, 0x6e, 0x65, 0x78, - 0x74, 0x57, 0x69, 0x74, 0x68, 0x64, 0x72, 0x61, 0x77, 0x61, 0x6c, 0x56, 0x61, 0x6c, 0x69, 0x64, - 0x61, 0x74, 0x6f, 0x72, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x12, 0x6a, 0x0a, 0x14, 0x68, 0x69, 0x73, - 0x74, 0x6f, 0x72, 0x69, 0x63, 0x61, 0x6c, 0x5f, 0x73, 0x75, 0x6d, 0x6d, 0x61, 0x72, 0x69, 0x65, - 0x73, 0x18, 0xfb, 0x55, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x28, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, - 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, - 0x2e, 0x48, 0x69, 0x73, 
0x74, 0x6f, 0x72, 0x69, 0x63, 0x61, 0x6c, 0x53, 0x75, 0x6d, 0x6d, 0x61, - 0x72, 0x79, 0x42, 0x0c, 0x92, 0xb5, 0x18, 0x08, 0x31, 0x36, 0x37, 0x37, 0x37, 0x32, 0x31, 0x36, - 0x52, 0x13, 0x68, 0x69, 0x73, 0x74, 0x6f, 0x72, 0x69, 0x63, 0x61, 0x6c, 0x53, 0x75, 0x6d, 0x6d, - 0x61, 0x72, 0x69, 0x65, 0x73, 0x22, 0xb2, 0x19, 0x0a, 0x12, 0x42, 0x65, 0x61, 0x63, 0x6f, 0x6e, - 0x53, 0x74, 0x61, 0x74, 0x65, 0x45, 0x6c, 0x65, 0x63, 0x74, 0x72, 0x61, 0x12, 0x22, 0x0a, 0x0c, - 0x67, 0x65, 0x6e, 0x65, 0x73, 0x69, 0x73, 0x5f, 0x74, 0x69, 0x6d, 0x65, 0x18, 0xe9, 0x07, 0x20, - 0x01, 0x28, 0x04, 0x52, 0x0b, 0x67, 0x65, 0x6e, 0x65, 0x73, 0x69, 0x73, 0x54, 0x69, 0x6d, 0x65, - 0x12, 0x3f, 0x0a, 0x17, 0x67, 0x65, 0x6e, 0x65, 0x73, 0x69, 0x73, 0x5f, 0x76, 0x61, 0x6c, 0x69, - 0x64, 0x61, 0x74, 0x6f, 0x72, 0x73, 0x5f, 0x72, 0x6f, 0x6f, 0x74, 0x18, 0xea, 0x07, 0x20, 0x01, - 0x28, 0x0c, 0x42, 0x06, 0x8a, 0xb5, 0x18, 0x02, 0x33, 0x32, 0x52, 0x15, 0x67, 0x65, 0x6e, 0x65, - 0x73, 0x69, 0x73, 0x56, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, 0x73, 0x52, 0x6f, 0x6f, - 0x74, 0x12, 0x59, 0x0a, 0x04, 0x73, 0x6c, 0x6f, 0x74, 0x18, 0xeb, 0x07, 0x20, 0x01, 0x28, 0x04, - 0x42, 0x44, 0x82, 0xb5, 0x18, 0x40, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, - 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, - 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, - 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, - 0x73, 0x2e, 0x53, 0x6c, 0x6f, 0x74, 0x52, 0x04, 0x73, 0x6c, 0x6f, 0x74, 0x12, 0x30, 0x0a, 0x04, - 0x66, 0x6f, 0x72, 0x6b, 0x18, 0xec, 0x07, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1b, 0x2e, 0x65, 0x74, - 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, - 0x68, 0x61, 0x31, 0x2e, 0x46, 0x6f, 0x72, 0x6b, 0x52, 0x04, 0x66, 0x6f, 0x72, 0x6b, 0x12, 0x59, - 0x0a, 0x13, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x74, 0x5f, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x5f, 0x68, - 0x65, 0x61, 0x64, 0x65, 0x72, 0x18, 0xd1, 0x0f, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x28, 0x2e, 0x65, - 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, - 0x70, 0x68, 0x61, 0x31, 0x2e, 0x42, 0x65, 0x61, 0x63, 0x6f, 0x6e, 0x42, 0x6c, 0x6f, 0x63, 0x6b, - 0x48, 0x65, 0x61, 0x64, 0x65, 0x72, 0x52, 0x11, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x74, 0x42, 0x6c, - 0x6f, 0x63, 0x6b, 0x48, 0x65, 0x61, 0x64, 0x65, 0x72, 0x12, 0x2d, 0x0a, 0x0b, 0x62, 0x6c, 0x6f, - 0x63, 0x6b, 0x5f, 0x72, 0x6f, 0x6f, 0x74, 0x73, 0x18, 0xd2, 0x0f, 0x20, 0x03, 0x28, 0x0c, 0x42, - 0x0b, 0x8a, 0xb5, 0x18, 0x07, 0x38, 0x31, 0x39, 0x32, 0x2c, 0x33, 0x32, 0x52, 0x0a, 0x62, 0x6c, - 0x6f, 0x63, 0x6b, 0x52, 0x6f, 0x6f, 0x74, 0x73, 0x12, 0x2d, 0x0a, 0x0b, 0x73, 0x74, 0x61, 0x74, - 0x65, 0x5f, 0x72, 0x6f, 0x6f, 0x74, 0x73, 0x18, 0xd3, 0x0f, 0x20, 0x03, 0x28, 0x0c, 0x42, 0x0b, - 0x8a, 0xb5, 0x18, 0x07, 0x38, 0x31, 0x39, 0x32, 0x2c, 0x33, 0x32, 0x52, 0x0a, 0x73, 0x74, 0x61, - 0x74, 0x65, 0x52, 0x6f, 0x6f, 0x74, 0x73, 0x12, 0x40, 0x0a, 0x10, 0x68, 0x69, 0x73, 0x74, 0x6f, - 0x72, 0x69, 0x63, 0x61, 0x6c, 0x5f, 0x72, 0x6f, 0x6f, 0x74, 0x73, 0x18, 0xd4, 0x0f, 0x20, 0x03, - 0x28, 0x0c, 0x42, 0x14, 0x8a, 0xb5, 0x18, 0x04, 0x3f, 0x2c, 0x33, 0x32, 0x92, 0xb5, 0x18, 0x08, - 0x31, 0x36, 0x37, 0x37, 0x37, 0x32, 0x31, 0x36, 0x52, 0x0f, 0x68, 0x69, 0x73, 0x74, 0x6f, 0x72, - 0x69, 0x63, 0x61, 0x6c, 0x52, 0x6f, 0x6f, 0x74, 0x73, 0x12, 0x3d, 0x0a, 0x09, 0x65, 0x74, 0x68, - 0x31, 0x5f, 0x64, 0x61, 0x74, 0x61, 0x18, 0xb9, 
0x17, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1f, 0x2e, - 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, - 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x45, 0x74, 0x68, 0x31, 0x44, 0x61, 0x74, 0x61, 0x52, 0x08, - 0x65, 0x74, 0x68, 0x31, 0x44, 0x61, 0x74, 0x61, 0x12, 0x52, 0x0a, 0x0f, 0x65, 0x74, 0x68, 0x31, - 0x5f, 0x64, 0x61, 0x74, 0x61, 0x5f, 0x76, 0x6f, 0x74, 0x65, 0x73, 0x18, 0xba, 0x17, 0x20, 0x03, - 0x28, 0x0b, 0x32, 0x1f, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, - 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x45, 0x74, 0x68, 0x31, 0x44, - 0x61, 0x74, 0x61, 0x42, 0x08, 0x92, 0xb5, 0x18, 0x04, 0x32, 0x30, 0x34, 0x38, 0x52, 0x0d, 0x65, - 0x74, 0x68, 0x31, 0x44, 0x61, 0x74, 0x61, 0x56, 0x6f, 0x74, 0x65, 0x73, 0x12, 0x2d, 0x0a, 0x12, - 0x65, 0x74, 0x68, 0x31, 0x5f, 0x64, 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x5f, 0x69, 0x6e, 0x64, - 0x65, 0x78, 0x18, 0xbb, 0x17, 0x20, 0x01, 0x28, 0x04, 0x52, 0x10, 0x65, 0x74, 0x68, 0x31, 0x44, - 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x12, 0x54, 0x0a, 0x0a, 0x76, - 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, 0x73, 0x18, 0xa1, 0x1f, 0x20, 0x03, 0x28, 0x0b, - 0x32, 0x20, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, - 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x56, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, - 0x6f, 0x72, 0x42, 0x11, 0x92, 0xb5, 0x18, 0x0d, 0x31, 0x30, 0x39, 0x39, 0x35, 0x31, 0x31, 0x36, - 0x32, 0x37, 0x37, 0x37, 0x36, 0x52, 0x0a, 0x76, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, - 0x73, 0x12, 0x2e, 0x0a, 0x08, 0x62, 0x61, 0x6c, 0x61, 0x6e, 0x63, 0x65, 0x73, 0x18, 0xa2, 0x1f, + 0x69, 0x6e, 0x74, 0x52, 0x13, 0x66, 0x69, 0x6e, 0x61, 0x6c, 0x69, 0x7a, 0x65, 0x64, 0x43, 0x68, + 0x65, 0x63, 0x6b, 0x70, 0x6f, 0x69, 0x6e, 0x74, 0x12, 0x3f, 0x0a, 0x11, 0x69, 0x6e, 0x61, 0x63, + 0x74, 0x69, 0x76, 0x69, 0x74, 0x79, 0x5f, 0x73, 0x63, 0x6f, 0x72, 0x65, 0x73, 0x18, 0xa9, 0x46, 0x20, 0x03, 0x28, 0x04, 0x42, 0x11, 0x92, 0xb5, 0x18, 0x0d, 0x31, 0x30, 0x39, 0x39, 0x35, 0x31, - 0x31, 0x36, 0x32, 0x37, 0x37, 0x37, 0x36, 0x52, 0x08, 0x62, 0x61, 0x6c, 0x61, 0x6e, 0x63, 0x65, - 0x73, 0x12, 0x30, 0x0a, 0x0c, 0x72, 0x61, 0x6e, 0x64, 0x61, 0x6f, 0x5f, 0x6d, 0x69, 0x78, 0x65, - 0x73, 0x18, 0x89, 0x27, 0x20, 0x03, 0x28, 0x0c, 0x42, 0x0c, 0x8a, 0xb5, 0x18, 0x08, 0x36, 0x35, - 0x35, 0x33, 0x36, 0x2c, 0x33, 0x32, 0x52, 0x0b, 0x72, 0x61, 0x6e, 0x64, 0x61, 0x6f, 0x4d, 0x69, - 0x78, 0x65, 0x73, 0x12, 0x27, 0x0a, 0x09, 0x73, 0x6c, 0x61, 0x73, 0x68, 0x69, 0x6e, 0x67, 0x73, - 0x18, 0xf1, 0x2e, 0x20, 0x03, 0x28, 0x04, 0x42, 0x08, 0x8a, 0xb5, 0x18, 0x04, 0x38, 0x31, 0x39, - 0x32, 0x52, 0x09, 0x73, 0x6c, 0x61, 0x73, 0x68, 0x69, 0x6e, 0x67, 0x73, 0x12, 0x54, 0x0a, 0x1c, - 0x70, 0x72, 0x65, 0x76, 0x69, 0x6f, 0x75, 0x73, 0x5f, 0x65, 0x70, 0x6f, 0x63, 0x68, 0x5f, 0x70, - 0x61, 0x72, 0x74, 0x69, 0x63, 0x69, 0x70, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0xd9, 0x36, 0x20, - 0x01, 0x28, 0x0c, 0x42, 0x11, 0x92, 0xb5, 0x18, 0x0d, 0x31, 0x30, 0x39, 0x39, 0x35, 0x31, 0x31, - 0x36, 0x32, 0x37, 0x37, 0x37, 0x36, 0x52, 0x1a, 0x70, 0x72, 0x65, 0x76, 0x69, 0x6f, 0x75, 0x73, - 0x45, 0x70, 0x6f, 0x63, 0x68, 0x50, 0x61, 0x72, 0x74, 0x69, 0x63, 0x69, 0x70, 0x61, 0x74, 0x69, - 0x6f, 0x6e, 0x12, 0x52, 0x0a, 0x1b, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x5f, 0x65, 0x70, - 0x6f, 0x63, 0x68, 0x5f, 0x70, 0x61, 0x72, 0x74, 0x69, 0x63, 0x69, 0x70, 0x61, 0x74, 0x69, 0x6f, - 0x6e, 0x18, 0xda, 0x36, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x11, 0x92, 0xb5, 
0x18, 0x0d, 0x31, 0x30, - 0x39, 0x39, 0x35, 0x31, 0x31, 0x36, 0x32, 0x37, 0x37, 0x37, 0x36, 0x52, 0x19, 0x63, 0x75, 0x72, - 0x72, 0x65, 0x6e, 0x74, 0x45, 0x70, 0x6f, 0x63, 0x68, 0x50, 0x61, 0x72, 0x74, 0x69, 0x63, 0x69, - 0x70, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x68, 0x0a, 0x12, 0x6a, 0x75, 0x73, 0x74, 0x69, 0x66, - 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x62, 0x69, 0x74, 0x73, 0x18, 0xc1, 0x3e, 0x20, - 0x01, 0x28, 0x0c, 0x42, 0x38, 0x82, 0xb5, 0x18, 0x2f, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, - 0x63, 0x6f, 0x6d, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x61, 0x74, 0x69, 0x63, 0x6c, 0x61, 0x62, - 0x73, 0x2f, 0x67, 0x6f, 0x2d, 0x62, 0x69, 0x74, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x2e, 0x42, 0x69, - 0x74, 0x76, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x34, 0x8a, 0xb5, 0x18, 0x01, 0x31, 0x52, 0x11, 0x6a, - 0x75, 0x73, 0x74, 0x69, 0x66, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x42, 0x69, 0x74, 0x73, - 0x12, 0x66, 0x0a, 0x1d, 0x70, 0x72, 0x65, 0x76, 0x69, 0x6f, 0x75, 0x73, 0x5f, 0x6a, 0x75, 0x73, - 0x74, 0x69, 0x66, 0x69, 0x65, 0x64, 0x5f, 0x63, 0x68, 0x65, 0x63, 0x6b, 0x70, 0x6f, 0x69, 0x6e, - 0x74, 0x18, 0xc2, 0x3e, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x21, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, - 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, - 0x2e, 0x43, 0x68, 0x65, 0x63, 0x6b, 0x70, 0x6f, 0x69, 0x6e, 0x74, 0x52, 0x1b, 0x70, 0x72, 0x65, - 0x76, 0x69, 0x6f, 0x75, 0x73, 0x4a, 0x75, 0x73, 0x74, 0x69, 0x66, 0x69, 0x65, 0x64, 0x43, 0x68, - 0x65, 0x63, 0x6b, 0x70, 0x6f, 0x69, 0x6e, 0x74, 0x12, 0x64, 0x0a, 0x1c, 0x63, 0x75, 0x72, 0x72, - 0x65, 0x6e, 0x74, 0x5f, 0x6a, 0x75, 0x73, 0x74, 0x69, 0x66, 0x69, 0x65, 0x64, 0x5f, 0x63, 0x68, - 0x65, 0x63, 0x6b, 0x70, 0x6f, 0x69, 0x6e, 0x74, 0x18, 0xc3, 0x3e, 0x20, 0x01, 0x28, 0x0b, 0x32, - 0x21, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, - 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x43, 0x68, 0x65, 0x63, 0x6b, 0x70, 0x6f, 0x69, - 0x6e, 0x74, 0x52, 0x1a, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x4a, 0x75, 0x73, 0x74, 0x69, - 0x66, 0x69, 0x65, 0x64, 0x43, 0x68, 0x65, 0x63, 0x6b, 0x70, 0x6f, 0x69, 0x6e, 0x74, 0x12, 0x55, - 0x0a, 0x14, 0x66, 0x69, 0x6e, 0x61, 0x6c, 0x69, 0x7a, 0x65, 0x64, 0x5f, 0x63, 0x68, 0x65, 0x63, - 0x6b, 0x70, 0x6f, 0x69, 0x6e, 0x74, 0x18, 0xc4, 0x3e, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x21, 0x2e, - 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, - 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x43, 0x68, 0x65, 0x63, 0x6b, 0x70, 0x6f, 0x69, 0x6e, 0x74, - 0x52, 0x13, 0x66, 0x69, 0x6e, 0x61, 0x6c, 0x69, 0x7a, 0x65, 0x64, 0x43, 0x68, 0x65, 0x63, 0x6b, - 0x70, 0x6f, 0x69, 0x6e, 0x74, 0x12, 0x3f, 0x0a, 0x11, 0x69, 0x6e, 0x61, 0x63, 0x74, 0x69, 0x76, - 0x69, 0x74, 0x79, 0x5f, 0x73, 0x63, 0x6f, 0x72, 0x65, 0x73, 0x18, 0xa9, 0x46, 0x20, 0x03, 0x28, - 0x04, 0x42, 0x11, 0x92, 0xb5, 0x18, 0x0d, 0x31, 0x30, 0x39, 0x39, 0x35, 0x31, 0x31, 0x36, 0x32, - 0x37, 0x37, 0x37, 0x36, 0x52, 0x10, 0x69, 0x6e, 0x61, 0x63, 0x74, 0x69, 0x76, 0x69, 0x74, 0x79, - 0x53, 0x63, 0x6f, 0x72, 0x65, 0x73, 0x12, 0x5b, 0x0a, 0x16, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, - 0x74, 0x5f, 0x73, 0x79, 0x6e, 0x63, 0x5f, 0x63, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x74, 0x65, 0x65, - 0x18, 0xaa, 0x46, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x24, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, - 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, - 0x53, 0x79, 0x6e, 0x63, 0x43, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x74, 0x65, 0x65, 0x52, 0x14, 0x63, - 
0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x53, 0x79, 0x6e, 0x63, 0x43, 0x6f, 0x6d, 0x6d, 0x69, 0x74, - 0x74, 0x65, 0x65, 0x12, 0x55, 0x0a, 0x13, 0x6e, 0x65, 0x78, 0x74, 0x5f, 0x73, 0x79, 0x6e, 0x63, - 0x5f, 0x63, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x74, 0x65, 0x65, 0x18, 0xab, 0x46, 0x20, 0x01, 0x28, - 0x0b, 0x32, 0x24, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, - 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x53, 0x79, 0x6e, 0x63, 0x43, 0x6f, - 0x6d, 0x6d, 0x69, 0x74, 0x74, 0x65, 0x65, 0x52, 0x11, 0x6e, 0x65, 0x78, 0x74, 0x53, 0x79, 0x6e, - 0x63, 0x43, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x74, 0x65, 0x65, 0x12, 0x77, 0x0a, 0x1f, 0x6c, 0x61, - 0x74, 0x65, 0x73, 0x74, 0x5f, 0x65, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x70, - 0x61, 0x79, 0x6c, 0x6f, 0x61, 0x64, 0x5f, 0x68, 0x65, 0x61, 0x64, 0x65, 0x72, 0x18, 0x91, 0x4e, - 0x20, 0x01, 0x28, 0x0b, 0x32, 0x2f, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, - 0x65, 0x6e, 0x67, 0x69, 0x6e, 0x65, 0x2e, 0x76, 0x31, 0x2e, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, - 0x69, 0x6f, 0x6e, 0x50, 0x61, 0x79, 0x6c, 0x6f, 0x61, 0x64, 0x48, 0x65, 0x61, 0x64, 0x65, 0x72, - 0x44, 0x65, 0x6e, 0x65, 0x62, 0x52, 0x1c, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x74, 0x45, 0x78, 0x65, + 0x31, 0x36, 0x32, 0x37, 0x37, 0x37, 0x36, 0x52, 0x10, 0x69, 0x6e, 0x61, 0x63, 0x74, 0x69, 0x76, + 0x69, 0x74, 0x79, 0x53, 0x63, 0x6f, 0x72, 0x65, 0x73, 0x12, 0x5b, 0x0a, 0x16, 0x63, 0x75, 0x72, + 0x72, 0x65, 0x6e, 0x74, 0x5f, 0x73, 0x79, 0x6e, 0x63, 0x5f, 0x63, 0x6f, 0x6d, 0x6d, 0x69, 0x74, + 0x74, 0x65, 0x65, 0x18, 0xaa, 0x46, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x24, 0x2e, 0x65, 0x74, 0x68, + 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, + 0x61, 0x31, 0x2e, 0x53, 0x79, 0x6e, 0x63, 0x43, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x74, 0x65, 0x65, + 0x52, 0x14, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x53, 0x79, 0x6e, 0x63, 0x43, 0x6f, 0x6d, + 0x6d, 0x69, 0x74, 0x74, 0x65, 0x65, 0x12, 0x55, 0x0a, 0x13, 0x6e, 0x65, 0x78, 0x74, 0x5f, 0x73, + 0x79, 0x6e, 0x63, 0x5f, 0x63, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x74, 0x65, 0x65, 0x18, 0xab, 0x46, + 0x20, 0x01, 0x28, 0x0b, 0x32, 0x24, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, + 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x53, 0x79, 0x6e, + 0x63, 0x43, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x74, 0x65, 0x65, 0x52, 0x11, 0x6e, 0x65, 0x78, 0x74, + 0x53, 0x79, 0x6e, 0x63, 0x43, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x74, 0x65, 0x65, 0x12, 0x72, 0x0a, + 0x1f, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x74, 0x5f, 0x65, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, + 0x6e, 0x5f, 0x70, 0x61, 0x79, 0x6c, 0x6f, 0x61, 0x64, 0x5f, 0x68, 0x65, 0x61, 0x64, 0x65, 0x72, + 0x18, 0x91, 0x4e, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x2a, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, + 0x75, 0x6d, 0x2e, 0x65, 0x6e, 0x67, 0x69, 0x6e, 0x65, 0x2e, 0x76, 0x31, 0x2e, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x50, 0x61, 0x79, 0x6c, 0x6f, 0x61, 0x64, 0x48, 0x65, 0x61, - 0x64, 0x65, 0x72, 0x12, 0x33, 0x0a, 0x15, 0x6e, 0x65, 0x78, 0x74, 0x5f, 0x77, 0x69, 0x74, 0x68, - 0x64, 0x72, 0x61, 0x77, 0x61, 0x6c, 0x5f, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x18, 0xf9, 0x55, 0x20, - 0x01, 0x28, 0x04, 0x52, 0x13, 0x6e, 0x65, 0x78, 0x74, 0x57, 0x69, 0x74, 0x68, 0x64, 0x72, 0x61, - 0x77, 0x61, 0x6c, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x12, 0x96, 0x01, 0x0a, 0x1f, 0x6e, 0x65, 0x78, - 0x74, 0x5f, 0x77, 0x69, 0x74, 0x68, 0x64, 0x72, 0x61, 0x77, 0x61, 0x6c, 0x5f, 0x76, 0x61, 0x6c, - 0x69, 0x64, 0x61, 0x74, 
0x6f, 0x72, 0x5f, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x18, 0xfa, 0x55, 0x20, - 0x01, 0x28, 0x04, 0x42, 0x4e, 0x82, 0xb5, 0x18, 0x4a, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, - 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, - 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, - 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, - 0x69, 0x76, 0x65, 0x73, 0x2e, 0x56, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, 0x49, 0x6e, - 0x64, 0x65, 0x78, 0x52, 0x1c, 0x6e, 0x65, 0x78, 0x74, 0x57, 0x69, 0x74, 0x68, 0x64, 0x72, 0x61, - 0x77, 0x61, 0x6c, 0x56, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, 0x49, 0x6e, 0x64, 0x65, - 0x78, 0x12, 0x6a, 0x0a, 0x14, 0x68, 0x69, 0x73, 0x74, 0x6f, 0x72, 0x69, 0x63, 0x61, 0x6c, 0x5f, - 0x73, 0x75, 0x6d, 0x6d, 0x61, 0x72, 0x69, 0x65, 0x73, 0x18, 0xfb, 0x55, 0x20, 0x03, 0x28, 0x0b, - 0x32, 0x28, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, - 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x48, 0x69, 0x73, 0x74, 0x6f, 0x72, 0x69, - 0x63, 0x61, 0x6c, 0x53, 0x75, 0x6d, 0x6d, 0x61, 0x72, 0x79, 0x42, 0x0c, 0x92, 0xb5, 0x18, 0x08, - 0x31, 0x36, 0x37, 0x37, 0x37, 0x32, 0x31, 0x36, 0x52, 0x13, 0x68, 0x69, 0x73, 0x74, 0x6f, 0x72, - 0x69, 0x63, 0x61, 0x6c, 0x53, 0x75, 0x6d, 0x6d, 0x61, 0x72, 0x69, 0x65, 0x73, 0x12, 0x40, 0x0a, - 0x1c, 0x64, 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x5f, 0x72, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, - 0x73, 0x5f, 0x73, 0x74, 0x61, 0x72, 0x74, 0x5f, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x18, 0xe1, 0x5d, - 0x20, 0x01, 0x28, 0x04, 0x52, 0x19, 0x64, 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x52, 0x65, 0x71, - 0x75, 0x65, 0x73, 0x74, 0x73, 0x53, 0x74, 0x61, 0x72, 0x74, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x12, - 0x82, 0x01, 0x0a, 0x1a, 0x64, 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x5f, 0x62, 0x61, 0x6c, 0x61, - 0x6e, 0x63, 0x65, 0x5f, 0x74, 0x6f, 0x5f, 0x63, 0x6f, 0x6e, 0x73, 0x75, 0x6d, 0x65, 0x18, 0xe2, - 0x5d, 0x20, 0x01, 0x28, 0x04, 0x42, 0x44, 0x82, 0xb5, 0x18, 0x40, 0x67, 0x69, 0x74, 0x68, 0x75, - 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, - 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, - 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, - 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x47, 0x77, 0x65, 0x69, 0x52, 0x17, 0x64, 0x65, 0x70, - 0x6f, 0x73, 0x69, 0x74, 0x42, 0x61, 0x6c, 0x61, 0x6e, 0x63, 0x65, 0x54, 0x6f, 0x43, 0x6f, 0x6e, - 0x73, 0x75, 0x6d, 0x65, 0x12, 0x7c, 0x0a, 0x17, 0x65, 0x78, 0x69, 0x74, 0x5f, 0x62, 0x61, 0x6c, - 0x61, 0x6e, 0x63, 0x65, 0x5f, 0x74, 0x6f, 0x5f, 0x63, 0x6f, 0x6e, 0x73, 0x75, 0x6d, 0x65, 0x18, - 0xe3, 0x5d, 0x20, 0x01, 0x28, 0x04, 0x42, 0x44, 0x82, 0xb5, 0x18, 0x40, 0x67, 0x69, 0x74, 0x68, - 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, - 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, - 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, - 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x47, 0x77, 0x65, 0x69, 0x52, 0x14, 0x65, 0x78, - 0x69, 0x74, 0x42, 0x61, 0x6c, 0x61, 0x6e, 0x63, 0x65, 0x54, 0x6f, 0x43, 0x6f, 0x6e, 0x73, 0x75, - 0x6d, 0x65, 0x12, 0x76, 0x0a, 0x13, 0x65, 0x61, 0x72, 0x6c, 0x69, 0x65, 0x73, 0x74, 0x5f, 0x65, - 0x78, 0x69, 0x74, 0x5f, 0x65, 0x70, 0x6f, 0x63, 
0x68, 0x18, 0xe4, 0x5d, 0x20, 0x01, 0x28, 0x04, - 0x42, 0x45, 0x82, 0xb5, 0x18, 0x41, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, + 0x64, 0x65, 0x72, 0x52, 0x1c, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x74, 0x45, 0x78, 0x65, 0x63, 0x75, + 0x74, 0x69, 0x6f, 0x6e, 0x50, 0x61, 0x79, 0x6c, 0x6f, 0x61, 0x64, 0x48, 0x65, 0x61, 0x64, 0x65, + 0x72, 0x22, 0x86, 0x11, 0x0a, 0x12, 0x42, 0x65, 0x61, 0x63, 0x6f, 0x6e, 0x53, 0x74, 0x61, 0x74, + 0x65, 0x43, 0x61, 0x70, 0x65, 0x6c, 0x6c, 0x61, 0x12, 0x22, 0x0a, 0x0c, 0x67, 0x65, 0x6e, 0x65, + 0x73, 0x69, 0x73, 0x5f, 0x74, 0x69, 0x6d, 0x65, 0x18, 0xe9, 0x07, 0x20, 0x01, 0x28, 0x04, 0x52, + 0x0b, 0x67, 0x65, 0x6e, 0x65, 0x73, 0x69, 0x73, 0x54, 0x69, 0x6d, 0x65, 0x12, 0x3f, 0x0a, 0x17, + 0x67, 0x65, 0x6e, 0x65, 0x73, 0x69, 0x73, 0x5f, 0x76, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, + 0x72, 0x73, 0x5f, 0x72, 0x6f, 0x6f, 0x74, 0x18, 0xea, 0x07, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x06, + 0x8a, 0xb5, 0x18, 0x02, 0x33, 0x32, 0x52, 0x15, 0x67, 0x65, 0x6e, 0x65, 0x73, 0x69, 0x73, 0x56, + 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, 0x73, 0x52, 0x6f, 0x6f, 0x74, 0x12, 0x59, 0x0a, + 0x04, 0x73, 0x6c, 0x6f, 0x74, 0x18, 0xeb, 0x07, 0x20, 0x01, 0x28, 0x04, 0x42, 0x44, 0x82, 0xb5, + 0x18, 0x40, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, + 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, + 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, + 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x53, 0x6c, + 0x6f, 0x74, 0x52, 0x04, 0x73, 0x6c, 0x6f, 0x74, 0x12, 0x30, 0x0a, 0x04, 0x66, 0x6f, 0x72, 0x6b, + 0x18, 0xec, 0x07, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1b, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, + 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, + 0x46, 0x6f, 0x72, 0x6b, 0x52, 0x04, 0x66, 0x6f, 0x72, 0x6b, 0x12, 0x59, 0x0a, 0x13, 0x6c, 0x61, + 0x74, 0x65, 0x73, 0x74, 0x5f, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x5f, 0x68, 0x65, 0x61, 0x64, 0x65, + 0x72, 0x18, 0xd1, 0x0f, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x28, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, + 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, + 0x2e, 0x42, 0x65, 0x61, 0x63, 0x6f, 0x6e, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x48, 0x65, 0x61, 0x64, + 0x65, 0x72, 0x52, 0x11, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x74, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x48, + 0x65, 0x61, 0x64, 0x65, 0x72, 0x12, 0x2d, 0x0a, 0x0b, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x5f, 0x72, + 0x6f, 0x6f, 0x74, 0x73, 0x18, 0xd2, 0x0f, 0x20, 0x03, 0x28, 0x0c, 0x42, 0x0b, 0x8a, 0xb5, 0x18, + 0x07, 0x38, 0x31, 0x39, 0x32, 0x2c, 0x33, 0x32, 0x52, 0x0a, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x52, + 0x6f, 0x6f, 0x74, 0x73, 0x12, 0x2d, 0x0a, 0x0b, 0x73, 0x74, 0x61, 0x74, 0x65, 0x5f, 0x72, 0x6f, + 0x6f, 0x74, 0x73, 0x18, 0xd3, 0x0f, 0x20, 0x03, 0x28, 0x0c, 0x42, 0x0b, 0x8a, 0xb5, 0x18, 0x07, + 0x38, 0x31, 0x39, 0x32, 0x2c, 0x33, 0x32, 0x52, 0x0a, 0x73, 0x74, 0x61, 0x74, 0x65, 0x52, 0x6f, + 0x6f, 0x74, 0x73, 0x12, 0x40, 0x0a, 0x10, 0x68, 0x69, 0x73, 0x74, 0x6f, 0x72, 0x69, 0x63, 0x61, + 0x6c, 0x5f, 0x72, 0x6f, 0x6f, 0x74, 0x73, 0x18, 0xd4, 0x0f, 0x20, 0x03, 0x28, 0x0c, 0x42, 0x14, + 0x8a, 0xb5, 0x18, 0x04, 0x3f, 0x2c, 0x33, 0x32, 0x92, 0xb5, 0x18, 0x08, 0x31, 0x36, 0x37, 0x37, + 0x37, 0x32, 0x31, 0x36, 0x52, 0x0f, 0x68, 0x69, 0x73, 0x74, 0x6f, 0x72, 0x69, 0x63, 0x61, 0x6c, + 0x52, 0x6f, 0x6f, 0x74, 0x73, 0x12, 0x3d, 0x0a, 0x09, 0x65, 0x74, 0x68, 
0x31, 0x5f, 0x64, 0x61, + 0x74, 0x61, 0x18, 0xb9, 0x17, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1f, 0x2e, 0x65, 0x74, 0x68, 0x65, + 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, + 0x31, 0x2e, 0x45, 0x74, 0x68, 0x31, 0x44, 0x61, 0x74, 0x61, 0x52, 0x08, 0x65, 0x74, 0x68, 0x31, + 0x44, 0x61, 0x74, 0x61, 0x12, 0x52, 0x0a, 0x0f, 0x65, 0x74, 0x68, 0x31, 0x5f, 0x64, 0x61, 0x74, + 0x61, 0x5f, 0x76, 0x6f, 0x74, 0x65, 0x73, 0x18, 0xba, 0x17, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1f, + 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, + 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x45, 0x74, 0x68, 0x31, 0x44, 0x61, 0x74, 0x61, 0x42, + 0x08, 0x92, 0xb5, 0x18, 0x04, 0x32, 0x30, 0x34, 0x38, 0x52, 0x0d, 0x65, 0x74, 0x68, 0x31, 0x44, + 0x61, 0x74, 0x61, 0x56, 0x6f, 0x74, 0x65, 0x73, 0x12, 0x2d, 0x0a, 0x12, 0x65, 0x74, 0x68, 0x31, + 0x5f, 0x64, 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x5f, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x18, 0xbb, + 0x17, 0x20, 0x01, 0x28, 0x04, 0x52, 0x10, 0x65, 0x74, 0x68, 0x31, 0x44, 0x65, 0x70, 0x6f, 0x73, + 0x69, 0x74, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x12, 0x54, 0x0a, 0x0a, 0x76, 0x61, 0x6c, 0x69, 0x64, + 0x61, 0x74, 0x6f, 0x72, 0x73, 0x18, 0xa1, 0x1f, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x20, 0x2e, 0x65, + 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, + 0x70, 0x68, 0x61, 0x31, 0x2e, 0x56, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, 0x42, 0x11, + 0x92, 0xb5, 0x18, 0x0d, 0x31, 0x30, 0x39, 0x39, 0x35, 0x31, 0x31, 0x36, 0x32, 0x37, 0x37, 0x37, + 0x36, 0x52, 0x0a, 0x76, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, 0x73, 0x12, 0x2e, 0x0a, + 0x08, 0x62, 0x61, 0x6c, 0x61, 0x6e, 0x63, 0x65, 0x73, 0x18, 0xa2, 0x1f, 0x20, 0x03, 0x28, 0x04, + 0x42, 0x11, 0x92, 0xb5, 0x18, 0x0d, 0x31, 0x30, 0x39, 0x39, 0x35, 0x31, 0x31, 0x36, 0x32, 0x37, + 0x37, 0x37, 0x36, 0x52, 0x08, 0x62, 0x61, 0x6c, 0x61, 0x6e, 0x63, 0x65, 0x73, 0x12, 0x30, 0x0a, + 0x0c, 0x72, 0x61, 0x6e, 0x64, 0x61, 0x6f, 0x5f, 0x6d, 0x69, 0x78, 0x65, 0x73, 0x18, 0x89, 0x27, + 0x20, 0x03, 0x28, 0x0c, 0x42, 0x0c, 0x8a, 0xb5, 0x18, 0x08, 0x36, 0x35, 0x35, 0x33, 0x36, 0x2c, + 0x33, 0x32, 0x52, 0x0b, 0x72, 0x61, 0x6e, 0x64, 0x61, 0x6f, 0x4d, 0x69, 0x78, 0x65, 0x73, 0x12, + 0x27, 0x0a, 0x09, 0x73, 0x6c, 0x61, 0x73, 0x68, 0x69, 0x6e, 0x67, 0x73, 0x18, 0xf1, 0x2e, 0x20, + 0x03, 0x28, 0x04, 0x42, 0x08, 0x8a, 0xb5, 0x18, 0x04, 0x38, 0x31, 0x39, 0x32, 0x52, 0x09, 0x73, + 0x6c, 0x61, 0x73, 0x68, 0x69, 0x6e, 0x67, 0x73, 0x12, 0x54, 0x0a, 0x1c, 0x70, 0x72, 0x65, 0x76, + 0x69, 0x6f, 0x75, 0x73, 0x5f, 0x65, 0x70, 0x6f, 0x63, 0x68, 0x5f, 0x70, 0x61, 0x72, 0x74, 0x69, + 0x63, 0x69, 0x70, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0xd9, 0x36, 0x20, 0x01, 0x28, 0x0c, 0x42, + 0x11, 0x92, 0xb5, 0x18, 0x0d, 0x31, 0x30, 0x39, 0x39, 0x35, 0x31, 0x31, 0x36, 0x32, 0x37, 0x37, + 0x37, 0x36, 0x52, 0x1a, 0x70, 0x72, 0x65, 0x76, 0x69, 0x6f, 0x75, 0x73, 0x45, 0x70, 0x6f, 0x63, + 0x68, 0x50, 0x61, 0x72, 0x74, 0x69, 0x63, 0x69, 0x70, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x52, + 0x0a, 0x1b, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x5f, 0x65, 0x70, 0x6f, 0x63, 0x68, 0x5f, + 0x70, 0x61, 0x72, 0x74, 0x69, 0x63, 0x69, 0x70, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0xda, 0x36, + 0x20, 0x01, 0x28, 0x0c, 0x42, 0x11, 0x92, 0xb5, 0x18, 0x0d, 0x31, 0x30, 0x39, 0x39, 0x35, 0x31, + 0x31, 0x36, 0x32, 0x37, 0x37, 0x37, 0x36, 0x52, 0x19, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, + 0x45, 0x70, 0x6f, 0x63, 0x68, 0x50, 0x61, 0x72, 0x74, 0x69, 0x63, 0x69, 0x70, 0x61, 0x74, 0x69, + 
0x6f, 0x6e, 0x12, 0x67, 0x0a, 0x12, 0x6a, 0x75, 0x73, 0x74, 0x69, 0x66, 0x69, 0x63, 0x61, 0x74, + 0x69, 0x6f, 0x6e, 0x5f, 0x62, 0x69, 0x74, 0x73, 0x18, 0xc1, 0x3e, 0x20, 0x01, 0x28, 0x0c, 0x42, + 0x37, 0x82, 0xb5, 0x18, 0x2e, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, + 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x67, 0x6f, 0x2d, + 0x62, 0x69, 0x74, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x2e, 0x42, 0x69, 0x74, 0x76, 0x65, 0x63, 0x74, + 0x6f, 0x72, 0x34, 0x8a, 0xb5, 0x18, 0x01, 0x31, 0x52, 0x11, 0x6a, 0x75, 0x73, 0x74, 0x69, 0x66, + 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x42, 0x69, 0x74, 0x73, 0x12, 0x66, 0x0a, 0x1d, 0x70, + 0x72, 0x65, 0x76, 0x69, 0x6f, 0x75, 0x73, 0x5f, 0x6a, 0x75, 0x73, 0x74, 0x69, 0x66, 0x69, 0x65, + 0x64, 0x5f, 0x63, 0x68, 0x65, 0x63, 0x6b, 0x70, 0x6f, 0x69, 0x6e, 0x74, 0x18, 0xc2, 0x3e, 0x20, + 0x01, 0x28, 0x0b, 0x32, 0x21, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, + 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x43, 0x68, 0x65, 0x63, + 0x6b, 0x70, 0x6f, 0x69, 0x6e, 0x74, 0x52, 0x1b, 0x70, 0x72, 0x65, 0x76, 0x69, 0x6f, 0x75, 0x73, + 0x4a, 0x75, 0x73, 0x74, 0x69, 0x66, 0x69, 0x65, 0x64, 0x43, 0x68, 0x65, 0x63, 0x6b, 0x70, 0x6f, + 0x69, 0x6e, 0x74, 0x12, 0x64, 0x0a, 0x1c, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x5f, 0x6a, + 0x75, 0x73, 0x74, 0x69, 0x66, 0x69, 0x65, 0x64, 0x5f, 0x63, 0x68, 0x65, 0x63, 0x6b, 0x70, 0x6f, + 0x69, 0x6e, 0x74, 0x18, 0xc3, 0x3e, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x21, 0x2e, 0x65, 0x74, 0x68, + 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, + 0x61, 0x31, 0x2e, 0x43, 0x68, 0x65, 0x63, 0x6b, 0x70, 0x6f, 0x69, 0x6e, 0x74, 0x52, 0x1a, 0x63, + 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x4a, 0x75, 0x73, 0x74, 0x69, 0x66, 0x69, 0x65, 0x64, 0x43, + 0x68, 0x65, 0x63, 0x6b, 0x70, 0x6f, 0x69, 0x6e, 0x74, 0x12, 0x55, 0x0a, 0x14, 0x66, 0x69, 0x6e, + 0x61, 0x6c, 0x69, 0x7a, 0x65, 0x64, 0x5f, 0x63, 0x68, 0x65, 0x63, 0x6b, 0x70, 0x6f, 0x69, 0x6e, + 0x74, 0x18, 0xc4, 0x3e, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x21, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, + 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, + 0x2e, 0x43, 0x68, 0x65, 0x63, 0x6b, 0x70, 0x6f, 0x69, 0x6e, 0x74, 0x52, 0x13, 0x66, 0x69, 0x6e, + 0x61, 0x6c, 0x69, 0x7a, 0x65, 0x64, 0x43, 0x68, 0x65, 0x63, 0x6b, 0x70, 0x6f, 0x69, 0x6e, 0x74, + 0x12, 0x3f, 0x0a, 0x11, 0x69, 0x6e, 0x61, 0x63, 0x74, 0x69, 0x76, 0x69, 0x74, 0x79, 0x5f, 0x73, + 0x63, 0x6f, 0x72, 0x65, 0x73, 0x18, 0xa9, 0x46, 0x20, 0x03, 0x28, 0x04, 0x42, 0x11, 0x92, 0xb5, + 0x18, 0x0d, 0x31, 0x30, 0x39, 0x39, 0x35, 0x31, 0x31, 0x36, 0x32, 0x37, 0x37, 0x37, 0x36, 0x52, + 0x10, 0x69, 0x6e, 0x61, 0x63, 0x74, 0x69, 0x76, 0x69, 0x74, 0x79, 0x53, 0x63, 0x6f, 0x72, 0x65, + 0x73, 0x12, 0x5b, 0x0a, 0x16, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x5f, 0x73, 0x79, 0x6e, + 0x63, 0x5f, 0x63, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x74, 0x65, 0x65, 0x18, 0xaa, 0x46, 0x20, 0x01, + 0x28, 0x0b, 0x32, 0x24, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, + 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x53, 0x79, 0x6e, 0x63, 0x43, + 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x74, 0x65, 0x65, 0x52, 0x14, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, + 0x74, 0x53, 0x79, 0x6e, 0x63, 0x43, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x74, 0x65, 0x65, 0x12, 0x55, + 0x0a, 0x13, 0x6e, 0x65, 0x78, 0x74, 0x5f, 0x73, 0x79, 0x6e, 0x63, 0x5f, 0x63, 0x6f, 0x6d, 0x6d, + 0x69, 0x74, 0x74, 0x65, 
0x65, 0x18, 0xab, 0x46, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x24, 0x2e, 0x65, + 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, + 0x70, 0x68, 0x61, 0x31, 0x2e, 0x53, 0x79, 0x6e, 0x63, 0x43, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x74, + 0x65, 0x65, 0x52, 0x11, 0x6e, 0x65, 0x78, 0x74, 0x53, 0x79, 0x6e, 0x63, 0x43, 0x6f, 0x6d, 0x6d, + 0x69, 0x74, 0x74, 0x65, 0x65, 0x12, 0x79, 0x0a, 0x1f, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x74, 0x5f, + 0x65, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x70, 0x61, 0x79, 0x6c, 0x6f, 0x61, + 0x64, 0x5f, 0x68, 0x65, 0x61, 0x64, 0x65, 0x72, 0x18, 0x91, 0x4e, 0x20, 0x01, 0x28, 0x0b, 0x32, + 0x31, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x6e, 0x67, 0x69, 0x6e, + 0x65, 0x2e, 0x76, 0x31, 0x2e, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x50, 0x61, + 0x79, 0x6c, 0x6f, 0x61, 0x64, 0x48, 0x65, 0x61, 0x64, 0x65, 0x72, 0x43, 0x61, 0x70, 0x65, 0x6c, + 0x6c, 0x61, 0x52, 0x1c, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x74, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, + 0x69, 0x6f, 0x6e, 0x50, 0x61, 0x79, 0x6c, 0x6f, 0x61, 0x64, 0x48, 0x65, 0x61, 0x64, 0x65, 0x72, + 0x12, 0x33, 0x0a, 0x15, 0x6e, 0x65, 0x78, 0x74, 0x5f, 0x77, 0x69, 0x74, 0x68, 0x64, 0x72, 0x61, + 0x77, 0x61, 0x6c, 0x5f, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x18, 0xf9, 0x55, 0x20, 0x01, 0x28, 0x04, + 0x52, 0x13, 0x6e, 0x65, 0x78, 0x74, 0x57, 0x69, 0x74, 0x68, 0x64, 0x72, 0x61, 0x77, 0x61, 0x6c, + 0x49, 0x6e, 0x64, 0x65, 0x78, 0x12, 0x96, 0x01, 0x0a, 0x1f, 0x6e, 0x65, 0x78, 0x74, 0x5f, 0x77, + 0x69, 0x74, 0x68, 0x64, 0x72, 0x61, 0x77, 0x61, 0x6c, 0x5f, 0x76, 0x61, 0x6c, 0x69, 0x64, 0x61, + 0x74, 0x6f, 0x72, 0x5f, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x18, 0xfa, 0x55, 0x20, 0x01, 0x28, 0x04, + 0x42, 0x4e, 0x82, 0xb5, 0x18, 0x4a, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, - 0x73, 0x2e, 0x45, 0x70, 0x6f, 0x63, 0x68, 0x52, 0x11, 0x65, 0x61, 0x72, 0x6c, 0x69, 0x65, 0x73, - 0x74, 0x45, 0x78, 0x69, 0x74, 0x45, 0x70, 0x6f, 0x63, 0x68, 0x12, 0x8e, 0x01, 0x0a, 0x20, 0x63, - 0x6f, 0x6e, 0x73, 0x6f, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x62, 0x61, 0x6c, - 0x61, 0x6e, 0x63, 0x65, 0x5f, 0x74, 0x6f, 0x5f, 0x63, 0x6f, 0x6e, 0x73, 0x75, 0x6d, 0x65, 0x18, - 0xe5, 0x5d, 0x20, 0x01, 0x28, 0x04, 0x42, 0x44, 0x82, 0xb5, 0x18, 0x40, 0x67, 0x69, 0x74, 0x68, - 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, - 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, - 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, - 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x47, 0x77, 0x65, 0x69, 0x52, 0x1d, 0x63, 0x6f, - 0x6e, 0x73, 0x6f, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x42, 0x61, 0x6c, 0x61, 0x6e, - 0x63, 0x65, 0x54, 0x6f, 0x43, 0x6f, 0x6e, 0x73, 0x75, 0x6d, 0x65, 0x12, 0x88, 0x01, 0x0a, 0x1c, - 0x65, 0x61, 0x72, 0x6c, 0x69, 0x65, 0x73, 0x74, 0x5f, 0x63, 0x6f, 0x6e, 0x73, 0x6f, 0x6c, 0x69, - 0x64, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x65, 0x70, 0x6f, 0x63, 0x68, 0x18, 0xe6, 0x5d, 0x20, - 0x01, 0x28, 0x04, 0x42, 0x45, 0x82, 0xb5, 0x18, 0x41, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, + 0x73, 0x2e, 0x56, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 
0x6f, 0x72, 0x49, 0x6e, 0x64, 0x65, 0x78, + 0x52, 0x1c, 0x6e, 0x65, 0x78, 0x74, 0x57, 0x69, 0x74, 0x68, 0x64, 0x72, 0x61, 0x77, 0x61, 0x6c, + 0x56, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x12, 0x6a, + 0x0a, 0x14, 0x68, 0x69, 0x73, 0x74, 0x6f, 0x72, 0x69, 0x63, 0x61, 0x6c, 0x5f, 0x73, 0x75, 0x6d, + 0x6d, 0x61, 0x72, 0x69, 0x65, 0x73, 0x18, 0xfb, 0x55, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x28, 0x2e, + 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, + 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x48, 0x69, 0x73, 0x74, 0x6f, 0x72, 0x69, 0x63, 0x61, 0x6c, + 0x53, 0x75, 0x6d, 0x6d, 0x61, 0x72, 0x79, 0x42, 0x0c, 0x92, 0xb5, 0x18, 0x08, 0x31, 0x36, 0x37, + 0x37, 0x37, 0x32, 0x31, 0x36, 0x52, 0x13, 0x68, 0x69, 0x73, 0x74, 0x6f, 0x72, 0x69, 0x63, 0x61, + 0x6c, 0x53, 0x75, 0x6d, 0x6d, 0x61, 0x72, 0x69, 0x65, 0x73, 0x22, 0x82, 0x11, 0x0a, 0x10, 0x42, + 0x65, 0x61, 0x63, 0x6f, 0x6e, 0x53, 0x74, 0x61, 0x74, 0x65, 0x44, 0x65, 0x6e, 0x65, 0x62, 0x12, + 0x22, 0x0a, 0x0c, 0x67, 0x65, 0x6e, 0x65, 0x73, 0x69, 0x73, 0x5f, 0x74, 0x69, 0x6d, 0x65, 0x18, + 0xe9, 0x07, 0x20, 0x01, 0x28, 0x04, 0x52, 0x0b, 0x67, 0x65, 0x6e, 0x65, 0x73, 0x69, 0x73, 0x54, + 0x69, 0x6d, 0x65, 0x12, 0x3f, 0x0a, 0x17, 0x67, 0x65, 0x6e, 0x65, 0x73, 0x69, 0x73, 0x5f, 0x76, + 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, 0x73, 0x5f, 0x72, 0x6f, 0x6f, 0x74, 0x18, 0xea, + 0x07, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x06, 0x8a, 0xb5, 0x18, 0x02, 0x33, 0x32, 0x52, 0x15, 0x67, + 0x65, 0x6e, 0x65, 0x73, 0x69, 0x73, 0x56, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, 0x73, + 0x52, 0x6f, 0x6f, 0x74, 0x12, 0x59, 0x0a, 0x04, 0x73, 0x6c, 0x6f, 0x74, 0x18, 0xeb, 0x07, 0x20, + 0x01, 0x28, 0x04, 0x42, 0x44, 0x82, 0xb5, 0x18, 0x40, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, - 0x69, 0x76, 0x65, 0x73, 0x2e, 0x45, 0x70, 0x6f, 0x63, 0x68, 0x52, 0x1a, 0x65, 0x61, 0x72, 0x6c, - 0x69, 0x65, 0x73, 0x74, 0x43, 0x6f, 0x6e, 0x73, 0x6f, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x69, 0x6f, - 0x6e, 0x45, 0x70, 0x6f, 0x63, 0x68, 0x12, 0x60, 0x0a, 0x10, 0x70, 0x65, 0x6e, 0x64, 0x69, 0x6e, - 0x67, 0x5f, 0x64, 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x73, 0x18, 0xe7, 0x5d, 0x20, 0x03, 0x28, - 0x0b, 0x32, 0x25, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, - 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x50, 0x65, 0x6e, 0x64, 0x69, 0x6e, - 0x67, 0x44, 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x42, 0x0d, 0x92, 0xb5, 0x18, 0x09, 0x31, 0x33, - 0x34, 0x32, 0x31, 0x37, 0x37, 0x32, 0x38, 0x52, 0x0f, 0x70, 0x65, 0x6e, 0x64, 0x69, 0x6e, 0x67, - 0x44, 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x73, 0x12, 0x7f, 0x0a, 0x1b, 0x70, 0x65, 0x6e, 0x64, - 0x69, 0x6e, 0x67, 0x5f, 0x70, 0x61, 0x72, 0x74, 0x69, 0x61, 0x6c, 0x5f, 0x77, 0x69, 0x74, 0x68, - 0x64, 0x72, 0x61, 0x77, 0x61, 0x6c, 0x73, 0x18, 0xe8, 0x5d, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x2f, + 0x69, 0x76, 0x65, 0x73, 0x2e, 0x53, 0x6c, 0x6f, 0x74, 0x52, 0x04, 0x73, 0x6c, 0x6f, 0x74, 0x12, + 0x30, 0x0a, 0x04, 0x66, 0x6f, 0x72, 0x6b, 0x18, 0xec, 0x07, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1b, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, - 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x50, 0x65, 0x6e, 0x64, 0x69, 0x6e, 0x67, 
0x50, 0x61, - 0x72, 0x74, 0x69, 0x61, 0x6c, 0x57, 0x69, 0x74, 0x68, 0x64, 0x72, 0x61, 0x77, 0x61, 0x6c, 0x42, - 0x0d, 0x92, 0xb5, 0x18, 0x09, 0x31, 0x33, 0x34, 0x32, 0x31, 0x37, 0x37, 0x32, 0x38, 0x52, 0x19, - 0x70, 0x65, 0x6e, 0x64, 0x69, 0x6e, 0x67, 0x50, 0x61, 0x72, 0x74, 0x69, 0x61, 0x6c, 0x57, 0x69, - 0x74, 0x68, 0x64, 0x72, 0x61, 0x77, 0x61, 0x6c, 0x73, 0x12, 0x6f, 0x0a, 0x16, 0x70, 0x65, 0x6e, - 0x64, 0x69, 0x6e, 0x67, 0x5f, 0x63, 0x6f, 0x6e, 0x73, 0x6f, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x69, - 0x6f, 0x6e, 0x73, 0x18, 0xe9, 0x5d, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x2b, 0x2e, 0x65, 0x74, 0x68, - 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, - 0x61, 0x31, 0x2e, 0x50, 0x65, 0x6e, 0x64, 0x69, 0x6e, 0x67, 0x43, 0x6f, 0x6e, 0x73, 0x6f, 0x6c, - 0x69, 0x64, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x42, 0x0a, 0x92, 0xb5, 0x18, 0x06, 0x32, 0x36, 0x32, - 0x31, 0x34, 0x34, 0x52, 0x15, 0x70, 0x65, 0x6e, 0x64, 0x69, 0x6e, 0x67, 0x43, 0x6f, 0x6e, 0x73, - 0x6f, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x22, 0xe7, 0x19, 0x0a, 0x0f, 0x42, - 0x65, 0x61, 0x63, 0x6f, 0x6e, 0x53, 0x74, 0x61, 0x74, 0x65, 0x46, 0x75, 0x6c, 0x75, 0x12, 0x22, - 0x0a, 0x0c, 0x67, 0x65, 0x6e, 0x65, 0x73, 0x69, 0x73, 0x5f, 0x74, 0x69, 0x6d, 0x65, 0x18, 0xe9, - 0x07, 0x20, 0x01, 0x28, 0x04, 0x52, 0x0b, 0x67, 0x65, 0x6e, 0x65, 0x73, 0x69, 0x73, 0x54, 0x69, - 0x6d, 0x65, 0x12, 0x3f, 0x0a, 0x17, 0x67, 0x65, 0x6e, 0x65, 0x73, 0x69, 0x73, 0x5f, 0x76, 0x61, - 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, 0x73, 0x5f, 0x72, 0x6f, 0x6f, 0x74, 0x18, 0xea, 0x07, - 0x20, 0x01, 0x28, 0x0c, 0x42, 0x06, 0x8a, 0xb5, 0x18, 0x02, 0x33, 0x32, 0x52, 0x15, 0x67, 0x65, - 0x6e, 0x65, 0x73, 0x69, 0x73, 0x56, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, 0x73, 0x52, - 0x6f, 0x6f, 0x74, 0x12, 0x59, 0x0a, 0x04, 0x73, 0x6c, 0x6f, 0x74, 0x18, 0xeb, 0x07, 0x20, 0x01, - 0x28, 0x04, 0x42, 0x44, 0x82, 0xb5, 0x18, 0x40, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, - 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, - 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, - 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, - 0x76, 0x65, 0x73, 0x2e, 0x53, 0x6c, 0x6f, 0x74, 0x52, 0x04, 0x73, 0x6c, 0x6f, 0x74, 0x12, 0x30, - 0x0a, 0x04, 0x66, 0x6f, 0x72, 0x6b, 0x18, 0xec, 0x07, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1b, 0x2e, - 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, - 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x46, 0x6f, 0x72, 0x6b, 0x52, 0x04, 0x66, 0x6f, 0x72, 0x6b, - 0x12, 0x59, 0x0a, 0x13, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x74, 0x5f, 0x62, 0x6c, 0x6f, 0x63, 0x6b, - 0x5f, 0x68, 0x65, 0x61, 0x64, 0x65, 0x72, 0x18, 0xd1, 0x0f, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x28, - 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, - 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x42, 0x65, 0x61, 0x63, 0x6f, 0x6e, 0x42, 0x6c, 0x6f, - 0x63, 0x6b, 0x48, 0x65, 0x61, 0x64, 0x65, 0x72, 0x52, 0x11, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x74, - 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x48, 0x65, 0x61, 0x64, 0x65, 0x72, 0x12, 0x2d, 0x0a, 0x0b, 0x62, - 0x6c, 0x6f, 0x63, 0x6b, 0x5f, 0x72, 0x6f, 0x6f, 0x74, 0x73, 0x18, 0xd2, 0x0f, 0x20, 0x03, 0x28, + 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x46, 0x6f, 0x72, 0x6b, 0x52, 0x04, 0x66, 0x6f, 0x72, + 0x6b, 0x12, 0x59, 0x0a, 0x13, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x74, 0x5f, 0x62, 0x6c, 0x6f, 0x63, + 0x6b, 0x5f, 
0x68, 0x65, 0x61, 0x64, 0x65, 0x72, 0x18, 0xd1, 0x0f, 0x20, 0x01, 0x28, 0x0b, 0x32, + 0x28, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, + 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x42, 0x65, 0x61, 0x63, 0x6f, 0x6e, 0x42, 0x6c, + 0x6f, 0x63, 0x6b, 0x48, 0x65, 0x61, 0x64, 0x65, 0x72, 0x52, 0x11, 0x6c, 0x61, 0x74, 0x65, 0x73, + 0x74, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x48, 0x65, 0x61, 0x64, 0x65, 0x72, 0x12, 0x2d, 0x0a, 0x0b, + 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x5f, 0x72, 0x6f, 0x6f, 0x74, 0x73, 0x18, 0xd2, 0x0f, 0x20, 0x03, + 0x28, 0x0c, 0x42, 0x0b, 0x8a, 0xb5, 0x18, 0x07, 0x38, 0x31, 0x39, 0x32, 0x2c, 0x33, 0x32, 0x52, + 0x0a, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x52, 0x6f, 0x6f, 0x74, 0x73, 0x12, 0x2d, 0x0a, 0x0b, 0x73, + 0x74, 0x61, 0x74, 0x65, 0x5f, 0x72, 0x6f, 0x6f, 0x74, 0x73, 0x18, 0xd3, 0x0f, 0x20, 0x03, 0x28, 0x0c, 0x42, 0x0b, 0x8a, 0xb5, 0x18, 0x07, 0x38, 0x31, 0x39, 0x32, 0x2c, 0x33, 0x32, 0x52, 0x0a, - 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x52, 0x6f, 0x6f, 0x74, 0x73, 0x12, 0x2d, 0x0a, 0x0b, 0x73, 0x74, - 0x61, 0x74, 0x65, 0x5f, 0x72, 0x6f, 0x6f, 0x74, 0x73, 0x18, 0xd3, 0x0f, 0x20, 0x03, 0x28, 0x0c, - 0x42, 0x0b, 0x8a, 0xb5, 0x18, 0x07, 0x38, 0x31, 0x39, 0x32, 0x2c, 0x33, 0x32, 0x52, 0x0a, 0x73, - 0x74, 0x61, 0x74, 0x65, 0x52, 0x6f, 0x6f, 0x74, 0x73, 0x12, 0x40, 0x0a, 0x10, 0x68, 0x69, 0x73, - 0x74, 0x6f, 0x72, 0x69, 0x63, 0x61, 0x6c, 0x5f, 0x72, 0x6f, 0x6f, 0x74, 0x73, 0x18, 0xd4, 0x0f, - 0x20, 0x03, 0x28, 0x0c, 0x42, 0x14, 0x8a, 0xb5, 0x18, 0x04, 0x3f, 0x2c, 0x33, 0x32, 0x92, 0xb5, - 0x18, 0x08, 0x31, 0x36, 0x37, 0x37, 0x37, 0x32, 0x31, 0x36, 0x52, 0x0f, 0x68, 0x69, 0x73, 0x74, - 0x6f, 0x72, 0x69, 0x63, 0x61, 0x6c, 0x52, 0x6f, 0x6f, 0x74, 0x73, 0x12, 0x3d, 0x0a, 0x09, 0x65, - 0x74, 0x68, 0x31, 0x5f, 0x64, 0x61, 0x74, 0x61, 0x18, 0xb9, 0x17, 0x20, 0x01, 0x28, 0x0b, 0x32, - 0x1f, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, - 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x45, 0x74, 0x68, 0x31, 0x44, 0x61, 0x74, 0x61, - 0x52, 0x08, 0x65, 0x74, 0x68, 0x31, 0x44, 0x61, 0x74, 0x61, 0x12, 0x52, 0x0a, 0x0f, 0x65, 0x74, - 0x68, 0x31, 0x5f, 0x64, 0x61, 0x74, 0x61, 0x5f, 0x76, 0x6f, 0x74, 0x65, 0x73, 0x18, 0xba, 0x17, - 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1f, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, - 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x45, 0x74, 0x68, - 0x31, 0x44, 0x61, 0x74, 0x61, 0x42, 0x08, 0x92, 0xb5, 0x18, 0x04, 0x32, 0x30, 0x34, 0x38, 0x52, - 0x0d, 0x65, 0x74, 0x68, 0x31, 0x44, 0x61, 0x74, 0x61, 0x56, 0x6f, 0x74, 0x65, 0x73, 0x12, 0x2d, - 0x0a, 0x12, 0x65, 0x74, 0x68, 0x31, 0x5f, 0x64, 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x5f, 0x69, - 0x6e, 0x64, 0x65, 0x78, 0x18, 0xbb, 0x17, 0x20, 0x01, 0x28, 0x04, 0x52, 0x10, 0x65, 0x74, 0x68, - 0x31, 0x44, 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x12, 0x54, 0x0a, - 0x0a, 0x76, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, 0x73, 0x18, 0xa1, 0x1f, 0x20, 0x03, - 0x28, 0x0b, 0x32, 0x20, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, - 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x56, 0x61, 0x6c, 0x69, 0x64, - 0x61, 0x74, 0x6f, 0x72, 0x42, 0x11, 0x92, 0xb5, 0x18, 0x0d, 0x31, 0x30, 0x39, 0x39, 0x35, 0x31, - 0x31, 0x36, 0x32, 0x37, 0x37, 0x37, 0x36, 0x52, 0x0a, 0x76, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, - 0x6f, 0x72, 0x73, 0x12, 0x2e, 0x0a, 0x08, 0x62, 0x61, 0x6c, 0x61, 0x6e, 0x63, 0x65, 0x73, 0x18, - 0xa2, 0x1f, 0x20, 0x03, 0x28, 0x04, 
0x42, 0x11, 0x92, 0xb5, 0x18, 0x0d, 0x31, 0x30, 0x39, 0x39, - 0x35, 0x31, 0x31, 0x36, 0x32, 0x37, 0x37, 0x37, 0x36, 0x52, 0x08, 0x62, 0x61, 0x6c, 0x61, 0x6e, - 0x63, 0x65, 0x73, 0x12, 0x30, 0x0a, 0x0c, 0x72, 0x61, 0x6e, 0x64, 0x61, 0x6f, 0x5f, 0x6d, 0x69, - 0x78, 0x65, 0x73, 0x18, 0x89, 0x27, 0x20, 0x03, 0x28, 0x0c, 0x42, 0x0c, 0x8a, 0xb5, 0x18, 0x08, - 0x36, 0x35, 0x35, 0x33, 0x36, 0x2c, 0x33, 0x32, 0x52, 0x0b, 0x72, 0x61, 0x6e, 0x64, 0x61, 0x6f, - 0x4d, 0x69, 0x78, 0x65, 0x73, 0x12, 0x27, 0x0a, 0x09, 0x73, 0x6c, 0x61, 0x73, 0x68, 0x69, 0x6e, - 0x67, 0x73, 0x18, 0xf1, 0x2e, 0x20, 0x03, 0x28, 0x04, 0x42, 0x08, 0x8a, 0xb5, 0x18, 0x04, 0x38, - 0x31, 0x39, 0x32, 0x52, 0x09, 0x73, 0x6c, 0x61, 0x73, 0x68, 0x69, 0x6e, 0x67, 0x73, 0x12, 0x54, - 0x0a, 0x1c, 0x70, 0x72, 0x65, 0x76, 0x69, 0x6f, 0x75, 0x73, 0x5f, 0x65, 0x70, 0x6f, 0x63, 0x68, - 0x5f, 0x70, 0x61, 0x72, 0x74, 0x69, 0x63, 0x69, 0x70, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0xd9, - 0x36, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x11, 0x92, 0xb5, 0x18, 0x0d, 0x31, 0x30, 0x39, 0x39, 0x35, - 0x31, 0x31, 0x36, 0x32, 0x37, 0x37, 0x37, 0x36, 0x52, 0x1a, 0x70, 0x72, 0x65, 0x76, 0x69, 0x6f, - 0x75, 0x73, 0x45, 0x70, 0x6f, 0x63, 0x68, 0x50, 0x61, 0x72, 0x74, 0x69, 0x63, 0x69, 0x70, 0x61, - 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x52, 0x0a, 0x1b, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x5f, - 0x65, 0x70, 0x6f, 0x63, 0x68, 0x5f, 0x70, 0x61, 0x72, 0x74, 0x69, 0x63, 0x69, 0x70, 0x61, 0x74, - 0x69, 0x6f, 0x6e, 0x18, 0xda, 0x36, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x11, 0x92, 0xb5, 0x18, 0x0d, - 0x31, 0x30, 0x39, 0x39, 0x35, 0x31, 0x31, 0x36, 0x32, 0x37, 0x37, 0x37, 0x36, 0x52, 0x19, 0x63, - 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x45, 0x70, 0x6f, 0x63, 0x68, 0x50, 0x61, 0x72, 0x74, 0x69, - 0x63, 0x69, 0x70, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x68, 0x0a, 0x12, 0x6a, 0x75, 0x73, 0x74, - 0x69, 0x66, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x62, 0x69, 0x74, 0x73, 0x18, 0xc1, - 0x3e, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x38, 0x82, 0xb5, 0x18, 0x2f, 0x67, 0x69, 0x74, 0x68, 0x75, - 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x61, 0x74, 0x69, 0x63, 0x6c, + 0x73, 0x74, 0x61, 0x74, 0x65, 0x52, 0x6f, 0x6f, 0x74, 0x73, 0x12, 0x40, 0x0a, 0x10, 0x68, 0x69, + 0x73, 0x74, 0x6f, 0x72, 0x69, 0x63, 0x61, 0x6c, 0x5f, 0x72, 0x6f, 0x6f, 0x74, 0x73, 0x18, 0xd4, + 0x0f, 0x20, 0x03, 0x28, 0x0c, 0x42, 0x14, 0x8a, 0xb5, 0x18, 0x04, 0x3f, 0x2c, 0x33, 0x32, 0x92, + 0xb5, 0x18, 0x08, 0x31, 0x36, 0x37, 0x37, 0x37, 0x32, 0x31, 0x36, 0x52, 0x0f, 0x68, 0x69, 0x73, + 0x74, 0x6f, 0x72, 0x69, 0x63, 0x61, 0x6c, 0x52, 0x6f, 0x6f, 0x74, 0x73, 0x12, 0x3d, 0x0a, 0x09, + 0x65, 0x74, 0x68, 0x31, 0x5f, 0x64, 0x61, 0x74, 0x61, 0x18, 0xb9, 0x17, 0x20, 0x01, 0x28, 0x0b, + 0x32, 0x1f, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, + 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x45, 0x74, 0x68, 0x31, 0x44, 0x61, 0x74, + 0x61, 0x52, 0x08, 0x65, 0x74, 0x68, 0x31, 0x44, 0x61, 0x74, 0x61, 0x12, 0x52, 0x0a, 0x0f, 0x65, + 0x74, 0x68, 0x31, 0x5f, 0x64, 0x61, 0x74, 0x61, 0x5f, 0x76, 0x6f, 0x74, 0x65, 0x73, 0x18, 0xba, + 0x17, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1f, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, + 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x45, 0x74, + 0x68, 0x31, 0x44, 0x61, 0x74, 0x61, 0x42, 0x08, 0x92, 0xb5, 0x18, 0x04, 0x32, 0x30, 0x34, 0x38, + 0x52, 0x0d, 0x65, 0x74, 0x68, 0x31, 0x44, 0x61, 0x74, 0x61, 0x56, 0x6f, 0x74, 0x65, 0x73, 0x12, + 0x2d, 0x0a, 0x12, 0x65, 0x74, 0x68, 0x31, 0x5f, 0x64, 0x65, 
0x70, 0x6f, 0x73, 0x69, 0x74, 0x5f, + 0x69, 0x6e, 0x64, 0x65, 0x78, 0x18, 0xbb, 0x17, 0x20, 0x01, 0x28, 0x04, 0x52, 0x10, 0x65, 0x74, + 0x68, 0x31, 0x44, 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x12, 0x54, + 0x0a, 0x0a, 0x76, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, 0x73, 0x18, 0xa1, 0x1f, 0x20, + 0x03, 0x28, 0x0b, 0x32, 0x20, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, + 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x56, 0x61, 0x6c, 0x69, + 0x64, 0x61, 0x74, 0x6f, 0x72, 0x42, 0x11, 0x92, 0xb5, 0x18, 0x0d, 0x31, 0x30, 0x39, 0x39, 0x35, + 0x31, 0x31, 0x36, 0x32, 0x37, 0x37, 0x37, 0x36, 0x52, 0x0a, 0x76, 0x61, 0x6c, 0x69, 0x64, 0x61, + 0x74, 0x6f, 0x72, 0x73, 0x12, 0x2e, 0x0a, 0x08, 0x62, 0x61, 0x6c, 0x61, 0x6e, 0x63, 0x65, 0x73, + 0x18, 0xa2, 0x1f, 0x20, 0x03, 0x28, 0x04, 0x42, 0x11, 0x92, 0xb5, 0x18, 0x0d, 0x31, 0x30, 0x39, + 0x39, 0x35, 0x31, 0x31, 0x36, 0x32, 0x37, 0x37, 0x37, 0x36, 0x52, 0x08, 0x62, 0x61, 0x6c, 0x61, + 0x6e, 0x63, 0x65, 0x73, 0x12, 0x30, 0x0a, 0x0c, 0x72, 0x61, 0x6e, 0x64, 0x61, 0x6f, 0x5f, 0x6d, + 0x69, 0x78, 0x65, 0x73, 0x18, 0x89, 0x27, 0x20, 0x03, 0x28, 0x0c, 0x42, 0x0c, 0x8a, 0xb5, 0x18, + 0x08, 0x36, 0x35, 0x35, 0x33, 0x36, 0x2c, 0x33, 0x32, 0x52, 0x0b, 0x72, 0x61, 0x6e, 0x64, 0x61, + 0x6f, 0x4d, 0x69, 0x78, 0x65, 0x73, 0x12, 0x27, 0x0a, 0x09, 0x73, 0x6c, 0x61, 0x73, 0x68, 0x69, + 0x6e, 0x67, 0x73, 0x18, 0xf1, 0x2e, 0x20, 0x03, 0x28, 0x04, 0x42, 0x08, 0x8a, 0xb5, 0x18, 0x04, + 0x38, 0x31, 0x39, 0x32, 0x52, 0x09, 0x73, 0x6c, 0x61, 0x73, 0x68, 0x69, 0x6e, 0x67, 0x73, 0x12, + 0x54, 0x0a, 0x1c, 0x70, 0x72, 0x65, 0x76, 0x69, 0x6f, 0x75, 0x73, 0x5f, 0x65, 0x70, 0x6f, 0x63, + 0x68, 0x5f, 0x70, 0x61, 0x72, 0x74, 0x69, 0x63, 0x69, 0x70, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x18, + 0xd9, 0x36, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x11, 0x92, 0xb5, 0x18, 0x0d, 0x31, 0x30, 0x39, 0x39, + 0x35, 0x31, 0x31, 0x36, 0x32, 0x37, 0x37, 0x37, 0x36, 0x52, 0x1a, 0x70, 0x72, 0x65, 0x76, 0x69, + 0x6f, 0x75, 0x73, 0x45, 0x70, 0x6f, 0x63, 0x68, 0x50, 0x61, 0x72, 0x74, 0x69, 0x63, 0x69, 0x70, + 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x52, 0x0a, 0x1b, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, + 0x5f, 0x65, 0x70, 0x6f, 0x63, 0x68, 0x5f, 0x70, 0x61, 0x72, 0x74, 0x69, 0x63, 0x69, 0x70, 0x61, + 0x74, 0x69, 0x6f, 0x6e, 0x18, 0xda, 0x36, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x11, 0x92, 0xb5, 0x18, + 0x0d, 0x31, 0x30, 0x39, 0x39, 0x35, 0x31, 0x31, 0x36, 0x32, 0x37, 0x37, 0x37, 0x36, 0x52, 0x19, + 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x45, 0x70, 0x6f, 0x63, 0x68, 0x50, 0x61, 0x72, 0x74, + 0x69, 0x63, 0x69, 0x70, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x67, 0x0a, 0x12, 0x6a, 0x75, 0x73, + 0x74, 0x69, 0x66, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x62, 0x69, 0x74, 0x73, 0x18, + 0xc1, 0x3e, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x37, 0x82, 0xb5, 0x18, 0x2e, 0x67, 0x69, 0x74, 0x68, + 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x67, 0x6f, 0x2d, 0x62, 0x69, 0x74, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x2e, 0x42, 0x69, 0x74, 0x76, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x34, 0x8a, 0xb5, 0x18, 0x01, 0x31, 0x52, 0x11, 0x6a, 0x75, 0x73, 0x74, 0x69, 0x66, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x42, 0x69, @@ -3458,88 +3118,428 @@ var file_proto_prysm_v1alpha1_beacon_state_proto_rawDesc = []byte{ 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x48, 0x69, 0x73, 0x74, 0x6f, 0x72, 0x69, 0x63, 0x61, 0x6c, 0x53, 0x75, 0x6d, 0x6d, 0x61, 0x72, 0x79, 0x42, 0x0c, 0x92, 0xb5, 0x18, 
0x08, 0x31, 0x36, 0x37, 0x37, 0x37, 0x32, 0x31, 0x36, 0x52, 0x13, 0x68, 0x69, 0x73, 0x74, - 0x6f, 0x72, 0x69, 0x63, 0x61, 0x6c, 0x53, 0x75, 0x6d, 0x6d, 0x61, 0x72, 0x69, 0x65, 0x73, 0x12, - 0x40, 0x0a, 0x1c, 0x64, 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x5f, 0x72, 0x65, 0x71, 0x75, 0x65, - 0x73, 0x74, 0x73, 0x5f, 0x73, 0x74, 0x61, 0x72, 0x74, 0x5f, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x18, - 0xe1, 0x5d, 0x20, 0x01, 0x28, 0x04, 0x52, 0x19, 0x64, 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x52, - 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x73, 0x53, 0x74, 0x61, 0x72, 0x74, 0x49, 0x6e, 0x64, 0x65, - 0x78, 0x12, 0x82, 0x01, 0x0a, 0x1a, 0x64, 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x5f, 0x62, 0x61, - 0x6c, 0x61, 0x6e, 0x63, 0x65, 0x5f, 0x74, 0x6f, 0x5f, 0x63, 0x6f, 0x6e, 0x73, 0x75, 0x6d, 0x65, - 0x18, 0xe2, 0x5d, 0x20, 0x01, 0x28, 0x04, 0x42, 0x44, 0x82, 0xb5, 0x18, 0x40, 0x67, 0x69, 0x74, - 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, - 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, - 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, - 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x47, 0x77, 0x65, 0x69, 0x52, 0x17, 0x64, - 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x42, 0x61, 0x6c, 0x61, 0x6e, 0x63, 0x65, 0x54, 0x6f, 0x43, - 0x6f, 0x6e, 0x73, 0x75, 0x6d, 0x65, 0x12, 0x7c, 0x0a, 0x17, 0x65, 0x78, 0x69, 0x74, 0x5f, 0x62, - 0x61, 0x6c, 0x61, 0x6e, 0x63, 0x65, 0x5f, 0x74, 0x6f, 0x5f, 0x63, 0x6f, 0x6e, 0x73, 0x75, 0x6d, - 0x65, 0x18, 0xe3, 0x5d, 0x20, 0x01, 0x28, 0x04, 0x42, 0x44, 0x82, 0xb5, 0x18, 0x40, 0x67, 0x69, - 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, - 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, - 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, - 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x47, 0x77, 0x65, 0x69, 0x52, 0x14, - 0x65, 0x78, 0x69, 0x74, 0x42, 0x61, 0x6c, 0x61, 0x6e, 0x63, 0x65, 0x54, 0x6f, 0x43, 0x6f, 0x6e, - 0x73, 0x75, 0x6d, 0x65, 0x12, 0x76, 0x0a, 0x13, 0x65, 0x61, 0x72, 0x6c, 0x69, 0x65, 0x73, 0x74, - 0x5f, 0x65, 0x78, 0x69, 0x74, 0x5f, 0x65, 0x70, 0x6f, 0x63, 0x68, 0x18, 0xe4, 0x5d, 0x20, 0x01, - 0x28, 0x04, 0x42, 0x45, 0x82, 0xb5, 0x18, 0x41, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, - 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, - 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, - 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, - 0x76, 0x65, 0x73, 0x2e, 0x45, 0x70, 0x6f, 0x63, 0x68, 0x52, 0x11, 0x65, 0x61, 0x72, 0x6c, 0x69, - 0x65, 0x73, 0x74, 0x45, 0x78, 0x69, 0x74, 0x45, 0x70, 0x6f, 0x63, 0x68, 0x12, 0x8e, 0x01, 0x0a, - 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x6f, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x62, - 0x61, 0x6c, 0x61, 0x6e, 0x63, 0x65, 0x5f, 0x74, 0x6f, 0x5f, 0x63, 0x6f, 0x6e, 0x73, 0x75, 0x6d, - 0x65, 0x18, 0xe5, 0x5d, 0x20, 0x01, 0x28, 0x04, 0x42, 0x44, 0x82, 0xb5, 0x18, 0x40, 0x67, 0x69, - 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, - 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, - 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, - 0x72, 0x69, 0x6d, 0x69, 0x74, 
0x69, 0x76, 0x65, 0x73, 0x2e, 0x47, 0x77, 0x65, 0x69, 0x52, 0x1d, - 0x63, 0x6f, 0x6e, 0x73, 0x6f, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x42, 0x61, 0x6c, - 0x61, 0x6e, 0x63, 0x65, 0x54, 0x6f, 0x43, 0x6f, 0x6e, 0x73, 0x75, 0x6d, 0x65, 0x12, 0x88, 0x01, - 0x0a, 0x1c, 0x65, 0x61, 0x72, 0x6c, 0x69, 0x65, 0x73, 0x74, 0x5f, 0x63, 0x6f, 0x6e, 0x73, 0x6f, - 0x6c, 0x69, 0x64, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x65, 0x70, 0x6f, 0x63, 0x68, 0x18, 0xe6, - 0x5d, 0x20, 0x01, 0x28, 0x04, 0x42, 0x45, 0x82, 0xb5, 0x18, 0x41, 0x67, 0x69, 0x74, 0x68, 0x75, - 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, - 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, - 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, - 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x45, 0x70, 0x6f, 0x63, 0x68, 0x52, 0x1a, 0x65, 0x61, - 0x72, 0x6c, 0x69, 0x65, 0x73, 0x74, 0x43, 0x6f, 0x6e, 0x73, 0x6f, 0x6c, 0x69, 0x64, 0x61, 0x74, - 0x69, 0x6f, 0x6e, 0x45, 0x70, 0x6f, 0x63, 0x68, 0x12, 0x60, 0x0a, 0x10, 0x70, 0x65, 0x6e, 0x64, - 0x69, 0x6e, 0x67, 0x5f, 0x64, 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x73, 0x18, 0xe7, 0x5d, 0x20, - 0x03, 0x28, 0x0b, 0x32, 0x25, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, - 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x50, 0x65, 0x6e, 0x64, - 0x69, 0x6e, 0x67, 0x44, 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x42, 0x0d, 0x92, 0xb5, 0x18, 0x09, - 0x31, 0x33, 0x34, 0x32, 0x31, 0x37, 0x37, 0x32, 0x38, 0x52, 0x0f, 0x70, 0x65, 0x6e, 0x64, 0x69, - 0x6e, 0x67, 0x44, 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x73, 0x12, 0x7f, 0x0a, 0x1b, 0x70, 0x65, - 0x6e, 0x64, 0x69, 0x6e, 0x67, 0x5f, 0x70, 0x61, 0x72, 0x74, 0x69, 0x61, 0x6c, 0x5f, 0x77, 0x69, - 0x74, 0x68, 0x64, 0x72, 0x61, 0x77, 0x61, 0x6c, 0x73, 0x18, 0xe8, 0x5d, 0x20, 0x03, 0x28, 0x0b, - 0x32, 0x2f, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, - 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x50, 0x65, 0x6e, 0x64, 0x69, 0x6e, 0x67, - 0x50, 0x61, 0x72, 0x74, 0x69, 0x61, 0x6c, 0x57, 0x69, 0x74, 0x68, 0x64, 0x72, 0x61, 0x77, 0x61, - 0x6c, 0x42, 0x0d, 0x92, 0xb5, 0x18, 0x09, 0x31, 0x33, 0x34, 0x32, 0x31, 0x37, 0x37, 0x32, 0x38, - 0x52, 0x19, 0x70, 0x65, 0x6e, 0x64, 0x69, 0x6e, 0x67, 0x50, 0x61, 0x72, 0x74, 0x69, 0x61, 0x6c, - 0x57, 0x69, 0x74, 0x68, 0x64, 0x72, 0x61, 0x77, 0x61, 0x6c, 0x73, 0x12, 0x6f, 0x0a, 0x16, 0x70, - 0x65, 0x6e, 0x64, 0x69, 0x6e, 0x67, 0x5f, 0x63, 0x6f, 0x6e, 0x73, 0x6f, 0x6c, 0x69, 0x64, 0x61, - 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x18, 0xe9, 0x5d, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x2b, 0x2e, 0x65, + 0x6f, 0x72, 0x69, 0x63, 0x61, 0x6c, 0x53, 0x75, 0x6d, 0x6d, 0x61, 0x72, 0x69, 0x65, 0x73, 0x22, + 0xb1, 0x19, 0x0a, 0x12, 0x42, 0x65, 0x61, 0x63, 0x6f, 0x6e, 0x53, 0x74, 0x61, 0x74, 0x65, 0x45, + 0x6c, 0x65, 0x63, 0x74, 0x72, 0x61, 0x12, 0x22, 0x0a, 0x0c, 0x67, 0x65, 0x6e, 0x65, 0x73, 0x69, + 0x73, 0x5f, 0x74, 0x69, 0x6d, 0x65, 0x18, 0xe9, 0x07, 0x20, 0x01, 0x28, 0x04, 0x52, 0x0b, 0x67, + 0x65, 0x6e, 0x65, 0x73, 0x69, 0x73, 0x54, 0x69, 0x6d, 0x65, 0x12, 0x3f, 0x0a, 0x17, 0x67, 0x65, + 0x6e, 0x65, 0x73, 0x69, 0x73, 0x5f, 0x76, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, 0x73, + 0x5f, 0x72, 0x6f, 0x6f, 0x74, 0x18, 0xea, 0x07, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x06, 0x8a, 0xb5, + 0x18, 0x02, 0x33, 0x32, 0x52, 0x15, 0x67, 0x65, 0x6e, 0x65, 0x73, 0x69, 0x73, 0x56, 0x61, 0x6c, + 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, 0x73, 0x52, 0x6f, 
0x6f, 0x74, 0x12, 0x59, 0x0a, 0x04, 0x73, + 0x6c, 0x6f, 0x74, 0x18, 0xeb, 0x07, 0x20, 0x01, 0x28, 0x04, 0x42, 0x44, 0x82, 0xb5, 0x18, 0x40, + 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, + 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, + 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, + 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x53, 0x6c, 0x6f, 0x74, + 0x52, 0x04, 0x73, 0x6c, 0x6f, 0x74, 0x12, 0x30, 0x0a, 0x04, 0x66, 0x6f, 0x72, 0x6b, 0x18, 0xec, + 0x07, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1b, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, + 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x46, 0x6f, + 0x72, 0x6b, 0x52, 0x04, 0x66, 0x6f, 0x72, 0x6b, 0x12, 0x59, 0x0a, 0x13, 0x6c, 0x61, 0x74, 0x65, + 0x73, 0x74, 0x5f, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x5f, 0x68, 0x65, 0x61, 0x64, 0x65, 0x72, 0x18, + 0xd1, 0x0f, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x28, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, + 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x42, + 0x65, 0x61, 0x63, 0x6f, 0x6e, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x48, 0x65, 0x61, 0x64, 0x65, 0x72, + 0x52, 0x11, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x74, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x48, 0x65, 0x61, + 0x64, 0x65, 0x72, 0x12, 0x2d, 0x0a, 0x0b, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x5f, 0x72, 0x6f, 0x6f, + 0x74, 0x73, 0x18, 0xd2, 0x0f, 0x20, 0x03, 0x28, 0x0c, 0x42, 0x0b, 0x8a, 0xb5, 0x18, 0x07, 0x38, + 0x31, 0x39, 0x32, 0x2c, 0x33, 0x32, 0x52, 0x0a, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x52, 0x6f, 0x6f, + 0x74, 0x73, 0x12, 0x2d, 0x0a, 0x0b, 0x73, 0x74, 0x61, 0x74, 0x65, 0x5f, 0x72, 0x6f, 0x6f, 0x74, + 0x73, 0x18, 0xd3, 0x0f, 0x20, 0x03, 0x28, 0x0c, 0x42, 0x0b, 0x8a, 0xb5, 0x18, 0x07, 0x38, 0x31, + 0x39, 0x32, 0x2c, 0x33, 0x32, 0x52, 0x0a, 0x73, 0x74, 0x61, 0x74, 0x65, 0x52, 0x6f, 0x6f, 0x74, + 0x73, 0x12, 0x40, 0x0a, 0x10, 0x68, 0x69, 0x73, 0x74, 0x6f, 0x72, 0x69, 0x63, 0x61, 0x6c, 0x5f, + 0x72, 0x6f, 0x6f, 0x74, 0x73, 0x18, 0xd4, 0x0f, 0x20, 0x03, 0x28, 0x0c, 0x42, 0x14, 0x8a, 0xb5, + 0x18, 0x04, 0x3f, 0x2c, 0x33, 0x32, 0x92, 0xb5, 0x18, 0x08, 0x31, 0x36, 0x37, 0x37, 0x37, 0x32, + 0x31, 0x36, 0x52, 0x0f, 0x68, 0x69, 0x73, 0x74, 0x6f, 0x72, 0x69, 0x63, 0x61, 0x6c, 0x52, 0x6f, + 0x6f, 0x74, 0x73, 0x12, 0x3d, 0x0a, 0x09, 0x65, 0x74, 0x68, 0x31, 0x5f, 0x64, 0x61, 0x74, 0x61, + 0x18, 0xb9, 0x17, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1f, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, + 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, + 0x45, 0x74, 0x68, 0x31, 0x44, 0x61, 0x74, 0x61, 0x52, 0x08, 0x65, 0x74, 0x68, 0x31, 0x44, 0x61, + 0x74, 0x61, 0x12, 0x52, 0x0a, 0x0f, 0x65, 0x74, 0x68, 0x31, 0x5f, 0x64, 0x61, 0x74, 0x61, 0x5f, + 0x76, 0x6f, 0x74, 0x65, 0x73, 0x18, 0xba, 0x17, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1f, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, - 0x70, 0x68, 0x61, 0x31, 0x2e, 0x50, 0x65, 0x6e, 0x64, 0x69, 0x6e, 0x67, 0x43, 0x6f, 0x6e, 0x73, - 0x6f, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x42, 0x0a, 0x92, 0xb5, 0x18, 0x06, 0x32, - 0x36, 0x32, 0x31, 0x34, 0x34, 0x52, 0x15, 0x70, 0x65, 0x6e, 0x64, 0x69, 0x6e, 0x67, 0x43, 0x6f, - 0x6e, 0x73, 0x6f, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x36, 0x0a, 0x12, - 0x70, 0x72, 0x6f, 0x70, 0x6f, 0x73, 0x65, 0x72, 0x5f, 0x6c, 0x6f, 0x6f, 0x6b, 
0x61, 0x68, 0x65, - 0x61, 0x64, 0x18, 0xc9, 0x65, 0x20, 0x03, 0x28, 0x04, 0x42, 0x06, 0x8a, 0xb5, 0x18, 0x02, 0x36, - 0x34, 0x52, 0x11, 0x70, 0x72, 0x6f, 0x70, 0x6f, 0x73, 0x65, 0x72, 0x4c, 0x6f, 0x6f, 0x6b, 0x61, - 0x68, 0x65, 0x61, 0x64, 0x42, 0x9a, 0x01, 0x0a, 0x19, 0x6f, 0x72, 0x67, 0x2e, 0x65, 0x74, 0x68, + 0x70, 0x68, 0x61, 0x31, 0x2e, 0x45, 0x74, 0x68, 0x31, 0x44, 0x61, 0x74, 0x61, 0x42, 0x08, 0x92, + 0xb5, 0x18, 0x04, 0x32, 0x30, 0x34, 0x38, 0x52, 0x0d, 0x65, 0x74, 0x68, 0x31, 0x44, 0x61, 0x74, + 0x61, 0x56, 0x6f, 0x74, 0x65, 0x73, 0x12, 0x2d, 0x0a, 0x12, 0x65, 0x74, 0x68, 0x31, 0x5f, 0x64, + 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x5f, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x18, 0xbb, 0x17, 0x20, + 0x01, 0x28, 0x04, 0x52, 0x10, 0x65, 0x74, 0x68, 0x31, 0x44, 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, + 0x49, 0x6e, 0x64, 0x65, 0x78, 0x12, 0x54, 0x0a, 0x0a, 0x76, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, + 0x6f, 0x72, 0x73, 0x18, 0xa1, 0x1f, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x20, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, - 0x61, 0x31, 0x42, 0x10, 0x42, 0x65, 0x61, 0x63, 0x6f, 0x6e, 0x53, 0x74, 0x61, 0x74, 0x65, 0x50, - 0x72, 0x6f, 0x74, 0x6f, 0x50, 0x01, 0x5a, 0x39, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, - 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, - 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2f, 0x70, - 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x3b, 0x65, 0x74, - 0x68, 0xaa, 0x02, 0x15, 0x45, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x45, 0x74, 0x68, - 0x2e, 0x56, 0x31, 0x41, 0x6c, 0x70, 0x68, 0x61, 0x31, 0xca, 0x02, 0x15, 0x45, 0x74, 0x68, 0x65, - 0x72, 0x65, 0x75, 0x6d, 0x5c, 0x45, 0x74, 0x68, 0x5c, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, - 0x31, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, + 0x61, 0x31, 0x2e, 0x56, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, 0x42, 0x11, 0x92, 0xb5, + 0x18, 0x0d, 0x31, 0x30, 0x39, 0x39, 0x35, 0x31, 0x31, 0x36, 0x32, 0x37, 0x37, 0x37, 0x36, 0x52, + 0x0a, 0x76, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, 0x73, 0x12, 0x2e, 0x0a, 0x08, 0x62, + 0x61, 0x6c, 0x61, 0x6e, 0x63, 0x65, 0x73, 0x18, 0xa2, 0x1f, 0x20, 0x03, 0x28, 0x04, 0x42, 0x11, + 0x92, 0xb5, 0x18, 0x0d, 0x31, 0x30, 0x39, 0x39, 0x35, 0x31, 0x31, 0x36, 0x32, 0x37, 0x37, 0x37, + 0x36, 0x52, 0x08, 0x62, 0x61, 0x6c, 0x61, 0x6e, 0x63, 0x65, 0x73, 0x12, 0x30, 0x0a, 0x0c, 0x72, + 0x61, 0x6e, 0x64, 0x61, 0x6f, 0x5f, 0x6d, 0x69, 0x78, 0x65, 0x73, 0x18, 0x89, 0x27, 0x20, 0x03, + 0x28, 0x0c, 0x42, 0x0c, 0x8a, 0xb5, 0x18, 0x08, 0x36, 0x35, 0x35, 0x33, 0x36, 0x2c, 0x33, 0x32, + 0x52, 0x0b, 0x72, 0x61, 0x6e, 0x64, 0x61, 0x6f, 0x4d, 0x69, 0x78, 0x65, 0x73, 0x12, 0x27, 0x0a, + 0x09, 0x73, 0x6c, 0x61, 0x73, 0x68, 0x69, 0x6e, 0x67, 0x73, 0x18, 0xf1, 0x2e, 0x20, 0x03, 0x28, + 0x04, 0x42, 0x08, 0x8a, 0xb5, 0x18, 0x04, 0x38, 0x31, 0x39, 0x32, 0x52, 0x09, 0x73, 0x6c, 0x61, + 0x73, 0x68, 0x69, 0x6e, 0x67, 0x73, 0x12, 0x54, 0x0a, 0x1c, 0x70, 0x72, 0x65, 0x76, 0x69, 0x6f, + 0x75, 0x73, 0x5f, 0x65, 0x70, 0x6f, 0x63, 0x68, 0x5f, 0x70, 0x61, 0x72, 0x74, 0x69, 0x63, 0x69, + 0x70, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0xd9, 0x36, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x11, 0x92, + 0xb5, 0x18, 0x0d, 0x31, 0x30, 0x39, 0x39, 0x35, 0x31, 0x31, 0x36, 0x32, 0x37, 0x37, 0x37, 0x36, + 0x52, 0x1a, 0x70, 0x72, 0x65, 0x76, 0x69, 0x6f, 0x75, 0x73, 0x45, 0x70, 0x6f, 0x63, 0x68, 0x50, + 0x61, 0x72, 0x74, 0x69, 0x63, 0x69, 0x70, 0x61, 
0x74, 0x69, 0x6f, 0x6e, 0x12, 0x52, 0x0a, 0x1b, + 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x5f, 0x65, 0x70, 0x6f, 0x63, 0x68, 0x5f, 0x70, 0x61, + 0x72, 0x74, 0x69, 0x63, 0x69, 0x70, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0xda, 0x36, 0x20, 0x01, + 0x28, 0x0c, 0x42, 0x11, 0x92, 0xb5, 0x18, 0x0d, 0x31, 0x30, 0x39, 0x39, 0x35, 0x31, 0x31, 0x36, + 0x32, 0x37, 0x37, 0x37, 0x36, 0x52, 0x19, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x45, 0x70, + 0x6f, 0x63, 0x68, 0x50, 0x61, 0x72, 0x74, 0x69, 0x63, 0x69, 0x70, 0x61, 0x74, 0x69, 0x6f, 0x6e, + 0x12, 0x67, 0x0a, 0x12, 0x6a, 0x75, 0x73, 0x74, 0x69, 0x66, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, + 0x6e, 0x5f, 0x62, 0x69, 0x74, 0x73, 0x18, 0xc1, 0x3e, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x37, 0x82, + 0xb5, 0x18, 0x2e, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, + 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x67, 0x6f, 0x2d, 0x62, 0x69, + 0x74, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x2e, 0x42, 0x69, 0x74, 0x76, 0x65, 0x63, 0x74, 0x6f, 0x72, + 0x34, 0x8a, 0xb5, 0x18, 0x01, 0x31, 0x52, 0x11, 0x6a, 0x75, 0x73, 0x74, 0x69, 0x66, 0x69, 0x63, + 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x42, 0x69, 0x74, 0x73, 0x12, 0x66, 0x0a, 0x1d, 0x70, 0x72, 0x65, + 0x76, 0x69, 0x6f, 0x75, 0x73, 0x5f, 0x6a, 0x75, 0x73, 0x74, 0x69, 0x66, 0x69, 0x65, 0x64, 0x5f, + 0x63, 0x68, 0x65, 0x63, 0x6b, 0x70, 0x6f, 0x69, 0x6e, 0x74, 0x18, 0xc2, 0x3e, 0x20, 0x01, 0x28, + 0x0b, 0x32, 0x21, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, + 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x43, 0x68, 0x65, 0x63, 0x6b, 0x70, + 0x6f, 0x69, 0x6e, 0x74, 0x52, 0x1b, 0x70, 0x72, 0x65, 0x76, 0x69, 0x6f, 0x75, 0x73, 0x4a, 0x75, + 0x73, 0x74, 0x69, 0x66, 0x69, 0x65, 0x64, 0x43, 0x68, 0x65, 0x63, 0x6b, 0x70, 0x6f, 0x69, 0x6e, + 0x74, 0x12, 0x64, 0x0a, 0x1c, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x5f, 0x6a, 0x75, 0x73, + 0x74, 0x69, 0x66, 0x69, 0x65, 0x64, 0x5f, 0x63, 0x68, 0x65, 0x63, 0x6b, 0x70, 0x6f, 0x69, 0x6e, + 0x74, 0x18, 0xc3, 0x3e, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x21, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, + 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, + 0x2e, 0x43, 0x68, 0x65, 0x63, 0x6b, 0x70, 0x6f, 0x69, 0x6e, 0x74, 0x52, 0x1a, 0x63, 0x75, 0x72, + 0x72, 0x65, 0x6e, 0x74, 0x4a, 0x75, 0x73, 0x74, 0x69, 0x66, 0x69, 0x65, 0x64, 0x43, 0x68, 0x65, + 0x63, 0x6b, 0x70, 0x6f, 0x69, 0x6e, 0x74, 0x12, 0x55, 0x0a, 0x14, 0x66, 0x69, 0x6e, 0x61, 0x6c, + 0x69, 0x7a, 0x65, 0x64, 0x5f, 0x63, 0x68, 0x65, 0x63, 0x6b, 0x70, 0x6f, 0x69, 0x6e, 0x74, 0x18, + 0xc4, 0x3e, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x21, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, + 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x43, + 0x68, 0x65, 0x63, 0x6b, 0x70, 0x6f, 0x69, 0x6e, 0x74, 0x52, 0x13, 0x66, 0x69, 0x6e, 0x61, 0x6c, + 0x69, 0x7a, 0x65, 0x64, 0x43, 0x68, 0x65, 0x63, 0x6b, 0x70, 0x6f, 0x69, 0x6e, 0x74, 0x12, 0x3f, + 0x0a, 0x11, 0x69, 0x6e, 0x61, 0x63, 0x74, 0x69, 0x76, 0x69, 0x74, 0x79, 0x5f, 0x73, 0x63, 0x6f, + 0x72, 0x65, 0x73, 0x18, 0xa9, 0x46, 0x20, 0x03, 0x28, 0x04, 0x42, 0x11, 0x92, 0xb5, 0x18, 0x0d, + 0x31, 0x30, 0x39, 0x39, 0x35, 0x31, 0x31, 0x36, 0x32, 0x37, 0x37, 0x37, 0x36, 0x52, 0x10, 0x69, + 0x6e, 0x61, 0x63, 0x74, 0x69, 0x76, 0x69, 0x74, 0x79, 0x53, 0x63, 0x6f, 0x72, 0x65, 0x73, 0x12, + 0x5b, 0x0a, 0x16, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x5f, 0x73, 0x79, 0x6e, 0x63, 0x5f, + 0x63, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x74, 0x65, 0x65, 0x18, 0xaa, 0x46, 
0x20, 0x01, 0x28, 0x0b, + 0x32, 0x24, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, + 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x53, 0x79, 0x6e, 0x63, 0x43, 0x6f, 0x6d, + 0x6d, 0x69, 0x74, 0x74, 0x65, 0x65, 0x52, 0x14, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x53, + 0x79, 0x6e, 0x63, 0x43, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x74, 0x65, 0x65, 0x12, 0x55, 0x0a, 0x13, + 0x6e, 0x65, 0x78, 0x74, 0x5f, 0x73, 0x79, 0x6e, 0x63, 0x5f, 0x63, 0x6f, 0x6d, 0x6d, 0x69, 0x74, + 0x74, 0x65, 0x65, 0x18, 0xab, 0x46, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x24, 0x2e, 0x65, 0x74, 0x68, + 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, + 0x61, 0x31, 0x2e, 0x53, 0x79, 0x6e, 0x63, 0x43, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x74, 0x65, 0x65, + 0x52, 0x11, 0x6e, 0x65, 0x78, 0x74, 0x53, 0x79, 0x6e, 0x63, 0x43, 0x6f, 0x6d, 0x6d, 0x69, 0x74, + 0x74, 0x65, 0x65, 0x12, 0x77, 0x0a, 0x1f, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x74, 0x5f, 0x65, 0x78, + 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x70, 0x61, 0x79, 0x6c, 0x6f, 0x61, 0x64, 0x5f, + 0x68, 0x65, 0x61, 0x64, 0x65, 0x72, 0x18, 0x91, 0x4e, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x2f, 0x2e, + 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x6e, 0x67, 0x69, 0x6e, 0x65, 0x2e, + 0x76, 0x31, 0x2e, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x50, 0x61, 0x79, 0x6c, + 0x6f, 0x61, 0x64, 0x48, 0x65, 0x61, 0x64, 0x65, 0x72, 0x44, 0x65, 0x6e, 0x65, 0x62, 0x52, 0x1c, + 0x6c, 0x61, 0x74, 0x65, 0x73, 0x74, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x50, + 0x61, 0x79, 0x6c, 0x6f, 0x61, 0x64, 0x48, 0x65, 0x61, 0x64, 0x65, 0x72, 0x12, 0x33, 0x0a, 0x15, + 0x6e, 0x65, 0x78, 0x74, 0x5f, 0x77, 0x69, 0x74, 0x68, 0x64, 0x72, 0x61, 0x77, 0x61, 0x6c, 0x5f, + 0x69, 0x6e, 0x64, 0x65, 0x78, 0x18, 0xf9, 0x55, 0x20, 0x01, 0x28, 0x04, 0x52, 0x13, 0x6e, 0x65, + 0x78, 0x74, 0x57, 0x69, 0x74, 0x68, 0x64, 0x72, 0x61, 0x77, 0x61, 0x6c, 0x49, 0x6e, 0x64, 0x65, + 0x78, 0x12, 0x96, 0x01, 0x0a, 0x1f, 0x6e, 0x65, 0x78, 0x74, 0x5f, 0x77, 0x69, 0x74, 0x68, 0x64, + 0x72, 0x61, 0x77, 0x61, 0x6c, 0x5f, 0x76, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, 0x5f, + 0x69, 0x6e, 0x64, 0x65, 0x78, 0x18, 0xfa, 0x55, 0x20, 0x01, 0x28, 0x04, 0x42, 0x4e, 0x82, 0xb5, + 0x18, 0x4a, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, + 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, + 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, + 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x56, 0x61, + 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x52, 0x1c, 0x6e, 0x65, + 0x78, 0x74, 0x57, 0x69, 0x74, 0x68, 0x64, 0x72, 0x61, 0x77, 0x61, 0x6c, 0x56, 0x61, 0x6c, 0x69, + 0x64, 0x61, 0x74, 0x6f, 0x72, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x12, 0x6a, 0x0a, 0x14, 0x68, 0x69, + 0x73, 0x74, 0x6f, 0x72, 0x69, 0x63, 0x61, 0x6c, 0x5f, 0x73, 0x75, 0x6d, 0x6d, 0x61, 0x72, 0x69, + 0x65, 0x73, 0x18, 0xfb, 0x55, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x28, 0x2e, 0x65, 0x74, 0x68, 0x65, + 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, + 0x31, 0x2e, 0x48, 0x69, 0x73, 0x74, 0x6f, 0x72, 0x69, 0x63, 0x61, 0x6c, 0x53, 0x75, 0x6d, 0x6d, + 0x61, 0x72, 0x79, 0x42, 0x0c, 0x92, 0xb5, 0x18, 0x08, 0x31, 0x36, 0x37, 0x37, 0x37, 0x32, 0x31, + 0x36, 0x52, 0x13, 0x68, 0x69, 0x73, 0x74, 0x6f, 0x72, 0x69, 0x63, 0x61, 0x6c, 0x53, 0x75, 0x6d, + 
0x6d, 0x61, 0x72, 0x69, 0x65, 0x73, 0x12, 0x40, 0x0a, 0x1c, 0x64, 0x65, 0x70, 0x6f, 0x73, 0x69, + 0x74, 0x5f, 0x72, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x73, 0x5f, 0x73, 0x74, 0x61, 0x72, 0x74, + 0x5f, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x18, 0xe1, 0x5d, 0x20, 0x01, 0x28, 0x04, 0x52, 0x19, 0x64, + 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x73, 0x53, 0x74, + 0x61, 0x72, 0x74, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x12, 0x82, 0x01, 0x0a, 0x1a, 0x64, 0x65, 0x70, + 0x6f, 0x73, 0x69, 0x74, 0x5f, 0x62, 0x61, 0x6c, 0x61, 0x6e, 0x63, 0x65, 0x5f, 0x74, 0x6f, 0x5f, + 0x63, 0x6f, 0x6e, 0x73, 0x75, 0x6d, 0x65, 0x18, 0xe2, 0x5d, 0x20, 0x01, 0x28, 0x04, 0x42, 0x44, + 0x82, 0xb5, 0x18, 0x40, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, + 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, + 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, + 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, + 0x47, 0x77, 0x65, 0x69, 0x52, 0x17, 0x64, 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x42, 0x61, 0x6c, + 0x61, 0x6e, 0x63, 0x65, 0x54, 0x6f, 0x43, 0x6f, 0x6e, 0x73, 0x75, 0x6d, 0x65, 0x12, 0x7c, 0x0a, + 0x17, 0x65, 0x78, 0x69, 0x74, 0x5f, 0x62, 0x61, 0x6c, 0x61, 0x6e, 0x63, 0x65, 0x5f, 0x74, 0x6f, + 0x5f, 0x63, 0x6f, 0x6e, 0x73, 0x75, 0x6d, 0x65, 0x18, 0xe3, 0x5d, 0x20, 0x01, 0x28, 0x04, 0x42, + 0x44, 0x82, 0xb5, 0x18, 0x40, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, + 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, + 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, + 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, + 0x2e, 0x47, 0x77, 0x65, 0x69, 0x52, 0x14, 0x65, 0x78, 0x69, 0x74, 0x42, 0x61, 0x6c, 0x61, 0x6e, + 0x63, 0x65, 0x54, 0x6f, 0x43, 0x6f, 0x6e, 0x73, 0x75, 0x6d, 0x65, 0x12, 0x76, 0x0a, 0x13, 0x65, + 0x61, 0x72, 0x6c, 0x69, 0x65, 0x73, 0x74, 0x5f, 0x65, 0x78, 0x69, 0x74, 0x5f, 0x65, 0x70, 0x6f, + 0x63, 0x68, 0x18, 0xe4, 0x5d, 0x20, 0x01, 0x28, 0x04, 0x42, 0x45, 0x82, 0xb5, 0x18, 0x41, 0x67, + 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, + 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, + 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, + 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x45, 0x70, 0x6f, 0x63, 0x68, + 0x52, 0x11, 0x65, 0x61, 0x72, 0x6c, 0x69, 0x65, 0x73, 0x74, 0x45, 0x78, 0x69, 0x74, 0x45, 0x70, + 0x6f, 0x63, 0x68, 0x12, 0x8e, 0x01, 0x0a, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x6f, 0x6c, 0x69, 0x64, + 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x62, 0x61, 0x6c, 0x61, 0x6e, 0x63, 0x65, 0x5f, 0x74, 0x6f, + 0x5f, 0x63, 0x6f, 0x6e, 0x73, 0x75, 0x6d, 0x65, 0x18, 0xe5, 0x5d, 0x20, 0x01, 0x28, 0x04, 0x42, + 0x44, 0x82, 0xb5, 0x18, 0x40, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, + 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, + 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, + 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, + 0x2e, 0x47, 0x77, 0x65, 0x69, 0x52, 0x1d, 0x63, 0x6f, 0x6e, 0x73, 0x6f, 0x6c, 0x69, 0x64, 0x61, + 0x74, 0x69, 0x6f, 0x6e, 
0x42, 0x61, 0x6c, 0x61, 0x6e, 0x63, 0x65, 0x54, 0x6f, 0x43, 0x6f, 0x6e, + 0x73, 0x75, 0x6d, 0x65, 0x12, 0x88, 0x01, 0x0a, 0x1c, 0x65, 0x61, 0x72, 0x6c, 0x69, 0x65, 0x73, + 0x74, 0x5f, 0x63, 0x6f, 0x6e, 0x73, 0x6f, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, + 0x65, 0x70, 0x6f, 0x63, 0x68, 0x18, 0xe6, 0x5d, 0x20, 0x01, 0x28, 0x04, 0x42, 0x45, 0x82, 0xb5, + 0x18, 0x41, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, + 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, + 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, + 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x45, 0x70, + 0x6f, 0x63, 0x68, 0x52, 0x1a, 0x65, 0x61, 0x72, 0x6c, 0x69, 0x65, 0x73, 0x74, 0x43, 0x6f, 0x6e, + 0x73, 0x6f, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x45, 0x70, 0x6f, 0x63, 0x68, 0x12, + 0x60, 0x0a, 0x10, 0x70, 0x65, 0x6e, 0x64, 0x69, 0x6e, 0x67, 0x5f, 0x64, 0x65, 0x70, 0x6f, 0x73, + 0x69, 0x74, 0x73, 0x18, 0xe7, 0x5d, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x25, 0x2e, 0x65, 0x74, 0x68, + 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, + 0x61, 0x31, 0x2e, 0x50, 0x65, 0x6e, 0x64, 0x69, 0x6e, 0x67, 0x44, 0x65, 0x70, 0x6f, 0x73, 0x69, + 0x74, 0x42, 0x0d, 0x92, 0xb5, 0x18, 0x09, 0x31, 0x33, 0x34, 0x32, 0x31, 0x37, 0x37, 0x32, 0x38, + 0x52, 0x0f, 0x70, 0x65, 0x6e, 0x64, 0x69, 0x6e, 0x67, 0x44, 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, + 0x73, 0x12, 0x7f, 0x0a, 0x1b, 0x70, 0x65, 0x6e, 0x64, 0x69, 0x6e, 0x67, 0x5f, 0x70, 0x61, 0x72, + 0x74, 0x69, 0x61, 0x6c, 0x5f, 0x77, 0x69, 0x74, 0x68, 0x64, 0x72, 0x61, 0x77, 0x61, 0x6c, 0x73, + 0x18, 0xe8, 0x5d, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x2f, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, + 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, + 0x50, 0x65, 0x6e, 0x64, 0x69, 0x6e, 0x67, 0x50, 0x61, 0x72, 0x74, 0x69, 0x61, 0x6c, 0x57, 0x69, + 0x74, 0x68, 0x64, 0x72, 0x61, 0x77, 0x61, 0x6c, 0x42, 0x0d, 0x92, 0xb5, 0x18, 0x09, 0x31, 0x33, + 0x34, 0x32, 0x31, 0x37, 0x37, 0x32, 0x38, 0x52, 0x19, 0x70, 0x65, 0x6e, 0x64, 0x69, 0x6e, 0x67, + 0x50, 0x61, 0x72, 0x74, 0x69, 0x61, 0x6c, 0x57, 0x69, 0x74, 0x68, 0x64, 0x72, 0x61, 0x77, 0x61, + 0x6c, 0x73, 0x12, 0x6f, 0x0a, 0x16, 0x70, 0x65, 0x6e, 0x64, 0x69, 0x6e, 0x67, 0x5f, 0x63, 0x6f, + 0x6e, 0x73, 0x6f, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x18, 0xe9, 0x5d, 0x20, + 0x03, 0x28, 0x0b, 0x32, 0x2b, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, + 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x50, 0x65, 0x6e, 0x64, + 0x69, 0x6e, 0x67, 0x43, 0x6f, 0x6e, 0x73, 0x6f, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x69, 0x6f, 0x6e, + 0x42, 0x0a, 0x92, 0xb5, 0x18, 0x06, 0x32, 0x36, 0x32, 0x31, 0x34, 0x34, 0x52, 0x15, 0x70, 0x65, + 0x6e, 0x64, 0x69, 0x6e, 0x67, 0x43, 0x6f, 0x6e, 0x73, 0x6f, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x69, + 0x6f, 0x6e, 0x73, 0x22, 0xe6, 0x19, 0x0a, 0x0f, 0x42, 0x65, 0x61, 0x63, 0x6f, 0x6e, 0x53, 0x74, + 0x61, 0x74, 0x65, 0x46, 0x75, 0x6c, 0x75, 0x12, 0x22, 0x0a, 0x0c, 0x67, 0x65, 0x6e, 0x65, 0x73, + 0x69, 0x73, 0x5f, 0x74, 0x69, 0x6d, 0x65, 0x18, 0xe9, 0x07, 0x20, 0x01, 0x28, 0x04, 0x52, 0x0b, + 0x67, 0x65, 0x6e, 0x65, 0x73, 0x69, 0x73, 0x54, 0x69, 0x6d, 0x65, 0x12, 0x3f, 0x0a, 0x17, 0x67, + 0x65, 0x6e, 0x65, 0x73, 0x69, 0x73, 0x5f, 0x76, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, + 0x73, 0x5f, 0x72, 0x6f, 0x6f, 0x74, 0x18, 0xea, 
0x07, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x06, 0x8a, + 0xb5, 0x18, 0x02, 0x33, 0x32, 0x52, 0x15, 0x67, 0x65, 0x6e, 0x65, 0x73, 0x69, 0x73, 0x56, 0x61, + 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, 0x73, 0x52, 0x6f, 0x6f, 0x74, 0x12, 0x59, 0x0a, 0x04, + 0x73, 0x6c, 0x6f, 0x74, 0x18, 0xeb, 0x07, 0x20, 0x01, 0x28, 0x04, 0x42, 0x44, 0x82, 0xb5, 0x18, + 0x40, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, + 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, + 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, + 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x53, 0x6c, 0x6f, + 0x74, 0x52, 0x04, 0x73, 0x6c, 0x6f, 0x74, 0x12, 0x30, 0x0a, 0x04, 0x66, 0x6f, 0x72, 0x6b, 0x18, + 0xec, 0x07, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1b, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, + 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x46, + 0x6f, 0x72, 0x6b, 0x52, 0x04, 0x66, 0x6f, 0x72, 0x6b, 0x12, 0x59, 0x0a, 0x13, 0x6c, 0x61, 0x74, + 0x65, 0x73, 0x74, 0x5f, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x5f, 0x68, 0x65, 0x61, 0x64, 0x65, 0x72, + 0x18, 0xd1, 0x0f, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x28, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, + 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, + 0x42, 0x65, 0x61, 0x63, 0x6f, 0x6e, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x48, 0x65, 0x61, 0x64, 0x65, + 0x72, 0x52, 0x11, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x74, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x48, 0x65, + 0x61, 0x64, 0x65, 0x72, 0x12, 0x2d, 0x0a, 0x0b, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x5f, 0x72, 0x6f, + 0x6f, 0x74, 0x73, 0x18, 0xd2, 0x0f, 0x20, 0x03, 0x28, 0x0c, 0x42, 0x0b, 0x8a, 0xb5, 0x18, 0x07, + 0x38, 0x31, 0x39, 0x32, 0x2c, 0x33, 0x32, 0x52, 0x0a, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x52, 0x6f, + 0x6f, 0x74, 0x73, 0x12, 0x2d, 0x0a, 0x0b, 0x73, 0x74, 0x61, 0x74, 0x65, 0x5f, 0x72, 0x6f, 0x6f, + 0x74, 0x73, 0x18, 0xd3, 0x0f, 0x20, 0x03, 0x28, 0x0c, 0x42, 0x0b, 0x8a, 0xb5, 0x18, 0x07, 0x38, + 0x31, 0x39, 0x32, 0x2c, 0x33, 0x32, 0x52, 0x0a, 0x73, 0x74, 0x61, 0x74, 0x65, 0x52, 0x6f, 0x6f, + 0x74, 0x73, 0x12, 0x40, 0x0a, 0x10, 0x68, 0x69, 0x73, 0x74, 0x6f, 0x72, 0x69, 0x63, 0x61, 0x6c, + 0x5f, 0x72, 0x6f, 0x6f, 0x74, 0x73, 0x18, 0xd4, 0x0f, 0x20, 0x03, 0x28, 0x0c, 0x42, 0x14, 0x8a, + 0xb5, 0x18, 0x04, 0x3f, 0x2c, 0x33, 0x32, 0x92, 0xb5, 0x18, 0x08, 0x31, 0x36, 0x37, 0x37, 0x37, + 0x32, 0x31, 0x36, 0x52, 0x0f, 0x68, 0x69, 0x73, 0x74, 0x6f, 0x72, 0x69, 0x63, 0x61, 0x6c, 0x52, + 0x6f, 0x6f, 0x74, 0x73, 0x12, 0x3d, 0x0a, 0x09, 0x65, 0x74, 0x68, 0x31, 0x5f, 0x64, 0x61, 0x74, + 0x61, 0x18, 0xb9, 0x17, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1f, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, + 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, + 0x2e, 0x45, 0x74, 0x68, 0x31, 0x44, 0x61, 0x74, 0x61, 0x52, 0x08, 0x65, 0x74, 0x68, 0x31, 0x44, + 0x61, 0x74, 0x61, 0x12, 0x52, 0x0a, 0x0f, 0x65, 0x74, 0x68, 0x31, 0x5f, 0x64, 0x61, 0x74, 0x61, + 0x5f, 0x76, 0x6f, 0x74, 0x65, 0x73, 0x18, 0xba, 0x17, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1f, 0x2e, + 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, + 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x45, 0x74, 0x68, 0x31, 0x44, 0x61, 0x74, 0x61, 0x42, 0x08, + 0x92, 0xb5, 0x18, 0x04, 0x32, 0x30, 0x34, 0x38, 0x52, 0x0d, 0x65, 0x74, 0x68, 0x31, 0x44, 0x61, + 0x74, 0x61, 0x56, 0x6f, 0x74, 0x65, 0x73, 0x12, 0x2d, 0x0a, 0x12, 0x65, 
0x74, 0x68, 0x31, 0x5f, + 0x64, 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x5f, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x18, 0xbb, 0x17, + 0x20, 0x01, 0x28, 0x04, 0x52, 0x10, 0x65, 0x74, 0x68, 0x31, 0x44, 0x65, 0x70, 0x6f, 0x73, 0x69, + 0x74, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x12, 0x54, 0x0a, 0x0a, 0x76, 0x61, 0x6c, 0x69, 0x64, 0x61, + 0x74, 0x6f, 0x72, 0x73, 0x18, 0xa1, 0x1f, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x20, 0x2e, 0x65, 0x74, + 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, + 0x68, 0x61, 0x31, 0x2e, 0x56, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, 0x42, 0x11, 0x92, + 0xb5, 0x18, 0x0d, 0x31, 0x30, 0x39, 0x39, 0x35, 0x31, 0x31, 0x36, 0x32, 0x37, 0x37, 0x37, 0x36, + 0x52, 0x0a, 0x76, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, 0x73, 0x12, 0x2e, 0x0a, 0x08, + 0x62, 0x61, 0x6c, 0x61, 0x6e, 0x63, 0x65, 0x73, 0x18, 0xa2, 0x1f, 0x20, 0x03, 0x28, 0x04, 0x42, + 0x11, 0x92, 0xb5, 0x18, 0x0d, 0x31, 0x30, 0x39, 0x39, 0x35, 0x31, 0x31, 0x36, 0x32, 0x37, 0x37, + 0x37, 0x36, 0x52, 0x08, 0x62, 0x61, 0x6c, 0x61, 0x6e, 0x63, 0x65, 0x73, 0x12, 0x30, 0x0a, 0x0c, + 0x72, 0x61, 0x6e, 0x64, 0x61, 0x6f, 0x5f, 0x6d, 0x69, 0x78, 0x65, 0x73, 0x18, 0x89, 0x27, 0x20, + 0x03, 0x28, 0x0c, 0x42, 0x0c, 0x8a, 0xb5, 0x18, 0x08, 0x36, 0x35, 0x35, 0x33, 0x36, 0x2c, 0x33, + 0x32, 0x52, 0x0b, 0x72, 0x61, 0x6e, 0x64, 0x61, 0x6f, 0x4d, 0x69, 0x78, 0x65, 0x73, 0x12, 0x27, + 0x0a, 0x09, 0x73, 0x6c, 0x61, 0x73, 0x68, 0x69, 0x6e, 0x67, 0x73, 0x18, 0xf1, 0x2e, 0x20, 0x03, + 0x28, 0x04, 0x42, 0x08, 0x8a, 0xb5, 0x18, 0x04, 0x38, 0x31, 0x39, 0x32, 0x52, 0x09, 0x73, 0x6c, + 0x61, 0x73, 0x68, 0x69, 0x6e, 0x67, 0x73, 0x12, 0x54, 0x0a, 0x1c, 0x70, 0x72, 0x65, 0x76, 0x69, + 0x6f, 0x75, 0x73, 0x5f, 0x65, 0x70, 0x6f, 0x63, 0x68, 0x5f, 0x70, 0x61, 0x72, 0x74, 0x69, 0x63, + 0x69, 0x70, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0xd9, 0x36, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x11, + 0x92, 0xb5, 0x18, 0x0d, 0x31, 0x30, 0x39, 0x39, 0x35, 0x31, 0x31, 0x36, 0x32, 0x37, 0x37, 0x37, + 0x36, 0x52, 0x1a, 0x70, 0x72, 0x65, 0x76, 0x69, 0x6f, 0x75, 0x73, 0x45, 0x70, 0x6f, 0x63, 0x68, + 0x50, 0x61, 0x72, 0x74, 0x69, 0x63, 0x69, 0x70, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x52, 0x0a, + 0x1b, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x5f, 0x65, 0x70, 0x6f, 0x63, 0x68, 0x5f, 0x70, + 0x61, 0x72, 0x74, 0x69, 0x63, 0x69, 0x70, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0xda, 0x36, 0x20, + 0x01, 0x28, 0x0c, 0x42, 0x11, 0x92, 0xb5, 0x18, 0x0d, 0x31, 0x30, 0x39, 0x39, 0x35, 0x31, 0x31, + 0x36, 0x32, 0x37, 0x37, 0x37, 0x36, 0x52, 0x19, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x45, + 0x70, 0x6f, 0x63, 0x68, 0x50, 0x61, 0x72, 0x74, 0x69, 0x63, 0x69, 0x70, 0x61, 0x74, 0x69, 0x6f, + 0x6e, 0x12, 0x67, 0x0a, 0x12, 0x6a, 0x75, 0x73, 0x74, 0x69, 0x66, 0x69, 0x63, 0x61, 0x74, 0x69, + 0x6f, 0x6e, 0x5f, 0x62, 0x69, 0x74, 0x73, 0x18, 0xc1, 0x3e, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x37, + 0x82, 0xb5, 0x18, 0x2e, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, + 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x67, 0x6f, 0x2d, 0x62, + 0x69, 0x74, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x2e, 0x42, 0x69, 0x74, 0x76, 0x65, 0x63, 0x74, 0x6f, + 0x72, 0x34, 0x8a, 0xb5, 0x18, 0x01, 0x31, 0x52, 0x11, 0x6a, 0x75, 0x73, 0x74, 0x69, 0x66, 0x69, + 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x42, 0x69, 0x74, 0x73, 0x12, 0x66, 0x0a, 0x1d, 0x70, 0x72, + 0x65, 0x76, 0x69, 0x6f, 0x75, 0x73, 0x5f, 0x6a, 0x75, 0x73, 0x74, 0x69, 0x66, 0x69, 0x65, 0x64, + 0x5f, 0x63, 0x68, 0x65, 0x63, 0x6b, 0x70, 0x6f, 0x69, 0x6e, 0x74, 0x18, 0xc2, 0x3e, 0x20, 0x01, + 
0x28, 0x0b, 0x32, 0x21, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, + 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x43, 0x68, 0x65, 0x63, 0x6b, + 0x70, 0x6f, 0x69, 0x6e, 0x74, 0x52, 0x1b, 0x70, 0x72, 0x65, 0x76, 0x69, 0x6f, 0x75, 0x73, 0x4a, + 0x75, 0x73, 0x74, 0x69, 0x66, 0x69, 0x65, 0x64, 0x43, 0x68, 0x65, 0x63, 0x6b, 0x70, 0x6f, 0x69, + 0x6e, 0x74, 0x12, 0x64, 0x0a, 0x1c, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x5f, 0x6a, 0x75, + 0x73, 0x74, 0x69, 0x66, 0x69, 0x65, 0x64, 0x5f, 0x63, 0x68, 0x65, 0x63, 0x6b, 0x70, 0x6f, 0x69, + 0x6e, 0x74, 0x18, 0xc3, 0x3e, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x21, 0x2e, 0x65, 0x74, 0x68, 0x65, + 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, + 0x31, 0x2e, 0x43, 0x68, 0x65, 0x63, 0x6b, 0x70, 0x6f, 0x69, 0x6e, 0x74, 0x52, 0x1a, 0x63, 0x75, + 0x72, 0x72, 0x65, 0x6e, 0x74, 0x4a, 0x75, 0x73, 0x74, 0x69, 0x66, 0x69, 0x65, 0x64, 0x43, 0x68, + 0x65, 0x63, 0x6b, 0x70, 0x6f, 0x69, 0x6e, 0x74, 0x12, 0x55, 0x0a, 0x14, 0x66, 0x69, 0x6e, 0x61, + 0x6c, 0x69, 0x7a, 0x65, 0x64, 0x5f, 0x63, 0x68, 0x65, 0x63, 0x6b, 0x70, 0x6f, 0x69, 0x6e, 0x74, + 0x18, 0xc4, 0x3e, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x21, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, + 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, + 0x43, 0x68, 0x65, 0x63, 0x6b, 0x70, 0x6f, 0x69, 0x6e, 0x74, 0x52, 0x13, 0x66, 0x69, 0x6e, 0x61, + 0x6c, 0x69, 0x7a, 0x65, 0x64, 0x43, 0x68, 0x65, 0x63, 0x6b, 0x70, 0x6f, 0x69, 0x6e, 0x74, 0x12, + 0x3f, 0x0a, 0x11, 0x69, 0x6e, 0x61, 0x63, 0x74, 0x69, 0x76, 0x69, 0x74, 0x79, 0x5f, 0x73, 0x63, + 0x6f, 0x72, 0x65, 0x73, 0x18, 0xa9, 0x46, 0x20, 0x03, 0x28, 0x04, 0x42, 0x11, 0x92, 0xb5, 0x18, + 0x0d, 0x31, 0x30, 0x39, 0x39, 0x35, 0x31, 0x31, 0x36, 0x32, 0x37, 0x37, 0x37, 0x36, 0x52, 0x10, + 0x69, 0x6e, 0x61, 0x63, 0x74, 0x69, 0x76, 0x69, 0x74, 0x79, 0x53, 0x63, 0x6f, 0x72, 0x65, 0x73, + 0x12, 0x5b, 0x0a, 0x16, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x5f, 0x73, 0x79, 0x6e, 0x63, + 0x5f, 0x63, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x74, 0x65, 0x65, 0x18, 0xaa, 0x46, 0x20, 0x01, 0x28, + 0x0b, 0x32, 0x24, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, + 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x53, 0x79, 0x6e, 0x63, 0x43, 0x6f, + 0x6d, 0x6d, 0x69, 0x74, 0x74, 0x65, 0x65, 0x52, 0x14, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, + 0x53, 0x79, 0x6e, 0x63, 0x43, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x74, 0x65, 0x65, 0x12, 0x55, 0x0a, + 0x13, 0x6e, 0x65, 0x78, 0x74, 0x5f, 0x73, 0x79, 0x6e, 0x63, 0x5f, 0x63, 0x6f, 0x6d, 0x6d, 0x69, + 0x74, 0x74, 0x65, 0x65, 0x18, 0xab, 0x46, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x24, 0x2e, 0x65, 0x74, + 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, + 0x68, 0x61, 0x31, 0x2e, 0x53, 0x79, 0x6e, 0x63, 0x43, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x74, 0x65, + 0x65, 0x52, 0x11, 0x6e, 0x65, 0x78, 0x74, 0x53, 0x79, 0x6e, 0x63, 0x43, 0x6f, 0x6d, 0x6d, 0x69, + 0x74, 0x74, 0x65, 0x65, 0x12, 0x77, 0x0a, 0x1f, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x74, 0x5f, 0x65, + 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x70, 0x61, 0x79, 0x6c, 0x6f, 0x61, 0x64, + 0x5f, 0x68, 0x65, 0x61, 0x64, 0x65, 0x72, 0x18, 0x91, 0x4e, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x2f, + 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x6e, 0x67, 0x69, 0x6e, 0x65, + 0x2e, 0x76, 0x31, 0x2e, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x50, 0x61, 0x79, + 0x6c, 0x6f, 0x61, 0x64, 
0x48, 0x65, 0x61, 0x64, 0x65, 0x72, 0x44, 0x65, 0x6e, 0x65, 0x62, 0x52, + 0x1c, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x74, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, + 0x50, 0x61, 0x79, 0x6c, 0x6f, 0x61, 0x64, 0x48, 0x65, 0x61, 0x64, 0x65, 0x72, 0x12, 0x33, 0x0a, + 0x15, 0x6e, 0x65, 0x78, 0x74, 0x5f, 0x77, 0x69, 0x74, 0x68, 0x64, 0x72, 0x61, 0x77, 0x61, 0x6c, + 0x5f, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x18, 0xf9, 0x55, 0x20, 0x01, 0x28, 0x04, 0x52, 0x13, 0x6e, + 0x65, 0x78, 0x74, 0x57, 0x69, 0x74, 0x68, 0x64, 0x72, 0x61, 0x77, 0x61, 0x6c, 0x49, 0x6e, 0x64, + 0x65, 0x78, 0x12, 0x96, 0x01, 0x0a, 0x1f, 0x6e, 0x65, 0x78, 0x74, 0x5f, 0x77, 0x69, 0x74, 0x68, + 0x64, 0x72, 0x61, 0x77, 0x61, 0x6c, 0x5f, 0x76, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, + 0x5f, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x18, 0xfa, 0x55, 0x20, 0x01, 0x28, 0x04, 0x42, 0x4e, 0x82, + 0xb5, 0x18, 0x4a, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, + 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, + 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, + 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x56, + 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x52, 0x1c, 0x6e, + 0x65, 0x78, 0x74, 0x57, 0x69, 0x74, 0x68, 0x64, 0x72, 0x61, 0x77, 0x61, 0x6c, 0x56, 0x61, 0x6c, + 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x12, 0x6a, 0x0a, 0x14, 0x68, + 0x69, 0x73, 0x74, 0x6f, 0x72, 0x69, 0x63, 0x61, 0x6c, 0x5f, 0x73, 0x75, 0x6d, 0x6d, 0x61, 0x72, + 0x69, 0x65, 0x73, 0x18, 0xfb, 0x55, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x28, 0x2e, 0x65, 0x74, 0x68, + 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, + 0x61, 0x31, 0x2e, 0x48, 0x69, 0x73, 0x74, 0x6f, 0x72, 0x69, 0x63, 0x61, 0x6c, 0x53, 0x75, 0x6d, + 0x6d, 0x61, 0x72, 0x79, 0x42, 0x0c, 0x92, 0xb5, 0x18, 0x08, 0x31, 0x36, 0x37, 0x37, 0x37, 0x32, + 0x31, 0x36, 0x52, 0x13, 0x68, 0x69, 0x73, 0x74, 0x6f, 0x72, 0x69, 0x63, 0x61, 0x6c, 0x53, 0x75, + 0x6d, 0x6d, 0x61, 0x72, 0x69, 0x65, 0x73, 0x12, 0x40, 0x0a, 0x1c, 0x64, 0x65, 0x70, 0x6f, 0x73, + 0x69, 0x74, 0x5f, 0x72, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x73, 0x5f, 0x73, 0x74, 0x61, 0x72, + 0x74, 0x5f, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x18, 0xe1, 0x5d, 0x20, 0x01, 0x28, 0x04, 0x52, 0x19, + 0x64, 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x73, 0x53, + 0x74, 0x61, 0x72, 0x74, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x12, 0x82, 0x01, 0x0a, 0x1a, 0x64, 0x65, + 0x70, 0x6f, 0x73, 0x69, 0x74, 0x5f, 0x62, 0x61, 0x6c, 0x61, 0x6e, 0x63, 0x65, 0x5f, 0x74, 0x6f, + 0x5f, 0x63, 0x6f, 0x6e, 0x73, 0x75, 0x6d, 0x65, 0x18, 0xe2, 0x5d, 0x20, 0x01, 0x28, 0x04, 0x42, + 0x44, 0x82, 0xb5, 0x18, 0x40, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, + 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, + 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, + 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, + 0x2e, 0x47, 0x77, 0x65, 0x69, 0x52, 0x17, 0x64, 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x42, 0x61, + 0x6c, 0x61, 0x6e, 0x63, 0x65, 0x54, 0x6f, 0x43, 0x6f, 0x6e, 0x73, 0x75, 0x6d, 0x65, 0x12, 0x7c, + 0x0a, 0x17, 0x65, 0x78, 0x69, 0x74, 0x5f, 0x62, 0x61, 0x6c, 0x61, 0x6e, 0x63, 0x65, 0x5f, 0x74, + 0x6f, 0x5f, 0x63, 0x6f, 0x6e, 0x73, 0x75, 0x6d, 
0x65, 0x18, 0xe3, 0x5d, 0x20, 0x01, 0x28, 0x04, + 0x42, 0x44, 0x82, 0xb5, 0x18, 0x40, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, + 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, + 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, + 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, + 0x73, 0x2e, 0x47, 0x77, 0x65, 0x69, 0x52, 0x14, 0x65, 0x78, 0x69, 0x74, 0x42, 0x61, 0x6c, 0x61, + 0x6e, 0x63, 0x65, 0x54, 0x6f, 0x43, 0x6f, 0x6e, 0x73, 0x75, 0x6d, 0x65, 0x12, 0x76, 0x0a, 0x13, + 0x65, 0x61, 0x72, 0x6c, 0x69, 0x65, 0x73, 0x74, 0x5f, 0x65, 0x78, 0x69, 0x74, 0x5f, 0x65, 0x70, + 0x6f, 0x63, 0x68, 0x18, 0xe4, 0x5d, 0x20, 0x01, 0x28, 0x04, 0x42, 0x45, 0x82, 0xb5, 0x18, 0x41, + 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, + 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, + 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, + 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x45, 0x70, 0x6f, 0x63, + 0x68, 0x52, 0x11, 0x65, 0x61, 0x72, 0x6c, 0x69, 0x65, 0x73, 0x74, 0x45, 0x78, 0x69, 0x74, 0x45, + 0x70, 0x6f, 0x63, 0x68, 0x12, 0x8e, 0x01, 0x0a, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x6f, 0x6c, 0x69, + 0x64, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x62, 0x61, 0x6c, 0x61, 0x6e, 0x63, 0x65, 0x5f, 0x74, + 0x6f, 0x5f, 0x63, 0x6f, 0x6e, 0x73, 0x75, 0x6d, 0x65, 0x18, 0xe5, 0x5d, 0x20, 0x01, 0x28, 0x04, + 0x42, 0x44, 0x82, 0xb5, 0x18, 0x40, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, + 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, + 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, + 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, + 0x73, 0x2e, 0x47, 0x77, 0x65, 0x69, 0x52, 0x1d, 0x63, 0x6f, 0x6e, 0x73, 0x6f, 0x6c, 0x69, 0x64, + 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x42, 0x61, 0x6c, 0x61, 0x6e, 0x63, 0x65, 0x54, 0x6f, 0x43, 0x6f, + 0x6e, 0x73, 0x75, 0x6d, 0x65, 0x12, 0x88, 0x01, 0x0a, 0x1c, 0x65, 0x61, 0x72, 0x6c, 0x69, 0x65, + 0x73, 0x74, 0x5f, 0x63, 0x6f, 0x6e, 0x73, 0x6f, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x69, 0x6f, 0x6e, + 0x5f, 0x65, 0x70, 0x6f, 0x63, 0x68, 0x18, 0xe6, 0x5d, 0x20, 0x01, 0x28, 0x04, 0x42, 0x45, 0x82, + 0xb5, 0x18, 0x41, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, + 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, + 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, + 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x45, + 0x70, 0x6f, 0x63, 0x68, 0x52, 0x1a, 0x65, 0x61, 0x72, 0x6c, 0x69, 0x65, 0x73, 0x74, 0x43, 0x6f, + 0x6e, 0x73, 0x6f, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x45, 0x70, 0x6f, 0x63, 0x68, + 0x12, 0x60, 0x0a, 0x10, 0x70, 0x65, 0x6e, 0x64, 0x69, 0x6e, 0x67, 0x5f, 0x64, 0x65, 0x70, 0x6f, + 0x73, 0x69, 0x74, 0x73, 0x18, 0xe7, 0x5d, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x25, 0x2e, 0x65, 0x74, + 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, + 0x68, 0x61, 0x31, 0x2e, 0x50, 0x65, 0x6e, 0x64, 0x69, 0x6e, 0x67, 0x44, 0x65, 0x70, 0x6f, 0x73, + 0x69, 0x74, 0x42, 0x0d, 0x92, 0xb5, 0x18, 0x09, 0x31, 0x33, 0x34, 0x32, 
0x31, 0x37, 0x37, 0x32, + 0x38, 0x52, 0x0f, 0x70, 0x65, 0x6e, 0x64, 0x69, 0x6e, 0x67, 0x44, 0x65, 0x70, 0x6f, 0x73, 0x69, + 0x74, 0x73, 0x12, 0x7f, 0x0a, 0x1b, 0x70, 0x65, 0x6e, 0x64, 0x69, 0x6e, 0x67, 0x5f, 0x70, 0x61, + 0x72, 0x74, 0x69, 0x61, 0x6c, 0x5f, 0x77, 0x69, 0x74, 0x68, 0x64, 0x72, 0x61, 0x77, 0x61, 0x6c, + 0x73, 0x18, 0xe8, 0x5d, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x2f, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, + 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, + 0x2e, 0x50, 0x65, 0x6e, 0x64, 0x69, 0x6e, 0x67, 0x50, 0x61, 0x72, 0x74, 0x69, 0x61, 0x6c, 0x57, + 0x69, 0x74, 0x68, 0x64, 0x72, 0x61, 0x77, 0x61, 0x6c, 0x42, 0x0d, 0x92, 0xb5, 0x18, 0x09, 0x31, + 0x33, 0x34, 0x32, 0x31, 0x37, 0x37, 0x32, 0x38, 0x52, 0x19, 0x70, 0x65, 0x6e, 0x64, 0x69, 0x6e, + 0x67, 0x50, 0x61, 0x72, 0x74, 0x69, 0x61, 0x6c, 0x57, 0x69, 0x74, 0x68, 0x64, 0x72, 0x61, 0x77, + 0x61, 0x6c, 0x73, 0x12, 0x6f, 0x0a, 0x16, 0x70, 0x65, 0x6e, 0x64, 0x69, 0x6e, 0x67, 0x5f, 0x63, + 0x6f, 0x6e, 0x73, 0x6f, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x18, 0xe9, 0x5d, + 0x20, 0x03, 0x28, 0x0b, 0x32, 0x2b, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, + 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x50, 0x65, 0x6e, + 0x64, 0x69, 0x6e, 0x67, 0x43, 0x6f, 0x6e, 0x73, 0x6f, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x69, 0x6f, + 0x6e, 0x42, 0x0a, 0x92, 0xb5, 0x18, 0x06, 0x32, 0x36, 0x32, 0x31, 0x34, 0x34, 0x52, 0x15, 0x70, + 0x65, 0x6e, 0x64, 0x69, 0x6e, 0x67, 0x43, 0x6f, 0x6e, 0x73, 0x6f, 0x6c, 0x69, 0x64, 0x61, 0x74, + 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x36, 0x0a, 0x12, 0x70, 0x72, 0x6f, 0x70, 0x6f, 0x73, 0x65, 0x72, + 0x5f, 0x6c, 0x6f, 0x6f, 0x6b, 0x61, 0x68, 0x65, 0x61, 0x64, 0x18, 0xc9, 0x65, 0x20, 0x03, 0x28, + 0x04, 0x42, 0x06, 0x8a, 0xb5, 0x18, 0x02, 0x36, 0x34, 0x52, 0x11, 0x70, 0x72, 0x6f, 0x70, 0x6f, + 0x73, 0x65, 0x72, 0x4c, 0x6f, 0x6f, 0x6b, 0x61, 0x68, 0x65, 0x61, 0x64, 0x42, 0x9a, 0x01, 0x0a, + 0x19, 0x6f, 0x72, 0x67, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, + 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x42, 0x10, 0x42, 0x65, 0x61, 0x63, + 0x6f, 0x6e, 0x53, 0x74, 0x61, 0x74, 0x65, 0x50, 0x72, 0x6f, 0x74, 0x6f, 0x50, 0x01, 0x5a, 0x39, + 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, + 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, + 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x31, 0x61, + 0x6c, 0x70, 0x68, 0x61, 0x31, 0x3b, 0x65, 0x74, 0x68, 0xaa, 0x02, 0x15, 0x45, 0x74, 0x68, 0x65, + 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x45, 0x74, 0x68, 0x2e, 0x56, 0x31, 0x41, 0x6c, 0x70, 0x68, 0x61, + 0x31, 0xca, 0x02, 0x15, 0x45, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x5c, 0x45, 0x74, 0x68, + 0x5c, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, + 0x33, } var ( diff --git a/proto/prysm/v1alpha1/gloas.pb.go b/proto/prysm/v1alpha1/gloas.pb.go new file mode 100755 index 0000000000..4e54da097a --- /dev/null +++ b/proto/prysm/v1alpha1/gloas.pb.go @@ -0,0 +1,2037 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. 
+// versions: +// protoc-gen-go v1.36.3 +// protoc v3.21.7 +// source: proto/prysm/v1alpha1/gloas.proto + +package eth + +import ( + reflect "reflect" + sync "sync" + + github_com_OffchainLabs_go_bitfield "github.com/OffchainLabs/go-bitfield" + github_com_OffchainLabs_prysm_v6_consensus_types_primitives "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" + v1 "github.com/OffchainLabs/prysm/v6/proto/engine/v1" + _ "github.com/OffchainLabs/prysm/v6/proto/eth/ext" + protoreflect "google.golang.org/protobuf/reflect/protoreflect" + protoimpl "google.golang.org/protobuf/runtime/protoimpl" +) + +const ( + // Verify that this generated code is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion) + // Verify that runtime/protoimpl is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20) +) + +type ExecutionPayloadBid struct { + state protoimpl.MessageState `protogen:"open.v1"` + ParentBlockHash []byte `protobuf:"bytes,1,opt,name=parent_block_hash,json=parentBlockHash,proto3" json:"parent_block_hash,omitempty" ssz-size:"32"` + ParentBlockRoot []byte `protobuf:"bytes,2,opt,name=parent_block_root,json=parentBlockRoot,proto3" json:"parent_block_root,omitempty" ssz-size:"32"` + BlockHash []byte `protobuf:"bytes,3,opt,name=block_hash,json=blockHash,proto3" json:"block_hash,omitempty" ssz-size:"32"` + FeeRecipient []byte `protobuf:"bytes,4,opt,name=fee_recipient,json=feeRecipient,proto3" json:"fee_recipient,omitempty" ssz-size:"20"` + GasLimit uint64 `protobuf:"varint,5,opt,name=gas_limit,json=gasLimit,proto3" json:"gas_limit,omitempty"` + BuilderIndex github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex `protobuf:"varint,6,opt,name=builder_index,json=builderIndex,proto3" json:"builder_index,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.ValidatorIndex"` + Slot github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot `protobuf:"varint,7,opt,name=slot,proto3" json:"slot,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot"` + Value github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Gwei `protobuf:"varint,8,opt,name=value,proto3" json:"value,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Gwei"` + BlobKzgCommitmentsRoot []byte `protobuf:"bytes,9,opt,name=blob_kzg_commitments_root,json=blobKzgCommitmentsRoot,proto3" json:"blob_kzg_commitments_root,omitempty" ssz-size:"32"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *ExecutionPayloadBid) Reset() { + *x = ExecutionPayloadBid{} + mi := &file_proto_prysm_v1alpha1_gloas_proto_msgTypes[0] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *ExecutionPayloadBid) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ExecutionPayloadBid) ProtoMessage() {} + +func (x *ExecutionPayloadBid) ProtoReflect() protoreflect.Message { + mi := &file_proto_prysm_v1alpha1_gloas_proto_msgTypes[0] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ExecutionPayloadBid.ProtoReflect.Descriptor instead. 
+func (*ExecutionPayloadBid) Descriptor() ([]byte, []int) { + return file_proto_prysm_v1alpha1_gloas_proto_rawDescGZIP(), []int{0} +} + +func (x *ExecutionPayloadBid) GetParentBlockHash() []byte { + if x != nil { + return x.ParentBlockHash + } + return nil +} + +func (x *ExecutionPayloadBid) GetParentBlockRoot() []byte { + if x != nil { + return x.ParentBlockRoot + } + return nil +} + +func (x *ExecutionPayloadBid) GetBlockHash() []byte { + if x != nil { + return x.BlockHash + } + return nil +} + +func (x *ExecutionPayloadBid) GetFeeRecipient() []byte { + if x != nil { + return x.FeeRecipient + } + return nil +} + +func (x *ExecutionPayloadBid) GetGasLimit() uint64 { + if x != nil { + return x.GasLimit + } + return 0 +} + +func (x *ExecutionPayloadBid) GetBuilderIndex() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex { + if x != nil { + return x.BuilderIndex + } + return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex(0) +} + +func (x *ExecutionPayloadBid) GetSlot() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot { + if x != nil { + return x.Slot + } + return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot(0) +} + +func (x *ExecutionPayloadBid) GetValue() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Gwei { + if x != nil { + return x.Value + } + return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Gwei(0) +} + +func (x *ExecutionPayloadBid) GetBlobKzgCommitmentsRoot() []byte { + if x != nil { + return x.BlobKzgCommitmentsRoot + } + return nil +} + +type SignedExecutionPayloadBid struct { + state protoimpl.MessageState `protogen:"open.v1"` + Message *ExecutionPayloadBid `protobuf:"bytes,1,opt,name=message,proto3" json:"message,omitempty"` + Signature []byte `protobuf:"bytes,2,opt,name=signature,proto3" json:"signature,omitempty" ssz-size:"96"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *SignedExecutionPayloadBid) Reset() { + *x = SignedExecutionPayloadBid{} + mi := &file_proto_prysm_v1alpha1_gloas_proto_msgTypes[1] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *SignedExecutionPayloadBid) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*SignedExecutionPayloadBid) ProtoMessage() {} + +func (x *SignedExecutionPayloadBid) ProtoReflect() protoreflect.Message { + mi := &file_proto_prysm_v1alpha1_gloas_proto_msgTypes[1] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use SignedExecutionPayloadBid.ProtoReflect.Descriptor instead. 
+func (*SignedExecutionPayloadBid) Descriptor() ([]byte, []int) { + return file_proto_prysm_v1alpha1_gloas_proto_rawDescGZIP(), []int{1} +} + +func (x *SignedExecutionPayloadBid) GetMessage() *ExecutionPayloadBid { + if x != nil { + return x.Message + } + return nil +} + +func (x *SignedExecutionPayloadBid) GetSignature() []byte { + if x != nil { + return x.Signature + } + return nil +} + +type PayloadAttestationData struct { + state protoimpl.MessageState `protogen:"open.v1"` + BeaconBlockRoot []byte `protobuf:"bytes,1,opt,name=beacon_block_root,json=beaconBlockRoot,proto3" json:"beacon_block_root,omitempty" ssz-size:"32"` + Slot github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot `protobuf:"varint,2,opt,name=slot,proto3" json:"slot,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot"` + PayloadPresent bool `protobuf:"varint,3,opt,name=payload_present,json=payloadPresent,proto3" json:"payload_present,omitempty"` + BlobDataAvailable bool `protobuf:"varint,4,opt,name=blob_data_available,json=blobDataAvailable,proto3" json:"blob_data_available,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *PayloadAttestationData) Reset() { + *x = PayloadAttestationData{} + mi := &file_proto_prysm_v1alpha1_gloas_proto_msgTypes[2] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *PayloadAttestationData) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*PayloadAttestationData) ProtoMessage() {} + +func (x *PayloadAttestationData) ProtoReflect() protoreflect.Message { + mi := &file_proto_prysm_v1alpha1_gloas_proto_msgTypes[2] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use PayloadAttestationData.ProtoReflect.Descriptor instead. 
+func (*PayloadAttestationData) Descriptor() ([]byte, []int) { + return file_proto_prysm_v1alpha1_gloas_proto_rawDescGZIP(), []int{2} +} + +func (x *PayloadAttestationData) GetBeaconBlockRoot() []byte { + if x != nil { + return x.BeaconBlockRoot + } + return nil +} + +func (x *PayloadAttestationData) GetSlot() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot { + if x != nil { + return x.Slot + } + return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot(0) +} + +func (x *PayloadAttestationData) GetPayloadPresent() bool { + if x != nil { + return x.PayloadPresent + } + return false +} + +func (x *PayloadAttestationData) GetBlobDataAvailable() bool { + if x != nil { + return x.BlobDataAvailable + } + return false +} + +type PayloadAttestation struct { + state protoimpl.MessageState `protogen:"open.v1"` + AggregationBits github_com_OffchainLabs_go_bitfield.Bitvector512 `protobuf:"bytes,1,opt,name=aggregation_bits,json=aggregationBits,proto3" json:"aggregation_bits,omitempty" cast-type:"github.com/OffchainLabs/go-bitfield.Bitvector512" ssz-size:"64"` + Data *PayloadAttestationData `protobuf:"bytes,2,opt,name=data,proto3" json:"data,omitempty"` + Signature []byte `protobuf:"bytes,3,opt,name=signature,proto3" json:"signature,omitempty" ssz-size:"96"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *PayloadAttestation) Reset() { + *x = PayloadAttestation{} + mi := &file_proto_prysm_v1alpha1_gloas_proto_msgTypes[3] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *PayloadAttestation) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*PayloadAttestation) ProtoMessage() {} + +func (x *PayloadAttestation) ProtoReflect() protoreflect.Message { + mi := &file_proto_prysm_v1alpha1_gloas_proto_msgTypes[3] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use PayloadAttestation.ProtoReflect.Descriptor instead. 
+func (*PayloadAttestation) Descriptor() ([]byte, []int) { + return file_proto_prysm_v1alpha1_gloas_proto_rawDescGZIP(), []int{3} +} + +func (x *PayloadAttestation) GetAggregationBits() github_com_OffchainLabs_go_bitfield.Bitvector512 { + if x != nil { + return x.AggregationBits + } + return github_com_OffchainLabs_go_bitfield.Bitvector512(nil) +} + +func (x *PayloadAttestation) GetData() *PayloadAttestationData { + if x != nil { + return x.Data + } + return nil +} + +func (x *PayloadAttestation) GetSignature() []byte { + if x != nil { + return x.Signature + } + return nil +} + +type PayloadAttestationMessage struct { + state protoimpl.MessageState `protogen:"open.v1"` + ValidatorIndex github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex `protobuf:"varint,1,opt,name=validator_index,json=validatorIndex,proto3" json:"validator_index,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.ValidatorIndex"` + Data *PayloadAttestationData `protobuf:"bytes,2,opt,name=data,proto3" json:"data,omitempty"` + Signature []byte `protobuf:"bytes,3,opt,name=signature,proto3" json:"signature,omitempty" ssz-size:"96"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *PayloadAttestationMessage) Reset() { + *x = PayloadAttestationMessage{} + mi := &file_proto_prysm_v1alpha1_gloas_proto_msgTypes[4] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *PayloadAttestationMessage) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*PayloadAttestationMessage) ProtoMessage() {} + +func (x *PayloadAttestationMessage) ProtoReflect() protoreflect.Message { + mi := &file_proto_prysm_v1alpha1_gloas_proto_msgTypes[4] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use PayloadAttestationMessage.ProtoReflect.Descriptor instead. 
+func (*PayloadAttestationMessage) Descriptor() ([]byte, []int) { + return file_proto_prysm_v1alpha1_gloas_proto_rawDescGZIP(), []int{4} +} + +func (x *PayloadAttestationMessage) GetValidatorIndex() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex { + if x != nil { + return x.ValidatorIndex + } + return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex(0) +} + +func (x *PayloadAttestationMessage) GetData() *PayloadAttestationData { + if x != nil { + return x.Data + } + return nil +} + +func (x *PayloadAttestationMessage) GetSignature() []byte { + if x != nil { + return x.Signature + } + return nil +} + +type BeaconBlockGloas struct { + state protoimpl.MessageState `protogen:"open.v1"` + Slot github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot `protobuf:"varint,1,opt,name=slot,proto3" json:"slot,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot"` + ProposerIndex github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex `protobuf:"varint,2,opt,name=proposer_index,json=proposerIndex,proto3" json:"proposer_index,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.ValidatorIndex"` + ParentRoot []byte `protobuf:"bytes,3,opt,name=parent_root,json=parentRoot,proto3" json:"parent_root,omitempty" ssz-size:"32"` + StateRoot []byte `protobuf:"bytes,4,opt,name=state_root,json=stateRoot,proto3" json:"state_root,omitempty" ssz-size:"32"` + Body *BeaconBlockBodyGloas `protobuf:"bytes,5,opt,name=body,proto3" json:"body,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *BeaconBlockGloas) Reset() { + *x = BeaconBlockGloas{} + mi := &file_proto_prysm_v1alpha1_gloas_proto_msgTypes[5] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *BeaconBlockGloas) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*BeaconBlockGloas) ProtoMessage() {} + +func (x *BeaconBlockGloas) ProtoReflect() protoreflect.Message { + mi := &file_proto_prysm_v1alpha1_gloas_proto_msgTypes[5] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use BeaconBlockGloas.ProtoReflect.Descriptor instead. 
+func (*BeaconBlockGloas) Descriptor() ([]byte, []int) { + return file_proto_prysm_v1alpha1_gloas_proto_rawDescGZIP(), []int{5} +} + +func (x *BeaconBlockGloas) GetSlot() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot { + if x != nil { + return x.Slot + } + return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot(0) +} + +func (x *BeaconBlockGloas) GetProposerIndex() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex { + if x != nil { + return x.ProposerIndex + } + return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex(0) +} + +func (x *BeaconBlockGloas) GetParentRoot() []byte { + if x != nil { + return x.ParentRoot + } + return nil +} + +func (x *BeaconBlockGloas) GetStateRoot() []byte { + if x != nil { + return x.StateRoot + } + return nil +} + +func (x *BeaconBlockGloas) GetBody() *BeaconBlockBodyGloas { + if x != nil { + return x.Body + } + return nil +} + +type BeaconBlockBodyGloas struct { + state protoimpl.MessageState `protogen:"open.v1"` + RandaoReveal []byte `protobuf:"bytes,1,opt,name=randao_reveal,json=randaoReveal,proto3" json:"randao_reveal,omitempty" ssz-size:"96"` + Eth1Data *Eth1Data `protobuf:"bytes,2,opt,name=eth1_data,json=eth1Data,proto3" json:"eth1_data,omitempty"` + Graffiti []byte `protobuf:"bytes,3,opt,name=graffiti,proto3" json:"graffiti,omitempty" ssz-size:"32"` + ProposerSlashings []*ProposerSlashing `protobuf:"bytes,4,rep,name=proposer_slashings,json=proposerSlashings,proto3" json:"proposer_slashings,omitempty" ssz-max:"16"` + AttesterSlashings []*AttesterSlashingElectra `protobuf:"bytes,5,rep,name=attester_slashings,json=attesterSlashings,proto3" json:"attester_slashings,omitempty" ssz-max:"1"` + Attestations []*AttestationElectra `protobuf:"bytes,6,rep,name=attestations,proto3" json:"attestations,omitempty" ssz-max:"8"` + Deposits []*Deposit `protobuf:"bytes,7,rep,name=deposits,proto3" json:"deposits,omitempty" ssz-max:"16"` + VoluntaryExits []*SignedVoluntaryExit `protobuf:"bytes,8,rep,name=voluntary_exits,json=voluntaryExits,proto3" json:"voluntary_exits,omitempty" ssz-max:"16"` + SyncAggregate *SyncAggregate `protobuf:"bytes,9,opt,name=sync_aggregate,json=syncAggregate,proto3" json:"sync_aggregate,omitempty"` + BlsToExecutionChanges []*SignedBLSToExecutionChange `protobuf:"bytes,10,rep,name=bls_to_execution_changes,json=blsToExecutionChanges,proto3" json:"bls_to_execution_changes,omitempty" ssz-max:"16"` + SignedExecutionPayloadBid *SignedExecutionPayloadBid `protobuf:"bytes,11,opt,name=signed_execution_payload_bid,json=signedExecutionPayloadBid,proto3" json:"signed_execution_payload_bid,omitempty"` + PayloadAttestations []*PayloadAttestation `protobuf:"bytes,12,rep,name=payload_attestations,json=payloadAttestations,proto3" json:"payload_attestations,omitempty" ssz-max:"4"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *BeaconBlockBodyGloas) Reset() { + *x = BeaconBlockBodyGloas{} + mi := &file_proto_prysm_v1alpha1_gloas_proto_msgTypes[6] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *BeaconBlockBodyGloas) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*BeaconBlockBodyGloas) ProtoMessage() {} + +func (x *BeaconBlockBodyGloas) ProtoReflect() protoreflect.Message { + mi := &file_proto_prysm_v1alpha1_gloas_proto_msgTypes[6] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + 
return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use BeaconBlockBodyGloas.ProtoReflect.Descriptor instead. +func (*BeaconBlockBodyGloas) Descriptor() ([]byte, []int) { + return file_proto_prysm_v1alpha1_gloas_proto_rawDescGZIP(), []int{6} +} + +func (x *BeaconBlockBodyGloas) GetRandaoReveal() []byte { + if x != nil { + return x.RandaoReveal + } + return nil +} + +func (x *BeaconBlockBodyGloas) GetEth1Data() *Eth1Data { + if x != nil { + return x.Eth1Data + } + return nil +} + +func (x *BeaconBlockBodyGloas) GetGraffiti() []byte { + if x != nil { + return x.Graffiti + } + return nil +} + +func (x *BeaconBlockBodyGloas) GetProposerSlashings() []*ProposerSlashing { + if x != nil { + return x.ProposerSlashings + } + return nil +} + +func (x *BeaconBlockBodyGloas) GetAttesterSlashings() []*AttesterSlashingElectra { + if x != nil { + return x.AttesterSlashings + } + return nil +} + +func (x *BeaconBlockBodyGloas) GetAttestations() []*AttestationElectra { + if x != nil { + return x.Attestations + } + return nil +} + +func (x *BeaconBlockBodyGloas) GetDeposits() []*Deposit { + if x != nil { + return x.Deposits + } + return nil +} + +func (x *BeaconBlockBodyGloas) GetVoluntaryExits() []*SignedVoluntaryExit { + if x != nil { + return x.VoluntaryExits + } + return nil +} + +func (x *BeaconBlockBodyGloas) GetSyncAggregate() *SyncAggregate { + if x != nil { + return x.SyncAggregate + } + return nil +} + +func (x *BeaconBlockBodyGloas) GetBlsToExecutionChanges() []*SignedBLSToExecutionChange { + if x != nil { + return x.BlsToExecutionChanges + } + return nil +} + +func (x *BeaconBlockBodyGloas) GetSignedExecutionPayloadBid() *SignedExecutionPayloadBid { + if x != nil { + return x.SignedExecutionPayloadBid + } + return nil +} + +func (x *BeaconBlockBodyGloas) GetPayloadAttestations() []*PayloadAttestation { + if x != nil { + return x.PayloadAttestations + } + return nil +} + +type SignedBeaconBlockGloas struct { + state protoimpl.MessageState `protogen:"open.v1"` + Block *BeaconBlockGloas `protobuf:"bytes,1,opt,name=block,proto3" json:"block,omitempty"` + Signature []byte `protobuf:"bytes,2,opt,name=signature,proto3" json:"signature,omitempty" ssz-size:"96"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *SignedBeaconBlockGloas) Reset() { + *x = SignedBeaconBlockGloas{} + mi := &file_proto_prysm_v1alpha1_gloas_proto_msgTypes[7] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *SignedBeaconBlockGloas) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*SignedBeaconBlockGloas) ProtoMessage() {} + +func (x *SignedBeaconBlockGloas) ProtoReflect() protoreflect.Message { + mi := &file_proto_prysm_v1alpha1_gloas_proto_msgTypes[7] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use SignedBeaconBlockGloas.ProtoReflect.Descriptor instead. 
+func (*SignedBeaconBlockGloas) Descriptor() ([]byte, []int) { + return file_proto_prysm_v1alpha1_gloas_proto_rawDescGZIP(), []int{7} +} + +func (x *SignedBeaconBlockGloas) GetBlock() *BeaconBlockGloas { + if x != nil { + return x.Block + } + return nil +} + +func (x *SignedBeaconBlockGloas) GetSignature() []byte { + if x != nil { + return x.Signature + } + return nil +} + +type BeaconStateGloas struct { + state protoimpl.MessageState `protogen:"open.v1"` + GenesisTime uint64 `protobuf:"varint,1001,opt,name=genesis_time,json=genesisTime,proto3" json:"genesis_time,omitempty"` + GenesisValidatorsRoot []byte `protobuf:"bytes,1002,opt,name=genesis_validators_root,json=genesisValidatorsRoot,proto3" json:"genesis_validators_root,omitempty" ssz-size:"32"` + Slot github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot `protobuf:"varint,1003,opt,name=slot,proto3" json:"slot,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot"` + Fork *Fork `protobuf:"bytes,1004,opt,name=fork,proto3" json:"fork,omitempty"` + LatestBlockHeader *BeaconBlockHeader `protobuf:"bytes,2001,opt,name=latest_block_header,json=latestBlockHeader,proto3" json:"latest_block_header,omitempty"` + BlockRoots [][]byte `protobuf:"bytes,2002,rep,name=block_roots,json=blockRoots,proto3" json:"block_roots,omitempty" ssz-size:"8192,32"` + StateRoots [][]byte `protobuf:"bytes,2003,rep,name=state_roots,json=stateRoots,proto3" json:"state_roots,omitempty" ssz-size:"8192,32"` + HistoricalRoots [][]byte `protobuf:"bytes,2004,rep,name=historical_roots,json=historicalRoots,proto3" json:"historical_roots,omitempty" ssz-max:"16777216" ssz-size:"?,32"` + Eth1Data *Eth1Data `protobuf:"bytes,3001,opt,name=eth1_data,json=eth1Data,proto3" json:"eth1_data,omitempty"` + Eth1DataVotes []*Eth1Data `protobuf:"bytes,3002,rep,name=eth1_data_votes,json=eth1DataVotes,proto3" json:"eth1_data_votes,omitempty" ssz-max:"2048"` + Eth1DepositIndex uint64 `protobuf:"varint,3003,opt,name=eth1_deposit_index,json=eth1DepositIndex,proto3" json:"eth1_deposit_index,omitempty"` + Validators []*Validator `protobuf:"bytes,4001,rep,name=validators,proto3" json:"validators,omitempty" ssz-max:"1099511627776"` + Balances []uint64 `protobuf:"varint,4002,rep,packed,name=balances,proto3" json:"balances,omitempty" ssz-max:"1099511627776"` + RandaoMixes [][]byte `protobuf:"bytes,5001,rep,name=randao_mixes,json=randaoMixes,proto3" json:"randao_mixes,omitempty" ssz-size:"65536,32"` + Slashings []uint64 `protobuf:"varint,6001,rep,packed,name=slashings,proto3" json:"slashings,omitempty" ssz-size:"8192"` + PreviousEpochParticipation []byte `protobuf:"bytes,7001,opt,name=previous_epoch_participation,json=previousEpochParticipation,proto3" json:"previous_epoch_participation,omitempty" ssz-max:"1099511627776"` + CurrentEpochParticipation []byte `protobuf:"bytes,7002,opt,name=current_epoch_participation,json=currentEpochParticipation,proto3" json:"current_epoch_participation,omitempty" ssz-max:"1099511627776"` + JustificationBits github_com_OffchainLabs_go_bitfield.Bitvector4 `protobuf:"bytes,8001,opt,name=justification_bits,json=justificationBits,proto3" json:"justification_bits,omitempty" cast-type:"github.com/OffchainLabs/go-bitfield.Bitvector4" ssz-size:"1"` + PreviousJustifiedCheckpoint *Checkpoint `protobuf:"bytes,8002,opt,name=previous_justified_checkpoint,json=previousJustifiedCheckpoint,proto3" json:"previous_justified_checkpoint,omitempty"` + CurrentJustifiedCheckpoint *Checkpoint 
`protobuf:"bytes,8003,opt,name=current_justified_checkpoint,json=currentJustifiedCheckpoint,proto3" json:"current_justified_checkpoint,omitempty"` + FinalizedCheckpoint *Checkpoint `protobuf:"bytes,8004,opt,name=finalized_checkpoint,json=finalizedCheckpoint,proto3" json:"finalized_checkpoint,omitempty"` + InactivityScores []uint64 `protobuf:"varint,9001,rep,packed,name=inactivity_scores,json=inactivityScores,proto3" json:"inactivity_scores,omitempty" ssz-max:"1099511627776"` + CurrentSyncCommittee *SyncCommittee `protobuf:"bytes,9002,opt,name=current_sync_committee,json=currentSyncCommittee,proto3" json:"current_sync_committee,omitempty"` + NextSyncCommittee *SyncCommittee `protobuf:"bytes,9003,opt,name=next_sync_committee,json=nextSyncCommittee,proto3" json:"next_sync_committee,omitempty"` + LatestExecutionPayloadBid *ExecutionPayloadBid `protobuf:"bytes,10001,opt,name=latest_execution_payload_bid,json=latestExecutionPayloadBid,proto3" json:"latest_execution_payload_bid,omitempty"` + NextWithdrawalIndex uint64 `protobuf:"varint,11001,opt,name=next_withdrawal_index,json=nextWithdrawalIndex,proto3" json:"next_withdrawal_index,omitempty"` + NextWithdrawalValidatorIndex github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex `protobuf:"varint,11002,opt,name=next_withdrawal_validator_index,json=nextWithdrawalValidatorIndex,proto3" json:"next_withdrawal_validator_index,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.ValidatorIndex"` + HistoricalSummaries []*HistoricalSummary `protobuf:"bytes,11003,rep,name=historical_summaries,json=historicalSummaries,proto3" json:"historical_summaries,omitempty" ssz-max:"16777216"` + DepositRequestsStartIndex uint64 `protobuf:"varint,12001,opt,name=deposit_requests_start_index,json=depositRequestsStartIndex,proto3" json:"deposit_requests_start_index,omitempty"` + DepositBalanceToConsume github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Gwei `protobuf:"varint,12002,opt,name=deposit_balance_to_consume,json=depositBalanceToConsume,proto3" json:"deposit_balance_to_consume,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Gwei"` + ExitBalanceToConsume github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Gwei `protobuf:"varint,12003,opt,name=exit_balance_to_consume,json=exitBalanceToConsume,proto3" json:"exit_balance_to_consume,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Gwei"` + EarliestExitEpoch github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch `protobuf:"varint,12004,opt,name=earliest_exit_epoch,json=earliestExitEpoch,proto3" json:"earliest_exit_epoch,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Epoch"` + ConsolidationBalanceToConsume github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Gwei `protobuf:"varint,12005,opt,name=consolidation_balance_to_consume,json=consolidationBalanceToConsume,proto3" json:"consolidation_balance_to_consume,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Gwei"` + EarliestConsolidationEpoch github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch `protobuf:"varint,12006,opt,name=earliest_consolidation_epoch,json=earliestConsolidationEpoch,proto3" json:"earliest_consolidation_epoch,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Epoch"` + PendingDeposits []*PendingDeposit `protobuf:"bytes,12007,rep,name=pending_deposits,json=pendingDeposits,proto3" 
json:"pending_deposits,omitempty" ssz-max:"134217728"` + PendingPartialWithdrawals []*PendingPartialWithdrawal `protobuf:"bytes,12008,rep,name=pending_partial_withdrawals,json=pendingPartialWithdrawals,proto3" json:"pending_partial_withdrawals,omitempty" ssz-max:"134217728"` + PendingConsolidations []*PendingConsolidation `protobuf:"bytes,12009,rep,name=pending_consolidations,json=pendingConsolidations,proto3" json:"pending_consolidations,omitempty" ssz-max:"262144"` + ProposerLookahead []uint64 `protobuf:"varint,13001,rep,packed,name=proposer_lookahead,json=proposerLookahead,proto3" json:"proposer_lookahead,omitempty" ssz-size:"64"` + ExecutionPayloadAvailability []byte `protobuf:"bytes,14001,opt,name=execution_payload_availability,json=executionPayloadAvailability,proto3" json:"execution_payload_availability,omitempty" ssz-size:"1024"` + BuilderPendingPayments []*BuilderPendingPayment `protobuf:"bytes,14002,rep,name=builder_pending_payments,json=builderPendingPayments,proto3" json:"builder_pending_payments,omitempty" ssz-size:"64"` + BuilderPendingWithdrawals []*BuilderPendingWithdrawal `protobuf:"bytes,14003,rep,name=builder_pending_withdrawals,json=builderPendingWithdrawals,proto3" json:"builder_pending_withdrawals,omitempty" ssz-max:"1048576"` + LatestBlockHash []byte `protobuf:"bytes,14004,opt,name=latest_block_hash,json=latestBlockHash,proto3" json:"latest_block_hash,omitempty" ssz-size:"32"` + LatestWithdrawalsRoot []byte `protobuf:"bytes,14005,opt,name=latest_withdrawals_root,json=latestWithdrawalsRoot,proto3" json:"latest_withdrawals_root,omitempty" ssz-size:"32"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *BeaconStateGloas) Reset() { + *x = BeaconStateGloas{} + mi := &file_proto_prysm_v1alpha1_gloas_proto_msgTypes[8] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *BeaconStateGloas) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*BeaconStateGloas) ProtoMessage() {} + +func (x *BeaconStateGloas) ProtoReflect() protoreflect.Message { + mi := &file_proto_prysm_v1alpha1_gloas_proto_msgTypes[8] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use BeaconStateGloas.ProtoReflect.Descriptor instead. 
+func (*BeaconStateGloas) Descriptor() ([]byte, []int) { + return file_proto_prysm_v1alpha1_gloas_proto_rawDescGZIP(), []int{8} +} + +func (x *BeaconStateGloas) GetGenesisTime() uint64 { + if x != nil { + return x.GenesisTime + } + return 0 +} + +func (x *BeaconStateGloas) GetGenesisValidatorsRoot() []byte { + if x != nil { + return x.GenesisValidatorsRoot + } + return nil +} + +func (x *BeaconStateGloas) GetSlot() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot { + if x != nil { + return x.Slot + } + return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot(0) +} + +func (x *BeaconStateGloas) GetFork() *Fork { + if x != nil { + return x.Fork + } + return nil +} + +func (x *BeaconStateGloas) GetLatestBlockHeader() *BeaconBlockHeader { + if x != nil { + return x.LatestBlockHeader + } + return nil +} + +func (x *BeaconStateGloas) GetBlockRoots() [][]byte { + if x != nil { + return x.BlockRoots + } + return nil +} + +func (x *BeaconStateGloas) GetStateRoots() [][]byte { + if x != nil { + return x.StateRoots + } + return nil +} + +func (x *BeaconStateGloas) GetHistoricalRoots() [][]byte { + if x != nil { + return x.HistoricalRoots + } + return nil +} + +func (x *BeaconStateGloas) GetEth1Data() *Eth1Data { + if x != nil { + return x.Eth1Data + } + return nil +} + +func (x *BeaconStateGloas) GetEth1DataVotes() []*Eth1Data { + if x != nil { + return x.Eth1DataVotes + } + return nil +} + +func (x *BeaconStateGloas) GetEth1DepositIndex() uint64 { + if x != nil { + return x.Eth1DepositIndex + } + return 0 +} + +func (x *BeaconStateGloas) GetValidators() []*Validator { + if x != nil { + return x.Validators + } + return nil +} + +func (x *BeaconStateGloas) GetBalances() []uint64 { + if x != nil { + return x.Balances + } + return nil +} + +func (x *BeaconStateGloas) GetRandaoMixes() [][]byte { + if x != nil { + return x.RandaoMixes + } + return nil +} + +func (x *BeaconStateGloas) GetSlashings() []uint64 { + if x != nil { + return x.Slashings + } + return nil +} + +func (x *BeaconStateGloas) GetPreviousEpochParticipation() []byte { + if x != nil { + return x.PreviousEpochParticipation + } + return nil +} + +func (x *BeaconStateGloas) GetCurrentEpochParticipation() []byte { + if x != nil { + return x.CurrentEpochParticipation + } + return nil +} + +func (x *BeaconStateGloas) GetJustificationBits() github_com_OffchainLabs_go_bitfield.Bitvector4 { + if x != nil { + return x.JustificationBits + } + return github_com_OffchainLabs_go_bitfield.Bitvector4(nil) +} + +func (x *BeaconStateGloas) GetPreviousJustifiedCheckpoint() *Checkpoint { + if x != nil { + return x.PreviousJustifiedCheckpoint + } + return nil +} + +func (x *BeaconStateGloas) GetCurrentJustifiedCheckpoint() *Checkpoint { + if x != nil { + return x.CurrentJustifiedCheckpoint + } + return nil +} + +func (x *BeaconStateGloas) GetFinalizedCheckpoint() *Checkpoint { + if x != nil { + return x.FinalizedCheckpoint + } + return nil +} + +func (x *BeaconStateGloas) GetInactivityScores() []uint64 { + if x != nil { + return x.InactivityScores + } + return nil +} + +func (x *BeaconStateGloas) GetCurrentSyncCommittee() *SyncCommittee { + if x != nil { + return x.CurrentSyncCommittee + } + return nil +} + +func (x *BeaconStateGloas) GetNextSyncCommittee() *SyncCommittee { + if x != nil { + return x.NextSyncCommittee + } + return nil +} + +func (x *BeaconStateGloas) GetLatestExecutionPayloadBid() *ExecutionPayloadBid { + if x != nil { + return x.LatestExecutionPayloadBid + } + return nil +} + +func (x *BeaconStateGloas) 
GetNextWithdrawalIndex() uint64 { + if x != nil { + return x.NextWithdrawalIndex + } + return 0 +} + +func (x *BeaconStateGloas) GetNextWithdrawalValidatorIndex() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex { + if x != nil { + return x.NextWithdrawalValidatorIndex + } + return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex(0) +} + +func (x *BeaconStateGloas) GetHistoricalSummaries() []*HistoricalSummary { + if x != nil { + return x.HistoricalSummaries + } + return nil +} + +func (x *BeaconStateGloas) GetDepositRequestsStartIndex() uint64 { + if x != nil { + return x.DepositRequestsStartIndex + } + return 0 +} + +func (x *BeaconStateGloas) GetDepositBalanceToConsume() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Gwei { + if x != nil { + return x.DepositBalanceToConsume + } + return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Gwei(0) +} + +func (x *BeaconStateGloas) GetExitBalanceToConsume() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Gwei { + if x != nil { + return x.ExitBalanceToConsume + } + return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Gwei(0) +} + +func (x *BeaconStateGloas) GetEarliestExitEpoch() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch { + if x != nil { + return x.EarliestExitEpoch + } + return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch(0) +} + +func (x *BeaconStateGloas) GetConsolidationBalanceToConsume() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Gwei { + if x != nil { + return x.ConsolidationBalanceToConsume + } + return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Gwei(0) +} + +func (x *BeaconStateGloas) GetEarliestConsolidationEpoch() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch { + if x != nil { + return x.EarliestConsolidationEpoch + } + return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch(0) +} + +func (x *BeaconStateGloas) GetPendingDeposits() []*PendingDeposit { + if x != nil { + return x.PendingDeposits + } + return nil +} + +func (x *BeaconStateGloas) GetPendingPartialWithdrawals() []*PendingPartialWithdrawal { + if x != nil { + return x.PendingPartialWithdrawals + } + return nil +} + +func (x *BeaconStateGloas) GetPendingConsolidations() []*PendingConsolidation { + if x != nil { + return x.PendingConsolidations + } + return nil +} + +func (x *BeaconStateGloas) GetProposerLookahead() []uint64 { + if x != nil { + return x.ProposerLookahead + } + return nil +} + +func (x *BeaconStateGloas) GetExecutionPayloadAvailability() []byte { + if x != nil { + return x.ExecutionPayloadAvailability + } + return nil +} + +func (x *BeaconStateGloas) GetBuilderPendingPayments() []*BuilderPendingPayment { + if x != nil { + return x.BuilderPendingPayments + } + return nil +} + +func (x *BeaconStateGloas) GetBuilderPendingWithdrawals() []*BuilderPendingWithdrawal { + if x != nil { + return x.BuilderPendingWithdrawals + } + return nil +} + +func (x *BeaconStateGloas) GetLatestBlockHash() []byte { + if x != nil { + return x.LatestBlockHash + } + return nil +} + +func (x *BeaconStateGloas) GetLatestWithdrawalsRoot() []byte { + if x != nil { + return x.LatestWithdrawalsRoot + } + return nil +} + +type BuilderPendingPayment struct { + state protoimpl.MessageState `protogen:"open.v1"` + Weight github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Gwei `protobuf:"varint,1,opt,name=weight,proto3" json:"weight,omitempty" 
cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Gwei"` + Withdrawal *BuilderPendingWithdrawal `protobuf:"bytes,2,opt,name=withdrawal,proto3" json:"withdrawal,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *BuilderPendingPayment) Reset() { + *x = BuilderPendingPayment{} + mi := &file_proto_prysm_v1alpha1_gloas_proto_msgTypes[9] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *BuilderPendingPayment) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*BuilderPendingPayment) ProtoMessage() {} + +func (x *BuilderPendingPayment) ProtoReflect() protoreflect.Message { + mi := &file_proto_prysm_v1alpha1_gloas_proto_msgTypes[9] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use BuilderPendingPayment.ProtoReflect.Descriptor instead. +func (*BuilderPendingPayment) Descriptor() ([]byte, []int) { + return file_proto_prysm_v1alpha1_gloas_proto_rawDescGZIP(), []int{9} +} + +func (x *BuilderPendingPayment) GetWeight() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Gwei { + if x != nil { + return x.Weight + } + return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Gwei(0) +} + +func (x *BuilderPendingPayment) GetWithdrawal() *BuilderPendingWithdrawal { + if x != nil { + return x.Withdrawal + } + return nil +} + +type BuilderPendingWithdrawal struct { + state protoimpl.MessageState `protogen:"open.v1"` + FeeRecipient []byte `protobuf:"bytes,1,opt,name=fee_recipient,json=feeRecipient,proto3" json:"fee_recipient,omitempty" ssz-size:"20"` + Amount github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Gwei `protobuf:"varint,2,opt,name=amount,proto3" json:"amount,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Gwei"` + BuilderIndex github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex `protobuf:"varint,3,opt,name=builder_index,json=builderIndex,proto3" json:"builder_index,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.ValidatorIndex"` + WithdrawableEpoch github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch `protobuf:"varint,4,opt,name=withdrawable_epoch,json=withdrawableEpoch,proto3" json:"withdrawable_epoch,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Epoch"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *BuilderPendingWithdrawal) Reset() { + *x = BuilderPendingWithdrawal{} + mi := &file_proto_prysm_v1alpha1_gloas_proto_msgTypes[10] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *BuilderPendingWithdrawal) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*BuilderPendingWithdrawal) ProtoMessage() {} + +func (x *BuilderPendingWithdrawal) ProtoReflect() protoreflect.Message { + mi := &file_proto_prysm_v1alpha1_gloas_proto_msgTypes[10] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use BuilderPendingWithdrawal.ProtoReflect.Descriptor instead. 
+func (*BuilderPendingWithdrawal) Descriptor() ([]byte, []int) { + return file_proto_prysm_v1alpha1_gloas_proto_rawDescGZIP(), []int{10} +} + +func (x *BuilderPendingWithdrawal) GetFeeRecipient() []byte { + if x != nil { + return x.FeeRecipient + } + return nil +} + +func (x *BuilderPendingWithdrawal) GetAmount() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Gwei { + if x != nil { + return x.Amount + } + return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Gwei(0) +} + +func (x *BuilderPendingWithdrawal) GetBuilderIndex() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex { + if x != nil { + return x.BuilderIndex + } + return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex(0) +} + +func (x *BuilderPendingWithdrawal) GetWithdrawableEpoch() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch { + if x != nil { + return x.WithdrawableEpoch + } + return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch(0) +} + +type DataColumnSidecarGloas struct { + state protoimpl.MessageState `protogen:"open.v1"` + Index uint64 `protobuf:"varint,1,opt,name=index,proto3" json:"index,omitempty"` + Column [][]byte `protobuf:"bytes,2,rep,name=column,proto3" json:"column,omitempty" ssz-max:"4096" ssz-size:"?,2048"` + KzgCommitments [][]byte `protobuf:"bytes,3,rep,name=kzg_commitments,json=kzgCommitments,proto3" json:"kzg_commitments,omitempty" ssz-max:"4096" ssz-size:"?,48"` + KzgProofs [][]byte `protobuf:"bytes,4,rep,name=kzg_proofs,json=kzgProofs,proto3" json:"kzg_proofs,omitempty" ssz-max:"4096" ssz-size:"?,48"` + Slot github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot `protobuf:"varint,5,opt,name=slot,proto3" json:"slot,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot"` + BeaconBlockRoot []byte `protobuf:"bytes,6,opt,name=beacon_block_root,json=beaconBlockRoot,proto3" json:"beacon_block_root,omitempty" ssz-size:"32"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *DataColumnSidecarGloas) Reset() { + *x = DataColumnSidecarGloas{} + mi := &file_proto_prysm_v1alpha1_gloas_proto_msgTypes[11] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *DataColumnSidecarGloas) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*DataColumnSidecarGloas) ProtoMessage() {} + +func (x *DataColumnSidecarGloas) ProtoReflect() protoreflect.Message { + mi := &file_proto_prysm_v1alpha1_gloas_proto_msgTypes[11] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use DataColumnSidecarGloas.ProtoReflect.Descriptor instead. 
+func (*DataColumnSidecarGloas) Descriptor() ([]byte, []int) { + return file_proto_prysm_v1alpha1_gloas_proto_rawDescGZIP(), []int{11} +} + +func (x *DataColumnSidecarGloas) GetIndex() uint64 { + if x != nil { + return x.Index + } + return 0 +} + +func (x *DataColumnSidecarGloas) GetColumn() [][]byte { + if x != nil { + return x.Column + } + return nil +} + +func (x *DataColumnSidecarGloas) GetKzgCommitments() [][]byte { + if x != nil { + return x.KzgCommitments + } + return nil +} + +func (x *DataColumnSidecarGloas) GetKzgProofs() [][]byte { + if x != nil { + return x.KzgProofs + } + return nil +} + +func (x *DataColumnSidecarGloas) GetSlot() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot { + if x != nil { + return x.Slot + } + return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot(0) +} + +func (x *DataColumnSidecarGloas) GetBeaconBlockRoot() []byte { + if x != nil { + return x.BeaconBlockRoot + } + return nil +} + +type ExecutionPayloadEnvelope struct { + state protoimpl.MessageState `protogen:"open.v1"` + Payload *v1.ExecutionPayloadDeneb `protobuf:"bytes,1,opt,name=payload,proto3" json:"payload,omitempty"` + ExecutionRequests *v1.ExecutionRequests `protobuf:"bytes,2,opt,name=execution_requests,json=executionRequests,proto3" json:"execution_requests,omitempty"` + BuilderIndex github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex `protobuf:"varint,3,opt,name=builder_index,json=builderIndex,proto3" json:"builder_index,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.ValidatorIndex"` + BeaconBlockRoot []byte `protobuf:"bytes,4,opt,name=beacon_block_root,json=beaconBlockRoot,proto3" json:"beacon_block_root,omitempty" ssz-size:"32"` + Slot github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot `protobuf:"varint,5,opt,name=slot,proto3" json:"slot,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot"` + BlobKzgCommitments [][]byte `protobuf:"bytes,6,rep,name=blob_kzg_commitments,json=blobKzgCommitments,proto3" json:"blob_kzg_commitments,omitempty" ssz-max:"4096" ssz-size:"?,48"` + StateRoot []byte `protobuf:"bytes,7,opt,name=state_root,json=stateRoot,proto3" json:"state_root,omitempty" ssz-size:"32"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *ExecutionPayloadEnvelope) Reset() { + *x = ExecutionPayloadEnvelope{} + mi := &file_proto_prysm_v1alpha1_gloas_proto_msgTypes[12] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *ExecutionPayloadEnvelope) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ExecutionPayloadEnvelope) ProtoMessage() {} + +func (x *ExecutionPayloadEnvelope) ProtoReflect() protoreflect.Message { + mi := &file_proto_prysm_v1alpha1_gloas_proto_msgTypes[12] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ExecutionPayloadEnvelope.ProtoReflect.Descriptor instead. 
+func (*ExecutionPayloadEnvelope) Descriptor() ([]byte, []int) { + return file_proto_prysm_v1alpha1_gloas_proto_rawDescGZIP(), []int{12} +} + +func (x *ExecutionPayloadEnvelope) GetPayload() *v1.ExecutionPayloadDeneb { + if x != nil { + return x.Payload + } + return nil +} + +func (x *ExecutionPayloadEnvelope) GetExecutionRequests() *v1.ExecutionRequests { + if x != nil { + return x.ExecutionRequests + } + return nil +} + +func (x *ExecutionPayloadEnvelope) GetBuilderIndex() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex { + if x != nil { + return x.BuilderIndex + } + return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex(0) +} + +func (x *ExecutionPayloadEnvelope) GetBeaconBlockRoot() []byte { + if x != nil { + return x.BeaconBlockRoot + } + return nil +} + +func (x *ExecutionPayloadEnvelope) GetSlot() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot { + if x != nil { + return x.Slot + } + return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot(0) +} + +func (x *ExecutionPayloadEnvelope) GetBlobKzgCommitments() [][]byte { + if x != nil { + return x.BlobKzgCommitments + } + return nil +} + +func (x *ExecutionPayloadEnvelope) GetStateRoot() []byte { + if x != nil { + return x.StateRoot + } + return nil +} + +type SignedExecutionPayloadEnvelope struct { + state protoimpl.MessageState `protogen:"open.v1"` + Message *ExecutionPayloadEnvelope `protobuf:"bytes,1,opt,name=message,proto3" json:"message,omitempty"` + Signature []byte `protobuf:"bytes,2,opt,name=signature,proto3" json:"signature,omitempty" ssz-size:"96"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *SignedExecutionPayloadEnvelope) Reset() { + *x = SignedExecutionPayloadEnvelope{} + mi := &file_proto_prysm_v1alpha1_gloas_proto_msgTypes[13] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *SignedExecutionPayloadEnvelope) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*SignedExecutionPayloadEnvelope) ProtoMessage() {} + +func (x *SignedExecutionPayloadEnvelope) ProtoReflect() protoreflect.Message { + mi := &file_proto_prysm_v1alpha1_gloas_proto_msgTypes[13] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use SignedExecutionPayloadEnvelope.ProtoReflect.Descriptor instead. 
+func (*SignedExecutionPayloadEnvelope) Descriptor() ([]byte, []int) { + return file_proto_prysm_v1alpha1_gloas_proto_rawDescGZIP(), []int{13} +} + +func (x *SignedExecutionPayloadEnvelope) GetMessage() *ExecutionPayloadEnvelope { + if x != nil { + return x.Message + } + return nil +} + +func (x *SignedExecutionPayloadEnvelope) GetSignature() []byte { + if x != nil { + return x.Signature + } + return nil +} + +var File_proto_prysm_v1alpha1_gloas_proto protoreflect.FileDescriptor + +var file_proto_prysm_v1alpha1_gloas_proto_rawDesc = []byte{ + 0x0a, 0x20, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x31, + 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2f, 0x67, 0x6c, 0x6f, 0x61, 0x73, 0x2e, 0x70, 0x72, 0x6f, + 0x74, 0x6f, 0x12, 0x15, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, + 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x1a, 0x26, 0x70, 0x72, 0x6f, 0x74, 0x6f, + 0x2f, 0x65, 0x6e, 0x67, 0x69, 0x6e, 0x65, 0x2f, 0x76, 0x31, 0x2f, 0x65, 0x78, 0x65, 0x63, 0x75, + 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x65, 0x6e, 0x67, 0x69, 0x6e, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, + 0x6f, 0x1a, 0x1d, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2f, 0x65, 0x6e, 0x67, 0x69, 0x6e, 0x65, 0x2f, + 0x76, 0x31, 0x2f, 0x65, 0x6c, 0x65, 0x63, 0x74, 0x72, 0x61, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, + 0x1a, 0x1b, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2f, 0x65, 0x74, 0x68, 0x2f, 0x65, 0x78, 0x74, 0x2f, + 0x6f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x26, 0x70, + 0x72, 0x6f, 0x74, 0x6f, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x31, 0x61, 0x6c, 0x70, + 0x68, 0x61, 0x31, 0x2f, 0x61, 0x74, 0x74, 0x65, 0x73, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x2e, + 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x26, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2f, 0x70, 0x72, 0x79, + 0x73, 0x6d, 0x2f, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2f, 0x77, 0x69, 0x74, 0x68, + 0x64, 0x72, 0x61, 0x77, 0x61, 0x6c, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x2c, 0x70, + 0x72, 0x6f, 0x74, 0x6f, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x31, 0x61, 0x6c, 0x70, + 0x68, 0x61, 0x31, 0x2f, 0x62, 0x65, 0x61, 0x63, 0x6f, 0x6e, 0x5f, 0x63, 0x6f, 0x72, 0x65, 0x5f, + 0x74, 0x79, 0x70, 0x65, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x23, 0x70, 0x72, 0x6f, + 0x74, 0x6f, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, + 0x31, 0x2f, 0x65, 0x69, 0x70, 0x5f, 0x37, 0x32, 0x35, 0x31, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, + 0x22, 0xdc, 0x04, 0x0a, 0x13, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x50, 0x61, + 0x79, 0x6c, 0x6f, 0x61, 0x64, 0x42, 0x69, 0x64, 0x12, 0x32, 0x0a, 0x11, 0x70, 0x61, 0x72, 0x65, + 0x6e, 0x74, 0x5f, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x5f, 0x68, 0x61, 0x73, 0x68, 0x18, 0x01, 0x20, + 0x01, 0x28, 0x0c, 0x42, 0x06, 0x8a, 0xb5, 0x18, 0x02, 0x33, 0x32, 0x52, 0x0f, 0x70, 0x61, 0x72, + 0x65, 0x6e, 0x74, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x48, 0x61, 0x73, 0x68, 0x12, 0x32, 0x0a, 0x11, + 0x70, 0x61, 0x72, 0x65, 0x6e, 0x74, 0x5f, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x5f, 0x72, 0x6f, 0x6f, + 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x06, 0x8a, 0xb5, 0x18, 0x02, 0x33, 0x32, 0x52, + 0x0f, 0x70, 0x61, 0x72, 0x65, 0x6e, 0x74, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x52, 0x6f, 0x6f, 0x74, + 0x12, 0x25, 0x0a, 0x0a, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x5f, 0x68, 0x61, 0x73, 0x68, 0x18, 0x03, + 0x20, 0x01, 0x28, 0x0c, 0x42, 0x06, 0x8a, 0xb5, 0x18, 0x02, 0x33, 0x32, 0x52, 0x09, 0x62, 0x6c, + 0x6f, 0x63, 0x6b, 0x48, 0x61, 0x73, 0x68, 0x12, 0x2b, 0x0a, 0x0d, 
0x66, 0x65, 0x65, 0x5f, 0x72, + 0x65, 0x63, 0x69, 0x70, 0x69, 0x65, 0x6e, 0x74, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x06, + 0x8a, 0xb5, 0x18, 0x02, 0x32, 0x30, 0x52, 0x0c, 0x66, 0x65, 0x65, 0x52, 0x65, 0x63, 0x69, 0x70, + 0x69, 0x65, 0x6e, 0x74, 0x12, 0x1b, 0x0a, 0x09, 0x67, 0x61, 0x73, 0x5f, 0x6c, 0x69, 0x6d, 0x69, + 0x74, 0x18, 0x05, 0x20, 0x01, 0x28, 0x04, 0x52, 0x08, 0x67, 0x61, 0x73, 0x4c, 0x69, 0x6d, 0x69, + 0x74, 0x12, 0x73, 0x0a, 0x0d, 0x62, 0x75, 0x69, 0x6c, 0x64, 0x65, 0x72, 0x5f, 0x69, 0x6e, 0x64, + 0x65, 0x78, 0x18, 0x06, 0x20, 0x01, 0x28, 0x04, 0x42, 0x4e, 0x82, 0xb5, 0x18, 0x4a, 0x67, 0x69, + 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, + 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, + 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, + 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x56, 0x61, 0x6c, 0x69, 0x64, 0x61, + 0x74, 0x6f, 0x72, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x52, 0x0c, 0x62, 0x75, 0x69, 0x6c, 0x64, 0x65, + 0x72, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x12, 0x58, 0x0a, 0x04, 0x73, 0x6c, 0x6f, 0x74, 0x18, 0x07, + 0x20, 0x01, 0x28, 0x04, 0x42, 0x44, 0x82, 0xb5, 0x18, 0x40, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, + 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, + 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, + 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, + 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x53, 0x6c, 0x6f, 0x74, 0x52, 0x04, 0x73, 0x6c, 0x6f, 0x74, + 0x12, 0x5a, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x08, 0x20, 0x01, 0x28, 0x04, 0x42, + 0x44, 0x82, 0xb5, 0x18, 0x40, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, + 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, + 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, + 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, + 0x2e, 0x47, 0x77, 0x65, 0x69, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x12, 0x41, 0x0a, 0x19, + 0x62, 0x6c, 0x6f, 0x62, 0x5f, 0x6b, 0x7a, 0x67, 0x5f, 0x63, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x6d, + 0x65, 0x6e, 0x74, 0x73, 0x5f, 0x72, 0x6f, 0x6f, 0x74, 0x18, 0x09, 0x20, 0x01, 0x28, 0x0c, 0x42, + 0x06, 0x8a, 0xb5, 0x18, 0x02, 0x33, 0x32, 0x52, 0x16, 0x62, 0x6c, 0x6f, 0x62, 0x4b, 0x7a, 0x67, + 0x43, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x52, 0x6f, 0x6f, 0x74, 0x22, + 0x87, 0x01, 0x0a, 0x19, 0x53, 0x69, 0x67, 0x6e, 0x65, 0x64, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, + 0x69, 0x6f, 0x6e, 0x50, 0x61, 0x79, 0x6c, 0x6f, 0x61, 0x64, 0x42, 0x69, 0x64, 0x12, 0x44, 0x0a, + 0x07, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x2a, + 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, + 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, + 0x50, 0x61, 0x79, 0x6c, 0x6f, 0x61, 0x64, 0x42, 0x69, 0x64, 0x52, 0x07, 0x6d, 0x65, 0x73, 0x73, + 0x61, 0x67, 0x65, 0x12, 0x24, 0x0a, 0x09, 0x73, 0x69, 0x67, 0x6e, 0x61, 0x74, 0x75, 0x72, 0x65, + 0x18, 0x02, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x06, 0x8a, 0xb5, 0x18, 0x02, 0x39, 0x36, 0x52, 0x09, + 0x73, 0x69, 0x67, 0x6e, 0x61, 0x74, 0x75, 0x72, 0x65, 0x22, 0xff, 0x01, 0x0a, 0x16, 0x50, 
0x61, + 0x79, 0x6c, 0x6f, 0x61, 0x64, 0x41, 0x74, 0x74, 0x65, 0x73, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, + 0x44, 0x61, 0x74, 0x61, 0x12, 0x32, 0x0a, 0x11, 0x62, 0x65, 0x61, 0x63, 0x6f, 0x6e, 0x5f, 0x62, + 0x6c, 0x6f, 0x63, 0x6b, 0x5f, 0x72, 0x6f, 0x6f, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x42, + 0x06, 0x8a, 0xb5, 0x18, 0x02, 0x33, 0x32, 0x52, 0x0f, 0x62, 0x65, 0x61, 0x63, 0x6f, 0x6e, 0x42, + 0x6c, 0x6f, 0x63, 0x6b, 0x52, 0x6f, 0x6f, 0x74, 0x12, 0x58, 0x0a, 0x04, 0x73, 0x6c, 0x6f, 0x74, + 0x18, 0x02, 0x20, 0x01, 0x28, 0x04, 0x42, 0x44, 0x82, 0xb5, 0x18, 0x40, 0x67, 0x69, 0x74, 0x68, + 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, + 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, + 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, + 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x53, 0x6c, 0x6f, 0x74, 0x52, 0x04, 0x73, 0x6c, + 0x6f, 0x74, 0x12, 0x27, 0x0a, 0x0f, 0x70, 0x61, 0x79, 0x6c, 0x6f, 0x61, 0x64, 0x5f, 0x70, 0x72, + 0x65, 0x73, 0x65, 0x6e, 0x74, 0x18, 0x03, 0x20, 0x01, 0x28, 0x08, 0x52, 0x0e, 0x70, 0x61, 0x79, + 0x6c, 0x6f, 0x61, 0x64, 0x50, 0x72, 0x65, 0x73, 0x65, 0x6e, 0x74, 0x12, 0x2e, 0x0a, 0x13, 0x62, + 0x6c, 0x6f, 0x62, 0x5f, 0x64, 0x61, 0x74, 0x61, 0x5f, 0x61, 0x76, 0x61, 0x69, 0x6c, 0x61, 0x62, + 0x6c, 0x65, 0x18, 0x04, 0x20, 0x01, 0x28, 0x08, 0x52, 0x11, 0x62, 0x6c, 0x6f, 0x62, 0x44, 0x61, + 0x74, 0x61, 0x41, 0x76, 0x61, 0x69, 0x6c, 0x61, 0x62, 0x6c, 0x65, 0x22, 0xe4, 0x01, 0x0a, 0x12, + 0x50, 0x61, 0x79, 0x6c, 0x6f, 0x61, 0x64, 0x41, 0x74, 0x74, 0x65, 0x73, 0x74, 0x61, 0x74, 0x69, + 0x6f, 0x6e, 0x12, 0x65, 0x0a, 0x10, 0x61, 0x67, 0x67, 0x72, 0x65, 0x67, 0x61, 0x74, 0x69, 0x6f, + 0x6e, 0x5f, 0x62, 0x69, 0x74, 0x73, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x3a, 0x82, 0xb5, + 0x18, 0x30, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, + 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x67, 0x6f, 0x2d, 0x62, 0x69, 0x74, + 0x66, 0x69, 0x65, 0x6c, 0x64, 0x2e, 0x42, 0x69, 0x74, 0x76, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x35, + 0x31, 0x32, 0x8a, 0xb5, 0x18, 0x02, 0x36, 0x34, 0x52, 0x0f, 0x61, 0x67, 0x67, 0x72, 0x65, 0x67, + 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x42, 0x69, 0x74, 0x73, 0x12, 0x41, 0x0a, 0x04, 0x64, 0x61, 0x74, + 0x61, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x2d, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, + 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, + 0x50, 0x61, 0x79, 0x6c, 0x6f, 0x61, 0x64, 0x41, 0x74, 0x74, 0x65, 0x73, 0x74, 0x61, 0x74, 0x69, + 0x6f, 0x6e, 0x44, 0x61, 0x74, 0x61, 0x52, 0x04, 0x64, 0x61, 0x74, 0x61, 0x12, 0x24, 0x0a, 0x09, + 0x73, 0x69, 0x67, 0x6e, 0x61, 0x74, 0x75, 0x72, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0c, 0x42, + 0x06, 0x8a, 0xb5, 0x18, 0x02, 0x39, 0x36, 0x52, 0x09, 0x73, 0x69, 0x67, 0x6e, 0x61, 0x74, 0x75, + 0x72, 0x65, 0x22, 0xfd, 0x01, 0x0a, 0x19, 0x50, 0x61, 0x79, 0x6c, 0x6f, 0x61, 0x64, 0x41, 0x74, + 0x74, 0x65, 0x73, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, + 0x12, 0x77, 0x0a, 0x0f, 0x76, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, 0x5f, 0x69, 0x6e, + 0x64, 0x65, 0x78, 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, 0x42, 0x4e, 0x82, 0xb5, 0x18, 0x4a, 0x67, + 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, + 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, + 0x63, 0x6f, 0x6e, 
0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, + 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x56, 0x61, 0x6c, 0x69, 0x64, + 0x61, 0x74, 0x6f, 0x72, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x52, 0x0e, 0x76, 0x61, 0x6c, 0x69, 0x64, + 0x61, 0x74, 0x6f, 0x72, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x12, 0x41, 0x0a, 0x04, 0x64, 0x61, 0x74, + 0x61, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x2d, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, + 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, + 0x50, 0x61, 0x79, 0x6c, 0x6f, 0x61, 0x64, 0x41, 0x74, 0x74, 0x65, 0x73, 0x74, 0x61, 0x74, 0x69, + 0x6f, 0x6e, 0x44, 0x61, 0x74, 0x61, 0x52, 0x04, 0x64, 0x61, 0x74, 0x61, 0x12, 0x24, 0x0a, 0x09, + 0x73, 0x69, 0x67, 0x6e, 0x61, 0x74, 0x75, 0x72, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0c, 0x42, + 0x06, 0x8a, 0xb5, 0x18, 0x02, 0x39, 0x36, 0x52, 0x09, 0x73, 0x69, 0x67, 0x6e, 0x61, 0x74, 0x75, + 0x72, 0x65, 0x22, 0xf4, 0x02, 0x0a, 0x10, 0x42, 0x65, 0x61, 0x63, 0x6f, 0x6e, 0x42, 0x6c, 0x6f, + 0x63, 0x6b, 0x47, 0x6c, 0x6f, 0x61, 0x73, 0x12, 0x58, 0x0a, 0x04, 0x73, 0x6c, 0x6f, 0x74, 0x18, + 0x01, 0x20, 0x01, 0x28, 0x04, 0x42, 0x44, 0x82, 0xb5, 0x18, 0x40, 0x67, 0x69, 0x74, 0x68, 0x75, + 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, + 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, + 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, + 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x53, 0x6c, 0x6f, 0x74, 0x52, 0x04, 0x73, 0x6c, 0x6f, + 0x74, 0x12, 0x75, 0x0a, 0x0e, 0x70, 0x72, 0x6f, 0x70, 0x6f, 0x73, 0x65, 0x72, 0x5f, 0x69, 0x6e, + 0x64, 0x65, 0x78, 0x18, 0x02, 0x20, 0x01, 0x28, 0x04, 0x42, 0x4e, 0x82, 0xb5, 0x18, 0x4a, 0x67, + 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, + 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, + 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, + 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x56, 0x61, 0x6c, 0x69, 0x64, + 0x61, 0x74, 0x6f, 0x72, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x52, 0x0d, 0x70, 0x72, 0x6f, 0x70, 0x6f, + 0x73, 0x65, 0x72, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x12, 0x27, 0x0a, 0x0b, 0x70, 0x61, 0x72, 0x65, + 0x6e, 0x74, 0x5f, 0x72, 0x6f, 0x6f, 0x74, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x06, 0x8a, + 0xb5, 0x18, 0x02, 0x33, 0x32, 0x52, 0x0a, 0x70, 0x61, 0x72, 0x65, 0x6e, 0x74, 0x52, 0x6f, 0x6f, + 0x74, 0x12, 0x25, 0x0a, 0x0a, 0x73, 0x74, 0x61, 0x74, 0x65, 0x5f, 0x72, 0x6f, 0x6f, 0x74, 0x18, + 0x04, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x06, 0x8a, 0xb5, 0x18, 0x02, 0x33, 0x32, 0x52, 0x09, 0x73, + 0x74, 0x61, 0x74, 0x65, 0x52, 0x6f, 0x6f, 0x74, 0x12, 0x3f, 0x0a, 0x04, 0x62, 0x6f, 0x64, 0x79, + 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x2b, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, + 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x42, + 0x65, 0x61, 0x63, 0x6f, 0x6e, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x42, 0x6f, 0x64, 0x79, 0x47, 0x6c, + 0x6f, 0x61, 0x73, 0x52, 0x04, 0x62, 0x6f, 0x64, 0x79, 0x22, 0xfb, 0x07, 0x0a, 0x14, 0x42, 0x65, + 0x61, 0x63, 0x6f, 0x6e, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x42, 0x6f, 0x64, 0x79, 0x47, 0x6c, 0x6f, + 0x61, 0x73, 0x12, 0x2b, 0x0a, 0x0d, 0x72, 0x61, 0x6e, 0x64, 0x61, 0x6f, 0x5f, 0x72, 0x65, 0x76, + 0x65, 0x61, 0x6c, 0x18, 0x01, 0x20, 0x01, 
0x28, 0x0c, 0x42, 0x06, 0x8a, 0xb5, 0x18, 0x02, 0x39, + 0x36, 0x52, 0x0c, 0x72, 0x61, 0x6e, 0x64, 0x61, 0x6f, 0x52, 0x65, 0x76, 0x65, 0x61, 0x6c, 0x12, + 0x3c, 0x0a, 0x09, 0x65, 0x74, 0x68, 0x31, 0x5f, 0x64, 0x61, 0x74, 0x61, 0x18, 0x02, 0x20, 0x01, + 0x28, 0x0b, 0x32, 0x1f, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, + 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x45, 0x74, 0x68, 0x31, 0x44, + 0x61, 0x74, 0x61, 0x52, 0x08, 0x65, 0x74, 0x68, 0x31, 0x44, 0x61, 0x74, 0x61, 0x12, 0x22, 0x0a, + 0x08, 0x67, 0x72, 0x61, 0x66, 0x66, 0x69, 0x74, 0x69, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0c, 0x42, + 0x06, 0x8a, 0xb5, 0x18, 0x02, 0x33, 0x32, 0x52, 0x08, 0x67, 0x72, 0x61, 0x66, 0x66, 0x69, 0x74, + 0x69, 0x12, 0x5e, 0x0a, 0x12, 0x70, 0x72, 0x6f, 0x70, 0x6f, 0x73, 0x65, 0x72, 0x5f, 0x73, 0x6c, + 0x61, 0x73, 0x68, 0x69, 0x6e, 0x67, 0x73, 0x18, 0x04, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x27, 0x2e, + 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, + 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x50, 0x72, 0x6f, 0x70, 0x6f, 0x73, 0x65, 0x72, 0x53, 0x6c, + 0x61, 0x73, 0x68, 0x69, 0x6e, 0x67, 0x42, 0x06, 0x92, 0xb5, 0x18, 0x02, 0x31, 0x36, 0x52, 0x11, + 0x70, 0x72, 0x6f, 0x70, 0x6f, 0x73, 0x65, 0x72, 0x53, 0x6c, 0x61, 0x73, 0x68, 0x69, 0x6e, 0x67, + 0x73, 0x12, 0x64, 0x0a, 0x12, 0x61, 0x74, 0x74, 0x65, 0x73, 0x74, 0x65, 0x72, 0x5f, 0x73, 0x6c, + 0x61, 0x73, 0x68, 0x69, 0x6e, 0x67, 0x73, 0x18, 0x05, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x2e, 0x2e, + 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, + 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x41, 0x74, 0x74, 0x65, 0x73, 0x74, 0x65, 0x72, 0x53, 0x6c, + 0x61, 0x73, 0x68, 0x69, 0x6e, 0x67, 0x45, 0x6c, 0x65, 0x63, 0x74, 0x72, 0x61, 0x42, 0x05, 0x92, + 0xb5, 0x18, 0x01, 0x31, 0x52, 0x11, 0x61, 0x74, 0x74, 0x65, 0x73, 0x74, 0x65, 0x72, 0x53, 0x6c, + 0x61, 0x73, 0x68, 0x69, 0x6e, 0x67, 0x73, 0x12, 0x54, 0x0a, 0x0c, 0x61, 0x74, 0x74, 0x65, 0x73, + 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x18, 0x06, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x29, 0x2e, + 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, + 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x41, 0x74, 0x74, 0x65, 0x73, 0x74, 0x61, 0x74, 0x69, 0x6f, + 0x6e, 0x45, 0x6c, 0x65, 0x63, 0x74, 0x72, 0x61, 0x42, 0x05, 0x92, 0xb5, 0x18, 0x01, 0x38, 0x52, + 0x0c, 0x61, 0x74, 0x74, 0x65, 0x73, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x42, 0x0a, + 0x08, 0x64, 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x73, 0x18, 0x07, 0x20, 0x03, 0x28, 0x0b, 0x32, + 0x1e, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, + 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x44, 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x42, + 0x06, 0x92, 0xb5, 0x18, 0x02, 0x31, 0x36, 0x52, 0x08, 0x64, 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, + 0x73, 0x12, 0x5b, 0x0a, 0x0f, 0x76, 0x6f, 0x6c, 0x75, 0x6e, 0x74, 0x61, 0x72, 0x79, 0x5f, 0x65, + 0x78, 0x69, 0x74, 0x73, 0x18, 0x08, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x2a, 0x2e, 0x65, 0x74, 0x68, + 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, + 0x61, 0x31, 0x2e, 0x53, 0x69, 0x67, 0x6e, 0x65, 0x64, 0x56, 0x6f, 0x6c, 0x75, 0x6e, 0x74, 0x61, + 0x72, 0x79, 0x45, 0x78, 0x69, 0x74, 0x42, 0x06, 0x92, 0xb5, 0x18, 0x02, 0x31, 0x36, 0x52, 0x0e, + 0x76, 0x6f, 0x6c, 0x75, 0x6e, 0x74, 0x61, 0x72, 0x79, 0x45, 0x78, 0x69, 0x74, 0x73, 0x12, 0x4b, + 0x0a, 0x0e, 0x73, 0x79, 0x6e, 0x63, 0x5f, 0x61, 0x67, 0x67, 0x72, 
0x65, 0x67, 0x61, 0x74, 0x65, + 0x18, 0x09, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x24, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, + 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x53, + 0x79, 0x6e, 0x63, 0x41, 0x67, 0x67, 0x72, 0x65, 0x67, 0x61, 0x74, 0x65, 0x52, 0x0d, 0x73, 0x79, + 0x6e, 0x63, 0x41, 0x67, 0x67, 0x72, 0x65, 0x67, 0x61, 0x74, 0x65, 0x12, 0x72, 0x0a, 0x18, 0x62, + 0x6c, 0x73, 0x5f, 0x74, 0x6f, 0x5f, 0x65, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x5f, + 0x63, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x73, 0x18, 0x0a, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x31, 0x2e, + 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, + 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x53, 0x69, 0x67, 0x6e, 0x65, 0x64, 0x42, 0x4c, 0x53, 0x54, + 0x6f, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x43, 0x68, 0x61, 0x6e, 0x67, 0x65, + 0x42, 0x06, 0x92, 0xb5, 0x18, 0x02, 0x31, 0x36, 0x52, 0x15, 0x62, 0x6c, 0x73, 0x54, 0x6f, 0x45, + 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x43, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x73, 0x12, + 0x71, 0x0a, 0x1c, 0x73, 0x69, 0x67, 0x6e, 0x65, 0x64, 0x5f, 0x65, 0x78, 0x65, 0x63, 0x75, 0x74, + 0x69, 0x6f, 0x6e, 0x5f, 0x70, 0x61, 0x79, 0x6c, 0x6f, 0x61, 0x64, 0x5f, 0x62, 0x69, 0x64, 0x18, + 0x0b, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x30, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, + 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x53, 0x69, + 0x67, 0x6e, 0x65, 0x64, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x50, 0x61, 0x79, + 0x6c, 0x6f, 0x61, 0x64, 0x42, 0x69, 0x64, 0x52, 0x19, 0x73, 0x69, 0x67, 0x6e, 0x65, 0x64, 0x45, + 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x50, 0x61, 0x79, 0x6c, 0x6f, 0x61, 0x64, 0x42, + 0x69, 0x64, 0x12, 0x63, 0x0a, 0x14, 0x70, 0x61, 0x79, 0x6c, 0x6f, 0x61, 0x64, 0x5f, 0x61, 0x74, + 0x74, 0x65, 0x73, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x18, 0x0c, 0x20, 0x03, 0x28, 0x0b, + 0x32, 0x29, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, + 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x50, 0x61, 0x79, 0x6c, 0x6f, 0x61, 0x64, + 0x41, 0x74, 0x74, 0x65, 0x73, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x42, 0x05, 0x92, 0xb5, 0x18, + 0x01, 0x34, 0x52, 0x13, 0x70, 0x61, 0x79, 0x6c, 0x6f, 0x61, 0x64, 0x41, 0x74, 0x74, 0x65, 0x73, + 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x22, 0x7d, 0x0a, 0x16, 0x53, 0x69, 0x67, 0x6e, 0x65, + 0x64, 0x42, 0x65, 0x61, 0x63, 0x6f, 0x6e, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x47, 0x6c, 0x6f, 0x61, + 0x73, 0x12, 0x3d, 0x0a, 0x05, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, + 0x32, 0x27, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, + 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x42, 0x65, 0x61, 0x63, 0x6f, 0x6e, 0x42, + 0x6c, 0x6f, 0x63, 0x6b, 0x47, 0x6c, 0x6f, 0x61, 0x73, 0x52, 0x05, 0x62, 0x6c, 0x6f, 0x63, 0x6b, + 0x12, 0x24, 0x0a, 0x09, 0x73, 0x69, 0x67, 0x6e, 0x61, 0x74, 0x75, 0x72, 0x65, 0x18, 0x02, 0x20, + 0x01, 0x28, 0x0c, 0x42, 0x06, 0x8a, 0xb5, 0x18, 0x02, 0x39, 0x36, 0x52, 0x09, 0x73, 0x69, 0x67, + 0x6e, 0x61, 0x74, 0x75, 0x72, 0x65, 0x22, 0x93, 0x1d, 0x0a, 0x10, 0x42, 0x65, 0x61, 0x63, 0x6f, + 0x6e, 0x53, 0x74, 0x61, 0x74, 0x65, 0x47, 0x6c, 0x6f, 0x61, 0x73, 0x12, 0x22, 0x0a, 0x0c, 0x67, + 0x65, 0x6e, 0x65, 0x73, 0x69, 0x73, 0x5f, 0x74, 0x69, 0x6d, 0x65, 0x18, 0xe9, 0x07, 0x20, 0x01, + 0x28, 0x04, 0x52, 0x0b, 0x67, 0x65, 0x6e, 0x65, 0x73, 0x69, 0x73, 0x54, 0x69, 0x6d, 0x65, 
0x12, + 0x3f, 0x0a, 0x17, 0x67, 0x65, 0x6e, 0x65, 0x73, 0x69, 0x73, 0x5f, 0x76, 0x61, 0x6c, 0x69, 0x64, + 0x61, 0x74, 0x6f, 0x72, 0x73, 0x5f, 0x72, 0x6f, 0x6f, 0x74, 0x18, 0xea, 0x07, 0x20, 0x01, 0x28, + 0x0c, 0x42, 0x06, 0x8a, 0xb5, 0x18, 0x02, 0x33, 0x32, 0x52, 0x15, 0x67, 0x65, 0x6e, 0x65, 0x73, + 0x69, 0x73, 0x56, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, 0x73, 0x52, 0x6f, 0x6f, 0x74, + 0x12, 0x59, 0x0a, 0x04, 0x73, 0x6c, 0x6f, 0x74, 0x18, 0xeb, 0x07, 0x20, 0x01, 0x28, 0x04, 0x42, + 0x44, 0x82, 0xb5, 0x18, 0x40, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, + 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, + 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, + 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, + 0x2e, 0x53, 0x6c, 0x6f, 0x74, 0x52, 0x04, 0x73, 0x6c, 0x6f, 0x74, 0x12, 0x30, 0x0a, 0x04, 0x66, + 0x6f, 0x72, 0x6b, 0x18, 0xec, 0x07, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1b, 0x2e, 0x65, 0x74, 0x68, + 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, + 0x61, 0x31, 0x2e, 0x46, 0x6f, 0x72, 0x6b, 0x52, 0x04, 0x66, 0x6f, 0x72, 0x6b, 0x12, 0x59, 0x0a, + 0x13, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x74, 0x5f, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x5f, 0x68, 0x65, + 0x61, 0x64, 0x65, 0x72, 0x18, 0xd1, 0x0f, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x28, 0x2e, 0x65, 0x74, + 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, + 0x68, 0x61, 0x31, 0x2e, 0x42, 0x65, 0x61, 0x63, 0x6f, 0x6e, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x48, + 0x65, 0x61, 0x64, 0x65, 0x72, 0x52, 0x11, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x74, 0x42, 0x6c, 0x6f, + 0x63, 0x6b, 0x48, 0x65, 0x61, 0x64, 0x65, 0x72, 0x12, 0x2d, 0x0a, 0x0b, 0x62, 0x6c, 0x6f, 0x63, + 0x6b, 0x5f, 0x72, 0x6f, 0x6f, 0x74, 0x73, 0x18, 0xd2, 0x0f, 0x20, 0x03, 0x28, 0x0c, 0x42, 0x0b, + 0x8a, 0xb5, 0x18, 0x07, 0x38, 0x31, 0x39, 0x32, 0x2c, 0x33, 0x32, 0x52, 0x0a, 0x62, 0x6c, 0x6f, + 0x63, 0x6b, 0x52, 0x6f, 0x6f, 0x74, 0x73, 0x12, 0x2d, 0x0a, 0x0b, 0x73, 0x74, 0x61, 0x74, 0x65, + 0x5f, 0x72, 0x6f, 0x6f, 0x74, 0x73, 0x18, 0xd3, 0x0f, 0x20, 0x03, 0x28, 0x0c, 0x42, 0x0b, 0x8a, + 0xb5, 0x18, 0x07, 0x38, 0x31, 0x39, 0x32, 0x2c, 0x33, 0x32, 0x52, 0x0a, 0x73, 0x74, 0x61, 0x74, + 0x65, 0x52, 0x6f, 0x6f, 0x74, 0x73, 0x12, 0x40, 0x0a, 0x10, 0x68, 0x69, 0x73, 0x74, 0x6f, 0x72, + 0x69, 0x63, 0x61, 0x6c, 0x5f, 0x72, 0x6f, 0x6f, 0x74, 0x73, 0x18, 0xd4, 0x0f, 0x20, 0x03, 0x28, + 0x0c, 0x42, 0x14, 0x8a, 0xb5, 0x18, 0x04, 0x3f, 0x2c, 0x33, 0x32, 0x92, 0xb5, 0x18, 0x08, 0x31, + 0x36, 0x37, 0x37, 0x37, 0x32, 0x31, 0x36, 0x52, 0x0f, 0x68, 0x69, 0x73, 0x74, 0x6f, 0x72, 0x69, + 0x63, 0x61, 0x6c, 0x52, 0x6f, 0x6f, 0x74, 0x73, 0x12, 0x3d, 0x0a, 0x09, 0x65, 0x74, 0x68, 0x31, + 0x5f, 0x64, 0x61, 0x74, 0x61, 0x18, 0xb9, 0x17, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1f, 0x2e, 0x65, + 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, + 0x70, 0x68, 0x61, 0x31, 0x2e, 0x45, 0x74, 0x68, 0x31, 0x44, 0x61, 0x74, 0x61, 0x52, 0x08, 0x65, + 0x74, 0x68, 0x31, 0x44, 0x61, 0x74, 0x61, 0x12, 0x52, 0x0a, 0x0f, 0x65, 0x74, 0x68, 0x31, 0x5f, + 0x64, 0x61, 0x74, 0x61, 0x5f, 0x76, 0x6f, 0x74, 0x65, 0x73, 0x18, 0xba, 0x17, 0x20, 0x03, 0x28, + 0x0b, 0x32, 0x1f, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, + 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x45, 0x74, 0x68, 0x31, 0x44, 0x61, + 0x74, 0x61, 0x42, 
0x08, 0x92, 0xb5, 0x18, 0x04, 0x32, 0x30, 0x34, 0x38, 0x52, 0x0d, 0x65, 0x74, + 0x68, 0x31, 0x44, 0x61, 0x74, 0x61, 0x56, 0x6f, 0x74, 0x65, 0x73, 0x12, 0x2d, 0x0a, 0x12, 0x65, + 0x74, 0x68, 0x31, 0x5f, 0x64, 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x5f, 0x69, 0x6e, 0x64, 0x65, + 0x78, 0x18, 0xbb, 0x17, 0x20, 0x01, 0x28, 0x04, 0x52, 0x10, 0x65, 0x74, 0x68, 0x31, 0x44, 0x65, + 0x70, 0x6f, 0x73, 0x69, 0x74, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x12, 0x54, 0x0a, 0x0a, 0x76, 0x61, + 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, 0x73, 0x18, 0xa1, 0x1f, 0x20, 0x03, 0x28, 0x0b, 0x32, + 0x20, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, + 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x56, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, + 0x72, 0x42, 0x11, 0x92, 0xb5, 0x18, 0x0d, 0x31, 0x30, 0x39, 0x39, 0x35, 0x31, 0x31, 0x36, 0x32, + 0x37, 0x37, 0x37, 0x36, 0x52, 0x0a, 0x76, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, 0x73, + 0x12, 0x2e, 0x0a, 0x08, 0x62, 0x61, 0x6c, 0x61, 0x6e, 0x63, 0x65, 0x73, 0x18, 0xa2, 0x1f, 0x20, + 0x03, 0x28, 0x04, 0x42, 0x11, 0x92, 0xb5, 0x18, 0x0d, 0x31, 0x30, 0x39, 0x39, 0x35, 0x31, 0x31, + 0x36, 0x32, 0x37, 0x37, 0x37, 0x36, 0x52, 0x08, 0x62, 0x61, 0x6c, 0x61, 0x6e, 0x63, 0x65, 0x73, + 0x12, 0x30, 0x0a, 0x0c, 0x72, 0x61, 0x6e, 0x64, 0x61, 0x6f, 0x5f, 0x6d, 0x69, 0x78, 0x65, 0x73, + 0x18, 0x89, 0x27, 0x20, 0x03, 0x28, 0x0c, 0x42, 0x0c, 0x8a, 0xb5, 0x18, 0x08, 0x36, 0x35, 0x35, + 0x33, 0x36, 0x2c, 0x33, 0x32, 0x52, 0x0b, 0x72, 0x61, 0x6e, 0x64, 0x61, 0x6f, 0x4d, 0x69, 0x78, + 0x65, 0x73, 0x12, 0x27, 0x0a, 0x09, 0x73, 0x6c, 0x61, 0x73, 0x68, 0x69, 0x6e, 0x67, 0x73, 0x18, + 0xf1, 0x2e, 0x20, 0x03, 0x28, 0x04, 0x42, 0x08, 0x8a, 0xb5, 0x18, 0x04, 0x38, 0x31, 0x39, 0x32, + 0x52, 0x09, 0x73, 0x6c, 0x61, 0x73, 0x68, 0x69, 0x6e, 0x67, 0x73, 0x12, 0x54, 0x0a, 0x1c, 0x70, + 0x72, 0x65, 0x76, 0x69, 0x6f, 0x75, 0x73, 0x5f, 0x65, 0x70, 0x6f, 0x63, 0x68, 0x5f, 0x70, 0x61, + 0x72, 0x74, 0x69, 0x63, 0x69, 0x70, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0xd9, 0x36, 0x20, 0x01, + 0x28, 0x0c, 0x42, 0x11, 0x92, 0xb5, 0x18, 0x0d, 0x31, 0x30, 0x39, 0x39, 0x35, 0x31, 0x31, 0x36, + 0x32, 0x37, 0x37, 0x37, 0x36, 0x52, 0x1a, 0x70, 0x72, 0x65, 0x76, 0x69, 0x6f, 0x75, 0x73, 0x45, + 0x70, 0x6f, 0x63, 0x68, 0x50, 0x61, 0x72, 0x74, 0x69, 0x63, 0x69, 0x70, 0x61, 0x74, 0x69, 0x6f, + 0x6e, 0x12, 0x52, 0x0a, 0x1b, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x5f, 0x65, 0x70, 0x6f, + 0x63, 0x68, 0x5f, 0x70, 0x61, 0x72, 0x74, 0x69, 0x63, 0x69, 0x70, 0x61, 0x74, 0x69, 0x6f, 0x6e, + 0x18, 0xda, 0x36, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x11, 0x92, 0xb5, 0x18, 0x0d, 0x31, 0x30, 0x39, + 0x39, 0x35, 0x31, 0x31, 0x36, 0x32, 0x37, 0x37, 0x37, 0x36, 0x52, 0x19, 0x63, 0x75, 0x72, 0x72, + 0x65, 0x6e, 0x74, 0x45, 0x70, 0x6f, 0x63, 0x68, 0x50, 0x61, 0x72, 0x74, 0x69, 0x63, 0x69, 0x70, + 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x67, 0x0a, 0x12, 0x6a, 0x75, 0x73, 0x74, 0x69, 0x66, 0x69, + 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x62, 0x69, 0x74, 0x73, 0x18, 0xc1, 0x3e, 0x20, 0x01, + 0x28, 0x0c, 0x42, 0x37, 0x82, 0xb5, 0x18, 0x2e, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, + 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, + 0x67, 0x6f, 0x2d, 0x62, 0x69, 0x74, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x2e, 0x42, 0x69, 0x74, 0x76, + 0x65, 0x63, 0x74, 0x6f, 0x72, 0x34, 0x8a, 0xb5, 0x18, 0x01, 0x31, 0x52, 0x11, 0x6a, 0x75, 0x73, + 0x74, 0x69, 0x66, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x42, 0x69, 0x74, 0x73, 0x12, 0x66, + 0x0a, 0x1d, 0x70, 0x72, 0x65, 0x76, 0x69, 
0x6f, 0x75, 0x73, 0x5f, 0x6a, 0x75, 0x73, 0x74, 0x69, + 0x66, 0x69, 0x65, 0x64, 0x5f, 0x63, 0x68, 0x65, 0x63, 0x6b, 0x70, 0x6f, 0x69, 0x6e, 0x74, 0x18, + 0xc2, 0x3e, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x21, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, + 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x43, + 0x68, 0x65, 0x63, 0x6b, 0x70, 0x6f, 0x69, 0x6e, 0x74, 0x52, 0x1b, 0x70, 0x72, 0x65, 0x76, 0x69, + 0x6f, 0x75, 0x73, 0x4a, 0x75, 0x73, 0x74, 0x69, 0x66, 0x69, 0x65, 0x64, 0x43, 0x68, 0x65, 0x63, + 0x6b, 0x70, 0x6f, 0x69, 0x6e, 0x74, 0x12, 0x64, 0x0a, 0x1c, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, + 0x74, 0x5f, 0x6a, 0x75, 0x73, 0x74, 0x69, 0x66, 0x69, 0x65, 0x64, 0x5f, 0x63, 0x68, 0x65, 0x63, + 0x6b, 0x70, 0x6f, 0x69, 0x6e, 0x74, 0x18, 0xc3, 0x3e, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x21, 0x2e, + 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, + 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x43, 0x68, 0x65, 0x63, 0x6b, 0x70, 0x6f, 0x69, 0x6e, 0x74, + 0x52, 0x1a, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x4a, 0x75, 0x73, 0x74, 0x69, 0x66, 0x69, + 0x65, 0x64, 0x43, 0x68, 0x65, 0x63, 0x6b, 0x70, 0x6f, 0x69, 0x6e, 0x74, 0x12, 0x55, 0x0a, 0x14, + 0x66, 0x69, 0x6e, 0x61, 0x6c, 0x69, 0x7a, 0x65, 0x64, 0x5f, 0x63, 0x68, 0x65, 0x63, 0x6b, 0x70, + 0x6f, 0x69, 0x6e, 0x74, 0x18, 0xc4, 0x3e, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x21, 0x2e, 0x65, 0x74, + 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, + 0x68, 0x61, 0x31, 0x2e, 0x43, 0x68, 0x65, 0x63, 0x6b, 0x70, 0x6f, 0x69, 0x6e, 0x74, 0x52, 0x13, + 0x66, 0x69, 0x6e, 0x61, 0x6c, 0x69, 0x7a, 0x65, 0x64, 0x43, 0x68, 0x65, 0x63, 0x6b, 0x70, 0x6f, + 0x69, 0x6e, 0x74, 0x12, 0x3f, 0x0a, 0x11, 0x69, 0x6e, 0x61, 0x63, 0x74, 0x69, 0x76, 0x69, 0x74, + 0x79, 0x5f, 0x73, 0x63, 0x6f, 0x72, 0x65, 0x73, 0x18, 0xa9, 0x46, 0x20, 0x03, 0x28, 0x04, 0x42, + 0x11, 0x92, 0xb5, 0x18, 0x0d, 0x31, 0x30, 0x39, 0x39, 0x35, 0x31, 0x31, 0x36, 0x32, 0x37, 0x37, + 0x37, 0x36, 0x52, 0x10, 0x69, 0x6e, 0x61, 0x63, 0x74, 0x69, 0x76, 0x69, 0x74, 0x79, 0x53, 0x63, + 0x6f, 0x72, 0x65, 0x73, 0x12, 0x5b, 0x0a, 0x16, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x5f, + 0x73, 0x79, 0x6e, 0x63, 0x5f, 0x63, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x74, 0x65, 0x65, 0x18, 0xaa, + 0x46, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x24, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, + 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x53, 0x79, + 0x6e, 0x63, 0x43, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x74, 0x65, 0x65, 0x52, 0x14, 0x63, 0x75, 0x72, + 0x72, 0x65, 0x6e, 0x74, 0x53, 0x79, 0x6e, 0x63, 0x43, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x74, 0x65, + 0x65, 0x12, 0x55, 0x0a, 0x13, 0x6e, 0x65, 0x78, 0x74, 0x5f, 0x73, 0x79, 0x6e, 0x63, 0x5f, 0x63, + 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x74, 0x65, 0x65, 0x18, 0xab, 0x46, 0x20, 0x01, 0x28, 0x0b, 0x32, + 0x24, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, + 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x53, 0x79, 0x6e, 0x63, 0x43, 0x6f, 0x6d, 0x6d, + 0x69, 0x74, 0x74, 0x65, 0x65, 0x52, 0x11, 0x6e, 0x65, 0x78, 0x74, 0x53, 0x79, 0x6e, 0x63, 0x43, + 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x74, 0x65, 0x65, 0x12, 0x6c, 0x0a, 0x1c, 0x6c, 0x61, 0x74, 0x65, + 0x73, 0x74, 0x5f, 0x65, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x70, 0x61, 0x79, + 0x6c, 0x6f, 0x61, 0x64, 0x5f, 0x62, 0x69, 0x64, 0x18, 0x91, 0x4e, 0x20, 0x01, 0x28, 0x0b, 0x32, + 0x2a, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 
0x65, 0x74, 0x68, 0x2e, 0x76, + 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, + 0x6e, 0x50, 0x61, 0x79, 0x6c, 0x6f, 0x61, 0x64, 0x42, 0x69, 0x64, 0x52, 0x19, 0x6c, 0x61, 0x74, + 0x65, 0x73, 0x74, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x50, 0x61, 0x79, 0x6c, + 0x6f, 0x61, 0x64, 0x42, 0x69, 0x64, 0x12, 0x33, 0x0a, 0x15, 0x6e, 0x65, 0x78, 0x74, 0x5f, 0x77, + 0x69, 0x74, 0x68, 0x64, 0x72, 0x61, 0x77, 0x61, 0x6c, 0x5f, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x18, + 0xf9, 0x55, 0x20, 0x01, 0x28, 0x04, 0x52, 0x13, 0x6e, 0x65, 0x78, 0x74, 0x57, 0x69, 0x74, 0x68, + 0x64, 0x72, 0x61, 0x77, 0x61, 0x6c, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x12, 0x96, 0x01, 0x0a, 0x1f, + 0x6e, 0x65, 0x78, 0x74, 0x5f, 0x77, 0x69, 0x74, 0x68, 0x64, 0x72, 0x61, 0x77, 0x61, 0x6c, 0x5f, + 0x76, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, 0x5f, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x18, + 0xfa, 0x55, 0x20, 0x01, 0x28, 0x04, 0x42, 0x4e, 0x82, 0xb5, 0x18, 0x4a, 0x67, 0x69, 0x74, 0x68, + 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, + 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, + 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, + 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x56, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, + 0x72, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x52, 0x1c, 0x6e, 0x65, 0x78, 0x74, 0x57, 0x69, 0x74, 0x68, + 0x64, 0x72, 0x61, 0x77, 0x61, 0x6c, 0x56, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, 0x49, + 0x6e, 0x64, 0x65, 0x78, 0x12, 0x6a, 0x0a, 0x14, 0x68, 0x69, 0x73, 0x74, 0x6f, 0x72, 0x69, 0x63, + 0x61, 0x6c, 0x5f, 0x73, 0x75, 0x6d, 0x6d, 0x61, 0x72, 0x69, 0x65, 0x73, 0x18, 0xfb, 0x55, 0x20, + 0x03, 0x28, 0x0b, 0x32, 0x28, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, + 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x48, 0x69, 0x73, 0x74, + 0x6f, 0x72, 0x69, 0x63, 0x61, 0x6c, 0x53, 0x75, 0x6d, 0x6d, 0x61, 0x72, 0x79, 0x42, 0x0c, 0x92, + 0xb5, 0x18, 0x08, 0x31, 0x36, 0x37, 0x37, 0x37, 0x32, 0x31, 0x36, 0x52, 0x13, 0x68, 0x69, 0x73, + 0x74, 0x6f, 0x72, 0x69, 0x63, 0x61, 0x6c, 0x53, 0x75, 0x6d, 0x6d, 0x61, 0x72, 0x69, 0x65, 0x73, + 0x12, 0x40, 0x0a, 0x1c, 0x64, 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x5f, 0x72, 0x65, 0x71, 0x75, + 0x65, 0x73, 0x74, 0x73, 0x5f, 0x73, 0x74, 0x61, 0x72, 0x74, 0x5f, 0x69, 0x6e, 0x64, 0x65, 0x78, + 0x18, 0xe1, 0x5d, 0x20, 0x01, 0x28, 0x04, 0x52, 0x19, 0x64, 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, + 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x73, 0x53, 0x74, 0x61, 0x72, 0x74, 0x49, 0x6e, 0x64, + 0x65, 0x78, 0x12, 0x82, 0x01, 0x0a, 0x1a, 0x64, 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x5f, 0x62, + 0x61, 0x6c, 0x61, 0x6e, 0x63, 0x65, 0x5f, 0x74, 0x6f, 0x5f, 0x63, 0x6f, 0x6e, 0x73, 0x75, 0x6d, + 0x65, 0x18, 0xe2, 0x5d, 0x20, 0x01, 0x28, 0x04, 0x42, 0x44, 0x82, 0xb5, 0x18, 0x40, 0x67, 0x69, + 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, + 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, + 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, + 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x47, 0x77, 0x65, 0x69, 0x52, 0x17, + 0x64, 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x42, 0x61, 0x6c, 0x61, 0x6e, 0x63, 0x65, 0x54, 0x6f, + 0x43, 0x6f, 0x6e, 0x73, 0x75, 0x6d, 0x65, 0x12, 0x7c, 0x0a, 0x17, 0x65, 0x78, 0x69, 0x74, 
0x5f, + 0x62, 0x61, 0x6c, 0x61, 0x6e, 0x63, 0x65, 0x5f, 0x74, 0x6f, 0x5f, 0x63, 0x6f, 0x6e, 0x73, 0x75, + 0x6d, 0x65, 0x18, 0xe3, 0x5d, 0x20, 0x01, 0x28, 0x04, 0x42, 0x44, 0x82, 0xb5, 0x18, 0x40, 0x67, + 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, + 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, + 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, + 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x47, 0x77, 0x65, 0x69, 0x52, + 0x14, 0x65, 0x78, 0x69, 0x74, 0x42, 0x61, 0x6c, 0x61, 0x6e, 0x63, 0x65, 0x54, 0x6f, 0x43, 0x6f, + 0x6e, 0x73, 0x75, 0x6d, 0x65, 0x12, 0x76, 0x0a, 0x13, 0x65, 0x61, 0x72, 0x6c, 0x69, 0x65, 0x73, + 0x74, 0x5f, 0x65, 0x78, 0x69, 0x74, 0x5f, 0x65, 0x70, 0x6f, 0x63, 0x68, 0x18, 0xe4, 0x5d, 0x20, + 0x01, 0x28, 0x04, 0x42, 0x45, 0x82, 0xb5, 0x18, 0x41, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, + 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, + 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, + 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, + 0x69, 0x76, 0x65, 0x73, 0x2e, 0x45, 0x70, 0x6f, 0x63, 0x68, 0x52, 0x11, 0x65, 0x61, 0x72, 0x6c, + 0x69, 0x65, 0x73, 0x74, 0x45, 0x78, 0x69, 0x74, 0x45, 0x70, 0x6f, 0x63, 0x68, 0x12, 0x8e, 0x01, + 0x0a, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x6f, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, + 0x62, 0x61, 0x6c, 0x61, 0x6e, 0x63, 0x65, 0x5f, 0x74, 0x6f, 0x5f, 0x63, 0x6f, 0x6e, 0x73, 0x75, + 0x6d, 0x65, 0x18, 0xe5, 0x5d, 0x20, 0x01, 0x28, 0x04, 0x42, 0x44, 0x82, 0xb5, 0x18, 0x40, 0x67, + 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, + 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, + 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, + 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x47, 0x77, 0x65, 0x69, 0x52, + 0x1d, 0x63, 0x6f, 0x6e, 0x73, 0x6f, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x42, 0x61, + 0x6c, 0x61, 0x6e, 0x63, 0x65, 0x54, 0x6f, 0x43, 0x6f, 0x6e, 0x73, 0x75, 0x6d, 0x65, 0x12, 0x88, + 0x01, 0x0a, 0x1c, 0x65, 0x61, 0x72, 0x6c, 0x69, 0x65, 0x73, 0x74, 0x5f, 0x63, 0x6f, 0x6e, 0x73, + 0x6f, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x65, 0x70, 0x6f, 0x63, 0x68, 0x18, + 0xe6, 0x5d, 0x20, 0x01, 0x28, 0x04, 0x42, 0x45, 0x82, 0xb5, 0x18, 0x41, 0x67, 0x69, 0x74, 0x68, + 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, + 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, + 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, + 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x45, 0x70, 0x6f, 0x63, 0x68, 0x52, 0x1a, 0x65, + 0x61, 0x72, 0x6c, 0x69, 0x65, 0x73, 0x74, 0x43, 0x6f, 0x6e, 0x73, 0x6f, 0x6c, 0x69, 0x64, 0x61, + 0x74, 0x69, 0x6f, 0x6e, 0x45, 0x70, 0x6f, 0x63, 0x68, 0x12, 0x60, 0x0a, 0x10, 0x70, 0x65, 0x6e, + 0x64, 0x69, 0x6e, 0x67, 0x5f, 0x64, 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x73, 0x18, 0xe7, 0x5d, + 0x20, 0x03, 0x28, 0x0b, 0x32, 0x25, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, + 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x50, 0x65, 0x6e, + 0x64, 0x69, 0x6e, 
0x67, 0x44, 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x42, 0x0d, 0x92, 0xb5, 0x18, + 0x09, 0x31, 0x33, 0x34, 0x32, 0x31, 0x37, 0x37, 0x32, 0x38, 0x52, 0x0f, 0x70, 0x65, 0x6e, 0x64, + 0x69, 0x6e, 0x67, 0x44, 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x73, 0x12, 0x7f, 0x0a, 0x1b, 0x70, + 0x65, 0x6e, 0x64, 0x69, 0x6e, 0x67, 0x5f, 0x70, 0x61, 0x72, 0x74, 0x69, 0x61, 0x6c, 0x5f, 0x77, + 0x69, 0x74, 0x68, 0x64, 0x72, 0x61, 0x77, 0x61, 0x6c, 0x73, 0x18, 0xe8, 0x5d, 0x20, 0x03, 0x28, + 0x0b, 0x32, 0x2f, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, + 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x50, 0x65, 0x6e, 0x64, 0x69, 0x6e, + 0x67, 0x50, 0x61, 0x72, 0x74, 0x69, 0x61, 0x6c, 0x57, 0x69, 0x74, 0x68, 0x64, 0x72, 0x61, 0x77, + 0x61, 0x6c, 0x42, 0x0d, 0x92, 0xb5, 0x18, 0x09, 0x31, 0x33, 0x34, 0x32, 0x31, 0x37, 0x37, 0x32, + 0x38, 0x52, 0x19, 0x70, 0x65, 0x6e, 0x64, 0x69, 0x6e, 0x67, 0x50, 0x61, 0x72, 0x74, 0x69, 0x61, + 0x6c, 0x57, 0x69, 0x74, 0x68, 0x64, 0x72, 0x61, 0x77, 0x61, 0x6c, 0x73, 0x12, 0x6f, 0x0a, 0x16, + 0x70, 0x65, 0x6e, 0x64, 0x69, 0x6e, 0x67, 0x5f, 0x63, 0x6f, 0x6e, 0x73, 0x6f, 0x6c, 0x69, 0x64, + 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x18, 0xe9, 0x5d, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x2b, 0x2e, + 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, + 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x50, 0x65, 0x6e, 0x64, 0x69, 0x6e, 0x67, 0x43, 0x6f, 0x6e, + 0x73, 0x6f, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x42, 0x0a, 0x92, 0xb5, 0x18, 0x06, + 0x32, 0x36, 0x32, 0x31, 0x34, 0x34, 0x52, 0x15, 0x70, 0x65, 0x6e, 0x64, 0x69, 0x6e, 0x67, 0x43, + 0x6f, 0x6e, 0x73, 0x6f, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x36, 0x0a, + 0x12, 0x70, 0x72, 0x6f, 0x70, 0x6f, 0x73, 0x65, 0x72, 0x5f, 0x6c, 0x6f, 0x6f, 0x6b, 0x61, 0x68, + 0x65, 0x61, 0x64, 0x18, 0xc9, 0x65, 0x20, 0x03, 0x28, 0x04, 0x42, 0x06, 0x8a, 0xb5, 0x18, 0x02, + 0x36, 0x34, 0x52, 0x11, 0x70, 0x72, 0x6f, 0x70, 0x6f, 0x73, 0x65, 0x72, 0x4c, 0x6f, 0x6f, 0x6b, + 0x61, 0x68, 0x65, 0x61, 0x64, 0x12, 0x4f, 0x0a, 0x1e, 0x65, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, + 0x6f, 0x6e, 0x5f, 0x70, 0x61, 0x79, 0x6c, 0x6f, 0x61, 0x64, 0x5f, 0x61, 0x76, 0x61, 0x69, 0x6c, + 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x79, 0x18, 0xb1, 0x6d, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x08, + 0x8a, 0xb5, 0x18, 0x04, 0x31, 0x30, 0x32, 0x34, 0x52, 0x1c, 0x65, 0x78, 0x65, 0x63, 0x75, 0x74, + 0x69, 0x6f, 0x6e, 0x50, 0x61, 0x79, 0x6c, 0x6f, 0x61, 0x64, 0x41, 0x76, 0x61, 0x69, 0x6c, 0x61, + 0x62, 0x69, 0x6c, 0x69, 0x74, 0x79, 0x12, 0x6f, 0x0a, 0x18, 0x62, 0x75, 0x69, 0x6c, 0x64, 0x65, + 0x72, 0x5f, 0x70, 0x65, 0x6e, 0x64, 0x69, 0x6e, 0x67, 0x5f, 0x70, 0x61, 0x79, 0x6d, 0x65, 0x6e, + 0x74, 0x73, 0x18, 0xb2, 0x6d, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x2c, 0x2e, 0x65, 0x74, 0x68, 0x65, + 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, + 0x31, 0x2e, 0x42, 0x75, 0x69, 0x6c, 0x64, 0x65, 0x72, 0x50, 0x65, 0x6e, 0x64, 0x69, 0x6e, 0x67, + 0x50, 0x61, 0x79, 0x6d, 0x65, 0x6e, 0x74, 0x42, 0x06, 0x8a, 0xb5, 0x18, 0x02, 0x36, 0x34, 0x52, + 0x16, 0x62, 0x75, 0x69, 0x6c, 0x64, 0x65, 0x72, 0x50, 0x65, 0x6e, 0x64, 0x69, 0x6e, 0x67, 0x50, + 0x61, 0x79, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x12, 0x7d, 0x0a, 0x1b, 0x62, 0x75, 0x69, 0x6c, 0x64, + 0x65, 0x72, 0x5f, 0x70, 0x65, 0x6e, 0x64, 0x69, 0x6e, 0x67, 0x5f, 0x77, 0x69, 0x74, 0x68, 0x64, + 0x72, 0x61, 0x77, 0x61, 0x6c, 0x73, 0x18, 0xb3, 0x6d, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x2f, 0x2e, + 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 
0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, + 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x42, 0x75, 0x69, 0x6c, 0x64, 0x65, 0x72, 0x50, 0x65, 0x6e, + 0x64, 0x69, 0x6e, 0x67, 0x57, 0x69, 0x74, 0x68, 0x64, 0x72, 0x61, 0x77, 0x61, 0x6c, 0x42, 0x0b, + 0x92, 0xb5, 0x18, 0x07, 0x31, 0x30, 0x34, 0x38, 0x35, 0x37, 0x36, 0x52, 0x19, 0x62, 0x75, 0x69, + 0x6c, 0x64, 0x65, 0x72, 0x50, 0x65, 0x6e, 0x64, 0x69, 0x6e, 0x67, 0x57, 0x69, 0x74, 0x68, 0x64, + 0x72, 0x61, 0x77, 0x61, 0x6c, 0x73, 0x12, 0x33, 0x0a, 0x11, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x74, + 0x5f, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x5f, 0x68, 0x61, 0x73, 0x68, 0x18, 0xb4, 0x6d, 0x20, 0x01, + 0x28, 0x0c, 0x42, 0x06, 0x8a, 0xb5, 0x18, 0x02, 0x33, 0x32, 0x52, 0x0f, 0x6c, 0x61, 0x74, 0x65, + 0x73, 0x74, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x48, 0x61, 0x73, 0x68, 0x12, 0x3f, 0x0a, 0x17, 0x6c, + 0x61, 0x74, 0x65, 0x73, 0x74, 0x5f, 0x77, 0x69, 0x74, 0x68, 0x64, 0x72, 0x61, 0x77, 0x61, 0x6c, + 0x73, 0x5f, 0x72, 0x6f, 0x6f, 0x74, 0x18, 0xb5, 0x6d, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x06, 0x8a, + 0xb5, 0x18, 0x02, 0x33, 0x32, 0x52, 0x15, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x74, 0x57, 0x69, 0x74, + 0x68, 0x64, 0x72, 0x61, 0x77, 0x61, 0x6c, 0x73, 0x52, 0x6f, 0x6f, 0x74, 0x22, 0xc6, 0x01, 0x0a, + 0x15, 0x42, 0x75, 0x69, 0x6c, 0x64, 0x65, 0x72, 0x50, 0x65, 0x6e, 0x64, 0x69, 0x6e, 0x67, 0x50, + 0x61, 0x79, 0x6d, 0x65, 0x6e, 0x74, 0x12, 0x5c, 0x0a, 0x06, 0x77, 0x65, 0x69, 0x67, 0x68, 0x74, + 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, 0x42, 0x44, 0x82, 0xb5, 0x18, 0x40, 0x67, 0x69, 0x74, 0x68, + 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, + 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, + 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, + 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x47, 0x77, 0x65, 0x69, 0x52, 0x06, 0x77, 0x65, + 0x69, 0x67, 0x68, 0x74, 0x12, 0x4f, 0x0a, 0x0a, 0x77, 0x69, 0x74, 0x68, 0x64, 0x72, 0x61, 0x77, + 0x61, 0x6c, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x2f, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, + 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, + 0x2e, 0x42, 0x75, 0x69, 0x6c, 0x64, 0x65, 0x72, 0x50, 0x65, 0x6e, 0x64, 0x69, 0x6e, 0x67, 0x57, + 0x69, 0x74, 0x68, 0x64, 0x72, 0x61, 0x77, 0x61, 0x6c, 0x52, 0x0a, 0x77, 0x69, 0x74, 0x68, 0x64, + 0x72, 0x61, 0x77, 0x61, 0x6c, 0x22, 0x90, 0x03, 0x0a, 0x18, 0x42, 0x75, 0x69, 0x6c, 0x64, 0x65, + 0x72, 0x50, 0x65, 0x6e, 0x64, 0x69, 0x6e, 0x67, 0x57, 0x69, 0x74, 0x68, 0x64, 0x72, 0x61, 0x77, + 0x61, 0x6c, 0x12, 0x2b, 0x0a, 0x0d, 0x66, 0x65, 0x65, 0x5f, 0x72, 0x65, 0x63, 0x69, 0x70, 0x69, + 0x65, 0x6e, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x06, 0x8a, 0xb5, 0x18, 0x02, 0x32, + 0x30, 0x52, 0x0c, 0x66, 0x65, 0x65, 0x52, 0x65, 0x63, 0x69, 0x70, 0x69, 0x65, 0x6e, 0x74, 0x12, + 0x5c, 0x0a, 0x06, 0x61, 0x6d, 0x6f, 0x75, 0x6e, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x04, 0x42, + 0x44, 0x82, 0xb5, 0x18, 0x40, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, + 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, + 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, + 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, + 0x2e, 0x47, 0x77, 0x65, 0x69, 0x52, 0x06, 0x61, 0x6d, 0x6f, 0x75, 0x6e, 0x74, 0x12, 0x73, 0x0a, + 0x0d, 0x62, 0x75, 0x69, 0x6c, 0x64, 0x65, 0x72, 0x5f, 0x69, 0x6e, 
0x64, 0x65, 0x78, 0x18, 0x03, + 0x20, 0x01, 0x28, 0x04, 0x42, 0x4e, 0x82, 0xb5, 0x18, 0x4a, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, + 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, + 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, + 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, + 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x56, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, 0x49, + 0x6e, 0x64, 0x65, 0x78, 0x52, 0x0c, 0x62, 0x75, 0x69, 0x6c, 0x64, 0x65, 0x72, 0x49, 0x6e, 0x64, + 0x65, 0x78, 0x12, 0x74, 0x0a, 0x12, 0x77, 0x69, 0x74, 0x68, 0x64, 0x72, 0x61, 0x77, 0x61, 0x62, + 0x6c, 0x65, 0x5f, 0x65, 0x70, 0x6f, 0x63, 0x68, 0x18, 0x04, 0x20, 0x01, 0x28, 0x04, 0x42, 0x45, + 0x82, 0xb5, 0x18, 0x41, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, + 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, + 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, + 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, + 0x45, 0x70, 0x6f, 0x63, 0x68, 0x52, 0x11, 0x77, 0x69, 0x74, 0x68, 0x64, 0x72, 0x61, 0x77, 0x61, + 0x62, 0x6c, 0x65, 0x45, 0x70, 0x6f, 0x63, 0x68, 0x22, 0xd4, 0x02, 0x0a, 0x16, 0x44, 0x61, 0x74, + 0x61, 0x43, 0x6f, 0x6c, 0x75, 0x6d, 0x6e, 0x53, 0x69, 0x64, 0x65, 0x63, 0x61, 0x72, 0x47, 0x6c, + 0x6f, 0x61, 0x73, 0x12, 0x14, 0x0a, 0x05, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x18, 0x01, 0x20, 0x01, + 0x28, 0x04, 0x52, 0x05, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x12, 0x2a, 0x0a, 0x06, 0x63, 0x6f, 0x6c, + 0x75, 0x6d, 0x6e, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0c, 0x42, 0x12, 0x8a, 0xb5, 0x18, 0x06, 0x3f, + 0x2c, 0x32, 0x30, 0x34, 0x38, 0x92, 0xb5, 0x18, 0x04, 0x34, 0x30, 0x39, 0x36, 0x52, 0x06, 0x63, + 0x6f, 0x6c, 0x75, 0x6d, 0x6e, 0x12, 0x39, 0x0a, 0x0f, 0x6b, 0x7a, 0x67, 0x5f, 0x63, 0x6f, 0x6d, + 0x6d, 0x69, 0x74, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, 0x0c, 0x42, 0x10, + 0x8a, 0xb5, 0x18, 0x04, 0x3f, 0x2c, 0x34, 0x38, 0x92, 0xb5, 0x18, 0x04, 0x34, 0x30, 0x39, 0x36, + 0x52, 0x0e, 0x6b, 0x7a, 0x67, 0x43, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x6d, 0x65, 0x6e, 0x74, 0x73, + 0x12, 0x2f, 0x0a, 0x0a, 0x6b, 0x7a, 0x67, 0x5f, 0x70, 0x72, 0x6f, 0x6f, 0x66, 0x73, 0x18, 0x04, + 0x20, 0x03, 0x28, 0x0c, 0x42, 0x10, 0x8a, 0xb5, 0x18, 0x04, 0x3f, 0x2c, 0x34, 0x38, 0x92, 0xb5, + 0x18, 0x04, 0x34, 0x30, 0x39, 0x36, 0x52, 0x09, 0x6b, 0x7a, 0x67, 0x50, 0x72, 0x6f, 0x6f, 0x66, + 0x73, 0x12, 0x58, 0x0a, 0x04, 0x73, 0x6c, 0x6f, 0x74, 0x18, 0x05, 0x20, 0x01, 0x28, 0x04, 0x42, + 0x44, 0x82, 0xb5, 0x18, 0x40, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, + 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, + 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, + 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, + 0x2e, 0x53, 0x6c, 0x6f, 0x74, 0x52, 0x04, 0x73, 0x6c, 0x6f, 0x74, 0x12, 0x32, 0x0a, 0x11, 0x62, + 0x65, 0x61, 0x63, 0x6f, 0x6e, 0x5f, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x5f, 0x72, 0x6f, 0x6f, 0x74, + 0x18, 0x06, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x06, 0x8a, 0xb5, 0x18, 0x02, 0x33, 0x32, 0x52, 0x0f, + 0x62, 0x65, 0x61, 0x63, 0x6f, 0x6e, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x52, 0x6f, 0x6f, 0x74, 0x22, + 0xa3, 0x04, 0x0a, 0x18, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x50, 0x61, 
0x79, + 0x6c, 0x6f, 0x61, 0x64, 0x45, 0x6e, 0x76, 0x65, 0x6c, 0x6f, 0x70, 0x65, 0x12, 0x43, 0x0a, 0x07, + 0x70, 0x61, 0x79, 0x6c, 0x6f, 0x61, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x29, 0x2e, + 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x6e, 0x67, 0x69, 0x6e, 0x65, 0x2e, + 0x76, 0x31, 0x2e, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x50, 0x61, 0x79, 0x6c, + 0x6f, 0x61, 0x64, 0x44, 0x65, 0x6e, 0x65, 0x62, 0x52, 0x07, 0x70, 0x61, 0x79, 0x6c, 0x6f, 0x61, + 0x64, 0x12, 0x54, 0x0a, 0x12, 0x65, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x72, + 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x73, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x25, 0x2e, + 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x6e, 0x67, 0x69, 0x6e, 0x65, 0x2e, + 0x76, 0x31, 0x2e, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x65, 0x71, 0x75, + 0x65, 0x73, 0x74, 0x73, 0x52, 0x11, 0x65, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x52, + 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x73, 0x12, 0x73, 0x0a, 0x0d, 0x62, 0x75, 0x69, 0x6c, 0x64, + 0x65, 0x72, 0x5f, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x18, 0x03, 0x20, 0x01, 0x28, 0x04, 0x42, 0x4e, + 0x82, 0xb5, 0x18, 0x4a, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, + 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, + 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, + 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, + 0x56, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x52, 0x0c, + 0x62, 0x75, 0x69, 0x6c, 0x64, 0x65, 0x72, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x12, 0x32, 0x0a, 0x11, + 0x62, 0x65, 0x61, 0x63, 0x6f, 0x6e, 0x5f, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x5f, 0x72, 0x6f, 0x6f, + 0x74, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x06, 0x8a, 0xb5, 0x18, 0x02, 0x33, 0x32, 0x52, + 0x0f, 0x62, 0x65, 0x61, 0x63, 0x6f, 0x6e, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x52, 0x6f, 0x6f, 0x74, + 0x12, 0x58, 0x0a, 0x04, 0x73, 0x6c, 0x6f, 0x74, 0x18, 0x05, 0x20, 0x01, 0x28, 0x04, 0x42, 0x44, + 0x82, 0xb5, 0x18, 0x40, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, + 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, + 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, + 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, + 0x53, 0x6c, 0x6f, 0x74, 0x52, 0x04, 0x73, 0x6c, 0x6f, 0x74, 0x12, 0x42, 0x0a, 0x14, 0x62, 0x6c, + 0x6f, 0x62, 0x5f, 0x6b, 0x7a, 0x67, 0x5f, 0x63, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x6d, 0x65, 0x6e, + 0x74, 0x73, 0x18, 0x06, 0x20, 0x03, 0x28, 0x0c, 0x42, 0x10, 0x8a, 0xb5, 0x18, 0x04, 0x3f, 0x2c, + 0x34, 0x38, 0x92, 0xb5, 0x18, 0x04, 0x34, 0x30, 0x39, 0x36, 0x52, 0x12, 0x62, 0x6c, 0x6f, 0x62, + 0x4b, 0x7a, 0x67, 0x43, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x12, 0x25, + 0x0a, 0x0a, 0x73, 0x74, 0x61, 0x74, 0x65, 0x5f, 0x72, 0x6f, 0x6f, 0x74, 0x18, 0x07, 0x20, 0x01, + 0x28, 0x0c, 0x42, 0x06, 0x8a, 0xb5, 0x18, 0x02, 0x33, 0x32, 0x52, 0x09, 0x73, 0x74, 0x61, 0x74, + 0x65, 0x52, 0x6f, 0x6f, 0x74, 0x22, 0x91, 0x01, 0x0a, 0x1e, 0x53, 0x69, 0x67, 0x6e, 0x65, 0x64, + 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x50, 0x61, 0x79, 0x6c, 0x6f, 0x61, 0x64, + 0x45, 0x6e, 0x76, 0x65, 0x6c, 0x6f, 0x70, 0x65, 0x12, 0x49, 0x0a, 0x07, 0x6d, 0x65, 0x73, 0x73, + 0x61, 0x67, 0x65, 
0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x2f, 0x2e, 0x65, 0x74, 0x68, 0x65, + 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, + 0x31, 0x2e, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x50, 0x61, 0x79, 0x6c, 0x6f, + 0x61, 0x64, 0x45, 0x6e, 0x76, 0x65, 0x6c, 0x6f, 0x70, 0x65, 0x52, 0x07, 0x6d, 0x65, 0x73, 0x73, + 0x61, 0x67, 0x65, 0x12, 0x24, 0x0a, 0x09, 0x73, 0x69, 0x67, 0x6e, 0x61, 0x74, 0x75, 0x72, 0x65, + 0x18, 0x02, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x06, 0x8a, 0xb5, 0x18, 0x02, 0x39, 0x36, 0x52, 0x09, + 0x73, 0x69, 0x67, 0x6e, 0x61, 0x74, 0x75, 0x72, 0x65, 0x42, 0x3b, 0x5a, 0x39, 0x67, 0x69, 0x74, + 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, + 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x70, 0x72, + 0x6f, 0x74, 0x6f, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, + 0x61, 0x31, 0x3b, 0x65, 0x74, 0x68, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, +} + +var ( + file_proto_prysm_v1alpha1_gloas_proto_rawDescOnce sync.Once + file_proto_prysm_v1alpha1_gloas_proto_rawDescData = file_proto_prysm_v1alpha1_gloas_proto_rawDesc +) + +func file_proto_prysm_v1alpha1_gloas_proto_rawDescGZIP() []byte { + file_proto_prysm_v1alpha1_gloas_proto_rawDescOnce.Do(func() { + file_proto_prysm_v1alpha1_gloas_proto_rawDescData = protoimpl.X.CompressGZIP(file_proto_prysm_v1alpha1_gloas_proto_rawDescData) + }) + return file_proto_prysm_v1alpha1_gloas_proto_rawDescData +} + +var file_proto_prysm_v1alpha1_gloas_proto_msgTypes = make([]protoimpl.MessageInfo, 14) +var file_proto_prysm_v1alpha1_gloas_proto_goTypes = []any{ + (*ExecutionPayloadBid)(nil), // 0: ethereum.eth.v1alpha1.ExecutionPayloadBid + (*SignedExecutionPayloadBid)(nil), // 1: ethereum.eth.v1alpha1.SignedExecutionPayloadBid + (*PayloadAttestationData)(nil), // 2: ethereum.eth.v1alpha1.PayloadAttestationData + (*PayloadAttestation)(nil), // 3: ethereum.eth.v1alpha1.PayloadAttestation + (*PayloadAttestationMessage)(nil), // 4: ethereum.eth.v1alpha1.PayloadAttestationMessage + (*BeaconBlockGloas)(nil), // 5: ethereum.eth.v1alpha1.BeaconBlockGloas + (*BeaconBlockBodyGloas)(nil), // 6: ethereum.eth.v1alpha1.BeaconBlockBodyGloas + (*SignedBeaconBlockGloas)(nil), // 7: ethereum.eth.v1alpha1.SignedBeaconBlockGloas + (*BeaconStateGloas)(nil), // 8: ethereum.eth.v1alpha1.BeaconStateGloas + (*BuilderPendingPayment)(nil), // 9: ethereum.eth.v1alpha1.BuilderPendingPayment + (*BuilderPendingWithdrawal)(nil), // 10: ethereum.eth.v1alpha1.BuilderPendingWithdrawal + (*DataColumnSidecarGloas)(nil), // 11: ethereum.eth.v1alpha1.DataColumnSidecarGloas + (*ExecutionPayloadEnvelope)(nil), // 12: ethereum.eth.v1alpha1.ExecutionPayloadEnvelope + (*SignedExecutionPayloadEnvelope)(nil), // 13: ethereum.eth.v1alpha1.SignedExecutionPayloadEnvelope + (*Eth1Data)(nil), // 14: ethereum.eth.v1alpha1.Eth1Data + (*ProposerSlashing)(nil), // 15: ethereum.eth.v1alpha1.ProposerSlashing + (*AttesterSlashingElectra)(nil), // 16: ethereum.eth.v1alpha1.AttesterSlashingElectra + (*AttestationElectra)(nil), // 17: ethereum.eth.v1alpha1.AttestationElectra + (*Deposit)(nil), // 18: ethereum.eth.v1alpha1.Deposit + (*SignedVoluntaryExit)(nil), // 19: ethereum.eth.v1alpha1.SignedVoluntaryExit + (*SyncAggregate)(nil), // 20: ethereum.eth.v1alpha1.SyncAggregate + (*SignedBLSToExecutionChange)(nil), // 21: ethereum.eth.v1alpha1.SignedBLSToExecutionChange + (*Fork)(nil), // 22: ethereum.eth.v1alpha1.Fork + 
(*BeaconBlockHeader)(nil), // 23: ethereum.eth.v1alpha1.BeaconBlockHeader + (*Validator)(nil), // 24: ethereum.eth.v1alpha1.Validator + (*Checkpoint)(nil), // 25: ethereum.eth.v1alpha1.Checkpoint + (*SyncCommittee)(nil), // 26: ethereum.eth.v1alpha1.SyncCommittee + (*HistoricalSummary)(nil), // 27: ethereum.eth.v1alpha1.HistoricalSummary + (*PendingDeposit)(nil), // 28: ethereum.eth.v1alpha1.PendingDeposit + (*PendingPartialWithdrawal)(nil), // 29: ethereum.eth.v1alpha1.PendingPartialWithdrawal + (*PendingConsolidation)(nil), // 30: ethereum.eth.v1alpha1.PendingConsolidation + (*v1.ExecutionPayloadDeneb)(nil), // 31: ethereum.engine.v1.ExecutionPayloadDeneb + (*v1.ExecutionRequests)(nil), // 32: ethereum.engine.v1.ExecutionRequests +} +var file_proto_prysm_v1alpha1_gloas_proto_depIdxs = []int32{ + 0, // 0: ethereum.eth.v1alpha1.SignedExecutionPayloadBid.message:type_name -> ethereum.eth.v1alpha1.ExecutionPayloadBid + 2, // 1: ethereum.eth.v1alpha1.PayloadAttestation.data:type_name -> ethereum.eth.v1alpha1.PayloadAttestationData + 2, // 2: ethereum.eth.v1alpha1.PayloadAttestationMessage.data:type_name -> ethereum.eth.v1alpha1.PayloadAttestationData + 6, // 3: ethereum.eth.v1alpha1.BeaconBlockGloas.body:type_name -> ethereum.eth.v1alpha1.BeaconBlockBodyGloas + 14, // 4: ethereum.eth.v1alpha1.BeaconBlockBodyGloas.eth1_data:type_name -> ethereum.eth.v1alpha1.Eth1Data + 15, // 5: ethereum.eth.v1alpha1.BeaconBlockBodyGloas.proposer_slashings:type_name -> ethereum.eth.v1alpha1.ProposerSlashing + 16, // 6: ethereum.eth.v1alpha1.BeaconBlockBodyGloas.attester_slashings:type_name -> ethereum.eth.v1alpha1.AttesterSlashingElectra + 17, // 7: ethereum.eth.v1alpha1.BeaconBlockBodyGloas.attestations:type_name -> ethereum.eth.v1alpha1.AttestationElectra + 18, // 8: ethereum.eth.v1alpha1.BeaconBlockBodyGloas.deposits:type_name -> ethereum.eth.v1alpha1.Deposit + 19, // 9: ethereum.eth.v1alpha1.BeaconBlockBodyGloas.voluntary_exits:type_name -> ethereum.eth.v1alpha1.SignedVoluntaryExit + 20, // 10: ethereum.eth.v1alpha1.BeaconBlockBodyGloas.sync_aggregate:type_name -> ethereum.eth.v1alpha1.SyncAggregate + 21, // 11: ethereum.eth.v1alpha1.BeaconBlockBodyGloas.bls_to_execution_changes:type_name -> ethereum.eth.v1alpha1.SignedBLSToExecutionChange + 1, // 12: ethereum.eth.v1alpha1.BeaconBlockBodyGloas.signed_execution_payload_bid:type_name -> ethereum.eth.v1alpha1.SignedExecutionPayloadBid + 3, // 13: ethereum.eth.v1alpha1.BeaconBlockBodyGloas.payload_attestations:type_name -> ethereum.eth.v1alpha1.PayloadAttestation + 5, // 14: ethereum.eth.v1alpha1.SignedBeaconBlockGloas.block:type_name -> ethereum.eth.v1alpha1.BeaconBlockGloas + 22, // 15: ethereum.eth.v1alpha1.BeaconStateGloas.fork:type_name -> ethereum.eth.v1alpha1.Fork + 23, // 16: ethereum.eth.v1alpha1.BeaconStateGloas.latest_block_header:type_name -> ethereum.eth.v1alpha1.BeaconBlockHeader + 14, // 17: ethereum.eth.v1alpha1.BeaconStateGloas.eth1_data:type_name -> ethereum.eth.v1alpha1.Eth1Data + 14, // 18: ethereum.eth.v1alpha1.BeaconStateGloas.eth1_data_votes:type_name -> ethereum.eth.v1alpha1.Eth1Data + 24, // 19: ethereum.eth.v1alpha1.BeaconStateGloas.validators:type_name -> ethereum.eth.v1alpha1.Validator + 25, // 20: ethereum.eth.v1alpha1.BeaconStateGloas.previous_justified_checkpoint:type_name -> ethereum.eth.v1alpha1.Checkpoint + 25, // 21: ethereum.eth.v1alpha1.BeaconStateGloas.current_justified_checkpoint:type_name -> ethereum.eth.v1alpha1.Checkpoint + 25, // 22: ethereum.eth.v1alpha1.BeaconStateGloas.finalized_checkpoint:type_name -> 
ethereum.eth.v1alpha1.Checkpoint + 26, // 23: ethereum.eth.v1alpha1.BeaconStateGloas.current_sync_committee:type_name -> ethereum.eth.v1alpha1.SyncCommittee + 26, // 24: ethereum.eth.v1alpha1.BeaconStateGloas.next_sync_committee:type_name -> ethereum.eth.v1alpha1.SyncCommittee + 0, // 25: ethereum.eth.v1alpha1.BeaconStateGloas.latest_execution_payload_bid:type_name -> ethereum.eth.v1alpha1.ExecutionPayloadBid + 27, // 26: ethereum.eth.v1alpha1.BeaconStateGloas.historical_summaries:type_name -> ethereum.eth.v1alpha1.HistoricalSummary + 28, // 27: ethereum.eth.v1alpha1.BeaconStateGloas.pending_deposits:type_name -> ethereum.eth.v1alpha1.PendingDeposit + 29, // 28: ethereum.eth.v1alpha1.BeaconStateGloas.pending_partial_withdrawals:type_name -> ethereum.eth.v1alpha1.PendingPartialWithdrawal + 30, // 29: ethereum.eth.v1alpha1.BeaconStateGloas.pending_consolidations:type_name -> ethereum.eth.v1alpha1.PendingConsolidation + 9, // 30: ethereum.eth.v1alpha1.BeaconStateGloas.builder_pending_payments:type_name -> ethereum.eth.v1alpha1.BuilderPendingPayment + 10, // 31: ethereum.eth.v1alpha1.BeaconStateGloas.builder_pending_withdrawals:type_name -> ethereum.eth.v1alpha1.BuilderPendingWithdrawal + 10, // 32: ethereum.eth.v1alpha1.BuilderPendingPayment.withdrawal:type_name -> ethereum.eth.v1alpha1.BuilderPendingWithdrawal + 31, // 33: ethereum.eth.v1alpha1.ExecutionPayloadEnvelope.payload:type_name -> ethereum.engine.v1.ExecutionPayloadDeneb + 32, // 34: ethereum.eth.v1alpha1.ExecutionPayloadEnvelope.execution_requests:type_name -> ethereum.engine.v1.ExecutionRequests + 12, // 35: ethereum.eth.v1alpha1.SignedExecutionPayloadEnvelope.message:type_name -> ethereum.eth.v1alpha1.ExecutionPayloadEnvelope + 36, // [36:36] is the sub-list for method output_type + 36, // [36:36] is the sub-list for method input_type + 36, // [36:36] is the sub-list for extension type_name + 36, // [36:36] is the sub-list for extension extendee + 0, // [0:36] is the sub-list for field type_name +} + +func init() { file_proto_prysm_v1alpha1_gloas_proto_init() } +func file_proto_prysm_v1alpha1_gloas_proto_init() { + if File_proto_prysm_v1alpha1_gloas_proto != nil { + return + } + file_proto_prysm_v1alpha1_attestation_proto_init() + file_proto_prysm_v1alpha1_withdrawals_proto_init() + file_proto_prysm_v1alpha1_beacon_core_types_proto_init() + file_proto_prysm_v1alpha1_eip_7251_proto_init() + type x struct{} + out := protoimpl.TypeBuilder{ + File: protoimpl.DescBuilder{ + GoPackagePath: reflect.TypeOf(x{}).PkgPath(), + RawDescriptor: file_proto_prysm_v1alpha1_gloas_proto_rawDesc, + NumEnums: 0, + NumMessages: 14, + NumExtensions: 0, + NumServices: 0, + }, + GoTypes: file_proto_prysm_v1alpha1_gloas_proto_goTypes, + DependencyIndexes: file_proto_prysm_v1alpha1_gloas_proto_depIdxs, + MessageInfos: file_proto_prysm_v1alpha1_gloas_proto_msgTypes, + }.Build() + File_proto_prysm_v1alpha1_gloas_proto = out.File + file_proto_prysm_v1alpha1_gloas_proto_rawDesc = nil + file_proto_prysm_v1alpha1_gloas_proto_goTypes = nil + file_proto_prysm_v1alpha1_gloas_proto_depIdxs = nil +} diff --git a/proto/prysm/v1alpha1/gloas.proto b/proto/prysm/v1alpha1/gloas.proto new file mode 100644 index 0000000000..461c017c91 --- /dev/null +++ b/proto/prysm/v1alpha1/gloas.proto @@ -0,0 +1,423 @@ +syntax = "proto3"; + +package ethereum.eth.v1alpha1; + +import "proto/engine/v1/execution_engine.proto"; +import "proto/engine/v1/electra.proto"; +import "proto/eth/ext/options.proto"; +import "proto/prysm/v1alpha1/attestation.proto"; +import 
"proto/prysm/v1alpha1/withdrawals.proto"; +import "proto/prysm/v1alpha1/beacon_core_types.proto"; +import "proto/prysm/v1alpha1/eip_7251.proto"; + +option go_package = "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1;eth"; + +// ============================================================================= +// Gloas Fork Specification +// ============================================================================= +// This file implements the Gloas fork of the Ethereum consensus specification +// Reference: https://github.com/ethereum/consensus-specs/blob/master/specs/gloas/beacon-chain.md + + +// ExecutionPayloadBid represents an execution payload bid in the Gloas fork. +// +// Spec: +// class ExecutionPayloadBid(Container): +// parent_block_hash: Hash32 +// parent_block_root: Root +// block_hash: Hash32 +// fee_recipient: ExecutionAddress +// gas_limit: uint64 +// builder_index: ValidatorIndex +// slot: Slot +// value: Gwei +// blob_kzg_commitments_root: Root +message ExecutionPayloadBid { + bytes parent_block_hash = 1 [ (ethereum.eth.ext.ssz_size) = "32" ]; + bytes parent_block_root = 2 [ (ethereum.eth.ext.ssz_size) = "32" ]; + bytes block_hash = 3 [ (ethereum.eth.ext.ssz_size) = "32" ]; + bytes fee_recipient = 4 [ (ethereum.eth.ext.ssz_size) = "20" ]; + uint64 gas_limit = 5; + uint64 builder_index = 6 [ (ethereum.eth.ext.cast_type) = + "github.com/OffchainLabs/prysm/v6/" + "consensus-types/primitives.ValidatorIndex" ]; + uint64 slot = 7 [ + (ethereum.eth.ext.cast_type) = + "github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot" + ]; + uint64 value = 8 [ + (ethereum.eth.ext.cast_type) = + "github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Gwei" + ]; + bytes blob_kzg_commitments_root = 9 [ (ethereum.eth.ext.ssz_size) = "32" ]; +} + +// SignedExecutionPayloadBid wraps an execution payload bid with a signature. +// +// Spec: +// class SignedExecutionPayloadBid(Container): +// message: ExecutionPayloadBid +// signature: BLSSignature +message SignedExecutionPayloadBid { + ExecutionPayloadBid message = 1; + bytes signature = 2 [ (ethereum.eth.ext.ssz_size) = "96" ]; +} + +// PayloadAttestationData contains the core data for Payload Timeliness Committee (PTC) attestations. +// +// Spec: +// class PayloadAttestationData(Container): +// beacon_block_root: Root +// slot: Slot +// payload_present: boolean +// blob_data_available: boolean +message PayloadAttestationData { + bytes beacon_block_root = 1 [ (ethereum.eth.ext.ssz_size) = "32" ]; + uint64 slot = 2 [ + (ethereum.eth.ext.cast_type) = + "github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot" + ]; + bool payload_present = 3; + bool blob_data_available = 4; +} + +// PayloadAttestation represents an aggregated attestation from the Payload Timeliness Committee (PTC). +// +// Spec: +// class PayloadAttestation(Container): +// aggregation_bits: Bitvector[PTC_SIZE] +// data: PayloadAttestationData +// signature: BLSSignature +message PayloadAttestation { + bytes aggregation_bits = 1 [ + (ethereum.eth.ext.ssz_size) = "ptc.size", + (ethereum.eth.ext.cast_type) = "ptc.type" + ]; + PayloadAttestationData data = 2; + bytes signature = 3 [ (ethereum.eth.ext.ssz_size) = "96" ]; +} + +// PayloadAttestationMessage represents an individual payload attestation message. 
+// +// Spec: +// class PayloadAttestationMessage(Container): +// validator_index: ValidatorIndex +// data: PayloadAttestationData +// signature: BLSSignature +message PayloadAttestationMessage { + uint64 validator_index = 1 + [ (ethereum.eth.ext.cast_type) = + "github.com/OffchainLabs/prysm/v6/consensus-types/" + "primitives.ValidatorIndex" ]; + PayloadAttestationData data = 2; + bytes signature = 3 [ (ethereum.eth.ext.ssz_size) = "96" ]; +} + +// BeaconBlockGloas represents a beacon block in the Gloas fork. +// The block structure remains the same but contains a modified BeaconBlockBodyGloas. +message BeaconBlockGloas { + uint64 slot = 1 [ + (ethereum.eth.ext.cast_type) = + "github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot" + ]; + uint64 proposer_index = 2 [ (ethereum.eth.ext.cast_type) = + "github.com/OffchainLabs/prysm/v6/" + "consensus-types/primitives.ValidatorIndex" ]; + bytes parent_root = 3 [ (ethereum.eth.ext.ssz_size) = "32" ]; + bytes state_root = 4 [ (ethereum.eth.ext.ssz_size) = "32" ]; + BeaconBlockBodyGloas body = 5; +} + +// BeaconBlockBodyGloas represents the body of a beacon block in the Gloas fork. +// New fields: signed_execution_payload_bid and payload_attestations +// Removed fields: execution_payload, blob_kzg_commitments, execution_requests +// +// Spec: +// class BeaconBlockBody(Container): +// randao_reveal: BLSSignature +// eth1_data: Eth1Data +// graffiti: Bytes32 +// proposer_slashings: List[ProposerSlashing, MAX_PROPOSER_SLASHINGS] +// attester_slashings: List[AttesterSlashing, MAX_ATTESTER_SLASHINGS_ELECTRA] +// attestations: List[Attestation, MAX_ATTESTATIONS_ELECTRA] +// deposits: List[Deposit, MAX_DEPOSITS] +// voluntary_exits: List[SignedVoluntaryExit, MAX_VOLUNTARY_EXITS] +// sync_aggregate: SyncAggregate +// bls_to_execution_changes: List[SignedBLSToExecutionChange, MAX_BLS_TO_EXECUTION_CHANGES] +// signed_execution_payload_bid: SignedExecutionPayloadBid +// payload_attestations: List[PayloadAttestation, MAX_PAYLOAD_ATTESTATIONS] +message BeaconBlockBodyGloas { + bytes randao_reveal = 1 [ (ethereum.eth.ext.ssz_size) = "96" ]; + Eth1Data eth1_data = 2; + bytes graffiti = 3 [ (ethereum.eth.ext.ssz_size) = "32" ]; + repeated ProposerSlashing proposer_slashings = 4 + [ (ethereum.eth.ext.ssz_max) = "16" ]; + repeated AttesterSlashingElectra attester_slashings = 5 + [ (ethereum.eth.ext.ssz_max) = "1" ]; + repeated AttestationElectra attestations = 6 + [ (ethereum.eth.ext.ssz_max) = "8" ]; + repeated Deposit deposits = 7 [ (ethereum.eth.ext.ssz_max) = "16" ]; + repeated SignedVoluntaryExit voluntary_exits = 8 + [ (ethereum.eth.ext.ssz_max) = "16" ]; + SyncAggregate sync_aggregate = 9; + repeated SignedBLSToExecutionChange bls_to_execution_changes = 10 + [ (ethereum.eth.ext.ssz_max) = "16" ]; + + // New in Gloas + SignedExecutionPayloadBid signed_execution_payload_bid = 11; + repeated PayloadAttestation payload_attestations = 12 + [ (ethereum.eth.ext.ssz_max) = "payload_attestation.size" ]; +} + +// SignedBeaconBlockGloas represents a signed beacon block in the Gloas fork. +// +// Spec: +// Standard SignedBeaconBlock structure with BeaconBlockGloas +message SignedBeaconBlockGloas { + BeaconBlockGloas block = 1; + bytes signature = 2 [ (ethereum.eth.ext.ssz_size) = "96" ]; +} + +// BeaconStateGloas represents the beacon state in the Gloas fork. 
+// +// Spec: +// class BeaconState(Container): +// [All previous fields from earlier forks] +// # Replaced existing latest execution header position +// latest_execution_payload_bid: ExecutionPayloadBid +// # New fields in Gloas:EIP7732 +// execution_payload_availability: Bitvector[SLOTS_PER_HISTORICAL_ROOT] +// builder_pending_payments: Vector[BuilderPendingPayment, 2 * SLOTS_PER_EPOCH] +// builder_pending_withdrawals: List[BuilderPendingWithdrawal, BUILDER_PENDING_WITHDRAWALS_LIMIT] +// latest_block_hash: Hash32 +// latest_withdrawals_root: Root +message BeaconStateGloas { + // Versioning [1001-2000] + uint64 genesis_time = 1001; + bytes genesis_validators_root = 1002 [ (ethereum.eth.ext.ssz_size) = "32" ]; + uint64 slot = 1003 [ + (ethereum.eth.ext.cast_type) = + "github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot" + ]; + Fork fork = 1004; + + // History [2001-3000] + BeaconBlockHeader latest_block_header = 2001; + repeated bytes block_roots = 2002 + [ (ethereum.eth.ext.ssz_size) = "block_roots.size" ]; + repeated bytes state_roots = 2003 + [ (ethereum.eth.ext.ssz_size) = "state_roots.size" ]; + repeated bytes historical_roots = 2004 [ + (ethereum.eth.ext.ssz_size) = "?,32", + (ethereum.eth.ext.ssz_max) = "16777216" + ]; + + // Eth1 [3001-4000] + Eth1Data eth1_data = 3001; + repeated Eth1Data eth1_data_votes = 3002 + [ (ethereum.eth.ext.ssz_max) = "eth1_data_votes.size" ]; + uint64 eth1_deposit_index = 3003; + + // Registry [4001-5000] + repeated Validator validators = 4001 + [ (ethereum.eth.ext.ssz_max) = "1099511627776" ]; + repeated uint64 balances = 4002 + [ (ethereum.eth.ext.ssz_max) = "1099511627776" ]; + + // Randomness [5001-6000] + repeated bytes randao_mixes = 5001 + [ (ethereum.eth.ext.ssz_size) = "randao_mixes.size" ]; + + // Slashings [6001-7000] + repeated uint64 slashings = 6001 + [ (ethereum.eth.ext.ssz_size) = "slashings.size" ]; + + // Participation [7001-8000] + bytes previous_epoch_participation = 7001 + [ (ethereum.eth.ext.ssz_max) = "1099511627776" ]; + bytes current_epoch_participation = 7002 + [ (ethereum.eth.ext.ssz_max) = "1099511627776" ]; + + // Finality [8001-9000] + bytes justification_bits = 8001 [ + (ethereum.eth.ext.ssz_size) = "1", + (ethereum.eth.ext.cast_type) = + "github.com/OffchainLabs/go-bitfield.Bitvector4" + ]; + Checkpoint previous_justified_checkpoint = 8002; + Checkpoint current_justified_checkpoint = 8003; + Checkpoint finalized_checkpoint = 8004; + + // Fields introduced in Altair fork [9001-10000] + repeated uint64 inactivity_scores = 9001 + [ (ethereum.eth.ext.ssz_max) = "1099511627776" ]; + SyncCommittee current_sync_committee = 9002; + SyncCommittee next_sync_committee = 9003; + + // Note: latest_execution_payload_header replaced with ExecutionPayloadBid in Gloas + ExecutionPayloadBid latest_execution_payload_bid = 10001; + + // Fields introduced in Capella fork [11001-12000] + uint64 next_withdrawal_index = 11001; + uint64 next_withdrawal_validator_index = 11002 + [ (ethereum.eth.ext.cast_type) = + "github.com/OffchainLabs/prysm/v6/consensus-types/" + "primitives.ValidatorIndex" ]; + repeated HistoricalSummary historical_summaries = 11003 + [ (ethereum.eth.ext.ssz_max) = "16777216" ]; + + // Fields introduced in Electra fork [12001-13000] + uint64 deposit_requests_start_index = 12001; + uint64 deposit_balance_to_consume = 12002 [ + (ethereum.eth.ext.cast_type) = + "github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Gwei" + ]; + uint64 exit_balance_to_consume = 12003 [ + (ethereum.eth.ext.cast_type) = + 
"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Gwei" + ]; + uint64 earliest_exit_epoch = 12004 [ + (ethereum.eth.ext.cast_type) = + "github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Epoch" + ]; + uint64 consolidation_balance_to_consume = 12005 [ + (ethereum.eth.ext.cast_type) = + "github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Gwei" + ]; + uint64 earliest_consolidation_epoch = 12006 [ + (ethereum.eth.ext.cast_type) = + "github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Epoch" + ]; + repeated PendingDeposit pending_deposits = 12007 + [ (ethereum.eth.ext.ssz_max) = "pending_deposits_limit" ]; + repeated PendingPartialWithdrawal pending_partial_withdrawals = 12008 + [ (ethereum.eth.ext.ssz_max) = "pending_partial_withdrawals_limit" ]; + repeated PendingConsolidation pending_consolidations = 12009 + [ (ethereum.eth.ext.ssz_max) = "pending_consolidations_limit" ]; + + // Fields introduced in Fulu fork [13001-14000] + repeated uint64 proposer_lookahead = 13001 + [ (ethereum.eth.ext.ssz_size) = "proposer_lookahead_size" ]; + + // Fields introduced in Gloas fork [14001-15000] + bytes execution_payload_availability = 14001 [ + (ethereum.eth.ext.ssz_size) = "execution_payload_availability.size" + ]; + repeated BuilderPendingPayment builder_pending_payments = 14002 [(ethereum.eth.ext.ssz_size) = "builder_pending_payments.size"]; + repeated BuilderPendingWithdrawal builder_pending_withdrawals = 14003 [(ethereum.eth.ext.ssz_max) = "1048576"]; + bytes latest_block_hash = 14004 [ (ethereum.eth.ext.ssz_size) = "32" ]; + bytes latest_withdrawals_root = 14005 [ (ethereum.eth.ext.ssz_size) = "32" ]; +} + +// BuilderPendingPayment represents a pending payment to a builder. +// +// Spec: +// class BuilderPendingPayment(Container): +// weight: Gwei +// withdrawal: BuilderPendingWithdrawal +message BuilderPendingPayment { + uint64 weight = 1 [ + (ethereum.eth.ext.cast_type) = + "github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Gwei" + ]; + BuilderPendingWithdrawal withdrawal = 2; +} + +// BuilderPendingWithdrawal represents a pending withdrawal for a builder. +// +// Spec: +// class BuilderPendingWithdrawal(Container): +// fee_recipient: ExecutionAddress +// amount: Gwei +// builder_index: ValidatorIndex +// withdrawable_epoch: Epoch +message BuilderPendingWithdrawal { + bytes fee_recipient = 1 [ (ethereum.eth.ext.ssz_size) = "20" ]; + uint64 amount = 2 [ + (ethereum.eth.ext.cast_type) = + "github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Gwei" + ]; + uint64 builder_index = 3 + [ (ethereum.eth.ext.cast_type) = + "github.com/OffchainLabs/prysm/v6/consensus-types/" + "primitives.ValidatorIndex" ]; + uint64 withdrawable_epoch = 4 [ + (ethereum.eth.ext.cast_type) = + "github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Epoch" + ]; +} + +// DataColumnSidecarGloas represents a data column sidecar in the Gloas fork. +// Note: signed_block_header and kzg_commitments_inclusion_proof fields have been removed in Gloas. 
+// +// Spec: +// class DataColumnSidecar(Container): +// index: ColumnIndex +// column: List[Cell, MAX_BLOB_COMMITMENTS_PER_BLOCK] +// kzg_commitents: List[KZGCommitment, MAX_BLOB_COMMITMENTS_PER_BLOCK] +// kzg_proofs: List[KZGProof, MAX_BLOB_COMMITMENTS_PER_BLOCK] +// slot: Slot +// beacon_block_root: Root +message DataColumnSidecarGloas { + uint64 index = 1; + repeated bytes column = 2 [ + (ethereum.eth.ext.ssz_size) = "?,bytes_per_cell.size", + (ethereum.eth.ext.ssz_max) = "max_blob_commitments.size" + ]; + repeated bytes kzg_commitments = 3 [ + (ethereum.eth.ext.ssz_size) = "?,48", + (ethereum.eth.ext.ssz_max) = "max_blob_commitments.size" + ]; + repeated bytes kzg_proofs = 4 [ + (ethereum.eth.ext.ssz_size) = "?,48", + (ethereum.eth.ext.ssz_max) = "max_blob_commitments.size" + ]; + uint64 slot = 5 [ + (ethereum.eth.ext.cast_type) = + "github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot" + ]; + bytes beacon_block_root = 6 [(ethereum.eth.ext.ssz_size) = "32"]; +} + +// ExecutionPayloadEnvelope wraps an execution payload with builder index. +// This is used in the Gloas fork to associate execution payloads with their builders +// and provide additional context needed for payload processing. +// +// Spec: +// class ExecutionPayloadEnvelope(Container): +// payload: ExecutionPayload +// execution_requests: ExecutionRequests +// builder_index: ValidatorIndex +// beacon_block_root: Root +// slot: Slot +// blob_kzg_commitments: List[KZGCommitment, MAX_BLOB_COMMITMENTS_PER_BLOCK] +// state_root: Root +message ExecutionPayloadEnvelope { + ethereum.engine.v1.ExecutionPayloadDeneb payload = 1; + ethereum.engine.v1.ExecutionRequests execution_requests = 2; + uint64 builder_index = 3 [ (ethereum.eth.ext.cast_type) = + "github.com/OffchainLabs/prysm/v6/" + "consensus-types/primitives.ValidatorIndex" ]; + bytes beacon_block_root = 4 [ (ethereum.eth.ext.ssz_size) = "32" ]; + uint64 slot = 5 [ + (ethereum.eth.ext.cast_type) = + "github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot" + ]; + repeated bytes blob_kzg_commitments = 6 [ + (ethereum.eth.ext.ssz_size) = "?,48", + (ethereum.eth.ext.ssz_max) = "max_blob_commitments.size" + ]; + bytes state_root = 7 [ (ethereum.eth.ext.ssz_size) = "32" ]; +} + +// SignedExecutionPayloadEnvelope wraps an execution payload envelope with a signature. +// The signature is provided by the builder who created the execution payload. +// +// Spec: +// class SignedExecutionPayloadEnvelope(Container): +// message: ExecutionPayloadEnvelope +// signature: BLSSignature +message SignedExecutionPayloadEnvelope { + ExecutionPayloadEnvelope message = 1; + bytes signature = 2 [ (ethereum.eth.ext.ssz_size) = "96" ]; +} diff --git a/proto/prysm/v1alpha1/gloas.ssz.go b/proto/prysm/v1alpha1/gloas.ssz.go new file mode 100644 index 0000000000..7a1618413d --- /dev/null +++ b/proto/prysm/v1alpha1/gloas.ssz.go @@ -0,0 +1,3474 @@ +// Code generated by fastssz. DO NOT EDIT. 
+package eth + +import ( + github_com_OffchainLabs_prysm_v6_consensus_types_primitives "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" + v1 "github.com/OffchainLabs/prysm/v6/proto/engine/v1" + ssz "github.com/prysmaticlabs/fastssz" +) + +// MarshalSSZ ssz marshals the ExecutionPayloadBid object +func (e *ExecutionPayloadBid) MarshalSSZ() ([]byte, error) { + return ssz.MarshalSSZ(e) +} + +// MarshalSSZTo ssz marshals the ExecutionPayloadBid object to a target array +func (e *ExecutionPayloadBid) MarshalSSZTo(buf []byte) (dst []byte, err error) { + dst = buf + + // Field (0) 'ParentBlockHash' + if size := len(e.ParentBlockHash); size != 32 { + err = ssz.ErrBytesLengthFn("--.ParentBlockHash", size, 32) + return + } + dst = append(dst, e.ParentBlockHash...) + + // Field (1) 'ParentBlockRoot' + if size := len(e.ParentBlockRoot); size != 32 { + err = ssz.ErrBytesLengthFn("--.ParentBlockRoot", size, 32) + return + } + dst = append(dst, e.ParentBlockRoot...) + + // Field (2) 'BlockHash' + if size := len(e.BlockHash); size != 32 { + err = ssz.ErrBytesLengthFn("--.BlockHash", size, 32) + return + } + dst = append(dst, e.BlockHash...) + + // Field (3) 'FeeRecipient' + if size := len(e.FeeRecipient); size != 20 { + err = ssz.ErrBytesLengthFn("--.FeeRecipient", size, 20) + return + } + dst = append(dst, e.FeeRecipient...) + + // Field (4) 'GasLimit' + dst = ssz.MarshalUint64(dst, e.GasLimit) + + // Field (5) 'BuilderIndex' + dst = ssz.MarshalUint64(dst, uint64(e.BuilderIndex)) + + // Field (6) 'Slot' + dst = ssz.MarshalUint64(dst, uint64(e.Slot)) + + // Field (7) 'Value' + dst = ssz.MarshalUint64(dst, uint64(e.Value)) + + // Field (8) 'BlobKzgCommitmentsRoot' + if size := len(e.BlobKzgCommitmentsRoot); size != 32 { + err = ssz.ErrBytesLengthFn("--.BlobKzgCommitmentsRoot", size, 32) + return + } + dst = append(dst, e.BlobKzgCommitmentsRoot...) + + return +} + +// UnmarshalSSZ ssz unmarshals the ExecutionPayloadBid object +func (e *ExecutionPayloadBid) UnmarshalSSZ(buf []byte) error { + var err error + size := uint64(len(buf)) + if size != 180 { + return ssz.ErrSize + } + + // Field (0) 'ParentBlockHash' + if cap(e.ParentBlockHash) == 0 { + e.ParentBlockHash = make([]byte, 0, len(buf[0:32])) + } + e.ParentBlockHash = append(e.ParentBlockHash, buf[0:32]...) + + // Field (1) 'ParentBlockRoot' + if cap(e.ParentBlockRoot) == 0 { + e.ParentBlockRoot = make([]byte, 0, len(buf[32:64])) + } + e.ParentBlockRoot = append(e.ParentBlockRoot, buf[32:64]...) + + // Field (2) 'BlockHash' + if cap(e.BlockHash) == 0 { + e.BlockHash = make([]byte, 0, len(buf[64:96])) + } + e.BlockHash = append(e.BlockHash, buf[64:96]...) + + // Field (3) 'FeeRecipient' + if cap(e.FeeRecipient) == 0 { + e.FeeRecipient = make([]byte, 0, len(buf[96:116])) + } + e.FeeRecipient = append(e.FeeRecipient, buf[96:116]...) 
+ + // Field (4) 'GasLimit' + e.GasLimit = ssz.UnmarshallUint64(buf[116:124]) + + // Field (5) 'BuilderIndex' + e.BuilderIndex = github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex(ssz.UnmarshallUint64(buf[124:132])) + + // Field (6) 'Slot' + e.Slot = github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot(ssz.UnmarshallUint64(buf[132:140])) + + // Field (7) 'Value' + e.Value = github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Gwei(ssz.UnmarshallUint64(buf[140:148])) + + // Field (8) 'BlobKzgCommitmentsRoot' + if cap(e.BlobKzgCommitmentsRoot) == 0 { + e.BlobKzgCommitmentsRoot = make([]byte, 0, len(buf[148:180])) + } + e.BlobKzgCommitmentsRoot = append(e.BlobKzgCommitmentsRoot, buf[148:180]...) + + return err +} + +// SizeSSZ returns the ssz encoded size in bytes for the ExecutionPayloadBid object +func (e *ExecutionPayloadBid) SizeSSZ() (size int) { + size = 180 + return +} + +// HashTreeRoot ssz hashes the ExecutionPayloadBid object +func (e *ExecutionPayloadBid) HashTreeRoot() ([32]byte, error) { + return ssz.HashWithDefaultHasher(e) +} + +// HashTreeRootWith ssz hashes the ExecutionPayloadBid object with a hasher +func (e *ExecutionPayloadBid) HashTreeRootWith(hh *ssz.Hasher) (err error) { + indx := hh.Index() + + // Field (0) 'ParentBlockHash' + if size := len(e.ParentBlockHash); size != 32 { + err = ssz.ErrBytesLengthFn("--.ParentBlockHash", size, 32) + return + } + hh.PutBytes(e.ParentBlockHash) + + // Field (1) 'ParentBlockRoot' + if size := len(e.ParentBlockRoot); size != 32 { + err = ssz.ErrBytesLengthFn("--.ParentBlockRoot", size, 32) + return + } + hh.PutBytes(e.ParentBlockRoot) + + // Field (2) 'BlockHash' + if size := len(e.BlockHash); size != 32 { + err = ssz.ErrBytesLengthFn("--.BlockHash", size, 32) + return + } + hh.PutBytes(e.BlockHash) + + // Field (3) 'FeeRecipient' + if size := len(e.FeeRecipient); size != 20 { + err = ssz.ErrBytesLengthFn("--.FeeRecipient", size, 20) + return + } + hh.PutBytes(e.FeeRecipient) + + // Field (4) 'GasLimit' + hh.PutUint64(e.GasLimit) + + // Field (5) 'BuilderIndex' + hh.PutUint64(uint64(e.BuilderIndex)) + + // Field (6) 'Slot' + hh.PutUint64(uint64(e.Slot)) + + // Field (7) 'Value' + hh.PutUint64(uint64(e.Value)) + + // Field (8) 'BlobKzgCommitmentsRoot' + if size := len(e.BlobKzgCommitmentsRoot); size != 32 { + err = ssz.ErrBytesLengthFn("--.BlobKzgCommitmentsRoot", size, 32) + return + } + hh.PutBytes(e.BlobKzgCommitmentsRoot) + + hh.Merkleize(indx) + return +} + +// MarshalSSZ ssz marshals the SignedExecutionPayloadBid object +func (s *SignedExecutionPayloadBid) MarshalSSZ() ([]byte, error) { + return ssz.MarshalSSZ(s) +} + +// MarshalSSZTo ssz marshals the SignedExecutionPayloadBid object to a target array +func (s *SignedExecutionPayloadBid) MarshalSSZTo(buf []byte) (dst []byte, err error) { + dst = buf + + // Field (0) 'Message' + if s.Message == nil { + s.Message = new(ExecutionPayloadBid) + } + if dst, err = s.Message.MarshalSSZTo(dst); err != nil { + return + } + + // Field (1) 'Signature' + if size := len(s.Signature); size != 96 { + err = ssz.ErrBytesLengthFn("--.Signature", size, 96) + return + } + dst = append(dst, s.Signature...) 
+ + return +} + +// UnmarshalSSZ ssz unmarshals the SignedExecutionPayloadBid object +func (s *SignedExecutionPayloadBid) UnmarshalSSZ(buf []byte) error { + var err error + size := uint64(len(buf)) + if size != 276 { + return ssz.ErrSize + } + + // Field (0) 'Message' + if s.Message == nil { + s.Message = new(ExecutionPayloadBid) + } + if err = s.Message.UnmarshalSSZ(buf[0:180]); err != nil { + return err + } + + // Field (1) 'Signature' + if cap(s.Signature) == 0 { + s.Signature = make([]byte, 0, len(buf[180:276])) + } + s.Signature = append(s.Signature, buf[180:276]...) + + return err +} + +// SizeSSZ returns the ssz encoded size in bytes for the SignedExecutionPayloadBid object +func (s *SignedExecutionPayloadBid) SizeSSZ() (size int) { + size = 276 + return +} + +// HashTreeRoot ssz hashes the SignedExecutionPayloadBid object +func (s *SignedExecutionPayloadBid) HashTreeRoot() ([32]byte, error) { + return ssz.HashWithDefaultHasher(s) +} + +// HashTreeRootWith ssz hashes the SignedExecutionPayloadBid object with a hasher +func (s *SignedExecutionPayloadBid) HashTreeRootWith(hh *ssz.Hasher) (err error) { + indx := hh.Index() + + // Field (0) 'Message' + if err = s.Message.HashTreeRootWith(hh); err != nil { + return + } + + // Field (1) 'Signature' + if size := len(s.Signature); size != 96 { + err = ssz.ErrBytesLengthFn("--.Signature", size, 96) + return + } + hh.PutBytes(s.Signature) + + hh.Merkleize(indx) + return +} + +// MarshalSSZ ssz marshals the PayloadAttestationData object +func (p *PayloadAttestationData) MarshalSSZ() ([]byte, error) { + return ssz.MarshalSSZ(p) +} + +// MarshalSSZTo ssz marshals the PayloadAttestationData object to a target array +func (p *PayloadAttestationData) MarshalSSZTo(buf []byte) (dst []byte, err error) { + dst = buf + + // Field (0) 'BeaconBlockRoot' + if size := len(p.BeaconBlockRoot); size != 32 { + err = ssz.ErrBytesLengthFn("--.BeaconBlockRoot", size, 32) + return + } + dst = append(dst, p.BeaconBlockRoot...) + + // Field (1) 'Slot' + dst = ssz.MarshalUint64(dst, uint64(p.Slot)) + + // Field (2) 'PayloadPresent' + dst = ssz.MarshalBool(dst, p.PayloadPresent) + + // Field (3) 'BlobDataAvailable' + dst = ssz.MarshalBool(dst, p.BlobDataAvailable) + + return +} + +// UnmarshalSSZ ssz unmarshals the PayloadAttestationData object +func (p *PayloadAttestationData) UnmarshalSSZ(buf []byte) error { + var err error + size := uint64(len(buf)) + if size != 42 { + return ssz.ErrSize + } + + // Field (0) 'BeaconBlockRoot' + if cap(p.BeaconBlockRoot) == 0 { + p.BeaconBlockRoot = make([]byte, 0, len(buf[0:32])) + } + p.BeaconBlockRoot = append(p.BeaconBlockRoot, buf[0:32]...) 
+ + // Field (1) 'Slot' + p.Slot = github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot(ssz.UnmarshallUint64(buf[32:40])) + + // Field (2) 'PayloadPresent' + p.PayloadPresent, err = ssz.DecodeBool(buf[40:41]) + if err != nil { + return err + } + + // Field (3) 'BlobDataAvailable' + p.BlobDataAvailable, err = ssz.DecodeBool(buf[41:42]) + if err != nil { + return err + } + + return err +} + +// SizeSSZ returns the ssz encoded size in bytes for the PayloadAttestationData object +func (p *PayloadAttestationData) SizeSSZ() (size int) { + size = 42 + return +} + +// HashTreeRoot ssz hashes the PayloadAttestationData object +func (p *PayloadAttestationData) HashTreeRoot() ([32]byte, error) { + return ssz.HashWithDefaultHasher(p) +} + +// HashTreeRootWith ssz hashes the PayloadAttestationData object with a hasher +func (p *PayloadAttestationData) HashTreeRootWith(hh *ssz.Hasher) (err error) { + indx := hh.Index() + + // Field (0) 'BeaconBlockRoot' + if size := len(p.BeaconBlockRoot); size != 32 { + err = ssz.ErrBytesLengthFn("--.BeaconBlockRoot", size, 32) + return + } + hh.PutBytes(p.BeaconBlockRoot) + + // Field (1) 'Slot' + hh.PutUint64(uint64(p.Slot)) + + // Field (2) 'PayloadPresent' + hh.PutBool(p.PayloadPresent) + + // Field (3) 'BlobDataAvailable' + hh.PutBool(p.BlobDataAvailable) + + hh.Merkleize(indx) + return +} + +// MarshalSSZ ssz marshals the PayloadAttestation object +func (p *PayloadAttestation) MarshalSSZ() ([]byte, error) { + return ssz.MarshalSSZ(p) +} + +// MarshalSSZTo ssz marshals the PayloadAttestation object to a target array +func (p *PayloadAttestation) MarshalSSZTo(buf []byte) (dst []byte, err error) { + dst = buf + + // Field (0) 'AggregationBits' + if size := len(p.AggregationBits); size != 64 { + err = ssz.ErrBytesLengthFn("--.AggregationBits", size, 64) + return + } + dst = append(dst, p.AggregationBits...) + + // Field (1) 'Data' + if p.Data == nil { + p.Data = new(PayloadAttestationData) + } + if dst, err = p.Data.MarshalSSZTo(dst); err != nil { + return + } + + // Field (2) 'Signature' + if size := len(p.Signature); size != 96 { + err = ssz.ErrBytesLengthFn("--.Signature", size, 96) + return + } + dst = append(dst, p.Signature...) + + return +} + +// UnmarshalSSZ ssz unmarshals the PayloadAttestation object +func (p *PayloadAttestation) UnmarshalSSZ(buf []byte) error { + var err error + size := uint64(len(buf)) + if size != 202 { + return ssz.ErrSize + } + + // Field (0) 'AggregationBits' + if cap(p.AggregationBits) == 0 { + p.AggregationBits = make([]byte, 0, len(buf[0:64])) + } + p.AggregationBits = append(p.AggregationBits, buf[0:64]...) + + // Field (1) 'Data' + if p.Data == nil { + p.Data = new(PayloadAttestationData) + } + if err = p.Data.UnmarshalSSZ(buf[64:106]); err != nil { + return err + } + + // Field (2) 'Signature' + if cap(p.Signature) == 0 { + p.Signature = make([]byte, 0, len(buf[106:202])) + } + p.Signature = append(p.Signature, buf[106:202]...) 
+ + return err +} + +// SizeSSZ returns the ssz encoded size in bytes for the PayloadAttestation object +func (p *PayloadAttestation) SizeSSZ() (size int) { + size = 202 + return +} + +// HashTreeRoot ssz hashes the PayloadAttestation object +func (p *PayloadAttestation) HashTreeRoot() ([32]byte, error) { + return ssz.HashWithDefaultHasher(p) +} + +// HashTreeRootWith ssz hashes the PayloadAttestation object with a hasher +func (p *PayloadAttestation) HashTreeRootWith(hh *ssz.Hasher) (err error) { + indx := hh.Index() + + // Field (0) 'AggregationBits' + if size := len(p.AggregationBits); size != 64 { + err = ssz.ErrBytesLengthFn("--.AggregationBits", size, 64) + return + } + hh.PutBytes(p.AggregationBits) + + // Field (1) 'Data' + if err = p.Data.HashTreeRootWith(hh); err != nil { + return + } + + // Field (2) 'Signature' + if size := len(p.Signature); size != 96 { + err = ssz.ErrBytesLengthFn("--.Signature", size, 96) + return + } + hh.PutBytes(p.Signature) + + hh.Merkleize(indx) + return +} + +// MarshalSSZ ssz marshals the PayloadAttestationMessage object +func (p *PayloadAttestationMessage) MarshalSSZ() ([]byte, error) { + return ssz.MarshalSSZ(p) +} + +// MarshalSSZTo ssz marshals the PayloadAttestationMessage object to a target array +func (p *PayloadAttestationMessage) MarshalSSZTo(buf []byte) (dst []byte, err error) { + dst = buf + + // Field (0) 'ValidatorIndex' + dst = ssz.MarshalUint64(dst, uint64(p.ValidatorIndex)) + + // Field (1) 'Data' + if p.Data == nil { + p.Data = new(PayloadAttestationData) + } + if dst, err = p.Data.MarshalSSZTo(dst); err != nil { + return + } + + // Field (2) 'Signature' + if size := len(p.Signature); size != 96 { + err = ssz.ErrBytesLengthFn("--.Signature", size, 96) + return + } + dst = append(dst, p.Signature...) + + return +} + +// UnmarshalSSZ ssz unmarshals the PayloadAttestationMessage object +func (p *PayloadAttestationMessage) UnmarshalSSZ(buf []byte) error { + var err error + size := uint64(len(buf)) + if size != 146 { + return ssz.ErrSize + } + + // Field (0) 'ValidatorIndex' + p.ValidatorIndex = github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex(ssz.UnmarshallUint64(buf[0:8])) + + // Field (1) 'Data' + if p.Data == nil { + p.Data = new(PayloadAttestationData) + } + if err = p.Data.UnmarshalSSZ(buf[8:50]); err != nil { + return err + } + + // Field (2) 'Signature' + if cap(p.Signature) == 0 { + p.Signature = make([]byte, 0, len(buf[50:146])) + } + p.Signature = append(p.Signature, buf[50:146]...) 
+ + return err +} + +// SizeSSZ returns the ssz encoded size in bytes for the PayloadAttestationMessage object +func (p *PayloadAttestationMessage) SizeSSZ() (size int) { + size = 146 + return +} + +// HashTreeRoot ssz hashes the PayloadAttestationMessage object +func (p *PayloadAttestationMessage) HashTreeRoot() ([32]byte, error) { + return ssz.HashWithDefaultHasher(p) +} + +// HashTreeRootWith ssz hashes the PayloadAttestationMessage object with a hasher +func (p *PayloadAttestationMessage) HashTreeRootWith(hh *ssz.Hasher) (err error) { + indx := hh.Index() + + // Field (0) 'ValidatorIndex' + hh.PutUint64(uint64(p.ValidatorIndex)) + + // Field (1) 'Data' + if err = p.Data.HashTreeRootWith(hh); err != nil { + return + } + + // Field (2) 'Signature' + if size := len(p.Signature); size != 96 { + err = ssz.ErrBytesLengthFn("--.Signature", size, 96) + return + } + hh.PutBytes(p.Signature) + + hh.Merkleize(indx) + return +} + +// MarshalSSZ ssz marshals the BeaconBlockGloas object +func (b *BeaconBlockGloas) MarshalSSZ() ([]byte, error) { + return ssz.MarshalSSZ(b) +} + +// MarshalSSZTo ssz marshals the BeaconBlockGloas object to a target array +func (b *BeaconBlockGloas) MarshalSSZTo(buf []byte) (dst []byte, err error) { + dst = buf + offset := int(84) + + // Field (0) 'Slot' + dst = ssz.MarshalUint64(dst, uint64(b.Slot)) + + // Field (1) 'ProposerIndex' + dst = ssz.MarshalUint64(dst, uint64(b.ProposerIndex)) + + // Field (2) 'ParentRoot' + if size := len(b.ParentRoot); size != 32 { + err = ssz.ErrBytesLengthFn("--.ParentRoot", size, 32) + return + } + dst = append(dst, b.ParentRoot...) + + // Field (3) 'StateRoot' + if size := len(b.StateRoot); size != 32 { + err = ssz.ErrBytesLengthFn("--.StateRoot", size, 32) + return + } + dst = append(dst, b.StateRoot...) + + // Offset (4) 'Body' + dst = ssz.WriteOffset(dst, offset) + if b.Body == nil { + b.Body = new(BeaconBlockBodyGloas) + } + offset += b.Body.SizeSSZ() + + // Field (4) 'Body' + if dst, err = b.Body.MarshalSSZTo(dst); err != nil { + return + } + + return +} + +// UnmarshalSSZ ssz unmarshals the BeaconBlockGloas object +func (b *BeaconBlockGloas) UnmarshalSSZ(buf []byte) error { + var err error + size := uint64(len(buf)) + if size < 84 { + return ssz.ErrSize + } + + tail := buf + var o4 uint64 + + // Field (0) 'Slot' + b.Slot = github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot(ssz.UnmarshallUint64(buf[0:8])) + + // Field (1) 'ProposerIndex' + b.ProposerIndex = github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex(ssz.UnmarshallUint64(buf[8:16])) + + // Field (2) 'ParentRoot' + if cap(b.ParentRoot) == 0 { + b.ParentRoot = make([]byte, 0, len(buf[16:48])) + } + b.ParentRoot = append(b.ParentRoot, buf[16:48]...) + + // Field (3) 'StateRoot' + if cap(b.StateRoot) == 0 { + b.StateRoot = make([]byte, 0, len(buf[48:80])) + } + b.StateRoot = append(b.StateRoot, buf[48:80]...) 
+ + // Offset (4) 'Body' + if o4 = ssz.ReadOffset(buf[80:84]); o4 > size { + return ssz.ErrOffset + } + + if o4 != 84 { + return ssz.ErrInvalidVariableOffset + } + + // Field (4) 'Body' + { + buf = tail[o4:] + if b.Body == nil { + b.Body = new(BeaconBlockBodyGloas) + } + if err = b.Body.UnmarshalSSZ(buf); err != nil { + return err + } + } + return err +} + +// SizeSSZ returns the ssz encoded size in bytes for the BeaconBlockGloas object +func (b *BeaconBlockGloas) SizeSSZ() (size int) { + size = 84 + + // Field (4) 'Body' + if b.Body == nil { + b.Body = new(BeaconBlockBodyGloas) + } + size += b.Body.SizeSSZ() + + return +} + +// HashTreeRoot ssz hashes the BeaconBlockGloas object +func (b *BeaconBlockGloas) HashTreeRoot() ([32]byte, error) { + return ssz.HashWithDefaultHasher(b) +} + +// HashTreeRootWith ssz hashes the BeaconBlockGloas object with a hasher +func (b *BeaconBlockGloas) HashTreeRootWith(hh *ssz.Hasher) (err error) { + indx := hh.Index() + + // Field (0) 'Slot' + hh.PutUint64(uint64(b.Slot)) + + // Field (1) 'ProposerIndex' + hh.PutUint64(uint64(b.ProposerIndex)) + + // Field (2) 'ParentRoot' + if size := len(b.ParentRoot); size != 32 { + err = ssz.ErrBytesLengthFn("--.ParentRoot", size, 32) + return + } + hh.PutBytes(b.ParentRoot) + + // Field (3) 'StateRoot' + if size := len(b.StateRoot); size != 32 { + err = ssz.ErrBytesLengthFn("--.StateRoot", size, 32) + return + } + hh.PutBytes(b.StateRoot) + + // Field (4) 'Body' + if err = b.Body.HashTreeRootWith(hh); err != nil { + return + } + + hh.Merkleize(indx) + return +} + +// MarshalSSZ ssz marshals the BeaconBlockBodyGloas object +func (b *BeaconBlockBodyGloas) MarshalSSZ() ([]byte, error) { + return ssz.MarshalSSZ(b) +} + +// MarshalSSZTo ssz marshals the BeaconBlockBodyGloas object to a target array +func (b *BeaconBlockBodyGloas) MarshalSSZTo(buf []byte) (dst []byte, err error) { + dst = buf + offset := int(664) + + // Field (0) 'RandaoReveal' + if size := len(b.RandaoReveal); size != 96 { + err = ssz.ErrBytesLengthFn("--.RandaoReveal", size, 96) + return + } + dst = append(dst, b.RandaoReveal...) + + // Field (1) 'Eth1Data' + if b.Eth1Data == nil { + b.Eth1Data = new(Eth1Data) + } + if dst, err = b.Eth1Data.MarshalSSZTo(dst); err != nil { + return + } + + // Field (2) 'Graffiti' + if size := len(b.Graffiti); size != 32 { + err = ssz.ErrBytesLengthFn("--.Graffiti", size, 32) + return + } + dst = append(dst, b.Graffiti...) 
+ + // Offset (3) 'ProposerSlashings' + dst = ssz.WriteOffset(dst, offset) + offset += len(b.ProposerSlashings) * 416 + + // Offset (4) 'AttesterSlashings' + dst = ssz.WriteOffset(dst, offset) + for ii := 0; ii < len(b.AttesterSlashings); ii++ { + offset += 4 + offset += b.AttesterSlashings[ii].SizeSSZ() + } + + // Offset (5) 'Attestations' + dst = ssz.WriteOffset(dst, offset) + for ii := 0; ii < len(b.Attestations); ii++ { + offset += 4 + offset += b.Attestations[ii].SizeSSZ() + } + + // Offset (6) 'Deposits' + dst = ssz.WriteOffset(dst, offset) + offset += len(b.Deposits) * 1240 + + // Offset (7) 'VoluntaryExits' + dst = ssz.WriteOffset(dst, offset) + offset += len(b.VoluntaryExits) * 112 + + // Field (8) 'SyncAggregate' + if b.SyncAggregate == nil { + b.SyncAggregate = new(SyncAggregate) + } + if dst, err = b.SyncAggregate.MarshalSSZTo(dst); err != nil { + return + } + + // Offset (9) 'BlsToExecutionChanges' + dst = ssz.WriteOffset(dst, offset) + offset += len(b.BlsToExecutionChanges) * 172 + + // Field (10) 'SignedExecutionPayloadBid' + if b.SignedExecutionPayloadBid == nil { + b.SignedExecutionPayloadBid = new(SignedExecutionPayloadBid) + } + if dst, err = b.SignedExecutionPayloadBid.MarshalSSZTo(dst); err != nil { + return + } + + // Offset (11) 'PayloadAttestations' + dst = ssz.WriteOffset(dst, offset) + offset += len(b.PayloadAttestations) * 202 + + // Field (3) 'ProposerSlashings' + if size := len(b.ProposerSlashings); size > 16 { + err = ssz.ErrListTooBigFn("--.ProposerSlashings", size, 16) + return + } + for ii := 0; ii < len(b.ProposerSlashings); ii++ { + if dst, err = b.ProposerSlashings[ii].MarshalSSZTo(dst); err != nil { + return + } + } + + // Field (4) 'AttesterSlashings' + if size := len(b.AttesterSlashings); size > 1 { + err = ssz.ErrListTooBigFn("--.AttesterSlashings", size, 1) + return + } + { + offset = 4 * len(b.AttesterSlashings) + for ii := 0; ii < len(b.AttesterSlashings); ii++ { + dst = ssz.WriteOffset(dst, offset) + offset += b.AttesterSlashings[ii].SizeSSZ() + } + } + for ii := 0; ii < len(b.AttesterSlashings); ii++ { + if dst, err = b.AttesterSlashings[ii].MarshalSSZTo(dst); err != nil { + return + } + } + + // Field (5) 'Attestations' + if size := len(b.Attestations); size > 8 { + err = ssz.ErrListTooBigFn("--.Attestations", size, 8) + return + } + { + offset = 4 * len(b.Attestations) + for ii := 0; ii < len(b.Attestations); ii++ { + dst = ssz.WriteOffset(dst, offset) + offset += b.Attestations[ii].SizeSSZ() + } + } + for ii := 0; ii < len(b.Attestations); ii++ { + if dst, err = b.Attestations[ii].MarshalSSZTo(dst); err != nil { + return + } + } + + // Field (6) 'Deposits' + if size := len(b.Deposits); size > 16 { + err = ssz.ErrListTooBigFn("--.Deposits", size, 16) + return + } + for ii := 0; ii < len(b.Deposits); ii++ { + if dst, err = b.Deposits[ii].MarshalSSZTo(dst); err != nil { + return + } + } + + // Field (7) 'VoluntaryExits' + if size := len(b.VoluntaryExits); size > 16 { + err = ssz.ErrListTooBigFn("--.VoluntaryExits", size, 16) + return + } + for ii := 0; ii < len(b.VoluntaryExits); ii++ { + if dst, err = b.VoluntaryExits[ii].MarshalSSZTo(dst); err != nil { + return + } + } + + // Field (9) 'BlsToExecutionChanges' + if size := len(b.BlsToExecutionChanges); size > 16 { + err = ssz.ErrListTooBigFn("--.BlsToExecutionChanges", size, 16) + return + } + for ii := 0; ii < len(b.BlsToExecutionChanges); ii++ { + if dst, err = b.BlsToExecutionChanges[ii].MarshalSSZTo(dst); err != nil { + return + } + } + + // Field (11) 'PayloadAttestations' + if size := 
len(b.PayloadAttestations); size > 4 { + err = ssz.ErrListTooBigFn("--.PayloadAttestations", size, 4) + return + } + for ii := 0; ii < len(b.PayloadAttestations); ii++ { + if dst, err = b.PayloadAttestations[ii].MarshalSSZTo(dst); err != nil { + return + } + } + + return +} + +// UnmarshalSSZ ssz unmarshals the BeaconBlockBodyGloas object +func (b *BeaconBlockBodyGloas) UnmarshalSSZ(buf []byte) error { + var err error + size := uint64(len(buf)) + if size < 664 { + return ssz.ErrSize + } + + tail := buf + var o3, o4, o5, o6, o7, o9, o11 uint64 + + // Field (0) 'RandaoReveal' + if cap(b.RandaoReveal) == 0 { + b.RandaoReveal = make([]byte, 0, len(buf[0:96])) + } + b.RandaoReveal = append(b.RandaoReveal, buf[0:96]...) + + // Field (1) 'Eth1Data' + if b.Eth1Data == nil { + b.Eth1Data = new(Eth1Data) + } + if err = b.Eth1Data.UnmarshalSSZ(buf[96:168]); err != nil { + return err + } + + // Field (2) 'Graffiti' + if cap(b.Graffiti) == 0 { + b.Graffiti = make([]byte, 0, len(buf[168:200])) + } + b.Graffiti = append(b.Graffiti, buf[168:200]...) + + // Offset (3) 'ProposerSlashings' + if o3 = ssz.ReadOffset(buf[200:204]); o3 > size { + return ssz.ErrOffset + } + + if o3 != 664 { + return ssz.ErrInvalidVariableOffset + } + + // Offset (4) 'AttesterSlashings' + if o4 = ssz.ReadOffset(buf[204:208]); o4 > size || o3 > o4 { + return ssz.ErrOffset + } + + // Offset (5) 'Attestations' + if o5 = ssz.ReadOffset(buf[208:212]); o5 > size || o4 > o5 { + return ssz.ErrOffset + } + + // Offset (6) 'Deposits' + if o6 = ssz.ReadOffset(buf[212:216]); o6 > size || o5 > o6 { + return ssz.ErrOffset + } + + // Offset (7) 'VoluntaryExits' + if o7 = ssz.ReadOffset(buf[216:220]); o7 > size || o6 > o7 { + return ssz.ErrOffset + } + + // Field (8) 'SyncAggregate' + if b.SyncAggregate == nil { + b.SyncAggregate = new(SyncAggregate) + } + if err = b.SyncAggregate.UnmarshalSSZ(buf[220:380]); err != nil { + return err + } + + // Offset (9) 'BlsToExecutionChanges' + if o9 = ssz.ReadOffset(buf[380:384]); o9 > size || o7 > o9 { + return ssz.ErrOffset + } + + // Field (10) 'SignedExecutionPayloadBid' + if b.SignedExecutionPayloadBid == nil { + b.SignedExecutionPayloadBid = new(SignedExecutionPayloadBid) + } + if err = b.SignedExecutionPayloadBid.UnmarshalSSZ(buf[384:660]); err != nil { + return err + } + + // Offset (11) 'PayloadAttestations' + if o11 = ssz.ReadOffset(buf[660:664]); o11 > size || o9 > o11 { + return ssz.ErrOffset + } + + // Field (3) 'ProposerSlashings' + { + buf = tail[o3:o4] + num, err := ssz.DivideInt2(len(buf), 416, 16) + if err != nil { + return err + } + b.ProposerSlashings = make([]*ProposerSlashing, num) + for ii := 0; ii < num; ii++ { + if b.ProposerSlashings[ii] == nil { + b.ProposerSlashings[ii] = new(ProposerSlashing) + } + if err = b.ProposerSlashings[ii].UnmarshalSSZ(buf[ii*416 : (ii+1)*416]); err != nil { + return err + } + } + } + + // Field (4) 'AttesterSlashings' + { + buf = tail[o4:o5] + num, err := ssz.DecodeDynamicLength(buf, 1) + if err != nil { + return err + } + b.AttesterSlashings = make([]*AttesterSlashingElectra, num) + err = ssz.UnmarshalDynamic(buf, num, func(indx int, buf []byte) (err error) { + if b.AttesterSlashings[indx] == nil { + b.AttesterSlashings[indx] = new(AttesterSlashingElectra) + } + if err = b.AttesterSlashings[indx].UnmarshalSSZ(buf); err != nil { + return err + } + return nil + }) + if err != nil { + return err + } + } + + // Field (5) 'Attestations' + { + buf = tail[o5:o6] + num, err := ssz.DecodeDynamicLength(buf, 8) + if err != nil { + return err + } + b.Attestations = 
make([]*AttestationElectra, num) + err = ssz.UnmarshalDynamic(buf, num, func(indx int, buf []byte) (err error) { + if b.Attestations[indx] == nil { + b.Attestations[indx] = new(AttestationElectra) + } + if err = b.Attestations[indx].UnmarshalSSZ(buf); err != nil { + return err + } + return nil + }) + if err != nil { + return err + } + } + + // Field (6) 'Deposits' + { + buf = tail[o6:o7] + num, err := ssz.DivideInt2(len(buf), 1240, 16) + if err != nil { + return err + } + b.Deposits = make([]*Deposit, num) + for ii := 0; ii < num; ii++ { + if b.Deposits[ii] == nil { + b.Deposits[ii] = new(Deposit) + } + if err = b.Deposits[ii].UnmarshalSSZ(buf[ii*1240 : (ii+1)*1240]); err != nil { + return err + } + } + } + + // Field (7) 'VoluntaryExits' + { + buf = tail[o7:o9] + num, err := ssz.DivideInt2(len(buf), 112, 16) + if err != nil { + return err + } + b.VoluntaryExits = make([]*SignedVoluntaryExit, num) + for ii := 0; ii < num; ii++ { + if b.VoluntaryExits[ii] == nil { + b.VoluntaryExits[ii] = new(SignedVoluntaryExit) + } + if err = b.VoluntaryExits[ii].UnmarshalSSZ(buf[ii*112 : (ii+1)*112]); err != nil { + return err + } + } + } + + // Field (9) 'BlsToExecutionChanges' + { + buf = tail[o9:o11] + num, err := ssz.DivideInt2(len(buf), 172, 16) + if err != nil { + return err + } + b.BlsToExecutionChanges = make([]*SignedBLSToExecutionChange, num) + for ii := 0; ii < num; ii++ { + if b.BlsToExecutionChanges[ii] == nil { + b.BlsToExecutionChanges[ii] = new(SignedBLSToExecutionChange) + } + if err = b.BlsToExecutionChanges[ii].UnmarshalSSZ(buf[ii*172 : (ii+1)*172]); err != nil { + return err + } + } + } + + // Field (11) 'PayloadAttestations' + { + buf = tail[o11:] + num, err := ssz.DivideInt2(len(buf), 202, 4) + if err != nil { + return err + } + b.PayloadAttestations = make([]*PayloadAttestation, num) + for ii := 0; ii < num; ii++ { + if b.PayloadAttestations[ii] == nil { + b.PayloadAttestations[ii] = new(PayloadAttestation) + } + if err = b.PayloadAttestations[ii].UnmarshalSSZ(buf[ii*202 : (ii+1)*202]); err != nil { + return err + } + } + } + return err +} + +// SizeSSZ returns the ssz encoded size in bytes for the BeaconBlockBodyGloas object +func (b *BeaconBlockBodyGloas) SizeSSZ() (size int) { + size = 664 + + // Field (3) 'ProposerSlashings' + size += len(b.ProposerSlashings) * 416 + + // Field (4) 'AttesterSlashings' + for ii := 0; ii < len(b.AttesterSlashings); ii++ { + size += 4 + size += b.AttesterSlashings[ii].SizeSSZ() + } + + // Field (5) 'Attestations' + for ii := 0; ii < len(b.Attestations); ii++ { + size += 4 + size += b.Attestations[ii].SizeSSZ() + } + + // Field (6) 'Deposits' + size += len(b.Deposits) * 1240 + + // Field (7) 'VoluntaryExits' + size += len(b.VoluntaryExits) * 112 + + // Field (9) 'BlsToExecutionChanges' + size += len(b.BlsToExecutionChanges) * 172 + + // Field (11) 'PayloadAttestations' + size += len(b.PayloadAttestations) * 202 + + return +} + +// HashTreeRoot ssz hashes the BeaconBlockBodyGloas object +func (b *BeaconBlockBodyGloas) HashTreeRoot() ([32]byte, error) { + return ssz.HashWithDefaultHasher(b) +} + +// HashTreeRootWith ssz hashes the BeaconBlockBodyGloas object with a hasher +func (b *BeaconBlockBodyGloas) HashTreeRootWith(hh *ssz.Hasher) (err error) { + indx := hh.Index() + + // Field (0) 'RandaoReveal' + if size := len(b.RandaoReveal); size != 96 { + err = ssz.ErrBytesLengthFn("--.RandaoReveal", size, 96) + return + } + hh.PutBytes(b.RandaoReveal) + + // Field (1) 'Eth1Data' + if err = b.Eth1Data.HashTreeRootWith(hh); err != nil { + return + } + + // 
Field (2) 'Graffiti' + if size := len(b.Graffiti); size != 32 { + err = ssz.ErrBytesLengthFn("--.Graffiti", size, 32) + return + } + hh.PutBytes(b.Graffiti) + + // Field (3) 'ProposerSlashings' + { + subIndx := hh.Index() + num := uint64(len(b.ProposerSlashings)) + if num > 16 { + err = ssz.ErrIncorrectListSize + return + } + for _, elem := range b.ProposerSlashings { + if err = elem.HashTreeRootWith(hh); err != nil { + return + } + } + hh.MerkleizeWithMixin(subIndx, num, 16) + } + + // Field (4) 'AttesterSlashings' + { + subIndx := hh.Index() + num := uint64(len(b.AttesterSlashings)) + if num > 1 { + err = ssz.ErrIncorrectListSize + return + } + for _, elem := range b.AttesterSlashings { + if err = elem.HashTreeRootWith(hh); err != nil { + return + } + } + hh.MerkleizeWithMixin(subIndx, num, 1) + } + + // Field (5) 'Attestations' + { + subIndx := hh.Index() + num := uint64(len(b.Attestations)) + if num > 8 { + err = ssz.ErrIncorrectListSize + return + } + for _, elem := range b.Attestations { + if err = elem.HashTreeRootWith(hh); err != nil { + return + } + } + hh.MerkleizeWithMixin(subIndx, num, 8) + } + + // Field (6) 'Deposits' + { + subIndx := hh.Index() + num := uint64(len(b.Deposits)) + if num > 16 { + err = ssz.ErrIncorrectListSize + return + } + for _, elem := range b.Deposits { + if err = elem.HashTreeRootWith(hh); err != nil { + return + } + } + hh.MerkleizeWithMixin(subIndx, num, 16) + } + + // Field (7) 'VoluntaryExits' + { + subIndx := hh.Index() + num := uint64(len(b.VoluntaryExits)) + if num > 16 { + err = ssz.ErrIncorrectListSize + return + } + for _, elem := range b.VoluntaryExits { + if err = elem.HashTreeRootWith(hh); err != nil { + return + } + } + hh.MerkleizeWithMixin(subIndx, num, 16) + } + + // Field (8) 'SyncAggregate' + if err = b.SyncAggregate.HashTreeRootWith(hh); err != nil { + return + } + + // Field (9) 'BlsToExecutionChanges' + { + subIndx := hh.Index() + num := uint64(len(b.BlsToExecutionChanges)) + if num > 16 { + err = ssz.ErrIncorrectListSize + return + } + for _, elem := range b.BlsToExecutionChanges { + if err = elem.HashTreeRootWith(hh); err != nil { + return + } + } + hh.MerkleizeWithMixin(subIndx, num, 16) + } + + // Field (10) 'SignedExecutionPayloadBid' + if err = b.SignedExecutionPayloadBid.HashTreeRootWith(hh); err != nil { + return + } + + // Field (11) 'PayloadAttestations' + { + subIndx := hh.Index() + num := uint64(len(b.PayloadAttestations)) + if num > 4 { + err = ssz.ErrIncorrectListSize + return + } + for _, elem := range b.PayloadAttestations { + if err = elem.HashTreeRootWith(hh); err != nil { + return + } + } + hh.MerkleizeWithMixin(subIndx, num, 4) + } + + hh.Merkleize(indx) + return +} + +// MarshalSSZ ssz marshals the SignedBeaconBlockGloas object +func (s *SignedBeaconBlockGloas) MarshalSSZ() ([]byte, error) { + return ssz.MarshalSSZ(s) +} + +// MarshalSSZTo ssz marshals the SignedBeaconBlockGloas object to a target array +func (s *SignedBeaconBlockGloas) MarshalSSZTo(buf []byte) (dst []byte, err error) { + dst = buf + offset := int(100) + + // Offset (0) 'Block' + dst = ssz.WriteOffset(dst, offset) + if s.Block == nil { + s.Block = new(BeaconBlockGloas) + } + offset += s.Block.SizeSSZ() + + // Field (1) 'Signature' + if size := len(s.Signature); size != 96 { + err = ssz.ErrBytesLengthFn("--.Signature", size, 96) + return + } + dst = append(dst, s.Signature...) 
+ + // Field (0) 'Block' + if dst, err = s.Block.MarshalSSZTo(dst); err != nil { + return + } + + return +} + +// UnmarshalSSZ ssz unmarshals the SignedBeaconBlockGloas object +func (s *SignedBeaconBlockGloas) UnmarshalSSZ(buf []byte) error { + var err error + size := uint64(len(buf)) + if size < 100 { + return ssz.ErrSize + } + + tail := buf + var o0 uint64 + + // Offset (0) 'Block' + if o0 = ssz.ReadOffset(buf[0:4]); o0 > size { + return ssz.ErrOffset + } + + if o0 != 100 { + return ssz.ErrInvalidVariableOffset + } + + // Field (1) 'Signature' + if cap(s.Signature) == 0 { + s.Signature = make([]byte, 0, len(buf[4:100])) + } + s.Signature = append(s.Signature, buf[4:100]...) + + // Field (0) 'Block' + { + buf = tail[o0:] + if s.Block == nil { + s.Block = new(BeaconBlockGloas) + } + if err = s.Block.UnmarshalSSZ(buf); err != nil { + return err + } + } + return err +} + +// SizeSSZ returns the ssz encoded size in bytes for the SignedBeaconBlockGloas object +func (s *SignedBeaconBlockGloas) SizeSSZ() (size int) { + size = 100 + + // Field (0) 'Block' + if s.Block == nil { + s.Block = new(BeaconBlockGloas) + } + size += s.Block.SizeSSZ() + + return +} + +// HashTreeRoot ssz hashes the SignedBeaconBlockGloas object +func (s *SignedBeaconBlockGloas) HashTreeRoot() ([32]byte, error) { + return ssz.HashWithDefaultHasher(s) +} + +// HashTreeRootWith ssz hashes the SignedBeaconBlockGloas object with a hasher +func (s *SignedBeaconBlockGloas) HashTreeRootWith(hh *ssz.Hasher) (err error) { + indx := hh.Index() + + // Field (0) 'Block' + if err = s.Block.HashTreeRootWith(hh); err != nil { + return + } + + // Field (1) 'Signature' + if size := len(s.Signature); size != 96 { + err = ssz.ErrBytesLengthFn("--.Signature", size, 96) + return + } + hh.PutBytes(s.Signature) + + hh.Merkleize(indx) + return +} + +// MarshalSSZ ssz marshals the BeaconStateGloas object +func (b *BeaconStateGloas) MarshalSSZ() ([]byte, error) { + return ssz.MarshalSSZ(b) +} + +// MarshalSSZTo ssz marshals the BeaconStateGloas object to a target array +func (b *BeaconStateGloas) MarshalSSZTo(buf []byte) (dst []byte, err error) { + dst = buf + offset := int(2741821) + + // Field (0) 'GenesisTime' + dst = ssz.MarshalUint64(dst, b.GenesisTime) + + // Field (1) 'GenesisValidatorsRoot' + if size := len(b.GenesisValidatorsRoot); size != 32 { + err = ssz.ErrBytesLengthFn("--.GenesisValidatorsRoot", size, 32) + return + } + dst = append(dst, b.GenesisValidatorsRoot...) + + // Field (2) 'Slot' + dst = ssz.MarshalUint64(dst, uint64(b.Slot)) + + // Field (3) 'Fork' + if b.Fork == nil { + b.Fork = new(Fork) + } + if dst, err = b.Fork.MarshalSSZTo(dst); err != nil { + return + } + + // Field (4) 'LatestBlockHeader' + if b.LatestBlockHeader == nil { + b.LatestBlockHeader = new(BeaconBlockHeader) + } + if dst, err = b.LatestBlockHeader.MarshalSSZTo(dst); err != nil { + return + } + + // Field (5) 'BlockRoots' + if size := len(b.BlockRoots); size != 8192 { + err = ssz.ErrVectorLengthFn("--.BlockRoots", size, 8192) + return + } + for ii := 0; ii < 8192; ii++ { + if size := len(b.BlockRoots[ii]); size != 32 { + err = ssz.ErrBytesLengthFn("--.BlockRoots[ii]", size, 32) + return + } + dst = append(dst, b.BlockRoots[ii]...) 
+ } + + // Field (6) 'StateRoots' + if size := len(b.StateRoots); size != 8192 { + err = ssz.ErrVectorLengthFn("--.StateRoots", size, 8192) + return + } + for ii := 0; ii < 8192; ii++ { + if size := len(b.StateRoots[ii]); size != 32 { + err = ssz.ErrBytesLengthFn("--.StateRoots[ii]", size, 32) + return + } + dst = append(dst, b.StateRoots[ii]...) + } + + // Offset (7) 'HistoricalRoots' + dst = ssz.WriteOffset(dst, offset) + offset += len(b.HistoricalRoots) * 32 + + // Field (8) 'Eth1Data' + if b.Eth1Data == nil { + b.Eth1Data = new(Eth1Data) + } + if dst, err = b.Eth1Data.MarshalSSZTo(dst); err != nil { + return + } + + // Offset (9) 'Eth1DataVotes' + dst = ssz.WriteOffset(dst, offset) + offset += len(b.Eth1DataVotes) * 72 + + // Field (10) 'Eth1DepositIndex' + dst = ssz.MarshalUint64(dst, b.Eth1DepositIndex) + + // Offset (11) 'Validators' + dst = ssz.WriteOffset(dst, offset) + offset += len(b.Validators) * 121 + + // Offset (12) 'Balances' + dst = ssz.WriteOffset(dst, offset) + offset += len(b.Balances) * 8 + + // Field (13) 'RandaoMixes' + if size := len(b.RandaoMixes); size != 65536 { + err = ssz.ErrVectorLengthFn("--.RandaoMixes", size, 65536) + return + } + for ii := 0; ii < 65536; ii++ { + if size := len(b.RandaoMixes[ii]); size != 32 { + err = ssz.ErrBytesLengthFn("--.RandaoMixes[ii]", size, 32) + return + } + dst = append(dst, b.RandaoMixes[ii]...) + } + + // Field (14) 'Slashings' + if size := len(b.Slashings); size != 8192 { + err = ssz.ErrVectorLengthFn("--.Slashings", size, 8192) + return + } + for ii := 0; ii < 8192; ii++ { + dst = ssz.MarshalUint64(dst, b.Slashings[ii]) + } + + // Offset (15) 'PreviousEpochParticipation' + dst = ssz.WriteOffset(dst, offset) + offset += len(b.PreviousEpochParticipation) + + // Offset (16) 'CurrentEpochParticipation' + dst = ssz.WriteOffset(dst, offset) + offset += len(b.CurrentEpochParticipation) + + // Field (17) 'JustificationBits' + if size := len(b.JustificationBits); size != 1 { + err = ssz.ErrBytesLengthFn("--.JustificationBits", size, 1) + return + } + dst = append(dst, b.JustificationBits...) 
+ + // Field (18) 'PreviousJustifiedCheckpoint' + if b.PreviousJustifiedCheckpoint == nil { + b.PreviousJustifiedCheckpoint = new(Checkpoint) + } + if dst, err = b.PreviousJustifiedCheckpoint.MarshalSSZTo(dst); err != nil { + return + } + + // Field (19) 'CurrentJustifiedCheckpoint' + if b.CurrentJustifiedCheckpoint == nil { + b.CurrentJustifiedCheckpoint = new(Checkpoint) + } + if dst, err = b.CurrentJustifiedCheckpoint.MarshalSSZTo(dst); err != nil { + return + } + + // Field (20) 'FinalizedCheckpoint' + if b.FinalizedCheckpoint == nil { + b.FinalizedCheckpoint = new(Checkpoint) + } + if dst, err = b.FinalizedCheckpoint.MarshalSSZTo(dst); err != nil { + return + } + + // Offset (21) 'InactivityScores' + dst = ssz.WriteOffset(dst, offset) + offset += len(b.InactivityScores) * 8 + + // Field (22) 'CurrentSyncCommittee' + if b.CurrentSyncCommittee == nil { + b.CurrentSyncCommittee = new(SyncCommittee) + } + if dst, err = b.CurrentSyncCommittee.MarshalSSZTo(dst); err != nil { + return + } + + // Field (23) 'NextSyncCommittee' + if b.NextSyncCommittee == nil { + b.NextSyncCommittee = new(SyncCommittee) + } + if dst, err = b.NextSyncCommittee.MarshalSSZTo(dst); err != nil { + return + } + + // Field (24) 'LatestExecutionPayloadBid' + if b.LatestExecutionPayloadBid == nil { + b.LatestExecutionPayloadBid = new(ExecutionPayloadBid) + } + if dst, err = b.LatestExecutionPayloadBid.MarshalSSZTo(dst); err != nil { + return + } + + // Field (25) 'NextWithdrawalIndex' + dst = ssz.MarshalUint64(dst, b.NextWithdrawalIndex) + + // Field (26) 'NextWithdrawalValidatorIndex' + dst = ssz.MarshalUint64(dst, uint64(b.NextWithdrawalValidatorIndex)) + + // Offset (27) 'HistoricalSummaries' + dst = ssz.WriteOffset(dst, offset) + offset += len(b.HistoricalSummaries) * 64 + + // Field (28) 'DepositRequestsStartIndex' + dst = ssz.MarshalUint64(dst, b.DepositRequestsStartIndex) + + // Field (29) 'DepositBalanceToConsume' + dst = ssz.MarshalUint64(dst, uint64(b.DepositBalanceToConsume)) + + // Field (30) 'ExitBalanceToConsume' + dst = ssz.MarshalUint64(dst, uint64(b.ExitBalanceToConsume)) + + // Field (31) 'EarliestExitEpoch' + dst = ssz.MarshalUint64(dst, uint64(b.EarliestExitEpoch)) + + // Field (32) 'ConsolidationBalanceToConsume' + dst = ssz.MarshalUint64(dst, uint64(b.ConsolidationBalanceToConsume)) + + // Field (33) 'EarliestConsolidationEpoch' + dst = ssz.MarshalUint64(dst, uint64(b.EarliestConsolidationEpoch)) + + // Offset (34) 'PendingDeposits' + dst = ssz.WriteOffset(dst, offset) + offset += len(b.PendingDeposits) * 192 + + // Offset (35) 'PendingPartialWithdrawals' + dst = ssz.WriteOffset(dst, offset) + offset += len(b.PendingPartialWithdrawals) * 24 + + // Offset (36) 'PendingConsolidations' + dst = ssz.WriteOffset(dst, offset) + offset += len(b.PendingConsolidations) * 16 + + // Field (37) 'ProposerLookahead' + if size := len(b.ProposerLookahead); size != 64 { + err = ssz.ErrVectorLengthFn("--.ProposerLookahead", size, 64) + return + } + for ii := 0; ii < 64; ii++ { + dst = ssz.MarshalUint64(dst, b.ProposerLookahead[ii]) + } + + // Field (38) 'ExecutionPayloadAvailability' + if size := len(b.ExecutionPayloadAvailability); size != 1024 { + err = ssz.ErrBytesLengthFn("--.ExecutionPayloadAvailability", size, 1024) + return + } + dst = append(dst, b.ExecutionPayloadAvailability...) 
+ + // Field (39) 'BuilderPendingPayments' + if size := len(b.BuilderPendingPayments); size != 64 { + err = ssz.ErrVectorLengthFn("--.BuilderPendingPayments", size, 64) + return + } + for ii := 0; ii < 64; ii++ { + if dst, err = b.BuilderPendingPayments[ii].MarshalSSZTo(dst); err != nil { + return + } + } + + // Offset (40) 'BuilderPendingWithdrawals' + dst = ssz.WriteOffset(dst, offset) + offset += len(b.BuilderPendingWithdrawals) * 44 + + // Field (41) 'LatestBlockHash' + if size := len(b.LatestBlockHash); size != 32 { + err = ssz.ErrBytesLengthFn("--.LatestBlockHash", size, 32) + return + } + dst = append(dst, b.LatestBlockHash...) + + // Field (42) 'LatestWithdrawalsRoot' + if size := len(b.LatestWithdrawalsRoot); size != 32 { + err = ssz.ErrBytesLengthFn("--.LatestWithdrawalsRoot", size, 32) + return + } + dst = append(dst, b.LatestWithdrawalsRoot...) + + // Field (7) 'HistoricalRoots' + if size := len(b.HistoricalRoots); size > 16777216 { + err = ssz.ErrListTooBigFn("--.HistoricalRoots", size, 16777216) + return + } + for ii := 0; ii < len(b.HistoricalRoots); ii++ { + if size := len(b.HistoricalRoots[ii]); size != 32 { + err = ssz.ErrBytesLengthFn("--.HistoricalRoots[ii]", size, 32) + return + } + dst = append(dst, b.HistoricalRoots[ii]...) + } + + // Field (9) 'Eth1DataVotes' + if size := len(b.Eth1DataVotes); size > 2048 { + err = ssz.ErrListTooBigFn("--.Eth1DataVotes", size, 2048) + return + } + for ii := 0; ii < len(b.Eth1DataVotes); ii++ { + if dst, err = b.Eth1DataVotes[ii].MarshalSSZTo(dst); err != nil { + return + } + } + + // Field (11) 'Validators' + if size := len(b.Validators); size > 1099511627776 { + err = ssz.ErrListTooBigFn("--.Validators", size, 1099511627776) + return + } + for ii := 0; ii < len(b.Validators); ii++ { + if dst, err = b.Validators[ii].MarshalSSZTo(dst); err != nil { + return + } + } + + // Field (12) 'Balances' + if size := len(b.Balances); size > 1099511627776 { + err = ssz.ErrListTooBigFn("--.Balances", size, 1099511627776) + return + } + for ii := 0; ii < len(b.Balances); ii++ { + dst = ssz.MarshalUint64(dst, b.Balances[ii]) + } + + // Field (15) 'PreviousEpochParticipation' + if size := len(b.PreviousEpochParticipation); size > 1099511627776 { + err = ssz.ErrBytesLengthFn("--.PreviousEpochParticipation", size, 1099511627776) + return + } + dst = append(dst, b.PreviousEpochParticipation...) + + // Field (16) 'CurrentEpochParticipation' + if size := len(b.CurrentEpochParticipation); size > 1099511627776 { + err = ssz.ErrBytesLengthFn("--.CurrentEpochParticipation", size, 1099511627776) + return + } + dst = append(dst, b.CurrentEpochParticipation...) 
+ + // Field (21) 'InactivityScores' + if size := len(b.InactivityScores); size > 1099511627776 { + err = ssz.ErrListTooBigFn("--.InactivityScores", size, 1099511627776) + return + } + for ii := 0; ii < len(b.InactivityScores); ii++ { + dst = ssz.MarshalUint64(dst, b.InactivityScores[ii]) + } + + // Field (27) 'HistoricalSummaries' + if size := len(b.HistoricalSummaries); size > 16777216 { + err = ssz.ErrListTooBigFn("--.HistoricalSummaries", size, 16777216) + return + } + for ii := 0; ii < len(b.HistoricalSummaries); ii++ { + if dst, err = b.HistoricalSummaries[ii].MarshalSSZTo(dst); err != nil { + return + } + } + + // Field (34) 'PendingDeposits' + if size := len(b.PendingDeposits); size > 134217728 { + err = ssz.ErrListTooBigFn("--.PendingDeposits", size, 134217728) + return + } + for ii := 0; ii < len(b.PendingDeposits); ii++ { + if dst, err = b.PendingDeposits[ii].MarshalSSZTo(dst); err != nil { + return + } + } + + // Field (35) 'PendingPartialWithdrawals' + if size := len(b.PendingPartialWithdrawals); size > 134217728 { + err = ssz.ErrListTooBigFn("--.PendingPartialWithdrawals", size, 134217728) + return + } + for ii := 0; ii < len(b.PendingPartialWithdrawals); ii++ { + if dst, err = b.PendingPartialWithdrawals[ii].MarshalSSZTo(dst); err != nil { + return + } + } + + // Field (36) 'PendingConsolidations' + if size := len(b.PendingConsolidations); size > 262144 { + err = ssz.ErrListTooBigFn("--.PendingConsolidations", size, 262144) + return + } + for ii := 0; ii < len(b.PendingConsolidations); ii++ { + if dst, err = b.PendingConsolidations[ii].MarshalSSZTo(dst); err != nil { + return + } + } + + // Field (40) 'BuilderPendingWithdrawals' + if size := len(b.BuilderPendingWithdrawals); size > 1048576 { + err = ssz.ErrListTooBigFn("--.BuilderPendingWithdrawals", size, 1048576) + return + } + for ii := 0; ii < len(b.BuilderPendingWithdrawals); ii++ { + if dst, err = b.BuilderPendingWithdrawals[ii].MarshalSSZTo(dst); err != nil { + return + } + } + + return +} + +// UnmarshalSSZ ssz unmarshals the BeaconStateGloas object +func (b *BeaconStateGloas) UnmarshalSSZ(buf []byte) error { + var err error + size := uint64(len(buf)) + if size < 2741821 { + return ssz.ErrSize + } + + tail := buf + var o7, o9, o11, o12, o15, o16, o21, o27, o34, o35, o36, o40 uint64 + + // Field (0) 'GenesisTime' + b.GenesisTime = ssz.UnmarshallUint64(buf[0:8]) + + // Field (1) 'GenesisValidatorsRoot' + if cap(b.GenesisValidatorsRoot) == 0 { + b.GenesisValidatorsRoot = make([]byte, 0, len(buf[8:40])) + } + b.GenesisValidatorsRoot = append(b.GenesisValidatorsRoot, buf[8:40]...) + + // Field (2) 'Slot' + b.Slot = github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot(ssz.UnmarshallUint64(buf[40:48])) + + // Field (3) 'Fork' + if b.Fork == nil { + b.Fork = new(Fork) + } + if err = b.Fork.UnmarshalSSZ(buf[48:64]); err != nil { + return err + } + + // Field (4) 'LatestBlockHeader' + if b.LatestBlockHeader == nil { + b.LatestBlockHeader = new(BeaconBlockHeader) + } + if err = b.LatestBlockHeader.UnmarshalSSZ(buf[64:176]); err != nil { + return err + } + + // Field (5) 'BlockRoots' + b.BlockRoots = make([][]byte, 8192) + for ii := 0; ii < 8192; ii++ { + if cap(b.BlockRoots[ii]) == 0 { + b.BlockRoots[ii] = make([]byte, 0, len(buf[176:262320][ii*32:(ii+1)*32])) + } + b.BlockRoots[ii] = append(b.BlockRoots[ii], buf[176:262320][ii*32:(ii+1)*32]...) 
+ } + + // Field (6) 'StateRoots' + b.StateRoots = make([][]byte, 8192) + for ii := 0; ii < 8192; ii++ { + if cap(b.StateRoots[ii]) == 0 { + b.StateRoots[ii] = make([]byte, 0, len(buf[262320:524464][ii*32:(ii+1)*32])) + } + b.StateRoots[ii] = append(b.StateRoots[ii], buf[262320:524464][ii*32:(ii+1)*32]...) + } + + // Offset (7) 'HistoricalRoots' + if o7 = ssz.ReadOffset(buf[524464:524468]); o7 > size { + return ssz.ErrOffset + } + + if o7 != 2741821 { + return ssz.ErrInvalidVariableOffset + } + + // Field (8) 'Eth1Data' + if b.Eth1Data == nil { + b.Eth1Data = new(Eth1Data) + } + if err = b.Eth1Data.UnmarshalSSZ(buf[524468:524540]); err != nil { + return err + } + + // Offset (9) 'Eth1DataVotes' + if o9 = ssz.ReadOffset(buf[524540:524544]); o9 > size || o7 > o9 { + return ssz.ErrOffset + } + + // Field (10) 'Eth1DepositIndex' + b.Eth1DepositIndex = ssz.UnmarshallUint64(buf[524544:524552]) + + // Offset (11) 'Validators' + if o11 = ssz.ReadOffset(buf[524552:524556]); o11 > size || o9 > o11 { + return ssz.ErrOffset + } + + // Offset (12) 'Balances' + if o12 = ssz.ReadOffset(buf[524556:524560]); o12 > size || o11 > o12 { + return ssz.ErrOffset + } + + // Field (13) 'RandaoMixes' + b.RandaoMixes = make([][]byte, 65536) + for ii := 0; ii < 65536; ii++ { + if cap(b.RandaoMixes[ii]) == 0 { + b.RandaoMixes[ii] = make([]byte, 0, len(buf[524560:2621712][ii*32:(ii+1)*32])) + } + b.RandaoMixes[ii] = append(b.RandaoMixes[ii], buf[524560:2621712][ii*32:(ii+1)*32]...) + } + + // Field (14) 'Slashings' + b.Slashings = ssz.ExtendUint64(b.Slashings, 8192) + for ii := 0; ii < 8192; ii++ { + b.Slashings[ii] = ssz.UnmarshallUint64(buf[2621712:2687248][ii*8 : (ii+1)*8]) + } + + // Offset (15) 'PreviousEpochParticipation' + if o15 = ssz.ReadOffset(buf[2687248:2687252]); o15 > size || o12 > o15 { + return ssz.ErrOffset + } + + // Offset (16) 'CurrentEpochParticipation' + if o16 = ssz.ReadOffset(buf[2687252:2687256]); o16 > size || o15 > o16 { + return ssz.ErrOffset + } + + // Field (17) 'JustificationBits' + if cap(b.JustificationBits) == 0 { + b.JustificationBits = make([]byte, 0, len(buf[2687256:2687257])) + } + b.JustificationBits = append(b.JustificationBits, buf[2687256:2687257]...) 
+ + // Field (18) 'PreviousJustifiedCheckpoint' + if b.PreviousJustifiedCheckpoint == nil { + b.PreviousJustifiedCheckpoint = new(Checkpoint) + } + if err = b.PreviousJustifiedCheckpoint.UnmarshalSSZ(buf[2687257:2687297]); err != nil { + return err + } + + // Field (19) 'CurrentJustifiedCheckpoint' + if b.CurrentJustifiedCheckpoint == nil { + b.CurrentJustifiedCheckpoint = new(Checkpoint) + } + if err = b.CurrentJustifiedCheckpoint.UnmarshalSSZ(buf[2687297:2687337]); err != nil { + return err + } + + // Field (20) 'FinalizedCheckpoint' + if b.FinalizedCheckpoint == nil { + b.FinalizedCheckpoint = new(Checkpoint) + } + if err = b.FinalizedCheckpoint.UnmarshalSSZ(buf[2687337:2687377]); err != nil { + return err + } + + // Offset (21) 'InactivityScores' + if o21 = ssz.ReadOffset(buf[2687377:2687381]); o21 > size || o16 > o21 { + return ssz.ErrOffset + } + + // Field (22) 'CurrentSyncCommittee' + if b.CurrentSyncCommittee == nil { + b.CurrentSyncCommittee = new(SyncCommittee) + } + if err = b.CurrentSyncCommittee.UnmarshalSSZ(buf[2687381:2712005]); err != nil { + return err + } + + // Field (23) 'NextSyncCommittee' + if b.NextSyncCommittee == nil { + b.NextSyncCommittee = new(SyncCommittee) + } + if err = b.NextSyncCommittee.UnmarshalSSZ(buf[2712005:2736629]); err != nil { + return err + } + + // Field (24) 'LatestExecutionPayloadBid' + if b.LatestExecutionPayloadBid == nil { + b.LatestExecutionPayloadBid = new(ExecutionPayloadBid) + } + if err = b.LatestExecutionPayloadBid.UnmarshalSSZ(buf[2736629:2736809]); err != nil { + return err + } + + // Field (25) 'NextWithdrawalIndex' + b.NextWithdrawalIndex = ssz.UnmarshallUint64(buf[2736809:2736817]) + + // Field (26) 'NextWithdrawalValidatorIndex' + b.NextWithdrawalValidatorIndex = github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex(ssz.UnmarshallUint64(buf[2736817:2736825])) + + // Offset (27) 'HistoricalSummaries' + if o27 = ssz.ReadOffset(buf[2736825:2736829]); o27 > size || o21 > o27 { + return ssz.ErrOffset + } + + // Field (28) 'DepositRequestsStartIndex' + b.DepositRequestsStartIndex = ssz.UnmarshallUint64(buf[2736829:2736837]) + + // Field (29) 'DepositBalanceToConsume' + b.DepositBalanceToConsume = github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Gwei(ssz.UnmarshallUint64(buf[2736837:2736845])) + + // Field (30) 'ExitBalanceToConsume' + b.ExitBalanceToConsume = github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Gwei(ssz.UnmarshallUint64(buf[2736845:2736853])) + + // Field (31) 'EarliestExitEpoch' + b.EarliestExitEpoch = github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch(ssz.UnmarshallUint64(buf[2736853:2736861])) + + // Field (32) 'ConsolidationBalanceToConsume' + b.ConsolidationBalanceToConsume = github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Gwei(ssz.UnmarshallUint64(buf[2736861:2736869])) + + // Field (33) 'EarliestConsolidationEpoch' + b.EarliestConsolidationEpoch = github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch(ssz.UnmarshallUint64(buf[2736869:2736877])) + + // Offset (34) 'PendingDeposits' + if o34 = ssz.ReadOffset(buf[2736877:2736881]); o34 > size || o27 > o34 { + return ssz.ErrOffset + } + + // Offset (35) 'PendingPartialWithdrawals' + if o35 = ssz.ReadOffset(buf[2736881:2736885]); o35 > size || o34 > o35 { + return ssz.ErrOffset + } + + // Offset (36) 'PendingConsolidations' + if o36 = ssz.ReadOffset(buf[2736885:2736889]); o36 > size || o35 > o36 { + return ssz.ErrOffset + } + + // Field (37) 'ProposerLookahead' + b.ProposerLookahead = 
ssz.ExtendUint64(b.ProposerLookahead, 64) + for ii := 0; ii < 64; ii++ { + b.ProposerLookahead[ii] = ssz.UnmarshallUint64(buf[2736889:2737401][ii*8 : (ii+1)*8]) + } + + // Field (38) 'ExecutionPayloadAvailability' + if cap(b.ExecutionPayloadAvailability) == 0 { + b.ExecutionPayloadAvailability = make([]byte, 0, len(buf[2737401:2738425])) + } + b.ExecutionPayloadAvailability = append(b.ExecutionPayloadAvailability, buf[2737401:2738425]...) + + // Field (39) 'BuilderPendingPayments' + b.BuilderPendingPayments = make([]*BuilderPendingPayment, 64) + for ii := 0; ii < 64; ii++ { + if b.BuilderPendingPayments[ii] == nil { + b.BuilderPendingPayments[ii] = new(BuilderPendingPayment) + } + if err = b.BuilderPendingPayments[ii].UnmarshalSSZ(buf[2738425:2741753][ii*52 : (ii+1)*52]); err != nil { + return err + } + } + + // Offset (40) 'BuilderPendingWithdrawals' + if o40 = ssz.ReadOffset(buf[2741753:2741757]); o40 > size || o36 > o40 { + return ssz.ErrOffset + } + + // Field (41) 'LatestBlockHash' + if cap(b.LatestBlockHash) == 0 { + b.LatestBlockHash = make([]byte, 0, len(buf[2741757:2741789])) + } + b.LatestBlockHash = append(b.LatestBlockHash, buf[2741757:2741789]...) + + // Field (42) 'LatestWithdrawalsRoot' + if cap(b.LatestWithdrawalsRoot) == 0 { + b.LatestWithdrawalsRoot = make([]byte, 0, len(buf[2741789:2741821])) + } + b.LatestWithdrawalsRoot = append(b.LatestWithdrawalsRoot, buf[2741789:2741821]...) + + // Field (7) 'HistoricalRoots' + { + buf = tail[o7:o9] + num, err := ssz.DivideInt2(len(buf), 32, 16777216) + if err != nil { + return err + } + b.HistoricalRoots = make([][]byte, num) + for ii := 0; ii < num; ii++ { + if cap(b.HistoricalRoots[ii]) == 0 { + b.HistoricalRoots[ii] = make([]byte, 0, len(buf[ii*32:(ii+1)*32])) + } + b.HistoricalRoots[ii] = append(b.HistoricalRoots[ii], buf[ii*32:(ii+1)*32]...) + } + } + + // Field (9) 'Eth1DataVotes' + { + buf = tail[o9:o11] + num, err := ssz.DivideInt2(len(buf), 72, 2048) + if err != nil { + return err + } + b.Eth1DataVotes = make([]*Eth1Data, num) + for ii := 0; ii < num; ii++ { + if b.Eth1DataVotes[ii] == nil { + b.Eth1DataVotes[ii] = new(Eth1Data) + } + if err = b.Eth1DataVotes[ii].UnmarshalSSZ(buf[ii*72 : (ii+1)*72]); err != nil { + return err + } + } + } + + // Field (11) 'Validators' + { + buf = tail[o11:o12] + num, err := ssz.DivideInt2(len(buf), 121, 1099511627776) + if err != nil { + return err + } + b.Validators = make([]*Validator, num) + for ii := 0; ii < num; ii++ { + if b.Validators[ii] == nil { + b.Validators[ii] = new(Validator) + } + if err = b.Validators[ii].UnmarshalSSZ(buf[ii*121 : (ii+1)*121]); err != nil { + return err + } + } + } + + // Field (12) 'Balances' + { + buf = tail[o12:o15] + num, err := ssz.DivideInt2(len(buf), 8, 1099511627776) + if err != nil { + return err + } + b.Balances = ssz.ExtendUint64(b.Balances, num) + for ii := 0; ii < num; ii++ { + b.Balances[ii] = ssz.UnmarshallUint64(buf[ii*8 : (ii+1)*8]) + } + } + + // Field (15) 'PreviousEpochParticipation' + { + buf = tail[o15:o16] + if len(buf) > 1099511627776 { + return ssz.ErrBytesLength + } + if cap(b.PreviousEpochParticipation) == 0 { + b.PreviousEpochParticipation = make([]byte, 0, len(buf)) + } + b.PreviousEpochParticipation = append(b.PreviousEpochParticipation, buf...) 
+ } + + // Field (16) 'CurrentEpochParticipation' + { + buf = tail[o16:o21] + if len(buf) > 1099511627776 { + return ssz.ErrBytesLength + } + if cap(b.CurrentEpochParticipation) == 0 { + b.CurrentEpochParticipation = make([]byte, 0, len(buf)) + } + b.CurrentEpochParticipation = append(b.CurrentEpochParticipation, buf...) + } + + // Field (21) 'InactivityScores' + { + buf = tail[o21:o27] + num, err := ssz.DivideInt2(len(buf), 8, 1099511627776) + if err != nil { + return err + } + b.InactivityScores = ssz.ExtendUint64(b.InactivityScores, num) + for ii := 0; ii < num; ii++ { + b.InactivityScores[ii] = ssz.UnmarshallUint64(buf[ii*8 : (ii+1)*8]) + } + } + + // Field (27) 'HistoricalSummaries' + { + buf = tail[o27:o34] + num, err := ssz.DivideInt2(len(buf), 64, 16777216) + if err != nil { + return err + } + b.HistoricalSummaries = make([]*HistoricalSummary, num) + for ii := 0; ii < num; ii++ { + if b.HistoricalSummaries[ii] == nil { + b.HistoricalSummaries[ii] = new(HistoricalSummary) + } + if err = b.HistoricalSummaries[ii].UnmarshalSSZ(buf[ii*64 : (ii+1)*64]); err != nil { + return err + } + } + } + + // Field (34) 'PendingDeposits' + { + buf = tail[o34:o35] + num, err := ssz.DivideInt2(len(buf), 192, 134217728) + if err != nil { + return err + } + b.PendingDeposits = make([]*PendingDeposit, num) + for ii := 0; ii < num; ii++ { + if b.PendingDeposits[ii] == nil { + b.PendingDeposits[ii] = new(PendingDeposit) + } + if err = b.PendingDeposits[ii].UnmarshalSSZ(buf[ii*192 : (ii+1)*192]); err != nil { + return err + } + } + } + + // Field (35) 'PendingPartialWithdrawals' + { + buf = tail[o35:o36] + num, err := ssz.DivideInt2(len(buf), 24, 134217728) + if err != nil { + return err + } + b.PendingPartialWithdrawals = make([]*PendingPartialWithdrawal, num) + for ii := 0; ii < num; ii++ { + if b.PendingPartialWithdrawals[ii] == nil { + b.PendingPartialWithdrawals[ii] = new(PendingPartialWithdrawal) + } + if err = b.PendingPartialWithdrawals[ii].UnmarshalSSZ(buf[ii*24 : (ii+1)*24]); err != nil { + return err + } + } + } + + // Field (36) 'PendingConsolidations' + { + buf = tail[o36:o40] + num, err := ssz.DivideInt2(len(buf), 16, 262144) + if err != nil { + return err + } + b.PendingConsolidations = make([]*PendingConsolidation, num) + for ii := 0; ii < num; ii++ { + if b.PendingConsolidations[ii] == nil { + b.PendingConsolidations[ii] = new(PendingConsolidation) + } + if err = b.PendingConsolidations[ii].UnmarshalSSZ(buf[ii*16 : (ii+1)*16]); err != nil { + return err + } + } + } + + // Field (40) 'BuilderPendingWithdrawals' + { + buf = tail[o40:] + num, err := ssz.DivideInt2(len(buf), 44, 1048576) + if err != nil { + return err + } + b.BuilderPendingWithdrawals = make([]*BuilderPendingWithdrawal, num) + for ii := 0; ii < num; ii++ { + if b.BuilderPendingWithdrawals[ii] == nil { + b.BuilderPendingWithdrawals[ii] = new(BuilderPendingWithdrawal) + } + if err = b.BuilderPendingWithdrawals[ii].UnmarshalSSZ(buf[ii*44 : (ii+1)*44]); err != nil { + return err + } + } + } + return err +} + +// SizeSSZ returns the ssz encoded size in bytes for the BeaconStateGloas object +func (b *BeaconStateGloas) SizeSSZ() (size int) { + size = 2741821 + + // Field (7) 'HistoricalRoots' + size += len(b.HistoricalRoots) * 32 + + // Field (9) 'Eth1DataVotes' + size += len(b.Eth1DataVotes) * 72 + + // Field (11) 'Validators' + size += len(b.Validators) * 121 + + // Field (12) 'Balances' + size += len(b.Balances) * 8 + + // Field (15) 'PreviousEpochParticipation' + size += len(b.PreviousEpochParticipation) + + // Field (16) 
'CurrentEpochParticipation' + size += len(b.CurrentEpochParticipation) + + // Field (21) 'InactivityScores' + size += len(b.InactivityScores) * 8 + + // Field (27) 'HistoricalSummaries' + size += len(b.HistoricalSummaries) * 64 + + // Field (34) 'PendingDeposits' + size += len(b.PendingDeposits) * 192 + + // Field (35) 'PendingPartialWithdrawals' + size += len(b.PendingPartialWithdrawals) * 24 + + // Field (36) 'PendingConsolidations' + size += len(b.PendingConsolidations) * 16 + + // Field (40) 'BuilderPendingWithdrawals' + size += len(b.BuilderPendingWithdrawals) * 44 + + return +} + +// HashTreeRoot ssz hashes the BeaconStateGloas object +func (b *BeaconStateGloas) HashTreeRoot() ([32]byte, error) { + return ssz.HashWithDefaultHasher(b) +} + +// HashTreeRootWith ssz hashes the BeaconStateGloas object with a hasher +func (b *BeaconStateGloas) HashTreeRootWith(hh *ssz.Hasher) (err error) { + indx := hh.Index() + + // Field (0) 'GenesisTime' + hh.PutUint64(b.GenesisTime) + + // Field (1) 'GenesisValidatorsRoot' + if size := len(b.GenesisValidatorsRoot); size != 32 { + err = ssz.ErrBytesLengthFn("--.GenesisValidatorsRoot", size, 32) + return + } + hh.PutBytes(b.GenesisValidatorsRoot) + + // Field (2) 'Slot' + hh.PutUint64(uint64(b.Slot)) + + // Field (3) 'Fork' + if err = b.Fork.HashTreeRootWith(hh); err != nil { + return + } + + // Field (4) 'LatestBlockHeader' + if err = b.LatestBlockHeader.HashTreeRootWith(hh); err != nil { + return + } + + // Field (5) 'BlockRoots' + { + if size := len(b.BlockRoots); size != 8192 { + err = ssz.ErrVectorLengthFn("--.BlockRoots", size, 8192) + return + } + subIndx := hh.Index() + for _, i := range b.BlockRoots { + if len(i) != 32 { + err = ssz.ErrBytesLength + return + } + hh.Append(i) + } + hh.Merkleize(subIndx) + } + + // Field (6) 'StateRoots' + { + if size := len(b.StateRoots); size != 8192 { + err = ssz.ErrVectorLengthFn("--.StateRoots", size, 8192) + return + } + subIndx := hh.Index() + for _, i := range b.StateRoots { + if len(i) != 32 { + err = ssz.ErrBytesLength + return + } + hh.Append(i) + } + hh.Merkleize(subIndx) + } + + // Field (7) 'HistoricalRoots' + { + if size := len(b.HistoricalRoots); size > 16777216 { + err = ssz.ErrListTooBigFn("--.HistoricalRoots", size, 16777216) + return + } + subIndx := hh.Index() + for _, i := range b.HistoricalRoots { + if len(i) != 32 { + err = ssz.ErrBytesLength + return + } + hh.Append(i) + } + + numItems := uint64(len(b.HistoricalRoots)) + hh.MerkleizeWithMixin(subIndx, numItems, 16777216) + } + + // Field (8) 'Eth1Data' + if err = b.Eth1Data.HashTreeRootWith(hh); err != nil { + return + } + + // Field (9) 'Eth1DataVotes' + { + subIndx := hh.Index() + num := uint64(len(b.Eth1DataVotes)) + if num > 2048 { + err = ssz.ErrIncorrectListSize + return + } + for _, elem := range b.Eth1DataVotes { + if err = elem.HashTreeRootWith(hh); err != nil { + return + } + } + hh.MerkleizeWithMixin(subIndx, num, 2048) + } + + // Field (10) 'Eth1DepositIndex' + hh.PutUint64(b.Eth1DepositIndex) + + // Field (11) 'Validators' + { + subIndx := hh.Index() + num := uint64(len(b.Validators)) + if num > 1099511627776 { + err = ssz.ErrIncorrectListSize + return + } + for _, elem := range b.Validators { + if err = elem.HashTreeRootWith(hh); err != nil { + return + } + } + hh.MerkleizeWithMixin(subIndx, num, 1099511627776) + } + + // Field (12) 'Balances' + { + if size := len(b.Balances); size > 1099511627776 { + err = ssz.ErrListTooBigFn("--.Balances", size, 1099511627776) + return + } + subIndx := hh.Index() + for _, i := range 
b.Balances { + hh.AppendUint64(i) + } + hh.FillUpTo32() + + numItems := uint64(len(b.Balances)) + hh.MerkleizeWithMixin(subIndx, numItems, ssz.CalculateLimit(1099511627776, numItems, 8)) + } + + // Field (13) 'RandaoMixes' + { + if size := len(b.RandaoMixes); size != 65536 { + err = ssz.ErrVectorLengthFn("--.RandaoMixes", size, 65536) + return + } + subIndx := hh.Index() + for _, i := range b.RandaoMixes { + if len(i) != 32 { + err = ssz.ErrBytesLength + return + } + hh.Append(i) + } + hh.Merkleize(subIndx) + } + + // Field (14) 'Slashings' + { + if size := len(b.Slashings); size != 8192 { + err = ssz.ErrVectorLengthFn("--.Slashings", size, 8192) + return + } + subIndx := hh.Index() + for _, i := range b.Slashings { + hh.AppendUint64(i) + } + hh.Merkleize(subIndx) + } + + // Field (15) 'PreviousEpochParticipation' + { + elemIndx := hh.Index() + byteLen := uint64(len(b.PreviousEpochParticipation)) + if byteLen > 1099511627776 { + err = ssz.ErrIncorrectListSize + return + } + hh.PutBytes(b.PreviousEpochParticipation) + hh.MerkleizeWithMixin(elemIndx, byteLen, (1099511627776+31)/32) + } + + // Field (16) 'CurrentEpochParticipation' + { + elemIndx := hh.Index() + byteLen := uint64(len(b.CurrentEpochParticipation)) + if byteLen > 1099511627776 { + err = ssz.ErrIncorrectListSize + return + } + hh.PutBytes(b.CurrentEpochParticipation) + hh.MerkleizeWithMixin(elemIndx, byteLen, (1099511627776+31)/32) + } + + // Field (17) 'JustificationBits' + if size := len(b.JustificationBits); size != 1 { + err = ssz.ErrBytesLengthFn("--.JustificationBits", size, 1) + return + } + hh.PutBytes(b.JustificationBits) + + // Field (18) 'PreviousJustifiedCheckpoint' + if err = b.PreviousJustifiedCheckpoint.HashTreeRootWith(hh); err != nil { + return + } + + // Field (19) 'CurrentJustifiedCheckpoint' + if err = b.CurrentJustifiedCheckpoint.HashTreeRootWith(hh); err != nil { + return + } + + // Field (20) 'FinalizedCheckpoint' + if err = b.FinalizedCheckpoint.HashTreeRootWith(hh); err != nil { + return + } + + // Field (21) 'InactivityScores' + { + if size := len(b.InactivityScores); size > 1099511627776 { + err = ssz.ErrListTooBigFn("--.InactivityScores", size, 1099511627776) + return + } + subIndx := hh.Index() + for _, i := range b.InactivityScores { + hh.AppendUint64(i) + } + hh.FillUpTo32() + + numItems := uint64(len(b.InactivityScores)) + hh.MerkleizeWithMixin(subIndx, numItems, ssz.CalculateLimit(1099511627776, numItems, 8)) + } + + // Field (22) 'CurrentSyncCommittee' + if err = b.CurrentSyncCommittee.HashTreeRootWith(hh); err != nil { + return + } + + // Field (23) 'NextSyncCommittee' + if err = b.NextSyncCommittee.HashTreeRootWith(hh); err != nil { + return + } + + // Field (24) 'LatestExecutionPayloadBid' + if err = b.LatestExecutionPayloadBid.HashTreeRootWith(hh); err != nil { + return + } + + // Field (25) 'NextWithdrawalIndex' + hh.PutUint64(b.NextWithdrawalIndex) + + // Field (26) 'NextWithdrawalValidatorIndex' + hh.PutUint64(uint64(b.NextWithdrawalValidatorIndex)) + + // Field (27) 'HistoricalSummaries' + { + subIndx := hh.Index() + num := uint64(len(b.HistoricalSummaries)) + if num > 16777216 { + err = ssz.ErrIncorrectListSize + return + } + for _, elem := range b.HistoricalSummaries { + if err = elem.HashTreeRootWith(hh); err != nil { + return + } + } + hh.MerkleizeWithMixin(subIndx, num, 16777216) + } + + // Field (28) 'DepositRequestsStartIndex' + hh.PutUint64(b.DepositRequestsStartIndex) + + // Field (29) 'DepositBalanceToConsume' + hh.PutUint64(uint64(b.DepositBalanceToConsume)) + + // Field (30) 
'ExitBalanceToConsume' + hh.PutUint64(uint64(b.ExitBalanceToConsume)) + + // Field (31) 'EarliestExitEpoch' + hh.PutUint64(uint64(b.EarliestExitEpoch)) + + // Field (32) 'ConsolidationBalanceToConsume' + hh.PutUint64(uint64(b.ConsolidationBalanceToConsume)) + + // Field (33) 'EarliestConsolidationEpoch' + hh.PutUint64(uint64(b.EarliestConsolidationEpoch)) + + // Field (34) 'PendingDeposits' + { + subIndx := hh.Index() + num := uint64(len(b.PendingDeposits)) + if num > 134217728 { + err = ssz.ErrIncorrectListSize + return + } + for _, elem := range b.PendingDeposits { + if err = elem.HashTreeRootWith(hh); err != nil { + return + } + } + hh.MerkleizeWithMixin(subIndx, num, 134217728) + } + + // Field (35) 'PendingPartialWithdrawals' + { + subIndx := hh.Index() + num := uint64(len(b.PendingPartialWithdrawals)) + if num > 134217728 { + err = ssz.ErrIncorrectListSize + return + } + for _, elem := range b.PendingPartialWithdrawals { + if err = elem.HashTreeRootWith(hh); err != nil { + return + } + } + hh.MerkleizeWithMixin(subIndx, num, 134217728) + } + + // Field (36) 'PendingConsolidations' + { + subIndx := hh.Index() + num := uint64(len(b.PendingConsolidations)) + if num > 262144 { + err = ssz.ErrIncorrectListSize + return + } + for _, elem := range b.PendingConsolidations { + if err = elem.HashTreeRootWith(hh); err != nil { + return + } + } + hh.MerkleizeWithMixin(subIndx, num, 262144) + } + + // Field (37) 'ProposerLookahead' + { + if size := len(b.ProposerLookahead); size != 64 { + err = ssz.ErrVectorLengthFn("--.ProposerLookahead", size, 64) + return + } + subIndx := hh.Index() + for _, i := range b.ProposerLookahead { + hh.AppendUint64(i) + } + hh.Merkleize(subIndx) + } + + // Field (38) 'ExecutionPayloadAvailability' + if size := len(b.ExecutionPayloadAvailability); size != 1024 { + err = ssz.ErrBytesLengthFn("--.ExecutionPayloadAvailability", size, 1024) + return + } + hh.PutBytes(b.ExecutionPayloadAvailability) + + // Field (39) 'BuilderPendingPayments' + { + subIndx := hh.Index() + for _, elem := range b.BuilderPendingPayments { + if err = elem.HashTreeRootWith(hh); err != nil { + return + } + } + hh.Merkleize(subIndx) + } + + // Field (40) 'BuilderPendingWithdrawals' + { + subIndx := hh.Index() + num := uint64(len(b.BuilderPendingWithdrawals)) + if num > 1048576 { + err = ssz.ErrIncorrectListSize + return + } + for _, elem := range b.BuilderPendingWithdrawals { + if err = elem.HashTreeRootWith(hh); err != nil { + return + } + } + hh.MerkleizeWithMixin(subIndx, num, 1048576) + } + + // Field (41) 'LatestBlockHash' + if size := len(b.LatestBlockHash); size != 32 { + err = ssz.ErrBytesLengthFn("--.LatestBlockHash", size, 32) + return + } + hh.PutBytes(b.LatestBlockHash) + + // Field (42) 'LatestWithdrawalsRoot' + if size := len(b.LatestWithdrawalsRoot); size != 32 { + err = ssz.ErrBytesLengthFn("--.LatestWithdrawalsRoot", size, 32) + return + } + hh.PutBytes(b.LatestWithdrawalsRoot) + + hh.Merkleize(indx) + return +} + +// MarshalSSZ ssz marshals the BuilderPendingPayment object +func (b *BuilderPendingPayment) MarshalSSZ() ([]byte, error) { + return ssz.MarshalSSZ(b) +} + +// MarshalSSZTo ssz marshals the BuilderPendingPayment object to a target array +func (b *BuilderPendingPayment) MarshalSSZTo(buf []byte) (dst []byte, err error) { + dst = buf + + // Field (0) 'Weight' + dst = ssz.MarshalUint64(dst, uint64(b.Weight)) + + // Field (1) 'Withdrawal' + if b.Withdrawal == nil { + b.Withdrawal = new(BuilderPendingWithdrawal) + } + if dst, err = b.Withdrawal.MarshalSSZTo(dst); err != nil { 
+ return + } + + return +} + +// UnmarshalSSZ ssz unmarshals the BuilderPendingPayment object +func (b *BuilderPendingPayment) UnmarshalSSZ(buf []byte) error { + var err error + size := uint64(len(buf)) + if size != 52 { + return ssz.ErrSize + } + + // Field (0) 'Weight' + b.Weight = github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Gwei(ssz.UnmarshallUint64(buf[0:8])) + + // Field (1) 'Withdrawal' + if b.Withdrawal == nil { + b.Withdrawal = new(BuilderPendingWithdrawal) + } + if err = b.Withdrawal.UnmarshalSSZ(buf[8:52]); err != nil { + return err + } + + return err +} + +// SizeSSZ returns the ssz encoded size in bytes for the BuilderPendingPayment object +func (b *BuilderPendingPayment) SizeSSZ() (size int) { + size = 52 + return +} + +// HashTreeRoot ssz hashes the BuilderPendingPayment object +func (b *BuilderPendingPayment) HashTreeRoot() ([32]byte, error) { + return ssz.HashWithDefaultHasher(b) +} + +// HashTreeRootWith ssz hashes the BuilderPendingPayment object with a hasher +func (b *BuilderPendingPayment) HashTreeRootWith(hh *ssz.Hasher) (err error) { + indx := hh.Index() + + // Field (0) 'Weight' + hh.PutUint64(uint64(b.Weight)) + + // Field (1) 'Withdrawal' + if err = b.Withdrawal.HashTreeRootWith(hh); err != nil { + return + } + + hh.Merkleize(indx) + return +} + +// MarshalSSZ ssz marshals the BuilderPendingWithdrawal object +func (b *BuilderPendingWithdrawal) MarshalSSZ() ([]byte, error) { + return ssz.MarshalSSZ(b) +} + +// MarshalSSZTo ssz marshals the BuilderPendingWithdrawal object to a target array +func (b *BuilderPendingWithdrawal) MarshalSSZTo(buf []byte) (dst []byte, err error) { + dst = buf + + // Field (0) 'FeeRecipient' + if size := len(b.FeeRecipient); size != 20 { + err = ssz.ErrBytesLengthFn("--.FeeRecipient", size, 20) + return + } + dst = append(dst, b.FeeRecipient...) + + // Field (1) 'Amount' + dst = ssz.MarshalUint64(dst, uint64(b.Amount)) + + // Field (2) 'BuilderIndex' + dst = ssz.MarshalUint64(dst, uint64(b.BuilderIndex)) + + // Field (3) 'WithdrawableEpoch' + dst = ssz.MarshalUint64(dst, uint64(b.WithdrawableEpoch)) + + return +} + +// UnmarshalSSZ ssz unmarshals the BuilderPendingWithdrawal object +func (b *BuilderPendingWithdrawal) UnmarshalSSZ(buf []byte) error { + var err error + size := uint64(len(buf)) + if size != 44 { + return ssz.ErrSize + } + + // Field (0) 'FeeRecipient' + if cap(b.FeeRecipient) == 0 { + b.FeeRecipient = make([]byte, 0, len(buf[0:20])) + } + b.FeeRecipient = append(b.FeeRecipient, buf[0:20]...) 
+ + // Field (1) 'Amount' + b.Amount = github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Gwei(ssz.UnmarshallUint64(buf[20:28])) + + // Field (2) 'BuilderIndex' + b.BuilderIndex = github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex(ssz.UnmarshallUint64(buf[28:36])) + + // Field (3) 'WithdrawableEpoch' + b.WithdrawableEpoch = github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch(ssz.UnmarshallUint64(buf[36:44])) + + return err +} + +// SizeSSZ returns the ssz encoded size in bytes for the BuilderPendingWithdrawal object +func (b *BuilderPendingWithdrawal) SizeSSZ() (size int) { + size = 44 + return +} + +// HashTreeRoot ssz hashes the BuilderPendingWithdrawal object +func (b *BuilderPendingWithdrawal) HashTreeRoot() ([32]byte, error) { + return ssz.HashWithDefaultHasher(b) +} + +// HashTreeRootWith ssz hashes the BuilderPendingWithdrawal object with a hasher +func (b *BuilderPendingWithdrawal) HashTreeRootWith(hh *ssz.Hasher) (err error) { + indx := hh.Index() + + // Field (0) 'FeeRecipient' + if size := len(b.FeeRecipient); size != 20 { + err = ssz.ErrBytesLengthFn("--.FeeRecipient", size, 20) + return + } + hh.PutBytes(b.FeeRecipient) + + // Field (1) 'Amount' + hh.PutUint64(uint64(b.Amount)) + + // Field (2) 'BuilderIndex' + hh.PutUint64(uint64(b.BuilderIndex)) + + // Field (3) 'WithdrawableEpoch' + hh.PutUint64(uint64(b.WithdrawableEpoch)) + + hh.Merkleize(indx) + return +} + +// MarshalSSZ ssz marshals the DataColumnSidecarGloas object +func (d *DataColumnSidecarGloas) MarshalSSZ() ([]byte, error) { + return ssz.MarshalSSZ(d) +} + +// MarshalSSZTo ssz marshals the DataColumnSidecarGloas object to a target array +func (d *DataColumnSidecarGloas) MarshalSSZTo(buf []byte) (dst []byte, err error) { + dst = buf + offset := int(60) + + // Field (0) 'Index' + dst = ssz.MarshalUint64(dst, d.Index) + + // Offset (1) 'Column' + dst = ssz.WriteOffset(dst, offset) + offset += len(d.Column) * 2048 + + // Offset (2) 'KzgCommitments' + dst = ssz.WriteOffset(dst, offset) + offset += len(d.KzgCommitments) * 48 + + // Offset (3) 'KzgProofs' + dst = ssz.WriteOffset(dst, offset) + offset += len(d.KzgProofs) * 48 + + // Field (4) 'Slot' + dst = ssz.MarshalUint64(dst, uint64(d.Slot)) + + // Field (5) 'BeaconBlockRoot' + if size := len(d.BeaconBlockRoot); size != 32 { + err = ssz.ErrBytesLengthFn("--.BeaconBlockRoot", size, 32) + return + } + dst = append(dst, d.BeaconBlockRoot...) + + // Field (1) 'Column' + if size := len(d.Column); size > 4096 { + err = ssz.ErrListTooBigFn("--.Column", size, 4096) + return + } + for ii := 0; ii < len(d.Column); ii++ { + if size := len(d.Column[ii]); size != 2048 { + err = ssz.ErrBytesLengthFn("--.Column[ii]", size, 2048) + return + } + dst = append(dst, d.Column[ii]...) + } + + // Field (2) 'KzgCommitments' + if size := len(d.KzgCommitments); size > 4096 { + err = ssz.ErrListTooBigFn("--.KzgCommitments", size, 4096) + return + } + for ii := 0; ii < len(d.KzgCommitments); ii++ { + if size := len(d.KzgCommitments[ii]); size != 48 { + err = ssz.ErrBytesLengthFn("--.KzgCommitments[ii]", size, 48) + return + } + dst = append(dst, d.KzgCommitments[ii]...) + } + + // Field (3) 'KzgProofs' + if size := len(d.KzgProofs); size > 4096 { + err = ssz.ErrListTooBigFn("--.KzgProofs", size, 4096) + return + } + for ii := 0; ii < len(d.KzgProofs); ii++ { + if size := len(d.KzgProofs[ii]); size != 48 { + err = ssz.ErrBytesLengthFn("--.KzgProofs[ii]", size, 48) + return + } + dst = append(dst, d.KzgProofs[ii]...) 
+ } + + return +} + +// UnmarshalSSZ ssz unmarshals the DataColumnSidecarGloas object +func (d *DataColumnSidecarGloas) UnmarshalSSZ(buf []byte) error { + var err error + size := uint64(len(buf)) + if size < 60 { + return ssz.ErrSize + } + + tail := buf + var o1, o2, o3 uint64 + + // Field (0) 'Index' + d.Index = ssz.UnmarshallUint64(buf[0:8]) + + // Offset (1) 'Column' + if o1 = ssz.ReadOffset(buf[8:12]); o1 > size { + return ssz.ErrOffset + } + + if o1 != 60 { + return ssz.ErrInvalidVariableOffset + } + + // Offset (2) 'KzgCommitments' + if o2 = ssz.ReadOffset(buf[12:16]); o2 > size || o1 > o2 { + return ssz.ErrOffset + } + + // Offset (3) 'KzgProofs' + if o3 = ssz.ReadOffset(buf[16:20]); o3 > size || o2 > o3 { + return ssz.ErrOffset + } + + // Field (4) 'Slot' + d.Slot = github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot(ssz.UnmarshallUint64(buf[20:28])) + + // Field (5) 'BeaconBlockRoot' + if cap(d.BeaconBlockRoot) == 0 { + d.BeaconBlockRoot = make([]byte, 0, len(buf[28:60])) + } + d.BeaconBlockRoot = append(d.BeaconBlockRoot, buf[28:60]...) + + // Field (1) 'Column' + { + buf = tail[o1:o2] + num, err := ssz.DivideInt2(len(buf), 2048, 4096) + if err != nil { + return err + } + d.Column = make([][]byte, num) + for ii := 0; ii < num; ii++ { + if cap(d.Column[ii]) == 0 { + d.Column[ii] = make([]byte, 0, len(buf[ii*2048:(ii+1)*2048])) + } + d.Column[ii] = append(d.Column[ii], buf[ii*2048:(ii+1)*2048]...) + } + } + + // Field (2) 'KzgCommitments' + { + buf = tail[o2:o3] + num, err := ssz.DivideInt2(len(buf), 48, 4096) + if err != nil { + return err + } + d.KzgCommitments = make([][]byte, num) + for ii := 0; ii < num; ii++ { + if cap(d.KzgCommitments[ii]) == 0 { + d.KzgCommitments[ii] = make([]byte, 0, len(buf[ii*48:(ii+1)*48])) + } + d.KzgCommitments[ii] = append(d.KzgCommitments[ii], buf[ii*48:(ii+1)*48]...) + } + } + + // Field (3) 'KzgProofs' + { + buf = tail[o3:] + num, err := ssz.DivideInt2(len(buf), 48, 4096) + if err != nil { + return err + } + d.KzgProofs = make([][]byte, num) + for ii := 0; ii < num; ii++ { + if cap(d.KzgProofs[ii]) == 0 { + d.KzgProofs[ii] = make([]byte, 0, len(buf[ii*48:(ii+1)*48])) + } + d.KzgProofs[ii] = append(d.KzgProofs[ii], buf[ii*48:(ii+1)*48]...) 
+ } + } + return err +} + +// SizeSSZ returns the ssz encoded size in bytes for the DataColumnSidecarGloas object +func (d *DataColumnSidecarGloas) SizeSSZ() (size int) { + size = 60 + + // Field (1) 'Column' + size += len(d.Column) * 2048 + + // Field (2) 'KzgCommitments' + size += len(d.KzgCommitments) * 48 + + // Field (3) 'KzgProofs' + size += len(d.KzgProofs) * 48 + + return +} + +// HashTreeRoot ssz hashes the DataColumnSidecarGloas object +func (d *DataColumnSidecarGloas) HashTreeRoot() ([32]byte, error) { + return ssz.HashWithDefaultHasher(d) +} + +// HashTreeRootWith ssz hashes the DataColumnSidecarGloas object with a hasher +func (d *DataColumnSidecarGloas) HashTreeRootWith(hh *ssz.Hasher) (err error) { + indx := hh.Index() + + // Field (0) 'Index' + hh.PutUint64(d.Index) + + // Field (1) 'Column' + { + if size := len(d.Column); size > 4096 { + err = ssz.ErrListTooBigFn("--.Column", size, 4096) + return + } + subIndx := hh.Index() + for _, i := range d.Column { + if len(i) != 2048 { + err = ssz.ErrBytesLength + return + } + hh.PutBytes(i) + } + + numItems := uint64(len(d.Column)) + hh.MerkleizeWithMixin(subIndx, numItems, 4096) + } + + // Field (2) 'KzgCommitments' + { + if size := len(d.KzgCommitments); size > 4096 { + err = ssz.ErrListTooBigFn("--.KzgCommitments", size, 4096) + return + } + subIndx := hh.Index() + for _, i := range d.KzgCommitments { + if len(i) != 48 { + err = ssz.ErrBytesLength + return + } + hh.PutBytes(i) + } + + numItems := uint64(len(d.KzgCommitments)) + hh.MerkleizeWithMixin(subIndx, numItems, 4096) + } + + // Field (3) 'KzgProofs' + { + if size := len(d.KzgProofs); size > 4096 { + err = ssz.ErrListTooBigFn("--.KzgProofs", size, 4096) + return + } + subIndx := hh.Index() + for _, i := range d.KzgProofs { + if len(i) != 48 { + err = ssz.ErrBytesLength + return + } + hh.PutBytes(i) + } + + numItems := uint64(len(d.KzgProofs)) + hh.MerkleizeWithMixin(subIndx, numItems, 4096) + } + + // Field (4) 'Slot' + hh.PutUint64(uint64(d.Slot)) + + // Field (5) 'BeaconBlockRoot' + if size := len(d.BeaconBlockRoot); size != 32 { + err = ssz.ErrBytesLengthFn("--.BeaconBlockRoot", size, 32) + return + } + hh.PutBytes(d.BeaconBlockRoot) + + hh.Merkleize(indx) + return +} + +// MarshalSSZ ssz marshals the ExecutionPayloadEnvelope object +func (e *ExecutionPayloadEnvelope) MarshalSSZ() ([]byte, error) { + return ssz.MarshalSSZ(e) +} + +// MarshalSSZTo ssz marshals the ExecutionPayloadEnvelope object to a target array +func (e *ExecutionPayloadEnvelope) MarshalSSZTo(buf []byte) (dst []byte, err error) { + dst = buf + offset := int(92) + + // Offset (0) 'Payload' + dst = ssz.WriteOffset(dst, offset) + if e.Payload == nil { + e.Payload = new(v1.ExecutionPayloadDeneb) + } + offset += e.Payload.SizeSSZ() + + // Offset (1) 'ExecutionRequests' + dst = ssz.WriteOffset(dst, offset) + if e.ExecutionRequests == nil { + e.ExecutionRequests = new(v1.ExecutionRequests) + } + offset += e.ExecutionRequests.SizeSSZ() + + // Field (2) 'BuilderIndex' + dst = ssz.MarshalUint64(dst, uint64(e.BuilderIndex)) + + // Field (3) 'BeaconBlockRoot' + if size := len(e.BeaconBlockRoot); size != 32 { + err = ssz.ErrBytesLengthFn("--.BeaconBlockRoot", size, 32) + return + } + dst = append(dst, e.BeaconBlockRoot...) 
+ + // Field (4) 'Slot' + dst = ssz.MarshalUint64(dst, uint64(e.Slot)) + + // Offset (5) 'BlobKzgCommitments' + dst = ssz.WriteOffset(dst, offset) + offset += len(e.BlobKzgCommitments) * 48 + + // Field (6) 'StateRoot' + if size := len(e.StateRoot); size != 32 { + err = ssz.ErrBytesLengthFn("--.StateRoot", size, 32) + return + } + dst = append(dst, e.StateRoot...) + + // Field (0) 'Payload' + if dst, err = e.Payload.MarshalSSZTo(dst); err != nil { + return + } + + // Field (1) 'ExecutionRequests' + if dst, err = e.ExecutionRequests.MarshalSSZTo(dst); err != nil { + return + } + + // Field (5) 'BlobKzgCommitments' + if size := len(e.BlobKzgCommitments); size > 4096 { + err = ssz.ErrListTooBigFn("--.BlobKzgCommitments", size, 4096) + return + } + for ii := 0; ii < len(e.BlobKzgCommitments); ii++ { + if size := len(e.BlobKzgCommitments[ii]); size != 48 { + err = ssz.ErrBytesLengthFn("--.BlobKzgCommitments[ii]", size, 48) + return + } + dst = append(dst, e.BlobKzgCommitments[ii]...) + } + + return +} + +// UnmarshalSSZ ssz unmarshals the ExecutionPayloadEnvelope object +func (e *ExecutionPayloadEnvelope) UnmarshalSSZ(buf []byte) error { + var err error + size := uint64(len(buf)) + if size < 92 { + return ssz.ErrSize + } + + tail := buf + var o0, o1, o5 uint64 + + // Offset (0) 'Payload' + if o0 = ssz.ReadOffset(buf[0:4]); o0 > size { + return ssz.ErrOffset + } + + if o0 != 92 { + return ssz.ErrInvalidVariableOffset + } + + // Offset (1) 'ExecutionRequests' + if o1 = ssz.ReadOffset(buf[4:8]); o1 > size || o0 > o1 { + return ssz.ErrOffset + } + + // Field (2) 'BuilderIndex' + e.BuilderIndex = github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex(ssz.UnmarshallUint64(buf[8:16])) + + // Field (3) 'BeaconBlockRoot' + if cap(e.BeaconBlockRoot) == 0 { + e.BeaconBlockRoot = make([]byte, 0, len(buf[16:48])) + } + e.BeaconBlockRoot = append(e.BeaconBlockRoot, buf[16:48]...) + + // Field (4) 'Slot' + e.Slot = github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot(ssz.UnmarshallUint64(buf[48:56])) + + // Offset (5) 'BlobKzgCommitments' + if o5 = ssz.ReadOffset(buf[56:60]); o5 > size || o1 > o5 { + return ssz.ErrOffset + } + + // Field (6) 'StateRoot' + if cap(e.StateRoot) == 0 { + e.StateRoot = make([]byte, 0, len(buf[60:92])) + } + e.StateRoot = append(e.StateRoot, buf[60:92]...) + + // Field (0) 'Payload' + { + buf = tail[o0:o1] + if e.Payload == nil { + e.Payload = new(v1.ExecutionPayloadDeneb) + } + if err = e.Payload.UnmarshalSSZ(buf); err != nil { + return err + } + } + + // Field (1) 'ExecutionRequests' + { + buf = tail[o1:o5] + if e.ExecutionRequests == nil { + e.ExecutionRequests = new(v1.ExecutionRequests) + } + if err = e.ExecutionRequests.UnmarshalSSZ(buf); err != nil { + return err + } + } + + // Field (5) 'BlobKzgCommitments' + { + buf = tail[o5:] + num, err := ssz.DivideInt2(len(buf), 48, 4096) + if err != nil { + return err + } + e.BlobKzgCommitments = make([][]byte, num) + for ii := 0; ii < num; ii++ { + if cap(e.BlobKzgCommitments[ii]) == 0 { + e.BlobKzgCommitments[ii] = make([]byte, 0, len(buf[ii*48:(ii+1)*48])) + } + e.BlobKzgCommitments[ii] = append(e.BlobKzgCommitments[ii], buf[ii*48:(ii+1)*48]...) 
+ } + } + return err +} + +// SizeSSZ returns the ssz encoded size in bytes for the ExecutionPayloadEnvelope object +func (e *ExecutionPayloadEnvelope) SizeSSZ() (size int) { + size = 92 + + // Field (0) 'Payload' + if e.Payload == nil { + e.Payload = new(v1.ExecutionPayloadDeneb) + } + size += e.Payload.SizeSSZ() + + // Field (1) 'ExecutionRequests' + if e.ExecutionRequests == nil { + e.ExecutionRequests = new(v1.ExecutionRequests) + } + size += e.ExecutionRequests.SizeSSZ() + + // Field (5) 'BlobKzgCommitments' + size += len(e.BlobKzgCommitments) * 48 + + return +} + +// HashTreeRoot ssz hashes the ExecutionPayloadEnvelope object +func (e *ExecutionPayloadEnvelope) HashTreeRoot() ([32]byte, error) { + return ssz.HashWithDefaultHasher(e) +} + +// HashTreeRootWith ssz hashes the ExecutionPayloadEnvelope object with a hasher +func (e *ExecutionPayloadEnvelope) HashTreeRootWith(hh *ssz.Hasher) (err error) { + indx := hh.Index() + + // Field (0) 'Payload' + if err = e.Payload.HashTreeRootWith(hh); err != nil { + return + } + + // Field (1) 'ExecutionRequests' + if err = e.ExecutionRequests.HashTreeRootWith(hh); err != nil { + return + } + + // Field (2) 'BuilderIndex' + hh.PutUint64(uint64(e.BuilderIndex)) + + // Field (3) 'BeaconBlockRoot' + if size := len(e.BeaconBlockRoot); size != 32 { + err = ssz.ErrBytesLengthFn("--.BeaconBlockRoot", size, 32) + return + } + hh.PutBytes(e.BeaconBlockRoot) + + // Field (4) 'Slot' + hh.PutUint64(uint64(e.Slot)) + + // Field (5) 'BlobKzgCommitments' + { + if size := len(e.BlobKzgCommitments); size > 4096 { + err = ssz.ErrListTooBigFn("--.BlobKzgCommitments", size, 4096) + return + } + subIndx := hh.Index() + for _, i := range e.BlobKzgCommitments { + if len(i) != 48 { + err = ssz.ErrBytesLength + return + } + hh.PutBytes(i) + } + + numItems := uint64(len(e.BlobKzgCommitments)) + hh.MerkleizeWithMixin(subIndx, numItems, 4096) + } + + // Field (6) 'StateRoot' + if size := len(e.StateRoot); size != 32 { + err = ssz.ErrBytesLengthFn("--.StateRoot", size, 32) + return + } + hh.PutBytes(e.StateRoot) + + hh.Merkleize(indx) + return +} + +// MarshalSSZ ssz marshals the SignedExecutionPayloadEnvelope object +func (s *SignedExecutionPayloadEnvelope) MarshalSSZ() ([]byte, error) { + return ssz.MarshalSSZ(s) +} + +// MarshalSSZTo ssz marshals the SignedExecutionPayloadEnvelope object to a target array +func (s *SignedExecutionPayloadEnvelope) MarshalSSZTo(buf []byte) (dst []byte, err error) { + dst = buf + offset := int(100) + + // Offset (0) 'Message' + dst = ssz.WriteOffset(dst, offset) + if s.Message == nil { + s.Message = new(ExecutionPayloadEnvelope) + } + offset += s.Message.SizeSSZ() + + // Field (1) 'Signature' + if size := len(s.Signature); size != 96 { + err = ssz.ErrBytesLengthFn("--.Signature", size, 96) + return + } + dst = append(dst, s.Signature...) + + // Field (0) 'Message' + if dst, err = s.Message.MarshalSSZTo(dst); err != nil { + return + } + + return +} + +// UnmarshalSSZ ssz unmarshals the SignedExecutionPayloadEnvelope object +func (s *SignedExecutionPayloadEnvelope) UnmarshalSSZ(buf []byte) error { + var err error + size := uint64(len(buf)) + if size < 100 { + return ssz.ErrSize + } + + tail := buf + var o0 uint64 + + // Offset (0) 'Message' + if o0 = ssz.ReadOffset(buf[0:4]); o0 > size { + return ssz.ErrOffset + } + + if o0 != 100 { + return ssz.ErrInvalidVariableOffset + } + + // Field (1) 'Signature' + if cap(s.Signature) == 0 { + s.Signature = make([]byte, 0, len(buf[4:100])) + } + s.Signature = append(s.Signature, buf[4:100]...) 
+ + // Field (0) 'Message' + { + buf = tail[o0:] + if s.Message == nil { + s.Message = new(ExecutionPayloadEnvelope) + } + if err = s.Message.UnmarshalSSZ(buf); err != nil { + return err + } + } + return err +} + +// SizeSSZ returns the ssz encoded size in bytes for the SignedExecutionPayloadEnvelope object +func (s *SignedExecutionPayloadEnvelope) SizeSSZ() (size int) { + size = 100 + + // Field (0) 'Message' + if s.Message == nil { + s.Message = new(ExecutionPayloadEnvelope) + } + size += s.Message.SizeSSZ() + + return +} + +// HashTreeRoot ssz hashes the SignedExecutionPayloadEnvelope object +func (s *SignedExecutionPayloadEnvelope) HashTreeRoot() ([32]byte, error) { + return ssz.HashWithDefaultHasher(s) +} + +// HashTreeRootWith ssz hashes the SignedExecutionPayloadEnvelope object with a hasher +func (s *SignedExecutionPayloadEnvelope) HashTreeRootWith(hh *ssz.Hasher) (err error) { + indx := hh.Index() + + // Field (0) 'Message' + if err = s.Message.HashTreeRootWith(hh); err != nil { + return + } + + // Field (1) 'Signature' + if size := len(s.Signature); size != 96 { + err = ssz.ErrBytesLengthFn("--.Signature", size, 96) + return + } + hh.PutBytes(s.Signature) + + hh.Merkleize(indx) + return +} diff --git a/proto/prysm/v1alpha1/p2p_messages.pb.go b/proto/prysm/v1alpha1/p2p_messages.pb.go index 82f7a3ce95..32ad7c62ec 100755 --- a/proto/prysm/v1alpha1/p2p_messages.pb.go +++ b/proto/prysm/v1alpha1/p2p_messages.pb.go @@ -10,7 +10,7 @@ import ( reflect "reflect" sync "sync" - github_com_prysmaticlabs_go_bitfield "github.com/OffchainLabs/go-bitfield" + github_com_OffchainLabs_go_bitfield "github.com/OffchainLabs/go-bitfield" github_com_OffchainLabs_prysm_v6_consensus_types_primitives "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" _ "github.com/OffchainLabs/prysm/v6/proto/eth/ext" protoreflect "google.golang.org/protobuf/reflect/protoreflect" @@ -306,9 +306,9 @@ func (x *ENRForkID) GetNextForkEpoch() github_com_OffchainLabs_prysm_v6_consensu } type MetaDataV0 struct { - state protoimpl.MessageState `protogen:"open.v1"` - SeqNumber uint64 `protobuf:"varint,1,opt,name=seq_number,json=seqNumber,proto3" json:"seq_number,omitempty"` - Attnets github_com_prysmaticlabs_go_bitfield.Bitvector64 `protobuf:"bytes,2,opt,name=attnets,proto3" json:"attnets,omitempty" cast-type:"github.com/OffchainLabs/go-bitfield.Bitvector64" ssz-size:"8"` + state protoimpl.MessageState `protogen:"open.v1"` + SeqNumber uint64 `protobuf:"varint,1,opt,name=seq_number,json=seqNumber,proto3" json:"seq_number,omitempty"` + Attnets github_com_OffchainLabs_go_bitfield.Bitvector64 `protobuf:"bytes,2,opt,name=attnets,proto3" json:"attnets,omitempty" cast-type:"github.com/OffchainLabs/go-bitfield.Bitvector64" ssz-size:"8"` unknownFields protoimpl.UnknownFields sizeCache protoimpl.SizeCache } @@ -350,18 +350,18 @@ func (x *MetaDataV0) GetSeqNumber() uint64 { return 0 } -func (x *MetaDataV0) GetAttnets() github_com_prysmaticlabs_go_bitfield.Bitvector64 { +func (x *MetaDataV0) GetAttnets() github_com_OffchainLabs_go_bitfield.Bitvector64 { if x != nil { return x.Attnets } - return github_com_prysmaticlabs_go_bitfield.Bitvector64(nil) + return github_com_OffchainLabs_go_bitfield.Bitvector64(nil) } type MetaDataV1 struct { - state protoimpl.MessageState `protogen:"open.v1"` - SeqNumber uint64 `protobuf:"varint,1,opt,name=seq_number,json=seqNumber,proto3" json:"seq_number,omitempty"` - Attnets github_com_prysmaticlabs_go_bitfield.Bitvector64 `protobuf:"bytes,2,opt,name=attnets,proto3" json:"attnets,omitempty" 
cast-type:"github.com/OffchainLabs/go-bitfield.Bitvector64" ssz-size:"8"` - Syncnets github_com_prysmaticlabs_go_bitfield.Bitvector4 `protobuf:"bytes,3,opt,name=syncnets,proto3" json:"syncnets,omitempty" cast-type:"github.com/OffchainLabs/go-bitfield.Bitvector4" ssz-size:"1"` + state protoimpl.MessageState `protogen:"open.v1"` + SeqNumber uint64 `protobuf:"varint,1,opt,name=seq_number,json=seqNumber,proto3" json:"seq_number,omitempty"` + Attnets github_com_OffchainLabs_go_bitfield.Bitvector64 `protobuf:"bytes,2,opt,name=attnets,proto3" json:"attnets,omitempty" cast-type:"github.com/OffchainLabs/go-bitfield.Bitvector64" ssz-size:"8"` + Syncnets github_com_OffchainLabs_go_bitfield.Bitvector4 `protobuf:"bytes,3,opt,name=syncnets,proto3" json:"syncnets,omitempty" cast-type:"github.com/OffchainLabs/go-bitfield.Bitvector4" ssz-size:"1"` unknownFields protoimpl.UnknownFields sizeCache protoimpl.SizeCache } @@ -403,26 +403,26 @@ func (x *MetaDataV1) GetSeqNumber() uint64 { return 0 } -func (x *MetaDataV1) GetAttnets() github_com_prysmaticlabs_go_bitfield.Bitvector64 { +func (x *MetaDataV1) GetAttnets() github_com_OffchainLabs_go_bitfield.Bitvector64 { if x != nil { return x.Attnets } - return github_com_prysmaticlabs_go_bitfield.Bitvector64(nil) + return github_com_OffchainLabs_go_bitfield.Bitvector64(nil) } -func (x *MetaDataV1) GetSyncnets() github_com_prysmaticlabs_go_bitfield.Bitvector4 { +func (x *MetaDataV1) GetSyncnets() github_com_OffchainLabs_go_bitfield.Bitvector4 { if x != nil { return x.Syncnets } - return github_com_prysmaticlabs_go_bitfield.Bitvector4(nil) + return github_com_OffchainLabs_go_bitfield.Bitvector4(nil) } type MetaDataV2 struct { - state protoimpl.MessageState `protogen:"open.v1"` - SeqNumber uint64 `protobuf:"varint,1,opt,name=seq_number,json=seqNumber,proto3" json:"seq_number,omitempty"` - Attnets github_com_prysmaticlabs_go_bitfield.Bitvector64 `protobuf:"bytes,2,opt,name=attnets,proto3" json:"attnets,omitempty" cast-type:"github.com/OffchainLabs/go-bitfield.Bitvector64" ssz-size:"8"` - Syncnets github_com_prysmaticlabs_go_bitfield.Bitvector4 `protobuf:"bytes,3,opt,name=syncnets,proto3" json:"syncnets,omitempty" cast-type:"github.com/OffchainLabs/go-bitfield.Bitvector4" ssz-size:"1"` - CustodyGroupCount uint64 `protobuf:"varint,4,opt,name=custody_group_count,json=custodyGroupCount,proto3" json:"custody_group_count,omitempty"` + state protoimpl.MessageState `protogen:"open.v1"` + SeqNumber uint64 `protobuf:"varint,1,opt,name=seq_number,json=seqNumber,proto3" json:"seq_number,omitempty"` + Attnets github_com_OffchainLabs_go_bitfield.Bitvector64 `protobuf:"bytes,2,opt,name=attnets,proto3" json:"attnets,omitempty" cast-type:"github.com/OffchainLabs/go-bitfield.Bitvector64" ssz-size:"8"` + Syncnets github_com_OffchainLabs_go_bitfield.Bitvector4 `protobuf:"bytes,3,opt,name=syncnets,proto3" json:"syncnets,omitempty" cast-type:"github.com/OffchainLabs/go-bitfield.Bitvector4" ssz-size:"1"` + CustodyGroupCount uint64 `protobuf:"varint,4,opt,name=custody_group_count,json=custodyGroupCount,proto3" json:"custody_group_count,omitempty"` unknownFields protoimpl.UnknownFields sizeCache protoimpl.SizeCache } @@ -464,18 +464,18 @@ func (x *MetaDataV2) GetSeqNumber() uint64 { return 0 } -func (x *MetaDataV2) GetAttnets() github_com_prysmaticlabs_go_bitfield.Bitvector64 { +func (x *MetaDataV2) GetAttnets() github_com_OffchainLabs_go_bitfield.Bitvector64 { if x != nil { return x.Attnets } - return github_com_prysmaticlabs_go_bitfield.Bitvector64(nil) + return 
github_com_OffchainLabs_go_bitfield.Bitvector64(nil) } -func (x *MetaDataV2) GetSyncnets() github_com_prysmaticlabs_go_bitfield.Bitvector4 { +func (x *MetaDataV2) GetSyncnets() github_com_OffchainLabs_go_bitfield.Bitvector4 { if x != nil { return x.Syncnets } - return github_com_prysmaticlabs_go_bitfield.Bitvector4(nil) + return github_com_OffchainLabs_go_bitfield.Bitvector4(nil) } func (x *MetaDataV2) GetCustodyGroupCount() uint64 { @@ -736,83 +736,83 @@ var file_proto_prysm_v1alpha1_p2p_messages_proto_rawDesc = []byte{ 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x45, 0x70, 0x6f, 0x63, 0x68, 0x52, 0x0d, 0x6e, 0x65, 0x78, 0x74, 0x46, - 0x6f, 0x72, 0x6b, 0x45, 0x70, 0x6f, 0x63, 0x68, 0x22, 0x80, 0x01, 0x0a, 0x0a, 0x4d, 0x65, 0x74, - 0x61, 0x44, 0x61, 0x74, 0x61, 0x56, 0x30, 0x12, 0x1d, 0x0a, 0x0a, 0x73, 0x65, 0x71, 0x5f, 0x6e, - 0x75, 0x6d, 0x62, 0x65, 0x72, 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, 0x52, 0x09, 0x73, 0x65, 0x71, - 0x4e, 0x75, 0x6d, 0x62, 0x65, 0x72, 0x12, 0x53, 0x0a, 0x07, 0x61, 0x74, 0x74, 0x6e, 0x65, 0x74, - 0x73, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x39, 0x82, 0xb5, 0x18, 0x30, 0x67, 0x69, 0x74, - 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x61, 0x74, 0x69, - 0x63, 0x6c, 0x61, 0x62, 0x73, 0x2f, 0x67, 0x6f, 0x2d, 0x62, 0x69, 0x74, 0x66, 0x69, 0x65, 0x6c, + 0x6f, 0x72, 0x6b, 0x45, 0x70, 0x6f, 0x63, 0x68, 0x22, 0x7f, 0x0a, 0x0a, 0x4d, 0x65, 0x74, 0x61, + 0x44, 0x61, 0x74, 0x61, 0x56, 0x30, 0x12, 0x1d, 0x0a, 0x0a, 0x73, 0x65, 0x71, 0x5f, 0x6e, 0x75, + 0x6d, 0x62, 0x65, 0x72, 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, 0x52, 0x09, 0x73, 0x65, 0x71, 0x4e, + 0x75, 0x6d, 0x62, 0x65, 0x72, 0x12, 0x52, 0x0a, 0x07, 0x61, 0x74, 0x74, 0x6e, 0x65, 0x74, 0x73, + 0x18, 0x02, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x38, 0x82, 0xb5, 0x18, 0x2f, 0x67, 0x69, 0x74, 0x68, + 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, + 0x61, 0x62, 0x73, 0x2f, 0x67, 0x6f, 0x2d, 0x62, 0x69, 0x74, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x2e, + 0x42, 0x69, 0x74, 0x76, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x36, 0x34, 0x8a, 0xb5, 0x18, 0x01, 0x38, + 0x52, 0x07, 0x61, 0x74, 0x74, 0x6e, 0x65, 0x74, 0x73, 0x22, 0xd4, 0x01, 0x0a, 0x0a, 0x4d, 0x65, + 0x74, 0x61, 0x44, 0x61, 0x74, 0x61, 0x56, 0x31, 0x12, 0x1d, 0x0a, 0x0a, 0x73, 0x65, 0x71, 0x5f, + 0x6e, 0x75, 0x6d, 0x62, 0x65, 0x72, 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, 0x52, 0x09, 0x73, 0x65, + 0x71, 0x4e, 0x75, 0x6d, 0x62, 0x65, 0x72, 0x12, 0x52, 0x0a, 0x07, 0x61, 0x74, 0x74, 0x6e, 0x65, + 0x74, 0x73, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x38, 0x82, 0xb5, 0x18, 0x2f, 0x67, 0x69, + 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, + 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x67, 0x6f, 0x2d, 0x62, 0x69, 0x74, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x2e, 0x42, 0x69, 0x74, 0x76, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x36, 0x34, 0x8a, 0xb5, 0x18, - 0x01, 0x38, 0x52, 0x07, 0x61, 0x74, 0x74, 0x6e, 0x65, 0x74, 0x73, 0x22, 0xd6, 0x01, 0x0a, 0x0a, - 0x4d, 0x65, 0x74, 0x61, 0x44, 0x61, 0x74, 0x61, 0x56, 0x31, 0x12, 0x1d, 0x0a, 0x0a, 0x73, 0x65, - 0x71, 0x5f, 0x6e, 0x75, 0x6d, 0x62, 0x65, 0x72, 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, 0x52, 0x09, - 0x73, 0x65, 0x71, 0x4e, 0x75, 0x6d, 0x62, 0x65, 0x72, 0x12, 0x53, 0x0a, 0x07, 0x61, 0x74, 0x74, - 0x6e, 0x65, 0x74, 0x73, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x39, 0x82, 0xb5, 0x18, 
0x30, - 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, - 0x61, 0x74, 0x69, 0x63, 0x6c, 0x61, 0x62, 0x73, 0x2f, 0x67, 0x6f, 0x2d, 0x62, 0x69, 0x74, 0x66, - 0x69, 0x65, 0x6c, 0x64, 0x2e, 0x42, 0x69, 0x74, 0x76, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x36, 0x34, - 0x8a, 0xb5, 0x18, 0x01, 0x38, 0x52, 0x07, 0x61, 0x74, 0x74, 0x6e, 0x65, 0x74, 0x73, 0x12, 0x54, - 0x0a, 0x08, 0x73, 0x79, 0x6e, 0x63, 0x6e, 0x65, 0x74, 0x73, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0c, - 0x42, 0x38, 0x82, 0xb5, 0x18, 0x2f, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, - 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x61, 0x74, 0x69, 0x63, 0x6c, 0x61, 0x62, 0x73, 0x2f, 0x67, - 0x6f, 0x2d, 0x62, 0x69, 0x74, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x2e, 0x42, 0x69, 0x74, 0x76, 0x65, - 0x63, 0x74, 0x6f, 0x72, 0x34, 0x8a, 0xb5, 0x18, 0x01, 0x31, 0x52, 0x08, 0x73, 0x79, 0x6e, 0x63, - 0x6e, 0x65, 0x74, 0x73, 0x22, 0x86, 0x02, 0x0a, 0x0a, 0x4d, 0x65, 0x74, 0x61, 0x44, 0x61, 0x74, - 0x61, 0x56, 0x32, 0x12, 0x1d, 0x0a, 0x0a, 0x73, 0x65, 0x71, 0x5f, 0x6e, 0x75, 0x6d, 0x62, 0x65, - 0x72, 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, 0x52, 0x09, 0x73, 0x65, 0x71, 0x4e, 0x75, 0x6d, 0x62, - 0x65, 0x72, 0x12, 0x53, 0x0a, 0x07, 0x61, 0x74, 0x74, 0x6e, 0x65, 0x74, 0x73, 0x18, 0x02, 0x20, - 0x01, 0x28, 0x0c, 0x42, 0x39, 0x82, 0xb5, 0x18, 0x30, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, - 0x63, 0x6f, 0x6d, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x61, 0x74, 0x69, 0x63, 0x6c, 0x61, 0x62, + 0x01, 0x38, 0x52, 0x07, 0x61, 0x74, 0x74, 0x6e, 0x65, 0x74, 0x73, 0x12, 0x53, 0x0a, 0x08, 0x73, + 0x79, 0x6e, 0x63, 0x6e, 0x65, 0x74, 0x73, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x37, 0x82, + 0xb5, 0x18, 0x2e, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, + 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x67, 0x6f, 0x2d, 0x62, 0x69, + 0x74, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x2e, 0x42, 0x69, 0x74, 0x76, 0x65, 0x63, 0x74, 0x6f, 0x72, + 0x34, 0x8a, 0xb5, 0x18, 0x01, 0x31, 0x52, 0x08, 0x73, 0x79, 0x6e, 0x63, 0x6e, 0x65, 0x74, 0x73, + 0x22, 0x84, 0x02, 0x0a, 0x0a, 0x4d, 0x65, 0x74, 0x61, 0x44, 0x61, 0x74, 0x61, 0x56, 0x32, 0x12, + 0x1d, 0x0a, 0x0a, 0x73, 0x65, 0x71, 0x5f, 0x6e, 0x75, 0x6d, 0x62, 0x65, 0x72, 0x18, 0x01, 0x20, + 0x01, 0x28, 0x04, 0x52, 0x09, 0x73, 0x65, 0x71, 0x4e, 0x75, 0x6d, 0x62, 0x65, 0x72, 0x12, 0x52, + 0x0a, 0x07, 0x61, 0x74, 0x74, 0x6e, 0x65, 0x74, 0x73, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0c, 0x42, + 0x38, 0x82, 0xb5, 0x18, 0x2f, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, + 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x67, 0x6f, 0x2d, + 0x62, 0x69, 0x74, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x2e, 0x42, 0x69, 0x74, 0x76, 0x65, 0x63, 0x74, + 0x6f, 0x72, 0x36, 0x34, 0x8a, 0xb5, 0x18, 0x01, 0x38, 0x52, 0x07, 0x61, 0x74, 0x74, 0x6e, 0x65, + 0x74, 0x73, 0x12, 0x53, 0x0a, 0x08, 0x73, 0x79, 0x6e, 0x63, 0x6e, 0x65, 0x74, 0x73, 0x18, 0x03, + 0x20, 0x01, 0x28, 0x0c, 0x42, 0x37, 0x82, 0xb5, 0x18, 0x2e, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, + 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x67, 0x6f, 0x2d, 0x62, 0x69, 0x74, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x2e, 0x42, 0x69, - 0x74, 0x76, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x36, 0x34, 0x8a, 0xb5, 0x18, 0x01, 0x38, 0x52, 0x07, - 0x61, 0x74, 0x74, 0x6e, 0x65, 0x74, 0x73, 0x12, 0x54, 0x0a, 0x08, 0x73, 0x79, 0x6e, 0x63, 0x6e, - 0x65, 0x74, 0x73, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x38, 0x82, 0xb5, 0x18, 0x2f, 0x67, - 0x69, 0x74, 0x68, 
0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x61, - 0x74, 0x69, 0x63, 0x6c, 0x61, 0x62, 0x73, 0x2f, 0x67, 0x6f, 0x2d, 0x62, 0x69, 0x74, 0x66, 0x69, - 0x65, 0x6c, 0x64, 0x2e, 0x42, 0x69, 0x74, 0x76, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x34, 0x8a, 0xb5, - 0x18, 0x01, 0x31, 0x52, 0x08, 0x73, 0x79, 0x6e, 0x63, 0x6e, 0x65, 0x74, 0x73, 0x12, 0x2e, 0x0a, - 0x13, 0x63, 0x75, 0x73, 0x74, 0x6f, 0x64, 0x79, 0x5f, 0x67, 0x72, 0x6f, 0x75, 0x70, 0x5f, 0x63, - 0x6f, 0x75, 0x6e, 0x74, 0x18, 0x04, 0x20, 0x01, 0x28, 0x04, 0x52, 0x11, 0x63, 0x75, 0x73, 0x74, - 0x6f, 0x64, 0x79, 0x47, 0x72, 0x6f, 0x75, 0x70, 0x43, 0x6f, 0x75, 0x6e, 0x74, 0x22, 0x97, 0x01, - 0x0a, 0x1a, 0x42, 0x6c, 0x6f, 0x62, 0x53, 0x69, 0x64, 0x65, 0x63, 0x61, 0x72, 0x73, 0x42, 0x79, - 0x52, 0x61, 0x6e, 0x67, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x63, 0x0a, 0x0a, - 0x73, 0x74, 0x61, 0x72, 0x74, 0x5f, 0x73, 0x6c, 0x6f, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, - 0x42, 0x44, 0x82, 0xb5, 0x18, 0x40, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, - 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, - 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, - 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, - 0x73, 0x2e, 0x53, 0x6c, 0x6f, 0x74, 0x52, 0x09, 0x73, 0x74, 0x61, 0x72, 0x74, 0x53, 0x6c, 0x6f, - 0x74, 0x12, 0x14, 0x0a, 0x05, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x04, - 0x52, 0x05, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x22, 0xc0, 0x01, 0x0a, 0x20, 0x44, 0x61, 0x74, 0x61, - 0x43, 0x6f, 0x6c, 0x75, 0x6d, 0x6e, 0x53, 0x69, 0x64, 0x65, 0x63, 0x61, 0x72, 0x73, 0x42, 0x79, - 0x52, 0x61, 0x6e, 0x67, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x63, 0x0a, 0x0a, - 0x73, 0x74, 0x61, 0x72, 0x74, 0x5f, 0x73, 0x6c, 0x6f, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, - 0x42, 0x44, 0x82, 0xb5, 0x18, 0x40, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, - 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, - 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, - 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, - 0x73, 0x2e, 0x53, 0x6c, 0x6f, 0x74, 0x52, 0x09, 0x73, 0x74, 0x61, 0x72, 0x74, 0x53, 0x6c, 0x6f, - 0x74, 0x12, 0x14, 0x0a, 0x05, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x04, - 0x52, 0x05, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x12, 0x21, 0x0a, 0x07, 0x63, 0x6f, 0x6c, 0x75, 0x6d, - 0x6e, 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, 0x04, 0x42, 0x07, 0x92, 0xb5, 0x18, 0x03, 0x31, 0x32, - 0x38, 0x52, 0x07, 0x63, 0x6f, 0x6c, 0x75, 0x6d, 0x6e, 0x73, 0x22, 0x5b, 0x0a, 0x20, 0x4c, 0x69, - 0x67, 0x68, 0x74, 0x43, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x73, - 0x42, 0x79, 0x52, 0x61, 0x6e, 0x67, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x21, - 0x0a, 0x0c, 0x73, 0x74, 0x61, 0x72, 0x74, 0x5f, 0x70, 0x65, 0x72, 0x69, 0x6f, 0x64, 0x18, 0x01, - 0x20, 0x01, 0x28, 0x04, 0x52, 0x0b, 0x73, 0x74, 0x61, 0x72, 0x74, 0x50, 0x65, 0x72, 0x69, 0x6f, - 0x64, 0x12, 0x14, 0x0a, 0x05, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x04, - 0x52, 0x05, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x42, 0x9a, 0x01, 0x0a, 0x19, 0x6f, 0x72, 0x67, 0x2e, - 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, - 0x6c, 0x70, 0x68, 0x61, 0x31, 0x42, 0x10, 
0x50, 0x32, 0x50, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, - 0x65, 0x73, 0x50, 0x72, 0x6f, 0x74, 0x6f, 0x50, 0x01, 0x5a, 0x39, 0x67, 0x69, 0x74, 0x68, 0x75, - 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, - 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x70, 0x72, 0x6f, 0x74, - 0x6f, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, - 0x3b, 0x65, 0x74, 0x68, 0xaa, 0x02, 0x15, 0x45, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, - 0x45, 0x74, 0x68, 0x2e, 0x56, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0xca, 0x02, 0x15, 0x45, - 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x5c, 0x45, 0x74, 0x68, 0x5c, 0x76, 0x31, 0x61, 0x6c, - 0x70, 0x68, 0x61, 0x31, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, + 0x74, 0x76, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x34, 0x8a, 0xb5, 0x18, 0x01, 0x31, 0x52, 0x08, 0x73, + 0x79, 0x6e, 0x63, 0x6e, 0x65, 0x74, 0x73, 0x12, 0x2e, 0x0a, 0x13, 0x63, 0x75, 0x73, 0x74, 0x6f, + 0x64, 0x79, 0x5f, 0x67, 0x72, 0x6f, 0x75, 0x70, 0x5f, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x18, 0x04, + 0x20, 0x01, 0x28, 0x04, 0x52, 0x11, 0x63, 0x75, 0x73, 0x74, 0x6f, 0x64, 0x79, 0x47, 0x72, 0x6f, + 0x75, 0x70, 0x43, 0x6f, 0x75, 0x6e, 0x74, 0x22, 0x97, 0x01, 0x0a, 0x1a, 0x42, 0x6c, 0x6f, 0x62, + 0x53, 0x69, 0x64, 0x65, 0x63, 0x61, 0x72, 0x73, 0x42, 0x79, 0x52, 0x61, 0x6e, 0x67, 0x65, 0x52, + 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x63, 0x0a, 0x0a, 0x73, 0x74, 0x61, 0x72, 0x74, 0x5f, + 0x73, 0x6c, 0x6f, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, 0x42, 0x44, 0x82, 0xb5, 0x18, 0x40, + 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, + 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, + 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, + 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x53, 0x6c, 0x6f, 0x74, + 0x52, 0x09, 0x73, 0x74, 0x61, 0x72, 0x74, 0x53, 0x6c, 0x6f, 0x74, 0x12, 0x14, 0x0a, 0x05, 0x63, + 0x6f, 0x75, 0x6e, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x04, 0x52, 0x05, 0x63, 0x6f, 0x75, 0x6e, + 0x74, 0x22, 0xc0, 0x01, 0x0a, 0x20, 0x44, 0x61, 0x74, 0x61, 0x43, 0x6f, 0x6c, 0x75, 0x6d, 0x6e, + 0x53, 0x69, 0x64, 0x65, 0x63, 0x61, 0x72, 0x73, 0x42, 0x79, 0x52, 0x61, 0x6e, 0x67, 0x65, 0x52, + 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x63, 0x0a, 0x0a, 0x73, 0x74, 0x61, 0x72, 0x74, 0x5f, + 0x73, 0x6c, 0x6f, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, 0x42, 0x44, 0x82, 0xb5, 0x18, 0x40, + 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, + 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, + 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, + 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x53, 0x6c, 0x6f, 0x74, + 0x52, 0x09, 0x73, 0x74, 0x61, 0x72, 0x74, 0x53, 0x6c, 0x6f, 0x74, 0x12, 0x14, 0x0a, 0x05, 0x63, + 0x6f, 0x75, 0x6e, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x04, 0x52, 0x05, 0x63, 0x6f, 0x75, 0x6e, + 0x74, 0x12, 0x21, 0x0a, 0x07, 0x63, 0x6f, 0x6c, 0x75, 0x6d, 0x6e, 0x73, 0x18, 0x03, 0x20, 0x03, + 0x28, 0x04, 0x42, 0x07, 0x92, 0xb5, 0x18, 0x03, 0x31, 0x32, 0x38, 0x52, 0x07, 0x63, 0x6f, 0x6c, + 0x75, 0x6d, 0x6e, 0x73, 0x22, 0x5b, 0x0a, 0x20, 0x4c, 0x69, 0x67, 0x68, 0x74, 0x43, 0x6c, 0x69, + 0x65, 0x6e, 0x74, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x73, 0x42, 0x79, 0x52, 0x61, 0x6e, 
0x67, + 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x21, 0x0a, 0x0c, 0x73, 0x74, 0x61, 0x72, + 0x74, 0x5f, 0x70, 0x65, 0x72, 0x69, 0x6f, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, 0x52, 0x0b, + 0x73, 0x74, 0x61, 0x72, 0x74, 0x50, 0x65, 0x72, 0x69, 0x6f, 0x64, 0x12, 0x14, 0x0a, 0x05, 0x63, + 0x6f, 0x75, 0x6e, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x04, 0x52, 0x05, 0x63, 0x6f, 0x75, 0x6e, + 0x74, 0x42, 0x9a, 0x01, 0x0a, 0x19, 0x6f, 0x72, 0x67, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, + 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x42, + 0x10, 0x50, 0x32, 0x50, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x73, 0x50, 0x72, 0x6f, 0x74, + 0x6f, 0x50, 0x01, 0x5a, 0x39, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, + 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, + 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2f, 0x70, 0x72, 0x79, 0x73, + 0x6d, 0x2f, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x3b, 0x65, 0x74, 0x68, 0xaa, 0x02, + 0x15, 0x45, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x45, 0x74, 0x68, 0x2e, 0x56, 0x31, + 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0xca, 0x02, 0x15, 0x45, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, + 0x6d, 0x5c, 0x45, 0x74, 0x68, 0x5c, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x62, 0x06, + 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, } var ( diff --git a/proto/prysm/v1alpha1/sync_committee.pb.go b/proto/prysm/v1alpha1/sync_committee.pb.go index 84628f75b6..5b4ba42a35 100755 --- a/proto/prysm/v1alpha1/sync_committee.pb.go +++ b/proto/prysm/v1alpha1/sync_committee.pb.go @@ -10,7 +10,7 @@ import ( reflect "reflect" sync "sync" - github_com_prysmaticlabs_go_bitfield "github.com/OffchainLabs/go-bitfield" + github_com_OffchainLabs_go_bitfield "github.com/OffchainLabs/go-bitfield" github_com_OffchainLabs_prysm_v6_consensus_types_primitives "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" _ "github.com/OffchainLabs/prysm/v6/proto/eth/ext" protoreflect "google.golang.org/protobuf/reflect/protoreflect" @@ -210,7 +210,7 @@ type SyncCommitteeContribution struct { Slot github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot `protobuf:"varint,1,opt,name=slot,proto3" json:"slot,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot"` BlockRoot []byte `protobuf:"bytes,2,opt,name=block_root,json=blockRoot,proto3" json:"block_root,omitempty" ssz-size:"32"` SubcommitteeIndex uint64 `protobuf:"varint,3,opt,name=subcommittee_index,json=subcommitteeIndex,proto3" json:"subcommittee_index,omitempty"` - AggregationBits github_com_prysmaticlabs_go_bitfield.Bitvector128 `protobuf:"bytes,4,opt,name=aggregation_bits,json=aggregationBits,proto3" json:"aggregation_bits,omitempty" cast-type:"github.com/OffchainLabs/go-bitfield.Bitvector128" ssz-size:"16"` + AggregationBits github_com_OffchainLabs_go_bitfield.Bitvector128 `protobuf:"bytes,4,opt,name=aggregation_bits,json=aggregationBits,proto3" json:"aggregation_bits,omitempty" cast-type:"github.com/OffchainLabs/go-bitfield.Bitvector128" ssz-size:"16"` Signature []byte `protobuf:"bytes,5,opt,name=signature,proto3" json:"signature,omitempty" ssz-size:"96"` unknownFields protoimpl.UnknownFields sizeCache protoimpl.SizeCache @@ -267,11 +267,11 @@ func (x *SyncCommitteeContribution) GetSubcommitteeIndex() uint64 { return 0 } -func (x *SyncCommitteeContribution) GetAggregationBits() github_com_prysmaticlabs_go_bitfield.Bitvector128 { +func (x 
*SyncCommitteeContribution) GetAggregationBits() github_com_OffchainLabs_go_bitfield.Bitvector128 { if x != nil { return x.AggregationBits } - return github_com_prysmaticlabs_go_bitfield.Bitvector128(nil) + return github_com_OffchainLabs_go_bitfield.Bitvector128(nil) } func (x *SyncCommitteeContribution) GetSignature() []byte { @@ -336,7 +336,7 @@ var file_proto_prysm_v1alpha1_sync_committee_proto_rawDesc = []byte{ 0x6f, 0x6e, 0x12, 0x2f, 0x0a, 0x0f, 0x73, 0x65, 0x6c, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x70, 0x72, 0x6f, 0x6f, 0x66, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x06, 0x8a, 0xb5, 0x18, 0x02, 0x39, 0x36, 0x52, 0x0e, 0x73, 0x65, 0x6c, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x50, 0x72, - 0x6f, 0x6f, 0x66, 0x22, 0xd9, 0x02, 0x0a, 0x19, 0x53, 0x79, 0x6e, 0x63, 0x43, 0x6f, 0x6d, 0x6d, + 0x6f, 0x6f, 0x66, 0x22, 0xd8, 0x02, 0x0a, 0x19, 0x53, 0x79, 0x6e, 0x63, 0x43, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x74, 0x65, 0x65, 0x43, 0x6f, 0x6e, 0x74, 0x72, 0x69, 0x62, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x58, 0x0a, 0x04, 0x73, 0x6c, 0x6f, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, 0x42, 0x44, 0x82, 0xb5, 0x18, 0x40, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, @@ -349,25 +349,25 @@ var file_proto_prysm_v1alpha1_sync_committee_proto_rawDesc = []byte{ 0x6f, 0x74, 0x12, 0x2d, 0x0a, 0x12, 0x73, 0x75, 0x62, 0x63, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x74, 0x65, 0x65, 0x5f, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x18, 0x03, 0x20, 0x01, 0x28, 0x04, 0x52, 0x11, 0x73, 0x75, 0x62, 0x63, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x74, 0x65, 0x65, 0x49, 0x6e, 0x64, 0x65, - 0x78, 0x12, 0x66, 0x0a, 0x10, 0x61, 0x67, 0x67, 0x72, 0x65, 0x67, 0x61, 0x74, 0x69, 0x6f, 0x6e, - 0x5f, 0x62, 0x69, 0x74, 0x73, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x3b, 0x82, 0xb5, 0x18, - 0x31, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x70, 0x72, 0x79, 0x73, - 0x6d, 0x61, 0x74, 0x69, 0x63, 0x6c, 0x61, 0x62, 0x73, 0x2f, 0x67, 0x6f, 0x2d, 0x62, 0x69, 0x74, - 0x66, 0x69, 0x65, 0x6c, 0x64, 0x2e, 0x42, 0x69, 0x74, 0x76, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x31, - 0x32, 0x38, 0x8a, 0xb5, 0x18, 0x02, 0x31, 0x36, 0x52, 0x0f, 0x61, 0x67, 0x67, 0x72, 0x65, 0x67, - 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x42, 0x69, 0x74, 0x73, 0x12, 0x24, 0x0a, 0x09, 0x73, 0x69, 0x67, - 0x6e, 0x61, 0x74, 0x75, 0x72, 0x65, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x06, 0x8a, 0xb5, - 0x18, 0x02, 0x39, 0x36, 0x52, 0x09, 0x73, 0x69, 0x67, 0x6e, 0x61, 0x74, 0x75, 0x72, 0x65, 0x42, - 0x96, 0x01, 0x0a, 0x19, 0x6f, 0x72, 0x67, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, - 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x42, 0x12, 0x53, - 0x79, 0x6e, 0x63, 0x43, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x74, 0x65, 0x65, 0x50, 0x72, 0x6f, 0x74, - 0x6f, 0x50, 0x01, 0x5a, 0x39, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, - 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, - 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2f, 0x70, 0x72, 0x79, 0x73, - 0x6d, 0x2f, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x3b, 0x65, 0x74, 0x68, 0xaa, 0x02, - 0x0f, 0x45, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x45, 0x74, 0x68, 0x2e, 0x56, 0x31, - 0xca, 0x02, 0x15, 0x45, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x5c, 0x45, 0x74, 0x68, 0x5c, - 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, + 0x78, 0x12, 0x65, 0x0a, 0x10, 0x61, 0x67, 0x67, 0x72, 0x65, 0x67, 0x61, 0x74, 0x69, 0x6f, 0x6e, + 0x5f, 0x62, 0x69, 
0x74, 0x73, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x3a, 0x82, 0xb5, 0x18, + 0x30, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, + 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x67, 0x6f, 0x2d, 0x62, 0x69, 0x74, 0x66, + 0x69, 0x65, 0x6c, 0x64, 0x2e, 0x42, 0x69, 0x74, 0x76, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x31, 0x32, + 0x38, 0x8a, 0xb5, 0x18, 0x02, 0x31, 0x36, 0x52, 0x0f, 0x61, 0x67, 0x67, 0x72, 0x65, 0x67, 0x61, + 0x74, 0x69, 0x6f, 0x6e, 0x42, 0x69, 0x74, 0x73, 0x12, 0x24, 0x0a, 0x09, 0x73, 0x69, 0x67, 0x6e, + 0x61, 0x74, 0x75, 0x72, 0x65, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x06, 0x8a, 0xb5, 0x18, + 0x02, 0x39, 0x36, 0x52, 0x09, 0x73, 0x69, 0x67, 0x6e, 0x61, 0x74, 0x75, 0x72, 0x65, 0x42, 0x96, + 0x01, 0x0a, 0x19, 0x6f, 0x72, 0x67, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, + 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x42, 0x12, 0x53, 0x79, + 0x6e, 0x63, 0x43, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x74, 0x65, 0x65, 0x50, 0x72, 0x6f, 0x74, 0x6f, + 0x50, 0x01, 0x5a, 0x39, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, + 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, + 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, + 0x2f, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x3b, 0x65, 0x74, 0x68, 0xaa, 0x02, 0x0f, + 0x45, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x45, 0x74, 0x68, 0x2e, 0x56, 0x31, 0xca, + 0x02, 0x15, 0x45, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x5c, 0x45, 0x74, 0x68, 0x5c, 0x76, + 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, } var ( diff --git a/proto/ssz_proto_library.bzl b/proto/ssz_proto_library.bzl index 1b677a977c..44d1fa9372 100644 --- a/proto/ssz_proto_library.bzl +++ b/proto/ssz_proto_library.bzl @@ -43,6 +43,11 @@ mainnet = { "cells_per_blob.size": "128", "kzg_commitments_inclusion_proof_depth.size": "4", "proposer_lookahead_size": "64", # (MIN_SEED_LOOKAHEAD + 1) * SLOTS_PER_EPOCH + "ptc.size": "64", # Gloas: Payload Timeliness Committee aggregation bits (PTC_SIZE = 512) + "ptc.type": "github.com/OffchainLabs/go-bitfield.Bitvector512", + "payload_attestation.size": "4", # Gloas: MAX_PAYLOAD_ATTESTATIONS defined in block body + "execution_payload_availability.size": "1024", # Gloas: SLOTS_PER_HISTORICAL_ROOT + "builder_pending_payments.size": "64", # Gloas: vector length (2 * SLOTS_PER_EPOCH) } minimal = { @@ -82,6 +87,11 @@ minimal = { "cells_per_blob.size": "128", "kzg_commitments_inclusion_proof_depth.size": "4", "proposer_lookahead_size": "16", # (MIN_SEED_LOOKAHEAD + 1) * SLOTS_PER_EPOCH + "ptc.size": "1", # Gloas: Payload Timeliness Committee aggregation bits + "ptc.type": "github.com/OffchainLabs/go-bitfield.Bitvector2", + "payload_attestation.size": "4", # Gloas: MAX_PAYLOAD_ATTESTATIONS defined in block body + "execution_payload_availability.size": "8", # Gloas: SLOTS_PER_HISTORICAL_ROOT + "builder_pending_payments.size": "16" # Gloas: vector length (2 * SLOTS_PER_EPOCH) } ###### Rules definitions ####### diff --git a/proto/ssz_query/testing/test_containers.pb.go b/proto/ssz_query/testing/test_containers.pb.go index 141885b2ce..b1af1311c1 100755 --- a/proto/ssz_query/testing/test_containers.pb.go +++ b/proto/ssz_query/testing/test_containers.pb.go @@ -10,7 +10,7 @@ import ( reflect "reflect" sync "sync" - github_com_prysmaticlabs_go_bitfield "github.com/OffchainLabs/go-bitfield" + 
github_com_OffchainLabs_go_bitfield "github.com/OffchainLabs/go-bitfield" _ "github.com/OffchainLabs/prysm/v6/proto/eth/ext" protoreflect "google.golang.org/protobuf/reflect/protoreflect" protoimpl "google.golang.org/protobuf/runtime/protoimpl" @@ -76,17 +76,17 @@ func (x *FixedNestedContainer) GetValue2() []byte { } type FixedTestContainer struct { - state protoimpl.MessageState `protogen:"open.v1"` - FieldUint32 uint32 `protobuf:"varint,1,opt,name=field_uint32,json=fieldUint32,proto3" json:"field_uint32,omitempty"` - FieldUint64 uint64 `protobuf:"varint,2,opt,name=field_uint64,json=fieldUint64,proto3" json:"field_uint64,omitempty"` - FieldBool bool `protobuf:"varint,3,opt,name=field_bool,json=fieldBool,proto3" json:"field_bool,omitempty"` - FieldBytes32 []byte `protobuf:"bytes,4,opt,name=field_bytes32,json=fieldBytes32,proto3" json:"field_bytes32,omitempty" ssz-size:"32"` - Nested *FixedNestedContainer `protobuf:"bytes,5,opt,name=nested,proto3" json:"nested,omitempty"` - VectorField []uint64 `protobuf:"varint,6,rep,packed,name=vector_field,json=vectorField,proto3" json:"vector_field,omitempty" ssz-size:"24"` - TwoDimensionBytesField [][]byte `protobuf:"bytes,7,rep,name=two_dimension_bytes_field,json=twoDimensionBytesField,proto3" json:"two_dimension_bytes_field,omitempty" ssz-size:"5,32"` - Bitvector64Field github_com_prysmaticlabs_go_bitfield.Bitvector64 `protobuf:"bytes,8,opt,name=bitvector64_field,json=bitvector64Field,proto3" json:"bitvector64_field,omitempty" cast-type:"github.com/OffchainLabs/go-bitfield.Bitvector64" ssz-size:"8"` - Bitvector512Field github_com_prysmaticlabs_go_bitfield.Bitvector512 `protobuf:"bytes,9,opt,name=bitvector512_field,json=bitvector512Field,proto3" json:"bitvector512_field,omitempty" cast-type:"github.com/OffchainLabs/go-bitfield.Bitvector512" ssz-size:"64"` - TrailingField []byte `protobuf:"bytes,10,opt,name=trailing_field,json=trailingField,proto3" json:"trailing_field,omitempty" ssz-size:"56"` + state protoimpl.MessageState `protogen:"open.v1"` + FieldUint32 uint32 `protobuf:"varint,1,opt,name=field_uint32,json=fieldUint32,proto3" json:"field_uint32,omitempty"` + FieldUint64 uint64 `protobuf:"varint,2,opt,name=field_uint64,json=fieldUint64,proto3" json:"field_uint64,omitempty"` + FieldBool bool `protobuf:"varint,3,opt,name=field_bool,json=fieldBool,proto3" json:"field_bool,omitempty"` + FieldBytes32 []byte `protobuf:"bytes,4,opt,name=field_bytes32,json=fieldBytes32,proto3" json:"field_bytes32,omitempty" ssz-size:"32"` + Nested *FixedNestedContainer `protobuf:"bytes,5,opt,name=nested,proto3" json:"nested,omitempty"` + VectorField []uint64 `protobuf:"varint,6,rep,packed,name=vector_field,json=vectorField,proto3" json:"vector_field,omitempty" ssz-size:"24"` + TwoDimensionBytesField [][]byte `protobuf:"bytes,7,rep,name=two_dimension_bytes_field,json=twoDimensionBytesField,proto3" json:"two_dimension_bytes_field,omitempty" ssz-size:"5,32"` + Bitvector64Field github_com_OffchainLabs_go_bitfield.Bitvector64 `protobuf:"bytes,8,opt,name=bitvector64_field,json=bitvector64Field,proto3" json:"bitvector64_field,omitempty" cast-type:"github.com/OffchainLabs/go-bitfield.Bitvector64" ssz-size:"8"` + Bitvector512Field github_com_OffchainLabs_go_bitfield.Bitvector512 `protobuf:"bytes,9,opt,name=bitvector512_field,json=bitvector512Field,proto3" json:"bitvector512_field,omitempty" cast-type:"github.com/OffchainLabs/go-bitfield.Bitvector512" ssz-size:"64"` + TrailingField []byte `protobuf:"bytes,10,opt,name=trailing_field,json=trailingField,proto3" 
json:"trailing_field,omitempty" ssz-size:"56"` unknownFields protoimpl.UnknownFields sizeCache protoimpl.SizeCache } @@ -170,18 +170,18 @@ func (x *FixedTestContainer) GetTwoDimensionBytesField() [][]byte { return nil } -func (x *FixedTestContainer) GetBitvector64Field() github_com_prysmaticlabs_go_bitfield.Bitvector64 { +func (x *FixedTestContainer) GetBitvector64Field() github_com_OffchainLabs_go_bitfield.Bitvector64 { if x != nil { return x.Bitvector64Field } - return github_com_prysmaticlabs_go_bitfield.Bitvector64(nil) + return github_com_OffchainLabs_go_bitfield.Bitvector64(nil) } -func (x *FixedTestContainer) GetBitvector512Field() github_com_prysmaticlabs_go_bitfield.Bitvector512 { +func (x *FixedTestContainer) GetBitvector512Field() github_com_OffchainLabs_go_bitfield.Bitvector512 { if x != nil { return x.Bitvector512Field } - return github_com_prysmaticlabs_go_bitfield.Bitvector512(nil) + return github_com_OffchainLabs_go_bitfield.Bitvector512(nil) } func (x *FixedTestContainer) GetTrailingField() []byte { @@ -304,16 +304,16 @@ func (x *VariableOuterContainer) GetInner_2() *VariableNestedContainer { } type VariableTestContainer struct { - state protoimpl.MessageState `protogen:"open.v1"` - LeadingField []byte `protobuf:"bytes,1,opt,name=leading_field,json=leadingField,proto3" json:"leading_field,omitempty" ssz-size:"32"` - FieldListUint64 []uint64 `protobuf:"varint,2,rep,packed,name=field_list_uint64,json=fieldListUint64,proto3" json:"field_list_uint64,omitempty" ssz-max:"2048"` - FieldListContainer []*FixedNestedContainer `protobuf:"bytes,3,rep,name=field_list_container,json=fieldListContainer,proto3" json:"field_list_container,omitempty" ssz-max:"128"` - FieldListBytes32 [][]byte `protobuf:"bytes,4,rep,name=field_list_bytes32,json=fieldListBytes32,proto3" json:"field_list_bytes32,omitempty" ssz-max:"100" ssz-size:"?,32"` - Nested *VariableNestedContainer `protobuf:"bytes,5,opt,name=nested,proto3" json:"nested,omitempty"` - VariableContainerList []*VariableOuterContainer `protobuf:"bytes,6,rep,name=variable_container_list,json=variableContainerList,proto3" json:"variable_container_list,omitempty" ssz-max:"10"` - BitlistField github_com_prysmaticlabs_go_bitfield.Bitlist `protobuf:"bytes,7,opt,name=bitlist_field,json=bitlistField,proto3" json:"bitlist_field,omitempty" cast-type:"github.com/OffchainLabs/go-bitfield.Bitlist" ssz-max:"2048"` - NestedListField [][]byte `protobuf:"bytes,8,rep,name=nested_list_field,json=nestedListField,proto3" json:"nested_list_field,omitempty" ssz-max:"100,50" ssz-size:"?,?"` - TrailingField []byte `protobuf:"bytes,9,opt,name=trailing_field,json=trailingField,proto3" json:"trailing_field,omitempty" ssz-size:"56"` + state protoimpl.MessageState `protogen:"open.v1"` + LeadingField []byte `protobuf:"bytes,1,opt,name=leading_field,json=leadingField,proto3" json:"leading_field,omitempty" ssz-size:"32"` + FieldListUint64 []uint64 `protobuf:"varint,2,rep,packed,name=field_list_uint64,json=fieldListUint64,proto3" json:"field_list_uint64,omitempty" ssz-max:"2048"` + FieldListContainer []*FixedNestedContainer `protobuf:"bytes,3,rep,name=field_list_container,json=fieldListContainer,proto3" json:"field_list_container,omitempty" ssz-max:"128"` + FieldListBytes32 [][]byte `protobuf:"bytes,4,rep,name=field_list_bytes32,json=fieldListBytes32,proto3" json:"field_list_bytes32,omitempty" ssz-max:"100" ssz-size:"?,32"` + Nested *VariableNestedContainer `protobuf:"bytes,5,opt,name=nested,proto3" json:"nested,omitempty"` + VariableContainerList []*VariableOuterContainer 
`protobuf:"bytes,6,rep,name=variable_container_list,json=variableContainerList,proto3" json:"variable_container_list,omitempty" ssz-max:"10"` + BitlistField github_com_OffchainLabs_go_bitfield.Bitlist `protobuf:"bytes,7,opt,name=bitlist_field,json=bitlistField,proto3" json:"bitlist_field,omitempty" cast-type:"github.com/OffchainLabs/go-bitfield.Bitlist" ssz-max:"2048"` + NestedListField [][]byte `protobuf:"bytes,8,rep,name=nested_list_field,json=nestedListField,proto3" json:"nested_list_field,omitempty" ssz-max:"100,50" ssz-size:"?,?"` + TrailingField []byte `protobuf:"bytes,9,opt,name=trailing_field,json=trailingField,proto3" json:"trailing_field,omitempty" ssz-size:"56"` unknownFields protoimpl.UnknownFields sizeCache protoimpl.SizeCache } @@ -390,11 +390,11 @@ func (x *VariableTestContainer) GetVariableContainerList() []*VariableOuterConta return nil } -func (x *VariableTestContainer) GetBitlistField() github_com_prysmaticlabs_go_bitfield.Bitlist { +func (x *VariableTestContainer) GetBitlistField() github_com_OffchainLabs_go_bitfield.Bitlist { if x != nil { return x.BitlistField } - return github_com_prysmaticlabs_go_bitfield.Bitlist(nil) + return github_com_OffchainLabs_go_bitfield.Bitlist(nil) } func (x *VariableTestContainer) GetNestedListField() [][]byte { @@ -424,7 +424,7 @@ var file_proto_ssz_query_testing_test_containers_proto_rawDesc = []byte{ 0x06, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x31, 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, 0x52, 0x06, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x31, 0x12, 0x1e, 0x0a, 0x06, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x32, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x06, 0x8a, 0xb5, 0x18, 0x02, 0x33, 0x32, 0x52, 0x06, 0x76, - 0x61, 0x6c, 0x75, 0x65, 0x32, 0x22, 0xd0, 0x04, 0x0a, 0x12, 0x46, 0x69, 0x78, 0x65, 0x64, 0x54, + 0x61, 0x6c, 0x75, 0x65, 0x32, 0x22, 0xce, 0x04, 0x0a, 0x12, 0x46, 0x69, 0x78, 0x65, 0x64, 0x54, 0x65, 0x73, 0x74, 0x43, 0x6f, 0x6e, 0x74, 0x61, 0x69, 0x6e, 0x65, 0x72, 0x12, 0x21, 0x0a, 0x0c, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x5f, 0x75, 0x69, 0x6e, 0x74, 0x33, 0x32, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x0b, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x55, 0x69, 0x6e, 0x74, 0x33, 0x32, 0x12, @@ -445,87 +445,87 @@ var file_proto_ssz_query_testing_test_containers_proto_rawDesc = []byte{ 0x6e, 0x5f, 0x62, 0x79, 0x74, 0x65, 0x73, 0x5f, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x18, 0x07, 0x20, 0x03, 0x28, 0x0c, 0x42, 0x08, 0x8a, 0xb5, 0x18, 0x04, 0x35, 0x2c, 0x33, 0x32, 0x52, 0x16, 0x74, 0x77, 0x6f, 0x44, 0x69, 0x6d, 0x65, 0x6e, 0x73, 0x69, 0x6f, 0x6e, 0x42, 0x79, 0x74, 0x65, 0x73, - 0x46, 0x69, 0x65, 0x6c, 0x64, 0x12, 0x66, 0x0a, 0x11, 0x62, 0x69, 0x74, 0x76, 0x65, 0x63, 0x74, + 0x46, 0x69, 0x65, 0x6c, 0x64, 0x12, 0x65, 0x0a, 0x11, 0x62, 0x69, 0x74, 0x76, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x36, 0x34, 0x5f, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x18, 0x08, 0x20, 0x01, 0x28, 0x0c, - 0x42, 0x39, 0x82, 0xb5, 0x18, 0x30, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, - 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x61, 0x74, 0x69, 0x63, 0x6c, 0x61, 0x62, 0x73, 0x2f, 0x67, - 0x6f, 0x2d, 0x62, 0x69, 0x74, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x2e, 0x42, 0x69, 0x74, 0x76, 0x65, - 0x63, 0x74, 0x6f, 0x72, 0x36, 0x34, 0x8a, 0xb5, 0x18, 0x01, 0x38, 0x52, 0x10, 0x62, 0x69, 0x74, - 0x76, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x36, 0x34, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x12, 0x6a, 0x0a, - 0x12, 0x62, 0x69, 0x74, 0x76, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x35, 0x31, 0x32, 0x5f, 0x66, 0x69, - 0x65, 0x6c, 0x64, 0x18, 0x09, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x3b, 0x82, 0xb5, 0x18, 0x31, 0x67, - 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 
0x6f, 0x6d, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x61, - 0x74, 0x69, 0x63, 0x6c, 0x61, 0x62, 0x73, 0x2f, 0x67, 0x6f, 0x2d, 0x62, 0x69, 0x74, 0x66, 0x69, - 0x65, 0x6c, 0x64, 0x2e, 0x42, 0x69, 0x74, 0x76, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x35, 0x31, 0x32, - 0x8a, 0xb5, 0x18, 0x02, 0x36, 0x34, 0x52, 0x11, 0x62, 0x69, 0x74, 0x76, 0x65, 0x63, 0x74, 0x6f, - 0x72, 0x35, 0x31, 0x32, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x12, 0x2d, 0x0a, 0x0e, 0x74, 0x72, 0x61, - 0x69, 0x6c, 0x69, 0x6e, 0x67, 0x5f, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x18, 0x0a, 0x20, 0x01, 0x28, - 0x0c, 0x42, 0x06, 0x8a, 0xb5, 0x18, 0x02, 0x35, 0x36, 0x52, 0x0d, 0x74, 0x72, 0x61, 0x69, 0x6c, - 0x69, 0x6e, 0x67, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x22, 0xa5, 0x01, 0x0a, 0x17, 0x56, 0x61, 0x72, - 0x69, 0x61, 0x62, 0x6c, 0x65, 0x4e, 0x65, 0x73, 0x74, 0x65, 0x64, 0x43, 0x6f, 0x6e, 0x74, 0x61, - 0x69, 0x6e, 0x65, 0x72, 0x12, 0x16, 0x0a, 0x06, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x31, 0x18, 0x01, - 0x20, 0x01, 0x28, 0x04, 0x52, 0x06, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x31, 0x12, 0x33, 0x0a, 0x11, - 0x66, 0x69, 0x65, 0x6c, 0x64, 0x5f, 0x6c, 0x69, 0x73, 0x74, 0x5f, 0x75, 0x69, 0x6e, 0x74, 0x36, - 0x34, 0x18, 0x02, 0x20, 0x03, 0x28, 0x04, 0x42, 0x07, 0x92, 0xb5, 0x18, 0x03, 0x31, 0x30, 0x30, - 0x52, 0x0f, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x4c, 0x69, 0x73, 0x74, 0x55, 0x69, 0x6e, 0x74, 0x36, - 0x34, 0x12, 0x3d, 0x0a, 0x11, 0x6e, 0x65, 0x73, 0x74, 0x65, 0x64, 0x5f, 0x6c, 0x69, 0x73, 0x74, - 0x5f, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x18, 0x03, 0x20, 0x03, 0x28, 0x0c, 0x42, 0x11, 0x8a, 0xb5, - 0x18, 0x03, 0x3f, 0x2c, 0x3f, 0x92, 0xb5, 0x18, 0x06, 0x31, 0x30, 0x30, 0x2c, 0x35, 0x30, 0x52, - 0x0f, 0x6e, 0x65, 0x73, 0x74, 0x65, 0x64, 0x4c, 0x69, 0x73, 0x74, 0x46, 0x69, 0x65, 0x6c, 0x64, - 0x22, 0x8e, 0x01, 0x0a, 0x16, 0x56, 0x61, 0x72, 0x69, 0x61, 0x62, 0x6c, 0x65, 0x4f, 0x75, 0x74, - 0x65, 0x72, 0x43, 0x6f, 0x6e, 0x74, 0x61, 0x69, 0x6e, 0x65, 0x72, 0x12, 0x39, 0x0a, 0x07, 0x69, - 0x6e, 0x6e, 0x65, 0x72, 0x5f, 0x31, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x20, 0x2e, 0x74, - 0x65, 0x73, 0x74, 0x69, 0x6e, 0x67, 0x2e, 0x56, 0x61, 0x72, 0x69, 0x61, 0x62, 0x6c, 0x65, 0x4e, - 0x65, 0x73, 0x74, 0x65, 0x64, 0x43, 0x6f, 0x6e, 0x74, 0x61, 0x69, 0x6e, 0x65, 0x72, 0x52, 0x06, - 0x69, 0x6e, 0x6e, 0x65, 0x72, 0x31, 0x12, 0x39, 0x0a, 0x07, 0x69, 0x6e, 0x6e, 0x65, 0x72, 0x5f, - 0x32, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x20, 0x2e, 0x74, 0x65, 0x73, 0x74, 0x69, 0x6e, - 0x67, 0x2e, 0x56, 0x61, 0x72, 0x69, 0x61, 0x62, 0x6c, 0x65, 0x4e, 0x65, 0x73, 0x74, 0x65, 0x64, - 0x43, 0x6f, 0x6e, 0x74, 0x61, 0x69, 0x6e, 0x65, 0x72, 0x52, 0x06, 0x69, 0x6e, 0x6e, 0x65, 0x72, - 0x32, 0x22, 0xfb, 0x04, 0x0a, 0x15, 0x56, 0x61, 0x72, 0x69, 0x61, 0x62, 0x6c, 0x65, 0x54, 0x65, - 0x73, 0x74, 0x43, 0x6f, 0x6e, 0x74, 0x61, 0x69, 0x6e, 0x65, 0x72, 0x12, 0x2b, 0x0a, 0x0d, 0x6c, - 0x65, 0x61, 0x64, 0x69, 0x6e, 0x67, 0x5f, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x18, 0x01, 0x20, 0x01, - 0x28, 0x0c, 0x42, 0x06, 0x8a, 0xb5, 0x18, 0x02, 0x33, 0x32, 0x52, 0x0c, 0x6c, 0x65, 0x61, 0x64, - 0x69, 0x6e, 0x67, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x12, 0x34, 0x0a, 0x11, 0x66, 0x69, 0x65, 0x6c, - 0x64, 0x5f, 0x6c, 0x69, 0x73, 0x74, 0x5f, 0x75, 0x69, 0x6e, 0x74, 0x36, 0x34, 0x18, 0x02, 0x20, - 0x03, 0x28, 0x04, 0x42, 0x08, 0x92, 0xb5, 0x18, 0x04, 0x32, 0x30, 0x34, 0x38, 0x52, 0x0f, 0x66, - 0x69, 0x65, 0x6c, 0x64, 0x4c, 0x69, 0x73, 0x74, 0x55, 0x69, 0x6e, 0x74, 0x36, 0x34, 0x12, 0x58, - 0x0a, 0x14, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x5f, 0x6c, 0x69, 0x73, 0x74, 0x5f, 0x63, 0x6f, 0x6e, - 0x74, 0x61, 0x69, 0x6e, 0x65, 0x72, 0x18, 0x03, 0x20, 0x03, 0x28, 
0x0b, 0x32, 0x1d, 0x2e, 0x74, - 0x65, 0x73, 0x74, 0x69, 0x6e, 0x67, 0x2e, 0x46, 0x69, 0x78, 0x65, 0x64, 0x4e, 0x65, 0x73, 0x74, - 0x65, 0x64, 0x43, 0x6f, 0x6e, 0x74, 0x61, 0x69, 0x6e, 0x65, 0x72, 0x42, 0x07, 0x92, 0xb5, 0x18, - 0x03, 0x31, 0x32, 0x38, 0x52, 0x12, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x4c, 0x69, 0x73, 0x74, 0x43, - 0x6f, 0x6e, 0x74, 0x61, 0x69, 0x6e, 0x65, 0x72, 0x12, 0x3d, 0x0a, 0x12, 0x66, 0x69, 0x65, 0x6c, - 0x64, 0x5f, 0x6c, 0x69, 0x73, 0x74, 0x5f, 0x62, 0x79, 0x74, 0x65, 0x73, 0x33, 0x32, 0x18, 0x04, - 0x20, 0x03, 0x28, 0x0c, 0x42, 0x0f, 0x8a, 0xb5, 0x18, 0x04, 0x3f, 0x2c, 0x33, 0x32, 0x92, 0xb5, - 0x18, 0x03, 0x31, 0x30, 0x30, 0x52, 0x10, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x4c, 0x69, 0x73, 0x74, - 0x42, 0x79, 0x74, 0x65, 0x73, 0x33, 0x32, 0x12, 0x38, 0x0a, 0x06, 0x6e, 0x65, 0x73, 0x74, 0x65, - 0x64, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x20, 0x2e, 0x74, 0x65, 0x73, 0x74, 0x69, 0x6e, - 0x67, 0x2e, 0x56, 0x61, 0x72, 0x69, 0x61, 0x62, 0x6c, 0x65, 0x4e, 0x65, 0x73, 0x74, 0x65, 0x64, - 0x43, 0x6f, 0x6e, 0x74, 0x61, 0x69, 0x6e, 0x65, 0x72, 0x52, 0x06, 0x6e, 0x65, 0x73, 0x74, 0x65, - 0x64, 0x12, 0x5f, 0x0a, 0x17, 0x76, 0x61, 0x72, 0x69, 0x61, 0x62, 0x6c, 0x65, 0x5f, 0x63, 0x6f, - 0x6e, 0x74, 0x61, 0x69, 0x6e, 0x65, 0x72, 0x5f, 0x6c, 0x69, 0x73, 0x74, 0x18, 0x06, 0x20, 0x03, - 0x28, 0x0b, 0x32, 0x1f, 0x2e, 0x74, 0x65, 0x73, 0x74, 0x69, 0x6e, 0x67, 0x2e, 0x56, 0x61, 0x72, - 0x69, 0x61, 0x62, 0x6c, 0x65, 0x4f, 0x75, 0x74, 0x65, 0x72, 0x43, 0x6f, 0x6e, 0x74, 0x61, 0x69, - 0x6e, 0x65, 0x72, 0x42, 0x06, 0x92, 0xb5, 0x18, 0x02, 0x31, 0x30, 0x52, 0x15, 0x76, 0x61, 0x72, - 0x69, 0x61, 0x62, 0x6c, 0x65, 0x43, 0x6f, 0x6e, 0x74, 0x61, 0x69, 0x6e, 0x65, 0x72, 0x4c, 0x69, - 0x73, 0x74, 0x12, 0x5d, 0x0a, 0x0d, 0x62, 0x69, 0x74, 0x6c, 0x69, 0x73, 0x74, 0x5f, 0x66, 0x69, - 0x65, 0x6c, 0x64, 0x18, 0x07, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x38, 0x82, 0xb5, 0x18, 0x2c, 0x67, - 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x61, - 0x74, 0x69, 0x63, 0x6c, 0x61, 0x62, 0x73, 0x2f, 0x67, 0x6f, 0x2d, 0x62, 0x69, 0x74, 0x66, 0x69, - 0x65, 0x6c, 0x64, 0x2e, 0x42, 0x69, 0x74, 0x6c, 0x69, 0x73, 0x74, 0x92, 0xb5, 0x18, 0x04, 0x32, - 0x30, 0x34, 0x38, 0x52, 0x0c, 0x62, 0x69, 0x74, 0x6c, 0x69, 0x73, 0x74, 0x46, 0x69, 0x65, 0x6c, - 0x64, 0x12, 0x3d, 0x0a, 0x11, 0x6e, 0x65, 0x73, 0x74, 0x65, 0x64, 0x5f, 0x6c, 0x69, 0x73, 0x74, - 0x5f, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x18, 0x08, 0x20, 0x03, 0x28, 0x0c, 0x42, 0x11, 0x8a, 0xb5, - 0x18, 0x03, 0x3f, 0x2c, 0x3f, 0x92, 0xb5, 0x18, 0x06, 0x31, 0x30, 0x30, 0x2c, 0x35, 0x30, 0x52, - 0x0f, 0x6e, 0x65, 0x73, 0x74, 0x65, 0x64, 0x4c, 0x69, 0x73, 0x74, 0x46, 0x69, 0x65, 0x6c, 0x64, - 0x12, 0x2d, 0x0a, 0x0e, 0x74, 0x72, 0x61, 0x69, 0x6c, 0x69, 0x6e, 0x67, 0x5f, 0x66, 0x69, 0x65, - 0x6c, 0x64, 0x18, 0x09, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x06, 0x8a, 0xb5, 0x18, 0x02, 0x35, 0x36, - 0x52, 0x0d, 0x74, 0x72, 0x61, 0x69, 0x6c, 0x69, 0x6e, 0x67, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x42, - 0x42, 0x5a, 0x40, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, - 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, - 0x2f, 0x76, 0x36, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2f, 0x73, 0x73, 0x7a, 0x5f, 0x71, 0x75, - 0x65, 0x72, 0x79, 0x2f, 0x74, 0x65, 0x73, 0x74, 0x69, 0x6e, 0x67, 0x3b, 0x74, 0x65, 0x73, 0x74, - 0x69, 0x6e, 0x67, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, + 0x42, 0x38, 0x82, 0xb5, 0x18, 0x2f, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, + 0x2f, 0x4f, 0x66, 0x66, 
0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x67, 0x6f, + 0x2d, 0x62, 0x69, 0x74, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x2e, 0x42, 0x69, 0x74, 0x76, 0x65, 0x63, + 0x74, 0x6f, 0x72, 0x36, 0x34, 0x8a, 0xb5, 0x18, 0x01, 0x38, 0x52, 0x10, 0x62, 0x69, 0x74, 0x76, + 0x65, 0x63, 0x74, 0x6f, 0x72, 0x36, 0x34, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x12, 0x69, 0x0a, 0x12, + 0x62, 0x69, 0x74, 0x76, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x35, 0x31, 0x32, 0x5f, 0x66, 0x69, 0x65, + 0x6c, 0x64, 0x18, 0x09, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x3a, 0x82, 0xb5, 0x18, 0x30, 0x67, 0x69, + 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, + 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x67, 0x6f, 0x2d, 0x62, 0x69, 0x74, 0x66, 0x69, 0x65, 0x6c, + 0x64, 0x2e, 0x42, 0x69, 0x74, 0x76, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x35, 0x31, 0x32, 0x8a, 0xb5, + 0x18, 0x02, 0x36, 0x34, 0x52, 0x11, 0x62, 0x69, 0x74, 0x76, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x35, + 0x31, 0x32, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x12, 0x2d, 0x0a, 0x0e, 0x74, 0x72, 0x61, 0x69, 0x6c, + 0x69, 0x6e, 0x67, 0x5f, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x18, 0x0a, 0x20, 0x01, 0x28, 0x0c, 0x42, + 0x06, 0x8a, 0xb5, 0x18, 0x02, 0x35, 0x36, 0x52, 0x0d, 0x74, 0x72, 0x61, 0x69, 0x6c, 0x69, 0x6e, + 0x67, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x22, 0xa5, 0x01, 0x0a, 0x17, 0x56, 0x61, 0x72, 0x69, 0x61, + 0x62, 0x6c, 0x65, 0x4e, 0x65, 0x73, 0x74, 0x65, 0x64, 0x43, 0x6f, 0x6e, 0x74, 0x61, 0x69, 0x6e, + 0x65, 0x72, 0x12, 0x16, 0x0a, 0x06, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x31, 0x18, 0x01, 0x20, 0x01, + 0x28, 0x04, 0x52, 0x06, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x31, 0x12, 0x33, 0x0a, 0x11, 0x66, 0x69, + 0x65, 0x6c, 0x64, 0x5f, 0x6c, 0x69, 0x73, 0x74, 0x5f, 0x75, 0x69, 0x6e, 0x74, 0x36, 0x34, 0x18, + 0x02, 0x20, 0x03, 0x28, 0x04, 0x42, 0x07, 0x92, 0xb5, 0x18, 0x03, 0x31, 0x30, 0x30, 0x52, 0x0f, + 0x66, 0x69, 0x65, 0x6c, 0x64, 0x4c, 0x69, 0x73, 0x74, 0x55, 0x69, 0x6e, 0x74, 0x36, 0x34, 0x12, + 0x3d, 0x0a, 0x11, 0x6e, 0x65, 0x73, 0x74, 0x65, 0x64, 0x5f, 0x6c, 0x69, 0x73, 0x74, 0x5f, 0x66, + 0x69, 0x65, 0x6c, 0x64, 0x18, 0x03, 0x20, 0x03, 0x28, 0x0c, 0x42, 0x11, 0x8a, 0xb5, 0x18, 0x03, + 0x3f, 0x2c, 0x3f, 0x92, 0xb5, 0x18, 0x06, 0x31, 0x30, 0x30, 0x2c, 0x35, 0x30, 0x52, 0x0f, 0x6e, + 0x65, 0x73, 0x74, 0x65, 0x64, 0x4c, 0x69, 0x73, 0x74, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x22, 0x8e, + 0x01, 0x0a, 0x16, 0x56, 0x61, 0x72, 0x69, 0x61, 0x62, 0x6c, 0x65, 0x4f, 0x75, 0x74, 0x65, 0x72, + 0x43, 0x6f, 0x6e, 0x74, 0x61, 0x69, 0x6e, 0x65, 0x72, 0x12, 0x39, 0x0a, 0x07, 0x69, 0x6e, 0x6e, + 0x65, 0x72, 0x5f, 0x31, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x20, 0x2e, 0x74, 0x65, 0x73, + 0x74, 0x69, 0x6e, 0x67, 0x2e, 0x56, 0x61, 0x72, 0x69, 0x61, 0x62, 0x6c, 0x65, 0x4e, 0x65, 0x73, + 0x74, 0x65, 0x64, 0x43, 0x6f, 0x6e, 0x74, 0x61, 0x69, 0x6e, 0x65, 0x72, 0x52, 0x06, 0x69, 0x6e, + 0x6e, 0x65, 0x72, 0x31, 0x12, 0x39, 0x0a, 0x07, 0x69, 0x6e, 0x6e, 0x65, 0x72, 0x5f, 0x32, 0x18, + 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x20, 0x2e, 0x74, 0x65, 0x73, 0x74, 0x69, 0x6e, 0x67, 0x2e, + 0x56, 0x61, 0x72, 0x69, 0x61, 0x62, 0x6c, 0x65, 0x4e, 0x65, 0x73, 0x74, 0x65, 0x64, 0x43, 0x6f, + 0x6e, 0x74, 0x61, 0x69, 0x6e, 0x65, 0x72, 0x52, 0x06, 0x69, 0x6e, 0x6e, 0x65, 0x72, 0x32, 0x22, + 0xfa, 0x04, 0x0a, 0x15, 0x56, 0x61, 0x72, 0x69, 0x61, 0x62, 0x6c, 0x65, 0x54, 0x65, 0x73, 0x74, + 0x43, 0x6f, 0x6e, 0x74, 0x61, 0x69, 0x6e, 0x65, 0x72, 0x12, 0x2b, 0x0a, 0x0d, 0x6c, 0x65, 0x61, + 0x64, 0x69, 0x6e, 0x67, 0x5f, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, + 0x42, 0x06, 0x8a, 0xb5, 0x18, 0x02, 0x33, 0x32, 
0x52, 0x0c, 0x6c, 0x65, 0x61, 0x64, 0x69, 0x6e, + 0x67, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x12, 0x34, 0x0a, 0x11, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x5f, + 0x6c, 0x69, 0x73, 0x74, 0x5f, 0x75, 0x69, 0x6e, 0x74, 0x36, 0x34, 0x18, 0x02, 0x20, 0x03, 0x28, + 0x04, 0x42, 0x08, 0x92, 0xb5, 0x18, 0x04, 0x32, 0x30, 0x34, 0x38, 0x52, 0x0f, 0x66, 0x69, 0x65, + 0x6c, 0x64, 0x4c, 0x69, 0x73, 0x74, 0x55, 0x69, 0x6e, 0x74, 0x36, 0x34, 0x12, 0x58, 0x0a, 0x14, + 0x66, 0x69, 0x65, 0x6c, 0x64, 0x5f, 0x6c, 0x69, 0x73, 0x74, 0x5f, 0x63, 0x6f, 0x6e, 0x74, 0x61, + 0x69, 0x6e, 0x65, 0x72, 0x18, 0x03, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1d, 0x2e, 0x74, 0x65, 0x73, + 0x74, 0x69, 0x6e, 0x67, 0x2e, 0x46, 0x69, 0x78, 0x65, 0x64, 0x4e, 0x65, 0x73, 0x74, 0x65, 0x64, + 0x43, 0x6f, 0x6e, 0x74, 0x61, 0x69, 0x6e, 0x65, 0x72, 0x42, 0x07, 0x92, 0xb5, 0x18, 0x03, 0x31, + 0x32, 0x38, 0x52, 0x12, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x4c, 0x69, 0x73, 0x74, 0x43, 0x6f, 0x6e, + 0x74, 0x61, 0x69, 0x6e, 0x65, 0x72, 0x12, 0x3d, 0x0a, 0x12, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x5f, + 0x6c, 0x69, 0x73, 0x74, 0x5f, 0x62, 0x79, 0x74, 0x65, 0x73, 0x33, 0x32, 0x18, 0x04, 0x20, 0x03, + 0x28, 0x0c, 0x42, 0x0f, 0x8a, 0xb5, 0x18, 0x04, 0x3f, 0x2c, 0x33, 0x32, 0x92, 0xb5, 0x18, 0x03, + 0x31, 0x30, 0x30, 0x52, 0x10, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x4c, 0x69, 0x73, 0x74, 0x42, 0x79, + 0x74, 0x65, 0x73, 0x33, 0x32, 0x12, 0x38, 0x0a, 0x06, 0x6e, 0x65, 0x73, 0x74, 0x65, 0x64, 0x18, + 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x20, 0x2e, 0x74, 0x65, 0x73, 0x74, 0x69, 0x6e, 0x67, 0x2e, + 0x56, 0x61, 0x72, 0x69, 0x61, 0x62, 0x6c, 0x65, 0x4e, 0x65, 0x73, 0x74, 0x65, 0x64, 0x43, 0x6f, + 0x6e, 0x74, 0x61, 0x69, 0x6e, 0x65, 0x72, 0x52, 0x06, 0x6e, 0x65, 0x73, 0x74, 0x65, 0x64, 0x12, + 0x5f, 0x0a, 0x17, 0x76, 0x61, 0x72, 0x69, 0x61, 0x62, 0x6c, 0x65, 0x5f, 0x63, 0x6f, 0x6e, 0x74, + 0x61, 0x69, 0x6e, 0x65, 0x72, 0x5f, 0x6c, 0x69, 0x73, 0x74, 0x18, 0x06, 0x20, 0x03, 0x28, 0x0b, + 0x32, 0x1f, 0x2e, 0x74, 0x65, 0x73, 0x74, 0x69, 0x6e, 0x67, 0x2e, 0x56, 0x61, 0x72, 0x69, 0x61, + 0x62, 0x6c, 0x65, 0x4f, 0x75, 0x74, 0x65, 0x72, 0x43, 0x6f, 0x6e, 0x74, 0x61, 0x69, 0x6e, 0x65, + 0x72, 0x42, 0x06, 0x92, 0xb5, 0x18, 0x02, 0x31, 0x30, 0x52, 0x15, 0x76, 0x61, 0x72, 0x69, 0x61, + 0x62, 0x6c, 0x65, 0x43, 0x6f, 0x6e, 0x74, 0x61, 0x69, 0x6e, 0x65, 0x72, 0x4c, 0x69, 0x73, 0x74, + 0x12, 0x5c, 0x0a, 0x0d, 0x62, 0x69, 0x74, 0x6c, 0x69, 0x73, 0x74, 0x5f, 0x66, 0x69, 0x65, 0x6c, + 0x64, 0x18, 0x07, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x37, 0x82, 0xb5, 0x18, 0x2b, 0x67, 0x69, 0x74, + 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, + 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x67, 0x6f, 0x2d, 0x62, 0x69, 0x74, 0x66, 0x69, 0x65, 0x6c, 0x64, + 0x2e, 0x42, 0x69, 0x74, 0x6c, 0x69, 0x73, 0x74, 0x92, 0xb5, 0x18, 0x04, 0x32, 0x30, 0x34, 0x38, + 0x52, 0x0c, 0x62, 0x69, 0x74, 0x6c, 0x69, 0x73, 0x74, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x12, 0x3d, + 0x0a, 0x11, 0x6e, 0x65, 0x73, 0x74, 0x65, 0x64, 0x5f, 0x6c, 0x69, 0x73, 0x74, 0x5f, 0x66, 0x69, + 0x65, 0x6c, 0x64, 0x18, 0x08, 0x20, 0x03, 0x28, 0x0c, 0x42, 0x11, 0x8a, 0xb5, 0x18, 0x03, 0x3f, + 0x2c, 0x3f, 0x92, 0xb5, 0x18, 0x06, 0x31, 0x30, 0x30, 0x2c, 0x35, 0x30, 0x52, 0x0f, 0x6e, 0x65, + 0x73, 0x74, 0x65, 0x64, 0x4c, 0x69, 0x73, 0x74, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x12, 0x2d, 0x0a, + 0x0e, 0x74, 0x72, 0x61, 0x69, 0x6c, 0x69, 0x6e, 0x67, 0x5f, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x18, + 0x09, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x06, 0x8a, 0xb5, 0x18, 0x02, 0x35, 0x36, 0x52, 0x0d, 0x74, + 0x72, 0x61, 0x69, 0x6c, 0x69, 0x6e, 0x67, 0x46, 0x69, 0x65, 0x6c, 0x64, 
0x42, 0x42, 0x5a, 0x40, + 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, + 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, + 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2f, 0x73, 0x73, 0x7a, 0x5f, 0x71, 0x75, 0x65, 0x72, 0x79, + 0x2f, 0x74, 0x65, 0x73, 0x74, 0x69, 0x6e, 0x67, 0x3b, 0x74, 0x65, 0x73, 0x74, 0x69, 0x6e, 0x67, + 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, } var ( diff --git a/testing/spectest/mainnet/BUILD.bazel b/testing/spectest/mainnet/BUILD.bazel index 5582a8dfad..75d3340e6a 100644 --- a/testing/spectest/mainnet/BUILD.bazel +++ b/testing/spectest/mainnet/BUILD.bazel @@ -200,6 +200,7 @@ go_test( "fulu__sanity__blocks_test.go", "fulu__sanity__slots_test.go", "fulu__ssz_static__ssz_static_test.go", + "gloas__ssz_static__ssz_static_test.go", "phase0__epoch_processing__effective_balance_updates_test.go", "phase0__epoch_processing__epoch_processing_test.go", "phase0__epoch_processing__eth1_data_reset_test.go", @@ -277,6 +278,7 @@ go_test( "//testing/spectest/shared/fulu/rewards:go_default_library", "//testing/spectest/shared/fulu/sanity:go_default_library", "//testing/spectest/shared/fulu/ssz_static:go_default_library", + "//testing/spectest/shared/gloas/ssz_static:go_default_library", "//testing/spectest/shared/phase0/epoch_processing:go_default_library", "//testing/spectest/shared/phase0/finality:go_default_library", "//testing/spectest/shared/phase0/operations:go_default_library", diff --git a/testing/spectest/mainnet/gloas__ssz_static__ssz_static_test.go b/testing/spectest/mainnet/gloas__ssz_static__ssz_static_test.go new file mode 100644 index 0000000000..653e9d340e --- /dev/null +++ b/testing/spectest/mainnet/gloas__ssz_static__ssz_static_test.go @@ -0,0 +1,11 @@ +package mainnet + +import ( + "testing" + + "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/gloas/ssz_static" +) + +func TestMainnet_Gloas_SSZStatic(t *testing.T) { + ssz_static.RunSSZStaticTests(t, "mainnet") +} diff --git a/testing/spectest/minimal/BUILD.bazel b/testing/spectest/minimal/BUILD.bazel index 4eace54866..5f80e7f82d 100644 --- a/testing/spectest/minimal/BUILD.bazel +++ b/testing/spectest/minimal/BUILD.bazel @@ -206,6 +206,7 @@ go_test( "fulu__sanity__blocks_test.go", "fulu__sanity__slots_test.go", "fulu__ssz_static__ssz_static_test.go", + "gloas__ssz_static__ssz_static_test.go", "phase0__epoch_processing__effective_balance_updates_test.go", "phase0__epoch_processing__epoch_processing_test.go", "phase0__epoch_processing__eth1_data_reset_test.go", @@ -287,6 +288,7 @@ go_test( "//testing/spectest/shared/fulu/rewards:go_default_library", "//testing/spectest/shared/fulu/sanity:go_default_library", "//testing/spectest/shared/fulu/ssz_static:go_default_library", + "//testing/spectest/shared/gloas/ssz_static:go_default_library", "//testing/spectest/shared/phase0/epoch_processing:go_default_library", "//testing/spectest/shared/phase0/finality:go_default_library", "//testing/spectest/shared/phase0/operations:go_default_library", diff --git a/testing/spectest/minimal/gloas__ssz_static__ssz_static_test.go b/testing/spectest/minimal/gloas__ssz_static__ssz_static_test.go new file mode 100644 index 0000000000..381d6c0430 --- /dev/null +++ b/testing/spectest/minimal/gloas__ssz_static__ssz_static_test.go @@ -0,0 +1,11 @@ +package minimal + +import ( + "testing" + + "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/gloas/ssz_static" +) + +func TestMinimal_Gloas_SSZStatic(t *testing.T) { + 
ssz_static.RunSSZStaticTests(t, "minimal") +} diff --git a/testing/spectest/shared/gloas/ssz_static/BUILD.bazel b/testing/spectest/shared/gloas/ssz_static/BUILD.bazel new file mode 100644 index 0000000000..50eca0ed7b --- /dev/null +++ b/testing/spectest/shared/gloas/ssz_static/BUILD.bazel @@ -0,0 +1,15 @@ +load("@prysm//tools/go:def.bzl", "go_library") + +go_library( + name = "go_default_library", + testonly = True, + srcs = ["ssz_static.go"], + importpath = "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/gloas/ssz_static", + visibility = ["//testing/spectest:__subpackages__"], + deps = [ + "//proto/engine/v1:go_default_library", + "//proto/prysm/v1alpha1:go_default_library", + "//testing/spectest/shared/common/ssz_static:go_default_library", + "@com_github_prysmaticlabs_fastssz//:go_default_library", + ], +) diff --git a/testing/spectest/shared/gloas/ssz_static/ssz_static.go b/testing/spectest/shared/gloas/ssz_static/ssz_static.go new file mode 100644 index 0000000000..0a99c14f54 --- /dev/null +++ b/testing/spectest/shared/gloas/ssz_static/ssz_static.go @@ -0,0 +1,184 @@ +package ssz_static + +import ( + "errors" + "testing" + + enginev1 "github.com/OffchainLabs/prysm/v6/proto/engine/v1" + ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + common "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/common/ssz_static" + fssz "github.com/prysmaticlabs/fastssz" +) + +// RunSSZStaticTests executes "ssz_static" tests. +func RunSSZStaticTests(t *testing.T, config string) { + common.RunSSZStaticTests(t, config, "gloas", unmarshalledSSZ, customHtr) +} + +func customHtr(t *testing.T, htrs []common.HTR, object interface{}) []common.HTR { + // TODO: Add custom HTR for BeaconStateGloas when state-native support is implemented + // For now, only use the default fastssz HTR methods + return htrs +} + +// unmarshalledSSZ unmarshalls serialized input. 
+func unmarshalledSSZ(t *testing.T, serializedBytes []byte, folderName string) (interface{}, error) { + var obj interface{} + + switch folderName { + // Gloas specific types + case "ExecutionPayloadBid": + obj = &ethpb.ExecutionPayloadBid{} + case "SignedExecutionPayloadBid": + obj = &ethpb.SignedExecutionPayloadBid{} + case "PayloadAttestationData": + obj = &ethpb.PayloadAttestationData{} + case "PayloadAttestation": + obj = &ethpb.PayloadAttestation{} + case "PayloadAttestationMessage": + obj = &ethpb.PayloadAttestationMessage{} + case "BeaconBlock": + obj = &ethpb.BeaconBlockGloas{} + case "BeaconBlockBody": + obj = &ethpb.BeaconBlockBodyGloas{} + case "BeaconState": + obj = &ethpb.BeaconStateGloas{} + case "BuilderPendingPayment": + obj = &ethpb.BuilderPendingPayment{} + case "BuilderPendingWithdrawal": + obj = &ethpb.BuilderPendingWithdrawal{} + case "ExecutionPayloadEnvelope": + obj = &ethpb.ExecutionPayloadEnvelope{} + case "SignedExecutionPayloadEnvelope": + obj = &ethpb.SignedExecutionPayloadEnvelope{} + case "ForkChoiceNode": + t.Skip("Not a consensus type") + case "IndexedPayloadAttestation": + t.Skip("Not a consensus type") + case "DataColumnSidecar": + obj = &ethpb.DataColumnSidecarGloas{} + + // Standard types that also exist in gloas + case "ExecutionPayload": + obj = &enginev1.ExecutionPayloadDeneb{} + case "ExecutionPayloadHeader": + obj = &enginev1.ExecutionPayloadHeaderDeneb{} + case "Attestation": + obj = &ethpb.AttestationElectra{} + case "AttestationData": + obj = &ethpb.AttestationData{} + case "AttesterSlashing": + obj = &ethpb.AttesterSlashingElectra{} + case "AggregateAndProof": + obj = &ethpb.AggregateAttestationAndProofElectra{} + case "BeaconBlockHeader": + obj = &ethpb.BeaconBlockHeader{} + case "Checkpoint": + obj = &ethpb.Checkpoint{} + case "Deposit": + obj = &ethpb.Deposit{} + case "DepositMessage": + obj = &ethpb.DepositMessage{} + case "DepositData": + obj = &ethpb.Deposit_Data{} + case "Eth1Data": + obj = &ethpb.Eth1Data{} + case "Eth1Block": + t.Skip("Unused type") + case "Fork": + obj = &ethpb.Fork{} + case "ForkData": + obj = &ethpb.ForkData{} + case "HistoricalBatch": + obj = &ethpb.HistoricalBatch{} + case "IndexedAttestation": + obj = &ethpb.IndexedAttestationElectra{} + case "PendingAttestation": + obj = &ethpb.PendingAttestation{} + case "ProposerSlashing": + obj = &ethpb.ProposerSlashing{} + case "SignedAggregateAndProof": + obj = &ethpb.SignedAggregateAttestationAndProofElectra{} + case "SignedBeaconBlock": + obj = &ethpb.SignedBeaconBlockGloas{} + case "SignedBeaconBlockHeader": + obj = &ethpb.SignedBeaconBlockHeader{} + case "SignedVoluntaryExit": + obj = &ethpb.SignedVoluntaryExit{} + case "SigningData": + obj = &ethpb.SigningData{} + case "Validator": + obj = &ethpb.Validator{} + case "VoluntaryExit": + obj = &ethpb.VoluntaryExit{} + case "SyncCommitteeMessage": + obj = &ethpb.SyncCommitteeMessage{} + case "SyncCommitteeContribution": + obj = &ethpb.SyncCommitteeContribution{} + case "ContributionAndProof": + obj = &ethpb.ContributionAndProof{} + case "SignedContributionAndProof": + obj = &ethpb.SignedContributionAndProof{} + case "SingleAttestation": + obj = &ethpb.SingleAttestation{} + case "SyncAggregate": + obj = &ethpb.SyncAggregate{} + case "SyncAggregatorSelectionData": + obj = &ethpb.SyncAggregatorSelectionData{} + case "SyncCommittee": + obj = &ethpb.SyncCommittee{} + case "LightClientOptimisticUpdate": + obj = &ethpb.LightClientOptimisticUpdateDeneb{} + case "LightClientFinalityUpdate": + obj = &ethpb.LightClientFinalityUpdateElectra{} + case "LightClientBootstrap": + obj = &ethpb.LightClientBootstrapElectra{} + case "LightClientUpdate": + obj =
&ethpb.LightClientUpdateElectra{} + case "LightClientHeader": + obj = &ethpb.LightClientHeaderDeneb{} + case "BlobIdentifier": + obj = &ethpb.BlobIdentifier{} + case "BlobSidecar": + t.Skip("Unused type") + case "PowBlock": + obj = &ethpb.PowBlock{} + case "Withdrawal": + obj = &enginev1.Withdrawal{} + case "HistoricalSummary": + obj = &ethpb.HistoricalSummary{} + case "BLSToExecutionChange": + obj = &ethpb.BLSToExecutionChange{} + case "SignedBLSToExecutionChange": + obj = &ethpb.SignedBLSToExecutionChange{} + case "PendingDeposit": + obj = &ethpb.PendingDeposit{} + case "PendingPartialWithdrawal": + obj = &ethpb.PendingPartialWithdrawal{} + case "PendingConsolidation": + obj = &ethpb.PendingConsolidation{} + case "WithdrawalRequest": + obj = &enginev1.WithdrawalRequest{} + case "DepositRequest": + obj = &enginev1.DepositRequest{} + case "ConsolidationRequest": + obj = &enginev1.ConsolidationRequest{} + case "ExecutionRequests": + obj = &enginev1.ExecutionRequests{} + case "DataColumnsByRootIdentifier": + obj = &ethpb.DataColumnsByRootIdentifier{} + case "MatrixEntry": + t.Skip("Unused type") + default: + return nil, errors.New("type not found") + } + + var err error + if o, ok := obj.(fssz.Unmarshaler); ok { + err = o.UnmarshalSSZ(serializedBytes) + } else { + err = errors.New("could not unmarshal object, not a fastssz compatible object") + } + + return obj, err +} From 1e7d74cf02db10664c8f70400dfb3b85ad254953 Mon Sep 17 00:00:00 2001 From: Marius van der Wijden Date: Wed, 5 Nov 2025 17:37:59 +0100 Subject: [PATCH 076/103] Increase mainnet DefaultBuilderGasLimit to 60M (#15979) * Increase mainnet DefaultBuilderGasLimit to 60M * Add changelog. * config/proposer/loader: update testdata --------- Co-authored-by: Manu NALEPA --- changelog/marius-builder-gas-limit.md | 2 ++ config/params/mainnet_config.go | 2 +- .../testdata/good-prepare-beacon-proposer-config-multiple.json | 2 +- .../loader/testdata/good-prepare-beacon-proposer-config.yaml | 2 +- 4 files changed, 5 insertions(+), 3 deletions(-) create mode 100644 changelog/marius-builder-gas-limit.md diff --git a/changelog/marius-builder-gas-limit.md b/changelog/marius-builder-gas-limit.md new file mode 100644 index 0000000000..5c871d19e8 --- /dev/null +++ b/changelog/marius-builder-gas-limit.md @@ -0,0 +1,2 @@ +### Changed +- Bump builder default gas limit from `45000000` (45 MGas) to `60000000` (60 MGas) \ No newline at end of file diff --git a/config/params/mainnet_config.go b/config/params/mainnet_config.go index fd2997730a..68a2bd958b 100644 --- a/config/params/mainnet_config.go +++ b/config/params/mainnet_config.go @@ -268,7 +268,7 @@ var mainnetBeaconConfig = &BeaconChainConfig{ BytesPerLogsBloom: 256, MaxExtraDataBytes: 32, EthBurnAddressHex: "0x0000000000000000000000000000000000000000", - DefaultBuilderGasLimit: uint64(45000000), + DefaultBuilderGasLimit: uint64(60000000), // Mevboost circuit breaker MaxBuilderConsecutiveMissedSlots: 3, diff --git a/config/proposer/loader/testdata/good-prepare-beacon-proposer-config-multiple.json b/config/proposer/loader/testdata/good-prepare-beacon-proposer-config-multiple.json index a290f98eb0..cfb0837fcf 100644 --- a/config/proposer/loader/testdata/good-prepare-beacon-proposer-config-multiple.json +++ b/config/proposer/loader/testdata/good-prepare-beacon-proposer-config-multiple.json @@ -4,7 +4,7 @@ "fee_recipient": "0x50155530FCE8a85ec7055A5F8b2bE214B3DaeFd3", "builder": { "enabled": true, - "gas_limit": "45000000" + "gas_limit": "60000000" } },
"0xb057816155ad77931185101128655c0191bd0214c201ca48ed887f6c4c6adf334070efcd75140eada5ac83a92506dd7b": { diff --git a/config/proposer/loader/testdata/good-prepare-beacon-proposer-config.yaml b/config/proposer/loader/testdata/good-prepare-beacon-proposer-config.yaml index 5585d913d2..0fe32f7111 100644 --- a/config/proposer/loader/testdata/good-prepare-beacon-proposer-config.yaml +++ b/config/proposer/loader/testdata/good-prepare-beacon-proposer-config.yaml @@ -9,4 +9,4 @@ default_config: fee_recipient: '0x6e35733c5af9B61374A128e6F85f553aF09ff89A' builder: enabled: false - gas_limit: '45000000' \ No newline at end of file + gas_limit: '60000000' \ No newline at end of file From f0a099b27576877288ee692875309b4b35d364d4 Mon Sep 17 00:00:00 2001 From: Manu NALEPA Date: Wed, 5 Nov 2025 17:47:14 +0100 Subject: [PATCH 077/103] Ensures the rate limitation is respected for by root blob and data column sidecars requests. (#15981) * Set default value of `--blob-batch-limit` to 384. So, using default values, `--blob-batch-limit * --blob-batch-limit-burst-factor = 384*3 = MAX_REQUEST_BLOB_SIDECARS = 1152.` * `blobSidecarByRootRPCHandler`: Add rate limiting. Bacause now the rate limiter validation is done before the request validation, adapt `TestBlobsByRootValidation` consequently and add new specific tests for `validateBlobByRootRequest` to cover the now untested case. * Set default value of `--data-column-batch-limit-burst-factor` to 4. So, using default values, `--data-column-batch-limit * --data-column-batch-limit-burst-factor = 4096*2 = MAX_REQUEST_DATA_COLUMN_SIDECARS_ELECTRA = 16384`. * `validateDataColumnsByRootRequest`: Take a count instead of idents. * `dataColumnSidecarByRootRPCHandler`: Add rate limiting. --- .../sync/rpc_blob_sidecars_by_root.go | 6 ++ .../sync/rpc_blob_sidecars_by_root_test.go | 63 ++++++++++++- .../sync/rpc_data_column_sidecars_by_root.go | 21 +++-- .../rpc_data_column_sidecars_by_root_test.go | 91 ++++--------------- beacon-chain/sync/sync_test.go | 2 +- changelog/manu-rate-limit.md | 2 + cmd/beacon-chain/flags/base.go | 4 +- 7 files changed, 105 insertions(+), 84 deletions(-) create mode 100644 changelog/manu-rate-limit.md diff --git a/beacon-chain/sync/rpc_blob_sidecars_by_root.go b/beacon-chain/sync/rpc_blob_sidecars_by_root.go index 63b31f113c..cbb75cacc0 100644 --- a/beacon-chain/sync/rpc_blob_sidecars_by_root.go +++ b/beacon-chain/sync/rpc_blob_sidecars_by_root.go @@ -37,6 +37,11 @@ func (s *Service) blobSidecarByRootRPCHandler(ctx context.Context, msg interface } blobIdents := *ref + + if err := s.rateLimiter.validateRequest(stream, uint64(len(blobIdents))); err != nil { + return errors.Wrap(err, "rate limiter validate request") + } + cs := s.cfg.clock.CurrentSlot() remotePeer := stream.Conn().RemotePeer() if err := validateBlobByRootRequest(blobIdents, cs); err != nil { @@ -44,6 +49,7 @@ func (s *Service) blobSidecarByRootRPCHandler(ctx context.Context, msg interface s.writeErrorResponseToStream(responseCodeInvalidRequest, err.Error(), stream) return err } + // Sort the identifiers so that requests for the same blob root will be adjacent, minimizing db lookups. 
sort.Sort(blobIdents) diff --git a/beacon-chain/sync/rpc_blob_sidecars_by_root_test.go b/beacon-chain/sync/rpc_blob_sidecars_by_root_test.go index e011455e0f..5fc6660cc7 100644 --- a/beacon-chain/sync/rpc_blob_sidecars_by_root_test.go +++ b/beacon-chain/sync/rpc_blob_sidecars_by_root_test.go @@ -238,7 +238,7 @@ func TestBlobsByRootValidation(t *testing.T) { { name: "exceeds req max", nblocks: int(params.BeaconConfig().MaxRequestBlobSidecars) + 1, - err: p2pTypes.ErrMaxBlobReqExceeded, + err: p2pTypes.ErrRateLimited, }, } for _, c := range cases { @@ -270,3 +270,64 @@ func TestBlobsByRootOK(t *testing.T) { }) } } + +func TestValidateBlobByRootRequest(t *testing.T) { + params.SetupTestConfigCleanup(t) + cfg := params.BeaconConfig() + + // Helper function to create blob identifiers + createBlobIdents := func(count int) p2pTypes.BlobSidecarsByRootReq { + idents := make([]*ethpb.BlobIdentifier, count) + for i := 0; i < count; i++ { + idents[i] = &ethpb.BlobIdentifier{ + BlockRoot: make([]byte, 32), + Index: uint64(i), + } + } + return idents + } + + tests := []struct { + name string + blobIdents p2pTypes.BlobSidecarsByRootReq + slot types.Slot + expectedErr error + }{ + { + name: "pre-Electra: at max limit", + blobIdents: createBlobIdents(int(cfg.MaxRequestBlobSidecars)), + slot: util.SlotAtEpoch(t, cfg.ElectraForkEpoch-1), + expectedErr: nil, + }, + { + name: "pre-Electra: exceeds max limit by 1", + blobIdents: createBlobIdents(int(cfg.MaxRequestBlobSidecars) + 1), + slot: util.SlotAtEpoch(t, cfg.ElectraForkEpoch-1), + expectedErr: p2pTypes.ErrMaxBlobReqExceeded, + }, + { + name: "Electra: at max limit", + blobIdents: createBlobIdents(int(cfg.MaxRequestBlobSidecarsElectra)), + slot: util.SlotAtEpoch(t, cfg.ElectraForkEpoch), + expectedErr: nil, + }, + { + name: "Electra: exceeds Electra max limit by 1", + blobIdents: createBlobIdents(int(cfg.MaxRequestBlobSidecarsElectra) + 1), + slot: util.SlotAtEpoch(t, cfg.ElectraForkEpoch), + expectedErr: p2pTypes.ErrMaxBlobReqExceeded, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + err := validateBlobByRootRequest(tt.blobIdents, tt.slot) + if tt.expectedErr != nil { + require.ErrorIs(t, err, tt.expectedErr) + return + } + + require.NoError(t, err) + }) + } +} diff --git a/beacon-chain/sync/rpc_data_column_sidecars_by_root.go b/beacon-chain/sync/rpc_data_column_sidecars_by_root.go index 2c0f86ac20..983000ea3c 100644 --- a/beacon-chain/sync/rpc_data_column_sidecars_by_root.go +++ b/beacon-chain/sync/rpc_data_column_sidecars_by_root.go @@ -49,8 +49,18 @@ func (s *Service) dataColumnSidecarByRootRPCHandler(ctx context.Context, msg int SetRPCStreamDeadlines(stream) + // Count the total number of requested data column sidecars. + totalRequested := 0 + for _, ident := range requestedColumnIdents { + totalRequested += len(ident.Columns) + } + + if err := s.rateLimiter.validateRequest(stream, uint64(totalRequested)); err != nil { + return errors.Wrap(err, "rate limiter validate request") + } + + // Penalize peers that send invalid requests.
- if err := validateDataColumnsByRootRequest(requestedColumnIdents); err != nil { + if err := validateDataColumnsByRootRequest(totalRequested); err != nil { s.downscorePeer(remotePeer, "dataColumnSidecarByRootRPCHandlerValidationError") s.writeErrorResponseToStream(responseCodeInvalidRequest, err.Error(), stream) return errors.Wrap(err, "validate data columns by root request") @@ -154,13 +164,8 @@ func (s *Service) dataColumnSidecarByRootRPCHandler(ctx context.Context, msg int } // validateDataColumnsByRootRequest checks if the request for data column sidecars is valid. -func validateDataColumnsByRootRequest(colIdents types.DataColumnsByRootIdentifiers) error { - total := uint64(0) - for _, id := range colIdents { - total += uint64(len(id.Columns)) - } - - if total > params.BeaconConfig().MaxRequestDataColumnSidecars { +func validateDataColumnsByRootRequest(count int) error { + if uint64(count) > params.BeaconConfig().MaxRequestDataColumnSidecars { return types.ErrMaxDataColumnReqExceeded } diff --git a/beacon-chain/sync/rpc_data_column_sidecars_by_root_test.go b/beacon-chain/sync/rpc_data_column_sidecars_by_root_test.go index a0ba567985..a4ccae99a9 100644 --- a/beacon-chain/sync/rpc_data_column_sidecars_by_root_test.go +++ b/beacon-chain/sync/rpc_data_column_sidecars_by_root_test.go @@ -1,7 +1,6 @@ package sync import ( - "context" "io" "math" "sync" @@ -12,10 +11,10 @@ import ( "github.com/OffchainLabs/prysm/v6/beacon-chain/db/filesystem" testDB "github.com/OffchainLabs/prysm/v6/beacon-chain/db/testing" "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p" + "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/encoder" p2ptest "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/testing" "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/types" "github.com/OffchainLabs/prysm/v6/beacon-chain/startup" - "github.com/OffchainLabs/prysm/v6/cmd/beacon-chain/flags" fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" "github.com/OffchainLabs/prysm/v6/config/params" "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" @@ -36,7 +35,10 @@ func TestDataColumnSidecarsByRootRPCHandler(t *testing.T) { params.BeaconConfig().InitializeForkSchedule() ctxMap, err := ContextByteVersionsForValRoot(params.BeaconConfig().GenesisValidatorsRoot) require.NoError(t, err) - ctx := context.Background() + ctx := t.Context() + + protocolID := protocol.ID(p2p.RPCDataColumnSidecarsByRootTopicV1) + "/" + encoder.ProtocolSuffixSSZSnappy + t.Run("wrong message type", func(t *testing.T) { service := &Service{} err := service.dataColumnSidecarByRootRPCHandler(t.Context(), nil, nil) @@ -50,9 +52,7 @@ func TestDataColumnSidecarsByRootRPCHandler(t *testing.T) { params.OverrideBeaconConfig(cfg) localP2P := p2ptest.NewTestP2P(t) - service := &Service{cfg: &config{p2p: localP2P}} - - protocolID := protocol.ID(p2p.RPCDataColumnSidecarsByRootTopicV1) + service := &Service{cfg: &config{p2p: localP2P}, rateLimiter: newRateLimiter(localP2P)} remoteP2P := p2ptest.NewTestP2P(t) var wg sync.WaitGroup @@ -83,12 +83,6 @@ func TestDataColumnSidecarsByRootRPCHandler(t *testing.T) { }) t.Run("nominal", func(t *testing.T) { - resetFlags := flags.Get() - gFlags := new(flags.GlobalFlags) - gFlags.DataColumnBatchLimit = 2 - flags.Init(gFlags) - defer flags.Init(resetFlags) - // Setting the ticker to 0 will cause the ticker to panic. // Setting it to the minimum value instead. 
refTickerDelay := tickerDelay @@ -151,7 +145,6 @@ func TestDataColumnSidecarsByRootRPCHandler(t *testing.T) { rateLimiter: newRateLimiter(localP2P), } - protocolID := protocol.ID(p2p.RPCDataColumnSidecarsByRootTopicV1) remoteP2P := p2ptest.NewTestP2P(t) var wg sync.WaitGroup @@ -226,68 +219,22 @@ func TestDataColumnSidecarsByRootRPCHandler(t *testing.T) { } func TestValidateDataColumnsByRootRequest(t *testing.T) { + const max = 10 + params.SetupTestConfigCleanup(t) - config := params.BeaconConfig() - maxCols := uint64(10) // Set a small value for testing - config.MaxRequestDataColumnSidecars = maxCols - params.OverrideBeaconConfig(config) + cfg := params.BeaconConfig() + cfg.MaxRequestDataColumnSidecars = max + params.OverrideBeaconConfig(cfg) - tests := []struct { - name string - colIdents types.DataColumnsByRootIdentifiers - expectedErr error - }{ - { - name: "Invalid request - multiple identifiers exceed max", - colIdents: types.DataColumnsByRootIdentifiers{ - { - BlockRoot: make([]byte, fieldparams.RootLength), - Columns: make([]uint64, maxCols/2+1), - }, - { - BlockRoot: make([]byte, fieldparams.RootLength), - Columns: make([]uint64, maxCols/2+1), - }, - }, - expectedErr: types.ErrMaxDataColumnReqExceeded, - }, - { - name: "Valid request - less than max", - colIdents: types.DataColumnsByRootIdentifiers{ - { - BlockRoot: make([]byte, fieldparams.RootLength), - Columns: make([]uint64, maxCols-1), - }, - }, - expectedErr: nil, - }, - { - name: "Valid request - multiple identifiers sum to max", - colIdents: types.DataColumnsByRootIdentifiers{ - { - BlockRoot: make([]byte, fieldparams.RootLength), - Columns: make([]uint64, maxCols/2), - }, - { - BlockRoot: make([]byte, fieldparams.RootLength), - Columns: make([]uint64, maxCols/2), - }, - }, - expectedErr: nil, - }, - } + t.Run("invalid", func(t *testing.T) { + err := validateDataColumnsByRootRequest(max + 1) + require.ErrorIs(t, err, types.ErrMaxDataColumnReqExceeded) + }) - // Run tests - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - err := validateDataColumnsByRootRequest(tt.colIdents) - if tt.expectedErr == nil { - require.NoError(t, err) - } else { - require.ErrorIs(t, err, tt.expectedErr) - } - }) - } + t.Run("valid", func(t *testing.T) { + err := validateDataColumnsByRootRequest(max) + require.NoError(t, err) + }) } func TestDataColumnsRPCMinValidSlot(t *testing.T) { diff --git a/beacon-chain/sync/sync_test.go b/beacon-chain/sync/sync_test.go index 9615255ae4..dc3c222e2e 100644 --- a/beacon-chain/sync/sync_test.go +++ b/beacon-chain/sync/sync_test.go @@ -20,7 +20,7 @@ func TestMain(m *testing.M) { BlobBatchLimit: 32, BlobBatchLimitBurstFactor: 2, DataColumnBatchLimit: 4096, - DataColumnBatchLimitBurstFactor: 2, + DataColumnBatchLimitBurstFactor: 4, }) defer func() { flags.Init(resetFlags) diff --git a/changelog/manu-rate-limit.md b/changelog/manu-rate-limit.md new file mode 100644 index 0000000000..b2e16c5cc5 --- /dev/null +++ b/changelog/manu-rate-limit.md @@ -0,0 +1,2 @@ +### Fixed +- Ensures the rate limitation is respected for by root blob and data column sidecars requests. 
\ No newline at end of file diff --git a/cmd/beacon-chain/flags/base.go b/cmd/beacon-chain/flags/base.go index a338572bd1..681fec23fe 100644 --- a/cmd/beacon-chain/flags/base.go +++ b/cmd/beacon-chain/flags/base.go @@ -204,7 +204,7 @@ var ( BlobBatchLimit = &cli.IntFlag{ Name: "blob-batch-limit", Usage: "The amount of blobs the local peer is bounded to request and respond to in a batch.", - Value: 192, + Value: 384, } // BlobBatchLimitBurstFactor specifies the factor by which blob batch size may increase. BlobBatchLimitBurstFactor = &cli.IntFlag{ @@ -222,7 +222,7 @@ var ( DataColumnBatchLimitBurstFactor = &cli.IntFlag{ Name: "data-column-batch-limit-burst-factor", Usage: "The factor by which data column batch limit may increase on burst.", - Value: 2, + Value: 4, } // DisableDebugRPCEndpoints disables the debug Beacon API namespace. DisableDebugRPCEndpoints = &cli.BoolFlag{ From 9959782f1cfa7986349732f359145a4eb3411d95 Mon Sep 17 00:00:00 2001 From: Potuz Date: Wed, 5 Nov 2025 11:52:10 -0500 Subject: [PATCH 078/103] Only use head if it's compatible with target (#15965) * Only use head if it's compatible with target * Allow blocks from the previous epoch to be viable for checkpoints * Add feature flag to make it configurable * fix tests * @satushh's review * Manu's nit * Use fields in logs --- .../blockchain/process_attestation_helpers.go | 22 ++++++++++++++++++- .../forkchoice/doubly-linked-tree/BUILD.bazel | 1 + .../doubly-linked-tree/forkchoice.go | 10 ++++++--- .../doubly-linked-tree/forkchoice_test.go | 9 +++++--- changelog/potuz_head_target_compat.md | 3 +++ config/features/config.go | 6 ++++- config/features/flags.go | 6 +++++ 7 files changed, 49 insertions(+), 8 deletions(-) create mode 100644 changelog/potuz_head_target_compat.md diff --git a/beacon-chain/blockchain/process_attestation_helpers.go b/beacon-chain/blockchain/process_attestation_helpers.go index 45d24d9822..e3336f1fa8 100644 --- a/beacon-chain/blockchain/process_attestation_helpers.go +++ b/beacon-chain/blockchain/process_attestation_helpers.go @@ -1,6 +1,7 @@ package blockchain import ( + "bytes" "context" "fmt" "strconv" @@ -16,6 +17,7 @@ import ( ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" "github.com/OffchainLabs/prysm/v6/time/slots" "github.com/pkg/errors" + "github.com/sirupsen/logrus" ) // The caller of this function must have a lock on forkchoice. @@ -27,6 +29,20 @@ func (s *Service) getRecentPreState(ctx context.Context, c *ethpb.Checkpoint) st if !s.cfg.ForkChoiceStore.IsCanonical([32]byte(c.Root)) { return nil } + // Only use head state if the head state is compatible with the target checkpoint. + headRoot, err := s.HeadRoot(ctx) + if err != nil { + return nil + } + headTarget, err := s.cfg.ForkChoiceStore.TargetRootForEpoch([32]byte(headRoot), c.Epoch) + if err != nil { + return nil + } + if !bytes.Equal(c.Root, headTarget[:]) { + return nil + } + + // If the head state alone is enough, we can return it directly read only. if c.Epoch == headEpoch { st, err := s.HeadStateReadOnly(ctx) if err != nil { @@ -34,11 +50,13 @@ func (s *Service) getRecentPreState(ctx context.Context, c *ethpb.Checkpoint) st } return st } + // Otherwise we need to advance the head state to the start of the target epoch. + // This point can only be reached if c.Root == headRoot and c.Epoch > headEpoch. slot, err := slots.EpochStart(c.Epoch) if err != nil { return nil } - // Try if we have already set the checkpoint cache + // Try if we have already set the checkpoint cache. 
This will be tried again if we fail here but the check is cheap anyway. epochKey := strconv.FormatUint(uint64(c.Epoch), 10 /* base 10 */) lock := async.NewMultilock(string(c.Root) + epochKey) lock.Lock() @@ -50,6 +68,7 @@ func (s *Service) getRecentPreState(ctx context.Context, c *ethpb.Checkpoint) st if cachedState != nil && !cachedState.IsNil() { return cachedState } + // If we haven't advanced yet then process the slots from head state. st, err := s.HeadState(ctx) if err != nil { return nil @@ -114,6 +133,7 @@ func (s *Service) getAttPreState(ctx context.Context, c *ethpb.Checkpoint) (stat } // Fallback to state regeneration. + log.WithFields(logrus.Fields{"epoch": c.Epoch, "root": fmt.Sprintf("%#x", c.Root)}).Debug("Regenerating attestation pre-state") baseState, err := s.cfg.StateGen.StateByRoot(ctx, bytesutil.ToBytes32(c.Root)) if err != nil { return nil, errors.Wrapf(err, "could not get pre state for epoch %d", c.Epoch) diff --git a/beacon-chain/forkchoice/doubly-linked-tree/BUILD.bazel b/beacon-chain/forkchoice/doubly-linked-tree/BUILD.bazel index a75ea7249a..8843d891de 100644 --- a/beacon-chain/forkchoice/doubly-linked-tree/BUILD.bazel +++ b/beacon-chain/forkchoice/doubly-linked-tree/BUILD.bazel @@ -27,6 +27,7 @@ go_library( "//beacon-chain/forkchoice:go_default_library", "//beacon-chain/forkchoice/types:go_default_library", "//beacon-chain/state:go_default_library", + "//config/features:go_default_library", "//config/fieldparams:go_default_library", "//config/params:go_default_library", "//consensus-types/blocks:go_default_library", diff --git a/beacon-chain/forkchoice/doubly-linked-tree/forkchoice.go b/beacon-chain/forkchoice/doubly-linked-tree/forkchoice.go index 500fe988bd..0118e16fc1 100644 --- a/beacon-chain/forkchoice/doubly-linked-tree/forkchoice.go +++ b/beacon-chain/forkchoice/doubly-linked-tree/forkchoice.go @@ -8,6 +8,7 @@ import ( "github.com/OffchainLabs/prysm/v6/beacon-chain/forkchoice" forkchoicetypes "github.com/OffchainLabs/prysm/v6/beacon-chain/forkchoice/types" "github.com/OffchainLabs/prysm/v6/beacon-chain/state" + "github.com/OffchainLabs/prysm/v6/config/features" fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" "github.com/OffchainLabs/prysm/v6/config/params" consensus_blocks "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" @@ -239,9 +240,12 @@ func (f *ForkChoice) IsViableForCheckpoint(cp *forkchoicetypes.Checkpoint) (bool if node.slot == epochStart { return true, nil } - nodeEpoch := slots.ToEpoch(node.slot) - if nodeEpoch >= cp.Epoch { - return false, nil + if !features.Get().DisableLastEpochTargets { + // Allow any node from the checkpoint epoch - 1 to be viable. 
+ nodeEpoch := slots.ToEpoch(node.slot) + if nodeEpoch+1 == cp.Epoch { + return true, nil + } } for _, child := range node.children { if child.slot > epochStart { diff --git a/beacon-chain/forkchoice/doubly-linked-tree/forkchoice_test.go b/beacon-chain/forkchoice/doubly-linked-tree/forkchoice_test.go index 76d7319b29..a2b97a7051 100644 --- a/beacon-chain/forkchoice/doubly-linked-tree/forkchoice_test.go +++ b/beacon-chain/forkchoice/doubly-linked-tree/forkchoice_test.go @@ -813,9 +813,10 @@ func TestForkChoiceIsViableForCheckpoint(t *testing.T) { require.NoError(t, err) require.Equal(t, true, viable) + // Last epoch blocks are still viable viable, err = f.IsViableForCheckpoint(&forkchoicetypes.Checkpoint{Root: blk.Root(), Epoch: 1}) require.NoError(t, err) - require.Equal(t, false, viable) + require.Equal(t, true, viable) // No Children but impossible checkpoint viable, err = f.IsViableForCheckpoint(&forkchoicetypes.Checkpoint{Root: blk2.Root()}) @@ -835,9 +836,10 @@ func TestForkChoiceIsViableForCheckpoint(t *testing.T) { require.NoError(t, err) require.Equal(t, false, viable) + // Last epoch blocks are still viable viable, err = f.IsViableForCheckpoint(&forkchoicetypes.Checkpoint{Root: blk2.Root(), Epoch: 1}) require.NoError(t, err) - require.Equal(t, false, viable) + require.Equal(t, true, viable) st, blk4, err := prepareForkchoiceState(ctx, params.BeaconConfig().SlotsPerEpoch, [32]byte{'d'}, blk2.Root(), [32]byte{'D'}, 0, 0) require.NoError(t, err) @@ -848,9 +850,10 @@ func TestForkChoiceIsViableForCheckpoint(t *testing.T) { require.NoError(t, err) require.Equal(t, false, viable) + // Last epoch blocks are still viable viable, err = f.IsViableForCheckpoint(&forkchoicetypes.Checkpoint{Root: blk2.Root(), Epoch: 1}) require.NoError(t, err) - require.Equal(t, false, viable) + require.Equal(t, true, viable) // Boundary block viable, err = f.IsViableForCheckpoint(&forkchoicetypes.Checkpoint{Root: blk4.Root(), Epoch: 1}) diff --git a/changelog/potuz_head_target_compat.md b/changelog/potuz_head_target_compat.md new file mode 100644 index 0000000000..fc87b72dcd --- /dev/null +++ b/changelog/potuz_head_target_compat.md @@ -0,0 +1,3 @@ +### Fixed + +- Use head only if it's compatible with target for attestation validation. diff --git a/config/features/config.go b/config/features/config.go index a9b5a08e42..7ebb09c977 100644 --- a/config/features/config.go +++ b/config/features/config.go @@ -69,6 +69,7 @@ type Flags struct { DisableResourceManager bool // Disables running the node with libp2p's resource manager. DisableStakinContractCheck bool // Disables check for deposit contract when proposing blocks
EnableVerboseSigVerification bool // EnableVerboseSigVerification specifies whether to verify individual signature if batch verification fails @@ -274,11 +275,14 @@ func ConfigureBeaconChain(ctx *cli.Context) error { logEnabled(forceHeadFlag) cfg.ForceHead = ctx.String(forceHeadFlag.Name) } - if ctx.IsSet(blacklistRoots.Name) { logEnabled(blacklistRoots) cfg.BlacklistedRoots = parseBlacklistedRoots(ctx.StringSlice(blacklistRoots.Name)) } + if ctx.IsSet(disableLastEpochTargets.Name) { + logEnabled(disableLastEpochTargets) + cfg.DisableLastEpochTargets = true + } cfg.AggregateIntervals = [3]time.Duration{aggregateFirstInterval.Value, aggregateSecondInterval.Value, aggregateThirdInterval.Value} Init(cfg) diff --git a/config/features/flags.go b/config/features/flags.go index 880092336b..715aceb205 100644 --- a/config/features/flags.go +++ b/config/features/flags.go @@ -197,6 +197,11 @@ var ( Usage: "(Work in progress): Enables the web portal for the validator client.", Value: false, } + // disableLastEpochTargets is a flag to disable processing of attestations for old blocks. + disableLastEpochTargets = &cli.BoolFlag{ + Name: "disable-last-epoch-targets", + Usage: "Disables processing of last epoch targets.", + } ) // devModeFlags holds list of flags that are set when development mode is on. @@ -257,6 +262,7 @@ var BeaconChainFlags = combinedFlags([]cli.Flag{ enableExperimentalAttestationPool, forceHeadFlag, blacklistRoots, + disableLastEpochTargets, }, deprecatedBeaconFlags, deprecatedFlags, upcomingDeprecation) func combinedFlags(flags ...[]cli.Flag) []cli.Flag { From d3d7f67bec493870b64b94bef96fe1b570753e26 Mon Sep 17 00:00:00 2001 From: Potuz Date: Wed, 5 Nov 2025 12:54:38 -0500 Subject: [PATCH 079/103] Use head for block validation when possible (#15972) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Use head for block validation when possible When validating blocks for pubsub, we always copy a state and advance when we simply need to get a read only beacon state without a copy in most cases since the head state normally works. 
* fix test * fix tests * fix more tests * fix more tests * Add nil check to be safe * fix more tests * add test case 🤖 Generated with [Claude Code](https://claude.com/claude-code) Co-Authored-By: Claude --------- Co-authored-by: Claude --- beacon-chain/core/blocks/payload.go | 2 +- .../sync/pending_blocks_queue_test.go | 71 ++++++++---- beacon-chain/sync/validate_beacon_blocks.go | 94 ++++++++++++---- .../sync/validate_beacon_blocks_test.go | 103 +++++++++++++++++- changelog/potuz_use_head_block_validation.md | 3 + 5 files changed, 222 insertions(+), 51 deletions(-) create mode 100644 changelog/potuz_use_head_block_validation.md diff --git a/beacon-chain/core/blocks/payload.go b/beacon-chain/core/blocks/payload.go index 02dfa0ffb7..0d647025f8 100644 --- a/beacon-chain/core/blocks/payload.go +++ b/beacon-chain/core/blocks/payload.go @@ -86,7 +86,7 @@ func IsExecutionBlock(body interfaces.ReadOnlyBeaconBlockBody) (bool, error) { // def is_execution_enabled(state: BeaconState, body: ReadOnlyBeaconBlockBody) -> bool: // // return is_merge_block(state, body) or is_merge_complete(state) -func IsExecutionEnabled(st state.BeaconState, body interfaces.ReadOnlyBeaconBlockBody) (bool, error) { +func IsExecutionEnabled(st state.ReadOnlyBeaconState, body interfaces.ReadOnlyBeaconBlockBody) (bool, error) { if st == nil || body == nil { return false, errors.New("nil state or block body") } diff --git a/beacon-chain/sync/pending_blocks_queue_test.go b/beacon-chain/sync/pending_blocks_queue_test.go index 92628d57c0..ab0dfe78a7 100644 --- a/beacon-chain/sync/pending_blocks_queue_test.go +++ b/beacon-chain/sync/pending_blocks_queue_test.go @@ -43,15 +43,16 @@ func TestRegularSyncBeaconBlockSubscriber_ProcessPendingBlocks1(t *testing.T) { db := dbtest.SetupDB(t) p1 := p2ptest.NewTestP2P(t) + mockChain := &mock.ChainService{ + FinalizedCheckPoint: &ethpb.Checkpoint{ + Epoch: 0, + }, + } r := &Service{ cfg: &config{ p2p: p1, beaconDB: db, - chain: &mock.ChainService{ - FinalizedCheckPoint: &ethpb.Checkpoint{ - Epoch: 0, - }, - }, + chain: mockChain, clock: startup.NewClock(time.Unix(0, 0), [32]byte{}), stateGen: stategen.New(db, doublylinkedtree.New()), }, @@ -64,6 +65,12 @@ func TestRegularSyncBeaconBlockSubscriber_ProcessPendingBlocks1(t *testing.T) { util.SaveBlock(t, t.Context(), r.cfg.beaconDB, b0) b0Root, err := b0.Block.HashTreeRoot() require.NoError(t, err) + + // Setup head state for blockVerifyingState logic + st, err := util.NewBeaconState() + require.NoError(t, err) + mockChain.Root = b0Root[:] + mockChain.State = st b3 := util.NewBeaconBlock() b3.Block.Slot = 3 b3.Block.ParentRoot = b0Root[:] @@ -115,16 +122,17 @@ func TestRegularSyncBeaconBlockSubscriber_OptimisticStatus(t *testing.T) { db := dbtest.SetupDB(t) p1 := p2ptest.NewTestP2P(t) + mockChain := &mock.ChainService{ + Optimistic: true, + FinalizedCheckPoint: &ethpb.Checkpoint{ + Epoch: 0, + }, + } r := &Service{ cfg: &config{ p2p: p1, beaconDB: db, - chain: &mock.ChainService{ - Optimistic: true, - FinalizedCheckPoint: &ethpb.Checkpoint{ - Epoch: 0, - }, - }, + chain: mockChain, clock: startup.NewClock(time.Unix(0, 0), [32]byte{}), stateGen: stategen.New(db, doublylinkedtree.New()), }, @@ -137,6 +145,12 @@ func TestRegularSyncBeaconBlockSubscriber_OptimisticStatus(t *testing.T) { util.SaveBlock(t, t.Context(), r.cfg.beaconDB, b0) b0Root, err := b0.Block.HashTreeRoot() require.NoError(t, err) + + // Setup head state for blockVerifyingState logic + st, err := util.NewBeaconState() + require.NoError(t, err) + mockChain.Root = b0Root[:] + mockChain.State =
st b3 := util.NewBeaconBlock() b3.Block.Slot = 3 b3.Block.ParentRoot = b0Root[:] @@ -189,16 +203,17 @@ func TestRegularSyncBeaconBlockSubscriber_ExecutionEngineTimesOut(t *testing.T) p1 := p2ptest.NewTestP2P(t) fcs := doublylinkedtree.New() + mockChain := &mock.ChainService{ + FinalizedCheckPoint: ðpb.Checkpoint{ + Epoch: 0, + }, + ReceiveBlockMockErr: execution.ErrHTTPTimeout, + } r := &Service{ cfg: &config{ p2p: p1, beaconDB: db, - chain: &mock.ChainService{ - FinalizedCheckPoint: ðpb.Checkpoint{ - Epoch: 0, - }, - ReceiveBlockMockErr: execution.ErrHTTPTimeout, - }, + chain: mockChain, clock: startup.NewClock(time.Unix(0, 0), [32]byte{}), stateGen: stategen.New(db, fcs), }, @@ -211,6 +226,12 @@ func TestRegularSyncBeaconBlockSubscriber_ExecutionEngineTimesOut(t *testing.T) util.SaveBlock(t, t.Context(), r.cfg.beaconDB, b0) b0Root, err := b0.Block.HashTreeRoot() require.NoError(t, err) + + // Setup head state for blockVerifyingState logic + st, err := util.NewBeaconState() + require.NoError(t, err) + mockChain.Root = b0Root[:] + mockChain.State = st b3 := util.NewBeaconBlock() b3.Block.Slot = 3 b3.Block.ParentRoot = b0Root[:] @@ -412,6 +433,14 @@ func TestRegularSyncBeaconBlockSubscriber_ProcessPendingBlocks_2Chains(t *testin util.SaveBlock(t, t.Context(), r.cfg.beaconDB, b0) b0Root, err := b0.Block.HashTreeRoot() require.NoError(t, err) + + // Setup head state for blockVerifyingState logic + st, err := util.NewBeaconState() + require.NoError(t, err) + mockChain := r.cfg.chain.(*mock.ChainService) + mockChain.Root = b0Root[:] + mockChain.State = st + b1 := util.NewBeaconBlock() b1.Block.Slot = 1 b1.Block.ParentRoot = b0Root[:] @@ -741,10 +770,8 @@ func TestService_ProcessPendingBlockOnCorrectSlot(t *testing.T) { proposerIdx, err := helpers.BeaconProposerIndex(ctx, copied) require.NoError(t, err) - st, err := util.NewBeaconState() - require.NoError(t, err) mockChain.Root = bRoot[:] - mockChain.State = st + mockChain.State = beaconState b1 := util.NewBeaconBlock() b1.Block.ParentRoot = bRoot[:] @@ -819,10 +846,8 @@ func TestService_ProcessBadPendingBlocks(t *testing.T) { proposerIdx, err := helpers.BeaconProposerIndex(ctx, copied) require.NoError(t, err) - st, err := util.NewBeaconState() - require.NoError(t, err) mockChain.Root = bRoot[:] - mockChain.State = st + mockChain.State = beaconState b1 := util.NewBeaconBlock() b1.Block.ParentRoot = bRoot[:] diff --git a/beacon-chain/sync/validate_beacon_blocks.go b/beacon-chain/sync/validate_beacon_blocks.go index 8418ff9819..07bba00446 100644 --- a/beacon-chain/sync/validate_beacon_blocks.go +++ b/beacon-chain/sync/validate_beacon_blocks.go @@ -1,6 +1,7 @@ package sync import ( + "bytes" "context" "fmt" "time" @@ -105,10 +106,6 @@ func (s *Service) validateBeaconBlockPubSub(ctx context.Context, pid peer.ID, ms }() } - if err := validateDenebBeaconBlock(blk.Block()); err != nil { - return pubsub.ValidationReject, err - } - // Verify the block is the first block received for the proposer for the slot. 
if s.hasSeenBlockIndexSlot(blk.Block().Slot(), blk.Block().ProposerIndex()) { // Attempt to detect and broadcast equivocation before ignoring @@ -262,12 +259,15 @@ func (s *Service) validateBeaconBlock(ctx context.Context, blk interfaces.ReadOn return err } - parentState, err := s.validatePhase0Block(ctx, blk, blockRoot) + verifyingState, err := s.validatePhase0Block(ctx, blk, blockRoot) if err != nil { return err } + if verifyingState == nil { + return errors.New("could not get verifying state") + } - if err = s.validateBellatrixBeaconBlock(ctx, parentState, blk.Block()); err != nil { + if err = s.validateBellatrixBeaconBlock(ctx, verifyingState, blk.Block()); err != nil { if errors.Is(err, ErrOptimisticParent) { return err } @@ -282,31 +282,25 @@ func (s *Service) validateBeaconBlock(ctx context.Context, blk interfaces.ReadOn // - Checks that the parent is in our forkchoice tree. // - Validates that the proposer signature is valid. // - Validates that the proposer index is valid. -func (s *Service) validatePhase0Block(ctx context.Context, blk interfaces.ReadOnlySignedBeaconBlock, blockRoot [32]byte) (state.BeaconState, error) { +// Returns a state that has compatible Randao Mix and active validator indices as the block's parent state advanced to the block's slot. +// This state can be used for further block validations. +func (s *Service) validatePhase0Block(ctx context.Context, blk interfaces.ReadOnlySignedBeaconBlock, blockRoot [32]byte) (state.ReadOnlyBeaconState, error) { if !s.cfg.chain.InForkchoice(blk.Block().ParentRoot()) { s.setBadBlock(ctx, blockRoot) return nil, blockchain.ErrNotDescendantOfFinalized } - parentState, err := s.cfg.stateGen.StateByRoot(ctx, blk.Block().ParentRoot()) + verifyingState, err := s.blockVerifyingState(ctx, blk) if err != nil { return nil, err } - - if err := blocks.VerifyBlockSignatureUsingCurrentFork(parentState, blk, blockRoot); err != nil { + if err := blocks.VerifyBlockSignatureUsingCurrentFork(verifyingState, blk, blockRoot); err != nil { if errors.Is(err, blocks.ErrInvalidSignature) { s.setBadBlock(ctx, blockRoot) } return nil, err } - // In the event the block is more than an epoch ahead from its - // parent state, we have to advance the state forward. - parentRoot := blk.Block().ParentRoot() - parentState, err = transition.ProcessSlotsUsingNextSlotCache(ctx, parentState, parentRoot[:], blk.Block().Slot()) - if err != nil { - return nil, err - } - idx, err := helpers.BeaconProposerIndex(ctx, parentState) + idx, err := helpers.BeaconProposerIndexAtSlot(ctx, verifyingState, blk.Block().Slot()) if err != nil { return nil, err } @@ -314,7 +308,59 @@ func (s *Service) validatePhase0Block(ctx context.Context, blk interfaces.ReadOn s.setBadBlock(ctx, blockRoot) return nil, errors.New("incorrect proposer index") } - return parentState, nil + return verifyingState, nil +} + +// blockVerifyingState returns the appropriate state to verify the signature and proposer index of the given block. +// The returned state is guaranteed to be at the same epoch as the block's epoch, and have the same randao mix and active validator indices as the +// block's parent state advanced to the block's slot. 
+func (s *Service) blockVerifyingState(ctx context.Context, blk interfaces.ReadOnlySignedBeaconBlock) (state.ReadOnlyBeaconState, error) { + headRoot, err := s.cfg.chain.HeadRoot(ctx) + if err != nil { + return nil, err + } + parentRoot := blk.Block().ParentRoot() + blockSlot := blk.Block().Slot() + blockEpoch := slots.ToEpoch(blockSlot) + headSlot := s.cfg.chain.HeadSlot() + headEpoch := slots.ToEpoch(headSlot) + // Use head if it's the parent + if bytes.Equal(parentRoot[:], headRoot) { + // If they are in the same epoch, then we can return the head state directly + if blockEpoch == headEpoch { + return s.cfg.chain.HeadStateReadOnly(ctx) + } + // Otherwise, we need to process the head state to the block's slot + headState, err := s.cfg.chain.HeadState(ctx) + if err != nil { + return nil, err + } + return transition.ProcessSlotsUsingNextSlotCache(ctx, headState, headRoot, blockSlot) + } + // If head and block are in the same epoch and head is compatible with the parent's target, then use head + if blockEpoch == headEpoch { + headTarget, err := s.cfg.chain.TargetRootForEpoch([32]byte(headRoot), blockEpoch) + if err != nil { + return nil, err + } + parentTarget, err := s.cfg.chain.TargetRootForEpoch([32]byte(parentRoot), blockEpoch) + if err != nil { + return nil, err + } + if bytes.Equal(headTarget[:], parentTarget[:]) { + return s.cfg.chain.HeadStateReadOnly(ctx) + } + } + // Otherwise retrieve the the parent state and advance it to the block's slot + parentState, err := s.cfg.stateGen.StateByRoot(ctx, parentRoot) + if err != nil { + return nil, err + } + parentEpoch := slots.ToEpoch(parentState.Slot()) + if blockEpoch == parentEpoch { + return parentState, nil + } + return transition.ProcessSlotsUsingNextSlotCache(ctx, parentState, parentRoot[:], blockSlot) } func validateDenebBeaconBlock(blk interfaces.ReadOnlyBeaconBlock) error { @@ -336,6 +382,8 @@ func validateDenebBeaconBlock(blk interfaces.ReadOnlyBeaconBlock) error { } // validateBellatrixBeaconBlock validates the block for the Bellatrix fork. +// The verifying state is used only to check if the chain is execution enabled. +// // spec code: // // If the execution is enabled for the block -- i.e. is_execution_enabled(state, block.body) then validate the following: @@ -348,14 +396,14 @@ func validateDenebBeaconBlock(blk interfaces.ReadOnlyBeaconBlock) error { // otherwise: // [IGNORE] The block's parent (defined by block.parent_root) passes all validation (including execution // node verification of the block.body.execution_payload). 
-func (s *Service) validateBellatrixBeaconBlock(ctx context.Context, parentState state.BeaconState, blk interfaces.ReadOnlyBeaconBlock) error { +func (s *Service) validateBellatrixBeaconBlock(ctx context.Context, verifyingState state.ReadOnlyBeaconState, blk interfaces.ReadOnlyBeaconBlock) error { // Error if block and state are not the same version - if parentState.Version() != blk.Version() { + if verifyingState.Version() != blk.Version() { return errors.New("block and state are not the same version") } body := blk.Body() - executionEnabled, err := blocks.IsExecutionEnabled(parentState, body) + executionEnabled, err := blocks.IsExecutionEnabled(verifyingState, body) if err != nil { return err } @@ -363,7 +411,7 @@ func (s *Service) validateBellatrixBeaconBlock(ctx context.Context, parentState return nil } - t, err := slots.StartTime(parentState.GenesisTime(), blk.Slot()) + t, err := slots.StartTime(verifyingState.GenesisTime(), blk.Slot()) if err != nil { return err } diff --git a/beacon-chain/sync/validate_beacon_blocks_test.go b/beacon-chain/sync/validate_beacon_blocks_test.go index a64aea417e..c11b13ffc6 100644 --- a/beacon-chain/sync/validate_beacon_blocks_test.go +++ b/beacon-chain/sync/validate_beacon_blocks_test.go @@ -73,7 +73,9 @@ func TestValidateBeaconBlockPubSub_InvalidSignature(t *testing.T) { Epoch: 0, Root: make([]byte, 32), }, - DB: db, + DB: db, + State: beaconState, + Root: bRoot[:], } r := &Service{ cfg: &config{ @@ -137,7 +139,9 @@ func TestValidateBeaconBlockPubSub_InvalidSignature_MarksBlockAsBad(t *testing.T Epoch: 0, Root: make([]byte, 32), }, - DB: db, + DB: db, + State: beaconState, + Root: bRoot[:], } r := &Service{ cfg: &config{ @@ -1301,7 +1305,10 @@ func TestValidateBeaconBlockPubSub_ValidExecutionPayload(t *testing.T) { FinalizedCheckPoint: ðpb.Checkpoint{ Epoch: 0, Root: make([]byte, 32), - }} + }, + State: beaconState, + Root: bRoot[:], + } r := &Service{ cfg: &config{ beaconDB: db, @@ -1536,7 +1543,10 @@ func Test_validateBeaconBlockProcessingWhenParentIsOptimistic(t *testing.T) { FinalizedCheckPoint: ðpb.Checkpoint{ Epoch: 0, Root: make([]byte, 32), - }} + }, + State: beaconState, + Root: bRoot[:], + } r := &Service{ cfg: &config{ beaconDB: db, @@ -1814,3 +1824,88 @@ func TestDetectAndBroadcastEquivocation(t *testing.T) { require.ErrorIs(t, err, ErrSlashingSignatureFailure) }) } + +func TestBlockVerifyingState_SameEpochAsParent(t *testing.T) { + ctx := t.Context() + db := dbtest.SetupDB(t) + + // Create a genesis state + beaconState, _ := util.DeterministicGenesisState(t, 100) + + // Create parent block at slot 1 + parentBlock := util.NewBeaconBlock() + parentBlock.Block.Slot = 1 + util.SaveBlock(t, ctx, db, parentBlock) + parentRoot, err := parentBlock.Block.HashTreeRoot() + require.NoError(t, err) + + // Save parent state at slot 1 (epoch 0) + parentState := beaconState.Copy() + require.NoError(t, parentState.SetSlot(1)) + require.NoError(t, db.SaveState(ctx, parentState, parentRoot)) + require.NoError(t, db.SaveStateSummary(ctx, ðpb.StateSummary{Root: parentRoot[:]})) + + // Create a different head block at a later epoch + headBlock := util.NewBeaconBlock() + headBlock.Block.Slot = 40 // Different epoch (epoch 1) + headBlock.Block.ParentRoot = parentRoot[:] // Head descends from parent + util.SaveBlock(t, ctx, db, headBlock) + headRoot, err := headBlock.Block.HashTreeRoot() + require.NoError(t, err) + + headState := beaconState.Copy() + require.NoError(t, headState.SetSlot(40)) + require.NoError(t, db.SaveState(ctx, headState, headRoot)) + + // 
Create a block at slot 2 (same epoch 0 as parent) + block := util.NewBeaconBlock() + block.Block.Slot = 2 + block.Block.ParentRoot = parentRoot[:] + signedBlock, err := blocks.NewSignedBeaconBlock(block) + require.NoError(t, err) + + forkchoiceStore := doublylinkedtree.New() + stateGen := stategen.New(db, forkchoiceStore) + + // Insert parent block into forkchoice + signedParentBlock, err := blocks.NewSignedBeaconBlock(parentBlock) + require.NoError(t, err) + roParentBlock, err := blocks.NewROBlockWithRoot(signedParentBlock, parentRoot) + require.NoError(t, err) + require.NoError(t, forkchoiceStore.InsertNode(ctx, parentState, roParentBlock)) + + // Insert head block into forkchoice + signedHeadBlock, err := blocks.NewSignedBeaconBlock(headBlock) + require.NoError(t, err) + roHeadBlock, err := blocks.NewROBlockWithRoot(signedHeadBlock, headRoot) + require.NoError(t, err) + require.NoError(t, forkchoiceStore.InsertNode(ctx, headState, roHeadBlock)) + + chainService := &mock.ChainService{ + DB: db, + Root: headRoot[:], // Head is different from parent + State: headState, // Set head state so HeadSlot() returns correct value + FinalizedCheckPoint: ðpb.Checkpoint{ + Epoch: 0, + Root: parentRoot[:], + }, + ForkChoiceStore: forkchoiceStore, + } + + r := &Service{ + cfg: &config{ + beaconDB: db, + chain: chainService, + stateGen: stateGen, + }, + } + + // Call blockVerifyingState - should return parent state without processing + result, err := r.blockVerifyingState(ctx, signedBlock) + require.NoError(t, err) + require.NotNil(t, result) + + // Verify that the returned state is at slot 1 (parent state slot) + // This confirms that the branch at line 361 was taken (returning parentState directly) + assert.Equal(t, primitives.Slot(1), result.Slot()) +} diff --git a/changelog/potuz_use_head_block_validation.md b/changelog/potuz_use_head_block_validation.md new file mode 100644 index 0000000000..29988d3891 --- /dev/null +++ b/changelog/potuz_use_head_block_validation.md @@ -0,0 +1,3 @@ +### Changed + +- Use head state for block pubsub validation when possible. 
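Before the next commit, a short illustration of the state-selection order that the patch above introduces in `blockVerifyingState` (beacon-chain/sync/validate_beacon_blocks.go) may help. The sketch below is editorial, not part of the patch: the types `ChainView`, `State`, and `Block`, and the `advance` helper standing in for `ProcessSlotsUsingNextSlotCache`, are hypothetical simplifications rather than Prysm APIs, and only the decision order mirrors the real function.

```go
// Minimal sketch of the verifying-state selection added in the patch above.
// All types here are simplified stand-ins, not Prysm's real interfaces.
package main

import "fmt"

const slotsPerEpoch = 32

type State struct{ Slot uint64 }

type Block struct {
	Slot       uint64
	ParentRoot [32]byte
}

// ChainView is a hypothetical stand-in for the chain and stategen services the
// real helper consults (head root/state, per-epoch target roots, StateByRoot).
type ChainView struct {
	HeadRoot    [32]byte
	HeadState   State
	Targets     map[[32]byte]uint64 // root -> target identifier for the block's epoch (simplified)
	StateByRoot map[[32]byte]State
}

func epoch(slot uint64) uint64 { return slot / slotsPerEpoch }

// advance stands in for ProcessSlotsUsingNextSlotCache; here it only bumps the slot.
func advance(s State, slot uint64) State { s.Slot = slot; return s }

// verifyingState mirrors the decision order of the patch:
//  1. parent is head, same epoch        -> use the head state as-is (cheapest path)
//  2. parent is head, later epoch       -> head state advanced to the block slot
//  3. head shares the parent's target   -> head state (compatible randao mix / active set)
//  4. otherwise                         -> parent state, advanced only if the epoch differs
func verifyingState(c ChainView, b Block) (State, string) {
	if b.ParentRoot == c.HeadRoot {
		if epoch(b.Slot) == epoch(c.HeadState.Slot) {
			return c.HeadState, "head state (parent is head, same epoch)"
		}
		return advance(c.HeadState, b.Slot), "head state advanced to block slot"
	}
	if epoch(b.Slot) == epoch(c.HeadState.Slot) && c.Targets[c.HeadRoot] == c.Targets[b.ParentRoot] {
		return c.HeadState, "head state (compatible target for the block epoch)"
	}
	parent := c.StateByRoot[b.ParentRoot]
	if epoch(b.Slot) == epoch(parent.Slot) {
		return parent, "parent state (same epoch, no slot processing)"
	}
	return advance(parent, b.Slot), "parent state advanced to block slot"
}

func main() {
	head, parent := [32]byte{1}, [32]byte{2}
	c := ChainView{
		HeadRoot:    head,
		HeadState:   State{Slot: 40},
		Targets:     map[[32]byte]uint64{head: 7, parent: 7},
		StateByRoot: map[[32]byte]State{parent: {Slot: 1}},
	}
	// A block in the head's epoch whose parent is not head but shares the head's
	// target: the sketch reuses the head state instead of fetching the parent state.
	_, why := verifyingState(c, Block{Slot: 41, ParentRoot: parent})
	fmt.Println(why)
}
```

As the patch's own comments note, the verifying state only needs the same randao mix and active validator indices as the block's parent state advanced to the block's slot, which is why a matching target root for the block's epoch is treated as sufficient to reuse the head state and avoid a `StateByRoot` fetch.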
From 8ad547c969a35906edcb9bc491fdb38a61341bbf Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rados=C5=82aw=20Kapka?= Date: Wed, 5 Nov 2025 19:20:34 +0100 Subject: [PATCH 080/103] Remove Beacon API endpoints that were deprecated in Electra (#15962) * Remove Beacon API endpoints that were deprecated in Electra * changelog <3 * build fix * remove more stuff * fix post-submit e2e and remove structs * list endpoints in the changelog --------- Co-authored-by: james-prysm <90280386+james-prysm@users.noreply.github.com> --- api/server/structs/BUILD.bazel | 1 - api/server/structs/conversions.go | 14 - api/server/structs/conversions_test.go | 18 - api/server/structs/endpoints_beacon.go | 12 - api/server/structs/endpoints_builder.go | 14 - beacon-chain/rpc/BUILD.bazel | 1 - beacon-chain/rpc/endpoints.go | 138 -- beacon-chain/rpc/endpoints_test.go | 14 +- beacon-chain/rpc/eth/beacon/BUILD.bazel | 2 - beacon-chain/rpc/eth/beacon/handlers.go | 116 -- beacon-chain/rpc/eth/beacon/handlers_pool.go | 150 -- .../rpc/eth/beacon/handlers_pool_test.go | 1499 ++++++----------- beacon-chain/rpc/eth/beacon/handlers_test.go | 1440 ++-------------- beacon-chain/rpc/eth/builder/BUILD.bazel | 47 - beacon-chain/rpc/eth/builder/handlers.go | 132 -- beacon-chain/rpc/eth/builder/handlers_test.go | 210 --- beacon-chain/rpc/eth/builder/server.go | 12 - beacon-chain/rpc/eth/validator/handlers.go | 84 - .../rpc/eth/validator/handlers_test.go | 1085 +++++------- changelog/radek_v7-remove-apis.md | 3 + .../endtoend/evaluators/beaconapi/requests.go | 10 +- .../client/beacon-api/propose_attestation.go | 21 +- .../beacon-api/propose_attestation_test.go | 68 - .../submit_aggregate_selection_proof.go | 19 +- .../submit_aggregate_selection_proof_test.go | 125 -- .../submit_signed_aggregate_proof.go | 21 +- .../submit_signed_aggregate_proof_test.go | 46 - 27 files changed, 1076 insertions(+), 4226 deletions(-) delete mode 100644 api/server/structs/endpoints_builder.go delete mode 100644 beacon-chain/rpc/eth/builder/BUILD.bazel delete mode 100644 beacon-chain/rpc/eth/builder/handlers.go delete mode 100644 beacon-chain/rpc/eth/builder/handlers_test.go delete mode 100644 beacon-chain/rpc/eth/builder/server.go create mode 100644 changelog/radek_v7-remove-apis.md diff --git a/api/server/structs/BUILD.bazel b/api/server/structs/BUILD.bazel index 03d2d4eb65..de7d388966 100644 --- a/api/server/structs/BUILD.bazel +++ b/api/server/structs/BUILD.bazel @@ -13,7 +13,6 @@ go_library( "conversions_state.go", "endpoints_beacon.go", "endpoints_blob.go", - "endpoints_builder.go", "endpoints_config.go", "endpoints_debug.go", "endpoints_events.go", diff --git a/api/server/structs/conversions.go b/api/server/structs/conversions.go index 6715034435..4e3b5be8a1 100644 --- a/api/server/structs/conversions.go +++ b/api/server/structs/conversions.go @@ -1492,20 +1492,6 @@ func sszBytesToUint256String(b []byte) (string, error) { return bi.String(), nil } -func DepositSnapshotFromConsensus(ds *eth.DepositSnapshot) *DepositSnapshot { - finalized := make([]string, 0, len(ds.Finalized)) - for _, f := range ds.Finalized { - finalized = append(finalized, hexutil.Encode(f)) - } - return &DepositSnapshot{ - Finalized: finalized, - DepositRoot: hexutil.Encode(ds.DepositRoot), - DepositCount: fmt.Sprintf("%d", ds.DepositCount), - ExecutionBlockHash: hexutil.Encode(ds.ExecutionHash), - ExecutionBlockHeight: fmt.Sprintf("%d", ds.ExecutionDepth), - } -} - func PendingDepositsFromConsensus(ds []*eth.PendingDeposit) []*PendingDeposit { deposits := make([]*PendingDeposit, len(ds)) 
for i, d := range ds { diff --git a/api/server/structs/conversions_test.go b/api/server/structs/conversions_test.go index fae4ecefbe..5e98364c33 100644 --- a/api/server/structs/conversions_test.go +++ b/api/server/structs/conversions_test.go @@ -9,24 +9,6 @@ import ( "github.com/ethereum/go-ethereum/common/hexutil" ) -func TestDepositSnapshotFromConsensus(t *testing.T) { - ds := ð.DepositSnapshot{ - Finalized: [][]byte{{0xde, 0xad, 0xbe, 0xef}, {0xca, 0xfe, 0xba, 0xbe}}, - DepositRoot: []byte{0xab, 0xcd}, - DepositCount: 12345, - ExecutionHash: []byte{0x12, 0x34}, - ExecutionDepth: 67890, - } - - res := DepositSnapshotFromConsensus(ds) - require.NotNil(t, res) - require.DeepEqual(t, []string{"0xdeadbeef", "0xcafebabe"}, res.Finalized) - require.Equal(t, "0xabcd", res.DepositRoot) - require.Equal(t, "12345", res.DepositCount) - require.Equal(t, "0x1234", res.ExecutionBlockHash) - require.Equal(t, "67890", res.ExecutionBlockHeight) -} - func TestSignedBLSToExecutionChange_ToConsensus(t *testing.T) { s := &SignedBLSToExecutionChange{Message: nil, Signature: ""} _, err := s.ToConsensus() diff --git a/api/server/structs/endpoints_beacon.go b/api/server/structs/endpoints_beacon.go index 3a2788c141..511d452cc0 100644 --- a/api/server/structs/endpoints_beacon.go +++ b/api/server/structs/endpoints_beacon.go @@ -206,18 +206,6 @@ type WeakSubjectivityData struct { StateRoot string `json:"state_root"` } -type GetDepositSnapshotResponse struct { - Data *DepositSnapshot `json:"data"` -} - -type DepositSnapshot struct { - Finalized []string `json:"finalized"` - DepositRoot string `json:"deposit_root"` - DepositCount string `json:"deposit_count"` - ExecutionBlockHash string `json:"execution_block_hash"` - ExecutionBlockHeight string `json:"execution_block_height"` -} - type GetIndividualVotesRequest struct { Epoch string `json:"epoch"` PublicKeys []string `json:"public_keys,omitempty"` diff --git a/api/server/structs/endpoints_builder.go b/api/server/structs/endpoints_builder.go deleted file mode 100644 index e55af7cc9d..0000000000 --- a/api/server/structs/endpoints_builder.go +++ /dev/null @@ -1,14 +0,0 @@ -package structs - -type ExpectedWithdrawalsResponse struct { - Data []*ExpectedWithdrawal `json:"data"` - ExecutionOptimistic bool `json:"execution_optimistic"` - Finalized bool `json:"finalized"` -} - -type ExpectedWithdrawal struct { - Address string `json:"address" hex:"true"` - Amount string `json:"amount"` - Index string `json:"index"` - ValidatorIndex string `json:"validator_index"` -} diff --git a/beacon-chain/rpc/BUILD.bazel b/beacon-chain/rpc/BUILD.bazel index 40abe188fa..6e3cb833a1 100644 --- a/beacon-chain/rpc/BUILD.bazel +++ b/beacon-chain/rpc/BUILD.bazel @@ -33,7 +33,6 @@ go_library( "//beacon-chain/rpc/core:go_default_library", "//beacon-chain/rpc/eth/beacon:go_default_library", "//beacon-chain/rpc/eth/blob:go_default_library", - "//beacon-chain/rpc/eth/builder:go_default_library", "//beacon-chain/rpc/eth/config:go_default_library", "//beacon-chain/rpc/eth/debug:go_default_library", "//beacon-chain/rpc/eth/events:go_default_library", diff --git a/beacon-chain/rpc/endpoints.go b/beacon-chain/rpc/endpoints.go index b2d4de50df..065ff0bd9c 100644 --- a/beacon-chain/rpc/endpoints.go +++ b/beacon-chain/rpc/endpoints.go @@ -8,7 +8,6 @@ import ( "github.com/OffchainLabs/prysm/v6/beacon-chain/rpc/core" "github.com/OffchainLabs/prysm/v6/beacon-chain/rpc/eth/beacon" "github.com/OffchainLabs/prysm/v6/beacon-chain/rpc/eth/blob" - "github.com/OffchainLabs/prysm/v6/beacon-chain/rpc/eth/builder" 
"github.com/OffchainLabs/prysm/v6/beacon-chain/rpc/eth/config" "github.com/OffchainLabs/prysm/v6/beacon-chain/rpc/eth/debug" "github.com/OffchainLabs/prysm/v6/beacon-chain/rpc/eth/events" @@ -90,7 +89,6 @@ func (s *Service) endpoints( ) []endpoint { endpoints := make([]endpoint, 0) endpoints = append(endpoints, s.rewardsEndpoints(blocker, stater, rewardFetcher)...) - endpoints = append(endpoints, s.builderEndpoints(stater)...) endpoints = append(endpoints, s.blobEndpoints(blocker)...) endpoints = append(endpoints, s.validatorEndpoints(validatorServer, stater, coreService, rewardFetcher)...) endpoints = append(endpoints, s.nodeEndpoints()...) @@ -160,29 +158,6 @@ func (s *Service) rewardsEndpoints(blocker lookup.Blocker, stater lookup.Stater, } } -func (s *Service) builderEndpoints(stater lookup.Stater) []endpoint { - server := &builder.Server{ - FinalizationFetcher: s.cfg.FinalizationFetcher, - OptimisticModeFetcher: s.cfg.OptimisticModeFetcher, - Stater: stater, - } - - const namespace = "builder" - return []endpoint{ - { - // Deprecated: use SSE from /eth/v1/events for `Payload Attributes` instead - template: "/eth/v1/builder/states/{state_id}/expected_withdrawals", - name: namespace + ".ExpectedWithdrawals", - middleware: []middleware.Middleware{ - middleware.AcceptHeaderHandler([]string{api.JsonMediaType, api.OctetStreamMediaType}), - middleware.AcceptEncodingHeaderHandler(), - }, - handler: server.ExpectedWithdrawals, - methods: []string{http.MethodGet}, - }, - } -} - func (s *Service) blobEndpoints(blocker lookup.Blocker) []endpoint { server := &blob.Server{ Blocker: blocker, @@ -248,17 +223,6 @@ func (s *Service) validatorEndpoints( const namespace = "validator" return []endpoint{ - { - // Deprecated: use /eth/v2/validator/aggregate_attestation instead - template: "/eth/v1/validator/aggregate_attestation", - name: namespace + ".GetAggregateAttestation", - middleware: []middleware.Middleware{ - middleware.AcceptHeaderHandler([]string{api.JsonMediaType}), - middleware.AcceptEncodingHeaderHandler(), - }, - handler: server.GetAggregateAttestation, - methods: []string{http.MethodGet}, - }, { template: "/eth/v2/validator/aggregate_attestation", name: namespace + ".GetAggregateAttestationV2", @@ -280,18 +244,6 @@ func (s *Service) validatorEndpoints( handler: server.SubmitContributionAndProofs, methods: []string{http.MethodPost}, }, - { - // Deprecated: use /eth/v2/validator/aggregate_and_proofs instead - template: "/eth/v1/validator/aggregate_and_proofs", - name: namespace + ".SubmitAggregateAndProofs", - middleware: []middleware.Middleware{ - middleware.ContentTypeHandler([]string{api.JsonMediaType}), - middleware.AcceptHeaderHandler([]string{api.JsonMediaType}), - middleware.AcceptEncodingHeaderHandler(), - }, - handler: server.SubmitAggregateAndProofs, - methods: []string{http.MethodPost}, - }, { template: "/eth/v2/validator/aggregate_and_proofs", name: namespace + ".SubmitAggregateAndProofsV2", @@ -618,30 +570,6 @@ func (s *Service) beaconEndpoints( handler: server.GetRandao, methods: []string{http.MethodGet}, }, - { - // Deprecated: use /eth/v2/beacon/blocks instead - template: "/eth/v1/beacon/blocks", - name: namespace + ".PublishBlock", - middleware: []middleware.Middleware{ - middleware.ContentTypeHandler([]string{api.JsonMediaType, api.OctetStreamMediaType}), - middleware.AcceptHeaderHandler([]string{api.JsonMediaType}), - middleware.AcceptEncodingHeaderHandler(), - }, - handler: server.PublishBlock, - methods: []string{http.MethodPost}, - }, - { - // Deprecated: use 
/eth/v2/beacon/blinded_blocks instead - template: "/eth/v1/beacon/blinded_blocks", - name: namespace + ".PublishBlindedBlock", - middleware: []middleware.Middleware{ - middleware.ContentTypeHandler([]string{api.JsonMediaType, api.OctetStreamMediaType}), - middleware.AcceptHeaderHandler([]string{api.JsonMediaType}), - middleware.AcceptEncodingHeaderHandler(), - }, - handler: server.PublishBlindedBlock, - methods: []string{http.MethodPost}, - }, { template: "/eth/v2/beacon/blocks", name: namespace + ".PublishBlockV2", @@ -674,17 +602,6 @@ func (s *Service) beaconEndpoints( handler: server.GetBlockV2, methods: []string{http.MethodGet}, }, - { - // Deprecated: use /eth/v2/beacon/blocks/{block_id}/attestations instead - template: "/eth/v1/beacon/blocks/{block_id}/attestations", - name: namespace + ".GetBlockAttestations", - middleware: []middleware.Middleware{ - middleware.AcceptHeaderHandler([]string{api.JsonMediaType}), - middleware.AcceptEncodingHeaderHandler(), - }, - handler: server.GetBlockAttestations, - methods: []string{http.MethodGet}, - }, { template: "/eth/v2/beacon/blocks/{block_id}/attestations", name: namespace + ".GetBlockAttestationsV2", @@ -715,17 +632,6 @@ func (s *Service) beaconEndpoints( handler: server.GetBlockRoot, methods: []string{http.MethodGet}, }, - { - // Deprecated: use /eth/v2/beacon/pool/attestations instead - template: "/eth/v1/beacon/pool/attestations", - name: namespace + ".ListAttestations", - middleware: []middleware.Middleware{ - middleware.AcceptHeaderHandler([]string{api.JsonMediaType}), - middleware.AcceptEncodingHeaderHandler(), - }, - handler: server.ListAttestations, - methods: []string{http.MethodGet}, - }, { template: "/eth/v2/beacon/pool/attestations", name: namespace + ".ListAttestationsV2", @@ -736,17 +642,6 @@ func (s *Service) beaconEndpoints( handler: server.ListAttestationsV2, methods: []string{http.MethodGet}, }, - { - template: "/eth/v1/beacon/pool/attestations", - name: namespace + ".SubmitAttestations", - middleware: []middleware.Middleware{ - middleware.ContentTypeHandler([]string{api.JsonMediaType}), - middleware.AcceptHeaderHandler([]string{api.JsonMediaType}), - middleware.AcceptEncodingHeaderHandler(), - }, - handler: server.SubmitAttestations, - methods: []string{http.MethodPost}, - }, { template: "/eth/v2/beacon/pool/attestations", name: namespace + ".SubmitAttestationsV2", @@ -811,17 +706,6 @@ func (s *Service) beaconEndpoints( handler: server.SubmitBLSToExecutionChanges, methods: []string{http.MethodPost}, }, - { - // Deprecated: use /eth/v2/beacon/pool/attester_slashings instead - template: "/eth/v1/beacon/pool/attester_slashings", - name: namespace + ".GetAttesterSlashings", - middleware: []middleware.Middleware{ - middleware.AcceptHeaderHandler([]string{api.JsonMediaType}), - middleware.AcceptEncodingHeaderHandler(), - }, - handler: server.GetAttesterSlashings, - methods: []string{http.MethodGet}, - }, { template: "/eth/v2/beacon/pool/attester_slashings", name: namespace + ".GetAttesterSlashingsV2", @@ -832,17 +716,6 @@ func (s *Service) beaconEndpoints( handler: server.GetAttesterSlashingsV2, methods: []string{http.MethodGet}, }, - { - template: "/eth/v1/beacon/pool/attester_slashings", - name: namespace + ".SubmitAttesterSlashings", - middleware: []middleware.Middleware{ - middleware.ContentTypeHandler([]string{api.JsonMediaType}), - middleware.AcceptHeaderHandler([]string{api.JsonMediaType}), - middleware.AcceptEncodingHeaderHandler(), - }, - handler: server.SubmitAttesterSlashings, - methods: []string{http.MethodPost}, - 
}, { template: "/eth/v2/beacon/pool/attester_slashings", name: namespace + ".SubmitAttesterSlashingsV2", @@ -957,17 +830,6 @@ func (s *Service) beaconEndpoints( handler: server.GetValidatorIdentities, methods: []string{http.MethodPost}, }, - { - // Deprecated: no longer needed post Electra - template: "/eth/v1/beacon/deposit_snapshot", - name: namespace + ".GetDepositSnapshot", - middleware: []middleware.Middleware{ - middleware.AcceptHeaderHandler([]string{api.JsonMediaType}), - middleware.AcceptEncodingHeaderHandler(), - }, - handler: server.GetDepositSnapshot, - methods: []string{http.MethodGet}, - }, { template: "/eth/v1/beacon/states/{state_id}/pending_deposits", name: namespace + ".GetPendingDeposits", diff --git a/beacon-chain/rpc/endpoints_test.go b/beacon-chain/rpc/endpoints_test.go index e2a579cdd7..a55909a3c3 100644 --- a/beacon-chain/rpc/endpoints_test.go +++ b/beacon-chain/rpc/endpoints_test.go @@ -35,20 +35,14 @@ func Test_endpoints(t *testing.T) { "/eth/v1/beacon/states/{state_id}/proposer_lookahead": {http.MethodGet}, "/eth/v1/beacon/headers": {http.MethodGet}, "/eth/v1/beacon/headers/{block_id}": {http.MethodGet}, - "/eth/v1/beacon/blinded_blocks": {http.MethodPost}, "/eth/v2/beacon/blinded_blocks": {http.MethodPost}, - "/eth/v1/beacon/blocks": {http.MethodPost}, "/eth/v2/beacon/blocks": {http.MethodPost}, "/eth/v2/beacon/blocks/{block_id}": {http.MethodGet}, "/eth/v1/beacon/blocks/{block_id}/root": {http.MethodGet}, - "/eth/v1/beacon/blocks/{block_id}/attestations": {http.MethodGet}, "/eth/v2/beacon/blocks/{block_id}/attestations": {http.MethodGet}, "/eth/v1/beacon/blob_sidecars/{block_id}": {http.MethodGet}, - "/eth/v1/beacon/deposit_snapshot": {http.MethodGet}, "/eth/v1/beacon/blinded_blocks/{block_id}": {http.MethodGet}, - "/eth/v1/beacon/pool/attestations": {http.MethodGet, http.MethodPost}, "/eth/v2/beacon/pool/attestations": {http.MethodGet, http.MethodPost}, - "/eth/v1/beacon/pool/attester_slashings": {http.MethodGet, http.MethodPost}, "/eth/v2/beacon/pool/attester_slashings": {http.MethodGet, http.MethodPost}, "/eth/v1/beacon/pool/proposer_slashings": {http.MethodGet, http.MethodPost}, "/eth/v1/beacon/pool/sync_committees": {http.MethodPost}, @@ -64,10 +58,6 @@ func Test_endpoints(t *testing.T) { "/eth/v1/beacon/light_client/optimistic_update": {http.MethodGet}, } - builderRoutes := map[string][]string{ - "/eth/v1/builder/states/{state_id}/expected_withdrawals": {http.MethodGet}, - } - blobRoutes := map[string][]string{ "/eth/v1/beacon/blob_sidecars/{block_id}": {http.MethodGet}, "/eth/v1/beacon/blobs/{block_id}": {http.MethodGet}, @@ -106,9 +96,7 @@ func Test_endpoints(t *testing.T) { "/eth/v1/validator/duties/sync/{epoch}": {http.MethodPost}, "/eth/v3/validator/blocks/{slot}": {http.MethodGet}, "/eth/v1/validator/attestation_data": {http.MethodGet}, - "/eth/v1/validator/aggregate_attestation": {http.MethodGet}, "/eth/v2/validator/aggregate_attestation": {http.MethodGet}, - "/eth/v1/validator/aggregate_and_proofs": {http.MethodPost}, "/eth/v2/validator/aggregate_and_proofs": {http.MethodPost}, "/eth/v1/validator/beacon_committee_subscriptions": {http.MethodPost}, "/eth/v1/validator/sync_committee_subscriptions": {http.MethodPost}, @@ -182,7 +170,7 @@ func Test_endpoints(t *testing.T) { } expectedRoutes := make(map[string][]string) for _, m := range []map[string][]string{ - beaconRoutes, builderRoutes, configRoutes, debugRoutes, eventsRoutes, + beaconRoutes, configRoutes, debugRoutes, eventsRoutes, nodeRoutes, validatorRoutes, rewardsRoutes, blobRoutes, 
prysmValidatorRoutes, prysmNodeRoutes, prysmBeaconRoutes, } { diff --git a/beacon-chain/rpc/eth/beacon/BUILD.bazel b/beacon-chain/rpc/eth/beacon/BUILD.bazel index b937dad9c8..5f5eb0e52b 100644 --- a/beacon-chain/rpc/eth/beacon/BUILD.bazel +++ b/beacon-chain/rpc/eth/beacon/BUILD.bazel @@ -20,7 +20,6 @@ go_library( "//beacon-chain/blockchain:go_default_library", "//beacon-chain/blockchain/kzg:go_default_library", "//beacon-chain/cache:go_default_library", - "//beacon-chain/cache/depositsnapshot:go_default_library", "//beacon-chain/core/altair:go_default_library", "//beacon-chain/core/blocks:go_default_library", "//beacon-chain/core/feed:go_default_library", @@ -86,7 +85,6 @@ go_test( "//api/server/structs:go_default_library", "//beacon-chain/blockchain/kzg:go_default_library", "//beacon-chain/blockchain/testing:go_default_library", - "//beacon-chain/cache/depositsnapshot:go_default_library", "//beacon-chain/core/signing:go_default_library", "//beacon-chain/core/time:go_default_library", "//beacon-chain/core/transition:go_default_library", diff --git a/beacon-chain/rpc/eth/beacon/handlers.go b/beacon-chain/rpc/eth/beacon/handlers.go index acc437f04d..ffc9a25198 100644 --- a/beacon-chain/rpc/eth/beacon/handlers.go +++ b/beacon-chain/rpc/eth/beacon/handlers.go @@ -14,7 +14,6 @@ import ( "github.com/OffchainLabs/prysm/v6/api" "github.com/OffchainLabs/prysm/v6/api/server/structs" "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/kzg" - "github.com/OffchainLabs/prysm/v6/beacon-chain/cache/depositsnapshot" corehelpers "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" "github.com/OffchainLabs/prysm/v6/beacon-chain/core/transition" "github.com/OffchainLabs/prysm/v6/beacon-chain/db/filters" @@ -293,35 +292,6 @@ func (s *Server) getBlockResponseBodyJson(ctx context.Context, blk interfaces.Re }, nil } -// Deprecated: use GetBlockAttestationsV2 instead -// GetBlockAttestations retrieves attestation included in requested block. -func (s *Server) GetBlockAttestations(w http.ResponseWriter, r *http.Request) { - ctx, span := trace.StartSpan(r.Context(), "beacon.GetBlockAttestations") - defer span.End() - - blk, isOptimistic, root := s.blockData(ctx, w, r) - if blk == nil { - return - } - consensusAtts := blk.Block().Body().Attestations() - atts := make([]*structs.Attestation, len(consensusAtts)) - for i, att := range consensusAtts { - a, ok := att.(*eth.Attestation) - if ok { - atts[i] = structs.AttFromConsensus(a) - } else { - httputil.HandleError(w, fmt.Sprintf("unable to convert consensus attestations of type %T", att), http.StatusInternalServerError) - return - } - } - resp := &structs.GetBlockAttestationsResponse{ - Data: atts, - ExecutionOptimistic: isOptimistic, - Finalized: s.FinalizationFetcher.IsFinalized(ctx, root), - } - httputil.WriteJson(w, resp) -} - // GetBlockAttestationsV2 retrieves attestation included in requested block. func (s *Server) GetBlockAttestationsV2(w http.ResponseWriter, r *http.Request) { ctx, span := trace.StartSpan(r.Context(), "beacon.GetBlockAttestationsV2") @@ -396,28 +366,6 @@ func (s *Server) blockData(ctx context.Context, w http.ResponseWriter, r *http.R return blk, isOptimistic, root } -// Deprecated: use PublishBlindedBlockV2 instead -// PublishBlindedBlock instructs the beacon node to use the components of the `SignedBlindedBeaconBlock` to construct -// and publish a SignedBeaconBlock by swapping out the transactions_root for the corresponding full list of `transactions`. 
-// The beacon node should broadcast a newly constructed SignedBeaconBlock to the beacon network, to be included in the -// beacon chain. The beacon node is not required to validate the signed BeaconBlock, and a successful response (20X) -// only indicates that the broadcast has been successful. The beacon node is expected to integrate the new block into -// its state, and therefore validate the block internally, however blocks which fail the validation are still broadcast -// but a different status code is returned (202). Pre-Bellatrix, this endpoint will accept a SignedBeaconBlock. After -// Deneb, this additionally instructs the beacon node to broadcast all given signed blobs. -func (s *Server) PublishBlindedBlock(w http.ResponseWriter, r *http.Request) { - ctx, span := trace.StartSpan(r.Context(), "beacon.PublishBlindedBlock") - defer span.End() - if shared.IsSyncing(r.Context(), w, s.SyncChecker, s.HeadFetcher, s.TimeFetcher, s.OptimisticModeFetcher) { - return - } - if httputil.IsRequestSsz(r) { - s.publishBlindedBlockSSZ(ctx, w, r, false) - } else { - s.publishBlindedBlock(ctx, w, r, false) - } -} - // PublishBlindedBlockV2 instructs the beacon node to use the components of the `SignedBlindedBeaconBlock` to construct and publish a // `SignedBeaconBlock` by swapping out the `transactions_root` for the corresponding full list of `transactions`. // The beacon node should broadcast a newly constructed `SignedBeaconBlock` to the beacon network, @@ -627,28 +575,6 @@ func decodeBlindedBellatrixJSON(body []byte) (*eth.GenericSignedBeaconBlock, err ) } -// Deprecated: use PublishBlockV2 instead -// PublishBlock instructs the beacon node to broadcast a newly signed beacon block to the beacon network, -// to be included in the beacon chain. A success response (20x) indicates that the block -// passed gossip validation and was successfully broadcast onto the network. -// The beacon node is also expected to integrate the block into state, but may broadcast it -// before doing so, so as to aid timely delivery of the block. Should the block fail full -// validation, a separate success response code (202) is used to indicate that the block was -// successfully broadcast but failed integration. After Deneb, this additionally instructs the -// beacon node to broadcast all given signed blobs. -func (s *Server) PublishBlock(w http.ResponseWriter, r *http.Request) { - ctx, span := trace.StartSpan(r.Context(), "beacon.PublishBlock") - defer span.End() - if shared.IsSyncing(r.Context(), w, s.SyncChecker, s.HeadFetcher, s.TimeFetcher, s.OptimisticModeFetcher) { - return - } - if httputil.IsRequestSsz(r) { - s.publishBlockSSZ(ctx, w, r, false) - } else { - s.publishBlock(ctx, w, r, false) - } -} - // PublishBlockV2 instructs the beacon node to broadcast a newly signed beacon block to the beacon network, // to be included in the beacon chain. A success response (20x) indicates that the block // passed gossip validation and was successfully broadcast onto the network. @@ -1589,48 +1515,6 @@ func (s *Server) GetGenesis(w http.ResponseWriter, r *http.Request) { httputil.WriteJson(w, resp) } -// Deprecated: no longer needed post Electra -// GetDepositSnapshot retrieves the EIP-4881 Deposit Tree Snapshot. Either a JSON or, -// if the Accept header was added, bytes serialized by SSZ will be returned. 
-func (s *Server) GetDepositSnapshot(w http.ResponseWriter, r *http.Request) { - ctx, span := trace.StartSpan(r.Context(), "beacon.GetDepositSnapshot") - defer span.End() - - eth1data, err := s.BeaconDB.ExecutionChainData(ctx) - if err != nil { - httputil.HandleError(w, "Could not retrieve execution chain data: "+err.Error(), http.StatusInternalServerError) - return - } - if eth1data == nil { - httputil.HandleError(w, "Could not retrieve execution chain data: empty Eth1Data", http.StatusInternalServerError) - return - } - snapshot := eth1data.DepositSnapshot - if snapshot == nil || len(snapshot.Finalized) == 0 { - httputil.HandleError(w, "No finalized snapshot available", http.StatusNotFound) - return - } - if len(snapshot.Finalized) > depositsnapshot.DepositContractDepth { - httputil.HandleError(w, "Retrieved invalid deposit snapshot", http.StatusInternalServerError) - return - } - if httputil.RespondWithSsz(r) { - sszData, err := snapshot.MarshalSSZ() - if err != nil { - httputil.HandleError(w, "Could not marshal deposit snapshot into SSZ: "+err.Error(), http.StatusInternalServerError) - return - } - httputil.WriteSsz(w, sszData) - return - } - httputil.WriteJson( - w, - &structs.GetDepositSnapshotResponse{ - Data: structs.DepositSnapshotFromConsensus(snapshot), - }, - ) -} - // Broadcast blob sidecars even if the block of the same slot has been imported. // To ensure safety, we will only broadcast blob sidecars if the header references the same block that was previously seen. // Otherwise, a proposer could get slashed through a different blob sidecar header reference. diff --git a/beacon-chain/rpc/eth/beacon/handlers_pool.go b/beacon-chain/rpc/eth/beacon/handlers_pool.go index f8ca39e2ea..e40ad0cf47 100644 --- a/beacon-chain/rpc/eth/beacon/handlers_pool.go +++ b/beacon-chain/rpc/eth/beacon/handlers_pool.go @@ -34,58 +34,6 @@ import ( const broadcastBLSChangesRateLimit = 128 -// Deprecated: use ListAttestationsV2 instead -// ListAttestations retrieves attestations known by the node but -// not necessarily incorporated into any block. Allows filtering by committee index or slot. -func (s *Server) ListAttestations(w http.ResponseWriter, r *http.Request) { - _, span := trace.StartSpan(r.Context(), "beacon.ListAttestations") - defer span.End() - - rawSlot, slot, ok := shared.UintFromQuery(w, r, "slot", false) - if !ok { - return - } - rawCommitteeIndex, committeeIndex, ok := shared.UintFromQuery(w, r, "committee_index", false) - if !ok { - return - } - - var attestations []eth.Att - if features.Get().EnableExperimentalAttestationPool { - attestations = s.AttestationCache.GetAll() - } else { - attestations = s.AttestationsPool.AggregatedAttestations() - unaggAtts := s.AttestationsPool.UnaggregatedAttestations() - attestations = append(attestations, unaggAtts...) 
- } - - filteredAtts := make([]*structs.Attestation, 0, len(attestations)) - for _, a := range attestations { - var includeAttestation bool - att, ok := a.(*eth.Attestation) - if !ok { - httputil.HandleError(w, fmt.Sprintf("Unable to convert attestation of type %T", a), http.StatusInternalServerError) - return - } - - includeAttestation = shouldIncludeAttestation(att, rawSlot, slot, rawCommitteeIndex, committeeIndex) - if includeAttestation { - attStruct := structs.AttFromConsensus(att) - filteredAtts = append(filteredAtts, attStruct) - } - } - - attsData, err := json.Marshal(filteredAtts) - if err != nil { - httputil.HandleError(w, "Could not marshal attestations: "+err.Error(), http.StatusInternalServerError) - return - } - - httputil.WriteJson(w, &structs.ListAttestationsResponse{ - Data: attsData, - }) -} - // ListAttestationsV2 retrieves attestations known by the node but // not necessarily incorporated into any block. Allows filtering by committee index or slot. func (s *Server) ListAttestationsV2(w http.ResponseWriter, r *http.Request) { @@ -176,49 +124,6 @@ func shouldIncludeAttestation( return committeeIndexMatch && slotMatch } -// SubmitAttestations submits an attestation object to node. If the attestation passes all validation -// constraints, node MUST publish the attestation on an appropriate subnet. -func (s *Server) SubmitAttestations(w http.ResponseWriter, r *http.Request) { - ctx, span := trace.StartSpan(r.Context(), "beacon.SubmitAttestations") - defer span.End() - - var req structs.SubmitAttestationsRequest - err := json.NewDecoder(r.Body).Decode(&req.Data) - switch { - case errors.Is(err, io.EOF): - httputil.HandleError(w, "No data submitted", http.StatusBadRequest) - return - case err != nil: - httputil.HandleError(w, "Could not decode request body: "+err.Error(), http.StatusBadRequest) - return - } - - attFailures, failedBroadcasts, err := s.handleAttestations(ctx, req.Data) - if err != nil { - httputil.HandleError(w, err.Error(), http.StatusBadRequest) - return - } - - if len(attFailures) > 0 { - failuresErr := &server.IndexedErrorContainer{ - Code: http.StatusBadRequest, - Message: server.ErrIndexedValidationFail, - Failures: attFailures, - } - httputil.WriteError(w, failuresErr) - return - } - if len(failedBroadcasts) > 0 { - failuresErr := &server.IndexedErrorContainer{ - Code: http.StatusInternalServerError, - Message: server.ErrIndexedBroadcastFail, - Failures: failedBroadcasts, - } - httputil.WriteError(w, failuresErr) - return - } -} - // SubmitAttestationsV2 submits an attestation object to node. If the attestation passes all validation // constraints, node MUST publish the attestation on an appropriate subnet. func (s *Server) SubmitAttestationsV2(w http.ResponseWriter, r *http.Request) { @@ -749,36 +654,6 @@ func (s *Server) ListBLSToExecutionChanges(w http.ResponseWriter, r *http.Reques }) } -// Deprecated: use GetAttesterSlashingsV2 instead -// GetAttesterSlashings retrieves attester slashings known by the node but -// not necessarily incorporated into any block. 
-func (s *Server) GetAttesterSlashings(w http.ResponseWriter, r *http.Request) { - ctx, span := trace.StartSpan(r.Context(), "beacon.GetAttesterSlashings") - defer span.End() - - headState, err := s.ChainInfoFetcher.HeadStateReadOnly(ctx) - if err != nil { - httputil.HandleError(w, "Could not get head state: "+err.Error(), http.StatusInternalServerError) - return - } - sourceSlashings := s.SlashingsPool.PendingAttesterSlashings(ctx, headState, true /* return unlimited slashings */) - slashings := make([]*structs.AttesterSlashing, len(sourceSlashings)) - for i, slashing := range sourceSlashings { - as, ok := slashing.(*eth.AttesterSlashing) - if !ok { - httputil.HandleError(w, fmt.Sprintf("Unable to convert slashing of type %T", slashing), http.StatusInternalServerError) - return - } - slashings[i] = structs.AttesterSlashingFromConsensus(as) - } - attBytes, err := json.Marshal(slashings) - if err != nil { - httputil.HandleError(w, fmt.Sprintf("Failed to marshal slashings: %v", err), http.StatusInternalServerError) - return - } - httputil.WriteJson(w, &structs.GetAttesterSlashingsResponse{Data: attBytes}) -} - // GetAttesterSlashingsV2 retrieves attester slashings known by the node but // not necessarily incorporated into any block, supporting both AttesterSlashing and AttesterSlashingElectra. func (s *Server) GetAttesterSlashingsV2(w http.ResponseWriter, r *http.Request) { @@ -830,31 +705,6 @@ func (s *Server) GetAttesterSlashingsV2(w http.ResponseWriter, r *http.Request) httputil.WriteJson(w, resp) } -// SubmitAttesterSlashings submits an attester slashing object to node's pool and -// if passes validation node MUST broadcast it to network. -func (s *Server) SubmitAttesterSlashings(w http.ResponseWriter, r *http.Request) { - ctx, span := trace.StartSpan(r.Context(), "beacon.SubmitAttesterSlashings") - defer span.End() - - var req structs.AttesterSlashing - err := json.NewDecoder(r.Body).Decode(&req) - switch { - case errors.Is(err, io.EOF): - httputil.HandleError(w, "No data submitted", http.StatusBadRequest) - return - case err != nil: - httputil.HandleError(w, "Could not decode request body: "+err.Error(), http.StatusBadRequest) - return - } - - slashing, err := req.ToConsensus() - if err != nil { - httputil.HandleError(w, "Could not convert request slashing to consensus slashing: "+err.Error(), http.StatusBadRequest) - return - } - s.submitAttesterSlashing(w, ctx, slashing) -} - // SubmitAttesterSlashingsV2 submits an attester slashing object to node's pool and // if passes validation node MUST broadcast it to network. 
func (s *Server) SubmitAttesterSlashingsV2(w http.ResponseWriter, r *http.Request) { diff --git a/beacon-chain/rpc/eth/beacon/handlers_pool_test.go b/beacon-chain/rpc/eth/beacon/handlers_pool_test.go index 855a79b400..a46c57f87a 100644 --- a/beacon-chain/rpc/eth/beacon/handlers_pool_test.go +++ b/beacon-chain/rpc/eth/beacon/handlers_pool_test.go @@ -45,7 +45,7 @@ import ( "github.com/pkg/errors" ) -func TestListAttestations(t *testing.T) { +func TestListAttestationsV2(t *testing.T) { att1 := ðpbv1alpha1.Attestation{ AggregationBits: []byte{1, 10}, Data: ðpbv1alpha1.AttestationData{ @@ -114,36 +114,42 @@ func TestListAttestations(t *testing.T) { }, Signature: bytesutil.PadTo([]byte("signature4"), 96), } - t.Run("V1", func(t *testing.T) { + + t.Run("Pre-Electra", func(t *testing.T) { bs, err := util.NewBeaconState() require.NoError(t, err) - - chainService := &blockchainmock.ChainService{State: bs} + slot := primitives.Slot(0) + chainService := &blockchainmock.ChainService{State: bs, Slot: &slot} s := &Server{ ChainInfoFetcher: chainService, TimeFetcher: chainService, AttestationsPool: attestations.NewPool(), } + params.SetupTestConfigCleanup(t) + config := params.BeaconConfig() + config.DenebForkEpoch = 0 + params.OverrideBeaconConfig(config) + require.NoError(t, s.AttestationsPool.SaveAggregatedAttestations([]ethpbv1alpha1.Att{att1, att2})) require.NoError(t, s.AttestationsPool.SaveUnaggregatedAttestations([]ethpbv1alpha1.Att{att3, att4})) - t.Run("empty request", func(t *testing.T) { url := "http://example.com" request := httptest.NewRequest(http.MethodGet, url, nil) writer := httptest.NewRecorder() writer.Body = &bytes.Buffer{} - s.ListAttestations(writer, request) + s.ListAttestationsV2(writer, request) assert.Equal(t, http.StatusOK, writer.Code) resp := &structs.ListAttestationsResponse{} - require.NoError(t, json.Unmarshal(writer.Body.Bytes(), &resp)) + require.NoError(t, json.Unmarshal(writer.Body.Bytes(), resp)) require.NotNil(t, resp) require.NotNil(t, resp.Data) var atts []*structs.Attestation require.NoError(t, json.Unmarshal(resp.Data, &atts)) assert.Equal(t, 4, len(atts)) + assert.Equal(t, "deneb", resp.Version) }) t.Run("slot request", func(t *testing.T) { url := "http://example.com?slot=2" @@ -151,16 +157,17 @@ func TestListAttestations(t *testing.T) { writer := httptest.NewRecorder() writer.Body = &bytes.Buffer{} - s.ListAttestations(writer, request) + s.ListAttestationsV2(writer, request) assert.Equal(t, http.StatusOK, writer.Code) resp := &structs.ListAttestationsResponse{} - require.NoError(t, json.Unmarshal(writer.Body.Bytes(), &resp)) + require.NoError(t, json.Unmarshal(writer.Body.Bytes(), resp)) require.NotNil(t, resp) require.NotNil(t, resp.Data) var atts []*structs.Attestation require.NoError(t, json.Unmarshal(resp.Data, &atts)) assert.Equal(t, 2, len(atts)) + assert.Equal(t, "deneb", resp.Version) for _, a := range atts { assert.Equal(t, "2", a.Data.Slot) } @@ -171,16 +178,17 @@ func TestListAttestations(t *testing.T) { writer := httptest.NewRecorder() writer.Body = &bytes.Buffer{} - s.ListAttestations(writer, request) + s.ListAttestationsV2(writer, request) assert.Equal(t, http.StatusOK, writer.Code) resp := &structs.ListAttestationsResponse{} - require.NoError(t, json.Unmarshal(writer.Body.Bytes(), &resp)) + require.NoError(t, json.Unmarshal(writer.Body.Bytes(), resp)) require.NotNil(t, resp) require.NotNil(t, resp.Data) var atts []*structs.Attestation require.NoError(t, json.Unmarshal(resp.Data, &atts)) assert.Equal(t, 2, len(atts)) + assert.Equal(t, "deneb", 
resp.Version) for _, a := range atts { assert.Equal(t, "4", a.Data.CommitteeIndex) } @@ -191,307 +199,205 @@ func TestListAttestations(t *testing.T) { writer := httptest.NewRecorder() writer.Body = &bytes.Buffer{} - s.ListAttestations(writer, request) + s.ListAttestationsV2(writer, request) assert.Equal(t, http.StatusOK, writer.Code) resp := &structs.ListAttestationsResponse{} - require.NoError(t, json.Unmarshal(writer.Body.Bytes(), &resp)) + require.NoError(t, json.Unmarshal(writer.Body.Bytes(), resp)) require.NotNil(t, resp) require.NotNil(t, resp.Data) var atts []*structs.Attestation require.NoError(t, json.Unmarshal(resp.Data, &atts)) assert.Equal(t, 1, len(atts)) + assert.Equal(t, "deneb", resp.Version) for _, a := range atts { assert.Equal(t, "2", a.Data.Slot) assert.Equal(t, "4", a.Data.CommitteeIndex) } }) }) - t.Run("V2", func(t *testing.T) { - t.Run("Pre-Electra", func(t *testing.T) { - bs, err := util.NewBeaconState() - require.NoError(t, err) - slot := primitives.Slot(0) - chainService := &blockchainmock.ChainService{State: bs, Slot: &slot} - s := &Server{ - ChainInfoFetcher: chainService, - TimeFetcher: chainService, - AttestationsPool: attestations.NewPool(), - } + t.Run("Post-Electra", func(t *testing.T) { + cb1 := primitives.NewAttestationCommitteeBits() + cb1.SetBitAt(1, true) + cb2 := primitives.NewAttestationCommitteeBits() + cb2.SetBitAt(2, true) - params.SetupTestConfigCleanup(t) - config := params.BeaconConfig() - config.DenebForkEpoch = 0 - params.OverrideBeaconConfig(config) + attElectra1 := ðpbv1alpha1.AttestationElectra{ + AggregationBits: []byte{1, 10}, + Data: ðpbv1alpha1.AttestationData{ + Slot: 1, + CommitteeIndex: 0, + BeaconBlockRoot: bytesutil.PadTo([]byte("blockroot1"), 32), + Source: ðpbv1alpha1.Checkpoint{ + Epoch: 1, + Root: bytesutil.PadTo([]byte("sourceroot1"), 32), + }, + Target: ðpbv1alpha1.Checkpoint{ + Epoch: 10, + Root: bytesutil.PadTo([]byte("targetroot1"), 32), + }, + }, + CommitteeBits: cb1, + Signature: bytesutil.PadTo([]byte("signature1"), 96), + } + attElectra2 := ðpbv1alpha1.AttestationElectra{ + AggregationBits: []byte{1, 10}, + Data: ðpbv1alpha1.AttestationData{ + Slot: 1, + CommitteeIndex: 0, + BeaconBlockRoot: bytesutil.PadTo([]byte("blockroot2"), 32), + Source: ðpbv1alpha1.Checkpoint{ + Epoch: 1, + Root: bytesutil.PadTo([]byte("sourceroot2"), 32), + }, + Target: ðpbv1alpha1.Checkpoint{ + Epoch: 10, + Root: bytesutil.PadTo([]byte("targetroot2"), 32), + }, + }, + CommitteeBits: cb2, + Signature: bytesutil.PadTo([]byte("signature2"), 96), + } + attElectra3 := ðpbv1alpha1.AttestationElectra{ + AggregationBits: bitfield.NewBitlist(8), + Data: ðpbv1alpha1.AttestationData{ + Slot: 2, + CommitteeIndex: 0, + BeaconBlockRoot: bytesutil.PadTo([]byte("blockroot3"), 32), + Source: ðpbv1alpha1.Checkpoint{ + Epoch: 1, + Root: bytesutil.PadTo([]byte("sourceroot3"), 32), + }, + Target: ðpbv1alpha1.Checkpoint{ + Epoch: 10, + Root: bytesutil.PadTo([]byte("targetroot3"), 32), + }, + }, + CommitteeBits: cb1, + Signature: bytesutil.PadTo([]byte("signature3"), 96), + } + attElectra4 := ðpbv1alpha1.AttestationElectra{ + AggregationBits: bitfield.NewBitlist(8), + Data: ðpbv1alpha1.AttestationData{ + Slot: 2, + CommitteeIndex: 0, + BeaconBlockRoot: bytesutil.PadTo([]byte("blockroot4"), 32), + Source: ðpbv1alpha1.Checkpoint{ + Epoch: 1, + Root: bytesutil.PadTo([]byte("sourceroot4"), 32), + }, + Target: ðpbv1alpha1.Checkpoint{ + Epoch: 10, + Root: bytesutil.PadTo([]byte("targetroot4"), 32), + }, + }, + CommitteeBits: cb2, + Signature: 
bytesutil.PadTo([]byte("signature4"), 96), + } + bs, err := util.NewBeaconStateElectra() + require.NoError(t, err) - require.NoError(t, s.AttestationsPool.SaveAggregatedAttestations([]ethpbv1alpha1.Att{att1, att2})) - require.NoError(t, s.AttestationsPool.SaveUnaggregatedAttestations([]ethpbv1alpha1.Att{att3, att4})) - t.Run("empty request", func(t *testing.T) { - url := "http://example.com" - request := httptest.NewRequest(http.MethodGet, url, nil) - writer := httptest.NewRecorder() - writer.Body = &bytes.Buffer{} + params.SetupTestConfigCleanup(t) + config := params.BeaconConfig() + config.ElectraForkEpoch = 0 + params.OverrideBeaconConfig(config) - s.ListAttestationsV2(writer, request) - assert.Equal(t, http.StatusOK, writer.Code) - resp := &structs.ListAttestationsResponse{} - require.NoError(t, json.Unmarshal(writer.Body.Bytes(), resp)) - require.NotNil(t, resp) - require.NotNil(t, resp.Data) + chainService := &blockchainmock.ChainService{State: bs} + s := &Server{ + AttestationsPool: attestations.NewPool(), + ChainInfoFetcher: chainService, + TimeFetcher: chainService, + } + // Added one pre electra attestation to ensure it is ignored. + require.NoError(t, s.AttestationsPool.SaveAggregatedAttestations([]ethpbv1alpha1.Att{attElectra1, attElectra2, att1})) + require.NoError(t, s.AttestationsPool.SaveUnaggregatedAttestations([]ethpbv1alpha1.Att{attElectra3, attElectra4, att3})) - var atts []*structs.Attestation - require.NoError(t, json.Unmarshal(resp.Data, &atts)) - assert.Equal(t, 4, len(atts)) - assert.Equal(t, "deneb", resp.Version) - }) - t.Run("slot request", func(t *testing.T) { - url := "http://example.com?slot=2" - request := httptest.NewRequest(http.MethodGet, url, nil) - writer := httptest.NewRecorder() - writer.Body = &bytes.Buffer{} + t.Run("empty request", func(t *testing.T) { + url := "http://example.com" + request := httptest.NewRequest(http.MethodGet, url, nil) + writer := httptest.NewRecorder() + writer.Body = &bytes.Buffer{} - s.ListAttestationsV2(writer, request) - assert.Equal(t, http.StatusOK, writer.Code) - resp := &structs.ListAttestationsResponse{} - require.NoError(t, json.Unmarshal(writer.Body.Bytes(), resp)) - require.NotNil(t, resp) - require.NotNil(t, resp.Data) + s.ListAttestationsV2(writer, request) + assert.Equal(t, http.StatusOK, writer.Code) + resp := &structs.ListAttestationsResponse{} + require.NoError(t, json.Unmarshal(writer.Body.Bytes(), resp)) + require.NotNil(t, resp) + require.NotNil(t, resp.Data) - var atts []*structs.Attestation - require.NoError(t, json.Unmarshal(resp.Data, &atts)) - assert.Equal(t, 2, len(atts)) - assert.Equal(t, "deneb", resp.Version) - for _, a := range atts { - assert.Equal(t, "2", a.Data.Slot) - } - }) - t.Run("index request", func(t *testing.T) { - url := "http://example.com?committee_index=4" - request := httptest.NewRequest(http.MethodGet, url, nil) - writer := httptest.NewRecorder() - writer.Body = &bytes.Buffer{} - - s.ListAttestationsV2(writer, request) - assert.Equal(t, http.StatusOK, writer.Code) - resp := &structs.ListAttestationsResponse{} - require.NoError(t, json.Unmarshal(writer.Body.Bytes(), resp)) - require.NotNil(t, resp) - require.NotNil(t, resp.Data) - - var atts []*structs.Attestation - require.NoError(t, json.Unmarshal(resp.Data, &atts)) - assert.Equal(t, 2, len(atts)) - assert.Equal(t, "deneb", resp.Version) - for _, a := range atts { - assert.Equal(t, "4", a.Data.CommitteeIndex) - } - }) - t.Run("both slot + index request", func(t *testing.T) { - url := "http://example.com?slot=2&committee_index=4" 
- request := httptest.NewRequest(http.MethodGet, url, nil) - writer := httptest.NewRecorder() - writer.Body = &bytes.Buffer{} - - s.ListAttestationsV2(writer, request) - assert.Equal(t, http.StatusOK, writer.Code) - resp := &structs.ListAttestationsResponse{} - require.NoError(t, json.Unmarshal(writer.Body.Bytes(), resp)) - require.NotNil(t, resp) - require.NotNil(t, resp.Data) - - var atts []*structs.Attestation - require.NoError(t, json.Unmarshal(resp.Data, &atts)) - assert.Equal(t, 1, len(atts)) - assert.Equal(t, "deneb", resp.Version) - for _, a := range atts { - assert.Equal(t, "2", a.Data.Slot) - assert.Equal(t, "4", a.Data.CommitteeIndex) - } - }) + var atts []*structs.AttestationElectra + require.NoError(t, json.Unmarshal(resp.Data, &atts)) + assert.Equal(t, 4, len(atts)) + assert.Equal(t, "electra", resp.Version) }) - t.Run("Post-Electra", func(t *testing.T) { - cb1 := primitives.NewAttestationCommitteeBits() - cb1.SetBitAt(1, true) - cb2 := primitives.NewAttestationCommitteeBits() - cb2.SetBitAt(2, true) + t.Run("slot request", func(t *testing.T) { + url := "http://example.com?slot=2" + request := httptest.NewRequest(http.MethodGet, url, nil) + writer := httptest.NewRecorder() + writer.Body = &bytes.Buffer{} - attElectra1 := ðpbv1alpha1.AttestationElectra{ - AggregationBits: []byte{1, 10}, - Data: ðpbv1alpha1.AttestationData{ - Slot: 1, - CommitteeIndex: 0, - BeaconBlockRoot: bytesutil.PadTo([]byte("blockroot1"), 32), - Source: ðpbv1alpha1.Checkpoint{ - Epoch: 1, - Root: bytesutil.PadTo([]byte("sourceroot1"), 32), - }, - Target: ðpbv1alpha1.Checkpoint{ - Epoch: 10, - Root: bytesutil.PadTo([]byte("targetroot1"), 32), - }, - }, - CommitteeBits: cb1, - Signature: bytesutil.PadTo([]byte("signature1"), 96), + s.ListAttestationsV2(writer, request) + assert.Equal(t, http.StatusOK, writer.Code) + resp := &structs.ListAttestationsResponse{} + require.NoError(t, json.Unmarshal(writer.Body.Bytes(), resp)) + require.NotNil(t, resp) + require.NotNil(t, resp.Data) + + var atts []*structs.AttestationElectra + require.NoError(t, json.Unmarshal(resp.Data, &atts)) + assert.Equal(t, 2, len(atts)) + assert.Equal(t, "electra", resp.Version) + for _, a := range atts { + assert.Equal(t, "2", a.Data.Slot) } - attElectra2 := ðpbv1alpha1.AttestationElectra{ - AggregationBits: []byte{1, 10}, - Data: ðpbv1alpha1.AttestationData{ - Slot: 1, - CommitteeIndex: 0, - BeaconBlockRoot: bytesutil.PadTo([]byte("blockroot2"), 32), - Source: ðpbv1alpha1.Checkpoint{ - Epoch: 1, - Root: bytesutil.PadTo([]byte("sourceroot2"), 32), - }, - Target: ðpbv1alpha1.Checkpoint{ - Epoch: 10, - Root: bytesutil.PadTo([]byte("targetroot2"), 32), - }, - }, - CommitteeBits: cb2, - Signature: bytesutil.PadTo([]byte("signature2"), 96), + }) + t.Run("index request", func(t *testing.T) { + url := "http://example.com?committee_index=2" + request := httptest.NewRequest(http.MethodGet, url, nil) + writer := httptest.NewRecorder() + writer.Body = &bytes.Buffer{} + + s.ListAttestationsV2(writer, request) + assert.Equal(t, http.StatusOK, writer.Code) + resp := &structs.ListAttestationsResponse{} + require.NoError(t, json.Unmarshal(writer.Body.Bytes(), resp)) + require.NotNil(t, resp) + require.NotNil(t, resp.Data) + + var atts []*structs.AttestationElectra + require.NoError(t, json.Unmarshal(resp.Data, &atts)) + assert.Equal(t, 2, len(atts)) + assert.Equal(t, "electra", resp.Version) + for _, a := range atts { + assert.Equal(t, "0x0400000000000000", a.CommitteeBits) } - attElectra3 := ðpbv1alpha1.AttestationElectra{ - AggregationBits: 
bitfield.NewBitlist(8), - Data: ðpbv1alpha1.AttestationData{ - Slot: 2, - CommitteeIndex: 0, - BeaconBlockRoot: bytesutil.PadTo([]byte("blockroot3"), 32), - Source: ðpbv1alpha1.Checkpoint{ - Epoch: 1, - Root: bytesutil.PadTo([]byte("sourceroot3"), 32), - }, - Target: ðpbv1alpha1.Checkpoint{ - Epoch: 10, - Root: bytesutil.PadTo([]byte("targetroot3"), 32), - }, - }, - CommitteeBits: cb1, - Signature: bytesutil.PadTo([]byte("signature3"), 96), + }) + t.Run("both slot + index request", func(t *testing.T) { + url := "http://example.com?slot=2&committee_index=2" + request := httptest.NewRequest(http.MethodGet, url, nil) + writer := httptest.NewRecorder() + writer.Body = &bytes.Buffer{} + + s.ListAttestationsV2(writer, request) + assert.Equal(t, http.StatusOK, writer.Code) + resp := &structs.ListAttestationsResponse{} + require.NoError(t, json.Unmarshal(writer.Body.Bytes(), resp)) + require.NotNil(t, resp) + require.NotNil(t, resp.Data) + + var atts []*structs.AttestationElectra + require.NoError(t, json.Unmarshal(resp.Data, &atts)) + assert.Equal(t, 1, len(atts)) + assert.Equal(t, "electra", resp.Version) + for _, a := range atts { + assert.Equal(t, "2", a.Data.Slot) + assert.Equal(t, "0x0400000000000000", a.CommitteeBits) } - attElectra4 := ðpbv1alpha1.AttestationElectra{ - AggregationBits: bitfield.NewBitlist(8), - Data: ðpbv1alpha1.AttestationData{ - Slot: 2, - CommitteeIndex: 0, - BeaconBlockRoot: bytesutil.PadTo([]byte("blockroot4"), 32), - Source: ðpbv1alpha1.Checkpoint{ - Epoch: 1, - Root: bytesutil.PadTo([]byte("sourceroot4"), 32), - }, - Target: ðpbv1alpha1.Checkpoint{ - Epoch: 10, - Root: bytesutil.PadTo([]byte("targetroot4"), 32), - }, - }, - CommitteeBits: cb2, - Signature: bytesutil.PadTo([]byte("signature4"), 96), - } - bs, err := util.NewBeaconStateElectra() - require.NoError(t, err) - - params.SetupTestConfigCleanup(t) - config := params.BeaconConfig() - config.ElectraForkEpoch = 0 - params.OverrideBeaconConfig(config) - - chainService := &blockchainmock.ChainService{State: bs} - s := &Server{ - AttestationsPool: attestations.NewPool(), - ChainInfoFetcher: chainService, - TimeFetcher: chainService, - } - // Added one pre electra attestation to ensure it is ignored. 
- require.NoError(t, s.AttestationsPool.SaveAggregatedAttestations([]ethpbv1alpha1.Att{attElectra1, attElectra2, att1})) - require.NoError(t, s.AttestationsPool.SaveUnaggregatedAttestations([]ethpbv1alpha1.Att{attElectra3, attElectra4, att3})) - - t.Run("empty request", func(t *testing.T) { - url := "http://example.com" - request := httptest.NewRequest(http.MethodGet, url, nil) - writer := httptest.NewRecorder() - writer.Body = &bytes.Buffer{} - - s.ListAttestationsV2(writer, request) - assert.Equal(t, http.StatusOK, writer.Code) - resp := &structs.ListAttestationsResponse{} - require.NoError(t, json.Unmarshal(writer.Body.Bytes(), resp)) - require.NotNil(t, resp) - require.NotNil(t, resp.Data) - - var atts []*structs.AttestationElectra - require.NoError(t, json.Unmarshal(resp.Data, &atts)) - assert.Equal(t, 4, len(atts)) - assert.Equal(t, "electra", resp.Version) - }) - t.Run("slot request", func(t *testing.T) { - url := "http://example.com?slot=2" - request := httptest.NewRequest(http.MethodGet, url, nil) - writer := httptest.NewRecorder() - writer.Body = &bytes.Buffer{} - - s.ListAttestationsV2(writer, request) - assert.Equal(t, http.StatusOK, writer.Code) - resp := &structs.ListAttestationsResponse{} - require.NoError(t, json.Unmarshal(writer.Body.Bytes(), resp)) - require.NotNil(t, resp) - require.NotNil(t, resp.Data) - - var atts []*structs.AttestationElectra - require.NoError(t, json.Unmarshal(resp.Data, &atts)) - assert.Equal(t, 2, len(atts)) - assert.Equal(t, "electra", resp.Version) - for _, a := range atts { - assert.Equal(t, "2", a.Data.Slot) - } - }) - t.Run("index request", func(t *testing.T) { - url := "http://example.com?committee_index=2" - request := httptest.NewRequest(http.MethodGet, url, nil) - writer := httptest.NewRecorder() - writer.Body = &bytes.Buffer{} - - s.ListAttestationsV2(writer, request) - assert.Equal(t, http.StatusOK, writer.Code) - resp := &structs.ListAttestationsResponse{} - require.NoError(t, json.Unmarshal(writer.Body.Bytes(), resp)) - require.NotNil(t, resp) - require.NotNil(t, resp.Data) - - var atts []*structs.AttestationElectra - require.NoError(t, json.Unmarshal(resp.Data, &atts)) - assert.Equal(t, 2, len(atts)) - assert.Equal(t, "electra", resp.Version) - for _, a := range atts { - assert.Equal(t, "0x0400000000000000", a.CommitteeBits) - } - }) - t.Run("both slot + index request", func(t *testing.T) { - url := "http://example.com?slot=2&committee_index=2" - request := httptest.NewRequest(http.MethodGet, url, nil) - writer := httptest.NewRecorder() - writer.Body = &bytes.Buffer{} - - s.ListAttestationsV2(writer, request) - assert.Equal(t, http.StatusOK, writer.Code) - resp := &structs.ListAttestationsResponse{} - require.NoError(t, json.Unmarshal(writer.Body.Bytes(), resp)) - require.NotNil(t, resp) - require.NotNil(t, resp.Data) - - var atts []*structs.AttestationElectra - require.NoError(t, json.Unmarshal(resp.Data, &atts)) - assert.Equal(t, 1, len(atts)) - assert.Equal(t, "electra", resp.Version) - for _, a := range atts { - assert.Equal(t, "2", a.Data.Slot) - assert.Equal(t, "0x0400000000000000", a.CommitteeBits) - } - }) }) }) } -func TestSubmitAttestations(t *testing.T) { +func TestSubmitAttestationsV2(t *testing.T) { transition.SkipSlotCache.Disable() defer transition.SkipSlotCache.Enable() @@ -534,7 +440,8 @@ func TestSubmitAttestations(t *testing.T) { OperationNotifier: &blockchainmock.MockOperationNotifier{}, AttestationStateFetcher: chainService, } - t.Run("V1", func(t *testing.T) { + + t.Run("pre-electra", func(t *testing.T) { 
t.Run("single", func(t *testing.T) { broadcaster := &p2pMock.MockBroadcaster{} s.Broadcaster = broadcaster @@ -544,10 +451,11 @@ func TestSubmitAttestations(t *testing.T) { _, err := body.WriteString(singleAtt) require.NoError(t, err) request := httptest.NewRequest(http.MethodPost, "http://example.com", &body) + request.Header.Set(api.VersionHeader, version.String(version.Phase0)) writer := httptest.NewRecorder() writer.Body = &bytes.Buffer{} - s.SubmitAttestations(writer, request) + s.SubmitAttestationsV2(writer, request) assert.Equal(t, http.StatusOK, writer.Code) assert.Equal(t, true, broadcaster.BroadcastCalled.Load()) @@ -572,16 +480,17 @@ func TestSubmitAttestations(t *testing.T) { _, err := body.WriteString(multipleAtts) require.NoError(t, err) request := httptest.NewRequest(http.MethodPost, "http://example.com", &body) + request.Header.Set(api.VersionHeader, version.String(version.Phase0)) writer := httptest.NewRecorder() writer.Body = &bytes.Buffer{} - s.SubmitAttestations(writer, request) + s.SubmitAttestationsV2(writer, request) assert.Equal(t, http.StatusOK, writer.Code) assert.Equal(t, true, broadcaster.BroadcastCalled.Load()) assert.Equal(t, 2, broadcaster.NumAttestations()) assert.Equal(t, 2, s.AttestationsPool.UnaggregatedAttestationCount()) }) - t.Run("wrong fork", func(t *testing.T) { + t.Run("phase0 att post electra", func(t *testing.T) { params.SetupTestConfigCleanup(t) config := params.BeaconConfig() config.ElectraForkEpoch = 0 @@ -591,22 +500,40 @@ func TestSubmitAttestations(t *testing.T) { _, err := body.WriteString(singleAtt) require.NoError(t, err) request := httptest.NewRequest(http.MethodPost, "http://example.com", &body) + request.Header.Set(api.VersionHeader, version.String(version.Phase0)) writer := httptest.NewRecorder() writer.Body = &bytes.Buffer{} - s.SubmitAttestations(writer, request) + s.SubmitAttestationsV2(writer, request) assert.Equal(t, http.StatusBadRequest, writer.Code) e := &httputil.DefaultJsonError{} require.NoError(t, json.Unmarshal(writer.Body.Bytes(), e)) assert.Equal(t, http.StatusBadRequest, e.Code) assert.ErrorContains(t, "old attestation format", errors.New(e.Message)) }) - t.Run("no body", func(t *testing.T) { - request := httptest.NewRequest(http.MethodPost, "http://example.com", nil) + t.Run("electra att before electra", func(t *testing.T) { + var body bytes.Buffer + _, err := body.WriteString(singleAttElectra) + require.NoError(t, err) + request := httptest.NewRequest(http.MethodPost, "http://example.com", &body) + request.Header.Set(api.VersionHeader, version.String(version.Electra)) writer := httptest.NewRecorder() writer.Body = &bytes.Buffer{} - s.SubmitAttestations(writer, request) + s.SubmitAttestationsV2(writer, request) + assert.Equal(t, http.StatusBadRequest, writer.Code) + e := &httputil.DefaultJsonError{} + require.NoError(t, json.Unmarshal(writer.Body.Bytes(), e)) + assert.Equal(t, http.StatusBadRequest, e.Code) + assert.ErrorContains(t, "electra attestations have not been enabled", errors.New(e.Message)) + }) + t.Run("no body", func(t *testing.T) { + request := httptest.NewRequest(http.MethodPost, "http://example.com", nil) + request.Header.Set(api.VersionHeader, version.String(version.Phase0)) + writer := httptest.NewRecorder() + writer.Body = &bytes.Buffer{} + + s.SubmitAttestationsV2(writer, request) assert.Equal(t, http.StatusBadRequest, writer.Code) e := &httputil.DefaultJsonError{} require.NoError(t, json.Unmarshal(writer.Body.Bytes(), e)) @@ -618,10 +545,11 @@ func TestSubmitAttestations(t *testing.T) { _, err := 
body.WriteString("[]") require.NoError(t, err) request := httptest.NewRequest(http.MethodPost, "http://example.com", &body) + request.Header.Set(api.VersionHeader, version.String(version.Phase0)) writer := httptest.NewRecorder() writer.Body = &bytes.Buffer{} - s.SubmitAttestations(writer, request) + s.SubmitAttestationsV2(writer, request) assert.Equal(t, http.StatusBadRequest, writer.Code) e := &httputil.DefaultJsonError{} require.NoError(t, json.Unmarshal(writer.Body.Bytes(), e)) @@ -633,10 +561,11 @@ func TestSubmitAttestations(t *testing.T) { _, err := body.WriteString(invalidAtt) require.NoError(t, err) request := httptest.NewRequest(http.MethodPost, "http://example.com", &body) + request.Header.Set(api.VersionHeader, version.String(version.Phase0)) writer := httptest.NewRecorder() writer.Body = &bytes.Buffer{} - s.SubmitAttestations(writer, request) + s.SubmitAttestationsV2(writer, request) assert.Equal(t, http.StatusBadRequest, writer.Code) e := &server.IndexedErrorContainer{} require.NoError(t, json.Unmarshal(writer.Body.Bytes(), e)) @@ -645,243 +574,107 @@ func TestSubmitAttestations(t *testing.T) { assert.Equal(t, true, strings.Contains(e.Failures[0].Message, "Incorrect attestation signature")) }) }) - t.Run("V2", func(t *testing.T) { - t.Run("pre-electra", func(t *testing.T) { - t.Run("single", func(t *testing.T) { - broadcaster := &p2pMock.MockBroadcaster{} - s.Broadcaster = broadcaster - s.AttestationsPool = attestations.NewPool() + t.Run("post-electra", func(t *testing.T) { + params.SetupTestConfigCleanup(t) + config := params.BeaconConfig() + config.ElectraForkEpoch = 0 + params.OverrideBeaconConfig(config) - var body bytes.Buffer - _, err := body.WriteString(singleAtt) - require.NoError(t, err) - request := httptest.NewRequest(http.MethodPost, "http://example.com", &body) - request.Header.Set(api.VersionHeader, version.String(version.Phase0)) - writer := httptest.NewRecorder() - writer.Body = &bytes.Buffer{} + t.Run("single", func(t *testing.T) { + broadcaster := &p2pMock.MockBroadcaster{} + s.Broadcaster = broadcaster + s.AttestationsPool = attestations.NewPool() - s.SubmitAttestationsV2(writer, request) + var body bytes.Buffer + _, err := body.WriteString(singleAttElectra) + require.NoError(t, err) + request := httptest.NewRequest(http.MethodPost, "http://example.com", &body) + request.Header.Set(api.VersionHeader, version.String(version.Electra)) + writer := httptest.NewRecorder() + writer.Body = &bytes.Buffer{} - assert.Equal(t, http.StatusOK, writer.Code) - assert.Equal(t, true, broadcaster.BroadcastCalled.Load()) - assert.Equal(t, 1, broadcaster.NumAttestations()) - assert.Equal(t, "0x03", hexutil.Encode(broadcaster.BroadcastAttestations[0].GetAggregationBits())) - assert.Equal(t, "0x8146f4397bfd8fd057ebbcd6a67327bdc7ed5fb650533edcb6377b650dea0b6da64c14ecd60846d5c0a0cd43893d6972092500f82c9d8a955e2b58c5ed3cbe885d84008ace6bd86ba9e23652f58e2ec207cec494c916063257abf285b9b15b15", hexutil.Encode(broadcaster.BroadcastAttestations[0].GetSignature())) - assert.Equal(t, primitives.Slot(0), broadcaster.BroadcastAttestations[0].GetData().Slot) - assert.Equal(t, primitives.CommitteeIndex(0), broadcaster.BroadcastAttestations[0].GetData().CommitteeIndex) - assert.Equal(t, "0xcf8e0d4e9587369b2301d0790347320302cc0943d5a1884560367e8208d920f2", hexutil.Encode(broadcaster.BroadcastAttestations[0].GetData().BeaconBlockRoot)) - assert.Equal(t, "0xcf8e0d4e9587369b2301d0790347320302cc0943d5a1884560367e8208d920f2", hexutil.Encode(broadcaster.BroadcastAttestations[0].GetData().Source.Root)) - 
assert.Equal(t, primitives.Epoch(0), broadcaster.BroadcastAttestations[0].GetData().Source.Epoch) - assert.Equal(t, "0xcf8e0d4e9587369b2301d0790347320302cc0943d5a1884560367e8208d920f2", hexutil.Encode(broadcaster.BroadcastAttestations[0].GetData().Target.Root)) - assert.Equal(t, primitives.Epoch(0), broadcaster.BroadcastAttestations[0].GetData().Target.Epoch) - assert.Equal(t, 1, s.AttestationsPool.UnaggregatedAttestationCount()) - }) - t.Run("multiple", func(t *testing.T) { - broadcaster := &p2pMock.MockBroadcaster{} - s.Broadcaster = broadcaster - s.AttestationsPool = attestations.NewPool() + s.SubmitAttestationsV2(writer, request) - var body bytes.Buffer - _, err := body.WriteString(multipleAtts) - require.NoError(t, err) - request := httptest.NewRequest(http.MethodPost, "http://example.com", &body) - request.Header.Set(api.VersionHeader, version.String(version.Phase0)) - writer := httptest.NewRecorder() - writer.Body = &bytes.Buffer{} - - s.SubmitAttestationsV2(writer, request) - assert.Equal(t, http.StatusOK, writer.Code) - assert.Equal(t, true, broadcaster.BroadcastCalled.Load()) - assert.Equal(t, 2, broadcaster.NumAttestations()) - assert.Equal(t, 2, s.AttestationsPool.UnaggregatedAttestationCount()) - }) - t.Run("phase0 att post electra", func(t *testing.T) { - params.SetupTestConfigCleanup(t) - config := params.BeaconConfig() - config.ElectraForkEpoch = 0 - params.OverrideBeaconConfig(config) - - var body bytes.Buffer - _, err := body.WriteString(singleAtt) - require.NoError(t, err) - request := httptest.NewRequest(http.MethodPost, "http://example.com", &body) - request.Header.Set(api.VersionHeader, version.String(version.Phase0)) - writer := httptest.NewRecorder() - writer.Body = &bytes.Buffer{} - - s.SubmitAttestationsV2(writer, request) - assert.Equal(t, http.StatusBadRequest, writer.Code) - e := &httputil.DefaultJsonError{} - require.NoError(t, json.Unmarshal(writer.Body.Bytes(), e)) - assert.Equal(t, http.StatusBadRequest, e.Code) - assert.ErrorContains(t, "old attestation format", errors.New(e.Message)) - }) - t.Run("electra att before electra", func(t *testing.T) { - var body bytes.Buffer - _, err := body.WriteString(singleAttElectra) - require.NoError(t, err) - request := httptest.NewRequest(http.MethodPost, "http://example.com", &body) - request.Header.Set(api.VersionHeader, version.String(version.Electra)) - writer := httptest.NewRecorder() - writer.Body = &bytes.Buffer{} - - s.SubmitAttestationsV2(writer, request) - assert.Equal(t, http.StatusBadRequest, writer.Code) - e := &httputil.DefaultJsonError{} - require.NoError(t, json.Unmarshal(writer.Body.Bytes(), e)) - assert.Equal(t, http.StatusBadRequest, e.Code) - assert.ErrorContains(t, "electra attestations have not been enabled", errors.New(e.Message)) - }) - t.Run("no body", func(t *testing.T) { - request := httptest.NewRequest(http.MethodPost, "http://example.com", nil) - request.Header.Set(api.VersionHeader, version.String(version.Phase0)) - writer := httptest.NewRecorder() - writer.Body = &bytes.Buffer{} - - s.SubmitAttestationsV2(writer, request) - assert.Equal(t, http.StatusBadRequest, writer.Code) - e := &httputil.DefaultJsonError{} - require.NoError(t, json.Unmarshal(writer.Body.Bytes(), e)) - assert.Equal(t, http.StatusBadRequest, e.Code) - assert.Equal(t, true, strings.Contains(e.Message, "No data submitted")) - }) - t.Run("empty", func(t *testing.T) { - var body bytes.Buffer - _, err := body.WriteString("[]") - require.NoError(t, err) - request := httptest.NewRequest(http.MethodPost, "http://example.com", 
&body) - request.Header.Set(api.VersionHeader, version.String(version.Phase0)) - writer := httptest.NewRecorder() - writer.Body = &bytes.Buffer{} - - s.SubmitAttestationsV2(writer, request) - assert.Equal(t, http.StatusBadRequest, writer.Code) - e := &httputil.DefaultJsonError{} - require.NoError(t, json.Unmarshal(writer.Body.Bytes(), e)) - assert.Equal(t, http.StatusBadRequest, e.Code) - assert.Equal(t, true, strings.Contains(e.Message, "no data submitted")) - }) - t.Run("invalid", func(t *testing.T) { - var body bytes.Buffer - _, err := body.WriteString(invalidAtt) - require.NoError(t, err) - request := httptest.NewRequest(http.MethodPost, "http://example.com", &body) - request.Header.Set(api.VersionHeader, version.String(version.Phase0)) - writer := httptest.NewRecorder() - writer.Body = &bytes.Buffer{} - - s.SubmitAttestationsV2(writer, request) - assert.Equal(t, http.StatusBadRequest, writer.Code) - e := &server.IndexedErrorContainer{} - require.NoError(t, json.Unmarshal(writer.Body.Bytes(), e)) - assert.Equal(t, http.StatusBadRequest, e.Code) - require.Equal(t, 1, len(e.Failures)) - assert.Equal(t, true, strings.Contains(e.Failures[0].Message, "Incorrect attestation signature")) - }) + assert.Equal(t, http.StatusOK, writer.Code) + assert.Equal(t, true, broadcaster.BroadcastCalled.Load()) + assert.Equal(t, 1, broadcaster.NumAttestations()) + assert.Equal(t, primitives.ValidatorIndex(1), broadcaster.BroadcastAttestations[0].GetAttestingIndex()) + assert.Equal(t, "0x8146f4397bfd8fd057ebbcd6a67327bdc7ed5fb650533edcb6377b650dea0b6da64c14ecd60846d5c0a0cd43893d6972092500f82c9d8a955e2b58c5ed3cbe885d84008ace6bd86ba9e23652f58e2ec207cec494c916063257abf285b9b15b15", hexutil.Encode(broadcaster.BroadcastAttestations[0].GetSignature())) + assert.Equal(t, primitives.Slot(0), broadcaster.BroadcastAttestations[0].GetData().Slot) + assert.Equal(t, primitives.CommitteeIndex(0), broadcaster.BroadcastAttestations[0].GetData().CommitteeIndex) + assert.Equal(t, "0xcf8e0d4e9587369b2301d0790347320302cc0943d5a1884560367e8208d920f2", hexutil.Encode(broadcaster.BroadcastAttestations[0].GetData().BeaconBlockRoot)) + assert.Equal(t, "0xcf8e0d4e9587369b2301d0790347320302cc0943d5a1884560367e8208d920f2", hexutil.Encode(broadcaster.BroadcastAttestations[0].GetData().Source.Root)) + assert.Equal(t, primitives.Epoch(0), broadcaster.BroadcastAttestations[0].GetData().Source.Epoch) + assert.Equal(t, "0xcf8e0d4e9587369b2301d0790347320302cc0943d5a1884560367e8208d920f2", hexutil.Encode(broadcaster.BroadcastAttestations[0].GetData().Target.Root)) + assert.Equal(t, primitives.Epoch(0), broadcaster.BroadcastAttestations[0].GetData().Target.Epoch) + assert.Equal(t, 1, s.AttestationsPool.UnaggregatedAttestationCount()) }) - t.Run("post-electra", func(t *testing.T) { - params.SetupTestConfigCleanup(t) - config := params.BeaconConfig() - config.ElectraForkEpoch = 0 - params.OverrideBeaconConfig(config) + t.Run("multiple", func(t *testing.T) { + broadcaster := &p2pMock.MockBroadcaster{} + s.Broadcaster = broadcaster + s.AttestationsPool = attestations.NewPool() - t.Run("single", func(t *testing.T) { - broadcaster := &p2pMock.MockBroadcaster{} - s.Broadcaster = broadcaster - s.AttestationsPool = attestations.NewPool() + var body bytes.Buffer + _, err := body.WriteString(multipleAttsElectra) + require.NoError(t, err) + request := httptest.NewRequest(http.MethodPost, "http://example.com", &body) + request.Header.Set(api.VersionHeader, version.String(version.Electra)) + writer := httptest.NewRecorder() + writer.Body = &bytes.Buffer{} - 
var body bytes.Buffer - _, err := body.WriteString(singleAttElectra) - require.NoError(t, err) - request := httptest.NewRequest(http.MethodPost, "http://example.com", &body) - request.Header.Set(api.VersionHeader, version.String(version.Electra)) - writer := httptest.NewRecorder() - writer.Body = &bytes.Buffer{} + s.SubmitAttestationsV2(writer, request) + assert.Equal(t, http.StatusOK, writer.Code) + assert.Equal(t, true, broadcaster.BroadcastCalled.Load()) + assert.Equal(t, 2, broadcaster.NumAttestations()) + assert.Equal(t, 2, s.AttestationsPool.UnaggregatedAttestationCount()) + }) + t.Run("no body", func(t *testing.T) { + request := httptest.NewRequest(http.MethodPost, "http://example.com", nil) + request.Header.Set(api.VersionHeader, version.String(version.Electra)) + writer := httptest.NewRecorder() + writer.Body = &bytes.Buffer{} - s.SubmitAttestationsV2(writer, request) + s.SubmitAttestationsV2(writer, request) + assert.Equal(t, http.StatusBadRequest, writer.Code) + e := &httputil.DefaultJsonError{} + require.NoError(t, json.Unmarshal(writer.Body.Bytes(), e)) + assert.Equal(t, http.StatusBadRequest, e.Code) + assert.Equal(t, true, strings.Contains(e.Message, "No data submitted")) + }) + t.Run("empty", func(t *testing.T) { + var body bytes.Buffer + _, err := body.WriteString("[]") + require.NoError(t, err) + request := httptest.NewRequest(http.MethodPost, "http://example.com", &body) + request.Header.Set(api.VersionHeader, version.String(version.Electra)) + writer := httptest.NewRecorder() + writer.Body = &bytes.Buffer{} - assert.Equal(t, http.StatusOK, writer.Code) - assert.Equal(t, true, broadcaster.BroadcastCalled.Load()) - assert.Equal(t, 1, broadcaster.NumAttestations()) - assert.Equal(t, primitives.ValidatorIndex(1), broadcaster.BroadcastAttestations[0].GetAttestingIndex()) - assert.Equal(t, "0x8146f4397bfd8fd057ebbcd6a67327bdc7ed5fb650533edcb6377b650dea0b6da64c14ecd60846d5c0a0cd43893d6972092500f82c9d8a955e2b58c5ed3cbe885d84008ace6bd86ba9e23652f58e2ec207cec494c916063257abf285b9b15b15", hexutil.Encode(broadcaster.BroadcastAttestations[0].GetSignature())) - assert.Equal(t, primitives.Slot(0), broadcaster.BroadcastAttestations[0].GetData().Slot) - assert.Equal(t, primitives.CommitteeIndex(0), broadcaster.BroadcastAttestations[0].GetData().CommitteeIndex) - assert.Equal(t, "0xcf8e0d4e9587369b2301d0790347320302cc0943d5a1884560367e8208d920f2", hexutil.Encode(broadcaster.BroadcastAttestations[0].GetData().BeaconBlockRoot)) - assert.Equal(t, "0xcf8e0d4e9587369b2301d0790347320302cc0943d5a1884560367e8208d920f2", hexutil.Encode(broadcaster.BroadcastAttestations[0].GetData().Source.Root)) - assert.Equal(t, primitives.Epoch(0), broadcaster.BroadcastAttestations[0].GetData().Source.Epoch) - assert.Equal(t, "0xcf8e0d4e9587369b2301d0790347320302cc0943d5a1884560367e8208d920f2", hexutil.Encode(broadcaster.BroadcastAttestations[0].GetData().Target.Root)) - assert.Equal(t, primitives.Epoch(0), broadcaster.BroadcastAttestations[0].GetData().Target.Epoch) - assert.Equal(t, 1, s.AttestationsPool.UnaggregatedAttestationCount()) - }) - t.Run("multiple", func(t *testing.T) { - broadcaster := &p2pMock.MockBroadcaster{} - s.Broadcaster = broadcaster - s.AttestationsPool = attestations.NewPool() + s.SubmitAttestationsV2(writer, request) + assert.Equal(t, http.StatusBadRequest, writer.Code) + e := &httputil.DefaultJsonError{} + require.NoError(t, json.Unmarshal(writer.Body.Bytes(), e)) + assert.Equal(t, http.StatusBadRequest, e.Code) + assert.Equal(t, true, strings.Contains(e.Message, "no data submitted")) 
+ }) + t.Run("invalid", func(t *testing.T) { + var body bytes.Buffer + _, err := body.WriteString(invalidAttElectra) + require.NoError(t, err) + request := httptest.NewRequest(http.MethodPost, "http://example.com", &body) + request.Header.Set(api.VersionHeader, version.String(version.Electra)) + writer := httptest.NewRecorder() + writer.Body = &bytes.Buffer{} - var body bytes.Buffer - _, err := body.WriteString(multipleAttsElectra) - require.NoError(t, err) - request := httptest.NewRequest(http.MethodPost, "http://example.com", &body) - request.Header.Set(api.VersionHeader, version.String(version.Electra)) - writer := httptest.NewRecorder() - writer.Body = &bytes.Buffer{} - - s.SubmitAttestationsV2(writer, request) - assert.Equal(t, http.StatusOK, writer.Code) - assert.Equal(t, true, broadcaster.BroadcastCalled.Load()) - assert.Equal(t, 2, broadcaster.NumAttestations()) - assert.Equal(t, 2, s.AttestationsPool.UnaggregatedAttestationCount()) - }) - t.Run("no body", func(t *testing.T) { - request := httptest.NewRequest(http.MethodPost, "http://example.com", nil) - request.Header.Set(api.VersionHeader, version.String(version.Electra)) - writer := httptest.NewRecorder() - writer.Body = &bytes.Buffer{} - - s.SubmitAttestationsV2(writer, request) - assert.Equal(t, http.StatusBadRequest, writer.Code) - e := &httputil.DefaultJsonError{} - require.NoError(t, json.Unmarshal(writer.Body.Bytes(), e)) - assert.Equal(t, http.StatusBadRequest, e.Code) - assert.Equal(t, true, strings.Contains(e.Message, "No data submitted")) - }) - t.Run("empty", func(t *testing.T) { - var body bytes.Buffer - _, err := body.WriteString("[]") - require.NoError(t, err) - request := httptest.NewRequest(http.MethodPost, "http://example.com", &body) - request.Header.Set(api.VersionHeader, version.String(version.Electra)) - writer := httptest.NewRecorder() - writer.Body = &bytes.Buffer{} - - s.SubmitAttestationsV2(writer, request) - assert.Equal(t, http.StatusBadRequest, writer.Code) - e := &httputil.DefaultJsonError{} - require.NoError(t, json.Unmarshal(writer.Body.Bytes(), e)) - assert.Equal(t, http.StatusBadRequest, e.Code) - assert.Equal(t, true, strings.Contains(e.Message, "no data submitted")) - }) - t.Run("invalid", func(t *testing.T) { - var body bytes.Buffer - _, err := body.WriteString(invalidAttElectra) - require.NoError(t, err) - request := httptest.NewRequest(http.MethodPost, "http://example.com", &body) - request.Header.Set(api.VersionHeader, version.String(version.Electra)) - writer := httptest.NewRecorder() - writer.Body = &bytes.Buffer{} - - s.SubmitAttestationsV2(writer, request) - assert.Equal(t, http.StatusBadRequest, writer.Code) - e := &server.IndexedErrorContainer{} - require.NoError(t, json.Unmarshal(writer.Body.Bytes(), e)) - assert.Equal(t, http.StatusBadRequest, e.Code) - require.Equal(t, 1, len(e.Failures)) - assert.Equal(t, true, strings.Contains(e.Failures[0].Message, "Incorrect attestation signature")) - }) + s.SubmitAttestationsV2(writer, request) + assert.Equal(t, http.StatusBadRequest, writer.Code) + e := &server.IndexedErrorContainer{} + require.NoError(t, json.Unmarshal(writer.Body.Bytes(), e)) + assert.Equal(t, http.StatusBadRequest, e.Code) + require.Equal(t, 1, len(e.Failures)) + assert.Equal(t, true, strings.Contains(e.Failures[0].Message, "Incorrect attestation signature")) }) }) - } func TestListVoluntaryExits(t *testing.T) { @@ -1552,7 +1345,7 @@ func TestSubmitSignedBLSToExecutionChanges_Failures(t *testing.T) { } } -func TestGetAttesterSlashings(t *testing.T) { +func 
TestGetAttesterSlashingsV2(t *testing.T) { slashing1PreElectra := ðpbv1alpha1.AttesterSlashing{ Attestation_1: ðpbv1alpha1.IndexedAttestation{ AttestingIndices: []uint64{1, 10}, @@ -1662,204 +1455,147 @@ func TestGetAttesterSlashings(t *testing.T) { }, } - t.Run("V1", func(t *testing.T) { - t.Run("ok", func(t *testing.T) { - bs, err := util.NewBeaconState() - require.NoError(t, err) + t.Run("post-electra-ok-1-pre-slashing", func(t *testing.T) { + bs, err := util.NewBeaconStateElectra() + require.NoError(t, err) - s := &Server{ - ChainInfoFetcher: &blockchainmock.ChainService{State: bs}, - SlashingsPool: &slashingsmock.PoolMock{PendingAttSlashings: []ethpbv1alpha1.AttSlashing{slashing1PreElectra, slashing2PreElectra}}, - } + params.SetupTestConfigCleanup(t) + config := params.BeaconConfig() + config.ElectraForkEpoch = 100 + params.OverrideBeaconConfig(config) - request := httptest.NewRequest(http.MethodGet, "http://example.com/eth/v1/beacon/pool/attester_slashings", nil) - writer := httptest.NewRecorder() - writer.Body = &bytes.Buffer{} + chainService := &blockchainmock.ChainService{State: bs} - s.GetAttesterSlashings(writer, request) - require.Equal(t, http.StatusOK, writer.Code) - resp := &structs.GetAttesterSlashingsResponse{} - require.NoError(t, json.Unmarshal(writer.Body.Bytes(), resp)) - require.NotNil(t, resp) - require.NotNil(t, resp.Data) + s := &Server{ + ChainInfoFetcher: chainService, + TimeFetcher: chainService, + SlashingsPool: &slashingsmock.PoolMock{PendingAttSlashings: []ethpbv1alpha1.AttSlashing{slashingPostElectra, slashing1PreElectra}}, + } - var slashings []*structs.AttesterSlashing - require.NoError(t, json.Unmarshal(resp.Data, &slashings)) + request := httptest.NewRequest(http.MethodGet, "http://example.com/eth/v2/beacon/pool/attester_slashings", nil) + writer := httptest.NewRecorder() + writer.Body = &bytes.Buffer{} - ss, err := structs.AttesterSlashingsToConsensus(slashings) - require.NoError(t, err) + s.GetAttesterSlashingsV2(writer, request) + require.Equal(t, http.StatusOK, writer.Code) + resp := &structs.GetAttesterSlashingsResponse{} + require.NoError(t, json.Unmarshal(writer.Body.Bytes(), resp)) + require.NotNil(t, resp) + require.NotNil(t, resp.Data) + assert.Equal(t, "electra", resp.Version) - require.DeepEqual(t, slashing1PreElectra, ss[0]) - require.DeepEqual(t, slashing2PreElectra, ss[1]) - }) - t.Run("no slashings", func(t *testing.T) { - bs, err := util.NewBeaconState() - require.NoError(t, err) + // Unmarshal resp.Data into a slice of slashings + var slashings []*structs.AttesterSlashingElectra + require.NoError(t, json.Unmarshal(resp.Data, &slashings)) - s := &Server{ - ChainInfoFetcher: &blockchainmock.ChainService{State: bs}, - SlashingsPool: &slashingsmock.PoolMock{PendingAttSlashings: []ethpbv1alpha1.AttSlashing{}}, - } + ss, err := structs.AttesterSlashingsElectraToConsensus(slashings) + require.NoError(t, err) - request := httptest.NewRequest(http.MethodGet, "http://example.com/eth/v1/beacon/pool/attester_slashings", nil) - writer := httptest.NewRecorder() - writer.Body = &bytes.Buffer{} - - s.GetAttesterSlashings(writer, request) - require.Equal(t, http.StatusOK, writer.Code) - resp := &structs.GetAttesterSlashingsResponse{} - require.NoError(t, json.Unmarshal(writer.Body.Bytes(), resp)) - require.NotNil(t, resp) - require.NotNil(t, resp.Data) - - var slashings []*structs.AttesterSlashing - require.NoError(t, json.Unmarshal(resp.Data, &slashings)) - require.Equal(t, 0, len(slashings)) - }) + require.DeepEqual(t, slashingPostElectra, ss[0]) }) 
- t.Run("V2", func(t *testing.T) { - t.Run("post-electra-ok-1-pre-slashing", func(t *testing.T) { - bs, err := util.NewBeaconStateElectra() - require.NoError(t, err) + t.Run("post-electra-ok", func(t *testing.T) { + bs, err := util.NewBeaconStateElectra() + require.NoError(t, err) - params.SetupTestConfigCleanup(t) - config := params.BeaconConfig() - config.ElectraForkEpoch = 100 - params.OverrideBeaconConfig(config) + params.SetupTestConfigCleanup(t) + config := params.BeaconConfig() + config.ElectraForkEpoch = 100 + params.OverrideBeaconConfig(config) - chainService := &blockchainmock.ChainService{State: bs} + chainService := &blockchainmock.ChainService{State: bs} - s := &Server{ - ChainInfoFetcher: chainService, - TimeFetcher: chainService, - SlashingsPool: &slashingsmock.PoolMock{PendingAttSlashings: []ethpbv1alpha1.AttSlashing{slashingPostElectra, slashing1PreElectra}}, - } + s := &Server{ + ChainInfoFetcher: chainService, + TimeFetcher: chainService, + SlashingsPool: &slashingsmock.PoolMock{PendingAttSlashings: []ethpbv1alpha1.AttSlashing{slashingPostElectra}}, + } - request := httptest.NewRequest(http.MethodGet, "http://example.com/eth/v2/beacon/pool/attester_slashings", nil) - writer := httptest.NewRecorder() - writer.Body = &bytes.Buffer{} + request := httptest.NewRequest(http.MethodGet, "http://example.com/eth/v2/beacon/pool/attester_slashings", nil) + writer := httptest.NewRecorder() + writer.Body = &bytes.Buffer{} - s.GetAttesterSlashingsV2(writer, request) - require.Equal(t, http.StatusOK, writer.Code) - resp := &structs.GetAttesterSlashingsResponse{} - require.NoError(t, json.Unmarshal(writer.Body.Bytes(), resp)) - require.NotNil(t, resp) - require.NotNil(t, resp.Data) - assert.Equal(t, "electra", resp.Version) + s.GetAttesterSlashingsV2(writer, request) + require.Equal(t, http.StatusOK, writer.Code) + resp := &structs.GetAttesterSlashingsResponse{} + require.NoError(t, json.Unmarshal(writer.Body.Bytes(), resp)) + require.NotNil(t, resp) + require.NotNil(t, resp.Data) + assert.Equal(t, "electra", resp.Version) - // Unmarshal resp.Data into a slice of slashings - var slashings []*structs.AttesterSlashingElectra - require.NoError(t, json.Unmarshal(resp.Data, &slashings)) + // Unmarshal resp.Data into a slice of slashings + var slashings []*structs.AttesterSlashingElectra + require.NoError(t, json.Unmarshal(resp.Data, &slashings)) - ss, err := structs.AttesterSlashingsElectraToConsensus(slashings) - require.NoError(t, err) + ss, err := structs.AttesterSlashingsElectraToConsensus(slashings) + require.NoError(t, err) - require.DeepEqual(t, slashingPostElectra, ss[0]) - }) - t.Run("post-electra-ok", func(t *testing.T) { - bs, err := util.NewBeaconStateElectra() - require.NoError(t, err) + require.DeepEqual(t, slashingPostElectra, ss[0]) + }) + t.Run("pre-electra-ok", func(t *testing.T) { + bs, err := util.NewBeaconState() + require.NoError(t, err) + slot := primitives.Slot(0) + chainService := &blockchainmock.ChainService{State: bs, Slot: &slot} - params.SetupTestConfigCleanup(t) - config := params.BeaconConfig() - config.ElectraForkEpoch = 100 - params.OverrideBeaconConfig(config) + s := &Server{ + ChainInfoFetcher: chainService, + TimeFetcher: chainService, + SlashingsPool: &slashingsmock.PoolMock{PendingAttSlashings: []ethpbv1alpha1.AttSlashing{slashing1PreElectra, slashing2PreElectra}}, + } - chainService := &blockchainmock.ChainService{State: bs} + request := httptest.NewRequest(http.MethodGet, "http://example.com/eth/v2/beacon/pool/attester_slashings", nil) + writer := 
httptest.NewRecorder() + writer.Body = &bytes.Buffer{} - s := &Server{ - ChainInfoFetcher: chainService, - TimeFetcher: chainService, - SlashingsPool: &slashingsmock.PoolMock{PendingAttSlashings: []ethpbv1alpha1.AttSlashing{slashingPostElectra}}, - } + s.GetAttesterSlashingsV2(writer, request) + require.Equal(t, http.StatusOK, writer.Code) + resp := &structs.GetAttesterSlashingsResponse{} + require.NoError(t, json.Unmarshal(writer.Body.Bytes(), resp)) + require.NotNil(t, resp) + require.NotNil(t, resp.Data) - request := httptest.NewRequest(http.MethodGet, "http://example.com/eth/v2/beacon/pool/attester_slashings", nil) - writer := httptest.NewRecorder() - writer.Body = &bytes.Buffer{} + var slashings []*structs.AttesterSlashing + require.NoError(t, json.Unmarshal(resp.Data, &slashings)) - s.GetAttesterSlashingsV2(writer, request) - require.Equal(t, http.StatusOK, writer.Code) - resp := &structs.GetAttesterSlashingsResponse{} - require.NoError(t, json.Unmarshal(writer.Body.Bytes(), resp)) - require.NotNil(t, resp) - require.NotNil(t, resp.Data) - assert.Equal(t, "electra", resp.Version) + ss, err := structs.AttesterSlashingsToConsensus(slashings) + require.NoError(t, err) - // Unmarshal resp.Data into a slice of slashings - var slashings []*structs.AttesterSlashingElectra - require.NoError(t, json.Unmarshal(resp.Data, &slashings)) + require.DeepEqual(t, slashing1PreElectra, ss[0]) + require.DeepEqual(t, slashing2PreElectra, ss[1]) + }) + t.Run("no-slashings", func(t *testing.T) { + bs, err := util.NewBeaconStateElectra() + require.NoError(t, err) - ss, err := structs.AttesterSlashingsElectraToConsensus(slashings) - require.NoError(t, err) + params.SetupTestConfigCleanup(t) + config := params.BeaconConfig() + config.ElectraForkEpoch = 100 + params.OverrideBeaconConfig(config) - require.DeepEqual(t, slashingPostElectra, ss[0]) - }) - t.Run("pre-electra-ok", func(t *testing.T) { - bs, err := util.NewBeaconState() - require.NoError(t, err) - slot := primitives.Slot(0) - chainService := &blockchainmock.ChainService{State: bs, Slot: &slot} + chainService := &blockchainmock.ChainService{State: bs} + s := &Server{ + ChainInfoFetcher: chainService, + TimeFetcher: chainService, + SlashingsPool: &slashingsmock.PoolMock{PendingAttSlashings: []ethpbv1alpha1.AttSlashing{}}, + } - s := &Server{ - ChainInfoFetcher: chainService, - TimeFetcher: chainService, - SlashingsPool: &slashingsmock.PoolMock{PendingAttSlashings: []ethpbv1alpha1.AttSlashing{slashing1PreElectra, slashing2PreElectra}}, - } + request := httptest.NewRequest(http.MethodGet, "http://example.com/eth/v2/beacon/pool/attester_slashings", nil) + writer := httptest.NewRecorder() + writer.Body = &bytes.Buffer{} - request := httptest.NewRequest(http.MethodGet, "http://example.com/eth/v1/beacon/pool/attester_slashings", nil) - writer := httptest.NewRecorder() - writer.Body = &bytes.Buffer{} + s.GetAttesterSlashingsV2(writer, request) + require.Equal(t, http.StatusOK, writer.Code) + resp := &structs.GetAttesterSlashingsResponse{} + require.NoError(t, json.Unmarshal(writer.Body.Bytes(), resp)) + require.NotNil(t, resp) + require.NotNil(t, resp.Data) + assert.Equal(t, "electra", resp.Version) - s.GetAttesterSlashingsV2(writer, request) - require.Equal(t, http.StatusOK, writer.Code) - resp := &structs.GetAttesterSlashingsResponse{} - require.NoError(t, json.Unmarshal(writer.Body.Bytes(), resp)) - require.NotNil(t, resp) - require.NotNil(t, resp.Data) - - var slashings []*structs.AttesterSlashing - require.NoError(t, json.Unmarshal(resp.Data, 
&slashings)) - - ss, err := structs.AttesterSlashingsToConsensus(slashings) - require.NoError(t, err) - - require.DeepEqual(t, slashing1PreElectra, ss[0]) - require.DeepEqual(t, slashing2PreElectra, ss[1]) - }) - t.Run("no-slashings", func(t *testing.T) { - bs, err := util.NewBeaconStateElectra() - require.NoError(t, err) - - params.SetupTestConfigCleanup(t) - config := params.BeaconConfig() - config.ElectraForkEpoch = 100 - params.OverrideBeaconConfig(config) - - chainService := &blockchainmock.ChainService{State: bs} - s := &Server{ - ChainInfoFetcher: chainService, - TimeFetcher: chainService, - SlashingsPool: &slashingsmock.PoolMock{PendingAttSlashings: []ethpbv1alpha1.AttSlashing{}}, - } - - request := httptest.NewRequest(http.MethodGet, "http://example.com/eth/v2/beacon/pool/attester_slashings", nil) - writer := httptest.NewRecorder() - writer.Body = &bytes.Buffer{} - - s.GetAttesterSlashingsV2(writer, request) - require.Equal(t, http.StatusOK, writer.Code) - resp := &structs.GetAttesterSlashingsResponse{} - require.NoError(t, json.Unmarshal(writer.Body.Bytes(), resp)) - require.NotNil(t, resp) - require.NotNil(t, resp.Data) - assert.Equal(t, "electra", resp.Version) - - // Unmarshal resp.Data into a slice of slashings - var slashings []*structs.AttesterSlashingElectra - require.NoError(t, json.Unmarshal(resp.Data, &slashings)) - require.NotNil(t, slashings) - require.Equal(t, 0, len(slashings)) - }) + // Unmarshal resp.Data into a slice of slashings + var slashings []*structs.AttesterSlashingElectra + require.NoError(t, json.Unmarshal(resp.Data, &slashings)) + require.NotNil(t, slashings) + require.Equal(t, 0, len(slashings)) }) } @@ -1929,7 +1665,7 @@ func TestGetProposerSlashings(t *testing.T) { assert.Equal(t, 2, len(resp.Data)) } -func TestSubmitAttesterSlashings(t *testing.T) { +func TestSubmitAttesterSlashingsV2(t *testing.T) { ctx := t.Context() transition.SkipSlotCache.Disable() @@ -1960,292 +1696,135 @@ func TestSubmitAttesterSlashings(t *testing.T) { }, } - t.Run("V1", func(t *testing.T) { - t.Run("ok", func(t *testing.T) { - attestationData1.Slot = 1 - attestationData2.Slot = 1 - slashing := ðpbv1alpha1.AttesterSlashing{ - Attestation_1: ðpbv1alpha1.IndexedAttestation{ - AttestingIndices: []uint64{0}, - Data: attestationData1, - Signature: make([]byte, 96), - }, - Attestation_2: ðpbv1alpha1.IndexedAttestation{ - AttestingIndices: []uint64{0}, - Data: attestationData2, - Signature: make([]byte, 96), - }, - } + t.Run("ok", func(t *testing.T) { + attestationData1.Slot = 1 + attestationData2.Slot = 1 + electraSlashing := ðpbv1alpha1.AttesterSlashingElectra{ + Attestation_1: ðpbv1alpha1.IndexedAttestationElectra{ + AttestingIndices: []uint64{0}, + Data: attestationData1, + Signature: make([]byte, 96), + }, + Attestation_2: ðpbv1alpha1.IndexedAttestationElectra{ + AttestingIndices: []uint64{0}, + Data: attestationData2, + Signature: make([]byte, 96), + }, + } - _, keys, err := util.DeterministicDepositsAndKeys(1) - require.NoError(t, err) - validator := ðpbv1alpha1.Validator{ - PublicKey: keys[0].PublicKey().Marshal(), - } + _, keys, err := util.DeterministicDepositsAndKeys(1) + require.NoError(t, err) + validator := ðpbv1alpha1.Validator{ + PublicKey: keys[0].PublicKey().Marshal(), + } - bs, err := util.NewBeaconState(func(state *ethpbv1alpha1.BeaconState) error { - state.Validators = []*ethpbv1alpha1.Validator{validator} - return nil - }) - require.NoError(t, err) - - for _, att := range []*ethpbv1alpha1.IndexedAttestation{slashing.Attestation_1, slashing.Attestation_2} { - 
sb, err := signing.ComputeDomainAndSign(bs, att.Data.Target.Epoch, att.Data, params.BeaconConfig().DomainBeaconAttester, keys[0]) - require.NoError(t, err) - sig, err := bls.SignatureFromBytes(sb) - require.NoError(t, err) - att.Signature = sig.Marshal() - } - - chainmock := &blockchainmock.ChainService{State: bs} - broadcaster := &p2pMock.MockBroadcaster{} - s := &Server{ - ChainInfoFetcher: chainmock, - SlashingsPool: &slashingsmock.PoolMock{}, - Broadcaster: broadcaster, - OperationNotifier: chainmock.OperationNotifier(), - } - - toSubmit := structs.AttesterSlashingsFromConsensus([]*ethpbv1alpha1.AttesterSlashing{slashing}) - b, err := json.Marshal(toSubmit[0]) - require.NoError(t, err) - var body bytes.Buffer - _, err = body.Write(b) - require.NoError(t, err) - request := httptest.NewRequest(http.MethodPost, "http://example.com/beacon/pool/attester_slashings", &body) - writer := httptest.NewRecorder() - writer.Body = &bytes.Buffer{} - - s.SubmitAttesterSlashings(writer, request) - require.Equal(t, http.StatusOK, writer.Code) - pendingSlashings := s.SlashingsPool.PendingAttesterSlashings(ctx, bs, true) - require.Equal(t, 1, len(pendingSlashings)) - assert.DeepEqual(t, slashing, pendingSlashings[0]) - require.Equal(t, 1, broadcaster.NumMessages()) - assert.Equal(t, true, broadcaster.BroadcastCalled.Load()) - _, ok := broadcaster.BroadcastMessages[0].(*ethpbv1alpha1.AttesterSlashing) - assert.Equal(t, true, ok) + ebs, err := util.NewBeaconStateElectra(func(state *ethpbv1alpha1.BeaconStateElectra) error { + state.Validators = []*ethpbv1alpha1.Validator{validator} + return nil }) - t.Run("across-fork", func(t *testing.T) { - attestationData1.Slot = params.BeaconConfig().SlotsPerEpoch - attestationData2.Slot = params.BeaconConfig().SlotsPerEpoch - slashing := ðpbv1alpha1.AttesterSlashing{ - Attestation_1: ðpbv1alpha1.IndexedAttestation{ - AttestingIndices: []uint64{0}, - Data: attestationData1, - Signature: make([]byte, 96), - }, - Attestation_2: ðpbv1alpha1.IndexedAttestation{ - AttestingIndices: []uint64{0}, - Data: attestationData2, - Signature: make([]byte, 96), - }, - } + require.NoError(t, err) - params.SetupTestConfigCleanup(t) - config := params.BeaconConfig() - config.AltairForkEpoch = 1 - params.OverrideBeaconConfig(config) - - bs, keys := util.DeterministicGenesisState(t, 1) - newBs := bs.Copy() - newBs, err := transition.ProcessSlots(ctx, newBs, params.BeaconConfig().SlotsPerEpoch) + for _, att := range []*ethpbv1alpha1.IndexedAttestationElectra{electraSlashing.Attestation_1, electraSlashing.Attestation_2} { + sb, err := signing.ComputeDomainAndSign(ebs, att.Data.Target.Epoch, att.Data, params.BeaconConfig().DomainBeaconAttester, keys[0]) require.NoError(t, err) - - for _, att := range []*ethpbv1alpha1.IndexedAttestation{slashing.Attestation_1, slashing.Attestation_2} { - sb, err := signing.ComputeDomainAndSign(newBs, att.Data.Target.Epoch, att.Data, params.BeaconConfig().DomainBeaconAttester, keys[0]) - require.NoError(t, err) - sig, err := bls.SignatureFromBytes(sb) - require.NoError(t, err) - att.Signature = sig.Marshal() - } - - broadcaster := &p2pMock.MockBroadcaster{} - chainmock := &blockchainmock.ChainService{State: bs} - s := &Server{ - ChainInfoFetcher: chainmock, - SlashingsPool: &slashingsmock.PoolMock{}, - Broadcaster: broadcaster, - OperationNotifier: chainmock.OperationNotifier(), - } - - toSubmit := structs.AttesterSlashingsFromConsensus([]*ethpbv1alpha1.AttesterSlashing{slashing}) - b, err := json.Marshal(toSubmit[0]) + sig, err := bls.SignatureFromBytes(sb) 
require.NoError(t, err) - var body bytes.Buffer - _, err = body.Write(b) - require.NoError(t, err) - request := httptest.NewRequest(http.MethodPost, "http://example.com/beacon/pool/attester_slashings", &body) - writer := httptest.NewRecorder() - writer.Body = &bytes.Buffer{} + att.Signature = sig.Marshal() + } - s.SubmitAttesterSlashings(writer, request) - require.Equal(t, http.StatusOK, writer.Code) - pendingSlashings := s.SlashingsPool.PendingAttesterSlashings(ctx, bs, true) - require.Equal(t, 1, len(pendingSlashings)) - assert.DeepEqual(t, slashing, pendingSlashings[0]) - require.Equal(t, 1, broadcaster.NumMessages()) - assert.Equal(t, true, broadcaster.BroadcastCalled.Load()) - _, ok := broadcaster.BroadcastMessages[0].(*ethpbv1alpha1.AttesterSlashing) - assert.Equal(t, true, ok) - }) - t.Run("invalid-slashing", func(t *testing.T) { - bs, err := util.NewBeaconState() - require.NoError(t, err) + chainmock := &blockchainmock.ChainService{State: ebs} + broadcaster := &p2pMock.MockBroadcaster{} + s := &Server{ + ChainInfoFetcher: chainmock, + SlashingsPool: &slashingsmock.PoolMock{}, + Broadcaster: broadcaster, + OperationNotifier: chainmock.OperationNotifier(), + } - broadcaster := &p2pMock.MockBroadcaster{} - s := &Server{ - ChainInfoFetcher: &blockchainmock.ChainService{State: bs}, - SlashingsPool: &slashingsmock.PoolMock{}, - Broadcaster: broadcaster, - } + toSubmit := structs.AttesterSlashingsElectraFromConsensus([]*ethpbv1alpha1.AttesterSlashingElectra{electraSlashing}) + b, err := json.Marshal(toSubmit[0]) + require.NoError(t, err) + var body bytes.Buffer + _, err = body.Write(b) + require.NoError(t, err) + request := httptest.NewRequest(http.MethodPost, "http://example.com/beacon/pool/attester_electras", &body) + request.Header.Set(api.VersionHeader, version.String(version.Electra)) + writer := httptest.NewRecorder() + writer.Body = &bytes.Buffer{} - var body bytes.Buffer - _, err = body.WriteString(invalidAttesterSlashing) - require.NoError(t, err) - request := httptest.NewRequest(http.MethodPost, "http://example.com/beacon/pool/attester_slashings", &body) - writer := httptest.NewRecorder() - writer.Body = &bytes.Buffer{} - - s.SubmitAttesterSlashings(writer, request) - require.Equal(t, http.StatusBadRequest, writer.Code) - e := &httputil.DefaultJsonError{} - require.NoError(t, json.Unmarshal(writer.Body.Bytes(), e)) - assert.Equal(t, http.StatusBadRequest, e.Code) - assert.StringContains(t, "Invalid attester slashing", e.Message) - }) + s.SubmitAttesterSlashingsV2(writer, request) + require.Equal(t, http.StatusOK, writer.Code) + pendingSlashings := s.SlashingsPool.PendingAttesterSlashings(ctx, ebs, true) + require.Equal(t, 1, len(pendingSlashings)) + require.Equal(t, 1, broadcaster.NumMessages()) + assert.DeepEqual(t, electraSlashing, pendingSlashings[0]) + assert.Equal(t, true, broadcaster.BroadcastCalled.Load()) + _, ok := broadcaster.BroadcastMessages[0].(*ethpbv1alpha1.AttesterSlashingElectra) + assert.Equal(t, true, ok) }) - t.Run("V2", func(t *testing.T) { - t.Run("ok", func(t *testing.T) { - attestationData1.Slot = 1 - attestationData2.Slot = 1 - electraSlashing := ðpbv1alpha1.AttesterSlashingElectra{ - Attestation_1: ðpbv1alpha1.IndexedAttestationElectra{ - AttestingIndices: []uint64{0}, - Data: attestationData1, - Signature: make([]byte, 96), - }, - Attestation_2: ðpbv1alpha1.IndexedAttestationElectra{ - AttestingIndices: []uint64{0}, - Data: attestationData2, - Signature: make([]byte, 96), - }, - } + t.Run("across-fork", func(t *testing.T) { + attestationData1.Slot = 
params.BeaconConfig().SlotsPerEpoch + attestationData2.Slot = params.BeaconConfig().SlotsPerEpoch + slashing := ðpbv1alpha1.AttesterSlashingElectra{ + Attestation_1: ðpbv1alpha1.IndexedAttestationElectra{ + AttestingIndices: []uint64{0}, + Data: attestationData1, + Signature: make([]byte, 96), + }, + Attestation_2: ðpbv1alpha1.IndexedAttestationElectra{ + AttestingIndices: []uint64{0}, + Data: attestationData2, + Signature: make([]byte, 96), + }, + } - _, keys, err := util.DeterministicDepositsAndKeys(1) + params.SetupTestConfigCleanup(t) + config := params.BeaconConfig() + config.AltairForkEpoch = 1 + params.OverrideBeaconConfig(config) + + bs, keys := util.DeterministicGenesisState(t, 1) + newBs := bs.Copy() + newBs, err := transition.ProcessSlots(ctx, newBs, params.BeaconConfig().SlotsPerEpoch) + require.NoError(t, err) + + for _, att := range []*ethpbv1alpha1.IndexedAttestationElectra{slashing.Attestation_1, slashing.Attestation_2} { + sb, err := signing.ComputeDomainAndSign(newBs, att.Data.Target.Epoch, att.Data, params.BeaconConfig().DomainBeaconAttester, keys[0]) require.NoError(t, err) - validator := ðpbv1alpha1.Validator{ - PublicKey: keys[0].PublicKey().Marshal(), - } - - ebs, err := util.NewBeaconStateElectra(func(state *ethpbv1alpha1.BeaconStateElectra) error { - state.Validators = []*ethpbv1alpha1.Validator{validator} - return nil - }) + sig, err := bls.SignatureFromBytes(sb) require.NoError(t, err) + att.Signature = sig.Marshal() + } - for _, att := range []*ethpbv1alpha1.IndexedAttestationElectra{electraSlashing.Attestation_1, electraSlashing.Attestation_2} { - sb, err := signing.ComputeDomainAndSign(ebs, att.Data.Target.Epoch, att.Data, params.BeaconConfig().DomainBeaconAttester, keys[0]) - require.NoError(t, err) - sig, err := bls.SignatureFromBytes(sb) - require.NoError(t, err) - att.Signature = sig.Marshal() - } + broadcaster := &p2pMock.MockBroadcaster{} + chainmock := &blockchainmock.ChainService{State: bs} + s := &Server{ + ChainInfoFetcher: chainmock, + SlashingsPool: &slashingsmock.PoolMock{}, + Broadcaster: broadcaster, + OperationNotifier: chainmock.OperationNotifier(), + } - chainmock := &blockchainmock.ChainService{State: ebs} - broadcaster := &p2pMock.MockBroadcaster{} - s := &Server{ - ChainInfoFetcher: chainmock, - SlashingsPool: &slashingsmock.PoolMock{}, - Broadcaster: broadcaster, - OperationNotifier: chainmock.OperationNotifier(), - } + toSubmit := structs.AttesterSlashingsElectraFromConsensus([]*ethpbv1alpha1.AttesterSlashingElectra{slashing}) + b, err := json.Marshal(toSubmit[0]) + require.NoError(t, err) + var body bytes.Buffer + _, err = body.Write(b) + require.NoError(t, err) + request := httptest.NewRequest(http.MethodPost, "http://example.com/beacon/pool/attester_slashings", &body) + request.Header.Set(api.VersionHeader, version.String(version.Electra)) + writer := httptest.NewRecorder() + writer.Body = &bytes.Buffer{} - toSubmit := structs.AttesterSlashingsElectraFromConsensus([]*ethpbv1alpha1.AttesterSlashingElectra{electraSlashing}) - b, err := json.Marshal(toSubmit[0]) - require.NoError(t, err) - var body bytes.Buffer - _, err = body.Write(b) - require.NoError(t, err) - request := httptest.NewRequest(http.MethodPost, "http://example.com/beacon/pool/attester_electras", &body) - request.Header.Set(api.VersionHeader, version.String(version.Electra)) - writer := httptest.NewRecorder() - writer.Body = &bytes.Buffer{} - - s.SubmitAttesterSlashingsV2(writer, request) - require.Equal(t, http.StatusOK, writer.Code) - pendingSlashings := 
s.SlashingsPool.PendingAttesterSlashings(ctx, ebs, true) - require.Equal(t, 1, len(pendingSlashings)) - require.Equal(t, 1, broadcaster.NumMessages()) - assert.DeepEqual(t, electraSlashing, pendingSlashings[0]) - assert.Equal(t, true, broadcaster.BroadcastCalled.Load()) - _, ok := broadcaster.BroadcastMessages[0].(*ethpbv1alpha1.AttesterSlashingElectra) - assert.Equal(t, true, ok) - }) - t.Run("across-fork", func(t *testing.T) { - attestationData1.Slot = params.BeaconConfig().SlotsPerEpoch - attestationData2.Slot = params.BeaconConfig().SlotsPerEpoch - slashing := ðpbv1alpha1.AttesterSlashingElectra{ - Attestation_1: ðpbv1alpha1.IndexedAttestationElectra{ - AttestingIndices: []uint64{0}, - Data: attestationData1, - Signature: make([]byte, 96), - }, - Attestation_2: ðpbv1alpha1.IndexedAttestationElectra{ - AttestingIndices: []uint64{0}, - Data: attestationData2, - Signature: make([]byte, 96), - }, - } - - params.SetupTestConfigCleanup(t) - config := params.BeaconConfig() - config.AltairForkEpoch = 1 - params.OverrideBeaconConfig(config) - - bs, keys := util.DeterministicGenesisState(t, 1) - newBs := bs.Copy() - newBs, err := transition.ProcessSlots(ctx, newBs, params.BeaconConfig().SlotsPerEpoch) - require.NoError(t, err) - - for _, att := range []*ethpbv1alpha1.IndexedAttestationElectra{slashing.Attestation_1, slashing.Attestation_2} { - sb, err := signing.ComputeDomainAndSign(newBs, att.Data.Target.Epoch, att.Data, params.BeaconConfig().DomainBeaconAttester, keys[0]) - require.NoError(t, err) - sig, err := bls.SignatureFromBytes(sb) - require.NoError(t, err) - att.Signature = sig.Marshal() - } - - broadcaster := &p2pMock.MockBroadcaster{} - chainmock := &blockchainmock.ChainService{State: bs} - s := &Server{ - ChainInfoFetcher: chainmock, - SlashingsPool: &slashingsmock.PoolMock{}, - Broadcaster: broadcaster, - OperationNotifier: chainmock.OperationNotifier(), - } - - toSubmit := structs.AttesterSlashingsElectraFromConsensus([]*ethpbv1alpha1.AttesterSlashingElectra{slashing}) - b, err := json.Marshal(toSubmit[0]) - require.NoError(t, err) - var body bytes.Buffer - _, err = body.Write(b) - require.NoError(t, err) - request := httptest.NewRequest(http.MethodPost, "http://example.com/beacon/pool/attester_slashings", &body) - request.Header.Set(api.VersionHeader, version.String(version.Electra)) - writer := httptest.NewRecorder() - writer.Body = &bytes.Buffer{} - - s.SubmitAttesterSlashingsV2(writer, request) - require.Equal(t, http.StatusOK, writer.Code) - pendingSlashings := s.SlashingsPool.PendingAttesterSlashings(ctx, bs, true) - require.Equal(t, 1, len(pendingSlashings)) - assert.DeepEqual(t, slashing, pendingSlashings[0]) - require.Equal(t, 1, broadcaster.NumMessages()) - assert.Equal(t, true, broadcaster.BroadcastCalled.Load()) - _, ok := broadcaster.BroadcastMessages[0].(*ethpbv1alpha1.AttesterSlashingElectra) - assert.Equal(t, true, ok) - }) + s.SubmitAttesterSlashingsV2(writer, request) + require.Equal(t, http.StatusOK, writer.Code) + pendingSlashings := s.SlashingsPool.PendingAttesterSlashings(ctx, bs, true) + require.Equal(t, 1, len(pendingSlashings)) + assert.DeepEqual(t, slashing, pendingSlashings[0]) + require.Equal(t, 1, broadcaster.NumMessages()) + assert.Equal(t, true, broadcaster.BroadcastCalled.Load()) + _, ok := broadcaster.BroadcastMessages[0].(*ethpbv1alpha1.AttesterSlashingElectra) + assert.Equal(t, true, ok) }) t.Run("invalid-slashing", func(t *testing.T) { bs, err := util.NewBeaconStateElectra() diff --git a/beacon-chain/rpc/eth/beacon/handlers_test.go 
b/beacon-chain/rpc/eth/beacon/handlers_test.go index 86e24e33b4..1d585a687f 100644 --- a/beacon-chain/rpc/eth/beacon/handlers_test.go +++ b/beacon-chain/rpc/eth/beacon/handlers_test.go @@ -17,7 +17,6 @@ import ( "github.com/OffchainLabs/prysm/v6/api/server/structs" "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/kzg" chainMock "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/cache/depositsnapshot" "github.com/OffchainLabs/prysm/v6/beacon-chain/core/transition" "github.com/OffchainLabs/prysm/v6/beacon-chain/db" dbTest "github.com/OffchainLabs/prysm/v6/beacon-chain/db/testing" @@ -576,7 +575,7 @@ func TestGetBlockSSZV2(t *testing.T) { }) } -func TestGetBlockAttestations(t *testing.T) { +func TestGetBlockAttestationsV2(t *testing.T) { preElectraAtts := []*eth.Attestation{ { AggregationBits: bitfield.Bitlist{0x00}, @@ -667,112 +666,108 @@ func TestGetBlockAttestations(t *testing.T) { esb, err := blocks.NewSignedBeaconBlock(eb) require.NoError(t, err) - t.Run("v1", func(t *testing.T) { - t.Run("ok", func(t *testing.T) { - mockChainService := &chainMock.ChainService{ - FinalizedRoots: map[[32]byte]bool{}, - } + t.Run("ok-pre-electra", func(t *testing.T) { + mockChainService := &chainMock.ChainService{ + FinalizedRoots: map[[32]byte]bool{}, + } - s := &Server{ - OptimisticModeFetcher: mockChainService, - FinalizationFetcher: mockChainService, - Blocker: &testutil.MockBlocker{BlockToReturn: sb}, - } + s := &Server{ + OptimisticModeFetcher: mockChainService, + FinalizationFetcher: mockChainService, + Blocker: &testutil.MockBlocker{BlockToReturn: sb}, + } - request := httptest.NewRequest(http.MethodGet, "http://foo.example/eth/v1/beacon/blocks/{block_id}/attestations", nil) - request.SetPathValue("block_id", "head") - writer := httptest.NewRecorder() - writer.Body = &bytes.Buffer{} + request := httptest.NewRequest(http.MethodGet, "http://foo.example/eth/v2/beacon/blocks/{block_id}/attestations", nil) + request.SetPathValue("block_id", "head") + writer := httptest.NewRecorder() + writer.Body = &bytes.Buffer{} - s.GetBlockAttestations(writer, request) - require.Equal(t, http.StatusOK, writer.Code) + s.GetBlockAttestationsV2(writer, request) + require.Equal(t, http.StatusOK, writer.Code) - resp := &structs.GetBlockAttestationsResponse{} - require.NoError(t, json.Unmarshal(writer.Body.Bytes(), resp)) - require.Equal(t, len(b.Block.Body.Attestations), len(resp.Data)) + resp := &structs.GetBlockAttestationsV2Response{} + require.NoError(t, json.Unmarshal(writer.Body.Bytes(), resp)) - atts := make([]*eth.Attestation, len(b.Block.Body.Attestations)) - for i, a := range resp.Data { - atts[i], err = a.ToConsensus() - require.NoError(t, err) - } - assert.DeepEqual(t, b.Block.Body.Attestations, atts) - }) - t.Run("execution-optimistic", func(t *testing.T) { - r, err := bsb.Block().HashTreeRoot() + var attStructs []structs.Attestation + require.NoError(t, json.Unmarshal(resp.Data, &attStructs)) + + atts := make([]*eth.Attestation, len(attStructs)) + for i, attStruct := range attStructs { + atts[i], err = attStruct.ToConsensus() require.NoError(t, err) - mockChainService := &chainMock.ChainService{ - OptimisticRoots: map[[32]byte]bool{r: true}, - FinalizedRoots: map[[32]byte]bool{}, - } - s := &Server{ - OptimisticModeFetcher: mockChainService, - FinalizationFetcher: mockChainService, - Blocker: &testutil.MockBlocker{BlockToReturn: bsb}, - } + } - request := httptest.NewRequest(http.MethodGet, 
"http://foo.example/eth/v1/beacon/blocks/{block_id}/attestations", nil) - request.SetPathValue("block_id", "head") - writer := httptest.NewRecorder() - writer.Body = &bytes.Buffer{} - - s.GetBlockAttestations(writer, request) - require.Equal(t, http.StatusOK, writer.Code) - resp := &structs.GetBlockAttestationsResponse{} - require.NoError(t, json.Unmarshal(writer.Body.Bytes(), resp)) - assert.Equal(t, true, resp.ExecutionOptimistic) - }) - t.Run("finalized", func(t *testing.T) { - r, err := sb.Block().HashTreeRoot() - require.NoError(t, err) - - t.Run("true", func(t *testing.T) { - mockChainService := &chainMock.ChainService{FinalizedRoots: map[[32]byte]bool{r: true}} - s := &Server{ - OptimisticModeFetcher: mockChainService, - FinalizationFetcher: mockChainService, - Blocker: &testutil.MockBlocker{BlockToReturn: sb}, - } - - request := httptest.NewRequest(http.MethodGet, "http://foo.example/eth/v1/beacon/blocks/{block_id}/attestations", nil) - request.SetPathValue("block_id", "head") - writer := httptest.NewRecorder() - writer.Body = &bytes.Buffer{} - - s.GetBlockAttestations(writer, request) - require.Equal(t, http.StatusOK, writer.Code) - resp := &structs.GetBlockAttestationsResponse{} - require.NoError(t, json.Unmarshal(writer.Body.Bytes(), resp)) - assert.Equal(t, true, resp.Finalized) - }) - t.Run("false", func(t *testing.T) { - mockChainService := &chainMock.ChainService{FinalizedRoots: map[[32]byte]bool{r: false}} - s := &Server{ - OptimisticModeFetcher: mockChainService, - FinalizationFetcher: mockChainService, - Blocker: &testutil.MockBlocker{BlockToReturn: sb}, - } - - request := httptest.NewRequest(http.MethodGet, "http://foo.example/eth/v1/beacon/blocks/{block_id}/attestations", nil) - request.SetPathValue("block_id", "head") - writer := httptest.NewRecorder() - writer.Body = &bytes.Buffer{} - - s.GetBlockAttestations(writer, request) - require.Equal(t, http.StatusOK, writer.Code) - resp := &structs.GetBlockAttestationsResponse{} - require.NoError(t, json.Unmarshal(writer.Body.Bytes(), resp)) - assert.Equal(t, false, resp.ExecutionOptimistic) - }) - }) + assert.DeepEqual(t, b.Block.Body.Attestations, atts) + assert.Equal(t, "phase0", resp.Version) }) + t.Run("ok-post-electra", func(t *testing.T) { + mockChainService := &chainMock.ChainService{ + FinalizedRoots: map[[32]byte]bool{}, + } - t.Run("V2", func(t *testing.T) { - t.Run("ok-pre-electra", func(t *testing.T) { - mockChainService := &chainMock.ChainService{ - FinalizedRoots: map[[32]byte]bool{}, - } + s := &Server{ + OptimisticModeFetcher: mockChainService, + FinalizationFetcher: mockChainService, + Blocker: &testutil.MockBlocker{BlockToReturn: esb}, + } + mockBlockFetcher := &testutil.MockBlocker{BlockToReturn: esb} + s.Blocker = mockBlockFetcher + + request := httptest.NewRequest(http.MethodGet, "http://foo.example/eth/v2/beacon/blocks/{block_id}/attestations", nil) + request.SetPathValue("block_id", "head") + writer := httptest.NewRecorder() + writer.Body = &bytes.Buffer{} + + s.GetBlockAttestationsV2(writer, request) + require.Equal(t, http.StatusOK, writer.Code) + + resp := &structs.GetBlockAttestationsV2Response{} + require.NoError(t, json.Unmarshal(writer.Body.Bytes(), resp)) + + var attStructs []structs.AttestationElectra + require.NoError(t, json.Unmarshal(resp.Data, &attStructs)) + + atts := make([]*eth.AttestationElectra, len(attStructs)) + for i, attStruct := range attStructs { + atts[i], err = attStruct.ToConsensus() + require.NoError(t, err) + } + + assert.DeepEqual(t, eb.Block.Body.Attestations, atts) + 
assert.Equal(t, "electra", resp.Version) + }) + t.Run("execution-optimistic", func(t *testing.T) { + r, err := bsb.Block().HashTreeRoot() + require.NoError(t, err) + mockChainService := &chainMock.ChainService{ + OptimisticRoots: map[[32]byte]bool{r: true}, + FinalizedRoots: map[[32]byte]bool{}, + } + s := &Server{ + OptimisticModeFetcher: mockChainService, + FinalizationFetcher: mockChainService, + Blocker: &testutil.MockBlocker{BlockToReturn: bsb}, + } + + request := httptest.NewRequest(http.MethodGet, "http://foo.example/eth/v2/beacon/blocks/{block_id}/attestations", nil) + request.SetPathValue("block_id", "head") + writer := httptest.NewRecorder() + writer.Body = &bytes.Buffer{} + + s.GetBlockAttestationsV2(writer, request) + require.Equal(t, http.StatusOK, writer.Code) + resp := &structs.GetBlockAttestationsV2Response{} + require.NoError(t, json.Unmarshal(writer.Body.Bytes(), resp)) + assert.Equal(t, true, resp.ExecutionOptimistic) + assert.Equal(t, "bellatrix", resp.Version) + }) + t.Run("finalized", func(t *testing.T) { + r, err := sb.Block().HashTreeRoot() + require.NoError(t, err) + + t.Run("true", func(t *testing.T) { + mockChainService := &chainMock.ChainService{FinalizedRoots: map[[32]byte]bool{r: true}} s := &Server{ OptimisticModeFetcher: mockChainService, FinalizationFetcher: mockChainService, @@ -786,70 +781,17 @@ func TestGetBlockAttestations(t *testing.T) { s.GetBlockAttestationsV2(writer, request) require.Equal(t, http.StatusOK, writer.Code) - resp := &structs.GetBlockAttestationsV2Response{} require.NoError(t, json.Unmarshal(writer.Body.Bytes(), resp)) - - var attStructs []structs.Attestation - require.NoError(t, json.Unmarshal(resp.Data, &attStructs)) - - atts := make([]*eth.Attestation, len(attStructs)) - for i, attStruct := range attStructs { - atts[i], err = attStruct.ToConsensus() - require.NoError(t, err) - } - - assert.DeepEqual(t, b.Block.Body.Attestations, atts) + assert.Equal(t, true, resp.Finalized) assert.Equal(t, "phase0", resp.Version) }) - t.Run("ok-post-electra", func(t *testing.T) { - mockChainService := &chainMock.ChainService{ - FinalizedRoots: map[[32]byte]bool{}, - } - + t.Run("false", func(t *testing.T) { + mockChainService := &chainMock.ChainService{FinalizedRoots: map[[32]byte]bool{r: false}} s := &Server{ OptimisticModeFetcher: mockChainService, FinalizationFetcher: mockChainService, - Blocker: &testutil.MockBlocker{BlockToReturn: esb}, - } - - mockBlockFetcher := &testutil.MockBlocker{BlockToReturn: esb} - s.Blocker = mockBlockFetcher - - request := httptest.NewRequest(http.MethodGet, "http://foo.example/eth/v2/beacon/blocks/{block_id}/attestations", nil) - request.SetPathValue("block_id", "head") - writer := httptest.NewRecorder() - writer.Body = &bytes.Buffer{} - - s.GetBlockAttestationsV2(writer, request) - require.Equal(t, http.StatusOK, writer.Code) - - resp := &structs.GetBlockAttestationsV2Response{} - require.NoError(t, json.Unmarshal(writer.Body.Bytes(), resp)) - - var attStructs []structs.AttestationElectra - require.NoError(t, json.Unmarshal(resp.Data, &attStructs)) - - atts := make([]*eth.AttestationElectra, len(attStructs)) - for i, attStruct := range attStructs { - atts[i], err = attStruct.ToConsensus() - require.NoError(t, err) - } - - assert.DeepEqual(t, eb.Block.Body.Attestations, atts) - assert.Equal(t, "electra", resp.Version) - }) - t.Run("execution-optimistic", func(t *testing.T) { - r, err := bsb.Block().HashTreeRoot() - require.NoError(t, err) - mockChainService := &chainMock.ChainService{ - OptimisticRoots: 
map[[32]byte]bool{r: true}, - FinalizedRoots: map[[32]byte]bool{}, - } - s := &Server{ - OptimisticModeFetcher: mockChainService, - FinalizationFetcher: mockChainService, - Blocker: &testutil.MockBlocker{BlockToReturn: bsb}, + Blocker: &testutil.MockBlocker{BlockToReturn: sb}, } request := httptest.NewRequest(http.MethodGet, "http://foo.example/eth/v2/beacon/blocks/{block_id}/attestations", nil) @@ -861,89 +803,13 @@ func TestGetBlockAttestations(t *testing.T) { require.Equal(t, http.StatusOK, writer.Code) resp := &structs.GetBlockAttestationsV2Response{} require.NoError(t, json.Unmarshal(writer.Body.Bytes(), resp)) - assert.Equal(t, true, resp.ExecutionOptimistic) - assert.Equal(t, "bellatrix", resp.Version) - }) - t.Run("finalized", func(t *testing.T) { - r, err := sb.Block().HashTreeRoot() - require.NoError(t, err) - - t.Run("true", func(t *testing.T) { - mockChainService := &chainMock.ChainService{FinalizedRoots: map[[32]byte]bool{r: true}} - s := &Server{ - OptimisticModeFetcher: mockChainService, - FinalizationFetcher: mockChainService, - Blocker: &testutil.MockBlocker{BlockToReturn: sb}, - } - - request := httptest.NewRequest(http.MethodGet, "http://foo.example/eth/v2/beacon/blocks/{block_id}/attestations", nil) - request.SetPathValue("block_id", "head") - writer := httptest.NewRecorder() - writer.Body = &bytes.Buffer{} - - s.GetBlockAttestationsV2(writer, request) - require.Equal(t, http.StatusOK, writer.Code) - resp := &structs.GetBlockAttestationsV2Response{} - require.NoError(t, json.Unmarshal(writer.Body.Bytes(), resp)) - assert.Equal(t, true, resp.Finalized) - assert.Equal(t, "phase0", resp.Version) - }) - t.Run("false", func(t *testing.T) { - mockChainService := &chainMock.ChainService{FinalizedRoots: map[[32]byte]bool{r: false}} - s := &Server{ - OptimisticModeFetcher: mockChainService, - FinalizationFetcher: mockChainService, - Blocker: &testutil.MockBlocker{BlockToReturn: sb}, - } - - request := httptest.NewRequest(http.MethodGet, "http://foo.example/eth/v2/beacon/blocks/{block_id}/attestations", nil) - request.SetPathValue("block_id", "head") - writer := httptest.NewRecorder() - writer.Body = &bytes.Buffer{} - - s.GetBlockAttestationsV2(writer, request) - require.Equal(t, http.StatusOK, writer.Code) - resp := &structs.GetBlockAttestationsV2Response{} - require.NoError(t, json.Unmarshal(writer.Body.Bytes(), resp)) - assert.Equal(t, false, resp.ExecutionOptimistic) - assert.Equal(t, "phase0", resp.Version) - }) + assert.Equal(t, false, resp.ExecutionOptimistic) + assert.Equal(t, "phase0", resp.Version) }) }) t.Run("empty-attestations", func(t *testing.T) { - t.Run("v1", func(t *testing.T) { - b := util.NewBeaconBlock() - b.Block.Body.Attestations = []*eth.Attestation{} // Explicitly set empty attestations - sb, err := blocks.NewSignedBeaconBlock(b) - require.NoError(t, err) - - mockChainService := &chainMock.ChainService{ - FinalizedRoots: map[[32]byte]bool{}, - } - - s := &Server{ - OptimisticModeFetcher: mockChainService, - FinalizationFetcher: mockChainService, - Blocker: &testutil.MockBlocker{BlockToReturn: sb}, - } - - request := httptest.NewRequest(http.MethodGet, "http://foo.example/eth/v1/beacon/blocks/{block_id}/attestations", nil) - request.SetPathValue("block_id", "head") - writer := httptest.NewRecorder() - writer.Body = &bytes.Buffer{} - - s.GetBlockAttestations(writer, request) - require.Equal(t, http.StatusOK, writer.Code) - resp := &structs.GetBlockAttestationsResponse{} - require.NoError(t, json.Unmarshal(writer.Body.Bytes(), resp)) - - // Ensure data is 
empty array, not null - require.NotNil(t, resp.Data) - assert.Equal(t, 0, len(resp.Data)) - }) - - t.Run("v2-pre-electra", func(t *testing.T) { + t.Run("pre-electra", func(t *testing.T) { b := util.NewBeaconBlock() b.Block.Body.Attestations = []*eth.Attestation{} // Explicitly set empty attestations sb, err := blocks.NewSignedBeaconBlock(b) @@ -972,7 +838,7 @@ func TestGetBlockAttestations(t *testing.T) { assert.Equal(t, string(json.RawMessage("[]")), string(resp.Data)) }) - t.Run("v2-electra", func(t *testing.T) { + t.Run("electra", func(t *testing.T) { eb := util.NewBeaconBlockFulu() eb.Block.Body.Attestations = []*eth.AttestationElectra{} // Explicitly set empty attestations esb, err := blocks.NewSignedBeaconBlock(eb) @@ -1408,290 +1274,6 @@ func TestGetBlindedBlockSSZ(t *testing.T) { }) } -func TestPublishBlock(t *testing.T) { - ctrl := gomock.NewController(t) - t.Run("Phase 0", func(t *testing.T) { - v1alpha1Server := mock2.NewMockBeaconNodeValidatorServer(ctrl) - v1alpha1Server.EXPECT().ProposeBeaconBlock(gomock.Any(), mock.MatchedBy(func(req *eth.GenericSignedBeaconBlock) bool { - block, ok := req.Block.(*eth.GenericSignedBeaconBlock_Phase0) - var signedblock *structs.SignedBeaconBlock - err := json.Unmarshal([]byte(rpctesting.Phase0Block), &signedblock) - require.NoError(t, err) - require.DeepEqual(t, structs.BeaconBlockFromConsensus(block.Phase0.Block), signedblock.Message) - return ok - })) - server := &Server{ - V1Alpha1ValidatorServer: v1alpha1Server, - SyncChecker: &mockSync.Sync{IsSyncing: false}, - } - - request := httptest.NewRequest(http.MethodPost, "http://foo.example", bytes.NewReader([]byte(rpctesting.Phase0Block))) - request.Header.Set(api.VersionHeader, version.String(version.Phase0)) - writer := httptest.NewRecorder() - writer.Body = &bytes.Buffer{} - server.PublishBlock(writer, request) - assert.Equal(t, http.StatusOK, writer.Code) - }) - t.Run("Altair", func(t *testing.T) { - v1alpha1Server := mock2.NewMockBeaconNodeValidatorServer(ctrl) - v1alpha1Server.EXPECT().ProposeBeaconBlock(gomock.Any(), mock.MatchedBy(func(req *eth.GenericSignedBeaconBlock) bool { - block, ok := req.Block.(*eth.GenericSignedBeaconBlock_Altair) - var signedblock *structs.SignedBeaconBlockAltair - err := json.Unmarshal([]byte(rpctesting.AltairBlock), &signedblock) - require.NoError(t, err) - require.DeepEqual(t, structs.BeaconBlockAltairFromConsensus(block.Altair.Block), signedblock.Message) - return ok - })) - server := &Server{ - V1Alpha1ValidatorServer: v1alpha1Server, - SyncChecker: &mockSync.Sync{IsSyncing: false}, - } - - request := httptest.NewRequest(http.MethodPost, "http://foo.example", bytes.NewReader([]byte(rpctesting.AltairBlock))) - request.Header.Set(api.VersionHeader, version.String(version.Altair)) - writer := httptest.NewRecorder() - writer.Body = &bytes.Buffer{} - server.PublishBlock(writer, request) - assert.Equal(t, http.StatusOK, writer.Code) - }) - t.Run("Bellatrix", func(t *testing.T) { - v1alpha1Server := mock2.NewMockBeaconNodeValidatorServer(ctrl) - v1alpha1Server.EXPECT().ProposeBeaconBlock(gomock.Any(), mock.MatchedBy(func(req *eth.GenericSignedBeaconBlock) bool { - block, ok := req.Block.(*eth.GenericSignedBeaconBlock_Bellatrix) - converted, err := structs.BeaconBlockBellatrixFromConsensus(block.Bellatrix.Block) - require.NoError(t, err) - var signedblock *structs.SignedBeaconBlockBellatrix - err = json.Unmarshal([]byte(rpctesting.BellatrixBlock), &signedblock) - require.NoError(t, err) - require.DeepEqual(t, converted, signedblock.Message) - return ok - })) - 
server := &Server{ - V1Alpha1ValidatorServer: v1alpha1Server, - SyncChecker: &mockSync.Sync{IsSyncing: false}, - } - - request := httptest.NewRequest(http.MethodPost, "http://foo.example", bytes.NewReader([]byte(rpctesting.BellatrixBlock))) - request.Header.Set(api.VersionHeader, version.String(version.Bellatrix)) - writer := httptest.NewRecorder() - writer.Body = &bytes.Buffer{} - server.PublishBlock(writer, request) - assert.Equal(t, http.StatusOK, writer.Code) - }) - t.Run("Capella", func(t *testing.T) { - v1alpha1Server := mock2.NewMockBeaconNodeValidatorServer(ctrl) - v1alpha1Server.EXPECT().ProposeBeaconBlock(gomock.Any(), mock.MatchedBy(func(req *eth.GenericSignedBeaconBlock) bool { - block, ok := req.Block.(*eth.GenericSignedBeaconBlock_Capella) - converted, err := structs.BeaconBlockCapellaFromConsensus(block.Capella.Block) - require.NoError(t, err) - var signedblock *structs.SignedBeaconBlockCapella - err = json.Unmarshal([]byte(rpctesting.CapellaBlock), &signedblock) - require.NoError(t, err) - require.DeepEqual(t, converted, signedblock.Message) - return ok - })) - server := &Server{ - V1Alpha1ValidatorServer: v1alpha1Server, - SyncChecker: &mockSync.Sync{IsSyncing: false}, - } - - request := httptest.NewRequest(http.MethodPost, "http://foo.example", bytes.NewReader([]byte(rpctesting.CapellaBlock))) - request.Header.Set(api.VersionHeader, version.String(version.Capella)) - writer := httptest.NewRecorder() - writer.Body = &bytes.Buffer{} - server.PublishBlock(writer, request) - assert.Equal(t, http.StatusOK, writer.Code) - }) - t.Run("Capella block without version header succeeds", func(t *testing.T) { - cfg := params.BeaconConfig().Copy() - cfg.CapellaForkEpoch = 4 - params.OverrideBeaconConfig(cfg) - params.SetupTestConfigCleanup(t) - var signedblock *structs.SignedBeaconBlockCapella - require.NoError(t, json.Unmarshal([]byte(rpctesting.CapellaBlock), &signedblock)) - signedblock.Message.Slot = fmt.Sprintf("%d", uint64(params.BeaconConfig().SlotsPerEpoch)*uint64(params.BeaconConfig().CapellaForkEpoch)) - newBlock, err := json.Marshal(signedblock) - require.NoError(t, err) - v1alpha1Server := mock2.NewMockBeaconNodeValidatorServer(ctrl) - v1alpha1Server.EXPECT().ProposeBeaconBlock(gomock.Any(), mock.MatchedBy(func(req *eth.GenericSignedBeaconBlock) bool { - block, ok := req.Block.(*eth.GenericSignedBeaconBlock_Capella) - converted, err := structs.BeaconBlockCapellaFromConsensus(block.Capella.Block) - require.NoError(t, err) - var signedblock *structs.SignedBeaconBlockCapella - err = json.Unmarshal(newBlock, &signedblock) - require.NoError(t, err) - require.DeepEqual(t, converted, signedblock.Message) - return ok - })) - server := &Server{ - V1Alpha1ValidatorServer: v1alpha1Server, - SyncChecker: &mockSync.Sync{IsSyncing: false}, - } - - request := httptest.NewRequest(http.MethodPost, "http://foo.example", bytes.NewReader(newBlock)) - writer := httptest.NewRecorder() - writer.Body = &bytes.Buffer{} - server.PublishBlock(writer, request) - assert.Equal(t, http.StatusOK, writer.Code) - }) - t.Run("Deneb", func(t *testing.T) { - v1alpha1Server := mock2.NewMockBeaconNodeValidatorServer(ctrl) - v1alpha1Server.EXPECT().ProposeBeaconBlock(gomock.Any(), mock.MatchedBy(func(req *eth.GenericSignedBeaconBlock) bool { - block, ok := req.Block.(*eth.GenericSignedBeaconBlock_Deneb) - converted, err := structs.SignedBeaconBlockContentsDenebFromConsensus(block.Deneb) - require.NoError(t, err) - var signedblock *structs.SignedBeaconBlockContentsDeneb - err = 
json.Unmarshal([]byte(rpctesting.DenebBlockContents), &signedblock) - require.NoError(t, err) - require.DeepEqual(t, converted, signedblock) - return ok - })) - server := &Server{ - V1Alpha1ValidatorServer: v1alpha1Server, - SyncChecker: &mockSync.Sync{IsSyncing: false}, - } - request := httptest.NewRequest(http.MethodPost, "http://foo.example", bytes.NewReader([]byte(rpctesting.DenebBlockContents))) - request.Header.Set(api.VersionHeader, version.String(version.Deneb)) - writer := httptest.NewRecorder() - writer.Body = &bytes.Buffer{} - server.PublishBlock(writer, request) - assert.Equal(t, http.StatusOK, writer.Code) - }) - t.Run("Electra", func(t *testing.T) { - v1alpha1Server := mock2.NewMockBeaconNodeValidatorServer(ctrl) - v1alpha1Server.EXPECT().ProposeBeaconBlock(gomock.Any(), mock.MatchedBy(func(req *eth.GenericSignedBeaconBlock) bool { - block, ok := req.Block.(*eth.GenericSignedBeaconBlock_Electra) - converted, err := structs.SignedBeaconBlockContentsElectraFromConsensus(block.Electra) - require.NoError(t, err) - var signedblock *structs.SignedBeaconBlockContentsElectra - err = json.Unmarshal([]byte(rpctesting.FuluBlockContents), &signedblock) - require.NoError(t, err) - require.DeepEqual(t, converted, signedblock) - return ok - })) - server := &Server{ - V1Alpha1ValidatorServer: v1alpha1Server, - SyncChecker: &mockSync.Sync{IsSyncing: false}, - } - request := httptest.NewRequest(http.MethodPost, "http://foo.example", bytes.NewReader([]byte(rpctesting.ElectraBlockContents))) - request.Header.Set(api.VersionHeader, version.String(version.Electra)) - writer := httptest.NewRecorder() - writer.Body = &bytes.Buffer{} - server.PublishBlock(writer, request) - assert.Equal(t, http.StatusOK, writer.Code) - }) - t.Run("Electra block without version header succeeds", func(t *testing.T) { - cfg := params.BeaconConfig().Copy() - cfg.ElectraForkEpoch = 6 - params.OverrideBeaconConfig(cfg) - params.SetupTestConfigCleanup(t) - var signedblock *structs.SignedBeaconBlockContentsElectra - require.NoError(t, json.Unmarshal([]byte(rpctesting.ElectraBlockContents), &signedblock)) - signedblock.SignedBlock.Message.Slot = fmt.Sprintf("%d", uint64(params.BeaconConfig().SlotsPerEpoch)*uint64(params.BeaconConfig().ElectraForkEpoch)) - newContents, err := json.Marshal(signedblock) - require.NoError(t, err) - v1alpha1Server := mock2.NewMockBeaconNodeValidatorServer(ctrl) - v1alpha1Server.EXPECT().ProposeBeaconBlock(gomock.Any(), mock.MatchedBy(func(req *eth.GenericSignedBeaconBlock) bool { - block, ok := req.Block.(*eth.GenericSignedBeaconBlock_Electra) - converted, err := structs.SignedBeaconBlockContentsElectraFromConsensus(block.Electra) - require.NoError(t, err) - var signedblock *structs.SignedBeaconBlockContentsElectra - err = json.Unmarshal(newContents, &signedblock) - require.NoError(t, err) - require.DeepEqual(t, converted, signedblock) - return ok - })) - server := &Server{ - V1Alpha1ValidatorServer: v1alpha1Server, - SyncChecker: &mockSync.Sync{IsSyncing: false}, - } - - request := httptest.NewRequest(http.MethodPost, "http://foo.example", bytes.NewReader(newContents)) - writer := httptest.NewRecorder() - writer.Body = &bytes.Buffer{} - server.PublishBlock(writer, request) - assert.Equal(t, http.StatusOK, writer.Code) - }) - t.Run("Electra block without version header on wrong fork", func(t *testing.T) { - v1alpha1Server := mock2.NewMockBeaconNodeValidatorServer(ctrl) - server := &Server{ - V1Alpha1ValidatorServer: v1alpha1Server, - SyncChecker: &mockSync.Sync{IsSyncing: false}, - } - request := 
httptest.NewRequest(http.MethodPost, "http://foo.example", bytes.NewReader([]byte(rpctesting.ElectraBlockContents))) - writer := httptest.NewRecorder() - writer.Body = &bytes.Buffer{} - server.PublishBlock(writer, request) - assert.Equal(t, http.StatusBadRequest, writer.Code) - assert.StringContains(t, fmt.Sprintf("could not decode request body into %s consensus block", version.String(version.Phase0)), writer.Body.String()) - }) - t.Run("Fulu", func(t *testing.T) { - v1alpha1Server := mock2.NewMockBeaconNodeValidatorServer(ctrl) - v1alpha1Server.EXPECT().ProposeBeaconBlock(gomock.Any(), mock.MatchedBy(func(req *eth.GenericSignedBeaconBlock) bool { - block, ok := req.Block.(*eth.GenericSignedBeaconBlock_Fulu) - converted, err := structs.SignedBeaconBlockContentsFuluFromConsensus(block.Fulu) - require.NoError(t, err) - var signedblock *structs.SignedBeaconBlockContentsFulu - err = json.Unmarshal([]byte(rpctesting.FuluBlockContents), &signedblock) - require.NoError(t, err) - require.DeepEqual(t, converted, signedblock) - return ok - })) - server := &Server{ - V1Alpha1ValidatorServer: v1alpha1Server, - SyncChecker: &mockSync.Sync{IsSyncing: false}, - } - request := httptest.NewRequest(http.MethodPost, "http://foo.example", bytes.NewReader([]byte(rpctesting.FuluBlockContents))) - request.Header.Set(api.VersionHeader, version.String(version.Fulu)) - writer := httptest.NewRecorder() - writer.Body = &bytes.Buffer{} - server.PublishBlock(writer, request) - assert.Equal(t, http.StatusOK, writer.Code) - }) - t.Run("invalid block", func(t *testing.T) { - server := &Server{ - SyncChecker: &mockSync.Sync{IsSyncing: false}, - } - - request := httptest.NewRequest(http.MethodPost, "http://foo.example", bytes.NewReader([]byte(rpctesting.BlindedBellatrixBlock))) - request.Header.Set(api.VersionHeader, version.String(version.Bellatrix)) - writer := httptest.NewRecorder() - writer.Body = &bytes.Buffer{} - server.PublishBlock(writer, request) - assert.Equal(t, http.StatusBadRequest, writer.Code) - assert.StringContains(t, fmt.Sprintf("could not decode request body into %s consensus block", version.String(version.Bellatrix)), writer.Body.String()) - }) - t.Run("wrong version header", func(t *testing.T) { - server := &Server{ - SyncChecker: &mockSync.Sync{IsSyncing: false}, - } - - request := httptest.NewRequest(http.MethodPost, "http://foo.example", bytes.NewReader([]byte(rpctesting.BellatrixBlock))) - request.Header.Set(api.VersionHeader, version.String(version.Capella)) - writer := httptest.NewRecorder() - writer.Body = &bytes.Buffer{} - server.PublishBlock(writer, request) - assert.Equal(t, http.StatusBadRequest, writer.Code) - assert.StringContains(t, fmt.Sprintf("could not decode request body into %s consensus block", version.String(version.Capella)), writer.Body.String()) - }) - t.Run("syncing", func(t *testing.T) { - chainService := &chainMock.ChainService{} - server := &Server{ - SyncChecker: &mockSync.Sync{IsSyncing: true}, - HeadFetcher: chainService, - TimeFetcher: chainService, - OptimisticModeFetcher: chainService, - } - - request := httptest.NewRequest(http.MethodPost, "http://foo.example", bytes.NewReader([]byte("foo"))) - writer := httptest.NewRecorder() - writer.Body = &bytes.Buffer{} - server.PublishBlock(writer, request) - assert.Equal(t, http.StatusServiceUnavailable, writer.Code) - assert.StringContains(t, "Beacon node is currently syncing and not serving request on that endpoint", writer.Body.String()) - }) -} - func TestVersionHeaderFromRequest(t *testing.T) { t.Run("Fulu block contents 
returns fulu header", func(t *testing.T) { cfg := params.BeaconConfig().Copy() @@ -1863,759 +1445,6 @@ func TestVersionHeaderFromRequest(t *testing.T) { }) } -func TestPublishBlockSSZ(t *testing.T) { - ctrl := gomock.NewController(t) - t.Run("Phase 0", func(t *testing.T) { - v1alpha1Server := mock2.NewMockBeaconNodeValidatorServer(ctrl) - v1alpha1Server.EXPECT().ProposeBeaconBlock(gomock.Any(), mock.MatchedBy(func(req *eth.GenericSignedBeaconBlock) bool { - block, ok := req.Block.(*eth.GenericSignedBeaconBlock_Phase0) - var signedblock *structs.SignedBeaconBlock - err := json.Unmarshal([]byte(rpctesting.Phase0Block), &signedblock) - require.NoError(t, err) - require.DeepEqual(t, structs.BeaconBlockFromConsensus(block.Phase0.Block), signedblock.Message) - return ok - })) - server := &Server{ - V1Alpha1ValidatorServer: v1alpha1Server, - SyncChecker: &mockSync.Sync{IsSyncing: false}, - } - - var blk structs.SignedBeaconBlock - err := json.Unmarshal([]byte(rpctesting.Phase0Block), &blk) - require.NoError(t, err) - genericBlock, err := blk.ToGeneric() - require.NoError(t, err) - ssz, err := genericBlock.GetPhase0().MarshalSSZ() - require.NoError(t, err) - request := httptest.NewRequest(http.MethodPost, "http://foo.example", bytes.NewReader(ssz)) - request.Header.Set("Content-Type", api.OctetStreamMediaType) - request.Header.Set(api.VersionHeader, version.String(version.Phase0)) - writer := httptest.NewRecorder() - writer.Body = &bytes.Buffer{} - server.PublishBlock(writer, request) - assert.Equal(t, http.StatusOK, writer.Code) - }) - t.Run("Altair", func(t *testing.T) { - v1alpha1Server := mock2.NewMockBeaconNodeValidatorServer(ctrl) - v1alpha1Server.EXPECT().ProposeBeaconBlock(gomock.Any(), mock.MatchedBy(func(req *eth.GenericSignedBeaconBlock) bool { - block, ok := req.Block.(*eth.GenericSignedBeaconBlock_Altair) - var signedblock *structs.SignedBeaconBlockAltair - err := json.Unmarshal([]byte(rpctesting.AltairBlock), &signedblock) - require.NoError(t, err) - require.DeepEqual(t, structs.BeaconBlockAltairFromConsensus(block.Altair.Block), signedblock.Message) - return ok - })) - server := &Server{ - V1Alpha1ValidatorServer: v1alpha1Server, - SyncChecker: &mockSync.Sync{IsSyncing: false}, - } - - var blk structs.SignedBeaconBlockAltair - err := json.Unmarshal([]byte(rpctesting.AltairBlock), &blk) - require.NoError(t, err) - genericBlock, err := blk.ToGeneric() - require.NoError(t, err) - ssz, err := genericBlock.GetAltair().MarshalSSZ() - require.NoError(t, err) - request := httptest.NewRequest(http.MethodPost, "http://foo.example", bytes.NewReader(ssz)) - request.Header.Set("Content-Type", api.OctetStreamMediaType) - request.Header.Set(api.VersionHeader, version.String(version.Altair)) - writer := httptest.NewRecorder() - writer.Body = &bytes.Buffer{} - server.PublishBlock(writer, request) - assert.Equal(t, http.StatusOK, writer.Code) - }) - t.Run("Bellatrix", func(t *testing.T) { - v1alpha1Server := mock2.NewMockBeaconNodeValidatorServer(ctrl) - v1alpha1Server.EXPECT().ProposeBeaconBlock(gomock.Any(), mock.MatchedBy(func(req *eth.GenericSignedBeaconBlock) bool { - _, ok := req.Block.(*eth.GenericSignedBeaconBlock_Bellatrix) - return ok - })) - server := &Server{ - V1Alpha1ValidatorServer: v1alpha1Server, - SyncChecker: &mockSync.Sync{IsSyncing: false}, - } - var blk structs.SignedBeaconBlockBellatrix - err := json.Unmarshal([]byte(rpctesting.BellatrixBlock), &blk) - require.NoError(t, err) - genericBlock, err := blk.ToGeneric() - require.NoError(t, err) - ssz, err := 
genericBlock.GetBellatrix().MarshalSSZ() - require.NoError(t, err) - request := httptest.NewRequest(http.MethodPost, "http://foo.example", bytes.NewReader(ssz)) - request.Header.Set("Content-Type", api.OctetStreamMediaType) - request.Header.Set(api.VersionHeader, version.String(version.Bellatrix)) - writer := httptest.NewRecorder() - writer.Body = &bytes.Buffer{} - server.PublishBlock(writer, request) - assert.Equal(t, http.StatusOK, writer.Code) - }) - t.Run("Capella", func(t *testing.T) { - v1alpha1Server := mock2.NewMockBeaconNodeValidatorServer(ctrl) - v1alpha1Server.EXPECT().ProposeBeaconBlock(gomock.Any(), mock.MatchedBy(func(req *eth.GenericSignedBeaconBlock) bool { - _, ok := req.Block.(*eth.GenericSignedBeaconBlock_Capella) - return ok - })) - server := &Server{ - V1Alpha1ValidatorServer: v1alpha1Server, - SyncChecker: &mockSync.Sync{IsSyncing: false}, - } - - var blk structs.SignedBeaconBlockCapella - err := json.Unmarshal([]byte(rpctesting.CapellaBlock), &blk) - require.NoError(t, err) - genericBlock, err := blk.ToGeneric() - require.NoError(t, err) - ssz, err := genericBlock.GetCapella().MarshalSSZ() - require.NoError(t, err) - request := httptest.NewRequest(http.MethodPost, "http://foo.example", bytes.NewReader(ssz)) - request.Header.Set("Content-Type", api.OctetStreamMediaType) - request.Header.Set(api.VersionHeader, version.String(version.Capella)) - writer := httptest.NewRecorder() - writer.Body = &bytes.Buffer{} - server.PublishBlock(writer, request) - assert.Equal(t, http.StatusOK, writer.Code) - }) - t.Run("Deneb", func(t *testing.T) { - v1alpha1Server := mock2.NewMockBeaconNodeValidatorServer(ctrl) - v1alpha1Server.EXPECT().ProposeBeaconBlock(gomock.Any(), mock.MatchedBy(func(req *eth.GenericSignedBeaconBlock) bool { - _, ok := req.Block.(*eth.GenericSignedBeaconBlock_Deneb) - return ok - })) - server := &Server{ - V1Alpha1ValidatorServer: v1alpha1Server, - SyncChecker: &mockSync.Sync{IsSyncing: false}, - } - - var blk structs.SignedBeaconBlockContentsDeneb - err := json.Unmarshal([]byte(rpctesting.DenebBlockContents), &blk) - require.NoError(t, err) - genericBlock, err := blk.ToGeneric() - require.NoError(t, err) - ssz, err := genericBlock.GetDeneb().MarshalSSZ() - require.NoError(t, err) - request := httptest.NewRequest(http.MethodPost, "http://foo.example", bytes.NewReader(ssz)) - request.Header.Set("Content-Type", api.OctetStreamMediaType) - request.Header.Set(api.VersionHeader, version.String(version.Deneb)) - writer := httptest.NewRecorder() - writer.Body = &bytes.Buffer{} - server.PublishBlock(writer, request) - assert.Equal(t, http.StatusOK, writer.Code) - }) - t.Run("Electra", func(t *testing.T) { - v1alpha1Server := mock2.NewMockBeaconNodeValidatorServer(ctrl) - v1alpha1Server.EXPECT().ProposeBeaconBlock(gomock.Any(), mock.MatchedBy(func(req *eth.GenericSignedBeaconBlock) bool { - _, ok := req.Block.(*eth.GenericSignedBeaconBlock_Electra) - return ok - })) - server := &Server{ - V1Alpha1ValidatorServer: v1alpha1Server, - SyncChecker: &mockSync.Sync{IsSyncing: false}, - } - - var blk structs.SignedBeaconBlockContentsElectra - err := json.Unmarshal([]byte(rpctesting.FuluBlockContents), &blk) - require.NoError(t, err) - genericBlock, err := blk.ToGeneric() - require.NoError(t, err) - ssz, err := genericBlock.GetElectra().MarshalSSZ() - require.NoError(t, err) - request := httptest.NewRequest(http.MethodPost, "http://foo.example", bytes.NewReader(ssz)) - request.Header.Set("Content-Type", api.OctetStreamMediaType) - request.Header.Set(api.VersionHeader, 
version.String(version.Electra)) - writer := httptest.NewRecorder() - writer.Body = &bytes.Buffer{} - server.PublishBlock(writer, request) - assert.Equal(t, http.StatusOK, writer.Code) - }) - t.Run("Fulu", func(t *testing.T) { - v1alpha1Server := mock2.NewMockBeaconNodeValidatorServer(ctrl) - v1alpha1Server.EXPECT().ProposeBeaconBlock(gomock.Any(), mock.MatchedBy(func(req *eth.GenericSignedBeaconBlock) bool { - _, ok := req.Block.(*eth.GenericSignedBeaconBlock_Fulu) - return ok - })) - server := &Server{ - V1Alpha1ValidatorServer: v1alpha1Server, - SyncChecker: &mockSync.Sync{IsSyncing: false}, - } - - var blk structs.SignedBeaconBlockContentsFulu - err := json.Unmarshal([]byte(rpctesting.FuluBlockContents), &blk) - require.NoError(t, err) - genericBlock, err := blk.ToGeneric() - require.NoError(t, err) - ssz, err := genericBlock.GetFulu().MarshalSSZ() - require.NoError(t, err) - request := httptest.NewRequest(http.MethodPost, "http://foo.example", bytes.NewReader(ssz)) - request.Header.Set("Content-Type", api.OctetStreamMediaType) - request.Header.Set(api.VersionHeader, version.String(version.Fulu)) - writer := httptest.NewRecorder() - writer.Body = &bytes.Buffer{} - server.PublishBlock(writer, request) - assert.Equal(t, http.StatusOK, writer.Code) - }) - t.Run("invalid block", func(t *testing.T) { - server := &Server{ - SyncChecker: &mockSync.Sync{IsSyncing: false}, - } - - var blk structs.SignedBlindedBeaconBlockBellatrix - err := json.Unmarshal([]byte(rpctesting.BlindedBellatrixBlock), &blk) - require.NoError(t, err) - genericBlock, err := blk.ToGeneric() - require.NoError(t, err) - ssz, err := genericBlock.GetBlindedBellatrix().MarshalSSZ() - require.NoError(t, err) - request := httptest.NewRequest(http.MethodPost, "http://foo.example", bytes.NewReader(ssz)) - request.Header.Set("Content-Type", api.OctetStreamMediaType) - request.Header.Set(api.VersionHeader, version.String(version.Bellatrix)) - writer := httptest.NewRecorder() - writer.Body = &bytes.Buffer{} - server.PublishBlock(writer, request) - assert.Equal(t, http.StatusBadRequest, writer.Code) - assert.StringContains(t, fmt.Sprintf("could not decode request body into %s consensus block", version.String(version.Bellatrix)), writer.Body.String()) - }) - t.Run("wrong version header", func(t *testing.T) { - server := &Server{ - SyncChecker: &mockSync.Sync{IsSyncing: false}, - } - - var blk structs.SignedBeaconBlockBellatrix - err := json.Unmarshal([]byte(rpctesting.BellatrixBlock), &blk) - require.NoError(t, err) - genericBlock, err := blk.ToGeneric() - require.NoError(t, err) - ssz, err := genericBlock.GetBellatrix().MarshalSSZ() - require.NoError(t, err) - request := httptest.NewRequest(http.MethodPost, "http://foo.example", bytes.NewReader(ssz)) - request.Header.Set("Content-Type", api.OctetStreamMediaType) - request.Header.Set(api.VersionHeader, version.String(version.Capella)) - writer := httptest.NewRecorder() - writer.Body = &bytes.Buffer{} - server.PublishBlock(writer, request) - assert.Equal(t, http.StatusBadRequest, writer.Code) - assert.StringContains(t, fmt.Sprintf("could not decode request body into %s consensus block", version.String(version.Capella)), writer.Body.String()) - }) - t.Run("syncing", func(t *testing.T) { - chainService := &chainMock.ChainService{} - server := &Server{ - SyncChecker: &mockSync.Sync{IsSyncing: true}, - HeadFetcher: chainService, - TimeFetcher: chainService, - OptimisticModeFetcher: chainService, - } - - request := httptest.NewRequest(http.MethodPost, "http://foo.example", 
bytes.NewReader([]byte("foo"))) - request.Header.Set("Content-Type", api.OctetStreamMediaType) - writer := httptest.NewRecorder() - writer.Body = &bytes.Buffer{} - server.PublishBlock(writer, request) - assert.Equal(t, http.StatusServiceUnavailable, writer.Code) - assert.StringContains(t, "Beacon node is currently syncing and not serving request on that endpoint", writer.Body.String()) - }) -} - -func TestPublishBlindedBlock(t *testing.T) { - ctrl := gomock.NewController(t) - t.Run("Phase 0", func(t *testing.T) { - v1alpha1Server := mock2.NewMockBeaconNodeValidatorServer(ctrl) - v1alpha1Server.EXPECT().ProposeBeaconBlock(gomock.Any(), mock.MatchedBy(func(req *eth.GenericSignedBeaconBlock) bool { - block, ok := req.Block.(*eth.GenericSignedBeaconBlock_Phase0) - var signedblock *structs.SignedBeaconBlock - err := json.Unmarshal([]byte(rpctesting.Phase0Block), &signedblock) - require.NoError(t, err) - require.DeepEqual(t, structs.BeaconBlockFromConsensus(block.Phase0.Block), signedblock.Message) - return ok - })) - server := &Server{ - V1Alpha1ValidatorServer: v1alpha1Server, - SyncChecker: &mockSync.Sync{IsSyncing: false}, - } - - request := httptest.NewRequest(http.MethodPost, "http://foo.example", bytes.NewReader([]byte(rpctesting.Phase0Block))) - request.Header.Set(api.VersionHeader, version.String(version.Phase0)) - writer := httptest.NewRecorder() - writer.Body = &bytes.Buffer{} - server.PublishBlindedBlock(writer, request) - assert.Equal(t, http.StatusOK, writer.Code) - }) - t.Run("Altair", func(t *testing.T) { - v1alpha1Server := mock2.NewMockBeaconNodeValidatorServer(ctrl) - v1alpha1Server.EXPECT().ProposeBeaconBlock(gomock.Any(), mock.MatchedBy(func(req *eth.GenericSignedBeaconBlock) bool { - block, ok := req.Block.(*eth.GenericSignedBeaconBlock_Altair) - var signedblock *structs.SignedBeaconBlockAltair - err := json.Unmarshal([]byte(rpctesting.AltairBlock), &signedblock) - require.NoError(t, err) - require.DeepEqual(t, structs.BeaconBlockAltairFromConsensus(block.Altair.Block), signedblock.Message) - return ok - })) - server := &Server{ - V1Alpha1ValidatorServer: v1alpha1Server, - SyncChecker: &mockSync.Sync{IsSyncing: false}, - } - - request := httptest.NewRequest(http.MethodPost, "http://foo.example", bytes.NewReader([]byte(rpctesting.AltairBlock))) - request.Header.Set(api.VersionHeader, version.String(version.Altair)) - writer := httptest.NewRecorder() - writer.Body = &bytes.Buffer{} - server.PublishBlindedBlock(writer, request) - assert.Equal(t, http.StatusOK, writer.Code) - }) - t.Run("Blinded Bellatrix", func(t *testing.T) { - v1alpha1Server := mock2.NewMockBeaconNodeValidatorServer(ctrl) - v1alpha1Server.EXPECT().ProposeBeaconBlock(gomock.Any(), mock.MatchedBy(func(req *eth.GenericSignedBeaconBlock) bool { - block, ok := req.Block.(*eth.GenericSignedBeaconBlock_BlindedBellatrix) - converted, err := structs.BlindedBeaconBlockBellatrixFromConsensus(block.BlindedBellatrix.Block) - require.NoError(t, err) - var signedblock *structs.SignedBlindedBeaconBlockBellatrix - err = json.Unmarshal([]byte(rpctesting.BlindedBellatrixBlock), &signedblock) - require.NoError(t, err) - require.DeepEqual(t, converted, signedblock.Message) - return ok - })) - server := &Server{ - V1Alpha1ValidatorServer: v1alpha1Server, - SyncChecker: &mockSync.Sync{IsSyncing: false}, - } - - request := httptest.NewRequest(http.MethodPost, "http://foo.example", bytes.NewReader([]byte(rpctesting.BlindedBellatrixBlock))) - request.Header.Set(api.VersionHeader, version.String(version.Bellatrix)) - writer := 
httptest.NewRecorder() - writer.Body = &bytes.Buffer{} - server.PublishBlindedBlock(writer, request) - assert.Equal(t, http.StatusOK, writer.Code) - }) - t.Run("Blinded Capella", func(t *testing.T) { - v1alpha1Server := mock2.NewMockBeaconNodeValidatorServer(ctrl) - v1alpha1Server.EXPECT().ProposeBeaconBlock(gomock.Any(), mock.MatchedBy(func(req *eth.GenericSignedBeaconBlock) bool { - block, ok := req.Block.(*eth.GenericSignedBeaconBlock_BlindedCapella) - converted, err := structs.BlindedBeaconBlockCapellaFromConsensus(block.BlindedCapella.Block) - require.NoError(t, err) - var signedblock *structs.SignedBlindedBeaconBlockCapella - err = json.Unmarshal([]byte(rpctesting.BlindedCapellaBlock), &signedblock) - require.NoError(t, err) - require.DeepEqual(t, converted, signedblock.Message) - return ok - })) - server := &Server{ - V1Alpha1ValidatorServer: v1alpha1Server, - SyncChecker: &mockSync.Sync{IsSyncing: false}, - } - - request := httptest.NewRequest(http.MethodPost, "http://foo.example", bytes.NewReader([]byte(rpctesting.BlindedCapellaBlock))) - request.Header.Set(api.VersionHeader, version.String(version.Capella)) - writer := httptest.NewRecorder() - writer.Body = &bytes.Buffer{} - server.PublishBlindedBlock(writer, request) - assert.Equal(t, http.StatusOK, writer.Code) - }) - t.Run("Blinded Deneb", func(t *testing.T) { - v1alpha1Server := mock2.NewMockBeaconNodeValidatorServer(ctrl) - v1alpha1Server.EXPECT().ProposeBeaconBlock(gomock.Any(), mock.MatchedBy(func(req *eth.GenericSignedBeaconBlock) bool { - block, ok := req.Block.(*eth.GenericSignedBeaconBlock_BlindedDeneb) - converted, err := structs.BlindedBeaconBlockDenebFromConsensus(block.BlindedDeneb.Message) - require.NoError(t, err) - var signedblock *structs.SignedBlindedBeaconBlockDeneb - err = json.Unmarshal([]byte(rpctesting.BlindedDenebBlock), &signedblock) - require.NoError(t, err) - require.DeepEqual(t, converted, signedblock.Message) - return ok - })) - server := &Server{ - V1Alpha1ValidatorServer: v1alpha1Server, - SyncChecker: &mockSync.Sync{IsSyncing: false}, - } - - request := httptest.NewRequest(http.MethodPost, "http://foo.example", bytes.NewReader([]byte(rpctesting.BlindedDenebBlock))) - request.Header.Set(api.VersionHeader, version.String(version.Deneb)) - writer := httptest.NewRecorder() - writer.Body = &bytes.Buffer{} - server.PublishBlindedBlock(writer, request) - assert.Equal(t, http.StatusOK, writer.Code) - }) - t.Run("Blinded Electra", func(t *testing.T) { - v1alpha1Server := mock2.NewMockBeaconNodeValidatorServer(ctrl) - v1alpha1Server.EXPECT().ProposeBeaconBlock(gomock.Any(), mock.MatchedBy(func(req *eth.GenericSignedBeaconBlock) bool { - block, ok := req.Block.(*eth.GenericSignedBeaconBlock_BlindedElectra) - converted, err := structs.BlindedBeaconBlockElectraFromConsensus(block.BlindedElectra.Message) - require.NoError(t, err) - var signedblock *structs.SignedBlindedBeaconBlockElectra - err = json.Unmarshal([]byte(rpctesting.BlindedElectraBlock), &signedblock) - require.NoError(t, err) - require.DeepEqual(t, converted, signedblock.Message) - return ok - })) - server := &Server{ - V1Alpha1ValidatorServer: v1alpha1Server, - SyncChecker: &mockSync.Sync{IsSyncing: false}, - } - - request := httptest.NewRequest(http.MethodPost, "http://foo.example", bytes.NewReader([]byte(rpctesting.BlindedElectraBlock))) - request.Header.Set(api.VersionHeader, version.String(version.Electra)) - writer := httptest.NewRecorder() - writer.Body = &bytes.Buffer{} - server.PublishBlindedBlock(writer, request) - assert.Equal(t, 
http.StatusOK, writer.Code) - }) - t.Run("Blinded Electra block without version header succeeds", func(t *testing.T) { - cfg := params.BeaconConfig().Copy() - cfg.ElectraForkEpoch = 6 - params.OverrideBeaconConfig(cfg) - params.SetupTestConfigCleanup(t) - var signedblock *structs.SignedBlindedBeaconBlockElectra - require.NoError(t, json.Unmarshal([]byte(rpctesting.BlindedElectraBlock), &signedblock)) - signedblock.Message.Slot = fmt.Sprintf("%d", uint64(params.BeaconConfig().SlotsPerEpoch)*uint64(params.BeaconConfig().ElectraForkEpoch)) - newBlock, err := json.Marshal(signedblock) - require.NoError(t, err) - v1alpha1Server := mock2.NewMockBeaconNodeValidatorServer(ctrl) - v1alpha1Server.EXPECT().ProposeBeaconBlock(gomock.Any(), mock.MatchedBy(func(req *eth.GenericSignedBeaconBlock) bool { - block, ok := req.Block.(*eth.GenericSignedBeaconBlock_BlindedElectra) - converted, err := structs.BlindedBeaconBlockElectraFromConsensus(block.BlindedElectra.Message) - require.NoError(t, err) - var signedblock *structs.SignedBlindedBeaconBlockElectra - err = json.Unmarshal(newBlock, &signedblock) - require.NoError(t, err) - require.DeepEqual(t, converted, signedblock.Message) - return ok - })) - server := &Server{ - V1Alpha1ValidatorServer: v1alpha1Server, - SyncChecker: &mockSync.Sync{IsSyncing: false}, - } - - request := httptest.NewRequest(http.MethodPost, "http://foo.example", bytes.NewReader(newBlock)) - writer := httptest.NewRecorder() - writer.Body = &bytes.Buffer{} - server.PublishBlindedBlock(writer, request) - assert.Equal(t, http.StatusOK, writer.Code) - }) - t.Run("Blinded Electra block without version header on wrong fork", func(t *testing.T) { - v1alpha1Server := mock2.NewMockBeaconNodeValidatorServer(ctrl) - server := &Server{ - V1Alpha1ValidatorServer: v1alpha1Server, - SyncChecker: &mockSync.Sync{IsSyncing: false}, - } - request := httptest.NewRequest(http.MethodPost, "http://foo.example", bytes.NewReader([]byte(rpctesting.BlindedElectraBlock))) - writer := httptest.NewRecorder() - writer.Body = &bytes.Buffer{} - server.PublishBlindedBlock(writer, request) - assert.Equal(t, http.StatusBadRequest, writer.Code) - // block is sent with slot == 1 which means it's in the phase0 fork - assert.StringContains(t, fmt.Sprintf("could not decode request body into %s consensus block", version.String(version.Phase0)), writer.Body.String()) - }) - t.Run("Blinded Fulu", func(t *testing.T) { - v1alpha1Server := mock2.NewMockBeaconNodeValidatorServer(ctrl) - v1alpha1Server.EXPECT().ProposeBeaconBlock(gomock.Any(), mock.MatchedBy(func(req *eth.GenericSignedBeaconBlock) bool { - block, ok := req.Block.(*eth.GenericSignedBeaconBlock_BlindedFulu) - converted, err := structs.BlindedBeaconBlockFuluFromConsensus(block.BlindedFulu.Message) - require.NoError(t, err) - var signedblock *structs.SignedBlindedBeaconBlockFulu - err = json.Unmarshal([]byte(rpctesting.BlindedFuluBlock), &signedblock) - require.NoError(t, err) - require.DeepEqual(t, converted, signedblock.Message) - return ok - })) - server := &Server{ - V1Alpha1ValidatorServer: v1alpha1Server, - SyncChecker: &mockSync.Sync{IsSyncing: false}, - } - - request := httptest.NewRequest(http.MethodPost, "http://foo.example", bytes.NewReader([]byte(rpctesting.BlindedFuluBlock))) - request.Header.Set(api.VersionHeader, version.String(version.Fulu)) - writer := httptest.NewRecorder() - writer.Body = &bytes.Buffer{} - server.PublishBlindedBlock(writer, request) - assert.Equal(t, http.StatusOK, writer.Code) - }) - t.Run("invalid block", func(t *testing.T) { - 
server := &Server{ - SyncChecker: &mockSync.Sync{IsSyncing: false}, - } - - request := httptest.NewRequest(http.MethodPost, "http://foo.example", bytes.NewReader([]byte(rpctesting.BellatrixBlock))) - request.Header.Set(api.VersionHeader, version.String(version.Bellatrix)) - writer := httptest.NewRecorder() - writer.Body = &bytes.Buffer{} - server.PublishBlindedBlock(writer, request) - assert.Equal(t, http.StatusBadRequest, writer.Code) - assert.StringContains(t, fmt.Sprintf("could not decode request body into %s consensus block", version.String(version.Bellatrix)), writer.Body.String()) - }) - t.Run("wrong version header", func(t *testing.T) { - server := &Server{ - SyncChecker: &mockSync.Sync{IsSyncing: false}, - } - - request := httptest.NewRequest(http.MethodPost, "http://foo.example", bytes.NewReader([]byte(rpctesting.BlindedBellatrixBlock))) - request.Header.Set(api.VersionHeader, version.String(version.Capella)) - writer := httptest.NewRecorder() - writer.Body = &bytes.Buffer{} - server.PublishBlindedBlock(writer, request) - assert.Equal(t, http.StatusBadRequest, writer.Code) - assert.StringContains(t, fmt.Sprintf("could not decode request body into %s consensus block", version.String(version.Capella)), writer.Body.String()) - }) - t.Run("syncing", func(t *testing.T) { - chainService := &chainMock.ChainService{} - server := &Server{ - SyncChecker: &mockSync.Sync{IsSyncing: true}, - HeadFetcher: chainService, - TimeFetcher: chainService, - OptimisticModeFetcher: chainService, - } - - request := httptest.NewRequest(http.MethodPost, "http://foo.example", bytes.NewReader([]byte("foo"))) - writer := httptest.NewRecorder() - writer.Body = &bytes.Buffer{} - server.PublishBlindedBlock(writer, request) - assert.Equal(t, http.StatusServiceUnavailable, writer.Code) - assert.StringContains(t, "Beacon node is currently syncing and not serving request on that endpoint", writer.Body.String()) - }) -} - -func TestPublishBlindedBlockSSZ(t *testing.T) { - ctrl := gomock.NewController(t) - t.Run("Phase 0", func(t *testing.T) { - v1alpha1Server := mock2.NewMockBeaconNodeValidatorServer(ctrl) - v1alpha1Server.EXPECT().ProposeBeaconBlock(gomock.Any(), mock.MatchedBy(func(req *eth.GenericSignedBeaconBlock) bool { - block, ok := req.Block.(*eth.GenericSignedBeaconBlock_Phase0) - var signedblock *structs.SignedBeaconBlock - err := json.Unmarshal([]byte(rpctesting.Phase0Block), &signedblock) - require.NoError(t, err) - require.DeepEqual(t, structs.BeaconBlockFromConsensus(block.Phase0.Block), signedblock.Message) - return ok - })) - server := &Server{ - V1Alpha1ValidatorServer: v1alpha1Server, - SyncChecker: &mockSync.Sync{IsSyncing: false}, - } - - var blk structs.SignedBeaconBlock - err := json.Unmarshal([]byte(rpctesting.Phase0Block), &blk) - require.NoError(t, err) - genericBlock, err := blk.ToGeneric() - require.NoError(t, err) - ssz, err := genericBlock.GetPhase0().MarshalSSZ() - require.NoError(t, err) - request := httptest.NewRequest(http.MethodPost, "http://foo.example", bytes.NewReader(ssz)) - request.Header.Set("Content-Type", api.OctetStreamMediaType) - request.Header.Set(api.VersionHeader, version.String(version.Phase0)) - writer := httptest.NewRecorder() - writer.Body = &bytes.Buffer{} - server.PublishBlindedBlock(writer, request) - assert.Equal(t, http.StatusOK, writer.Code) - }) - t.Run("Altair", func(t *testing.T) { - v1alpha1Server := mock2.NewMockBeaconNodeValidatorServer(ctrl) - v1alpha1Server.EXPECT().ProposeBeaconBlock(gomock.Any(), mock.MatchedBy(func(req *eth.GenericSignedBeaconBlock) 
bool { - block, ok := req.Block.(*eth.GenericSignedBeaconBlock_Altair) - var signedblock *structs.SignedBeaconBlockAltair - err := json.Unmarshal([]byte(rpctesting.AltairBlock), &signedblock) - require.NoError(t, err) - require.DeepEqual(t, structs.BeaconBlockAltairFromConsensus(block.Altair.Block), signedblock.Message) - return ok - })) - server := &Server{ - V1Alpha1ValidatorServer: v1alpha1Server, - SyncChecker: &mockSync.Sync{IsSyncing: false}, - } - - var blk structs.SignedBeaconBlockAltair - err := json.Unmarshal([]byte(rpctesting.AltairBlock), &blk) - require.NoError(t, err) - genericBlock, err := blk.ToGeneric() - require.NoError(t, err) - ssz, err := genericBlock.GetAltair().MarshalSSZ() - require.NoError(t, err) - request := httptest.NewRequest(http.MethodPost, "http://foo.example", bytes.NewReader(ssz)) - request.Header.Set("Content-Type", api.OctetStreamMediaType) - request.Header.Set(api.VersionHeader, version.String(version.Altair)) - writer := httptest.NewRecorder() - writer.Body = &bytes.Buffer{} - server.PublishBlindedBlock(writer, request) - assert.Equal(t, http.StatusOK, writer.Code) - }) - t.Run("Bellatrix", func(t *testing.T) { - v1alpha1Server := mock2.NewMockBeaconNodeValidatorServer(ctrl) - v1alpha1Server.EXPECT().ProposeBeaconBlock(gomock.Any(), mock.MatchedBy(func(req *eth.GenericSignedBeaconBlock) bool { - _, ok := req.Block.(*eth.GenericSignedBeaconBlock_BlindedBellatrix) - return ok - })) - server := &Server{ - V1Alpha1ValidatorServer: v1alpha1Server, - SyncChecker: &mockSync.Sync{IsSyncing: false}, - } - - var blk structs.SignedBlindedBeaconBlockBellatrix - err := json.Unmarshal([]byte(rpctesting.BlindedBellatrixBlock), &blk) - require.NoError(t, err) - genericBlock, err := blk.ToGeneric() - require.NoError(t, err) - ssz, err := genericBlock.GetBlindedBellatrix().MarshalSSZ() - require.NoError(t, err) - request := httptest.NewRequest(http.MethodPost, "http://foo.example", bytes.NewReader(ssz)) - request.Header.Set("Content-Type", api.OctetStreamMediaType) - request.Header.Set(api.VersionHeader, version.String(version.Bellatrix)) - writer := httptest.NewRecorder() - writer.Body = &bytes.Buffer{} - server.PublishBlindedBlock(writer, request) - assert.Equal(t, http.StatusOK, writer.Code) - }) - t.Run("Capella", func(t *testing.T) { - v1alpha1Server := mock2.NewMockBeaconNodeValidatorServer(ctrl) - v1alpha1Server.EXPECT().ProposeBeaconBlock(gomock.Any(), mock.MatchedBy(func(req *eth.GenericSignedBeaconBlock) bool { - _, ok := req.Block.(*eth.GenericSignedBeaconBlock_BlindedCapella) - return ok - })) - server := &Server{ - V1Alpha1ValidatorServer: v1alpha1Server, - SyncChecker: &mockSync.Sync{IsSyncing: false}, - } - - var blk structs.SignedBlindedBeaconBlockCapella - err := json.Unmarshal([]byte(rpctesting.BlindedCapellaBlock), &blk) - require.NoError(t, err) - genericBlock, err := blk.ToGeneric() - require.NoError(t, err) - ssz, err := genericBlock.GetBlindedCapella().MarshalSSZ() - require.NoError(t, err) - request := httptest.NewRequest(http.MethodPost, "http://foo.example", bytes.NewReader(ssz)) - request.Header.Set("Content-Type", api.OctetStreamMediaType) - request.Header.Set(api.VersionHeader, version.String(version.Capella)) - writer := httptest.NewRecorder() - writer.Body = &bytes.Buffer{} - server.PublishBlindedBlock(writer, request) - assert.Equal(t, http.StatusOK, writer.Code) - }) - t.Run("Deneb", func(t *testing.T) { - v1alpha1Server := mock2.NewMockBeaconNodeValidatorServer(ctrl) - v1alpha1Server.EXPECT().ProposeBeaconBlock(gomock.Any(), 
mock.MatchedBy(func(req *eth.GenericSignedBeaconBlock) bool { - _, ok := req.Block.(*eth.GenericSignedBeaconBlock_BlindedDeneb) - return ok - })) - server := &Server{ - V1Alpha1ValidatorServer: v1alpha1Server, - SyncChecker: &mockSync.Sync{IsSyncing: false}, - } - - var blk structs.SignedBlindedBeaconBlockDeneb - err := json.Unmarshal([]byte(rpctesting.BlindedDenebBlock), &blk) - require.NoError(t, err) - genericBlock, err := blk.ToGeneric() - require.NoError(t, err) - ssz, err := genericBlock.GetBlindedDeneb().MarshalSSZ() - require.NoError(t, err) - request := httptest.NewRequest(http.MethodPost, "http://foo.example", bytes.NewReader(ssz)) - request.Header.Set("Content-Type", api.OctetStreamMediaType) - request.Header.Set(api.VersionHeader, version.String(version.Deneb)) - writer := httptest.NewRecorder() - writer.Body = &bytes.Buffer{} - server.PublishBlindedBlock(writer, request) - assert.Equal(t, http.StatusOK, writer.Code) - }) - t.Run("Electra", func(t *testing.T) { - v1alpha1Server := mock2.NewMockBeaconNodeValidatorServer(ctrl) - v1alpha1Server.EXPECT().ProposeBeaconBlock(gomock.Any(), mock.MatchedBy(func(req *eth.GenericSignedBeaconBlock) bool { - _, ok := req.Block.(*eth.GenericSignedBeaconBlock_BlindedElectra) - return ok - })) - server := &Server{ - V1Alpha1ValidatorServer: v1alpha1Server, - SyncChecker: &mockSync.Sync{IsSyncing: false}, - } - - var blk structs.SignedBlindedBeaconBlockElectra - err := json.Unmarshal([]byte(rpctesting.BlindedElectraBlock), &blk) - require.NoError(t, err) - genericBlock, err := blk.ToGeneric() - require.NoError(t, err) - ssz, err := genericBlock.GetBlindedElectra().MarshalSSZ() - require.NoError(t, err) - request := httptest.NewRequest(http.MethodPost, "http://foo.example", bytes.NewReader(ssz)) - request.Header.Set("Content-Type", api.OctetStreamMediaType) - request.Header.Set(api.VersionHeader, version.String(version.Electra)) - writer := httptest.NewRecorder() - writer.Body = &bytes.Buffer{} - server.PublishBlindedBlock(writer, request) - assert.Equal(t, http.StatusOK, writer.Code) - }) - t.Run("Fulu", func(t *testing.T) { - v1alpha1Server := mock2.NewMockBeaconNodeValidatorServer(ctrl) - v1alpha1Server.EXPECT().ProposeBeaconBlock(gomock.Any(), mock.MatchedBy(func(req *eth.GenericSignedBeaconBlock) bool { - _, ok := req.Block.(*eth.GenericSignedBeaconBlock_BlindedFulu) - return ok - })) - server := &Server{ - V1Alpha1ValidatorServer: v1alpha1Server, - SyncChecker: &mockSync.Sync{IsSyncing: false}, - } - - var blk structs.SignedBlindedBeaconBlockFulu - err := json.Unmarshal([]byte(rpctesting.BlindedFuluBlock), &blk) - require.NoError(t, err) - genericBlock, err := blk.ToGeneric() - require.NoError(t, err) - ssz, err := genericBlock.GetBlindedFulu().MarshalSSZ() - require.NoError(t, err) - request := httptest.NewRequest(http.MethodPost, "http://foo.example", bytes.NewReader(ssz)) - request.Header.Set("Content-Type", api.OctetStreamMediaType) - request.Header.Set(api.VersionHeader, version.String(version.Fulu)) - writer := httptest.NewRecorder() - writer.Body = &bytes.Buffer{} - server.PublishBlindedBlock(writer, request) - assert.Equal(t, http.StatusOK, writer.Code) - }) - t.Run("invalid block", func(t *testing.T) { - server := &Server{ - SyncChecker: &mockSync.Sync{IsSyncing: false}, - } - - request := httptest.NewRequest(http.MethodPost, "http://foo.example", bytes.NewReader([]byte(rpctesting.BellatrixBlock))) - request.Header.Set(api.VersionHeader, version.String(version.Bellatrix)) - writer := httptest.NewRecorder() - writer.Body = 
&bytes.Buffer{} - server.PublishBlindedBlock(writer, request) - assert.Equal(t, http.StatusBadRequest, writer.Code) - assert.StringContains(t, fmt.Sprintf("could not decode request body into %s consensus block", version.String(version.Bellatrix)), writer.Body.String()) - }) - t.Run("wrong version header", func(t *testing.T) { - server := &Server{ - SyncChecker: &mockSync.Sync{IsSyncing: false}, - } - - var blk structs.SignedBlindedBeaconBlockBellatrix - err := json.Unmarshal([]byte(rpctesting.BlindedBellatrixBlock), &blk) - require.NoError(t, err) - genericBlock, err := blk.ToGeneric() - require.NoError(t, err) - ssz, err := genericBlock.GetBlindedBellatrix().MarshalSSZ() - require.NoError(t, err) - request := httptest.NewRequest(http.MethodPost, "http://foo.example", bytes.NewReader(ssz)) - request.Header.Set("Content-Type", api.OctetStreamMediaType) - request.Header.Set(api.VersionHeader, version.String(version.Capella)) - writer := httptest.NewRecorder() - writer.Body = &bytes.Buffer{} - server.PublishBlindedBlock(writer, request) - assert.Equal(t, http.StatusBadRequest, writer.Code) - assert.StringContains(t, fmt.Sprintf("could not decode request body into %s consensus block", version.String(version.Capella)), writer.Body.String()) - }) - t.Run("syncing", func(t *testing.T) { - chainService := &chainMock.ChainService{} - server := &Server{ - SyncChecker: &mockSync.Sync{IsSyncing: true}, - HeadFetcher: chainService, - TimeFetcher: chainService, - OptimisticModeFetcher: chainService, - } - - request := httptest.NewRequest(http.MethodPost, "http://foo.example", bytes.NewReader([]byte("foo"))) - request.Header.Set("Content-Type", api.OctetStreamMediaType) - writer := httptest.NewRecorder() - writer.Body = &bytes.Buffer{} - server.PublishBlindedBlock(writer, request) - assert.Equal(t, http.StatusServiceUnavailable, writer.Code) - assert.StringContains(t, "Beacon node is currently syncing and not serving request on that endpoint", writer.Body.String()) - }) -} - func TestPublishBlockV2(t *testing.T) { ctrl := gomock.NewController(t) t.Run("Phase 0", func(t *testing.T) { @@ -3463,7 +2292,7 @@ func TestPublishBlindedBlockV2SSZ(t *testing.T) { request.Header.Set(api.VersionHeader, version.String(version.Deneb)) writer := httptest.NewRecorder() writer.Body = &bytes.Buffer{} - server.PublishBlindedBlock(writer, request) + server.PublishBlindedBlockV2(writer, request) assert.Equal(t, http.StatusOK, writer.Code) }) t.Run("Electra", func(t *testing.T) { @@ -3489,7 +2318,7 @@ func TestPublishBlindedBlockV2SSZ(t *testing.T) { request.Header.Set(api.VersionHeader, version.String(version.Electra)) writer := httptest.NewRecorder() writer.Body = &bytes.Buffer{} - server.PublishBlindedBlock(writer, request) + server.PublishBlindedBlockV2(writer, request) assert.Equal(t, http.StatusOK, writer.Code) }) t.Run("Fulu", func(t *testing.T) { @@ -3515,7 +2344,7 @@ func TestPublishBlindedBlockV2SSZ(t *testing.T) { request.Header.Set(api.VersionHeader, version.String(version.Fulu)) writer := httptest.NewRecorder() writer.Body = &bytes.Buffer{} - server.PublishBlindedBlock(writer, request) + server.PublishBlindedBlockV2(writer, request) assert.Equal(t, http.StatusOK, writer.Code) }) t.Run("invalid block", func(t *testing.T) { @@ -4790,67 +3619,6 @@ func TestGetGenesis(t *testing.T) { }) } -func TestGetDepositSnapshot(t *testing.T) { - beaconDB := dbTest.SetupDB(t) - mockTrie := depositsnapshot.NewDepositTree() - deposits := [][32]byte{ - bytesutil.ToBytes32([]byte{1}), - bytesutil.ToBytes32([]byte{2}), - 
bytesutil.ToBytes32([]byte{3}), - } - finalized := 2 - for _, leaf := range deposits { - err := mockTrie.Insert(leaf[:], 0) - require.NoError(t, err) - } - err := mockTrie.Finalize(1, deposits[1], 1) - require.NoError(t, err) - err = mockTrie.Finalize(2, deposits[2], 2) - require.NoError(t, err) - - snapshot, err := mockTrie.GetSnapshot() - require.NoError(t, err) - root, err := snapshot.CalculateRoot() - require.NoError(t, err) - chainData := &eth.ETH1ChainData{ - DepositSnapshot: snapshot.ToProto(), - } - err = beaconDB.SaveExecutionChainData(t.Context(), chainData) - require.NoError(t, err) - s := Server{ - BeaconDB: beaconDB, - } - - request := httptest.NewRequest(http.MethodGet, "/eth/v1/beacon/deposit_snapshot", nil) - writer := httptest.NewRecorder() - t.Run("JSON response", func(t *testing.T) { - writer.Body = &bytes.Buffer{} - s.GetDepositSnapshot(writer, request) - assert.Equal(t, http.StatusOK, writer.Code) - resp := &structs.GetDepositSnapshotResponse{} - require.NoError(t, json.Unmarshal(writer.Body.Bytes(), resp)) - require.NotNil(t, resp.Data) - - assert.Equal(t, hexutil.Encode(root[:]), resp.Data.DepositRoot) - assert.Equal(t, hexutil.Encode(deposits[2][:]), resp.Data.ExecutionBlockHash) - assert.Equal(t, strconv.Itoa(mockTrie.NumOfItems()), resp.Data.DepositCount) - assert.Equal(t, finalized, len(resp.Data.Finalized)) - }) - t.Run("SSZ response", func(t *testing.T) { - writer.Body = &bytes.Buffer{} - request.Header.Set("Accept", api.OctetStreamMediaType) - s.GetDepositSnapshot(writer, request) - assert.Equal(t, http.StatusOK, writer.Code) - resp := &eth.DepositSnapshot{} - require.NoError(t, resp.UnmarshalSSZ(writer.Body.Bytes())) - - assert.Equal(t, hexutil.Encode(root[:]), hexutil.Encode(resp.DepositRoot)) - assert.Equal(t, hexutil.Encode(deposits[2][:]), hexutil.Encode(resp.ExecutionHash)) - assert.Equal(t, uint64(mockTrie.NumOfItems()), resp.DepositCount) - assert.Equal(t, finalized, len(resp.Finalized)) - }) -} - func TestServer_broadcastBlobSidecars(t *testing.T) { hook := logTest.NewGlobal() blockToPropose := util.NewBeaconBlockContentsDeneb() diff --git a/beacon-chain/rpc/eth/builder/BUILD.bazel b/beacon-chain/rpc/eth/builder/BUILD.bazel deleted file mode 100644 index ba7b965d53..0000000000 --- a/beacon-chain/rpc/eth/builder/BUILD.bazel +++ /dev/null @@ -1,47 +0,0 @@ -load("@prysm//tools/go:def.bzl", "go_library", "go_test") - -go_library( - name = "go_default_library", - srcs = [ - "handlers.go", - "server.go", - ], - importpath = "github.com/OffchainLabs/prysm/v6/beacon-chain/rpc/eth/builder", - visibility = ["//visibility:public"], - deps = [ - "//api/server/structs:go_default_library", - "//beacon-chain/blockchain:go_default_library", - "//beacon-chain/core/helpers:go_default_library", - "//beacon-chain/core/transition:go_default_library", - "//beacon-chain/rpc/lookup:go_default_library", - "//config/params:go_default_library", - "//consensus-types/primitives:go_default_library", - "//network/httputil:go_default_library", - "//proto/engine/v1:go_default_library", - "//time/slots:go_default_library", - "@com_github_ethereum_go_ethereum//common/hexutil:go_default_library", - "@com_github_pkg_errors//:go_default_library", - ], -) - -go_test( - name = "go_default_test", - srcs = ["handlers_test.go"], - embed = [":go_default_library"], - deps = [ - "//api/server/structs:go_default_library", - "//beacon-chain/blockchain/testing:go_default_library", - "//beacon-chain/rpc/testutil:go_default_library", - "//beacon-chain/state:go_default_library", - 
"//config/params:go_default_library", - "//consensus-types/primitives:go_default_library", - "//crypto/bls:go_default_library", - "//network/httputil:go_default_library", - "//proto/prysm/v1alpha1:go_default_library", - "//testing/assert:go_default_library", - "//testing/require:go_default_library", - "//testing/util:go_default_library", - "//time/slots:go_default_library", - "@com_github_ethereum_go_ethereum//common/hexutil:go_default_library", - ], -) diff --git a/beacon-chain/rpc/eth/builder/handlers.go b/beacon-chain/rpc/eth/builder/handlers.go deleted file mode 100644 index af7b86dc6e..0000000000 --- a/beacon-chain/rpc/eth/builder/handlers.go +++ /dev/null @@ -1,132 +0,0 @@ -package builder - -import ( - "fmt" - "net/http" - "strconv" - - "github.com/OffchainLabs/prysm/v6/api/server/structs" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/transition" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/network/httputil" - enginev1 "github.com/OffchainLabs/prysm/v6/proto/engine/v1" - "github.com/OffchainLabs/prysm/v6/time/slots" - "github.com/ethereum/go-ethereum/common/hexutil" - "github.com/pkg/errors" -) - -// Deprecated: use SSE from events for `payload attributes` instead -// ExpectedWithdrawals get the withdrawals computed from the specified state, that will be included in the block that gets built on the specified state. -func (s *Server) ExpectedWithdrawals(w http.ResponseWriter, r *http.Request) { - // Retrieve beacon state - stateId := r.PathValue("state_id") - if stateId == "" { - httputil.WriteError(w, &httputil.DefaultJsonError{ - Message: "state_id is required in URL params", - Code: http.StatusBadRequest, - }) - return - } - st, err := s.Stater.State(r.Context(), []byte(stateId)) - if err != nil { - httputil.WriteError(w, handleWrapError(err, "could not retrieve state", http.StatusNotFound)) - return - } - queryParam := r.URL.Query().Get("proposal_slot") - var proposalSlot primitives.Slot - if queryParam != "" { - pSlot, err := strconv.ParseUint(queryParam, 10, 64) - if err != nil { - httputil.WriteError(w, handleWrapError(err, "invalid proposal slot value", http.StatusBadRequest)) - return - } - proposalSlot = primitives.Slot(pSlot) - } else { - proposalSlot = st.Slot() + 1 - } - // Perform sanity checks on proposal slot before computing state - capellaStart, err := slots.EpochStart(params.BeaconConfig().CapellaForkEpoch) - if err != nil { - httputil.WriteError(w, handleWrapError(err, "could not calculate Capella start slot", http.StatusInternalServerError)) - return - } - if proposalSlot < capellaStart { - httputil.WriteError(w, &httputil.DefaultJsonError{ - Message: "expected withdrawals are not supported before Capella fork", - Code: http.StatusBadRequest, - }) - return - } - if proposalSlot <= st.Slot() { - httputil.WriteError(w, &httputil.DefaultJsonError{ - Message: fmt.Sprintf("proposal slot must be bigger than state slot. 
proposal slot: %d, state slot: %d", proposalSlot, st.Slot()), - Code: http.StatusBadRequest, - }) - return - } - lookAheadLimit := uint64(params.BeaconConfig().SlotsPerEpoch.Mul(uint64(params.BeaconConfig().MaxSeedLookahead))) - if st.Slot().Add(lookAheadLimit) <= proposalSlot { - httputil.WriteError(w, &httputil.DefaultJsonError{ - Message: fmt.Sprintf("proposal slot cannot be >= %d slots ahead of state slot", lookAheadLimit), - Code: http.StatusBadRequest, - }) - return - } - // Get metadata for response - isOptimistic, err := s.OptimisticModeFetcher.IsOptimistic(r.Context()) - if err != nil { - httputil.WriteError(w, handleWrapError(err, "could not get optimistic mode info", http.StatusInternalServerError)) - return - } - root, err := helpers.BlockRootAtSlot(st, slots.PrevSlot(st.Slot())) - if err != nil { - httputil.WriteError(w, handleWrapError(err, "could not get block root", http.StatusInternalServerError)) - return - } - var blockRoot = [32]byte(root) - isFinalized := s.FinalizationFetcher.IsFinalized(r.Context(), blockRoot) - // Advance state forward to proposal slot - st, err = transition.ProcessSlots(r.Context(), st, proposalSlot) - if err != nil { - httputil.WriteError(w, &httputil.DefaultJsonError{ - Message: "could not process slots", - Code: http.StatusInternalServerError, - }) - return - } - withdrawals, _, err := st.ExpectedWithdrawals() - if err != nil { - httputil.WriteError(w, &httputil.DefaultJsonError{ - Message: "could not get expected withdrawals", - Code: http.StatusInternalServerError, - }) - return - } - httputil.WriteJson(w, &structs.ExpectedWithdrawalsResponse{ - ExecutionOptimistic: isOptimistic, - Finalized: isFinalized, - Data: buildExpectedWithdrawalsData(withdrawals), - }) -} - -func buildExpectedWithdrawalsData(withdrawals []*enginev1.Withdrawal) []*structs.ExpectedWithdrawal { - data := make([]*structs.ExpectedWithdrawal, len(withdrawals)) - for i, withdrawal := range withdrawals { - data[i] = &structs.ExpectedWithdrawal{ - Address: hexutil.Encode(withdrawal.Address), - Amount: strconv.FormatUint(withdrawal.Amount, 10), - Index: strconv.FormatUint(withdrawal.Index, 10), - ValidatorIndex: strconv.FormatUint(uint64(withdrawal.ValidatorIndex), 10), - } - } - return data -} - -func handleWrapError(err error, message string, code int) *httputil.DefaultJsonError { - return &httputil.DefaultJsonError{ - Message: errors.Wrap(err, message).Error(), - Code: code, - } -} diff --git a/beacon-chain/rpc/eth/builder/handlers_test.go b/beacon-chain/rpc/eth/builder/handlers_test.go deleted file mode 100644 index 52ee422f98..0000000000 --- a/beacon-chain/rpc/eth/builder/handlers_test.go +++ /dev/null @@ -1,210 +0,0 @@ -package builder - -import ( - "bytes" - "encoding/json" - "net/http" - "net/http/httptest" - "strconv" - "testing" - - "github.com/OffchainLabs/prysm/v6/api/server/structs" - mock "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/rpc/testutil" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/crypto/bls" - "github.com/OffchainLabs/prysm/v6/network/httputil" - eth "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" - "github.com/OffchainLabs/prysm/v6/time/slots" - 
"github.com/ethereum/go-ethereum/common/hexutil" -) - -func TestExpectedWithdrawals_BadRequest(t *testing.T) { - st, err := util.NewBeaconStateCapella() - slotsAhead := 5000 - require.NoError(t, err) - capellaSlot, err := slots.EpochStart(params.BeaconConfig().CapellaForkEpoch) - require.NoError(t, err) - currentSlot := capellaSlot + primitives.Slot(slotsAhead) - require.NoError(t, st.SetSlot(currentSlot)) - mockChainService := &mock.ChainService{Optimistic: true} - - testCases := []struct { - name string - path string - urlParams map[string]string - state state.BeaconState - errorMessage string - }{ - { - name: "no state_id url params", - path: "/eth/v1/builder/states/{state_id}/expected_withdrawals?proposal_slot" + - strconv.FormatUint(uint64(currentSlot), 10), - urlParams: map[string]string{}, - state: nil, - errorMessage: "state_id is required in URL params", - }, - { - name: "invalid proposal slot value", - path: "/eth/v1/builder/states/{state_id}/expected_withdrawals?proposal_slot=aaa", - urlParams: map[string]string{"state_id": "head"}, - state: st, - errorMessage: "invalid proposal slot value", - }, - { - name: "proposal slot < Capella start slot", - path: "/eth/v1/builder/states/{state_id}/expected_withdrawals?proposal_slot=" + - strconv.FormatUint(uint64(capellaSlot)-1, 10), - urlParams: map[string]string{"state_id": "head"}, - state: st, - errorMessage: "expected withdrawals are not supported before Capella fork", - }, - { - name: "proposal slot == Capella start slot", - path: "/eth/v1/builder/states/{state_id}/expected_withdrawals?proposal_slot=" + - strconv.FormatUint(uint64(capellaSlot), 10), - urlParams: map[string]string{"state_id": "head"}, - state: st, - errorMessage: "proposal slot must be bigger than state slot", - }, - { - name: "Proposal slot >= 128 slots ahead of state slot", - path: "/eth/v1/builder/states/{state_id}/expected_withdrawals?proposal_slot=" + - strconv.FormatUint(uint64(currentSlot+128), 10), - urlParams: map[string]string{"state_id": "head"}, - state: st, - errorMessage: "proposal slot cannot be >= 128 slots ahead of state slot", - }, - } - - for _, testCase := range testCases { - t.Run(testCase.name, func(t *testing.T) { - s := &Server{ - FinalizationFetcher: mockChainService, - OptimisticModeFetcher: mockChainService, - Stater: &testutil.MockStater{BeaconState: testCase.state}, - } - request := httptest.NewRequest("GET", testCase.path, nil) - request.SetPathValue("state_id", testCase.urlParams["state_id"]) - writer := httptest.NewRecorder() - writer.Body = &bytes.Buffer{} - - s.ExpectedWithdrawals(writer, request) - assert.Equal(t, http.StatusBadRequest, writer.Code) - e := &httputil.DefaultJsonError{} - require.NoError(t, json.Unmarshal(writer.Body.Bytes(), e)) - assert.Equal(t, http.StatusBadRequest, e.Code) - assert.StringContains(t, testCase.errorMessage, e.Message) - }) - } -} - -func TestExpectedWithdrawals(t *testing.T) { - st, err := util.NewBeaconStateCapella() - slotsAhead := 5000 - require.NoError(t, err) - capellaSlot, err := slots.EpochStart(params.BeaconConfig().CapellaForkEpoch) - require.NoError(t, err) - currentSlot := capellaSlot + primitives.Slot(slotsAhead) - require.NoError(t, st.SetSlot(currentSlot)) - mockChainService := &mock.ChainService{Optimistic: true} - - t.Run("get correct expected withdrawals", func(t *testing.T) { - params.SetupTestConfigCleanup(t) - cfg := params.BeaconConfig().Copy() - cfg.MaxValidatorsPerWithdrawalsSweep = 16 - params.OverrideBeaconConfig(cfg) - - // Update state with updated validator fields - 
valCount := 17 - validators := make([]*eth.Validator, 0, valCount) - balances := make([]uint64, 0, valCount) - for i := 0; i < valCount; i++ { - blsKey, err := bls.RandKey() - require.NoError(t, err) - val := &eth.Validator{ - PublicKey: blsKey.PublicKey().Marshal(), - WithdrawalCredentials: make([]byte, 32), - ExitEpoch: params.BeaconConfig().FarFutureEpoch, - WithdrawableEpoch: params.BeaconConfig().FarFutureEpoch, - EffectiveBalance: params.BeaconConfig().MaxEffectiveBalance, - } - val.WithdrawalCredentials[0] = params.BeaconConfig().ETH1AddressWithdrawalPrefixByte - validators = append(validators, val) - balances = append(balances, params.BeaconConfig().MaxEffectiveBalance) - } - - epoch := slots.ToEpoch(st.Slot()) - // Fully withdrawable now with more than 0 balance - validators[5].WithdrawableEpoch = epoch - // Fully withdrawable now but 0 balance - validators[10].WithdrawableEpoch = epoch - balances[10] = 0 - // Partially withdrawable now but fully withdrawable after 1 epoch - validators[14].WithdrawableEpoch = epoch + 1 - balances[14] += params.BeaconConfig().MinDepositAmount - // Partially withdrawable - validators[15].WithdrawableEpoch = epoch + 2 - balances[15] += params.BeaconConfig().MinDepositAmount - // Above sweep bound - validators[16].WithdrawableEpoch = epoch + 1 - balances[16] += params.BeaconConfig().MinDepositAmount - - require.NoError(t, st.SetValidators(validators)) - require.NoError(t, st.SetBalances(balances)) - inactivityScores := make([]uint64, valCount) - for i := range inactivityScores { - inactivityScores[i] = 10 - } - require.NoError(t, st.SetInactivityScores(inactivityScores)) - - s := &Server{ - FinalizationFetcher: mockChainService, - OptimisticModeFetcher: mockChainService, - Stater: &testutil.MockStater{BeaconState: st}, - } - request := httptest.NewRequest( - "GET", "/eth/v1/builder/states/{state_id}/expected_withdrawals?proposal_slot="+ - strconv.FormatUint(uint64(currentSlot+params.BeaconConfig().SlotsPerEpoch), 10), nil) - request.SetPathValue("state_id", "head") - writer := httptest.NewRecorder() - writer.Body = &bytes.Buffer{} - - s.ExpectedWithdrawals(writer, request) - assert.Equal(t, http.StatusOK, writer.Code) - resp := &structs.ExpectedWithdrawalsResponse{} - require.NoError(t, json.Unmarshal(writer.Body.Bytes(), resp)) - assert.Equal(t, true, resp.ExecutionOptimistic) - assert.Equal(t, false, resp.Finalized) - assert.Equal(t, 3, len(resp.Data)) - expectedWithdrawal1 := &structs.ExpectedWithdrawal{ - Index: strconv.FormatUint(0, 10), - ValidatorIndex: strconv.FormatUint(5, 10), - Address: hexutil.Encode(validators[5].WithdrawalCredentials[12:]), - // Decreased due to epoch processing when state advanced forward - Amount: strconv.FormatUint(31998257885, 10), - } - expectedWithdrawal2 := &structs.ExpectedWithdrawal{ - Index: strconv.FormatUint(1, 10), - ValidatorIndex: strconv.FormatUint(14, 10), - Address: hexutil.Encode(validators[14].WithdrawalCredentials[12:]), - // MaxEffectiveBalance + MinDepositAmount + decrease after epoch processing - Amount: strconv.FormatUint(32998257885, 10), - } - expectedWithdrawal3 := &structs.ExpectedWithdrawal{ - Index: strconv.FormatUint(2, 10), - ValidatorIndex: strconv.FormatUint(15, 10), - Address: hexutil.Encode(validators[15].WithdrawalCredentials[12:]), - // MinDepositAmount + decrease after epoch processing - Amount: strconv.FormatUint(998257885, 10), - } - require.DeepEqual(t, expectedWithdrawal1, resp.Data[0]) - require.DeepEqual(t, expectedWithdrawal2, resp.Data[1]) - require.DeepEqual(t, 
expectedWithdrawal3, resp.Data[2]) - }) -} diff --git a/beacon-chain/rpc/eth/builder/server.go b/beacon-chain/rpc/eth/builder/server.go deleted file mode 100644 index 2234d6d8d2..0000000000 --- a/beacon-chain/rpc/eth/builder/server.go +++ /dev/null @@ -1,12 +0,0 @@ -package builder - -import ( - "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain" - "github.com/OffchainLabs/prysm/v6/beacon-chain/rpc/lookup" -) - -type Server struct { - FinalizationFetcher blockchain.FinalizationFetcher - OptimisticModeFetcher blockchain.OptimisticModeFetcher - Stater lookup.Stater -} diff --git a/beacon-chain/rpc/eth/validator/handlers.go b/beacon-chain/rpc/eth/validator/handlers.go index 16df7d5448..319df04ad2 100644 --- a/beacon-chain/rpc/eth/validator/handlers.go +++ b/beacon-chain/rpc/eth/validator/handlers.go @@ -44,38 +44,6 @@ import ( "google.golang.org/grpc/status" ) -// Deprecated: use GetAggregateAttestationV2 instead -// GetAggregateAttestation aggregates all attestations matching the given attestation data root and slot, returning the aggregated result. -func (s *Server) GetAggregateAttestation(w http.ResponseWriter, r *http.Request) { - _, span := trace.StartSpan(r.Context(), "validator.GetAggregateAttestation") - defer span.End() - - _, attDataRoot, ok := shared.HexFromQuery(w, r, "attestation_data_root", fieldparams.RootLength, true) - if !ok { - return - } - _, slot, ok := shared.UintFromQuery(w, r, "slot", true) - if !ok { - return - } - - agg := s.aggregatedAttestation(w, primitives.Slot(slot), attDataRoot, 0) - if agg == nil { - return - } - typedAgg, ok := agg.(*ethpbalpha.Attestation) - if !ok { - httputil.HandleError(w, fmt.Sprintf("Attestation is not of type %T", &ethpbalpha.Attestation{}), http.StatusInternalServerError) - return - } - data, err := json.Marshal(structs.AttFromConsensus(typedAgg)) - if err != nil { - httputil.HandleError(w, "Could not marshal attestation: "+err.Error(), http.StatusInternalServerError) - return - } - httputil.WriteJson(w, &structs.AggregateAttestationResponse{Data: data}) -} - // GetAggregateAttestationV2 aggregates all attestations matching the given attestation data root and slot, returning the aggregated result. func (s *Server) GetAggregateAttestationV2(w http.ResponseWriter, r *http.Request) { _, span := trace.StartSpan(r.Context(), "validator.GetAggregateAttestationV2") @@ -326,58 +294,6 @@ func (s *Server) SubmitContributionAndProofs(w http.ResponseWriter, r *http.Requ } } -// Deprecated: use SubmitAggregateAndProofsV2 instead -// SubmitAggregateAndProofs verifies given aggregate and proofs and publishes them on appropriate gossipsub topic. 
-func (s *Server) SubmitAggregateAndProofs(w http.ResponseWriter, r *http.Request) { - ctx, span := trace.StartSpan(r.Context(), "validator.SubmitAggregateAndProofs") - defer span.End() - - var req structs.SubmitAggregateAndProofsRequest - err := json.NewDecoder(r.Body).Decode(&req.Data) - switch { - case errors.Is(err, io.EOF): - httputil.HandleError(w, "No data submitted", http.StatusBadRequest) - return - case err != nil: - httputil.HandleError(w, "Could not decode request body: "+err.Error(), http.StatusBadRequest) - return - } - if len(req.Data) == 0 { - httputil.HandleError(w, "No data submitted", http.StatusBadRequest) - return - } - - broadcastFailed := false - for _, item := range req.Data { - var signedAggregate structs.SignedAggregateAttestationAndProof - err := json.Unmarshal(item, &signedAggregate) - if err != nil { - httputil.HandleError(w, "Could not decode item: "+err.Error(), http.StatusBadRequest) - return - } - consensusItem, err := signedAggregate.ToConsensus() - if err != nil { - httputil.HandleError(w, "Could not convert request aggregate to consensus aggregate: "+err.Error(), http.StatusBadRequest) - return - } - rpcError := s.CoreService.SubmitSignedAggregateSelectionProof(ctx, consensusItem) - if rpcError != nil { - var broadcastFailedErr *server.BroadcastFailedError - ok := errors.As(rpcError.Err, &broadcastFailedErr) - if ok { - broadcastFailed = true - } else { - httputil.HandleError(w, rpcError.Err.Error(), core.ErrorReasonToHTTP(rpcError.Reason)) - return - } - } - } - - if broadcastFailed { - httputil.HandleError(w, "Could not broadcast one or more signed aggregated attestations", http.StatusInternalServerError) - } -} - // SubmitAggregateAndProofsV2 verifies given aggregate and proofs and publishes them on appropriate gossipsub topic. func (s *Server) SubmitAggregateAndProofsV2(w http.ResponseWriter, r *http.Request) { ctx, span := trace.StartSpan(r.Context(), "validator.SubmitAggregateAndProofsV2") diff --git a/beacon-chain/rpc/eth/validator/handlers_test.go b/beacon-chain/rpc/eth/validator/handlers_test.go index 88661139c0..8281dc12fa 100644 --- a/beacon-chain/rpc/eth/validator/handlers_test.go +++ b/beacon-chain/rpc/eth/validator/handlers_test.go @@ -47,7 +47,7 @@ import ( logTest "github.com/sirupsen/logrus/hooks/test" ) -func TestGetAggregateAttestation(t *testing.T) { +func TestGetAggregateAttestationV2(t *testing.T) { root1 := bytesutil.PadTo([]byte("root1"), 32) root2 := bytesutil.PadTo([]byte("root2"), 32) key, err := bls.RandKey() @@ -80,7 +80,10 @@ func TestGetAggregateAttestation(t *testing.T) { } } - t.Run("V1", func(t *testing.T) { + t.Run("pre-electra", func(t *testing.T) { + committeeBits := bitfield.NewBitvector64() + committeeBits.SetBitAt(1, true) + aggSlot1_Root1_1 := createAttestation(1, bitfield.Bitlist{0b11100}, root1) aggSlot1_Root1_2 := createAttestation(1, bitfield.Bitlist{0b10111}, root1) aggSlot1_Root2 := createAttestation(1, bitfield.Bitlist{0b11100}, root2) @@ -90,6 +93,10 @@ func TestGetAggregateAttestation(t *testing.T) { unaggSlot3_Root2 := createAttestation(3, bitfield.Bitlist{0b11000}, root2) unaggSlot4 := createAttestation(4, bitfield.Bitlist{0b11000}, root1) + // Add one post-electra attestation to ensure that it is being ignored. + // We choose slot 2 where we have one pre-electra attestation with less attestation bits. 
+ postElectraAtt := createAttestationElectra(2, bitfield.Bitlist{0b11111}, root1) + compareResult := func( t *testing.T, attestation structs.Attestation, @@ -119,9 +126,9 @@ func TestGetAggregateAttestation(t *testing.T) { require.NoError(t, pool.SaveUnaggregatedAttestations([]ethpbalpha.Att{unaggSlot3_Root1_1, unaggSlot3_Root1_2, unaggSlot3_Root2, unaggSlot4}), "Failed to save unaggregated attestations") unagg := pool.UnaggregatedAttestations() require.Equal(t, 4, len(unagg), "Expected 4 unaggregated attestations") - require.NoError(t, pool.SaveAggregatedAttestations([]ethpbalpha.Att{aggSlot1_Root1_1, aggSlot1_Root1_2, aggSlot1_Root2, aggSlot2}), "Failed to save aggregated attestations") + require.NoError(t, pool.SaveAggregatedAttestations([]ethpbalpha.Att{aggSlot1_Root1_1, aggSlot1_Root1_2, aggSlot1_Root2, aggSlot2, postElectraAtt}), "Failed to save aggregated attestations") agg := pool.AggregatedAttestations() - require.Equal(t, 4, len(agg), "Expected 4 aggregated attestations") + require.Equal(t, 5, len(agg), "Expected 5 aggregated attestations, 4 pre electra and 1 post electra") s := &Server{ AttestationsPool: pool, } @@ -130,22 +137,22 @@ func TestGetAggregateAttestation(t *testing.T) { reqRoot, err := aggSlot2.Data.HashTreeRoot() require.NoError(t, err, "Failed to generate attestation data hash tree root") attDataRoot := hexutil.Encode(reqRoot[:]) - url := "http://example.com?attestation_data_root=" + attDataRoot + "&slot=1" + url := "http://example.com?attestation_data_root=" + attDataRoot + "&slot=1" + "&committee_index=0" request := httptest.NewRequest(http.MethodGet, url, nil) writer := httptest.NewRecorder() - s.GetAggregateAttestation(writer, request) + s.GetAggregateAttestationV2(writer, request) assert.Equal(t, http.StatusNotFound, writer.Code, "Expected HTTP status NotFound for non-matching request") }) t.Run("1 matching aggregated attestation", func(t *testing.T) { reqRoot, err := aggSlot2.Data.HashTreeRoot() require.NoError(t, err, "Failed to generate attestation data hash tree root") attDataRoot := hexutil.Encode(reqRoot[:]) - url := "http://example.com?attestation_data_root=" + attDataRoot + "&slot=2" + url := "http://example.com?attestation_data_root=" + attDataRoot + "&slot=2" + "&committee_index=0" request := httptest.NewRequest(http.MethodGet, url, nil) writer := httptest.NewRecorder() - s.GetAggregateAttestation(writer, request) + s.GetAggregateAttestationV2(writer, request) require.Equal(t, http.StatusOK, writer.Code, "Expected HTTP status OK") var resp structs.AggregateAttestationResponse @@ -157,15 +164,32 @@ func TestGetAggregateAttestation(t *testing.T) { compareResult(t, attestation, "2", hexutil.Encode(aggSlot2.AggregationBits), root1, sig.Marshal()) }) + t.Run("1 matching aggregated attestation - SSZ", func(t *testing.T) { + reqRoot, err := aggSlot2.Data.HashTreeRoot() + require.NoError(t, err, "Failed to generate attestation data hash tree root") + attDataRoot := hexutil.Encode(reqRoot[:]) + url := "http://example.com?attestation_data_root=" + attDataRoot + "&slot=2" + "&committee_index=0" + request := httptest.NewRequest(http.MethodGet, url, nil) + request.Header.Add("Accept", "application/octet-stream") + writer := httptest.NewRecorder() + + s.GetAggregateAttestationV2(writer, request) + require.Equal(t, http.StatusOK, writer.Code, "Expected HTTP status OK") + + var resp ethpbalpha.Attestation + require.NoError(t, resp.UnmarshalSSZ(writer.Body.Bytes())) + + compareResult(t, *structs.AttFromConsensus(&resp), "2", hexutil.Encode(aggSlot2.AggregationBits), 
root1, sig.Marshal()) + }) t.Run("multiple matching aggregated attestations - return the one with most bits", func(t *testing.T) { reqRoot, err := aggSlot1_Root1_1.Data.HashTreeRoot() require.NoError(t, err, "Failed to generate attestation data hash tree root") attDataRoot := hexutil.Encode(reqRoot[:]) - url := "http://example.com?attestation_data_root=" + attDataRoot + "&slot=1" + url := "http://example.com?attestation_data_root=" + attDataRoot + "&slot=1" + "&committee_index=0" request := httptest.NewRequest(http.MethodGet, url, nil) writer := httptest.NewRecorder() - s.GetAggregateAttestation(writer, request) + s.GetAggregateAttestationV2(writer, request) require.Equal(t, http.StatusOK, writer.Code, "Expected HTTP status OK") var resp structs.AggregateAttestationResponse @@ -177,422 +201,255 @@ func TestGetAggregateAttestation(t *testing.T) { compareResult(t, attestation, "1", hexutil.Encode(aggSlot1_Root1_2.AggregationBits), root1, sig.Marshal()) }) - t.Run("1 matching unaggregated attestation", func(t *testing.T) { - reqRoot, err := unaggSlot4.Data.HashTreeRoot() + t.Run("multiple matching aggregated attestations - return the one with most bits - SSZ", func(t *testing.T) { + reqRoot, err := aggSlot1_Root1_1.Data.HashTreeRoot() require.NoError(t, err, "Failed to generate attestation data hash tree root") attDataRoot := hexutil.Encode(reqRoot[:]) - url := "http://example.com?attestation_data_root=" + attDataRoot + "&slot=4" + url := "http://example.com?attestation_data_root=" + attDataRoot + "&slot=1" + "&committee_index=0" + request := httptest.NewRequest(http.MethodGet, url, nil) + request.Header.Add("Accept", "application/octet-stream") + writer := httptest.NewRecorder() + + s.GetAggregateAttestationV2(writer, request) + require.Equal(t, http.StatusOK, writer.Code, "Expected HTTP status OK") + + var resp ethpbalpha.Attestation + require.NoError(t, resp.UnmarshalSSZ(writer.Body.Bytes())) + + compareResult(t, *structs.AttFromConsensus(&resp), "1", hexutil.Encode(aggSlot1_Root1_2.AggregationBits), root1, sig.Marshal()) + }) + }) + t.Run("post-electra", func(t *testing.T) { + aggSlot1_Root1_1 := createAttestationElectra(1, bitfield.Bitlist{0b11100}, root1) + aggSlot1_Root1_2 := createAttestationElectra(1, bitfield.Bitlist{0b10111}, root1) + aggSlot1_Root2 := createAttestationElectra(1, bitfield.Bitlist{0b11100}, root2) + aggSlot2 := createAttestationElectra(2, bitfield.Bitlist{0b11100}, root1) + unaggSlot3_Root1_1 := createAttestationElectra(3, bitfield.Bitlist{0b11000}, root1) + unaggSlot3_Root1_2 := createAttestationElectra(3, bitfield.Bitlist{0b10100}, root1) + unaggSlot3_Root2 := createAttestationElectra(3, bitfield.Bitlist{0b11000}, root2) + unaggSlot4 := createAttestationElectra(4, bitfield.Bitlist{0b11000}, root1) + + // Add one pre-electra attestation to ensure that it is being ignored. + // We choose slot 2 where we have one post-electra attestation with less attestation bits. 
+ preElectraAtt := createAttestation(2, bitfield.Bitlist{0b11111}, root1) + + compareResult := func( + t *testing.T, + attestation structs.AttestationElectra, + expectedSlot string, + expectedAggregationBits string, + expectedRoot []byte, + expectedSig []byte, + expectedCommitteeBits string, + ) { + assert.Equal(t, expectedAggregationBits, attestation.AggregationBits, "Unexpected aggregation bits in attestation") + assert.Equal(t, expectedCommitteeBits, attestation.CommitteeBits) + assert.Equal(t, hexutil.Encode(expectedSig), attestation.Signature, "Signature mismatch") + assert.Equal(t, expectedSlot, attestation.Data.Slot, "Slot mismatch in attestation data") + assert.Equal(t, "0", attestation.Data.CommitteeIndex, "Committee index mismatch") + assert.Equal(t, hexutil.Encode(expectedRoot), attestation.Data.BeaconBlockRoot, "Beacon block root mismatch") + + // Source checkpoint checks + require.NotNil(t, attestation.Data.Source, "Source checkpoint should not be nil") + assert.Equal(t, "1", attestation.Data.Source.Epoch, "Source epoch mismatch") + assert.Equal(t, hexutil.Encode(expectedRoot), attestation.Data.Source.Root, "Source root mismatch") + + // Target checkpoint checks + require.NotNil(t, attestation.Data.Target, "Target checkpoint should not be nil") + assert.Equal(t, "1", attestation.Data.Target.Epoch, "Target epoch mismatch") + assert.Equal(t, hexutil.Encode(expectedRoot), attestation.Data.Target.Root, "Target root mismatch") + } + + pool := attestations.NewPool() + require.NoError(t, pool.SaveUnaggregatedAttestations([]ethpbalpha.Att{unaggSlot3_Root1_1, unaggSlot3_Root1_2, unaggSlot3_Root2, unaggSlot4}), "Failed to save unaggregated attestations") + unagg := pool.UnaggregatedAttestations() + require.Equal(t, 4, len(unagg), "Expected 4 unaggregated attestations") + require.NoError(t, pool.SaveAggregatedAttestations([]ethpbalpha.Att{aggSlot1_Root1_1, aggSlot1_Root1_2, aggSlot1_Root2, aggSlot2, preElectraAtt}), "Failed to save aggregated attestations") + agg := pool.AggregatedAttestations() + require.Equal(t, 5, len(agg), "Expected 5 aggregated attestations, 4 electra and 1 pre electra") + bs, err := util.NewBeaconState() + require.NoError(t, err) + + params.SetupTestConfigCleanup(t) + config := params.BeaconConfig() + config.ElectraForkEpoch = 0 + params.OverrideBeaconConfig(config) + + chainService := &mockChain.ChainService{State: bs} + s := &Server{ + ChainInfoFetcher: chainService, + TimeFetcher: chainService, + AttestationsPool: pool, + } + t.Run("non-matching attestation request", func(t *testing.T) { + reqRoot, err := aggSlot2.Data.HashTreeRoot() + require.NoError(t, err, "Failed to generate attestation data hash tree root") + attDataRoot := hexutil.Encode(reqRoot[:]) + url := "http://example.com?attestation_data_root=" + attDataRoot + "&slot=1" + "&committee_index=0" request := httptest.NewRequest(http.MethodGet, url, nil) writer := httptest.NewRecorder() - s.GetAggregateAttestation(writer, request) + s.GetAggregateAttestationV2(writer, request) + assert.Equal(t, http.StatusNotFound, writer.Code, "Expected HTTP status NotFound for non-matching request") + }) + t.Run("1 matching aggregated attestation", func(t *testing.T) { + reqRoot, err := aggSlot2.Data.HashTreeRoot() + require.NoError(t, err, "Failed to generate attestation data hash tree root") + attDataRoot := hexutil.Encode(reqRoot[:]) + url := "http://example.com?attestation_data_root=" + attDataRoot + "&slot=2" + "&committee_index=0" + request := httptest.NewRequest(http.MethodGet, url, nil) + writer := 
httptest.NewRecorder() + + s.GetAggregateAttestationV2(writer, request) require.Equal(t, http.StatusOK, writer.Code, "Expected HTTP status OK") var resp structs.AggregateAttestationResponse require.NoError(t, json.Unmarshal(writer.Body.Bytes(), &resp), "Failed to unmarshal response") require.NotNil(t, resp.Data, "Response data should not be nil") - var attestation structs.Attestation + var attestation structs.AttestationElectra require.NoError(t, json.Unmarshal(resp.Data, &attestation), "Failed to unmarshal attestation data") - compareResult(t, attestation, "4", hexutil.Encode(unaggSlot4.AggregationBits), root1, sig.Marshal()) + + compareResult(t, attestation, "2", hexutil.Encode(aggSlot2.AggregationBits), root1, sig.Marshal(), hexutil.Encode(aggSlot2.CommitteeBits)) + }) + t.Run("1 matching aggregated attestation - SSZ", func(t *testing.T) { + reqRoot, err := aggSlot2.Data.HashTreeRoot() + require.NoError(t, err, "Failed to generate attestation data hash tree root") + attDataRoot := hexutil.Encode(reqRoot[:]) + url := "http://example.com?attestation_data_root=" + attDataRoot + "&slot=2" + "&committee_index=0" + request := httptest.NewRequest(http.MethodGet, url, nil) + request.Header.Add("Accept", "application/octet-stream") + writer := httptest.NewRecorder() + + s.GetAggregateAttestationV2(writer, request) + require.Equal(t, http.StatusOK, writer.Code, "Expected HTTP status OK") + + var resp ethpbalpha.AttestationElectra + require.NoError(t, resp.UnmarshalSSZ(writer.Body.Bytes())) + + compareResult(t, *structs.AttElectraFromConsensus(&resp), "2", hexutil.Encode(aggSlot2.AggregationBits), root1, sig.Marshal(), hexutil.Encode(aggSlot2.CommitteeBits)) + }) + t.Run("multiple matching aggregated attestations - return the one with most bits", func(t *testing.T) { + reqRoot, err := aggSlot1_Root1_1.Data.HashTreeRoot() + require.NoError(t, err, "Failed to generate attestation data hash tree root") + attDataRoot := hexutil.Encode(reqRoot[:]) + url := "http://example.com?attestation_data_root=" + attDataRoot + "&slot=1" + "&committee_index=0" + request := httptest.NewRequest(http.MethodGet, url, nil) + writer := httptest.NewRecorder() + + s.GetAggregateAttestationV2(writer, request) + require.Equal(t, http.StatusOK, writer.Code, "Expected HTTP status OK") + + var resp structs.AggregateAttestationResponse + require.NoError(t, json.Unmarshal(writer.Body.Bytes(), &resp), "Failed to unmarshal response") + require.NotNil(t, resp.Data, "Response data should not be nil") + + var attestation structs.AttestationElectra + require.NoError(t, json.Unmarshal(resp.Data, &attestation), "Failed to unmarshal attestation data") + + compareResult(t, attestation, "1", hexutil.Encode(aggSlot1_Root1_2.AggregationBits), root1, sig.Marshal(), hexutil.Encode(aggSlot1_Root1_1.CommitteeBits)) + }) + t.Run("multiple matching aggregated attestations - return the one with most bits - SSZ", func(t *testing.T) { + reqRoot, err := aggSlot1_Root1_1.Data.HashTreeRoot() + require.NoError(t, err, "Failed to generate attestation data hash tree root") + attDataRoot := hexutil.Encode(reqRoot[:]) + url := "http://example.com?attestation_data_root=" + attDataRoot + "&slot=1" + "&committee_index=0" + request := httptest.NewRequest(http.MethodGet, url, nil) + request.Header.Add("Accept", "application/octet-stream") + writer := httptest.NewRecorder() + + s.GetAggregateAttestationV2(writer, request) + require.Equal(t, http.StatusOK, writer.Code, "Expected HTTP status OK") + + var resp ethpbalpha.AttestationElectra + require.NoError(t, 
resp.UnmarshalSSZ(writer.Body.Bytes())) + + compareResult(t, *structs.AttElectraFromConsensus(&resp), "1", hexutil.Encode(aggSlot1_Root1_2.AggregationBits), root1, sig.Marshal(), hexutil.Encode(aggSlot1_Root1_1.CommitteeBits)) + }) + t.Run("1 matching unaggregated attestation", func(t *testing.T) { + reqRoot, err := unaggSlot4.Data.HashTreeRoot() + require.NoError(t, err, "Failed to generate attestation data hash tree root") + attDataRoot := hexutil.Encode(reqRoot[:]) + url := "http://example.com?attestation_data_root=" + attDataRoot + "&slot=4" + "&committee_index=0" + request := httptest.NewRequest(http.MethodGet, url, nil) + writer := httptest.NewRecorder() + + s.GetAggregateAttestationV2(writer, request) + require.Equal(t, http.StatusOK, writer.Code, "Expected HTTP status OK") + + var resp structs.AggregateAttestationResponse + require.NoError(t, json.Unmarshal(writer.Body.Bytes(), &resp), "Failed to unmarshal response") + require.NotNil(t, resp.Data, "Response data should not be nil") + + var attestation structs.AttestationElectra + require.NoError(t, json.Unmarshal(resp.Data, &attestation), "Failed to unmarshal attestation data") + compareResult(t, attestation, "4", hexutil.Encode(unaggSlot4.AggregationBits), root1, sig.Marshal(), hexutil.Encode(unaggSlot4.CommitteeBits)) + }) + t.Run("1 matching unaggregated attestation - SSZ", func(t *testing.T) { + reqRoot, err := unaggSlot4.Data.HashTreeRoot() + require.NoError(t, err, "Failed to generate attestation data hash tree root") + attDataRoot := hexutil.Encode(reqRoot[:]) + url := "http://example.com?attestation_data_root=" + attDataRoot + "&slot=4" + "&committee_index=0" + request := httptest.NewRequest(http.MethodGet, url, nil) + request.Header.Add("Accept", "application/octet-stream") + writer := httptest.NewRecorder() + + s.GetAggregateAttestationV2(writer, request) + require.Equal(t, http.StatusOK, writer.Code, "Expected HTTP status OK") + + var resp ethpbalpha.AttestationElectra + require.NoError(t, resp.UnmarshalSSZ(writer.Body.Bytes())) + + compareResult(t, *structs.AttElectraFromConsensus(&resp), "4", hexutil.Encode(unaggSlot4.AggregationBits), root1, sig.Marshal(), hexutil.Encode(unaggSlot4.CommitteeBits)) }) t.Run("multiple matching unaggregated attestations - their aggregate is returned", func(t *testing.T) { reqRoot, err := unaggSlot3_Root1_1.Data.HashTreeRoot() require.NoError(t, err, "Failed to generate attestation data hash tree root") attDataRoot := hexutil.Encode(reqRoot[:]) - url := "http://example.com?attestation_data_root=" + attDataRoot + "&slot=3" + url := "http://example.com?attestation_data_root=" + attDataRoot + "&slot=3" + "&committee_index=0" request := httptest.NewRequest(http.MethodGet, url, nil) writer := httptest.NewRecorder() - s.GetAggregateAttestation(writer, request) + s.GetAggregateAttestationV2(writer, request) require.Equal(t, http.StatusOK, writer.Code, "Expected HTTP status OK") var resp structs.AggregateAttestationResponse require.NoError(t, json.Unmarshal(writer.Body.Bytes(), &resp), "Failed to unmarshal response") require.NotNil(t, resp.Data, "Response data should not be nil") - var attestation structs.Attestation + var attestation structs.AttestationElectra require.NoError(t, json.Unmarshal(resp.Data, &attestation), "Failed to unmarshal attestation data") sig1, err := bls.SignatureFromBytes(unaggSlot3_Root1_1.Signature) require.NoError(t, err) sig2, err := bls.SignatureFromBytes(unaggSlot3_Root1_2.Signature) require.NoError(t, err) expectedSig := 
bls.AggregateSignatures([]common.Signature{sig1, sig2}) - compareResult(t, attestation, "3", hexutil.Encode(bitfield.Bitlist{0b11100}), root1, expectedSig.Marshal()) + compareResult(t, attestation, "3", hexutil.Encode(bitfield.Bitlist{0b11100}), root1, expectedSig.Marshal(), hexutil.Encode(unaggSlot3_Root1_1.CommitteeBits)) }) - }) - t.Run("V2", func(t *testing.T) { - t.Run("pre-electra", func(t *testing.T) { - committeeBits := bitfield.NewBitvector64() - committeeBits.SetBitAt(1, true) + t.Run("multiple matching unaggregated attestations - their aggregate is returned - SSZ", func(t *testing.T) { + reqRoot, err := unaggSlot3_Root1_1.Data.HashTreeRoot() + require.NoError(t, err, "Failed to generate attestation data hash tree root") + attDataRoot := hexutil.Encode(reqRoot[:]) + url := "http://example.com?attestation_data_root=" + attDataRoot + "&slot=3" + "&committee_index=0" + request := httptest.NewRequest(http.MethodGet, url, nil) + request.Header.Add("Accept", "application/octet-stream") + writer := httptest.NewRecorder() - aggSlot1_Root1_1 := createAttestation(1, bitfield.Bitlist{0b11100}, root1) - aggSlot1_Root1_2 := createAttestation(1, bitfield.Bitlist{0b10111}, root1) - aggSlot1_Root2 := createAttestation(1, bitfield.Bitlist{0b11100}, root2) - aggSlot2 := createAttestation(2, bitfield.Bitlist{0b11100}, root1) - unaggSlot3_Root1_1 := createAttestation(3, bitfield.Bitlist{0b11000}, root1) - unaggSlot3_Root1_2 := createAttestation(3, bitfield.Bitlist{0b10100}, root1) - unaggSlot3_Root2 := createAttestation(3, bitfield.Bitlist{0b11000}, root2) - unaggSlot4 := createAttestation(4, bitfield.Bitlist{0b11000}, root1) + s.GetAggregateAttestationV2(writer, request) + require.Equal(t, http.StatusOK, writer.Code, "Expected HTTP status OK") - // Add one post-electra attestation to ensure that it is being ignored. - // We choose slot 2 where we have one pre-electra attestation with less attestation bits. 
- postElectraAtt := createAttestationElectra(2, bitfield.Bitlist{0b11111}, root1) + var resp ethpbalpha.AttestationElectra + require.NoError(t, resp.UnmarshalSSZ(writer.Body.Bytes())) - compareResult := func( - t *testing.T, - attestation structs.Attestation, - expectedSlot string, - expectedAggregationBits string, - expectedRoot []byte, - expectedSig []byte, - ) { - assert.Equal(t, expectedAggregationBits, attestation.AggregationBits, "Unexpected aggregation bits in attestation") - assert.Equal(t, hexutil.Encode(expectedSig), attestation.Signature, "Signature mismatch") - assert.Equal(t, expectedSlot, attestation.Data.Slot, "Slot mismatch in attestation data") - assert.Equal(t, "0", attestation.Data.CommitteeIndex, "Committee index mismatch") - assert.Equal(t, hexutil.Encode(expectedRoot), attestation.Data.BeaconBlockRoot, "Beacon block root mismatch") - - // Source checkpoint checks - require.NotNil(t, attestation.Data.Source, "Source checkpoint should not be nil") - assert.Equal(t, "1", attestation.Data.Source.Epoch, "Source epoch mismatch") - assert.Equal(t, hexutil.Encode(expectedRoot), attestation.Data.Source.Root, "Source root mismatch") - - // Target checkpoint checks - require.NotNil(t, attestation.Data.Target, "Target checkpoint should not be nil") - assert.Equal(t, "1", attestation.Data.Target.Epoch, "Target epoch mismatch") - assert.Equal(t, hexutil.Encode(expectedRoot), attestation.Data.Target.Root, "Target root mismatch") - } - - pool := attestations.NewPool() - require.NoError(t, pool.SaveUnaggregatedAttestations([]ethpbalpha.Att{unaggSlot3_Root1_1, unaggSlot3_Root1_2, unaggSlot3_Root2, unaggSlot4}), "Failed to save unaggregated attestations") - unagg := pool.UnaggregatedAttestations() - require.Equal(t, 4, len(unagg), "Expected 4 unaggregated attestations") - require.NoError(t, pool.SaveAggregatedAttestations([]ethpbalpha.Att{aggSlot1_Root1_1, aggSlot1_Root1_2, aggSlot1_Root2, aggSlot2, postElectraAtt}), "Failed to save aggregated attestations") - agg := pool.AggregatedAttestations() - require.Equal(t, 5, len(agg), "Expected 5 aggregated attestations, 4 pre electra and 1 post electra") - s := &Server{ - AttestationsPool: pool, - } - - t.Run("non-matching attestation request", func(t *testing.T) { - reqRoot, err := aggSlot2.Data.HashTreeRoot() - require.NoError(t, err, "Failed to generate attestation data hash tree root") - attDataRoot := hexutil.Encode(reqRoot[:]) - url := "http://example.com?attestation_data_root=" + attDataRoot + "&slot=1" + "&committee_index=0" - request := httptest.NewRequest(http.MethodGet, url, nil) - writer := httptest.NewRecorder() - - s.GetAggregateAttestationV2(writer, request) - assert.Equal(t, http.StatusNotFound, writer.Code, "Expected HTTP status NotFound for non-matching request") - }) - t.Run("1 matching aggregated attestation", func(t *testing.T) { - reqRoot, err := aggSlot2.Data.HashTreeRoot() - require.NoError(t, err, "Failed to generate attestation data hash tree root") - attDataRoot := hexutil.Encode(reqRoot[:]) - url := "http://example.com?attestation_data_root=" + attDataRoot + "&slot=2" + "&committee_index=0" - request := httptest.NewRequest(http.MethodGet, url, nil) - writer := httptest.NewRecorder() - - s.GetAggregateAttestationV2(writer, request) - require.Equal(t, http.StatusOK, writer.Code, "Expected HTTP status OK") - - var resp structs.AggregateAttestationResponse - require.NoError(t, json.Unmarshal(writer.Body.Bytes(), &resp), "Failed to unmarshal response") - require.NotNil(t, resp.Data, "Response data should not be nil") - 
- var attestation structs.Attestation - require.NoError(t, json.Unmarshal(resp.Data, &attestation), "Failed to unmarshal attestation data") - - compareResult(t, attestation, "2", hexutil.Encode(aggSlot2.AggregationBits), root1, sig.Marshal()) - }) - t.Run("1 matching aggregated attestation - SSZ", func(t *testing.T) { - reqRoot, err := aggSlot2.Data.HashTreeRoot() - require.NoError(t, err, "Failed to generate attestation data hash tree root") - attDataRoot := hexutil.Encode(reqRoot[:]) - url := "http://example.com?attestation_data_root=" + attDataRoot + "&slot=2" + "&committee_index=0" - request := httptest.NewRequest(http.MethodGet, url, nil) - request.Header.Add("Accept", "application/octet-stream") - writer := httptest.NewRecorder() - - s.GetAggregateAttestationV2(writer, request) - require.Equal(t, http.StatusOK, writer.Code, "Expected HTTP status OK") - - var resp ethpbalpha.Attestation - require.NoError(t, resp.UnmarshalSSZ(writer.Body.Bytes())) - - compareResult(t, *structs.AttFromConsensus(&resp), "2", hexutil.Encode(aggSlot2.AggregationBits), root1, sig.Marshal()) - }) - t.Run("multiple matching aggregated attestations - return the one with most bits", func(t *testing.T) { - reqRoot, err := aggSlot1_Root1_1.Data.HashTreeRoot() - require.NoError(t, err, "Failed to generate attestation data hash tree root") - attDataRoot := hexutil.Encode(reqRoot[:]) - url := "http://example.com?attestation_data_root=" + attDataRoot + "&slot=1" + "&committee_index=0" - request := httptest.NewRequest(http.MethodGet, url, nil) - writer := httptest.NewRecorder() - - s.GetAggregateAttestationV2(writer, request) - require.Equal(t, http.StatusOK, writer.Code, "Expected HTTP status OK") - - var resp structs.AggregateAttestationResponse - require.NoError(t, json.Unmarshal(writer.Body.Bytes(), &resp), "Failed to unmarshal response") - require.NotNil(t, resp.Data, "Response data should not be nil") - - var attestation structs.Attestation - require.NoError(t, json.Unmarshal(resp.Data, &attestation), "Failed to unmarshal attestation data") - - compareResult(t, attestation, "1", hexutil.Encode(aggSlot1_Root1_2.AggregationBits), root1, sig.Marshal()) - }) - t.Run("multiple matching aggregated attestations - return the one with most bits - SSZ", func(t *testing.T) { - reqRoot, err := aggSlot1_Root1_1.Data.HashTreeRoot() - require.NoError(t, err, "Failed to generate attestation data hash tree root") - attDataRoot := hexutil.Encode(reqRoot[:]) - url := "http://example.com?attestation_data_root=" + attDataRoot + "&slot=1" + "&committee_index=0" - request := httptest.NewRequest(http.MethodGet, url, nil) - request.Header.Add("Accept", "application/octet-stream") - writer := httptest.NewRecorder() - - s.GetAggregateAttestationV2(writer, request) - require.Equal(t, http.StatusOK, writer.Code, "Expected HTTP status OK") - - var resp ethpbalpha.Attestation - require.NoError(t, resp.UnmarshalSSZ(writer.Body.Bytes())) - - compareResult(t, *structs.AttFromConsensus(&resp), "1", hexutil.Encode(aggSlot1_Root1_2.AggregationBits), root1, sig.Marshal()) - }) - }) - t.Run("post-electra", func(t *testing.T) { - aggSlot1_Root1_1 := createAttestationElectra(1, bitfield.Bitlist{0b11100}, root1) - aggSlot1_Root1_2 := createAttestationElectra(1, bitfield.Bitlist{0b10111}, root1) - aggSlot1_Root2 := createAttestationElectra(1, bitfield.Bitlist{0b11100}, root2) - aggSlot2 := createAttestationElectra(2, bitfield.Bitlist{0b11100}, root1) - unaggSlot3_Root1_1 := createAttestationElectra(3, bitfield.Bitlist{0b11000}, root1) - 
unaggSlot3_Root1_2 := createAttestationElectra(3, bitfield.Bitlist{0b10100}, root1) - unaggSlot3_Root2 := createAttestationElectra(3, bitfield.Bitlist{0b11000}, root2) - unaggSlot4 := createAttestationElectra(4, bitfield.Bitlist{0b11000}, root1) - - // Add one pre-electra attestation to ensure that it is being ignored. - // We choose slot 2 where we have one post-electra attestation with less attestation bits. - preElectraAtt := createAttestation(2, bitfield.Bitlist{0b11111}, root1) - - compareResult := func( - t *testing.T, - attestation structs.AttestationElectra, - expectedSlot string, - expectedAggregationBits string, - expectedRoot []byte, - expectedSig []byte, - expectedCommitteeBits string, - ) { - assert.Equal(t, expectedAggregationBits, attestation.AggregationBits, "Unexpected aggregation bits in attestation") - assert.Equal(t, expectedCommitteeBits, attestation.CommitteeBits) - assert.Equal(t, hexutil.Encode(expectedSig), attestation.Signature, "Signature mismatch") - assert.Equal(t, expectedSlot, attestation.Data.Slot, "Slot mismatch in attestation data") - assert.Equal(t, "0", attestation.Data.CommitteeIndex, "Committee index mismatch") - assert.Equal(t, hexutil.Encode(expectedRoot), attestation.Data.BeaconBlockRoot, "Beacon block root mismatch") - - // Source checkpoint checks - require.NotNil(t, attestation.Data.Source, "Source checkpoint should not be nil") - assert.Equal(t, "1", attestation.Data.Source.Epoch, "Source epoch mismatch") - assert.Equal(t, hexutil.Encode(expectedRoot), attestation.Data.Source.Root, "Source root mismatch") - - // Target checkpoint checks - require.NotNil(t, attestation.Data.Target, "Target checkpoint should not be nil") - assert.Equal(t, "1", attestation.Data.Target.Epoch, "Target epoch mismatch") - assert.Equal(t, hexutil.Encode(expectedRoot), attestation.Data.Target.Root, "Target root mismatch") - } - - pool := attestations.NewPool() - require.NoError(t, pool.SaveUnaggregatedAttestations([]ethpbalpha.Att{unaggSlot3_Root1_1, unaggSlot3_Root1_2, unaggSlot3_Root2, unaggSlot4}), "Failed to save unaggregated attestations") - unagg := pool.UnaggregatedAttestations() - require.Equal(t, 4, len(unagg), "Expected 4 unaggregated attestations") - require.NoError(t, pool.SaveAggregatedAttestations([]ethpbalpha.Att{aggSlot1_Root1_1, aggSlot1_Root1_2, aggSlot1_Root2, aggSlot2, preElectraAtt}), "Failed to save aggregated attestations") - agg := pool.AggregatedAttestations() - require.Equal(t, 5, len(agg), "Expected 5 aggregated attestations, 4 electra and 1 pre electra") - bs, err := util.NewBeaconState() + sig1, err := bls.SignatureFromBytes(unaggSlot3_Root1_1.Signature) require.NoError(t, err) + sig2, err := bls.SignatureFromBytes(unaggSlot3_Root1_2.Signature) + require.NoError(t, err) + expectedSig := bls.AggregateSignatures([]common.Signature{sig1, sig2}) + compareResult(t, *structs.AttElectraFromConsensus(&resp), "3", hexutil.Encode(bitfield.Bitlist{0b11100}), root1, expectedSig.Marshal(), hexutil.Encode(unaggSlot3_Root1_1.CommitteeBits)) + }) + t.Run("pre-electra attestation is ignored", func(t *testing.T) { - params.SetupTestConfigCleanup(t) - config := params.BeaconConfig() - config.ElectraForkEpoch = 0 - params.OverrideBeaconConfig(config) - - chainService := &mockChain.ChainService{State: bs} - s := &Server{ - ChainInfoFetcher: chainService, - TimeFetcher: chainService, - AttestationsPool: pool, - } - t.Run("non-matching attestation request", func(t *testing.T) { - reqRoot, err := aggSlot2.Data.HashTreeRoot() - require.NoError(t, err, "Failed to 
generate attestation data hash tree root") - attDataRoot := hexutil.Encode(reqRoot[:]) - url := "http://example.com?attestation_data_root=" + attDataRoot + "&slot=1" + "&committee_index=0" - request := httptest.NewRequest(http.MethodGet, url, nil) - writer := httptest.NewRecorder() - - s.GetAggregateAttestationV2(writer, request) - assert.Equal(t, http.StatusNotFound, writer.Code, "Expected HTTP status NotFound for non-matching request") - }) - t.Run("1 matching aggregated attestation", func(t *testing.T) { - reqRoot, err := aggSlot2.Data.HashTreeRoot() - require.NoError(t, err, "Failed to generate attestation data hash tree root") - attDataRoot := hexutil.Encode(reqRoot[:]) - url := "http://example.com?attestation_data_root=" + attDataRoot + "&slot=2" + "&committee_index=0" - request := httptest.NewRequest(http.MethodGet, url, nil) - writer := httptest.NewRecorder() - - s.GetAggregateAttestationV2(writer, request) - require.Equal(t, http.StatusOK, writer.Code, "Expected HTTP status OK") - - var resp structs.AggregateAttestationResponse - require.NoError(t, json.Unmarshal(writer.Body.Bytes(), &resp), "Failed to unmarshal response") - require.NotNil(t, resp.Data, "Response data should not be nil") - - var attestation structs.AttestationElectra - require.NoError(t, json.Unmarshal(resp.Data, &attestation), "Failed to unmarshal attestation data") - - compareResult(t, attestation, "2", hexutil.Encode(aggSlot2.AggregationBits), root1, sig.Marshal(), hexutil.Encode(aggSlot2.CommitteeBits)) - }) - t.Run("1 matching aggregated attestation - SSZ", func(t *testing.T) { - reqRoot, err := aggSlot2.Data.HashTreeRoot() - require.NoError(t, err, "Failed to generate attestation data hash tree root") - attDataRoot := hexutil.Encode(reqRoot[:]) - url := "http://example.com?attestation_data_root=" + attDataRoot + "&slot=2" + "&committee_index=0" - request := httptest.NewRequest(http.MethodGet, url, nil) - request.Header.Add("Accept", "application/octet-stream") - writer := httptest.NewRecorder() - - s.GetAggregateAttestationV2(writer, request) - require.Equal(t, http.StatusOK, writer.Code, "Expected HTTP status OK") - - var resp ethpbalpha.AttestationElectra - require.NoError(t, resp.UnmarshalSSZ(writer.Body.Bytes())) - - compareResult(t, *structs.AttElectraFromConsensus(&resp), "2", hexutil.Encode(aggSlot2.AggregationBits), root1, sig.Marshal(), hexutil.Encode(aggSlot2.CommitteeBits)) - }) - t.Run("multiple matching aggregated attestations - return the one with most bits", func(t *testing.T) { - reqRoot, err := aggSlot1_Root1_1.Data.HashTreeRoot() - require.NoError(t, err, "Failed to generate attestation data hash tree root") - attDataRoot := hexutil.Encode(reqRoot[:]) - url := "http://example.com?attestation_data_root=" + attDataRoot + "&slot=1" + "&committee_index=0" - request := httptest.NewRequest(http.MethodGet, url, nil) - writer := httptest.NewRecorder() - - s.GetAggregateAttestationV2(writer, request) - require.Equal(t, http.StatusOK, writer.Code, "Expected HTTP status OK") - - var resp structs.AggregateAttestationResponse - require.NoError(t, json.Unmarshal(writer.Body.Bytes(), &resp), "Failed to unmarshal response") - require.NotNil(t, resp.Data, "Response data should not be nil") - - var attestation structs.AttestationElectra - require.NoError(t, json.Unmarshal(resp.Data, &attestation), "Failed to unmarshal attestation data") - - compareResult(t, attestation, "1", hexutil.Encode(aggSlot1_Root1_2.AggregationBits), root1, sig.Marshal(), hexutil.Encode(aggSlot1_Root1_1.CommitteeBits)) - }) - 
t.Run("multiple matching aggregated attestations - return the one with most bits - SSZ", func(t *testing.T) { - reqRoot, err := aggSlot1_Root1_1.Data.HashTreeRoot() - require.NoError(t, err, "Failed to generate attestation data hash tree root") - attDataRoot := hexutil.Encode(reqRoot[:]) - url := "http://example.com?attestation_data_root=" + attDataRoot + "&slot=1" + "&committee_index=0" - request := httptest.NewRequest(http.MethodGet, url, nil) - request.Header.Add("Accept", "application/octet-stream") - writer := httptest.NewRecorder() - - s.GetAggregateAttestationV2(writer, request) - require.Equal(t, http.StatusOK, writer.Code, "Expected HTTP status OK") - - var resp ethpbalpha.AttestationElectra - require.NoError(t, resp.UnmarshalSSZ(writer.Body.Bytes())) - - compareResult(t, *structs.AttElectraFromConsensus(&resp), "1", hexutil.Encode(aggSlot1_Root1_2.AggregationBits), root1, sig.Marshal(), hexutil.Encode(aggSlot1_Root1_1.CommitteeBits)) - }) - t.Run("1 matching unaggregated attestation", func(t *testing.T) { - reqRoot, err := unaggSlot4.Data.HashTreeRoot() - require.NoError(t, err, "Failed to generate attestation data hash tree root") - attDataRoot := hexutil.Encode(reqRoot[:]) - url := "http://example.com?attestation_data_root=" + attDataRoot + "&slot=4" + "&committee_index=0" - request := httptest.NewRequest(http.MethodGet, url, nil) - writer := httptest.NewRecorder() - - s.GetAggregateAttestationV2(writer, request) - require.Equal(t, http.StatusOK, writer.Code, "Expected HTTP status OK") - - var resp structs.AggregateAttestationResponse - require.NoError(t, json.Unmarshal(writer.Body.Bytes(), &resp), "Failed to unmarshal response") - require.NotNil(t, resp.Data, "Response data should not be nil") - - var attestation structs.AttestationElectra - require.NoError(t, json.Unmarshal(resp.Data, &attestation), "Failed to unmarshal attestation data") - compareResult(t, attestation, "4", hexutil.Encode(unaggSlot4.AggregationBits), root1, sig.Marshal(), hexutil.Encode(unaggSlot4.CommitteeBits)) - }) - t.Run("1 matching unaggregated attestation - SSZ", func(t *testing.T) { - reqRoot, err := unaggSlot4.Data.HashTreeRoot() - require.NoError(t, err, "Failed to generate attestation data hash tree root") - attDataRoot := hexutil.Encode(reqRoot[:]) - url := "http://example.com?attestation_data_root=" + attDataRoot + "&slot=4" + "&committee_index=0" - request := httptest.NewRequest(http.MethodGet, url, nil) - request.Header.Add("Accept", "application/octet-stream") - writer := httptest.NewRecorder() - - s.GetAggregateAttestationV2(writer, request) - require.Equal(t, http.StatusOK, writer.Code, "Expected HTTP status OK") - - var resp ethpbalpha.AttestationElectra - require.NoError(t, resp.UnmarshalSSZ(writer.Body.Bytes())) - - compareResult(t, *structs.AttElectraFromConsensus(&resp), "4", hexutil.Encode(unaggSlot4.AggregationBits), root1, sig.Marshal(), hexutil.Encode(unaggSlot4.CommitteeBits)) - }) - t.Run("multiple matching unaggregated attestations - their aggregate is returned", func(t *testing.T) { - reqRoot, err := unaggSlot3_Root1_1.Data.HashTreeRoot() - require.NoError(t, err, "Failed to generate attestation data hash tree root") - attDataRoot := hexutil.Encode(reqRoot[:]) - url := "http://example.com?attestation_data_root=" + attDataRoot + "&slot=3" + "&committee_index=0" - request := httptest.NewRequest(http.MethodGet, url, nil) - writer := httptest.NewRecorder() - - s.GetAggregateAttestationV2(writer, request) - require.Equal(t, http.StatusOK, writer.Code, "Expected HTTP status OK") - - 
var resp structs.AggregateAttestationResponse - require.NoError(t, json.Unmarshal(writer.Body.Bytes(), &resp), "Failed to unmarshal response") - require.NotNil(t, resp.Data, "Response data should not be nil") - - var attestation structs.AttestationElectra - require.NoError(t, json.Unmarshal(resp.Data, &attestation), "Failed to unmarshal attestation data") - sig1, err := bls.SignatureFromBytes(unaggSlot3_Root1_1.Signature) - require.NoError(t, err) - sig2, err := bls.SignatureFromBytes(unaggSlot3_Root1_2.Signature) - require.NoError(t, err) - expectedSig := bls.AggregateSignatures([]common.Signature{sig1, sig2}) - compareResult(t, attestation, "3", hexutil.Encode(bitfield.Bitlist{0b11100}), root1, expectedSig.Marshal(), hexutil.Encode(unaggSlot3_Root1_1.CommitteeBits)) - }) - t.Run("multiple matching unaggregated attestations - their aggregate is returned - SSZ", func(t *testing.T) { - reqRoot, err := unaggSlot3_Root1_1.Data.HashTreeRoot() - require.NoError(t, err, "Failed to generate attestation data hash tree root") - attDataRoot := hexutil.Encode(reqRoot[:]) - url := "http://example.com?attestation_data_root=" + attDataRoot + "&slot=3" + "&committee_index=0" - request := httptest.NewRequest(http.MethodGet, url, nil) - request.Header.Add("Accept", "application/octet-stream") - writer := httptest.NewRecorder() - - s.GetAggregateAttestationV2(writer, request) - require.Equal(t, http.StatusOK, writer.Code, "Expected HTTP status OK") - - var resp ethpbalpha.AttestationElectra - require.NoError(t, resp.UnmarshalSSZ(writer.Body.Bytes())) - - sig1, err := bls.SignatureFromBytes(unaggSlot3_Root1_1.Signature) - require.NoError(t, err) - sig2, err := bls.SignatureFromBytes(unaggSlot3_Root1_2.Signature) - require.NoError(t, err) - expectedSig := bls.AggregateSignatures([]common.Signature{sig1, sig2}) - compareResult(t, *structs.AttElectraFromConsensus(&resp), "3", hexutil.Encode(bitfield.Bitlist{0b11100}), root1, expectedSig.Marshal(), hexutil.Encode(unaggSlot3_Root1_1.CommitteeBits)) - }) - t.Run("pre-electra attestation is ignored", func(t *testing.T) { - - }) }) }) } @@ -705,306 +562,212 @@ func TestSubmitContributionAndProofs(t *testing.T) { }) } -func TestSubmitAggregateAndProofs(t *testing.T) { +func TestSubmitAggregateAndProofsV2(t *testing.T) { slot := primitives.Slot(0) mock := &mockChain.ChainService{Slot: &slot, Genesis: time.Now().Add(-1 * time.Duration(params.BeaconConfig().SecondsPerSlot) * time.Second)} s := &Server{ CoreService: &core.Service{GenesisTimeFetcher: mock}, TimeFetcher: mock, } - t.Run("V1", func(t *testing.T) { - t.Run("single", func(t *testing.T) { - broadcaster := &p2pmock.MockBroadcaster{} - s.CoreService.Broadcaster = broadcaster - var body bytes.Buffer - _, err := body.WriteString(singleAggregate) - require.NoError(t, err) - request := httptest.NewRequest(http.MethodPost, "http://example.com", &body) - writer := httptest.NewRecorder() - writer.Body = &bytes.Buffer{} + t.Run("single", func(t *testing.T) { + params.SetupTestConfigCleanup(t) + config := params.BeaconConfig() + config.ElectraForkEpoch = 0 + params.OverrideBeaconConfig(config) - s.SubmitAggregateAndProofs(writer, request) - assert.Equal(t, http.StatusOK, writer.Code) - assert.Equal(t, 1, len(broadcaster.BroadcastMessages)) - }) - t.Run("multiple", func(t *testing.T) { - broadcaster := &p2pmock.MockBroadcaster{} - s.CoreService.Broadcaster = broadcaster - s.CoreService.SyncCommitteePool = synccommittee.NewStore() + broadcaster := &p2pmock.MockBroadcaster{} + s.CoreService.Broadcaster = broadcaster - var 
body bytes.Buffer - _, err := body.WriteString(multipleAggregates) - require.NoError(t, err) - request := httptest.NewRequest(http.MethodPost, "http://example.com", &body) - writer := httptest.NewRecorder() - writer.Body = &bytes.Buffer{} + var body bytes.Buffer + _, err := body.WriteString(singleAggregateElectra) + require.NoError(t, err) + request := httptest.NewRequest(http.MethodPost, "http://example.com", &body) + request.Header.Set(api.VersionHeader, version.String(version.Electra)) + writer := httptest.NewRecorder() + writer.Body = &bytes.Buffer{} - s.SubmitAggregateAndProofs(writer, request) - assert.Equal(t, http.StatusOK, writer.Code) - assert.Equal(t, 2, len(broadcaster.BroadcastMessages)) - }) - t.Run("Phase 0 post electra", func(t *testing.T) { - params.SetupTestConfigCleanup(t) - config := params.BeaconConfig() - config.ElectraForkEpoch = 0 - params.OverrideBeaconConfig(config) - - var body bytes.Buffer - _, err := body.WriteString(singleAggregate) - require.NoError(t, err) - request := httptest.NewRequest(http.MethodPost, "http://example.com", &body) - writer := httptest.NewRecorder() - writer.Body = &bytes.Buffer{} - - s.SubmitAggregateAndProofs(writer, request) - e := &httputil.DefaultJsonError{} - require.NoError(t, json.Unmarshal(writer.Body.Bytes(), e)) - assert.Equal(t, http.StatusBadRequest, e.Code) - assert.ErrorContains(t, "old aggregate and proof", errors.New(e.Message)) - }) - t.Run("no body", func(t *testing.T) { - request := httptest.NewRequest(http.MethodPost, "http://example.com", nil) - writer := httptest.NewRecorder() - writer.Body = &bytes.Buffer{} - - s.SubmitAggregateAndProofs(writer, request) - assert.Equal(t, http.StatusBadRequest, writer.Code) - e := &httputil.DefaultJsonError{} - require.NoError(t, json.Unmarshal(writer.Body.Bytes(), e)) - assert.Equal(t, http.StatusBadRequest, e.Code) - assert.Equal(t, true, strings.Contains(e.Message, "No data submitted")) - }) - t.Run("empty", func(t *testing.T) { - var body bytes.Buffer - _, err := body.WriteString("[]") - require.NoError(t, err) - request := httptest.NewRequest(http.MethodPost, "http://example.com", &body) - writer := httptest.NewRecorder() - writer.Body = &bytes.Buffer{} - - s.SubmitAggregateAndProofs(writer, request) - assert.Equal(t, http.StatusBadRequest, writer.Code) - e := &httputil.DefaultJsonError{} - require.NoError(t, json.Unmarshal(writer.Body.Bytes(), e)) - assert.Equal(t, http.StatusBadRequest, e.Code) - assert.Equal(t, true, strings.Contains(e.Message, "No data submitted")) - }) - t.Run("invalid", func(t *testing.T) { - var body bytes.Buffer - _, err := body.WriteString(invalidAggregate) - require.NoError(t, err) - request := httptest.NewRequest(http.MethodPost, "http://example.com", &body) - writer := httptest.NewRecorder() - writer.Body = &bytes.Buffer{} - - s.SubmitAggregateAndProofs(writer, request) - assert.Equal(t, http.StatusBadRequest, writer.Code) - e := &httputil.DefaultJsonError{} - require.NoError(t, json.Unmarshal(writer.Body.Bytes(), e)) - assert.Equal(t, http.StatusBadRequest, e.Code) - }) + s.SubmitAggregateAndProofsV2(writer, request) + assert.Equal(t, http.StatusOK, writer.Code) + assert.Equal(t, 1, len(broadcaster.BroadcastMessages)) }) - t.Run("V2", func(t *testing.T) { - t.Run("single", func(t *testing.T) { - params.SetupTestConfigCleanup(t) - config := params.BeaconConfig() - config.ElectraForkEpoch = 0 - params.OverrideBeaconConfig(config) + t.Run("single-pre-electra", func(t *testing.T) { + broadcaster := &p2pmock.MockBroadcaster{} + s.CoreService.Broadcaster 
= broadcaster - broadcaster := &p2pmock.MockBroadcaster{} - s.CoreService.Broadcaster = broadcaster + var body bytes.Buffer + _, err := body.WriteString(singleAggregate) + require.NoError(t, err) + request := httptest.NewRequest(http.MethodPost, "http://example.com", &body) + request.Header.Set(api.VersionHeader, version.String(version.Phase0)) + writer := httptest.NewRecorder() + writer.Body = &bytes.Buffer{} - var body bytes.Buffer - _, err := body.WriteString(singleAggregateElectra) - require.NoError(t, err) - request := httptest.NewRequest(http.MethodPost, "http://example.com", &body) - request.Header.Set(api.VersionHeader, version.String(version.Electra)) - writer := httptest.NewRecorder() - writer.Body = &bytes.Buffer{} + s.SubmitAggregateAndProofsV2(writer, request) + assert.Equal(t, http.StatusOK, writer.Code) + assert.Equal(t, 1, len(broadcaster.BroadcastMessages)) + }) + t.Run("multiple", func(t *testing.T) { + params.SetupTestConfigCleanup(t) + config := params.BeaconConfig() + config.ElectraForkEpoch = 0 + params.OverrideBeaconConfig(config) - s.SubmitAggregateAndProofsV2(writer, request) - assert.Equal(t, http.StatusOK, writer.Code) - assert.Equal(t, 1, len(broadcaster.BroadcastMessages)) - }) - t.Run("single-pre-electra", func(t *testing.T) { - broadcaster := &p2pmock.MockBroadcaster{} - s.CoreService.Broadcaster = broadcaster + broadcaster := &p2pmock.MockBroadcaster{} + s.CoreService.Broadcaster = broadcaster + s.CoreService.SyncCommitteePool = synccommittee.NewStore() - var body bytes.Buffer - _, err := body.WriteString(singleAggregate) - require.NoError(t, err) - request := httptest.NewRequest(http.MethodPost, "http://example.com", &body) - request.Header.Set(api.VersionHeader, version.String(version.Phase0)) - writer := httptest.NewRecorder() - writer.Body = &bytes.Buffer{} + var body bytes.Buffer + _, err := body.WriteString(multipleAggregatesElectra) + require.NoError(t, err) + request := httptest.NewRequest(http.MethodPost, "http://example.com", &body) + request.Header.Set(api.VersionHeader, version.String(version.Electra)) + writer := httptest.NewRecorder() + writer.Body = &bytes.Buffer{} - s.SubmitAggregateAndProofsV2(writer, request) - assert.Equal(t, http.StatusOK, writer.Code) - assert.Equal(t, 1, len(broadcaster.BroadcastMessages)) - }) - t.Run("multiple", func(t *testing.T) { - params.SetupTestConfigCleanup(t) - config := params.BeaconConfig() - config.ElectraForkEpoch = 0 - params.OverrideBeaconConfig(config) + s.SubmitAggregateAndProofsV2(writer, request) + assert.Equal(t, http.StatusOK, writer.Code) + assert.Equal(t, 2, len(broadcaster.BroadcastMessages)) + }) + t.Run("multiple-pre-electra", func(t *testing.T) { + broadcaster := &p2pmock.MockBroadcaster{} + s.CoreService.Broadcaster = broadcaster + s.CoreService.SyncCommitteePool = synccommittee.NewStore() - broadcaster := &p2pmock.MockBroadcaster{} - s.CoreService.Broadcaster = broadcaster - s.CoreService.SyncCommitteePool = synccommittee.NewStore() + var body bytes.Buffer + _, err := body.WriteString(multipleAggregates) + require.NoError(t, err) + request := httptest.NewRequest(http.MethodPost, "http://example.com", &body) + request.Header.Set(api.VersionHeader, version.String(version.Phase0)) + writer := httptest.NewRecorder() + writer.Body = &bytes.Buffer{} - var body bytes.Buffer - _, err := body.WriteString(multipleAggregatesElectra) - require.NoError(t, err) - request := httptest.NewRequest(http.MethodPost, "http://example.com", &body) - request.Header.Set(api.VersionHeader, 
version.String(version.Electra)) - writer := httptest.NewRecorder() - writer.Body = &bytes.Buffer{} + s.SubmitAggregateAndProofsV2(writer, request) + assert.Equal(t, http.StatusOK, writer.Code) + assert.Equal(t, 2, len(broadcaster.BroadcastMessages)) + }) + t.Run("Phase 0 post electra", func(t *testing.T) { + params.SetupTestConfigCleanup(t) + config := params.BeaconConfig() + config.ElectraForkEpoch = 0 + params.OverrideBeaconConfig(config) - s.SubmitAggregateAndProofsV2(writer, request) - assert.Equal(t, http.StatusOK, writer.Code) - assert.Equal(t, 2, len(broadcaster.BroadcastMessages)) - }) - t.Run("multiple-pre-electra", func(t *testing.T) { - broadcaster := &p2pmock.MockBroadcaster{} - s.CoreService.Broadcaster = broadcaster - s.CoreService.SyncCommitteePool = synccommittee.NewStore() + var body bytes.Buffer + _, err := body.WriteString(singleAggregate) + require.NoError(t, err) + request := httptest.NewRequest(http.MethodPost, "http://example.com", &body) + request.Header.Set(api.VersionHeader, version.String(version.Phase0)) + writer := httptest.NewRecorder() + writer.Body = &bytes.Buffer{} - var body bytes.Buffer - _, err := body.WriteString(multipleAggregates) - require.NoError(t, err) - request := httptest.NewRequest(http.MethodPost, "http://example.com", &body) - request.Header.Set(api.VersionHeader, version.String(version.Phase0)) - writer := httptest.NewRecorder() - writer.Body = &bytes.Buffer{} + s.SubmitAggregateAndProofsV2(writer, request) + e := &httputil.DefaultJsonError{} + require.NoError(t, json.Unmarshal(writer.Body.Bytes(), e)) + assert.Equal(t, http.StatusBadRequest, e.Code) + assert.ErrorContains(t, "old aggregate and proof", errors.New(e.Message)) + }) + t.Run("electra agg pre electra", func(t *testing.T) { + var body bytes.Buffer + _, err := body.WriteString(singleAggregateElectra) + require.NoError(t, err) + request := httptest.NewRequest(http.MethodPost, "http://example.com", &body) + request.Header.Set(api.VersionHeader, version.String(version.Electra)) + writer := httptest.NewRecorder() + writer.Body = &bytes.Buffer{} - s.SubmitAggregateAndProofsV2(writer, request) - assert.Equal(t, http.StatusOK, writer.Code) - assert.Equal(t, 2, len(broadcaster.BroadcastMessages)) - }) - t.Run("Phase 0 post electra", func(t *testing.T) { - params.SetupTestConfigCleanup(t) - config := params.BeaconConfig() - config.ElectraForkEpoch = 0 - params.OverrideBeaconConfig(config) + s.SubmitAggregateAndProofsV2(writer, request) + e := &httputil.DefaultJsonError{} + require.NoError(t, json.Unmarshal(writer.Body.Bytes(), e)) + assert.Equal(t, http.StatusBadRequest, e.Code) + assert.ErrorContains(t, "electra aggregate and proof not supported yet", errors.New(e.Message)) + }) + t.Run("no body", func(t *testing.T) { + request := httptest.NewRequest(http.MethodPost, "http://example.com", nil) + request.Header.Set(api.VersionHeader, version.String(version.Electra)) + writer := httptest.NewRecorder() + writer.Body = &bytes.Buffer{} - var body bytes.Buffer - _, err := body.WriteString(singleAggregate) - require.NoError(t, err) - request := httptest.NewRequest(http.MethodPost, "http://example.com", &body) - request.Header.Set(api.VersionHeader, version.String(version.Phase0)) - writer := httptest.NewRecorder() - writer.Body = &bytes.Buffer{} + s.SubmitAggregateAndProofsV2(writer, request) + assert.Equal(t, http.StatusBadRequest, writer.Code) + e := &httputil.DefaultJsonError{} + require.NoError(t, json.Unmarshal(writer.Body.Bytes(), e)) + assert.Equal(t, http.StatusBadRequest, e.Code) + 
assert.Equal(t, true, strings.Contains(e.Message, "No data submitted")) + }) + t.Run("no body-pre-electra", func(t *testing.T) { + request := httptest.NewRequest(http.MethodPost, "http://example.com", nil) + request.Header.Set(api.VersionHeader, version.String(version.Bellatrix)) + writer := httptest.NewRecorder() + writer.Body = &bytes.Buffer{} - s.SubmitAggregateAndProofsV2(writer, request) - e := &httputil.DefaultJsonError{} - require.NoError(t, json.Unmarshal(writer.Body.Bytes(), e)) - assert.Equal(t, http.StatusBadRequest, e.Code) - assert.ErrorContains(t, "old aggregate and proof", errors.New(e.Message)) - }) - t.Run("electra agg pre electra", func(t *testing.T) { - var body bytes.Buffer - _, err := body.WriteString(singleAggregateElectra) - require.NoError(t, err) - request := httptest.NewRequest(http.MethodPost, "http://example.com", &body) - request.Header.Set(api.VersionHeader, version.String(version.Electra)) - writer := httptest.NewRecorder() - writer.Body = &bytes.Buffer{} + s.SubmitAggregateAndProofsV2(writer, request) + assert.Equal(t, http.StatusBadRequest, writer.Code) + e := &httputil.DefaultJsonError{} + require.NoError(t, json.Unmarshal(writer.Body.Bytes(), e)) + assert.Equal(t, http.StatusBadRequest, e.Code) + assert.Equal(t, true, strings.Contains(e.Message, "No data submitted")) + }) + t.Run("empty", func(t *testing.T) { + var body bytes.Buffer + _, err := body.WriteString("[]") + require.NoError(t, err) + request := httptest.NewRequest(http.MethodPost, "http://example.com", &body) + request.Header.Set(api.VersionHeader, version.String(version.Electra)) + writer := httptest.NewRecorder() + writer.Body = &bytes.Buffer{} - s.SubmitAggregateAndProofsV2(writer, request) - e := &httputil.DefaultJsonError{} - require.NoError(t, json.Unmarshal(writer.Body.Bytes(), e)) - assert.Equal(t, http.StatusBadRequest, e.Code) - assert.ErrorContains(t, "electra aggregate and proof not supported yet", errors.New(e.Message)) - }) - t.Run("no body", func(t *testing.T) { - request := httptest.NewRequest(http.MethodPost, "http://example.com", nil) - request.Header.Set(api.VersionHeader, version.String(version.Electra)) - writer := httptest.NewRecorder() - writer.Body = &bytes.Buffer{} + s.SubmitAggregateAndProofsV2(writer, request) + assert.Equal(t, http.StatusBadRequest, writer.Code) + e := &httputil.DefaultJsonError{} + require.NoError(t, json.Unmarshal(writer.Body.Bytes(), e)) + assert.Equal(t, http.StatusBadRequest, e.Code) + assert.Equal(t, true, strings.Contains(e.Message, "No data submitted")) + }) + t.Run("empty-pre-electra", func(t *testing.T) { + var body bytes.Buffer + _, err := body.WriteString("[]") + require.NoError(t, err) + request := httptest.NewRequest(http.MethodPost, "http://example.com", &body) + request.Header.Set(api.VersionHeader, version.String(version.Altair)) + writer := httptest.NewRecorder() + writer.Body = &bytes.Buffer{} - s.SubmitAggregateAndProofsV2(writer, request) - assert.Equal(t, http.StatusBadRequest, writer.Code) - e := &httputil.DefaultJsonError{} - require.NoError(t, json.Unmarshal(writer.Body.Bytes(), e)) - assert.Equal(t, http.StatusBadRequest, e.Code) - assert.Equal(t, true, strings.Contains(e.Message, "No data submitted")) - }) - t.Run("no body-pre-electra", func(t *testing.T) { - request := httptest.NewRequest(http.MethodPost, "http://example.com", nil) - request.Header.Set(api.VersionHeader, version.String(version.Bellatrix)) - writer := httptest.NewRecorder() - writer.Body = &bytes.Buffer{} + s.SubmitAggregateAndProofsV2(writer, request) + 
assert.Equal(t, http.StatusBadRequest, writer.Code) + e := &httputil.DefaultJsonError{} + require.NoError(t, json.Unmarshal(writer.Body.Bytes(), e)) + assert.Equal(t, http.StatusBadRequest, e.Code) + assert.Equal(t, true, strings.Contains(e.Message, "No data submitted")) + }) + t.Run("invalid", func(t *testing.T) { + var body bytes.Buffer + _, err := body.WriteString(invalidAggregateElectra) + require.NoError(t, err) + request := httptest.NewRequest(http.MethodPost, "http://example.com", &body) + request.Header.Set(api.VersionHeader, version.String(version.Electra)) + writer := httptest.NewRecorder() + writer.Body = &bytes.Buffer{} - s.SubmitAggregateAndProofsV2(writer, request) - assert.Equal(t, http.StatusBadRequest, writer.Code) - e := &httputil.DefaultJsonError{} - require.NoError(t, json.Unmarshal(writer.Body.Bytes(), e)) - assert.Equal(t, http.StatusBadRequest, e.Code) - assert.Equal(t, true, strings.Contains(e.Message, "No data submitted")) - }) - t.Run("empty", func(t *testing.T) { - var body bytes.Buffer - _, err := body.WriteString("[]") - require.NoError(t, err) - request := httptest.NewRequest(http.MethodPost, "http://example.com", &body) - request.Header.Set(api.VersionHeader, version.String(version.Electra)) - writer := httptest.NewRecorder() - writer.Body = &bytes.Buffer{} + s.SubmitAggregateAndProofsV2(writer, request) + assert.Equal(t, http.StatusBadRequest, writer.Code) + e := &httputil.DefaultJsonError{} + require.NoError(t, json.Unmarshal(writer.Body.Bytes(), e)) + assert.Equal(t, http.StatusBadRequest, e.Code) + }) + t.Run("invalid-pre-electra", func(t *testing.T) { + var body bytes.Buffer + _, err := body.WriteString(invalidAggregate) + require.NoError(t, err) + request := httptest.NewRequest(http.MethodPost, "http://example.com", &body) + request.Header.Set(api.VersionHeader, version.String(version.Deneb)) + writer := httptest.NewRecorder() + writer.Body = &bytes.Buffer{} - s.SubmitAggregateAndProofsV2(writer, request) - assert.Equal(t, http.StatusBadRequest, writer.Code) - e := &httputil.DefaultJsonError{} - require.NoError(t, json.Unmarshal(writer.Body.Bytes(), e)) - assert.Equal(t, http.StatusBadRequest, e.Code) - assert.Equal(t, true, strings.Contains(e.Message, "No data submitted")) - }) - t.Run("empty-pre-electra", func(t *testing.T) { - var body bytes.Buffer - _, err := body.WriteString("[]") - require.NoError(t, err) - request := httptest.NewRequest(http.MethodPost, "http://example.com", &body) - request.Header.Set(api.VersionHeader, version.String(version.Altair)) - writer := httptest.NewRecorder() - writer.Body = &bytes.Buffer{} - - s.SubmitAggregateAndProofsV2(writer, request) - assert.Equal(t, http.StatusBadRequest, writer.Code) - e := &httputil.DefaultJsonError{} - require.NoError(t, json.Unmarshal(writer.Body.Bytes(), e)) - assert.Equal(t, http.StatusBadRequest, e.Code) - assert.Equal(t, true, strings.Contains(e.Message, "No data submitted")) - }) - t.Run("invalid", func(t *testing.T) { - var body bytes.Buffer - _, err := body.WriteString(invalidAggregateElectra) - require.NoError(t, err) - request := httptest.NewRequest(http.MethodPost, "http://example.com", &body) - request.Header.Set(api.VersionHeader, version.String(version.Electra)) - writer := httptest.NewRecorder() - writer.Body = &bytes.Buffer{} - - s.SubmitAggregateAndProofsV2(writer, request) - assert.Equal(t, http.StatusBadRequest, writer.Code) - e := &httputil.DefaultJsonError{} - require.NoError(t, json.Unmarshal(writer.Body.Bytes(), e)) - assert.Equal(t, http.StatusBadRequest, e.Code) - }) 
- t.Run("invalid-pre-electra", func(t *testing.T) { - var body bytes.Buffer - _, err := body.WriteString(invalidAggregate) - require.NoError(t, err) - request := httptest.NewRequest(http.MethodPost, "http://example.com", &body) - request.Header.Set(api.VersionHeader, version.String(version.Deneb)) - writer := httptest.NewRecorder() - writer.Body = &bytes.Buffer{} - - s.SubmitAggregateAndProofsV2(writer, request) - assert.Equal(t, http.StatusBadRequest, writer.Code) - e := &httputil.DefaultJsonError{} - require.NoError(t, json.Unmarshal(writer.Body.Bytes(), e)) - assert.Equal(t, http.StatusBadRequest, e.Code) - }) + s.SubmitAggregateAndProofsV2(writer, request) + assert.Equal(t, http.StatusBadRequest, writer.Code) + e := &httputil.DefaultJsonError{} + require.NoError(t, json.Unmarshal(writer.Body.Bytes(), e)) + assert.Equal(t, http.StatusBadRequest, e.Code) }) } diff --git a/changelog/radek_v7-remove-apis.md b/changelog/radek_v7-remove-apis.md new file mode 100644 index 0000000000..18f215d070 --- /dev/null +++ b/changelog/radek_v7-remove-apis.md @@ -0,0 +1,3 @@ +### Removed + +- Remove Beacon API endpoints that were deprecated in Electra: `GET /eth/v1/beacon/deposit_snapshot`, `GET /eth/v1/beacon/blocks/{block_id}/attestations`, `GET /eth/v1/beacon/pool/attestations`, `POST /eth/v1/beacon/pool/attestations`, `GET /eth/v1/beacon/pool/attester_slashings`, `POST /eth/v1/beacon/pool/attester_slashings`, `GET /eth/v1/validator/aggregate_attestation`, `POST /eth/v1/validator/aggregate_and_proofs`, `POST /eth/v1/beacon/blocks`, `POST /eth/v1/beacon/blinded_blocks`, `GET /eth/v1/builder/states/{state_id}/expected_withdrawals`. \ No newline at end of file diff --git a/testing/endtoend/evaluators/beaconapi/requests.go b/testing/endtoend/evaluators/beaconapi/requests.go index adab0bba94..bff0f706a6 100644 --- a/testing/endtoend/evaluators/beaconapi/requests.go +++ b/testing/endtoend/evaluators/beaconapi/requests.go @@ -136,7 +136,7 @@ var getRequests = map[string]endpoint{ v2PathTemplate, withSanityCheckOnly()), "/beacon/pool/attester_slashings": newMetadata[structs.GetAttesterSlashingsResponse]( - v1PathTemplate, + v2PathTemplate, withSanityCheckOnly()), "/beacon/pool/proposer_slashings": newMetadata[structs.GetProposerSlashingsResponse]( v1PathTemplate, @@ -147,12 +147,6 @@ var getRequests = map[string]endpoint{ "/beacon/pool/bls_to_execution_changes": newMetadata[structs.BLSToExecutionChangesPoolResponse]( v1PathTemplate, withSanityCheckOnly()), - "/builder/states/{param1}/expected_withdrawals": newMetadata[structs.ExpectedWithdrawalsResponse]( - v1PathTemplate, - withStart(params.CapellaE2EForkEpoch), - withParams(func(_ primitives.Epoch) []string { - return []string{"head"} - })), "/config/fork_schedule": newMetadata[structs.GetForkScheduleResponse]( v1PathTemplate, withCustomEval(func(p interface{}, lh interface{}) error { @@ -208,7 +202,7 @@ var getRequests = map[string]endpoint{ withCustomEval(func(p interface{}, _ interface{}) error { pResp, ok := p.(*structs.GetVersionResponse) if !ok { - return fmt.Errorf(msgWrongJSON, &structs.ListAttestationsResponse{}, p) + return fmt.Errorf(msgWrongJSON, &structs.GetVersionResponse{}, p) } if pResp.Data == nil { return errEmptyPrysmData diff --git a/validator/client/beacon-api/propose_attestation.go b/validator/client/beacon-api/propose_attestation.go index 5069ce4812..c251f41a59 100644 --- a/validator/client/beacon-api/propose_attestation.go +++ b/validator/client/beacon-api/propose_attestation.go @@ -4,10 +4,8 @@ import ( "bytes" "context" 
"encoding/json" - "net/http" "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/network/httputil" ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" "github.com/OffchainLabs/prysm/v6/runtime/version" "github.com/OffchainLabs/prysm/v6/time/slots" @@ -31,25 +29,8 @@ func (c *beaconApiValidatorClient) proposeAttestation(ctx context.Context, attes bytes.NewBuffer(marshalledAttestation), nil, ) - errJson := &httputil.DefaultJsonError{} if err != nil { - // TODO: remove this when v2 becomes default - if !errors.As(err, &errJson) { - return nil, err - } - if errJson.Code != http.StatusNotFound { - return nil, errJson - } - log.Debug("Endpoint /eth/v2/beacon/pool/attestations is not supported, falling back to older endpoints for submit attestation.") - if err = c.jsonRestHandler.Post( - ctx, - "/eth/v1/beacon/pool/attestations", - nil, - bytes.NewBuffer(marshalledAttestation), - nil, - ); err != nil { - return nil, err - } + return nil, err } attestationDataRoot, err := attestation.Data.HashTreeRoot() diff --git a/validator/client/beacon-api/propose_attestation_test.go b/validator/client/beacon-api/propose_attestation_test.go index 3dc9742b8c..3973d2bffe 100644 --- a/validator/client/beacon-api/propose_attestation_test.go +++ b/validator/client/beacon-api/propose_attestation_test.go @@ -4,13 +4,11 @@ import ( "bytes" "encoding/json" "errors" - "net/http" "testing" "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" "github.com/OffchainLabs/prysm/v6/config/params" "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/network/httputil" ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" "github.com/OffchainLabs/prysm/v6/runtime/version" "github.com/OffchainLabs/prysm/v6/testing/assert" @@ -150,72 +148,6 @@ func TestProposeAttestation(t *testing.T) { } } -func TestProposeAttestationFallBack(t *testing.T) { - attestation := ðpb.Attestation{ - AggregationBits: testhelpers.FillByteSlice(4, 74), - Data: ðpb.AttestationData{ - Slot: 75, - CommitteeIndex: 76, - BeaconBlockRoot: testhelpers.FillByteSlice(32, 38), - Source: ðpb.Checkpoint{ - Epoch: 78, - Root: testhelpers.FillByteSlice(32, 79), - }, - Target: ðpb.Checkpoint{ - Epoch: 80, - Root: testhelpers.FillByteSlice(32, 81), - }, - }, - Signature: testhelpers.FillByteSlice(96, 82), - } - - ctrl := gomock.NewController(t) - jsonRestHandler := mock.NewMockJsonRestHandler(ctrl) - - var marshalledAttestations []byte - if helpers.ValidateNilAttestation(attestation) == nil { - b, err := json.Marshal(jsonifyAttestations([]*ethpb.Attestation{attestation})) - require.NoError(t, err) - marshalledAttestations = b - } - - ctx := t.Context() - headers := map[string]string{"Eth-Consensus-Version": version.String(attestation.Version())} - jsonRestHandler.EXPECT().Post( - gomock.Any(), - "/eth/v2/beacon/pool/attestations", - headers, - bytes.NewBuffer(marshalledAttestations), - nil, - ).Return( - &httputil.DefaultJsonError{ - Code: http.StatusNotFound, - }, - ).Times(1) - - jsonRestHandler.EXPECT().Post( - gomock.Any(), - "/eth/v1/beacon/pool/attestations", - nil, - bytes.NewBuffer(marshalledAttestations), - nil, - ).Return( - nil, - ).Times(1) - - validatorClient := &beaconApiValidatorClient{jsonRestHandler: jsonRestHandler} - proposeResponse, err := validatorClient.proposeAttestation(ctx, attestation) - - require.NoError(t, err) - require.NotNil(t, proposeResponse) - - expectedAttestationDataRoot, err := attestation.Data.HashTreeRoot() - 
require.NoError(t, err) - - // Make sure that the attestation data root is set - assert.DeepEqual(t, expectedAttestationDataRoot[:], proposeResponse.AttestationDataRoot) -} - func TestProposeAttestationElectra(t *testing.T) { params.SetupTestConfigCleanup(t) params.BeaconConfig().ElectraForkEpoch = 0 diff --git a/validator/client/beacon-api/submit_aggregate_selection_proof.go b/validator/client/beacon-api/submit_aggregate_selection_proof.go index 3aa959d256..ed0f89cdad 100644 --- a/validator/client/beacon-api/submit_aggregate_selection_proof.go +++ b/validator/client/beacon-api/submit_aggregate_selection_proof.go @@ -3,7 +3,6 @@ package beacon_api import ( "context" "encoding/json" - "net/http" "net/url" "strconv" @@ -11,7 +10,6 @@ import ( "github.com/OffchainLabs/prysm/v6/api/server/structs" "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/network/httputil" ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" "github.com/ethereum/go-ethereum/common/hexutil" "github.com/pkg/errors" @@ -132,23 +130,8 @@ func (c *beaconApiValidatorClient) aggregateAttestation( var aggregateAttestationResponse structs.AggregateAttestationResponse err := c.jsonRestHandler.Get(ctx, endpoint, &aggregateAttestationResponse) - errJson := &httputil.DefaultJsonError{} if err != nil { - // TODO: remove this when v2 becomes default - if !errors.As(err, &errJson) { - return nil, err - } - if errJson.Code != http.StatusNotFound { - return nil, errJson - } - log.Debug("Endpoint /eth/v2/validator/aggregate_attestation is not supported, falling back to older endpoints for get aggregated attestation.") - params = url.Values{} - params.Add("slot", strconv.FormatUint(uint64(slot), 10)) - params.Add("attestation_data_root", hexutil.Encode(attestationDataRoot)) - oldEndpoint := apiutil.BuildURL("/eth/v1/validator/aggregate_attestation", params) - if err = c.jsonRestHandler.Get(ctx, oldEndpoint, &aggregateAttestationResponse); err != nil { - return nil, err - } + return nil, err } return &aggregateAttestationResponse, nil diff --git a/validator/client/beacon-api/submit_aggregate_selection_proof_test.go b/validator/client/beacon-api/submit_aggregate_selection_proof_test.go index fbd6cd2ee9..8afcd27a6a 100644 --- a/validator/client/beacon-api/submit_aggregate_selection_proof_test.go +++ b/validator/client/beacon-api/submit_aggregate_selection_proof_test.go @@ -4,12 +4,10 @@ import ( "encoding/json" "errors" "fmt" - "net/http" "testing" "github.com/OffchainLabs/prysm/v6/api/server/structs" "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/network/httputil" ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" "github.com/OffchainLabs/prysm/v6/testing/assert" "github.com/OffchainLabs/prysm/v6/testing/require" @@ -187,129 +185,6 @@ func TestSubmitAggregateSelectionProof(t *testing.T) { } } -func TestSubmitAggregateSelectionProofFallBack(t *testing.T) { - const ( - pubkeyStr = "0x8000091c2ae64ee414a54c1cc1fc67dec663408bc636cb86756e0200e41a75c8f86603f104f02c856983d2783116be13" - syncingEndpoint = "/eth/v1/node/syncing" - attestationDataEndpoint = "/eth/v1/validator/attestation_data" - aggregateAttestationEndpoint = "/eth/v1/validator/aggregate_attestation" - aggregateAttestationV2Endpoint = "/eth/v2/validator/aggregate_attestation" - validatorIndex = primitives.ValidatorIndex(55293) - slotSignature = 
"0x8776a37d6802c4797d113169c5fcfda50e68a32058eb6356a6f00d06d7da64c841a00c7c38b9b94a204751eca53707bd03523ce4797827d9bacff116a6e776a20bbccff4b683bf5201b610797ed0502557a58a65c8395f8a1649b976c3112d15" - slot = primitives.Slot(123) - committeeIndex = primitives.CommitteeIndex(1) - committeesAtSlot = uint64(1) - ) - - attestationDataResponse := generateValidAttestation(uint64(slot), uint64(committeeIndex)) - attestationDataProto, err := attestationDataResponse.Data.ToConsensus() - require.NoError(t, err) - attestationDataRootBytes, err := attestationDataProto.HashTreeRoot() - require.NoError(t, err) - - aggregateAttestation := ðpb.Attestation{ - AggregationBits: testhelpers.FillByteSlice(4, 74), - Data: attestationDataProto, - Signature: testhelpers.FillByteSlice(96, 82), - } - - ctrl := gomock.NewController(t) - defer ctrl.Finish() - ctx := t.Context() - jsonRestHandler := mock.NewMockJsonRestHandler(ctrl) - - // Call node syncing endpoint to check if head is optimistic. - jsonRestHandler.EXPECT().Get( - gomock.Any(), - syncingEndpoint, - &structs.SyncStatusResponse{}, - ).SetArg( - 2, - structs.SyncStatusResponse{ - Data: &structs.SyncStatusResponseData{ - IsOptimistic: false, - }, - }, - ).Return( - nil, - ).Times(1) - - // Call attestation data to get attestation data root to query aggregate attestation. - jsonRestHandler.EXPECT().Get( - gomock.Any(), - fmt.Sprintf("%s?committee_index=%d&slot=%d", attestationDataEndpoint, committeeIndex, slot), - &structs.GetAttestationDataResponse{}, - ).SetArg( - 2, - attestationDataResponse, - ).Return( - nil, - ).Times(1) - - attestationJSON, err := json.Marshal(jsonifyAttestation(aggregateAttestation)) - require.NoError(t, err) - - // Call attestation data to get attestation data root to query aggregate attestation. - jsonRestHandler.EXPECT().Get( - gomock.Any(), - fmt.Sprintf("%s?attestation_data_root=%s&committee_index=%d&slot=%d", aggregateAttestationV2Endpoint, hexutil.Encode(attestationDataRootBytes[:]), committeeIndex, slot), - &structs.AggregateAttestationResponse{}, - ).Return( - &httputil.DefaultJsonError{ - Code: http.StatusNotFound, - }, - ).Times(1) - - // Call attestation data to get attestation data root to query aggregate attestation. 
- jsonRestHandler.EXPECT().Get( - gomock.Any(), - fmt.Sprintf("%s?attestation_data_root=%s&slot=%d", aggregateAttestationEndpoint, hexutil.Encode(attestationDataRootBytes[:]), slot), - &structs.AggregateAttestationResponse{}, - ).SetArg( - 2, - structs.AggregateAttestationResponse{ - Data: attestationJSON, - }, - ).Return( - nil, - ).Times(1) - - pubkey, err := hexutil.Decode(pubkeyStr) - require.NoError(t, err) - - slotSignatureBytes, err := hexutil.Decode(slotSignature) - require.NoError(t, err) - - expectedResponse := ðpb.AggregateSelectionResponse{ - AggregateAndProof: ðpb.AggregateAttestationAndProof{ - AggregatorIndex: primitives.ValidatorIndex(55293), - Aggregate: aggregateAttestation, - SelectionProof: slotSignatureBytes, - }, - } - - validatorClient := &beaconApiValidatorClient{ - jsonRestHandler: jsonRestHandler, - stateValidatorsProvider: beaconApiStateValidatorsProvider{ - jsonRestHandler: jsonRestHandler, - }, - dutiesProvider: beaconApiDutiesProvider{ - jsonRestHandler: jsonRestHandler, - }, - } - - actualResponse, err := validatorClient.submitAggregateSelectionProof(ctx, ðpb.AggregateSelectionRequest{ - Slot: slot, - CommitteeIndex: committeeIndex, - PublicKey: pubkey, - SlotSignature: slotSignatureBytes, - }, validatorIndex, committeesAtSlot) - - require.NoError(t, err) - assert.DeepEqual(t, expectedResponse, actualResponse) - -} - func TestSubmitAggregateSelectionProofElectra(t *testing.T) { const ( pubkeyStr = "0x8000091c2ae64ee414a54c1cc1fc67dec663408bc636cb86756e0200e41a75c8f86603f104f02c856983d2783116be13" diff --git a/validator/client/beacon-api/submit_signed_aggregate_proof.go b/validator/client/beacon-api/submit_signed_aggregate_proof.go index 224b5c36d4..e493610a5b 100644 --- a/validator/client/beacon-api/submit_signed_aggregate_proof.go +++ b/validator/client/beacon-api/submit_signed_aggregate_proof.go @@ -4,10 +4,8 @@ import ( "bytes" "context" "encoding/json" - "net/http" "github.com/OffchainLabs/prysm/v6/api/server/structs" - "github.com/OffchainLabs/prysm/v6/network/httputil" ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" "github.com/OffchainLabs/prysm/v6/runtime/version" "github.com/OffchainLabs/prysm/v6/time/slots" @@ -21,25 +19,8 @@ func (c *beaconApiValidatorClient) submitSignedAggregateSelectionProof(ctx conte } headers := map[string]string{"Eth-Consensus-Version": version.String(in.SignedAggregateAndProof.Version())} err = c.jsonRestHandler.Post(ctx, "/eth/v2/validator/aggregate_and_proofs", headers, bytes.NewBuffer(body), nil) - errJson := &httputil.DefaultJsonError{} if err != nil { - // TODO: remove this when v2 becomes default - if !errors.As(err, &errJson) { - return nil, err - } - if errJson.Code != http.StatusNotFound { - return nil, errJson - } - log.Debug("Endpoint /eth/v2/validator/aggregate_and_proofs is not supported, falling back to older endpoints for publish aggregate and proofs.") - if err = c.jsonRestHandler.Post( - ctx, - "/eth/v1/validator/aggregate_and_proofs", - nil, - bytes.NewBuffer(body), - nil, - ); err != nil { - return nil, err - } + return nil, err } attestationDataRoot, err := in.SignedAggregateAndProof.Message.Aggregate.Data.HashTreeRoot() diff --git a/validator/client/beacon-api/submit_signed_aggregate_proof_test.go b/validator/client/beacon-api/submit_signed_aggregate_proof_test.go index 26306c1c20..caa2967883 100644 --- a/validator/client/beacon-api/submit_signed_aggregate_proof_test.go +++ b/validator/client/beacon-api/submit_signed_aggregate_proof_test.go @@ -3,12 +3,10 @@ package beacon_api import ( 
"bytes" "encoding/json" - "net/http" "testing" "github.com/OffchainLabs/prysm/v6/api/server/structs" "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/network/httputil" ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" "github.com/OffchainLabs/prysm/v6/runtime/version" "github.com/OffchainLabs/prysm/v6/testing/assert" @@ -80,50 +78,6 @@ func TestSubmitSignedAggregateSelectionProof_BadRequest(t *testing.T) { assert.ErrorContains(t, "bad request", err) } -func TestSubmitSignedAggregateSelectionProof_Fallback(t *testing.T) { - ctrl := gomock.NewController(t) - defer ctrl.Finish() - - signedAggregateAndProof := generateSignedAggregateAndProofJson() - marshalledSignedAggregateSignedAndProof, err := json.Marshal([]*structs.SignedAggregateAttestationAndProof{jsonifySignedAggregateAndProof(signedAggregateAndProof)}) - require.NoError(t, err) - - ctx := t.Context() - - jsonRestHandler := mock.NewMockJsonRestHandler(ctrl) - headers := map[string]string{"Eth-Consensus-Version": version.String(signedAggregateAndProof.Message.Version())} - jsonRestHandler.EXPECT().Post( - gomock.Any(), - "/eth/v2/validator/aggregate_and_proofs", - headers, - bytes.NewBuffer(marshalledSignedAggregateSignedAndProof), - nil, - ).Return( - &httputil.DefaultJsonError{ - Code: http.StatusNotFound, - }, - ).Times(1) - jsonRestHandler.EXPECT().Post( - gomock.Any(), - "/eth/v1/validator/aggregate_and_proofs", - nil, - bytes.NewBuffer(marshalledSignedAggregateSignedAndProof), - nil, - ).Return( - nil, - ).Times(1) - - attestationDataRoot, err := signedAggregateAndProof.Message.Aggregate.Data.HashTreeRoot() - require.NoError(t, err) - - validatorClient := &beaconApiValidatorClient{jsonRestHandler: jsonRestHandler} - resp, err := validatorClient.submitSignedAggregateSelectionProof(ctx, ðpb.SignedAggregateSubmitRequest{ - SignedAggregateAndProof: signedAggregateAndProof, - }) - require.NoError(t, err) - assert.DeepEqual(t, attestationDataRoot[:], resp.AttestationDataRoot) -} - func TestSubmitSignedAggregateSelectionProofElectra_Valid(t *testing.T) { params.SetupTestConfigCleanup(t) params.BeaconConfig().ElectraForkEpoch = 0 From 8b6f187b159d33dbf861b76072b15fb297984284 Mon Sep 17 00:00:00 2001 From: james-prysm <90280386+james-prysm@users.noreply.github.com> Date: Wed, 5 Nov 2025 14:41:36 -0800 Subject: [PATCH 081/103] Add support for fulu fork epoch and bpo schedule (#15975) * wip * fixing tests * adding script to update workspace for eth clients * updating test sepc to 1.6.0 and fixing broadcaster test * fix specrefs * more ethspecify fixes * still trying to fix ethspecify * fixing attestation tests * fixing sha for consensus specs * removing script for now until i have something more standard * fixing more p2p tests * fixing discovery tests * attempting to fix discovery test flakeyness * attempting to fix port binding issue * more attempts to fix flakey tests * Revert "more attempts to fix flakey tests" This reverts commit 25e81837030f3f4d738ac941153b58524c99c67e. * Revert "attempting to fix port binding issue" This reverts commit 583df8000d00f13726ce75aad7d7349d840489d7. * Revert "attempting to fix discovery test flakeyness" This reverts commit 3c76525870f3b8e069fdef5dc0900f38a9fce8ac. * Revert "fixing discovery tests" This reverts commit 8c701bf3b9e0bfd2e59ade988ef78982d12c5865. * Revert "fixing more p2p tests" This reverts commit 140d5db203e3e394f3150f5d910004a2a17c0255. * Revert "fixing attestation tests" This reverts commit 26ded244cb6c6729c5d0ecd46f8a8aac0e2979ab. 
* fixing attestation tests * fixing more p2p tests * fixing discovery tests * attempting to fix discovery test flakeyness * attempting to fix port binding issue * more attempts to fix flakey tests * changelog * fixing import * adding some missing dependencies, but TestService_BroadcastAttestationWithDiscoveryAttempts is still failing * attempting to fix test * reverting test as it migrated to other pr * reverting test * fixing test from merge * Fix `TestService_BroadcastAttestationWithDiscoveryAttempts`. * Fix again `TestService_Start_OnlyStartsOnce`. * fixing TestListenForNewNodes * removing manual set of fulu epoch * missed a few * fixing subnet test * Update beacon-chain/rpc/eth/config/handlers_test.go Co-authored-by: Preston Van Loon * removing a few more missed spots of reverting fulu epoch setting * updating test name based on feedback * fixing rest apis, they actually need the setting of the epoch due to the guard --------- Co-authored-by: Manu NALEPA Co-authored-by: Preston Van Loon --- WORKSPACE | 16 +- beacon-chain/das/availability_blobs_test.go | 10 +- beacon-chain/p2p/broadcaster_test.go | 30 +- beacon-chain/p2p/discovery_test.go | 100 +++++-- beacon-chain/p2p/service_test.go | 34 ++- beacon-chain/p2p/subnets_test.go | 7 + .../rpc/eth/beacon/handlers_pool_test.go | 270 ++++++++++++++++++ beacon-chain/rpc/eth/config/handlers_test.go | 3 +- changelog/james-prysm_fulu-fork-epoch.md | 8 + config/params/fork_test.go | 6 +- config/params/mainnet_config.go | 13 +- specrefs/.ethspecify.yml | 2 +- specrefs/configs.yml | 14 +- 13 files changed, 446 insertions(+), 67 deletions(-) create mode 100644 changelog/james-prysm_fulu-fork-epoch.md diff --git a/WORKSPACE b/WORKSPACE index 651d8177c3..9c774b7358 100644 --- a/WORKSPACE +++ b/WORKSPACE @@ -253,16 +253,16 @@ filegroup( url = "https://github.com/ethereum/EIPs/archive/5480440fe51742ed23342b68cf106cefd427e39d.tar.gz", ) -consensus_spec_version = "v1.6.0-beta.2" +consensus_spec_version = "v1.6.0" load("@prysm//tools:download_spectests.bzl", "consensus_spec_tests") consensus_spec_tests( name = "consensus_spec_tests", flavors = { - "general": "sha256-oEj0MTViJHjZo32nABK36gfvSXpbwkBk/jt6Mj7pWFI=", - "minimal": "sha256-cS4NPv6IRBoCSmWomQ8OEo8IsVNW9YawUFqoRZQBUj4=", - "mainnet": "sha256-BYuLndMPAh4p13IRJgNfVakrCVL69KRrNw2tdc3ETbE=", + "general": "sha256-54hTaUNF9nLg+hRr3oHoq0yjZpW3MNiiUUuCQu6Rajk=", + "minimal": "sha256-1JHIGg3gVMjvcGYRHR5cwdDgOvX47oR/MWp6gyAeZfA=", + "mainnet": "sha256-292h3W2Ffts0YExgDTyxYe9Os7R0bZIXuAaMO8P6kl4=", }, version = consensus_spec_version, ) @@ -278,7 +278,7 @@ filegroup( visibility = ["//visibility:public"], ) """, - integrity = "sha256-MForEP9dTe0z3ZkTHjX4H6waSkSTghf3gQHPwrSCCro=", + integrity = "sha256-VzBgrEokvYSMIIXVnSA5XS9I3m9oxpvToQGxC1N5lzw=", strip_prefix = "consensus-specs-" + consensus_spec_version[1:], url = "https://github.com/ethereum/consensus-specs/archive/refs/tags/%s.tar.gz" % consensus_spec_version, ) @@ -327,9 +327,9 @@ filegroup( visibility = ["//visibility:public"], ) """, - integrity = "sha256-NZr/gsQK9rBHRnznlPBiNzJpK8MPMrfUa3f+QYqn1+g=", - strip_prefix = "mainnet-978f1794eada6f85bee76e4d2d5959a5fb8e0cc5", - url = "https://github.com/eth-clients/mainnet/archive/978f1794eada6f85bee76e4d2d5959a5fb8e0cc5.tar.gz", + integrity = "sha256-+mqMXyboedVw8Yp0v+U9GDz98QoC1SZET8mjaKPX+AI=", + strip_prefix = "mainnet-980aee8893a2291d473c38f63797d5bc370fa381", + url = "https://github.com/eth-clients/mainnet/archive/980aee8893a2291d473c38f63797d5bc370fa381.tar.gz", ) http_archive( diff --git 
a/beacon-chain/das/availability_blobs_test.go b/beacon-chain/das/availability_blobs_test.go index ab49e74489..559eccad98 100644 --- a/beacon-chain/das/availability_blobs_test.go +++ b/beacon-chain/das/availability_blobs_test.go @@ -51,16 +51,20 @@ func Test_commitmentsToCheck(t *testing.T) { name: "commitments within da", block: func(t *testing.T) blocks.ROBlock { d := util.NewBeaconBlockFulu() - d.Block.Body.BlobKzgCommitments = commits[:maxBlobs] d.Block.Slot = fulu + 100 + mb := params.GetNetworkScheduleEntry(slots.ToEpoch(d.Block.Slot)).MaxBlobsPerBlock + d.Block.Body.BlobKzgCommitments = commits[:mb] sb, err := blocks.NewSignedBeaconBlock(d) require.NoError(t, err) rb, err := blocks.NewROBlock(sb) require.NoError(t, err) return rb }, - commits: commits[:maxBlobs], - slot: fulu + 100, + commits: func() [][]byte { + mb := params.GetNetworkScheduleEntry(slots.ToEpoch(fulu + 100)).MaxBlobsPerBlock + return commits[:mb] + }(), + slot: fulu + 100, }, { name: "commitments outside da", diff --git a/beacon-chain/p2p/broadcaster_test.go b/beacon-chain/p2p/broadcaster_test.go index 9565008318..382035842e 100644 --- a/beacon-chain/p2p/broadcaster_test.go +++ b/beacon-chain/p2p/broadcaster_test.go @@ -13,6 +13,7 @@ import ( "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/kzg" "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" "github.com/OffchainLabs/prysm/v6/beacon-chain/core/peerdas" + testDB "github.com/OffchainLabs/prysm/v6/beacon-chain/db/testing" "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/peers" "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/peers/scorers" p2ptest "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/testing" @@ -218,19 +219,30 @@ func TestService_BroadcastAttestation(t *testing.T) { func TestService_BroadcastAttestationWithDiscoveryAttempts(t *testing.T) { const port = uint(2000) + // The DB has to be shared in all peers to avoid the + // duplicate metrics collector registration attempted. + // However, we don't care for this test. + db := testDB.SetupDB(t) + // Setup bootnode. - cfg := &Config{PingInterval: testPingInterval} + cfg := &Config{PingInterval: testPingInterval, DB: db} cfg.UDPPort = uint(port) _, pkey := createAddrAndPrivKey(t) ipAddr := net.ParseIP("127.0.0.1") genesisTime := time.Now() genesisValidatorsRoot := make([]byte, 32) + s := &Service{ cfg: cfg, genesisTime: genesisTime, genesisValidatorsRoot: genesisValidatorsRoot, custodyInfo: &custodyInfo{}, + ctx: t.Context(), + custodyInfoSet: make(chan struct{}), } + + close(s.custodyInfoSet) + bootListener, err := s.createListener(ipAddr, pkey) require.NoError(t, err) defer bootListener.Close() @@ -245,6 +257,7 @@ func TestService_BroadcastAttestationWithDiscoveryAttempts(t *testing.T) { Discv5BootStrapAddrs: []string{bootNode.String()}, MaxPeers: 2, PingInterval: testPingInterval, + DB: db, } // Setup 2 different hosts for i := uint(1); i <= 2; i++ { @@ -259,7 +272,12 @@ func TestService_BroadcastAttestationWithDiscoveryAttempts(t *testing.T) { genesisTime: genesisTime, genesisValidatorsRoot: genesisValidatorsRoot, custodyInfo: &custodyInfo{}, + ctx: t.Context(), + custodyInfoSet: make(chan struct{}), } + + close(s.custodyInfoSet) + listener, err := s.startDiscoveryV5(ipAddr, pkey) // Set for 2nd peer if i == 2 { @@ -711,18 +729,26 @@ func TestService_BroadcastDataColumn(t *testing.T) { // Create a host. 
_, pkey, ipAddr := createHost(t, port) + // Create a shared DB for the service + db := testDB.SetupDB(t) + + // Create and close the custody info channel immediately since custodyInfo is already set + custodyInfoSet := make(chan struct{}) + close(custodyInfoSet) + service := &Service{ ctx: ctx, host: p1.BHost, pubsub: p1.PubSub(), joinedTopics: map[string]*pubsub.Topic{}, - cfg: &Config{}, + cfg: &Config{DB: db}, genesisTime: time.Now(), genesisValidatorsRoot: bytesutil.PadTo([]byte{'A'}, 32), subnetsLock: make(map[uint64]*sync.RWMutex), subnetsLockLock: sync.Mutex{}, peers: peers.NewStatus(ctx, &peers.StatusConfig{ScorerParams: &scorers.Config{}}), custodyInfo: &custodyInfo{}, + custodyInfoSet: custodyInfoSet, } // Create a listener. diff --git a/beacon-chain/p2p/discovery_test.go b/beacon-chain/p2p/discovery_test.go index cd48877690..1477e08cfb 100644 --- a/beacon-chain/p2p/discovery_test.go +++ b/beacon-chain/p2p/discovery_test.go @@ -136,20 +136,26 @@ func setNodeSubnets(localNode *enode.LocalNode, attSubnets []uint64) { } func TestCreateListener(t *testing.T) { - port := 1024 ipAddr, pkey := createAddrAndPrivKey(t) + + db := testDB.SetupDB(t) + custodyInfoSet := make(chan struct{}) + close(custodyInfoSet) + s := &Service{ + ctx: t.Context(), genesisTime: time.Now(), genesisValidatorsRoot: bytesutil.PadTo([]byte{'A'}, 32), - cfg: &Config{UDPPort: uint(port)}, + cfg: &Config{UDPPort: 2200, DB: db}, custodyInfo: &custodyInfo{}, + custodyInfoSet: custodyInfoSet, } listener, err := s.createListener(ipAddr, pkey) require.NoError(t, err) defer listener.Close() assert.Equal(t, true, listener.Self().IP().Equal(ipAddr), "IP address is not the expected type") - assert.Equal(t, port, listener.Self().UDP(), "Incorrect port number") + assert.Equal(t, 2200, listener.Self().UDP(), "Incorrect port number") pubkey := listener.Self().Pubkey() XisSame := pkey.PublicKey.X.Cmp(pubkey.X) == 0 @@ -161,15 +167,21 @@ func TestCreateListener(t *testing.T) { } func TestStartDiscV5_DiscoverAllPeers(t *testing.T) { - port := 2000 ipAddr, pkey := createAddrAndPrivKey(t) genesisTime := time.Now() genesisValidatorsRoot := make([]byte, 32) + + db := testDB.SetupDB(t) + custodyInfoSet := make(chan struct{}) + close(custodyInfoSet) + s := &Service{ - cfg: &Config{UDPPort: uint(port), PingInterval: testPingInterval, DisableLivenessCheck: true}, + ctx: t.Context(), + cfg: &Config{UDPPort: 6000, PingInterval: testPingInterval, DisableLivenessCheck: true, DB: db}, // Use high port to reduce conflicts genesisTime: genesisTime, genesisValidatorsRoot: genesisValidatorsRoot, custodyInfo: &custodyInfo{}, + custodyInfoSet: custodyInfoSet, } bootListener, err := s.createListener(ipAddr, pkey) require.NoError(t, err) @@ -183,19 +195,26 @@ func TestStartDiscV5_DiscoverAllPeers(t *testing.T) { var listeners []*listenerWrapper for i := 1; i <= 5; i++ { - port = 3000 + i + port := 6000 + i // Use unique high ports for peer discovery cfg := &Config{ Discv5BootStrapAddrs: []string{bootNode.String()}, UDPPort: uint(port), PingInterval: testPingInterval, DisableLivenessCheck: true, + DB: db, } ipAddr, pkey := createAddrAndPrivKey(t) + + custodyInfoSetLoop := make(chan struct{}) + close(custodyInfoSetLoop) + s = &Service{ + ctx: t.Context(), cfg: cfg, genesisTime: genesisTime, genesisValidatorsRoot: genesisValidatorsRoot, custodyInfo: &custodyInfo{}, + custodyInfoSet: custodyInfoSetLoop, } listener, err := s.startDiscoveryV5(ipAddr, pkey) assert.NoError(t, err, "Could not start discovery for node") @@ -220,16 +239,6 @@ func 
TestStartDiscV5_DiscoverAllPeers(t *testing.T) { } func TestCreateLocalNode(t *testing.T) { - params.SetupTestConfigCleanup(t) - - // Set the fulu fork epoch to something other than the far future epoch. - initFuluForkEpoch := params.BeaconConfig().FuluForkEpoch - params.BeaconConfig().FuluForkEpoch = 42 - - defer func() { - params.BeaconConfig().FuluForkEpoch = initFuluForkEpoch - }() - testCases := []struct { name string cfg *Config @@ -264,11 +273,11 @@ func TestCreateLocalNode(t *testing.T) { for _, tt := range testCases { t.Run(tt.name, func(t *testing.T) { - // Define ports. + // Define ports. Use unique ports since this test validates ENR content. const ( - udpPort = 2000 - tcpPort = 3000 - quicPort = 3000 + udpPort = 3100 + tcpPort = 3101 + quicPort = 3102 ) custodyRequirement := params.BeaconConfig().CustodyRequirement @@ -344,13 +353,19 @@ func TestCreateLocalNode(t *testing.T) { } func TestRebootDiscoveryListener(t *testing.T) { - port := 1024 ipAddr, pkey := createAddrAndPrivKey(t) + + db := testDB.SetupDB(t) + custodyInfoSet := make(chan struct{}) + close(custodyInfoSet) + s := &Service{ + ctx: t.Context(), genesisTime: time.Now(), genesisValidatorsRoot: bytesutil.PadTo([]byte{'A'}, 32), - cfg: &Config{UDPPort: uint(port)}, + cfg: &Config{UDPPort: 0, DB: db}, // Use 0 to let OS assign an available port custodyInfo: &custodyInfo{}, + custodyInfoSet: custodyInfoSet, } createListener := func() (*discover.UDPv5, error) { @@ -379,11 +394,17 @@ func TestRebootDiscoveryListener(t *testing.T) { func TestMultiAddrsConversion_InvalidIPAddr(t *testing.T) { addr := net.ParseIP("invalidIP") _, pkey := createAddrAndPrivKey(t) + + custodyInfoSet := make(chan struct{}) + close(custodyInfoSet) + s := &Service{ + ctx: t.Context(), genesisTime: time.Now(), genesisValidatorsRoot: bytesutil.PadTo([]byte{'A'}, 32), cfg: &Config{}, custodyInfo: &custodyInfo{}, + custodyInfoSet: custodyInfoSet, } node, err := s.createLocalNode(pkey, addr, 0, 0, 0) require.NoError(t, err) @@ -394,15 +415,23 @@ func TestMultiAddrsConversion_InvalidIPAddr(t *testing.T) { func TestMultiAddrConversion_OK(t *testing.T) { hook := logTest.NewGlobal() ipAddr, pkey := createAddrAndPrivKey(t) + + db := testDB.SetupDB(t) + custodyInfoSet := make(chan struct{}) + close(custodyInfoSet) + s := &Service{ + ctx: t.Context(), cfg: &Config{ - UDPPort: 2000, - TCPPort: 3000, - QUICPort: 3000, + UDPPort: 0, // Use 0 to let OS assign an available port + TCPPort: 0, + QUICPort: 0, + DB: db, }, genesisTime: time.Now(), genesisValidatorsRoot: bytesutil.PadTo([]byte{'A'}, 32), custodyInfo: &custodyInfo{}, + custodyInfoSet: custodyInfoSet, } listener, err := s.createListener(ipAddr, pkey) require.NoError(t, err) @@ -472,13 +501,20 @@ func TestHostIsResolved(t *testing.T) { "2001:4860:4860::8844": true, } + db := testDB.SetupDB(t) + custodyInfoSet := make(chan struct{}) + close(custodyInfoSet) + s := &Service{ + ctx: t.Context(), cfg: &Config{ HostDNS: host, + DB: db, }, genesisTime: time.Now(), genesisValidatorsRoot: bytesutil.PadTo([]byte{'A'}, 32), custodyInfo: &custodyInfo{}, + custodyInfoSet: custodyInfoSet, } ip, key := createAddrAndPrivKey(t) list, err := s.createListener(ip, key) @@ -540,15 +576,21 @@ func TestOutboundPeerThreshold(t *testing.T) { } func TestUDPMultiAddress(t *testing.T) { - port := 6500 ipAddr, pkey := createAddrAndPrivKey(t) genesisTime := time.Now() genesisValidatorsRoot := make([]byte, 32) + + db := testDB.SetupDB(t) + custodyInfoSet := make(chan struct{}) + close(custodyInfoSet) + s := &Service{ - cfg: 
&Config{UDPPort: uint(port)}, + ctx: t.Context(), + cfg: &Config{UDPPort: 2500, DB: db}, genesisTime: genesisTime, genesisValidatorsRoot: genesisValidatorsRoot, custodyInfo: &custodyInfo{}, + custodyInfoSet: custodyInfoSet, } createListener := func() (*discover.UDPv5, error) { @@ -562,7 +604,7 @@ func TestUDPMultiAddress(t *testing.T) { multiAddresses, err := s.DiscoveryAddresses() require.NoError(t, err) require.Equal(t, true, len(multiAddresses) > 0) - assert.Equal(t, true, strings.Contains(multiAddresses[0].String(), fmt.Sprintf("%d", port))) + assert.Equal(t, true, strings.Contains(multiAddresses[0].String(), fmt.Sprintf("%d", 2500))) assert.Equal(t, true, strings.Contains(multiAddresses[0].String(), "udp")) } @@ -912,7 +954,7 @@ func TestRefreshPersistentSubnets(t *testing.T) { actualPingCount++ return nil }, - cfg: &Config{UDPPort: 2000, DB: testDB.SetupDB(t)}, + cfg: &Config{UDPPort: 0, DB: testDB.SetupDB(t)}, // Use 0 to let OS assign an available port peers: p2p.Peers(), genesisTime: time.Now().Add(-time.Duration(tc.epochSinceGenesis*secondsPerEpoch) * time.Second), genesisValidatorsRoot: bytesutil.PadTo([]byte{'A'}, 32), diff --git a/beacon-chain/p2p/service_test.go b/beacon-chain/p2p/service_test.go index df6a2789e3..4d16057e5d 100644 --- a/beacon-chain/p2p/service_test.go +++ b/beacon-chain/p2p/service_test.go @@ -58,14 +58,13 @@ func TestService_Stop_DontPanicIfDv5ListenerIsNotInited(t *testing.T) { } func TestService_Start_OnlyStartsOnce(t *testing.T) { - params.SetupTestConfigCleanup(t) hook := logTest.NewGlobal() cs := startup.NewClockSynchronizer() cfg := &Config{ - UDPPort: 2000, - TCPPort: 3000, - QUICPort: 3000, + UDPPort: 0, // Use 0 to let OS assign an available port + TCPPort: 0, + QUICPort: 0, ClockWaiter: cs, DB: testDB.SetupDB(t), } @@ -73,6 +72,7 @@ func TestService_Start_OnlyStartsOnce(t *testing.T) { require.NoError(t, err) s.dv5Listener = testp2p.NewMockListener(nil, nil) s.custodyInfo = &custodyInfo{} + close(s.custodyInfoSet) exitRoutine := make(chan bool) go func() { s.Start() @@ -111,9 +111,9 @@ func TestService_Start_NoDiscoverFlag(t *testing.T) { cs := startup.NewClockSynchronizer() cfg := &Config{ - UDPPort: 2000, - TCPPort: 3000, - QUICPort: 3000, + UDPPort: 0, // Use 0 to let OS assign an available port + TCPPort: 0, + QUICPort: 0, StateNotifier: &mock.MockStateNotifier{}, NoDiscovery: true, // <-- no s.dv5Listener is created ClockWaiter: cs, @@ -147,12 +147,11 @@ func TestService_Start_NoDiscoverFlag(t *testing.T) { func TestListenForNewNodes(t *testing.T) { const ( - port = uint(2000) + bootPort = uint(2200) // Use specific port for bootnode ENR testPollingPeriod = 1 * time.Second peerCount = 5 ) - params.SetupTestConfigCleanup(t) db := testDB.SetupDB(t) // Setup bootnode. 
@@ -160,7 +159,7 @@ func TestListenForNewNodes(t *testing.T) { StateNotifier: &mock.MockStateNotifier{}, PingInterval: testPingInterval, DisableLivenessCheck: true, - UDPPort: port, + UDPPort: bootPort, DB: db, } @@ -171,10 +170,13 @@ func TestListenForNewNodes(t *testing.T) { s := &Service{ cfg: cfg, + ctx: t.Context(), genesisTime: genesisTime, genesisValidatorsRoot: gvr[:], custodyInfo: &custodyInfo{}, + custodyInfoSet: make(chan struct{}), } + close(s.custodyInfoSet) bootListener, err := s.createListener(ipAddr, pkey) require.NoError(t, err) @@ -199,25 +201,29 @@ func TestListenForNewNodes(t *testing.T) { hosts := make([]host.Host, 0, peerCount) for i := uint(1); i <= peerCount; i++ { + peerPort := bootPort + i cfg = &Config{ Discv5BootStrapAddrs: []string{bootNode.String()}, PingInterval: testPingInterval, DisableLivenessCheck: true, MaxPeers: peerCount, ClockWaiter: cs, - UDPPort: port + i, - TCPPort: port + i, + UDPPort: peerPort, + TCPPort: peerPort, DB: db, } - h, pkey, ipAddr := createHost(t, port+i) + h, pkey, ipAddr := createHost(t, peerPort) s := &Service{ cfg: cfg, + ctx: t.Context(), genesisTime: genesisTime, genesisValidatorsRoot: gvr[:], custodyInfo: &custodyInfo{}, + custodyInfoSet: make(chan struct{}), } + close(s.custodyInfoSet) listener, err := s.startDiscoveryV5(ipAddr, pkey) require.NoError(t, err, "Could not start discovery for node") @@ -247,6 +253,7 @@ func TestListenForNewNodes(t *testing.T) { s, err = NewService(t.Context(), cfg) require.NoError(t, err) s.custodyInfo = &custodyInfo{} + close(s.custodyInfoSet) go s.Start() @@ -270,7 +277,6 @@ func TestListenForNewNodes(t *testing.T) { } func TestPeer_Disconnect(t *testing.T) { - params.SetupTestConfigCleanup(t) h1, _, _ := createHost(t, 5000) defer func() { if err := h1.Close(); err != nil { diff --git a/beacon-chain/p2p/subnets_test.go b/beacon-chain/p2p/subnets_test.go index 357784421f..ddd9849af0 100644 --- a/beacon-chain/p2p/subnets_test.go +++ b/beacon-chain/p2p/subnets_test.go @@ -69,10 +69,13 @@ func TestStartDiscV5_FindAndDialPeersWithSubnet(t *testing.T) { bootNodeService := &Service{ cfg: &Config{UDPPort: 2000, TCPPort: 3000, QUICPort: 3000, DisableLivenessCheck: true, PingInterval: testPingInterval}, + ctx: ctx, genesisTime: genesisTime, genesisValidatorsRoot: params.BeaconConfig().GenesisValidatorsRoot[:], custodyInfo: &custodyInfo{}, + custodyInfoSet: make(chan struct{}), } + close(bootNodeService.custodyInfoSet) bootNodeForkDigest, err := bootNodeService.currentForkDigest() require.NoError(t, err) @@ -102,6 +105,7 @@ func TestStartDiscV5_FindAndDialPeersWithSubnet(t *testing.T) { PingInterval: testPingInterval, DisableLivenessCheck: true, DB: db, + DataDir: t.TempDir(), // Unique data dir for each peer }) require.NoError(t, err) @@ -109,6 +113,7 @@ func TestStartDiscV5_FindAndDialPeersWithSubnet(t *testing.T) { service.genesisTime = genesisTime service.genesisValidatorsRoot = params.BeaconConfig().GenesisValidatorsRoot[:] service.custodyInfo = &custodyInfo{} + close(service.custodyInfoSet) nodeForkDigest, err := service.currentForkDigest() require.NoError(t, err) @@ -152,6 +157,7 @@ func TestStartDiscV5_FindAndDialPeersWithSubnet(t *testing.T) { TCPPort: 3010, QUICPort: 3010, DB: db, + DataDir: t.TempDir(), // Unique data dir for test service } service, err := NewService(t.Context(), cfg) @@ -160,6 +166,7 @@ func TestStartDiscV5_FindAndDialPeersWithSubnet(t *testing.T) { service.genesisTime = genesisTime service.genesisValidatorsRoot = params.BeaconConfig().GenesisValidatorsRoot[:] 
service.custodyInfo = &custodyInfo{} + close(service.custodyInfoSet) service.Start() defer func() { diff --git a/beacon-chain/rpc/eth/beacon/handlers_pool_test.go b/beacon-chain/rpc/eth/beacon/handlers_pool_test.go index a46c57f87a..6d9db8ad53 100644 --- a/beacon-chain/rpc/eth/beacon/handlers_pool_test.go +++ b/beacon-chain/rpc/eth/beacon/handlers_pool_test.go @@ -300,6 +300,7 @@ func TestListAttestationsV2(t *testing.T) { params.SetupTestConfigCleanup(t) config := params.BeaconConfig() config.ElectraForkEpoch = 0 + config.FuluForkEpoch = config.FarFutureEpoch params.OverrideBeaconConfig(config) chainService := &blockchainmock.ChainService{State: bs} @@ -357,6 +358,12 @@ func TestListAttestationsV2(t *testing.T) { writer := httptest.NewRecorder() writer.Body = &bytes.Buffer{} + params.SetupTestConfigCleanup(t) + config := params.BeaconConfig() + config.ElectraForkEpoch = 0 + config.FuluForkEpoch = config.FarFutureEpoch + params.OverrideBeaconConfig(config) + s.ListAttestationsV2(writer, request) assert.Equal(t, http.StatusOK, writer.Code) resp := &structs.ListAttestationsResponse{} @@ -394,6 +401,186 @@ func TestListAttestationsV2(t *testing.T) { assert.Equal(t, "0x0400000000000000", a.CommitteeBits) } }) + t.Run("Post-Fulu", func(t *testing.T) { + cb1 := primitives.NewAttestationCommitteeBits() + cb1.SetBitAt(1, true) + cb2 := primitives.NewAttestationCommitteeBits() + cb2.SetBitAt(2, true) + + attFulu1 := ðpbv1alpha1.AttestationElectra{ + AggregationBits: []byte{1, 10}, + Data: ðpbv1alpha1.AttestationData{ + Slot: 1, + CommitteeIndex: 0, + BeaconBlockRoot: bytesutil.PadTo([]byte("blockroot1"), 32), + Source: ðpbv1alpha1.Checkpoint{ + Epoch: 1, + Root: bytesutil.PadTo([]byte("sourceroot1"), 32), + }, + Target: ðpbv1alpha1.Checkpoint{ + Epoch: 10, + Root: bytesutil.PadTo([]byte("targetroot1"), 32), + }, + }, + CommitteeBits: cb1, + Signature: bytesutil.PadTo([]byte("signature1"), 96), + } + attFulu2 := ðpbv1alpha1.AttestationElectra{ + AggregationBits: []byte{1, 10}, + Data: ðpbv1alpha1.AttestationData{ + Slot: 1, + CommitteeIndex: 0, + BeaconBlockRoot: bytesutil.PadTo([]byte("blockroot2"), 32), + Source: ðpbv1alpha1.Checkpoint{ + Epoch: 1, + Root: bytesutil.PadTo([]byte("sourceroot2"), 32), + }, + Target: ðpbv1alpha1.Checkpoint{ + Epoch: 10, + Root: bytesutil.PadTo([]byte("targetroot2"), 32), + }, + }, + CommitteeBits: cb2, + Signature: bytesutil.PadTo([]byte("signature2"), 96), + } + attFulu3 := ðpbv1alpha1.AttestationElectra{ + AggregationBits: bitfield.NewBitlist(8), + Data: ðpbv1alpha1.AttestationData{ + Slot: 2, + CommitteeIndex: 0, + BeaconBlockRoot: bytesutil.PadTo([]byte("blockroot3"), 32), + Source: ðpbv1alpha1.Checkpoint{ + Epoch: 1, + Root: bytesutil.PadTo([]byte("sourceroot3"), 32), + }, + Target: ðpbv1alpha1.Checkpoint{ + Epoch: 10, + Root: bytesutil.PadTo([]byte("targetroot3"), 32), + }, + }, + CommitteeBits: cb1, + Signature: bytesutil.PadTo([]byte("signature3"), 96), + } + attFulu4 := ðpbv1alpha1.AttestationElectra{ + AggregationBits: bitfield.NewBitlist(8), + Data: ðpbv1alpha1.AttestationData{ + Slot: 2, + CommitteeIndex: 0, + BeaconBlockRoot: bytesutil.PadTo([]byte("blockroot4"), 32), + Source: ðpbv1alpha1.Checkpoint{ + Epoch: 1, + Root: bytesutil.PadTo([]byte("sourceroot4"), 32), + }, + Target: ðpbv1alpha1.Checkpoint{ + Epoch: 10, + Root: bytesutil.PadTo([]byte("targetroot4"), 32), + }, + }, + CommitteeBits: cb2, + Signature: bytesutil.PadTo([]byte("signature4"), 96), + } + bs, err := util.NewBeaconStateFulu() + require.NoError(t, err) + + 
params.SetupTestConfigCleanup(t) + config := params.BeaconConfig() + config.ElectraForkEpoch = 0 + config.FuluForkEpoch = 0 + params.OverrideBeaconConfig(config) + + chainService := &blockchainmock.ChainService{State: bs} + s := &Server{ + AttestationsPool: attestations.NewPool(), + ChainInfoFetcher: chainService, + TimeFetcher: chainService, + } + // Added one pre electra attestation to ensure it is ignored. + require.NoError(t, s.AttestationsPool.SaveAggregatedAttestations([]ethpbv1alpha1.Att{attFulu1, attFulu2, att1})) + require.NoError(t, s.AttestationsPool.SaveUnaggregatedAttestations([]ethpbv1alpha1.Att{attFulu3, attFulu4, att3})) + + t.Run("empty request", func(t *testing.T) { + url := "http://example.com" + request := httptest.NewRequest(http.MethodGet, url, nil) + writer := httptest.NewRecorder() + writer.Body = &bytes.Buffer{} + + s.ListAttestationsV2(writer, request) + assert.Equal(t, http.StatusOK, writer.Code) + resp := &structs.ListAttestationsResponse{} + require.NoError(t, json.Unmarshal(writer.Body.Bytes(), resp)) + require.NotNil(t, resp) + require.NotNil(t, resp.Data) + + var atts []*structs.AttestationElectra + require.NoError(t, json.Unmarshal(resp.Data, &atts)) + assert.Equal(t, 4, len(atts)) + assert.Equal(t, "fulu", resp.Version) + }) + t.Run("slot request", func(t *testing.T) { + url := "http://example.com?slot=2" + request := httptest.NewRequest(http.MethodGet, url, nil) + writer := httptest.NewRecorder() + writer.Body = &bytes.Buffer{} + + s.ListAttestationsV2(writer, request) + assert.Equal(t, http.StatusOK, writer.Code) + resp := &structs.ListAttestationsResponse{} + require.NoError(t, json.Unmarshal(writer.Body.Bytes(), resp)) + require.NotNil(t, resp) + require.NotNil(t, resp.Data) + + var atts []*structs.AttestationElectra + require.NoError(t, json.Unmarshal(resp.Data, &atts)) + assert.Equal(t, 2, len(atts)) + assert.Equal(t, "fulu", resp.Version) + for _, a := range atts { + assert.Equal(t, "2", a.Data.Slot) + } + }) + t.Run("index request", func(t *testing.T) { + url := "http://example.com?committee_index=2" + request := httptest.NewRequest(http.MethodGet, url, nil) + writer := httptest.NewRecorder() + writer.Body = &bytes.Buffer{} + + s.ListAttestationsV2(writer, request) + assert.Equal(t, http.StatusOK, writer.Code) + resp := &structs.ListAttestationsResponse{} + require.NoError(t, json.Unmarshal(writer.Body.Bytes(), resp)) + require.NotNil(t, resp) + require.NotNil(t, resp.Data) + + var atts []*structs.AttestationElectra + require.NoError(t, json.Unmarshal(resp.Data, &atts)) + assert.Equal(t, 2, len(atts)) + assert.Equal(t, "fulu", resp.Version) + for _, a := range atts { + assert.Equal(t, "0x0400000000000000", a.CommitteeBits) + } + }) + t.Run("both slot + index request", func(t *testing.T) { + url := "http://example.com?slot=2&committee_index=2" + request := httptest.NewRequest(http.MethodGet, url, nil) + writer := httptest.NewRecorder() + writer.Body = &bytes.Buffer{} + + s.ListAttestationsV2(writer, request) + assert.Equal(t, http.StatusOK, writer.Code) + resp := &structs.ListAttestationsResponse{} + require.NoError(t, json.Unmarshal(writer.Body.Bytes(), resp)) + require.NotNil(t, resp) + require.NotNil(t, resp.Data) + + var atts []*structs.AttestationElectra + require.NoError(t, json.Unmarshal(resp.Data, &atts)) + assert.Equal(t, 1, len(atts)) + assert.Equal(t, "fulu", resp.Version) + for _, a := range atts { + assert.Equal(t, "2", a.Data.Slot) + assert.Equal(t, "0x0400000000000000", a.CommitteeBits) + } + }) + }) }) } @@ -494,6 +681,7 @@ func 
TestSubmitAttestationsV2(t *testing.T) { params.SetupTestConfigCleanup(t) config := params.BeaconConfig() config.ElectraForkEpoch = 0 + config.FuluForkEpoch = config.FarFutureEpoch params.OverrideBeaconConfig(config) var body bytes.Buffer @@ -574,6 +762,7 @@ func TestSubmitAttestationsV2(t *testing.T) { assert.Equal(t, true, strings.Contains(e.Failures[0].Message, "Incorrect attestation signature")) }) }) + t.Run("post-electra", func(t *testing.T) { params.SetupTestConfigCleanup(t) config := params.BeaconConfig() @@ -1462,6 +1651,7 @@ func TestGetAttesterSlashingsV2(t *testing.T) { params.SetupTestConfigCleanup(t) config := params.BeaconConfig() config.ElectraForkEpoch = 100 + config.FuluForkEpoch = config.FarFutureEpoch params.OverrideBeaconConfig(config) chainService := &blockchainmock.ChainService{State: bs} @@ -1493,6 +1683,7 @@ func TestGetAttesterSlashingsV2(t *testing.T) { require.DeepEqual(t, slashingPostElectra, ss[0]) }) + t.Run("post-electra-ok", func(t *testing.T) { bs, err := util.NewBeaconStateElectra() require.NoError(t, err) @@ -1500,6 +1691,7 @@ func TestGetAttesterSlashingsV2(t *testing.T) { params.SetupTestConfigCleanup(t) config := params.BeaconConfig() config.ElectraForkEpoch = 100 + config.FuluForkEpoch = config.FarFutureEpoch params.OverrideBeaconConfig(config) chainService := &blockchainmock.ChainService{State: bs} @@ -1570,6 +1762,7 @@ func TestGetAttesterSlashingsV2(t *testing.T) { params.SetupTestConfigCleanup(t) config := params.BeaconConfig() config.ElectraForkEpoch = 100 + config.FuluForkEpoch = config.FarFutureEpoch params.OverrideBeaconConfig(config) chainService := &blockchainmock.ChainService{State: bs} @@ -1596,6 +1789,83 @@ func TestGetAttesterSlashingsV2(t *testing.T) { require.NoError(t, json.Unmarshal(resp.Data, &slashings)) require.NotNil(t, slashings) require.Equal(t, 0, len(slashings)) + + t.Run("Post-Fulu", func(t *testing.T) { + t.Run("post-fulu-ok", func(t *testing.T) { + bs, err := util.NewBeaconStateFulu() + require.NoError(t, err) + + params.SetupTestConfigCleanup(t) + config := params.BeaconConfig() + config.ElectraForkEpoch = 0 + config.FuluForkEpoch = 0 + params.OverrideBeaconConfig(config) + + chainService := &blockchainmock.ChainService{State: bs} + + s := &Server{ + ChainInfoFetcher: chainService, + TimeFetcher: chainService, + SlashingsPool: &slashingsmock.PoolMock{PendingAttSlashings: []ethpbv1alpha1.AttSlashing{slashingPostElectra}}, + } + + request := httptest.NewRequest(http.MethodGet, "http://example.com/eth/v2/beacon/pool/attester_slashings", nil) + writer := httptest.NewRecorder() + writer.Body = &bytes.Buffer{} + + s.GetAttesterSlashingsV2(writer, request) + require.Equal(t, http.StatusOK, writer.Code) + resp := &structs.GetAttesterSlashingsResponse{} + require.NoError(t, json.Unmarshal(writer.Body.Bytes(), resp)) + require.NotNil(t, resp) + require.NotNil(t, resp.Data) + assert.Equal(t, "fulu", resp.Version) + + // Unmarshal resp.Data into a slice of slashings + var slashings []*structs.AttesterSlashingElectra + require.NoError(t, json.Unmarshal(resp.Data, &slashings)) + + ss, err := structs.AttesterSlashingsElectraToConsensus(slashings) + require.NoError(t, err) + + require.DeepEqual(t, slashingPostElectra, ss[0]) + }) + t.Run("no-slashings", func(t *testing.T) { + bs, err := util.NewBeaconStateFulu() + require.NoError(t, err) + + params.SetupTestConfigCleanup(t) + config := params.BeaconConfig() + config.ElectraForkEpoch = 0 + config.FuluForkEpoch = 0 + params.OverrideBeaconConfig(config) + + chainService := 
&blockchainmock.ChainService{State: bs} + s := &Server{ + ChainInfoFetcher: chainService, + TimeFetcher: chainService, + SlashingsPool: &slashingsmock.PoolMock{PendingAttSlashings: []ethpbv1alpha1.AttSlashing{}}, + } + + request := httptest.NewRequest(http.MethodGet, "http://example.com/eth/v2/beacon/pool/attester_slashings", nil) + writer := httptest.NewRecorder() + writer.Body = &bytes.Buffer{} + + s.GetAttesterSlashingsV2(writer, request) + require.Equal(t, http.StatusOK, writer.Code) + resp := &structs.GetAttesterSlashingsResponse{} + require.NoError(t, json.Unmarshal(writer.Body.Bytes(), resp)) + require.NotNil(t, resp) + require.NotNil(t, resp.Data) + assert.Equal(t, "fulu", resp.Version) + + // Unmarshal resp.Data into a slice of slashings + var slashings []*structs.AttesterSlashingElectra + require.NoError(t, json.Unmarshal(resp.Data, &slashings)) + require.NotNil(t, slashings) + require.Equal(t, 0, len(slashings)) + }) + }) }) } diff --git a/beacon-chain/rpc/eth/config/handlers_test.go b/beacon-chain/rpc/eth/config/handlers_test.go index 36ef6a0e97..86713ba815 100644 --- a/beacon-chain/rpc/eth/config/handlers_test.go +++ b/beacon-chain/rpc/eth/config/handlers_test.go @@ -568,10 +568,9 @@ func TestGetSpec(t *testing.T) { case "SYNC_MESSAGE_DUE_BPS": assert.Equal(t, "104", v) case "BLOB_SCHEDULE": - // BLOB_SCHEDULE should be an empty slice when no schedule is defined blobSchedule, ok := v.([]interface{}) assert.Equal(t, true, ok) - assert.Equal(t, 0, len(blobSchedule)) + assert.Equal(t, 2, len(blobSchedule)) default: t.Errorf("Incorrect key: %s", k) } diff --git a/changelog/james-prysm_fulu-fork-epoch.md b/changelog/james-prysm_fulu-fork-epoch.md new file mode 100644 index 0000000000..d2864ec640 --- /dev/null +++ b/changelog/james-prysm_fulu-fork-epoch.md @@ -0,0 +1,8 @@ +### Added + +- Fulu fork epoch for mainnet configurations set for December 3, 2025, 09:49:11pm UTC +- Added BPO schedules for December 9, 2025, 02:21:11pm UTC and January 7, 2026, 01:01:11am UTC + +### Changed + +- updated consensus spec to 1.6.0 from 1.6.0-beta.2 \ No newline at end of file diff --git a/config/params/fork_test.go b/config/params/fork_test.go index c31042b503..1f3fada116 100644 --- a/config/params/fork_test.go +++ b/config/params/fork_test.go @@ -130,10 +130,10 @@ func TestNextForkData(t *testing.T) { wantedEpoch: cfg.BellatrixForkEpoch, }, { - name: "after last bpo - should be far future epoch and 0x00000000", + name: "post last full fork, fulu bpo 1", currEpoch: params.LastForkEpoch() + 1, - wantedForkVersion: [4]byte(cfg.ElectraForkVersion), - wantedEpoch: cfg.ElectraForkEpoch, + wantedForkVersion: [4]byte(cfg.FuluForkVersion), + wantedEpoch: cfg.BlobSchedule[0].Epoch, }, } for _, tt := range tests { diff --git a/config/params/mainnet_config.go b/config/params/mainnet_config.go index 68a2bd958b..72131d173b 100644 --- a/config/params/mainnet_config.go +++ b/config/params/mainnet_config.go @@ -30,7 +30,7 @@ const ( // Electra Fork Epoch for mainnet config mainnetElectraForkEpoch = 364032 // May 7, 2025, 10:05:11 UTC // Fulu Fork Epoch for mainnet config - mainnetFuluForkEpoch = math.MaxUint64 // Far future / to be defined + mainnetFuluForkEpoch = 411392 // December 3, 2025, 09:49:11pm UTC ) var mainnetNetworkConfig = &NetworkConfig{ @@ -338,7 +338,16 @@ var mainnetBeaconConfig = &BeaconChainConfig{ SubnetsPerNode: 2, NodeIdBits: 256, - BlobSchedule: []BlobScheduleEntry{}, + BlobSchedule: []BlobScheduleEntry{ + { + Epoch: 412672, // December 9, 2025, 02:21:11pm UTC + MaxBlobsPerBlock: 15, + }, + { 
+ Epoch: 419072, // January 7, 2026, 01:01:11am UTC + MaxBlobsPerBlock: 21, + }, + }, } // MainnetTestConfig provides a version of the mainnet config that has a different name diff --git a/specrefs/.ethspecify.yml b/specrefs/.ethspecify.yml index 2fd5381136..9e060d29a0 100644 --- a/specrefs/.ethspecify.yml +++ b/specrefs/.ethspecify.yml @@ -1,4 +1,4 @@ -version: v1.6.0-beta.2 +version: v1.6.0 style: full specrefs: diff --git a/specrefs/configs.yml b/specrefs/configs.yml index a23916fd4a..46b1f05d0b 100644 --- a/specrefs/configs.yml +++ b/specrefs/configs.yml @@ -108,8 +108,16 @@ search: BlobSchedule\s+\[]BlobScheduleEntry regex: true spec: | - + BLOB_SCHEDULE: tuple[frozendict[str, Any], ...] = ( + frozendict({ + "EPOCH": 412672, + "MAX_BLOBS_PER_BLOCK": 15, + }), + frozendict({ + "EPOCH": 419072, + "MAX_BLOBS_PER_BLOCK": 21, + }), ) @@ -266,8 +274,8 @@ search: FuluForkEpoch\s+primitives.Epoch regex: true spec: | - - FULU_FORK_EPOCH: Epoch = 18446744073709551615 + + FULU_FORK_EPOCH: Epoch = 411392 - name: FULU_FORK_VERSION From 35e9c1752e3b758b6f7d33884617f8d241d51737 Mon Sep 17 00:00:00 2001 From: Preston Van Loon Date: Wed, 5 Nov 2025 21:10:46 -0600 Subject: [PATCH 082/103] Remove test overrides for electra and fulu epochs in beacon-chain/rpc/eth/beacon/handlers_pool_test.go (#15988) * Reverted all config.FuluForkEpoch = config.FarFutureEpoch from 8b6f187b159d33dbf861b76072b15fb297984284 * Fix tests by referencing electra epoch / slot values in requests and test setup * Changelog fragment --- .../rpc/eth/beacon/handlers_pool_test.go | 46 ++++++++----------- changelog/pvl-fulu-test-fix.md | 3 ++ 2 files changed, 21 insertions(+), 28 deletions(-) create mode 100644 changelog/pvl-fulu-test-fix.md diff --git a/beacon-chain/rpc/eth/beacon/handlers_pool_test.go b/beacon-chain/rpc/eth/beacon/handlers_pool_test.go index 6d9db8ad53..9685957e42 100644 --- a/beacon-chain/rpc/eth/beacon/handlers_pool_test.go +++ b/beacon-chain/rpc/eth/beacon/handlers_pool_test.go @@ -222,10 +222,13 @@ func TestListAttestationsV2(t *testing.T) { cb2 := primitives.NewAttestationCommitteeBits() cb2.SetBitAt(2, true) + config := params.BeaconConfig() + electraSlot := slots.UnsafeEpochStart(config.ElectraForkEpoch + 1) + attElectra1 := ðpbv1alpha1.AttestationElectra{ AggregationBits: []byte{1, 10}, Data: ðpbv1alpha1.AttestationData{ - Slot: 1, + Slot: electraSlot, CommitteeIndex: 0, BeaconBlockRoot: bytesutil.PadTo([]byte("blockroot1"), 32), Source: ðpbv1alpha1.Checkpoint{ @@ -243,7 +246,7 @@ func TestListAttestationsV2(t *testing.T) { attElectra2 := ðpbv1alpha1.AttestationElectra{ AggregationBits: []byte{1, 10}, Data: ðpbv1alpha1.AttestationData{ - Slot: 1, + Slot: electraSlot, CommitteeIndex: 0, BeaconBlockRoot: bytesutil.PadTo([]byte("blockroot2"), 32), Source: ðpbv1alpha1.Checkpoint{ @@ -261,7 +264,7 @@ func TestListAttestationsV2(t *testing.T) { attElectra3 := ðpbv1alpha1.AttestationElectra{ AggregationBits: bitfield.NewBitlist(8), Data: ðpbv1alpha1.AttestationData{ - Slot: 2, + Slot: electraSlot + 1, CommitteeIndex: 0, BeaconBlockRoot: bytesutil.PadTo([]byte("blockroot3"), 32), Source: ðpbv1alpha1.Checkpoint{ @@ -279,7 +282,7 @@ func TestListAttestationsV2(t *testing.T) { attElectra4 := ðpbv1alpha1.AttestationElectra{ AggregationBits: bitfield.NewBitlist(8), Data: ðpbv1alpha1.AttestationData{ - Slot: 2, + Slot: electraSlot + 1, CommitteeIndex: 0, BeaconBlockRoot: bytesutil.PadTo([]byte("blockroot4"), 32), Source: ðpbv1alpha1.Checkpoint{ @@ -298,12 +301,8 @@ func TestListAttestationsV2(t *testing.T) { 
require.NoError(t, err) params.SetupTestConfigCleanup(t) - config := params.BeaconConfig() - config.ElectraForkEpoch = 0 - config.FuluForkEpoch = config.FarFutureEpoch - params.OverrideBeaconConfig(config) - chainService := &blockchainmock.ChainService{State: bs} + chainService := &blockchainmock.ChainService{State: bs, Slot: &electraSlot} s := &Server{ AttestationsPool: attestations.NewPool(), ChainInfoFetcher: chainService, @@ -332,7 +331,7 @@ func TestListAttestationsV2(t *testing.T) { assert.Equal(t, "electra", resp.Version) }) t.Run("slot request", func(t *testing.T) { - url := "http://example.com?slot=2" + url := fmt.Sprintf("http://example.com?slot=%d", electraSlot) request := httptest.NewRequest(http.MethodGet, url, nil) writer := httptest.NewRecorder() writer.Body = &bytes.Buffer{} @@ -349,7 +348,7 @@ func TestListAttestationsV2(t *testing.T) { assert.Equal(t, 2, len(atts)) assert.Equal(t, "electra", resp.Version) for _, a := range atts { - assert.Equal(t, "2", a.Data.Slot) + assert.Equal(t, fmt.Sprintf("%d", electraSlot), a.Data.Slot) } }) t.Run("index request", func(t *testing.T) { @@ -361,7 +360,6 @@ func TestListAttestationsV2(t *testing.T) { params.SetupTestConfigCleanup(t) config := params.BeaconConfig() config.ElectraForkEpoch = 0 - config.FuluForkEpoch = config.FarFutureEpoch params.OverrideBeaconConfig(config) s.ListAttestationsV2(writer, request) @@ -380,7 +378,7 @@ func TestListAttestationsV2(t *testing.T) { } }) t.Run("both slot + index request", func(t *testing.T) { - url := "http://example.com?slot=2&committee_index=2" + url := fmt.Sprintf("http://example.com?slot=%d&committee_index=2", electraSlot) request := httptest.NewRequest(http.MethodGet, url, nil) writer := httptest.NewRecorder() writer.Body = &bytes.Buffer{} @@ -397,7 +395,7 @@ func TestListAttestationsV2(t *testing.T) { assert.Equal(t, 1, len(atts)) assert.Equal(t, "electra", resp.Version) for _, a := range atts { - assert.Equal(t, "2", a.Data.Slot) + assert.Equal(t, fmt.Sprintf("%d", electraSlot), a.Data.Slot) assert.Equal(t, "0x0400000000000000", a.CommitteeBits) } }) @@ -681,7 +679,6 @@ func TestSubmitAttestationsV2(t *testing.T) { params.SetupTestConfigCleanup(t) config := params.BeaconConfig() config.ElectraForkEpoch = 0 - config.FuluForkEpoch = config.FarFutureEpoch params.OverrideBeaconConfig(config) var body bytes.Buffer @@ -1650,12 +1647,9 @@ func TestGetAttesterSlashingsV2(t *testing.T) { params.SetupTestConfigCleanup(t) config := params.BeaconConfig() - config.ElectraForkEpoch = 100 - config.FuluForkEpoch = config.FarFutureEpoch - params.OverrideBeaconConfig(config) - - chainService := &blockchainmock.ChainService{State: bs} + slot := slots.UnsafeEpochStart(config.ElectraForkEpoch + 1) + chainService := &blockchainmock.ChainService{State: bs, Slot: &slot} s := &Server{ ChainInfoFetcher: chainService, TimeFetcher: chainService, @@ -1690,11 +1684,9 @@ func TestGetAttesterSlashingsV2(t *testing.T) { params.SetupTestConfigCleanup(t) config := params.BeaconConfig() - config.ElectraForkEpoch = 100 - config.FuluForkEpoch = config.FarFutureEpoch - params.OverrideBeaconConfig(config) - chainService := &blockchainmock.ChainService{State: bs} + slot := slots.UnsafeEpochStart(config.ElectraForkEpoch + 1) + chainService := &blockchainmock.ChainService{State: bs, Slot: &slot} s := &Server{ ChainInfoFetcher: chainService, @@ -1761,11 +1753,9 @@ func TestGetAttesterSlashingsV2(t *testing.T) { params.SetupTestConfigCleanup(t) config := params.BeaconConfig() - config.ElectraForkEpoch = 100 - config.FuluForkEpoch = 
config.FarFutureEpoch - params.OverrideBeaconConfig(config) - chainService := &blockchainmock.ChainService{State: bs} + slot := slots.UnsafeEpochStart(config.ElectraForkEpoch + 1) + chainService := &blockchainmock.ChainService{State: bs, Slot: &slot} s := &Server{ ChainInfoFetcher: chainService, TimeFetcher: chainService, diff --git a/changelog/pvl-fulu-test-fix.md b/changelog/pvl-fulu-test-fix.md new file mode 100644 index 0000000000..3bee1e008e --- /dev/null +++ b/changelog/pvl-fulu-test-fix.md @@ -0,0 +1,3 @@ +### Ignored + +- Fix test setup to properly reference electra rather than unset the fulu epoch From 33476f5d7b411983a4656e7140da60e017744067 Mon Sep 17 00:00:00 2001 From: kasey <489222+kasey@users.noreply.github.com> Date: Thu, 6 Nov 2025 01:45:12 -0600 Subject: [PATCH 083/103] disable backfill if the checkpoint sync origin slot is in fulu (#15987) Co-authored-by: Kasey Kirkham --- beacon-chain/sync/backfill/BUILD.bazel | 1 + beacon-chain/sync/backfill/service.go | 19 ++++++++++ beacon-chain/sync/backfill/service_test.go | 39 +++++++++++++++++++++ changelog/kasey_disable-backfill-if-fulu.md | 2 ++ 4 files changed, 61 insertions(+) create mode 100644 changelog/kasey_disable-backfill-if-fulu.md diff --git a/beacon-chain/sync/backfill/BUILD.bazel b/beacon-chain/sync/backfill/BUILD.bazel index bbd2691dc4..cbaa1b7cac 100644 --- a/beacon-chain/sync/backfill/BUILD.bazel +++ b/beacon-chain/sync/backfill/BUILD.bazel @@ -81,6 +81,7 @@ go_test( "//runtime/interop:go_default_library", "//testing/require:go_default_library", "//testing/util:go_default_library", + "//time/slots:go_default_library", "@com_github_ethereum_go_ethereum//common/hexutil:go_default_library", "@com_github_libp2p_go_libp2p//core/peer:go_default_library", "@com_github_pkg_errors//:go_default_library", diff --git a/beacon-chain/sync/backfill/service.go b/beacon-chain/sync/backfill/service.go index fcbd0086fe..c07d753bdd 100644 --- a/beacon-chain/sync/backfill/service.go +++ b/beacon-chain/sync/backfill/service.go @@ -249,6 +249,18 @@ func (s *Service) scheduleTodos() { } } +// fuluOrigin checks whether the origin block (ie the checkpoint sync block from which backfill +// syncs backwards) is in an unsupported fork, enabling the backfill service to shut down rather than +// run with buggy behavior. +// This will be removed once DataColumnSidecar support is released. +func fuluOrigin(cfg *params.BeaconChainConfig, status *dbval.BackfillStatus) bool { + originEpoch := slots.ToEpoch(primitives.Slot(status.OriginSlot)) + if originEpoch < cfg.FuluForkEpoch { + return false + } + return true +} + // Start begins the runloop of backfill.Service in the current goroutine. func (s *Service) Start() { if !s.enabled { @@ -281,6 +293,12 @@ func (s *Service) Start() { return } status := s.store.status() + if fuluOrigin(params.BeaconConfig(), status) { + log.WithField("originSlot", s.store.status().OriginSlot). + Warn("backfill disabled; DataColumnSidecar currently unsupported, for updates follow https://github.com/OffchainLabs/prysm/issues/15982") + s.markComplete() + return + } // Exit early if there aren't going to be any batches to backfill. if primitives.Slot(status.LowSlot) <= s.ms(s.clock.CurrentSlot()) { log.WithField("minimumRequiredSlot", s.ms(s.clock.CurrentSlot())). 
@@ -289,6 +307,7 @@ func (s *Service) Start() { s.markComplete() return } + s.verifier, s.ctxMap, err = s.initVerifier(ctx) if err != nil { log.WithError(err).Error("Unable to initialize backfill verifier") diff --git a/beacon-chain/sync/backfill/service_test.go b/beacon-chain/sync/backfill/service_test.go index 59efb21f0e..b0b270c2b0 100644 --- a/beacon-chain/sync/backfill/service_test.go +++ b/beacon-chain/sync/backfill/service_test.go @@ -15,6 +15,7 @@ import ( "github.com/OffchainLabs/prysm/v6/proto/dbval" "github.com/OffchainLabs/prysm/v6/testing/require" "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v6/time/slots" ) type mockMinimumSlotter struct { @@ -131,3 +132,41 @@ func TestBackfillMinSlotDefault(t *testing.T) { require.Equal(t, specMin, s.ms(current)) }) } + +func TestFuluOrigin(t *testing.T) { + cfg := params.BeaconConfig() + fuluEpoch := cfg.FuluForkEpoch + fuluSlot, err := slots.EpochStart(fuluEpoch) + require.NoError(t, err) + cases := []struct { + name string + origin primitives.Slot + isFulu bool + }{ + { + name: "before fulu", + origin: fuluSlot - 1, + isFulu: false, + }, + { + name: "at fulu", + origin: fuluSlot, + isFulu: true, + }, + { + name: "after fulu", + origin: fuluSlot + 1, + isFulu: true, + }, + } + + for _, tc := range cases { + t.Run(tc.name, func(t *testing.T) { + status := &dbval.BackfillStatus{ + OriginSlot: uint64(tc.origin), + } + result := fuluOrigin(cfg, status) + require.Equal(t, tc.isFulu, result) + }) + } +} diff --git a/changelog/kasey_disable-backfill-if-fulu.md b/changelog/kasey_disable-backfill-if-fulu.md new file mode 100644 index 0000000000..b512f44118 --- /dev/null +++ b/changelog/kasey_disable-backfill-if-fulu.md @@ -0,0 +1,2 @@ +### Fixed +- Backfill disabled if checkpoint sync origin is after fulu fork due to lack of DataColumnSidecar support in backfill. To track the availability of fulu-compatible backfill please watch https://github.com/OffchainLabs/prysm/issues/15982 From d6005026e0ca31e31b9c2f9996ee0fe0016d5b6b Mon Sep 17 00:00:00 2001 From: Preston Van Loon Date: Thu, 6 Nov 2025 02:18:10 -0600 Subject: [PATCH 084/103] Remove deprecated flags for v7 release (#15986) * Delete deprecated flags * Changelog fragment * E2E: Remove delete flag usage. --------- Co-authored-by: Manu NALEPA --- changelog/remove-deprecated-flags.md | 21 ++++ config/features/deprecated_flags.go | 125 +-------------------- testing/endtoend/component_handler_test.go | 11 -- 3 files changed, 23 insertions(+), 134 deletions(-) create mode 100644 changelog/remove-deprecated-flags.md diff --git a/changelog/remove-deprecated-flags.md b/changelog/remove-deprecated-flags.md new file mode 100644 index 0000000000..92412e4172 --- /dev/null +++ b/changelog/remove-deprecated-flags.md @@ -0,0 +1,21 @@ +### Removed + +- Deprecated flag `--enable-optional-engine-methods` has been removed. +- Deprecated flag `--disable-build-block-parallel` has been removed. +- Deprecated flag `--disable-reorg-late-blocks` has been removed. +- Deprecated flag `--disable-optional-engine-methods` has been removed. +- Deprecated flag `--disable-aggregate-parallel` has been removed. +- Deprecated flag `--enable-eip-4881` has been removed. +- Deprecated flag `--disable-eip-4881` has been removed. +- Deprecated flag `--enable-verbose-sig-verification` has been removed. +- Deprecated flag `--enable-debug-rpc-endpoints` has been removed. +- Deprecated flag `--beacon-rpc-gateway-provider` has been removed. +- Deprecated flag `--disable-grpc-gateway` has been removed. 
+- Deprecated flag `--enable-experimental-state` has been removed. +- Deprecated flag `--enable-committee-aware-packing` has been removed. +- Deprecated flag `--interop-genesis-time` has been removed. +- Deprecated flag `--interop-num-validators` has been removed (from beacon-chain only; still available in validator client). +- Deprecated flag `--enable-quic` has been removed. +- Deprecated flag `--attest-timely` has been removed. +- Deprecated flag `--disable-experimental-state` has been removed. +- Deprecated flag `--p2p-metadata` has been removed. diff --git a/config/features/deprecated_flags.go b/config/features/deprecated_flags.go index 2cf0e03401..3f076a542e 100644 --- a/config/features/deprecated_flags.go +++ b/config/features/deprecated_flags.go @@ -14,129 +14,10 @@ var ( Usage: deprecatedUsage, Hidden: true, } - deprecatedEnableOptionalEngineMethods = &cli.BoolFlag{ - Name: "enable-optional-engine-methods", - Usage: deprecatedUsage, - Hidden: true, - } - deprecatedDisableBuildBlockParallel = &cli.BoolFlag{ - Name: "disable-build-block-parallel", - Usage: deprecatedUsage, - Hidden: true, - } - deprecatedDisableReorgLateBlocks = &cli.BoolFlag{ - Name: "disable-reorg-late-blocks", - Usage: deprecatedUsage, - Hidden: true, - } - deprecatedDisableOptionalEngineMethods = &cli.BoolFlag{ - Name: "disable-optional-engine-methods", - Usage: deprecatedUsage, - Hidden: true, - } - deprecatedDisableAggregateParallel = &cli.BoolFlag{ - Name: "disable-aggregate-parallel", - Usage: deprecatedUsage, - Hidden: true, - } - deprecatedEnableEIP4881 = &cli.BoolFlag{ - Name: "enable-eip-4881", - Usage: deprecatedUsage, - Hidden: true, - } - deprecatedDisableEIP4881 = &cli.BoolFlag{ - Name: "disable-eip-4881", - Usage: deprecatedUsage, - Hidden: true, - } - deprecatedVerboseSigVerification = &cli.BoolFlag{ - Name: "enable-verbose-sig-verification", - Usage: deprecatedUsage, - Hidden: true, - } - deprecatedEnableDebugRPCEndpoints = &cli.BoolFlag{ - Name: "enable-debug-rpc-endpoints", - Usage: deprecatedUsage, - Hidden: true, - } - - deprecatedBeaconRPCGatewayProviderFlag = &cli.StringFlag{ - Name: "beacon-rpc-gateway-provider", - Usage: deprecatedUsage, - Hidden: true, - } - - deprecatedDisableGRPCGateway = &cli.BoolFlag{ - Name: "disable-grpc-gateway", - Usage: deprecatedUsage, - Hidden: true, - } - - deprecatedEnableExperimentalState = &cli.BoolFlag{ - Name: "enable-experimental-state", - Usage: deprecatedUsage, - Hidden: true, - } - - deprecatedEnableCommitteeAwarePacking = &cli.BoolFlag{ - Name: "enable-committee-aware-packing", - Usage: deprecatedUsage, - Hidden: true, - } - deprecatedInteropGenesisTimeFlag = &cli.Uint64Flag{ - Name: "interop-genesis-time", - Usage: deprecatedUsage, - Hidden: true, - } - deprecatedInteropNumValidatorsFlag = &cli.Uint64Flag{ - Name: "interop-num-validators", - Usage: deprecatedUsage, - Hidden: true, - } - deprecatedEnableQuic = &cli.BoolFlag{ - Name: "enable-quic", - Usage: deprecatedUsage, - Hidden: true, - } - deprecatedAttestTimely = &cli.BoolFlag{ - Name: "attest-timely", - Usage: deprecatedUsage, - Hidden: true, - } - deprecatedDisableExperimentalState = &cli.BoolFlag{ - Name: "disable-experimental-state", - Usage: deprecatedUsage, - Hidden: true, - } - deprecatedP2PMetadata = &cli.StringFlag{ - Name: "p2p-metadata", - Usage: deprecatedUsage, - Hidden: true, - } ) // Deprecated flags for both the beacon node and validator client. 
-var deprecatedFlags = []cli.Flag{ - exampleDeprecatedFeatureFlag, - deprecatedEnableOptionalEngineMethods, - deprecatedDisableBuildBlockParallel, - deprecatedDisableReorgLateBlocks, - deprecatedDisableOptionalEngineMethods, - deprecatedDisableAggregateParallel, - deprecatedEnableEIP4881, - deprecatedDisableEIP4881, - deprecatedVerboseSigVerification, - deprecatedEnableDebugRPCEndpoints, - deprecatedBeaconRPCGatewayProviderFlag, - deprecatedDisableGRPCGateway, - deprecatedEnableExperimentalState, - deprecatedEnableCommitteeAwarePacking, - deprecatedInteropGenesisTimeFlag, - deprecatedEnableQuic, - deprecatedAttestTimely, - deprecatedDisableExperimentalState, - deprecatedP2PMetadata, -} +var deprecatedFlags = []cli.Flag{} var upcomingDeprecation = []cli.Flag{ enableHistoricalSpaceRepresentation, @@ -144,6 +25,4 @@ var upcomingDeprecation = []cli.Flag{ // deprecatedBeaconFlags contains flags that are still used by other components // and therefore cannot be added to deprecatedFlags -var deprecatedBeaconFlags = []cli.Flag{ - deprecatedInteropNumValidatorsFlag, -} +var deprecatedBeaconFlags = []cli.Flag{} diff --git a/testing/endtoend/component_handler_test.go b/testing/endtoend/component_handler_test.go index 28ae2158e6..9df7849cfe 100644 --- a/testing/endtoend/component_handler_test.go +++ b/testing/endtoend/component_handler_test.go @@ -136,10 +136,6 @@ func (c *componentHandler) setup() { }) c.eth1Nodes = eth1Nodes - if config.TestCheckpointSync { - appendDebugEndpoints(config) - } - var builders *components.BuilderSet var proxies *eth1.ProxySet if config.UseBuilder { @@ -290,10 +286,3 @@ func PIDsFromMultiComponentRunner(runner e2etypes.MultipleComponentRunners) []in } return pids } - -func appendDebugEndpoints(cfg *e2etypes.E2EConfig) { - debug := []string{ - "--enable-debug-rpc-endpoints", - } - cfg.BeaconFlags = append(cfg.BeaconFlags, debug...) 
-} From 92bd211e4dfb583d7d3602e21a48e38f753155d9 Mon Sep 17 00:00:00 2001 From: Bastin <43618253+Inspector-Butters@users.noreply.github.com> Date: Thu, 6 Nov 2025 17:16:23 +0100 Subject: [PATCH 085/103] upgrade v6 to v7 (#15989) * upgrade v6 to v7 * changelog * update-go-ssz --- .deepsource.toml | 2 +- BUILD.bazel | 2 +- api/BUILD.bazel | 2 +- api/apiutil/BUILD.bazel | 2 +- api/apiutil/common.go | 2 +- api/apiutil/common_test.go | 4 +- api/apiutil/header_test.go | 2 +- api/client/BUILD.bazel | 2 +- api/client/beacon/BUILD.bazel | 2 +- api/client/beacon/client.go | 12 +- api/client/beacon/client_test.go | 4 +- api/client/builder/BUILD.bazel | 2 +- api/client/builder/bid.go | 14 +- api/client/builder/client.go | 24 +- api/client/builder/client_test.go | 24 +- api/client/builder/testing/BUILD.bazel | 2 +- api/client/builder/testing/mock.go | 12 +- api/client/builder/types.go | 20 +- api/client/builder/types_test.go | 18 +- api/client/client_test.go | 2 +- api/client/event/BUILD.bazel | 2 +- api/client/event/event_stream.go | 4 +- api/client/event/event_stream_test.go | 2 +- api/client/event/utils_test.go | 2 +- api/client/transport_test.go | 4 +- api/client/validator/BUILD.bazel | 2 +- api/client/validator/client.go | 4 +- api/grpc/BUILD.bazel | 2 +- api/grpc/grpcutils_test.go | 4 +- api/jwt.go | 2 +- api/jwt_test.go | 2 +- api/pagination/BUILD.bazel | 2 +- api/pagination/pagination.go | 2 +- api/pagination/pagination_test.go | 6 +- api/server/BUILD.bazel | 2 +- api/server/error_test.go | 2 +- api/server/httprest/BUILD.bazel | 2 +- api/server/httprest/options.go | 2 +- api/server/httprest/server.go | 4 +- api/server/httprest/server_test.go | 6 +- api/server/middleware/BUILD.bazel | 2 +- api/server/middleware/middleware.go | 4 +- api/server/middleware/middleware_test.go | 4 +- api/server/middleware/util_test.go | 4 +- api/server/structs/BUILD.bazel | 2 +- api/server/structs/conversions.go | 24 +- api/server/structs/conversions_blob.go | 6 +- api/server/structs/conversions_block.go | 16 +- .../structs/conversions_block_execution.go | 16 +- .../conversions_block_execution_test.go | 4 +- api/server/structs/conversions_lightclient.go | 6 +- api/server/structs/conversions_state.go | 4 +- api/server/structs/conversions_test.go | 6 +- api/server/structs/endpoints_validator.go | 2 +- async/BUILD.bazel | 2 +- async/abool/BUILD.bazel | 2 +- async/benchmark_test.go | 4 +- async/debounce_test.go | 8 +- async/event/BUILD.bazel | 2 +- async/event/example_scope_test.go | 2 +- async/event/example_subscription_test.go | 2 +- async/event/subscription.go | 2 +- async/event/subscription_test.go | 2 +- async/every_test.go | 2 +- async/scatter_test.go | 6 +- beacon-chain/blockchain/BUILD.bazel | 2 +- beacon-chain/blockchain/chain_info.go | 28 +- .../blockchain/chain_info_forkchoice.go | 12 +- .../blockchain/chain_info_norace_test.go | 10 +- beacon-chain/blockchain/chain_info_test.go | 30 +- beacon-chain/blockchain/defragment.go | 4 +- beacon-chain/blockchain/error.go | 2 +- beacon-chain/blockchain/error_test.go | 4 +- beacon-chain/blockchain/execution_engine.go | 42 +- .../blockchain/execution_engine_test.go | 40 +- .../blockchain/forkchoice_update_execution.go | 16 +- .../forkchoice_update_execution_test.go | 18 +- beacon-chain/blockchain/head.go | 30 +- .../blockchain/head_sync_committee_info.go | 22 +- .../head_sync_committee_info_test.go | 14 +- beacon-chain/blockchain/head_test.go | 28 +- .../blockchain/init_sync_process_block.go | 4 +- .../init_sync_process_block_test.go | 8 +- 
beacon-chain/blockchain/init_test.go | 2 +- beacon-chain/blockchain/kzg/BUILD.bazel | 2 +- .../blockchain/kzg/trusted_setup_test.go | 2 +- beacon-chain/blockchain/kzg/validation.go | 2 +- .../blockchain/kzg/validation_test.go | 6 +- beacon-chain/blockchain/log.go | 18 +- beacon-chain/blockchain/log_test.go | 10 +- beacon-chain/blockchain/metrics.go | 18 +- beacon-chain/blockchain/metrics_test.go | 6 +- beacon-chain/blockchain/mock_test.go | 12 +- beacon-chain/blockchain/options.go | 34 +- beacon-chain/blockchain/pow_block.go | 14 +- beacon-chain/blockchain/pow_block_test.go | 16 +- .../blockchain/process_attestation.go | 14 +- .../blockchain/process_attestation_helpers.go | 18 +- .../blockchain/process_attestation_test.go | 24 +- beacon-chain/blockchain/process_block.go | 46 +- .../blockchain/process_block_helpers.go | 34 +- beacon-chain/blockchain/process_block_test.go | 74 +-- .../blockchain/receive_attestation.go | 20 +- .../blockchain/receive_attestation_test.go | 24 +- beacon-chain/blockchain/receive_blob.go | 4 +- beacon-chain/blockchain/receive_block.go | 42 +- beacon-chain/blockchain/receive_block_test.go | 42 +- .../blockchain/receive_data_column.go | 2 +- beacon-chain/blockchain/service.go | 60 +-- .../blockchain/service_norace_test.go | 6 +- beacon-chain/blockchain/service_test.go | 64 +-- beacon-chain/blockchain/setup_forkchoice.go | 14 +- .../blockchain/setup_forkchoice_test.go | 16 +- beacon-chain/blockchain/setup_test.go | 48 +- beacon-chain/blockchain/testing/BUILD.bazel | 2 +- beacon-chain/blockchain/testing/mock.go | 42 +- beacon-chain/blockchain/tracked_proposer.go | 10 +- .../blockchain/weak_subjectivity_checks.go | 12 +- .../weak_subjectivity_checks_test.go | 16 +- beacon-chain/builder/BUILD.bazel | 2 +- beacon-chain/builder/option.go | 10 +- beacon-chain/builder/service.go | 22 +- beacon-chain/builder/service_test.go | 14 +- beacon-chain/builder/testing/BUILD.bazel | 2 +- beacon-chain/builder/testing/mock.go | 22 +- beacon-chain/cache/BUILD.bazel | 2 +- beacon-chain/cache/active_balance.go | 4 +- beacon-chain/cache/active_balance_disabled.go | 2 +- beacon-chain/cache/active_balance_test.go | 12 +- beacon-chain/cache/attestation.go | 10 +- beacon-chain/cache/attestation_data.go | 4 +- beacon-chain/cache/attestation_data_test.go | 4 +- beacon-chain/cache/attestation_test.go | 14 +- beacon-chain/cache/balance_cache_key.go | 6 +- beacon-chain/cache/checkpoint_state.go | 8 +- beacon-chain/cache/checkpoint_state_test.go | 18 +- beacon-chain/cache/committee.go | 12 +- beacon-chain/cache/committee_disabled.go | 2 +- beacon-chain/cache/committee_fuzz_test.go | 4 +- beacon-chain/cache/committee_test.go | 10 +- beacon-chain/cache/committees.go | 2 +- .../cache/depositsnapshot/BUILD.bazel | 2 +- .../depositsnapshot/deposit_cache_test.go | 16 +- .../cache/depositsnapshot/deposit_fetcher.go | 8 +- .../depositsnapshot/deposit_fetcher_test.go | 4 +- .../cache/depositsnapshot/deposit_inserter.go | 6 +- .../cache/depositsnapshot/deposit_pruner.go | 4 +- .../depositsnapshot/deposit_pruner_test.go | 8 +- .../cache/depositsnapshot/deposit_tree.go | 8 +- .../depositsnapshot/deposit_tree_snapshot.go | 8 +- .../deposit_tree_snapshot_test.go | 2 +- .../cache/depositsnapshot/merkle_tree.go | 8 +- .../cache/depositsnapshot/merkle_tree_test.go | 8 +- .../cache/depositsnapshot/spec_test.go | 10 +- beacon-chain/cache/interfaces.go | 2 +- beacon-chain/cache/payload_id.go | 2 +- beacon-chain/cache/payload_id_test.go | 4 +- beacon-chain/cache/private_access_test.go | 2 +- 
beacon-chain/cache/proposer_indices.go | 6 +- .../cache/proposer_indices_disabled.go | 6 +- beacon-chain/cache/proposer_indices_test.go | 8 +- beacon-chain/cache/proposer_indices_type.go | 2 +- beacon-chain/cache/registration.go | 8 +- beacon-chain/cache/registration_test.go | 6 +- beacon-chain/cache/skip_slot_cache.go | 6 +- beacon-chain/cache/skip_slot_cache_test.go | 12 +- beacon-chain/cache/subnet_ids.go | 8 +- beacon-chain/cache/subnet_ids_test.go | 4 +- beacon-chain/cache/sync_committee.go | 6 +- beacon-chain/cache/sync_committee_disabled.go | 4 +- .../cache/sync_committee_head_state.go | 8 +- .../cache/sync_committee_head_state_test.go | 14 +- beacon-chain/cache/sync_committee_test.go | 10 +- beacon-chain/cache/sync_subnet_ids.go | 10 +- beacon-chain/cache/sync_subnet_ids_test.go | 8 +- beacon-chain/cache/tracked_validators.go | 2 +- beacon-chain/cache/tracked_validators_test.go | 4 +- beacon-chain/core/altair/BUILD.bazel | 2 +- beacon-chain/core/altair/attestation.go | 22 +- beacon-chain/core/altair/attestation_test.go | 34 +- beacon-chain/core/altair/block.go | 18 +- beacon-chain/core/altair/block_test.go | 30 +- beacon-chain/core/altair/deposit.go | 14 +- beacon-chain/core/altair/deposit_fuzz_test.go | 10 +- beacon-chain/core/altair/deposit_test.go | 26 +- beacon-chain/core/altair/epoch_precompute.go | 14 +- .../core/altair/epoch_precompute_test.go | 16 +- beacon-chain/core/altair/epoch_spec.go | 8 +- beacon-chain/core/altair/epoch_spec_test.go | 24 +- beacon-chain/core/altair/reward.go | 10 +- beacon-chain/core/altair/reward_test.go | 14 +- beacon-chain/core/altair/sync_committee.go | 24 +- .../core/altair/sync_committee_test.go | 24 +- beacon-chain/core/altair/transition.go | 8 +- beacon-chain/core/altair/transition_test.go | 8 +- beacon-chain/core/altair/upgrade.go | 14 +- beacon-chain/core/altair/upgrade_test.go | 18 +- beacon-chain/core/blocks/BUILD.bazel | 2 +- beacon-chain/core/blocks/attestation.go | 26 +- .../blocks/attestation_regression_test.go | 14 +- beacon-chain/core/blocks/attestation_test.go | 32 +- beacon-chain/core/blocks/attester_slashing.go | 18 +- .../core/blocks/attester_slashing_test.go | 30 +- .../core/blocks/block_operations_fuzz_test.go | 18 +- .../core/blocks/block_regression_test.go | 20 +- beacon-chain/core/blocks/deposit.go | 16 +- beacon-chain/core/blocks/deposit_test.go | 20 +- beacon-chain/core/blocks/eth1_data.go | 6 +- beacon-chain/core/blocks/eth1_data_test.go | 20 +- beacon-chain/core/blocks/exit.go | 16 +- beacon-chain/core/blocks/exit_test.go | 30 +- beacon-chain/core/blocks/genesis.go | 16 +- beacon-chain/core/blocks/genesis_test.go | 6 +- beacon-chain/core/blocks/header.go | 14 +- beacon-chain/core/blocks/header_test.go | 26 +- beacon-chain/core/blocks/payload.go | 20 +- beacon-chain/core/blocks/payload_test.go | 30 +- beacon-chain/core/blocks/proposer_slashing.go | 16 +- .../proposer_slashing_regression_test.go | 8 +- .../core/blocks/proposer_slashing_test.go | 30 +- beacon-chain/core/blocks/randao.go | 12 +- beacon-chain/core/blocks/randao_test.go | 22 +- beacon-chain/core/blocks/signature.go | 20 +- beacon-chain/core/blocks/signature_test.go | 20 +- beacon-chain/core/blocks/withdrawals.go | 26 +- beacon-chain/core/blocks/withdrawals_test.go | 38 +- beacon-chain/core/capella/BUILD.bazel | 2 +- beacon-chain/core/capella/upgrade.go | 12 +- beacon-chain/core/capella/upgrade_test.go | 16 +- beacon-chain/core/deneb/BUILD.bazel | 2 +- beacon-chain/core/deneb/upgrade.go | 12 +- beacon-chain/core/deneb/upgrade_test.go | 14 +- 
beacon-chain/core/electra/BUILD.bazel | 2 +- beacon-chain/core/electra/attestation.go | 18 +- beacon-chain/core/electra/churn.go | 10 +- beacon-chain/core/electra/churn_test.go | 18 +- beacon-chain/core/electra/consolidations.go | 20 +- .../core/electra/consolidations_test.go | 20 +- .../core/electra/deposit_fuzz_test.go | 10 +- beacon-chain/core/electra/deposits.go | 28 +- beacon-chain/core/electra/deposits_test.go | 30 +- .../core/electra/effective_balance_updates.go | 6 +- .../electra/effective_balance_updates_test.go | 12 +- beacon-chain/core/electra/registry_updates.go | 12 +- .../core/electra/registry_updates_test.go | 20 +- beacon-chain/core/electra/transition.go | 16 +- .../core/electra/transition_no_verify_sig.go | 10 +- .../electra/transition_no_verify_sig_test.go | 12 +- beacon-chain/core/electra/transition_test.go | 16 +- beacon-chain/core/electra/upgrade.go | 18 +- beacon-chain/core/electra/upgrade_test.go | 20 +- beacon-chain/core/electra/validator.go | 10 +- beacon-chain/core/electra/validator_test.go | 14 +- beacon-chain/core/electra/withdrawals.go | 22 +- beacon-chain/core/electra/withdrawals_test.go | 20 +- beacon-chain/core/epoch/BUILD.bazel | 2 +- beacon-chain/core/epoch/epoch_processing.go | 22 +- .../core/epoch/epoch_processing_fuzz_test.go | 8 +- .../core/epoch/epoch_processing_test.go | 28 +- .../core/epoch/precompute/BUILD.bazel | 2 +- .../core/epoch/precompute/attestation.go | 20 +- .../core/epoch/precompute/attestation_test.go | 18 +- .../precompute/justification_finalization.go | 12 +- .../justification_finalization_test.go | 18 +- beacon-chain/core/epoch/precompute/new.go | 10 +- .../core/epoch/precompute/new_test.go | 12 +- .../core/epoch/precompute/reward_penalty.go | 12 +- .../epoch/precompute/reward_penalty_test.go | 24 +- .../core/epoch/precompute/slashing.go | 8 +- .../core/epoch/precompute/slashing_test.go | 12 +- beacon-chain/core/epoch/precompute/type.go | 2 +- beacon-chain/core/epoch/sortable_indices.go | 4 +- .../core/epoch/sortable_indices_test.go | 8 +- beacon-chain/core/execution/BUILD.bazel | 2 +- beacon-chain/core/execution/upgrade.go | 12 +- beacon-chain/core/execution/upgrade_test.go | 14 +- beacon-chain/core/feed/BUILD.bazel | 2 +- beacon-chain/core/feed/block/BUILD.bazel | 2 +- beacon-chain/core/feed/block/events.go | 2 +- beacon-chain/core/feed/block/notifier.go | 2 +- beacon-chain/core/feed/operation/BUILD.bazel | 2 +- beacon-chain/core/feed/operation/events.go | 8 +- beacon-chain/core/feed/operation/notifier.go | 2 +- beacon-chain/core/feed/state/BUILD.bazel | 2 +- beacon-chain/core/feed/state/events.go | 4 +- beacon-chain/core/feed/state/notifier.go | 2 +- beacon-chain/core/fulu/BUILD.bazel | 2 +- beacon-chain/core/fulu/transition.go | 12 +- beacon-chain/core/fulu/transition_test.go | 8 +- beacon-chain/core/fulu/upgrade.go | 18 +- beacon-chain/core/fulu/upgrade_test.go | 16 +- beacon-chain/core/helpers/BUILD.bazel | 2 +- beacon-chain/core/helpers/attestation.go | 10 +- beacon-chain/core/helpers/attestation_test.go | 20 +- beacon-chain/core/helpers/beacon_committee.go | 30 +- .../core/helpers/beacon_committee_test.go | 26 +- beacon-chain/core/helpers/block.go | 8 +- beacon-chain/core/helpers/block_test.go | 14 +- beacon-chain/core/helpers/genesis.go | 8 +- beacon-chain/core/helpers/legacy.go | 4 +- beacon-chain/core/helpers/legacy_test.go | 4 +- .../helpers/private_access_fuzz_noop_test.go | 2 +- .../core/helpers/private_access_test.go | 2 +- beacon-chain/core/helpers/randao.go | 12 +- beacon-chain/core/helpers/randao_test.go | 16 +- 
beacon-chain/core/helpers/ranges_test.go | 4 +- .../core/helpers/rewards_penalties.go | 12 +- .../core/helpers/rewards_penalties_test.go | 16 +- beacon-chain/core/helpers/shuffle.go | 10 +- beacon-chain/core/helpers/shuffle_test.go | 10 +- beacon-chain/core/helpers/sync_committee.go | 16 +- .../core/helpers/sync_committee_test.go | 18 +- beacon-chain/core/helpers/validator_churn.go | 4 +- .../core/helpers/validator_churn_test.go | 8 +- beacon-chain/core/helpers/validators.go | 26 +- beacon-chain/core/helpers/validators_test.go | 28 +- .../core/helpers/weak_subjectivity.go | 16 +- .../core/helpers/weak_subjectivity_test.go | 18 +- beacon-chain/core/peerdas/BUILD.bazel | 2 +- beacon-chain/core/peerdas/das_core.go | 6 +- beacon-chain/core/peerdas/das_core_test.go | 6 +- beacon-chain/core/peerdas/info_test.go | 4 +- beacon-chain/core/peerdas/p2p_interface.go | 10 +- .../core/peerdas/p2p_interface_test.go | 18 +- beacon-chain/core/peerdas/reconstruction.go | 12 +- .../core/peerdas/reconstruction_test.go | 16 +- beacon-chain/core/peerdas/utils_test.go | 2 +- beacon-chain/core/peerdas/validator.go | 14 +- beacon-chain/core/peerdas/validator_test.go | 18 +- beacon-chain/core/peerdas/verification.go | 6 +- .../core/peerdas/verification_test.go | 12 +- beacon-chain/core/signing/BUILD.bazel | 2 +- beacon-chain/core/signing/domain.go | 6 +- beacon-chain/core/signing/domain_test.go | 10 +- beacon-chain/core/signing/signing_root.go | 12 +- .../core/signing/signing_root_test.go | 24 +- beacon-chain/core/time/BUILD.bazel | 2 +- beacon-chain/core/time/slot_epoch.go | 10 +- beacon-chain/core/time/slot_epoch_test.go | 20 +- beacon-chain/core/transition/BUILD.bazel | 2 +- .../altair_transition_no_verify_sig_test.go | 32 +- ...bellatrix_transition_no_verify_sig_test.go | 36 +- .../core/transition/benchmarks_test.go | 20 +- .../core/transition/interop/BUILD.bazel | 2 +- .../transition/interop/write_block_to_disk.go | 6 +- .../transition/interop/write_state_to_disk.go | 6 +- .../core/transition/skip_slot_cache.go | 8 +- .../core/transition/skip_slot_cache_test.go | 18 +- .../core/transition/state-bellatrix.go | 20 +- beacon-chain/core/transition/state.go | 18 +- .../core/transition/state_fuzz_test.go | 6 +- beacon-chain/core/transition/state_test.go | 18 +- .../core/transition/stateutils/BUILD.bazel | 2 +- .../stateutils/validator_index_map.go | 8 +- .../stateutils/validator_index_map_test.go | 16 +- .../transition/trailing_slot_state_cache.go | 6 +- .../trailing_slot_state_cache_test.go | 10 +- beacon-chain/core/transition/transition.go | 40 +- .../core/transition/transition_fuzz_test.go | 12 +- .../transition/transition_no_verify_sig.go | 28 +- .../transition_no_verify_sig_test.go | 20 +- .../core/transition/transition_test.go | 38 +- beacon-chain/core/validators/BUILD.bazel | 2 +- beacon-chain/core/validators/slashing.go | 4 +- beacon-chain/core/validators/slashing_test.go | 4 +- beacon-chain/core/validators/validator.go | 18 +- .../core/validators/validator_test.go | 22 +- beacon-chain/das/BUILD.bazel | 2 +- beacon-chain/das/availability_blobs.go | 16 +- beacon-chain/das/availability_blobs_test.go | 18 +- beacon-chain/das/blob_cache.go | 10 +- beacon-chain/das/blob_cache_test.go | 16 +- beacon-chain/das/data_column_cache.go | 6 +- beacon-chain/das/data_column_cache_test.go | 10 +- beacon-chain/das/iface.go | 4 +- beacon-chain/das/mock.go | 4 +- beacon-chain/db/BUILD.bazel | 2 +- beacon-chain/db/alias.go | 2 +- beacon-chain/db/db.go | 2 +- beacon-chain/db/db_test.go | 2 +- beacon-chain/db/errors.go | 2 +- 
beacon-chain/db/filesystem/BUILD.bazel | 2 +- beacon-chain/db/filesystem/blob.go | 12 +- beacon-chain/db/filesystem/blob_test.go | 16 +- beacon-chain/db/filesystem/cache.go | 8 +- beacon-chain/db/filesystem/cache_test.go | 8 +- beacon-chain/db/filesystem/data_column.go | 18 +- .../db/filesystem/data_column_cache.go | 6 +- .../db/filesystem/data_column_cache_test.go | 8 +- .../db/filesystem/data_column_test.go | 12 +- beacon-chain/db/filesystem/iteration.go | 2 +- beacon-chain/db/filesystem/iteration_test.go | 10 +- beacon-chain/db/filesystem/layout.go | 8 +- beacon-chain/db/filesystem/layout_by_epoch.go | 4 +- beacon-chain/db/filesystem/layout_flat.go | 4 +- beacon-chain/db/filesystem/layout_test.go | 4 +- beacon-chain/db/filesystem/migration_test.go | 8 +- beacon-chain/db/filesystem/mock.go | 8 +- beacon-chain/db/filesystem/pruner.go | 2 +- beacon-chain/db/filesystem/pruner_test.go | 12 +- beacon-chain/db/filters/BUILD.bazel | 2 +- beacon-chain/db/filters/filter.go | 2 +- beacon-chain/db/filters/filter_test.go | 6 +- beacon-chain/db/iface/BUILD.bazel | 2 +- beacon-chain/db/iface/interface.go | 18 +- beacon-chain/db/kv/BUILD.bazel | 2 +- beacon-chain/db/kv/archived_point.go | 6 +- beacon-chain/db/kv/archived_point_test.go | 8 +- beacon-chain/db/kv/backfill.go | 4 +- beacon-chain/db/kv/backfill_test.go | 6 +- beacon-chain/db/kv/backup.go | 8 +- beacon-chain/db/kv/backup_test.go | 8 +- beacon-chain/db/kv/blocks.go | 22 +- beacon-chain/db/kv/blocks_test.go | 24 +- beacon-chain/db/kv/checkpoint.go | 10 +- beacon-chain/db/kv/checkpoint_test.go | 14 +- beacon-chain/db/kv/custody.go | 10 +- beacon-chain/db/kv/custody_test.go | 10 +- beacon-chain/db/kv/deposit_contract.go | 2 +- beacon-chain/db/kv/deposit_contract_test.go | 4 +- beacon-chain/db/kv/encoding.go | 4 +- beacon-chain/db/kv/encoding_test.go | 4 +- beacon-chain/db/kv/execution_chain.go | 6 +- beacon-chain/db/kv/execution_chain_test.go | 2 +- beacon-chain/db/kv/finalized_block_roots.go | 14 +- .../db/kv/finalized_block_roots_test.go | 20 +- beacon-chain/db/kv/genesis.go | 12 +- beacon-chain/db/kv/genesis_test.go | 14 +- beacon-chain/db/kv/init_test.go | 2 +- beacon-chain/db/kv/kv.go | 14 +- beacon-chain/db/kv/kv_test.go | 10 +- beacon-chain/db/kv/lightclient.go | 16 +- beacon-chain/db/kv/lightclient_test.go | 24 +- .../db/kv/migration_archived_index.go | 6 +- .../db/kv/migration_archived_index_test.go | 6 +- .../db/kv/migration_block_slot_index.go | 2 +- .../db/kv/migration_block_slot_index_test.go | 4 +- .../db/kv/migration_finalized_parent.go | 6 +- .../db/kv/migration_state_validators.go | 8 +- .../db/kv/migration_state_validators_test.go | 16 +- beacon-chain/db/kv/p2p.go | 4 +- beacon-chain/db/kv/p2p_test.go | 4 +- beacon-chain/db/kv/state.go | 24 +- beacon-chain/db/kv/state_summary.go | 6 +- beacon-chain/db/kv/state_summary_cache.go | 2 +- beacon-chain/db/kv/state_summary_test.go | 10 +- beacon-chain/db/kv/state_test.go | 28 +- beacon-chain/db/kv/utils.go | 4 +- beacon-chain/db/kv/utils_test.go | 6 +- beacon-chain/db/kv/validated_checkpoint.go | 6 +- .../db/kv/validated_checkpoint_test.go | 12 +- beacon-chain/db/kv/wss.go | 12 +- beacon-chain/db/kv/wss_test.go | 10 +- beacon-chain/db/pruner/BUILD.bazel | 2 +- beacon-chain/db/pruner/pruner.go | 10 +- beacon-chain/db/pruner/pruner_test.go | 18 +- beacon-chain/db/restore.go | 8 +- beacon-chain/db/restore_test.go | 14 +- beacon-chain/db/slasherkv/BUILD.bazel | 2 +- beacon-chain/db/slasherkv/kv.go | 6 +- beacon-chain/db/slasherkv/kv_test.go | 2 +- beacon-chain/db/slasherkv/migrate.go | 4 
+- beacon-chain/db/slasherkv/migrate_test.go | 6 +- beacon-chain/db/slasherkv/pruning.go | 4 +- beacon-chain/db/slasherkv/pruning_test.go | 12 +- beacon-chain/db/slasherkv/slasher.go | 14 +- beacon-chain/db/slasherkv/slasher_test.go | 14 +- beacon-chain/db/testing/BUILD.bazel | 2 +- beacon-chain/db/testing/setup_db.go | 8 +- beacon-chain/execution/BUILD.bazel | 2 +- beacon-chain/execution/block_cache.go | 4 +- beacon-chain/execution/block_cache_test.go | 8 +- beacon-chain/execution/block_reader.go | 8 +- beacon-chain/execution/block_reader_test.go | 16 +- beacon-chain/execution/deposit.go | 6 +- beacon-chain/execution/deposit_test.go | 28 +- beacon-chain/execution/engine_client.go | 32 +- .../execution/engine_client_fuzz_test.go | 6 +- beacon-chain/execution/engine_client_test.go | 36 +- beacon-chain/execution/init_test.go | 2 +- beacon-chain/execution/log_processing.go | 26 +- beacon-chain/execution/log_processing_test.go | 24 +- beacon-chain/execution/mock_test.go | 4 +- beacon-chain/execution/options.go | 16 +- beacon-chain/execution/payload_body.go | 10 +- beacon-chain/execution/payload_body_test.go | 20 +- beacon-chain/execution/prometheus.go | 2 +- beacon-chain/execution/prometheus_test.go | 2 +- beacon-chain/execution/rpc_connection.go | 10 +- beacon-chain/execution/service.go | 42 +- beacon-chain/execution/service_test.go | 44 +- beacon-chain/execution/testing/BUILD.bazel | 2 +- .../execution/testing/mock_engine_client.go | 18 +- .../execution/testing/mock_execution_chain.go | 12 +- .../execution/testing/mock_faulty_powchain.go | 10 +- beacon-chain/execution/types/BUILD.bazel | 2 +- beacon-chain/execution/types/eth1_types.go | 2 +- .../execution/types/eth1_types_test.go | 2 +- beacon-chain/forkchoice/BUILD.bazel | 2 +- .../forkchoice/doubly-linked-tree/BUILD.bazel | 2 +- .../doubly-linked-tree/ffg_update_test.go | 10 +- .../doubly-linked-tree/forkchoice.go | 26 +- .../doubly-linked-tree/forkchoice_test.go | 26 +- .../doubly-linked-tree/last_root.go | 4 +- .../doubly-linked-tree/last_root_test.go | 4 +- .../doubly-linked-tree/no_vote_test.go | 6 +- .../forkchoice/doubly-linked-tree/node.go | 8 +- .../doubly-linked-tree/node_test.go | 10 +- .../forkchoice/doubly-linked-tree/on_tick.go | 4 +- .../doubly-linked-tree/on_tick_test.go | 8 +- .../doubly-linked-tree/optimistic_sync.go | 2 +- .../optimistic_sync_test.go | 6 +- .../doubly-linked-tree/proposer_boost.go | 4 +- .../doubly-linked-tree/proposer_boost_test.go | 8 +- .../doubly-linked-tree/reorg_late_blocks.go | 4 +- .../reorg_late_blocks_test.go | 4 +- .../forkchoice/doubly-linked-tree/store.go | 14 +- .../doubly-linked-tree/store_test.go | 10 +- .../forkchoice/doubly-linked-tree/types.go | 8 +- .../unrealized_justification.go | 16 +- .../unrealized_justification_test.go | 10 +- .../doubly-linked-tree/vote_test.go | 6 +- beacon-chain/forkchoice/interfaces.go | 12 +- beacon-chain/forkchoice/ro.go | 6 +- beacon-chain/forkchoice/ro_test.go | 8 +- beacon-chain/forkchoice/types/BUILD.bazel | 2 +- beacon-chain/forkchoice/types/types.go | 8 +- beacon-chain/light-client/BUILD.bazel | 2 +- beacon-chain/light-client/cache.go | 4 +- beacon-chain/light-client/cache_test.go | 2 +- beacon-chain/light-client/helpers.go | 22 +- beacon-chain/light-client/lightclient.go | 24 +- beacon-chain/light-client/lightclient_test.go | 24 +- beacon-chain/light-client/store.go | 18 +- beacon-chain/light-client/store_test.go | 26 +- beacon-chain/monitor/BUILD.bazel | 2 +- beacon-chain/monitor/process_attestation.go | 22 +- .../monitor/process_attestation_test.go 
| 10 +- beacon-chain/monitor/process_block.go | 14 +- beacon-chain/monitor/process_block_test.go | 16 +- beacon-chain/monitor/process_exit.go | 4 +- beacon-chain/monitor/process_exit_test.go | 8 +- .../monitor/process_sync_committee.go | 8 +- .../monitor/process_sync_committee_test.go | 8 +- beacon-chain/monitor/service.go | 20 +- beacon-chain/monitor/service_test.go | 26 +- beacon-chain/node/BUILD.bazel | 2 +- beacon-chain/node/clear_db.go | 10 +- beacon-chain/node/config.go | 10 +- beacon-chain/node/config_test.go | 12 +- beacon-chain/node/node.go | 94 ++-- beacon-chain/node/node_test.go | 26 +- beacon-chain/node/options.go | 10 +- beacon-chain/node/registration/BUILD.bazel | 2 +- beacon-chain/node/registration/p2p.go | 4 +- beacon-chain/node/registration/p2p_test.go | 8 +- .../operations/attestations/BUILD.bazel | 2 +- .../attestations/attmap/BUILD.bazel | 2 +- .../operations/attestations/attmap/map.go | 4 +- .../operations/attestations/kv/BUILD.bazel | 2 +- .../operations/attestations/kv/aggregated.go | 14 +- .../attestations/kv/aggregated_test.go | 14 +- .../operations/attestations/kv/block.go | 4 +- .../operations/attestations/kv/block_test.go | 8 +- .../attestations/kv/forkchoice_test.go | 8 +- beacon-chain/operations/attestations/kv/kv.go | 8 +- .../operations/attestations/kv/seen_bits.go | 4 +- .../attestations/kv/seen_bits_test.go | 8 +- .../attestations/kv/unaggregated.go | 10 +- .../attestations/kv/unaggregated_test.go | 14 +- .../operations/attestations/mock/BUILD.bazel | 2 +- .../operations/attestations/mock/mock.go | 6 +- beacon-chain/operations/attestations/pool.go | 6 +- .../operations/attestations/pool_test.go | 2 +- .../attestations/prepare_forkchoice.go | 14 +- .../attestations/prepare_forkchoice_test.go | 12 +- .../operations/attestations/prune_expired.go | 6 +- .../attestations/prune_expired_test.go | 16 +- .../operations/attestations/service.go | 8 +- .../operations/attestations/service_test.go | 4 +- beacon-chain/operations/blstoexec/BUILD.bazel | 2 +- .../operations/blstoexec/mock/BUILD.bazel | 2 +- .../operations/blstoexec/mock/mock.go | 6 +- beacon-chain/operations/blstoexec/pool.go | 12 +- .../operations/blstoexec/pool_test.go | 24 +- beacon-chain/operations/slashings/BUILD.bazel | 2 +- .../operations/slashings/mock/BUILD.bazel | 2 +- .../operations/slashings/mock/mock.go | 4 +- beacon-chain/operations/slashings/pool.go | 20 +- beacon-chain/operations/slashings/service.go | 8 +- .../slashings/service_attester_test.go | 18 +- .../operations/slashings/service_new_test.go | 14 +- .../slashings/service_proposer_test.go | 14 +- .../operations/slashings/service_test.go | 4 +- beacon-chain/operations/slashings/types.go | 8 +- .../operations/synccommittee/BUILD.bazel | 2 +- .../operations/synccommittee/contribution.go | 6 +- .../synccommittee/contribution_test.go | 4 +- beacon-chain/operations/synccommittee/kv.go | 2 +- .../operations/synccommittee/message.go | 6 +- .../operations/synccommittee/message_test.go | 4 +- beacon-chain/operations/synccommittee/pool.go | 4 +- .../operations/voluntaryexits/BUILD.bazel | 2 +- .../voluntaryexits/mock/BUILD.bazel | 2 +- .../operations/voluntaryexits/mock/mock.go | 6 +- .../operations/voluntaryexits/pool.go | 14 +- .../operations/voluntaryexits/pool_test.go | 22 +- beacon-chain/p2p/BUILD.bazel | 2 +- beacon-chain/p2p/addr_factory_test.go | 4 +- beacon-chain/p2p/broadcaster.go | 26 +- beacon-chain/p2p/broadcaster_test.go | 42 +- beacon-chain/p2p/config.go | 6 +- beacon-chain/p2p/connection_gater_test.go | 16 +- 
beacon-chain/p2p/custody.go | 8 +- beacon-chain/p2p/custody_test.go | 20 +- beacon-chain/p2p/dial_relay_node.go | 2 +- beacon-chain/p2p/dial_relay_node_test.go | 4 +- beacon-chain/p2p/discovery.go | 16 +- beacon-chain/p2p/discovery_test.go | 34 +- beacon-chain/p2p/encoder/BUILD.bazel | 2 +- beacon-chain/p2p/encoder/snappy_test.go | 2 +- beacon-chain/p2p/encoder/ssz.go | 4 +- beacon-chain/p2p/encoder/ssz_test.go | 12 +- beacon-chain/p2p/encoder/varint_test.go | 4 +- beacon-chain/p2p/fork.go | 6 +- beacon-chain/p2p/fork_test.go | 10 +- beacon-chain/p2p/fork_watcher.go | 4 +- beacon-chain/p2p/gossip_scoring_params.go | 6 +- .../p2p/gossip_scoring_params_test.go | 12 +- beacon-chain/p2p/gossip_topic_mappings.go | 6 +- .../p2p/gossip_topic_mappings_test.go | 10 +- beacon-chain/p2p/handshake.go | 6 +- beacon-chain/p2p/interfaces.go | 16 +- beacon-chain/p2p/message_id.go | 12 +- beacon-chain/p2p/message_id_test.go | 14 +- beacon-chain/p2p/monitoring.go | 2 +- beacon-chain/p2p/options.go | 6 +- beacon-chain/p2p/options_test.go | 12 +- beacon-chain/p2p/parameter_test.go | 4 +- beacon-chain/p2p/peers/BUILD.bazel | 2 +- beacon-chain/p2p/peers/assigner.go | 6 +- beacon-chain/p2p/peers/assigner_test.go | 2 +- beacon-chain/p2p/peers/peerdata/BUILD.bazel | 2 +- beacon-chain/p2p/peers/peerdata/store.go | 4 +- beacon-chain/p2p/peers/peerdata/store_test.go | 6 +- beacon-chain/p2p/peers/peers_test.go | 4 +- beacon-chain/p2p/peers/scorers/BUILD.bazel | 2 +- .../p2p/peers/scorers/bad_responses.go | 2 +- .../p2p/peers/scorers/bad_responses_test.go | 10 +- .../p2p/peers/scorers/block_providers.go | 8 +- .../p2p/peers/scorers/block_providers_test.go | 14 +- .../p2p/peers/scorers/gossip_scorer.go | 4 +- .../p2p/peers/scorers/gossip_scorer_test.go | 8 +- beacon-chain/p2p/peers/scorers/peer_status.go | 8 +- .../p2p/peers/scorers/peer_status_test.go | 16 +- .../p2p/peers/scorers/scorers_test.go | 6 +- beacon-chain/p2p/peers/scorers/service.go | 4 +- .../p2p/peers/scorers/service_test.go | 8 +- beacon-chain/p2p/peers/status.go | 22 +- beacon-chain/p2p/peers/status_test.go | 22 +- beacon-chain/p2p/pubsub.go | 10 +- beacon-chain/p2p/pubsub_filter.go | 4 +- beacon-chain/p2p/pubsub_filter_test.go | 14 +- beacon-chain/p2p/pubsub_fuzz_test.go | 4 +- beacon-chain/p2p/pubsub_test.go | 14 +- beacon-chain/p2p/rpc_topic_mappings.go | 10 +- beacon-chain/p2p/rpc_topic_mappings_test.go | 14 +- beacon-chain/p2p/sender.go | 4 +- beacon-chain/p2p/sender_test.go | 12 +- beacon-chain/p2p/service.go | 30 +- beacon-chain/p2p/service_test.go | 24 +- beacon-chain/p2p/subnets.go | 24 +- beacon-chain/p2p/subnets_test.go | 24 +- beacon-chain/p2p/testing/BUILD.bazel | 2 +- beacon-chain/p2p/testing/fuzz_p2p.go | 16 +- beacon-chain/p2p/testing/mock_broadcaster.go | 6 +- .../p2p/testing/mock_metadataprovider.go | 2 +- beacon-chain/p2p/testing/mock_peermanager.go | 2 +- .../p2p/testing/mock_peersprovider.go | 6 +- beacon-chain/p2p/testing/p2p.go | 24 +- beacon-chain/p2p/topics.go | 6 +- beacon-chain/p2p/topics_test.go | 6 +- beacon-chain/p2p/types/BUILD.bazel | 2 +- beacon-chain/p2p/types/object_mapping.go | 18 +- beacon-chain/p2p/types/object_mapping_test.go | 10 +- beacon-chain/p2p/types/rpc_goodbye_codes.go | 2 +- beacon-chain/p2p/types/types.go | 6 +- beacon-chain/p2p/types/types_test.go | 14 +- beacon-chain/p2p/utils.go | 16 +- beacon-chain/p2p/utils_test.go | 8 +- beacon-chain/p2p/watch_peers.go | 2 +- beacon-chain/rpc/BUILD.bazel | 2 +- beacon-chain/rpc/core/BUILD.bazel | 2 +- beacon-chain/rpc/core/beacon.go | 10 +- 
beacon-chain/rpc/core/service.go | 16 +- beacon-chain/rpc/core/validator.go | 46 +- beacon-chain/rpc/core/validator_test.go | 20 +- beacon-chain/rpc/endpoints.go | 38 +- beacon-chain/rpc/endpoints_test.go | 4 +- beacon-chain/rpc/eth/beacon/BUILD.bazel | 2 +- beacon-chain/rpc/eth/beacon/handlers.go | 40 +- .../eth/beacon/handlers_equivocation_test.go | 8 +- beacon-chain/rpc/eth/beacon/handlers_pool.go | 40 +- .../rpc/eth/beacon/handlers_pool_test.go | 60 +-- beacon-chain/rpc/eth/beacon/handlers_state.go | 26 +- .../rpc/eth/beacon/handlers_state_test.go | 26 +- beacon-chain/rpc/eth/beacon/handlers_test.go | 56 +-- .../rpc/eth/beacon/handlers_validator.go | 26 +- .../eth/beacon/handlers_validators_test.go | 28 +- beacon-chain/rpc/eth/beacon/init_test.go | 2 +- beacon-chain/rpc/eth/beacon/server.go | 32 +- beacon-chain/rpc/eth/blob/BUILD.bazel | 2 +- beacon-chain/rpc/eth/blob/handlers.go | 24 +- beacon-chain/rpc/eth/blob/handlers_test.go | 36 +- beacon-chain/rpc/eth/blob/server.go | 4 +- beacon-chain/rpc/eth/config/BUILD.bazel | 2 +- beacon-chain/rpc/eth/config/handlers.go | 8 +- beacon-chain/rpc/eth/config/handlers_test.go | 10 +- beacon-chain/rpc/eth/debug/BUILD.bazel | 2 +- beacon-chain/rpc/eth/debug/handlers.go | 24 +- beacon-chain/rpc/eth/debug/handlers_test.go | 34 +- beacon-chain/rpc/eth/debug/server.go | 6 +- beacon-chain/rpc/eth/events/BUILD.bazel | 2 +- beacon-chain/rpc/eth/events/events.go | 38 +- beacon-chain/rpc/eth/events/events_test.go | 36 +- beacon-chain/rpc/eth/events/http_test.go | 2 +- beacon-chain/rpc/eth/events/server.go | 10 +- beacon-chain/rpc/eth/helpers/BUILD.bazel | 2 +- .../rpc/eth/helpers/error_handling.go | 12 +- beacon-chain/rpc/eth/helpers/sync.go | 14 +- beacon-chain/rpc/eth/helpers/sync_test.go | 34 +- .../rpc/eth/helpers/validator_status.go | 8 +- .../rpc/eth/helpers/validator_status_test.go | 16 +- beacon-chain/rpc/eth/light-client/BUILD.bazel | 2 +- beacon-chain/rpc/eth/light-client/handlers.go | 20 +- .../rpc/eth/light-client/handlers_test.go | 38 +- beacon-chain/rpc/eth/light-client/server.go | 4 +- beacon-chain/rpc/eth/node/BUILD.bazel | 2 +- beacon-chain/rpc/eth/node/handlers.go | 18 +- beacon-chain/rpc/eth/node/handlers_peers.go | 16 +- .../rpc/eth/node/handlers_peers_test.go | 14 +- beacon-chain/rpc/eth/node/handlers_test.go | 30 +- beacon-chain/rpc/eth/node/server.go | 10 +- beacon-chain/rpc/eth/rewards/BUILD.bazel | 2 +- beacon-chain/rpc/eth/rewards/handlers.go | 26 +- beacon-chain/rpc/eth/rewards/handlers_test.go | 46 +- beacon-chain/rpc/eth/rewards/server.go | 4 +- beacon-chain/rpc/eth/rewards/service.go | 24 +- beacon-chain/rpc/eth/rewards/service_test.go | 12 +- .../rpc/eth/rewards/testing/BUILD.bazel | 2 +- beacon-chain/rpc/eth/rewards/testing/mock.go | 8 +- beacon-chain/rpc/eth/shared/BUILD.bazel | 2 +- beacon-chain/rpc/eth/shared/errors.go | 8 +- beacon-chain/rpc/eth/shared/errors_test.go | 6 +- beacon-chain/rpc/eth/shared/request.go | 8 +- .../rpc/eth/shared/testing/BUILD.bazel | 2 +- beacon-chain/rpc/eth/validator/BUILD.bazel | 2 +- beacon-chain/rpc/eth/validator/handlers.go | 48 +- .../rpc/eth/validator/handlers_block.go | 26 +- .../rpc/eth/validator/handlers_block_test.go | 22 +- .../rpc/eth/validator/handlers_test.go | 60 +-- beacon-chain/rpc/eth/validator/server.go | 26 +- beacon-chain/rpc/lookup/BUILD.bazel | 2 +- beacon-chain/rpc/lookup/blocker.go | 28 +- beacon-chain/rpc/lookup/blocker_test.go | 36 +- beacon-chain/rpc/lookup/stater.go | 20 +- beacon-chain/rpc/lookup/stater_test.go | 26 +- beacon-chain/rpc/options/BUILD.bazel | 2 +- 
beacon-chain/rpc/prysm/beacon/BUILD.bazel | 2 +- beacon-chain/rpc/prysm/beacon/handlers.go | 24 +- .../rpc/prysm/beacon/handlers_test.go | 44 +- beacon-chain/rpc/prysm/beacon/server.go | 14 +- beacon-chain/rpc/prysm/beacon/ssz_query.go | 18 +- .../rpc/prysm/beacon/ssz_query_test.go | 26 +- .../rpc/prysm/beacon/validator_count.go | 22 +- .../rpc/prysm/beacon/validator_count_test.go | 22 +- beacon-chain/rpc/prysm/node/BUILD.bazel | 2 +- beacon-chain/rpc/prysm/node/handlers.go | 14 +- beacon-chain/rpc/prysm/node/handlers_test.go | 14 +- beacon-chain/rpc/prysm/node/server.go | 10 +- beacon-chain/rpc/prysm/testing/BUILD.bazel | 2 +- .../rpc/prysm/v1alpha1/beacon/BUILD.bazel | 2 +- .../rpc/prysm/v1alpha1/beacon/assignments.go | 14 +- .../prysm/v1alpha1/beacon/assignments_test.go | 28 +- .../rpc/prysm/v1alpha1/beacon/attestations.go | 26 +- .../v1alpha1/beacon/attestations_test.go | 40 +- .../rpc/prysm/v1alpha1/beacon/beacon_test.go | 4 +- .../rpc/prysm/v1alpha1/beacon/blocks.go | 16 +- .../rpc/prysm/v1alpha1/beacon/blocks_test.go | 32 +- .../rpc/prysm/v1alpha1/beacon/committees.go | 14 +- .../prysm/v1alpha1/beacon/committees_test.go | 34 +- .../rpc/prysm/v1alpha1/beacon/config.go | 4 +- .../rpc/prysm/v1alpha1/beacon/config_test.go | 6 +- .../rpc/prysm/v1alpha1/beacon/init_test.go | 2 +- .../rpc/prysm/v1alpha1/beacon/server.go | 28 +- .../rpc/prysm/v1alpha1/beacon/slashings.go | 8 +- .../prysm/v1alpha1/beacon/slashings_test.go | 18 +- .../rpc/prysm/v1alpha1/beacon/validators.go | 26 +- .../prysm/v1alpha1/beacon/validators_test.go | 58 +-- .../rpc/prysm/v1alpha1/debug/BUILD.bazel | 2 +- .../rpc/prysm/v1alpha1/debug/block.go | 16 +- .../rpc/prysm/v1alpha1/debug/block_test.go | 22 +- beacon-chain/rpc/prysm/v1alpha1/debug/p2p.go | 4 +- .../rpc/prysm/v1alpha1/debug/p2p_test.go | 8 +- .../rpc/prysm/v1alpha1/debug/server.go | 10 +- .../rpc/prysm/v1alpha1/debug/state.go | 4 +- .../rpc/prysm/v1alpha1/debug/state_test.go | 20 +- .../rpc/prysm/v1alpha1/node/BUILD.bazel | 2 +- .../rpc/prysm/v1alpha1/node/server.go | 18 +- .../rpc/prysm/v1alpha1/node/server_test.go | 24 +- .../rpc/prysm/v1alpha1/validator/BUILD.bazel | 2 +- .../prysm/v1alpha1/validator/aggregator.go | 20 +- .../v1alpha1/validator/aggregator_test.go | 38 +- .../rpc/prysm/v1alpha1/validator/attester.go | 26 +- .../validator/attester_mainnet_test.go | 20 +- .../prysm/v1alpha1/validator/attester_test.go | 36 +- .../rpc/prysm/v1alpha1/validator/blocks.go | 16 +- .../prysm/v1alpha1/validator/blocks_test.go | 26 +- .../validator/construct_generic_block.go | 10 +- .../validator/construct_generic_block_test.go | 14 +- .../rpc/prysm/v1alpha1/validator/duties.go | 18 +- .../prysm/v1alpha1/validator/duties_test.go | 34 +- .../rpc/prysm/v1alpha1/validator/duties_v2.go | 20 +- .../v1alpha1/validator/duties_v2_test.go | 38 +- .../rpc/prysm/v1alpha1/validator/exit.go | 10 +- .../rpc/prysm/v1alpha1/validator/exit_test.go | 32 +- .../rpc/prysm/v1alpha1/validator/proposer.go | 44 +- .../v1alpha1/validator/proposer_altair.go | 26 +- .../validator/proposer_altair_test.go | 26 +- .../validator/proposer_attestations.go | 28 +- .../proposer_attestations_electra.go | 6 +- .../proposer_attestations_electra_test.go | 14 +- .../validator/proposer_attestations_test.go | 28 +- .../v1alpha1/validator/proposer_bellatrix.go | 28 +- .../validator/proposer_bellatrix_test.go | 46 +- .../v1alpha1/validator/proposer_builder.go | 12 +- .../validator/proposer_builder_test.go | 30 +- .../v1alpha1/validator/proposer_capella.go | 8 +- .../v1alpha1/validator/proposer_deneb.go | 8 +- 
.../validator/proposer_deneb_bench_test.go | 14 +- .../v1alpha1/validator/proposer_deneb_test.go | 10 +- .../v1alpha1/validator/proposer_deposits.go | 18 +- .../validator/proposer_deposits_test.go | 22 +- .../validator/proposer_empty_block.go | 12 +- .../validator/proposer_empty_block_test.go | 12 +- .../v1alpha1/validator/proposer_eth1data.go | 22 +- .../validator/proposer_execution_payload.go | 34 +- .../proposer_execution_payload_test.go | 26 +- .../v1alpha1/validator/proposer_exits.go | 6 +- .../v1alpha1/validator/proposer_exits_test.go | 12 +- .../v1alpha1/validator/proposer_slashings.go | 10 +- .../validator/proposer_slashings_test.go | 12 +- .../validator/proposer_sync_aggregate.go | 2 +- .../validator/proposer_sync_aggregate_test.go | 6 +- .../prysm/v1alpha1/validator/proposer_test.go | 86 ++-- .../validator/proposer_utils_bench_test.go | 8 +- .../rpc/prysm/v1alpha1/validator/server.go | 52 +-- .../v1alpha1/validator/server_mainnet_test.go | 28 +- .../prysm/v1alpha1/validator/server_test.go | 34 +- .../rpc/prysm/v1alpha1/validator/status.go | 28 +- .../v1alpha1/validator/status_mainnet_test.go | 24 +- .../prysm/v1alpha1/validator/status_test.go | 36 +- .../v1alpha1/validator/sync_committee.go | 6 +- .../v1alpha1/validator/sync_committee_test.go | 30 +- .../rpc/prysm/v1alpha1/validator/unblinder.go | 12 +- .../v1alpha1/validator/unblinder_test.go | 10 +- .../v1alpha1/validator/validator_test.go | 2 +- beacon-chain/rpc/prysm/validator/BUILD.bazel | 2 +- beacon-chain/rpc/prysm/validator/handlers.go | 14 +- .../rpc/prysm/validator/handlers_test.go | 50 +-- beacon-chain/rpc/prysm/validator/server.go | 8 +- .../prysm/validator/validator_performance.go | 10 +- .../validator/validator_performance_test.go | 28 +- beacon-chain/rpc/service.go | 64 +-- beacon-chain/rpc/service_test.go | 12 +- beacon-chain/rpc/testutil/BUILD.bazel | 2 +- beacon-chain/rpc/testutil/db.go | 16 +- beacon-chain/rpc/testutil/mock_blocker.go | 12 +- .../rpc/testutil/mock_genesis_timefetcher.go | 4 +- beacon-chain/rpc/testutil/mock_sidecars.go | 2 +- beacon-chain/rpc/testutil/mock_stater.go | 6 +- beacon-chain/slasher/BUILD.bazel | 2 +- beacon-chain/slasher/chunks.go | 10 +- beacon-chain/slasher/chunks_test.go | 14 +- beacon-chain/slasher/detect_attestations.go | 12 +- .../slasher/detect_attestations_test.go | 36 +- beacon-chain/slasher/detect_blocks.go | 6 +- beacon-chain/slasher/detect_blocks_test.go | 30 +- beacon-chain/slasher/helpers.go | 16 +- beacon-chain/slasher/helpers_test.go | 14 +- beacon-chain/slasher/params.go | 2 +- beacon-chain/slasher/params_test.go | 4 +- beacon-chain/slasher/process_slashings.go | 8 +- .../slasher/process_slashings_test.go | 26 +- beacon-chain/slasher/queue.go | 2 +- beacon-chain/slasher/queue_test.go | 8 +- beacon-chain/slasher/receive.go | 10 +- beacon-chain/slasher/receive_test.go | 26 +- beacon-chain/slasher/service.go | 26 +- beacon-chain/slasher/service_test.go | 18 +- beacon-chain/slasher/types/BUILD.bazel | 2 +- beacon-chain/slasher/types/types.go | 4 +- beacon-chain/startup/BUILD.bazel | 2 +- beacon-chain/startup/clock.go | 4 +- beacon-chain/startup/clock_test.go | 6 +- beacon-chain/startup/synchronizer_test.go | 4 +- beacon-chain/startup/testing.go | 4 +- beacon-chain/state/BUILD.bazel | 2 +- beacon-chain/state/fieldtrie/BUILD.bazel | 2 +- beacon-chain/state/fieldtrie/field_trie.go | 8 +- .../state/fieldtrie/field_trie_helpers.go | 12 +- .../state/fieldtrie/field_trie_test.go | 22 +- beacon-chain/state/fieldtrie/helpers_test.go | 18 +- beacon-chain/state/interfaces.go | 14 
+- beacon-chain/state/state-native/BUILD.bazel | 2 +- .../state/state-native/beacon_state.go | 14 +- .../state-native/custom-types/BUILD.bazel | 2 +- .../state-native/custom-types/block_roots.go | 2 +- .../custom-types/block_roots_test.go | 4 +- .../custom-types/historical_roots_test.go | 2 +- .../state-native/custom-types/randao_mixes.go | 2 +- .../custom-types/randao_mixes_test.go | 4 +- .../state-native/custom-types/state_roots.go | 2 +- .../custom-types/state_roots_test.go | 4 +- .../state/state-native/getters_attestation.go | 4 +- .../state-native/getters_attestation_test.go | 4 +- .../state/state-native/getters_block.go | 4 +- .../state/state-native/getters_block_test.go | 6 +- .../state/state-native/getters_checkpoint.go | 4 +- .../state-native/getters_checkpoint_test.go | 6 +- .../state-native/getters_consolidation.go | 6 +- .../getters_consolidation_test.go | 8 +- .../state-native/getters_deposit_requests.go | 2 +- .../getters_deposit_requests_test.go | 8 +- .../state/state-native/getters_deposits.go | 6 +- .../state-native/getters_deposits_test.go | 8 +- .../state/state-native/getters_eth1.go | 2 +- .../state/state-native/getters_exit.go | 4 +- .../state/state-native/getters_exit_test.go | 10 +- .../state/state-native/getters_misc.go | 6 +- .../state-native/getters_participation.go | 8 +- .../getters_participation_test.go | 6 +- .../state-native/getters_payload_header.go | 6 +- .../getters_proposer_lookahead.go | 4 +- .../state/state-native/getters_randao.go | 2 +- .../getters_setters_lookahead_test.go | 10 +- .../state/state-native/getters_state.go | 6 +- .../state-native/getters_sync_committee.go | 6 +- .../state/state-native/getters_test.go | 6 +- .../state/state-native/getters_validator.go | 14 +- .../state-native/getters_validator_test.go | 16 +- .../state/state-native/getters_withdrawal.go | 18 +- .../state-native/getters_withdrawal_test.go | 18 +- beacon-chain/state/state-native/hasher.go | 16 +- .../state/state-native/hasher_test.go | 18 +- .../state/state-native/multi_value_slices.go | 10 +- .../state/state-native/mvslice_fuzz_test.go | 6 +- beacon-chain/state/state-native/proofs.go | 8 +- .../state/state-native/proofs_test.go | 8 +- .../state/state-native/readonly_validator.go | 10 +- .../state-native/readonly_validator_test.go | 12 +- .../state/state-native/references_test.go | 12 +- .../state/state-native/setters_attestation.go | 10 +- .../state-native/setters_attestation_test.go | 12 +- .../state/state-native/setters_block.go | 4 +- .../state/state-native/setters_checkpoint.go | 4 +- .../state/state-native/setters_churn.go | 10 +- .../state/state-native/setters_churn_test.go | 16 +- .../state-native/setters_consolidation.go | 10 +- .../setters_consolidation_test.go | 8 +- .../state-native/setters_deposit_requests.go | 4 +- .../setters_deposit_requests_test.go | 8 +- .../state/state-native/setters_deposits.go | 10 +- .../state-native/setters_deposits_test.go | 8 +- .../state/state-native/setters_eth1.go | 6 +- .../state/state-native/setters_eth1_test.go | 8 +- .../state/state-native/setters_misc.go | 18 +- .../state/state-native/setters_misc_test.go | 10 +- .../state-native/setters_participation.go | 8 +- .../setters_participation_test.go | 6 +- .../state-native/setters_payload_header.go | 12 +- .../setters_payload_header_test.go | 12 +- .../setters_proposer_lookahead.go | 10 +- .../state/state-native/setters_randao.go | 2 +- .../state/state-native/setters_state.go | 2 +- .../state-native/setters_sync_committee.go | 6 +- .../state/state-native/setters_validator.go | 
14 +- .../state-native/setters_validator_test.go | 6 +- .../state/state-native/setters_withdrawal.go | 10 +- .../state-native/setters_withdrawal_test.go | 10 +- .../state/state-native/spec_parameters.go | 4 +- .../state/state-native/state_fuzz_test.go | 16 +- beacon-chain/state/state-native/state_test.go | 20 +- beacon-chain/state/state-native/state_trie.go | 30 +- .../state/state-native/state_trie_test.go | 16 +- beacon-chain/state/state-native/types.go | 6 +- .../state/state-native/types/BUILD.bazel | 2 +- .../state/state-native/types/types.go | 2 +- beacon-chain/state/state-native/types_test.go | 16 +- beacon-chain/state/stategen/BUILD.bazel | 2 +- beacon-chain/state/stategen/cacher.go | 2 +- .../stategen/epoch_boundary_state_cache.go | 4 +- .../epoch_boundary_state_cache_test.go | 8 +- beacon-chain/state/stategen/getter.go | 18 +- beacon-chain/state/stategen/getter_test.go | 26 +- beacon-chain/state/stategen/history.go | 14 +- beacon-chain/state/stategen/history_test.go | 10 +- .../state/stategen/hot_state_cache.go | 4 +- .../state/stategen/hot_state_cache_test.go | 10 +- beacon-chain/state/stategen/init_test.go | 2 +- beacon-chain/state/stategen/migrate.go | 6 +- beacon-chain/state/stategen/migrate_test.go | 18 +- beacon-chain/state/stategen/mock/BUILD.bazel | 2 +- beacon-chain/state/stategen/mock/mock.go | 4 +- beacon-chain/state/stategen/mock/replayer.go | 6 +- beacon-chain/state/stategen/mock_test.go | 22 +- beacon-chain/state/stategen/replay.go | 14 +- beacon-chain/state/stategen/replay_test.go | 34 +- beacon-chain/state/stategen/replayer.go | 8 +- beacon-chain/state/stategen/replayer_test.go | 10 +- beacon-chain/state/stategen/service.go | 22 +- beacon-chain/state/stategen/service_test.go | 14 +- beacon-chain/state/stategen/setter.go | 14 +- beacon-chain/state/stategen/setter_test.go | 14 +- beacon-chain/state/stateutil/BUILD.bazel | 2 +- .../state/stateutil/benchmark_test.go | 6 +- .../state/stateutil/block_header_root.go | 6 +- beacon-chain/state/stateutil/eth1_root.go | 8 +- .../state/stateutil/field_root_attestation.go | 6 +- .../state/stateutil/field_root_eth1.go | 2 +- .../state/stateutil/field_root_test.go | 2 +- .../state/stateutil/field_root_validator.go | 8 +- .../stateutil/field_root_validator_test.go | 8 +- .../state/stateutil/field_root_vector.go | 2 +- .../stateutil/historical_summaries_root.go | 6 +- .../state/stateutil/participation_bit_root.go | 4 +- .../stateutil/pending_attestation_root.go | 8 +- .../stateutil/pending_consolidations_root.go | 6 +- .../state/stateutil/pending_deposits_root.go | 6 +- .../pending_partial_withdrawals_root.go | 6 +- .../stateutil/proposer_lookahead_root.go | 4 +- .../stateutil/proposer_lookahead_root_test.go | 6 +- .../state/stateutil/state_root_test.go | 12 +- .../state/stateutil/sync_committee.root.go | 6 +- beacon-chain/state/stateutil/trie_helpers.go | 10 +- .../state/stateutil/trie_helpers_test.go | 18 +- .../stateutil/unrealized_justification.go | 6 +- .../unrealized_justification_test.go | 8 +- .../state/stateutil/validator_map_handler.go | 8 +- .../state/stateutil/validator_reader.go | 4 +- .../state/stateutil/validator_root.go | 8 +- .../state/stateutil/validator_root_test.go | 2 +- beacon-chain/state/testing/BUILD.bazel | 2 +- beacon-chain/state/testing/generators.go | 16 +- beacon-chain/state/testing/getters.go | 12 +- beacon-chain/state/testing/getters_block.go | 10 +- .../state/testing/getters_checkpoint.go | 10 +- .../state/testing/getters_validator.go | 6 +- beacon-chain/sync/BUILD.bazel | 2 +- 
beacon-chain/sync/backfill/BUILD.bazel | 2 +- beacon-chain/sync/backfill/batch.go | 10 +- beacon-chain/sync/backfill/batch_test.go | 4 +- beacon-chain/sync/backfill/batcher.go | 2 +- beacon-chain/sync/backfill/batcher_test.go | 4 +- beacon-chain/sync/backfill/blobs.go | 16 +- beacon-chain/sync/backfill/blobs_test.go | 16 +- .../sync/backfill/coverage/BUILD.bazel | 2 +- .../sync/backfill/coverage/coverage.go | 2 +- beacon-chain/sync/backfill/metrics.go | 6 +- beacon-chain/sync/backfill/pool.go | 14 +- beacon-chain/sync/backfill/pool_test.go | 18 +- beacon-chain/sync/backfill/service.go | 24 +- beacon-chain/sync/backfill/service_test.go | 22 +- beacon-chain/sync/backfill/status.go | 16 +- beacon-chain/sync/backfill/status_test.go | 20 +- beacon-chain/sync/backfill/verify.go | 20 +- beacon-chain/sync/backfill/verify_test.go | 22 +- beacon-chain/sync/backfill/worker.go | 10 +- beacon-chain/sync/batch_verifier.go | 12 +- beacon-chain/sync/batch_verifier_test.go | 8 +- beacon-chain/sync/blobs_test.go | 42 +- beacon-chain/sync/block_batcher.go | 8 +- beacon-chain/sync/block_batcher_test.go | 10 +- beacon-chain/sync/checkpoint/BUILD.bazel | 2 +- beacon-chain/sync/checkpoint/api.go | 22 +- beacon-chain/sync/checkpoint/api_test.go | 18 +- beacon-chain/sync/checkpoint/file.go | 6 +- .../sync/checkpoint/weak-subjectivity.go | 14 +- .../sync/checkpoint/weak-subjectivity_test.go | 26 +- beacon-chain/sync/context.go | 4 +- beacon-chain/sync/context_test.go | 8 +- beacon-chain/sync/custody.go | 10 +- beacon-chain/sync/custody_test.go | 24 +- beacon-chain/sync/data_column_sidecars.go | 28 +- .../sync/data_column_sidecars_test.go | 38 +- beacon-chain/sync/data_columns_reconstruct.go | 12 +- .../sync/data_columns_reconstruct_test.go | 16 +- beacon-chain/sync/deadlines.go | 2 +- beacon-chain/sync/decode_pubsub.go | 14 +- beacon-chain/sync/decode_pubsub_test.go | 30 +- beacon-chain/sync/error.go | 8 +- beacon-chain/sync/error_test.go | 8 +- beacon-chain/sync/fork_watcher.go | 8 +- beacon-chain/sync/fork_watcher_test.go | 22 +- beacon-chain/sync/initial-sync/BUILD.bazel | 2 +- .../sync/initial-sync/blocks_fetcher.go | 46 +- .../sync/initial-sync/blocks_fetcher_peers.go | 12 +- .../initial-sync/blocks_fetcher_peers_test.go | 14 +- .../sync/initial-sync/blocks_fetcher_test.go | 44 +- .../sync/initial-sync/blocks_fetcher_utils.go | 18 +- .../initial-sync/blocks_fetcher_utils_test.go | 34 +- .../sync/initial-sync/blocks_queue.go | 18 +- .../sync/initial-sync/blocks_queue_test.go | 38 +- .../sync/initial-sync/blocks_queue_utils.go | 2 +- .../sync/initial-sync/downscore_test.go | 20 +- beacon-chain/sync/initial-sync/fsm.go | 6 +- .../sync/initial-sync/fsm_benchmark_test.go | 2 +- beacon-chain/sync/initial-sync/fsm_test.go | 6 +- .../sync/initial-sync/initial_sync_test.go | 42 +- beacon-chain/sync/initial-sync/round_robin.go | 18 +- .../sync/initial-sync/round_robin_test.go | 32 +- beacon-chain/sync/initial-sync/service.go | 46 +- .../sync/initial-sync/service_test.go | 50 +-- .../sync/initial-sync/testing/BUILD.bazel | 2 +- beacon-chain/sync/kzg_batch_verifier_test.go | 12 +- beacon-chain/sync/metrics.go | 12 +- beacon-chain/sync/options.go | 40 +- .../sync/pending_attestations_queue.go | 32 +- .../pending_attestations_queue_bucket_test.go | 12 +- .../sync/pending_attestations_queue_test.go | 48 +- beacon-chain/sync/pending_blocks_queue.go | 28 +- .../sync/pending_blocks_queue_test.go | 38 +- beacon-chain/sync/rate_limiter.go | 8 +- beacon-chain/sync/rate_limiter_test.go | 12 +- beacon-chain/sync/rpc.go | 14 +- 
.../sync/rpc_beacon_blocks_by_range.go | 20 +- .../sync/rpc_beacon_blocks_by_range_test.go | 44 +- .../sync/rpc_beacon_blocks_by_root.go | 28 +- .../sync/rpc_beacon_blocks_by_root_test.go | 44 +- .../sync/rpc_blob_sidecars_by_range.go | 18 +- .../sync/rpc_blob_sidecars_by_range_test.go | 18 +- .../sync/rpc_blob_sidecars_by_root.go | 22 +- .../sync/rpc_blob_sidecars_by_root_test.go | 22 +- beacon-chain/sync/rpc_chunked_response.go | 18 +- .../sync/rpc_data_column_sidecars_by_range.go | 14 +- .../rpc_data_column_sidecars_by_range_test.go | 26 +- .../sync/rpc_data_column_sidecars_by_root.go | 20 +- .../rpc_data_column_sidecars_by_root_test.go | 30 +- beacon-chain/sync/rpc_goodbye.go | 10 +- beacon-chain/sync/rpc_goodbye_test.go | 22 +- beacon-chain/sync/rpc_handler_test.go | 6 +- beacon-chain/sync/rpc_light_client.go | 16 +- beacon-chain/sync/rpc_light_client_test.go | 34 +- beacon-chain/sync/rpc_metadata.go | 16 +- beacon-chain/sync/rpc_metadata_test.go | 28 +- beacon-chain/sync/rpc_ping.go | 10 +- beacon-chain/sync/rpc_ping_test.go | 26 +- beacon-chain/sync/rpc_send_request.go | 30 +- beacon-chain/sync/rpc_send_request_test.go | 36 +- beacon-chain/sync/rpc_status.go | 24 +- beacon-chain/sync/rpc_status_test.go | 52 +-- beacon-chain/sync/rpc_test.go | 16 +- beacon-chain/sync/service.go | 64 +-- beacon-chain/sync/service_test.go | 36 +- beacon-chain/sync/slot_aware_cache.go | 2 +- beacon-chain/sync/slot_aware_cache_test.go | 4 +- beacon-chain/sync/subscriber.go | 32 +- .../sync/subscriber_beacon_aggregate_proof.go | 4 +- .../subscriber_beacon_aggregate_proof_test.go | 16 +- .../sync/subscriber_beacon_attestation.go | 14 +- beacon-chain/sync/subscriber_beacon_blocks.go | 24 +- .../sync/subscriber_beacon_blocks_test.go | 38 +- beacon-chain/sync/subscriber_blob_sidecar.go | 6 +- .../subscriber_bls_to_execution_change.go | 6 +- .../sync/subscriber_data_column_sidecar.go | 12 +- .../subscriber_data_column_sidecar_test.go | 18 +- beacon-chain/sync/subscriber_handlers.go | 2 +- .../sync/subscriber_sync_committee_message.go | 2 +- .../subscriber_sync_contribution_proof.go | 2 +- beacon-chain/sync/subscriber_test.go | 44 +- .../sync/subscription_topic_handler.go | 2 +- .../sync/subscription_topic_handler_test.go | 10 +- beacon-chain/sync/sync_fuzz_test.go | 32 +- beacon-chain/sync/sync_test.go | 2 +- beacon-chain/sync/validate_aggregate_proof.go | 34 +- .../sync/validate_aggregate_proof_test.go | 40 +- .../sync/validate_attester_slashing.go | 20 +- .../sync/validate_attester_slashing_test.go | 30 +- .../sync/validate_beacon_attestation.go | 34 +- .../sync/validate_beacon_attestation_test.go | 32 +- beacon-chain/sync/validate_beacon_blocks.go | 40 +- .../sync/validate_beacon_blocks_test.go | 52 +-- beacon-chain/sync/validate_blob.go | 24 +- beacon-chain/sync/validate_blob_test.go | 30 +- .../sync/validate_bls_to_execution_change.go | 8 +- .../validate_bls_to_execution_change_test.go | 38 +- beacon-chain/sync/validate_data_column.go | 26 +- .../sync/validate_data_column_test.go | 26 +- beacon-chain/sync/validate_light_client.go | 12 +- .../sync/validate_light_client_test.go | 30 +- .../sync/validate_proposer_slashing.go | 14 +- .../sync/validate_proposer_slashing_test.go | 32 +- .../sync/validate_sync_committee_message.go | 22 +- .../validate_sync_committee_message_test.go | 38 +- .../sync/validate_sync_contribution_proof.go | 24 +- .../validate_sync_contribution_proof_test.go | 56 +-- beacon-chain/sync/validate_voluntary_exit.go | 14 +- .../sync/validate_voluntary_exit_test.go | 34 +- 
beacon-chain/sync/verify/BUILD.bazel | 2 +- beacon-chain/sync/verify/blob.go | 8 +- beacon-chain/sync/verify/blob_test.go | 10 +- beacon-chain/verification/BUILD.bazel | 2 +- beacon-chain/verification/batch.go | 6 +- beacon-chain/verification/batch_test.go | 6 +- beacon-chain/verification/blob.go | 14 +- beacon-chain/verification/blob_test.go | 22 +- beacon-chain/verification/cache.go | 22 +- beacon-chain/verification/cache_test.go | 18 +- beacon-chain/verification/data_column.go | 20 +- beacon-chain/verification/data_column_test.go | 26 +- beacon-chain/verification/error.go | 2 +- beacon-chain/verification/fake.go | 2 +- beacon-chain/verification/filesystem.go | 6 +- beacon-chain/verification/filesystem_test.go | 6 +- beacon-chain/verification/initializer.go | 20 +- beacon-chain/verification/initializer_test.go | 6 +- beacon-chain/verification/interface.go | 4 +- beacon-chain/verification/mock.go | 4 +- beacon-chain/verification/result_test.go | 2 +- .../verification/verification_test.go | 2 +- build/bazel/BUILD.bazel | 2 +- build/bazel/bazel_test.go | 2 +- build/bazel/data_path.go | 2 +- cache/lru/BUILD.bazel | 2 +- cache/nonblocking/BUILD.bazel | 2 +- changelog/bastin_upgrade-v6-to-v7.md | 3 + cmd/BUILD.bazel | 2 +- cmd/beacon-chain/BUILD.bazel | 2 +- cmd/beacon-chain/blockchain/BUILD.bazel | 2 +- cmd/beacon-chain/blockchain/options.go | 8 +- cmd/beacon-chain/db/BUILD.bazel | 2 +- cmd/beacon-chain/db/db.go | 6 +- cmd/beacon-chain/execution/BUILD.bazel | 2 +- cmd/beacon-chain/execution/options.go | 6 +- cmd/beacon-chain/execution/options_test.go | 10 +- cmd/beacon-chain/flags/BUILD.bazel | 2 +- cmd/beacon-chain/flags/api_module_test.go | 2 +- cmd/beacon-chain/flags/base.go | 4 +- cmd/beacon-chain/flags/config.go | 2 +- cmd/beacon-chain/genesis/BUILD.bazel | 2 +- cmd/beacon-chain/genesis/options.go | 6 +- cmd/beacon-chain/jwt/BUILD.bazel | 2 +- cmd/beacon-chain/jwt/jwt.go | 6 +- cmd/beacon-chain/jwt/jwt_test.go | 6 +- cmd/beacon-chain/main.go | 46 +- cmd/beacon-chain/storage/BUILD.bazel | 2 +- cmd/beacon-chain/storage/options.go | 10 +- cmd/beacon-chain/storage/options_test.go | 12 +- cmd/beacon-chain/sync/backfill/BUILD.bazel | 2 +- .../sync/backfill/flags/BUILD.bazel | 2 +- cmd/beacon-chain/sync/backfill/options.go | 8 +- cmd/beacon-chain/sync/checkpoint/BUILD.bazel | 2 +- cmd/beacon-chain/sync/checkpoint/options.go | 4 +- cmd/beacon-chain/usage.go | 16 +- cmd/beacon-chain/usage_test.go | 2 +- cmd/client-stats/BUILD.bazel | 2 +- cmd/client-stats/flags/BUILD.bazel | 2 +- cmd/client-stats/main.go | 14 +- cmd/client-stats/usage.go | 4 +- cmd/config.go | 4 +- cmd/config_test.go | 6 +- cmd/defaults.go | 2 +- cmd/flags.go | 2 +- cmd/flags/BUILD.bazel | 2 +- cmd/flags_test.go | 2 +- cmd/helpers.go | 2 +- cmd/helpers_test.go | 6 +- cmd/mock/BUILD.bazel | 2 +- cmd/prysmctl/BUILD.bazel | 2 +- cmd/prysmctl/checkpointsync/BUILD.bazel | 2 +- cmd/prysmctl/checkpointsync/download.go | 6 +- cmd/prysmctl/db/BUILD.bazel | 2 +- cmd/prysmctl/db/buckets.go | 2 +- cmd/prysmctl/db/query.go | 2 +- cmd/prysmctl/db/span.go | 6 +- cmd/prysmctl/main.go | 12 +- cmd/prysmctl/p2p/BUILD.bazel | 2 +- cmd/prysmctl/p2p/client.go | 26 +- cmd/prysmctl/p2p/handler.go | 4 +- cmd/prysmctl/p2p/handshake.go | 10 +- cmd/prysmctl/p2p/mock_chain.go | 8 +- cmd/prysmctl/p2p/peers.go | 2 +- cmd/prysmctl/p2p/request_blobs.go | 16 +- cmd/prysmctl/p2p/request_blocks.go | 18 +- cmd/prysmctl/testnet/BUILD.bazel | 2 +- cmd/prysmctl/testnet/generate_genesis.go | 16 +- cmd/prysmctl/testnet/generate_genesis_test.go | 10 +- 
cmd/prysmctl/validator/BUILD.bazel | 2 +- cmd/prysmctl/validator/cmd.go | 10 +- cmd/prysmctl/validator/proposer_settings.go | 20 +- .../validator/proposer_settings_test.go | 6 +- cmd/prysmctl/validator/withdraw.go | 10 +- cmd/prysmctl/validator/withdraw_test.go | 10 +- cmd/prysmctl/weaksubjectivity/BUILD.bazel | 2 +- cmd/prysmctl/weaksubjectivity/checkpoint.go | 6 +- cmd/validator/BUILD.bazel | 2 +- cmd/validator/accounts/BUILD.bazel | 2 +- cmd/validator/accounts/accounts.go | 8 +- cmd/validator/accounts/backup.go | 12 +- cmd/validator/accounts/backup_test.go | 18 +- cmd/validator/accounts/delete.go | 10 +- cmd/validator/accounts/delete_test.go | 20 +- cmd/validator/accounts/exit.go | 20 +- cmd/validator/accounts/exit_test.go | 16 +- cmd/validator/accounts/import.go | 14 +- cmd/validator/accounts/import_test.go | 16 +- cmd/validator/accounts/list.go | 8 +- cmd/validator/accounts/wallet_utils.go | 10 +- cmd/validator/accounts/wallet_utils_test.go | 14 +- cmd/validator/db/BUILD.bazel | 2 +- cmd/validator/db/db.go | 6 +- cmd/validator/flags/BUILD.bazel | 2 +- cmd/validator/flags/flags.go | 6 +- cmd/validator/flags/flags_test.go | 4 +- cmd/validator/main.go | 34 +- cmd/validator/slashing-protection/BUILD.bazel | 2 +- cmd/validator/slashing-protection/export.go | 20 +- cmd/validator/slashing-protection/import.go | 16 +- .../slashing-protection/import_export_test.go | 18 +- .../slashing-protection.go | 8 +- cmd/validator/usage.go | 8 +- cmd/validator/usage_test.go | 2 +- cmd/validator/wallet/BUILD.bazel | 2 +- cmd/validator/wallet/create.go | 12 +- cmd/validator/wallet/create_test.go | 14 +- cmd/validator/wallet/recover.go | 10 +- cmd/validator/wallet/recover_test.go | 14 +- cmd/validator/wallet/wallet.go | 8 +- cmd/validator/web/BUILD.bazel | 2 +- cmd/validator/web/web.go | 12 +- config/BUILD.bazel | 2 +- config/features/BUILD.bazel | 2 +- config/features/config.go | 6 +- config/features/config_test.go | 4 +- config/features/deprecated_flags_test.go | 2 +- config/features/flags.go | 2 +- config/fieldparams/BUILD.bazel | 2 +- config/fieldparams/common_test.go | 6 +- config/fieldparams/mainnet_test.go | 6 +- config/fieldparams/minimal_test.go | 6 +- config/params/BUILD.bazel | 2 +- config/params/basis_points.go | 2 +- config/params/config.go | 12 +- config/params/config_test.go | 10 +- config/params/configset.go | 4 +- config/params/configset_test.go | 4 +- config/params/fork.go | 8 +- config/params/fork_test.go | 8 +- config/params/loader.go | 4 +- config/params/loader_test.go | 8 +- config/params/mainnet_config.go | 4 +- config/params/mainnet_config_test.go | 6 +- config/params/minimal_config.go | 2 +- config/params/network_config.go | 2 +- config/params/testnet_config_test.go | 8 +- config/params/testnet_holesky_config_test.go | 4 +- config/params/testnet_hoodi_config_test.go | 4 +- config/params/testnet_sepolia_config_test.go | 4 +- config/params/testutils.go | 2 +- config/proposer/BUILD.bazel | 2 +- config/proposer/loader/BUILD.bazel | 2 +- config/proposer/loader/loader.go | 14 +- config/proposer/loader/loader_test.go | 20 +- config/proposer/settings.go | 10 +- config/proposer/settings_test.go | 10 +- config/util_test.go | 6 +- consensus-types/BUILD.bazel | 2 +- consensus-types/blocks/BUILD.bazel | 2 +- consensus-types/blocks/execution.go | 12 +- consensus-types/blocks/execution_test.go | 14 +- consensus-types/blocks/factory.go | 8 +- consensus-types/blocks/factory_test.go | 14 +- consensus-types/blocks/get_payload.go | 8 +- consensus-types/blocks/getters.go | 16 +- 
consensus-types/blocks/getters_test.go | 20 +- consensus-types/blocks/kzg.go | 12 +- consensus-types/blocks/kzg_test.go | 12 +- consensus-types/blocks/proofs.go | 16 +- consensus-types/blocks/proofs_test.go | 4 +- consensus-types/blocks/proto.go | 10 +- consensus-types/blocks/proto_test.go | 16 +- consensus-types/blocks/roblob.go | 6 +- consensus-types/blocks/roblob_test.go | 12 +- consensus-types/blocks/roblock.go | 2 +- consensus-types/blocks/roblock_test.go | 12 +- consensus-types/blocks/rodatacolumn.go | 8 +- consensus-types/blocks/rodatacolumn_test.go | 12 +- consensus-types/blocks/setters.go | 12 +- consensus-types/blocks/testing/BUILD.bazel | 2 +- consensus-types/blocks/testing/factory.go | 6 +- consensus-types/blocks/testing/mutator.go | 8 +- consensus-types/blocks/types.go | 10 +- consensus-types/forkchoice/BUILD.bazel | 2 +- consensus-types/forkchoice/types.go | 4 +- consensus-types/hdiff/BUILD.bazel | 2 +- consensus-types/hdiff/fuzz_test.go | 154 +++---- consensus-types/hdiff/property_test.go | 118 ++--- consensus-types/hdiff/security_test.go | 144 +++--- consensus-types/hdiff/state_diff.go | 30 +- consensus-types/hdiff/state_diff_test.go | 20 +- consensus-types/helpers/BUILD.bazel | 2 +- consensus-types/helpers/comparisons.go | 2 +- consensus-types/helpers/comparisons_test.go | 2 +- consensus-types/interfaces/BUILD.bazel | 2 +- consensus-types/interfaces/beacon_block.go | 10 +- consensus-types/interfaces/error.go | 2 +- consensus-types/interfaces/error_test.go | 4 +- consensus-types/interfaces/light_client.go | 6 +- consensus-types/interfaces/utils.go | 2 +- consensus-types/interfaces/utils_test.go | 14 +- consensus-types/light-client/BUILD.bazel | 2 +- consensus-types/light-client/bootstrap.go | 12 +- .../light-client/finality_update.go | 12 +- consensus-types/light-client/header.go | 16 +- consensus-types/light-client/helpers.go | 4 +- .../light-client/optimistic_update.go | 10 +- consensus-types/light-client/update.go | 14 +- consensus-types/mock/BUILD.bazel | 2 +- consensus-types/mock/block.go | 14 +- consensus-types/payload-attribute/BUILD.bazel | 2 +- consensus-types/payload-attribute/getters.go | 6 +- .../payload-attribute/getters_test.go | 6 +- .../payload-attribute/interface.go | 2 +- consensus-types/payload-attribute/types.go | 12 +- consensus-types/primitives/BUILD.bazel | 2 +- consensus-types/primitives/epoch.go | 2 +- consensus-types/primitives/epoch_test.go | 6 +- consensus-types/primitives/slot.go | 2 +- consensus-types/primitives/slot_test.go | 4 +- consensus-types/primitives/sszbytes_test.go | 2 +- consensus-types/primitives/sszuint64_test.go | 2 +- consensus-types/primitives/wei_test.go | 4 +- consensus-types/types.go | 2 +- consensus-types/validator/BUILD.bazel | 2 +- .../validator/custom_types_test.go | 2 +- consensus-types/validator/types.go | 2 +- consensus-types/wrapper/BUILD.bazel | 2 +- consensus-types/wrapper/metadata.go | 6 +- container/doubly-linked-list/BUILD.bazel | 2 +- container/doubly-linked-list/list_test.go | 4 +- container/leaky-bucket/BUILD.bazel | 2 +- container/multi-value-slice/BUILD.bazel | 2 +- .../multi_value_slice_test.go | 4 +- container/queue/BUILD.bazel | 2 +- container/queue/priority_queue_test.go | 2 +- container/slice/BUILD.bazel | 2 +- container/slice/slice.go | 2 +- container/slice/slice_test.go | 6 +- container/thread-safe/BUILD.bazel | 2 +- container/thread-safe/map_test.go | 2 +- container/trie/BUILD.bazel | 2 +- container/trie/sparse_merkle.go | 6 +- container/trie/sparse_merkle_test.go | 18 +- 
.../trie/sparse_merkle_trie_fuzz_test.go | 10 +- contracts/deposit/BUILD.bazel | 2 +- contracts/deposit/contract_test.go | 10 +- contracts/deposit/deposit.go | 10 +- contracts/deposit/deposit_test.go | 16 +- contracts/deposit/deposit_tree_test.go | 12 +- contracts/deposit/mock/BUILD.bazel | 2 +- contracts/deposit/mock/mock.go | 2 +- crypto/bls/BUILD.bazel | 2 +- crypto/bls/bls.go | 6 +- crypto/bls/bls_test.go | 4 +- crypto/bls/blst/BUILD.bazel | 2 +- crypto/bls/blst/bls_benchmark_test.go | 6 +- crypto/bls/blst/init.go | 4 +- crypto/bls/blst/public_key.go | 8 +- crypto/bls/blst/public_key_test.go | 8 +- crypto/bls/blst/secret_key.go | 6 +- crypto/bls/blst/secret_key_test.go | 10 +- crypto/bls/blst/signature.go | 6 +- crypto/bls/blst/signature_test.go | 6 +- crypto/bls/blst/stub.go | 2 +- crypto/bls/common/BUILD.bazel | 2 +- crypto/bls/common/constants.go | 2 +- crypto/bls/common/mock/BUILD.bazel | 2 +- crypto/bls/common/mock/interface_mock.go | 2 +- crypto/bls/herumi/BUILD.bazel | 2 +- crypto/bls/interface.go | 2 +- crypto/bls/signature_batch_test.go | 6 +- crypto/ecdsa/BUILD.bazel | 2 +- crypto/ecdsa/utils_test.go | 4 +- crypto/hash/BUILD.bazel | 2 +- crypto/hash/hash.go | 2 +- crypto/hash/hash_test.go | 14 +- crypto/hash/htr/BUILD.bazel | 2 +- crypto/hash/htr/hashtree_test.go | 2 +- crypto/keystore/BUILD.bazel | 2 +- crypto/keystore/key.go | 4 +- crypto/keystore/key_test.go | 6 +- crypto/keystore/keystore.go | 2 +- crypto/keystore/keystore_test.go | 8 +- crypto/keystore/utils.go | 4 +- crypto/rand/BUILD.bazel | 2 +- crypto/rand/rand.go | 4 +- crypto/random/BUILD.bazel | 2 +- crypto/random/random_test.go | 4 +- encoding/bytesutil/BUILD.bazel | 2 +- encoding/bytesutil/bits_test.go | 6 +- encoding/bytesutil/bytes_test.go | 4 +- encoding/bytesutil/eth_types.go | 4 +- encoding/bytesutil/eth_types_test.go | 8 +- encoding/bytesutil/hex.go | 2 +- encoding/bytesutil/hex_test.go | 6 +- encoding/bytesutil/integers.go | 2 +- encoding/bytesutil/integers_test.go | 4 +- encoding/ssz/BUILD.bazel | 2 +- encoding/ssz/detect/BUILD.bazel | 2 +- encoding/ssz/detect/configfork.go | 22 +- encoding/ssz/detect/configfork_test.go | 22 +- encoding/ssz/detect/fieldspec.go | 2 +- encoding/ssz/detect/fieldspec_test.go | 2 +- encoding/ssz/equality/BUILD.bazel | 2 +- encoding/ssz/equality/deep_equal.go | 2 +- encoding/ssz/equality/deep_equal_test.go | 6 +- encoding/ssz/hashers_test.go | 6 +- encoding/ssz/helpers.go | 2 +- encoding/ssz/helpers_test.go | 6 +- encoding/ssz/htrutils.go | 8 +- encoding/ssz/htrutils_fuzz_test.go | 6 +- encoding/ssz/htrutils_test.go | 14 +- encoding/ssz/merkleize.go | 4 +- encoding/ssz/merkleize_test.go | 10 +- encoding/ssz/query/BUILD.bazel | 2 +- encoding/ssz/query/generalized_index.go | 2 +- encoding/ssz/query/generalized_index_test.go | 6 +- encoding/ssz/query/path_test.go | 4 +- encoding/ssz/query/query_test.go | 8 +- encoding/ssz/query/tag_parser_test.go | 4 +- encoding/ssz/query/testutil/BUILD.bazel | 2 +- encoding/ssz/query/testutil/runner.go | 4 +- encoding/ssz/query/testutil/type.go | 2 +- genesis/BUILD.bazel | 2 +- genesis/embedded.go | 6 +- genesis/embedded_test.go | 6 +- genesis/initialize.go | 6 +- genesis/initialize_test.go | 16 +- genesis/internal/embedded/BUILD.bazel | 2 +- genesis/internal/embedded/lookup.go | 8 +- genesis/internal/embedded/lookup_test.go | 4 +- genesis/internal/embedded/mainnet.go | 2 +- genesis/providers.go | 8 +- genesis/storage.go | 8 +- genesis/testing.go | 2 +- go.mod | 2 +- hack/update-go-pbs.sh | 2 +- hack/update-mockgen.sh | 12 +- io/file/BUILD.bazel 
| 2 +- io/file/fileutil.go | 2 +- io/file/fileutil_test.go | 8 +- io/logs/BUILD.bazel | 2 +- io/logs/logutil.go | 4 +- io/logs/logutil_test.go | 2 +- io/logs/mock/BUILD.bazel | 2 +- io/logs/mock/mock_stream.go | 2 +- io/logs/stream.go | 6 +- io/logs/stream_test.go | 2 +- io/prompt/BUILD.bazel | 2 +- io/prompt/prompt.go | 2 +- io/prompt/validate_test.go | 4 +- math/BUILD.bazel | 2 +- math/math_helper_test.go | 4 +- monitoring/backup/BUILD.bazel | 2 +- monitoring/clientstats/BUILD.bazel | 2 +- monitoring/clientstats/scrapers.go | 2 +- monitoring/clientstats/scrapers_test.go | 2 +- monitoring/journald/BUILD.bazel | 2 +- monitoring/progress/BUILD.bazel | 2 +- monitoring/prometheus/BUILD.bazel | 2 +- .../prometheus/logrus_collector_test.go | 6 +- monitoring/prometheus/service.go | 2 +- monitoring/prometheus/service_test.go | 6 +- monitoring/tracing/BUILD.bazel | 2 +- .../tracing/recovery_interceptor_option.go | 2 +- monitoring/tracing/trace/BUILD.bazel | 2 +- monitoring/tracing/tracer.go | 4 +- network/BUILD.bazel | 2 +- network/auth_test.go | 4 +- network/authorization/BUILD.bazel | 2 +- network/endpoint.go | 2 +- network/endpoint_test.go | 6 +- network/external_ip_test.go | 6 +- network/httputil/BUILD.bazel | 2 +- network/httputil/reader.go | 2 +- network/httputil/reader_test.go | 6 +- network/httputil/writer.go | 2 +- proto/dbval/BUILD.bazel | 4 +- proto/dbval/dbval.pb.go | 2 +- proto/dbval/dbval.proto | 2 +- proto/engine/v1/BUILD.bazel | 4 +- proto/engine/v1/electra.pb.go | 4 +- proto/engine/v1/electra.proto | 2 +- proto/engine/v1/electra_test.go | 8 +- proto/engine/v1/engine.ssz.go | 4 +- proto/engine/v1/execution_engine.go | 2 +- proto/engine/v1/execution_engine.pb.go | 14 +- proto/engine/v1/execution_engine.proto | 4 +- proto/engine/v1/execution_engine_fuzz_test.go | 4 +- proto/engine/v1/json_marshal_unmarshal.go | 8 +- .../engine/v1/json_marshal_unmarshal_test.go | 10 +- proto/eth/ext/BUILD.bazel | 6 +- proto/eth/ext/options.pb.go | 2 +- proto/eth/ext/options.proto | 2 +- proto/eth/v1/BUILD.bazel | 4 +- proto/eth/v1/attestation.pb.go | 38 +- proto/eth/v1/attestation.proto | 10 +- proto/eth/v1/beacon_block.pb.go | 54 +-- proto/eth/v1/beacon_block.proto | 14 +- proto/eth/v1/beacon_chain.pb.go | 2 +- proto/eth/v1/beacon_chain.proto | 2 +- proto/eth/v1/events.pb.go | 46 +- proto/eth/v1/events.proto | 12 +- proto/eth/v1/gateway.ssz.go | 30 +- proto/eth/v1/node.pb.go | 2 +- proto/eth/v1/node.proto | 2 +- proto/eth/v1/validator.pb.go | 46 +- proto/eth/v1/validator.proto | 12 +- proto/migration/BUILD.bazel | 2 +- proto/migration/enums.go | 4 +- proto/migration/enums_test.go | 4 +- proto/migration/v1alpha1_to_v1.go | 4 +- proto/migration/v1alpha1_to_v1_test.go | 8 +- proto/prysm/v1alpha1/BUILD.bazel | 4 +- proto/prysm/v1alpha1/altair.ssz.go | 24 +- proto/prysm/v1alpha1/attestation.go | 6 +- proto/prysm/v1alpha1/attestation.pb.go | 62 +-- proto/prysm/v1alpha1/attestation.proto | 16 +- proto/prysm/v1alpha1/attestation/BUILD.bazel | 2 +- .../attestation/aggregation/BUILD.bazel | 2 +- .../aggregation/attestations/BUILD.bazel | 2 +- .../aggregation/attestations/attestations.go | 6 +- .../attestations/attestations_test.go | 16 +- .../aggregation/attestations/maxcover.go | 8 +- .../aggregation/attestations/maxcover_test.go | 10 +- .../aggregation/maxcover_bench_test.go | 6 +- .../attestation/aggregation/maxcover_test.go | 4 +- .../aggregation/sync_contribution/BUILD.bazel | 2 +- .../sync_contribution/contribution.go | 4 +- .../aggregation/sync_contribution/naive.go | 6 +- 
.../sync_contribution/naive_test.go | 12 +- .../aggregation/testing/BUILD.bazel | 2 +- .../aggregation/testing/bitlistutils.go | 8 +- .../v1alpha1/attestation/attestation_utils.go | 12 +- .../attestation/attestation_utils_test.go | 14 +- proto/prysm/v1alpha1/attestation/id.go | 6 +- proto/prysm/v1alpha1/attestation/id_test.go | 12 +- proto/prysm/v1alpha1/attestation_fuzz_test.go | 2 +- proto/prysm/v1alpha1/beacon_block.go | 4 +- proto/prysm/v1alpha1/beacon_block.pb.go | 184 ++++---- proto/prysm/v1alpha1/beacon_block.proto | 46 +- .../prysm/v1alpha1/beacon_block_fuzz_test.go | 2 +- proto/prysm/v1alpha1/beacon_chain.pb.go | 422 +++++++++--------- proto/prysm/v1alpha1/beacon_chain.proto | 106 ++--- proto/prysm/v1alpha1/beacon_core_types.pb.go | 78 ++-- proto/prysm/v1alpha1/beacon_core_types.proto | 20 +- proto/prysm/v1alpha1/beacon_state.pb.go | 208 ++++----- proto/prysm/v1alpha1/beacon_state.proto | 52 +-- proto/prysm/v1alpha1/bellatrix.ssz.go | 14 +- proto/prysm/v1alpha1/blobs.pb.go | 22 +- proto/prysm/v1alpha1/blobs.proto | 6 +- proto/prysm/v1alpha1/capella.ssz.go | 24 +- proto/prysm/v1alpha1/cloners.go | 2 +- proto/prysm/v1alpha1/cloners_test.go | 6 +- proto/prysm/v1alpha1/data_columns.pb.go | 4 +- proto/prysm/v1alpha1/data_columns.proto | 2 +- proto/prysm/v1alpha1/debug.pb.go | 30 +- proto/prysm/v1alpha1/debug.proto | 8 +- proto/prysm/v1alpha1/deneb.ssz.go | 22 +- proto/prysm/v1alpha1/eip_7251.pb.go | 46 +- proto/prysm/v1alpha1/eip_7251.proto | 12 +- proto/prysm/v1alpha1/eip_7521.go | 2 +- proto/prysm/v1alpha1/eip_7521_fuzz_test.go | 2 +- proto/prysm/v1alpha1/electra.ssz.go | 46 +- .../finalized_block_root_container.pb.go | 2 +- .../finalized_block_root_container.proto | 2 +- proto/prysm/v1alpha1/fulu.ssz.go | 28 +- proto/prysm/v1alpha1/fuzz_test.go | 4 +- proto/prysm/v1alpha1/gloas.pb.go | 176 ++++---- proto/prysm/v1alpha1/gloas.proto | 44 +- proto/prysm/v1alpha1/gloas.ssz.go | 46 +- proto/prysm/v1alpha1/health.pb.go | 2 +- proto/prysm/v1alpha1/health.proto | 2 +- proto/prysm/v1alpha1/light_client.pb.go | 96 ++-- proto/prysm/v1alpha1/light_client.proto | 24 +- proto/prysm/v1alpha1/metadata/BUILD.bazel | 2 +- .../v1alpha1/metadata/metadata_interfaces.go | 2 +- proto/prysm/v1alpha1/node.pb.go | 4 +- proto/prysm/v1alpha1/node.proto | 2 +- proto/prysm/v1alpha1/non-core.ssz.go | 10 +- proto/prysm/v1alpha1/p2p_messages.pb.go | 78 ++-- proto/prysm/v1alpha1/p2p_messages.proto | 20 +- proto/prysm/v1alpha1/phase0.ssz.go | 48 +- proto/prysm/v1alpha1/powchain.pb.go | 4 +- proto/prysm/v1alpha1/powchain.proto | 2 +- proto/prysm/v1alpha1/slasher.pb.go | 22 +- proto/prysm/v1alpha1/slasher.proto | 6 +- proto/prysm/v1alpha1/slashings/BUILD.bazel | 2 +- .../v1alpha1/slashings/surround_votes.go | 2 +- .../v1alpha1/slashings/surround_votes_test.go | 4 +- proto/prysm/v1alpha1/sync_committee.pb.go | 38 +- proto/prysm/v1alpha1/sync_committee.proto | 10 +- .../v1alpha1/validator-client/BUILD.bazel | 4 +- .../validator-client/keymanager.pb.go | 42 +- .../validator-client/keymanager.proto | 10 +- proto/prysm/v1alpha1/validator.go | 2 +- proto/prysm/v1alpha1/validator.pb.go | 262 +++++------ proto/prysm/v1alpha1/validator.proto | 66 +-- proto/prysm/v1alpha1/withdrawals.pb.go | 14 +- proto/prysm/v1alpha1/withdrawals.proto | 4 +- proto/ssz_query/BUILD.bazel | 4 +- proto/ssz_query/response.pb.go | 4 +- proto/ssz_query/response.proto | 2 +- proto/ssz_query/testing/BUILD.bazel | 4 +- proto/ssz_query/testing/test_containers.pb.go | 4 +- proto/ssz_query/testing/test_containers.proto | 2 +- proto/testing/BUILD.bazel | 4 
+- proto/testing/tags_test.go | 8 +- runtime/BUILD.bazel | 2 +- runtime/debug/BUILD.bazel | 2 +- runtime/fdlimits/BUILD.bazel | 2 +- runtime/fdlimits/fdlimits_test.go | 4 +- runtime/interop/BUILD.bazel | 2 +- runtime/interop/generate_genesis_state.go | 22 +- .../generate_genesis_state_bellatrix.go | 14 +- .../generate_genesis_state_bellatrix_test.go | 12 +- .../interop/generate_genesis_state_test.go | 14 +- runtime/interop/generate_keys.go | 8 +- runtime/interop/generate_keys_test.go | 6 +- runtime/interop/genesis.go | 4 +- runtime/interop/premine-state.go | 30 +- runtime/interop/premine-state_test.go | 4 +- runtime/interop/premined_genesis_state.go | 10 +- runtime/logging/BUILD.bazel | 2 +- runtime/logging/blob.go | 2 +- runtime/logging/data_column.go | 2 +- .../logrus-prefixed-formatter/BUILD.bazel | 2 +- .../formatter_test.go | 4 +- runtime/maxprocs/BUILD.bazel | 2 +- runtime/messagehandler/BUILD.bazel | 2 +- runtime/messagehandler/messagehandler.go | 2 +- runtime/messagehandler/messagehandler_test.go | 4 +- runtime/prereqs/BUILD.bazel | 2 +- runtime/prereqs/prereq_test.go | 2 +- runtime/service_registry_test.go | 4 +- runtime/tos/BUILD.bazel | 2 +- runtime/tos/tos.go | 6 +- runtime/tos/tos_test.go | 4 +- runtime/version/BUILD.bazel | 2 +- testing/assert/BUILD.bazel | 2 +- testing/assert/assertions.go | 2 +- testing/assertions/BUILD.bazel | 2 +- testing/assertions/assertions.go | 2 +- testing/assertions/assertions_test.go | 10 +- testing/benchmark/BUILD.bazel | 2 +- testing/benchmark/pregen.go | 8 +- testing/benchmark/pregen_test.go | 2 +- testing/bls/BUILD.bazel | 2 +- testing/bls/aggregate_test.go | 8 +- testing/bls/aggregate_verify_test.go | 10 +- testing/bls/batch_verify_test.go | 10 +- testing/bls/deserialization_G1_test.go | 8 +- testing/bls/deserialization_G2_test.go | 6 +- testing/bls/fast_aggregate_verify_test.go | 10 +- testing/bls/hash_to_G2_test.go | 4 +- testing/bls/sign_test.go | 8 +- testing/bls/utils/BUILD.bazel | 2 +- testing/bls/utils/utils.go | 4 +- testing/bls/verify_test.go | 8 +- testing/endtoend/component_handler_test.go | 10 +- testing/endtoend/components/BUILD.bazel | 2 +- testing/endtoend/components/beacon_node.go | 22 +- testing/endtoend/components/boot_node.go | 6 +- testing/endtoend/components/builder.go | 10 +- testing/endtoend/components/eth1/BUILD.bazel | 2 +- testing/endtoend/components/eth1/depositor.go | 14 +- .../components/eth1/depositor_test.go | 4 +- testing/endtoend/components/eth1/helpers.go | 4 +- testing/endtoend/components/eth1/miner.go | 14 +- testing/endtoend/components/eth1/node.go | 12 +- testing/endtoend/components/eth1/node_set.go | 6 +- testing/endtoend/components/eth1/proxy.go | 10 +- .../endtoend/components/eth1/transactions.go | 8 +- .../endtoend/components/lighthouse_beacon.go | 10 +- .../components/lighthouse_validator.go | 14 +- testing/endtoend/components/tracing_sink.go | 6 +- testing/endtoend/components/validator.go | 22 +- .../endtoend/components/web3remotesigner.go | 12 +- .../components/web3remotesigner_test.go | 8 +- testing/endtoend/endtoend_setup_test.go | 14 +- testing/endtoend/endtoend_test.go | 36 +- testing/endtoend/evaluators/BUILD.bazel | 2 +- .../endtoend/evaluators/beaconapi/BUILD.bazel | 2 +- .../endtoend/evaluators/beaconapi/requests.go | 6 +- .../endtoend/evaluators/beaconapi/types.go | 2 +- testing/endtoend/evaluators/beaconapi/util.go | 4 +- .../endtoend/evaluators/beaconapi/verify.go | 16 +- testing/endtoend/evaluators/builder.go | 16 +- testing/endtoend/evaluators/data.go | 6 +- 
.../endtoend/evaluators/execution_engine.go | 16 +- testing/endtoend/evaluators/fee_recipient.go | 16 +- testing/endtoend/evaluators/finality.go | 8 +- testing/endtoend/evaluators/fork.go | 16 +- testing/endtoend/evaluators/metrics.go | 16 +- testing/endtoend/evaluators/node.go | 10 +- testing/endtoend/evaluators/operations.go | 32 +- testing/endtoend/evaluators/peers.go | 6 +- testing/endtoend/evaluators/slashing.go | 26 +- .../endtoend/evaluators/slashing_helper.go | 14 +- testing/endtoend/evaluators/validator.go | 28 +- testing/endtoend/helpers/BUILD.bazel | 2 +- testing/endtoend/helpers/epochTimer.go | 2 +- testing/endtoend/helpers/helpers.go | 10 +- testing/endtoend/mainnet_e2e_test.go | 6 +- testing/endtoend/mainnet_scenario_e2e_test.go | 6 +- testing/endtoend/minimal_builder_e2e_test.go | 6 +- testing/endtoend/minimal_e2e_test.go | 6 +- testing/endtoend/minimal_scenario_e2e_test.go | 6 +- testing/endtoend/minimal_slashing_e2e_test.go | 10 +- testing/endtoend/params/BUILD.bazel | 2 +- testing/endtoend/params/params.go | 2 +- testing/endtoend/params/params_test.go | 4 +- testing/endtoend/policies/BUILD.bazel | 2 +- testing/endtoend/policies/policies.go | 2 +- .../endtoend/slasher_simulator_e2e_test.go | 24 +- testing/endtoend/types/BUILD.bazel | 2 +- testing/endtoend/types/fork.go | 4 +- testing/endtoend/types/types.go | 8 +- testing/fuzz/BUILD.bazel | 2 +- testing/middleware/builder/BUILD.bazel | 2 +- testing/middleware/builder/builder.go | 30 +- .../middleware/engine-api-proxy/BUILD.bazel | 2 +- testing/middleware/engine-api-proxy/proxy.go | 2 +- .../middleware/engine-api-proxy/proxy_test.go | 6 +- testing/mock/BUILD.bazel | 2 +- .../beacon_altair_validator_client_mock.go | 2 +- .../beacon_altair_validator_server_mock.go | 2 +- testing/mock/beacon_service_mock.go | 6 +- testing/mock/beacon_validator_client_mock.go | 6 +- testing/mock/beacon_validator_server_mock.go | 6 +- testing/mock/node_service_mock.go | 6 +- testing/require/BUILD.bazel | 2 +- testing/require/requires.go | 2 +- testing/slasher/simulator/BUILD.bazel | 2 +- .../simulator/attestation_generator.go | 22 +- .../simulator/attestation_generator_test.go | 8 +- testing/slasher/simulator/block_generator.go | 16 +- .../slasher/simulator/block_generator_test.go | 2 +- testing/slasher/simulator/simulator.go | 32 +- testing/slasher/simulator/simulator_test.go | 16 +- ...__kzg__verify_blob_kzg_proof_batch_test.go | 12 +- ..._kzg__compute_cells_and_kzg_proofs_test.go | 10 +- .../general/fulu__kzg__compute_cells_test.go | 10 +- ..._kzg__recover_cells_and_kzg_proofs_test.go | 8 +- ...__kzg__verify_cell_kzg_proof_batch_test.go | 8 +- ...cessing__effective_balance_updates_test.go | 2 +- ..._epoch_processing__eth1_data_reset_test.go | 2 +- ...rocessing__historical_roots_update_test.go | 2 +- ...och_processing__inactivity_updates_test.go | 2 +- ...ng__justification_and_finalization_test.go | 2 +- ...essing__participation_flag_updates_test.go | 2 +- ...och_processing__randao_mixes_reset_test.go | 2 +- ...epoch_processing__registry_updates_test.go | 2 +- ..._processing__rewards_and_penalties_test.go | 2 +- ..._epoch_processing__slashings_reset_test.go | 2 +- ...ltair__epoch_processing__slashings_test.go | 2 +- .../altair__finality__finality_test.go | 2 +- ...ir__fork_helper__upgrade_to_altair_test.go | 2 +- ...ltair__fork_transition__transition_test.go | 2 +- .../altair__forkchoice__forkchoice_test.go | 4 +- ..._light_client__single_merkle_proof_test.go | 4 +- .../altair__operations__attestation_test.go | 2 +- 
...air__operations__attester_slashing_test.go | 2 +- .../altair__operations__block_header_test.go | 2 +- .../altair__operations__deposit_test.go | 2 +- ...air__operations__proposer_slashing_test.go | 2 +- ...altair__operations__sync_committee_test.go | 2 +- ...altair__operations__voluntary_exit_test.go | 2 +- .../mainnet/altair__random__random_test.go | 2 +- .../mainnet/altair__rewards__rewards_test.go | 2 +- .../mainnet/altair__sanity__blocks_test.go | 2 +- .../mainnet/altair__sanity__slots_test.go | 2 +- .../altair__ssz_static__ssz_static_test.go | 2 +- ...cessing__effective_balance_updates_test.go | 2 +- ..._epoch_processing__eth1_data_reset_test.go | 2 +- ...rocessing__historical_roots_update_test.go | 2 +- ...och_processing__inactivity_updates_test.go | 2 +- ...ng__justification_and_finalization_test.go | 2 +- ...essing__participation_flag_updates_test.go | 2 +- ...och_processing__randao_mixes_reset_test.go | 2 +- ...epoch_processing__registry_updates_test.go | 2 +- ..._processing__rewards_and_penalties_test.go | 2 +- ..._epoch_processing__slashings_reset_test.go | 2 +- ...atrix__epoch_processing__slashings_test.go | 2 +- .../bellatrix__finality__finality_test.go | 2 +- ...ix__fork_helper__upgrade_to_altair_test.go | 2 +- ...atrix__fork_transition__transition_test.go | 2 +- .../bellatrix__forkchoice__forkchoice_test.go | 4 +- ..._light_client__single_merkle_proof_test.go | 4 +- ...bellatrix__operations__attestation_test.go | 2 +- ...rix__operations__attester_slashing_test.go | 2 +- ...ellatrix__operations__block_header_test.go | 2 +- .../bellatrix__operations__deposit_test.go | 2 +- ...rix__operations__execution_payload_test.go | 2 +- ...rix__operations__proposer_slashing_test.go | 2 +- ...latrix__operations__sync_committee_test.go | 2 +- ...latrix__operations__voluntary_exit_test.go | 2 +- .../mainnet/bellatrix__random__random_test.go | 2 +- .../bellatrix__rewards__rewards_test.go | 2 +- .../mainnet/bellatrix__sanity__blocks_test.go | 2 +- .../mainnet/bellatrix__sanity__slots_test.go | 2 +- .../bellatrix__ssz_static__ssz_static_test.go | 2 +- ...cessing__effective_balance_updates_test.go | 2 +- ..._epoch_processing__eth1_data_reset_test.go | 2 +- ...ssing__historical_summaries_update_test.go | 2 +- ...och_processing__inactivity_updates_test.go | 2 +- ...ng__justification_and_finalization_test.go | 2 +- ...essing__participation_flag_updates_test.go | 2 +- ...och_processing__randao_mixes_reset_test.go | 2 +- ...epoch_processing__registry_updates_test.go | 2 +- ..._processing__rewards_and_penalties_test.go | 2 +- ..._epoch_processing__slashings_reset_test.go | 2 +- ...pella__epoch_processing__slashings_test.go | 2 +- .../capella__finality__finality_test.go | 2 +- ...a__fork_helper__upgrade_to_capella_test.go | 2 +- ...pella__fork_transition__transition_test.go | 2 +- .../capella__forkchoice__forkchoice_test.go | 4 +- ..._light_client__single_merkle_proof_test.go | 4 +- .../capella__operations__attestation_test.go | 2 +- ...lla__operations__attester_slashing_test.go | 2 +- .../capella__operations__block_header_test.go | 2 +- ...perations__bls_to_execution_change_test.go | 2 +- .../capella__operations__deposit_test.go | 2 +- ...lla__operations__execution_payload_test.go | 2 +- ...lla__operations__proposer_slashing_test.go | 2 +- ...apella__operations__sync_committee_test.go | 2 +- ...apella__operations__voluntary_exit_test.go | 2 +- .../capella__operations__withdrawals_test.go | 2 +- .../mainnet/capella__random__random_test.go | 2 +- .../mainnet/capella__rewards__rewards_test.go | 2 +- 
.../mainnet/capella__sanity__blocks_test.go | 2 +- .../mainnet/capella__sanity__slots_test.go | 2 +- .../capella__ssz_static__ssz_static_test.go | 2 +- ...cessing__effective_balance_updates_test.go | 2 +- ..._epoch_processing__eth1_data_reset_test.go | 2 +- ...ssing__historical_summaries_update_test.go | 2 +- ...och_processing__inactivity_updates_test.go | 2 +- ...ng__justification_and_finalization_test.go | 2 +- ...essing__participation_flag_updates_test.go | 2 +- ...och_processing__randao_mixes_reset_test.go | 2 +- ...epoch_processing__registry_updates_test.go | 2 +- ..._processing__rewards_and_penalties_test.go | 2 +- ..._epoch_processing__slashings_reset_test.go | 2 +- ...deneb__epoch_processing__slashings_test.go | 2 +- .../mainnet/deneb__finality__finality_test.go | 2 +- ...neb__fork_helper__upgrade_to_deneb_test.go | 2 +- ...deneb__fork_transition__transition_test.go | 2 +- .../deneb__forkchoice__forkchoice_test.go | 4 +- ..._light_client__single_merkle_proof_test.go | 4 +- .../deneb__merkle_proof__merkle_proof_test.go | 2 +- .../deneb__operations__attestation_test.go | 2 +- ...neb__operations__attester_slashing_test.go | 2 +- .../deneb__operations__block_header_test.go | 2 +- ...perations__bls_to_execution_change_test.go | 2 +- .../deneb__operations__deposit_test.go | 2 +- ...neb__operations__execution_payload_test.go | 2 +- ...neb__operations__proposer_slashing_test.go | 2 +- .../deneb__operations__sync_committee_test.go | 2 +- .../deneb__operations__voluntary_exit_test.go | 2 +- .../deneb__operations__withdrawals_test.go | 2 +- .../mainnet/deneb__random__random_test.go | 2 +- .../mainnet/deneb__rewards__rewards_test.go | 2 +- .../mainnet/deneb__sanity__blocks_test.go | 2 +- .../mainnet/deneb__sanity__slots_test.go | 2 +- .../deneb__ssz_static__ssz_static_test.go | 2 +- ...cessing__effective_balance_updates_test.go | 2 +- ..._epoch_processing__eth1_data_reset_test.go | 2 +- ...ssing__historical_summaries_update_test.go | 2 +- ...och_processing__inactivity_updates_test.go | 2 +- ...ng__justification_and_finalization_test.go | 2 +- ...essing__participation_flag_updates_test.go | 2 +- ...processing__pending_consolidations_test.go | 2 +- ...ocessing__pending_deposits_updates_test.go | 2 +- ...och_processing__randao_mixes_reset_test.go | 2 +- ...epoch_processing__registry_updates_test.go | 2 +- ..._processing__rewards_and_penalties_test.go | 2 +- ..._epoch_processing__slashings_reset_test.go | 2 +- ...ectra__epoch_processing__slashings_test.go | 2 +- .../electra__finality__finality_test.go | 2 +- ...a__fork_helper__upgrade_to_electra_test.go | 2 +- ...ectra__fork_transition__transition_test.go | 2 +- .../electra__forkchoice__forkchoice_test.go | 4 +- ..._light_client__single_merkle_proof_test.go | 4 +- ...lectra__merkle_proof__merkle_proof_test.go | 2 +- .../electra__operations__attestation_test.go | 2 +- ...tra__operations__attester_slashing_test.go | 2 +- .../electra__operations__block_header_test.go | 2 +- ...perations__bls_to_execution_change_test.go | 2 +- ...electra__operations__consolidation_test.go | 2 +- ...ctra__operations__deposit_requests_test.go | 2 +- .../electra__operations__deposit_test.go | 2 +- ...tions__execution_layer_withdrawals_test.go | 2 +- ...tra__operations__execution_payload_test.go | 2 +- ...tra__operations__proposer_slashing_test.go | 2 +- ...lectra__operations__sync_committee_test.go | 2 +- ...lectra__operations__voluntary_exit_test.go | 2 +- .../electra__operations__withdrawals_test.go | 2 +- .../mainnet/electra__random__random_test.go | 2 +- 
.../mainnet/electra__rewards__rewards_test.go | 2 +- .../mainnet/electra__sanity__blocks_test.go | 2 +- .../mainnet/electra__sanity__slots_test.go | 2 +- .../electra__ssz_static__ssz_static_test.go | 2 +- ...cessing__effective_balance_updates_test.go | 2 +- ..._epoch_processing__eth1_data_reset_test.go | 2 +- ...ssing__historical_summaries_update_test.go | 2 +- ...och_processing__inactivity_updates_test.go | 2 +- ...ng__justification_and_finalization_test.go | 2 +- ...essing__participation_flag_updates_test.go | 2 +- ...processing__pending_consolidations_test.go | 2 +- ...ocessing__pending_deposits_updates_test.go | 2 +- ...och_processing__proposer_lookahead_test.go | 2 +- ...och_processing__randao_mixes_reset_test.go | 2 +- ...epoch_processing__registry_updates_test.go | 2 +- ..._processing__rewards_and_penalties_test.go | 2 +- ..._epoch_processing__slashings_reset_test.go | 2 +- .../fulu__epoch_processing__slashings_test.go | 2 +- .../mainnet/fulu__finality__finality_test.go | 2 +- .../fulu__fork__upgrade_to_fulu_test.go | 2 +- .../fulu__fork_transition__transition_test.go | 2 +- .../fulu__forkchoice__forkchoice_test.go | 4 +- ..._light_client__single_merkle_proof_test.go | 4 +- .../fulu__merkle_proof__merkle_proof_test.go | 2 +- .../fulu__networking__custody_groups_test.go | 2 +- .../fulu__operations__attestation_test.go | 2 +- ...ulu__operations__attester_slashing_test.go | 2 +- .../fulu__operations__block_header_test.go | 2 +- ...perations__bls_to_execution_change_test.go | 2 +- .../fulu__operations__consolidation_test.go | 2 +- ...fulu__operations__deposit_requests_test.go | 2 +- .../mainnet/fulu__operations__deposit_test.go | 2 +- ...tions__execution_layer_withdrawals_test.go | 2 +- ...ulu__operations__execution_payload_test.go | 2 +- ...ulu__operations__proposer_slashing_test.go | 2 +- .../fulu__operations__sync_committee_test.go | 2 +- .../fulu__operations__voluntary_exit_test.go | 2 +- .../fulu__operations__withdrawals_test.go | 2 +- .../mainnet/fulu__random__random_test.go | 2 +- .../mainnet/fulu__rewards__rewards_test.go | 2 +- .../mainnet/fulu__sanity__blocks_test.go | 2 +- .../mainnet/fulu__sanity__slots_test.go | 2 +- .../fulu__ssz_static__ssz_static_test.go | 2 +- .../gloas__ssz_static__ssz_static_test.go | 2 +- ...cessing__effective_balance_updates_test.go | 2 +- ...epoch_processing__epoch_processing_test.go | 2 +- ..._epoch_processing__eth1_data_reset_test.go | 2 +- ...rocessing__historical_roots_update_test.go | 2 +- ...ng__justification_and_finalization_test.go | 2 +- ...sing__participation_record_updates_test.go | 2 +- ...och_processing__randao_mixes_reset_test.go | 2 +- ...epoch_processing__registry_updates_test.go | 2 +- ..._processing__rewards_and_penalties_test.go | 2 +- ..._epoch_processing__slashings_reset_test.go | 2 +- ...hase0__epoch_processing__slashings_test.go | 2 +- .../phase0__finality__finality_test.go | 2 +- .../phase0__operations__attestation_test.go | 2 +- ...se0__operations__attester_slashing_test.go | 2 +- .../phase0__operations__block_header_test.go | 2 +- .../phase0__operations__deposit_test.go | 2 +- ...se0__operations__proposer_slashing_test.go | 2 +- ...phase0__operations__voluntary_exit_test.go | 2 +- .../mainnet/phase0__random__random_test.go | 2 +- .../mainnet/phase0__rewards__rewards_test.go | 2 +- .../mainnet/phase0__sanity__blocks_test.go | 2 +- .../mainnet/phase0__sanity__slots_test.go | 2 +- .../phase0__ssz_static__ssz_static_test.go | 2 +- ...cessing__effective_balance_updates_test.go | 2 +- 
..._epoch_processing__eth1_data_reset_test.go | 2 +- ...rocessing__historical_roots_update_test.go | 2 +- ...och_processing__inactivity_updates_test.go | 2 +- ...ng__justification_and_finalization_test.go | 2 +- ...essing__participation_flag_updates_test.go | 2 +- ...och_processing__randao_mixes_reset_test.go | 2 +- ...epoch_processing__registry_updates_test.go | 2 +- ..._processing__rewards_and_penalties_test.go | 2 +- ..._epoch_processing__slashings_reset_test.go | 2 +- ...ltair__epoch_processing__slashings_test.go | 2 +- .../altair__finality__finality_test.go | 2 +- .../altair__fork__upgrade_to_altair_test.go | 2 +- ...ltair__fork_transition__transition_test.go | 2 +- .../altair__forkchoice__forkchoice_test.go | 4 +- ..._light_client__single_merkle_proof_test.go | 4 +- ...tair__light_client__update_ranking_test.go | 4 +- .../altair__operations__attestation_test.go | 2 +- ...air__operations__attester_slashing_test.go | 2 +- .../altair__operations__block_header_test.go | 2 +- .../altair__operations__deposit_test.go | 2 +- ...air__operations__proposer_slashing_test.go | 2 +- ...altair__operations__sync_committee_test.go | 2 +- ...altair__operations__voluntary_exit_test.go | 2 +- .../minimal/altair__random__random_test.go | 2 +- .../minimal/altair__rewards__rewards_test.go | 2 +- .../minimal/altair__sanity__blocks_test.go | 2 +- .../minimal/altair__sanity__slots_test.go | 2 +- .../altair__ssz_static__ssz_static_test.go | 2 +- ...cessing__effective_balance_updates_test.go | 2 +- ..._epoch_processing__eth1_data_reset_test.go | 2 +- ...rocessing__historical_roots_update_test.go | 2 +- ...och_processing__inactivity_updates_test.go | 2 +- ...ng__justification_and_finalization_test.go | 2 +- ...essing__participation_flag_updates_test.go | 2 +- ...och_processing__randao_mixes_reset_test.go | 2 +- ...epoch_processing__registry_updates_test.go | 2 +- ..._processing__rewards_and_penalties_test.go | 2 +- ..._epoch_processing__slashings_reset_test.go | 2 +- ...atrix__epoch_processing__slashings_test.go | 2 +- .../bellatrix__finality__finality_test.go | 2 +- ...bellatrix__fork__upgrade_to_altair_test.go | 2 +- ...atrix__fork_transition__transition_test.go | 2 +- .../bellatrix__forkchoice__forkchoice_test.go | 4 +- ..._light_client__single_merkle_proof_test.go | 4 +- ...trix__light_client__update_ranking_test.go | 4 +- ...bellatrix__operations__attestation_test.go | 2 +- ...rix__operations__attester_slashing_test.go | 2 +- ...ellatrix__operations__block_header_test.go | 2 +- .../bellatrix__operations__deposit_test.go | 2 +- ...rix__operations__execution_payload_test.go | 2 +- ...rix__operations__proposer_slashing_test.go | 2 +- ...latrix__operations__sync_committee_test.go | 2 +- ...latrix__operations__voluntary_exit_test.go | 2 +- .../minimal/bellatrix__random__random_test.go | 2 +- .../bellatrix__rewards__rewards_test.go | 2 +- .../minimal/bellatrix__sanity__blocks_test.go | 2 +- .../minimal/bellatrix__sanity__slots_test.go | 2 +- .../bellatrix__ssz_static__ssz_static_test.go | 2 +- ...cessing__effective_balance_updates_test.go | 2 +- ..._epoch_processing__eth1_data_reset_test.go | 2 +- ...essing__historical_roots_summaries_test.go | 2 +- ...och_processing__inactivity_updates_test.go | 2 +- ...ng__justification_and_finalization_test.go | 2 +- ...essing__participation_flag_updates_test.go | 2 +- ...och_processing__randao_mixes_reset_test.go | 2 +- ...epoch_processing__registry_updates_test.go | 2 +- ..._processing__rewards_and_penalties_test.go | 2 +- ..._epoch_processing__slashings_reset_test.go | 2 +- 
...pella__epoch_processing__slashings_test.go | 2 +- .../capella__finality__finality_test.go | 2 +- .../capella__fork__upgrade_to_capella_test.go | 2 +- ...pella__fork_transition__transition_test.go | 2 +- .../capella__forkchoice__forkchoice_test.go | 4 +- ..._light_client__single_merkle_proof_test.go | 4 +- ...ella__light_client__update_ranking_test.go | 4 +- .../capella__operations__attestation_test.go | 2 +- ...lla__operations__attester_slashing_test.go | 2 +- .../capella__operations__block_header_test.go | 2 +- ...perations__bls_to_execution_change_test.go | 2 +- .../capella__operations__deposit_test.go | 2 +- ...lla__operations__execution_payload_test.go | 2 +- ...lla__operations__proposer_slashing_test.go | 2 +- ...apella__operations__sync_committee_test.go | 2 +- ...apella__operations__voluntary_exit_test.go | 2 +- .../capella__operations__withdrawals_test.go | 2 +- .../minimal/capella__random__random_test.go | 2 +- .../minimal/capella__rewards__rewards_test.go | 2 +- .../minimal/capella__sanity__blocks_test.go | 2 +- .../minimal/capella__sanity__slots_test.go | 2 +- .../capella__ssz_static__ssz_static_test.go | 2 +- ...cessing__effective_balance_updates_test.go | 2 +- ..._epoch_processing__eth1_data_reset_test.go | 2 +- ...ssing__historical_summaries_update_test.go | 2 +- ...och_processing__inactivity_updates_test.go | 2 +- ...ng__justification_and_finalization_test.go | 2 +- ...essing__participation_flag_updates_test.go | 2 +- ...och_processing__randao_mixes_reset_test.go | 2 +- ...epoch_processing__registry_updates_test.go | 2 +- ..._processing__rewards_and_penalties_test.go | 2 +- ..._epoch_processing__slashings_reset_test.go | 2 +- ...deneb__epoch_processing__slashings_test.go | 2 +- .../minimal/deneb__finality__finality_test.go | 2 +- .../deneb__fork__upgrade_to_deneb_test.go | 2 +- ...deneb__fork_transition__transition_test.go | 2 +- .../deneb__forkchoice__forkchoice_test.go | 4 +- ..._light_client__single_merkle_proof_test.go | 4 +- ...eneb__light_client__update_ranking_test.go | 4 +- .../deneb__merkle_proof__merkle_proof_test.go | 2 +- .../deneb__operations__attestation_test.go | 2 +- ...neb__operations__attester_slashing_test.go | 2 +- .../deneb__operations__block_header_test.go | 2 +- ...perations__bls_to_execution_change_test.go | 2 +- .../deneb__operations__deposit_test.go | 2 +- ...neb__operations__execution_payload_test.go | 2 +- ...neb__operations__proposer_slashing_test.go | 2 +- .../deneb__operations__sync_committee_test.go | 2 +- .../deneb__operations__voluntary_exit_test.go | 2 +- .../deneb__operations__withdrawals_test.go | 2 +- .../minimal/deneb__random__random_test.go | 2 +- .../minimal/deneb__rewards__rewards_test.go | 2 +- .../minimal/deneb__sanity__blocks_test.go | 2 +- .../minimal/deneb__sanity__slots_test.go | 2 +- .../deneb__ssz_static__ssz_static_test.go | 2 +- ...cessing__effective_balance_updates_test.go | 2 +- ..._epoch_processing__eth1_data_reset_test.go | 2 +- ...ssing__historical_summaries_update_test.go | 2 +- ...och_processing__inactivity_updates_test.go | 2 +- ...ng__justification_and_finalization_test.go | 2 +- ...essing__participation_flag_updates_test.go | 2 +- ...processing__pending_consolidations_test.go | 2 +- ...ocessing__pending_deposits_updates_test.go | 2 +- ...och_processing__randao_mixes_reset_test.go | 2 +- ...epoch_processing__registry_updates_test.go | 2 +- ..._processing__rewards_and_penalties_test.go | 2 +- ..._epoch_processing__slashings_reset_test.go | 2 +- ...ectra__epoch_processing__slashings_test.go | 2 +- 
...processing__sync_committee_updates_test.go | 2 +- .../electra__finality__finality_test.go | 2 +- .../electra__fork__upgrade_to_electra_test.go | 2 +- ...ectra__fork_transition__transition_test.go | 2 +- .../electra__forkchoice__forkchoice_test.go | 4 +- ..._light_client__single_merkle_proof_test.go | 4 +- ...ctra__light_client__update_ranking_test.go | 4 +- ...lectra__merkle_proof__merkle_proof_test.go | 2 +- .../electra__operations__attestation_test.go | 2 +- ...tra__operations__attester_slashing_test.go | 2 +- .../electra__operations__block_header_test.go | 2 +- ...perations__bls_to_execution_change_test.go | 2 +- ...electra__operations__consolidation_test.go | 2 +- ...ctra__operations__deposit_requests_test.go | 2 +- .../electra__operations__deposit_test.go | 2 +- ...tions__execution_layer_withdrawals_test.go | 2 +- ...tra__operations__execution_payload_test.go | 2 +- ...tra__operations__proposer_slashing_test.go | 2 +- ...lectra__operations__sync_committee_test.go | 2 +- ...lectra__operations__voluntary_exit_test.go | 2 +- .../electra__operations__withdrawals_test.go | 2 +- .../minimal/electra__random__random_test.go | 2 +- .../minimal/electra__rewards__rewards_test.go | 2 +- .../minimal/electra__sanity__blocks_test.go | 2 +- .../minimal/electra__sanity__slots_test.go | 2 +- .../electra__ssz_static__ssz_static_test.go | 2 +- ...cessing__effective_balance_updates_test.go | 2 +- ..._epoch_processing__eth1_data_reset_test.go | 2 +- ...ssing__historical_summaries_update_test.go | 2 +- ...och_processing__inactivity_updates_test.go | 2 +- ...ng__justification_and_finalization_test.go | 2 +- ...essing__participation_flag_updates_test.go | 2 +- ...processing__pending_consolidations_test.go | 2 +- ...ocessing__pending_deposits_updates_test.go | 2 +- ...och_processing__proposer_lookahead_test.go | 2 +- ...och_processing__randao_mixes_reset_test.go | 2 +- ...epoch_processing__registry_updates_test.go | 2 +- ..._processing__rewards_and_penalties_test.go | 2 +- ..._epoch_processing__slashings_reset_test.go | 2 +- .../fulu__epoch_processing__slashings_test.go | 2 +- ...processing__sync_committee_updates_test.go | 2 +- .../minimal/fulu__finality__finality_test.go | 2 +- .../fulu__fork__upgrade_to_fulu_test.go | 2 +- .../fulu__fork_transition__transition_test.go | 2 +- .../fulu__forkchoice__forkchoice_test.go | 4 +- ..._light_client__single_merkle_proof_test.go | 4 +- .../fulu__merkle_proof__merkle_proof_test.go | 2 +- .../fulu__networking__custody_columns_test.go | 2 +- .../fulu__operations__attestation_test.go | 2 +- ...ulu__operations__attester_slashing_test.go | 2 +- .../fulu__operations__block_header_test.go | 2 +- ...perations__bls_to_execution_change_test.go | 2 +- .../fulu__operations__consolidation_test.go | 2 +- ...fulu__operations__deposit_requests_test.go | 2 +- .../minimal/fulu__operations__deposit_test.go | 2 +- ...tions__execution_layer_withdrawals_test.go | 2 +- ...ulu__operations__execution_payload_test.go | 2 +- ...ulu__operations__proposer_slashing_test.go | 2 +- .../fulu__operations__sync_committee_test.go | 2 +- .../fulu__operations__voluntary_exit_test.go | 2 +- .../fulu__operations__withdrawals_test.go | 2 +- .../minimal/fulu__random__random_test.go | 2 +- .../minimal/fulu__rewards__rewards_test.go | 2 +- .../minimal/fulu__sanity__blocks_test.go | 2 +- .../minimal/fulu__sanity__slots_test.go | 2 +- .../fulu__ssz_static__ssz_static_test.go | 2 +- .../gloas__ssz_static__ssz_static_test.go | 2 +- ...cessing__effective_balance_updates_test.go | 2 +- 
...epoch_processing__epoch_processing_test.go | 2 +- ..._epoch_processing__eth1_data_reset_test.go | 2 +- ...rocessing__historical_roots_update_test.go | 2 +- ...ng__justification_and_finalization_test.go | 2 +- ...sing__participation_record_updates_test.go | 2 +- ...och_processing__randao_mixes_reset_test.go | 2 +- ...epoch_processing__registry_updates_test.go | 2 +- ..._processing__rewards_and_penalties_test.go | 2 +- ..._epoch_processing__slashings_reset_test.go | 2 +- ...hase0__epoch_processing__slashings_test.go | 2 +- .../phase0__finality__finality_test.go | 2 +- .../phase0__operations__attestation_test.go | 2 +- ...se0__operations__attester_slashing_test.go | 2 +- .../phase0__operations__block_header_test.go | 2 +- .../phase0__operations__deposit_test.go | 2 +- ...se0__operations__proposer_slashing_test.go | 2 +- ...phase0__operations__voluntary_exit_test.go | 2 +- .../minimal/phase0__random__random_test.go | 2 +- .../minimal/phase0__rewards__rewards_test.go | 2 +- .../minimal/phase0__sanity__blocks_test.go | 2 +- .../minimal/phase0__sanity__slots_test.go | 2 +- .../phase0__ssz_static__ssz_static_test.go | 2 +- .../altair/epoch_processing/BUILD.bazel | 2 +- .../effective_balance_updates.go | 8 +- .../epoch_processing/eth1_data_reset.go | 8 +- .../shared/altair/epoch_processing/helpers.go | 10 +- .../historical_roots_update.go | 8 +- .../epoch_processing/inactivity_updates.go | 10 +- .../justification_and_finalization.go | 10 +- .../participation_flag_updates.go | 8 +- .../epoch_processing/randao_mixes_reset.go | 8 +- .../epoch_processing/registry_updates.go | 10 +- .../epoch_processing/rewards_and_penalties.go | 10 +- .../altair/epoch_processing/slashings.go | 10 +- .../epoch_processing/slashings_reset.go | 8 +- .../shared/altair/finality/BUILD.bazel | 2 +- .../shared/altair/finality/finality.go | 18 +- .../spectest/shared/altair/fork/BUILD.bazel | 2 +- .../spectest/shared/altair/fork/transition.go | 20 +- .../shared/altair/fork/upgrade_to_altair.go | 14 +- .../shared/altair/operations/BUILD.bazel | 2 +- .../shared/altair/operations/attestation.go | 14 +- .../altair/operations/attester_slashing.go | 12 +- .../shared/altair/operations/block_header.go | 4 +- .../shared/altair/operations/deposit.go | 14 +- .../shared/altair/operations/helpers.go | 10 +- .../altair/operations/proposer_slashing.go | 12 +- .../altair/operations/sync_committee.go | 12 +- .../altair/operations/voluntary_exit.go | 12 +- .../shared/altair/rewards/BUILD.bazel | 2 +- .../altair/rewards/rewards_penalties.go | 14 +- .../spectest/shared/altair/sanity/BUILD.bazel | 2 +- .../shared/altair/sanity/block_processing.go | 18 +- .../shared/altair/sanity/slot_processing.go | 12 +- .../shared/altair/ssz_static/BUILD.bazel | 2 +- .../shared/altair/ssz_static/ssz_static.go | 8 +- .../bellatrix/epoch_processing/BUILD.bazel | 2 +- .../effective_balance_updates.go | 8 +- .../epoch_processing/eth1_data_reset.go | 8 +- .../bellatrix/epoch_processing/helpers.go | 10 +- .../historical_roots_update.go | 8 +- .../epoch_processing/inactivity_updates.go | 10 +- .../justification_and_finalization.go | 10 +- .../participation_flag_updates.go | 8 +- .../epoch_processing/randao_mixes_reset.go | 8 +- .../epoch_processing/registry_updates.go | 10 +- .../epoch_processing/rewards_and_penalties.go | 10 +- .../bellatrix/epoch_processing/slashings.go | 10 +- .../epoch_processing/slashings_reset.go | 8 +- .../shared/bellatrix/finality/BUILD.bazel | 2 +- .../shared/bellatrix/finality/finality.go | 18 +- .../shared/bellatrix/fork/BUILD.bazel | 2 +- 
.../shared/bellatrix/fork/transition.go | 20 +- .../bellatrix/fork/upgrade_to_bellatrix.go | 14 +- .../shared/bellatrix/operations/BUILD.bazel | 2 +- .../bellatrix/operations/attestation.go | 14 +- .../bellatrix/operations/attester_slashing.go | 12 +- .../bellatrix/operations/block_header.go | 4 +- .../shared/bellatrix/operations/deposit.go | 14 +- .../bellatrix/operations/execution_payload.go | 4 +- .../shared/bellatrix/operations/helpers.go | 10 +- .../bellatrix/operations/proposer_slashing.go | 12 +- .../bellatrix/operations/sync_committee.go | 12 +- .../bellatrix/operations/voluntary_exit.go | 12 +- .../shared/bellatrix/rewards/BUILD.bazel | 2 +- .../bellatrix/rewards/rewards_penalties.go | 14 +- .../shared/bellatrix/sanity/BUILD.bazel | 2 +- .../bellatrix/sanity/block_processing.go | 18 +- .../bellatrix/sanity/slot_processing.go | 12 +- .../shared/bellatrix/ssz_static/BUILD.bazel | 2 +- .../shared/bellatrix/ssz_static/ssz_static.go | 10 +- .../capella/epoch_processing/BUILD.bazel | 2 +- .../effective_balance_updates.go | 8 +- .../epoch_processing/eth1_data_reset.go | 8 +- .../capella/epoch_processing/helpers.go | 10 +- .../historical_summaries_update.go | 8 +- .../epoch_processing/inactivity_updates.go | 10 +- .../justification_and_finalization.go | 10 +- .../participation_flag_updates.go | 8 +- .../epoch_processing/randao_mixes_reset.go | 8 +- .../epoch_processing/registry_updates.go | 10 +- .../epoch_processing/rewards_and_penalties.go | 10 +- .../capella/epoch_processing/slashings.go | 10 +- .../epoch_processing/slashings_reset.go | 8 +- .../shared/capella/finality/BUILD.bazel | 2 +- .../shared/capella/finality/finality.go | 18 +- .../spectest/shared/capella/fork/BUILD.bazel | 2 +- .../shared/capella/fork/transition.go | 20 +- .../shared/capella/fork/upgrade_to_capella.go | 14 +- .../shared/capella/operations/BUILD.bazel | 2 +- .../shared/capella/operations/attestation.go | 14 +- .../capella/operations/attester_slashing.go | 12 +- .../shared/capella/operations/block_header.go | 4 +- .../operations/bls_to_execution_changes.go | 12 +- .../shared/capella/operations/deposit.go | 14 +- .../capella/operations/execution_payload.go | 4 +- .../shared/capella/operations/helpers.go | 10 +- .../capella/operations/proposer_slashing.go | 12 +- .../capella/operations/sync_committee.go | 12 +- .../capella/operations/voluntary_exit.go | 12 +- .../shared/capella/operations/withdrawals.go | 14 +- .../shared/capella/rewards/BUILD.bazel | 2 +- .../capella/rewards/rewards_penalties.go | 14 +- .../shared/capella/sanity/BUILD.bazel | 2 +- .../shared/capella/sanity/block_processing.go | 18 +- .../shared/capella/sanity/slot_processing.go | 12 +- .../shared/capella/ssz_static/BUILD.bazel | 2 +- .../shared/capella/ssz_static/ssz_static.go | 10 +- .../shared/common/forkchoice/BUILD.bazel | 2 +- .../shared/common/forkchoice/builder.go | 22 +- .../shared/common/forkchoice/builder_test.go | 8 +- .../shared/common/forkchoice/runner.go | 28 +- .../shared/common/forkchoice/service.go | 46 +- .../shared/common/light_client/BUILD.bazel | 2 +- .../light_client/single_merkle_proof.go | 18 +- .../common/light_client/update_ranking.go | 20 +- .../shared/common/merkle_proof/BUILD.bazel | 2 +- .../merkle_proof/single_merkle_proof.go | 14 +- .../shared/common/operations/BUILD.bazel | 2 +- .../shared/common/operations/attestation.go | 12 +- .../common/operations/attester_slashing.go | 8 +- .../shared/common/operations/block_header.go | 12 +- .../operations/bls_to_execution_changes.go | 12 +- 
.../common/operations/consolidations.go | 12 +- .../shared/common/operations/deposit.go | 12 +- .../common/operations/deposit_request.go | 12 +- .../common/operations/execution_payload.go | 16 +- .../common/operations/proposer_slashing.go | 8 +- .../shared/common/operations/slashing.go | 6 +- .../common/operations/sync_aggregate.go | 12 +- .../shared/common/operations/test_runner.go | 10 +- .../common/operations/voluntary_exit.go | 14 +- .../common/operations/withdrawal_request.go | 12 +- .../shared/common/operations/withdrawals.go | 16 +- .../shared/common/ssz_static/BUILD.bazel | 2 +- .../shared/common/ssz_static/ssz_static.go | 6 +- .../ssz_static/ssz_static_example_test.go | 8 +- .../shared/deneb/epoch_processing/BUILD.bazel | 2 +- .../effective_balance_updates.go | 8 +- .../deneb/epoch_processing/eth1_data_reset.go | 8 +- .../shared/deneb/epoch_processing/helpers.go | 10 +- .../historical_summaries_update.go | 8 +- .../epoch_processing/inactivity_updates.go | 10 +- .../justification_and_finalization.go | 10 +- .../participation_flag_updates.go | 8 +- .../epoch_processing/randao_mixes_reset.go | 8 +- .../epoch_processing/registry_updates.go | 10 +- .../epoch_processing/rewards_and_penalties.go | 10 +- .../deneb/epoch_processing/slashings.go | 10 +- .../deneb/epoch_processing/slashings_reset.go | 8 +- .../shared/deneb/finality/BUILD.bazel | 2 +- .../shared/deneb/finality/finality.go | 18 +- .../spectest/shared/deneb/fork/BUILD.bazel | 2 +- .../spectest/shared/deneb/fork/transition.go | 20 +- .../shared/deneb/fork/upgrade_to_deneb.go | 14 +- .../shared/deneb/merkle_proof/BUILD.bazel | 2 +- .../shared/deneb/merkle_proof/merkle_proof.go | 4 +- .../shared/deneb/operations/BUILD.bazel | 2 +- .../shared/deneb/operations/attestation.go | 14 +- .../deneb/operations/attester_slashing.go | 12 +- .../shared/deneb/operations/block_header.go | 4 +- .../operations/bls_to_execution_changes.go | 12 +- .../shared/deneb/operations/deposit.go | 14 +- .../deneb/operations/execution_payload.go | 4 +- .../shared/deneb/operations/helpers.go | 10 +- .../deneb/operations/proposer_slashing.go | 12 +- .../shared/deneb/operations/sync_committee.go | 12 +- .../shared/deneb/operations/voluntary_exit.go | 12 +- .../shared/deneb/operations/withdrawals.go | 14 +- .../spectest/shared/deneb/rewards/BUILD.bazel | 2 +- .../shared/deneb/rewards/rewards_penalties.go | 14 +- .../spectest/shared/deneb/sanity/BUILD.bazel | 2 +- .../shared/deneb/sanity/block_processing.go | 22 +- .../shared/deneb/sanity/slot_processing.go | 12 +- .../shared/deneb/ssz_static/BUILD.bazel | 2 +- .../shared/deneb/ssz_static/ssz_static.go | 10 +- .../electra/epoch_processing/BUILD.bazel | 2 +- .../effective_balance_updates.go | 8 +- .../epoch_processing/eth1_data_reset.go | 8 +- .../electra/epoch_processing/helpers.go | 10 +- .../historical_summaries_update.go | 8 +- .../epoch_processing/inactivity_updates.go | 10 +- .../justification_and_finalization.go | 10 +- .../participation_flag_updates.go | 8 +- .../pending_consolidations.go | 8 +- .../pending_deposit_updates.go | 12 +- .../epoch_processing/randao_mixes_reset.go | 8 +- .../epoch_processing/registry_updates.go | 10 +- .../epoch_processing/rewards_and_penalties.go | 10 +- .../electra/epoch_processing/slashings.go | 10 +- .../epoch_processing/slashings_reset.go | 8 +- .../sync_committee_updates.go | 10 +- .../shared/electra/finality/BUILD.bazel | 2 +- .../shared/electra/finality/finality.go | 18 +- .../spectest/shared/electra/fork/BUILD.bazel | 2 +- .../shared/electra/fork/transition.go | 20 +- 
.../shared/electra/fork/upgrade_to_electra.go | 14 +- .../shared/electra/merkle_proof/BUILD.bazel | 2 +- .../electra/merkle_proof/merkle_proof.go | 4 +- .../shared/electra/operations/BUILD.bazel | 2 +- .../shared/electra/operations/attestation.go | 14 +- .../electra/operations/attester_slashing.go | 12 +- .../shared/electra/operations/block_header.go | 4 +- .../operations/bls_to_execution_changes.go | 12 +- .../electra/operations/consolidations.go | 14 +- .../shared/electra/operations/deposit.go | 14 +- .../electra/operations/deposit_request.go | 14 +- .../electra/operations/execution_payload.go | 4 +- .../shared/electra/operations/helpers.go | 10 +- .../electra/operations/proposer_slashing.go | 12 +- .../electra/operations/sync_committee.go | 12 +- .../electra/operations/voluntary_exit.go | 12 +- .../electra/operations/withdrawal_request.go | 14 +- .../shared/electra/operations/withdrawals.go | 14 +- .../shared/electra/rewards/BUILD.bazel | 2 +- .../electra/rewards/rewards_penalties.go | 14 +- .../shared/electra/sanity/BUILD.bazel | 2 +- .../shared/electra/sanity/block_processing.go | 20 +- .../shared/electra/sanity/slot_processing.go | 12 +- .../shared/electra/ssz_static/BUILD.bazel | 2 +- .../shared/electra/ssz_static/ssz_static.go | 10 +- .../shared/fulu/epoch_processing/BUILD.bazel | 2 +- .../effective_balance_updates.go | 8 +- .../fulu/epoch_processing/eth1_data_reset.go | 8 +- .../shared/fulu/epoch_processing/helpers.go | 10 +- .../historical_summaries_update.go | 8 +- .../epoch_processing/inactivity_updates.go | 10 +- .../justification_and_finalization.go | 10 +- .../participation_flag_updates.go | 8 +- .../pending_consolidations.go | 8 +- .../pending_deposit_updates.go | 12 +- .../epoch_processing/proposer_lookahead.go | 8 +- .../epoch_processing/randao_mixes_reset.go | 8 +- .../fulu/epoch_processing/registry_updates.go | 10 +- .../epoch_processing/rewards_and_penalties.go | 10 +- .../shared/fulu/epoch_processing/slashings.go | 10 +- .../fulu/epoch_processing/slashings_reset.go | 8 +- .../sync_committee_updates.go | 10 +- .../spectest/shared/fulu/finality/BUILD.bazel | 2 +- .../spectest/shared/fulu/finality/finality.go | 18 +- testing/spectest/shared/fulu/fork/BUILD.bazel | 2 +- .../spectest/shared/fulu/fork/transition.go | 20 +- .../shared/fulu/fork/upgrade_to_fulu.go | 14 +- .../shared/fulu/merkle_proof/BUILD.bazel | 2 +- .../shared/fulu/merkle_proof/merkle_proof.go | 4 +- .../shared/fulu/networking/BUILD.bazel | 2 +- .../shared/fulu/networking/custody_groups.go | 8 +- .../shared/fulu/operations/BUILD.bazel | 2 +- .../shared/fulu/operations/attestation.go | 14 +- .../fulu/operations/attester_slashing.go | 12 +- .../shared/fulu/operations/block_header.go | 4 +- .../operations/bls_to_execution_changes.go | 12 +- .../shared/fulu/operations/consolidations.go | 14 +- .../shared/fulu/operations/deposit.go | 14 +- .../shared/fulu/operations/deposit_request.go | 14 +- .../fulu/operations/execution_payload.go | 4 +- .../shared/fulu/operations/helpers.go | 10 +- .../fulu/operations/proposer_slashing.go | 12 +- .../shared/fulu/operations/sync_committee.go | 12 +- .../shared/fulu/operations/voluntary_exit.go | 12 +- .../fulu/operations/withdrawal_request.go | 14 +- .../shared/fulu/operations/withdrawals.go | 14 +- .../spectest/shared/fulu/rewards/BUILD.bazel | 2 +- .../shared/fulu/rewards/rewards_penalties.go | 14 +- .../spectest/shared/fulu/sanity/BUILD.bazel | 2 +- .../shared/fulu/sanity/block_processing.go | 20 +- .../shared/fulu/sanity/slot_processing.go | 12 +- 
.../shared/fulu/ssz_static/BUILD.bazel | 2 +- .../shared/fulu/ssz_static/ssz_static.go | 10 +- .../shared/gloas/ssz_static/BUILD.bazel | 2 +- .../shared/gloas/ssz_static/ssz_static.go | 6 +- .../phase0/epoch_processing/BUILD.bazel | 2 +- .../effective_balance_updates.go | 8 +- .../epoch_processing/eth1_data_reset.go | 8 +- .../shared/phase0/epoch_processing/helpers.go | 10 +- .../historical_roots_update.go | 8 +- .../justification_and_finalization.go | 8 +- .../participation_record_updates.go | 8 +- .../epoch_processing/randao_mixes_reset.go | 8 +- .../epoch_processing/registry_updates.go | 10 +- .../epoch_processing/rewards_and_penalties.go | 10 +- .../phase0/epoch_processing/slashings.go | 12 +- .../epoch_processing/slashings_reset.go | 8 +- .../shared/phase0/finality/BUILD.bazel | 2 +- .../spectest/shared/phase0/finality/runner.go | 18 +- .../shared/phase0/operations/BUILD.bazel | 2 +- .../shared/phase0/operations/attestation.go | 14 +- .../phase0/operations/attester_slashing.go | 12 +- .../shared/phase0/operations/block_header.go | 4 +- .../shared/phase0/operations/deposit.go | 14 +- .../shared/phase0/operations/helpers.go | 10 +- .../phase0/operations/proposer_slashing.go | 12 +- .../phase0/operations/voluntary_exit.go | 12 +- .../shared/phase0/rewards/BUILD.bazel | 2 +- .../phase0/rewards/rewards_penalties.go | 14 +- .../spectest/shared/phase0/sanity/BUILD.bazel | 2 +- .../shared/phase0/sanity/block_processing.go | 18 +- .../shared/phase0/sanity/slot_processing.go | 12 +- .../phase0/shuffling/core/shuffle/BUILD.bazel | 2 +- .../phase0/shuffling/core/shuffle/shuffle.go | 10 +- .../core/shuffle/shuffle_test_format.go | 2 +- .../shared/phase0/ssz_static/BUILD.bazel | 2 +- .../shared/phase0/ssz_static/ssz_static.go | 8 +- testing/spectest/utils/BUILD.bazel | 2 +- testing/spectest/utils/config.go | 2 +- testing/spectest/utils/config_test.go | 6 +- testing/spectest/utils/utils.go | 4 +- testing/util/BUILD.bazel | 2 +- testing/util/altair.go | 32 +- testing/util/attestation.go | 28 +- testing/util/attestation_test.go | 10 +- testing/util/bellatrix.go | 26 +- testing/util/bellatrix_state.go | 20 +- testing/util/bellatrix_state_test.go | 4 +- testing/util/blob.go | 4 +- testing/util/block.go | 38 +- testing/util/block_test.go | 22 +- testing/util/capella_block.go | 26 +- testing/util/capella_block_test.go | 12 +- testing/util/capella_state.go | 18 +- testing/util/data_column.go | 10 +- testing/util/deneb.go | 24 +- testing/util/deneb_state.go | 18 +- testing/util/deneb_test.go | 6 +- testing/util/deposits.go | 20 +- testing/util/deposits_test.go | 6 +- testing/util/electra.go | 24 +- testing/util/electra_block.go | 26 +- testing/util/electra_state.go | 22 +- testing/util/fulu.go | 24 +- testing/util/fulu_block.go | 24 +- testing/util/fulu_state.go | 22 +- testing/util/helpers.go | 28 +- testing/util/helpers_test.go | 18 +- testing/util/lightclient.go | 28 +- testing/util/lightclient_test.go | 8 +- testing/util/logging_test.go | 2 +- testing/util/merge.go | 2 +- testing/util/slot.go | 6 +- testing/util/state.go | 20 +- testing/util/state_test.go | 6 +- testing/util/sync_aggregate.go | 16 +- testing/util/sync_committee.go | 8 +- testing/validator-mock/BUILD.bazel | 2 +- testing/validator-mock/chain_client_mock.go | 6 +- testing/validator-mock/node_client_mock.go | 6 +- .../validator-mock/prysm_chain_client_mock.go | 10 +- .../validator-mock/validator_client_mock.go | 12 +- testing/validator-mock/validator_mock.go | 16 +- time/BUILD.bazel | 2 +- time/mclock/BUILD.bazel | 2 +- 
time/slots/BUILD.bazel | 2 +- time/slots/countdown.go | 4 +- time/slots/countdown_test.go | 6 +- time/slots/slotticker.go | 6 +- time/slots/slotticker_test.go | 4 +- time/slots/slottime.go | 10 +- time/slots/slottime_test.go | 12 +- time/slots/testing/BUILD.bazel | 2 +- time/slots/testing/mock.go | 2 +- time/slots/testing/mock_test.go | 2 +- tools/analyzers/comparesame/BUILD.bazel | 2 +- tools/analyzers/comparesame/analyzer_test.go | 2 +- tools/analyzers/cryptorand/BUILD.bazel | 2 +- tools/analyzers/cryptorand/analyzer_test.go | 2 +- tools/analyzers/errcheck/BUILD.bazel | 2 +- tools/analyzers/featureconfig/BUILD.bazel | 2 +- tools/analyzers/gocognit/BUILD.bazel | 2 +- tools/analyzers/ineffassign/BUILD.bazel | 2 +- tools/analyzers/ineffassign/analyzer_test.go | 2 +- tools/analyzers/interfacechecker/BUILD.bazel | 2 +- tools/analyzers/logcapitalization/BUILD.bazel | 2 +- .../logcapitalization/analyzer_test.go | 4 +- tools/analyzers/logruswitherror/BUILD.bazel | 2 +- .../logruswitherror/analyzer_test.go | 2 +- tools/analyzers/maligned/BUILD.bazel | 2 +- tools/analyzers/nop/BUILD.bazel | 2 +- tools/analyzers/nop/analyzer_test.go | 2 +- tools/analyzers/nopanic/BUILD.bazel | 2 +- tools/analyzers/nopanic/analyzer_test.go | 2 +- tools/analyzers/properpermissions/BUILD.bazel | 2 +- .../properpermissions/analyzer_test.go | 2 +- tools/analyzers/recursivelock/BUILD.bazel | 2 +- .../analyzers/recursivelock/analyzer_test.go | 2 +- tools/analyzers/shadowpredecl/BUILD.bazel | 2 +- .../analyzers/shadowpredecl/analyzer_test.go | 2 +- tools/analyzers/slicedirect/BUILD.bazel | 2 +- tools/analyzers/slicedirect/analyzer_test.go | 2 +- tools/analyzers/uintcast/BUILD.bazel | 2 +- tools/analyzers/uintcast/analyzer_test.go | 4 +- tools/beacon-fuzz/BUILD.bazel | 2 +- tools/beacon-fuzz/main.go | 2 +- tools/benchmark-files-gen/BUILD.bazel | 2 +- tools/benchmark-files-gen/main.go | 28 +- tools/blocktree/BUILD.bazel | 2 +- tools/blocktree/main.go | 6 +- tools/bootnode/BUILD.bazel | 2 +- tools/bootnode/bootnode.go | 20 +- tools/bootnode/bootnode_test.go | 10 +- tools/enr-calculator/BUILD.bazel | 2 +- tools/enr-calculator/main.go | 6 +- tools/eth1exporter/BUILD.bazel | 2 +- tools/eth1exporter/main.go | 2 +- tools/exploredb/BUILD.bazel | 2 +- tools/exploredb/main.go | 12 +- tools/extractor/BUILD.bazel | 2 +- tools/extractor/main.go | 8 +- tools/forkchecker/BUILD.bazel | 2 +- tools/forkchecker/forkchecker.go | 6 +- tools/gocovmerge/BUILD.bazel | 2 +- tools/http-request-sink/BUILD.bazel | 2 +- tools/http-request-sink/main.go | 2 +- tools/http-request-sink/main_test.go | 4 +- tools/interop/convert-keys/BUILD.bazel | 2 +- tools/interop/convert-keys/main.go | 4 +- tools/interop/export-genesis/BUILD.bazel | 2 +- tools/interop/export-genesis/main.go | 4 +- tools/interop/split-keys/BUILD.bazel | 2 +- tools/interop/split-keys/main.go | 10 +- tools/interop/split-keys/main_test.go | 8 +- tools/keystores/BUILD.bazel | 2 +- tools/keystores/main.go | 8 +- tools/keystores/main_test.go | 10 +- tools/nogo_config/BUILD.bazel | 2 +- tools/pcli/BUILD.bazel | 2 +- tools/pcli/main.go | 22 +- tools/replay-http/BUILD.bazel | 2 +- tools/specs-checker/BUILD.bazel | 2 +- tools/specs-checker/download.go | 2 +- tools/unencrypted-keys-gen/BUILD.bazel | 2 +- tools/unencrypted-keys-gen/keygen/BUILD.bazel | 2 +- tools/unencrypted-keys-gen/main.go | 6 +- tools/unencrypted-keys-gen/main_test.go | 6 +- validator/accounts/BUILD.bazel | 2 +- validator/accounts/accounts.go | 2 +- validator/accounts/accounts_backup.go | 4 +- validator/accounts/accounts_delete.go 
| 6 +- validator/accounts/accounts_delete_test.go | 10 +- validator/accounts/accounts_exit.go | 20 +- validator/accounts/accounts_exit_test.go | 18 +- validator/accounts/accounts_helper.go | 14 +- validator/accounts/accounts_import.go | 12 +- validator/accounts/accounts_import_test.go | 16 +- validator/accounts/accounts_list.go | 8 +- validator/accounts/accounts_list_test.go | 24 +- validator/accounts/cli_manager.go | 20 +- validator/accounts/cli_options.go | 6 +- validator/accounts/iface/BUILD.bazel | 2 +- validator/accounts/iface/wallet.go | 4 +- validator/accounts/petnames/BUILD.bazel | 2 +- validator/accounts/petnames/names.go | 4 +- validator/accounts/testing/BUILD.bazel | 2 +- validator/accounts/testing/mock.go | 4 +- validator/accounts/userprompt/BUILD.bazel | 2 +- validator/accounts/userprompt/prompt.go | 6 +- validator/accounts/wallet/BUILD.bazel | 2 +- validator/accounts/wallet/wallet.go | 20 +- validator/accounts/wallet/wallet_test.go | 16 +- validator/accounts/wallet_create.go | 8 +- validator/accounts/wallet_recover.go | 6 +- .../accounts/wallet_recover_fuzz_test.go | 2 +- validator/client/BUILD.bazel | 2 +- validator/client/aggregate.go | 26 +- validator/client/aggregate_test.go | 20 +- validator/client/attest.go | 28 +- validator/client/attest_test.go | 24 +- validator/client/beacon-api/BUILD.bazel | 2 +- .../client/beacon-api/attestation_data.go | 12 +- .../beacon-api/attestation_data_test.go | 10 +- .../beacon_api_beacon_chain_client.go | 10 +- .../beacon_api_beacon_chain_client_test.go | 14 +- .../client/beacon-api/beacon_api_helpers.go | 6 +- .../beacon-api/beacon_api_helpers_test.go | 8 +- .../beacon-api/beacon_api_node_client.go | 6 +- .../beacon-api/beacon_api_node_client_test.go | 8 +- .../beacon-api/beacon_api_validator_client.go | 12 +- .../beacon_api_validator_client_test.go | 20 +- .../beacon-api/beacon_block_converter.go | 10 +- .../beacon-api/beacon_block_converter_test.go | 8 +- .../beacon-api/beacon_block_json_helpers.go | 8 +- .../beacon_block_json_helpers_test.go | 8 +- .../beacon-api/beacon_block_proto_helpers.go | 8 +- .../beacon_block_proto_helpers_test.go | 10 +- .../beacon-api/beacon_committee_selections.go | 2 +- .../beacon_committee_selections_test.go | 8 +- validator/client/beacon-api/domain_data.go | 12 +- .../client/beacon-api/domain_data_test.go | 12 +- validator/client/beacon-api/doppelganger.go | 8 +- .../client/beacon-api/doppelganger_test.go | 10 +- validator/client/beacon-api/duties.go | 12 +- validator/client/beacon-api/duties_test.go | 14 +- validator/client/beacon-api/genesis.go | 10 +- validator/client/beacon-api/genesis_test.go | 8 +- .../client/beacon-api/get_beacon_block.go | 12 +- .../beacon-api/get_beacon_block_test.go | 16 +- validator/client/beacon-api/index.go | 4 +- validator/client/beacon-api/index_test.go | 14 +- validator/client/beacon-api/mock/BUILD.bazel | 2 +- .../mock/beacon_block_converter_mock.go | 4 +- .../client/beacon-api/mock/duties_mock.go | 4 +- .../client/beacon-api/mock/genesis_mock.go | 2 +- .../beacon-api/mock/state_validators_mock.go | 4 +- .../beacon-api/prepare_beacon_proposer.go | 4 +- .../prepare_beacon_proposer_test.go | 10 +- .../client/beacon-api/propose_attestation.go | 8 +- .../beacon-api/propose_attestation_test.go | 20 +- .../client/beacon-api/propose_beacon_block.go | 6 +- .../beacon-api/propose_beacon_block_test.go | 18 +- validator/client/beacon-api/propose_exit.go | 4 +- .../client/beacon-api/propose_exit_test.go | 10 +- .../beacon-api/prysm_beacon_chain_client.go | 10 +- 
.../prysm_beacon_chain_client_test.go | 14 +- validator/client/beacon-api/registration.go | 4 +- .../client/beacon-api/registration_test.go | 10 +- .../client/beacon-api/rest_handler_client.go | 10 +- .../beacon-api/rest_handler_client_test.go | 14 +- .../client/beacon-api/state_validators.go | 6 +- .../beacon-api/state_validators_test.go | 12 +- validator/client/beacon-api/status.go | 8 +- validator/client/beacon-api/status_test.go | 16 +- validator/client/beacon-api/stream_blocks.go | 6 +- .../client/beacon-api/stream_blocks_test.go | 14 +- .../submit_aggregate_selection_proof.go | 10 +- .../submit_aggregate_selection_proof_test.go | 14 +- .../submit_signed_aggregate_proof.go | 8 +- .../submit_signed_aggregate_proof_test.go | 18 +- .../submit_signed_contribution_and_proof.go | 4 +- ...bmit_signed_contribution_and_proof_test.go | 10 +- .../beacon-api/subscribe_committee_subnets.go | 4 +- .../subscribe_committee_subnets_test.go | 12 +- validator/client/beacon-api/sync_committee.go | 10 +- .../beacon-api/sync_committee_selections.go | 2 +- .../sync_committee_selections_test.go | 8 +- .../client/beacon-api/sync_committee_test.go | 16 +- .../beacon-api/test-helpers/BUILD.bazel | 2 +- .../altair_beacon_block_test_helpers.go | 4 +- .../bellatrix_beacon_block_test_helpers.go | 8 +- .../capella_beacon_block_test_helpers.go | 8 +- .../deneb_beacon_block_test_helpers.go | 8 +- .../electra_beacon_block_test_helpers.go | 8 +- .../phase0_beacon_block_test_helpers.go | 4 +- .../beacon-api/wait_for_chain_start_test.go | 10 +- .../beacon-chain-client-factory/BUILD.bazel | 2 +- .../beacon_chain_client_factory.go | 12 +- validator/client/grpc-api/BUILD.bazel | 2 +- .../grpc-api/grpc_beacon_chain_client.go | 4 +- validator/client/grpc-api/grpc_node_client.go | 4 +- .../grpc_prysm_beacon_chain_client.go | 14 +- .../grpc_prysm_beacon_chain_client_test.go | 16 +- .../client/grpc-api/grpc_validator_client.go | 18 +- .../grpc-api/grpc_validator_client_test.go | 14 +- validator/client/health_monitor.go | 6 +- validator/client/health_monitor_test.go | 6 +- validator/client/iface/BUILD.bazel | 2 +- validator/client/iface/chain_client.go | 2 +- validator/client/iface/node_client.go | 2 +- validator/client/iface/prysm_chain_client.go | 4 +- validator/client/iface/validator.go | 16 +- validator/client/iface/validator_client.go | 6 +- validator/client/key_reload.go | 4 +- validator/client/key_reload_test.go | 12 +- validator/client/log.go | 6 +- validator/client/log_test.go | 12 +- validator/client/metrics.go | 14 +- validator/client/metrics_test.go | 14 +- .../client/node-client-factory/BUILD.bazel | 2 +- .../node_client_factory.go | 10 +- validator/client/propose.go | 34 +- validator/client/propose_test.go | 40 +- validator/client/registration.go | 16 +- validator/client/registration_test.go | 10 +- validator/client/runner.go | 16 +- validator/client/runner_test.go | 34 +- validator/client/service.go | 44 +- validator/client/service_test.go | 6 +- .../slashing_protection_interchange_test.go | 14 +- validator/client/sync_committee.go | 24 +- validator/client/sync_committee_test.go | 14 +- validator/client/testutil/BUILD.bazel | 2 +- validator/client/testutil/helper.go | 6 +- validator/client/testutil/mock_validator.go | 20 +- .../validator-client-factory/BUILD.bazel | 2 +- .../validator_client_factory.go | 10 +- validator/client/validator.go | 50 +-- validator/client/validator_test.go | 48 +- validator/client/wait_for_activation.go | 6 +- validator/client/wait_for_activation_test.go | 20 +- validator/db/BUILD.bazel | 
2 +- validator/db/alias.go | 2 +- validator/db/common/BUILD.bazel | 2 +- validator/db/common/structs.go | 4 +- validator/db/convert.go | 16 +- validator/db/convert_test.go | 20 +- validator/db/filesystem/BUILD.bazel | 2 +- .../db/filesystem/attester_protection.go | 10 +- .../db/filesystem/attester_protection_test.go | 12 +- validator/db/filesystem/db.go | 8 +- validator/db/filesystem/db_test.go | 10 +- validator/db/filesystem/genesis_test.go | 2 +- validator/db/filesystem/graffiti_test.go | 4 +- validator/db/filesystem/import.go | 12 +- validator/db/filesystem/import_test.go | 14 +- validator/db/filesystem/migration_test.go | 2 +- .../db/filesystem/proposer_protection.go | 8 +- .../db/filesystem/proposer_protection_test.go | 16 +- validator/db/filesystem/proposer_settings.go | 2 +- .../db/filesystem/proposer_settings_test.go | 8 +- validator/db/iface/BUILD.bazel | 2 +- validator/db/iface/interface.go | 14 +- validator/db/kv/BUILD.bazel | 2 +- validator/db/kv/attester_protection.go | 16 +- validator/db/kv/attester_protection_test.go | 16 +- validator/db/kv/backup.go | 6 +- validator/db/kv/backup_test.go | 8 +- validator/db/kv/db.go | 14 +- .../db/kv/deprecated_attester_protection.go | 8 +- .../kv/deprecated_attester_protection_test.go | 10 +- validator/db/kv/eip_blacklisted_keys.go | 4 +- validator/db/kv/eip_blacklisted_keys_test.go | 6 +- validator/db/kv/genesis_test.go | 6 +- validator/db/kv/graffiti.go | 2 +- validator/db/kv/graffiti_test.go | 6 +- validator/db/kv/import.go | 18 +- validator/db/kv/import_test.go | 16 +- validator/db/kv/kv_test.go | 4 +- .../migration_optimal_attester_protection.go | 10 +- ...ration_optimal_attester_protection_test.go | 8 +- .../migration_source_target_epochs_bucket.go | 2 +- ...ration_source_target_epochs_bucket_test.go | 6 +- validator/db/kv/proposer_protection.go | 16 +- validator/db/kv/proposer_protection_test.go | 20 +- validator/db/kv/proposer_settings.go | 6 +- validator/db/kv/proposer_settings_test.go | 12 +- validator/db/kv/prune_attester_protection.go | 8 +- .../db/kv/prune_attester_protection_test.go | 10 +- validator/db/migrate.go | 6 +- validator/db/migrate_test.go | 8 +- validator/db/restore.go | 8 +- validator/db/restore_test.go | 10 +- validator/db/testing/BUILD.bazel | 2 +- validator/db/testing/setup_db.go | 8 +- validator/db/testing/setup_db_test.go | 10 +- validator/graffiti/BUILD.bazel | 2 +- validator/graffiti/parse_graffiti.go | 4 +- validator/graffiti/parse_graffiti_test.go | 8 +- validator/helpers/BUILD.bazel | 2 +- validator/helpers/converts.go | 4 +- validator/helpers/converts_test.go | 4 +- validator/helpers/metadata.go | 4 +- validator/helpers/metadata_test.go | 18 +- validator/keymanager/BUILD.bazel | 2 +- validator/keymanager/derived/BUILD.bazel | 2 +- validator/keymanager/derived/eip_test.go | 6 +- validator/keymanager/derived/keymanager.go | 14 +- .../keymanager/derived/keymanager_test.go | 16 +- validator/keymanager/derived/mnemonic.go | 4 +- validator/keymanager/derived/mnemonic_test.go | 4 +- validator/keymanager/local/BUILD.bazel | 2 +- validator/keymanager/local/backup.go | 6 +- validator/keymanager/local/backup_test.go | 10 +- validator/keymanager/local/delete.go | 6 +- validator/keymanager/local/delete_test.go | 10 +- validator/keymanager/local/import.go | 4 +- validator/keymanager/local/import_test.go | 12 +- validator/keymanager/local/keymanager.go | 20 +- validator/keymanager/local/keymanager_test.go | 16 +- validator/keymanager/local/refresh.go | 14 +- validator/keymanager/local/refresh_test.go | 12 +- 
.../keymanager/remote-web3signer/BUILD.bazel | 2 +- .../remote-web3signer/internal/BUILD.bazel | 2 +- .../remote-web3signer/internal/client.go | 8 +- .../remote-web3signer/internal/client_test.go | 4 +- .../remote-web3signer/keymanager.go | 24 +- .../remote-web3signer/keymanager_test.go | 16 +- .../remote-web3signer/types/BUILD.bazel | 2 +- .../remote-web3signer/types/custom_mappers.go | 8 +- .../types/custom_mappers_test.go | 10 +- .../remote-web3signer/types/mock/BUILD.bazel | 2 +- .../remote-web3signer/types/mock/mocks.go | 16 +- .../remote-web3signer/types/requests.go | 8 +- .../remote-web3signer/types/requests_test.go | 12 +- validator/keymanager/types.go | 8 +- validator/keymanager/types_test.go | 12 +- validator/node/BUILD.bazel | 2 +- validator/node/node.go | 48 +- validator/node/node_test.go | 20 +- validator/rpc/BUILD.bazel | 2 +- validator/rpc/auth_token.go | 8 +- validator/rpc/auth_token_test.go | 6 +- validator/rpc/beacon.go | 18 +- validator/rpc/beacon_test.go | 4 +- validator/rpc/handler_wallet.go | 16 +- validator/rpc/handler_wallet_test.go | 26 +- validator/rpc/handlers_accounts.go | 26 +- validator/rpc/handlers_accounts_test.go | 26 +- validator/rpc/handlers_auth.go | 8 +- validator/rpc/handlers_auth_test.go | 8 +- validator/rpc/handlers_beacon.go | 12 +- validator/rpc/handlers_beacon_test.go | 8 +- validator/rpc/handlers_health.go | 10 +- validator/rpc/handlers_health_test.go | 10 +- validator/rpc/handlers_keymanager.go | 32 +- validator/rpc/handlers_keymanager_test.go | 52 +-- validator/rpc/handlers_slashing.go | 6 +- validator/rpc/handlers_slashing_test.go | 18 +- validator/rpc/intercepter.go | 4 +- validator/rpc/intercepter_test.go | 6 +- validator/rpc/server.go | 24 +- validator/rpc/server_test.go | 2 +- validator/rpc/structs.go | 14 +- .../slashing-protection-history/BUILD.bazel | 2 +- .../slashing-protection-history/export.go | 12 +- .../export_test.go | 14 +- .../format/BUILD.bazel | 2 +- .../round_trip_test.go | 16 +- validator/testing/BUILD.bazel | 2 +- validator/testing/mock_protector.go | 2 +- validator/testing/protection_history.go | 16 +- validator/web/BUILD.bazel | 2 +- validator/web/handler_test.go | 2 +- 3030 files changed, 15365 insertions(+), 15362 deletions(-) create mode 100644 changelog/bastin_upgrade-v6-to-v7.md diff --git a/.deepsource.toml b/.deepsource.toml index a74613a648..079fd4cf86 100644 --- a/.deepsource.toml +++ b/.deepsource.toml @@ -11,7 +11,7 @@ name = "go" enabled = true [analyzers.meta] -import_paths = ["github.com/OffchainLabs/prysm/v6"] +import_paths = ["github.com/OffchainLabs/prysm/v7"] [[analyzers]] name = "test-coverage" diff --git a/BUILD.bazel b/BUILD.bazel index 432686dd96..0036873cc5 100644 --- a/BUILD.bazel +++ b/BUILD.bazel @@ -12,7 +12,7 @@ exports_files([ "LICENSE.md", ]) -# gazelle:prefix github.com/OffchainLabs/prysm/v6 +# gazelle:prefix github.com/OffchainLabs/prysm/v7 # gazelle:map_kind go_library go_library @prysm//tools/go:def.bzl # gazelle:map_kind go_test go_test @prysm//tools/go:def.bzl # gazelle:map_kind go_repository go_repository @prysm//tools/go:def.bzl diff --git a/api/BUILD.bazel b/api/BUILD.bazel index dbd22a30f4..bee57ea306 100644 --- a/api/BUILD.bazel +++ b/api/BUILD.bazel @@ -7,7 +7,7 @@ go_library( "headers.go", "jwt.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/api", + importpath = "github.com/OffchainLabs/prysm/v7/api", visibility = ["//visibility:public"], deps = [ "//crypto/rand:go_default_library", diff --git a/api/apiutil/BUILD.bazel b/api/apiutil/BUILD.bazel index 
b9e4fc4782..8cb1cb1fcb 100644 --- a/api/apiutil/BUILD.bazel +++ b/api/apiutil/BUILD.bazel @@ -6,7 +6,7 @@ go_library( "common.go", "header.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/api/apiutil", + importpath = "github.com/OffchainLabs/prysm/v7/api/apiutil", visibility = ["//visibility:public"], deps = [ "//consensus-types/primitives:go_default_library", diff --git a/api/apiutil/common.go b/api/apiutil/common.go index dbd45432d9..07ae4c744e 100644 --- a/api/apiutil/common.go +++ b/api/apiutil/common.go @@ -5,7 +5,7 @@ import ( neturl "net/url" "strconv" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" ) // Uint64ToString is a util function that will convert uints to string diff --git a/api/apiutil/common_test.go b/api/apiutil/common_test.go index f50d07326e..392584e791 100644 --- a/api/apiutil/common_test.go +++ b/api/apiutil/common_test.go @@ -4,8 +4,8 @@ import ( "net/url" "testing" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/testing/assert" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/testing/assert" ) func TestBeaconApiHelpers_TestUint64ToString(t *testing.T) { diff --git a/api/apiutil/header_test.go b/api/apiutil/header_test.go index 5c074f0b60..d33c2c05f9 100644 --- a/api/apiutil/header_test.go +++ b/api/apiutil/header_test.go @@ -3,7 +3,7 @@ package apiutil import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/require" ) func TestParseAccept(t *testing.T) { diff --git a/api/client/BUILD.bazel b/api/client/BUILD.bazel index 131d74c4f1..300e6feee5 100644 --- a/api/client/BUILD.bazel +++ b/api/client/BUILD.bazel @@ -8,7 +8,7 @@ go_library( "options.go", "transport.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/api/client", + importpath = "github.com/OffchainLabs/prysm/v7/api/client", visibility = ["//visibility:public"], deps = ["@com_github_pkg_errors//:go_default_library"], ) diff --git a/api/client/beacon/BUILD.bazel b/api/client/beacon/BUILD.bazel index 0013c9d453..4bdf01cf53 100644 --- a/api/client/beacon/BUILD.bazel +++ b/api/client/beacon/BUILD.bazel @@ -8,7 +8,7 @@ go_library( "log.go", "template.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/api/client/beacon", + importpath = "github.com/OffchainLabs/prysm/v7/api/client/beacon", visibility = ["//visibility:public"], deps = [ "//api/client:go_default_library", diff --git a/api/client/beacon/client.go b/api/client/beacon/client.go index 46fac052c7..9a0b448763 100644 --- a/api/client/beacon/client.go +++ b/api/client/beacon/client.go @@ -11,12 +11,12 @@ import ( "regexp" "strconv" - "github.com/OffchainLabs/prysm/v6/api/client" - "github.com/OffchainLabs/prysm/v6/api/server" - "github.com/OffchainLabs/prysm/v6/api/server/structs" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/api/client" + "github.com/OffchainLabs/prysm/v7/api/server" + "github.com/OffchainLabs/prysm/v7/api/server/structs" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" "github.com/ethereum/go-ethereum/common/hexutil" "github.com/pkg/errors" "github.com/sirupsen/logrus" diff 
--git a/api/client/beacon/client_test.go b/api/client/beacon/client_test.go index 9b9a37397a..cf12f08125 100644 --- a/api/client/beacon/client_test.go +++ b/api/client/beacon/client_test.go @@ -4,8 +4,8 @@ import ( "net/url" "testing" - "github.com/OffchainLabs/prysm/v6/api/client" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/api/client" + "github.com/OffchainLabs/prysm/v7/testing/require" ) func TestParseNodeVersion(t *testing.T) { diff --git a/api/client/builder/BUILD.bazel b/api/client/builder/BUILD.bazel index c4dd47b4fe..ef6d736a37 100644 --- a/api/client/builder/BUILD.bazel +++ b/api/client/builder/BUILD.bazel @@ -8,7 +8,7 @@ go_library( "errors.go", "types.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/api/client/builder", + importpath = "github.com/OffchainLabs/prysm/v7/api/client/builder", visibility = ["//visibility:public"], deps = [ "//api:go_default_library", diff --git a/api/client/builder/bid.go b/api/client/builder/bid.go index 3f10d5ee4f..6740ef7f90 100644 --- a/api/client/builder/bid.go +++ b/api/client/builder/bid.go @@ -1,13 +1,13 @@ package builder import ( - consensus_types "github.com/OffchainLabs/prysm/v6/consensus-types" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - v1 "github.com/OffchainLabs/prysm/v6/proto/engine/v1" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" + consensus_types "github.com/OffchainLabs/prysm/v7/consensus-types" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + v1 "github.com/OffchainLabs/prysm/v7/proto/engine/v1" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" ssz "github.com/prysmaticlabs/fastssz" ) diff --git a/api/client/builder/client.go b/api/client/builder/client.go index b50dcf1891..ca32e11bb6 100644 --- a/api/client/builder/client.go +++ b/api/client/builder/client.go @@ -12,18 +12,18 @@ import ( "strings" "text/template" - "github.com/OffchainLabs/prysm/v6/api" - "github.com/OffchainLabs/prysm/v6/api/client" - "github.com/OffchainLabs/prysm/v6/api/server/structs" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing/trace" - v1 "github.com/OffchainLabs/prysm/v6/proto/engine/v1" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" + "github.com/OffchainLabs/prysm/v7/api" + "github.com/OffchainLabs/prysm/v7/api/client" + "github.com/OffchainLabs/prysm/v7/api/server/structs" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing/trace" + v1 "github.com/OffchainLabs/prysm/v7/proto/engine/v1" + ethpb 
"github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" "github.com/pkg/errors" log "github.com/sirupsen/logrus" "go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp" diff --git a/api/client/builder/client_test.go b/api/client/builder/client_test.go index 9f058ac0f3..6ca43f7e66 100644 --- a/api/client/builder/client_test.go +++ b/api/client/builder/client_test.go @@ -11,18 +11,18 @@ import ( "testing" "github.com/OffchainLabs/go-bitfield" - "github.com/OffchainLabs/prysm/v6/api" - "github.com/OffchainLabs/prysm/v6/api/server/structs" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - v1 "github.com/OffchainLabs/prysm/v6/proto/engine/v1" - eth "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/api" + "github.com/OffchainLabs/prysm/v7/api/server/structs" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + v1 "github.com/OffchainLabs/prysm/v7/proto/engine/v1" + eth "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" log "github.com/sirupsen/logrus" ) diff --git a/api/client/builder/testing/BUILD.bazel b/api/client/builder/testing/BUILD.bazel index 3546db8b29..3d79cb9870 100644 --- a/api/client/builder/testing/BUILD.bazel +++ b/api/client/builder/testing/BUILD.bazel @@ -3,7 +3,7 @@ load("@prysm//tools/go:def.bzl", "go_library") go_library( name = "go_default_library", srcs = ["mock.go"], - importpath = "github.com/OffchainLabs/prysm/v6/api/client/builder/testing", + importpath = "github.com/OffchainLabs/prysm/v7/api/client/builder/testing", visibility = ["//visibility:public"], deps = [ "//api/client/builder:go_default_library", diff --git a/api/client/builder/testing/mock.go b/api/client/builder/testing/mock.go index fd2d8336a1..f921571cd7 100644 --- a/api/client/builder/testing/mock.go +++ b/api/client/builder/testing/mock.go @@ -3,12 +3,12 @@ package testing import ( "context" - "github.com/OffchainLabs/prysm/v6/api/client/builder" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - v1 "github.com/OffchainLabs/prysm/v6/proto/engine/v1" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/api/client/builder" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + v1 "github.com/OffchainLabs/prysm/v7/proto/engine/v1" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" ) // MockClient is a mock implementation of BuilderClient. 
diff --git a/api/client/builder/types.go b/api/client/builder/types.go index df7a52e4ca..e0d6316298 100644 --- a/api/client/builder/types.go +++ b/api/client/builder/types.go @@ -7,16 +7,16 @@ import ( "strconv" "strings" - "github.com/OffchainLabs/prysm/v6/api/server/structs" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - consensusblocks "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - types "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/math" - v1 "github.com/OffchainLabs/prysm/v6/proto/engine/v1" - eth "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" + "github.com/OffchainLabs/prysm/v7/api/server/structs" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + consensusblocks "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + types "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/math" + v1 "github.com/OffchainLabs/prysm/v7/proto/engine/v1" + eth "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" "github.com/ethereum/go-ethereum/common/hexutil" "github.com/pkg/errors" "google.golang.org/protobuf/proto" diff --git a/api/client/builder/types_test.go b/api/client/builder/types_test.go index 016bf9678b..957abf5a65 100644 --- a/api/client/builder/types_test.go +++ b/api/client/builder/types_test.go @@ -12,15 +12,15 @@ import ( "testing" "github.com/OffchainLabs/go-bitfield" - "github.com/OffchainLabs/prysm/v6/api/server/structs" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - consensusblocks "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/math" - v1 "github.com/OffchainLabs/prysm/v6/proto/engine/v1" - eth "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/api/server/structs" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + consensusblocks "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/math" + v1 "github.com/OffchainLabs/prysm/v7/proto/engine/v1" + eth "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" "github.com/ethereum/go-ethereum/common/hexutil" "github.com/pkg/errors" ) diff --git a/api/client/client_test.go b/api/client/client_test.go index b1bf60a3fe..cd66820668 100644 --- a/api/client/client_test.go +++ b/api/client/client_test.go @@ -4,7 +4,7 @@ import ( "net/url" "testing" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/require" ) func TestValidHostname(t *testing.T) { diff --git a/api/client/event/BUILD.bazel b/api/client/event/BUILD.bazel index a6a03edbc1..31f7ebbf38 100644 --- a/api/client/event/BUILD.bazel +++ b/api/client/event/BUILD.bazel @@ -6,7 +6,7 @@ go_library( "event_stream.go", "utils.go", ], - importpath = 
"github.com/OffchainLabs/prysm/v6/api/client/event", + importpath = "github.com/OffchainLabs/prysm/v7/api/client/event", visibility = ["//visibility:public"], deps = [ "//api:go_default_library", diff --git a/api/client/event/event_stream.go b/api/client/event/event_stream.go index c8fda249c9..3244225883 100644 --- a/api/client/event/event_stream.go +++ b/api/client/event/event_stream.go @@ -7,8 +7,8 @@ import ( "net/url" "strings" - "github.com/OffchainLabs/prysm/v6/api" - "github.com/OffchainLabs/prysm/v6/api/client" + "github.com/OffchainLabs/prysm/v7/api" + "github.com/OffchainLabs/prysm/v7/api/client" "github.com/pkg/errors" log "github.com/sirupsen/logrus" ) diff --git a/api/client/event/event_stream_test.go b/api/client/event/event_stream_test.go index 886e4dc13c..b366ad3671 100644 --- a/api/client/event/event_stream_test.go +++ b/api/client/event/event_stream_test.go @@ -7,7 +7,7 @@ import ( "testing" "time" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/require" log "github.com/sirupsen/logrus" ) diff --git a/api/client/event/utils_test.go b/api/client/event/utils_test.go index 20aecb1424..9139ca1494 100644 --- a/api/client/event/utils_test.go +++ b/api/client/event/utils_test.go @@ -5,7 +5,7 @@ import ( "bytes" "testing" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/require" ) func TestScanLinesWithCarriage(t *testing.T) { diff --git a/api/client/transport_test.go b/api/client/transport_test.go index 0a2eca3103..6dc74c5527 100644 --- a/api/client/transport_test.go +++ b/api/client/transport_test.go @@ -5,8 +5,8 @@ import ( "net/http/httptest" "testing" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" ) type noopTransport struct{} diff --git a/api/client/validator/BUILD.bazel b/api/client/validator/BUILD.bazel index 59a0065c54..627748af8c 100644 --- a/api/client/validator/BUILD.bazel +++ b/api/client/validator/BUILD.bazel @@ -3,7 +3,7 @@ load("@prysm//tools/go:def.bzl", "go_library") go_library( name = "go_default_library", srcs = ["client.go"], - importpath = "github.com/OffchainLabs/prysm/v6/api/client/validator", + importpath = "github.com/OffchainLabs/prysm/v7/api/client/validator", visibility = ["//visibility:public"], deps = [ "//api/client:go_default_library", diff --git a/api/client/validator/client.go b/api/client/validator/client.go index 20a791f121..433a0390e7 100644 --- a/api/client/validator/client.go +++ b/api/client/validator/client.go @@ -6,8 +6,8 @@ import ( "fmt" "strings" - "github.com/OffchainLabs/prysm/v6/api/client" - "github.com/OffchainLabs/prysm/v6/validator/rpc" + "github.com/OffchainLabs/prysm/v7/api/client" + "github.com/OffchainLabs/prysm/v7/validator/rpc" "github.com/pkg/errors" ) diff --git a/api/grpc/BUILD.bazel b/api/grpc/BUILD.bazel index 4948a5aa49..bf45352f90 100644 --- a/api/grpc/BUILD.bazel +++ b/api/grpc/BUILD.bazel @@ -6,7 +6,7 @@ go_library( "grpcutils.go", "parameters.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/api/grpc", + importpath = "github.com/OffchainLabs/prysm/v7/api/grpc", visibility = ["//visibility:public"], deps = [ "@com_github_sirupsen_logrus//:go_default_library", diff --git a/api/grpc/grpcutils_test.go b/api/grpc/grpcutils_test.go index c9d49b6957..cf06105444 100644 --- a/api/grpc/grpcutils_test.go +++ b/api/grpc/grpcutils_test.go @@ -3,8 +3,8 @@ package 
grpc import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" logTest "github.com/sirupsen/logrus/hooks/test" "google.golang.org/grpc/metadata" ) diff --git a/api/jwt.go b/api/jwt.go index e0bcc4796a..a139d143b7 100644 --- a/api/jwt.go +++ b/api/jwt.go @@ -1,7 +1,7 @@ package api import ( - "github.com/OffchainLabs/prysm/v6/crypto/rand" + "github.com/OffchainLabs/prysm/v7/crypto/rand" "github.com/ethereum/go-ethereum/common/hexutil" "github.com/pkg/errors" ) diff --git a/api/jwt_test.go b/api/jwt_test.go index a32c1ad1f9..f6f5eedc5a 100644 --- a/api/jwt_test.go +++ b/api/jwt_test.go @@ -3,7 +3,7 @@ package api import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/require" ) func TestGenerateRandomHexString(t *testing.T) { diff --git a/api/pagination/BUILD.bazel b/api/pagination/BUILD.bazel index 45a079554b..aee92f41e8 100644 --- a/api/pagination/BUILD.bazel +++ b/api/pagination/BUILD.bazel @@ -3,7 +3,7 @@ load("@prysm//tools/go:def.bzl", "go_library", "go_test") go_library( name = "go_default_library", srcs = ["pagination.go"], - importpath = "github.com/OffchainLabs/prysm/v6/api/pagination", + importpath = "github.com/OffchainLabs/prysm/v7/api/pagination", visibility = ["//visibility:public"], deps = [ "//config/params:go_default_library", diff --git a/api/pagination/pagination.go b/api/pagination/pagination.go index ea9f6d0d03..0b826724c7 100644 --- a/api/pagination/pagination.go +++ b/api/pagination/pagination.go @@ -5,7 +5,7 @@ import ( "fmt" "strconv" - "github.com/OffchainLabs/prysm/v6/config/params" + "github.com/OffchainLabs/prysm/v7/config/params" "github.com/pkg/errors" ) diff --git a/api/pagination/pagination_test.go b/api/pagination/pagination_test.go index cb7c22e30d..f6105721fe 100644 --- a/api/pagination/pagination_test.go +++ b/api/pagination/pagination_test.go @@ -3,9 +3,9 @@ package pagination_test import ( "testing" - "github.com/OffchainLabs/prysm/v6/api/pagination" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/api/pagination" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" ) func TestStartAndEndPage(t *testing.T) { diff --git a/api/server/BUILD.bazel b/api/server/BUILD.bazel index e6d39608c2..5d2da2c94a 100644 --- a/api/server/BUILD.bazel +++ b/api/server/BUILD.bazel @@ -3,7 +3,7 @@ load("@prysm//tools/go:def.bzl", "go_library", "go_test") go_library( name = "go_default_library", srcs = ["error.go"], - importpath = "github.com/OffchainLabs/prysm/v6/api/server", + importpath = "github.com/OffchainLabs/prysm/v7/api/server", visibility = ["//visibility:public"], ) diff --git a/api/server/error_test.go b/api/server/error_test.go index 4c62d0bbe4..88475eb37b 100644 --- a/api/server/error_test.go +++ b/api/server/error_test.go @@ -4,7 +4,7 @@ import ( "errors" "testing" - "github.com/OffchainLabs/prysm/v6/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/assert" ) func TestDecodeError(t *testing.T) { diff --git a/api/server/httprest/BUILD.bazel b/api/server/httprest/BUILD.bazel index 8d4ee270cd..1f3cea9eb3 100644 --- a/api/server/httprest/BUILD.bazel +++ b/api/server/httprest/BUILD.bazel @@ -7,7 +7,7 @@ go_library( "options.go", "server.go", ], - importpath = 
"github.com/OffchainLabs/prysm/v6/api/server/httprest", + importpath = "github.com/OffchainLabs/prysm/v7/api/server/httprest", visibility = ["//visibility:public"], deps = [ "//api/server/middleware:go_default_library", diff --git a/api/server/httprest/options.go b/api/server/httprest/options.go index 15b86465c5..b825f33605 100644 --- a/api/server/httprest/options.go +++ b/api/server/httprest/options.go @@ -4,7 +4,7 @@ import ( "net/http" "time" - "github.com/OffchainLabs/prysm/v6/api/server/middleware" + "github.com/OffchainLabs/prysm/v7/api/server/middleware" ) // Option is a http rest server functional parameter type. diff --git a/api/server/httprest/server.go b/api/server/httprest/server.go index bd0d7ceeef..af059e1856 100644 --- a/api/server/httprest/server.go +++ b/api/server/httprest/server.go @@ -5,8 +5,8 @@ import ( "net/http" "time" - "github.com/OffchainLabs/prysm/v6/api/server/middleware" - "github.com/OffchainLabs/prysm/v6/runtime" + "github.com/OffchainLabs/prysm/v7/api/server/middleware" + "github.com/OffchainLabs/prysm/v7/runtime" "github.com/pkg/errors" ) diff --git a/api/server/httprest/server_test.go b/api/server/httprest/server_test.go index 960a733c78..0a4d487b44 100644 --- a/api/server/httprest/server_test.go +++ b/api/server/httprest/server_test.go @@ -9,9 +9,9 @@ import ( "net/url" "testing" - "github.com/OffchainLabs/prysm/v6/cmd/beacon-chain/flags" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/cmd/beacon-chain/flags" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" logTest "github.com/sirupsen/logrus/hooks/test" "github.com/urfave/cli/v2" ) diff --git a/api/server/middleware/BUILD.bazel b/api/server/middleware/BUILD.bazel index eaca515833..d234346fea 100644 --- a/api/server/middleware/BUILD.bazel +++ b/api/server/middleware/BUILD.bazel @@ -6,7 +6,7 @@ go_library( "middleware.go", "util.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/api/server/middleware", + importpath = "github.com/OffchainLabs/prysm/v7/api/server/middleware", visibility = ["//visibility:public"], deps = [ "//api:go_default_library", diff --git a/api/server/middleware/middleware.go b/api/server/middleware/middleware.go index 9ab7c410c2..412db44697 100644 --- a/api/server/middleware/middleware.go +++ b/api/server/middleware/middleware.go @@ -6,8 +6,8 @@ import ( "net/http" "strings" - "github.com/OffchainLabs/prysm/v6/api" - "github.com/OffchainLabs/prysm/v6/api/apiutil" + "github.com/OffchainLabs/prysm/v7/api" + "github.com/OffchainLabs/prysm/v7/api/apiutil" "github.com/rs/cors" log "github.com/sirupsen/logrus" ) diff --git a/api/server/middleware/middleware_test.go b/api/server/middleware/middleware_test.go index 3742b5c50f..c5bacce686 100644 --- a/api/server/middleware/middleware_test.go +++ b/api/server/middleware/middleware_test.go @@ -8,8 +8,8 @@ import ( "net/http/httptest" "testing" - "github.com/OffchainLabs/prysm/v6/api" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/api" + "github.com/OffchainLabs/prysm/v7/testing/require" log "github.com/sirupsen/logrus" ) diff --git a/api/server/middleware/util_test.go b/api/server/middleware/util_test.go index b6e006b37f..5573adaaa2 100644 --- a/api/server/middleware/util_test.go +++ b/api/server/middleware/util_test.go @@ -3,8 +3,8 @@ package middleware import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/assert" - 
"github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" ) func TestNormalizeQueryValues(t *testing.T) { diff --git a/api/server/structs/BUILD.bazel b/api/server/structs/BUILD.bazel index de7d388966..cbc2021183 100644 --- a/api/server/structs/BUILD.bazel +++ b/api/server/structs/BUILD.bazel @@ -23,7 +23,7 @@ go_library( "other.go", "state.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/api/server/structs", + importpath = "github.com/OffchainLabs/prysm/v7/api/server/structs", visibility = ["//visibility:public"], deps = [ "//api/server:go_default_library", diff --git a/api/server/structs/conversions.go b/api/server/structs/conversions.go index 4e3b5be8a1..96350f7bad 100644 --- a/api/server/structs/conversions.go +++ b/api/server/structs/conversions.go @@ -4,18 +4,18 @@ import ( "fmt" "strconv" - "github.com/OffchainLabs/prysm/v6/api/server" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/consensus-types/validator" - "github.com/OffchainLabs/prysm/v6/container/slice" - "github.com/OffchainLabs/prysm/v6/crypto/bls" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/math" - enginev1 "github.com/OffchainLabs/prysm/v6/proto/engine/v1" - ethv1 "github.com/OffchainLabs/prysm/v6/proto/eth/v1" - eth "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/api/server" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/consensus-types/validator" + "github.com/OffchainLabs/prysm/v7/container/slice" + "github.com/OffchainLabs/prysm/v7/crypto/bls" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/math" + enginev1 "github.com/OffchainLabs/prysm/v7/proto/engine/v1" + ethv1 "github.com/OffchainLabs/prysm/v7/proto/eth/v1" + eth "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" "github.com/ethereum/go-ethereum/common" "github.com/ethereum/go-ethereum/common/hexutil" "github.com/pkg/errors" diff --git a/api/server/structs/conversions_blob.go b/api/server/structs/conversions_blob.go index 80eec81cd0..06b30e973a 100644 --- a/api/server/structs/conversions_blob.go +++ b/api/server/structs/conversions_blob.go @@ -3,9 +3,9 @@ package structs import ( "strconv" - "github.com/OffchainLabs/prysm/v6/api/server" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - eth "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/api/server" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + eth "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" ) func (sc *Sidecar) ToConsensus() (*eth.BlobSidecar, error) { diff --git a/api/server/structs/conversions_block.go b/api/server/structs/conversions_block.go index e27be6428c..6d6a4da528 100644 --- a/api/server/structs/conversions_block.go +++ b/api/server/structs/conversions_block.go @@ -4,14 +4,14 @@ import ( "fmt" "strconv" - "github.com/OffchainLabs/prysm/v6/api/server" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - 
"github.com/OffchainLabs/prysm/v6/container/slice" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - enginev1 "github.com/OffchainLabs/prysm/v6/proto/engine/v1" - eth "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/api/server" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/container/slice" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + enginev1 "github.com/OffchainLabs/prysm/v7/proto/engine/v1" + eth "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" "github.com/ethereum/go-ethereum/common" "github.com/ethereum/go-ethereum/common/hexutil" "github.com/pkg/errors" diff --git a/api/server/structs/conversions_block_execution.go b/api/server/structs/conversions_block_execution.go index f2d5eda820..c45143b2dc 100644 --- a/api/server/structs/conversions_block_execution.go +++ b/api/server/structs/conversions_block_execution.go @@ -4,14 +4,14 @@ import ( "fmt" "strconv" - "github.com/OffchainLabs/prysm/v6/api/server" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - consensusblocks "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/container/slice" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - enginev1 "github.com/OffchainLabs/prysm/v6/proto/engine/v1" + "github.com/OffchainLabs/prysm/v7/api/server" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + consensusblocks "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/container/slice" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + enginev1 "github.com/OffchainLabs/prysm/v7/proto/engine/v1" "github.com/ethereum/go-ethereum/common" "github.com/ethereum/go-ethereum/common/hexutil" "github.com/pkg/errors" diff --git a/api/server/structs/conversions_block_execution_test.go b/api/server/structs/conversions_block_execution_test.go index 8a58896c64..55a37644f5 100644 --- a/api/server/structs/conversions_block_execution_test.go +++ b/api/server/structs/conversions_block_execution_test.go @@ -4,8 +4,8 @@ import ( "fmt" "testing" - enginev1 "github.com/OffchainLabs/prysm/v6/proto/engine/v1" - "github.com/OffchainLabs/prysm/v6/testing/require" + enginev1 "github.com/OffchainLabs/prysm/v7/proto/engine/v1" + "github.com/OffchainLabs/prysm/v7/testing/require" "github.com/ethereum/go-ethereum/common" "github.com/ethereum/go-ethereum/common/hexutil" ) diff --git a/api/server/structs/conversions_lightclient.go b/api/server/structs/conversions_lightclient.go index fcdc23582b..a06c0f6f37 100644 --- a/api/server/structs/conversions_lightclient.go +++ b/api/server/structs/conversions_lightclient.go @@ -4,9 +4,9 @@ import ( "encoding/json" "fmt" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - enginev1 "github.com/OffchainLabs/prysm/v6/proto/engine/v1" - "github.com/OffchainLabs/prysm/v6/runtime/version" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + enginev1 "github.com/OffchainLabs/prysm/v7/proto/engine/v1" + "github.com/OffchainLabs/prysm/v7/runtime/version" "github.com/ethereum/go-ethereum/common/hexutil" 
"github.com/pkg/errors" ) diff --git a/api/server/structs/conversions_state.go b/api/server/structs/conversions_state.go index 5abf2ab76f..8dd5609c36 100644 --- a/api/server/structs/conversions_state.go +++ b/api/server/structs/conversions_state.go @@ -4,8 +4,8 @@ import ( "errors" "fmt" - beaconState "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - enginev1 "github.com/OffchainLabs/prysm/v6/proto/engine/v1" + beaconState "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + enginev1 "github.com/OffchainLabs/prysm/v7/proto/engine/v1" "github.com/ethereum/go-ethereum/common/hexutil" ) diff --git a/api/server/structs/conversions_test.go b/api/server/structs/conversions_test.go index 5e98364c33..31770807a3 100644 --- a/api/server/structs/conversions_test.go +++ b/api/server/structs/conversions_test.go @@ -3,9 +3,9 @@ package structs import ( "testing" - eth "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" + eth "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" "github.com/ethereum/go-ethereum/common/hexutil" ) diff --git a/api/server/structs/endpoints_validator.go b/api/server/structs/endpoints_validator.go index c25bfcebb7..71778fe62e 100644 --- a/api/server/structs/endpoints_validator.go +++ b/api/server/structs/endpoints_validator.go @@ -3,7 +3,7 @@ package structs import ( "encoding/json" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" ) type AggregateAttestationResponse struct { diff --git a/async/BUILD.bazel b/async/BUILD.bazel index cc1a36edfa..e1ff1fb811 100644 --- a/async/BUILD.bazel +++ b/async/BUILD.bazel @@ -8,7 +8,7 @@ go_library( "multilock.go", "scatter.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/async", + importpath = "github.com/OffchainLabs/prysm/v7/async", visibility = ["//visibility:public"], deps = ["@com_github_sirupsen_logrus//:go_default_library"], ) diff --git a/async/abool/BUILD.bazel b/async/abool/BUILD.bazel index 9e1863a60f..6dc3a42971 100644 --- a/async/abool/BUILD.bazel +++ b/async/abool/BUILD.bazel @@ -3,7 +3,7 @@ load("@prysm//tools/go:def.bzl", "go_library", "go_test") go_library( name = "go_default_library", srcs = ["abool.go"], - importpath = "github.com/OffchainLabs/prysm/v6/async/abool", + importpath = "github.com/OffchainLabs/prysm/v7/async/abool", visibility = ["//visibility:public"], ) diff --git a/async/benchmark_test.go b/async/benchmark_test.go index 830593beb1..83530091b8 100644 --- a/async/benchmark_test.go +++ b/async/benchmark_test.go @@ -6,8 +6,8 @@ import ( "sync" "testing" - "github.com/OffchainLabs/prysm/v6/async" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/async" + "github.com/OffchainLabs/prysm/v7/testing/require" log "github.com/sirupsen/logrus" ) diff --git a/async/debounce_test.go b/async/debounce_test.go index e190c85da2..f9272d646a 100644 --- a/async/debounce_test.go +++ b/async/debounce_test.go @@ -7,10 +7,10 @@ import ( "testing" "time" - "github.com/OffchainLabs/prysm/v6/async" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/async" + "github.com/OffchainLabs/prysm/v7/testing/assert" + 
"github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" ) func TestDebounce_NoEvents(t *testing.T) { diff --git a/async/event/BUILD.bazel b/async/event/BUILD.bazel index c63e7006c1..4fa9ba0c2f 100644 --- a/async/event/BUILD.bazel +++ b/async/event/BUILD.bazel @@ -7,7 +7,7 @@ go_library( "interface.go", "subscription.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/async/event", + importpath = "github.com/OffchainLabs/prysm/v7/async/event", visibility = ["//visibility:public"], deps = [ "//time/mclock:go_default_library", diff --git a/async/event/example_scope_test.go b/async/event/example_scope_test.go index b3504e6e24..510da227be 100644 --- a/async/event/example_scope_test.go +++ b/async/event/example_scope_test.go @@ -20,7 +20,7 @@ import ( "fmt" "sync" - "github.com/OffchainLabs/prysm/v6/async/event" + "github.com/OffchainLabs/prysm/v7/async/event" ) // This example demonstrates how SubscriptionScope can be used to control the lifetime of diff --git a/async/event/example_subscription_test.go b/async/event/example_subscription_test.go index f4abc03e38..fe8c65eb68 100644 --- a/async/event/example_subscription_test.go +++ b/async/event/example_subscription_test.go @@ -19,7 +19,7 @@ package event_test import ( "fmt" - "github.com/OffchainLabs/prysm/v6/async/event" + "github.com/OffchainLabs/prysm/v7/async/event" ) func ExampleNewSubscription() { diff --git a/async/event/subscription.go b/async/event/subscription.go index 56a4d379b9..4d7e3cd48d 100644 --- a/async/event/subscription.go +++ b/async/event/subscription.go @@ -21,7 +21,7 @@ import ( "sync" "time" - "github.com/OffchainLabs/prysm/v6/time/mclock" + "github.com/OffchainLabs/prysm/v7/time/mclock" ) // waitQuotient is divided against the max backoff time, in order to have N requests based on the full diff --git a/async/event/subscription_test.go b/async/event/subscription_test.go index bd477650bc..e3f04d1661 100644 --- a/async/event/subscription_test.go +++ b/async/event/subscription_test.go @@ -23,7 +23,7 @@ import ( "testing" "time" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/require" ) var errInts = errors.New("error in subscribeInts") diff --git a/async/every_test.go b/async/every_test.go index 499cc27cd3..515f41a9f1 100644 --- a/async/every_test.go +++ b/async/every_test.go @@ -6,7 +6,7 @@ import ( "testing" "time" - "github.com/OffchainLabs/prysm/v6/async" + "github.com/OffchainLabs/prysm/v7/async" ) func TestEveryRuns(t *testing.T) { diff --git a/async/scatter_test.go b/async/scatter_test.go index 048fdf1584..72d8eb2d50 100644 --- a/async/scatter_test.go +++ b/async/scatter_test.go @@ -5,9 +5,9 @@ import ( "sync" "testing" - "github.com/OffchainLabs/prysm/v6/async" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/async" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" ) func TestDouble(t *testing.T) { diff --git a/beacon-chain/blockchain/BUILD.bazel b/beacon-chain/blockchain/BUILD.bazel index c64c8edd7c..7f89e64c27 100644 --- a/beacon-chain/blockchain/BUILD.bazel +++ b/beacon-chain/blockchain/BUILD.bazel @@ -31,7 +31,7 @@ go_library( "tracked_proposer.go", "weak_subjectivity_checks.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain", + importpath = "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain", visibility = [ 
"//beacon-chain:__subpackages__", "//cmd/beacon-chain:__subpackages__", diff --git a/beacon-chain/blockchain/chain_info.go b/beacon-chain/blockchain/chain_info.go index 276090a135..396c5e5b21 100644 --- a/beacon-chain/blockchain/chain_info.go +++ b/beacon-chain/blockchain/chain_info.go @@ -5,20 +5,20 @@ import ( "context" "time" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - f "github.com/OffchainLabs/prysm/v6/beacon-chain/forkchoice" - doublylinkedtree "github.com/OffchainLabs/prysm/v6/beacon-chain/forkchoice/doubly-linked-tree" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - consensus_blocks "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/forkchoice" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing/trace" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + f "github.com/OffchainLabs/prysm/v7/beacon-chain/forkchoice" + doublylinkedtree "github.com/OffchainLabs/prysm/v7/beacon-chain/forkchoice/doubly-linked-tree" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + consensus_blocks "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/forkchoice" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing/trace" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/time/slots" "github.com/pkg/errors" ) diff --git a/beacon-chain/blockchain/chain_info_forkchoice.go b/beacon-chain/blockchain/chain_info_forkchoice.go index 780203db24..9aedecd35c 100644 --- a/beacon-chain/blockchain/chain_info_forkchoice.go +++ b/beacon-chain/blockchain/chain_info_forkchoice.go @@ -4,12 +4,12 @@ import ( "context" "time" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - consensus_blocks "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/forkchoice" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/runtime/version" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + consensus_blocks "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/forkchoice" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/runtime/version" "github.com/pkg/errors" ) diff --git a/beacon-chain/blockchain/chain_info_norace_test.go b/beacon-chain/blockchain/chain_info_norace_test.go index 4b5b950643..2489f97387 100644 --- a/beacon-chain/blockchain/chain_info_norace_test.go +++ b/beacon-chain/blockchain/chain_info_norace_test.go @@ -3,11 +3,11 @@ package blockchain import ( "testing" - 
"github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" ) func TestHeadSlot_DataRace(t *testing.T) { diff --git a/beacon-chain/blockchain/chain_info_test.go b/beacon-chain/blockchain/chain_info_test.go index 89f0d0e408..8426b1f436 100644 --- a/beacon-chain/blockchain/chain_info_test.go +++ b/beacon-chain/blockchain/chain_info_test.go @@ -5,21 +5,21 @@ import ( "testing" "time" - testDB "github.com/OffchainLabs/prysm/v6/beacon-chain/db/testing" - forkchoicetypes "github.com/OffchainLabs/prysm/v6/beacon-chain/forkchoice/types" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/genesis" - enginev1 "github.com/OffchainLabs/prysm/v6/proto/engine/v1" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + testDB "github.com/OffchainLabs/prysm/v7/beacon-chain/db/testing" + forkchoicetypes "github.com/OffchainLabs/prysm/v7/beacon-chain/forkchoice/types" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/genesis" + enginev1 "github.com/OffchainLabs/prysm/v7/proto/engine/v1" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" "google.golang.org/protobuf/proto" ) diff --git a/beacon-chain/blockchain/defragment.go b/beacon-chain/blockchain/defragment.go index 3d2f2287b3..f6e09dce18 100644 --- a/beacon-chain/blockchain/defragment.go +++ b/beacon-chain/blockchain/defragment.go @@ -1,8 +1,8 @@ package blockchain import ( - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/time" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/time" "github.com/prometheus/client_golang/prometheus" "github.com/prometheus/client_golang/prometheus/promauto" ) diff --git a/beacon-chain/blockchain/error.go b/beacon-chain/blockchain/error.go index 35630eb139..3dd270ce75 100644 --- a/beacon-chain/blockchain/error.go +++ b/beacon-chain/blockchain/error.go @@ -3,7 +3,7 @@ package blockchain import 
( stderrors "errors" - "github.com/OffchainLabs/prysm/v6/beacon-chain/verification" + "github.com/OffchainLabs/prysm/v7/beacon-chain/verification" "github.com/pkg/errors" ) diff --git a/beacon-chain/blockchain/error_test.go b/beacon-chain/blockchain/error_test.go index 56a8ed2b99..dfd4c4e7ae 100644 --- a/beacon-chain/blockchain/error_test.go +++ b/beacon-chain/blockchain/error_test.go @@ -3,8 +3,8 @@ package blockchain import ( "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/verification" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/beacon-chain/verification" + "github.com/OffchainLabs/prysm/v7/testing/require" "github.com/pkg/errors" ) diff --git a/beacon-chain/blockchain/execution_engine.go b/beacon-chain/blockchain/execution_engine.go index d2afd2442d..459d09d2d0 100644 --- a/beacon-chain/blockchain/execution_engine.go +++ b/beacon-chain/blockchain/execution_engine.go @@ -4,27 +4,27 @@ import ( "context" "fmt" - "github.com/OffchainLabs/prysm/v6/async/event" - "github.com/OffchainLabs/prysm/v6/beacon-chain/cache" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/blocks" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/feed" - statefeed "github.com/OffchainLabs/prysm/v6/beacon-chain/core/feed/state" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/time" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/transition" - "github.com/OffchainLabs/prysm/v6/beacon-chain/execution" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/config/features" - "github.com/OffchainLabs/prysm/v6/config/params" - blocktypes "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - payloadattribute "github.com/OffchainLabs/prysm/v6/consensus-types/payload-attribute" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing/trace" - enginev1 "github.com/OffchainLabs/prysm/v6/proto/engine/v1" - "github.com/OffchainLabs/prysm/v6/runtime/version" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/async/event" + "github.com/OffchainLabs/prysm/v7/beacon-chain/cache" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/blocks" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/feed" + statefeed "github.com/OffchainLabs/prysm/v7/beacon-chain/core/feed/state" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/time" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/transition" + "github.com/OffchainLabs/prysm/v7/beacon-chain/execution" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/config/features" + "github.com/OffchainLabs/prysm/v7/config/params" + blocktypes "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + payloadattribute "github.com/OffchainLabs/prysm/v7/consensus-types/payload-attribute" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing/trace" + enginev1 "github.com/OffchainLabs/prysm/v7/proto/engine/v1" + "github.com/OffchainLabs/prysm/v7/runtime/version" + "github.com/OffchainLabs/prysm/v7/time/slots" 
"github.com/ethereum/go-ethereum/common" "github.com/pkg/errors" "github.com/sirupsen/logrus" diff --git a/beacon-chain/blockchain/execution_engine_test.go b/beacon-chain/blockchain/execution_engine_test.go index 8ef6c48337..3df1118463 100644 --- a/beacon-chain/blockchain/execution_engine_test.go +++ b/beacon-chain/blockchain/execution_engine_test.go @@ -5,26 +5,26 @@ import ( "testing" "time" - "github.com/OffchainLabs/prysm/v6/beacon-chain/cache" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/blocks" - "github.com/OffchainLabs/prysm/v6/beacon-chain/execution" - mockExecution "github.com/OffchainLabs/prysm/v6/beacon-chain/execution/testing" - forkchoicetypes "github.com/OffchainLabs/prysm/v6/beacon-chain/forkchoice/types" - bstate "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - "github.com/OffchainLabs/prysm/v6/config/features" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - consensusblocks "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/genesis" - v1 "github.com/OffchainLabs/prysm/v6/proto/engine/v1" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/beacon-chain/cache" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/blocks" + "github.com/OffchainLabs/prysm/v7/beacon-chain/execution" + mockExecution "github.com/OffchainLabs/prysm/v7/beacon-chain/execution/testing" + forkchoicetypes "github.com/OffchainLabs/prysm/v7/beacon-chain/forkchoice/types" + bstate "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + "github.com/OffchainLabs/prysm/v7/config/features" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + consensusblocks "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/genesis" + v1 "github.com/OffchainLabs/prysm/v7/proto/engine/v1" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" "github.com/ethereum/go-ethereum/common" gethtypes "github.com/ethereum/go-ethereum/core/types" ) diff --git a/beacon-chain/blockchain/forkchoice_update_execution.go b/beacon-chain/blockchain/forkchoice_update_execution.go index 51530e0744..79d9132e74 100644 --- a/beacon-chain/blockchain/forkchoice_update_execution.go +++ b/beacon-chain/blockchain/forkchoice_update_execution.go @@ -5,14 +5,14 @@ import ( "fmt" "time" - doublylinkedtree "github.com/OffchainLabs/prysm/v6/beacon-chain/forkchoice/doubly-linked-tree" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/config/params" - 
"github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - payloadattribute "github.com/OffchainLabs/prysm/v6/consensus-types/payload-attribute" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing/trace" - "github.com/OffchainLabs/prysm/v6/time/slots" + doublylinkedtree "github.com/OffchainLabs/prysm/v7/beacon-chain/forkchoice/doubly-linked-tree" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + payloadattribute "github.com/OffchainLabs/prysm/v7/consensus-types/payload-attribute" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing/trace" + "github.com/OffchainLabs/prysm/v7/time/slots" "github.com/pkg/errors" "github.com/sirupsen/logrus" ) diff --git a/beacon-chain/blockchain/forkchoice_update_execution_test.go b/beacon-chain/blockchain/forkchoice_update_execution_test.go index b7b8e1d84c..2f7746147d 100644 --- a/beacon-chain/blockchain/forkchoice_update_execution_test.go +++ b/beacon-chain/blockchain/forkchoice_update_execution_test.go @@ -4,15 +4,15 @@ import ( "testing" "time" - "github.com/OffchainLabs/prysm/v6/beacon-chain/cache" - testDB "github.com/OffchainLabs/prysm/v6/beacon-chain/db/testing" - mockExecution "github.com/OffchainLabs/prysm/v6/beacon-chain/execution/testing" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/beacon-chain/cache" + testDB "github.com/OffchainLabs/prysm/v7/beacon-chain/db/testing" + mockExecution "github.com/OffchainLabs/prysm/v7/beacon-chain/execution/testing" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" logTest "github.com/sirupsen/logrus/hooks/test" ) diff --git a/beacon-chain/blockchain/head.go b/beacon-chain/blockchain/head.go index d4a73dcd96..c31e942f0f 100644 --- a/beacon-chain/blockchain/head.go +++ b/beacon-chain/blockchain/head.go @@ -5,21 +5,21 @@ import ( "context" "fmt" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/feed" - statefeed "github.com/OffchainLabs/prysm/v6/beacon-chain/core/feed/state" - "github.com/OffchainLabs/prysm/v6/beacon-chain/forkchoice" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/config/features" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing/trace" - ethpbv1 "github.com/OffchainLabs/prysm/v6/proto/eth/v1" - "github.com/OffchainLabs/prysm/v6/runtime/version" - "github.com/OffchainLabs/prysm/v6/time/slots" + 
"github.com/OffchainLabs/prysm/v7/beacon-chain/core/feed" + statefeed "github.com/OffchainLabs/prysm/v7/beacon-chain/core/feed/state" + "github.com/OffchainLabs/prysm/v7/beacon-chain/forkchoice" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/config/features" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing/trace" + ethpbv1 "github.com/OffchainLabs/prysm/v7/proto/eth/v1" + "github.com/OffchainLabs/prysm/v7/runtime/version" + "github.com/OffchainLabs/prysm/v7/time/slots" "github.com/pkg/errors" "github.com/sirupsen/logrus" ) diff --git a/beacon-chain/blockchain/head_sync_committee_info.go b/beacon-chain/blockchain/head_sync_committee_info.go index 343ce8f7cb..1d832a6c7a 100644 --- a/beacon-chain/blockchain/head_sync_committee_info.go +++ b/beacon-chain/blockchain/head_sync_committee_info.go @@ -4,17 +4,17 @@ import ( "context" "fmt" - "github.com/OffchainLabs/prysm/v6/async" - "github.com/OffchainLabs/prysm/v6/beacon-chain/cache" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/altair" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/signing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/transition" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/async" + "github.com/OffchainLabs/prysm/v7/beacon-chain/cache" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/altair" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/signing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/transition" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/time/slots" "github.com/pkg/errors" ) diff --git a/beacon-chain/blockchain/head_sync_committee_info_test.go b/beacon-chain/blockchain/head_sync_committee_info_test.go index 0aca8a32f6..c1b1c9b4d4 100644 --- a/beacon-chain/blockchain/head_sync_committee_info_test.go +++ b/beacon-chain/blockchain/head_sync_committee_info_test.go @@ -3,13 +3,13 @@ package blockchain import ( "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/cache" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/signing" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/beacon-chain/cache" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/signing" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + 
"github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" + "github.com/OffchainLabs/prysm/v7/time/slots" ) func TestService_HeadSyncCommitteeIndices(t *testing.T) { diff --git a/beacon-chain/blockchain/head_test.go b/beacon-chain/blockchain/head_test.go index a7221ec1e6..733f613237 100644 --- a/beacon-chain/blockchain/head_test.go +++ b/beacon-chain/blockchain/head_test.go @@ -7,20 +7,20 @@ import ( "testing" "time" - mock "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/testing" - testDB "github.com/OffchainLabs/prysm/v6/beacon-chain/db/testing" - forkchoicetypes "github.com/OffchainLabs/prysm/v6/beacon-chain/forkchoice/types" - "github.com/OffchainLabs/prysm/v6/beacon-chain/operations/blstoexec" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - ethpbv1 "github.com/OffchainLabs/prysm/v6/proto/eth/v1" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" - "github.com/OffchainLabs/prysm/v6/time/slots" + mock "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain/testing" + testDB "github.com/OffchainLabs/prysm/v7/beacon-chain/db/testing" + forkchoicetypes "github.com/OffchainLabs/prysm/v7/beacon-chain/forkchoice/types" + "github.com/OffchainLabs/prysm/v7/beacon-chain/operations/blstoexec" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + ethpbv1 "github.com/OffchainLabs/prysm/v7/proto/eth/v1" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" + "github.com/OffchainLabs/prysm/v7/time/slots" logTest "github.com/sirupsen/logrus/hooks/test" ) diff --git a/beacon-chain/blockchain/init_sync_process_block.go b/beacon-chain/blockchain/init_sync_process_block.go index f3acd93cd9..6d6c7505c2 100644 --- a/beacon-chain/blockchain/init_sync_process_block.go +++ b/beacon-chain/blockchain/init_sync_process_block.go @@ -3,8 +3,8 @@ package blockchain import ( "context" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" "github.com/pkg/errors" ) diff --git a/beacon-chain/blockchain/init_sync_process_block_test.go b/beacon-chain/blockchain/init_sync_process_block_test.go index aa036d9b76..81a00f96aa 100644 --- a/beacon-chain/blockchain/init_sync_process_block_test.go +++ b/beacon-chain/blockchain/init_sync_process_block_test.go @@ -3,10 +3,10 @@ package blockchain import ( "testing" - testDB "github.com/OffchainLabs/prysm/v6/beacon-chain/db/testing" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + testDB "github.com/OffchainLabs/prysm/v7/beacon-chain/db/testing" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + 
"github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" ) func TestService_getBlock(t *testing.T) { diff --git a/beacon-chain/blockchain/init_test.go b/beacon-chain/blockchain/init_test.go index 9e8d92421a..9dff62fa06 100644 --- a/beacon-chain/blockchain/init_test.go +++ b/beacon-chain/blockchain/init_test.go @@ -1,7 +1,7 @@ package blockchain import ( - "github.com/OffchainLabs/prysm/v6/config/params" + "github.com/OffchainLabs/prysm/v7/config/params" ) func init() { diff --git a/beacon-chain/blockchain/kzg/BUILD.bazel b/beacon-chain/blockchain/kzg/BUILD.bazel index 7535f490cd..6b5cdffc7c 100644 --- a/beacon-chain/blockchain/kzg/BUILD.bazel +++ b/beacon-chain/blockchain/kzg/BUILD.bazel @@ -8,7 +8,7 @@ go_library( "validation.go", ], embedsrcs = ["trusted_setup_4096.json"], - importpath = "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/kzg", + importpath = "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain/kzg", visibility = ["//visibility:public"], deps = [ "//consensus-types/blocks:go_default_library", diff --git a/beacon-chain/blockchain/kzg/trusted_setup_test.go b/beacon-chain/blockchain/kzg/trusted_setup_test.go index 1dcbd8a6ba..75ba22d60b 100644 --- a/beacon-chain/blockchain/kzg/trusted_setup_test.go +++ b/beacon-chain/blockchain/kzg/trusted_setup_test.go @@ -3,7 +3,7 @@ package kzg import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/require" ) func TestStart(t *testing.T) { diff --git a/beacon-chain/blockchain/kzg/validation.go b/beacon-chain/blockchain/kzg/validation.go index 35be112828..29697b40c9 100644 --- a/beacon-chain/blockchain/kzg/validation.go +++ b/beacon-chain/blockchain/kzg/validation.go @@ -3,7 +3,7 @@ package kzg import ( "fmt" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" GoKZG "github.com/crate-crypto/go-kzg-4844" ckzg4844 "github.com/ethereum/c-kzg-4844/v2/bindings/go" "github.com/pkg/errors" diff --git a/beacon-chain/blockchain/kzg/validation_test.go b/beacon-chain/blockchain/kzg/validation_test.go index ddfcb0eb08..fdfcd67297 100644 --- a/beacon-chain/blockchain/kzg/validation_test.go +++ b/beacon-chain/blockchain/kzg/validation_test.go @@ -3,9 +3,9 @@ package kzg import ( "testing" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/crypto/random" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/crypto/random" + "github.com/OffchainLabs/prysm/v7/testing/require" GoKZG "github.com/crate-crypto/go-kzg-4844" ) diff --git a/beacon-chain/blockchain/log.go b/beacon-chain/blockchain/log.go index d9886786ce..8e768a5f25 100644 --- a/beacon-chain/blockchain/log.go +++ b/beacon-chain/blockchain/log.go @@ -5,15 +5,15 @@ import ( "fmt" "time" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/blocks" - "github.com/OffchainLabs/prysm/v6/config/params" - consensus_types "github.com/OffchainLabs/prysm/v6/consensus-types" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" - prysmTime "github.com/OffchainLabs/prysm/v6/time" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/blocks" + 
"github.com/OffchainLabs/prysm/v7/config/params" + consensus_types "github.com/OffchainLabs/prysm/v7/consensus-types" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" + prysmTime "github.com/OffchainLabs/prysm/v7/time" + "github.com/OffchainLabs/prysm/v7/time/slots" "github.com/pkg/errors" "github.com/sirupsen/logrus" ) diff --git a/beacon-chain/blockchain/log_test.go b/beacon-chain/blockchain/log_test.go index 2c1d6b19a2..5f64aaa78a 100644 --- a/beacon-chain/blockchain/log_test.go +++ b/beacon-chain/blockchain/log_test.go @@ -3,11 +3,11 @@ package blockchain import ( "testing" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - enginev1 "github.com/OffchainLabs/prysm/v6/proto/engine/v1" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + enginev1 "github.com/OffchainLabs/prysm/v7/proto/engine/v1" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/require" logTest "github.com/sirupsen/logrus/hooks/test" ) diff --git a/beacon-chain/blockchain/metrics.go b/beacon-chain/blockchain/metrics.go index c03b3b3d0b..bf6aeffedb 100644 --- a/beacon-chain/blockchain/metrics.go +++ b/beacon-chain/blockchain/metrics.go @@ -3,15 +3,15 @@ package blockchain import ( "context" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/altair" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/epoch/precompute" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/altair" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/epoch/precompute" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" "github.com/pkg/errors" "github.com/prometheus/client_golang/prometheus" "github.com/prometheus/client_golang/prometheus/promauto" diff --git a/beacon-chain/blockchain/metrics_test.go b/beacon-chain/blockchain/metrics_test.go index 31a9cf72d8..dd820c690c 100644 --- a/beacon-chain/blockchain/metrics_test.go +++ b/beacon-chain/blockchain/metrics_test.go @@ -3,9 +3,9 @@ package blockchain import ( "testing" - eth "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + eth "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" ) func 
TestReportEpochMetrics_BadAttestation(t *testing.T) { diff --git a/beacon-chain/blockchain/mock_test.go b/beacon-chain/blockchain/mock_test.go index bbd387f54f..bf2710b017 100644 --- a/beacon-chain/blockchain/mock_test.go +++ b/beacon-chain/blockchain/mock_test.go @@ -3,12 +3,12 @@ package blockchain import ( "testing" - mock "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/testing" - testDB "github.com/OffchainLabs/prysm/v6/beacon-chain/db/testing" - doublylinkedtree "github.com/OffchainLabs/prysm/v6/beacon-chain/forkchoice/doubly-linked-tree" - "github.com/OffchainLabs/prysm/v6/beacon-chain/startup" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state/stategen" - "github.com/OffchainLabs/prysm/v6/testing/require" + mock "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain/testing" + testDB "github.com/OffchainLabs/prysm/v7/beacon-chain/db/testing" + doublylinkedtree "github.com/OffchainLabs/prysm/v7/beacon-chain/forkchoice/doubly-linked-tree" + "github.com/OffchainLabs/prysm/v7/beacon-chain/startup" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state/stategen" + "github.com/OffchainLabs/prysm/v7/testing/require" ) func testServiceOptsWithDB(t testing.TB) []Option { diff --git a/beacon-chain/blockchain/options.go b/beacon-chain/blockchain/options.go index df289fce96..d4fc428ba9 100644 --- a/beacon-chain/blockchain/options.go +++ b/beacon-chain/blockchain/options.go @@ -3,23 +3,23 @@ package blockchain import ( "time" - "github.com/OffchainLabs/prysm/v6/async/event" - "github.com/OffchainLabs/prysm/v6/beacon-chain/cache" - statefeed "github.com/OffchainLabs/prysm/v6/beacon-chain/core/feed/state" - "github.com/OffchainLabs/prysm/v6/beacon-chain/db" - "github.com/OffchainLabs/prysm/v6/beacon-chain/db/filesystem" - "github.com/OffchainLabs/prysm/v6/beacon-chain/execution" - "github.com/OffchainLabs/prysm/v6/beacon-chain/forkchoice" - lightclient "github.com/OffchainLabs/prysm/v6/beacon-chain/light-client" - "github.com/OffchainLabs/prysm/v6/beacon-chain/operations/attestations" - "github.com/OffchainLabs/prysm/v6/beacon-chain/operations/blstoexec" - "github.com/OffchainLabs/prysm/v6/beacon-chain/operations/slashings" - "github.com/OffchainLabs/prysm/v6/beacon-chain/operations/voluntaryexits" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p" - "github.com/OffchainLabs/prysm/v6/beacon-chain/startup" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state/stategen" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/async/event" + "github.com/OffchainLabs/prysm/v7/beacon-chain/cache" + statefeed "github.com/OffchainLabs/prysm/v7/beacon-chain/core/feed/state" + "github.com/OffchainLabs/prysm/v7/beacon-chain/db" + "github.com/OffchainLabs/prysm/v7/beacon-chain/db/filesystem" + "github.com/OffchainLabs/prysm/v7/beacon-chain/execution" + "github.com/OffchainLabs/prysm/v7/beacon-chain/forkchoice" + lightclient "github.com/OffchainLabs/prysm/v7/beacon-chain/light-client" + "github.com/OffchainLabs/prysm/v7/beacon-chain/operations/attestations" + "github.com/OffchainLabs/prysm/v7/beacon-chain/operations/blstoexec" + "github.com/OffchainLabs/prysm/v7/beacon-chain/operations/slashings" + "github.com/OffchainLabs/prysm/v7/beacon-chain/operations/voluntaryexits" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p" + "github.com/OffchainLabs/prysm/v7/beacon-chain/startup" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + 
"github.com/OffchainLabs/prysm/v7/beacon-chain/state/stategen" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" ) type Option func(s *Service) error diff --git a/beacon-chain/blockchain/pow_block.go b/beacon-chain/blockchain/pow_block.go index f3a45c46ba..603bba54be 100644 --- a/beacon-chain/blockchain/pow_block.go +++ b/beacon-chain/blockchain/pow_block.go @@ -6,13 +6,13 @@ import ( "fmt" "math/big" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/runtime/version" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/runtime/version" + "github.com/OffchainLabs/prysm/v7/time/slots" "github.com/ethereum/go-ethereum/common" "github.com/ethereum/go-ethereum/common/hexutil" "github.com/holiman/uint256" diff --git a/beacon-chain/blockchain/pow_block_test.go b/beacon-chain/blockchain/pow_block_test.go index 3458ab9121..f6da6925ee 100644 --- a/beacon-chain/blockchain/pow_block_test.go +++ b/beacon-chain/blockchain/pow_block_test.go @@ -5,14 +5,14 @@ import ( "math/big" "testing" - mocks "github.com/OffchainLabs/prysm/v6/beacon-chain/execution/testing" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - enginev1 "github.com/OffchainLabs/prysm/v6/proto/engine/v1" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + mocks "github.com/OffchainLabs/prysm/v7/beacon-chain/execution/testing" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + enginev1 "github.com/OffchainLabs/prysm/v7/proto/engine/v1" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" gethtypes "github.com/ethereum/go-ethereum/core/types" "github.com/holiman/uint256" ) diff --git a/beacon-chain/blockchain/process_attestation.go b/beacon-chain/blockchain/process_attestation.go index f97fcaebe8..dfbace64fd 100644 --- a/beacon-chain/blockchain/process_attestation.go +++ b/beacon-chain/blockchain/process_attestation.go @@ -4,13 +4,13 @@ import ( "context" "time" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing/trace" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1/attestation" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + 
"github.com/OffchainLabs/prysm/v7/monitoring/tracing/trace" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1/attestation" + "github.com/OffchainLabs/prysm/v7/time/slots" "github.com/pkg/errors" ) diff --git a/beacon-chain/blockchain/process_attestation_helpers.go b/beacon-chain/blockchain/process_attestation_helpers.go index e3336f1fa8..385a1780a5 100644 --- a/beacon-chain/blockchain/process_attestation_helpers.go +++ b/beacon-chain/blockchain/process_attestation_helpers.go @@ -7,15 +7,15 @@ import ( "strconv" "time" - "github.com/OffchainLabs/prysm/v6/async" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/transition" - forkchoicetypes "github.com/OffchainLabs/prysm/v6/beacon-chain/forkchoice/types" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/async" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/transition" + forkchoicetypes "github.com/OffchainLabs/prysm/v7/beacon-chain/forkchoice/types" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/time/slots" "github.com/pkg/errors" "github.com/sirupsen/logrus" ) diff --git a/beacon-chain/blockchain/process_attestation_test.go b/beacon-chain/blockchain/process_attestation_test.go index b8aa5161cf..dbccecf0ec 100644 --- a/beacon-chain/blockchain/process_attestation_test.go +++ b/beacon-chain/blockchain/process_attestation_test.go @@ -6,18 +6,18 @@ import ( "testing" "time" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/transition" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/crypto/bls" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/transition" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/crypto/bls" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" + "github.com/OffchainLabs/prysm/v7/time/slots" ) func TestStore_OnAttestation_ErrorConditions(t *testing.T) { diff --git a/beacon-chain/blockchain/process_block.go 
b/beacon-chain/blockchain/process_block.go index bb7169c2f2..44d098a8c2 100644 --- a/beacon-chain/blockchain/process_block.go +++ b/beacon-chain/blockchain/process_block.go @@ -6,29 +6,29 @@ import ( "time" "github.com/OffchainLabs/go-bitfield" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/blocks" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/peerdas" - coreTime "github.com/OffchainLabs/prysm/v6/beacon-chain/core/time" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/transition" - "github.com/OffchainLabs/prysm/v6/beacon-chain/das" - "github.com/OffchainLabs/prysm/v6/beacon-chain/db/filesystem" - forkchoicetypes "github.com/OffchainLabs/prysm/v6/beacon-chain/forkchoice/types" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/config/features" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - consensusblocks "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/crypto/bls" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing/trace" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1/attestation" - "github.com/OffchainLabs/prysm/v6/runtime/version" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/blocks" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/peerdas" + coreTime "github.com/OffchainLabs/prysm/v7/beacon-chain/core/time" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/transition" + "github.com/OffchainLabs/prysm/v7/beacon-chain/das" + "github.com/OffchainLabs/prysm/v7/beacon-chain/db/filesystem" + forkchoicetypes "github.com/OffchainLabs/prysm/v7/beacon-chain/forkchoice/types" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/config/features" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + consensusblocks "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/crypto/bls" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing/trace" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1/attestation" + "github.com/OffchainLabs/prysm/v7/runtime/version" + "github.com/OffchainLabs/prysm/v7/time/slots" "github.com/pkg/errors" "github.com/sirupsen/logrus" ) diff --git a/beacon-chain/blockchain/process_block_helpers.go b/beacon-chain/blockchain/process_block_helpers.go index 9e15c56719..df7790752a 100644 --- a/beacon-chain/blockchain/process_block_helpers.go +++ b/beacon-chain/blockchain/process_block_helpers.go @@ -6,23 +6,23 @@ import ( "slices" "time" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/feed" - statefeed 
"github.com/OffchainLabs/prysm/v6/beacon-chain/core/feed/state" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/transition" - doublylinkedtree "github.com/OffchainLabs/prysm/v6/beacon-chain/forkchoice/doubly-linked-tree" - forkchoicetypes "github.com/OffchainLabs/prysm/v6/beacon-chain/forkchoice/types" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - field_params "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - consensus_blocks "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - mathutil "github.com/OffchainLabs/prysm/v6/math" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing/trace" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/feed" + statefeed "github.com/OffchainLabs/prysm/v7/beacon-chain/core/feed/state" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/transition" + doublylinkedtree "github.com/OffchainLabs/prysm/v7/beacon-chain/forkchoice/doubly-linked-tree" + forkchoicetypes "github.com/OffchainLabs/prysm/v7/beacon-chain/forkchoice/types" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + field_params "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + consensus_blocks "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + mathutil "github.com/OffchainLabs/prysm/v7/math" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing/trace" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/time/slots" "github.com/ethereum/go-ethereum/common" "github.com/pkg/errors" "github.com/sirupsen/logrus" diff --git a/beacon-chain/blockchain/process_block_test.go b/beacon-chain/blockchain/process_block_test.go index f1daba177e..7263cd4cad 100644 --- a/beacon-chain/blockchain/process_block_test.go +++ b/beacon-chain/blockchain/process_block_test.go @@ -10,43 +10,43 @@ import ( "time" "github.com/OffchainLabs/go-bitfield" - mock "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/cache" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/blocks" - statefeed "github.com/OffchainLabs/prysm/v6/beacon-chain/core/feed/state" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/peerdas" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/signing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/transition" - "github.com/OffchainLabs/prysm/v6/beacon-chain/das" - "github.com/OffchainLabs/prysm/v6/beacon-chain/db" - "github.com/OffchainLabs/prysm/v6/beacon-chain/db/filesystem" - testDB "github.com/OffchainLabs/prysm/v6/beacon-chain/db/testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/execution" - mockExecution "github.com/OffchainLabs/prysm/v6/beacon-chain/execution/testing" - doublylinkedtree 
"github.com/OffchainLabs/prysm/v6/beacon-chain/forkchoice/doubly-linked-tree" - forkchoicetypes "github.com/OffchainLabs/prysm/v6/beacon-chain/forkchoice/types" - lightClient "github.com/OffchainLabs/prysm/v6/beacon-chain/light-client" - "github.com/OffchainLabs/prysm/v6/beacon-chain/operations/attestations/kv" - mockp2p "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/config/features" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - consensusblocks "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/crypto/bls" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/genesis" - enginev1 "github.com/OffchainLabs/prysm/v6/proto/engine/v1" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" - prysmTime "github.com/OffchainLabs/prysm/v6/time" - "github.com/OffchainLabs/prysm/v6/time/slots" + mock "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain/testing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/cache" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/blocks" + statefeed "github.com/OffchainLabs/prysm/v7/beacon-chain/core/feed/state" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/peerdas" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/signing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/transition" + "github.com/OffchainLabs/prysm/v7/beacon-chain/das" + "github.com/OffchainLabs/prysm/v7/beacon-chain/db" + "github.com/OffchainLabs/prysm/v7/beacon-chain/db/filesystem" + testDB "github.com/OffchainLabs/prysm/v7/beacon-chain/db/testing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/execution" + mockExecution "github.com/OffchainLabs/prysm/v7/beacon-chain/execution/testing" + doublylinkedtree "github.com/OffchainLabs/prysm/v7/beacon-chain/forkchoice/doubly-linked-tree" + forkchoicetypes "github.com/OffchainLabs/prysm/v7/beacon-chain/forkchoice/types" + lightClient "github.com/OffchainLabs/prysm/v7/beacon-chain/light-client" + "github.com/OffchainLabs/prysm/v7/beacon-chain/operations/attestations/kv" + mockp2p "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/testing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/config/features" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + consensusblocks "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/crypto/bls" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/genesis" + enginev1 "github.com/OffchainLabs/prysm/v7/proto/engine/v1" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" + "github.com/OffchainLabs/prysm/v7/testing/assert" + 
"github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" + prysmTime "github.com/OffchainLabs/prysm/v7/time" + "github.com/OffchainLabs/prysm/v7/time/slots" "github.com/ethereum/go-ethereum/common" gethtypes "github.com/ethereum/go-ethereum/core/types" "github.com/pkg/errors" diff --git a/beacon-chain/blockchain/receive_attestation.go b/beacon-chain/blockchain/receive_attestation.go index 84ce75293e..e0d2f9abef 100644 --- a/beacon-chain/blockchain/receive_attestation.go +++ b/beacon-chain/blockchain/receive_attestation.go @@ -6,16 +6,16 @@ import ( "fmt" "time" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/config/features" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing/trace" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/config/features" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing/trace" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" + "github.com/OffchainLabs/prysm/v7/time/slots" "github.com/pkg/errors" "github.com/sirupsen/logrus" ) diff --git a/beacon-chain/blockchain/receive_attestation_test.go b/beacon-chain/blockchain/receive_attestation_test.go index 5a196d3c34..98a5afd8af 100644 --- a/beacon-chain/blockchain/receive_attestation_test.go +++ b/beacon-chain/blockchain/receive_attestation_test.go @@ -4,18 +4,18 @@ import ( "testing" "time" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/transition" - forkchoicetypes "github.com/OffchainLabs/prysm/v6/beacon-chain/forkchoice/types" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" - prysmTime "github.com/OffchainLabs/prysm/v6/time" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/transition" + forkchoicetypes "github.com/OffchainLabs/prysm/v7/beacon-chain/forkchoice/types" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" + prysmTime "github.com/OffchainLabs/prysm/v7/time" + "github.com/OffchainLabs/prysm/v7/time/slots" logTest 
"github.com/sirupsen/logrus/hooks/test" ) diff --git a/beacon-chain/blockchain/receive_blob.go b/beacon-chain/blockchain/receive_blob.go index 14ad2502ca..6609d8b93a 100644 --- a/beacon-chain/blockchain/receive_blob.go +++ b/beacon-chain/blockchain/receive_blob.go @@ -3,8 +3,8 @@ package blockchain import ( "context" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" ) // SendNewBlobEvent sends a message to the BlobNotifier channel that the blob diff --git a/beacon-chain/blockchain/receive_block.go b/beacon-chain/blockchain/receive_block.go index 00064234b7..6b9334eaa7 100644 --- a/beacon-chain/blockchain/receive_block.go +++ b/beacon-chain/blockchain/receive_block.go @@ -6,27 +6,27 @@ import ( "fmt" "time" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/electra" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/feed" - statefeed "github.com/OffchainLabs/prysm/v6/beacon-chain/core/feed/state" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - coreTime "github.com/OffchainLabs/prysm/v6/beacon-chain/core/time" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/transition" - "github.com/OffchainLabs/prysm/v6/beacon-chain/das" - "github.com/OffchainLabs/prysm/v6/beacon-chain/slasher/types" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/config/features" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing/trace" - ethpbv1 "github.com/OffchainLabs/prysm/v6/proto/eth/v1" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1/attestation" - "github.com/OffchainLabs/prysm/v6/runtime/version" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/electra" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/feed" + statefeed "github.com/OffchainLabs/prysm/v7/beacon-chain/core/feed/state" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + coreTime "github.com/OffchainLabs/prysm/v7/beacon-chain/core/time" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/transition" + "github.com/OffchainLabs/prysm/v7/beacon-chain/das" + "github.com/OffchainLabs/prysm/v7/beacon-chain/slasher/types" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/config/features" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing/trace" + ethpbv1 "github.com/OffchainLabs/prysm/v7/proto/eth/v1" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1/attestation" + "github.com/OffchainLabs/prysm/v7/runtime/version" + "github.com/OffchainLabs/prysm/v7/time/slots" "github.com/pkg/errors" "golang.org/x/sync/errgroup" ) diff --git 
a/beacon-chain/blockchain/receive_block_test.go b/beacon-chain/blockchain/receive_block_test.go index 7b7fb3c937..daf9d97a47 100644 --- a/beacon-chain/blockchain/receive_block_test.go +++ b/beacon-chain/blockchain/receive_block_test.go @@ -5,27 +5,27 @@ import ( "testing" "time" - blockchainTesting "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/cache" - statefeed "github.com/OffchainLabs/prysm/v6/beacon-chain/core/feed/state" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/das" - forkchoicetypes "github.com/OffchainLabs/prysm/v6/beacon-chain/forkchoice/types" - lightClient "github.com/OffchainLabs/prysm/v6/beacon-chain/light-client" - "github.com/OffchainLabs/prysm/v6/beacon-chain/operations/voluntaryexits" - "github.com/OffchainLabs/prysm/v6/config/features" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - ethpbv1 "github.com/OffchainLabs/prysm/v6/proto/eth/v1" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" - "github.com/OffchainLabs/prysm/v6/time/slots" + blockchainTesting "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain/testing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/cache" + statefeed "github.com/OffchainLabs/prysm/v7/beacon-chain/core/feed/state" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/das" + forkchoicetypes "github.com/OffchainLabs/prysm/v7/beacon-chain/forkchoice/types" + lightClient "github.com/OffchainLabs/prysm/v7/beacon-chain/light-client" + "github.com/OffchainLabs/prysm/v7/beacon-chain/operations/voluntaryexits" + "github.com/OffchainLabs/prysm/v7/config/features" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + ethpbv1 "github.com/OffchainLabs/prysm/v7/proto/eth/v1" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" + "github.com/OffchainLabs/prysm/v7/time/slots" "github.com/ethereum/go-ethereum/common/hexutil" logTest "github.com/sirupsen/logrus/hooks/test" ) diff --git a/beacon-chain/blockchain/receive_data_column.go b/beacon-chain/blockchain/receive_data_column.go index bf362d84a9..078bcf2e3a 100644 --- a/beacon-chain/blockchain/receive_data_column.go +++ b/beacon-chain/blockchain/receive_data_column.go @@ -1,7 +1,7 @@ package blockchain import ( - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" "github.com/pkg/errors" ) diff --git a/beacon-chain/blockchain/service.go b/beacon-chain/blockchain/service.go index bb152b5339..7891fbc5fe 100644 --- 
a/beacon-chain/blockchain/service.go +++ b/beacon-chain/blockchain/service.go @@ -9,36 +9,36 @@ import ( "sync" "time" - "github.com/OffchainLabs/prysm/v6/async/event" - "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/kzg" - "github.com/OffchainLabs/prysm/v6/beacon-chain/cache" - statefeed "github.com/OffchainLabs/prysm/v6/beacon-chain/core/feed/state" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - coreTime "github.com/OffchainLabs/prysm/v6/beacon-chain/core/time" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/transition" - "github.com/OffchainLabs/prysm/v6/beacon-chain/db" - "github.com/OffchainLabs/prysm/v6/beacon-chain/db/filesystem" - "github.com/OffchainLabs/prysm/v6/beacon-chain/execution" - f "github.com/OffchainLabs/prysm/v6/beacon-chain/forkchoice" - lightClient "github.com/OffchainLabs/prysm/v6/beacon-chain/light-client" - "github.com/OffchainLabs/prysm/v6/beacon-chain/operations/attestations" - "github.com/OffchainLabs/prysm/v6/beacon-chain/operations/blstoexec" - "github.com/OffchainLabs/prysm/v6/beacon-chain/operations/slashings" - "github.com/OffchainLabs/prysm/v6/beacon-chain/operations/voluntaryexits" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p" - "github.com/OffchainLabs/prysm/v6/beacon-chain/startup" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state/stategen" - "github.com/OffchainLabs/prysm/v6/cmd/beacon-chain/flags" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing/trace" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - prysmTime "github.com/OffchainLabs/prysm/v6/time" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/async/event" + "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain/kzg" + "github.com/OffchainLabs/prysm/v7/beacon-chain/cache" + statefeed "github.com/OffchainLabs/prysm/v7/beacon-chain/core/feed/state" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + coreTime "github.com/OffchainLabs/prysm/v7/beacon-chain/core/time" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/transition" + "github.com/OffchainLabs/prysm/v7/beacon-chain/db" + "github.com/OffchainLabs/prysm/v7/beacon-chain/db/filesystem" + "github.com/OffchainLabs/prysm/v7/beacon-chain/execution" + f "github.com/OffchainLabs/prysm/v7/beacon-chain/forkchoice" + lightClient "github.com/OffchainLabs/prysm/v7/beacon-chain/light-client" + "github.com/OffchainLabs/prysm/v7/beacon-chain/operations/attestations" + "github.com/OffchainLabs/prysm/v7/beacon-chain/operations/blstoexec" + "github.com/OffchainLabs/prysm/v7/beacon-chain/operations/slashings" + "github.com/OffchainLabs/prysm/v7/beacon-chain/operations/voluntaryexits" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p" + "github.com/OffchainLabs/prysm/v7/beacon-chain/startup" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state/stategen" + "github.com/OffchainLabs/prysm/v7/cmd/beacon-chain/flags" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + 
"github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing/trace" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + prysmTime "github.com/OffchainLabs/prysm/v7/time" + "github.com/OffchainLabs/prysm/v7/time/slots" "github.com/pkg/errors" "github.com/sirupsen/logrus" ) diff --git a/beacon-chain/blockchain/service_norace_test.go b/beacon-chain/blockchain/service_norace_test.go index 22312c258d..a841acf992 100644 --- a/beacon-chain/blockchain/service_norace_test.go +++ b/beacon-chain/blockchain/service_norace_test.go @@ -4,9 +4,9 @@ import ( "io" "testing" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" "github.com/sirupsen/logrus" ) diff --git a/beacon-chain/blockchain/service_test.go b/beacon-chain/blockchain/service_test.go index 5ad85bebc3..03148af5d6 100644 --- a/beacon-chain/blockchain/service_test.go +++ b/beacon-chain/blockchain/service_test.go @@ -7,38 +7,38 @@ import ( "testing" "time" - "github.com/OffchainLabs/prysm/v6/beacon-chain/cache" - "github.com/OffchainLabs/prysm/v6/beacon-chain/cache/depositsnapshot" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/altair" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/transition" - "github.com/OffchainLabs/prysm/v6/beacon-chain/db" - testDB "github.com/OffchainLabs/prysm/v6/beacon-chain/db/testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/execution" - mockExecution "github.com/OffchainLabs/prysm/v6/beacon-chain/execution/testing" - doublylinkedtree "github.com/OffchainLabs/prysm/v6/beacon-chain/forkchoice/doubly-linked-tree" - "github.com/OffchainLabs/prysm/v6/beacon-chain/operations/attestations" - "github.com/OffchainLabs/prysm/v6/beacon-chain/operations/slashings" - "github.com/OffchainLabs/prysm/v6/beacon-chain/operations/voluntaryexits" - "github.com/OffchainLabs/prysm/v6/beacon-chain/startup" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state/stategen" - "github.com/OffchainLabs/prysm/v6/cmd/beacon-chain/flags" - "github.com/OffchainLabs/prysm/v6/config/features" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - consensusblocks "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/container/trie" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/genesis" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/beacon-chain/cache" + "github.com/OffchainLabs/prysm/v7/beacon-chain/cache/depositsnapshot" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/altair" + 
"github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/transition" + "github.com/OffchainLabs/prysm/v7/beacon-chain/db" + testDB "github.com/OffchainLabs/prysm/v7/beacon-chain/db/testing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/execution" + mockExecution "github.com/OffchainLabs/prysm/v7/beacon-chain/execution/testing" + doublylinkedtree "github.com/OffchainLabs/prysm/v7/beacon-chain/forkchoice/doubly-linked-tree" + "github.com/OffchainLabs/prysm/v7/beacon-chain/operations/attestations" + "github.com/OffchainLabs/prysm/v7/beacon-chain/operations/slashings" + "github.com/OffchainLabs/prysm/v7/beacon-chain/operations/voluntaryexits" + "github.com/OffchainLabs/prysm/v7/beacon-chain/startup" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state/stategen" + "github.com/OffchainLabs/prysm/v7/cmd/beacon-chain/flags" + "github.com/OffchainLabs/prysm/v7/config/features" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + consensusblocks "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/container/trie" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/genesis" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" + "github.com/OffchainLabs/prysm/v7/time/slots" "github.com/ethereum/go-ethereum/common" "github.com/ethereum/go-ethereum/common/hexutil" logTest "github.com/sirupsen/logrus/hooks/test" diff --git a/beacon-chain/blockchain/setup_forkchoice.go b/beacon-chain/blockchain/setup_forkchoice.go index deea24d440..ff5f0377a0 100644 --- a/beacon-chain/blockchain/setup_forkchoice.go +++ b/beacon-chain/blockchain/setup_forkchoice.go @@ -6,13 +6,13 @@ import ( "fmt" "slices" - forkchoicetypes "github.com/OffchainLabs/prysm/v6/beacon-chain/forkchoice/types" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/config/features" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/time/slots" + forkchoicetypes "github.com/OffchainLabs/prysm/v7/beacon-chain/forkchoice/types" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/config/features" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/time/slots" "github.com/pkg/errors" ) diff --git a/beacon-chain/blockchain/setup_forkchoice_test.go b/beacon-chain/blockchain/setup_forkchoice_test.go index 481bb074fb..de92070d5a 100644 --- a/beacon-chain/blockchain/setup_forkchoice_test.go +++ b/beacon-chain/blockchain/setup_forkchoice_test.go @@ -3,14 +3,14 @@ package blockchain import ( "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/blocks" - "github.com/OffchainLabs/prysm/v6/config/features" - 
"github.com/OffchainLabs/prysm/v6/config/params" - consensusblocks "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/blocks" + "github.com/OffchainLabs/prysm/v7/config/features" + "github.com/OffchainLabs/prysm/v7/config/params" + consensusblocks "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" logTest "github.com/sirupsen/logrus/hooks/test" ) diff --git a/beacon-chain/blockchain/setup_test.go b/beacon-chain/blockchain/setup_test.go index 7c453cf2d7..1a2afad621 100644 --- a/beacon-chain/blockchain/setup_test.go +++ b/beacon-chain/blockchain/setup_test.go @@ -6,30 +6,30 @@ import ( "testing" "time" - "github.com/OffchainLabs/prysm/v6/async/event" - mock "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/cache" - "github.com/OffchainLabs/prysm/v6/beacon-chain/cache/depositsnapshot" - statefeed "github.com/OffchainLabs/prysm/v6/beacon-chain/core/feed/state" - "github.com/OffchainLabs/prysm/v6/beacon-chain/db" - "github.com/OffchainLabs/prysm/v6/beacon-chain/db/filesystem" - testDB "github.com/OffchainLabs/prysm/v6/beacon-chain/db/testing" - mockExecution "github.com/OffchainLabs/prysm/v6/beacon-chain/execution/testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/forkchoice" - doublylinkedtree "github.com/OffchainLabs/prysm/v6/beacon-chain/forkchoice/doubly-linked-tree" - lightclient "github.com/OffchainLabs/prysm/v6/beacon-chain/light-client" - "github.com/OffchainLabs/prysm/v6/beacon-chain/operations/attestations" - "github.com/OffchainLabs/prysm/v6/beacon-chain/operations/blstoexec" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p" - p2pTesting "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/startup" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state/stategen" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/async/event" + mock "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain/testing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/cache" + "github.com/OffchainLabs/prysm/v7/beacon-chain/cache/depositsnapshot" + statefeed "github.com/OffchainLabs/prysm/v7/beacon-chain/core/feed/state" + "github.com/OffchainLabs/prysm/v7/beacon-chain/db" + "github.com/OffchainLabs/prysm/v7/beacon-chain/db/filesystem" + testDB "github.com/OffchainLabs/prysm/v7/beacon-chain/db/testing" + mockExecution "github.com/OffchainLabs/prysm/v7/beacon-chain/execution/testing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/forkchoice" + doublylinkedtree "github.com/OffchainLabs/prysm/v7/beacon-chain/forkchoice/doubly-linked-tree" + lightclient "github.com/OffchainLabs/prysm/v7/beacon-chain/light-client" + 
"github.com/OffchainLabs/prysm/v7/beacon-chain/operations/attestations" + "github.com/OffchainLabs/prysm/v7/beacon-chain/operations/blstoexec" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p" + p2pTesting "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/testing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/startup" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state/stategen" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/require" "github.com/libp2p/go-libp2p/core/peer" "google.golang.org/protobuf/proto" ) diff --git a/beacon-chain/blockchain/testing/BUILD.bazel b/beacon-chain/blockchain/testing/BUILD.bazel index e0fe9a769d..e47969f03f 100644 --- a/beacon-chain/blockchain/testing/BUILD.bazel +++ b/beacon-chain/blockchain/testing/BUILD.bazel @@ -4,7 +4,7 @@ go_library( name = "go_default_library", testonly = True, srcs = ["mock.go"], - importpath = "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/testing", + importpath = "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain/testing", visibility = [ "//beacon-chain:__subpackages__", "//testing:__subpackages__", diff --git a/beacon-chain/blockchain/testing/mock.go b/beacon-chain/blockchain/testing/mock.go index 433a8dc759..6c7e2d1463 100644 --- a/beacon-chain/blockchain/testing/mock.go +++ b/beacon-chain/blockchain/testing/mock.go @@ -8,27 +8,27 @@ import ( "sync" "time" - "github.com/OffchainLabs/prysm/v6/async/event" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/epoch/precompute" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/feed" - blockfeed "github.com/OffchainLabs/prysm/v6/beacon-chain/core/feed/block" - opfeed "github.com/OffchainLabs/prysm/v6/beacon-chain/core/feed/operation" - statefeed "github.com/OffchainLabs/prysm/v6/beacon-chain/core/feed/state" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/das" - "github.com/OffchainLabs/prysm/v6/beacon-chain/db" - "github.com/OffchainLabs/prysm/v6/beacon-chain/forkchoice" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - forkchoice2 "github.com/OffchainLabs/prysm/v6/consensus-types/forkchoice" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - enginev1 "github.com/OffchainLabs/prysm/v6/proto/engine/v1" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/async/event" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/epoch/precompute" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/feed" + blockfeed "github.com/OffchainLabs/prysm/v7/beacon-chain/core/feed/block" + opfeed "github.com/OffchainLabs/prysm/v7/beacon-chain/core/feed/operation" + statefeed "github.com/OffchainLabs/prysm/v7/beacon-chain/core/feed/state" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/das" + 
"github.com/OffchainLabs/prysm/v7/beacon-chain/db" + "github.com/OffchainLabs/prysm/v7/beacon-chain/forkchoice" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + forkchoice2 "github.com/OffchainLabs/prysm/v7/consensus-types/forkchoice" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + enginev1 "github.com/OffchainLabs/prysm/v7/proto/engine/v1" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" "github.com/pkg/errors" "github.com/sirupsen/logrus" ) diff --git a/beacon-chain/blockchain/tracked_proposer.go b/beacon-chain/blockchain/tracked_proposer.go index 4a7fcc7488..55a8400daf 100644 --- a/beacon-chain/blockchain/tracked_proposer.go +++ b/beacon-chain/blockchain/tracked_proposer.go @@ -1,11 +1,11 @@ package blockchain import ( - "github.com/OffchainLabs/prysm/v6/beacon-chain/cache" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/config/features" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/beacon-chain/cache" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/config/features" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" ) // trackedProposer returns whether the beacon node was informed, via the diff --git a/beacon-chain/blockchain/weak_subjectivity_checks.go b/beacon-chain/blockchain/weak_subjectivity_checks.go index dd55279771..bd92633577 100644 --- a/beacon-chain/blockchain/weak_subjectivity_checks.go +++ b/beacon-chain/blockchain/weak_subjectivity_checks.go @@ -5,12 +5,12 @@ import ( "fmt" "slices" - "github.com/OffchainLabs/prysm/v6/beacon-chain/db/filters" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/beacon-chain/db/filters" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/time/slots" "github.com/pkg/errors" ) diff --git a/beacon-chain/blockchain/weak_subjectivity_checks_test.go b/beacon-chain/blockchain/weak_subjectivity_checks_test.go index a4376c5436..c6abdc0478 100644 --- a/beacon-chain/blockchain/weak_subjectivity_checks_test.go +++ b/beacon-chain/blockchain/weak_subjectivity_checks_test.go @@ -3,14 +3,14 @@ package blockchain import ( "testing" - forkchoicetypes "github.com/OffchainLabs/prysm/v6/beacon-chain/forkchoice/types" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - 
"github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" - "github.com/OffchainLabs/prysm/v6/time/slots" + forkchoicetypes "github.com/OffchainLabs/prysm/v7/beacon-chain/forkchoice/types" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" + "github.com/OffchainLabs/prysm/v7/time/slots" "github.com/pkg/errors" ) diff --git a/beacon-chain/builder/BUILD.bazel b/beacon-chain/builder/BUILD.bazel index 90a6b5ac94..3566b1fd9a 100644 --- a/beacon-chain/builder/BUILD.bazel +++ b/beacon-chain/builder/BUILD.bazel @@ -8,7 +8,7 @@ go_library( "option.go", "service.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/beacon-chain/builder", + importpath = "github.com/OffchainLabs/prysm/v7/beacon-chain/builder", visibility = ["//visibility:public"], deps = [ "//api/client/builder:go_default_library", diff --git a/beacon-chain/builder/option.go b/beacon-chain/builder/option.go index 17735ae7a1..1a9d59a609 100644 --- a/beacon-chain/builder/option.go +++ b/beacon-chain/builder/option.go @@ -1,11 +1,11 @@ package builder import ( - "github.com/OffchainLabs/prysm/v6/api/client/builder" - "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain" - "github.com/OffchainLabs/prysm/v6/beacon-chain/cache" - "github.com/OffchainLabs/prysm/v6/beacon-chain/db" - "github.com/OffchainLabs/prysm/v6/cmd/beacon-chain/flags" + "github.com/OffchainLabs/prysm/v7/api/client/builder" + "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain" + "github.com/OffchainLabs/prysm/v7/beacon-chain/cache" + "github.com/OffchainLabs/prysm/v7/beacon-chain/db" + "github.com/OffchainLabs/prysm/v7/cmd/beacon-chain/flags" "github.com/urfave/cli/v2" ) diff --git a/beacon-chain/builder/service.go b/beacon-chain/builder/service.go index 26d6f70f5c..70d5001fce 100644 --- a/beacon-chain/builder/service.go +++ b/beacon-chain/builder/service.go @@ -5,17 +5,17 @@ import ( "reflect" "time" - "github.com/OffchainLabs/prysm/v6/api/client/builder" - "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain" - "github.com/OffchainLabs/prysm/v6/beacon-chain/cache" - "github.com/OffchainLabs/prysm/v6/beacon-chain/db" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing/trace" - v1 "github.com/OffchainLabs/prysm/v6/proto/engine/v1" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/api/client/builder" + "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain" + "github.com/OffchainLabs/prysm/v7/beacon-chain/cache" + "github.com/OffchainLabs/prysm/v7/beacon-chain/db" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing/trace" + v1 "github.com/OffchainLabs/prysm/v7/proto/engine/v1" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" "github.com/pkg/errors" ) diff --git 
a/beacon-chain/builder/service_test.go b/beacon-chain/builder/service_test.go index 426e19b6f7..44d7ba46f4 100644 --- a/beacon-chain/builder/service_test.go +++ b/beacon-chain/builder/service_test.go @@ -4,13 +4,13 @@ import ( "testing" "time" - buildertesting "github.com/OffchainLabs/prysm/v6/api/client/builder/testing" - blockchainTesting "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/testing" - dbtesting "github.com/OffchainLabs/prysm/v6/beacon-chain/db/testing" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - eth "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" + buildertesting "github.com/OffchainLabs/prysm/v7/api/client/builder/testing" + blockchainTesting "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain/testing" + dbtesting "github.com/OffchainLabs/prysm/v7/beacon-chain/db/testing" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + eth "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" ) func Test_NewServiceWithBuilder(t *testing.T) { diff --git a/beacon-chain/builder/testing/BUILD.bazel b/beacon-chain/builder/testing/BUILD.bazel index 652cdb24b2..ca5c609467 100644 --- a/beacon-chain/builder/testing/BUILD.bazel +++ b/beacon-chain/builder/testing/BUILD.bazel @@ -4,7 +4,7 @@ go_library( name = "go_default_library", testonly = True, srcs = ["mock.go"], - importpath = "github.com/OffchainLabs/prysm/v6/beacon-chain/builder/testing", + importpath = "github.com/OffchainLabs/prysm/v7/beacon-chain/builder/testing", visibility = ["//visibility:public"], deps = [ "//api/client/builder:go_default_library", diff --git a/beacon-chain/builder/testing/mock.go b/beacon-chain/builder/testing/mock.go index 16282f823c..20154b2f49 100644 --- a/beacon-chain/builder/testing/mock.go +++ b/beacon-chain/builder/testing/mock.go @@ -3,17 +3,17 @@ package testing import ( "context" - "github.com/OffchainLabs/prysm/v6/api/client/builder" - "github.com/OffchainLabs/prysm/v6/beacon-chain/cache" - "github.com/OffchainLabs/prysm/v6/beacon-chain/db" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - v1 "github.com/OffchainLabs/prysm/v6/proto/engine/v1" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/api/client/builder" + "github.com/OffchainLabs/prysm/v7/beacon-chain/cache" + "github.com/OffchainLabs/prysm/v7/beacon-chain/db" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + v1 "github.com/OffchainLabs/prysm/v7/proto/engine/v1" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" + "github.com/OffchainLabs/prysm/v7/time/slots" "github.com/pkg/errors" ) diff --git a/beacon-chain/cache/BUILD.bazel b/beacon-chain/cache/BUILD.bazel index a5bf0457b6..a6eb511615 100644 --- a/beacon-chain/cache/BUILD.bazel +++ b/beacon-chain/cache/BUILD.bazel @@ -29,7 +29,7 @@ go_library( 
"sync_subnet_ids.go", "tracked_validators.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/beacon-chain/cache", + importpath = "github.com/OffchainLabs/prysm/v7/beacon-chain/cache", visibility = [ "//beacon-chain:__subpackages__", "//testing/spectest:__subpackages__", diff --git a/beacon-chain/cache/active_balance.go b/beacon-chain/cache/active_balance.go index 7388be3ef1..37f186429a 100644 --- a/beacon-chain/cache/active_balance.go +++ b/beacon-chain/cache/active_balance.go @@ -5,8 +5,8 @@ package cache import ( "sync" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - lruwrpr "github.com/OffchainLabs/prysm/v6/cache/lru" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + lruwrpr "github.com/OffchainLabs/prysm/v7/cache/lru" lru "github.com/hashicorp/golang-lru" "github.com/prometheus/client_golang/prometheus" "github.com/prometheus/client_golang/prometheus/promauto" diff --git a/beacon-chain/cache/active_balance_disabled.go b/beacon-chain/cache/active_balance_disabled.go index a669f43c4a..23a204a916 100644 --- a/beacon-chain/cache/active_balance_disabled.go +++ b/beacon-chain/cache/active_balance_disabled.go @@ -3,7 +3,7 @@ package cache import ( - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" ) // FakeBalanceCache is a fake struct with 1 LRU cache for looking up balance by epoch. diff --git a/beacon-chain/cache/active_balance_test.go b/beacon-chain/cache/active_balance_test.go index d34b0b8f9f..591dfea4be 100644 --- a/beacon-chain/cache/active_balance_test.go +++ b/beacon-chain/cache/active_balance_test.go @@ -7,12 +7,12 @@ import ( "math" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/cache" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/beacon-chain/cache" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/require" ) func TestBalanceCache_AddGetBalance(t *testing.T) { diff --git a/beacon-chain/cache/attestation.go b/beacon-chain/cache/attestation.go index 309909b11f..17f2ef3c61 100644 --- a/beacon-chain/cache/attestation.go +++ b/beacon-chain/cache/attestation.go @@ -3,11 +3,11 @@ package cache import ( "sync" - "github.com/OffchainLabs/prysm/v6/beacon-chain/operations/attestations/attmap" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/crypto/bls" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1/attestation" + "github.com/OffchainLabs/prysm/v7/beacon-chain/operations/attestations/attmap" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/crypto/bls" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1/attestation" "github.com/pkg/errors" log "github.com/sirupsen/logrus" ) diff --git a/beacon-chain/cache/attestation_data.go b/beacon-chain/cache/attestation_data.go index 558c676789..bf548fa85d 100644 --- 
a/beacon-chain/cache/attestation_data.go +++ b/beacon-chain/cache/attestation_data.go @@ -4,8 +4,8 @@ import ( "errors" "sync" - forkchoicetypes "github.com/OffchainLabs/prysm/v6/beacon-chain/forkchoice/types" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" + forkchoicetypes "github.com/OffchainLabs/prysm/v7/beacon-chain/forkchoice/types" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" ) type AttestationConsensusData struct { diff --git a/beacon-chain/cache/attestation_data_test.go b/beacon-chain/cache/attestation_data_test.go index cdc44a8432..5197db6280 100644 --- a/beacon-chain/cache/attestation_data_test.go +++ b/beacon-chain/cache/attestation_data_test.go @@ -3,8 +3,8 @@ package cache_test import ( "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/cache" - forkchoicetypes "github.com/OffchainLabs/prysm/v6/beacon-chain/forkchoice/types" + "github.com/OffchainLabs/prysm/v7/beacon-chain/cache" + forkchoicetypes "github.com/OffchainLabs/prysm/v7/beacon-chain/forkchoice/types" "github.com/stretchr/testify/require" ) diff --git a/beacon-chain/cache/attestation_test.go b/beacon-chain/cache/attestation_test.go index 82ab952f15..d14db40e39 100644 --- a/beacon-chain/cache/attestation_test.go +++ b/beacon-chain/cache/attestation_test.go @@ -4,13 +4,13 @@ import ( "testing" "github.com/OffchainLabs/go-bitfield" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/crypto/bls/blst" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1/attestation" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/crypto/bls/blst" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1/attestation" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" ) func TestAdd(t *testing.T) { diff --git a/beacon-chain/cache/balance_cache_key.go b/beacon-chain/cache/balance_cache_key.go index dd9244905d..8e818f7805 100644 --- a/beacon-chain/cache/balance_cache_key.go +++ b/beacon-chain/cache/balance_cache_key.go @@ -4,9 +4,9 @@ import ( "encoding/binary" "fmt" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" ) // Given input state `st`, balance key is constructed as: diff --git a/beacon-chain/cache/checkpoint_state.go b/beacon-chain/cache/checkpoint_state.go index 0e2e16c93d..257c8f5e67 100644 --- a/beacon-chain/cache/checkpoint_state.go +++ b/beacon-chain/cache/checkpoint_state.go @@ -1,10 +1,10 @@ package cache import ( - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - lruwrpr "github.com/OffchainLabs/prysm/v6/cache/lru" - "github.com/OffchainLabs/prysm/v6/crypto/hash" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + lruwrpr "github.com/OffchainLabs/prysm/v7/cache/lru" + "github.com/OffchainLabs/prysm/v7/crypto/hash" + ethpb 
"github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" lru "github.com/hashicorp/golang-lru" "github.com/prometheus/client_golang/prometheus" "github.com/prometheus/client_golang/prometheus/promauto" diff --git a/beacon-chain/cache/checkpoint_state_test.go b/beacon-chain/cache/checkpoint_state_test.go index b016e0098d..1c1293325e 100644 --- a/beacon-chain/cache/checkpoint_state_test.go +++ b/beacon-chain/cache/checkpoint_state_test.go @@ -3,15 +3,15 @@ package cache_test import ( "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/cache" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/beacon-chain/cache" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" "google.golang.org/protobuf/proto" ) diff --git a/beacon-chain/cache/committee.go b/beacon-chain/cache/committee.go index 6ddf01448f..e3db8b6f22 100644 --- a/beacon-chain/cache/committee.go +++ b/beacon-chain/cache/committee.go @@ -8,12 +8,12 @@ import ( "sync" "time" - lruwrpr "github.com/OffchainLabs/prysm/v6/cache/lru" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/container/slice" - mathutil "github.com/OffchainLabs/prysm/v6/math" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing/trace" + lruwrpr "github.com/OffchainLabs/prysm/v7/cache/lru" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/container/slice" + mathutil "github.com/OffchainLabs/prysm/v7/math" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing/trace" lru "github.com/hashicorp/golang-lru" "github.com/prometheus/client_golang/prometheus" "github.com/prometheus/client_golang/prometheus/promauto" diff --git a/beacon-chain/cache/committee_disabled.go b/beacon-chain/cache/committee_disabled.go index cfda2c1d4c..663df3bc25 100644 --- a/beacon-chain/cache/committee_disabled.go +++ b/beacon-chain/cache/committee_disabled.go @@ -6,7 +6,7 @@ package cache import ( "context" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" ) // FakeCommitteeCache is a struct with 1 queue for looking up shuffled indices list by seed. 
diff --git a/beacon-chain/cache/committee_fuzz_test.go b/beacon-chain/cache/committee_fuzz_test.go index cd2fcf459d..e2a79a0b46 100644 --- a/beacon-chain/cache/committee_fuzz_test.go +++ b/beacon-chain/cache/committee_fuzz_test.go @@ -5,8 +5,8 @@ package cache import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" fuzz "github.com/google/gofuzz" ) diff --git a/beacon-chain/cache/committee_test.go b/beacon-chain/cache/committee_test.go index ab3dfa7ac4..ce9623945c 100644 --- a/beacon-chain/cache/committee_test.go +++ b/beacon-chain/cache/committee_test.go @@ -9,11 +9,11 @@ import ( "strconv" "testing" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" ) func TestCommitteeKeyFn_OK(t *testing.T) { diff --git a/beacon-chain/cache/committees.go b/beacon-chain/cache/committees.go index 22e4c6fd81..e2795ef3c5 100644 --- a/beacon-chain/cache/committees.go +++ b/beacon-chain/cache/committees.go @@ -3,7 +3,7 @@ package cache import ( "errors" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" ) // ErrNotCommittee will be returned when a cache object is not a pointer to diff --git a/beacon-chain/cache/depositsnapshot/BUILD.bazel b/beacon-chain/cache/depositsnapshot/BUILD.bazel index 567c3ed36f..964a7c5b82 100644 --- a/beacon-chain/cache/depositsnapshot/BUILD.bazel +++ b/beacon-chain/cache/depositsnapshot/BUILD.bazel @@ -10,7 +10,7 @@ go_library( "deposit_tree_snapshot.go", "merkle_tree.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/beacon-chain/cache/depositsnapshot", + importpath = "github.com/OffchainLabs/prysm/v7/beacon-chain/cache/depositsnapshot", visibility = ["//visibility:public"], deps = [ "//beacon-chain/cache:go_default_library", diff --git a/beacon-chain/cache/depositsnapshot/deposit_cache_test.go b/beacon-chain/cache/depositsnapshot/deposit_cache_test.go index f354753241..fbf486ed93 100644 --- a/beacon-chain/cache/depositsnapshot/deposit_cache_test.go +++ b/beacon-chain/cache/depositsnapshot/deposit_cache_test.go @@ -6,14 +6,14 @@ import ( "math/big" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/cache" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/container/trie" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/beacon-chain/cache" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/container/trie" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + 
"github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" ) var _ cache.DepositCache = (*Cache)(nil) diff --git a/beacon-chain/cache/depositsnapshot/deposit_fetcher.go b/beacon-chain/cache/depositsnapshot/deposit_fetcher.go index 19f5709b22..c9c2266890 100644 --- a/beacon-chain/cache/depositsnapshot/deposit_fetcher.go +++ b/beacon-chain/cache/depositsnapshot/deposit_fetcher.go @@ -6,10 +6,10 @@ import ( "sort" "sync" - "github.com/OffchainLabs/prysm/v6/beacon-chain/cache" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing/trace" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/beacon-chain/cache" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing/trace" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" "github.com/prometheus/client_golang/prometheus" "github.com/prometheus/client_golang/prometheus/promauto" "github.com/sirupsen/logrus" diff --git a/beacon-chain/cache/depositsnapshot/deposit_fetcher_test.go b/beacon-chain/cache/depositsnapshot/deposit_fetcher_test.go index a559698ddd..0c4a336ab8 100644 --- a/beacon-chain/cache/depositsnapshot/deposit_fetcher_test.go +++ b/beacon-chain/cache/depositsnapshot/deposit_fetcher_test.go @@ -4,8 +4,8 @@ import ( "math/big" "testing" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" ) var _ PendingDepositsFetcher = (*Cache)(nil) diff --git a/beacon-chain/cache/depositsnapshot/deposit_inserter.go b/beacon-chain/cache/depositsnapshot/deposit_inserter.go index 0a79db6c82..17703edf56 100644 --- a/beacon-chain/cache/depositsnapshot/deposit_inserter.go +++ b/beacon-chain/cache/depositsnapshot/deposit_inserter.go @@ -5,9 +5,9 @@ import ( "encoding/hex" "sort" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing/trace" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing/trace" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" "github.com/ethereum/go-ethereum/common" "github.com/pkg/errors" "github.com/prometheus/client_golang/prometheus" diff --git a/beacon-chain/cache/depositsnapshot/deposit_pruner.go b/beacon-chain/cache/depositsnapshot/deposit_pruner.go index 3dccb4eea9..b7243abc47 100644 --- a/beacon-chain/cache/depositsnapshot/deposit_pruner.go +++ b/beacon-chain/cache/depositsnapshot/deposit_pruner.go @@ -3,8 +3,8 @@ package depositsnapshot import ( "context" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing/trace" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing/trace" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" ) // PruneProofs removes proofs from all deposits whose index is equal or less than untilDepositIndex. 
diff --git a/beacon-chain/cache/depositsnapshot/deposit_pruner_test.go b/beacon-chain/cache/depositsnapshot/deposit_pruner_test.go index e4eef928c7..d6b4f4a8d0 100644 --- a/beacon-chain/cache/depositsnapshot/deposit_pruner_test.go +++ b/beacon-chain/cache/depositsnapshot/deposit_pruner_test.go @@ -3,10 +3,10 @@ package depositsnapshot import ( "testing" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" ) func TestPrunePendingDeposits_ZeroMerkleIndex(t *testing.T) { diff --git a/beacon-chain/cache/depositsnapshot/deposit_tree.go b/beacon-chain/cache/depositsnapshot/deposit_tree.go index cf0ab09db9..d7dde258ad 100644 --- a/beacon-chain/cache/depositsnapshot/deposit_tree.go +++ b/beacon-chain/cache/depositsnapshot/deposit_tree.go @@ -6,10 +6,10 @@ package depositsnapshot import ( "encoding/binary" - "github.com/OffchainLabs/prysm/v6/crypto/hash" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/math" - protodb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/crypto/hash" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/math" + protodb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" "github.com/ethereum/go-ethereum/common" "github.com/pkg/errors" ) diff --git a/beacon-chain/cache/depositsnapshot/deposit_tree_snapshot.go b/beacon-chain/cache/depositsnapshot/deposit_tree_snapshot.go index 76f030569c..eefb0330cf 100644 --- a/beacon-chain/cache/depositsnapshot/deposit_tree_snapshot.go +++ b/beacon-chain/cache/depositsnapshot/deposit_tree_snapshot.go @@ -1,10 +1,10 @@ package depositsnapshot import ( - "github.com/OffchainLabs/prysm/v6/container/trie" - "github.com/OffchainLabs/prysm/v6/crypto/hash" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - protodb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/container/trie" + "github.com/OffchainLabs/prysm/v7/crypto/hash" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + protodb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" ) // DepositTreeSnapshot represents the data used to create a deposit tree given a snapshot. 
diff --git a/beacon-chain/cache/depositsnapshot/deposit_tree_snapshot_test.go b/beacon-chain/cache/depositsnapshot/deposit_tree_snapshot_test.go index 96c748504a..efb548abeb 100644 --- a/beacon-chain/cache/depositsnapshot/deposit_tree_snapshot_test.go +++ b/beacon-chain/cache/depositsnapshot/deposit_tree_snapshot_test.go @@ -5,7 +5,7 @@ import ( "reflect" "testing" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/require" ) func TestDepositTreeSnapshot_CalculateRoot(t *testing.T) { diff --git a/beacon-chain/cache/depositsnapshot/merkle_tree.go b/beacon-chain/cache/depositsnapshot/merkle_tree.go index 38f9c06969..a56ea23bd1 100644 --- a/beacon-chain/cache/depositsnapshot/merkle_tree.go +++ b/beacon-chain/cache/depositsnapshot/merkle_tree.go @@ -1,10 +1,10 @@ package depositsnapshot import ( - "github.com/OffchainLabs/prysm/v6/container/slice" - "github.com/OffchainLabs/prysm/v6/container/trie" - "github.com/OffchainLabs/prysm/v6/crypto/hash" - "github.com/OffchainLabs/prysm/v6/math" + "github.com/OffchainLabs/prysm/v7/container/slice" + "github.com/OffchainLabs/prysm/v7/container/trie" + "github.com/OffchainLabs/prysm/v7/crypto/hash" + "github.com/OffchainLabs/prysm/v7/math" "github.com/pkg/errors" ) diff --git a/beacon-chain/cache/depositsnapshot/merkle_tree_test.go b/beacon-chain/cache/depositsnapshot/merkle_tree_test.go index 5bbeae2224..dffab8abb2 100644 --- a/beacon-chain/cache/depositsnapshot/merkle_tree_test.go +++ b/beacon-chain/cache/depositsnapshot/merkle_tree_test.go @@ -6,10 +6,10 @@ import ( "reflect" "testing" - "github.com/OffchainLabs/prysm/v6/container/trie" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/container/trie" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" ) func hexString(t *testing.T, hexStr string) [32]byte { diff --git a/beacon-chain/cache/depositsnapshot/spec_test.go b/beacon-chain/cache/depositsnapshot/spec_test.go index 2e3170c86b..f1c5228a66 100644 --- a/beacon-chain/cache/depositsnapshot/spec_test.go +++ b/beacon-chain/cache/depositsnapshot/spec_test.go @@ -7,11 +7,11 @@ import ( "strings" "testing" - "github.com/OffchainLabs/prysm/v6/container/trie" - "github.com/OffchainLabs/prysm/v6/crypto/hash" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/io/file" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/container/trie" + "github.com/OffchainLabs/prysm/v7/crypto/hash" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/io/file" + "github.com/OffchainLabs/prysm/v7/testing/require" "github.com/bazelbuild/rules_go/go/tools/bazel" "github.com/pkg/errors" "gopkg.in/yaml.v3" diff --git a/beacon-chain/cache/interfaces.go b/beacon-chain/cache/interfaces.go index 0dd3a4c4a4..bb11f992a1 100644 --- a/beacon-chain/cache/interfaces.go +++ b/beacon-chain/cache/interfaces.go @@ -4,7 +4,7 @@ import ( "context" "math/big" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" "github.com/ethereum/go-ethereum/common" ) diff --git a/beacon-chain/cache/payload_id.go b/beacon-chain/cache/payload_id.go index c3913dbc03..be2bb52cf2 100644 --- a/beacon-chain/cache/payload_id.go +++ 
b/beacon-chain/cache/payload_id.go @@ -3,7 +3,7 @@ package cache import ( "sync" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" ) // RootToPayloadIDMap is a map with keys the head root and values the diff --git a/beacon-chain/cache/payload_id_test.go b/beacon-chain/cache/payload_id_test.go index e8d2539142..8d21dfce59 100644 --- a/beacon-chain/cache/payload_id_test.go +++ b/beacon-chain/cache/payload_id_test.go @@ -3,8 +3,8 @@ package cache import ( "testing" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/testing/require" ) func TestValidatorPayloadIDsCache_GetAndSaveValidatorPayloadIDs(t *testing.T) { diff --git a/beacon-chain/cache/private_access_test.go b/beacon-chain/cache/private_access_test.go index 5446494bd9..16f4ca6578 100644 --- a/beacon-chain/cache/private_access_test.go +++ b/beacon-chain/cache/private_access_test.go @@ -1,7 +1,7 @@ package cache import ( - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" lru "github.com/hashicorp/golang-lru" ) diff --git a/beacon-chain/cache/proposer_indices.go b/beacon-chain/cache/proposer_indices.go index 7a7a95f7e3..1df925ad8b 100644 --- a/beacon-chain/cache/proposer_indices.go +++ b/beacon-chain/cache/proposer_indices.go @@ -5,9 +5,9 @@ package cache import ( "sync" - forkchoicetypes "github.com/OffchainLabs/prysm/v6/beacon-chain/forkchoice/types" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" + forkchoicetypes "github.com/OffchainLabs/prysm/v7/beacon-chain/forkchoice/types" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" "github.com/prometheus/client_golang/prometheus" "github.com/prometheus/client_golang/prometheus/promauto" ) diff --git a/beacon-chain/cache/proposer_indices_disabled.go b/beacon-chain/cache/proposer_indices_disabled.go index 9c86e0378b..18e2b6e8b0 100644 --- a/beacon-chain/cache/proposer_indices_disabled.go +++ b/beacon-chain/cache/proposer_indices_disabled.go @@ -4,9 +4,9 @@ package cache import ( - forkchoicetypes "github.com/OffchainLabs/prysm/v6/beacon-chain/forkchoice/types" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" + forkchoicetypes "github.com/OffchainLabs/prysm/v7/beacon-chain/forkchoice/types" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" "github.com/prometheus/client_golang/prometheus" "github.com/prometheus/client_golang/prometheus/promauto" ) diff --git a/beacon-chain/cache/proposer_indices_test.go b/beacon-chain/cache/proposer_indices_test.go index 7bdf691e29..1a06f7714a 100644 --- a/beacon-chain/cache/proposer_indices_test.go +++ b/beacon-chain/cache/proposer_indices_test.go @@ -5,10 +5,10 @@ package cache import ( "testing" - forkchoicetypes "github.com/OffchainLabs/prysm/v6/beacon-chain/forkchoice/types" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/testing/require" + forkchoicetypes 
"github.com/OffchainLabs/prysm/v7/beacon-chain/forkchoice/types" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/testing/require" ) func TestProposerCache_Set(t *testing.T) { diff --git a/beacon-chain/cache/proposer_indices_type.go b/beacon-chain/cache/proposer_indices_type.go index 5fee8c63e5..cff78be2bb 100644 --- a/beacon-chain/cache/proposer_indices_type.go +++ b/beacon-chain/cache/proposer_indices_type.go @@ -1,7 +1,7 @@ package cache import ( - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" ) // ProposerIndices defines the cached struct for proposer indices. diff --git a/beacon-chain/cache/registration.go b/beacon-chain/cache/registration.go index 55840be444..d7a7434450 100644 --- a/beacon-chain/cache/registration.go +++ b/beacon-chain/cache/registration.go @@ -4,10 +4,10 @@ import ( "context" "sync" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing/trace" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing/trace" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" "github.com/pkg/errors" ) diff --git a/beacon-chain/cache/registration_test.go b/beacon-chain/cache/registration_test.go index 6efc5f1037..ad32d12907 100644 --- a/beacon-chain/cache/registration_test.go +++ b/beacon-chain/cache/registration_test.go @@ -4,9 +4,9 @@ import ( "testing" "time" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/require" "github.com/ethereum/go-ethereum/common/hexutil" ) diff --git a/beacon-chain/cache/skip_slot_cache.go b/beacon-chain/cache/skip_slot_cache.go index c0f647f9d0..0647df1ea1 100644 --- a/beacon-chain/cache/skip_slot_cache.go +++ b/beacon-chain/cache/skip_slot_cache.go @@ -5,9 +5,9 @@ import ( "sync" "time" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - lruwrpr "github.com/OffchainLabs/prysm/v6/cache/lru" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing/trace" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + lruwrpr "github.com/OffchainLabs/prysm/v7/cache/lru" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing/trace" lru "github.com/hashicorp/golang-lru" "github.com/prometheus/client_golang/prometheus" "github.com/prometheus/client_golang/prometheus/promauto" diff --git a/beacon-chain/cache/skip_slot_cache_test.go b/beacon-chain/cache/skip_slot_cache_test.go index 9ec18d08b2..6e364d1116 100644 --- a/beacon-chain/cache/skip_slot_cache_test.go +++ b/beacon-chain/cache/skip_slot_cache_test.go @@ -4,12 +4,12 @@ import ( "sync" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/cache" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - 
"github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/beacon-chain/cache" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" ) func TestSkipSlotCache_RoundTrip(t *testing.T) { diff --git a/beacon-chain/cache/subnet_ids.go b/beacon-chain/cache/subnet_ids.go index 5ee8e116ff..fd98a7972d 100644 --- a/beacon-chain/cache/subnet_ids.go +++ b/beacon-chain/cache/subnet_ids.go @@ -4,10 +4,10 @@ import ( "sync" "time" - lruwrpr "github.com/OffchainLabs/prysm/v6/cache/lru" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/container/slice" + lruwrpr "github.com/OffchainLabs/prysm/v7/cache/lru" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/container/slice" lru "github.com/hashicorp/golang-lru" "github.com/patrickmn/go-cache" ) diff --git a/beacon-chain/cache/subnet_ids_test.go b/beacon-chain/cache/subnet_ids_test.go index bbd18a81c5..b945a115e6 100644 --- a/beacon-chain/cache/subnet_ids_test.go +++ b/beacon-chain/cache/subnet_ids_test.go @@ -3,8 +3,8 @@ package cache import ( "testing" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/testing/assert" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/testing/assert" ) func TestSubnetIDsCache_RoundTrip(t *testing.T) { diff --git a/beacon-chain/cache/sync_committee.go b/beacon-chain/cache/sync_committee.go index ea95552fd3..8991a0022e 100644 --- a/beacon-chain/cache/sync_committee.go +++ b/beacon-chain/cache/sync_committee.go @@ -6,9 +6,9 @@ import ( "sync" "sync/atomic" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" "github.com/prometheus/client_golang/prometheus" "github.com/prometheus/client_golang/prometheus/promauto" log "github.com/sirupsen/logrus" diff --git a/beacon-chain/cache/sync_committee_disabled.go b/beacon-chain/cache/sync_committee_disabled.go index 06309543ab..0be0a2adec 100644 --- a/beacon-chain/cache/sync_committee_disabled.go +++ b/beacon-chain/cache/sync_committee_disabled.go @@ -3,8 +3,8 @@ package cache import ( - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" ) // FakeSyncCommitteeCache is a fake `SyncCommitteeCache` to satisfy fuzzing. 
diff --git a/beacon-chain/cache/sync_committee_head_state.go b/beacon-chain/cache/sync_committee_head_state.go index acec4c9fff..6321391685 100644 --- a/beacon-chain/cache/sync_committee_head_state.go +++ b/beacon-chain/cache/sync_committee_head_state.go @@ -3,10 +3,10 @@ package cache import ( "sync" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - lruwrpr "github.com/OffchainLabs/prysm/v6/cache/lru" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/runtime/version" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + lruwrpr "github.com/OffchainLabs/prysm/v7/cache/lru" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/runtime/version" lru "github.com/hashicorp/golang-lru" ) diff --git a/beacon-chain/cache/sync_committee_head_state_test.go b/beacon-chain/cache/sync_committee_head_state_test.go index 534ca71b14..1a567f8e13 100644 --- a/beacon-chain/cache/sync_committee_head_state_test.go +++ b/beacon-chain/cache/sync_committee_head_state_test.go @@ -3,13 +3,13 @@ package cache_test import ( "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/cache" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/beacon-chain/cache" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/require" ) func TestSyncCommitteeHeadState(t *testing.T) { diff --git a/beacon-chain/cache/sync_committee_test.go b/beacon-chain/cache/sync_committee_test.go index d404a8d862..f25b3d3794 100644 --- a/beacon-chain/cache/sync_committee_test.go +++ b/beacon-chain/cache/sync_committee_test.go @@ -3,11 +3,11 @@ package cache_test import ( "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/cache" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/beacon-chain/cache" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" ) func TestSyncCommitteeCache_CanUpdateAndRetrieve(t *testing.T) { diff --git a/beacon-chain/cache/sync_subnet_ids.go b/beacon-chain/cache/sync_subnet_ids.go index 601aee5289..f36df72170 100644 --- a/beacon-chain/cache/sync_subnet_ids.go +++ b/beacon-chain/cache/sync_subnet_ids.go @@ -4,11 +4,11 @@ import ( "sync" "time" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/container/slice" - "github.com/OffchainLabs/prysm/v6/crypto/rand" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/config/params" + 
"github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/container/slice" + "github.com/OffchainLabs/prysm/v7/crypto/rand" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" "github.com/patrickmn/go-cache" ) diff --git a/beacon-chain/cache/sync_subnet_ids_test.go b/beacon-chain/cache/sync_subnet_ids_test.go index 9f65f71517..07122a4bef 100644 --- a/beacon-chain/cache/sync_subnet_ids_test.go +++ b/beacon-chain/cache/sync_subnet_ids_test.go @@ -3,10 +3,10 @@ package cache import ( "testing" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" ) func TestSyncSubnetIDsCache_Roundtrip(t *testing.T) { diff --git a/beacon-chain/cache/tracked_validators.go b/beacon-chain/cache/tracked_validators.go index 500e9cfa3a..06a63a98ec 100644 --- a/beacon-chain/cache/tracked_validators.go +++ b/beacon-chain/cache/tracked_validators.go @@ -4,7 +4,7 @@ import ( "strconv" "time" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" "github.com/patrickmn/go-cache" "github.com/pkg/errors" "github.com/prometheus/client_golang/prometheus" diff --git a/beacon-chain/cache/tracked_validators_test.go b/beacon-chain/cache/tracked_validators_test.go index a06d52ec7f..be007a8dc2 100644 --- a/beacon-chain/cache/tracked_validators_test.go +++ b/beacon-chain/cache/tracked_validators_test.go @@ -3,8 +3,8 @@ package cache import ( "testing" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/testing/require" ) func mapEqual(a, b map[primitives.ValidatorIndex]bool) bool { diff --git a/beacon-chain/core/altair/BUILD.bazel b/beacon-chain/core/altair/BUILD.bazel index 5f9af101c6..5b87a7efa0 100644 --- a/beacon-chain/core/altair/BUILD.bazel +++ b/beacon-chain/core/altair/BUILD.bazel @@ -13,7 +13,7 @@ go_library( "transition.go", "upgrade.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/beacon-chain/core/altair", + importpath = "github.com/OffchainLabs/prysm/v7/beacon-chain/core/altair", visibility = ["//visibility:public"], deps = [ "//beacon-chain/core/blocks:go_default_library", diff --git a/beacon-chain/core/altair/attestation.go b/beacon-chain/core/altair/attestation.go index 4611208758..ec2ac3d537 100644 --- a/beacon-chain/core/altair/attestation.go +++ b/beacon-chain/core/altair/attestation.go @@ -5,17 +5,17 @@ import ( "context" "fmt" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/blocks" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/time" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/config/params" - consensusblocks "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing/trace" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - 
"github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1/attestation" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/blocks" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/time" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/config/params" + consensusblocks "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing/trace" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1/attestation" "github.com/pkg/errors" ) diff --git a/beacon-chain/core/altair/attestation_test.go b/beacon-chain/core/altair/attestation_test.go index d7af583c23..4b3f4916e7 100644 --- a/beacon-chain/core/altair/attestation_test.go +++ b/beacon-chain/core/altair/attestation_test.go @@ -5,23 +5,23 @@ import ( "testing" "github.com/OffchainLabs/go-bitfield" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/altair" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/signing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/time" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/crypto/bls" - "github.com/OffchainLabs/prysm/v6/math" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1/attestation" - "github.com/OffchainLabs/prysm/v6/testing/fuzz" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/altair" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/signing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/time" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/crypto/bls" + "github.com/OffchainLabs/prysm/v7/math" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1/attestation" + "github.com/OffchainLabs/prysm/v7/testing/fuzz" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" gofuzz "github.com/google/gofuzz" ) diff --git a/beacon-chain/core/altair/block.go b/beacon-chain/core/altair/block.go index db2385287c..5d2e9c04fb 100644 --- a/beacon-chain/core/altair/block.go +++ b/beacon-chain/core/altair/block.go @@ -3,15 +3,15 @@ package altair import ( "context" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/signing" - p2pType 
"github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/types" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/crypto/bls" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/signing" + p2pType "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/types" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/crypto/bls" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/time/slots" "github.com/pkg/errors" ) diff --git a/beacon-chain/core/altair/block_test.go b/beacon-chain/core/altair/block_test.go index be8041d242..51867fefd9 100644 --- a/beacon-chain/core/altair/block_test.go +++ b/beacon-chain/core/altair/block_test.go @@ -5,21 +5,21 @@ import ( "testing" "github.com/OffchainLabs/go-bitfield" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/altair" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/signing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/time" - p2pType "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/types" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/crypto/bls" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/altair" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/signing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/time" + p2pType "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/types" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/crypto/bls" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" + "github.com/OffchainLabs/prysm/v7/time/slots" ) func TestProcessSyncCommittee_PerfectParticipation(t *testing.T) { diff --git a/beacon-chain/core/altair/deposit.go b/beacon-chain/core/altair/deposit.go index 12079f2d55..44fa4e7ff7 100644 --- a/beacon-chain/core/altair/deposit.go +++ b/beacon-chain/core/altair/deposit.go @@ -3,13 +3,13 @@ package altair import ( "context" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/blocks" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/config/params" - 
"github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/blocks" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" "github.com/pkg/errors" ) diff --git a/beacon-chain/core/altair/deposit_fuzz_test.go b/beacon-chain/core/altair/deposit_fuzz_test.go index a70074bb63..b45b3347ba 100644 --- a/beacon-chain/core/altair/deposit_fuzz_test.go +++ b/beacon-chain/core/altair/deposit_fuzz_test.go @@ -3,11 +3,11 @@ package altair_test import ( "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/altair" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/fuzz" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/altair" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/fuzz" + "github.com/OffchainLabs/prysm/v7/testing/require" gofuzz "github.com/google/gofuzz" ) diff --git a/beacon-chain/core/altair/deposit_test.go b/beacon-chain/core/altair/deposit_test.go index c35132d103..6a21da94a6 100644 --- a/beacon-chain/core/altair/deposit_test.go +++ b/beacon-chain/core/altair/deposit_test.go @@ -3,19 +3,19 @@ package altair_test import ( "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/altair" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/signing" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/container/trie" - "github.com/OffchainLabs/prysm/v6/crypto/bls" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/altair" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/signing" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/container/trie" + "github.com/OffchainLabs/prysm/v7/crypto/bls" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" ) func TestProcessDeposits_SameValidatorMultipleDepositsSameBlock(t *testing.T) { diff --git a/beacon-chain/core/altair/epoch_precompute.go 
b/beacon-chain/core/altair/epoch_precompute.go index 7852dd713c..55ce28bbd1 100644 --- a/beacon-chain/core/altair/epoch_precompute.go +++ b/beacon-chain/core/altair/epoch_precompute.go @@ -3,13 +3,13 @@ package altair import ( "context" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/epoch/precompute" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/time" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/math" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing/trace" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/epoch/precompute" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/time" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/math" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing/trace" "github.com/pkg/errors" ) diff --git a/beacon-chain/core/altair/epoch_precompute_test.go b/beacon-chain/core/altair/epoch_precompute_test.go index 87b491f8d9..681880e058 100644 --- a/beacon-chain/core/altair/epoch_precompute_test.go +++ b/beacon-chain/core/altair/epoch_precompute_test.go @@ -4,14 +4,14 @@ import ( "math" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/epoch/precompute" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/epoch/precompute" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" ) func TestInitializeEpochValidators_Ok(t *testing.T) { diff --git a/beacon-chain/core/altair/epoch_spec.go b/beacon-chain/core/altair/epoch_spec.go index 8acfd6e68e..352e36f41f 100644 --- a/beacon-chain/core/altair/epoch_spec.go +++ b/beacon-chain/core/altair/epoch_spec.go @@ -3,10 +3,10 @@ package altair import ( "context" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/time" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/config/params" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/time" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/config/params" log "github.com/sirupsen/logrus" ) diff --git a/beacon-chain/core/altair/epoch_spec_test.go b/beacon-chain/core/altair/epoch_spec_test.go index 5de4fc3817..0db3f03042 100644 --- a/beacon-chain/core/altair/epoch_spec_test.go +++ b/beacon-chain/core/altair/epoch_spec_test.go @@ -5,18 +5,18 @@ import ( "math" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/altair" - 
"github.com/OffchainLabs/prysm/v6/beacon-chain/core/epoch" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/time" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/altair" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/epoch" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/time" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" "google.golang.org/protobuf/proto" ) diff --git a/beacon-chain/core/altair/reward.go b/beacon-chain/core/altair/reward.go index a2526b0be4..1330390beb 100644 --- a/beacon-chain/core/altair/reward.go +++ b/beacon-chain/core/altair/reward.go @@ -1,11 +1,11 @@ package altair import ( - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/math" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/math" "github.com/pkg/errors" ) diff --git a/beacon-chain/core/altair/reward_test.go b/beacon-chain/core/altair/reward_test.go index e33fea2564..ddcfe8ac8e 100644 --- a/beacon-chain/core/altair/reward_test.go +++ b/beacon-chain/core/altair/reward_test.go @@ -4,13 +4,13 @@ import ( "math" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/altair" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/altair" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" ) func Test_BaseReward(t *testing.T) { diff --git a/beacon-chain/core/altair/sync_committee.go b/beacon-chain/core/altair/sync_committee.go index d2e4f45e33..2da0557c61 100644 --- a/beacon-chain/core/altair/sync_committee.go +++ 
b/beacon-chain/core/altair/sync_committee.go @@ -7,18 +7,18 @@ import ( "fmt" "time" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - coreTime "github.com/OffchainLabs/prysm/v6/beacon-chain/core/time" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/crypto/bls" - "github.com/OffchainLabs/prysm/v6/crypto/hash" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + coreTime "github.com/OffchainLabs/prysm/v7/beacon-chain/core/time" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/crypto/bls" + "github.com/OffchainLabs/prysm/v7/crypto/hash" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" + "github.com/OffchainLabs/prysm/v7/time/slots" "github.com/pkg/errors" ) diff --git a/beacon-chain/core/altair/sync_committee_test.go b/beacon-chain/core/altair/sync_committee_test.go index 991653bf6f..096f4712b6 100644 --- a/beacon-chain/core/altair/sync_committee_test.go +++ b/beacon-chain/core/altair/sync_committee_test.go @@ -4,18 +4,18 @@ import ( "testing" "time" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/altair" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/crypto/bls" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - prysmTime "github.com/OffchainLabs/prysm/v6/time" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/altair" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/crypto/bls" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + prysmTime "github.com/OffchainLabs/prysm/v7/time" ) func TestSyncCommitteeIndices_CanGet(t *testing.T) { diff --git a/beacon-chain/core/altair/transition.go b/beacon-chain/core/altair/transition.go index 88f2dfaf81..ff57230651 100644 --- a/beacon-chain/core/altair/transition.go +++ b/beacon-chain/core/altair/transition.go @@ -3,10 +3,10 @@ package altair import ( "context" - e 
"github.com/OffchainLabs/prysm/v6/beacon-chain/core/epoch" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/epoch/precompute" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing/trace" + e "github.com/OffchainLabs/prysm/v7/beacon-chain/core/epoch" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/epoch/precompute" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing/trace" "github.com/pkg/errors" ) diff --git a/beacon-chain/core/altair/transition_test.go b/beacon-chain/core/altair/transition_test.go index d41e842a8a..3af6b0f8c1 100644 --- a/beacon-chain/core/altair/transition_test.go +++ b/beacon-chain/core/altair/transition_test.go @@ -3,10 +3,10 @@ package altair_test import ( "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/altair" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/altair" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" ) func TestProcessEpoch_CanProcess(t *testing.T) { diff --git a/beacon-chain/core/altair/upgrade.go b/beacon-chain/core/altair/upgrade.go index 1c2b135acc..39f64b099d 100644 --- a/beacon-chain/core/altair/upgrade.go +++ b/beacon-chain/core/altair/upgrade.go @@ -3,13 +3,13 @@ package altair import ( "context" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/time" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - "github.com/OffchainLabs/prysm/v6/config/params" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1/attestation" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/time" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + "github.com/OffchainLabs/prysm/v7/config/params" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1/attestation" ) // ConvertToAltair converts a Phase 0 beacon state to an Altair beacon state. 
diff --git a/beacon-chain/core/altair/upgrade_test.go b/beacon-chain/core/altair/upgrade_test.go index 715f57b0c6..cb9e3e9cd9 100644 --- a/beacon-chain/core/altair/upgrade_test.go +++ b/beacon-chain/core/altair/upgrade_test.go @@ -4,15 +4,15 @@ import ( "testing" "github.com/OffchainLabs/go-bitfield" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/altair" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/time" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1/attestation" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/altair" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/time" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1/attestation" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" ) func TestTranslateParticipation(t *testing.T) { diff --git a/beacon-chain/core/blocks/BUILD.bazel b/beacon-chain/core/blocks/BUILD.bazel index 7eea2bfc29..18289bd76d 100644 --- a/beacon-chain/core/blocks/BUILD.bazel +++ b/beacon-chain/core/blocks/BUILD.bazel @@ -18,7 +18,7 @@ go_library( "signature.go", "withdrawals.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/beacon-chain/core/blocks", + importpath = "github.com/OffchainLabs/prysm/v7/beacon-chain/core/blocks", visibility = ["//visibility:public"], deps = [ "//beacon-chain/core/helpers:go_default_library", diff --git a/beacon-chain/core/blocks/attestation.go b/beacon-chain/core/blocks/attestation.go index 10c12407e1..6ad6a0f965 100644 --- a/beacon-chain/core/blocks/attestation.go +++ b/beacon-chain/core/blocks/attestation.go @@ -4,19 +4,19 @@ import ( "context" "fmt" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/signing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/time" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/crypto/bls" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing/trace" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1/attestation" - "github.com/OffchainLabs/prysm/v6/runtime/version" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/signing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/time" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/crypto/bls" + 
"github.com/OffchainLabs/prysm/v7/monitoring/tracing/trace" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1/attestation" + "github.com/OffchainLabs/prysm/v7/runtime/version" "github.com/pkg/errors" ) diff --git a/beacon-chain/core/blocks/attestation_regression_test.go b/beacon-chain/core/blocks/attestation_regression_test.go index c23a110806..870baed09d 100644 --- a/beacon-chain/core/blocks/attestation_regression_test.go +++ b/beacon-chain/core/blocks/attestation_regression_test.go @@ -6,13 +6,13 @@ import ( "testing" "github.com/OffchainLabs/go-bitfield" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/blocks" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - "github.com/OffchainLabs/prysm/v6/config/params" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/blocks" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + "github.com/OffchainLabs/prysm/v7/config/params" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" ) // Beaconfuzz discovered an off by one issue where an attestation could be produced which would pass diff --git a/beacon-chain/core/blocks/attestation_test.go b/beacon-chain/core/blocks/attestation_test.go index 9f9135a7b4..7d3af6f02e 100644 --- a/beacon-chain/core/blocks/attestation_test.go +++ b/beacon-chain/core/blocks/attestation_test.go @@ -5,22 +5,22 @@ import ( "testing" "github.com/OffchainLabs/go-bitfield" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/blocks" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/signing" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/crypto/bls" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1/attestation" - "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1/attestation/aggregation" - attaggregation "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1/attestation/aggregation/attestations" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/blocks" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/signing" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/crypto/bls" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + 
"github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1/attestation" + "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1/attestation/aggregation" + attaggregation "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1/attestation/aggregation/attestations" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" ) func TestProcessAggregatedAttestation_OverlappingBits(t *testing.T) { diff --git a/beacon-chain/core/blocks/attester_slashing.go b/beacon-chain/core/blocks/attester_slashing.go index f6fe916aa1..ddf9eff878 100644 --- a/beacon-chain/core/blocks/attester_slashing.go +++ b/beacon-chain/core/blocks/attester_slashing.go @@ -4,15 +4,15 @@ import ( "context" "sort" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/validators" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/container/slice" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1/attestation" - "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1/slashings" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/validators" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/container/slice" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1/attestation" + "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1/slashings" + "github.com/OffchainLabs/prysm/v7/time/slots" "github.com/pkg/errors" ) diff --git a/beacon-chain/core/blocks/attester_slashing_test.go b/beacon-chain/core/blocks/attester_slashing_test.go index a95534c0c2..ae095db4ee 100644 --- a/beacon-chain/core/blocks/attester_slashing_test.go +++ b/beacon-chain/core/blocks/attester_slashing_test.go @@ -3,21 +3,21 @@ package blocks_test import ( "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/blocks" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/signing" - v "github.com/OffchainLabs/prysm/v6/beacon-chain/core/validators" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/crypto/bls" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/blocks" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/signing" + v "github.com/OffchainLabs/prysm/v7/beacon-chain/core/validators" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + 
"github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/crypto/bls" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" ) func TestSlashableAttestationData_CanSlash(t *testing.T) { diff --git a/beacon-chain/core/blocks/block_operations_fuzz_test.go b/beacon-chain/core/blocks/block_operations_fuzz_test.go index 778a399455..d65fb4c3a0 100644 --- a/beacon-chain/core/blocks/block_operations_fuzz_test.go +++ b/beacon-chain/core/blocks/block_operations_fuzz_test.go @@ -3,15 +3,15 @@ package blocks import ( "testing" - v "github.com/OffchainLabs/prysm/v6/beacon-chain/core/validators" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/fuzz" - "github.com/OffchainLabs/prysm/v6/testing/require" + v "github.com/OffchainLabs/prysm/v7/beacon-chain/core/validators" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/fuzz" + "github.com/OffchainLabs/prysm/v7/testing/require" gofuzz "github.com/google/gofuzz" ) diff --git a/beacon-chain/core/blocks/block_regression_test.go b/beacon-chain/core/blocks/block_regression_test.go index 9a57e6af92..1d613772bd 100644 --- a/beacon-chain/core/blocks/block_regression_test.go +++ b/beacon-chain/core/blocks/block_regression_test.go @@ -3,16 +3,16 @@ package blocks_test import ( "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/blocks" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/signing" - v "github.com/OffchainLabs/prysm/v6/beacon-chain/core/validators" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/crypto/bls" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/blocks" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/signing" + v "github.com/OffchainLabs/prysm/v7/beacon-chain/core/validators" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/crypto/bls" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" ) func 
TestProcessAttesterSlashings_RegressionSlashableIndices(t *testing.T) { diff --git a/beacon-chain/core/blocks/deposit.go b/beacon-chain/core/blocks/deposit.go index 610035694e..2eca62610b 100644 --- a/beacon-chain/core/blocks/deposit.go +++ b/beacon-chain/core/blocks/deposit.go @@ -4,14 +4,14 @@ import ( "context" "fmt" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/signing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/container/trie" - "github.com/OffchainLabs/prysm/v6/contracts/deposit" - "github.com/OffchainLabs/prysm/v6/crypto/bls" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/signing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/container/trie" + "github.com/OffchainLabs/prysm/v7/contracts/deposit" + "github.com/OffchainLabs/prysm/v7/crypto/bls" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" "github.com/pkg/errors" ) diff --git a/beacon-chain/core/blocks/deposit_test.go b/beacon-chain/core/blocks/deposit_test.go index 872f5b7f47..4d382fd000 100644 --- a/beacon-chain/core/blocks/deposit_test.go +++ b/beacon-chain/core/blocks/deposit_test.go @@ -3,16 +3,16 @@ package blocks_test import ( "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/blocks" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/signing" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/container/trie" - "github.com/OffchainLabs/prysm/v6/crypto/bls" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/blocks" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/signing" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/container/trie" + "github.com/OffchainLabs/prysm/v7/crypto/bls" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/require" ) func TestBatchVerifyDepositsSignatures_Ok(t *testing.T) { diff --git a/beacon-chain/core/blocks/eth1_data.go b/beacon-chain/core/blocks/eth1_data.go index 3a6c3208fd..d8953e947e 100644 --- a/beacon-chain/core/blocks/eth1_data.go +++ b/beacon-chain/core/blocks/eth1_data.go @@ -5,9 +5,9 @@ import ( "context" "errors" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/config/params" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/config/params" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" ) // ProcessEth1DataInBlock is an operation performed on each diff --git a/beacon-chain/core/blocks/eth1_data_test.go b/beacon-chain/core/blocks/eth1_data_test.go index 
b3a6d387ff..b2ef805225 100644 --- a/beacon-chain/core/blocks/eth1_data_test.go +++ b/beacon-chain/core/blocks/eth1_data_test.go @@ -4,16 +4,16 @@ import ( "fmt" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/blocks" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/blocks" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" "google.golang.org/protobuf/proto" ) diff --git a/beacon-chain/core/blocks/exit.go b/beacon-chain/core/blocks/exit.go index 954fb7485a..2d8b2d238f 100644 --- a/beacon-chain/core/blocks/exit.go +++ b/beacon-chain/core/blocks/exit.go @@ -4,14 +4,14 @@ import ( "context" "fmt" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/signing" - v "github.com/OffchainLabs/prysm/v6/beacon-chain/core/validators" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/config/params" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/signing" + v "github.com/OffchainLabs/prysm/v7/beacon-chain/core/validators" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/config/params" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" + "github.com/OffchainLabs/prysm/v7/time/slots" "github.com/pkg/errors" ) diff --git a/beacon-chain/core/blocks/exit_test.go b/beacon-chain/core/blocks/exit_test.go index 33ac17c567..95533ae078 100644 --- a/beacon-chain/core/blocks/exit_test.go +++ b/beacon-chain/core/blocks/exit_test.go @@ -3,21 +3,21 @@ package blocks_test import ( "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/blocks" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/signing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/time" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/validators" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/crypto/bls" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - 
"github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/blocks" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/signing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/time" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/validators" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/crypto/bls" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" + "github.com/OffchainLabs/prysm/v7/time/slots" ) func TestProcessVoluntaryExits_NotActiveLongEnoughToExit(t *testing.T) { diff --git a/beacon-chain/core/blocks/genesis.go b/beacon-chain/core/blocks/genesis.go index 4ba71f86a2..12e0b45cf1 100644 --- a/beacon-chain/core/blocks/genesis.go +++ b/beacon-chain/core/blocks/genesis.go @@ -5,14 +5,14 @@ package blocks import ( "context" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - enginev1 "github.com/OffchainLabs/prysm/v6/proto/engine/v1" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + enginev1 "github.com/OffchainLabs/prysm/v7/proto/engine/v1" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" "github.com/pkg/errors" ) diff --git a/beacon-chain/core/blocks/genesis_test.go b/beacon-chain/core/blocks/genesis_test.go index ce5f42307f..011b5be3f8 100644 --- a/beacon-chain/core/blocks/genesis_test.go +++ b/beacon-chain/core/blocks/genesis_test.go @@ -3,9 +3,9 @@ package blocks_test import ( "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/blocks" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/testing/assert" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/blocks" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/testing/assert" ) func TestGenesisBlock_InitializedCorrectly(t *testing.T) { diff --git a/beacon-chain/core/blocks/header.go b/beacon-chain/core/blocks/header.go index 8e64aaebd0..a41a2b808e 100644 --- a/beacon-chain/core/blocks/header.go +++ b/beacon-chain/core/blocks/header.go @@ -5,13 +5,13 @@ import ( "context" "fmt" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/config/params" - 
"github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" ) // ProcessBlockHeader validates a block by its header. diff --git a/beacon-chain/core/blocks/header_test.go b/beacon-chain/core/blocks/header_test.go index a1da62f6fd..ac70b853d9 100644 --- a/beacon-chain/core/blocks/header_test.go +++ b/beacon-chain/core/blocks/header_test.go @@ -4,19 +4,19 @@ import ( "io" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/blocks" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/signing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/time" - p2ptypes "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/types" - "github.com/OffchainLabs/prysm/v6/config/params" - consensusblocks "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/crypto/bls" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/blocks" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/signing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/time" + p2ptypes "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/types" + "github.com/OffchainLabs/prysm/v7/config/params" + consensusblocks "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/crypto/bls" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" "github.com/sirupsen/logrus" "google.golang.org/protobuf/proto" ) diff --git a/beacon-chain/core/blocks/payload.go b/beacon-chain/core/blocks/payload.go index 0d647025f8..011befdeec 100644 --- a/beacon-chain/core/blocks/payload.go +++ b/beacon-chain/core/blocks/payload.go @@ -4,16 +4,16 @@ import ( "bytes" "fmt" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/time" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/config/params" - consensus_types "github.com/OffchainLabs/prysm/v6/consensus-types" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/runtime/version" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/time" + 
"github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/config/params" + consensus_types "github.com/OffchainLabs/prysm/v7/consensus-types" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/runtime/version" + "github.com/OffchainLabs/prysm/v7/time/slots" "github.com/pkg/errors" ) diff --git a/beacon-chain/core/blocks/payload_test.go b/beacon-chain/core/blocks/payload_test.go index dfba23f045..0e9cb067f6 100644 --- a/beacon-chain/core/blocks/payload_test.go +++ b/beacon-chain/core/blocks/payload_test.go @@ -4,21 +4,21 @@ import ( "fmt" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/blocks" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/time" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - consensusblocks "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/encoding/ssz" - enginev1 "github.com/OffchainLabs/prysm/v6/proto/engine/v1" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/blocks" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/time" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + consensusblocks "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/encoding/ssz" + enginev1 "github.com/OffchainLabs/prysm/v7/proto/engine/v1" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" + "github.com/OffchainLabs/prysm/v7/time/slots" ) func Test_IsMergeComplete(t *testing.T) { diff --git a/beacon-chain/core/blocks/proposer_slashing.go b/beacon-chain/core/blocks/proposer_slashing.go index 48ba4b0047..453afdb804 100644 --- a/beacon-chain/core/blocks/proposer_slashing.go +++ b/beacon-chain/core/blocks/proposer_slashing.go @@ -4,14 +4,14 @@ import ( "context" "fmt" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/signing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/time" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/validators" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/config/params" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/signing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/time" + 
"github.com/OffchainLabs/prysm/v7/beacon-chain/core/validators" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/config/params" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/time/slots" "github.com/pkg/errors" "google.golang.org/protobuf/proto" ) diff --git a/beacon-chain/core/blocks/proposer_slashing_regression_test.go b/beacon-chain/core/blocks/proposer_slashing_regression_test.go index 1cfec7cca6..e529819e32 100644 --- a/beacon-chain/core/blocks/proposer_slashing_regression_test.go +++ b/beacon-chain/core/blocks/proposer_slashing_regression_test.go @@ -4,10 +4,10 @@ import ( "os" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/blocks" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/blocks" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/require" ) // Beaconfuzz discovered an issue where a proposer slashing could be produced which would pass diff --git a/beacon-chain/core/blocks/proposer_slashing_test.go b/beacon-chain/core/blocks/proposer_slashing_test.go index f720448a30..e35de0a6b8 100644 --- a/beacon-chain/core/blocks/proposer_slashing_test.go +++ b/beacon-chain/core/blocks/proposer_slashing_test.go @@ -4,21 +4,21 @@ import ( "fmt" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/blocks" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/signing" - v "github.com/OffchainLabs/prysm/v6/beacon-chain/core/validators" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/crypto/bls" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/blocks" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/signing" + v "github.com/OffchainLabs/prysm/v7/beacon-chain/core/validators" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/crypto/bls" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" + "github.com/OffchainLabs/prysm/v7/time/slots" ) func TestProcessProposerSlashings_UnmatchedHeaderSlots(t *testing.T) { diff --git a/beacon-chain/core/blocks/randao.go b/beacon-chain/core/blocks/randao.go 
index 4882c3e4e6..718c665afd 100644 --- a/beacon-chain/core/blocks/randao.go +++ b/beacon-chain/core/blocks/randao.go @@ -3,12 +3,12 @@ package blocks import ( "context" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - "github.com/OffchainLabs/prysm/v6/crypto/hash" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + "github.com/OffchainLabs/prysm/v7/crypto/hash" + "github.com/OffchainLabs/prysm/v7/time/slots" "github.com/pkg/errors" ) diff --git a/beacon-chain/core/blocks/randao_test.go b/beacon-chain/core/blocks/randao_test.go index aa0c824a08..26fc90f0b7 100644 --- a/beacon-chain/core/blocks/randao_test.go +++ b/beacon-chain/core/blocks/randao_test.go @@ -4,17 +4,17 @@ import ( "encoding/binary" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/blocks" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/signing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/time" - "github.com/OffchainLabs/prysm/v6/config/params" - consensusblocks "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/blocks" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/signing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/time" + "github.com/OffchainLabs/prysm/v7/config/params" + consensusblocks "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" ) func TestProcessRandao_IncorrectProposerFailsVerification(t *testing.T) { diff --git a/beacon-chain/core/blocks/signature.go b/beacon-chain/core/blocks/signature.go index 1d8ec1dcc1..b6d6c83349 100644 --- a/beacon-chain/core/blocks/signature.go +++ b/beacon-chain/core/blocks/signature.go @@ -4,16 +4,16 @@ import ( "context" "encoding/binary" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/signing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/crypto/bls" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1/attestation" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/signing" + 
"github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/crypto/bls" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1/attestation" + "github.com/OffchainLabs/prysm/v7/time/slots" "github.com/pkg/errors" ) diff --git a/beacon-chain/core/blocks/signature_test.go b/beacon-chain/core/blocks/signature_test.go index aa86bb8b3c..f0b7ff5981 100644 --- a/beacon-chain/core/blocks/signature_test.go +++ b/beacon-chain/core/blocks/signature_test.go @@ -3,16 +3,16 @@ package blocks_test import ( "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/blocks" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/signing" - "github.com/OffchainLabs/prysm/v6/config/params" - consensusblocks "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/crypto/bls" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/blocks" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/signing" + "github.com/OffchainLabs/prysm/v7/config/params" + consensusblocks "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/crypto/bls" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" ) func TestVerifyBlockHeaderSignature(t *testing.T) { diff --git a/beacon-chain/core/blocks/withdrawals.go b/beacon-chain/core/blocks/withdrawals.go index 5ce2d58dff..afd445e878 100644 --- a/beacon-chain/core/blocks/withdrawals.go +++ b/beacon-chain/core/blocks/withdrawals.go @@ -4,19 +4,19 @@ import ( "bytes" "fmt" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/signing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/crypto/bls" - "github.com/OffchainLabs/prysm/v6/crypto/hash" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/encoding/ssz" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/signing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/crypto/bls" + "github.com/OffchainLabs/prysm/v7/crypto/hash" + 
"github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/encoding/ssz" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" "github.com/pkg/errors" ) diff --git a/beacon-chain/core/blocks/withdrawals_test.go b/beacon-chain/core/blocks/withdrawals_test.go index 9752c95455..bb1b468d46 100644 --- a/beacon-chain/core/blocks/withdrawals_test.go +++ b/beacon-chain/core/blocks/withdrawals_test.go @@ -4,25 +4,25 @@ import ( "math/rand" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/blocks" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/signing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/time" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - consensusblocks "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/crypto/bls" - "github.com/OffchainLabs/prysm/v6/crypto/bls/common" - "github.com/OffchainLabs/prysm/v6/crypto/hash" - "github.com/OffchainLabs/prysm/v6/encoding/ssz" - enginev1 "github.com/OffchainLabs/prysm/v6/proto/engine/v1" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/blocks" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/signing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/time" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + consensusblocks "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/crypto/bls" + "github.com/OffchainLabs/prysm/v7/crypto/bls/common" + "github.com/OffchainLabs/prysm/v7/crypto/hash" + "github.com/OffchainLabs/prysm/v7/encoding/ssz" + enginev1 "github.com/OffchainLabs/prysm/v7/proto/engine/v1" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/time/slots" ) func TestProcessBLSToExecutionChange(t *testing.T) { diff --git a/beacon-chain/core/capella/BUILD.bazel b/beacon-chain/core/capella/BUILD.bazel index 4cbe315b61..b08cc9c560 100644 --- a/beacon-chain/core/capella/BUILD.bazel +++ b/beacon-chain/core/capella/BUILD.bazel @@ -3,7 +3,7 @@ load("@prysm//tools/go:def.bzl", "go_library", "go_test") go_library( name = "go_default_library", srcs = ["upgrade.go"], - importpath = "github.com/OffchainLabs/prysm/v6/beacon-chain/core/capella", + importpath = "github.com/OffchainLabs/prysm/v7/beacon-chain/core/capella", visibility = ["//visibility:public"], deps = [ "//beacon-chain/core/time:go_default_library", diff --git a/beacon-chain/core/capella/upgrade.go b/beacon-chain/core/capella/upgrade.go index 
5208eb4002..d27965276a 100644 --- a/beacon-chain/core/capella/upgrade.go +++ b/beacon-chain/core/capella/upgrade.go @@ -1,12 +1,12 @@ package capella import ( - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/time" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - "github.com/OffchainLabs/prysm/v6/config/params" - enginev1 "github.com/OffchainLabs/prysm/v6/proto/engine/v1" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/time" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + "github.com/OffchainLabs/prysm/v7/config/params" + enginev1 "github.com/OffchainLabs/prysm/v7/proto/engine/v1" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" ) // UpgradeToCapella updates a generic state to return the version Capella state. diff --git a/beacon-chain/core/capella/upgrade_test.go b/beacon-chain/core/capella/upgrade_test.go index 605d39a47f..9aba4b1254 100644 --- a/beacon-chain/core/capella/upgrade_test.go +++ b/beacon-chain/core/capella/upgrade_test.go @@ -3,14 +3,14 @@ package capella_test import ( "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/capella" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/time" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - enginev1 "github.com/OffchainLabs/prysm/v6/proto/engine/v1" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/capella" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/time" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + enginev1 "github.com/OffchainLabs/prysm/v7/proto/engine/v1" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" ) func TestUpgradeToCapella(t *testing.T) { diff --git a/beacon-chain/core/deneb/BUILD.bazel b/beacon-chain/core/deneb/BUILD.bazel index 8165824f17..83e65e3617 100644 --- a/beacon-chain/core/deneb/BUILD.bazel +++ b/beacon-chain/core/deneb/BUILD.bazel @@ -3,7 +3,7 @@ load("@prysm//tools/go:def.bzl", "go_library", "go_test") go_library( name = "go_default_library", srcs = ["upgrade.go"], - importpath = "github.com/OffchainLabs/prysm/v6/beacon-chain/core/deneb", + importpath = "github.com/OffchainLabs/prysm/v7/beacon-chain/core/deneb", visibility = ["//visibility:public"], deps = [ "//beacon-chain/core/time:go_default_library", diff --git a/beacon-chain/core/deneb/upgrade.go b/beacon-chain/core/deneb/upgrade.go index e730814e46..9e0d749fc2 100644 --- a/beacon-chain/core/deneb/upgrade.go +++ b/beacon-chain/core/deneb/upgrade.go @@ -1,12 +1,12 @@ package deneb import ( - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/time" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - "github.com/OffchainLabs/prysm/v6/config/params" - enginev1 "github.com/OffchainLabs/prysm/v6/proto/engine/v1" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/time" + 
"github.com/OffchainLabs/prysm/v7/beacon-chain/state" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + "github.com/OffchainLabs/prysm/v7/config/params" + enginev1 "github.com/OffchainLabs/prysm/v7/proto/engine/v1" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" ) // UpgradeToDeneb updates inputs a generic state to return the version Deneb state. diff --git a/beacon-chain/core/deneb/upgrade_test.go b/beacon-chain/core/deneb/upgrade_test.go index f8dd6ac00e..eeac4ab875 100644 --- a/beacon-chain/core/deneb/upgrade_test.go +++ b/beacon-chain/core/deneb/upgrade_test.go @@ -3,13 +3,13 @@ package deneb_test import ( "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/deneb" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/time" - "github.com/OffchainLabs/prysm/v6/config/params" - enginev1 "github.com/OffchainLabs/prysm/v6/proto/engine/v1" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/deneb" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/time" + "github.com/OffchainLabs/prysm/v7/config/params" + enginev1 "github.com/OffchainLabs/prysm/v7/proto/engine/v1" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" ) func TestUpgradeToDeneb(t *testing.T) { diff --git a/beacon-chain/core/electra/BUILD.bazel b/beacon-chain/core/electra/BUILD.bazel index 0c82bdb715..0086f060eb 100644 --- a/beacon-chain/core/electra/BUILD.bazel +++ b/beacon-chain/core/electra/BUILD.bazel @@ -16,7 +16,7 @@ go_library( "validator.go", "withdrawals.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/beacon-chain/core/electra", + importpath = "github.com/OffchainLabs/prysm/v7/beacon-chain/core/electra", visibility = ["//visibility:public"], deps = [ "//beacon-chain/core/altair:go_default_library", diff --git a/beacon-chain/core/electra/attestation.go b/beacon-chain/core/electra/attestation.go index 51fe08bca2..d09baf7af3 100644 --- a/beacon-chain/core/electra/attestation.go +++ b/beacon-chain/core/electra/attestation.go @@ -4,15 +4,15 @@ import ( "context" "fmt" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/altair" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/time" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - customtypes "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native/custom-types" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1/attestation" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/altair" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/time" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + customtypes "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native/custom-types" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1/attestation" ) var ( diff --git a/beacon-chain/core/electra/churn.go 
b/beacon-chain/core/electra/churn.go index 6600ba5ef4..6ab669fd3f 100644 --- a/beacon-chain/core/electra/churn.go +++ b/beacon-chain/core/electra/churn.go @@ -3,11 +3,11 @@ package electra import ( "context" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/math" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/math" + "github.com/OffchainLabs/prysm/v7/time/slots" ) // ComputeConsolidationEpochAndUpdateChurn fulfills the consensus spec definition below. This method diff --git a/beacon-chain/core/electra/churn_test.go b/beacon-chain/core/electra/churn_test.go index 66c0af7192..2c28c11726 100644 --- a/beacon-chain/core/electra/churn_test.go +++ b/beacon-chain/core/electra/churn_test.go @@ -5,15 +5,15 @@ import ( "fmt" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/electra" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - eth "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/electra" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + eth "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/time/slots" ) func createValidatorsWithTotalActiveBalance(totalBal primitives.Gwei) []*eth.Validator { diff --git a/beacon-chain/core/electra/consolidations.go b/beacon-chain/core/electra/consolidations.go index e482fe6adb..603f271e29 100644 --- a/beacon-chain/core/electra/consolidations.go +++ b/beacon-chain/core/electra/consolidations.go @@ -5,16 +5,16 @@ import ( "context" "fmt" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing/trace" - enginev1 "github.com/OffchainLabs/prysm/v6/proto/engine/v1" - eth "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + 
"github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing/trace" + enginev1 "github.com/OffchainLabs/prysm/v7/proto/engine/v1" + eth "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/time/slots" "github.com/ethereum/go-ethereum/common/math" "github.com/pkg/errors" log "github.com/sirupsen/logrus" diff --git a/beacon-chain/core/electra/consolidations_test.go b/beacon-chain/core/electra/consolidations_test.go index ad194846ab..872ac1c61b 100644 --- a/beacon-chain/core/electra/consolidations_test.go +++ b/beacon-chain/core/electra/consolidations_test.go @@ -4,16 +4,16 @@ import ( "context" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/electra" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - enginev1 "github.com/OffchainLabs/prysm/v6/proto/engine/v1" - eth "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/electra" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + enginev1 "github.com/OffchainLabs/prysm/v7/proto/engine/v1" + eth "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" ) func TestProcessPendingConsolidations(t *testing.T) { diff --git a/beacon-chain/core/electra/deposit_fuzz_test.go b/beacon-chain/core/electra/deposit_fuzz_test.go index 435b0e0748..04a7ac2dd1 100644 --- a/beacon-chain/core/electra/deposit_fuzz_test.go +++ b/beacon-chain/core/electra/deposit_fuzz_test.go @@ -3,11 +3,11 @@ package electra_test import ( "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/electra" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/fuzz" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/electra" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/fuzz" + "github.com/OffchainLabs/prysm/v7/testing/require" gofuzz "github.com/google/gofuzz" ) diff --git a/beacon-chain/core/electra/deposits.go b/beacon-chain/core/electra/deposits.go index 0202140583..6eb80555ee 100644 --- a/beacon-chain/core/electra/deposits.go +++ b/beacon-chain/core/electra/deposits.go @@ -3,20 +3,20 @@ package electra import ( "context" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/blocks" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/signing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - 
"github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/contracts/deposit" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing/trace" - enginev1 "github.com/OffchainLabs/prysm/v6/proto/engine/v1" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/blocks" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/signing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/contracts/deposit" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing/trace" + enginev1 "github.com/OffchainLabs/prysm/v7/proto/engine/v1" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" + "github.com/OffchainLabs/prysm/v7/time/slots" "github.com/pkg/errors" log "github.com/sirupsen/logrus" ) diff --git a/beacon-chain/core/electra/deposits_test.go b/beacon-chain/core/electra/deposits_test.go index 77fda66086..0c4d4658df 100644 --- a/beacon-chain/core/electra/deposits_test.go +++ b/beacon-chain/core/electra/deposits_test.go @@ -4,21 +4,21 @@ import ( "context" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/electra" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/signing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - stateTesting "github.com/OffchainLabs/prysm/v6/beacon-chain/state/testing" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/crypto/bls" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - enginev1 "github.com/OffchainLabs/prysm/v6/proto/engine/v1" - eth "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/electra" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/signing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + stateTesting "github.com/OffchainLabs/prysm/v7/beacon-chain/state/testing" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/crypto/bls" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + enginev1 "github.com/OffchainLabs/prysm/v7/proto/engine/v1" + eth "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" ) func 
TestProcessPendingDepositsMultiplesSameDeposits(t *testing.T) { diff --git a/beacon-chain/core/electra/effective_balance_updates.go b/beacon-chain/core/electra/effective_balance_updates.go index ffca52a416..9123c99098 100644 --- a/beacon-chain/core/electra/effective_balance_updates.go +++ b/beacon-chain/core/electra/effective_balance_updates.go @@ -3,9 +3,9 @@ package electra import ( "fmt" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/config/params" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/config/params" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" ) // ProcessEffectiveBalanceUpdates processes effective balance updates during epoch processing. diff --git a/beacon-chain/core/electra/effective_balance_updates_test.go b/beacon-chain/core/electra/effective_balance_updates_test.go index 5b9af4f194..c8342dafd0 100644 --- a/beacon-chain/core/electra/effective_balance_updates_test.go +++ b/beacon-chain/core/electra/effective_balance_updates_test.go @@ -3,12 +3,12 @@ package electra_test import ( "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/electra" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - "github.com/OffchainLabs/prysm/v6/config/params" - eth "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/electra" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + "github.com/OffchainLabs/prysm/v7/config/params" + eth "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/require" ) func TestProcessEffectiveBalanceUpdates_SafeCopy(t *testing.T) { diff --git a/beacon-chain/core/electra/registry_updates.go b/beacon-chain/core/electra/registry_updates.go index 8b4cbc4421..0350d19df1 100644 --- a/beacon-chain/core/electra/registry_updates.go +++ b/beacon-chain/core/electra/registry_updates.go @@ -5,12 +5,12 @@ import ( "errors" "fmt" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/time" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/validators" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/time" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/validators" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" ) // ProcessRegistryUpdates processes all validators eligible for the activation queue, all validators diff --git a/beacon-chain/core/electra/registry_updates_test.go b/beacon-chain/core/electra/registry_updates_test.go index 2ab626c4b9..af393feac0 100644 --- a/beacon-chain/core/electra/registry_updates_test.go +++ b/beacon-chain/core/electra/registry_updates_test.go @@ -4,16 +4,16 @@ import ( "context" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/electra" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - 
"github.com/OffchainLabs/prysm/v6/beacon-chain/state" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - eth "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/electra" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + eth "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" ) func TestProcessRegistryUpdates(t *testing.T) { diff --git a/beacon-chain/core/electra/transition.go b/beacon-chain/core/electra/transition.go index 651fc63503..c26acd8421 100644 --- a/beacon-chain/core/electra/transition.go +++ b/beacon-chain/core/electra/transition.go @@ -4,14 +4,14 @@ import ( "context" "fmt" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/altair" - e "github.com/OffchainLabs/prysm/v6/beacon-chain/core/epoch" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/epoch/precompute" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing/trace" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/altair" + e "github.com/OffchainLabs/prysm/v7/beacon-chain/core/epoch" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/epoch/precompute" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing/trace" "github.com/pkg/errors" ) diff --git a/beacon-chain/core/electra/transition_no_verify_sig.go b/beacon-chain/core/electra/transition_no_verify_sig.go index 77a11a0881..d12a2a5336 100644 --- a/beacon-chain/core/electra/transition_no_verify_sig.go +++ b/beacon-chain/core/electra/transition_no_verify_sig.go @@ -3,11 +3,11 @@ package electra import ( "context" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/blocks" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - v "github.com/OffchainLabs/prysm/v6/beacon-chain/core/validators" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/blocks" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + v "github.com/OffchainLabs/prysm/v7/beacon-chain/core/validators" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" "github.com/pkg/errors" ) diff --git a/beacon-chain/core/electra/transition_no_verify_sig_test.go b/beacon-chain/core/electra/transition_no_verify_sig_test.go index 
fb6f30a88a..a884f60628 100644 --- a/beacon-chain/core/electra/transition_no_verify_sig_test.go +++ b/beacon-chain/core/electra/transition_no_verify_sig_test.go @@ -3,12 +3,12 @@ package electra_test import ( "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/electra" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - enginev1 "github.com/OffchainLabs/prysm/v6/proto/engine/v1" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/electra" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + enginev1 "github.com/OffchainLabs/prysm/v7/proto/engine/v1" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" ) func TestProcessOperationsWithNilRequests(t *testing.T) { diff --git a/beacon-chain/core/electra/transition_test.go b/beacon-chain/core/electra/transition_test.go index 05bf1739f6..41454524d7 100644 --- a/beacon-chain/core/electra/transition_test.go +++ b/beacon-chain/core/electra/transition_test.go @@ -3,14 +3,14 @@ package electra_test import ( "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/electra" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/config/params" - consensusblocks "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/electra" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/config/params" + consensusblocks "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" "github.com/ethereum/go-ethereum/common" ) diff --git a/beacon-chain/core/electra/upgrade.go b/beacon-chain/core/electra/upgrade.go index 88b00a7718..240ee71019 100644 --- a/beacon-chain/core/electra/upgrade.go +++ b/beacon-chain/core/electra/upgrade.go @@ -3,15 +3,15 @@ package electra import ( "sort" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/time" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - enginev1 "github.com/OffchainLabs/prysm/v6/proto/engine/v1" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/time" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + enginev1 "github.com/OffchainLabs/prysm/v7/proto/engine/v1" + ethpb 
"github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/time/slots" "github.com/pkg/errors" ) diff --git a/beacon-chain/core/electra/upgrade_test.go b/beacon-chain/core/electra/upgrade_test.go index fb83f0b527..adedf62296 100644 --- a/beacon-chain/core/electra/upgrade_test.go +++ b/beacon-chain/core/electra/upgrade_test.go @@ -3,16 +3,16 @@ package electra_test import ( "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/electra" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/time" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - enginev1 "github.com/OffchainLabs/prysm/v6/proto/engine/v1" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/electra" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/time" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + enginev1 "github.com/OffchainLabs/prysm/v7/proto/engine/v1" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" + "github.com/OffchainLabs/prysm/v7/time/slots" ) func TestUpgradeToElectra(t *testing.T) { diff --git a/beacon-chain/core/electra/validator.go b/beacon-chain/core/electra/validator.go index ca46fad222..2b70592069 100644 --- a/beacon-chain/core/electra/validator.go +++ b/beacon-chain/core/electra/validator.go @@ -3,11 +3,11 @@ package electra import ( "errors" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/crypto/bls/common" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/crypto/bls/common" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" ) // SwitchToCompoundingValidator diff --git a/beacon-chain/core/electra/validator_test.go b/beacon-chain/core/electra/validator_test.go index ee68d97950..8ea7d5120a 100644 --- a/beacon-chain/core/electra/validator_test.go +++ b/beacon-chain/core/electra/validator_test.go @@ -4,13 +4,13 @@ import ( "bytes" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/electra" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - eth "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/electra" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + eth "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + 
"github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" ) func TestSwitchToCompoundingValidator(t *testing.T) { diff --git a/beacon-chain/core/electra/withdrawals.go b/beacon-chain/core/electra/withdrawals.go index 9cfbcc4641..89aa4b16ba 100644 --- a/beacon-chain/core/electra/withdrawals.go +++ b/beacon-chain/core/electra/withdrawals.go @@ -4,17 +4,17 @@ import ( "bytes" "context" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/validators" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing/trace" - enginev1 "github.com/OffchainLabs/prysm/v6/proto/engine/v1" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/validators" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing/trace" + enginev1 "github.com/OffchainLabs/prysm/v7/proto/engine/v1" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" + "github.com/OffchainLabs/prysm/v7/time/slots" "github.com/ethereum/go-ethereum/common/hexutil" "github.com/pkg/errors" log "github.com/sirupsen/logrus" diff --git a/beacon-chain/core/electra/withdrawals_test.go b/beacon-chain/core/electra/withdrawals_test.go index 429878824a..a8bc03d0db 100644 --- a/beacon-chain/core/electra/withdrawals_test.go +++ b/beacon-chain/core/electra/withdrawals_test.go @@ -3,16 +3,16 @@ package electra_test import ( "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/electra" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - enginev1 "github.com/OffchainLabs/prysm/v6/proto/engine/v1" - eth "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/electra" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + enginev1 "github.com/OffchainLabs/prysm/v7/proto/engine/v1" + eth "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" "github.com/ethereum/go-ethereum/common/hexutil" "github.com/sirupsen/logrus" "github.com/sirupsen/logrus/hooks/test" diff --git a/beacon-chain/core/epoch/BUILD.bazel b/beacon-chain/core/epoch/BUILD.bazel index 8a93c949e7..17f35789cf 100644 --- a/beacon-chain/core/epoch/BUILD.bazel +++ 
b/beacon-chain/core/epoch/BUILD.bazel @@ -6,7 +6,7 @@ go_library( "epoch_processing.go", "sortable_indices.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/beacon-chain/core/epoch", + importpath = "github.com/OffchainLabs/prysm/v7/beacon-chain/core/epoch", visibility = [ "//beacon-chain:__subpackages__", "//testing/spectest:__subpackages__", diff --git a/beacon-chain/core/epoch/epoch_processing.go b/beacon-chain/core/epoch/epoch_processing.go index 13baf8c16c..a5b097e5c4 100644 --- a/beacon-chain/core/epoch/epoch_processing.go +++ b/beacon-chain/core/epoch/epoch_processing.go @@ -9,17 +9,17 @@ import ( "fmt" "sort" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/time" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/validators" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state/stateutil" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/math" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/time" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/validators" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state/stateutil" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/math" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" "github.com/pkg/errors" ) diff --git a/beacon-chain/core/epoch/epoch_processing_fuzz_test.go b/beacon-chain/core/epoch/epoch_processing_fuzz_test.go index ff9cc79ddc..de84b7b14e 100644 --- a/beacon-chain/core/epoch/epoch_processing_fuzz_test.go +++ b/beacon-chain/core/epoch/epoch_processing_fuzz_test.go @@ -3,10 +3,10 @@ package epoch import ( "testing" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/fuzz" - "github.com/OffchainLabs/prysm/v6/testing/require" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/fuzz" + "github.com/OffchainLabs/prysm/v7/testing/require" gofuzz "github.com/google/gofuzz" ) diff --git a/beacon-chain/core/epoch/epoch_processing_test.go b/beacon-chain/core/epoch/epoch_processing_test.go index 59ee6040d1..dd2739a421 100644 --- a/beacon-chain/core/epoch/epoch_processing_test.go +++ b/beacon-chain/core/epoch/epoch_processing_test.go @@ -5,20 +5,20 @@ import ( "math" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/epoch" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/time" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/transition" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - 
"github.com/OffchainLabs/prysm/v6/beacon-chain/state/stateutil" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/epoch" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/time" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/transition" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state/stateutil" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" "google.golang.org/protobuf/proto" ) diff --git a/beacon-chain/core/epoch/precompute/BUILD.bazel b/beacon-chain/core/epoch/precompute/BUILD.bazel index 51fe53fa86..356c9956bf 100644 --- a/beacon-chain/core/epoch/precompute/BUILD.bazel +++ b/beacon-chain/core/epoch/precompute/BUILD.bazel @@ -10,7 +10,7 @@ go_library( "slashing.go", "type.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/beacon-chain/core/epoch/precompute", + importpath = "github.com/OffchainLabs/prysm/v7/beacon-chain/core/epoch/precompute", visibility = [ "//beacon-chain:__subpackages__", "//testing/spectest:__subpackages__", diff --git a/beacon-chain/core/epoch/precompute/attestation.go b/beacon-chain/core/epoch/precompute/attestation.go index f2c671149f..881535a7a6 100644 --- a/beacon-chain/core/epoch/precompute/attestation.go +++ b/beacon-chain/core/epoch/precompute/attestation.go @@ -4,16 +4,16 @@ import ( "bytes" "context" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/time" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing/trace" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1/attestation" - "github.com/OffchainLabs/prysm/v6/runtime/version" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/time" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing/trace" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1/attestation" + "github.com/OffchainLabs/prysm/v7/runtime/version" "github.com/pkg/errors" ) diff --git a/beacon-chain/core/epoch/precompute/attestation_test.go 
b/beacon-chain/core/epoch/precompute/attestation_test.go index 099fe7b749..639f0eaa34 100644 --- a/beacon-chain/core/epoch/precompute/attestation_test.go +++ b/beacon-chain/core/epoch/precompute/attestation_test.go @@ -4,15 +4,15 @@ import ( "testing" "github.com/OffchainLabs/go-bitfield" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/epoch/precompute" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/config/params" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1/attestation" - "github.com/OffchainLabs/prysm/v6/runtime/version" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/epoch/precompute" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/config/params" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1/attestation" + "github.com/OffchainLabs/prysm/v7/runtime/version" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" ) func TestUpdateValidator_Works(t *testing.T) { diff --git a/beacon-chain/core/epoch/precompute/justification_finalization.go b/beacon-chain/core/epoch/precompute/justification_finalization.go index d219192cab..db6cfa0bc1 100644 --- a/beacon-chain/core/epoch/precompute/justification_finalization.go +++ b/beacon-chain/core/epoch/precompute/justification_finalization.go @@ -2,12 +2,12 @@ package precompute import ( "github.com/OffchainLabs/go-bitfield" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/time" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/config/params" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/time" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/config/params" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/time/slots" "github.com/pkg/errors" ) diff --git a/beacon-chain/core/epoch/precompute/justification_finalization_test.go b/beacon-chain/core/epoch/precompute/justification_finalization_test.go index 1eaed13fb5..a0222d7f31 100644 --- a/beacon-chain/core/epoch/precompute/justification_finalization_test.go +++ b/beacon-chain/core/epoch/precompute/justification_finalization_test.go @@ -4,15 +4,15 @@ import ( "testing" "github.com/OffchainLabs/go-bitfield" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/altair" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/epoch/precompute" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" + 
"github.com/OffchainLabs/prysm/v7/beacon-chain/core/altair" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/epoch/precompute" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" ) func TestProcessJustificationAndFinalizationPreCompute_ConsecutiveEpochs(t *testing.T) { diff --git a/beacon-chain/core/epoch/precompute/new.go b/beacon-chain/core/epoch/precompute/new.go index b01f9b9b77..485bee4bff 100644 --- a/beacon-chain/core/epoch/precompute/new.go +++ b/beacon-chain/core/epoch/precompute/new.go @@ -6,11 +6,11 @@ package precompute import ( "context" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/time" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing/trace" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/time" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing/trace" "github.com/pkg/errors" ) diff --git a/beacon-chain/core/epoch/precompute/new_test.go b/beacon-chain/core/epoch/precompute/new_test.go index de2468d715..8b8f9056b0 100644 --- a/beacon-chain/core/epoch/precompute/new_test.go +++ b/beacon-chain/core/epoch/precompute/new_test.go @@ -3,12 +3,12 @@ package precompute_test import ( "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/epoch/precompute" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - "github.com/OffchainLabs/prysm/v6/config/params" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/epoch/precompute" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + "github.com/OffchainLabs/prysm/v7/config/params" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" ) func TestNew(t *testing.T) { diff --git a/beacon-chain/core/epoch/precompute/reward_penalty.go b/beacon-chain/core/epoch/precompute/reward_penalty.go index b618bcec34..6643719a04 100644 --- a/beacon-chain/core/epoch/precompute/reward_penalty.go +++ b/beacon-chain/core/epoch/precompute/reward_penalty.go @@ -1,12 +1,12 @@ package precompute import ( - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/time" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/math" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/time" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/config/params" + 
"github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/math" "github.com/pkg/errors" ) diff --git a/beacon-chain/core/epoch/precompute/reward_penalty_test.go b/beacon-chain/core/epoch/precompute/reward_penalty_test.go index a9a9084ebb..83f600ff5d 100644 --- a/beacon-chain/core/epoch/precompute/reward_penalty_test.go +++ b/beacon-chain/core/epoch/precompute/reward_penalty_test.go @@ -4,18 +4,18 @@ import ( "testing" "github.com/OffchainLabs/go-bitfield" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/time" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/math" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/time" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/math" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" "github.com/pkg/errors" ) diff --git a/beacon-chain/core/epoch/precompute/slashing.go b/beacon-chain/core/epoch/precompute/slashing.go index 75e8b5e4b9..92e6fa8d93 100644 --- a/beacon-chain/core/epoch/precompute/slashing.go +++ b/beacon-chain/core/epoch/precompute/slashing.go @@ -1,10 +1,10 @@ package precompute import ( - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/time" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/config/params" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/time" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/config/params" ) // ProcessSlashingsPrecompute processes the slashed validators during epoch processing. 
diff --git a/beacon-chain/core/epoch/precompute/slashing_test.go b/beacon-chain/core/epoch/precompute/slashing_test.go index 804d23752b..bf131e3969 100644 --- a/beacon-chain/core/epoch/precompute/slashing_test.go +++ b/beacon-chain/core/epoch/precompute/slashing_test.go @@ -4,12 +4,12 @@ import ( "fmt" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/epoch/precompute" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - "github.com/OffchainLabs/prysm/v6/config/params" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/epoch/precompute" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + "github.com/OffchainLabs/prysm/v7/config/params" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" "google.golang.org/protobuf/proto" ) diff --git a/beacon-chain/core/epoch/precompute/type.go b/beacon-chain/core/epoch/precompute/type.go index 96c1c55cc5..9ccdf0742a 100644 --- a/beacon-chain/core/epoch/precompute/type.go +++ b/beacon-chain/core/epoch/precompute/type.go @@ -1,6 +1,6 @@ package precompute -import "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" +import "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" // Validator stores the pre computation of individual validator's attesting records these records // consist of attestation votes, block inclusion record. Pre computing and storing such record diff --git a/beacon-chain/core/epoch/sortable_indices.go b/beacon-chain/core/epoch/sortable_indices.go index b2ca40c330..e5bc28dde8 100644 --- a/beacon-chain/core/epoch/sortable_indices.go +++ b/beacon-chain/core/epoch/sortable_indices.go @@ -1,8 +1,8 @@ package epoch import ( - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" ) // sortableIndices implements the Sort interface to sort newly activated validator indices diff --git a/beacon-chain/core/epoch/sortable_indices_test.go b/beacon-chain/core/epoch/sortable_indices_test.go index 8d9cfe00e9..ab2f1d47f6 100644 --- a/beacon-chain/core/epoch/sortable_indices_test.go +++ b/beacon-chain/core/epoch/sortable_indices_test.go @@ -4,10 +4,10 @@ import ( "sort" "testing" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - eth "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/require" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + eth "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/require" "github.com/google/go-cmp/cmp" ) diff --git a/beacon-chain/core/execution/BUILD.bazel b/beacon-chain/core/execution/BUILD.bazel index 46cb39bd84..e9e7d7d9f2 100644 --- a/beacon-chain/core/execution/BUILD.bazel +++ b/beacon-chain/core/execution/BUILD.bazel @@ -3,7 +3,7 @@ load("@prysm//tools/go:def.bzl", "go_library", "go_test") go_library( name = "go_default_library", srcs = ["upgrade.go"], - importpath = 
"github.com/OffchainLabs/prysm/v6/beacon-chain/core/execution", + importpath = "github.com/OffchainLabs/prysm/v7/beacon-chain/core/execution", visibility = [ "//beacon-chain:__subpackages__", "//cmd/prysmctl/testnet:__pkg__", diff --git a/beacon-chain/core/execution/upgrade.go b/beacon-chain/core/execution/upgrade.go index 50dd2dd68d..f59df9b433 100644 --- a/beacon-chain/core/execution/upgrade.go +++ b/beacon-chain/core/execution/upgrade.go @@ -1,12 +1,12 @@ package execution import ( - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/time" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - "github.com/OffchainLabs/prysm/v6/config/params" - enginev1 "github.com/OffchainLabs/prysm/v6/proto/engine/v1" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/time" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + "github.com/OffchainLabs/prysm/v7/config/params" + enginev1 "github.com/OffchainLabs/prysm/v7/proto/engine/v1" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" ) // UpgradeToBellatrix updates inputs a generic state to return the version Bellatrix state. diff --git a/beacon-chain/core/execution/upgrade_test.go b/beacon-chain/core/execution/upgrade_test.go index ec1566e012..6ed8c32cac 100644 --- a/beacon-chain/core/execution/upgrade_test.go +++ b/beacon-chain/core/execution/upgrade_test.go @@ -3,13 +3,13 @@ package execution_test import ( "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/execution" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/time" - "github.com/OffchainLabs/prysm/v6/config/params" - enginev1 "github.com/OffchainLabs/prysm/v6/proto/engine/v1" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/execution" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/time" + "github.com/OffchainLabs/prysm/v7/config/params" + enginev1 "github.com/OffchainLabs/prysm/v7/proto/engine/v1" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" ) func TestUpgradeToBellatrix(t *testing.T) { diff --git a/beacon-chain/core/feed/BUILD.bazel b/beacon-chain/core/feed/BUILD.bazel index 7bfb6ef707..f27443f72e 100644 --- a/beacon-chain/core/feed/BUILD.bazel +++ b/beacon-chain/core/feed/BUILD.bazel @@ -3,7 +3,7 @@ load("@prysm//tools/go:def.bzl", "go_library") go_library( name = "go_default_library", srcs = ["event.go"], - importpath = "github.com/OffchainLabs/prysm/v6/beacon-chain/core/feed", + importpath = "github.com/OffchainLabs/prysm/v7/beacon-chain/core/feed", visibility = [ "//beacon-chain:__subpackages__", "//testing/slasher/simulator:__subpackages__", diff --git a/beacon-chain/core/feed/block/BUILD.bazel b/beacon-chain/core/feed/block/BUILD.bazel index cf77608882..41b4185d90 100644 --- a/beacon-chain/core/feed/block/BUILD.bazel +++ b/beacon-chain/core/feed/block/BUILD.bazel @@ -6,7 +6,7 @@ go_library( "events.go", "notifier.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/beacon-chain/core/feed/block", + importpath = "github.com/OffchainLabs/prysm/v7/beacon-chain/core/feed/block", visibility = 
["//beacon-chain:__subpackages__"], deps = [ "//async/event:go_default_library", diff --git a/beacon-chain/core/feed/block/events.go b/beacon-chain/core/feed/block/events.go index a48a763926..a57d8afae4 100644 --- a/beacon-chain/core/feed/block/events.go +++ b/beacon-chain/core/feed/block/events.go @@ -2,7 +2,7 @@ // during the runtime of a beacon node. package block -import "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" +import "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" const ( // ReceivedBlock is sent after a block has been received by the beacon node via p2p or RPC. diff --git a/beacon-chain/core/feed/block/notifier.go b/beacon-chain/core/feed/block/notifier.go index 533931c520..e3a1e40d4b 100644 --- a/beacon-chain/core/feed/block/notifier.go +++ b/beacon-chain/core/feed/block/notifier.go @@ -1,6 +1,6 @@ package block -import "github.com/OffchainLabs/prysm/v6/async/event" +import "github.com/OffchainLabs/prysm/v7/async/event" // Notifier interface defines the methods of the service that provides block updates to consumers. type Notifier interface { diff --git a/beacon-chain/core/feed/operation/BUILD.bazel b/beacon-chain/core/feed/operation/BUILD.bazel index 30024d5187..294a92a2f4 100644 --- a/beacon-chain/core/feed/operation/BUILD.bazel +++ b/beacon-chain/core/feed/operation/BUILD.bazel @@ -6,7 +6,7 @@ go_library( "events.go", "notifier.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/beacon-chain/core/feed/operation", + importpath = "github.com/OffchainLabs/prysm/v7/beacon-chain/core/feed/operation", visibility = ["//beacon-chain:__subpackages__"], deps = [ "//async/event:go_default_library", diff --git a/beacon-chain/core/feed/operation/events.go b/beacon-chain/core/feed/operation/events.go index bc219ba3cf..f97173b2af 100644 --- a/beacon-chain/core/feed/operation/events.go +++ b/beacon-chain/core/feed/operation/events.go @@ -2,10 +2,10 @@ package operation import ( - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" ) const ( diff --git a/beacon-chain/core/feed/operation/notifier.go b/beacon-chain/core/feed/operation/notifier.go index 3ee0ffee3d..97cdc12733 100644 --- a/beacon-chain/core/feed/operation/notifier.go +++ b/beacon-chain/core/feed/operation/notifier.go @@ -1,6 +1,6 @@ package operation -import "github.com/OffchainLabs/prysm/v6/async/event" +import "github.com/OffchainLabs/prysm/v7/async/event" // Notifier interface defines the methods of the service that provides beacon block operation updates to consumers. 
type Notifier interface { diff --git a/beacon-chain/core/feed/state/BUILD.bazel b/beacon-chain/core/feed/state/BUILD.bazel index 1ec7254d98..321e44a043 100644 --- a/beacon-chain/core/feed/state/BUILD.bazel +++ b/beacon-chain/core/feed/state/BUILD.bazel @@ -6,7 +6,7 @@ go_library( "events.go", "notifier.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/beacon-chain/core/feed/state", + importpath = "github.com/OffchainLabs/prysm/v7/beacon-chain/core/feed/state", visibility = [ "//beacon-chain:__subpackages__", "//testing/slasher/simulator:__subpackages__", diff --git a/beacon-chain/core/feed/state/events.go b/beacon-chain/core/feed/state/events.go index 7f0cd02d53..425c00318e 100644 --- a/beacon-chain/core/feed/state/events.go +++ b/beacon-chain/core/feed/state/events.go @@ -6,8 +6,8 @@ package state import ( "time" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" ) const ( diff --git a/beacon-chain/core/feed/state/notifier.go b/beacon-chain/core/feed/state/notifier.go index 2334e9b74b..4ae8d519fc 100644 --- a/beacon-chain/core/feed/state/notifier.go +++ b/beacon-chain/core/feed/state/notifier.go @@ -1,6 +1,6 @@ package state -import "github.com/OffchainLabs/prysm/v6/async/event" +import "github.com/OffchainLabs/prysm/v7/async/event" // Notifier interface defines the methods of the service that provides state updates to consumers. type Notifier interface { diff --git a/beacon-chain/core/fulu/BUILD.bazel b/beacon-chain/core/fulu/BUILD.bazel index 40bd2392ea..4b92c34966 100644 --- a/beacon-chain/core/fulu/BUILD.bazel +++ b/beacon-chain/core/fulu/BUILD.bazel @@ -6,7 +6,7 @@ go_library( "transition.go", "upgrade.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/beacon-chain/core/fulu", + importpath = "github.com/OffchainLabs/prysm/v7/beacon-chain/core/fulu", visibility = ["//visibility:public"], deps = [ "//beacon-chain/core/electra:go_default_library", diff --git a/beacon-chain/core/fulu/transition.go b/beacon-chain/core/fulu/transition.go index c3a75cb356..97127e0c04 100644 --- a/beacon-chain/core/fulu/transition.go +++ b/beacon-chain/core/fulu/transition.go @@ -3,12 +3,12 @@ package fulu import ( "context" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/electra" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing/trace" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/electra" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing/trace" + "github.com/OffchainLabs/prysm/v7/time/slots" "github.com/pkg/errors" ) diff --git a/beacon-chain/core/fulu/transition_test.go b/beacon-chain/core/fulu/transition_test.go index a1f5695c2e..125bf12d2a 100644 --- a/beacon-chain/core/fulu/transition_test.go +++ b/beacon-chain/core/fulu/transition_test.go @@ -3,10 +3,10 @@ package fulu_test import ( "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/fulu" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/testing/require" - 
"github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/fulu" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" ) func TestProcessEpoch_CanProcessFulu(t *testing.T) { diff --git a/beacon-chain/core/fulu/upgrade.go b/beacon-chain/core/fulu/upgrade.go index ce1abff635..33ece5aecc 100644 --- a/beacon-chain/core/fulu/upgrade.go +++ b/beacon-chain/core/fulu/upgrade.go @@ -3,15 +3,15 @@ package fulu import ( "context" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/time" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - enginev1 "github.com/OffchainLabs/prysm/v6/proto/engine/v1" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/time" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + enginev1 "github.com/OffchainLabs/prysm/v7/proto/engine/v1" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/time/slots" "github.com/pkg/errors" ) diff --git a/beacon-chain/core/fulu/upgrade_test.go b/beacon-chain/core/fulu/upgrade_test.go index ed82a6d3ae..a6f465d166 100644 --- a/beacon-chain/core/fulu/upgrade_test.go +++ b/beacon-chain/core/fulu/upgrade_test.go @@ -3,14 +3,14 @@ package fulu_test import ( "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/fulu" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/time" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - enginev1 "github.com/OffchainLabs/prysm/v6/proto/engine/v1" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/fulu" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/time" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + enginev1 "github.com/OffchainLabs/prysm/v7/proto/engine/v1" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" ) func TestUpgradeToFulu(t *testing.T) { diff --git a/beacon-chain/core/helpers/BUILD.bazel b/beacon-chain/core/helpers/BUILD.bazel index 4c014c3154..86cd9684ac 100644 --- a/beacon-chain/core/helpers/BUILD.bazel +++ b/beacon-chain/core/helpers/BUILD.bazel @@ -18,7 +18,7 @@ go_library( "validators.go", "weak_subjectivity.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers", + importpath = "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers", visibility = ["//visibility:public"], deps = [ "//beacon-chain/cache:go_default_library", diff --git a/beacon-chain/core/helpers/attestation.go 
b/beacon-chain/core/helpers/attestation.go index a91b6a237f..f0ea5ffa25 100644 --- a/beacon-chain/core/helpers/attestation.go +++ b/beacon-chain/core/helpers/attestation.go @@ -6,11 +6,11 @@ import ( "fmt" "time" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/crypto/hash" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/crypto/hash" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/time/slots" ) var ( diff --git a/beacon-chain/core/helpers/attestation_test.go b/beacon-chain/core/helpers/attestation_test.go index 66d94bd774..16d86f8e1e 100644 --- a/beacon-chain/core/helpers/attestation_test.go +++ b/beacon-chain/core/helpers/attestation_test.go @@ -5,16 +5,16 @@ import ( "testing" "time" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" - prysmTime "github.com/OffchainLabs/prysm/v6/time" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" + prysmTime "github.com/OffchainLabs/prysm/v7/time" + "github.com/OffchainLabs/prysm/v7/time/slots" ) func TestAttestation_IsAggregator(t *testing.T) { diff --git a/beacon-chain/core/helpers/beacon_committee.go b/beacon-chain/core/helpers/beacon_committee.go index 198832d681..fd04aef284 100644 --- a/beacon-chain/core/helpers/beacon_committee.go +++ b/beacon-chain/core/helpers/beacon_committee.go @@ -8,21 +8,21 @@ import ( "sort" "github.com/OffchainLabs/go-bitfield" - "github.com/OffchainLabs/prysm/v6/beacon-chain/cache" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/time" - forkchoicetypes "github.com/OffchainLabs/prysm/v6/beacon-chain/forkchoice/types" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/container/slice" - "github.com/OffchainLabs/prysm/v6/crypto/hash" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/math" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing/trace" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/beacon-chain/cache" + 
"github.com/OffchainLabs/prysm/v7/beacon-chain/core/time" + forkchoicetypes "github.com/OffchainLabs/prysm/v7/beacon-chain/forkchoice/types" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/container/slice" + "github.com/OffchainLabs/prysm/v7/crypto/hash" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/math" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing/trace" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" + "github.com/OffchainLabs/prysm/v7/time/slots" "github.com/pkg/errors" ) diff --git a/beacon-chain/core/helpers/beacon_committee_test.go b/beacon-chain/core/helpers/beacon_committee_test.go index efb3214aa0..b805b63a56 100644 --- a/beacon-chain/core/helpers/beacon_committee_test.go +++ b/beacon-chain/core/helpers/beacon_committee_test.go @@ -6,19 +6,19 @@ import ( "testing" "github.com/OffchainLabs/go-bitfield" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/time" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/container/slice" - "github.com/OffchainLabs/prysm/v6/crypto/hash" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/time" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/container/slice" + "github.com/OffchainLabs/prysm/v7/crypto/hash" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" + "github.com/OffchainLabs/prysm/v7/time/slots" ) func TestComputeCommittee_WithoutCache(t *testing.T) { diff --git a/beacon-chain/core/helpers/block.go b/beacon-chain/core/helpers/block.go index dc2f6edc0f..7f2aa31c81 100644 --- a/beacon-chain/core/helpers/block.go +++ b/beacon-chain/core/helpers/block.go @@ -3,10 +3,10 @@ package helpers import ( "math" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/time/slots" "github.com/pkg/errors" ) diff --git a/beacon-chain/core/helpers/block_test.go b/beacon-chain/core/helpers/block_test.go index 
777d83e2c0..2f33419aaa 100644 --- a/beacon-chain/core/helpers/block_test.go +++ b/beacon-chain/core/helpers/block_test.go @@ -5,13 +5,13 @@ import ( "math" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" ) func TestBlockRootAtSlot_CorrectBlockRoot(t *testing.T) { diff --git a/beacon-chain/core/helpers/genesis.go b/beacon-chain/core/helpers/genesis.go index 482c270129..1b750b520e 100644 --- a/beacon-chain/core/helpers/genesis.go +++ b/beacon-chain/core/helpers/genesis.go @@ -3,10 +3,10 @@ package helpers import ( "fmt" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/container/trie" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/container/trie" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" "github.com/pkg/errors" ) diff --git a/beacon-chain/core/helpers/legacy.go b/beacon-chain/core/helpers/legacy.go index 8d51316d2e..93933b735e 100644 --- a/beacon-chain/core/helpers/legacy.go +++ b/beacon-chain/core/helpers/legacy.go @@ -1,8 +1,8 @@ package helpers import ( - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/runtime/version" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/runtime/version" ) // DepositRequestsStarted determines if the deposit requests have started. 
diff --git a/beacon-chain/core/helpers/legacy_test.go b/beacon-chain/core/helpers/legacy_test.go index 5d1218d1db..de9c23db3d 100644 --- a/beacon-chain/core/helpers/legacy_test.go +++ b/beacon-chain/core/helpers/legacy_test.go @@ -3,8 +3,8 @@ package helpers_test import ( "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/testing/util" "github.com/stretchr/testify/require" ) diff --git a/beacon-chain/core/helpers/private_access_fuzz_noop_test.go b/beacon-chain/core/helpers/private_access_fuzz_noop_test.go index f45e006faa..7b9ffa628c 100644 --- a/beacon-chain/core/helpers/private_access_fuzz_noop_test.go +++ b/beacon-chain/core/helpers/private_access_fuzz_noop_test.go @@ -2,7 +2,7 @@ package helpers -import "github.com/OffchainLabs/prysm/v6/beacon-chain/cache" +import "github.com/OffchainLabs/prysm/v7/beacon-chain/cache" func CommitteeCache() *cache.FakeCommitteeCache { return committeeCache diff --git a/beacon-chain/core/helpers/private_access_test.go b/beacon-chain/core/helpers/private_access_test.go index f72d52c51c..a453b6223e 100644 --- a/beacon-chain/core/helpers/private_access_test.go +++ b/beacon-chain/core/helpers/private_access_test.go @@ -2,7 +2,7 @@ package helpers -import "github.com/OffchainLabs/prysm/v6/beacon-chain/cache" +import "github.com/OffchainLabs/prysm/v7/beacon-chain/cache" func CommitteeCache() *cache.CommitteeCache { return committeeCache diff --git a/beacon-chain/core/helpers/randao.go b/beacon-chain/core/helpers/randao.go index 5a05a6029e..2ed0bf9122 100644 --- a/beacon-chain/core/helpers/randao.go +++ b/beacon-chain/core/helpers/randao.go @@ -1,12 +1,12 @@ package helpers import ( - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/crypto/bls" - "github.com/OffchainLabs/prysm/v6/crypto/hash" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/crypto/bls" + "github.com/OffchainLabs/prysm/v7/crypto/hash" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" ) // Seed returns the randao seed used for shuffling of a given epoch. 
diff --git a/beacon-chain/core/helpers/randao_test.go b/beacon-chain/core/helpers/randao_test.go index 8d1a5f3bf9..d9476ff49e 100644 --- a/beacon-chain/core/helpers/randao_test.go +++ b/beacon-chain/core/helpers/randao_test.go @@ -4,14 +4,14 @@ import ( "encoding/binary" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" ) func TestRandaoMix_OK(t *testing.T) { diff --git a/beacon-chain/core/helpers/ranges_test.go b/beacon-chain/core/helpers/ranges_test.go index 34b1aac52d..16c3af53ec 100644 --- a/beacon-chain/core/helpers/ranges_test.go +++ b/beacon-chain/core/helpers/ranges_test.go @@ -3,8 +3,8 @@ package helpers_test import ( "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/testing/require" ) func TestSortedSliceFromMap(t *testing.T) { diff --git a/beacon-chain/core/helpers/rewards_penalties.go b/beacon-chain/core/helpers/rewards_penalties.go index 86ecf4ff63..85b59d37b0 100644 --- a/beacon-chain/core/helpers/rewards_penalties.go +++ b/beacon-chain/core/helpers/rewards_penalties.go @@ -3,12 +3,12 @@ package helpers import ( "errors" - "github.com/OffchainLabs/prysm/v6/beacon-chain/cache" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - mathutil "github.com/OffchainLabs/prysm/v6/math" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/beacon-chain/cache" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + mathutil "github.com/OffchainLabs/prysm/v7/math" + "github.com/OffchainLabs/prysm/v7/time/slots" ) var balanceCache = cache.NewEffectiveBalanceCache() diff --git a/beacon-chain/core/helpers/rewards_penalties_test.go b/beacon-chain/core/helpers/rewards_penalties_test.go index 64192ad352..5cedac6ef2 100644 --- a/beacon-chain/core/helpers/rewards_penalties_test.go +++ b/beacon-chain/core/helpers/rewards_penalties_test.go @@ -4,14 +4,14 @@ import ( "math" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/time" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - ethpb 
"github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/time" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" ) func TestTotalBalance_OK(t *testing.T) { diff --git a/beacon-chain/core/helpers/shuffle.go b/beacon-chain/core/helpers/shuffle.go index bd5a35825e..e10053bdbe 100644 --- a/beacon-chain/core/helpers/shuffle.go +++ b/beacon-chain/core/helpers/shuffle.go @@ -4,11 +4,11 @@ import ( "encoding/binary" "fmt" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/container/slice" - "github.com/OffchainLabs/prysm/v6/crypto/hash" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/container/slice" + "github.com/OffchainLabs/prysm/v7/crypto/hash" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" ) const seedSize = int8(32) diff --git a/beacon-chain/core/helpers/shuffle_test.go b/beacon-chain/core/helpers/shuffle_test.go index 0720d14962..d1e985b38f 100644 --- a/beacon-chain/core/helpers/shuffle_test.go +++ b/beacon-chain/core/helpers/shuffle_test.go @@ -5,11 +5,11 @@ import ( "reflect" "testing" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/container/slice" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/container/slice" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" ) func TestShuffleList_InvalidValidatorCount(t *testing.T) { diff --git a/beacon-chain/core/helpers/sync_committee.go b/beacon-chain/core/helpers/sync_committee.go index 961aafbf5b..53bca04ac6 100644 --- a/beacon-chain/core/helpers/sync_committee.go +++ b/beacon-chain/core/helpers/sync_committee.go @@ -5,14 +5,14 @@ package helpers import ( "bytes" - "github.com/OffchainLabs/prysm/v6/beacon-chain/cache" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/time" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/crypto/hash" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/beacon-chain/cache" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/time" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/crypto/hash" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + 
"github.com/OffchainLabs/prysm/v7/time/slots" "github.com/pkg/errors" log "github.com/sirupsen/logrus" ) diff --git a/beacon-chain/core/helpers/sync_committee_test.go b/beacon-chain/core/helpers/sync_committee_test.go index cdc11c2f95..8b328d90c1 100644 --- a/beacon-chain/core/helpers/sync_committee_test.go +++ b/beacon-chain/core/helpers/sync_committee_test.go @@ -6,15 +6,15 @@ import ( "testing" "time" - "github.com/OffchainLabs/prysm/v6/beacon-chain/cache" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/beacon-chain/cache" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" ) func TestCurrentPeriodPositions(t *testing.T) { diff --git a/beacon-chain/core/helpers/validator_churn.go b/beacon-chain/core/helpers/validator_churn.go index 14ec97cd7f..2314398c11 100644 --- a/beacon-chain/core/helpers/validator_churn.go +++ b/beacon-chain/core/helpers/validator_churn.go @@ -1,8 +1,8 @@ package helpers import ( - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" ) // BalanceChurnLimit for the current active balance, in gwei. 
diff --git a/beacon-chain/core/helpers/validator_churn_test.go b/beacon-chain/core/helpers/validator_churn_test.go index f0a059c62e..379f1f82b6 100644 --- a/beacon-chain/core/helpers/validator_churn_test.go +++ b/beacon-chain/core/helpers/validator_churn_test.go @@ -3,10 +3,10 @@ package helpers_test import ( "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/testing/assert" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/testing/assert" ) func TestBalanceChurnLimit(t *testing.T) { diff --git a/beacon-chain/core/helpers/validators.go b/beacon-chain/core/helpers/validators.go index 0bfe30c4e4..171b484bce 100644 --- a/beacon-chain/core/helpers/validators.go +++ b/beacon-chain/core/helpers/validators.go @@ -5,19 +5,19 @@ import ( "context" "encoding/binary" - "github.com/OffchainLabs/prysm/v6/beacon-chain/cache" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/time" - forkchoicetypes "github.com/OffchainLabs/prysm/v6/beacon-chain/forkchoice/types" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/crypto/hash" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing/trace" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/beacon-chain/cache" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/time" + forkchoicetypes "github.com/OffchainLabs/prysm/v7/beacon-chain/forkchoice/types" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/crypto/hash" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing/trace" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" + "github.com/OffchainLabs/prysm/v7/time/slots" "github.com/pkg/errors" "github.com/prometheus/client_golang/prometheus" "github.com/prometheus/client_golang/prometheus/promauto" diff --git a/beacon-chain/core/helpers/validators_test.go b/beacon-chain/core/helpers/validators_test.go index 45bde7fa3e..a449bb2a42 100644 --- a/beacon-chain/core/helpers/validators_test.go +++ b/beacon-chain/core/helpers/validators_test.go @@ -4,20 +4,20 @@ import ( "errors" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/cache" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/time" - forkchoicetypes "github.com/OffchainLabs/prysm/v6/beacon-chain/forkchoice/types" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - 
"github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/crypto/hash" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/beacon-chain/cache" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/time" + forkchoicetypes "github.com/OffchainLabs/prysm/v7/beacon-chain/forkchoice/types" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/crypto/hash" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" ) func TestIsActiveValidator_OK(t *testing.T) { diff --git a/beacon-chain/core/helpers/weak_subjectivity.go b/beacon-chain/core/helpers/weak_subjectivity.go index 1280f89f6c..7a0b4453b4 100644 --- a/beacon-chain/core/helpers/weak_subjectivity.go +++ b/beacon-chain/core/helpers/weak_subjectivity.go @@ -8,14 +8,14 @@ import ( "strconv" "strings" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/time" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - v1alpha1 "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/time" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + v1alpha1 "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/time/slots" ) // ComputeWeakSubjectivityPeriod returns weak subjectivity period for the active validator count and finalized epoch. 
diff --git a/beacon-chain/core/helpers/weak_subjectivity_test.go b/beacon-chain/core/helpers/weak_subjectivity_test.go index cbe788541a..582271f328 100644 --- a/beacon-chain/core/helpers/weak_subjectivity_test.go +++ b/beacon-chain/core/helpers/weak_subjectivity_test.go @@ -4,15 +4,15 @@ import ( "fmt" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" ) func TestWeakSubjectivity_ComputeWeakSubjectivityPeriod(t *testing.T) { diff --git a/beacon-chain/core/peerdas/BUILD.bazel b/beacon-chain/core/peerdas/BUILD.bazel index 72fcbbae9e..82dfdf3c5d 100644 --- a/beacon-chain/core/peerdas/BUILD.bazel +++ b/beacon-chain/core/peerdas/BUILD.bazel @@ -11,7 +11,7 @@ go_library( "validator.go", "verification.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/beacon-chain/core/peerdas", + importpath = "github.com/OffchainLabs/prysm/v7/beacon-chain/core/peerdas", visibility = ["//visibility:public"], deps = [ "//beacon-chain/blockchain/kzg:go_default_library", diff --git a/beacon-chain/core/peerdas/das_core.go b/beacon-chain/core/peerdas/das_core.go index 0d8a357b49..0dd11fb6c2 100644 --- a/beacon-chain/core/peerdas/das_core.go +++ b/beacon-chain/core/peerdas/das_core.go @@ -5,9 +5,9 @@ import ( "math" "slices" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/crypto/hash" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/crypto/hash" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" "github.com/ethereum/go-ethereum/p2p/enode" "github.com/holiman/uint256" "github.com/pkg/errors" diff --git a/beacon-chain/core/peerdas/das_core_test.go b/beacon-chain/core/peerdas/das_core_test.go index 752cfb01ac..14fe7873eb 100644 --- a/beacon-chain/core/peerdas/das_core_test.go +++ b/beacon-chain/core/peerdas/das_core_test.go @@ -3,9 +3,9 @@ package peerdas_test import ( "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/peerdas" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/peerdas" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/testing/require" "github.com/ethereum/go-ethereum/p2p/enode" ) diff --git a/beacon-chain/core/peerdas/info_test.go b/beacon-chain/core/peerdas/info_test.go index 58af9ecf36..f7f45faea2 100644 --- a/beacon-chain/core/peerdas/info_test.go +++ b/beacon-chain/core/peerdas/info_test.go @@ -3,8 +3,8 @@ package peerdas_test import ( "testing" - 
"github.com/OffchainLabs/prysm/v6/beacon-chain/core/peerdas" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/peerdas" + "github.com/OffchainLabs/prysm/v7/testing/require" "github.com/ethereum/go-ethereum/p2p/enode" ) diff --git a/beacon-chain/core/peerdas/p2p_interface.go b/beacon-chain/core/peerdas/p2p_interface.go index 942c278de1..09629f1f1a 100644 --- a/beacon-chain/core/peerdas/p2p_interface.go +++ b/beacon-chain/core/peerdas/p2p_interface.go @@ -1,11 +1,11 @@ package peerdas import ( - "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/kzg" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/container/trie" + "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain/kzg" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/container/trie" "github.com/ethereum/go-ethereum/p2p/enr" "github.com/pkg/errors" ) diff --git a/beacon-chain/core/peerdas/p2p_interface_test.go b/beacon-chain/core/peerdas/p2p_interface_test.go index 882690af71..559556e266 100644 --- a/beacon-chain/core/peerdas/p2p_interface_test.go +++ b/beacon-chain/core/peerdas/p2p_interface_test.go @@ -5,15 +5,15 @@ import ( "fmt" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/kzg" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/peerdas" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - enginev1 "github.com/OffchainLabs/prysm/v6/proto/engine/v1" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain/kzg" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/peerdas" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + enginev1 "github.com/OffchainLabs/prysm/v7/proto/engine/v1" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" "github.com/ethereum/go-ethereum/p2p/enr" ) diff --git a/beacon-chain/core/peerdas/reconstruction.go b/beacon-chain/core/peerdas/reconstruction.go index 5a9663bace..455d892b71 100644 --- a/beacon-chain/core/peerdas/reconstruction.go +++ b/beacon-chain/core/peerdas/reconstruction.go @@ -3,12 +3,12 @@ package peerdas import ( "sort" - "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/kzg" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - pb "github.com/OffchainLabs/prysm/v6/proto/engine/v1" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain/kzg" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + pb 
"github.com/OffchainLabs/prysm/v7/proto/engine/v1" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" "github.com/pkg/errors" "golang.org/x/sync/errgroup" ) diff --git a/beacon-chain/core/peerdas/reconstruction_test.go b/beacon-chain/core/peerdas/reconstruction_test.go index 88fe79d178..b1d7525125 100644 --- a/beacon-chain/core/peerdas/reconstruction_test.go +++ b/beacon-chain/core/peerdas/reconstruction_test.go @@ -4,14 +4,14 @@ import ( "encoding/binary" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/kzg" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/peerdas" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - pb "github.com/OffchainLabs/prysm/v6/proto/engine/v1" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain/kzg" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/peerdas" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + pb "github.com/OffchainLabs/prysm/v7/proto/engine/v1" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" "github.com/pkg/errors" "golang.org/x/sync/errgroup" ) diff --git a/beacon-chain/core/peerdas/utils_test.go b/beacon-chain/core/peerdas/utils_test.go index 7e340de9bf..f9a6979fc1 100644 --- a/beacon-chain/core/peerdas/utils_test.go +++ b/beacon-chain/core/peerdas/utils_test.go @@ -5,7 +5,7 @@ import ( "crypto/sha256" "encoding/binary" - "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/kzg" + "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain/kzg" "github.com/consensys/gnark-crypto/ecc/bls12-381/fr" GoKZG "github.com/crate-crypto/go-kzg-4844" "github.com/pkg/errors" diff --git a/beacon-chain/core/peerdas/validator.go b/beacon-chain/core/peerdas/validator.go index 65575aef46..a83aa574a6 100644 --- a/beacon-chain/core/peerdas/validator.go +++ b/beacon-chain/core/peerdas/validator.go @@ -3,13 +3,13 @@ package peerdas import ( "time" - "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/kzg" - beaconState "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain/kzg" + beaconState "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" "github.com/pkg/errors" ) diff --git a/beacon-chain/core/peerdas/validator_test.go b/beacon-chain/core/peerdas/validator_test.go index e7923747af..9c474400a9 100644 --- a/beacon-chain/core/peerdas/validator_test.go +++ b/beacon-chain/core/peerdas/validator_test.go @@ -3,15 +3,15 @@ package peerdas_test import ( "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/kzg" - 
"github.com/OffchainLabs/prysm/v6/beacon-chain/core/peerdas" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain/kzg" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/peerdas" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" ) func TestValidatorsCustodyRequirement(t *testing.T) { diff --git a/beacon-chain/core/peerdas/verification.go b/beacon-chain/core/peerdas/verification.go index 781610f219..869c56ffa1 100644 --- a/beacon-chain/core/peerdas/verification.go +++ b/beacon-chain/core/peerdas/verification.go @@ -3,9 +3,9 @@ package peerdas import ( "bytes" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/runtime/version" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/runtime/version" "github.com/pkg/errors" ) diff --git a/beacon-chain/core/peerdas/verification_test.go b/beacon-chain/core/peerdas/verification_test.go index 8f9d89859a..6f8ba1b984 100644 --- a/beacon-chain/core/peerdas/verification_test.go +++ b/beacon-chain/core/peerdas/verification_test.go @@ -3,12 +3,12 @@ package peerdas_test import ( "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/kzg" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/peerdas" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain/kzg" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/peerdas" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" ) func TestDataColumnsAlignWithBlock(t *testing.T) { diff --git a/beacon-chain/core/signing/BUILD.bazel b/beacon-chain/core/signing/BUILD.bazel index 1048d59de1..bb4753267b 100644 --- a/beacon-chain/core/signing/BUILD.bazel +++ b/beacon-chain/core/signing/BUILD.bazel @@ -6,7 +6,7 @@ go_library( "domain.go", "signing_root.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/beacon-chain/core/signing", + importpath = "github.com/OffchainLabs/prysm/v7/beacon-chain/core/signing", visibility = ["//visibility:public"], deps = [ "//beacon-chain/state:go_default_library", diff --git a/beacon-chain/core/signing/domain.go b/beacon-chain/core/signing/domain.go index d5bb27dfa3..16e31d26cd 100644 --- a/beacon-chain/core/signing/domain.go +++ b/beacon-chain/core/signing/domain.go @@ -1,9 +1,9 @@ package signing 
import ( - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/crypto/bls" - eth "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/crypto/bls" + eth "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" "github.com/pkg/errors" ) diff --git a/beacon-chain/core/signing/domain_test.go b/beacon-chain/core/signing/domain_test.go index 0412f48348..6ea45b2b0d 100644 --- a/beacon-chain/core/signing/domain_test.go +++ b/beacon-chain/core/signing/domain_test.go @@ -3,11 +3,11 @@ package signing import ( "testing" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - eth "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + eth "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" ) func TestDomain_OK(t *testing.T) { diff --git a/beacon-chain/core/signing/signing_root.go b/beacon-chain/core/signing/signing_root.go index c3e6b0870a..392bb5b2e4 100644 --- a/beacon-chain/core/signing/signing_root.go +++ b/beacon-chain/core/signing/signing_root.go @@ -3,12 +3,12 @@ package signing import ( "sync" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/crypto/bls" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/crypto/bls" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" "github.com/pkg/errors" fssz "github.com/prysmaticlabs/fastssz" ) diff --git a/beacon-chain/core/signing/signing_root_test.go b/beacon-chain/core/signing/signing_root_test.go index 1f2b1639a1..ba42b1f777 100644 --- a/beacon-chain/core/signing/signing_root_test.go +++ b/beacon-chain/core/signing/signing_root_test.go @@ -4,18 +4,18 @@ import ( "bytes" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/signing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/time" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/crypto/bls" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/signing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/time" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + fieldparams 
"github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/crypto/bls" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" "github.com/ethereum/go-ethereum/common/hexutil" fuzz "github.com/google/gofuzz" ) diff --git a/beacon-chain/core/time/BUILD.bazel b/beacon-chain/core/time/BUILD.bazel index 220c182dc3..8e26aabd29 100644 --- a/beacon-chain/core/time/BUILD.bazel +++ b/beacon-chain/core/time/BUILD.bazel @@ -3,7 +3,7 @@ load("@prysm//tools/go:def.bzl", "go_library", "go_test") go_library( name = "go_default_library", srcs = ["slot_epoch.go"], - importpath = "github.com/OffchainLabs/prysm/v6/beacon-chain/core/time", + importpath = "github.com/OffchainLabs/prysm/v7/beacon-chain/core/time", visibility = ["//visibility:public"], deps = [ "//beacon-chain/state:go_default_library", diff --git a/beacon-chain/core/time/slot_epoch.go b/beacon-chain/core/time/slot_epoch.go index 20cbe86071..06319fd2e7 100644 --- a/beacon-chain/core/time/slot_epoch.go +++ b/beacon-chain/core/time/slot_epoch.go @@ -1,11 +1,11 @@ package time import ( - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/runtime/version" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/runtime/version" + "github.com/OffchainLabs/prysm/v7/time/slots" ) // CurrentEpoch returns the current epoch number calculated from diff --git a/beacon-chain/core/time/slot_epoch_test.go b/beacon-chain/core/time/slot_epoch_test.go index ae8b16cb13..a049094986 100644 --- a/beacon-chain/core/time/slot_epoch_test.go +++ b/beacon-chain/core/time/slot_epoch_test.go @@ -4,16 +4,16 @@ import ( "fmt" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/time" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - eth "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/time" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + eth "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" + "github.com/OffchainLabs/prysm/v7/time/slots" ) func TestSlotToEpoch_OK(t *testing.T) { diff --git a/beacon-chain/core/transition/BUILD.bazel b/beacon-chain/core/transition/BUILD.bazel index f41201944e..d097e3453a 
100644 --- a/beacon-chain/core/transition/BUILD.bazel +++ b/beacon-chain/core/transition/BUILD.bazel @@ -11,7 +11,7 @@ go_library( "transition.go", "transition_no_verify_sig.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/beacon-chain/core/transition", + importpath = "github.com/OffchainLabs/prysm/v7/beacon-chain/core/transition", visibility = ["//visibility:public"], deps = [ "//beacon-chain/cache:go_default_library", diff --git a/beacon-chain/core/transition/altair_transition_no_verify_sig_test.go b/beacon-chain/core/transition/altair_transition_no_verify_sig_test.go index 4eab874cc5..1dd3c4655a 100644 --- a/beacon-chain/core/transition/altair_transition_no_verify_sig_test.go +++ b/beacon-chain/core/transition/altair_transition_no_verify_sig_test.go @@ -5,22 +5,22 @@ import ( "testing" "github.com/OffchainLabs/go-bitfield" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/altair" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/signing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/time" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/transition" - p2pType "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/types" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/crypto/bls" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/altair" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/signing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/time" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/transition" + p2pType "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/types" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/crypto/bls" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" + "github.com/OffchainLabs/prysm/v7/time/slots" ) func TestExecuteAltairStateTransitionNoVerify_FullProcess(t *testing.T) { diff --git a/beacon-chain/core/transition/bellatrix_transition_no_verify_sig_test.go b/beacon-chain/core/transition/bellatrix_transition_no_verify_sig_test.go index 4fb6e13495..8c6433346c 100644 --- a/beacon-chain/core/transition/bellatrix_transition_no_verify_sig_test.go +++ b/beacon-chain/core/transition/bellatrix_transition_no_verify_sig_test.go @@ -5,24 +5,24 @@ import ( "testing" "github.com/OffchainLabs/go-bitfield" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/altair" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/signing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/time" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/transition" - p2pType 
"github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/types" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/crypto/bls" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - enginev1 "github.com/OffchainLabs/prysm/v6/proto/engine/v1" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/altair" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/signing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/time" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/transition" + p2pType "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/types" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/crypto/bls" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + enginev1 "github.com/OffchainLabs/prysm/v7/proto/engine/v1" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" + "github.com/OffchainLabs/prysm/v7/time/slots" ) func TestExecuteBellatrixStateTransitionNoVerify_FullProcess(t *testing.T) { diff --git a/beacon-chain/core/transition/benchmarks_test.go b/beacon-chain/core/transition/benchmarks_test.go index 0b70b85436..0422390963 100644 --- a/beacon-chain/core/transition/benchmarks_test.go +++ b/beacon-chain/core/transition/benchmarks_test.go @@ -3,16 +3,16 @@ package transition_test import ( "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/time" - coreState "github.com/OffchainLabs/prysm/v6/beacon-chain/core/transition" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/benchmark" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/time" + coreState "github.com/OffchainLabs/prysm/v7/beacon-chain/core/transition" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/benchmark" + "github.com/OffchainLabs/prysm/v7/testing/require" "google.golang.org/protobuf/proto" ) diff --git a/beacon-chain/core/transition/interop/BUILD.bazel 
b/beacon-chain/core/transition/interop/BUILD.bazel index 8d9f6ace83..48e223ddcd 100644 --- a/beacon-chain/core/transition/interop/BUILD.bazel +++ b/beacon-chain/core/transition/interop/BUILD.bazel @@ -7,7 +7,7 @@ go_library( "write_block_to_disk.go", "write_state_to_disk.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/beacon-chain/core/transition/interop", + importpath = "github.com/OffchainLabs/prysm/v7/beacon-chain/core/transition/interop", visibility = [ "//beacon-chain:__subpackages__", "//tools:__subpackages__", diff --git a/beacon-chain/core/transition/interop/write_block_to_disk.go b/beacon-chain/core/transition/interop/write_block_to_disk.go index f48aa8828f..c71912865f 100644 --- a/beacon-chain/core/transition/interop/write_block_to_disk.go +++ b/beacon-chain/core/transition/interop/write_block_to_disk.go @@ -5,9 +5,9 @@ import ( "os" "path" - "github.com/OffchainLabs/prysm/v6/config/features" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - "github.com/OffchainLabs/prysm/v6/io/file" + "github.com/OffchainLabs/prysm/v7/config/features" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + "github.com/OffchainLabs/prysm/v7/io/file" ) // WriteBlockToDisk as a block ssz. Writes to temp directory. Debug! diff --git a/beacon-chain/core/transition/interop/write_state_to_disk.go b/beacon-chain/core/transition/interop/write_state_to_disk.go index 5bbb49b73c..958fb1b029 100644 --- a/beacon-chain/core/transition/interop/write_state_to_disk.go +++ b/beacon-chain/core/transition/interop/write_state_to_disk.go @@ -5,9 +5,9 @@ import ( "os" "path" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/config/features" - "github.com/OffchainLabs/prysm/v6/io/file" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/config/features" + "github.com/OffchainLabs/prysm/v7/io/file" ) // WriteStateToDisk as a state ssz. Writes to temp directory. Debug! 
diff --git a/beacon-chain/core/transition/skip_slot_cache.go b/beacon-chain/core/transition/skip_slot_cache.go index d55973482b..a9002c2477 100644 --- a/beacon-chain/core/transition/skip_slot_cache.go +++ b/beacon-chain/core/transition/skip_slot_cache.go @@ -4,10 +4,10 @@ import ( "context" "errors" - "github.com/OffchainLabs/prysm/v6/beacon-chain/cache" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/crypto/hash" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/beacon-chain/cache" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/crypto/hash" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" ) // SkipSlotCache exists for the unlikely scenario that is a large gap between the head state and diff --git a/beacon-chain/core/transition/skip_slot_cache_test.go b/beacon-chain/core/transition/skip_slot_cache_test.go index 2184b090c2..f4458eba88 100644 --- a/beacon-chain/core/transition/skip_slot_cache_test.go +++ b/beacon-chain/core/transition/skip_slot_cache_test.go @@ -4,15 +4,15 @@ import ( "sync" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/transition" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/runtime/version" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/transition" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/runtime/version" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" ) func TestSkipSlotCache_OK(t *testing.T) { diff --git a/beacon-chain/core/transition/state-bellatrix.go b/beacon-chain/core/transition/state-bellatrix.go index b09a706dd9..55b106449a 100644 --- a/beacon-chain/core/transition/state-bellatrix.go +++ b/beacon-chain/core/transition/state-bellatrix.go @@ -3,16 +3,16 @@ package transition import ( "context" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/altair" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state/stateutil" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - enginev1 "github.com/OffchainLabs/prysm/v6/proto/engine/v1" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/altair" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state/stateutil" + fieldparams 
"github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + enginev1 "github.com/OffchainLabs/prysm/v7/proto/engine/v1" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" "github.com/pkg/errors" ) diff --git a/beacon-chain/core/transition/state.go b/beacon-chain/core/transition/state.go index db52e8ad60..bad69cd22e 100644 --- a/beacon-chain/core/transition/state.go +++ b/beacon-chain/core/transition/state.go @@ -3,15 +3,15 @@ package transition import ( "context" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/altair" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state/stateutil" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/container/trie" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/altair" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state/stateutil" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/container/trie" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" "github.com/pkg/errors" ) diff --git a/beacon-chain/core/transition/state_fuzz_test.go b/beacon-chain/core/transition/state_fuzz_test.go index 7419bf853f..da29755479 100644 --- a/beacon-chain/core/transition/state_fuzz_test.go +++ b/beacon-chain/core/transition/state_fuzz_test.go @@ -3,9 +3,9 @@ package transition import ( "testing" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/require" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/require" fuzz "github.com/google/gofuzz" ) diff --git a/beacon-chain/core/transition/state_test.go b/beacon-chain/core/transition/state_test.go index 1420873211..b136838355 100644 --- a/beacon-chain/core/transition/state_test.go +++ b/beacon-chain/core/transition/state_test.go @@ -3,15 +3,15 @@ package transition_test import ( "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/transition" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/crypto/hash" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/transition" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + "github.com/OffchainLabs/prysm/v7/config/params" + 
"github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/crypto/hash" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" "google.golang.org/protobuf/proto" ) diff --git a/beacon-chain/core/transition/stateutils/BUILD.bazel b/beacon-chain/core/transition/stateutils/BUILD.bazel index 9bb0636e91..8c9f657c6f 100644 --- a/beacon-chain/core/transition/stateutils/BUILD.bazel +++ b/beacon-chain/core/transition/stateutils/BUILD.bazel @@ -3,7 +3,7 @@ load("@prysm//tools/go:def.bzl", "go_library", "go_test") go_library( name = "go_default_library", srcs = ["validator_index_map.go"], - importpath = "github.com/OffchainLabs/prysm/v6/beacon-chain/core/transition/stateutils", + importpath = "github.com/OffchainLabs/prysm/v7/beacon-chain/core/transition/stateutils", visibility = [ "//beacon-chain:__subpackages__", "//testing/util:__pkg__", diff --git a/beacon-chain/core/transition/stateutils/validator_index_map.go b/beacon-chain/core/transition/stateutils/validator_index_map.go index 175a547f64..4b57b9df6c 100644 --- a/beacon-chain/core/transition/stateutils/validator_index_map.go +++ b/beacon-chain/core/transition/stateutils/validator_index_map.go @@ -4,10 +4,10 @@ package stateutils import ( - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" ) // ValidatorIndexMap builds a lookup map for quickly determining the index of diff --git a/beacon-chain/core/transition/stateutils/validator_index_map_test.go b/beacon-chain/core/transition/stateutils/validator_index_map_test.go index 2247589bdc..006b1a1cc5 100644 --- a/beacon-chain/core/transition/stateutils/validator_index_map_test.go +++ b/beacon-chain/core/transition/stateutils/validator_index_map_test.go @@ -3,14 +3,14 @@ package stateutils_test import ( "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/transition/stateutils" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/transition/stateutils" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" ) func TestValidatorIndexMap_OK(t *testing.T) { diff --git a/beacon-chain/core/transition/trailing_slot_state_cache.go 
b/beacon-chain/core/transition/trailing_slot_state_cache.go index 6862cac9e0..09da22785a 100644 --- a/beacon-chain/core/transition/trailing_slot_state_cache.go +++ b/beacon-chain/core/transition/trailing_slot_state_cache.go @@ -5,9 +5,9 @@ import ( "context" "sync" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - types "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + types "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" "github.com/pkg/errors" "github.com/prometheus/client_golang/prometheus" "github.com/prometheus/client_golang/prometheus/promauto" diff --git a/beacon-chain/core/transition/trailing_slot_state_cache_test.go b/beacon-chain/core/transition/trailing_slot_state_cache_test.go index 886fab18b2..28e93e98ed 100644 --- a/beacon-chain/core/transition/trailing_slot_state_cache_test.go +++ b/beacon-chain/core/transition/trailing_slot_state_cache_test.go @@ -3,11 +3,11 @@ package transition_test import ( "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/transition" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/transition" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" ) func TestTrailingSlotState_RoundTrip(t *testing.T) { diff --git a/beacon-chain/core/transition/transition.go b/beacon-chain/core/transition/transition.go index d103ad21f4..d9286dab75 100644 --- a/beacon-chain/core/transition/transition.go +++ b/beacon-chain/core/transition/transition.go @@ -8,26 +8,26 @@ import ( "context" "fmt" - "github.com/OffchainLabs/prysm/v6/beacon-chain/cache" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/altair" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/capella" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/deneb" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/electra" - e "github.com/OffchainLabs/prysm/v6/beacon-chain/core/epoch" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/epoch/precompute" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/execution" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/fulu" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/time" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/config/features" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing" - prysmTrace "github.com/OffchainLabs/prysm/v6/monitoring/tracing/trace" - "github.com/OffchainLabs/prysm/v6/runtime/version" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/beacon-chain/cache" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/altair" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/capella" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/deneb" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/electra" + e 
"github.com/OffchainLabs/prysm/v7/beacon-chain/core/epoch" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/epoch/precompute" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/execution" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/fulu" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/time" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/config/features" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing" + prysmTrace "github.com/OffchainLabs/prysm/v7/monitoring/tracing/trace" + "github.com/OffchainLabs/prysm/v7/runtime/version" + "github.com/OffchainLabs/prysm/v7/time/slots" "github.com/pkg/errors" "github.com/sirupsen/logrus" "go.opentelemetry.io/otel/trace" diff --git a/beacon-chain/core/transition/transition_fuzz_test.go b/beacon-chain/core/transition/transition_fuzz_test.go index 4642ab942b..78a688c6d6 100644 --- a/beacon-chain/core/transition/transition_fuzz_test.go +++ b/beacon-chain/core/transition/transition_fuzz_test.go @@ -3,12 +3,12 @@ package transition import ( "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/time" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/time" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/require" fuzz "github.com/google/gofuzz" ) diff --git a/beacon-chain/core/transition/transition_no_verify_sig.go b/beacon-chain/core/transition/transition_no_verify_sig.go index 4cfa37a789..2873301ae4 100644 --- a/beacon-chain/core/transition/transition_no_verify_sig.go +++ b/beacon-chain/core/transition/transition_no_verify_sig.go @@ -5,20 +5,20 @@ import ( "context" "fmt" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/altair" - b "github.com/OffchainLabs/prysm/v6/beacon-chain/core/blocks" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/electra" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/transition/interop" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/validators" - v "github.com/OffchainLabs/prysm/v6/beacon-chain/core/validators" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - "github.com/OffchainLabs/prysm/v6/crypto/bls" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing/trace" - "github.com/OffchainLabs/prysm/v6/runtime/version" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/altair" + b "github.com/OffchainLabs/prysm/v7/beacon-chain/core/blocks" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/electra" + 
"github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/transition/interop" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/validators" + v "github.com/OffchainLabs/prysm/v7/beacon-chain/core/validators" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + "github.com/OffchainLabs/prysm/v7/crypto/bls" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing/trace" + "github.com/OffchainLabs/prysm/v7/runtime/version" "github.com/pkg/errors" ) diff --git a/beacon-chain/core/transition/transition_no_verify_sig_test.go b/beacon-chain/core/transition/transition_no_verify_sig_test.go index 6aea1c03de..772feb4186 100644 --- a/beacon-chain/core/transition/transition_no_verify_sig_test.go +++ b/beacon-chain/core/transition/transition_no_verify_sig_test.go @@ -3,16 +3,16 @@ package transition_test import ( "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/time" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/transition" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/time" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/transition" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" ) func TestExecuteStateTransitionNoVerify_FullProcess(t *testing.T) { diff --git a/beacon-chain/core/transition/transition_test.go b/beacon-chain/core/transition/transition_test.go index bee546fd13..908e392465 100644 --- a/beacon-chain/core/transition/transition_test.go +++ b/beacon-chain/core/transition/transition_test.go @@ -5,25 +5,25 @@ import ( "testing" "github.com/OffchainLabs/go-bitfield" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/blocks" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/signing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/time" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/transition" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - consensusblocks "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/crypto/bls" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - 
"github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1/attestation" - "github.com/OffchainLabs/prysm/v6/runtime/version" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/blocks" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/signing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/time" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/transition" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + consensusblocks "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/crypto/bls" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1/attestation" + "github.com/OffchainLabs/prysm/v7/runtime/version" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" ) func init() { diff --git a/beacon-chain/core/validators/BUILD.bazel b/beacon-chain/core/validators/BUILD.bazel index 149f49c1c5..6efe999efb 100644 --- a/beacon-chain/core/validators/BUILD.bazel +++ b/beacon-chain/core/validators/BUILD.bazel @@ -6,7 +6,7 @@ go_library( "slashing.go", "validator.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/beacon-chain/core/validators", + importpath = "github.com/OffchainLabs/prysm/v7/beacon-chain/core/validators", visibility = [ "//beacon-chain:__subpackages__", "//testing/endtoend:__subpackages__", diff --git a/beacon-chain/core/validators/slashing.go b/beacon-chain/core/validators/slashing.go index 68387b062c..04777c3136 100644 --- a/beacon-chain/core/validators/slashing.go +++ b/beacon-chain/core/validators/slashing.go @@ -1,8 +1,8 @@ package validators import ( - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/runtime/version" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/runtime/version" "github.com/pkg/errors" ) diff --git a/beacon-chain/core/validators/slashing_test.go b/beacon-chain/core/validators/slashing_test.go index 3ec003bcff..970f82e1d2 100644 --- a/beacon-chain/core/validators/slashing_test.go +++ b/beacon-chain/core/validators/slashing_test.go @@ -3,8 +3,8 @@ package validators_test import ( "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/validators" - "github.com/OffchainLabs/prysm/v6/runtime/version" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/validators" + "github.com/OffchainLabs/prysm/v7/runtime/version" ) func TestSlashingParamsPerVersion_NoErrors(t *testing.T) { diff --git a/beacon-chain/core/validators/validator.go b/beacon-chain/core/validators/validator.go index 8306f2eb74..9e1079eaf1 100644 --- a/beacon-chain/core/validators/validator.go +++ b/beacon-chain/core/validators/validator.go @@ -7,15 +7,15 @@ package validators import ( "context" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/time" - 
"github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/math" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/time" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/math" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" + "github.com/OffchainLabs/prysm/v7/time/slots" "github.com/pkg/errors" ) diff --git a/beacon-chain/core/validators/validator_test.go b/beacon-chain/core/validators/validator_test.go index e0ec103235..8e2edde42a 100644 --- a/beacon-chain/core/validators/validator_test.go +++ b/beacon-chain/core/validators/validator_test.go @@ -3,17 +3,17 @@ package validators_test import ( "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/time" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/validators" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/time" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/validators" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/time/slots" ) func TestHasVoted_OK(t *testing.T) { diff --git a/beacon-chain/das/BUILD.bazel b/beacon-chain/das/BUILD.bazel index c9f3f1e270..9fd326fc14 100644 --- a/beacon-chain/das/BUILD.bazel +++ b/beacon-chain/das/BUILD.bazel @@ -9,7 +9,7 @@ go_library( "iface.go", "mock.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/beacon-chain/das", + importpath = "github.com/OffchainLabs/prysm/v7/beacon-chain/das", visibility = ["//visibility:public"], deps = [ "//beacon-chain/db/filesystem:go_default_library", diff --git a/beacon-chain/das/availability_blobs.go b/beacon-chain/das/availability_blobs.go index 9007cd1898..08e68d59f4 100644 --- a/beacon-chain/das/availability_blobs.go +++ b/beacon-chain/das/availability_blobs.go @@ -4,14 +4,14 @@ import ( "context" "fmt" - "github.com/OffchainLabs/prysm/v6/beacon-chain/db/filesystem" - "github.com/OffchainLabs/prysm/v6/beacon-chain/verification" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - 
"github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/runtime/logging" - "github.com/OffchainLabs/prysm/v6/runtime/version" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/beacon-chain/db/filesystem" + "github.com/OffchainLabs/prysm/v7/beacon-chain/verification" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/runtime/logging" + "github.com/OffchainLabs/prysm/v7/runtime/version" + "github.com/OffchainLabs/prysm/v7/time/slots" "github.com/pkg/errors" log "github.com/sirupsen/logrus" ) diff --git a/beacon-chain/das/availability_blobs_test.go b/beacon-chain/das/availability_blobs_test.go index 559eccad98..1f41e96aba 100644 --- a/beacon-chain/das/availability_blobs_test.go +++ b/beacon-chain/das/availability_blobs_test.go @@ -5,15 +5,15 @@ import ( "context" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/db/filesystem" - "github.com/OffchainLabs/prysm/v6/beacon-chain/verification" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/beacon-chain/db/filesystem" + "github.com/OffchainLabs/prysm/v7/beacon-chain/verification" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" + "github.com/OffchainLabs/prysm/v7/time/slots" errors "github.com/pkg/errors" ) diff --git a/beacon-chain/das/blob_cache.go b/beacon-chain/das/blob_cache.go index 16659188f1..3b44f62a5e 100644 --- a/beacon-chain/das/blob_cache.go +++ b/beacon-chain/das/blob_cache.go @@ -3,11 +3,11 @@ package das import ( "bytes" - "github.com/OffchainLabs/prysm/v6/beacon-chain/db/filesystem" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/beacon-chain/db/filesystem" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" "github.com/pkg/errors" ) diff --git a/beacon-chain/das/blob_cache_test.go b/beacon-chain/das/blob_cache_test.go index 615f4112cf..8b8e41d434 100644 --- a/beacon-chain/das/blob_cache_test.go +++ b/beacon-chain/das/blob_cache_test.go @@ -3,14 +3,14 @@ package das import ( "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/db/filesystem" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/testing/require" - 
"github.com/OffchainLabs/prysm/v6/testing/util" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/beacon-chain/db/filesystem" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" + "github.com/OffchainLabs/prysm/v7/time/slots" ) func TestCacheEnsureDelete(t *testing.T) { diff --git a/beacon-chain/das/data_column_cache.go b/beacon-chain/das/data_column_cache.go index e0ecf25a2b..b6e78140ba 100644 --- a/beacon-chain/das/data_column_cache.go +++ b/beacon-chain/das/data_column_cache.go @@ -4,9 +4,9 @@ import ( "bytes" "slices" - "github.com/OffchainLabs/prysm/v6/beacon-chain/db/filesystem" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/beacon-chain/db/filesystem" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" "github.com/pkg/errors" ) diff --git a/beacon-chain/das/data_column_cache_test.go b/beacon-chain/das/data_column_cache_test.go index 1fefd4a105..96154fabb6 100644 --- a/beacon-chain/das/data_column_cache_test.go +++ b/beacon-chain/das/data_column_cache_test.go @@ -3,11 +3,11 @@ package das import ( "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/db/filesystem" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/beacon-chain/db/filesystem" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" ) func TestEnsureDeleteSetDiskSummary(t *testing.T) { diff --git a/beacon-chain/das/iface.go b/beacon-chain/das/iface.go index 00358d8e1d..5826f04ad4 100644 --- a/beacon-chain/das/iface.go +++ b/beacon-chain/das/iface.go @@ -3,8 +3,8 @@ package das import ( "context" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" ) // AvailabilityStore describes a component that can verify and save sidecars for a given block, and confirm previously diff --git a/beacon-chain/das/mock.go b/beacon-chain/das/mock.go index 48b480e7d8..c9b65d90c6 100644 --- a/beacon-chain/das/mock.go +++ b/beacon-chain/das/mock.go @@ -3,8 +3,8 @@ package das import ( "context" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" ) // MockAvailabilityStore is an implementation of AvailabilityStore that can be used by other packages in tests. 
diff --git a/beacon-chain/db/BUILD.bazel b/beacon-chain/db/BUILD.bazel index e5502d7cf7..d29d8f3037 100644 --- a/beacon-chain/db/BUILD.bazel +++ b/beacon-chain/db/BUILD.bazel @@ -9,7 +9,7 @@ go_library( "log.go", "restore.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/beacon-chain/db", + importpath = "github.com/OffchainLabs/prysm/v7/beacon-chain/db", visibility = [ "//beacon-chain:__subpackages__", "//cmd/beacon-chain:__subpackages__", diff --git a/beacon-chain/db/alias.go b/beacon-chain/db/alias.go index 4bc0351274..f308d92c4e 100644 --- a/beacon-chain/db/alias.go +++ b/beacon-chain/db/alias.go @@ -1,6 +1,6 @@ package db -import "github.com/OffchainLabs/prysm/v6/beacon-chain/db/iface" +import "github.com/OffchainLabs/prysm/v7/beacon-chain/db/iface" // ReadOnlyDatabase exposes Prysm's Ethereum data backend for read access only, no information about // head info. For head info, use github.com/prysmaticlabs/prysm/blockchain.HeadFetcher. diff --git a/beacon-chain/db/db.go b/beacon-chain/db/db.go index 71280caba8..430feeb850 100644 --- a/beacon-chain/db/db.go +++ b/beacon-chain/db/db.go @@ -3,7 +3,7 @@ package db import ( - "github.com/OffchainLabs/prysm/v6/beacon-chain/db/kv" + "github.com/OffchainLabs/prysm/v7/beacon-chain/db/kv" ) // NewFileName uses the KVStoreDatafilePath so that if this layer of diff --git a/beacon-chain/db/db_test.go b/beacon-chain/db/db_test.go index b8aec71aad..02ac4d5a7c 100644 --- a/beacon-chain/db/db_test.go +++ b/beacon-chain/db/db_test.go @@ -1,5 +1,5 @@ package db -import "github.com/OffchainLabs/prysm/v6/beacon-chain/db/kv" +import "github.com/OffchainLabs/prysm/v7/beacon-chain/db/kv" var _ Database = (*kv.Store)(nil) diff --git a/beacon-chain/db/errors.go b/beacon-chain/db/errors.go index 1430f19d8c..0220358674 100644 --- a/beacon-chain/db/errors.go +++ b/beacon-chain/db/errors.go @@ -4,7 +4,7 @@ import ( "errors" "os" - "github.com/OffchainLabs/prysm/v6/beacon-chain/db/kv" + "github.com/OffchainLabs/prysm/v7/beacon-chain/db/kv" ) // ErrNotFound can be used to determine if an error from a method in the database package diff --git a/beacon-chain/db/filesystem/BUILD.bazel b/beacon-chain/db/filesystem/BUILD.bazel index d3d2f3a261..5c9d2df0b4 100644 --- a/beacon-chain/db/filesystem/BUILD.bazel +++ b/beacon-chain/db/filesystem/BUILD.bazel @@ -17,7 +17,7 @@ go_library( "mock.go", "pruner.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/beacon-chain/db/filesystem", + importpath = "github.com/OffchainLabs/prysm/v7/beacon-chain/db/filesystem", visibility = ["//visibility:public"], deps = [ "//async:go_default_library", diff --git a/beacon-chain/db/filesystem/blob.go b/beacon-chain/db/filesystem/blob.go index 7bf93ca00e..2c227233c4 100644 --- a/beacon-chain/db/filesystem/blob.go +++ b/beacon-chain/db/filesystem/blob.go @@ -7,12 +7,12 @@ import ( "path" "time" - "github.com/OffchainLabs/prysm/v6/beacon-chain/verification" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/io/file" - "github.com/OffchainLabs/prysm/v6/runtime/logging" + "github.com/OffchainLabs/prysm/v7/beacon-chain/verification" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/io/file" + "github.com/OffchainLabs/prysm/v7/runtime/logging" "github.com/pkg/errors" 
"github.com/sirupsen/logrus" "github.com/spf13/afero" diff --git a/beacon-chain/db/filesystem/blob_test.go b/beacon-chain/db/filesystem/blob_test.go index eca59408b4..b8356cd9cd 100644 --- a/beacon-chain/db/filesystem/blob_test.go +++ b/beacon-chain/db/filesystem/blob_test.go @@ -8,14 +8,14 @@ import ( "sync" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/db" - "github.com/OffchainLabs/prysm/v6/beacon-chain/verification" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/beacon-chain/db" + "github.com/OffchainLabs/prysm/v7/beacon-chain/verification" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" + "github.com/OffchainLabs/prysm/v7/time/slots" ssz "github.com/prysmaticlabs/fastssz" "github.com/spf13/afero" ) diff --git a/beacon-chain/db/filesystem/cache.go b/beacon-chain/db/filesystem/cache.go index b166badf4e..fa5f56f03d 100644 --- a/beacon-chain/db/filesystem/cache.go +++ b/beacon-chain/db/filesystem/cache.go @@ -4,10 +4,10 @@ import ( "fmt" "sync" - "github.com/OffchainLabs/prysm/v6/beacon-chain/db" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/beacon-chain/db" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" ) // blobIndexMask is a bitmask representing the set of blob indices that are currently set. 
diff --git a/beacon-chain/db/filesystem/cache_test.go b/beacon-chain/db/filesystem/cache_test.go index d5da4a1ec7..138344ae84 100644 --- a/beacon-chain/db/filesystem/cache_test.go +++ b/beacon-chain/db/filesystem/cache_test.go @@ -3,10 +3,10 @@ package filesystem import ( "testing" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" ) func TestSlotByRoot_Summary(t *testing.T) { diff --git a/beacon-chain/db/filesystem/data_column.go b/beacon-chain/db/filesystem/data_column.go index ac33e8870f..e4491dea50 100644 --- a/beacon-chain/db/filesystem/data_column.go +++ b/beacon-chain/db/filesystem/data_column.go @@ -14,15 +14,15 @@ import ( "sync" "time" - "github.com/OffchainLabs/prysm/v6/async" - "github.com/OffchainLabs/prysm/v6/async/event" - "github.com/OffchainLabs/prysm/v6/beacon-chain/verification" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/io/file" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/async" + "github.com/OffchainLabs/prysm/v7/async/event" + "github.com/OffchainLabs/prysm/v7/beacon-chain/verification" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/io/file" + "github.com/OffchainLabs/prysm/v7/time/slots" "github.com/pkg/errors" "github.com/spf13/afero" ) diff --git a/beacon-chain/db/filesystem/data_column_cache.go b/beacon-chain/db/filesystem/data_column_cache.go index 52684b683b..5e721d25e1 100644 --- a/beacon-chain/db/filesystem/data_column_cache.go +++ b/beacon-chain/db/filesystem/data_column_cache.go @@ -3,9 +3,9 @@ package filesystem import ( "sync" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" "github.com/pkg/errors" ) diff --git a/beacon-chain/db/filesystem/data_column_cache_test.go b/beacon-chain/db/filesystem/data_column_cache_test.go index fc89a93e10..2929535389 100644 --- a/beacon-chain/db/filesystem/data_column_cache_test.go +++ b/beacon-chain/db/filesystem/data_column_cache_test.go @@ -3,10 +3,10 @@ package filesystem import ( "testing" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/testing/require" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + 
"github.com/OffchainLabs/prysm/v7/testing/require" ) func TestHasIndex(t *testing.T) { diff --git a/beacon-chain/db/filesystem/data_column_test.go b/beacon-chain/db/filesystem/data_column_test.go index 23dc6d5c72..fe42199689 100644 --- a/beacon-chain/db/filesystem/data_column_test.go +++ b/beacon-chain/db/filesystem/data_column_test.go @@ -6,12 +6,12 @@ import ( "path/filepath" "testing" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" "github.com/spf13/afero" ) diff --git a/beacon-chain/db/filesystem/iteration.go b/beacon-chain/db/filesystem/iteration.go index 3cadf8d8ae..0c8d9189d7 100644 --- a/beacon-chain/db/filesystem/iteration.go +++ b/beacon-chain/db/filesystem/iteration.go @@ -7,7 +7,7 @@ import ( "strconv" "strings" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" "github.com/ethereum/go-ethereum/common/hexutil" "github.com/pkg/errors" "github.com/sirupsen/logrus" diff --git a/beacon-chain/db/filesystem/iteration_test.go b/beacon-chain/db/filesystem/iteration_test.go index e2a44a17d0..57c563a9d8 100644 --- a/beacon-chain/db/filesystem/iteration_test.go +++ b/beacon-chain/db/filesystem/iteration_test.go @@ -10,11 +10,11 @@ import ( "sort" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/verification" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/beacon-chain/verification" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" "github.com/spf13/afero" ) diff --git a/beacon-chain/db/filesystem/layout.go b/beacon-chain/db/filesystem/layout.go index b3f3782637..b3af864132 100644 --- a/beacon-chain/db/filesystem/layout.go +++ b/beacon-chain/db/filesystem/layout.go @@ -7,10 +7,10 @@ import ( "strings" "time" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/time/slots" "github.com/pkg/errors" "github.com/sirupsen/logrus" "github.com/spf13/afero" diff --git a/beacon-chain/db/filesystem/layout_by_epoch.go b/beacon-chain/db/filesystem/layout_by_epoch.go index 3e6adb4c93..7cf7ee3636 100644 --- a/beacon-chain/db/filesystem/layout_by_epoch.go +++ b/beacon-chain/db/filesystem/layout_by_epoch.go @@ -7,8 +7,8 @@ import ( "path/filepath" "strconv" - 
"github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" "github.com/pkg/errors" "github.com/spf13/afero" ) diff --git a/beacon-chain/db/filesystem/layout_flat.go b/beacon-chain/db/filesystem/layout_flat.go index 3206082ddf..0c91f5aecd 100644 --- a/beacon-chain/db/filesystem/layout_flat.go +++ b/beacon-chain/db/filesystem/layout_flat.go @@ -6,8 +6,8 @@ import ( "os" "path" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/time/slots" "github.com/pkg/errors" "github.com/spf13/afero" ) diff --git a/beacon-chain/db/filesystem/layout_test.go b/beacon-chain/db/filesystem/layout_test.go index f7fd1b3e7b..3dc324548e 100644 --- a/beacon-chain/db/filesystem/layout_test.go +++ b/beacon-chain/db/filesystem/layout_test.go @@ -3,8 +3,8 @@ package filesystem import ( "testing" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/testing/require" ) type mockLayout struct { diff --git a/beacon-chain/db/filesystem/migration_test.go b/beacon-chain/db/filesystem/migration_test.go index fa29f9bf2a..b277f9e6dd 100644 --- a/beacon-chain/db/filesystem/migration_test.go +++ b/beacon-chain/db/filesystem/migration_test.go @@ -4,10 +4,10 @@ import ( "os" "testing" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" "github.com/spf13/afero" ) diff --git a/beacon-chain/db/filesystem/mock.go b/beacon-chain/db/filesystem/mock.go index 1a5eb2f40c..fcfd92090e 100644 --- a/beacon-chain/db/filesystem/mock.go +++ b/beacon-chain/db/filesystem/mock.go @@ -4,10 +4,10 @@ import ( "context" "testing" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/time/slots" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/time/slots" "github.com/pkg/errors" "github.com/spf13/afero" ) diff --git a/beacon-chain/db/filesystem/pruner.go b/beacon-chain/db/filesystem/pruner.go index b6fc73b0e7..85ebedf996 100644 --- a/beacon-chain/db/filesystem/pruner.go +++ b/beacon-chain/db/filesystem/pruner.go @@ -5,7 +5,7 @@ import ( "sync/atomic" "time" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" "github.com/pkg/errors" "github.com/sirupsen/logrus" ) diff --git a/beacon-chain/db/filesystem/pruner_test.go b/beacon-chain/db/filesystem/pruner_test.go index 1c4f028bb0..04b7c9d5a8 100644 --- a/beacon-chain/db/filesystem/pruner_test.go +++ b/beacon-chain/db/filesystem/pruner_test.go @@ 
-6,12 +6,12 @@ import ( "testing" "time" - "github.com/OffchainLabs/prysm/v6/beacon-chain/verification" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/beacon-chain/verification" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" + "github.com/OffchainLabs/prysm/v7/time/slots" "github.com/spf13/afero" ) diff --git a/beacon-chain/db/filters/BUILD.bazel b/beacon-chain/db/filters/BUILD.bazel index 1fc2bedfba..535f825871 100644 --- a/beacon-chain/db/filters/BUILD.bazel +++ b/beacon-chain/db/filters/BUILD.bazel @@ -6,7 +6,7 @@ go_library( "errors.go", "filter.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/beacon-chain/db/filters", + importpath = "github.com/OffchainLabs/prysm/v7/beacon-chain/db/filters", visibility = [ "//beacon-chain:__subpackages__", "//tools:__subpackages__", diff --git a/beacon-chain/db/filters/filter.go b/beacon-chain/db/filters/filter.go index 7ef8a03bcc..132a702603 100644 --- a/beacon-chain/db/filters/filter.go +++ b/beacon-chain/db/filters/filter.go @@ -15,7 +15,7 @@ package filters import ( - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" "github.com/pkg/errors" ) diff --git a/beacon-chain/db/filters/filter_test.go b/beacon-chain/db/filters/filter_test.go index 2929cf4fe2..66a1d98555 100644 --- a/beacon-chain/db/filters/filter_test.go +++ b/beacon-chain/db/filters/filter_test.go @@ -3,9 +3,9 @@ package filters import ( "testing" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" ) func TestQueryFilter_ChainsCorrectly(t *testing.T) { diff --git a/beacon-chain/db/iface/BUILD.bazel b/beacon-chain/db/iface/BUILD.bazel index 70b18f0700..f1576ad8b5 100644 --- a/beacon-chain/db/iface/BUILD.bazel +++ b/beacon-chain/db/iface/BUILD.bazel @@ -6,7 +6,7 @@ go_library( "errors.go", "interface.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/beacon-chain/db/iface", + importpath = "github.com/OffchainLabs/prysm/v7/beacon-chain/db/iface", # Other packages must use github.com/prysmaticlabs/prysm/beacon-chain/db.Database alias. 
visibility = ["//visibility:public"], deps = [ diff --git a/beacon-chain/db/iface/interface.go b/beacon-chain/db/iface/interface.go index 191a807a87..179e9b46f2 100644 --- a/beacon-chain/db/iface/interface.go +++ b/beacon-chain/db/iface/interface.go @@ -7,15 +7,15 @@ import ( "context" "io" - "github.com/OffchainLabs/prysm/v6/beacon-chain/db/filters" - slashertypes "github.com/OffchainLabs/prysm/v6/beacon-chain/slasher/types" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/monitoring/backup" - "github.com/OffchainLabs/prysm/v6/proto/dbval" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/beacon-chain/db/filters" + slashertypes "github.com/OffchainLabs/prysm/v7/beacon-chain/slasher/types" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/monitoring/backup" + "github.com/OffchainLabs/prysm/v7/proto/dbval" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" "github.com/ethereum/go-ethereum/common" ) diff --git a/beacon-chain/db/kv/BUILD.bazel b/beacon-chain/db/kv/BUILD.bazel index acbaa50fa2..a396c2d3d1 100644 --- a/beacon-chain/db/kv/BUILD.bazel +++ b/beacon-chain/db/kv/BUILD.bazel @@ -33,7 +33,7 @@ go_library( "validated_checkpoint.go", "wss.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/beacon-chain/db/kv", + importpath = "github.com/OffchainLabs/prysm/v7/beacon-chain/db/kv", visibility = ["//visibility:public"], deps = [ "//beacon-chain/core/blocks:go_default_library", diff --git a/beacon-chain/db/kv/archived_point.go b/beacon-chain/db/kv/archived_point.go index 3af6d1e6e7..9480472664 100644 --- a/beacon-chain/db/kv/archived_point.go +++ b/beacon-chain/db/kv/archived_point.go @@ -3,9 +3,9 @@ package kv import ( "context" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing/trace" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing/trace" bolt "go.etcd.io/bbolt" ) diff --git a/beacon-chain/db/kv/archived_point_test.go b/beacon-chain/db/kv/archived_point_test.go index afc160e115..fc31d2b252 100644 --- a/beacon-chain/db/kv/archived_point_test.go +++ b/beacon-chain/db/kv/archived_point_test.go @@ -3,10 +3,10 @@ package kv import ( "testing" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" ) func TestArchivedPointIndexRoot_CanSaveRetrieve(t *testing.T) { diff --git a/beacon-chain/db/kv/backfill.go b/beacon-chain/db/kv/backfill.go index 28c0c61b06..7fdd72fddf 100644 --- a/beacon-chain/db/kv/backfill.go +++ b/beacon-chain/db/kv/backfill.go @@ -3,8 
+3,8 @@ package kv import ( "context" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing/trace" - "github.com/OffchainLabs/prysm/v6/proto/dbval" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing/trace" + "github.com/OffchainLabs/prysm/v7/proto/dbval" "github.com/pkg/errors" bolt "go.etcd.io/bbolt" "google.golang.org/protobuf/proto" diff --git a/beacon-chain/db/kv/backfill_test.go b/beacon-chain/db/kv/backfill_test.go index 8878c9f303..8223bcfdae 100644 --- a/beacon-chain/db/kv/backfill_test.go +++ b/beacon-chain/db/kv/backfill_test.go @@ -3,9 +3,9 @@ package kv import ( "testing" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/proto/dbval" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/proto/dbval" + "github.com/OffchainLabs/prysm/v7/testing/require" "google.golang.org/protobuf/proto" ) diff --git a/beacon-chain/db/kv/backup.go b/beacon-chain/db/kv/backup.go index 75c18b100d..bca6467eb3 100644 --- a/beacon-chain/db/kv/backup.go +++ b/beacon-chain/db/kv/backup.go @@ -5,10 +5,10 @@ import ( "fmt" "path" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/io/file" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing/trace" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/io/file" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing/trace" bolt "go.etcd.io/bbolt" ) diff --git a/beacon-chain/db/kv/backup_test.go b/beacon-chain/db/kv/backup_test.go index b8fdcf7aac..b93c5dee2a 100644 --- a/beacon-chain/db/kv/backup_test.go +++ b/beacon-chain/db/kv/backup_test.go @@ -5,10 +5,10 @@ import ( "path/filepath" "testing" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" ) func TestStore_Backup(t *testing.T) { diff --git a/beacon-chain/db/kv/blocks.go b/beacon-chain/db/kv/blocks.go index aeaa314567..d10c3aa5c5 100644 --- a/beacon-chain/db/kv/blocks.go +++ b/beacon-chain/db/kv/blocks.go @@ -6,17 +6,17 @@ import ( "fmt" "slices" - "github.com/OffchainLabs/prysm/v6/beacon-chain/db/filters" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/container/slice" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing/trace" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/beacon-chain/db/filters" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + 
"github.com/OffchainLabs/prysm/v7/container/slice" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing/trace" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" + "github.com/OffchainLabs/prysm/v7/time/slots" "github.com/ethereum/go-ethereum/common" "github.com/golang/snappy" "github.com/pkg/errors" diff --git a/beacon-chain/db/kv/blocks_test.go b/beacon-chain/db/kv/blocks_test.go index a35708d3c1..94f1ceafb5 100644 --- a/beacon-chain/db/kv/blocks_test.go +++ b/beacon-chain/db/kv/blocks_test.go @@ -6,18 +6,18 @@ import ( "testing" "time" - "github.com/OffchainLabs/prysm/v6/beacon-chain/db/filters" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/beacon-chain/db/filters" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" "github.com/ethereum/go-ethereum/common" "github.com/pkg/errors" bolt "go.etcd.io/bbolt" diff --git a/beacon-chain/db/kv/checkpoint.go b/beacon-chain/db/kv/checkpoint.go index 90435ae737..16b01866aa 100644 --- a/beacon-chain/db/kv/checkpoint.go +++ b/beacon-chain/db/kv/checkpoint.go @@ -4,11 +4,11 @@ import ( "context" "fmt" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing/trace" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing/trace" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" "github.com/pkg/errors" bolt "go.etcd.io/bbolt" ) diff --git a/beacon-chain/db/kv/checkpoint_test.go b/beacon-chain/db/kv/checkpoint_test.go index 1b4389b45c..50a707d95c 100644 --- a/beacon-chain/db/kv/checkpoint_test.go +++ b/beacon-chain/db/kv/checkpoint_test.go @@ -3,13 +3,13 @@ package kv import ( "testing" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - 
"github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" "google.golang.org/protobuf/proto" ) diff --git a/beacon-chain/db/kv/custody.go b/beacon-chain/db/kv/custody.go index 63df838bed..3655b632d1 100644 --- a/beacon-chain/db/kv/custody.go +++ b/beacon-chain/db/kv/custody.go @@ -4,11 +4,11 @@ import ( "context" "time" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing/trace" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing/trace" + "github.com/OffchainLabs/prysm/v7/time/slots" "github.com/pkg/errors" "github.com/sirupsen/logrus" bolt "go.etcd.io/bbolt" diff --git a/beacon-chain/db/kv/custody_test.go b/beacon-chain/db/kv/custody_test.go index 6db0148109..e7ff3af4a5 100644 --- a/beacon-chain/db/kv/custody_test.go +++ b/beacon-chain/db/kv/custody_test.go @@ -5,11 +5,11 @@ import ( "testing" "time" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/time/slots" bolt "go.etcd.io/bbolt" ) diff --git a/beacon-chain/db/kv/deposit_contract.go b/beacon-chain/db/kv/deposit_contract.go index 2849e882ba..6363337b00 100644 --- a/beacon-chain/db/kv/deposit_contract.go +++ b/beacon-chain/db/kv/deposit_contract.go @@ -4,7 +4,7 @@ import ( "context" "fmt" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing/trace" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing/trace" "github.com/ethereum/go-ethereum/common" bolt "go.etcd.io/bbolt" ) diff --git a/beacon-chain/db/kv/deposit_contract_test.go b/beacon-chain/db/kv/deposit_contract_test.go index e5b2f6b60c..2743e6145f 100644 --- a/beacon-chain/db/kv/deposit_contract_test.go +++ b/beacon-chain/db/kv/deposit_contract_test.go @@ -3,8 +3,8 @@ package kv import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" "github.com/ethereum/go-ethereum/common" ) diff --git a/beacon-chain/db/kv/encoding.go b/beacon-chain/db/kv/encoding.go index 1c7f1c70c0..d1a5c9d5b9 100644 --- a/beacon-chain/db/kv/encoding.go +++ b/beacon-chain/db/kv/encoding.go @@ -5,8 +5,8 @@ import ( "errors" "reflect" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing/trace" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + 
"github.com/OffchainLabs/prysm/v7/monitoring/tracing/trace" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" "github.com/golang/snappy" fastssz "github.com/prysmaticlabs/fastssz" "google.golang.org/protobuf/proto" diff --git a/beacon-chain/db/kv/encoding_test.go b/beacon-chain/db/kv/encoding_test.go index cfcf04e964..13b328c91f 100644 --- a/beacon-chain/db/kv/encoding_test.go +++ b/beacon-chain/db/kv/encoding_test.go @@ -3,8 +3,8 @@ package kv import ( "testing" - testpb "github.com/OffchainLabs/prysm/v6/proto/testing" - "github.com/OffchainLabs/prysm/v6/testing/require" + testpb "github.com/OffchainLabs/prysm/v7/proto/testing" + "github.com/OffchainLabs/prysm/v7/testing/require" ) func Test_encode_handlesNilFromFunction(t *testing.T) { diff --git a/beacon-chain/db/kv/execution_chain.go b/beacon-chain/db/kv/execution_chain.go index 861bae1bc0..864bac580b 100644 --- a/beacon-chain/db/kv/execution_chain.go +++ b/beacon-chain/db/kv/execution_chain.go @@ -4,9 +4,9 @@ import ( "context" "errors" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing/trace" - v2 "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing/trace" + v2 "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" bolt "go.etcd.io/bbolt" "google.golang.org/protobuf/proto" ) diff --git a/beacon-chain/db/kv/execution_chain_test.go b/beacon-chain/db/kv/execution_chain_test.go index d7e8bdf9e0..9dd8b1201e 100644 --- a/beacon-chain/db/kv/execution_chain_test.go +++ b/beacon-chain/db/kv/execution_chain_test.go @@ -3,7 +3,7 @@ package kv import ( "testing" - v2 "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + v2 "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" ) func TestStore_SavePowchainData(t *testing.T) { diff --git a/beacon-chain/db/kv/finalized_block_roots.go b/beacon-chain/db/kv/finalized_block_roots.go index 864d46ec44..5f507aaec6 100644 --- a/beacon-chain/db/kv/finalized_block_roots.go +++ b/beacon-chain/db/kv/finalized_block_roots.go @@ -4,13 +4,13 @@ import ( "bytes" "context" - "github.com/OffchainLabs/prysm/v6/beacon-chain/db/filters" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing/trace" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/beacon-chain/db/filters" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing/trace" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" "github.com/pkg/errors" bolt "go.etcd.io/bbolt" ) diff --git a/beacon-chain/db/kv/finalized_block_roots_test.go b/beacon-chain/db/kv/finalized_block_roots_test.go index dc4aabc612..91e2c057a6 100644 --- a/beacon-chain/db/kv/finalized_block_roots_test.go +++ b/beacon-chain/db/kv/finalized_block_roots_test.go @@ -5,16 +5,16 @@ import ( "context" "testing" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - consensusblocks 
"github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + consensusblocks "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" bolt "go.etcd.io/bbolt" ) diff --git a/beacon-chain/db/kv/genesis.go b/beacon-chain/db/kv/genesis.go index 297103f1fb..b7f201d445 100644 --- a/beacon-chain/db/kv/genesis.go +++ b/beacon-chain/db/kv/genesis.go @@ -4,12 +4,12 @@ import ( "context" "fmt" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/blocks" - dbIface "github.com/OffchainLabs/prysm/v6/beacon-chain/db/iface" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/encoding/ssz/detect" - "github.com/OffchainLabs/prysm/v6/genesis" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/blocks" + dbIface "github.com/OffchainLabs/prysm/v7/beacon-chain/db/iface" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/encoding/ssz/detect" + "github.com/OffchainLabs/prysm/v7/genesis" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" "github.com/pkg/errors" ) diff --git a/beacon-chain/db/kv/genesis_test.go b/beacon-chain/db/kv/genesis_test.go index d79ce95873..fb340174f3 100644 --- a/beacon-chain/db/kv/genesis_test.go +++ b/beacon-chain/db/kv/genesis_test.go @@ -5,13 +5,13 @@ import ( "os" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/db/iface" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/genesis" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/beacon-chain/db/iface" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/genesis" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" "github.com/bazelbuild/rules_go/go/tools/bazel" ) diff --git a/beacon-chain/db/kv/init_test.go b/beacon-chain/db/kv/init_test.go index 5672e14350..0763203342 100644 --- a/beacon-chain/db/kv/init_test.go +++ b/beacon-chain/db/kv/init_test.go @@ -5,7 +5,7 @@ import ( "os" "testing" - "github.com/OffchainLabs/prysm/v6/config/params" + "github.com/OffchainLabs/prysm/v7/config/params" "github.com/sirupsen/logrus" ) diff --git a/beacon-chain/db/kv/kv.go b/beacon-chain/db/kv/kv.go index 193cadf48b..fd1b3482a6 100644 --- 
a/beacon-chain/db/kv/kv.go +++ b/beacon-chain/db/kv/kv.go @@ -9,13 +9,13 @@ import ( "path" "time" - "github.com/OffchainLabs/prysm/v6/beacon-chain/db/iface" - "github.com/OffchainLabs/prysm/v6/config/features" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - "github.com/OffchainLabs/prysm/v6/io/file" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/beacon-chain/db/iface" + "github.com/OffchainLabs/prysm/v7/config/features" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + "github.com/OffchainLabs/prysm/v7/io/file" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" "github.com/dgraph-io/ristretto/v2" "github.com/pkg/errors" "github.com/prometheus/client_golang/prometheus" diff --git a/beacon-chain/db/kv/kv_test.go b/beacon-chain/db/kv/kv_test.go index c5eb010740..260adb17fd 100644 --- a/beacon-chain/db/kv/kv_test.go +++ b/beacon-chain/db/kv/kv_test.go @@ -6,11 +6,11 @@ import ( "fmt" "testing" - "github.com/OffchainLabs/prysm/v6/config/features" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/config/features" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" bolt "go.etcd.io/bbolt" ) diff --git a/beacon-chain/db/kv/lightclient.go b/beacon-chain/db/kv/lightclient.go index 33eb6bb237..abf9695e14 100644 --- a/beacon-chain/db/kv/lightclient.go +++ b/beacon-chain/db/kv/lightclient.go @@ -5,14 +5,14 @@ import ( "encoding/binary" "fmt" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - light_client "github.com/OffchainLabs/prysm/v6/consensus-types/light-client" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing/trace" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + light_client "github.com/OffchainLabs/prysm/v7/consensus-types/light-client" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing/trace" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" "github.com/golang/snappy" "github.com/pkg/errors" bolt "go.etcd.io/bbolt" diff --git a/beacon-chain/db/kv/lightclient_test.go b/beacon-chain/db/kv/lightclient_test.go index 51ef070136..609471e064 100644 --- a/beacon-chain/db/kv/lightclient_test.go +++ b/beacon-chain/db/kv/lightclient_test.go @@ -5,18 +5,18 @@ import ( "math/rand" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - 
"github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - light_client "github.com/OffchainLabs/prysm/v6/consensus-types/light-client" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - enginev1 "github.com/OffchainLabs/prysm/v6/proto/engine/v1" - pb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + light_client "github.com/OffchainLabs/prysm/v7/consensus-types/light-client" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + enginev1 "github.com/OffchainLabs/prysm/v7/proto/engine/v1" + pb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" + "github.com/OffchainLabs/prysm/v7/time/slots" bolt "go.etcd.io/bbolt" "google.golang.org/protobuf/proto" ) diff --git a/beacon-chain/db/kv/migration_archived_index.go b/beacon-chain/db/kv/migration_archived_index.go index 438d71efe9..6a790e74bd 100644 --- a/beacon-chain/db/kv/migration_archived_index.go +++ b/beacon-chain/db/kv/migration_archived_index.go @@ -4,9 +4,9 @@ import ( "bytes" "context" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" bolt "go.etcd.io/bbolt" ) diff --git a/beacon-chain/db/kv/migration_archived_index_test.go b/beacon-chain/db/kv/migration_archived_index_test.go index 87c2f359b4..e87ac0ecdb 100644 --- a/beacon-chain/db/kv/migration_archived_index_test.go +++ b/beacon-chain/db/kv/migration_archived_index_test.go @@ -3,9 +3,9 @@ package kv import ( "testing" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/util" "go.etcd.io/bbolt" ) diff --git a/beacon-chain/db/kv/migration_block_slot_index.go b/beacon-chain/db/kv/migration_block_slot_index.go index be9be8963c..ffb2c79be4 100644 --- a/beacon-chain/db/kv/migration_block_slot_index.go +++ b/beacon-chain/db/kv/migration_block_slot_index.go @@ -5,7 +5,7 @@ import ( "context" "strconv" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" bolt "go.etcd.io/bbolt" ) diff --git a/beacon-chain/db/kv/migration_block_slot_index_test.go b/beacon-chain/db/kv/migration_block_slot_index_test.go index f38e3d50a4..070fab4024 100644 --- a/beacon-chain/db/kv/migration_block_slot_index_test.go +++ b/beacon-chain/db/kv/migration_block_slot_index_test.go @@ -3,8 +3,8 @@ package kv import ( "testing" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - 
"github.com/OffchainLabs/prysm/v6/testing/assert" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/testing/assert" "go.etcd.io/bbolt" ) diff --git a/beacon-chain/db/kv/migration_finalized_parent.go b/beacon-chain/db/kv/migration_finalized_parent.go index 7d5961408a..b4d8707fb3 100644 --- a/beacon-chain/db/kv/migration_finalized_parent.go +++ b/beacon-chain/db/kv/migration_finalized_parent.go @@ -5,9 +5,9 @@ import ( "context" "fmt" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" "github.com/pkg/errors" bolt "go.etcd.io/bbolt" ) diff --git a/beacon-chain/db/kv/migration_state_validators.go b/beacon-chain/db/kv/migration_state_validators.go index 923ea9b04b..7f6a8fc3f0 100644 --- a/beacon-chain/db/kv/migration_state_validators.go +++ b/beacon-chain/db/kv/migration_state_validators.go @@ -5,10 +5,10 @@ import ( "context" "fmt" - "github.com/OffchainLabs/prysm/v6/config/features" - "github.com/OffchainLabs/prysm/v6/encoding/ssz/detect" - "github.com/OffchainLabs/prysm/v6/monitoring/progress" - v1alpha1 "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/config/features" + "github.com/OffchainLabs/prysm/v7/encoding/ssz/detect" + "github.com/OffchainLabs/prysm/v7/monitoring/progress" + v1alpha1 "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" "github.com/ethereum/go-ethereum/common/hexutil" "github.com/golang/snappy" "github.com/schollz/progressbar/v3" diff --git a/beacon-chain/db/kv/migration_state_validators_test.go b/beacon-chain/db/kv/migration_state_validators_test.go index 5fb902355a..af8b84f8b6 100644 --- a/beacon-chain/db/kv/migration_state_validators_test.go +++ b/beacon-chain/db/kv/migration_state_validators_test.go @@ -4,14 +4,14 @@ import ( "bytes" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - "github.com/OffchainLabs/prysm/v6/config/features" - "github.com/OffchainLabs/prysm/v6/config/params" - v1alpha1 "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + "github.com/OffchainLabs/prysm/v7/config/features" + "github.com/OffchainLabs/prysm/v7/config/params" + v1alpha1 "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" "github.com/golang/snappy" "go.etcd.io/bbolt" ) diff --git a/beacon-chain/db/kv/p2p.go b/beacon-chain/db/kv/p2p.go index 1748eda100..d81f457017 100644 --- a/beacon-chain/db/kv/p2p.go +++ b/beacon-chain/db/kv/p2p.go @@ -3,8 +3,8 @@ package kv import ( "context" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing/trace" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing/trace" bolt "go.etcd.io/bbolt" ) diff --git 
a/beacon-chain/db/kv/p2p_test.go b/beacon-chain/db/kv/p2p_test.go index 435141b8fb..43e476f269 100644 --- a/beacon-chain/db/kv/p2p_test.go +++ b/beacon-chain/db/kv/p2p_test.go @@ -3,8 +3,8 @@ package kv import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" ) func TestStore_MetadataSeqNum(t *testing.T) { diff --git a/beacon-chain/db/kv/state.go b/beacon-chain/db/kv/state.go index 16ed683747..485c5bcadd 100644 --- a/beacon-chain/db/kv/state.go +++ b/beacon-chain/db/kv/state.go @@ -5,18 +5,18 @@ import ( "context" "fmt" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - statenative "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - "github.com/OffchainLabs/prysm/v6/config/features" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/genesis" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing/trace" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" - "github.com/OffchainLabs/prysm/v6/time" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + statenative "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + "github.com/OffchainLabs/prysm/v7/config/features" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/genesis" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing/trace" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" + "github.com/OffchainLabs/prysm/v7/time" + "github.com/OffchainLabs/prysm/v7/time/slots" "github.com/golang/snappy" "github.com/pkg/errors" bolt "go.etcd.io/bbolt" diff --git a/beacon-chain/db/kv/state_summary.go b/beacon-chain/db/kv/state_summary.go index ca964509bc..a79410da4c 100644 --- a/beacon-chain/db/kv/state_summary.go +++ b/beacon-chain/db/kv/state_summary.go @@ -3,9 +3,9 @@ package kv import ( "context" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing/trace" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing/trace" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" bolt "go.etcd.io/bbolt" ) diff --git a/beacon-chain/db/kv/state_summary_cache.go b/beacon-chain/db/kv/state_summary_cache.go index 48d4b91867..ab2977aed9 100644 --- a/beacon-chain/db/kv/state_summary_cache.go +++ b/beacon-chain/db/kv/state_summary_cache.go @@ -3,7 +3,7 @@ package kv import ( "sync" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" ) const stateSummaryCachePruneCount = 128 diff --git a/beacon-chain/db/kv/state_summary_test.go b/beacon-chain/db/kv/state_summary_test.go index b3a207a3a2..bab1959347 100644 --- a/beacon-chain/db/kv/state_summary_test.go +++ b/beacon-chain/db/kv/state_summary_test.go @@ -3,11 +3,11 @@ package kv import ( "testing" - 
"github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" ) func TestStateSummary_CanSaveRetrieve(t *testing.T) { diff --git a/beacon-chain/db/kv/state_test.go b/beacon-chain/db/kv/state_test.go index c5bb58de9c..38af8b225e 100644 --- a/beacon-chain/db/kv/state_test.go +++ b/beacon-chain/db/kv/state_test.go @@ -8,20 +8,20 @@ import ( "testing" "time" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/config/features" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/genesis" - enginev1 "github.com/OffchainLabs/prysm/v6/proto/engine/v1" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/config/features" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/genesis" + enginev1 "github.com/OffchainLabs/prysm/v7/proto/engine/v1" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" bolt "go.etcd.io/bbolt" ) diff --git a/beacon-chain/db/kv/utils.go b/beacon-chain/db/kv/utils.go index a96b25a195..21ba282ab4 100644 --- a/beacon-chain/db/kv/utils.go +++ b/beacon-chain/db/kv/utils.go @@ -4,8 +4,8 @@ import ( "bytes" "context" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing/trace" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing/trace" "github.com/pkg/errors" bolt "go.etcd.io/bbolt" ) diff --git a/beacon-chain/db/kv/utils_test.go b/beacon-chain/db/kv/utils_test.go index e8d761cbad..cafa836337 100644 --- a/beacon-chain/db/kv/utils_test.go +++ b/beacon-chain/db/kv/utils_test.go @@ -5,9 +5,9 @@ import ( "crypto/rand" "testing" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/testing/assert" + 
"github.com/OffchainLabs/prysm/v7/testing/require" bolt "go.etcd.io/bbolt" ) diff --git a/beacon-chain/db/kv/validated_checkpoint.go b/beacon-chain/db/kv/validated_checkpoint.go index a06b9cdd1d..b4e8a55ac4 100644 --- a/beacon-chain/db/kv/validated_checkpoint.go +++ b/beacon-chain/db/kv/validated_checkpoint.go @@ -4,9 +4,9 @@ import ( "bytes" "context" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing/trace" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing/trace" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" bolt "go.etcd.io/bbolt" ) diff --git a/beacon-chain/db/kv/validated_checkpoint_test.go b/beacon-chain/db/kv/validated_checkpoint_test.go index 85aa5a2792..895ea6bbdf 100644 --- a/beacon-chain/db/kv/validated_checkpoint_test.go +++ b/beacon-chain/db/kv/validated_checkpoint_test.go @@ -3,12 +3,12 @@ package kv import ( "testing" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" "google.golang.org/protobuf/proto" ) diff --git a/beacon-chain/db/kv/wss.go b/beacon-chain/db/kv/wss.go index 4757d8aab0..77cbd5e89b 100644 --- a/beacon-chain/db/kv/wss.go +++ b/beacon-chain/db/kv/wss.go @@ -4,12 +4,12 @@ import ( "context" "fmt" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/encoding/ssz/detect" - "github.com/OffchainLabs/prysm/v6/proto/dbval" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/encoding/ssz/detect" + "github.com/OffchainLabs/prysm/v7/proto/dbval" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" "github.com/pkg/errors" "github.com/sirupsen/logrus" ) diff --git a/beacon-chain/db/kv/wss_test.go b/beacon-chain/db/kv/wss_test.go index d06082d68c..83b2075ced 100644 --- a/beacon-chain/db/kv/wss_test.go +++ b/beacon-chain/db/kv/wss_test.go @@ -3,11 +3,11 @@ package kv import ( "testing" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/genesis" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/genesis" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" ) func TestSaveOrigin(t *testing.T) { diff --git a/beacon-chain/db/pruner/BUILD.bazel 
b/beacon-chain/db/pruner/BUILD.bazel index aea71592ef..26d5e32cf2 100644 --- a/beacon-chain/db/pruner/BUILD.bazel +++ b/beacon-chain/db/pruner/BUILD.bazel @@ -3,7 +3,7 @@ load("@prysm//tools/go:def.bzl", "go_library", "go_test") go_library( name = "go_default_library", srcs = ["pruner.go"], - importpath = "github.com/OffchainLabs/prysm/v6/beacon-chain/db/pruner", + importpath = "github.com/OffchainLabs/prysm/v7/beacon-chain/db/pruner", visibility = [ "//beacon-chain:__subpackages__", ], diff --git a/beacon-chain/db/pruner/pruner.go b/beacon-chain/db/pruner/pruner.go index 6e3699a9e6..7573435214 100644 --- a/beacon-chain/db/pruner/pruner.go +++ b/beacon-chain/db/pruner/pruner.go @@ -4,11 +4,11 @@ import ( "context" "time" - "github.com/OffchainLabs/prysm/v6/beacon-chain/db" - "github.com/OffchainLabs/prysm/v6/beacon-chain/db/iface" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/beacon-chain/db" + "github.com/OffchainLabs/prysm/v7/beacon-chain/db/iface" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/time/slots" "github.com/pkg/errors" "github.com/sirupsen/logrus" ) diff --git a/beacon-chain/db/pruner/pruner_test.go b/beacon-chain/db/pruner/pruner_test.go index 55e3c109a2..c659aa9678 100644 --- a/beacon-chain/db/pruner/pruner_test.go +++ b/beacon-chain/db/pruner/pruner_test.go @@ -6,18 +6,18 @@ import ( "testing" "time" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - eth "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + eth "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/util" - slottest "github.com/OffchainLabs/prysm/v6/time/slots/testing" + "github.com/OffchainLabs/prysm/v7/testing/util" + slottest "github.com/OffchainLabs/prysm/v7/time/slots/testing" "github.com/sirupsen/logrus" - dbtest "github.com/OffchainLabs/prysm/v6/beacon-chain/db/testing" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" + dbtest "github.com/OffchainLabs/prysm/v7/beacon-chain/db/testing" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" logTest "github.com/sirupsen/logrus/hooks/test" ) diff --git a/beacon-chain/db/restore.go b/beacon-chain/db/restore.go index 23c0fcd55d..5ce68a45fd 100644 --- a/beacon-chain/db/restore.go +++ b/beacon-chain/db/restore.go @@ -5,10 +5,10 @@ import ( "path" "strings" - "github.com/OffchainLabs/prysm/v6/beacon-chain/db/kv" - "github.com/OffchainLabs/prysm/v6/cmd" - "github.com/OffchainLabs/prysm/v6/io/file" - "github.com/OffchainLabs/prysm/v6/io/prompt" + "github.com/OffchainLabs/prysm/v7/beacon-chain/db/kv" + "github.com/OffchainLabs/prysm/v7/cmd" + "github.com/OffchainLabs/prysm/v7/io/file" + "github.com/OffchainLabs/prysm/v7/io/prompt" "github.com/pkg/errors" "github.com/urfave/cli/v2" ) diff --git a/beacon-chain/db/restore_test.go b/beacon-chain/db/restore_test.go index f246d0fee0..35028f991a 100644 --- a/beacon-chain/db/restore_test.go +++ 
b/beacon-chain/db/restore_test.go @@ -6,13 +6,13 @@ import ( "path" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/db/kv" - "github.com/OffchainLabs/prysm/v6/cmd" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/beacon-chain/db/kv" + "github.com/OffchainLabs/prysm/v7/cmd" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" logTest "github.com/sirupsen/logrus/hooks/test" "github.com/urfave/cli/v2" ) diff --git a/beacon-chain/db/slasherkv/BUILD.bazel b/beacon-chain/db/slasherkv/BUILD.bazel index b693a8aba2..8ff95cc40e 100644 --- a/beacon-chain/db/slasherkv/BUILD.bazel +++ b/beacon-chain/db/slasherkv/BUILD.bazel @@ -11,7 +11,7 @@ go_library( "schema.go", "slasher.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/beacon-chain/db/slasherkv", + importpath = "github.com/OffchainLabs/prysm/v7/beacon-chain/db/slasherkv", visibility = ["//beacon-chain:__subpackages__"], deps = [ "//beacon-chain/db/iface:go_default_library", diff --git a/beacon-chain/db/slasherkv/kv.go b/beacon-chain/db/slasherkv/kv.go index 7baf13d1fa..4c543226c5 100644 --- a/beacon-chain/db/slasherkv/kv.go +++ b/beacon-chain/db/slasherkv/kv.go @@ -9,9 +9,9 @@ import ( "path" "time" - "github.com/OffchainLabs/prysm/v6/beacon-chain/db/iface" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/io/file" + "github.com/OffchainLabs/prysm/v7/beacon-chain/db/iface" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/io/file" "github.com/pkg/errors" bolt "go.etcd.io/bbolt" ) diff --git a/beacon-chain/db/slasherkv/kv_test.go b/beacon-chain/db/slasherkv/kv_test.go index 0f39ba0df7..913423b3ec 100644 --- a/beacon-chain/db/slasherkv/kv_test.go +++ b/beacon-chain/db/slasherkv/kv_test.go @@ -3,7 +3,7 @@ package slasherkv import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/require" ) // setupDB instantiates and returns a Store instance. 
diff --git a/beacon-chain/db/slasherkv/migrate.go b/beacon-chain/db/slasherkv/migrate.go index d3aca379e2..1b052c75e7 100644 --- a/beacon-chain/db/slasherkv/migrate.go +++ b/beacon-chain/db/slasherkv/migrate.go @@ -5,8 +5,8 @@ import ( "encoding/binary" "time" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/time/slots" "github.com/pkg/errors" "github.com/sirupsen/logrus" bolt "go.etcd.io/bbolt" diff --git a/beacon-chain/db/slasherkv/migrate_test.go b/beacon-chain/db/slasherkv/migrate_test.go index 5ef5ac62f8..01673dc292 100644 --- a/beacon-chain/db/slasherkv/migrate_test.go +++ b/beacon-chain/db/slasherkv/migrate_test.go @@ -4,9 +4,9 @@ import ( "encoding/binary" "testing" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/testing/require" bolt "go.etcd.io/bbolt" ) diff --git a/beacon-chain/db/slasherkv/pruning.go b/beacon-chain/db/slasherkv/pruning.go index 8f4e2efbe5..7560640b74 100644 --- a/beacon-chain/db/slasherkv/pruning.go +++ b/beacon-chain/db/slasherkv/pruning.go @@ -6,8 +6,8 @@ import ( "encoding/binary" "time" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/time/slots" "github.com/pkg/errors" bolt "go.etcd.io/bbolt" ) diff --git a/beacon-chain/db/slasherkv/pruning_test.go b/beacon-chain/db/slasherkv/pruning_test.go index 93bc95f121..3e4912fd9a 100644 --- a/beacon-chain/db/slasherkv/pruning_test.go +++ b/beacon-chain/db/slasherkv/pruning_test.go @@ -4,12 +4,12 @@ import ( "fmt" "testing" - slashertypes "github.com/OffchainLabs/prysm/v6/beacon-chain/slasher/types" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/runtime/version" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/time/slots" + slashertypes "github.com/OffchainLabs/prysm/v7/beacon-chain/slasher/types" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/runtime/version" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/time/slots" logTest "github.com/sirupsen/logrus/hooks/test" bolt "go.etcd.io/bbolt" ) diff --git a/beacon-chain/db/slasherkv/slasher.go b/beacon-chain/db/slasherkv/slasher.go index 3ecc728aa3..355861e746 100644 --- a/beacon-chain/db/slasherkv/slasher.go +++ b/beacon-chain/db/slasherkv/slasher.go @@ -8,13 +8,13 @@ import ( "sort" "sync" - "github.com/OffchainLabs/prysm/v6/beacon-chain/db/kv" - slashertypes "github.com/OffchainLabs/prysm/v6/beacon-chain/slasher/types" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing/trace" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" + "github.com/OffchainLabs/prysm/v7/beacon-chain/db/kv" + slashertypes 
"github.com/OffchainLabs/prysm/v7/beacon-chain/slasher/types" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing/trace" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" "github.com/golang/snappy" "github.com/pkg/errors" ssz "github.com/prysmaticlabs/fastssz" diff --git a/beacon-chain/db/slasherkv/slasher_test.go b/beacon-chain/db/slasherkv/slasher_test.go index 2044520c3b..a76128e5ee 100644 --- a/beacon-chain/db/slasherkv/slasher_test.go +++ b/beacon-chain/db/slasherkv/slasher_test.go @@ -7,13 +7,13 @@ import ( "sort" "testing" - slashertypes "github.com/OffchainLabs/prysm/v6/beacon-chain/slasher/types" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" - "github.com/OffchainLabs/prysm/v6/testing/require" + slashertypes "github.com/OffchainLabs/prysm/v7/beacon-chain/slasher/types" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" + "github.com/OffchainLabs/prysm/v7/testing/require" ssz "github.com/prysmaticlabs/fastssz" ) diff --git a/beacon-chain/db/testing/BUILD.bazel b/beacon-chain/db/testing/BUILD.bazel index b79ef547d4..15218051f4 100644 --- a/beacon-chain/db/testing/BUILD.bazel +++ b/beacon-chain/db/testing/BUILD.bazel @@ -4,7 +4,7 @@ go_library( name = "go_default_library", testonly = True, srcs = ["setup_db.go"], - importpath = "github.com/OffchainLabs/prysm/v6/beacon-chain/db/testing", + importpath = "github.com/OffchainLabs/prysm/v7/beacon-chain/db/testing", visibility = [ "//beacon-chain:__subpackages__", "//testing:__subpackages__", diff --git a/beacon-chain/db/testing/setup_db.go b/beacon-chain/db/testing/setup_db.go index dbf6bad795..63bd83ed6f 100644 --- a/beacon-chain/db/testing/setup_db.go +++ b/beacon-chain/db/testing/setup_db.go @@ -6,10 +6,10 @@ import ( "context" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/db" - "github.com/OffchainLabs/prysm/v6/beacon-chain/db/iface" - "github.com/OffchainLabs/prysm/v6/beacon-chain/db/kv" - "github.com/OffchainLabs/prysm/v6/beacon-chain/db/slasherkv" + "github.com/OffchainLabs/prysm/v7/beacon-chain/db" + "github.com/OffchainLabs/prysm/v7/beacon-chain/db/iface" + "github.com/OffchainLabs/prysm/v7/beacon-chain/db/kv" + "github.com/OffchainLabs/prysm/v7/beacon-chain/db/slasherkv" ) // SetupDB instantiates and returns database backed by key value store. 
diff --git a/beacon-chain/execution/BUILD.bazel b/beacon-chain/execution/BUILD.bazel index c2445712c8..7866c9049f 100644 --- a/beacon-chain/execution/BUILD.bazel +++ b/beacon-chain/execution/BUILD.bazel @@ -17,7 +17,7 @@ go_library( "rpc_connection.go", "service.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/beacon-chain/execution", + importpath = "github.com/OffchainLabs/prysm/v7/beacon-chain/execution", visibility = [ "//beacon-chain:__subpackages__", "//cmd/beacon-chain:__subpackages__", diff --git a/beacon-chain/execution/block_cache.go b/beacon-chain/execution/block_cache.go index e567293951..3762e9acce 100644 --- a/beacon-chain/execution/block_cache.go +++ b/beacon-chain/execution/block_cache.go @@ -5,8 +5,8 @@ import ( "math/big" "sync" - "github.com/OffchainLabs/prysm/v6/beacon-chain/execution/types" - "github.com/OffchainLabs/prysm/v6/config/params" + "github.com/OffchainLabs/prysm/v7/beacon-chain/execution/types" + "github.com/OffchainLabs/prysm/v7/config/params" "github.com/ethereum/go-ethereum/common" "github.com/prometheus/client_golang/prometheus" "github.com/prometheus/client_golang/prometheus/promauto" diff --git a/beacon-chain/execution/block_cache_test.go b/beacon-chain/execution/block_cache_test.go index d7fac14f54..5ca4d886db 100644 --- a/beacon-chain/execution/block_cache_test.go +++ b/beacon-chain/execution/block_cache_test.go @@ -4,10 +4,10 @@ import ( "math/big" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/execution/types" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/beacon-chain/execution/types" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" "github.com/ethereum/go-ethereum/common" ) diff --git a/beacon-chain/execution/block_reader.go b/beacon-chain/execution/block_reader.go index c13eef4f34..f3a558dd46 100644 --- a/beacon-chain/execution/block_reader.go +++ b/beacon-chain/execution/block_reader.go @@ -5,10 +5,10 @@ import ( "fmt" "math/big" - "github.com/OffchainLabs/prysm/v6/beacon-chain/execution/types" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing/trace" + "github.com/OffchainLabs/prysm/v7/beacon-chain/execution/types" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing/trace" "github.com/ethereum/go-ethereum/common" "github.com/pkg/errors" ) diff --git a/beacon-chain/execution/block_reader_test.go b/beacon-chain/execution/block_reader_test.go index 9260f3ab98..11cbca4fc8 100644 --- a/beacon-chain/execution/block_reader_test.go +++ b/beacon-chain/execution/block_reader_test.go @@ -6,14 +6,14 @@ import ( "testing" "time" - dbutil "github.com/OffchainLabs/prysm/v6/beacon-chain/db/testing" - mockExecution "github.com/OffchainLabs/prysm/v6/beacon-chain/execution/testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/execution/types" - "github.com/OffchainLabs/prysm/v6/config/params" - contracts "github.com/OffchainLabs/prysm/v6/contracts/deposit" - "github.com/OffchainLabs/prysm/v6/contracts/deposit/mock" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" + dbutil 
"github.com/OffchainLabs/prysm/v7/beacon-chain/db/testing" + mockExecution "github.com/OffchainLabs/prysm/v7/beacon-chain/execution/testing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/execution/types" + "github.com/OffchainLabs/prysm/v7/config/params" + contracts "github.com/OffchainLabs/prysm/v7/contracts/deposit" + "github.com/OffchainLabs/prysm/v7/contracts/deposit/mock" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" "github.com/ethereum/go-ethereum/common" "github.com/ethereum/go-ethereum/common/hexutil" gethTypes "github.com/ethereum/go-ethereum/core/types" diff --git a/beacon-chain/execution/deposit.go b/beacon-chain/execution/deposit.go index 1056b960b8..f874b19500 100644 --- a/beacon-chain/execution/deposit.go +++ b/beacon-chain/execution/deposit.go @@ -3,9 +3,9 @@ package execution import ( "context" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/altair" - "github.com/OffchainLabs/prysm/v6/config/params" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/altair" + "github.com/OffchainLabs/prysm/v7/config/params" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" "github.com/ethereum/go-ethereum/common" "github.com/pkg/errors" ) diff --git a/beacon-chain/execution/deposit_test.go b/beacon-chain/execution/deposit_test.go index f659cdf5e9..7c78f11b5f 100644 --- a/beacon-chain/execution/deposit_test.go +++ b/beacon-chain/execution/deposit_test.go @@ -4,20 +4,20 @@ import ( "fmt" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/signing" - testDB "github.com/OffchainLabs/prysm/v6/beacon-chain/db/testing" - testing2 "github.com/OffchainLabs/prysm/v6/beacon-chain/execution/testing" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/container/trie" - "github.com/OffchainLabs/prysm/v6/crypto/bls" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/signing" + testDB "github.com/OffchainLabs/prysm/v7/beacon-chain/db/testing" + testing2 "github.com/OffchainLabs/prysm/v7/beacon-chain/execution/testing" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/container/trie" + "github.com/OffchainLabs/prysm/v7/crypto/bls" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" logTest "github.com/sirupsen/logrus/hooks/test" ) diff --git a/beacon-chain/execution/engine_client.go b/beacon-chain/execution/engine_client.go index c8bbcd997a..73e41e32e0 100644 --- a/beacon-chain/execution/engine_client.go +++ b/beacon-chain/execution/engine_client.go @@ -7,22 +7,22 @@ import ( "strings" "time" - 
"github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/kzg" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/peerdas" - "github.com/OffchainLabs/prysm/v6/beacon-chain/execution/types" - "github.com/OffchainLabs/prysm/v6/beacon-chain/verification" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - payloadattribute "github.com/OffchainLabs/prysm/v6/consensus-types/payload-attribute" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing/trace" - pb "github.com/OffchainLabs/prysm/v6/proto/engine/v1" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain/kzg" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/peerdas" + "github.com/OffchainLabs/prysm/v7/beacon-chain/execution/types" + "github.com/OffchainLabs/prysm/v7/beacon-chain/verification" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + payloadattribute "github.com/OffchainLabs/prysm/v7/consensus-types/payload-attribute" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing/trace" + pb "github.com/OffchainLabs/prysm/v7/proto/engine/v1" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" + "github.com/OffchainLabs/prysm/v7/time/slots" "github.com/ethereum/go-ethereum" "github.com/ethereum/go-ethereum/common" "github.com/ethereum/go-ethereum/common/hexutil" diff --git a/beacon-chain/execution/engine_client_fuzz_test.go b/beacon-chain/execution/engine_client_fuzz_test.go index 17459ad320..21e8b6bdb3 100644 --- a/beacon-chain/execution/engine_client_fuzz_test.go +++ b/beacon-chain/execution/engine_client_fuzz_test.go @@ -11,9 +11,9 @@ import ( "reflect" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/execution" - pb "github.com/OffchainLabs/prysm/v6/proto/engine/v1" - "github.com/OffchainLabs/prysm/v6/testing/assert" + "github.com/OffchainLabs/prysm/v7/beacon-chain/execution" + pb "github.com/OffchainLabs/prysm/v7/proto/engine/v1" + "github.com/OffchainLabs/prysm/v7/testing/assert" "github.com/ethereum/go-ethereum/beacon/engine" "github.com/ethereum/go-ethereum/common" "github.com/ethereum/go-ethereum/core/types" diff --git a/beacon-chain/execution/engine_client_test.go b/beacon-chain/execution/engine_client_test.go index 649da34914..b7368e81c4 100644 --- a/beacon-chain/execution/engine_client_test.go +++ b/beacon-chain/execution/engine_client_test.go @@ -13,24 +13,24 @@ import ( "strings" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/kzg" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/peerdas" - "github.com/OffchainLabs/prysm/v6/beacon-chain/db/filesystem" - mocks "github.com/OffchainLabs/prysm/v6/beacon-chain/execution/testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/verification" - fieldparams 
"github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - payloadattribute "github.com/OffchainLabs/prysm/v6/consensus-types/payload-attribute" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - pb "github.com/OffchainLabs/prysm/v6/proto/engine/v1" - "github.com/OffchainLabs/prysm/v6/runtime/version" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain/kzg" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/peerdas" + "github.com/OffchainLabs/prysm/v7/beacon-chain/db/filesystem" + mocks "github.com/OffchainLabs/prysm/v7/beacon-chain/execution/testing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/verification" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + payloadattribute "github.com/OffchainLabs/prysm/v7/consensus-types/payload-attribute" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + pb "github.com/OffchainLabs/prysm/v7/proto/engine/v1" + "github.com/OffchainLabs/prysm/v7/runtime/version" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" + "github.com/OffchainLabs/prysm/v7/time/slots" "github.com/ethereum/go-ethereum" "github.com/ethereum/go-ethereum/common" "github.com/ethereum/go-ethereum/common/hexutil" diff --git a/beacon-chain/execution/init_test.go b/beacon-chain/execution/init_test.go index 3123249fa2..2d76c903f9 100644 --- a/beacon-chain/execution/init_test.go +++ b/beacon-chain/execution/init_test.go @@ -1,7 +1,7 @@ package execution import ( - "github.com/OffchainLabs/prysm/v6/config/params" + "github.com/OffchainLabs/prysm/v7/config/params" ) func init() { diff --git a/beacon-chain/execution/log_processing.go b/beacon-chain/execution/log_processing.go index 82910ffe80..1985780f36 100644 --- a/beacon-chain/execution/log_processing.go +++ b/beacon-chain/execution/log_processing.go @@ -8,19 +8,19 @@ import ( "strings" "time" - "github.com/OffchainLabs/prysm/v6/beacon-chain/cache/depositsnapshot" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/feed" - statefeed "github.com/OffchainLabs/prysm/v6/beacon-chain/core/feed/state" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - coreState "github.com/OffchainLabs/prysm/v6/beacon-chain/core/transition" - "github.com/OffchainLabs/prysm/v6/beacon-chain/execution/types" - statenative "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - "github.com/OffchainLabs/prysm/v6/config/params" - contracts "github.com/OffchainLabs/prysm/v6/contracts/deposit" - "github.com/OffchainLabs/prysm/v6/crypto/hash" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/beacon-chain/cache/depositsnapshot" + 
"github.com/OffchainLabs/prysm/v7/beacon-chain/core/feed" + statefeed "github.com/OffchainLabs/prysm/v7/beacon-chain/core/feed/state" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + coreState "github.com/OffchainLabs/prysm/v7/beacon-chain/core/transition" + "github.com/OffchainLabs/prysm/v7/beacon-chain/execution/types" + statenative "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + "github.com/OffchainLabs/prysm/v7/config/params" + contracts "github.com/OffchainLabs/prysm/v7/contracts/deposit" + "github.com/OffchainLabs/prysm/v7/crypto/hash" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/time/slots" "github.com/ethereum/go-ethereum" "github.com/ethereum/go-ethereum/accounts/abi/bind" "github.com/ethereum/go-ethereum/common" diff --git a/beacon-chain/execution/log_processing_test.go b/beacon-chain/execution/log_processing_test.go index 3f50a761de..a3673bc797 100644 --- a/beacon-chain/execution/log_processing_test.go +++ b/beacon-chain/execution/log_processing_test.go @@ -6,18 +6,18 @@ import ( "testing" "time" - "github.com/OffchainLabs/prysm/v6/beacon-chain/cache/depositsnapshot" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/feed" - statefeed "github.com/OffchainLabs/prysm/v6/beacon-chain/core/feed/state" - "github.com/OffchainLabs/prysm/v6/beacon-chain/db" - testDB "github.com/OffchainLabs/prysm/v6/beacon-chain/db/testing" - mockExecution "github.com/OffchainLabs/prysm/v6/beacon-chain/execution/testing" - "github.com/OffchainLabs/prysm/v6/config/params" - contracts "github.com/OffchainLabs/prysm/v6/contracts/deposit" - "github.com/OffchainLabs/prysm/v6/contracts/deposit/mock" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/beacon-chain/cache/depositsnapshot" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/feed" + statefeed "github.com/OffchainLabs/prysm/v7/beacon-chain/core/feed/state" + "github.com/OffchainLabs/prysm/v7/beacon-chain/db" + testDB "github.com/OffchainLabs/prysm/v7/beacon-chain/db/testing" + mockExecution "github.com/OffchainLabs/prysm/v7/beacon-chain/execution/testing" + "github.com/OffchainLabs/prysm/v7/config/params" + contracts "github.com/OffchainLabs/prysm/v7/contracts/deposit" + "github.com/OffchainLabs/prysm/v7/contracts/deposit/mock" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" "github.com/ethereum/go-ethereum" "github.com/ethereum/go-ethereum/common" logTest "github.com/sirupsen/logrus/hooks/test" diff --git a/beacon-chain/execution/mock_test.go b/beacon-chain/execution/mock_test.go index 6db5d1d7d1..500f71c529 100644 --- a/beacon-chain/execution/mock_test.go +++ b/beacon-chain/execution/mock_test.go @@ -6,8 +6,8 @@ import ( "net/http/httptest" "testing" - pb "github.com/OffchainLabs/prysm/v6/proto/engine/v1" - "github.com/OffchainLabs/prysm/v6/testing/require" + pb "github.com/OffchainLabs/prysm/v7/proto/engine/v1" + "github.com/OffchainLabs/prysm/v7/testing/require" "github.com/ethereum/go-ethereum/common/hexutil" "github.com/ethereum/go-ethereum/rpc" ) diff --git a/beacon-chain/execution/options.go b/beacon-chain/execution/options.go index f6fe84caed..7d178671ce 100644 --- a/beacon-chain/execution/options.go +++ b/beacon-chain/execution/options.go @@ -1,14 
+1,14 @@ package execution import ( - "github.com/OffchainLabs/prysm/v6/beacon-chain/cache" - statefeed "github.com/OffchainLabs/prysm/v6/beacon-chain/core/feed/state" - "github.com/OffchainLabs/prysm/v6/beacon-chain/db" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state/stategen" - "github.com/OffchainLabs/prysm/v6/beacon-chain/verification" - "github.com/OffchainLabs/prysm/v6/network" - "github.com/OffchainLabs/prysm/v6/network/authorization" + "github.com/OffchainLabs/prysm/v7/beacon-chain/cache" + statefeed "github.com/OffchainLabs/prysm/v7/beacon-chain/core/feed/state" + "github.com/OffchainLabs/prysm/v7/beacon-chain/db" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state/stategen" + "github.com/OffchainLabs/prysm/v7/beacon-chain/verification" + "github.com/OffchainLabs/prysm/v7/network" + "github.com/OffchainLabs/prysm/v7/network/authorization" "github.com/ethereum/go-ethereum/common" ) diff --git a/beacon-chain/execution/payload_body.go b/beacon-chain/execution/payload_body.go index e048eae7d6..bda68063f5 100644 --- a/beacon-chain/execution/payload_body.go +++ b/beacon-chain/execution/payload_body.go @@ -4,11 +4,11 @@ import ( "context" "sort" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - pb "github.com/OffchainLabs/prysm/v6/proto/engine/v1" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + pb "github.com/OffchainLabs/prysm/v7/proto/engine/v1" "github.com/ethereum/go-ethereum/common" "github.com/ethereum/go-ethereum/common/hexutil" "github.com/pkg/errors" diff --git a/beacon-chain/execution/payload_body_test.go b/beacon-chain/execution/payload_body_test.go index 9ad162a80c..e1b412f516 100644 --- a/beacon-chain/execution/payload_body_test.go +++ b/beacon-chain/execution/payload_body_test.go @@ -4,16 +4,16 @@ import ( "net/http" "testing" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - pb "github.com/OffchainLabs/prysm/v6/proto/engine/v1" - "github.com/OffchainLabs/prysm/v6/runtime/version" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + pb "github.com/OffchainLabs/prysm/v7/proto/engine/v1" + "github.com/OffchainLabs/prysm/v7/runtime/version" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" + "github.com/OffchainLabs/prysm/v7/time/slots" ) func payloadToBody(t *testing.T, ed interfaces.ExecutionData) *pb.ExecutionPayloadBody { diff --git a/beacon-chain/execution/prometheus.go b/beacon-chain/execution/prometheus.go 
index 5332d42021..8d3160e968 100644 --- a/beacon-chain/execution/prometheus.go +++ b/beacon-chain/execution/prometheus.go @@ -4,7 +4,7 @@ import ( "context" "sync" - "github.com/OffchainLabs/prysm/v6/monitoring/clientstats" + "github.com/OffchainLabs/prysm/v7/monitoring/clientstats" "github.com/prometheus/client_golang/prometheus" ) diff --git a/beacon-chain/execution/prometheus_test.go b/beacon-chain/execution/prometheus_test.go index 069414a442..747e61c0d4 100644 --- a/beacon-chain/execution/prometheus_test.go +++ b/beacon-chain/execution/prometheus_test.go @@ -5,7 +5,7 @@ import ( "testing" "time" - "github.com/OffchainLabs/prysm/v6/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/assert" "github.com/prometheus/client_golang/prometheus" ) diff --git a/beacon-chain/execution/rpc_connection.go b/beacon-chain/execution/rpc_connection.go index 7e1d8083e8..4dc25d0d34 100644 --- a/beacon-chain/execution/rpc_connection.go +++ b/beacon-chain/execution/rpc_connection.go @@ -7,11 +7,11 @@ import ( "strings" "time" - "github.com/OffchainLabs/prysm/v6/config/params" - contracts "github.com/OffchainLabs/prysm/v6/contracts/deposit" - "github.com/OffchainLabs/prysm/v6/io/logs" - "github.com/OffchainLabs/prysm/v6/network" - "github.com/OffchainLabs/prysm/v6/network/authorization" + "github.com/OffchainLabs/prysm/v7/config/params" + contracts "github.com/OffchainLabs/prysm/v7/contracts/deposit" + "github.com/OffchainLabs/prysm/v7/io/logs" + "github.com/OffchainLabs/prysm/v7/network" + "github.com/OffchainLabs/prysm/v7/network/authorization" "github.com/ethereum/go-ethereum/ethclient" gethRPC "github.com/ethereum/go-ethereum/rpc" "github.com/pkg/errors" diff --git a/beacon-chain/execution/service.go b/beacon-chain/execution/service.go index 376a572a92..f621fd1d3c 100644 --- a/beacon-chain/execution/service.go +++ b/beacon-chain/execution/service.go @@ -13,27 +13,27 @@ import ( "sync" "time" - "github.com/OffchainLabs/prysm/v6/beacon-chain/cache" - "github.com/OffchainLabs/prysm/v6/beacon-chain/cache/depositsnapshot" - statefeed "github.com/OffchainLabs/prysm/v6/beacon-chain/core/feed/state" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/transition" - "github.com/OffchainLabs/prysm/v6/beacon-chain/db" - "github.com/OffchainLabs/prysm/v6/beacon-chain/execution/types" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state/stategen" - "github.com/OffchainLabs/prysm/v6/beacon-chain/verification" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/container/trie" - contracts "github.com/OffchainLabs/prysm/v6/contracts/deposit" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/monitoring/clientstats" - "github.com/OffchainLabs/prysm/v6/network" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - prysmTime "github.com/OffchainLabs/prysm/v6/time" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/beacon-chain/cache" + "github.com/OffchainLabs/prysm/v7/beacon-chain/cache/depositsnapshot" + statefeed "github.com/OffchainLabs/prysm/v7/beacon-chain/core/feed/state" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/transition" + 
"github.com/OffchainLabs/prysm/v7/beacon-chain/db" + "github.com/OffchainLabs/prysm/v7/beacon-chain/execution/types" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state/stategen" + "github.com/OffchainLabs/prysm/v7/beacon-chain/verification" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/container/trie" + contracts "github.com/OffchainLabs/prysm/v7/contracts/deposit" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/monitoring/clientstats" + "github.com/OffchainLabs/prysm/v7/network" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + prysmTime "github.com/OffchainLabs/prysm/v7/time" + "github.com/OffchainLabs/prysm/v7/time/slots" "github.com/ethereum/go-ethereum/accounts/abi/bind" "github.com/ethereum/go-ethereum/common" "github.com/ethereum/go-ethereum/common/hexutil" diff --git a/beacon-chain/execution/service_test.go b/beacon-chain/execution/service_test.go index 60f2785a70..6586c82ecf 100644 --- a/beacon-chain/execution/service_test.go +++ b/beacon-chain/execution/service_test.go @@ -7,28 +7,28 @@ import ( "testing" "time" - "github.com/OffchainLabs/prysm/v6/async/event" - "github.com/OffchainLabs/prysm/v6/beacon-chain/cache/depositsnapshot" - dbutil "github.com/OffchainLabs/prysm/v6/beacon-chain/db/testing" - mockExecution "github.com/OffchainLabs/prysm/v6/beacon-chain/execution/testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/execution/types" - "github.com/OffchainLabs/prysm/v6/beacon-chain/forkchoice" - doublylinkedtree "github.com/OffchainLabs/prysm/v6/beacon-chain/forkchoice/doubly-linked-tree" - "github.com/OffchainLabs/prysm/v6/beacon-chain/startup" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state/stategen" - "github.com/OffchainLabs/prysm/v6/beacon-chain/verification" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/container/trie" - contracts "github.com/OffchainLabs/prysm/v6/contracts/deposit" - "github.com/OffchainLabs/prysm/v6/contracts/deposit/mock" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/genesis" - "github.com/OffchainLabs/prysm/v6/monitoring/clientstats" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/async/event" + "github.com/OffchainLabs/prysm/v7/beacon-chain/cache/depositsnapshot" + dbutil "github.com/OffchainLabs/prysm/v7/beacon-chain/db/testing" + mockExecution "github.com/OffchainLabs/prysm/v7/beacon-chain/execution/testing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/execution/types" + "github.com/OffchainLabs/prysm/v7/beacon-chain/forkchoice" + doublylinkedtree "github.com/OffchainLabs/prysm/v7/beacon-chain/forkchoice/doubly-linked-tree" + "github.com/OffchainLabs/prysm/v7/beacon-chain/startup" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state/stategen" + "github.com/OffchainLabs/prysm/v7/beacon-chain/verification" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/container/trie" + contracts "github.com/OffchainLabs/prysm/v7/contracts/deposit" + 
"github.com/OffchainLabs/prysm/v7/contracts/deposit/mock" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/genesis" + "github.com/OffchainLabs/prysm/v7/monitoring/clientstats" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" + "github.com/OffchainLabs/prysm/v7/time/slots" "github.com/ethereum/go-ethereum" "github.com/ethereum/go-ethereum/common" "github.com/ethereum/go-ethereum/common/hexutil" diff --git a/beacon-chain/execution/testing/BUILD.bazel b/beacon-chain/execution/testing/BUILD.bazel index a1d1b32849..f1e09ecb4f 100644 --- a/beacon-chain/execution/testing/BUILD.bazel +++ b/beacon-chain/execution/testing/BUILD.bazel @@ -8,7 +8,7 @@ go_library( "mock_execution_chain.go", "mock_faulty_powchain.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/beacon-chain/execution/testing", + importpath = "github.com/OffchainLabs/prysm/v7/beacon-chain/execution/testing", visibility = [ "//visibility:public", ], diff --git a/beacon-chain/execution/testing/mock_engine_client.go b/beacon-chain/execution/testing/mock_engine_client.go index 969a39892a..9f3d66f1e2 100644 --- a/beacon-chain/execution/testing/mock_engine_client.go +++ b/beacon-chain/execution/testing/mock_engine_client.go @@ -4,15 +4,15 @@ import ( "context" "math/big" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/peerdas" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - payloadattribute "github.com/OffchainLabs/prysm/v6/consensus-types/payload-attribute" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - pb "github.com/OffchainLabs/prysm/v6/proto/engine/v1" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/peerdas" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + payloadattribute "github.com/OffchainLabs/prysm/v7/consensus-types/payload-attribute" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + pb "github.com/OffchainLabs/prysm/v7/proto/engine/v1" "github.com/ethereum/go-ethereum/common" "github.com/ethereum/go-ethereum/common/hexutil" "github.com/holiman/uint256" diff --git a/beacon-chain/execution/testing/mock_execution_chain.go b/beacon-chain/execution/testing/mock_execution_chain.go index 6bb158ae01..c1eb76d369 100644 --- a/beacon-chain/execution/testing/mock_execution_chain.go +++ b/beacon-chain/execution/testing/mock_execution_chain.go @@ -9,12 +9,12 @@ import ( "net/http/httptest" "time" - "github.com/OffchainLabs/prysm/v6/async/event" - "github.com/OffchainLabs/prysm/v6/beacon-chain/execution/types" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/async/event" + "github.com/OffchainLabs/prysm/v7/beacon-chain/execution/types" + 
"github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" "github.com/ethereum/go-ethereum/common" "github.com/ethereum/go-ethereum/common/hexutil" gethTypes "github.com/ethereum/go-ethereum/core/types" diff --git a/beacon-chain/execution/testing/mock_faulty_powchain.go b/beacon-chain/execution/testing/mock_faulty_powchain.go index 74c18f3484..b273c9f8d3 100644 --- a/beacon-chain/execution/testing/mock_faulty_powchain.go +++ b/beacon-chain/execution/testing/mock_faulty_powchain.go @@ -4,11 +4,11 @@ import ( "context" "math/big" - "github.com/OffchainLabs/prysm/v6/async/event" - "github.com/OffchainLabs/prysm/v6/beacon-chain/execution/types" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/async/event" + "github.com/OffchainLabs/prysm/v7/beacon-chain/execution/types" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" "github.com/ethereum/go-ethereum/common" "github.com/pkg/errors" ) diff --git a/beacon-chain/execution/types/BUILD.bazel b/beacon-chain/execution/types/BUILD.bazel index b9ac33bfca..254b4fa2f9 100644 --- a/beacon-chain/execution/types/BUILD.bazel +++ b/beacon-chain/execution/types/BUILD.bazel @@ -3,7 +3,7 @@ load("@prysm//tools/go:def.bzl", "go_library", "go_test") go_library( name = "go_default_library", srcs = ["eth1_types.go"], - importpath = "github.com/OffchainLabs/prysm/v6/beacon-chain/execution/types", + importpath = "github.com/OffchainLabs/prysm/v7/beacon-chain/execution/types", visibility = ["//beacon-chain:__subpackages__"], deps = [ "//encoding/bytesutil:go_default_library", diff --git a/beacon-chain/execution/types/eth1_types.go b/beacon-chain/execution/types/eth1_types.go index ea94661260..faf4d0d3f4 100644 --- a/beacon-chain/execution/types/eth1_types.go +++ b/beacon-chain/execution/types/eth1_types.go @@ -5,7 +5,7 @@ import ( "errors" "math/big" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" "github.com/ethereum/go-ethereum/common" "github.com/ethereum/go-ethereum/common/hexutil" ) diff --git a/beacon-chain/execution/types/eth1_types_test.go b/beacon-chain/execution/types/eth1_types_test.go index ef04cf1f4f..fc3674e354 100644 --- a/beacon-chain/execution/types/eth1_types_test.go +++ b/beacon-chain/execution/types/eth1_types_test.go @@ -6,7 +6,7 @@ import ( "reflect" "testing" - "github.com/OffchainLabs/prysm/v6/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/assert" "github.com/ethereum/go-ethereum/common" ) diff --git a/beacon-chain/forkchoice/BUILD.bazel b/beacon-chain/forkchoice/BUILD.bazel index 6f09ed4119..601c4b0e05 100644 --- a/beacon-chain/forkchoice/BUILD.bazel +++ b/beacon-chain/forkchoice/BUILD.bazel @@ -8,7 +8,7 @@ go_library( "interfaces.go", "ro.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/beacon-chain/forkchoice", + importpath = "github.com/OffchainLabs/prysm/v7/beacon-chain/forkchoice", visibility = [ "//beacon-chain:__subpackages__", "//cmd:__subpackages__", diff --git a/beacon-chain/forkchoice/doubly-linked-tree/BUILD.bazel 
b/beacon-chain/forkchoice/doubly-linked-tree/BUILD.bazel index 8843d891de..619c9fc046 100644 --- a/beacon-chain/forkchoice/doubly-linked-tree/BUILD.bazel +++ b/beacon-chain/forkchoice/doubly-linked-tree/BUILD.bazel @@ -17,7 +17,7 @@ go_library( "types.go", "unrealized_justification.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/beacon-chain/forkchoice/doubly-linked-tree", + importpath = "github.com/OffchainLabs/prysm/v7/beacon-chain/forkchoice/doubly-linked-tree", visibility = [ "//beacon-chain:__subpackages__", "//testing/spectest:__subpackages__", diff --git a/beacon-chain/forkchoice/doubly-linked-tree/ffg_update_test.go b/beacon-chain/forkchoice/doubly-linked-tree/ffg_update_test.go index 466c022a6a..5d6cfa9ad7 100644 --- a/beacon-chain/forkchoice/doubly-linked-tree/ffg_update_test.go +++ b/beacon-chain/forkchoice/doubly-linked-tree/ffg_update_test.go @@ -4,11 +4,11 @@ import ( "context" "testing" - forkchoicetypes "github.com/OffchainLabs/prysm/v6/beacon-chain/forkchoice/types" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" + forkchoicetypes "github.com/OffchainLabs/prysm/v7/beacon-chain/forkchoice/types" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" ) func TestFFGUpdates_OneBranch(t *testing.T) { diff --git a/beacon-chain/forkchoice/doubly-linked-tree/forkchoice.go b/beacon-chain/forkchoice/doubly-linked-tree/forkchoice.go index 0118e16fc1..516a3faf0f 100644 --- a/beacon-chain/forkchoice/doubly-linked-tree/forkchoice.go +++ b/beacon-chain/forkchoice/doubly-linked-tree/forkchoice.go @@ -5,19 +5,19 @@ import ( "fmt" "time" - "github.com/OffchainLabs/prysm/v6/beacon-chain/forkchoice" - forkchoicetypes "github.com/OffchainLabs/prysm/v6/beacon-chain/forkchoice/types" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/config/features" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - consensus_blocks "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - forkchoice2 "github.com/OffchainLabs/prysm/v6/consensus-types/forkchoice" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing/trace" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/beacon-chain/forkchoice" + forkchoicetypes "github.com/OffchainLabs/prysm/v7/beacon-chain/forkchoice/types" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/config/features" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + consensus_blocks "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + forkchoice2 "github.com/OffchainLabs/prysm/v7/consensus-types/forkchoice" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing/trace" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/time/slots" 
"github.com/pkg/errors" "github.com/sirupsen/logrus" ) diff --git a/beacon-chain/forkchoice/doubly-linked-tree/forkchoice_test.go b/beacon-chain/forkchoice/doubly-linked-tree/forkchoice_test.go index a2b97a7051..107ead8ff8 100644 --- a/beacon-chain/forkchoice/doubly-linked-tree/forkchoice_test.go +++ b/beacon-chain/forkchoice/doubly-linked-tree/forkchoice_test.go @@ -6,19 +6,19 @@ import ( "errors" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/forkchoice" - forkchoicetypes "github.com/OffchainLabs/prysm/v6/beacon-chain/forkchoice/types" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/crypto/hash" - enginev1 "github.com/OffchainLabs/prysm/v6/proto/engine/v1" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/beacon-chain/forkchoice" + forkchoicetypes "github.com/OffchainLabs/prysm/v7/beacon-chain/forkchoice/types" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/crypto/hash" + enginev1 "github.com/OffchainLabs/prysm/v7/proto/engine/v1" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" ) // prepareForkchoiceState prepares a beacon State with the given data to mock diff --git a/beacon-chain/forkchoice/doubly-linked-tree/last_root.go b/beacon-chain/forkchoice/doubly-linked-tree/last_root.go index aeb4685af0..4a5b26a2a8 100644 --- a/beacon-chain/forkchoice/doubly-linked-tree/last_root.go +++ b/beacon-chain/forkchoice/doubly-linked-tree/last_root.go @@ -1,8 +1,8 @@ package doublylinkedtree import ( - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/time/slots" ) // LastRoot returns the last canonical block root in the given epoch diff --git a/beacon-chain/forkchoice/doubly-linked-tree/last_root_test.go b/beacon-chain/forkchoice/doubly-linked-tree/last_root_test.go index 006b813e1f..2ddbccdbe6 100644 --- a/beacon-chain/forkchoice/doubly-linked-tree/last_root_test.go +++ b/beacon-chain/forkchoice/doubly-linked-tree/last_root_test.go @@ -3,8 +3,8 @@ package doublylinkedtree import ( "testing" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/testing/require" ) func TestLastRoot(t *testing.T) { diff --git a/beacon-chain/forkchoice/doubly-linked-tree/no_vote_test.go b/beacon-chain/forkchoice/doubly-linked-tree/no_vote_test.go index bc1897e3f6..537188f001 100644 --- a/beacon-chain/forkchoice/doubly-linked-tree/no_vote_test.go +++ 
b/beacon-chain/forkchoice/doubly-linked-tree/no_vote_test.go @@ -3,9 +3,9 @@ package doublylinkedtree import ( "testing" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" ) func TestNoVote_CanFindHead(t *testing.T) { diff --git a/beacon-chain/forkchoice/doubly-linked-tree/node.go b/beacon-chain/forkchoice/doubly-linked-tree/node.go index 811adbe835..8ac6884d1b 100644 --- a/beacon-chain/forkchoice/doubly-linked-tree/node.go +++ b/beacon-chain/forkchoice/doubly-linked-tree/node.go @@ -5,10 +5,10 @@ import ( "context" "time" - "github.com/OffchainLabs/prysm/v6/config/params" - forkchoice2 "github.com/OffchainLabs/prysm/v6/consensus-types/forkchoice" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/config/params" + forkchoice2 "github.com/OffchainLabs/prysm/v7/consensus-types/forkchoice" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/time/slots" "github.com/pkg/errors" ) diff --git a/beacon-chain/forkchoice/doubly-linked-tree/node_test.go b/beacon-chain/forkchoice/doubly-linked-tree/node_test.go index 72c83512d9..fe21705f06 100644 --- a/beacon-chain/forkchoice/doubly-linked-tree/node_test.go +++ b/beacon-chain/forkchoice/doubly-linked-tree/node_test.go @@ -4,11 +4,11 @@ import ( "testing" "time" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/forkchoice" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/forkchoice" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" ) func TestNode_ApplyWeightChanges_PositiveChange(t *testing.T) { diff --git a/beacon-chain/forkchoice/doubly-linked-tree/on_tick.go b/beacon-chain/forkchoice/doubly-linked-tree/on_tick.go index 71cbbea413..dcc1b39a7b 100644 --- a/beacon-chain/forkchoice/doubly-linked-tree/on_tick.go +++ b/beacon-chain/forkchoice/doubly-linked-tree/on_tick.go @@ -3,8 +3,8 @@ package doublylinkedtree import ( "context" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/time/slots" "github.com/pkg/errors" ) diff --git a/beacon-chain/forkchoice/doubly-linked-tree/on_tick_test.go b/beacon-chain/forkchoice/doubly-linked-tree/on_tick_test.go index ce2930dd07..0a14dea494 100644 --- a/beacon-chain/forkchoice/doubly-linked-tree/on_tick_test.go +++ b/beacon-chain/forkchoice/doubly-linked-tree/on_tick_test.go @@ -3,10 +3,10 @@ package doublylinkedtree import ( "testing" - forkchoicetypes "github.com/OffchainLabs/prysm/v6/beacon-chain/forkchoice/types" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/testing/require" + forkchoicetypes "github.com/OffchainLabs/prysm/v7/beacon-chain/forkchoice/types" + 
"github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/testing/require" ) func TestStore_NewSlot(t *testing.T) { diff --git a/beacon-chain/forkchoice/doubly-linked-tree/optimistic_sync.go b/beacon-chain/forkchoice/doubly-linked-tree/optimistic_sync.go index 0049bbee62..c70fdb7a3f 100644 --- a/beacon-chain/forkchoice/doubly-linked-tree/optimistic_sync.go +++ b/beacon-chain/forkchoice/doubly-linked-tree/optimistic_sync.go @@ -3,7 +3,7 @@ package doublylinkedtree import ( "context" - "github.com/OffchainLabs/prysm/v6/config/params" + "github.com/OffchainLabs/prysm/v7/config/params" "github.com/pkg/errors" ) diff --git a/beacon-chain/forkchoice/doubly-linked-tree/optimistic_sync_test.go b/beacon-chain/forkchoice/doubly-linked-tree/optimistic_sync_test.go index 8bcc44a539..3bc6f44b70 100644 --- a/beacon-chain/forkchoice/doubly-linked-tree/optimistic_sync_test.go +++ b/beacon-chain/forkchoice/doubly-linked-tree/optimistic_sync_test.go @@ -4,9 +4,9 @@ import ( "sort" "testing" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/testing/require" ) // We test the algorithm to update a node from SYNCING to INVALID diff --git a/beacon-chain/forkchoice/doubly-linked-tree/proposer_boost.go b/beacon-chain/forkchoice/doubly-linked-tree/proposer_boost.go index 6857d7ee4b..41b20185a0 100644 --- a/beacon-chain/forkchoice/doubly-linked-tree/proposer_boost.go +++ b/beacon-chain/forkchoice/doubly-linked-tree/proposer_boost.go @@ -1,8 +1,8 @@ package doublylinkedtree import ( - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" ) // applyProposerBoostScore applies the current proposer boost scores to the diff --git a/beacon-chain/forkchoice/doubly-linked-tree/proposer_boost_test.go b/beacon-chain/forkchoice/doubly-linked-tree/proposer_boost_test.go index 4ed14ddb81..fe8e12e18c 100644 --- a/beacon-chain/forkchoice/doubly-linked-tree/proposer_boost_test.go +++ b/beacon-chain/forkchoice/doubly-linked-tree/proposer_boost_test.go @@ -4,10 +4,10 @@ import ( "testing" "time" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" ) // Helper function to simulate the block being on time or delayed for proposer diff --git a/beacon-chain/forkchoice/doubly-linked-tree/reorg_late_blocks.go b/beacon-chain/forkchoice/doubly-linked-tree/reorg_late_blocks.go index 2cc784e3fc..26e9a6c5bd 100644 --- a/beacon-chain/forkchoice/doubly-linked-tree/reorg_late_blocks.go +++ b/beacon-chain/forkchoice/doubly-linked-tree/reorg_late_blocks.go @@ -3,8 +3,8 @@ package doublylinkedtree import ( "time" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/time/slots" + 
"github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/time/slots" ) // orphanLateBlockProposingEarly determines the maximum threshold that we diff --git a/beacon-chain/forkchoice/doubly-linked-tree/reorg_late_blocks_test.go b/beacon-chain/forkchoice/doubly-linked-tree/reorg_late_blocks_test.go index d5f7bd65df..a828e10a3b 100644 --- a/beacon-chain/forkchoice/doubly-linked-tree/reorg_late_blocks_test.go +++ b/beacon-chain/forkchoice/doubly-linked-tree/reorg_late_blocks_test.go @@ -4,8 +4,8 @@ import ( "testing" "time" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/testing/require" ) func TestForkChoice_ShouldOverrideFCU(t *testing.T) { diff --git a/beacon-chain/forkchoice/doubly-linked-tree/store.go b/beacon-chain/forkchoice/doubly-linked-tree/store.go index 95d58669ea..c0d314df73 100644 --- a/beacon-chain/forkchoice/doubly-linked-tree/store.go +++ b/beacon-chain/forkchoice/doubly-linked-tree/store.go @@ -5,13 +5,13 @@ import ( "fmt" "time" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - consensus_blocks "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing/trace" - "github.com/OffchainLabs/prysm/v6/runtime/version" - "github.com/OffchainLabs/prysm/v6/time/slots" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + consensus_blocks "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing/trace" + "github.com/OffchainLabs/prysm/v7/runtime/version" + "github.com/OffchainLabs/prysm/v7/time/slots" "github.com/pkg/errors" ) diff --git a/beacon-chain/forkchoice/doubly-linked-tree/store_test.go b/beacon-chain/forkchoice/doubly-linked-tree/store_test.go index fb69496d1f..a501a261d0 100644 --- a/beacon-chain/forkchoice/doubly-linked-tree/store_test.go +++ b/beacon-chain/forkchoice/doubly-linked-tree/store_test.go @@ -5,11 +5,11 @@ import ( "testing" "time" - forkchoicetypes "github.com/OffchainLabs/prysm/v6/beacon-chain/forkchoice/types" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" + forkchoicetypes "github.com/OffchainLabs/prysm/v7/beacon-chain/forkchoice/types" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" ) func TestStore_JustifiedEpoch(t *testing.T) { diff --git a/beacon-chain/forkchoice/doubly-linked-tree/types.go b/beacon-chain/forkchoice/doubly-linked-tree/types.go index a0faf6a88e..3db93944af 100644 --- a/beacon-chain/forkchoice/doubly-linked-tree/types.go +++ b/beacon-chain/forkchoice/doubly-linked-tree/types.go @@ -4,10 +4,10 @@ import ( "sync" "time" - "github.com/OffchainLabs/prysm/v6/beacon-chain/forkchoice" - forkchoicetypes "github.com/OffchainLabs/prysm/v6/beacon-chain/forkchoice/types" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - 
"github.com/OffchainLabs/prysm/v6/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/beacon-chain/forkchoice" + forkchoicetypes "github.com/OffchainLabs/prysm/v7/beacon-chain/forkchoice/types" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" ) // ForkChoice defines the overall fork choice store which includes all block nodes, validator's latest votes and balances. diff --git a/beacon-chain/forkchoice/doubly-linked-tree/unrealized_justification.go b/beacon-chain/forkchoice/doubly-linked-tree/unrealized_justification.go index 1949d9daab..0900a61e14 100644 --- a/beacon-chain/forkchoice/doubly-linked-tree/unrealized_justification.go +++ b/beacon-chain/forkchoice/doubly-linked-tree/unrealized_justification.go @@ -3,14 +3,14 @@ package doublylinkedtree import ( "context" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/epoch/precompute" - forkchoicetypes "github.com/OffchainLabs/prysm/v6/beacon-chain/forkchoice/types" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/epoch/precompute" + forkchoicetypes "github.com/OffchainLabs/prysm/v7/beacon-chain/forkchoice/types" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/time/slots" "github.com/pkg/errors" ) diff --git a/beacon-chain/forkchoice/doubly-linked-tree/unrealized_justification_test.go b/beacon-chain/forkchoice/doubly-linked-tree/unrealized_justification_test.go index 2bbef62806..f8ee9330b0 100644 --- a/beacon-chain/forkchoice/doubly-linked-tree/unrealized_justification_test.go +++ b/beacon-chain/forkchoice/doubly-linked-tree/unrealized_justification_test.go @@ -3,11 +3,11 @@ package doublylinkedtree import ( "testing" - forkchoicetypes "github.com/OffchainLabs/prysm/v6/beacon-chain/forkchoice/types" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/time/slots" + forkchoicetypes "github.com/OffchainLabs/prysm/v7/beacon-chain/forkchoice/types" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/time/slots" ) func TestStore_SetUnrealizedEpochs(t *testing.T) { diff --git a/beacon-chain/forkchoice/doubly-linked-tree/vote_test.go b/beacon-chain/forkchoice/doubly-linked-tree/vote_test.go index 7ad3e4c3a4..c3e6e77eac 100644 --- a/beacon-chain/forkchoice/doubly-linked-tree/vote_test.go +++ b/beacon-chain/forkchoice/doubly-linked-tree/vote_test.go @@ -3,9 +3,9 @@ package doublylinkedtree import ( "testing" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/config/params" + 
"github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" ) func TestVotes_CanFindHead(t *testing.T) { diff --git a/beacon-chain/forkchoice/interfaces.go b/beacon-chain/forkchoice/interfaces.go index 4fb194f827..11339520f1 100644 --- a/beacon-chain/forkchoice/interfaces.go +++ b/beacon-chain/forkchoice/interfaces.go @@ -4,12 +4,12 @@ import ( "context" "time" - forkchoicetypes "github.com/OffchainLabs/prysm/v6/beacon-chain/forkchoice/types" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - consensus_blocks "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - forkchoice2 "github.com/OffchainLabs/prysm/v6/consensus-types/forkchoice" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" + forkchoicetypes "github.com/OffchainLabs/prysm/v7/beacon-chain/forkchoice/types" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + consensus_blocks "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + forkchoice2 "github.com/OffchainLabs/prysm/v7/consensus-types/forkchoice" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" ) // BalancesByRooter is a handler to obtain the effective balances of the state diff --git a/beacon-chain/forkchoice/ro.go b/beacon-chain/forkchoice/ro.go index 55accf3ac5..5f010ff22f 100644 --- a/beacon-chain/forkchoice/ro.go +++ b/beacon-chain/forkchoice/ro.go @@ -1,9 +1,9 @@ package forkchoice import ( - forkchoicetypes "github.com/OffchainLabs/prysm/v6/beacon-chain/forkchoice/types" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" + forkchoicetypes "github.com/OffchainLabs/prysm/v7/beacon-chain/forkchoice/types" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" ) // ROForkChoice is an implementation of forkchoice.Getter which calls `Rlock`/`RUnlock` diff --git a/beacon-chain/forkchoice/ro_test.go b/beacon-chain/forkchoice/ro_test.go index 94d2b178b2..257edf658d 100644 --- a/beacon-chain/forkchoice/ro_test.go +++ b/beacon-chain/forkchoice/ro_test.go @@ -4,10 +4,10 @@ import ( "io" "testing" - forkchoicetypes "github.com/OffchainLabs/prysm/v6/beacon-chain/forkchoice/types" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/testing/require" + forkchoicetypes "github.com/OffchainLabs/prysm/v7/beacon-chain/forkchoice/types" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/testing/require" ) type mockCall int diff --git a/beacon-chain/forkchoice/types/BUILD.bazel b/beacon-chain/forkchoice/types/BUILD.bazel index 0f97212d29..f2630876ea 100644 --- a/beacon-chain/forkchoice/types/BUILD.bazel +++ b/beacon-chain/forkchoice/types/BUILD.bazel @@ -3,7 +3,7 @@ load("@prysm//tools/go:def.bzl", "go_library") go_library( name = "go_default_library", srcs = ["types.go"], - importpath = "github.com/OffchainLabs/prysm/v6/beacon-chain/forkchoice/types", + importpath = "github.com/OffchainLabs/prysm/v7/beacon-chain/forkchoice/types", visibility = ["//visibility:public"], deps = [ "//config/fieldparams:go_default_library", diff --git a/beacon-chain/forkchoice/types/types.go 
b/beacon-chain/forkchoice/types/types.go index afb4042b58..98cded790f 100644 --- a/beacon-chain/forkchoice/types/types.go +++ b/beacon-chain/forkchoice/types/types.go @@ -1,10 +1,10 @@ package types import ( - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - consensus_blocks "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + consensus_blocks "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" ) // Checkpoint is an array version of ethpb.Checkpoint. It is used internally in diff --git a/beacon-chain/light-client/BUILD.bazel b/beacon-chain/light-client/BUILD.bazel index 06d1da2643..30969888a9 100644 --- a/beacon-chain/light-client/BUILD.bazel +++ b/beacon-chain/light-client/BUILD.bazel @@ -9,7 +9,7 @@ go_library( "log.go", "store.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/beacon-chain/light-client", + importpath = "github.com/OffchainLabs/prysm/v7/beacon-chain/light-client", visibility = ["//visibility:public"], deps = [ "//async/event:go_default_library", diff --git a/beacon-chain/light-client/cache.go b/beacon-chain/light-client/cache.go index 69c0bdbfb4..b06775a808 100644 --- a/beacon-chain/light-client/cache.go +++ b/beacon-chain/light-client/cache.go @@ -1,8 +1,8 @@ package light_client import ( - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" ) // cache tracks LC data over the non finalized chain for different branches. 
diff --git a/beacon-chain/light-client/cache_test.go b/beacon-chain/light-client/cache_test.go index d537756181..4087b61948 100644 --- a/beacon-chain/light-client/cache_test.go +++ b/beacon-chain/light-client/cache_test.go @@ -3,7 +3,7 @@ package light_client import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/require" ) func TestLCCache(t *testing.T) { diff --git a/beacon-chain/light-client/helpers.go b/beacon-chain/light-client/helpers.go index c2d0ab2d59..ef37bd082a 100644 --- a/beacon-chain/light-client/helpers.go +++ b/beacon-chain/light-client/helpers.go @@ -4,17 +4,17 @@ import ( "context" "fmt" - "github.com/OffchainLabs/prysm/v6/beacon-chain/execution" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - light_client "github.com/OffchainLabs/prysm/v6/consensus-types/light-client" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - enginev1 "github.com/OffchainLabs/prysm/v6/proto/engine/v1" - pb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/beacon-chain/execution" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + light_client "github.com/OffchainLabs/prysm/v7/consensus-types/light-client" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + enginev1 "github.com/OffchainLabs/prysm/v7/proto/engine/v1" + pb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" + "github.com/OffchainLabs/prysm/v7/time/slots" "github.com/pkg/errors" "google.golang.org/protobuf/proto" ) diff --git a/beacon-chain/light-client/lightclient.go b/beacon-chain/light-client/lightclient.go index 4b3dcf53ea..7e6063c5b4 100644 --- a/beacon-chain/light-client/lightclient.go +++ b/beacon-chain/light-client/lightclient.go @@ -6,18 +6,18 @@ import ( "fmt" "reflect" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - consensus_types "github.com/OffchainLabs/prysm/v6/consensus-types" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - light_client "github.com/OffchainLabs/prysm/v6/consensus-types/light-client" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/encoding/ssz" - enginev1 "github.com/OffchainLabs/prysm/v6/proto/engine/v1" - pb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + consensus_types "github.com/OffchainLabs/prysm/v7/consensus-types" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + light_client "github.com/OffchainLabs/prysm/v7/consensus-types/light-client" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + 
"github.com/OffchainLabs/prysm/v7/encoding/ssz" + enginev1 "github.com/OffchainLabs/prysm/v7/proto/engine/v1" + pb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" + "github.com/OffchainLabs/prysm/v7/time/slots" "github.com/pkg/errors" "google.golang.org/protobuf/proto" ) diff --git a/beacon-chain/light-client/lightclient_test.go b/beacon-chain/light-client/lightclient_test.go index eef1ec05b8..3fc490a9b8 100644 --- a/beacon-chain/light-client/lightclient_test.go +++ b/beacon-chain/light-client/lightclient_test.go @@ -5,19 +5,19 @@ import ( "strings" "testing" - "github.com/OffchainLabs/prysm/v6/config/params" - light_client "github.com/OffchainLabs/prysm/v6/consensus-types/light-client" - "github.com/OffchainLabs/prysm/v6/runtime/version" + "github.com/OffchainLabs/prysm/v7/config/params" + light_client "github.com/OffchainLabs/prysm/v7/consensus-types/light-client" + "github.com/OffchainLabs/prysm/v7/runtime/version" - lightClient "github.com/OffchainLabs/prysm/v6/beacon-chain/light-client" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - consensustypes "github.com/OffchainLabs/prysm/v6/consensus-types" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/encoding/ssz" - v11 "github.com/OffchainLabs/prysm/v6/proto/engine/v1" - pb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + lightClient "github.com/OffchainLabs/prysm/v7/beacon-chain/light-client" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + consensustypes "github.com/OffchainLabs/prysm/v7/consensus-types" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/encoding/ssz" + v11 "github.com/OffchainLabs/prysm/v7/proto/engine/v1" + pb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" "github.com/pkg/errors" ) diff --git a/beacon-chain/light-client/store.go b/beacon-chain/light-client/store.go index fecce9913d..74da9f343f 100644 --- a/beacon-chain/light-client/store.go +++ b/beacon-chain/light-client/store.go @@ -4,15 +4,15 @@ import ( "context" "sync" - "github.com/OffchainLabs/prysm/v6/async/event" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/feed" - statefeed "github.com/OffchainLabs/prysm/v6/beacon-chain/core/feed/state" - "github.com/OffchainLabs/prysm/v6/beacon-chain/db/iface" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/async/event" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/feed" + statefeed "github.com/OffchainLabs/prysm/v7/beacon-chain/core/feed/state" + "github.com/OffchainLabs/prysm/v7/beacon-chain/db/iface" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/time/slots" "github.com/pkg/errors" ) diff --git a/beacon-chain/light-client/store_test.go b/beacon-chain/light-client/store_test.go index 
9808cd353d..06bd48f9d3 100644 --- a/beacon-chain/light-client/store_test.go +++ b/beacon-chain/light-client/store_test.go @@ -5,19 +5,19 @@ import ( "testing" "time" - "github.com/OffchainLabs/prysm/v6/async/event" - "github.com/OffchainLabs/prysm/v6/beacon-chain/db" - testDB "github.com/OffchainLabs/prysm/v6/beacon-chain/db/testing" - p2pTesting "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/testing" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/async/event" + "github.com/OffchainLabs/prysm/v7/beacon-chain/db" + testDB "github.com/OffchainLabs/prysm/v7/beacon-chain/db/testing" + p2pTesting "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/testing" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" + "github.com/OffchainLabs/prysm/v7/time/slots" ) func TestLightClientStore(t *testing.T) { diff --git a/beacon-chain/monitor/BUILD.bazel b/beacon-chain/monitor/BUILD.bazel index 7b89f229c2..de5b433c31 100644 --- a/beacon-chain/monitor/BUILD.bazel +++ b/beacon-chain/monitor/BUILD.bazel @@ -11,7 +11,7 @@ go_library( "process_sync_committee.go", "service.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/beacon-chain/monitor", + importpath = "github.com/OffchainLabs/prysm/v7/beacon-chain/monitor", visibility = ["//beacon-chain:__subpackages__"], deps = [ "//async/event:go_default_library", diff --git a/beacon-chain/monitor/process_attestation.go b/beacon-chain/monitor/process_attestation.go index d2747840c6..9fa53e221c 100644 --- a/beacon-chain/monitor/process_attestation.go +++ b/beacon-chain/monitor/process_attestation.go @@ -4,17 +4,17 @@ import ( "context" "fmt" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/altair" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1/attestation" - "github.com/OffchainLabs/prysm/v6/runtime/version" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/altair" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + 
"github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1/attestation" + "github.com/OffchainLabs/prysm/v7/runtime/version" + "github.com/OffchainLabs/prysm/v7/time/slots" "github.com/sirupsen/logrus" ) diff --git a/beacon-chain/monitor/process_attestation_test.go b/beacon-chain/monitor/process_attestation_test.go index 03bdad84ed..fcd4665e1c 100644 --- a/beacon-chain/monitor/process_attestation_test.go +++ b/beacon-chain/monitor/process_attestation_test.go @@ -5,11 +5,11 @@ import ( "testing" "github.com/OffchainLabs/go-bitfield" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" "github.com/sirupsen/logrus" logTest "github.com/sirupsen/logrus/hooks/test" ) diff --git a/beacon-chain/monitor/process_block.go b/beacon-chain/monitor/process_block.go index c57ea5418b..926fdd20ae 100644 --- a/beacon-chain/monitor/process_block.go +++ b/beacon-chain/monitor/process_block.go @@ -4,13 +4,13 @@ import ( "context" "fmt" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/blocks" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/blocks" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/time/slots" "github.com/sirupsen/logrus" ) diff --git a/beacon-chain/monitor/process_block_test.go b/beacon-chain/monitor/process_block_test.go index e1d30a8013..7000d5eac2 100644 --- a/beacon-chain/monitor/process_block_test.go +++ b/beacon-chain/monitor/process_block_test.go @@ -4,14 +4,14 @@ import ( "fmt" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/altair" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/altair" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/require" + 
"github.com/OffchainLabs/prysm/v7/testing/util" logTest "github.com/sirupsen/logrus/hooks/test" ) diff --git a/beacon-chain/monitor/process_exit.go b/beacon-chain/monitor/process_exit.go index 5f87b7afb6..12e21ae5c4 100644 --- a/beacon-chain/monitor/process_exit.go +++ b/beacon-chain/monitor/process_exit.go @@ -1,8 +1,8 @@ package monitor import ( - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" "github.com/sirupsen/logrus" ) diff --git a/beacon-chain/monitor/process_exit_test.go b/beacon-chain/monitor/process_exit_test.go index 82fa023f57..0375463c13 100644 --- a/beacon-chain/monitor/process_exit_test.go +++ b/beacon-chain/monitor/process_exit_test.go @@ -3,10 +3,10 @@ package monitor import ( "testing" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/require" logTest "github.com/sirupsen/logrus/hooks/test" ) diff --git a/beacon-chain/monitor/process_sync_committee.go b/beacon-chain/monitor/process_sync_committee.go index b4a7a64b30..9f4c8c7558 100644 --- a/beacon-chain/monitor/process_sync_committee.go +++ b/beacon-chain/monitor/process_sync_committee.go @@ -3,10 +3,10 @@ package monitor import ( "fmt" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" "github.com/sirupsen/logrus" ) diff --git a/beacon-chain/monitor/process_sync_committee_test.go b/beacon-chain/monitor/process_sync_committee_test.go index 35848ed0ad..a1365f9aa5 100644 --- a/beacon-chain/monitor/process_sync_committee_test.go +++ b/beacon-chain/monitor/process_sync_committee_test.go @@ -4,10 +4,10 @@ import ( "testing" "github.com/OffchainLabs/go-bitfield" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" logTest "github.com/sirupsen/logrus/hooks/test" ) diff --git a/beacon-chain/monitor/service.go b/beacon-chain/monitor/service.go index 68e0675488..7674bb2d76 100644 --- a/beacon-chain/monitor/service.go +++ b/beacon-chain/monitor/service.go @@ -6,16 +6,16 @@ import ( "sort" "sync" - "github.com/OffchainLabs/prysm/v6/async/event" - "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/feed" - 
"github.com/OffchainLabs/prysm/v6/beacon-chain/core/feed/operation" - statefeed "github.com/OffchainLabs/prysm/v6/beacon-chain/core/feed/state" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state/stategen" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/async/event" + "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/feed" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/feed/operation" + statefeed "github.com/OffchainLabs/prysm/v7/beacon-chain/core/feed/state" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state/stategen" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/time/slots" "github.com/sirupsen/logrus" ) diff --git a/beacon-chain/monitor/service_test.go b/beacon-chain/monitor/service_test.go index abc75f4bfc..e4c4d84243 100644 --- a/beacon-chain/monitor/service_test.go +++ b/beacon-chain/monitor/service_test.go @@ -7,19 +7,19 @@ import ( "testing" "time" - mock "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/altair" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/feed" - statefeed "github.com/OffchainLabs/prysm/v6/beacon-chain/core/feed/state" - testDB "github.com/OffchainLabs/prysm/v6/beacon-chain/db/testing" - doublylinkedtree "github.com/OffchainLabs/prysm/v6/beacon-chain/forkchoice/doubly-linked-tree" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state/stategen" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" - "github.com/OffchainLabs/prysm/v6/time/slots" + mock "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain/testing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/altair" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/feed" + statefeed "github.com/OffchainLabs/prysm/v7/beacon-chain/core/feed/state" + testDB "github.com/OffchainLabs/prysm/v7/beacon-chain/db/testing" + doublylinkedtree "github.com/OffchainLabs/prysm/v7/beacon-chain/forkchoice/doubly-linked-tree" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state/stategen" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" + "github.com/OffchainLabs/prysm/v7/time/slots" logTest "github.com/sirupsen/logrus/hooks/test" ) diff --git a/beacon-chain/node/BUILD.bazel b/beacon-chain/node/BUILD.bazel index 560969063b..71b7556b69 100644 --- a/beacon-chain/node/BUILD.bazel +++ b/beacon-chain/node/BUILD.bazel @@ -10,7 +10,7 @@ go_library( "options.go", "prometheus.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/beacon-chain/node", + importpath = "github.com/OffchainLabs/prysm/v7/beacon-chain/node", visibility = [ "//beacon-chain:__subpackages__", "//cmd/beacon-chain:__subpackages__", diff --git 
a/beacon-chain/node/clear_db.go b/beacon-chain/node/clear_db.go index 9476e113c2..4c167a6417 100644 --- a/beacon-chain/node/clear_db.go +++ b/beacon-chain/node/clear_db.go @@ -4,11 +4,11 @@ import ( "context" "os" - "github.com/OffchainLabs/prysm/v6/beacon-chain/db/filesystem" - "github.com/OffchainLabs/prysm/v6/beacon-chain/db/kv" - "github.com/OffchainLabs/prysm/v6/beacon-chain/db/slasherkv" - "github.com/OffchainLabs/prysm/v6/cmd" - "github.com/OffchainLabs/prysm/v6/genesis" + "github.com/OffchainLabs/prysm/v7/beacon-chain/db/filesystem" + "github.com/OffchainLabs/prysm/v7/beacon-chain/db/kv" + "github.com/OffchainLabs/prysm/v7/beacon-chain/db/slasherkv" + "github.com/OffchainLabs/prysm/v7/cmd" + "github.com/OffchainLabs/prysm/v7/genesis" "github.com/pkg/errors" "github.com/urfave/cli/v2" ) diff --git a/beacon-chain/node/config.go b/beacon-chain/node/config.go index bc6b5e3d4b..e506687513 100644 --- a/beacon-chain/node/config.go +++ b/beacon-chain/node/config.go @@ -3,11 +3,11 @@ package node import ( "fmt" - "github.com/OffchainLabs/prysm/v6/cmd" - "github.com/OffchainLabs/prysm/v6/cmd/beacon-chain/flags" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing" + "github.com/OffchainLabs/prysm/v7/cmd" + "github.com/OffchainLabs/prysm/v7/cmd/beacon-chain/flags" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing" "github.com/ethereum/go-ethereum/common" "github.com/urfave/cli/v2" ) diff --git a/beacon-chain/node/config_test.go b/beacon-chain/node/config_test.go index 3982f822db..f03e106a62 100644 --- a/beacon-chain/node/config_test.go +++ b/beacon-chain/node/config_test.go @@ -8,12 +8,12 @@ import ( "strings" "testing" - "github.com/OffchainLabs/prysm/v6/cmd" - "github.com/OffchainLabs/prysm/v6/cmd/beacon-chain/flags" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/cmd" + "github.com/OffchainLabs/prysm/v7/cmd/beacon-chain/flags" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" "github.com/ethereum/go-ethereum/common" logTest "github.com/sirupsen/logrus/hooks/test" "github.com/urfave/cli/v2" diff --git a/beacon-chain/node/node.go b/beacon-chain/node/node.go index 045c61e9dd..18f492a6f6 100644 --- a/beacon-chain/node/node.go +++ b/beacon-chain/node/node.go @@ -19,53 +19,53 @@ import ( "syscall" "time" - "github.com/OffchainLabs/prysm/v6/api/server/httprest" - "github.com/OffchainLabs/prysm/v6/api/server/middleware" - "github.com/OffchainLabs/prysm/v6/async/event" - "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain" - "github.com/OffchainLabs/prysm/v6/beacon-chain/builder" - "github.com/OffchainLabs/prysm/v6/beacon-chain/cache" - "github.com/OffchainLabs/prysm/v6/beacon-chain/cache/depositsnapshot" - "github.com/OffchainLabs/prysm/v6/beacon-chain/db" - "github.com/OffchainLabs/prysm/v6/beacon-chain/db/filesystem" - "github.com/OffchainLabs/prysm/v6/beacon-chain/db/kv" - "github.com/OffchainLabs/prysm/v6/beacon-chain/db/pruner" - 
"github.com/OffchainLabs/prysm/v6/beacon-chain/db/slasherkv" - "github.com/OffchainLabs/prysm/v6/beacon-chain/execution" - "github.com/OffchainLabs/prysm/v6/beacon-chain/forkchoice" - doublylinkedtree "github.com/OffchainLabs/prysm/v6/beacon-chain/forkchoice/doubly-linked-tree" - lightclient "github.com/OffchainLabs/prysm/v6/beacon-chain/light-client" - "github.com/OffchainLabs/prysm/v6/beacon-chain/monitor" - "github.com/OffchainLabs/prysm/v6/beacon-chain/node/registration" - "github.com/OffchainLabs/prysm/v6/beacon-chain/operations/attestations" - "github.com/OffchainLabs/prysm/v6/beacon-chain/operations/blstoexec" - "github.com/OffchainLabs/prysm/v6/beacon-chain/operations/slashings" - "github.com/OffchainLabs/prysm/v6/beacon-chain/operations/synccommittee" - "github.com/OffchainLabs/prysm/v6/beacon-chain/operations/voluntaryexits" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/peers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/rpc" - "github.com/OffchainLabs/prysm/v6/beacon-chain/slasher" - "github.com/OffchainLabs/prysm/v6/beacon-chain/startup" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state/stategen" - regularsync "github.com/OffchainLabs/prysm/v6/beacon-chain/sync" - "github.com/OffchainLabs/prysm/v6/beacon-chain/sync/backfill" - "github.com/OffchainLabs/prysm/v6/beacon-chain/sync/backfill/coverage" - "github.com/OffchainLabs/prysm/v6/beacon-chain/sync/checkpoint" - initialsync "github.com/OffchainLabs/prysm/v6/beacon-chain/sync/initial-sync" - "github.com/OffchainLabs/prysm/v6/beacon-chain/verification" - "github.com/OffchainLabs/prysm/v6/cmd" - "github.com/OffchainLabs/prysm/v6/cmd/beacon-chain/flags" - "github.com/OffchainLabs/prysm/v6/config/features" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/container/slice" - "github.com/OffchainLabs/prysm/v6/genesis" - "github.com/OffchainLabs/prysm/v6/monitoring/prometheus" - "github.com/OffchainLabs/prysm/v6/runtime" - "github.com/OffchainLabs/prysm/v6/runtime/prereqs" - "github.com/OffchainLabs/prysm/v6/runtime/version" + "github.com/OffchainLabs/prysm/v7/api/server/httprest" + "github.com/OffchainLabs/prysm/v7/api/server/middleware" + "github.com/OffchainLabs/prysm/v7/async/event" + "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain" + "github.com/OffchainLabs/prysm/v7/beacon-chain/builder" + "github.com/OffchainLabs/prysm/v7/beacon-chain/cache" + "github.com/OffchainLabs/prysm/v7/beacon-chain/cache/depositsnapshot" + "github.com/OffchainLabs/prysm/v7/beacon-chain/db" + "github.com/OffchainLabs/prysm/v7/beacon-chain/db/filesystem" + "github.com/OffchainLabs/prysm/v7/beacon-chain/db/kv" + "github.com/OffchainLabs/prysm/v7/beacon-chain/db/pruner" + "github.com/OffchainLabs/prysm/v7/beacon-chain/db/slasherkv" + "github.com/OffchainLabs/prysm/v7/beacon-chain/execution" + "github.com/OffchainLabs/prysm/v7/beacon-chain/forkchoice" + doublylinkedtree "github.com/OffchainLabs/prysm/v7/beacon-chain/forkchoice/doubly-linked-tree" + lightclient "github.com/OffchainLabs/prysm/v7/beacon-chain/light-client" + "github.com/OffchainLabs/prysm/v7/beacon-chain/monitor" + "github.com/OffchainLabs/prysm/v7/beacon-chain/node/registration" + "github.com/OffchainLabs/prysm/v7/beacon-chain/operations/attestations" + "github.com/OffchainLabs/prysm/v7/beacon-chain/operations/blstoexec" + 
"github.com/OffchainLabs/prysm/v7/beacon-chain/operations/slashings" + "github.com/OffchainLabs/prysm/v7/beacon-chain/operations/synccommittee" + "github.com/OffchainLabs/prysm/v7/beacon-chain/operations/voluntaryexits" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/peers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/rpc" + "github.com/OffchainLabs/prysm/v7/beacon-chain/slasher" + "github.com/OffchainLabs/prysm/v7/beacon-chain/startup" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state/stategen" + regularsync "github.com/OffchainLabs/prysm/v7/beacon-chain/sync" + "github.com/OffchainLabs/prysm/v7/beacon-chain/sync/backfill" + "github.com/OffchainLabs/prysm/v7/beacon-chain/sync/backfill/coverage" + "github.com/OffchainLabs/prysm/v7/beacon-chain/sync/checkpoint" + initialsync "github.com/OffchainLabs/prysm/v7/beacon-chain/sync/initial-sync" + "github.com/OffchainLabs/prysm/v7/beacon-chain/verification" + "github.com/OffchainLabs/prysm/v7/cmd" + "github.com/OffchainLabs/prysm/v7/cmd/beacon-chain/flags" + "github.com/OffchainLabs/prysm/v7/config/features" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/container/slice" + "github.com/OffchainLabs/prysm/v7/genesis" + "github.com/OffchainLabs/prysm/v7/monitoring/prometheus" + "github.com/OffchainLabs/prysm/v7/runtime" + "github.com/OffchainLabs/prysm/v7/runtime/prereqs" + "github.com/OffchainLabs/prysm/v7/runtime/version" "github.com/ethereum/go-ethereum/common" "github.com/pkg/errors" "github.com/sirupsen/logrus" diff --git a/beacon-chain/node/node_test.go b/beacon-chain/node/node_test.go index ea08ccb637..3c2049354b 100644 --- a/beacon-chain/node/node_test.go +++ b/beacon-chain/node/node_test.go @@ -10,19 +10,19 @@ import ( "testing" "time" - "github.com/OffchainLabs/prysm/v6/api/server/middleware" - "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain" - "github.com/OffchainLabs/prysm/v6/beacon-chain/builder" - statefeed "github.com/OffchainLabs/prysm/v6/beacon-chain/core/feed/state" - "github.com/OffchainLabs/prysm/v6/beacon-chain/db/filesystem" - "github.com/OffchainLabs/prysm/v6/beacon-chain/execution" - mockExecution "github.com/OffchainLabs/prysm/v6/beacon-chain/execution/testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/monitor" - "github.com/OffchainLabs/prysm/v6/cmd" - "github.com/OffchainLabs/prysm/v6/config/features" - "github.com/OffchainLabs/prysm/v6/runtime" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/api/server/middleware" + "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain" + "github.com/OffchainLabs/prysm/v7/beacon-chain/builder" + statefeed "github.com/OffchainLabs/prysm/v7/beacon-chain/core/feed/state" + "github.com/OffchainLabs/prysm/v7/beacon-chain/db/filesystem" + "github.com/OffchainLabs/prysm/v7/beacon-chain/execution" + mockExecution "github.com/OffchainLabs/prysm/v7/beacon-chain/execution/testing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/monitor" + "github.com/OffchainLabs/prysm/v7/cmd" + "github.com/OffchainLabs/prysm/v7/config/features" + "github.com/OffchainLabs/prysm/v7/runtime" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" logTest "github.com/sirupsen/logrus/hooks/test" "github.com/urfave/cli/v2" ) diff --git 
a/beacon-chain/node/options.go b/beacon-chain/node/options.go index 522ebd2787..bbfa9fbce4 100644 --- a/beacon-chain/node/options.go +++ b/beacon-chain/node/options.go @@ -1,11 +1,11 @@ package node import ( - "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain" - "github.com/OffchainLabs/prysm/v6/beacon-chain/builder" - "github.com/OffchainLabs/prysm/v6/beacon-chain/db/filesystem" - "github.com/OffchainLabs/prysm/v6/beacon-chain/execution" - "github.com/OffchainLabs/prysm/v6/config/params" + "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain" + "github.com/OffchainLabs/prysm/v7/beacon-chain/builder" + "github.com/OffchainLabs/prysm/v7/beacon-chain/db/filesystem" + "github.com/OffchainLabs/prysm/v7/beacon-chain/execution" + "github.com/OffchainLabs/prysm/v7/config/params" ) // Option for beacon node configuration. diff --git a/beacon-chain/node/registration/BUILD.bazel b/beacon-chain/node/registration/BUILD.bazel index 788327e049..b935aa78c6 100644 --- a/beacon-chain/node/registration/BUILD.bazel +++ b/beacon-chain/node/registration/BUILD.bazel @@ -6,7 +6,7 @@ go_library( "log.go", "p2p.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/beacon-chain/node/registration", + importpath = "github.com/OffchainLabs/prysm/v7/beacon-chain/node/registration", visibility = ["//beacon-chain/node:__subpackages__"], deps = [ "//cmd:go_default_library", diff --git a/beacon-chain/node/registration/p2p.go b/beacon-chain/node/registration/p2p.go index 38aa877da2..3fdabfc7a5 100644 --- a/beacon-chain/node/registration/p2p.go +++ b/beacon-chain/node/registration/p2p.go @@ -4,8 +4,8 @@ import ( "os" "path/filepath" - "github.com/OffchainLabs/prysm/v6/cmd" - "github.com/OffchainLabs/prysm/v6/config/params" + "github.com/OffchainLabs/prysm/v7/cmd" + "github.com/OffchainLabs/prysm/v7/config/params" "github.com/pkg/errors" log "github.com/sirupsen/logrus" "github.com/urfave/cli/v2" diff --git a/beacon-chain/node/registration/p2p_test.go b/beacon-chain/node/registration/p2p_test.go index 302ac8074e..b890ab3ffa 100644 --- a/beacon-chain/node/registration/p2p_test.go +++ b/beacon-chain/node/registration/p2p_test.go @@ -5,10 +5,10 @@ import ( "os" "testing" - "github.com/OffchainLabs/prysm/v6/cmd" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/cmd" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" "github.com/urfave/cli/v2" ) diff --git a/beacon-chain/operations/attestations/BUILD.bazel b/beacon-chain/operations/attestations/BUILD.bazel index a98c823cca..547f398ede 100644 --- a/beacon-chain/operations/attestations/BUILD.bazel +++ b/beacon-chain/operations/attestations/BUILD.bazel @@ -10,7 +10,7 @@ go_library( "prune_expired.go", "service.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/beacon-chain/operations/attestations", + importpath = "github.com/OffchainLabs/prysm/v7/beacon-chain/operations/attestations", visibility = [ "//beacon-chain:__subpackages__", "//testing/spectest:__subpackages__", diff --git a/beacon-chain/operations/attestations/attmap/BUILD.bazel b/beacon-chain/operations/attestations/attmap/BUILD.bazel index 0bff726698..de97fffb94 100644 --- a/beacon-chain/operations/attestations/attmap/BUILD.bazel +++ b/beacon-chain/operations/attestations/attmap/BUILD.bazel @@ -3,7 +3,7 @@ load("@prysm//tools/go:def.bzl", "go_library") 
go_library( name = "go_default_library", srcs = ["map.go"], - importpath = "github.com/OffchainLabs/prysm/v6/beacon-chain/operations/attestations/attmap", + importpath = "github.com/OffchainLabs/prysm/v7/beacon-chain/operations/attestations/attmap", visibility = ["//visibility:public"], deps = [ "//proto/prysm/v1alpha1:go_default_library", diff --git a/beacon-chain/operations/attestations/attmap/map.go b/beacon-chain/operations/attestations/attmap/map.go index cc531e07a6..4e7b47dbd1 100644 --- a/beacon-chain/operations/attestations/attmap/map.go +++ b/beacon-chain/operations/attestations/attmap/map.go @@ -3,8 +3,8 @@ package attmap import ( "sync" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1/attestation" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1/attestation" "github.com/pkg/errors" ) diff --git a/beacon-chain/operations/attestations/kv/BUILD.bazel b/beacon-chain/operations/attestations/kv/BUILD.bazel index 070ae2afc4..f2251084da 100644 --- a/beacon-chain/operations/attestations/kv/BUILD.bazel +++ b/beacon-chain/operations/attestations/kv/BUILD.bazel @@ -9,7 +9,7 @@ go_library( "seen_bits.go", "unaggregated.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/beacon-chain/operations/attestations/kv", + importpath = "github.com/OffchainLabs/prysm/v7/beacon-chain/operations/attestations/kv", visibility = ["//beacon-chain:__subpackages__"], deps = [ "//beacon-chain/core/helpers:go_default_library", diff --git a/beacon-chain/operations/attestations/kv/aggregated.go b/beacon-chain/operations/attestations/kv/aggregated.go index b25861c40a..e15e439b11 100644 --- a/beacon-chain/operations/attestations/kv/aggregated.go +++ b/beacon-chain/operations/attestations/kv/aggregated.go @@ -5,13 +5,13 @@ import ( "runtime" "sync" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing/trace" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1/attestation" - attaggregation "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1/attestation/aggregation/attestations" - "github.com/OffchainLabs/prysm/v6/runtime/version" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing/trace" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1/attestation" + attaggregation "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1/attestation/aggregation/attestations" + "github.com/OffchainLabs/prysm/v7/runtime/version" "github.com/pkg/errors" log "github.com/sirupsen/logrus" ) diff --git a/beacon-chain/operations/attestations/kv/aggregated_test.go b/beacon-chain/operations/attestations/kv/aggregated_test.go index c285fab015..5d4a9a9d96 100644 --- a/beacon-chain/operations/attestations/kv/aggregated_test.go +++ b/beacon-chain/operations/attestations/kv/aggregated_test.go @@ -5,13 +5,13 @@ import ( "testing" "github.com/OffchainLabs/go-bitfield" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/crypto/bls" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1/attestation" - 
"github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/crypto/bls" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1/attestation" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" c "github.com/patrickmn/go-cache" "github.com/pkg/errors" ) diff --git a/beacon-chain/operations/attestations/kv/block.go b/beacon-chain/operations/attestations/kv/block.go index de44f8b41f..8708eb3f83 100644 --- a/beacon-chain/operations/attestations/kv/block.go +++ b/beacon-chain/operations/attestations/kv/block.go @@ -1,8 +1,8 @@ package kv import ( - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1/attestation" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1/attestation" "github.com/pkg/errors" ) diff --git a/beacon-chain/operations/attestations/kv/block_test.go b/beacon-chain/operations/attestations/kv/block_test.go index 492c5ffeee..26cb4baf7a 100644 --- a/beacon-chain/operations/attestations/kv/block_test.go +++ b/beacon-chain/operations/attestations/kv/block_test.go @@ -5,10 +5,10 @@ import ( "testing" "github.com/OffchainLabs/go-bitfield" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" ) func TestKV_BlockAttestation_CanSaveRetrieve(t *testing.T) { diff --git a/beacon-chain/operations/attestations/kv/forkchoice_test.go b/beacon-chain/operations/attestations/kv/forkchoice_test.go index 9f7fe460d3..2bc6ada2ca 100644 --- a/beacon-chain/operations/attestations/kv/forkchoice_test.go +++ b/beacon-chain/operations/attestations/kv/forkchoice_test.go @@ -5,10 +5,10 @@ import ( "testing" "github.com/OffchainLabs/go-bitfield" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" ) func TestKV_Forkchoice_CanSaveRetrieve(t *testing.T) { diff --git a/beacon-chain/operations/attestations/kv/kv.go b/beacon-chain/operations/attestations/kv/kv.go index 8c59530ed1..1c4103c915 100644 --- a/beacon-chain/operations/attestations/kv/kv.go +++ b/beacon-chain/operations/attestations/kv/kv.go @@ -7,10 +7,10 @@ import ( "sync" "time" - "github.com/OffchainLabs/prysm/v6/beacon-chain/operations/attestations/attmap" - "github.com/OffchainLabs/prysm/v6/config/params" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1/attestation" + 
"github.com/OffchainLabs/prysm/v7/beacon-chain/operations/attestations/attmap" + "github.com/OffchainLabs/prysm/v7/config/params" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1/attestation" "github.com/patrickmn/go-cache" ) diff --git a/beacon-chain/operations/attestations/kv/seen_bits.go b/beacon-chain/operations/attestations/kv/seen_bits.go index 5dfe00cd69..68c0bf7a43 100644 --- a/beacon-chain/operations/attestations/kv/seen_bits.go +++ b/beacon-chain/operations/attestations/kv/seen_bits.go @@ -2,8 +2,8 @@ package kv import ( "github.com/OffchainLabs/go-bitfield" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1/attestation" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1/attestation" "github.com/patrickmn/go-cache" "github.com/pkg/errors" ) diff --git a/beacon-chain/operations/attestations/kv/seen_bits_test.go b/beacon-chain/operations/attestations/kv/seen_bits_test.go index e8a7037307..4415235ea5 100644 --- a/beacon-chain/operations/attestations/kv/seen_bits_test.go +++ b/beacon-chain/operations/attestations/kv/seen_bits_test.go @@ -4,10 +4,10 @@ import ( "testing" "github.com/OffchainLabs/go-bitfield" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1/attestation" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1/attestation" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" ) func TestAttCaches_hasSeenBit(t *testing.T) { diff --git a/beacon-chain/operations/attestations/kv/unaggregated.go b/beacon-chain/operations/attestations/kv/unaggregated.go index 65e69dac02..dc258b84b7 100644 --- a/beacon-chain/operations/attestations/kv/unaggregated.go +++ b/beacon-chain/operations/attestations/kv/unaggregated.go @@ -3,11 +3,11 @@ package kv import ( "context" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing/trace" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1/attestation" - "github.com/OffchainLabs/prysm/v6/runtime/version" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing/trace" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1/attestation" + "github.com/OffchainLabs/prysm/v7/runtime/version" "github.com/pkg/errors" log "github.com/sirupsen/logrus" ) diff --git a/beacon-chain/operations/attestations/kv/unaggregated_test.go b/beacon-chain/operations/attestations/kv/unaggregated_test.go index 3d53c392db..014a4648dd 100644 --- a/beacon-chain/operations/attestations/kv/unaggregated_test.go +++ b/beacon-chain/operations/attestations/kv/unaggregated_test.go @@ -6,13 +6,13 @@ import ( "testing" "github.com/OffchainLabs/go-bitfield" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1/attestation" - 
"github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1/attestation" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" c "github.com/patrickmn/go-cache" ) diff --git a/beacon-chain/operations/attestations/mock/BUILD.bazel b/beacon-chain/operations/attestations/mock/BUILD.bazel index e94247b3e1..d6a32021d0 100644 --- a/beacon-chain/operations/attestations/mock/BUILD.bazel +++ b/beacon-chain/operations/attestations/mock/BUILD.bazel @@ -4,7 +4,7 @@ go_library( name = "go_default_library", testonly = True, srcs = ["mock.go"], - importpath = "github.com/OffchainLabs/prysm/v6/beacon-chain/operations/attestations/mock", + importpath = "github.com/OffchainLabs/prysm/v7/beacon-chain/operations/attestations/mock", visibility = ["//visibility:public"], deps = [ "//beacon-chain/operations/attestations:go_default_library", diff --git a/beacon-chain/operations/attestations/mock/mock.go b/beacon-chain/operations/attestations/mock/mock.go index 95f2fb9c47..3abf7599dd 100644 --- a/beacon-chain/operations/attestations/mock/mock.go +++ b/beacon-chain/operations/attestations/mock/mock.go @@ -4,9 +4,9 @@ package mock import ( "context" - "github.com/OffchainLabs/prysm/v6/beacon-chain/operations/attestations" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/beacon-chain/operations/attestations" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" ) var _ attestations.Pool = &PoolMock{} diff --git a/beacon-chain/operations/attestations/pool.go b/beacon-chain/operations/attestations/pool.go index 918ac369b1..6acaab13c1 100644 --- a/beacon-chain/operations/attestations/pool.go +++ b/beacon-chain/operations/attestations/pool.go @@ -3,9 +3,9 @@ package attestations import ( "context" - "github.com/OffchainLabs/prysm/v6/beacon-chain/operations/attestations/kv" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/beacon-chain/operations/attestations/kv" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" ) // Pool defines the necessary methods for Prysm attestations pool to serve diff --git a/beacon-chain/operations/attestations/pool_test.go b/beacon-chain/operations/attestations/pool_test.go index 6210051591..948dbf6c94 100644 --- a/beacon-chain/operations/attestations/pool_test.go +++ b/beacon-chain/operations/attestations/pool_test.go @@ -1,7 +1,7 @@ package attestations import ( - "github.com/OffchainLabs/prysm/v6/beacon-chain/operations/attestations/kv" + "github.com/OffchainLabs/prysm/v7/beacon-chain/operations/attestations/kv" ) var _ Pool = (*kv.AttCaches)(nil) diff --git a/beacon-chain/operations/attestations/prepare_forkchoice.go b/beacon-chain/operations/attestations/prepare_forkchoice.go index 99a9b3f90e..84fbbeed52 100644 --- a/beacon-chain/operations/attestations/prepare_forkchoice.go +++ 
b/beacon-chain/operations/attestations/prepare_forkchoice.go @@ -6,13 +6,13 @@ import ( "time" "github.com/OffchainLabs/go-bitfield" - "github.com/OffchainLabs/prysm/v6/config/features" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing/trace" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1/attestation" - attaggregation "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1/attestation/aggregation/attestations" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/config/features" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing/trace" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1/attestation" + attaggregation "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1/attestation/aggregation/attestations" + "github.com/OffchainLabs/prysm/v7/time/slots" "github.com/pkg/errors" ) diff --git a/beacon-chain/operations/attestations/prepare_forkchoice_test.go b/beacon-chain/operations/attestations/prepare_forkchoice_test.go index 598630c428..0d3c6d4a00 100644 --- a/beacon-chain/operations/attestations/prepare_forkchoice_test.go +++ b/beacon-chain/operations/attestations/prepare_forkchoice_test.go @@ -6,12 +6,12 @@ import ( "testing" "github.com/OffchainLabs/go-bitfield" - "github.com/OffchainLabs/prysm/v6/crypto/bls" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - attaggregation "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1/attestation/aggregation/attestations" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/crypto/bls" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + attaggregation "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1/attestation/aggregation/attestations" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" "google.golang.org/protobuf/proto" ) diff --git a/beacon-chain/operations/attestations/prune_expired.go b/beacon-chain/operations/attestations/prune_expired.go index 6bd66ebaef..3217bf6c85 100644 --- a/beacon-chain/operations/attestations/prune_expired.go +++ b/beacon-chain/operations/attestations/prune_expired.go @@ -3,9 +3,9 @@ package attestations import ( "time" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/time/slots" ) // pruneExpired prunes attestations pool on every slot interval. 
diff --git a/beacon-chain/operations/attestations/prune_expired_test.go b/beacon-chain/operations/attestations/prune_expired_test.go index d5ef9b0c29..8d52b77eaf 100644 --- a/beacon-chain/operations/attestations/prune_expired_test.go +++ b/beacon-chain/operations/attestations/prune_expired_test.go @@ -6,14 +6,14 @@ import ( "time" "github.com/OffchainLabs/go-bitfield" - "github.com/OffchainLabs/prysm/v6/async" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/async" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" ) func TestPruneExpired_Ticker(t *testing.T) { diff --git a/beacon-chain/operations/attestations/service.go b/beacon-chain/operations/attestations/service.go index 8c77a97011..7e2b8e4926 100644 --- a/beacon-chain/operations/attestations/service.go +++ b/beacon-chain/operations/attestations/service.go @@ -8,10 +8,10 @@ import ( "errors" "time" - "github.com/OffchainLabs/prysm/v6/beacon-chain/cache" - lruwrpr "github.com/OffchainLabs/prysm/v6/cache/lru" - "github.com/OffchainLabs/prysm/v6/config/features" - "github.com/OffchainLabs/prysm/v6/config/params" + "github.com/OffchainLabs/prysm/v7/beacon-chain/cache" + lruwrpr "github.com/OffchainLabs/prysm/v7/cache/lru" + "github.com/OffchainLabs/prysm/v7/config/features" + "github.com/OffchainLabs/prysm/v7/config/params" lru "github.com/hashicorp/golang-lru" ) diff --git a/beacon-chain/operations/attestations/service_test.go b/beacon-chain/operations/attestations/service_test.go index 70a45f08a2..a111508afc 100644 --- a/beacon-chain/operations/attestations/service_test.go +++ b/beacon-chain/operations/attestations/service_test.go @@ -5,8 +5,8 @@ import ( "errors" "testing" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" ) func TestStop_OK(t *testing.T) { diff --git a/beacon-chain/operations/blstoexec/BUILD.bazel b/beacon-chain/operations/blstoexec/BUILD.bazel index c575e06080..4fcc38d760 100644 --- a/beacon-chain/operations/blstoexec/BUILD.bazel +++ b/beacon-chain/operations/blstoexec/BUILD.bazel @@ -6,7 +6,7 @@ go_library( "doc.go", "pool.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/beacon-chain/operations/blstoexec", + importpath = "github.com/OffchainLabs/prysm/v7/beacon-chain/operations/blstoexec", visibility = [ "//beacon-chain:__subpackages__", ], diff --git a/beacon-chain/operations/blstoexec/mock/BUILD.bazel b/beacon-chain/operations/blstoexec/mock/BUILD.bazel index 57873bfe96..7cdae9390e 100644 --- a/beacon-chain/operations/blstoexec/mock/BUILD.bazel +++ b/beacon-chain/operations/blstoexec/mock/BUILD.bazel @@ -4,7 +4,7 @@ go_library( name = "go_default_library", testonly = True, srcs = ["mock.go"], - importpath = 
"github.com/OffchainLabs/prysm/v6/beacon-chain/operations/blstoexec/mock", + importpath = "github.com/OffchainLabs/prysm/v7/beacon-chain/operations/blstoexec/mock", visibility = ["//visibility:public"], deps = [ "//beacon-chain/state:go_default_library", diff --git a/beacon-chain/operations/blstoexec/mock/mock.go b/beacon-chain/operations/blstoexec/mock/mock.go index 590b7601d9..c16d7e60cf 100644 --- a/beacon-chain/operations/blstoexec/mock/mock.go +++ b/beacon-chain/operations/blstoexec/mock/mock.go @@ -1,9 +1,9 @@ package mock import ( - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - eth "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + eth "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" ) // PoolMock is a fake implementation of PoolManager. diff --git a/beacon-chain/operations/blstoexec/pool.go b/beacon-chain/operations/blstoexec/pool.go index bb4be4e2c3..447b0f9a14 100644 --- a/beacon-chain/operations/blstoexec/pool.go +++ b/beacon-chain/operations/blstoexec/pool.go @@ -3,12 +3,12 @@ package blstoexec import ( "sync" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/blocks" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - doublylinkedlist "github.com/OffchainLabs/prysm/v6/container/doubly-linked-list" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/blocks" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + doublylinkedlist "github.com/OffchainLabs/prysm/v7/container/doubly-linked-list" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" "github.com/prometheus/client_golang/prometheus" "github.com/prometheus/client_golang/prometheus/promauto" "github.com/sirupsen/logrus" diff --git a/beacon-chain/operations/blstoexec/pool_test.go b/beacon-chain/operations/blstoexec/pool_test.go index 6712dd1383..76ad873dce 100644 --- a/beacon-chain/operations/blstoexec/pool_test.go +++ b/beacon-chain/operations/blstoexec/pool_test.go @@ -3,18 +3,18 @@ package blstoexec import ( "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/signing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/time" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/crypto/bls" - "github.com/OffchainLabs/prysm/v6/crypto/bls/common" - "github.com/OffchainLabs/prysm/v6/crypto/hash" - "github.com/OffchainLabs/prysm/v6/encoding/ssz" - eth "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/signing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/time" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/crypto/bls" + 
"github.com/OffchainLabs/prysm/v7/crypto/bls/common" + "github.com/OffchainLabs/prysm/v7/crypto/hash" + "github.com/OffchainLabs/prysm/v7/encoding/ssz" + eth "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" ) func TestPendingBLSToExecChanges(t *testing.T) { diff --git a/beacon-chain/operations/slashings/BUILD.bazel b/beacon-chain/operations/slashings/BUILD.bazel index df9752384c..2ab1936ebb 100644 --- a/beacon-chain/operations/slashings/BUILD.bazel +++ b/beacon-chain/operations/slashings/BUILD.bazel @@ -10,7 +10,7 @@ go_library( "service.go", "types.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/beacon-chain/operations/slashings", + importpath = "github.com/OffchainLabs/prysm/v7/beacon-chain/operations/slashings", visibility = [ "//beacon-chain:__subpackages__", "//testing/endtoend:__subpackages__", diff --git a/beacon-chain/operations/slashings/mock/BUILD.bazel b/beacon-chain/operations/slashings/mock/BUILD.bazel index 5169317127..8fa6c0f9a3 100644 --- a/beacon-chain/operations/slashings/mock/BUILD.bazel +++ b/beacon-chain/operations/slashings/mock/BUILD.bazel @@ -4,7 +4,7 @@ go_library( name = "go_default_library", testonly = True, srcs = ["mock.go"], - importpath = "github.com/OffchainLabs/prysm/v6/beacon-chain/operations/slashings/mock", + importpath = "github.com/OffchainLabs/prysm/v7/beacon-chain/operations/slashings/mock", visibility = ["//visibility:public"], deps = [ "//beacon-chain/state:go_default_library", diff --git a/beacon-chain/operations/slashings/mock/mock.go b/beacon-chain/operations/slashings/mock/mock.go index b5558e0e0a..d5ba1361a4 100644 --- a/beacon-chain/operations/slashings/mock/mock.go +++ b/beacon-chain/operations/slashings/mock/mock.go @@ -3,8 +3,8 @@ package mock import ( "context" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" ) // PoolMock is a fake implementation of PoolManager. 
diff --git a/beacon-chain/operations/slashings/pool.go b/beacon-chain/operations/slashings/pool.go index d84a091204..303ebdf6fa 100644 --- a/beacon-chain/operations/slashings/pool.go +++ b/beacon-chain/operations/slashings/pool.go @@ -5,16 +5,16 @@ import ( "fmt" "sort" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/blocks" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - coretime "github.com/OffchainLabs/prysm/v6/beacon-chain/core/time" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/container/slice" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing/trace" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/blocks" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + coretime "github.com/OffchainLabs/prysm/v7/beacon-chain/core/time" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/container/slice" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing/trace" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" "github.com/pkg/errors" "github.com/trailofbits/go-mutexasserts" ) diff --git a/beacon-chain/operations/slashings/service.go b/beacon-chain/operations/slashings/service.go index bb7974cd19..c946ebe9ad 100644 --- a/beacon-chain/operations/slashings/service.go +++ b/beacon-chain/operations/slashings/service.go @@ -4,10 +4,10 @@ import ( "context" "time" - "github.com/OffchainLabs/prysm/v6/beacon-chain/startup" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/beacon-chain/startup" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/time/slots" ) // WithElectraTimer includes functional options for the blockchain service related to CLI flags. 
diff --git a/beacon-chain/operations/slashings/service_attester_test.go b/beacon-chain/operations/slashings/service_attester_test.go index 1227fc0d22..6f74036cb8 100644 --- a/beacon-chain/operations/slashings/service_attester_test.go +++ b/beacon-chain/operations/slashings/service_attester_test.go @@ -3,15 +3,15 @@ package slashings import ( "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/crypto/bls" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/crypto/bls" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" ) func validAttesterSlashingForValIdx(t *testing.T, beaconState state.BeaconState, privs []bls.SecretKey, valIdx ...uint64) ethpb.AttSlashing { diff --git a/beacon-chain/operations/slashings/service_new_test.go b/beacon-chain/operations/slashings/service_new_test.go index 529875010b..c4ed648077 100644 --- a/beacon-chain/operations/slashings/service_new_test.go +++ b/beacon-chain/operations/slashings/service_new_test.go @@ -4,13 +4,13 @@ import ( "testing" "time" - "github.com/OffchainLabs/prysm/v6/beacon-chain/startup" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/beacon-chain/startup" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" ) func TestConvertToElectraWithTimer(t *testing.T) { diff --git a/beacon-chain/operations/slashings/service_proposer_test.go b/beacon-chain/operations/slashings/service_proposer_test.go index 008a128278..d8e043d740 100644 --- a/beacon-chain/operations/slashings/service_proposer_test.go +++ b/beacon-chain/operations/slashings/service_proposer_test.go @@ -3,13 +3,13 @@ package slashings import ( "testing" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + 
"github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" ) func proposerSlashingForValIdx(valIdx primitives.ValidatorIndex) *ethpb.ProposerSlashing { diff --git a/beacon-chain/operations/slashings/service_test.go b/beacon-chain/operations/slashings/service_test.go index 08d6f78836..abb55a25cd 100644 --- a/beacon-chain/operations/slashings/service_test.go +++ b/beacon-chain/operations/slashings/service_test.go @@ -3,8 +3,8 @@ package slashings import ( "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/operations/slashings/mock" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/beacon-chain/operations/slashings/mock" + "github.com/OffchainLabs/prysm/v7/testing/require" ) var ( diff --git a/beacon-chain/operations/slashings/types.go b/beacon-chain/operations/slashings/types.go index 80c55b869b..c04906d1c6 100644 --- a/beacon-chain/operations/slashings/types.go +++ b/beacon-chain/operations/slashings/types.go @@ -4,10 +4,10 @@ import ( "context" "sync" - "github.com/OffchainLabs/prysm/v6/beacon-chain/startup" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/beacon-chain/startup" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" ) // PoolInserter is capable of inserting new slashing objects into the operations pool. 
diff --git a/beacon-chain/operations/synccommittee/BUILD.bazel b/beacon-chain/operations/synccommittee/BUILD.bazel index 6506671012..08cf31c0c9 100644 --- a/beacon-chain/operations/synccommittee/BUILD.bazel +++ b/beacon-chain/operations/synccommittee/BUILD.bazel @@ -10,7 +10,7 @@ go_library( "metric.go", "pool.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/beacon-chain/operations/synccommittee", + importpath = "github.com/OffchainLabs/prysm/v7/beacon-chain/operations/synccommittee", visibility = ["//beacon-chain:__subpackages__"], deps = [ "//consensus-types/primitives:go_default_library", diff --git a/beacon-chain/operations/synccommittee/contribution.go b/beacon-chain/operations/synccommittee/contribution.go index 56f0e17612..24605efd41 100644 --- a/beacon-chain/operations/synccommittee/contribution.go +++ b/beacon-chain/operations/synccommittee/contribution.go @@ -3,9 +3,9 @@ package synccommittee import ( "strconv" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/container/queue" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/container/queue" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" "github.com/pkg/errors" ) diff --git a/beacon-chain/operations/synccommittee/contribution_test.go b/beacon-chain/operations/synccommittee/contribution_test.go index e22a82c598..179c6ceb24 100644 --- a/beacon-chain/operations/synccommittee/contribution_test.go +++ b/beacon-chain/operations/synccommittee/contribution_test.go @@ -3,8 +3,8 @@ package synccommittee import ( "testing" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/require" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/require" ) func TestSyncCommitteeContributionCache_Nil(t *testing.T) { diff --git a/beacon-chain/operations/synccommittee/kv.go b/beacon-chain/operations/synccommittee/kv.go index cd5ba2e226..43837f35b7 100644 --- a/beacon-chain/operations/synccommittee/kv.go +++ b/beacon-chain/operations/synccommittee/kv.go @@ -3,7 +3,7 @@ package synccommittee import ( "sync" - "github.com/OffchainLabs/prysm/v6/container/queue" + "github.com/OffchainLabs/prysm/v7/container/queue" ) // Store defines the caches for various sync committee objects diff --git a/beacon-chain/operations/synccommittee/message.go b/beacon-chain/operations/synccommittee/message.go index feeac9c65d..5db8f91796 100644 --- a/beacon-chain/operations/synccommittee/message.go +++ b/beacon-chain/operations/synccommittee/message.go @@ -1,9 +1,9 @@ package synccommittee import ( - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/container/queue" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/container/queue" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" "github.com/pkg/errors" ) diff --git a/beacon-chain/operations/synccommittee/message_test.go b/beacon-chain/operations/synccommittee/message_test.go index 1d6c1a254c..2267a1f73f 100644 --- a/beacon-chain/operations/synccommittee/message_test.go +++ b/beacon-chain/operations/synccommittee/message_test.go @@ -3,8 +3,8 @@ package synccommittee import ( "testing" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - 
"github.com/OffchainLabs/prysm/v6/testing/require" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/require" ) func TestSyncCommitteeSignatureCache_Nil(t *testing.T) { diff --git a/beacon-chain/operations/synccommittee/pool.go b/beacon-chain/operations/synccommittee/pool.go index db9dc25e13..345e5b9551 100644 --- a/beacon-chain/operations/synccommittee/pool.go +++ b/beacon-chain/operations/synccommittee/pool.go @@ -1,8 +1,8 @@ package synccommittee import ( - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" ) var _ = Pool(&Store{}) diff --git a/beacon-chain/operations/voluntaryexits/BUILD.bazel b/beacon-chain/operations/voluntaryexits/BUILD.bazel index 3abbcb6e29..5a76900e8e 100644 --- a/beacon-chain/operations/voluntaryexits/BUILD.bazel +++ b/beacon-chain/operations/voluntaryexits/BUILD.bazel @@ -6,7 +6,7 @@ go_library( "doc.go", "pool.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/beacon-chain/operations/voluntaryexits", + importpath = "github.com/OffchainLabs/prysm/v7/beacon-chain/operations/voluntaryexits", visibility = [ "//beacon-chain:__subpackages__", ], diff --git a/beacon-chain/operations/voluntaryexits/mock/BUILD.bazel b/beacon-chain/operations/voluntaryexits/mock/BUILD.bazel index 2924a5cacd..01b81a6919 100644 --- a/beacon-chain/operations/voluntaryexits/mock/BUILD.bazel +++ b/beacon-chain/operations/voluntaryexits/mock/BUILD.bazel @@ -4,7 +4,7 @@ go_library( name = "go_default_library", testonly = True, srcs = ["mock.go"], - importpath = "github.com/OffchainLabs/prysm/v6/beacon-chain/operations/voluntaryexits/mock", + importpath = "github.com/OffchainLabs/prysm/v7/beacon-chain/operations/voluntaryexits/mock", visibility = ["//visibility:public"], deps = [ "//beacon-chain/state:go_default_library", diff --git a/beacon-chain/operations/voluntaryexits/mock/mock.go b/beacon-chain/operations/voluntaryexits/mock/mock.go index 84d388c68d..6ad2ef7001 100644 --- a/beacon-chain/operations/voluntaryexits/mock/mock.go +++ b/beacon-chain/operations/voluntaryexits/mock/mock.go @@ -1,9 +1,9 @@ package mock import ( - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - eth "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + eth "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" ) // PoolMock is a fake implementation of PoolManager. 
diff --git a/beacon-chain/operations/voluntaryexits/pool.go b/beacon-chain/operations/voluntaryexits/pool.go index d95e14a061..18ca1fa3d4 100644 --- a/beacon-chain/operations/voluntaryexits/pool.go +++ b/beacon-chain/operations/voluntaryexits/pool.go @@ -3,13 +3,13 @@ package voluntaryexits import ( "sync" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/blocks" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/config/params" - types "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - doublylinkedlist "github.com/OffchainLabs/prysm/v6/container/doubly-linked-list" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/blocks" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/config/params" + types "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + doublylinkedlist "github.com/OffchainLabs/prysm/v7/container/doubly-linked-list" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/time/slots" "github.com/sirupsen/logrus" ) diff --git a/beacon-chain/operations/voluntaryexits/pool_test.go b/beacon-chain/operations/voluntaryexits/pool_test.go index 252721da76..29f89fc034 100644 --- a/beacon-chain/operations/voluntaryexits/pool_test.go +++ b/beacon-chain/operations/voluntaryexits/pool_test.go @@ -3,17 +3,17 @@ package voluntaryexits import ( "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/signing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/time" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - "github.com/OffchainLabs/prysm/v6/config/params" - types "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/crypto/bls" - "github.com/OffchainLabs/prysm/v6/crypto/bls/common" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/signing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/time" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + "github.com/OffchainLabs/prysm/v7/config/params" + types "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/crypto/bls" + "github.com/OffchainLabs/prysm/v7/crypto/bls/common" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/time/slots" ) func TestPendingExits(t *testing.T) { diff --git a/beacon-chain/p2p/BUILD.bazel b/beacon-chain/p2p/BUILD.bazel index 406f756b39..8d27988d82 100644 --- a/beacon-chain/p2p/BUILD.bazel +++ b/beacon-chain/p2p/BUILD.bazel @@ -33,7 +33,7 @@ go_library( "utils.go", "watch_peers.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p", + importpath = "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p", visibility = [ "//beacon-chain:__subpackages__", "//cmd:__subpackages__", diff --git a/beacon-chain/p2p/addr_factory_test.go b/beacon-chain/p2p/addr_factory_test.go index b77bdd00e0..f197a98e37 100644 --- a/beacon-chain/p2p/addr_factory_test.go +++ b/beacon-chain/p2p/addr_factory_test.go @@ -3,8 +3,8 @@ 
package p2p import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" ma "github.com/multiformats/go-multiaddr" ) diff --git a/beacon-chain/p2p/broadcaster.go b/beacon-chain/p2p/broadcaster.go index 03b38e29ac..6066a4e052 100644 --- a/beacon-chain/p2p/broadcaster.go +++ b/beacon-chain/p2p/broadcaster.go @@ -9,19 +9,19 @@ import ( "sync" "time" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/altair" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/peerdas" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/crypto/hash" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing/trace" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/altair" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/peerdas" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/crypto/hash" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing/trace" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/time/slots" "github.com/pkg/errors" ssz "github.com/prysmaticlabs/fastssz" "github.com/sirupsen/logrus" diff --git a/beacon-chain/p2p/broadcaster_test.go b/beacon-chain/p2p/broadcaster_test.go index 382035842e..5ea0c13bcd 100644 --- a/beacon-chain/p2p/broadcaster_test.go +++ b/beacon-chain/p2p/broadcaster_test.go @@ -10,27 +10,27 @@ import ( "time" "github.com/OffchainLabs/go-bitfield" - "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/kzg" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/peerdas" - testDB "github.com/OffchainLabs/prysm/v6/beacon-chain/db/testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/peers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/peers/scorers" - p2ptest "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/testing" - "github.com/OffchainLabs/prysm/v6/cmd/beacon-chain/flags" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/consensus-types/wrapper" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - testpb "github.com/OffchainLabs/prysm/v6/proto/testing" - 
"github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain/kzg" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/peerdas" + testDB "github.com/OffchainLabs/prysm/v7/beacon-chain/db/testing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/peers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/peers/scorers" + p2ptest "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/testing" + "github.com/OffchainLabs/prysm/v7/cmd/beacon-chain/flags" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/consensus-types/wrapper" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + testpb "github.com/OffchainLabs/prysm/v7/proto/testing" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" + "github.com/OffchainLabs/prysm/v7/time/slots" pubsub "github.com/libp2p/go-libp2p-pubsub" "github.com/libp2p/go-libp2p/core/host" "google.golang.org/protobuf/proto" diff --git a/beacon-chain/p2p/config.go b/beacon-chain/p2p/config.go index 15267532ea..70d939cb49 100644 --- a/beacon-chain/p2p/config.go +++ b/beacon-chain/p2p/config.go @@ -4,9 +4,9 @@ import ( "net" "time" - statefeed "github.com/OffchainLabs/prysm/v6/beacon-chain/core/feed/state" - "github.com/OffchainLabs/prysm/v6/beacon-chain/db" - "github.com/OffchainLabs/prysm/v6/beacon-chain/startup" + statefeed "github.com/OffchainLabs/prysm/v7/beacon-chain/core/feed/state" + "github.com/OffchainLabs/prysm/v7/beacon-chain/db" + "github.com/OffchainLabs/prysm/v7/beacon-chain/startup" "github.com/sirupsen/logrus" ) diff --git a/beacon-chain/p2p/connection_gater_test.go b/beacon-chain/p2p/connection_gater_test.go index 41d802c8ae..b40a55e7a4 100644 --- a/beacon-chain/p2p/connection_gater_test.go +++ b/beacon-chain/p2p/connection_gater_test.go @@ -5,14 +5,14 @@ import ( "testing" "time" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/peers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/peers/peerdata" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/peers/scorers" - mockp2p "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/testing" - leakybucket "github.com/OffchainLabs/prysm/v6/container/leaky-bucket" - ethpb "github.com/OffchainLabs/prysm/v6/proto/eth/v1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/peers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/peers/peerdata" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/peers/scorers" + mockp2p "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/testing" + leakybucket "github.com/OffchainLabs/prysm/v7/container/leaky-bucket" + ethpb "github.com/OffchainLabs/prysm/v7/proto/eth/v1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" "github.com/libp2p/go-libp2p" "github.com/libp2p/go-libp2p/core/peer" ma 
"github.com/multiformats/go-multiaddr" diff --git a/beacon-chain/p2p/custody.go b/beacon-chain/p2p/custody.go index 01ff28b3da..fe4c9ffeb7 100644 --- a/beacon-chain/p2p/custody.go +++ b/beacon-chain/p2p/custody.go @@ -3,10 +3,10 @@ package p2p import ( "context" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/peerdas" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/peerdas" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/time/slots" "github.com/libp2p/go-libp2p/core/peer" "github.com/pkg/errors" "github.com/sirupsen/logrus" diff --git a/beacon-chain/p2p/custody_test.go b/beacon-chain/p2p/custody_test.go index 5c6b3b29e7..915ab382a1 100644 --- a/beacon-chain/p2p/custody_test.go +++ b/beacon-chain/p2p/custody_test.go @@ -6,16 +6,16 @@ import ( "testing" "time" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/peerdas" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/peers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/peers/scorers" - testp2p "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/testing" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/consensus-types/wrapper" - pb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1/metadata" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/peerdas" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/peers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/peers/scorers" + testp2p "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/testing" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/consensus-types/wrapper" + pb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1/metadata" + "github.com/OffchainLabs/prysm/v7/testing/require" "github.com/ethereum/go-ethereum/p2p/enr" "github.com/libp2p/go-libp2p/core/network" ) diff --git a/beacon-chain/p2p/dial_relay_node.go b/beacon-chain/p2p/dial_relay_node.go index ae29b843c7..27cf404771 100644 --- a/beacon-chain/p2p/dial_relay_node.go +++ b/beacon-chain/p2p/dial_relay_node.go @@ -3,7 +3,7 @@ package p2p import ( "context" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing/trace" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing/trace" "github.com/libp2p/go-libp2p/core/host" "github.com/libp2p/go-libp2p/core/peer" ) diff --git a/beacon-chain/p2p/dial_relay_node_test.go b/beacon-chain/p2p/dial_relay_node_test.go index 40db7d5c11..ca699564c5 100644 --- a/beacon-chain/p2p/dial_relay_node_test.go +++ b/beacon-chain/p2p/dial_relay_node_test.go @@ -4,8 +4,8 @@ import ( "fmt" "testing" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" "github.com/libp2p/go-libp2p" "github.com/libp2p/go-libp2p/core/network" ) diff --git a/beacon-chain/p2p/discovery.go b/beacon-chain/p2p/discovery.go index a5f324ad5c..d6ac5aec82 100644 --- a/beacon-chain/p2p/discovery.go +++ 
b/beacon-chain/p2p/discovery.go @@ -10,14 +10,14 @@ import ( "time" "github.com/OffchainLabs/go-bitfield" - "github.com/OffchainLabs/prysm/v6/beacon-chain/cache" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/peerdas" - "github.com/OffchainLabs/prysm/v6/cmd/beacon-chain/flags" - "github.com/OffchainLabs/prysm/v6/config/features" - "github.com/OffchainLabs/prysm/v6/config/params" - ecdsaprysm "github.com/OffchainLabs/prysm/v6/crypto/ecdsa" - "github.com/OffchainLabs/prysm/v6/runtime/version" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/beacon-chain/cache" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/peerdas" + "github.com/OffchainLabs/prysm/v7/cmd/beacon-chain/flags" + "github.com/OffchainLabs/prysm/v7/config/features" + "github.com/OffchainLabs/prysm/v7/config/params" + ecdsaprysm "github.com/OffchainLabs/prysm/v7/crypto/ecdsa" + "github.com/OffchainLabs/prysm/v7/runtime/version" + "github.com/OffchainLabs/prysm/v7/time/slots" "github.com/ethereum/go-ethereum/p2p/discover" "github.com/ethereum/go-ethereum/p2p/enode" "github.com/ethereum/go-ethereum/p2p/enr" diff --git a/beacon-chain/p2p/discovery_test.go b/beacon-chain/p2p/discovery_test.go index 1477e08cfb..fc73c6d075 100644 --- a/beacon-chain/p2p/discovery_test.go +++ b/beacon-chain/p2p/discovery_test.go @@ -17,23 +17,23 @@ import ( "time" "github.com/OffchainLabs/go-bitfield" - mock "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/cache" - testDB "github.com/OffchainLabs/prysm/v6/beacon-chain/db/testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/peers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/peers/peerdata" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/peers/scorers" - testp2p "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/startup" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/wrapper" - leakybucket "github.com/OffchainLabs/prysm/v6/container/leaky-bucket" - ecdsaprysm "github.com/OffchainLabs/prysm/v6/crypto/ecdsa" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - prysmNetwork "github.com/OffchainLabs/prysm/v6/network" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" + mock "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain/testing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/cache" + testDB "github.com/OffchainLabs/prysm/v7/beacon-chain/db/testing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/peers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/peers/peerdata" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/peers/scorers" + testp2p "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/testing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/startup" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/wrapper" + leakybucket "github.com/OffchainLabs/prysm/v7/container/leaky-bucket" + ecdsaprysm "github.com/OffchainLabs/prysm/v7/crypto/ecdsa" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + prysmNetwork "github.com/OffchainLabs/prysm/v7/network" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" "github.com/ethereum/go-ethereum/p2p/discover" 
"github.com/ethereum/go-ethereum/p2p/enode" "github.com/ethereum/go-ethereum/p2p/enr" diff --git a/beacon-chain/p2p/encoder/BUILD.bazel b/beacon-chain/p2p/encoder/BUILD.bazel index 2841ab5726..48a51b1a6f 100644 --- a/beacon-chain/p2p/encoder/BUILD.bazel +++ b/beacon-chain/p2p/encoder/BUILD.bazel @@ -8,7 +8,7 @@ go_library( "ssz.go", "varint.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/encoder", + importpath = "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/encoder", visibility = [ "//beacon-chain:__subpackages__", "//cmd:__subpackages__", diff --git a/beacon-chain/p2p/encoder/snappy_test.go b/beacon-chain/p2p/encoder/snappy_test.go index 0785c141c1..c9cbef5697 100644 --- a/beacon-chain/p2p/encoder/snappy_test.go +++ b/beacon-chain/p2p/encoder/snappy_test.go @@ -5,7 +5,7 @@ import ( "reflect" "testing" - "github.com/OffchainLabs/prysm/v6/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/assert" "github.com/golang/snappy" ) diff --git a/beacon-chain/p2p/encoder/ssz.go b/beacon-chain/p2p/encoder/ssz.go index 7ec56dde93..a50a97257a 100644 --- a/beacon-chain/p2p/encoder/ssz.go +++ b/beacon-chain/p2p/encoder/ssz.go @@ -5,8 +5,8 @@ import ( "io" "sync" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/math" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/math" "github.com/gogo/protobuf/proto" "github.com/golang/snappy" "github.com/pkg/errors" diff --git a/beacon-chain/p2p/encoder/ssz_test.go b/beacon-chain/p2p/encoder/ssz_test.go index b671c2330a..10b6bbe5f3 100644 --- a/beacon-chain/p2p/encoder/ssz_test.go +++ b/beacon-chain/p2p/encoder/ssz_test.go @@ -8,12 +8,12 @@ import ( "math" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/encoder" - "github.com/OffchainLabs/prysm/v6/config/params" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/encoder" + "github.com/OffchainLabs/prysm/v7/config/params" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" gogo "github.com/gogo/protobuf/proto" "github.com/google/go-cmp/cmp" fastssz "github.com/prysmaticlabs/fastssz" diff --git a/beacon-chain/p2p/encoder/varint_test.go b/beacon-chain/p2p/encoder/varint_test.go index 593e00b26d..65fa6bfbba 100644 --- a/beacon-chain/p2p/encoder/varint_test.go +++ b/beacon-chain/p2p/encoder/varint_test.go @@ -4,8 +4,8 @@ import ( "bytes" "testing" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" "github.com/gogo/protobuf/proto" ) diff --git a/beacon-chain/p2p/fork.go b/beacon-chain/p2p/fork.go index b192303683..bf65bb6376 100644 --- a/beacon-chain/p2p/fork.go +++ b/beacon-chain/p2p/fork.go @@ -4,9 +4,9 @@ import ( "bytes" "fmt" - "github.com/OffchainLabs/prysm/v6/config/params" - pb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/config/params" + pb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/time/slots" 
"github.com/ethereum/go-ethereum/p2p/enode" "github.com/ethereum/go-ethereum/p2p/enr" "github.com/pkg/errors" diff --git a/beacon-chain/p2p/fork_test.go b/beacon-chain/p2p/fork_test.go index 8791c3eb55..6225585b9a 100644 --- a/beacon-chain/p2p/fork_test.go +++ b/beacon-chain/p2p/fork_test.go @@ -8,11 +8,11 @@ import ( "testing" "time" - "github.com/OffchainLabs/prysm/v6/beacon-chain/startup" - "github.com/OffchainLabs/prysm/v6/config/params" - pb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/beacon-chain/startup" + "github.com/OffchainLabs/prysm/v7/config/params" + pb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" "github.com/ethereum/go-ethereum/common/hexutil" "github.com/ethereum/go-ethereum/p2p/enode" "github.com/ethereum/go-ethereum/p2p/enr" diff --git a/beacon-chain/p2p/fork_watcher.go b/beacon-chain/p2p/fork_watcher.go index d1be21400b..cc0b62c89d 100644 --- a/beacon-chain/p2p/fork_watcher.go +++ b/beacon-chain/p2p/fork_watcher.go @@ -1,8 +1,8 @@ package p2p import ( - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/time/slots" ) // A background routine which listens for new and upcoming forks and diff --git a/beacon-chain/p2p/gossip_scoring_params.go b/beacon-chain/p2p/gossip_scoring_params.go index 69a853566a..d48d578eef 100644 --- a/beacon-chain/p2p/gossip_scoring_params.go +++ b/beacon-chain/p2p/gossip_scoring_params.go @@ -9,9 +9,9 @@ import ( "strings" "time" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - coreTime "github.com/OffchainLabs/prysm/v6/beacon-chain/core/time" - "github.com/OffchainLabs/prysm/v6/config/params" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + coreTime "github.com/OffchainLabs/prysm/v7/beacon-chain/core/time" + "github.com/OffchainLabs/prysm/v7/config/params" pubsub "github.com/libp2p/go-libp2p-pubsub" "github.com/libp2p/go-libp2p/core/peer" "github.com/pkg/errors" diff --git a/beacon-chain/p2p/gossip_scoring_params_test.go b/beacon-chain/p2p/gossip_scoring_params_test.go index 206f7733d0..31a9f4f645 100644 --- a/beacon-chain/p2p/gossip_scoring_params_test.go +++ b/beacon-chain/p2p/gossip_scoring_params_test.go @@ -3,12 +3,12 @@ package p2p import ( "testing" - dbutil "github.com/OffchainLabs/prysm/v6/beacon-chain/db/testing" - "github.com/OffchainLabs/prysm/v6/config/params" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + dbutil "github.com/OffchainLabs/prysm/v7/beacon-chain/db/testing" + "github.com/OffchainLabs/prysm/v7/config/params" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" pubsub "github.com/libp2p/go-libp2p-pubsub" ) diff --git a/beacon-chain/p2p/gossip_topic_mappings.go b/beacon-chain/p2p/gossip_topic_mappings.go index eac93eef57..fbb9fcd605 100644 --- a/beacon-chain/p2p/gossip_topic_mappings.go +++ b/beacon-chain/p2p/gossip_topic_mappings.go @@ -3,9 +3,9 @@ package p2p 
import ( "reflect" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" "google.golang.org/protobuf/proto" ) diff --git a/beacon-chain/p2p/gossip_topic_mappings_test.go b/beacon-chain/p2p/gossip_topic_mappings_test.go index 2b186e8c50..441a56f811 100644 --- a/beacon-chain/p2p/gossip_topic_mappings_test.go +++ b/beacon-chain/p2p/gossip_topic_mappings_test.go @@ -4,11 +4,11 @@ import ( "reflect" "testing" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" ) func TestMappingHasNoDuplicates(t *testing.T) { diff --git a/beacon-chain/p2p/handshake.go b/beacon-chain/p2p/handshake.go index 0b7f66c14a..1fda231478 100644 --- a/beacon-chain/p2p/handshake.go +++ b/beacon-chain/p2p/handshake.go @@ -7,9 +7,9 @@ import ( "sync" "time" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/peers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/peers/peerdata" - prysmTime "github.com/OffchainLabs/prysm/v6/time" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/peers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/peers/peerdata" + prysmTime "github.com/OffchainLabs/prysm/v7/time" "github.com/libp2p/go-libp2p/core/host" "github.com/libp2p/go-libp2p/core/network" "github.com/libp2p/go-libp2p/core/peer" diff --git a/beacon-chain/p2p/interfaces.go b/beacon-chain/p2p/interfaces.go index 2d2d0993d3..bb77b5b4f5 100644 --- a/beacon-chain/p2p/interfaces.go +++ b/beacon-chain/p2p/interfaces.go @@ -3,14 +3,14 @@ package p2p import ( "context" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/encoder" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/peers" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1/metadata" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/encoder" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/peers" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1/metadata" "github.com/ethereum/go-ethereum/p2p/enode" "github.com/ethereum/go-ethereum/p2p/enr" pubsub "github.com/libp2p/go-libp2p-pubsub" diff --git a/beacon-chain/p2p/message_id.go b/beacon-chain/p2p/message_id.go index b9e95b9544..2b488685f6 100644 --- 
a/beacon-chain/p2p/message_id.go +++ b/beacon-chain/p2p/message_id.go @@ -1,12 +1,12 @@ package p2p import ( - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/encoder" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/crypto/hash" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/math" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/encoder" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/crypto/hash" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/math" pubsubpb "github.com/libp2p/go-libp2p-pubsub/pb" ) diff --git a/beacon-chain/p2p/message_id_test.go b/beacon-chain/p2p/message_id_test.go index 5e0cce8a04..d87139f8ba 100644 --- a/beacon-chain/p2p/message_id_test.go +++ b/beacon-chain/p2p/message_id_test.go @@ -5,13 +5,13 @@ import ( "testing" "time" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/signing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p" - "github.com/OffchainLabs/prysm/v6/beacon-chain/startup" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/crypto/hash" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/testing/assert" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/signing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p" + "github.com/OffchainLabs/prysm/v7/beacon-chain/startup" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/crypto/hash" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/testing/assert" "github.com/golang/snappy" pubsubpb "github.com/libp2p/go-libp2p-pubsub/pb" ) diff --git a/beacon-chain/p2p/monitoring.go b/beacon-chain/p2p/monitoring.go index d6bc829a80..b454d2587d 100644 --- a/beacon-chain/p2p/monitoring.go +++ b/beacon-chain/p2p/monitoring.go @@ -3,7 +3,7 @@ package p2p import ( "strings" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/peers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/peers" "github.com/libp2p/go-libp2p/core/peer" "github.com/libp2p/go-libp2p/core/peerstore" "github.com/prometheus/client_golang/prometheus" diff --git a/beacon-chain/p2p/options.go b/beacon-chain/p2p/options.go index 3f4d5dba86..a95f1d03c4 100644 --- a/beacon-chain/p2p/options.go +++ b/beacon-chain/p2p/options.go @@ -6,9 +6,9 @@ import ( "net" "time" - "github.com/OffchainLabs/prysm/v6/config/features" - ecdsaprysm "github.com/OffchainLabs/prysm/v6/crypto/ecdsa" - "github.com/OffchainLabs/prysm/v6/runtime/version" + "github.com/OffchainLabs/prysm/v7/config/features" + ecdsaprysm "github.com/OffchainLabs/prysm/v7/crypto/ecdsa" + "github.com/OffchainLabs/prysm/v7/runtime/version" "github.com/libp2p/go-libp2p" mplex "github.com/libp2p/go-libp2p-mplex" "github.com/libp2p/go-libp2p/core/network" diff --git a/beacon-chain/p2p/options_test.go b/beacon-chain/p2p/options_test.go index b89632a8f6..9ddea0d5b4 100644 --- a/beacon-chain/p2p/options_test.go +++ b/beacon-chain/p2p/options_test.go @@ -8,12 +8,12 @@ import ( "path" "testing" - mock "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/testing" - "github.com/OffchainLabs/prysm/v6/config/params" - ecdsaprysm "github.com/OffchainLabs/prysm/v6/crypto/ecdsa" - "github.com/OffchainLabs/prysm/v6/network" - 
"github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" + mock "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain/testing" + "github.com/OffchainLabs/prysm/v7/config/params" + ecdsaprysm "github.com/OffchainLabs/prysm/v7/crypto/ecdsa" + "github.com/OffchainLabs/prysm/v7/network" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" gethCrypto "github.com/ethereum/go-ethereum/crypto" "github.com/ethereum/go-ethereum/p2p/enode" "github.com/ethereum/go-ethereum/p2p/enr" diff --git a/beacon-chain/p2p/parameter_test.go b/beacon-chain/p2p/parameter_test.go index fa437f4497..824b7d2502 100644 --- a/beacon-chain/p2p/parameter_test.go +++ b/beacon-chain/p2p/parameter_test.go @@ -3,8 +3,8 @@ package p2p import ( "testing" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/testing/assert" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/testing/assert" pubsub "github.com/libp2p/go-libp2p-pubsub" ) diff --git a/beacon-chain/p2p/peers/BUILD.bazel b/beacon-chain/p2p/peers/BUILD.bazel index b8d2232610..461dd44716 100644 --- a/beacon-chain/p2p/peers/BUILD.bazel +++ b/beacon-chain/p2p/peers/BUILD.bazel @@ -7,7 +7,7 @@ go_library( "log.go", "status.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/peers", + importpath = "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/peers", visibility = [ "//beacon-chain:__subpackages__", "//cmd:__subpackages__", diff --git a/beacon-chain/p2p/peers/assigner.go b/beacon-chain/p2p/peers/assigner.go index 4af20f542a..45a27b4888 100644 --- a/beacon-chain/p2p/peers/assigner.go +++ b/beacon-chain/p2p/peers/assigner.go @@ -1,9 +1,9 @@ package peers import ( - forkchoicetypes "github.com/OffchainLabs/prysm/v6/beacon-chain/forkchoice/types" - "github.com/OffchainLabs/prysm/v6/cmd/beacon-chain/flags" - "github.com/OffchainLabs/prysm/v6/config/params" + forkchoicetypes "github.com/OffchainLabs/prysm/v7/beacon-chain/forkchoice/types" + "github.com/OffchainLabs/prysm/v7/cmd/beacon-chain/flags" + "github.com/OffchainLabs/prysm/v7/config/params" "github.com/libp2p/go-libp2p/core/peer" "github.com/pkg/errors" "github.com/sirupsen/logrus" diff --git a/beacon-chain/p2p/peers/assigner_test.go b/beacon-chain/p2p/peers/assigner_test.go index 97a037f29b..8fd064dc1e 100644 --- a/beacon-chain/p2p/peers/assigner_test.go +++ b/beacon-chain/p2p/peers/assigner_test.go @@ -4,7 +4,7 @@ import ( "fmt" "testing" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/require" "github.com/libp2p/go-libp2p/core/peer" ) diff --git a/beacon-chain/p2p/peers/peerdata/BUILD.bazel b/beacon-chain/p2p/peers/peerdata/BUILD.bazel index 5b20ba06cd..e55183a42e 100644 --- a/beacon-chain/p2p/peers/peerdata/BUILD.bazel +++ b/beacon-chain/p2p/peers/peerdata/BUILD.bazel @@ -3,7 +3,7 @@ load("@prysm//tools/go:def.bzl", "go_library", "go_test") go_library( name = "go_default_library", srcs = ["store.go"], - importpath = "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/peers/peerdata", + importpath = "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/peers/peerdata", visibility = ["//beacon-chain:__subpackages__"], deps = [ "//proto/prysm/v1alpha1:go_default_library", diff --git a/beacon-chain/p2p/peers/peerdata/store.go b/beacon-chain/p2p/peers/peerdata/store.go index fed0cc31e8..07e10e8935 100644 --- a/beacon-chain/p2p/peers/peerdata/store.go +++ 
b/beacon-chain/p2p/peers/peerdata/store.go @@ -6,8 +6,8 @@ import ( "sync" "time" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1/metadata" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1/metadata" "github.com/ethereum/go-ethereum/p2p/enr" "github.com/libp2p/go-libp2p/core/network" "github.com/libp2p/go-libp2p/core/peer" diff --git a/beacon-chain/p2p/peers/peerdata/store_test.go b/beacon-chain/p2p/peers/peerdata/store_test.go index 0e8bd32014..623e30b714 100644 --- a/beacon-chain/p2p/peers/peerdata/store_test.go +++ b/beacon-chain/p2p/peers/peerdata/store_test.go @@ -3,9 +3,9 @@ package peerdata_test import ( "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/peers/peerdata" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/peers/peerdata" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" "github.com/libp2p/go-libp2p/core/peer" ) diff --git a/beacon-chain/p2p/peers/peers_test.go b/beacon-chain/p2p/peers/peers_test.go index 63daef3096..82df2f13dc 100644 --- a/beacon-chain/p2p/peers/peers_test.go +++ b/beacon-chain/p2p/peers/peers_test.go @@ -5,8 +5,8 @@ import ( "os" "testing" - "github.com/OffchainLabs/prysm/v6/cmd/beacon-chain/flags" - "github.com/OffchainLabs/prysm/v6/config/features" + "github.com/OffchainLabs/prysm/v7/cmd/beacon-chain/flags" + "github.com/OffchainLabs/prysm/v7/config/features" "github.com/sirupsen/logrus" ) diff --git a/beacon-chain/p2p/peers/scorers/BUILD.bazel b/beacon-chain/p2p/peers/scorers/BUILD.bazel index 4d19789783..0788229469 100644 --- a/beacon-chain/p2p/peers/scorers/BUILD.bazel +++ b/beacon-chain/p2p/peers/scorers/BUILD.bazel @@ -9,7 +9,7 @@ go_library( "peer_status.go", "service.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/peers/scorers", + importpath = "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/peers/scorers", visibility = ["//beacon-chain:__subpackages__"], deps = [ "//beacon-chain/p2p/peers/peerdata:go_default_library", diff --git a/beacon-chain/p2p/peers/scorers/bad_responses.go b/beacon-chain/p2p/peers/scorers/bad_responses.go index 1525cc20e9..ce0f729f6b 100644 --- a/beacon-chain/p2p/peers/scorers/bad_responses.go +++ b/beacon-chain/p2p/peers/scorers/bad_responses.go @@ -3,7 +3,7 @@ package scorers import ( "time" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/peers/peerdata" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/peers/peerdata" "github.com/libp2p/go-libp2p/core/peer" "github.com/pkg/errors" ) diff --git a/beacon-chain/p2p/peers/scorers/bad_responses_test.go b/beacon-chain/p2p/peers/scorers/bad_responses_test.go index 1cb70d7b81..45fb515ec5 100644 --- a/beacon-chain/p2p/peers/scorers/bad_responses_test.go +++ b/beacon-chain/p2p/peers/scorers/bad_responses_test.go @@ -4,11 +4,11 @@ import ( "sort" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/peers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/peers/peerdata" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/peers/scorers" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/peers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/peers/peerdata" + 
"github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/peers/scorers" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" "github.com/libp2p/go-libp2p/core/network" "github.com/libp2p/go-libp2p/core/peer" ) diff --git a/beacon-chain/p2p/peers/scorers/block_providers.go b/beacon-chain/p2p/peers/scorers/block_providers.go index 094755f644..9fea1b12bb 100644 --- a/beacon-chain/p2p/peers/scorers/block_providers.go +++ b/beacon-chain/p2p/peers/scorers/block_providers.go @@ -6,10 +6,10 @@ import ( "sort" "time" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/peers/peerdata" - "github.com/OffchainLabs/prysm/v6/cmd/beacon-chain/flags" - "github.com/OffchainLabs/prysm/v6/config/features" - "github.com/OffchainLabs/prysm/v6/crypto/rand" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/peers/peerdata" + "github.com/OffchainLabs/prysm/v7/cmd/beacon-chain/flags" + "github.com/OffchainLabs/prysm/v7/config/features" + "github.com/OffchainLabs/prysm/v7/crypto/rand" "github.com/libp2p/go-libp2p/core/peer" ) diff --git a/beacon-chain/p2p/peers/scorers/block_providers_test.go b/beacon-chain/p2p/peers/scorers/block_providers_test.go index 26b4b735f3..f4a9b51cd5 100644 --- a/beacon-chain/p2p/peers/scorers/block_providers_test.go +++ b/beacon-chain/p2p/peers/scorers/block_providers_test.go @@ -6,13 +6,13 @@ import ( "strconv" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/peers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/peers/scorers" - "github.com/OffchainLabs/prysm/v6/cmd/beacon-chain/flags" - "github.com/OffchainLabs/prysm/v6/config/features" - "github.com/OffchainLabs/prysm/v6/crypto/rand" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/time" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/peers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/peers/scorers" + "github.com/OffchainLabs/prysm/v7/cmd/beacon-chain/flags" + "github.com/OffchainLabs/prysm/v7/config/features" + "github.com/OffchainLabs/prysm/v7/crypto/rand" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/time" "github.com/libp2p/go-libp2p/core/peer" ) diff --git a/beacon-chain/p2p/peers/scorers/gossip_scorer.go b/beacon-chain/p2p/peers/scorers/gossip_scorer.go index f52de23c55..dd9b407c47 100644 --- a/beacon-chain/p2p/peers/scorers/gossip_scorer.go +++ b/beacon-chain/p2p/peers/scorers/gossip_scorer.go @@ -1,8 +1,8 @@ package scorers import ( - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/peers/peerdata" - pbrpc "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/peers/peerdata" + pbrpc "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" "github.com/libp2p/go-libp2p/core/peer" "github.com/pkg/errors" ) diff --git a/beacon-chain/p2p/peers/scorers/gossip_scorer_test.go b/beacon-chain/p2p/peers/scorers/gossip_scorer_test.go index 02c5cc8d90..3093127b72 100644 --- a/beacon-chain/p2p/peers/scorers/gossip_scorer_test.go +++ b/beacon-chain/p2p/peers/scorers/gossip_scorer_test.go @@ -3,10 +3,10 @@ package scorers_test import ( "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/peers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/peers/scorers" - pbrpc "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/peers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/peers/scorers" + pbrpc 
"github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" ) func TestScorers_Gossip_Score(t *testing.T) { diff --git a/beacon-chain/p2p/peers/scorers/peer_status.go b/beacon-chain/p2p/peers/scorers/peer_status.go index bd0959d2f1..e21a4637cd 100644 --- a/beacon-chain/p2p/peers/scorers/peer_status.go +++ b/beacon-chain/p2p/peers/scorers/peer_status.go @@ -5,10 +5,10 @@ import ( "math" "time" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/peers/peerdata" - p2ptypes "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/types" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - pb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/peers/peerdata" + p2ptypes "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/types" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + pb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" "github.com/libp2p/go-libp2p/core/peer" ) diff --git a/beacon-chain/p2p/peers/scorers/peer_status_test.go b/beacon-chain/p2p/peers/scorers/peer_status_test.go index fc71bd94a0..2f88eacd26 100644 --- a/beacon-chain/p2p/peers/scorers/peer_status_test.go +++ b/beacon-chain/p2p/peers/scorers/peer_status_test.go @@ -3,14 +3,14 @@ package scorers_test import ( "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/peers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/peers/peerdata" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/peers/scorers" - p2ptypes "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/types" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - pb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/peers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/peers/peerdata" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/peers/scorers" + p2ptypes "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/types" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + pb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" "github.com/libp2p/go-libp2p/core/peer" ) diff --git a/beacon-chain/p2p/peers/scorers/scorers_test.go b/beacon-chain/p2p/peers/scorers/scorers_test.go index ea67765fc7..12881dc9fc 100644 --- a/beacon-chain/p2p/peers/scorers/scorers_test.go +++ b/beacon-chain/p2p/peers/scorers/scorers_test.go @@ -6,9 +6,9 @@ import ( "os" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/peers/scorers" - "github.com/OffchainLabs/prysm/v6/cmd/beacon-chain/flags" - "github.com/OffchainLabs/prysm/v6/config/features" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/peers/scorers" + "github.com/OffchainLabs/prysm/v7/cmd/beacon-chain/flags" + "github.com/OffchainLabs/prysm/v7/config/features" "github.com/sirupsen/logrus" ) diff --git a/beacon-chain/p2p/peers/scorers/service.go b/beacon-chain/p2p/peers/scorers/service.go index 789d23fcfa..dbd5681e33 100644 --- a/beacon-chain/p2p/peers/scorers/service.go +++ b/beacon-chain/p2p/peers/scorers/service.go @@ -5,8 +5,8 @@ import ( "math" "time" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/peers/peerdata" - "github.com/OffchainLabs/prysm/v6/config/features" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/peers/peerdata" + 
"github.com/OffchainLabs/prysm/v7/config/features" "github.com/libp2p/go-libp2p/core/peer" "github.com/pkg/errors" ) diff --git a/beacon-chain/p2p/peers/scorers/service_test.go b/beacon-chain/p2p/peers/scorers/service_test.go index 873137a47b..f728132628 100644 --- a/beacon-chain/p2p/peers/scorers/service_test.go +++ b/beacon-chain/p2p/peers/scorers/service_test.go @@ -5,10 +5,10 @@ import ( "testing" "time" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/peers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/peers/scorers" - "github.com/OffchainLabs/prysm/v6/cmd/beacon-chain/flags" - "github.com/OffchainLabs/prysm/v6/testing/assert" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/peers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/peers/scorers" + "github.com/OffchainLabs/prysm/v7/cmd/beacon-chain/flags" + "github.com/OffchainLabs/prysm/v7/testing/assert" "github.com/libp2p/go-libp2p/core/network" "github.com/libp2p/go-libp2p/core/peer" ) diff --git a/beacon-chain/p2p/peers/status.go b/beacon-chain/p2p/peers/status.go index f533cf31e3..7dafeb574c 100644 --- a/beacon-chain/p2p/peers/status.go +++ b/beacon-chain/p2p/peers/status.go @@ -31,17 +31,17 @@ import ( "time" "github.com/OffchainLabs/go-bitfield" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/peers/peerdata" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/peers/scorers" - "github.com/OffchainLabs/prysm/v6/config/features" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/crypto/rand" - pmath "github.com/OffchainLabs/prysm/v6/math" - pb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1/metadata" - prysmTime "github.com/OffchainLabs/prysm/v6/time" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/peers/peerdata" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/peers/scorers" + "github.com/OffchainLabs/prysm/v7/config/features" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/crypto/rand" + pmath "github.com/OffchainLabs/prysm/v7/math" + pb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1/metadata" + prysmTime "github.com/OffchainLabs/prysm/v7/time" + "github.com/OffchainLabs/prysm/v7/time/slots" "github.com/ethereum/go-ethereum/p2p/enr" "github.com/libp2p/go-libp2p/core/network" "github.com/libp2p/go-libp2p/core/peer" diff --git a/beacon-chain/p2p/peers/status_test.go b/beacon-chain/p2p/peers/status_test.go index d5d45cbeef..97bd4b7a90 100644 --- a/beacon-chain/p2p/peers/status_test.go +++ b/beacon-chain/p2p/peers/status_test.go @@ -7,17 +7,17 @@ import ( "time" "github.com/OffchainLabs/go-bitfield" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/peers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/peers/peerdata" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/peers/scorers" - "github.com/OffchainLabs/prysm/v6/config/features" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/consensus-types/wrapper" - ethpb "github.com/OffchainLabs/prysm/v6/proto/eth/v1" - pb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - 
"github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/peers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/peers/peerdata" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/peers/scorers" + "github.com/OffchainLabs/prysm/v7/config/features" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/consensus-types/wrapper" + ethpb "github.com/OffchainLabs/prysm/v7/proto/eth/v1" + pb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" "github.com/ethereum/go-ethereum/p2p/enr" "github.com/libp2p/go-libp2p/core/network" "github.com/libp2p/go-libp2p/core/peer" diff --git a/beacon-chain/p2p/pubsub.go b/beacon-chain/p2p/pubsub.go index 6877fe606f..514f08d8c7 100644 --- a/beacon-chain/p2p/pubsub.go +++ b/beacon-chain/p2p/pubsub.go @@ -7,11 +7,11 @@ import ( "strings" "time" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/encoder" - "github.com/OffchainLabs/prysm/v6/cmd/beacon-chain/flags" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - pbrpc "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/encoder" + "github.com/OffchainLabs/prysm/v7/cmd/beacon-chain/flags" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + pbrpc "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" pubsub "github.com/libp2p/go-libp2p-pubsub" pubsubpb "github.com/libp2p/go-libp2p-pubsub/pb" "github.com/libp2p/go-libp2p/core/peer" diff --git a/beacon-chain/p2p/pubsub_filter.go b/beacon-chain/p2p/pubsub_filter.go index 3aeee657b5..ae0cf816a0 100644 --- a/beacon-chain/p2p/pubsub_filter.go +++ b/beacon-chain/p2p/pubsub_filter.go @@ -5,8 +5,8 @@ import ( "fmt" "strings" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/encoder" - "github.com/OffchainLabs/prysm/v6/config/params" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/encoder" + "github.com/OffchainLabs/prysm/v7/config/params" pubsub "github.com/libp2p/go-libp2p-pubsub" pubsubpb "github.com/libp2p/go-libp2p-pubsub/pb" "github.com/libp2p/go-libp2p/core/peer" diff --git a/beacon-chain/p2p/pubsub_filter_test.go b/beacon-chain/p2p/pubsub_filter_test.go index 075a3f2191..20bb5638ad 100644 --- a/beacon-chain/p2p/pubsub_filter_test.go +++ b/beacon-chain/p2p/pubsub_filter_test.go @@ -7,13 +7,13 @@ import ( "testing" "time" - testDB "github.com/OffchainLabs/prysm/v6/beacon-chain/db/testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/encoder" - "github.com/OffchainLabs/prysm/v6/beacon-chain/startup" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/testing/require" - prysmTime "github.com/OffchainLabs/prysm/v6/time" + testDB "github.com/OffchainLabs/prysm/v7/beacon-chain/db/testing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/encoder" + "github.com/OffchainLabs/prysm/v7/beacon-chain/startup" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/testing/require" + prysmTime "github.com/OffchainLabs/prysm/v7/time" pubsubpb "github.com/libp2p/go-libp2p-pubsub/pb" "github.com/libp2p/go-libp2p/core/peer" ) diff --git 
a/beacon-chain/p2p/pubsub_fuzz_test.go b/beacon-chain/p2p/pubsub_fuzz_test.go index d673464621..fc53e1303e 100644 --- a/beacon-chain/p2p/pubsub_fuzz_test.go +++ b/beacon-chain/p2p/pubsub_fuzz_test.go @@ -6,8 +6,8 @@ import ( "fmt" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/encoder" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/encoder" ) func FuzzMsgID(f *testing.F) { diff --git a/beacon-chain/p2p/pubsub_test.go b/beacon-chain/p2p/pubsub_test.go index adb4aaa4fb..0ddb7f91c4 100644 --- a/beacon-chain/p2p/pubsub_test.go +++ b/beacon-chain/p2p/pubsub_test.go @@ -7,13 +7,13 @@ import ( "testing" "time" - mock "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/testing" - testDB "github.com/OffchainLabs/prysm/v6/beacon-chain/db/testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/encoder" - testp2p "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/startup" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" + mock "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain/testing" + testDB "github.com/OffchainLabs/prysm/v7/beacon-chain/db/testing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/encoder" + testp2p "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/testing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/startup" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" "github.com/pkg/errors" ) diff --git a/beacon-chain/p2p/rpc_topic_mappings.go b/beacon-chain/p2p/rpc_topic_mappings.go index 68a965553a..cb206f3765 100644 --- a/beacon-chain/p2p/rpc_topic_mappings.go +++ b/beacon-chain/p2p/rpc_topic_mappings.go @@ -3,11 +3,11 @@ package p2p import ( "reflect" - p2ptypes "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/types" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - pb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + p2ptypes "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/types" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + pb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" "github.com/pkg/errors" ) diff --git a/beacon-chain/p2p/rpc_topic_mappings_test.go b/beacon-chain/p2p/rpc_topic_mappings_test.go index 6134759948..9b14e93839 100644 --- a/beacon-chain/p2p/rpc_topic_mappings_test.go +++ b/beacon-chain/p2p/rpc_topic_mappings_test.go @@ -5,13 +5,13 @@ import ( "strings" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/types" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - pb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/types" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + pb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + 
"github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" ) func TestVerifyRPCMappings(t *testing.T) { diff --git a/beacon-chain/p2p/sender.go b/beacon-chain/p2p/sender.go index fa6e8b4324..abcc5377c0 100644 --- a/beacon-chain/p2p/sender.go +++ b/beacon-chain/p2p/sender.go @@ -3,8 +3,8 @@ package p2p import ( "context" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing/trace" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing/trace" "github.com/kr/pretty" "github.com/libp2p/go-libp2p/core/network" "github.com/libp2p/go-libp2p/core/peer" diff --git a/beacon-chain/p2p/sender_test.go b/beacon-chain/p2p/sender_test.go index dfd86f4c98..3d61e29ec6 100644 --- a/beacon-chain/p2p/sender_test.go +++ b/beacon-chain/p2p/sender_test.go @@ -5,12 +5,12 @@ import ( "testing" "time" - testp2p "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/testing" - "github.com/OffchainLabs/prysm/v6/config/params" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + testp2p "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/testing" + "github.com/OffchainLabs/prysm/v7/config/params" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" "github.com/libp2p/go-libp2p/core/network" "google.golang.org/protobuf/proto" ) diff --git a/beacon-chain/p2p/service.go b/beacon-chain/p2p/service.go index 40cff9e35f..17bd0d9c25 100644 --- a/beacon-chain/p2p/service.go +++ b/beacon-chain/p2p/service.go @@ -9,21 +9,21 @@ import ( "sync" "time" - "github.com/OffchainLabs/prysm/v6/async" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/encoder" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/peers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/peers/scorers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/types" - "github.com/OffchainLabs/prysm/v6/cmd/beacon-chain/flags" - "github.com/OffchainLabs/prysm/v6/config/features" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - leakybucket "github.com/OffchainLabs/prysm/v6/container/leaky-bucket" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing/trace" - prysmnetwork "github.com/OffchainLabs/prysm/v6/network" - "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1/metadata" - "github.com/OffchainLabs/prysm/v6/runtime" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/async" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/encoder" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/peers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/peers/scorers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/types" + "github.com/OffchainLabs/prysm/v7/cmd/beacon-chain/flags" + "github.com/OffchainLabs/prysm/v7/config/features" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + leakybucket "github.com/OffchainLabs/prysm/v7/container/leaky-bucket" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing/trace" + prysmnetwork "github.com/OffchainLabs/prysm/v7/network" + 
"github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1/metadata" + "github.com/OffchainLabs/prysm/v7/runtime" + "github.com/OffchainLabs/prysm/v7/time/slots" "github.com/ethereum/go-ethereum/p2p/enode" "github.com/ethereum/go-ethereum/p2p/enr" "github.com/libp2p/go-libp2p" diff --git a/beacon-chain/p2p/service_test.go b/beacon-chain/p2p/service_test.go index 4d16057e5d..2596681ca7 100644 --- a/beacon-chain/p2p/service_test.go +++ b/beacon-chain/p2p/service_test.go @@ -8,18 +8,18 @@ import ( "testing" "time" - mock "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/testing" - testDB "github.com/OffchainLabs/prysm/v6/beacon-chain/db/testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/encoder" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/peers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/peers/scorers" - testp2p "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/startup" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - prysmTime "github.com/OffchainLabs/prysm/v6/time" + mock "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain/testing" + testDB "github.com/OffchainLabs/prysm/v7/beacon-chain/db/testing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/encoder" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/peers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/peers/scorers" + testp2p "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/testing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/startup" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + prysmTime "github.com/OffchainLabs/prysm/v7/time" "github.com/libp2p/go-libp2p" "github.com/libp2p/go-libp2p/core/host" "github.com/libp2p/go-libp2p/core/peer" diff --git a/beacon-chain/p2p/subnets.go b/beacon-chain/p2p/subnets.go index 0c7d3a1f54..8629da03c1 100644 --- a/beacon-chain/p2p/subnets.go +++ b/beacon-chain/p2p/subnets.go @@ -9,18 +9,18 @@ import ( "time" "github.com/OffchainLabs/go-bitfield" - "github.com/OffchainLabs/prysm/v6/beacon-chain/cache" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/peerdas" - "github.com/OffchainLabs/prysm/v6/cmd/beacon-chain/flags" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/consensus-types/wrapper" - "github.com/OffchainLabs/prysm/v6/crypto/hash" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing/trace" - pb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/beacon-chain/cache" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/peerdas" + "github.com/OffchainLabs/prysm/v7/cmd/beacon-chain/flags" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/consensus-types/wrapper" + 
"github.com/OffchainLabs/prysm/v7/crypto/hash" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing/trace" + pb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" "github.com/ethereum/go-ethereum/p2p/enode" "github.com/ethereum/go-ethereum/p2p/enr" "github.com/holiman/uint256" diff --git a/beacon-chain/p2p/subnets_test.go b/beacon-chain/p2p/subnets_test.go index ddd9849af0..8a22e608a4 100644 --- a/beacon-chain/p2p/subnets_test.go +++ b/beacon-chain/p2p/subnets_test.go @@ -8,18 +8,18 @@ import ( "time" "github.com/OffchainLabs/go-bitfield" - "github.com/OffchainLabs/prysm/v6/beacon-chain/cache" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/peerdas" - testDB "github.com/OffchainLabs/prysm/v6/beacon-chain/db/testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/peers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/peers/scorers" - testp2p "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/testing" - "github.com/OffchainLabs/prysm/v6/cmd/beacon-chain/flags" - "github.com/OffchainLabs/prysm/v6/config/params" - ecdsaprysm "github.com/OffchainLabs/prysm/v6/crypto/ecdsa" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/beacon-chain/cache" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/peerdas" + testDB "github.com/OffchainLabs/prysm/v7/beacon-chain/db/testing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/peers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/peers/scorers" + testp2p "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/testing" + "github.com/OffchainLabs/prysm/v7/cmd/beacon-chain/flags" + "github.com/OffchainLabs/prysm/v7/config/params" + ecdsaprysm "github.com/OffchainLabs/prysm/v7/crypto/ecdsa" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" "github.com/ethereum/go-ethereum/p2p/enode" "github.com/ethereum/go-ethereum/p2p/enr" "github.com/libp2p/go-libp2p/core/crypto" diff --git a/beacon-chain/p2p/testing/BUILD.bazel b/beacon-chain/p2p/testing/BUILD.bazel index 2728164369..2aee33be0c 100644 --- a/beacon-chain/p2p/testing/BUILD.bazel +++ b/beacon-chain/p2p/testing/BUILD.bazel @@ -13,7 +13,7 @@ go_library( "mock_peersprovider.go", "p2p.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/testing", + importpath = "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/testing", visibility = [ "//beacon-chain:__subpackages__", "//testing:__subpackages__", diff --git a/beacon-chain/p2p/testing/fuzz_p2p.go b/beacon-chain/p2p/testing/fuzz_p2p.go index b9dbb71e41..130fc9f8d7 100644 --- a/beacon-chain/p2p/testing/fuzz_p2p.go +++ b/beacon-chain/p2p/testing/fuzz_p2p.go @@ -3,14 +3,14 @@ package testing import ( "context" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/encoder" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/peers" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1/metadata" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/encoder" + 
"github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/peers" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1/metadata" "github.com/ethereum/go-ethereum/p2p/enode" "github.com/ethereum/go-ethereum/p2p/enr" pubsub "github.com/libp2p/go-libp2p-pubsub" diff --git a/beacon-chain/p2p/testing/mock_broadcaster.go b/beacon-chain/p2p/testing/mock_broadcaster.go index 512b9901ea..bfa3f7b3c2 100644 --- a/beacon-chain/p2p/testing/mock_broadcaster.go +++ b/beacon-chain/p2p/testing/mock_broadcaster.go @@ -5,9 +5,9 @@ import ( "sync" "sync/atomic" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" "google.golang.org/protobuf/proto" ) diff --git a/beacon-chain/p2p/testing/mock_metadataprovider.go b/beacon-chain/p2p/testing/mock_metadataprovider.go index 6197a0da69..191d504460 100644 --- a/beacon-chain/p2p/testing/mock_metadataprovider.go +++ b/beacon-chain/p2p/testing/mock_metadataprovider.go @@ -1,7 +1,7 @@ package testing import ( - "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1/metadata" + "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1/metadata" ) // MockMetadataProvider is a fake implementation of the MetadataProvider interface. 
diff --git a/beacon-chain/p2p/testing/mock_peermanager.go b/beacon-chain/p2p/testing/mock_peermanager.go index fed8926c67..0e82e8308a 100644 --- a/beacon-chain/p2p/testing/mock_peermanager.go +++ b/beacon-chain/p2p/testing/mock_peermanager.go @@ -4,7 +4,7 @@ import ( "context" "errors" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" "github.com/ethereum/go-ethereum/p2p/enode" "github.com/ethereum/go-ethereum/p2p/enr" "github.com/libp2p/go-libp2p/core/host" diff --git a/beacon-chain/p2p/testing/mock_peersprovider.go b/beacon-chain/p2p/testing/mock_peersprovider.go index 2597780f7e..37051e157a 100644 --- a/beacon-chain/p2p/testing/mock_peersprovider.go +++ b/beacon-chain/p2p/testing/mock_peersprovider.go @@ -4,9 +4,9 @@ import ( "context" "sync" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/peers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/peers/scorers" - pb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/peers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/peers/scorers" + pb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" "github.com/ethereum/go-ethereum/crypto" "github.com/ethereum/go-ethereum/p2p/enode" "github.com/ethereum/go-ethereum/p2p/enr" diff --git a/beacon-chain/p2p/testing/p2p.go b/beacon-chain/p2p/testing/p2p.go index 641172ea07..208eb248ff 100644 --- a/beacon-chain/p2p/testing/p2p.go +++ b/beacon-chain/p2p/testing/p2p.go @@ -11,18 +11,18 @@ import ( "testing" "time" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/peerdas" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/encoder" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/peers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/peers/scorers" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1/metadata" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/peerdas" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/encoder" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/peers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/peers/scorers" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1/metadata" + "github.com/OffchainLabs/prysm/v7/testing/require" "github.com/ethereum/go-ethereum/p2p/enode" "github.com/ethereum/go-ethereum/p2p/enr" "github.com/libp2p/go-libp2p" diff --git a/beacon-chain/p2p/topics.go b/beacon-chain/p2p/topics.go index 1124809ab3..35db556180 100644 --- a/beacon-chain/p2p/topics.go +++ b/beacon-chain/p2p/topics.go @@ -5,9 +5,9 @@ import ( "slices" "strconv" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/encoder" - "github.com/OffchainLabs/prysm/v6/config/params" - 
"github.com/OffchainLabs/prysm/v6/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/encoder" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" ) const ( diff --git a/beacon-chain/p2p/topics_test.go b/beacon-chain/p2p/topics_test.go index 75669a9006..c10594c9dc 100644 --- a/beacon-chain/p2p/topics_test.go +++ b/beacon-chain/p2p/topics_test.go @@ -4,9 +4,9 @@ import ( "encoding/hex" "testing" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/testing/require" ) func TestAllTopics(t *testing.T) { diff --git a/beacon-chain/p2p/types/BUILD.bazel b/beacon-chain/p2p/types/BUILD.bazel index 61bdc78c93..de2644eee0 100644 --- a/beacon-chain/p2p/types/BUILD.bazel +++ b/beacon-chain/p2p/types/BUILD.bazel @@ -8,7 +8,7 @@ go_library( "rpc_goodbye_codes.go", "types.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/types", + importpath = "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/types", visibility = [ "//beacon-chain:__subpackages__", "//cmd:__subpackages__", diff --git a/beacon-chain/p2p/types/object_mapping.go b/beacon-chain/p2p/types/object_mapping.go index e96a4a983e..cd519f7325 100644 --- a/beacon-chain/p2p/types/object_mapping.go +++ b/beacon-chain/p2p/types/object_mapping.go @@ -1,15 +1,15 @@ package types import ( - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - lightclientConsensusTypes "github.com/OffchainLabs/prysm/v6/consensus-types/light-client" - "github.com/OffchainLabs/prysm/v6/consensus-types/wrapper" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - enginev1 "github.com/OffchainLabs/prysm/v6/proto/engine/v1" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1/metadata" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + lightclientConsensusTypes "github.com/OffchainLabs/prysm/v7/consensus-types/light-client" + "github.com/OffchainLabs/prysm/v7/consensus-types/wrapper" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + enginev1 "github.com/OffchainLabs/prysm/v7/proto/engine/v1" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1/metadata" ) func init() { diff --git a/beacon-chain/p2p/types/object_mapping_test.go b/beacon-chain/p2p/types/object_mapping_test.go index 999972e137..0bb28bc73b 100644 --- a/beacon-chain/p2p/types/object_mapping_test.go +++ b/beacon-chain/p2p/types/object_mapping_test.go @@ -3,11 +3,11 @@ package types import ( "testing" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/runtime/version" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/runtime/version" + 
"github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" ) func TestInitializeDataMaps(t *testing.T) { diff --git a/beacon-chain/p2p/types/rpc_goodbye_codes.go b/beacon-chain/p2p/types/rpc_goodbye_codes.go index 011e7c4011..a2b4a14ae7 100644 --- a/beacon-chain/p2p/types/rpc_goodbye_codes.go +++ b/beacon-chain/p2p/types/rpc_goodbye_codes.go @@ -3,7 +3,7 @@ package types import ( "errors" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" ) // RPCGoodbyeCode represents goodbye code, used in sync package. diff --git a/beacon-chain/p2p/types/types.go b/beacon-chain/p2p/types/types.go index f6e8fa6a32..c82d6487ee 100644 --- a/beacon-chain/p2p/types/types.go +++ b/beacon-chain/p2p/types/types.go @@ -8,9 +8,9 @@ import ( "encoding/binary" "sort" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - eth "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + eth "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" "github.com/pkg/errors" ssz "github.com/prysmaticlabs/fastssz" ) diff --git a/beacon-chain/p2p/types/types_test.go b/beacon-chain/p2p/types/types_test.go index ebe542cc7a..945feb1185 100644 --- a/beacon-chain/p2p/types/types_test.go +++ b/beacon-chain/p2p/types/types_test.go @@ -4,13 +4,13 @@ import ( "encoding/hex" "testing" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - eth "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + eth "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" ssz "github.com/prysmaticlabs/fastssz" ) diff --git a/beacon-chain/p2p/utils.go b/beacon-chain/p2p/utils.go index f65b4d5e17..22a8d1e196 100644 --- a/beacon-chain/p2p/utils.go +++ b/beacon-chain/p2p/utils.go @@ -14,14 +14,14 @@ import ( "time" "github.com/OffchainLabs/go-bitfield" - "github.com/OffchainLabs/prysm/v6/beacon-chain/db" - "github.com/OffchainLabs/prysm/v6/beacon-chain/db/kv" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/wrapper" - ecdsaprysm "github.com/OffchainLabs/prysm/v6/crypto/ecdsa" - "github.com/OffchainLabs/prysm/v6/io/file" - pb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1/metadata" + "github.com/OffchainLabs/prysm/v7/beacon-chain/db" + "github.com/OffchainLabs/prysm/v7/beacon-chain/db/kv" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/wrapper" + ecdsaprysm "github.com/OffchainLabs/prysm/v7/crypto/ecdsa" + "github.com/OffchainLabs/prysm/v7/io/file" + pb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + 
"github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1/metadata" "github.com/btcsuite/btcd/btcec/v2" gCrypto "github.com/ethereum/go-ethereum/crypto" "github.com/ethereum/go-ethereum/p2p/enode" diff --git a/beacon-chain/p2p/utils_test.go b/beacon-chain/p2p/utils_test.go index 59e5e6fd0c..c5be788cc8 100644 --- a/beacon-chain/p2p/utils_test.go +++ b/beacon-chain/p2p/utils_test.go @@ -4,10 +4,10 @@ import ( "context" "testing" - testDB "github.com/OffchainLabs/prysm/v6/beacon-chain/db/testing" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" + testDB "github.com/OffchainLabs/prysm/v7/beacon-chain/db/testing" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" "github.com/ethereum/go-ethereum/crypto" "github.com/ethereum/go-ethereum/p2p/enode" "github.com/libp2p/go-libp2p/core/peer" diff --git a/beacon-chain/p2p/watch_peers.go b/beacon-chain/p2p/watch_peers.go index bb25d06f62..167d61cd8b 100644 --- a/beacon-chain/p2p/watch_peers.go +++ b/beacon-chain/p2p/watch_peers.go @@ -3,7 +3,7 @@ package p2p import ( "context" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/peers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/peers" "github.com/libp2p/go-libp2p/core/host" "github.com/libp2p/go-libp2p/core/peer" ma "github.com/multiformats/go-multiaddr" diff --git a/beacon-chain/rpc/BUILD.bazel b/beacon-chain/rpc/BUILD.bazel index 6e3cb833a1..404e378181 100644 --- a/beacon-chain/rpc/BUILD.bazel +++ b/beacon-chain/rpc/BUILD.bazel @@ -8,7 +8,7 @@ go_library( "metrics.go", "service.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/beacon-chain/rpc", + importpath = "github.com/OffchainLabs/prysm/v7/beacon-chain/rpc", visibility = ["//beacon-chain:__subpackages__"], deps = [ "//api:go_default_library", diff --git a/beacon-chain/rpc/core/BUILD.bazel b/beacon-chain/rpc/core/BUILD.bazel index 47e85fd9ce..252cd453fe 100644 --- a/beacon-chain/rpc/core/BUILD.bazel +++ b/beacon-chain/rpc/core/BUILD.bazel @@ -9,7 +9,7 @@ go_library( "service.go", "validator.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/beacon-chain/rpc/core", + importpath = "github.com/OffchainLabs/prysm/v7/beacon-chain/rpc/core", visibility = ["//visibility:public"], deps = [ "//api/server:go_default_library", diff --git a/beacon-chain/rpc/core/beacon.go b/beacon-chain/rpc/core/beacon.go index f50643c9f5..be69cb8be9 100644 --- a/beacon-chain/rpc/core/beacon.go +++ b/beacon-chain/rpc/core/beacon.go @@ -3,11 +3,11 @@ package core import ( "context" - "github.com/OffchainLabs/prysm/v6/config/params" - consensusblocks "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/config/params" + consensusblocks "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/time/slots" "github.com/pkg/errors" ) diff --git a/beacon-chain/rpc/core/service.go b/beacon-chain/rpc/core/service.go index c7f43ffc24..fe22234332 100644 --- a/beacon-chain/rpc/core/service.go +++ b/beacon-chain/rpc/core/service.go @@ -1,14 +1,14 @@ package core import ( - 
"github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain" - "github.com/OffchainLabs/prysm/v6/beacon-chain/cache" - opfeed "github.com/OffchainLabs/prysm/v6/beacon-chain/core/feed/operation" - "github.com/OffchainLabs/prysm/v6/beacon-chain/db" - "github.com/OffchainLabs/prysm/v6/beacon-chain/operations/synccommittee" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state/stategen" - "github.com/OffchainLabs/prysm/v6/beacon-chain/sync" + "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain" + "github.com/OffchainLabs/prysm/v7/beacon-chain/cache" + opfeed "github.com/OffchainLabs/prysm/v7/beacon-chain/core/feed/operation" + "github.com/OffchainLabs/prysm/v7/beacon-chain/db" + "github.com/OffchainLabs/prysm/v7/beacon-chain/operations/synccommittee" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state/stategen" + "github.com/OffchainLabs/prysm/v7/beacon-chain/sync" ) type Service struct { diff --git a/beacon-chain/rpc/core/validator.go b/beacon-chain/rpc/core/validator.go index 761e2478f1..18e9f29b7d 100644 --- a/beacon-chain/rpc/core/validator.go +++ b/beacon-chain/rpc/core/validator.go @@ -7,29 +7,29 @@ import ( "sort" "time" - "github.com/OffchainLabs/prysm/v6/api/server" - "github.com/OffchainLabs/prysm/v6/beacon-chain/cache" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/altair" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/epoch/precompute" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/feed" - opfeed "github.com/OffchainLabs/prysm/v6/beacon-chain/core/feed/operation" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - coreTime "github.com/OffchainLabs/prysm/v6/beacon-chain/core/time" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/transition" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/validators" - forkchoicetypes "github.com/OffchainLabs/prysm/v6/beacon-chain/forkchoice/types" - beaconState "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/consensus-types/validator" - "github.com/OffchainLabs/prysm/v6/crypto/bls" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing/trace" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" - prysmTime "github.com/OffchainLabs/prysm/v6/time" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/api/server" + "github.com/OffchainLabs/prysm/v7/beacon-chain/cache" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/altair" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/epoch/precompute" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/feed" + opfeed "github.com/OffchainLabs/prysm/v7/beacon-chain/core/feed/operation" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + coreTime "github.com/OffchainLabs/prysm/v7/beacon-chain/core/time" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/transition" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/validators" + forkchoicetypes "github.com/OffchainLabs/prysm/v7/beacon-chain/forkchoice/types" + beaconState "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + 
"github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/consensus-types/validator" + "github.com/OffchainLabs/prysm/v7/crypto/bls" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing/trace" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" + prysmTime "github.com/OffchainLabs/prysm/v7/time" + "github.com/OffchainLabs/prysm/v7/time/slots" "github.com/pkg/errors" "github.com/sirupsen/logrus" "golang.org/x/sync/errgroup" diff --git a/beacon-chain/rpc/core/validator_test.go b/beacon-chain/rpc/core/validator_test.go index 58a14e70f7..849beef301 100644 --- a/beacon-chain/rpc/core/validator_test.go +++ b/beacon-chain/rpc/core/validator_test.go @@ -5,16 +5,16 @@ import ( "testing" "time" - mockChain "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/cache" - p2pmock "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/testing" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/consensus-types/validator" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/time/slots" + mockChain "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain/testing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/cache" + p2pmock "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/testing" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/consensus-types/validator" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/time/slots" "github.com/ethereum/go-ethereum/common/hexutil" ) diff --git a/beacon-chain/rpc/endpoints.go b/beacon-chain/rpc/endpoints.go index 065ff0bd9c..1046781296 100644 --- a/beacon-chain/rpc/endpoints.go +++ b/beacon-chain/rpc/endpoints.go @@ -3,25 +3,25 @@ package rpc import ( "net/http" - "github.com/OffchainLabs/prysm/v6/api" - "github.com/OffchainLabs/prysm/v6/api/server/middleware" - "github.com/OffchainLabs/prysm/v6/beacon-chain/rpc/core" - "github.com/OffchainLabs/prysm/v6/beacon-chain/rpc/eth/beacon" - "github.com/OffchainLabs/prysm/v6/beacon-chain/rpc/eth/blob" - "github.com/OffchainLabs/prysm/v6/beacon-chain/rpc/eth/config" - "github.com/OffchainLabs/prysm/v6/beacon-chain/rpc/eth/debug" - "github.com/OffchainLabs/prysm/v6/beacon-chain/rpc/eth/events" - lightclient "github.com/OffchainLabs/prysm/v6/beacon-chain/rpc/eth/light-client" - "github.com/OffchainLabs/prysm/v6/beacon-chain/rpc/eth/node" - "github.com/OffchainLabs/prysm/v6/beacon-chain/rpc/eth/rewards" - "github.com/OffchainLabs/prysm/v6/beacon-chain/rpc/eth/validator" - "github.com/OffchainLabs/prysm/v6/beacon-chain/rpc/lookup" - beaconprysm "github.com/OffchainLabs/prysm/v6/beacon-chain/rpc/prysm/beacon" - nodeprysm "github.com/OffchainLabs/prysm/v6/beacon-chain/rpc/prysm/node" - validatorv1alpha1 "github.com/OffchainLabs/prysm/v6/beacon-chain/rpc/prysm/v1alpha1/validator" - validatorprysm "github.com/OffchainLabs/prysm/v6/beacon-chain/rpc/prysm/validator" - 
"github.com/OffchainLabs/prysm/v6/beacon-chain/state/stategen" - "github.com/OffchainLabs/prysm/v6/config/features" + "github.com/OffchainLabs/prysm/v7/api" + "github.com/OffchainLabs/prysm/v7/api/server/middleware" + "github.com/OffchainLabs/prysm/v7/beacon-chain/rpc/core" + "github.com/OffchainLabs/prysm/v7/beacon-chain/rpc/eth/beacon" + "github.com/OffchainLabs/prysm/v7/beacon-chain/rpc/eth/blob" + "github.com/OffchainLabs/prysm/v7/beacon-chain/rpc/eth/config" + "github.com/OffchainLabs/prysm/v7/beacon-chain/rpc/eth/debug" + "github.com/OffchainLabs/prysm/v7/beacon-chain/rpc/eth/events" + lightclient "github.com/OffchainLabs/prysm/v7/beacon-chain/rpc/eth/light-client" + "github.com/OffchainLabs/prysm/v7/beacon-chain/rpc/eth/node" + "github.com/OffchainLabs/prysm/v7/beacon-chain/rpc/eth/rewards" + "github.com/OffchainLabs/prysm/v7/beacon-chain/rpc/eth/validator" + "github.com/OffchainLabs/prysm/v7/beacon-chain/rpc/lookup" + beaconprysm "github.com/OffchainLabs/prysm/v7/beacon-chain/rpc/prysm/beacon" + nodeprysm "github.com/OffchainLabs/prysm/v7/beacon-chain/rpc/prysm/node" + validatorv1alpha1 "github.com/OffchainLabs/prysm/v7/beacon-chain/rpc/prysm/v1alpha1/validator" + validatorprysm "github.com/OffchainLabs/prysm/v7/beacon-chain/rpc/prysm/validator" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state/stategen" + "github.com/OffchainLabs/prysm/v7/config/features" "github.com/prometheus/client_golang/prometheus" "github.com/prometheus/client_golang/prometheus/promhttp" ) diff --git a/beacon-chain/rpc/endpoints_test.go b/beacon-chain/rpc/endpoints_test.go index a55909a3c3..7967e3c592 100644 --- a/beacon-chain/rpc/endpoints_test.go +++ b/beacon-chain/rpc/endpoints_test.go @@ -6,8 +6,8 @@ import ( "slices" "testing" - "github.com/OffchainLabs/prysm/v6/config/features" - "github.com/OffchainLabs/prysm/v6/testing/assert" + "github.com/OffchainLabs/prysm/v7/config/features" + "github.com/OffchainLabs/prysm/v7/testing/assert" ) func Test_endpoints(t *testing.T) { diff --git a/beacon-chain/rpc/eth/beacon/BUILD.bazel b/beacon-chain/rpc/eth/beacon/BUILD.bazel index 5f5eb0e52b..ccd0f52407 100644 --- a/beacon-chain/rpc/eth/beacon/BUILD.bazel +++ b/beacon-chain/rpc/eth/beacon/BUILD.bazel @@ -11,7 +11,7 @@ go_library( "metrics.go", "server.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/beacon-chain/rpc/eth/beacon", + importpath = "github.com/OffchainLabs/prysm/v7/beacon-chain/rpc/eth/beacon", visibility = ["//visibility:public"], deps = [ "//api:go_default_library", diff --git a/beacon-chain/rpc/eth/beacon/handlers.go b/beacon-chain/rpc/eth/beacon/handlers.go index ffc9a25198..58eff169c2 100644 --- a/beacon-chain/rpc/eth/beacon/handlers.go +++ b/beacon-chain/rpc/eth/beacon/handlers.go @@ -11,26 +11,26 @@ import ( "strings" "time" - "github.com/OffchainLabs/prysm/v6/api" - "github.com/OffchainLabs/prysm/v6/api/server/structs" - "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/kzg" - corehelpers "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/transition" - "github.com/OffchainLabs/prysm/v6/beacon-chain/db/filters" - "github.com/OffchainLabs/prysm/v6/beacon-chain/rpc/eth/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/rpc/eth/shared" - "github.com/OffchainLabs/prysm/v6/beacon-chain/rpc/prysm/v1alpha1/validator" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - 
"github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing/trace" - "github.com/OffchainLabs/prysm/v6/network/httputil" - eth "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/api" + "github.com/OffchainLabs/prysm/v7/api/server/structs" + "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain/kzg" + corehelpers "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/transition" + "github.com/OffchainLabs/prysm/v7/beacon-chain/db/filters" + "github.com/OffchainLabs/prysm/v7/beacon-chain/rpc/eth/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/rpc/eth/shared" + "github.com/OffchainLabs/prysm/v7/beacon-chain/rpc/prysm/v1alpha1/validator" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing/trace" + "github.com/OffchainLabs/prysm/v7/network/httputil" + eth "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" + "github.com/OffchainLabs/prysm/v7/time/slots" "github.com/ethereum/go-ethereum/common/hexutil" "github.com/pkg/errors" ssz "github.com/prysmaticlabs/fastssz" diff --git a/beacon-chain/rpc/eth/beacon/handlers_equivocation_test.go b/beacon-chain/rpc/eth/beacon/handlers_equivocation_test.go index 5f0521463f..199d560dd7 100644 --- a/beacon-chain/rpc/eth/beacon/handlers_equivocation_test.go +++ b/beacon-chain/rpc/eth/beacon/handlers_equivocation_test.go @@ -4,10 +4,10 @@ import ( "encoding/json" "testing" - "github.com/OffchainLabs/prysm/v6/api/server/structs" - rpctesting "github.com/OffchainLabs/prysm/v6/beacon-chain/rpc/eth/shared/testing" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/api/server/structs" + rpctesting "github.com/OffchainLabs/prysm/v7/beacon-chain/rpc/eth/shared/testing" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/testing/require" ) // TestBlocks_NewSignedBeaconBlock_EquivocationFix tests that blocks.NewSignedBeaconBlock diff --git a/beacon-chain/rpc/eth/beacon/handlers_pool.go b/beacon-chain/rpc/eth/beacon/handlers_pool.go index e40ad0cf47..e6adb7b1e1 100644 --- a/beacon-chain/rpc/eth/beacon/handlers_pool.go +++ b/beacon-chain/rpc/eth/beacon/handlers_pool.go @@ -8,26 +8,26 @@ import ( "net/http" "time" - "github.com/OffchainLabs/prysm/v6/api" - "github.com/OffchainLabs/prysm/v6/api/server" - "github.com/OffchainLabs/prysm/v6/api/server/structs" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/blocks" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/feed" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/feed/operation" - corehelpers "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/transition" - 
"github.com/OffchainLabs/prysm/v6/beacon-chain/rpc/core" - "github.com/OffchainLabs/prysm/v6/beacon-chain/rpc/eth/shared" - "github.com/OffchainLabs/prysm/v6/config/features" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - mvslice "github.com/OffchainLabs/prysm/v6/container/multi-value-slice" - "github.com/OffchainLabs/prysm/v6/crypto/bls" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing/trace" - "github.com/OffchainLabs/prysm/v6/network/httputil" - eth "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/api" + "github.com/OffchainLabs/prysm/v7/api/server" + "github.com/OffchainLabs/prysm/v7/api/server/structs" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/blocks" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/feed" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/feed/operation" + corehelpers "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/transition" + "github.com/OffchainLabs/prysm/v7/beacon-chain/rpc/core" + "github.com/OffchainLabs/prysm/v7/beacon-chain/rpc/eth/shared" + "github.com/OffchainLabs/prysm/v7/config/features" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + mvslice "github.com/OffchainLabs/prysm/v7/container/multi-value-slice" + "github.com/OffchainLabs/prysm/v7/crypto/bls" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing/trace" + "github.com/OffchainLabs/prysm/v7/network/httputil" + eth "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" + "github.com/OffchainLabs/prysm/v7/time/slots" "github.com/pkg/errors" "github.com/sirupsen/logrus" ) diff --git a/beacon-chain/rpc/eth/beacon/handlers_pool_test.go b/beacon-chain/rpc/eth/beacon/handlers_pool_test.go index 9685957e42..8fb48692ea 100644 --- a/beacon-chain/rpc/eth/beacon/handlers_pool_test.go +++ b/beacon-chain/rpc/eth/beacon/handlers_pool_test.go @@ -11,36 +11,36 @@ import ( "time" "github.com/OffchainLabs/go-bitfield" - "github.com/OffchainLabs/prysm/v6/api" - "github.com/OffchainLabs/prysm/v6/api/server" - "github.com/OffchainLabs/prysm/v6/api/server/structs" - blockchainmock "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/signing" - prysmtime "github.com/OffchainLabs/prysm/v6/beacon-chain/core/time" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/transition" - "github.com/OffchainLabs/prysm/v6/beacon-chain/operations/attestations" - "github.com/OffchainLabs/prysm/v6/beacon-chain/operations/blstoexec" - blstoexecmock "github.com/OffchainLabs/prysm/v6/beacon-chain/operations/blstoexec/mock" - slashingsmock "github.com/OffchainLabs/prysm/v6/beacon-chain/operations/slashings/mock" - "github.com/OffchainLabs/prysm/v6/beacon-chain/operations/synccommittee" - "github.com/OffchainLabs/prysm/v6/beacon-chain/operations/voluntaryexits/mock" - p2pMock "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/rpc/core" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/crypto/bls" - 
"github.com/OffchainLabs/prysm/v6/crypto/bls/common" - "github.com/OffchainLabs/prysm/v6/crypto/hash" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/encoding/ssz" - "github.com/OffchainLabs/prysm/v6/network/httputil" - ethpbv1alpha1 "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/api" + "github.com/OffchainLabs/prysm/v7/api/server" + "github.com/OffchainLabs/prysm/v7/api/server/structs" + blockchainmock "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain/testing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/signing" + prysmtime "github.com/OffchainLabs/prysm/v7/beacon-chain/core/time" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/transition" + "github.com/OffchainLabs/prysm/v7/beacon-chain/operations/attestations" + "github.com/OffchainLabs/prysm/v7/beacon-chain/operations/blstoexec" + blstoexecmock "github.com/OffchainLabs/prysm/v7/beacon-chain/operations/blstoexec/mock" + slashingsmock "github.com/OffchainLabs/prysm/v7/beacon-chain/operations/slashings/mock" + "github.com/OffchainLabs/prysm/v7/beacon-chain/operations/synccommittee" + "github.com/OffchainLabs/prysm/v7/beacon-chain/operations/voluntaryexits/mock" + p2pMock "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/testing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/rpc/core" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/crypto/bls" + "github.com/OffchainLabs/prysm/v7/crypto/bls/common" + "github.com/OffchainLabs/prysm/v7/crypto/hash" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/encoding/ssz" + "github.com/OffchainLabs/prysm/v7/network/httputil" + ethpbv1alpha1 "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" + "github.com/OffchainLabs/prysm/v7/time/slots" "github.com/ethereum/go-ethereum/common/hexutil" "github.com/pkg/errors" ) diff --git a/beacon-chain/rpc/eth/beacon/handlers_state.go b/beacon-chain/rpc/eth/beacon/handlers_state.go index a57efb621c..579cd93dfa 100644 --- a/beacon-chain/rpc/eth/beacon/handlers_state.go +++ b/beacon-chain/rpc/eth/beacon/handlers_state.go @@ -7,19 +7,19 @@ import ( "net/http" "strconv" - "github.com/OffchainLabs/prysm/v6/api/server/structs" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/altair" - "github.com/OffchainLabs/prysm/v6/beacon-chain/rpc/eth/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/rpc/eth/shared" - "github.com/OffchainLabs/prysm/v6/beacon-chain/rpc/lookup" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing/trace" - "github.com/OffchainLabs/prysm/v6/network/httputil" - ethpbalpha 
"github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/api/server/structs" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/altair" + "github.com/OffchainLabs/prysm/v7/beacon-chain/rpc/eth/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/rpc/eth/shared" + "github.com/OffchainLabs/prysm/v7/beacon-chain/rpc/lookup" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing/trace" + "github.com/OffchainLabs/prysm/v7/network/httputil" + ethpbalpha "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/time/slots" "github.com/ethereum/go-ethereum/common/hexutil" ) diff --git a/beacon-chain/rpc/eth/beacon/handlers_state_test.go b/beacon-chain/rpc/eth/beacon/handlers_state_test.go index cf5f16af4d..f73d3f9f75 100644 --- a/beacon-chain/rpc/eth/beacon/handlers_state_test.go +++ b/beacon-chain/rpc/eth/beacon/handlers_state_test.go @@ -11,19 +11,19 @@ import ( "testing" "time" - "github.com/OffchainLabs/prysm/v6/api/server/structs" - chainMock "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/testing" - dbTest "github.com/OffchainLabs/prysm/v6/beacon-chain/db/testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/rpc/testutil" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/network/httputil" - ethpbalpha "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/api/server/structs" + chainMock "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain/testing" + dbTest "github.com/OffchainLabs/prysm/v7/beacon-chain/db/testing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/rpc/testutil" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/network/httputil" + ethpbalpha "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" "github.com/ethereum/go-ethereum/common/hexutil" ) diff --git a/beacon-chain/rpc/eth/beacon/handlers_test.go b/beacon-chain/rpc/eth/beacon/handlers_test.go index 1d585a687f..344b535f5a 100644 --- a/beacon-chain/rpc/eth/beacon/handlers_test.go +++ b/beacon-chain/rpc/eth/beacon/handlers_test.go @@ -13,34 +13,34 @@ import ( "time" "github.com/OffchainLabs/go-bitfield" - "github.com/OffchainLabs/prysm/v6/api" - "github.com/OffchainLabs/prysm/v6/api/server/structs" - "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/kzg" - chainMock "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/transition" - "github.com/OffchainLabs/prysm/v6/beacon-chain/db" - dbTest 
"github.com/OffchainLabs/prysm/v6/beacon-chain/db/testing" - doublylinkedtree "github.com/OffchainLabs/prysm/v6/beacon-chain/forkchoice/doubly-linked-tree" - mockp2p "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/testing" - rpctesting "github.com/OffchainLabs/prysm/v6/beacon-chain/rpc/eth/shared/testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/rpc/lookup" - "github.com/OffchainLabs/prysm/v6/beacon-chain/rpc/testutil" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - mockSync "github.com/OffchainLabs/prysm/v6/beacon-chain/sync/initial-sync/testing" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/crypto/bls" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/network/httputil" - eth "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" - "github.com/OffchainLabs/prysm/v6/testing/assert" - mock2 "github.com/OffchainLabs/prysm/v6/testing/mock" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/api" + "github.com/OffchainLabs/prysm/v7/api/server/structs" + "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain/kzg" + chainMock "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain/testing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/transition" + "github.com/OffchainLabs/prysm/v7/beacon-chain/db" + dbTest "github.com/OffchainLabs/prysm/v7/beacon-chain/db/testing" + doublylinkedtree "github.com/OffchainLabs/prysm/v7/beacon-chain/forkchoice/doubly-linked-tree" + mockp2p "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/testing" + rpctesting "github.com/OffchainLabs/prysm/v7/beacon-chain/rpc/eth/shared/testing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/rpc/lookup" + "github.com/OffchainLabs/prysm/v7/beacon-chain/rpc/testutil" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + mockSync "github.com/OffchainLabs/prysm/v7/beacon-chain/sync/initial-sync/testing" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/crypto/bls" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/network/httputil" + eth "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" + "github.com/OffchainLabs/prysm/v7/testing/assert" + mock2 "github.com/OffchainLabs/prysm/v7/testing/mock" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" + "github.com/OffchainLabs/prysm/v7/time/slots" "github.com/ethereum/go-ethereum/common/hexutil" "github.com/pkg/errors" ssz "github.com/prysmaticlabs/fastssz" diff --git a/beacon-chain/rpc/eth/beacon/handlers_validator.go b/beacon-chain/rpc/eth/beacon/handlers_validator.go index 25a40b4892..91b6256f23 100644 --- a/beacon-chain/rpc/eth/beacon/handlers_validator.go +++ b/beacon-chain/rpc/eth/beacon/handlers_validator.go @@ -9,19 +9,19 @@ import ( "strconv" "strings" - "github.com/OffchainLabs/prysm/v6/api/server/structs" - 
"github.com/OffchainLabs/prysm/v6/beacon-chain/rpc/eth/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/rpc/eth/shared" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - statenative "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/consensus-types/validator" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing/trace" - "github.com/OffchainLabs/prysm/v6/network/httputil" - eth "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/api/server/structs" + "github.com/OffchainLabs/prysm/v7/beacon-chain/rpc/eth/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/rpc/eth/shared" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + statenative "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/consensus-types/validator" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing/trace" + "github.com/OffchainLabs/prysm/v7/network/httputil" + eth "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/time/slots" "github.com/ethereum/go-ethereum/common/hexutil" "github.com/pkg/errors" ) diff --git a/beacon-chain/rpc/eth/beacon/handlers_validators_test.go b/beacon-chain/rpc/eth/beacon/handlers_validators_test.go index 1896c4696b..8ff8389b49 100644 --- a/beacon-chain/rpc/eth/beacon/handlers_validators_test.go +++ b/beacon-chain/rpc/eth/beacon/handlers_validators_test.go @@ -10,20 +10,20 @@ import ( "strings" "testing" - "github.com/OffchainLabs/prysm/v6/api" - "github.com/OffchainLabs/prysm/v6/api/server/structs" - chainMock "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/rpc/lookup" - "github.com/OffchainLabs/prysm/v6/beacon-chain/rpc/testutil" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/network/httputil" - eth "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/api" + "github.com/OffchainLabs/prysm/v7/api/server/structs" + chainMock "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain/testing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/rpc/lookup" + "github.com/OffchainLabs/prysm/v7/beacon-chain/rpc/testutil" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/network/httputil" + eth "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + 
"github.com/OffchainLabs/prysm/v7/testing/util" "github.com/ethereum/go-ethereum/common/hexutil" ) diff --git a/beacon-chain/rpc/eth/beacon/init_test.go b/beacon-chain/rpc/eth/beacon/init_test.go index b40158b177..e1d1bde044 100644 --- a/beacon-chain/rpc/eth/beacon/init_test.go +++ b/beacon-chain/rpc/eth/beacon/init_test.go @@ -1,7 +1,7 @@ package beacon import ( - "github.com/OffchainLabs/prysm/v6/config/params" + "github.com/OffchainLabs/prysm/v7/config/params" ) func init() { diff --git a/beacon-chain/rpc/eth/beacon/server.go b/beacon-chain/rpc/eth/beacon/server.go index d5e89c003b..bf6fca748a 100644 --- a/beacon-chain/rpc/eth/beacon/server.go +++ b/beacon-chain/rpc/eth/beacon/server.go @@ -4,22 +4,22 @@ package beacon import ( - "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain" - "github.com/OffchainLabs/prysm/v6/beacon-chain/cache" - blockfeed "github.com/OffchainLabs/prysm/v6/beacon-chain/core/feed/block" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/feed/operation" - "github.com/OffchainLabs/prysm/v6/beacon-chain/db" - "github.com/OffchainLabs/prysm/v6/beacon-chain/execution" - "github.com/OffchainLabs/prysm/v6/beacon-chain/operations/attestations" - "github.com/OffchainLabs/prysm/v6/beacon-chain/operations/blstoexec" - "github.com/OffchainLabs/prysm/v6/beacon-chain/operations/slashings" - "github.com/OffchainLabs/prysm/v6/beacon-chain/operations/voluntaryexits" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p" - "github.com/OffchainLabs/prysm/v6/beacon-chain/rpc/core" - "github.com/OffchainLabs/prysm/v6/beacon-chain/rpc/lookup" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state/stategen" - "github.com/OffchainLabs/prysm/v6/beacon-chain/sync" - eth "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain" + "github.com/OffchainLabs/prysm/v7/beacon-chain/cache" + blockfeed "github.com/OffchainLabs/prysm/v7/beacon-chain/core/feed/block" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/feed/operation" + "github.com/OffchainLabs/prysm/v7/beacon-chain/db" + "github.com/OffchainLabs/prysm/v7/beacon-chain/execution" + "github.com/OffchainLabs/prysm/v7/beacon-chain/operations/attestations" + "github.com/OffchainLabs/prysm/v7/beacon-chain/operations/blstoexec" + "github.com/OffchainLabs/prysm/v7/beacon-chain/operations/slashings" + "github.com/OffchainLabs/prysm/v7/beacon-chain/operations/voluntaryexits" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p" + "github.com/OffchainLabs/prysm/v7/beacon-chain/rpc/core" + "github.com/OffchainLabs/prysm/v7/beacon-chain/rpc/lookup" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state/stategen" + "github.com/OffchainLabs/prysm/v7/beacon-chain/sync" + eth "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" ) // Server defines a server implementation of the gRPC Beacon Chain service, diff --git a/beacon-chain/rpc/eth/blob/BUILD.bazel b/beacon-chain/rpc/eth/blob/BUILD.bazel index e98c2bd769..475333d21a 100644 --- a/beacon-chain/rpc/eth/blob/BUILD.bazel +++ b/beacon-chain/rpc/eth/blob/BUILD.bazel @@ -6,7 +6,7 @@ go_library( "handlers.go", "server.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/beacon-chain/rpc/eth/blob", + importpath = "github.com/OffchainLabs/prysm/v7/beacon-chain/rpc/eth/blob", visibility = ["//visibility:public"], deps = [ "//api:go_default_library", diff --git a/beacon-chain/rpc/eth/blob/handlers.go b/beacon-chain/rpc/eth/blob/handlers.go index b11fca3b8d..afd1570a74 100644 --- a/beacon-chain/rpc/eth/blob/handlers.go +++ 
b/beacon-chain/rpc/eth/blob/handlers.go @@ -7,18 +7,18 @@ import ( "strconv" "strings" - "github.com/OffchainLabs/prysm/v6/api" - "github.com/OffchainLabs/prysm/v6/api/server/structs" - "github.com/OffchainLabs/prysm/v6/beacon-chain/rpc/core" - "github.com/OffchainLabs/prysm/v6/beacon-chain/rpc/eth/shared" - "github.com/OffchainLabs/prysm/v6/beacon-chain/rpc/options" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing/trace" - "github.com/OffchainLabs/prysm/v6/network/httputil" - "github.com/OffchainLabs/prysm/v6/runtime/version" + "github.com/OffchainLabs/prysm/v7/api" + "github.com/OffchainLabs/prysm/v7/api/server/structs" + "github.com/OffchainLabs/prysm/v7/beacon-chain/rpc/core" + "github.com/OffchainLabs/prysm/v7/beacon-chain/rpc/eth/shared" + "github.com/OffchainLabs/prysm/v7/beacon-chain/rpc/options" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing/trace" + "github.com/OffchainLabs/prysm/v7/network/httputil" + "github.com/OffchainLabs/prysm/v7/runtime/version" "github.com/ethereum/go-ethereum/common/hexutil" "github.com/pkg/errors" ) diff --git a/beacon-chain/rpc/eth/blob/handlers_test.go b/beacon-chain/rpc/eth/blob/handlers_test.go index eca9a16b03..471aac7cee 100644 --- a/beacon-chain/rpc/eth/blob/handlers_test.go +++ b/beacon-chain/rpc/eth/blob/handlers_test.go @@ -12,24 +12,24 @@ import ( "testing" "time" - "github.com/OffchainLabs/prysm/v6/api" - "github.com/OffchainLabs/prysm/v6/api/server/structs" - "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/kzg" - mockChain "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/db/filesystem" - testDB "github.com/OffchainLabs/prysm/v6/beacon-chain/db/testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/rpc/lookup" - "github.com/OffchainLabs/prysm/v6/beacon-chain/rpc/testutil" - "github.com/OffchainLabs/prysm/v6/beacon-chain/verification" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/network/httputil" - eth "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/api" + "github.com/OffchainLabs/prysm/v7/api/server/structs" + "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain/kzg" + mockChain "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain/testing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/db/filesystem" + testDB "github.com/OffchainLabs/prysm/v7/beacon-chain/db/testing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/rpc/lookup" + "github.com/OffchainLabs/prysm/v7/beacon-chain/rpc/testutil" + "github.com/OffchainLabs/prysm/v7/beacon-chain/verification" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + 
"github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/network/httputil" + eth "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" "github.com/ethereum/go-ethereum/common/hexutil" ) diff --git a/beacon-chain/rpc/eth/blob/server.go b/beacon-chain/rpc/eth/blob/server.go index 2fdbfd40cf..4d41556726 100644 --- a/beacon-chain/rpc/eth/blob/server.go +++ b/beacon-chain/rpc/eth/blob/server.go @@ -1,8 +1,8 @@ package blob import ( - "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain" - "github.com/OffchainLabs/prysm/v6/beacon-chain/rpc/lookup" + "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain" + "github.com/OffchainLabs/prysm/v7/beacon-chain/rpc/lookup" ) type Server struct { diff --git a/beacon-chain/rpc/eth/config/BUILD.bazel b/beacon-chain/rpc/eth/config/BUILD.bazel index 7c9dabf108..5536a3f08a 100644 --- a/beacon-chain/rpc/eth/config/BUILD.bazel +++ b/beacon-chain/rpc/eth/config/BUILD.bazel @@ -3,7 +3,7 @@ load("@prysm//tools/go:def.bzl", "go_library", "go_test") go_library( name = "go_default_library", srcs = ["handlers.go"], - importpath = "github.com/OffchainLabs/prysm/v6/beacon-chain/rpc/eth/config", + importpath = "github.com/OffchainLabs/prysm/v7/beacon-chain/rpc/eth/config", visibility = ["//visibility:public"], deps = [ "//api/server/structs:go_default_library", diff --git a/beacon-chain/rpc/eth/config/handlers.go b/beacon-chain/rpc/eth/config/handlers.go index b4cf372963..44185dd94c 100644 --- a/beacon-chain/rpc/eth/config/handlers.go +++ b/beacon-chain/rpc/eth/config/handlers.go @@ -8,10 +8,10 @@ import ( "strconv" "strings" - "github.com/OffchainLabs/prysm/v6/api/server/structs" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing/trace" - "github.com/OffchainLabs/prysm/v6/network/httputil" + "github.com/OffchainLabs/prysm/v7/api/server/structs" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing/trace" + "github.com/OffchainLabs/prysm/v7/network/httputil" "github.com/ethereum/go-ethereum/common/hexutil" log "github.com/sirupsen/logrus" ) diff --git a/beacon-chain/rpc/eth/config/handlers_test.go b/beacon-chain/rpc/eth/config/handlers_test.go index 86713ba815..8f174d80d6 100644 --- a/beacon-chain/rpc/eth/config/handlers_test.go +++ b/beacon-chain/rpc/eth/config/handlers_test.go @@ -11,11 +11,11 @@ import ( "reflect" "testing" - "github.com/OffchainLabs/prysm/v6/api/server/structs" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/api/server/structs" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" "github.com/ethereum/go-ethereum/common" "github.com/ethereum/go-ethereum/common/hexutil" log "github.com/sirupsen/logrus" diff --git a/beacon-chain/rpc/eth/debug/BUILD.bazel b/beacon-chain/rpc/eth/debug/BUILD.bazel index 7d85a5c6fc..f68d37afc9 100644 --- a/beacon-chain/rpc/eth/debug/BUILD.bazel +++ 
b/beacon-chain/rpc/eth/debug/BUILD.bazel @@ -6,7 +6,7 @@ go_library( "handlers.go", "server.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/beacon-chain/rpc/eth/debug", + importpath = "github.com/OffchainLabs/prysm/v7/beacon-chain/rpc/eth/debug", visibility = ["//visibility:public"], deps = [ "//api:go_default_library", diff --git a/beacon-chain/rpc/eth/debug/handlers.go b/beacon-chain/rpc/eth/debug/handlers.go index 4ba228f786..7e279bdfaf 100644 --- a/beacon-chain/rpc/eth/debug/handlers.go +++ b/beacon-chain/rpc/eth/debug/handlers.go @@ -10,18 +10,18 @@ import ( "strconv" "strings" - "github.com/OffchainLabs/prysm/v6/api" - "github.com/OffchainLabs/prysm/v6/api/server/structs" - "github.com/OffchainLabs/prysm/v6/beacon-chain/rpc/core" - "github.com/OffchainLabs/prysm/v6/beacon-chain/rpc/eth/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/rpc/eth/shared" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing/trace" - "github.com/OffchainLabs/prysm/v6/network/httputil" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" + "github.com/OffchainLabs/prysm/v7/api" + "github.com/OffchainLabs/prysm/v7/api/server/structs" + "github.com/OffchainLabs/prysm/v7/beacon-chain/rpc/core" + "github.com/OffchainLabs/prysm/v7/beacon-chain/rpc/eth/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/rpc/eth/shared" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing/trace" + "github.com/OffchainLabs/prysm/v7/network/httputil" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" "github.com/ethereum/go-ethereum/common/hexutil" "github.com/pkg/errors" ) diff --git a/beacon-chain/rpc/eth/debug/handlers_test.go b/beacon-chain/rpc/eth/debug/handlers_test.go index 082f774004..90635813b6 100644 --- a/beacon-chain/rpc/eth/debug/handlers_test.go +++ b/beacon-chain/rpc/eth/debug/handlers_test.go @@ -11,23 +11,23 @@ import ( "net/url" "testing" - "github.com/OffchainLabs/prysm/v6/api" - "github.com/OffchainLabs/prysm/v6/api/server/structs" - blockchainmock "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/testing" - dbtest "github.com/OffchainLabs/prysm/v6/beacon-chain/db/testing" - doublylinkedtree "github.com/OffchainLabs/prysm/v6/beacon-chain/forkchoice/doubly-linked-tree" - forkchoicetypes "github.com/OffchainLabs/prysm/v6/beacon-chain/forkchoice/types" - "github.com/OffchainLabs/prysm/v6/beacon-chain/rpc/core" - "github.com/OffchainLabs/prysm/v6/beacon-chain/rpc/testutil" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/api" + "github.com/OffchainLabs/prysm/v7/api/server/structs" + blockchainmock 
"github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain/testing" + dbtest "github.com/OffchainLabs/prysm/v7/beacon-chain/db/testing" + doublylinkedtree "github.com/OffchainLabs/prysm/v7/beacon-chain/forkchoice/doubly-linked-tree" + forkchoicetypes "github.com/OffchainLabs/prysm/v7/beacon-chain/forkchoice/types" + "github.com/OffchainLabs/prysm/v7/beacon-chain/rpc/core" + "github.com/OffchainLabs/prysm/v7/beacon-chain/rpc/testutil" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" "github.com/ethereum/go-ethereum/common/hexutil" ) diff --git a/beacon-chain/rpc/eth/debug/server.go b/beacon-chain/rpc/eth/debug/server.go index e0c822e19e..8bbe22ce51 100644 --- a/beacon-chain/rpc/eth/debug/server.go +++ b/beacon-chain/rpc/eth/debug/server.go @@ -4,9 +4,9 @@ package debug import ( - "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain" - "github.com/OffchainLabs/prysm/v6/beacon-chain/db" - "github.com/OffchainLabs/prysm/v6/beacon-chain/rpc/lookup" + "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain" + "github.com/OffchainLabs/prysm/v7/beacon-chain/db" + "github.com/OffchainLabs/prysm/v7/beacon-chain/rpc/lookup" ) // Server defines a server implementation of the gRPC Beacon Chain service, diff --git a/beacon-chain/rpc/eth/events/BUILD.bazel b/beacon-chain/rpc/eth/events/BUILD.bazel index 09254b3a67..485880a814 100644 --- a/beacon-chain/rpc/eth/events/BUILD.bazel +++ b/beacon-chain/rpc/eth/events/BUILD.bazel @@ -7,7 +7,7 @@ go_library( "log.go", "server.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/beacon-chain/rpc/eth/events", + importpath = "github.com/OffchainLabs/prysm/v7/beacon-chain/rpc/eth/events", visibility = ["//visibility:public"], deps = [ "//api:go_default_library", diff --git a/beacon-chain/rpc/eth/events/events.go b/beacon-chain/rpc/eth/events/events.go index 4971abdf63..92fd0f91cb 100644 --- a/beacon-chain/rpc/eth/events/events.go +++ b/beacon-chain/rpc/eth/events/events.go @@ -11,25 +11,25 @@ import ( "strconv" "time" - "github.com/OffchainLabs/prysm/v6/api" - "github.com/OffchainLabs/prysm/v6/api/server/structs" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/feed" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/feed/operation" - statefeed "github.com/OffchainLabs/prysm/v6/beacon-chain/core/feed/state" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/transition" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - payloadattribute "github.com/OffchainLabs/prysm/v6/consensus-types/payload-attribute" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing/trace" - "github.com/OffchainLabs/prysm/v6/network/httputil" - engine "github.com/OffchainLabs/prysm/v6/proto/engine/v1" - ethpb "github.com/OffchainLabs/prysm/v6/proto/eth/v1" - eth "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" - 
"github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/api" + "github.com/OffchainLabs/prysm/v7/api/server/structs" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/feed" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/feed/operation" + statefeed "github.com/OffchainLabs/prysm/v7/beacon-chain/core/feed/state" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/transition" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + payloadattribute "github.com/OffchainLabs/prysm/v7/consensus-types/payload-attribute" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing/trace" + "github.com/OffchainLabs/prysm/v7/network/httputil" + engine "github.com/OffchainLabs/prysm/v7/proto/engine/v1" + ethpb "github.com/OffchainLabs/prysm/v7/proto/eth/v1" + eth "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" + "github.com/OffchainLabs/prysm/v7/time/slots" "github.com/ethereum/go-ethereum/common/hexutil" "github.com/pkg/errors" "github.com/prometheus/client_golang/prometheus" diff --git a/beacon-chain/rpc/eth/events/events_test.go b/beacon-chain/rpc/eth/events/events_test.go index 27f0d1371d..329140a4e3 100644 --- a/beacon-chain/rpc/eth/events/events_test.go +++ b/beacon-chain/rpc/eth/events/events_test.go @@ -12,24 +12,24 @@ import ( "testing" "time" - mockChain "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/cache" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/feed" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/feed/operation" - statefeed "github.com/OffchainLabs/prysm/v6/beacon-chain/core/feed/state" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state/stategen/mock" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - payloadattribute "github.com/OffchainLabs/prysm/v6/consensus-types/payload-attribute" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - enginev1 "github.com/OffchainLabs/prysm/v6/proto/engine/v1" - ethpb "github.com/OffchainLabs/prysm/v6/proto/eth/v1" - eth "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + mockChain "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain/testing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/cache" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/feed" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/feed/operation" + statefeed "github.com/OffchainLabs/prysm/v7/beacon-chain/core/feed/state" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state/stategen/mock" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + payloadattribute "github.com/OffchainLabs/prysm/v7/consensus-types/payload-attribute" + 
"github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + enginev1 "github.com/OffchainLabs/prysm/v7/proto/engine/v1" + ethpb "github.com/OffchainLabs/prysm/v7/proto/eth/v1" + eth "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" "github.com/ethereum/go-ethereum/common" "github.com/r3labs/sse/v2" "github.com/sirupsen/logrus" diff --git a/beacon-chain/rpc/eth/events/http_test.go b/beacon-chain/rpc/eth/events/http_test.go index d99c5092a4..a2c408bacc 100644 --- a/beacon-chain/rpc/eth/events/http_test.go +++ b/beacon-chain/rpc/eth/events/http_test.go @@ -8,7 +8,7 @@ import ( "testing" "time" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/require" ) type StreamingResponseWriterRecorder struct { diff --git a/beacon-chain/rpc/eth/events/server.go b/beacon-chain/rpc/eth/events/server.go index a386ea5431..bcc05cb253 100644 --- a/beacon-chain/rpc/eth/events/server.go +++ b/beacon-chain/rpc/eth/events/server.go @@ -6,11 +6,11 @@ package events import ( "time" - "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain" - "github.com/OffchainLabs/prysm/v6/beacon-chain/cache" - opfeed "github.com/OffchainLabs/prysm/v6/beacon-chain/core/feed/operation" - statefeed "github.com/OffchainLabs/prysm/v6/beacon-chain/core/feed/state" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state/stategen" + "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain" + "github.com/OffchainLabs/prysm/v7/beacon-chain/cache" + opfeed "github.com/OffchainLabs/prysm/v7/beacon-chain/core/feed/operation" + statefeed "github.com/OffchainLabs/prysm/v7/beacon-chain/core/feed/state" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state/stategen" ) // Server defines a server implementation of the http events service, diff --git a/beacon-chain/rpc/eth/helpers/BUILD.bazel b/beacon-chain/rpc/eth/helpers/BUILD.bazel index 763472b6c4..92b4d0e977 100644 --- a/beacon-chain/rpc/eth/helpers/BUILD.bazel +++ b/beacon-chain/rpc/eth/helpers/BUILD.bazel @@ -7,7 +7,7 @@ go_library( "sync.go", "validator_status.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/beacon-chain/rpc/eth/helpers", + importpath = "github.com/OffchainLabs/prysm/v7/beacon-chain/rpc/eth/helpers", visibility = ["//visibility:public"], deps = [ "//beacon-chain/blockchain:go_default_library", diff --git a/beacon-chain/rpc/eth/helpers/error_handling.go b/beacon-chain/rpc/eth/helpers/error_handling.go index c06efe840b..92a48b2387 100644 --- a/beacon-chain/rpc/eth/helpers/error_handling.go +++ b/beacon-chain/rpc/eth/helpers/error_handling.go @@ -4,12 +4,12 @@ import ( "errors" "net/http" - "github.com/OffchainLabs/prysm/v6/beacon-chain/rpc/eth/shared" - "github.com/OffchainLabs/prysm/v6/beacon-chain/rpc/lookup" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state/stategen" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - "github.com/OffchainLabs/prysm/v6/network/httputil" + "github.com/OffchainLabs/prysm/v7/beacon-chain/rpc/eth/shared" + "github.com/OffchainLabs/prysm/v7/beacon-chain/rpc/lookup" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state/stategen" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + "github.com/OffchainLabs/prysm/v7/network/httputil" "google.golang.org/grpc/codes" "google.golang.org/grpc/status" ) diff --git 
a/beacon-chain/rpc/eth/helpers/sync.go b/beacon-chain/rpc/eth/helpers/sync.go index f06c6ee56c..640e9b6263 100644 --- a/beacon-chain/rpc/eth/helpers/sync.go +++ b/beacon-chain/rpc/eth/helpers/sync.go @@ -6,13 +6,13 @@ import ( "strconv" "strings" - "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain" - "github.com/OffchainLabs/prysm/v6/beacon-chain/db" - "github.com/OffchainLabs/prysm/v6/beacon-chain/rpc/lookup" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain" + "github.com/OffchainLabs/prysm/v7/beacon-chain/db" + "github.com/OffchainLabs/prysm/v7/beacon-chain/rpc/lookup" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/time/slots" "github.com/ethereum/go-ethereum/common/hexutil" "github.com/pkg/errors" ) diff --git a/beacon-chain/rpc/eth/helpers/sync_test.go b/beacon-chain/rpc/eth/helpers/sync_test.go index c3ff519b4d..34455db637 100644 --- a/beacon-chain/rpc/eth/helpers/sync_test.go +++ b/beacon-chain/rpc/eth/helpers/sync_test.go @@ -6,23 +6,23 @@ import ( "strconv" "testing" - chainmock "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/testing" - dbtest "github.com/OffchainLabs/prysm/v6/beacon-chain/db/testing" - doublylinkedtree "github.com/OffchainLabs/prysm/v6/beacon-chain/forkchoice/doubly-linked-tree" - "github.com/OffchainLabs/prysm/v6/beacon-chain/rpc/lookup" - "github.com/OffchainLabs/prysm/v6/beacon-chain/rpc/testutil" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - enginev1 "github.com/OffchainLabs/prysm/v6/proto/engine/v1" - eth "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + chainmock "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain/testing" + dbtest "github.com/OffchainLabs/prysm/v7/beacon-chain/db/testing" + doublylinkedtree "github.com/OffchainLabs/prysm/v7/beacon-chain/forkchoice/doubly-linked-tree" + "github.com/OffchainLabs/prysm/v7/beacon-chain/rpc/lookup" + "github.com/OffchainLabs/prysm/v7/beacon-chain/rpc/testutil" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + enginev1 "github.com/OffchainLabs/prysm/v7/proto/engine/v1" + eth "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + 
"github.com/OffchainLabs/prysm/v7/testing/util" "github.com/ethereum/go-ethereum/common/hexutil" "github.com/pkg/errors" ) diff --git a/beacon-chain/rpc/eth/helpers/validator_status.go b/beacon-chain/rpc/eth/helpers/validator_status.go index eeb16e1f30..af36869820 100644 --- a/beacon-chain/rpc/eth/helpers/validator_status.go +++ b/beacon-chain/rpc/eth/helpers/validator_status.go @@ -1,10 +1,10 @@ package helpers import ( - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/consensus-types/validator" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/consensus-types/validator" "github.com/pkg/errors" ) diff --git a/beacon-chain/rpc/eth/helpers/validator_status_test.go b/beacon-chain/rpc/eth/helpers/validator_status_test.go index 07691ecadc..f76a18b7d7 100644 --- a/beacon-chain/rpc/eth/helpers/validator_status_test.go +++ b/beacon-chain/rpc/eth/helpers/validator_status_test.go @@ -4,14 +4,14 @@ import ( "strconv" "testing" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/consensus-types/validator" - ethpb "github.com/OffchainLabs/prysm/v6/proto/eth/v1" - "github.com/OffchainLabs/prysm/v6/proto/migration" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/consensus-types/validator" + ethpb "github.com/OffchainLabs/prysm/v7/proto/eth/v1" + "github.com/OffchainLabs/prysm/v7/proto/migration" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" ) func Test_ValidatorStatus(t *testing.T) { diff --git a/beacon-chain/rpc/eth/light-client/BUILD.bazel b/beacon-chain/rpc/eth/light-client/BUILD.bazel index 366eab820b..1fa3b165e5 100644 --- a/beacon-chain/rpc/eth/light-client/BUILD.bazel +++ b/beacon-chain/rpc/eth/light-client/BUILD.bazel @@ -6,7 +6,7 @@ go_library( "handlers.go", "server.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/beacon-chain/rpc/eth/light-client", + importpath = "github.com/OffchainLabs/prysm/v7/beacon-chain/rpc/eth/light-client", visibility = ["//beacon-chain:__subpackages__"], deps = [ "//api:go_default_library", diff --git a/beacon-chain/rpc/eth/light-client/handlers.go b/beacon-chain/rpc/eth/light-client/handlers.go index 3f7655b592..8a6655f6e3 100644 --- a/beacon-chain/rpc/eth/light-client/handlers.go +++ b/beacon-chain/rpc/eth/light-client/handlers.go @@ -4,16 +4,16 @@ import ( "fmt" "net/http" - "github.com/OffchainLabs/prysm/v6/api" - "github.com/OffchainLabs/prysm/v6/api/server/structs" - lightclient "github.com/OffchainLabs/prysm/v6/beacon-chain/light-client" - "github.com/OffchainLabs/prysm/v6/beacon-chain/rpc/eth/shared" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing/trace" - "github.com/OffchainLabs/prysm/v6/network/httputil" - 
"github.com/OffchainLabs/prysm/v6/runtime/version" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/api" + "github.com/OffchainLabs/prysm/v7/api/server/structs" + lightclient "github.com/OffchainLabs/prysm/v7/beacon-chain/light-client" + "github.com/OffchainLabs/prysm/v7/beacon-chain/rpc/eth/shared" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing/trace" + "github.com/OffchainLabs/prysm/v7/network/httputil" + "github.com/OffchainLabs/prysm/v7/runtime/version" + "github.com/OffchainLabs/prysm/v7/time/slots" "github.com/ethereum/go-ethereum/common/hexutil" "github.com/pkg/errors" ssz "github.com/prysmaticlabs/fastssz" diff --git a/beacon-chain/rpc/eth/light-client/handlers_test.go b/beacon-chain/rpc/eth/light-client/handlers_test.go index 6c073bac62..3486c49220 100644 --- a/beacon-chain/rpc/eth/light-client/handlers_test.go +++ b/beacon-chain/rpc/eth/light-client/handlers_test.go @@ -11,25 +11,25 @@ import ( "strconv" "testing" - "github.com/OffchainLabs/prysm/v6/api/server/structs" - "github.com/OffchainLabs/prysm/v6/async/event" - blockchainTest "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/db" - dbtesting "github.com/OffchainLabs/prysm/v6/beacon-chain/db/testing" - lightclient "github.com/OffchainLabs/prysm/v6/beacon-chain/light-client" - p2ptesting "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/testing" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - light_client "github.com/OffchainLabs/prysm/v6/consensus-types/light-client" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - enginev1 "github.com/OffchainLabs/prysm/v6/proto/engine/v1" - pb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/api/server/structs" + "github.com/OffchainLabs/prysm/v7/async/event" + blockchainTest "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain/testing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/db" + dbtesting "github.com/OffchainLabs/prysm/v7/beacon-chain/db/testing" + lightclient "github.com/OffchainLabs/prysm/v7/beacon-chain/light-client" + p2ptesting "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/testing" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + light_client "github.com/OffchainLabs/prysm/v7/consensus-types/light-client" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + enginev1 "github.com/OffchainLabs/prysm/v7/proto/engine/v1" + pb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" "github.com/ethereum/go-ethereum/common/hexutil" ssz "github.com/prysmaticlabs/fastssz" 
"google.golang.org/protobuf/proto" diff --git a/beacon-chain/rpc/eth/light-client/server.go b/beacon-chain/rpc/eth/light-client/server.go index b41cb2204a..a67671b072 100644 --- a/beacon-chain/rpc/eth/light-client/server.go +++ b/beacon-chain/rpc/eth/light-client/server.go @@ -1,8 +1,8 @@ package lightclient import ( - "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain" - lightClient "github.com/OffchainLabs/prysm/v6/beacon-chain/light-client" + "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain" + lightClient "github.com/OffchainLabs/prysm/v7/beacon-chain/light-client" ) type Server struct { diff --git a/beacon-chain/rpc/eth/node/BUILD.bazel b/beacon-chain/rpc/eth/node/BUILD.bazel index 7ae9c5ab4b..bb409c78e8 100644 --- a/beacon-chain/rpc/eth/node/BUILD.bazel +++ b/beacon-chain/rpc/eth/node/BUILD.bazel @@ -7,7 +7,7 @@ go_library( "handlers_peers.go", "server.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/beacon-chain/rpc/eth/node", + importpath = "github.com/OffchainLabs/prysm/v7/beacon-chain/rpc/eth/node", visibility = ["//visibility:public"], deps = [ "//api/server/structs:go_default_library", diff --git a/beacon-chain/rpc/eth/node/handlers.go b/beacon-chain/rpc/eth/node/handlers.go index ba1d3ef95d..665dfa78ac 100644 --- a/beacon-chain/rpc/eth/node/handlers.go +++ b/beacon-chain/rpc/eth/node/handlers.go @@ -6,15 +6,15 @@ import ( "runtime" "strconv" - "github.com/OffchainLabs/prysm/v6/api/server/structs" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p" - "github.com/OffchainLabs/prysm/v6/beacon-chain/rpc/eth/shared" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing/trace" - "github.com/OffchainLabs/prysm/v6/network/httputil" - ethpb "github.com/OffchainLabs/prysm/v6/proto/eth/v1" - "github.com/OffchainLabs/prysm/v6/runtime/version" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/api/server/structs" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p" + "github.com/OffchainLabs/prysm/v7/beacon-chain/rpc/eth/shared" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing/trace" + "github.com/OffchainLabs/prysm/v7/network/httputil" + ethpb "github.com/OffchainLabs/prysm/v7/proto/eth/v1" + "github.com/OffchainLabs/prysm/v7/runtime/version" + "github.com/OffchainLabs/prysm/v7/time/slots" "github.com/ethereum/go-ethereum/common/hexutil" ) diff --git a/beacon-chain/rpc/eth/node/handlers_peers.go b/beacon-chain/rpc/eth/node/handlers_peers.go index e36d2bd693..44bc8d5294 100644 --- a/beacon-chain/rpc/eth/node/handlers_peers.go +++ b/beacon-chain/rpc/eth/node/handlers_peers.go @@ -5,14 +5,14 @@ import ( "strconv" "strings" - "github.com/OffchainLabs/prysm/v6/api/server/structs" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/peers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/peers/peerdata" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing/trace" - "github.com/OffchainLabs/prysm/v6/network/httputil" - "github.com/OffchainLabs/prysm/v6/proto/migration" - eth "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/api/server/structs" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/peers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/peers/peerdata" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing/trace" + 
"github.com/OffchainLabs/prysm/v7/network/httputil" + "github.com/OffchainLabs/prysm/v7/proto/migration" + eth "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" "github.com/libp2p/go-libp2p/core/peer" "github.com/pkg/errors" ) diff --git a/beacon-chain/rpc/eth/node/handlers_peers_test.go b/beacon-chain/rpc/eth/node/handlers_peers_test.go index e6e3e50ff0..a9b89c0ad1 100644 --- a/beacon-chain/rpc/eth/node/handlers_peers_test.go +++ b/beacon-chain/rpc/eth/node/handlers_peers_test.go @@ -9,13 +9,13 @@ import ( "strings" "testing" - "github.com/OffchainLabs/prysm/v6/api/server/structs" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/peers" - mockp2p "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/testing" - "github.com/OffchainLabs/prysm/v6/network/httputil" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/api/server/structs" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/peers" + mockp2p "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/testing" + "github.com/OffchainLabs/prysm/v7/network/httputil" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" "github.com/ethereum/go-ethereum/p2p/enr" "github.com/libp2p/go-libp2p/core/network" "github.com/libp2p/go-libp2p/core/peer" diff --git a/beacon-chain/rpc/eth/node/handlers_test.go b/beacon-chain/rpc/eth/node/handlers_test.go index b9cf95b11d..4c5e70e548 100644 --- a/beacon-chain/rpc/eth/node/handlers_test.go +++ b/beacon-chain/rpc/eth/node/handlers_test.go @@ -10,21 +10,21 @@ import ( "testing" "github.com/OffchainLabs/go-bitfield" - "github.com/OffchainLabs/prysm/v6/api/server/structs" - mock "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p" - mockp2p "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/rpc/testutil" - syncmock "github.com/OffchainLabs/prysm/v6/beacon-chain/sync/initial-sync/testing" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/consensus-types/wrapper" - "github.com/OffchainLabs/prysm/v6/network/httputil" - pb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/api/server/structs" + mock "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain/testing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p" + mockp2p "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/testing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/rpc/testutil" + syncmock "github.com/OffchainLabs/prysm/v7/beacon-chain/sync/initial-sync/testing" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/consensus-types/wrapper" + "github.com/OffchainLabs/prysm/v7/network/httputil" + pb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + 
"github.com/OffchainLabs/prysm/v7/testing/util" "github.com/ethereum/go-ethereum/p2p/enode" "github.com/ethereum/go-ethereum/p2p/enr" "github.com/libp2p/go-libp2p/core/peer" diff --git a/beacon-chain/rpc/eth/node/server.go b/beacon-chain/rpc/eth/node/server.go index 71b7698e1b..588bcef841 100644 --- a/beacon-chain/rpc/eth/node/server.go +++ b/beacon-chain/rpc/eth/node/server.go @@ -4,11 +4,11 @@ package node import ( - "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain" - "github.com/OffchainLabs/prysm/v6/beacon-chain/db" - "github.com/OffchainLabs/prysm/v6/beacon-chain/execution" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p" - "github.com/OffchainLabs/prysm/v6/beacon-chain/sync" + "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain" + "github.com/OffchainLabs/prysm/v7/beacon-chain/db" + "github.com/OffchainLabs/prysm/v7/beacon-chain/execution" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p" + "github.com/OffchainLabs/prysm/v7/beacon-chain/sync" "google.golang.org/grpc" ) diff --git a/beacon-chain/rpc/eth/rewards/BUILD.bazel b/beacon-chain/rpc/eth/rewards/BUILD.bazel index 33905c7a56..f8368f53b4 100644 --- a/beacon-chain/rpc/eth/rewards/BUILD.bazel +++ b/beacon-chain/rpc/eth/rewards/BUILD.bazel @@ -7,7 +7,7 @@ go_library( "server.go", "service.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/beacon-chain/rpc/eth/rewards", + importpath = "github.com/OffchainLabs/prysm/v7/beacon-chain/rpc/eth/rewards", visibility = ["//visibility:public"], deps = [ "//api/server/structs:go_default_library", diff --git a/beacon-chain/rpc/eth/rewards/handlers.go b/beacon-chain/rpc/eth/rewards/handlers.go index 1d392196a0..a30e843553 100644 --- a/beacon-chain/rpc/eth/rewards/handlers.go +++ b/beacon-chain/rpc/eth/rewards/handlers.go @@ -8,19 +8,19 @@ import ( "strconv" "strings" - "github.com/OffchainLabs/prysm/v6/api/server/structs" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/altair" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/epoch/precompute" - "github.com/OffchainLabs/prysm/v6/beacon-chain/rpc/eth/shared" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing/trace" - "github.com/OffchainLabs/prysm/v6/network/httputil" - "github.com/OffchainLabs/prysm/v6/runtime/version" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/api/server/structs" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/altair" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/epoch/precompute" + "github.com/OffchainLabs/prysm/v7/beacon-chain/rpc/eth/shared" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing/trace" + "github.com/OffchainLabs/prysm/v7/network/httputil" + "github.com/OffchainLabs/prysm/v7/runtime/version" + "github.com/OffchainLabs/prysm/v7/time/slots" "github.com/wealdtech/go-bytesutil" ) diff --git a/beacon-chain/rpc/eth/rewards/handlers_test.go b/beacon-chain/rpc/eth/rewards/handlers_test.go index f0604ffb9a..55d76f8c1a 100644 --- 
a/beacon-chain/rpc/eth/rewards/handlers_test.go +++ b/beacon-chain/rpc/eth/rewards/handlers_test.go @@ -12,29 +12,29 @@ import ( "testing" "github.com/OffchainLabs/go-bitfield" - "github.com/OffchainLabs/prysm/v6/api/server/structs" - mock "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/altair" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/signing" - dbutil "github.com/OffchainLabs/prysm/v6/beacon-chain/db/testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/rpc/testutil" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - mockstategen "github.com/OffchainLabs/prysm/v6/beacon-chain/state/stategen/mock" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/crypto/bls" - "github.com/OffchainLabs/prysm/v6/crypto/bls/blst" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/network/httputil" - eth "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/api/server/structs" + mock "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain/testing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/altair" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/signing" + dbutil "github.com/OffchainLabs/prysm/v7/beacon-chain/db/testing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/rpc/testutil" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + mockstategen "github.com/OffchainLabs/prysm/v7/beacon-chain/state/stategen/mock" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/crypto/bls" + "github.com/OffchainLabs/prysm/v7/crypto/bls/blst" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/network/httputil" + eth "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" "github.com/pkg/errors" ) diff --git a/beacon-chain/rpc/eth/rewards/server.go b/beacon-chain/rpc/eth/rewards/server.go index 8cb6ab8302..2a764181cf 100644 --- a/beacon-chain/rpc/eth/rewards/server.go +++ b/beacon-chain/rpc/eth/rewards/server.go @@ -1,8 +1,8 @@ package rewards import ( - "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain" - "github.com/OffchainLabs/prysm/v6/beacon-chain/rpc/lookup" + "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain" + "github.com/OffchainLabs/prysm/v7/beacon-chain/rpc/lookup" ) type Server struct { diff --git a/beacon-chain/rpc/eth/rewards/service.go 
b/beacon-chain/rpc/eth/rewards/service.go index 6043620e20..1d1348c66b 100644 --- a/beacon-chain/rpc/eth/rewards/service.go +++ b/beacon-chain/rpc/eth/rewards/service.go @@ -5,18 +5,18 @@ import ( "net/http" "strconv" - "github.com/OffchainLabs/prysm/v6/api/server/structs" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/altair" - coreblocks "github.com/OffchainLabs/prysm/v6/beacon-chain/core/blocks" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/transition" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/validators" - "github.com/OffchainLabs/prysm/v6/beacon-chain/db" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state/stategen" - consensusblocks "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - "github.com/OffchainLabs/prysm/v6/network/httputil" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/api/server/structs" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/altair" + coreblocks "github.com/OffchainLabs/prysm/v7/beacon-chain/core/blocks" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/transition" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/validators" + "github.com/OffchainLabs/prysm/v7/beacon-chain/db" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state/stategen" + consensusblocks "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + "github.com/OffchainLabs/prysm/v7/network/httputil" + "github.com/OffchainLabs/prysm/v7/time/slots" ) // BlockRewardsFetcher is a interface that provides access to reward related responses diff --git a/beacon-chain/rpc/eth/rewards/service_test.go b/beacon-chain/rpc/eth/rewards/service_test.go index 026246d44a..71f96ca661 100644 --- a/beacon-chain/rpc/eth/rewards/service_test.go +++ b/beacon-chain/rpc/eth/rewards/service_test.go @@ -3,12 +3,12 @@ package rewards import ( "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/transition" - dbutil "github.com/OffchainLabs/prysm/v6/beacon-chain/db/testing" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/transition" + dbutil "github.com/OffchainLabs/prysm/v7/beacon-chain/db/testing" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" ) func TestGetStateForRewards_NextSlotCacheHit(t *testing.T) { diff --git a/beacon-chain/rpc/eth/rewards/testing/BUILD.bazel b/beacon-chain/rpc/eth/rewards/testing/BUILD.bazel index 9fa9f0862f..2ea70a7b5b 100644 --- a/beacon-chain/rpc/eth/rewards/testing/BUILD.bazel +++ b/beacon-chain/rpc/eth/rewards/testing/BUILD.bazel @@ -3,7 +3,7 @@ load("@prysm//tools/go:def.bzl", "go_library") go_library( name = "go_default_library", srcs = ["mock.go"], - importpath = "github.com/OffchainLabs/prysm/v6/beacon-chain/rpc/eth/rewards/testing", + importpath = "github.com/OffchainLabs/prysm/v7/beacon-chain/rpc/eth/rewards/testing", visibility = ["//visibility:public"], deps = [ "//api/server/structs:go_default_library", diff --git a/beacon-chain/rpc/eth/rewards/testing/mock.go 
b/beacon-chain/rpc/eth/rewards/testing/mock.go index 7a964d0305..2403e1f6ed 100644 --- a/beacon-chain/rpc/eth/rewards/testing/mock.go +++ b/beacon-chain/rpc/eth/rewards/testing/mock.go @@ -3,10 +3,10 @@ package testing import ( "context" - "github.com/OffchainLabs/prysm/v6/api/server/structs" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - "github.com/OffchainLabs/prysm/v6/network/httputil" + "github.com/OffchainLabs/prysm/v7/api/server/structs" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + "github.com/OffchainLabs/prysm/v7/network/httputil" ) type MockBlockRewardFetcher struct { diff --git a/beacon-chain/rpc/eth/shared/BUILD.bazel b/beacon-chain/rpc/eth/shared/BUILD.bazel index fca42bc01e..a80fdddd6b 100644 --- a/beacon-chain/rpc/eth/shared/BUILD.bazel +++ b/beacon-chain/rpc/eth/shared/BUILD.bazel @@ -6,7 +6,7 @@ go_library( "errors.go", "request.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/beacon-chain/rpc/eth/shared", + importpath = "github.com/OffchainLabs/prysm/v7/beacon-chain/rpc/eth/shared", visibility = ["//visibility:public"], deps = [ "//api/server/structs:go_default_library", diff --git a/beacon-chain/rpc/eth/shared/errors.go b/beacon-chain/rpc/eth/shared/errors.go index d42e6fe8a7..353b818ac7 100644 --- a/beacon-chain/rpc/eth/shared/errors.go +++ b/beacon-chain/rpc/eth/shared/errors.go @@ -3,10 +3,10 @@ package shared import ( "net/http" - "github.com/OffchainLabs/prysm/v6/beacon-chain/rpc/lookup" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - "github.com/OffchainLabs/prysm/v6/network/httputil" + "github.com/OffchainLabs/prysm/v7/beacon-chain/rpc/lookup" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + "github.com/OffchainLabs/prysm/v7/network/httputil" "github.com/pkg/errors" ) diff --git a/beacon-chain/rpc/eth/shared/errors_test.go b/beacon-chain/rpc/eth/shared/errors_test.go index 734a4c4e72..9181813603 100644 --- a/beacon-chain/rpc/eth/shared/errors_test.go +++ b/beacon-chain/rpc/eth/shared/errors_test.go @@ -6,9 +6,9 @@ import ( "net/http/httptest" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/rpc/lookup" - "github.com/OffchainLabs/prysm/v6/network/httputil" - "github.com/OffchainLabs/prysm/v6/testing/assert" + "github.com/OffchainLabs/prysm/v7/beacon-chain/rpc/lookup" + "github.com/OffchainLabs/prysm/v7/network/httputil" + "github.com/OffchainLabs/prysm/v7/testing/assert" "github.com/pkg/errors" ) diff --git a/beacon-chain/rpc/eth/shared/request.go b/beacon-chain/rpc/eth/shared/request.go index 344955532a..c1ddda4249 100644 --- a/beacon-chain/rpc/eth/shared/request.go +++ b/beacon-chain/rpc/eth/shared/request.go @@ -8,10 +8,10 @@ import ( "strconv" "strings" - "github.com/OffchainLabs/prysm/v6/api/server/structs" - "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain" - "github.com/OffchainLabs/prysm/v6/beacon-chain/sync" - "github.com/OffchainLabs/prysm/v6/network/httputil" + "github.com/OffchainLabs/prysm/v7/api/server/structs" + "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain" + "github.com/OffchainLabs/prysm/v7/beacon-chain/sync" + "github.com/OffchainLabs/prysm/v7/network/httputil" "github.com/ethereum/go-ethereum/common/hexutil" ) diff --git a/beacon-chain/rpc/eth/shared/testing/BUILD.bazel 
b/beacon-chain/rpc/eth/shared/testing/BUILD.bazel index 889d7c056b..45bd04801b 100644 --- a/beacon-chain/rpc/eth/shared/testing/BUILD.bazel +++ b/beacon-chain/rpc/eth/shared/testing/BUILD.bazel @@ -8,6 +8,6 @@ go_library( "json_mainnet.go", "json_minimal.go", # keep ], - importpath = "github.com/OffchainLabs/prysm/v6/beacon-chain/rpc/eth/shared/testing", + importpath = "github.com/OffchainLabs/prysm/v7/beacon-chain/rpc/eth/shared/testing", visibility = ["//visibility:public"], ) diff --git a/beacon-chain/rpc/eth/validator/BUILD.bazel b/beacon-chain/rpc/eth/validator/BUILD.bazel index a965ef4aee..d0a0e9d044 100644 --- a/beacon-chain/rpc/eth/validator/BUILD.bazel +++ b/beacon-chain/rpc/eth/validator/BUILD.bazel @@ -8,7 +8,7 @@ go_library( "log.go", "server.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/beacon-chain/rpc/eth/validator", + importpath = "github.com/OffchainLabs/prysm/v7/beacon-chain/rpc/eth/validator", visibility = ["//visibility:public"], deps = [ "//api:go_default_library", diff --git a/beacon-chain/rpc/eth/validator/handlers.go b/beacon-chain/rpc/eth/validator/handlers.go index 319df04ad2..24b1924891 100644 --- a/beacon-chain/rpc/eth/validator/handlers.go +++ b/beacon-chain/rpc/eth/validator/handlers.go @@ -13,30 +13,30 @@ import ( "strconv" "time" - "github.com/OffchainLabs/prysm/v6/api" - "github.com/OffchainLabs/prysm/v6/api/server" - "github.com/OffchainLabs/prysm/v6/api/server/structs" - "github.com/OffchainLabs/prysm/v6/beacon-chain/builder" - "github.com/OffchainLabs/prysm/v6/beacon-chain/cache" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/transition" - "github.com/OffchainLabs/prysm/v6/beacon-chain/rpc/core" - rpchelpers "github.com/OffchainLabs/prysm/v6/beacon-chain/rpc/eth/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/rpc/eth/shared" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/config/features" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - validator2 "github.com/OffchainLabs/prysm/v6/consensus-types/validator" - mvslice "github.com/OffchainLabs/prysm/v6/container/multi-value-slice" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing/trace" - "github.com/OffchainLabs/prysm/v6/network/httputil" - ethpbalpha "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1/attestation/aggregation/attestations" - "github.com/OffchainLabs/prysm/v6/runtime/version" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/api" + "github.com/OffchainLabs/prysm/v7/api/server" + "github.com/OffchainLabs/prysm/v7/api/server/structs" + "github.com/OffchainLabs/prysm/v7/beacon-chain/builder" + "github.com/OffchainLabs/prysm/v7/beacon-chain/cache" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/transition" + "github.com/OffchainLabs/prysm/v7/beacon-chain/rpc/core" + rpchelpers "github.com/OffchainLabs/prysm/v7/beacon-chain/rpc/eth/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/rpc/eth/shared" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/config/features" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + 
"github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + validator2 "github.com/OffchainLabs/prysm/v7/consensus-types/validator" + mvslice "github.com/OffchainLabs/prysm/v7/container/multi-value-slice" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing/trace" + "github.com/OffchainLabs/prysm/v7/network/httputil" + ethpbalpha "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1/attestation/aggregation/attestations" + "github.com/OffchainLabs/prysm/v7/runtime/version" + "github.com/OffchainLabs/prysm/v7/time/slots" "github.com/ethereum/go-ethereum/common/hexutil" "github.com/pkg/errors" "github.com/sirupsen/logrus" diff --git a/beacon-chain/rpc/eth/validator/handlers_block.go b/beacon-chain/rpc/eth/validator/handlers_block.go index df3543ae7a..88768b4409 100644 --- a/beacon-chain/rpc/eth/validator/handlers_block.go +++ b/beacon-chain/rpc/eth/validator/handlers_block.go @@ -8,19 +8,19 @@ import ( "net/http" "strings" - "github.com/OffchainLabs/prysm/v6/api" - "github.com/OffchainLabs/prysm/v6/api/server/structs" - "github.com/OffchainLabs/prysm/v6/beacon-chain/rpc/eth/rewards" - "github.com/OffchainLabs/prysm/v6/beacon-chain/rpc/eth/shared" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/crypto/bls/common" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing/trace" - "github.com/OffchainLabs/prysm/v6/network/httputil" - eth "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" + "github.com/OffchainLabs/prysm/v7/api" + "github.com/OffchainLabs/prysm/v7/api/server/structs" + "github.com/OffchainLabs/prysm/v7/beacon-chain/rpc/eth/rewards" + "github.com/OffchainLabs/prysm/v7/beacon-chain/rpc/eth/shared" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/crypto/bls/common" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing/trace" + "github.com/OffchainLabs/prysm/v7/network/httputil" + eth "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" "github.com/pkg/errors" "google.golang.org/protobuf/types/known/wrapperspb" ) diff --git a/beacon-chain/rpc/eth/validator/handlers_block_test.go b/beacon-chain/rpc/eth/validator/handlers_block_test.go index fabc1e1d05..509bea6c47 100644 --- a/beacon-chain/rpc/eth/validator/handlers_block_test.go +++ b/beacon-chain/rpc/eth/validator/handlers_block_test.go @@ -9,17 +9,17 @@ import ( "strings" "testing" - "github.com/OffchainLabs/prysm/v6/api" - "github.com/OffchainLabs/prysm/v6/api/server/structs" - blockchainTesting "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/testing" - rewardtesting "github.com/OffchainLabs/prysm/v6/beacon-chain/rpc/eth/rewards/testing" - rpctesting "github.com/OffchainLabs/prysm/v6/beacon-chain/rpc/eth/shared/testing" - mockSync "github.com/OffchainLabs/prysm/v6/beacon-chain/sync/initial-sync/testing" - "github.com/OffchainLabs/prysm/v6/network/httputil" - eth 
"github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - mock2 "github.com/OffchainLabs/prysm/v6/testing/mock" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/api" + "github.com/OffchainLabs/prysm/v7/api/server/structs" + blockchainTesting "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain/testing" + rewardtesting "github.com/OffchainLabs/prysm/v7/beacon-chain/rpc/eth/rewards/testing" + rpctesting "github.com/OffchainLabs/prysm/v7/beacon-chain/rpc/eth/shared/testing" + mockSync "github.com/OffchainLabs/prysm/v7/beacon-chain/sync/initial-sync/testing" + "github.com/OffchainLabs/prysm/v7/network/httputil" + eth "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + mock2 "github.com/OffchainLabs/prysm/v7/testing/mock" + "github.com/OffchainLabs/prysm/v7/testing/require" "github.com/ethereum/go-ethereum/common/hexutil" "go.uber.org/mock/gomock" ) diff --git a/beacon-chain/rpc/eth/validator/handlers_test.go b/beacon-chain/rpc/eth/validator/handlers_test.go index 8281dc12fa..56ec6b0b04 100644 --- a/beacon-chain/rpc/eth/validator/handlers_test.go +++ b/beacon-chain/rpc/eth/validator/handlers_test.go @@ -12,36 +12,36 @@ import ( "time" "github.com/OffchainLabs/go-bitfield" - "github.com/OffchainLabs/prysm/v6/api" - "github.com/OffchainLabs/prysm/v6/api/server/structs" - mockChain "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/testing" - builderTest "github.com/OffchainLabs/prysm/v6/beacon-chain/builder/testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/cache" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/transition" - dbutil "github.com/OffchainLabs/prysm/v6/beacon-chain/db/testing" - doublylinkedtree "github.com/OffchainLabs/prysm/v6/beacon-chain/forkchoice/doubly-linked-tree" - "github.com/OffchainLabs/prysm/v6/beacon-chain/operations/attestations" - "github.com/OffchainLabs/prysm/v6/beacon-chain/operations/synccommittee" - p2pmock "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/rpc/core" - "github.com/OffchainLabs/prysm/v6/beacon-chain/rpc/testutil" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state/stategen" - mockSync "github.com/OffchainLabs/prysm/v6/beacon-chain/sync/initial-sync/testing" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/crypto/bls" - "github.com/OffchainLabs/prysm/v6/crypto/bls/common" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/network/httputil" - ethpbalpha "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/api" + "github.com/OffchainLabs/prysm/v7/api/server/structs" + mockChain "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain/testing" + builderTest "github.com/OffchainLabs/prysm/v7/beacon-chain/builder/testing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/cache" + 
"github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/transition" + dbutil "github.com/OffchainLabs/prysm/v7/beacon-chain/db/testing" + doublylinkedtree "github.com/OffchainLabs/prysm/v7/beacon-chain/forkchoice/doubly-linked-tree" + "github.com/OffchainLabs/prysm/v7/beacon-chain/operations/attestations" + "github.com/OffchainLabs/prysm/v7/beacon-chain/operations/synccommittee" + p2pmock "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/testing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/rpc/core" + "github.com/OffchainLabs/prysm/v7/beacon-chain/rpc/testutil" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state/stategen" + mockSync "github.com/OffchainLabs/prysm/v7/beacon-chain/sync/initial-sync/testing" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/crypto/bls" + "github.com/OffchainLabs/prysm/v7/crypto/bls/common" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/network/httputil" + ethpbalpha "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" + "github.com/OffchainLabs/prysm/v7/time/slots" "github.com/ethereum/go-ethereum/common/hexutil" "github.com/pkg/errors" logTest "github.com/sirupsen/logrus/hooks/test" diff --git a/beacon-chain/rpc/eth/validator/server.go b/beacon-chain/rpc/eth/validator/server.go index 43e6dd4a38..c8f456bed3 100644 --- a/beacon-chain/rpc/eth/validator/server.go +++ b/beacon-chain/rpc/eth/validator/server.go @@ -1,19 +1,19 @@ package validator import ( - "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain" - "github.com/OffchainLabs/prysm/v6/beacon-chain/builder" - "github.com/OffchainLabs/prysm/v6/beacon-chain/cache" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/feed/operation" - "github.com/OffchainLabs/prysm/v6/beacon-chain/db" - "github.com/OffchainLabs/prysm/v6/beacon-chain/operations/attestations" - "github.com/OffchainLabs/prysm/v6/beacon-chain/operations/synccommittee" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p" - "github.com/OffchainLabs/prysm/v6/beacon-chain/rpc/core" - "github.com/OffchainLabs/prysm/v6/beacon-chain/rpc/eth/rewards" - "github.com/OffchainLabs/prysm/v6/beacon-chain/rpc/lookup" - "github.com/OffchainLabs/prysm/v6/beacon-chain/sync" - eth "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain" + "github.com/OffchainLabs/prysm/v7/beacon-chain/builder" + "github.com/OffchainLabs/prysm/v7/beacon-chain/cache" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/feed/operation" + "github.com/OffchainLabs/prysm/v7/beacon-chain/db" + "github.com/OffchainLabs/prysm/v7/beacon-chain/operations/attestations" + "github.com/OffchainLabs/prysm/v7/beacon-chain/operations/synccommittee" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p" + "github.com/OffchainLabs/prysm/v7/beacon-chain/rpc/core" + "github.com/OffchainLabs/prysm/v7/beacon-chain/rpc/eth/rewards" + "github.com/OffchainLabs/prysm/v7/beacon-chain/rpc/lookup" + "github.com/OffchainLabs/prysm/v7/beacon-chain/sync" + eth 
"github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" ) // Server defines a server implementation of the gRPC Validator service, diff --git a/beacon-chain/rpc/lookup/BUILD.bazel b/beacon-chain/rpc/lookup/BUILD.bazel index b5d6a5e24e..42e82a0599 100644 --- a/beacon-chain/rpc/lookup/BUILD.bazel +++ b/beacon-chain/rpc/lookup/BUILD.bazel @@ -6,7 +6,7 @@ go_library( "blocker.go", "stater.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/beacon-chain/rpc/lookup", + importpath = "github.com/OffchainLabs/prysm/v7/beacon-chain/rpc/lookup", visibility = ["//visibility:public"], deps = [ "//beacon-chain/blockchain:go_default_library", diff --git a/beacon-chain/rpc/lookup/blocker.go b/beacon-chain/rpc/lookup/blocker.go index f618808dce..958cbad67b 100644 --- a/beacon-chain/rpc/lookup/blocker.go +++ b/beacon-chain/rpc/lookup/blocker.go @@ -6,20 +6,20 @@ import ( "math" "strconv" - "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/peerdas" - "github.com/OffchainLabs/prysm/v6/beacon-chain/db" - "github.com/OffchainLabs/prysm/v6/beacon-chain/db/filesystem" - "github.com/OffchainLabs/prysm/v6/beacon-chain/rpc/core" - "github.com/OffchainLabs/prysm/v6/beacon-chain/rpc/options" - "github.com/OffchainLabs/prysm/v6/cmd/beacon-chain/flags" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/peerdas" + "github.com/OffchainLabs/prysm/v7/beacon-chain/db" + "github.com/OffchainLabs/prysm/v7/beacon-chain/db/filesystem" + "github.com/OffchainLabs/prysm/v7/beacon-chain/rpc/core" + "github.com/OffchainLabs/prysm/v7/beacon-chain/rpc/options" + "github.com/OffchainLabs/prysm/v7/cmd/beacon-chain/flags" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/time/slots" "github.com/ethereum/go-ethereum/common/hexutil" "github.com/pkg/errors" ) diff --git a/beacon-chain/rpc/lookup/blocker_test.go b/beacon-chain/rpc/lookup/blocker_test.go index bf8f967c64..0828e56bdb 100644 --- a/beacon-chain/rpc/lookup/blocker_test.go +++ b/beacon-chain/rpc/lookup/blocker_test.go @@ -8,24 +8,24 @@ import ( "testing" "time" - "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/kzg" - mockChain "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/peerdas" - "github.com/OffchainLabs/prysm/v6/beacon-chain/db/filesystem" - testDB "github.com/OffchainLabs/prysm/v6/beacon-chain/db/testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/rpc/core" - "github.com/OffchainLabs/prysm/v6/beacon-chain/rpc/options" - "github.com/OffchainLabs/prysm/v6/beacon-chain/rpc/testutil" - "github.com/OffchainLabs/prysm/v6/beacon-chain/verification" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - 
"github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain/kzg" + mockChain "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain/testing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/peerdas" + "github.com/OffchainLabs/prysm/v7/beacon-chain/db/filesystem" + testDB "github.com/OffchainLabs/prysm/v7/beacon-chain/db/testing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/rpc/core" + "github.com/OffchainLabs/prysm/v7/beacon-chain/rpc/options" + "github.com/OffchainLabs/prysm/v7/beacon-chain/rpc/testutil" + "github.com/OffchainLabs/prysm/v7/beacon-chain/verification" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" "github.com/ethereum/go-ethereum/common/hexutil" ) diff --git a/beacon-chain/rpc/lookup/stater.go b/beacon-chain/rpc/lookup/stater.go index abecc7a8d0..b0fd36b6cd 100644 --- a/beacon-chain/rpc/lookup/stater.go +++ b/beacon-chain/rpc/lookup/stater.go @@ -7,16 +7,16 @@ import ( "strconv" "strings" - "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain" - "github.com/OffchainLabs/prysm/v6/beacon-chain/db" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state/stategen" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing/trace" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain" + "github.com/OffchainLabs/prysm/v7/beacon-chain/db" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state/stategen" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing/trace" + "github.com/OffchainLabs/prysm/v7/time/slots" "github.com/ethereum/go-ethereum/common/hexutil" "github.com/pkg/errors" ) diff --git a/beacon-chain/rpc/lookup/stater_test.go b/beacon-chain/rpc/lookup/stater_test.go index 14caa1f6c8..eaeafbb7f2 100644 --- a/beacon-chain/rpc/lookup/stater_test.go +++ b/beacon-chain/rpc/lookup/stater_test.go @@ -6,19 +6,19 @@ import ( "testing" "time" - chainMock "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/testing" - testDB "github.com/OffchainLabs/prysm/v6/beacon-chain/db/testing" - statenative 
"github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state/stategen" - mockstategen "github.com/OffchainLabs/prysm/v6/beacon-chain/state/stategen/mock" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + chainMock "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain/testing" + testDB "github.com/OffchainLabs/prysm/v7/beacon-chain/db/testing" + statenative "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state/stategen" + mockstategen "github.com/OffchainLabs/prysm/v7/beacon-chain/state/stategen/mock" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" "github.com/ethereum/go-ethereum/common/hexutil" ) diff --git a/beacon-chain/rpc/options/BUILD.bazel b/beacon-chain/rpc/options/BUILD.bazel index d545880b4c..ba9833aa94 100644 --- a/beacon-chain/rpc/options/BUILD.bazel +++ b/beacon-chain/rpc/options/BUILD.bazel @@ -3,6 +3,6 @@ load("@prysm//tools/go:def.bzl", "go_library") go_library( name = "go_default_library", srcs = ["options.go"], - importpath = "github.com/OffchainLabs/prysm/v6/beacon-chain/rpc/options", + importpath = "github.com/OffchainLabs/prysm/v7/beacon-chain/rpc/options", visibility = ["//visibility:public"], ) diff --git a/beacon-chain/rpc/prysm/beacon/BUILD.bazel b/beacon-chain/rpc/prysm/beacon/BUILD.bazel index 6315d6ef7c..7e3e9d4b54 100644 --- a/beacon-chain/rpc/prysm/beacon/BUILD.bazel +++ b/beacon-chain/rpc/prysm/beacon/BUILD.bazel @@ -8,7 +8,7 @@ go_library( "ssz_query.go", "validator_count.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/beacon-chain/rpc/prysm/beacon", + importpath = "github.com/OffchainLabs/prysm/v7/beacon-chain/rpc/prysm/beacon", visibility = ["//visibility:public"], deps = [ "//api:go_default_library", diff --git a/beacon-chain/rpc/prysm/beacon/handlers.go b/beacon-chain/rpc/prysm/beacon/handlers.go index daeed23657..3a873d7e54 100644 --- a/beacon-chain/rpc/prysm/beacon/handlers.go +++ b/beacon-chain/rpc/prysm/beacon/handlers.go @@ -8,18 +8,18 @@ import ( "net/http" "strconv" - "github.com/OffchainLabs/prysm/v6/api/server/structs" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/rpc/core" - "github.com/OffchainLabs/prysm/v6/beacon-chain/rpc/eth/shared" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing/trace" - "github.com/OffchainLabs/prysm/v6/network/httputil" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - 
"github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/api/server/structs" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/rpc/core" + "github.com/OffchainLabs/prysm/v7/beacon-chain/rpc/eth/shared" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing/trace" + "github.com/OffchainLabs/prysm/v7/network/httputil" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/time/slots" "github.com/ethereum/go-ethereum/common/hexutil" "github.com/pkg/errors" ) diff --git a/beacon-chain/rpc/prysm/beacon/handlers_test.go b/beacon-chain/rpc/prysm/beacon/handlers_test.go index 2b40edba71..d9d49a89e3 100644 --- a/beacon-chain/rpc/prysm/beacon/handlers_test.go +++ b/beacon-chain/rpc/prysm/beacon/handlers_test.go @@ -11,28 +11,28 @@ import ( "testing" "github.com/OffchainLabs/go-bitfield" - "github.com/OffchainLabs/prysm/v6/api/server/structs" - chainMock "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - dbTest "github.com/OffchainLabs/prysm/v6/beacon-chain/db/testing" - doublylinkedtree "github.com/OffchainLabs/prysm/v6/beacon-chain/forkchoice/doubly-linked-tree" - mockp2p "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/rpc/core" - rpctesting "github.com/OffchainLabs/prysm/v6/beacon-chain/rpc/prysm/testing" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state/stategen" - mockstategen "github.com/OffchainLabs/prysm/v6/beacon-chain/state/stategen/mock" - mockSync "github.com/OffchainLabs/prysm/v6/beacon-chain/sync/initial-sync/testing" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/api/server/structs" + chainMock "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain/testing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + dbTest "github.com/OffchainLabs/prysm/v7/beacon-chain/db/testing" + doublylinkedtree "github.com/OffchainLabs/prysm/v7/beacon-chain/forkchoice/doubly-linked-tree" + mockp2p "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/testing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/rpc/core" + rpctesting "github.com/OffchainLabs/prysm/v7/beacon-chain/rpc/prysm/testing" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state/stategen" + mockstategen "github.com/OffchainLabs/prysm/v7/beacon-chain/state/stategen/mock" + mockSync "github.com/OffchainLabs/prysm/v7/beacon-chain/sync/initial-sync/testing" + fieldparams 
"github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" + "github.com/OffchainLabs/prysm/v7/time/slots" "github.com/ethereum/go-ethereum/common/hexutil" ) diff --git a/beacon-chain/rpc/prysm/beacon/server.go b/beacon-chain/rpc/prysm/beacon/server.go index efb9714b04..c5d733ea1f 100644 --- a/beacon-chain/rpc/prysm/beacon/server.go +++ b/beacon-chain/rpc/prysm/beacon/server.go @@ -1,13 +1,13 @@ package beacon import ( - "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain" - beacondb "github.com/OffchainLabs/prysm/v6/beacon-chain/db" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p" - "github.com/OffchainLabs/prysm/v6/beacon-chain/rpc/core" - "github.com/OffchainLabs/prysm/v6/beacon-chain/rpc/lookup" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state/stategen" - "github.com/OffchainLabs/prysm/v6/beacon-chain/sync" + "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain" + beacondb "github.com/OffchainLabs/prysm/v7/beacon-chain/db" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p" + "github.com/OffchainLabs/prysm/v7/beacon-chain/rpc/core" + "github.com/OffchainLabs/prysm/v7/beacon-chain/rpc/lookup" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state/stategen" + "github.com/OffchainLabs/prysm/v7/beacon-chain/sync" ) type Server struct { diff --git a/beacon-chain/rpc/prysm/beacon/ssz_query.go b/beacon-chain/rpc/prysm/beacon/ssz_query.go index 063570d91d..30fbfdf16b 100644 --- a/beacon-chain/rpc/prysm/beacon/ssz_query.go +++ b/beacon-chain/rpc/prysm/beacon/ssz_query.go @@ -6,15 +6,15 @@ import ( "io" "net/http" - "github.com/OffchainLabs/prysm/v6/api" - "github.com/OffchainLabs/prysm/v6/api/server/structs" - "github.com/OffchainLabs/prysm/v6/beacon-chain/rpc/eth/shared" - "github.com/OffchainLabs/prysm/v6/beacon-chain/rpc/lookup" - "github.com/OffchainLabs/prysm/v6/encoding/ssz/query" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing/trace" - "github.com/OffchainLabs/prysm/v6/network/httputil" - sszquerypb "github.com/OffchainLabs/prysm/v6/proto/ssz_query" - "github.com/OffchainLabs/prysm/v6/runtime/version" + "github.com/OffchainLabs/prysm/v7/api" + "github.com/OffchainLabs/prysm/v7/api/server/structs" + "github.com/OffchainLabs/prysm/v7/beacon-chain/rpc/eth/shared" + "github.com/OffchainLabs/prysm/v7/beacon-chain/rpc/lookup" + "github.com/OffchainLabs/prysm/v7/encoding/ssz/query" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing/trace" + "github.com/OffchainLabs/prysm/v7/network/httputil" + sszquerypb "github.com/OffchainLabs/prysm/v7/proto/ssz_query" + "github.com/OffchainLabs/prysm/v7/runtime/version" ) // QueryBeaconState handles SSZ Query request for BeaconState. 
diff --git a/beacon-chain/rpc/prysm/beacon/ssz_query_test.go b/beacon-chain/rpc/prysm/beacon/ssz_query_test.go index 099faef713..e3932d235e 100644 --- a/beacon-chain/rpc/prysm/beacon/ssz_query_test.go +++ b/beacon-chain/rpc/prysm/beacon/ssz_query_test.go @@ -11,19 +11,19 @@ import ( "testing" "github.com/OffchainLabs/go-bitfield" - "github.com/OffchainLabs/prysm/v6/api" - "github.com/OffchainLabs/prysm/v6/api/server/structs" - chainMock "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/rpc/testutil" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - eth "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - sszquerypb "github.com/OffchainLabs/prysm/v6/proto/ssz_query" - "github.com/OffchainLabs/prysm/v6/runtime/version" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/api" + "github.com/OffchainLabs/prysm/v7/api/server/structs" + chainMock "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain/testing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/rpc/testutil" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + eth "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + sszquerypb "github.com/OffchainLabs/prysm/v7/proto/ssz_query" + "github.com/OffchainLabs/prysm/v7/runtime/version" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" "github.com/ethereum/go-ethereum/common/hexutil" ) diff --git a/beacon-chain/rpc/prysm/beacon/validator_count.go b/beacon-chain/rpc/prysm/beacon/validator_count.go index 7f0f71a0f3..1735aae994 100644 --- a/beacon-chain/rpc/prysm/beacon/validator_count.go +++ b/beacon-chain/rpc/prysm/beacon/validator_count.go @@ -7,17 +7,17 @@ import ( "strconv" "strings" - "github.com/OffchainLabs/prysm/v6/api/server/structs" - "github.com/OffchainLabs/prysm/v6/beacon-chain/rpc/eth/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/rpc/eth/shared" - statenative "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/consensus-types/validator" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing/trace" - "github.com/OffchainLabs/prysm/v6/network/httputil" - ethpb "github.com/OffchainLabs/prysm/v6/proto/eth/v1" - eth "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/api/server/structs" + "github.com/OffchainLabs/prysm/v7/beacon-chain/rpc/eth/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/rpc/eth/shared" + statenative "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/consensus-types/validator" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing/trace" + "github.com/OffchainLabs/prysm/v7/network/httputil" + ethpb "github.com/OffchainLabs/prysm/v7/proto/eth/v1" + eth "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + 
"github.com/OffchainLabs/prysm/v7/time/slots" ) // GetValidatorCount is a HTTP handler that serves the GET /eth/v1/beacon/states/{state_id}/validator_count endpoint. diff --git a/beacon-chain/rpc/prysm/beacon/validator_count_test.go b/beacon-chain/rpc/prysm/beacon/validator_count_test.go index b08f4c47c8..50db31d91c 100644 --- a/beacon-chain/rpc/prysm/beacon/validator_count_test.go +++ b/beacon-chain/rpc/prysm/beacon/validator_count_test.go @@ -12,17 +12,17 @@ import ( "strings" "testing" - "github.com/OffchainLabs/prysm/v6/api/server/structs" - chainMock "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/rpc/lookup" - "github.com/OffchainLabs/prysm/v6/beacon-chain/rpc/testutil" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/network/httputil" - eth "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/api/server/structs" + chainMock "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain/testing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/rpc/lookup" + "github.com/OffchainLabs/prysm/v7/beacon-chain/rpc/testutil" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/network/httputil" + eth "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" ) func TestGetValidatorCountInvalidRequest(t *testing.T) { diff --git a/beacon-chain/rpc/prysm/node/BUILD.bazel b/beacon-chain/rpc/prysm/node/BUILD.bazel index 5b3bd5d806..084ac67049 100644 --- a/beacon-chain/rpc/prysm/node/BUILD.bazel +++ b/beacon-chain/rpc/prysm/node/BUILD.bazel @@ -6,7 +6,7 @@ go_library( "handlers.go", "server.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/beacon-chain/rpc/prysm/node", + importpath = "github.com/OffchainLabs/prysm/v7/beacon-chain/rpc/prysm/node", visibility = ["//beacon-chain:__subpackages__"], deps = [ "//api/server/structs:go_default_library", diff --git a/beacon-chain/rpc/prysm/node/handlers.go b/beacon-chain/rpc/prysm/node/handlers.go index dae36c9825..0eaea34be5 100644 --- a/beacon-chain/rpc/prysm/node/handlers.go +++ b/beacon-chain/rpc/prysm/node/handlers.go @@ -6,13 +6,13 @@ import ( "net/http" "strings" - "github.com/OffchainLabs/prysm/v6/api/server/structs" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/peers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/peers/peerdata" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing/trace" - "github.com/OffchainLabs/prysm/v6/network/httputil" - eth "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/api/server/structs" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/peers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/peers/peerdata" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing/trace" + "github.com/OffchainLabs/prysm/v7/network/httputil" + eth "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" corenet "github.com/libp2p/go-libp2p/core/network" 
"github.com/libp2p/go-libp2p/core/peer" "github.com/pkg/errors" diff --git a/beacon-chain/rpc/prysm/node/handlers_test.go b/beacon-chain/rpc/prysm/node/handlers_test.go index 734295a8f3..7e6875438a 100644 --- a/beacon-chain/rpc/prysm/node/handlers_test.go +++ b/beacon-chain/rpc/prysm/node/handlers_test.go @@ -8,13 +8,13 @@ import ( "strconv" "testing" - "github.com/OffchainLabs/prysm/v6/api/server/structs" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/peers" - mockp2p "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/testing" - "github.com/OffchainLabs/prysm/v6/network/httputil" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/api/server/structs" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/peers" + mockp2p "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/testing" + "github.com/OffchainLabs/prysm/v7/network/httputil" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" "github.com/ethereum/go-ethereum/p2p/enode" "github.com/ethereum/go-ethereum/p2p/enr" corenet "github.com/libp2p/go-libp2p/core/network" diff --git a/beacon-chain/rpc/prysm/node/server.go b/beacon-chain/rpc/prysm/node/server.go index dd702b1c9f..cb84af8ac7 100644 --- a/beacon-chain/rpc/prysm/node/server.go +++ b/beacon-chain/rpc/prysm/node/server.go @@ -1,11 +1,11 @@ package node import ( - "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain" - "github.com/OffchainLabs/prysm/v6/beacon-chain/db" - "github.com/OffchainLabs/prysm/v6/beacon-chain/execution" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p" - "github.com/OffchainLabs/prysm/v6/beacon-chain/sync" + "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain" + "github.com/OffchainLabs/prysm/v7/beacon-chain/db" + "github.com/OffchainLabs/prysm/v7/beacon-chain/execution" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p" + "github.com/OffchainLabs/prysm/v7/beacon-chain/sync" ) type Server struct { diff --git a/beacon-chain/rpc/prysm/testing/BUILD.bazel b/beacon-chain/rpc/prysm/testing/BUILD.bazel index 4bc82bf539..423c5701a5 100644 --- a/beacon-chain/rpc/prysm/testing/BUILD.bazel +++ b/beacon-chain/rpc/prysm/testing/BUILD.bazel @@ -4,6 +4,6 @@ go_library( name = "go_default_library", testonly = True, srcs = ["json.go"], - importpath = "github.com/OffchainLabs/prysm/v6/beacon-chain/rpc/prysm/testing", + importpath = "github.com/OffchainLabs/prysm/v7/beacon-chain/rpc/prysm/testing", visibility = ["//visibility:public"], ) diff --git a/beacon-chain/rpc/prysm/v1alpha1/beacon/BUILD.bazel b/beacon-chain/rpc/prysm/v1alpha1/beacon/BUILD.bazel index 1b5302d1bd..19415d336b 100644 --- a/beacon-chain/rpc/prysm/v1alpha1/beacon/BUILD.bazel +++ b/beacon-chain/rpc/prysm/v1alpha1/beacon/BUILD.bazel @@ -13,7 +13,7 @@ go_library( "slashings.go", "validators.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/beacon-chain/rpc/prysm/v1alpha1/beacon", + importpath = "github.com/OffchainLabs/prysm/v7/beacon-chain/rpc/prysm/v1alpha1/beacon", visibility = ["//beacon-chain:__subpackages__"], deps = [ "//api/pagination:go_default_library", diff --git a/beacon-chain/rpc/prysm/v1alpha1/beacon/assignments.go b/beacon-chain/rpc/prysm/v1alpha1/beacon/assignments.go index 5cf1541ed9..f6889926b3 100644 --- a/beacon-chain/rpc/prysm/v1alpha1/beacon/assignments.go +++ b/beacon-chain/rpc/prysm/v1alpha1/beacon/assignments.go 
@@ -5,13 +5,13 @@ import ( "fmt" "strconv" - "github.com/OffchainLabs/prysm/v6/api/pagination" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/cmd" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/api/pagination" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/cmd" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/time/slots" "google.golang.org/grpc/codes" "google.golang.org/grpc/status" ) diff --git a/beacon-chain/rpc/prysm/v1alpha1/beacon/assignments_test.go b/beacon-chain/rpc/prysm/v1alpha1/beacon/assignments_test.go index 7330c502dd..a114919742 100644 --- a/beacon-chain/rpc/prysm/v1alpha1/beacon/assignments_test.go +++ b/beacon-chain/rpc/prysm/v1alpha1/beacon/assignments_test.go @@ -6,20 +6,20 @@ import ( "strconv" "testing" - mock "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - dbTest "github.com/OffchainLabs/prysm/v6/beacon-chain/db/testing" - doublylinkedtree "github.com/OffchainLabs/prysm/v6/beacon-chain/forkchoice/doubly-linked-tree" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state/stategen" - mockstategen "github.com/OffchainLabs/prysm/v6/beacon-chain/state/stategen/mock" - "github.com/OffchainLabs/prysm/v6/cmd" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" - "github.com/OffchainLabs/prysm/v6/time/slots" + mock "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain/testing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + dbTest "github.com/OffchainLabs/prysm/v7/beacon-chain/db/testing" + doublylinkedtree "github.com/OffchainLabs/prysm/v7/beacon-chain/forkchoice/doubly-linked-tree" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state/stategen" + mockstategen "github.com/OffchainLabs/prysm/v7/beacon-chain/state/stategen/mock" + "github.com/OffchainLabs/prysm/v7/cmd" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" + "github.com/OffchainLabs/prysm/v7/time/slots" ) func TestServer_ListAssignments_CannotRequestFutureEpoch(t *testing.T) { diff --git a/beacon-chain/rpc/prysm/v1alpha1/beacon/attestations.go b/beacon-chain/rpc/prysm/v1alpha1/beacon/attestations.go index 8e7c119fca..1290130067 100644 --- a/beacon-chain/rpc/prysm/v1alpha1/beacon/attestations.go +++ b/beacon-chain/rpc/prysm/v1alpha1/beacon/attestations.go @@ -6,19 +6,19 @@ import ( "strconv" "strings" - "github.com/OffchainLabs/prysm/v6/api/pagination" - "github.com/OffchainLabs/prysm/v6/beacon-chain/cache" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - 
"github.com/OffchainLabs/prysm/v6/beacon-chain/db/filters" - "github.com/OffchainLabs/prysm/v6/beacon-chain/operations/attestations" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state/stategen" - "github.com/OffchainLabs/prysm/v6/cmd" - "github.com/OffchainLabs/prysm/v6/config/features" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1/attestation" + "github.com/OffchainLabs/prysm/v7/api/pagination" + "github.com/OffchainLabs/prysm/v7/beacon-chain/cache" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/db/filters" + "github.com/OffchainLabs/prysm/v7/beacon-chain/operations/attestations" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state/stategen" + "github.com/OffchainLabs/prysm/v7/cmd" + "github.com/OffchainLabs/prysm/v7/config/features" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1/attestation" "google.golang.org/grpc/codes" "google.golang.org/grpc/status" ) diff --git a/beacon-chain/rpc/prysm/v1alpha1/beacon/attestations_test.go b/beacon-chain/rpc/prysm/v1alpha1/beacon/attestations_test.go index 16d5ed9392..fe72b7d89a 100644 --- a/beacon-chain/rpc/prysm/v1alpha1/beacon/attestations_test.go +++ b/beacon-chain/rpc/prysm/v1alpha1/beacon/attestations_test.go @@ -8,26 +8,26 @@ import ( "time" "github.com/OffchainLabs/go-bitfield" - chainMock "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - dbTest "github.com/OffchainLabs/prysm/v6/beacon-chain/db/testing" - doublylinkedtree "github.com/OffchainLabs/prysm/v6/beacon-chain/forkchoice/doubly-linked-tree" - "github.com/OffchainLabs/prysm/v6/beacon-chain/operations/attestations" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state/stategen" - "github.com/OffchainLabs/prysm/v6/cmd" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - consensusblocks "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1/attestation" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" - "github.com/OffchainLabs/prysm/v6/time/slots" + chainMock "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain/testing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + dbTest "github.com/OffchainLabs/prysm/v7/beacon-chain/db/testing" + doublylinkedtree "github.com/OffchainLabs/prysm/v7/beacon-chain/forkchoice/doubly-linked-tree" + "github.com/OffchainLabs/prysm/v7/beacon-chain/operations/attestations" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" 
+ "github.com/OffchainLabs/prysm/v7/beacon-chain/state/stategen" + "github.com/OffchainLabs/prysm/v7/cmd" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + consensusblocks "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1/attestation" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" + "github.com/OffchainLabs/prysm/v7/time/slots" "google.golang.org/protobuf/proto" ) diff --git a/beacon-chain/rpc/prysm/v1alpha1/beacon/beacon_test.go b/beacon-chain/rpc/prysm/v1alpha1/beacon/beacon_test.go index 029dbb6071..70021d99b3 100644 --- a/beacon-chain/rpc/prysm/v1alpha1/beacon/beacon_test.go +++ b/beacon-chain/rpc/prysm/v1alpha1/beacon/beacon_test.go @@ -4,8 +4,8 @@ import ( "os" "testing" - "github.com/OffchainLabs/prysm/v6/cmd/beacon-chain/flags" - "github.com/OffchainLabs/prysm/v6/config/params" + "github.com/OffchainLabs/prysm/v7/cmd/beacon-chain/flags" + "github.com/OffchainLabs/prysm/v7/config/params" ) func TestMain(m *testing.M) { diff --git a/beacon-chain/rpc/prysm/v1alpha1/beacon/blocks.go b/beacon-chain/rpc/prysm/v1alpha1/beacon/blocks.go index b5fbbd5fa0..4be4a41772 100644 --- a/beacon-chain/rpc/prysm/v1alpha1/beacon/blocks.go +++ b/beacon-chain/rpc/prysm/v1alpha1/beacon/blocks.go @@ -4,14 +4,14 @@ import ( "context" "strconv" - "github.com/OffchainLabs/prysm/v6/api/pagination" - "github.com/OffchainLabs/prysm/v6/beacon-chain/db/filters" - "github.com/OffchainLabs/prysm/v6/beacon-chain/rpc/core" - "github.com/OffchainLabs/prysm/v6/cmd" - consensusblocks "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/api/pagination" + "github.com/OffchainLabs/prysm/v7/beacon-chain/db/filters" + "github.com/OffchainLabs/prysm/v7/beacon-chain/rpc/core" + "github.com/OffchainLabs/prysm/v7/cmd" + consensusblocks "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" "github.com/pkg/errors" "google.golang.org/grpc/codes" "google.golang.org/grpc/status" diff --git a/beacon-chain/rpc/prysm/v1alpha1/beacon/blocks_test.go b/beacon-chain/rpc/prysm/v1alpha1/beacon/blocks_test.go index 74849b6b81..a9df700a31 100644 --- a/beacon-chain/rpc/prysm/v1alpha1/beacon/blocks_test.go +++ b/beacon-chain/rpc/prysm/v1alpha1/beacon/blocks_test.go @@ -5,22 +5,22 @@ import ( "strconv" "testing" - chainMock "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/testing" - dbTest "github.com/OffchainLabs/prysm/v6/beacon-chain/db/testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/rpc/core" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - "github.com/OffchainLabs/prysm/v6/config/features" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - 
"github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" - "github.com/OffchainLabs/prysm/v6/time/slots" + chainMock "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain/testing" + dbTest "github.com/OffchainLabs/prysm/v7/beacon-chain/db/testing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/rpc/core" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + "github.com/OffchainLabs/prysm/v7/config/features" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" + "github.com/OffchainLabs/prysm/v7/time/slots" "google.golang.org/protobuf/proto" ) diff --git a/beacon-chain/rpc/prysm/v1alpha1/beacon/committees.go b/beacon-chain/rpc/prysm/v1alpha1/beacon/committees.go index a521ac2766..b829b8aff1 100644 --- a/beacon-chain/rpc/prysm/v1alpha1/beacon/committees.go +++ b/beacon-chain/rpc/prysm/v1alpha1/beacon/committees.go @@ -4,13 +4,13 @@ import ( "context" "fmt" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/time" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/time" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/time/slots" "google.golang.org/grpc/codes" "google.golang.org/grpc/status" ) diff --git a/beacon-chain/rpc/prysm/v1alpha1/beacon/committees_test.go b/beacon-chain/rpc/prysm/v1alpha1/beacon/committees_test.go index a54f1890fd..0c746cc3fb 100644 --- a/beacon-chain/rpc/prysm/v1alpha1/beacon/committees_test.go +++ b/beacon-chain/rpc/prysm/v1alpha1/beacon/committees_test.go @@ -6,23 +6,23 @@ import ( "testing" "time" - mock "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - dbTest "github.com/OffchainLabs/prysm/v6/beacon-chain/db/testing" - doublylinkedtree "github.com/OffchainLabs/prysm/v6/beacon-chain/forkchoice/doubly-linked-tree" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state/stategen" - mockstategen 
"github.com/OffchainLabs/prysm/v6/beacon-chain/state/stategen/mock" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - blocktest "github.com/OffchainLabs/prysm/v6/consensus-types/blocks/testing" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" - prysmTime "github.com/OffchainLabs/prysm/v6/time" - "github.com/OffchainLabs/prysm/v6/time/slots" + mock "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain/testing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + dbTest "github.com/OffchainLabs/prysm/v7/beacon-chain/db/testing" + doublylinkedtree "github.com/OffchainLabs/prysm/v7/beacon-chain/forkchoice/doubly-linked-tree" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state/stategen" + mockstategen "github.com/OffchainLabs/prysm/v7/beacon-chain/state/stategen/mock" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + blocktest "github.com/OffchainLabs/prysm/v7/consensus-types/blocks/testing" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" + prysmTime "github.com/OffchainLabs/prysm/v7/time" + "github.com/OffchainLabs/prysm/v7/time/slots" "google.golang.org/protobuf/proto" "gopkg.in/d4l3k/messagediff.v1" ) diff --git a/beacon-chain/rpc/prysm/v1alpha1/beacon/config.go b/beacon-chain/rpc/prysm/v1alpha1/beacon/config.go index 4ced9735b7..98a7184950 100644 --- a/beacon-chain/rpc/prysm/v1alpha1/beacon/config.go +++ b/beacon-chain/rpc/prysm/v1alpha1/beacon/config.go @@ -5,8 +5,8 @@ import ( "fmt" "reflect" - "github.com/OffchainLabs/prysm/v6/config/params" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/config/params" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" "google.golang.org/protobuf/types/known/emptypb" ) diff --git a/beacon-chain/rpc/prysm/v1alpha1/beacon/config_test.go b/beacon-chain/rpc/prysm/v1alpha1/beacon/config_test.go index 68a30df3b2..799b27e54d 100644 --- a/beacon-chain/rpc/prysm/v1alpha1/beacon/config_test.go +++ b/beacon-chain/rpc/prysm/v1alpha1/beacon/config_test.go @@ -5,9 +5,9 @@ import ( "reflect" "testing" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" "google.golang.org/protobuf/types/known/emptypb" ) diff --git a/beacon-chain/rpc/prysm/v1alpha1/beacon/init_test.go b/beacon-chain/rpc/prysm/v1alpha1/beacon/init_test.go index b40158b177..e1d1bde044 100644 --- a/beacon-chain/rpc/prysm/v1alpha1/beacon/init_test.go +++ b/beacon-chain/rpc/prysm/v1alpha1/beacon/init_test.go @@ -1,7 +1,7 @@ package beacon import ( - "github.com/OffchainLabs/prysm/v6/config/params" + "github.com/OffchainLabs/prysm/v7/config/params" ) func init() { diff --git a/beacon-chain/rpc/prysm/v1alpha1/beacon/server.go 
b/beacon-chain/rpc/prysm/v1alpha1/beacon/server.go index b26922ec3e..7c9f23c921 100644 --- a/beacon-chain/rpc/prysm/v1alpha1/beacon/server.go +++ b/beacon-chain/rpc/prysm/v1alpha1/beacon/server.go @@ -7,20 +7,20 @@ import ( "context" "time" - "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain" - "github.com/OffchainLabs/prysm/v6/beacon-chain/cache" - blockfeed "github.com/OffchainLabs/prysm/v6/beacon-chain/core/feed/block" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/feed/operation" - statefeed "github.com/OffchainLabs/prysm/v6/beacon-chain/core/feed/state" - "github.com/OffchainLabs/prysm/v6/beacon-chain/db" - "github.com/OffchainLabs/prysm/v6/beacon-chain/execution" - "github.com/OffchainLabs/prysm/v6/beacon-chain/operations/attestations" - "github.com/OffchainLabs/prysm/v6/beacon-chain/operations/slashings" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p" - "github.com/OffchainLabs/prysm/v6/beacon-chain/rpc/core" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state/stategen" - "github.com/OffchainLabs/prysm/v6/beacon-chain/sync" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain" + "github.com/OffchainLabs/prysm/v7/beacon-chain/cache" + blockfeed "github.com/OffchainLabs/prysm/v7/beacon-chain/core/feed/block" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/feed/operation" + statefeed "github.com/OffchainLabs/prysm/v7/beacon-chain/core/feed/state" + "github.com/OffchainLabs/prysm/v7/beacon-chain/db" + "github.com/OffchainLabs/prysm/v7/beacon-chain/execution" + "github.com/OffchainLabs/prysm/v7/beacon-chain/operations/attestations" + "github.com/OffchainLabs/prysm/v7/beacon-chain/operations/slashings" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p" + "github.com/OffchainLabs/prysm/v7/beacon-chain/rpc/core" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state/stategen" + "github.com/OffchainLabs/prysm/v7/beacon-chain/sync" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" ) // Server defines a server implementation of the gRPC Beacon Chain service, diff --git a/beacon-chain/rpc/prysm/v1alpha1/beacon/slashings.go b/beacon-chain/rpc/prysm/v1alpha1/beacon/slashings.go index 96cbc3b499..7a925329fa 100644 --- a/beacon-chain/rpc/prysm/v1alpha1/beacon/slashings.go +++ b/beacon-chain/rpc/prysm/v1alpha1/beacon/slashings.go @@ -3,10 +3,10 @@ package beacon import ( "context" - "github.com/OffchainLabs/prysm/v6/config/features" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/container/slice" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/config/features" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/container/slice" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" "google.golang.org/grpc/codes" "google.golang.org/grpc/status" ) diff --git a/beacon-chain/rpc/prysm/v1alpha1/beacon/slashings_test.go b/beacon-chain/rpc/prysm/v1alpha1/beacon/slashings_test.go index ba4186ea5f..c88c439690 100644 --- a/beacon-chain/rpc/prysm/v1alpha1/beacon/slashings_test.go +++ b/beacon-chain/rpc/prysm/v1alpha1/beacon/slashings_test.go @@ -3,15 +3,15 @@ package beacon import ( "testing" - mock "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/operations/slashings" - mockp2p "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/testing" - 
"github.com/OffchainLabs/prysm/v6/config/features" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + mock "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain/testing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/operations/slashings" + mockp2p "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/testing" + "github.com/OffchainLabs/prysm/v7/config/features" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" "google.golang.org/protobuf/proto" ) diff --git a/beacon-chain/rpc/prysm/v1alpha1/beacon/validators.go b/beacon-chain/rpc/prysm/v1alpha1/beacon/validators.go index e97c9dbd9b..77b7714403 100644 --- a/beacon-chain/rpc/prysm/v1alpha1/beacon/validators.go +++ b/beacon-chain/rpc/prysm/v1alpha1/beacon/validators.go @@ -6,19 +6,19 @@ import ( "sort" "strconv" - "github.com/OffchainLabs/prysm/v6/api/pagination" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - coreTime "github.com/OffchainLabs/prysm/v6/beacon-chain/core/time" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/transition" - "github.com/OffchainLabs/prysm/v6/beacon-chain/rpc/core" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/cmd" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/api/pagination" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + coreTime "github.com/OffchainLabs/prysm/v7/beacon-chain/core/time" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/transition" + "github.com/OffchainLabs/prysm/v7/beacon-chain/rpc/core" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/cmd" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" + "github.com/OffchainLabs/prysm/v7/time/slots" "google.golang.org/grpc/codes" "google.golang.org/grpc/status" "google.golang.org/protobuf/types/known/emptypb" diff --git a/beacon-chain/rpc/prysm/v1alpha1/beacon/validators_test.go b/beacon-chain/rpc/prysm/v1alpha1/beacon/validators_test.go index b3f1370785..e7e5583f50 100644 --- a/beacon-chain/rpc/prysm/v1alpha1/beacon/validators_test.go +++ b/beacon-chain/rpc/prysm/v1alpha1/beacon/validators_test.go @@ -9,35 +9,35 @@ import ( "time" "github.com/OffchainLabs/go-bitfield" - mock "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/altair" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/epoch/precompute" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - coreTime "github.com/OffchainLabs/prysm/v6/beacon-chain/core/time" - 
"github.com/OffchainLabs/prysm/v6/beacon-chain/core/transition" - "github.com/OffchainLabs/prysm/v6/beacon-chain/db" - dbTest "github.com/OffchainLabs/prysm/v6/beacon-chain/db/testing" - doublylinkedtree "github.com/OffchainLabs/prysm/v6/beacon-chain/forkchoice/doubly-linked-tree" - "github.com/OffchainLabs/prysm/v6/beacon-chain/rpc/core" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state/stategen" - mockstategen "github.com/OffchainLabs/prysm/v6/beacon-chain/state/stategen/mock" - mockSync "github.com/OffchainLabs/prysm/v6/beacon-chain/sync/initial-sync/testing" - "github.com/OffchainLabs/prysm/v6/cmd" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - blocktest "github.com/OffchainLabs/prysm/v6/consensus-types/blocks/testing" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" - prysmTime "github.com/OffchainLabs/prysm/v6/time" - "github.com/OffchainLabs/prysm/v6/time/slots" + mock "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain/testing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/altair" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/epoch/precompute" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + coreTime "github.com/OffchainLabs/prysm/v7/beacon-chain/core/time" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/transition" + "github.com/OffchainLabs/prysm/v7/beacon-chain/db" + dbTest "github.com/OffchainLabs/prysm/v7/beacon-chain/db/testing" + doublylinkedtree "github.com/OffchainLabs/prysm/v7/beacon-chain/forkchoice/doubly-linked-tree" + "github.com/OffchainLabs/prysm/v7/beacon-chain/rpc/core" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state/stategen" + mockstategen "github.com/OffchainLabs/prysm/v7/beacon-chain/state/stategen/mock" + mockSync "github.com/OffchainLabs/prysm/v7/beacon-chain/sync/initial-sync/testing" + "github.com/OffchainLabs/prysm/v7/cmd" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + blocktest "github.com/OffchainLabs/prysm/v7/consensus-types/blocks/testing" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" + prysmTime "github.com/OffchainLabs/prysm/v7/time" + "github.com/OffchainLabs/prysm/v7/time/slots" "google.golang.org/protobuf/proto" "google.golang.org/protobuf/types/known/emptypb" ) diff --git a/beacon-chain/rpc/prysm/v1alpha1/debug/BUILD.bazel 
b/beacon-chain/rpc/prysm/v1alpha1/debug/BUILD.bazel index 237ba449e4..e13f02f324 100644 --- a/beacon-chain/rpc/prysm/v1alpha1/debug/BUILD.bazel +++ b/beacon-chain/rpc/prysm/v1alpha1/debug/BUILD.bazel @@ -8,7 +8,7 @@ go_library( "server.go", "state.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/beacon-chain/rpc/prysm/v1alpha1/debug", + importpath = "github.com/OffchainLabs/prysm/v7/beacon-chain/rpc/prysm/v1alpha1/debug", visibility = ["//beacon-chain:__subpackages__"], deps = [ "//beacon-chain/blockchain:go_default_library", diff --git a/beacon-chain/rpc/prysm/v1alpha1/debug/block.go b/beacon-chain/rpc/prysm/v1alpha1/debug/block.go index 2223f20579..0a44dad0df 100644 --- a/beacon-chain/rpc/prysm/v1alpha1/debug/block.go +++ b/beacon-chain/rpc/prysm/v1alpha1/debug/block.go @@ -5,14 +5,14 @@ import ( "fmt" "math" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/db/filters" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - pbrpc "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1/attestation" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/db/filters" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + pbrpc "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1/attestation" "google.golang.org/grpc/codes" "google.golang.org/grpc/status" ) diff --git a/beacon-chain/rpc/prysm/v1alpha1/debug/block_test.go b/beacon-chain/rpc/prysm/v1alpha1/debug/block_test.go index 1613732094..08948244b1 100644 --- a/beacon-chain/rpc/prysm/v1alpha1/debug/block_test.go +++ b/beacon-chain/rpc/prysm/v1alpha1/debug/block_test.go @@ -5,17 +5,17 @@ import ( "time" "github.com/OffchainLabs/go-bitfield" - mock "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - dbTest "github.com/OffchainLabs/prysm/v6/beacon-chain/db/testing" - doublylinkedtree "github.com/OffchainLabs/prysm/v6/beacon-chain/forkchoice/doubly-linked-tree" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state/stategen" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + mock "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain/testing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + dbTest "github.com/OffchainLabs/prysm/v7/beacon-chain/db/testing" + doublylinkedtree "github.com/OffchainLabs/prysm/v7/beacon-chain/forkchoice/doubly-linked-tree" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state/stategen" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + 
"github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" ) func TestServer_GetBlock(t *testing.T) { diff --git a/beacon-chain/rpc/prysm/v1alpha1/debug/p2p.go b/beacon-chain/rpc/prysm/v1alpha1/debug/p2p.go index 3a9f72e2cb..4454fc23d9 100644 --- a/beacon-chain/rpc/prysm/v1alpha1/debug/p2p.go +++ b/beacon-chain/rpc/prysm/v1alpha1/debug/p2p.go @@ -3,8 +3,8 @@ package debug import ( "context" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" "github.com/golang/protobuf/ptypes/empty" "github.com/libp2p/go-libp2p/core/network" "github.com/libp2p/go-libp2p/core/peer" diff --git a/beacon-chain/rpc/prysm/v1alpha1/debug/p2p_test.go b/beacon-chain/rpc/prysm/v1alpha1/debug/p2p_test.go index 5086c92017..2383fbdd66 100644 --- a/beacon-chain/rpc/prysm/v1alpha1/debug/p2p_test.go +++ b/beacon-chain/rpc/prysm/v1alpha1/debug/p2p_test.go @@ -3,10 +3,10 @@ package debug import ( "testing" - mockP2p "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/testing" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" + mockP2p "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/testing" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" "github.com/golang/protobuf/ptypes/empty" ) diff --git a/beacon-chain/rpc/prysm/v1alpha1/debug/server.go b/beacon-chain/rpc/prysm/v1alpha1/debug/server.go index 74ce07d456..0d6aa41a8f 100644 --- a/beacon-chain/rpc/prysm/v1alpha1/debug/server.go +++ b/beacon-chain/rpc/prysm/v1alpha1/debug/server.go @@ -7,11 +7,11 @@ import ( "context" "os" - "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain" - "github.com/OffchainLabs/prysm/v6/beacon-chain/db" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state/stategen" - pbrpc "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain" + "github.com/OffchainLabs/prysm/v7/beacon-chain/db" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state/stategen" + pbrpc "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" gethlog "github.com/ethereum/go-ethereum/log" "github.com/golang/protobuf/ptypes/empty" golog "github.com/ipfs/go-log/v2" diff --git a/beacon-chain/rpc/prysm/v1alpha1/debug/state.go b/beacon-chain/rpc/prysm/v1alpha1/debug/state.go index 20221ddc13..e3c8c62fb0 100644 --- a/beacon-chain/rpc/prysm/v1alpha1/debug/state.go +++ b/beacon-chain/rpc/prysm/v1alpha1/debug/state.go @@ -4,8 +4,8 @@ import ( "context" "fmt" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - pbrpc "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + pbrpc "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" "google.golang.org/grpc/codes" "google.golang.org/grpc/status" ) diff --git a/beacon-chain/rpc/prysm/v1alpha1/debug/state_test.go b/beacon-chain/rpc/prysm/v1alpha1/debug/state_test.go index eca0580016..3ee20e0489 100644 --- a/beacon-chain/rpc/prysm/v1alpha1/debug/state_test.go +++ b/beacon-chain/rpc/prysm/v1alpha1/debug/state_test.go @@ -4,16 +4,16 @@ import ( "math" 
"testing" - mock "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/testing" - dbTest "github.com/OffchainLabs/prysm/v6/beacon-chain/db/testing" - doublylinkedtree "github.com/OffchainLabs/prysm/v6/beacon-chain/forkchoice/doubly-linked-tree" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state/stategen" - mockstategen "github.com/OffchainLabs/prysm/v6/beacon-chain/state/stategen/mock" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - pbrpc "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + mock "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain/testing" + dbTest "github.com/OffchainLabs/prysm/v7/beacon-chain/db/testing" + doublylinkedtree "github.com/OffchainLabs/prysm/v7/beacon-chain/forkchoice/doubly-linked-tree" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state/stategen" + mockstategen "github.com/OffchainLabs/prysm/v7/beacon-chain/state/stategen/mock" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + pbrpc "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" ) func addDefaultReplayerBuilder(s *Server, h stategen.HistoryAccessor) { diff --git a/beacon-chain/rpc/prysm/v1alpha1/node/BUILD.bazel b/beacon-chain/rpc/prysm/v1alpha1/node/BUILD.bazel index 15e5abe0f5..0d47b5c98b 100644 --- a/beacon-chain/rpc/prysm/v1alpha1/node/BUILD.bazel +++ b/beacon-chain/rpc/prysm/v1alpha1/node/BUILD.bazel @@ -3,7 +3,7 @@ load("@prysm//tools/go:def.bzl", "go_library", "go_test") go_library( name = "go_default_library", srcs = ["server.go"], - importpath = "github.com/OffchainLabs/prysm/v6/beacon-chain/rpc/prysm/v1alpha1/node", + importpath = "github.com/OffchainLabs/prysm/v7/beacon-chain/rpc/prysm/v1alpha1/node", visibility = ["//beacon-chain:__subpackages__"], deps = [ "//beacon-chain/blockchain:go_default_library", diff --git a/beacon-chain/rpc/prysm/v1alpha1/node/server.go b/beacon-chain/rpc/prysm/v1alpha1/node/server.go index 7e5b8bdf6b..2dd185ce93 100644 --- a/beacon-chain/rpc/prysm/v1alpha1/node/server.go +++ b/beacon-chain/rpc/prysm/v1alpha1/node/server.go @@ -11,15 +11,15 @@ import ( "strconv" "time" - "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain" - "github.com/OffchainLabs/prysm/v6/beacon-chain/db" - "github.com/OffchainLabs/prysm/v6/beacon-chain/execution" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p" - "github.com/OffchainLabs/prysm/v6/beacon-chain/sync" - "github.com/OffchainLabs/prysm/v6/io/logs" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing/trace" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" + "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain" + "github.com/OffchainLabs/prysm/v7/beacon-chain/db" + "github.com/OffchainLabs/prysm/v7/beacon-chain/execution" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p" + "github.com/OffchainLabs/prysm/v7/beacon-chain/sync" + "github.com/OffchainLabs/prysm/v7/io/logs" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing/trace" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" "github.com/golang/protobuf/ptypes/empty" "github.com/golang/protobuf/ptypes/timestamp" "github.com/libp2p/go-libp2p/core/network" diff --git 
a/beacon-chain/rpc/prysm/v1alpha1/node/server_test.go b/beacon-chain/rpc/prysm/v1alpha1/node/server_test.go index 6c615e1cdb..373763da7c 100644 --- a/beacon-chain/rpc/prysm/v1alpha1/node/server_test.go +++ b/beacon-chain/rpc/prysm/v1alpha1/node/server_test.go @@ -5,18 +5,18 @@ import ( "testing" "time" - mock "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/testing" - dbutil "github.com/OffchainLabs/prysm/v6/beacon-chain/db/testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p" - mockP2p "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/rpc/testutil" - mockSync "github.com/OffchainLabs/prysm/v6/beacon-chain/sync/initial-sync/testing" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + mock "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain/testing" + dbutil "github.com/OffchainLabs/prysm/v7/beacon-chain/db/testing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p" + mockP2p "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/testing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/rpc/testutil" + mockSync "github.com/OffchainLabs/prysm/v7/beacon-chain/sync/initial-sync/testing" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" "github.com/ethereum/go-ethereum/common" "github.com/ethereum/go-ethereum/crypto" "github.com/ethereum/go-ethereum/p2p/enode" diff --git a/beacon-chain/rpc/prysm/v1alpha1/validator/BUILD.bazel b/beacon-chain/rpc/prysm/v1alpha1/validator/BUILD.bazel index 09b9336970..823c1fdd5c 100644 --- a/beacon-chain/rpc/prysm/v1alpha1/validator/BUILD.bazel +++ b/beacon-chain/rpc/prysm/v1alpha1/validator/BUILD.bazel @@ -33,7 +33,7 @@ go_library( "sync_committee.go", "unblinder.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/beacon-chain/rpc/prysm/v1alpha1/validator", + importpath = "github.com/OffchainLabs/prysm/v7/beacon-chain/rpc/prysm/v1alpha1/validator", visibility = ["//beacon-chain:__subpackages__"], deps = [ "//api/client/builder:go_default_library", diff --git a/beacon-chain/rpc/prysm/v1alpha1/validator/aggregator.go b/beacon-chain/rpc/prysm/v1alpha1/validator/aggregator.go index f8b371af67..e88a10d67f 100644 --- a/beacon-chain/rpc/prysm/v1alpha1/validator/aggregator.go +++ b/beacon-chain/rpc/prysm/v1alpha1/validator/aggregator.go @@ -3,16 +3,16 @@ package validator import ( "context" - "github.com/OffchainLabs/prysm/v6/beacon-chain/cache" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/rpc/core" - "github.com/OffchainLabs/prysm/v6/config/features" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing/trace" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/beacon-chain/cache" + 
"github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/rpc/core" + "github.com/OffchainLabs/prysm/v7/config/features" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing/trace" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/time/slots" "google.golang.org/grpc/codes" "google.golang.org/grpc/status" ) diff --git a/beacon-chain/rpc/prysm/v1alpha1/validator/aggregator_test.go b/beacon-chain/rpc/prysm/v1alpha1/validator/aggregator_test.go index 9fe8fa4713..1fae1d90e6 100644 --- a/beacon-chain/rpc/prysm/v1alpha1/validator/aggregator_test.go +++ b/beacon-chain/rpc/prysm/v1alpha1/validator/aggregator_test.go @@ -7,25 +7,25 @@ import ( "time" "github.com/OffchainLabs/go-bitfield" - mock "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/signing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/operations/attestations" - mockp2p "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/rpc/core" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - mockSync "github.com/OffchainLabs/prysm/v6/beacon-chain/sync/initial-sync/testing" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/crypto/bls" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1/attestation" - attaggregation "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1/attestation/aggregation/attestations" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + mock "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain/testing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/signing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/operations/attestations" + mockp2p "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/testing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/rpc/core" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + mockSync "github.com/OffchainLabs/prysm/v7/beacon-chain/sync/initial-sync/testing" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/crypto/bls" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1/attestation" + attaggregation "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1/attestation/aggregation/attestations" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" ) func TestSubmitAggregateAndProof_Syncing(t *testing.T) { diff --git 
a/beacon-chain/rpc/prysm/v1alpha1/validator/attester.go b/beacon-chain/rpc/prysm/v1alpha1/validator/attester.go index 2e0dc4c940..d23cd25186 100644 --- a/beacon-chain/rpc/prysm/v1alpha1/validator/attester.go +++ b/beacon-chain/rpc/prysm/v1alpha1/validator/attester.go @@ -4,19 +4,19 @@ import ( "context" "fmt" - "github.com/OffchainLabs/prysm/v6/beacon-chain/cache" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/feed" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/feed/operation" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/rpc/core" - "github.com/OffchainLabs/prysm/v6/config/features" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/crypto/bls" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing/trace" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/beacon-chain/cache" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/feed" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/feed/operation" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/rpc/core" + "github.com/OffchainLabs/prysm/v7/config/features" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/crypto/bls" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing/trace" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" + "github.com/OffchainLabs/prysm/v7/time/slots" "google.golang.org/grpc/codes" "google.golang.org/grpc/status" "google.golang.org/protobuf/types/known/emptypb" diff --git a/beacon-chain/rpc/prysm/v1alpha1/validator/attester_mainnet_test.go b/beacon-chain/rpc/prysm/v1alpha1/validator/attester_mainnet_test.go index 39e620c576..3bacd50c9b 100644 --- a/beacon-chain/rpc/prysm/v1alpha1/validator/attester_mainnet_test.go +++ b/beacon-chain/rpc/prysm/v1alpha1/validator/attester_mainnet_test.go @@ -4,16 +4,16 @@ import ( "testing" "time" - mock "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/cache" - "github.com/OffchainLabs/prysm/v6/beacon-chain/rpc/core" - mockSync "github.com/OffchainLabs/prysm/v6/beacon-chain/sync/initial-sync/testing" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" - "github.com/OffchainLabs/prysm/v6/time/slots" + mock "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain/testing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/cache" + "github.com/OffchainLabs/prysm/v7/beacon-chain/rpc/core" + mockSync "github.com/OffchainLabs/prysm/v7/beacon-chain/sync/initial-sync/testing" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" + "github.com/OffchainLabs/prysm/v7/time/slots" "google.golang.org/protobuf/proto" ) diff 
--git a/beacon-chain/rpc/prysm/v1alpha1/validator/attester_test.go b/beacon-chain/rpc/prysm/v1alpha1/validator/attester_test.go index 6a59eabe06..7d2faf7302 100644 --- a/beacon-chain/rpc/prysm/v1alpha1/validator/attester_test.go +++ b/beacon-chain/rpc/prysm/v1alpha1/validator/attester_test.go @@ -6,24 +6,24 @@ import ( "testing" "time" - mock "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/cache" - dbutil "github.com/OffchainLabs/prysm/v6/beacon-chain/db/testing" - doublylinkedtree "github.com/OffchainLabs/prysm/v6/beacon-chain/forkchoice/doubly-linked-tree" - "github.com/OffchainLabs/prysm/v6/beacon-chain/operations/attestations" - mockp2p "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/rpc/core" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state/stategen" - mockSync "github.com/OffchainLabs/prysm/v6/beacon-chain/sync/initial-sync/testing" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/crypto/bls" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" - prysmTime "github.com/OffchainLabs/prysm/v6/time" + mock "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain/testing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/cache" + dbutil "github.com/OffchainLabs/prysm/v7/beacon-chain/db/testing" + doublylinkedtree "github.com/OffchainLabs/prysm/v7/beacon-chain/forkchoice/doubly-linked-tree" + "github.com/OffchainLabs/prysm/v7/beacon-chain/operations/attestations" + mockp2p "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/testing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/rpc/core" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state/stategen" + mockSync "github.com/OffchainLabs/prysm/v7/beacon-chain/sync/initial-sync/testing" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/crypto/bls" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" + prysmTime "github.com/OffchainLabs/prysm/v7/time" "google.golang.org/grpc/codes" "google.golang.org/grpc/status" "google.golang.org/protobuf/proto" diff --git a/beacon-chain/rpc/prysm/v1alpha1/validator/blocks.go b/beacon-chain/rpc/prysm/v1alpha1/validator/blocks.go index ebdd7eb03d..cb427da796 100644 --- a/beacon-chain/rpc/prysm/v1alpha1/validator/blocks.go +++ b/beacon-chain/rpc/prysm/v1alpha1/validator/blocks.go @@ -1,14 +1,14 @@ package validator import ( - "github.com/OffchainLabs/prysm/v6/async/event" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/blocks" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/feed" - blockfeed "github.com/OffchainLabs/prysm/v6/beacon-chain/core/feed/block" - statefeed "github.com/OffchainLabs/prysm/v6/beacon-chain/core/feed/state" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" + 
"github.com/OffchainLabs/prysm/v7/async/event" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/blocks" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/feed" + blockfeed "github.com/OffchainLabs/prysm/v7/beacon-chain/core/feed/block" + statefeed "github.com/OffchainLabs/prysm/v7/beacon-chain/core/feed/state" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" "github.com/pkg/errors" "google.golang.org/grpc/codes" "google.golang.org/grpc/status" diff --git a/beacon-chain/rpc/prysm/v1alpha1/validator/blocks_test.go b/beacon-chain/rpc/prysm/v1alpha1/validator/blocks_test.go index ea698b6d70..25a2f1904e 100644 --- a/beacon-chain/rpc/prysm/v1alpha1/validator/blocks_test.go +++ b/beacon-chain/rpc/prysm/v1alpha1/validator/blocks_test.go @@ -4,19 +4,19 @@ import ( "context" "testing" - chainMock "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/altair" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/feed" - blockfeed "github.com/OffchainLabs/prysm/v6/beacon-chain/core/feed/block" - statefeed "github.com/OffchainLabs/prysm/v6/beacon-chain/core/feed/state" - dbTest "github.com/OffchainLabs/prysm/v6/beacon-chain/db/testing" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/mock" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + chainMock "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain/testing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/altair" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/feed" + blockfeed "github.com/OffchainLabs/prysm/v7/beacon-chain/core/feed/block" + statefeed "github.com/OffchainLabs/prysm/v7/beacon-chain/core/feed/state" + dbTest "github.com/OffchainLabs/prysm/v7/beacon-chain/db/testing" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/mock" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" "go.uber.org/mock/gomock" "google.golang.org/grpc/codes" "google.golang.org/grpc/status" diff --git a/beacon-chain/rpc/prysm/v1alpha1/validator/construct_generic_block.go b/beacon-chain/rpc/prysm/v1alpha1/validator/construct_generic_block.go index 138949fe66..7d9f9bcb52 100644 --- a/beacon-chain/rpc/prysm/v1alpha1/validator/construct_generic_block.go +++ b/beacon-chain/rpc/prysm/v1alpha1/validator/construct_generic_block.go @@ -3,11 +3,11 @@ package validator import ( "fmt" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - enginev1 "github.com/OffchainLabs/prysm/v6/proto/engine/v1" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + enginev1 "github.com/OffchainLabs/prysm/v7/proto/engine/v1" + ethpb 
"github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" "github.com/pkg/errors" "google.golang.org/protobuf/proto" ) diff --git a/beacon-chain/rpc/prysm/v1alpha1/validator/construct_generic_block_test.go b/beacon-chain/rpc/prysm/v1alpha1/validator/construct_generic_block_test.go index 1eecb10a74..ff12c80b8d 100644 --- a/beacon-chain/rpc/prysm/v1alpha1/validator/construct_generic_block_test.go +++ b/beacon-chain/rpc/prysm/v1alpha1/validator/construct_generic_block_test.go @@ -3,13 +3,13 @@ package validator import ( "testing" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - enginev1 "github.com/OffchainLabs/prysm/v6/proto/engine/v1" - eth "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + enginev1 "github.com/OffchainLabs/prysm/v7/proto/engine/v1" + eth "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" "github.com/ethereum/go-ethereum/common/hexutil" ) diff --git a/beacon-chain/rpc/prysm/v1alpha1/validator/duties.go b/beacon-chain/rpc/prysm/v1alpha1/validator/duties.go index 02e38a928c..9523447166 100644 --- a/beacon-chain/rpc/prysm/v1alpha1/validator/duties.go +++ b/beacon-chain/rpc/prysm/v1alpha1/validator/duties.go @@ -3,15 +3,15 @@ package validator import ( "context" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - coreTime "github.com/OffchainLabs/prysm/v6/beacon-chain/core/time" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/transition" - "github.com/OffchainLabs/prysm/v6/beacon-chain/rpc/core" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing/trace" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + coreTime "github.com/OffchainLabs/prysm/v7/beacon-chain/core/time" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/transition" + "github.com/OffchainLabs/prysm/v7/beacon-chain/rpc/core" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing/trace" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/time/slots" "google.golang.org/grpc/codes" "google.golang.org/grpc/status" "google.golang.org/protobuf/types/known/emptypb" diff --git a/beacon-chain/rpc/prysm/v1alpha1/validator/duties_test.go b/beacon-chain/rpc/prysm/v1alpha1/validator/duties_test.go index 4de0d87098..65eb5b45bd 100644 --- a/beacon-chain/rpc/prysm/v1alpha1/validator/duties_test.go +++ b/beacon-chain/rpc/prysm/v1alpha1/validator/duties_test.go @@ -5,23 +5,23 @@ import ( "testing" "time" - mockChain "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/cache" - "github.com/OffchainLabs/prysm/v6/beacon-chain/cache/depositsnapshot" 
- "github.com/OffchainLabs/prysm/v6/beacon-chain/core/altair" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/execution" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/transition" - mockExecution "github.com/OffchainLabs/prysm/v6/beacon-chain/execution/testing" - mockSync "github.com/OffchainLabs/prysm/v6/beacon-chain/sync/initial-sync/testing" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + mockChain "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain/testing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/cache" + "github.com/OffchainLabs/prysm/v7/beacon-chain/cache/depositsnapshot" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/altair" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/execution" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/transition" + mockExecution "github.com/OffchainLabs/prysm/v7/beacon-chain/execution/testing" + mockSync "github.com/OffchainLabs/prysm/v7/beacon-chain/sync/initial-sync/testing" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" ) // pubKey is a helper to generate a well-formed public key. 
diff --git a/beacon-chain/rpc/prysm/v1alpha1/validator/duties_v2.go b/beacon-chain/rpc/prysm/v1alpha1/validator/duties_v2.go index 32ca83c471..d48dfdce9a 100644 --- a/beacon-chain/rpc/prysm/v1alpha1/validator/duties_v2.go +++ b/beacon-chain/rpc/prysm/v1alpha1/validator/duties_v2.go @@ -3,16 +3,16 @@ package validator import ( "context" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - coreTime "github.com/OffchainLabs/prysm/v6/beacon-chain/core/time" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/transition" - "github.com/OffchainLabs/prysm/v6/beacon-chain/rpc/core" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing/trace" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + coreTime "github.com/OffchainLabs/prysm/v7/beacon-chain/core/time" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/transition" + "github.com/OffchainLabs/prysm/v7/beacon-chain/rpc/core" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing/trace" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/time/slots" "google.golang.org/grpc/codes" "google.golang.org/grpc/status" ) diff --git a/beacon-chain/rpc/prysm/v1alpha1/validator/duties_v2_test.go b/beacon-chain/rpc/prysm/v1alpha1/validator/duties_v2_test.go index d4c01b48b5..09e58e7a10 100644 --- a/beacon-chain/rpc/prysm/v1alpha1/validator/duties_v2_test.go +++ b/beacon-chain/rpc/prysm/v1alpha1/validator/duties_v2_test.go @@ -4,25 +4,25 @@ import ( "testing" "time" - mockChain "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/cache" - "github.com/OffchainLabs/prysm/v6/beacon-chain/cache/depositsnapshot" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/altair" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/execution" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/transition" - mockExecution "github.com/OffchainLabs/prysm/v6/beacon-chain/execution/testing" - mockSync "github.com/OffchainLabs/prysm/v6/beacon-chain/sync/initial-sync/testing" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" - "github.com/OffchainLabs/prysm/v6/time/slots" + mockChain "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain/testing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/cache" + "github.com/OffchainLabs/prysm/v7/beacon-chain/cache/depositsnapshot" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/altair" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/execution" + 
"github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/transition" + mockExecution "github.com/OffchainLabs/prysm/v7/beacon-chain/execution/testing" + mockSync "github.com/OffchainLabs/prysm/v7/beacon-chain/sync/initial-sync/testing" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" + "github.com/OffchainLabs/prysm/v7/time/slots" ) func TestGetDutiesV2_OK(t *testing.T) { diff --git a/beacon-chain/rpc/prysm/v1alpha1/validator/exit.go b/beacon-chain/rpc/prysm/v1alpha1/validator/exit.go index 40443bf0d6..075378f103 100644 --- a/beacon-chain/rpc/prysm/v1alpha1/validator/exit.go +++ b/beacon-chain/rpc/prysm/v1alpha1/validator/exit.go @@ -3,11 +3,11 @@ package validator import ( "context" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/blocks" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/feed" - opfeed "github.com/OffchainLabs/prysm/v6/beacon-chain/core/feed/operation" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/blocks" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/feed" + opfeed "github.com/OffchainLabs/prysm/v7/beacon-chain/core/feed/operation" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" "google.golang.org/grpc/codes" "google.golang.org/grpc/status" ) diff --git a/beacon-chain/rpc/prysm/v1alpha1/validator/exit_test.go b/beacon-chain/rpc/prysm/v1alpha1/validator/exit_test.go index 89ff60bdaf..21dbf999a3 100644 --- a/beacon-chain/rpc/prysm/v1alpha1/validator/exit_test.go +++ b/beacon-chain/rpc/prysm/v1alpha1/validator/exit_test.go @@ -4,22 +4,22 @@ import ( "testing" "time" - mockChain "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/feed" - opfeed "github.com/OffchainLabs/prysm/v6/beacon-chain/core/feed/operation" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/signing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/transition" - "github.com/OffchainLabs/prysm/v6/beacon-chain/operations/voluntaryexits" - mockp2p "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/testing" - mockSync "github.com/OffchainLabs/prysm/v6/beacon-chain/sync/initial-sync/testing" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + mockChain "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain/testing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/feed" + opfeed "github.com/OffchainLabs/prysm/v7/beacon-chain/core/feed/operation" + 
"github.com/OffchainLabs/prysm/v7/beacon-chain/core/signing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/transition" + "github.com/OffchainLabs/prysm/v7/beacon-chain/operations/voluntaryexits" + mockp2p "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/testing" + mockSync "github.com/OffchainLabs/prysm/v7/beacon-chain/sync/initial-sync/testing" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" ) func TestProposeExit_Notification(t *testing.T) { diff --git a/beacon-chain/rpc/prysm/v1alpha1/validator/proposer.go b/beacon-chain/rpc/prysm/v1alpha1/validator/proposer.go index 091a745737..fef20a9e72 100644 --- a/beacon-chain/rpc/prysm/v1alpha1/validator/proposer.go +++ b/beacon-chain/rpc/prysm/v1alpha1/validator/proposer.go @@ -7,28 +7,28 @@ import ( "sync" "time" - builderapi "github.com/OffchainLabs/prysm/v6/api/client/builder" - "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain" - "github.com/OffchainLabs/prysm/v6/beacon-chain/builder" - "github.com/OffchainLabs/prysm/v6/beacon-chain/cache" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/feed" - blockfeed "github.com/OffchainLabs/prysm/v6/beacon-chain/core/feed/block" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/feed/operation" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/peerdas" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/transition" - "github.com/OffchainLabs/prysm/v6/beacon-chain/db/kv" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing/trace" - enginev1 "github.com/OffchainLabs/prysm/v6/proto/engine/v1" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" - "github.com/OffchainLabs/prysm/v6/time/slots" + builderapi "github.com/OffchainLabs/prysm/v7/api/client/builder" + "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain" + "github.com/OffchainLabs/prysm/v7/beacon-chain/builder" + "github.com/OffchainLabs/prysm/v7/beacon-chain/cache" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/feed" + blockfeed "github.com/OffchainLabs/prysm/v7/beacon-chain/core/feed/block" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/feed/operation" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/peerdas" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/transition" + "github.com/OffchainLabs/prysm/v7/beacon-chain/db/kv" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + 
"github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing/trace" + enginev1 "github.com/OffchainLabs/prysm/v7/proto/engine/v1" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" + "github.com/OffchainLabs/prysm/v7/time/slots" "github.com/ethereum/go-ethereum/common" "github.com/ethereum/go-ethereum/common/hexutil" emptypb "github.com/golang/protobuf/ptypes/empty" diff --git a/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_altair.go b/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_altair.go index 5a7ae057ac..7894c49261 100644 --- a/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_altair.go +++ b/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_altair.go @@ -5,19 +5,19 @@ import ( "context" "github.com/OffchainLabs/go-bitfield" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/crypto/bls" - "github.com/OffchainLabs/prysm/v6/crypto/bls/common" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing/trace" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - synccontribution "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1/attestation/aggregation/sync_contribution" - "github.com/OffchainLabs/prysm/v6/runtime/version" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/crypto/bls" + "github.com/OffchainLabs/prysm/v7/crypto/bls/common" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing/trace" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + synccontribution "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1/attestation/aggregation/sync_contribution" + "github.com/OffchainLabs/prysm/v7/runtime/version" + "github.com/OffchainLabs/prysm/v7/time/slots" "github.com/pkg/errors" ) diff --git a/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_altair_test.go b/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_altair_test.go index 04f086079a..b97366263a 100644 --- a/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_altair_test.go +++ b/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_altair_test.go @@ -4,19 +4,19 @@ import ( "testing" "github.com/OffchainLabs/go-bitfield" - chainmock "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/operations/synccommittee" - mockSync "github.com/OffchainLabs/prysm/v6/beacon-chain/sync/initial-sync/testing" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/crypto/bls" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - 
"github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + chainmock "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain/testing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/operations/synccommittee" + mockSync "github.com/OffchainLabs/prysm/v7/beacon-chain/sync/initial-sync/testing" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/crypto/bls" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" ) func TestProposer_GetSyncAggregate_OK(t *testing.T) { diff --git a/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_attestations.go b/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_attestations.go index 2e84393854..ad8a57f701 100644 --- a/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_attestations.go +++ b/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_attestations.go @@ -9,20 +9,20 @@ import ( "sort" "github.com/OffchainLabs/go-bitfield" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/blocks" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/electra" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/config/features" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing/trace" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1/attestation" - "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1/attestation/aggregation" - attaggregation "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1/attestation/aggregation/attestations" - "github.com/OffchainLabs/prysm/v6/runtime/version" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/blocks" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/electra" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/config/features" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing/trace" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1/attestation" + "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1/attestation/aggregation" + attaggregation "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1/attestation/aggregation/attestations" + "github.com/OffchainLabs/prysm/v7/runtime/version" + "github.com/OffchainLabs/prysm/v7/time/slots" "github.com/pkg/errors" "github.com/sirupsen/logrus" ) diff --git a/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_attestations_electra.go b/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_attestations_electra.go index 7ca2a62f5c..71d86d4ca4 100644 --- a/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_attestations_electra.go +++ 
b/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_attestations_electra.go @@ -5,9 +5,9 @@ import ( "slices" "github.com/OffchainLabs/go-bitfield" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/crypto/bls" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/crypto/bls" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" ) // computeOnChainAggregate constructs a final aggregate form a list of network aggregates with equal attestation data. diff --git a/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_attestations_electra_test.go b/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_attestations_electra_test.go index ddda9b4f19..3b95c27011 100644 --- a/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_attestations_electra_test.go +++ b/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_attestations_electra_test.go @@ -5,13 +5,13 @@ import ( "testing" "github.com/OffchainLabs/go-bitfield" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/crypto/bls/blst" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/crypto/bls/blst" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" ) func Test_computeOnChainAggregate(t *testing.T) { diff --git a/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_attestations_test.go b/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_attestations_test.go index 93056e4bea..8ae2978674 100644 --- a/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_attestations_test.go +++ b/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_attestations_test.go @@ -8,20 +8,20 @@ import ( "testing" "github.com/OffchainLabs/go-bitfield" - chainMock "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/electra" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/operations/attestations" - "github.com/OffchainLabs/prysm/v6/beacon-chain/operations/attestations/mock" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/crypto/bls/blst" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" - "github.com/OffchainLabs/prysm/v6/time/slots" + chainMock "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain/testing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/electra" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/operations/attestations" + "github.com/OffchainLabs/prysm/v7/beacon-chain/operations/attestations/mock" + 
"github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/crypto/bls/blst" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" + "github.com/OffchainLabs/prysm/v7/time/slots" ) func TestProposer_ProposerAtts_committeeAwareSort(t *testing.T) { diff --git a/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_bellatrix.go b/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_bellatrix.go index 0e5a339b22..47c2a76e31 100644 --- a/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_bellatrix.go +++ b/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_bellatrix.go @@ -7,20 +7,20 @@ import ( "math/big" "time" - "github.com/OffchainLabs/prysm/v6/api/client/builder" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/signing" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/encoding/ssz" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing/trace" - enginev1 "github.com/OffchainLabs/prysm/v6/proto/engine/v1" - "github.com/OffchainLabs/prysm/v6/runtime/version" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/api/client/builder" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/signing" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/encoding/ssz" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing/trace" + enginev1 "github.com/OffchainLabs/prysm/v7/proto/engine/v1" + "github.com/OffchainLabs/prysm/v7/runtime/version" + "github.com/OffchainLabs/prysm/v7/time/slots" "github.com/pkg/errors" "github.com/prometheus/client_golang/prometheus" "github.com/prometheus/client_golang/prometheus/promauto" diff --git a/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_bellatrix_test.go b/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_bellatrix_test.go index 572738227e..a3f586a45d 100644 --- a/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_bellatrix_test.go +++ b/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_bellatrix_test.go @@ -6,29 +6,29 @@ import ( "testing" "time" - "github.com/OffchainLabs/prysm/v6/api/client/builder" - blockchainTest "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/testing" - builderTest "github.com/OffchainLabs/prysm/v6/beacon-chain/builder/testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/cache" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/signing" - dbTest "github.com/OffchainLabs/prysm/v6/beacon-chain/db/testing" - powtesting "github.com/OffchainLabs/prysm/v6/beacon-chain/execution/testing" - 
doublylinkedtree "github.com/OffchainLabs/prysm/v6/beacon-chain/forkchoice/doubly-linked-tree" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - consensus_types "github.com/OffchainLabs/prysm/v6/consensus-types" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/crypto/bls" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/encoding/ssz" - v1 "github.com/OffchainLabs/prysm/v6/proto/engine/v1" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/api/client/builder" + blockchainTest "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain/testing" + builderTest "github.com/OffchainLabs/prysm/v7/beacon-chain/builder/testing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/cache" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/signing" + dbTest "github.com/OffchainLabs/prysm/v7/beacon-chain/db/testing" + powtesting "github.com/OffchainLabs/prysm/v7/beacon-chain/execution/testing" + doublylinkedtree "github.com/OffchainLabs/prysm/v7/beacon-chain/forkchoice/doubly-linked-tree" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + consensus_types "github.com/OffchainLabs/prysm/v7/consensus-types" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/crypto/bls" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/encoding/ssz" + v1 "github.com/OffchainLabs/prysm/v7/proto/engine/v1" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" + "github.com/OffchainLabs/prysm/v7/time/slots" "github.com/ethereum/go-ethereum/common" "github.com/pkg/errors" logTest "github.com/sirupsen/logrus/hooks/test" diff --git a/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_builder.go b/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_builder.go index 76fa6b24a9..1d824cc6d3 100644 --- a/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_builder.go +++ b/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_builder.go @@ -3,12 +3,12 @@ package validator import ( "context" - "github.com/OffchainLabs/prysm/v6/beacon-chain/cache" - "github.com/OffchainLabs/prysm/v6/beacon-chain/db/kv" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing/trace" + "github.com/OffchainLabs/prysm/v7/beacon-chain/cache" + "github.com/OffchainLabs/prysm/v7/beacon-chain/db/kv" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing/trace" 
"github.com/pkg/errors" "github.com/sirupsen/logrus" ) diff --git a/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_builder_test.go b/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_builder_test.go index 9ff262273e..b638acda5f 100644 --- a/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_builder_test.go +++ b/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_builder_test.go @@ -4,21 +4,21 @@ import ( "testing" "time" - blockchainTest "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/builder" - testing2 "github.com/OffchainLabs/prysm/v6/beacon-chain/builder/testing" - dbTest "github.com/OffchainLabs/prysm/v6/beacon-chain/db/testing" - doublylinkedtree "github.com/OffchainLabs/prysm/v6/beacon-chain/forkchoice/doubly-linked-tree" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - enginev1 "github.com/OffchainLabs/prysm/v6/proto/engine/v1" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/require" + blockchainTest "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain/testing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/builder" + testing2 "github.com/OffchainLabs/prysm/v7/beacon-chain/builder/testing" + dbTest "github.com/OffchainLabs/prysm/v7/beacon-chain/db/testing" + doublylinkedtree "github.com/OffchainLabs/prysm/v7/beacon-chain/forkchoice/doubly-linked-tree" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + enginev1 "github.com/OffchainLabs/prysm/v7/proto/engine/v1" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/require" logTest "github.com/sirupsen/logrus/hooks/test" ) diff --git a/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_capella.go b/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_capella.go index cc945fe2a9..6214204bf1 100644 --- a/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_capella.go +++ b/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_capella.go @@ -1,10 +1,10 @@ package validator import ( - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" ) // Sets the bls to exec data for a block. 
diff --git a/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_deneb.go b/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_deneb.go index ac246b27e1..dac00152ae 100644 --- a/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_deneb.go +++ b/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_deneb.go @@ -3,10 +3,10 @@ package validator import ( "errors" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" ) // BuildBlobSidecars given a block, builds the blob sidecars for the block. diff --git a/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_deneb_bench_test.go b/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_deneb_bench_test.go index abbfb4e1c0..7427dd46b9 100644 --- a/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_deneb_bench_test.go +++ b/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_deneb_bench_test.go @@ -4,13 +4,13 @@ import ( "errors" "testing" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" - "github.com/OffchainLabs/prysm/v6/testing/util" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" + "github.com/OffchainLabs/prysm/v7/testing/util" "github.com/ethereum/go-ethereum/common/hexutil" ) diff --git a/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_deneb_test.go b/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_deneb_test.go index 95b16554ca..45229db3a7 100644 --- a/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_deneb_test.go +++ b/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_deneb_test.go @@ -3,11 +3,11 @@ package validator import ( "testing" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" "github.com/ethereum/go-ethereum/common/hexutil" ) diff --git a/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_deposits.go b/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_deposits.go index 6b0bb317e9..f398e0b7eb 100644 --- a/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_deposits.go +++ b/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_deposits.go @@ -5,15 +5,15 @@ import ( "context" 
"math/big" - "github.com/OffchainLabs/prysm/v6/beacon-chain/cache" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/container/trie" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing/trace" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" + "github.com/OffchainLabs/prysm/v7/beacon-chain/cache" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/container/trie" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing/trace" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" "github.com/pkg/errors" "github.com/sirupsen/logrus" "golang.org/x/sync/errgroup" diff --git a/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_deposits_test.go b/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_deposits_test.go index 6f51c55782..f17440acc5 100644 --- a/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_deposits_test.go +++ b/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_deposits_test.go @@ -4,17 +4,17 @@ import ( "math/big" "testing" - mock "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/cache/depositsnapshot" - mockExecution "github.com/OffchainLabs/prysm/v6/beacon-chain/execution/testing" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/container/trie" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + mock "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain/testing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/cache/depositsnapshot" + mockExecution "github.com/OffchainLabs/prysm/v7/beacon-chain/execution/testing" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/container/trie" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" ) func TestShouldFallback(t *testing.T) { diff --git a/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_empty_block.go b/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_empty_block.go index 2b67b78855..c730df9915 100644 --- a/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_empty_block.go +++ b/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_empty_block.go @@ -1,12 +1,12 @@ package validator import ( - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - ethpb 
"github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/time/slots" "google.golang.org/grpc/codes" "google.golang.org/grpc/status" ) diff --git a/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_empty_block_test.go b/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_empty_block_test.go index 7775fcff6c..9c77a10c06 100644 --- a/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_empty_block_test.go +++ b/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_empty_block_test.go @@ -3,12 +3,12 @@ package validator import ( "testing" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/require" ) func Test_getEmptyBlock(t *testing.T) { diff --git a/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_eth1data.go b/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_eth1data.go index 9679adea80..d43f957da7 100644 --- a/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_eth1data.go +++ b/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_eth1data.go @@ -4,17 +4,17 @@ import ( "context" "math/big" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/blocks" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/config/features" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/crypto/hash" - "github.com/OffchainLabs/prysm/v6/crypto/rand" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/blocks" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/config/features" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/crypto/hash" + "github.com/OffchainLabs/prysm/v7/crypto/rand" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/time/slots" "github.com/pkg/errors" fastssz "github.com/prysmaticlabs/fastssz" ) diff --git a/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_execution_payload.go b/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_execution_payload.go index 84e6000dd3..30248c8c85 100644 --- 
a/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_execution_payload.go +++ b/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_execution_payload.go @@ -5,23 +5,23 @@ import ( "context" "fmt" - "github.com/OffchainLabs/prysm/v6/api/client/builder" - "github.com/OffchainLabs/prysm/v6/beacon-chain/cache" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/blocks" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/time" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - consensusblocks "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - payloadattribute "github.com/OffchainLabs/prysm/v6/consensus-types/payload-attribute" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing/trace" - enginev1 "github.com/OffchainLabs/prysm/v6/proto/engine/v1" - "github.com/OffchainLabs/prysm/v6/runtime/version" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/api/client/builder" + "github.com/OffchainLabs/prysm/v7/beacon-chain/cache" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/blocks" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/time" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + consensusblocks "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + payloadattribute "github.com/OffchainLabs/prysm/v7/consensus-types/payload-attribute" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing/trace" + enginev1 "github.com/OffchainLabs/prysm/v7/proto/engine/v1" + "github.com/OffchainLabs/prysm/v7/runtime/version" + "github.com/OffchainLabs/prysm/v7/time/slots" "github.com/pkg/errors" "github.com/prometheus/client_golang/prometheus" "github.com/prometheus/client_golang/prometheus/promauto" diff --git a/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_execution_payload_test.go b/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_execution_payload_test.go index 62be55ecfa..34e2434080 100644 --- a/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_execution_payload_test.go +++ b/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_execution_payload_test.go @@ -5,19 +5,19 @@ import ( "errors" "testing" - chainMock "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/cache" - dbTest "github.com/OffchainLabs/prysm/v6/beacon-chain/db/testing" - powtesting "github.com/OffchainLabs/prysm/v6/beacon-chain/execution/testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - pb "github.com/OffchainLabs/prysm/v6/proto/engine/v1" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - 
"github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + chainMock "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain/testing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/cache" + dbTest "github.com/OffchainLabs/prysm/v7/beacon-chain/db/testing" + powtesting "github.com/OffchainLabs/prysm/v7/beacon-chain/execution/testing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + pb "github.com/OffchainLabs/prysm/v7/proto/engine/v1" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" "github.com/ethereum/go-ethereum/common" gethtypes "github.com/ethereum/go-ethereum/core/types" logTest "github.com/sirupsen/logrus/hooks/test" diff --git a/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_exits.go b/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_exits.go index e3676088f6..89d79c4644 100644 --- a/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_exits.go +++ b/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_exits.go @@ -1,9 +1,9 @@ package validator import ( - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" ) func (vs *Server) getExits(head state.BeaconState, slot primitives.Slot) []*ethpb.SignedVoluntaryExit { diff --git a/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_exits_test.go b/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_exits_test.go index ec68b38639..80b7da575c 100644 --- a/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_exits_test.go +++ b/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_exits_test.go @@ -3,12 +3,12 @@ package validator import ( "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/operations/voluntaryexits" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - eth "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/beacon-chain/operations/voluntaryexits" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + eth "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" ) func TestServer_getExits(t *testing.T) { diff --git a/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_slashings.go b/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_slashings.go index dd8ff2f5e4..65daa0aa0a 100644 --- a/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_slashings.go +++ b/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_slashings.go @@ -3,11 +3,11 @@ package validator import ( "context" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/blocks" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - v 
"github.com/OffchainLabs/prysm/v6/beacon-chain/core/validators" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/blocks" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + v "github.com/OffchainLabs/prysm/v7/beacon-chain/core/validators" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" ) func (vs *Server) getSlashings(ctx context.Context, head state.BeaconState) ([]*ethpb.ProposerSlashing, []ethpb.AttSlashing) { diff --git a/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_slashings_test.go b/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_slashings_test.go index a90ac8c8aa..41706c351f 100644 --- a/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_slashings_test.go +++ b/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_slashings_test.go @@ -3,12 +3,12 @@ package validator import ( "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/operations/slashings" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/beacon-chain/operations/slashings" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" ) func TestServer_getSlashings(t *testing.T) { diff --git a/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_sync_aggregate.go b/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_sync_aggregate.go index 5e398416dc..559e67966b 100644 --- a/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_sync_aggregate.go +++ b/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_sync_aggregate.go @@ -3,7 +3,7 @@ package validator import ( "bytes" - eth "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + eth "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" ) type proposerSyncContributions []*eth.SyncCommitteeContribution diff --git a/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_sync_aggregate_test.go b/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_sync_aggregate_test.go index e440db6342..785ce0b624 100644 --- a/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_sync_aggregate_test.go +++ b/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_sync_aggregate_test.go @@ -5,9 +5,9 @@ import ( "sort" "testing" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - v2 "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + v2 "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" ) func TestProposerSyncContributions_FilterByBlockRoot(t *testing.T) { diff --git a/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_test.go b/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_test.go index 447d30fa02..7365b6a501 100644 --- a/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_test.go +++ b/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_test.go @@ -7,49 +7,49 @@ import ( "time" 
"github.com/OffchainLabs/go-bitfield" - builderapi "github.com/OffchainLabs/prysm/v6/api/client/builder" - "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/kzg" - mock "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/builder" - builderTest "github.com/OffchainLabs/prysm/v6/beacon-chain/builder/testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/cache" - "github.com/OffchainLabs/prysm/v6/beacon-chain/cache/depositsnapshot" - b "github.com/OffchainLabs/prysm/v6/beacon-chain/core/blocks" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/signing" - coretime "github.com/OffchainLabs/prysm/v6/beacon-chain/core/time" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/transition" - "github.com/OffchainLabs/prysm/v6/beacon-chain/db" - dbutil "github.com/OffchainLabs/prysm/v6/beacon-chain/db/testing" - mockExecution "github.com/OffchainLabs/prysm/v6/beacon-chain/execution/testing" - doublylinkedtree "github.com/OffchainLabs/prysm/v6/beacon-chain/forkchoice/doubly-linked-tree" - "github.com/OffchainLabs/prysm/v6/beacon-chain/operations/attestations" - "github.com/OffchainLabs/prysm/v6/beacon-chain/operations/blstoexec" - "github.com/OffchainLabs/prysm/v6/beacon-chain/operations/slashings" - "github.com/OffchainLabs/prysm/v6/beacon-chain/operations/synccommittee" - "github.com/OffchainLabs/prysm/v6/beacon-chain/operations/voluntaryexits" - mockp2p "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/rpc/testutil" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state/stategen" - mockSync "github.com/OffchainLabs/prysm/v6/beacon-chain/sync/initial-sync/testing" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/container/trie" - "github.com/OffchainLabs/prysm/v6/crypto/bls" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/encoding/ssz" - enginev1 "github.com/OffchainLabs/prysm/v6/proto/engine/v1" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1/attestation" - attaggregation "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1/attestation/aggregation/attestations" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" - "github.com/OffchainLabs/prysm/v6/time/slots" + builderapi "github.com/OffchainLabs/prysm/v7/api/client/builder" + "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain/kzg" + mock "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain/testing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/builder" + builderTest "github.com/OffchainLabs/prysm/v7/beacon-chain/builder/testing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/cache" + "github.com/OffchainLabs/prysm/v7/beacon-chain/cache/depositsnapshot" + b "github.com/OffchainLabs/prysm/v7/beacon-chain/core/blocks" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/signing" + coretime 
"github.com/OffchainLabs/prysm/v7/beacon-chain/core/time" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/transition" + "github.com/OffchainLabs/prysm/v7/beacon-chain/db" + dbutil "github.com/OffchainLabs/prysm/v7/beacon-chain/db/testing" + mockExecution "github.com/OffchainLabs/prysm/v7/beacon-chain/execution/testing" + doublylinkedtree "github.com/OffchainLabs/prysm/v7/beacon-chain/forkchoice/doubly-linked-tree" + "github.com/OffchainLabs/prysm/v7/beacon-chain/operations/attestations" + "github.com/OffchainLabs/prysm/v7/beacon-chain/operations/blstoexec" + "github.com/OffchainLabs/prysm/v7/beacon-chain/operations/slashings" + "github.com/OffchainLabs/prysm/v7/beacon-chain/operations/synccommittee" + "github.com/OffchainLabs/prysm/v7/beacon-chain/operations/voluntaryexits" + mockp2p "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/testing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/rpc/testutil" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state/stategen" + mockSync "github.com/OffchainLabs/prysm/v7/beacon-chain/sync/initial-sync/testing" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/container/trie" + "github.com/OffchainLabs/prysm/v7/crypto/bls" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/encoding/ssz" + enginev1 "github.com/OffchainLabs/prysm/v7/proto/engine/v1" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1/attestation" + attaggregation "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1/attestation/aggregation/attestations" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" + "github.com/OffchainLabs/prysm/v7/time/slots" "github.com/ethereum/go-ethereum/common" "github.com/ethereum/go-ethereum/common/hexutil" "github.com/pkg/errors" diff --git a/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_utils_bench_test.go b/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_utils_bench_test.go index 9eb09e077f..2bc3b57da1 100644 --- a/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_utils_bench_test.go +++ b/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_utils_bench_test.go @@ -5,10 +5,10 @@ import ( "testing" "github.com/OffchainLabs/go-bitfield" - "github.com/OffchainLabs/prysm/v6/config/params" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - aggtesting "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1/attestation/aggregation/testing" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/config/params" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + aggtesting "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1/attestation/aggregation/testing" + "github.com/OffchainLabs/prysm/v7/testing/require" ) func BenchmarkProposerAtts_sortByProfitability(b *testing.B) { diff --git a/beacon-chain/rpc/prysm/v1alpha1/validator/server.go b/beacon-chain/rpc/prysm/v1alpha1/validator/server.go index 2b65be8774..aaba034c12 100644 --- a/beacon-chain/rpc/prysm/v1alpha1/validator/server.go +++ 
b/beacon-chain/rpc/prysm/v1alpha1/validator/server.go @@ -8,32 +8,32 @@ import ( "context" "time" - "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain" - "github.com/OffchainLabs/prysm/v6/beacon-chain/builder" - "github.com/OffchainLabs/prysm/v6/beacon-chain/cache" - "github.com/OffchainLabs/prysm/v6/beacon-chain/cache/depositsnapshot" - blockfeed "github.com/OffchainLabs/prysm/v6/beacon-chain/core/feed/block" - opfeed "github.com/OffchainLabs/prysm/v6/beacon-chain/core/feed/operation" - statefeed "github.com/OffchainLabs/prysm/v6/beacon-chain/core/feed/state" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/signing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/db" - "github.com/OffchainLabs/prysm/v6/beacon-chain/execution" - "github.com/OffchainLabs/prysm/v6/beacon-chain/operations/attestations" - "github.com/OffchainLabs/prysm/v6/beacon-chain/operations/blstoexec" - "github.com/OffchainLabs/prysm/v6/beacon-chain/operations/slashings" - "github.com/OffchainLabs/prysm/v6/beacon-chain/operations/synccommittee" - "github.com/OffchainLabs/prysm/v6/beacon-chain/operations/voluntaryexits" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p" - "github.com/OffchainLabs/prysm/v6/beacon-chain/rpc/core" - "github.com/OffchainLabs/prysm/v6/beacon-chain/startup" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state/stategen" - "github.com/OffchainLabs/prysm/v6/beacon-chain/sync" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/genesis" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain" + "github.com/OffchainLabs/prysm/v7/beacon-chain/builder" + "github.com/OffchainLabs/prysm/v7/beacon-chain/cache" + "github.com/OffchainLabs/prysm/v7/beacon-chain/cache/depositsnapshot" + blockfeed "github.com/OffchainLabs/prysm/v7/beacon-chain/core/feed/block" + opfeed "github.com/OffchainLabs/prysm/v7/beacon-chain/core/feed/operation" + statefeed "github.com/OffchainLabs/prysm/v7/beacon-chain/core/feed/state" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/signing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/db" + "github.com/OffchainLabs/prysm/v7/beacon-chain/execution" + "github.com/OffchainLabs/prysm/v7/beacon-chain/operations/attestations" + "github.com/OffchainLabs/prysm/v7/beacon-chain/operations/blstoexec" + "github.com/OffchainLabs/prysm/v7/beacon-chain/operations/slashings" + "github.com/OffchainLabs/prysm/v7/beacon-chain/operations/synccommittee" + "github.com/OffchainLabs/prysm/v7/beacon-chain/operations/voluntaryexits" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p" + "github.com/OffchainLabs/prysm/v7/beacon-chain/rpc/core" + "github.com/OffchainLabs/prysm/v7/beacon-chain/startup" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state/stategen" + "github.com/OffchainLabs/prysm/v7/beacon-chain/sync" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/genesis" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/time/slots" "google.golang.org/grpc/codes" "google.golang.org/grpc/status" "google.golang.org/protobuf/types/known/emptypb" diff --git 
a/beacon-chain/rpc/prysm/v1alpha1/validator/server_mainnet_test.go b/beacon-chain/rpc/prysm/v1alpha1/validator/server_mainnet_test.go index 69093b533f..584f5740a9 100644 --- a/beacon-chain/rpc/prysm/v1alpha1/validator/server_mainnet_test.go +++ b/beacon-chain/rpc/prysm/v1alpha1/validator/server_mainnet_test.go @@ -3,20 +3,20 @@ package validator import ( "testing" - mockChain "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/cache/depositsnapshot" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/signing" - mockExecution "github.com/OffchainLabs/prysm/v6/beacon-chain/execution/testing" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/container/trie" - "github.com/OffchainLabs/prysm/v6/crypto/bls" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/mock" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + mockChain "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain/testing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/cache/depositsnapshot" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/signing" + mockExecution "github.com/OffchainLabs/prysm/v7/beacon-chain/execution/testing" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/container/trie" + "github.com/OffchainLabs/prysm/v7/crypto/bls" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/mock" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" "go.uber.org/mock/gomock" ) diff --git a/beacon-chain/rpc/prysm/v1alpha1/validator/server_test.go b/beacon-chain/rpc/prysm/v1alpha1/validator/server_test.go index e79da751ed..8d8f20c616 100644 --- a/beacon-chain/rpc/prysm/v1alpha1/validator/server_test.go +++ b/beacon-chain/rpc/prysm/v1alpha1/validator/server_test.go @@ -7,23 +7,23 @@ import ( "testing" "time" - "github.com/OffchainLabs/prysm/v6/async/event" - mockChain "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/cache/depositsnapshot" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/signing" - mockExecution "github.com/OffchainLabs/prysm/v6/beacon-chain/execution/testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/startup" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/crypto/bls" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/genesis" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/mock" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/async/event" + mockChain 
"github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain/testing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/cache/depositsnapshot" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/signing" + mockExecution "github.com/OffchainLabs/prysm/v7/beacon-chain/execution/testing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/startup" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/crypto/bls" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/genesis" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/mock" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" "github.com/ethereum/go-ethereum/common/hexutil" logTest "github.com/sirupsen/logrus/hooks/test" "go.uber.org/mock/gomock" diff --git a/beacon-chain/rpc/prysm/v1alpha1/validator/status.go b/beacon-chain/rpc/prysm/v1alpha1/validator/status.go index e42a3bd876..8c86e0a203 100644 --- a/beacon-chain/rpc/prysm/v1alpha1/validator/status.go +++ b/beacon-chain/rpc/prysm/v1alpha1/validator/status.go @@ -4,20 +4,20 @@ import ( "context" "errors" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/signing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/time" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/contracts/deposit" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing/trace" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/signing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/time" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/contracts/deposit" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing/trace" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" + "github.com/OffchainLabs/prysm/v7/time/slots" "google.golang.org/grpc/codes" "google.golang.org/grpc/status" ) diff --git a/beacon-chain/rpc/prysm/v1alpha1/validator/status_mainnet_test.go b/beacon-chain/rpc/prysm/v1alpha1/validator/status_mainnet_test.go index 04c40a09b6..578b1fd442 100644 --- a/beacon-chain/rpc/prysm/v1alpha1/validator/status_mainnet_test.go +++ b/beacon-chain/rpc/prysm/v1alpha1/validator/status_mainnet_test.go @@ -5,18 +5,18 @@ import ( "testing" "time" - mockChain "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/testing" - 
"github.com/OffchainLabs/prysm/v6/beacon-chain/cache/depositsnapshot" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - mockExecution "github.com/OffchainLabs/prysm/v6/beacon-chain/execution/testing" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/container/trie" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + mockChain "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain/testing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/cache/depositsnapshot" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + mockExecution "github.com/OffchainLabs/prysm/v7/beacon-chain/execution/testing" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/container/trie" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" "google.golang.org/protobuf/proto" ) diff --git a/beacon-chain/rpc/prysm/v1alpha1/validator/status_test.go b/beacon-chain/rpc/prysm/v1alpha1/validator/status_test.go index 3299d07563..b38b96bad2 100644 --- a/beacon-chain/rpc/prysm/v1alpha1/validator/status_test.go +++ b/beacon-chain/rpc/prysm/v1alpha1/validator/status_test.go @@ -5,24 +5,24 @@ import ( "testing" "time" - mockChain "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/cache/depositsnapshot" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - mockExecution "github.com/OffchainLabs/prysm/v6/beacon-chain/execution/testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - mockstategen "github.com/OffchainLabs/prysm/v6/beacon-chain/state/stategen/mock" - mockSync "github.com/OffchainLabs/prysm/v6/beacon-chain/sync/initial-sync/testing" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/container/trie" - "github.com/OffchainLabs/prysm/v6/crypto/bls" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" - "github.com/OffchainLabs/prysm/v6/time/slots" + mockChain "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain/testing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/cache/depositsnapshot" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + mockExecution "github.com/OffchainLabs/prysm/v7/beacon-chain/execution/testing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + mockstategen "github.com/OffchainLabs/prysm/v7/beacon-chain/state/stategen/mock" + mockSync "github.com/OffchainLabs/prysm/v7/beacon-chain/sync/initial-sync/testing" + 
"github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/container/trie" + "github.com/OffchainLabs/prysm/v7/crypto/bls" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" + "github.com/OffchainLabs/prysm/v7/time/slots" "github.com/d4l3k/messagediff" "google.golang.org/grpc/codes" "google.golang.org/grpc/status" diff --git a/beacon-chain/rpc/prysm/v1alpha1/validator/sync_committee.go b/beacon-chain/rpc/prysm/v1alpha1/validator/sync_committee.go index acf02e8de8..db15b3ac80 100644 --- a/beacon-chain/rpc/prysm/v1alpha1/validator/sync_committee.go +++ b/beacon-chain/rpc/prysm/v1alpha1/validator/sync_committee.go @@ -3,9 +3,9 @@ package validator import ( "context" - "github.com/OffchainLabs/prysm/v6/beacon-chain/rpc/core" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/beacon-chain/rpc/core" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" "github.com/pkg/errors" "google.golang.org/grpc/codes" "google.golang.org/grpc/status" diff --git a/beacon-chain/rpc/prysm/v1alpha1/validator/sync_committee_test.go b/beacon-chain/rpc/prysm/v1alpha1/validator/sync_committee_test.go index 35c5e6eaa6..95ae5133da 100644 --- a/beacon-chain/rpc/prysm/v1alpha1/validator/sync_committee_test.go +++ b/beacon-chain/rpc/prysm/v1alpha1/validator/sync_committee_test.go @@ -4,21 +4,21 @@ import ( "testing" "time" - mock "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/feed" - opfeed "github.com/OffchainLabs/prysm/v6/beacon-chain/core/feed/operation" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/transition" - "github.com/OffchainLabs/prysm/v6/beacon-chain/operations/synccommittee" - mockp2p "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/rpc/core" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/crypto/bls" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + mock "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain/testing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/feed" + opfeed "github.com/OffchainLabs/prysm/v7/beacon-chain/core/feed/operation" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/transition" + "github.com/OffchainLabs/prysm/v7/beacon-chain/operations/synccommittee" + mockp2p "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/testing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/rpc/core" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/crypto/bls" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + 
"github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" "google.golang.org/grpc/codes" "google.golang.org/grpc/status" "google.golang.org/protobuf/types/known/emptypb" diff --git a/beacon-chain/rpc/prysm/v1alpha1/validator/unblinder.go b/beacon-chain/rpc/prysm/v1alpha1/validator/unblinder.go index 78919e7441..638bd0accf 100644 --- a/beacon-chain/rpc/prysm/v1alpha1/validator/unblinder.go +++ b/beacon-chain/rpc/prysm/v1alpha1/validator/unblinder.go @@ -3,12 +3,12 @@ package validator import ( "bytes" - consensusblocks "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - enginev1 "github.com/OffchainLabs/prysm/v6/proto/engine/v1" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" + consensusblocks "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + enginev1 "github.com/OffchainLabs/prysm/v7/proto/engine/v1" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" "github.com/pkg/errors" ) diff --git a/beacon-chain/rpc/prysm/v1alpha1/validator/unblinder_test.go b/beacon-chain/rpc/prysm/v1alpha1/validator/unblinder_test.go index da0d1dd49d..32253f49a7 100644 --- a/beacon-chain/rpc/prysm/v1alpha1/validator/unblinder_test.go +++ b/beacon-chain/rpc/prysm/v1alpha1/validator/unblinder_test.go @@ -3,11 +3,11 @@ package validator import ( "testing" - consensusblocks "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - enginev1 "github.com/OffchainLabs/prysm/v6/proto/engine/v1" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" + consensusblocks "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + enginev1 "github.com/OffchainLabs/prysm/v7/proto/engine/v1" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" ) func TestUnblinder_UnblindBlobSidecars_InvalidBundle(t *testing.T) { diff --git a/beacon-chain/rpc/prysm/v1alpha1/validator/validator_test.go b/beacon-chain/rpc/prysm/v1alpha1/validator/validator_test.go index 42fdf7895c..f5ed369179 100644 --- a/beacon-chain/rpc/prysm/v1alpha1/validator/validator_test.go +++ b/beacon-chain/rpc/prysm/v1alpha1/validator/validator_test.go @@ -5,7 +5,7 @@ import ( "os" "testing" - "github.com/OffchainLabs/prysm/v6/config/params" + "github.com/OffchainLabs/prysm/v7/config/params" "github.com/sirupsen/logrus" ) diff --git a/beacon-chain/rpc/prysm/validator/BUILD.bazel b/beacon-chain/rpc/prysm/validator/BUILD.bazel index b7bfa46a4f..bcd3c1f055 100644 --- a/beacon-chain/rpc/prysm/validator/BUILD.bazel +++ b/beacon-chain/rpc/prysm/validator/BUILD.bazel @@ -7,7 +7,7 @@ go_library( "server.go", "validator_performance.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/beacon-chain/rpc/prysm/validator", + importpath = "github.com/OffchainLabs/prysm/v7/beacon-chain/rpc/prysm/validator", visibility = ["//visibility:public"], deps = [ "//api/server/structs:go_default_library", diff --git a/beacon-chain/rpc/prysm/validator/handlers.go 
b/beacon-chain/rpc/prysm/validator/handlers.go index 3eaf3715e6..6b60b89f2a 100644 --- a/beacon-chain/rpc/prysm/validator/handlers.go +++ b/beacon-chain/rpc/prysm/validator/handlers.go @@ -4,13 +4,13 @@ import ( "fmt" "net/http" - "github.com/OffchainLabs/prysm/v6/api/server/structs" - "github.com/OffchainLabs/prysm/v6/beacon-chain/rpc/core" - "github.com/OffchainLabs/prysm/v6/beacon-chain/rpc/eth/shared" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing/trace" - "github.com/OffchainLabs/prysm/v6/network/httputil" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/api/server/structs" + "github.com/OffchainLabs/prysm/v7/beacon-chain/rpc/core" + "github.com/OffchainLabs/prysm/v7/beacon-chain/rpc/eth/shared" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing/trace" + "github.com/OffchainLabs/prysm/v7/network/httputil" + "github.com/OffchainLabs/prysm/v7/time/slots" "github.com/ethereum/go-ethereum/common/hexutil" ) diff --git a/beacon-chain/rpc/prysm/validator/handlers_test.go b/beacon-chain/rpc/prysm/validator/handlers_test.go index d38ac2ee1f..730f144348 100644 --- a/beacon-chain/rpc/prysm/validator/handlers_test.go +++ b/beacon-chain/rpc/prysm/validator/handlers_test.go @@ -12,31 +12,31 @@ import ( "time" "github.com/OffchainLabs/go-bitfield" - "github.com/OffchainLabs/prysm/v6/api/server/structs" - mock "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/altair" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/transition" - dbTest "github.com/OffchainLabs/prysm/v6/beacon-chain/db/testing" - doublylinkedtree "github.com/OffchainLabs/prysm/v6/beacon-chain/forkchoice/doubly-linked-tree" - "github.com/OffchainLabs/prysm/v6/beacon-chain/rpc/core" - "github.com/OffchainLabs/prysm/v6/beacon-chain/rpc/testutil" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state/stategen" - mockstategen "github.com/OffchainLabs/prysm/v6/beacon-chain/state/stategen/mock" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - blocktest "github.com/OffchainLabs/prysm/v6/consensus-types/blocks/testing" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" - prysmTime "github.com/OffchainLabs/prysm/v6/time" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/api/server/structs" + mock "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain/testing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/altair" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/transition" + dbTest "github.com/OffchainLabs/prysm/v7/beacon-chain/db/testing" + doublylinkedtree "github.com/OffchainLabs/prysm/v7/beacon-chain/forkchoice/doubly-linked-tree" + "github.com/OffchainLabs/prysm/v7/beacon-chain/rpc/core" + 
"github.com/OffchainLabs/prysm/v7/beacon-chain/rpc/testutil" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state/stategen" + mockstategen "github.com/OffchainLabs/prysm/v7/beacon-chain/state/stategen/mock" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + blocktest "github.com/OffchainLabs/prysm/v7/consensus-types/blocks/testing" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" + prysmTime "github.com/OffchainLabs/prysm/v7/time" + "github.com/OffchainLabs/prysm/v7/time/slots" "github.com/ethereum/go-ethereum/common/hexutil" ) diff --git a/beacon-chain/rpc/prysm/validator/server.go b/beacon-chain/rpc/prysm/validator/server.go index 41ed7035e5..37c2930f3e 100644 --- a/beacon-chain/rpc/prysm/validator/server.go +++ b/beacon-chain/rpc/prysm/validator/server.go @@ -1,10 +1,10 @@ package validator import ( - "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain" - "github.com/OffchainLabs/prysm/v6/beacon-chain/db" - "github.com/OffchainLabs/prysm/v6/beacon-chain/rpc/core" - "github.com/OffchainLabs/prysm/v6/beacon-chain/rpc/lookup" + "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain" + "github.com/OffchainLabs/prysm/v7/beacon-chain/db" + "github.com/OffchainLabs/prysm/v7/beacon-chain/rpc/core" + "github.com/OffchainLabs/prysm/v7/beacon-chain/rpc/lookup" ) type Server struct { diff --git a/beacon-chain/rpc/prysm/validator/validator_performance.go b/beacon-chain/rpc/prysm/validator/validator_performance.go index cff272d634..29334068a3 100644 --- a/beacon-chain/rpc/prysm/validator/validator_performance.go +++ b/beacon-chain/rpc/prysm/validator/validator_performance.go @@ -5,11 +5,11 @@ import ( "io" "net/http" - "github.com/OffchainLabs/prysm/v6/api/server/structs" - "github.com/OffchainLabs/prysm/v6/beacon-chain/rpc/core" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing/trace" - "github.com/OffchainLabs/prysm/v6/network/httputil" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/api/server/structs" + "github.com/OffchainLabs/prysm/v7/beacon-chain/rpc/core" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing/trace" + "github.com/OffchainLabs/prysm/v7/network/httputil" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" "github.com/pkg/errors" ) diff --git a/beacon-chain/rpc/prysm/validator/validator_performance_test.go b/beacon-chain/rpc/prysm/validator/validator_performance_test.go index e85668f04b..5d055b3769 100644 --- a/beacon-chain/rpc/prysm/validator/validator_performance_test.go +++ b/beacon-chain/rpc/prysm/validator/validator_performance_test.go @@ -10,20 +10,20 @@ import ( "time" "github.com/OffchainLabs/go-bitfield" - "github.com/OffchainLabs/prysm/v6/api/server/structs" - mock "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/epoch/precompute" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/rpc/core" - 
"github.com/OffchainLabs/prysm/v6/beacon-chain/state" - mockSync "github.com/OffchainLabs/prysm/v6/beacon-chain/sync/initial-sync/testing" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/api/server/structs" + mock "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain/testing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/epoch/precompute" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/rpc/core" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + mockSync "github.com/OffchainLabs/prysm/v7/beacon-chain/sync/initial-sync/testing" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" ) func TestServer_GetValidatorPerformance(t *testing.T) { diff --git a/beacon-chain/rpc/service.go b/beacon-chain/rpc/service.go index 8113e32d9b..1aeeb18448 100644 --- a/beacon-chain/rpc/service.go +++ b/beacon-chain/rpc/service.go @@ -9,38 +9,38 @@ import ( "net/http" "sync" - "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain" - "github.com/OffchainLabs/prysm/v6/beacon-chain/builder" - "github.com/OffchainLabs/prysm/v6/beacon-chain/cache" - "github.com/OffchainLabs/prysm/v6/beacon-chain/cache/depositsnapshot" - blockfeed "github.com/OffchainLabs/prysm/v6/beacon-chain/core/feed/block" - opfeed "github.com/OffchainLabs/prysm/v6/beacon-chain/core/feed/operation" - statefeed "github.com/OffchainLabs/prysm/v6/beacon-chain/core/feed/state" - "github.com/OffchainLabs/prysm/v6/beacon-chain/db" - "github.com/OffchainLabs/prysm/v6/beacon-chain/db/filesystem" - "github.com/OffchainLabs/prysm/v6/beacon-chain/execution" - lightClient "github.com/OffchainLabs/prysm/v6/beacon-chain/light-client" - "github.com/OffchainLabs/prysm/v6/beacon-chain/operations/attestations" - "github.com/OffchainLabs/prysm/v6/beacon-chain/operations/blstoexec" - "github.com/OffchainLabs/prysm/v6/beacon-chain/operations/slashings" - "github.com/OffchainLabs/prysm/v6/beacon-chain/operations/synccommittee" - "github.com/OffchainLabs/prysm/v6/beacon-chain/operations/voluntaryexits" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p" - "github.com/OffchainLabs/prysm/v6/beacon-chain/rpc/core" - "github.com/OffchainLabs/prysm/v6/beacon-chain/rpc/eth/rewards" - "github.com/OffchainLabs/prysm/v6/beacon-chain/rpc/lookup" - beaconv1alpha1 "github.com/OffchainLabs/prysm/v6/beacon-chain/rpc/prysm/v1alpha1/beacon" - debugv1alpha1 "github.com/OffchainLabs/prysm/v6/beacon-chain/rpc/prysm/v1alpha1/debug" - nodev1alpha1 "github.com/OffchainLabs/prysm/v6/beacon-chain/rpc/prysm/v1alpha1/node" - validatorv1alpha1 "github.com/OffchainLabs/prysm/v6/beacon-chain/rpc/prysm/v1alpha1/validator" - "github.com/OffchainLabs/prysm/v6/beacon-chain/startup" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state/stategen" - chainSync "github.com/OffchainLabs/prysm/v6/beacon-chain/sync" - 
"github.com/OffchainLabs/prysm/v6/config/features" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/io/logs" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing" - ethpbv1alpha1 "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain" + "github.com/OffchainLabs/prysm/v7/beacon-chain/builder" + "github.com/OffchainLabs/prysm/v7/beacon-chain/cache" + "github.com/OffchainLabs/prysm/v7/beacon-chain/cache/depositsnapshot" + blockfeed "github.com/OffchainLabs/prysm/v7/beacon-chain/core/feed/block" + opfeed "github.com/OffchainLabs/prysm/v7/beacon-chain/core/feed/operation" + statefeed "github.com/OffchainLabs/prysm/v7/beacon-chain/core/feed/state" + "github.com/OffchainLabs/prysm/v7/beacon-chain/db" + "github.com/OffchainLabs/prysm/v7/beacon-chain/db/filesystem" + "github.com/OffchainLabs/prysm/v7/beacon-chain/execution" + lightClient "github.com/OffchainLabs/prysm/v7/beacon-chain/light-client" + "github.com/OffchainLabs/prysm/v7/beacon-chain/operations/attestations" + "github.com/OffchainLabs/prysm/v7/beacon-chain/operations/blstoexec" + "github.com/OffchainLabs/prysm/v7/beacon-chain/operations/slashings" + "github.com/OffchainLabs/prysm/v7/beacon-chain/operations/synccommittee" + "github.com/OffchainLabs/prysm/v7/beacon-chain/operations/voluntaryexits" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p" + "github.com/OffchainLabs/prysm/v7/beacon-chain/rpc/core" + "github.com/OffchainLabs/prysm/v7/beacon-chain/rpc/eth/rewards" + "github.com/OffchainLabs/prysm/v7/beacon-chain/rpc/lookup" + beaconv1alpha1 "github.com/OffchainLabs/prysm/v7/beacon-chain/rpc/prysm/v1alpha1/beacon" + debugv1alpha1 "github.com/OffchainLabs/prysm/v7/beacon-chain/rpc/prysm/v1alpha1/debug" + nodev1alpha1 "github.com/OffchainLabs/prysm/v7/beacon-chain/rpc/prysm/v1alpha1/node" + validatorv1alpha1 "github.com/OffchainLabs/prysm/v7/beacon-chain/rpc/prysm/v1alpha1/validator" + "github.com/OffchainLabs/prysm/v7/beacon-chain/startup" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state/stategen" + chainSync "github.com/OffchainLabs/prysm/v7/beacon-chain/sync" + "github.com/OffchainLabs/prysm/v7/config/features" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/io/logs" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing" + ethpbv1alpha1 "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" middleware "github.com/grpc-ecosystem/go-grpc-middleware" recovery "github.com/grpc-ecosystem/go-grpc-middleware/recovery" grpcopentracing "github.com/grpc-ecosystem/go-grpc-middleware/tracing/opentracing" diff --git a/beacon-chain/rpc/service_test.go b/beacon-chain/rpc/service_test.go index 88873ecaaa..a9f128a080 100644 --- a/beacon-chain/rpc/service_test.go +++ b/beacon-chain/rpc/service_test.go @@ -7,12 +7,12 @@ import ( "testing" "time" - mock "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/testing" - mockExecution "github.com/OffchainLabs/prysm/v6/beacon-chain/execution/testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/startup" - mockSync "github.com/OffchainLabs/prysm/v6/beacon-chain/sync/initial-sync/testing" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" + mock "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain/testing" + mockExecution "github.com/OffchainLabs/prysm/v7/beacon-chain/execution/testing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/startup" + mockSync 
"github.com/OffchainLabs/prysm/v7/beacon-chain/sync/initial-sync/testing" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" "github.com/sirupsen/logrus" logTest "github.com/sirupsen/logrus/hooks/test" ) diff --git a/beacon-chain/rpc/testutil/BUILD.bazel b/beacon-chain/rpc/testutil/BUILD.bazel index 19b39afe2c..e0b883597e 100644 --- a/beacon-chain/rpc/testutil/BUILD.bazel +++ b/beacon-chain/rpc/testutil/BUILD.bazel @@ -11,7 +11,7 @@ go_library( "mock_sidecars.go", "mock_stater.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/beacon-chain/rpc/testutil", + importpath = "github.com/OffchainLabs/prysm/v7/beacon-chain/rpc/testutil", visibility = ["//beacon-chain:__subpackages__"], deps = [ "//beacon-chain/db:go_default_library", diff --git a/beacon-chain/rpc/testutil/db.go b/beacon-chain/rpc/testutil/db.go index 029f1fab95..5eb563bc5d 100644 --- a/beacon-chain/rpc/testutil/db.go +++ b/beacon-chain/rpc/testutil/db.go @@ -4,14 +4,14 @@ import ( "context" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/db" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - ethpbalpha "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/beacon-chain/db" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + ethpbalpha "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" ) func FillDBWithBlocks(ctx context.Context, t *testing.T, beaconDB db.Database) (*ethpbalpha.SignedBeaconBlock, []*ethpbalpha.BeaconBlockContainer) { diff --git a/beacon-chain/rpc/testutil/mock_blocker.go b/beacon-chain/rpc/testutil/mock_blocker.go index 284aa6158c..bfa4e3fbe7 100644 --- a/beacon-chain/rpc/testutil/mock_blocker.go +++ b/beacon-chain/rpc/testutil/mock_blocker.go @@ -4,12 +4,12 @@ import ( "context" "strconv" - "github.com/OffchainLabs/prysm/v6/beacon-chain/rpc/core" - "github.com/OffchainLabs/prysm/v6/beacon-chain/rpc/options" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/beacon-chain/rpc/core" + "github.com/OffchainLabs/prysm/v7/beacon-chain/rpc/options" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" ) // MockBlocker is a fake implementation of lookup.Blocker. 
diff --git a/beacon-chain/rpc/testutil/mock_genesis_timefetcher.go b/beacon-chain/rpc/testutil/mock_genesis_timefetcher.go index c7ac3c9f0e..c80ecdfacb 100644 --- a/beacon-chain/rpc/testutil/mock_genesis_timefetcher.go +++ b/beacon-chain/rpc/testutil/mock_genesis_timefetcher.go @@ -3,8 +3,8 @@ package testutil import ( "time" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" ) // MockGenesisTimeFetcher is a fake implementation of the blockchain.TimeFetcher diff --git a/beacon-chain/rpc/testutil/mock_sidecars.go b/beacon-chain/rpc/testutil/mock_sidecars.go index 14b7560db9..8b2bd2c381 100644 --- a/beacon-chain/rpc/testutil/mock_sidecars.go +++ b/beacon-chain/rpc/testutil/mock_sidecars.go @@ -1,6 +1,6 @@ package testutil -import ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" +import ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" // CreateDataColumnSidecar generates a filled dummy data column sidecar func CreateDataColumnSidecar(index uint64, data []byte) *ethpb.DataColumnSidecar { diff --git a/beacon-chain/rpc/testutil/mock_stater.go b/beacon-chain/rpc/testutil/mock_stater.go index bd0949ba69..15b0baea95 100644 --- a/beacon-chain/rpc/testutil/mock_stater.go +++ b/beacon-chain/rpc/testutil/mock_stater.go @@ -3,9 +3,9 @@ package testutil import ( "context" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" ) // MockStater is a fake implementation of lookup.Stater. 
diff --git a/beacon-chain/slasher/BUILD.bazel b/beacon-chain/slasher/BUILD.bazel index 415fe6cee1..42e844c5cf 100644 --- a/beacon-chain/slasher/BUILD.bazel +++ b/beacon-chain/slasher/BUILD.bazel @@ -16,7 +16,7 @@ go_library( "receive.go", "service.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/beacon-chain/slasher", + importpath = "github.com/OffchainLabs/prysm/v7/beacon-chain/slasher", visibility = [ "//beacon-chain:__subpackages__", "//cmd/prysmctl:__subpackages__", diff --git a/beacon-chain/slasher/chunks.go b/beacon-chain/slasher/chunks.go index 806ae6588b..aa979a32a8 100644 --- a/beacon-chain/slasher/chunks.go +++ b/beacon-chain/slasher/chunks.go @@ -6,11 +6,11 @@ import ( "fmt" "math" - "github.com/OffchainLabs/prysm/v6/beacon-chain/db" - slashertypes "github.com/OffchainLabs/prysm/v6/beacon-chain/slasher/types" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" + "github.com/OffchainLabs/prysm/v7/beacon-chain/db" + slashertypes "github.com/OffchainLabs/prysm/v7/beacon-chain/slasher/types" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" "github.com/pkg/errors" "github.com/sirupsen/logrus" ) diff --git a/beacon-chain/slasher/chunks_test.go b/beacon-chain/slasher/chunks_test.go index d139456a65..0f84d2b699 100644 --- a/beacon-chain/slasher/chunks_test.go +++ b/beacon-chain/slasher/chunks_test.go @@ -5,13 +5,13 @@ import ( "reflect" "testing" - dbtest "github.com/OffchainLabs/prysm/v6/beacon-chain/db/testing" - slashertypes "github.com/OffchainLabs/prysm/v6/beacon-chain/slasher/types" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" + dbtest "github.com/OffchainLabs/prysm/v7/beacon-chain/db/testing" + slashertypes "github.com/OffchainLabs/prysm/v7/beacon-chain/slasher/types" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" ) var ( diff --git a/beacon-chain/slasher/detect_attestations.go b/beacon-chain/slasher/detect_attestations.go index 07fb771e20..28cb94e4c8 100644 --- a/beacon-chain/slasher/detect_attestations.go +++ b/beacon-chain/slasher/detect_attestations.go @@ -7,12 +7,12 @@ import ( "maps" "slices" - slashertypes "github.com/OffchainLabs/prysm/v6/beacon-chain/slasher/types" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing/trace" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" + slashertypes "github.com/OffchainLabs/prysm/v7/beacon-chain/slasher/types" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing/trace" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + 
"github.com/OffchainLabs/prysm/v7/runtime/version" "github.com/pkg/errors" ) diff --git a/beacon-chain/slasher/detect_attestations_test.go b/beacon-chain/slasher/detect_attestations_test.go index 6801b2368a..ee5c9d2c3a 100644 --- a/beacon-chain/slasher/detect_attestations_test.go +++ b/beacon-chain/slasher/detect_attestations_test.go @@ -7,24 +7,24 @@ import ( "testing" "time" - mock "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/signing" - dbtest "github.com/OffchainLabs/prysm/v6/beacon-chain/db/testing" - slashingsmock "github.com/OffchainLabs/prysm/v6/beacon-chain/operations/slashings/mock" - slashertypes "github.com/OffchainLabs/prysm/v6/beacon-chain/slasher/types" - "github.com/OffchainLabs/prysm/v6/beacon-chain/startup" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/crypto/bls" - "github.com/OffchainLabs/prysm/v6/crypto/bls/common" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" - "github.com/OffchainLabs/prysm/v6/time/slots" + mock "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain/testing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/signing" + dbtest "github.com/OffchainLabs/prysm/v7/beacon-chain/db/testing" + slashingsmock "github.com/OffchainLabs/prysm/v7/beacon-chain/operations/slashings/mock" + slashertypes "github.com/OffchainLabs/prysm/v7/beacon-chain/slasher/types" + "github.com/OffchainLabs/prysm/v7/beacon-chain/startup" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/crypto/bls" + "github.com/OffchainLabs/prysm/v7/crypto/bls/common" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" + "github.com/OffchainLabs/prysm/v7/time/slots" logTest "github.com/sirupsen/logrus/hooks/test" ) diff --git a/beacon-chain/slasher/detect_blocks.go b/beacon-chain/slasher/detect_blocks.go index 09ade5420f..76904f1aa1 100644 --- a/beacon-chain/slasher/detect_blocks.go +++ b/beacon-chain/slasher/detect_blocks.go @@ -3,9 +3,9 @@ package slasher import ( "context" - slashertypes "github.com/OffchainLabs/prysm/v6/beacon-chain/slasher/types" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing/trace" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + slashertypes "github.com/OffchainLabs/prysm/v7/beacon-chain/slasher/types" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing/trace" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" "github.com/pkg/errors" ) diff --git a/beacon-chain/slasher/detect_blocks_test.go b/beacon-chain/slasher/detect_blocks_test.go index a90967ef3d..e7d84c6d11 100644 --- a/beacon-chain/slasher/detect_blocks_test.go +++ b/beacon-chain/slasher/detect_blocks_test.go @@ 
-4,21 +4,21 @@ import ( "context" "testing" - mock "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/signing" - dbtest "github.com/OffchainLabs/prysm/v6/beacon-chain/db/testing" - doublylinkedtree "github.com/OffchainLabs/prysm/v6/beacon-chain/forkchoice/doubly-linked-tree" - slashingsmock "github.com/OffchainLabs/prysm/v6/beacon-chain/operations/slashings/mock" - slashertypes "github.com/OffchainLabs/prysm/v6/beacon-chain/slasher/types" - "github.com/OffchainLabs/prysm/v6/beacon-chain/startup" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state/stategen" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/crypto/bls" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + mock "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain/testing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/signing" + dbtest "github.com/OffchainLabs/prysm/v7/beacon-chain/db/testing" + doublylinkedtree "github.com/OffchainLabs/prysm/v7/beacon-chain/forkchoice/doubly-linked-tree" + slashingsmock "github.com/OffchainLabs/prysm/v7/beacon-chain/operations/slashings/mock" + slashertypes "github.com/OffchainLabs/prysm/v7/beacon-chain/slasher/types" + "github.com/OffchainLabs/prysm/v7/beacon-chain/startup" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state/stategen" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/crypto/bls" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" logTest "github.com/sirupsen/logrus/hooks/test" ) diff --git a/beacon-chain/slasher/helpers.go b/beacon-chain/slasher/helpers.go index 63d7179007..5afde52d25 100644 --- a/beacon-chain/slasher/helpers.go +++ b/beacon-chain/slasher/helpers.go @@ -6,14 +6,14 @@ import ( "fmt" "strconv" - "github.com/OffchainLabs/prysm/v6/beacon-chain/db/slasherkv" - slashertypes "github.com/OffchainLabs/prysm/v6/beacon-chain/slasher/types" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/container/slice" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" + "github.com/OffchainLabs/prysm/v7/beacon-chain/db/slasherkv" + slashertypes "github.com/OffchainLabs/prysm/v7/beacon-chain/slasher/types" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/container/slice" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" "github.com/sirupsen/logrus" ) diff --git a/beacon-chain/slasher/helpers_test.go b/beacon-chain/slasher/helpers_test.go index ef3de7f83a..d959f0a150 100644 --- a/beacon-chain/slasher/helpers_test.go +++ b/beacon-chain/slasher/helpers_test.go @@ -4,13 +4,13 @@ import ( "reflect" 
"testing" - slashertypes "github.com/OffchainLabs/prysm/v6/beacon-chain/slasher/types" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" - "github.com/OffchainLabs/prysm/v6/testing/require" + slashertypes "github.com/OffchainLabs/prysm/v7/beacon-chain/slasher/types" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" + "github.com/OffchainLabs/prysm/v7/testing/require" logTest "github.com/sirupsen/logrus/hooks/test" ) diff --git a/beacon-chain/slasher/params.go b/beacon-chain/slasher/params.go index 729683ef74..e8ae28aa58 100644 --- a/beacon-chain/slasher/params.go +++ b/beacon-chain/slasher/params.go @@ -1,7 +1,7 @@ package slasher import ( - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" ssz "github.com/prysmaticlabs/fastssz" ) diff --git a/beacon-chain/slasher/params_test.go b/beacon-chain/slasher/params_test.go index aa00f840a0..f9c79c66ad 100644 --- a/beacon-chain/slasher/params_test.go +++ b/beacon-chain/slasher/params_test.go @@ -4,8 +4,8 @@ import ( "reflect" "testing" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/testing/assert" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/testing/assert" ssz "github.com/prysmaticlabs/fastssz" ) diff --git a/beacon-chain/slasher/process_slashings.go b/beacon-chain/slasher/process_slashings.go index 0551fc1977..339acdc8bb 100644 --- a/beacon-chain/slasher/process_slashings.go +++ b/beacon-chain/slasher/process_slashings.go @@ -3,10 +3,10 @@ package slasher import ( "context" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/blocks" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/blocks" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" "github.com/pkg/errors" ) diff --git a/beacon-chain/slasher/process_slashings_test.go b/beacon-chain/slasher/process_slashings_test.go index 658008c927..d5934c54a6 100644 --- a/beacon-chain/slasher/process_slashings_test.go +++ b/beacon-chain/slasher/process_slashings_test.go @@ -3,19 +3,19 @@ package slasher import ( "testing" - mock "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/signing" - dbtest "github.com/OffchainLabs/prysm/v6/beacon-chain/db/testing" - doublylinkedtree "github.com/OffchainLabs/prysm/v6/beacon-chain/forkchoice/doubly-linked-tree" - slashingsmock "github.com/OffchainLabs/prysm/v6/beacon-chain/operations/slashings/mock" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state/stategen" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - 
"github.com/OffchainLabs/prysm/v6/crypto/bls" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + mock "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain/testing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/signing" + dbtest "github.com/OffchainLabs/prysm/v7/beacon-chain/db/testing" + doublylinkedtree "github.com/OffchainLabs/prysm/v7/beacon-chain/forkchoice/doubly-linked-tree" + slashingsmock "github.com/OffchainLabs/prysm/v7/beacon-chain/operations/slashings/mock" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state/stategen" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/crypto/bls" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" logTest "github.com/sirupsen/logrus/hooks/test" ) diff --git a/beacon-chain/slasher/queue.go b/beacon-chain/slasher/queue.go index b31c5c528e..2eff17bc0b 100644 --- a/beacon-chain/slasher/queue.go +++ b/beacon-chain/slasher/queue.go @@ -3,7 +3,7 @@ package slasher import ( "sync" - slashertypes "github.com/OffchainLabs/prysm/v6/beacon-chain/slasher/types" + slashertypes "github.com/OffchainLabs/prysm/v7/beacon-chain/slasher/types" ) // Struct for handling a thread-safe list of indexed attestation wrappers. diff --git a/beacon-chain/slasher/queue_test.go b/beacon-chain/slasher/queue_test.go index 06f7155211..2266ab2b40 100644 --- a/beacon-chain/slasher/queue_test.go +++ b/beacon-chain/slasher/queue_test.go @@ -3,10 +3,10 @@ package slasher import ( "testing" - slashertypes "github.com/OffchainLabs/prysm/v6/beacon-chain/slasher/types" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/runtime/version" - "github.com/OffchainLabs/prysm/v6/testing/require" + slashertypes "github.com/OffchainLabs/prysm/v7/beacon-chain/slasher/types" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/runtime/version" + "github.com/OffchainLabs/prysm/v7/testing/require" ) func Test_attestationsQueue(t *testing.T) { diff --git a/beacon-chain/slasher/receive.go b/beacon-chain/slasher/receive.go index 0902c5942b..e3984fc7e1 100644 --- a/beacon-chain/slasher/receive.go +++ b/beacon-chain/slasher/receive.go @@ -4,11 +4,11 @@ import ( "context" "time" - slashertypes "github.com/OffchainLabs/prysm/v6/beacon-chain/slasher/types" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/time/slots" + slashertypes "github.com/OffchainLabs/prysm/v7/beacon-chain/slasher/types" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/time/slots" "github.com/pkg/errors" "github.com/sirupsen/logrus" ) diff --git a/beacon-chain/slasher/receive_test.go b/beacon-chain/slasher/receive_test.go index b059cf31ed..4d39f46ea1 100644 --- a/beacon-chain/slasher/receive_test.go +++ 
b/beacon-chain/slasher/receive_test.go @@ -4,19 +4,19 @@ import ( "context" "testing" - "github.com/OffchainLabs/prysm/v6/async/event" - mock "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/testing" - dbtest "github.com/OffchainLabs/prysm/v6/beacon-chain/db/testing" - slashertypes "github.com/OffchainLabs/prysm/v6/beacon-chain/slasher/types" - "github.com/OffchainLabs/prysm/v6/beacon-chain/startup" - params2 "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/async/event" + mock "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain/testing" + dbtest "github.com/OffchainLabs/prysm/v7/beacon-chain/db/testing" + slashertypes "github.com/OffchainLabs/prysm/v7/beacon-chain/slasher/types" + "github.com/OffchainLabs/prysm/v7/beacon-chain/startup" + params2 "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" logTest "github.com/sirupsen/logrus/hooks/test" ) diff --git a/beacon-chain/slasher/service.go b/beacon-chain/slasher/service.go index d660bdca0d..870fcb5c37 100644 --- a/beacon-chain/slasher/service.go +++ b/beacon-chain/slasher/service.go @@ -9,19 +9,19 @@ import ( "sync" "time" - "github.com/OffchainLabs/prysm/v6/async/event" - "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain" - statefeed "github.com/OffchainLabs/prysm/v6/beacon-chain/core/feed/state" - "github.com/OffchainLabs/prysm/v6/beacon-chain/db" - "github.com/OffchainLabs/prysm/v6/beacon-chain/operations/slashings" - "github.com/OffchainLabs/prysm/v6/beacon-chain/slasher/types" - "github.com/OffchainLabs/prysm/v6/beacon-chain/startup" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state/stategen" - beaconChainSync "github.com/OffchainLabs/prysm/v6/beacon-chain/sync" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/async/event" + "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain" + statefeed "github.com/OffchainLabs/prysm/v7/beacon-chain/core/feed/state" + "github.com/OffchainLabs/prysm/v7/beacon-chain/db" + "github.com/OffchainLabs/prysm/v7/beacon-chain/operations/slashings" + "github.com/OffchainLabs/prysm/v7/beacon-chain/slasher/types" + "github.com/OffchainLabs/prysm/v7/beacon-chain/startup" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state/stategen" + beaconChainSync "github.com/OffchainLabs/prysm/v7/beacon-chain/sync" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/time/slots" ) const ( diff --git 
a/beacon-chain/slasher/service_test.go b/beacon-chain/slasher/service_test.go index 46a5ea0a17..6f9225964a 100644 --- a/beacon-chain/slasher/service_test.go +++ b/beacon-chain/slasher/service_test.go @@ -6,15 +6,15 @@ import ( "testing" "time" - "github.com/OffchainLabs/prysm/v6/async/event" - mock "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/testing" - dbtest "github.com/OffchainLabs/prysm/v6/beacon-chain/db/testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/startup" - mockSync "github.com/OffchainLabs/prysm/v6/beacon-chain/sync/initial-sync/testing" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/async/event" + mock "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain/testing" + dbtest "github.com/OffchainLabs/prysm/v7/beacon-chain/db/testing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/startup" + mockSync "github.com/OffchainLabs/prysm/v7/beacon-chain/sync/initial-sync/testing" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" + "github.com/OffchainLabs/prysm/v7/time/slots" "github.com/sirupsen/logrus" logTest "github.com/sirupsen/logrus/hooks/test" ) diff --git a/beacon-chain/slasher/types/BUILD.bazel b/beacon-chain/slasher/types/BUILD.bazel index 95f4af3480..64662821e4 100644 --- a/beacon-chain/slasher/types/BUILD.bazel +++ b/beacon-chain/slasher/types/BUILD.bazel @@ -3,7 +3,7 @@ load("@prysm//tools/go:def.bzl", "go_library") go_library( name = "go_default_library", srcs = ["types.go"], - importpath = "github.com/OffchainLabs/prysm/v6/beacon-chain/slasher/types", + importpath = "github.com/OffchainLabs/prysm/v7/beacon-chain/slasher/types", visibility = [ "//beacon-chain:__subpackages__", "//cmd/prysmctl:__subpackages__", diff --git a/beacon-chain/slasher/types/types.go b/beacon-chain/slasher/types/types.go index 6921bc7e9e..4b5696cad7 100644 --- a/beacon-chain/slasher/types/types.go +++ b/beacon-chain/slasher/types/types.go @@ -1,8 +1,8 @@ package types import ( - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" ) // ChunkKind to differentiate what kind of span we are working diff --git a/beacon-chain/startup/BUILD.bazel b/beacon-chain/startup/BUILD.bazel index 0134e304d4..0ed66d12c4 100644 --- a/beacon-chain/startup/BUILD.bazel +++ b/beacon-chain/startup/BUILD.bazel @@ -7,7 +7,7 @@ go_library( "synchronizer.go", "testing.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/beacon-chain/startup", + importpath = "github.com/OffchainLabs/prysm/v7/beacon-chain/startup", visibility = ["//visibility:public"], deps = [ "//consensus-types/primitives:go_default_library", diff --git a/beacon-chain/startup/clock.go b/beacon-chain/startup/clock.go index fb4f19be8b..910869e521 100644 --- a/beacon-chain/startup/clock.go +++ b/beacon-chain/startup/clock.go @@ -3,8 +3,8 @@ package startup import ( "time" - types "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/time/slots" + types "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/time/slots" ) // Nower is a 
function that can return the current time. diff --git a/beacon-chain/startup/clock_test.go b/beacon-chain/startup/clock_test.go index 16cb65dfa6..346e8e2863 100644 --- a/beacon-chain/startup/clock_test.go +++ b/beacon-chain/startup/clock_test.go @@ -4,9 +4,9 @@ import ( "testing" "time" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/testing/require" ) func TestClock(t *testing.T) { diff --git a/beacon-chain/startup/synchronizer_test.go b/beacon-chain/startup/synchronizer_test.go index 630510955e..544fc01c1b 100644 --- a/beacon-chain/startup/synchronizer_test.go +++ b/beacon-chain/startup/synchronizer_test.go @@ -5,8 +5,8 @@ import ( "testing" "time" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/testing/require" ) func TestSynchronizerErrOnSecondSet(t *testing.T) { diff --git a/beacon-chain/startup/testing.go b/beacon-chain/startup/testing.go index 4b4e6b51e3..175fa0a3b6 100644 --- a/beacon-chain/startup/testing.go +++ b/beacon-chain/startup/testing.go @@ -4,8 +4,8 @@ import ( "testing" "time" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/time/slots" ) // MockNower is a mock implementation of the Nower interface for use in tests. diff --git a/beacon-chain/state/BUILD.bazel b/beacon-chain/state/BUILD.bazel index 3452fe1f32..af0a4bc5ae 100644 --- a/beacon-chain/state/BUILD.bazel +++ b/beacon-chain/state/BUILD.bazel @@ -7,7 +7,7 @@ go_library( "interfaces.go", "prometheus.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/beacon-chain/state", + importpath = "github.com/OffchainLabs/prysm/v7/beacon-chain/state", visibility = ["//visibility:public"], deps = [ "//beacon-chain/state/state-native/custom-types:go_default_library", diff --git a/beacon-chain/state/fieldtrie/BUILD.bazel b/beacon-chain/state/fieldtrie/BUILD.bazel index 2d5fa9e8ca..6ddba6999b 100644 --- a/beacon-chain/state/fieldtrie/BUILD.bazel +++ b/beacon-chain/state/fieldtrie/BUILD.bazel @@ -6,7 +6,7 @@ go_library( "field_trie.go", "field_trie_helpers.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/beacon-chain/state/fieldtrie", + importpath = "github.com/OffchainLabs/prysm/v7/beacon-chain/state/fieldtrie", visibility = ["//visibility:public"], deps = [ "//beacon-chain/state/state-native/custom-types:go_default_library", diff --git a/beacon-chain/state/fieldtrie/field_trie.go b/beacon-chain/state/fieldtrie/field_trie.go index eb32491867..abcb25b5cc 100644 --- a/beacon-chain/state/fieldtrie/field_trie.go +++ b/beacon-chain/state/fieldtrie/field_trie.go @@ -4,10 +4,10 @@ import ( "reflect" "sync" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native/types" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state/stateutil" - multi_value_slice "github.com/OffchainLabs/prysm/v6/container/multi-value-slice" - pmath "github.com/OffchainLabs/prysm/v6/math" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native/types" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state/stateutil" + multi_value_slice 
"github.com/OffchainLabs/prysm/v7/container/multi-value-slice" + pmath "github.com/OffchainLabs/prysm/v7/math" "github.com/pkg/errors" ) diff --git a/beacon-chain/state/fieldtrie/field_trie_helpers.go b/beacon-chain/state/fieldtrie/field_trie_helpers.go index 4af382176c..07667eb0eb 100644 --- a/beacon-chain/state/fieldtrie/field_trie_helpers.go +++ b/beacon-chain/state/fieldtrie/field_trie_helpers.go @@ -5,12 +5,12 @@ import ( "fmt" "reflect" - customtypes "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native/custom-types" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native/types" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state/stateutil" - multi_value_slice "github.com/OffchainLabs/prysm/v6/container/multi-value-slice" - pmath "github.com/OffchainLabs/prysm/v6/math" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + customtypes "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native/custom-types" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native/types" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state/stateutil" + multi_value_slice "github.com/OffchainLabs/prysm/v7/container/multi-value-slice" + pmath "github.com/OffchainLabs/prysm/v7/math" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" "github.com/pkg/errors" ) diff --git a/beacon-chain/state/fieldtrie/field_trie_test.go b/beacon-chain/state/fieldtrie/field_trie_test.go index 34458c76b6..b6beef89cc 100644 --- a/beacon-chain/state/fieldtrie/field_trie_test.go +++ b/beacon-chain/state/fieldtrie/field_trie_test.go @@ -3,17 +3,17 @@ package fieldtrie_test import ( "testing" - . "github.com/OffchainLabs/prysm/v6/beacon-chain/state/fieldtrie" - customtypes "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native/custom-types" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native/types" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state/stateutil" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - mvslice "github.com/OffchainLabs/prysm/v6/container/multi-value-slice" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + . 
"github.com/OffchainLabs/prysm/v7/beacon-chain/state/fieldtrie" + customtypes "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native/custom-types" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native/types" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state/stateutil" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + mvslice "github.com/OffchainLabs/prysm/v7/container/multi-value-slice" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" ) func TestFieldTrie_NewTrie(t *testing.T) { diff --git a/beacon-chain/state/fieldtrie/helpers_test.go b/beacon-chain/state/fieldtrie/helpers_test.go index 0de4cc339b..3ba766e9dd 100644 --- a/beacon-chain/state/fieldtrie/helpers_test.go +++ b/beacon-chain/state/fieldtrie/helpers_test.go @@ -6,15 +6,15 @@ import ( "sync" "testing" - customtypes "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native/custom-types" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native/types" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state/stateutil" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - mvslice "github.com/OffchainLabs/prysm/v6/container/multi-value-slice" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" + customtypes "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native/custom-types" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native/types" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state/stateutil" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + mvslice "github.com/OffchainLabs/prysm/v7/container/multi-value-slice" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" "github.com/ethereum/go-ethereum/common/hexutil" ) diff --git a/beacon-chain/state/interfaces.go b/beacon-chain/state/interfaces.go index 38b1421e95..51511fe9f5 100644 --- a/beacon-chain/state/interfaces.go +++ b/beacon-chain/state/interfaces.go @@ -9,13 +9,13 @@ import ( "time" "github.com/OffchainLabs/go-bitfield" - customtypes "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native/custom-types" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/crypto/bls" - enginev1 "github.com/OffchainLabs/prysm/v6/proto/engine/v1" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + customtypes "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native/custom-types" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/crypto/bls" + enginev1 "github.com/OffchainLabs/prysm/v7/proto/engine/v1" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" ) // BeaconState has read and write access to beacon state methods. 
diff --git a/beacon-chain/state/state-native/BUILD.bazel b/beacon-chain/state/state-native/BUILD.bazel index 6d7b6d19c9..08c1075712 100644 --- a/beacon-chain/state/state-native/BUILD.bazel +++ b/beacon-chain/state/state-native/BUILD.bazel @@ -49,7 +49,7 @@ go_library( "state_trie.go", "types.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native", + importpath = "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native", visibility = ["//visibility:public"], deps = [ "//beacon-chain/core/helpers:go_default_library", diff --git a/beacon-chain/state/state-native/beacon_state.go b/beacon-chain/state/state-native/beacon_state.go index e44579978d..07b950871c 100644 --- a/beacon-chain/state/state-native/beacon_state.go +++ b/beacon-chain/state/state-native/beacon_state.go @@ -5,13 +5,13 @@ import ( "sync" "github.com/OffchainLabs/go-bitfield" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state/fieldtrie" - customtypes "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native/custom-types" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native/types" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state/stateutil" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - enginev1 "github.com/OffchainLabs/prysm/v6/proto/engine/v1" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state/fieldtrie" + customtypes "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native/custom-types" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native/types" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state/stateutil" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + enginev1 "github.com/OffchainLabs/prysm/v7/proto/engine/v1" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" ) // BeaconState defines a struct containing utilities for the Ethereum Beacon Chain state, defining diff --git a/beacon-chain/state/state-native/custom-types/BUILD.bazel b/beacon-chain/state/state-native/custom-types/BUILD.bazel index 6a5cc31e26..4f2423b296 100644 --- a/beacon-chain/state/state-native/custom-types/BUILD.bazel +++ b/beacon-chain/state/state-native/custom-types/BUILD.bazel @@ -9,7 +9,7 @@ go_library( "randao_mixes.go", "state_roots.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native/custom-types", + importpath = "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native/custom-types", visibility = ["//visibility:public"], deps = [ "//config/fieldparams:go_default_library", diff --git a/beacon-chain/state/state-native/custom-types/block_roots.go b/beacon-chain/state/state-native/custom-types/block_roots.go index d67cc7a178..69c9f8c7bf 100644 --- a/beacon-chain/state/state-native/custom-types/block_roots.go +++ b/beacon-chain/state/state-native/custom-types/block_roots.go @@ -3,7 +3,7 @@ package customtypes import ( "fmt" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" fssz "github.com/prysmaticlabs/fastssz" ) diff --git a/beacon-chain/state/state-native/custom-types/block_roots_test.go b/beacon-chain/state/state-native/custom-types/block_roots_test.go index a67bf4a52a..6c917effc5 100644 --- a/beacon-chain/state/state-native/custom-types/block_roots_test.go +++ b/beacon-chain/state/state-native/custom-types/block_roots_test.go @@ -4,8 +4,8 @@ import ( "reflect" "testing" - fieldparams 
"github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/testing/assert" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/testing/assert" ) func TestBlockRoots_UnmarshalSSZ(t *testing.T) { diff --git a/beacon-chain/state/state-native/custom-types/historical_roots_test.go b/beacon-chain/state/state-native/custom-types/historical_roots_test.go index a80211c69e..97b8f19c61 100644 --- a/beacon-chain/state/state-native/custom-types/historical_roots_test.go +++ b/beacon-chain/state/state-native/custom-types/historical_roots_test.go @@ -4,7 +4,7 @@ import ( "reflect" "testing" - "github.com/OffchainLabs/prysm/v6/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/assert" ) func TestHistoricalRoots_Casting(t *testing.T) { diff --git a/beacon-chain/state/state-native/custom-types/randao_mixes.go b/beacon-chain/state/state-native/custom-types/randao_mixes.go index 56d9893bad..7fab54759c 100644 --- a/beacon-chain/state/state-native/custom-types/randao_mixes.go +++ b/beacon-chain/state/state-native/custom-types/randao_mixes.go @@ -3,7 +3,7 @@ package customtypes import ( "fmt" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" fssz "github.com/prysmaticlabs/fastssz" ) diff --git a/beacon-chain/state/state-native/custom-types/randao_mixes_test.go b/beacon-chain/state/state-native/custom-types/randao_mixes_test.go index 593a24e4da..1e1832cd8b 100644 --- a/beacon-chain/state/state-native/custom-types/randao_mixes_test.go +++ b/beacon-chain/state/state-native/custom-types/randao_mixes_test.go @@ -4,8 +4,8 @@ import ( "reflect" "testing" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/testing/assert" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/testing/assert" ) func TestRandaoMixes_UnmarshalSSZ(t *testing.T) { diff --git a/beacon-chain/state/state-native/custom-types/state_roots.go b/beacon-chain/state/state-native/custom-types/state_roots.go index aafc2391ac..37a367483c 100644 --- a/beacon-chain/state/state-native/custom-types/state_roots.go +++ b/beacon-chain/state/state-native/custom-types/state_roots.go @@ -3,7 +3,7 @@ package customtypes import ( "fmt" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" fssz "github.com/prysmaticlabs/fastssz" ) diff --git a/beacon-chain/state/state-native/custom-types/state_roots_test.go b/beacon-chain/state/state-native/custom-types/state_roots_test.go index 099dd204fd..1c157a8c1a 100644 --- a/beacon-chain/state/state-native/custom-types/state_roots_test.go +++ b/beacon-chain/state/state-native/custom-types/state_roots_test.go @@ -4,8 +4,8 @@ import ( "reflect" "testing" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/testing/assert" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/testing/assert" ) func TestStateRoots_UnmarshalSSZ(t *testing.T) { diff --git a/beacon-chain/state/state-native/getters_attestation.go b/beacon-chain/state/state-native/getters_attestation.go index 3a156041cb..ee3a24e2ca 100644 --- a/beacon-chain/state/state-native/getters_attestation.go +++ b/beacon-chain/state/state-native/getters_attestation.go @@ -1,8 +1,8 @@ package state_native import ( - ethpb 
"github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" ) // PreviousEpochAttestations corresponding to blocks on the beacon chain. diff --git a/beacon-chain/state/state-native/getters_attestation_test.go b/beacon-chain/state/state-native/getters_attestation_test.go index 6c3bca31ac..1a5f8b8876 100644 --- a/beacon-chain/state/state-native/getters_attestation_test.go +++ b/beacon-chain/state/state-native/getters_attestation_test.go @@ -3,8 +3,8 @@ package state_native import ( "testing" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/require" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/require" ) func TestBeaconState_PreviousEpochAttestations(t *testing.T) { diff --git a/beacon-chain/state/state-native/getters_block.go b/beacon-chain/state/state-native/getters_block.go index aab46231d0..cacac351c5 100644 --- a/beacon-chain/state/state-native/getters_block.go +++ b/beacon-chain/state/state-native/getters_block.go @@ -1,8 +1,8 @@ package state_native import ( - customtypes "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native/custom-types" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + customtypes "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native/custom-types" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" ) // LatestBlockHeader stored within the beacon state. diff --git a/beacon-chain/state/state-native/getters_block_test.go b/beacon-chain/state/state-native/getters_block_test.go index 6889a0a045..b64d2a51e9 100644 --- a/beacon-chain/state/state-native/getters_block_test.go +++ b/beacon-chain/state/state-native/getters_block_test.go @@ -3,9 +3,9 @@ package state_native import ( "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - testtmpl "github.com/OffchainLabs/prysm/v6/beacon-chain/state/testing" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + testtmpl "github.com/OffchainLabs/prysm/v7/beacon-chain/state/testing" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" ) func TestBeaconState_LatestBlockHeader_Phase0(t *testing.T) { diff --git a/beacon-chain/state/state-native/getters_checkpoint.go b/beacon-chain/state/state-native/getters_checkpoint.go index 76b760e170..45024d6a77 100644 --- a/beacon-chain/state/state-native/getters_checkpoint.go +++ b/beacon-chain/state/state-native/getters_checkpoint.go @@ -4,8 +4,8 @@ import ( "bytes" "github.com/OffchainLabs/go-bitfield" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" ) // JustificationBits marking which epochs have been justified in the beacon chain. 
diff --git a/beacon-chain/state/state-native/getters_checkpoint_test.go b/beacon-chain/state/state-native/getters_checkpoint_test.go index 852d48cf71..03db7ec6f2 100644 --- a/beacon-chain/state/state-native/getters_checkpoint_test.go +++ b/beacon-chain/state/state-native/getters_checkpoint_test.go @@ -4,9 +4,9 @@ import ( "testing" "github.com/OffchainLabs/go-bitfield" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - testtmpl "github.com/OffchainLabs/prysm/v6/beacon-chain/state/testing" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + testtmpl "github.com/OffchainLabs/prysm/v7/beacon-chain/state/testing" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" ) func TestBeaconState_PreviousJustifiedCheckpointNil_Phase0(t *testing.T) { diff --git a/beacon-chain/state/state-native/getters_consolidation.go b/beacon-chain/state/state-native/getters_consolidation.go index 3546ba8c6c..6d8e0e86eb 100644 --- a/beacon-chain/state/state-native/getters_consolidation.go +++ b/beacon-chain/state/state-native/getters_consolidation.go @@ -1,9 +1,9 @@ package state_native import ( - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" ) // EarliestConsolidationEpoch is a non-mutating call to the beacon state which returns the value of diff --git a/beacon-chain/state/state-native/getters_consolidation_test.go b/beacon-chain/state/state-native/getters_consolidation_test.go index e31a5a21eb..e31c115c29 100644 --- a/beacon-chain/state/state-native/getters_consolidation_test.go +++ b/beacon-chain/state/state-native/getters_consolidation_test.go @@ -3,10 +3,10 @@ package state_native_test import ( "testing" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/require" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/require" ) func TestEarliestConsolidationEpoch(t *testing.T) { diff --git a/beacon-chain/state/state-native/getters_deposit_requests.go b/beacon-chain/state/state-native/getters_deposit_requests.go index 2d859bff2d..ec234e953c 100644 --- a/beacon-chain/state/state-native/getters_deposit_requests.go +++ b/beacon-chain/state/state-native/getters_deposit_requests.go @@ -1,7 +1,7 @@ package state_native import ( - "github.com/OffchainLabs/prysm/v6/runtime/version" + "github.com/OffchainLabs/prysm/v7/runtime/version" ) // DepositRequestsStartIndex is used for returning the deposit requests start index which is used for eip6110 diff --git a/beacon-chain/state/state-native/getters_deposit_requests_test.go b/beacon-chain/state/state-native/getters_deposit_requests_test.go index 45905d11f9..d8b83fafcf 100644 --- a/beacon-chain/state/state-native/getters_deposit_requests_test.go +++ b/beacon-chain/state/state-native/getters_deposit_requests_test.go @@ -3,10 +3,10 @@ package state_native_test import ( "testing" - 
state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" ) func TestDepositRequestsStartIndex(t *testing.T) { diff --git a/beacon-chain/state/state-native/getters_deposits.go b/beacon-chain/state/state-native/getters_deposits.go index 3184462245..cca78de8e7 100644 --- a/beacon-chain/state/state-native/getters_deposits.go +++ b/beacon-chain/state/state-native/getters_deposits.go @@ -1,9 +1,9 @@ package state_native import ( - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" ) // DepositBalanceToConsume is a non-mutating call to the beacon state which returns the value of the diff --git a/beacon-chain/state/state-native/getters_deposits_test.go b/beacon-chain/state/state-native/getters_deposits_test.go index 692af81998..b7047aae98 100644 --- a/beacon-chain/state/state-native/getters_deposits_test.go +++ b/beacon-chain/state/state-native/getters_deposits_test.go @@ -3,10 +3,10 @@ package state_native_test import ( "testing" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - eth "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/require" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + eth "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/require" ) func TestDepositBalanceToConsume(t *testing.T) { diff --git a/beacon-chain/state/state-native/getters_eth1.go b/beacon-chain/state/state-native/getters_eth1.go index 9536be0543..6abf95bc28 100644 --- a/beacon-chain/state/state-native/getters_eth1.go +++ b/beacon-chain/state/state-native/getters_eth1.go @@ -1,7 +1,7 @@ package state_native import ( - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" ) // Eth1Data corresponding to the proof-of-work chain information stored in the beacon state. 
diff --git a/beacon-chain/state/state-native/getters_exit.go b/beacon-chain/state/state-native/getters_exit.go index 3aa16e3840..4ec7f9f95e 100644 --- a/beacon-chain/state/state-native/getters_exit.go +++ b/beacon-chain/state/state-native/getters_exit.go @@ -1,8 +1,8 @@ package state_native import ( - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/runtime/version" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/runtime/version" ) // ExitBalanceToConsume is used for returning the ExitBalanceToConsume as part of eip 7251 diff --git a/beacon-chain/state/state-native/getters_exit_test.go b/beacon-chain/state/state-native/getters_exit_test.go index ce0f808b5b..87c9f5fa12 100644 --- a/beacon-chain/state/state-native/getters_exit_test.go +++ b/beacon-chain/state/state-native/getters_exit_test.go @@ -3,11 +3,11 @@ package state_native_test import ( "testing" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" ) func TestExitBalanceToConsume(t *testing.T) { diff --git a/beacon-chain/state/state-native/getters_misc.go b/beacon-chain/state/state-native/getters_misc.go index 1f0e31abde..3db34886ab 100644 --- a/beacon-chain/state/state-native/getters_misc.go +++ b/beacon-chain/state/state-native/getters_misc.go @@ -3,9 +3,9 @@ package state_native import ( "time" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" ) // Id is the identifier of the beacon state. diff --git a/beacon-chain/state/state-native/getters_participation.go b/beacon-chain/state/state-native/getters_participation.go index 16f40641b5..237039602f 100644 --- a/beacon-chain/state/state-native/getters_participation.go +++ b/beacon-chain/state/state-native/getters_participation.go @@ -1,10 +1,10 @@ package state_native import ( - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/time" - customtypes "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native/custom-types" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state/stateutil" - "github.com/OffchainLabs/prysm/v6/runtime/version" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/time" + customtypes "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native/custom-types" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state/stateutil" + "github.com/OffchainLabs/prysm/v7/runtime/version" ) // CurrentEpochParticipation corresponding to participation bits on the beacon chain. 
diff --git a/beacon-chain/state/state-native/getters_participation_test.go b/beacon-chain/state/state-native/getters_participation_test.go index fc2ba635a3..7e18caad65 100644 --- a/beacon-chain/state/state-native/getters_participation_test.go +++ b/beacon-chain/state/state-native/getters_participation_test.go @@ -3,9 +3,9 @@ package state_native import ( "testing" - "github.com/OffchainLabs/prysm/v6/config/params" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/config/params" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/require" ) func TestState_UnrealizedCheckpointBalances(t *testing.T) { diff --git a/beacon-chain/state/state-native/getters_payload_header.go b/beacon-chain/state/state-native/getters_payload_header.go index 29e54b4e3a..e3add2d198 100644 --- a/beacon-chain/state/state-native/getters_payload_header.go +++ b/beacon-chain/state/state-native/getters_payload_header.go @@ -3,9 +3,9 @@ package state_native import ( "fmt" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - "github.com/OffchainLabs/prysm/v6/runtime/version" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + "github.com/OffchainLabs/prysm/v7/runtime/version" ) // LatestExecutionPayloadHeader of the beacon state. diff --git a/beacon-chain/state/state-native/getters_proposer_lookahead.go b/beacon-chain/state/state-native/getters_proposer_lookahead.go index 1f453f8024..cad8276eef 100644 --- a/beacon-chain/state/state-native/getters_proposer_lookahead.go +++ b/beacon-chain/state/state-native/getters_proposer_lookahead.go @@ -3,8 +3,8 @@ package state_native import ( "slices" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/runtime/version" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/runtime/version" ) // ProposerLookahead is a non-mutating call to the beacon state which returns a slice of diff --git a/beacon-chain/state/state-native/getters_randao.go b/beacon-chain/state/state-native/getters_randao.go index 29cee7c693..d0f24f2626 100644 --- a/beacon-chain/state/state-native/getters_randao.go +++ b/beacon-chain/state/state-native/getters_randao.go @@ -1,7 +1,7 @@ package state_native import ( - customtypes "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native/custom-types" + customtypes "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native/custom-types" ) // RandaoMixes of block proposers on the beacon chain. 
diff --git a/beacon-chain/state/state-native/getters_setters_lookahead_test.go b/beacon-chain/state/state-native/getters_setters_lookahead_test.go index f07dcbf168..ed745e377f 100644 --- a/beacon-chain/state/state-native/getters_setters_lookahead_test.go +++ b/beacon-chain/state/state-native/getters_setters_lookahead_test.go @@ -3,11 +3,11 @@ package state_native_test import ( "testing" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/require" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/require" ) func TestProposerLookahead(t *testing.T) { diff --git a/beacon-chain/state/state-native/getters_state.go b/beacon-chain/state/state-native/getters_state.go index a347bc9992..bb00922978 100644 --- a/beacon-chain/state/state-native/getters_state.go +++ b/beacon-chain/state/state-native/getters_state.go @@ -1,9 +1,9 @@ package state_native import ( - customtypes "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native/custom-types" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" + customtypes "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native/custom-types" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" "github.com/pkg/errors" ) diff --git a/beacon-chain/state/state-native/getters_sync_committee.go b/beacon-chain/state/state-native/getters_sync_committee.go index 5332a15ff5..6417d7686b 100644 --- a/beacon-chain/state/state-native/getters_sync_committee.go +++ b/beacon-chain/state/state-native/getters_sync_committee.go @@ -1,9 +1,9 @@ package state_native import ( - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" ) // CurrentSyncCommittee of the current sync committee in beacon chain state. 
diff --git a/beacon-chain/state/state-native/getters_test.go b/beacon-chain/state/state-native/getters_test.go index c6aad62de9..f7ca0fec96 100644 --- a/beacon-chain/state/state-native/getters_test.go +++ b/beacon-chain/state/state-native/getters_test.go @@ -3,9 +3,9 @@ package state_native import ( "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - testtmpl "github.com/OffchainLabs/prysm/v6/beacon-chain/state/testing" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + testtmpl "github.com/OffchainLabs/prysm/v7/beacon-chain/state/testing" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" ) func TestBeaconState_SlotDataRace_Phase0(t *testing.T) { diff --git a/beacon-chain/state/state-native/getters_validator.go b/beacon-chain/state/state-native/getters_validator.go index 3e75b92d78..037418e167 100644 --- a/beacon-chain/state/state-native/getters_validator.go +++ b/beacon-chain/state/state-native/getters_validator.go @@ -1,13 +1,13 @@ package state_native import ( - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/crypto/bls" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/crypto/bls" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" ) // Validators participating in consensus on the beacon chain. 
diff --git a/beacon-chain/state/state-native/getters_validator_test.go b/beacon-chain/state/state-native/getters_validator_test.go index 26a1e44e68..3179d14537 100644 --- a/beacon-chain/state/state-native/getters_validator_test.go +++ b/beacon-chain/state/state-native/getters_validator_test.go @@ -3,14 +3,14 @@ package state_native_test import ( "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - statenative "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - testtmpl "github.com/OffchainLabs/prysm/v6/beacon-chain/state/testing" - "github.com/OffchainLabs/prysm/v6/crypto/bls" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + statenative "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + testtmpl "github.com/OffchainLabs/prysm/v7/beacon-chain/state/testing" + "github.com/OffchainLabs/prysm/v7/crypto/bls" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" "github.com/ethereum/go-ethereum/common/hexutil" ) diff --git a/beacon-chain/state/state-native/getters_withdrawal.go b/beacon-chain/state/state-native/getters_withdrawal.go index 2713738037..3b6ea2a3d0 100644 --- a/beacon-chain/state/state-native/getters_withdrawal.go +++ b/beacon-chain/state/state-native/getters_withdrawal.go @@ -3,15 +3,15 @@ package state_native import ( "fmt" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - mathutil "github.com/OffchainLabs/prysm/v6/math" - enginev1 "github.com/OffchainLabs/prysm/v6/proto/engine/v1" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + mathutil "github.com/OffchainLabs/prysm/v7/math" + enginev1 "github.com/OffchainLabs/prysm/v7/proto/engine/v1" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" + "github.com/OffchainLabs/prysm/v7/time/slots" "github.com/pkg/errors" ) diff --git a/beacon-chain/state/state-native/getters_withdrawal_test.go b/beacon-chain/state/state-native/getters_withdrawal_test.go index d9d3d7513e..4154504dc2 100644 --- a/beacon-chain/state/state-native/getters_withdrawal_test.go +++ b/beacon-chain/state/state-native/getters_withdrawal_test.go @@ -3,15 +3,15 @@ package state_native_test import ( "testing" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - enginev1 "github.com/OffchainLabs/prysm/v6/proto/engine/v1" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" - "github.com/OffchainLabs/prysm/v6/testing/assert" - 
"github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + enginev1 "github.com/OffchainLabs/prysm/v7/proto/engine/v1" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" "github.com/golang/snappy" ) diff --git a/beacon-chain/state/state-native/hasher.go b/beacon-chain/state/state-native/hasher.go index 7f349a048a..c0c19f9f0e 100644 --- a/beacon-chain/state/state-native/hasher.go +++ b/beacon-chain/state/state-native/hasher.go @@ -5,14 +5,14 @@ import ( "encoding/binary" "fmt" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native/types" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state/stateutil" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/encoding/ssz" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing/trace" - "github.com/OffchainLabs/prysm/v6/runtime/version" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native/types" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state/stateutil" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/encoding/ssz" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing/trace" + "github.com/OffchainLabs/prysm/v7/runtime/version" "github.com/pkg/errors" ) diff --git a/beacon-chain/state/state-native/hasher_test.go b/beacon-chain/state/state-native/hasher_test.go index 2b7407d8e9..342e22b6e6 100644 --- a/beacon-chain/state/state-native/hasher_test.go +++ b/beacon-chain/state/state-native/hasher_test.go @@ -5,15 +5,15 @@ import ( "time" "github.com/OffchainLabs/go-bitfield" - statenative "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - enginev1 "github.com/OffchainLabs/prysm/v6/proto/engine/v1" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + statenative "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + enginev1 "github.com/OffchainLabs/prysm/v7/proto/engine/v1" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" ) func TestComputeFieldRootsWithHasher_Phase0(t *testing.T) { diff --git a/beacon-chain/state/state-native/multi_value_slices.go b/beacon-chain/state/state-native/multi_value_slices.go index cf14432718..03e5353dc3 100644 --- 
a/beacon-chain/state/state-native/multi_value_slices.go +++ b/beacon-chain/state/state-native/multi_value_slices.go @@ -3,11 +3,11 @@ package state_native import ( "runtime" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native/types" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - multi_value_slice "github.com/OffchainLabs/prysm/v6/container/multi-value-slice" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native/types" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + multi_value_slice "github.com/OffchainLabs/prysm/v7/container/multi-value-slice" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" "github.com/prometheus/client_golang/prometheus" "github.com/prometheus/client_golang/prometheus/promauto" ) diff --git a/beacon-chain/state/state-native/mvslice_fuzz_test.go b/beacon-chain/state/state-native/mvslice_fuzz_test.go index d0a466b7ca..19e4530465 100644 --- a/beacon-chain/state/state-native/mvslice_fuzz_test.go +++ b/beacon-chain/state/state-native/mvslice_fuzz_test.go @@ -3,9 +3,9 @@ package state_native import ( "testing" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/require" ) func FuzzMultiValueBalances(f *testing.F) { diff --git a/beacon-chain/state/state-native/proofs.go b/beacon-chain/state/state-native/proofs.go index 5ebd4d3efe..df83517485 100644 --- a/beacon-chain/state/state-native/proofs.go +++ b/beacon-chain/state/state-native/proofs.go @@ -4,10 +4,10 @@ import ( "context" "encoding/binary" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native/types" - "github.com/OffchainLabs/prysm/v6/container/trie" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/runtime/version" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native/types" + "github.com/OffchainLabs/prysm/v7/container/trie" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/runtime/version" ) const ( diff --git a/beacon-chain/state/state-native/proofs_test.go b/beacon-chain/state/state-native/proofs_test.go index b35627ebb8..66fb595024 100644 --- a/beacon-chain/state/state-native/proofs_test.go +++ b/beacon-chain/state/state-native/proofs_test.go @@ -3,10 +3,10 @@ package state_native_test import ( "testing" - statenative "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - "github.com/OffchainLabs/prysm/v6/container/trie" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + statenative "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + "github.com/OffchainLabs/prysm/v7/container/trie" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" "github.com/ethereum/go-ethereum/common/hexutil" ) diff --git a/beacon-chain/state/state-native/readonly_validator.go b/beacon-chain/state/state-native/readonly_validator.go index 9ad81c1d03..0b13bd5aa9 100644 --- a/beacon-chain/state/state-native/readonly_validator.go +++ 
b/beacon-chain/state/state-native/readonly_validator.go @@ -1,11 +1,11 @@ package state_native import ( - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" "github.com/pkg/errors" ) diff --git a/beacon-chain/state/state-native/readonly_validator_test.go b/beacon-chain/state/state-native/readonly_validator_test.go index ce6a733745..ad626d8ac9 100644 --- a/beacon-chain/state/state-native/readonly_validator_test.go +++ b/beacon-chain/state/state-native/readonly_validator_test.go @@ -3,12 +3,12 @@ package state_native_test import ( "testing" - statenative "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" + statenative "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" ) func TestReadOnlyValidator_ReturnsErrorOnNil(t *testing.T) { diff --git a/beacon-chain/state/state-native/references_test.go b/beacon-chain/state/state-native/references_test.go index 04d44c74c9..6f1221e556 100644 --- a/beacon-chain/state/state-native/references_test.go +++ b/beacon-chain/state/state-native/references_test.go @@ -7,12 +7,12 @@ import ( "testing" "github.com/OffchainLabs/go-bitfield" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native/types" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native/types" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" ) func TestStateReferenceSharing_Finalizer_Phase0(t *testing.T) { diff --git a/beacon-chain/state/state-native/setters_attestation.go b/beacon-chain/state/state-native/setters_attestation.go index 74ba2f9040..f93ee2897c 100644 --- a/beacon-chain/state/state-native/setters_attestation.go +++ b/beacon-chain/state/state-native/setters_attestation.go @@ -3,11 +3,11 @@ package state_native import ( "fmt" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native/types" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state/stateutil" - 
"github.com/OffchainLabs/prysm/v6/config/params" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native/types" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state/stateutil" + "github.com/OffchainLabs/prysm/v7/config/params" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" ) // RotateAttestations sets the previous epoch attestations to the current epoch attestations and diff --git a/beacon-chain/state/state-native/setters_attestation_test.go b/beacon-chain/state/state-native/setters_attestation_test.go index 12144219d1..ee16f0c622 100644 --- a/beacon-chain/state/state-native/setters_attestation_test.go +++ b/beacon-chain/state/state-native/setters_attestation_test.go @@ -3,12 +3,12 @@ package state_native import ( "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native/types" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native/types" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" ) func TestBeaconState_RotateAttestations(t *testing.T) { diff --git a/beacon-chain/state/state-native/setters_block.go b/beacon-chain/state/state-native/setters_block.go index 36569fc0c4..67a1e64940 100644 --- a/beacon-chain/state/state-native/setters_block.go +++ b/beacon-chain/state/state-native/setters_block.go @@ -1,8 +1,8 @@ package state_native import ( - "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native/types" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native/types" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" "github.com/pkg/errors" ) diff --git a/beacon-chain/state/state-native/setters_checkpoint.go b/beacon-chain/state/state-native/setters_checkpoint.go index f405f3a66d..8cd4f08c14 100644 --- a/beacon-chain/state/state-native/setters_checkpoint.go +++ b/beacon-chain/state/state-native/setters_checkpoint.go @@ -2,8 +2,8 @@ package state_native import ( "github.com/OffchainLabs/go-bitfield" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native/types" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native/types" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" ) // SetJustificationBits for the beacon state. 
diff --git a/beacon-chain/state/state-native/setters_churn.go b/beacon-chain/state/state-native/setters_churn.go index b2073b8f80..dc81d21e90 100644 --- a/beacon-chain/state/state-native/setters_churn.go +++ b/beacon-chain/state/state-native/setters_churn.go @@ -1,11 +1,11 @@ package state_native import ( - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native/types" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/runtime/version" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native/types" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/runtime/version" + "github.com/OffchainLabs/prysm/v7/time/slots" ) // ExitEpochAndUpdateChurn computes the exit epoch and updates the churn. This method mutates the state. diff --git a/beacon-chain/state/state-native/setters_churn_test.go b/beacon-chain/state/state-native/setters_churn_test.go index f614114971..100b4478dc 100644 --- a/beacon-chain/state/state-native/setters_churn_test.go +++ b/beacon-chain/state/state-native/setters_churn_test.go @@ -3,14 +3,14 @@ package state_native_test import ( "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - eth "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + eth "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" + "github.com/OffchainLabs/prysm/v7/time/slots" "github.com/golang/snappy" ) diff --git a/beacon-chain/state/state-native/setters_consolidation.go b/beacon-chain/state/state-native/setters_consolidation.go index 4b04579096..198a490232 100644 --- a/beacon-chain/state/state-native/setters_consolidation.go +++ b/beacon-chain/state/state-native/setters_consolidation.go @@ -3,11 +3,11 @@ package state_native import ( "errors" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native/types" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state/stateutil" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native/types" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state/stateutil" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" ) // AppendPendingConsolidation is a mutating call to the beacon state which appends the provided diff --git a/beacon-chain/state/state-native/setters_consolidation_test.go 
b/beacon-chain/state/state-native/setters_consolidation_test.go index 8720a6ef7f..5492cd1912 100644 --- a/beacon-chain/state/state-native/setters_consolidation_test.go +++ b/beacon-chain/state/state-native/setters_consolidation_test.go @@ -3,10 +3,10 @@ package state_native_test import ( "testing" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - eth "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/require" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + eth "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/require" ) func TestAppendPendingConsolidation(t *testing.T) { diff --git a/beacon-chain/state/state-native/setters_deposit_requests.go b/beacon-chain/state/state-native/setters_deposit_requests.go index ab821b6e8e..ec79950666 100644 --- a/beacon-chain/state/state-native/setters_deposit_requests.go +++ b/beacon-chain/state/state-native/setters_deposit_requests.go @@ -1,8 +1,8 @@ package state_native import ( - "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native/types" - "github.com/OffchainLabs/prysm/v6/runtime/version" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native/types" + "github.com/OffchainLabs/prysm/v7/runtime/version" ) // SetDepositRequestsStartIndex for the beacon state. Updates the DepositRequestsStartIndex diff --git a/beacon-chain/state/state-native/setters_deposit_requests_test.go b/beacon-chain/state/state-native/setters_deposit_requests_test.go index 800e7e311e..8302249656 100644 --- a/beacon-chain/state/state-native/setters_deposit_requests_test.go +++ b/beacon-chain/state/state-native/setters_deposit_requests_test.go @@ -3,10 +3,10 @@ package state_native_test import ( "testing" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" ) func TestSetDepositRequestsStartIndex(t *testing.T) { diff --git a/beacon-chain/state/state-native/setters_deposits.go b/beacon-chain/state/state-native/setters_deposits.go index 9fdd670e7b..0e366179d4 100644 --- a/beacon-chain/state/state-native/setters_deposits.go +++ b/beacon-chain/state/state-native/setters_deposits.go @@ -3,11 +3,11 @@ package state_native import ( "errors" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native/types" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state/stateutil" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native/types" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state/stateutil" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" ) // AppendPendingDeposit is a mutating call to the beacon state to 
create and append a pending diff --git a/beacon-chain/state/state-native/setters_deposits_test.go b/beacon-chain/state/state-native/setters_deposits_test.go index f3d908337f..03be5d90d3 100644 --- a/beacon-chain/state/state-native/setters_deposits_test.go +++ b/beacon-chain/state/state-native/setters_deposits_test.go @@ -3,10 +3,10 @@ package state_native_test import ( "testing" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - eth "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/require" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + eth "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/require" ) func TestAppendPendingDeposit(t *testing.T) { diff --git a/beacon-chain/state/state-native/setters_eth1.go b/beacon-chain/state/state-native/setters_eth1.go index 774abe27b3..8e3b416147 100644 --- a/beacon-chain/state/state-native/setters_eth1.go +++ b/beacon-chain/state/state-native/setters_eth1.go @@ -1,9 +1,9 @@ package state_native import ( - "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native/types" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state/stateutil" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native/types" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state/stateutil" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" ) // SetEth1Data for the beacon state. diff --git a/beacon-chain/state/state-native/setters_eth1_test.go b/beacon-chain/state/state-native/setters_eth1_test.go index 059973193e..5908abc159 100644 --- a/beacon-chain/state/state-native/setters_eth1_test.go +++ b/beacon-chain/state/state-native/setters_eth1_test.go @@ -3,10 +3,10 @@ package state_native_test import ( "testing" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - "github.com/OffchainLabs/prysm/v6/config/params" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/require" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + "github.com/OffchainLabs/prysm/v7/config/params" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/require" ) func BenchmarkAppendEth1DataVotes(b *testing.B) { diff --git a/beacon-chain/state/state-native/setters_misc.go b/beacon-chain/state/state-native/setters_misc.go index a231699a63..4a45081878 100644 --- a/beacon-chain/state/state-native/setters_misc.go +++ b/beacon-chain/state/state-native/setters_misc.go @@ -3,15 +3,15 @@ package state_native import ( "time" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native/types" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state/stateutil" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/container/slice" - "github.com/OffchainLabs/prysm/v6/crypto/hash" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native/types" + 
"github.com/OffchainLabs/prysm/v7/beacon-chain/state/stateutil" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/container/slice" + "github.com/OffchainLabs/prysm/v7/crypto/hash" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" "github.com/pkg/errors" "google.golang.org/protobuf/proto" ) diff --git a/beacon-chain/state/state-native/setters_misc_test.go b/beacon-chain/state/state-native/setters_misc_test.go index f6d05e8fa1..594c4a295b 100644 --- a/beacon-chain/state/state-native/setters_misc_test.go +++ b/beacon-chain/state/state-native/setters_misc_test.go @@ -3,11 +3,11 @@ package state_native_test import ( "testing" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/require" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/require" ) func BenchmarkAppendHistoricalRoots(b *testing.B) { diff --git a/beacon-chain/state/state-native/setters_participation.go b/beacon-chain/state/state-native/setters_participation.go index 4d157197a5..c12fc4fde1 100644 --- a/beacon-chain/state/state-native/setters_participation.go +++ b/beacon-chain/state/state-native/setters_participation.go @@ -1,10 +1,10 @@ package state_native import ( - "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native/types" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state/stateutil" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/runtime/version" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native/types" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state/stateutil" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/runtime/version" ) // SetPreviousParticipationBits for the beacon state. 
Updates the entire diff --git a/beacon-chain/state/state-native/setters_participation_test.go b/beacon-chain/state/state-native/setters_participation_test.go index 47b051bc43..3d51816cab 100644 --- a/beacon-chain/state/state-native/setters_participation_test.go +++ b/beacon-chain/state/state-native/setters_participation_test.go @@ -3,9 +3,9 @@ package state_native_test import ( "testing" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/require" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/require" ) func BenchmarkParticipationBits(b *testing.B) { diff --git a/beacon-chain/state/state-native/setters_payload_header.go b/beacon-chain/state/state-native/setters_payload_header.go index b8c0a0cebb..7f13eef560 100644 --- a/beacon-chain/state/state-native/setters_payload_header.go +++ b/beacon-chain/state/state-native/setters_payload_header.go @@ -3,12 +3,12 @@ package state_native import ( "fmt" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native/types" - consensusblocks "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - enginev1 "github.com/OffchainLabs/prysm/v6/proto/engine/v1" - _ "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native/types" + consensusblocks "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + enginev1 "github.com/OffchainLabs/prysm/v7/proto/engine/v1" + _ "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" "github.com/pkg/errors" ) diff --git a/beacon-chain/state/state-native/setters_payload_header_test.go b/beacon-chain/state/state-native/setters_payload_header_test.go index 9c60b5f781..51d552e42b 100644 --- a/beacon-chain/state/state-native/setters_payload_header_test.go +++ b/beacon-chain/state/state-native/setters_payload_header_test.go @@ -4,12 +4,12 @@ import ( "fmt" "testing" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - "github.com/OffchainLabs/prysm/v6/runtime/version" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + "github.com/OffchainLabs/prysm/v7/runtime/version" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" ) func TestSetLatestExecutionPayloadHeader(t *testing.T) { diff --git a/beacon-chain/state/state-native/setters_proposer_lookahead.go b/beacon-chain/state/state-native/setters_proposer_lookahead.go index 1ed0a2b5c7..38809aca8a 100644 --- a/beacon-chain/state/state-native/setters_proposer_lookahead.go +++ b/beacon-chain/state/state-native/setters_proposer_lookahead.go @@ -3,11 +3,11 @@ package state_native import ( "errors" - 
"github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native/types" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state/stateutil" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/runtime/version" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native/types" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state/stateutil" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/runtime/version" ) // SetProposerLookahead is a mutating call to the beacon state which sets the proposer lookahead diff --git a/beacon-chain/state/state-native/setters_randao.go b/beacon-chain/state/state-native/setters_randao.go index 12b4d188ce..93a1b56260 100644 --- a/beacon-chain/state/state-native/setters_randao.go +++ b/beacon-chain/state/state-native/setters_randao.go @@ -1,7 +1,7 @@ package state_native import ( - "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native/types" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native/types" "github.com/pkg/errors" ) diff --git a/beacon-chain/state/state-native/setters_state.go b/beacon-chain/state/state-native/setters_state.go index 28d69417c4..8582f989b1 100644 --- a/beacon-chain/state/state-native/setters_state.go +++ b/beacon-chain/state/state-native/setters_state.go @@ -1,7 +1,7 @@ package state_native import ( - "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native/types" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native/types" "github.com/pkg/errors" ) diff --git a/beacon-chain/state/state-native/setters_sync_committee.go b/beacon-chain/state/state-native/setters_sync_committee.go index 62231f6af2..aec44a4868 100644 --- a/beacon-chain/state/state-native/setters_sync_committee.go +++ b/beacon-chain/state/state-native/setters_sync_committee.go @@ -1,9 +1,9 @@ package state_native import ( - "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native/types" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native/types" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" ) // SetCurrentSyncCommittee for the beacon state. 
diff --git a/beacon-chain/state/state-native/setters_validator.go b/beacon-chain/state/state-native/setters_validator.go index 3e98b4bcce..70d10e23e2 100644 --- a/beacon-chain/state/state-native/setters_validator.go +++ b/beacon-chain/state/state-native/setters_validator.go @@ -1,13 +1,13 @@ package state_native import ( - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native/types" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state/stateutil" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native/types" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state/stateutil" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" "github.com/pkg/errors" ) diff --git a/beacon-chain/state/state-native/setters_validator_test.go b/beacon-chain/state/state-native/setters_validator_test.go index 03a3d45d33..83b19766cd 100644 --- a/beacon-chain/state/state-native/setters_validator_test.go +++ b/beacon-chain/state/state-native/setters_validator_test.go @@ -3,9 +3,9 @@ package state_native_test import ( "testing" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/require" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/require" ) func BenchmarkAppendBalance(b *testing.B) { diff --git a/beacon-chain/state/state-native/setters_withdrawal.go b/beacon-chain/state/state-native/setters_withdrawal.go index e0fc5a42e1..1223add5e9 100644 --- a/beacon-chain/state/state-native/setters_withdrawal.go +++ b/beacon-chain/state/state-native/setters_withdrawal.go @@ -3,11 +3,11 @@ package state_native import ( "errors" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native/types" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state/stateutil" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - eth "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native/types" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state/stateutil" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + eth "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" ) // SetNextWithdrawalIndex sets the index that will be assigned to the next withdrawal. 
diff --git a/beacon-chain/state/state-native/setters_withdrawal_test.go b/beacon-chain/state/state-native/setters_withdrawal_test.go index 44e568d355..c490576205 100644 --- a/beacon-chain/state/state-native/setters_withdrawal_test.go +++ b/beacon-chain/state/state-native/setters_withdrawal_test.go @@ -3,11 +3,11 @@ package state_native import ( "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native/types" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - eth "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native/types" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + eth "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" + "github.com/OffchainLabs/prysm/v7/testing/require" ) func TestSetNextWithdrawalIndex(t *testing.T) { diff --git a/beacon-chain/state/state-native/spec_parameters.go b/beacon-chain/state/state-native/spec_parameters.go index c265b2bdc4..26358c25c7 100644 --- a/beacon-chain/state/state-native/spec_parameters.go +++ b/beacon-chain/state/state-native/spec_parameters.go @@ -1,8 +1,8 @@ package state_native import ( - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/runtime/version" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/runtime/version" ) func (b *BeaconState) ProportionalSlashingMultiplier() (uint64, error) { diff --git a/beacon-chain/state/state-native/state_fuzz_test.go b/beacon-chain/state/state-native/state_fuzz_test.go index 00bebc68cb..bc2ff5d480 100644 --- a/beacon-chain/state/state-native/state_fuzz_test.go +++ b/beacon-chain/state/state-native/state_fuzz_test.go @@ -3,14 +3,14 @@ package state_native_test import ( "testing" - coreState "github.com/OffchainLabs/prysm/v6/beacon-chain/core/transition" - native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/crypto/rand" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/util" + coreState "github.com/OffchainLabs/prysm/v7/beacon-chain/core/transition" + native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/crypto/rand" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/util" ) func FuzzPhase0StateHashTreeRoot(f *testing.F) { diff --git a/beacon-chain/state/state-native/state_test.go b/beacon-chain/state/state-native/state_test.go index 980db230b1..56f1f4a632 100644 --- a/beacon-chain/state/state-native/state_test.go +++ b/beacon-chain/state/state-native/state_test.go @@ -7,16 +7,16 @@ import ( "testing" "github.com/OffchainLabs/go-bitfield" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native/types" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state/stateutil" - fieldparams 
"github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native/types" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state/stateutil" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" ) func TestBeaconState_NoDeadlock_Phase0(t *testing.T) { diff --git a/beacon-chain/state/state-native/state_trie.go b/beacon-chain/state/state-native/state_trie.go index 722edcd5e0..b4e6011fd2 100644 --- a/beacon-chain/state/state-native/state_trie.go +++ b/beacon-chain/state/state-native/state_trie.go @@ -6,21 +6,21 @@ import ( "runtime" "sort" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state/fieldtrie" - customtypes "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native/custom-types" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native/types" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state/stateutil" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - mvslice "github.com/OffchainLabs/prysm/v6/container/multi-value-slice" - "github.com/OffchainLabs/prysm/v6/container/slice" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/encoding/ssz" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing/trace" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state/fieldtrie" + customtypes "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native/custom-types" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native/types" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state/stateutil" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + mvslice "github.com/OffchainLabs/prysm/v7/container/multi-value-slice" + "github.com/OffchainLabs/prysm/v7/container/slice" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/encoding/ssz" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing/trace" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" "github.com/pkg/errors" "google.golang.org/protobuf/proto" ) diff --git a/beacon-chain/state/state-native/state_trie_test.go b/beacon-chain/state/state-native/state_trie_test.go index bbc7043c5c..2802edcf41 100644 --- a/beacon-chain/state/state-native/state_trie_test.go +++ 
b/beacon-chain/state/state-native/state_trie_test.go @@ -4,14 +4,14 @@ import ( "bytes" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - statenative "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + statenative "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" "github.com/golang/snappy" "github.com/google/go-cmp/cmp" "google.golang.org/protobuf/testing/protocmp" diff --git a/beacon-chain/state/state-native/types.go b/beacon-chain/state/state-native/types.go index 768b5d3a46..72e233c6fb 100644 --- a/beacon-chain/state/state-native/types.go +++ b/beacon-chain/state/state-native/types.go @@ -3,9 +3,9 @@ package state_native import ( "fmt" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native/types" - "github.com/OffchainLabs/prysm/v6/runtime/version" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native/types" + "github.com/OffchainLabs/prysm/v7/runtime/version" ) // Ensure type BeaconState below implements BeaconState interface. 
diff --git a/beacon-chain/state/state-native/types/BUILD.bazel b/beacon-chain/state/state-native/types/BUILD.bazel index 77492bc4c6..353a1f54b1 100644 --- a/beacon-chain/state/state-native/types/BUILD.bazel +++ b/beacon-chain/state/state-native/types/BUILD.bazel @@ -3,7 +3,7 @@ load("@prysm//tools/go:def.bzl", "go_library") go_library( name = "go_default_library", srcs = ["types.go"], - importpath = "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native/types", + importpath = "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native/types", visibility = ["//visibility:public"], deps = [ "//consensus-types:go_default_library", diff --git a/beacon-chain/state/state-native/types/types.go b/beacon-chain/state/state-native/types/types.go index 823de83e27..2d7aa0fb0a 100644 --- a/beacon-chain/state/state-native/types/types.go +++ b/beacon-chain/state/state-native/types/types.go @@ -3,7 +3,7 @@ package types import ( "fmt" - consensus_types "github.com/OffchainLabs/prysm/v6/consensus-types" + consensus_types "github.com/OffchainLabs/prysm/v7/consensus-types" "github.com/pkg/errors" ) diff --git a/beacon-chain/state/state-native/types_test.go b/beacon-chain/state/state-native/types_test.go index accf2ed18e..a749c8455c 100644 --- a/beacon-chain/state/state-native/types_test.go +++ b/beacon-chain/state/state-native/types_test.go @@ -5,14 +5,14 @@ import ( "strconv" "testing" - statenative "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/interop" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" + statenative "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/interop" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" log "github.com/sirupsen/logrus" "google.golang.org/protobuf/proto" ) diff --git a/beacon-chain/state/stategen/BUILD.bazel b/beacon-chain/state/stategen/BUILD.bazel index 1aa0da9269..7517891000 100644 --- a/beacon-chain/state/stategen/BUILD.bazel +++ b/beacon-chain/state/stategen/BUILD.bazel @@ -17,7 +17,7 @@ go_library( "service.go", "setter.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/beacon-chain/state/stategen", + importpath = "github.com/OffchainLabs/prysm/v7/beacon-chain/state/stategen", visibility = ["//visibility:public"], deps = [ "//beacon-chain/core/helpers:go_default_library", diff --git a/beacon-chain/state/stategen/cacher.go b/beacon-chain/state/stategen/cacher.go index 63da5af1b9..d5cb8f6ce4 100644 --- a/beacon-chain/state/stategen/cacher.go +++ b/beacon-chain/state/stategen/cacher.go @@ -1,7 +1,7 @@ package stategen import ( - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" "github.com/pkg/errors" ) diff --git a/beacon-chain/state/stategen/epoch_boundary_state_cache.go b/beacon-chain/state/stategen/epoch_boundary_state_cache.go index 46ab4da637..83043152b8 100644 --- 
a/beacon-chain/state/stategen/epoch_boundary_state_cache.go +++ b/beacon-chain/state/stategen/epoch_boundary_state_cache.go @@ -5,8 +5,8 @@ import ( "strconv" "sync" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" "k8s.io/client-go/tools/cache" ) diff --git a/beacon-chain/state/stategen/epoch_boundary_state_cache_test.go b/beacon-chain/state/stategen/epoch_boundary_state_cache_test.go index 49d78a33f5..b8cd9f173a 100644 --- a/beacon-chain/state/stategen/epoch_boundary_state_cache_test.go +++ b/beacon-chain/state/stategen/epoch_boundary_state_cache_test.go @@ -3,10 +3,10 @@ package stategen import ( "testing" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" ) func TestEpochBoundaryStateCache_BadSlotKey(t *testing.T) { diff --git a/beacon-chain/state/stategen/getter.go b/beacon-chain/state/stategen/getter.go index d09bf7fe8d..8aeb6aa090 100644 --- a/beacon-chain/state/stategen/getter.go +++ b/beacon-chain/state/stategen/getter.go @@ -4,15 +4,15 @@ import ( "context" stderrors "errors" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/time" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing/trace" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/time" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing/trace" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" "github.com/pkg/errors" ) diff --git a/beacon-chain/state/stategen/getter_test.go b/beacon-chain/state/stategen/getter_test.go index e73f3bd6a0..dfd8b12095 100644 --- a/beacon-chain/state/stategen/getter_test.go +++ b/beacon-chain/state/stategen/getter_test.go @@ -5,19 +5,19 @@ import ( "fmt" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/db" - testDB "github.com/OffchainLabs/prysm/v6/beacon-chain/db/testing" - doublylinkedtree "github.com/OffchainLabs/prysm/v6/beacon-chain/forkchoice/doubly-linked-tree" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/config/params" - blt "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - ethpb 
"github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/db" + testDB "github.com/OffchainLabs/prysm/v7/beacon-chain/db/testing" + doublylinkedtree "github.com/OffchainLabs/prysm/v7/beacon-chain/forkchoice/doubly-linked-tree" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/config/params" + blt "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" ) func TestStateByRoot_GenesisState(t *testing.T) { diff --git a/beacon-chain/state/stategen/history.go b/beacon-chain/state/stategen/history.go index d2bcd832cf..e5858dd0ab 100644 --- a/beacon-chain/state/stategen/history.go +++ b/beacon-chain/state/stategen/history.go @@ -4,13 +4,13 @@ import ( "context" "fmt" - "github.com/OffchainLabs/prysm/v6/beacon-chain/db" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing/trace" + "github.com/OffchainLabs/prysm/v7/beacon-chain/db" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing/trace" "github.com/pkg/errors" ) diff --git a/beacon-chain/state/stategen/history_test.go b/beacon-chain/state/stategen/history_test.go index 94d798e42e..b586d1d472 100644 --- a/beacon-chain/state/stategen/history_test.go +++ b/beacon-chain/state/stategen/history_test.go @@ -6,11 +6,11 @@ import ( "fmt" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - "github.com/OffchainLabs/prysm/v6/consensus-types/mock" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + "github.com/OffchainLabs/prysm/v7/consensus-types/mock" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/testing/require" "github.com/pkg/errors" ) diff --git a/beacon-chain/state/stategen/hot_state_cache.go b/beacon-chain/state/stategen/hot_state_cache.go index f90f5df0b6..d317ecd22e 100644 --- a/beacon-chain/state/stategen/hot_state_cache.go +++ b/beacon-chain/state/stategen/hot_state_cache.go @@ -3,8 +3,8 @@ package stategen import ( "sync" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - lruwrpr "github.com/OffchainLabs/prysm/v6/cache/lru" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + lruwrpr 
"github.com/OffchainLabs/prysm/v7/cache/lru" lru "github.com/hashicorp/golang-lru" "github.com/prometheus/client_golang/prometheus" "github.com/prometheus/client_golang/prometheus/promauto" diff --git a/beacon-chain/state/stategen/hot_state_cache_test.go b/beacon-chain/state/stategen/hot_state_cache_test.go index 524fc34c63..efb1e24f55 100644 --- a/beacon-chain/state/stategen/hot_state_cache_test.go +++ b/beacon-chain/state/stategen/hot_state_cache_test.go @@ -3,11 +3,11 @@ package stategen import ( "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" ) func TestHotStateCache_RoundTrip(t *testing.T) { diff --git a/beacon-chain/state/stategen/init_test.go b/beacon-chain/state/stategen/init_test.go index 879fcc2b5d..b01a4c77cd 100644 --- a/beacon-chain/state/stategen/init_test.go +++ b/beacon-chain/state/stategen/init_test.go @@ -1,7 +1,7 @@ package stategen import ( - "github.com/OffchainLabs/prysm/v6/config/params" + "github.com/OffchainLabs/prysm/v7/config/params" ) func init() { diff --git a/beacon-chain/state/stategen/migrate.go b/beacon-chain/state/stategen/migrate.go index 3c0dd2cc69..498caa8e50 100644 --- a/beacon-chain/state/stategen/migrate.go +++ b/beacon-chain/state/stategen/migrate.go @@ -5,9 +5,9 @@ import ( "encoding/hex" "fmt" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing/trace" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing/trace" "github.com/sirupsen/logrus" ) diff --git a/beacon-chain/state/stategen/migrate_test.go b/beacon-chain/state/stategen/migrate_test.go index d8878c1c5b..f99adbeb41 100644 --- a/beacon-chain/state/stategen/migrate_test.go +++ b/beacon-chain/state/stategen/migrate_test.go @@ -3,15 +3,15 @@ package stategen import ( "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/blocks" - testDB "github.com/OffchainLabs/prysm/v6/beacon-chain/db/testing" - doublylinkedtree "github.com/OffchainLabs/prysm/v6/beacon-chain/forkchoice/doubly-linked-tree" - consensusblocks "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/blocks" + testDB "github.com/OffchainLabs/prysm/v7/beacon-chain/db/testing" + doublylinkedtree "github.com/OffchainLabs/prysm/v7/beacon-chain/forkchoice/doubly-linked-tree" + consensusblocks "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + 
"github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" logTest "github.com/sirupsen/logrus/hooks/test" ) diff --git a/beacon-chain/state/stategen/mock/BUILD.bazel b/beacon-chain/state/stategen/mock/BUILD.bazel index 8482e0f59e..5f5177a3cd 100644 --- a/beacon-chain/state/stategen/mock/BUILD.bazel +++ b/beacon-chain/state/stategen/mock/BUILD.bazel @@ -7,7 +7,7 @@ go_library( "mock.go", "replayer.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/beacon-chain/state/stategen/mock", + importpath = "github.com/OffchainLabs/prysm/v7/beacon-chain/state/stategen/mock", visibility = ["//visibility:public"], deps = [ "//beacon-chain/state:go_default_library", diff --git a/beacon-chain/state/stategen/mock/mock.go b/beacon-chain/state/stategen/mock/mock.go index 75d72e409e..d3b4b92e6f 100644 --- a/beacon-chain/state/stategen/mock/mock.go +++ b/beacon-chain/state/stategen/mock/mock.go @@ -4,8 +4,8 @@ package mock import ( "context" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" ) // StateManager is a fake implementation of StateManager. diff --git a/beacon-chain/state/stategen/mock/replayer.go b/beacon-chain/state/stategen/mock/replayer.go index 164f4260f8..f894da0ffd 100644 --- a/beacon-chain/state/stategen/mock/replayer.go +++ b/beacon-chain/state/stategen/mock/replayer.go @@ -3,9 +3,9 @@ package mock import ( "context" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state/stategen" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state/stategen" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" ) func NewReplayerBuilder(opt ...ReplayerBuilderOption) *ReplayerBuilder { diff --git a/beacon-chain/state/stategen/mock_test.go b/beacon-chain/state/stategen/mock_test.go index e3d9f25bbf..830a3d9dc4 100644 --- a/beacon-chain/state/stategen/mock_test.go +++ b/beacon-chain/state/stategen/mock_test.go @@ -6,17 +6,17 @@ import ( "sort" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/blocks" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/transition" - "github.com/OffchainLabs/prysm/v6/beacon-chain/db" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - consensusblocks "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - blocktest "github.com/OffchainLabs/prysm/v6/consensus-types/blocks/testing" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/blocks" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/transition" + "github.com/OffchainLabs/prysm/v7/beacon-chain/db" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + consensusblocks "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + blocktest "github.com/OffchainLabs/prysm/v7/consensus-types/blocks/testing" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + 
"github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" "github.com/pkg/errors" ) diff --git a/beacon-chain/state/stategen/replay.go b/beacon-chain/state/stategen/replay.go index fc47de86fa..6c58e24763 100644 --- a/beacon-chain/state/stategen/replay.go +++ b/beacon-chain/state/stategen/replay.go @@ -5,13 +5,13 @@ import ( "fmt" "time" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/transition" - "github.com/OffchainLabs/prysm/v6/beacon-chain/db/filters" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing/trace" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/transition" + "github.com/OffchainLabs/prysm/v7/beacon-chain/db/filters" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing/trace" "github.com/pkg/errors" "github.com/sirupsen/logrus" ) diff --git a/beacon-chain/state/stategen/replay_test.go b/beacon-chain/state/stategen/replay_test.go index 8361cbed8b..a32570164c 100644 --- a/beacon-chain/state/stategen/replay_test.go +++ b/beacon-chain/state/stategen/replay_test.go @@ -3,23 +3,23 @@ package stategen import ( "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/blocks" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/db" - testDB "github.com/OffchainLabs/prysm/v6/beacon-chain/db/testing" - doublylinkedtree "github.com/OffchainLabs/prysm/v6/beacon-chain/forkchoice/doubly-linked-tree" - stateTesting "github.com/OffchainLabs/prysm/v6/beacon-chain/state/testing" - "github.com/OffchainLabs/prysm/v6/config/params" - consensusblocks "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/crypto/bls" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/blocks" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/db" + testDB "github.com/OffchainLabs/prysm/v7/beacon-chain/db/testing" + doublylinkedtree "github.com/OffchainLabs/prysm/v7/beacon-chain/forkchoice/doubly-linked-tree" + stateTesting "github.com/OffchainLabs/prysm/v7/beacon-chain/state/testing" + "github.com/OffchainLabs/prysm/v7/config/params" + consensusblocks "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/crypto/bls" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + 
"github.com/OffchainLabs/prysm/v7/runtime/version" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" "github.com/ethereum/go-ethereum/common/hexutil" "google.golang.org/protobuf/proto" ) diff --git a/beacon-chain/state/stategen/replayer.go b/beacon-chain/state/stategen/replayer.go index 7d5bb4560a..a5dc5cc4d1 100644 --- a/beacon-chain/state/stategen/replayer.go +++ b/beacon-chain/state/stategen/replayer.go @@ -5,10 +5,10 @@ import ( "fmt" "time" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing/trace" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing/trace" "github.com/pkg/errors" "github.com/sirupsen/logrus" ) diff --git a/beacon-chain/state/stategen/replayer_test.go b/beacon-chain/state/stategen/replayer_test.go index 4bbabfe395..d2ef40dae5 100644 --- a/beacon-chain/state/stategen/replayer_test.go +++ b/beacon-chain/state/stategen/replayer_test.go @@ -3,11 +3,11 @@ package stategen import ( "testing" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/require" logTest "github.com/sirupsen/logrus/hooks/test" ) diff --git a/beacon-chain/state/stategen/service.go b/beacon-chain/state/stategen/service.go index 0136b45e42..29db34cbe2 100644 --- a/beacon-chain/state/stategen/service.go +++ b/beacon-chain/state/stategen/service.go @@ -9,17 +9,17 @@ import ( "sync" "time" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/db" - "github.com/OffchainLabs/prysm/v6/beacon-chain/forkchoice" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/beacon-chain/sync/backfill/coverage" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/crypto/bls" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing/trace" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/db" + "github.com/OffchainLabs/prysm/v7/beacon-chain/forkchoice" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/beacon-chain/sync/backfill/coverage" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/crypto/bls" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing/trace" + 
"github.com/OffchainLabs/prysm/v7/time/slots" "github.com/pkg/errors" ) diff --git a/beacon-chain/state/stategen/service_test.go b/beacon-chain/state/stategen/service_test.go index 66d03a5ccf..d3b2632a81 100644 --- a/beacon-chain/state/stategen/service_test.go +++ b/beacon-chain/state/stategen/service_test.go @@ -3,13 +3,13 @@ package stategen import ( "testing" - testDB "github.com/OffchainLabs/prysm/v6/beacon-chain/db/testing" - doublylinkedtree "github.com/OffchainLabs/prysm/v6/beacon-chain/forkchoice/doubly-linked-tree" - "github.com/OffchainLabs/prysm/v6/config/params" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + testDB "github.com/OffchainLabs/prysm/v7/beacon-chain/db/testing" + doublylinkedtree "github.com/OffchainLabs/prysm/v7/beacon-chain/forkchoice/doubly-linked-tree" + "github.com/OffchainLabs/prysm/v7/config/params" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" ) func TestResume(t *testing.T) { diff --git a/beacon-chain/state/stategen/setter.go b/beacon-chain/state/stategen/setter.go index 99a77b6753..3756e6d70f 100644 --- a/beacon-chain/state/stategen/setter.go +++ b/beacon-chain/state/stategen/setter.go @@ -4,13 +4,13 @@ import ( "context" "fmt" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing/trace" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing/trace" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/time/slots" "github.com/sirupsen/logrus" ) diff --git a/beacon-chain/state/stategen/setter_test.go b/beacon-chain/state/stategen/setter_test.go index 9d320ba8ae..fbca8e97f8 100644 --- a/beacon-chain/state/stategen/setter_test.go +++ b/beacon-chain/state/stategen/setter_test.go @@ -3,13 +3,13 @@ package stategen import ( "testing" - testDB "github.com/OffchainLabs/prysm/v6/beacon-chain/db/testing" - doublylinkedtree "github.com/OffchainLabs/prysm/v6/beacon-chain/forkchoice/doubly-linked-tree" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + testDB "github.com/OffchainLabs/prysm/v7/beacon-chain/db/testing" + doublylinkedtree "github.com/OffchainLabs/prysm/v7/beacon-chain/forkchoice/doubly-linked-tree" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" logTest 
"github.com/sirupsen/logrus/hooks/test" ) diff --git a/beacon-chain/state/stateutil/BUILD.bazel b/beacon-chain/state/stateutil/BUILD.bazel index f812a169d5..30762e02b1 100644 --- a/beacon-chain/state/stateutil/BUILD.bazel +++ b/beacon-chain/state/stateutil/BUILD.bazel @@ -24,7 +24,7 @@ go_library( "validator_reader.go", "validator_root.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/beacon-chain/state/stateutil", + importpath = "github.com/OffchainLabs/prysm/v7/beacon-chain/state/stateutil", visibility = ["//visibility:public"], deps = [ "//beacon-chain/core/transition/stateutils:go_default_library", diff --git a/beacon-chain/state/stateutil/benchmark_test.go b/beacon-chain/state/stateutil/benchmark_test.go index 15613e27e3..a97ea609a9 100644 --- a/beacon-chain/state/stateutil/benchmark_test.go +++ b/beacon-chain/state/stateutil/benchmark_test.go @@ -3,9 +3,9 @@ package stateutil_test import ( "testing" - "github.com/OffchainLabs/prysm/v6/crypto/hash" - "github.com/OffchainLabs/prysm/v6/encoding/ssz" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/crypto/hash" + "github.com/OffchainLabs/prysm/v7/encoding/ssz" + "github.com/OffchainLabs/prysm/v7/testing/require" ) func BenchmarkMerkleize_Buffered(b *testing.B) { diff --git a/beacon-chain/state/stateutil/block_header_root.go b/beacon-chain/state/stateutil/block_header_root.go index b00616fb42..3f4a53fb81 100644 --- a/beacon-chain/state/stateutil/block_header_root.go +++ b/beacon-chain/state/stateutil/block_header_root.go @@ -3,9 +3,9 @@ package stateutil import ( "encoding/binary" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/encoding/ssz" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/encoding/ssz" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" ) // BlockHeaderRoot computes the HashTreeRoot Merkleization of diff --git a/beacon-chain/state/stateutil/eth1_root.go b/beacon-chain/state/stateutil/eth1_root.go index fb31916d23..4189b6d616 100644 --- a/beacon-chain/state/stateutil/eth1_root.go +++ b/beacon-chain/state/stateutil/eth1_root.go @@ -4,10 +4,10 @@ import ( "bytes" "encoding/binary" - params "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/encoding/ssz" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + params "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/encoding/ssz" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" "github.com/pkg/errors" ) diff --git a/beacon-chain/state/stateutil/field_root_attestation.go b/beacon-chain/state/stateutil/field_root_attestation.go index cecc2e3e8a..0c26460f77 100644 --- a/beacon-chain/state/stateutil/field_root_attestation.go +++ b/beacon-chain/state/stateutil/field_root_attestation.go @@ -5,9 +5,9 @@ import ( "encoding/binary" "fmt" - params "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/encoding/ssz" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + params "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/encoding/ssz" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" "github.com/pkg/errors" ) diff --git a/beacon-chain/state/stateutil/field_root_eth1.go 
b/beacon-chain/state/stateutil/field_root_eth1.go index 591749efe0..c34b69002d 100644 --- a/beacon-chain/state/stateutil/field_root_eth1.go +++ b/beacon-chain/state/stateutil/field_root_eth1.go @@ -1,7 +1,7 @@ package stateutil import ( - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" "github.com/pkg/errors" ) diff --git a/beacon-chain/state/stateutil/field_root_test.go b/beacon-chain/state/stateutil/field_root_test.go index e7724e4e7a..4b00298b79 100644 --- a/beacon-chain/state/stateutil/field_root_test.go +++ b/beacon-chain/state/stateutil/field_root_test.go @@ -3,7 +3,7 @@ package stateutil import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/assert" ) func TestArraysTreeRoot_OnlyPowerOf2(t *testing.T) { diff --git a/beacon-chain/state/stateutil/field_root_validator.go b/beacon-chain/state/stateutil/field_root_validator.go index 96493fe856..b1772043ca 100644 --- a/beacon-chain/state/stateutil/field_root_validator.go +++ b/beacon-chain/state/stateutil/field_root_validator.go @@ -6,10 +6,10 @@ import ( "runtime" "sync" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/crypto/hash/htr" - "github.com/OffchainLabs/prysm/v6/encoding/ssz" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/crypto/hash/htr" + "github.com/OffchainLabs/prysm/v7/encoding/ssz" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" "github.com/pkg/errors" "github.com/sirupsen/logrus" ) diff --git a/beacon-chain/state/stateutil/field_root_validator_test.go b/beacon-chain/state/stateutil/field_root_validator_test.go index fc87448f5f..7e4c1b86e6 100644 --- a/beacon-chain/state/stateutil/field_root_validator_test.go +++ b/beacon-chain/state/stateutil/field_root_validator_test.go @@ -6,10 +6,10 @@ import ( "sync" "testing" - mathutil "github.com/OffchainLabs/prysm/v6/math" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" + mathutil "github.com/OffchainLabs/prysm/v7/math" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" ) func TestValidatorConstants(t *testing.T) { diff --git a/beacon-chain/state/stateutil/field_root_vector.go b/beacon-chain/state/stateutil/field_root_vector.go index 4307f38e48..810a0b1d01 100644 --- a/beacon-chain/state/stateutil/field_root_vector.go +++ b/beacon-chain/state/stateutil/field_root_vector.go @@ -1,7 +1,7 @@ package stateutil import ( - "github.com/OffchainLabs/prysm/v6/encoding/ssz" + "github.com/OffchainLabs/prysm/v7/encoding/ssz" "github.com/pkg/errors" ) diff --git a/beacon-chain/state/stateutil/historical_summaries_root.go b/beacon-chain/state/stateutil/historical_summaries_root.go index 6a64a8667d..a1371fcaa5 100644 --- a/beacon-chain/state/stateutil/historical_summaries_root.go +++ b/beacon-chain/state/stateutil/historical_summaries_root.go @@ -1,9 +1,9 @@ package stateutil import ( - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/encoding/ssz" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + 
"github.com/OffchainLabs/prysm/v7/encoding/ssz" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" ) func HistoricalSummariesRoot(summaries []*ethpb.HistoricalSummary) ([32]byte, error) { diff --git a/beacon-chain/state/stateutil/participation_bit_root.go b/beacon-chain/state/stateutil/participation_bit_root.go index 9c95032822..fc0166805e 100644 --- a/beacon-chain/state/stateutil/participation_bit_root.go +++ b/beacon-chain/state/stateutil/participation_bit_root.go @@ -3,8 +3,8 @@ package stateutil import ( "encoding/binary" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/encoding/ssz" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/encoding/ssz" "github.com/pkg/errors" ) diff --git a/beacon-chain/state/stateutil/pending_attestation_root.go b/beacon-chain/state/stateutil/pending_attestation_root.go index 076c36ce7b..4daf0dc4a4 100644 --- a/beacon-chain/state/stateutil/pending_attestation_root.go +++ b/beacon-chain/state/stateutil/pending_attestation_root.go @@ -3,10 +3,10 @@ package stateutil import ( "encoding/binary" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/encoding/ssz" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/encoding/ssz" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" "github.com/pkg/errors" ) diff --git a/beacon-chain/state/stateutil/pending_consolidations_root.go b/beacon-chain/state/stateutil/pending_consolidations_root.go index 81e7a3422c..8aa62b8d34 100644 --- a/beacon-chain/state/stateutil/pending_consolidations_root.go +++ b/beacon-chain/state/stateutil/pending_consolidations_root.go @@ -1,9 +1,9 @@ package stateutil import ( - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/encoding/ssz" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/encoding/ssz" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" ) func PendingConsolidationsRoot(slice []*ethpb.PendingConsolidation) ([32]byte, error) { diff --git a/beacon-chain/state/stateutil/pending_deposits_root.go b/beacon-chain/state/stateutil/pending_deposits_root.go index 96c36acc28..52afb734ef 100644 --- a/beacon-chain/state/stateutil/pending_deposits_root.go +++ b/beacon-chain/state/stateutil/pending_deposits_root.go @@ -1,9 +1,9 @@ package stateutil import ( - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/encoding/ssz" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/encoding/ssz" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" ) func PendingDepositsRoot(slice []*ethpb.PendingDeposit) ([32]byte, error) { diff --git a/beacon-chain/state/stateutil/pending_partial_withdrawals_root.go b/beacon-chain/state/stateutil/pending_partial_withdrawals_root.go index 098fcb58de..5d17cac48e 100644 --- a/beacon-chain/state/stateutil/pending_partial_withdrawals_root.go +++ b/beacon-chain/state/stateutil/pending_partial_withdrawals_root.go @@ -1,9 +1,9 @@ package stateutil 
import ( - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/encoding/ssz" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/encoding/ssz" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" ) func PendingPartialWithdrawalsRoot(slice []*ethpb.PendingPartialWithdrawal) ([32]byte, error) { diff --git a/beacon-chain/state/stateutil/proposer_lookahead_root.go b/beacon-chain/state/stateutil/proposer_lookahead_root.go index aff1cebcd7..b2ef8e45f0 100644 --- a/beacon-chain/state/stateutil/proposer_lookahead_root.go +++ b/beacon-chain/state/stateutil/proposer_lookahead_root.go @@ -3,8 +3,8 @@ package stateutil import ( "encoding/binary" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/encoding/ssz" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/encoding/ssz" ) // ProposerLookaheadRoot computes the hash tree root of the proposer lookahead diff --git a/beacon-chain/state/stateutil/proposer_lookahead_root_test.go b/beacon-chain/state/stateutil/proposer_lookahead_root_test.go index c4e4e2af2e..35ccd425a6 100644 --- a/beacon-chain/state/stateutil/proposer_lookahead_root_test.go +++ b/beacon-chain/state/stateutil/proposer_lookahead_root_test.go @@ -3,9 +3,9 @@ package stateutil_test import ( "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state/stateutil" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state/stateutil" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/testing/require" ) func TestProposerLookaheadRoot(t *testing.T) { diff --git a/beacon-chain/state/stateutil/state_root_test.go b/beacon-chain/state/stateutil/state_root_test.go index 88f93f799f..04ed135884 100644 --- a/beacon-chain/state/stateutil/state_root_test.go +++ b/beacon-chain/state/stateutil/state_root_test.go @@ -5,12 +5,12 @@ import ( "strconv" "testing" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/interop" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/interop" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" ) func TestState_FieldCount(t *testing.T) { diff --git a/beacon-chain/state/stateutil/sync_committee.root.go b/beacon-chain/state/stateutil/sync_committee.root.go index a95f56671e..4fc58f559e 100644 --- a/beacon-chain/state/stateutil/sync_committee.root.go +++ b/beacon-chain/state/stateutil/sync_committee.root.go @@ -1,9 +1,9 @@ package stateutil import ( - "github.com/OffchainLabs/prysm/v6/crypto/hash/htr" - "github.com/OffchainLabs/prysm/v6/encoding/ssz" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/crypto/hash/htr" + "github.com/OffchainLabs/prysm/v7/encoding/ssz" + ethpb 
"github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" "github.com/pkg/errors" ) diff --git a/beacon-chain/state/stateutil/trie_helpers.go b/beacon-chain/state/stateutil/trie_helpers.go index 762117f52a..4cd2fd5b19 100644 --- a/beacon-chain/state/stateutil/trie_helpers.go +++ b/beacon-chain/state/stateutil/trie_helpers.go @@ -4,11 +4,11 @@ import ( "bytes" "encoding/binary" - "github.com/OffchainLabs/prysm/v6/container/trie" - "github.com/OffchainLabs/prysm/v6/crypto/hash" - "github.com/OffchainLabs/prysm/v6/crypto/hash/htr" - "github.com/OffchainLabs/prysm/v6/encoding/ssz" - "github.com/OffchainLabs/prysm/v6/math" + "github.com/OffchainLabs/prysm/v7/container/trie" + "github.com/OffchainLabs/prysm/v7/crypto/hash" + "github.com/OffchainLabs/prysm/v7/crypto/hash/htr" + "github.com/OffchainLabs/prysm/v7/encoding/ssz" + "github.com/OffchainLabs/prysm/v7/math" "github.com/pkg/errors" ) diff --git a/beacon-chain/state/stateutil/trie_helpers_test.go b/beacon-chain/state/stateutil/trie_helpers_test.go index 69f468fbf3..90a3642b59 100644 --- a/beacon-chain/state/stateutil/trie_helpers_test.go +++ b/beacon-chain/state/stateutil/trie_helpers_test.go @@ -3,15 +3,15 @@ package stateutil_test import ( "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state/stateutil" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state/stateutil" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" ) func TestReturnTrieLayer_OK(t *testing.T) { diff --git a/beacon-chain/state/stateutil/unrealized_justification.go b/beacon-chain/state/stateutil/unrealized_justification.go index 62a7f2513d..9500ed739a 100644 --- a/beacon-chain/state/stateutil/unrealized_justification.go +++ b/beacon-chain/state/stateutil/unrealized_justification.go @@ -1,9 +1,9 @@ package stateutil import ( - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/math" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/math" "github.com/pkg/errors" ) diff --git a/beacon-chain/state/stateutil/unrealized_justification_test.go b/beacon-chain/state/stateutil/unrealized_justification_test.go index c4f77e5f71..dabb95452b 100644 --- a/beacon-chain/state/stateutil/unrealized_justification_test.go +++ b/beacon-chain/state/stateutil/unrealized_justification_test.go @@ -3,10 +3,10 @@ package stateutil import ( "testing" - "github.com/OffchainLabs/prysm/v6/config/params" - multi_value_slice "github.com/OffchainLabs/prysm/v6/container/multi-value-slice" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - 
"github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/config/params" + multi_value_slice "github.com/OffchainLabs/prysm/v7/container/multi-value-slice" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/require" ) func TestState_UnrealizedCheckpointBalances(t *testing.T) { diff --git a/beacon-chain/state/stateutil/validator_map_handler.go b/beacon-chain/state/stateutil/validator_map_handler.go index 690b62c1f7..ece37e9ada 100644 --- a/beacon-chain/state/stateutil/validator_map_handler.go +++ b/beacon-chain/state/stateutil/validator_map_handler.go @@ -3,10 +3,10 @@ package stateutil import ( "sync" - coreutils "github.com/OffchainLabs/prysm/v6/beacon-chain/core/transition/stateutils" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + coreutils "github.com/OffchainLabs/prysm/v7/beacon-chain/core/transition/stateutils" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" ) // ValidatorMapHandler is a container to hold the map and a reference tracker for how many diff --git a/beacon-chain/state/stateutil/validator_reader.go b/beacon-chain/state/stateutil/validator_reader.go index 6a7a9a79e6..0a722b3f7a 100644 --- a/beacon-chain/state/stateutil/validator_reader.go +++ b/beacon-chain/state/stateutil/validator_reader.go @@ -1,8 +1,8 @@ package stateutil import ( - multi_value_slice "github.com/OffchainLabs/prysm/v6/container/multi-value-slice" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + multi_value_slice "github.com/OffchainLabs/prysm/v7/container/multi-value-slice" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" ) // ValReader specifies an interface through which we can access the validator registry. 
diff --git a/beacon-chain/state/stateutil/validator_root.go b/beacon-chain/state/stateutil/validator_root.go index e6c4902f03..aa86f74024 100644 --- a/beacon-chain/state/stateutil/validator_root.go +++ b/beacon-chain/state/stateutil/validator_root.go @@ -3,10 +3,10 @@ package stateutil import ( "encoding/binary" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/encoding/ssz" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/encoding/ssz" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" "github.com/pkg/errors" ) diff --git a/beacon-chain/state/stateutil/validator_root_test.go b/beacon-chain/state/stateutil/validator_root_test.go index 9d1ffe18df..144f458163 100644 --- a/beacon-chain/state/stateutil/validator_root_test.go +++ b/beacon-chain/state/stateutil/validator_root_test.go @@ -3,7 +3,7 @@ package stateutil_test import ( "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state/stateutil" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state/stateutil" ) func BenchmarkUint64ListRootWithRegistryLimit(b *testing.B) { diff --git a/beacon-chain/state/testing/BUILD.bazel b/beacon-chain/state/testing/BUILD.bazel index b5f4af46e9..ff601fb2e0 100644 --- a/beacon-chain/state/testing/BUILD.bazel +++ b/beacon-chain/state/testing/BUILD.bazel @@ -10,7 +10,7 @@ go_library( "getters_checkpoint.go", "getters_validator.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/beacon-chain/state/testing", + importpath = "github.com/OffchainLabs/prysm/v7/beacon-chain/state/testing", visibility = [ "//beacon-chain/core:__subpackages__", "//beacon-chain/state:__subpackages__", diff --git a/beacon-chain/state/testing/generators.go b/beacon-chain/state/testing/generators.go index 5e32f0063e..2af5dab71f 100644 --- a/beacon-chain/state/testing/generators.go +++ b/beacon-chain/state/testing/generators.go @@ -3,14 +3,14 @@ package testing import ( "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/blocks" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/signing" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/crypto/bls/common" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/blocks" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/signing" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/crypto/bls/common" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/require" ) // GeneratePendingDeposit is used for testing and producing a signed pending deposit diff --git a/beacon-chain/state/testing/getters.go b/beacon-chain/state/testing/getters.go index 8cf0fd6e0a..1881e4e9ec 100644 --- a/beacon-chain/state/testing/getters.go +++ b/beacon-chain/state/testing/getters.go @@ -4,12 +4,12 @@ import ( "sync" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - fieldparams 
"github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" ) func VerifyBeaconStateSlotDataRace(t *testing.T, factory getState) { diff --git a/beacon-chain/state/testing/getters_block.go b/beacon-chain/state/testing/getters_block.go index 8ec1307058..274c008aef 100644 --- a/beacon-chain/state/testing/getters_block.go +++ b/beacon-chain/state/testing/getters_block.go @@ -3,11 +3,11 @@ package testing import ( "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/require" ) type getStateWithLatestBlockHeader func(*ethpb.BeaconBlockHeader) (state.BeaconState, error) diff --git a/beacon-chain/state/testing/getters_checkpoint.go b/beacon-chain/state/testing/getters_checkpoint.go index a458ae1804..799ffa2164 100644 --- a/beacon-chain/state/testing/getters_checkpoint.go +++ b/beacon-chain/state/testing/getters_checkpoint.go @@ -4,11 +4,11 @@ import ( "testing" "github.com/OffchainLabs/go-bitfield" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/require" ) func VerifyBeaconStateJustificationBitsNil(t *testing.T, factory getState) { diff --git a/beacon-chain/state/testing/getters_validator.go b/beacon-chain/state/testing/getters_validator.go index 0f411ae862..c7e4d1039f 100644 --- a/beacon-chain/state/testing/getters_validator.go +++ b/beacon-chain/state/testing/getters_validator.go @@ -3,9 +3,9 @@ package testing import ( "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" ) type getState func() (state.BeaconState, error) diff --git a/beacon-chain/sync/BUILD.bazel b/beacon-chain/sync/BUILD.bazel index d151af3d3e..b97bc91850 100644 --- a/beacon-chain/sync/BUILD.bazel +++ 
b/beacon-chain/sync/BUILD.bazel @@ -62,7 +62,7 @@ go_library( "validate_sync_contribution_proof.go", "validate_voluntary_exit.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/beacon-chain/sync", + importpath = "github.com/OffchainLabs/prysm/v7/beacon-chain/sync", visibility = [ "//beacon-chain:__subpackages__", "//cmd:__subpackages__", diff --git a/beacon-chain/sync/backfill/BUILD.bazel b/beacon-chain/sync/backfill/BUILD.bazel index cbaa1b7cac..d81d707c43 100644 --- a/beacon-chain/sync/backfill/BUILD.bazel +++ b/beacon-chain/sync/backfill/BUILD.bazel @@ -14,7 +14,7 @@ go_library( "verify.go", "worker.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/beacon-chain/sync/backfill", + importpath = "github.com/OffchainLabs/prysm/v7/beacon-chain/sync/backfill", visibility = ["//visibility:public"], deps = [ "//beacon-chain/core/signing:go_default_library", diff --git a/beacon-chain/sync/backfill/batch.go b/beacon-chain/sync/backfill/batch.go index 655760800e..a8874d7410 100644 --- a/beacon-chain/sync/backfill/batch.go +++ b/beacon-chain/sync/backfill/batch.go @@ -6,11 +6,11 @@ import ( "sort" "time" - "github.com/OffchainLabs/prysm/v6/beacon-chain/das" - "github.com/OffchainLabs/prysm/v6/beacon-chain/sync" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - eth "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/beacon-chain/das" + "github.com/OffchainLabs/prysm/v7/beacon-chain/sync" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + eth "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" "github.com/libp2p/go-libp2p/core/peer" "github.com/pkg/errors" "github.com/sirupsen/logrus" diff --git a/beacon-chain/sync/backfill/batch_test.go b/beacon-chain/sync/backfill/batch_test.go index 01f9b3c1cd..f26b3c6a2c 100644 --- a/beacon-chain/sync/backfill/batch_test.go +++ b/beacon-chain/sync/backfill/batch_test.go @@ -5,8 +5,8 @@ import ( "testing" "time" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/testing/require" "github.com/pkg/errors" ) diff --git a/beacon-chain/sync/backfill/batcher.go b/beacon-chain/sync/backfill/batcher.go index 98a0eeedc9..87aa92f743 100644 --- a/beacon-chain/sync/backfill/batcher.go +++ b/beacon-chain/sync/backfill/batcher.go @@ -1,7 +1,7 @@ package backfill import ( - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" "github.com/pkg/errors" ) diff --git a/beacon-chain/sync/backfill/batcher_test.go b/beacon-chain/sync/backfill/batcher_test.go index 3d848229e3..52667dfb31 100644 --- a/beacon-chain/sync/backfill/batcher_test.go +++ b/beacon-chain/sync/backfill/batcher_test.go @@ -4,8 +4,8 @@ import ( "fmt" "testing" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/testing/require" ) func TestBatcherBefore(t *testing.T) { diff --git a/beacon-chain/sync/backfill/blobs.go b/beacon-chain/sync/backfill/blobs.go index 2000f6c844..25de5130c9 100644 --- a/beacon-chain/sync/backfill/blobs.go +++ b/beacon-chain/sync/backfill/blobs.go @@ -4,14 +4,14 @@ import ( "bytes" "context" - 
"github.com/OffchainLabs/prysm/v6/beacon-chain/das" - "github.com/OffchainLabs/prysm/v6/beacon-chain/db/filesystem" - "github.com/OffchainLabs/prysm/v6/beacon-chain/verification" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/beacon-chain/das" + "github.com/OffchainLabs/prysm/v7/beacon-chain/db/filesystem" + "github.com/OffchainLabs/prysm/v7/beacon-chain/verification" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" "github.com/pkg/errors" ) diff --git a/beacon-chain/sync/backfill/blobs_test.go b/beacon-chain/sync/backfill/blobs_test.go index b891249ed8..19eab84ebb 100644 --- a/beacon-chain/sync/backfill/blobs_test.go +++ b/beacon-chain/sync/backfill/blobs_test.go @@ -3,14 +3,14 @@ package backfill import ( "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/db/filesystem" - "github.com/OffchainLabs/prysm/v6/beacon-chain/verification" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/beacon-chain/db/filesystem" + "github.com/OffchainLabs/prysm/v7/beacon-chain/verification" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" ) func testBlobGen(t *testing.T, start primitives.Slot, n int) ([]blocks.ROBlock, [][]blocks.ROBlob) { diff --git a/beacon-chain/sync/backfill/coverage/BUILD.bazel b/beacon-chain/sync/backfill/coverage/BUILD.bazel index 8b14d98015..f251708bcc 100644 --- a/beacon-chain/sync/backfill/coverage/BUILD.bazel +++ b/beacon-chain/sync/backfill/coverage/BUILD.bazel @@ -3,7 +3,7 @@ load("@prysm//tools/go:def.bzl", "go_library") go_library( name = "go_default_library", srcs = ["coverage.go"], - importpath = "github.com/OffchainLabs/prysm/v6/beacon-chain/sync/backfill/coverage", + importpath = "github.com/OffchainLabs/prysm/v7/beacon-chain/sync/backfill/coverage", visibility = ["//visibility:public"], deps = ["//consensus-types/primitives:go_default_library"], ) diff --git a/beacon-chain/sync/backfill/coverage/coverage.go b/beacon-chain/sync/backfill/coverage/coverage.go index bafa4d8b13..d8870c92b5 100644 --- a/beacon-chain/sync/backfill/coverage/coverage.go +++ b/beacon-chain/sync/backfill/coverage/coverage.go @@ -1,6 +1,6 @@ package coverage -import "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" +import "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" // AvailableBlocker can be used to check whether there is a finalized block in the db for the given slot. // This interface is typically fulfilled by backfill.Store. 
diff --git a/beacon-chain/sync/backfill/metrics.go b/beacon-chain/sync/backfill/metrics.go index 2044307a00..5a9b3b3d75 100644 --- a/beacon-chain/sync/backfill/metrics.go +++ b/beacon-chain/sync/backfill/metrics.go @@ -1,9 +1,9 @@ package backfill import ( - "github.com/OffchainLabs/prysm/v6/beacon-chain/sync" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" + "github.com/OffchainLabs/prysm/v7/beacon-chain/sync" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" "github.com/prometheus/client_golang/prometheus" "github.com/prometheus/client_golang/prometheus/promauto" ) diff --git a/beacon-chain/sync/backfill/pool.go b/beacon-chain/sync/backfill/pool.go index 067530e65e..95bdb6b757 100644 --- a/beacon-chain/sync/backfill/pool.go +++ b/beacon-chain/sync/backfill/pool.go @@ -5,13 +5,13 @@ import ( "math" "time" - "github.com/OffchainLabs/prysm/v6/beacon-chain/db/filesystem" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/peers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/startup" - "github.com/OffchainLabs/prysm/v6/beacon-chain/sync" - "github.com/OffchainLabs/prysm/v6/beacon-chain/verification" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/beacon-chain/db/filesystem" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/peers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/startup" + "github.com/OffchainLabs/prysm/v7/beacon-chain/sync" + "github.com/OffchainLabs/prysm/v7/beacon-chain/verification" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" "github.com/libp2p/go-libp2p/core/peer" "github.com/pkg/errors" ) diff --git a/beacon-chain/sync/backfill/pool_test.go b/beacon-chain/sync/backfill/pool_test.go index e5df3fbd67..54c184376f 100644 --- a/beacon-chain/sync/backfill/pool_test.go +++ b/beacon-chain/sync/backfill/pool_test.go @@ -5,15 +5,15 @@ import ( "testing" "time" - "github.com/OffchainLabs/prysm/v6/beacon-chain/db/filesystem" - p2ptest "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/startup" - "github.com/OffchainLabs/prysm/v6/beacon-chain/sync" - "github.com/OffchainLabs/prysm/v6/beacon-chain/verification" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/beacon-chain/db/filesystem" + p2ptest "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/testing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/startup" + "github.com/OffchainLabs/prysm/v7/beacon-chain/sync" + "github.com/OffchainLabs/prysm/v7/beacon-chain/verification" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" "github.com/libp2p/go-libp2p/core/peer" ) diff --git a/beacon-chain/sync/backfill/service.go b/beacon-chain/sync/backfill/service.go index c07d753bdd..2229cc908d 100644 --- a/beacon-chain/sync/backfill/service.go +++ b/beacon-chain/sync/backfill/service.go @@ -3,18 +3,18 @@ package backfill import ( "context" - 
"github.com/OffchainLabs/prysm/v6/beacon-chain/db/filesystem" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p" - "github.com/OffchainLabs/prysm/v6/beacon-chain/startup" - "github.com/OffchainLabs/prysm/v6/beacon-chain/sync" - "github.com/OffchainLabs/prysm/v6/beacon-chain/verification" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/proto/dbval" - "github.com/OffchainLabs/prysm/v6/runtime" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/beacon-chain/db/filesystem" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p" + "github.com/OffchainLabs/prysm/v7/beacon-chain/startup" + "github.com/OffchainLabs/prysm/v7/beacon-chain/sync" + "github.com/OffchainLabs/prysm/v7/beacon-chain/verification" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/proto/dbval" + "github.com/OffchainLabs/prysm/v7/runtime" + "github.com/OffchainLabs/prysm/v7/time/slots" "github.com/libp2p/go-libp2p/core/peer" "github.com/pkg/errors" "github.com/sirupsen/logrus" diff --git a/beacon-chain/sync/backfill/service_test.go b/beacon-chain/sync/backfill/service_test.go index b0b270c2b0..9636b76daa 100644 --- a/beacon-chain/sync/backfill/service_test.go +++ b/beacon-chain/sync/backfill/service_test.go @@ -5,17 +5,17 @@ import ( "testing" "time" - "github.com/OffchainLabs/prysm/v6/beacon-chain/db/filesystem" - p2ptest "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/startup" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/beacon-chain/verification" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/proto/dbval" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/beacon-chain/db/filesystem" + p2ptest "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/testing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/startup" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/beacon-chain/verification" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/proto/dbval" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" + "github.com/OffchainLabs/prysm/v7/time/slots" ) type mockMinimumSlotter struct { diff --git a/beacon-chain/sync/backfill/status.go b/beacon-chain/sync/backfill/status.go index a975255439..7a4fa9d33e 100644 --- a/beacon-chain/sync/backfill/status.go +++ b/beacon-chain/sync/backfill/status.go @@ -4,14 +4,14 @@ import ( "context" "sync" - "github.com/OffchainLabs/prysm/v6/beacon-chain/das" - "github.com/OffchainLabs/prysm/v6/beacon-chain/db" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - 
"github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/proto/dbval" + "github.com/OffchainLabs/prysm/v7/beacon-chain/das" + "github.com/OffchainLabs/prysm/v7/beacon-chain/db" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/proto/dbval" "github.com/pkg/errors" ) diff --git a/beacon-chain/sync/backfill/status_test.go b/beacon-chain/sync/backfill/status_test.go index 70280b1998..3b7fc5a9a9 100644 --- a/beacon-chain/sync/backfill/status_test.go +++ b/beacon-chain/sync/backfill/status_test.go @@ -5,16 +5,16 @@ import ( "context" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/das" - "github.com/OffchainLabs/prysm/v6/beacon-chain/db" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - blocktest "github.com/OffchainLabs/prysm/v6/consensus-types/blocks/testing" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/proto/dbval" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/beacon-chain/das" + "github.com/OffchainLabs/prysm/v7/beacon-chain/db" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + blocktest "github.com/OffchainLabs/prysm/v7/consensus-types/blocks/testing" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/proto/dbval" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" "github.com/pkg/errors" ) diff --git a/beacon-chain/sync/backfill/verify.go b/beacon-chain/sync/backfill/verify.go index 8581bd1854..f810835b9a 100644 --- a/beacon-chain/sync/backfill/verify.go +++ b/beacon-chain/sync/backfill/verify.go @@ -1,16 +1,16 @@ package backfill import ( - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/signing" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/crypto/bls" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/runtime/version" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/signing" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/crypto/bls" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/runtime/version" + "github.com/OffchainLabs/prysm/v7/time/slots" "github.com/pkg/errors" ) diff 
--git a/beacon-chain/sync/backfill/verify_test.go b/beacon-chain/sync/backfill/verify_test.go index 05972c1b77..0f3466d43e 100644 --- a/beacon-chain/sync/backfill/verify_test.go +++ b/beacon-chain/sync/backfill/verify_test.go @@ -4,17 +4,17 @@ import ( "math" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/signing" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/crypto/bls" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/runtime/interop" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/signing" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/crypto/bls" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/runtime/interop" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" "github.com/ethereum/go-ethereum/common/hexutil" ) diff --git a/beacon-chain/sync/backfill/worker.go b/beacon-chain/sync/backfill/worker.go index 27471aa1de..31ecc6f301 100644 --- a/beacon-chain/sync/backfill/worker.go +++ b/beacon-chain/sync/backfill/worker.go @@ -4,11 +4,11 @@ import ( "context" "time" - "github.com/OffchainLabs/prysm/v6/beacon-chain/db/filesystem" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p" - "github.com/OffchainLabs/prysm/v6/beacon-chain/startup" - "github.com/OffchainLabs/prysm/v6/beacon-chain/sync" - "github.com/OffchainLabs/prysm/v6/beacon-chain/verification" + "github.com/OffchainLabs/prysm/v7/beacon-chain/db/filesystem" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p" + "github.com/OffchainLabs/prysm/v7/beacon-chain/startup" + "github.com/OffchainLabs/prysm/v7/beacon-chain/sync" + "github.com/OffchainLabs/prysm/v7/beacon-chain/verification" "github.com/pkg/errors" ) diff --git a/beacon-chain/sync/batch_verifier.go b/beacon-chain/sync/batch_verifier.go index 4968ac8805..4e6a517993 100644 --- a/beacon-chain/sync/batch_verifier.go +++ b/beacon-chain/sync/batch_verifier.go @@ -4,12 +4,12 @@ import ( "context" "time" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/peerdas" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/crypto/bls" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing/trace" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/peerdas" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/crypto/bls" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing/trace" pubsub "github.com/libp2p/go-libp2p-pubsub" "github.com/pkg/errors" ) diff --git a/beacon-chain/sync/batch_verifier_test.go b/beacon-chain/sync/batch_verifier_test.go index 
aaa27fdffd..12bf404549 100644 --- a/beacon-chain/sync/batch_verifier_test.go +++ b/beacon-chain/sync/batch_verifier_test.go @@ -4,10 +4,10 @@ import ( "context" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/signing" - "github.com/OffchainLabs/prysm/v6/crypto/bls" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/signing" + "github.com/OffchainLabs/prysm/v7/crypto/bls" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/util" pubsub "github.com/libp2p/go-libp2p-pubsub" ) diff --git a/beacon-chain/sync/blobs_test.go b/beacon-chain/sync/blobs_test.go index 544a58d46f..7bf450f4ee 100644 --- a/beacon-chain/sync/blobs_test.go +++ b/beacon-chain/sync/blobs_test.go @@ -8,27 +8,27 @@ import ( "testing" "time" - mock "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/db/filesystem" - db "github.com/OffchainLabs/prysm/v6/beacon-chain/db/testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p" - p2ptest "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/startup" - "github.com/OffchainLabs/prysm/v6/beacon-chain/verification" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - types "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - leakybucket "github.com/OffchainLabs/prysm/v6/container/leaky-bucket" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/genesis" - enginev1 "github.com/OffchainLabs/prysm/v6/proto/engine/v1" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" - "github.com/OffchainLabs/prysm/v6/time/slots" + mock "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain/testing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/db/filesystem" + db "github.com/OffchainLabs/prysm/v7/beacon-chain/db/testing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p" + p2ptest "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/testing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/startup" + "github.com/OffchainLabs/prysm/v7/beacon-chain/verification" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + types "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + leakybucket "github.com/OffchainLabs/prysm/v7/container/leaky-bucket" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/genesis" + enginev1 "github.com/OffchainLabs/prysm/v7/proto/engine/v1" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" + "github.com/OffchainLabs/prysm/v7/time/slots" "github.com/ethereum/go-ethereum/common" gethTypes "github.com/ethereum/go-ethereum/core/types" 
"github.com/libp2p/go-libp2p/core/network" diff --git a/beacon-chain/sync/block_batcher.go b/beacon-chain/sync/block_batcher.go index 0e0c0c2288..aa1963d425 100644 --- a/beacon-chain/sync/block_batcher.go +++ b/beacon-chain/sync/block_batcher.go @@ -6,10 +6,10 @@ import ( "sort" "time" - "github.com/OffchainLabs/prysm/v6/beacon-chain/db" - "github.com/OffchainLabs/prysm/v6/beacon-chain/db/filters" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/beacon-chain/db" + "github.com/OffchainLabs/prysm/v7/beacon-chain/db/filters" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" libp2pcore "github.com/libp2p/go-libp2p/core" "github.com/pkg/errors" ) diff --git a/beacon-chain/sync/block_batcher_test.go b/beacon-chain/sync/block_batcher_test.go index 61302233d8..7cdce247da 100644 --- a/beacon-chain/sync/block_batcher_test.go +++ b/beacon-chain/sync/block_batcher_test.go @@ -4,11 +4,11 @@ import ( "math/rand" "testing" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/require" ) func TestSortedObj_SortBlocksRoots(t *testing.T) { diff --git a/beacon-chain/sync/checkpoint/BUILD.bazel b/beacon-chain/sync/checkpoint/BUILD.bazel index dc4cd60576..58542e66a7 100644 --- a/beacon-chain/sync/checkpoint/BUILD.bazel +++ b/beacon-chain/sync/checkpoint/BUILD.bazel @@ -8,7 +8,7 @@ go_library( "log.go", "weak-subjectivity.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/beacon-chain/sync/checkpoint", + importpath = "github.com/OffchainLabs/prysm/v7/beacon-chain/sync/checkpoint", visibility = ["//visibility:public"], deps = [ "//api/client:go_default_library", diff --git a/beacon-chain/sync/checkpoint/api.go b/beacon-chain/sync/checkpoint/api.go index 8e4b7f9a02..d20cf5a093 100644 --- a/beacon-chain/sync/checkpoint/api.go +++ b/beacon-chain/sync/checkpoint/api.go @@ -5,17 +5,17 @@ import ( "fmt" "path" - "github.com/OffchainLabs/prysm/v6/api/client" - "github.com/OffchainLabs/prysm/v6/api/client/beacon" - "github.com/OffchainLabs/prysm/v6/beacon-chain/db" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/encoding/ssz/detect" - "github.com/OffchainLabs/prysm/v6/io/file" - "github.com/OffchainLabs/prysm/v6/runtime/version" + "github.com/OffchainLabs/prysm/v7/api/client" + "github.com/OffchainLabs/prysm/v7/api/client/beacon" + "github.com/OffchainLabs/prysm/v7/beacon-chain/db" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + 
"github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/encoding/ssz/detect" + "github.com/OffchainLabs/prysm/v7/io/file" + "github.com/OffchainLabs/prysm/v7/runtime/version" "github.com/ethereum/go-ethereum/common/hexutil" "github.com/pkg/errors" "github.com/sirupsen/logrus" diff --git a/beacon-chain/sync/checkpoint/api_test.go b/beacon-chain/sync/checkpoint/api_test.go index cef59bf8ab..f2a4add183 100644 --- a/beacon-chain/sync/checkpoint/api_test.go +++ b/beacon-chain/sync/checkpoint/api_test.go @@ -6,15 +6,15 @@ import ( "net/http" "testing" - "github.com/OffchainLabs/prysm/v6/api/client" - "github.com/OffchainLabs/prysm/v6/api/client/beacon" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - blocktest "github.com/OffchainLabs/prysm/v6/consensus-types/blocks/testing" - "github.com/OffchainLabs/prysm/v6/encoding/ssz/detect" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/api/client" + "github.com/OffchainLabs/prysm/v7/api/client/beacon" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + blocktest "github.com/OffchainLabs/prysm/v7/consensus-types/blocks/testing" + "github.com/OffchainLabs/prysm/v7/encoding/ssz/detect" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" + "github.com/OffchainLabs/prysm/v7/time/slots" "github.com/pkg/errors" ) diff --git a/beacon-chain/sync/checkpoint/file.go b/beacon-chain/sync/checkpoint/file.go index 54e8fe2f2e..330e769883 100644 --- a/beacon-chain/sync/checkpoint/file.go +++ b/beacon-chain/sync/checkpoint/file.go @@ -5,9 +5,9 @@ import ( "fmt" "os" - "github.com/OffchainLabs/prysm/v6/beacon-chain/db" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/io/file" + "github.com/OffchainLabs/prysm/v7/beacon-chain/db" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/io/file" "github.com/pkg/errors" ) diff --git a/beacon-chain/sync/checkpoint/weak-subjectivity.go b/beacon-chain/sync/checkpoint/weak-subjectivity.go index 13a7b3960a..c8812462f5 100644 --- a/beacon-chain/sync/checkpoint/weak-subjectivity.go +++ b/beacon-chain/sync/checkpoint/weak-subjectivity.go @@ -3,13 +3,13 @@ package checkpoint import ( "context" - base "github.com/OffchainLabs/prysm/v6/api/client" - "github.com/OffchainLabs/prysm/v6/api/client/beacon" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/encoding/ssz/detect" - "github.com/OffchainLabs/prysm/v6/runtime/version" - "github.com/OffchainLabs/prysm/v6/time/slots" + base "github.com/OffchainLabs/prysm/v7/api/client" + "github.com/OffchainLabs/prysm/v7/api/client/beacon" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/encoding/ssz/detect" + "github.com/OffchainLabs/prysm/v7/runtime/version" + "github.com/OffchainLabs/prysm/v7/time/slots" "github.com/pkg/errors" ) diff --git a/beacon-chain/sync/checkpoint/weak-subjectivity_test.go b/beacon-chain/sync/checkpoint/weak-subjectivity_test.go index b027b6b20a..e7e2548419 100644 --- a/beacon-chain/sync/checkpoint/weak-subjectivity_test.go +++ 
b/beacon-chain/sync/checkpoint/weak-subjectivity_test.go @@ -8,19 +8,19 @@ import ( "net/http" "testing" - "github.com/OffchainLabs/prysm/v6/api/client" - "github.com/OffchainLabs/prysm/v6/api/client/beacon" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - blocktest "github.com/OffchainLabs/prysm/v6/consensus-types/blocks/testing" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/encoding/ssz/detect" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/api/client" + "github.com/OffchainLabs/prysm/v7/api/client/beacon" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + blocktest "github.com/OffchainLabs/prysm/v7/consensus-types/blocks/testing" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/encoding/ssz/detect" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" + "github.com/OffchainLabs/prysm/v7/time/slots" "github.com/pkg/errors" ) diff --git a/beacon-chain/sync/context.go b/beacon-chain/sync/context.go index 2e0d1b5a60..f8be375877 100644 --- a/beacon-chain/sync/context.go +++ b/beacon-chain/sync/context.go @@ -3,8 +3,8 @@ package sync import ( "io" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p" - "github.com/OffchainLabs/prysm/v6/config/params" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p" + "github.com/OffchainLabs/prysm/v7/config/params" "github.com/libp2p/go-libp2p/core/network" "github.com/libp2p/go-libp2p/core/protocol" "github.com/pkg/errors" diff --git a/beacon-chain/sync/context_test.go b/beacon-chain/sync/context_test.go index 0ecb33d0e3..95d8ee2048 100644 --- a/beacon-chain/sync/context_test.go +++ b/beacon-chain/sync/context_test.go @@ -5,10 +5,10 @@ import ( "testing" "time" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p" - p2ptest "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/testing" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p" + p2ptest "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/testing" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/util" core "github.com/libp2p/go-libp2p/core" "github.com/libp2p/go-libp2p/core/network" "github.com/libp2p/go-libp2p/core/protocol" diff --git a/beacon-chain/sync/custody.go b/beacon-chain/sync/custody.go index e97a57a472..70534d3bfc 100644 --- a/beacon-chain/sync/custody.go +++ b/beacon-chain/sync/custody.go @@ -5,11 +5,11 @@ import ( "strings" "time" - "github.com/OffchainLabs/prysm/v6/async" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/peerdas" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p" - "github.com/OffchainLabs/prysm/v6/cmd/beacon-chain/flags" - "github.com/OffchainLabs/prysm/v6/config/params" + "github.com/OffchainLabs/prysm/v7/async" + 
"github.com/OffchainLabs/prysm/v7/beacon-chain/core/peerdas" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p" + "github.com/OffchainLabs/prysm/v7/cmd/beacon-chain/flags" + "github.com/OffchainLabs/prysm/v7/config/params" "github.com/pkg/errors" "github.com/sirupsen/logrus" ) diff --git a/beacon-chain/sync/custody_test.go b/beacon-chain/sync/custody_test.go index 5408c1a81f..f2961098f5 100644 --- a/beacon-chain/sync/custody_test.go +++ b/beacon-chain/sync/custody_test.go @@ -6,18 +6,18 @@ import ( "testing" "time" - mock "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/cache" - "github.com/OffchainLabs/prysm/v6/beacon-chain/db" - dbtesting "github.com/OffchainLabs/prysm/v6/beacon-chain/db/testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p" - p2ptest "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/testing" - "github.com/OffchainLabs/prysm/v6/cmd/beacon-chain/flags" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/require" + mock "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain/testing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/cache" + "github.com/OffchainLabs/prysm/v7/beacon-chain/db" + dbtesting "github.com/OffchainLabs/prysm/v7/beacon-chain/db/testing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p" + p2ptest "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/testing" + "github.com/OffchainLabs/prysm/v7/cmd/beacon-chain/flags" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/require" ) type testSetup struct { diff --git a/beacon-chain/sync/data_column_sidecars.go b/beacon-chain/sync/data_column_sidecars.go index 1cdebb65e3..2e9c895244 100644 --- a/beacon-chain/sync/data_column_sidecars.go +++ b/beacon-chain/sync/data_column_sidecars.go @@ -7,20 +7,20 @@ import ( "sync" "time" - "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/peerdas" - "github.com/OffchainLabs/prysm/v6/beacon-chain/db/filesystem" - prysmP2P "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p" - p2ptypes "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/types" - "github.com/OffchainLabs/prysm/v6/beacon-chain/verification" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - leakybucket "github.com/OffchainLabs/prysm/v6/container/leaky-bucket" - "github.com/OffchainLabs/prysm/v6/crypto/rand" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/peerdas" + "github.com/OffchainLabs/prysm/v7/beacon-chain/db/filesystem" + prysmP2P "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p" + p2ptypes "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/types" + 
"github.com/OffchainLabs/prysm/v7/beacon-chain/verification" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + leakybucket "github.com/OffchainLabs/prysm/v7/container/leaky-bucket" + "github.com/OffchainLabs/prysm/v7/crypto/rand" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" goPeer "github.com/libp2p/go-libp2p/core/peer" "github.com/pkg/errors" "github.com/sirupsen/logrus" diff --git a/beacon-chain/sync/data_column_sidecars_test.go b/beacon-chain/sync/data_column_sidecars_test.go index 4c533ccc31..1d97589fc2 100644 --- a/beacon-chain/sync/data_column_sidecars_test.go +++ b/beacon-chain/sync/data_column_sidecars_test.go @@ -7,25 +7,25 @@ import ( "testing" "time" - "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/kzg" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/peerdas" - "github.com/OffchainLabs/prysm/v6/beacon-chain/db/filesystem" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/peers" - testp2p "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/testing" - p2ptypes "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/types" - "github.com/OffchainLabs/prysm/v6/beacon-chain/startup" - "github.com/OffchainLabs/prysm/v6/beacon-chain/verification" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/consensus-types/wrapper" - leakybucket "github.com/OffchainLabs/prysm/v6/container/leaky-bucket" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain/kzg" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/peerdas" + "github.com/OffchainLabs/prysm/v7/beacon-chain/db/filesystem" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/peers" + testp2p "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/testing" + p2ptypes "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/types" + "github.com/OffchainLabs/prysm/v7/beacon-chain/startup" + "github.com/OffchainLabs/prysm/v7/beacon-chain/verification" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/consensus-types/wrapper" + leakybucket "github.com/OffchainLabs/prysm/v7/container/leaky-bucket" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" "github.com/libp2p/go-libp2p" "github.com/libp2p/go-libp2p/core/crypto" "github.com/libp2p/go-libp2p/core/network" diff --git a/beacon-chain/sync/data_columns_reconstruct.go b/beacon-chain/sync/data_columns_reconstruct.go index 837d766264..dd81b2de77 100644 --- a/beacon-chain/sync/data_columns_reconstruct.go +++ 
b/beacon-chain/sync/data_columns_reconstruct.go @@ -6,12 +6,12 @@ import ( "sync" "time" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/peerdas" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/peerdas" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/time/slots" "github.com/pkg/errors" "github.com/sirupsen/logrus" ) diff --git a/beacon-chain/sync/data_columns_reconstruct_test.go b/beacon-chain/sync/data_columns_reconstruct_test.go index b532a0c3c3..542c8527f6 100644 --- a/beacon-chain/sync/data_columns_reconstruct_test.go +++ b/beacon-chain/sync/data_columns_reconstruct_test.go @@ -7,14 +7,14 @@ import ( "math/rand" - "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/kzg" - mockChain "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/peerdas" - "github.com/OffchainLabs/prysm/v6/beacon-chain/db/filesystem" - p2ptest "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/testing" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain/kzg" + mockChain "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain/testing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/peerdas" + "github.com/OffchainLabs/prysm/v7/beacon-chain/db/filesystem" + p2ptest "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/testing" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" ) func TestProcessDataColumnSidecarsFromReconstruction(t *testing.T) { diff --git a/beacon-chain/sync/deadlines.go b/beacon-chain/sync/deadlines.go index 0c101bc7f2..a9f017234b 100644 --- a/beacon-chain/sync/deadlines.go +++ b/beacon-chain/sync/deadlines.go @@ -4,7 +4,7 @@ import ( "strings" "time" - "github.com/OffchainLabs/prysm/v6/config/params" + "github.com/OffchainLabs/prysm/v7/config/params" "github.com/libp2p/go-libp2p/core/network" "github.com/sirupsen/logrus" ) diff --git a/beacon-chain/sync/decode_pubsub.go b/beacon-chain/sync/decode_pubsub.go index d749bb9fd7..050798ff57 100644 --- a/beacon-chain/sync/decode_pubsub.go +++ b/beacon-chain/sync/decode_pubsub.go @@ -4,13 +4,13 @@ import ( "reflect" "strings" - "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/types" - "github.com/OffchainLabs/prysm/v6/beacon-chain/startup" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/types" + "github.com/OffchainLabs/prysm/v7/beacon-chain/startup" + 
"github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" pubsub "github.com/libp2p/go-libp2p-pubsub" "github.com/pkg/errors" ssz "github.com/prysmaticlabs/fastssz" diff --git a/beacon-chain/sync/decode_pubsub_test.go b/beacon-chain/sync/decode_pubsub_test.go index 4fe6ce5197..0d5c2e0209 100644 --- a/beacon-chain/sync/decode_pubsub_test.go +++ b/beacon-chain/sync/decode_pubsub_test.go @@ -7,21 +7,21 @@ import ( "testing" "time" - "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain" - mock "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p" - p2ptesting "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/types" - "github.com/OffchainLabs/prysm/v6/beacon-chain/startup" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - "github.com/OffchainLabs/prysm/v6/consensus-types/wrapper" - enginev1 "github.com/OffchainLabs/prysm/v6/proto/engine/v1" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1/metadata" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain" + mock "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain/testing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p" + p2ptesting "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/testing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/types" + "github.com/OffchainLabs/prysm/v7/beacon-chain/startup" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + "github.com/OffchainLabs/prysm/v7/consensus-types/wrapper" + enginev1 "github.com/OffchainLabs/prysm/v7/proto/engine/v1" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1/metadata" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" "github.com/d4l3k/messagediff" pubsub "github.com/libp2p/go-libp2p-pubsub" pb "github.com/libp2p/go-libp2p-pubsub/pb" diff --git a/beacon-chain/sync/error.go b/beacon-chain/sync/error.go index 594820f4ad..5d3f54b135 100644 --- a/beacon-chain/sync/error.go +++ b/beacon-chain/sync/error.go @@ -4,10 +4,10 @@ import ( "bytes" "io" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/encoder" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/types" - "github.com/OffchainLabs/prysm/v6/config/params" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/encoder" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/types" + "github.com/OffchainLabs/prysm/v7/config/params" libp2pcore "github.com/libp2p/go-libp2p/core" "github.com/libp2p/go-libp2p/core/network" multiplex "github.com/libp2p/go-mplex" diff --git a/beacon-chain/sync/error_test.go b/beacon-chain/sync/error_test.go index 7332527b44..78bee9a6ab 100644 --- a/beacon-chain/sync/error_test.go +++ b/beacon-chain/sync/error_test.go @@ -4,10 +4,10 @@ import ( "bytes" "testing" - p2ptest 
"github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/types" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" + p2ptest "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/testing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/types" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" ) func TestRegularSync_generateErrorResponse(t *testing.T) { diff --git a/beacon-chain/sync/fork_watcher.go b/beacon-chain/sync/fork_watcher.go index a4791f3c40..aaeda70ea9 100644 --- a/beacon-chain/sync/fork_watcher.go +++ b/beacon-chain/sync/fork_watcher.go @@ -1,10 +1,10 @@ package sync import ( - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/time/slots" "github.com/libp2p/go-libp2p/core/protocol" "github.com/pkg/errors" ) diff --git a/beacon-chain/sync/fork_watcher_test.go b/beacon-chain/sync/fork_watcher_test.go index e6466a763e..2d81bb37a5 100644 --- a/beacon-chain/sync/fork_watcher_test.go +++ b/beacon-chain/sync/fork_watcher_test.go @@ -7,17 +7,17 @@ import ( "testing" "time" - "github.com/OffchainLabs/prysm/v6/async/abool" - mockChain "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p" - p2ptest "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/startup" - mockSync "github.com/OffchainLabs/prysm/v6/beacon-chain/sync/initial-sync/testing" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/genesis" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/async/abool" + mockChain "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain/testing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p" + p2ptest "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/testing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/startup" + mockSync "github.com/OffchainLabs/prysm/v7/beacon-chain/sync/initial-sync/testing" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/genesis" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" ) func defaultClockWithTimeAtEpoch(epoch primitives.Epoch) *startup.Clock { diff --git a/beacon-chain/sync/initial-sync/BUILD.bazel b/beacon-chain/sync/initial-sync/BUILD.bazel index dbb3578482..d6731c248b 100644 --- a/beacon-chain/sync/initial-sync/BUILD.bazel +++ b/beacon-chain/sync/initial-sync/BUILD.bazel @@ -13,7 +13,7 @@ go_library( "round_robin.go", "service.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/beacon-chain/sync/initial-sync", + importpath = "github.com/OffchainLabs/prysm/v7/beacon-chain/sync/initial-sync", visibility = ["//beacon-chain:__subpackages__"], deps = [ "//async/abool:go_default_library", diff --git a/beacon-chain/sync/initial-sync/blocks_fetcher.go 
b/beacon-chain/sync/initial-sync/blocks_fetcher.go index aa6802fcc4..cbb5c62acd 100644 --- a/beacon-chain/sync/initial-sync/blocks_fetcher.go +++ b/beacon-chain/sync/initial-sync/blocks_fetcher.go @@ -8,29 +8,29 @@ import ( "sync" "time" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/peerdas" - "github.com/OffchainLabs/prysm/v6/beacon-chain/db" - "github.com/OffchainLabs/prysm/v6/beacon-chain/db/filesystem" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p" - p2pTypes "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/types" - "github.com/OffchainLabs/prysm/v6/beacon-chain/startup" - prysmsync "github.com/OffchainLabs/prysm/v6/beacon-chain/sync" - "github.com/OffchainLabs/prysm/v6/beacon-chain/sync/verify" - "github.com/OffchainLabs/prysm/v6/beacon-chain/verification" - "github.com/OffchainLabs/prysm/v6/cmd/beacon-chain/flags" - "github.com/OffchainLabs/prysm/v6/config/features" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - leakybucket "github.com/OffchainLabs/prysm/v6/container/leaky-bucket" - "github.com/OffchainLabs/prysm/v6/crypto/rand" - "github.com/OffchainLabs/prysm/v6/math" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing/trace" - p2ppb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/peerdas" + "github.com/OffchainLabs/prysm/v7/beacon-chain/db" + "github.com/OffchainLabs/prysm/v7/beacon-chain/db/filesystem" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p" + p2pTypes "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/types" + "github.com/OffchainLabs/prysm/v7/beacon-chain/startup" + prysmsync "github.com/OffchainLabs/prysm/v7/beacon-chain/sync" + "github.com/OffchainLabs/prysm/v7/beacon-chain/sync/verify" + "github.com/OffchainLabs/prysm/v7/beacon-chain/verification" + "github.com/OffchainLabs/prysm/v7/cmd/beacon-chain/flags" + "github.com/OffchainLabs/prysm/v7/config/features" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + leakybucket "github.com/OffchainLabs/prysm/v7/container/leaky-bucket" + "github.com/OffchainLabs/prysm/v7/crypto/rand" + "github.com/OffchainLabs/prysm/v7/math" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing/trace" + p2ppb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" + "github.com/OffchainLabs/prysm/v7/time/slots" "github.com/libp2p/go-libp2p/core/peer" "github.com/pkg/errors" "github.com/sirupsen/logrus" diff --git a/beacon-chain/sync/initial-sync/blocks_fetcher_peers.go b/beacon-chain/sync/initial-sync/blocks_fetcher_peers.go index f07d2de955..a6f1edcd01 100644 --- a/beacon-chain/sync/initial-sync/blocks_fetcher_peers.go +++ b/beacon-chain/sync/initial-sync/blocks_fetcher_peers.go @@ -6,12 +6,12 @@ import ( "sync" "time" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/peers/scorers" - "github.com/OffchainLabs/prysm/v6/cmd/beacon-chain/flags" - "github.com/OffchainLabs/prysm/v6/config/params" - 
"github.com/OffchainLabs/prysm/v6/monitoring/tracing/trace" - prysmTime "github.com/OffchainLabs/prysm/v6/time" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/peers/scorers" + "github.com/OffchainLabs/prysm/v7/cmd/beacon-chain/flags" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing/trace" + prysmTime "github.com/OffchainLabs/prysm/v7/time" + "github.com/OffchainLabs/prysm/v7/time/slots" "github.com/libp2p/go-libp2p/core/peer" "github.com/sirupsen/logrus" ) diff --git a/beacon-chain/sync/initial-sync/blocks_fetcher_peers_test.go b/beacon-chain/sync/initial-sync/blocks_fetcher_peers_test.go index 4bf2a91de8..8e41e65df0 100644 --- a/beacon-chain/sync/initial-sync/blocks_fetcher_peers_test.go +++ b/beacon-chain/sync/initial-sync/blocks_fetcher_peers_test.go @@ -8,13 +8,13 @@ import ( "testing" "time" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/peers/scorers" - "github.com/OffchainLabs/prysm/v6/cmd/beacon-chain/flags" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - leakybucket "github.com/OffchainLabs/prysm/v6/container/leaky-bucket" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - prysmTime "github.com/OffchainLabs/prysm/v6/time" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/peers/scorers" + "github.com/OffchainLabs/prysm/v7/cmd/beacon-chain/flags" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + leakybucket "github.com/OffchainLabs/prysm/v7/container/leaky-bucket" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + prysmTime "github.com/OffchainLabs/prysm/v7/time" "github.com/libp2p/go-libp2p/core/peer" ) diff --git a/beacon-chain/sync/initial-sync/blocks_fetcher_test.go b/beacon-chain/sync/initial-sync/blocks_fetcher_test.go index 5e377c9f7d..1e4be4fa5e 100644 --- a/beacon-chain/sync/initial-sync/blocks_fetcher_test.go +++ b/beacon-chain/sync/initial-sync/blocks_fetcher_test.go @@ -9,28 +9,28 @@ import ( "testing" "time" - mock "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/db/filesystem" - dbtest "github.com/OffchainLabs/prysm/v6/beacon-chain/db/testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/peers/peerdata" - p2ptest "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/startup" - beaconsync "github.com/OffchainLabs/prysm/v6/beacon-chain/sync" - "github.com/OffchainLabs/prysm/v6/cmd/beacon-chain/flags" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - leakybucket "github.com/OffchainLabs/prysm/v6/container/leaky-bucket" - "github.com/OffchainLabs/prysm/v6/container/slice" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" - "github.com/OffchainLabs/prysm/v6/time/slots" + mock "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain/testing" + 
"github.com/OffchainLabs/prysm/v7/beacon-chain/db/filesystem" + dbtest "github.com/OffchainLabs/prysm/v7/beacon-chain/db/testing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/peers/peerdata" + p2ptest "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/testing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/startup" + beaconsync "github.com/OffchainLabs/prysm/v7/beacon-chain/sync" + "github.com/OffchainLabs/prysm/v7/cmd/beacon-chain/flags" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + leakybucket "github.com/OffchainLabs/prysm/v7/container/leaky-bucket" + "github.com/OffchainLabs/prysm/v7/container/slice" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" + "github.com/OffchainLabs/prysm/v7/time/slots" libp2pcore "github.com/libp2p/go-libp2p/core" "github.com/libp2p/go-libp2p/core/network" "github.com/libp2p/go-libp2p/core/peer" diff --git a/beacon-chain/sync/initial-sync/blocks_fetcher_utils.go b/beacon-chain/sync/initial-sync/blocks_fetcher_utils.go index 22aa52202a..b44b1fad7e 100644 --- a/beacon-chain/sync/initial-sync/blocks_fetcher_utils.go +++ b/beacon-chain/sync/initial-sync/blocks_fetcher_utils.go @@ -4,15 +4,15 @@ import ( "context" "fmt" - p2pTypes "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/types" - "github.com/OffchainLabs/prysm/v6/cmd/beacon-chain/flags" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing/trace" - p2ppb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/time/slots" + p2pTypes "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/types" + "github.com/OffchainLabs/prysm/v7/cmd/beacon-chain/flags" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing/trace" + p2ppb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/time/slots" "github.com/libp2p/go-libp2p/core/peer" "github.com/pkg/errors" "github.com/sirupsen/logrus" diff --git a/beacon-chain/sync/initial-sync/blocks_fetcher_utils_test.go b/beacon-chain/sync/initial-sync/blocks_fetcher_utils_test.go index 7d71774b08..456c8a5036 100644 --- a/beacon-chain/sync/initial-sync/blocks_fetcher_utils_test.go +++ b/beacon-chain/sync/initial-sync/blocks_fetcher_utils_test.go @@ -7,23 +7,23 @@ import ( "testing" "time" - mock "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/testing" - dbtest "github.com/OffchainLabs/prysm/v6/beacon-chain/db/testing" - p2pm "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p" - p2pt "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/testing" - p2pTypes 
"github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/types" - "github.com/OffchainLabs/prysm/v6/beacon-chain/startup" - "github.com/OffchainLabs/prysm/v6/cmd/beacon-chain/flags" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - leakybucket "github.com/OffchainLabs/prysm/v6/container/leaky-bucket" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" - "github.com/OffchainLabs/prysm/v6/time/slots" + mock "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain/testing" + dbtest "github.com/OffchainLabs/prysm/v7/beacon-chain/db/testing" + p2pm "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p" + p2pt "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/testing" + p2pTypes "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/types" + "github.com/OffchainLabs/prysm/v7/beacon-chain/startup" + "github.com/OffchainLabs/prysm/v7/cmd/beacon-chain/flags" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + leakybucket "github.com/OffchainLabs/prysm/v7/container/leaky-bucket" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" + "github.com/OffchainLabs/prysm/v7/time/slots" "github.com/libp2p/go-libp2p/core/network" "github.com/libp2p/go-libp2p/core/peer" ) diff --git a/beacon-chain/sync/initial-sync/blocks_queue.go b/beacon-chain/sync/initial-sync/blocks_queue.go index 93c88179a2..8701885c18 100644 --- a/beacon-chain/sync/initial-sync/blocks_queue.go +++ b/beacon-chain/sync/initial-sync/blocks_queue.go @@ -5,15 +5,15 @@ import ( "errors" "time" - "github.com/OffchainLabs/prysm/v6/beacon-chain/db" - "github.com/OffchainLabs/prysm/v6/beacon-chain/db/filesystem" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p" - "github.com/OffchainLabs/prysm/v6/beacon-chain/startup" - beaconsync "github.com/OffchainLabs/prysm/v6/beacon-chain/sync" - "github.com/OffchainLabs/prysm/v6/beacon-chain/verification" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/beacon-chain/db" + "github.com/OffchainLabs/prysm/v7/beacon-chain/db/filesystem" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p" + "github.com/OffchainLabs/prysm/v7/beacon-chain/startup" + beaconsync "github.com/OffchainLabs/prysm/v7/beacon-chain/sync" + "github.com/OffchainLabs/prysm/v7/beacon-chain/verification" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/time/slots" "github.com/libp2p/go-libp2p/core/peer" "github.com/sirupsen/logrus" ) diff --git a/beacon-chain/sync/initial-sync/blocks_queue_test.go b/beacon-chain/sync/initial-sync/blocks_queue_test.go index 064cecf8d4..6d058f0375 100644 --- a/beacon-chain/sync/initial-sync/blocks_queue_test.go +++ b/beacon-chain/sync/initial-sync/blocks_queue_test.go @@ 
-6,25 +6,25 @@ import ( "testing" "time" - mock "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/testing" - dbtest "github.com/OffchainLabs/prysm/v6/beacon-chain/db/testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/peers" - p2pt "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/startup" - beaconsync "github.com/OffchainLabs/prysm/v6/beacon-chain/sync" - "github.com/OffchainLabs/prysm/v6/cmd/beacon-chain/flags" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - leakybucket "github.com/OffchainLabs/prysm/v6/container/leaky-bucket" - "github.com/OffchainLabs/prysm/v6/container/slice" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - eth "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" - prysmTime "github.com/OffchainLabs/prysm/v6/time" - "github.com/OffchainLabs/prysm/v6/time/slots" + mock "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain/testing" + dbtest "github.com/OffchainLabs/prysm/v7/beacon-chain/db/testing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/peers" + p2pt "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/testing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/startup" + beaconsync "github.com/OffchainLabs/prysm/v7/beacon-chain/sync" + "github.com/OffchainLabs/prysm/v7/cmd/beacon-chain/flags" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + leakybucket "github.com/OffchainLabs/prysm/v7/container/leaky-bucket" + "github.com/OffchainLabs/prysm/v7/container/slice" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + eth "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" + prysmTime "github.com/OffchainLabs/prysm/v7/time" + "github.com/OffchainLabs/prysm/v7/time/slots" "github.com/libp2p/go-libp2p/core/peer" logTest "github.com/sirupsen/logrus/hooks/test" ) diff --git a/beacon-chain/sync/initial-sync/blocks_queue_utils.go b/beacon-chain/sync/initial-sync/blocks_queue_utils.go index eae32a49c4..02b67b0e49 100644 --- a/beacon-chain/sync/initial-sync/blocks_queue_utils.go +++ b/beacon-chain/sync/initial-sync/blocks_queue_utils.go @@ -4,7 +4,7 @@ import ( "context" "errors" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" ) // resetWithBlocks removes all state machines, then re-adds enough machines to contain all provided diff --git a/beacon-chain/sync/initial-sync/downscore_test.go b/beacon-chain/sync/initial-sync/downscore_test.go index a6b21b4102..7da1dbb977 100644 --- a/beacon-chain/sync/initial-sync/downscore_test.go +++ b/beacon-chain/sync/initial-sync/downscore_test.go @@ -4,16 +4,16 @@ import ( "testing" "time" - "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain" - mock "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/peers/peerdata" - p2pt "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/testing" - 
"github.com/OffchainLabs/prysm/v6/beacon-chain/startup" - "github.com/OffchainLabs/prysm/v6/beacon-chain/sync" - "github.com/OffchainLabs/prysm/v6/beacon-chain/verification" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain" + mock "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain/testing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/peers/peerdata" + p2pt "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/testing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/startup" + "github.com/OffchainLabs/prysm/v7/beacon-chain/sync" + "github.com/OffchainLabs/prysm/v7/beacon-chain/verification" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" "github.com/libp2p/go-libp2p/core/peer" "github.com/pkg/errors" ) diff --git a/beacon-chain/sync/initial-sync/fsm.go b/beacon-chain/sync/initial-sync/fsm.go index 1bd03a5b43..284b1e5d5a 100644 --- a/beacon-chain/sync/initial-sync/fsm.go +++ b/beacon-chain/sync/initial-sync/fsm.go @@ -6,9 +6,9 @@ import ( "sort" "time" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - prysmTime "github.com/OffchainLabs/prysm/v6/time" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + prysmTime "github.com/OffchainLabs/prysm/v7/time" + "github.com/OffchainLabs/prysm/v7/time/slots" ) const ( diff --git a/beacon-chain/sync/initial-sync/fsm_benchmark_test.go b/beacon-chain/sync/initial-sync/fsm_benchmark_test.go index f23cfa3b9b..684985fcdc 100644 --- a/beacon-chain/sync/initial-sync/fsm_benchmark_test.go +++ b/beacon-chain/sync/initial-sync/fsm_benchmark_test.go @@ -3,7 +3,7 @@ package initialsync import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/require" ) func BenchmarkStateMachine_trigger(b *testing.B) { diff --git a/beacon-chain/sync/initial-sync/fsm_test.go b/beacon-chain/sync/initial-sync/fsm_test.go index 35a6f0c82f..a9965bd2af 100644 --- a/beacon-chain/sync/initial-sync/fsm_test.go +++ b/beacon-chain/sync/initial-sync/fsm_test.go @@ -5,9 +5,9 @@ import ( "fmt" "testing" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" ) func TestStateMachineManager_String(t *testing.T) { diff --git a/beacon-chain/sync/initial-sync/initial_sync_test.go b/beacon-chain/sync/initial-sync/initial_sync_test.go index b81c11b2b0..08460cf85f 100644 --- a/beacon-chain/sync/initial-sync/initial_sync_test.go +++ b/beacon-chain/sync/initial-sync/initial_sync_test.go @@ -8,27 +8,27 @@ import ( "testing" "time" - mock "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/db" - dbtest "github.com/OffchainLabs/prysm/v6/beacon-chain/db/testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/peers" - p2pt "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/testing" - p2pTypes "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/types" - "github.com/OffchainLabs/prysm/v6/beacon-chain/startup" - beaconsync 
"github.com/OffchainLabs/prysm/v6/beacon-chain/sync" - "github.com/OffchainLabs/prysm/v6/cmd/beacon-chain/flags" - "github.com/OffchainLabs/prysm/v6/config/features" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/container/slice" - "github.com/OffchainLabs/prysm/v6/crypto/hash" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" - "github.com/OffchainLabs/prysm/v6/time/slots" + mock "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain/testing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/db" + dbtest "github.com/OffchainLabs/prysm/v7/beacon-chain/db/testing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/peers" + p2pt "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/testing" + p2pTypes "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/types" + "github.com/OffchainLabs/prysm/v7/beacon-chain/startup" + beaconsync "github.com/OffchainLabs/prysm/v7/beacon-chain/sync" + "github.com/OffchainLabs/prysm/v7/cmd/beacon-chain/flags" + "github.com/OffchainLabs/prysm/v7/config/features" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/container/slice" + "github.com/OffchainLabs/prysm/v7/crypto/hash" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" + "github.com/OffchainLabs/prysm/v7/time/slots" "github.com/ethereum/go-ethereum/p2p/enr" "github.com/libp2p/go-libp2p/core/network" "github.com/libp2p/go-libp2p/core/peer" diff --git a/beacon-chain/sync/initial-sync/round_robin.go b/beacon-chain/sync/initial-sync/round_robin.go index 78436866df..a257e5dfc2 100644 --- a/beacon-chain/sync/initial-sync/round_robin.go +++ b/beacon-chain/sync/initial-sync/round_robin.go @@ -7,15 +7,15 @@ import ( "sort" "time" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/transition" - "github.com/OffchainLabs/prysm/v6/beacon-chain/das" - "github.com/OffchainLabs/prysm/v6/beacon-chain/sync" - "github.com/OffchainLabs/prysm/v6/beacon-chain/verification" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/runtime/version" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/transition" + "github.com/OffchainLabs/prysm/v7/beacon-chain/das" + "github.com/OffchainLabs/prysm/v7/beacon-chain/sync" + "github.com/OffchainLabs/prysm/v7/beacon-chain/verification" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/runtime/version" + "github.com/OffchainLabs/prysm/v7/time/slots" "github.com/libp2p/go-libp2p/core/peer" "github.com/paulbellamy/ratecounter" 
"github.com/pkg/errors" diff --git a/beacon-chain/sync/initial-sync/round_robin_test.go b/beacon-chain/sync/initial-sync/round_robin_test.go index 6ffdf54eb9..bf6f3d1673 100644 --- a/beacon-chain/sync/initial-sync/round_robin_test.go +++ b/beacon-chain/sync/initial-sync/round_robin_test.go @@ -5,22 +5,22 @@ import ( "testing" "time" - "github.com/OffchainLabs/prysm/v6/async/abool" - mock "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/das" - "github.com/OffchainLabs/prysm/v6/beacon-chain/db/filesystem" - dbtest "github.com/OffchainLabs/prysm/v6/beacon-chain/db/testing" - p2pt "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/startup" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/container/slice" - eth "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/async/abool" + mock "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain/testing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/das" + "github.com/OffchainLabs/prysm/v7/beacon-chain/db/filesystem" + dbtest "github.com/OffchainLabs/prysm/v7/beacon-chain/db/testing" + p2pt "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/testing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/startup" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/container/slice" + eth "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" "github.com/paulbellamy/ratecounter" logTest "github.com/sirupsen/logrus/hooks/test" ) diff --git a/beacon-chain/sync/initial-sync/service.go b/beacon-chain/sync/initial-sync/service.go index 7ca5da18ab..b82d288234 100644 --- a/beacon-chain/sync/initial-sync/service.go +++ b/beacon-chain/sync/initial-sync/service.go @@ -8,29 +8,29 @@ import ( "fmt" "time" - "github.com/OffchainLabs/prysm/v6/async/abool" - "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain" - blockfeed "github.com/OffchainLabs/prysm/v6/beacon-chain/core/feed/block" - statefeed "github.com/OffchainLabs/prysm/v6/beacon-chain/core/feed/state" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/peerdas" - "github.com/OffchainLabs/prysm/v6/beacon-chain/das" - "github.com/OffchainLabs/prysm/v6/beacon-chain/db" - "github.com/OffchainLabs/prysm/v6/beacon-chain/db/filesystem" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p" - p2ptypes "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/types" - "github.com/OffchainLabs/prysm/v6/beacon-chain/startup" - "github.com/OffchainLabs/prysm/v6/beacon-chain/sync" - "github.com/OffchainLabs/prysm/v6/beacon-chain/verification" - "github.com/OffchainLabs/prysm/v6/cmd/beacon-chain/flags" - 
"github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/crypto/rand" - eth "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime" - "github.com/OffchainLabs/prysm/v6/runtime/version" - prysmTime "github.com/OffchainLabs/prysm/v6/time" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/async/abool" + "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain" + blockfeed "github.com/OffchainLabs/prysm/v7/beacon-chain/core/feed/block" + statefeed "github.com/OffchainLabs/prysm/v7/beacon-chain/core/feed/state" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/peerdas" + "github.com/OffchainLabs/prysm/v7/beacon-chain/das" + "github.com/OffchainLabs/prysm/v7/beacon-chain/db" + "github.com/OffchainLabs/prysm/v7/beacon-chain/db/filesystem" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p" + p2ptypes "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/types" + "github.com/OffchainLabs/prysm/v7/beacon-chain/startup" + "github.com/OffchainLabs/prysm/v7/beacon-chain/sync" + "github.com/OffchainLabs/prysm/v7/beacon-chain/verification" + "github.com/OffchainLabs/prysm/v7/cmd/beacon-chain/flags" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/crypto/rand" + eth "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime" + "github.com/OffchainLabs/prysm/v7/runtime/version" + prysmTime "github.com/OffchainLabs/prysm/v7/time" + "github.com/OffchainLabs/prysm/v7/time/slots" "github.com/libp2p/go-libp2p/core/peer" "github.com/paulbellamy/ratecounter" "github.com/pkg/errors" diff --git a/beacon-chain/sync/initial-sync/service_test.go b/beacon-chain/sync/initial-sync/service_test.go index 6dc7cce17a..d7ffa7533a 100644 --- a/beacon-chain/sync/initial-sync/service_test.go +++ b/beacon-chain/sync/initial-sync/service_test.go @@ -7,31 +7,31 @@ import ( "testing" "time" - "github.com/OffchainLabs/prysm/v6/async/abool" - "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/kzg" - mock "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/peerdas" - "github.com/OffchainLabs/prysm/v6/beacon-chain/db/filesystem" - "github.com/OffchainLabs/prysm/v6/beacon-chain/db/kv" - dbtest "github.com/OffchainLabs/prysm/v6/beacon-chain/db/testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/peers" - p2ptest "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/testing" - testp2p "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/startup" - prysmSync "github.com/OffchainLabs/prysm/v6/beacon-chain/sync" - "github.com/OffchainLabs/prysm/v6/beacon-chain/verification" - "github.com/OffchainLabs/prysm/v6/cmd/beacon-chain/flags" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - eth "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - 
"github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/async/abool" + "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain/kzg" + mock "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain/testing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/peerdas" + "github.com/OffchainLabs/prysm/v7/beacon-chain/db/filesystem" + "github.com/OffchainLabs/prysm/v7/beacon-chain/db/kv" + dbtest "github.com/OffchainLabs/prysm/v7/beacon-chain/db/testing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/peers" + p2ptest "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/testing" + testp2p "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/testing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/startup" + prysmSync "github.com/OffchainLabs/prysm/v7/beacon-chain/sync" + "github.com/OffchainLabs/prysm/v7/beacon-chain/verification" + "github.com/OffchainLabs/prysm/v7/cmd/beacon-chain/flags" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + eth "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" + "github.com/OffchainLabs/prysm/v7/time/slots" "github.com/libp2p/go-libp2p" "github.com/libp2p/go-libp2p/core/crypto" "github.com/libp2p/go-libp2p/core/network" diff --git a/beacon-chain/sync/initial-sync/testing/BUILD.bazel b/beacon-chain/sync/initial-sync/testing/BUILD.bazel index 93f9d425d4..f17a33ff10 100644 --- a/beacon-chain/sync/initial-sync/testing/BUILD.bazel +++ b/beacon-chain/sync/initial-sync/testing/BUILD.bazel @@ -4,7 +4,7 @@ go_library( name = "go_default_library", testonly = True, srcs = ["mock.go"], - importpath = "github.com/OffchainLabs/prysm/v6/beacon-chain/sync/initial-sync/testing", + importpath = "github.com/OffchainLabs/prysm/v7/beacon-chain/sync/initial-sync/testing", visibility = [ "//beacon-chain:__subpackages__", ], diff --git a/beacon-chain/sync/kzg_batch_verifier_test.go b/beacon-chain/sync/kzg_batch_verifier_test.go index 7733465d4c..5929b139bd 100644 --- a/beacon-chain/sync/kzg_batch_verifier_test.go +++ b/beacon-chain/sync/kzg_batch_verifier_test.go @@ -6,12 +6,12 @@ import ( "testing" "time" - "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/kzg" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain/kzg" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" pubsub "github.com/libp2p/go-libp2p-pubsub" ) diff --git a/beacon-chain/sync/metrics.go b/beacon-chain/sync/metrics.go index 96601bac06..99a828ae7f 100644 --- a/beacon-chain/sync/metrics.go +++ 
b/beacon-chain/sync/metrics.go @@ -5,12 +5,12 @@ import ( "reflect" "strings" - "github.com/OffchainLabs/prysm/v6/beacon-chain/cache" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p" - "github.com/OffchainLabs/prysm/v6/cmd/beacon-chain/flags" - "github.com/OffchainLabs/prysm/v6/config/params" - pb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/beacon-chain/cache" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p" + "github.com/OffchainLabs/prysm/v7/cmd/beacon-chain/flags" + "github.com/OffchainLabs/prysm/v7/config/params" + pb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/time/slots" "github.com/prometheus/client_golang/prometheus" "github.com/prometheus/client_golang/prometheus/promauto" ) diff --git a/beacon-chain/sync/options.go b/beacon-chain/sync/options.go index d30a8ef021..49ec2c8fbd 100644 --- a/beacon-chain/sync/options.go +++ b/beacon-chain/sync/options.go @@ -1,26 +1,26 @@ package sync import ( - "github.com/OffchainLabs/prysm/v6/async/event" - "github.com/OffchainLabs/prysm/v6/beacon-chain/cache" - blockfeed "github.com/OffchainLabs/prysm/v6/beacon-chain/core/feed/block" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/feed/operation" - statefeed "github.com/OffchainLabs/prysm/v6/beacon-chain/core/feed/state" - "github.com/OffchainLabs/prysm/v6/beacon-chain/db" - "github.com/OffchainLabs/prysm/v6/beacon-chain/db/filesystem" - "github.com/OffchainLabs/prysm/v6/beacon-chain/execution" - lightClient "github.com/OffchainLabs/prysm/v6/beacon-chain/light-client" - "github.com/OffchainLabs/prysm/v6/beacon-chain/operations/attestations" - "github.com/OffchainLabs/prysm/v6/beacon-chain/operations/blstoexec" - "github.com/OffchainLabs/prysm/v6/beacon-chain/operations/slashings" - "github.com/OffchainLabs/prysm/v6/beacon-chain/operations/synccommittee" - "github.com/OffchainLabs/prysm/v6/beacon-chain/operations/voluntaryexits" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p" - "github.com/OffchainLabs/prysm/v6/beacon-chain/startup" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state/stategen" - "github.com/OffchainLabs/prysm/v6/beacon-chain/sync/backfill/coverage" - "github.com/OffchainLabs/prysm/v6/beacon-chain/verification" - "github.com/OffchainLabs/prysm/v6/crypto/rand" + "github.com/OffchainLabs/prysm/v7/async/event" + "github.com/OffchainLabs/prysm/v7/beacon-chain/cache" + blockfeed "github.com/OffchainLabs/prysm/v7/beacon-chain/core/feed/block" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/feed/operation" + statefeed "github.com/OffchainLabs/prysm/v7/beacon-chain/core/feed/state" + "github.com/OffchainLabs/prysm/v7/beacon-chain/db" + "github.com/OffchainLabs/prysm/v7/beacon-chain/db/filesystem" + "github.com/OffchainLabs/prysm/v7/beacon-chain/execution" + lightClient "github.com/OffchainLabs/prysm/v7/beacon-chain/light-client" + "github.com/OffchainLabs/prysm/v7/beacon-chain/operations/attestations" + "github.com/OffchainLabs/prysm/v7/beacon-chain/operations/blstoexec" + "github.com/OffchainLabs/prysm/v7/beacon-chain/operations/slashings" + "github.com/OffchainLabs/prysm/v7/beacon-chain/operations/synccommittee" + "github.com/OffchainLabs/prysm/v7/beacon-chain/operations/voluntaryexits" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p" + "github.com/OffchainLabs/prysm/v7/beacon-chain/startup" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state/stategen" + 
"github.com/OffchainLabs/prysm/v7/beacon-chain/sync/backfill/coverage" + "github.com/OffchainLabs/prysm/v7/beacon-chain/verification" + "github.com/OffchainLabs/prysm/v7/crypto/rand" ) type Option func(s *Service) error diff --git a/beacon-chain/sync/pending_attestations_queue.go b/beacon-chain/sync/pending_attestations_queue.go index 64c0033ad1..bb30118e49 100644 --- a/beacon-chain/sync/pending_attestations_queue.go +++ b/beacon-chain/sync/pending_attestations_queue.go @@ -7,22 +7,22 @@ import ( "fmt" "slices" - "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/blocks" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/feed" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/feed/operation" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/config/features" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/crypto/rand" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing/trace" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" - "github.com/OffchainLabs/prysm/v6/time" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/blocks" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/feed" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/feed/operation" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/config/features" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/crypto/rand" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing/trace" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" + "github.com/OffchainLabs/prysm/v7/time" + "github.com/OffchainLabs/prysm/v7/time/slots" pubsub "github.com/libp2p/go-libp2p-pubsub" "github.com/sirupsen/logrus" ) diff --git a/beacon-chain/sync/pending_attestations_queue_bucket_test.go b/beacon-chain/sync/pending_attestations_queue_bucket_test.go index 59b72e71c1..f6d72d1f34 100644 --- a/beacon-chain/sync/pending_attestations_queue_bucket_test.go +++ b/beacon-chain/sync/pending_attestations_queue_bucket_test.go @@ -4,12 +4,12 @@ import ( "context" "testing" - mockChain "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/testing" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + mockChain "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain/testing" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" logTest "github.com/sirupsen/logrus/hooks/test" ) diff --git 
a/beacon-chain/sync/pending_attestations_queue_test.go b/beacon-chain/sync/pending_attestations_queue_test.go index fbe2113ef8..89db1a4803 100644 --- a/beacon-chain/sync/pending_attestations_queue_test.go +++ b/beacon-chain/sync/pending_attestations_queue_test.go @@ -9,30 +9,30 @@ import ( "time" "github.com/OffchainLabs/go-bitfield" - "github.com/OffchainLabs/prysm/v6/async/abool" - mock "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/feed" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/feed/operation" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/signing" - dbtest "github.com/OffchainLabs/prysm/v6/beacon-chain/db/testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/operations/attestations" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/peers" - p2ptest "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/startup" - mockSync "github.com/OffchainLabs/prysm/v6/beacon-chain/sync/initial-sync/testing" - lruwrpr "github.com/OffchainLabs/prysm/v6/cache/lru" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/crypto/bls" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1/attestation" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" - prysmTime "github.com/OffchainLabs/prysm/v6/time" + "github.com/OffchainLabs/prysm/v7/async/abool" + mock "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain/testing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/feed" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/feed/operation" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/signing" + dbtest "github.com/OffchainLabs/prysm/v7/beacon-chain/db/testing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/operations/attestations" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/peers" + p2ptest "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/testing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/startup" + mockSync "github.com/OffchainLabs/prysm/v7/beacon-chain/sync/initial-sync/testing" + lruwrpr "github.com/OffchainLabs/prysm/v7/cache/lru" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/crypto/bls" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1/attestation" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" + prysmTime "github.com/OffchainLabs/prysm/v7/time" "github.com/ethereum/go-ethereum/p2p/enr" pubsub "github.com/libp2p/go-libp2p-pubsub" pubsubpb "github.com/libp2p/go-libp2p-pubsub/pb" diff --git a/beacon-chain/sync/pending_blocks_queue.go b/beacon-chain/sync/pending_blocks_queue.go index 3a0bacbba4..213a97e6c9 
100644 --- a/beacon-chain/sync/pending_blocks_queue.go +++ b/beacon-chain/sync/pending_blocks_queue.go @@ -8,20 +8,20 @@ import ( "sync" "time" - "github.com/OffchainLabs/prysm/v6/async" - "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain" - p2ptypes "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/types" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/crypto/rand" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/encoding/ssz/equality" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing" - prysmTrace "github.com/OffchainLabs/prysm/v6/monitoring/tracing/trace" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/async" + "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain" + p2ptypes "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/types" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/crypto/rand" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/encoding/ssz/equality" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing" + prysmTrace "github.com/OffchainLabs/prysm/v7/monitoring/tracing/trace" + "github.com/OffchainLabs/prysm/v7/time/slots" "github.com/libp2p/go-libp2p/core" "github.com/pkg/errors" "github.com/sirupsen/logrus" diff --git a/beacon-chain/sync/pending_blocks_queue_test.go b/beacon-chain/sync/pending_blocks_queue_test.go index ab0dfe78a7..70a8688b68 100644 --- a/beacon-chain/sync/pending_blocks_queue_test.go +++ b/beacon-chain/sync/pending_blocks_queue_test.go @@ -6,25 +6,25 @@ import ( "testing" "time" - mock "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/signing" - dbtest "github.com/OffchainLabs/prysm/v6/beacon-chain/db/testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/execution" - doublylinkedtree "github.com/OffchainLabs/prysm/v6/beacon-chain/forkchoice/doubly-linked-tree" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/peers" - p2ptest "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/testing" - p2ptypes "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/types" - "github.com/OffchainLabs/prysm/v6/beacon-chain/startup" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state/stategen" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/crypto/rand" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + mock "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain/testing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + 
"github.com/OffchainLabs/prysm/v7/beacon-chain/core/signing" + dbtest "github.com/OffchainLabs/prysm/v7/beacon-chain/db/testing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/execution" + doublylinkedtree "github.com/OffchainLabs/prysm/v7/beacon-chain/forkchoice/doubly-linked-tree" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/peers" + p2ptest "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/testing" + p2ptypes "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/types" + "github.com/OffchainLabs/prysm/v7/beacon-chain/startup" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state/stategen" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/crypto/rand" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" "github.com/ethereum/go-ethereum/p2p/enr" "github.com/libp2p/go-libp2p/core/network" "github.com/libp2p/go-libp2p/core/protocol" diff --git a/beacon-chain/sync/rate_limiter.go b/beacon-chain/sync/rate_limiter.go index 00df4b9481..a19c10e8b9 100644 --- a/beacon-chain/sync/rate_limiter.go +++ b/beacon-chain/sync/rate_limiter.go @@ -11,10 +11,10 @@ import ( "github.com/sirupsen/logrus" "github.com/trailofbits/go-mutexasserts" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p" - p2ptypes "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/types" - "github.com/OffchainLabs/prysm/v6/cmd/beacon-chain/flags" - leakybucket "github.com/OffchainLabs/prysm/v6/container/leaky-bucket" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p" + p2ptypes "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/types" + "github.com/OffchainLabs/prysm/v7/cmd/beacon-chain/flags" + leakybucket "github.com/OffchainLabs/prysm/v7/container/leaky-bucket" ) const defaultBurstLimit = 5 diff --git a/beacon-chain/sync/rate_limiter_test.go b/beacon-chain/sync/rate_limiter_test.go index 6d550ff260..9d10390a10 100644 --- a/beacon-chain/sync/rate_limiter_test.go +++ b/beacon-chain/sync/rate_limiter_test.go @@ -5,12 +5,12 @@ import ( "testing" "time" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p" - mockp2p "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/testing" - p2ptypes "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/types" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p" + mockp2p "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/testing" + p2ptypes "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/types" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" "github.com/libp2p/go-libp2p/core/network" "github.com/libp2p/go-libp2p/core/protocol" ) diff --git a/beacon-chain/sync/rpc.go b/beacon-chain/sync/rpc.go index edfa79c241..66156895c1 100644 --- a/beacon-chain/sync/rpc.go +++ b/beacon-chain/sync/rpc.go @@ -7,13 +7,13 @@ import ( "strings" "time" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p" - p2ptypes "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/types" - "github.com/OffchainLabs/prysm/v6/config/features" - "github.com/OffchainLabs/prysm/v6/config/params" - 
"github.com/OffchainLabs/prysm/v6/monitoring/tracing" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing/trace" - "github.com/OffchainLabs/prysm/v6/runtime/version" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p" + p2ptypes "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/types" + "github.com/OffchainLabs/prysm/v7/config/features" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing/trace" + "github.com/OffchainLabs/prysm/v7/runtime/version" libp2pcore "github.com/libp2p/go-libp2p/core" "github.com/libp2p/go-libp2p/core/network" "github.com/pkg/errors" diff --git a/beacon-chain/sync/rpc_beacon_blocks_by_range.go b/beacon-chain/sync/rpc_beacon_blocks_by_range.go index daf3d911f5..fd668fe405 100644 --- a/beacon-chain/sync/rpc_beacon_blocks_by_range.go +++ b/beacon-chain/sync/rpc_beacon_blocks_by_range.go @@ -4,16 +4,16 @@ import ( "context" "time" - p2ptypes "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/types" - "github.com/OffchainLabs/prysm/v6/cmd/beacon-chain/flags" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing/trace" - pb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/time/slots" + p2ptypes "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/types" + "github.com/OffchainLabs/prysm/v7/cmd/beacon-chain/flags" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing/trace" + pb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/time/slots" libp2pcore "github.com/libp2p/go-libp2p/core" "github.com/libp2p/go-libp2p/core/peer" "github.com/pkg/errors" diff --git a/beacon-chain/sync/rpc_beacon_blocks_by_range_test.go b/beacon-chain/sync/rpc_beacon_blocks_by_range_test.go index 0b893b50aa..c279087f5a 100644 --- a/beacon-chain/sync/rpc_beacon_blocks_by_range_test.go +++ b/beacon-chain/sync/rpc_beacon_blocks_by_range_test.go @@ -7,28 +7,28 @@ import ( "testing" "time" - chainMock "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/testing" - db2 "github.com/OffchainLabs/prysm/v6/beacon-chain/db" - db "github.com/OffchainLabs/prysm/v6/beacon-chain/db/testing" - mockExecution "github.com/OffchainLabs/prysm/v6/beacon-chain/execution/testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/encoder" - p2ptest "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/testing" - p2ptypes "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/types" - "github.com/OffchainLabs/prysm/v6/beacon-chain/startup" - "github.com/OffchainLabs/prysm/v6/cmd/beacon-chain/flags" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - leakybucket 
"github.com/OffchainLabs/prysm/v6/container/leaky-bucket" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - enginev1 "github.com/OffchainLabs/prysm/v6/proto/engine/v1" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" - "github.com/OffchainLabs/prysm/v6/time/slots" + chainMock "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain/testing" + db2 "github.com/OffchainLabs/prysm/v7/beacon-chain/db" + db "github.com/OffchainLabs/prysm/v7/beacon-chain/db/testing" + mockExecution "github.com/OffchainLabs/prysm/v7/beacon-chain/execution/testing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/encoder" + p2ptest "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/testing" + p2ptypes "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/types" + "github.com/OffchainLabs/prysm/v7/beacon-chain/startup" + "github.com/OffchainLabs/prysm/v7/cmd/beacon-chain/flags" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + leakybucket "github.com/OffchainLabs/prysm/v7/container/leaky-bucket" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + enginev1 "github.com/OffchainLabs/prysm/v7/proto/engine/v1" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" + "github.com/OffchainLabs/prysm/v7/time/slots" "github.com/ethereum/go-ethereum/common" gethTypes "github.com/ethereum/go-ethereum/core/types" "github.com/libp2p/go-libp2p/core/network" diff --git a/beacon-chain/sync/rpc_beacon_blocks_by_root.go b/beacon-chain/sync/rpc_beacon_blocks_by_root.go index fb0c904594..c9afe0a9b7 100644 --- a/beacon-chain/sync/rpc_beacon_blocks_by_root.go +++ b/beacon-chain/sync/rpc_beacon_blocks_by_root.go @@ -4,20 +4,20 @@ import ( "context" "fmt" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/peerdas" - "github.com/OffchainLabs/prysm/v6/beacon-chain/db/filesystem" - "github.com/OffchainLabs/prysm/v6/beacon-chain/execution" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/types" - "github.com/OffchainLabs/prysm/v6/beacon-chain/sync/verify" - "github.com/OffchainLabs/prysm/v6/beacon-chain/verification" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - eth "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/peerdas" + "github.com/OffchainLabs/prysm/v7/beacon-chain/db/filesystem" + "github.com/OffchainLabs/prysm/v7/beacon-chain/execution" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/types" + "github.com/OffchainLabs/prysm/v7/beacon-chain/sync/verify" + "github.com/OffchainLabs/prysm/v7/beacon-chain/verification" + fieldparams 
"github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + eth "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" + "github.com/OffchainLabs/prysm/v7/time/slots" libp2pcore "github.com/libp2p/go-libp2p/core" "github.com/libp2p/go-libp2p/core/peer" "github.com/pkg/errors" diff --git a/beacon-chain/sync/rpc_beacon_blocks_by_root_test.go b/beacon-chain/sync/rpc_beacon_blocks_by_root_test.go index 7fe47a08d7..3158f84c5e 100644 --- a/beacon-chain/sync/rpc_beacon_blocks_by_root_test.go +++ b/beacon-chain/sync/rpc_beacon_blocks_by_root_test.go @@ -6,28 +6,28 @@ import ( "testing" "time" - mock "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/transition" - "github.com/OffchainLabs/prysm/v6/beacon-chain/db/filesystem" - db "github.com/OffchainLabs/prysm/v6/beacon-chain/db/testing" - mockExecution "github.com/OffchainLabs/prysm/v6/beacon-chain/execution/testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/peers" - p2ptest "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/testing" - p2pTypes "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/types" - "github.com/OffchainLabs/prysm/v6/beacon-chain/startup" - "github.com/OffchainLabs/prysm/v6/beacon-chain/verification" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - leakybucket "github.com/OffchainLabs/prysm/v6/container/leaky-bucket" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - enginev1 "github.com/OffchainLabs/prysm/v6/proto/engine/v1" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + mock "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain/testing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/transition" + "github.com/OffchainLabs/prysm/v7/beacon-chain/db/filesystem" + db "github.com/OffchainLabs/prysm/v7/beacon-chain/db/testing" + mockExecution "github.com/OffchainLabs/prysm/v7/beacon-chain/execution/testing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/peers" + p2ptest "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/testing" + p2pTypes "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/types" + "github.com/OffchainLabs/prysm/v7/beacon-chain/startup" + "github.com/OffchainLabs/prysm/v7/beacon-chain/verification" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + leakybucket "github.com/OffchainLabs/prysm/v7/container/leaky-bucket" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + enginev1 "github.com/OffchainLabs/prysm/v7/proto/engine/v1" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + 
"github.com/OffchainLabs/prysm/v7/testing/util" "github.com/ethereum/go-ethereum/common" gethTypes "github.com/ethereum/go-ethereum/core/types" "github.com/ethereum/go-ethereum/p2p/enr" diff --git a/beacon-chain/sync/rpc_blob_sidecars_by_range.go b/beacon-chain/sync/rpc_blob_sidecars_by_range.go index ff3e6e28ae..7b4f0f3838 100644 --- a/beacon-chain/sync/rpc_blob_sidecars_by_range.go +++ b/beacon-chain/sync/rpc_blob_sidecars_by_range.go @@ -5,15 +5,15 @@ import ( "math" "time" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p" - p2ptypes "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/types" - "github.com/OffchainLabs/prysm/v6/cmd/beacon-chain/flags" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing/trace" - pb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p" + p2ptypes "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/types" + "github.com/OffchainLabs/prysm/v7/cmd/beacon-chain/flags" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing/trace" + pb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/time/slots" libp2pcore "github.com/libp2p/go-libp2p/core" "github.com/pkg/errors" ) diff --git a/beacon-chain/sync/rpc_blob_sidecars_by_range_test.go b/beacon-chain/sync/rpc_blob_sidecars_by_range_test.go index b79c65b1bc..637d37bb03 100644 --- a/beacon-chain/sync/rpc_blob_sidecars_by_range_test.go +++ b/beacon-chain/sync/rpc_blob_sidecars_by_range_test.go @@ -3,15 +3,15 @@ package sync import ( "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p" - "github.com/OffchainLabs/prysm/v6/beacon-chain/startup" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - types "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/genesis" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p" + "github.com/OffchainLabs/prysm/v7/beacon-chain/startup" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + types "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/genesis" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" ) func (c *blobsTestCase) defaultOldestSlotByRange(t *testing.T) types.Slot { diff --git a/beacon-chain/sync/rpc_blob_sidecars_by_root.go b/beacon-chain/sync/rpc_blob_sidecars_by_root.go index cbb75cacc0..a8415d8849 100644 --- a/beacon-chain/sync/rpc_blob_sidecars_by_root.go +++ b/beacon-chain/sync/rpc_blob_sidecars_by_root.go @@ -6,17 +6,17 @@ import ( "sort" "time" - "github.com/OffchainLabs/prysm/v6/beacon-chain/db" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/types" - "github.com/OffchainLabs/prysm/v6/cmd/beacon-chain/flags" - fieldparams 
"github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing/trace" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/beacon-chain/db" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/types" + "github.com/OffchainLabs/prysm/v7/cmd/beacon-chain/flags" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing/trace" + "github.com/OffchainLabs/prysm/v7/time/slots" libp2pcore "github.com/libp2p/go-libp2p/core" "github.com/pkg/errors" "github.com/sirupsen/logrus" diff --git a/beacon-chain/sync/rpc_blob_sidecars_by_root_test.go b/beacon-chain/sync/rpc_blob_sidecars_by_root_test.go index 5fc6660cc7..e1888f101b 100644 --- a/beacon-chain/sync/rpc_blob_sidecars_by_root_test.go +++ b/beacon-chain/sync/rpc_blob_sidecars_by_root_test.go @@ -5,17 +5,17 @@ import ( "sort" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p" - p2pTypes "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/types" - "github.com/OffchainLabs/prysm/v6/beacon-chain/startup" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - types "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/genesis" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p" + p2pTypes "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/types" + "github.com/OffchainLabs/prysm/v7/beacon-chain/startup" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + types "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/genesis" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" "github.com/libp2p/go-libp2p/core/network" ) diff --git a/beacon-chain/sync/rpc_chunked_response.go b/beacon-chain/sync/rpc_chunked_response.go index 239070a2f8..d89a83d440 100644 --- a/beacon-chain/sync/rpc_chunked_response.go +++ b/beacon-chain/sync/rpc_chunked_response.go @@ -1,15 +1,15 @@ package sync import ( - "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/encoder" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/types" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/time/slots" + 
"github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/encoder" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/types" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/time/slots" libp2pcore "github.com/libp2p/go-libp2p/core" "github.com/pkg/errors" ) diff --git a/beacon-chain/sync/rpc_data_column_sidecars_by_range.go b/beacon-chain/sync/rpc_data_column_sidecars_by_range.go index 9c9fb3c8f6..90e9eee2a4 100644 --- a/beacon-chain/sync/rpc_data_column_sidecars_by_range.go +++ b/beacon-chain/sync/rpc_data_column_sidecars_by_range.go @@ -5,13 +5,13 @@ import ( "slices" "time" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - p2ptypes "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/types" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing/trace" - pb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + p2ptypes "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/types" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing/trace" + pb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" libp2pcore "github.com/libp2p/go-libp2p/core" "github.com/pkg/errors" diff --git a/beacon-chain/sync/rpc_data_column_sidecars_by_range_test.go b/beacon-chain/sync/rpc_data_column_sidecars_by_range_test.go index 8b7d212668..4c3df347fe 100644 --- a/beacon-chain/sync/rpc_data_column_sidecars_by_range_test.go +++ b/beacon-chain/sync/rpc_data_column_sidecars_by_range_test.go @@ -14,19 +14,19 @@ import ( "github.com/pkg/errors" "github.com/stretchr/testify/require" - chainMock "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/db/filesystem" - testDB "github.com/OffchainLabs/prysm/v6/beacon-chain/db/testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p" - p2ptest "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/startup" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - pb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/util" + chainMock "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain/testing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/db/filesystem" + testDB "github.com/OffchainLabs/prysm/v7/beacon-chain/db/testing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p" + p2ptest "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/testing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/startup" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + 
"github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + pb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/util" ) func TestDataColumnSidecarsByRangeRPCHandler(t *testing.T) { diff --git a/beacon-chain/sync/rpc_data_column_sidecars_by_root.go b/beacon-chain/sync/rpc_data_column_sidecars_by_root.go index 983000ea3c..28fb61f233 100644 --- a/beacon-chain/sync/rpc_data_column_sidecars_by_root.go +++ b/beacon-chain/sync/rpc_data_column_sidecars_by_root.go @@ -7,16 +7,16 @@ import ( "slices" "time" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/types" - "github.com/OffchainLabs/prysm/v6/cmd/beacon-chain/flags" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing/trace" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/types" + "github.com/OffchainLabs/prysm/v7/cmd/beacon-chain/flags" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing/trace" + "github.com/OffchainLabs/prysm/v7/time/slots" libp2pcore "github.com/libp2p/go-libp2p/core" "github.com/pkg/errors" "github.com/sirupsen/logrus" diff --git a/beacon-chain/sync/rpc_data_column_sidecars_by_root_test.go b/beacon-chain/sync/rpc_data_column_sidecars_by_root_test.go index a4ccae99a9..2b576eb6df 100644 --- a/beacon-chain/sync/rpc_data_column_sidecars_by_root_test.go +++ b/beacon-chain/sync/rpc_data_column_sidecars_by_root_test.go @@ -7,21 +7,21 @@ import ( "testing" "time" - chainMock "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/db/filesystem" - testDB "github.com/OffchainLabs/prysm/v6/beacon-chain/db/testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/encoder" - p2ptest "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/types" - "github.com/OffchainLabs/prysm/v6/beacon-chain/startup" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + chainMock "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain/testing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/db/filesystem" + testDB "github.com/OffchainLabs/prysm/v7/beacon-chain/db/testing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/encoder" + p2ptest 
"github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/testing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/types" + "github.com/OffchainLabs/prysm/v7/beacon-chain/startup" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" "github.com/libp2p/go-libp2p/core/network" "github.com/libp2p/go-libp2p/core/protocol" "github.com/pkg/errors" diff --git a/beacon-chain/sync/rpc_goodbye.go b/beacon-chain/sync/rpc_goodbye.go index a7ed56c424..63c2f0f920 100644 --- a/beacon-chain/sync/rpc_goodbye.go +++ b/beacon-chain/sync/rpc_goodbye.go @@ -5,11 +5,11 @@ import ( "fmt" "time" - "github.com/OffchainLabs/prysm/v6/async" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p" - p2ptypes "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/types" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/async" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p" + p2ptypes "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/types" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/time/slots" libp2pcore "github.com/libp2p/go-libp2p/core" "github.com/libp2p/go-libp2p/core/network" "github.com/libp2p/go-libp2p/core/peer" diff --git a/beacon-chain/sync/rpc_goodbye_test.go b/beacon-chain/sync/rpc_goodbye_test.go index baf48c3e61..17b2f8210c 100644 --- a/beacon-chain/sync/rpc_goodbye_test.go +++ b/beacon-chain/sync/rpc_goodbye_test.go @@ -5,17 +5,17 @@ import ( "testing" "time" - mock "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/testing" - db "github.com/OffchainLabs/prysm/v6/beacon-chain/db/testing" - p2ptest "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/testing" - p2ptypes "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/types" - "github.com/OffchainLabs/prysm/v6/beacon-chain/startup" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - leakybucket "github.com/OffchainLabs/prysm/v6/container/leaky-bucket" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + mock "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain/testing" + db "github.com/OffchainLabs/prysm/v7/beacon-chain/db/testing" + p2ptest "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/testing" + p2ptypes "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/types" + "github.com/OffchainLabs/prysm/v7/beacon-chain/startup" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + leakybucket "github.com/OffchainLabs/prysm/v7/container/leaky-bucket" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" "github.com/libp2p/go-libp2p/core/network" "github.com/libp2p/go-libp2p/core/protocol" ) diff --git a/beacon-chain/sync/rpc_handler_test.go b/beacon-chain/sync/rpc_handler_test.go index 28b52021a6..d020a8514b 100644 --- a/beacon-chain/sync/rpc_handler_test.go +++ b/beacon-chain/sync/rpc_handler_test.go @@ -5,9 +5,9 @@ import ( 
"testing" "time" - p2ptest "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/testing" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + p2ptest "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/testing" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" "github.com/libp2p/go-libp2p/core/network" "github.com/libp2p/go-libp2p/core/protocol" ) diff --git a/beacon-chain/sync/rpc_light_client.go b/beacon-chain/sync/rpc_light_client.go index 3de09509eb..c31f7d845f 100644 --- a/beacon-chain/sync/rpc_light_client.go +++ b/beacon-chain/sync/rpc_light_client.go @@ -4,14 +4,14 @@ import ( "context" "fmt" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/types" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/math" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing/trace" - eth "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/types" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/math" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing/trace" + eth "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" libp2pcore "github.com/libp2p/go-libp2p/core" ) diff --git a/beacon-chain/sync/rpc_light_client_test.go b/beacon-chain/sync/rpc_light_client_test.go index e96f74155b..e8cc016821 100644 --- a/beacon-chain/sync/rpc_light_client_test.go +++ b/beacon-chain/sync/rpc_light_client_test.go @@ -5,23 +5,23 @@ import ( "testing" "time" - "github.com/OffchainLabs/prysm/v6/async/abool" - "github.com/OffchainLabs/prysm/v6/async/event" - mockChain "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/testing" - db "github.com/OffchainLabs/prysm/v6/beacon-chain/db/testing" - lightClient "github.com/OffchainLabs/prysm/v6/beacon-chain/light-client" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p" - p2ptest "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/startup" - mockSync "github.com/OffchainLabs/prysm/v6/beacon-chain/sync/initial-sync/testing" - "github.com/OffchainLabs/prysm/v6/config/features" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - leakybucket "github.com/OffchainLabs/prysm/v6/container/leaky-bucket" - pb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/async/abool" + "github.com/OffchainLabs/prysm/v7/async/event" + mockChain "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain/testing" + db "github.com/OffchainLabs/prysm/v7/beacon-chain/db/testing" + lightClient "github.com/OffchainLabs/prysm/v7/beacon-chain/light-client" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p" + p2ptest "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/testing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/startup" + mockSync 
"github.com/OffchainLabs/prysm/v7/beacon-chain/sync/initial-sync/testing" + "github.com/OffchainLabs/prysm/v7/config/features" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + leakybucket "github.com/OffchainLabs/prysm/v7/container/leaky-bucket" + pb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" "github.com/libp2p/go-libp2p/core/network" "github.com/libp2p/go-libp2p/core/protocol" ) diff --git a/beacon-chain/sync/rpc_metadata.go b/beacon-chain/sync/rpc_metadata.go index 5fe051741a..52b7eefa05 100644 --- a/beacon-chain/sync/rpc_metadata.go +++ b/beacon-chain/sync/rpc_metadata.go @@ -4,14 +4,14 @@ import ( "context" "github.com/OffchainLabs/go-bitfield" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/types" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/wrapper" - pb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1/metadata" - "github.com/OffchainLabs/prysm/v6/runtime/version" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/types" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/wrapper" + pb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1/metadata" + "github.com/OffchainLabs/prysm/v7/runtime/version" + "github.com/OffchainLabs/prysm/v7/time/slots" libp2pcore "github.com/libp2p/go-libp2p/core" "github.com/libp2p/go-libp2p/core/peer" "github.com/pkg/errors" diff --git a/beacon-chain/sync/rpc_metadata_test.go b/beacon-chain/sync/rpc_metadata_test.go index cba6f95f97..3c1192a579 100644 --- a/beacon-chain/sync/rpc_metadata_test.go +++ b/beacon-chain/sync/rpc_metadata_test.go @@ -7,20 +7,20 @@ import ( "time" "github.com/OffchainLabs/go-bitfield" - mock "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/testing" - db "github.com/OffchainLabs/prysm/v6/beacon-chain/db/testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p" - p2ptest "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/startup" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/wrapper" - leakybucket "github.com/OffchainLabs/prysm/v6/container/leaky-bucket" - "github.com/OffchainLabs/prysm/v6/encoding/ssz/equality" - pb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1/metadata" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + mock "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain/testing" + db "github.com/OffchainLabs/prysm/v7/beacon-chain/db/testing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p" + p2ptest "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/testing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/startup" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/wrapper" + leakybucket "github.com/OffchainLabs/prysm/v7/container/leaky-bucket" + 
"github.com/OffchainLabs/prysm/v7/encoding/ssz/equality" + pb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1/metadata" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" "github.com/libp2p/go-libp2p" "github.com/libp2p/go-libp2p/core/network" "github.com/libp2p/go-libp2p/core/protocol" diff --git a/beacon-chain/sync/rpc_ping.go b/beacon-chain/sync/rpc_ping.go index 3b0f2e172a..7a4eb51831 100644 --- a/beacon-chain/sync/rpc_ping.go +++ b/beacon-chain/sync/rpc_ping.go @@ -5,11 +5,11 @@ import ( "fmt" "strings" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p" - p2ptypes "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/types" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/time" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p" + p2ptypes "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/types" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/time" + "github.com/OffchainLabs/prysm/v7/time/slots" libp2pcore "github.com/libp2p/go-libp2p/core" "github.com/libp2p/go-libp2p/core/peer" "github.com/pkg/errors" diff --git a/beacon-chain/sync/rpc_ping_test.go b/beacon-chain/sync/rpc_ping_test.go index 9bccb2d008..ab0daaf089 100644 --- a/beacon-chain/sync/rpc_ping_test.go +++ b/beacon-chain/sync/rpc_ping_test.go @@ -5,19 +5,19 @@ import ( "testing" "time" - mock "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/testing" - db "github.com/OffchainLabs/prysm/v6/beacon-chain/db/testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p" - p2ptest "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/testing" - p2ptypes "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/types" - "github.com/OffchainLabs/prysm/v6/beacon-chain/startup" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/consensus-types/wrapper" - leakybucket "github.com/OffchainLabs/prysm/v6/container/leaky-bucket" - pb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + mock "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain/testing" + db "github.com/OffchainLabs/prysm/v7/beacon-chain/db/testing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p" + p2ptest "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/testing" + p2ptypes "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/types" + "github.com/OffchainLabs/prysm/v7/beacon-chain/startup" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/consensus-types/wrapper" + leakybucket "github.com/OffchainLabs/prysm/v7/container/leaky-bucket" + pb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" "github.com/ethereum/go-ethereum/p2p/enr" "github.com/libp2p/go-libp2p/core/network" "github.com/libp2p/go-libp2p/core/protocol" diff --git a/beacon-chain/sync/rpc_send_request.go b/beacon-chain/sync/rpc_send_request.go index 64096e4a51..be44e4f881 100644 --- a/beacon-chain/sync/rpc_send_request.go +++ b/beacon-chain/sync/rpc_send_request.go @@ -6,21 
+6,21 @@ import ( "io" "slices" - "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/encoder" - p2ptypes "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/types" - "github.com/OffchainLabs/prysm/v6/beacon-chain/verification" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/encoder" + p2ptypes "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/types" + "github.com/OffchainLabs/prysm/v7/beacon-chain/verification" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" + "github.com/OffchainLabs/prysm/v7/time/slots" "github.com/libp2p/go-libp2p/core/network" "github.com/libp2p/go-libp2p/core/peer" goPeer "github.com/libp2p/go-libp2p/core/peer" diff --git a/beacon-chain/sync/rpc_send_request_test.go b/beacon-chain/sync/rpc_send_request_test.go index 8c7af5e03f..67c6359556 100644 --- a/beacon-chain/sync/rpc_send_request_test.go +++ b/beacon-chain/sync/rpc_send_request_test.go @@ -9,24 +9,24 @@ import ( "testing" "time" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p" - p2ptest "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/types" - p2pTypes "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/types" - p2ptypes "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/types" - "github.com/OffchainLabs/prysm/v6/beacon-chain/startup" - "github.com/OffchainLabs/prysm/v6/beacon-chain/verification" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p" + p2ptest "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/testing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/types" + p2pTypes 
"github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/types" + p2ptypes "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/types" + "github.com/OffchainLabs/prysm/v7/beacon-chain/startup" + "github.com/OffchainLabs/prysm/v7/beacon-chain/verification" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" "github.com/libp2p/go-libp2p/core/network" ) diff --git a/beacon-chain/sync/rpc_status.go b/beacon-chain/sync/rpc_status.go index 32367df951..fdb031a558 100644 --- a/beacon-chain/sync/rpc_status.go +++ b/beacon-chain/sync/rpc_status.go @@ -7,18 +7,18 @@ import ( "sync" "time" - "github.com/OffchainLabs/prysm/v6/async" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/peers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/types" - p2ptypes "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/types" - "github.com/OffchainLabs/prysm/v6/cmd/beacon-chain/flags" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - pb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - prysmTime "github.com/OffchainLabs/prysm/v6/time" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/async" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/peers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/types" + p2ptypes "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/types" + "github.com/OffchainLabs/prysm/v7/cmd/beacon-chain/flags" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + pb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + prysmTime "github.com/OffchainLabs/prysm/v7/time" + "github.com/OffchainLabs/prysm/v7/time/slots" libp2pcore "github.com/libp2p/go-libp2p/core" "github.com/libp2p/go-libp2p/core/network" "github.com/libp2p/go-libp2p/core/peer" diff --git a/beacon-chain/sync/rpc_status_test.go b/beacon-chain/sync/rpc_status_test.go index c19a70a4a5..576c3bfacd 100644 --- a/beacon-chain/sync/rpc_status_test.go +++ b/beacon-chain/sync/rpc_status_test.go @@ -6,33 +6,33 @@ import ( "testing" "time" - beaconState "github.com/OffchainLabs/prysm/v6/beacon-chain/state" + beaconState "github.com/OffchainLabs/prysm/v7/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/async/abool" - mock "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/transition" - "github.com/OffchainLabs/prysm/v6/beacon-chain/db/kv" - testingDB "github.com/OffchainLabs/prysm/v6/beacon-chain/db/testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/peers" - p2ptest "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/testing" - p2ptypes 
"github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/types" - "github.com/OffchainLabs/prysm/v6/beacon-chain/startup" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - mockSync "github.com/OffchainLabs/prysm/v6/beacon-chain/sync/initial-sync/testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/verification" - "github.com/OffchainLabs/prysm/v6/config/params" - consensusblocks "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/consensus-types/wrapper" - leakybucket "github.com/OffchainLabs/prysm/v6/container/leaky-bucket" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" - prysmTime "github.com/OffchainLabs/prysm/v6/time" + "github.com/OffchainLabs/prysm/v7/async/abool" + mock "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain/testing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/transition" + "github.com/OffchainLabs/prysm/v7/beacon-chain/db/kv" + testingDB "github.com/OffchainLabs/prysm/v7/beacon-chain/db/testing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/peers" + p2ptest "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/testing" + p2ptypes "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/types" + "github.com/OffchainLabs/prysm/v7/beacon-chain/startup" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + mockSync "github.com/OffchainLabs/prysm/v7/beacon-chain/sync/initial-sync/testing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/verification" + "github.com/OffchainLabs/prysm/v7/config/params" + consensusblocks "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/consensus-types/wrapper" + leakybucket "github.com/OffchainLabs/prysm/v7/container/leaky-bucket" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" + prysmTime "github.com/OffchainLabs/prysm/v7/time" "github.com/ethereum/go-ethereum/p2p/enr" "github.com/libp2p/go-libp2p/core/network" "github.com/libp2p/go-libp2p/core/protocol" diff --git a/beacon-chain/sync/rpc_test.go b/beacon-chain/sync/rpc_test.go index ca3f1e319e..242c09dc1b 100644 --- a/beacon-chain/sync/rpc_test.go +++ b/beacon-chain/sync/rpc_test.go @@ -7,14 +7,14 @@ import ( "testing" "time" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/transition" - prysmP2P "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/encoder" - p2ptest "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/testing" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/transition" + prysmP2P 
"github.com/OffchainLabs/prysm/v7/beacon-chain/p2p" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/encoder" + p2ptest "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/testing" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" libp2pcore "github.com/libp2p/go-libp2p/core" "github.com/libp2p/go-libp2p/core/network" "github.com/libp2p/go-libp2p/core/protocol" diff --git a/beacon-chain/sync/service.go b/beacon-chain/sync/service.go index 02ccffd9f3..1b389382e4 100644 --- a/beacon-chain/sync/service.go +++ b/beacon-chain/sync/service.go @@ -9,38 +9,38 @@ import ( "sync" "time" - "github.com/OffchainLabs/prysm/v6/async" - "github.com/OffchainLabs/prysm/v6/async/abool" - "github.com/OffchainLabs/prysm/v6/async/event" - "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain" - "github.com/OffchainLabs/prysm/v6/beacon-chain/cache" - blockfeed "github.com/OffchainLabs/prysm/v6/beacon-chain/core/feed/block" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/feed/operation" - statefeed "github.com/OffchainLabs/prysm/v6/beacon-chain/core/feed/state" - "github.com/OffchainLabs/prysm/v6/beacon-chain/db" - "github.com/OffchainLabs/prysm/v6/beacon-chain/db/filesystem" - "github.com/OffchainLabs/prysm/v6/beacon-chain/execution" - lightClient "github.com/OffchainLabs/prysm/v6/beacon-chain/light-client" - "github.com/OffchainLabs/prysm/v6/beacon-chain/operations/attestations" - "github.com/OffchainLabs/prysm/v6/beacon-chain/operations/blstoexec" - "github.com/OffchainLabs/prysm/v6/beacon-chain/operations/slashings" - "github.com/OffchainLabs/prysm/v6/beacon-chain/operations/synccommittee" - "github.com/OffchainLabs/prysm/v6/beacon-chain/operations/voluntaryexits" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p" - p2ptypes "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/types" - "github.com/OffchainLabs/prysm/v6/beacon-chain/startup" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state/stategen" - "github.com/OffchainLabs/prysm/v6/beacon-chain/sync/backfill/coverage" - "github.com/OffchainLabs/prysm/v6/beacon-chain/verification" - lruwrpr "github.com/OffchainLabs/prysm/v6/cache/lru" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - leakybucket "github.com/OffchainLabs/prysm/v6/container/leaky-bucket" - "github.com/OffchainLabs/prysm/v6/crypto/rand" - "github.com/OffchainLabs/prysm/v6/runtime" - prysmTime "github.com/OffchainLabs/prysm/v6/time" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/async" + "github.com/OffchainLabs/prysm/v7/async/abool" + "github.com/OffchainLabs/prysm/v7/async/event" + "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain" + "github.com/OffchainLabs/prysm/v7/beacon-chain/cache" + blockfeed "github.com/OffchainLabs/prysm/v7/beacon-chain/core/feed/block" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/feed/operation" + statefeed "github.com/OffchainLabs/prysm/v7/beacon-chain/core/feed/state" + "github.com/OffchainLabs/prysm/v7/beacon-chain/db" + "github.com/OffchainLabs/prysm/v7/beacon-chain/db/filesystem" + "github.com/OffchainLabs/prysm/v7/beacon-chain/execution" + lightClient "github.com/OffchainLabs/prysm/v7/beacon-chain/light-client" + "github.com/OffchainLabs/prysm/v7/beacon-chain/operations/attestations" + 
"github.com/OffchainLabs/prysm/v7/beacon-chain/operations/blstoexec" + "github.com/OffchainLabs/prysm/v7/beacon-chain/operations/slashings" + "github.com/OffchainLabs/prysm/v7/beacon-chain/operations/synccommittee" + "github.com/OffchainLabs/prysm/v7/beacon-chain/operations/voluntaryexits" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p" + p2ptypes "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/types" + "github.com/OffchainLabs/prysm/v7/beacon-chain/startup" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state/stategen" + "github.com/OffchainLabs/prysm/v7/beacon-chain/sync/backfill/coverage" + "github.com/OffchainLabs/prysm/v7/beacon-chain/verification" + lruwrpr "github.com/OffchainLabs/prysm/v7/cache/lru" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + leakybucket "github.com/OffchainLabs/prysm/v7/container/leaky-bucket" + "github.com/OffchainLabs/prysm/v7/crypto/rand" + "github.com/OffchainLabs/prysm/v7/runtime" + prysmTime "github.com/OffchainLabs/prysm/v7/time" + "github.com/OffchainLabs/prysm/v7/time/slots" lru "github.com/hashicorp/golang-lru" pubsub "github.com/libp2p/go-libp2p-pubsub" libp2pcore "github.com/libp2p/go-libp2p/core" diff --git a/beacon-chain/sync/service_test.go b/beacon-chain/sync/service_test.go index b75103ebdc..3bf8583c83 100644 --- a/beacon-chain/sync/service_test.go +++ b/beacon-chain/sync/service_test.go @@ -6,24 +6,24 @@ import ( "testing" "time" - "github.com/OffchainLabs/prysm/v6/async/abool" - mockChain "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/feed" - dbTest "github.com/OffchainLabs/prysm/v6/beacon-chain/db/testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/peers" - p2ptest "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/testing" - p2ptypes "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/types" - "github.com/OffchainLabs/prysm/v6/beacon-chain/startup" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - mockSync "github.com/OffchainLabs/prysm/v6/beacon-chain/sync/initial-sync/testing" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - leakybucket "github.com/OffchainLabs/prysm/v6/container/leaky-bucket" - "github.com/OffchainLabs/prysm/v6/crypto/bls" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/async/abool" + mockChain "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain/testing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/feed" + dbTest "github.com/OffchainLabs/prysm/v7/beacon-chain/db/testing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/peers" + p2ptest "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/testing" + p2ptypes "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/types" + "github.com/OffchainLabs/prysm/v7/beacon-chain/startup" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + mockSync "github.com/OffchainLabs/prysm/v7/beacon-chain/sync/initial-sync/testing" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + leakybucket "github.com/OffchainLabs/prysm/v7/container/leaky-bucket" + "github.com/OffchainLabs/prysm/v7/crypto/bls" + 
"github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" "github.com/libp2p/go-libp2p/core/network" "github.com/libp2p/go-libp2p/core/protocol" gcache "github.com/patrickmn/go-cache" diff --git a/beacon-chain/sync/slot_aware_cache.go b/beacon-chain/sync/slot_aware_cache.go index 8b897e1add..d3d2b52a3d 100644 --- a/beacon-chain/sync/slot_aware_cache.go +++ b/beacon-chain/sync/slot_aware_cache.go @@ -4,7 +4,7 @@ import ( "slices" "sync" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" lru "github.com/hashicorp/golang-lru" ) diff --git a/beacon-chain/sync/slot_aware_cache_test.go b/beacon-chain/sync/slot_aware_cache_test.go index b6a66c2a72..d1101486fe 100644 --- a/beacon-chain/sync/slot_aware_cache_test.go +++ b/beacon-chain/sync/slot_aware_cache_test.go @@ -4,8 +4,8 @@ import ( "fmt" "testing" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/testing/require" ) func TestSlotAwareCache(t *testing.T) { diff --git a/beacon-chain/sync/subscriber.go b/beacon-chain/sync/subscriber.go index babe32eb9f..9b53ae4e57 100644 --- a/beacon-chain/sync/subscriber.go +++ b/beacon-chain/sync/subscriber.go @@ -9,22 +9,22 @@ import ( "sync" "time" - "github.com/OffchainLabs/prysm/v6/beacon-chain/cache" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/altair" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/peerdas" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/peers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/startup" - "github.com/OffchainLabs/prysm/v6/cmd/beacon-chain/flags" - "github.com/OffchainLabs/prysm/v6/config/features" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing/trace" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/messagehandler" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/beacon-chain/cache" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/altair" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/peerdas" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/peers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/startup" + "github.com/OffchainLabs/prysm/v7/cmd/beacon-chain/flags" + "github.com/OffchainLabs/prysm/v7/config/features" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing/trace" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/messagehandler" + "github.com/OffchainLabs/prysm/v7/time/slots" "github.com/ethereum/go-ethereum/common/hexutil" pubsub 
"github.com/libp2p/go-libp2p-pubsub" "github.com/libp2p/go-libp2p/core/host" diff --git a/beacon-chain/sync/subscriber_beacon_aggregate_proof.go b/beacon-chain/sync/subscriber_beacon_aggregate_proof.go index 15d768eb9a..732437b191 100644 --- a/beacon-chain/sync/subscriber_beacon_aggregate_proof.go +++ b/beacon-chain/sync/subscriber_beacon_aggregate_proof.go @@ -5,8 +5,8 @@ import ( "errors" "fmt" - "github.com/OffchainLabs/prysm/v6/config/features" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/config/features" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" "google.golang.org/protobuf/proto" ) diff --git a/beacon-chain/sync/subscriber_beacon_aggregate_proof_test.go b/beacon-chain/sync/subscriber_beacon_aggregate_proof_test.go index f4767a416e..d8450970d7 100644 --- a/beacon-chain/sync/subscriber_beacon_aggregate_proof_test.go +++ b/beacon-chain/sync/subscriber_beacon_aggregate_proof_test.go @@ -4,14 +4,14 @@ import ( "testing" "github.com/OffchainLabs/go-bitfield" - mock "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/operations/attestations" - lruwrpr "github.com/OffchainLabs/prysm/v6/cache/lru" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + mock "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain/testing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/operations/attestations" + lruwrpr "github.com/OffchainLabs/prysm/v7/cache/lru" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" ) func TestBeaconAggregateProofSubscriber_CanSaveAggregatedAttestation(t *testing.T) { diff --git a/beacon-chain/sync/subscriber_beacon_attestation.go b/beacon-chain/sync/subscriber_beacon_attestation.go index d5bd9158b9..42fcee621e 100644 --- a/beacon-chain/sync/subscriber_beacon_attestation.go +++ b/beacon-chain/sync/subscriber_beacon_attestation.go @@ -4,13 +4,13 @@ import ( "context" "fmt" - "github.com/OffchainLabs/prysm/v6/beacon-chain/cache" - "github.com/OffchainLabs/prysm/v6/config/features" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/container/slice" - eth "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/beacon-chain/cache" + "github.com/OffchainLabs/prysm/v7/config/features" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/container/slice" + eth "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/time/slots" "github.com/pkg/errors" "google.golang.org/protobuf/proto" ) diff --git a/beacon-chain/sync/subscriber_beacon_blocks.go b/beacon-chain/sync/subscriber_beacon_blocks.go index 92017f56b6..44fde992f2 100644 --- a/beacon-chain/sync/subscriber_beacon_blocks.go +++ b/beacon-chain/sync/subscriber_beacon_blocks.go @@ -7,18 +7,18 @@ import ( "path" "time" - 
"github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/peerdas" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/transition/interop" - "github.com/OffchainLabs/prysm/v6/config/features" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/io/file" - "github.com/OffchainLabs/prysm/v6/runtime/version" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/peerdas" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/transition/interop" + "github.com/OffchainLabs/prysm/v7/config/features" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/io/file" + "github.com/OffchainLabs/prysm/v7/runtime/version" + "github.com/OffchainLabs/prysm/v7/time/slots" "github.com/pkg/errors" "github.com/sirupsen/logrus" "google.golang.org/protobuf/proto" diff --git a/beacon-chain/sync/subscriber_beacon_blocks_test.go b/beacon-chain/sync/subscriber_beacon_blocks_test.go index ac0bc8ea76..f400f86226 100644 --- a/beacon-chain/sync/subscriber_beacon_blocks_test.go +++ b/beacon-chain/sync/subscriber_beacon_blocks_test.go @@ -4,25 +4,25 @@ import ( "testing" "github.com/OffchainLabs/go-bitfield" - "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain" - "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/kzg" - chainMock "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/db/filesystem" - dbtest "github.com/OffchainLabs/prysm/v6/beacon-chain/db/testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/execution" - mockExecution "github.com/OffchainLabs/prysm/v6/beacon-chain/execution/testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/operations/attestations" - mockp2p "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/startup" - lruwrpr "github.com/OffchainLabs/prysm/v6/cache/lru" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" - "github.com/OffchainLabs/prysm/v6/time" + "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain" + "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain/kzg" + chainMock "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain/testing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/db/filesystem" + dbtest "github.com/OffchainLabs/prysm/v7/beacon-chain/db/testing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/execution" + mockExecution "github.com/OffchainLabs/prysm/v7/beacon-chain/execution/testing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/operations/attestations" + 
mockp2p "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/testing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/startup" + lruwrpr "github.com/OffchainLabs/prysm/v7/cache/lru" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" + "github.com/OffchainLabs/prysm/v7/time" "github.com/pkg/errors" "google.golang.org/protobuf/proto" ) diff --git a/beacon-chain/sync/subscriber_blob_sidecar.go b/beacon-chain/sync/subscriber_blob_sidecar.go index 4b657e5e2b..78f9819272 100644 --- a/beacon-chain/sync/subscriber_blob_sidecar.go +++ b/beacon-chain/sync/subscriber_blob_sidecar.go @@ -4,9 +4,9 @@ import ( "context" "fmt" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/feed" - opfeed "github.com/OffchainLabs/prysm/v6/beacon-chain/core/feed/operation" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/feed" + opfeed "github.com/OffchainLabs/prysm/v7/beacon-chain/core/feed/operation" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" "google.golang.org/protobuf/proto" ) diff --git a/beacon-chain/sync/subscriber_bls_to_execution_change.go b/beacon-chain/sync/subscriber_bls_to_execution_change.go index a0d0f87080..0e46bf1729 100644 --- a/beacon-chain/sync/subscriber_bls_to_execution_change.go +++ b/beacon-chain/sync/subscriber_bls_to_execution_change.go @@ -3,9 +3,9 @@ package sync import ( "context" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/feed" - opfeed "github.com/OffchainLabs/prysm/v6/beacon-chain/core/feed/operation" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/feed" + opfeed "github.com/OffchainLabs/prysm/v7/beacon-chain/core/feed/operation" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" "github.com/pkg/errors" "google.golang.org/protobuf/proto" ) diff --git a/beacon-chain/sync/subscriber_data_column_sidecar.go b/beacon-chain/sync/subscriber_data_column_sidecar.go index a051eab3bf..1cc10c0630 100644 --- a/beacon-chain/sync/subscriber_data_column_sidecar.go +++ b/beacon-chain/sync/subscriber_data_column_sidecar.go @@ -4,12 +4,12 @@ import ( "context" "fmt" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/feed" - opfeed "github.com/OffchainLabs/prysm/v6/beacon-chain/core/feed/operation" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/peerdas" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/feed" + opfeed "github.com/OffchainLabs/prysm/v7/beacon-chain/core/feed/operation" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/peerdas" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" "github.com/pkg/errors" "golang.org/x/sync/errgroup" "google.golang.org/protobuf/proto" diff --git a/beacon-chain/sync/subscriber_data_column_sidecar_test.go b/beacon-chain/sync/subscriber_data_column_sidecar_test.go index 15526e1f2d..c7c8d3f9cd 100644 --- 
a/beacon-chain/sync/subscriber_data_column_sidecar_test.go +++ b/beacon-chain/sync/subscriber_data_column_sidecar_test.go @@ -3,15 +3,15 @@ package sync import ( "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/cache" - dbtest "github.com/OffchainLabs/prysm/v6/beacon-chain/db/testing" - doublylinkedtree "github.com/OffchainLabs/prysm/v6/beacon-chain/forkchoice/doubly-linked-tree" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state/stategen" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/beacon-chain/cache" + dbtest "github.com/OffchainLabs/prysm/v7/beacon-chain/db/testing" + doublylinkedtree "github.com/OffchainLabs/prysm/v7/beacon-chain/forkchoice/doubly-linked-tree" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state/stategen" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" ) func TestAllDataColumnSubnets(t *testing.T) { diff --git a/beacon-chain/sync/subscriber_handlers.go b/beacon-chain/sync/subscriber_handlers.go index 71c3b25f25..f079bf9789 100644 --- a/beacon-chain/sync/subscriber_handlers.go +++ b/beacon-chain/sync/subscriber_handlers.go @@ -4,7 +4,7 @@ import ( "context" "fmt" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" "github.com/pkg/errors" "google.golang.org/protobuf/proto" ) diff --git a/beacon-chain/sync/subscriber_sync_committee_message.go b/beacon-chain/sync/subscriber_sync_committee_message.go index 4914dbba88..fa29ec5d5d 100644 --- a/beacon-chain/sync/subscriber_sync_committee_message.go +++ b/beacon-chain/sync/subscriber_sync_committee_message.go @@ -4,7 +4,7 @@ import ( "context" "fmt" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" "github.com/pkg/errors" "google.golang.org/protobuf/proto" ) diff --git a/beacon-chain/sync/subscriber_sync_contribution_proof.go b/beacon-chain/sync/subscriber_sync_contribution_proof.go index 870145b2db..6337c3936e 100644 --- a/beacon-chain/sync/subscriber_sync_contribution_proof.go +++ b/beacon-chain/sync/subscriber_sync_contribution_proof.go @@ -5,7 +5,7 @@ import ( "errors" "fmt" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" "google.golang.org/protobuf/proto" ) diff --git a/beacon-chain/sync/subscriber_test.go b/beacon-chain/sync/subscriber_test.go index fd6ac72e85..7fc3bf0883 100644 --- a/beacon-chain/sync/subscriber_test.go +++ b/beacon-chain/sync/subscriber_test.go @@ -8,28 +8,28 @@ import ( "testing" "time" - "github.com/OffchainLabs/prysm/v6/async/abool" - mockChain "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/cache" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/signing" - db "github.com/OffchainLabs/prysm/v6/beacon-chain/db/testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/operations/slashings" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/encoder" - 
p2ptest "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/startup" - mockSync "github.com/OffchainLabs/prysm/v6/beacon-chain/sync/initial-sync/testing" - lruwrpr "github.com/OffchainLabs/prysm/v6/cache/lru" - "github.com/OffchainLabs/prysm/v6/cmd/beacon-chain/flags" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - pb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/async/abool" + mockChain "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain/testing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/cache" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/signing" + db "github.com/OffchainLabs/prysm/v7/beacon-chain/db/testing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/operations/slashings" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/encoder" + p2ptest "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/testing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/startup" + mockSync "github.com/OffchainLabs/prysm/v7/beacon-chain/sync/initial-sync/testing" + lruwrpr "github.com/OffchainLabs/prysm/v7/cache/lru" + "github.com/OffchainLabs/prysm/v7/cmd/beacon-chain/flags" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + pb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" + "github.com/OffchainLabs/prysm/v7/time/slots" pubsub "github.com/libp2p/go-libp2p-pubsub" pubsubpb "github.com/libp2p/go-libp2p-pubsub/pb" "github.com/libp2p/go-libp2p/core/peer" diff --git a/beacon-chain/sync/subscription_topic_handler.go b/beacon-chain/sync/subscription_topic_handler.go index bfb42dd199..784ec3859b 100644 --- a/beacon-chain/sync/subscription_topic_handler.go +++ b/beacon-chain/sync/subscription_topic_handler.go @@ -3,7 +3,7 @@ package sync import ( "sync" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p" pubsub "github.com/libp2p/go-libp2p-pubsub" ) diff --git a/beacon-chain/sync/subscription_topic_handler_test.go b/beacon-chain/sync/subscription_topic_handler_test.go index 405deb6d93..40b458f5bd 100644 --- a/beacon-chain/sync/subscription_topic_handler_test.go +++ b/beacon-chain/sync/subscription_topic_handler_test.go @@ -5,11 +5,11 @@ import ( "testing" "time" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/encoder" - "github.com/OffchainLabs/prysm/v6/beacon-chain/startup" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/testing/assert" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/encoder" + "github.com/OffchainLabs/prysm/v7/beacon-chain/startup" + 
"github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/testing/assert" pubsub "github.com/libp2p/go-libp2p-pubsub" ) diff --git a/beacon-chain/sync/sync_fuzz_test.go b/beacon-chain/sync/sync_fuzz_test.go index 0703d8dd67..db23ebc154 100644 --- a/beacon-chain/sync/sync_fuzz_test.go +++ b/beacon-chain/sync/sync_fuzz_test.go @@ -8,22 +8,22 @@ import ( "testing" "time" - mock "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/signing" - dbtest "github.com/OffchainLabs/prysm/v6/beacon-chain/db/testing" - doublylinkedtree "github.com/OffchainLabs/prysm/v6/beacon-chain/forkchoice/doubly-linked-tree" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p" - p2ptest "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/startup" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state/stategen" - mockSync "github.com/OffchainLabs/prysm/v6/beacon-chain/sync/initial-sync/testing" - lruwrpr "github.com/OffchainLabs/prysm/v6/cache/lru" - "github.com/OffchainLabs/prysm/v6/config/params" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + mock "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain/testing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/signing" + dbtest "github.com/OffchainLabs/prysm/v7/beacon-chain/db/testing" + doublylinkedtree "github.com/OffchainLabs/prysm/v7/beacon-chain/forkchoice/doubly-linked-tree" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p" + p2ptest "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/testing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/startup" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state/stategen" + mockSync "github.com/OffchainLabs/prysm/v7/beacon-chain/sync/initial-sync/testing" + lruwrpr "github.com/OffchainLabs/prysm/v7/cache/lru" + "github.com/OffchainLabs/prysm/v7/config/params" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" pubsub "github.com/libp2p/go-libp2p-pubsub" pb "github.com/libp2p/go-libp2p-pubsub/pb" "github.com/libp2p/go-libp2p/core/peer" diff --git a/beacon-chain/sync/sync_test.go b/beacon-chain/sync/sync_test.go index dc3c222e2e..0af9afa983 100644 --- a/beacon-chain/sync/sync_test.go +++ b/beacon-chain/sync/sync_test.go @@ -5,7 +5,7 @@ import ( "os" "testing" - "github.com/OffchainLabs/prysm/v6/cmd/beacon-chain/flags" + "github.com/OffchainLabs/prysm/v7/cmd/beacon-chain/flags" "github.com/sirupsen/logrus" ) diff --git a/beacon-chain/sync/validate_aggregate_proof.go b/beacon-chain/sync/validate_aggregate_proof.go index 0f64ab462d..5503e39ba7 100644 --- a/beacon-chain/sync/validate_aggregate_proof.go +++ b/beacon-chain/sync/validate_aggregate_proof.go @@ -5,23 +5,23 @@ import ( "fmt" "slices" - "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/blocks" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/feed" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/feed/operation" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - 
"github.com/OffchainLabs/prysm/v6/beacon-chain/core/signing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/config/features" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/crypto/bls" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing/trace" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - prysmTime "github.com/OffchainLabs/prysm/v6/time" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/blocks" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/feed" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/feed/operation" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/signing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/config/features" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/crypto/bls" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing/trace" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + prysmTime "github.com/OffchainLabs/prysm/v7/time" + "github.com/OffchainLabs/prysm/v7/time/slots" pubsub "github.com/libp2p/go-libp2p-pubsub" "github.com/libp2p/go-libp2p/core/peer" "github.com/pkg/errors" diff --git a/beacon-chain/sync/validate_aggregate_proof_test.go b/beacon-chain/sync/validate_aggregate_proof_test.go index 1244ed4888..bdb82a549a 100644 --- a/beacon-chain/sync/validate_aggregate_proof_test.go +++ b/beacon-chain/sync/validate_aggregate_proof_test.go @@ -8,26 +8,26 @@ import ( "time" "github.com/OffchainLabs/go-bitfield" - mock "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/signing" - dbtest "github.com/OffchainLabs/prysm/v6/beacon-chain/db/testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/operations/attestations" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p" - p2ptest "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/startup" - mockSync "github.com/OffchainLabs/prysm/v6/beacon-chain/sync/initial-sync/testing" - lruwrpr "github.com/OffchainLabs/prysm/v6/cache/lru" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/crypto/bls" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1/attestation" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + mock "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain/testing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/signing" + 
dbtest "github.com/OffchainLabs/prysm/v7/beacon-chain/db/testing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/operations/attestations" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p" + p2ptest "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/testing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/startup" + mockSync "github.com/OffchainLabs/prysm/v7/beacon-chain/sync/initial-sync/testing" + lruwrpr "github.com/OffchainLabs/prysm/v7/cache/lru" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/crypto/bls" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1/attestation" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" pubsub "github.com/libp2p/go-libp2p-pubsub" pubsubpb "github.com/libp2p/go-libp2p-pubsub/pb" ) diff --git a/beacon-chain/sync/validate_attester_slashing.go b/beacon-chain/sync/validate_attester_slashing.go index 7d6af4be07..79b4bb4c67 100644 --- a/beacon-chain/sync/validate_attester_slashing.go +++ b/beacon-chain/sync/validate_attester_slashing.go @@ -3,16 +3,16 @@ package sync import ( "context" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/blocks" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/feed" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/feed/operation" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/container/slice" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing/trace" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/blocks" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/feed" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/feed/operation" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/container/slice" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing/trace" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/time/slots" pubsub "github.com/libp2p/go-libp2p-pubsub" "github.com/libp2p/go-libp2p/core/peer" "github.com/pkg/errors" diff --git a/beacon-chain/sync/validate_attester_slashing_test.go b/beacon-chain/sync/validate_attester_slashing_test.go index 0814a40240..48a8de8e88 100644 --- a/beacon-chain/sync/validate_attester_slashing_test.go +++ b/beacon-chain/sync/validate_attester_slashing_test.go @@ -8,21 +8,21 @@ import ( "testing" "time" - mock "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/signing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p" - p2ptest "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/startup" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - mockSync "github.com/OffchainLabs/prysm/v6/beacon-chain/sync/initial-sync/testing" - 
"github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/container/slice" - "github.com/OffchainLabs/prysm/v6/crypto/bls" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + mock "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain/testing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/signing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p" + p2ptest "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/testing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/startup" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + mockSync "github.com/OffchainLabs/prysm/v7/beacon-chain/sync/initial-sync/testing" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/container/slice" + "github.com/OffchainLabs/prysm/v7/crypto/bls" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" pubsub "github.com/libp2p/go-libp2p-pubsub" pubsubpb "github.com/libp2p/go-libp2p-pubsub/pb" ) diff --git a/beacon-chain/sync/validate_beacon_attestation.go b/beacon-chain/sync/validate_beacon_attestation.go index aee654cf0f..2384bd7773 100644 --- a/beacon-chain/sync/validate_beacon_attestation.go +++ b/beacon-chain/sync/validate_beacon_attestation.go @@ -8,23 +8,23 @@ import ( "slices" "strings" - "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/blocks" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/feed" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/feed/operation" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p" - "github.com/OffchainLabs/prysm/v6/beacon-chain/slasher/types" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing/trace" - eth "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1/attestation" - "github.com/OffchainLabs/prysm/v6/runtime/version" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/blocks" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/feed" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/feed/operation" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p" + "github.com/OffchainLabs/prysm/v7/beacon-chain/slasher/types" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing/trace" + eth 
"github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1/attestation" + "github.com/OffchainLabs/prysm/v7/runtime/version" + "github.com/OffchainLabs/prysm/v7/time/slots" pubsub "github.com/libp2p/go-libp2p-pubsub" "github.com/libp2p/go-libp2p/core/peer" "github.com/pkg/errors" diff --git a/beacon-chain/sync/validate_beacon_attestation_test.go b/beacon-chain/sync/validate_beacon_attestation_test.go index dc47650c03..2d4d32cfb7 100644 --- a/beacon-chain/sync/validate_beacon_attestation_test.go +++ b/beacon-chain/sync/validate_beacon_attestation_test.go @@ -8,22 +8,22 @@ import ( "time" "github.com/OffchainLabs/go-bitfield" - mockChain "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/signing" - dbtest "github.com/OffchainLabs/prysm/v6/beacon-chain/db/testing" - p2ptest "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/startup" - mockSync "github.com/OffchainLabs/prysm/v6/beacon-chain/sync/initial-sync/testing" - lruwrpr "github.com/OffchainLabs/prysm/v6/cache/lru" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + mockChain "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain/testing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/signing" + dbtest "github.com/OffchainLabs/prysm/v7/beacon-chain/db/testing" + p2ptest "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/testing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/startup" + mockSync "github.com/OffchainLabs/prysm/v7/beacon-chain/sync/initial-sync/testing" + lruwrpr "github.com/OffchainLabs/prysm/v7/cache/lru" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" pubsub "github.com/libp2p/go-libp2p-pubsub" pubsubpb "github.com/libp2p/go-libp2p-pubsub/pb" ) diff --git a/beacon-chain/sync/validate_beacon_blocks.go b/beacon-chain/sync/validate_beacon_blocks.go index 07bba00446..d203411742 100644 --- a/beacon-chain/sync/validate_beacon_blocks.go +++ b/beacon-chain/sync/validate_beacon_blocks.go @@ -6,26 +6,26 @@ import ( "fmt" "time" - "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/blocks" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/feed" - blockfeed "github.com/OffchainLabs/prysm/v6/beacon-chain/core/feed/block" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/feed/operation" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/transition" - 
"github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/config/features" - "github.com/OffchainLabs/prysm/v6/config/params" - consensusblocks "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing/trace" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" - prysmTime "github.com/OffchainLabs/prysm/v6/time" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/blocks" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/feed" + blockfeed "github.com/OffchainLabs/prysm/v7/beacon-chain/core/feed/block" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/feed/operation" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/transition" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/config/features" + "github.com/OffchainLabs/prysm/v7/config/params" + consensusblocks "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing/trace" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" + prysmTime "github.com/OffchainLabs/prysm/v7/time" + "github.com/OffchainLabs/prysm/v7/time/slots" pubsub "github.com/libp2p/go-libp2p-pubsub" "github.com/libp2p/go-libp2p/core/peer" "github.com/pkg/errors" diff --git a/beacon-chain/sync/validate_beacon_blocks_test.go b/beacon-chain/sync/validate_beacon_blocks_test.go index c11b13ffc6..00db86133c 100644 --- a/beacon-chain/sync/validate_beacon_blocks_test.go +++ b/beacon-chain/sync/validate_beacon_blocks_test.go @@ -9,31 +9,31 @@ import ( "testing" "time" - "github.com/OffchainLabs/prysm/v6/async/abool" - mock "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/signing" - coreTime "github.com/OffchainLabs/prysm/v6/beacon-chain/core/time" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/transition" - dbtest "github.com/OffchainLabs/prysm/v6/beacon-chain/db/testing" - doublylinkedtree "github.com/OffchainLabs/prysm/v6/beacon-chain/forkchoice/doubly-linked-tree" - "github.com/OffchainLabs/prysm/v6/beacon-chain/operations/attestations" - slashingsmock "github.com/OffchainLabs/prysm/v6/beacon-chain/operations/slashings/mock" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p" - p2ptest "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/startup" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state/stategen" - mockSync "github.com/OffchainLabs/prysm/v6/beacon-chain/sync/initial-sync/testing" - lruwrpr "github.com/OffchainLabs/prysm/v6/cache/lru" - "github.com/OffchainLabs/prysm/v6/config/params" - 
"github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/crypto/bls" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/async/abool" + mock "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain/testing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/signing" + coreTime "github.com/OffchainLabs/prysm/v7/beacon-chain/core/time" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/transition" + dbtest "github.com/OffchainLabs/prysm/v7/beacon-chain/db/testing" + doublylinkedtree "github.com/OffchainLabs/prysm/v7/beacon-chain/forkchoice/doubly-linked-tree" + "github.com/OffchainLabs/prysm/v7/beacon-chain/operations/attestations" + slashingsmock "github.com/OffchainLabs/prysm/v7/beacon-chain/operations/slashings/mock" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p" + p2ptest "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/testing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/startup" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state/stategen" + mockSync "github.com/OffchainLabs/prysm/v7/beacon-chain/sync/initial-sync/testing" + lruwrpr "github.com/OffchainLabs/prysm/v7/cache/lru" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/crypto/bls" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" pubsub "github.com/libp2p/go-libp2p-pubsub" pubsubpb "github.com/libp2p/go-libp2p-pubsub/pb" gcache "github.com/patrickmn/go-cache" @@ -1847,7 +1847,7 @@ func TestBlockVerifyingState_SameEpochAsParent(t *testing.T) { // Create a different head block at a later epoch headBlock := util.NewBeaconBlock() - headBlock.Block.Slot = 40 // Different epoch (epoch 1) + headBlock.Block.Slot = 40 // Different epoch (epoch 1) headBlock.Block.ParentRoot = parentRoot[:] // Head descends from parent util.SaveBlock(t, ctx, db, headBlock) headRoot, err := headBlock.Block.HashTreeRoot() diff --git a/beacon-chain/sync/validate_blob.go b/beacon-chain/sync/validate_blob.go index 7c6244fbda..7b02ef3342 100644 --- a/beacon-chain/sync/validate_blob.go +++ b/beacon-chain/sync/validate_blob.go @@ -7,18 +7,18 @@ import ( "path" "strings" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p" - "github.com/OffchainLabs/prysm/v6/beacon-chain/verification" - "github.com/OffchainLabs/prysm/v6/config/features" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/crypto/rand" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/io/file" - eth "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - prysmTime "github.com/OffchainLabs/prysm/v6/time" - "github.com/OffchainLabs/prysm/v6/time/slots" + 
"github.com/OffchainLabs/prysm/v7/beacon-chain/p2p" + "github.com/OffchainLabs/prysm/v7/beacon-chain/verification" + "github.com/OffchainLabs/prysm/v7/config/features" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/crypto/rand" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/io/file" + eth "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + prysmTime "github.com/OffchainLabs/prysm/v7/time" + "github.com/OffchainLabs/prysm/v7/time/slots" pubsub "github.com/libp2p/go-libp2p-pubsub" "github.com/libp2p/go-libp2p/core/peer" "github.com/pkg/errors" diff --git a/beacon-chain/sync/validate_blob_test.go b/beacon-chain/sync/validate_blob_test.go index 8a30aebb1d..db1ccccbcd 100644 --- a/beacon-chain/sync/validate_blob_test.go +++ b/beacon-chain/sync/validate_blob_test.go @@ -6,21 +6,21 @@ import ( "testing" "time" - mock "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/testing" - dbtest "github.com/OffchainLabs/prysm/v6/beacon-chain/db/testing" - doublylinkedtree "github.com/OffchainLabs/prysm/v6/beacon-chain/forkchoice/doubly-linked-tree" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p" - p2ptest "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/startup" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state/stategen" - mockSync "github.com/OffchainLabs/prysm/v6/beacon-chain/sync/initial-sync/testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/verification" - lruwrpr "github.com/OffchainLabs/prysm/v6/cache/lru" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - eth "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + mock "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain/testing" + dbtest "github.com/OffchainLabs/prysm/v7/beacon-chain/db/testing" + doublylinkedtree "github.com/OffchainLabs/prysm/v7/beacon-chain/forkchoice/doubly-linked-tree" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p" + p2ptest "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/testing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/startup" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state/stategen" + mockSync "github.com/OffchainLabs/prysm/v7/beacon-chain/sync/initial-sync/testing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/verification" + lruwrpr "github.com/OffchainLabs/prysm/v7/cache/lru" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + eth "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" pubsub "github.com/libp2p/go-libp2p-pubsub" pb "github.com/libp2p/go-libp2p-pubsub/pb" "github.com/pkg/errors" diff --git a/beacon-chain/sync/validate_bls_to_execution_change.go b/beacon-chain/sync/validate_bls_to_execution_change.go index dcf6b5be39..ce4fb8ffb6 100644 --- a/beacon-chain/sync/validate_bls_to_execution_change.go +++ b/beacon-chain/sync/validate_bls_to_execution_change.go @@ -3,10 +3,10 @@ package sync import ( "context" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/blocks" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing" - 
"github.com/OffchainLabs/prysm/v6/monitoring/tracing/trace" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/blocks" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing/trace" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" pubsub "github.com/libp2p/go-libp2p-pubsub" "github.com/libp2p/go-libp2p/core/peer" ) diff --git a/beacon-chain/sync/validate_bls_to_execution_change_test.go b/beacon-chain/sync/validate_bls_to_execution_change_test.go index 8fc1a06819..401558e6a5 100644 --- a/beacon-chain/sync/validate_bls_to_execution_change_test.go +++ b/beacon-chain/sync/validate_bls_to_execution_change_test.go @@ -6,25 +6,25 @@ import ( "testing" "time" - mockChain "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/signing" - testingdb "github.com/OffchainLabs/prysm/v6/beacon-chain/db/testing" - doublylinkedtree "github.com/OffchainLabs/prysm/v6/beacon-chain/forkchoice/doubly-linked-tree" - "github.com/OffchainLabs/prysm/v6/beacon-chain/operations/blstoexec" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/encoder" - mockp2p "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/startup" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state/stategen" - mockSync "github.com/OffchainLabs/prysm/v6/beacon-chain/sync/initial-sync/testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/verification" - "github.com/OffchainLabs/prysm/v6/config/params" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" - "github.com/OffchainLabs/prysm/v6/time/slots" + mockChain "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain/testing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/signing" + testingdb "github.com/OffchainLabs/prysm/v7/beacon-chain/db/testing" + doublylinkedtree "github.com/OffchainLabs/prysm/v7/beacon-chain/forkchoice/doubly-linked-tree" + "github.com/OffchainLabs/prysm/v7/beacon-chain/operations/blstoexec" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/encoder" + mockp2p "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/testing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/startup" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state/stategen" + mockSync "github.com/OffchainLabs/prysm/v7/beacon-chain/sync/initial-sync/testing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/verification" + "github.com/OffchainLabs/prysm/v7/config/params" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" + "github.com/OffchainLabs/prysm/v7/time/slots" "github.com/golang/snappy" pubsub "github.com/libp2p/go-libp2p-pubsub" pubsubpb "github.com/libp2p/go-libp2p-pubsub/pb" diff --git a/beacon-chain/sync/validate_data_column.go b/beacon-chain/sync/validate_data_column.go index d43257d83b..13cc1f725d 100644 --- a/beacon-chain/sync/validate_data_column.go +++ 
b/beacon-chain/sync/validate_data_column.go @@ -6,19 +6,19 @@ import ( "math" "time" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/feed" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/feed/operation" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p" - "github.com/OffchainLabs/prysm/v6/beacon-chain/verification" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/crypto/rand" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - eth "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/logging" - prysmTime "github.com/OffchainLabs/prysm/v6/time" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/feed" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/feed/operation" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p" + "github.com/OffchainLabs/prysm/v7/beacon-chain/verification" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/crypto/rand" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + eth "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/logging" + prysmTime "github.com/OffchainLabs/prysm/v7/time" + "github.com/OffchainLabs/prysm/v7/time/slots" pubsub "github.com/libp2p/go-libp2p-pubsub" "github.com/libp2p/go-libp2p/core/peer" "github.com/pkg/errors" diff --git a/beacon-chain/sync/validate_data_column_test.go b/beacon-chain/sync/validate_data_column_test.go index 6f396159a2..8a55a2523e 100644 --- a/beacon-chain/sync/validate_data_column_test.go +++ b/beacon-chain/sync/validate_data_column_test.go @@ -7,19 +7,19 @@ import ( "testing" "time" - "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/kzg" - mock "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p" - p2ptest "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/startup" - mockSync "github.com/OffchainLabs/prysm/v6/beacon-chain/sync/initial-sync/testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/verification" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain/kzg" + mock "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain/testing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p" + p2ptest "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/testing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/startup" + mockSync "github.com/OffchainLabs/prysm/v7/beacon-chain/sync/initial-sync/testing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/verification" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + 
"github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" pubsub "github.com/libp2p/go-libp2p-pubsub" pb "github.com/libp2p/go-libp2p-pubsub/pb" ssz "github.com/prysmaticlabs/fastssz" diff --git a/beacon-chain/sync/validate_light_client.go b/beacon-chain/sync/validate_light_client.go index 31aa9cf9cb..d8b7ad2117 100644 --- a/beacon-chain/sync/validate_light_client.go +++ b/beacon-chain/sync/validate_light_client.go @@ -4,12 +4,12 @@ import ( "context" "fmt" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing/trace" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing/trace" + "github.com/OffchainLabs/prysm/v7/time/slots" pubsub "github.com/libp2p/go-libp2p-pubsub" "github.com/libp2p/go-libp2p/core/peer" "github.com/sirupsen/logrus" diff --git a/beacon-chain/sync/validate_light_client_test.go b/beacon-chain/sync/validate_light_client_test.go index 6e74234e6f..080ed30b4c 100644 --- a/beacon-chain/sync/validate_light_client_test.go +++ b/beacon-chain/sync/validate_light_client_test.go @@ -6,21 +6,21 @@ import ( "testing" "time" - "github.com/OffchainLabs/prysm/v6/async/event" - mock "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/testing" - testDB "github.com/OffchainLabs/prysm/v6/beacon-chain/db/testing" - lightClient "github.com/OffchainLabs/prysm/v6/beacon-chain/light-client" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p" - p2ptest "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/startup" - mockSync "github.com/OffchainLabs/prysm/v6/beacon-chain/sync/initial-sync/testing" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/runtime/version" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/async/event" + mock "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain/testing" + testDB "github.com/OffchainLabs/prysm/v7/beacon-chain/db/testing" + lightClient "github.com/OffchainLabs/prysm/v7/beacon-chain/light-client" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p" + p2ptest "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/testing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/startup" + mockSync "github.com/OffchainLabs/prysm/v7/beacon-chain/sync/initial-sync/testing" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/runtime/version" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" + "github.com/OffchainLabs/prysm/v7/time/slots" pubsub "github.com/libp2p/go-libp2p-pubsub" pb "github.com/libp2p/go-libp2p-pubsub/pb" ) diff --git 
a/beacon-chain/sync/validate_proposer_slashing.go b/beacon-chain/sync/validate_proposer_slashing.go index 18be458047..7abbcb878d 100644 --- a/beacon-chain/sync/validate_proposer_slashing.go +++ b/beacon-chain/sync/validate_proposer_slashing.go @@ -4,13 +4,13 @@ import ( "context" "fmt" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/blocks" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/feed" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/feed/operation" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing/trace" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/blocks" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/feed" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/feed/operation" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing/trace" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" pubsub "github.com/libp2p/go-libp2p-pubsub" "github.com/libp2p/go-libp2p/core/peer" ) diff --git a/beacon-chain/sync/validate_proposer_slashing_test.go b/beacon-chain/sync/validate_proposer_slashing_test.go index 2e6a76b609..0d794435b0 100644 --- a/beacon-chain/sync/validate_proposer_slashing_test.go +++ b/beacon-chain/sync/validate_proposer_slashing_test.go @@ -9,22 +9,22 @@ import ( "time" "github.com/OffchainLabs/go-bitfield" - mock "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/signing" - coreTime "github.com/OffchainLabs/prysm/v6/beacon-chain/core/time" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p" - p2ptest "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/startup" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - mockSync "github.com/OffchainLabs/prysm/v6/beacon-chain/sync/initial-sync/testing" - lruwrpr "github.com/OffchainLabs/prysm/v6/cache/lru" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/crypto/bls" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" + mock "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain/testing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/signing" + coreTime "github.com/OffchainLabs/prysm/v7/beacon-chain/core/time" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p" + p2ptest "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/testing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/startup" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + mockSync "github.com/OffchainLabs/prysm/v7/beacon-chain/sync/initial-sync/testing" + lruwrpr "github.com/OffchainLabs/prysm/v7/cache/lru" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/crypto/bls" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + 
"github.com/OffchainLabs/prysm/v7/testing/require" pubsub "github.com/libp2p/go-libp2p-pubsub" pubsubpb "github.com/libp2p/go-libp2p-pubsub/pb" ) diff --git a/beacon-chain/sync/validate_sync_committee_message.go b/beacon-chain/sync/validate_sync_committee_message.go index 1d92bb4cd6..9c0ba0dff6 100644 --- a/beacon-chain/sync/validate_sync_committee_message.go +++ b/beacon-chain/sync/validate_sync_committee_message.go @@ -7,17 +7,17 @@ import ( "reflect" "strings" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/altair" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/signing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p" - p2ptypes "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/types" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/crypto/bls" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing/trace" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/altair" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/signing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p" + p2ptypes "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/types" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/crypto/bls" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing/trace" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" pubsub "github.com/libp2p/go-libp2p-pubsub" "github.com/libp2p/go-libp2p/core/peer" ) diff --git a/beacon-chain/sync/validate_sync_committee_message_test.go b/beacon-chain/sync/validate_sync_committee_message_test.go index bd59adae18..18eb40a62f 100644 --- a/beacon-chain/sync/validate_sync_committee_message_test.go +++ b/beacon-chain/sync/validate_sync_committee_message_test.go @@ -7,25 +7,25 @@ import ( "testing" "time" - mockChain "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/signing" - testingdb "github.com/OffchainLabs/prysm/v6/beacon-chain/db/testing" - doublylinkedtree "github.com/OffchainLabs/prysm/v6/beacon-chain/forkchoice/doubly-linked-tree" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/encoder" - mockp2p "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/testing" - p2ptypes "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/types" - "github.com/OffchainLabs/prysm/v6/beacon-chain/startup" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state/stategen" - mockSync "github.com/OffchainLabs/prysm/v6/beacon-chain/sync/initial-sync/testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/verification" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/time/slots" + mockChain "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain/testing" + 
"github.com/OffchainLabs/prysm/v7/beacon-chain/core/signing" + testingdb "github.com/OffchainLabs/prysm/v7/beacon-chain/db/testing" + doublylinkedtree "github.com/OffchainLabs/prysm/v7/beacon-chain/forkchoice/doubly-linked-tree" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/encoder" + mockp2p "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/testing" + p2ptypes "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/types" + "github.com/OffchainLabs/prysm/v7/beacon-chain/startup" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state/stategen" + mockSync "github.com/OffchainLabs/prysm/v7/beacon-chain/sync/initial-sync/testing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/verification" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/time/slots" "github.com/golang/snappy" pubsub "github.com/libp2p/go-libp2p-pubsub" pubsubpb "github.com/libp2p/go-libp2p-pubsub/pb" diff --git a/beacon-chain/sync/validate_sync_contribution_proof.go b/beacon-chain/sync/validate_sync_contribution_proof.go index 4b9bcb6a67..510307539d 100644 --- a/beacon-chain/sync/validate_sync_contribution_proof.go +++ b/beacon-chain/sync/validate_sync_contribution_proof.go @@ -4,18 +4,18 @@ import ( "context" "errors" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/altair" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/feed" - opfeed "github.com/OffchainLabs/prysm/v6/beacon-chain/core/feed/operation" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/signing" - p2ptypes "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/types" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/crypto/bls" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing/trace" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/altair" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/feed" + opfeed "github.com/OffchainLabs/prysm/v7/beacon-chain/core/feed/operation" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/signing" + p2ptypes "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/types" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/crypto/bls" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing/trace" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" pubsub "github.com/libp2p/go-libp2p-pubsub" "github.com/libp2p/go-libp2p/core/peer" ) diff --git a/beacon-chain/sync/validate_sync_contribution_proof_test.go b/beacon-chain/sync/validate_sync_contribution_proof_test.go index 1fd598e062..2de858bff5 100644 --- a/beacon-chain/sync/validate_sync_contribution_proof_test.go +++ b/beacon-chain/sync/validate_sync_contribution_proof_test.go @@ -7,34 +7,34 @@ import ( "time" "github.com/OffchainLabs/go-bitfield" - mockChain 
"github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/altair" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/feed" - opfeed "github.com/OffchainLabs/prysm/v6/beacon-chain/core/feed/operation" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/signing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/transition" - "github.com/OffchainLabs/prysm/v6/beacon-chain/db" - testingdb "github.com/OffchainLabs/prysm/v6/beacon-chain/db/testing" - doublylinkedtree "github.com/OffchainLabs/prysm/v6/beacon-chain/forkchoice/doubly-linked-tree" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/encoder" - mockp2p "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/testing" - p2ptypes "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/types" - "github.com/OffchainLabs/prysm/v6/beacon-chain/startup" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state/stategen" - mockSync "github.com/OffchainLabs/prysm/v6/beacon-chain/sync/initial-sync/testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/verification" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/crypto/bls" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" - "github.com/OffchainLabs/prysm/v6/time/slots" + mockChain "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain/testing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/altair" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/feed" + opfeed "github.com/OffchainLabs/prysm/v7/beacon-chain/core/feed/operation" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/signing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/transition" + "github.com/OffchainLabs/prysm/v7/beacon-chain/db" + testingdb "github.com/OffchainLabs/prysm/v7/beacon-chain/db/testing" + doublylinkedtree "github.com/OffchainLabs/prysm/v7/beacon-chain/forkchoice/doubly-linked-tree" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/encoder" + mockp2p "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/testing" + p2ptypes "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/types" + "github.com/OffchainLabs/prysm/v7/beacon-chain/startup" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state/stategen" + mockSync "github.com/OffchainLabs/prysm/v7/beacon-chain/sync/initial-sync/testing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/verification" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/crypto/bls" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" + "github.com/OffchainLabs/prysm/v7/time/slots" "github.com/golang/snappy" pubsub 
"github.com/libp2p/go-libp2p-pubsub" pubsubpb "github.com/libp2p/go-libp2p-pubsub/pb" diff --git a/beacon-chain/sync/validate_voluntary_exit.go b/beacon-chain/sync/validate_voluntary_exit.go index e98ddebc41..d9d5c40b36 100644 --- a/beacon-chain/sync/validate_voluntary_exit.go +++ b/beacon-chain/sync/validate_voluntary_exit.go @@ -4,13 +4,13 @@ import ( "context" "errors" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/blocks" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/feed" - opfeed "github.com/OffchainLabs/prysm/v6/beacon-chain/core/feed/operation" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing/trace" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/blocks" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/feed" + opfeed "github.com/OffchainLabs/prysm/v7/beacon-chain/core/feed/operation" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing/trace" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" pubsub "github.com/libp2p/go-libp2p-pubsub" "github.com/libp2p/go-libp2p/core/peer" ) diff --git a/beacon-chain/sync/validate_voluntary_exit_test.go b/beacon-chain/sync/validate_voluntary_exit_test.go index ce44f273ed..55cc06f1f5 100644 --- a/beacon-chain/sync/validate_voluntary_exit_test.go +++ b/beacon-chain/sync/validate_voluntary_exit_test.go @@ -8,23 +8,23 @@ import ( "testing" "time" - mock "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/feed" - opfeed "github.com/OffchainLabs/prysm/v6/beacon-chain/core/feed/operation" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/signing" - coreTime "github.com/OffchainLabs/prysm/v6/beacon-chain/core/time" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p" - p2ptest "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/startup" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - mockSync "github.com/OffchainLabs/prysm/v6/beacon-chain/sync/initial-sync/testing" - lruwrpr "github.com/OffchainLabs/prysm/v6/cache/lru" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/crypto/bls" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" + mock "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain/testing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/feed" + opfeed "github.com/OffchainLabs/prysm/v7/beacon-chain/core/feed/operation" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/signing" + coreTime "github.com/OffchainLabs/prysm/v7/beacon-chain/core/time" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p" + p2ptest "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/testing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/startup" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + mockSync "github.com/OffchainLabs/prysm/v7/beacon-chain/sync/initial-sync/testing" + lruwrpr "github.com/OffchainLabs/prysm/v7/cache/lru" + 
"github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/crypto/bls" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" pubsub "github.com/libp2p/go-libp2p-pubsub" pubsubpb "github.com/libp2p/go-libp2p-pubsub/pb" ) diff --git a/beacon-chain/sync/verify/BUILD.bazel b/beacon-chain/sync/verify/BUILD.bazel index 705ba5eb00..bf6d2c41b8 100644 --- a/beacon-chain/sync/verify/BUILD.bazel +++ b/beacon-chain/sync/verify/BUILD.bazel @@ -3,7 +3,7 @@ load("@prysm//tools/go:def.bzl", "go_library", "go_test") go_library( name = "go_default_library", srcs = ["blob.go"], - importpath = "github.com/OffchainLabs/prysm/v6/beacon-chain/sync/verify", + importpath = "github.com/OffchainLabs/prysm/v7/beacon-chain/sync/verify", visibility = ["//visibility:public"], deps = [ "//config/params:go_default_library", diff --git a/beacon-chain/sync/verify/blob.go b/beacon-chain/sync/verify/blob.go index 42aa3c3312..7bb01cf9bf 100644 --- a/beacon-chain/sync/verify/blob.go +++ b/beacon-chain/sync/verify/blob.go @@ -1,10 +1,10 @@ package verify import ( - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/runtime/version" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/runtime/version" "github.com/pkg/errors" ) diff --git a/beacon-chain/sync/verify/blob_test.go b/beacon-chain/sync/verify/blob_test.go index 441b6b4c2b..b0765f31d2 100644 --- a/beacon-chain/sync/verify/blob_test.go +++ b/beacon-chain/sync/verify/blob_test.go @@ -4,11 +4,11 @@ import ( "fmt" "testing" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" ) func TestBlobAlignsWithBlock(t *testing.T) { diff --git a/beacon-chain/verification/BUILD.bazel b/beacon-chain/verification/BUILD.bazel index d2d6621da6..5ac2aabbf2 100644 --- a/beacon-chain/verification/BUILD.bazel +++ b/beacon-chain/verification/BUILD.bazel @@ -17,7 +17,7 @@ go_library( "mock.go", "result.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/beacon-chain/verification", + importpath = "github.com/OffchainLabs/prysm/v7/beacon-chain/verification", visibility = ["//visibility:public"], deps = [ "//beacon-chain/blockchain/kzg:go_default_library", diff --git a/beacon-chain/verification/batch.go b/beacon-chain/verification/batch.go index 77b1e2f075..cc48200cbd 100644 --- a/beacon-chain/verification/batch.go +++ b/beacon-chain/verification/batch.go @@ -3,9 +3,9 @@ package verification import ( "context" - "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/kzg" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain/kzg" + 
"github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" ) // NewBlobBatchVerifier initializes a blob batch verifier. It requires the caller to correctly specify diff --git a/beacon-chain/verification/batch_test.go b/beacon-chain/verification/batch_test.go index 97a743f693..1457bf548e 100644 --- a/beacon-chain/verification/batch_test.go +++ b/beacon-chain/verification/batch_test.go @@ -3,9 +3,9 @@ package verification import ( "testing" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/testing/util" "github.com/pkg/errors" "github.com/stretchr/testify/require" ) diff --git a/beacon-chain/verification/blob.go b/beacon-chain/verification/blob.go index adcc3cc449..7b03462a73 100644 --- a/beacon-chain/verification/blob.go +++ b/beacon-chain/verification/blob.go @@ -4,13 +4,13 @@ import ( "context" "fmt" - forkchoicetypes "github.com/OffchainLabs/prysm/v6/beacon-chain/forkchoice/types" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/runtime/logging" - "github.com/OffchainLabs/prysm/v6/time/slots" + forkchoicetypes "github.com/OffchainLabs/prysm/v7/beacon-chain/forkchoice/types" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/runtime/logging" + "github.com/OffchainLabs/prysm/v7/time/slots" "github.com/pkg/errors" ) diff --git a/beacon-chain/verification/blob_test.go b/beacon-chain/verification/blob_test.go index f18111e92e..fe36068231 100644 --- a/beacon-chain/verification/blob_test.go +++ b/beacon-chain/verification/blob_test.go @@ -7,17 +7,17 @@ import ( "testing" "time" - "github.com/OffchainLabs/prysm/v6/beacon-chain/db" - forkchoicetypes "github.com/OffchainLabs/prysm/v6/beacon-chain/forkchoice/types" - "github.com/OffchainLabs/prysm/v6/beacon-chain/startup" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/beacon-chain/db" + forkchoicetypes "github.com/OffchainLabs/prysm/v7/beacon-chain/forkchoice/types" + "github.com/OffchainLabs/prysm/v7/beacon-chain/startup" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" + "github.com/OffchainLabs/prysm/v7/time/slots" "github.com/pkg/errors" ) diff 
--git a/beacon-chain/verification/cache.go b/beacon-chain/verification/cache.go index 71931d002c..d421d140fb 100644 --- a/beacon-chain/verification/cache.go +++ b/beacon-chain/verification/cache.go @@ -4,17 +4,17 @@ import ( "context" "fmt" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/signing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/transition" - forkchoicetypes "github.com/OffchainLabs/prysm/v6/beacon-chain/forkchoice/types" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - lruwrpr "github.com/OffchainLabs/prysm/v6/cache/lru" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/crypto/bls" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/signing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/transition" + forkchoicetypes "github.com/OffchainLabs/prysm/v7/beacon-chain/forkchoice/types" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + lruwrpr "github.com/OffchainLabs/prysm/v7/cache/lru" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/crypto/bls" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/time/slots" lru "github.com/hashicorp/golang-lru" "github.com/sirupsen/logrus" ) diff --git a/beacon-chain/verification/cache_test.go b/beacon-chain/verification/cache_test.go index 91dd4fc658..034069dd03 100644 --- a/beacon-chain/verification/cache_test.go +++ b/beacon-chain/verification/cache_test.go @@ -3,15 +3,15 @@ package verification import ( "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/signing" - forkchoicetypes "github.com/OffchainLabs/prysm/v6/beacon-chain/forkchoice/types" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/crypto/bls" - eth "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/interop" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/signing" + forkchoicetypes "github.com/OffchainLabs/prysm/v7/beacon-chain/forkchoice/types" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/crypto/bls" + eth "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/interop" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" ) func testSignedBlockBlobKeys(t *testing.T, valRoot []byte, slot primitives.Slot, nblobs int) (blocks.ROBlock, []blocks.ROBlob, bls.SecretKey, bls.PublicKey) { diff --git a/beacon-chain/verification/data_column.go b/beacon-chain/verification/data_column.go index dc35ae3f1b..c090c63af2 100644 --- a/beacon-chain/verification/data_column.go +++ b/beacon-chain/verification/data_column.go @@ -7,16 +7,16 @@ import ( "strings" "time" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/peerdas" - forkchoicetypes 
"github.com/OffchainLabs/prysm/v6/beacon-chain/forkchoice/types" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/runtime/logging" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/peerdas" + forkchoicetypes "github.com/OffchainLabs/prysm/v7/beacon-chain/forkchoice/types" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/runtime/logging" + "github.com/OffchainLabs/prysm/v7/time/slots" "github.com/pkg/errors" ) diff --git a/beacon-chain/verification/data_column_test.go b/beacon-chain/verification/data_column_test.go index 8e6f0b5675..75cb6e2b16 100644 --- a/beacon-chain/verification/data_column_test.go +++ b/beacon-chain/verification/data_column_test.go @@ -6,19 +6,19 @@ import ( "testing" "time" - "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/kzg" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/peerdas" - forkchoicetypes "github.com/OffchainLabs/prysm/v6/beacon-chain/forkchoice/types" - "github.com/OffchainLabs/prysm/v6/beacon-chain/startup" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain/kzg" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/peerdas" + forkchoicetypes "github.com/OffchainLabs/prysm/v7/beacon-chain/forkchoice/types" + "github.com/OffchainLabs/prysm/v7/beacon-chain/startup" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" + "github.com/OffchainLabs/prysm/v7/time/slots" "github.com/pkg/errors" ) diff --git a/beacon-chain/verification/error.go b/beacon-chain/verification/error.go index 51507efaaa..9e235136b3 100644 --- a/beacon-chain/verification/error.go +++ b/beacon-chain/verification/error.go @@ -3,7 +3,7 @@ package verification import ( "errors" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" ) // ErrInvalid is a general purpose verification failure that can be wrapped or joined to indicate diff --git 
a/beacon-chain/verification/fake.go b/beacon-chain/verification/fake.go index 90e5008e47..0348389ab0 100644 --- a/beacon-chain/verification/fake.go +++ b/beacon-chain/verification/fake.go @@ -3,7 +3,7 @@ package verification import ( "testing" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" ) // FakeVerifyForTest can be used by tests that need a VerifiedROBlob but don't want to do all the diff --git a/beacon-chain/verification/filesystem.go b/beacon-chain/verification/filesystem.go index 2441f3565d..1d81ff4d48 100644 --- a/beacon-chain/verification/filesystem.go +++ b/beacon-chain/verification/filesystem.go @@ -1,9 +1,9 @@ package verification import ( - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" "github.com/pkg/errors" "github.com/spf13/afero" diff --git a/beacon-chain/verification/filesystem_test.go b/beacon-chain/verification/filesystem_test.go index 68884ff561..091d8b9fb0 100644 --- a/beacon-chain/verification/filesystem_test.go +++ b/beacon-chain/verification/filesystem_test.go @@ -3,9 +3,9 @@ package verification import ( "testing" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" "github.com/spf13/afero" ) diff --git a/beacon-chain/verification/initializer.go b/beacon-chain/verification/initializer.go index a949690055..14aba8b1ad 100644 --- a/beacon-chain/verification/initializer.go +++ b/beacon-chain/verification/initializer.go @@ -4,16 +4,16 @@ import ( "context" "sync" - "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/kzg" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/peerdas" - forkchoicetypes "github.com/OffchainLabs/prysm/v6/beacon-chain/forkchoice/types" - "github.com/OffchainLabs/prysm/v6/beacon-chain/startup" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain/kzg" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/peerdas" + forkchoicetypes "github.com/OffchainLabs/prysm/v7/beacon-chain/forkchoice/types" + "github.com/OffchainLabs/prysm/v7/beacon-chain/startup" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" "golang.org/x/sync/singleflight" ) diff --git a/beacon-chain/verification/initializer_test.go b/beacon-chain/verification/initializer_test.go index 59810159d1..fc63d1fe90 100644 --- 
a/beacon-chain/verification/initializer_test.go +++ b/beacon-chain/verification/initializer_test.go @@ -5,9 +5,9 @@ import ( "testing" "time" - "github.com/OffchainLabs/prysm/v6/beacon-chain/startup" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/beacon-chain/startup" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/testing/require" ) func TestInitializerWaiter(t *testing.T) { diff --git a/beacon-chain/verification/interface.go b/beacon-chain/verification/interface.go index d107d1a1fe..f4dc9b4f2b 100644 --- a/beacon-chain/verification/interface.go +++ b/beacon-chain/verification/interface.go @@ -3,8 +3,8 @@ package verification import ( "context" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" ) // BlobVerifier defines the methods implemented by the ROBlobVerifier. diff --git a/beacon-chain/verification/mock.go b/beacon-chain/verification/mock.go index bcbf1fc27d..27d5b50f11 100644 --- a/beacon-chain/verification/mock.go +++ b/beacon-chain/verification/mock.go @@ -3,8 +3,8 @@ package verification import ( "context" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" ) // MockBlobVerifier is a mock implementation of the BlobVerifier interface. diff --git a/beacon-chain/verification/result_test.go b/beacon-chain/verification/result_test.go index 8b47b8f826..15a7789cee 100644 --- a/beacon-chain/verification/result_test.go +++ b/beacon-chain/verification/result_test.go @@ -4,7 +4,7 @@ import ( "math" "testing" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/require" ) func TestResultList(t *testing.T) { diff --git a/beacon-chain/verification/verification_test.go b/beacon-chain/verification/verification_test.go index 491dde8fe6..c9b026c55a 100644 --- a/beacon-chain/verification/verification_test.go +++ b/beacon-chain/verification/verification_test.go @@ -4,7 +4,7 @@ import ( "os" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/kzg" + "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain/kzg" ) func TestMain(t *testing.M) { diff --git a/build/bazel/BUILD.bazel b/build/bazel/BUILD.bazel index 8a218b5a63..58d75b6ce5 100644 --- a/build/bazel/BUILD.bazel +++ b/build/bazel/BUILD.bazel @@ -7,7 +7,7 @@ go_library( "bazel.go", "data_path.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/build/bazel", + importpath = "github.com/OffchainLabs/prysm/v7/build/bazel", visibility = ["//visibility:public"], deps = [ "//testing/require:go_default_library", diff --git a/build/bazel/bazel_test.go b/build/bazel/bazel_test.go index 4778c4c4fb..f087c550d7 100644 --- a/build/bazel/bazel_test.go +++ b/build/bazel/bazel_test.go @@ -3,7 +3,7 @@ package bazel_test import ( "testing" - "github.com/OffchainLabs/prysm/v6/build/bazel" + "github.com/OffchainLabs/prysm/v7/build/bazel" ) func TestBuildWithBazel(t *testing.T) { diff --git a/build/bazel/data_path.go b/build/bazel/data_path.go index 6678847dcc..06859a5fc0 100644 --- a/build/bazel/data_path.go +++ b/build/bazel/data_path.go @@ -15,7 +15,7 @@ 
import ( "path/filepath" "testing" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/require" ) // TestDataPath returns a path to an asset in the testdata directory. It knows diff --git a/cache/lru/BUILD.bazel b/cache/lru/BUILD.bazel index db5836c4c1..cbfe07ceb7 100644 --- a/cache/lru/BUILD.bazel +++ b/cache/lru/BUILD.bazel @@ -3,7 +3,7 @@ load("@prysm//tools/go:def.bzl", "go_library", "go_test") go_library( name = "go_default_library", srcs = ["lru_wrpr.go"], - importpath = "github.com/OffchainLabs/prysm/v6/cache/lru", + importpath = "github.com/OffchainLabs/prysm/v7/cache/lru", visibility = ["//visibility:public"], deps = ["@com_github_hashicorp_golang_lru//:go_default_library"], ) diff --git a/cache/nonblocking/BUILD.bazel b/cache/nonblocking/BUILD.bazel index ae64c46809..a5998d7138 100644 --- a/cache/nonblocking/BUILD.bazel +++ b/cache/nonblocking/BUILD.bazel @@ -6,7 +6,7 @@ go_library( "list.go", "lru.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/cache/nonblocking", + importpath = "github.com/OffchainLabs/prysm/v7/cache/nonblocking", visibility = ["//visibility:public"], ) diff --git a/changelog/bastin_upgrade-v6-to-v7.md b/changelog/bastin_upgrade-v6-to-v7.md new file mode 100644 index 0000000000..9d2ea8313a --- /dev/null +++ b/changelog/bastin_upgrade-v6-to-v7.md @@ -0,0 +1,3 @@ +### Changed + +- Upgrade Prysm v6 to v7. \ No newline at end of file diff --git a/cmd/BUILD.bazel b/cmd/BUILD.bazel index 67e5db8355..05ec4f08c9 100644 --- a/cmd/BUILD.bazel +++ b/cmd/BUILD.bazel @@ -10,7 +10,7 @@ go_library( "password_reader.go", "wrap_flags.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/cmd", + importpath = "github.com/OffchainLabs/prysm/v7/cmd", visibility = ["//visibility:public"], deps = [ "//config/fieldparams:go_default_library", diff --git a/cmd/beacon-chain/BUILD.bazel b/cmd/beacon-chain/BUILD.bazel index fcc9fadc08..c152191e9a 100644 --- a/cmd/beacon-chain/BUILD.bazel +++ b/cmd/beacon-chain/BUILD.bazel @@ -9,7 +9,7 @@ go_library( "main.go", "usage.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/cmd/beacon-chain", + importpath = "github.com/OffchainLabs/prysm/v7/cmd/beacon-chain", visibility = ["//beacon-chain:__subpackages__"], deps = [ "//beacon-chain/builder:go_default_library", diff --git a/cmd/beacon-chain/blockchain/BUILD.bazel b/cmd/beacon-chain/blockchain/BUILD.bazel index 0c3ee257b2..62fa72fdab 100644 --- a/cmd/beacon-chain/blockchain/BUILD.bazel +++ b/cmd/beacon-chain/blockchain/BUILD.bazel @@ -3,7 +3,7 @@ load("@prysm//tools/go:def.bzl", "go_library") go_library( name = "go_default_library", srcs = ["options.go"], - importpath = "github.com/OffchainLabs/prysm/v6/cmd/beacon-chain/blockchain", + importpath = "github.com/OffchainLabs/prysm/v7/cmd/beacon-chain/blockchain", visibility = ["//cmd:__subpackages__"], deps = [ "//beacon-chain/blockchain:go_default_library", diff --git a/cmd/beacon-chain/blockchain/options.go b/cmd/beacon-chain/blockchain/options.go index f44c556bb8..afaff3ef68 100644 --- a/cmd/beacon-chain/blockchain/options.go +++ b/cmd/beacon-chain/blockchain/options.go @@ -1,10 +1,10 @@ package blockchaincmd import ( - "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/cmd" - "github.com/OffchainLabs/prysm/v6/cmd/beacon-chain/flags" + "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + 
"github.com/OffchainLabs/prysm/v7/cmd" + "github.com/OffchainLabs/prysm/v7/cmd/beacon-chain/flags" "github.com/urfave/cli/v2" ) diff --git a/cmd/beacon-chain/db/BUILD.bazel b/cmd/beacon-chain/db/BUILD.bazel index 6cafca7cc1..794bb8c65e 100644 --- a/cmd/beacon-chain/db/BUILD.bazel +++ b/cmd/beacon-chain/db/BUILD.bazel @@ -3,7 +3,7 @@ load("@prysm//tools/go:def.bzl", "go_library") go_library( name = "go_default_library", srcs = ["db.go"], - importpath = "github.com/OffchainLabs/prysm/v6/cmd/beacon-chain/db", + importpath = "github.com/OffchainLabs/prysm/v7/cmd/beacon-chain/db", visibility = ["//visibility:public"], deps = [ "//beacon-chain/db:go_default_library", diff --git a/cmd/beacon-chain/db/db.go b/cmd/beacon-chain/db/db.go index f9e9c3a439..84b1b27549 100644 --- a/cmd/beacon-chain/db/db.go +++ b/cmd/beacon-chain/db/db.go @@ -1,9 +1,9 @@ package db import ( - beacondb "github.com/OffchainLabs/prysm/v6/beacon-chain/db" - "github.com/OffchainLabs/prysm/v6/cmd" - "github.com/OffchainLabs/prysm/v6/runtime/tos" + beacondb "github.com/OffchainLabs/prysm/v7/beacon-chain/db" + "github.com/OffchainLabs/prysm/v7/cmd" + "github.com/OffchainLabs/prysm/v7/runtime/tos" "github.com/sirupsen/logrus" "github.com/urfave/cli/v2" ) diff --git a/cmd/beacon-chain/execution/BUILD.bazel b/cmd/beacon-chain/execution/BUILD.bazel index 0446b881c5..22687e407b 100644 --- a/cmd/beacon-chain/execution/BUILD.bazel +++ b/cmd/beacon-chain/execution/BUILD.bazel @@ -6,7 +6,7 @@ go_library( "log.go", "options.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/cmd/beacon-chain/execution", + importpath = "github.com/OffchainLabs/prysm/v7/cmd/beacon-chain/execution", visibility = [ "//beacon-chain:__subpackages__", "//cmd:__subpackages__", diff --git a/cmd/beacon-chain/execution/options.go b/cmd/beacon-chain/execution/options.go index 8320625266..a1a4325ced 100644 --- a/cmd/beacon-chain/execution/options.go +++ b/cmd/beacon-chain/execution/options.go @@ -5,9 +5,9 @@ import ( "fmt" "strings" - "github.com/OffchainLabs/prysm/v6/beacon-chain/execution" - "github.com/OffchainLabs/prysm/v6/cmd/beacon-chain/flags" - "github.com/OffchainLabs/prysm/v6/io/file" + "github.com/OffchainLabs/prysm/v7/beacon-chain/execution" + "github.com/OffchainLabs/prysm/v7/cmd/beacon-chain/flags" + "github.com/OffchainLabs/prysm/v7/io/file" "github.com/pkg/errors" "github.com/urfave/cli/v2" ) diff --git a/cmd/beacon-chain/execution/options_test.go b/cmd/beacon-chain/execution/options_test.go index fc4b02b59a..e62a3d3181 100644 --- a/cmd/beacon-chain/execution/options_test.go +++ b/cmd/beacon-chain/execution/options_test.go @@ -6,11 +6,11 @@ import ( "path/filepath" "testing" - "github.com/OffchainLabs/prysm/v6/cmd/beacon-chain/flags" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/io/file" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/cmd/beacon-chain/flags" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/io/file" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" "github.com/urfave/cli/v2" ) diff --git a/cmd/beacon-chain/flags/BUILD.bazel b/cmd/beacon-chain/flags/BUILD.bazel index 5961755801..a94f14384a 100644 --- a/cmd/beacon-chain/flags/BUILD.bazel +++ b/cmd/beacon-chain/flags/BUILD.bazel @@ -9,7 +9,7 @@ go_library( "interop.go", "log.go", ], - importpath = 
"github.com/OffchainLabs/prysm/v6/cmd/beacon-chain/flags", + importpath = "github.com/OffchainLabs/prysm/v7/cmd/beacon-chain/flags", visibility = [ "//api:__subpackages__", "//beacon-chain:__subpackages__", diff --git a/cmd/beacon-chain/flags/api_module_test.go b/cmd/beacon-chain/flags/api_module_test.go index d1d6ca8480..d1a2338fa9 100644 --- a/cmd/beacon-chain/flags/api_module_test.go +++ b/cmd/beacon-chain/flags/api_module_test.go @@ -3,7 +3,7 @@ package flags import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/assert" ) func TestEnableHTTPPrysmAPI(t *testing.T) { diff --git a/cmd/beacon-chain/flags/base.go b/cmd/beacon-chain/flags/base.go index 681fec23fe..e7a35d8f58 100644 --- a/cmd/beacon-chain/flags/base.go +++ b/cmd/beacon-chain/flags/base.go @@ -5,8 +5,8 @@ package flags import ( "strings" - "github.com/OffchainLabs/prysm/v6/cmd" - "github.com/OffchainLabs/prysm/v6/config/params" + "github.com/OffchainLabs/prysm/v7/cmd" + "github.com/OffchainLabs/prysm/v7/config/params" "github.com/urfave/cli/v2" ) diff --git a/cmd/beacon-chain/flags/config.go b/cmd/beacon-chain/flags/config.go index e366551827..a67aac0483 100644 --- a/cmd/beacon-chain/flags/config.go +++ b/cmd/beacon-chain/flags/config.go @@ -1,7 +1,7 @@ package flags import ( - "github.com/OffchainLabs/prysm/v6/cmd" + "github.com/OffchainLabs/prysm/v7/cmd" "github.com/urfave/cli/v2" ) diff --git a/cmd/beacon-chain/genesis/BUILD.bazel b/cmd/beacon-chain/genesis/BUILD.bazel index d7aad35df6..c93433052d 100644 --- a/cmd/beacon-chain/genesis/BUILD.bazel +++ b/cmd/beacon-chain/genesis/BUILD.bazel @@ -3,7 +3,7 @@ load("@prysm//tools/go:def.bzl", "go_library") go_library( name = "go_default_library", srcs = ["options.go"], - importpath = "github.com/OffchainLabs/prysm/v6/cmd/beacon-chain/genesis", + importpath = "github.com/OffchainLabs/prysm/v7/cmd/beacon-chain/genesis", visibility = ["//visibility:public"], deps = [ "//beacon-chain/node:go_default_library", diff --git a/cmd/beacon-chain/genesis/options.go b/cmd/beacon-chain/genesis/options.go index 765e084143..fc639dcd0a 100644 --- a/cmd/beacon-chain/genesis/options.go +++ b/cmd/beacon-chain/genesis/options.go @@ -1,9 +1,9 @@ package genesis import ( - "github.com/OffchainLabs/prysm/v6/beacon-chain/node" - "github.com/OffchainLabs/prysm/v6/cmd/beacon-chain/sync/checkpoint" - "github.com/OffchainLabs/prysm/v6/genesis" + "github.com/OffchainLabs/prysm/v7/beacon-chain/node" + "github.com/OffchainLabs/prysm/v7/cmd/beacon-chain/sync/checkpoint" + "github.com/OffchainLabs/prysm/v7/genesis" "github.com/pkg/errors" log "github.com/sirupsen/logrus" "github.com/urfave/cli/v2" diff --git a/cmd/beacon-chain/jwt/BUILD.bazel b/cmd/beacon-chain/jwt/BUILD.bazel index 284e273d6b..608b845232 100644 --- a/cmd/beacon-chain/jwt/BUILD.bazel +++ b/cmd/beacon-chain/jwt/BUILD.bazel @@ -3,7 +3,7 @@ load("@prysm//tools/go:def.bzl", "go_library", "go_test") go_library( name = "go_default_library", srcs = ["jwt.go"], - importpath = "github.com/OffchainLabs/prysm/v6/cmd/beacon-chain/jwt", + importpath = "github.com/OffchainLabs/prysm/v7/cmd/beacon-chain/jwt", visibility = ["//visibility:public"], deps = [ "//api:go_default_library", diff --git a/cmd/beacon-chain/jwt/jwt.go b/cmd/beacon-chain/jwt/jwt.go index 9304d57d47..fc0c6ae597 100644 --- a/cmd/beacon-chain/jwt/jwt.go +++ b/cmd/beacon-chain/jwt/jwt.go @@ -3,9 +3,9 @@ package jwt import ( "path/filepath" - "github.com/OffchainLabs/prysm/v6/api" - "github.com/OffchainLabs/prysm/v6/cmd" - 
"github.com/OffchainLabs/prysm/v6/io/file" + "github.com/OffchainLabs/prysm/v7/api" + "github.com/OffchainLabs/prysm/v7/cmd" + "github.com/OffchainLabs/prysm/v7/io/file" "github.com/sirupsen/logrus" "github.com/urfave/cli/v2" ) diff --git a/cmd/beacon-chain/jwt/jwt_test.go b/cmd/beacon-chain/jwt/jwt_test.go index bdf78fd348..bbf6fd6532 100644 --- a/cmd/beacon-chain/jwt/jwt_test.go +++ b/cmd/beacon-chain/jwt/jwt_test.go @@ -6,9 +6,9 @@ import ( "path/filepath" "testing" - "github.com/OffchainLabs/prysm/v6/cmd" - "github.com/OffchainLabs/prysm/v6/io/file" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/cmd" + "github.com/OffchainLabs/prysm/v7/io/file" + "github.com/OffchainLabs/prysm/v7/testing/require" "github.com/ethereum/go-ethereum/common/hexutil" "github.com/urfave/cli/v2" ) diff --git a/cmd/beacon-chain/main.go b/cmd/beacon-chain/main.go index 7c739696ee..2ab85e3c5a 100644 --- a/cmd/beacon-chain/main.go +++ b/cmd/beacon-chain/main.go @@ -8,29 +8,29 @@ import ( "path/filepath" runtimeDebug "runtime/debug" - "github.com/OffchainLabs/prysm/v6/beacon-chain/builder" - "github.com/OffchainLabs/prysm/v6/beacon-chain/node" - "github.com/OffchainLabs/prysm/v6/cmd" - blockchaincmd "github.com/OffchainLabs/prysm/v6/cmd/beacon-chain/blockchain" - dbcommands "github.com/OffchainLabs/prysm/v6/cmd/beacon-chain/db" - "github.com/OffchainLabs/prysm/v6/cmd/beacon-chain/execution" - "github.com/OffchainLabs/prysm/v6/cmd/beacon-chain/flags" - "github.com/OffchainLabs/prysm/v6/cmd/beacon-chain/genesis" - jwtcommands "github.com/OffchainLabs/prysm/v6/cmd/beacon-chain/jwt" - "github.com/OffchainLabs/prysm/v6/cmd/beacon-chain/storage" - backfill "github.com/OffchainLabs/prysm/v6/cmd/beacon-chain/sync/backfill" - bflags "github.com/OffchainLabs/prysm/v6/cmd/beacon-chain/sync/backfill/flags" - "github.com/OffchainLabs/prysm/v6/cmd/beacon-chain/sync/checkpoint" - "github.com/OffchainLabs/prysm/v6/config/features" - "github.com/OffchainLabs/prysm/v6/io/file" - "github.com/OffchainLabs/prysm/v6/io/logs" - "github.com/OffchainLabs/prysm/v6/monitoring/journald" - "github.com/OffchainLabs/prysm/v6/runtime/debug" - "github.com/OffchainLabs/prysm/v6/runtime/fdlimits" - prefixed "github.com/OffchainLabs/prysm/v6/runtime/logging/logrus-prefixed-formatter" - _ "github.com/OffchainLabs/prysm/v6/runtime/maxprocs" - "github.com/OffchainLabs/prysm/v6/runtime/tos" - "github.com/OffchainLabs/prysm/v6/runtime/version" + "github.com/OffchainLabs/prysm/v7/beacon-chain/builder" + "github.com/OffchainLabs/prysm/v7/beacon-chain/node" + "github.com/OffchainLabs/prysm/v7/cmd" + blockchaincmd "github.com/OffchainLabs/prysm/v7/cmd/beacon-chain/blockchain" + dbcommands "github.com/OffchainLabs/prysm/v7/cmd/beacon-chain/db" + "github.com/OffchainLabs/prysm/v7/cmd/beacon-chain/execution" + "github.com/OffchainLabs/prysm/v7/cmd/beacon-chain/flags" + "github.com/OffchainLabs/prysm/v7/cmd/beacon-chain/genesis" + jwtcommands "github.com/OffchainLabs/prysm/v7/cmd/beacon-chain/jwt" + "github.com/OffchainLabs/prysm/v7/cmd/beacon-chain/storage" + backfill "github.com/OffchainLabs/prysm/v7/cmd/beacon-chain/sync/backfill" + bflags "github.com/OffchainLabs/prysm/v7/cmd/beacon-chain/sync/backfill/flags" + "github.com/OffchainLabs/prysm/v7/cmd/beacon-chain/sync/checkpoint" + "github.com/OffchainLabs/prysm/v7/config/features" + "github.com/OffchainLabs/prysm/v7/io/file" + "github.com/OffchainLabs/prysm/v7/io/logs" + "github.com/OffchainLabs/prysm/v7/monitoring/journald" + 
"github.com/OffchainLabs/prysm/v7/runtime/debug" + "github.com/OffchainLabs/prysm/v7/runtime/fdlimits" + prefixed "github.com/OffchainLabs/prysm/v7/runtime/logging/logrus-prefixed-formatter" + _ "github.com/OffchainLabs/prysm/v7/runtime/maxprocs" + "github.com/OffchainLabs/prysm/v7/runtime/tos" + "github.com/OffchainLabs/prysm/v7/runtime/version" gethlog "github.com/ethereum/go-ethereum/log" golog "github.com/ipfs/go-log/v2" joonix "github.com/joonix/log" diff --git a/cmd/beacon-chain/storage/BUILD.bazel b/cmd/beacon-chain/storage/BUILD.bazel index c966205507..49a9708e27 100644 --- a/cmd/beacon-chain/storage/BUILD.bazel +++ b/cmd/beacon-chain/storage/BUILD.bazel @@ -3,7 +3,7 @@ load("@prysm//tools/go:def.bzl", "go_library", "go_test") go_library( name = "go_default_library", srcs = ["options.go"], - importpath = "github.com/OffchainLabs/prysm/v6/cmd/beacon-chain/storage", + importpath = "github.com/OffchainLabs/prysm/v7/cmd/beacon-chain/storage", visibility = ["//visibility:public"], deps = [ "//beacon-chain/db/filesystem:go_default_library", diff --git a/cmd/beacon-chain/storage/options.go b/cmd/beacon-chain/storage/options.go index a6bf4aa8dc..a1134d444c 100644 --- a/cmd/beacon-chain/storage/options.go +++ b/cmd/beacon-chain/storage/options.go @@ -8,11 +8,11 @@ import ( "slices" "strings" - "github.com/OffchainLabs/prysm/v6/beacon-chain/db/filesystem" - "github.com/OffchainLabs/prysm/v6/beacon-chain/node" - "github.com/OffchainLabs/prysm/v6/cmd" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/beacon-chain/db/filesystem" + "github.com/OffchainLabs/prysm/v7/beacon-chain/node" + "github.com/OffchainLabs/prysm/v7/cmd" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" "github.com/pkg/errors" log "github.com/sirupsen/logrus" "github.com/urfave/cli/v2" diff --git a/cmd/beacon-chain/storage/options_test.go b/cmd/beacon-chain/storage/options_test.go index 0af3307256..fe68ce25bd 100644 --- a/cmd/beacon-chain/storage/options_test.go +++ b/cmd/beacon-chain/storage/options_test.go @@ -10,12 +10,12 @@ import ( "syscall" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/db/filesystem" - "github.com/OffchainLabs/prysm/v6/cmd" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/beacon-chain/db/filesystem" + "github.com/OffchainLabs/prysm/v7/cmd" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" "github.com/urfave/cli/v2" ) diff --git a/cmd/beacon-chain/sync/backfill/BUILD.bazel b/cmd/beacon-chain/sync/backfill/BUILD.bazel index 4aeb5df299..59b80b975c 100644 --- a/cmd/beacon-chain/sync/backfill/BUILD.bazel +++ b/cmd/beacon-chain/sync/backfill/BUILD.bazel @@ -3,7 +3,7 @@ load("@prysm//tools/go:def.bzl", "go_library") go_library( name = "go_default_library", srcs = ["options.go"], - importpath = "github.com/OffchainLabs/prysm/v6/cmd/beacon-chain/sync/backfill", + importpath = "github.com/OffchainLabs/prysm/v7/cmd/beacon-chain/sync/backfill", visibility = ["//visibility:public"], deps = [ "//beacon-chain/node:go_default_library", diff --git 
a/cmd/beacon-chain/sync/backfill/flags/BUILD.bazel b/cmd/beacon-chain/sync/backfill/flags/BUILD.bazel index 1e58f5dfb1..99ec34f2b4 100644 --- a/cmd/beacon-chain/sync/backfill/flags/BUILD.bazel +++ b/cmd/beacon-chain/sync/backfill/flags/BUILD.bazel @@ -3,7 +3,7 @@ load("@prysm//tools/go:def.bzl", "go_library") go_library( name = "go_default_library", srcs = ["flags.go"], - importpath = "github.com/OffchainLabs/prysm/v6/cmd/beacon-chain/sync/backfill/flags", + importpath = "github.com/OffchainLabs/prysm/v7/cmd/beacon-chain/sync/backfill/flags", visibility = ["//visibility:public"], deps = ["@com_github_urfave_cli_v2//:go_default_library"], ) diff --git a/cmd/beacon-chain/sync/backfill/options.go b/cmd/beacon-chain/sync/backfill/options.go index 6c2074d1f3..105b27cdf3 100644 --- a/cmd/beacon-chain/sync/backfill/options.go +++ b/cmd/beacon-chain/sync/backfill/options.go @@ -1,10 +1,10 @@ package backfill import ( - "github.com/OffchainLabs/prysm/v6/beacon-chain/node" - "github.com/OffchainLabs/prysm/v6/beacon-chain/sync/backfill" - "github.com/OffchainLabs/prysm/v6/cmd/beacon-chain/sync/backfill/flags" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/beacon-chain/node" + "github.com/OffchainLabs/prysm/v7/beacon-chain/sync/backfill" + "github.com/OffchainLabs/prysm/v7/cmd/beacon-chain/sync/backfill/flags" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" "github.com/urfave/cli/v2" ) diff --git a/cmd/beacon-chain/sync/checkpoint/BUILD.bazel b/cmd/beacon-chain/sync/checkpoint/BUILD.bazel index be97894070..5b72488c9b 100644 --- a/cmd/beacon-chain/sync/checkpoint/BUILD.bazel +++ b/cmd/beacon-chain/sync/checkpoint/BUILD.bazel @@ -3,7 +3,7 @@ load("@prysm//tools/go:def.bzl", "go_library") go_library( name = "go_default_library", srcs = ["options.go"], - importpath = "github.com/OffchainLabs/prysm/v6/cmd/beacon-chain/sync/checkpoint", + importpath = "github.com/OffchainLabs/prysm/v7/cmd/beacon-chain/sync/checkpoint", visibility = ["//visibility:public"], deps = [ "//beacon-chain/node:go_default_library", diff --git a/cmd/beacon-chain/sync/checkpoint/options.go b/cmd/beacon-chain/sync/checkpoint/options.go index 2dd1509202..04137343ca 100644 --- a/cmd/beacon-chain/sync/checkpoint/options.go +++ b/cmd/beacon-chain/sync/checkpoint/options.go @@ -1,8 +1,8 @@ package checkpoint import ( - "github.com/OffchainLabs/prysm/v6/beacon-chain/node" - "github.com/OffchainLabs/prysm/v6/beacon-chain/sync/checkpoint" + "github.com/OffchainLabs/prysm/v7/beacon-chain/node" + "github.com/OffchainLabs/prysm/v7/beacon-chain/sync/checkpoint" "github.com/pkg/errors" "github.com/urfave/cli/v2" ) diff --git a/cmd/beacon-chain/usage.go b/cmd/beacon-chain/usage.go index 279e1c07c9..af8b89400f 100644 --- a/cmd/beacon-chain/usage.go +++ b/cmd/beacon-chain/usage.go @@ -5,14 +5,14 @@ import ( "io" "sort" - "github.com/OffchainLabs/prysm/v6/cmd" - "github.com/OffchainLabs/prysm/v6/cmd/beacon-chain/flags" - "github.com/OffchainLabs/prysm/v6/cmd/beacon-chain/genesis" - "github.com/OffchainLabs/prysm/v6/cmd/beacon-chain/storage" - backfill "github.com/OffchainLabs/prysm/v6/cmd/beacon-chain/sync/backfill/flags" - "github.com/OffchainLabs/prysm/v6/cmd/beacon-chain/sync/checkpoint" - "github.com/OffchainLabs/prysm/v6/config/features" - "github.com/OffchainLabs/prysm/v6/runtime/debug" + "github.com/OffchainLabs/prysm/v7/cmd" + "github.com/OffchainLabs/prysm/v7/cmd/beacon-chain/flags" + "github.com/OffchainLabs/prysm/v7/cmd/beacon-chain/genesis" + 
"github.com/OffchainLabs/prysm/v7/cmd/beacon-chain/storage" + backfill "github.com/OffchainLabs/prysm/v7/cmd/beacon-chain/sync/backfill/flags" + "github.com/OffchainLabs/prysm/v7/cmd/beacon-chain/sync/checkpoint" + "github.com/OffchainLabs/prysm/v7/config/features" + "github.com/OffchainLabs/prysm/v7/runtime/debug" "github.com/urfave/cli/v2" ) diff --git a/cmd/beacon-chain/usage_test.go b/cmd/beacon-chain/usage_test.go index 8c82272b87..eb26defb7a 100644 --- a/cmd/beacon-chain/usage_test.go +++ b/cmd/beacon-chain/usage_test.go @@ -3,7 +3,7 @@ package main import ( "testing" - "github.com/OffchainLabs/prysm/v6/config/features" + "github.com/OffchainLabs/prysm/v7/config/features" "github.com/urfave/cli/v2" ) diff --git a/cmd/client-stats/BUILD.bazel b/cmd/client-stats/BUILD.bazel index 4ab521abee..936ef3a7da 100644 --- a/cmd/client-stats/BUILD.bazel +++ b/cmd/client-stats/BUILD.bazel @@ -8,7 +8,7 @@ go_library( "main.go", "usage.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/cmd/client-stats", + importpath = "github.com/OffchainLabs/prysm/v7/cmd/client-stats", visibility = ["//visibility:private"], deps = [ "//cmd:go_default_library", diff --git a/cmd/client-stats/flags/BUILD.bazel b/cmd/client-stats/flags/BUILD.bazel index 162e8ec510..3f69e823d5 100644 --- a/cmd/client-stats/flags/BUILD.bazel +++ b/cmd/client-stats/flags/BUILD.bazel @@ -3,7 +3,7 @@ load("@prysm//tools/go:def.bzl", "go_library") go_library( name = "go_default_library", srcs = ["flags.go"], - importpath = "github.com/OffchainLabs/prysm/v6/cmd/client-stats/flags", + importpath = "github.com/OffchainLabs/prysm/v7/cmd/client-stats/flags", visibility = ["//visibility:public"], deps = ["@com_github_urfave_cli_v2//:go_default_library"], ) diff --git a/cmd/client-stats/main.go b/cmd/client-stats/main.go index 41d3449d70..4c5b582ff6 100644 --- a/cmd/client-stats/main.go +++ b/cmd/client-stats/main.go @@ -6,13 +6,13 @@ import ( runtimeDebug "runtime/debug" "time" - "github.com/OffchainLabs/prysm/v6/cmd" - "github.com/OffchainLabs/prysm/v6/cmd/client-stats/flags" - "github.com/OffchainLabs/prysm/v6/io/logs" - "github.com/OffchainLabs/prysm/v6/monitoring/clientstats" - "github.com/OffchainLabs/prysm/v6/monitoring/journald" - prefixed "github.com/OffchainLabs/prysm/v6/runtime/logging/logrus-prefixed-formatter" - "github.com/OffchainLabs/prysm/v6/runtime/version" + "github.com/OffchainLabs/prysm/v7/cmd" + "github.com/OffchainLabs/prysm/v7/cmd/client-stats/flags" + "github.com/OffchainLabs/prysm/v7/io/logs" + "github.com/OffchainLabs/prysm/v7/monitoring/clientstats" + "github.com/OffchainLabs/prysm/v7/monitoring/journald" + prefixed "github.com/OffchainLabs/prysm/v7/runtime/logging/logrus-prefixed-formatter" + "github.com/OffchainLabs/prysm/v7/runtime/version" joonix "github.com/joonix/log" "github.com/sirupsen/logrus" "github.com/urfave/cli/v2" diff --git a/cmd/client-stats/usage.go b/cmd/client-stats/usage.go index c5cdbe2690..eefb511f71 100644 --- a/cmd/client-stats/usage.go +++ b/cmd/client-stats/usage.go @@ -5,8 +5,8 @@ import ( "io" "sort" - "github.com/OffchainLabs/prysm/v6/cmd" - "github.com/OffchainLabs/prysm/v6/cmd/client-stats/flags" + "github.com/OffchainLabs/prysm/v7/cmd" + "github.com/OffchainLabs/prysm/v7/cmd/client-stats/flags" "github.com/urfave/cli/v2" ) diff --git a/cmd/config.go b/cmd/config.go index 454b53f495..8e9ee5598a 100644 --- a/cmd/config.go +++ b/cmd/config.go @@ -1,8 +1,8 @@ package cmd import ( - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - 
"github.com/OffchainLabs/prysm/v6/config/params" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" "github.com/urfave/cli/v2" ) diff --git a/cmd/config_test.go b/cmd/config_test.go index 1c75ffcb36..910e9ea978 100644 --- a/cmd/config_test.go +++ b/cmd/config_test.go @@ -4,9 +4,9 @@ import ( "flag" "testing" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" "github.com/urfave/cli/v2" ) diff --git a/cmd/defaults.go b/cmd/defaults.go index add3ccc0ea..324e82324a 100644 --- a/cmd/defaults.go +++ b/cmd/defaults.go @@ -22,7 +22,7 @@ import ( "path/filepath" "runtime" - "github.com/OffchainLabs/prysm/v6/io/file" + "github.com/OffchainLabs/prysm/v7/io/file" ) // DefaultDataDir is the default data directory to use for the databases and other diff --git a/cmd/flags.go b/cmd/flags.go index ae3d3cb296..5f12f4ce2a 100644 --- a/cmd/flags.go +++ b/cmd/flags.go @@ -8,7 +8,7 @@ import ( "strings" "time" - "github.com/OffchainLabs/prysm/v6/config/params" + "github.com/OffchainLabs/prysm/v7/config/params" "github.com/urfave/cli/v2" "github.com/urfave/cli/v2/altsrc" ) diff --git a/cmd/flags/BUILD.bazel b/cmd/flags/BUILD.bazel index d9ac9feed8..d225cd4800 100644 --- a/cmd/flags/BUILD.bazel +++ b/cmd/flags/BUILD.bazel @@ -3,7 +3,7 @@ load("@prysm//tools/go:def.bzl", "go_library") go_library( name = "go_default_library", srcs = ["enum.go"], - importpath = "github.com/OffchainLabs/prysm/v6/cmd/flags", + importpath = "github.com/OffchainLabs/prysm/v7/cmd/flags", visibility = ["//visibility:public"], deps = ["@com_github_urfave_cli_v2//:go_default_library"], ) diff --git a/cmd/flags_test.go b/cmd/flags_test.go index ef20bde5b4..789b151417 100644 --- a/cmd/flags_test.go +++ b/cmd/flags_test.go @@ -5,7 +5,7 @@ import ( "os" "testing" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/require" "github.com/urfave/cli/v2" ) diff --git a/cmd/helpers.go b/cmd/helpers.go index 30f1f36460..cf7e6d2be7 100644 --- a/cmd/helpers.go +++ b/cmd/helpers.go @@ -7,7 +7,7 @@ import ( "runtime" "strings" - "github.com/OffchainLabs/prysm/v6/io/file" + "github.com/OffchainLabs/prysm/v7/io/file" "github.com/pkg/errors" "github.com/sirupsen/logrus" "github.com/urfave/cli/v2" diff --git a/cmd/helpers_test.go b/cmd/helpers_test.go index e66da9652c..4f6bdc0695 100644 --- a/cmd/helpers_test.go +++ b/cmd/helpers_test.go @@ -6,9 +6,9 @@ import ( "os/user" "testing" - "github.com/OffchainLabs/prysm/v6/cmd/mock" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/cmd/mock" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" "github.com/pkg/errors" "github.com/urfave/cli/v2" "go.uber.org/mock/gomock" diff --git a/cmd/mock/BUILD.bazel b/cmd/mock/BUILD.bazel index b5fd17b9ce..37d9e3b759 100644 --- a/cmd/mock/BUILD.bazel +++ b/cmd/mock/BUILD.bazel @@ -4,7 +4,7 @@ go_library( name = "go_default_library", testonly = True, srcs = ["password_reader_mock.go"], - importpath = "github.com/OffchainLabs/prysm/v6/cmd/mock", + importpath = "github.com/OffchainLabs/prysm/v7/cmd/mock", visibility = ["//visibility:public"], deps = 
["@org_uber_go_mock//gomock:go_default_library"], ) diff --git a/cmd/prysmctl/BUILD.bazel b/cmd/prysmctl/BUILD.bazel index 4cfc7fcc3c..cf874992f4 100644 --- a/cmd/prysmctl/BUILD.bazel +++ b/cmd/prysmctl/BUILD.bazel @@ -5,7 +5,7 @@ load("//tools:prysm_image.bzl", "prysm_image_upload") go_library( name = "go_default_library", srcs = ["main.go"], - importpath = "github.com/OffchainLabs/prysm/v6/cmd/prysmctl", + importpath = "github.com/OffchainLabs/prysm/v7/cmd/prysmctl", visibility = ["//visibility:private"], deps = [ "//cmd/prysmctl/checkpointsync:go_default_library", diff --git a/cmd/prysmctl/checkpointsync/BUILD.bazel b/cmd/prysmctl/checkpointsync/BUILD.bazel index 2f17552376..60183b0ec8 100644 --- a/cmd/prysmctl/checkpointsync/BUILD.bazel +++ b/cmd/prysmctl/checkpointsync/BUILD.bazel @@ -6,7 +6,7 @@ go_library( "cmd.go", "download.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/cmd/prysmctl/checkpointsync", + importpath = "github.com/OffchainLabs/prysm/v7/cmd/prysmctl/checkpointsync", visibility = ["//visibility:public"], deps = [ "//api/client:go_default_library", diff --git a/cmd/prysmctl/checkpointsync/download.go b/cmd/prysmctl/checkpointsync/download.go index b5cf3b61b8..53c724f630 100644 --- a/cmd/prysmctl/checkpointsync/download.go +++ b/cmd/prysmctl/checkpointsync/download.go @@ -5,9 +5,9 @@ import ( "os" "time" - "github.com/OffchainLabs/prysm/v6/api/client" - "github.com/OffchainLabs/prysm/v6/api/client/beacon" - "github.com/OffchainLabs/prysm/v6/beacon-chain/sync/checkpoint" + "github.com/OffchainLabs/prysm/v7/api/client" + "github.com/OffchainLabs/prysm/v7/api/client/beacon" + "github.com/OffchainLabs/prysm/v7/beacon-chain/sync/checkpoint" log "github.com/sirupsen/logrus" "github.com/urfave/cli/v2" ) diff --git a/cmd/prysmctl/db/BUILD.bazel b/cmd/prysmctl/db/BUILD.bazel index 015c7b8ac4..d5ac5f24c0 100644 --- a/cmd/prysmctl/db/BUILD.bazel +++ b/cmd/prysmctl/db/BUILD.bazel @@ -8,7 +8,7 @@ go_library( "query.go", "span.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/cmd/prysmctl/db", + importpath = "github.com/OffchainLabs/prysm/v7/cmd/prysmctl/db", visibility = ["//visibility:public"], deps = [ "//beacon-chain/db/kv:go_default_library", diff --git a/cmd/prysmctl/db/buckets.go b/cmd/prysmctl/db/buckets.go index c2d040eccd..b34968fdad 100644 --- a/cmd/prysmctl/db/buckets.go +++ b/cmd/prysmctl/db/buckets.go @@ -3,7 +3,7 @@ package db import ( "fmt" - "github.com/OffchainLabs/prysm/v6/beacon-chain/db/kv" + "github.com/OffchainLabs/prysm/v7/beacon-chain/db/kv" "github.com/urfave/cli/v2" ) diff --git a/cmd/prysmctl/db/query.go b/cmd/prysmctl/db/query.go index 58f474dc6f..3447c04c39 100644 --- a/cmd/prysmctl/db/query.go +++ b/cmd/prysmctl/db/query.go @@ -5,7 +5,7 @@ import ( "fmt" "time" - "github.com/OffchainLabs/prysm/v6/config/params" + "github.com/OffchainLabs/prysm/v7/config/params" "github.com/ethereum/go-ethereum/common/hexutil" "github.com/pkg/errors" log "github.com/sirupsen/logrus" diff --git a/cmd/prysmctl/db/span.go b/cmd/prysmctl/db/span.go index daeaa9fe84..a855ea7043 100644 --- a/cmd/prysmctl/db/span.go +++ b/cmd/prysmctl/db/span.go @@ -3,9 +3,9 @@ package db import ( "fmt" - "github.com/OffchainLabs/prysm/v6/beacon-chain/slasher" - "github.com/OffchainLabs/prysm/v6/beacon-chain/slasher/types" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/beacon-chain/slasher" + "github.com/OffchainLabs/prysm/v7/beacon-chain/slasher/types" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" 
"github.com/jedib0t/go-pretty/v6/table" "github.com/pkg/errors" "github.com/urfave/cli/v2" diff --git a/cmd/prysmctl/main.go b/cmd/prysmctl/main.go index 8d1dac5d39..afb2e5b8fa 100644 --- a/cmd/prysmctl/main.go +++ b/cmd/prysmctl/main.go @@ -3,12 +3,12 @@ package main import ( "os" - "github.com/OffchainLabs/prysm/v6/cmd/prysmctl/checkpointsync" - "github.com/OffchainLabs/prysm/v6/cmd/prysmctl/db" - "github.com/OffchainLabs/prysm/v6/cmd/prysmctl/p2p" - "github.com/OffchainLabs/prysm/v6/cmd/prysmctl/testnet" - "github.com/OffchainLabs/prysm/v6/cmd/prysmctl/validator" - "github.com/OffchainLabs/prysm/v6/cmd/prysmctl/weaksubjectivity" + "github.com/OffchainLabs/prysm/v7/cmd/prysmctl/checkpointsync" + "github.com/OffchainLabs/prysm/v7/cmd/prysmctl/db" + "github.com/OffchainLabs/prysm/v7/cmd/prysmctl/p2p" + "github.com/OffchainLabs/prysm/v7/cmd/prysmctl/testnet" + "github.com/OffchainLabs/prysm/v7/cmd/prysmctl/validator" + "github.com/OffchainLabs/prysm/v7/cmd/prysmctl/weaksubjectivity" log "github.com/sirupsen/logrus" "github.com/urfave/cli/v2" ) diff --git a/cmd/prysmctl/p2p/BUILD.bazel b/cmd/prysmctl/p2p/BUILD.bazel index bceb3327fa..8453683f18 100644 --- a/cmd/prysmctl/p2p/BUILD.bazel +++ b/cmd/prysmctl/p2p/BUILD.bazel @@ -13,7 +13,7 @@ go_library( "request_blobs.go", "request_blocks.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/cmd/prysmctl/p2p", + importpath = "github.com/OffchainLabs/prysm/v7/cmd/prysmctl/p2p", visibility = ["//visibility:public"], deps = [ "//beacon-chain/forkchoice:go_default_library", diff --git a/cmd/prysmctl/p2p/client.go b/cmd/prysmctl/p2p/client.go index 5a660d9132..808bc8d95e 100644 --- a/cmd/prysmctl/p2p/client.go +++ b/cmd/prysmctl/p2p/client.go @@ -8,19 +8,19 @@ import ( "time" "github.com/OffchainLabs/go-bitfield" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/encoder" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/wrapper" - ecdsaprysm "github.com/OffchainLabs/prysm/v6/crypto/ecdsa" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing/trace" - "github.com/OffchainLabs/prysm/v6/network" - pb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1/metadata" - "github.com/OffchainLabs/prysm/v6/runtime/version" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/encoder" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/wrapper" + ecdsaprysm "github.com/OffchainLabs/prysm/v7/crypto/ecdsa" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing/trace" + "github.com/OffchainLabs/prysm/v7/network" + pb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1/metadata" + "github.com/OffchainLabs/prysm/v7/runtime/version" + "github.com/OffchainLabs/prysm/v7/time/slots" "github.com/libp2p/go-libp2p" "github.com/libp2p/go-libp2p/core/crypto" "github.com/libp2p/go-libp2p/core/host" diff --git a/cmd/prysmctl/p2p/handler.go b/cmd/prysmctl/p2p/handler.go index 86aae3aa60..3ad525de7d 100644 --- a/cmd/prysmctl/p2p/handler.go +++ b/cmd/prysmctl/p2p/handler.go @@ -6,8 
+6,8 @@ import ( "runtime/debug" "strings" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p" - p2ptypes "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/types" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p" + p2ptypes "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/types" libp2pcore "github.com/libp2p/go-libp2p/core" corenet "github.com/libp2p/go-libp2p/core/network" "github.com/libp2p/go-libp2p/core/protocol" diff --git a/cmd/prysmctl/p2p/handshake.go b/cmd/prysmctl/p2p/handshake.go index 70c1a818ff..7d38e7f0f9 100644 --- a/cmd/prysmctl/p2p/handshake.go +++ b/cmd/prysmctl/p2p/handshake.go @@ -3,11 +3,11 @@ package p2p import ( "context" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - pb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + pb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/time/slots" libp2pcore "github.com/libp2p/go-libp2p/core" "github.com/sirupsen/logrus" "google.golang.org/protobuf/types/known/emptypb" diff --git a/cmd/prysmctl/p2p/mock_chain.go b/cmd/prysmctl/p2p/mock_chain.go index 453b4bc6db..40c6f10238 100644 --- a/cmd/prysmctl/p2p/mock_chain.go +++ b/cmd/prysmctl/p2p/mock_chain.go @@ -3,10 +3,10 @@ package p2p import ( "time" - "github.com/OffchainLabs/prysm/v6/beacon-chain/forkchoice" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/beacon-chain/forkchoice" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/time/slots" ) type mockChain struct { diff --git a/cmd/prysmctl/p2p/peers.go b/cmd/prysmctl/p2p/peers.go index f59bd989ea..f722f3f860 100644 --- a/cmd/prysmctl/p2p/peers.go +++ b/cmd/prysmctl/p2p/peers.go @@ -3,7 +3,7 @@ package p2p import ( "context" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p" "github.com/libp2p/go-libp2p/core/peer" ) diff --git a/cmd/prysmctl/p2p/request_blobs.go b/cmd/prysmctl/p2p/request_blobs.go index 7d9a8135d5..065f2c8b54 100644 --- a/cmd/prysmctl/p2p/request_blobs.go +++ b/cmd/prysmctl/p2p/request_blobs.go @@ -6,14 +6,14 @@ import ( "strings" "time" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p" - p2ptypes "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/types" - "github.com/OffchainLabs/prysm/v6/beacon-chain/sync" - "github.com/OffchainLabs/prysm/v6/cmd" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - pb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p" + p2ptypes "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/types" + "github.com/OffchainLabs/prysm/v7/beacon-chain/sync" + "github.com/OffchainLabs/prysm/v7/cmd" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + pb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + 
"github.com/OffchainLabs/prysm/v7/time/slots" libp2pcore "github.com/libp2p/go-libp2p/core" "github.com/pkg/errors" "github.com/sirupsen/logrus" diff --git a/cmd/prysmctl/p2p/request_blocks.go b/cmd/prysmctl/p2p/request_blocks.go index 922ed177db..a153664290 100644 --- a/cmd/prysmctl/p2p/request_blocks.go +++ b/cmd/prysmctl/p2p/request_blocks.go @@ -5,15 +5,15 @@ import ( "strings" "time" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p" - p2ptypes "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/types" - "github.com/OffchainLabs/prysm/v6/beacon-chain/sync" - "github.com/OffchainLabs/prysm/v6/cmd" - "github.com/OffchainLabs/prysm/v6/config/params" - consensus_types "github.com/OffchainLabs/prysm/v6/consensus-types" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - pb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p" + p2ptypes "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/types" + "github.com/OffchainLabs/prysm/v7/beacon-chain/sync" + "github.com/OffchainLabs/prysm/v7/cmd" + "github.com/OffchainLabs/prysm/v7/config/params" + consensus_types "github.com/OffchainLabs/prysm/v7/consensus-types" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + pb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/time/slots" libp2pcore "github.com/libp2p/go-libp2p/core" corenet "github.com/libp2p/go-libp2p/core/network" "github.com/pkg/errors" diff --git a/cmd/prysmctl/testnet/BUILD.bazel b/cmd/prysmctl/testnet/BUILD.bazel index 5dec5caf75..dcfac82d18 100644 --- a/cmd/prysmctl/testnet/BUILD.bazel +++ b/cmd/prysmctl/testnet/BUILD.bazel @@ -6,7 +6,7 @@ go_library( "generate_genesis.go", "testnet.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/cmd/prysmctl/testnet", + importpath = "github.com/OffchainLabs/prysm/v7/cmd/prysmctl/testnet", visibility = ["//visibility:public"], deps = [ "//beacon-chain/state:go_default_library", diff --git a/cmd/prysmctl/testnet/generate_genesis.go b/cmd/prysmctl/testnet/generate_genesis.go index 479617ee4a..eb081cb4f5 100644 --- a/cmd/prysmctl/testnet/generate_genesis.go +++ b/cmd/prysmctl/testnet/generate_genesis.go @@ -10,14 +10,14 @@ import ( "strings" "time" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/cmd/flags" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/container/trie" - "github.com/OffchainLabs/prysm/v6/io/file" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/interop" - "github.com/OffchainLabs/prysm/v6/runtime/version" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/cmd/flags" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/container/trie" + "github.com/OffchainLabs/prysm/v7/io/file" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/interop" + "github.com/OffchainLabs/prysm/v7/runtime/version" "github.com/ethereum/go-ethereum/core" "github.com/ethereum/go-ethereum/ethclient" "github.com/ethereum/go-ethereum/rpc" diff --git a/cmd/prysmctl/testnet/generate_genesis_test.go b/cmd/prysmctl/testnet/generate_genesis_test.go index 95da934461..b0d7312b9c 100644 --- a/cmd/prysmctl/testnet/generate_genesis_test.go +++ b/cmd/prysmctl/testnet/generate_genesis_test.go @@ -8,11 +8,11 @@ import ( "os" 
"testing" - "github.com/OffchainLabs/prysm/v6/crypto/bls" - "github.com/OffchainLabs/prysm/v6/runtime/interop" - "github.com/OffchainLabs/prysm/v6/runtime/version" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/crypto/bls" + "github.com/OffchainLabs/prysm/v7/runtime/interop" + "github.com/OffchainLabs/prysm/v7/runtime/version" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" "github.com/ethereum/go-ethereum/core" "github.com/ethereum/go-ethereum/core/types" "github.com/ethereum/go-ethereum/params" diff --git a/cmd/prysmctl/validator/BUILD.bazel b/cmd/prysmctl/validator/BUILD.bazel index 8920247be1..edb4ce4e25 100644 --- a/cmd/prysmctl/validator/BUILD.bazel +++ b/cmd/prysmctl/validator/BUILD.bazel @@ -8,7 +8,7 @@ go_library( "proposer_settings.go", "withdraw.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/cmd/prysmctl/validator", + importpath = "github.com/OffchainLabs/prysm/v7/cmd/prysmctl/validator", visibility = ["//visibility:public"], deps = [ "//api/client:go_default_library", diff --git a/cmd/prysmctl/validator/cmd.go b/cmd/prysmctl/validator/cmd.go index 85f313e643..c20ecabf47 100644 --- a/cmd/prysmctl/validator/cmd.go +++ b/cmd/prysmctl/validator/cmd.go @@ -4,11 +4,11 @@ import ( "fmt" "os" - "github.com/OffchainLabs/prysm/v6/cmd" - "github.com/OffchainLabs/prysm/v6/cmd/validator/accounts" - "github.com/OffchainLabs/prysm/v6/cmd/validator/flags" - "github.com/OffchainLabs/prysm/v6/config/features" - "github.com/OffchainLabs/prysm/v6/runtime/tos" + "github.com/OffchainLabs/prysm/v7/cmd" + "github.com/OffchainLabs/prysm/v7/cmd/validator/accounts" + "github.com/OffchainLabs/prysm/v7/cmd/validator/flags" + "github.com/OffchainLabs/prysm/v7/config/features" + "github.com/OffchainLabs/prysm/v7/runtime/tos" "github.com/logrusorgru/aurora" log "github.com/sirupsen/logrus" "github.com/urfave/cli/v2" diff --git a/cmd/prysmctl/validator/proposer_settings.go b/cmd/prysmctl/validator/proposer_settings.go index f34258ba33..a5fa7e7197 100644 --- a/cmd/prysmctl/validator/proposer_settings.go +++ b/cmd/prysmctl/validator/proposer_settings.go @@ -5,16 +5,16 @@ import ( "errors" "io" - "github.com/OffchainLabs/prysm/v6/api/client" - "github.com/OffchainLabs/prysm/v6/api/client/validator" - "github.com/OffchainLabs/prysm/v6/cmd/validator/flags" - "github.com/OffchainLabs/prysm/v6/config/params" - validatorType "github.com/OffchainLabs/prysm/v6/consensus-types/validator" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/io/file" - "github.com/OffchainLabs/prysm/v6/io/prompt" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing/trace" - validatorpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1/validator-client" + "github.com/OffchainLabs/prysm/v7/api/client" + "github.com/OffchainLabs/prysm/v7/api/client/validator" + "github.com/OffchainLabs/prysm/v7/cmd/validator/flags" + "github.com/OffchainLabs/prysm/v7/config/params" + validatorType "github.com/OffchainLabs/prysm/v7/consensus-types/validator" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/io/file" + "github.com/OffchainLabs/prysm/v7/io/prompt" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing/trace" + validatorpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1/validator-client" "github.com/ethereum/go-ethereum/common" log "github.com/sirupsen/logrus" "github.com/urfave/cli/v2" diff 
--git a/cmd/prysmctl/validator/proposer_settings_test.go b/cmd/prysmctl/validator/proposer_settings_test.go index d63d5b62f9..1eb547dd58 100644 --- a/cmd/prysmctl/validator/proposer_settings_test.go +++ b/cmd/prysmctl/validator/proposer_settings_test.go @@ -11,9 +11,9 @@ import ( "strings" "testing" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/validator/rpc" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/validator/rpc" logtest "github.com/sirupsen/logrus/hooks/test" "github.com/urfave/cli/v2" ) diff --git a/cmd/prysmctl/validator/withdraw.go b/cmd/prysmctl/validator/withdraw.go index 154034d2b0..e5161e8fe3 100644 --- a/cmd/prysmctl/validator/withdraw.go +++ b/cmd/prysmctl/validator/withdraw.go @@ -10,11 +10,11 @@ import ( "strconv" "strings" - "github.com/OffchainLabs/prysm/v6/api/client/beacon" - "github.com/OffchainLabs/prysm/v6/api/server/structs" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing/trace" + "github.com/OffchainLabs/prysm/v7/api/client/beacon" + "github.com/OffchainLabs/prysm/v7/api/server/structs" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing/trace" "github.com/ethereum/go-ethereum/common" "github.com/logrusorgru/aurora" "github.com/pkg/errors" diff --git a/cmd/prysmctl/validator/withdraw_test.go b/cmd/prysmctl/validator/withdraw_test.go index dcc4a61339..d10f962625 100644 --- a/cmd/prysmctl/validator/withdraw_test.go +++ b/cmd/prysmctl/validator/withdraw_test.go @@ -11,11 +11,11 @@ import ( "path/filepath" "testing" - "github.com/OffchainLabs/prysm/v6/api/server" - "github.com/OffchainLabs/prysm/v6/api/server/structs" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/api/server" + "github.com/OffchainLabs/prysm/v7/api/server/structs" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" "github.com/ethereum/go-ethereum/common/hexutil" logtest "github.com/sirupsen/logrus/hooks/test" "github.com/urfave/cli/v2" diff --git a/cmd/prysmctl/weaksubjectivity/BUILD.bazel b/cmd/prysmctl/weaksubjectivity/BUILD.bazel index d3e709efcc..74627ebbdb 100644 --- a/cmd/prysmctl/weaksubjectivity/BUILD.bazel +++ b/cmd/prysmctl/weaksubjectivity/BUILD.bazel @@ -6,7 +6,7 @@ go_library( "checkpoint.go", "cmd.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/cmd/prysmctl/weaksubjectivity", + importpath = "github.com/OffchainLabs/prysm/v7/cmd/prysmctl/weaksubjectivity", visibility = ["//visibility:public"], deps = [ "//api/client:go_default_library", diff --git a/cmd/prysmctl/weaksubjectivity/checkpoint.go b/cmd/prysmctl/weaksubjectivity/checkpoint.go index e49b04f09b..f32d44f574 100644 --- a/cmd/prysmctl/weaksubjectivity/checkpoint.go +++ b/cmd/prysmctl/weaksubjectivity/checkpoint.go @@ -5,9 +5,9 @@ import ( "fmt" "time" - "github.com/OffchainLabs/prysm/v6/api/client" - "github.com/OffchainLabs/prysm/v6/api/client/beacon" - "github.com/OffchainLabs/prysm/v6/beacon-chain/sync/checkpoint" + 
"github.com/OffchainLabs/prysm/v7/api/client" + "github.com/OffchainLabs/prysm/v7/api/client/beacon" + "github.com/OffchainLabs/prysm/v7/beacon-chain/sync/checkpoint" log "github.com/sirupsen/logrus" "github.com/urfave/cli/v2" ) diff --git a/cmd/validator/BUILD.bazel b/cmd/validator/BUILD.bazel index a7ff3198f8..8efe7d3dfc 100644 --- a/cmd/validator/BUILD.bazel +++ b/cmd/validator/BUILD.bazel @@ -8,7 +8,7 @@ go_library( "main.go", "usage.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/cmd/validator", + importpath = "github.com/OffchainLabs/prysm/v7/cmd/validator", visibility = ["//validator:__subpackages__"], deps = [ "//cmd:go_default_library", diff --git a/cmd/validator/accounts/BUILD.bazel b/cmd/validator/accounts/BUILD.bazel index 228352f505..c0634c8a48 100644 --- a/cmd/validator/accounts/BUILD.bazel +++ b/cmd/validator/accounts/BUILD.bazel @@ -11,7 +11,7 @@ go_library( "list.go", "wallet_utils.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/cmd/validator/accounts", + importpath = "github.com/OffchainLabs/prysm/v7/cmd/validator/accounts", visibility = ["//visibility:public"], deps = [ "//api/grpc:go_default_library", diff --git a/cmd/validator/accounts/accounts.go b/cmd/validator/accounts/accounts.go index 90d81b7eb2..10f233380e 100644 --- a/cmd/validator/accounts/accounts.go +++ b/cmd/validator/accounts/accounts.go @@ -3,10 +3,10 @@ package accounts import ( "os" - "github.com/OffchainLabs/prysm/v6/cmd" - "github.com/OffchainLabs/prysm/v6/cmd/validator/flags" - "github.com/OffchainLabs/prysm/v6/config/features" - "github.com/OffchainLabs/prysm/v6/runtime/tos" + "github.com/OffchainLabs/prysm/v7/cmd" + "github.com/OffchainLabs/prysm/v7/cmd/validator/flags" + "github.com/OffchainLabs/prysm/v7/config/features" + "github.com/OffchainLabs/prysm/v7/runtime/tos" "github.com/sirupsen/logrus" "github.com/urfave/cli/v2" ) diff --git a/cmd/validator/accounts/backup.go b/cmd/validator/accounts/backup.go index d0e30bf6b9..492e02ccac 100644 --- a/cmd/validator/accounts/backup.go +++ b/cmd/validator/accounts/backup.go @@ -3,12 +3,12 @@ package accounts import ( "strings" - "github.com/OffchainLabs/prysm/v6/cmd" - "github.com/OffchainLabs/prysm/v6/cmd/validator/flags" - "github.com/OffchainLabs/prysm/v6/io/prompt" - "github.com/OffchainLabs/prysm/v6/validator/accounts" - "github.com/OffchainLabs/prysm/v6/validator/accounts/userprompt" - "github.com/OffchainLabs/prysm/v6/validator/client" + "github.com/OffchainLabs/prysm/v7/cmd" + "github.com/OffchainLabs/prysm/v7/cmd/validator/flags" + "github.com/OffchainLabs/prysm/v7/io/prompt" + "github.com/OffchainLabs/prysm/v7/validator/accounts" + "github.com/OffchainLabs/prysm/v7/validator/accounts/userprompt" + "github.com/OffchainLabs/prysm/v7/validator/client" "github.com/pkg/errors" "github.com/urfave/cli/v2" ) diff --git a/cmd/validator/accounts/backup_test.go b/cmd/validator/accounts/backup_test.go index dc1b7d3e8b..6ed61e79d8 100644 --- a/cmd/validator/accounts/backup_test.go +++ b/cmd/validator/accounts/backup_test.go @@ -12,15 +12,15 @@ import ( "testing" "time" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/io/file" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/validator/accounts" - "github.com/OffchainLabs/prysm/v6/validator/accounts/iface" - "github.com/OffchainLabs/prysm/v6/validator/keymanager" - "github.com/OffchainLabs/prysm/v6/validator/keymanager/derived" - constant 
"github.com/OffchainLabs/prysm/v6/validator/testing" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/io/file" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/validator/accounts" + "github.com/OffchainLabs/prysm/v7/validator/accounts/iface" + "github.com/OffchainLabs/prysm/v7/validator/keymanager" + "github.com/OffchainLabs/prysm/v7/validator/keymanager/derived" + constant "github.com/OffchainLabs/prysm/v7/validator/testing" ) func TestBackupAccounts_Noninteractive_Derived(t *testing.T) { diff --git a/cmd/validator/accounts/delete.go b/cmd/validator/accounts/delete.go index 83ada5f442..62b62684cc 100644 --- a/cmd/validator/accounts/delete.go +++ b/cmd/validator/accounts/delete.go @@ -3,11 +3,11 @@ package accounts import ( "strings" - "github.com/OffchainLabs/prysm/v6/cmd" - "github.com/OffchainLabs/prysm/v6/cmd/validator/flags" - "github.com/OffchainLabs/prysm/v6/validator/accounts" - "github.com/OffchainLabs/prysm/v6/validator/accounts/userprompt" - "github.com/OffchainLabs/prysm/v6/validator/client" + "github.com/OffchainLabs/prysm/v7/cmd" + "github.com/OffchainLabs/prysm/v7/cmd/validator/flags" + "github.com/OffchainLabs/prysm/v7/validator/accounts" + "github.com/OffchainLabs/prysm/v7/validator/accounts/userprompt" + "github.com/OffchainLabs/prysm/v7/validator/client" "github.com/pkg/errors" "github.com/urfave/cli/v2" ) diff --git a/cmd/validator/accounts/delete_test.go b/cmd/validator/accounts/delete_test.go index 3a6854c142..0f9dce2366 100644 --- a/cmd/validator/accounts/delete_test.go +++ b/cmd/validator/accounts/delete_test.go @@ -12,16 +12,16 @@ import ( "testing" "time" - "github.com/OffchainLabs/prysm/v6/cmd/validator/flags" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/crypto/bls" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - prysmTime "github.com/OffchainLabs/prysm/v6/time" - "github.com/OffchainLabs/prysm/v6/validator/accounts" - "github.com/OffchainLabs/prysm/v6/validator/keymanager" - "github.com/OffchainLabs/prysm/v6/validator/keymanager/local" + "github.com/OffchainLabs/prysm/v7/cmd/validator/flags" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/crypto/bls" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + prysmTime "github.com/OffchainLabs/prysm/v7/time" + "github.com/OffchainLabs/prysm/v7/validator/accounts" + "github.com/OffchainLabs/prysm/v7/validator/keymanager" + "github.com/OffchainLabs/prysm/v7/validator/keymanager/local" "github.com/google/uuid" "github.com/urfave/cli/v2" keystorev4 "github.com/wealdtech/go-eth2-wallet-encryptor-keystorev4" diff --git a/cmd/validator/accounts/exit.go b/cmd/validator/accounts/exit.go index 41572ccef5..4cbeac6223 100644 --- a/cmd/validator/accounts/exit.go +++ b/cmd/validator/accounts/exit.go @@ -4,16 +4,16 @@ import ( "io" "strings" - grpcutil "github.com/OffchainLabs/prysm/v6/api/grpc" - "github.com/OffchainLabs/prysm/v6/cmd" - "github.com/OffchainLabs/prysm/v6/cmd/validator/flags" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/validator/accounts" - "github.com/OffchainLabs/prysm/v6/validator/accounts/wallet" - 
"github.com/OffchainLabs/prysm/v6/validator/client" - "github.com/OffchainLabs/prysm/v6/validator/keymanager" - "github.com/OffchainLabs/prysm/v6/validator/keymanager/local" - "github.com/OffchainLabs/prysm/v6/validator/node" + grpcutil "github.com/OffchainLabs/prysm/v7/api/grpc" + "github.com/OffchainLabs/prysm/v7/cmd" + "github.com/OffchainLabs/prysm/v7/cmd/validator/flags" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/validator/accounts" + "github.com/OffchainLabs/prysm/v7/validator/accounts/wallet" + "github.com/OffchainLabs/prysm/v7/validator/client" + "github.com/OffchainLabs/prysm/v7/validator/keymanager" + "github.com/OffchainLabs/prysm/v7/validator/keymanager/local" + "github.com/OffchainLabs/prysm/v7/validator/node" "github.com/golang/protobuf/ptypes/empty" "github.com/pkg/errors" "github.com/urfave/cli/v2" diff --git a/cmd/validator/accounts/exit_test.go b/cmd/validator/accounts/exit_test.go index c70ba715b3..4278d354b4 100644 --- a/cmd/validator/accounts/exit_test.go +++ b/cmd/validator/accounts/exit_test.go @@ -9,14 +9,14 @@ import ( "testing" "time" - "github.com/OffchainLabs/prysm/v6/build/bazel" - "github.com/OffchainLabs/prysm/v6/io/file" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - validatormock "github.com/OffchainLabs/prysm/v6/testing/validator-mock" - "github.com/OffchainLabs/prysm/v6/validator/accounts" - "github.com/OffchainLabs/prysm/v6/validator/keymanager" + "github.com/OffchainLabs/prysm/v7/build/bazel" + "github.com/OffchainLabs/prysm/v7/io/file" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + validatormock "github.com/OffchainLabs/prysm/v7/testing/validator-mock" + "github.com/OffchainLabs/prysm/v7/validator/accounts" + "github.com/OffchainLabs/prysm/v7/validator/keymanager" "go.uber.org/mock/gomock" "google.golang.org/protobuf/types/known/timestamppb" ) diff --git a/cmd/validator/accounts/import.go b/cmd/validator/accounts/import.go index 8b1779ae66..6e52577b34 100644 --- a/cmd/validator/accounts/import.go +++ b/cmd/validator/accounts/import.go @@ -3,13 +3,13 @@ package accounts import ( "strings" - "github.com/OffchainLabs/prysm/v6/cmd" - "github.com/OffchainLabs/prysm/v6/cmd/validator/flags" - "github.com/OffchainLabs/prysm/v6/validator/accounts" - "github.com/OffchainLabs/prysm/v6/validator/accounts/iface" - "github.com/OffchainLabs/prysm/v6/validator/accounts/userprompt" - "github.com/OffchainLabs/prysm/v6/validator/accounts/wallet" - "github.com/OffchainLabs/prysm/v6/validator/client" + "github.com/OffchainLabs/prysm/v7/cmd" + "github.com/OffchainLabs/prysm/v7/cmd/validator/flags" + "github.com/OffchainLabs/prysm/v7/validator/accounts" + "github.com/OffchainLabs/prysm/v7/validator/accounts/iface" + "github.com/OffchainLabs/prysm/v7/validator/accounts/userprompt" + "github.com/OffchainLabs/prysm/v7/validator/accounts/wallet" + "github.com/OffchainLabs/prysm/v7/validator/client" "github.com/pkg/errors" "github.com/urfave/cli/v2" ) diff --git a/cmd/validator/accounts/import_test.go b/cmd/validator/accounts/import_test.go index 1c3e40b7de..93e877b202 100644 --- a/cmd/validator/accounts/import_test.go +++ b/cmd/validator/accounts/import_test.go @@ -10,14 +10,14 @@ import ( "testing" "time" - "github.com/OffchainLabs/prysm/v6/crypto/bls" - 
"github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/validator/accounts" - "github.com/OffchainLabs/prysm/v6/validator/accounts/iface" - "github.com/OffchainLabs/prysm/v6/validator/accounts/wallet" - "github.com/OffchainLabs/prysm/v6/validator/keymanager" - "github.com/OffchainLabs/prysm/v6/validator/keymanager/local" + "github.com/OffchainLabs/prysm/v7/crypto/bls" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/validator/accounts" + "github.com/OffchainLabs/prysm/v7/validator/accounts/iface" + "github.com/OffchainLabs/prysm/v7/validator/accounts/wallet" + "github.com/OffchainLabs/prysm/v7/validator/keymanager" + "github.com/OffchainLabs/prysm/v7/validator/keymanager/local" "github.com/google/uuid" keystorev4 "github.com/wealdtech/go-eth2-wallet-encryptor-keystorev4" ) diff --git a/cmd/validator/accounts/list.go b/cmd/validator/accounts/list.go index bfe6582b89..4afaf82f4f 100644 --- a/cmd/validator/accounts/list.go +++ b/cmd/validator/accounts/list.go @@ -3,10 +3,10 @@ package accounts import ( "strings" - "github.com/OffchainLabs/prysm/v6/cmd" - "github.com/OffchainLabs/prysm/v6/cmd/validator/flags" - "github.com/OffchainLabs/prysm/v6/validator/accounts" - "github.com/OffchainLabs/prysm/v6/validator/client" + "github.com/OffchainLabs/prysm/v7/cmd" + "github.com/OffchainLabs/prysm/v7/cmd/validator/flags" + "github.com/OffchainLabs/prysm/v7/validator/accounts" + "github.com/OffchainLabs/prysm/v7/validator/client" "github.com/urfave/cli/v2" ) diff --git a/cmd/validator/accounts/wallet_utils.go b/cmd/validator/accounts/wallet_utils.go index 25db33ad41..2aaed1b57c 100644 --- a/cmd/validator/accounts/wallet_utils.go +++ b/cmd/validator/accounts/wallet_utils.go @@ -3,11 +3,11 @@ package accounts import ( "strings" - "github.com/OffchainLabs/prysm/v6/validator/accounts" - "github.com/OffchainLabs/prysm/v6/validator/accounts/iface" - "github.com/OffchainLabs/prysm/v6/validator/accounts/wallet" - "github.com/OffchainLabs/prysm/v6/validator/keymanager" - remote_web3signer "github.com/OffchainLabs/prysm/v6/validator/keymanager/remote-web3signer" + "github.com/OffchainLabs/prysm/v7/validator/accounts" + "github.com/OffchainLabs/prysm/v7/validator/accounts/iface" + "github.com/OffchainLabs/prysm/v7/validator/accounts/wallet" + "github.com/OffchainLabs/prysm/v7/validator/keymanager" + remote_web3signer "github.com/OffchainLabs/prysm/v7/validator/keymanager/remote-web3signer" "github.com/pkg/errors" "github.com/urfave/cli/v2" ) diff --git a/cmd/validator/accounts/wallet_utils_test.go b/cmd/validator/accounts/wallet_utils_test.go index 2d8fdc20bf..554cbe824e 100644 --- a/cmd/validator/accounts/wallet_utils_test.go +++ b/cmd/validator/accounts/wallet_utils_test.go @@ -8,13 +8,13 @@ import ( "testing" "time" - "github.com/OffchainLabs/prysm/v6/cmd/validator/flags" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/validator/accounts" - "github.com/OffchainLabs/prysm/v6/validator/keymanager" - "github.com/OffchainLabs/prysm/v6/validator/keymanager/local" - "github.com/OffchainLabs/prysm/v6/validator/node" + "github.com/OffchainLabs/prysm/v7/cmd/validator/flags" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/validator/accounts" + 
"github.com/OffchainLabs/prysm/v7/validator/keymanager" + "github.com/OffchainLabs/prysm/v7/validator/keymanager/local" + "github.com/OffchainLabs/prysm/v7/validator/node" "github.com/ethereum/go-ethereum/common/hexutil" "github.com/sirupsen/logrus/hooks/test" "github.com/urfave/cli/v2" diff --git a/cmd/validator/db/BUILD.bazel b/cmd/validator/db/BUILD.bazel index fb25a0a9e0..8d697996a9 100644 --- a/cmd/validator/db/BUILD.bazel +++ b/cmd/validator/db/BUILD.bazel @@ -3,7 +3,7 @@ load("@prysm//tools/go:def.bzl", "go_library") go_library( name = "go_default_library", srcs = ["db.go"], - importpath = "github.com/OffchainLabs/prysm/v6/cmd/validator/db", + importpath = "github.com/OffchainLabs/prysm/v7/cmd/validator/db", visibility = ["//visibility:public"], deps = [ "//cmd:go_default_library", diff --git a/cmd/validator/db/db.go b/cmd/validator/db/db.go index 14f79f5bfc..fa0314b082 100644 --- a/cmd/validator/db/db.go +++ b/cmd/validator/db/db.go @@ -1,9 +1,9 @@ package db import ( - "github.com/OffchainLabs/prysm/v6/cmd" - "github.com/OffchainLabs/prysm/v6/runtime/tos" - validatordb "github.com/OffchainLabs/prysm/v6/validator/db" + "github.com/OffchainLabs/prysm/v7/cmd" + "github.com/OffchainLabs/prysm/v7/runtime/tos" + validatordb "github.com/OffchainLabs/prysm/v7/validator/db" "github.com/sirupsen/logrus" "github.com/urfave/cli/v2" ) diff --git a/cmd/validator/flags/BUILD.bazel b/cmd/validator/flags/BUILD.bazel index 4a1883d419..99f7e697c6 100644 --- a/cmd/validator/flags/BUILD.bazel +++ b/cmd/validator/flags/BUILD.bazel @@ -6,7 +6,7 @@ go_library( "flags.go", "interop.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/cmd/validator/flags", + importpath = "github.com/OffchainLabs/prysm/v7/cmd/validator/flags", visibility = [ "//cmd/prysmctl:__subpackages__", "//cmd/validator:__subpackages__", diff --git a/cmd/validator/flags/flags.go b/cmd/validator/flags/flags.go index 003ad0d517..3580effef0 100644 --- a/cmd/validator/flags/flags.go +++ b/cmd/validator/flags/flags.go @@ -8,9 +8,9 @@ import ( "runtime" "time" - "github.com/OffchainLabs/prysm/v6/api" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/io/file" + "github.com/OffchainLabs/prysm/v7/api" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/io/file" "github.com/urfave/cli/v2" ) diff --git a/cmd/validator/flags/flags_test.go b/cmd/validator/flags/flags_test.go index 48ed94cdd9..a4a3b57876 100644 --- a/cmd/validator/flags/flags_test.go +++ b/cmd/validator/flags/flags_test.go @@ -7,8 +7,8 @@ import ( "strings" "testing" - "github.com/OffchainLabs/prysm/v6/cmd" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/cmd" + "github.com/OffchainLabs/prysm/v7/testing/require" "github.com/urfave/cli/v2" ) diff --git a/cmd/validator/main.go b/cmd/validator/main.go index 3a212cfe25..284df48efd 100644 --- a/cmd/validator/main.go +++ b/cmd/validator/main.go @@ -9,23 +9,23 @@ import ( "path/filepath" runtimeDebug "runtime/debug" - "github.com/OffchainLabs/prysm/v6/cmd" - accountcommands "github.com/OffchainLabs/prysm/v6/cmd/validator/accounts" - dbcommands "github.com/OffchainLabs/prysm/v6/cmd/validator/db" - "github.com/OffchainLabs/prysm/v6/cmd/validator/flags" - slashingprotectioncommands "github.com/OffchainLabs/prysm/v6/cmd/validator/slashing-protection" - walletcommands "github.com/OffchainLabs/prysm/v6/cmd/validator/wallet" - "github.com/OffchainLabs/prysm/v6/cmd/validator/web" - "github.com/OffchainLabs/prysm/v6/config/features" - 
"github.com/OffchainLabs/prysm/v6/io/file" - "github.com/OffchainLabs/prysm/v6/io/logs" - "github.com/OffchainLabs/prysm/v6/monitoring/journald" - "github.com/OffchainLabs/prysm/v6/runtime/debug" - prefixed "github.com/OffchainLabs/prysm/v6/runtime/logging/logrus-prefixed-formatter" - _ "github.com/OffchainLabs/prysm/v6/runtime/maxprocs" - "github.com/OffchainLabs/prysm/v6/runtime/tos" - "github.com/OffchainLabs/prysm/v6/runtime/version" - "github.com/OffchainLabs/prysm/v6/validator/node" + "github.com/OffchainLabs/prysm/v7/cmd" + accountcommands "github.com/OffchainLabs/prysm/v7/cmd/validator/accounts" + dbcommands "github.com/OffchainLabs/prysm/v7/cmd/validator/db" + "github.com/OffchainLabs/prysm/v7/cmd/validator/flags" + slashingprotectioncommands "github.com/OffchainLabs/prysm/v7/cmd/validator/slashing-protection" + walletcommands "github.com/OffchainLabs/prysm/v7/cmd/validator/wallet" + "github.com/OffchainLabs/prysm/v7/cmd/validator/web" + "github.com/OffchainLabs/prysm/v7/config/features" + "github.com/OffchainLabs/prysm/v7/io/file" + "github.com/OffchainLabs/prysm/v7/io/logs" + "github.com/OffchainLabs/prysm/v7/monitoring/journald" + "github.com/OffchainLabs/prysm/v7/runtime/debug" + prefixed "github.com/OffchainLabs/prysm/v7/runtime/logging/logrus-prefixed-formatter" + _ "github.com/OffchainLabs/prysm/v7/runtime/maxprocs" + "github.com/OffchainLabs/prysm/v7/runtime/tos" + "github.com/OffchainLabs/prysm/v7/runtime/version" + "github.com/OffchainLabs/prysm/v7/validator/node" joonix "github.com/joonix/log" "github.com/pkg/errors" "github.com/sirupsen/logrus" diff --git a/cmd/validator/slashing-protection/BUILD.bazel b/cmd/validator/slashing-protection/BUILD.bazel index 2afcacf313..119a590546 100644 --- a/cmd/validator/slashing-protection/BUILD.bazel +++ b/cmd/validator/slashing-protection/BUILD.bazel @@ -8,7 +8,7 @@ go_library( "log.go", "slashing-protection.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/cmd/validator/slashing-protection", + importpath = "github.com/OffchainLabs/prysm/v7/cmd/validator/slashing-protection", visibility = ["//visibility:public"], deps = [ "//cmd:go_default_library", diff --git a/cmd/validator/slashing-protection/export.go b/cmd/validator/slashing-protection/export.go index 3207f0545e..bc2718bf16 100644 --- a/cmd/validator/slashing-protection/export.go +++ b/cmd/validator/slashing-protection/export.go @@ -5,16 +5,16 @@ import ( "fmt" "path/filepath" - "github.com/OffchainLabs/prysm/v6/cmd" - "github.com/OffchainLabs/prysm/v6/cmd/validator/flags" - "github.com/OffchainLabs/prysm/v6/config/features" - "github.com/OffchainLabs/prysm/v6/io/file" - "github.com/OffchainLabs/prysm/v6/validator/accounts/userprompt" - "github.com/OffchainLabs/prysm/v6/validator/db/filesystem" - "github.com/OffchainLabs/prysm/v6/validator/db/iface" - "github.com/OffchainLabs/prysm/v6/validator/db/kv" - slashingprotection "github.com/OffchainLabs/prysm/v6/validator/slashing-protection-history" - "github.com/OffchainLabs/prysm/v6/validator/slashing-protection-history/format" + "github.com/OffchainLabs/prysm/v7/cmd" + "github.com/OffchainLabs/prysm/v7/cmd/validator/flags" + "github.com/OffchainLabs/prysm/v7/config/features" + "github.com/OffchainLabs/prysm/v7/io/file" + "github.com/OffchainLabs/prysm/v7/validator/accounts/userprompt" + "github.com/OffchainLabs/prysm/v7/validator/db/filesystem" + "github.com/OffchainLabs/prysm/v7/validator/db/iface" + "github.com/OffchainLabs/prysm/v7/validator/db/kv" + slashingprotection 
"github.com/OffchainLabs/prysm/v7/validator/slashing-protection-history" + "github.com/OffchainLabs/prysm/v7/validator/slashing-protection-history/format" "github.com/pkg/errors" "github.com/urfave/cli/v2" ) diff --git a/cmd/validator/slashing-protection/import.go b/cmd/validator/slashing-protection/import.go index 706ab07097..6c277ca1a2 100644 --- a/cmd/validator/slashing-protection/import.go +++ b/cmd/validator/slashing-protection/import.go @@ -5,14 +5,14 @@ import ( "fmt" "path/filepath" - "github.com/OffchainLabs/prysm/v6/cmd" - "github.com/OffchainLabs/prysm/v6/cmd/validator/flags" - "github.com/OffchainLabs/prysm/v6/config/features" - "github.com/OffchainLabs/prysm/v6/io/file" - "github.com/OffchainLabs/prysm/v6/validator/accounts/userprompt" - "github.com/OffchainLabs/prysm/v6/validator/db/filesystem" - "github.com/OffchainLabs/prysm/v6/validator/db/iface" - "github.com/OffchainLabs/prysm/v6/validator/db/kv" + "github.com/OffchainLabs/prysm/v7/cmd" + "github.com/OffchainLabs/prysm/v7/cmd/validator/flags" + "github.com/OffchainLabs/prysm/v7/config/features" + "github.com/OffchainLabs/prysm/v7/io/file" + "github.com/OffchainLabs/prysm/v7/validator/accounts/userprompt" + "github.com/OffchainLabs/prysm/v7/validator/db/filesystem" + "github.com/OffchainLabs/prysm/v7/validator/db/iface" + "github.com/OffchainLabs/prysm/v7/validator/db/kv" "github.com/pkg/errors" "github.com/urfave/cli/v2" ) diff --git a/cmd/validator/slashing-protection/import_export_test.go b/cmd/validator/slashing-protection/import_export_test.go index 86c2867e56..a25b490250 100644 --- a/cmd/validator/slashing-protection/import_export_test.go +++ b/cmd/validator/slashing-protection/import_export_test.go @@ -6,15 +6,15 @@ import ( "path/filepath" "testing" - "github.com/OffchainLabs/prysm/v6/cmd" - "github.com/OffchainLabs/prysm/v6/cmd/validator/flags" - "github.com/OffchainLabs/prysm/v6/io/file" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/validator/db/common" - dbTest "github.com/OffchainLabs/prysm/v6/validator/db/testing" - "github.com/OffchainLabs/prysm/v6/validator/slashing-protection-history/format" - mocks "github.com/OffchainLabs/prysm/v6/validator/testing" + "github.com/OffchainLabs/prysm/v7/cmd" + "github.com/OffchainLabs/prysm/v7/cmd/validator/flags" + "github.com/OffchainLabs/prysm/v7/io/file" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/validator/db/common" + dbTest "github.com/OffchainLabs/prysm/v7/validator/db/testing" + "github.com/OffchainLabs/prysm/v7/validator/slashing-protection-history/format" + mocks "github.com/OffchainLabs/prysm/v7/validator/testing" "github.com/urfave/cli/v2" ) diff --git a/cmd/validator/slashing-protection/slashing-protection.go b/cmd/validator/slashing-protection/slashing-protection.go index 0acd5f5edf..198f8458e8 100644 --- a/cmd/validator/slashing-protection/slashing-protection.go +++ b/cmd/validator/slashing-protection/slashing-protection.go @@ -1,10 +1,10 @@ package historycmd import ( - "github.com/OffchainLabs/prysm/v6/cmd" - "github.com/OffchainLabs/prysm/v6/cmd/validator/flags" - "github.com/OffchainLabs/prysm/v6/config/features" - "github.com/OffchainLabs/prysm/v6/runtime/tos" + "github.com/OffchainLabs/prysm/v7/cmd" + "github.com/OffchainLabs/prysm/v7/cmd/validator/flags" + "github.com/OffchainLabs/prysm/v7/config/features" + "github.com/OffchainLabs/prysm/v7/runtime/tos" 
"github.com/sirupsen/logrus" "github.com/urfave/cli/v2" ) diff --git a/cmd/validator/usage.go b/cmd/validator/usage.go index 4030c02210..5ad08d0fe6 100644 --- a/cmd/validator/usage.go +++ b/cmd/validator/usage.go @@ -5,10 +5,10 @@ import ( "io" "sort" - "github.com/OffchainLabs/prysm/v6/cmd" - "github.com/OffchainLabs/prysm/v6/cmd/validator/flags" - "github.com/OffchainLabs/prysm/v6/config/features" - "github.com/OffchainLabs/prysm/v6/runtime/debug" + "github.com/OffchainLabs/prysm/v7/cmd" + "github.com/OffchainLabs/prysm/v7/cmd/validator/flags" + "github.com/OffchainLabs/prysm/v7/config/features" + "github.com/OffchainLabs/prysm/v7/runtime/debug" "github.com/urfave/cli/v2" ) diff --git a/cmd/validator/usage_test.go b/cmd/validator/usage_test.go index e13ea0d7b2..d68b299e90 100644 --- a/cmd/validator/usage_test.go +++ b/cmd/validator/usage_test.go @@ -4,7 +4,7 @@ import ( "slices" "testing" - "github.com/OffchainLabs/prysm/v6/config/features" + "github.com/OffchainLabs/prysm/v7/config/features" "github.com/urfave/cli/v2" ) diff --git a/cmd/validator/wallet/BUILD.bazel b/cmd/validator/wallet/BUILD.bazel index 4545c2d479..2c00a6e28b 100644 --- a/cmd/validator/wallet/BUILD.bazel +++ b/cmd/validator/wallet/BUILD.bazel @@ -7,7 +7,7 @@ go_library( "recover.go", "wallet.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/cmd/validator/wallet", + importpath = "github.com/OffchainLabs/prysm/v7/cmd/validator/wallet", visibility = ["//visibility:public"], deps = [ "//cmd:go_default_library", diff --git a/cmd/validator/wallet/create.go b/cmd/validator/wallet/create.go index edc5269ca3..050d98c5ad 100644 --- a/cmd/validator/wallet/create.go +++ b/cmd/validator/wallet/create.go @@ -5,12 +5,12 @@ import ( "os" "strings" - "github.com/OffchainLabs/prysm/v6/cmd/validator/flags" - "github.com/OffchainLabs/prysm/v6/io/prompt" - "github.com/OffchainLabs/prysm/v6/validator/accounts" - "github.com/OffchainLabs/prysm/v6/validator/accounts/userprompt" - "github.com/OffchainLabs/prysm/v6/validator/accounts/wallet" - "github.com/OffchainLabs/prysm/v6/validator/keymanager" + "github.com/OffchainLabs/prysm/v7/cmd/validator/flags" + "github.com/OffchainLabs/prysm/v7/io/prompt" + "github.com/OffchainLabs/prysm/v7/validator/accounts" + "github.com/OffchainLabs/prysm/v7/validator/accounts/userprompt" + "github.com/OffchainLabs/prysm/v7/validator/accounts/wallet" + "github.com/OffchainLabs/prysm/v7/validator/keymanager" "github.com/manifoldco/promptui" "github.com/pkg/errors" "github.com/urfave/cli/v2" diff --git a/cmd/validator/wallet/create_test.go b/cmd/validator/wallet/create_test.go index 70a513596d..453c5b15e5 100644 --- a/cmd/validator/wallet/create_test.go +++ b/cmd/validator/wallet/create_test.go @@ -8,13 +8,13 @@ import ( "strconv" "testing" - "github.com/OffchainLabs/prysm/v6/cmd/validator/flags" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/validator/accounts/wallet" - "github.com/OffchainLabs/prysm/v6/validator/keymanager" - "github.com/OffchainLabs/prysm/v6/validator/keymanager/local" + "github.com/OffchainLabs/prysm/v7/cmd/validator/flags" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/validator/accounts/wallet" + "github.com/OffchainLabs/prysm/v7/validator/keymanager" + 
"github.com/OffchainLabs/prysm/v7/validator/keymanager/local" "github.com/sirupsen/logrus" logTest "github.com/sirupsen/logrus/hooks/test" "github.com/urfave/cli/v2" diff --git a/cmd/validator/wallet/recover.go b/cmd/validator/wallet/recover.go index bb1cbad0cb..5802b2597e 100644 --- a/cmd/validator/wallet/recover.go +++ b/cmd/validator/wallet/recover.go @@ -7,11 +7,11 @@ import ( "strconv" "strings" - "github.com/OffchainLabs/prysm/v6/cmd/validator/flags" - "github.com/OffchainLabs/prysm/v6/io/prompt" - "github.com/OffchainLabs/prysm/v6/validator/accounts" - "github.com/OffchainLabs/prysm/v6/validator/accounts/userprompt" - "github.com/OffchainLabs/prysm/v6/validator/accounts/wallet" + "github.com/OffchainLabs/prysm/v7/cmd/validator/flags" + "github.com/OffchainLabs/prysm/v7/io/prompt" + "github.com/OffchainLabs/prysm/v7/validator/accounts" + "github.com/OffchainLabs/prysm/v7/validator/accounts/userprompt" + "github.com/OffchainLabs/prysm/v7/validator/accounts/wallet" "github.com/pkg/errors" "github.com/tyler-smith/go-bip39" "github.com/tyler-smith/go-bip39/wordlists" diff --git a/cmd/validator/wallet/recover_test.go b/cmd/validator/wallet/recover_test.go index 42c641a1d1..52ca2c9f3f 100644 --- a/cmd/validator/wallet/recover_test.go +++ b/cmd/validator/wallet/recover_test.go @@ -7,13 +7,13 @@ import ( "strconv" "testing" - "github.com/OffchainLabs/prysm/v6/cmd/validator/flags" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/validator/accounts/iface" - "github.com/OffchainLabs/prysm/v6/validator/accounts/wallet" - "github.com/OffchainLabs/prysm/v6/validator/keymanager" - "github.com/OffchainLabs/prysm/v6/validator/keymanager/derived" + "github.com/OffchainLabs/prysm/v7/cmd/validator/flags" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/validator/accounts/iface" + "github.com/OffchainLabs/prysm/v7/validator/accounts/wallet" + "github.com/OffchainLabs/prysm/v7/validator/keymanager" + "github.com/OffchainLabs/prysm/v7/validator/keymanager/derived" "github.com/urfave/cli/v2" ) diff --git a/cmd/validator/wallet/wallet.go b/cmd/validator/wallet/wallet.go index 6a7aea7741..593d1288e7 100644 --- a/cmd/validator/wallet/wallet.go +++ b/cmd/validator/wallet/wallet.go @@ -1,10 +1,10 @@ package wallet import ( - "github.com/OffchainLabs/prysm/v6/cmd" - "github.com/OffchainLabs/prysm/v6/cmd/validator/flags" - "github.com/OffchainLabs/prysm/v6/config/features" - "github.com/OffchainLabs/prysm/v6/runtime/tos" + "github.com/OffchainLabs/prysm/v7/cmd" + "github.com/OffchainLabs/prysm/v7/cmd/validator/flags" + "github.com/OffchainLabs/prysm/v7/config/features" + "github.com/OffchainLabs/prysm/v7/runtime/tos" "github.com/sirupsen/logrus" "github.com/urfave/cli/v2" ) diff --git a/cmd/validator/web/BUILD.bazel b/cmd/validator/web/BUILD.bazel index 2a9ca685d5..4259bcc465 100644 --- a/cmd/validator/web/BUILD.bazel +++ b/cmd/validator/web/BUILD.bazel @@ -6,7 +6,7 @@ go_library( "log.go", "web.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/cmd/validator/web", + importpath = "github.com/OffchainLabs/prysm/v7/cmd/validator/web", visibility = ["//visibility:public"], deps = [ "//api:go_default_library", diff --git a/cmd/validator/web/web.go b/cmd/validator/web/web.go index 13d2f250df..3a3e1f6279 100644 --- a/cmd/validator/web/web.go +++ b/cmd/validator/web/web.go @@ -4,12 +4,12 @@ import ( "fmt" "path/filepath" - 
"github.com/OffchainLabs/prysm/v6/api" - "github.com/OffchainLabs/prysm/v6/cmd" - "github.com/OffchainLabs/prysm/v6/cmd/validator/flags" - "github.com/OffchainLabs/prysm/v6/config/features" - "github.com/OffchainLabs/prysm/v6/runtime/tos" - "github.com/OffchainLabs/prysm/v6/validator/rpc" + "github.com/OffchainLabs/prysm/v7/api" + "github.com/OffchainLabs/prysm/v7/cmd" + "github.com/OffchainLabs/prysm/v7/cmd/validator/flags" + "github.com/OffchainLabs/prysm/v7/config/features" + "github.com/OffchainLabs/prysm/v7/runtime/tos" + "github.com/OffchainLabs/prysm/v7/validator/rpc" "github.com/urfave/cli/v2" ) diff --git a/config/BUILD.bazel b/config/BUILD.bazel index a9057b4ecf..e2baca572f 100644 --- a/config/BUILD.bazel +++ b/config/BUILD.bazel @@ -17,7 +17,7 @@ config_setting( go_library( name = "go_default_library", srcs = ["util.go"], - importpath = "github.com/OffchainLabs/prysm/v6/config", + importpath = "github.com/OffchainLabs/prysm/v7/config", visibility = ["//visibility:public"], deps = [ "@com_github_ethereum_go_ethereum//common:go_default_library", diff --git a/config/features/BUILD.bazel b/config/features/BUILD.bazel index dde439f978..93a4d9dbc7 100644 --- a/config/features/BUILD.bazel +++ b/config/features/BUILD.bazel @@ -8,7 +8,7 @@ go_library( "filter_flags.go", "flags.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/config/features", + importpath = "github.com/OffchainLabs/prysm/v7/config/features", visibility = ["//visibility:public"], deps = [ "//cmd:go_default_library", diff --git a/config/features/config.go b/config/features/config.go index 7ebb09c977..02b3de21bd 100644 --- a/config/features/config.go +++ b/config/features/config.go @@ -25,9 +25,9 @@ import ( "sync" "time" - "github.com/OffchainLabs/prysm/v6/cmd" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/cmd" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" "github.com/sirupsen/logrus" "github.com/urfave/cli/v2" ) diff --git a/config/features/config_test.go b/config/features/config_test.go index 9589852281..c395d3dd34 100644 --- a/config/features/config_test.go +++ b/config/features/config_test.go @@ -4,8 +4,8 @@ import ( "flag" "testing" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" "github.com/sirupsen/logrus/hooks/test" "github.com/urfave/cli/v2" ) diff --git a/config/features/deprecated_flags_test.go b/config/features/deprecated_flags_test.go index ea53d706b1..a0fb6dfb1b 100644 --- a/config/features/deprecated_flags_test.go +++ b/config/features/deprecated_flags_test.go @@ -5,7 +5,7 @@ import ( "strings" "testing" - "github.com/OffchainLabs/prysm/v6/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/assert" ) func TestDeprecatedFlags(t *testing.T) { diff --git a/config/features/flags.go b/config/features/flags.go index 715aceb205..b1e3ca1a81 100644 --- a/config/features/flags.go +++ b/config/features/flags.go @@ -3,7 +3,7 @@ package features import ( "time" - backfill "github.com/OffchainLabs/prysm/v6/cmd/beacon-chain/sync/backfill/flags" + backfill "github.com/OffchainLabs/prysm/v7/cmd/beacon-chain/sync/backfill/flags" "github.com/urfave/cli/v2" ) diff --git a/config/fieldparams/BUILD.bazel b/config/fieldparams/BUILD.bazel index 9efe7ea975..1e5b24a42f 100644 --- 
a/config/fieldparams/BUILD.bazel +++ b/config/fieldparams/BUILD.bazel @@ -6,7 +6,7 @@ go_library( "//config:mainnet": ["mainnet.go"], "//config:minimal": ["minimal.go"], }), - importpath = "github.com/OffchainLabs/prysm/v6/config/fieldparams", + importpath = "github.com/OffchainLabs/prysm/v7/config/fieldparams", visibility = ["//visibility:public"], ) diff --git a/config/fieldparams/common_test.go b/config/fieldparams/common_test.go index 6912cd8e3e..13422be483 100644 --- a/config/fieldparams/common_test.go +++ b/config/fieldparams/common_test.go @@ -3,9 +3,9 @@ package field_params_test import ( "testing" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/testing/require" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/testing/require" ) func testFieldParametersMatchConfig(t *testing.T) { diff --git a/config/fieldparams/mainnet_test.go b/config/fieldparams/mainnet_test.go index 80c9bb03e2..507c7a9ef5 100644 --- a/config/fieldparams/mainnet_test.go +++ b/config/fieldparams/mainnet_test.go @@ -5,9 +5,9 @@ package field_params_test import ( "testing" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/testing/require" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/testing/require" ) func TestFieldParametersValues(t *testing.T) { diff --git a/config/fieldparams/minimal_test.go b/config/fieldparams/minimal_test.go index 65089f9a50..653fcf5106 100644 --- a/config/fieldparams/minimal_test.go +++ b/config/fieldparams/minimal_test.go @@ -5,9 +5,9 @@ package field_params_test import ( "testing" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/testing/require" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/testing/require" ) func TestFieldParametersValues(t *testing.T) { diff --git a/config/params/BUILD.bazel b/config/params/BUILD.bazel index f2bc785355..026d154073 100644 --- a/config/params/BUILD.bazel +++ b/config/params/BUILD.bazel @@ -27,7 +27,7 @@ go_library( "testutils_develop.go", # keep "values.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/config/params", + importpath = "github.com/OffchainLabs/prysm/v7/config/params", visibility = ["//visibility:public"], deps = [ "//config/fieldparams:go_default_library", diff --git a/config/params/basis_points.go b/config/params/basis_points.go index c2590bd397..f829bd21ed 100644 --- a/config/params/basis_points.go +++ b/config/params/basis_points.go @@ -1,6 +1,6 @@ package params -import "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" +import "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" const BasisPoints = primitives.BP(10000) diff --git a/config/params/config.go b/config/params/config.go index f36a3097d2..a561164124 100644 --- a/config/params/config.go +++ b/config/params/config.go @@ -12,12 +12,12 @@ import ( log "github.com/sirupsen/logrus" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - 
"github.com/OffchainLabs/prysm/v6/crypto/hash" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - enginev1 "github.com/OffchainLabs/prysm/v6/proto/engine/v1" - "github.com/OffchainLabs/prysm/v6/runtime/version" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/crypto/hash" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + enginev1 "github.com/OffchainLabs/prysm/v7/proto/engine/v1" + "github.com/OffchainLabs/prysm/v7/runtime/version" "github.com/ethereum/go-ethereum/common" "github.com/pkg/errors" ) diff --git a/config/params/config_test.go b/config/params/config_test.go index 81a48f4118..0968fef2b6 100644 --- a/config/params/config_test.go +++ b/config/params/config_test.go @@ -7,11 +7,11 @@ import ( "sync" "testing" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/genesis" - "github.com/OffchainLabs/prysm/v6/runtime/version" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/genesis" + "github.com/OffchainLabs/prysm/v7/runtime/version" + "github.com/OffchainLabs/prysm/v7/testing/require" "github.com/ethereum/go-ethereum/common/hexutil" ) diff --git a/config/params/configset.go b/config/params/configset.go index 3a4651e025..9d86efe908 100644 --- a/config/params/configset.go +++ b/config/params/configset.go @@ -3,8 +3,8 @@ package params import ( "fmt" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/runtime/version" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/runtime/version" "github.com/pkg/errors" ) diff --git a/config/params/configset_test.go b/config/params/configset_test.go index 2a12b6ffce..773ef9ef73 100644 --- a/config/params/configset_test.go +++ b/config/params/configset_test.go @@ -3,8 +3,8 @@ package params import ( "testing" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/testing/require" ) func TestConfigset_Add(t *testing.T) { diff --git a/config/params/fork.go b/config/params/fork.go index 5727698274..15c4a6f1ec 100644 --- a/config/params/fork.go +++ b/config/params/fork.go @@ -1,10 +1,10 @@ package params import ( - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" "github.com/pkg/errors" ) diff --git a/config/params/fork_test.go b/config/params/fork_test.go index 1f3fada116..9db11d410e 100644 --- a/config/params/fork_test.go +++ b/config/params/fork_test.go @@ -4,10 +4,10 @@ import ( "reflect" "testing" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - 
"github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/require" ) func TestFork(t *testing.T) { diff --git a/config/params/loader.go b/config/params/loader.go index 9b913d4d0c..25fc18f237 100644 --- a/config/params/loader.go +++ b/config/params/loader.go @@ -7,8 +7,8 @@ import ( "strconv" "strings" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/math" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/math" "github.com/pkg/errors" log "github.com/sirupsen/logrus" "gopkg.in/yaml.v2" diff --git a/config/params/loader_test.go b/config/params/loader_test.go index b72cec3edb..2158702e4b 100644 --- a/config/params/loader_test.go +++ b/config/params/loader_test.go @@ -12,10 +12,10 @@ import ( "strings" "testing" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/io/file" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/io/file" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" "github.com/bazelbuild/rules_go/go/tools/bazel" "gopkg.in/yaml.v2" ) diff --git a/config/params/mainnet_config.go b/config/params/mainnet_config.go index 72131d173b..98ce3a0832 100644 --- a/config/params/mainnet_config.go +++ b/config/params/mainnet_config.go @@ -4,8 +4,8 @@ import ( "math" "time" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" ) // MainnetConfig returns the configuration to be used in the main network. diff --git a/config/params/mainnet_config_test.go b/config/params/mainnet_config_test.go index fa141dce7d..6131bf872c 100644 --- a/config/params/mainnet_config_test.go +++ b/config/params/mainnet_config_test.go @@ -4,9 +4,9 @@ import ( "path" "testing" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/testing/require" "github.com/bazelbuild/rules_go/go/tools/bazel" ) diff --git a/config/params/minimal_config.go b/config/params/minimal_config.go index afe1d9a62f..d030cffb29 100644 --- a/config/params/minimal_config.go +++ b/config/params/minimal_config.go @@ -3,7 +3,7 @@ package params import ( "math" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" ) // MinimalSpecConfig retrieves the minimal config used in spec tests. 
diff --git a/config/params/network_config.go b/config/params/network_config.go index e852570583..4ba029de5e 100644 --- a/config/params/network_config.go +++ b/config/params/network_config.go @@ -1,7 +1,7 @@ package params import ( - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" "github.com/mohae/deepcopy" ) diff --git a/config/params/testnet_config_test.go b/config/params/testnet_config_test.go index bd1ae058aa..70ab4995d6 100644 --- a/config/params/testnet_config_test.go +++ b/config/params/testnet_config_test.go @@ -4,10 +4,10 @@ import ( "path/filepath" "testing" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/io/file" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/io/file" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" "github.com/bazelbuild/rules_go/go/tools/bazel" ) diff --git a/config/params/testnet_holesky_config_test.go b/config/params/testnet_holesky_config_test.go index fac3357f4f..a1858aa5cf 100644 --- a/config/params/testnet_holesky_config_test.go +++ b/config/params/testnet_holesky_config_test.go @@ -4,8 +4,8 @@ import ( "path" "testing" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/testing/require" "github.com/bazelbuild/rules_go/go/tools/bazel" ) diff --git a/config/params/testnet_hoodi_config_test.go b/config/params/testnet_hoodi_config_test.go index 2e4eafa903..a4428067cc 100644 --- a/config/params/testnet_hoodi_config_test.go +++ b/config/params/testnet_hoodi_config_test.go @@ -4,8 +4,8 @@ import ( "path" "testing" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/testing/require" "github.com/bazelbuild/rules_go/go/tools/bazel" ) diff --git a/config/params/testnet_sepolia_config_test.go b/config/params/testnet_sepolia_config_test.go index b0f19e5fa6..9b5c46da97 100644 --- a/config/params/testnet_sepolia_config_test.go +++ b/config/params/testnet_sepolia_config_test.go @@ -4,8 +4,8 @@ import ( "path" "testing" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/testing/require" "github.com/bazelbuild/rules_go/go/tools/bazel" ) diff --git a/config/params/testutils.go b/config/params/testutils.go index 76e9c5c7bf..1bcae086f4 100644 --- a/config/params/testutils.go +++ b/config/params/testutils.go @@ -3,7 +3,7 @@ package params import ( "testing" - "github.com/OffchainLabs/prysm/v6/runtime/version" + "github.com/OffchainLabs/prysm/v7/runtime/version" ) const ( diff --git a/config/proposer/BUILD.bazel b/config/proposer/BUILD.bazel index 3dc814b3b9..619a6e185f 100644 --- a/config/proposer/BUILD.bazel +++ b/config/proposer/BUILD.bazel @@ -3,7 +3,7 @@ load("@prysm//tools/go:def.bzl", "go_library", "go_test") go_library( name = "go_default_library", srcs = ["settings.go"], - importpath = "github.com/OffchainLabs/prysm/v6/config/proposer", + importpath = "github.com/OffchainLabs/prysm/v7/config/proposer", visibility = ["//visibility:public"], deps 
= [ "//config:go_default_library", diff --git a/config/proposer/loader/BUILD.bazel b/config/proposer/loader/BUILD.bazel index b0cc09e51e..ef18044bba 100644 --- a/config/proposer/loader/BUILD.bazel +++ b/config/proposer/loader/BUILD.bazel @@ -27,7 +27,7 @@ go_test( go_library( name = "go_default_library", srcs = ["loader.go"], - importpath = "github.com/OffchainLabs/prysm/v6/config/proposer/loader", + importpath = "github.com/OffchainLabs/prysm/v7/config/proposer/loader", visibility = ["//visibility:public"], deps = [ "//cmd/validator/flags:go_default_library", diff --git a/config/proposer/loader/loader.go b/config/proposer/loader/loader.go index 1289b1e8b1..bb5d015fb4 100644 --- a/config/proposer/loader/loader.go +++ b/config/proposer/loader/loader.go @@ -5,13 +5,13 @@ import ( "fmt" "strconv" - "github.com/OffchainLabs/prysm/v6/cmd/validator/flags" - "github.com/OffchainLabs/prysm/v6/config" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/config/proposer" - "github.com/OffchainLabs/prysm/v6/consensus-types/validator" - validatorpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1/validator-client" - "github.com/OffchainLabs/prysm/v6/validator/db/iface" + "github.com/OffchainLabs/prysm/v7/cmd/validator/flags" + "github.com/OffchainLabs/prysm/v7/config" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/config/proposer" + "github.com/OffchainLabs/prysm/v7/consensus-types/validator" + validatorpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1/validator-client" + "github.com/OffchainLabs/prysm/v7/validator/db/iface" "github.com/ethereum/go-ethereum/common" "github.com/pkg/errors" log "github.com/sirupsen/logrus" diff --git a/config/proposer/loader/loader_test.go b/config/proposer/loader/loader_test.go index 2043b4bc25..def760f142 100644 --- a/config/proposer/loader/loader_test.go +++ b/config/proposer/loader/loader_test.go @@ -8,16 +8,16 @@ import ( "os" "testing" - "github.com/OffchainLabs/prysm/v6/cmd/validator/flags" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/config/proposer" - "github.com/OffchainLabs/prysm/v6/consensus-types/validator" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/validator/db/iface" - dbTest "github.com/OffchainLabs/prysm/v6/validator/db/testing" + "github.com/OffchainLabs/prysm/v7/cmd/validator/flags" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/config/proposer" + "github.com/OffchainLabs/prysm/v7/consensus-types/validator" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/validator/db/iface" + dbTest "github.com/OffchainLabs/prysm/v7/validator/db/testing" "github.com/ethereum/go-ethereum/common" "github.com/ethereum/go-ethereum/common/hexutil" logtest "github.com/sirupsen/logrus/hooks/test" diff --git a/config/proposer/settings.go b/config/proposer/settings.go index b3314cb8e0..8367237903 100644 --- a/config/proposer/settings.go +++ b/config/proposer/settings.go @@ -3,11 +3,11 @@ package proposer import ( "fmt" - "github.com/OffchainLabs/prysm/v6/config" - fieldparams 
"github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/consensus-types/validator" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - validatorpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1/validator-client" + "github.com/OffchainLabs/prysm/v7/config" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/consensus-types/validator" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + validatorpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1/validator-client" "github.com/ethereum/go-ethereum/common" "github.com/ethereum/go-ethereum/common/hexutil" "github.com/pkg/errors" diff --git a/config/proposer/settings_test.go b/config/proposer/settings_test.go index 911e771537..c657a06ea3 100644 --- a/config/proposer/settings_test.go +++ b/config/proposer/settings_test.go @@ -3,11 +3,11 @@ package proposer import ( "testing" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/validator" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/testing/require" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/validator" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/testing/require" "github.com/ethereum/go-ethereum/common" "github.com/ethereum/go-ethereum/common/hexutil" ) diff --git a/config/util_test.go b/config/util_test.go index 3bf75552c9..38fac4e06f 100644 --- a/config/util_test.go +++ b/config/util_test.go @@ -7,9 +7,9 @@ import ( "strings" "testing" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" "github.com/sirupsen/logrus/hooks/test" ) diff --git a/consensus-types/BUILD.bazel b/consensus-types/BUILD.bazel index 6b654cc54b..ff5bccad2d 100644 --- a/consensus-types/BUILD.bazel +++ b/consensus-types/BUILD.bazel @@ -3,7 +3,7 @@ load("@prysm//tools/go:def.bzl", "go_library") go_library( name = "go_default_library", srcs = ["types.go"], - importpath = "github.com/OffchainLabs/prysm/v6/consensus-types", + importpath = "github.com/OffchainLabs/prysm/v7/consensus-types", visibility = ["//visibility:public"], deps = [ "//runtime/version:go_default_library", diff --git a/consensus-types/blocks/BUILD.bazel b/consensus-types/blocks/BUILD.bazel index 1db754610a..8fea0333e7 100644 --- a/consensus-types/blocks/BUILD.bazel +++ b/consensus-types/blocks/BUILD.bazel @@ -16,7 +16,7 @@ go_library( "setters.go", "types.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/consensus-types/blocks", + importpath = "github.com/OffchainLabs/prysm/v7/consensus-types/blocks", visibility = ["//visibility:public"], deps = [ "//beacon-chain/state/stateutil:go_default_library", diff --git a/consensus-types/blocks/execution.go b/consensus-types/blocks/execution.go index 0129404cab..8d4741931c 100644 --- a/consensus-types/blocks/execution.go +++ b/consensus-types/blocks/execution.go @@ -3,12 +3,12 @@ package blocks import ( "bytes" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - consensus_types 
"github.com/OffchainLabs/prysm/v6/consensus-types" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/encoding/ssz" - enginev1 "github.com/OffchainLabs/prysm/v6/proto/engine/v1" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + consensus_types "github.com/OffchainLabs/prysm/v7/consensus-types" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/encoding/ssz" + enginev1 "github.com/OffchainLabs/prysm/v7/proto/engine/v1" "github.com/pkg/errors" fastssz "github.com/prysmaticlabs/fastssz" "google.golang.org/protobuf/proto" diff --git a/consensus-types/blocks/execution_test.go b/consensus-types/blocks/execution_test.go index 68dd800192..d4c8ecf78e 100644 --- a/consensus-types/blocks/execution_test.go +++ b/consensus-types/blocks/execution_test.go @@ -3,13 +3,13 @@ package blocks_test import ( "testing" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - consensus_types "github.com/OffchainLabs/prysm/v6/consensus-types" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - enginev1 "github.com/OffchainLabs/prysm/v6/proto/engine/v1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + consensus_types "github.com/OffchainLabs/prysm/v7/consensus-types" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + enginev1 "github.com/OffchainLabs/prysm/v7/proto/engine/v1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" ) func TestWrapExecutionPayload(t *testing.T) { diff --git a/consensus-types/blocks/factory.go b/consensus-types/blocks/factory.go index 54c34e7271..c518845459 100644 --- a/consensus-types/blocks/factory.go +++ b/consensus-types/blocks/factory.go @@ -3,10 +3,10 @@ package blocks import ( "fmt" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - enginev1 "github.com/OffchainLabs/prysm/v6/proto/engine/v1" - eth "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + enginev1 "github.com/OffchainLabs/prysm/v7/proto/engine/v1" + eth "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" "github.com/pkg/errors" ) diff --git a/consensus-types/blocks/factory_test.go b/consensus-types/blocks/factory_test.go index 2ec0d74c54..76fb0ff531 100644 --- a/consensus-types/blocks/factory_test.go +++ b/consensus-types/blocks/factory_test.go @@ -5,13 +5,13 @@ import ( "errors" "testing" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - enginev1 "github.com/OffchainLabs/prysm/v6/proto/engine/v1" - eth "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + enginev1 "github.com/OffchainLabs/prysm/v7/proto/engine/v1" 
+ eth "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" ) func Test_NewSignedBeaconBlock(t *testing.T) { diff --git a/consensus-types/blocks/get_payload.go b/consensus-types/blocks/get_payload.go index 16d186b983..7e9d9b25b5 100644 --- a/consensus-types/blocks/get_payload.go +++ b/consensus-types/blocks/get_payload.go @@ -1,10 +1,10 @@ package blocks import ( - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - pb "github.com/OffchainLabs/prysm/v6/proto/engine/v1" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + pb "github.com/OffchainLabs/prysm/v7/proto/engine/v1" "github.com/pkg/errors" "google.golang.org/protobuf/proto" ) diff --git a/consensus-types/blocks/getters.go b/consensus-types/blocks/getters.go index f70d4fdda5..b1e41cb06c 100644 --- a/consensus-types/blocks/getters.go +++ b/consensus-types/blocks/getters.go @@ -3,14 +3,14 @@ package blocks import ( "fmt" - field_params "github.com/OffchainLabs/prysm/v6/config/fieldparams" - consensus_types "github.com/OffchainLabs/prysm/v6/consensus-types" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - enginev1 "github.com/OffchainLabs/prysm/v6/proto/engine/v1" - eth "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - validatorpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1/validator-client" - "github.com/OffchainLabs/prysm/v6/runtime/version" + field_params "github.com/OffchainLabs/prysm/v7/config/fieldparams" + consensus_types "github.com/OffchainLabs/prysm/v7/consensus-types" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + enginev1 "github.com/OffchainLabs/prysm/v7/proto/engine/v1" + eth "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + validatorpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1/validator-client" + "github.com/OffchainLabs/prysm/v7/runtime/version" "github.com/pkg/errors" ssz "github.com/prysmaticlabs/fastssz" ) diff --git a/consensus-types/blocks/getters_test.go b/consensus-types/blocks/getters_test.go index 9bcfc2ee0a..6a4991f3a8 100644 --- a/consensus-types/blocks/getters_test.go +++ b/consensus-types/blocks/getters_test.go @@ -3,16 +3,16 @@ package blocks import ( "testing" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - pb "github.com/OffchainLabs/prysm/v6/proto/engine/v1" - eth "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - validatorpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1/validator-client" - "github.com/OffchainLabs/prysm/v6/runtime/version" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + 
"github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + pb "github.com/OffchainLabs/prysm/v7/proto/engine/v1" + eth "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + validatorpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1/validator-client" + "github.com/OffchainLabs/prysm/v7/runtime/version" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" ssz "github.com/prysmaticlabs/fastssz" ) diff --git a/consensus-types/blocks/kzg.go b/consensus-types/blocks/kzg.go index 6cb3bc8b4f..fda05ebcfd 100644 --- a/consensus-types/blocks/kzg.go +++ b/consensus-types/blocks/kzg.go @@ -1,12 +1,12 @@ package blocks import ( - field_params "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - "github.com/OffchainLabs/prysm/v6/container/trie" - "github.com/OffchainLabs/prysm/v6/encoding/ssz" - "github.com/OffchainLabs/prysm/v6/runtime/version" + field_params "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + "github.com/OffchainLabs/prysm/v7/container/trie" + "github.com/OffchainLabs/prysm/v7/encoding/ssz" + "github.com/OffchainLabs/prysm/v7/runtime/version" "github.com/pkg/errors" "github.com/prysmaticlabs/gohashtree" ) diff --git a/consensus-types/blocks/kzg_test.go b/consensus-types/blocks/kzg_test.go index f4486015f4..2349b379a3 100644 --- a/consensus-types/blocks/kzg_test.go +++ b/consensus-types/blocks/kzg_test.go @@ -5,12 +5,12 @@ import ( "errors" "testing" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - "github.com/OffchainLabs/prysm/v6/container/trie" - enginev1 "github.com/OffchainLabs/prysm/v6/proto/engine/v1" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/require" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + "github.com/OffchainLabs/prysm/v7/container/trie" + enginev1 "github.com/OffchainLabs/prysm/v7/proto/engine/v1" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/require" "github.com/prysmaticlabs/gohashtree" ) diff --git a/consensus-types/blocks/proofs.go b/consensus-types/blocks/proofs.go index dffe15695b..cd6045da08 100644 --- a/consensus-types/blocks/proofs.go +++ b/consensus-types/blocks/proofs.go @@ -6,14 +6,14 @@ import ( "errors" "fmt" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state/stateutil" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - "github.com/OffchainLabs/prysm/v6/container/trie" - "github.com/OffchainLabs/prysm/v6/crypto/hash/htr" - "github.com/OffchainLabs/prysm/v6/encoding/ssz" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing/trace" - "github.com/OffchainLabs/prysm/v6/runtime/version" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state/stateutil" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + "github.com/OffchainLabs/prysm/v7/container/trie" + "github.com/OffchainLabs/prysm/v7/crypto/hash/htr" + "github.com/OffchainLabs/prysm/v7/encoding/ssz" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing/trace" + 
"github.com/OffchainLabs/prysm/v7/runtime/version" ) const ( diff --git a/consensus-types/blocks/proofs_test.go b/consensus-types/blocks/proofs_test.go index 9604cbe739..be0380215b 100644 --- a/consensus-types/blocks/proofs_test.go +++ b/consensus-types/blocks/proofs_test.go @@ -3,8 +3,8 @@ package blocks import ( "testing" - "github.com/OffchainLabs/prysm/v6/container/trie" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/container/trie" + "github.com/OffchainLabs/prysm/v7/testing/require" ) func TestComputeBlockBodyFieldRoots_Phase0(t *testing.T) { diff --git a/consensus-types/blocks/proto.go b/consensus-types/blocks/proto.go index 7300650606..4c3bc95645 100644 --- a/consensus-types/blocks/proto.go +++ b/consensus-types/blocks/proto.go @@ -3,11 +3,11 @@ package blocks import ( "fmt" - consensus_types "github.com/OffchainLabs/prysm/v6/consensus-types" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - enginev1 "github.com/OffchainLabs/prysm/v6/proto/engine/v1" - eth "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" + consensus_types "github.com/OffchainLabs/prysm/v7/consensus-types" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + enginev1 "github.com/OffchainLabs/prysm/v7/proto/engine/v1" + eth "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" "github.com/pkg/errors" "google.golang.org/protobuf/proto" ) diff --git a/consensus-types/blocks/proto_test.go b/consensus-types/blocks/proto_test.go index 1f64ec639b..d775fc5f1b 100644 --- a/consensus-types/blocks/proto_test.go +++ b/consensus-types/blocks/proto_test.go @@ -4,14 +4,14 @@ import ( "testing" "github.com/OffchainLabs/go-bitfield" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - enginev1 "github.com/OffchainLabs/prysm/v6/proto/engine/v1" - eth "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + enginev1 "github.com/OffchainLabs/prysm/v7/proto/engine/v1" + eth "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" ) type fields struct { diff --git a/consensus-types/blocks/roblob.go b/consensus-types/blocks/roblob.go index 601f1071e6..5af0fcb851 100644 --- a/consensus-types/blocks/roblob.go +++ b/consensus-types/blocks/roblob.go @@ -1,9 +1,9 @@ package blocks import ( - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" ) // ROBlob represents a read-only blob sidecar with its block root. 
diff --git a/consensus-types/blocks/roblob_test.go b/consensus-types/blocks/roblob_test.go index 5db6614020..4266504d03 100644 --- a/consensus-types/blocks/roblob_test.go +++ b/consensus-types/blocks/roblob_test.go @@ -3,12 +3,12 @@ package blocks import ( "testing" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" ) func TestROBlobNilChecks(t *testing.T) { diff --git a/consensus-types/blocks/roblock.go b/consensus-types/blocks/roblock.go index bc394fe61b..cfa1ce390d 100644 --- a/consensus-types/blocks/roblock.go +++ b/consensus-types/blocks/roblock.go @@ -4,7 +4,7 @@ import ( "bytes" "sort" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" ) // ROBlock is a value that embeds a ReadOnlySignedBeaconBlock along with its block root ([32]byte). diff --git a/consensus-types/blocks/roblock_test.go b/consensus-types/blocks/roblock_test.go index 340e1d3beb..7eeb29377e 100644 --- a/consensus-types/blocks/roblock_test.go +++ b/consensus-types/blocks/roblock_test.go @@ -4,12 +4,12 @@ import ( "sort" "testing" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - eth "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + eth "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" + "github.com/OffchainLabs/prysm/v7/testing/require" ) func TestROBlockSorting(t *testing.T) { diff --git a/consensus-types/blocks/rodatacolumn.go b/consensus-types/blocks/rodatacolumn.go index 013c0bd1a4..72c210281d 100644 --- a/consensus-types/blocks/rodatacolumn.go +++ b/consensus-types/blocks/rodatacolumn.go @@ -1,10 +1,10 @@ package blocks import ( - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" ) // RODataColumn represents a read-only data column sidecar with its block root. 
diff --git a/consensus-types/blocks/rodatacolumn_test.go b/consensus-types/blocks/rodatacolumn_test.go index f5adb1efcc..f299dfec5a 100644 --- a/consensus-types/blocks/rodatacolumn_test.go +++ b/consensus-types/blocks/rodatacolumn_test.go @@ -3,12 +3,12 @@ package blocks import ( "testing" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" ) func TestNewRODataColumnWithAndWithoutRoot(t *testing.T) { diff --git a/consensus-types/blocks/setters.go b/consensus-types/blocks/setters.go index a8cae5757d..5df0b2517e 100644 --- a/consensus-types/blocks/setters.go +++ b/consensus-types/blocks/setters.go @@ -3,12 +3,12 @@ package blocks import ( "fmt" - consensus_types "github.com/OffchainLabs/prysm/v6/consensus-types" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - enginev1 "github.com/OffchainLabs/prysm/v6/proto/engine/v1" - eth "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" + consensus_types "github.com/OffchainLabs/prysm/v7/consensus-types" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + enginev1 "github.com/OffchainLabs/prysm/v7/proto/engine/v1" + eth "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" ) // SetSignature sets the signature of the signed beacon block. 
diff --git a/consensus-types/blocks/testing/BUILD.bazel b/consensus-types/blocks/testing/BUILD.bazel index 773b52faf0..8056967e67 100644 --- a/consensus-types/blocks/testing/BUILD.bazel +++ b/consensus-types/blocks/testing/BUILD.bazel @@ -7,7 +7,7 @@ go_library( "factory.go", "mutator.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/consensus-types/blocks/testing", + importpath = "github.com/OffchainLabs/prysm/v7/consensus-types/blocks/testing", visibility = ["//visibility:public"], deps = [ "//consensus-types/blocks:go_default_library", diff --git a/consensus-types/blocks/testing/factory.go b/consensus-types/blocks/testing/factory.go index 7d976580be..05f911ec1a 100644 --- a/consensus-types/blocks/testing/factory.go +++ b/consensus-types/blocks/testing/factory.go @@ -1,9 +1,9 @@ package testing import ( - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - eth "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + eth "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" "github.com/pkg/errors" ) diff --git a/consensus-types/blocks/testing/mutator.go b/consensus-types/blocks/testing/mutator.go index ddf572818a..fff5b0b0df 100644 --- a/consensus-types/blocks/testing/mutator.go +++ b/consensus-types/blocks/testing/mutator.go @@ -1,10 +1,10 @@ package testing import ( - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - eth "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + eth "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" ) type blockMutator struct { diff --git a/consensus-types/blocks/types.go b/consensus-types/blocks/types.go index 2660172025..2cd90e6a19 100644 --- a/consensus-types/blocks/types.go +++ b/consensus-types/blocks/types.go @@ -1,11 +1,11 @@ package blocks import ( - field_params "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - enginev1 "github.com/OffchainLabs/prysm/v6/proto/engine/v1" - eth "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + field_params "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + enginev1 "github.com/OffchainLabs/prysm/v7/proto/engine/v1" + eth "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" "github.com/pkg/errors" ) diff --git a/consensus-types/forkchoice/BUILD.bazel b/consensus-types/forkchoice/BUILD.bazel index d2c778a871..cc189a30f5 100644 --- a/consensus-types/forkchoice/BUILD.bazel +++ b/consensus-types/forkchoice/BUILD.bazel @@ -3,7 +3,7 @@ load("@prysm//tools/go:def.bzl", "go_library") go_library( name = "go_default_library", srcs = ["types.go"], - importpath = "github.com/OffchainLabs/prysm/v6/consensus-types/forkchoice", + importpath = "github.com/OffchainLabs/prysm/v7/consensus-types/forkchoice", visibility = ["//visibility:public"], deps = [ "//consensus-types/primitives:go_default_library", diff --git 
a/consensus-types/forkchoice/types.go b/consensus-types/forkchoice/types.go index 24bf07ac79..a98b75011a 100644 --- a/consensus-types/forkchoice/types.go +++ b/consensus-types/forkchoice/types.go @@ -3,8 +3,8 @@ package forkchoice import ( "time" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - eth "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + eth "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" ) type NodeValidity uint8 diff --git a/consensus-types/hdiff/BUILD.bazel b/consensus-types/hdiff/BUILD.bazel index e19531167d..da7b3201eb 100644 --- a/consensus-types/hdiff/BUILD.bazel +++ b/consensus-types/hdiff/BUILD.bazel @@ -3,7 +3,7 @@ load("@prysm//tools/go:def.bzl", "go_library", "go_test") go_library( name = "go_default_library", srcs = ["state_diff.go"], - importpath = "github.com/OffchainLabs/prysm/v6/consensus-types/hdiff", + importpath = "github.com/OffchainLabs/prysm/v7/consensus-types/hdiff", visibility = ["//visibility:public"], deps = [ "//beacon-chain/core/altair:go_default_library", diff --git a/consensus-types/hdiff/fuzz_test.go b/consensus-types/hdiff/fuzz_test.go index 6ca31f5192..a0f692f681 100644 --- a/consensus-types/hdiff/fuzz_test.go +++ b/consensus-types/hdiff/fuzz_test.go @@ -7,8 +7,8 @@ import ( "strings" "testing" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/testing/util" ) const maxFuzzValidators = 10000 @@ -26,12 +26,12 @@ func FuzzNewHdiff(f *testing.F) { sizes := []uint64{8, 16, 32} for _, size := range sizes { source, _ := util.DeterministicGenesisStateElectra(f, size) - + // Create various realistic target states scenarios := []string{"slot_change", "balance_change", "validator_change", "multiple_changes"} for _, scenario := range scenarios { target := source.Copy() - + switch scenario { case "slot_change": _ = target.SetSlot(source.Slot() + 1) @@ -58,20 +58,20 @@ func FuzzNewHdiff(f *testing.F) { _ = target.SetValidators(validators) } } - + validDiff, err := Diff(source, target) if err == nil { f.Add(validDiff.StateDiff, validDiff.ValidatorDiffs, validDiff.BalancesDiff) } } } - + f.Fuzz(func(t *testing.T, stateDiff, validatorDiffs, balancesDiff []byte) { // Limit input sizes to reasonable bounds if len(stateDiff) > 5000 || len(validatorDiffs) > 5000 || len(balancesDiff) > 5000 { return } - + // Bound historical roots length in stateDiff (if it contains snappy-compressed data) // The historicalRootsLength is read after snappy decompression, but we can still // limit the compressed input size to prevent extreme decompression ratios @@ -79,7 +79,7 @@ func FuzzNewHdiff(f *testing.F) { // Limit stateDiff to prevent potential memory bombs from snappy decompression stateDiff = stateDiff[:maxFuzzStateDiffSize] } - + // Bound validator count in validatorDiffs if len(validatorDiffs) >= 8 { count := binary.LittleEndian.Uint64(validatorDiffs[0:8]) @@ -88,7 +88,7 @@ func FuzzNewHdiff(f *testing.F) { binary.LittleEndian.PutUint64(validatorDiffs[0:8], boundedCount) } } - + // Bound balance count in balancesDiff if len(balancesDiff) >= 8 { count := binary.LittleEndian.Uint64(balancesDiff[0:8]) @@ -97,13 +97,13 @@ func FuzzNewHdiff(f *testing.F) { binary.LittleEndian.PutUint64(balancesDiff[0:8], boundedCount) } } - + input := HdiffBytes{ StateDiff: stateDiff, ValidatorDiffs: validatorDiffs, BalancesDiff: 
balancesDiff, } - + // Test parsing - should not panic even with corrupted but bounded data _, err := newHdiff(input) _ = err // Expected to fail with corrupted data @@ -118,30 +118,30 @@ func FuzzNewStateDiff(f *testing.F) { t.Errorf("newStateDiff panicked: %v", r) } }() - + // Bound validator count to reasonable range validators := uint64(validatorCount%32 + 8) // 8-39 validators if slotDelta > 100 { slotDelta = slotDelta % 100 } - + // Generate random source state source, _ := util.DeterministicGenesisStateElectra(t, validators) target := source.Copy() - + // Apply random slot change _ = target.SetSlot(source.Slot() + primitives.Slot(slotDelta)) - + // Apply random balance changes if len(balanceData) >= 8 { balances := target.Balances() numChanges := int(binary.LittleEndian.Uint64(balanceData[:8])) % len(balances) for i := 0; i < numChanges && i*8+8 < len(balanceData); i++ { idx := i % len(balances) - delta := int64(binary.LittleEndian.Uint64(balanceData[i*8+8:(i+1)*8+8])) + delta := int64(binary.LittleEndian.Uint64(balanceData[i*8+8 : (i+1)*8+8])) // Keep delta reasonable delta = delta % oneEthInGwei // Max 1 ETH change - + if delta < 0 && uint64(-delta) > balances[idx] { balances[idx] = 0 } else if delta < 0 { @@ -152,7 +152,7 @@ func FuzzNewStateDiff(f *testing.F) { } _ = target.SetBalances(balances) } - + // Apply random validator changes if len(validatorData) > 0 { validators := target.Validators() @@ -165,20 +165,20 @@ func FuzzNewStateDiff(f *testing.F) { } _ = target.SetValidators(validators) } - - // Create diff between source and target + + // Create diff between source and target diff, err := Diff(source, target) if err != nil { return // Skip if diff creation fails } - + // Test newStateDiff with the valid serialized diff from StateDiff field reconstructed, err := newStateDiff(diff.StateDiff) if err != nil { t.Errorf("newStateDiff failed on valid diff: %v", err) return } - + // Basic validation that reconstruction worked if reconstructed == nil { t.Error("newStateDiff returned nil without error") @@ -194,23 +194,23 @@ func FuzzNewValidatorDiffs(f *testing.F) { t.Errorf("newValidatorDiffs panicked: %v", r) } }() - + // Bound validator count to reasonable range validators := uint64(validatorCount%16 + 4) // 4-19 validators - + // Generate random source state source, _ := util.DeterministicGenesisStateElectra(t, validators) target := source.Copy() - + // Apply random validator changes based on changeData if len(changeData) > 0 { vals := target.Validators() numChanges := int(changeData[0]) % len(vals) - + for i := 0; i < numChanges && i < len(changeData)-1; i++ { idx := i % len(vals) changeType := changeData[i+1] % 4 - + switch changeType { case 0: // Change effective balance vals[idx].EffectiveBalance += oneEthInGwei @@ -224,20 +224,20 @@ func FuzzNewValidatorDiffs(f *testing.F) { } _ = target.SetValidators(vals) } - + // Create diff between source and target diff, err := Diff(source, target) if err != nil { return // Skip if diff creation fails } - + // Test newValidatorDiffs with the valid serialized diff reconstructed, err := newValidatorDiffs(diff.ValidatorDiffs) if err != nil { t.Errorf("newValidatorDiffs failed on valid diff: %v", err) return } - + // Basic validation that reconstruction worked if reconstructed == nil { t.Error("newValidatorDiffs returned nil without error") @@ -253,25 +253,25 @@ func FuzzNewBalancesDiff(f *testing.F) { t.Errorf("newBalancesDiff panicked: %v", r) } }() - + // Bound balance count to reasonable range numBalances := int(balanceCount%32 + 8) 
// 8-39 balances - + // Generate simple source state source, _ := util.DeterministicGenesisStateElectra(t, uint64(numBalances)) target := source.Copy() - + // Apply random balance changes based on balanceData if len(balanceData) >= 8 { balances := target.Balances() numChanges := int(binary.LittleEndian.Uint64(balanceData[:8])) % numBalances - + for i := 0; i < numChanges && i*8+8 < len(balanceData); i++ { idx := i % numBalances - delta := int64(binary.LittleEndian.Uint64(balanceData[i*8+8:(i+1)*8+8])) + delta := int64(binary.LittleEndian.Uint64(balanceData[i*8+8 : (i+1)*8+8])) // Keep delta reasonable delta = delta % oneEthInGwei // Max 1 ETH change - + if delta < 0 && uint64(-delta) > balances[idx] { balances[idx] = 0 } else if delta < 0 { @@ -282,20 +282,20 @@ func FuzzNewBalancesDiff(f *testing.F) { } _ = target.SetBalances(balances) } - + // Create diff between source and target to get BalancesDiff diff, err := Diff(source, target) if err != nil { return // Skip if diff creation fails } - + // Test newBalancesDiff with the valid serialized diff reconstructed, err := newBalancesDiff(diff.BalancesDiff) if err != nil { t.Errorf("newBalancesDiff failed on valid diff: %v", err) return } - + // Basic validation that reconstruction worked if reconstructed == nil { t.Error("newBalancesDiff returned nil without error") @@ -307,13 +307,13 @@ func FuzzNewBalancesDiff(f *testing.F) { func FuzzApplyDiff(f *testing.F) { // Test with realistic state variations, not random data ctx := context.Background() - + // Add seed corpus with various valid scenarios sizes := []uint64{8, 16, 32, 64} for _, size := range sizes { source, _ := util.DeterministicGenesisStateElectra(f, size) target := source.Copy() - + // Different types of realistic changes scenarios := []func(){ func() { _ = target.SetSlot(source.Slot() + 1) }, // Slot change @@ -332,29 +332,29 @@ func FuzzApplyDiff(f *testing.F) { } }, } - + for _, scenario := range scenarios { testTarget := source.Copy() scenario() - + validDiff, err := Diff(source, testTarget) if err == nil { f.Add(validDiff.StateDiff, validDiff.ValidatorDiffs, validDiff.BalancesDiff) } } } - + f.Fuzz(func(t *testing.T, stateDiff, validatorDiffs, balancesDiff []byte) { // Only test with reasonable sized inputs if len(stateDiff) > 10000 || len(validatorDiffs) > 10000 || len(balancesDiff) > 10000 { return } - + // Bound historical roots length in stateDiff (same as FuzzNewHdiff) if len(stateDiff) > maxFuzzStateDiffSize { stateDiff = stateDiff[:maxFuzzStateDiffSize] } - + // Bound validator count in validatorDiffs if len(validatorDiffs) >= 8 { count := binary.LittleEndian.Uint64(validatorDiffs[0:8]) @@ -363,7 +363,7 @@ func FuzzApplyDiff(f *testing.F) { binary.LittleEndian.PutUint64(validatorDiffs[0:8], boundedCount) } } - + // Bound balance count in balancesDiff if len(balancesDiff) >= 8 { count := binary.LittleEndian.Uint64(balancesDiff[0:8]) @@ -372,16 +372,16 @@ func FuzzApplyDiff(f *testing.F) { binary.LittleEndian.PutUint64(balancesDiff[0:8], boundedCount) } } - + // Create fresh source state for each test source, _ := util.DeterministicGenesisStateElectra(t, 8) - + diff := HdiffBytes{ StateDiff: stateDiff, ValidatorDiffs: validatorDiffs, BalancesDiff: balancesDiff, } - + // Apply diff - errors are expected for fuzzed data _, err := ApplyDiff(ctx, source, diff) _ = err // Expected to fail with invalid data @@ -393,24 +393,24 @@ func FuzzReadPendingAttestation(f *testing.F) { // Add edge cases - this function is particularly vulnerable f.Add([]byte{}) f.Add([]byte{0x01, 0x02, 
0x03, 0x04, 0x05, 0x06, 0x07, 0x08}) // 8 bytes - f.Add(make([]byte, 200)) // Larger than expected - + f.Add(make([]byte, 200)) // Larger than expected + // Add a case with large reported length largeLength := make([]byte, 8) binary.LittleEndian.PutUint64(largeLength, 0xFFFFFFFF) // Large bits length f.Add(largeLength) - + f.Fuzz(func(t *testing.T, data []byte) { defer func() { if r := recover(); r != nil { t.Errorf("readPendingAttestation panicked: %v", r) } }() - + // Make a copy since the function modifies the slice dataCopy := make([]byte, len(data)) copy(dataCopy, data) - + // Bound the bits length by modifying the first 8 bytes if they exist if len(dataCopy) >= 8 { // Read the bits length and bound it to maxFuzzValidators @@ -420,7 +420,7 @@ func FuzzReadPendingAttestation(f *testing.F) { binary.LittleEndian.PutUint64(dataCopy[0:8], boundedLength) } } - + _, err := readPendingAttestation(&dataCopy) _ = err }) @@ -497,14 +497,14 @@ func FuzzComputeLPS(f *testing.F) { f.Add("1,1,1") f.Add("1,2,3,4") f.Add("") - + f.Fuzz(func(t *testing.T, patternStr string) { defer func() { if r := recover(); r != nil { t.Errorf("computeLPS panicked: %v", r) } }() - + // Parse comma-separated string into int slice var pattern []int if patternStr != "" { @@ -514,14 +514,14 @@ func FuzzComputeLPS(f *testing.F) { } } } - + // Convert to pointer slice patternPtrs := make([]*int, len(pattern)) for i := range pattern { val := pattern[i] patternPtrs[i] = &val } - + integerEquals := func(a, b *int) bool { if a == nil && b == nil { return true @@ -531,14 +531,14 @@ func FuzzComputeLPS(f *testing.F) { } return *a == *b } - + result := computeLPS(patternPtrs, integerEquals) - + // Verify result length matches input if len(result) != len(pattern) { t.Errorf("computeLPS returned wrong length: got %d, expected %d", len(result), len(pattern)) } - + // Verify all LPS values are non-negative and within bounds for i, lps := range result { if lps < 0 || lps > i { @@ -556,40 +556,40 @@ func FuzzDiffToBalances(f *testing.F) { t.Errorf("diffToBalances panicked: %v", r) } }() - + // Convert byte data to balance arrays var sourceBalances, targetBalances []uint64 - + // Parse source balances (8 bytes per uint64) for i := 0; i+7 < len(sourceData) && len(sourceBalances) < 100; i += 8 { balance := binary.LittleEndian.Uint64(sourceData[i : i+8]) sourceBalances = append(sourceBalances, balance) } - + // Parse target balances for i := 0; i+7 < len(targetData) && len(targetBalances) < 100; i += 8 { balance := binary.LittleEndian.Uint64(targetData[i : i+8]) targetBalances = append(targetBalances, balance) } - + // Create states with the provided balances source, _ := util.DeterministicGenesisStateElectra(t, 1) target, _ := util.DeterministicGenesisStateElectra(t, 1) - + if len(sourceBalances) > 0 { _ = source.SetBalances(sourceBalances) } if len(targetBalances) > 0 { _ = target.SetBalances(targetBalances) } - + result, err := diffToBalances(source, target) - + // If no error, verify result consistency if err == nil && len(result) > 0 { // Result length should match target length if len(result) != len(target.Balances()) { - t.Errorf("diffToBalances result length mismatch: got %d, expected %d", + t.Errorf("diffToBalances result length mismatch: got %d, expected %d", len(result), len(target.Balances())) } } @@ -604,21 +604,21 @@ func FuzzValidatorsEqual(f *testing.F) { t.Errorf("validatorsEqual panicked: %v", r) } }() - + // Create two validators and fuzz their fields if len(data) < 16 { return } - + source, _ := 
util.DeterministicGenesisStateElectra(t, 2) validators := source.Validators() if len(validators) < 2 { return } - + val1 := validators[0] val2 := validators[1] - + // Modify validator fields based on fuzz data if len(data) > 0 && data[0]%2 == 0 { val2.EffectiveBalance = val1.EffectiveBalance + uint64(data[0]) @@ -626,11 +626,11 @@ func FuzzValidatorsEqual(f *testing.F) { if len(data) > 1 && data[1]%2 == 0 { val2.Slashed = !val1.Slashed } - + // Create ReadOnlyValidator wrappers if needed // Since validatorsEqual expects ReadOnlyValidator interface, // we'll skip this test for now as it requires state wrapper implementation _ = val1 _ = val2 }) -} \ No newline at end of file +} diff --git a/consensus-types/hdiff/property_test.go b/consensus-types/hdiff/property_test.go index 058c62c8b6..1130e77ecc 100644 --- a/consensus-types/hdiff/property_test.go +++ b/consensus-types/hdiff/property_test.go @@ -6,9 +6,9 @@ import ( "testing" "time" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" ) // maxSafeBalance ensures balances can be safely cast to int64 for diff computation @@ -21,11 +21,11 @@ func FuzzPropertyRoundTrip(f *testing.F) { if slotDelta > 32 { // Max one epoch slotDelta = slotDelta % 32 } - + // Convert byte data to realistic deltas and changes var balanceDeltas []int64 var validatorChanges []bool - + // Parse balance deltas - limit to realistic amounts (8 bytes per int64) for i := 0; i+7 < len(balanceData) && len(balanceDeltas) < 20; i += 8 { delta := int64(binary.LittleEndian.Uint64(balanceData[i : i+8])) @@ -38,27 +38,27 @@ func FuzzPropertyRoundTrip(f *testing.F) { } balanceDeltas = append(balanceDeltas, delta) } - + // Parse validator changes (1 byte per bool) - limit to small number for i := 0; i < len(validatorData) && len(validatorChanges) < 10; i++ { validatorChanges = append(validatorChanges, validatorData[i]%2 == 0) } - + ctx := t.Context() - + // Create source state with reasonable size validatorCount := uint64(len(validatorChanges) + 8) // Minimum 8 validators if validatorCount > 64 { validatorCount = 64 // Cap at 64 for performance } source, _ := util.DeterministicGenesisStateElectra(t, validatorCount) - + // Create target state with modifications target := source.Copy() - + // Apply slot change _ = target.SetSlot(source.Slot() + primitives.Slot(slotDelta)) - + // Apply realistic balance changes if len(balanceDeltas) > 0 { balances := target.Balances() @@ -85,7 +85,7 @@ func FuzzPropertyRoundTrip(f *testing.F) { } _ = target.SetBalances(balances) } - + // Apply realistic validator changes if len(validatorChanges) > 0 { validators := target.Validators() @@ -100,24 +100,24 @@ func FuzzPropertyRoundTrip(f *testing.F) { } _ = target.SetValidators(validators) } - + // Create diff diff, err := Diff(source, target) if err != nil { // If diff creation fails, that's acceptable for malformed inputs return } - + // Apply diff result, err := ApplyDiff(ctx, source, diff) if err != nil { // If diff application fails, that's acceptable return } - + // Verify round-trip property: source + diff = target require.Equal(t, target.Slot(), result.Slot()) - + // Verify balance consistency targetBalances := target.Balances() resultBalances := result.Balances() @@ -125,7 +125,7 @@ func FuzzPropertyRoundTrip(f 
*testing.F) { for i := range targetBalances { require.Equal(t, targetBalances[i], resultBalances[i], "Balance mismatch at index %d", i) } - + // Verify validator consistency targetVals := target.Validators() resultVals := result.Validators() @@ -142,16 +142,16 @@ func FuzzPropertyResourceBounds(f *testing.F) { f.Fuzz(func(t *testing.T, validatorCount uint8, slotDelta uint8, changeCount uint8) { // Use realistic parameters validators := uint64(validatorCount%64 + 8) // 8-71 validators - slots := uint64(slotDelta % 32) // 0-31 slots + slots := uint64(slotDelta % 32) // 0-31 slots changes := int(changeCount % 10) // 0-9 changes - + // Create realistic states source, _ := util.DeterministicGenesisStateElectra(t, validators) target := source.Copy() - + // Apply realistic changes _ = target.SetSlot(source.Slot() + primitives.Slot(slots)) - + if changes > 0 { validatorList := target.Validators() for i := 0; i < changes && i < len(validatorList); i++ { @@ -159,21 +159,21 @@ func FuzzPropertyResourceBounds(f *testing.F) { } _ = target.SetValidators(validatorList) } - + // Operations should complete quickly start := time.Now() diff, err := Diff(source, target) duration := time.Since(start) - + if err == nil { // Should be fast require.Equal(t, true, duration < time.Second, "Diff creation too slow: %v", duration) - + // Apply should also be fast start = time.Now() _, err = ApplyDiff(t.Context(), source, diff) duration = time.Since(start) - + if err == nil { require.Equal(t, true, duration < time.Second, "Diff application too slow: %v", duration) } @@ -190,13 +190,13 @@ func FuzzPropertyDiffEfficiency(f *testing.F) { if numChanges > 10 { numChanges = numChanges % 10 } - + // Create states with small differences source, _ := util.DeterministicGenesisStateElectra(t, 64) target := source.Copy() - + _ = target.SetSlot(source.Slot() + primitives.Slot(slotDelta)) - + // Make a few small changes if numChanges > 0 { validators := target.Validators() @@ -205,24 +205,24 @@ func FuzzPropertyDiffEfficiency(f *testing.F) { } _ = target.SetValidators(validators) } - + // Create diff diff, err := Diff(source, target) if err != nil { return } - + // For small changes, diff should be much smaller than full state sourceSSZ, err := source.MarshalSSZ() if err != nil { return } - + diffSize := len(diff.StateDiff) + len(diff.ValidatorDiffs) + len(diff.BalancesDiff) - + // Diff should be smaller than full state for small changes if numChanges <= 5 && slotDelta <= 10 { - require.Equal(t, true, diffSize < len(sourceSSZ)/2, + require.Equal(t, true, diffSize < len(sourceSSZ)/2, "Diff size %d should be less than half of state size %d", diffSize, len(sourceSSZ)) } }) @@ -239,10 +239,10 @@ func FuzzPropertyBalanceConservation(f *testing.F) { change := rawChange % (maxSafeBalance / 2) // Divide by 2 to allow for addition/subtraction balanceChanges = append(balanceChanges, change) } - + source, _ := util.DeterministicGenesisStateElectra(t, uint64(len(balanceChanges)+10)) originalBalances := source.Balances() - + // Ensure initial balances are within safe range for int64 casting for i, balance := range originalBalances { if balance > maxSafeBalance { @@ -250,23 +250,23 @@ func FuzzPropertyBalanceConservation(f *testing.F) { } } _ = source.SetBalances(originalBalances) - + // Calculate total before var totalBefore uint64 for _, balance := range originalBalances { totalBefore += balance } - + // Apply balance changes via diff system target := source.Copy() targetBalances := target.Balances() - + var totalDelta int64 for i, delta := 
range balanceChanges { if i >= len(targetBalances) { break } - + // Prevent underflow if delta < 0 && uint64(-delta) > targetBalances[i] { totalDelta -= int64(targetBalances[i]) // Actually lost amount (negative) @@ -287,25 +287,25 @@ func FuzzPropertyBalanceConservation(f *testing.F) { } } _ = target.SetBalances(targetBalances) - + // Apply through diff system diff, err := Diff(source, target) if err != nil { return } - + result, err := ApplyDiff(t.Context(), source, diff) if err != nil { return } - + // Calculate total after resultBalances := result.Balances() var totalAfter uint64 for _, balance := range resultBalances { totalAfter += balance } - + // Verify conservation (accounting for intended changes) expectedTotal := totalBefore if totalDelta >= 0 { @@ -317,9 +317,9 @@ func FuzzPropertyBalanceConservation(f *testing.F) { expectedTotal = 0 } } - - require.Equal(t, expectedTotal, totalAfter, - "Balance conservation violated: before=%d, delta=%d, expected=%d, actual=%d", + + require.Equal(t, expectedTotal, totalAfter, + "Balance conservation violated: before=%d, delta=%d, expected=%d, actual=%d", totalBefore, totalDelta, expectedTotal, totalAfter) }) } @@ -329,24 +329,24 @@ func FuzzPropertyMonotonicSlot(f *testing.F) { f.Fuzz(func(t *testing.T, slotDelta uint64) { source, _ := util.DeterministicGenesisStateElectra(t, 16) target := source.Copy() - + targetSlot := source.Slot() + primitives.Slot(slotDelta) _ = target.SetSlot(targetSlot) - + diff, err := Diff(source, target) if err != nil { return } - + result, err := ApplyDiff(t.Context(), source, diff) if err != nil { return } - + // Slot should never decrease - require.Equal(t, true, result.Slot() >= source.Slot(), + require.Equal(t, true, result.Slot() >= source.Slot(), "Slot decreased from %d to %d", source.Slot(), result.Slot()) - + // Slot should match target require.Equal(t, targetSlot, result.Slot()) }) @@ -360,10 +360,10 @@ func FuzzPropertyValidatorIndices(f *testing.F) { for i := 0; i < len(changeData) && len(changes) < 20; i++ { changes = append(changes, changeData[i]%2 == 0) } - + source, _ := util.DeterministicGenesisStateElectra(t, uint64(len(changes)+5)) target := source.Copy() - + // Apply changes validators := target.Validators() for i, shouldChange := range changes { @@ -375,21 +375,21 @@ func FuzzPropertyValidatorIndices(f *testing.F) { } } _ = target.SetValidators(validators) - + diff, err := Diff(source, target) if err != nil { return } - + result, err := ApplyDiff(t.Context(), source, diff) if err != nil { return } - + // Validator count should not decrease require.Equal(t, true, len(result.Validators()) >= len(source.Validators()), "Validator count decreased from %d to %d", len(source.Validators()), len(result.Validators())) - + // Public keys should be preserved for existing validators sourceVals := source.Validators() resultVals := result.Validators() @@ -400,4 +400,4 @@ func FuzzPropertyValidatorIndices(f *testing.F) { } } }) -} \ No newline at end of file +} diff --git a/consensus-types/hdiff/security_test.go b/consensus-types/hdiff/security_test.go index 697fb25daf..b108e78507 100644 --- a/consensus-types/hdiff/security_test.go +++ b/consensus-types/hdiff/security_test.go @@ -6,46 +6,46 @@ import ( "testing" "time" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" ) // TestIntegerOverflowProtection tests protection against balance overflow attacks func 
TestIntegerOverflowProtection(t *testing.T) { source, _ := util.DeterministicGenesisStateElectra(t, 8) - + // Test balance overflow in diffToBalances - use realistic values t.Run("balance_diff_overflow", func(t *testing.T) { target := source.Copy() balances := target.Balances() - + // Set high but realistic balance values (32 ETH in Gwei = 32e9) balances[0] = 32000000000 // 32 ETH balances[1] = 64000000000 // 64 ETH _ = target.SetBalances(balances) - + // This should work fine with realistic values diffs, err := diffToBalances(source, target) require.NoError(t, err) - + // Verify the diffs are reasonable require.Equal(t, true, len(diffs) > 0, "Should have balance diffs") }) - + // Test reasonable balance changes t.Run("realistic_balance_changes", func(t *testing.T) { // Create realistic balance changes (slashing, rewards) balancesDiff := []int64{1000000000, -500000000, 2000000000} // 1 ETH gain, 0.5 ETH loss, 2 ETH gain - + // Apply to state with normal balances testSource := source.Copy() normalBalances := []uint64{32000000000, 32000000000, 32000000000} // 32 ETH each _ = testSource.SetBalances(normalBalances) - + // This should work fine result, err := applyBalancesDiff(testSource, balancesDiff) require.NoError(t, err) - + resultBalances := result.Balances() require.Equal(t, uint64(33000000000), resultBalances[0]) // 33 ETH require.Equal(t, uint64(31500000000), resultBalances[1]) // 31.5 ETH @@ -59,7 +59,7 @@ func TestReasonablePerformance(t *testing.T) { // Test with a large but realistic validator set source, _ := util.DeterministicGenesisStateElectra(t, 1000) // 1000 validators target := source.Copy() - + // Make realistic changes _ = target.SetSlot(source.Slot() + 32) // One epoch validators := target.Validators() @@ -67,31 +67,31 @@ func TestReasonablePerformance(t *testing.T) { validators[i].EffectiveBalance += 1000000000 // 1 ETH change } _ = target.SetValidators(validators) - + // Should complete quickly start := time.Now() diff, err := Diff(source, target) duration := time.Since(start) - + require.NoError(t, err) require.Equal(t, true, duration < time.Second, "Diff creation took too long: %v", duration) require.Equal(t, true, len(diff.StateDiff) > 0, "Should have state diff") }) - + t.Run("realistic_diff_application", func(t *testing.T) { // Test applying diffs to large states source, _ := util.DeterministicGenesisStateElectra(t, 500) target := source.Copy() _ = target.SetSlot(source.Slot() + 1) - + // Create and apply diff diff, err := Diff(source, target) require.NoError(t, err) - + start := time.Now() result, err := ApplyDiff(t.Context(), source, diff) duration := time.Since(start) - + require.NoError(t, err) require.Equal(t, target.Slot(), result.Slot()) require.Equal(t, true, duration < time.Second, "Diff application took too long: %v", duration) @@ -103,30 +103,30 @@ func TestStateTransitionValidation(t *testing.T) { t.Run("validator_slashing_scenario", func(t *testing.T) { source, _ := util.DeterministicGenesisStateElectra(t, 10) target := source.Copy() - + // Simulate validator slashing (realistic scenario) validators := target.Validators() validators[0].Slashed = true validators[0].EffectiveBalance = 0 // Slashed validator loses balance _ = target.SetValidators(validators) - + // This should work fine diff, err := Diff(source, target) require.NoError(t, err) - + result, err := ApplyDiff(t.Context(), source, diff) require.NoError(t, err) require.Equal(t, true, result.Validators()[0].Slashed) require.Equal(t, uint64(0), result.Validators()[0].EffectiveBalance) }) - + 
t.Run("epoch_transition_scenario", func(t *testing.T) { source, _ := util.DeterministicGenesisStateElectra(t, 64) target := source.Copy() - + // Simulate epoch transition with multiple changes _ = target.SetSlot(source.Slot() + 32) // One epoch - + // Some validators get rewards, others get penalties balances := target.Balances() for i := 0; i < len(balances); i++ { @@ -139,31 +139,31 @@ func TestStateTransitionValidation(t *testing.T) { } } _ = target.SetBalances(balances) - + // This should work smoothly diff, err := Diff(source, target) require.NoError(t, err) - + result, err := ApplyDiff(t.Context(), source, diff) require.NoError(t, err) require.Equal(t, target.Slot(), result.Slot()) }) - + t.Run("consistent_state_root", func(t *testing.T) { // Test that diffs preserve state consistency source, _ := util.DeterministicGenesisStateElectra(t, 32) target := source.Copy() - + // Make minimal changes _ = target.SetSlot(source.Slot() + 1) - + // Diff and apply should be consistent diff, err := Diff(source, target) require.NoError(t, err) - + result, err := ApplyDiff(t.Context(), source, diff) require.NoError(t, err) - + // Result should match target require.Equal(t, target.Slot(), result.Slot()) require.Equal(t, len(target.Validators()), len(result.Validators())) @@ -177,73 +177,73 @@ func TestSerializationRoundTrip(t *testing.T) { // Test that serialization and deserialization are consistent source, _ := util.DeterministicGenesisStateElectra(t, 16) target := source.Copy() - + // Make changes _ = target.SetSlot(source.Slot() + 5) validators := target.Validators() validators[0].EffectiveBalance += 1000000000 _ = target.SetValidators(validators) - + // Create diff diff1, err := Diff(source, target) require.NoError(t, err) - + // Deserialize and re-serialize hdiff, err := newHdiff(diff1) require.NoError(t, err) - + diff2 := hdiff.serialize() - + // Apply both diffs - should get same result result1, err := ApplyDiff(t.Context(), source, diff1) require.NoError(t, err) - + result2, err := ApplyDiff(t.Context(), source, diff2) require.NoError(t, err) - + require.Equal(t, result1.Slot(), result2.Slot()) require.Equal(t, result1.Validators()[0].EffectiveBalance, result2.Validators()[0].EffectiveBalance) }) - + t.Run("empty_diff_handling", func(t *testing.T) { // Test that empty diffs are handled correctly source, _ := util.DeterministicGenesisStateElectra(t, 8) target := source.Copy() // No changes - + // Should create minimal diff diff, err := Diff(source, target) require.NoError(t, err) - + // Apply should work and return equivalent state result, err := ApplyDiff(t.Context(), source, diff) require.NoError(t, err) - + require.Equal(t, source.Slot(), result.Slot()) require.Equal(t, len(source.Validators()), len(result.Validators())) }) - + t.Run("compression_efficiency", func(t *testing.T) { // Test that compression is working effectively source, _ := util.DeterministicGenesisStateElectra(t, 100) target := source.Copy() - + // Make small changes _ = target.SetSlot(source.Slot() + 1) validators := target.Validators() validators[0].EffectiveBalance += 1000000000 _ = target.SetValidators(validators) - + // Create diff diff, err := Diff(source, target) require.NoError(t, err) - + // Get full state size fullStateSSZ, err := target.MarshalSSZ() require.NoError(t, err) - + // Diff should be much smaller than full state diffSize := len(diff.StateDiff) + len(diff.ValidatorDiffs) + len(diff.BalancesDiff) - require.Equal(t, true, diffSize < len(fullStateSSZ)/2, + require.Equal(t, true, diffSize < 
len(fullStateSSZ)/2, "Diff should be smaller than full state: diff=%d, full=%d", diffSize, len(fullStateSSZ)) }) } @@ -254,7 +254,7 @@ func TestKMPSecurity(t *testing.T) { // Test with nil pointers in the pattern/text pattern := []*int{nil, nil, nil} text := []*int{nil, nil, nil, nil, nil} - + equals := func(a, b *int) bool { if a == nil && b == nil { return true @@ -264,29 +264,29 @@ func TestKMPSecurity(t *testing.T) { } return *a == *b } - + // Should not panic - result can be any integer result := kmpIndex(len(pattern), text, equals) _ = result // Any result is valid, just ensure no panic }) - + t.Run("empty_pattern_edge_case", func(t *testing.T) { var pattern []*int text := []*int{new(int), new(int)} - + equals := func(a, b *int) bool { return a == b } - + result := kmpIndex(0, text, equals) require.Equal(t, 0, result, "Empty pattern should return 0") _ = pattern // Silence unused variable warning }) - + t.Run("realistic_pattern_performance", func(t *testing.T) { // Test with realistic sizes to ensure good performance realisticSize := 100 // More realistic for validator arrays pattern := make([]*int, realisticSize) text := make([]*int, realisticSize*2) - + // Create realistic pattern for i := range pattern { val := i % 10 // More variation @@ -296,7 +296,7 @@ func TestKMPSecurity(t *testing.T) { val := i % 10 text[i] = &val } - + equals := func(a, b *int) bool { if a == nil && b == nil { return true @@ -306,13 +306,13 @@ func TestKMPSecurity(t *testing.T) { } return *a == *b } - + start := time.Now() result := kmpIndex(len(pattern), text, equals) duration := time.Since(start) - + // Should complete quickly with realistic inputs - require.Equal(t, true, duration < time.Second, + require.Equal(t, true, duration < time.Second, "KMP took too long: %v", duration) _ = result // Any result is valid, just ensure performance is good }) @@ -324,18 +324,18 @@ func TestConcurrencySafety(t *testing.T) { source, _ := util.DeterministicGenesisStateElectra(t, 32) target := source.Copy() _ = target.SetSlot(source.Slot() + 1) - + const numGoroutines = 10 const iterations = 100 - + var wg sync.WaitGroup errors := make(chan error, numGoroutines*iterations) - + for i := 0; i < numGoroutines; i++ { wg.Add(1) go func(workerID int) { defer wg.Done() - + for j := 0; j < iterations; j++ { _, err := Diff(source, target) if err != nil { @@ -344,34 +344,34 @@ func TestConcurrencySafety(t *testing.T) { } }(i) } - + wg.Wait() close(errors) - + // Check for any errors for err := range errors { t.Error(err) } }) - + t.Run("concurrent_diff_application", func(t *testing.T) { ctx := t.Context() source, _ := util.DeterministicGenesisStateElectra(t, 16) target := source.Copy() _ = target.SetSlot(source.Slot() + 5) - + diff, err := Diff(source, target) require.NoError(t, err) - + const numGoroutines = 10 var wg sync.WaitGroup errors := make(chan error, numGoroutines) - + for i := 0; i < numGoroutines; i++ { wg.Add(1) go func(workerID int) { defer wg.Done() - + // Each goroutine needs its own copy of the source state localSource := source.Copy() _, err := ApplyDiff(ctx, localSource, diff) @@ -380,13 +380,13 @@ func TestConcurrencySafety(t *testing.T) { } }(i) } - + wg.Wait() close(errors) - + // Check for any errors for err := range errors { t.Error(err) } }) -} \ No newline at end of file +} diff --git a/consensus-types/hdiff/state_diff.go b/consensus-types/hdiff/state_diff.go index eb009808a3..3d88541576 100644 --- a/consensus-types/hdiff/state_diff.go +++ b/consensus-types/hdiff/state_diff.go @@ -7,21 +7,21 @@ import ( 
"slices" "github.com/OffchainLabs/go-bitfield" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/altair" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/capella" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/deneb" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/electra" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/execution" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/fulu" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/helpers" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - enginev1 "github.com/OffchainLabs/prysm/v6/proto/engine/v1" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/altair" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/capella" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/deneb" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/electra" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/execution" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/fulu" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/helpers" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + enginev1 "github.com/OffchainLabs/prysm/v7/proto/engine/v1" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" "github.com/golang/snappy" "github.com/pkg/errors" ssz "github.com/prysmaticlabs/fastssz" diff --git a/consensus-types/hdiff/state_diff_test.go b/consensus-types/hdiff/state_diff_test.go index c556354d73..082425ee10 100644 --- a/consensus-types/hdiff/state_diff_test.go +++ b/consensus-types/hdiff/state_diff_test.go @@ -8,16 +8,16 @@ import ( "os" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/transition" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/transition" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" "github.com/golang/snappy" "github.com/pkg/errors" ) diff --git 
a/consensus-types/helpers/BUILD.bazel b/consensus-types/helpers/BUILD.bazel index aac519dd5a..d493a9ad55 100644 --- a/consensus-types/helpers/BUILD.bazel +++ b/consensus-types/helpers/BUILD.bazel @@ -3,7 +3,7 @@ load("@prysm//tools/go:def.bzl", "go_library", "go_test") go_library( name = "go_default_library", srcs = ["comparisons.go"], - importpath = "github.com/OffchainLabs/prysm/v6/consensus-types/helpers", + importpath = "github.com/OffchainLabs/prysm/v7/consensus-types/helpers", visibility = ["//visibility:public"], deps = ["//proto/prysm/v1alpha1:go_default_library"], ) diff --git a/consensus-types/helpers/comparisons.go b/consensus-types/helpers/comparisons.go index 49861b2a73..aad30a7e40 100644 --- a/consensus-types/helpers/comparisons.go +++ b/consensus-types/helpers/comparisons.go @@ -3,7 +3,7 @@ package helpers import ( "bytes" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" ) func ForksEqual(s, t *ethpb.Fork) bool { diff --git a/consensus-types/helpers/comparisons_test.go b/consensus-types/helpers/comparisons_test.go index e4d3486fe9..0ba22aeddd 100644 --- a/consensus-types/helpers/comparisons_test.go +++ b/consensus-types/helpers/comparisons_test.go @@ -3,7 +3,7 @@ package helpers import ( "testing" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" ) func TestForksEqual(t *testing.T) { diff --git a/consensus-types/interfaces/BUILD.bazel b/consensus-types/interfaces/BUILD.bazel index 6725f36bf1..0e40d7a816 100644 --- a/consensus-types/interfaces/BUILD.bazel +++ b/consensus-types/interfaces/BUILD.bazel @@ -9,7 +9,7 @@ go_library( "utils.go", "validator.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces", + importpath = "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces", visibility = ["//visibility:public"], deps = [ "//config/fieldparams:go_default_library", diff --git a/consensus-types/interfaces/beacon_block.go b/consensus-types/interfaces/beacon_block.go index 14faf3bf6a..a48a8db955 100644 --- a/consensus-types/interfaces/beacon_block.go +++ b/consensus-types/interfaces/beacon_block.go @@ -1,11 +1,11 @@ package interfaces import ( - field_params "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - enginev1 "github.com/OffchainLabs/prysm/v6/proto/engine/v1" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - validatorpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1/validator-client" + field_params "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + enginev1 "github.com/OffchainLabs/prysm/v7/proto/engine/v1" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + validatorpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1/validator-client" "github.com/pkg/errors" ssz "github.com/prysmaticlabs/fastssz" "google.golang.org/protobuf/proto" diff --git a/consensus-types/interfaces/error.go b/consensus-types/interfaces/error.go index 23a0645ec8..cd25a383b7 100644 --- a/consensus-types/interfaces/error.go +++ b/consensus-types/interfaces/error.go @@ -1,7 +1,7 @@ package interfaces import ( - "github.com/OffchainLabs/prysm/v6/runtime/version" + "github.com/OffchainLabs/prysm/v7/runtime/version" "github.com/pkg/errors" ) diff --git a/consensus-types/interfaces/error_test.go b/consensus-types/interfaces/error_test.go 
index 5c508a1224..92601c467e 100644 --- a/consensus-types/interfaces/error_test.go +++ b/consensus-types/interfaces/error_test.go @@ -3,8 +3,8 @@ package interfaces import ( "testing" - "github.com/OffchainLabs/prysm/v6/runtime/version" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/runtime/version" + "github.com/OffchainLabs/prysm/v7/testing/require" "github.com/pkg/errors" ) diff --git a/consensus-types/interfaces/light_client.go b/consensus-types/interfaces/light_client.go index 9f22b9828f..fe774fe198 100644 --- a/consensus-types/interfaces/light_client.go +++ b/consensus-types/interfaces/light_client.go @@ -1,9 +1,9 @@ package interfaces import ( - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - pb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + pb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" ssz "github.com/prysmaticlabs/fastssz" "google.golang.org/protobuf/proto" ) diff --git a/consensus-types/interfaces/utils.go b/consensus-types/interfaces/utils.go index c9bf4a61d5..f898a3873e 100644 --- a/consensus-types/interfaces/utils.go +++ b/consensus-types/interfaces/utils.go @@ -1,7 +1,7 @@ package interfaces import ( - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" "github.com/pkg/errors" ) diff --git a/consensus-types/interfaces/utils_test.go b/consensus-types/interfaces/utils_test.go index 5111f28cb7..6b8b551a15 100644 --- a/consensus-types/interfaces/utils_test.go +++ b/consensus-types/interfaces/utils_test.go @@ -3,13 +3,13 @@ package interfaces_test import ( "testing" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - eth "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + eth "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" ) func TestBeaconBlockHeaderFromBlock(t *testing.T) { diff --git a/consensus-types/light-client/BUILD.bazel b/consensus-types/light-client/BUILD.bazel index 12b3a77d4c..711a56683e 100644 --- a/consensus-types/light-client/BUILD.bazel +++ b/consensus-types/light-client/BUILD.bazel @@ -10,7 +10,7 @@ go_library( "optimistic_update.go", "update.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/consensus-types/light-client", + importpath = "github.com/OffchainLabs/prysm/v7/consensus-types/light-client", visibility = ["//visibility:public"], deps = [ "//config/fieldparams:go_default_library", diff --git a/consensus-types/light-client/bootstrap.go b/consensus-types/light-client/bootstrap.go index a477b89dca..1a7fd509b8 100644 --- a/consensus-types/light-client/bootstrap.go +++ b/consensus-types/light-client/bootstrap.go @@ -3,12 +3,12 @@ package 
light_client import ( "fmt" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - consensustypes "github.com/OffchainLabs/prysm/v6/consensus-types" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - pb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" - "github.com/OffchainLabs/prysm/v6/time/slots" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + consensustypes "github.com/OffchainLabs/prysm/v7/consensus-types" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + pb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" + "github.com/OffchainLabs/prysm/v7/time/slots" "google.golang.org/protobuf/proto" ) diff --git a/consensus-types/light-client/finality_update.go b/consensus-types/light-client/finality_update.go index b6445cacc9..96d9ee4ff7 100644 --- a/consensus-types/light-client/finality_update.go +++ b/consensus-types/light-client/finality_update.go @@ -3,12 +3,12 @@ package light_client import ( "fmt" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - consensustypes "github.com/OffchainLabs/prysm/v6/consensus-types" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - pb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/time/slots" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + consensustypes "github.com/OffchainLabs/prysm/v7/consensus-types" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + pb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/time/slots" "google.golang.org/protobuf/proto" ) diff --git a/consensus-types/light-client/header.go b/consensus-types/light-client/header.go index fbf71f5ad4..abfb14538c 100644 --- a/consensus-types/light-client/header.go +++ b/consensus-types/light-client/header.go @@ -3,14 +3,14 @@ package light_client import ( "fmt" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - consensustypes "github.com/OffchainLabs/prysm/v6/consensus-types" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - pb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" - "github.com/OffchainLabs/prysm/v6/time/slots" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + consensustypes "github.com/OffchainLabs/prysm/v7/consensus-types" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + pb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" + "github.com/OffchainLabs/prysm/v7/time/slots" "google.golang.org/protobuf/proto" ) diff --git a/consensus-types/light-client/helpers.go b/consensus-types/light-client/helpers.go index 0c90d5698c..b2f34c28e6 100644 --- a/consensus-types/light-client/helpers.go +++ b/consensus-types/light-client/helpers.go @@ -3,8 +3,8 @@ package light_client import ( "fmt" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" + fieldparams 
"github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" ) type branchConstraint interface { diff --git a/consensus-types/light-client/optimistic_update.go b/consensus-types/light-client/optimistic_update.go index 5c723588cc..b22b60ad97 100644 --- a/consensus-types/light-client/optimistic_update.go +++ b/consensus-types/light-client/optimistic_update.go @@ -3,11 +3,11 @@ package light_client import ( "fmt" - consensustypes "github.com/OffchainLabs/prysm/v6/consensus-types" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - pb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/time/slots" + consensustypes "github.com/OffchainLabs/prysm/v7/consensus-types" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + pb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/time/slots" "google.golang.org/protobuf/proto" ) diff --git a/consensus-types/light-client/update.go b/consensus-types/light-client/update.go index dfcf35bed3..f00eaf17c1 100644 --- a/consensus-types/light-client/update.go +++ b/consensus-types/light-client/update.go @@ -3,13 +3,13 @@ package light_client import ( "fmt" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - consensustypes "github.com/OffchainLabs/prysm/v6/consensus-types" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - pb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" - "github.com/OffchainLabs/prysm/v6/time/slots" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + consensustypes "github.com/OffchainLabs/prysm/v7/consensus-types" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + pb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" + "github.com/OffchainLabs/prysm/v7/time/slots" "google.golang.org/protobuf/proto" ) diff --git a/consensus-types/mock/BUILD.bazel b/consensus-types/mock/BUILD.bazel index 1491b70056..de3f370e10 100644 --- a/consensus-types/mock/BUILD.bazel +++ b/consensus-types/mock/BUILD.bazel @@ -3,7 +3,7 @@ load("@prysm//tools/go:def.bzl", "go_library") go_library( name = "go_default_library", srcs = ["block.go"], - importpath = "github.com/OffchainLabs/prysm/v6/consensus-types/mock", + importpath = "github.com/OffchainLabs/prysm/v7/consensus-types/mock", visibility = ["//visibility:public"], deps = [ "//config/fieldparams:go_default_library", diff --git a/consensus-types/mock/block.go b/consensus-types/mock/block.go index 2b8404406f..da5507067f 100644 --- a/consensus-types/mock/block.go +++ b/consensus-types/mock/block.go @@ -3,13 +3,13 @@ package mock import ( - field_params "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - enginev1 "github.com/OffchainLabs/prysm/v6/proto/engine/v1" - ethpb "github.com/OffchainLabs/prysm/v6/proto/eth/v1" - eth "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - validatorpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1/validator-client" + field_params 
"github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + enginev1 "github.com/OffchainLabs/prysm/v7/proto/engine/v1" + ethpb "github.com/OffchainLabs/prysm/v7/proto/eth/v1" + eth "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + validatorpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1/validator-client" ssz "github.com/prysmaticlabs/fastssz" "google.golang.org/protobuf/proto" ) diff --git a/consensus-types/payload-attribute/BUILD.bazel b/consensus-types/payload-attribute/BUILD.bazel index 4310ac7820..7d2f50506c 100644 --- a/consensus-types/payload-attribute/BUILD.bazel +++ b/consensus-types/payload-attribute/BUILD.bazel @@ -7,7 +7,7 @@ go_library( "interface.go", "types.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/consensus-types/payload-attribute", + importpath = "github.com/OffchainLabs/prysm/v7/consensus-types/payload-attribute", visibility = ["//visibility:public"], deps = [ "//config/fieldparams:go_default_library", diff --git a/consensus-types/payload-attribute/getters.go b/consensus-types/payload-attribute/getters.go index 0688ba794c..9752037f0d 100644 --- a/consensus-types/payload-attribute/getters.go +++ b/consensus-types/payload-attribute/getters.go @@ -1,9 +1,9 @@ package payloadattribute import ( - consensus_types "github.com/OffchainLabs/prysm/v6/consensus-types" - enginev1 "github.com/OffchainLabs/prysm/v6/proto/engine/v1" - "github.com/OffchainLabs/prysm/v6/runtime/version" + consensus_types "github.com/OffchainLabs/prysm/v7/consensus-types" + enginev1 "github.com/OffchainLabs/prysm/v7/proto/engine/v1" + "github.com/OffchainLabs/prysm/v7/runtime/version" ) // Version returns the version of the payload attribute. 
diff --git a/consensus-types/payload-attribute/getters_test.go b/consensus-types/payload-attribute/getters_test.go index 961527c540..b272f65b62 100644 --- a/consensus-types/payload-attribute/getters_test.go +++ b/consensus-types/payload-attribute/getters_test.go @@ -3,9 +3,9 @@ package payloadattribute import ( "testing" - enginev1 "github.com/OffchainLabs/prysm/v6/proto/engine/v1" - "github.com/OffchainLabs/prysm/v6/runtime/version" - "github.com/OffchainLabs/prysm/v6/testing/require" + enginev1 "github.com/OffchainLabs/prysm/v7/proto/engine/v1" + "github.com/OffchainLabs/prysm/v7/runtime/version" + "github.com/OffchainLabs/prysm/v7/testing/require" ) func TestPayloadAttributeGetters(t *testing.T) { diff --git a/consensus-types/payload-attribute/interface.go b/consensus-types/payload-attribute/interface.go index b9fe7f26a1..63ab3343ca 100644 --- a/consensus-types/payload-attribute/interface.go +++ b/consensus-types/payload-attribute/interface.go @@ -1,7 +1,7 @@ package payloadattribute import ( - enginev1 "github.com/OffchainLabs/prysm/v6/proto/engine/v1" + enginev1 "github.com/OffchainLabs/prysm/v7/proto/engine/v1" ) type Attributer interface { diff --git a/consensus-types/payload-attribute/types.go b/consensus-types/payload-attribute/types.go index 144ffb1f7d..563f7dff8b 100644 --- a/consensus-types/payload-attribute/types.go +++ b/consensus-types/payload-attribute/types.go @@ -1,12 +1,12 @@ package payloadattribute import ( - field_params "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - enginev1 "github.com/OffchainLabs/prysm/v6/proto/engine/v1" - "github.com/OffchainLabs/prysm/v6/runtime/version" + field_params "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + enginev1 "github.com/OffchainLabs/prysm/v7/proto/engine/v1" + "github.com/OffchainLabs/prysm/v7/runtime/version" "github.com/pkg/errors" ) diff --git a/consensus-types/primitives/BUILD.bazel b/consensus-types/primitives/BUILD.bazel index f27023c2a6..295326cc2f 100644 --- a/consensus-types/primitives/BUILD.bazel +++ b/consensus-types/primitives/BUILD.bazel @@ -18,7 +18,7 @@ go_library( "validator.go", "wei.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/consensus-types/primitives", + importpath = "github.com/OffchainLabs/prysm/v7/consensus-types/primitives", visibility = ["//visibility:public"], deps = [ "//math:go_default_library", diff --git a/consensus-types/primitives/epoch.go b/consensus-types/primitives/epoch.go index 0a11f3e80a..e9b1d33c76 100644 --- a/consensus-types/primitives/epoch.go +++ b/consensus-types/primitives/epoch.go @@ -3,7 +3,7 @@ package primitives import ( "fmt" - "github.com/OffchainLabs/prysm/v6/math" + "github.com/OffchainLabs/prysm/v7/math" fssz "github.com/prysmaticlabs/fastssz" ) diff --git a/consensus-types/primitives/epoch_test.go b/consensus-types/primitives/epoch_test.go index aa2f80d0e5..54f9efe0d7 100644 --- a/consensus-types/primitives/epoch_test.go +++ b/consensus-types/primitives/epoch_test.go @@ -5,9 +5,9 @@ import ( "math" "testing" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - mathprysm "github.com/OffchainLabs/prysm/v6/math" - "github.com/OffchainLabs/prysm/v6/testing/require" + 
"github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + mathprysm "github.com/OffchainLabs/prysm/v7/math" + "github.com/OffchainLabs/prysm/v7/testing/require" ) func TestMaxEpoch(t *testing.T) { diff --git a/consensus-types/primitives/slot.go b/consensus-types/primitives/slot.go index 852f2c0e6c..da147608d5 100644 --- a/consensus-types/primitives/slot.go +++ b/consensus-types/primitives/slot.go @@ -3,7 +3,7 @@ package primitives import ( "fmt" - "github.com/OffchainLabs/prysm/v6/math" + "github.com/OffchainLabs/prysm/v7/math" fssz "github.com/prysmaticlabs/fastssz" ) diff --git a/consensus-types/primitives/slot_test.go b/consensus-types/primitives/slot_test.go index 3da08c6169..d989939b66 100644 --- a/consensus-types/primitives/slot_test.go +++ b/consensus-types/primitives/slot_test.go @@ -6,8 +6,8 @@ import ( "testing" "time" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - mathprysm "github.com/OffchainLabs/prysm/v6/math" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + mathprysm "github.com/OffchainLabs/prysm/v7/math" ) func TestSlot_Casting(t *testing.T) { diff --git a/consensus-types/primitives/sszbytes_test.go b/consensus-types/primitives/sszbytes_test.go index 9de164e830..6fc83e32c5 100644 --- a/consensus-types/primitives/sszbytes_test.go +++ b/consensus-types/primitives/sszbytes_test.go @@ -5,7 +5,7 @@ import ( "reflect" "testing" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" ) func TestSSZBytes_HashTreeRoot(t *testing.T) { diff --git a/consensus-types/primitives/sszuint64_test.go b/consensus-types/primitives/sszuint64_test.go index 18942ece5b..7c320750a8 100644 --- a/consensus-types/primitives/sszuint64_test.go +++ b/consensus-types/primitives/sszuint64_test.go @@ -5,7 +5,7 @@ import ( "strings" "testing" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" ) func TestSSZUint64_Limit(t *testing.T) { diff --git a/consensus-types/primitives/wei_test.go b/consensus-types/primitives/wei_test.go index faedc864a0..c005b46c94 100644 --- a/consensus-types/primitives/wei_test.go +++ b/consensus-types/primitives/wei_test.go @@ -5,8 +5,8 @@ import ( "math/big" "testing" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/testing/require" ) func TestWeiStringer(t *testing.T) { diff --git a/consensus-types/types.go b/consensus-types/types.go index 0e08cf677a..495fd9c18f 100644 --- a/consensus-types/types.go +++ b/consensus-types/types.go @@ -5,7 +5,7 @@ import ( "fmt" "sync/atomic" - "github.com/OffchainLabs/prysm/v6/runtime/version" + "github.com/OffchainLabs/prysm/v7/runtime/version" errors2 "github.com/pkg/errors" ) diff --git a/consensus-types/validator/BUILD.bazel b/consensus-types/validator/BUILD.bazel index 7345deda74..6e0af38eb1 100644 --- a/consensus-types/validator/BUILD.bazel +++ b/consensus-types/validator/BUILD.bazel @@ -6,7 +6,7 @@ go_library( "custom_types.go", "types.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/consensus-types/validator", + importpath = "github.com/OffchainLabs/prysm/v7/consensus-types/validator", visibility = ["//visibility:public"], deps = ["//consensus-types/primitives:go_default_library"], ) diff --git a/consensus-types/validator/custom_types_test.go 
b/consensus-types/validator/custom_types_test.go index 29743027a7..823d81a202 100644 --- a/consensus-types/validator/custom_types_test.go +++ b/consensus-types/validator/custom_types_test.go @@ -3,7 +3,7 @@ package validator import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/require" "k8s.io/apimachinery/pkg/util/yaml" ) diff --git a/consensus-types/validator/types.go b/consensus-types/validator/types.go index 84633884ef..22b747497f 100644 --- a/consensus-types/validator/types.go +++ b/consensus-types/validator/types.go @@ -1,7 +1,7 @@ package validator import ( - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" ) type Status int8 diff --git a/consensus-types/wrapper/BUILD.bazel b/consensus-types/wrapper/BUILD.bazel index 9c29e8ba54..52d2658813 100644 --- a/consensus-types/wrapper/BUILD.bazel +++ b/consensus-types/wrapper/BUILD.bazel @@ -3,7 +3,7 @@ load("@prysm//tools/go:def.bzl", "go_library") go_library( name = "go_default_library", srcs = ["metadata.go"], - importpath = "github.com/OffchainLabs/prysm/v6/consensus-types/wrapper", + importpath = "github.com/OffchainLabs/prysm/v7/consensus-types/wrapper", visibility = ["//visibility:public"], deps = [ "//proto/prysm/v1alpha1:go_default_library", diff --git a/consensus-types/wrapper/metadata.go b/consensus-types/wrapper/metadata.go index 1fb14668ab..08c5fce8f6 100644 --- a/consensus-types/wrapper/metadata.go +++ b/consensus-types/wrapper/metadata.go @@ -2,9 +2,9 @@ package wrapper import ( "github.com/OffchainLabs/go-bitfield" - pb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1/metadata" - "github.com/OffchainLabs/prysm/v6/runtime/version" + pb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1/metadata" + "github.com/OffchainLabs/prysm/v7/runtime/version" "google.golang.org/protobuf/proto" ) diff --git a/container/doubly-linked-list/BUILD.bazel b/container/doubly-linked-list/BUILD.bazel index f76285a207..5f159360ea 100644 --- a/container/doubly-linked-list/BUILD.bazel +++ b/container/doubly-linked-list/BUILD.bazel @@ -3,7 +3,7 @@ load("@prysm//tools/go:def.bzl", "go_library", "go_test") go_library( name = "go_default_library", srcs = ["list.go"], - importpath = "github.com/OffchainLabs/prysm/v6/container/doubly-linked-list", + importpath = "github.com/OffchainLabs/prysm/v7/container/doubly-linked-list", visibility = ["//visibility:public"], deps = ["@com_github_pkg_errors//:go_default_library"], ) diff --git a/container/doubly-linked-list/list_test.go b/container/doubly-linked-list/list_test.go index efb2193471..c76111acdc 100644 --- a/container/doubly-linked-list/list_test.go +++ b/container/doubly-linked-list/list_test.go @@ -3,8 +3,8 @@ package doublylinkedlist import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" ) func TestAppend(t *testing.T) { diff --git a/container/leaky-bucket/BUILD.bazel b/container/leaky-bucket/BUILD.bazel index e9994c41c9..731f5eb92e 100644 --- a/container/leaky-bucket/BUILD.bazel +++ b/container/leaky-bucket/BUILD.bazel @@ -7,7 +7,7 @@ go_library( "heap.go", "leakybucket.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/container/leaky-bucket", + importpath = 
"github.com/OffchainLabs/prysm/v7/container/leaky-bucket", visibility = ["//visibility:public"], ) diff --git a/container/multi-value-slice/BUILD.bazel b/container/multi-value-slice/BUILD.bazel index 61862ec517..740497f4ad 100644 --- a/container/multi-value-slice/BUILD.bazel +++ b/container/multi-value-slice/BUILD.bazel @@ -3,7 +3,7 @@ load("@prysm//tools/go:def.bzl", "go_library", "go_test") go_library( name = "go_default_library", srcs = ["multi_value_slice.go"], - importpath = "github.com/OffchainLabs/prysm/v6/container/multi-value-slice", + importpath = "github.com/OffchainLabs/prysm/v7/container/multi-value-slice", visibility = ["//visibility:public"], deps = ["@com_github_pkg_errors//:go_default_library"], ) diff --git a/container/multi-value-slice/multi_value_slice_test.go b/container/multi-value-slice/multi_value_slice_test.go index c8f6a94a45..e967b12b36 100644 --- a/container/multi-value-slice/multi_value_slice_test.go +++ b/container/multi-value-slice/multi_value_slice_test.go @@ -4,8 +4,8 @@ import ( "math/rand" "testing" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" ) type testObject struct { diff --git a/container/queue/BUILD.bazel b/container/queue/BUILD.bazel index c62ce6e4bd..d4ab3376a8 100644 --- a/container/queue/BUILD.bazel +++ b/container/queue/BUILD.bazel @@ -3,7 +3,7 @@ load("@prysm//tools/go:def.bzl", "go_library", "go_test") go_library( name = "go_default_library", srcs = ["priority_queue.go"], - importpath = "github.com/OffchainLabs/prysm/v6/container/queue", + importpath = "github.com/OffchainLabs/prysm/v7/container/queue", visibility = ["//visibility:public"], ) diff --git a/container/queue/priority_queue_test.go b/container/queue/priority_queue_test.go index 87f8b69c79..9be7eb6e70 100644 --- a/container/queue/priority_queue_test.go +++ b/container/queue/priority_queue_test.go @@ -6,7 +6,7 @@ import ( "testing" "time" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/require" ) // Ensure we satisfy the heap.Interface diff --git a/container/slice/BUILD.bazel b/container/slice/BUILD.bazel index 3418c7a4a3..bb611f32c4 100644 --- a/container/slice/BUILD.bazel +++ b/container/slice/BUILD.bazel @@ -6,7 +6,7 @@ go_library( "doc.go", "slice.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/container/slice", + importpath = "github.com/OffchainLabs/prysm/v7/container/slice", visibility = ["//visibility:public"], deps = ["//consensus-types/primitives:go_default_library"], ) diff --git a/container/slice/slice.go b/container/slice/slice.go index af8119c8f8..329885fbde 100644 --- a/container/slice/slice.go +++ b/container/slice/slice.go @@ -5,7 +5,7 @@ import ( "slices" "strings" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" ) // SubsetUint64 returns true if the first array is diff --git a/container/slice/slice_test.go b/container/slice/slice_test.go index 033b86161a..56aae7a9b3 100644 --- a/container/slice/slice_test.go +++ b/container/slice/slice_test.go @@ -5,9 +5,9 @@ import ( "sort" "testing" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/container/slice" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/container/slice" + 
"github.com/OffchainLabs/prysm/v7/testing/require" ) func TestSubsetUint64(t *testing.T) { diff --git a/container/thread-safe/BUILD.bazel b/container/thread-safe/BUILD.bazel index eb8ce95238..b74a9a9ad0 100644 --- a/container/thread-safe/BUILD.bazel +++ b/container/thread-safe/BUILD.bazel @@ -3,7 +3,7 @@ load("@prysm//tools/go:def.bzl", "go_library", "go_test") go_library( name = "go_default_library", srcs = ["map.go"], - importpath = "github.com/OffchainLabs/prysm/v6/container/thread-safe", + importpath = "github.com/OffchainLabs/prysm/v7/container/thread-safe", visibility = ["//visibility:public"], ) diff --git a/container/thread-safe/map_test.go b/container/thread-safe/map_test.go index fb9fde60b0..a3956318f3 100644 --- a/container/thread-safe/map_test.go +++ b/container/thread-safe/map_test.go @@ -5,7 +5,7 @@ import ( "sync" "testing" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/require" ) type safeMap struct { diff --git a/container/trie/BUILD.bazel b/container/trie/BUILD.bazel index 7876e7c318..43b5430bd8 100644 --- a/container/trie/BUILD.bazel +++ b/container/trie/BUILD.bazel @@ -6,7 +6,7 @@ go_library( "sparse_merkle.go", "zerohashes.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/container/trie", + importpath = "github.com/OffchainLabs/prysm/v7/container/trie", visibility = ["//visibility:public"], deps = [ "//crypto/hash:go_default_library", diff --git a/container/trie/sparse_merkle.go b/container/trie/sparse_merkle.go index 0973805827..2c95f367f1 100644 --- a/container/trie/sparse_merkle.go +++ b/container/trie/sparse_merkle.go @@ -6,9 +6,9 @@ import ( "encoding/binary" "fmt" - "github.com/OffchainLabs/prysm/v6/crypto/hash" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - protodb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/crypto/hash" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + protodb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" "github.com/pkg/errors" ) diff --git a/container/trie/sparse_merkle_test.go b/container/trie/sparse_merkle_test.go index 32b9fc3dda..dfc13413ff 100644 --- a/container/trie/sparse_merkle_test.go +++ b/container/trie/sparse_merkle_test.go @@ -4,15 +4,15 @@ import ( "strconv" "testing" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/container/trie" - contracts "github.com/OffchainLabs/prysm/v6/contracts/deposit/mock" - "github.com/OffchainLabs/prysm/v6/crypto/hash" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/container/trie" + contracts "github.com/OffchainLabs/prysm/v7/contracts/deposit/mock" + "github.com/OffchainLabs/prysm/v7/crypto/hash" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" "github.com/ethereum/go-ethereum/accounts/abi/bind" ) diff --git a/container/trie/sparse_merkle_trie_fuzz_test.go b/container/trie/sparse_merkle_trie_fuzz_test.go index 83cea4a1ea..d6ae3ae618 100644 --- 
a/container/trie/sparse_merkle_trie_fuzz_test.go +++ b/container/trie/sparse_merkle_trie_fuzz_test.go @@ -3,11 +3,11 @@ package trie_test import ( "testing" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/container/trie" - "github.com/OffchainLabs/prysm/v6/crypto/hash" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/container/trie" + "github.com/OffchainLabs/prysm/v7/crypto/hash" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/require" "github.com/golang/protobuf/proto" ) diff --git a/contracts/deposit/BUILD.bazel b/contracts/deposit/BUILD.bazel index 37f5d6eaa2..9990f9e718 100644 --- a/contracts/deposit/BUILD.bazel +++ b/contracts/deposit/BUILD.bazel @@ -8,7 +8,7 @@ go_library( "helper.go", "logs.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/contracts/deposit", + importpath = "github.com/OffchainLabs/prysm/v7/contracts/deposit", visibility = ["//visibility:public"], deps = [ "//beacon-chain/core/signing:go_default_library", diff --git a/contracts/deposit/contract_test.go b/contracts/deposit/contract_test.go index 24917cafc9..9f306b31da 100644 --- a/contracts/deposit/contract_test.go +++ b/contracts/deposit/contract_test.go @@ -4,11 +4,11 @@ import ( "encoding/binary" "testing" - depositcontract "github.com/OffchainLabs/prysm/v6/contracts/deposit" - "github.com/OffchainLabs/prysm/v6/contracts/deposit/mock" - "github.com/OffchainLabs/prysm/v6/runtime/interop" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" + depositcontract "github.com/OffchainLabs/prysm/v7/contracts/deposit" + "github.com/OffchainLabs/prysm/v7/contracts/deposit/mock" + "github.com/OffchainLabs/prysm/v7/runtime/interop" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" "github.com/ethereum/go-ethereum" "github.com/ethereum/go-ethereum/common" ) diff --git a/contracts/deposit/deposit.go b/contracts/deposit/deposit.go index 78ca9aca9c..adcb544d70 100644 --- a/contracts/deposit/deposit.go +++ b/contracts/deposit/deposit.go @@ -3,11 +3,11 @@ package deposit import ( - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/signing" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/crypto/bls" - "github.com/OffchainLabs/prysm/v6/crypto/hash" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/signing" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/crypto/bls" + "github.com/OffchainLabs/prysm/v7/crypto/hash" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" "github.com/pkg/errors" ) diff --git a/contracts/deposit/deposit_test.go b/contracts/deposit/deposit_test.go index ac99f859b4..0cd5837ce4 100644 --- a/contracts/deposit/deposit_test.go +++ b/contracts/deposit/deposit_test.go @@ -3,14 +3,14 @@ package deposit_test import ( "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/signing" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/contracts/deposit" - "github.com/OffchainLabs/prysm/v6/crypto/bls" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - 
"github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/signing" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/contracts/deposit" + "github.com/OffchainLabs/prysm/v7/crypto/bls" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" ) func TestDepositInput_GeneratesPb(t *testing.T) { diff --git a/contracts/deposit/deposit_tree_test.go b/contracts/deposit/deposit_tree_test.go index 68f819b680..fd228b8561 100644 --- a/contracts/deposit/deposit_tree_test.go +++ b/contracts/deposit/deposit_tree_test.go @@ -4,12 +4,12 @@ import ( "strconv" "testing" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/container/trie" - depositcontract "github.com/OffchainLabs/prysm/v6/contracts/deposit/mock" - "github.com/OffchainLabs/prysm/v6/runtime/interop" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/container/trie" + depositcontract "github.com/OffchainLabs/prysm/v7/contracts/deposit/mock" + "github.com/OffchainLabs/prysm/v7/runtime/interop" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" "github.com/ethereum/go-ethereum/accounts/abi/bind" ) diff --git a/contracts/deposit/mock/BUILD.bazel b/contracts/deposit/mock/BUILD.bazel index 1add8bd9d6..b4d8da34be 100644 --- a/contracts/deposit/mock/BUILD.bazel +++ b/contracts/deposit/mock/BUILD.bazel @@ -4,7 +4,7 @@ go_library( name = "go_default_library", testonly = True, srcs = ["mock.go"], - importpath = "github.com/OffchainLabs/prysm/v6/contracts/deposit/mock", + importpath = "github.com/OffchainLabs/prysm/v7/contracts/deposit/mock", visibility = ["//visibility:public"], deps = [ "//contracts/deposit:go_default_library", diff --git a/contracts/deposit/mock/mock.go b/contracts/deposit/mock/mock.go index b2b5239bf1..be30968c9c 100644 --- a/contracts/deposit/mock/mock.go +++ b/contracts/deposit/mock/mock.go @@ -6,7 +6,7 @@ import ( "math/big" "strings" - "github.com/OffchainLabs/prysm/v6/contracts/deposit" + "github.com/OffchainLabs/prysm/v7/contracts/deposit" "github.com/ethereum/go-ethereum/accounts/abi" "github.com/ethereum/go-ethereum/accounts/abi/bind" "github.com/ethereum/go-ethereum/common" diff --git a/crypto/bls/BUILD.bazel b/crypto/bls/BUILD.bazel index dbcb6d3bc8..da0551d419 100644 --- a/crypto/bls/BUILD.bazel +++ b/crypto/bls/BUILD.bazel @@ -9,7 +9,7 @@ go_library( "interface.go", "signature_batch.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/crypto/bls", + importpath = "github.com/OffchainLabs/prysm/v7/crypto/bls", visibility = ["//visibility:public"], deps = [ "//crypto/bls/blst:go_default_library", diff --git a/crypto/bls/bls.go b/crypto/bls/bls.go index a0c5a8c243..fbde5655c7 100644 --- a/crypto/bls/bls.go +++ b/crypto/bls/bls.go @@ -4,9 +4,9 @@ package bls import ( - "github.com/OffchainLabs/prysm/v6/crypto/bls/blst" - "github.com/OffchainLabs/prysm/v6/crypto/bls/common" - "github.com/OffchainLabs/prysm/v6/crypto/bls/herumi" + "github.com/OffchainLabs/prysm/v7/crypto/bls/blst" + "github.com/OffchainLabs/prysm/v7/crypto/bls/common" + "github.com/OffchainLabs/prysm/v7/crypto/bls/herumi" ) // Initialize 
herumi temporarily while we transition to blst for ethdo. diff --git a/crypto/bls/bls_test.go b/crypto/bls/bls_test.go index 0f9de4801b..f1a88e901a 100644 --- a/crypto/bls/bls_test.go +++ b/crypto/bls/bls_test.go @@ -3,8 +3,8 @@ package bls import ( "testing" - "github.com/OffchainLabs/prysm/v6/crypto/bls/common" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/crypto/bls/common" + "github.com/OffchainLabs/prysm/v7/testing/require" ) func TestDisallowZeroSecretKeys(t *testing.T) { diff --git a/crypto/bls/blst/BUILD.bazel b/crypto/bls/blst/BUILD.bazel index e6de17d4c9..f2ae81155d 100644 --- a/crypto/bls/blst/BUILD.bazel +++ b/crypto/bls/blst/BUILD.bazel @@ -14,7 +14,7 @@ go_library( "signature.go", "stub.go", # keep ], - importpath = "github.com/OffchainLabs/prysm/v6/crypto/bls/blst", + importpath = "github.com/OffchainLabs/prysm/v7/crypto/bls/blst", visibility = ["//visibility:public"], deps = select({ "@io_bazel_rules_go//go/platform:android_amd64": [ diff --git a/crypto/bls/blst/bls_benchmark_test.go b/crypto/bls/blst/bls_benchmark_test.go index 7785652da7..6f63a623ea 100644 --- a/crypto/bls/blst/bls_benchmark_test.go +++ b/crypto/bls/blst/bls_benchmark_test.go @@ -5,9 +5,9 @@ package blst_test import ( "testing" - "github.com/OffchainLabs/prysm/v6/crypto/bls/blst" - "github.com/OffchainLabs/prysm/v6/crypto/bls/common" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/crypto/bls/blst" + "github.com/OffchainLabs/prysm/v7/crypto/bls/common" + "github.com/OffchainLabs/prysm/v7/testing/require" ) func BenchmarkSignature_Verify(b *testing.B) { diff --git a/crypto/bls/blst/init.go b/crypto/bls/blst/init.go index 874527b65c..b148079bcc 100644 --- a/crypto/bls/blst/init.go +++ b/crypto/bls/blst/init.go @@ -6,8 +6,8 @@ import ( "fmt" "runtime" - "github.com/OffchainLabs/prysm/v6/cache/nonblocking" - "github.com/OffchainLabs/prysm/v6/crypto/bls/common" + "github.com/OffchainLabs/prysm/v7/cache/nonblocking" + "github.com/OffchainLabs/prysm/v7/crypto/bls/common" blst "github.com/supranational/blst/bindings/go" ) diff --git a/crypto/bls/blst/public_key.go b/crypto/bls/blst/public_key.go index ec23104a65..449f89e5fa 100644 --- a/crypto/bls/blst/public_key.go +++ b/crypto/bls/blst/public_key.go @@ -5,10 +5,10 @@ package blst import ( "fmt" - "github.com/OffchainLabs/prysm/v6/cache/nonblocking" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/crypto/bls/common" + "github.com/OffchainLabs/prysm/v7/cache/nonblocking" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/crypto/bls/common" "github.com/pkg/errors" ) diff --git a/crypto/bls/blst/public_key_test.go b/crypto/bls/blst/public_key_test.go index e17697fc41..f1eea07a1b 100644 --- a/crypto/bls/blst/public_key_test.go +++ b/crypto/bls/blst/public_key_test.go @@ -8,10 +8,10 @@ import ( "sync" "testing" - "github.com/OffchainLabs/prysm/v6/crypto/bls/blst" - "github.com/OffchainLabs/prysm/v6/crypto/bls/common" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/crypto/bls/blst" + "github.com/OffchainLabs/prysm/v7/crypto/bls/common" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" ) func TestPublicKeyFromBytes(t *testing.T) { diff 
--git a/crypto/bls/blst/secret_key.go b/crypto/bls/blst/secret_key.go index 365ba5115e..1ddb1d5f3e 100644 --- a/crypto/bls/blst/secret_key.go +++ b/crypto/bls/blst/secret_key.go @@ -6,9 +6,9 @@ import ( "crypto/subtle" "fmt" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/crypto/bls/common" - "github.com/OffchainLabs/prysm/v6/crypto/rand" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/crypto/bls/common" + "github.com/OffchainLabs/prysm/v7/crypto/rand" blst "github.com/supranational/blst/bindings/go" ) diff --git a/crypto/bls/blst/secret_key_test.go b/crypto/bls/blst/secret_key_test.go index 7d5a37f75e..d618a30e63 100644 --- a/crypto/bls/blst/secret_key_test.go +++ b/crypto/bls/blst/secret_key_test.go @@ -8,11 +8,11 @@ import ( "errors" "testing" - "github.com/OffchainLabs/prysm/v6/crypto/bls/blst" - "github.com/OffchainLabs/prysm/v6/crypto/bls/common" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/crypto/bls/blst" + "github.com/OffchainLabs/prysm/v7/crypto/bls/common" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" ) func TestMarshalUnmarshal(t *testing.T) { diff --git a/crypto/bls/blst/signature.go b/crypto/bls/blst/signature.go index 63835af203..d6e789adab 100644 --- a/crypto/bls/blst/signature.go +++ b/crypto/bls/blst/signature.go @@ -7,9 +7,9 @@ import ( "fmt" "sync" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/crypto/bls/common" - "github.com/OffchainLabs/prysm/v6/crypto/rand" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/crypto/bls/common" + "github.com/OffchainLabs/prysm/v7/crypto/rand" "github.com/pkg/errors" blst "github.com/supranational/blst/bindings/go" ) diff --git a/crypto/bls/blst/signature_test.go b/crypto/bls/blst/signature_test.go index d4e28bf6e4..2890bf7161 100644 --- a/crypto/bls/blst/signature_test.go +++ b/crypto/bls/blst/signature_test.go @@ -7,9 +7,9 @@ import ( "errors" "testing" - "github.com/OffchainLabs/prysm/v6/crypto/bls/common" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/crypto/bls/common" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" ) func TestSignVerify(t *testing.T) { diff --git a/crypto/bls/blst/stub.go b/crypto/bls/blst/stub.go index 5237024ca2..94e9009e79 100644 --- a/crypto/bls/blst/stub.go +++ b/crypto/bls/blst/stub.go @@ -3,7 +3,7 @@ package blst import ( - "github.com/OffchainLabs/prysm/v6/crypto/bls/common" + "github.com/OffchainLabs/prysm/v7/crypto/bls/common" ) // This stub file exists until build issues can be resolved for libfuzz. 
diff --git a/crypto/bls/common/BUILD.bazel b/crypto/bls/common/BUILD.bazel index ec9f4ae4b9..2e9b9a3e1d 100644 --- a/crypto/bls/common/BUILD.bazel +++ b/crypto/bls/common/BUILD.bazel @@ -7,7 +7,7 @@ go_library( "error.go", "interface.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/crypto/bls/common", + importpath = "github.com/OffchainLabs/prysm/v7/crypto/bls/common", visibility = ["//visibility:public"], deps = ["//config/fieldparams:go_default_library"], ) diff --git a/crypto/bls/common/constants.go b/crypto/bls/common/constants.go index fff12dbc0a..a798c8e55e 100644 --- a/crypto/bls/common/constants.go +++ b/crypto/bls/common/constants.go @@ -1,6 +1,6 @@ package common -import fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" +import fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" // ZeroSecretKey represents a zero secret key. var ZeroSecretKey = [32]byte{} diff --git a/crypto/bls/common/mock/BUILD.bazel b/crypto/bls/common/mock/BUILD.bazel index e3c9e3a877..e2b5ad57f9 100644 --- a/crypto/bls/common/mock/BUILD.bazel +++ b/crypto/bls/common/mock/BUILD.bazel @@ -3,7 +3,7 @@ load("@prysm//tools/go:def.bzl", "go_library") go_library( name = "go_default_library", srcs = ["interface_mock.go"], - importpath = "github.com/OffchainLabs/prysm/v6/crypto/bls/common/mock", + importpath = "github.com/OffchainLabs/prysm/v7/crypto/bls/common/mock", visibility = ["//visibility:public"], deps = [ "//crypto/bls/common:go_default_library", diff --git a/crypto/bls/common/mock/interface_mock.go b/crypto/bls/common/mock/interface_mock.go index bf6b89fd66..a2231af047 100644 --- a/crypto/bls/common/mock/interface_mock.go +++ b/crypto/bls/common/mock/interface_mock.go @@ -12,7 +12,7 @@ package mock import ( reflect "reflect" - common "github.com/OffchainLabs/prysm/v6/crypto/bls/common" + common "github.com/OffchainLabs/prysm/v7/crypto/bls/common" gomock "go.uber.org/mock/gomock" ) diff --git a/crypto/bls/herumi/BUILD.bazel b/crypto/bls/herumi/BUILD.bazel index 4929ce7a7a..9b00eb898a 100644 --- a/crypto/bls/herumi/BUILD.bazel +++ b/crypto/bls/herumi/BUILD.bazel @@ -5,7 +5,7 @@ load("@prysm//tools/go:def.bzl", "go_library") go_library( name = "go_default_library", srcs = ["init.go"], - importpath = "github.com/OffchainLabs/prysm/v6/crypto/bls/herumi", + importpath = "github.com/OffchainLabs/prysm/v7/crypto/bls/herumi", visibility = [ "//crypto/bls:__pkg__", ], diff --git a/crypto/bls/interface.go b/crypto/bls/interface.go index 04db77671e..66fd5883e7 100644 --- a/crypto/bls/interface.go +++ b/crypto/bls/interface.go @@ -1,7 +1,7 @@ package bls import ( - "github.com/OffchainLabs/prysm/v6/crypto/bls/common" + "github.com/OffchainLabs/prysm/v7/crypto/bls/common" ) // PublicKey represents a BLS public key. 
diff --git a/crypto/bls/signature_batch_test.go b/crypto/bls/signature_batch_test.go index 1b931f4bf2..a33c0b4a00 100644 --- a/crypto/bls/signature_batch_test.go +++ b/crypto/bls/signature_batch_test.go @@ -7,9 +7,9 @@ import ( "sort" "testing" - "github.com/OffchainLabs/prysm/v6/crypto/bls/common" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/crypto/bls/common" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" ) const TestSignature = "test signature" diff --git a/crypto/ecdsa/BUILD.bazel b/crypto/ecdsa/BUILD.bazel index ddfd8e47b0..96ab0df21b 100644 --- a/crypto/ecdsa/BUILD.bazel +++ b/crypto/ecdsa/BUILD.bazel @@ -3,7 +3,7 @@ load("@prysm//tools/go:def.bzl", "go_library", "go_test") go_library( name = "go_default_library", srcs = ["utils.go"], - importpath = "github.com/OffchainLabs/prysm/v6/crypto/ecdsa", + importpath = "github.com/OffchainLabs/prysm/v7/crypto/ecdsa", visibility = ["//visibility:public"], deps = [ "@com_github_btcsuite_btcd_btcec_v2//:go_default_library", diff --git a/crypto/ecdsa/utils_test.go b/crypto/ecdsa/utils_test.go index d3bb2651e0..4943529b5b 100644 --- a/crypto/ecdsa/utils_test.go +++ b/crypto/ecdsa/utils_test.go @@ -6,8 +6,8 @@ import ( "math/big" "testing" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" "github.com/btcsuite/btcd/btcec/v2" gcrypto "github.com/ethereum/go-ethereum/crypto" "github.com/libp2p/go-libp2p/core/crypto" diff --git a/crypto/hash/BUILD.bazel b/crypto/hash/BUILD.bazel index 77dbc9d481..8c1f95e8fb 100644 --- a/crypto/hash/BUILD.bazel +++ b/crypto/hash/BUILD.bazel @@ -3,7 +3,7 @@ load("@prysm//tools/go:def.bzl", "go_library", "go_test") go_library( name = "go_default_library", srcs = ["hash.go"], - importpath = "github.com/OffchainLabs/prysm/v6/crypto/hash", + importpath = "github.com/OffchainLabs/prysm/v7/crypto/hash", visibility = ["//visibility:public"], deps = [ "//encoding/bytesutil:go_default_library", diff --git a/crypto/hash/hash.go b/crypto/hash/hash.go index 38146c2def..972f006032 100644 --- a/crypto/hash/hash.go +++ b/crypto/hash/hash.go @@ -7,7 +7,7 @@ import ( "reflect" "sync" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" "github.com/minio/highwayhash" "github.com/minio/sha256-simd" fastssz "github.com/prysmaticlabs/fastssz" diff --git a/crypto/hash/hash_test.go b/crypto/hash/hash_test.go index 6222a72ee1..afd15a35e8 100644 --- a/crypto/hash/hash_test.go +++ b/crypto/hash/hash_test.go @@ -4,13 +4,13 @@ import ( "encoding/hex" "testing" - "github.com/OffchainLabs/prysm/v6/crypto/bls" - "github.com/OffchainLabs/prysm/v6/crypto/hash" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - pb "github.com/OffchainLabs/prysm/v6/proto/testing" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/crypto/bls" + "github.com/OffchainLabs/prysm/v7/crypto/hash" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + pb "github.com/OffchainLabs/prysm/v7/proto/testing" + "github.com/OffchainLabs/prysm/v7/testing/assert" + 
"github.com/OffchainLabs/prysm/v7/testing/require" fuzz "github.com/google/gofuzz" ) diff --git a/crypto/hash/htr/BUILD.bazel b/crypto/hash/htr/BUILD.bazel index 76ebe9e06d..00729be43b 100644 --- a/crypto/hash/htr/BUILD.bazel +++ b/crypto/hash/htr/BUILD.bazel @@ -3,7 +3,7 @@ load("@prysm//tools/go:def.bzl", "go_library", "go_test") go_library( name = "go_default_library", srcs = ["hashtree.go"], - importpath = "github.com/OffchainLabs/prysm/v6/crypto/hash/htr", + importpath = "github.com/OffchainLabs/prysm/v7/crypto/hash/htr", visibility = ["//visibility:public"], deps = ["@com_github_prysmaticlabs_gohashtree//:go_default_library"], ) diff --git a/crypto/hash/htr/hashtree_test.go b/crypto/hash/htr/hashtree_test.go index 2fabcd3e93..57f1e12ea2 100644 --- a/crypto/hash/htr/hashtree_test.go +++ b/crypto/hash/htr/hashtree_test.go @@ -4,7 +4,7 @@ import ( "sync" "testing" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/require" ) func Test_VectorizedSha256(t *testing.T) { diff --git a/crypto/keystore/BUILD.bazel b/crypto/keystore/BUILD.bazel index ecd4ffdab7..53ebdeb785 100644 --- a/crypto/keystore/BUILD.bazel +++ b/crypto/keystore/BUILD.bazel @@ -8,7 +8,7 @@ go_library( "keystore.go", "utils.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/crypto/keystore", + importpath = "github.com/OffchainLabs/prysm/v7/crypto/keystore", visibility = ["//visibility:public"], deps = [ "//crypto/bls:go_default_library", diff --git a/crypto/keystore/key.go b/crypto/keystore/key.go index 9f1b114624..e4d538ef84 100644 --- a/crypto/keystore/key.go +++ b/crypto/keystore/key.go @@ -24,8 +24,8 @@ import ( "os" "path/filepath" - "github.com/OffchainLabs/prysm/v6/crypto/bls" - "github.com/OffchainLabs/prysm/v6/io/file" + "github.com/OffchainLabs/prysm/v7/crypto/bls" + "github.com/OffchainLabs/prysm/v7/io/file" "github.com/pborman/uuid" ) diff --git a/crypto/keystore/key_test.go b/crypto/keystore/key_test.go index 4c9aa9dd64..0adc3b263e 100644 --- a/crypto/keystore/key_test.go +++ b/crypto/keystore/key_test.go @@ -6,9 +6,9 @@ import ( "path" "testing" - "github.com/OffchainLabs/prysm/v6/crypto/bls" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/crypto/bls" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/testing/require" "github.com/pborman/uuid" ) diff --git a/crypto/keystore/keystore.go b/crypto/keystore/keystore.go index b6911ff6f2..e4fbf98ad1 100644 --- a/crypto/keystore/keystore.go +++ b/crypto/keystore/keystore.go @@ -32,7 +32,7 @@ import ( "path/filepath" "strings" - "github.com/OffchainLabs/prysm/v6/crypto/bls" + "github.com/OffchainLabs/prysm/v7/crypto/bls" "github.com/minio/sha256-simd" "github.com/pborman/uuid" log "github.com/sirupsen/logrus" diff --git a/crypto/keystore/keystore_test.go b/crypto/keystore/keystore_test.go index 99b941a479..3ae6db4558 100644 --- a/crypto/keystore/keystore_test.go +++ b/crypto/keystore/keystore_test.go @@ -6,10 +6,10 @@ import ( "path" "testing" - "github.com/OffchainLabs/prysm/v6/crypto/bls" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/crypto/bls" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" "github.com/pborman/uuid" ) 
diff --git a/crypto/keystore/utils.go b/crypto/keystore/utils.go index dd426f4610..6b518438b1 100644 --- a/crypto/keystore/utils.go +++ b/crypto/keystore/utils.go @@ -25,8 +25,8 @@ import ( "fmt" "time" - "github.com/OffchainLabs/prysm/v6/crypto/bls" - prysmTime "github.com/OffchainLabs/prysm/v6/time" + "github.com/OffchainLabs/prysm/v7/crypto/bls" + prysmTime "github.com/OffchainLabs/prysm/v7/time" ) func aesCTRXOR(key, inText, iv []byte) ([]byte, error) { diff --git a/crypto/rand/BUILD.bazel b/crypto/rand/BUILD.bazel index 213a4583eb..dfdd0ad259 100644 --- a/crypto/rand/BUILD.bazel +++ b/crypto/rand/BUILD.bazel @@ -3,7 +3,7 @@ load("@prysm//tools/go:def.bzl", "go_library", "go_test") go_library( name = "go_default_library", srcs = ["rand.go"], - importpath = "github.com/OffchainLabs/prysm/v6/crypto/rand", + importpath = "github.com/OffchainLabs/prysm/v7/crypto/rand", visibility = ["//visibility:public"], ) diff --git a/crypto/rand/rand.go b/crypto/rand/rand.go index 5d94c2232b..1b5232b0d1 100644 --- a/crypto/rand/rand.go +++ b/crypto/rand/rand.go @@ -7,7 +7,7 @@ This limits the scope of code that needs to be hardened. There are two modes, one for deterministic and another non-deterministic randomness: 1. If deterministic pseudo-random generator is enough, use: - import "github.com/OffchainLabs/prysm/v6/crypto/rand" + import "github.com/OffchainLabs/prysm/v7/crypto/rand" randGen := rand.NewDeterministicGenerator() randGen.Intn(32) // or any other func defined in math.rand API @@ -20,7 +20,7 @@ There are two modes, one for deterministic and another non-deterministic randomn 2. For cryptographically secure non-deterministic mode (CSPRNG), use: - import "github.com/OffchainLabs/prysm/v6/crypto/rand" + import "github.com/OffchainLabs/prysm/v7/crypto/rand" randGen := rand.NewGenerator() randGen.Intn(32) // or any other func defined in math.rand API diff --git a/crypto/random/BUILD.bazel b/crypto/random/BUILD.bazel index 3b4adea6c5..cd4a615a24 100644 --- a/crypto/random/BUILD.bazel +++ b/crypto/random/BUILD.bazel @@ -3,7 +3,7 @@ load("@prysm//tools/go:def.bzl", "go_library", "go_test") go_library( name = "go_default_library", srcs = ["random.go"], - importpath = "github.com/OffchainLabs/prysm/v6/crypto/random", + importpath = "github.com/OffchainLabs/prysm/v7/crypto/random", visibility = ["//visibility:public"], deps = [ "@com_github_consensys_gnark_crypto//ecc/bls12-381/fr:go_default_library", diff --git a/crypto/random/random_test.go b/crypto/random/random_test.go index b0d21ddc24..3d61f3e241 100644 --- a/crypto/random/random_test.go +++ b/crypto/random/random_test.go @@ -3,8 +3,8 @@ package random import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" GoKZG "github.com/crate-crypto/go-kzg-4844" ) diff --git a/encoding/bytesutil/BUILD.bazel b/encoding/bytesutil/BUILD.bazel index f0fb698cd5..f32aaec8b0 100644 --- a/encoding/bytesutil/BUILD.bazel +++ b/encoding/bytesutil/BUILD.bazel @@ -11,7 +11,7 @@ go_library( "hex.go", "integers.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/encoding/bytesutil", + importpath = "github.com/OffchainLabs/prysm/v7/encoding/bytesutil", visibility = ["//visibility:public"], deps = [ "//config/fieldparams:go_default_library", diff --git a/encoding/bytesutil/bits_test.go b/encoding/bytesutil/bits_test.go index d984d879ec..93ded884cc 100644 --- a/encoding/bytesutil/bits_test.go +++ 
b/encoding/bytesutil/bits_test.go @@ -3,9 +3,9 @@ package bytesutil_test import ( "testing" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" ) func TestSetBit(t *testing.T) { diff --git a/encoding/bytesutil/bytes_test.go b/encoding/bytesutil/bytes_test.go index 0472a1da4c..e086831dce 100644 --- a/encoding/bytesutil/bytes_test.go +++ b/encoding/bytesutil/bytes_test.go @@ -6,8 +6,8 @@ import ( "reflect" "testing" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/testing/assert" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/testing/assert" "github.com/ethereum/go-ethereum/common/hexutil" ) diff --git a/encoding/bytesutil/eth_types.go b/encoding/bytesutil/eth_types.go index 0aac7abf36..b538b853bc 100644 --- a/encoding/bytesutil/eth_types.go +++ b/encoding/bytesutil/eth_types.go @@ -1,8 +1,8 @@ package bytesutil import ( - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" ) // EpochToBytesLittleEndian conversion. diff --git a/encoding/bytesutil/eth_types_test.go b/encoding/bytesutil/eth_types_test.go index 1a2fcb0353..904bb71540 100644 --- a/encoding/bytesutil/eth_types_test.go +++ b/encoding/bytesutil/eth_types_test.go @@ -3,10 +3,10 @@ package bytesutil_test import ( "testing" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" ) func TestZeroRoot(t *testing.T) { diff --git a/encoding/bytesutil/hex.go b/encoding/bytesutil/hex.go index 7290fdc7f0..c94a8fdf59 100644 --- a/encoding/bytesutil/hex.go +++ b/encoding/bytesutil/hex.go @@ -4,7 +4,7 @@ import ( "fmt" "regexp" - "github.com/OffchainLabs/prysm/v6/container/slice" + "github.com/OffchainLabs/prysm/v7/container/slice" "github.com/ethereum/go-ethereum/common/hexutil" "github.com/pkg/errors" ) diff --git a/encoding/bytesutil/hex_test.go b/encoding/bytesutil/hex_test.go index 47ab44e852..0b01f50349 100644 --- a/encoding/bytesutil/hex_test.go +++ b/encoding/bytesutil/hex_test.go @@ -3,9 +3,9 @@ package bytesutil_test import ( "testing" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/testing/assert" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/testing/assert" ) func TestIsHex(t *testing.T) { diff --git a/encoding/bytesutil/integers.go b/encoding/bytesutil/integers.go index 294e39d960..1e91542b4c 100644 --- a/encoding/bytesutil/integers.go +++ b/encoding/bytesutil/integers.go @@ -6,7 +6,7 @@ import ( "fmt" "math/big" - "github.com/OffchainLabs/prysm/v6/math" + 
"github.com/OffchainLabs/prysm/v7/math" ) // ToBytes returns integer x to bytes in little-endian format at the specified length. diff --git a/encoding/bytesutil/integers_test.go b/encoding/bytesutil/integers_test.go index bc27c3208c..968a390dd0 100644 --- a/encoding/bytesutil/integers_test.go +++ b/encoding/bytesutil/integers_test.go @@ -7,8 +7,8 @@ import ( "math/big" "testing" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/testing/assert" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/testing/assert" ) func TestToBytes(t *testing.T) { diff --git a/encoding/ssz/BUILD.bazel b/encoding/ssz/BUILD.bazel index 82ba5c636a..75f9148ead 100644 --- a/encoding/ssz/BUILD.bazel +++ b/encoding/ssz/BUILD.bazel @@ -9,7 +9,7 @@ go_library( "merkleize.go", "slice_root.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/encoding/ssz", + importpath = "github.com/OffchainLabs/prysm/v7/encoding/ssz", visibility = ["//visibility:public"], deps = [ "//config/fieldparams:go_default_library", diff --git a/encoding/ssz/detect/BUILD.bazel b/encoding/ssz/detect/BUILD.bazel index cb3cecd256..3a38d7217a 100644 --- a/encoding/ssz/detect/BUILD.bazel +++ b/encoding/ssz/detect/BUILD.bazel @@ -6,7 +6,7 @@ go_library( "configfork.go", "fieldspec.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/encoding/ssz/detect", + importpath = "github.com/OffchainLabs/prysm/v7/encoding/ssz/detect", visibility = ["//visibility:public"], deps = [ "//beacon-chain/state:go_default_library", diff --git a/encoding/ssz/detect/configfork.go b/encoding/ssz/detect/configfork.go index e74791879d..3550ffeb3c 100644 --- a/encoding/ssz/detect/configfork.go +++ b/encoding/ssz/detect/configfork.go @@ -3,17 +3,17 @@ package detect import ( "fmt" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" + "github.com/OffchainLabs/prysm/v7/time/slots" "github.com/pkg/errors" ssz "github.com/prysmaticlabs/fastssz" ) diff --git a/encoding/ssz/detect/configfork_test.go b/encoding/ssz/detect/configfork_test.go index 16272764bb..3bf0ed962c 100644 --- a/encoding/ssz/detect/configfork_test.go +++ b/encoding/ssz/detect/configfork_test.go @@ -4,17 +4,17 @@ import ( "fmt" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - 
"github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" + "github.com/OffchainLabs/prysm/v7/time/slots" ) func TestSlotFromBlock(t *testing.T) { diff --git a/encoding/ssz/detect/fieldspec.go b/encoding/ssz/detect/fieldspec.go index dccd1bf14f..18ec6878b8 100644 --- a/encoding/ssz/detect/fieldspec.go +++ b/encoding/ssz/detect/fieldspec.go @@ -3,7 +3,7 @@ package detect import ( "encoding/binary" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" "github.com/pkg/errors" ) diff --git a/encoding/ssz/detect/fieldspec_test.go b/encoding/ssz/detect/fieldspec_test.go index 1053e027dd..96e063c65a 100644 --- a/encoding/ssz/detect/fieldspec_test.go +++ b/encoding/ssz/detect/fieldspec_test.go @@ -4,7 +4,7 @@ import ( "encoding/binary" "testing" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/require" ) func TestTypeMismatch(t *testing.T) { diff --git a/encoding/ssz/equality/BUILD.bazel b/encoding/ssz/equality/BUILD.bazel index 9011b7c0ff..c6fb042d79 100644 --- a/encoding/ssz/equality/BUILD.bazel +++ b/encoding/ssz/equality/BUILD.bazel @@ -3,7 +3,7 @@ load("@prysm//tools/go:def.bzl", "go_library", "go_test") go_library( name = "go_default_library", srcs = ["deep_equal.go"], - importpath = "github.com/OffchainLabs/prysm/v6/encoding/ssz/equality", + importpath = "github.com/OffchainLabs/prysm/v7/encoding/ssz/equality", visibility = ["//visibility:public"], deps = [ "//consensus-types/primitives:go_default_library", diff --git a/encoding/ssz/equality/deep_equal.go b/encoding/ssz/equality/deep_equal.go index e51bfce69d..d7e6714510 100644 --- a/encoding/ssz/equality/deep_equal.go +++ b/encoding/ssz/equality/deep_equal.go @@ -4,7 +4,7 @@ import ( "reflect" "unsafe" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" "google.golang.org/protobuf/proto" ) diff --git a/encoding/ssz/equality/deep_equal_test.go b/encoding/ssz/equality/deep_equal_test.go index 589c4174dd..31b7357954 100644 --- a/encoding/ssz/equality/deep_equal_test.go +++ b/encoding/ssz/equality/deep_equal_test.go @@ -3,9 +3,9 @@ package equality_test import ( "testing" - "github.com/OffchainLabs/prysm/v6/encoding/ssz/equality" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" + "github.com/OffchainLabs/prysm/v7/encoding/ssz/equality" + ethpb 
"github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" ) func TestDeepEqualBasicTypes(t *testing.T) { diff --git a/encoding/ssz/hashers_test.go b/encoding/ssz/hashers_test.go index 2e7c33ad3f..3377cd9fa7 100644 --- a/encoding/ssz/hashers_test.go +++ b/encoding/ssz/hashers_test.go @@ -3,9 +3,9 @@ package ssz_test import ( "testing" - "github.com/OffchainLabs/prysm/v6/crypto/hash" - "github.com/OffchainLabs/prysm/v6/encoding/ssz" - "github.com/OffchainLabs/prysm/v6/testing/assert" + "github.com/OffchainLabs/prysm/v7/crypto/hash" + "github.com/OffchainLabs/prysm/v7/encoding/ssz" + "github.com/OffchainLabs/prysm/v7/testing/assert" ) func TestHash(t *testing.T) { diff --git a/encoding/ssz/helpers.go b/encoding/ssz/helpers.go index f2c6d4ca4e..c5c9553037 100644 --- a/encoding/ssz/helpers.go +++ b/encoding/ssz/helpers.go @@ -6,7 +6,7 @@ import ( "encoding/binary" "github.com/OffchainLabs/go-bitfield" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" "github.com/minio/sha256-simd" "github.com/pkg/errors" ) diff --git a/encoding/ssz/helpers_test.go b/encoding/ssz/helpers_test.go index db66f467ff..d463ffb806 100644 --- a/encoding/ssz/helpers_test.go +++ b/encoding/ssz/helpers_test.go @@ -4,9 +4,9 @@ import ( "testing" "github.com/OffchainLabs/go-bitfield" - "github.com/OffchainLabs/prysm/v6/encoding/ssz" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/encoding/ssz" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" ) const merkleizingListLimitError = "merkleizing list that is too large, over limit" diff --git a/encoding/ssz/htrutils.go b/encoding/ssz/htrutils.go index abedbac03c..9ab74ca3e1 100644 --- a/encoding/ssz/htrutils.go +++ b/encoding/ssz/htrutils.go @@ -4,10 +4,10 @@ import ( "bytes" "encoding/binary" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - enginev1 "github.com/OffchainLabs/prysm/v6/proto/engine/v1" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + enginev1 "github.com/OffchainLabs/prysm/v7/proto/engine/v1" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" "github.com/pkg/errors" ) diff --git a/encoding/ssz/htrutils_fuzz_test.go b/encoding/ssz/htrutils_fuzz_test.go index 45306593bb..d77d264428 100644 --- a/encoding/ssz/htrutils_fuzz_test.go +++ b/encoding/ssz/htrutils_fuzz_test.go @@ -5,9 +5,9 @@ package ssz_test import ( "testing" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/encoding/ssz" - pb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/encoding/ssz" + pb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" "github.com/pkg/errors" fssz "github.com/prysmaticlabs/fastssz" ) diff --git a/encoding/ssz/htrutils_test.go b/encoding/ssz/htrutils_test.go index acedb18dd3..0188fafca1 100644 --- a/encoding/ssz/htrutils_test.go +++ b/encoding/ssz/htrutils_test.go @@ -4,13 +4,13 @@ import ( "reflect" "testing" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - 
"github.com/OffchainLabs/prysm/v6/encoding/ssz" - enginev1 "github.com/OffchainLabs/prysm/v6/proto/engine/v1" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/encoding/ssz" + enginev1 "github.com/OffchainLabs/prysm/v7/proto/engine/v1" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" ) func TestUint64Root(t *testing.T) { diff --git a/encoding/ssz/merkleize.go b/encoding/ssz/merkleize.go index 2fd1f238db..76feec0469 100644 --- a/encoding/ssz/merkleize.go +++ b/encoding/ssz/merkleize.go @@ -3,8 +3,8 @@ package ssz import ( "encoding/binary" - "github.com/OffchainLabs/prysm/v6/container/trie" - "github.com/OffchainLabs/prysm/v6/crypto/hash/htr" + "github.com/OffchainLabs/prysm/v7/container/trie" + "github.com/OffchainLabs/prysm/v7/crypto/hash/htr" "github.com/pkg/errors" "github.com/prysmaticlabs/gohashtree" ) diff --git a/encoding/ssz/merkleize_test.go b/encoding/ssz/merkleize_test.go index c908b644c7..66ab893040 100644 --- a/encoding/ssz/merkleize_test.go +++ b/encoding/ssz/merkleize_test.go @@ -4,11 +4,11 @@ import ( "testing" "github.com/OffchainLabs/go-bitfield" - "github.com/OffchainLabs/prysm/v6/crypto/hash" - "github.com/OffchainLabs/prysm/v6/encoding/ssz" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/crypto/hash" + "github.com/OffchainLabs/prysm/v7/encoding/ssz" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" ) func TestGetDepth(t *testing.T) { diff --git a/encoding/ssz/query/BUILD.bazel b/encoding/ssz/query/BUILD.bazel index 5963ad69c3..a30062d300 100644 --- a/encoding/ssz/query/BUILD.bazel +++ b/encoding/ssz/query/BUILD.bazel @@ -17,7 +17,7 @@ go_library( "tag_parser.go", "vector.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/encoding/ssz/query", + importpath = "github.com/OffchainLabs/prysm/v7/encoding/ssz/query", visibility = ["//visibility:public"], deps = [ "//encoding/ssz:go_default_library", diff --git a/encoding/ssz/query/generalized_index.go b/encoding/ssz/query/generalized_index.go index 9a581c8287..50d96eda6a 100644 --- a/encoding/ssz/query/generalized_index.go +++ b/encoding/ssz/query/generalized_index.go @@ -4,7 +4,7 @@ import ( "errors" "fmt" - "github.com/OffchainLabs/prysm/v6/encoding/ssz" + "github.com/OffchainLabs/prysm/v7/encoding/ssz" ) const listBaseIndex = 2 diff --git a/encoding/ssz/query/generalized_index_test.go b/encoding/ssz/query/generalized_index_test.go index 25792807d4..2066768398 100644 --- a/encoding/ssz/query/generalized_index_test.go +++ b/encoding/ssz/query/generalized_index_test.go @@ -4,9 +4,9 @@ import ( "strings" "testing" - "github.com/OffchainLabs/prysm/v6/encoding/ssz/query" - sszquerypb "github.com/OffchainLabs/prysm/v6/proto/ssz_query/testing" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/encoding/ssz/query" + sszquerypb "github.com/OffchainLabs/prysm/v7/proto/ssz_query/testing" + 
"github.com/OffchainLabs/prysm/v7/testing/require" ) func TestGetIndicesFromPath_FixedNestedContainer(t *testing.T) { diff --git a/encoding/ssz/query/path_test.go b/encoding/ssz/query/path_test.go index 363aacfa44..8807d133c9 100644 --- a/encoding/ssz/query/path_test.go +++ b/encoding/ssz/query/path_test.go @@ -3,8 +3,8 @@ package query_test import ( "testing" - "github.com/OffchainLabs/prysm/v6/encoding/ssz/query" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/encoding/ssz/query" + "github.com/OffchainLabs/prysm/v7/testing/require" ) // Helper to get pointer to uint64 diff --git a/encoding/ssz/query/query_test.go b/encoding/ssz/query/query_test.go index 3c8de53cc1..cff044eec1 100644 --- a/encoding/ssz/query/query_test.go +++ b/encoding/ssz/query/query_test.go @@ -5,10 +5,10 @@ import ( "testing" "github.com/OffchainLabs/go-bitfield" - "github.com/OffchainLabs/prysm/v6/encoding/ssz/query" - "github.com/OffchainLabs/prysm/v6/encoding/ssz/query/testutil" - sszquerypb "github.com/OffchainLabs/prysm/v6/proto/ssz_query/testing" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/encoding/ssz/query" + "github.com/OffchainLabs/prysm/v7/encoding/ssz/query/testutil" + sszquerypb "github.com/OffchainLabs/prysm/v7/proto/ssz_query/testing" + "github.com/OffchainLabs/prysm/v7/testing/require" ) func TestSize(t *testing.T) { diff --git a/encoding/ssz/query/tag_parser_test.go b/encoding/ssz/query/tag_parser_test.go index 43bfbe8b79..1ca986ce2b 100644 --- a/encoding/ssz/query/tag_parser_test.go +++ b/encoding/ssz/query/tag_parser_test.go @@ -4,8 +4,8 @@ import ( "reflect" "testing" - "github.com/OffchainLabs/prysm/v6/encoding/ssz/query" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/encoding/ssz/query" + "github.com/OffchainLabs/prysm/v7/testing/require" ) func TestParseSSZTag(t *testing.T) { diff --git a/encoding/ssz/query/testutil/BUILD.bazel b/encoding/ssz/query/testutil/BUILD.bazel index 8e66a56bb3..f73affac51 100644 --- a/encoding/ssz/query/testutil/BUILD.bazel +++ b/encoding/ssz/query/testutil/BUILD.bazel @@ -8,7 +8,7 @@ go_library( "type.go", "util.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/encoding/ssz/query/testutil", + importpath = "github.com/OffchainLabs/prysm/v7/encoding/ssz/query/testutil", visibility = ["//visibility:public"], deps = [ "//encoding/ssz/query:go_default_library", diff --git a/encoding/ssz/query/testutil/runner.go b/encoding/ssz/query/testutil/runner.go index 066d1d4464..bb19c85bae 100644 --- a/encoding/ssz/query/testutil/runner.go +++ b/encoding/ssz/query/testutil/runner.go @@ -4,8 +4,8 @@ import ( "reflect" "testing" - "github.com/OffchainLabs/prysm/v6/encoding/ssz/query" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/encoding/ssz/query" + "github.com/OffchainLabs/prysm/v7/testing/require" ssz "github.com/prysmaticlabs/fastssz" ) diff --git a/encoding/ssz/query/testutil/type.go b/encoding/ssz/query/testutil/type.go index 6e300adc20..5f4ce727cc 100644 --- a/encoding/ssz/query/testutil/type.go +++ b/encoding/ssz/query/testutil/type.go @@ -1,6 +1,6 @@ package testutil -import "github.com/OffchainLabs/prysm/v6/encoding/ssz/query" +import "github.com/OffchainLabs/prysm/v7/encoding/ssz/query" type PathTest struct { Path string diff --git a/genesis/BUILD.bazel b/genesis/BUILD.bazel index 22e4cfe67f..5c1f396768 100644 --- a/genesis/BUILD.bazel +++ b/genesis/BUILD.bazel @@ -11,7 +11,7 @@ go_library( 
"storage.go", "testing.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/genesis", + importpath = "github.com/OffchainLabs/prysm/v7/genesis", visibility = ["//visibility:public"], deps = [ "//api/client:go_default_library", diff --git a/genesis/embedded.go b/genesis/embedded.go index 185491eec9..e0cda3e183 100644 --- a/genesis/embedded.go +++ b/genesis/embedded.go @@ -3,9 +3,9 @@ package genesis import ( "time" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/genesis/internal/embedded" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/genesis/internal/embedded" ) var embeddedGenesisData map[string]GenesisData diff --git a/genesis/embedded_test.go b/genesis/embedded_test.go index 7b7ed87243..26e5ae8d40 100644 --- a/genesis/embedded_test.go +++ b/genesis/embedded_test.go @@ -3,9 +3,9 @@ package genesis import ( "testing" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/genesis/internal/embedded" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/genesis/internal/embedded" + "github.com/OffchainLabs/prysm/v7/testing/require" ) func TestEmbededGenesisDataMatchesMainnet(t *testing.T) { diff --git a/genesis/initialize.go b/genesis/initialize.go index c70367a964..4721375112 100644 --- a/genesis/initialize.go +++ b/genesis/initialize.go @@ -8,9 +8,9 @@ import ( "strings" "time" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" "github.com/ethereum/go-ethereum/common/hexutil" "github.com/pkg/errors" ) diff --git a/genesis/initialize_test.go b/genesis/initialize_test.go index df769d7030..af4fef1aba 100644 --- a/genesis/initialize_test.go +++ b/genesis/initialize_test.go @@ -8,14 +8,14 @@ import ( "testing" "time" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/genesis" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/genesis" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" "github.com/ethereum/go-ethereum/common/hexutil" ) diff --git a/genesis/internal/embedded/BUILD.bazel b/genesis/internal/embedded/BUILD.bazel index 957c7a63db..f0f5bb8039 100644 --- a/genesis/internal/embedded/BUILD.bazel +++ b/genesis/internal/embedded/BUILD.bazel @@ -7,7 +7,7 @@ go_library( "mainnet.go", ], embedsrcs = ["mainnet.ssz.snappy"], - importpath = 
"github.com/OffchainLabs/prysm/v6/genesis/internal/embedded", + importpath = "github.com/OffchainLabs/prysm/v7/genesis/internal/embedded", visibility = [ "//genesis:__pkg__", ], diff --git a/genesis/internal/embedded/lookup.go b/genesis/internal/embedded/lookup.go index 975963ef8d..4231c3ceb8 100644 --- a/genesis/internal/embedded/lookup.go +++ b/genesis/internal/embedded/lookup.go @@ -5,10 +5,10 @@ import ( _ "embed" "errors" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - "github.com/OffchainLabs/prysm/v6/config/params" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + "github.com/OffchainLabs/prysm/v7/config/params" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" "github.com/golang/snappy" ) diff --git a/genesis/internal/embedded/lookup_test.go b/genesis/internal/embedded/lookup_test.go index 4be626a703..9cfa7cd133 100644 --- a/genesis/internal/embedded/lookup_test.go +++ b/genesis/internal/embedded/lookup_test.go @@ -3,8 +3,8 @@ package embedded_test import ( "testing" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/genesis/internal/embedded" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/genesis/internal/embedded" ) func TestGenesisState(t *testing.T) { diff --git a/genesis/internal/embedded/mainnet.go b/genesis/internal/embedded/mainnet.go index 30d45291b4..ca855c7313 100644 --- a/genesis/internal/embedded/mainnet.go +++ b/genesis/internal/embedded/mainnet.go @@ -6,7 +6,7 @@ package embedded import ( _ "embed" - "github.com/OffchainLabs/prysm/v6/config/params" + "github.com/OffchainLabs/prysm/v7/config/params" ) var ( diff --git a/genesis/providers.go b/genesis/providers.go index b0fe1d0c1b..89728cb86b 100644 --- a/genesis/providers.go +++ b/genesis/providers.go @@ -5,10 +5,10 @@ import ( "fmt" "os" - "github.com/OffchainLabs/prysm/v6/api/client" - "github.com/OffchainLabs/prysm/v6/api/client/beacon" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/encoding/ssz/detect" + "github.com/OffchainLabs/prysm/v7/api/client" + "github.com/OffchainLabs/prysm/v7/api/client/beacon" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/encoding/ssz/detect" "github.com/pkg/errors" ) diff --git a/genesis/storage.go b/genesis/storage.go index b0c0298a99..0aa838404c 100644 --- a/genesis/storage.go +++ b/genesis/storage.go @@ -9,10 +9,10 @@ import ( "sync" "time" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/encoding/ssz/detect" - "github.com/OffchainLabs/prysm/v6/io/file" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/encoding/ssz/detect" + "github.com/OffchainLabs/prysm/v7/io/file" "github.com/ethereum/go-ethereum/common/hexutil" "github.com/pkg/errors" ) diff --git a/genesis/testing.go b/genesis/testing.go index 957642a004..6200d4e0ab 100644 --- a/genesis/testing.go +++ b/genesis/testing.go @@ -3,7 +3,7 @@ package genesis import ( "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" ) // StoreDuringTest temporarily replaces the 
package level GenesisData with the provided GenesisData diff --git a/go.mod b/go.mod index 5e32370d4b..d829269912 100644 --- a/go.mod +++ b/go.mod @@ -1,4 +1,4 @@ -module github.com/OffchainLabs/prysm/v6 +module github.com/OffchainLabs/prysm/v7 go 1.25.1 diff --git a/hack/update-go-pbs.sh b/hack/update-go-pbs.sh index 8aa042415a..025a0c5af8 100755 --- a/hack/update-go-pbs.sh +++ b/hack/update-go-pbs.sh @@ -12,7 +12,7 @@ while IFS= read -d $'\0' -r file; do done < <($findutil -L "$(bazel info bazel-bin)"/proto -type f -regextype sed -regex ".*pb\.go$" -print0) arraylength=${#file_list[@]} -searchstring="OffchainLabs/prysm/v6/" +searchstring="OffchainLabs/prysm/v7/" # Copy pb.go files from bazel-bin to original folder where .proto is. for ((i = 0; i < arraylength; i++)); do diff --git a/hack/update-mockgen.sh b/hack/update-mockgen.sh index b9b68feadb..30da332735 100755 --- a/hack/update-mockgen.sh +++ b/hack/update-mockgen.sh @@ -7,7 +7,7 @@ mock_path="testing/mock" iface_mock_path="testing/validator-mock" -# github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1 +# github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1 # ------------------------------------------------------ proto_mocks_v1alpha1=( "$mock_path/beacon_service_mock.go BeaconChainClient" @@ -21,10 +21,10 @@ for ((i = 0; i < ${#proto_mocks_v1alpha1[@]}; i++)); do interfaces=${proto_mocks_v1alpha1[i]#* }; echo "generating $file for interfaces: $interfaces"; echo - GO11MODULE=on mockgen -package=mock -destination="$file" github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1 "$interfaces" + GO11MODULE=on mockgen -package=mock -destination="$file" github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1 "$interfaces" done -# github.com/OffchainLabs/prysm/v6/validator/client/iface +# github.com/OffchainLabs/prysm/v7/validator/client/iface # -------------------------------------------------------- iface_mocks=( "$iface_mock_path/chain_client_mock.go ChainClient" @@ -37,13 +37,13 @@ for ((i = 0; i < ${#iface_mocks[@]}; i++)); do file=${iface_mocks[i]% *}; interfaces=${iface_mocks[i]#* }; echo "generating $file for interfaces: $interfaces"; - GO11MODULE=on mockgen -package=validator_mock -destination="$file" github.com/OffchainLabs/prysm/v6/validator/client/iface "$interfaces" + GO11MODULE=on mockgen -package=validator_mock -destination="$file" github.com/OffchainLabs/prysm/v7/validator/client/iface "$interfaces" done goimports -w "$mock_path/." gofmt -s -w "$mock_path/." -# github.com/OffchainLabs/prysm/v6/validator/client/beacon-api +# github.com/OffchainLabs/prysm/v7/validator/client/beacon-api # ------------------------------------------------------------- beacon_api_mock_path="validator/client/beacon-api/mock" beacon_api_mocks=( @@ -64,7 +64,7 @@ done goimports -w "$beacon_api_mock_path/." gofmt -s -w "$beacon_api_mock_path/." 
-# github.com/OffchainLabs/prysm/v6/crypto/bls +# github.com/OffchainLabs/prysm/v7/crypto/bls # -------------------------------------------- crypto_bls_common_mock_path="crypto/bls/common/mock" crypto_bls_common_mocks=( diff --git a/io/file/BUILD.bazel b/io/file/BUILD.bazel index 4df065d1c6..b409fec08d 100644 --- a/io/file/BUILD.bazel +++ b/io/file/BUILD.bazel @@ -6,7 +6,7 @@ go_library( "fileutil.go", "log.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/io/file", + importpath = "github.com/OffchainLabs/prysm/v7/io/file", visibility = ["//visibility:public"], deps = [ "//config/params:go_default_library", diff --git a/io/file/fileutil.go b/io/file/fileutil.go index 39f95bc682..004d162f33 100644 --- a/io/file/fileutil.go +++ b/io/file/fileutil.go @@ -12,7 +12,7 @@ import ( "sort" "strings" - "github.com/OffchainLabs/prysm/v6/config/params" + "github.com/OffchainLabs/prysm/v7/config/params" "github.com/pkg/errors" ) diff --git a/io/file/fileutil_test.go b/io/file/fileutil_test.go index 07007e6ca8..caf943326b 100644 --- a/io/file/fileutil_test.go +++ b/io/file/fileutil_test.go @@ -27,10 +27,10 @@ import ( "strings" "testing" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/io/file" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/io/file" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" ) func TestPathExpansion(t *testing.T) { diff --git a/io/logs/BUILD.bazel b/io/logs/BUILD.bazel index dd61d753d2..ac6b538156 100644 --- a/io/logs/BUILD.bazel +++ b/io/logs/BUILD.bazel @@ -6,7 +6,7 @@ go_library( "logutil.go", "stream.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/io/logs", + importpath = "github.com/OffchainLabs/prysm/v7/io/logs", visibility = ["//visibility:public"], deps = [ "//async/event:go_default_library", diff --git a/io/logs/logutil.go b/io/logs/logutil.go index e8461dafca..7208909115 100644 --- a/io/logs/logutil.go +++ b/io/logs/logutil.go @@ -9,8 +9,8 @@ import ( "path/filepath" "strings" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/io/file" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/io/file" "github.com/sirupsen/logrus" ) diff --git a/io/logs/logutil_test.go b/io/logs/logutil_test.go index be4719504f..7d848c8215 100644 --- a/io/logs/logutil_test.go +++ b/io/logs/logutil_test.go @@ -5,7 +5,7 @@ import ( "os" "testing" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/require" ) var urltests = []struct { diff --git a/io/logs/mock/BUILD.bazel b/io/logs/mock/BUILD.bazel index 7b3482105f..b7eb0e3e67 100644 --- a/io/logs/mock/BUILD.bazel +++ b/io/logs/mock/BUILD.bazel @@ -3,7 +3,7 @@ load("@prysm//tools/go:def.bzl", "go_library") go_library( name = "go_default_library", srcs = ["mock_stream.go"], - importpath = "github.com/OffchainLabs/prysm/v6/io/logs/mock", + importpath = "github.com/OffchainLabs/prysm/v7/io/logs/mock", visibility = ["//visibility:public"], deps = ["//async/event:go_default_library"], ) diff --git a/io/logs/mock/mock_stream.go b/io/logs/mock/mock_stream.go index 5951096345..2ec2eb3183 100644 --- a/io/logs/mock/mock_stream.go +++ b/io/logs/mock/mock_stream.go @@ -1,6 +1,6 @@ package mock -import "github.com/OffchainLabs/prysm/v6/async/event" +import 
"github.com/OffchainLabs/prysm/v7/async/event" type MockStreamer struct { logs [][]byte diff --git a/io/logs/stream.go b/io/logs/stream.go index 06e4b242dc..082c88fc2f 100644 --- a/io/logs/stream.go +++ b/io/logs/stream.go @@ -3,9 +3,9 @@ package logs import ( "io" - "github.com/OffchainLabs/prysm/v6/async/event" - lruwrpr "github.com/OffchainLabs/prysm/v6/cache/lru" - "github.com/OffchainLabs/prysm/v6/crypto/rand" + "github.com/OffchainLabs/prysm/v7/async/event" + lruwrpr "github.com/OffchainLabs/prysm/v7/cache/lru" + "github.com/OffchainLabs/prysm/v7/crypto/rand" lru "github.com/hashicorp/golang-lru" ) diff --git a/io/logs/stream_test.go b/io/logs/stream_test.go index c0ebc0da72..ef55beb7e9 100644 --- a/io/logs/stream_test.go +++ b/io/logs/stream_test.go @@ -3,7 +3,7 @@ package logs import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/require" ) func TestStreamServer_BackfillsMessages(t *testing.T) { diff --git a/io/prompt/BUILD.bazel b/io/prompt/BUILD.bazel index b5721bb82d..12c841cc2f 100644 --- a/io/prompt/BUILD.bazel +++ b/io/prompt/BUILD.bazel @@ -6,7 +6,7 @@ go_library( "prompt.go", "validate.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/io/prompt", + importpath = "github.com/OffchainLabs/prysm/v7/io/prompt", visibility = ["//visibility:public"], deps = [ "//io/file:go_default_library", diff --git a/io/prompt/prompt.go b/io/prompt/prompt.go index c6395df67c..d5009e6a64 100644 --- a/io/prompt/prompt.go +++ b/io/prompt/prompt.go @@ -7,7 +7,7 @@ import ( "os" "strings" - "github.com/OffchainLabs/prysm/v6/io/file" + "github.com/OffchainLabs/prysm/v7/io/file" "github.com/logrusorgru/aurora" "github.com/pkg/errors" log "github.com/sirupsen/logrus" diff --git a/io/prompt/validate_test.go b/io/prompt/validate_test.go index 1debad3d44..a64b38f037 100644 --- a/io/prompt/validate_test.go +++ b/io/prompt/validate_test.go @@ -4,8 +4,8 @@ import ( "os" "testing" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" ) func TestValidatePasswordInput(t *testing.T) { diff --git a/math/BUILD.bazel b/math/BUILD.bazel index 73991c1850..129a691d04 100644 --- a/math/BUILD.bazel +++ b/math/BUILD.bazel @@ -3,7 +3,7 @@ load("@prysm//tools/go:def.bzl", "go_library", "go_test") go_library( name = "go_default_library", srcs = ["math_helper.go"], - importpath = "github.com/OffchainLabs/prysm/v6/math", + importpath = "github.com/OffchainLabs/prysm/v7/math", visibility = ["//visibility:public"], deps = ["@com_github_thomaso_mirodin_intmath//u64:go_default_library"], ) diff --git a/math/math_helper_test.go b/math/math_helper_test.go index 5d7ac781f3..f90c740125 100644 --- a/math/math_helper_test.go +++ b/math/math_helper_test.go @@ -5,8 +5,8 @@ import ( stdmath "math" "testing" - "github.com/OffchainLabs/prysm/v6/math" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/math" + "github.com/OffchainLabs/prysm/v7/testing/require" ) func TestIntegerSquareRoot(t *testing.T) { diff --git a/monitoring/backup/BUILD.bazel b/monitoring/backup/BUILD.bazel index 299c86e2ba..80a729e826 100644 --- a/monitoring/backup/BUILD.bazel +++ b/monitoring/backup/BUILD.bazel @@ -3,7 +3,7 @@ load("@prysm//tools/go:def.bzl", "go_library") go_library( name = "go_default_library", srcs = ["http_backup_handler.go"], - importpath = 
"github.com/OffchainLabs/prysm/v6/monitoring/backup", + importpath = "github.com/OffchainLabs/prysm/v7/monitoring/backup", visibility = ["//visibility:public"], deps = ["@com_github_sirupsen_logrus//:go_default_library"], ) diff --git a/monitoring/clientstats/BUILD.bazel b/monitoring/clientstats/BUILD.bazel index cd165bce5d..390f712fb1 100644 --- a/monitoring/clientstats/BUILD.bazel +++ b/monitoring/clientstats/BUILD.bazel @@ -8,7 +8,7 @@ go_library( "types.go", "updaters.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/monitoring/clientstats", + importpath = "github.com/OffchainLabs/prysm/v7/monitoring/clientstats", visibility = ["//visibility:public"], deps = [ "//proto/prysm/v1alpha1:go_default_library", diff --git a/monitoring/clientstats/scrapers.go b/monitoring/clientstats/scrapers.go index 0519e83856..e2dde948ea 100644 --- a/monitoring/clientstats/scrapers.go +++ b/monitoring/clientstats/scrapers.go @@ -9,7 +9,7 @@ import ( "strconv" "time" - eth "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + eth "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" dto "github.com/prometheus/client_model/go" "github.com/prometheus/prom2json" log "github.com/sirupsen/logrus" diff --git a/monitoring/clientstats/scrapers_test.go b/monitoring/clientstats/scrapers_test.go index 645c846ff4..061baef141 100644 --- a/monitoring/clientstats/scrapers_test.go +++ b/monitoring/clientstats/scrapers_test.go @@ -9,7 +9,7 @@ import ( "testing" "time" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/require" "github.com/sirupsen/logrus" logTest "github.com/sirupsen/logrus/hooks/test" ) diff --git a/monitoring/journald/BUILD.bazel b/monitoring/journald/BUILD.bazel index b98545fb84..fd5e2509c2 100644 --- a/monitoring/journald/BUILD.bazel +++ b/monitoring/journald/BUILD.bazel @@ -7,7 +7,7 @@ go_library( "journald_linux.go", "journalhook_linux.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/monitoring/journald", + importpath = "github.com/OffchainLabs/prysm/v7/monitoring/journald", visibility = ["//visibility:public"], deps = select({ "@io_bazel_rules_go//go/platform:android": [ diff --git a/monitoring/progress/BUILD.bazel b/monitoring/progress/BUILD.bazel index b1d2fea13c..2c3168e26d 100644 --- a/monitoring/progress/BUILD.bazel +++ b/monitoring/progress/BUILD.bazel @@ -3,7 +3,7 @@ load("@prysm//tools/go:def.bzl", "go_library") go_library( name = "go_default_library", srcs = ["progress.go"], - importpath = "github.com/OffchainLabs/prysm/v6/monitoring/progress", + importpath = "github.com/OffchainLabs/prysm/v7/monitoring/progress", visibility = ["//visibility:public"], deps = [ "@com_github_k0kubun_go_ansi//:go_default_library", diff --git a/monitoring/prometheus/BUILD.bazel b/monitoring/prometheus/BUILD.bazel index 7809130de1..26c6277aab 100644 --- a/monitoring/prometheus/BUILD.bazel +++ b/monitoring/prometheus/BUILD.bazel @@ -8,7 +8,7 @@ go_library( "service.go", "simple_server.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/monitoring/prometheus", + importpath = "github.com/OffchainLabs/prysm/v7/monitoring/prometheus", visibility = ["//visibility:public"], deps = [ "//runtime:go_default_library", diff --git a/monitoring/prometheus/logrus_collector_test.go b/monitoring/prometheus/logrus_collector_test.go index 3362bc983c..5e4e57852a 100644 --- a/monitoring/prometheus/logrus_collector_test.go +++ b/monitoring/prometheus/logrus_collector_test.go @@ -10,9 +10,9 @@ import ( "testing" "time" - 
"github.com/OffchainLabs/prysm/v6/monitoring/prometheus" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/monitoring/prometheus" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" log "github.com/sirupsen/logrus" ) diff --git a/monitoring/prometheus/service.go b/monitoring/prometheus/service.go index 2c683559c9..96ed2f42b5 100644 --- a/monitoring/prometheus/service.go +++ b/monitoring/prometheus/service.go @@ -12,7 +12,7 @@ import ( "runtime/pprof" "time" - "github.com/OffchainLabs/prysm/v6/runtime" + "github.com/OffchainLabs/prysm/v7/runtime" "github.com/prometheus/client_golang/prometheus" "github.com/prometheus/client_golang/prometheus/promhttp" "github.com/sirupsen/logrus" diff --git a/monitoring/prometheus/service_test.go b/monitoring/prometheus/service_test.go index 3087ab3baa..00f85b4750 100644 --- a/monitoring/prometheus/service_test.go +++ b/monitoring/prometheus/service_test.go @@ -11,9 +11,9 @@ import ( "testing" "time" - "github.com/OffchainLabs/prysm/v6/runtime" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/runtime" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" "github.com/sirupsen/logrus" ) diff --git a/monitoring/tracing/BUILD.bazel b/monitoring/tracing/BUILD.bazel index 1927b58467..a6409cd39d 100644 --- a/monitoring/tracing/BUILD.bazel +++ b/monitoring/tracing/BUILD.bazel @@ -7,7 +7,7 @@ go_library( "recovery_interceptor_option.go", "tracer.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/monitoring/tracing", + importpath = "github.com/OffchainLabs/prysm/v7/monitoring/tracing", visibility = ["//visibility:public"], deps = [ "//monitoring/tracing/trace:go_default_library", diff --git a/monitoring/tracing/recovery_interceptor_option.go b/monitoring/tracing/recovery_interceptor_option.go index bf127224be..756440a63a 100644 --- a/monitoring/tracing/recovery_interceptor_option.go +++ b/monitoring/tracing/recovery_interceptor_option.go @@ -7,7 +7,7 @@ import ( "runtime" "runtime/debug" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing/trace" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing/trace" "github.com/sirupsen/logrus" ) diff --git a/monitoring/tracing/trace/BUILD.bazel b/monitoring/tracing/trace/BUILD.bazel index 3226244367..dc0ec7e33a 100644 --- a/monitoring/tracing/trace/BUILD.bazel +++ b/monitoring/tracing/trace/BUILD.bazel @@ -3,7 +3,7 @@ load("@prysm//tools/go:def.bzl", "go_library") go_library( name = "go_default_library", srcs = ["span.go"], - importpath = "github.com/OffchainLabs/prysm/v6/monitoring/tracing/trace", + importpath = "github.com/OffchainLabs/prysm/v7/monitoring/tracing/trace", visibility = ["//visibility:public"], deps = [ "@io_opentelemetry_go_otel//:go_default_library", diff --git a/monitoring/tracing/tracer.go b/monitoring/tracing/tracer.go index 488544808f..4ccfee2dc7 100644 --- a/monitoring/tracing/tracer.go +++ b/monitoring/tracing/tracer.go @@ -7,8 +7,8 @@ import ( "errors" "time" - prysmTrace "github.com/OffchainLabs/prysm/v6/monitoring/tracing/trace" - "github.com/OffchainLabs/prysm/v6/runtime/version" + prysmTrace "github.com/OffchainLabs/prysm/v7/monitoring/tracing/trace" + "github.com/OffchainLabs/prysm/v7/runtime/version" "github.com/sirupsen/logrus" "go.opentelemetry.io/otel" "go.opentelemetry.io/otel/attribute" diff --git 
a/network/BUILD.bazel b/network/BUILD.bazel index f6cfeeeed9..b159e854e5 100644 --- a/network/BUILD.bazel +++ b/network/BUILD.bazel @@ -7,7 +7,7 @@ go_library( "endpoint.go", "external_ip.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/network", + importpath = "github.com/OffchainLabs/prysm/v7/network", visibility = ["//visibility:public"], deps = [ "//network/authorization:go_default_library", diff --git a/network/auth_test.go b/network/auth_test.go index 8ec5ce8621..2b7c6327ab 100644 --- a/network/auth_test.go +++ b/network/auth_test.go @@ -7,8 +7,8 @@ import ( "testing" "time" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/testing/require" "github.com/golang-jwt/jwt/v4" ) diff --git a/network/authorization/BUILD.bazel b/network/authorization/BUILD.bazel index b9e30c8242..f782fc720d 100644 --- a/network/authorization/BUILD.bazel +++ b/network/authorization/BUILD.bazel @@ -3,6 +3,6 @@ load("@prysm//tools/go:def.bzl", "go_library") go_library( name = "go_default_library", srcs = ["authorization_method.go"], - importpath = "github.com/OffchainLabs/prysm/v6/network/authorization", + importpath = "github.com/OffchainLabs/prysm/v7/network/authorization", visibility = ["//visibility:public"], ) diff --git a/network/endpoint.go b/network/endpoint.go index 16e04a56ad..cc59382dc5 100644 --- a/network/endpoint.go +++ b/network/endpoint.go @@ -9,7 +9,7 @@ import ( "net/url" "strings" - "github.com/OffchainLabs/prysm/v6/network/authorization" + "github.com/OffchainLabs/prysm/v7/network/authorization" gethRPC "github.com/ethereum/go-ethereum/rpc" log "github.com/sirupsen/logrus" "go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp" diff --git a/network/endpoint_test.go b/network/endpoint_test.go index 6a9d932583..698a6d1cde 100644 --- a/network/endpoint_test.go +++ b/network/endpoint_test.go @@ -3,9 +3,9 @@ package network import ( "testing" - "github.com/OffchainLabs/prysm/v6/network/authorization" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/network/authorization" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" logTest "github.com/sirupsen/logrus/hooks/test" ) diff --git a/network/external_ip_test.go b/network/external_ip_test.go index 26663d0237..aeebc99d9d 100644 --- a/network/external_ip_test.go +++ b/network/external_ip_test.go @@ -5,9 +5,9 @@ import ( "regexp" "testing" - "github.com/OffchainLabs/prysm/v6/network" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/network" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" ) func TestExternalIPv4(t *testing.T) { diff --git a/network/httputil/BUILD.bazel b/network/httputil/BUILD.bazel index 70bce74783..27692da5f3 100644 --- a/network/httputil/BUILD.bazel +++ b/network/httputil/BUILD.bazel @@ -7,7 +7,7 @@ go_library( "reader.go", "writer.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/network/httputil", + importpath = "github.com/OffchainLabs/prysm/v7/network/httputil", visibility = ["//visibility:public"], deps = [ "//api:go_default_library", diff --git a/network/httputil/reader.go b/network/httputil/reader.go index e13ef2b626..8439c358e9 100644 --- 
a/network/httputil/reader.go +++ b/network/httputil/reader.go @@ -6,7 +6,7 @@ import ( "strconv" "strings" - "github.com/OffchainLabs/prysm/v6/api" + "github.com/OffchainLabs/prysm/v7/api" ) // match a number with optional decimals diff --git a/network/httputil/reader_test.go b/network/httputil/reader_test.go index 828a9bbbb9..8954e21fff 100644 --- a/network/httputil/reader_test.go +++ b/network/httputil/reader_test.go @@ -7,9 +7,9 @@ import ( "net/http/httptest" "testing" - "github.com/OffchainLabs/prysm/v6/api" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/api" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" ) func TestRespondWithSsz(t *testing.T) { diff --git a/network/httputil/writer.go b/network/httputil/writer.go index a025728da2..745b9409e2 100644 --- a/network/httputil/writer.go +++ b/network/httputil/writer.go @@ -8,7 +8,7 @@ import ( "net/http" "strconv" - "github.com/OffchainLabs/prysm/v6/api" + "github.com/OffchainLabs/prysm/v7/api" log "github.com/sirupsen/logrus" ) diff --git a/proto/dbval/BUILD.bazel b/proto/dbval/BUILD.bazel index 99a9ad6cb8..23c917a394 100644 --- a/proto/dbval/BUILD.bazel +++ b/proto/dbval/BUILD.bazel @@ -10,7 +10,7 @@ proto_library( go_proto_library( name = "dbval_go_proto", - importpath = "github.com/OffchainLabs/prysm/v6/proto/dbval", + importpath = "github.com/OffchainLabs/prysm/v7/proto/dbval", proto = ":dbval_proto", visibility = ["//visibility:public"], ) @@ -18,6 +18,6 @@ go_proto_library( go_library( name = "go_default_library", embed = [":dbval_go_proto"], - importpath = "github.com/OffchainLabs/prysm/v6/proto/dbval", + importpath = "github.com/OffchainLabs/prysm/v7/proto/dbval", visibility = ["//visibility:public"], ) diff --git a/proto/dbval/dbval.pb.go b/proto/dbval/dbval.pb.go index dfeda1c5db..ff8eafec05 100755 --- a/proto/dbval/dbval.pb.go +++ b/proto/dbval/dbval.pb.go @@ -116,7 +116,7 @@ var file_proto_dbval_dbval_proto_rawDesc = []byte{ 0x20, 0x01, 0x28, 0x0c, 0x52, 0x0a, 0x6f, 0x72, 0x69, 0x67, 0x69, 0x6e, 0x52, 0x6f, 0x6f, 0x74, 0x42, 0x34, 0x5a, 0x32, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, - 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2f, 0x64, 0x62, 0x76, 0x61, 0x6c, + 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2f, 0x64, 0x62, 0x76, 0x61, 0x6c, 0x3b, 0x64, 0x62, 0x76, 0x61, 0x6c, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, } diff --git a/proto/dbval/dbval.proto b/proto/dbval/dbval.proto index 37a20b97c8..ad65eaf292 100644 --- a/proto/dbval/dbval.proto +++ b/proto/dbval/dbval.proto @@ -2,7 +2,7 @@ syntax = "proto3"; package ethereum.eth.dbval; -option go_package = "github.com/OffchainLabs/prysm/v6/proto/dbval;dbval"; +option go_package = "github.com/OffchainLabs/prysm/v7/proto/dbval;dbval"; // BackfillStatus is a value used to keep track of the progress of the process // of backfilling blocks leading up to the origin block used to checkpoint sync diff --git a/proto/engine/v1/BUILD.bazel b/proto/engine/v1/BUILD.bazel index 9b456d1097..71058b0a4c 100644 --- a/proto/engine/v1/BUILD.bazel +++ b/proto/engine/v1/BUILD.bazel @@ -59,7 +59,7 @@ go_proto_library( compilers = [ "@com_github_prysmaticlabs_protoc_gen_go_cast//:go_cast_grpc", ], - importpath = "github.com/OffchainLabs/prysm/v6/proto/engine/v1", + importpath = 
"github.com/OffchainLabs/prysm/v6/proto/engine/v1", + importpath =
"github.com/OffchainLabs/prysm/v7/proto/engine/v1", proto = ":proto", visibility = ["//visibility:public"], deps = [ @@ -86,7 +86,7 @@ go_library( embed = [ ":go_proto", ], - importpath = "github.com/OffchainLabs/prysm/v6/proto/engine/v1", + importpath = "github.com/OffchainLabs/prysm/v7/proto/engine/v1", visibility = ["//visibility:public"], deps = [ "//config/fieldparams:go_default_library", diff --git a/proto/engine/v1/electra.pb.go b/proto/engine/v1/electra.pb.go index e11f82639e..bf80e25ff4 100755 --- a/proto/engine/v1/electra.pb.go +++ b/proto/engine/v1/electra.pb.go @@ -10,7 +10,7 @@ import ( reflect "reflect" sync "sync" - _ "github.com/OffchainLabs/prysm/v6/proto/eth/ext" + _ "github.com/OffchainLabs/prysm/v7/proto/eth/ext" protoreflect "google.golang.org/protobuf/reflect/protoreflect" protoimpl "google.golang.org/protobuf/runtime/protoimpl" ) @@ -435,7 +435,7 @@ var file_proto_engine_v1_electra_proto_rawDesc = []byte{ 0x69, 0x6e, 0x65, 0x2e, 0x76, 0x31, 0x42, 0x0c, 0x45, 0x6c, 0x65, 0x63, 0x74, 0x72, 0x61, 0x50, 0x72, 0x6f, 0x74, 0x6f, 0x50, 0x01, 0x5a, 0x39, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, - 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2f, 0x65, + 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2f, 0x65, 0x6e, 0x67, 0x69, 0x6e, 0x65, 0x2f, 0x76, 0x31, 0x3b, 0x65, 0x6e, 0x67, 0x69, 0x6e, 0x65, 0x76, 0x31, 0xaa, 0x02, 0x12, 0x45, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x45, 0x6e, 0x67, 0x69, 0x6e, 0x65, 0x2e, 0x56, 0x31, 0xca, 0x02, 0x12, 0x45, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, diff --git a/proto/engine/v1/electra.proto b/proto/engine/v1/electra.proto index 6f18e2aea5..737ccb852b 100644 --- a/proto/engine/v1/electra.proto +++ b/proto/engine/v1/electra.proto @@ -19,7 +19,7 @@ import "proto/eth/ext/options.proto"; import "proto/engine/v1/execution_engine.proto"; option csharp_namespace = "Ethereum.Engine.V1"; -option go_package = "github.com/OffchainLabs/prysm/v6/proto/engine/v1;enginev1"; +option go_package = "github.com/OffchainLabs/prysm/v7/proto/engine/v1;enginev1"; option java_multiple_files = true; option java_outer_classname = "ElectraProto"; option java_package = "org.ethereum.engine.v1"; diff --git a/proto/engine/v1/electra_test.go b/proto/engine/v1/electra_test.go index 281436c2e3..0d18971421 100644 --- a/proto/engine/v1/electra_test.go +++ b/proto/engine/v1/electra_test.go @@ -3,10 +3,10 @@ package enginev1_test import ( "testing" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - enginev1 "github.com/OffchainLabs/prysm/v6/proto/engine/v1" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + enginev1 "github.com/OffchainLabs/prysm/v7/proto/engine/v1" + "github.com/OffchainLabs/prysm/v7/testing/require" "github.com/ethereum/go-ethereum/common/hexutil" ) diff --git a/proto/engine/v1/engine.ssz.go b/proto/engine/v1/engine.ssz.go index 68435ca9d3..2278162d58 100644 --- a/proto/engine/v1/engine.ssz.go +++ b/proto/engine/v1/engine.ssz.go @@ -2,7 +2,7 @@ package enginev1 import ( - github_com_OffchainLabs_prysm_v6_consensus_types_primitives "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" + github_com_OffchainLabs_prysm_v7_consensus_types_primitives 
"github.com/OffchainLabs/prysm/v7/consensus-types/primitives" ssz "github.com/prysmaticlabs/fastssz" ) @@ -3072,7 +3072,7 @@ func (w *Withdrawal) UnmarshalSSZ(buf []byte) error { w.Index = ssz.UnmarshallUint64(buf[0:8]) // Field (1) 'ValidatorIndex' - w.ValidatorIndex = github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex(ssz.UnmarshallUint64(buf[8:16])) + w.ValidatorIndex = github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex(ssz.UnmarshallUint64(buf[8:16])) // Field (2) 'Address' if cap(w.Address) == 0 { diff --git a/proto/engine/v1/execution_engine.go b/proto/engine/v1/execution_engine.go index 67a37dcf6d..00b542b863 100644 --- a/proto/engine/v1/execution_engine.go +++ b/proto/engine/v1/execution_engine.go @@ -1,6 +1,6 @@ package enginev1 -import "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" +import "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" type copier[T any] interface { Copy() T diff --git a/proto/engine/v1/execution_engine.pb.go b/proto/engine/v1/execution_engine.pb.go index 6e8b71ec42..e7946d3f99 100755 --- a/proto/engine/v1/execution_engine.pb.go +++ b/proto/engine/v1/execution_engine.pb.go @@ -10,8 +10,8 @@ import ( reflect "reflect" sync "sync" - github_com_OffchainLabs_prysm_v6_consensus_types_primitives "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - _ "github.com/OffchainLabs/prysm/v6/proto/eth/ext" + github_com_OffchainLabs_prysm_v7_consensus_types_primitives "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + _ "github.com/OffchainLabs/prysm/v7/proto/eth/ext" protoreflect "google.golang.org/protobuf/reflect/protoreflect" protoimpl "google.golang.org/protobuf/runtime/protoimpl" ) @@ -1584,7 +1584,7 @@ func (x *ForkchoiceState) GetFinalizedBlockHash() []byte { type Withdrawal struct { state protoimpl.MessageState `protogen:"open.v1"` Index uint64 `protobuf:"varint,1,opt,name=index,proto3" json:"index,omitempty"` - ValidatorIndex github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex `protobuf:"varint,2,opt,name=validator_index,json=validatorIndex,proto3" json:"validator_index,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.ValidatorIndex"` + ValidatorIndex github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex `protobuf:"varint,2,opt,name=validator_index,json=validatorIndex,proto3" json:"validator_index,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.ValidatorIndex"` Address []byte `protobuf:"bytes,3,opt,name=address,proto3" json:"address,omitempty" ssz-size:"20"` Amount uint64 `protobuf:"varint,4,opt,name=amount,proto3" json:"amount,omitempty"` unknownFields protoimpl.UnknownFields @@ -1628,11 +1628,11 @@ func (x *Withdrawal) GetIndex() uint64 { return 0 } -func (x *Withdrawal) GetValidatorIndex() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex { +func (x *Withdrawal) GetValidatorIndex() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex { if x != nil { return x.ValidatorIndex } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex(0) } func (x *Withdrawal) GetAddress() []byte { @@ -2295,7 +2295,7 @@ var file_proto_engine_v1_execution_engine_proto_rawDesc = []byte{ 0x72, 0x5f, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x18, 0x02, 0x20, 0x01, 0x28, 0x04, 0x42, 0x4e, 0x82, 0xb5, 0x18, 0x4a, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 
0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, - 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, + 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x56, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x52, 0x0e, 0x76, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x12, 0x20, 0x0a, @@ -2342,7 +2342,7 @@ var file_proto_engine_v1_execution_engine_proto_rawDesc = []byte{ 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x45, 0x6e, 0x67, 0x69, 0x6e, 0x65, 0x50, 0x72, 0x6f, 0x74, 0x6f, 0x50, 0x01, 0x5a, 0x39, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, - 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2f, 0x65, 0x6e, 0x67, + 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2f, 0x65, 0x6e, 0x67, 0x69, 0x6e, 0x65, 0x2f, 0x76, 0x31, 0x3b, 0x65, 0x6e, 0x67, 0x69, 0x6e, 0x65, 0x76, 0x31, 0xaa, 0x02, 0x12, 0x45, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x45, 0x6e, 0x67, 0x69, 0x6e, 0x65, 0x2e, 0x56, 0x31, 0xca, 0x02, 0x12, 0x45, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x5c, diff --git a/proto/engine/v1/execution_engine.proto b/proto/engine/v1/execution_engine.proto index 3197c173d0..748b5d786a 100644 --- a/proto/engine/v1/execution_engine.proto +++ b/proto/engine/v1/execution_engine.proto @@ -18,7 +18,7 @@ package ethereum.engine.v1; import "proto/eth/ext/options.proto"; option csharp_namespace = "Ethereum.Engine.V1"; -option go_package = "github.com/OffchainLabs/prysm/v6/proto/engine/v1;enginev1"; +option go_package = "github.com/OffchainLabs/prysm/v7/proto/engine/v1;enginev1"; option java_multiple_files = true; option java_outer_classname = "ExecutionEngineProto"; option java_package = "org.ethereum.engine.v1"; @@ -221,7 +221,7 @@ message Withdrawal { // Validator index for the withdrawal uint64 validator_index = 2 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/" + "github.com/OffchainLabs/prysm/v7/consensus-types/" "primitives.ValidatorIndex" ]; // The execution address receiving the funds diff --git a/proto/engine/v1/execution_engine_fuzz_test.go b/proto/engine/v1/execution_engine_fuzz_test.go index 98746c43bd..43f9bc2f99 100644 --- a/proto/engine/v1/execution_engine_fuzz_test.go +++ b/proto/engine/v1/execution_engine_fuzz_test.go @@ -4,8 +4,8 @@ import ( "fmt" "testing" - enginev1 "github.com/OffchainLabs/prysm/v6/proto/engine/v1" - "github.com/OffchainLabs/prysm/v6/testing/require" + enginev1 "github.com/OffchainLabs/prysm/v7/proto/engine/v1" + "github.com/OffchainLabs/prysm/v7/testing/require" fuzz "github.com/google/gofuzz" ) diff --git a/proto/engine/v1/json_marshal_unmarshal.go b/proto/engine/v1/json_marshal_unmarshal.go index 7fca3ea192..d89c9988ec 100644 --- a/proto/engine/v1/json_marshal_unmarshal.go +++ b/proto/engine/v1/json_marshal_unmarshal.go @@ -6,10 +6,10 @@ import ( "reflect" "strings" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/runtime/version" + fieldparams 
"github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/runtime/version" + fieldparams
"github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/runtime/version" "github.com/ethereum/go-ethereum/common" "github.com/ethereum/go-ethereum/common/hexutil" gethtypes "github.com/ethereum/go-ethereum/core/types" diff --git a/proto/engine/v1/json_marshal_unmarshal_test.go b/proto/engine/v1/json_marshal_unmarshal_test.go index 940b7dfb96..a5bee9e040 100644 --- a/proto/engine/v1/json_marshal_unmarshal_test.go +++ b/proto/engine/v1/json_marshal_unmarshal_test.go @@ -6,11 +6,11 @@ import ( "math/big" "testing" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - enginev1 "github.com/OffchainLabs/prysm/v6/proto/engine/v1" - "github.com/OffchainLabs/prysm/v6/testing/require" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + enginev1 "github.com/OffchainLabs/prysm/v7/proto/engine/v1" + "github.com/OffchainLabs/prysm/v7/testing/require" "github.com/ethereum/go-ethereum/common" "github.com/ethereum/go-ethereum/common/hexutil" gethtypes "github.com/ethereum/go-ethereum/core/types" diff --git a/proto/eth/ext/BUILD.bazel b/proto/eth/ext/BUILD.bazel index 5eb8c6e421..e514b54f42 100644 --- a/proto/eth/ext/BUILD.bazel +++ b/proto/eth/ext/BUILD.bazel @@ -24,7 +24,7 @@ go_proto_library( compilers = [ "@com_github_prysmaticlabs_protoc_gen_go_cast//:go_cast", ], - importpath = "github.com/OffchainLabs/prysm/v6/proto/eth/ext", + importpath = "github.com/OffchainLabs/prysm/v7/proto/eth/ext", proto = ":proto", visibility = ["//visibility:public"], deps = [ @@ -40,7 +40,7 @@ go_proto_library( go_library( name = "go_default_library", embed = [":ext_go_proto"], - importpath = "github.com/OffchainLabs/prysm/v6/proto/eth/ext", + importpath = "github.com/OffchainLabs/prysm/v7/proto/eth/ext", visibility = ["//visibility:public"], deps = SSZ_DEPS + [ "@com_github_golang_protobuf//proto:go_default_library", @@ -53,7 +53,7 @@ go_library( go_proto_library( name = "ext_go_proto", - importpath = "github.com/OffchainLabs/prysm/v6/proto/eth/ext", + importpath = "github.com/OffchainLabs/prysm/v7/proto/eth/ext", proto = ":proto", visibility = ["//visibility:public"], ) diff --git a/proto/eth/ext/options.pb.go b/proto/eth/ext/options.pb.go index 9272fb952c..a7d8251f9d 100755 --- a/proto/eth/ext/options.pb.go +++ b/proto/eth/ext/options.pb.go @@ -95,7 +95,7 @@ var file_proto_eth_ext_options_proto_rawDesc = []byte{ 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x65, 0x78, 0x74, 0x42, 0x0c, 0x4f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x50, 0x72, 0x6f, 0x74, 0x6f, 0x50, 0x01, 0x5a, 0x2e, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, - 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x70, 0x72, + 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2f, 0x65, 0x74, 0x68, 0x2f, 0x65, 0x78, 0x74, 0xaa, 0x02, 0x10, 0x45, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x45, 0x74, 0x68, 0x2e, 0x65, 0x78, 0x74, 0xca, 0x02, 0x10, 0x45, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x5c, 0x45, 0x74, 0x68, 0x5c, 0x65, 0x78, diff --git 
a/proto/eth/ext/options.proto b/proto/eth/ext/options.proto index 510c1de834..a7297935b2 100644 --- a/proto/eth/ext/options.proto +++ b/proto/eth/ext/options.proto @@ -18,7 +18,7 @@ package ethereum.eth.ext; import "google/protobuf/descriptor.proto"; option csharp_namespace = "Ethereum.Eth.ext"; -option go_package = "github.com/OffchainLabs/prysm/v6/proto/eth/ext"; +option go_package = "github.com/OffchainLabs/prysm/v7/proto/eth/ext"; option java_multiple_files = true; option java_outer_classname = "OptionsProto"; option java_package = "org.ethereum.eth.ext"; diff --git a/proto/eth/v1/BUILD.bazel b/proto/eth/v1/BUILD.bazel index c8a1ac19b1..0e25663556 100644 --- a/proto/eth/v1/BUILD.bazel +++ b/proto/eth/v1/BUILD.bazel @@ -68,7 +68,7 @@ go_proto_library( compilers = [ "@com_github_prysmaticlabs_protoc_gen_go_cast//:go_cast_grpc", ], - importpath = "github.com/OffchainLabs/prysm/v6/proto/eth/v1", + importpath = "github.com/OffchainLabs/prysm/v7/proto/eth/v1", proto = ":proto", visibility = ["//visibility:public"], deps = [ @@ -92,7 +92,7 @@ go_library( embed = [ ":go_proto", ], - importpath = "github.com/OffchainLabs/prysm/v6/proto/eth/v1", + importpath = "github.com/OffchainLabs/prysm/v7/proto/eth/v1", visibility = ["//visibility:public"], deps = SSZ_DEPS + [ "@org_golang_google_protobuf//types/descriptorpb", diff --git a/proto/eth/v1/attestation.pb.go b/proto/eth/v1/attestation.pb.go index dd317f0428..a13a000aaf 100755 --- a/proto/eth/v1/attestation.pb.go +++ b/proto/eth/v1/attestation.pb.go @@ -11,8 +11,8 @@ import ( sync "sync" github_com_OffchainLabs_go_bitfield "github.com/OffchainLabs/go-bitfield" - github_com_OffchainLabs_prysm_v6_consensus_types_primitives "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - _ "github.com/OffchainLabs/prysm/v6/proto/eth/ext" + github_com_OffchainLabs_prysm_v7_consensus_types_primitives "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + _ "github.com/OffchainLabs/prysm/v7/proto/eth/ext" protoreflect "google.golang.org/protobuf/reflect/protoreflect" protoimpl "google.golang.org/protobuf/runtime/protoimpl" _ "google.golang.org/protobuf/types/descriptorpb" @@ -87,7 +87,7 @@ func (x *Attestation) GetSignature() []byte { type AggregateAttestationAndProof struct { state protoimpl.MessageState `protogen:"open.v1"` - AggregatorIndex github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex `protobuf:"varint,1,opt,name=aggregator_index,json=aggregatorIndex,proto3" json:"aggregator_index,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.ValidatorIndex"` + AggregatorIndex github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex `protobuf:"varint,1,opt,name=aggregator_index,json=aggregatorIndex,proto3" json:"aggregator_index,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.ValidatorIndex"` Aggregate *Attestation `protobuf:"bytes,3,opt,name=aggregate,proto3" json:"aggregate,omitempty"` SelectionProof []byte `protobuf:"bytes,2,opt,name=selection_proof,json=selectionProof,proto3" json:"selection_proof,omitempty" ssz-size:"96"` unknownFields protoimpl.UnknownFields @@ -124,11 +124,11 @@ func (*AggregateAttestationAndProof) Descriptor() ([]byte, []int) { return file_proto_eth_v1_attestation_proto_rawDescGZIP(), []int{1} } -func (x *AggregateAttestationAndProof) GetAggregatorIndex() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex { +func (x *AggregateAttestationAndProof) GetAggregatorIndex() 
github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex { if x != nil { return x.AggregatorIndex } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex(0) } func (x *AggregateAttestationAndProof) GetAggregate() *Attestation { @@ -199,8 +199,8 @@ func (x *SignedAggregateAttestationAndProof) GetSignature() []byte { type AttestationData struct { state protoimpl.MessageState `protogen:"open.v1"` - Slot github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot `protobuf:"varint,1,opt,name=slot,proto3" json:"slot,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot"` - Index github_com_OffchainLabs_prysm_v6_consensus_types_primitives.CommitteeIndex `protobuf:"varint,2,opt,name=index,proto3" json:"index,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.CommitteeIndex"` + Slot github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot `protobuf:"varint,1,opt,name=slot,proto3" json:"slot,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Slot"` + Index github_com_OffchainLabs_prysm_v7_consensus_types_primitives.CommitteeIndex `protobuf:"varint,2,opt,name=index,proto3" json:"index,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.CommitteeIndex"` BeaconBlockRoot []byte `protobuf:"bytes,3,opt,name=beacon_block_root,json=beaconBlockRoot,proto3" json:"beacon_block_root,omitempty" ssz-size:"32"` Source *Checkpoint `protobuf:"bytes,4,opt,name=source,proto3" json:"source,omitempty"` Target *Checkpoint `protobuf:"bytes,5,opt,name=target,proto3" json:"target,omitempty"` @@ -238,18 +238,18 @@ func (*AttestationData) Descriptor() ([]byte, []int) { return file_proto_eth_v1_attestation_proto_rawDescGZIP(), []int{3} } -func (x *AttestationData) GetSlot() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot { +func (x *AttestationData) GetSlot() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot { if x != nil { return x.Slot } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot(0) } -func (x *AttestationData) GetIndex() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.CommitteeIndex { +func (x *AttestationData) GetIndex() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.CommitteeIndex { if x != nil { return x.Index } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.CommitteeIndex(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.CommitteeIndex(0) } func (x *AttestationData) GetBeaconBlockRoot() []byte { @@ -275,7 +275,7 @@ func (x *AttestationData) GetTarget() *Checkpoint { type Checkpoint struct { state protoimpl.MessageState `protogen:"open.v1"` - Epoch github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch `protobuf:"varint,1,opt,name=epoch,proto3" json:"epoch,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Epoch"` + Epoch github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch `protobuf:"varint,1,opt,name=epoch,proto3" json:"epoch,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Epoch"` Root []byte `protobuf:"bytes,2,opt,name=root,proto3" json:"root,omitempty" ssz-size:"32"` unknownFields protoimpl.UnknownFields sizeCache protoimpl.SizeCache @@ -311,11 +311,11 @@ 
func (*Checkpoint) Descriptor() ([]byte, []int) { return file_proto_eth_v1_attestation_proto_rawDescGZIP(), []int{4} } -func (x *Checkpoint) GetEpoch() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch { +func (x *Checkpoint) GetEpoch() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch { if x != nil { return x.Epoch } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch(0) } func (x *Checkpoint) GetRoot() []byte { @@ -354,7 +354,7 @@ var file_proto_eth_v1_attestation_proto_rawDesc = []byte{ 0x69, 0x6e, 0x64, 0x65, 0x78, 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, 0x42, 0x4e, 0x82, 0xb5, 0x18, 0x4a, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, - 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, + 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x56, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x52, 0x0f, 0x61, 0x67, 0x67, 0x72, 0x65, 0x67, 0x61, 0x74, 0x6f, 0x72, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x12, 0x3a, 0x0a, 0x09, @@ -378,13 +378,13 @@ var file_proto_eth_v1_attestation_proto_rawDesc = []byte{ 0x61, 0x74, 0x61, 0x12, 0x58, 0x0a, 0x04, 0x73, 0x6c, 0x6f, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, 0x42, 0x44, 0x82, 0xb5, 0x18, 0x40, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, - 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, + 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x53, 0x6c, 0x6f, 0x74, 0x52, 0x04, 0x73, 0x6c, 0x6f, 0x74, 0x12, 0x64, 0x0a, 0x05, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x18, 0x02, 0x20, 0x01, 0x28, 0x04, 0x42, 0x4e, 0x82, 0xb5, 0x18, 0x4a, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, - 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, + 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x43, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x74, 0x65, 0x65, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x52, 0x05, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x12, 0x32, 0x0a, 0x11, 0x62, 0x65, 0x61, 0x63, 0x6f, 0x6e, 0x5f, 0x62, 0x6c, @@ -401,7 +401,7 @@ var file_proto_eth_v1_attestation_proto_rawDesc = []byte{ 0x12, 0x5b, 0x0a, 0x05, 0x65, 0x70, 0x6f, 0x63, 0x68, 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, 0x42, 0x45, 0x82, 0xb5, 0x18, 0x41, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, - 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, + 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 
0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x45, 0x70, 0x6f, 0x63, 0x68, 0x52, 0x05, 0x65, 0x70, 0x6f, 0x63, 0x68, 0x12, 0x1a, 0x0a, 0x04, 0x72, 0x6f, 0x6f, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x06, 0x8a, 0xb5, 0x18, @@ -410,7 +410,7 @@ var file_proto_eth_v1_attestation_proto_rawDesc = []byte{ 0x42, 0x10, 0x41, 0x74, 0x74, 0x65, 0x73, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x50, 0x72, 0x6f, 0x74, 0x6f, 0x50, 0x01, 0x5a, 0x2d, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, - 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2f, 0x65, 0x74, 0x68, + 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2f, 0x65, 0x74, 0x68, 0x2f, 0x76, 0x31, 0xaa, 0x02, 0x0f, 0x45, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x45, 0x74, 0x68, 0x2e, 0x56, 0x31, 0xca, 0x02, 0x0f, 0x45, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x5c, 0x45, 0x74, 0x68, 0x5c, 0x76, 0x31, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, diff --git a/proto/eth/v1/attestation.proto b/proto/eth/v1/attestation.proto index f86a0fb14c..1bd9a5ef0c 100644 --- a/proto/eth/v1/attestation.proto +++ b/proto/eth/v1/attestation.proto @@ -19,7 +19,7 @@ import "google/protobuf/descriptor.proto"; import "proto/eth/ext/options.proto"; option csharp_namespace = "Ethereum.Eth.V1"; -option go_package = "github.com/OffchainLabs/prysm/v6/proto/eth/v1"; +option go_package = "github.com/OffchainLabs/prysm/v7/proto/eth/v1"; option java_multiple_files = true; option java_outer_classname = "AttestationProto"; option java_package = "org.ethereum.eth.v1"; @@ -44,7 +44,7 @@ message AggregateAttestationAndProof { // The aggregator index that submitted this aggregated attestation and proof. uint64 aggregator_index = 1 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/" + "github.com/OffchainLabs/prysm/v7/consensus-types/" "primitives.ValidatorIndex" ]; // The aggregated attestation that was submitted. @@ -70,12 +70,12 @@ message AttestationData { // Slot of the attestation attesting for. uint64 slot = 1 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Slot" ]; // The committee index that submitted this attestation. uint64 index = 2 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/" + "github.com/OffchainLabs/prysm/v7/consensus-types/" "primitives.CommitteeIndex" ]; // 32 byte root of the LMD GHOST block vote. @@ -96,7 +96,7 @@ message Checkpoint { // Epoch the checkpoint references. uint64 epoch = 1 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Epoch" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Epoch" ]; // Block root of the checkpoint references. 
diff --git a/proto/eth/v1/beacon_block.pb.go b/proto/eth/v1/beacon_block.pb.go index c8357bb234..b4783afa7c 100755 --- a/proto/eth/v1/beacon_block.pb.go +++ b/proto/eth/v1/beacon_block.pb.go @@ -11,8 +11,8 @@ import ( sync "sync" github_com_OffchainLabs_go_bitfield "github.com/OffchainLabs/go-bitfield" - github_com_OffchainLabs_prysm_v6_consensus_types_primitives "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - _ "github.com/OffchainLabs/prysm/v6/proto/eth/ext" + github_com_OffchainLabs_prysm_v7_consensus_types_primitives "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + _ "github.com/OffchainLabs/prysm/v7/proto/eth/ext" protoreflect "google.golang.org/protobuf/reflect/protoreflect" protoimpl "google.golang.org/protobuf/runtime/protoimpl" _ "google.golang.org/protobuf/types/descriptorpb" @@ -27,8 +27,8 @@ const ( type BeaconBlock struct { state protoimpl.MessageState `protogen:"open.v1"` - Slot github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot `protobuf:"varint,1,opt,name=slot,proto3" json:"slot,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot"` - ProposerIndex github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex `protobuf:"varint,2,opt,name=proposer_index,json=proposerIndex,proto3" json:"proposer_index,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.ValidatorIndex"` + Slot github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot `protobuf:"varint,1,opt,name=slot,proto3" json:"slot,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Slot"` + ProposerIndex github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex `protobuf:"varint,2,opt,name=proposer_index,json=proposerIndex,proto3" json:"proposer_index,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.ValidatorIndex"` ParentRoot []byte `protobuf:"bytes,3,opt,name=parent_root,json=parentRoot,proto3" json:"parent_root,omitempty" ssz-size:"32"` StateRoot []byte `protobuf:"bytes,4,opt,name=state_root,json=stateRoot,proto3" json:"state_root,omitempty" ssz-size:"32"` Body *BeaconBlockBody `protobuf:"bytes,5,opt,name=body,proto3" json:"body,omitempty"` @@ -66,18 +66,18 @@ func (*BeaconBlock) Descriptor() ([]byte, []int) { return file_proto_eth_v1_beacon_block_proto_rawDescGZIP(), []int{0} } -func (x *BeaconBlock) GetSlot() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot { +func (x *BeaconBlock) GetSlot() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot { if x != nil { return x.Slot } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot(0) } -func (x *BeaconBlock) GetProposerIndex() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex { +func (x *BeaconBlock) GetProposerIndex() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex { if x != nil { return x.ProposerIndex } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex(0) } func (x *BeaconBlock) GetParentRoot() []byte { @@ -411,8 +411,8 @@ func (x *Deposit) GetData() *Deposit_Data { type VoluntaryExit struct { state protoimpl.MessageState `protogen:"open.v1"` - Epoch github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch `protobuf:"varint,1,opt,name=epoch,proto3" 
json:"epoch,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Epoch"` - ValidatorIndex github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex `protobuf:"varint,2,opt,name=validator_index,json=validatorIndex,proto3" json:"validator_index,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.ValidatorIndex"` + Epoch github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch `protobuf:"varint,1,opt,name=epoch,proto3" json:"epoch,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Epoch"` + ValidatorIndex github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex `protobuf:"varint,2,opt,name=validator_index,json=validatorIndex,proto3" json:"validator_index,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.ValidatorIndex"` unknownFields protoimpl.UnknownFields sizeCache protoimpl.SizeCache } @@ -447,18 +447,18 @@ func (*VoluntaryExit) Descriptor() ([]byte, []int) { return file_proto_eth_v1_beacon_block_proto_rawDescGZIP(), []int{6} } -func (x *VoluntaryExit) GetEpoch() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch { +func (x *VoluntaryExit) GetEpoch() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch { if x != nil { return x.Epoch } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch(0) } -func (x *VoluntaryExit) GetValidatorIndex() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex { +func (x *VoluntaryExit) GetValidatorIndex() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex { if x != nil { return x.ValidatorIndex } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex(0) } type SignedVoluntaryExit struct { @@ -575,8 +575,8 @@ func (x *Eth1Data) GetBlockHash() []byte { type BeaconBlockHeader struct { state protoimpl.MessageState `protogen:"open.v1"` - Slot github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot `protobuf:"varint,1,opt,name=slot,proto3" json:"slot,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot"` - ProposerIndex github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex `protobuf:"varint,2,opt,name=proposer_index,json=proposerIndex,proto3" json:"proposer_index,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.ValidatorIndex"` + Slot github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot `protobuf:"varint,1,opt,name=slot,proto3" json:"slot,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Slot"` + ProposerIndex github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex `protobuf:"varint,2,opt,name=proposer_index,json=proposerIndex,proto3" json:"proposer_index,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.ValidatorIndex"` ParentRoot []byte `protobuf:"bytes,3,opt,name=parent_root,json=parentRoot,proto3" json:"parent_root,omitempty" ssz-size:"32"` StateRoot []byte `protobuf:"bytes,4,opt,name=state_root,json=stateRoot,proto3" json:"state_root,omitempty" ssz-size:"32"` BodyRoot []byte `protobuf:"bytes,5,opt,name=body_root,json=bodyRoot,proto3" json:"body_root,omitempty" ssz-size:"32"` @@ -614,18 +614,18 @@ func (*BeaconBlockHeader) 
Descriptor() ([]byte, []int) { return file_proto_eth_v1_beacon_block_proto_rawDescGZIP(), []int{9} } -func (x *BeaconBlockHeader) GetSlot() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot { +func (x *BeaconBlockHeader) GetSlot() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot { if x != nil { return x.Slot } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot(0) } -func (x *BeaconBlockHeader) GetProposerIndex() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex { +func (x *BeaconBlockHeader) GetProposerIndex() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex { if x != nil { return x.ProposerIndex } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex(0) } func (x *BeaconBlockHeader) GetParentRoot() []byte { @@ -897,13 +897,13 @@ var file_proto_eth_v1_beacon_block_proto_rawDesc = []byte{ 0x6b, 0x12, 0x58, 0x0a, 0x04, 0x73, 0x6c, 0x6f, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, 0x42, 0x44, 0x82, 0xb5, 0x18, 0x40, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, - 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, + 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x53, 0x6c, 0x6f, 0x74, 0x52, 0x04, 0x73, 0x6c, 0x6f, 0x74, 0x12, 0x75, 0x0a, 0x0e, 0x70, 0x72, 0x6f, 0x70, 0x6f, 0x73, 0x65, 0x72, 0x5f, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x18, 0x02, 0x20, 0x01, 0x28, 0x04, 0x42, 0x4e, 0x82, 0xb5, 0x18, 0x4a, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, - 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, + 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x56, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x52, 0x0d, 0x70, 0x72, 0x6f, 0x70, 0x6f, 0x73, 0x65, 0x72, 0x49, 0x6e, 0x64, @@ -1000,14 +1000,14 @@ var file_proto_eth_v1_beacon_block_proto_rawDesc = []byte{ 0x6c, 0x75, 0x6e, 0x74, 0x61, 0x72, 0x79, 0x45, 0x78, 0x69, 0x74, 0x12, 0x5b, 0x0a, 0x05, 0x65, 0x70, 0x6f, 0x63, 0x68, 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, 0x42, 0x45, 0x82, 0xb5, 0x18, 0x41, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, - 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, + 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x45, 0x70, 0x6f, 0x63, 0x68, 0x52, 0x05, 0x65, 0x70, 0x6f, 0x63, 0x68, 0x12, 0x77, 0x0a, 0x0f, 0x76, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, 0x5f, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x18, 0x02, 0x20, 0x01, 0x28, 0x04, 0x42, 0x4e, 0x82, 0xb5, 0x18, 0x4a, 0x67, 0x69, 
0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, - 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, + 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x56, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x52, 0x0e, 0x76, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, 0x49, 0x6e, 0x64, 0x65, @@ -1031,13 +1031,13 @@ var file_proto_eth_v1_beacon_block_proto_rawDesc = []byte{ 0x65, 0x72, 0x12, 0x58, 0x0a, 0x04, 0x73, 0x6c, 0x6f, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, 0x42, 0x44, 0x82, 0xb5, 0x18, 0x40, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, - 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, + 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x53, 0x6c, 0x6f, 0x74, 0x52, 0x04, 0x73, 0x6c, 0x6f, 0x74, 0x12, 0x75, 0x0a, 0x0e, 0x70, 0x72, 0x6f, 0x70, 0x6f, 0x73, 0x65, 0x72, 0x5f, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x18, 0x02, 0x20, 0x01, 0x28, 0x04, 0x42, 0x4e, 0x82, 0xb5, 0x18, 0x4a, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, - 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, + 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x56, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x52, 0x0d, 0x70, 0x72, 0x6f, 0x70, 0x6f, 0x73, 0x65, 0x72, 0x49, 0x6e, @@ -1083,7 +1083,7 @@ var file_proto_eth_v1_beacon_block_proto_rawDesc = []byte{ 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x42, 0x10, 0x42, 0x65, 0x61, 0x63, 0x6f, 0x6e, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x50, 0x72, 0x6f, 0x74, 0x6f, 0x50, 0x01, 0x5a, 0x2d, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, - 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x70, + 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2f, 0x65, 0x74, 0x68, 0x2f, 0x76, 0x31, 0xaa, 0x02, 0x0f, 0x45, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x45, 0x74, 0x68, 0x2e, 0x56, 0x31, 0xca, 0x02, 0x0f, 0x45, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x5c, 0x45, 0x74, 0x68, 0x5c, 0x76, 0x31, 0x62, diff --git a/proto/eth/v1/beacon_block.proto b/proto/eth/v1/beacon_block.proto index cfa5e14213..e7067dffcd 100644 --- a/proto/eth/v1/beacon_block.proto +++ b/proto/eth/v1/beacon_block.proto @@ -20,7 +20,7 @@ import "proto/eth/ext/options.proto"; import "proto/eth/v1/attestation.proto"; option csharp_namespace = "Ethereum.Eth.V1"; -option go_package = "github.com/OffchainLabs/prysm/v6/proto/eth/v1"; +option go_package = "github.com/OffchainLabs/prysm/v7/proto/eth/v1"; option java_multiple_files = true; option java_outer_classname = 
"BeaconBlockProto"; option java_package = "org.ethereum.eth.v1"; @@ -32,12 +32,12 @@ message BeaconBlock { // Beacon chain slot that this block represents. uint64 slot = 1 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Slot" ]; // Validator index of the validator that proposed the block header. uint64 proposer_index = 2 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/" + "github.com/OffchainLabs/prysm/v7/" "consensus-types/primitives.ValidatorIndex" ]; // 32 byte root of the parent block. @@ -146,13 +146,13 @@ message VoluntaryExit { // The epoch on when exit request becomes valid. uint64 epoch = 1 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Epoch" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Epoch" ]; // Index of the exiting validator. uint64 validator_index = 2 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/" + "github.com/OffchainLabs/prysm/v7/consensus-types/" "primitives.ValidatorIndex" ]; } @@ -188,12 +188,12 @@ message BeaconBlockHeader { // Beacon chain slot that this block represents. uint64 slot = 1 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Slot" ]; // Validator index of the validator that proposed the block header. uint64 proposer_index = 2 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/" + "github.com/OffchainLabs/prysm/v7/" "consensus-types/primitives.ValidatorIndex" ]; // 32 byte merkle tree root of the parent ssz encoded block. diff --git a/proto/eth/v1/beacon_chain.pb.go b/proto/eth/v1/beacon_chain.pb.go index 7c33ccb06c..d11fad5acc 100755 --- a/proto/eth/v1/beacon_chain.pb.go +++ b/proto/eth/v1/beacon_chain.pb.go @@ -81,7 +81,7 @@ var file_proto_eth_v1_beacon_chain_proto_rawDesc = []byte{ 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x42, 0x10, 0x42, 0x65, 0x61, 0x63, 0x6f, 0x6e, 0x43, 0x68, 0x61, 0x69, 0x6e, 0x50, 0x72, 0x6f, 0x74, 0x6f, 0x50, 0x01, 0x5a, 0x2d, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, - 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x70, 0x72, 0x6f, + 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2f, 0x65, 0x74, 0x68, 0x2f, 0x76, 0x31, 0xaa, 0x02, 0x0f, 0x45, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x45, 0x74, 0x68, 0x2e, 0x56, 0x31, 0xca, 0x02, 0x0f, 0x45, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x5c, 0x45, 0x74, 0x68, 0x5c, 0x76, 0x31, 0x62, 0x06, 0x70, diff --git a/proto/eth/v1/beacon_chain.proto b/proto/eth/v1/beacon_chain.proto index dde012d243..714ea08fb3 100644 --- a/proto/eth/v1/beacon_chain.proto +++ b/proto/eth/v1/beacon_chain.proto @@ -18,7 +18,7 @@ package ethereum.eth.v1; import "google/protobuf/descriptor.proto"; option csharp_namespace = "Ethereum.Eth.V1"; -option go_package = "github.com/OffchainLabs/prysm/v6/proto/eth/v1"; +option go_package = "github.com/OffchainLabs/prysm/v7/proto/eth/v1"; option java_multiple_files = true; option java_outer_classname = "BeaconChainProto"; option java_package = "org.ethereum.eth.v1"; diff --git a/proto/eth/v1/events.pb.go b/proto/eth/v1/events.pb.go index cc634710a4..a3fc1279d2 100755 --- a/proto/eth/v1/events.pb.go +++ 
b/proto/eth/v1/events.pb.go @@ -10,8 +10,8 @@ import ( reflect "reflect" sync "sync" - github_com_OffchainLabs_prysm_v6_consensus_types_primitives "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - _ "github.com/OffchainLabs/prysm/v6/proto/eth/ext" + github_com_OffchainLabs_prysm_v7_consensus_types_primitives "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + _ "github.com/OffchainLabs/prysm/v7/proto/eth/ext" protoreflect "google.golang.org/protobuf/reflect/protoreflect" protoimpl "google.golang.org/protobuf/runtime/protoimpl" _ "google.golang.org/protobuf/types/descriptorpb" @@ -26,7 +26,7 @@ const ( type EventHead struct { state protoimpl.MessageState `protogen:"open.v1"` - Slot github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot `protobuf:"varint,1,opt,name=slot,proto3" json:"slot,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot"` + Slot github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot `protobuf:"varint,1,opt,name=slot,proto3" json:"slot,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Slot"` Block []byte `protobuf:"bytes,2,opt,name=block,proto3" json:"block,omitempty" ssz-size:"32"` State []byte `protobuf:"bytes,3,opt,name=state,proto3" json:"state,omitempty" ssz-size:"32"` EpochTransition bool `protobuf:"varint,4,opt,name=epoch_transition,json=epochTransition,proto3" json:"epoch_transition,omitempty"` @@ -67,11 +67,11 @@ func (*EventHead) Descriptor() ([]byte, []int) { return file_proto_eth_v1_events_proto_rawDescGZIP(), []int{0} } -func (x *EventHead) GetSlot() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot { +func (x *EventHead) GetSlot() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot { if x != nil { return x.Slot } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot(0) } func (x *EventHead) GetBlock() []byte { @@ -118,7 +118,7 @@ func (x *EventHead) GetExecutionOptimistic() bool { type EventBlock struct { state protoimpl.MessageState `protogen:"open.v1"` - Slot github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot `protobuf:"varint,1,opt,name=slot,proto3" json:"slot,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot"` + Slot github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot `protobuf:"varint,1,opt,name=slot,proto3" json:"slot,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Slot"` Block []byte `protobuf:"bytes,2,opt,name=block,proto3" json:"block,omitempty" ssz-size:"32"` ExecutionOptimistic bool `protobuf:"varint,3,opt,name=execution_optimistic,json=executionOptimistic,proto3" json:"execution_optimistic,omitempty"` unknownFields protoimpl.UnknownFields @@ -155,11 +155,11 @@ func (*EventBlock) Descriptor() ([]byte, []int) { return file_proto_eth_v1_events_proto_rawDescGZIP(), []int{1} } -func (x *EventBlock) GetSlot() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot { +func (x *EventBlock) GetSlot() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot { if x != nil { return x.Slot } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot(0) } func (x *EventBlock) GetBlock() []byte { @@ -178,13 +178,13 @@ func (x *EventBlock) GetExecutionOptimistic() bool { type EventChainReorg struct { state 
protoimpl.MessageState `protogen:"open.v1"` - Slot github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot `protobuf:"varint,1,opt,name=slot,proto3" json:"slot,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot"` + Slot github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot `protobuf:"varint,1,opt,name=slot,proto3" json:"slot,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Slot"` Depth uint64 `protobuf:"varint,2,opt,name=depth,proto3" json:"depth,omitempty"` OldHeadBlock []byte `protobuf:"bytes,3,opt,name=old_head_block,json=oldHeadBlock,proto3" json:"old_head_block,omitempty" ssz-size:"32"` NewHeadBlock []byte `protobuf:"bytes,4,opt,name=new_head_block,json=newHeadBlock,proto3" json:"new_head_block,omitempty" ssz-size:"32"` OldHeadState []byte `protobuf:"bytes,5,opt,name=old_head_state,json=oldHeadState,proto3" json:"old_head_state,omitempty" ssz-size:"32"` NewHeadState []byte `protobuf:"bytes,6,opt,name=new_head_state,json=newHeadState,proto3" json:"new_head_state,omitempty" ssz-size:"32"` - Epoch github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch `protobuf:"varint,7,opt,name=epoch,proto3" json:"epoch,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Epoch"` + Epoch github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch `protobuf:"varint,7,opt,name=epoch,proto3" json:"epoch,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Epoch"` ExecutionOptimistic bool `protobuf:"varint,8,opt,name=execution_optimistic,json=executionOptimistic,proto3" json:"execution_optimistic,omitempty"` unknownFields protoimpl.UnknownFields sizeCache protoimpl.SizeCache @@ -220,11 +220,11 @@ func (*EventChainReorg) Descriptor() ([]byte, []int) { return file_proto_eth_v1_events_proto_rawDescGZIP(), []int{2} } -func (x *EventChainReorg) GetSlot() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot { +func (x *EventChainReorg) GetSlot() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot { if x != nil { return x.Slot } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot(0) } func (x *EventChainReorg) GetDepth() uint64 { @@ -262,11 +262,11 @@ func (x *EventChainReorg) GetNewHeadState() []byte { return nil } -func (x *EventChainReorg) GetEpoch() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch { +func (x *EventChainReorg) GetEpoch() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch { if x != nil { return x.Epoch } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch(0) } func (x *EventChainReorg) GetExecutionOptimistic() bool { @@ -280,7 +280,7 @@ type EventFinalizedCheckpoint struct { state protoimpl.MessageState `protogen:"open.v1"` Block []byte `protobuf:"bytes,1,opt,name=block,proto3" json:"block,omitempty" ssz-size:"32"` State []byte `protobuf:"bytes,2,opt,name=state,proto3" json:"state,omitempty" ssz-size:"32"` - Epoch github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch `protobuf:"varint,3,opt,name=epoch,proto3" json:"epoch,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Epoch"` + Epoch github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch `protobuf:"varint,3,opt,name=epoch,proto3" json:"epoch,omitempty" 
cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Epoch"` ExecutionOptimistic bool `protobuf:"varint,4,opt,name=execution_optimistic,json=executionOptimistic,proto3" json:"execution_optimistic,omitempty"` unknownFields protoimpl.UnknownFields sizeCache protoimpl.SizeCache @@ -330,11 +330,11 @@ func (x *EventFinalizedCheckpoint) GetState() []byte { return nil } -func (x *EventFinalizedCheckpoint) GetEpoch() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch { +func (x *EventFinalizedCheckpoint) GetEpoch() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch { if x != nil { return x.Epoch } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch(0) } func (x *EventFinalizedCheckpoint) GetExecutionOptimistic() bool { @@ -357,7 +357,7 @@ var file_proto_eth_v1_events_proto_rawDesc = []byte{ 0x45, 0x76, 0x65, 0x6e, 0x74, 0x48, 0x65, 0x61, 0x64, 0x12, 0x58, 0x0a, 0x04, 0x73, 0x6c, 0x6f, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, 0x42, 0x44, 0x82, 0xb5, 0x18, 0x40, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, - 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, + 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x53, 0x6c, 0x6f, 0x74, 0x52, 0x04, 0x73, 0x6c, 0x6f, 0x74, 0x12, 0x1c, 0x0a, 0x05, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x18, 0x02, 0x20, 0x01, @@ -382,7 +382,7 @@ var file_proto_eth_v1_events_proto_rawDesc = []byte{ 0x0a, 0x0a, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x12, 0x58, 0x0a, 0x04, 0x73, 0x6c, 0x6f, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, 0x42, 0x44, 0x82, 0xb5, 0x18, 0x40, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, - 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, + 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x53, 0x6c, 0x6f, 0x74, 0x52, 0x04, 0x73, 0x6c, 0x6f, 0x74, 0x12, 0x1c, 0x0a, 0x05, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x18, @@ -394,7 +394,7 @@ var file_proto_eth_v1_events_proto_rawDesc = []byte{ 0x74, 0x43, 0x68, 0x61, 0x69, 0x6e, 0x52, 0x65, 0x6f, 0x72, 0x67, 0x12, 0x58, 0x0a, 0x04, 0x73, 0x6c, 0x6f, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, 0x42, 0x44, 0x82, 0xb5, 0x18, 0x40, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, - 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, + 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x53, 0x6c, 0x6f, 0x74, 0x52, 0x04, 0x73, 0x6c, 0x6f, 0x74, 0x12, 0x14, 0x0a, 0x05, 0x64, 0x65, 0x70, 0x74, 0x68, 0x18, 0x02, @@ -413,7 +413,7 @@ var file_proto_eth_v1_events_proto_rawDesc = []byte{ 0x61, 0x74, 0x65, 0x12, 0x5b, 0x0a, 0x05, 0x65, 0x70, 0x6f, 0x63, 0x68, 
0x18, 0x07, 0x20, 0x01, 0x28, 0x04, 0x42, 0x45, 0x82, 0xb5, 0x18, 0x41, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, - 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, + 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x45, 0x70, 0x6f, 0x63, 0x68, 0x52, 0x05, 0x65, 0x70, 0x6f, 0x63, 0x68, 0x12, 0x31, 0x0a, 0x14, 0x65, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x6f, 0x70, @@ -428,7 +428,7 @@ var file_proto_eth_v1_events_proto_rawDesc = []byte{ 0x65, 0x70, 0x6f, 0x63, 0x68, 0x18, 0x03, 0x20, 0x01, 0x28, 0x04, 0x42, 0x45, 0x82, 0xb5, 0x18, 0x41, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, - 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, + 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x45, 0x70, 0x6f, 0x63, 0x68, 0x52, 0x05, 0x65, 0x70, 0x6f, 0x63, 0x68, 0x12, 0x31, 0x0a, 0x14, 0x65, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x6f, 0x70, 0x74, 0x69, 0x6d, 0x69, 0x73, 0x74, 0x69, @@ -438,7 +438,7 @@ var file_proto_eth_v1_events_proto_rawDesc = []byte{ 0x2e, 0x76, 0x31, 0x42, 0x11, 0x42, 0x65, 0x61, 0x63, 0x6f, 0x6e, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x73, 0x50, 0x72, 0x6f, 0x74, 0x6f, 0x50, 0x01, 0x5a, 0x2d, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, - 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, + 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2f, 0x65, 0x74, 0x68, 0x2f, 0x76, 0x31, 0xaa, 0x02, 0x0f, 0x45, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x45, 0x74, 0x68, 0x2e, 0x56, 0x31, 0xca, 0x02, 0x0f, 0x45, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x5c, 0x45, 0x74, 0x68, 0x5c, 0x76, 0x31, 0x62, 0x06, 0x70, 0x72, 0x6f, diff --git a/proto/eth/v1/events.proto b/proto/eth/v1/events.proto index 58445f3d2d..5c1634324d 100644 --- a/proto/eth/v1/events.proto +++ b/proto/eth/v1/events.proto @@ -19,7 +19,7 @@ import "google/protobuf/descriptor.proto"; import "proto/eth/ext/options.proto"; option csharp_namespace = "Ethereum.Eth.V1"; -option go_package = "github.com/OffchainLabs/prysm/v6/proto/eth/v1"; +option go_package = "github.com/OffchainLabs/prysm/v7/proto/eth/v1"; option java_multiple_files = true; option java_outer_classname = "BeaconEventsProto"; option java_package = "org.ethereum.eth.v1"; @@ -29,7 +29,7 @@ message EventHead { // Slot of the new chain head. uint64 slot = 1 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Slot" ]; // Block root of the new chain head. @@ -55,7 +55,7 @@ message EventBlock { // The slot of the observed block. 
uint64 slot = 1 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Slot" ]; // The root of the observed block. @@ -69,7 +69,7 @@ message EventChainReorg { // The slot of the observed reorg. uint64 slot = 1 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Slot" ]; // Depth of the reorg in slots. @@ -90,7 +90,7 @@ message EventChainReorg { // Epoch of the observed reorg. uint64 epoch = 7 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Epoch" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Epoch" ]; // Information about optimistic sync. @@ -107,7 +107,7 @@ message EventFinalizedCheckpoint { // Epoch the checkpoint references. uint64 epoch = 3 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Epoch" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Epoch" ]; // Information about optimistic sync. diff --git a/proto/eth/v1/gateway.ssz.go b/proto/eth/v1/gateway.ssz.go index 47709006e0..1f3baadd7b 100644 --- a/proto/eth/v1/gateway.ssz.go +++ b/proto/eth/v1/gateway.ssz.go @@ -2,7 +2,7 @@ package v1 import ( - github_com_OffchainLabs_prysm_v6_consensus_types_primitives "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" + github_com_OffchainLabs_prysm_v7_consensus_types_primitives "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" ssz "github.com/prysmaticlabs/fastssz" ) @@ -182,7 +182,7 @@ func (a *AggregateAttestationAndProof) UnmarshalSSZ(buf []byte) error { var o1 uint64 // Field (0) 'AggregatorIndex' - a.AggregatorIndex = github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex(ssz.UnmarshallUint64(buf[0:8])) + a.AggregatorIndex = github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex(ssz.UnmarshallUint64(buf[0:8])) // Offset (1) 'Aggregate' if o1 = ssz.ReadOffset(buf[8:12]); o1 > size { @@ -412,10 +412,10 @@ func (a *AttestationData) UnmarshalSSZ(buf []byte) error { } // Field (0) 'Slot' - a.Slot = github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot(ssz.UnmarshallUint64(buf[0:8])) + a.Slot = github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot(ssz.UnmarshallUint64(buf[0:8])) // Field (1) 'Index' - a.Index = github_com_OffchainLabs_prysm_v6_consensus_types_primitives.CommitteeIndex(ssz.UnmarshallUint64(buf[8:16])) + a.Index = github_com_OffchainLabs_prysm_v7_consensus_types_primitives.CommitteeIndex(ssz.UnmarshallUint64(buf[8:16])) // Field (2) 'BeaconBlockRoot' if cap(a.BeaconBlockRoot) == 0 { @@ -515,7 +515,7 @@ func (c *Checkpoint) UnmarshalSSZ(buf []byte) error { } // Field (0) 'Epoch' - c.Epoch = github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch(ssz.UnmarshallUint64(buf[0:8])) + c.Epoch = github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch(ssz.UnmarshallUint64(buf[0:8])) // Field (1) 'Root' if cap(c.Root) == 0 { @@ -612,10 +612,10 @@ func (b *BeaconBlock) UnmarshalSSZ(buf []byte) error { var o4 uint64 // Field (0) 'Slot' - b.Slot = github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot(ssz.UnmarshallUint64(buf[0:8])) + b.Slot = github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot(ssz.UnmarshallUint64(buf[0:8])) // Field (1) 'ProposerIndex' - b.ProposerIndex = 
github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex(ssz.UnmarshallUint64(buf[8:16])) + b.ProposerIndex = github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex(ssz.UnmarshallUint64(buf[8:16])) // Field (2) 'ParentRoot' if cap(b.ParentRoot) == 0 { @@ -1582,10 +1582,10 @@ func (v *VoluntaryExit) UnmarshalSSZ(buf []byte) error { } // Field (0) 'Epoch' - v.Epoch = github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch(ssz.UnmarshallUint64(buf[0:8])) + v.Epoch = github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch(ssz.UnmarshallUint64(buf[0:8])) // Field (1) 'ValidatorIndex' - v.ValidatorIndex = github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex(ssz.UnmarshallUint64(buf[8:16])) + v.ValidatorIndex = github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex(ssz.UnmarshallUint64(buf[8:16])) return err } @@ -1837,10 +1837,10 @@ func (b *BeaconBlockHeader) UnmarshalSSZ(buf []byte) error { } // Field (0) 'Slot' - b.Slot = github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot(ssz.UnmarshallUint64(buf[0:8])) + b.Slot = github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot(ssz.UnmarshallUint64(buf[0:8])) // Field (1) 'ProposerIndex' - b.ProposerIndex = github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex(ssz.UnmarshallUint64(buf[8:16])) + b.ProposerIndex = github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex(ssz.UnmarshallUint64(buf[8:16])) // Field (2) 'ParentRoot' if cap(b.ParentRoot) == 0 { @@ -2400,16 +2400,16 @@ func (v *Validator) UnmarshalSSZ(buf []byte) error { } // Field (4) 'ActivationEligibilityEpoch' - v.ActivationEligibilityEpoch = github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch(ssz.UnmarshallUint64(buf[89:97])) + v.ActivationEligibilityEpoch = github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch(ssz.UnmarshallUint64(buf[89:97])) // Field (5) 'ActivationEpoch' - v.ActivationEpoch = github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch(ssz.UnmarshallUint64(buf[97:105])) + v.ActivationEpoch = github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch(ssz.UnmarshallUint64(buf[97:105])) // Field (6) 'ExitEpoch' - v.ExitEpoch = github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch(ssz.UnmarshallUint64(buf[105:113])) + v.ExitEpoch = github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch(ssz.UnmarshallUint64(buf[105:113])) // Field (7) 'WithdrawableEpoch' - v.WithdrawableEpoch = github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch(ssz.UnmarshallUint64(buf[113:121])) + v.WithdrawableEpoch = github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch(ssz.UnmarshallUint64(buf[113:121])) return err } diff --git a/proto/eth/v1/node.pb.go b/proto/eth/v1/node.pb.go index fd0f3da3fa..6e3ea3f648 100755 --- a/proto/eth/v1/node.pb.go +++ b/proto/eth/v1/node.pb.go @@ -230,7 +230,7 @@ var file_proto_eth_v1_node_proto_rawDesc = []byte{ 0x2e, 0x76, 0x31, 0x42, 0x0f, 0x42, 0x65, 0x61, 0x63, 0x6f, 0x6e, 0x4e, 0x6f, 0x64, 0x65, 0x50, 0x72, 0x6f, 0x74, 0x6f, 0x50, 0x01, 0x5a, 0x2d, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, - 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2f, 0x65, + 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2f, 0x65, 0x74, 0x68, 0x2f, 0x76, 0x31, 0xaa, 
0x02, 0x0f, 0x45, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x45, 0x74, 0x68, 0x2e, 0x56, 0x31, 0xca, 0x02, 0x0f, 0x45, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x5c, 0x45, 0x74, 0x68, 0x5c, 0x76, 0x31, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, diff --git a/proto/eth/v1/node.proto b/proto/eth/v1/node.proto index 9910e55274..5523bdfaec 100644 --- a/proto/eth/v1/node.proto +++ b/proto/eth/v1/node.proto @@ -18,7 +18,7 @@ package ethereum.eth.v1; import "google/protobuf/descriptor.proto"; option csharp_namespace = "Ethereum.Eth.V1"; -option go_package = "github.com/OffchainLabs/prysm/v6/proto/eth/v1"; +option go_package = "github.com/OffchainLabs/prysm/v7/proto/eth/v1"; option java_multiple_files = true; option java_outer_classname = "BeaconNodeProto"; option java_package = "org.ethereum.eth.v1"; diff --git a/proto/eth/v1/validator.pb.go b/proto/eth/v1/validator.pb.go index 5446711c0a..46e76acae6 100755 --- a/proto/eth/v1/validator.pb.go +++ b/proto/eth/v1/validator.pb.go @@ -10,8 +10,8 @@ import ( reflect "reflect" sync "sync" - github_com_OffchainLabs_prysm_v6_consensus_types_primitives "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - _ "github.com/OffchainLabs/prysm/v6/proto/eth/ext" + github_com_OffchainLabs_prysm_v7_consensus_types_primitives "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + _ "github.com/OffchainLabs/prysm/v7/proto/eth/ext" protoreflect "google.golang.org/protobuf/reflect/protoreflect" protoimpl "google.golang.org/protobuf/runtime/protoimpl" _ "google.golang.org/protobuf/types/descriptorpb" @@ -109,10 +109,10 @@ type Validator struct { WithdrawalCredentials []byte `protobuf:"bytes,2,opt,name=withdrawal_credentials,json=withdrawalCredentials,proto3" json:"withdrawal_credentials,omitempty" ssz-size:"32"` EffectiveBalance uint64 `protobuf:"varint,3,opt,name=effective_balance,json=effectiveBalance,proto3" json:"effective_balance,omitempty"` Slashed bool `protobuf:"varint,4,opt,name=slashed,proto3" json:"slashed,omitempty"` - ActivationEligibilityEpoch github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch `protobuf:"varint,5,opt,name=activation_eligibility_epoch,json=activationEligibilityEpoch,proto3" json:"activation_eligibility_epoch,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Epoch"` - ActivationEpoch github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch `protobuf:"varint,6,opt,name=activation_epoch,json=activationEpoch,proto3" json:"activation_epoch,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Epoch"` - ExitEpoch github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch `protobuf:"varint,7,opt,name=exit_epoch,json=exitEpoch,proto3" json:"exit_epoch,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Epoch"` - WithdrawableEpoch github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch `protobuf:"varint,8,opt,name=withdrawable_epoch,json=withdrawableEpoch,proto3" json:"withdrawable_epoch,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Epoch"` + ActivationEligibilityEpoch github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch `protobuf:"varint,5,opt,name=activation_eligibility_epoch,json=activationEligibilityEpoch,proto3" json:"activation_eligibility_epoch,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Epoch"` + ActivationEpoch github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch 
`protobuf:"varint,6,opt,name=activation_epoch,json=activationEpoch,proto3" json:"activation_epoch,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Epoch"` + ExitEpoch github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch `protobuf:"varint,7,opt,name=exit_epoch,json=exitEpoch,proto3" json:"exit_epoch,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Epoch"` + WithdrawableEpoch github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch `protobuf:"varint,8,opt,name=withdrawable_epoch,json=withdrawableEpoch,proto3" json:"withdrawable_epoch,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Epoch"` unknownFields protoimpl.UnknownFields sizeCache protoimpl.SizeCache } @@ -175,37 +175,37 @@ func (x *Validator) GetSlashed() bool { return false } -func (x *Validator) GetActivationEligibilityEpoch() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch { +func (x *Validator) GetActivationEligibilityEpoch() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch { if x != nil { return x.ActivationEligibilityEpoch } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch(0) } -func (x *Validator) GetActivationEpoch() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch { +func (x *Validator) GetActivationEpoch() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch { if x != nil { return x.ActivationEpoch } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch(0) } -func (x *Validator) GetExitEpoch() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch { +func (x *Validator) GetExitEpoch() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch { if x != nil { return x.ExitEpoch } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch(0) } -func (x *Validator) GetWithdrawableEpoch() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch { +func (x *Validator) GetWithdrawableEpoch() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch { if x != nil { return x.WithdrawableEpoch } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch(0) } type ProduceBlockRequest struct { state protoimpl.MessageState `protogen:"open.v1"` - Slot github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot `protobuf:"varint,1,opt,name=slot,proto3" json:"slot,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot"` + Slot github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot `protobuf:"varint,1,opt,name=slot,proto3" json:"slot,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Slot"` RandaoReveal []byte `protobuf:"bytes,2,opt,name=randao_reveal,json=randaoReveal,proto3" json:"randao_reveal,omitempty" ssz-size:"96"` Graffiti []byte `protobuf:"bytes,3,opt,name=graffiti,proto3,oneof" json:"graffiti,omitempty" ssz-size:"32"` unknownFields protoimpl.UnknownFields @@ -242,11 +242,11 @@ func (*ProduceBlockRequest) Descriptor() ([]byte, []int) { return file_proto_eth_v1_validator_proto_rawDescGZIP(), []int{1} } -func (x *ProduceBlockRequest) GetSlot() 
github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot { +func (x *ProduceBlockRequest) GetSlot() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot { if x != nil { return x.Slot } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot(0) } func (x *ProduceBlockRequest) GetRandaoReveal() []byte { @@ -335,7 +335,7 @@ var file_proto_eth_v1_validator_proto_rawDesc = []byte{ 0x67, 0x69, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x79, 0x5f, 0x65, 0x70, 0x6f, 0x63, 0x68, 0x18, 0x05, 0x20, 0x01, 0x28, 0x04, 0x42, 0x45, 0x82, 0xb5, 0x18, 0x41, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, - 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, + 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x45, 0x70, 0x6f, 0x63, 0x68, 0x52, 0x1a, 0x61, 0x63, 0x74, 0x69, 0x76, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x45, 0x6c, 0x69, 0x67, 0x69, 0x62, 0x69, 0x6c, 0x69, @@ -343,21 +343,21 @@ var file_proto_eth_v1_validator_proto_rawDesc = []byte{ 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x65, 0x70, 0x6f, 0x63, 0x68, 0x18, 0x06, 0x20, 0x01, 0x28, 0x04, 0x42, 0x45, 0x82, 0xb5, 0x18, 0x41, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, - 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, + 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x45, 0x70, 0x6f, 0x63, 0x68, 0x52, 0x0f, 0x61, 0x63, 0x74, 0x69, 0x76, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x45, 0x70, 0x6f, 0x63, 0x68, 0x12, 0x64, 0x0a, 0x0a, 0x65, 0x78, 0x69, 0x74, 0x5f, 0x65, 0x70, 0x6f, 0x63, 0x68, 0x18, 0x07, 0x20, 0x01, 0x28, 0x04, 0x42, 0x45, 0x82, 0xb5, 0x18, 0x41, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, - 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, + 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x45, 0x70, 0x6f, 0x63, 0x68, 0x52, 0x09, 0x65, 0x78, 0x69, 0x74, 0x45, 0x70, 0x6f, 0x63, 0x68, 0x12, 0x74, 0x0a, 0x12, 0x77, 0x69, 0x74, 0x68, 0x64, 0x72, 0x61, 0x77, 0x61, 0x62, 0x6c, 0x65, 0x5f, 0x65, 0x70, 0x6f, 0x63, 0x68, 0x18, 0x08, 0x20, 0x01, 0x28, 0x04, 0x42, 0x45, 0x82, 0xb5, 0x18, 0x41, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, - 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, + 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x45, 0x70, 0x6f, 0x63, 0x68, 0x52, 0x11, 0x77, 0x69, 0x74, 0x68, 0x64, 0x72, 0x61, 0x77, 0x61, 0x62, 
0x6c, 0x65, 0x45, 0x70, 0x6f, 0x63, 0x68, 0x22, 0xd2, 0x01, 0x0a, 0x13, 0x50, 0x72, 0x6f, 0x64, 0x75, 0x63, @@ -365,7 +365,7 @@ var file_proto_eth_v1_validator_proto_rawDesc = []byte{ 0x04, 0x73, 0x6c, 0x6f, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, 0x42, 0x44, 0x82, 0xb5, 0x18, 0x40, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, - 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, + 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x53, 0x6c, 0x6f, 0x74, 0x52, 0x04, 0x73, 0x6c, 0x6f, 0x74, 0x12, 0x2b, 0x0a, 0x0d, 0x72, 0x61, 0x6e, 0x64, 0x61, 0x6f, 0x5f, 0x72, 0x65, 0x76, 0x65, 0x61, 0x6c, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x06, @@ -399,7 +399,7 @@ var file_proto_eth_v1_validator_proto_rawDesc = []byte{ 0x74, 0x68, 0x2e, 0x76, 0x31, 0x42, 0x0e, 0x56, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, 0x50, 0x72, 0x6f, 0x74, 0x6f, 0x50, 0x01, 0x5a, 0x2d, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, - 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2f, + 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2f, 0x65, 0x74, 0x68, 0x2f, 0x76, 0x31, 0xaa, 0x02, 0x0f, 0x45, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x45, 0x74, 0x68, 0x2e, 0x56, 0x31, 0xca, 0x02, 0x0f, 0x45, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x5c, 0x45, 0x74, 0x68, 0x5c, 0x76, 0x31, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, diff --git a/proto/eth/v1/validator.proto b/proto/eth/v1/validator.proto index bbd8c5984b..c0291131c1 100644 --- a/proto/eth/v1/validator.proto +++ b/proto/eth/v1/validator.proto @@ -21,7 +21,7 @@ import "proto/eth/ext/options.proto"; import "proto/eth/v1/beacon_block.proto"; option csharp_namespace = "Ethereum.Eth.V1"; -option go_package = "github.com/OffchainLabs/prysm/v6/proto/eth/v1"; +option go_package = "github.com/OffchainLabs/prysm/v7/proto/eth/v1"; option java_multiple_files = true; option java_outer_classname = "ValidatorProto"; option java_package = "org.ethereum.eth.v1"; @@ -50,7 +50,7 @@ message Validator { // activated. uint64 activation_eligibility_epoch = 5 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Epoch" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Epoch" ]; // Epoch when the validator was activated. This field may be zero if the @@ -58,7 +58,7 @@ message Validator { // FAR_FUTURE_EPOCH if the validator has not been activated. uint64 activation_epoch = 6 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Epoch" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Epoch" ]; // Epoch when the validator was exited. This field is FAR_FUTURE_EPOCH if @@ -68,7 +68,7 @@ message Validator { // https://github.com/ethereum/consensus-specs/blob/v0.9.2/specs/core/0_beacon-chain.md#constants uint64 exit_epoch = 7 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Epoch" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Epoch" ]; // Epoch when the validator is eligible to withdraw their funds. 
This field @@ -78,7 +78,7 @@ message Validator { // https://github.com/ethereum/consensus-specs/blob/v0.9.2/specs/core/0_beacon-chain.md#constants uint64 withdrawable_epoch = 8 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Epoch" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Epoch" ]; } @@ -102,7 +102,7 @@ message ProduceBlockRequest { // The slot to request a block for. uint64 slot = 1 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Slot" ]; // The validators RANDAO reveal 96 byte value. diff --git a/proto/migration/BUILD.bazel b/proto/migration/BUILD.bazel index 1f934ce958..2b4708b3a4 100644 --- a/proto/migration/BUILD.bazel +++ b/proto/migration/BUILD.bazel @@ -6,7 +6,7 @@ go_library( "enums.go", "v1alpha1_to_v1.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/proto/migration", + importpath = "github.com/OffchainLabs/prysm/v7/proto/migration", visibility = ["//visibility:public"], deps = [ "//proto/eth/v1:go_default_library", diff --git a/proto/migration/enums.go b/proto/migration/enums.go index e0eb0f2d85..0bc13926fd 100644 --- a/proto/migration/enums.go +++ b/proto/migration/enums.go @@ -1,8 +1,8 @@ package migration import ( - ethpb "github.com/OffchainLabs/prysm/v6/proto/eth/v1" - eth "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + ethpb "github.com/OffchainLabs/prysm/v7/proto/eth/v1" + eth "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" "github.com/pkg/errors" ) diff --git a/proto/migration/enums_test.go b/proto/migration/enums_test.go index e387211a90..f34694108a 100644 --- a/proto/migration/enums_test.go +++ b/proto/migration/enums_test.go @@ -3,8 +3,8 @@ package migration import ( "testing" - v1 "github.com/OffchainLabs/prysm/v6/proto/eth/v1" - eth "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + v1 "github.com/OffchainLabs/prysm/v7/proto/eth/v1" + eth "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" ) func TestV1Alpha1ConnectionStateToV1(t *testing.T) { diff --git a/proto/migration/v1alpha1_to_v1.go b/proto/migration/v1alpha1_to_v1.go index 7bb4b5e77a..e2f7c961b2 100644 --- a/proto/migration/v1alpha1_to_v1.go +++ b/proto/migration/v1alpha1_to_v1.go @@ -1,8 +1,8 @@ package migration import ( - ethpbv1 "github.com/OffchainLabs/prysm/v6/proto/eth/v1" - ethpbalpha "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + ethpbv1 "github.com/OffchainLabs/prysm/v7/proto/eth/v1" + ethpbalpha "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" ) // V1ValidatorToV1Alpha1 converts a v1 validator to v1alpha1. 
diff --git a/proto/migration/v1alpha1_to_v1_test.go b/proto/migration/v1alpha1_to_v1_test.go index 92e681ee99..4f89141e73 100644 --- a/proto/migration/v1alpha1_to_v1_test.go +++ b/proto/migration/v1alpha1_to_v1_test.go @@ -3,10 +3,10 @@ package migration import ( "testing" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - ethpbv1 "github.com/OffchainLabs/prysm/v6/proto/eth/v1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + ethpbv1 "github.com/OffchainLabs/prysm/v7/proto/eth/v1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" ) func Test_V1ValidatorToV1Alpha1(t *testing.T) { diff --git a/proto/prysm/v1alpha1/BUILD.bazel b/proto/prysm/v1alpha1/BUILD.bazel index 40a383981c..799840abd4 100644 --- a/proto/prysm/v1alpha1/BUILD.bazel +++ b/proto/prysm/v1alpha1/BUILD.bazel @@ -342,7 +342,7 @@ go_proto_library( compilers = [ "@com_github_prysmaticlabs_protoc_gen_go_cast//:go_cast_grpc", ], - importpath = "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1", + importpath = "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1", proto = ":proto", visibility = ["//visibility:public"], deps = [ @@ -386,7 +386,7 @@ go_library( ":ssz_generated_phase0", # keep ], embed = [":go_proto"], - importpath = "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1", + importpath = "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1", visibility = ["//visibility:public"], deps = SSZ_DEPS + [ "//consensus-types/primitives:go_default_library", diff --git a/proto/prysm/v1alpha1/altair.ssz.go b/proto/prysm/v1alpha1/altair.ssz.go index a28ef3fec2..0429a0a74c 100644 --- a/proto/prysm/v1alpha1/altair.ssz.go +++ b/proto/prysm/v1alpha1/altair.ssz.go @@ -2,7 +2,7 @@ package eth import ( - github_com_OffchainLabs_prysm_v6_consensus_types_primitives "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" + github_com_OffchainLabs_prysm_v7_consensus_types_primitives "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" ssz "github.com/prysmaticlabs/fastssz" ) @@ -172,10 +172,10 @@ func (b *BeaconBlockAltair) UnmarshalSSZ(buf []byte) error { var o4 uint64 // Field (0) 'Slot' - b.Slot = github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot(ssz.UnmarshallUint64(buf[0:8])) + b.Slot = github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot(ssz.UnmarshallUint64(buf[0:8])) // Field (1) 'ProposerIndex' - b.ProposerIndex = github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex(ssz.UnmarshallUint64(buf[8:16])) + b.ProposerIndex = github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex(ssz.UnmarshallUint64(buf[8:16])) // Field (2) 'ParentRoot' if cap(b.ParentRoot) == 0 { @@ -1160,7 +1160,7 @@ func (b *BeaconStateAltair) UnmarshalSSZ(buf []byte) error { b.GenesisValidatorsRoot = append(b.GenesisValidatorsRoot, buf[8:40]...) 
// Field (2) 'Slot' - b.Slot = github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot(ssz.UnmarshallUint64(buf[40:48])) + b.Slot = github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot(ssz.UnmarshallUint64(buf[40:48])) // Field (3) 'Fork' if b.Fork == nil { @@ -1713,7 +1713,7 @@ func (s *SyncAggregatorSelectionData) UnmarshalSSZ(buf []byte) error { } // Field (0) 'Slot' - s.Slot = github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot(ssz.UnmarshallUint64(buf[0:8])) + s.Slot = github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot(ssz.UnmarshallUint64(buf[0:8])) // Field (1) 'SubcommitteeIndex' s.SubcommitteeIndex = ssz.UnmarshallUint64(buf[8:16]) @@ -2001,7 +2001,7 @@ func (l *LightClientUpdateAltair) UnmarshalSSZ(buf []byte) error { } // Field (6) 'SignatureSlot' - l.SignatureSlot = github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot(ssz.UnmarshallUint64(buf[25360:25368])) + l.SignatureSlot = github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot(ssz.UnmarshallUint64(buf[25360:25368])) return err } @@ -2176,7 +2176,7 @@ func (l *LightClientFinalityUpdateAltair) UnmarshalSSZ(buf []byte) error { } // Field (4) 'SignatureSlot' - l.SignatureSlot = github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot(ssz.UnmarshallUint64(buf[576:584])) + l.SignatureSlot = github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot(ssz.UnmarshallUint64(buf[576:584])) return err } @@ -2291,7 +2291,7 @@ func (l *LightClientOptimisticUpdateAltair) UnmarshalSSZ(buf []byte) error { } // Field (2) 'SignatureSlot' - l.SignatureSlot = github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot(ssz.UnmarshallUint64(buf[272:280])) + l.SignatureSlot = github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot(ssz.UnmarshallUint64(buf[272:280])) return err } @@ -2432,7 +2432,7 @@ func (s *SyncCommitteeMessage) UnmarshalSSZ(buf []byte) error { } // Field (0) 'Slot' - s.Slot = github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot(ssz.UnmarshallUint64(buf[0:8])) + s.Slot = github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot(ssz.UnmarshallUint64(buf[0:8])) // Field (1) 'BlockRoot' if cap(s.BlockRoot) == 0 { @@ -2441,7 +2441,7 @@ func (s *SyncCommitteeMessage) UnmarshalSSZ(buf []byte) error { s.BlockRoot = append(s.BlockRoot, buf[8:40]...) 
// Field (2) 'ValidatorIndex' - s.ValidatorIndex = github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex(ssz.UnmarshallUint64(buf[40:48])) + s.ValidatorIndex = github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex(ssz.UnmarshallUint64(buf[40:48])) // Field (3) 'Signature' if cap(s.Signature) == 0 { @@ -2613,7 +2613,7 @@ func (c *ContributionAndProof) UnmarshalSSZ(buf []byte) error { } // Field (0) 'AggregatorIndex' - c.AggregatorIndex = github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex(ssz.UnmarshallUint64(buf[0:8])) + c.AggregatorIndex = github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex(ssz.UnmarshallUint64(buf[0:8])) // Field (1) 'Contribution' if c.Contribution == nil { @@ -2714,7 +2714,7 @@ func (s *SyncCommitteeContribution) UnmarshalSSZ(buf []byte) error { } // Field (0) 'Slot' - s.Slot = github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot(ssz.UnmarshallUint64(buf[0:8])) + s.Slot = github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot(ssz.UnmarshallUint64(buf[0:8])) // Field (1) 'BlockRoot' if cap(s.BlockRoot) == 0 { diff --git a/proto/prysm/v1alpha1/attestation.go b/proto/prysm/v1alpha1/attestation.go index 8c185940b0..6336d9a4df 100644 --- a/proto/prysm/v1alpha1/attestation.go +++ b/proto/prysm/v1alpha1/attestation.go @@ -2,9 +2,9 @@ package eth import ( "github.com/OffchainLabs/go-bitfield" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/runtime/version" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/runtime/version" ssz "github.com/prysmaticlabs/fastssz" "google.golang.org/protobuf/proto" ) diff --git a/proto/prysm/v1alpha1/attestation.pb.go b/proto/prysm/v1alpha1/attestation.pb.go index ea687d12f1..708bb10922 100755 --- a/proto/prysm/v1alpha1/attestation.pb.go +++ b/proto/prysm/v1alpha1/attestation.pb.go @@ -11,8 +11,8 @@ import ( sync "sync" github_com_OffchainLabs_go_bitfield "github.com/OffchainLabs/go-bitfield" - github_com_OffchainLabs_prysm_v6_consensus_types_primitives "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - _ "github.com/OffchainLabs/prysm/v6/proto/eth/ext" + github_com_OffchainLabs_prysm_v7_consensus_types_primitives "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + _ "github.com/OffchainLabs/prysm/v7/proto/eth/ext" protoreflect "google.golang.org/protobuf/reflect/protoreflect" protoimpl "google.golang.org/protobuf/runtime/protoimpl" ) @@ -78,7 +78,7 @@ func (x *SignedAggregateAttestationAndProof) GetSignature() []byte { type AggregateAttestationAndProof struct { state protoimpl.MessageState `protogen:"open.v1"` - AggregatorIndex github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex `protobuf:"varint,1,opt,name=aggregator_index,json=aggregatorIndex,proto3" json:"aggregator_index,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.ValidatorIndex"` + AggregatorIndex github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex `protobuf:"varint,1,opt,name=aggregator_index,json=aggregatorIndex,proto3" json:"aggregator_index,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.ValidatorIndex"` Aggregate *Attestation `protobuf:"bytes,3,opt,name=aggregate,proto3" json:"aggregate,omitempty"` SelectionProof []byte 
`protobuf:"bytes,2,opt,name=selection_proof,json=selectionProof,proto3" json:"selection_proof,omitempty" ssz-size:"96"` unknownFields protoimpl.UnknownFields @@ -115,11 +115,11 @@ func (*AggregateAttestationAndProof) Descriptor() ([]byte, []int) { return file_proto_prysm_v1alpha1_attestation_proto_rawDescGZIP(), []int{1} } -func (x *AggregateAttestationAndProof) GetAggregatorIndex() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex { +func (x *AggregateAttestationAndProof) GetAggregatorIndex() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex { if x != nil { return x.AggregatorIndex } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex(0) } func (x *AggregateAttestationAndProof) GetAggregate() *Attestation { @@ -198,8 +198,8 @@ func (x *Attestation) GetSignature() []byte { type AttestationData struct { state protoimpl.MessageState `protogen:"open.v1"` - Slot github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot `protobuf:"varint,1,opt,name=slot,proto3" json:"slot,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot"` - CommitteeIndex github_com_OffchainLabs_prysm_v6_consensus_types_primitives.CommitteeIndex `protobuf:"varint,2,opt,name=committee_index,json=committeeIndex,proto3" json:"committee_index,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.CommitteeIndex"` + Slot github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot `protobuf:"varint,1,opt,name=slot,proto3" json:"slot,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Slot"` + CommitteeIndex github_com_OffchainLabs_prysm_v7_consensus_types_primitives.CommitteeIndex `protobuf:"varint,2,opt,name=committee_index,json=committeeIndex,proto3" json:"committee_index,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.CommitteeIndex"` BeaconBlockRoot []byte `protobuf:"bytes,3,opt,name=beacon_block_root,json=beaconBlockRoot,proto3" json:"beacon_block_root,omitempty" ssz-size:"32"` Source *Checkpoint `protobuf:"bytes,4,opt,name=source,proto3" json:"source,omitempty"` Target *Checkpoint `protobuf:"bytes,5,opt,name=target,proto3" json:"target,omitempty"` @@ -237,18 +237,18 @@ func (*AttestationData) Descriptor() ([]byte, []int) { return file_proto_prysm_v1alpha1_attestation_proto_rawDescGZIP(), []int{3} } -func (x *AttestationData) GetSlot() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot { +func (x *AttestationData) GetSlot() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot { if x != nil { return x.Slot } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot(0) } -func (x *AttestationData) GetCommitteeIndex() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.CommitteeIndex { +func (x *AttestationData) GetCommitteeIndex() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.CommitteeIndex { if x != nil { return x.CommitteeIndex } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.CommitteeIndex(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.CommitteeIndex(0) } func (x *AttestationData) GetBeaconBlockRoot() []byte { @@ -274,7 +274,7 @@ func (x *AttestationData) GetTarget() *Checkpoint { type Checkpoint struct { state protoimpl.MessageState 
`protogen:"open.v1"` - Epoch github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch `protobuf:"varint,1,opt,name=epoch,proto3" json:"epoch,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Epoch"` + Epoch github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch `protobuf:"varint,1,opt,name=epoch,proto3" json:"epoch,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Epoch"` Root []byte `protobuf:"bytes,2,opt,name=root,proto3" json:"root,omitempty" ssz-size:"32"` unknownFields protoimpl.UnknownFields sizeCache protoimpl.SizeCache @@ -310,11 +310,11 @@ func (*Checkpoint) Descriptor() ([]byte, []int) { return file_proto_prysm_v1alpha1_attestation_proto_rawDescGZIP(), []int{4} } -func (x *Checkpoint) GetEpoch() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch { +func (x *Checkpoint) GetEpoch() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch { if x != nil { return x.Epoch } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch(0) } func (x *Checkpoint) GetRoot() []byte { @@ -378,7 +378,7 @@ func (x *SignedAggregateAttestationAndProofElectra) GetSignature() []byte { type AggregateAttestationAndProofElectra struct { state protoimpl.MessageState `protogen:"open.v1"` - AggregatorIndex github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex `protobuf:"varint,1,opt,name=aggregator_index,json=aggregatorIndex,proto3" json:"aggregator_index,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.ValidatorIndex"` + AggregatorIndex github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex `protobuf:"varint,1,opt,name=aggregator_index,json=aggregatorIndex,proto3" json:"aggregator_index,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.ValidatorIndex"` Aggregate *AttestationElectra `protobuf:"bytes,2,opt,name=aggregate,proto3" json:"aggregate,omitempty"` SelectionProof []byte `protobuf:"bytes,3,opt,name=selection_proof,json=selectionProof,proto3" json:"selection_proof,omitempty" ssz-size:"96"` unknownFields protoimpl.UnknownFields @@ -415,11 +415,11 @@ func (*AggregateAttestationAndProofElectra) Descriptor() ([]byte, []int) { return file_proto_prysm_v1alpha1_attestation_proto_rawDescGZIP(), []int{6} } -func (x *AggregateAttestationAndProofElectra) GetAggregatorIndex() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex { +func (x *AggregateAttestationAndProofElectra) GetAggregatorIndex() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex { if x != nil { return x.AggregatorIndex } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex(0) } func (x *AggregateAttestationAndProofElectra) GetAggregate() *AttestationElectra { @@ -506,8 +506,8 @@ func (x *AttestationElectra) GetCommitteeBits() github_com_OffchainLabs_go_bitfi type SingleAttestation struct { state protoimpl.MessageState `protogen:"open.v1"` - CommitteeId github_com_OffchainLabs_prysm_v6_consensus_types_primitives.CommitteeIndex `protobuf:"varint,1,opt,name=committee_id,json=committeeId,proto3" json:"committee_id,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.CommitteeIndex"` - AttesterIndex 
github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex `protobuf:"varint,2,opt,name=attester_index,json=attesterIndex,proto3" json:"attester_index,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.ValidatorIndex"` + CommitteeId github_com_OffchainLabs_prysm_v7_consensus_types_primitives.CommitteeIndex `protobuf:"varint,1,opt,name=committee_id,json=committeeId,proto3" json:"committee_id,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.CommitteeIndex"` + AttesterIndex github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex `protobuf:"varint,2,opt,name=attester_index,json=attesterIndex,proto3" json:"attester_index,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.ValidatorIndex"` Data *AttestationData `protobuf:"bytes,3,opt,name=data,proto3" json:"data,omitempty"` Signature []byte `protobuf:"bytes,4,opt,name=signature,proto3" json:"signature,omitempty" ssz-size:"96"` unknownFields protoimpl.UnknownFields @@ -544,18 +544,18 @@ func (*SingleAttestation) Descriptor() ([]byte, []int) { return file_proto_prysm_v1alpha1_attestation_proto_rawDescGZIP(), []int{8} } -func (x *SingleAttestation) GetCommitteeId() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.CommitteeIndex { +func (x *SingleAttestation) GetCommitteeId() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.CommitteeIndex { if x != nil { return x.CommitteeId } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.CommitteeIndex(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.CommitteeIndex(0) } -func (x *SingleAttestation) GetAttesterIndex() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex { +func (x *SingleAttestation) GetAttesterIndex() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex { if x != nil { return x.AttesterIndex } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex(0) } func (x *SingleAttestation) GetData() *AttestationData { @@ -596,7 +596,7 @@ var file_proto_prysm_v1alpha1_attestation_proto_rawDesc = []byte{ 0x72, 0x65, 0x67, 0x61, 0x74, 0x6f, 0x72, 0x5f, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, 0x42, 0x4e, 0x82, 0xb5, 0x18, 0x4a, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, - 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, + 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x56, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x52, 0x0f, 0x61, 0x67, 0x67, 0x72, 0x65, 0x67, 0x61, 0x74, 0x6f, 0x72, 0x49, @@ -625,13 +625,13 @@ var file_proto_prysm_v1alpha1_attestation_proto_rawDesc = []byte{ 0x61, 0x12, 0x58, 0x0a, 0x04, 0x73, 0x6c, 0x6f, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, 0x42, 0x44, 0x82, 0xb5, 0x18, 0x40, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, - 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, + 0x73, 0x6d, 0x2f, 0x76, 0x37, 
0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x53, 0x6c, 0x6f, 0x74, 0x52, 0x04, 0x73, 0x6c, 0x6f, 0x74, 0x12, 0x77, 0x0a, 0x0f, 0x63, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x74, 0x65, 0x65, 0x5f, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x18, 0x02, 0x20, 0x01, 0x28, 0x04, 0x42, 0x4e, 0x82, 0xb5, 0x18, 0x4a, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, - 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, + 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x43, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x74, 0x65, 0x65, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x52, 0x0e, 0x63, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x74, 0x65, 0x65, 0x49, @@ -650,7 +650,7 @@ var file_proto_prysm_v1alpha1_attestation_proto_rawDesc = []byte{ 0x05, 0x65, 0x70, 0x6f, 0x63, 0x68, 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, 0x42, 0x45, 0x82, 0xb5, 0x18, 0x41, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, - 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, + 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x45, 0x70, 0x6f, 0x63, 0x68, 0x52, 0x05, 0x65, 0x70, 0x6f, 0x63, 0x68, 0x12, 0x1a, 0x0a, 0x04, 0x72, 0x6f, 0x6f, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x06, 0x8a, 0xb5, 0x18, 0x02, 0x33, 0x32, @@ -671,7 +671,7 @@ var file_proto_prysm_v1alpha1_attestation_proto_rawDesc = []byte{ 0x65, 0x67, 0x61, 0x74, 0x6f, 0x72, 0x5f, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, 0x42, 0x4e, 0x82, 0xb5, 0x18, 0x4a, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, - 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, + 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x56, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x52, 0x0f, 0x61, 0x67, 0x67, 0x72, 0x65, 0x67, 0x61, 0x74, 0x6f, 0x72, 0x49, 0x6e, @@ -707,7 +707,7 @@ var file_proto_prysm_v1alpha1_attestation_proto_rawDesc = []byte{ 0x69, 0x6f, 0x6e, 0x12, 0x71, 0x0a, 0x0c, 0x63, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x74, 0x65, 0x65, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, 0x42, 0x4e, 0x82, 0xb5, 0x18, 0x4a, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, - 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, + 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x43, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x74, 0x65, 0x65, 0x49, 0x6e, 0x64, 
0x65, 0x78, 0x52, 0x0b, 0x63, 0x6f, 0x6d, 0x6d, 0x69, @@ -715,7 +715,7 @@ var file_proto_prysm_v1alpha1_attestation_proto_rawDesc = []byte{ 0x65, 0x72, 0x5f, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x18, 0x02, 0x20, 0x01, 0x28, 0x04, 0x42, 0x4e, 0x82, 0xb5, 0x18, 0x4a, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, - 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, + 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x56, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x52, 0x0d, 0x61, 0x74, 0x74, 0x65, 0x73, 0x74, 0x65, 0x72, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x12, 0x3a, 0x0a, @@ -730,7 +730,7 @@ var file_proto_prysm_v1alpha1_attestation_proto_rawDesc = []byte{ 0x74, 0x74, 0x65, 0x73, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x50, 0x72, 0x6f, 0x74, 0x6f, 0x50, 0x01, 0x5a, 0x39, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, - 0x2f, 0x76, 0x36, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, + 0x2f, 0x76, 0x37, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x3b, 0x65, 0x74, 0x68, 0xaa, 0x02, 0x15, 0x45, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x45, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0xca, 0x02, 0x15, 0x45, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x5c, diff --git a/proto/prysm/v1alpha1/attestation.proto b/proto/prysm/v1alpha1/attestation.proto index b3069fd6cf..bb6ad393d7 100644 --- a/proto/prysm/v1alpha1/attestation.proto +++ b/proto/prysm/v1alpha1/attestation.proto @@ -18,7 +18,7 @@ package ethereum.eth.v1alpha1; import "proto/eth/ext/options.proto"; option csharp_namespace = "Ethereum.Eth.v1alpha1"; -option go_package = "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1;eth"; +option go_package = "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1;eth"; option java_multiple_files = true; option java_outer_classname = "AttestationProto"; option java_package = "org.ethereum.eth.v1alpha1"; @@ -40,7 +40,7 @@ message AggregateAttestationAndProof { // The aggregator index that submitted this aggregated attestation and proof. uint64 aggregator_index = 1 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/" + "github.com/OffchainLabs/prysm/v7/consensus-types/" "primitives.ValidatorIndex" ]; // The aggregated attestation that was submitted. @@ -73,13 +73,13 @@ message AttestationData { // Slot of the attestation attesting for. uint64 slot = 1 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Slot" ]; // The committee index that submitted this attestation. uint64 committee_index = 2 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/" + "github.com/OffchainLabs/prysm/v7/consensus-types/" "primitives.CommitteeIndex" ]; // 32 byte root of the LMD GHOST block vote. @@ -100,7 +100,7 @@ message Checkpoint { // Epoch the checkpoint references. 
uint64 epoch = 1 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Epoch" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Epoch" ]; // Block root of the checkpoint references. @@ -123,7 +123,7 @@ message AggregateAttestationAndProofElectra { // The aggregator index that submitted this aggregated attestation and proof. uint64 aggregator_index = 1 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/" + "github.com/OffchainLabs/prysm/v7/consensus-types/" "primitives.ValidatorIndex" ]; // The aggregated attestation that was submitted. @@ -156,10 +156,10 @@ message AttestationElectra { message SingleAttestation { uint64 committee_id = 1 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/" + "github.com/OffchainLabs/prysm/v7/" "consensus-types/primitives.CommitteeIndex" ]; uint64 attester_index = 2 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/" + "github.com/OffchainLabs/prysm/v7/" "consensus-types/primitives.ValidatorIndex" ]; AttestationData data = 3; bytes signature = 4 [ (ethereum.eth.ext.ssz_size) = "96" ]; diff --git a/proto/prysm/v1alpha1/attestation/BUILD.bazel b/proto/prysm/v1alpha1/attestation/BUILD.bazel index 00da0e7a62..3fe7967988 100644 --- a/proto/prysm/v1alpha1/attestation/BUILD.bazel +++ b/proto/prysm/v1alpha1/attestation/BUILD.bazel @@ -6,7 +6,7 @@ go_library( "attestation_utils.go", "id.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1/attestation", + importpath = "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1/attestation", visibility = ["//visibility:public"], deps = [ "//beacon-chain/core/signing:go_default_library", diff --git a/proto/prysm/v1alpha1/attestation/aggregation/BUILD.bazel b/proto/prysm/v1alpha1/attestation/aggregation/BUILD.bazel index c056e85313..fd34854524 100644 --- a/proto/prysm/v1alpha1/attestation/aggregation/BUILD.bazel +++ b/proto/prysm/v1alpha1/attestation/aggregation/BUILD.bazel @@ -6,7 +6,7 @@ go_library( "aggregation.go", "maxcover.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1/attestation/aggregation", + importpath = "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1/attestation/aggregation", visibility = ["//visibility:public"], deps = [ "@com_github_pkg_errors//:go_default_library", diff --git a/proto/prysm/v1alpha1/attestation/aggregation/attestations/BUILD.bazel b/proto/prysm/v1alpha1/attestation/aggregation/attestations/BUILD.bazel index 60168e29a9..fa374c7f91 100644 --- a/proto/prysm/v1alpha1/attestation/aggregation/attestations/BUILD.bazel +++ b/proto/prysm/v1alpha1/attestation/aggregation/attestations/BUILD.bazel @@ -6,7 +6,7 @@ go_library( "attestations.go", "maxcover.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1/attestation/aggregation/attestations", + importpath = "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1/attestation/aggregation/attestations", visibility = ["//visibility:public"], deps = [ "//crypto/bls:go_default_library", diff --git a/proto/prysm/v1alpha1/attestation/aggregation/attestations/attestations.go b/proto/prysm/v1alpha1/attestation/aggregation/attestations/attestations.go index f1ba6b0384..78889dbb13 100644 --- a/proto/prysm/v1alpha1/attestation/aggregation/attestations/attestations.go +++ b/proto/prysm/v1alpha1/attestation/aggregation/attestations/attestations.go @@ -1,9 +1,9 @@ package attestations import ( - "github.com/OffchainLabs/prysm/v6/crypto/bls" - ethpb 
"github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1/attestation/aggregation" + "github.com/OffchainLabs/prysm/v7/crypto/bls" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1/attestation/aggregation" "github.com/pkg/errors" "github.com/sirupsen/logrus" ) diff --git a/proto/prysm/v1alpha1/attestation/aggregation/attestations/attestations_test.go b/proto/prysm/v1alpha1/attestation/aggregation/attestations/attestations_test.go index 0d2cd0e85a..cc5ad03403 100644 --- a/proto/prysm/v1alpha1/attestation/aggregation/attestations/attestations_test.go +++ b/proto/prysm/v1alpha1/attestation/aggregation/attestations/attestations_test.go @@ -7,14 +7,14 @@ import ( "testing" "github.com/OffchainLabs/go-bitfield" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/encoding/ssz/equality" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1/attestation/aggregation" - "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1/attestation/aggregation/attestations" - aggtesting "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1/attestation/aggregation/testing" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/encoding/ssz/equality" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1/attestation/aggregation" + "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1/attestation/aggregation/attestations" + aggtesting "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1/attestation/aggregation/testing" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" "github.com/sirupsen/logrus" ) diff --git a/proto/prysm/v1alpha1/attestation/aggregation/attestations/maxcover.go b/proto/prysm/v1alpha1/attestation/aggregation/attestations/maxcover.go index 0b91af8c59..4b8c078cbf 100644 --- a/proto/prysm/v1alpha1/attestation/aggregation/attestations/maxcover.go +++ b/proto/prysm/v1alpha1/attestation/aggregation/attestations/maxcover.go @@ -4,10 +4,10 @@ import ( "sort" "github.com/OffchainLabs/go-bitfield" - "github.com/OffchainLabs/prysm/v6/crypto/bls" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1/attestation/aggregation" - "github.com/OffchainLabs/prysm/v6/runtime/version" + "github.com/OffchainLabs/prysm/v7/crypto/bls" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1/attestation/aggregation" + "github.com/OffchainLabs/prysm/v7/runtime/version" "github.com/pkg/errors" ) diff --git a/proto/prysm/v1alpha1/attestation/aggregation/attestations/maxcover_test.go b/proto/prysm/v1alpha1/attestation/aggregation/attestations/maxcover_test.go index a7d03a8b7a..13bdfd7227 100644 --- a/proto/prysm/v1alpha1/attestation/aggregation/attestations/maxcover_test.go +++ b/proto/prysm/v1alpha1/attestation/aggregation/attestations/maxcover_test.go @@ -4,11 +4,11 @@ import ( "testing" "github.com/OffchainLabs/go-bitfield" - "github.com/OffchainLabs/prysm/v6/crypto/bls" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1/attestation/aggregation" - 
"github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1/attestation/aggregation/attestations" - "github.com/OffchainLabs/prysm/v6/testing/assert" + "github.com/OffchainLabs/prysm/v7/crypto/bls" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1/attestation/aggregation" + "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1/attestation/aggregation/attestations" + "github.com/OffchainLabs/prysm/v7/testing/assert" ) func TestAggregateAttestations_MaxCover_NewMaxCover(t *testing.T) { diff --git a/proto/prysm/v1alpha1/attestation/aggregation/maxcover_bench_test.go b/proto/prysm/v1alpha1/attestation/aggregation/maxcover_bench_test.go index 61dea9be3e..8d47200eb1 100644 --- a/proto/prysm/v1alpha1/attestation/aggregation/maxcover_bench_test.go +++ b/proto/prysm/v1alpha1/attestation/aggregation/maxcover_bench_test.go @@ -5,9 +5,9 @@ import ( "testing" "github.com/OffchainLabs/go-bitfield" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1/attestation/aggregation" - aggtesting "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1/attestation/aggregation/testing" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1/attestation/aggregation" + aggtesting "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1/attestation/aggregation/testing" ) func BenchmarkMaxCoverProblem_MaxCover(b *testing.B) { diff --git a/proto/prysm/v1alpha1/attestation/aggregation/maxcover_test.go b/proto/prysm/v1alpha1/attestation/aggregation/maxcover_test.go index 7c385062b2..83dbfa956d 100644 --- a/proto/prysm/v1alpha1/attestation/aggregation/maxcover_test.go +++ b/proto/prysm/v1alpha1/attestation/aggregation/maxcover_test.go @@ -6,8 +6,8 @@ import ( "testing" "github.com/OffchainLabs/go-bitfield" - aggtesting "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1/attestation/aggregation/testing" - "github.com/OffchainLabs/prysm/v6/testing/assert" + aggtesting "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1/attestation/aggregation/testing" + "github.com/OffchainLabs/prysm/v7/testing/assert" ) func TestMaxCover_MaxCoverCandidates_filter(t *testing.T) { diff --git a/proto/prysm/v1alpha1/attestation/aggregation/sync_contribution/BUILD.bazel b/proto/prysm/v1alpha1/attestation/aggregation/sync_contribution/BUILD.bazel index e2143e863b..2406c5951e 100644 --- a/proto/prysm/v1alpha1/attestation/aggregation/sync_contribution/BUILD.bazel +++ b/proto/prysm/v1alpha1/attestation/aggregation/sync_contribution/BUILD.bazel @@ -6,7 +6,7 @@ go_library( "contribution.go", "naive.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1/attestation/aggregation/sync_contribution", + importpath = "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1/attestation/aggregation/sync_contribution", visibility = ["//visibility:public"], deps = [ "//crypto/bls:go_default_library", diff --git a/proto/prysm/v1alpha1/attestation/aggregation/sync_contribution/contribution.go b/proto/prysm/v1alpha1/attestation/aggregation/sync_contribution/contribution.go index c93bcbbf68..cf6610ddb6 100644 --- a/proto/prysm/v1alpha1/attestation/aggregation/sync_contribution/contribution.go +++ b/proto/prysm/v1alpha1/attestation/aggregation/sync_contribution/contribution.go @@ -1,8 +1,8 @@ package sync_contribution import ( - v2 "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1/attestation/aggregation" + v2 
"github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1/attestation/aggregation" "github.com/pkg/errors" "github.com/sirupsen/logrus" ) diff --git a/proto/prysm/v1alpha1/attestation/aggregation/sync_contribution/naive.go b/proto/prysm/v1alpha1/attestation/aggregation/sync_contribution/naive.go index 048b4ca318..46cbdbfdd0 100644 --- a/proto/prysm/v1alpha1/attestation/aggregation/sync_contribution/naive.go +++ b/proto/prysm/v1alpha1/attestation/aggregation/sync_contribution/naive.go @@ -1,9 +1,9 @@ package sync_contribution import ( - "github.com/OffchainLabs/prysm/v6/crypto/bls" - v2 "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1/attestation/aggregation" + "github.com/OffchainLabs/prysm/v7/crypto/bls" + v2 "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1/attestation/aggregation" ) // naiveSyncContributionAggregation aggregates naively, without any complex algorithms or optimizations. diff --git a/proto/prysm/v1alpha1/attestation/aggregation/sync_contribution/naive_test.go b/proto/prysm/v1alpha1/attestation/aggregation/sync_contribution/naive_test.go index 5c9c91af54..f1d8fe9ee2 100644 --- a/proto/prysm/v1alpha1/attestation/aggregation/sync_contribution/naive_test.go +++ b/proto/prysm/v1alpha1/attestation/aggregation/sync_contribution/naive_test.go @@ -6,12 +6,12 @@ import ( "testing" "github.com/OffchainLabs/go-bitfield" - "github.com/OffchainLabs/prysm/v6/crypto/bls" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1/attestation/aggregation" - aggtesting "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1/attestation/aggregation/testing" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/crypto/bls" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1/attestation/aggregation" + aggtesting "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1/attestation/aggregation/testing" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" ) func TestAggregateAttestations_aggregate(t *testing.T) { diff --git a/proto/prysm/v1alpha1/attestation/aggregation/testing/BUILD.bazel b/proto/prysm/v1alpha1/attestation/aggregation/testing/BUILD.bazel index b5d04a5c38..99a5c700f9 100644 --- a/proto/prysm/v1alpha1/attestation/aggregation/testing/BUILD.bazel +++ b/proto/prysm/v1alpha1/attestation/aggregation/testing/BUILD.bazel @@ -4,7 +4,7 @@ go_library( name = "go_default_library", testonly = True, srcs = ["bitlistutils.go"], - importpath = "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1/attestation/aggregation/testing", + importpath = "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1/attestation/aggregation/testing", visibility = ["//visibility:public"], deps = [ "//consensus-types/primitives:go_default_library", diff --git a/proto/prysm/v1alpha1/attestation/aggregation/testing/bitlistutils.go b/proto/prysm/v1alpha1/attestation/aggregation/testing/bitlistutils.go index 6593084efc..48c6392d91 100644 --- a/proto/prysm/v1alpha1/attestation/aggregation/testing/bitlistutils.go +++ b/proto/prysm/v1alpha1/attestation/aggregation/testing/bitlistutils.go @@ -5,10 +5,10 @@ import ( "testing" "github.com/OffchainLabs/go-bitfield" - 
"github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/crypto/bls" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/time" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/crypto/bls" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/time" ) // BitlistWithAllBitsSet creates list of bitlists with all bits set. diff --git a/proto/prysm/v1alpha1/attestation/attestation_utils.go b/proto/prysm/v1alpha1/attestation/attestation_utils.go index 19525cef86..effec69855 100644 --- a/proto/prysm/v1alpha1/attestation/attestation_utils.go +++ b/proto/prysm/v1alpha1/attestation/attestation_utils.go @@ -11,12 +11,12 @@ import ( "sort" "github.com/OffchainLabs/go-bitfield" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/signing" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/crypto/bls" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing/trace" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/signing" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/crypto/bls" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing/trace" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" "github.com/pkg/errors" ) diff --git a/proto/prysm/v1alpha1/attestation/attestation_utils_test.go b/proto/prysm/v1alpha1/attestation/attestation_utils_test.go index d4b33bdb8a..565d9132df 100644 --- a/proto/prysm/v1alpha1/attestation/attestation_utils_test.go +++ b/proto/prysm/v1alpha1/attestation/attestation_utils_test.go @@ -4,13 +4,13 @@ import ( "testing" "github.com/OffchainLabs/go-bitfield" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - eth "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1/attestation" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + eth "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1/attestation" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" ) func TestAttestingIndices(t *testing.T) { diff --git a/proto/prysm/v1alpha1/attestation/id.go b/proto/prysm/v1alpha1/attestation/id.go index 97a5c49757..420efdcc32 100644 --- a/proto/prysm/v1alpha1/attestation/id.go +++ b/proto/prysm/v1alpha1/attestation/id.go @@ -4,9 +4,9 @@ import ( "strconv" "strings" - "github.com/OffchainLabs/prysm/v6/crypto/hash" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" + "github.com/OffchainLabs/prysm/v7/crypto/hash" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" "github.com/pkg/errors" ) diff --git a/proto/prysm/v1alpha1/attestation/id_test.go 
b/proto/prysm/v1alpha1/attestation/id_test.go index 3a20f3191f..0a8b4b32d7 100644 --- a/proto/prysm/v1alpha1/attestation/id_test.go +++ b/proto/prysm/v1alpha1/attestation/id_test.go @@ -3,12 +3,12 @@ package attestation_test import ( "testing" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1/attestation" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1/attestation" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" ) func TestNewId(t *testing.T) { diff --git a/proto/prysm/v1alpha1/attestation_fuzz_test.go b/proto/prysm/v1alpha1/attestation_fuzz_test.go index fcadadba03..34449d9862 100644 --- a/proto/prysm/v1alpha1/attestation_fuzz_test.go +++ b/proto/prysm/v1alpha1/attestation_fuzz_test.go @@ -3,7 +3,7 @@ package eth_test import ( "testing" - eth "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + eth "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" ) func TestCopyAttestation_Fuzz(t *testing.T) { diff --git a/proto/prysm/v1alpha1/beacon_block.go b/proto/prysm/v1alpha1/beacon_block.go index 9363392a33..45144ed77e 100644 --- a/proto/prysm/v1alpha1/beacon_block.go +++ b/proto/prysm/v1alpha1/beacon_block.go @@ -1,8 +1,8 @@ package eth import ( - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - enginev1 "github.com/OffchainLabs/prysm/v6/proto/engine/v1" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + enginev1 "github.com/OffchainLabs/prysm/v7/proto/engine/v1" ) // GenericConverter defines any struct that can be converted to a generic beacon block. 
diff --git a/proto/prysm/v1alpha1/beacon_block.pb.go b/proto/prysm/v1alpha1/beacon_block.pb.go index 760bdb149b..95366279ea 100755 --- a/proto/prysm/v1alpha1/beacon_block.pb.go +++ b/proto/prysm/v1alpha1/beacon_block.pb.go @@ -10,9 +10,9 @@ import ( reflect "reflect" sync "sync" - github_com_OffchainLabs_prysm_v6_consensus_types_primitives "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - v1 "github.com/OffchainLabs/prysm/v6/proto/engine/v1" - _ "github.com/OffchainLabs/prysm/v6/proto/eth/ext" + github_com_OffchainLabs_prysm_v7_consensus_types_primitives "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + v1 "github.com/OffchainLabs/prysm/v7/proto/engine/v1" + _ "github.com/OffchainLabs/prysm/v7/proto/eth/ext" protoreflect "google.golang.org/protobuf/reflect/protoreflect" protoimpl "google.golang.org/protobuf/runtime/protoimpl" ) @@ -620,8 +620,8 @@ func (x *SignedBeaconBlock) GetSignature() []byte { type BeaconBlock struct { state protoimpl.MessageState `protogen:"open.v1"` - Slot github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot `protobuf:"varint,1,opt,name=slot,proto3" json:"slot,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot"` - ProposerIndex github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex `protobuf:"varint,2,opt,name=proposer_index,json=proposerIndex,proto3" json:"proposer_index,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.ValidatorIndex"` + Slot github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot `protobuf:"varint,1,opt,name=slot,proto3" json:"slot,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Slot"` + ProposerIndex github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex `protobuf:"varint,2,opt,name=proposer_index,json=proposerIndex,proto3" json:"proposer_index,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.ValidatorIndex"` ParentRoot []byte `protobuf:"bytes,3,opt,name=parent_root,json=parentRoot,proto3" json:"parent_root,omitempty" ssz-size:"32"` StateRoot []byte `protobuf:"bytes,4,opt,name=state_root,json=stateRoot,proto3" json:"state_root,omitempty" ssz-size:"32"` Body *BeaconBlockBody `protobuf:"bytes,5,opt,name=body,proto3" json:"body,omitempty"` @@ -659,18 +659,18 @@ func (*BeaconBlock) Descriptor() ([]byte, []int) { return file_proto_prysm_v1alpha1_beacon_block_proto_rawDescGZIP(), []int{3} } -func (x *BeaconBlock) GetSlot() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot { +func (x *BeaconBlock) GetSlot() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot { if x != nil { return x.Slot } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot(0) } -func (x *BeaconBlock) GetProposerIndex() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex { +func (x *BeaconBlock) GetProposerIndex() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex { if x != nil { return x.ProposerIndex } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex(0) } func (x *BeaconBlock) GetParentRoot() []byte { @@ -1237,8 +1237,8 @@ func (x *SignedBeaconBlockAltair) GetSignature() []byte { type BeaconBlockAltair struct { state protoimpl.MessageState `protogen:"open.v1"` - Slot 
github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot `protobuf:"varint,1,opt,name=slot,proto3" json:"slot,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot"` - ProposerIndex github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex `protobuf:"varint,2,opt,name=proposer_index,json=proposerIndex,proto3" json:"proposer_index,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.ValidatorIndex"` + Slot github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot `protobuf:"varint,1,opt,name=slot,proto3" json:"slot,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Slot"` + ProposerIndex github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex `protobuf:"varint,2,opt,name=proposer_index,json=proposerIndex,proto3" json:"proposer_index,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.ValidatorIndex"` ParentRoot []byte `protobuf:"bytes,3,opt,name=parent_root,json=parentRoot,proto3" json:"parent_root,omitempty" ssz-size:"32"` StateRoot []byte `protobuf:"bytes,4,opt,name=state_root,json=stateRoot,proto3" json:"state_root,omitempty" ssz-size:"32"` Body *BeaconBlockBodyAltair `protobuf:"bytes,5,opt,name=body,proto3" json:"body,omitempty"` @@ -1276,18 +1276,18 @@ func (*BeaconBlockAltair) Descriptor() ([]byte, []int) { return file_proto_prysm_v1alpha1_beacon_block_proto_rawDescGZIP(), []int{13} } -func (x *BeaconBlockAltair) GetSlot() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot { +func (x *BeaconBlockAltair) GetSlot() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot { if x != nil { return x.Slot } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot(0) } -func (x *BeaconBlockAltair) GetProposerIndex() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex { +func (x *BeaconBlockAltair) GetProposerIndex() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex { if x != nil { return x.ProposerIndex } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex(0) } func (x *BeaconBlockAltair) GetParentRoot() []byte { @@ -1473,8 +1473,8 @@ func (x *SignedBeaconBlockBellatrix) GetSignature() []byte { type BeaconBlockBellatrix struct { state protoimpl.MessageState `protogen:"open.v1"` - Slot github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot `protobuf:"varint,1,opt,name=slot,proto3" json:"slot,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot"` - ProposerIndex github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex `protobuf:"varint,2,opt,name=proposer_index,json=proposerIndex,proto3" json:"proposer_index,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.ValidatorIndex"` + Slot github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot `protobuf:"varint,1,opt,name=slot,proto3" json:"slot,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Slot"` + ProposerIndex github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex `protobuf:"varint,2,opt,name=proposer_index,json=proposerIndex,proto3" json:"proposer_index,omitempty" 
cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.ValidatorIndex"` ParentRoot []byte `protobuf:"bytes,3,opt,name=parent_root,json=parentRoot,proto3" json:"parent_root,omitempty" ssz-size:"32"` StateRoot []byte `protobuf:"bytes,4,opt,name=state_root,json=stateRoot,proto3" json:"state_root,omitempty" ssz-size:"32"` Body *BeaconBlockBodyBellatrix `protobuf:"bytes,5,opt,name=body,proto3" json:"body,omitempty"` @@ -1512,18 +1512,18 @@ func (*BeaconBlockBellatrix) Descriptor() ([]byte, []int) { return file_proto_prysm_v1alpha1_beacon_block_proto_rawDescGZIP(), []int{16} } -func (x *BeaconBlockBellatrix) GetSlot() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot { +func (x *BeaconBlockBellatrix) GetSlot() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot { if x != nil { return x.Slot } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot(0) } -func (x *BeaconBlockBellatrix) GetProposerIndex() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex { +func (x *BeaconBlockBellatrix) GetProposerIndex() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex { if x != nil { return x.ProposerIndex } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex(0) } func (x *BeaconBlockBellatrix) GetParentRoot() []byte { @@ -1717,8 +1717,8 @@ func (x *SignedBlindedBeaconBlockBellatrix) GetSignature() []byte { type BlindedBeaconBlockBellatrix struct { state protoimpl.MessageState `protogen:"open.v1"` - Slot github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot `protobuf:"varint,1,opt,name=slot,proto3" json:"slot,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot"` - ProposerIndex github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex `protobuf:"varint,2,opt,name=proposer_index,json=proposerIndex,proto3" json:"proposer_index,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.ValidatorIndex"` + Slot github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot `protobuf:"varint,1,opt,name=slot,proto3" json:"slot,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Slot"` + ProposerIndex github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex `protobuf:"varint,2,opt,name=proposer_index,json=proposerIndex,proto3" json:"proposer_index,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.ValidatorIndex"` ParentRoot []byte `protobuf:"bytes,3,opt,name=parent_root,json=parentRoot,proto3" json:"parent_root,omitempty" ssz-size:"32"` StateRoot []byte `protobuf:"bytes,4,opt,name=state_root,json=stateRoot,proto3" json:"state_root,omitempty" ssz-size:"32"` Body *BlindedBeaconBlockBodyBellatrix `protobuf:"bytes,5,opt,name=body,proto3" json:"body,omitempty"` @@ -1756,18 +1756,18 @@ func (*BlindedBeaconBlockBellatrix) Descriptor() ([]byte, []int) { return file_proto_prysm_v1alpha1_beacon_block_proto_rawDescGZIP(), []int{19} } -func (x *BlindedBeaconBlockBellatrix) GetSlot() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot { +func (x *BlindedBeaconBlockBellatrix) GetSlot() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot { if x != nil { return x.Slot } - return 
github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot(0) } -func (x *BlindedBeaconBlockBellatrix) GetProposerIndex() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex { +func (x *BlindedBeaconBlockBellatrix) GetProposerIndex() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex { if x != nil { return x.ProposerIndex } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex(0) } func (x *BlindedBeaconBlockBellatrix) GetParentRoot() []byte { @@ -1961,8 +1961,8 @@ func (x *SignedBeaconBlockCapella) GetSignature() []byte { type BeaconBlockCapella struct { state protoimpl.MessageState `protogen:"open.v1"` - Slot github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot `protobuf:"varint,1,opt,name=slot,proto3" json:"slot,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot"` - ProposerIndex github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex `protobuf:"varint,2,opt,name=proposer_index,json=proposerIndex,proto3" json:"proposer_index,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.ValidatorIndex"` + Slot github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot `protobuf:"varint,1,opt,name=slot,proto3" json:"slot,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Slot"` + ProposerIndex github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex `protobuf:"varint,2,opt,name=proposer_index,json=proposerIndex,proto3" json:"proposer_index,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.ValidatorIndex"` ParentRoot []byte `protobuf:"bytes,3,opt,name=parent_root,json=parentRoot,proto3" json:"parent_root,omitempty" ssz-size:"32"` StateRoot []byte `protobuf:"bytes,4,opt,name=state_root,json=stateRoot,proto3" json:"state_root,omitempty" ssz-size:"32"` Body *BeaconBlockBodyCapella `protobuf:"bytes,5,opt,name=body,proto3" json:"body,omitempty"` @@ -2000,18 +2000,18 @@ func (*BeaconBlockCapella) Descriptor() ([]byte, []int) { return file_proto_prysm_v1alpha1_beacon_block_proto_rawDescGZIP(), []int{22} } -func (x *BeaconBlockCapella) GetSlot() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot { +func (x *BeaconBlockCapella) GetSlot() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot { if x != nil { return x.Slot } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot(0) } -func (x *BeaconBlockCapella) GetProposerIndex() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex { +func (x *BeaconBlockCapella) GetProposerIndex() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex { if x != nil { return x.ProposerIndex } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex(0) } func (x *BeaconBlockCapella) GetParentRoot() []byte { @@ -2213,8 +2213,8 @@ func (x *SignedBlindedBeaconBlockCapella) GetSignature() []byte { type BlindedBeaconBlockCapella struct { state protoimpl.MessageState `protogen:"open.v1"` - Slot github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot 
`protobuf:"varint,1,opt,name=slot,proto3" json:"slot,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot"` - ProposerIndex github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex `protobuf:"varint,2,opt,name=proposer_index,json=proposerIndex,proto3" json:"proposer_index,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.ValidatorIndex"` + Slot github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot `protobuf:"varint,1,opt,name=slot,proto3" json:"slot,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Slot"` + ProposerIndex github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex `protobuf:"varint,2,opt,name=proposer_index,json=proposerIndex,proto3" json:"proposer_index,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.ValidatorIndex"` ParentRoot []byte `protobuf:"bytes,3,opt,name=parent_root,json=parentRoot,proto3" json:"parent_root,omitempty" ssz-size:"32"` StateRoot []byte `protobuf:"bytes,4,opt,name=state_root,json=stateRoot,proto3" json:"state_root,omitempty" ssz-size:"32"` Body *BlindedBeaconBlockBodyCapella `protobuf:"bytes,5,opt,name=body,proto3" json:"body,omitempty"` @@ -2252,18 +2252,18 @@ func (*BlindedBeaconBlockCapella) Descriptor() ([]byte, []int) { return file_proto_prysm_v1alpha1_beacon_block_proto_rawDescGZIP(), []int{25} } -func (x *BlindedBeaconBlockCapella) GetSlot() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot { +func (x *BlindedBeaconBlockCapella) GetSlot() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot { if x != nil { return x.Slot } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot(0) } -func (x *BlindedBeaconBlockCapella) GetProposerIndex() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex { +func (x *BlindedBeaconBlockCapella) GetProposerIndex() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex { if x != nil { return x.ProposerIndex } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex(0) } func (x *BlindedBeaconBlockCapella) GetParentRoot() []byte { @@ -2697,8 +2697,8 @@ func (x *BeaconBlockContentsDeneb) GetBlobs() [][]byte { type BeaconBlockDeneb struct { state protoimpl.MessageState `protogen:"open.v1"` - Slot github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot `protobuf:"varint,1,opt,name=slot,proto3" json:"slot,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot"` - ProposerIndex github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex `protobuf:"varint,2,opt,name=proposer_index,json=proposerIndex,proto3" json:"proposer_index,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.ValidatorIndex"` + Slot github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot `protobuf:"varint,1,opt,name=slot,proto3" json:"slot,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Slot"` + ProposerIndex github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex `protobuf:"varint,2,opt,name=proposer_index,json=proposerIndex,proto3" json:"proposer_index,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.ValidatorIndex"` ParentRoot 
[]byte `protobuf:"bytes,3,opt,name=parent_root,json=parentRoot,proto3" json:"parent_root,omitempty" ssz-size:"32"` StateRoot []byte `protobuf:"bytes,4,opt,name=state_root,json=stateRoot,proto3" json:"state_root,omitempty" ssz-size:"32"` Body *BeaconBlockBodyDeneb `protobuf:"bytes,5,opt,name=body,proto3" json:"body,omitempty"` @@ -2736,18 +2736,18 @@ func (*BeaconBlockDeneb) Descriptor() ([]byte, []int) { return file_proto_prysm_v1alpha1_beacon_block_proto_rawDescGZIP(), []int{32} } -func (x *BeaconBlockDeneb) GetSlot() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot { +func (x *BeaconBlockDeneb) GetSlot() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot { if x != nil { return x.Slot } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot(0) } -func (x *BeaconBlockDeneb) GetProposerIndex() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex { +func (x *BeaconBlockDeneb) GetProposerIndex() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex { if x != nil { return x.ProposerIndex } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex(0) } func (x *BeaconBlockDeneb) GetParentRoot() []byte { @@ -2957,8 +2957,8 @@ func (x *SignedBlindedBeaconBlockDeneb) GetSignature() []byte { type BlindedBeaconBlockDeneb struct { state protoimpl.MessageState `protogen:"open.v1"` - Slot github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot `protobuf:"varint,1,opt,name=slot,proto3" json:"slot,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot"` - ProposerIndex github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex `protobuf:"varint,2,opt,name=proposer_index,json=proposerIndex,proto3" json:"proposer_index,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.ValidatorIndex"` + Slot github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot `protobuf:"varint,1,opt,name=slot,proto3" json:"slot,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Slot"` + ProposerIndex github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex `protobuf:"varint,2,opt,name=proposer_index,json=proposerIndex,proto3" json:"proposer_index,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.ValidatorIndex"` ParentRoot []byte `protobuf:"bytes,3,opt,name=parent_root,json=parentRoot,proto3" json:"parent_root,omitempty" ssz-size:"32"` StateRoot []byte `protobuf:"bytes,4,opt,name=state_root,json=stateRoot,proto3" json:"state_root,omitempty" ssz-size:"32"` Body *BlindedBeaconBlockBodyDeneb `protobuf:"bytes,5,opt,name=body,proto3" json:"body,omitempty"` @@ -2996,18 +2996,18 @@ func (*BlindedBeaconBlockDeneb) Descriptor() ([]byte, []int) { return file_proto_prysm_v1alpha1_beacon_block_proto_rawDescGZIP(), []int{35} } -func (x *BlindedBeaconBlockDeneb) GetSlot() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot { +func (x *BlindedBeaconBlockDeneb) GetSlot() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot { if x != nil { return x.Slot } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot(0) } -func (x *BlindedBeaconBlockDeneb) GetProposerIndex() 
github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex { +func (x *BlindedBeaconBlockDeneb) GetProposerIndex() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex { if x != nil { return x.ProposerIndex } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex(0) } func (x *BlindedBeaconBlockDeneb) GetParentRoot() []byte { @@ -3713,8 +3713,8 @@ func (x *BeaconBlockContentsElectra) GetBlobs() [][]byte { type BeaconBlockElectra struct { state protoimpl.MessageState `protogen:"open.v1"` - Slot github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot `protobuf:"varint,1,opt,name=slot,proto3" json:"slot,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot"` - ProposerIndex github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex `protobuf:"varint,2,opt,name=proposer_index,json=proposerIndex,proto3" json:"proposer_index,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.ValidatorIndex"` + Slot github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot `protobuf:"varint,1,opt,name=slot,proto3" json:"slot,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Slot"` + ProposerIndex github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex `protobuf:"varint,2,opt,name=proposer_index,json=proposerIndex,proto3" json:"proposer_index,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.ValidatorIndex"` ParentRoot []byte `protobuf:"bytes,3,opt,name=parent_root,json=parentRoot,proto3" json:"parent_root,omitempty" ssz-size:"32"` StateRoot []byte `protobuf:"bytes,4,opt,name=state_root,json=stateRoot,proto3" json:"state_root,omitempty" ssz-size:"32"` Body *BeaconBlockBodyElectra `protobuf:"bytes,5,opt,name=body,proto3" json:"body,omitempty"` @@ -3752,18 +3752,18 @@ func (*BeaconBlockElectra) Descriptor() ([]byte, []int) { return file_proto_prysm_v1alpha1_beacon_block_proto_rawDescGZIP(), []int{46} } -func (x *BeaconBlockElectra) GetSlot() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot { +func (x *BeaconBlockElectra) GetSlot() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot { if x != nil { return x.Slot } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot(0) } -func (x *BeaconBlockElectra) GetProposerIndex() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex { +func (x *BeaconBlockElectra) GetProposerIndex() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex { if x != nil { return x.ProposerIndex } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex(0) } func (x *BeaconBlockElectra) GetParentRoot() []byte { @@ -3981,8 +3981,8 @@ func (x *SignedBlindedBeaconBlockElectra) GetSignature() []byte { type BlindedBeaconBlockElectra struct { state protoimpl.MessageState `protogen:"open.v1"` - Slot github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot `protobuf:"varint,1,opt,name=slot,proto3" json:"slot,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot"` - ProposerIndex github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex 
`protobuf:"varint,2,opt,name=proposer_index,json=proposerIndex,proto3" json:"proposer_index,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.ValidatorIndex"` + Slot github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot `protobuf:"varint,1,opt,name=slot,proto3" json:"slot,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Slot"` + ProposerIndex github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex `protobuf:"varint,2,opt,name=proposer_index,json=proposerIndex,proto3" json:"proposer_index,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.ValidatorIndex"` ParentRoot []byte `protobuf:"bytes,3,opt,name=parent_root,json=parentRoot,proto3" json:"parent_root,omitempty" ssz-size:"32"` StateRoot []byte `protobuf:"bytes,4,opt,name=state_root,json=stateRoot,proto3" json:"state_root,omitempty" ssz-size:"32"` Body *BlindedBeaconBlockBodyElectra `protobuf:"bytes,5,opt,name=body,proto3" json:"body,omitempty"` @@ -4020,18 +4020,18 @@ func (*BlindedBeaconBlockElectra) Descriptor() ([]byte, []int) { return file_proto_prysm_v1alpha1_beacon_block_proto_rawDescGZIP(), []int{49} } -func (x *BlindedBeaconBlockElectra) GetSlot() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot { +func (x *BlindedBeaconBlockElectra) GetSlot() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot { if x != nil { return x.Slot } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot(0) } -func (x *BlindedBeaconBlockElectra) GetProposerIndex() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex { +func (x *BlindedBeaconBlockElectra) GetProposerIndex() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex { if x != nil { return x.ProposerIndex } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex(0) } func (x *BlindedBeaconBlockElectra) GetParentRoot() []byte { @@ -4421,8 +4421,8 @@ func (x *SignedBlindedBeaconBlockFulu) GetSignature() []byte { type BlindedBeaconBlockFulu struct { state protoimpl.MessageState `protogen:"open.v1"` - Slot github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot `protobuf:"varint,1,opt,name=slot,proto3" json:"slot,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot"` - ProposerIndex github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex `protobuf:"varint,2,opt,name=proposer_index,json=proposerIndex,proto3" json:"proposer_index,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.ValidatorIndex"` + Slot github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot `protobuf:"varint,1,opt,name=slot,proto3" json:"slot,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Slot"` + ProposerIndex github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex `protobuf:"varint,2,opt,name=proposer_index,json=proposerIndex,proto3" json:"proposer_index,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.ValidatorIndex"` ParentRoot []byte `protobuf:"bytes,3,opt,name=parent_root,json=parentRoot,proto3" json:"parent_root,omitempty" ssz-size:"32"` StateRoot []byte `protobuf:"bytes,4,opt,name=state_root,json=stateRoot,proto3" 
json:"state_root,omitempty" ssz-size:"32"` Body *BlindedBeaconBlockBodyElectra `protobuf:"bytes,5,opt,name=body,proto3" json:"body,omitempty"` @@ -4460,18 +4460,18 @@ func (*BlindedBeaconBlockFulu) Descriptor() ([]byte, []int) { return file_proto_prysm_v1alpha1_beacon_block_proto_rawDescGZIP(), []int{55} } -func (x *BlindedBeaconBlockFulu) GetSlot() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot { +func (x *BlindedBeaconBlockFulu) GetSlot() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot { if x != nil { return x.Slot } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot(0) } -func (x *BlindedBeaconBlockFulu) GetProposerIndex() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex { +func (x *BlindedBeaconBlockFulu) GetProposerIndex() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex { if x != nil { return x.ProposerIndex } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex(0) } func (x *BlindedBeaconBlockFulu) GetParentRoot() []byte { @@ -4673,13 +4673,13 @@ var file_proto_prysm_v1alpha1_beacon_block_proto_rawDesc = []byte{ 0x63, 0x6f, 0x6e, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x12, 0x58, 0x0a, 0x04, 0x73, 0x6c, 0x6f, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, 0x42, 0x44, 0x82, 0xb5, 0x18, 0x40, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, - 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, + 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x53, 0x6c, 0x6f, 0x74, 0x52, 0x04, 0x73, 0x6c, 0x6f, 0x74, 0x12, 0x75, 0x0a, 0x0e, 0x70, 0x72, 0x6f, 0x70, 0x6f, 0x73, 0x65, 0x72, 0x5f, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x18, 0x02, 0x20, 0x01, 0x28, 0x04, 0x42, 0x4e, 0x82, 0xb5, 0x18, 0x4a, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, - 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, + 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x56, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x52, 0x0d, 0x70, 0x72, 0x6f, 0x70, @@ -4807,14 +4807,14 @@ var file_proto_prysm_v1alpha1_beacon_block_proto_rawDesc = []byte{ 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x41, 0x6c, 0x74, 0x61, 0x69, 0x72, 0x12, 0x58, 0x0a, 0x04, 0x73, 0x6c, 0x6f, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, 0x42, 0x44, 0x82, 0xb5, 0x18, 0x40, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, - 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, + 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x53, 0x6c, 0x6f, 0x74, 0x52, 
0x04, 0x73, 0x6c, 0x6f, 0x74, 0x12, 0x75, 0x0a, 0x0e, 0x70, 0x72, 0x6f, 0x70, 0x6f, 0x73, 0x65, 0x72, 0x5f, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x18, 0x02, 0x20, 0x01, 0x28, 0x04, 0x42, 0x4e, 0x82, 0xb5, 0x18, 0x4a, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, - 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, + 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x56, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x52, 0x0d, 0x70, 0x72, 0x6f, 0x70, 0x6f, 0x73, 0x65, 0x72, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x12, 0x27, 0x0a, 0x0b, @@ -4882,13 +4882,13 @@ var file_proto_prysm_v1alpha1_beacon_block_proto_rawDesc = []byte{ 0x6c, 0x61, 0x74, 0x72, 0x69, 0x78, 0x12, 0x58, 0x0a, 0x04, 0x73, 0x6c, 0x6f, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, 0x42, 0x44, 0x82, 0xb5, 0x18, 0x40, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, - 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, + 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x53, 0x6c, 0x6f, 0x74, 0x52, 0x04, 0x73, 0x6c, 0x6f, 0x74, 0x12, 0x75, 0x0a, 0x0e, 0x70, 0x72, 0x6f, 0x70, 0x6f, 0x73, 0x65, 0x72, 0x5f, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x18, 0x02, 0x20, 0x01, 0x28, 0x04, 0x42, 0x4e, 0x82, 0xb5, 0x18, 0x4a, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, - 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, + 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x56, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x52, 0x0d, 0x70, 0x72, 0x6f, 0x70, 0x6f, 0x73, @@ -4964,13 +4964,13 @@ var file_proto_prysm_v1alpha1_beacon_block_proto_rawDesc = []byte{ 0x58, 0x0a, 0x04, 0x73, 0x6c, 0x6f, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, 0x42, 0x44, 0x82, 0xb5, 0x18, 0x40, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, - 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, + 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x53, 0x6c, 0x6f, 0x74, 0x52, 0x04, 0x73, 0x6c, 0x6f, 0x74, 0x12, 0x75, 0x0a, 0x0e, 0x70, 0x72, 0x6f, 0x70, 0x6f, 0x73, 0x65, 0x72, 0x5f, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x18, 0x02, 0x20, 0x01, 0x28, 0x04, 0x42, 0x4e, 0x82, 0xb5, 0x18, 0x4a, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, - 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 
0x75, + 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x56, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x52, 0x0d, 0x70, 0x72, 0x6f, 0x70, 0x6f, 0x73, 0x65, 0x72, 0x49, 0x6e, 0x64, 0x65, 0x78, @@ -5046,13 +5046,13 @@ var file_proto_prysm_v1alpha1_beacon_block_proto_rawDesc = []byte{ 0x0a, 0x04, 0x73, 0x6c, 0x6f, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, 0x42, 0x44, 0x82, 0xb5, 0x18, 0x40, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, - 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, + 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x53, 0x6c, 0x6f, 0x74, 0x52, 0x04, 0x73, 0x6c, 0x6f, 0x74, 0x12, 0x75, 0x0a, 0x0e, 0x70, 0x72, 0x6f, 0x70, 0x6f, 0x73, 0x65, 0x72, 0x5f, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x18, 0x02, 0x20, 0x01, 0x28, 0x04, 0x42, 0x4e, 0x82, 0xb5, 0x18, 0x4a, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, - 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, + 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x56, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x52, 0x0d, 0x70, 0x72, 0x6f, 0x70, 0x6f, 0x73, 0x65, 0x72, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x12, @@ -5134,13 +5134,13 @@ var file_proto_prysm_v1alpha1_beacon_block_proto_rawDesc = []byte{ 0x6b, 0x43, 0x61, 0x70, 0x65, 0x6c, 0x6c, 0x61, 0x12, 0x58, 0x0a, 0x04, 0x73, 0x6c, 0x6f, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, 0x42, 0x44, 0x82, 0xb5, 0x18, 0x40, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, - 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, + 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x53, 0x6c, 0x6f, 0x74, 0x52, 0x04, 0x73, 0x6c, 0x6f, 0x74, 0x12, 0x75, 0x0a, 0x0e, 0x70, 0x72, 0x6f, 0x70, 0x6f, 0x73, 0x65, 0x72, 0x5f, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x18, 0x02, 0x20, 0x01, 0x28, 0x04, 0x42, 0x4e, 0x82, 0xb5, 0x18, 0x4a, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, - 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, + 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x56, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x52, 0x0d, 0x70, 0x72, 0x6f, 0x70, @@ -5265,14 +5265,14 @@ var file_proto_prysm_v1alpha1_beacon_block_proto_rawDesc = 
[]byte{ 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x44, 0x65, 0x6e, 0x65, 0x62, 0x12, 0x58, 0x0a, 0x04, 0x73, 0x6c, 0x6f, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, 0x42, 0x44, 0x82, 0xb5, 0x18, 0x40, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, - 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, + 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x53, 0x6c, 0x6f, 0x74, 0x52, 0x04, 0x73, 0x6c, 0x6f, 0x74, 0x12, 0x75, 0x0a, 0x0e, 0x70, 0x72, 0x6f, 0x70, 0x6f, 0x73, 0x65, 0x72, 0x5f, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x18, 0x02, 0x20, 0x01, 0x28, 0x04, 0x42, 0x4e, 0x82, 0xb5, 0x18, 0x4a, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, - 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, + 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x56, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x52, 0x0d, 0x70, 0x72, 0x6f, 0x70, 0x6f, 0x73, 0x65, 0x72, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x12, 0x27, 0x0a, 0x0b, 0x70, @@ -5358,13 +5358,13 @@ var file_proto_prysm_v1alpha1_beacon_block_proto_rawDesc = []byte{ 0x12, 0x58, 0x0a, 0x04, 0x73, 0x6c, 0x6f, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, 0x42, 0x44, 0x82, 0xb5, 0x18, 0x40, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, - 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, + 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x53, 0x6c, 0x6f, 0x74, 0x52, 0x04, 0x73, 0x6c, 0x6f, 0x74, 0x12, 0x75, 0x0a, 0x0e, 0x70, 0x72, 0x6f, 0x70, 0x6f, 0x73, 0x65, 0x72, 0x5f, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x18, 0x02, 0x20, 0x01, 0x28, 0x04, 0x42, 0x4e, 0x82, 0xb5, 0x18, 0x4a, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, - 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, + 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x56, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x52, 0x0d, 0x70, 0x72, 0x6f, 0x70, 0x6f, 0x73, 0x65, 0x72, 0x49, 0x6e, 0x64, 0x65, @@ -5551,13 +5551,13 @@ var file_proto_prysm_v1alpha1_beacon_block_proto_rawDesc = []byte{ 0x61, 0x12, 0x58, 0x0a, 0x04, 0x73, 0x6c, 0x6f, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, 0x42, 0x44, 0x82, 0xb5, 0x18, 0x40, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, - 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 
0x75, 0x73, 0x2d, + 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x53, 0x6c, 0x6f, 0x74, 0x52, 0x04, 0x73, 0x6c, 0x6f, 0x74, 0x12, 0x75, 0x0a, 0x0e, 0x70, 0x72, 0x6f, 0x70, 0x6f, 0x73, 0x65, 0x72, 0x5f, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x18, 0x02, 0x20, 0x01, 0x28, 0x04, 0x42, 0x4e, 0x82, 0xb5, 0x18, 0x4a, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, - 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, + 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x56, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x52, 0x0d, 0x70, 0x72, 0x6f, 0x70, 0x6f, 0x73, 0x65, 0x72, 0x49, 0x6e, 0x64, @@ -5650,13 +5650,13 @@ var file_proto_prysm_v1alpha1_beacon_block_proto_rawDesc = []byte{ 0x74, 0x72, 0x61, 0x12, 0x58, 0x0a, 0x04, 0x73, 0x6c, 0x6f, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, 0x42, 0x44, 0x82, 0xb5, 0x18, 0x40, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, - 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, + 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x53, 0x6c, 0x6f, 0x74, 0x52, 0x04, 0x73, 0x6c, 0x6f, 0x74, 0x12, 0x75, 0x0a, 0x0e, 0x70, 0x72, 0x6f, 0x70, 0x6f, 0x73, 0x65, 0x72, 0x5f, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x18, 0x02, 0x20, 0x01, 0x28, 0x04, 0x42, 0x4e, 0x82, 0xb5, 0x18, 0x4a, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, - 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, + 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x56, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x52, 0x0d, 0x70, 0x72, 0x6f, 0x70, 0x6f, 0x73, 0x65, 0x72, 0x49, @@ -5783,13 +5783,13 @@ var file_proto_prysm_v1alpha1_beacon_block_proto_rawDesc = []byte{ 0x6c, 0x75, 0x12, 0x58, 0x0a, 0x04, 0x73, 0x6c, 0x6f, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, 0x42, 0x44, 0x82, 0xb5, 0x18, 0x40, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, - 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, + 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x53, 0x6c, 0x6f, 0x74, 0x52, 0x04, 0x73, 0x6c, 0x6f, 0x74, 0x12, 0x75, 0x0a, 0x0e, 0x70, 0x72, 0x6f, 0x70, 0x6f, 0x73, 0x65, 0x72, 0x5f, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x18, 0x02, 0x20, 0x01, 0x28, 0x04, 0x42, 0x4e, 0x82, 0xb5, 0x18, 0x4a, 0x67, 0x69, 
0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, - 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, + 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x56, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x52, 0x0d, 0x70, 0x72, 0x6f, 0x70, 0x6f, 0x73, 0x65, 0x72, 0x49, 0x6e, @@ -5808,7 +5808,7 @@ var file_proto_prysm_v1alpha1_beacon_block_proto_rawDesc = []byte{ 0x63, 0x6f, 0x6e, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x50, 0x72, 0x6f, 0x74, 0x6f, 0x50, 0x01, 0x5a, 0x39, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, - 0x36, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x31, + 0x37, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x3b, 0x65, 0x74, 0x68, 0xaa, 0x02, 0x15, 0x45, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x45, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0xca, 0x02, 0x15, 0x45, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x5c, 0x45, 0x74, diff --git a/proto/prysm/v1alpha1/beacon_block.proto b/proto/prysm/v1alpha1/beacon_block.proto index 82dbcdcfea..a1faa1a89d 100644 --- a/proto/prysm/v1alpha1/beacon_block.proto +++ b/proto/prysm/v1alpha1/beacon_block.proto @@ -24,7 +24,7 @@ import "proto/engine/v1/execution_engine.proto"; import "proto/engine/v1/electra.proto"; option csharp_namespace = "Ethereum.Eth.v1alpha1"; -option go_package = "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1;eth"; +option go_package = "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1;eth"; option java_multiple_files = true; option java_outer_classname = "BeaconBlockProto"; option java_package = "org.ethereum.eth.v1alpha1"; @@ -145,12 +145,12 @@ message BeaconBlock { // Beacon chain slot that this block represents. uint64 slot = 1 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Slot" ]; // Validator index of the validator that proposed the block header. uint64 proposer_index = 2 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/" + "github.com/OffchainLabs/prysm/v7/" "consensus-types/primitives.ValidatorIndex" ]; // 32 byte root of the parent block. @@ -267,12 +267,12 @@ message BeaconBlockAltair { // Beacon chain slot that this block represents. uint64 slot = 1 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Slot" ]; // Validator index of the validator that proposed the block header. uint64 proposer_index = 2 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/" + "github.com/OffchainLabs/prysm/v7/" "consensus-types/primitives.ValidatorIndex" ]; // 32 byte root of the parent block. @@ -341,12 +341,12 @@ message BeaconBlockBellatrix { // Beacon chain slot that this block represents. 
uint64 slot = 1 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Slot" ]; // Validator index of the validator that proposed the block header. uint64 proposer_index = 2 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/" + "github.com/OffchainLabs/prysm/v7/" "consensus-types/primitives.ValidatorIndex" ]; // 32 byte root of the parent block. @@ -412,12 +412,12 @@ message BlindedBeaconBlockBellatrix { // Beacon chain slot that this blinded block represents. uint64 slot = 1 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Slot" ]; // Validator index of the validator that proposed the block header. uint64 proposer_index = 2 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/" + "github.com/OffchainLabs/prysm/v7/" "consensus-types/primitives.ValidatorIndex" ]; // 32 byte root of the parent block. @@ -483,12 +483,12 @@ message BeaconBlockCapella { // Beacon chain slot that this block represents. uint64 slot = 1 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Slot" ]; // Validator index of the validator that proposed the block header. uint64 proposer_index = 2 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/" + "github.com/OffchainLabs/prysm/v7/" "consensus-types/primitives.ValidatorIndex" ]; // 32 byte root of the parent block. @@ -557,12 +557,12 @@ message BlindedBeaconBlockCapella { // Beacon chain slot that this blinded block represents. uint64 slot = 1 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Slot" ]; // Validator index of the validator that proposed the block header. uint64 proposer_index = 2 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/" + "github.com/OffchainLabs/prysm/v7/" "consensus-types/primitives.ValidatorIndex" ]; // 32 byte root of the parent block. @@ -667,12 +667,12 @@ message BeaconBlockDeneb { // Beacon chain slot that this block represents. uint64 slot = 1 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Slot" ]; // Validator index of the validator that proposed the block header. uint64 proposer_index = 2 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/" + "github.com/OffchainLabs/prysm/v7/" "consensus-types/primitives.ValidatorIndex" ]; // 32 byte root of the parent block. @@ -746,12 +746,12 @@ message BlindedBeaconBlockDeneb { // Beacon chain slot that this blinded block represents. uint64 slot = 1 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Slot" ]; // Validator index of the validator that proposed the block header. uint64 proposer_index = 2 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/" + "github.com/OffchainLabs/prysm/v7/" "consensus-types/primitives.ValidatorIndex" ]; // 32 byte root of the parent block. @@ -894,12 +894,12 @@ message BeaconBlockElectra { // Beacon chain slot that this block represents. 
uint64 slot = 1 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Slot" ]; // Validator index of the validator that proposed the block header. uint64 proposer_index = 2 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/" + "github.com/OffchainLabs/prysm/v7/" "consensus-types/primitives.ValidatorIndex" ]; // 32 byte root of the parent block. @@ -979,12 +979,12 @@ message BlindedBeaconBlockElectra { // Beacon chain slot that this blinded block represents. uint64 slot = 1 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Slot" ]; // Validator index of the validator that proposed the block header. uint64 proposer_index = 2 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/" + "github.com/OffchainLabs/prysm/v7/" "consensus-types/primitives.ValidatorIndex" ]; // 32 byte root of the parent block. @@ -1095,12 +1095,12 @@ message BlindedBeaconBlockFulu { // Beacon chain slot that this blinded block represents. uint64 slot = 1 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Slot" ]; // Validator index of the validator that proposed the block header. uint64 proposer_index = 2 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/" + "github.com/OffchainLabs/prysm/v7/" "consensus-types/primitives.ValidatorIndex" ]; // 32 byte root of the parent block. diff --git a/proto/prysm/v1alpha1/beacon_block_fuzz_test.go b/proto/prysm/v1alpha1/beacon_block_fuzz_test.go index ac52628a24..b8aa79b1ae 100644 --- a/proto/prysm/v1alpha1/beacon_block_fuzz_test.go +++ b/proto/prysm/v1alpha1/beacon_block_fuzz_test.go @@ -3,7 +3,7 @@ package eth_test import ( "testing" - eth "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + eth "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" ) func TestBeaconBlock_Fuzz(t *testing.T) { diff --git a/proto/prysm/v1alpha1/beacon_chain.pb.go b/proto/prysm/v1alpha1/beacon_chain.pb.go index e1b94e89e9..8824111595 100755 --- a/proto/prysm/v1alpha1/beacon_chain.pb.go +++ b/proto/prysm/v1alpha1/beacon_chain.pb.go @@ -11,8 +11,8 @@ import ( reflect "reflect" sync "sync" - github_com_OffchainLabs_prysm_v6_consensus_types_primitives "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - _ "github.com/OffchainLabs/prysm/v6/proto/eth/ext" + github_com_OffchainLabs_prysm_v7_consensus_types_primitives "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + _ "github.com/OffchainLabs/prysm/v7/proto/eth/ext" _ "google.golang.org/genproto/googleapis/api/annotations" grpc "google.golang.org/grpc" codes "google.golang.org/grpc/codes" @@ -80,13 +80,13 @@ func (x *ListIndexedAttestationsRequest) GetQueryFilter() isListIndexedAttestati return nil } -func (x *ListIndexedAttestationsRequest) GetEpoch() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch { +func (x *ListIndexedAttestationsRequest) GetEpoch() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch { if x != nil { if x, ok := x.QueryFilter.(*ListIndexedAttestationsRequest_Epoch); ok { return x.Epoch } } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch(0) } func (x *ListIndexedAttestationsRequest) 
GetGenesisEpoch() bool { @@ -117,7 +117,7 @@ type isListIndexedAttestationsRequest_QueryFilter interface { } type ListIndexedAttestationsRequest_Epoch struct { - Epoch github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch `protobuf:"varint,1,opt,name=epoch,proto3,oneof" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Epoch"` + Epoch github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch `protobuf:"varint,1,opt,name=epoch,proto3,oneof" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Epoch"` } type ListIndexedAttestationsRequest_GenesisEpoch struct { @@ -179,13 +179,13 @@ func (x *ListAttestationsRequest) GetQueryFilter() isListAttestationsRequest_Que return nil } -func (x *ListAttestationsRequest) GetEpoch() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch { +func (x *ListAttestationsRequest) GetEpoch() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch { if x != nil { if x, ok := x.QueryFilter.(*ListAttestationsRequest_Epoch); ok { return x.Epoch } } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch(0) } func (x *ListAttestationsRequest) GetGenesisEpoch() bool { @@ -216,7 +216,7 @@ type isListAttestationsRequest_QueryFilter interface { } type ListAttestationsRequest_Epoch struct { - Epoch github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch `protobuf:"varint,1,opt,name=epoch,proto3,oneof" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Epoch"` + Epoch github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch `protobuf:"varint,1,opt,name=epoch,proto3,oneof" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Epoch"` } type ListAttestationsRequest_GenesisEpoch struct { @@ -533,22 +533,22 @@ func (x *ListBlocksRequest) GetRoot() []byte { return nil } -func (x *ListBlocksRequest) GetSlot() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot { +func (x *ListBlocksRequest) GetSlot() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot { if x != nil { if x, ok := x.QueryFilter.(*ListBlocksRequest_Slot); ok { return x.Slot } } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot(0) } -func (x *ListBlocksRequest) GetEpoch() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch { +func (x *ListBlocksRequest) GetEpoch() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch { if x != nil { if x, ok := x.QueryFilter.(*ListBlocksRequest_Epoch); ok { return x.Epoch } } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch(0) } func (x *ListBlocksRequest) GetGenesis() bool { @@ -583,11 +583,11 @@ type ListBlocksRequest_Root struct { } type ListBlocksRequest_Slot struct { - Slot github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot `protobuf:"varint,2,opt,name=slot,proto3,oneof" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot"` + Slot github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot `protobuf:"varint,2,opt,name=slot,proto3,oneof" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Slot"` } type ListBlocksRequest_Epoch struct { - Epoch github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch 
`protobuf:"varint,3,opt,name=epoch,proto3,oneof" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Epoch"` + Epoch github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch `protobuf:"varint,3,opt,name=epoch,proto3,oneof" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Epoch"` } type ListBlocksRequest_Genesis struct { @@ -924,17 +924,17 @@ func (*BeaconBlockContainer_BlindedFuluBlock) isBeaconBlockContainer_Block() {} // Deprecated: Marked as deprecated in proto/prysm/v1alpha1/beacon_chain.proto. type ChainHead struct { state protoimpl.MessageState `protogen:"open.v1"` - HeadSlot github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot `protobuf:"varint,1,opt,name=head_slot,json=headSlot,proto3" json:"head_slot,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot"` - HeadEpoch github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch `protobuf:"varint,2,opt,name=head_epoch,json=headEpoch,proto3" json:"head_epoch,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Epoch"` + HeadSlot github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot `protobuf:"varint,1,opt,name=head_slot,json=headSlot,proto3" json:"head_slot,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Slot"` + HeadEpoch github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch `protobuf:"varint,2,opt,name=head_epoch,json=headEpoch,proto3" json:"head_epoch,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Epoch"` HeadBlockRoot []byte `protobuf:"bytes,3,opt,name=head_block_root,json=headBlockRoot,proto3" json:"head_block_root,omitempty" ssz-size:"32"` - FinalizedSlot github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot `protobuf:"varint,4,opt,name=finalized_slot,json=finalizedSlot,proto3" json:"finalized_slot,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot"` - FinalizedEpoch github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch `protobuf:"varint,5,opt,name=finalized_epoch,json=finalizedEpoch,proto3" json:"finalized_epoch,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Epoch"` + FinalizedSlot github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot `protobuf:"varint,4,opt,name=finalized_slot,json=finalizedSlot,proto3" json:"finalized_slot,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Slot"` + FinalizedEpoch github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch `protobuf:"varint,5,opt,name=finalized_epoch,json=finalizedEpoch,proto3" json:"finalized_epoch,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Epoch"` FinalizedBlockRoot []byte `protobuf:"bytes,6,opt,name=finalized_block_root,json=finalizedBlockRoot,proto3" json:"finalized_block_root,omitempty" ssz-size:"32"` - JustifiedSlot github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot `protobuf:"varint,7,opt,name=justified_slot,json=justifiedSlot,proto3" json:"justified_slot,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot"` - JustifiedEpoch github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch `protobuf:"varint,8,opt,name=justified_epoch,json=justifiedEpoch,proto3" json:"justified_epoch,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Epoch"` + JustifiedSlot 
github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot `protobuf:"varint,7,opt,name=justified_slot,json=justifiedSlot,proto3" json:"justified_slot,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Slot"` + JustifiedEpoch github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch `protobuf:"varint,8,opt,name=justified_epoch,json=justifiedEpoch,proto3" json:"justified_epoch,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Epoch"` JustifiedBlockRoot []byte `protobuf:"bytes,9,opt,name=justified_block_root,json=justifiedBlockRoot,proto3" json:"justified_block_root,omitempty" ssz-size:"32"` - PreviousJustifiedSlot github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot `protobuf:"varint,10,opt,name=previous_justified_slot,json=previousJustifiedSlot,proto3" json:"previous_justified_slot,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot"` - PreviousJustifiedEpoch github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch `protobuf:"varint,11,opt,name=previous_justified_epoch,json=previousJustifiedEpoch,proto3" json:"previous_justified_epoch,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Epoch"` + PreviousJustifiedSlot github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot `protobuf:"varint,10,opt,name=previous_justified_slot,json=previousJustifiedSlot,proto3" json:"previous_justified_slot,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Slot"` + PreviousJustifiedEpoch github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch `protobuf:"varint,11,opt,name=previous_justified_epoch,json=previousJustifiedEpoch,proto3" json:"previous_justified_epoch,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Epoch"` PreviousJustifiedBlockRoot []byte `protobuf:"bytes,12,opt,name=previous_justified_block_root,json=previousJustifiedBlockRoot,proto3" json:"previous_justified_block_root,omitempty" ssz-size:"32"` OptimisticStatus bool `protobuf:"varint,13,opt,name=optimistic_status,json=optimisticStatus,proto3" json:"optimistic_status,omitempty"` unknownFields protoimpl.UnknownFields @@ -971,18 +971,18 @@ func (*ChainHead) Descriptor() ([]byte, []int) { return file_proto_prysm_v1alpha1_beacon_chain_proto_rawDescGZIP(), []int{9} } -func (x *ChainHead) GetHeadSlot() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot { +func (x *ChainHead) GetHeadSlot() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot { if x != nil { return x.HeadSlot } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot(0) } -func (x *ChainHead) GetHeadEpoch() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch { +func (x *ChainHead) GetHeadEpoch() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch { if x != nil { return x.HeadEpoch } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch(0) } func (x *ChainHead) GetHeadBlockRoot() []byte { @@ -992,18 +992,18 @@ func (x *ChainHead) GetHeadBlockRoot() []byte { return nil } -func (x *ChainHead) GetFinalizedSlot() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot { +func (x *ChainHead) GetFinalizedSlot() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot { if x != nil { return 
x.FinalizedSlot } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot(0) } -func (x *ChainHead) GetFinalizedEpoch() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch { +func (x *ChainHead) GetFinalizedEpoch() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch { if x != nil { return x.FinalizedEpoch } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch(0) } func (x *ChainHead) GetFinalizedBlockRoot() []byte { @@ -1013,18 +1013,18 @@ func (x *ChainHead) GetFinalizedBlockRoot() []byte { return nil } -func (x *ChainHead) GetJustifiedSlot() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot { +func (x *ChainHead) GetJustifiedSlot() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot { if x != nil { return x.JustifiedSlot } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot(0) } -func (x *ChainHead) GetJustifiedEpoch() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch { +func (x *ChainHead) GetJustifiedEpoch() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch { if x != nil { return x.JustifiedEpoch } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch(0) } func (x *ChainHead) GetJustifiedBlockRoot() []byte { @@ -1034,18 +1034,18 @@ func (x *ChainHead) GetJustifiedBlockRoot() []byte { return nil } -func (x *ChainHead) GetPreviousJustifiedSlot() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot { +func (x *ChainHead) GetPreviousJustifiedSlot() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot { if x != nil { return x.PreviousJustifiedSlot } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot(0) } -func (x *ChainHead) GetPreviousJustifiedEpoch() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch { +func (x *ChainHead) GetPreviousJustifiedEpoch() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch { if x != nil { return x.PreviousJustifiedEpoch } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch(0) } func (x *ChainHead) GetPreviousJustifiedBlockRoot() []byte { @@ -1111,13 +1111,13 @@ func (x *ListCommitteesRequest) GetQueryFilter() isListCommitteesRequest_QueryFi return nil } -func (x *ListCommitteesRequest) GetEpoch() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch { +func (x *ListCommitteesRequest) GetEpoch() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch { if x != nil { if x, ok := x.QueryFilter.(*ListCommitteesRequest_Epoch); ok { return x.Epoch } } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch(0) } func (x *ListCommitteesRequest) GetGenesis() bool { @@ -1134,7 +1134,7 @@ type isListCommitteesRequest_QueryFilter interface { } type ListCommitteesRequest_Epoch struct { - Epoch github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch `protobuf:"varint,1,opt,name=epoch,proto3,oneof" 
cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Epoch"` + Epoch github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch `protobuf:"varint,1,opt,name=epoch,proto3,oneof" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Epoch"` } type ListCommitteesRequest_Genesis struct { @@ -1148,7 +1148,7 @@ func (*ListCommitteesRequest_Genesis) isListCommitteesRequest_QueryFilter() {} // Deprecated: Marked as deprecated in proto/prysm/v1alpha1/beacon_chain.proto. type BeaconCommittees struct { state protoimpl.MessageState `protogen:"open.v1"` - Epoch github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch `protobuf:"varint,1,opt,name=epoch,proto3" json:"epoch,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Epoch"` + Epoch github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch `protobuf:"varint,1,opt,name=epoch,proto3" json:"epoch,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Epoch"` Committees map[uint64]*BeaconCommittees_CommitteesList `protobuf:"bytes,2,rep,name=committees,proto3" json:"committees,omitempty" protobuf_key:"varint,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` ActiveValidatorCount uint64 `protobuf:"varint,3,opt,name=active_validator_count,json=activeValidatorCount,proto3" json:"active_validator_count,omitempty"` unknownFields protoimpl.UnknownFields @@ -1185,11 +1185,11 @@ func (*BeaconCommittees) Descriptor() ([]byte, []int) { return file_proto_prysm_v1alpha1_beacon_chain_proto_rawDescGZIP(), []int{11} } -func (x *BeaconCommittees) GetEpoch() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch { +func (x *BeaconCommittees) GetEpoch() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch { if x != nil { return x.Epoch } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch(0) } func (x *BeaconCommittees) GetCommittees() map[uint64]*BeaconCommittees_CommitteesList { @@ -1215,7 +1215,7 @@ type ListValidatorBalancesRequest struct { // *ListValidatorBalancesRequest_Genesis QueryFilter isListValidatorBalancesRequest_QueryFilter `protobuf_oneof:"query_filter"` PublicKeys [][]byte `protobuf:"bytes,3,rep,name=public_keys,json=publicKeys,proto3" json:"public_keys,omitempty" ssz-size:"?,48"` - Indices []github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex `protobuf:"varint,4,rep,packed,name=indices,proto3" json:"indices,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.ValidatorIndex"` + Indices []github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex `protobuf:"varint,4,rep,packed,name=indices,proto3" json:"indices,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.ValidatorIndex"` PageSize int32 `protobuf:"varint,5,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` PageToken string `protobuf:"bytes,6,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` unknownFields protoimpl.UnknownFields @@ -1259,13 +1259,13 @@ func (x *ListValidatorBalancesRequest) GetQueryFilter() isListValidatorBalancesR return nil } -func (x *ListValidatorBalancesRequest) GetEpoch() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch { +func (x *ListValidatorBalancesRequest) GetEpoch() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch { if x != nil { if x, ok := 
x.QueryFilter.(*ListValidatorBalancesRequest_Epoch); ok { return x.Epoch } } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch(0) } func (x *ListValidatorBalancesRequest) GetGenesis() bool { @@ -1284,11 +1284,11 @@ func (x *ListValidatorBalancesRequest) GetPublicKeys() [][]byte { return nil } -func (x *ListValidatorBalancesRequest) GetIndices() []github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex { +func (x *ListValidatorBalancesRequest) GetIndices() []github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex { if x != nil { return x.Indices } - return []github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex(nil) + return []github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex(nil) } func (x *ListValidatorBalancesRequest) GetPageSize() int32 { @@ -1310,7 +1310,7 @@ type isListValidatorBalancesRequest_QueryFilter interface { } type ListValidatorBalancesRequest_Epoch struct { - Epoch github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch `protobuf:"varint,1,opt,name=epoch,proto3,oneof" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Epoch"` + Epoch github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch `protobuf:"varint,1,opt,name=epoch,proto3,oneof" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Epoch"` } type ListValidatorBalancesRequest_Genesis struct { @@ -1324,7 +1324,7 @@ func (*ListValidatorBalancesRequest_Genesis) isListValidatorBalancesRequest_Quer // Deprecated: Marked as deprecated in proto/prysm/v1alpha1/beacon_chain.proto. type ValidatorBalances struct { state protoimpl.MessageState `protogen:"open.v1"` - Epoch github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch `protobuf:"varint,1,opt,name=epoch,proto3" json:"epoch,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Epoch"` + Epoch github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch `protobuf:"varint,1,opt,name=epoch,proto3" json:"epoch,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Epoch"` Balances []*ValidatorBalances_Balance `protobuf:"bytes,2,rep,name=balances,proto3" json:"balances,omitempty"` NextPageToken string `protobuf:"bytes,3,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` TotalSize int32 `protobuf:"varint,4,opt,name=total_size,json=totalSize,proto3" json:"total_size,omitempty"` @@ -1362,11 +1362,11 @@ func (*ValidatorBalances) Descriptor() ([]byte, []int) { return file_proto_prysm_v1alpha1_beacon_chain_proto_rawDescGZIP(), []int{13} } -func (x *ValidatorBalances) GetEpoch() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch { +func (x *ValidatorBalances) GetEpoch() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch { if x != nil { return x.Epoch } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch(0) } func (x *ValidatorBalances) GetBalances() []*ValidatorBalances_Balance { @@ -1402,7 +1402,7 @@ type ListValidatorsRequest struct { PageSize int32 `protobuf:"varint,4,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` PageToken string `protobuf:"bytes,5,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` PublicKeys [][]byte 
`protobuf:"bytes,6,rep,name=public_keys,json=publicKeys,proto3" json:"public_keys,omitempty"` - Indices []github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex `protobuf:"varint,7,rep,packed,name=indices,proto3" json:"indices,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.ValidatorIndex"` + Indices []github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex `protobuf:"varint,7,rep,packed,name=indices,proto3" json:"indices,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.ValidatorIndex"` unknownFields protoimpl.UnknownFields sizeCache protoimpl.SizeCache } @@ -1444,13 +1444,13 @@ func (x *ListValidatorsRequest) GetQueryFilter() isListValidatorsRequest_QueryFi return nil } -func (x *ListValidatorsRequest) GetEpoch() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch { +func (x *ListValidatorsRequest) GetEpoch() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch { if x != nil { if x, ok := x.QueryFilter.(*ListValidatorsRequest_Epoch); ok { return x.Epoch } } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch(0) } func (x *ListValidatorsRequest) GetGenesis() bool { @@ -1490,11 +1490,11 @@ func (x *ListValidatorsRequest) GetPublicKeys() [][]byte { return nil } -func (x *ListValidatorsRequest) GetIndices() []github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex { +func (x *ListValidatorsRequest) GetIndices() []github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex { if x != nil { return x.Indices } - return []github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex(nil) + return []github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex(nil) } type isListValidatorsRequest_QueryFilter interface { @@ -1502,7 +1502,7 @@ type isListValidatorsRequest_QueryFilter interface { } type ListValidatorsRequest_Epoch struct { - Epoch github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch `protobuf:"varint,1,opt,name=epoch,proto3,oneof" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Epoch"` + Epoch github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch `protobuf:"varint,1,opt,name=epoch,proto3,oneof" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Epoch"` } type ListValidatorsRequest_Genesis struct { @@ -1562,13 +1562,13 @@ func (x *GetValidatorRequest) GetQueryFilter() isGetValidatorRequest_QueryFilter return nil } -func (x *GetValidatorRequest) GetIndex() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex { +func (x *GetValidatorRequest) GetIndex() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex { if x != nil { if x, ok := x.QueryFilter.(*GetValidatorRequest_Index); ok { return x.Index } } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex(0) } func (x *GetValidatorRequest) GetPublicKey() []byte { @@ -1585,7 +1585,7 @@ type isGetValidatorRequest_QueryFilter interface { } type GetValidatorRequest_Index struct { - Index github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex `protobuf:"varint,1,opt,name=index,proto3,oneof" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.ValidatorIndex"` + Index 
github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex `protobuf:"varint,1,opt,name=index,proto3,oneof" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.ValidatorIndex"` } type GetValidatorRequest_PublicKey struct { @@ -1599,7 +1599,7 @@ func (*GetValidatorRequest_PublicKey) isGetValidatorRequest_QueryFilter() {} // Deprecated: Marked as deprecated in proto/prysm/v1alpha1/beacon_chain.proto. type Validators struct { state protoimpl.MessageState `protogen:"open.v1"` - Epoch github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch `protobuf:"varint,1,opt,name=epoch,proto3" json:"epoch,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Epoch"` + Epoch github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch `protobuf:"varint,1,opt,name=epoch,proto3" json:"epoch,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Epoch"` ValidatorList []*Validators_ValidatorContainer `protobuf:"bytes,2,rep,name=validator_list,json=validatorList,proto3" json:"validator_list,omitempty"` NextPageToken string `protobuf:"bytes,3,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` TotalSize int32 `protobuf:"varint,4,opt,name=total_size,json=totalSize,proto3" json:"total_size,omitempty"` @@ -1637,11 +1637,11 @@ func (*Validators) Descriptor() ([]byte, []int) { return file_proto_prysm_v1alpha1_beacon_chain_proto_rawDescGZIP(), []int{16} } -func (x *Validators) GetEpoch() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch { +func (x *Validators) GetEpoch() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch { if x != nil { return x.Epoch } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch(0) } func (x *Validators) GetValidatorList() []*Validators_ValidatorContainer { @@ -1714,13 +1714,13 @@ func (x *GetValidatorActiveSetChangesRequest) GetQueryFilter() isGetValidatorAct return nil } -func (x *GetValidatorActiveSetChangesRequest) GetEpoch() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch { +func (x *GetValidatorActiveSetChangesRequest) GetEpoch() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch { if x != nil { if x, ok := x.QueryFilter.(*GetValidatorActiveSetChangesRequest_Epoch); ok { return x.Epoch } } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch(0) } func (x *GetValidatorActiveSetChangesRequest) GetGenesis() bool { @@ -1737,7 +1737,7 @@ type isGetValidatorActiveSetChangesRequest_QueryFilter interface { } type GetValidatorActiveSetChangesRequest_Epoch struct { - Epoch github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch `protobuf:"varint,1,opt,name=epoch,proto3,oneof" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Epoch"` + Epoch github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch `protobuf:"varint,1,opt,name=epoch,proto3,oneof" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Epoch"` } type GetValidatorActiveSetChangesRequest_Genesis struct { @@ -1753,15 +1753,15 @@ func (*GetValidatorActiveSetChangesRequest_Genesis) isGetValidatorActiveSetChang // Deprecated: Marked as deprecated in proto/prysm/v1alpha1/beacon_chain.proto. 
type ActiveSetChanges struct { state protoimpl.MessageState `protogen:"open.v1"` - Epoch github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch `protobuf:"varint,1,opt,name=epoch,proto3" json:"epoch,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Epoch"` + Epoch github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch `protobuf:"varint,1,opt,name=epoch,proto3" json:"epoch,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Epoch"` ActivatedPublicKeys [][]byte `protobuf:"bytes,2,rep,name=activated_public_keys,json=activatedPublicKeys,proto3" json:"activated_public_keys,omitempty" ssz-size:"?,48"` - ActivatedIndices []github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex `protobuf:"varint,3,rep,packed,name=activated_indices,json=activatedIndices,proto3" json:"activated_indices,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.ValidatorIndex"` + ActivatedIndices []github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex `protobuf:"varint,3,rep,packed,name=activated_indices,json=activatedIndices,proto3" json:"activated_indices,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.ValidatorIndex"` ExitedPublicKeys [][]byte `protobuf:"bytes,4,rep,name=exited_public_keys,json=exitedPublicKeys,proto3" json:"exited_public_keys,omitempty" ssz-size:"?,48"` - ExitedIndices []github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex `protobuf:"varint,5,rep,packed,name=exited_indices,json=exitedIndices,proto3" json:"exited_indices,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.ValidatorIndex"` + ExitedIndices []github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex `protobuf:"varint,5,rep,packed,name=exited_indices,json=exitedIndices,proto3" json:"exited_indices,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.ValidatorIndex"` SlashedPublicKeys [][]byte `protobuf:"bytes,6,rep,name=slashed_public_keys,json=slashedPublicKeys,proto3" json:"slashed_public_keys,omitempty" ssz-size:"?,48"` - SlashedIndices []github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex `protobuf:"varint,7,rep,packed,name=slashed_indices,json=slashedIndices,proto3" json:"slashed_indices,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.ValidatorIndex"` + SlashedIndices []github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex `protobuf:"varint,7,rep,packed,name=slashed_indices,json=slashedIndices,proto3" json:"slashed_indices,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.ValidatorIndex"` EjectedPublicKeys [][]byte `protobuf:"bytes,8,rep,name=ejected_public_keys,json=ejectedPublicKeys,proto3" json:"ejected_public_keys,omitempty" ssz-size:"?,48"` - EjectedIndices []github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex `protobuf:"varint,9,rep,packed,name=ejected_indices,json=ejectedIndices,proto3" json:"ejected_indices,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.ValidatorIndex"` + EjectedIndices []github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex `protobuf:"varint,9,rep,packed,name=ejected_indices,json=ejectedIndices,proto3" json:"ejected_indices,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.ValidatorIndex"` unknownFields 
protoimpl.UnknownFields sizeCache protoimpl.SizeCache } @@ -1796,11 +1796,11 @@ func (*ActiveSetChanges) Descriptor() ([]byte, []int) { return file_proto_prysm_v1alpha1_beacon_chain_proto_rawDescGZIP(), []int{18} } -func (x *ActiveSetChanges) GetEpoch() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch { +func (x *ActiveSetChanges) GetEpoch() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch { if x != nil { return x.Epoch } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch(0) } func (x *ActiveSetChanges) GetActivatedPublicKeys() [][]byte { @@ -1810,11 +1810,11 @@ func (x *ActiveSetChanges) GetActivatedPublicKeys() [][]byte { return nil } -func (x *ActiveSetChanges) GetActivatedIndices() []github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex { +func (x *ActiveSetChanges) GetActivatedIndices() []github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex { if x != nil { return x.ActivatedIndices } - return []github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex(nil) + return []github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex(nil) } func (x *ActiveSetChanges) GetExitedPublicKeys() [][]byte { @@ -1824,11 +1824,11 @@ func (x *ActiveSetChanges) GetExitedPublicKeys() [][]byte { return nil } -func (x *ActiveSetChanges) GetExitedIndices() []github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex { +func (x *ActiveSetChanges) GetExitedIndices() []github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex { if x != nil { return x.ExitedIndices } - return []github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex(nil) + return []github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex(nil) } func (x *ActiveSetChanges) GetSlashedPublicKeys() [][]byte { @@ -1838,11 +1838,11 @@ func (x *ActiveSetChanges) GetSlashedPublicKeys() [][]byte { return nil } -func (x *ActiveSetChanges) GetSlashedIndices() []github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex { +func (x *ActiveSetChanges) GetSlashedIndices() []github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex { if x != nil { return x.SlashedIndices } - return []github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex(nil) + return []github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex(nil) } func (x *ActiveSetChanges) GetEjectedPublicKeys() [][]byte { @@ -1852,11 +1852,11 @@ func (x *ActiveSetChanges) GetEjectedPublicKeys() [][]byte { return nil } -func (x *ActiveSetChanges) GetEjectedIndices() []github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex { +func (x *ActiveSetChanges) GetEjectedIndices() []github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex { if x != nil { return x.EjectedIndices } - return []github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex(nil) + return []github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex(nil) } // Deprecated: Marked as deprecated in proto/prysm/v1alpha1/beacon_chain.proto. @@ -1864,7 +1864,7 @@ type ValidatorPerformanceRequest struct { state protoimpl.MessageState `protogen:"open.v1"` // Deprecated: Marked as deprecated in proto/prysm/v1alpha1/beacon_chain.proto. 
PublicKeys [][]byte `protobuf:"bytes,1,rep,name=public_keys,json=publicKeys,proto3" json:"public_keys,omitempty"` - Indices []github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex `protobuf:"varint,2,rep,packed,name=indices,proto3" json:"indices,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.ValidatorIndex"` + Indices []github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex `protobuf:"varint,2,rep,packed,name=indices,proto3" json:"indices,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.ValidatorIndex"` unknownFields protoimpl.UnknownFields sizeCache protoimpl.SizeCache } @@ -1907,11 +1907,11 @@ func (x *ValidatorPerformanceRequest) GetPublicKeys() [][]byte { return nil } -func (x *ValidatorPerformanceRequest) GetIndices() []github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex { +func (x *ValidatorPerformanceRequest) GetIndices() []github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex { if x != nil { return x.Indices } - return []github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex(nil) + return []github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex(nil) } // Deprecated: Marked as deprecated in proto/prysm/v1alpha1/beacon_chain.proto. @@ -1919,9 +1919,9 @@ type ValidatorPerformanceResponse struct { state protoimpl.MessageState `protogen:"open.v1"` CurrentEffectiveBalances []uint64 `protobuf:"varint,1,rep,packed,name=current_effective_balances,json=currentEffectiveBalances,proto3" json:"current_effective_balances,omitempty"` // Deprecated: Marked as deprecated in proto/prysm/v1alpha1/beacon_chain.proto. - InclusionSlots []github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot `protobuf:"varint,2,rep,packed,name=inclusion_slots,json=inclusionSlots,proto3" json:"inclusion_slots,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot"` + InclusionSlots []github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot `protobuf:"varint,2,rep,packed,name=inclusion_slots,json=inclusionSlots,proto3" json:"inclusion_slots,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Slot"` // Deprecated: Marked as deprecated in proto/prysm/v1alpha1/beacon_chain.proto. 
- InclusionDistances []github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot `protobuf:"varint,3,rep,packed,name=inclusion_distances,json=inclusionDistances,proto3" json:"inclusion_distances,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot"` + InclusionDistances []github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot `protobuf:"varint,3,rep,packed,name=inclusion_distances,json=inclusionDistances,proto3" json:"inclusion_distances,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Slot"` CorrectlyVotedSource []bool `protobuf:"varint,4,rep,packed,name=correctly_voted_source,json=correctlyVotedSource,proto3" json:"correctly_voted_source,omitempty"` CorrectlyVotedTarget []bool `protobuf:"varint,5,rep,packed,name=correctly_voted_target,json=correctlyVotedTarget,proto3" json:"correctly_voted_target,omitempty"` CorrectlyVotedHead []bool `protobuf:"varint,6,rep,packed,name=correctly_voted_head,json=correctlyVotedHead,proto3" json:"correctly_voted_head,omitempty"` @@ -1973,19 +1973,19 @@ func (x *ValidatorPerformanceResponse) GetCurrentEffectiveBalances() []uint64 { } // Deprecated: Marked as deprecated in proto/prysm/v1alpha1/beacon_chain.proto. -func (x *ValidatorPerformanceResponse) GetInclusionSlots() []github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot { +func (x *ValidatorPerformanceResponse) GetInclusionSlots() []github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot { if x != nil { return x.InclusionSlots } - return []github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot(nil) + return []github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot(nil) } // Deprecated: Marked as deprecated in proto/prysm/v1alpha1/beacon_chain.proto. -func (x *ValidatorPerformanceResponse) GetInclusionDistances() []github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot { +func (x *ValidatorPerformanceResponse) GetInclusionDistances() []github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot { if x != nil { return x.InclusionDistances } - return []github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot(nil) + return []github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot(nil) } func (x *ValidatorPerformanceResponse) GetCorrectlyVotedSource() []bool { @@ -2059,8 +2059,8 @@ type ValidatorQueue struct { ActivationPublicKeys [][]byte `protobuf:"bytes,2,rep,name=activation_public_keys,json=activationPublicKeys,proto3" json:"activation_public_keys,omitempty" ssz-size:"?,48"` // Deprecated: Marked as deprecated in proto/prysm/v1alpha1/beacon_chain.proto. 
ExitPublicKeys [][]byte `protobuf:"bytes,3,rep,name=exit_public_keys,json=exitPublicKeys,proto3" json:"exit_public_keys,omitempty" ssz-size:"?,48"` - ActivationValidatorIndices []github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex `protobuf:"varint,4,rep,packed,name=activation_validator_indices,json=activationValidatorIndices,proto3" json:"activation_validator_indices,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.ValidatorIndex"` - ExitValidatorIndices []github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex `protobuf:"varint,5,rep,packed,name=exit_validator_indices,json=exitValidatorIndices,proto3" json:"exit_validator_indices,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.ValidatorIndex"` + ActivationValidatorIndices []github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex `protobuf:"varint,4,rep,packed,name=activation_validator_indices,json=activationValidatorIndices,proto3" json:"activation_validator_indices,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.ValidatorIndex"` + ExitValidatorIndices []github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex `protobuf:"varint,5,rep,packed,name=exit_validator_indices,json=exitValidatorIndices,proto3" json:"exit_validator_indices,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.ValidatorIndex"` unknownFields protoimpl.UnknownFields sizeCache protoimpl.SizeCache } @@ -2118,18 +2118,18 @@ func (x *ValidatorQueue) GetExitPublicKeys() [][]byte { return nil } -func (x *ValidatorQueue) GetActivationValidatorIndices() []github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex { +func (x *ValidatorQueue) GetActivationValidatorIndices() []github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex { if x != nil { return x.ActivationValidatorIndices } - return []github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex(nil) + return []github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex(nil) } -func (x *ValidatorQueue) GetExitValidatorIndices() []github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex { +func (x *ValidatorQueue) GetExitValidatorIndices() []github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex { if x != nil { return x.ExitValidatorIndices } - return []github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex(nil) + return []github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex(nil) } // Deprecated: Marked as deprecated in proto/prysm/v1alpha1/beacon_chain.proto. 
@@ -2141,7 +2141,7 @@ type ListValidatorAssignmentsRequest struct { // *ListValidatorAssignmentsRequest_Genesis QueryFilter isListValidatorAssignmentsRequest_QueryFilter `protobuf_oneof:"query_filter"` PublicKeys [][]byte `protobuf:"bytes,3,rep,name=public_keys,json=publicKeys,proto3" json:"public_keys,omitempty" ssz-size:"?,48"` - Indices []github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex `protobuf:"varint,4,rep,packed,name=indices,proto3" json:"indices,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.ValidatorIndex"` + Indices []github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex `protobuf:"varint,4,rep,packed,name=indices,proto3" json:"indices,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.ValidatorIndex"` PageSize int32 `protobuf:"varint,5,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` PageToken string `protobuf:"bytes,6,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` unknownFields protoimpl.UnknownFields @@ -2185,13 +2185,13 @@ func (x *ListValidatorAssignmentsRequest) GetQueryFilter() isListValidatorAssign return nil } -func (x *ListValidatorAssignmentsRequest) GetEpoch() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch { +func (x *ListValidatorAssignmentsRequest) GetEpoch() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch { if x != nil { if x, ok := x.QueryFilter.(*ListValidatorAssignmentsRequest_Epoch); ok { return x.Epoch } } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch(0) } func (x *ListValidatorAssignmentsRequest) GetGenesis() bool { @@ -2210,11 +2210,11 @@ func (x *ListValidatorAssignmentsRequest) GetPublicKeys() [][]byte { return nil } -func (x *ListValidatorAssignmentsRequest) GetIndices() []github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex { +func (x *ListValidatorAssignmentsRequest) GetIndices() []github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex { if x != nil { return x.Indices } - return []github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex(nil) + return []github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex(nil) } func (x *ListValidatorAssignmentsRequest) GetPageSize() int32 { @@ -2236,7 +2236,7 @@ type isListValidatorAssignmentsRequest_QueryFilter interface { } type ListValidatorAssignmentsRequest_Epoch struct { - Epoch github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch `protobuf:"varint,1,opt,name=epoch,proto3,oneof" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Epoch"` + Epoch github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch `protobuf:"varint,1,opt,name=epoch,proto3,oneof" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Epoch"` } type ListValidatorAssignmentsRequest_Genesis struct { @@ -2250,7 +2250,7 @@ func (*ListValidatorAssignmentsRequest_Genesis) isListValidatorAssignmentsReques // Deprecated: Marked as deprecated in proto/prysm/v1alpha1/beacon_chain.proto. 
type ValidatorAssignments struct { state protoimpl.MessageState `protogen:"open.v1"` - Epoch github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch `protobuf:"varint,1,opt,name=epoch,proto3" json:"epoch,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Epoch"` + Epoch github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch `protobuf:"varint,1,opt,name=epoch,proto3" json:"epoch,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Epoch"` Assignments []*ValidatorAssignments_CommitteeAssignment `protobuf:"bytes,2,rep,name=assignments,proto3" json:"assignments,omitempty"` NextPageToken string `protobuf:"bytes,3,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` TotalSize int32 `protobuf:"varint,4,opt,name=total_size,json=totalSize,proto3" json:"total_size,omitempty"` @@ -2288,11 +2288,11 @@ func (*ValidatorAssignments) Descriptor() ([]byte, []int) { return file_proto_prysm_v1alpha1_beacon_chain_proto_rawDescGZIP(), []int{23} } -func (x *ValidatorAssignments) GetEpoch() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch { +func (x *ValidatorAssignments) GetEpoch() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch { if x != nil { return x.Epoch } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch(0) } func (x *ValidatorAssignments) GetAssignments() []*ValidatorAssignments_CommitteeAssignment { @@ -2365,13 +2365,13 @@ func (x *GetValidatorParticipationRequest) GetQueryFilter() isGetValidatorPartic return nil } -func (x *GetValidatorParticipationRequest) GetEpoch() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch { +func (x *GetValidatorParticipationRequest) GetEpoch() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch { if x != nil { if x, ok := x.QueryFilter.(*GetValidatorParticipationRequest_Epoch); ok { return x.Epoch } } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch(0) } func (x *GetValidatorParticipationRequest) GetGenesis() bool { @@ -2388,7 +2388,7 @@ type isGetValidatorParticipationRequest_QueryFilter interface { } type GetValidatorParticipationRequest_Epoch struct { - Epoch github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch `protobuf:"varint,1,opt,name=epoch,proto3,oneof" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Epoch"` + Epoch github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch `protobuf:"varint,1,opt,name=epoch,proto3,oneof" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Epoch"` } type GetValidatorParticipationRequest_Genesis struct { @@ -2402,7 +2402,7 @@ func (*GetValidatorParticipationRequest_Genesis) isGetValidatorParticipationRequ // Deprecated: Marked as deprecated in proto/prysm/v1alpha1/beacon_chain.proto. 
type ValidatorParticipationResponse struct { state protoimpl.MessageState `protogen:"open.v1"` - Epoch github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch `protobuf:"varint,1,opt,name=epoch,proto3" json:"epoch,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Epoch"` + Epoch github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch `protobuf:"varint,1,opt,name=epoch,proto3" json:"epoch,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Epoch"` Finalized bool `protobuf:"varint,2,opt,name=finalized,proto3" json:"finalized,omitempty"` Participation *ValidatorParticipation `protobuf:"bytes,3,opt,name=participation,proto3" json:"participation,omitempty"` unknownFields protoimpl.UnknownFields @@ -2439,11 +2439,11 @@ func (*ValidatorParticipationResponse) Descriptor() ([]byte, []int) { return file_proto_prysm_v1alpha1_beacon_chain_proto_rawDescGZIP(), []int{25} } -func (x *ValidatorParticipationResponse) GetEpoch() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch { +func (x *ValidatorParticipationResponse) GetEpoch() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch { if x != nil { return x.Epoch } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch(0) } func (x *ValidatorParticipationResponse) GetFinalized() bool { @@ -2681,7 +2681,7 @@ func (x *BeaconConfig) GetConfig() map[string]string { type SubmitSlashingResponse struct { state protoimpl.MessageState `protogen:"open.v1"` - SlashedIndices []github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex `protobuf:"varint,1,rep,packed,name=slashed_indices,json=slashedIndices,proto3" json:"slashed_indices,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.ValidatorIndex"` + SlashedIndices []github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex `protobuf:"varint,1,rep,packed,name=slashed_indices,json=slashedIndices,proto3" json:"slashed_indices,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.ValidatorIndex"` unknownFields protoimpl.UnknownFields sizeCache protoimpl.SizeCache } @@ -2716,18 +2716,18 @@ func (*SubmitSlashingResponse) Descriptor() ([]byte, []int) { return file_proto_prysm_v1alpha1_beacon_chain_proto_rawDescGZIP(), []int{30} } -func (x *SubmitSlashingResponse) GetSlashedIndices() []github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex { +func (x *SubmitSlashingResponse) GetSlashedIndices() []github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex { if x != nil { return x.SlashedIndices } - return []github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex(nil) + return []github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex(nil) } type IndividualVotesRequest struct { state protoimpl.MessageState `protogen:"open.v1"` - Epoch github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch `protobuf:"varint,1,opt,name=epoch,proto3" json:"epoch,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Epoch"` + Epoch github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch `protobuf:"varint,1,opt,name=epoch,proto3" json:"epoch,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Epoch"` PublicKeys [][]byte `protobuf:"bytes,2,rep,name=public_keys,json=publicKeys,proto3" 
json:"public_keys,omitempty"` - Indices []github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex `protobuf:"varint,3,rep,packed,name=indices,proto3" json:"indices,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.ValidatorIndex"` + Indices []github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex `protobuf:"varint,3,rep,packed,name=indices,proto3" json:"indices,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.ValidatorIndex"` unknownFields protoimpl.UnknownFields sizeCache protoimpl.SizeCache } @@ -2762,11 +2762,11 @@ func (*IndividualVotesRequest) Descriptor() ([]byte, []int) { return file_proto_prysm_v1alpha1_beacon_chain_proto_rawDescGZIP(), []int{31} } -func (x *IndividualVotesRequest) GetEpoch() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch { +func (x *IndividualVotesRequest) GetEpoch() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch { if x != nil { return x.Epoch } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch(0) } func (x *IndividualVotesRequest) GetPublicKeys() [][]byte { @@ -2776,11 +2776,11 @@ func (x *IndividualVotesRequest) GetPublicKeys() [][]byte { return nil } -func (x *IndividualVotesRequest) GetIndices() []github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex { +func (x *IndividualVotesRequest) GetIndices() []github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex { if x != nil { return x.Indices } - return []github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex(nil) + return []github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex(nil) } type IndividualVotesRespond struct { @@ -2829,7 +2829,7 @@ func (x *IndividualVotesRespond) GetIndividualVotes() []*IndividualVotesRespond_ type BeaconCommittees_CommitteeItem struct { state protoimpl.MessageState `protogen:"open.v1"` - ValidatorIndices []github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex `protobuf:"varint,1,rep,packed,name=validator_indices,json=validatorIndices,proto3" json:"validator_indices,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.ValidatorIndex"` + ValidatorIndices []github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex `protobuf:"varint,1,rep,packed,name=validator_indices,json=validatorIndices,proto3" json:"validator_indices,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.ValidatorIndex"` unknownFields protoimpl.UnknownFields sizeCache protoimpl.SizeCache } @@ -2864,11 +2864,11 @@ func (*BeaconCommittees_CommitteeItem) Descriptor() ([]byte, []int) { return file_proto_prysm_v1alpha1_beacon_chain_proto_rawDescGZIP(), []int{11, 0} } -func (x *BeaconCommittees_CommitteeItem) GetValidatorIndices() []github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex { +func (x *BeaconCommittees_CommitteeItem) GetValidatorIndices() []github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex { if x != nil { return x.ValidatorIndices } - return []github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex(nil) + return []github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex(nil) } type BeaconCommittees_CommitteesList struct { @@ -2918,7 +2918,7 @@ func (x *BeaconCommittees_CommitteesList) GetCommittees() 
[]*BeaconCommittees_Co type ValidatorBalances_Balance struct { state protoimpl.MessageState `protogen:"open.v1"` PublicKey []byte `protobuf:"bytes,1,opt,name=public_key,json=publicKey,proto3" json:"public_key,omitempty" ssz-size:"48"` - Index github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex `protobuf:"varint,2,opt,name=index,proto3" json:"index,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.ValidatorIndex"` + Index github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex `protobuf:"varint,2,opt,name=index,proto3" json:"index,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.ValidatorIndex"` Balance uint64 `protobuf:"varint,3,opt,name=balance,proto3" json:"balance,omitempty"` Status string `protobuf:"bytes,4,opt,name=status,proto3" json:"status,omitempty"` unknownFields protoimpl.UnknownFields @@ -2962,11 +2962,11 @@ func (x *ValidatorBalances_Balance) GetPublicKey() []byte { return nil } -func (x *ValidatorBalances_Balance) GetIndex() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex { +func (x *ValidatorBalances_Balance) GetIndex() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex { if x != nil { return x.Index } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex(0) } func (x *ValidatorBalances_Balance) GetBalance() uint64 { @@ -2985,7 +2985,7 @@ func (x *ValidatorBalances_Balance) GetStatus() string { type Validators_ValidatorContainer struct { state protoimpl.MessageState `protogen:"open.v1"` - Index github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex `protobuf:"varint,1,opt,name=index,proto3" json:"index,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.ValidatorIndex"` + Index github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex `protobuf:"varint,1,opt,name=index,proto3" json:"index,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.ValidatorIndex"` Validator *Validator `protobuf:"bytes,2,opt,name=validator,proto3" json:"validator,omitempty"` unknownFields protoimpl.UnknownFields sizeCache protoimpl.SizeCache @@ -3021,11 +3021,11 @@ func (*Validators_ValidatorContainer) Descriptor() ([]byte, []int) { return file_proto_prysm_v1alpha1_beacon_chain_proto_rawDescGZIP(), []int{16, 0} } -func (x *Validators_ValidatorContainer) GetIndex() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex { +func (x *Validators_ValidatorContainer) GetIndex() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex { if x != nil { return x.Index } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex(0) } func (x *Validators_ValidatorContainer) GetValidator() *Validator { @@ -3037,13 +3037,13 @@ func (x *Validators_ValidatorContainer) GetValidator() *Validator { type ValidatorAssignments_CommitteeAssignment struct { state protoimpl.MessageState `protogen:"open.v1"` - BeaconCommittees []github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex `protobuf:"varint,1,rep,packed,name=beacon_committees,json=beaconCommittees,proto3" json:"beacon_committees,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.ValidatorIndex"` - 
CommitteeIndex github_com_OffchainLabs_prysm_v6_consensus_types_primitives.CommitteeIndex `protobuf:"varint,2,opt,name=committee_index,json=committeeIndex,proto3" json:"committee_index,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.CommitteeIndex"` - AttesterSlot github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot `protobuf:"varint,3,opt,name=attester_slot,json=attesterSlot,proto3" json:"attester_slot,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot"` - ProposerSlots []github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot `protobuf:"varint,4,rep,packed,name=proposer_slots,json=proposerSlots,proto3" json:"proposer_slots,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot"` + BeaconCommittees []github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex `protobuf:"varint,1,rep,packed,name=beacon_committees,json=beaconCommittees,proto3" json:"beacon_committees,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.ValidatorIndex"` + CommitteeIndex github_com_OffchainLabs_prysm_v7_consensus_types_primitives.CommitteeIndex `protobuf:"varint,2,opt,name=committee_index,json=committeeIndex,proto3" json:"committee_index,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.CommitteeIndex"` + AttesterSlot github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot `protobuf:"varint,3,opt,name=attester_slot,json=attesterSlot,proto3" json:"attester_slot,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Slot"` + ProposerSlots []github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot `protobuf:"varint,4,rep,packed,name=proposer_slots,json=proposerSlots,proto3" json:"proposer_slots,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Slot"` // Deprecated: Marked as deprecated in proto/prysm/v1alpha1/beacon_chain.proto. 
PublicKey []byte `protobuf:"bytes,5,opt,name=public_key,json=publicKey,proto3" json:"public_key,omitempty" ssz-size:"48"` - ValidatorIndex github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex `protobuf:"varint,6,opt,name=validator_index,json=validatorIndex,proto3" json:"validator_index,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.ValidatorIndex"` + ValidatorIndex github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex `protobuf:"varint,6,opt,name=validator_index,json=validatorIndex,proto3" json:"validator_index,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.ValidatorIndex"` unknownFields protoimpl.UnknownFields sizeCache protoimpl.SizeCache } @@ -3078,32 +3078,32 @@ func (*ValidatorAssignments_CommitteeAssignment) Descriptor() ([]byte, []int) { return file_proto_prysm_v1alpha1_beacon_chain_proto_rawDescGZIP(), []int{23, 0} } -func (x *ValidatorAssignments_CommitteeAssignment) GetBeaconCommittees() []github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex { +func (x *ValidatorAssignments_CommitteeAssignment) GetBeaconCommittees() []github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex { if x != nil { return x.BeaconCommittees } - return []github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex(nil) + return []github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex(nil) } -func (x *ValidatorAssignments_CommitteeAssignment) GetCommitteeIndex() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.CommitteeIndex { +func (x *ValidatorAssignments_CommitteeAssignment) GetCommitteeIndex() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.CommitteeIndex { if x != nil { return x.CommitteeIndex } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.CommitteeIndex(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.CommitteeIndex(0) } -func (x *ValidatorAssignments_CommitteeAssignment) GetAttesterSlot() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot { +func (x *ValidatorAssignments_CommitteeAssignment) GetAttesterSlot() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot { if x != nil { return x.AttesterSlot } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot(0) } -func (x *ValidatorAssignments_CommitteeAssignment) GetProposerSlots() []github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot { +func (x *ValidatorAssignments_CommitteeAssignment) GetProposerSlots() []github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot { if x != nil { return x.ProposerSlots } - return []github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot(nil) + return []github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot(nil) } // Deprecated: Marked as deprecated in proto/prysm/v1alpha1/beacon_chain.proto. 
@@ -3114,18 +3114,18 @@ func (x *ValidatorAssignments_CommitteeAssignment) GetPublicKey() []byte { return nil } -func (x *ValidatorAssignments_CommitteeAssignment) GetValidatorIndex() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex { +func (x *ValidatorAssignments_CommitteeAssignment) GetValidatorIndex() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex { if x != nil { return x.ValidatorIndex } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex(0) } type IndividualVotesRespond_IndividualVote struct { state protoimpl.MessageState `protogen:"open.v1"` - Epoch github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch `protobuf:"varint,1,opt,name=epoch,proto3" json:"epoch,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Epoch"` + Epoch github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch `protobuf:"varint,1,opt,name=epoch,proto3" json:"epoch,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Epoch"` PublicKey []byte `protobuf:"bytes,2,opt,name=public_key,json=publicKey,proto3" json:"public_key,omitempty"` - ValidatorIndex github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex `protobuf:"varint,3,opt,name=validator_index,json=validatorIndex,proto3" json:"validator_index,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.ValidatorIndex"` + ValidatorIndex github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex `protobuf:"varint,3,opt,name=validator_index,json=validatorIndex,proto3" json:"validator_index,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.ValidatorIndex"` IsSlashed bool `protobuf:"varint,4,opt,name=is_slashed,json=isSlashed,proto3" json:"is_slashed,omitempty"` IsWithdrawableInCurrentEpoch bool `protobuf:"varint,5,opt,name=is_withdrawable_in_current_epoch,json=isWithdrawableInCurrentEpoch,proto3" json:"is_withdrawable_in_current_epoch,omitempty"` IsActiveInCurrentEpoch bool `protobuf:"varint,6,opt,name=is_active_in_current_epoch,json=isActiveInCurrentEpoch,proto3" json:"is_active_in_current_epoch,omitempty"` @@ -3137,9 +3137,9 @@ type IndividualVotesRespond_IndividualVote struct { IsPreviousEpochHeadAttester bool `protobuf:"varint,12,opt,name=is_previous_epoch_head_attester,json=isPreviousEpochHeadAttester,proto3" json:"is_previous_epoch_head_attester,omitempty"` CurrentEpochEffectiveBalanceGwei uint64 `protobuf:"varint,13,opt,name=current_epoch_effective_balance_gwei,json=currentEpochEffectiveBalanceGwei,proto3" json:"current_epoch_effective_balance_gwei,omitempty"` // Deprecated: Marked as deprecated in proto/prysm/v1alpha1/beacon_chain.proto. - InclusionSlot github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot `protobuf:"varint,14,opt,name=inclusion_slot,json=inclusionSlot,proto3" json:"inclusion_slot,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot"` + InclusionSlot github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot `protobuf:"varint,14,opt,name=inclusion_slot,json=inclusionSlot,proto3" json:"inclusion_slot,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Slot"` // Deprecated: Marked as deprecated in proto/prysm/v1alpha1/beacon_chain.proto. 
- InclusionDistance github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot `protobuf:"varint,15,opt,name=inclusion_distance,json=inclusionDistance,proto3" json:"inclusion_distance,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot"` + InclusionDistance github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot `protobuf:"varint,15,opt,name=inclusion_distance,json=inclusionDistance,proto3" json:"inclusion_distance,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Slot"` InactivityScore uint64 `protobuf:"varint,16,opt,name=inactivity_score,json=inactivityScore,proto3" json:"inactivity_score,omitempty"` unknownFields protoimpl.UnknownFields sizeCache protoimpl.SizeCache @@ -3175,11 +3175,11 @@ func (*IndividualVotesRespond_IndividualVote) Descriptor() ([]byte, []int) { return file_proto_prysm_v1alpha1_beacon_chain_proto_rawDescGZIP(), []int{32, 0} } -func (x *IndividualVotesRespond_IndividualVote) GetEpoch() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch { +func (x *IndividualVotesRespond_IndividualVote) GetEpoch() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch { if x != nil { return x.Epoch } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch(0) } func (x *IndividualVotesRespond_IndividualVote) GetPublicKey() []byte { @@ -3189,11 +3189,11 @@ func (x *IndividualVotesRespond_IndividualVote) GetPublicKey() []byte { return nil } -func (x *IndividualVotesRespond_IndividualVote) GetValidatorIndex() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex { +func (x *IndividualVotesRespond_IndividualVote) GetValidatorIndex() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex { if x != nil { return x.ValidatorIndex } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex(0) } func (x *IndividualVotesRespond_IndividualVote) GetIsSlashed() bool { @@ -3267,19 +3267,19 @@ func (x *IndividualVotesRespond_IndividualVote) GetCurrentEpochEffectiveBalanceG } // Deprecated: Marked as deprecated in proto/prysm/v1alpha1/beacon_chain.proto. -func (x *IndividualVotesRespond_IndividualVote) GetInclusionSlot() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot { +func (x *IndividualVotesRespond_IndividualVote) GetInclusionSlot() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot { if x != nil { return x.InclusionSlot } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot(0) } // Deprecated: Marked as deprecated in proto/prysm/v1alpha1/beacon_chain.proto. 
-func (x *IndividualVotesRespond_IndividualVote) GetInclusionDistance() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot { +func (x *IndividualVotesRespond_IndividualVote) GetInclusionDistance() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot { if x != nil { return x.InclusionDistance } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot(0) } func (x *IndividualVotesRespond_IndividualVote) GetInactivityScore() uint64 { @@ -3317,7 +3317,7 @@ var file_proto_prysm_v1alpha1_beacon_chain_proto_rawDesc = []byte{ 0x12, 0x5d, 0x0a, 0x05, 0x65, 0x70, 0x6f, 0x63, 0x68, 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, 0x42, 0x45, 0x82, 0xb5, 0x18, 0x41, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, - 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, + 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x45, 0x70, 0x6f, 0x63, 0x68, 0x48, 0x00, 0x52, 0x05, 0x65, 0x70, 0x6f, 0x63, 0x68, 0x12, 0x25, 0x0a, 0x0d, 0x67, 0x65, 0x6e, 0x65, 0x73, 0x69, 0x73, 0x5f, 0x65, 0x70, 0x6f, 0x63, 0x68, @@ -3332,7 +3332,7 @@ var file_proto_prysm_v1alpha1_beacon_chain_proto_rawDesc = []byte{ 0x73, 0x74, 0x12, 0x5d, 0x0a, 0x05, 0x65, 0x70, 0x6f, 0x63, 0x68, 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, 0x42, 0x45, 0x82, 0xb5, 0x18, 0x41, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, - 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, + 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x45, 0x70, 0x6f, 0x63, 0x68, 0x48, 0x00, 0x52, 0x05, 0x65, 0x70, 0x6f, 0x63, 0x68, 0x12, 0x25, 0x0a, 0x0d, 0x67, 0x65, 0x6e, 0x65, 0x73, 0x69, 0x73, 0x5f, 0x65, 0x70, 0x6f, @@ -3397,13 +3397,13 @@ var file_proto_prysm_v1alpha1_beacon_chain_proto_rawDesc = []byte{ 0x28, 0x0c, 0x48, 0x00, 0x52, 0x04, 0x72, 0x6f, 0x6f, 0x74, 0x12, 0x5a, 0x0a, 0x04, 0x73, 0x6c, 0x6f, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x04, 0x42, 0x44, 0x82, 0xb5, 0x18, 0x40, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, - 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, + 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x53, 0x6c, 0x6f, 0x74, 0x48, 0x00, 0x52, 0x04, 0x73, 0x6c, 0x6f, 0x74, 0x12, 0x5d, 0x0a, 0x05, 0x65, 0x70, 0x6f, 0x63, 0x68, 0x18, 0x03, 0x20, 0x01, 0x28, 0x04, 0x42, 0x45, 0x82, 0xb5, 0x18, 0x41, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, - 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, + 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 
0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x45, 0x70, 0x6f, 0x63, 0x68, 0x48, 0x00, 0x52, 0x05, 0x65, 0x70, 0x6f, 0x63, 0x68, 0x12, 0x1a, 0x0a, 0x07, 0x67, 0x65, 0x6e, 0x65, 0x73, 0x69, 0x73, @@ -3506,13 +3506,13 @@ var file_proto_prysm_v1alpha1_beacon_chain_proto_rawDesc = []byte{ 0x0a, 0x09, 0x68, 0x65, 0x61, 0x64, 0x5f, 0x73, 0x6c, 0x6f, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, 0x42, 0x44, 0x82, 0xb5, 0x18, 0x40, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, - 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, + 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x53, 0x6c, 0x6f, 0x74, 0x52, 0x08, 0x68, 0x65, 0x61, 0x64, 0x53, 0x6c, 0x6f, 0x74, 0x12, 0x64, 0x0a, 0x0a, 0x68, 0x65, 0x61, 0x64, 0x5f, 0x65, 0x70, 0x6f, 0x63, 0x68, 0x18, 0x02, 0x20, 0x01, 0x28, 0x04, 0x42, 0x45, 0x82, 0xb5, 0x18, 0x41, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, - 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, + 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x45, 0x70, 0x6f, 0x63, 0x68, 0x52, 0x09, 0x68, 0x65, 0x61, 0x64, 0x45, 0x70, 0x6f, 0x63, 0x68, 0x12, 0x2e, 0x0a, 0x0f, 0x68, 0x65, 0x61, 0x64, 0x5f, @@ -3522,14 +3522,14 @@ var file_proto_prysm_v1alpha1_beacon_chain_proto_rawDesc = []byte{ 0x69, 0x7a, 0x65, 0x64, 0x5f, 0x73, 0x6c, 0x6f, 0x74, 0x18, 0x04, 0x20, 0x01, 0x28, 0x04, 0x42, 0x44, 0x82, 0xb5, 0x18, 0x40, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, - 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, + 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x53, 0x6c, 0x6f, 0x74, 0x52, 0x0d, 0x66, 0x69, 0x6e, 0x61, 0x6c, 0x69, 0x7a, 0x65, 0x64, 0x53, 0x6c, 0x6f, 0x74, 0x12, 0x6e, 0x0a, 0x0f, 0x66, 0x69, 0x6e, 0x61, 0x6c, 0x69, 0x7a, 0x65, 0x64, 0x5f, 0x65, 0x70, 0x6f, 0x63, 0x68, 0x18, 0x05, 0x20, 0x01, 0x28, 0x04, 0x42, 0x45, 0x82, 0xb5, 0x18, 0x41, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, - 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, + 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x45, 0x70, 0x6f, 0x63, 0x68, 0x52, 0x0e, 0x66, 0x69, 0x6e, 0x61, 0x6c, 0x69, 0x7a, 0x65, 0x64, 0x45, 0x70, 0x6f, 0x63, 0x68, 0x12, 0x38, 0x0a, 0x14, 0x66, 0x69, 0x6e, 0x61, 0x6c, 0x69, 0x7a, 0x65, @@ -3539,14 +3539,14 @@ var file_proto_prysm_v1alpha1_beacon_chain_proto_rawDesc = []byte{ 0x0a, 0x0e, 0x6a, 0x75, 
0x73, 0x74, 0x69, 0x66, 0x69, 0x65, 0x64, 0x5f, 0x73, 0x6c, 0x6f, 0x74, 0x18, 0x07, 0x20, 0x01, 0x28, 0x04, 0x42, 0x44, 0x82, 0xb5, 0x18, 0x40, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, - 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, + 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x53, 0x6c, 0x6f, 0x74, 0x52, 0x0d, 0x6a, 0x75, 0x73, 0x74, 0x69, 0x66, 0x69, 0x65, 0x64, 0x53, 0x6c, 0x6f, 0x74, 0x12, 0x6e, 0x0a, 0x0f, 0x6a, 0x75, 0x73, 0x74, 0x69, 0x66, 0x69, 0x65, 0x64, 0x5f, 0x65, 0x70, 0x6f, 0x63, 0x68, 0x18, 0x08, 0x20, 0x01, 0x28, 0x04, 0x42, 0x45, 0x82, 0xb5, 0x18, 0x41, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, - 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, + 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x45, 0x70, 0x6f, 0x63, 0x68, 0x52, 0x0e, 0x6a, 0x75, 0x73, 0x74, 0x69, 0x66, 0x69, 0x65, 0x64, 0x45, 0x70, 0x6f, 0x63, 0x68, 0x12, 0x38, 0x0a, 0x14, 0x6a, @@ -3557,7 +3557,7 @@ var file_proto_prysm_v1alpha1_beacon_chain_proto_rawDesc = []byte{ 0x73, 0x5f, 0x6a, 0x75, 0x73, 0x74, 0x69, 0x66, 0x69, 0x65, 0x64, 0x5f, 0x73, 0x6c, 0x6f, 0x74, 0x18, 0x0a, 0x20, 0x01, 0x28, 0x04, 0x42, 0x44, 0x82, 0xb5, 0x18, 0x40, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, - 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, + 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x53, 0x6c, 0x6f, 0x74, 0x52, 0x15, 0x70, 0x72, 0x65, 0x76, 0x69, 0x6f, 0x75, 0x73, 0x4a, 0x75, 0x73, 0x74, 0x69, 0x66, 0x69, 0x65, 0x64, 0x53, @@ -3565,7 +3565,7 @@ var file_proto_prysm_v1alpha1_beacon_chain_proto_rawDesc = []byte{ 0x6a, 0x75, 0x73, 0x74, 0x69, 0x66, 0x69, 0x65, 0x64, 0x5f, 0x65, 0x70, 0x6f, 0x63, 0x68, 0x18, 0x0b, 0x20, 0x01, 0x28, 0x04, 0x42, 0x45, 0x82, 0xb5, 0x18, 0x41, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, - 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, + 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x45, 0x70, 0x6f, 0x63, 0x68, 0x52, 0x16, 0x70, 0x72, 0x65, 0x76, 0x69, 0x6f, 0x75, 0x73, 0x4a, 0x75, 0x73, 0x74, 0x69, 0x66, 0x69, 0x65, 0x64, 0x45, @@ -3581,7 +3581,7 @@ var file_proto_prysm_v1alpha1_beacon_chain_proto_rawDesc = []byte{ 0x65, 0x65, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x5d, 0x0a, 0x05, 0x65, 0x70, 0x6f, 0x63, 0x68, 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, 0x42, 0x45, 0x82, 0xb5, 0x18, 0x41, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 
0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, - 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, + 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x45, 0x70, 0x6f, 0x63, 0x68, 0x48, 0x00, 0x52, 0x05, 0x65, 0x70, 0x6f, 0x63, 0x68, 0x12, 0x1a, 0x0a, 0x07, 0x67, 0x65, 0x6e, @@ -3592,7 +3592,7 @@ var file_proto_prysm_v1alpha1_beacon_chain_proto_rawDesc = []byte{ 0x0a, 0x05, 0x65, 0x70, 0x6f, 0x63, 0x68, 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, 0x42, 0x45, 0x82, 0xb5, 0x18, 0x41, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, - 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, + 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x45, 0x70, 0x6f, 0x63, 0x68, 0x52, 0x05, 0x65, 0x70, 0x6f, 0x63, 0x68, 0x12, 0x57, 0x0a, 0x0a, 0x63, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x74, 0x65, 0x65, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, @@ -3608,7 +3608,7 @@ var file_proto_prysm_v1alpha1_beacon_chain_proto_rawDesc = []byte{ 0x76, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, 0x5f, 0x69, 0x6e, 0x64, 0x69, 0x63, 0x65, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x04, 0x42, 0x4e, 0x82, 0xb5, 0x18, 0x4a, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, - 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, + 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x56, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x52, 0x10, 0x76, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, @@ -3632,7 +3632,7 @@ var file_proto_prysm_v1alpha1_beacon_chain_proto_rawDesc = []byte{ 0x0a, 0x05, 0x65, 0x70, 0x6f, 0x63, 0x68, 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, 0x42, 0x45, 0x82, 0xb5, 0x18, 0x41, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, - 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, + 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x45, 0x70, 0x6f, 0x63, 0x68, 0x48, 0x00, 0x52, 0x05, 0x65, 0x70, 0x6f, 0x63, 0x68, 0x12, 0x1a, 0x0a, 0x07, 0x67, 0x65, 0x6e, 0x65, 0x73, 0x69, 0x73, 0x18, 0x02, 0x20, 0x01, 0x28, 0x08, 0x48, 0x00, @@ -3642,7 +3642,7 @@ var file_proto_prysm_v1alpha1_beacon_chain_proto_rawDesc = []byte{ 0x4b, 0x65, 0x79, 0x73, 0x12, 0x68, 0x0a, 0x07, 0x69, 0x6e, 0x64, 0x69, 0x63, 0x65, 0x73, 0x18, 0x04, 0x20, 0x03, 0x28, 0x04, 0x42, 0x4e, 0x82, 0xb5, 0x18, 0x4a, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, - 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 
0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, + 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x56, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x52, 0x07, 0x69, 0x6e, 0x64, 0x69, 0x63, 0x65, 0x73, 0x12, 0x1b, @@ -3655,7 +3655,7 @@ var file_proto_prysm_v1alpha1_beacon_chain_proto_rawDesc = []byte{ 0x6e, 0x63, 0x65, 0x73, 0x12, 0x5b, 0x0a, 0x05, 0x65, 0x70, 0x6f, 0x63, 0x68, 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, 0x42, 0x45, 0x82, 0xb5, 0x18, 0x41, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, - 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, + 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x45, 0x70, 0x6f, 0x63, 0x68, 0x52, 0x05, 0x65, 0x70, 0x6f, 0x63, 0x68, 0x12, 0x4c, 0x0a, 0x08, 0x62, 0x61, 0x6c, 0x61, 0x6e, 0x63, 0x65, 0x73, 0x18, 0x02, 0x20, @@ -3673,7 +3673,7 @@ var file_proto_prysm_v1alpha1_beacon_chain_proto_rawDesc = []byte{ 0x70, 0x75, 0x62, 0x6c, 0x69, 0x63, 0x4b, 0x65, 0x79, 0x12, 0x64, 0x0a, 0x05, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x18, 0x02, 0x20, 0x01, 0x28, 0x04, 0x42, 0x4e, 0x82, 0xb5, 0x18, 0x4a, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, - 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, + 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x56, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x52, 0x05, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x12, @@ -3685,7 +3685,7 @@ var file_proto_prysm_v1alpha1_beacon_chain_proto_rawDesc = []byte{ 0x5d, 0x0a, 0x05, 0x65, 0x70, 0x6f, 0x63, 0x68, 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, 0x42, 0x45, 0x82, 0xb5, 0x18, 0x41, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, - 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, + 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x45, 0x70, 0x6f, 0x63, 0x68, 0x48, 0x00, 0x52, 0x05, 0x65, 0x70, 0x6f, 0x63, 0x68, 0x12, 0x1a, 0x0a, 0x07, 0x67, 0x65, 0x6e, 0x65, 0x73, 0x69, 0x73, 0x18, 0x02, 0x20, 0x01, 0x28, 0x08, 0x48, @@ -3700,7 +3700,7 @@ var file_proto_prysm_v1alpha1_beacon_chain_proto_rawDesc = []byte{ 0x68, 0x0a, 0x07, 0x69, 0x6e, 0x64, 0x69, 0x63, 0x65, 0x73, 0x18, 0x07, 0x20, 0x03, 0x28, 0x04, 0x42, 0x4e, 0x82, 0xb5, 0x18, 0x4a, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, - 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, + 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 
0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x56, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x52, 0x07, 0x69, 0x6e, 0x64, 0x69, 0x63, 0x65, 0x73, 0x3a, 0x02, 0x18, 0x01, 0x42, 0x0e, 0x0a, @@ -3709,7 +3709,7 @@ var file_proto_prysm_v1alpha1_beacon_chain_proto_rawDesc = []byte{ 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x66, 0x0a, 0x05, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, 0x42, 0x4e, 0x82, 0xb5, 0x18, 0x4a, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, - 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, + 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x56, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x48, 0x00, 0x52, 0x05, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x12, 0x27, 0x0a, @@ -3720,7 +3720,7 @@ var file_proto_prysm_v1alpha1_beacon_chain_proto_rawDesc = []byte{ 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, 0x73, 0x12, 0x5b, 0x0a, 0x05, 0x65, 0x70, 0x6f, 0x63, 0x68, 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, 0x42, 0x45, 0x82, 0xb5, 0x18, 0x41, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, - 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, + 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x45, 0x70, 0x6f, 0x63, 0x68, 0x52, 0x05, 0x65, 0x70, 0x6f, 0x63, 0x68, 0x12, 0x5b, 0x0a, 0x0e, 0x76, 0x61, 0x6c, 0x69, 0x64, 0x61, @@ -3738,7 +3738,7 @@ var file_proto_prysm_v1alpha1_beacon_chain_proto_rawDesc = []byte{ 0x72, 0x12, 0x64, 0x0a, 0x05, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, 0x42, 0x4e, 0x82, 0xb5, 0x18, 0x4a, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, - 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, + 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x56, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x52, 0x05, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x12, 0x3e, 0x0a, 0x09, 0x76, 0x61, 0x6c, 0x69, 0x64, @@ -3751,7 +3751,7 @@ var file_proto_prysm_v1alpha1_beacon_chain_proto_rawDesc = []byte{ 0x65, 0x73, 0x74, 0x12, 0x5d, 0x0a, 0x05, 0x65, 0x70, 0x6f, 0x63, 0x68, 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, 0x42, 0x45, 0x82, 0xb5, 0x18, 0x41, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, - 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, + 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 
0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x45, 0x70, 0x6f, 0x63, 0x68, 0x48, 0x00, 0x52, 0x05, 0x65, 0x70, 0x6f, 0x63, 0x68, 0x12, 0x1a, 0x0a, 0x07, 0x67, 0x65, 0x6e, 0x65, 0x73, 0x69, 0x73, 0x18, 0x02, 0x20, @@ -3761,7 +3761,7 @@ var file_proto_prysm_v1alpha1_beacon_chain_proto_rawDesc = []byte{ 0x43, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x73, 0x12, 0x5b, 0x0a, 0x05, 0x65, 0x70, 0x6f, 0x63, 0x68, 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, 0x42, 0x45, 0x82, 0xb5, 0x18, 0x41, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, - 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, + 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x45, 0x70, 0x6f, 0x63, 0x68, 0x52, 0x05, 0x65, 0x70, 0x6f, 0x63, 0x68, 0x12, 0x3c, 0x0a, 0x15, 0x61, 0x63, 0x74, 0x69, 0x76, 0x61, 0x74, 0x65, @@ -3772,7 +3772,7 @@ var file_proto_prysm_v1alpha1_beacon_chain_proto_rawDesc = []byte{ 0x69, 0x6e, 0x64, 0x69, 0x63, 0x65, 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, 0x04, 0x42, 0x4e, 0x82, 0xb5, 0x18, 0x4a, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, - 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, + 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x56, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x52, 0x10, 0x61, 0x63, 0x74, 0x69, 0x76, 0x61, 0x74, 0x65, 0x64, 0x49, 0x6e, 0x64, 0x69, 0x63, 0x65, 0x73, 0x12, @@ -3783,7 +3783,7 @@ var file_proto_prysm_v1alpha1_beacon_chain_proto_rawDesc = []byte{ 0x64, 0x5f, 0x69, 0x6e, 0x64, 0x69, 0x63, 0x65, 0x73, 0x18, 0x05, 0x20, 0x03, 0x28, 0x04, 0x42, 0x4e, 0x82, 0xb5, 0x18, 0x4a, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, - 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, + 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x56, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x52, 0x0d, 0x65, 0x78, 0x69, 0x74, 0x65, 0x64, 0x49, 0x6e, 0x64, 0x69, 0x63, 0x65, 0x73, 0x12, 0x38, @@ -3794,7 +3794,7 @@ var file_proto_prysm_v1alpha1_beacon_chain_proto_rawDesc = []byte{ 0x68, 0x65, 0x64, 0x5f, 0x69, 0x6e, 0x64, 0x69, 0x63, 0x65, 0x73, 0x18, 0x07, 0x20, 0x03, 0x28, 0x04, 0x42, 0x4e, 0x82, 0xb5, 0x18, 0x4a, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, - 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, + 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x56, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, 0x49, 
0x6e, 0x64, 0x65, 0x78, 0x52, 0x0e, 0x73, 0x6c, 0x61, 0x73, 0x68, 0x65, 0x64, 0x49, 0x6e, 0x64, 0x69, 0x63, 0x65, @@ -3805,7 +3805,7 @@ var file_proto_prysm_v1alpha1_beacon_chain_proto_rawDesc = []byte{ 0x6a, 0x65, 0x63, 0x74, 0x65, 0x64, 0x5f, 0x69, 0x6e, 0x64, 0x69, 0x63, 0x65, 0x73, 0x18, 0x09, 0x20, 0x03, 0x28, 0x04, 0x42, 0x4e, 0x82, 0xb5, 0x18, 0x4a, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, - 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, + 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x56, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x52, 0x0e, 0x65, 0x6a, 0x65, 0x63, 0x74, 0x65, 0x64, 0x49, 0x6e, 0x64, @@ -3817,7 +3817,7 @@ var file_proto_prysm_v1alpha1_beacon_chain_proto_rawDesc = []byte{ 0x07, 0x69, 0x6e, 0x64, 0x69, 0x63, 0x65, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x04, 0x42, 0x4e, 0x82, 0xb5, 0x18, 0x4a, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, - 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, + 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x56, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x52, 0x07, 0x69, 0x6e, 0x64, 0x69, 0x63, 0x65, 0x73, 0x3a, 0x02, 0x18, 0x01, 0x22, 0xc8, 0x06, 0x0a, 0x1c, @@ -3830,14 +3830,14 @@ var file_proto_prysm_v1alpha1_beacon_chain_proto_rawDesc = []byte{ 0x63, 0x6c, 0x75, 0x73, 0x69, 0x6f, 0x6e, 0x5f, 0x73, 0x6c, 0x6f, 0x74, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x04, 0x42, 0x46, 0x82, 0xb5, 0x18, 0x40, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, - 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, + 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x53, 0x6c, 0x6f, 0x74, 0x18, 0x01, 0x52, 0x0e, 0x69, 0x6e, 0x63, 0x6c, 0x75, 0x73, 0x69, 0x6f, 0x6e, 0x53, 0x6c, 0x6f, 0x74, 0x73, 0x12, 0x77, 0x0a, 0x13, 0x69, 0x6e, 0x63, 0x6c, 0x75, 0x73, 0x69, 0x6f, 0x6e, 0x5f, 0x64, 0x69, 0x73, 0x74, 0x61, 0x6e, 0x63, 0x65, 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, 0x04, 0x42, 0x46, 0x82, 0xb5, 0x18, 0x40, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, - 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, + 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x53, 0x6c, 0x6f, 0x74, 0x18, 0x01, 0x52, 0x12, 0x69, 0x6e, 0x63, 0x6c, 0x75, 0x73, 0x69, 0x6f, 0x6e, 0x44, 0x69, 0x73, 0x74, 0x61, @@ -3888,7 +3888,7 @@ var file_proto_prysm_v1alpha1_beacon_chain_proto_rawDesc = 
[]byte{ 0x6e, 0x5f, 0x76, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, 0x5f, 0x69, 0x6e, 0x64, 0x69, 0x63, 0x65, 0x73, 0x18, 0x04, 0x20, 0x03, 0x28, 0x04, 0x42, 0x4e, 0x82, 0xb5, 0x18, 0x4a, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, - 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, + 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x56, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x52, 0x1a, 0x61, 0x63, 0x74, 0x69, 0x76, @@ -3897,7 +3897,7 @@ var file_proto_prysm_v1alpha1_beacon_chain_proto_rawDesc = []byte{ 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, 0x5f, 0x69, 0x6e, 0x64, 0x69, 0x63, 0x65, 0x73, 0x18, 0x05, 0x20, 0x03, 0x28, 0x04, 0x42, 0x4e, 0x82, 0xb5, 0x18, 0x4a, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, - 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, + 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x56, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x52, 0x14, 0x65, 0x78, 0x69, 0x74, 0x56, 0x61, 0x6c, 0x69, @@ -3907,7 +3907,7 @@ var file_proto_prysm_v1alpha1_beacon_chain_proto_rawDesc = []byte{ 0x75, 0x65, 0x73, 0x74, 0x12, 0x5d, 0x0a, 0x05, 0x65, 0x70, 0x6f, 0x63, 0x68, 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, 0x42, 0x45, 0x82, 0xb5, 0x18, 0x41, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, - 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, + 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x45, 0x70, 0x6f, 0x63, 0x68, 0x48, 0x00, 0x52, 0x05, 0x65, 0x70, 0x6f, 0x63, 0x68, 0x12, 0x1a, 0x0a, 0x07, 0x67, 0x65, 0x6e, 0x65, 0x73, 0x69, 0x73, 0x18, 0x02, @@ -3918,7 +3918,7 @@ var file_proto_prysm_v1alpha1_beacon_chain_proto_rawDesc = []byte{ 0x64, 0x69, 0x63, 0x65, 0x73, 0x18, 0x04, 0x20, 0x03, 0x28, 0x04, 0x42, 0x4e, 0x82, 0xb5, 0x18, 0x4a, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, - 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, + 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x56, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x52, 0x07, 0x69, 0x6e, 0x64, 0x69, 0x63, 0x65, 0x73, 0x12, 0x1b, 0x0a, 0x09, 0x70, 0x61, 0x67, 0x65, 0x5f, 0x73, 0x69, 0x7a, @@ -3931,7 +3931,7 @@ var file_proto_prysm_v1alpha1_beacon_chain_proto_rawDesc = []byte{ 0x05, 0x65, 0x70, 0x6f, 0x63, 0x68, 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, 0x42, 0x45, 0x82, 0xb5, 0x18, 
0x41, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, - 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, + 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x45, 0x70, 0x6f, 0x63, 0x68, 0x52, 0x05, 0x65, 0x70, 0x6f, 0x63, 0x68, 0x12, 0x61, 0x0a, 0x0b, 0x61, 0x73, 0x73, 0x69, 0x67, 0x6e, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, @@ -3949,7 +3949,7 @@ var file_proto_prysm_v1alpha1_beacon_chain_proto_rawDesc = []byte{ 0x62, 0x65, 0x61, 0x63, 0x6f, 0x6e, 0x5f, 0x63, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x74, 0x65, 0x65, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x04, 0x42, 0x4e, 0x82, 0xb5, 0x18, 0x4a, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, - 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, + 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x56, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x52, 0x10, 0x62, 0x65, 0x61, 0x63, 0x6f, 0x6e, 0x43, @@ -3957,21 +3957,21 @@ var file_proto_prysm_v1alpha1_beacon_chain_proto_rawDesc = []byte{ 0x6d, 0x69, 0x74, 0x74, 0x65, 0x65, 0x5f, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x18, 0x02, 0x20, 0x01, 0x28, 0x04, 0x42, 0x4e, 0x82, 0xb5, 0x18, 0x4a, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, - 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, + 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x43, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x74, 0x65, 0x65, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x52, 0x0e, 0x63, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x74, 0x65, 0x65, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x12, 0x69, 0x0a, 0x0d, 0x61, 0x74, 0x74, 0x65, 0x73, 0x74, 0x65, 0x72, 0x5f, 0x73, 0x6c, 0x6f, 0x74, 0x18, 0x03, 0x20, 0x01, 0x28, 0x04, 0x42, 0x44, 0x82, 0xb5, 0x18, 0x40, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, - 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, + 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x53, 0x6c, 0x6f, 0x74, 0x52, 0x0c, 0x61, 0x74, 0x74, 0x65, 0x73, 0x74, 0x65, 0x72, 0x53, 0x6c, 0x6f, 0x74, 0x12, 0x6b, 0x0a, 0x0e, 0x70, 0x72, 0x6f, 0x70, 0x6f, 0x73, 0x65, 0x72, 0x5f, 0x73, 0x6c, 0x6f, 0x74, 0x73, 0x18, 0x04, 0x20, 0x03, 0x28, 0x04, 0x42, 0x44, 0x82, 0xb5, 0x18, 0x40, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, - 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, + 
0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x53, 0x6c, 0x6f, 0x74, 0x52, 0x0d, 0x70, 0x72, 0x6f, 0x70, 0x6f, 0x73, 0x65, 0x72, 0x53, 0x6c, 0x6f, 0x74, 0x73, 0x12, 0x27, 0x0a, 0x0a, 0x70, 0x75, @@ -3981,7 +3981,7 @@ var file_proto_prysm_v1alpha1_beacon_chain_proto_rawDesc = []byte{ 0x5f, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x18, 0x06, 0x20, 0x01, 0x28, 0x04, 0x42, 0x4e, 0x82, 0xb5, 0x18, 0x4a, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, - 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, + 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x56, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x52, 0x0e, 0x76, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x3a, 0x02, 0x18, 0x01, @@ -3990,7 +3990,7 @@ var file_proto_prysm_v1alpha1_beacon_chain_proto_rawDesc = []byte{ 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x5d, 0x0a, 0x05, 0x65, 0x70, 0x6f, 0x63, 0x68, 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, 0x42, 0x45, 0x82, 0xb5, 0x18, 0x41, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, - 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, + 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x45, 0x70, 0x6f, 0x63, 0x68, 0x48, 0x00, 0x52, 0x05, 0x65, 0x70, 0x6f, 0x63, 0x68, 0x12, 0x1a, 0x0a, 0x07, 0x67, 0x65, 0x6e, 0x65, 0x73, 0x69, 0x73, 0x18, @@ -4001,7 +4001,7 @@ var file_proto_prysm_v1alpha1_beacon_chain_proto_rawDesc = []byte{ 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x5b, 0x0a, 0x05, 0x65, 0x70, 0x6f, 0x63, 0x68, 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, 0x42, 0x45, 0x82, 0xb5, 0x18, 0x41, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, - 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, + 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x45, 0x70, 0x6f, 0x63, 0x68, 0x52, 0x05, 0x65, 0x70, 0x6f, 0x63, 0x68, 0x12, 0x1c, 0x0a, 0x09, 0x66, 0x69, 0x6e, 0x61, 0x6c, 0x69, 0x7a, 0x65, @@ -4054,7 +4054,7 @@ var file_proto_prysm_v1alpha1_beacon_chain_proto_rawDesc = []byte{ 0x65, 0x12, 0x77, 0x0a, 0x0f, 0x73, 0x6c, 0x61, 0x73, 0x68, 0x65, 0x64, 0x5f, 0x69, 0x6e, 0x64, 0x69, 0x63, 0x65, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x04, 0x42, 0x4e, 0x82, 0xb5, 0x18, 0x4a, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, - 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, + 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 
0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x56, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x52, 0x0e, 0x73, 0x6c, 0x61, 0x73, @@ -4063,7 +4063,7 @@ var file_proto_prysm_v1alpha1_beacon_chain_proto_rawDesc = []byte{ 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x5b, 0x0a, 0x05, 0x65, 0x70, 0x6f, 0x63, 0x68, 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, 0x42, 0x45, 0x82, 0xb5, 0x18, 0x41, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, - 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, + 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x45, 0x70, 0x6f, 0x63, 0x68, 0x52, 0x05, 0x65, 0x70, 0x6f, 0x63, 0x68, 0x12, 0x1f, 0x0a, 0x0b, 0x70, 0x75, 0x62, 0x6c, 0x69, 0x63, 0x5f, 0x6b, 0x65, 0x79, @@ -4071,7 +4071,7 @@ var file_proto_prysm_v1alpha1_beacon_chain_proto_rawDesc = []byte{ 0x65, 0x79, 0x73, 0x12, 0x68, 0x0a, 0x07, 0x69, 0x6e, 0x64, 0x69, 0x63, 0x65, 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, 0x04, 0x42, 0x4e, 0x82, 0xb5, 0x18, 0x4a, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, - 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, + 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x56, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x52, 0x07, 0x69, 0x6e, 0x64, 0x69, 0x63, 0x65, 0x73, 0x22, 0x9b, 0x0a, @@ -4087,7 +4087,7 @@ var file_proto_prysm_v1alpha1_beacon_chain_proto_rawDesc = []byte{ 0x56, 0x6f, 0x74, 0x65, 0x12, 0x5b, 0x0a, 0x05, 0x65, 0x70, 0x6f, 0x63, 0x68, 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, 0x42, 0x45, 0x82, 0xb5, 0x18, 0x41, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, - 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, + 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x45, 0x70, 0x6f, 0x63, 0x68, 0x52, 0x05, 0x65, 0x70, 0x6f, 0x63, 0x68, 0x12, 0x1d, 0x0a, 0x0a, 0x70, 0x75, 0x62, 0x6c, 0x69, 0x63, 0x5f, 0x6b, 0x65, 0x79, 0x18, @@ -4095,7 +4095,7 @@ var file_proto_prysm_v1alpha1_beacon_chain_proto_rawDesc = []byte{ 0x12, 0x77, 0x0a, 0x0f, 0x76, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, 0x5f, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x18, 0x03, 0x20, 0x01, 0x28, 0x04, 0x42, 0x4e, 0x82, 0xb5, 0x18, 0x4a, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, - 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, + 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 
0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x56, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x52, 0x0e, 0x76, 0x61, 0x6c, 0x69, 0x64, @@ -4143,14 +4143,14 @@ var file_proto_prysm_v1alpha1_beacon_chain_proto_rawDesc = []byte{ 0x63, 0x6c, 0x75, 0x73, 0x69, 0x6f, 0x6e, 0x5f, 0x73, 0x6c, 0x6f, 0x74, 0x18, 0x0e, 0x20, 0x01, 0x28, 0x04, 0x42, 0x46, 0x82, 0xb5, 0x18, 0x40, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, - 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, + 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x53, 0x6c, 0x6f, 0x74, 0x18, 0x01, 0x52, 0x0d, 0x69, 0x6e, 0x63, 0x6c, 0x75, 0x73, 0x69, 0x6f, 0x6e, 0x53, 0x6c, 0x6f, 0x74, 0x12, 0x75, 0x0a, 0x12, 0x69, 0x6e, 0x63, 0x6c, 0x75, 0x73, 0x69, 0x6f, 0x6e, 0x5f, 0x64, 0x69, 0x73, 0x74, 0x61, 0x6e, 0x63, 0x65, 0x18, 0x0f, 0x20, 0x01, 0x28, 0x04, 0x42, 0x46, 0x82, 0xb5, 0x18, 0x40, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, - 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, + 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x53, 0x6c, 0x6f, 0x74, 0x18, 0x01, 0x52, 0x11, 0x69, 0x6e, 0x63, 0x6c, 0x75, 0x73, 0x69, 0x6f, 0x6e, 0x44, 0x69, 0x73, 0x74, 0x61, 0x6e, 0x63, 0x65, @@ -4392,7 +4392,7 @@ var file_proto_prysm_v1alpha1_beacon_chain_proto_rawDesc = []byte{ 0x31, 0x42, 0x10, 0x42, 0x65, 0x61, 0x63, 0x6f, 0x6e, 0x43, 0x68, 0x61, 0x69, 0x6e, 0x50, 0x72, 0x6f, 0x74, 0x6f, 0x50, 0x01, 0x5a, 0x39, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, - 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2f, 0x70, 0x72, + 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x3b, 0x65, 0x74, 0x68, 0xaa, 0x02, 0x15, 0x45, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x45, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0xca, 0x02, 0x15, 0x45, 0x74, 0x68, 0x65, 0x72, diff --git a/proto/prysm/v1alpha1/beacon_chain.proto b/proto/prysm/v1alpha1/beacon_chain.proto index 4871ff9b07..25006fa2c2 100644 --- a/proto/prysm/v1alpha1/beacon_chain.proto +++ b/proto/prysm/v1alpha1/beacon_chain.proto @@ -25,7 +25,7 @@ import "proto/prysm/v1alpha1/beacon_core_types.proto"; import "proto/prysm/v1alpha1/validator.proto"; option csharp_namespace = "Ethereum.Eth.v1alpha1"; -option go_package = "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1;eth"; +option go_package = "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1;eth"; option java_multiple_files = true; option java_outer_classname = "BeaconChainProto"; option java_package = "org.ethereum.eth.v1alpha1"; @@ -314,7 +314,7 @@ message ListIndexedAttestationsRequest { // Retrieve attestations by epoch processed. 
uint64 epoch = 1 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Epoch" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Epoch" ]; // Optional criteria to retrieve genesis epoch attestations. @@ -339,7 +339,7 @@ message ListAttestationsRequest { // Filter attestations by epoch processed. uint64 epoch = 1 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Epoch" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Epoch" ]; // Optional criteria to retrieve attestations from 0 epoch. @@ -424,14 +424,14 @@ message ListBlocksRequest { // from another fork. uint64 slot = 2 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Slot" ]; // The epoch number for which to retrieve blocks. If specified, this // will return all blocks found within the span of the specified epoch. uint64 epoch = 3 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Epoch" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Epoch" ]; // Optional criteria to retrieve genesis block. @@ -519,13 +519,13 @@ message ChainHead { // Slot of the head block. uint64 head_slot = 1 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Slot" ]; // Epoch of the head block. uint64 head_epoch = 2 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Epoch" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Epoch" ]; // 32 byte merkle tree root of the canonical head block in the beacon node. @@ -534,13 +534,13 @@ message ChainHead { // Most recent slot that contains the finalized block. uint64 finalized_slot = 4 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Slot" ]; // Epoch of the finalized block. uint64 finalized_epoch = 5 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Epoch" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Epoch" ]; // Most recent 32 byte finalized block root. @@ -549,13 +549,13 @@ message ChainHead { // Most recent slot that contains the justified block. uint64 justified_slot = 7 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Slot" ]; // Epoch of the justified block. uint64 justified_epoch = 8 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Epoch" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Epoch" ]; // Most recent 32 byte justified block root. @@ -564,13 +564,13 @@ message ChainHead { // Most recent slot that contains the previous justified block. uint64 previous_justified_slot = 10 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Slot" ]; // Epoch of the previous justified block. 
uint64 previous_justified_epoch = 11 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Epoch" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Epoch" ]; // Previous 32 byte justified block root. @@ -588,7 +588,7 @@ message ListCommitteesRequest { // Optional criteria to retrieve data at a specific epoch. uint64 epoch = 1 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Epoch" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Epoch" ]; // Optional criteria to retrieve genesis data. @@ -604,7 +604,7 @@ message BeaconCommittees { // a slot. repeated uint64 validator_indices = 1 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/" + "github.com/OffchainLabs/prysm/v7/consensus-types/" "primitives.ValidatorIndex" ]; } @@ -616,7 +616,7 @@ message BeaconCommittees { // The epoch for which the committees in the response belong to. uint64 epoch = 1 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Epoch" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Epoch" ]; // A map of validator committees by slot. @@ -633,7 +633,7 @@ message ListValidatorBalancesRequest { // Optional criteria to retrieve balances at a specific epoch. uint64 epoch = 1 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Epoch" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Epoch" ]; // Optional criteria to retrieve the genesis list of balances. @@ -646,7 +646,7 @@ message ListValidatorBalancesRequest { // Validator indices to filter validators for the given epoch. repeated uint64 indices = 4 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/" + "github.com/OffchainLabs/prysm/v7/consensus-types/" "primitives.ValidatorIndex" ]; // The maximum number of Validators to return in the response. @@ -665,7 +665,7 @@ message ValidatorBalances { // Epoch which the state was considered to determine the validator balances. uint64 epoch = 1 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Epoch" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Epoch" ]; message Balance { @@ -674,7 +674,7 @@ message ValidatorBalances { // Validator's index in the validator set. uint64 index = 2 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/" + "github.com/OffchainLabs/prysm/v7/consensus-types/" "primitives.ValidatorIndex" ]; // Validator's balance in gwei. @@ -704,7 +704,7 @@ message ListValidatorsRequest { // with the current active validator set. uint64 epoch = 1 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Epoch" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Epoch" ]; // Optional criteria to retrieve the genesis set of validators. @@ -731,7 +731,7 @@ message ListValidatorsRequest { // This field is optional. repeated uint64 indices = 7 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/" + "github.com/OffchainLabs/prysm/v7/consensus-types/" "primitives.ValidatorIndex" ]; } @@ -741,7 +741,7 @@ message GetValidatorRequest { oneof query_filter { // Validator index in the registry. 
uint64 index = 1 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/" + "github.com/OffchainLabs/prysm/v7/consensus-types/" "primitives.ValidatorIndex" ]; // 48 byte validator public key. @@ -757,12 +757,12 @@ message Validators { // set is from the Ethereum proof of stake genesis set. uint64 epoch = 1 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Epoch" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Epoch" ]; message ValidatorContainer { uint64 index = 1 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/" + "github.com/OffchainLabs/prysm/v7/consensus-types/" "primitives.ValidatorIndex" ]; Validator validator = 2; } @@ -785,7 +785,7 @@ message GetValidatorActiveSetChangesRequest { // Optional criteria to retrieve balances at a specific epoch. uint64 epoch = 1 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Epoch" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Epoch" ]; // Optional criteria to retrieve the genesis list of balances. @@ -800,7 +800,7 @@ message ActiveSetChanges { // set. uint64 epoch = 1 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Epoch" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Epoch" ]; // 48 byte validator public keys that have been activated in the given epoch. @@ -810,7 +810,7 @@ message ActiveSetChanges { // Indices of validators activated in the given epoch. repeated uint64 activated_indices = 3 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/" + "github.com/OffchainLabs/prysm/v7/consensus-types/" "primitives.ValidatorIndex" ]; // 48 byte validator public keys that have been voluntarily exited in the @@ -821,7 +821,7 @@ message ActiveSetChanges { // Indices of validators exited in the given epoch. repeated uint64 exited_indices = 5 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/" + "github.com/OffchainLabs/prysm/v7/consensus-types/" "primitives.ValidatorIndex" ]; // 48 byte validator public keys that have been slashed in the given epoch. @@ -831,7 +831,7 @@ message ActiveSetChanges { // Indices of validators slashed in the given epoch. repeated uint64 slashed_indices = 7 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/" + "github.com/OffchainLabs/prysm/v7/consensus-types/" "primitives.ValidatorIndex" ]; // 48 byte validator public keys that have been involuntarily ejected in this @@ -842,7 +842,7 @@ message ActiveSetChanges { // Indices of validators ejected in the given epoch. repeated uint64 ejected_indices = 9 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/" + "github.com/OffchainLabs/prysm/v7/consensus-types/" "primitives.ValidatorIndex" ]; } @@ -854,7 +854,7 @@ message ValidatorPerformanceRequest { // A list of validator indices to retrieve performance by their indices. repeated uint64 indices = 2 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/" + "github.com/OffchainLabs/prysm/v7/consensus-types/" "primitives.ValidatorIndex" ]; } @@ -870,7 +870,7 @@ message ValidatorPerformanceResponse { // the Altair hard fork. 
repeated uint64 inclusion_slots = 2 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot", + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Slot", deprecated = true ]; // The distance of when validator submitted and got included in the chain, the @@ -879,7 +879,7 @@ message ValidatorPerformanceResponse { // fork. repeated uint64 inclusion_distances = 3 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot", + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Slot", deprecated = true ]; // Whether the list of validator recently correctly voted for source at @@ -932,14 +932,14 @@ message ValidatorQueue { // is the next validator index to be processed. repeated uint64 activation_validator_indices = 4 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/" + "github.com/OffchainLabs/prysm/v7/consensus-types/" "primitives.ValidatorIndex" ]; // Ordered list of validator indices awaiting exit. 0th item in the list is // the next validator index to be processed. repeated uint64 exit_validator_indices = 5 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/" + "github.com/OffchainLabs/prysm/v7/consensus-types/" "primitives.ValidatorIndex" ]; } @@ -950,7 +950,7 @@ message ListValidatorAssignmentsRequest { // Epoch to validator assignments for. uint64 epoch = 1 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Epoch" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Epoch" ]; // Whether or not to query for the genesis information. @@ -961,7 +961,7 @@ message ListValidatorAssignmentsRequest { // Validator indices to filter assignments for the given epoch. repeated uint64 indices = 4 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/" + "github.com/OffchainLabs/prysm/v7/consensus-types/" "primitives.ValidatorIndex" ]; // The maximum number of ValidatorAssignments to return in the response. @@ -985,27 +985,27 @@ message ValidatorAssignments { // slot and same committee. repeated uint64 beacon_committees = 1 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/" + "github.com/OffchainLabs/prysm/v7/consensus-types/" "primitives.ValidatorIndex" ]; // Committee index represents the committee of validator that's in. uint64 committee_index = 2 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/" + "github.com/OffchainLabs/prysm/v7/consensus-types/" "primitives.CommitteeIndex" ]; // Beacon chain slot in which the validator must perform its assigned // duty as an attester. uint64 attester_slot = 3 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Slot" ]; // Beacon chain slots in which the validator must perform its assigned // duty as a proposer. repeated uint64 proposer_slots = 4 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Slot" ]; // 48 byte BLS public key. @@ -1015,14 +1015,14 @@ message ValidatorAssignments { // Validator index in the beacon state. 
uint64 validator_index = 6 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/" + "github.com/OffchainLabs/prysm/v7/consensus-types/" "primitives.ValidatorIndex" ]; } // The epoch for which this set of validator assignments is valid. uint64 epoch = 1 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Epoch" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Epoch" ]; repeated CommitteeAssignment assignments = 2; @@ -1044,7 +1044,7 @@ message GetValidatorParticipationRequest { // Epoch to request participation information. uint64 epoch = 1 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Epoch" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Epoch" ]; // Whether or not to query for the genesis information. @@ -1057,7 +1057,7 @@ message ValidatorParticipationResponse { // Epoch which this message is applicable. uint64 epoch = 1 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Epoch" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Epoch" ]; // Whether or not epoch has been finalized. @@ -1119,7 +1119,7 @@ message SubmitSlashingResponse { // proposer/attester slashing object. repeated uint64 slashed_indices = 1 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/" + "github.com/OffchainLabs/prysm/v7/consensus-types/" "primitives.ValidatorIndex" ]; } @@ -1127,14 +1127,14 @@ message IndividualVotesRequest { // Epoch of the request. uint64 epoch = 1 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Epoch" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Epoch" ]; // Validator 48 byte BLS public keys to filter validators for the given epoch. repeated bytes public_keys = 2; // Validator indices to filter validators for the given epoch. repeated uint64 indices = 3 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/" + "github.com/OffchainLabs/prysm/v7/consensus-types/" "primitives.ValidatorIndex" ]; } @@ -1143,14 +1143,14 @@ message IndividualVotesRespond { // The epoch of the vote status request. uint64 epoch = 1 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Epoch" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Epoch" ]; // The public key of the vote status request. bytes public_key = 2; // The validator index of the request. uint64 validator_index = 3 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/" + "github.com/OffchainLabs/prysm/v7/consensus-types/" "primitives.ValidatorIndex" ]; // Has the validator been slashed. bool is_slashed = 4; @@ -1176,14 +1176,14 @@ message IndividualVotesRespond { // Only available in phase0. uint64 inclusion_slot = 14 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot", + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Slot", deprecated = true ]; // How many slots have passed until the validator's attestation got included // in the block. Only available in phase0. uint64 inclusion_distance = 15 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot", + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Slot", deprecated = true ]; // The inactivity score of the validator tracks validator participation. 
diff --git a/proto/prysm/v1alpha1/beacon_core_types.pb.go b/proto/prysm/v1alpha1/beacon_core_types.pb.go index d63fec3a30..b3ee1bf564 100755 --- a/proto/prysm/v1alpha1/beacon_core_types.pb.go +++ b/proto/prysm/v1alpha1/beacon_core_types.pb.go @@ -11,8 +11,8 @@ import ( sync "sync" github_com_OffchainLabs_go_bitfield "github.com/OffchainLabs/go-bitfield" - github_com_OffchainLabs_prysm_v6_consensus_types_primitives "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - _ "github.com/OffchainLabs/prysm/v6/proto/eth/ext" + github_com_OffchainLabs_prysm_v7_consensus_types_primitives "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + _ "github.com/OffchainLabs/prysm/v7/proto/eth/ext" protoreflect "google.golang.org/protobuf/reflect/protoreflect" protoimpl "google.golang.org/protobuf/runtime/protoimpl" ) @@ -26,8 +26,8 @@ const ( type BeaconBlockHeader struct { state protoimpl.MessageState `protogen:"open.v1"` - Slot github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot `protobuf:"varint,1,opt,name=slot,proto3" json:"slot,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot"` - ProposerIndex github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex `protobuf:"varint,2,opt,name=proposer_index,json=proposerIndex,proto3" json:"proposer_index,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.ValidatorIndex"` + Slot github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot `protobuf:"varint,1,opt,name=slot,proto3" json:"slot,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Slot"` + ProposerIndex github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex `protobuf:"varint,2,opt,name=proposer_index,json=proposerIndex,proto3" json:"proposer_index,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.ValidatorIndex"` ParentRoot []byte `protobuf:"bytes,3,opt,name=parent_root,json=parentRoot,proto3" json:"parent_root,omitempty" ssz-size:"32"` StateRoot []byte `protobuf:"bytes,4,opt,name=state_root,json=stateRoot,proto3" json:"state_root,omitempty" ssz-size:"32"` BodyRoot []byte `protobuf:"bytes,5,opt,name=body_root,json=bodyRoot,proto3" json:"body_root,omitempty" ssz-size:"32"` @@ -65,18 +65,18 @@ func (*BeaconBlockHeader) Descriptor() ([]byte, []int) { return file_proto_prysm_v1alpha1_beacon_core_types_proto_rawDescGZIP(), []int{0} } -func (x *BeaconBlockHeader) GetSlot() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot { +func (x *BeaconBlockHeader) GetSlot() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot { if x != nil { return x.Slot } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot(0) } -func (x *BeaconBlockHeader) GetProposerIndex() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex { +func (x *BeaconBlockHeader) GetProposerIndex() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex { if x != nil { return x.ProposerIndex } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex(0) } func (x *BeaconBlockHeader) GetParentRoot() []byte { @@ -370,8 +370,8 @@ func (x *SignedVoluntaryExit) GetSignature() []byte { type VoluntaryExit struct { state protoimpl.MessageState `protogen:"open.v1"` - Epoch 
github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch `protobuf:"varint,1,opt,name=epoch,proto3" json:"epoch,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Epoch"` - ValidatorIndex github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex `protobuf:"varint,2,opt,name=validator_index,json=validatorIndex,proto3" json:"validator_index,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.ValidatorIndex"` + Epoch github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch `protobuf:"varint,1,opt,name=epoch,proto3" json:"epoch,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Epoch"` + ValidatorIndex github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex `protobuf:"varint,2,opt,name=validator_index,json=validatorIndex,proto3" json:"validator_index,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.ValidatorIndex"` unknownFields protoimpl.UnknownFields sizeCache protoimpl.SizeCache } @@ -406,18 +406,18 @@ func (*VoluntaryExit) Descriptor() ([]byte, []int) { return file_proto_prysm_v1alpha1_beacon_core_types_proto_rawDescGZIP(), []int{6} } -func (x *VoluntaryExit) GetEpoch() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch { +func (x *VoluntaryExit) GetEpoch() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch { if x != nil { return x.Epoch } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch(0) } -func (x *VoluntaryExit) GetValidatorIndex() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex { +func (x *VoluntaryExit) GetValidatorIndex() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex { if x != nil { return x.ValidatorIndex } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex(0) } type AttesterSlashingElectra struct { @@ -588,7 +588,7 @@ type Fork struct { state protoimpl.MessageState `protogen:"open.v1"` PreviousVersion []byte `protobuf:"bytes,1,opt,name=previous_version,json=previousVersion,proto3" json:"previous_version,omitempty" ssz-size:"4"` CurrentVersion []byte `protobuf:"bytes,2,opt,name=current_version,json=currentVersion,proto3" json:"current_version,omitempty" ssz-size:"4"` - Epoch github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch `protobuf:"varint,3,opt,name=epoch,proto3" json:"epoch,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Epoch"` + Epoch github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch `protobuf:"varint,3,opt,name=epoch,proto3" json:"epoch,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Epoch"` unknownFields protoimpl.UnknownFields sizeCache protoimpl.SizeCache } @@ -637,11 +637,11 @@ func (x *Fork) GetCurrentVersion() []byte { return nil } -func (x *Fork) GetEpoch() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch { +func (x *Fork) GetEpoch() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch { if x != nil { return x.Epoch } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch(0) } type SyncCommittee struct { @@ -754,10 +754,10 @@ type Validator struct { 
WithdrawalCredentials []byte `protobuf:"bytes,2,opt,name=withdrawal_credentials,json=withdrawalCredentials,proto3" json:"withdrawal_credentials,omitempty" ssz-size:"32"` EffectiveBalance uint64 `protobuf:"varint,3,opt,name=effective_balance,json=effectiveBalance,proto3" json:"effective_balance,omitempty"` Slashed bool `protobuf:"varint,4,opt,name=slashed,proto3" json:"slashed,omitempty"` - ActivationEligibilityEpoch github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch `protobuf:"varint,5,opt,name=activation_eligibility_epoch,json=activationEligibilityEpoch,proto3" json:"activation_eligibility_epoch,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Epoch"` - ActivationEpoch github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch `protobuf:"varint,6,opt,name=activation_epoch,json=activationEpoch,proto3" json:"activation_epoch,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Epoch"` - ExitEpoch github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch `protobuf:"varint,7,opt,name=exit_epoch,json=exitEpoch,proto3" json:"exit_epoch,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Epoch"` - WithdrawableEpoch github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch `protobuf:"varint,8,opt,name=withdrawable_epoch,json=withdrawableEpoch,proto3" json:"withdrawable_epoch,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Epoch"` + ActivationEligibilityEpoch github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch `protobuf:"varint,5,opt,name=activation_eligibility_epoch,json=activationEligibilityEpoch,proto3" json:"activation_eligibility_epoch,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Epoch"` + ActivationEpoch github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch `protobuf:"varint,6,opt,name=activation_epoch,json=activationEpoch,proto3" json:"activation_epoch,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Epoch"` + ExitEpoch github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch `protobuf:"varint,7,opt,name=exit_epoch,json=exitEpoch,proto3" json:"exit_epoch,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Epoch"` + WithdrawableEpoch github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch `protobuf:"varint,8,opt,name=withdrawable_epoch,json=withdrawableEpoch,proto3" json:"withdrawable_epoch,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Epoch"` unknownFields protoimpl.UnknownFields sizeCache protoimpl.SizeCache } @@ -820,32 +820,32 @@ func (x *Validator) GetSlashed() bool { return false } -func (x *Validator) GetActivationEligibilityEpoch() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch { +func (x *Validator) GetActivationEligibilityEpoch() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch { if x != nil { return x.ActivationEligibilityEpoch } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch(0) } -func (x *Validator) GetActivationEpoch() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch { +func (x *Validator) GetActivationEpoch() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch { if x != nil { return x.ActivationEpoch } - return 
github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch(0) } -func (x *Validator) GetExitEpoch() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch { +func (x *Validator) GetExitEpoch() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch { if x != nil { return x.ExitEpoch } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch(0) } -func (x *Validator) GetWithdrawableEpoch() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch { +func (x *Validator) GetWithdrawableEpoch() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch { if x != nil { return x.WithdrawableEpoch } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch(0) } type Deposit_Data struct { @@ -932,13 +932,13 @@ var file_proto_prysm_v1alpha1_beacon_core_types_proto_rawDesc = []byte{ 0x12, 0x58, 0x0a, 0x04, 0x73, 0x6c, 0x6f, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, 0x42, 0x44, 0x82, 0xb5, 0x18, 0x40, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, - 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, + 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x53, 0x6c, 0x6f, 0x74, 0x52, 0x04, 0x73, 0x6c, 0x6f, 0x74, 0x12, 0x75, 0x0a, 0x0e, 0x70, 0x72, 0x6f, 0x70, 0x6f, 0x73, 0x65, 0x72, 0x5f, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x18, 0x02, 0x20, 0x01, 0x28, 0x04, 0x42, 0x4e, 0x82, 0xb5, 0x18, 0x4a, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, - 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, + 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x56, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x52, 0x0d, 0x70, 0x72, 0x6f, 0x70, 0x6f, 0x73, 0x65, 0x72, 0x49, 0x6e, 0x64, 0x65, @@ -1006,13 +1006,13 @@ var file_proto_prysm_v1alpha1_beacon_core_types_proto_rawDesc = []byte{ 0x5b, 0x0a, 0x05, 0x65, 0x70, 0x6f, 0x63, 0x68, 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, 0x42, 0x45, 0x82, 0xb5, 0x18, 0x41, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, - 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, + 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x45, 0x70, 0x6f, 0x63, 0x68, 0x52, 0x05, 0x65, 0x70, 0x6f, 0x63, 0x68, 0x12, 0x77, 0x0a, 0x0f, 0x76, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, 0x5f, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x18, 0x02, 0x20, 0x01, 0x28, 0x04, 0x42, 0x4e, 0x82, 0xb5, 0x18, 0x4a, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 
0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, - 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, + 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x56, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x52, 0x0e, 0x76, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, @@ -1062,7 +1062,7 @@ var file_proto_prysm_v1alpha1_beacon_core_types_proto_rawDesc = []byte{ 0x69, 0x6f, 0x6e, 0x12, 0x5b, 0x0a, 0x05, 0x65, 0x70, 0x6f, 0x63, 0x68, 0x18, 0x03, 0x20, 0x01, 0x28, 0x04, 0x42, 0x45, 0x82, 0xb5, 0x18, 0x41, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, - 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, + 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x45, 0x70, 0x6f, 0x63, 0x68, 0x52, 0x05, 0x65, 0x70, 0x6f, 0x63, 0x68, 0x22, 0x68, 0x0a, 0x0d, 0x53, 0x79, 0x6e, 0x63, 0x43, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x74, 0x65, @@ -1096,7 +1096,7 @@ var file_proto_prysm_v1alpha1_beacon_core_types_proto_rawDesc = []byte{ 0x65, 0x6c, 0x69, 0x67, 0x69, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x79, 0x5f, 0x65, 0x70, 0x6f, 0x63, 0x68, 0x18, 0x05, 0x20, 0x01, 0x28, 0x04, 0x42, 0x45, 0x82, 0xb5, 0x18, 0x41, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, - 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, + 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x45, 0x70, 0x6f, 0x63, 0x68, 0x52, 0x1a, 0x61, 0x63, 0x74, 0x69, 0x76, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x45, 0x6c, 0x69, 0x67, 0x69, 0x62, @@ -1104,21 +1104,21 @@ var file_proto_prysm_v1alpha1_beacon_core_types_proto_rawDesc = []byte{ 0x74, 0x69, 0x76, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x65, 0x70, 0x6f, 0x63, 0x68, 0x18, 0x06, 0x20, 0x01, 0x28, 0x04, 0x42, 0x45, 0x82, 0xb5, 0x18, 0x41, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, - 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, + 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x45, 0x70, 0x6f, 0x63, 0x68, 0x52, 0x0f, 0x61, 0x63, 0x74, 0x69, 0x76, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x45, 0x70, 0x6f, 0x63, 0x68, 0x12, 0x64, 0x0a, 0x0a, 0x65, 0x78, 0x69, 0x74, 0x5f, 0x65, 0x70, 0x6f, 0x63, 0x68, 0x18, 0x07, 0x20, 0x01, 0x28, 0x04, 0x42, 0x45, 0x82, 0xb5, 0x18, 0x41, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, - 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, + 0x79, 0x73, 0x6d, 0x2f, 
0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x45, 0x70, 0x6f, 0x63, 0x68, 0x52, 0x09, 0x65, 0x78, 0x69, 0x74, 0x45, 0x70, 0x6f, 0x63, 0x68, 0x12, 0x74, 0x0a, 0x12, 0x77, 0x69, 0x74, 0x68, 0x64, 0x72, 0x61, 0x77, 0x61, 0x62, 0x6c, 0x65, 0x5f, 0x65, 0x70, 0x6f, 0x63, 0x68, 0x18, 0x08, 0x20, 0x01, 0x28, 0x04, 0x42, 0x45, 0x82, 0xb5, 0x18, 0x41, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, - 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, + 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x45, 0x70, 0x6f, 0x63, 0x68, 0x52, 0x11, 0x77, 0x69, 0x74, 0x68, 0x64, 0x72, 0x61, 0x77, 0x61, 0x62, 0x6c, 0x65, 0x45, 0x70, 0x6f, 0x63, 0x68, 0x42, 0x9e, 0x01, 0x0a, 0x19, 0x6f, 0x72, 0x67, @@ -1126,7 +1126,7 @@ var file_proto_prysm_v1alpha1_beacon_core_types_proto_rawDesc = []byte{ 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x42, 0x14, 0x42, 0x65, 0x61, 0x63, 0x6f, 0x6e, 0x43, 0x6f, 0x72, 0x65, 0x54, 0x79, 0x70, 0x65, 0x73, 0x50, 0x72, 0x6f, 0x74, 0x6f, 0x50, 0x01, 0x5a, 0x39, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, - 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, + 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x3b, 0x65, 0x74, 0x68, 0xaa, 0x02, 0x15, 0x45, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x45, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, diff --git a/proto/prysm/v1alpha1/beacon_core_types.proto b/proto/prysm/v1alpha1/beacon_core_types.proto index 5f55e5f472..8e9ff04385 100644 --- a/proto/prysm/v1alpha1/beacon_core_types.proto +++ b/proto/prysm/v1alpha1/beacon_core_types.proto @@ -8,7 +8,7 @@ import "proto/eth/ext/options.proto"; import "proto/prysm/v1alpha1/attestation.proto"; option csharp_namespace = "Ethereum.Eth.v1alpha1"; -option go_package = "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1;eth"; +option go_package = "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1;eth"; option java_multiple_files = true; option java_outer_classname = "BeaconCoreTypesProto"; option java_package = "org.ethereum.eth.v1alpha1"; @@ -22,12 +22,12 @@ message BeaconBlockHeader { // Beacon chain slot that this block represents. uint64 slot = 1 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Slot" ]; // Validator index of the validator that proposed the block header. uint64 proposer_index = 2 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/" + "github.com/OffchainLabs/prysm/v7/" "consensus-types/primitives.ValidatorIndex" ]; // 32 byte merkle tree root of the parent ssz encoded block. @@ -116,13 +116,13 @@ message VoluntaryExit { // The epoch on when exit request becomes valid. 
uint64 epoch = 1 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Epoch" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Epoch" ]; // Index of the exiting validator. uint64 validator_index = 2 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/" + "github.com/OffchainLabs/prysm/v7/consensus-types/" "primitives.ValidatorIndex" ]; } @@ -162,7 +162,7 @@ message Fork { bytes current_version = 2 [ (ethereum.eth.ext.ssz_size) = "4" ]; uint64 epoch = 3 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Epoch" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Epoch" ]; } @@ -203,7 +203,7 @@ message Validator { // activated. uint64 activation_eligibility_epoch = 5 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Epoch" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Epoch" ]; // Epoch when the validator was activated. This field may be zero if the @@ -211,7 +211,7 @@ message Validator { // FAR_FUTURE_EPOCH if the validator has not been activated. uint64 activation_epoch = 6 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Epoch" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Epoch" ]; // Epoch when the validator was exited. This field is FAR_FUTURE_EPOCH if @@ -221,7 +221,7 @@ message Validator { // https://github.com/ethereum/consensus-specs/blob/v0.9.2/specs/core/0_beacon-chain.md#constants uint64 exit_epoch = 7 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Epoch" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Epoch" ]; // Epoch when the validator is eligible to withdraw their funds. 
This field @@ -231,6 +231,6 @@ message Validator { // https://github.com/ethereum/consensus-specs/blob/v0.9.2/specs/core/0_beacon-chain.md#constants uint64 withdrawable_epoch = 8 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Epoch" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Epoch" ]; } diff --git a/proto/prysm/v1alpha1/beacon_state.pb.go b/proto/prysm/v1alpha1/beacon_state.pb.go index 51274ecf97..32ed07e2c7 100755 --- a/proto/prysm/v1alpha1/beacon_state.pb.go +++ b/proto/prysm/v1alpha1/beacon_state.pb.go @@ -11,9 +11,9 @@ import ( sync "sync" github_com_OffchainLabs_go_bitfield "github.com/OffchainLabs/go-bitfield" - github_com_OffchainLabs_prysm_v6_consensus_types_primitives "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - v1 "github.com/OffchainLabs/prysm/v6/proto/engine/v1" - _ "github.com/OffchainLabs/prysm/v6/proto/eth/ext" + github_com_OffchainLabs_prysm_v7_consensus_types_primitives "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + v1 "github.com/OffchainLabs/prysm/v7/proto/engine/v1" + _ "github.com/OffchainLabs/prysm/v7/proto/eth/ext" protoreflect "google.golang.org/protobuf/reflect/protoreflect" protoimpl "google.golang.org/protobuf/runtime/protoimpl" ) @@ -29,7 +29,7 @@ type BeaconState struct { state protoimpl.MessageState `protogen:"open.v1"` GenesisTime uint64 `protobuf:"varint,1001,opt,name=genesis_time,json=genesisTime,proto3" json:"genesis_time,omitempty"` GenesisValidatorsRoot []byte `protobuf:"bytes,1002,opt,name=genesis_validators_root,json=genesisValidatorsRoot,proto3" json:"genesis_validators_root,omitempty" ssz-size:"32"` - Slot github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot `protobuf:"varint,1003,opt,name=slot,proto3" json:"slot,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot"` + Slot github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot `protobuf:"varint,1003,opt,name=slot,proto3" json:"slot,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Slot"` Fork *Fork `protobuf:"bytes,1004,opt,name=fork,proto3" json:"fork,omitempty"` LatestBlockHeader *BeaconBlockHeader `protobuf:"bytes,2001,opt,name=latest_block_header,json=latestBlockHeader,proto3" json:"latest_block_header,omitempty"` BlockRoots [][]byte `protobuf:"bytes,2002,rep,name=block_roots,json=blockRoots,proto3" json:"block_roots,omitempty" ssz-size:"8192,32"` @@ -96,11 +96,11 @@ func (x *BeaconState) GetGenesisValidatorsRoot() []byte { return nil } -func (x *BeaconState) GetSlot() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot { +func (x *BeaconState) GetSlot() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot { if x != nil { return x.Slot } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot(0) } func (x *BeaconState) GetFork() *Fork { @@ -233,8 +233,8 @@ type PendingAttestation struct { state protoimpl.MessageState `protogen:"open.v1"` AggregationBits github_com_OffchainLabs_go_bitfield.Bitlist `protobuf:"bytes,1,opt,name=aggregation_bits,json=aggregationBits,proto3" json:"aggregation_bits,omitempty" cast-type:"github.com/OffchainLabs/go-bitfield.Bitlist" ssz-max:"2048"` Data *AttestationData `protobuf:"bytes,2,opt,name=data,proto3" json:"data,omitempty"` - InclusionDelay github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot 
`protobuf:"varint,3,opt,name=inclusion_delay,json=inclusionDelay,proto3" json:"inclusion_delay,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot"` - ProposerIndex github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex `protobuf:"varint,4,opt,name=proposer_index,json=proposerIndex,proto3" json:"proposer_index,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.ValidatorIndex"` + InclusionDelay github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot `protobuf:"varint,3,opt,name=inclusion_delay,json=inclusionDelay,proto3" json:"inclusion_delay,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Slot"` + ProposerIndex github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex `protobuf:"varint,4,opt,name=proposer_index,json=proposerIndex,proto3" json:"proposer_index,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.ValidatorIndex"` unknownFields protoimpl.UnknownFields sizeCache protoimpl.SizeCache } @@ -283,18 +283,18 @@ func (x *PendingAttestation) GetData() *AttestationData { return nil } -func (x *PendingAttestation) GetInclusionDelay() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot { +func (x *PendingAttestation) GetInclusionDelay() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot { if x != nil { return x.InclusionDelay } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot(0) } -func (x *PendingAttestation) GetProposerIndex() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex { +func (x *PendingAttestation) GetProposerIndex() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex { if x != nil { return x.ProposerIndex } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex(0) } type HistoricalBatch struct { @@ -351,7 +351,7 @@ func (x *HistoricalBatch) GetStateRoots() [][]byte { type StateSummary struct { state protoimpl.MessageState `protogen:"open.v1"` - Slot github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot `protobuf:"varint,1,opt,name=slot,proto3" json:"slot,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot"` + Slot github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot `protobuf:"varint,1,opt,name=slot,proto3" json:"slot,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Slot"` Root []byte `protobuf:"bytes,2,opt,name=root,proto3" json:"root,omitempty"` unknownFields protoimpl.UnknownFields sizeCache protoimpl.SizeCache @@ -387,11 +387,11 @@ func (*StateSummary) Descriptor() ([]byte, []int) { return file_proto_prysm_v1alpha1_beacon_state_proto_rawDescGZIP(), []int{3} } -func (x *StateSummary) GetSlot() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot { +func (x *StateSummary) GetSlot() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot { if x != nil { return x.Slot } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot(0) } func (x *StateSummary) GetRoot() []byte { @@ -705,7 +705,7 @@ type BeaconStateAltair struct { state protoimpl.MessageState `protogen:"open.v1"` GenesisTime uint64 
`protobuf:"varint,1001,opt,name=genesis_time,json=genesisTime,proto3" json:"genesis_time,omitempty"` GenesisValidatorsRoot []byte `protobuf:"bytes,1002,opt,name=genesis_validators_root,json=genesisValidatorsRoot,proto3" json:"genesis_validators_root,omitempty" ssz-size:"32"` - Slot github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot `protobuf:"varint,1003,opt,name=slot,proto3" json:"slot,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot"` + Slot github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot `protobuf:"varint,1003,opt,name=slot,proto3" json:"slot,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Slot"` Fork *Fork `protobuf:"bytes,1004,opt,name=fork,proto3" json:"fork,omitempty"` LatestBlockHeader *BeaconBlockHeader `protobuf:"bytes,2001,opt,name=latest_block_header,json=latestBlockHeader,proto3" json:"latest_block_header,omitempty"` BlockRoots [][]byte `protobuf:"bytes,2002,rep,name=block_roots,json=blockRoots,proto3" json:"block_roots,omitempty" ssz-size:"8192,32"` @@ -775,11 +775,11 @@ func (x *BeaconStateAltair) GetGenesisValidatorsRoot() []byte { return nil } -func (x *BeaconStateAltair) GetSlot() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot { +func (x *BeaconStateAltair) GetSlot() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot { if x != nil { return x.Slot } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot(0) } func (x *BeaconStateAltair) GetFork() *Fork { @@ -931,7 +931,7 @@ func (x *BeaconStateAltair) GetNextSyncCommittee() *SyncCommittee { type SyncAggregatorSelectionData struct { state protoimpl.MessageState `protogen:"open.v1"` - Slot github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot `protobuf:"varint,1,opt,name=slot,proto3" json:"slot,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot"` + Slot github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot `protobuf:"varint,1,opt,name=slot,proto3" json:"slot,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Slot"` SubcommitteeIndex uint64 `protobuf:"varint,2,opt,name=subcommittee_index,json=subcommitteeIndex,proto3" json:"subcommittee_index,omitempty"` unknownFields protoimpl.UnknownFields sizeCache protoimpl.SizeCache @@ -967,11 +967,11 @@ func (*SyncAggregatorSelectionData) Descriptor() ([]byte, []int) { return file_proto_prysm_v1alpha1_beacon_state_proto_rawDescGZIP(), []int{10} } -func (x *SyncAggregatorSelectionData) GetSlot() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot { +func (x *SyncAggregatorSelectionData) GetSlot() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot { if x != nil { return x.Slot } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot(0) } func (x *SyncAggregatorSelectionData) GetSubcommitteeIndex() uint64 { @@ -985,7 +985,7 @@ type BeaconStateBellatrix struct { state protoimpl.MessageState `protogen:"open.v1"` GenesisTime uint64 `protobuf:"varint,1001,opt,name=genesis_time,json=genesisTime,proto3" json:"genesis_time,omitempty"` GenesisValidatorsRoot []byte `protobuf:"bytes,1002,opt,name=genesis_validators_root,json=genesisValidatorsRoot,proto3" json:"genesis_validators_root,omitempty" ssz-size:"32"` - Slot 
github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot `protobuf:"varint,1003,opt,name=slot,proto3" json:"slot,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot"` + Slot github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot `protobuf:"varint,1003,opt,name=slot,proto3" json:"slot,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Slot"` Fork *Fork `protobuf:"bytes,1004,opt,name=fork,proto3" json:"fork,omitempty"` LatestBlockHeader *BeaconBlockHeader `protobuf:"bytes,2001,opt,name=latest_block_header,json=latestBlockHeader,proto3" json:"latest_block_header,omitempty"` BlockRoots [][]byte `protobuf:"bytes,2002,rep,name=block_roots,json=blockRoots,proto3" json:"block_roots,omitempty" ssz-size:"8192,32"` @@ -1056,11 +1056,11 @@ func (x *BeaconStateBellatrix) GetGenesisValidatorsRoot() []byte { return nil } -func (x *BeaconStateBellatrix) GetSlot() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot { +func (x *BeaconStateBellatrix) GetSlot() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot { if x != nil { return x.Slot } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot(0) } func (x *BeaconStateBellatrix) GetFork() *Fork { @@ -1221,7 +1221,7 @@ type BeaconStateCapella struct { state protoimpl.MessageState `protogen:"open.v1"` GenesisTime uint64 `protobuf:"varint,1001,opt,name=genesis_time,json=genesisTime,proto3" json:"genesis_time,omitempty"` GenesisValidatorsRoot []byte `protobuf:"bytes,1002,opt,name=genesis_validators_root,json=genesisValidatorsRoot,proto3" json:"genesis_validators_root,omitempty" ssz-size:"32"` - Slot github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot `protobuf:"varint,1003,opt,name=slot,proto3" json:"slot,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot"` + Slot github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot `protobuf:"varint,1003,opt,name=slot,proto3" json:"slot,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Slot"` Fork *Fork `protobuf:"bytes,1004,opt,name=fork,proto3" json:"fork,omitempty"` LatestBlockHeader *BeaconBlockHeader `protobuf:"bytes,2001,opt,name=latest_block_header,json=latestBlockHeader,proto3" json:"latest_block_header,omitempty"` BlockRoots [][]byte `protobuf:"bytes,2002,rep,name=block_roots,json=blockRoots,proto3" json:"block_roots,omitempty" ssz-size:"8192,32"` @@ -1245,7 +1245,7 @@ type BeaconStateCapella struct { NextSyncCommittee *SyncCommittee `protobuf:"bytes,9003,opt,name=next_sync_committee,json=nextSyncCommittee,proto3" json:"next_sync_committee,omitempty"` LatestExecutionPayloadHeader *v1.ExecutionPayloadHeaderCapella `protobuf:"bytes,10001,opt,name=latest_execution_payload_header,json=latestExecutionPayloadHeader,proto3" json:"latest_execution_payload_header,omitempty"` NextWithdrawalIndex uint64 `protobuf:"varint,11001,opt,name=next_withdrawal_index,json=nextWithdrawalIndex,proto3" json:"next_withdrawal_index,omitempty"` - NextWithdrawalValidatorIndex github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex `protobuf:"varint,11002,opt,name=next_withdrawal_validator_index,json=nextWithdrawalValidatorIndex,proto3" json:"next_withdrawal_validator_index,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.ValidatorIndex"` + NextWithdrawalValidatorIndex 
github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex `protobuf:"varint,11002,opt,name=next_withdrawal_validator_index,json=nextWithdrawalValidatorIndex,proto3" json:"next_withdrawal_validator_index,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.ValidatorIndex"` HistoricalSummaries []*HistoricalSummary `protobuf:"bytes,11003,rep,name=historical_summaries,json=historicalSummaries,proto3" json:"historical_summaries,omitempty" ssz-max:"16777216"` unknownFields protoimpl.UnknownFields sizeCache protoimpl.SizeCache @@ -1295,11 +1295,11 @@ func (x *BeaconStateCapella) GetGenesisValidatorsRoot() []byte { return nil } -func (x *BeaconStateCapella) GetSlot() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot { +func (x *BeaconStateCapella) GetSlot() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot { if x != nil { return x.Slot } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot(0) } func (x *BeaconStateCapella) GetFork() *Fork { @@ -1463,11 +1463,11 @@ func (x *BeaconStateCapella) GetNextWithdrawalIndex() uint64 { return 0 } -func (x *BeaconStateCapella) GetNextWithdrawalValidatorIndex() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex { +func (x *BeaconStateCapella) GetNextWithdrawalValidatorIndex() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex { if x != nil { return x.NextWithdrawalValidatorIndex } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex(0) } func (x *BeaconStateCapella) GetHistoricalSummaries() []*HistoricalSummary { @@ -1481,7 +1481,7 @@ type BeaconStateDeneb struct { state protoimpl.MessageState `protogen:"open.v1"` GenesisTime uint64 `protobuf:"varint,1001,opt,name=genesis_time,json=genesisTime,proto3" json:"genesis_time,omitempty"` GenesisValidatorsRoot []byte `protobuf:"bytes,1002,opt,name=genesis_validators_root,json=genesisValidatorsRoot,proto3" json:"genesis_validators_root,omitempty" ssz-size:"32"` - Slot github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot `protobuf:"varint,1003,opt,name=slot,proto3" json:"slot,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot"` + Slot github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot `protobuf:"varint,1003,opt,name=slot,proto3" json:"slot,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Slot"` Fork *Fork `protobuf:"bytes,1004,opt,name=fork,proto3" json:"fork,omitempty"` LatestBlockHeader *BeaconBlockHeader `protobuf:"bytes,2001,opt,name=latest_block_header,json=latestBlockHeader,proto3" json:"latest_block_header,omitempty"` BlockRoots [][]byte `protobuf:"bytes,2002,rep,name=block_roots,json=blockRoots,proto3" json:"block_roots,omitempty" ssz-size:"8192,32"` @@ -1505,7 +1505,7 @@ type BeaconStateDeneb struct { NextSyncCommittee *SyncCommittee `protobuf:"bytes,9003,opt,name=next_sync_committee,json=nextSyncCommittee,proto3" json:"next_sync_committee,omitempty"` LatestExecutionPayloadHeader *v1.ExecutionPayloadHeaderDeneb `protobuf:"bytes,10001,opt,name=latest_execution_payload_header,json=latestExecutionPayloadHeader,proto3" json:"latest_execution_payload_header,omitempty"` NextWithdrawalIndex uint64 `protobuf:"varint,11001,opt,name=next_withdrawal_index,json=nextWithdrawalIndex,proto3" 
json:"next_withdrawal_index,omitempty"` - NextWithdrawalValidatorIndex github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex `protobuf:"varint,11002,opt,name=next_withdrawal_validator_index,json=nextWithdrawalValidatorIndex,proto3" json:"next_withdrawal_validator_index,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.ValidatorIndex"` + NextWithdrawalValidatorIndex github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex `protobuf:"varint,11002,opt,name=next_withdrawal_validator_index,json=nextWithdrawalValidatorIndex,proto3" json:"next_withdrawal_validator_index,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.ValidatorIndex"` HistoricalSummaries []*HistoricalSummary `protobuf:"bytes,11003,rep,name=historical_summaries,json=historicalSummaries,proto3" json:"historical_summaries,omitempty" ssz-max:"16777216"` unknownFields protoimpl.UnknownFields sizeCache protoimpl.SizeCache @@ -1555,11 +1555,11 @@ func (x *BeaconStateDeneb) GetGenesisValidatorsRoot() []byte { return nil } -func (x *BeaconStateDeneb) GetSlot() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot { +func (x *BeaconStateDeneb) GetSlot() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot { if x != nil { return x.Slot } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot(0) } func (x *BeaconStateDeneb) GetFork() *Fork { @@ -1723,11 +1723,11 @@ func (x *BeaconStateDeneb) GetNextWithdrawalIndex() uint64 { return 0 } -func (x *BeaconStateDeneb) GetNextWithdrawalValidatorIndex() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex { +func (x *BeaconStateDeneb) GetNextWithdrawalValidatorIndex() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex { if x != nil { return x.NextWithdrawalValidatorIndex } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex(0) } func (x *BeaconStateDeneb) GetHistoricalSummaries() []*HistoricalSummary { @@ -1741,7 +1741,7 @@ type BeaconStateElectra struct { state protoimpl.MessageState `protogen:"open.v1"` GenesisTime uint64 `protobuf:"varint,1001,opt,name=genesis_time,json=genesisTime,proto3" json:"genesis_time,omitempty"` GenesisValidatorsRoot []byte `protobuf:"bytes,1002,opt,name=genesis_validators_root,json=genesisValidatorsRoot,proto3" json:"genesis_validators_root,omitempty" ssz-size:"32"` - Slot github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot `protobuf:"varint,1003,opt,name=slot,proto3" json:"slot,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot"` + Slot github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot `protobuf:"varint,1003,opt,name=slot,proto3" json:"slot,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Slot"` Fork *Fork `protobuf:"bytes,1004,opt,name=fork,proto3" json:"fork,omitempty"` LatestBlockHeader *BeaconBlockHeader `protobuf:"bytes,2001,opt,name=latest_block_header,json=latestBlockHeader,proto3" json:"latest_block_header,omitempty"` BlockRoots [][]byte `protobuf:"bytes,2002,rep,name=block_roots,json=blockRoots,proto3" json:"block_roots,omitempty" ssz-size:"8192,32"` @@ -1765,14 +1765,14 @@ type BeaconStateElectra struct { NextSyncCommittee *SyncCommittee 
`protobuf:"bytes,9003,opt,name=next_sync_committee,json=nextSyncCommittee,proto3" json:"next_sync_committee,omitempty"` LatestExecutionPayloadHeader *v1.ExecutionPayloadHeaderDeneb `protobuf:"bytes,10001,opt,name=latest_execution_payload_header,json=latestExecutionPayloadHeader,proto3" json:"latest_execution_payload_header,omitempty"` NextWithdrawalIndex uint64 `protobuf:"varint,11001,opt,name=next_withdrawal_index,json=nextWithdrawalIndex,proto3" json:"next_withdrawal_index,omitempty"` - NextWithdrawalValidatorIndex github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex `protobuf:"varint,11002,opt,name=next_withdrawal_validator_index,json=nextWithdrawalValidatorIndex,proto3" json:"next_withdrawal_validator_index,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.ValidatorIndex"` + NextWithdrawalValidatorIndex github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex `protobuf:"varint,11002,opt,name=next_withdrawal_validator_index,json=nextWithdrawalValidatorIndex,proto3" json:"next_withdrawal_validator_index,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.ValidatorIndex"` HistoricalSummaries []*HistoricalSummary `protobuf:"bytes,11003,rep,name=historical_summaries,json=historicalSummaries,proto3" json:"historical_summaries,omitempty" ssz-max:"16777216"` DepositRequestsStartIndex uint64 `protobuf:"varint,12001,opt,name=deposit_requests_start_index,json=depositRequestsStartIndex,proto3" json:"deposit_requests_start_index,omitempty"` - DepositBalanceToConsume github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Gwei `protobuf:"varint,12002,opt,name=deposit_balance_to_consume,json=depositBalanceToConsume,proto3" json:"deposit_balance_to_consume,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Gwei"` - ExitBalanceToConsume github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Gwei `protobuf:"varint,12003,opt,name=exit_balance_to_consume,json=exitBalanceToConsume,proto3" json:"exit_balance_to_consume,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Gwei"` - EarliestExitEpoch github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch `protobuf:"varint,12004,opt,name=earliest_exit_epoch,json=earliestExitEpoch,proto3" json:"earliest_exit_epoch,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Epoch"` - ConsolidationBalanceToConsume github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Gwei `protobuf:"varint,12005,opt,name=consolidation_balance_to_consume,json=consolidationBalanceToConsume,proto3" json:"consolidation_balance_to_consume,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Gwei"` - EarliestConsolidationEpoch github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch `protobuf:"varint,12006,opt,name=earliest_consolidation_epoch,json=earliestConsolidationEpoch,proto3" json:"earliest_consolidation_epoch,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Epoch"` + DepositBalanceToConsume github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Gwei `protobuf:"varint,12002,opt,name=deposit_balance_to_consume,json=depositBalanceToConsume,proto3" json:"deposit_balance_to_consume,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Gwei"` + ExitBalanceToConsume github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Gwei 
`protobuf:"varint,12003,opt,name=exit_balance_to_consume,json=exitBalanceToConsume,proto3" json:"exit_balance_to_consume,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Gwei"` + EarliestExitEpoch github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch `protobuf:"varint,12004,opt,name=earliest_exit_epoch,json=earliestExitEpoch,proto3" json:"earliest_exit_epoch,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Epoch"` + ConsolidationBalanceToConsume github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Gwei `protobuf:"varint,12005,opt,name=consolidation_balance_to_consume,json=consolidationBalanceToConsume,proto3" json:"consolidation_balance_to_consume,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Gwei"` + EarliestConsolidationEpoch github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch `protobuf:"varint,12006,opt,name=earliest_consolidation_epoch,json=earliestConsolidationEpoch,proto3" json:"earliest_consolidation_epoch,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Epoch"` PendingDeposits []*PendingDeposit `protobuf:"bytes,12007,rep,name=pending_deposits,json=pendingDeposits,proto3" json:"pending_deposits,omitempty" ssz-max:"134217728"` PendingPartialWithdrawals []*PendingPartialWithdrawal `protobuf:"bytes,12008,rep,name=pending_partial_withdrawals,json=pendingPartialWithdrawals,proto3" json:"pending_partial_withdrawals,omitempty" ssz-max:"134217728"` PendingConsolidations []*PendingConsolidation `protobuf:"bytes,12009,rep,name=pending_consolidations,json=pendingConsolidations,proto3" json:"pending_consolidations,omitempty" ssz-max:"262144"` @@ -1824,11 +1824,11 @@ func (x *BeaconStateElectra) GetGenesisValidatorsRoot() []byte { return nil } -func (x *BeaconStateElectra) GetSlot() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot { +func (x *BeaconStateElectra) GetSlot() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot { if x != nil { return x.Slot } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot(0) } func (x *BeaconStateElectra) GetFork() *Fork { @@ -1992,11 +1992,11 @@ func (x *BeaconStateElectra) GetNextWithdrawalIndex() uint64 { return 0 } -func (x *BeaconStateElectra) GetNextWithdrawalValidatorIndex() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex { +func (x *BeaconStateElectra) GetNextWithdrawalValidatorIndex() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex { if x != nil { return x.NextWithdrawalValidatorIndex } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex(0) } func (x *BeaconStateElectra) GetHistoricalSummaries() []*HistoricalSummary { @@ -2013,39 +2013,39 @@ func (x *BeaconStateElectra) GetDepositRequestsStartIndex() uint64 { return 0 } -func (x *BeaconStateElectra) GetDepositBalanceToConsume() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Gwei { +func (x *BeaconStateElectra) GetDepositBalanceToConsume() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Gwei { if x != nil { return x.DepositBalanceToConsume } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Gwei(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Gwei(0) } -func (x 
*BeaconStateElectra) GetExitBalanceToConsume() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Gwei { +func (x *BeaconStateElectra) GetExitBalanceToConsume() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Gwei { if x != nil { return x.ExitBalanceToConsume } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Gwei(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Gwei(0) } -func (x *BeaconStateElectra) GetEarliestExitEpoch() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch { +func (x *BeaconStateElectra) GetEarliestExitEpoch() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch { if x != nil { return x.EarliestExitEpoch } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch(0) } -func (x *BeaconStateElectra) GetConsolidationBalanceToConsume() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Gwei { +func (x *BeaconStateElectra) GetConsolidationBalanceToConsume() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Gwei { if x != nil { return x.ConsolidationBalanceToConsume } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Gwei(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Gwei(0) } -func (x *BeaconStateElectra) GetEarliestConsolidationEpoch() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch { +func (x *BeaconStateElectra) GetEarliestConsolidationEpoch() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch { if x != nil { return x.EarliestConsolidationEpoch } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch(0) } func (x *BeaconStateElectra) GetPendingDeposits() []*PendingDeposit { @@ -2073,7 +2073,7 @@ type BeaconStateFulu struct { state protoimpl.MessageState `protogen:"open.v1"` GenesisTime uint64 `protobuf:"varint,1001,opt,name=genesis_time,json=genesisTime,proto3" json:"genesis_time,omitempty"` GenesisValidatorsRoot []byte `protobuf:"bytes,1002,opt,name=genesis_validators_root,json=genesisValidatorsRoot,proto3" json:"genesis_validators_root,omitempty" ssz-size:"32"` - Slot github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot `protobuf:"varint,1003,opt,name=slot,proto3" json:"slot,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot"` + Slot github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot `protobuf:"varint,1003,opt,name=slot,proto3" json:"slot,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Slot"` Fork *Fork `protobuf:"bytes,1004,opt,name=fork,proto3" json:"fork,omitempty"` LatestBlockHeader *BeaconBlockHeader `protobuf:"bytes,2001,opt,name=latest_block_header,json=latestBlockHeader,proto3" json:"latest_block_header,omitempty"` BlockRoots [][]byte `protobuf:"bytes,2002,rep,name=block_roots,json=blockRoots,proto3" json:"block_roots,omitempty" ssz-size:"8192,32"` @@ -2097,14 +2097,14 @@ type BeaconStateFulu struct { NextSyncCommittee *SyncCommittee `protobuf:"bytes,9003,opt,name=next_sync_committee,json=nextSyncCommittee,proto3" json:"next_sync_committee,omitempty"` LatestExecutionPayloadHeader *v1.ExecutionPayloadHeaderDeneb `protobuf:"bytes,10001,opt,name=latest_execution_payload_header,json=latestExecutionPayloadHeader,proto3" json:"latest_execution_payload_header,omitempty"` NextWithdrawalIndex uint64 
`protobuf:"varint,11001,opt,name=next_withdrawal_index,json=nextWithdrawalIndex,proto3" json:"next_withdrawal_index,omitempty"` - NextWithdrawalValidatorIndex github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex `protobuf:"varint,11002,opt,name=next_withdrawal_validator_index,json=nextWithdrawalValidatorIndex,proto3" json:"next_withdrawal_validator_index,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.ValidatorIndex"` + NextWithdrawalValidatorIndex github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex `protobuf:"varint,11002,opt,name=next_withdrawal_validator_index,json=nextWithdrawalValidatorIndex,proto3" json:"next_withdrawal_validator_index,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.ValidatorIndex"` HistoricalSummaries []*HistoricalSummary `protobuf:"bytes,11003,rep,name=historical_summaries,json=historicalSummaries,proto3" json:"historical_summaries,omitempty" ssz-max:"16777216"` DepositRequestsStartIndex uint64 `protobuf:"varint,12001,opt,name=deposit_requests_start_index,json=depositRequestsStartIndex,proto3" json:"deposit_requests_start_index,omitempty"` - DepositBalanceToConsume github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Gwei `protobuf:"varint,12002,opt,name=deposit_balance_to_consume,json=depositBalanceToConsume,proto3" json:"deposit_balance_to_consume,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Gwei"` - ExitBalanceToConsume github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Gwei `protobuf:"varint,12003,opt,name=exit_balance_to_consume,json=exitBalanceToConsume,proto3" json:"exit_balance_to_consume,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Gwei"` - EarliestExitEpoch github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch `protobuf:"varint,12004,opt,name=earliest_exit_epoch,json=earliestExitEpoch,proto3" json:"earliest_exit_epoch,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Epoch"` - ConsolidationBalanceToConsume github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Gwei `protobuf:"varint,12005,opt,name=consolidation_balance_to_consume,json=consolidationBalanceToConsume,proto3" json:"consolidation_balance_to_consume,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Gwei"` - EarliestConsolidationEpoch github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch `protobuf:"varint,12006,opt,name=earliest_consolidation_epoch,json=earliestConsolidationEpoch,proto3" json:"earliest_consolidation_epoch,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Epoch"` + DepositBalanceToConsume github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Gwei `protobuf:"varint,12002,opt,name=deposit_balance_to_consume,json=depositBalanceToConsume,proto3" json:"deposit_balance_to_consume,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Gwei"` + ExitBalanceToConsume github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Gwei `protobuf:"varint,12003,opt,name=exit_balance_to_consume,json=exitBalanceToConsume,proto3" json:"exit_balance_to_consume,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Gwei"` + EarliestExitEpoch github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch `protobuf:"varint,12004,opt,name=earliest_exit_epoch,json=earliestExitEpoch,proto3" 
json:"earliest_exit_epoch,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Epoch"` + ConsolidationBalanceToConsume github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Gwei `protobuf:"varint,12005,opt,name=consolidation_balance_to_consume,json=consolidationBalanceToConsume,proto3" json:"consolidation_balance_to_consume,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Gwei"` + EarliestConsolidationEpoch github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch `protobuf:"varint,12006,opt,name=earliest_consolidation_epoch,json=earliestConsolidationEpoch,proto3" json:"earliest_consolidation_epoch,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Epoch"` PendingDeposits []*PendingDeposit `protobuf:"bytes,12007,rep,name=pending_deposits,json=pendingDeposits,proto3" json:"pending_deposits,omitempty" ssz-max:"134217728"` PendingPartialWithdrawals []*PendingPartialWithdrawal `protobuf:"bytes,12008,rep,name=pending_partial_withdrawals,json=pendingPartialWithdrawals,proto3" json:"pending_partial_withdrawals,omitempty" ssz-max:"134217728"` PendingConsolidations []*PendingConsolidation `protobuf:"bytes,12009,rep,name=pending_consolidations,json=pendingConsolidations,proto3" json:"pending_consolidations,omitempty" ssz-max:"262144"` @@ -2157,11 +2157,11 @@ func (x *BeaconStateFulu) GetGenesisValidatorsRoot() []byte { return nil } -func (x *BeaconStateFulu) GetSlot() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot { +func (x *BeaconStateFulu) GetSlot() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot { if x != nil { return x.Slot } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot(0) } func (x *BeaconStateFulu) GetFork() *Fork { @@ -2325,11 +2325,11 @@ func (x *BeaconStateFulu) GetNextWithdrawalIndex() uint64 { return 0 } -func (x *BeaconStateFulu) GetNextWithdrawalValidatorIndex() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex { +func (x *BeaconStateFulu) GetNextWithdrawalValidatorIndex() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex { if x != nil { return x.NextWithdrawalValidatorIndex } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex(0) } func (x *BeaconStateFulu) GetHistoricalSummaries() []*HistoricalSummary { @@ -2346,39 +2346,39 @@ func (x *BeaconStateFulu) GetDepositRequestsStartIndex() uint64 { return 0 } -func (x *BeaconStateFulu) GetDepositBalanceToConsume() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Gwei { +func (x *BeaconStateFulu) GetDepositBalanceToConsume() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Gwei { if x != nil { return x.DepositBalanceToConsume } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Gwei(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Gwei(0) } -func (x *BeaconStateFulu) GetExitBalanceToConsume() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Gwei { +func (x *BeaconStateFulu) GetExitBalanceToConsume() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Gwei { if x != nil { return x.ExitBalanceToConsume } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Gwei(0) + return 
github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Gwei(0) } -func (x *BeaconStateFulu) GetEarliestExitEpoch() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch { +func (x *BeaconStateFulu) GetEarliestExitEpoch() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch { if x != nil { return x.EarliestExitEpoch } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch(0) } -func (x *BeaconStateFulu) GetConsolidationBalanceToConsume() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Gwei { +func (x *BeaconStateFulu) GetConsolidationBalanceToConsume() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Gwei { if x != nil { return x.ConsolidationBalanceToConsume } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Gwei(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Gwei(0) } -func (x *BeaconStateFulu) GetEarliestConsolidationEpoch() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch { +func (x *BeaconStateFulu) GetEarliestConsolidationEpoch() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch { if x != nil { return x.EarliestConsolidationEpoch } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch(0) } func (x *BeaconStateFulu) GetPendingDeposits() []*PendingDeposit { @@ -2438,7 +2438,7 @@ var file_proto_prysm_v1alpha1_beacon_state_proto_rawDesc = []byte{ 0x73, 0x52, 0x6f, 0x6f, 0x74, 0x12, 0x59, 0x0a, 0x04, 0x73, 0x6c, 0x6f, 0x74, 0x18, 0xeb, 0x07, 0x20, 0x01, 0x28, 0x04, 0x42, 0x44, 0x82, 0xb5, 0x18, 0x40, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, - 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, + 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x53, 0x6c, 0x6f, 0x74, 0x52, 0x04, 0x73, 0x6c, 0x6f, 0x74, 0x12, 0x30, 0x0a, 0x04, 0x66, 0x6f, 0x72, 0x6b, 0x18, 0xec, 0x07, 0x20, 0x01, 0x28, 0x0b, 0x32, @@ -2540,14 +2540,14 @@ var file_proto_prysm_v1alpha1_beacon_state_proto_rawDesc = []byte{ 0x6d, 0x0a, 0x0f, 0x69, 0x6e, 0x63, 0x6c, 0x75, 0x73, 0x69, 0x6f, 0x6e, 0x5f, 0x64, 0x65, 0x6c, 0x61, 0x79, 0x18, 0x03, 0x20, 0x01, 0x28, 0x04, 0x42, 0x44, 0x82, 0xb5, 0x18, 0x40, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, - 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, + 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x53, 0x6c, 0x6f, 0x74, 0x52, 0x0e, 0x69, 0x6e, 0x63, 0x6c, 0x75, 0x73, 0x69, 0x6f, 0x6e, 0x44, 0x65, 0x6c, 0x61, 0x79, 0x12, 0x75, 0x0a, 0x0e, 0x70, 0x72, 0x6f, 0x70, 0x6f, 0x73, 0x65, 0x72, 0x5f, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x18, 0x04, 0x20, 0x01, 0x28, 0x04, 0x42, 0x4e, 0x82, 0xb5, 0x18, 0x4a, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, - 0x61, 0x62, 0x73, 0x2f, 0x70, 
0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, + 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x56, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x52, 0x0d, 0x70, 0x72, 0x6f, 0x70, 0x6f, 0x73, 0x65, 0x72, @@ -2562,7 +2562,7 @@ var file_proto_prysm_v1alpha1_beacon_state_proto_rawDesc = []byte{ 0x6d, 0x61, 0x72, 0x79, 0x12, 0x58, 0x0a, 0x04, 0x73, 0x6c, 0x6f, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, 0x42, 0x44, 0x82, 0xb5, 0x18, 0x40, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, - 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, + 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x53, 0x6c, 0x6f, 0x74, 0x52, 0x04, 0x73, 0x6c, 0x6f, 0x74, 0x12, 0x12, 0x0a, 0x04, 0x72, 0x6f, 0x6f, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x04, 0x72, 0x6f, @@ -2620,7 +2620,7 @@ var file_proto_prysm_v1alpha1_beacon_state_proto_rawDesc = []byte{ 0x72, 0x73, 0x52, 0x6f, 0x6f, 0x74, 0x12, 0x59, 0x0a, 0x04, 0x73, 0x6c, 0x6f, 0x74, 0x18, 0xeb, 0x07, 0x20, 0x01, 0x28, 0x04, 0x42, 0x44, 0x82, 0xb5, 0x18, 0x40, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, - 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, + 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x53, 0x6c, 0x6f, 0x74, 0x52, 0x04, 0x73, 0x6c, 0x6f, 0x74, 0x12, 0x30, 0x0a, 0x04, 0x66, 0x6f, 0x72, 0x6b, 0x18, 0xec, 0x07, 0x20, 0x01, 0x28, 0x0b, @@ -2724,7 +2724,7 @@ var file_proto_prysm_v1alpha1_beacon_state_proto_rawDesc = []byte{ 0x0a, 0x04, 0x73, 0x6c, 0x6f, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, 0x42, 0x44, 0x82, 0xb5, 0x18, 0x40, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, - 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, + 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x53, 0x6c, 0x6f, 0x74, 0x52, 0x04, 0x73, 0x6c, 0x6f, 0x74, 0x12, 0x2d, 0x0a, 0x12, 0x73, 0x75, 0x62, 0x63, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x74, 0x65, 0x65, 0x5f, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x18, 0x02, @@ -2740,7 +2740,7 @@ var file_proto_prysm_v1alpha1_beacon_state_proto_rawDesc = []byte{ 0x73, 0x52, 0x6f, 0x6f, 0x74, 0x12, 0x59, 0x0a, 0x04, 0x73, 0x6c, 0x6f, 0x74, 0x18, 0xeb, 0x07, 0x20, 0x01, 0x28, 0x04, 0x42, 0x44, 0x82, 0xb5, 0x18, 0x40, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, - 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, + 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 
0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x53, 0x6c, 0x6f, 0x74, 0x52, 0x04, 0x73, 0x6c, 0x6f, 0x74, 0x12, 0x30, 0x0a, 0x04, 0x66, 0x6f, 0x72, 0x6b, 0x18, 0xec, 0x07, 0x20, 0x01, 0x28, 0x0b, 0x32, @@ -2857,7 +2857,7 @@ var file_proto_prysm_v1alpha1_beacon_state_proto_rawDesc = []byte{ 0x04, 0x73, 0x6c, 0x6f, 0x74, 0x18, 0xeb, 0x07, 0x20, 0x01, 0x28, 0x04, 0x42, 0x44, 0x82, 0xb5, 0x18, 0x40, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, - 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, + 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x53, 0x6c, 0x6f, 0x74, 0x52, 0x04, 0x73, 0x6c, 0x6f, 0x74, 0x12, 0x30, 0x0a, 0x04, 0x66, 0x6f, 0x72, 0x6b, 0x18, 0xec, 0x07, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1b, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, @@ -2971,7 +2971,7 @@ var file_proto_prysm_v1alpha1_beacon_state_proto_rawDesc = []byte{ 0x74, 0x6f, 0x72, 0x5f, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x18, 0xfa, 0x55, 0x20, 0x01, 0x28, 0x04, 0x42, 0x4e, 0x82, 0xb5, 0x18, 0x4a, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, - 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, + 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x56, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x52, 0x1c, 0x6e, 0x65, 0x78, 0x74, 0x57, 0x69, 0x74, 0x68, 0x64, 0x72, 0x61, 0x77, 0x61, 0x6c, @@ -2993,7 +2993,7 @@ var file_proto_prysm_v1alpha1_beacon_state_proto_rawDesc = []byte{ 0x52, 0x6f, 0x6f, 0x74, 0x12, 0x59, 0x0a, 0x04, 0x73, 0x6c, 0x6f, 0x74, 0x18, 0xeb, 0x07, 0x20, 0x01, 0x28, 0x04, 0x42, 0x44, 0x82, 0xb5, 0x18, 0x40, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, - 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, + 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x53, 0x6c, 0x6f, 0x74, 0x52, 0x04, 0x73, 0x6c, 0x6f, 0x74, 0x12, 0x30, 0x0a, 0x04, 0x66, 0x6f, 0x72, 0x6b, 0x18, 0xec, 0x07, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1b, @@ -3107,7 +3107,7 @@ var file_proto_prysm_v1alpha1_beacon_state_proto_rawDesc = []byte{ 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, 0x5f, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x18, 0xfa, 0x55, 0x20, 0x01, 0x28, 0x04, 0x42, 0x4e, 0x82, 0xb5, 0x18, 0x4a, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, - 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, + 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 
0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x56, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x52, 0x1c, 0x6e, 0x65, 0x78, 0x74, 0x57, 0x69, 0x74, 0x68, 0x64, @@ -3129,7 +3129,7 @@ var file_proto_prysm_v1alpha1_beacon_state_proto_rawDesc = []byte{ 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, 0x73, 0x52, 0x6f, 0x6f, 0x74, 0x12, 0x59, 0x0a, 0x04, 0x73, 0x6c, 0x6f, 0x74, 0x18, 0xeb, 0x07, 0x20, 0x01, 0x28, 0x04, 0x42, 0x44, 0x82, 0xb5, 0x18, 0x40, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, - 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, + 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x53, 0x6c, 0x6f, 0x74, 0x52, 0x04, 0x73, 0x6c, 0x6f, 0x74, 0x12, 0x30, 0x0a, 0x04, 0x66, 0x6f, 0x72, 0x6b, 0x18, 0xec, @@ -3244,7 +3244,7 @@ var file_proto_prysm_v1alpha1_beacon_state_proto_rawDesc = []byte{ 0x69, 0x6e, 0x64, 0x65, 0x78, 0x18, 0xfa, 0x55, 0x20, 0x01, 0x28, 0x04, 0x42, 0x4e, 0x82, 0xb5, 0x18, 0x4a, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, - 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, + 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x56, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x52, 0x1c, 0x6e, 0x65, 0x78, 0x74, 0x57, 0x69, 0x74, 0x68, 0x64, 0x72, 0x61, 0x77, 0x61, 0x6c, 0x56, 0x61, 0x6c, 0x69, @@ -3264,7 +3264,7 @@ var file_proto_prysm_v1alpha1_beacon_state_proto_rawDesc = []byte{ 0x63, 0x6f, 0x6e, 0x73, 0x75, 0x6d, 0x65, 0x18, 0xe2, 0x5d, 0x20, 0x01, 0x28, 0x04, 0x42, 0x44, 0x82, 0xb5, 0x18, 0x40, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, - 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, + 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x47, 0x77, 0x65, 0x69, 0x52, 0x17, 0x64, 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x42, 0x61, 0x6c, 0x61, 0x6e, 0x63, 0x65, 0x54, 0x6f, 0x43, 0x6f, 0x6e, 0x73, 0x75, 0x6d, 0x65, 0x12, 0x7c, 0x0a, @@ -3272,14 +3272,14 @@ var file_proto_prysm_v1alpha1_beacon_state_proto_rawDesc = []byte{ 0x5f, 0x63, 0x6f, 0x6e, 0x73, 0x75, 0x6d, 0x65, 0x18, 0xe3, 0x5d, 0x20, 0x01, 0x28, 0x04, 0x42, 0x44, 0x82, 0xb5, 0x18, 0x40, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, - 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, + 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x47, 0x77, 0x65, 0x69, 0x52, 0x14, 0x65, 0x78, 0x69, 0x74, 
0x42, 0x61, 0x6c, 0x61, 0x6e, 0x63, 0x65, 0x54, 0x6f, 0x43, 0x6f, 0x6e, 0x73, 0x75, 0x6d, 0x65, 0x12, 0x76, 0x0a, 0x13, 0x65, 0x61, 0x72, 0x6c, 0x69, 0x65, 0x73, 0x74, 0x5f, 0x65, 0x78, 0x69, 0x74, 0x5f, 0x65, 0x70, 0x6f, 0x63, 0x68, 0x18, 0xe4, 0x5d, 0x20, 0x01, 0x28, 0x04, 0x42, 0x45, 0x82, 0xb5, 0x18, 0x41, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, - 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, + 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x45, 0x70, 0x6f, 0x63, 0x68, 0x52, 0x11, 0x65, 0x61, 0x72, 0x6c, 0x69, 0x65, 0x73, 0x74, 0x45, 0x78, 0x69, 0x74, 0x45, 0x70, @@ -3288,7 +3288,7 @@ var file_proto_prysm_v1alpha1_beacon_state_proto_rawDesc = []byte{ 0x5f, 0x63, 0x6f, 0x6e, 0x73, 0x75, 0x6d, 0x65, 0x18, 0xe5, 0x5d, 0x20, 0x01, 0x28, 0x04, 0x42, 0x44, 0x82, 0xb5, 0x18, 0x40, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, - 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, + 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x47, 0x77, 0x65, 0x69, 0x52, 0x1d, 0x63, 0x6f, 0x6e, 0x73, 0x6f, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x42, 0x61, 0x6c, 0x61, 0x6e, 0x63, 0x65, 0x54, 0x6f, 0x43, 0x6f, 0x6e, @@ -3297,7 +3297,7 @@ var file_proto_prysm_v1alpha1_beacon_state_proto_rawDesc = []byte{ 0x65, 0x70, 0x6f, 0x63, 0x68, 0x18, 0xe6, 0x5d, 0x20, 0x01, 0x28, 0x04, 0x42, 0x45, 0x82, 0xb5, 0x18, 0x41, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, - 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, + 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x45, 0x70, 0x6f, 0x63, 0x68, 0x52, 0x1a, 0x65, 0x61, 0x72, 0x6c, 0x69, 0x65, 0x73, 0x74, 0x43, 0x6f, 0x6e, 0x73, 0x6f, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x45, 0x70, 0x6f, 0x63, 0x68, 0x12, @@ -3333,7 +3333,7 @@ var file_proto_prysm_v1alpha1_beacon_state_proto_rawDesc = []byte{ 0x73, 0x6c, 0x6f, 0x74, 0x18, 0xeb, 0x07, 0x20, 0x01, 0x28, 0x04, 0x42, 0x44, 0x82, 0xb5, 0x18, 0x40, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, - 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, + 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x53, 0x6c, 0x6f, 0x74, 0x52, 0x04, 0x73, 0x6c, 0x6f, 0x74, 0x12, 0x30, 0x0a, 0x04, 0x66, 0x6f, 0x72, 0x6b, 0x18, 0xec, 0x07, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1b, 0x2e, 0x65, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, @@ -3447,7 +3447,7 @@ var 
file_proto_prysm_v1alpha1_beacon_state_proto_rawDesc = []byte{ 0x5f, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x18, 0xfa, 0x55, 0x20, 0x01, 0x28, 0x04, 0x42, 0x4e, 0x82, 0xb5, 0x18, 0x4a, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, - 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, + 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x56, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x52, 0x1c, 0x6e, 0x65, 0x78, 0x74, 0x57, 0x69, 0x74, 0x68, 0x64, 0x72, 0x61, 0x77, 0x61, 0x6c, 0x56, 0x61, 0x6c, @@ -3467,7 +3467,7 @@ var file_proto_prysm_v1alpha1_beacon_state_proto_rawDesc = []byte{ 0x5f, 0x63, 0x6f, 0x6e, 0x73, 0x75, 0x6d, 0x65, 0x18, 0xe2, 0x5d, 0x20, 0x01, 0x28, 0x04, 0x42, 0x44, 0x82, 0xb5, 0x18, 0x40, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, - 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, + 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x47, 0x77, 0x65, 0x69, 0x52, 0x17, 0x64, 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x42, 0x61, 0x6c, 0x61, 0x6e, 0x63, 0x65, 0x54, 0x6f, 0x43, 0x6f, 0x6e, 0x73, 0x75, 0x6d, 0x65, 0x12, 0x7c, @@ -3475,14 +3475,14 @@ var file_proto_prysm_v1alpha1_beacon_state_proto_rawDesc = []byte{ 0x6f, 0x5f, 0x63, 0x6f, 0x6e, 0x73, 0x75, 0x6d, 0x65, 0x18, 0xe3, 0x5d, 0x20, 0x01, 0x28, 0x04, 0x42, 0x44, 0x82, 0xb5, 0x18, 0x40, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, - 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, + 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x47, 0x77, 0x65, 0x69, 0x52, 0x14, 0x65, 0x78, 0x69, 0x74, 0x42, 0x61, 0x6c, 0x61, 0x6e, 0x63, 0x65, 0x54, 0x6f, 0x43, 0x6f, 0x6e, 0x73, 0x75, 0x6d, 0x65, 0x12, 0x76, 0x0a, 0x13, 0x65, 0x61, 0x72, 0x6c, 0x69, 0x65, 0x73, 0x74, 0x5f, 0x65, 0x78, 0x69, 0x74, 0x5f, 0x65, 0x70, 0x6f, 0x63, 0x68, 0x18, 0xe4, 0x5d, 0x20, 0x01, 0x28, 0x04, 0x42, 0x45, 0x82, 0xb5, 0x18, 0x41, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, - 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, + 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x45, 0x70, 0x6f, 0x63, 0x68, 0x52, 0x11, 0x65, 0x61, 0x72, 0x6c, 0x69, 0x65, 0x73, 0x74, 0x45, 0x78, 0x69, 0x74, 0x45, @@ -3491,7 +3491,7 @@ var file_proto_prysm_v1alpha1_beacon_state_proto_rawDesc = []byte{ 0x6f, 0x5f, 0x63, 0x6f, 0x6e, 0x73, 0x75, 0x6d, 0x65, 0x18, 0xe5, 0x5d, 0x20, 0x01, 0x28, 0x04, 0x42, 0x44, 0x82, 0xb5, 0x18, 0x40, 
0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, - 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, + 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x47, 0x77, 0x65, 0x69, 0x52, 0x1d, 0x63, 0x6f, 0x6e, 0x73, 0x6f, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x42, 0x61, 0x6c, 0x61, 0x6e, 0x63, 0x65, 0x54, 0x6f, 0x43, 0x6f, @@ -3500,7 +3500,7 @@ var file_proto_prysm_v1alpha1_beacon_state_proto_rawDesc = []byte{ 0x5f, 0x65, 0x70, 0x6f, 0x63, 0x68, 0x18, 0xe6, 0x5d, 0x20, 0x01, 0x28, 0x04, 0x42, 0x45, 0x82, 0xb5, 0x18, 0x41, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, - 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, + 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x45, 0x70, 0x6f, 0x63, 0x68, 0x52, 0x1a, 0x65, 0x61, 0x72, 0x6c, 0x69, 0x65, 0x73, 0x74, 0x43, 0x6f, 0x6e, 0x73, 0x6f, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x45, 0x70, 0x6f, 0x63, 0x68, @@ -3533,7 +3533,7 @@ var file_proto_prysm_v1alpha1_beacon_state_proto_rawDesc = []byte{ 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x42, 0x10, 0x42, 0x65, 0x61, 0x63, 0x6f, 0x6e, 0x53, 0x74, 0x61, 0x74, 0x65, 0x50, 0x72, 0x6f, 0x74, 0x6f, 0x50, 0x01, 0x5a, 0x39, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, - 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, + 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x3b, 0x65, 0x74, 0x68, 0xaa, 0x02, 0x15, 0x45, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x45, 0x74, 0x68, 0x2e, 0x56, 0x31, 0x41, 0x6c, 0x70, 0x68, 0x61, diff --git a/proto/prysm/v1alpha1/beacon_state.proto b/proto/prysm/v1alpha1/beacon_state.proto index 59ea245fc5..0e8b4f15d7 100644 --- a/proto/prysm/v1alpha1/beacon_state.proto +++ b/proto/prysm/v1alpha1/beacon_state.proto @@ -9,7 +9,7 @@ import "proto/eth/ext/options.proto"; import "proto/prysm/v1alpha1/eip_7251.proto"; option csharp_namespace = "Ethereum.Eth.V1Alpha1"; -option go_package = "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1;eth"; +option go_package = "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1;eth"; option java_multiple_files = true; option java_outer_classname = "BeaconStateProto"; option java_package = "org.ethereum.eth.v1alpha1"; @@ -25,7 +25,7 @@ message BeaconState { bytes genesis_validators_root = 1002 [ (ethereum.eth.ext.ssz_size) = "32" ]; uint64 slot = 1003 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Slot" ]; Fork fork = 1004; @@ -90,11 +90,11 @@ message PendingAttestation { // The difference of when attestation gets created and get included on chain. 
uint64 inclusion_delay = 3 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Slot" ]; // The proposer who included the attestation in the block. uint64 proposer_index = 4 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/" + "github.com/OffchainLabs/prysm/v7/" "consensus-types/primitives.ValidatorIndex" ]; } @@ -113,7 +113,7 @@ message StateSummary { // The slot of the state. uint64 slot = 1 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Slot" ]; // The block root of the state. bytes root = 2; @@ -188,7 +188,7 @@ message BeaconStateAltair { bytes genesis_validators_root = 1002 [ (ethereum.eth.ext.ssz_size) = "32" ]; uint64 slot = 1003 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Slot" ]; Fork fork = 1004; @@ -255,7 +255,7 @@ message SyncAggregatorSelectionData { // Slot of this signing data. uint64 slot = 1 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Slot" ]; // Subcommittee index of this signing data. uint64 subcommittee_index = 2; @@ -271,7 +271,7 @@ message BeaconStateBellatrix { bytes genesis_validators_root = 1002 [ (ethereum.eth.ext.ssz_size) = "32" ]; uint64 slot = 1003 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Slot" ]; Fork fork = 1004; @@ -344,7 +344,7 @@ message BeaconStateCapella { bytes genesis_validators_root = 1002 [ (ethereum.eth.ext.ssz_size) = "32" ]; uint64 slot = 1003 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Slot" ]; Fork fork = 1004; @@ -410,7 +410,7 @@ message BeaconStateCapella { uint64 next_withdrawal_index = 11001; // [New in Capella] uint64 next_withdrawal_validator_index = 11002 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/" + "github.com/OffchainLabs/prysm/v7/consensus-types/" "primitives.ValidatorIndex" ]; // [New in Capella] repeated HistoricalSummary historical_summaries = 11003 [ (ethereum.eth.ext.ssz_max) = "16777216" ]; // [New in Capella] @@ -426,7 +426,7 @@ message BeaconStateDeneb { bytes genesis_validators_root = 1002 [ (ethereum.eth.ext.ssz_size) = "32" ]; uint64 slot = 1003 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Slot" ]; Fork fork = 1004; @@ -492,7 +492,7 @@ message BeaconStateDeneb { uint64 next_withdrawal_index = 11001; uint64 next_withdrawal_validator_index = 11002 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/" + "github.com/OffchainLabs/prysm/v7/consensus-types/" "primitives.ValidatorIndex" ]; repeated HistoricalSummary historical_summaries = 11003 [ (ethereum.eth.ext.ssz_max) = "16777216" ]; @@ -508,7 +508,7 @@ message BeaconStateElectra { bytes genesis_validators_root = 1002 [ (ethereum.eth.ext.ssz_size) = "32" ]; uint64 slot = 1003 [ (ethereum.eth.ext.cast_type) = - 
"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Slot" ]; Fork fork = 1004; @@ -574,7 +574,7 @@ message BeaconStateElectra { uint64 next_withdrawal_index = 11001; uint64 next_withdrawal_validator_index = 11002 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/" + "github.com/OffchainLabs/prysm/v7/consensus-types/" "primitives.ValidatorIndex" ]; repeated HistoricalSummary historical_summaries = 11003 [ (ethereum.eth.ext.ssz_max) = "16777216" ]; @@ -583,23 +583,23 @@ message BeaconStateElectra { uint64 deposit_requests_start_index = 12001; uint64 deposit_balance_to_consume = 12002 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Gwei" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Gwei" ]; uint64 exit_balance_to_consume = 12003 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Gwei" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Gwei" ]; uint64 earliest_exit_epoch = 12004 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Epoch" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Epoch" ]; uint64 consolidation_balance_to_consume = 12005 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Gwei" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Gwei" ]; uint64 earliest_consolidation_epoch = 12006 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Epoch" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Epoch" ]; repeated PendingDeposit pending_deposits = 12007 [ (ethereum.eth.ext.ssz_max) = "pending_deposits_limit" ]; @@ -619,7 +619,7 @@ message BeaconStateFulu { bytes genesis_validators_root = 1002 [ (ethereum.eth.ext.ssz_size) = "32" ]; uint64 slot = 1003 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Slot" ]; Fork fork = 1004; @@ -685,7 +685,7 @@ message BeaconStateFulu { uint64 next_withdrawal_index = 11001; uint64 next_withdrawal_validator_index = 11002 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/" + "github.com/OffchainLabs/prysm/v7/consensus-types/" "primitives.ValidatorIndex" ]; repeated HistoricalSummary historical_summaries = 11003 [ (ethereum.eth.ext.ssz_max) = "16777216" ]; @@ -694,23 +694,23 @@ message BeaconStateFulu { uint64 deposit_requests_start_index = 12001; uint64 deposit_balance_to_consume = 12002 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Gwei" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Gwei" ]; uint64 exit_balance_to_consume = 12003 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Gwei" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Gwei" ]; uint64 earliest_exit_epoch = 12004 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Epoch" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Epoch" ]; uint64 consolidation_balance_to_consume = 12005 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Gwei" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Gwei" ]; uint64 
earliest_consolidation_epoch = 12006 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Epoch" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Epoch" ]; repeated PendingDeposit pending_deposits = 12007 [ (ethereum.eth.ext.ssz_max) = "pending_deposits_limit" ]; diff --git a/proto/prysm/v1alpha1/bellatrix.ssz.go b/proto/prysm/v1alpha1/bellatrix.ssz.go index ea015fb454..2828b9ea16 100644 --- a/proto/prysm/v1alpha1/bellatrix.ssz.go +++ b/proto/prysm/v1alpha1/bellatrix.ssz.go @@ -2,8 +2,8 @@ package eth import ( - github_com_OffchainLabs_prysm_v6_consensus_types_primitives "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - v1 "github.com/OffchainLabs/prysm/v6/proto/engine/v1" + github_com_OffchainLabs_prysm_v7_consensus_types_primitives "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + v1 "github.com/OffchainLabs/prysm/v7/proto/engine/v1" ssz "github.com/prysmaticlabs/fastssz" ) @@ -173,10 +173,10 @@ func (b *BeaconBlockBellatrix) UnmarshalSSZ(buf []byte) error { var o4 uint64 // Field (0) 'Slot' - b.Slot = github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot(ssz.UnmarshallUint64(buf[0:8])) + b.Slot = github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot(ssz.UnmarshallUint64(buf[0:8])) // Field (1) 'ProposerIndex' - b.ProposerIndex = github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex(ssz.UnmarshallUint64(buf[8:16])) + b.ProposerIndex = github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex(ssz.UnmarshallUint64(buf[8:16])) // Field (2) 'ParentRoot' if cap(b.ParentRoot) == 0 { @@ -919,10 +919,10 @@ func (b *BlindedBeaconBlockBellatrix) UnmarshalSSZ(buf []byte) error { var o4 uint64 // Field (0) 'Slot' - b.Slot = github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot(ssz.UnmarshallUint64(buf[0:8])) + b.Slot = github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot(ssz.UnmarshallUint64(buf[0:8])) // Field (1) 'ProposerIndex' - b.ProposerIndex = github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex(ssz.UnmarshallUint64(buf[8:16])) + b.ProposerIndex = github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex(ssz.UnmarshallUint64(buf[8:16])) // Field (2) 'ParentRoot' if cap(b.ParentRoot) == 0 { @@ -1775,7 +1775,7 @@ func (b *BeaconStateBellatrix) UnmarshalSSZ(buf []byte) error { b.GenesisValidatorsRoot = append(b.GenesisValidatorsRoot, buf[8:40]...) 
// Field (2) 'Slot' - b.Slot = github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot(ssz.UnmarshallUint64(buf[40:48])) + b.Slot = github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot(ssz.UnmarshallUint64(buf[40:48])) // Field (3) 'Fork' if b.Fork == nil { diff --git a/proto/prysm/v1alpha1/blobs.pb.go b/proto/prysm/v1alpha1/blobs.pb.go index f2cff3d841..c97b109f79 100755 --- a/proto/prysm/v1alpha1/blobs.pb.go +++ b/proto/prysm/v1alpha1/blobs.pb.go @@ -10,8 +10,8 @@ import ( reflect "reflect" sync "sync" - github_com_OffchainLabs_prysm_v6_consensus_types_primitives "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - _ "github.com/OffchainLabs/prysm/v6/proto/eth/ext" + github_com_OffchainLabs_prysm_v7_consensus_types_primitives "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + _ "github.com/OffchainLabs/prysm/v7/proto/eth/ext" protoreflect "google.golang.org/protobuf/reflect/protoreflect" protoimpl "google.golang.org/protobuf/runtime/protoimpl" ) @@ -123,9 +123,9 @@ type BlindedBlobSidecar struct { state protoimpl.MessageState `protogen:"open.v1"` BlockRoot []byte `protobuf:"bytes,1,opt,name=block_root,json=blockRoot,proto3" json:"block_root,omitempty" ssz-size:"32"` Index uint64 `protobuf:"varint,2,opt,name=index,proto3" json:"index,omitempty"` - Slot github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot `protobuf:"varint,3,opt,name=slot,proto3" json:"slot,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot"` + Slot github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot `protobuf:"varint,3,opt,name=slot,proto3" json:"slot,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Slot"` BlockParentRoot []byte `protobuf:"bytes,4,opt,name=block_parent_root,json=blockParentRoot,proto3" json:"block_parent_root,omitempty" ssz-size:"32"` - ProposerIndex github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex `protobuf:"varint,5,opt,name=proposer_index,json=proposerIndex,proto3" json:"proposer_index,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.ValidatorIndex"` + ProposerIndex github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex `protobuf:"varint,5,opt,name=proposer_index,json=proposerIndex,proto3" json:"proposer_index,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.ValidatorIndex"` BlobRoot []byte `protobuf:"bytes,6,opt,name=blob_root,json=blobRoot,proto3" json:"blob_root,omitempty" ssz-size:"32"` KzgCommitment []byte `protobuf:"bytes,7,opt,name=kzg_commitment,json=kzgCommitment,proto3" json:"kzg_commitment,omitempty" ssz-size:"48"` KzgProof []byte `protobuf:"bytes,8,opt,name=kzg_proof,json=kzgProof,proto3" json:"kzg_proof,omitempty" ssz-size:"48"` @@ -177,11 +177,11 @@ func (x *BlindedBlobSidecar) GetIndex() uint64 { return 0 } -func (x *BlindedBlobSidecar) GetSlot() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot { +func (x *BlindedBlobSidecar) GetSlot() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot { if x != nil { return x.Slot } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot(0) } func (x *BlindedBlobSidecar) GetBlockParentRoot() []byte { @@ -191,11 +191,11 @@ func (x *BlindedBlobSidecar) GetBlockParentRoot() []byte { return nil } -func (x *BlindedBlobSidecar) GetProposerIndex() 
github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex { +func (x *BlindedBlobSidecar) GetProposerIndex() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex { if x != nil { return x.ProposerIndex } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex(0) } func (x *BlindedBlobSidecar) GetBlobRoot() []byte { @@ -302,7 +302,7 @@ var file_proto_prysm_v1alpha1_blobs_proto_rawDesc = []byte{ 0x52, 0x05, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x12, 0x58, 0x0a, 0x04, 0x73, 0x6c, 0x6f, 0x74, 0x18, 0x03, 0x20, 0x01, 0x28, 0x04, 0x42, 0x44, 0x82, 0xb5, 0x18, 0x40, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, - 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, + 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x53, 0x6c, 0x6f, 0x74, 0x52, 0x04, 0x73, 0x6c, 0x6f, 0x74, 0x12, 0x32, 0x0a, 0x11, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x5f, 0x70, 0x61, 0x72, 0x65, 0x6e, @@ -312,7 +312,7 @@ var file_proto_prysm_v1alpha1_blobs_proto_rawDesc = []byte{ 0x72, 0x5f, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x18, 0x05, 0x20, 0x01, 0x28, 0x04, 0x42, 0x4e, 0x82, 0xb5, 0x18, 0x4a, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, - 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, + 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x56, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x52, 0x0d, 0x70, 0x72, 0x6f, 0x70, 0x6f, 0x73, 0x65, 0x72, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x12, 0x23, 0x0a, 0x09, @@ -333,7 +333,7 @@ var file_proto_prysm_v1alpha1_blobs_proto_rawDesc = []byte{ 0x61, 0x31, 0x42, 0x0a, 0x42, 0x6c, 0x6f, 0x62, 0x73, 0x50, 0x72, 0x6f, 0x74, 0x6f, 0x50, 0x01, 0x5a, 0x39, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, - 0x76, 0x36, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, + 0x76, 0x37, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x3b, 0x65, 0x74, 0x68, 0xaa, 0x02, 0x15, 0x45, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x45, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0xca, 0x02, 0x15, 0x45, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x5c, 0x45, diff --git a/proto/prysm/v1alpha1/blobs.proto b/proto/prysm/v1alpha1/blobs.proto index ac75b6f57e..144936b6e8 100644 --- a/proto/prysm/v1alpha1/blobs.proto +++ b/proto/prysm/v1alpha1/blobs.proto @@ -18,7 +18,7 @@ package ethereum.eth.v1alpha1; import "proto/eth/ext/options.proto"; option csharp_namespace = "Ethereum.Eth.v1alpha1"; -option go_package = "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1;eth"; +option go_package = "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1;eth"; option java_multiple_files = 
true; option java_outer_classname = "BlobsProto"; option java_package = "org.ethereum.eth.v1alpha1"; @@ -39,11 +39,11 @@ message BlindedBlobSidecar { uint64 index = 2; uint64 slot = 3 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Slot" ]; bytes block_parent_root = 4 [ (ethereum.eth.ext.ssz_size) = "32" ]; uint64 proposer_index = 5 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/" + "github.com/OffchainLabs/prysm/v7/" "consensus-types/primitives.ValidatorIndex" ]; bytes blob_root = 6 [ (ethereum.eth.ext.ssz_size) = "32" ]; bytes kzg_commitment = 7 [ (ethereum.eth.ext.ssz_size) = "48" ]; diff --git a/proto/prysm/v1alpha1/capella.ssz.go b/proto/prysm/v1alpha1/capella.ssz.go index 96667eb275..bf639150cf 100644 --- a/proto/prysm/v1alpha1/capella.ssz.go +++ b/proto/prysm/v1alpha1/capella.ssz.go @@ -2,8 +2,8 @@ package eth import ( - github_com_OffchainLabs_prysm_v6_consensus_types_primitives "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - v1 "github.com/OffchainLabs/prysm/v6/proto/engine/v1" + github_com_OffchainLabs_prysm_v7_consensus_types_primitives "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + v1 "github.com/OffchainLabs/prysm/v7/proto/engine/v1" ssz "github.com/prysmaticlabs/fastssz" ) @@ -173,10 +173,10 @@ func (b *BeaconBlockCapella) UnmarshalSSZ(buf []byte) error { var o4 uint64 // Field (0) 'Slot' - b.Slot = github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot(ssz.UnmarshallUint64(buf[0:8])) + b.Slot = github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot(ssz.UnmarshallUint64(buf[0:8])) // Field (1) 'ProposerIndex' - b.ProposerIndex = github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex(ssz.UnmarshallUint64(buf[8:16])) + b.ProposerIndex = github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex(ssz.UnmarshallUint64(buf[8:16])) // Field (2) 'ParentRoot' if cap(b.ParentRoot) == 0 { @@ -976,10 +976,10 @@ func (b *BlindedBeaconBlockCapella) UnmarshalSSZ(buf []byte) error { var o4 uint64 // Field (0) 'Slot' - b.Slot = github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot(ssz.UnmarshallUint64(buf[0:8])) + b.Slot = github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot(ssz.UnmarshallUint64(buf[0:8])) // Field (1) 'ProposerIndex' - b.ProposerIndex = github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex(ssz.UnmarshallUint64(buf[8:16])) + b.ProposerIndex = github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex(ssz.UnmarshallUint64(buf[8:16])) // Field (2) 'ParentRoot' if cap(b.ParentRoot) == 0 { @@ -2230,7 +2230,7 @@ func (b *BeaconStateCapella) UnmarshalSSZ(buf []byte) error { b.GenesisValidatorsRoot = append(b.GenesisValidatorsRoot, buf[8:40]...) 
// Field (2) 'Slot' - b.Slot = github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot(ssz.UnmarshallUint64(buf[40:48])) + b.Slot = github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot(ssz.UnmarshallUint64(buf[40:48])) // Field (3) 'Fork' if b.Fork == nil { @@ -2386,7 +2386,7 @@ func (b *BeaconStateCapella) UnmarshalSSZ(buf []byte) error { b.NextWithdrawalIndex = ssz.UnmarshallUint64(buf[2736633:2736641]) // Field (26) 'NextWithdrawalValidatorIndex' - b.NextWithdrawalValidatorIndex = github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex(ssz.UnmarshallUint64(buf[2736641:2736649])) + b.NextWithdrawalValidatorIndex = github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex(ssz.UnmarshallUint64(buf[2736641:2736649])) // Offset (27) 'HistoricalSummaries' if o27 = ssz.ReadOffset(buf[2736649:2736653]); o27 > size || o24 > o27 { @@ -3128,7 +3128,7 @@ func (l *LightClientUpdateCapella) UnmarshalSSZ(buf []byte) error { } // Field (6) 'SignatureSlot' - l.SignatureSlot = github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot(ssz.UnmarshallUint64(buf[25144:25152])) + l.SignatureSlot = github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot(ssz.UnmarshallUint64(buf[25144:25152])) // Field (0) 'AttestedHeader' { @@ -3347,7 +3347,7 @@ func (l *LightClientFinalityUpdateCapella) UnmarshalSSZ(buf []byte) error { } // Field (4) 'SignatureSlot' - l.SignatureSlot = github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot(ssz.UnmarshallUint64(buf[360:368])) + l.SignatureSlot = github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot(ssz.UnmarshallUint64(buf[360:368])) // Field (0) 'AttestedHeader' { @@ -3505,7 +3505,7 @@ func (l *LightClientOptimisticUpdateCapella) UnmarshalSSZ(buf []byte) error { } // Field (2) 'SignatureSlot' - l.SignatureSlot = github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot(ssz.UnmarshallUint64(buf[164:172])) + l.SignatureSlot = github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot(ssz.UnmarshallUint64(buf[164:172])) // Field (0) 'AttestedHeader' { @@ -3829,7 +3829,7 @@ func (b *BLSToExecutionChange) UnmarshalSSZ(buf []byte) error { } // Field (0) 'ValidatorIndex' - b.ValidatorIndex = github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex(ssz.UnmarshallUint64(buf[0:8])) + b.ValidatorIndex = github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex(ssz.UnmarshallUint64(buf[0:8])) // Field (1) 'FromBlsPubkey' if cap(b.FromBlsPubkey) == 0 { diff --git a/proto/prysm/v1alpha1/cloners.go b/proto/prysm/v1alpha1/cloners.go index 9c6bf55e62..2d76721dd0 100644 --- a/proto/prysm/v1alpha1/cloners.go +++ b/proto/prysm/v1alpha1/cloners.go @@ -1,7 +1,7 @@ package eth import ( - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" ) type copier[T any] interface { diff --git a/proto/prysm/v1alpha1/cloners_test.go b/proto/prysm/v1alpha1/cloners_test.go index b19e9f93bb..1697134968 100644 --- a/proto/prysm/v1alpha1/cloners_test.go +++ b/proto/prysm/v1alpha1/cloners_test.go @@ -5,9 +5,9 @@ import ( "reflect" "testing" - enginev1 "github.com/OffchainLabs/prysm/v6/proto/engine/v1" - v1alpha1 "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" + enginev1 "github.com/OffchainLabs/prysm/v7/proto/engine/v1" + v1alpha1 "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" ) func 
TestCopySignedBeaconBlock(t *testing.T) { diff --git a/proto/prysm/v1alpha1/data_columns.pb.go b/proto/prysm/v1alpha1/data_columns.pb.go index 4660cbe125..2e793b8e75 100755 --- a/proto/prysm/v1alpha1/data_columns.pb.go +++ b/proto/prysm/v1alpha1/data_columns.pb.go @@ -10,7 +10,7 @@ import ( reflect "reflect" sync "sync" - _ "github.com/OffchainLabs/prysm/v6/proto/eth/ext" + _ "github.com/OffchainLabs/prysm/v7/proto/eth/ext" protoreflect "google.golang.org/protobuf/reflect/protoreflect" protoimpl "google.golang.org/protobuf/runtime/protoimpl" ) @@ -204,7 +204,7 @@ var file_proto_prysm_v1alpha1_data_columns_proto_rawDesc = []byte{ 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x42, 0x10, 0x44, 0x61, 0x74, 0x61, 0x43, 0x6f, 0x6c, 0x75, 0x6d, 0x6e, 0x73, 0x50, 0x72, 0x6f, 0x74, 0x6f, 0x50, 0x01, 0x5a, 0x39, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, - 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x70, 0x72, + 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x3b, 0x65, 0x74, 0x68, 0xaa, 0x02, 0x15, 0x45, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x45, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0xca, 0x02, diff --git a/proto/prysm/v1alpha1/data_columns.proto b/proto/prysm/v1alpha1/data_columns.proto index 80bd88ae21..cb4c4a917b 100644 --- a/proto/prysm/v1alpha1/data_columns.proto +++ b/proto/prysm/v1alpha1/data_columns.proto @@ -19,7 +19,7 @@ import "proto/eth/ext/options.proto"; import "proto/prysm/v1alpha1/beacon_core_types.proto"; option csharp_namespace = "Ethereum.Eth.v1alpha1"; -option go_package = "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1;eth"; +option go_package = "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1;eth"; option java_multiple_files = true; option java_outer_classname = "DataColumnsProto"; option java_package = "org.ethereum.eth.v1alpha1"; diff --git a/proto/prysm/v1alpha1/debug.pb.go b/proto/prysm/v1alpha1/debug.pb.go index eca4c8f561..7233bcdaff 100755 --- a/proto/prysm/v1alpha1/debug.pb.go +++ b/proto/prysm/v1alpha1/debug.pb.go @@ -11,8 +11,8 @@ import ( reflect "reflect" sync "sync" - github_com_OffchainLabs_prysm_v6_consensus_types_primitives "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - _ "github.com/OffchainLabs/prysm/v6/proto/eth/ext" + github_com_OffchainLabs_prysm_v7_consensus_types_primitives "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + _ "github.com/OffchainLabs/prysm/v7/proto/eth/ext" _ "google.golang.org/genproto/googleapis/api/annotations" grpc "google.golang.org/grpc" codes "google.golang.org/grpc/codes" @@ -81,7 +81,7 @@ func (LoggingLevelRequest_Level) EnumDescriptor() ([]byte, []int) { type InclusionSlotRequest struct { state protoimpl.MessageState `protogen:"open.v1"` Id uint64 `protobuf:"varint,1,opt,name=id,proto3" json:"id,omitempty"` - Slot github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot `protobuf:"varint,2,opt,name=slot,proto3" json:"slot,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot"` + Slot github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot `protobuf:"varint,2,opt,name=slot,proto3" json:"slot,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Slot"` unknownFields protoimpl.UnknownFields sizeCache 
protoimpl.SizeCache } @@ -123,16 +123,16 @@ func (x *InclusionSlotRequest) GetId() uint64 { return 0 } -func (x *InclusionSlotRequest) GetSlot() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot { +func (x *InclusionSlotRequest) GetSlot() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot { if x != nil { return x.Slot } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot(0) } type InclusionSlotResponse struct { state protoimpl.MessageState `protogen:"open.v1"` - Slot github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot `protobuf:"varint,2,opt,name=slot,proto3" json:"slot,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot"` + Slot github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot `protobuf:"varint,2,opt,name=slot,proto3" json:"slot,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Slot"` unknownFields protoimpl.UnknownFields sizeCache protoimpl.SizeCache } @@ -167,11 +167,11 @@ func (*InclusionSlotResponse) Descriptor() ([]byte, []int) { return file_proto_prysm_v1alpha1_debug_proto_rawDescGZIP(), []int{1} } -func (x *InclusionSlotResponse) GetSlot() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot { +func (x *InclusionSlotResponse) GetSlot() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot { if x != nil { return x.Slot } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot(0) } // Deprecated: Marked as deprecated in proto/prysm/v1alpha1/debug.proto. @@ -223,13 +223,13 @@ func (x *BeaconStateRequest) GetQueryFilter() isBeaconStateRequest_QueryFilter { return nil } -func (x *BeaconStateRequest) GetSlot() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot { +func (x *BeaconStateRequest) GetSlot() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot { if x != nil { if x, ok := x.QueryFilter.(*BeaconStateRequest_Slot); ok { return x.Slot } } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot(0) } func (x *BeaconStateRequest) GetBlockRoot() []byte { @@ -246,7 +246,7 @@ type isBeaconStateRequest_QueryFilter interface { } type BeaconStateRequest_Slot struct { - Slot github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot `protobuf:"varint,1,opt,name=slot,proto3,oneof" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot"` + Slot github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot `protobuf:"varint,1,opt,name=slot,proto3,oneof" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Slot"` } type BeaconStateRequest_BlockRoot struct { @@ -828,21 +828,21 @@ var file_proto_prysm_v1alpha1_debug_proto_rawDesc = []byte{ 0x52, 0x02, 0x69, 0x64, 0x12, 0x58, 0x0a, 0x04, 0x73, 0x6c, 0x6f, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x04, 0x42, 0x44, 0x82, 0xb5, 0x18, 0x40, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, - 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, + 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 
0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x53, 0x6c, 0x6f, 0x74, 0x52, 0x04, 0x73, 0x6c, 0x6f, 0x74, 0x22, 0x71, 0x0a, 0x15, 0x49, 0x6e, 0x63, 0x6c, 0x75, 0x73, 0x69, 0x6f, 0x6e, 0x53, 0x6c, 0x6f, 0x74, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x58, 0x0a, 0x04, 0x73, 0x6c, 0x6f, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x04, 0x42, 0x44, 0x82, 0xb5, 0x18, 0x40, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, - 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, + 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x53, 0x6c, 0x6f, 0x74, 0x52, 0x04, 0x73, 0x6c, 0x6f, 0x74, 0x22, 0xa5, 0x01, 0x0a, 0x12, 0x42, 0x65, 0x61, 0x63, 0x6f, 0x6e, 0x53, 0x74, 0x61, 0x74, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x5a, 0x0a, 0x04, 0x73, 0x6c, 0x6f, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, 0x42, 0x44, 0x82, 0xb5, 0x18, 0x40, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, - 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, + 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x53, 0x6c, 0x6f, 0x74, 0x48, 0x00, 0x52, 0x04, 0x73, 0x6c, 0x6f, 0x74, 0x12, 0x1f, 0x0a, 0x0a, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x5f, 0x72, 0x6f, @@ -1014,7 +1014,7 @@ var file_proto_prysm_v1alpha1_debug_proto_rawDesc = []byte{ 0x65, 0x75, 0x6d, 0x2e, 0x65, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x42, 0x0a, 0x44, 0x65, 0x62, 0x75, 0x67, 0x50, 0x72, 0x6f, 0x74, 0x6f, 0x50, 0x01, 0x5a, 0x39, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, - 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, + 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x3b, 0x65, 0x74, 0x68, 0xaa, 0x02, 0x15, 0x45, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x45, 0x74, 0x68, 0x2e, 0x56, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, diff --git a/proto/prysm/v1alpha1/debug.proto b/proto/prysm/v1alpha1/debug.proto index e243b37372..00e4bc44d2 100644 --- a/proto/prysm/v1alpha1/debug.proto +++ b/proto/prysm/v1alpha1/debug.proto @@ -9,7 +9,7 @@ import "google/api/annotations.proto"; import "google/protobuf/empty.proto"; option csharp_namespace = "Ethereum.Eth.V1alpha1"; -option go_package = "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1;eth"; +option go_package = "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1;eth"; option java_multiple_files = true; option java_outer_classname = "DebugProto"; option java_package = "org.ethereum.eth.v1alpha1"; @@ -63,14 +63,14 @@ message InclusionSlotRequest { uint64 id = 1; uint64 slot = 2 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Slot" ]; } message InclusionSlotResponse { uint64 slot = 2 [ 
(ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Slot" ]; } @@ -81,7 +81,7 @@ message BeaconStateRequest { // The slot corresponding to a desired beacon state. uint64 slot = 1 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Slot" ]; // The block root corresponding to a desired beacon state. diff --git a/proto/prysm/v1alpha1/deneb.ssz.go b/proto/prysm/v1alpha1/deneb.ssz.go index 2c54bdfea4..f8f8ccd832 100644 --- a/proto/prysm/v1alpha1/deneb.ssz.go +++ b/proto/prysm/v1alpha1/deneb.ssz.go @@ -2,8 +2,8 @@ package eth import ( - github_com_OffchainLabs_prysm_v6_consensus_types_primitives "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - v1 "github.com/OffchainLabs/prysm/v6/proto/engine/v1" + github_com_OffchainLabs_prysm_v7_consensus_types_primitives "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + v1 "github.com/OffchainLabs/prysm/v7/proto/engine/v1" ssz "github.com/prysmaticlabs/fastssz" ) @@ -591,10 +591,10 @@ func (b *BeaconBlockDeneb) UnmarshalSSZ(buf []byte) error { var o4 uint64 // Field (0) 'Slot' - b.Slot = github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot(ssz.UnmarshallUint64(buf[0:8])) + b.Slot = github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot(ssz.UnmarshallUint64(buf[0:8])) // Field (1) 'ProposerIndex' - b.ProposerIndex = github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex(ssz.UnmarshallUint64(buf[8:16])) + b.ProposerIndex = github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex(ssz.UnmarshallUint64(buf[8:16])) // Field (2) 'ParentRoot' if cap(b.ParentRoot) == 0 { @@ -1454,10 +1454,10 @@ func (b *BlindedBeaconBlockDeneb) UnmarshalSSZ(buf []byte) error { var o4 uint64 // Field (0) 'Slot' - b.Slot = github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot(ssz.UnmarshallUint64(buf[0:8])) + b.Slot = github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot(ssz.UnmarshallUint64(buf[0:8])) // Field (1) 'ProposerIndex' - b.ProposerIndex = github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex(ssz.UnmarshallUint64(buf[8:16])) + b.ProposerIndex = github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex(ssz.UnmarshallUint64(buf[8:16])) // Field (2) 'ParentRoot' if cap(b.ParentRoot) == 0 { @@ -3024,7 +3024,7 @@ func (b *BeaconStateDeneb) UnmarshalSSZ(buf []byte) error { b.GenesisValidatorsRoot = append(b.GenesisValidatorsRoot, buf[8:40]...) 
// Field (2) 'Slot' - b.Slot = github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot(ssz.UnmarshallUint64(buf[40:48])) + b.Slot = github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot(ssz.UnmarshallUint64(buf[40:48])) // Field (3) 'Fork' if b.Fork == nil { @@ -3180,7 +3180,7 @@ func (b *BeaconStateDeneb) UnmarshalSSZ(buf []byte) error { b.NextWithdrawalIndex = ssz.UnmarshallUint64(buf[2736633:2736641]) // Field (26) 'NextWithdrawalValidatorIndex' - b.NextWithdrawalValidatorIndex = github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex(ssz.UnmarshallUint64(buf[2736641:2736649])) + b.NextWithdrawalValidatorIndex = github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex(ssz.UnmarshallUint64(buf[2736641:2736649])) // Offset (27) 'HistoricalSummaries' if o27 = ssz.ReadOffset(buf[2736649:2736653]); o27 > size || o24 > o27 { @@ -3993,7 +3993,7 @@ func (l *LightClientUpdateDeneb) UnmarshalSSZ(buf []byte) error { } // Field (6) 'SignatureSlot' - l.SignatureSlot = github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot(ssz.UnmarshallUint64(buf[25144:25152])) + l.SignatureSlot = github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot(ssz.UnmarshallUint64(buf[25144:25152])) // Field (0) 'AttestedHeader' { @@ -4212,7 +4212,7 @@ func (l *LightClientFinalityUpdateDeneb) UnmarshalSSZ(buf []byte) error { } // Field (4) 'SignatureSlot' - l.SignatureSlot = github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot(ssz.UnmarshallUint64(buf[360:368])) + l.SignatureSlot = github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot(ssz.UnmarshallUint64(buf[360:368])) // Field (0) 'AttestedHeader' { @@ -4370,7 +4370,7 @@ func (l *LightClientOptimisticUpdateDeneb) UnmarshalSSZ(buf []byte) error { } // Field (2) 'SignatureSlot' - l.SignatureSlot = github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot(ssz.UnmarshallUint64(buf[164:172])) + l.SignatureSlot = github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot(ssz.UnmarshallUint64(buf[164:172])) // Field (0) 'AttestedHeader' { diff --git a/proto/prysm/v1alpha1/eip_7251.pb.go b/proto/prysm/v1alpha1/eip_7251.pb.go index e2f0916d5a..26ec20a854 100755 --- a/proto/prysm/v1alpha1/eip_7251.pb.go +++ b/proto/prysm/v1alpha1/eip_7251.pb.go @@ -10,8 +10,8 @@ import ( reflect "reflect" sync "sync" - github_com_OffchainLabs_prysm_v6_consensus_types_primitives "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - _ "github.com/OffchainLabs/prysm/v6/proto/eth/ext" + github_com_OffchainLabs_prysm_v7_consensus_types_primitives "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + _ "github.com/OffchainLabs/prysm/v7/proto/eth/ext" protoreflect "google.golang.org/protobuf/reflect/protoreflect" protoimpl "google.golang.org/protobuf/runtime/protoimpl" ) @@ -29,7 +29,7 @@ type PendingDeposit struct { WithdrawalCredentials []byte `protobuf:"bytes,2,opt,name=withdrawal_credentials,json=withdrawalCredentials,proto3" json:"withdrawal_credentials,omitempty" ssz-size:"32"` Amount uint64 `protobuf:"varint,3,opt,name=amount,proto3" json:"amount,omitempty"` Signature []byte `protobuf:"bytes,4,opt,name=signature,proto3" json:"signature,omitempty" ssz-size:"96"` - Slot github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot `protobuf:"varint,5,opt,name=slot,proto3" json:"slot,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot"` + Slot github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot 
`protobuf:"varint,5,opt,name=slot,proto3" json:"slot,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Slot"` unknownFields protoimpl.UnknownFields sizeCache protoimpl.SizeCache } @@ -92,18 +92,18 @@ func (x *PendingDeposit) GetSignature() []byte { return nil } -func (x *PendingDeposit) GetSlot() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot { +func (x *PendingDeposit) GetSlot() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot { if x != nil { return x.Slot } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot(0) } type PendingPartialWithdrawal struct { state protoimpl.MessageState `protogen:"open.v1"` - Index github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex `protobuf:"varint,1,opt,name=index,proto3" json:"index,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.ValidatorIndex"` + Index github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex `protobuf:"varint,1,opt,name=index,proto3" json:"index,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.ValidatorIndex"` Amount uint64 `protobuf:"varint,2,opt,name=amount,proto3" json:"amount,omitempty"` - WithdrawableEpoch github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch `protobuf:"varint,3,opt,name=withdrawable_epoch,json=withdrawableEpoch,proto3" json:"withdrawable_epoch,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Epoch"` + WithdrawableEpoch github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch `protobuf:"varint,3,opt,name=withdrawable_epoch,json=withdrawableEpoch,proto3" json:"withdrawable_epoch,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Epoch"` unknownFields protoimpl.UnknownFields sizeCache protoimpl.SizeCache } @@ -138,11 +138,11 @@ func (*PendingPartialWithdrawal) Descriptor() ([]byte, []int) { return file_proto_prysm_v1alpha1_eip_7251_proto_rawDescGZIP(), []int{1} } -func (x *PendingPartialWithdrawal) GetIndex() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex { +func (x *PendingPartialWithdrawal) GetIndex() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex { if x != nil { return x.Index } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex(0) } func (x *PendingPartialWithdrawal) GetAmount() uint64 { @@ -152,17 +152,17 @@ func (x *PendingPartialWithdrawal) GetAmount() uint64 { return 0 } -func (x *PendingPartialWithdrawal) GetWithdrawableEpoch() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch { +func (x *PendingPartialWithdrawal) GetWithdrawableEpoch() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch { if x != nil { return x.WithdrawableEpoch } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch(0) } type PendingConsolidation struct { state protoimpl.MessageState `protogen:"open.v1"` - SourceIndex github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex `protobuf:"varint,1,opt,name=source_index,json=sourceIndex,proto3" json:"source_index,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.ValidatorIndex"` - TargetIndex 
github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex `protobuf:"varint,2,opt,name=target_index,json=targetIndex,proto3" json:"target_index,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.ValidatorIndex"` + SourceIndex github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex `protobuf:"varint,1,opt,name=source_index,json=sourceIndex,proto3" json:"source_index,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.ValidatorIndex"` + TargetIndex github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex `protobuf:"varint,2,opt,name=target_index,json=targetIndex,proto3" json:"target_index,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.ValidatorIndex"` unknownFields protoimpl.UnknownFields sizeCache protoimpl.SizeCache } @@ -197,18 +197,18 @@ func (*PendingConsolidation) Descriptor() ([]byte, []int) { return file_proto_prysm_v1alpha1_eip_7251_proto_rawDescGZIP(), []int{2} } -func (x *PendingConsolidation) GetSourceIndex() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex { +func (x *PendingConsolidation) GetSourceIndex() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex { if x != nil { return x.SourceIndex } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex(0) } -func (x *PendingConsolidation) GetTargetIndex() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex { +func (x *PendingConsolidation) GetTargetIndex() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex { if x != nil { return x.TargetIndex } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex(0) } var File_proto_prysm_v1alpha1_eip_7251_proto protoreflect.FileDescriptor @@ -234,7 +234,7 @@ var file_proto_prysm_v1alpha1_eip_7251_proto_rawDesc = []byte{ 0x09, 0x73, 0x69, 0x67, 0x6e, 0x61, 0x74, 0x75, 0x72, 0x65, 0x12, 0x58, 0x0a, 0x04, 0x73, 0x6c, 0x6f, 0x74, 0x18, 0x05, 0x20, 0x01, 0x28, 0x04, 0x42, 0x44, 0x82, 0xb5, 0x18, 0x40, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, - 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, + 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x53, 0x6c, 0x6f, 0x74, 0x52, 0x04, 0x73, 0x6c, 0x6f, 0x74, 0x22, 0x8e, 0x02, 0x0a, 0x18, 0x50, 0x65, 0x6e, 0x64, 0x69, 0x6e, 0x67, @@ -242,7 +242,7 @@ var file_proto_prysm_v1alpha1_eip_7251_proto_rawDesc = []byte{ 0x6c, 0x12, 0x64, 0x0a, 0x05, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, 0x42, 0x4e, 0x82, 0xb5, 0x18, 0x4a, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, - 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, + 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 
0x73, 0x2e, 0x56, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x52, 0x05, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x12, 0x16, 0x0a, 0x06, 0x61, 0x6d, 0x6f, 0x75, 0x6e, @@ -251,7 +251,7 @@ var file_proto_prysm_v1alpha1_eip_7251_proto_rawDesc = []byte{ 0x65, 0x70, 0x6f, 0x63, 0x68, 0x18, 0x03, 0x20, 0x01, 0x28, 0x04, 0x42, 0x45, 0x82, 0xb5, 0x18, 0x41, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, - 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, + 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x45, 0x70, 0x6f, 0x63, 0x68, 0x52, 0x11, 0x77, 0x69, 0x74, 0x68, 0x64, 0x72, 0x61, 0x77, 0x61, 0x62, 0x6c, 0x65, 0x45, 0x70, 0x6f, 0x63, 0x68, 0x22, 0xfc, 0x01, 0x0a, 0x14, 0x50, 0x65, 0x6e, 0x64, 0x69, 0x6e, @@ -259,14 +259,14 @@ var file_proto_prysm_v1alpha1_eip_7251_proto_rawDesc = []byte{ 0x0a, 0x0c, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, 0x42, 0x4e, 0x82, 0xb5, 0x18, 0x4a, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, - 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, + 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x56, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x52, 0x0b, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x12, 0x71, 0x0a, 0x0c, 0x74, 0x61, 0x72, 0x67, 0x65, 0x74, 0x5f, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x18, 0x02, 0x20, 0x01, 0x28, 0x04, 0x42, 0x4e, 0x82, 0xb5, 0x18, 0x4a, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, - 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, + 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x56, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x52, 0x0b, 0x74, 0x61, 0x72, 0x67, 0x65, 0x74, 0x49, @@ -275,7 +275,7 @@ var file_proto_prysm_v1alpha1_eip_7251_proto_rawDesc = []byte{ 0x61, 0x31, 0x42, 0x0c, 0x45, 0x49, 0x50, 0x37, 0x32, 0x35, 0x31, 0x50, 0x72, 0x6f, 0x74, 0x6f, 0x50, 0x01, 0x5a, 0x39, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, - 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, + 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x3b, 0x65, 0x74, 0x68, 0xaa, 0x02, 0x15, 0x45, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x45, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0xca, 0x02, 0x15, 0x45, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, diff --git 
a/proto/prysm/v1alpha1/eip_7251.proto b/proto/prysm/v1alpha1/eip_7251.proto index 0fff30a037..3a662d4371 100644 --- a/proto/prysm/v1alpha1/eip_7251.proto +++ b/proto/prysm/v1alpha1/eip_7251.proto @@ -18,7 +18,7 @@ package ethereum.eth.v1alpha1; import "proto/eth/ext/options.proto"; option csharp_namespace = "Ethereum.Eth.v1alpha1"; -option go_package = "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1;eth"; +option go_package = "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1;eth"; option java_multiple_files = true; option java_outer_classname = "EIP7251Proto"; option java_package = "org.ethereum.eth.v1alpha1"; @@ -40,14 +40,14 @@ message PendingDeposit { bytes signature = 4 [ (ethereum.eth.ext.ssz_size) = "96" ]; uint64 slot = 5 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Slot" ]; } message PendingPartialWithdrawal { // Validator index for the withdrawal. uint64 index = 1 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/" + "github.com/OffchainLabs/prysm/v7/consensus-types/" "primitives.ValidatorIndex" ]; // The amount of the withdrawal (gwei). @@ -56,17 +56,17 @@ message PendingPartialWithdrawal { // A partial withdrawal is valid at this epoch or later. uint64 withdrawable_epoch = 3 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Epoch" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Epoch" ]; } message PendingConsolidation { // Validator from which the funds will be moved. uint64 source_index = 1 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/" + "github.com/OffchainLabs/prysm/v7/" "consensus-types/primitives.ValidatorIndex" ]; // Validator to which the funds will be moved. 
uint64 target_index = 2 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/" + "github.com/OffchainLabs/prysm/v7/" "consensus-types/primitives.ValidatorIndex" ]; } diff --git a/proto/prysm/v1alpha1/eip_7521.go b/proto/prysm/v1alpha1/eip_7521.go index 0e19321c41..ee1a788016 100644 --- a/proto/prysm/v1alpha1/eip_7521.go +++ b/proto/prysm/v1alpha1/eip_7521.go @@ -1,6 +1,6 @@ package eth -import "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" +import "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" // Copy -- func (pd *PendingDeposit) Copy() *PendingDeposit { diff --git a/proto/prysm/v1alpha1/eip_7521_fuzz_test.go b/proto/prysm/v1alpha1/eip_7521_fuzz_test.go index d7d1fca552..65fa4c9170 100644 --- a/proto/prysm/v1alpha1/eip_7521_fuzz_test.go +++ b/proto/prysm/v1alpha1/eip_7521_fuzz_test.go @@ -3,7 +3,7 @@ package eth_test import ( "testing" - eth "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + eth "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" ) func TestCopyEip7521Types_Fuzz(t *testing.T) { diff --git a/proto/prysm/v1alpha1/electra.ssz.go b/proto/prysm/v1alpha1/electra.ssz.go index 218d4121d4..babdc094ac 100644 --- a/proto/prysm/v1alpha1/electra.ssz.go +++ b/proto/prysm/v1alpha1/electra.ssz.go @@ -2,8 +2,8 @@ package eth import ( - github_com_OffchainLabs_prysm_v6_consensus_types_primitives "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - v1 "github.com/OffchainLabs/prysm/v6/proto/engine/v1" + github_com_OffchainLabs_prysm_v7_consensus_types_primitives "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + v1 "github.com/OffchainLabs/prysm/v7/proto/engine/v1" ssz "github.com/prysmaticlabs/fastssz" ) @@ -163,7 +163,7 @@ func (a *AggregateAttestationAndProofElectra) UnmarshalSSZ(buf []byte) error { var o1 uint64 // Field (0) 'AggregatorIndex' - a.AggregatorIndex = github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex(ssz.UnmarshallUint64(buf[0:8])) + a.AggregatorIndex = github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex(ssz.UnmarshallUint64(buf[0:8])) // Offset (1) 'Aggregate' if o1 = ssz.ReadOffset(buf[8:12]); o1 > size { @@ -425,10 +425,10 @@ func (s *SingleAttestation) UnmarshalSSZ(buf []byte) error { } // Field (0) 'CommitteeId' - s.CommitteeId = github_com_OffchainLabs_prysm_v6_consensus_types_primitives.CommitteeIndex(ssz.UnmarshallUint64(buf[0:8])) + s.CommitteeId = github_com_OffchainLabs_prysm_v7_consensus_types_primitives.CommitteeIndex(ssz.UnmarshallUint64(buf[0:8])) // Field (1) 'AttesterIndex' - s.AttesterIndex = github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex(ssz.UnmarshallUint64(buf[8:16])) + s.AttesterIndex = github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex(ssz.UnmarshallUint64(buf[8:16])) // Field (2) 'Data' if s.Data == nil { @@ -1405,10 +1405,10 @@ func (b *BeaconBlockElectra) UnmarshalSSZ(buf []byte) error { var o4 uint64 // Field (0) 'Slot' - b.Slot = github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot(ssz.UnmarshallUint64(buf[0:8])) + b.Slot = github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot(ssz.UnmarshallUint64(buf[0:8])) // Field (1) 'ProposerIndex' - b.ProposerIndex = github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex(ssz.UnmarshallUint64(buf[8:16])) + b.ProposerIndex = github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex(ssz.UnmarshallUint64(buf[8:16])) // Field (2) 'ParentRoot' if cap(b.ParentRoot) == 0 { @@ -2307,10 +2307,10 
@@ func (b *BlindedBeaconBlockElectra) UnmarshalSSZ(buf []byte) error { var o4 uint64 // Field (0) 'Slot' - b.Slot = github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot(ssz.UnmarshallUint64(buf[0:8])) + b.Slot = github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot(ssz.UnmarshallUint64(buf[0:8])) // Field (1) 'ProposerIndex' - b.ProposerIndex = github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex(ssz.UnmarshallUint64(buf[8:16])) + b.ProposerIndex = github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex(ssz.UnmarshallUint64(buf[8:16])) // Field (2) 'ParentRoot' if cap(b.ParentRoot) == 0 { @@ -3672,7 +3672,7 @@ func (b *BeaconStateElectra) UnmarshalSSZ(buf []byte) error { b.GenesisValidatorsRoot = append(b.GenesisValidatorsRoot, buf[8:40]...) // Field (2) 'Slot' - b.Slot = github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot(ssz.UnmarshallUint64(buf[40:48])) + b.Slot = github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot(ssz.UnmarshallUint64(buf[40:48])) // Field (3) 'Fork' if b.Fork == nil { @@ -3828,7 +3828,7 @@ func (b *BeaconStateElectra) UnmarshalSSZ(buf []byte) error { b.NextWithdrawalIndex = ssz.UnmarshallUint64(buf[2736633:2736641]) // Field (26) 'NextWithdrawalValidatorIndex' - b.NextWithdrawalValidatorIndex = github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex(ssz.UnmarshallUint64(buf[2736641:2736649])) + b.NextWithdrawalValidatorIndex = github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex(ssz.UnmarshallUint64(buf[2736641:2736649])) // Offset (27) 'HistoricalSummaries' if o27 = ssz.ReadOffset(buf[2736649:2736653]); o27 > size || o24 > o27 { @@ -3839,19 +3839,19 @@ func (b *BeaconStateElectra) UnmarshalSSZ(buf []byte) error { b.DepositRequestsStartIndex = ssz.UnmarshallUint64(buf[2736653:2736661]) // Field (29) 'DepositBalanceToConsume' - b.DepositBalanceToConsume = github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Gwei(ssz.UnmarshallUint64(buf[2736661:2736669])) + b.DepositBalanceToConsume = github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Gwei(ssz.UnmarshallUint64(buf[2736661:2736669])) // Field (30) 'ExitBalanceToConsume' - b.ExitBalanceToConsume = github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Gwei(ssz.UnmarshallUint64(buf[2736669:2736677])) + b.ExitBalanceToConsume = github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Gwei(ssz.UnmarshallUint64(buf[2736669:2736677])) // Field (31) 'EarliestExitEpoch' - b.EarliestExitEpoch = github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch(ssz.UnmarshallUint64(buf[2736677:2736685])) + b.EarliestExitEpoch = github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch(ssz.UnmarshallUint64(buf[2736677:2736685])) // Field (32) 'ConsolidationBalanceToConsume' - b.ConsolidationBalanceToConsume = github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Gwei(ssz.UnmarshallUint64(buf[2736685:2736693])) + b.ConsolidationBalanceToConsume = github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Gwei(ssz.UnmarshallUint64(buf[2736685:2736693])) // Field (33) 'EarliestConsolidationEpoch' - b.EarliestConsolidationEpoch = github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch(ssz.UnmarshallUint64(buf[2736693:2736701])) + b.EarliestConsolidationEpoch = github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch(ssz.UnmarshallUint64(buf[2736693:2736701])) // Offset (34) 'PendingDeposits' if o34 = ssz.ReadOffset(buf[2736701:2736705]); o34 > size 
|| o27 > o34 { @@ -4510,7 +4510,7 @@ func (p *PendingDeposit) UnmarshalSSZ(buf []byte) error { p.Signature = append(p.Signature, buf[88:184]...) // Field (4) 'Slot' - p.Slot = github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot(ssz.UnmarshallUint64(buf[184:192])) + p.Slot = github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot(ssz.UnmarshallUint64(buf[184:192])) return err } @@ -4591,13 +4591,13 @@ func (p *PendingPartialWithdrawal) UnmarshalSSZ(buf []byte) error { } // Field (0) 'Index' - p.Index = github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex(ssz.UnmarshallUint64(buf[0:8])) + p.Index = github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex(ssz.UnmarshallUint64(buf[0:8])) // Field (1) 'Amount' p.Amount = ssz.UnmarshallUint64(buf[8:16]) // Field (2) 'WithdrawableEpoch' - p.WithdrawableEpoch = github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch(ssz.UnmarshallUint64(buf[16:24])) + p.WithdrawableEpoch = github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch(ssz.UnmarshallUint64(buf[16:24])) return err } @@ -4657,10 +4657,10 @@ func (p *PendingConsolidation) UnmarshalSSZ(buf []byte) error { } // Field (0) 'SourceIndex' - p.SourceIndex = github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex(ssz.UnmarshallUint64(buf[0:8])) + p.SourceIndex = github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex(ssz.UnmarshallUint64(buf[0:8])) // Field (1) 'TargetIndex' - p.TargetIndex = github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex(ssz.UnmarshallUint64(buf[8:16])) + p.TargetIndex = github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex(ssz.UnmarshallUint64(buf[8:16])) return err } @@ -4981,7 +4981,7 @@ func (l *LightClientUpdateElectra) UnmarshalSSZ(buf []byte) error { } // Field (6) 'SignatureSlot' - l.SignatureSlot = github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot(ssz.UnmarshallUint64(buf[25208:25216])) + l.SignatureSlot = github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot(ssz.UnmarshallUint64(buf[25208:25216])) // Field (0) 'AttestedHeader' { @@ -5200,7 +5200,7 @@ func (l *LightClientFinalityUpdateElectra) UnmarshalSSZ(buf []byte) error { } // Field (4) 'SignatureSlot' - l.SignatureSlot = github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot(ssz.UnmarshallUint64(buf[392:400])) + l.SignatureSlot = github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot(ssz.UnmarshallUint64(buf[392:400])) // Field (0) 'AttestedHeader' { diff --git a/proto/prysm/v1alpha1/finalized_block_root_container.pb.go b/proto/prysm/v1alpha1/finalized_block_root_container.pb.go index a25471ed1f..ab8b1e7641 100755 --- a/proto/prysm/v1alpha1/finalized_block_root_container.pb.go +++ b/proto/prysm/v1alpha1/finalized_block_root_container.pb.go @@ -93,7 +93,7 @@ var file_proto_prysm_v1alpha1_finalized_block_root_container_proto_rawDesc = []b 0x6f, 0x6f, 0x74, 0x43, 0x6f, 0x6e, 0x74, 0x61, 0x69, 0x6e, 0x65, 0x72, 0x50, 0x72, 0x6f, 0x74, 0x6f, 0x50, 0x01, 0x5a, 0x39, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, - 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2f, 0x70, 0x72, 0x79, 0x73, + 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x3b, 0x65, 0x74, 0x68, 0xaa, 0x02, 
0x15, 0x45, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x45, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0xca, 0x02, 0x15, 0x45, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, diff --git a/proto/prysm/v1alpha1/finalized_block_root_container.proto b/proto/prysm/v1alpha1/finalized_block_root_container.proto index 24fa439eef..79c3505564 100644 --- a/proto/prysm/v1alpha1/finalized_block_root_container.proto +++ b/proto/prysm/v1alpha1/finalized_block_root_container.proto @@ -3,7 +3,7 @@ syntax = "proto3"; package ethereum.eth.v1alpha1; option csharp_namespace = "Ethereum.Eth.v1alpha1"; -option go_package = "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1;eth"; +option go_package = "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1;eth"; option java_multiple_files = true; option java_outer_classname = "FinalizedBlockRootContainerProto"; option java_package = "org.ethereum.eth.v1alpha1"; diff --git a/proto/prysm/v1alpha1/fulu.ssz.go b/proto/prysm/v1alpha1/fulu.ssz.go index 25e3094a0e..b41c3e2f93 100644 --- a/proto/prysm/v1alpha1/fulu.ssz.go +++ b/proto/prysm/v1alpha1/fulu.ssz.go @@ -2,8 +2,8 @@ package eth import ( - github_com_OffchainLabs_prysm_v6_consensus_types_primitives "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - v1 "github.com/OffchainLabs/prysm/v6/proto/engine/v1" + github_com_OffchainLabs_prysm_v7_consensus_types_primitives "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + v1 "github.com/OffchainLabs/prysm/v7/proto/engine/v1" ssz "github.com/prysmaticlabs/fastssz" ) @@ -700,10 +700,10 @@ func (b *BlindedBeaconBlockFulu) UnmarshalSSZ(buf []byte) error { var o4 uint64 // Field (0) 'Slot' - b.Slot = github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot(ssz.UnmarshallUint64(buf[0:8])) + b.Slot = github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot(ssz.UnmarshallUint64(buf[0:8])) // Field (1) 'ProposerIndex' - b.ProposerIndex = github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex(ssz.UnmarshallUint64(buf[8:16])) + b.ProposerIndex = github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex(ssz.UnmarshallUint64(buf[8:16])) // Field (2) 'ParentRoot' if cap(b.ParentRoot) == 0 { @@ -1159,7 +1159,7 @@ func (b *BeaconStateFulu) UnmarshalSSZ(buf []byte) error { b.GenesisValidatorsRoot = append(b.GenesisValidatorsRoot, buf[8:40]...) 
// Field (2) 'Slot' - b.Slot = github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot(ssz.UnmarshallUint64(buf[40:48])) + b.Slot = github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot(ssz.UnmarshallUint64(buf[40:48])) // Field (3) 'Fork' if b.Fork == nil { @@ -1315,7 +1315,7 @@ func (b *BeaconStateFulu) UnmarshalSSZ(buf []byte) error { b.NextWithdrawalIndex = ssz.UnmarshallUint64(buf[2736633:2736641]) // Field (26) 'NextWithdrawalValidatorIndex' - b.NextWithdrawalValidatorIndex = github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex(ssz.UnmarshallUint64(buf[2736641:2736649])) + b.NextWithdrawalValidatorIndex = github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex(ssz.UnmarshallUint64(buf[2736641:2736649])) // Offset (27) 'HistoricalSummaries' if o27 = ssz.ReadOffset(buf[2736649:2736653]); o27 > size || o24 > o27 { @@ -1326,19 +1326,19 @@ func (b *BeaconStateFulu) UnmarshalSSZ(buf []byte) error { b.DepositRequestsStartIndex = ssz.UnmarshallUint64(buf[2736653:2736661]) // Field (29) 'DepositBalanceToConsume' - b.DepositBalanceToConsume = github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Gwei(ssz.UnmarshallUint64(buf[2736661:2736669])) + b.DepositBalanceToConsume = github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Gwei(ssz.UnmarshallUint64(buf[2736661:2736669])) // Field (30) 'ExitBalanceToConsume' - b.ExitBalanceToConsume = github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Gwei(ssz.UnmarshallUint64(buf[2736669:2736677])) + b.ExitBalanceToConsume = github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Gwei(ssz.UnmarshallUint64(buf[2736669:2736677])) // Field (31) 'EarliestExitEpoch' - b.EarliestExitEpoch = github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch(ssz.UnmarshallUint64(buf[2736677:2736685])) + b.EarliestExitEpoch = github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch(ssz.UnmarshallUint64(buf[2736677:2736685])) // Field (32) 'ConsolidationBalanceToConsume' - b.ConsolidationBalanceToConsume = github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Gwei(ssz.UnmarshallUint64(buf[2736685:2736693])) + b.ConsolidationBalanceToConsume = github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Gwei(ssz.UnmarshallUint64(buf[2736685:2736693])) // Field (33) 'EarliestConsolidationEpoch' - b.EarliestConsolidationEpoch = github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch(ssz.UnmarshallUint64(buf[2736693:2736701])) + b.EarliestConsolidationEpoch = github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch(ssz.UnmarshallUint64(buf[2736693:2736701])) // Offset (34) 'PendingDeposits' if o34 = ssz.ReadOffset(buf[2736701:2736705]); o34 > size || o27 > o34 { @@ -2429,7 +2429,7 @@ func (s *StatusV2) UnmarshalSSZ(buf []byte) error { s.FinalizedRoot = append(s.FinalizedRoot, buf[4:36]...) // Field (2) 'FinalizedEpoch' - s.FinalizedEpoch = github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch(ssz.UnmarshallUint64(buf[36:44])) + s.FinalizedEpoch = github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch(ssz.UnmarshallUint64(buf[36:44])) // Field (3) 'HeadRoot' if cap(s.HeadRoot) == 0 { @@ -2438,10 +2438,10 @@ func (s *StatusV2) UnmarshalSSZ(buf []byte) error { s.HeadRoot = append(s.HeadRoot, buf[44:76]...) 
// Field (4) 'HeadSlot' - s.HeadSlot = github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot(ssz.UnmarshallUint64(buf[76:84])) + s.HeadSlot = github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot(ssz.UnmarshallUint64(buf[76:84])) // Field (5) 'EarliestAvailableSlot' - s.EarliestAvailableSlot = github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot(ssz.UnmarshallUint64(buf[84:92])) + s.EarliestAvailableSlot = github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot(ssz.UnmarshallUint64(buf[84:92])) return err } diff --git a/proto/prysm/v1alpha1/fuzz_test.go b/proto/prysm/v1alpha1/fuzz_test.go index 2b5a5323cb..b8b8c138db 100644 --- a/proto/prysm/v1alpha1/fuzz_test.go +++ b/proto/prysm/v1alpha1/fuzz_test.go @@ -4,8 +4,8 @@ import ( "fmt" "testing" - eth "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/require" + eth "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/require" fuzz "github.com/google/gofuzz" ) diff --git a/proto/prysm/v1alpha1/gloas.pb.go b/proto/prysm/v1alpha1/gloas.pb.go index 4e54da097a..733aca6d0d 100755 --- a/proto/prysm/v1alpha1/gloas.pb.go +++ b/proto/prysm/v1alpha1/gloas.pb.go @@ -11,9 +11,9 @@ import ( sync "sync" github_com_OffchainLabs_go_bitfield "github.com/OffchainLabs/go-bitfield" - github_com_OffchainLabs_prysm_v6_consensus_types_primitives "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - v1 "github.com/OffchainLabs/prysm/v6/proto/engine/v1" - _ "github.com/OffchainLabs/prysm/v6/proto/eth/ext" + github_com_OffchainLabs_prysm_v7_consensus_types_primitives "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + v1 "github.com/OffchainLabs/prysm/v7/proto/engine/v1" + _ "github.com/OffchainLabs/prysm/v7/proto/eth/ext" protoreflect "google.golang.org/protobuf/reflect/protoreflect" protoimpl "google.golang.org/protobuf/runtime/protoimpl" ) @@ -32,9 +32,9 @@ type ExecutionPayloadBid struct { BlockHash []byte `protobuf:"bytes,3,opt,name=block_hash,json=blockHash,proto3" json:"block_hash,omitempty" ssz-size:"32"` FeeRecipient []byte `protobuf:"bytes,4,opt,name=fee_recipient,json=feeRecipient,proto3" json:"fee_recipient,omitempty" ssz-size:"20"` GasLimit uint64 `protobuf:"varint,5,opt,name=gas_limit,json=gasLimit,proto3" json:"gas_limit,omitempty"` - BuilderIndex github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex `protobuf:"varint,6,opt,name=builder_index,json=builderIndex,proto3" json:"builder_index,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.ValidatorIndex"` - Slot github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot `protobuf:"varint,7,opt,name=slot,proto3" json:"slot,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot"` - Value github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Gwei `protobuf:"varint,8,opt,name=value,proto3" json:"value,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Gwei"` + BuilderIndex github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex `protobuf:"varint,6,opt,name=builder_index,json=builderIndex,proto3" json:"builder_index,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.ValidatorIndex"` + Slot github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot `protobuf:"varint,7,opt,name=slot,proto3" json:"slot,omitempty" 
cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Slot"` + Value github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Gwei `protobuf:"varint,8,opt,name=value,proto3" json:"value,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Gwei"` BlobKzgCommitmentsRoot []byte `protobuf:"bytes,9,opt,name=blob_kzg_commitments_root,json=blobKzgCommitmentsRoot,proto3" json:"blob_kzg_commitments_root,omitempty" ssz-size:"32"` unknownFields protoimpl.UnknownFields sizeCache protoimpl.SizeCache @@ -105,25 +105,25 @@ func (x *ExecutionPayloadBid) GetGasLimit() uint64 { return 0 } -func (x *ExecutionPayloadBid) GetBuilderIndex() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex { +func (x *ExecutionPayloadBid) GetBuilderIndex() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex { if x != nil { return x.BuilderIndex } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex(0) } -func (x *ExecutionPayloadBid) GetSlot() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot { +func (x *ExecutionPayloadBid) GetSlot() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot { if x != nil { return x.Slot } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot(0) } -func (x *ExecutionPayloadBid) GetValue() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Gwei { +func (x *ExecutionPayloadBid) GetValue() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Gwei { if x != nil { return x.Value } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Gwei(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Gwei(0) } func (x *ExecutionPayloadBid) GetBlobKzgCommitmentsRoot() []byte { @@ -188,7 +188,7 @@ func (x *SignedExecutionPayloadBid) GetSignature() []byte { type PayloadAttestationData struct { state protoimpl.MessageState `protogen:"open.v1"` BeaconBlockRoot []byte `protobuf:"bytes,1,opt,name=beacon_block_root,json=beaconBlockRoot,proto3" json:"beacon_block_root,omitempty" ssz-size:"32"` - Slot github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot `protobuf:"varint,2,opt,name=slot,proto3" json:"slot,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot"` + Slot github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot `protobuf:"varint,2,opt,name=slot,proto3" json:"slot,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Slot"` PayloadPresent bool `protobuf:"varint,3,opt,name=payload_present,json=payloadPresent,proto3" json:"payload_present,omitempty"` BlobDataAvailable bool `protobuf:"varint,4,opt,name=blob_data_available,json=blobDataAvailable,proto3" json:"blob_data_available,omitempty"` unknownFields protoimpl.UnknownFields @@ -232,11 +232,11 @@ func (x *PayloadAttestationData) GetBeaconBlockRoot() []byte { return nil } -func (x *PayloadAttestationData) GetSlot() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot { +func (x *PayloadAttestationData) GetSlot() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot { if x != nil { return x.Slot } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot(0) } func (x *PayloadAttestationData) 
GetPayloadPresent() bool { @@ -315,7 +315,7 @@ func (x *PayloadAttestation) GetSignature() []byte { type PayloadAttestationMessage struct { state protoimpl.MessageState `protogen:"open.v1"` - ValidatorIndex github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex `protobuf:"varint,1,opt,name=validator_index,json=validatorIndex,proto3" json:"validator_index,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.ValidatorIndex"` + ValidatorIndex github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex `protobuf:"varint,1,opt,name=validator_index,json=validatorIndex,proto3" json:"validator_index,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.ValidatorIndex"` Data *PayloadAttestationData `protobuf:"bytes,2,opt,name=data,proto3" json:"data,omitempty"` Signature []byte `protobuf:"bytes,3,opt,name=signature,proto3" json:"signature,omitempty" ssz-size:"96"` unknownFields protoimpl.UnknownFields @@ -352,11 +352,11 @@ func (*PayloadAttestationMessage) Descriptor() ([]byte, []int) { return file_proto_prysm_v1alpha1_gloas_proto_rawDescGZIP(), []int{4} } -func (x *PayloadAttestationMessage) GetValidatorIndex() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex { +func (x *PayloadAttestationMessage) GetValidatorIndex() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex { if x != nil { return x.ValidatorIndex } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex(0) } func (x *PayloadAttestationMessage) GetData() *PayloadAttestationData { @@ -375,8 +375,8 @@ func (x *PayloadAttestationMessage) GetSignature() []byte { type BeaconBlockGloas struct { state protoimpl.MessageState `protogen:"open.v1"` - Slot github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot `protobuf:"varint,1,opt,name=slot,proto3" json:"slot,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot"` - ProposerIndex github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex `protobuf:"varint,2,opt,name=proposer_index,json=proposerIndex,proto3" json:"proposer_index,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.ValidatorIndex"` + Slot github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot `protobuf:"varint,1,opt,name=slot,proto3" json:"slot,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Slot"` + ProposerIndex github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex `protobuf:"varint,2,opt,name=proposer_index,json=proposerIndex,proto3" json:"proposer_index,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.ValidatorIndex"` ParentRoot []byte `protobuf:"bytes,3,opt,name=parent_root,json=parentRoot,proto3" json:"parent_root,omitempty" ssz-size:"32"` StateRoot []byte `protobuf:"bytes,4,opt,name=state_root,json=stateRoot,proto3" json:"state_root,omitempty" ssz-size:"32"` Body *BeaconBlockBodyGloas `protobuf:"bytes,5,opt,name=body,proto3" json:"body,omitempty"` @@ -414,18 +414,18 @@ func (*BeaconBlockGloas) Descriptor() ([]byte, []int) { return file_proto_prysm_v1alpha1_gloas_proto_rawDescGZIP(), []int{5} } -func (x *BeaconBlockGloas) GetSlot() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot { +func (x *BeaconBlockGloas) GetSlot() 
github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot { if x != nil { return x.Slot } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot(0) } -func (x *BeaconBlockGloas) GetProposerIndex() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex { +func (x *BeaconBlockGloas) GetProposerIndex() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex { if x != nil { return x.ProposerIndex } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex(0) } func (x *BeaconBlockGloas) GetParentRoot() []byte { @@ -637,7 +637,7 @@ type BeaconStateGloas struct { state protoimpl.MessageState `protogen:"open.v1"` GenesisTime uint64 `protobuf:"varint,1001,opt,name=genesis_time,json=genesisTime,proto3" json:"genesis_time,omitempty"` GenesisValidatorsRoot []byte `protobuf:"bytes,1002,opt,name=genesis_validators_root,json=genesisValidatorsRoot,proto3" json:"genesis_validators_root,omitempty" ssz-size:"32"` - Slot github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot `protobuf:"varint,1003,opt,name=slot,proto3" json:"slot,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot"` + Slot github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot `protobuf:"varint,1003,opt,name=slot,proto3" json:"slot,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Slot"` Fork *Fork `protobuf:"bytes,1004,opt,name=fork,proto3" json:"fork,omitempty"` LatestBlockHeader *BeaconBlockHeader `protobuf:"bytes,2001,opt,name=latest_block_header,json=latestBlockHeader,proto3" json:"latest_block_header,omitempty"` BlockRoots [][]byte `protobuf:"bytes,2002,rep,name=block_roots,json=blockRoots,proto3" json:"block_roots,omitempty" ssz-size:"8192,32"` @@ -661,14 +661,14 @@ type BeaconStateGloas struct { NextSyncCommittee *SyncCommittee `protobuf:"bytes,9003,opt,name=next_sync_committee,json=nextSyncCommittee,proto3" json:"next_sync_committee,omitempty"` LatestExecutionPayloadBid *ExecutionPayloadBid `protobuf:"bytes,10001,opt,name=latest_execution_payload_bid,json=latestExecutionPayloadBid,proto3" json:"latest_execution_payload_bid,omitempty"` NextWithdrawalIndex uint64 `protobuf:"varint,11001,opt,name=next_withdrawal_index,json=nextWithdrawalIndex,proto3" json:"next_withdrawal_index,omitempty"` - NextWithdrawalValidatorIndex github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex `protobuf:"varint,11002,opt,name=next_withdrawal_validator_index,json=nextWithdrawalValidatorIndex,proto3" json:"next_withdrawal_validator_index,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.ValidatorIndex"` + NextWithdrawalValidatorIndex github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex `protobuf:"varint,11002,opt,name=next_withdrawal_validator_index,json=nextWithdrawalValidatorIndex,proto3" json:"next_withdrawal_validator_index,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.ValidatorIndex"` HistoricalSummaries []*HistoricalSummary `protobuf:"bytes,11003,rep,name=historical_summaries,json=historicalSummaries,proto3" json:"historical_summaries,omitempty" ssz-max:"16777216"` DepositRequestsStartIndex uint64 `protobuf:"varint,12001,opt,name=deposit_requests_start_index,json=depositRequestsStartIndex,proto3" 
json:"deposit_requests_start_index,omitempty"` - DepositBalanceToConsume github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Gwei `protobuf:"varint,12002,opt,name=deposit_balance_to_consume,json=depositBalanceToConsume,proto3" json:"deposit_balance_to_consume,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Gwei"` - ExitBalanceToConsume github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Gwei `protobuf:"varint,12003,opt,name=exit_balance_to_consume,json=exitBalanceToConsume,proto3" json:"exit_balance_to_consume,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Gwei"` - EarliestExitEpoch github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch `protobuf:"varint,12004,opt,name=earliest_exit_epoch,json=earliestExitEpoch,proto3" json:"earliest_exit_epoch,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Epoch"` - ConsolidationBalanceToConsume github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Gwei `protobuf:"varint,12005,opt,name=consolidation_balance_to_consume,json=consolidationBalanceToConsume,proto3" json:"consolidation_balance_to_consume,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Gwei"` - EarliestConsolidationEpoch github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch `protobuf:"varint,12006,opt,name=earliest_consolidation_epoch,json=earliestConsolidationEpoch,proto3" json:"earliest_consolidation_epoch,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Epoch"` + DepositBalanceToConsume github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Gwei `protobuf:"varint,12002,opt,name=deposit_balance_to_consume,json=depositBalanceToConsume,proto3" json:"deposit_balance_to_consume,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Gwei"` + ExitBalanceToConsume github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Gwei `protobuf:"varint,12003,opt,name=exit_balance_to_consume,json=exitBalanceToConsume,proto3" json:"exit_balance_to_consume,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Gwei"` + EarliestExitEpoch github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch `protobuf:"varint,12004,opt,name=earliest_exit_epoch,json=earliestExitEpoch,proto3" json:"earliest_exit_epoch,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Epoch"` + ConsolidationBalanceToConsume github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Gwei `protobuf:"varint,12005,opt,name=consolidation_balance_to_consume,json=consolidationBalanceToConsume,proto3" json:"consolidation_balance_to_consume,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Gwei"` + EarliestConsolidationEpoch github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch `protobuf:"varint,12006,opt,name=earliest_consolidation_epoch,json=earliestConsolidationEpoch,proto3" json:"earliest_consolidation_epoch,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Epoch"` PendingDeposits []*PendingDeposit `protobuf:"bytes,12007,rep,name=pending_deposits,json=pendingDeposits,proto3" json:"pending_deposits,omitempty" ssz-max:"134217728"` PendingPartialWithdrawals []*PendingPartialWithdrawal `protobuf:"bytes,12008,rep,name=pending_partial_withdrawals,json=pendingPartialWithdrawals,proto3" json:"pending_partial_withdrawals,omitempty" ssz-max:"134217728"` PendingConsolidations 
[]*PendingConsolidation `protobuf:"bytes,12009,rep,name=pending_consolidations,json=pendingConsolidations,proto3" json:"pending_consolidations,omitempty" ssz-max:"262144"` @@ -726,11 +726,11 @@ func (x *BeaconStateGloas) GetGenesisValidatorsRoot() []byte { return nil } -func (x *BeaconStateGloas) GetSlot() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot { +func (x *BeaconStateGloas) GetSlot() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot { if x != nil { return x.Slot } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot(0) } func (x *BeaconStateGloas) GetFork() *Fork { @@ -894,11 +894,11 @@ func (x *BeaconStateGloas) GetNextWithdrawalIndex() uint64 { return 0 } -func (x *BeaconStateGloas) GetNextWithdrawalValidatorIndex() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex { +func (x *BeaconStateGloas) GetNextWithdrawalValidatorIndex() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex { if x != nil { return x.NextWithdrawalValidatorIndex } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex(0) } func (x *BeaconStateGloas) GetHistoricalSummaries() []*HistoricalSummary { @@ -915,39 +915,39 @@ func (x *BeaconStateGloas) GetDepositRequestsStartIndex() uint64 { return 0 } -func (x *BeaconStateGloas) GetDepositBalanceToConsume() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Gwei { +func (x *BeaconStateGloas) GetDepositBalanceToConsume() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Gwei { if x != nil { return x.DepositBalanceToConsume } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Gwei(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Gwei(0) } -func (x *BeaconStateGloas) GetExitBalanceToConsume() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Gwei { +func (x *BeaconStateGloas) GetExitBalanceToConsume() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Gwei { if x != nil { return x.ExitBalanceToConsume } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Gwei(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Gwei(0) } -func (x *BeaconStateGloas) GetEarliestExitEpoch() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch { +func (x *BeaconStateGloas) GetEarliestExitEpoch() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch { if x != nil { return x.EarliestExitEpoch } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch(0) } -func (x *BeaconStateGloas) GetConsolidationBalanceToConsume() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Gwei { +func (x *BeaconStateGloas) GetConsolidationBalanceToConsume() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Gwei { if x != nil { return x.ConsolidationBalanceToConsume } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Gwei(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Gwei(0) } -func (x *BeaconStateGloas) GetEarliestConsolidationEpoch() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch { +func (x *BeaconStateGloas) GetEarliestConsolidationEpoch() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch { if 
x != nil { return x.EarliestConsolidationEpoch } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch(0) } func (x *BeaconStateGloas) GetPendingDeposits() []*PendingDeposit { @@ -1015,7 +1015,7 @@ func (x *BeaconStateGloas) GetLatestWithdrawalsRoot() []byte { type BuilderPendingPayment struct { state protoimpl.MessageState `protogen:"open.v1"` - Weight github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Gwei `protobuf:"varint,1,opt,name=weight,proto3" json:"weight,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Gwei"` + Weight github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Gwei `protobuf:"varint,1,opt,name=weight,proto3" json:"weight,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Gwei"` Withdrawal *BuilderPendingWithdrawal `protobuf:"bytes,2,opt,name=withdrawal,proto3" json:"withdrawal,omitempty"` unknownFields protoimpl.UnknownFields sizeCache protoimpl.SizeCache @@ -1051,11 +1051,11 @@ func (*BuilderPendingPayment) Descriptor() ([]byte, []int) { return file_proto_prysm_v1alpha1_gloas_proto_rawDescGZIP(), []int{9} } -func (x *BuilderPendingPayment) GetWeight() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Gwei { +func (x *BuilderPendingPayment) GetWeight() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Gwei { if x != nil { return x.Weight } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Gwei(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Gwei(0) } func (x *BuilderPendingPayment) GetWithdrawal() *BuilderPendingWithdrawal { @@ -1068,9 +1068,9 @@ func (x *BuilderPendingPayment) GetWithdrawal() *BuilderPendingWithdrawal { type BuilderPendingWithdrawal struct { state protoimpl.MessageState `protogen:"open.v1"` FeeRecipient []byte `protobuf:"bytes,1,opt,name=fee_recipient,json=feeRecipient,proto3" json:"fee_recipient,omitempty" ssz-size:"20"` - Amount github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Gwei `protobuf:"varint,2,opt,name=amount,proto3" json:"amount,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Gwei"` - BuilderIndex github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex `protobuf:"varint,3,opt,name=builder_index,json=builderIndex,proto3" json:"builder_index,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.ValidatorIndex"` - WithdrawableEpoch github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch `protobuf:"varint,4,opt,name=withdrawable_epoch,json=withdrawableEpoch,proto3" json:"withdrawable_epoch,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Epoch"` + Amount github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Gwei `protobuf:"varint,2,opt,name=amount,proto3" json:"amount,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Gwei"` + BuilderIndex github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex `protobuf:"varint,3,opt,name=builder_index,json=builderIndex,proto3" json:"builder_index,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.ValidatorIndex"` + WithdrawableEpoch github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch `protobuf:"varint,4,opt,name=withdrawable_epoch,json=withdrawableEpoch,proto3" json:"withdrawable_epoch,omitempty" 
cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Epoch"` unknownFields protoimpl.UnknownFields sizeCache protoimpl.SizeCache } @@ -1112,25 +1112,25 @@ func (x *BuilderPendingWithdrawal) GetFeeRecipient() []byte { return nil } -func (x *BuilderPendingWithdrawal) GetAmount() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Gwei { +func (x *BuilderPendingWithdrawal) GetAmount() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Gwei { if x != nil { return x.Amount } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Gwei(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Gwei(0) } -func (x *BuilderPendingWithdrawal) GetBuilderIndex() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex { +func (x *BuilderPendingWithdrawal) GetBuilderIndex() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex { if x != nil { return x.BuilderIndex } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex(0) } -func (x *BuilderPendingWithdrawal) GetWithdrawableEpoch() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch { +func (x *BuilderPendingWithdrawal) GetWithdrawableEpoch() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch { if x != nil { return x.WithdrawableEpoch } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch(0) } type DataColumnSidecarGloas struct { @@ -1139,7 +1139,7 @@ type DataColumnSidecarGloas struct { Column [][]byte `protobuf:"bytes,2,rep,name=column,proto3" json:"column,omitempty" ssz-max:"4096" ssz-size:"?,2048"` KzgCommitments [][]byte `protobuf:"bytes,3,rep,name=kzg_commitments,json=kzgCommitments,proto3" json:"kzg_commitments,omitempty" ssz-max:"4096" ssz-size:"?,48"` KzgProofs [][]byte `protobuf:"bytes,4,rep,name=kzg_proofs,json=kzgProofs,proto3" json:"kzg_proofs,omitempty" ssz-max:"4096" ssz-size:"?,48"` - Slot github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot `protobuf:"varint,5,opt,name=slot,proto3" json:"slot,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot"` + Slot github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot `protobuf:"varint,5,opt,name=slot,proto3" json:"slot,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Slot"` BeaconBlockRoot []byte `protobuf:"bytes,6,opt,name=beacon_block_root,json=beaconBlockRoot,proto3" json:"beacon_block_root,omitempty" ssz-size:"32"` unknownFields protoimpl.UnknownFields sizeCache protoimpl.SizeCache @@ -1203,11 +1203,11 @@ func (x *DataColumnSidecarGloas) GetKzgProofs() [][]byte { return nil } -func (x *DataColumnSidecarGloas) GetSlot() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot { +func (x *DataColumnSidecarGloas) GetSlot() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot { if x != nil { return x.Slot } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot(0) } func (x *DataColumnSidecarGloas) GetBeaconBlockRoot() []byte { @@ -1221,9 +1221,9 @@ type ExecutionPayloadEnvelope struct { state protoimpl.MessageState `protogen:"open.v1"` Payload *v1.ExecutionPayloadDeneb `protobuf:"bytes,1,opt,name=payload,proto3" json:"payload,omitempty"` 
ExecutionRequests *v1.ExecutionRequests `protobuf:"bytes,2,opt,name=execution_requests,json=executionRequests,proto3" json:"execution_requests,omitempty"` - BuilderIndex github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex `protobuf:"varint,3,opt,name=builder_index,json=builderIndex,proto3" json:"builder_index,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.ValidatorIndex"` + BuilderIndex github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex `protobuf:"varint,3,opt,name=builder_index,json=builderIndex,proto3" json:"builder_index,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.ValidatorIndex"` BeaconBlockRoot []byte `protobuf:"bytes,4,opt,name=beacon_block_root,json=beaconBlockRoot,proto3" json:"beacon_block_root,omitempty" ssz-size:"32"` - Slot github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot `protobuf:"varint,5,opt,name=slot,proto3" json:"slot,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot"` + Slot github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot `protobuf:"varint,5,opt,name=slot,proto3" json:"slot,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Slot"` BlobKzgCommitments [][]byte `protobuf:"bytes,6,rep,name=blob_kzg_commitments,json=blobKzgCommitments,proto3" json:"blob_kzg_commitments,omitempty" ssz-max:"4096" ssz-size:"?,48"` StateRoot []byte `protobuf:"bytes,7,opt,name=state_root,json=stateRoot,proto3" json:"state_root,omitempty" ssz-size:"32"` unknownFields protoimpl.UnknownFields @@ -1274,11 +1274,11 @@ func (x *ExecutionPayloadEnvelope) GetExecutionRequests() *v1.ExecutionRequests return nil } -func (x *ExecutionPayloadEnvelope) GetBuilderIndex() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex { +func (x *ExecutionPayloadEnvelope) GetBuilderIndex() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex { if x != nil { return x.BuilderIndex } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex(0) } func (x *ExecutionPayloadEnvelope) GetBeaconBlockRoot() []byte { @@ -1288,11 +1288,11 @@ func (x *ExecutionPayloadEnvelope) GetBeaconBlockRoot() []byte { return nil } -func (x *ExecutionPayloadEnvelope) GetSlot() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot { +func (x *ExecutionPayloadEnvelope) GetSlot() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot { if x != nil { return x.Slot } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot(0) } func (x *ExecutionPayloadEnvelope) GetBlobKzgCommitments() [][]byte { @@ -1402,20 +1402,20 @@ var file_proto_prysm_v1alpha1_gloas_proto_rawDesc = []byte{ 0x74, 0x12, 0x73, 0x0a, 0x0d, 0x62, 0x75, 0x69, 0x6c, 0x64, 0x65, 0x72, 0x5f, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x18, 0x06, 0x20, 0x01, 0x28, 0x04, 0x42, 0x4e, 0x82, 0xb5, 0x18, 0x4a, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, - 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, + 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 
0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x56, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x52, 0x0c, 0x62, 0x75, 0x69, 0x6c, 0x64, 0x65, 0x72, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x12, 0x58, 0x0a, 0x04, 0x73, 0x6c, 0x6f, 0x74, 0x18, 0x07, 0x20, 0x01, 0x28, 0x04, 0x42, 0x44, 0x82, 0xb5, 0x18, 0x40, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, - 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, + 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x53, 0x6c, 0x6f, 0x74, 0x52, 0x04, 0x73, 0x6c, 0x6f, 0x74, 0x12, 0x5a, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x08, 0x20, 0x01, 0x28, 0x04, 0x42, 0x44, 0x82, 0xb5, 0x18, 0x40, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, - 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, + 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x47, 0x77, 0x65, 0x69, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x12, 0x41, 0x0a, 0x19, 0x62, 0x6c, 0x6f, 0x62, 0x5f, 0x6b, 0x7a, 0x67, 0x5f, 0x63, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x6d, @@ -1438,7 +1438,7 @@ var file_proto_prysm_v1alpha1_gloas_proto_rawDesc = []byte{ 0x6c, 0x6f, 0x63, 0x6b, 0x52, 0x6f, 0x6f, 0x74, 0x12, 0x58, 0x0a, 0x04, 0x73, 0x6c, 0x6f, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x04, 0x42, 0x44, 0x82, 0xb5, 0x18, 0x40, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, - 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, + 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x53, 0x6c, 0x6f, 0x74, 0x52, 0x04, 0x73, 0x6c, 0x6f, 0x74, 0x12, 0x27, 0x0a, 0x0f, 0x70, 0x61, 0x79, 0x6c, 0x6f, 0x61, 0x64, 0x5f, 0x70, 0x72, @@ -1466,7 +1466,7 @@ var file_proto_prysm_v1alpha1_gloas_proto_rawDesc = []byte{ 0x12, 0x77, 0x0a, 0x0f, 0x76, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, 0x5f, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, 0x42, 0x4e, 0x82, 0xb5, 0x18, 0x4a, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, - 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, + 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x56, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x52, 0x0e, 0x76, 0x61, 0x6c, 0x69, 0x64, @@ -1481,13 +1481,13 @@ var file_proto_prysm_v1alpha1_gloas_proto_rawDesc = []byte{ 0x63, 0x6b, 0x47, 0x6c, 0x6f, 0x61, 0x73, 0x12, 0x58, 0x0a, 0x04, 0x73, 0x6c, 0x6f, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, 0x42, 0x44, 
0x82, 0xb5, 0x18, 0x40, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, - 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, + 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x53, 0x6c, 0x6f, 0x74, 0x52, 0x04, 0x73, 0x6c, 0x6f, 0x74, 0x12, 0x75, 0x0a, 0x0e, 0x70, 0x72, 0x6f, 0x70, 0x6f, 0x73, 0x65, 0x72, 0x5f, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x18, 0x02, 0x20, 0x01, 0x28, 0x04, 0x42, 0x4e, 0x82, 0xb5, 0x18, 0x4a, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, - 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, + 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x56, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x52, 0x0d, 0x70, 0x72, 0x6f, 0x70, 0x6f, @@ -1583,7 +1583,7 @@ var file_proto_prysm_v1alpha1_gloas_proto_rawDesc = []byte{ 0x12, 0x59, 0x0a, 0x04, 0x73, 0x6c, 0x6f, 0x74, 0x18, 0xeb, 0x07, 0x20, 0x01, 0x28, 0x04, 0x42, 0x44, 0x82, 0xb5, 0x18, 0x40, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, - 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, + 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x53, 0x6c, 0x6f, 0x74, 0x52, 0x04, 0x73, 0x6c, 0x6f, 0x74, 0x12, 0x30, 0x0a, 0x04, 0x66, 0x6f, 0x72, 0x6b, 0x18, 0xec, 0x07, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1b, 0x2e, 0x65, 0x74, 0x68, @@ -1696,7 +1696,7 @@ var file_proto_prysm_v1alpha1_gloas_proto_rawDesc = []byte{ 0x76, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, 0x5f, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x18, 0xfa, 0x55, 0x20, 0x01, 0x28, 0x04, 0x42, 0x4e, 0x82, 0xb5, 0x18, 0x4a, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, - 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, + 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x56, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x52, 0x1c, 0x6e, 0x65, 0x78, 0x74, 0x57, 0x69, 0x74, 0x68, @@ -1716,7 +1716,7 @@ var file_proto_prysm_v1alpha1_gloas_proto_rawDesc = []byte{ 0x61, 0x6c, 0x61, 0x6e, 0x63, 0x65, 0x5f, 0x74, 0x6f, 0x5f, 0x63, 0x6f, 0x6e, 0x73, 0x75, 0x6d, 0x65, 0x18, 0xe2, 0x5d, 0x20, 0x01, 0x28, 0x04, 0x42, 0x44, 0x82, 0xb5, 0x18, 0x40, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, - 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, + 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 
0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x47, 0x77, 0x65, 0x69, 0x52, 0x17, 0x64, 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x42, 0x61, 0x6c, 0x61, 0x6e, 0x63, 0x65, 0x54, 0x6f, @@ -1724,7 +1724,7 @@ var file_proto_prysm_v1alpha1_gloas_proto_rawDesc = []byte{ 0x62, 0x61, 0x6c, 0x61, 0x6e, 0x63, 0x65, 0x5f, 0x74, 0x6f, 0x5f, 0x63, 0x6f, 0x6e, 0x73, 0x75, 0x6d, 0x65, 0x18, 0xe3, 0x5d, 0x20, 0x01, 0x28, 0x04, 0x42, 0x44, 0x82, 0xb5, 0x18, 0x40, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, - 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, + 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x47, 0x77, 0x65, 0x69, 0x52, 0x14, 0x65, 0x78, 0x69, 0x74, 0x42, 0x61, 0x6c, 0x61, 0x6e, 0x63, 0x65, 0x54, 0x6f, 0x43, 0x6f, @@ -1732,7 +1732,7 @@ var file_proto_prysm_v1alpha1_gloas_proto_rawDesc = []byte{ 0x74, 0x5f, 0x65, 0x78, 0x69, 0x74, 0x5f, 0x65, 0x70, 0x6f, 0x63, 0x68, 0x18, 0xe4, 0x5d, 0x20, 0x01, 0x28, 0x04, 0x42, 0x45, 0x82, 0xb5, 0x18, 0x41, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, - 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, + 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x45, 0x70, 0x6f, 0x63, 0x68, 0x52, 0x11, 0x65, 0x61, 0x72, 0x6c, 0x69, 0x65, 0x73, 0x74, 0x45, 0x78, 0x69, 0x74, 0x45, 0x70, 0x6f, 0x63, 0x68, 0x12, 0x8e, 0x01, @@ -1740,7 +1740,7 @@ var file_proto_prysm_v1alpha1_gloas_proto_rawDesc = []byte{ 0x62, 0x61, 0x6c, 0x61, 0x6e, 0x63, 0x65, 0x5f, 0x74, 0x6f, 0x5f, 0x63, 0x6f, 0x6e, 0x73, 0x75, 0x6d, 0x65, 0x18, 0xe5, 0x5d, 0x20, 0x01, 0x28, 0x04, 0x42, 0x44, 0x82, 0xb5, 0x18, 0x40, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, - 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, + 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x47, 0x77, 0x65, 0x69, 0x52, 0x1d, 0x63, 0x6f, 0x6e, 0x73, 0x6f, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x42, 0x61, @@ -1749,7 +1749,7 @@ var file_proto_prysm_v1alpha1_gloas_proto_rawDesc = []byte{ 0x6f, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x65, 0x70, 0x6f, 0x63, 0x68, 0x18, 0xe6, 0x5d, 0x20, 0x01, 0x28, 0x04, 0x42, 0x45, 0x82, 0xb5, 0x18, 0x41, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, - 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, + 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 
0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x45, 0x70, 0x6f, 0x63, 0x68, 0x52, 0x1a, 0x65, 0x61, 0x72, 0x6c, 0x69, 0x65, 0x73, 0x74, 0x43, 0x6f, 0x6e, 0x73, 0x6f, 0x6c, 0x69, 0x64, 0x61, @@ -1810,7 +1810,7 @@ var file_proto_prysm_v1alpha1_gloas_proto_rawDesc = []byte{ 0x61, 0x79, 0x6d, 0x65, 0x6e, 0x74, 0x12, 0x5c, 0x0a, 0x06, 0x77, 0x65, 0x69, 0x67, 0x68, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, 0x42, 0x44, 0x82, 0xb5, 0x18, 0x40, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, - 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, + 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x47, 0x77, 0x65, 0x69, 0x52, 0x06, 0x77, 0x65, 0x69, 0x67, 0x68, 0x74, 0x12, 0x4f, 0x0a, 0x0a, 0x77, 0x69, 0x74, 0x68, 0x64, 0x72, 0x61, 0x77, @@ -1826,13 +1826,13 @@ var file_proto_prysm_v1alpha1_gloas_proto_rawDesc = []byte{ 0x5c, 0x0a, 0x06, 0x61, 0x6d, 0x6f, 0x75, 0x6e, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x04, 0x42, 0x44, 0x82, 0xb5, 0x18, 0x40, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, - 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, + 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x47, 0x77, 0x65, 0x69, 0x52, 0x06, 0x61, 0x6d, 0x6f, 0x75, 0x6e, 0x74, 0x12, 0x73, 0x0a, 0x0d, 0x62, 0x75, 0x69, 0x6c, 0x64, 0x65, 0x72, 0x5f, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x18, 0x03, 0x20, 0x01, 0x28, 0x04, 0x42, 0x4e, 0x82, 0xb5, 0x18, 0x4a, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, - 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, + 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x56, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x52, 0x0c, 0x62, 0x75, 0x69, 0x6c, 0x64, 0x65, 0x72, 0x49, 0x6e, 0x64, @@ -1840,7 +1840,7 @@ var file_proto_prysm_v1alpha1_gloas_proto_rawDesc = []byte{ 0x6c, 0x65, 0x5f, 0x65, 0x70, 0x6f, 0x63, 0x68, 0x18, 0x04, 0x20, 0x01, 0x28, 0x04, 0x42, 0x45, 0x82, 0xb5, 0x18, 0x41, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, - 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, + 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x45, 0x70, 0x6f, 0x63, 0x68, 0x52, 0x11, 0x77, 0x69, 0x74, 0x68, 0x64, 0x72, 0x61, 0x77, 0x61, 0x62, 0x6c, 0x65, 0x45, 0x70, 0x6f, 0x63, 0x68, 0x22, 0xd4, 0x02, 0x0a, 0x16, 0x44, 0x61, 0x74, @@ -1859,7 +1859,7 @@ var file_proto_prysm_v1alpha1_gloas_proto_rawDesc = []byte{ 0x73, 0x12, 0x58, 0x0a, 0x04, 0x73, 0x6c, 0x6f, 
0x74, 0x18, 0x05, 0x20, 0x01, 0x28, 0x04, 0x42, 0x44, 0x82, 0xb5, 0x18, 0x40, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, - 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, + 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x53, 0x6c, 0x6f, 0x74, 0x52, 0x04, 0x73, 0x6c, 0x6f, 0x74, 0x12, 0x32, 0x0a, 0x11, 0x62, 0x65, 0x61, 0x63, 0x6f, 0x6e, 0x5f, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x5f, 0x72, 0x6f, 0x6f, 0x74, @@ -1880,7 +1880,7 @@ var file_proto_prysm_v1alpha1_gloas_proto_rawDesc = []byte{ 0x65, 0x72, 0x5f, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x18, 0x03, 0x20, 0x01, 0x28, 0x04, 0x42, 0x4e, 0x82, 0xb5, 0x18, 0x4a, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, - 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, + 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x56, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x52, 0x0c, 0x62, 0x75, 0x69, 0x6c, 0x64, 0x65, 0x72, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x12, 0x32, 0x0a, 0x11, @@ -1890,7 +1890,7 @@ var file_proto_prysm_v1alpha1_gloas_proto_rawDesc = []byte{ 0x12, 0x58, 0x0a, 0x04, 0x73, 0x6c, 0x6f, 0x74, 0x18, 0x05, 0x20, 0x01, 0x28, 0x04, 0x42, 0x44, 0x82, 0xb5, 0x18, 0x40, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, - 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, + 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x53, 0x6c, 0x6f, 0x74, 0x52, 0x04, 0x73, 0x6c, 0x6f, 0x74, 0x12, 0x42, 0x0a, 0x14, 0x62, 0x6c, 0x6f, 0x62, 0x5f, 0x6b, 0x7a, 0x67, 0x5f, 0x63, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x6d, 0x65, 0x6e, @@ -1910,7 +1910,7 @@ var file_proto_prysm_v1alpha1_gloas_proto_rawDesc = []byte{ 0x18, 0x02, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x06, 0x8a, 0xb5, 0x18, 0x02, 0x39, 0x36, 0x52, 0x09, 0x73, 0x69, 0x67, 0x6e, 0x61, 0x74, 0x75, 0x72, 0x65, 0x42, 0x3b, 0x5a, 0x39, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, - 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x70, 0x72, + 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x3b, 0x65, 0x74, 0x68, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, } diff --git a/proto/prysm/v1alpha1/gloas.proto b/proto/prysm/v1alpha1/gloas.proto index 461c017c91..fcaaabe0f1 100644 --- a/proto/prysm/v1alpha1/gloas.proto +++ b/proto/prysm/v1alpha1/gloas.proto @@ -10,7 +10,7 @@ import "proto/prysm/v1alpha1/withdrawals.proto"; import "proto/prysm/v1alpha1/beacon_core_types.proto"; import "proto/prysm/v1alpha1/eip_7251.proto"; -option 
go_package = "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1;eth"; +option go_package = "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1;eth"; // ============================================================================= // Gloas Fork Specification @@ -39,15 +39,15 @@ message ExecutionPayloadBid { bytes fee_recipient = 4 [ (ethereum.eth.ext.ssz_size) = "20" ]; uint64 gas_limit = 5; uint64 builder_index = 6 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/" + "github.com/OffchainLabs/prysm/v7/" "consensus-types/primitives.ValidatorIndex" ]; uint64 slot = 7 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Slot" ]; uint64 value = 8 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Gwei" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Gwei" ]; bytes blob_kzg_commitments_root = 9 [ (ethereum.eth.ext.ssz_size) = "32" ]; } @@ -75,7 +75,7 @@ message PayloadAttestationData { bytes beacon_block_root = 1 [ (ethereum.eth.ext.ssz_size) = "32" ]; uint64 slot = 2 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Slot" ]; bool payload_present = 3; bool blob_data_available = 4; @@ -107,7 +107,7 @@ message PayloadAttestation { message PayloadAttestationMessage { uint64 validator_index = 1 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/" + "github.com/OffchainLabs/prysm/v7/consensus-types/" "primitives.ValidatorIndex" ]; PayloadAttestationData data = 2; bytes signature = 3 [ (ethereum.eth.ext.ssz_size) = "96" ]; @@ -118,10 +118,10 @@ message PayloadAttestationMessage { message BeaconBlockGloas { uint64 slot = 1 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Slot" ]; uint64 proposer_index = 2 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/" + "github.com/OffchainLabs/prysm/v7/" "consensus-types/primitives.ValidatorIndex" ]; bytes parent_root = 3 [ (ethereum.eth.ext.ssz_size) = "32" ]; bytes state_root = 4 [ (ethereum.eth.ext.ssz_size) = "32" ]; @@ -197,7 +197,7 @@ message BeaconStateGloas { bytes genesis_validators_root = 1002 [ (ethereum.eth.ext.ssz_size) = "32" ]; uint64 slot = 1003 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Slot" ]; Fork fork = 1004; @@ -261,7 +261,7 @@ message BeaconStateGloas { uint64 next_withdrawal_index = 11001; uint64 next_withdrawal_validator_index = 11002 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/" + "github.com/OffchainLabs/prysm/v7/consensus-types/" "primitives.ValidatorIndex" ]; repeated HistoricalSummary historical_summaries = 11003 [ (ethereum.eth.ext.ssz_max) = "16777216" ]; @@ -270,23 +270,23 @@ message BeaconStateGloas { uint64 deposit_requests_start_index = 12001; uint64 deposit_balance_to_consume = 12002 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Gwei" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Gwei" ]; uint64 exit_balance_to_consume = 12003 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Gwei" 
+ "github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Gwei" ]; uint64 earliest_exit_epoch = 12004 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Epoch" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Epoch" ]; uint64 consolidation_balance_to_consume = 12005 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Gwei" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Gwei" ]; uint64 earliest_consolidation_epoch = 12006 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Epoch" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Epoch" ]; repeated PendingDeposit pending_deposits = 12007 [ (ethereum.eth.ext.ssz_max) = "pending_deposits_limit" ]; @@ -318,7 +318,7 @@ message BeaconStateGloas { message BuilderPendingPayment { uint64 weight = 1 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Gwei" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Gwei" ]; BuilderPendingWithdrawal withdrawal = 2; } @@ -335,15 +335,15 @@ message BuilderPendingWithdrawal { bytes fee_recipient = 1 [ (ethereum.eth.ext.ssz_size) = "20" ]; uint64 amount = 2 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Gwei" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Gwei" ]; uint64 builder_index = 3 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/" + "github.com/OffchainLabs/prysm/v7/consensus-types/" "primitives.ValidatorIndex" ]; uint64 withdrawable_epoch = 4 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Epoch" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Epoch" ]; } @@ -374,7 +374,7 @@ message DataColumnSidecarGloas { ]; uint64 slot = 5 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Slot" ]; bytes beacon_block_root = 6 [(ethereum.eth.ext.ssz_size) = "32"]; } @@ -396,12 +396,12 @@ message ExecutionPayloadEnvelope { ethereum.engine.v1.ExecutionPayloadDeneb payload = 1; ethereum.engine.v1.ExecutionRequests execution_requests = 2; uint64 builder_index = 3 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/" + "github.com/OffchainLabs/prysm/v7/" "consensus-types/primitives.ValidatorIndex" ]; bytes beacon_block_root = 4 [ (ethereum.eth.ext.ssz_size) = "32" ]; uint64 slot = 5 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Slot" ]; repeated bytes blob_kzg_commitments = 6 [ (ethereum.eth.ext.ssz_size) = "?,48", diff --git a/proto/prysm/v1alpha1/gloas.ssz.go b/proto/prysm/v1alpha1/gloas.ssz.go index 7a1618413d..0fba58e759 100644 --- a/proto/prysm/v1alpha1/gloas.ssz.go +++ b/proto/prysm/v1alpha1/gloas.ssz.go @@ -2,8 +2,8 @@ package eth import ( - github_com_OffchainLabs_prysm_v6_consensus_types_primitives "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - v1 "github.com/OffchainLabs/prysm/v6/proto/engine/v1" + github_com_OffchainLabs_prysm_v7_consensus_types_primitives "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + v1 "github.com/OffchainLabs/prysm/v7/proto/engine/v1" ssz "github.com/prysmaticlabs/fastssz" ) @@ -102,13 +102,13 @@ func (e 
*ExecutionPayloadBid) UnmarshalSSZ(buf []byte) error { e.GasLimit = ssz.UnmarshallUint64(buf[116:124]) // Field (5) 'BuilderIndex' - e.BuilderIndex = github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex(ssz.UnmarshallUint64(buf[124:132])) + e.BuilderIndex = github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex(ssz.UnmarshallUint64(buf[124:132])) // Field (6) 'Slot' - e.Slot = github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot(ssz.UnmarshallUint64(buf[132:140])) + e.Slot = github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot(ssz.UnmarshallUint64(buf[132:140])) // Field (7) 'Value' - e.Value = github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Gwei(ssz.UnmarshallUint64(buf[140:148])) + e.Value = github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Gwei(ssz.UnmarshallUint64(buf[140:148])) // Field (8) 'BlobKzgCommitmentsRoot' if cap(e.BlobKzgCommitmentsRoot) == 0 { @@ -311,7 +311,7 @@ func (p *PayloadAttestationData) UnmarshalSSZ(buf []byte) error { p.BeaconBlockRoot = append(p.BeaconBlockRoot, buf[0:32]...) // Field (1) 'Slot' - p.Slot = github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot(ssz.UnmarshallUint64(buf[32:40])) + p.Slot = github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot(ssz.UnmarshallUint64(buf[32:40])) // Field (2) 'PayloadPresent' p.PayloadPresent, err = ssz.DecodeBool(buf[40:41]) @@ -505,7 +505,7 @@ func (p *PayloadAttestationMessage) UnmarshalSSZ(buf []byte) error { } // Field (0) 'ValidatorIndex' - p.ValidatorIndex = github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex(ssz.UnmarshallUint64(buf[0:8])) + p.ValidatorIndex = github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex(ssz.UnmarshallUint64(buf[0:8])) // Field (1) 'Data' if p.Data == nil { @@ -615,10 +615,10 @@ func (b *BeaconBlockGloas) UnmarshalSSZ(buf []byte) error { var o4 uint64 // Field (0) 'Slot' - b.Slot = github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot(ssz.UnmarshallUint64(buf[0:8])) + b.Slot = github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot(ssz.UnmarshallUint64(buf[0:8])) // Field (1) 'ProposerIndex' - b.ProposerIndex = github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex(ssz.UnmarshallUint64(buf[8:16])) + b.ProposerIndex = github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex(ssz.UnmarshallUint64(buf[8:16])) // Field (2) 'ParentRoot' if cap(b.ParentRoot) == 0 { @@ -1812,7 +1812,7 @@ func (b *BeaconStateGloas) UnmarshalSSZ(buf []byte) error { b.GenesisValidatorsRoot = append(b.GenesisValidatorsRoot, buf[8:40]...) 
// Field (2) 'Slot' - b.Slot = github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot(ssz.UnmarshallUint64(buf[40:48])) + b.Slot = github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot(ssz.UnmarshallUint64(buf[40:48])) // Field (3) 'Fork' if b.Fork == nil { @@ -1971,7 +1971,7 @@ func (b *BeaconStateGloas) UnmarshalSSZ(buf []byte) error { b.NextWithdrawalIndex = ssz.UnmarshallUint64(buf[2736809:2736817]) // Field (26) 'NextWithdrawalValidatorIndex' - b.NextWithdrawalValidatorIndex = github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex(ssz.UnmarshallUint64(buf[2736817:2736825])) + b.NextWithdrawalValidatorIndex = github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex(ssz.UnmarshallUint64(buf[2736817:2736825])) // Offset (27) 'HistoricalSummaries' if o27 = ssz.ReadOffset(buf[2736825:2736829]); o27 > size || o21 > o27 { @@ -1982,19 +1982,19 @@ func (b *BeaconStateGloas) UnmarshalSSZ(buf []byte) error { b.DepositRequestsStartIndex = ssz.UnmarshallUint64(buf[2736829:2736837]) // Field (29) 'DepositBalanceToConsume' - b.DepositBalanceToConsume = github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Gwei(ssz.UnmarshallUint64(buf[2736837:2736845])) + b.DepositBalanceToConsume = github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Gwei(ssz.UnmarshallUint64(buf[2736837:2736845])) // Field (30) 'ExitBalanceToConsume' - b.ExitBalanceToConsume = github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Gwei(ssz.UnmarshallUint64(buf[2736845:2736853])) + b.ExitBalanceToConsume = github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Gwei(ssz.UnmarshallUint64(buf[2736845:2736853])) // Field (31) 'EarliestExitEpoch' - b.EarliestExitEpoch = github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch(ssz.UnmarshallUint64(buf[2736853:2736861])) + b.EarliestExitEpoch = github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch(ssz.UnmarshallUint64(buf[2736853:2736861])) // Field (32) 'ConsolidationBalanceToConsume' - b.ConsolidationBalanceToConsume = github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Gwei(ssz.UnmarshallUint64(buf[2736861:2736869])) + b.ConsolidationBalanceToConsume = github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Gwei(ssz.UnmarshallUint64(buf[2736861:2736869])) // Field (33) 'EarliestConsolidationEpoch' - b.EarliestConsolidationEpoch = github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch(ssz.UnmarshallUint64(buf[2736869:2736877])) + b.EarliestConsolidationEpoch = github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch(ssz.UnmarshallUint64(buf[2736869:2736877])) // Offset (34) 'PendingDeposits' if o34 = ssz.ReadOffset(buf[2736877:2736881]); o34 > size || o27 > o34 { @@ -2721,7 +2721,7 @@ func (b *BuilderPendingPayment) UnmarshalSSZ(buf []byte) error { } // Field (0) 'Weight' - b.Weight = github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Gwei(ssz.UnmarshallUint64(buf[0:8])) + b.Weight = github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Gwei(ssz.UnmarshallUint64(buf[0:8])) // Field (1) 'Withdrawal' if b.Withdrawal == nil { @@ -2804,13 +2804,13 @@ func (b *BuilderPendingWithdrawal) UnmarshalSSZ(buf []byte) error { b.FeeRecipient = append(b.FeeRecipient, buf[0:20]...) 
// Field (1) 'Amount' - b.Amount = github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Gwei(ssz.UnmarshallUint64(buf[20:28])) + b.Amount = github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Gwei(ssz.UnmarshallUint64(buf[20:28])) // Field (2) 'BuilderIndex' - b.BuilderIndex = github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex(ssz.UnmarshallUint64(buf[28:36])) + b.BuilderIndex = github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex(ssz.UnmarshallUint64(buf[28:36])) // Field (3) 'WithdrawableEpoch' - b.WithdrawableEpoch = github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch(ssz.UnmarshallUint64(buf[36:44])) + b.WithdrawableEpoch = github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch(ssz.UnmarshallUint64(buf[36:44])) return err } @@ -2961,7 +2961,7 @@ func (d *DataColumnSidecarGloas) UnmarshalSSZ(buf []byte) error { } // Field (4) 'Slot' - d.Slot = github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot(ssz.UnmarshallUint64(buf[20:28])) + d.Slot = github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot(ssz.UnmarshallUint64(buf[20:28])) // Field (5) 'BeaconBlockRoot' if cap(d.BeaconBlockRoot) == 0 { @@ -3218,7 +3218,7 @@ func (e *ExecutionPayloadEnvelope) UnmarshalSSZ(buf []byte) error { } // Field (2) 'BuilderIndex' - e.BuilderIndex = github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex(ssz.UnmarshallUint64(buf[8:16])) + e.BuilderIndex = github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex(ssz.UnmarshallUint64(buf[8:16])) // Field (3) 'BeaconBlockRoot' if cap(e.BeaconBlockRoot) == 0 { @@ -3227,7 +3227,7 @@ func (e *ExecutionPayloadEnvelope) UnmarshalSSZ(buf []byte) error { e.BeaconBlockRoot = append(e.BeaconBlockRoot, buf[16:48]...) 
// Field (4) 'Slot' - e.Slot = github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot(ssz.UnmarshallUint64(buf[48:56])) + e.Slot = github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot(ssz.UnmarshallUint64(buf[48:56])) // Offset (5) 'BlobKzgCommitments' if o5 = ssz.ReadOffset(buf[56:60]); o5 > size || o1 > o5 { diff --git a/proto/prysm/v1alpha1/health.pb.go b/proto/prysm/v1alpha1/health.pb.go index a558f41d1e..0e7e739618 100755 --- a/proto/prysm/v1alpha1/health.pb.go +++ b/proto/prysm/v1alpha1/health.pb.go @@ -98,7 +98,7 @@ var file_proto_prysm_v1alpha1_health_proto_rawDesc = []byte{ 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x42, 0x0b, 0x48, 0x65, 0x61, 0x6c, 0x74, 0x68, 0x50, 0x72, 0x6f, 0x74, 0x6f, 0x50, 0x01, 0x5a, 0x39, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, - 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2f, 0x70, 0x72, + 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x3b, 0x65, 0x74, 0x68, 0xaa, 0x02, 0x15, 0x45, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x45, 0x74, 0x68, 0x2e, 0x56, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0xca, 0x02, 0x15, 0x45, 0x74, 0x68, 0x65, 0x72, diff --git a/proto/prysm/v1alpha1/health.proto b/proto/prysm/v1alpha1/health.proto index 212efc85c4..ce7ac7a2db 100644 --- a/proto/prysm/v1alpha1/health.proto +++ b/proto/prysm/v1alpha1/health.proto @@ -6,7 +6,7 @@ import "google/api/annotations.proto"; import "google/protobuf/empty.proto"; option csharp_namespace = "Ethereum.Eth.V1alpha1"; -option go_package = "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1;eth"; +option go_package = "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1;eth"; option java_multiple_files = true; option java_outer_classname = "HealthProto"; option java_package = "org.ethereum.eth.v1alpha1"; diff --git a/proto/prysm/v1alpha1/light_client.pb.go b/proto/prysm/v1alpha1/light_client.pb.go index a05baf35cb..bb14b53976 100755 --- a/proto/prysm/v1alpha1/light_client.pb.go +++ b/proto/prysm/v1alpha1/light_client.pb.go @@ -10,9 +10,9 @@ import ( reflect "reflect" sync "sync" - github_com_OffchainLabs_prysm_v6_consensus_types_primitives "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - v1 "github.com/OffchainLabs/prysm/v6/proto/engine/v1" - _ "github.com/OffchainLabs/prysm/v6/proto/eth/ext" + github_com_OffchainLabs_prysm_v7_consensus_types_primitives "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + v1 "github.com/OffchainLabs/prysm/v7/proto/engine/v1" + _ "github.com/OffchainLabs/prysm/v7/proto/eth/ext" protoreflect "google.golang.org/protobuf/reflect/protoreflect" protoimpl "google.golang.org/protobuf/runtime/protoimpl" ) @@ -92,7 +92,7 @@ type LightClientUpdateAltair struct { FinalizedHeader *LightClientHeaderAltair `protobuf:"bytes,4,opt,name=finalized_header,json=finalizedHeader,proto3" json:"finalized_header,omitempty"` FinalityBranch [][]byte `protobuf:"bytes,5,rep,name=finality_branch,json=finalityBranch,proto3" json:"finality_branch,omitempty" ssz-size:"6,32"` SyncAggregate *SyncAggregate `protobuf:"bytes,6,opt,name=sync_aggregate,json=syncAggregate,proto3" json:"sync_aggregate,omitempty"` - SignatureSlot github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot `protobuf:"varint,7,opt,name=signature_slot,json=signatureSlot,proto3" 
json:"signature_slot,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot"` + SignatureSlot github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot `protobuf:"varint,7,opt,name=signature_slot,json=signatureSlot,proto3" json:"signature_slot,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Slot"` unknownFields protoimpl.UnknownFields sizeCache protoimpl.SizeCache } @@ -169,11 +169,11 @@ func (x *LightClientUpdateAltair) GetSyncAggregate() *SyncAggregate { return nil } -func (x *LightClientUpdateAltair) GetSignatureSlot() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot { +func (x *LightClientUpdateAltair) GetSignatureSlot() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot { if x != nil { return x.SignatureSlot } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot(0) } type LightClientFinalityUpdateAltair struct { @@ -182,7 +182,7 @@ type LightClientFinalityUpdateAltair struct { FinalizedHeader *LightClientHeaderAltair `protobuf:"bytes,2,opt,name=finalized_header,json=finalizedHeader,proto3" json:"finalized_header,omitempty"` FinalityBranch [][]byte `protobuf:"bytes,3,rep,name=finality_branch,json=finalityBranch,proto3" json:"finality_branch,omitempty" ssz-size:"6,32"` SyncAggregate *SyncAggregate `protobuf:"bytes,4,opt,name=sync_aggregate,json=syncAggregate,proto3" json:"sync_aggregate,omitempty"` - SignatureSlot github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot `protobuf:"varint,5,opt,name=signature_slot,json=signatureSlot,proto3" json:"signature_slot,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot"` + SignatureSlot github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot `protobuf:"varint,5,opt,name=signature_slot,json=signatureSlot,proto3" json:"signature_slot,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Slot"` unknownFields protoimpl.UnknownFields sizeCache protoimpl.SizeCache } @@ -245,18 +245,18 @@ func (x *LightClientFinalityUpdateAltair) GetSyncAggregate() *SyncAggregate { return nil } -func (x *LightClientFinalityUpdateAltair) GetSignatureSlot() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot { +func (x *LightClientFinalityUpdateAltair) GetSignatureSlot() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot { if x != nil { return x.SignatureSlot } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot(0) } type LightClientOptimisticUpdateAltair struct { state protoimpl.MessageState `protogen:"open.v1"` AttestedHeader *LightClientHeaderAltair `protobuf:"bytes,1,opt,name=attested_header,json=attestedHeader,proto3" json:"attested_header,omitempty"` SyncAggregate *SyncAggregate `protobuf:"bytes,2,opt,name=sync_aggregate,json=syncAggregate,proto3" json:"sync_aggregate,omitempty"` - SignatureSlot github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot `protobuf:"varint,3,opt,name=signature_slot,json=signatureSlot,proto3" json:"signature_slot,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot"` + SignatureSlot github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot `protobuf:"varint,3,opt,name=signature_slot,json=signatureSlot,proto3" json:"signature_slot,omitempty" 
cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Slot"` unknownFields protoimpl.UnknownFields sizeCache protoimpl.SizeCache } @@ -305,11 +305,11 @@ func (x *LightClientOptimisticUpdateAltair) GetSyncAggregate() *SyncAggregate { return nil } -func (x *LightClientOptimisticUpdateAltair) GetSignatureSlot() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot { +func (x *LightClientOptimisticUpdateAltair) GetSignatureSlot() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot { if x != nil { return x.SignatureSlot } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot(0) } type LightClientHeaderAltair struct { @@ -424,7 +424,7 @@ type LightClientUpdateCapella struct { FinalizedHeader *LightClientHeaderCapella `protobuf:"bytes,4,opt,name=finalized_header,json=finalizedHeader,proto3" json:"finalized_header,omitempty"` FinalityBranch [][]byte `protobuf:"bytes,5,rep,name=finality_branch,json=finalityBranch,proto3" json:"finality_branch,omitempty" ssz-size:"6,32"` SyncAggregate *SyncAggregate `protobuf:"bytes,6,opt,name=sync_aggregate,json=syncAggregate,proto3" json:"sync_aggregate,omitempty"` - SignatureSlot github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot `protobuf:"varint,7,opt,name=signature_slot,json=signatureSlot,proto3" json:"signature_slot,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot"` + SignatureSlot github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot `protobuf:"varint,7,opt,name=signature_slot,json=signatureSlot,proto3" json:"signature_slot,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Slot"` unknownFields protoimpl.UnknownFields sizeCache protoimpl.SizeCache } @@ -501,11 +501,11 @@ func (x *LightClientUpdateCapella) GetSyncAggregate() *SyncAggregate { return nil } -func (x *LightClientUpdateCapella) GetSignatureSlot() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot { +func (x *LightClientUpdateCapella) GetSignatureSlot() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot { if x != nil { return x.SignatureSlot } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot(0) } type LightClientFinalityUpdateCapella struct { @@ -514,7 +514,7 @@ type LightClientFinalityUpdateCapella struct { FinalizedHeader *LightClientHeaderCapella `protobuf:"bytes,2,opt,name=finalized_header,json=finalizedHeader,proto3" json:"finalized_header,omitempty"` FinalityBranch [][]byte `protobuf:"bytes,3,rep,name=finality_branch,json=finalityBranch,proto3" json:"finality_branch,omitempty" ssz-size:"6,32"` SyncAggregate *SyncAggregate `protobuf:"bytes,4,opt,name=sync_aggregate,json=syncAggregate,proto3" json:"sync_aggregate,omitempty"` - SignatureSlot github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot `protobuf:"varint,5,opt,name=signature_slot,json=signatureSlot,proto3" json:"signature_slot,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot"` + SignatureSlot github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot `protobuf:"varint,5,opt,name=signature_slot,json=signatureSlot,proto3" json:"signature_slot,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Slot"` unknownFields protoimpl.UnknownFields sizeCache protoimpl.SizeCache } @@ -577,18 +577,18 @@ func (x 
*LightClientFinalityUpdateCapella) GetSyncAggregate() *SyncAggregate { return nil } -func (x *LightClientFinalityUpdateCapella) GetSignatureSlot() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot { +func (x *LightClientFinalityUpdateCapella) GetSignatureSlot() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot { if x != nil { return x.SignatureSlot } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot(0) } type LightClientOptimisticUpdateCapella struct { state protoimpl.MessageState `protogen:"open.v1"` AttestedHeader *LightClientHeaderCapella `protobuf:"bytes,1,opt,name=attested_header,json=attestedHeader,proto3" json:"attested_header,omitempty"` SyncAggregate *SyncAggregate `protobuf:"bytes,2,opt,name=sync_aggregate,json=syncAggregate,proto3" json:"sync_aggregate,omitempty"` - SignatureSlot github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot `protobuf:"varint,3,opt,name=signature_slot,json=signatureSlot,proto3" json:"signature_slot,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot"` + SignatureSlot github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot `protobuf:"varint,3,opt,name=signature_slot,json=signatureSlot,proto3" json:"signature_slot,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Slot"` unknownFields protoimpl.UnknownFields sizeCache protoimpl.SizeCache } @@ -637,11 +637,11 @@ func (x *LightClientOptimisticUpdateCapella) GetSyncAggregate() *SyncAggregate { return nil } -func (x *LightClientOptimisticUpdateCapella) GetSignatureSlot() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot { +func (x *LightClientOptimisticUpdateCapella) GetSignatureSlot() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot { if x != nil { return x.SignatureSlot } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot(0) } type LightClientHeaderCapella struct { @@ -772,7 +772,7 @@ type LightClientUpdateDeneb struct { FinalizedHeader *LightClientHeaderDeneb `protobuf:"bytes,4,opt,name=finalized_header,json=finalizedHeader,proto3" json:"finalized_header,omitempty"` FinalityBranch [][]byte `protobuf:"bytes,5,rep,name=finality_branch,json=finalityBranch,proto3" json:"finality_branch,omitempty" ssz-size:"6,32"` SyncAggregate *SyncAggregate `protobuf:"bytes,6,opt,name=sync_aggregate,json=syncAggregate,proto3" json:"sync_aggregate,omitempty"` - SignatureSlot github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot `protobuf:"varint,7,opt,name=signature_slot,json=signatureSlot,proto3" json:"signature_slot,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot"` + SignatureSlot github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot `protobuf:"varint,7,opt,name=signature_slot,json=signatureSlot,proto3" json:"signature_slot,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Slot"` unknownFields protoimpl.UnknownFields sizeCache protoimpl.SizeCache } @@ -849,11 +849,11 @@ func (x *LightClientUpdateDeneb) GetSyncAggregate() *SyncAggregate { return nil } -func (x *LightClientUpdateDeneb) GetSignatureSlot() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot { +func (x *LightClientUpdateDeneb) GetSignatureSlot() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot { if x != 
nil { return x.SignatureSlot } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot(0) } type LightClientFinalityUpdateDeneb struct { @@ -862,7 +862,7 @@ type LightClientFinalityUpdateDeneb struct { FinalizedHeader *LightClientHeaderDeneb `protobuf:"bytes,2,opt,name=finalized_header,json=finalizedHeader,proto3" json:"finalized_header,omitempty"` FinalityBranch [][]byte `protobuf:"bytes,3,rep,name=finality_branch,json=finalityBranch,proto3" json:"finality_branch,omitempty" ssz-size:"6,32"` SyncAggregate *SyncAggregate `protobuf:"bytes,4,opt,name=sync_aggregate,json=syncAggregate,proto3" json:"sync_aggregate,omitempty"` - SignatureSlot github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot `protobuf:"varint,5,opt,name=signature_slot,json=signatureSlot,proto3" json:"signature_slot,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot"` + SignatureSlot github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot `protobuf:"varint,5,opt,name=signature_slot,json=signatureSlot,proto3" json:"signature_slot,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Slot"` unknownFields protoimpl.UnknownFields sizeCache protoimpl.SizeCache } @@ -925,18 +925,18 @@ func (x *LightClientFinalityUpdateDeneb) GetSyncAggregate() *SyncAggregate { return nil } -func (x *LightClientFinalityUpdateDeneb) GetSignatureSlot() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot { +func (x *LightClientFinalityUpdateDeneb) GetSignatureSlot() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot { if x != nil { return x.SignatureSlot } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot(0) } type LightClientOptimisticUpdateDeneb struct { state protoimpl.MessageState `protogen:"open.v1"` AttestedHeader *LightClientHeaderDeneb `protobuf:"bytes,1,opt,name=attested_header,json=attestedHeader,proto3" json:"attested_header,omitempty"` SyncAggregate *SyncAggregate `protobuf:"bytes,2,opt,name=sync_aggregate,json=syncAggregate,proto3" json:"sync_aggregate,omitempty"` - SignatureSlot github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot `protobuf:"varint,3,opt,name=signature_slot,json=signatureSlot,proto3" json:"signature_slot,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot"` + SignatureSlot github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot `protobuf:"varint,3,opt,name=signature_slot,json=signatureSlot,proto3" json:"signature_slot,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Slot"` unknownFields protoimpl.UnknownFields sizeCache protoimpl.SizeCache } @@ -985,11 +985,11 @@ func (x *LightClientOptimisticUpdateDeneb) GetSyncAggregate() *SyncAggregate { return nil } -func (x *LightClientOptimisticUpdateDeneb) GetSignatureSlot() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot { +func (x *LightClientOptimisticUpdateDeneb) GetSignatureSlot() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot { if x != nil { return x.SignatureSlot } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot(0) } type LightClientHeaderDeneb struct { @@ -1120,7 +1120,7 @@ type LightClientUpdateElectra struct { FinalizedHeader 
*LightClientHeaderDeneb `protobuf:"bytes,4,opt,name=finalized_header,json=finalizedHeader,proto3" json:"finalized_header,omitempty"` FinalityBranch [][]byte `protobuf:"bytes,5,rep,name=finality_branch,json=finalityBranch,proto3" json:"finality_branch,omitempty" ssz-size:"7,32"` SyncAggregate *SyncAggregate `protobuf:"bytes,6,opt,name=sync_aggregate,json=syncAggregate,proto3" json:"sync_aggregate,omitempty"` - SignatureSlot github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot `protobuf:"varint,7,opt,name=signature_slot,json=signatureSlot,proto3" json:"signature_slot,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot"` + SignatureSlot github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot `protobuf:"varint,7,opt,name=signature_slot,json=signatureSlot,proto3" json:"signature_slot,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Slot"` unknownFields protoimpl.UnknownFields sizeCache protoimpl.SizeCache } @@ -1197,11 +1197,11 @@ func (x *LightClientUpdateElectra) GetSyncAggregate() *SyncAggregate { return nil } -func (x *LightClientUpdateElectra) GetSignatureSlot() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot { +func (x *LightClientUpdateElectra) GetSignatureSlot() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot { if x != nil { return x.SignatureSlot } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot(0) } type LightClientFinalityUpdateElectra struct { @@ -1210,7 +1210,7 @@ type LightClientFinalityUpdateElectra struct { FinalizedHeader *LightClientHeaderDeneb `protobuf:"bytes,2,opt,name=finalized_header,json=finalizedHeader,proto3" json:"finalized_header,omitempty"` FinalityBranch [][]byte `protobuf:"bytes,3,rep,name=finality_branch,json=finalityBranch,proto3" json:"finality_branch,omitempty" ssz-size:"7,32"` SyncAggregate *SyncAggregate `protobuf:"bytes,4,opt,name=sync_aggregate,json=syncAggregate,proto3" json:"sync_aggregate,omitempty"` - SignatureSlot github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot `protobuf:"varint,5,opt,name=signature_slot,json=signatureSlot,proto3" json:"signature_slot,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot"` + SignatureSlot github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot `protobuf:"varint,5,opt,name=signature_slot,json=signatureSlot,proto3" json:"signature_slot,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Slot"` unknownFields protoimpl.UnknownFields sizeCache protoimpl.SizeCache } @@ -1273,11 +1273,11 @@ func (x *LightClientFinalityUpdateElectra) GetSyncAggregate() *SyncAggregate { return nil } -func (x *LightClientFinalityUpdateElectra) GetSignatureSlot() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot { +func (x *LightClientFinalityUpdateElectra) GetSignatureSlot() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot { if x != nil { return x.SignatureSlot } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot(0) } var File_proto_prysm_v1alpha1_light_client_proto protoreflect.FileDescriptor @@ -1345,7 +1345,7 @@ var file_proto_prysm_v1alpha1_light_client_proto_rawDesc = []byte{ 0x12, 0x6b, 0x0a, 0x0e, 0x73, 0x69, 0x67, 0x6e, 0x61, 0x74, 0x75, 0x72, 0x65, 0x5f, 0x73, 0x6c, 0x6f, 0x74, 0x18, 0x07, 0x20, 
0x01, 0x28, 0x04, 0x42, 0x44, 0x82, 0xb5, 0x18, 0x40, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, - 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, + 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x53, 0x6c, 0x6f, 0x74, 0x52, 0x0d, 0x73, 0x69, 0x67, 0x6e, 0x61, 0x74, 0x75, 0x72, 0x65, 0x53, 0x6c, 0x6f, 0x74, 0x22, 0xc2, 0x03, @@ -1374,7 +1374,7 @@ var file_proto_prysm_v1alpha1_light_client_proto_rawDesc = []byte{ 0x72, 0x65, 0x5f, 0x73, 0x6c, 0x6f, 0x74, 0x18, 0x05, 0x20, 0x01, 0x28, 0x04, 0x42, 0x44, 0x82, 0xb5, 0x18, 0x40, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, - 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, + 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x53, 0x6c, 0x6f, 0x74, 0x52, 0x0d, 0x73, 0x69, 0x67, 0x6e, 0x61, 0x74, 0x75, 0x72, 0x65, 0x53, 0x6c, 0x6f, 0x74, 0x22, 0xb6, 0x02, 0x0a, 0x21, 0x4c, 0x69, 0x67, 0x68, 0x74, 0x43, 0x6c, 0x69, 0x65, @@ -1393,7 +1393,7 @@ var file_proto_prysm_v1alpha1_light_client_proto_rawDesc = []byte{ 0x0a, 0x0e, 0x73, 0x69, 0x67, 0x6e, 0x61, 0x74, 0x75, 0x72, 0x65, 0x5f, 0x73, 0x6c, 0x6f, 0x74, 0x18, 0x03, 0x20, 0x01, 0x28, 0x04, 0x42, 0x44, 0x82, 0xb5, 0x18, 0x40, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, - 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, + 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x53, 0x6c, 0x6f, 0x74, 0x52, 0x0d, 0x73, 0x69, 0x67, 0x6e, 0x61, 0x74, 0x75, 0x72, 0x65, 0x53, 0x6c, 0x6f, 0x74, 0x22, 0x5b, 0x0a, 0x17, 0x4c, @@ -1454,7 +1454,7 @@ var file_proto_prysm_v1alpha1_light_client_proto_rawDesc = []byte{ 0x67, 0x6e, 0x61, 0x74, 0x75, 0x72, 0x65, 0x5f, 0x73, 0x6c, 0x6f, 0x74, 0x18, 0x07, 0x20, 0x01, 0x28, 0x04, 0x42, 0x44, 0x82, 0xb5, 0x18, 0x40, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, - 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, + 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x53, 0x6c, 0x6f, 0x74, 0x52, 0x0d, 0x73, 0x69, 0x67, 0x6e, 0x61, 0x74, 0x75, 0x72, 0x65, 0x53, 0x6c, 0x6f, 0x74, 0x22, 0xc5, 0x03, 0x0a, 0x20, 0x4c, 0x69, 0x67, 0x68, @@ -1482,7 +1482,7 @@ var file_proto_prysm_v1alpha1_light_client_proto_rawDesc = []byte{ 0x74, 0x65, 0x12, 0x6b, 0x0a, 0x0e, 0x73, 0x69, 0x67, 0x6e, 0x61, 0x74, 0x75, 0x72, 0x65, 0x5f, 0x73, 0x6c, 0x6f, 0x74, 0x18, 0x05, 0x20, 0x01, 0x28, 0x04, 0x42, 0x44, 0x82, 0xb5, 0x18, 0x40, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 
0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, - 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, + 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x53, 0x6c, 0x6f, 0x74, 0x52, 0x0d, 0x73, 0x69, 0x67, 0x6e, 0x61, 0x74, 0x75, 0x72, 0x65, 0x53, 0x6c, 0x6f, 0x74, 0x22, @@ -1502,7 +1502,7 @@ var file_proto_prysm_v1alpha1_light_client_proto_rawDesc = []byte{ 0x0e, 0x73, 0x69, 0x67, 0x6e, 0x61, 0x74, 0x75, 0x72, 0x65, 0x5f, 0x73, 0x6c, 0x6f, 0x74, 0x18, 0x03, 0x20, 0x01, 0x28, 0x04, 0x42, 0x44, 0x82, 0xb5, 0x18, 0x40, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, - 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, + 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x53, 0x6c, 0x6f, 0x74, 0x52, 0x0d, 0x73, 0x69, 0x67, 0x6e, 0x61, 0x74, 0x75, 0x72, 0x65, 0x53, 0x6c, 0x6f, 0x74, 0x22, 0xe2, 0x01, 0x0a, 0x18, 0x4c, @@ -1571,7 +1571,7 @@ var file_proto_prysm_v1alpha1_light_client_proto_rawDesc = []byte{ 0x74, 0x75, 0x72, 0x65, 0x5f, 0x73, 0x6c, 0x6f, 0x74, 0x18, 0x07, 0x20, 0x01, 0x28, 0x04, 0x42, 0x44, 0x82, 0xb5, 0x18, 0x40, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, - 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, + 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x53, 0x6c, 0x6f, 0x74, 0x52, 0x0d, 0x73, 0x69, 0x67, 0x6e, 0x61, 0x74, 0x75, 0x72, 0x65, 0x53, 0x6c, 0x6f, 0x74, 0x22, 0xbf, 0x03, 0x0a, 0x1e, 0x4c, 0x69, 0x67, 0x68, 0x74, 0x43, 0x6c, @@ -1599,7 +1599,7 @@ var file_proto_prysm_v1alpha1_light_client_proto_rawDesc = []byte{ 0x6e, 0x61, 0x74, 0x75, 0x72, 0x65, 0x5f, 0x73, 0x6c, 0x6f, 0x74, 0x18, 0x05, 0x20, 0x01, 0x28, 0x04, 0x42, 0x44, 0x82, 0xb5, 0x18, 0x40, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, - 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, + 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x53, 0x6c, 0x6f, 0x74, 0x52, 0x0d, 0x73, 0x69, 0x67, 0x6e, 0x61, 0x74, 0x75, 0x72, 0x65, 0x53, 0x6c, 0x6f, 0x74, 0x22, 0xb4, 0x02, 0x0a, 0x20, 0x4c, 0x69, 0x67, 0x68, 0x74, @@ -1618,7 +1618,7 @@ var file_proto_prysm_v1alpha1_light_client_proto_rawDesc = []byte{ 0x12, 0x6b, 0x0a, 0x0e, 0x73, 0x69, 0x67, 0x6e, 0x61, 0x74, 0x75, 0x72, 0x65, 0x5f, 0x73, 0x6c, 0x6f, 0x74, 0x18, 0x03, 0x20, 0x01, 0x28, 0x04, 0x42, 0x44, 0x82, 0xb5, 0x18, 0x40, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, - 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 
0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, + 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x53, 0x6c, 0x6f, 0x74, 0x52, 0x0d, 0x73, 0x69, 0x67, 0x6e, 0x61, 0x74, 0x75, 0x72, 0x65, 0x53, 0x6c, 0x6f, 0x74, 0x22, 0xde, 0x01, @@ -1687,7 +1687,7 @@ var file_proto_prysm_v1alpha1_light_client_proto_rawDesc = []byte{ 0x67, 0x6e, 0x61, 0x74, 0x75, 0x72, 0x65, 0x5f, 0x73, 0x6c, 0x6f, 0x74, 0x18, 0x07, 0x20, 0x01, 0x28, 0x04, 0x42, 0x44, 0x82, 0xb5, 0x18, 0x40, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, - 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, + 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x53, 0x6c, 0x6f, 0x74, 0x52, 0x0d, 0x73, 0x69, 0x67, 0x6e, 0x61, 0x74, 0x75, 0x72, 0x65, 0x53, 0x6c, 0x6f, 0x74, 0x22, 0xc1, 0x03, 0x0a, 0x20, 0x4c, 0x69, 0x67, 0x68, @@ -1715,7 +1715,7 @@ var file_proto_prysm_v1alpha1_light_client_proto_rawDesc = []byte{ 0x0a, 0x0e, 0x73, 0x69, 0x67, 0x6e, 0x61, 0x74, 0x75, 0x72, 0x65, 0x5f, 0x73, 0x6c, 0x6f, 0x74, 0x18, 0x05, 0x20, 0x01, 0x28, 0x04, 0x42, 0x44, 0x82, 0xb5, 0x18, 0x40, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, - 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, + 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x53, 0x6c, 0x6f, 0x74, 0x52, 0x0d, 0x73, 0x69, 0x67, 0x6e, 0x61, 0x74, 0x75, 0x72, 0x65, 0x53, 0x6c, 0x6f, 0x74, 0x42, 0x98, 0x01, 0x0a, 0x19, @@ -1723,7 +1723,7 @@ var file_proto_prysm_v1alpha1_light_client_proto_rawDesc = []byte{ 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x42, 0x10, 0x4c, 0x69, 0x67, 0x68, 0x74, 0x43, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x50, 0x72, 0x6f, 0x74, 0x6f, 0x50, 0x01, 0x5a, 0x37, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, - 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, + 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2f, 0x65, 0x74, 0x68, 0x2f, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x3b, 0x65, 0x74, 0x68, 0xaa, 0x02, 0x15, 0x45, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x45, 0x74, 0x68, 0x2e, 0x56, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0xca, 0x02, diff --git a/proto/prysm/v1alpha1/light_client.proto b/proto/prysm/v1alpha1/light_client.proto index 9379cab450..e737b395c4 100644 --- a/proto/prysm/v1alpha1/light_client.proto +++ b/proto/prysm/v1alpha1/light_client.proto @@ -20,7 +20,7 @@ import "proto/prysm/v1alpha1/beacon_core_types.proto"; import "proto/engine/v1/execution_engine.proto"; option csharp_namespace = "Ethereum.Eth.V1alpha1"; -option go_package = "github.com/OffchainLabs/prysm/v6/proto/eth/v1alpha1;eth"; +option go_package = 
"github.com/OffchainLabs/prysm/v7/proto/eth/v1alpha1;eth"; option java_multiple_files = true; option java_outer_classname = "LightClientProto"; option java_package = "org.ethereum.eth.v1alpha1"; @@ -47,7 +47,7 @@ message LightClientUpdateAltair { SyncAggregate sync_aggregate = 6; uint64 signature_slot = 7 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Slot" ]; } @@ -58,7 +58,7 @@ message LightClientFinalityUpdateAltair { SyncAggregate sync_aggregate = 4; uint64 signature_slot = 5 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Slot" ]; } @@ -67,7 +67,7 @@ message LightClientOptimisticUpdateAltair { SyncAggregate sync_aggregate = 2; uint64 signature_slot = 3 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Slot" ]; } @@ -94,7 +94,7 @@ message LightClientUpdateCapella { SyncAggregate sync_aggregate = 6; uint64 signature_slot = 7 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Slot" ]; } @@ -105,7 +105,7 @@ message LightClientFinalityUpdateCapella { SyncAggregate sync_aggregate = 4; uint64 signature_slot = 5 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Slot" ]; } @@ -114,7 +114,7 @@ message LightClientOptimisticUpdateCapella { SyncAggregate sync_aggregate = 2; uint64 signature_slot = 3 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Slot" ]; } @@ -145,7 +145,7 @@ message LightClientUpdateDeneb { SyncAggregate sync_aggregate = 6; uint64 signature_slot = 7 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Slot" ]; } @@ -156,7 +156,7 @@ message LightClientFinalityUpdateDeneb { SyncAggregate sync_aggregate = 4; uint64 signature_slot = 5 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Slot" ]; } @@ -165,7 +165,7 @@ message LightClientOptimisticUpdateDeneb { SyncAggregate sync_aggregate = 2; uint64 signature_slot = 3 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Slot" ]; } @@ -196,7 +196,7 @@ message LightClientUpdateElectra { SyncAggregate sync_aggregate = 6; uint64 signature_slot = 7 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Slot" ]; } @@ -207,6 +207,6 @@ message LightClientFinalityUpdateElectra { SyncAggregate sync_aggregate = 4; uint64 signature_slot = 5 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Slot" ]; } \ No newline at end of file diff --git a/proto/prysm/v1alpha1/metadata/BUILD.bazel 
b/proto/prysm/v1alpha1/metadata/BUILD.bazel index 31c6b89231..81c3480e07 100644 --- a/proto/prysm/v1alpha1/metadata/BUILD.bazel +++ b/proto/prysm/v1alpha1/metadata/BUILD.bazel @@ -3,7 +3,7 @@ load("@prysm//tools/go:def.bzl", "go_library") go_library( name = "go_default_library", srcs = ["metadata_interfaces.go"], - importpath = "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1/metadata", + importpath = "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1/metadata", visibility = ["//visibility:public"], deps = [ "//proto/prysm/v1alpha1:go_default_library", diff --git a/proto/prysm/v1alpha1/metadata/metadata_interfaces.go b/proto/prysm/v1alpha1/metadata/metadata_interfaces.go index 60d6d892d9..dec759f4ce 100644 --- a/proto/prysm/v1alpha1/metadata/metadata_interfaces.go +++ b/proto/prysm/v1alpha1/metadata/metadata_interfaces.go @@ -2,7 +2,7 @@ package metadata import ( "github.com/OffchainLabs/go-bitfield" - pb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + pb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" ssz "github.com/prysmaticlabs/fastssz" ) diff --git a/proto/prysm/v1alpha1/node.pb.go b/proto/prysm/v1alpha1/node.pb.go index 71e92379c6..f4d1b36c4d 100755 --- a/proto/prysm/v1alpha1/node.pb.go +++ b/proto/prysm/v1alpha1/node.pb.go @@ -11,7 +11,7 @@ import ( reflect "reflect" sync "sync" - _ "github.com/OffchainLabs/prysm/v6/proto/eth/ext" + _ "github.com/OffchainLabs/prysm/v7/proto/eth/ext" _ "google.golang.org/genproto/googleapis/api/annotations" grpc "google.golang.org/grpc" codes "google.golang.org/grpc/codes" @@ -836,7 +836,7 @@ var file_proto_prysm_v1alpha1_node_proto_rawDesc = []byte{ 0x70, 0x68, 0x61, 0x31, 0x42, 0x09, 0x4e, 0x6f, 0x64, 0x65, 0x50, 0x72, 0x6f, 0x74, 0x6f, 0x50, 0x01, 0x5a, 0x39, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, - 0x2f, 0x76, 0x36, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, + 0x2f, 0x76, 0x37, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x3b, 0x65, 0x74, 0x68, 0xaa, 0x02, 0x15, 0x45, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x45, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0xca, 0x02, 0x15, 0x45, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x5c, diff --git a/proto/prysm/v1alpha1/node.proto b/proto/prysm/v1alpha1/node.proto index d2db4bfa36..b57d7efecf 100644 --- a/proto/prysm/v1alpha1/node.proto +++ b/proto/prysm/v1alpha1/node.proto @@ -22,7 +22,7 @@ import "google/protobuf/timestamp.proto"; import "proto/eth/ext/options.proto"; option csharp_namespace = "Ethereum.Eth.v1alpha1"; -option go_package = "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1;eth"; +option go_package = "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1;eth"; option java_multiple_files = true; option java_outer_classname = "NodeProto"; option java_package = "org.ethereum.eth.v1alpha1"; diff --git a/proto/prysm/v1alpha1/non-core.ssz.go b/proto/prysm/v1alpha1/non-core.ssz.go index fb992c901e..db7f3da394 100644 --- a/proto/prysm/v1alpha1/non-core.ssz.go +++ b/proto/prysm/v1alpha1/non-core.ssz.go @@ -2,8 +2,8 @@ package eth import ( - github_com_OffchainLabs_prysm_v6_consensus_types_primitives "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - v1 "github.com/OffchainLabs/prysm/v6/proto/engine/v1" + github_com_OffchainLabs_prysm_v7_consensus_types_primitives 
"github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + v1 "github.com/OffchainLabs/prysm/v7/proto/engine/v1" ssz "github.com/prysmaticlabs/fastssz" ) @@ -458,7 +458,7 @@ func (b *BeaconBlocksByRangeRequest) UnmarshalSSZ(buf []byte) error { } // Field (0) 'StartSlot' - b.StartSlot = github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot(ssz.UnmarshallUint64(buf[0:8])) + b.StartSlot = github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot(ssz.UnmarshallUint64(buf[0:8])) // Field (1) 'Count' b.Count = ssz.UnmarshallUint64(buf[8:16]) @@ -786,7 +786,7 @@ func (b *BlobSidecarsByRangeRequest) UnmarshalSSZ(buf []byte) error { } // Field (0) 'StartSlot' - b.StartSlot = github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot(ssz.UnmarshallUint64(buf[0:8])) + b.StartSlot = github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot(ssz.UnmarshallUint64(buf[0:8])) // Field (1) 'Count' b.Count = ssz.UnmarshallUint64(buf[8:16]) @@ -863,7 +863,7 @@ func (d *DataColumnSidecarsByRangeRequest) UnmarshalSSZ(buf []byte) error { var o2 uint64 // Field (0) 'StartSlot' - d.StartSlot = github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot(ssz.UnmarshallUint64(buf[0:8])) + d.StartSlot = github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot(ssz.UnmarshallUint64(buf[0:8])) // Field (1) 'Count' d.Count = ssz.UnmarshallUint64(buf[8:16]) diff --git a/proto/prysm/v1alpha1/p2p_messages.pb.go b/proto/prysm/v1alpha1/p2p_messages.pb.go index 32ad7c62ec..8e8bcd0ce5 100755 --- a/proto/prysm/v1alpha1/p2p_messages.pb.go +++ b/proto/prysm/v1alpha1/p2p_messages.pb.go @@ -11,8 +11,8 @@ import ( sync "sync" github_com_OffchainLabs_go_bitfield "github.com/OffchainLabs/go-bitfield" - github_com_OffchainLabs_prysm_v6_consensus_types_primitives "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - _ "github.com/OffchainLabs/prysm/v6/proto/eth/ext" + github_com_OffchainLabs_prysm_v7_consensus_types_primitives "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + _ "github.com/OffchainLabs/prysm/v7/proto/eth/ext" protoreflect "google.golang.org/protobuf/reflect/protoreflect" protoimpl "google.golang.org/protobuf/runtime/protoimpl" _ "google.golang.org/protobuf/types/descriptorpb" @@ -29,9 +29,9 @@ type Status struct { state protoimpl.MessageState `protogen:"open.v1"` ForkDigest []byte `protobuf:"bytes,1,opt,name=fork_digest,json=forkDigest,proto3" json:"fork_digest,omitempty" ssz-size:"4"` FinalizedRoot []byte `protobuf:"bytes,2,opt,name=finalized_root,json=finalizedRoot,proto3" json:"finalized_root,omitempty" ssz-size:"32"` - FinalizedEpoch github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch `protobuf:"varint,3,opt,name=finalized_epoch,json=finalizedEpoch,proto3" json:"finalized_epoch,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Epoch"` + FinalizedEpoch github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch `protobuf:"varint,3,opt,name=finalized_epoch,json=finalizedEpoch,proto3" json:"finalized_epoch,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Epoch"` HeadRoot []byte `protobuf:"bytes,4,opt,name=head_root,json=headRoot,proto3" json:"head_root,omitempty" ssz-size:"32"` - HeadSlot github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot `protobuf:"varint,5,opt,name=head_slot,json=headSlot,proto3" json:"head_slot,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot"` + HeadSlot 
github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot `protobuf:"varint,5,opt,name=head_slot,json=headSlot,proto3" json:"head_slot,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Slot"` unknownFields protoimpl.UnknownFields sizeCache protoimpl.SizeCache } @@ -80,11 +80,11 @@ func (x *Status) GetFinalizedRoot() []byte { return nil } -func (x *Status) GetFinalizedEpoch() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch { +func (x *Status) GetFinalizedEpoch() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch { if x != nil { return x.FinalizedEpoch } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch(0) } func (x *Status) GetHeadRoot() []byte { @@ -94,21 +94,21 @@ func (x *Status) GetHeadRoot() []byte { return nil } -func (x *Status) GetHeadSlot() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot { +func (x *Status) GetHeadSlot() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot { if x != nil { return x.HeadSlot } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot(0) } type StatusV2 struct { state protoimpl.MessageState `protogen:"open.v1"` ForkDigest []byte `protobuf:"bytes,1,opt,name=fork_digest,json=forkDigest,proto3" json:"fork_digest,omitempty" ssz-size:"4"` FinalizedRoot []byte `protobuf:"bytes,2,opt,name=finalized_root,json=finalizedRoot,proto3" json:"finalized_root,omitempty" ssz-size:"32"` - FinalizedEpoch github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch `protobuf:"varint,3,opt,name=finalized_epoch,json=finalizedEpoch,proto3" json:"finalized_epoch,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Epoch"` + FinalizedEpoch github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch `protobuf:"varint,3,opt,name=finalized_epoch,json=finalizedEpoch,proto3" json:"finalized_epoch,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Epoch"` HeadRoot []byte `protobuf:"bytes,4,opt,name=head_root,json=headRoot,proto3" json:"head_root,omitempty" ssz-size:"32"` - HeadSlot github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot `protobuf:"varint,5,opt,name=head_slot,json=headSlot,proto3" json:"head_slot,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot"` - EarliestAvailableSlot github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot `protobuf:"varint,6,opt,name=earliest_available_slot,json=earliestAvailableSlot,proto3" json:"earliest_available_slot,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot"` + HeadSlot github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot `protobuf:"varint,5,opt,name=head_slot,json=headSlot,proto3" json:"head_slot,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Slot"` + EarliestAvailableSlot github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot `protobuf:"varint,6,opt,name=earliest_available_slot,json=earliestAvailableSlot,proto3" json:"earliest_available_slot,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Slot"` unknownFields protoimpl.UnknownFields sizeCache protoimpl.SizeCache } @@ -157,11 +157,11 @@ func (x *StatusV2) GetFinalizedRoot() []byte { return nil } -func (x *StatusV2) GetFinalizedEpoch() 
github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch { +func (x *StatusV2) GetFinalizedEpoch() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch { if x != nil { return x.FinalizedEpoch } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch(0) } func (x *StatusV2) GetHeadRoot() []byte { @@ -171,23 +171,23 @@ func (x *StatusV2) GetHeadRoot() []byte { return nil } -func (x *StatusV2) GetHeadSlot() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot { +func (x *StatusV2) GetHeadSlot() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot { if x != nil { return x.HeadSlot } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot(0) } -func (x *StatusV2) GetEarliestAvailableSlot() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot { +func (x *StatusV2) GetEarliestAvailableSlot() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot { if x != nil { return x.EarliestAvailableSlot } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot(0) } type BeaconBlocksByRangeRequest struct { state protoimpl.MessageState `protogen:"open.v1"` - StartSlot github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot `protobuf:"varint,1,opt,name=start_slot,json=startSlot,proto3" json:"start_slot,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot"` + StartSlot github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot `protobuf:"varint,1,opt,name=start_slot,json=startSlot,proto3" json:"start_slot,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Slot"` Count uint64 `protobuf:"varint,2,opt,name=count,proto3" json:"count,omitempty"` Step uint64 `protobuf:"varint,3,opt,name=step,proto3" json:"step,omitempty"` unknownFields protoimpl.UnknownFields @@ -224,11 +224,11 @@ func (*BeaconBlocksByRangeRequest) Descriptor() ([]byte, []int) { return file_proto_prysm_v1alpha1_p2p_messages_proto_rawDescGZIP(), []int{2} } -func (x *BeaconBlocksByRangeRequest) GetStartSlot() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot { +func (x *BeaconBlocksByRangeRequest) GetStartSlot() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot { if x != nil { return x.StartSlot } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot(0) } func (x *BeaconBlocksByRangeRequest) GetCount() uint64 { @@ -249,7 +249,7 @@ type ENRForkID struct { state protoimpl.MessageState `protogen:"open.v1"` CurrentForkDigest []byte `protobuf:"bytes,1,opt,name=current_fork_digest,json=currentForkDigest,proto3" json:"current_fork_digest,omitempty" ssz-size:"4"` NextForkVersion []byte `protobuf:"bytes,2,opt,name=next_fork_version,json=nextForkVersion,proto3" json:"next_fork_version,omitempty" ssz-size:"4"` - NextForkEpoch github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch `protobuf:"varint,3,opt,name=next_fork_epoch,json=nextForkEpoch,proto3" json:"next_fork_epoch,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Epoch"` + NextForkEpoch github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch 
`protobuf:"varint,3,opt,name=next_fork_epoch,json=nextForkEpoch,proto3" json:"next_fork_epoch,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Epoch"` unknownFields protoimpl.UnknownFields sizeCache protoimpl.SizeCache } @@ -298,11 +298,11 @@ func (x *ENRForkID) GetNextForkVersion() []byte { return nil } -func (x *ENRForkID) GetNextForkEpoch() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch { +func (x *ENRForkID) GetNextForkEpoch() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch { if x != nil { return x.NextForkEpoch } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch(0) } type MetaDataV0 struct { @@ -487,7 +487,7 @@ func (x *MetaDataV2) GetCustodyGroupCount() uint64 { type BlobSidecarsByRangeRequest struct { state protoimpl.MessageState `protogen:"open.v1"` - StartSlot github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot `protobuf:"varint,1,opt,name=start_slot,json=startSlot,proto3" json:"start_slot,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot"` + StartSlot github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot `protobuf:"varint,1,opt,name=start_slot,json=startSlot,proto3" json:"start_slot,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Slot"` Count uint64 `protobuf:"varint,2,opt,name=count,proto3" json:"count,omitempty"` unknownFields protoimpl.UnknownFields sizeCache protoimpl.SizeCache @@ -523,11 +523,11 @@ func (*BlobSidecarsByRangeRequest) Descriptor() ([]byte, []int) { return file_proto_prysm_v1alpha1_p2p_messages_proto_rawDescGZIP(), []int{7} } -func (x *BlobSidecarsByRangeRequest) GetStartSlot() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot { +func (x *BlobSidecarsByRangeRequest) GetStartSlot() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot { if x != nil { return x.StartSlot } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot(0) } func (x *BlobSidecarsByRangeRequest) GetCount() uint64 { @@ -539,7 +539,7 @@ func (x *BlobSidecarsByRangeRequest) GetCount() uint64 { type DataColumnSidecarsByRangeRequest struct { state protoimpl.MessageState `protogen:"open.v1"` - StartSlot github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot `protobuf:"varint,1,opt,name=start_slot,json=startSlot,proto3" json:"start_slot,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot"` + StartSlot github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot `protobuf:"varint,1,opt,name=start_slot,json=startSlot,proto3" json:"start_slot,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Slot"` Count uint64 `protobuf:"varint,2,opt,name=count,proto3" json:"count,omitempty"` Columns []uint64 `protobuf:"varint,3,rep,packed,name=columns,proto3" json:"columns,omitempty" ssz-max:"128"` unknownFields protoimpl.UnknownFields @@ -576,11 +576,11 @@ func (*DataColumnSidecarsByRangeRequest) Descriptor() ([]byte, []int) { return file_proto_prysm_v1alpha1_p2p_messages_proto_rawDescGZIP(), []int{8} } -func (x *DataColumnSidecarsByRangeRequest) GetStartSlot() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot { +func (x *DataColumnSidecarsByRangeRequest) GetStartSlot() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot { if 
x != nil { return x.StartSlot } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot(0) } func (x *DataColumnSidecarsByRangeRequest) GetCount() uint64 { @@ -669,7 +669,7 @@ var file_proto_prysm_v1alpha1_p2p_messages_proto_rawDesc = []byte{ 0x74, 0x12, 0x6e, 0x0a, 0x0f, 0x66, 0x69, 0x6e, 0x61, 0x6c, 0x69, 0x7a, 0x65, 0x64, 0x5f, 0x65, 0x70, 0x6f, 0x63, 0x68, 0x18, 0x03, 0x20, 0x01, 0x28, 0x04, 0x42, 0x45, 0x82, 0xb5, 0x18, 0x41, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, - 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, + 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x45, 0x70, 0x6f, 0x63, 0x68, 0x52, 0x0e, 0x66, 0x69, 0x6e, 0x61, 0x6c, 0x69, 0x7a, 0x65, 0x64, 0x45, 0x70, 0x6f, 0x63, @@ -678,7 +678,7 @@ var file_proto_prysm_v1alpha1_p2p_messages_proto_rawDesc = []byte{ 0x61, 0x64, 0x52, 0x6f, 0x6f, 0x74, 0x12, 0x61, 0x0a, 0x09, 0x68, 0x65, 0x61, 0x64, 0x5f, 0x73, 0x6c, 0x6f, 0x74, 0x18, 0x05, 0x20, 0x01, 0x28, 0x04, 0x42, 0x44, 0x82, 0xb5, 0x18, 0x40, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, - 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, + 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x53, 0x6c, 0x6f, 0x74, 0x52, 0x08, 0x68, 0x65, 0x61, 0x64, 0x53, 0x6c, 0x6f, 0x74, 0x22, 0xd7, 0x03, 0x0a, 0x08, 0x53, 0x74, @@ -691,7 +691,7 @@ var file_proto_prysm_v1alpha1_p2p_messages_proto_rawDesc = []byte{ 0x0f, 0x66, 0x69, 0x6e, 0x61, 0x6c, 0x69, 0x7a, 0x65, 0x64, 0x5f, 0x65, 0x70, 0x6f, 0x63, 0x68, 0x18, 0x03, 0x20, 0x01, 0x28, 0x04, 0x42, 0x45, 0x82, 0xb5, 0x18, 0x41, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, - 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, + 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x45, 0x70, 0x6f, 0x63, 0x68, 0x52, 0x0e, 0x66, 0x69, 0x6e, 0x61, 0x6c, 0x69, 0x7a, 0x65, 0x64, 0x45, 0x70, 0x6f, 0x63, 0x68, 0x12, 0x23, 0x0a, @@ -700,14 +700,14 @@ var file_proto_prysm_v1alpha1_p2p_messages_proto_rawDesc = []byte{ 0x6f, 0x74, 0x12, 0x61, 0x0a, 0x09, 0x68, 0x65, 0x61, 0x64, 0x5f, 0x73, 0x6c, 0x6f, 0x74, 0x18, 0x05, 0x20, 0x01, 0x28, 0x04, 0x42, 0x44, 0x82, 0xb5, 0x18, 0x40, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, - 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, + 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x53, 0x6c, 
0x6f, 0x74, 0x52, 0x08, 0x68, 0x65, 0x61, 0x64, 0x53, 0x6c, 0x6f, 0x74, 0x12, 0x7c, 0x0a, 0x17, 0x65, 0x61, 0x72, 0x6c, 0x69, 0x65, 0x73, 0x74, 0x5f, 0x61, 0x76, 0x61, 0x69, 0x6c, 0x61, 0x62, 0x6c, 0x65, 0x5f, 0x73, 0x6c, 0x6f, 0x74, 0x18, 0x06, 0x20, 0x01, 0x28, 0x04, 0x42, 0x44, 0x82, 0xb5, 0x18, 0x40, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, - 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, + 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x53, 0x6c, 0x6f, 0x74, 0x52, 0x15, 0x65, 0x61, 0x72, 0x6c, 0x69, 0x65, 0x73, 0x74, 0x41, 0x76, 0x61, 0x69, 0x6c, 0x61, 0x62, 0x6c, 0x65, 0x53, @@ -716,7 +716,7 @@ var file_proto_prysm_v1alpha1_p2p_messages_proto_rawDesc = []byte{ 0x73, 0x74, 0x12, 0x63, 0x0a, 0x0a, 0x73, 0x74, 0x61, 0x72, 0x74, 0x5f, 0x73, 0x6c, 0x6f, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, 0x42, 0x44, 0x82, 0xb5, 0x18, 0x40, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, - 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, + 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x53, 0x6c, 0x6f, 0x74, 0x52, 0x09, 0x73, 0x74, 0x61, 0x72, 0x74, 0x53, 0x6c, 0x6f, 0x74, 0x12, 0x14, 0x0a, 0x05, 0x63, 0x6f, 0x75, 0x6e, 0x74, @@ -733,7 +733,7 @@ var file_proto_prysm_v1alpha1_p2p_messages_proto_rawDesc = []byte{ 0x74, 0x5f, 0x66, 0x6f, 0x72, 0x6b, 0x5f, 0x65, 0x70, 0x6f, 0x63, 0x68, 0x18, 0x03, 0x20, 0x01, 0x28, 0x04, 0x42, 0x45, 0x82, 0xb5, 0x18, 0x41, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, - 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, + 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x45, 0x70, 0x6f, 0x63, 0x68, 0x52, 0x0d, 0x6e, 0x65, 0x78, 0x74, 0x46, 0x6f, 0x72, 0x6b, 0x45, 0x70, 0x6f, 0x63, 0x68, 0x22, 0x7f, 0x0a, 0x0a, 0x4d, 0x65, 0x74, 0x61, @@ -779,7 +779,7 @@ var file_proto_prysm_v1alpha1_p2p_messages_proto_rawDesc = []byte{ 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x63, 0x0a, 0x0a, 0x73, 0x74, 0x61, 0x72, 0x74, 0x5f, 0x73, 0x6c, 0x6f, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, 0x42, 0x44, 0x82, 0xb5, 0x18, 0x40, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, - 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, + 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x53, 0x6c, 0x6f, 0x74, 0x52, 0x09, 0x73, 0x74, 0x61, 0x72, 0x74, 0x53, 0x6c, 0x6f, 0x74, 0x12, 0x14, 0x0a, 0x05, 0x63, @@ -789,7 +789,7 @@ var 
file_proto_prysm_v1alpha1_p2p_messages_proto_rawDesc = []byte{ 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x63, 0x0a, 0x0a, 0x73, 0x74, 0x61, 0x72, 0x74, 0x5f, 0x73, 0x6c, 0x6f, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, 0x42, 0x44, 0x82, 0xb5, 0x18, 0x40, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, - 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, + 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x53, 0x6c, 0x6f, 0x74, 0x52, 0x09, 0x73, 0x74, 0x61, 0x72, 0x74, 0x53, 0x6c, 0x6f, 0x74, 0x12, 0x14, 0x0a, 0x05, 0x63, @@ -807,7 +807,7 @@ var file_proto_prysm_v1alpha1_p2p_messages_proto_rawDesc = []byte{ 0x10, 0x50, 0x32, 0x50, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x73, 0x50, 0x72, 0x6f, 0x74, 0x6f, 0x50, 0x01, 0x5a, 0x39, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, - 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2f, 0x70, 0x72, 0x79, 0x73, + 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x3b, 0x65, 0x74, 0x68, 0xaa, 0x02, 0x15, 0x45, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x45, 0x74, 0x68, 0x2e, 0x56, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0xca, 0x02, 0x15, 0x45, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, diff --git a/proto/prysm/v1alpha1/p2p_messages.proto b/proto/prysm/v1alpha1/p2p_messages.proto index 620e07b403..1852eca531 100644 --- a/proto/prysm/v1alpha1/p2p_messages.proto +++ b/proto/prysm/v1alpha1/p2p_messages.proto @@ -6,7 +6,7 @@ import "google/protobuf/descriptor.proto"; import "proto/eth/ext/options.proto"; option csharp_namespace = "Ethereum.Eth.V1alpha1"; -option go_package = "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1;eth"; +option go_package = "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1;eth"; option java_multiple_files = true; option java_outer_classname = "P2PMessagesProto"; option java_package = "org.ethereum.eth.v1alpha1"; @@ -17,12 +17,12 @@ message Status { bytes finalized_root = 2 [(ethereum.eth.ext.ssz_size) = "32"]; uint64 finalized_epoch = 3 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Epoch" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Epoch" ]; bytes head_root = 4 [(ethereum.eth.ext.ssz_size) = "32"]; uint64 head_slot = 5 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Slot" ]; } @@ -31,23 +31,23 @@ message StatusV2 { bytes finalized_root = 2 [(ethereum.eth.ext.ssz_size) = "32"]; uint64 finalized_epoch = 3 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Epoch" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Epoch" ]; bytes head_root = 4 [(ethereum.eth.ext.ssz_size) = "32"]; uint64 head_slot = 5 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Slot" ]; uint64 earliest_available_slot = 6 [ (ethereum.eth.ext.cast_type) = - 
"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Slot" ]; } message BeaconBlocksByRangeRequest { uint64 start_slot = 1 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Slot" ]; uint64 count = 2; uint64 step = 3; @@ -58,7 +58,7 @@ message ENRForkID { bytes next_fork_version = 2 [(ethereum.eth.ext.ssz_size) = "4"]; uint64 next_fork_epoch = 3 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Epoch" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Epoch" ]; } @@ -137,7 +137,7 @@ message MetaDataV2 { message BlobSidecarsByRangeRequest { uint64 start_slot = 1 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Slot" ]; uint64 count = 2; } @@ -153,7 +153,7 @@ Spec Definition: message DataColumnSidecarsByRangeRequest { uint64 start_slot = 1 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Slot" ]; uint64 count = 2; repeated uint64 columns = 3 [(ethereum.eth.ext.ssz_max) = "128"]; diff --git a/proto/prysm/v1alpha1/phase0.ssz.go b/proto/prysm/v1alpha1/phase0.ssz.go index f694dd6a88..1bcab26d61 100644 --- a/proto/prysm/v1alpha1/phase0.ssz.go +++ b/proto/prysm/v1alpha1/phase0.ssz.go @@ -2,7 +2,7 @@ package eth import ( - github_com_OffchainLabs_prysm_v6_consensus_types_primitives "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" + github_com_OffchainLabs_prysm_v7_consensus_types_primitives "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" ssz "github.com/prysmaticlabs/fastssz" ) @@ -162,7 +162,7 @@ func (a *AggregateAttestationAndProof) UnmarshalSSZ(buf []byte) error { var o1 uint64 // Field (0) 'AggregatorIndex' - a.AggregatorIndex = github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex(ssz.UnmarshallUint64(buf[0:8])) + a.AggregatorIndex = github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex(ssz.UnmarshallUint64(buf[0:8])) // Offset (1) 'Aggregate' if o1 = ssz.ReadOffset(buf[8:12]); o1 > size { @@ -412,10 +412,10 @@ func (a *AttestationData) UnmarshalSSZ(buf []byte) error { } // Field (0) 'Slot' - a.Slot = github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot(ssz.UnmarshallUint64(buf[0:8])) + a.Slot = github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot(ssz.UnmarshallUint64(buf[0:8])) // Field (1) 'CommitteeIndex' - a.CommitteeIndex = github_com_OffchainLabs_prysm_v6_consensus_types_primitives.CommitteeIndex(ssz.UnmarshallUint64(buf[8:16])) + a.CommitteeIndex = github_com_OffchainLabs_prysm_v7_consensus_types_primitives.CommitteeIndex(ssz.UnmarshallUint64(buf[8:16])) // Field (2) 'BeaconBlockRoot' if cap(a.BeaconBlockRoot) == 0 { @@ -515,7 +515,7 @@ func (c *Checkpoint) UnmarshalSSZ(buf []byte) error { } // Field (0) 'Epoch' - c.Epoch = github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch(ssz.UnmarshallUint64(buf[0:8])) + c.Epoch = github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch(ssz.UnmarshallUint64(buf[0:8])) // Field (1) 'Root' if cap(c.Root) == 0 { @@ -721,10 +721,10 @@ func (b *BeaconBlock) UnmarshalSSZ(buf []byte) error { var o4 uint64 // Field (0) 'Slot' - b.Slot = 
github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot(ssz.UnmarshallUint64(buf[0:8])) + b.Slot = github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot(ssz.UnmarshallUint64(buf[0:8])) // Field (1) 'ProposerIndex' - b.ProposerIndex = github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex(ssz.UnmarshallUint64(buf[8:16])) + b.ProposerIndex = github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex(ssz.UnmarshallUint64(buf[8:16])) // Field (2) 'ParentRoot' if cap(b.ParentRoot) == 0 { @@ -1558,10 +1558,10 @@ func (b *BeaconBlockHeader) UnmarshalSSZ(buf []byte) error { } // Field (0) 'Slot' - b.Slot = github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot(ssz.UnmarshallUint64(buf[0:8])) + b.Slot = github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot(ssz.UnmarshallUint64(buf[0:8])) // Field (1) 'ProposerIndex' - b.ProposerIndex = github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex(ssz.UnmarshallUint64(buf[8:16])) + b.ProposerIndex = github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex(ssz.UnmarshallUint64(buf[8:16])) // Field (2) 'ParentRoot' if cap(b.ParentRoot) == 0 { @@ -2100,10 +2100,10 @@ func (v *VoluntaryExit) UnmarshalSSZ(buf []byte) error { } // Field (0) 'Epoch' - v.Epoch = github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch(ssz.UnmarshallUint64(buf[0:8])) + v.Epoch = github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch(ssz.UnmarshallUint64(buf[0:8])) // Field (1) 'ValidatorIndex' - v.ValidatorIndex = github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex(ssz.UnmarshallUint64(buf[8:16])) + v.ValidatorIndex = github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex(ssz.UnmarshallUint64(buf[8:16])) return err } @@ -2183,7 +2183,7 @@ func (f *Fork) UnmarshalSSZ(buf []byte) error { f.CurrentVersion = append(f.CurrentVersion, buf[4:8]...) 
// Field (2) 'Epoch' - f.Epoch = github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch(ssz.UnmarshallUint64(buf[8:16])) + f.Epoch = github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch(ssz.UnmarshallUint64(buf[8:16])) return err } @@ -2298,16 +2298,16 @@ func (v *Validator) UnmarshalSSZ(buf []byte) error { } // Field (4) 'ActivationEligibilityEpoch' - v.ActivationEligibilityEpoch = github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch(ssz.UnmarshallUint64(buf[89:97])) + v.ActivationEligibilityEpoch = github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch(ssz.UnmarshallUint64(buf[89:97])) // Field (5) 'ActivationEpoch' - v.ActivationEpoch = github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch(ssz.UnmarshallUint64(buf[97:105])) + v.ActivationEpoch = github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch(ssz.UnmarshallUint64(buf[97:105])) // Field (6) 'ExitEpoch' - v.ExitEpoch = github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch(ssz.UnmarshallUint64(buf[105:113])) + v.ExitEpoch = github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch(ssz.UnmarshallUint64(buf[105:113])) // Field (7) 'WithdrawableEpoch' - v.WithdrawableEpoch = github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch(ssz.UnmarshallUint64(buf[113:121])) + v.WithdrawableEpoch = github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch(ssz.UnmarshallUint64(buf[113:121])) return err } @@ -2737,7 +2737,7 @@ func (b *BeaconState) UnmarshalSSZ(buf []byte) error { b.GenesisValidatorsRoot = append(b.GenesisValidatorsRoot, buf[8:40]...) // Field (2) 'Slot' - b.Slot = github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot(ssz.UnmarshallUint64(buf[40:48])) + b.Slot = github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot(ssz.UnmarshallUint64(buf[40:48])) // Field (3) 'Fork' if b.Fork == nil { @@ -3301,10 +3301,10 @@ func (p *PendingAttestation) UnmarshalSSZ(buf []byte) error { } // Field (2) 'InclusionDelay' - p.InclusionDelay = github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot(ssz.UnmarshallUint64(buf[132:140])) + p.InclusionDelay = github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot(ssz.UnmarshallUint64(buf[132:140])) // Field (3) 'ProposerIndex' - p.ProposerIndex = github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex(ssz.UnmarshallUint64(buf[140:148])) + p.ProposerIndex = github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex(ssz.UnmarshallUint64(buf[140:148])) // Field (0) 'AggregationBits' { @@ -3898,7 +3898,7 @@ func (s *Status) UnmarshalSSZ(buf []byte) error { s.FinalizedRoot = append(s.FinalizedRoot, buf[4:36]...) // Field (2) 'FinalizedEpoch' - s.FinalizedEpoch = github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch(ssz.UnmarshallUint64(buf[36:44])) + s.FinalizedEpoch = github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch(ssz.UnmarshallUint64(buf[36:44])) // Field (3) 'HeadRoot' if cap(s.HeadRoot) == 0 { @@ -3907,7 +3907,7 @@ func (s *Status) UnmarshalSSZ(buf []byte) error { s.HeadRoot = append(s.HeadRoot, buf[44:76]...) 
// Field (4) 'HeadSlot' - s.HeadSlot = github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot(ssz.UnmarshallUint64(buf[76:84])) + s.HeadSlot = github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot(ssz.UnmarshallUint64(buf[76:84])) return err } @@ -4008,7 +4008,7 @@ func (e *ENRForkID) UnmarshalSSZ(buf []byte) error { e.NextForkVersion = append(e.NextForkVersion, buf[4:8]...) // Field (2) 'NextForkEpoch' - e.NextForkEpoch = github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch(ssz.UnmarshallUint64(buf[8:16])) + e.NextForkEpoch = github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch(ssz.UnmarshallUint64(buf[8:16])) return err } @@ -4083,7 +4083,7 @@ func (v *ValidatorIdentity) UnmarshalSSZ(buf []byte) error { } // Field (0) 'Index' - v.Index = github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex(ssz.UnmarshallUint64(buf[0:8])) + v.Index = github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex(ssz.UnmarshallUint64(buf[0:8])) // Field (1) 'Pubkey' if cap(v.Pubkey) == 0 { @@ -4092,7 +4092,7 @@ func (v *ValidatorIdentity) UnmarshalSSZ(buf []byte) error { v.Pubkey = append(v.Pubkey, buf[8:56]...) // Field (2) 'ActivationEpoch' - v.ActivationEpoch = github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch(ssz.UnmarshallUint64(buf[56:64])) + v.ActivationEpoch = github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch(ssz.UnmarshallUint64(buf[56:64])) return err } diff --git a/proto/prysm/v1alpha1/powchain.pb.go b/proto/prysm/v1alpha1/powchain.pb.go index a30cc25057..84561953ce 100755 --- a/proto/prysm/v1alpha1/powchain.pb.go +++ b/proto/prysm/v1alpha1/powchain.pb.go @@ -10,7 +10,7 @@ import ( reflect "reflect" sync "sync" - _ "github.com/OffchainLabs/prysm/v6/proto/eth/ext" + _ "github.com/OffchainLabs/prysm/v7/proto/eth/ext" protoreflect "google.golang.org/protobuf/reflect/protoreflect" protoimpl "google.golang.org/protobuf/runtime/protoimpl" _ "google.golang.org/protobuf/types/descriptorpb" @@ -614,7 +614,7 @@ var file_proto_prysm_v1alpha1_powchain_proto_rawDesc = []byte{ 0x6c, 0x70, 0x68, 0x61, 0x31, 0x42, 0x0d, 0x50, 0x6f, 0x77, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x50, 0x72, 0x6f, 0x74, 0x6f, 0x50, 0x01, 0x5a, 0x39, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, - 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2f, 0x70, + 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x3b, 0x65, 0x74, 0x68, 0xaa, 0x02, 0x15, 0x45, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x45, 0x74, 0x68, 0x2e, 0x56, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0xca, 0x02, 0x15, 0x45, 0x74, 0x68, 0x65, diff --git a/proto/prysm/v1alpha1/powchain.proto b/proto/prysm/v1alpha1/powchain.proto index 6ba68fdbdd..01b7a34f9b 100644 --- a/proto/prysm/v1alpha1/powchain.proto +++ b/proto/prysm/v1alpha1/powchain.proto @@ -8,7 +8,7 @@ import "proto/prysm/v1alpha1/beacon_state.proto"; import "proto/eth/ext/options.proto"; option csharp_namespace = "Ethereum.Eth.V1alpha1"; -option go_package = "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1;eth"; +option go_package = "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1;eth"; option java_multiple_files = true; option java_outer_classname = "PowchainProto"; option java_package = "org.ethereum.eth.v1alpha1"; diff --git 
a/proto/prysm/v1alpha1/slasher.pb.go b/proto/prysm/v1alpha1/slasher.pb.go index 776d6dfd43..8e5a11bca6 100755 --- a/proto/prysm/v1alpha1/slasher.pb.go +++ b/proto/prysm/v1alpha1/slasher.pb.go @@ -10,8 +10,8 @@ import ( reflect "reflect" sync "sync" - github_com_OffchainLabs_prysm_v6_consensus_types_primitives "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - _ "github.com/OffchainLabs/prysm/v6/proto/eth/ext" + github_com_OffchainLabs_prysm_v7_consensus_types_primitives "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + _ "github.com/OffchainLabs/prysm/v7/proto/eth/ext" protoreflect "google.golang.org/protobuf/reflect/protoreflect" protoimpl "google.golang.org/protobuf/runtime/protoimpl" ) @@ -26,8 +26,8 @@ const ( type HighestAttestation struct { state protoimpl.MessageState `protogen:"open.v1"` ValidatorIndex uint64 `protobuf:"varint,1,opt,name=validator_index,json=validatorIndex,proto3" json:"validator_index,omitempty"` - HighestSourceEpoch github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch `protobuf:"varint,2,opt,name=highest_source_epoch,json=highestSourceEpoch,proto3" json:"highest_source_epoch,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Epoch"` - HighestTargetEpoch github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch `protobuf:"varint,3,opt,name=highest_target_epoch,json=highestTargetEpoch,proto3" json:"highest_target_epoch,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Epoch"` + HighestSourceEpoch github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch `protobuf:"varint,2,opt,name=highest_source_epoch,json=highestSourceEpoch,proto3" json:"highest_source_epoch,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Epoch"` + HighestTargetEpoch github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch `protobuf:"varint,3,opt,name=highest_target_epoch,json=highestTargetEpoch,proto3" json:"highest_target_epoch,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Epoch"` unknownFields protoimpl.UnknownFields sizeCache protoimpl.SizeCache } @@ -69,18 +69,18 @@ func (x *HighestAttestation) GetValidatorIndex() uint64 { return 0 } -func (x *HighestAttestation) GetHighestSourceEpoch() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch { +func (x *HighestAttestation) GetHighestSourceEpoch() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch { if x != nil { return x.HighestSourceEpoch } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch(0) } -func (x *HighestAttestation) GetHighestTargetEpoch() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch { +func (x *HighestAttestation) GetHighestTargetEpoch() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch { if x != nil { return x.HighestTargetEpoch } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch(0) } var File_proto_prysm_v1alpha1_slasher_proto protoreflect.FileDescriptor @@ -99,7 +99,7 @@ var file_proto_prysm_v1alpha1_slasher_proto_rawDesc = []byte{ 0x65, 0x73, 0x74, 0x5f, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, 0x65, 0x70, 0x6f, 0x63, 0x68, 0x18, 0x02, 0x20, 0x01, 0x28, 0x04, 0x42, 0x45, 0x82, 0xb5, 0x18, 0x41, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 
0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, - 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, + 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x45, 0x70, 0x6f, 0x63, 0x68, 0x52, 0x12, 0x68, 0x69, 0x67, 0x68, 0x65, 0x73, 0x74, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x45, 0x70, 0x6f, 0x63, @@ -107,7 +107,7 @@ var file_proto_prysm_v1alpha1_slasher_proto_rawDesc = []byte{ 0x67, 0x65, 0x74, 0x5f, 0x65, 0x70, 0x6f, 0x63, 0x68, 0x18, 0x03, 0x20, 0x01, 0x28, 0x04, 0x42, 0x45, 0x82, 0xb5, 0x18, 0x41, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, - 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, + 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x45, 0x70, 0x6f, 0x63, 0x68, 0x52, 0x12, 0x68, 0x69, 0x67, 0x68, 0x65, 0x73, 0x74, 0x54, 0x61, 0x72, 0x67, 0x65, 0x74, 0x45, 0x70, 0x6f, 0x63, 0x68, 0x42, 0x96, 0x01, 0x0a, 0x19, 0x6f, @@ -115,7 +115,7 @@ var file_proto_prysm_v1alpha1_slasher_proto_rawDesc = []byte{ 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x42, 0x0c, 0x53, 0x6c, 0x61, 0x73, 0x68, 0x65, 0x72, 0x50, 0x72, 0x6f, 0x74, 0x6f, 0x50, 0x01, 0x5a, 0x39, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, - 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, + 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x3b, 0x65, 0x74, 0x68, 0xaa, 0x02, 0x15, 0x45, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x45, 0x74, 0x68, 0x2e, 0x56, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0xca, 0x02, 0x15, 0x45, 0x74, diff --git a/proto/prysm/v1alpha1/slasher.proto b/proto/prysm/v1alpha1/slasher.proto index 696006bd85..db2ab7763b 100644 --- a/proto/prysm/v1alpha1/slasher.proto +++ b/proto/prysm/v1alpha1/slasher.proto @@ -18,7 +18,7 @@ package ethereum.eth.v1alpha1; import "proto/eth/ext/options.proto"; option csharp_namespace = "Ethereum.Eth.V1alpha1"; -option go_package = "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1;eth"; +option go_package = "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1;eth"; option java_multiple_files = true; option java_outer_classname = "SlasherProto"; option java_package = "org.ethereum.eth.v1alpha1"; @@ -28,10 +28,10 @@ message HighestAttestation { uint64 validator_index = 1; uint64 highest_source_epoch = 2 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Epoch" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Epoch" ]; uint64 highest_target_epoch = 3 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Epoch" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Epoch" ]; } \ No newline at end of file diff --git a/proto/prysm/v1alpha1/slashings/BUILD.bazel b/proto/prysm/v1alpha1/slashings/BUILD.bazel index 49ee7d5337..30c12e391a 100644 --- 
a/proto/prysm/v1alpha1/slashings/BUILD.bazel +++ b/proto/prysm/v1alpha1/slashings/BUILD.bazel @@ -6,7 +6,7 @@ go_library( "double_votes.go", "surround_votes.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1/slashings", + importpath = "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1/slashings", visibility = ["//visibility:public"], deps = ["//proto/prysm/v1alpha1:go_default_library"], ) diff --git a/proto/prysm/v1alpha1/slashings/surround_votes.go b/proto/prysm/v1alpha1/slashings/surround_votes.go index af2848fc43..5f993941e4 100644 --- a/proto/prysm/v1alpha1/slashings/surround_votes.go +++ b/proto/prysm/v1alpha1/slashings/surround_votes.go @@ -1,6 +1,6 @@ package slashings -import ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" +import ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" // IsSurround checks if an attestation, a, is surrounding // another one, b, based on the Ethereum slashing conditions specified diff --git a/proto/prysm/v1alpha1/slashings/surround_votes_test.go b/proto/prysm/v1alpha1/slashings/surround_votes_test.go index 309484a8e5..775193b13c 100644 --- a/proto/prysm/v1alpha1/slashings/surround_votes_test.go +++ b/proto/prysm/v1alpha1/slashings/surround_votes_test.go @@ -3,8 +3,8 @@ package slashings import ( "testing" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" ) func TestIsSurround(t *testing.T) { diff --git a/proto/prysm/v1alpha1/sync_committee.pb.go b/proto/prysm/v1alpha1/sync_committee.pb.go index 5b4ba42a35..dc769228f4 100755 --- a/proto/prysm/v1alpha1/sync_committee.pb.go +++ b/proto/prysm/v1alpha1/sync_committee.pb.go @@ -11,8 +11,8 @@ import ( sync "sync" github_com_OffchainLabs_go_bitfield "github.com/OffchainLabs/go-bitfield" - github_com_OffchainLabs_prysm_v6_consensus_types_primitives "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - _ "github.com/OffchainLabs/prysm/v6/proto/eth/ext" + github_com_OffchainLabs_prysm_v7_consensus_types_primitives "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + _ "github.com/OffchainLabs/prysm/v7/proto/eth/ext" protoreflect "google.golang.org/protobuf/reflect/protoreflect" protoimpl "google.golang.org/protobuf/runtime/protoimpl" ) @@ -27,9 +27,9 @@ const ( // Deprecated: Marked as deprecated in proto/prysm/v1alpha1/sync_committee.proto. 
type SyncCommitteeMessage struct { state protoimpl.MessageState `protogen:"open.v1"` - Slot github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot `protobuf:"varint,1,opt,name=slot,proto3" json:"slot,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot"` + Slot github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot `protobuf:"varint,1,opt,name=slot,proto3" json:"slot,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Slot"` BlockRoot []byte `protobuf:"bytes,2,opt,name=block_root,json=blockRoot,proto3" json:"block_root,omitempty" ssz-size:"32"` - ValidatorIndex github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex `protobuf:"varint,3,opt,name=validator_index,json=validatorIndex,proto3" json:"validator_index,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.ValidatorIndex"` + ValidatorIndex github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex `protobuf:"varint,3,opt,name=validator_index,json=validatorIndex,proto3" json:"validator_index,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.ValidatorIndex"` Signature []byte `protobuf:"bytes,4,opt,name=signature,proto3" json:"signature,omitempty" ssz-size:"96"` unknownFields protoimpl.UnknownFields sizeCache protoimpl.SizeCache @@ -65,11 +65,11 @@ func (*SyncCommitteeMessage) Descriptor() ([]byte, []int) { return file_proto_prysm_v1alpha1_sync_committee_proto_rawDescGZIP(), []int{0} } -func (x *SyncCommitteeMessage) GetSlot() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot { +func (x *SyncCommitteeMessage) GetSlot() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot { if x != nil { return x.Slot } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot(0) } func (x *SyncCommitteeMessage) GetBlockRoot() []byte { @@ -79,11 +79,11 @@ func (x *SyncCommitteeMessage) GetBlockRoot() []byte { return nil } -func (x *SyncCommitteeMessage) GetValidatorIndex() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex { +func (x *SyncCommitteeMessage) GetValidatorIndex() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex { if x != nil { return x.ValidatorIndex } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex(0) } func (x *SyncCommitteeMessage) GetSignature() []byte { @@ -147,7 +147,7 @@ func (x *SignedContributionAndProof) GetSignature() []byte { type ContributionAndProof struct { state protoimpl.MessageState `protogen:"open.v1"` - AggregatorIndex github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex `protobuf:"varint,1,opt,name=aggregator_index,json=aggregatorIndex,proto3" json:"aggregator_index,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.ValidatorIndex"` + AggregatorIndex github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex `protobuf:"varint,1,opt,name=aggregator_index,json=aggregatorIndex,proto3" json:"aggregator_index,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.ValidatorIndex"` Contribution *SyncCommitteeContribution `protobuf:"bytes,2,opt,name=contribution,proto3" json:"contribution,omitempty"` SelectionProof []byte 
`protobuf:"bytes,3,opt,name=selection_proof,json=selectionProof,proto3" json:"selection_proof,omitempty" ssz-size:"96"` unknownFields protoimpl.UnknownFields @@ -184,11 +184,11 @@ func (*ContributionAndProof) Descriptor() ([]byte, []int) { return file_proto_prysm_v1alpha1_sync_committee_proto_rawDescGZIP(), []int{2} } -func (x *ContributionAndProof) GetAggregatorIndex() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex { +func (x *ContributionAndProof) GetAggregatorIndex() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex { if x != nil { return x.AggregatorIndex } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex(0) } func (x *ContributionAndProof) GetContribution() *SyncCommitteeContribution { @@ -207,7 +207,7 @@ func (x *ContributionAndProof) GetSelectionProof() []byte { type SyncCommitteeContribution struct { state protoimpl.MessageState `protogen:"open.v1"` - Slot github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot `protobuf:"varint,1,opt,name=slot,proto3" json:"slot,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot"` + Slot github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot `protobuf:"varint,1,opt,name=slot,proto3" json:"slot,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Slot"` BlockRoot []byte `protobuf:"bytes,2,opt,name=block_root,json=blockRoot,proto3" json:"block_root,omitempty" ssz-size:"32"` SubcommitteeIndex uint64 `protobuf:"varint,3,opt,name=subcommittee_index,json=subcommitteeIndex,proto3" json:"subcommittee_index,omitempty"` AggregationBits github_com_OffchainLabs_go_bitfield.Bitvector128 `protobuf:"bytes,4,opt,name=aggregation_bits,json=aggregationBits,proto3" json:"aggregation_bits,omitempty" cast-type:"github.com/OffchainLabs/go-bitfield.Bitvector128" ssz-size:"16"` @@ -246,11 +246,11 @@ func (*SyncCommitteeContribution) Descriptor() ([]byte, []int) { return file_proto_prysm_v1alpha1_sync_committee_proto_rawDescGZIP(), []int{3} } -func (x *SyncCommitteeContribution) GetSlot() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot { +func (x *SyncCommitteeContribution) GetSlot() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot { if x != nil { return x.Slot } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot(0) } func (x *SyncCommitteeContribution) GetBlockRoot() []byte { @@ -294,7 +294,7 @@ var file_proto_prysm_v1alpha1_sync_committee_proto_rawDesc = []byte{ 0x65, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x12, 0x58, 0x0a, 0x04, 0x73, 0x6c, 0x6f, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, 0x42, 0x44, 0x82, 0xb5, 0x18, 0x40, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, - 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, + 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x53, 0x6c, 0x6f, 0x74, 0x52, 0x04, 0x73, 0x6c, 0x6f, 0x74, 0x12, 0x25, 0x0a, 0x0a, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x5f, 0x72, 0x6f, 0x6f, 0x74, @@ -303,7 +303,7 @@ var 
file_proto_prysm_v1alpha1_sync_committee_proto_rawDesc = []byte{ 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, 0x5f, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x18, 0x03, 0x20, 0x01, 0x28, 0x04, 0x42, 0x4e, 0x82, 0xb5, 0x18, 0x4a, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, - 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, + 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x56, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x52, 0x0e, 0x76, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, 0x49, 0x6e, 0x64, @@ -324,7 +324,7 @@ var file_proto_prysm_v1alpha1_sync_committee_proto_rawDesc = []byte{ 0x69, 0x6e, 0x64, 0x65, 0x78, 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, 0x42, 0x4e, 0x82, 0xb5, 0x18, 0x4a, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, - 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, + 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x56, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x52, 0x0f, 0x61, 0x67, 0x67, 0x72, 0x65, 0x67, 0x61, 0x74, 0x6f, 0x72, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x12, 0x54, 0x0a, 0x0c, @@ -341,7 +341,7 @@ var file_proto_prysm_v1alpha1_sync_committee_proto_rawDesc = []byte{ 0x6e, 0x12, 0x58, 0x0a, 0x04, 0x73, 0x6c, 0x6f, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, 0x42, 0x44, 0x82, 0xb5, 0x18, 0x40, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, - 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, + 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x53, 0x6c, 0x6f, 0x74, 0x52, 0x04, 0x73, 0x6c, 0x6f, 0x74, 0x12, 0x25, 0x0a, 0x0a, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x5f, 0x72, 0x6f, 0x6f, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0c, 0x42, @@ -363,7 +363,7 @@ var file_proto_prysm_v1alpha1_sync_committee_proto_rawDesc = []byte{ 0x6e, 0x63, 0x43, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x74, 0x65, 0x65, 0x50, 0x72, 0x6f, 0x74, 0x6f, 0x50, 0x01, 0x5a, 0x39, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, - 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, + 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x3b, 0x65, 0x74, 0x68, 0xaa, 0x02, 0x0f, 0x45, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x45, 0x74, 0x68, 0x2e, 0x56, 0x31, 0xca, 0x02, 0x15, 0x45, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x5c, 0x45, 0x74, 0x68, 0x5c, 0x76, diff --git a/proto/prysm/v1alpha1/sync_committee.proto b/proto/prysm/v1alpha1/sync_committee.proto index 380319f218..1afef93f13 100644 
--- a/proto/prysm/v1alpha1/sync_committee.proto +++ b/proto/prysm/v1alpha1/sync_committee.proto @@ -18,7 +18,7 @@ package ethereum.eth.v1alpha1; import "proto/eth/ext/options.proto"; option csharp_namespace = "Ethereum.Eth.V1"; -option go_package = "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1;eth"; +option go_package = "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1;eth"; option java_multiple_files = true; option java_outer_classname = "SyncCommitteeProto"; option java_package = "org.ethereum.eth.v1alpha1"; @@ -31,7 +31,7 @@ message SyncCommitteeMessage { // Slot to which this contribution pertains. uint64 slot = 1 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Slot" ]; // 32 byte block root for this signature. @@ -40,7 +40,7 @@ message SyncCommitteeMessage { // Index of the validator that produced this signature. uint64 validator_index = 3 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/" + "github.com/OffchainLabs/prysm/v7/consensus-types/" "primitives.ValidatorIndex" ]; // Signature by the validator over the block root of `slot`. @@ -62,7 +62,7 @@ message ContributionAndProof { // Index of the aggregator that produced this proof. uint64 aggregator_index = 1 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/" + "github.com/OffchainLabs/prysm/v7/consensus-types/" "primitives.ValidatorIndex" ]; SyncCommitteeContribution contribution = 2; @@ -76,7 +76,7 @@ message SyncCommitteeContribution { // Slot to which this contribution pertains. uint64 slot = 1 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Slot" ]; // 32 byte block root for this signature. 
diff --git a/proto/prysm/v1alpha1/validator-client/BUILD.bazel b/proto/prysm/v1alpha1/validator-client/BUILD.bazel index 80a0add9e7..fdc3bac04e 100644 --- a/proto/prysm/v1alpha1/validator-client/BUILD.bazel +++ b/proto/prysm/v1alpha1/validator-client/BUILD.bazel @@ -34,7 +34,7 @@ go_proto_library( compilers = [ "@com_github_prysmaticlabs_protoc_gen_go_cast//:go_cast_grpc", ], - importpath = "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1/validator-client", + importpath = "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1/validator-client", proto = ":proto", visibility = ["//visibility:public"], deps = [ @@ -64,7 +64,7 @@ go_library( embed = [ ":go_proto", ], - importpath = "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1/validator-client", + importpath = "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1/validator-client", visibility = ["//visibility:public"], deps = [ "//proto/eth/ext:go_default_library", diff --git a/proto/prysm/v1alpha1/validator-client/keymanager.pb.go b/proto/prysm/v1alpha1/validator-client/keymanager.pb.go index 64e888fcf3..a0b4bc7f78 100755 --- a/proto/prysm/v1alpha1/validator-client/keymanager.pb.go +++ b/proto/prysm/v1alpha1/validator-client/keymanager.pb.go @@ -10,10 +10,10 @@ import ( reflect "reflect" sync "sync" - github_com_OffchainLabs_prysm_v6_consensus_types_primitives "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - github_com_OffchainLabs_prysm_v6_consensus_types_validator "github.com/OffchainLabs/prysm/v6/consensus-types/validator" - _ "github.com/OffchainLabs/prysm/v6/proto/eth/ext" - v1alpha1 "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + github_com_OffchainLabs_prysm_v7_consensus_types_primitives "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + github_com_OffchainLabs_prysm_v7_consensus_types_validator "github.com/OffchainLabs/prysm/v7/consensus-types/validator" + _ "github.com/OffchainLabs/prysm/v7/proto/eth/ext" + v1alpha1 "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" protoreflect "google.golang.org/protobuf/reflect/protoreflect" protoimpl "google.golang.org/protobuf/runtime/protoimpl" ) @@ -107,7 +107,7 @@ type SignRequest struct { // *SignRequest_BlockFulu // *SignRequest_BlindedBlockFulu Object isSignRequest_Object `protobuf_oneof:"object"` - SigningSlot github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot `protobuf:"varint,6,opt,name=signing_slot,json=signingSlot,proto3" json:"signing_slot,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot"` + SigningSlot github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot `protobuf:"varint,6,opt,name=signing_slot,json=signingSlot,proto3" json:"signing_slot,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Slot"` unknownFields protoimpl.UnknownFields sizeCache protoimpl.SizeCache } @@ -206,22 +206,22 @@ func (x *SignRequest) GetExit() *v1alpha1.VoluntaryExit { return nil } -func (x *SignRequest) GetSlot() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot { +func (x *SignRequest) GetSlot() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot { if x != nil { if x, ok := x.Object.(*SignRequest_Slot); ok { return x.Slot } } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot(0) } -func (x *SignRequest) GetEpoch() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch { +func (x *SignRequest) GetEpoch() 
github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch { if x != nil { if x, ok := x.Object.(*SignRequest_Epoch); ok { return x.Epoch } } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch(0) } func (x *SignRequest) GetBlockAltair() *v1alpha1.BeaconBlockAltair { @@ -368,11 +368,11 @@ func (x *SignRequest) GetBlindedBlockFulu() *v1alpha1.BlindedBeaconBlockFulu { return nil } -func (x *SignRequest) GetSigningSlot() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot { +func (x *SignRequest) GetSigningSlot() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot { if x != nil { return x.SigningSlot } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot(0) } type isSignRequest_Object interface { @@ -396,11 +396,11 @@ type SignRequest_Exit struct { } type SignRequest_Slot struct { - Slot github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot `protobuf:"varint,105,opt,name=slot,proto3,oneof" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot"` + Slot github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot `protobuf:"varint,105,opt,name=slot,proto3,oneof" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Slot"` } type SignRequest_Epoch struct { - Epoch github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch `protobuf:"varint,106,opt,name=epoch,proto3,oneof" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Epoch"` + Epoch github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch `protobuf:"varint,106,opt,name=epoch,proto3,oneof" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Epoch"` } type SignRequest_BlockAltair struct { @@ -626,7 +626,7 @@ func (x *ProposerOptionPayload) GetGraffiti() string { type BuilderConfig struct { state protoimpl.MessageState `protogen:"open.v1"` Enabled bool `protobuf:"varint,1,opt,name=enabled,proto3" json:"enabled,omitempty"` - GasLimit github_com_OffchainLabs_prysm_v6_consensus_types_validator.Uint64 `protobuf:"varint,2,opt,name=gas_limit,json=gasLimit,proto3" json:"gas_limit,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/validator.Uint64"` + GasLimit github_com_OffchainLabs_prysm_v7_consensus_types_validator.Uint64 `protobuf:"varint,2,opt,name=gas_limit,json=gasLimit,proto3" json:"gas_limit,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/validator.Uint64"` Relays []string `protobuf:"bytes,3,rep,name=relays,proto3" json:"relays,omitempty"` unknownFields protoimpl.UnknownFields sizeCache protoimpl.SizeCache @@ -669,11 +669,11 @@ func (x *BuilderConfig) GetEnabled() bool { return false } -func (x *BuilderConfig) GetGasLimit() github_com_OffchainLabs_prysm_v6_consensus_types_validator.Uint64 { +func (x *BuilderConfig) GetGasLimit() github_com_OffchainLabs_prysm_v7_consensus_types_validator.Uint64 { if x != nil { return x.GasLimit } - return github_com_OffchainLabs_prysm_v6_consensus_types_validator.Uint64(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_validator.Uint64(0) } func (x *BuilderConfig) GetRelays() []string { @@ -790,13 +790,13 @@ var file_proto_prysm_v1alpha1_validator_client_keymanager_proto_rawDesc = []byte 0x12, 0x5a, 0x0a, 0x04, 0x73, 0x6c, 0x6f, 0x74, 0x18, 0x69, 0x20, 0x01, 0x28, 0x04, 0x42, 0x44, 0x82, 0xb5, 0x18, 0x40, 0x67, 0x69, 0x74, 
0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, - 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, + 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x53, 0x6c, 0x6f, 0x74, 0x48, 0x00, 0x52, 0x04, 0x73, 0x6c, 0x6f, 0x74, 0x12, 0x5d, 0x0a, 0x05, 0x65, 0x70, 0x6f, 0x63, 0x68, 0x18, 0x6a, 0x20, 0x01, 0x28, 0x04, 0x42, 0x45, 0x82, 0xb5, 0x18, 0x41, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, - 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, + 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x45, 0x70, 0x6f, 0x63, 0x68, 0x48, 0x00, 0x52, 0x05, 0x65, 0x70, 0x6f, 0x63, 0x68, 0x12, 0x4d, 0x0a, 0x0c, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x5f, 0x61, 0x6c, 0x74, 0x61, 0x69, 0x72, 0x18, 0x6b, 0x20, 0x01, 0x28, @@ -896,7 +896,7 @@ var file_proto_prysm_v1alpha1_validator_client_keymanager_proto_rawDesc = []byte 0x75, 0x12, 0x67, 0x0a, 0x0c, 0x73, 0x69, 0x67, 0x6e, 0x69, 0x6e, 0x67, 0x5f, 0x73, 0x6c, 0x6f, 0x74, 0x18, 0x06, 0x20, 0x01, 0x28, 0x04, 0x42, 0x44, 0x82, 0xb5, 0x18, 0x40, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, - 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, + 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x53, 0x6c, 0x6f, 0x74, 0x52, 0x0b, 0x73, 0x69, 0x67, 0x6e, 0x69, 0x6e, 0x67, 0x53, 0x6c, 0x6f, 0x74, 0x42, 0x08, 0x0a, 0x06, 0x6f, 0x62, @@ -930,7 +930,7 @@ var file_proto_prysm_v1alpha1_validator_client_keymanager_proto_rawDesc = []byte 0x67, 0x61, 0x73, 0x5f, 0x6c, 0x69, 0x6d, 0x69, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x04, 0x42, 0x45, 0x82, 0xb5, 0x18, 0x41, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, - 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, + 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x76, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, 0x2e, 0x55, 0x69, 0x6e, 0x74, 0x36, 0x34, 0x52, 0x08, 0x67, 0x61, 0x73, 0x4c, 0x69, 0x6d, 0x69, 0x74, 0x12, 0x16, 0x0a, 0x06, 0x72, 0x65, 0x6c, 0x61, 0x79, 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, 0x09, @@ -962,7 +962,7 @@ var file_proto_prysm_v1alpha1_validator_client_keymanager_proto_rawDesc = []byte 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x73, 0x2e, 0x76, 0x32, 0x42, 0x0f, 0x4b, 0x65, 0x79, 0x6d, 0x61, 0x6e, 0x61, 0x67, 0x65, 0x72, 0x50, 0x72, 0x6f, 0x74, 0x6f, 0x50, 0x01, 0x5a, 0x52, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, - 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x70, + 0x6e, 
0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2f, 0x76, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, 0x2d, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x3b, 0x76, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, 0x70, 0x62, diff --git a/proto/prysm/v1alpha1/validator-client/keymanager.proto b/proto/prysm/v1alpha1/validator-client/keymanager.proto index 706ae20b16..4e62760bf0 100644 --- a/proto/prysm/v1alpha1/validator-client/keymanager.proto +++ b/proto/prysm/v1alpha1/validator-client/keymanager.proto @@ -9,7 +9,7 @@ import "proto/prysm/v1alpha1/beacon_state.proto"; import "proto/prysm/v1alpha1/sync_committee.proto"; option csharp_namespace = "Ethereum.Validator.Accounts.V2"; -option go_package = "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1/validator-client;validatorpb"; +option go_package = "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1/validator-client;validatorpb"; option java_multiple_files = true; option java_outer_classname = "KeymanagerProto"; option java_package = "org.ethereum.validator.accounts.v2"; @@ -41,11 +41,11 @@ message SignRequest { ethereum.eth.v1alpha1.VoluntaryExit exit = 104; uint64 slot = 105 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Slot" ]; uint64 epoch = 106 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Epoch" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Epoch" ]; // Altair objects. @@ -85,7 +85,7 @@ message SignRequest { reserved 4, 5; // Reserving old, deleted fields. uint64 signing_slot = 6 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Slot" ]; } @@ -119,7 +119,7 @@ message BuilderConfig { bool enabled = 1; uint64 gas_limit = 2 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/validator.Uint64" + "github.com/OffchainLabs/prysm/v7/consensus-types/validator.Uint64" ]; repeated string relays = 3; } diff --git a/proto/prysm/v1alpha1/validator.go b/proto/prysm/v1alpha1/validator.go index 7019310ac2..b417f0da21 100644 --- a/proto/prysm/v1alpha1/validator.go +++ b/proto/prysm/v1alpha1/validator.go @@ -1,7 +1,7 @@ package eth import ( - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" ) // ValidatorDutiesContainer is a wrapper that can be both used for the gRPC DutiesResponse and Rest API response structs for attestation, proposer, and sync duties. 
diff --git a/proto/prysm/v1alpha1/validator.pb.go b/proto/prysm/v1alpha1/validator.pb.go index 32a45c7e3e..9d3fa19f85 100755 --- a/proto/prysm/v1alpha1/validator.pb.go +++ b/proto/prysm/v1alpha1/validator.pb.go @@ -11,8 +11,8 @@ import ( reflect "reflect" sync "sync" - github_com_OffchainLabs_prysm_v6_consensus_types_primitives "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - _ "github.com/OffchainLabs/prysm/v6/proto/eth/ext" + github_com_OffchainLabs_prysm_v7_consensus_types_primitives "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + _ "github.com/OffchainLabs/prysm/v7/proto/eth/ext" _ "google.golang.org/genproto/googleapis/api/annotations" grpc "google.golang.org/grpc" codes "google.golang.org/grpc/codes" @@ -146,7 +146,7 @@ func (x *SyncMessageBlockRootResponse) GetRoot() []byte { type SyncSubcommitteeIndexRequest struct { state protoimpl.MessageState `protogen:"open.v1"` PublicKey []byte `protobuf:"bytes,1,opt,name=public_key,json=publicKey,proto3" json:"public_key,omitempty" ssz-size:"48"` - Slot github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot `protobuf:"varint,2,opt,name=slot,proto3" json:"slot,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot"` + Slot github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot `protobuf:"varint,2,opt,name=slot,proto3" json:"slot,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Slot"` unknownFields protoimpl.UnknownFields sizeCache protoimpl.SizeCache } @@ -188,17 +188,17 @@ func (x *SyncSubcommitteeIndexRequest) GetPublicKey() []byte { return nil } -func (x *SyncSubcommitteeIndexRequest) GetSlot() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot { +func (x *SyncSubcommitteeIndexRequest) GetSlot() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot { if x != nil { return x.Slot } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot(0) } // Deprecated: Marked as deprecated in proto/prysm/v1alpha1/validator.proto. 
type SyncCommitteeContributionRequest struct { state protoimpl.MessageState `protogen:"open.v1"` - Slot github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot `protobuf:"varint,1,opt,name=slot,proto3" json:"slot,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot"` + Slot github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot `protobuf:"varint,1,opt,name=slot,proto3" json:"slot,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Slot"` PublicKey []byte `protobuf:"bytes,2,opt,name=public_key,json=publicKey,proto3" json:"public_key,omitempty" spec-name:"pubkey" ssz-size:"48"` SubnetId uint64 `protobuf:"varint,3,opt,name=subnet_id,json=subnetId,proto3" json:"subnet_id,omitempty"` unknownFields protoimpl.UnknownFields @@ -235,11 +235,11 @@ func (*SyncCommitteeContributionRequest) Descriptor() ([]byte, []int) { return file_proto_prysm_v1alpha1_validator_proto_rawDescGZIP(), []int{2} } -func (x *SyncCommitteeContributionRequest) GetSlot() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot { +func (x *SyncCommitteeContributionRequest) GetSlot() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot { if x != nil { return x.Slot } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot(0) } func (x *SyncCommitteeContributionRequest) GetPublicKey() []byte { @@ -259,7 +259,7 @@ func (x *SyncCommitteeContributionRequest) GetSubnetId() uint64 { // Deprecated: Marked as deprecated in proto/prysm/v1alpha1/validator.proto. type SyncSubcommitteeIndexResponse struct { state protoimpl.MessageState `protogen:"open.v1"` - Indices []github_com_OffchainLabs_prysm_v6_consensus_types_primitives.CommitteeIndex `protobuf:"varint,1,rep,packed,name=indices,proto3" json:"indices,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.CommitteeIndex"` + Indices []github_com_OffchainLabs_prysm_v7_consensus_types_primitives.CommitteeIndex `protobuf:"varint,1,rep,packed,name=indices,proto3" json:"indices,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.CommitteeIndex"` unknownFields protoimpl.UnknownFields sizeCache protoimpl.SizeCache } @@ -294,17 +294,17 @@ func (*SyncSubcommitteeIndexResponse) Descriptor() ([]byte, []int) { return file_proto_prysm_v1alpha1_validator_proto_rawDescGZIP(), []int{3} } -func (x *SyncSubcommitteeIndexResponse) GetIndices() []github_com_OffchainLabs_prysm_v6_consensus_types_primitives.CommitteeIndex { +func (x *SyncSubcommitteeIndexResponse) GetIndices() []github_com_OffchainLabs_prysm_v7_consensus_types_primitives.CommitteeIndex { if x != nil { return x.Indices } - return []github_com_OffchainLabs_prysm_v6_consensus_types_primitives.CommitteeIndex(nil) + return []github_com_OffchainLabs_prysm_v7_consensus_types_primitives.CommitteeIndex(nil) } // Deprecated: Marked as deprecated in proto/prysm/v1alpha1/validator.proto. 
type StreamSlotsResponse struct { state protoimpl.MessageState `protogen:"open.v1"` - Slot github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot `protobuf:"varint,1,opt,name=slot,proto3" json:"slot,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot"` + Slot github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot `protobuf:"varint,1,opt,name=slot,proto3" json:"slot,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Slot"` PreviousDutyDependentRoot []byte `protobuf:"bytes,2,opt,name=previous_duty_dependent_root,json=previousDutyDependentRoot,proto3" json:"previous_duty_dependent_root,omitempty" ssz-size:"32"` CurrentDutyDependentRoot []byte `protobuf:"bytes,3,opt,name=current_duty_dependent_root,json=currentDutyDependentRoot,proto3" json:"current_duty_dependent_root,omitempty" ssz-size:"32"` unknownFields protoimpl.UnknownFields @@ -341,11 +341,11 @@ func (*StreamSlotsResponse) Descriptor() ([]byte, []int) { return file_proto_prysm_v1alpha1_validator_proto_rawDescGZIP(), []int{4} } -func (x *StreamSlotsResponse) GetSlot() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot { +func (x *StreamSlotsResponse) GetSlot() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot { if x != nil { return x.Slot } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot(0) } func (x *StreamSlotsResponse) GetPreviousDutyDependentRoot() []byte { @@ -528,7 +528,7 @@ func (*StreamBlocksResponse_FuluBlock) isStreamBlocksResponse_Block() {} // Deprecated: Marked as deprecated in proto/prysm/v1alpha1/validator.proto. type DomainRequest struct { state protoimpl.MessageState `protogen:"open.v1"` - Epoch github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch `protobuf:"varint,1,opt,name=epoch,proto3" json:"epoch,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Epoch"` + Epoch github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch `protobuf:"varint,1,opt,name=epoch,proto3" json:"epoch,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Epoch"` Domain []byte `protobuf:"bytes,2,opt,name=domain,proto3" json:"domain,omitempty"` unknownFields protoimpl.UnknownFields sizeCache protoimpl.SizeCache @@ -564,11 +564,11 @@ func (*DomainRequest) Descriptor() ([]byte, []int) { return file_proto_prysm_v1alpha1_validator_proto_rawDescGZIP(), []int{6} } -func (x *DomainRequest) GetEpoch() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch { +func (x *DomainRequest) GetEpoch() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch { if x != nil { return x.Epoch } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch(0) } func (x *DomainRequest) GetDomain() []byte { @@ -874,7 +874,7 @@ func (x *ValidatorIndexRequest) GetPublicKey() []byte { // Deprecated: Marked as deprecated in proto/prysm/v1alpha1/validator.proto. 
type ValidatorIndexResponse struct { state protoimpl.MessageState `protogen:"open.v1"` - Index github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex `protobuf:"varint,1,opt,name=index,proto3" json:"index,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.ValidatorIndex"` + Index github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex `protobuf:"varint,1,opt,name=index,proto3" json:"index,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.ValidatorIndex"` unknownFields protoimpl.UnknownFields sizeCache protoimpl.SizeCache } @@ -909,11 +909,11 @@ func (*ValidatorIndexResponse) Descriptor() ([]byte, []int) { return file_proto_prysm_v1alpha1_validator_proto_rawDescGZIP(), []int{13} } -func (x *ValidatorIndexResponse) GetIndex() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex { +func (x *ValidatorIndexResponse) GetIndex() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex { if x != nil { return x.Index } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex(0) } // Deprecated: Marked as deprecated in proto/prysm/v1alpha1/validator.proto. @@ -966,8 +966,8 @@ type ValidatorStatusResponse struct { state protoimpl.MessageState `protogen:"open.v1"` Status ValidatorStatus `protobuf:"varint,1,opt,name=status,proto3,enum=ethereum.eth.v1alpha1.ValidatorStatus" json:"status,omitempty"` Eth1DepositBlockNumber uint64 `protobuf:"varint,2,opt,name=eth1_deposit_block_number,json=eth1DepositBlockNumber,proto3" json:"eth1_deposit_block_number,omitempty"` - DepositInclusionSlot github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot `protobuf:"varint,3,opt,name=deposit_inclusion_slot,json=depositInclusionSlot,proto3" json:"deposit_inclusion_slot,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot"` - ActivationEpoch github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch `protobuf:"varint,4,opt,name=activation_epoch,json=activationEpoch,proto3" json:"activation_epoch,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Epoch"` + DepositInclusionSlot github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot `protobuf:"varint,3,opt,name=deposit_inclusion_slot,json=depositInclusionSlot,proto3" json:"deposit_inclusion_slot,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Slot"` + ActivationEpoch github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch `protobuf:"varint,4,opt,name=activation_epoch,json=activationEpoch,proto3" json:"activation_epoch,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Epoch"` PositionInActivationQueue uint64 `protobuf:"varint,5,opt,name=position_in_activation_queue,json=positionInActivationQueue,proto3" json:"position_in_activation_queue,omitempty"` unknownFields protoimpl.UnknownFields sizeCache protoimpl.SizeCache @@ -1017,18 +1017,18 @@ func (x *ValidatorStatusResponse) GetEth1DepositBlockNumber() uint64 { return 0 } -func (x *ValidatorStatusResponse) GetDepositInclusionSlot() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot { +func (x *ValidatorStatusResponse) GetDepositInclusionSlot() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot { if x != nil { return x.DepositInclusionSlot } - return 
github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot(0) } -func (x *ValidatorStatusResponse) GetActivationEpoch() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch { +func (x *ValidatorStatusResponse) GetActivationEpoch() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch { if x != nil { return x.ActivationEpoch } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch(0) } func (x *ValidatorStatusResponse) GetPositionInActivationQueue() uint64 { @@ -1096,7 +1096,7 @@ type MultipleValidatorStatusResponse struct { state protoimpl.MessageState `protogen:"open.v1"` PublicKeys [][]byte `protobuf:"bytes,1,rep,name=public_keys,json=publicKeys,proto3" json:"public_keys,omitempty" ssz-size:"?,48"` Statuses []*ValidatorStatusResponse `protobuf:"bytes,2,rep,name=statuses,proto3" json:"statuses,omitempty"` - Indices []github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex `protobuf:"varint,3,rep,packed,name=indices,proto3" json:"indices,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.ValidatorIndex"` + Indices []github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex `protobuf:"varint,3,rep,packed,name=indices,proto3" json:"indices,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.ValidatorIndex"` unknownFields protoimpl.UnknownFields sizeCache protoimpl.SizeCache } @@ -1145,17 +1145,17 @@ func (x *MultipleValidatorStatusResponse) GetStatuses() []*ValidatorStatusRespon return nil } -func (x *MultipleValidatorStatusResponse) GetIndices() []github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex { +func (x *MultipleValidatorStatusResponse) GetIndices() []github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex { if x != nil { return x.Indices } - return []github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex(nil) + return []github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex(nil) } // Deprecated: Marked as deprecated in proto/prysm/v1alpha1/validator.proto. 
type DutiesRequest struct { state protoimpl.MessageState `protogen:"open.v1"` - Epoch github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch `protobuf:"varint,1,opt,name=epoch,proto3" json:"epoch,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Epoch"` + Epoch github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch `protobuf:"varint,1,opt,name=epoch,proto3" json:"epoch,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Epoch"` PublicKeys [][]byte `protobuf:"bytes,2,rep,name=public_keys,json=publicKeys,proto3" json:"public_keys,omitempty" ssz-size:"?,48"` unknownFields protoimpl.UnknownFields sizeCache protoimpl.SizeCache @@ -1191,11 +1191,11 @@ func (*DutiesRequest) Descriptor() ([]byte, []int) { return file_proto_prysm_v1alpha1_validator_proto_rawDescGZIP(), []int{18} } -func (x *DutiesRequest) GetEpoch() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch { +func (x *DutiesRequest) GetEpoch() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch { if x != nil { return x.Epoch } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch(0) } func (x *DutiesRequest) GetPublicKeys() [][]byte { @@ -1346,7 +1346,7 @@ func (x *DutiesV2Response) GetCurrentDutyDependentRoot() []byte { // Deprecated: Marked as deprecated in proto/prysm/v1alpha1/validator.proto. type BlockRequest struct { state protoimpl.MessageState `protogen:"open.v1"` - Slot github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot `protobuf:"varint,1,opt,name=slot,proto3" json:"slot,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot"` + Slot github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot `protobuf:"varint,1,opt,name=slot,proto3" json:"slot,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Slot"` RandaoReveal []byte `protobuf:"bytes,2,opt,name=randao_reveal,json=randaoReveal,proto3" json:"randao_reveal,omitempty" ssz-size:"48"` Graffiti []byte `protobuf:"bytes,3,opt,name=graffiti,proto3" json:"graffiti,omitempty" ssz-size:"32"` SkipMevBoost bool `protobuf:"varint,4,opt,name=skip_mev_boost,json=skipMevBoost,proto3" json:"skip_mev_boost,omitempty"` @@ -1385,11 +1385,11 @@ func (*BlockRequest) Descriptor() ([]byte, []int) { return file_proto_prysm_v1alpha1_validator_proto_rawDescGZIP(), []int{21} } -func (x *BlockRequest) GetSlot() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot { +func (x *BlockRequest) GetSlot() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot { if x != nil { return x.Slot } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot(0) } func (x *BlockRequest) GetRandaoReveal() []byte { @@ -1513,8 +1513,8 @@ func (x *ProposeExitResponse) GetExitRoot() []byte { // Deprecated: Marked as deprecated in proto/prysm/v1alpha1/validator.proto. 
type AttestationDataRequest struct { state protoimpl.MessageState `protogen:"open.v1"` - Slot github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot `protobuf:"varint,1,opt,name=slot,proto3" json:"slot,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot"` - CommitteeIndex github_com_OffchainLabs_prysm_v6_consensus_types_primitives.CommitteeIndex `protobuf:"varint,2,opt,name=committee_index,json=committeeIndex,proto3" json:"committee_index,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.CommitteeIndex"` + Slot github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot `protobuf:"varint,1,opt,name=slot,proto3" json:"slot,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Slot"` + CommitteeIndex github_com_OffchainLabs_prysm_v7_consensus_types_primitives.CommitteeIndex `protobuf:"varint,2,opt,name=committee_index,json=committeeIndex,proto3" json:"committee_index,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.CommitteeIndex"` unknownFields protoimpl.UnknownFields sizeCache protoimpl.SizeCache } @@ -1549,18 +1549,18 @@ func (*AttestationDataRequest) Descriptor() ([]byte, []int) { return file_proto_prysm_v1alpha1_validator_proto_rawDescGZIP(), []int{24} } -func (x *AttestationDataRequest) GetSlot() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot { +func (x *AttestationDataRequest) GetSlot() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot { if x != nil { return x.Slot } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot(0) } -func (x *AttestationDataRequest) GetCommitteeIndex() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.CommitteeIndex { +func (x *AttestationDataRequest) GetCommitteeIndex() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.CommitteeIndex { if x != nil { return x.CommitteeIndex } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.CommitteeIndex(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.CommitteeIndex(0) } // Deprecated: Marked as deprecated in proto/prysm/v1alpha1/validator.proto. @@ -1611,8 +1611,8 @@ func (x *AttestResponse) GetAttestationDataRoot() []byte { // Deprecated: Marked as deprecated in proto/prysm/v1alpha1/validator.proto. 
type AggregateSelectionRequest struct { state protoimpl.MessageState `protogen:"open.v1"` - Slot github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot `protobuf:"varint,1,opt,name=slot,proto3" json:"slot,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot"` - CommitteeIndex github_com_OffchainLabs_prysm_v6_consensus_types_primitives.CommitteeIndex `protobuf:"varint,2,opt,name=committee_index,json=committeeIndex,proto3" json:"committee_index,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.CommitteeIndex"` + Slot github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot `protobuf:"varint,1,opt,name=slot,proto3" json:"slot,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Slot"` + CommitteeIndex github_com_OffchainLabs_prysm_v7_consensus_types_primitives.CommitteeIndex `protobuf:"varint,2,opt,name=committee_index,json=committeeIndex,proto3" json:"committee_index,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.CommitteeIndex"` PublicKey []byte `protobuf:"bytes,3,opt,name=public_key,json=publicKey,proto3" json:"public_key,omitempty" spec-name:"pubkey" ssz-size:"48"` SlotSignature []byte `protobuf:"bytes,4,opt,name=slot_signature,json=slotSignature,proto3" json:"slot_signature,omitempty" ssz-size:"96"` unknownFields protoimpl.UnknownFields @@ -1649,18 +1649,18 @@ func (*AggregateSelectionRequest) Descriptor() ([]byte, []int) { return file_proto_prysm_v1alpha1_validator_proto_rawDescGZIP(), []int{26} } -func (x *AggregateSelectionRequest) GetSlot() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot { +func (x *AggregateSelectionRequest) GetSlot() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot { if x != nil { return x.Slot } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot(0) } -func (x *AggregateSelectionRequest) GetCommitteeIndex() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.CommitteeIndex { +func (x *AggregateSelectionRequest) GetCommitteeIndex() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.CommitteeIndex { if x != nil { return x.CommitteeIndex } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.CommitteeIndex(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.CommitteeIndex(0) } func (x *AggregateSelectionRequest) GetPublicKey() []byte { @@ -1905,8 +1905,8 @@ func (x *SignedAggregateSubmitResponse) GetAttestationDataRoot() []byte { // Deprecated: Marked as deprecated in proto/prysm/v1alpha1/validator.proto. 
type CommitteeSubnetsSubscribeRequest struct { state protoimpl.MessageState `protogen:"open.v1"` - Slots []github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot `protobuf:"varint,1,rep,packed,name=slots,proto3" json:"slots,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot"` - CommitteeIds []github_com_OffchainLabs_prysm_v6_consensus_types_primitives.CommitteeIndex `protobuf:"varint,2,rep,packed,name=committee_ids,json=committeeIds,proto3" json:"committee_ids,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.CommitteeIndex"` + Slots []github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot `protobuf:"varint,1,rep,packed,name=slots,proto3" json:"slots,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Slot"` + CommitteeIds []github_com_OffchainLabs_prysm_v7_consensus_types_primitives.CommitteeIndex `protobuf:"varint,2,rep,packed,name=committee_ids,json=committeeIds,proto3" json:"committee_ids,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.CommitteeIndex"` IsAggregator []bool `protobuf:"varint,3,rep,packed,name=is_aggregator,json=isAggregator,proto3" json:"is_aggregator,omitempty"` unknownFields protoimpl.UnknownFields sizeCache protoimpl.SizeCache @@ -1942,18 +1942,18 @@ func (*CommitteeSubnetsSubscribeRequest) Descriptor() ([]byte, []int) { return file_proto_prysm_v1alpha1_validator_proto_rawDescGZIP(), []int{32} } -func (x *CommitteeSubnetsSubscribeRequest) GetSlots() []github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot { +func (x *CommitteeSubnetsSubscribeRequest) GetSlots() []github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot { if x != nil { return x.Slots } - return []github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot(nil) + return []github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot(nil) } -func (x *CommitteeSubnetsSubscribeRequest) GetCommitteeIds() []github_com_OffchainLabs_prysm_v6_consensus_types_primitives.CommitteeIndex { +func (x *CommitteeSubnetsSubscribeRequest) GetCommitteeIds() []github_com_OffchainLabs_prysm_v7_consensus_types_primitives.CommitteeIndex { if x != nil { return x.CommitteeIds } - return []github_com_OffchainLabs_prysm_v6_consensus_types_primitives.CommitteeIndex(nil) + return []github_com_OffchainLabs_prysm_v7_consensus_types_primitives.CommitteeIndex(nil) } func (x *CommitteeSubnetsSubscribeRequest) GetIsAggregator() []bool { @@ -2087,9 +2087,9 @@ func (x *ValidatorParticipation) GetPreviousEpochHeadAttestingGwei() uint64 { type ValidatorIdentity struct { state protoimpl.MessageState `protogen:"open.v1"` - Index github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex `protobuf:"varint,1,opt,name=index,proto3" json:"index,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.ValidatorIndex"` + Index github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex `protobuf:"varint,1,opt,name=index,proto3" json:"index,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.ValidatorIndex"` Pubkey []byte `protobuf:"bytes,2,opt,name=pubkey,proto3" json:"pubkey,omitempty" ssz-size:"48"` - ActivationEpoch github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch `protobuf:"varint,3,opt,name=activation_epoch,json=activationEpoch,proto3" json:"activation_epoch,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Epoch"` + ActivationEpoch 
github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch `protobuf:"varint,3,opt,name=activation_epoch,json=activationEpoch,proto3" json:"activation_epoch,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Epoch"` unknownFields protoimpl.UnknownFields sizeCache protoimpl.SizeCache } @@ -2124,11 +2124,11 @@ func (*ValidatorIdentity) Descriptor() ([]byte, []int) { return file_proto_prysm_v1alpha1_validator_proto_rawDescGZIP(), []int{34} } -func (x *ValidatorIdentity) GetIndex() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex { +func (x *ValidatorIdentity) GetIndex() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex { if x != nil { return x.Index } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex(0) } func (x *ValidatorIdentity) GetPubkey() []byte { @@ -2138,11 +2138,11 @@ func (x *ValidatorIdentity) GetPubkey() []byte { return nil } -func (x *ValidatorIdentity) GetActivationEpoch() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch { +func (x *ValidatorIdentity) GetActivationEpoch() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch { if x != nil { return x.ActivationEpoch } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch(0) } // Deprecated: Marked as deprecated in proto/prysm/v1alpha1/validator.proto. @@ -2517,7 +2517,7 @@ func (x *AssignValidatorToSubnetRequest) GetStatus() ValidatorStatus { type AggregatedSigAndAggregationBitsRequest struct { state protoimpl.MessageState `protogen:"open.v1"` Msgs []*SyncCommitteeMessage `protobuf:"bytes,1,rep,name=msgs,proto3" json:"msgs,omitempty"` - Slot github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot `protobuf:"varint,2,opt,name=slot,proto3" json:"slot,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot"` + Slot github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot `protobuf:"varint,2,opt,name=slot,proto3" json:"slot,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Slot"` SubnetId uint64 `protobuf:"varint,3,opt,name=subnet_id,json=subnetId,proto3" json:"subnet_id,omitempty"` BlockRoot []byte `protobuf:"bytes,4,opt,name=block_root,json=blockRoot,proto3" json:"block_root,omitempty" ssz-size:"32"` unknownFields protoimpl.UnknownFields @@ -2561,11 +2561,11 @@ func (x *AggregatedSigAndAggregationBitsRequest) GetMsgs() []*SyncCommitteeMessa return nil } -func (x *AggregatedSigAndAggregationBitsRequest) GetSlot() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot { +func (x *AggregatedSigAndAggregationBitsRequest) GetSlot() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot { if x != nil { return x.Slot } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot(0) } func (x *AggregatedSigAndAggregationBitsRequest) GetSubnetId() uint64 { @@ -2639,7 +2639,7 @@ type ValidatorActivationResponse_Status struct { state protoimpl.MessageState `protogen:"open.v1"` PublicKey []byte `protobuf:"bytes,1,opt,name=public_key,json=publicKey,proto3" json:"public_key,omitempty"` Status *ValidatorStatusResponse `protobuf:"bytes,2,opt,name=status,proto3" json:"status,omitempty"` - Index 
github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex `protobuf:"varint,3,opt,name=index,proto3" json:"index,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.ValidatorIndex"` + Index github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex `protobuf:"varint,3,opt,name=index,proto3" json:"index,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.ValidatorIndex"` unknownFields protoimpl.UnknownFields sizeCache protoimpl.SizeCache } @@ -2688,22 +2688,22 @@ func (x *ValidatorActivationResponse_Status) GetStatus() *ValidatorStatusRespons return nil } -func (x *ValidatorActivationResponse_Status) GetIndex() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex { +func (x *ValidatorActivationResponse_Status) GetIndex() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex { if x != nil { return x.Index } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex(0) } type DutiesResponse_Duty struct { state protoimpl.MessageState `protogen:"open.v1"` - Committee []github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex `protobuf:"varint,1,rep,packed,name=committee,proto3" json:"committee,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.ValidatorIndex"` - CommitteeIndex github_com_OffchainLabs_prysm_v6_consensus_types_primitives.CommitteeIndex `protobuf:"varint,2,opt,name=committee_index,json=committeeIndex,proto3" json:"committee_index,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.CommitteeIndex"` - AttesterSlot github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot `protobuf:"varint,3,opt,name=attester_slot,json=attesterSlot,proto3" json:"attester_slot,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot"` - ProposerSlots []github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot `protobuf:"varint,4,rep,packed,name=proposer_slots,json=proposerSlots,proto3" json:"proposer_slots,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot"` + Committee []github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex `protobuf:"varint,1,rep,packed,name=committee,proto3" json:"committee,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.ValidatorIndex"` + CommitteeIndex github_com_OffchainLabs_prysm_v7_consensus_types_primitives.CommitteeIndex `protobuf:"varint,2,opt,name=committee_index,json=committeeIndex,proto3" json:"committee_index,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.CommitteeIndex"` + AttesterSlot github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot `protobuf:"varint,3,opt,name=attester_slot,json=attesterSlot,proto3" json:"attester_slot,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Slot"` + ProposerSlots []github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot `protobuf:"varint,4,rep,packed,name=proposer_slots,json=proposerSlots,proto3" json:"proposer_slots,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Slot"` PublicKey []byte `protobuf:"bytes,5,opt,name=public_key,json=publicKey,proto3" json:"public_key,omitempty" ssz-size:"48"` Status ValidatorStatus 
`protobuf:"varint,6,opt,name=status,proto3,enum=ethereum.eth.v1alpha1.ValidatorStatus" json:"status,omitempty"` - ValidatorIndex github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex `protobuf:"varint,7,opt,name=validator_index,json=validatorIndex,proto3" json:"validator_index,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.ValidatorIndex"` + ValidatorIndex github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex `protobuf:"varint,7,opt,name=validator_index,json=validatorIndex,proto3" json:"validator_index,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.ValidatorIndex"` IsSyncCommittee bool `protobuf:"varint,8,opt,name=is_sync_committee,json=isSyncCommittee,proto3" json:"is_sync_committee,omitempty"` CommitteesAtSlot uint64 `protobuf:"varint,9,opt,name=committees_at_slot,json=committeesAtSlot,proto3" json:"committees_at_slot,omitempty"` unknownFields protoimpl.UnknownFields @@ -2740,32 +2740,32 @@ func (*DutiesResponse_Duty) Descriptor() ([]byte, []int) { return file_proto_prysm_v1alpha1_validator_proto_rawDescGZIP(), []int{19, 0} } -func (x *DutiesResponse_Duty) GetCommittee() []github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex { +func (x *DutiesResponse_Duty) GetCommittee() []github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex { if x != nil { return x.Committee } - return []github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex(nil) + return []github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex(nil) } -func (x *DutiesResponse_Duty) GetCommitteeIndex() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.CommitteeIndex { +func (x *DutiesResponse_Duty) GetCommitteeIndex() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.CommitteeIndex { if x != nil { return x.CommitteeIndex } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.CommitteeIndex(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.CommitteeIndex(0) } -func (x *DutiesResponse_Duty) GetAttesterSlot() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot { +func (x *DutiesResponse_Duty) GetAttesterSlot() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot { if x != nil { return x.AttesterSlot } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot(0) } -func (x *DutiesResponse_Duty) GetProposerSlots() []github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot { +func (x *DutiesResponse_Duty) GetProposerSlots() []github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot { if x != nil { return x.ProposerSlots } - return []github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot(nil) + return []github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot(nil) } func (x *DutiesResponse_Duty) GetPublicKey() []byte { @@ -2782,11 +2782,11 @@ func (x *DutiesResponse_Duty) GetStatus() ValidatorStatus { return ValidatorStatus_UNKNOWN_STATUS } -func (x *DutiesResponse_Duty) GetValidatorIndex() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex { +func (x *DutiesResponse_Duty) GetValidatorIndex() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex { if x != nil { return x.ValidatorIndex } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex(0) + return 
github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex(0) } func (x *DutiesResponse_Duty) GetIsSyncCommittee() bool { @@ -2806,14 +2806,14 @@ func (x *DutiesResponse_Duty) GetCommitteesAtSlot() uint64 { type DutiesV2Response_Duty struct { state protoimpl.MessageState `protogen:"open.v1"` CommitteeLength uint64 `protobuf:"varint,1,opt,name=committee_length,json=committeeLength,proto3" json:"committee_length,omitempty"` - CommitteeIndex github_com_OffchainLabs_prysm_v6_consensus_types_primitives.CommitteeIndex `protobuf:"varint,2,opt,name=committee_index,json=committeeIndex,proto3" json:"committee_index,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.CommitteeIndex"` + CommitteeIndex github_com_OffchainLabs_prysm_v7_consensus_types_primitives.CommitteeIndex `protobuf:"varint,2,opt,name=committee_index,json=committeeIndex,proto3" json:"committee_index,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.CommitteeIndex"` CommitteesAtSlot uint64 `protobuf:"varint,3,opt,name=committees_at_slot,json=committeesAtSlot,proto3" json:"committees_at_slot,omitempty"` ValidatorCommitteeIndex uint64 `protobuf:"varint,4,opt,name=validator_committee_index,json=validatorCommitteeIndex,proto3" json:"validator_committee_index,omitempty"` - AttesterSlot github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot `protobuf:"varint,5,opt,name=attester_slot,json=attesterSlot,proto3" json:"attester_slot,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot"` - ProposerSlots []github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot `protobuf:"varint,6,rep,packed,name=proposer_slots,json=proposerSlots,proto3" json:"proposer_slots,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot"` + AttesterSlot github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot `protobuf:"varint,5,opt,name=attester_slot,json=attesterSlot,proto3" json:"attester_slot,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Slot"` + ProposerSlots []github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot `protobuf:"varint,6,rep,packed,name=proposer_slots,json=proposerSlots,proto3" json:"proposer_slots,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Slot"` PublicKey []byte `protobuf:"bytes,7,opt,name=public_key,json=publicKey,proto3" json:"public_key,omitempty" ssz-size:"48"` Status ValidatorStatus `protobuf:"varint,8,opt,name=status,proto3,enum=ethereum.eth.v1alpha1.ValidatorStatus" json:"status,omitempty"` - ValidatorIndex github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex `protobuf:"varint,9,opt,name=validator_index,json=validatorIndex,proto3" json:"validator_index,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.ValidatorIndex"` + ValidatorIndex github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex `protobuf:"varint,9,opt,name=validator_index,json=validatorIndex,proto3" json:"validator_index,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.ValidatorIndex"` IsSyncCommittee bool `protobuf:"varint,10,opt,name=is_sync_committee,json=isSyncCommittee,proto3" json:"is_sync_committee,omitempty"` unknownFields protoimpl.UnknownFields sizeCache protoimpl.SizeCache @@ -2856,11 +2856,11 @@ func (x *DutiesV2Response_Duty) GetCommitteeLength() uint64 { return 0 } -func (x *DutiesV2Response_Duty) 
GetCommitteeIndex() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.CommitteeIndex { +func (x *DutiesV2Response_Duty) GetCommitteeIndex() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.CommitteeIndex { if x != nil { return x.CommitteeIndex } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.CommitteeIndex(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.CommitteeIndex(0) } func (x *DutiesV2Response_Duty) GetCommitteesAtSlot() uint64 { @@ -2877,18 +2877,18 @@ func (x *DutiesV2Response_Duty) GetValidatorCommitteeIndex() uint64 { return 0 } -func (x *DutiesV2Response_Duty) GetAttesterSlot() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot { +func (x *DutiesV2Response_Duty) GetAttesterSlot() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot { if x != nil { return x.AttesterSlot } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot(0) } -func (x *DutiesV2Response_Duty) GetProposerSlots() []github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot { +func (x *DutiesV2Response_Duty) GetProposerSlots() []github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot { if x != nil { return x.ProposerSlots } - return []github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Slot(nil) + return []github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Slot(nil) } func (x *DutiesV2Response_Duty) GetPublicKey() []byte { @@ -2905,11 +2905,11 @@ func (x *DutiesV2Response_Duty) GetStatus() ValidatorStatus { return ValidatorStatus_UNKNOWN_STATUS } -func (x *DutiesV2Response_Duty) GetValidatorIndex() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex { +func (x *DutiesV2Response_Duty) GetValidatorIndex() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex { if x != nil { return x.ValidatorIndex } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex(0) } func (x *DutiesV2Response_Duty) GetIsSyncCommittee() bool { @@ -2922,7 +2922,7 @@ func (x *DutiesV2Response_Duty) GetIsSyncCommittee() bool { type DoppelGangerRequest_ValidatorRequest struct { state protoimpl.MessageState `protogen:"open.v1"` PublicKey []byte `protobuf:"bytes,1,opt,name=public_key,json=publicKey,proto3" json:"public_key,omitempty" spec-name:"pubkey" ssz-size:"48"` - Epoch github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch `protobuf:"varint,3,opt,name=epoch,proto3" json:"epoch,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Epoch"` + Epoch github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch `protobuf:"varint,3,opt,name=epoch,proto3" json:"epoch,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Epoch"` SignedRoot []byte `protobuf:"bytes,2,opt,name=signed_root,json=signedRoot,proto3" json:"signed_root,omitempty" ssz-size:"32"` unknownFields protoimpl.UnknownFields sizeCache protoimpl.SizeCache @@ -2965,11 +2965,11 @@ func (x *DoppelGangerRequest_ValidatorRequest) GetPublicKey() []byte { return nil } -func (x *DoppelGangerRequest_ValidatorRequest) GetEpoch() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch { +func (x *DoppelGangerRequest_ValidatorRequest) GetEpoch() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch { if x != nil { return x.Epoch } - 
return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.Epoch(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.Epoch(0) } func (x *DoppelGangerRequest_ValidatorRequest) GetSignedRoot() []byte { @@ -3034,7 +3034,7 @@ func (x *DoppelGangerResponse_ValidatorResponse) GetDuplicateExists() bool { type PrepareBeaconProposerRequest_FeeRecipientContainer struct { state protoimpl.MessageState `protogen:"open.v1"` FeeRecipient []byte `protobuf:"bytes,1,opt,name=fee_recipient,json=feeRecipient,proto3" json:"fee_recipient,omitempty" ssz-size:"20"` - ValidatorIndex github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex `protobuf:"varint,2,opt,name=validator_index,json=validatorIndex,proto3" json:"validator_index,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.ValidatorIndex"` + ValidatorIndex github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex `protobuf:"varint,2,opt,name=validator_index,json=validatorIndex,proto3" json:"validator_index,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.ValidatorIndex"` unknownFields protoimpl.UnknownFields sizeCache protoimpl.SizeCache } @@ -3076,11 +3076,11 @@ func (x *PrepareBeaconProposerRequest_FeeRecipientContainer) GetFeeRecipient() [ return nil } -func (x *PrepareBeaconProposerRequest_FeeRecipientContainer) GetValidatorIndex() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex { +func (x *PrepareBeaconProposerRequest_FeeRecipientContainer) GetValidatorIndex() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex { if x != nil { return x.ValidatorIndex } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex(0) } var File_proto_prysm_v1alpha1_validator_proto protoreflect.FileDescriptor @@ -3119,7 +3119,7 @@ var file_proto_prysm_v1alpha1_validator_proto_rawDesc = []byte{ 0x62, 0x6c, 0x69, 0x63, 0x4b, 0x65, 0x79, 0x12, 0x58, 0x0a, 0x04, 0x73, 0x6c, 0x6f, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x04, 0x42, 0x44, 0x82, 0xb5, 0x18, 0x40, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, - 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, + 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x53, 0x6c, 0x6f, 0x74, 0x52, 0x04, 0x73, 0x6c, 0x6f, 0x74, 0x3a, 0x02, 0x18, 0x01, 0x22, 0xce, 0x01, 0x0a, 0x20, 0x53, 0x79, 0x6e, 0x63, 0x43, 0x6f, @@ -3127,7 +3127,7 @@ var file_proto_prysm_v1alpha1_validator_proto_rawDesc = []byte{ 0x69, 0x6f, 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x58, 0x0a, 0x04, 0x73, 0x6c, 0x6f, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, 0x42, 0x44, 0x82, 0xb5, 0x18, 0x40, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, - 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, + 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x53, 0x6c, 0x6f, 0x74, 
0x52, 0x04, 0x73, 0x6c, 0x6f, 0x74, 0x12, 0x2f, 0x0a, 0x0a, 0x70, 0x75, 0x62, 0x6c, 0x69, 0x63, 0x5f, 0x6b, @@ -3140,7 +3140,7 @@ var file_proto_prysm_v1alpha1_validator_proto_rawDesc = []byte{ 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x68, 0x0a, 0x07, 0x69, 0x6e, 0x64, 0x69, 0x63, 0x65, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x04, 0x42, 0x4e, 0x82, 0xb5, 0x18, 0x4a, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, - 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, + 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x43, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x74, 0x65, 0x65, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x52, 0x07, 0x69, 0x6e, 0x64, 0x69, 0x63, @@ -3149,7 +3149,7 @@ var file_proto_prysm_v1alpha1_validator_proto_rawDesc = []byte{ 0x0a, 0x04, 0x73, 0x6c, 0x6f, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, 0x42, 0x44, 0x82, 0xb5, 0x18, 0x40, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, - 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, + 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x53, 0x6c, 0x6f, 0x74, 0x52, 0x04, 0x73, 0x6c, 0x6f, 0x74, 0x12, 0x47, 0x0a, 0x1c, 0x70, 0x72, 0x65, 0x76, 0x69, 0x6f, 0x75, 0x73, 0x5f, 0x64, 0x75, 0x74, 0x79, 0x5f, 0x64, 0x65, 0x70, 0x65, 0x6e, 0x64, @@ -3204,7 +3204,7 @@ var file_proto_prysm_v1alpha1_validator_proto_rawDesc = []byte{ 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x5b, 0x0a, 0x05, 0x65, 0x70, 0x6f, 0x63, 0x68, 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, 0x42, 0x45, 0x82, 0xb5, 0x18, 0x41, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, - 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, + 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x45, 0x70, 0x6f, 0x63, 0x68, 0x52, 0x05, 0x65, 0x70, 0x6f, 0x63, 0x68, 0x12, 0x16, 0x0a, 0x06, 0x64, 0x6f, 0x6d, 0x61, 0x69, 0x6e, 0x18, 0x02, @@ -3235,7 +3235,7 @@ var file_proto_prysm_v1alpha1_validator_proto_rawDesc = []byte{ 0x06, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, 0x12, 0x64, 0x0a, 0x05, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x18, 0x03, 0x20, 0x01, 0x28, 0x04, 0x42, 0x4e, 0x82, 0xb5, 0x18, 0x4a, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, - 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, + 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x56, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x52, 0x05, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x3a, 0x02, 0x18, @@ -3262,7 
+3262,7 @@ var file_proto_prysm_v1alpha1_validator_proto_rawDesc = []byte{ 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x64, 0x0a, 0x05, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, 0x42, 0x4e, 0x82, 0xb5, 0x18, 0x4a, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, - 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, + 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x56, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x52, 0x05, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x3a, 0x02, 0x18, 0x01, 0x22, @@ -3284,14 +3284,14 @@ var file_proto_prysm_v1alpha1_validator_proto_rawDesc = []byte{ 0x5f, 0x73, 0x6c, 0x6f, 0x74, 0x18, 0x03, 0x20, 0x01, 0x28, 0x04, 0x42, 0x44, 0x82, 0xb5, 0x18, 0x40, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, - 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, + 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x53, 0x6c, 0x6f, 0x74, 0x52, 0x14, 0x64, 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x49, 0x6e, 0x63, 0x6c, 0x75, 0x73, 0x69, 0x6f, 0x6e, 0x53, 0x6c, 0x6f, 0x74, 0x12, 0x70, 0x0a, 0x10, 0x61, 0x63, 0x74, 0x69, 0x76, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x65, 0x70, 0x6f, 0x63, 0x68, 0x18, 0x04, 0x20, 0x01, 0x28, 0x04, 0x42, 0x45, 0x82, 0xb5, 0x18, 0x41, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, - 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, + 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x45, 0x70, 0x6f, 0x63, 0x68, 0x52, 0x0f, 0x61, 0x63, 0x74, 0x69, 0x76, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x45, 0x70, 0x6f, 0x63, 0x68, 0x12, 0x3f, 0x0a, 0x1c, 0x70, 0x6f, 0x73, @@ -3318,7 +3318,7 @@ var file_proto_prysm_v1alpha1_validator_proto_rawDesc = []byte{ 0x75, 0x73, 0x65, 0x73, 0x12, 0x68, 0x0a, 0x07, 0x69, 0x6e, 0x64, 0x69, 0x63, 0x65, 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, 0x04, 0x42, 0x4e, 0x82, 0xb5, 0x18, 0x4a, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, - 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, + 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x56, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x52, 0x07, 0x69, 0x6e, 0x64, 0x69, 0x63, 0x65, 0x73, 0x3a, 0x02, @@ -3326,7 +3326,7 @@ var file_proto_prysm_v1alpha1_validator_proto_rawDesc = []byte{ 0x75, 0x65, 0x73, 0x74, 0x12, 0x5b, 0x0a, 0x05, 0x65, 0x70, 0x6f, 0x63, 0x68, 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, 0x42, 0x45, 0x82, 
0xb5, 0x18, 0x41, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, - 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, + 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x45, 0x70, 0x6f, 0x63, 0x68, 0x52, 0x05, 0x65, 0x70, 0x6f, 0x63, 0x68, 0x12, 0x29, 0x0a, 0x0b, 0x70, 0x75, 0x62, 0x6c, 0x69, 0x63, 0x5f, 0x6b, 0x65, 0x79, 0x73, @@ -3357,14 +3357,14 @@ var file_proto_prysm_v1alpha1_validator_proto_rawDesc = []byte{ 0x74, 0x79, 0x12, 0x6c, 0x0a, 0x09, 0x63, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x74, 0x65, 0x65, 0x18, 0x01, 0x20, 0x03, 0x28, 0x04, 0x42, 0x4e, 0x82, 0xb5, 0x18, 0x4a, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, - 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, + 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x56, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x52, 0x09, 0x63, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x74, 0x65, 0x65, 0x12, 0x77, 0x0a, 0x0f, 0x63, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x74, 0x65, 0x65, 0x5f, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x18, 0x02, 0x20, 0x01, 0x28, 0x04, 0x42, 0x4e, 0x82, 0xb5, 0x18, 0x4a, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, - 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, + 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x43, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x74, 0x65, 0x65, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x52, 0x0e, 0x63, 0x6f, 0x6d, 0x6d, 0x69, @@ -3372,14 +3372,14 @@ var file_proto_prysm_v1alpha1_validator_proto_rawDesc = []byte{ 0x65, 0x73, 0x74, 0x65, 0x72, 0x5f, 0x73, 0x6c, 0x6f, 0x74, 0x18, 0x03, 0x20, 0x01, 0x28, 0x04, 0x42, 0x44, 0x82, 0xb5, 0x18, 0x40, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, - 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, + 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x53, 0x6c, 0x6f, 0x74, 0x52, 0x0c, 0x61, 0x74, 0x74, 0x65, 0x73, 0x74, 0x65, 0x72, 0x53, 0x6c, 0x6f, 0x74, 0x12, 0x6b, 0x0a, 0x0e, 0x70, 0x72, 0x6f, 0x70, 0x6f, 0x73, 0x65, 0x72, 0x5f, 0x73, 0x6c, 0x6f, 0x74, 0x73, 0x18, 0x04, 0x20, 0x03, 0x28, 0x04, 0x42, 0x44, 0x82, 0xb5, 0x18, 0x40, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, - 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, + 0x76, 0x37, 0x2f, 0x63, 0x6f, 
0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x53, 0x6c, 0x6f, 0x74, 0x52, 0x0d, 0x70, 0x72, 0x6f, 0x70, 0x6f, 0x73, 0x65, 0x72, 0x53, 0x6c, 0x6f, 0x74, 0x73, 0x12, 0x25, 0x0a, 0x0a, 0x70, 0x75, 0x62, 0x6c, 0x69, 0x63, 0x5f, 0x6b, 0x65, 0x79, 0x18, @@ -3392,7 +3392,7 @@ var file_proto_prysm_v1alpha1_validator_proto_rawDesc = []byte{ 0x64, 0x61, 0x74, 0x6f, 0x72, 0x5f, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x18, 0x07, 0x20, 0x01, 0x28, 0x04, 0x42, 0x4e, 0x82, 0xb5, 0x18, 0x4a, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, - 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, + 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x56, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x52, 0x0e, 0x76, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, 0x49, 0x6e, 0x64, 0x65, @@ -3431,7 +3431,7 @@ var file_proto_prysm_v1alpha1_validator_proto_rawDesc = []byte{ 0x69, 0x74, 0x74, 0x65, 0x65, 0x5f, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x18, 0x02, 0x20, 0x01, 0x28, 0x04, 0x42, 0x4e, 0x82, 0xb5, 0x18, 0x4a, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, - 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, + 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x43, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x74, 0x65, 0x65, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x52, 0x0e, 0x63, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x74, 0x65, 0x65, 0x49, 0x6e, 0x64, 0x65, @@ -3445,14 +3445,14 @@ var file_proto_prysm_v1alpha1_validator_proto_rawDesc = []byte{ 0x74, 0x74, 0x65, 0x73, 0x74, 0x65, 0x72, 0x5f, 0x73, 0x6c, 0x6f, 0x74, 0x18, 0x05, 0x20, 0x01, 0x28, 0x04, 0x42, 0x44, 0x82, 0xb5, 0x18, 0x40, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, - 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, + 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x53, 0x6c, 0x6f, 0x74, 0x52, 0x0c, 0x61, 0x74, 0x74, 0x65, 0x73, 0x74, 0x65, 0x72, 0x53, 0x6c, 0x6f, 0x74, 0x12, 0x6b, 0x0a, 0x0e, 0x70, 0x72, 0x6f, 0x70, 0x6f, 0x73, 0x65, 0x72, 0x5f, 0x73, 0x6c, 0x6f, 0x74, 0x73, 0x18, 0x06, 0x20, 0x03, 0x28, 0x04, 0x42, 0x44, 0x82, 0xb5, 0x18, 0x40, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, - 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, + 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x53, 0x6c, 0x6f, 0x74, 0x52, 0x0d, 0x70, 
0x72, 0x6f, 0x70, 0x6f, 0x73, 0x65, 0x72, 0x53, 0x6c, 0x6f, 0x74, 0x73, 0x12, 0x25, 0x0a, 0x0a, 0x70, 0x75, 0x62, 0x6c, 0x69, 0x63, 0x5f, 0x6b, 0x65, @@ -3465,7 +3465,7 @@ var file_proto_prysm_v1alpha1_validator_proto_rawDesc = []byte{ 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, 0x5f, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x18, 0x09, 0x20, 0x01, 0x28, 0x04, 0x42, 0x4e, 0x82, 0xb5, 0x18, 0x4a, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, - 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, + 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x56, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x52, 0x0e, 0x76, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, 0x49, 0x6e, @@ -3476,7 +3476,7 @@ var file_proto_prysm_v1alpha1_validator_proto_rawDesc = []byte{ 0x75, 0x65, 0x73, 0x74, 0x12, 0x58, 0x0a, 0x04, 0x73, 0x6c, 0x6f, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, 0x42, 0x44, 0x82, 0xb5, 0x18, 0x40, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, - 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, + 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x53, 0x6c, 0x6f, 0x74, 0x52, 0x04, 0x73, 0x6c, 0x6f, 0x74, 0x12, 0x2b, 0x0a, 0x0d, 0x72, 0x61, 0x6e, 0x64, 0x61, 0x6f, 0x5f, 0x72, 0x65, 0x76, 0x65, 0x61, 0x6c, 0x18, @@ -3504,13 +3504,13 @@ var file_proto_prysm_v1alpha1_validator_proto_rawDesc = []byte{ 0x73, 0x74, 0x12, 0x58, 0x0a, 0x04, 0x73, 0x6c, 0x6f, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, 0x42, 0x44, 0x82, 0xb5, 0x18, 0x40, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, - 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, + 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x53, 0x6c, 0x6f, 0x74, 0x52, 0x04, 0x73, 0x6c, 0x6f, 0x74, 0x12, 0x77, 0x0a, 0x0f, 0x63, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x74, 0x65, 0x65, 0x5f, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x18, 0x02, 0x20, 0x01, 0x28, 0x04, 0x42, 0x4e, 0x82, 0xb5, 0x18, 0x4a, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, - 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, + 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x43, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x74, 0x65, 0x65, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x52, 0x0e, 0x63, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x74, 0x65, 0x65, @@ -3524,14 +3524,14 @@ var file_proto_prysm_v1alpha1_validator_proto_rawDesc = []byte{ 0x6f, 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 
0x58, 0x0a, 0x04, 0x73, 0x6c, 0x6f, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, 0x42, 0x44, 0x82, 0xb5, 0x18, 0x40, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, - 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, + 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x53, 0x6c, 0x6f, 0x74, 0x52, 0x04, 0x73, 0x6c, 0x6f, 0x74, 0x12, 0x77, 0x0a, 0x0f, 0x63, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x74, 0x65, 0x65, 0x5f, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x18, 0x02, 0x20, 0x01, 0x28, 0x04, 0x42, 0x4e, 0x82, 0xb5, 0x18, 0x4a, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, - 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, + 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x43, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x74, 0x65, 0x65, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x52, 0x0e, 0x63, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x74, 0x65, 0x65, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x12, 0x2f, 0x0a, 0x0a, @@ -3591,13 +3591,13 @@ var file_proto_prysm_v1alpha1_validator_proto_rawDesc = []byte{ 0x73, 0x6c, 0x6f, 0x74, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x04, 0x42, 0x44, 0x82, 0xb5, 0x18, 0x40, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, - 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, + 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x53, 0x6c, 0x6f, 0x74, 0x52, 0x05, 0x73, 0x6c, 0x6f, 0x74, 0x73, 0x12, 0x73, 0x0a, 0x0d, 0x63, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x74, 0x65, 0x65, 0x5f, 0x69, 0x64, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x04, 0x42, 0x4e, 0x82, 0xb5, 0x18, 0x4a, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, - 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, + 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x43, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x74, 0x65, 0x65, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x52, 0x0c, 0x63, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x74, 0x65, 0x65, 0x49, 0x64, 0x73, 0x12, 0x23, 0x0a, @@ -3649,7 +3649,7 @@ var file_proto_prysm_v1alpha1_validator_proto_rawDesc = []byte{ 0x05, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, 0x42, 0x4e, 0x82, 0xb5, 0x18, 0x4a, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, - 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, + 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 
0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x56, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x52, 0x05, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x12, 0x1e, 0x0a, 0x06, 0x70, 0x75, 0x62, 0x6b, 0x65, 0x79, 0x18, 0x02, 0x20, @@ -3658,7 +3658,7 @@ var file_proto_prysm_v1alpha1_validator_proto_rawDesc = []byte{ 0x6e, 0x5f, 0x65, 0x70, 0x6f, 0x63, 0x68, 0x18, 0x03, 0x20, 0x01, 0x28, 0x04, 0x42, 0x45, 0x82, 0xb5, 0x18, 0x41, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, - 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, + 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x45, 0x70, 0x6f, 0x63, 0x68, 0x52, 0x0f, 0x61, 0x63, 0x74, 0x69, 0x76, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x45, 0x70, 0x6f, 0x63, 0x68, 0x22, 0xd1, 0x02, 0x0a, 0x13, 0x44, 0x6f, 0x70, 0x70, 0x65, 0x6c, @@ -3677,7 +3677,7 @@ var file_proto_prysm_v1alpha1_validator_proto_rawDesc = []byte{ 0x5b, 0x0a, 0x05, 0x65, 0x70, 0x6f, 0x63, 0x68, 0x18, 0x03, 0x20, 0x01, 0x28, 0x04, 0x42, 0x45, 0x82, 0xb5, 0x18, 0x41, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, - 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, + 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x45, 0x70, 0x6f, 0x63, 0x68, 0x52, 0x05, 0x65, 0x70, 0x6f, 0x63, 0x68, 0x12, 0x27, 0x0a, 0x0b, 0x73, 0x69, 0x67, 0x6e, 0x65, 0x64, 0x5f, 0x72, 0x6f, 0x6f, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, @@ -3722,7 +3722,7 @@ var file_proto_prysm_v1alpha1_validator_proto_rawDesc = []byte{ 0x74, 0x6f, 0x72, 0x5f, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x18, 0x02, 0x20, 0x01, 0x28, 0x04, 0x42, 0x4e, 0x82, 0xb5, 0x18, 0x4a, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, - 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, + 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x56, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x52, 0x0e, 0x76, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x3a, @@ -3755,7 +3755,7 @@ var file_proto_prysm_v1alpha1_validator_proto_rawDesc = []byte{ 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x52, 0x04, 0x6d, 0x73, 0x67, 0x73, 0x12, 0x58, 0x0a, 0x04, 0x73, 0x6c, 0x6f, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x04, 0x42, 0x44, 0x82, 0xb5, 0x18, 0x40, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, - 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, + 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 
0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x53, 0x6c, 0x6f, 0x74, 0x52, 0x04, 0x73, 0x6c, 0x6f, 0x74, 0x12, 0x1b, 0x0a, 0x09, 0x73, 0x75, 0x62, 0x6e, 0x65, 0x74, @@ -4125,7 +4125,7 @@ var file_proto_prysm_v1alpha1_validator_proto_rawDesc = []byte{ 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x42, 0x0e, 0x56, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, 0x50, 0x72, 0x6f, 0x74, 0x6f, 0x50, 0x01, 0x5a, 0x39, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, - 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x70, 0x72, 0x6f, 0x74, + 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x3b, 0x65, 0x74, 0x68, 0xaa, 0x02, 0x0f, 0x45, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x45, 0x74, 0x68, 0x2e, 0x56, 0x31, 0xca, 0x02, 0x15, 0x45, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, diff --git a/proto/prysm/v1alpha1/validator.proto b/proto/prysm/v1alpha1/validator.proto index f1607fdc56..e4ddd9514c 100644 --- a/proto/prysm/v1alpha1/validator.proto +++ b/proto/prysm/v1alpha1/validator.proto @@ -27,7 +27,7 @@ import "proto/prysm/v1alpha1/sync_committee.proto"; import "proto/prysm/v1alpha1/attestation.proto"; option csharp_namespace = "Ethereum.Eth.V1"; -option go_package = "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1;eth"; +option go_package = "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1;eth"; option java_multiple_files = true; option java_outer_classname = "ValidatorProto"; option java_package = "org.ethereum.eth.v1alpha1"; @@ -461,7 +461,7 @@ message SyncSubcommitteeIndexRequest { // The slot of validator's assignment. uint64 slot = 2 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Slot" ]; } @@ -471,7 +471,7 @@ message SyncCommitteeContributionRequest { // Slot for which the aggregation request applies. uint64 slot = 1 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Slot" ]; // 48 byte public key of the validator. bytes public_key = 2 [ @@ -492,7 +492,7 @@ message SyncSubcommitteeIndexResponse { // one index. repeated uint64 indices = 1 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/" + "github.com/OffchainLabs/prysm/v7/consensus-types/" "primitives.CommitteeIndex" ]; } @@ -503,7 +503,7 @@ message StreamSlotsResponse { uint64 slot = 1 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Slot" ]; bytes previous_duty_dependent_root = 2 [ (ethereum.eth.ext.ssz_size) = "32" ]; bytes current_duty_dependent_root = 3 [ (ethereum.eth.ext.ssz_size) = "32" ]; @@ -544,7 +544,7 @@ message DomainRequest { // The epoch for which the domain is being requested. uint64 epoch = 1 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Epoch" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Epoch" ]; // The bytes domain specified by the validator. @@ -578,7 +578,7 @@ message ValidatorActivationResponse { // The validators index in the beacon state. 
uint64 index = 3 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/" + "github.com/OffchainLabs/prysm/v7/consensus-types/" "primitives.ValidatorIndex" ]; } // A list of validator statuses mapped 1-to-1 with the public keys @@ -620,7 +620,7 @@ message ValidatorIndexResponse { // The validator's index in the beacon chain state's validator registry. uint64 index = 1 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/" + "github.com/OffchainLabs/prysm/v7/consensus-types/" "primitives.ValidatorIndex" ]; } @@ -657,14 +657,14 @@ message ValidatorStatusResponse { // deposit was included in a block. uint64 deposit_inclusion_slot = 3 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Slot" ]; // The epoch in the beacon chain in which the validator // is determined as active. uint64 activation_epoch = 4 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Epoch" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Epoch" ]; // The position in the activation queue of pending validators. @@ -690,7 +690,7 @@ message MultipleValidatorStatusResponse { // A list of validator indices. repeated uint64 indices = 3 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/" + "github.com/OffchainLabs/prysm/v7/consensus-types/" "primitives.ValidatorIndex" ]; } @@ -700,7 +700,7 @@ message DutiesRequest { // Epoch at which validators should perform their duties. uint64 epoch = 1 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Epoch" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Epoch" ]; // Array of byte encoded BLS public keys. @@ -723,25 +723,25 @@ message DutiesResponse { // The committee a validator is assigned to. repeated uint64 committee = 1 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/" + "github.com/OffchainLabs/prysm/v7/consensus-types/" "primitives.ValidatorIndex" ]; // The index into the committee where the validator belongs in. uint64 committee_index = 2 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/" + "github.com/OffchainLabs/prysm/v7/consensus-types/" "primitives.CommitteeIndex" ]; // Slot at which a validator must attest. uint64 attester_slot = 3 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Slot" ]; // Slots at which a validator must propose a beacon chain block. repeated uint64 proposer_slots = 4 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Slot" ]; // 48 byte BLS public key for the validator who's assigned to perform a @@ -754,7 +754,7 @@ message DutiesResponse { // The index of the validator in the beacon state. 
uint64 validator_index = 7 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/" + "github.com/OffchainLabs/prysm/v7/consensus-types/" "primitives.ValidatorIndex" ]; // Whether the validator belongs in the sync committee and has to perform @@ -783,7 +783,7 @@ message DutiesV2Response { // The index of the committee which the given validator has an assignment uint64 committee_index = 2 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/" + "github.com/OffchainLabs/prysm/v7/consensus-types/" "primitives.CommitteeIndex" ]; // The number of committees in the duty's slot. @@ -795,13 +795,13 @@ message DutiesV2Response { // Slot at which a validator must attest. uint64 attester_slot = 5 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Slot" ]; // Slots at which a validator must propose a beacon chain block. repeated uint64 proposer_slots = 6 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Slot" ]; // 48 byte BLS public key for the validator who's assigned to perform a @@ -814,7 +814,7 @@ message DutiesV2Response { // The index of the validator in the beacon state. uint64 validator_index = 9 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/" + "github.com/OffchainLabs/prysm/v7/consensus-types/" "primitives.ValidatorIndex" ]; // Whether the validator belongs in the sync committee and has to perform @@ -829,7 +829,7 @@ message BlockRequest { // Slot for which the block should be proposed. uint64 slot = 1 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Slot" ]; // Validator's 32 byte randao reveal secret of the current epoch. @@ -867,13 +867,13 @@ message AttestationDataRequest { // Slot for which the attestation should be created. uint64 slot = 1 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Slot" ]; // Committee index the attestation should be created for. uint64 committee_index = 2 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/" + "github.com/OffchainLabs/prysm/v7/consensus-types/" "primitives.CommitteeIndex" ]; } @@ -890,12 +890,12 @@ message AggregateSelectionRequest { // Slot for which the aggregation request applies. uint64 slot = 1 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Slot" ]; // Committee index of the validator at the given slot. uint64 committee_index = 2 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/" + "github.com/OffchainLabs/prysm/v7/consensus-types/" "primitives.CommitteeIndex" ]; // 48 byte public key of the validator. bytes public_key = 3 [ @@ -948,13 +948,13 @@ message CommitteeSubnetsSubscribeRequest { // A list of intended slots to subscribe. repeated uint64 slots = 1 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Slot" ]; // A list of intended committee ids to subscribe. 
It is mapped 1-to-1 with the // slots repeated uint64 committee_ids = 2 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/" + "github.com/OffchainLabs/prysm/v7/consensus-types/" "primitives.CommitteeIndex" ]; // Whether to subscribe as an aggregator or by default attester. // It is mapped 1-to-1 with the slots and committee ids. @@ -999,7 +999,7 @@ message ValidatorParticipation { message ValidatorIdentity { // The validator's index in the beacon state. uint64 index = 1 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/" + "github.com/OffchainLabs/prysm/v7/consensus-types/" "primitives.ValidatorIndex" ]; // The validator's 48 byte BLS public key. @@ -1012,7 +1012,7 @@ message ValidatorIdentity { // FAR_FUTURE_EPOCH if the validator has not been activated. uint64 activation_epoch = 3 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Epoch" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Epoch" ]; } @@ -1033,7 +1033,7 @@ message DoppelGangerRequest { // The validator's last recorded epoch to attest. uint64 epoch = 3 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Epoch" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Epoch" ]; // The validator's last recorded signed root. bytes signed_root = 2 [ (ethereum.eth.ext.ssz_size) = "32" ]; @@ -1085,7 +1085,7 @@ message PrepareBeaconProposerRequest { // The proposer validator index. uint64 validator_index = 2 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/" + "github.com/OffchainLabs/prysm/v7/consensus-types/" "primitives.ValidatorIndex" ]; } repeated FeeRecipientContainer recipients = 1; @@ -1122,7 +1122,7 @@ message AggregatedSigAndAggregationBitsRequest { repeated SyncCommitteeMessage msgs = 1; uint64 slot = 2 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives.Slot" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives.Slot" ]; uint64 subnet_id = 3; bytes block_root = 4 [ (ethereum.eth.ext.ssz_size) = "32" ]; diff --git a/proto/prysm/v1alpha1/withdrawals.pb.go b/proto/prysm/v1alpha1/withdrawals.pb.go index 541140332b..31263b4ed0 100755 --- a/proto/prysm/v1alpha1/withdrawals.pb.go +++ b/proto/prysm/v1alpha1/withdrawals.pb.go @@ -10,8 +10,8 @@ import ( reflect "reflect" sync "sync" - github_com_OffchainLabs_prysm_v6_consensus_types_primitives "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - _ "github.com/OffchainLabs/prysm/v6/proto/eth/ext" + github_com_OffchainLabs_prysm_v7_consensus_types_primitives "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + _ "github.com/OffchainLabs/prysm/v7/proto/eth/ext" protoreflect "google.golang.org/protobuf/reflect/protoreflect" protoimpl "google.golang.org/protobuf/runtime/protoimpl" ) @@ -77,7 +77,7 @@ func (x *SignedBLSToExecutionChange) GetSignature() []byte { type BLSToExecutionChange struct { state protoimpl.MessageState `protogen:"open.v1"` - ValidatorIndex github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex `protobuf:"varint,1,opt,name=validator_index,json=validatorIndex,proto3" json:"validator_index,omitempty" cast-type:"github.com/OffchainLabs/prysm/v6/consensus-types/primitives.ValidatorIndex"` + ValidatorIndex github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex `protobuf:"varint,1,opt,name=validator_index,json=validatorIndex,proto3" 
json:"validator_index,omitempty" cast-type:"github.com/OffchainLabs/prysm/v7/consensus-types/primitives.ValidatorIndex"` FromBlsPubkey []byte `protobuf:"bytes,2,opt,name=from_bls_pubkey,json=fromBlsPubkey,proto3" json:"from_bls_pubkey,omitempty" ssz-size:"48"` ToExecutionAddress []byte `protobuf:"bytes,3,opt,name=to_execution_address,json=toExecutionAddress,proto3" json:"to_execution_address,omitempty" ssz-size:"20"` unknownFields protoimpl.UnknownFields @@ -114,11 +114,11 @@ func (*BLSToExecutionChange) Descriptor() ([]byte, []int) { return file_proto_prysm_v1alpha1_withdrawals_proto_rawDescGZIP(), []int{1} } -func (x *BLSToExecutionChange) GetValidatorIndex() github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex { +func (x *BLSToExecutionChange) GetValidatorIndex() github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex { if x != nil { return x.ValidatorIndex } - return github_com_OffchainLabs_prysm_v6_consensus_types_primitives.ValidatorIndex(0) + return github_com_OffchainLabs_prysm_v7_consensus_types_primitives.ValidatorIndex(0) } func (x *BLSToExecutionChange) GetFromBlsPubkey() []byte { @@ -157,7 +157,7 @@ var file_proto_prysm_v1alpha1_withdrawals_proto_rawDesc = []byte{ 0x65, 0x12, 0x77, 0x0a, 0x0f, 0x76, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, 0x5f, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, 0x42, 0x4e, 0x82, 0xb5, 0x18, 0x4a, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, - 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, + 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x2e, 0x56, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x52, 0x0e, 0x76, 0x61, 0x6c, 0x69, @@ -173,7 +173,7 @@ var file_proto_prysm_v1alpha1_withdrawals_proto_rawDesc = []byte{ 0x61, 0x31, 0x42, 0x10, 0x57, 0x69, 0x74, 0x68, 0x64, 0x72, 0x61, 0x77, 0x61, 0x6c, 0x73, 0x50, 0x72, 0x6f, 0x74, 0x6f, 0x50, 0x01, 0x5a, 0x39, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, - 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2f, 0x70, + 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x3b, 0x65, 0x74, 0x68, 0xaa, 0x02, 0x15, 0x45, 0x74, 0x68, 0x65, 0x72, 0x65, 0x75, 0x6d, 0x2e, 0x45, 0x74, 0x68, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0xca, 0x02, 0x15, 0x45, 0x74, 0x68, 0x65, diff --git a/proto/prysm/v1alpha1/withdrawals.proto b/proto/prysm/v1alpha1/withdrawals.proto index 40c3be930d..6c88c30d63 100644 --- a/proto/prysm/v1alpha1/withdrawals.proto +++ b/proto/prysm/v1alpha1/withdrawals.proto @@ -18,7 +18,7 @@ package ethereum.eth.v1alpha1; import "proto/eth/ext/options.proto"; option csharp_namespace = "Ethereum.Eth.v1alpha1"; -option go_package = "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1;eth"; +option go_package = "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1;eth"; option java_multiple_files = true; option java_outer_classname = "WithdrawalsProto"; option java_package = "org.ethereum.eth.v1alpha1"; @@ -38,7 +38,7 @@ message 
BLSToExecutionChange { // The validator index requesting the change uint64 validator_index = 1 [ (ethereum.eth.ext.cast_type) = - "github.com/OffchainLabs/prysm/v6/consensus-types/" + "github.com/OffchainLabs/prysm/v7/consensus-types/" "primitives.ValidatorIndex" ]; // The public key of the BLS address requesting the change diff --git a/proto/ssz_query/BUILD.bazel b/proto/ssz_query/BUILD.bazel index b4357eec32..deb168b5e8 100644 --- a/proto/ssz_query/BUILD.bazel +++ b/proto/ssz_query/BUILD.bazel @@ -19,7 +19,7 @@ go_proto_library( compilers = [ "@com_github_prysmaticlabs_protoc_gen_go_cast//:go_cast_grpc", ], - importpath = "github.com/OffchainLabs/prysm/v6/proto/ssz_query", + importpath = "github.com/OffchainLabs/prysm/v7/proto/ssz_query", proto = ":proto", visibility = ["//visibility:public"], deps = [ @@ -48,7 +48,7 @@ go_library( ":ssz_generated", # keep ], embed = [":go_proto"], - importpath = "github.com/OffchainLabs/prysm/v6/proto/ssz_query", + importpath = "github.com/OffchainLabs/prysm/v7/proto/ssz_query", visibility = ["//visibility:public"], deps = SSZ_DEPS + [ "//proto/eth/ext:go_default_library", diff --git a/proto/ssz_query/response.pb.go b/proto/ssz_query/response.pb.go index f6ef39d3f0..9d5404cb7a 100755 --- a/proto/ssz_query/response.pb.go +++ b/proto/ssz_query/response.pb.go @@ -10,7 +10,7 @@ import ( reflect "reflect" sync "sync" - _ "github.com/OffchainLabs/prysm/v6/proto/eth/ext" + _ "github.com/OffchainLabs/prysm/v7/proto/eth/ext" protoreflect "google.golang.org/protobuf/reflect/protoreflect" protoimpl "google.golang.org/protobuf/runtime/protoimpl" ) @@ -225,7 +225,7 @@ var file_proto_ssz_query_response_proto_rawDesc = []byte{ 0x79, 0x50, 0x72, 0x6f, 0x6f, 0x66, 0x52, 0x05, 0x70, 0x72, 0x6f, 0x6f, 0x66, 0x42, 0x32, 0x5a, 0x30, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, - 0x36, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2f, 0x73, 0x73, 0x7a, 0x5f, 0x71, 0x75, 0x65, 0x72, + 0x37, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2f, 0x73, 0x73, 0x7a, 0x5f, 0x71, 0x75, 0x65, 0x72, 0x79, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, } diff --git a/proto/ssz_query/response.proto b/proto/ssz_query/response.proto index 299204af29..74ba136a3c 100644 --- a/proto/ssz_query/response.proto +++ b/proto/ssz_query/response.proto @@ -4,7 +4,7 @@ package testing; import "proto/eth/ext/options.proto"; -option go_package = "github.com/OffchainLabs/prysm/v6/proto/ssz_query"; +option go_package = "github.com/OffchainLabs/prysm/v7/proto/ssz_query"; message SSZQueryProof { bytes leaf = 1 [ (ethereum.eth.ext.ssz_size) = "32" ]; diff --git a/proto/ssz_query/testing/BUILD.bazel b/proto/ssz_query/testing/BUILD.bazel index 929ce56d34..610857a34d 100644 --- a/proto/ssz_query/testing/BUILD.bazel +++ b/proto/ssz_query/testing/BUILD.bazel @@ -20,7 +20,7 @@ go_proto_library( compilers = [ "@com_github_prysmaticlabs_protoc_gen_go_cast//:go_cast_grpc", ], - importpath = "github.com/OffchainLabs/prysm/v6/proto/ssz_query/testing", + importpath = "github.com/OffchainLabs/prysm/v7/proto/ssz_query/testing", proto = ":proto", visibility = ["//visibility:public"], deps = [ @@ -50,7 +50,7 @@ go_library( ":ssz_generated", # keep ], embed = [":go_proto"], - importpath = "github.com/OffchainLabs/prysm/v6/proto/ssz_query/testing", + importpath = "github.com/OffchainLabs/prysm/v7/proto/ssz_query/testing", visibility = ["//visibility:public"], deps = SSZ_DEPS + [ 
"//proto/eth/ext:go_default_library", diff --git a/proto/ssz_query/testing/test_containers.pb.go b/proto/ssz_query/testing/test_containers.pb.go index b1af1311c1..b3e54aaf1a 100755 --- a/proto/ssz_query/testing/test_containers.pb.go +++ b/proto/ssz_query/testing/test_containers.pb.go @@ -11,7 +11,7 @@ import ( sync "sync" github_com_OffchainLabs_go_bitfield "github.com/OffchainLabs/go-bitfield" - _ "github.com/OffchainLabs/prysm/v6/proto/eth/ext" + _ "github.com/OffchainLabs/prysm/v7/proto/eth/ext" protoreflect "google.golang.org/protobuf/reflect/protoreflect" protoimpl "google.golang.org/protobuf/runtime/protoimpl" ) @@ -522,7 +522,7 @@ var file_proto_ssz_query_testing_test_containers_proto_rawDesc = []byte{ 0x09, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x06, 0x8a, 0xb5, 0x18, 0x02, 0x35, 0x36, 0x52, 0x0d, 0x74, 0x72, 0x61, 0x69, 0x6c, 0x69, 0x6e, 0x67, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x42, 0x42, 0x5a, 0x40, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f, 0x66, 0x66, 0x63, 0x68, - 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x36, + 0x61, 0x69, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x2f, 0x70, 0x72, 0x79, 0x73, 0x6d, 0x2f, 0x76, 0x37, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2f, 0x73, 0x73, 0x7a, 0x5f, 0x71, 0x75, 0x65, 0x72, 0x79, 0x2f, 0x74, 0x65, 0x73, 0x74, 0x69, 0x6e, 0x67, 0x3b, 0x74, 0x65, 0x73, 0x74, 0x69, 0x6e, 0x67, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, diff --git a/proto/ssz_query/testing/test_containers.proto b/proto/ssz_query/testing/test_containers.proto index 506a187d27..256699fb1a 100644 --- a/proto/ssz_query/testing/test_containers.proto +++ b/proto/ssz_query/testing/test_containers.proto @@ -4,7 +4,7 @@ package testing; import "proto/eth/ext/options.proto"; -option go_package = "github.com/OffchainLabs/prysm/v6/proto/ssz_query/testing;testing"; +option go_package = "github.com/OffchainLabs/prysm/v7/proto/ssz_query/testing;testing"; // ===== FIXED-SIZE TEST CONTAINERS ===== diff --git a/proto/testing/BUILD.bazel b/proto/testing/BUILD.bazel index 6cf5d28538..b3fac4b73f 100644 --- a/proto/testing/BUILD.bazel +++ b/proto/testing/BUILD.bazel @@ -18,7 +18,7 @@ proto_library( go_proto_library( name = "ethereum_testing_go_proto", compiler = "//:cast_proto_compiler", - importpath = "github.com/OffchainLabs/prysm/v6/proto/testing", + importpath = "github.com/OffchainLabs/prysm/v7/proto/testing", proto = ":testing_proto", visibility = ["//visibility:public"], deps = [ @@ -35,7 +35,7 @@ go_library( name = "go_default_library", testonly = True, embed = [":ethereum_testing_go_proto"], - importpath = "github.com/OffchainLabs/prysm/v6/proto/testing", + importpath = "github.com/OffchainLabs/prysm/v7/proto/testing", visibility = ["//visibility:public"], ) diff --git a/proto/testing/tags_test.go b/proto/testing/tags_test.go index 510f64561c..a164347019 100644 --- a/proto/testing/tags_test.go +++ b/proto/testing/tags_test.go @@ -7,10 +7,10 @@ import ( "strings" "testing" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - pb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + pb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" ) func TestSSZTagSize(t *testing.T) { diff --git a/runtime/BUILD.bazel b/runtime/BUILD.bazel index 
3213458cff..04b58ce8c9 100644 --- a/runtime/BUILD.bazel +++ b/runtime/BUILD.bazel @@ -3,7 +3,7 @@ load("@prysm//tools/go:def.bzl", "go_library", "go_test") go_library( name = "go_default_library", srcs = ["service_registry.go"], - importpath = "github.com/OffchainLabs/prysm/v6/runtime", + importpath = "github.com/OffchainLabs/prysm/v7/runtime", visibility = ["//visibility:public"], deps = ["@com_github_sirupsen_logrus//:go_default_library"], ) diff --git a/runtime/debug/BUILD.bazel b/runtime/debug/BUILD.bazel index a995a9f6fd..191bf74618 100644 --- a/runtime/debug/BUILD.bazel +++ b/runtime/debug/BUILD.bazel @@ -15,7 +15,7 @@ go_library( ":use_cgosymbolizer": ["cgo_symbolizer.go"], "//conditions:default": [], }), - importpath = "github.com/OffchainLabs/prysm/v6/runtime/debug", + importpath = "github.com/OffchainLabs/prysm/v7/runtime/debug", visibility = ["//visibility:public"], deps = [ "@com_github_prometheus_client_golang//prometheus:go_default_library", diff --git a/runtime/fdlimits/BUILD.bazel b/runtime/fdlimits/BUILD.bazel index ad5151beab..59048fc29c 100644 --- a/runtime/fdlimits/BUILD.bazel +++ b/runtime/fdlimits/BUILD.bazel @@ -3,7 +3,7 @@ load("@prysm//tools/go:def.bzl", "go_library", "go_test") go_library( name = "go_default_library", srcs = ["fdlimits.go"], - importpath = "github.com/OffchainLabs/prysm/v6/runtime/fdlimits", + importpath = "github.com/OffchainLabs/prysm/v7/runtime/fdlimits", visibility = ["//visibility:public"], deps = [ "@com_github_ethereum_go_ethereum//common/fdlimit:go_default_library", diff --git a/runtime/fdlimits/fdlimits_test.go b/runtime/fdlimits/fdlimits_test.go index 755a2d6d4a..b1636b8a1a 100644 --- a/runtime/fdlimits/fdlimits_test.go +++ b/runtime/fdlimits/fdlimits_test.go @@ -3,8 +3,8 @@ package fdlimits_test import ( "testing" - "github.com/OffchainLabs/prysm/v6/runtime/fdlimits" - "github.com/OffchainLabs/prysm/v6/testing/assert" + "github.com/OffchainLabs/prysm/v7/runtime/fdlimits" + "github.com/OffchainLabs/prysm/v7/testing/assert" gethLimit "github.com/ethereum/go-ethereum/common/fdlimit" ) diff --git a/runtime/interop/BUILD.bazel b/runtime/interop/BUILD.bazel index 59be2f7253..03af24a9c6 100644 --- a/runtime/interop/BUILD.bazel +++ b/runtime/interop/BUILD.bazel @@ -10,7 +10,7 @@ go_library( "premine-state.go", "premined_genesis_state.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/runtime/interop", + importpath = "github.com/OffchainLabs/prysm/v7/runtime/interop", visibility = ["//visibility:public"], deps = [ "//async:go_default_library", diff --git a/runtime/interop/generate_genesis_state.go b/runtime/interop/generate_genesis_state.go index 04afdc7622..83e5705edf 100644 --- a/runtime/interop/generate_genesis_state.go +++ b/runtime/interop/generate_genesis_state.go @@ -6,17 +6,17 @@ import ( "context" "sync" - "github.com/OffchainLabs/prysm/v6/async" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/signing" - coreState "github.com/OffchainLabs/prysm/v6/beacon-chain/core/transition" - statenative "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/container/trie" - "github.com/OffchainLabs/prysm/v6/crypto/bls" - "github.com/OffchainLabs/prysm/v6/crypto/hash" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/time" + "github.com/OffchainLabs/prysm/v7/async" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/signing" + 
coreState "github.com/OffchainLabs/prysm/v7/beacon-chain/core/transition" + statenative "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/container/trie" + "github.com/OffchainLabs/prysm/v7/crypto/bls" + "github.com/OffchainLabs/prysm/v7/crypto/hash" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/time" "github.com/pkg/errors" ) diff --git a/runtime/interop/generate_genesis_state_bellatrix.go b/runtime/interop/generate_genesis_state_bellatrix.go index 8836bd9c80..678a9c5e6b 100644 --- a/runtime/interop/generate_genesis_state_bellatrix.go +++ b/runtime/interop/generate_genesis_state_bellatrix.go @@ -5,13 +5,13 @@ package interop import ( "context" - coreState "github.com/OffchainLabs/prysm/v6/beacon-chain/core/transition" - statenative "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/container/trie" - enginev1 "github.com/OffchainLabs/prysm/v6/proto/engine/v1" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/time" + coreState "github.com/OffchainLabs/prysm/v7/beacon-chain/core/transition" + statenative "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/container/trie" + enginev1 "github.com/OffchainLabs/prysm/v7/proto/engine/v1" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/time" "github.com/pkg/errors" ) diff --git a/runtime/interop/generate_genesis_state_bellatrix_test.go b/runtime/interop/generate_genesis_state_bellatrix_test.go index fdc285f7ef..7b2afe9dfd 100644 --- a/runtime/interop/generate_genesis_state_bellatrix_test.go +++ b/runtime/interop/generate_genesis_state_bellatrix_test.go @@ -3,12 +3,12 @@ package interop import ( "testing" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/container/trie" - enginev1 "github.com/OffchainLabs/prysm/v6/proto/engine/v1" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/require" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/container/trie" + enginev1 "github.com/OffchainLabs/prysm/v7/proto/engine/v1" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/require" ) func TestGenerateGenesisStateBellatrix(t *testing.T) { diff --git a/runtime/interop/generate_genesis_state_test.go b/runtime/interop/generate_genesis_state_test.go index 6c4279a8db..f753f2e15c 100644 --- a/runtime/interop/generate_genesis_state_test.go +++ b/runtime/interop/generate_genesis_state_test.go @@ -4,13 +4,13 @@ import ( "testing" "time" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/transition" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/container/trie" - eth "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/interop" - "github.com/OffchainLabs/prysm/v6/testing/assert" - 
"github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/transition" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/container/trie" + eth "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/interop" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" ) func TestGenerateGenesisState(t *testing.T) { diff --git a/runtime/interop/generate_keys.go b/runtime/interop/generate_keys.go index 2ce9283dec..655dad5abc 100644 --- a/runtime/interop/generate_keys.go +++ b/runtime/interop/generate_keys.go @@ -5,10 +5,10 @@ import ( "math/big" "sync" - "github.com/OffchainLabs/prysm/v6/async" - "github.com/OffchainLabs/prysm/v6/crypto/bls" - "github.com/OffchainLabs/prysm/v6/crypto/hash" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/async" + "github.com/OffchainLabs/prysm/v7/crypto/bls" + "github.com/OffchainLabs/prysm/v7/crypto/hash" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" "github.com/pkg/errors" ) diff --git a/runtime/interop/generate_keys_test.go b/runtime/interop/generate_keys_test.go index cb87868d6c..47a135a40a 100644 --- a/runtime/interop/generate_keys_test.go +++ b/runtime/interop/generate_keys_test.go @@ -5,9 +5,9 @@ import ( "os" "testing" - "github.com/OffchainLabs/prysm/v6/runtime/interop" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/runtime/interop" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" "github.com/bazelbuild/rules_go/go/tools/bazel" "github.com/ethereum/go-ethereum/common/hexutil" "github.com/go-yaml/yaml" diff --git a/runtime/interop/genesis.go b/runtime/interop/genesis.go index 87daa95b69..70a8a1a4ff 100644 --- a/runtime/interop/genesis.go +++ b/runtime/interop/genesis.go @@ -5,8 +5,8 @@ import ( "math/big" "time" - clparams "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/time/slots" + clparams "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/time/slots" "github.com/ethereum/go-ethereum/common" "github.com/ethereum/go-ethereum/common/hexutil" "github.com/ethereum/go-ethereum/core" diff --git a/runtime/interop/premine-state.go b/runtime/interop/premine-state.go index c4efcb6ad3..be38b420a2 100644 --- a/runtime/interop/premine-state.go +++ b/runtime/interop/premine-state.go @@ -4,20 +4,20 @@ import ( "context" "time" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/altair" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state/stateutil" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/container/trie" - "github.com/OffchainLabs/prysm/v6/crypto/bls" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - enginev1 "github.com/OffchainLabs/prysm/v6/proto/engine/v1" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/altair" + 
"github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state/stateutil" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/container/trie" + "github.com/OffchainLabs/prysm/v7/crypto/bls" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + enginev1 "github.com/OffchainLabs/prysm/v7/proto/engine/v1" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" "github.com/ethereum/go-ethereum/core/types" "github.com/pkg/errors" ) @@ -28,7 +28,7 @@ type PremineGenesisConfig struct { GenesisTime time.Time NVals uint64 PregenesisCreds uint64 - Version int // as in "github.com/OffchainLabs/prysm/v6/runtime/version" + Version int // as in "github.com/OffchainLabs/prysm/v7/runtime/version" GB *types.Block // geth genesis block depositEntries *depositEntries } diff --git a/runtime/interop/premine-state_test.go b/runtime/interop/premine-state_test.go index 2075926ec6..f4f396ee9b 100644 --- a/runtime/interop/premine-state_test.go +++ b/runtime/interop/premine-state_test.go @@ -5,8 +5,8 @@ import ( "testing" "time" - "github.com/OffchainLabs/prysm/v6/runtime/version" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/runtime/version" + "github.com/OffchainLabs/prysm/v7/testing/require" "github.com/ethereum/go-ethereum/core/types" ) diff --git a/runtime/interop/premined_genesis_state.go b/runtime/interop/premined_genesis_state.go index a61271b3cb..304c798936 100644 --- a/runtime/interop/premined_genesis_state.go +++ b/runtime/interop/premined_genesis_state.go @@ -4,11 +4,11 @@ import ( "context" "time" - coreState "github.com/OffchainLabs/prysm/v6/beacon-chain/core/transition" - statenative "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/container/trie" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + coreState "github.com/OffchainLabs/prysm/v7/beacon-chain/core/transition" + statenative "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/container/trie" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" "github.com/pkg/errors" ) diff --git a/runtime/logging/BUILD.bazel b/runtime/logging/BUILD.bazel index 830db8f4ae..dccb97a7a6 100644 --- a/runtime/logging/BUILD.bazel +++ b/runtime/logging/BUILD.bazel @@ -6,7 +6,7 @@ go_library( "blob.go", "data_column.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/runtime/logging", + importpath = "github.com/OffchainLabs/prysm/v7/runtime/logging", visibility = ["//visibility:public"], deps = [ "//consensus-types/blocks:go_default_library", diff --git a/runtime/logging/blob.go b/runtime/logging/blob.go index a4465277a6..eab92d67d2 100644 --- a/runtime/logging/blob.go +++ b/runtime/logging/blob.go @@ -3,7 +3,7 @@ package logging import ( "fmt" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" "github.com/sirupsen/logrus" ) diff --git a/runtime/logging/data_column.go b/runtime/logging/data_column.go index 
c2e341b1ea..4be59e4673 100644 --- a/runtime/logging/data_column.go +++ b/runtime/logging/data_column.go @@ -3,7 +3,7 @@ package logging import ( "fmt" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" "github.com/sirupsen/logrus" ) diff --git a/runtime/logging/logrus-prefixed-formatter/BUILD.bazel b/runtime/logging/logrus-prefixed-formatter/BUILD.bazel index dfa8acd594..ee97a30fe8 100644 --- a/runtime/logging/logrus-prefixed-formatter/BUILD.bazel +++ b/runtime/logging/logrus-prefixed-formatter/BUILD.bazel @@ -3,7 +3,7 @@ load("@prysm//tools/go:def.bzl", "go_library", "go_test") go_library( name = "go_default_library", srcs = ["formatter.go"], - importpath = "github.com/OffchainLabs/prysm/v6/runtime/logging/logrus-prefixed-formatter", + importpath = "github.com/OffchainLabs/prysm/v7/runtime/logging/logrus-prefixed-formatter", visibility = ["//visibility:public"], deps = [ "@com_github_mgutz_ansi//:go_default_library", diff --git a/runtime/logging/logrus-prefixed-formatter/formatter_test.go b/runtime/logging/logrus-prefixed-formatter/formatter_test.go index 3452fe00e5..26734302cd 100644 --- a/runtime/logging/logrus-prefixed-formatter/formatter_test.go +++ b/runtime/logging/logrus-prefixed-formatter/formatter_test.go @@ -5,8 +5,8 @@ import ( "regexp" "testing" - prefixed "github.com/OffchainLabs/prysm/v6/runtime/logging/logrus-prefixed-formatter" - "github.com/OffchainLabs/prysm/v6/testing/require" + prefixed "github.com/OffchainLabs/prysm/v7/runtime/logging/logrus-prefixed-formatter" + "github.com/OffchainLabs/prysm/v7/testing/require" "github.com/pkg/errors" "github.com/sirupsen/logrus" ) diff --git a/runtime/maxprocs/BUILD.bazel b/runtime/maxprocs/BUILD.bazel index 4fcc2aafa4..2e928bfa49 100644 --- a/runtime/maxprocs/BUILD.bazel +++ b/runtime/maxprocs/BUILD.bazel @@ -3,7 +3,7 @@ load("@prysm//tools/go:def.bzl", "go_library") go_library( name = "go_default_library", srcs = ["maxprocs.go"], - importpath = "github.com/OffchainLabs/prysm/v6/runtime/maxprocs", + importpath = "github.com/OffchainLabs/prysm/v7/runtime/maxprocs", visibility = ["//visibility:public"], deps = [ "@com_github_sirupsen_logrus//:go_default_library", diff --git a/runtime/messagehandler/BUILD.bazel b/runtime/messagehandler/BUILD.bazel index bc58d7d6cf..735fb89ba4 100644 --- a/runtime/messagehandler/BUILD.bazel +++ b/runtime/messagehandler/BUILD.bazel @@ -3,7 +3,7 @@ load("@prysm//tools/go:def.bzl", "go_library", "go_test") go_library( name = "go_default_library", srcs = ["messagehandler.go"], - importpath = "github.com/OffchainLabs/prysm/v6/runtime/messagehandler", + importpath = "github.com/OffchainLabs/prysm/v7/runtime/messagehandler", visibility = ["//visibility:public"], deps = [ "//monitoring/tracing/trace:go_default_library", diff --git a/runtime/messagehandler/messagehandler.go b/runtime/messagehandler/messagehandler.go index 5c900a5cde..6671705c7d 100644 --- a/runtime/messagehandler/messagehandler.go +++ b/runtime/messagehandler/messagehandler.go @@ -7,7 +7,7 @@ import ( "fmt" "runtime/debug" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing/trace" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing/trace" pubsub "github.com/libp2p/go-libp2p-pubsub" "github.com/sirupsen/logrus" "go.opentelemetry.io/otel/codes" diff --git a/runtime/messagehandler/messagehandler_test.go b/runtime/messagehandler/messagehandler_test.go index 365ad8e1df..2b7238953b 100644 --- a/runtime/messagehandler/messagehandler_test.go +++ 
b/runtime/messagehandler/messagehandler_test.go @@ -4,8 +4,8 @@ import ( "context" "testing" - "github.com/OffchainLabs/prysm/v6/runtime/messagehandler" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/runtime/messagehandler" + "github.com/OffchainLabs/prysm/v7/testing/require" pubsub "github.com/libp2p/go-libp2p-pubsub" logTest "github.com/sirupsen/logrus/hooks/test" ) diff --git a/runtime/prereqs/BUILD.bazel b/runtime/prereqs/BUILD.bazel index 3b564ec70e..542bb2a792 100644 --- a/runtime/prereqs/BUILD.bazel +++ b/runtime/prereqs/BUILD.bazel @@ -3,7 +3,7 @@ load("@prysm//tools/go:def.bzl", "go_library", "go_test") go_library( name = "go_default_library", srcs = ["prereq.go"], - importpath = "github.com/OffchainLabs/prysm/v6/runtime/prereqs", + importpath = "github.com/OffchainLabs/prysm/v7/runtime/prereqs", visibility = ["//visibility:public"], deps = [ "@com_github_pkg_errors//:go_default_library", diff --git a/runtime/prereqs/prereq_test.go b/runtime/prereqs/prereq_test.go index 3275d2e2f1..5a548bb01b 100644 --- a/runtime/prereqs/prereq_test.go +++ b/runtime/prereqs/prereq_test.go @@ -4,7 +4,7 @@ import ( "context" "testing" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/require" "github.com/pkg/errors" logTest "github.com/sirupsen/logrus/hooks/test" ) diff --git a/runtime/service_registry_test.go b/runtime/service_registry_test.go index 598c7ba014..1fbf0173c7 100644 --- a/runtime/service_registry_test.go +++ b/runtime/service_registry_test.go @@ -5,8 +5,8 @@ import ( "reflect" "testing" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" ) type mockService struct { diff --git a/runtime/tos/BUILD.bazel b/runtime/tos/BUILD.bazel index 74a59455ce..d3935f3f6e 100644 --- a/runtime/tos/BUILD.bazel +++ b/runtime/tos/BUILD.bazel @@ -3,7 +3,7 @@ load("@prysm//tools/go:def.bzl", "go_library", "go_test") go_library( name = "go_default_library", srcs = ["tos.go"], - importpath = "github.com/OffchainLabs/prysm/v6/runtime/tos", + importpath = "github.com/OffchainLabs/prysm/v7/runtime/tos", visibility = ["//visibility:public"], deps = [ "//cmd:go_default_library", diff --git a/runtime/tos/tos.go b/runtime/tos/tos.go index 2ecd89c778..7175c10f29 100644 --- a/runtime/tos/tos.go +++ b/runtime/tos/tos.go @@ -4,9 +4,9 @@ import ( "path/filepath" "strings" - "github.com/OffchainLabs/prysm/v6/cmd" - "github.com/OffchainLabs/prysm/v6/io/file" - "github.com/OffchainLabs/prysm/v6/io/prompt" + "github.com/OffchainLabs/prysm/v7/cmd" + "github.com/OffchainLabs/prysm/v7/io/file" + "github.com/OffchainLabs/prysm/v7/io/prompt" "github.com/logrusorgru/aurora" "github.com/pkg/errors" "github.com/sirupsen/logrus" diff --git a/runtime/tos/tos_test.go b/runtime/tos/tos_test.go index 000f2955f5..33383cb3b1 100644 --- a/runtime/tos/tos_test.go +++ b/runtime/tos/tos_test.go @@ -6,8 +6,8 @@ import ( "path/filepath" "testing" - "github.com/OffchainLabs/prysm/v6/cmd" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/cmd" + "github.com/OffchainLabs/prysm/v7/testing/require" "github.com/urfave/cli/v2" ) diff --git a/runtime/version/BUILD.bazel b/runtime/version/BUILD.bazel index f0d60af12c..4c8bfaa546 100644 --- a/runtime/version/BUILD.bazel +++ b/runtime/version/BUILD.bazel @@ -7,7 +7,7 @@ go_library( "metrics.go", "version.go", ], - importpath = 
"github.com/OffchainLabs/prysm/v6/runtime/version", + importpath = "github.com/OffchainLabs/prysm/v7/runtime/version", visibility = ["//visibility:public"], x_defs = { "gitCommit": "{STABLE_GIT_COMMIT}", diff --git a/testing/assert/BUILD.bazel b/testing/assert/BUILD.bazel index 2801c6945e..5eb2644f4c 100644 --- a/testing/assert/BUILD.bazel +++ b/testing/assert/BUILD.bazel @@ -4,7 +4,7 @@ go_library( name = "go_default_library", testonly = True, srcs = ["assertions.go"], - importpath = "github.com/OffchainLabs/prysm/v6/testing/assert", + importpath = "github.com/OffchainLabs/prysm/v7/testing/assert", visibility = ["//visibility:public"], deps = [ "//testing/assertions:go_default_library", diff --git a/testing/assert/assertions.go b/testing/assert/assertions.go index 0aa3db8b87..434bdc6b4a 100644 --- a/testing/assert/assertions.go +++ b/testing/assert/assertions.go @@ -1,7 +1,7 @@ package assert import ( - "github.com/OffchainLabs/prysm/v6/testing/assertions" + "github.com/OffchainLabs/prysm/v7/testing/assertions" "github.com/sirupsen/logrus/hooks/test" ) diff --git a/testing/assertions/BUILD.bazel b/testing/assertions/BUILD.bazel index 0d91acaeaf..d4cd5c1c96 100644 --- a/testing/assertions/BUILD.bazel +++ b/testing/assertions/BUILD.bazel @@ -3,7 +3,7 @@ load("@prysm//tools/go:def.bzl", "go_library", "go_test") go_library( name = "go_default_library", srcs = ["assertions.go"], - importpath = "github.com/OffchainLabs/prysm/v6/testing/assertions", + importpath = "github.com/OffchainLabs/prysm/v7/testing/assertions", visibility = ["//visibility:public"], deps = [ "//encoding/ssz/equality:go_default_library", diff --git a/testing/assertions/assertions.go b/testing/assertions/assertions.go index ebfbe766e5..9aa2f11800 100644 --- a/testing/assertions/assertions.go +++ b/testing/assertions/assertions.go @@ -9,7 +9,7 @@ import ( "sort" "strings" - "github.com/OffchainLabs/prysm/v6/encoding/ssz/equality" + "github.com/OffchainLabs/prysm/v7/encoding/ssz/equality" "github.com/d4l3k/messagediff" "github.com/google/go-cmp/cmp" "github.com/sirupsen/logrus/hooks/test" diff --git a/testing/assertions/assertions_test.go b/testing/assertions/assertions_test.go index e3aa9edfe3..c81034c90a 100644 --- a/testing/assertions/assertions_test.go +++ b/testing/assertions/assertions_test.go @@ -7,11 +7,11 @@ import ( "testing" "unicode" - eth "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - testpb "github.com/OffchainLabs/prysm/v6/proto/testing" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/assertions" - "github.com/OffchainLabs/prysm/v6/testing/require" + eth "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + testpb "github.com/OffchainLabs/prysm/v7/proto/testing" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/assertions" + "github.com/OffchainLabs/prysm/v7/testing/require" "github.com/sirupsen/logrus" "github.com/sirupsen/logrus/hooks/test" "google.golang.org/protobuf/types/known/timestamppb" diff --git a/testing/benchmark/BUILD.bazel b/testing/benchmark/BUILD.bazel index af80b95db9..8bb11bee52 100644 --- a/testing/benchmark/BUILD.bazel +++ b/testing/benchmark/BUILD.bazel @@ -4,7 +4,7 @@ go_library( name = "go_default_library", srcs = ["pregen.go"], data = ["//testing/benchmark/benchmark_files:benchmark_data"], - importpath = "github.com/OffchainLabs/prysm/v6/testing/benchmark", + importpath = "github.com/OffchainLabs/prysm/v7/testing/benchmark", visibility = ["//visibility:public"], deps = [ 
"//beacon-chain/state:go_default_library", diff --git a/testing/benchmark/pregen.go b/testing/benchmark/pregen.go index b8ec269c26..8e8f8b6137 100644 --- a/testing/benchmark/pregen.go +++ b/testing/benchmark/pregen.go @@ -6,10 +6,10 @@ import ( "fmt" "os" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - "github.com/OffchainLabs/prysm/v6/config/params" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + "github.com/OffchainLabs/prysm/v7/config/params" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" "github.com/bazelbuild/rules_go/go/tools/bazel" ) diff --git a/testing/benchmark/pregen_test.go b/testing/benchmark/pregen_test.go index 65299f6afc..de8ce16828 100644 --- a/testing/benchmark/pregen_test.go +++ b/testing/benchmark/pregen_test.go @@ -3,7 +3,7 @@ package benchmark import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/require" ) func TestPreGenFullBlock(t *testing.T) { diff --git a/testing/bls/BUILD.bazel b/testing/bls/BUILD.bazel index 02ad7aeefd..82433bac88 100644 --- a/testing/bls/BUILD.bazel +++ b/testing/bls/BUILD.bazel @@ -46,6 +46,6 @@ go_library( "sign_test.yaml.go", "verify_test.yaml.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/testing/bls", + importpath = "github.com/OffchainLabs/prysm/v7/testing/bls", visibility = ["//visibility:public"], ) diff --git a/testing/bls/aggregate_test.go b/testing/bls/aggregate_test.go index af28898e41..b350481b91 100644 --- a/testing/bls/aggregate_test.go +++ b/testing/bls/aggregate_test.go @@ -4,10 +4,10 @@ import ( "encoding/hex" "testing" - "github.com/OffchainLabs/prysm/v6/crypto/bls" - "github.com/OffchainLabs/prysm/v6/crypto/bls/common" - "github.com/OffchainLabs/prysm/v6/testing/bls/utils" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/crypto/bls" + "github.com/OffchainLabs/prysm/v7/crypto/bls/common" + "github.com/OffchainLabs/prysm/v7/testing/bls/utils" + "github.com/OffchainLabs/prysm/v7/testing/require" "github.com/ghodss/yaml" ) diff --git a/testing/bls/aggregate_verify_test.go b/testing/bls/aggregate_verify_test.go index 62115c7554..e02c73ffac 100644 --- a/testing/bls/aggregate_verify_test.go +++ b/testing/bls/aggregate_verify_test.go @@ -5,11 +5,11 @@ import ( "errors" "testing" - "github.com/OffchainLabs/prysm/v6/crypto/bls" - "github.com/OffchainLabs/prysm/v6/crypto/bls/common" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/testing/bls/utils" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/crypto/bls" + "github.com/OffchainLabs/prysm/v7/crypto/bls/common" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/testing/bls/utils" + "github.com/OffchainLabs/prysm/v7/testing/require" "github.com/ghodss/yaml" ) diff --git a/testing/bls/batch_verify_test.go b/testing/bls/batch_verify_test.go index 4aa6eddd62..4ec8449674 100644 --- a/testing/bls/batch_verify_test.go +++ b/testing/bls/batch_verify_test.go @@ -5,11 +5,11 @@ import ( "errors" "testing" - "github.com/OffchainLabs/prysm/v6/crypto/bls" - "github.com/OffchainLabs/prysm/v6/crypto/bls/common" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - 
"github.com/OffchainLabs/prysm/v6/testing/bls/utils" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/crypto/bls" + "github.com/OffchainLabs/prysm/v7/crypto/bls/common" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/testing/bls/utils" + "github.com/OffchainLabs/prysm/v7/testing/require" "github.com/ghodss/yaml" ) diff --git a/testing/bls/deserialization_G1_test.go b/testing/bls/deserialization_G1_test.go index 6f5ff2f16b..cade0390c0 100644 --- a/testing/bls/deserialization_G1_test.go +++ b/testing/bls/deserialization_G1_test.go @@ -5,10 +5,10 @@ import ( "strings" "testing" - "github.com/OffchainLabs/prysm/v6/crypto/bls" - "github.com/OffchainLabs/prysm/v6/crypto/bls/common" - "github.com/OffchainLabs/prysm/v6/testing/bls/utils" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/crypto/bls" + "github.com/OffchainLabs/prysm/v7/crypto/bls/common" + "github.com/OffchainLabs/prysm/v7/testing/bls/utils" + "github.com/OffchainLabs/prysm/v7/testing/require" "github.com/ghodss/yaml" ) diff --git a/testing/bls/deserialization_G2_test.go b/testing/bls/deserialization_G2_test.go index edc6932c23..10f98b5181 100644 --- a/testing/bls/deserialization_G2_test.go +++ b/testing/bls/deserialization_G2_test.go @@ -4,9 +4,9 @@ import ( "encoding/hex" "testing" - "github.com/OffchainLabs/prysm/v6/crypto/bls" - "github.com/OffchainLabs/prysm/v6/testing/bls/utils" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/crypto/bls" + "github.com/OffchainLabs/prysm/v7/testing/bls/utils" + "github.com/OffchainLabs/prysm/v7/testing/require" "github.com/ghodss/yaml" ) diff --git a/testing/bls/fast_aggregate_verify_test.go b/testing/bls/fast_aggregate_verify_test.go index eb3044118b..a232c1386b 100644 --- a/testing/bls/fast_aggregate_verify_test.go +++ b/testing/bls/fast_aggregate_verify_test.go @@ -5,11 +5,11 @@ import ( "errors" "testing" - "github.com/OffchainLabs/prysm/v6/crypto/bls" - "github.com/OffchainLabs/prysm/v6/crypto/bls/common" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/testing/bls/utils" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/crypto/bls" + "github.com/OffchainLabs/prysm/v7/crypto/bls/common" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/testing/bls/utils" + "github.com/OffchainLabs/prysm/v7/testing/require" "github.com/ghodss/yaml" ) diff --git a/testing/bls/hash_to_G2_test.go b/testing/bls/hash_to_G2_test.go index 2541327a51..3635b705dc 100644 --- a/testing/bls/hash_to_G2_test.go +++ b/testing/bls/hash_to_G2_test.go @@ -6,8 +6,8 @@ import ( "strings" "testing" - "github.com/OffchainLabs/prysm/v6/testing/bls/utils" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/bls/utils" + "github.com/OffchainLabs/prysm/v7/testing/require" "github.com/ghodss/yaml" blst "github.com/supranational/blst/bindings/go" ) diff --git a/testing/bls/sign_test.go b/testing/bls/sign_test.go index 01977dd08e..78c51d13ec 100644 --- a/testing/bls/sign_test.go +++ b/testing/bls/sign_test.go @@ -6,10 +6,10 @@ import ( "errors" "testing" - "github.com/OffchainLabs/prysm/v6/crypto/bls" - "github.com/OffchainLabs/prysm/v6/crypto/bls/common" - "github.com/OffchainLabs/prysm/v6/testing/bls/utils" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/crypto/bls" + 
"github.com/OffchainLabs/prysm/v7/crypto/bls/common" + "github.com/OffchainLabs/prysm/v7/testing/bls/utils" + "github.com/OffchainLabs/prysm/v7/testing/require" "github.com/ghodss/yaml" ) diff --git a/testing/bls/utils/BUILD.bazel b/testing/bls/utils/BUILD.bazel index 77a8428181..2c3db17988 100644 --- a/testing/bls/utils/BUILD.bazel +++ b/testing/bls/utils/BUILD.bazel @@ -4,7 +4,7 @@ go_library( name = "go_default_library", testonly = True, srcs = ["utils.go"], - importpath = "github.com/OffchainLabs/prysm/v6/testing/bls/utils", + importpath = "github.com/OffchainLabs/prysm/v7/testing/bls/utils", visibility = ["//testing/bls:__subpackages__"], deps = [ "//io/file:go_default_library", diff --git a/testing/bls/utils/utils.go b/testing/bls/utils/utils.go index e5d64038d7..eab7268af4 100644 --- a/testing/bls/utils/utils.go +++ b/testing/bls/utils/utils.go @@ -6,8 +6,8 @@ import ( "strings" "testing" - "github.com/OffchainLabs/prysm/v6/io/file" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/io/file" + "github.com/OffchainLabs/prysm/v7/testing/require" "github.com/bazelbuild/rules_go/go/tools/bazel" ) diff --git a/testing/bls/verify_test.go b/testing/bls/verify_test.go index ce2568753b..a7fbdc7afe 100644 --- a/testing/bls/verify_test.go +++ b/testing/bls/verify_test.go @@ -5,10 +5,10 @@ import ( "errors" "testing" - "github.com/OffchainLabs/prysm/v6/crypto/bls" - "github.com/OffchainLabs/prysm/v6/crypto/bls/common" - "github.com/OffchainLabs/prysm/v6/testing/bls/utils" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/crypto/bls" + "github.com/OffchainLabs/prysm/v7/crypto/bls/common" + "github.com/OffchainLabs/prysm/v7/testing/bls/utils" + "github.com/OffchainLabs/prysm/v7/testing/require" "github.com/ghodss/yaml" ) diff --git a/testing/endtoend/component_handler_test.go b/testing/endtoend/component_handler_test.go index 9df7849cfe..9743268791 100644 --- a/testing/endtoend/component_handler_test.go +++ b/testing/endtoend/component_handler_test.go @@ -8,11 +8,11 @@ import ( "testing" "time" - "github.com/OffchainLabs/prysm/v6/testing/endtoend/components" - "github.com/OffchainLabs/prysm/v6/testing/endtoend/components/eth1" - "github.com/OffchainLabs/prysm/v6/testing/endtoend/helpers" - e2e "github.com/OffchainLabs/prysm/v6/testing/endtoend/params" - e2etypes "github.com/OffchainLabs/prysm/v6/testing/endtoend/types" + "github.com/OffchainLabs/prysm/v7/testing/endtoend/components" + "github.com/OffchainLabs/prysm/v7/testing/endtoend/components/eth1" + "github.com/OffchainLabs/prysm/v7/testing/endtoend/helpers" + e2e "github.com/OffchainLabs/prysm/v7/testing/endtoend/params" + e2etypes "github.com/OffchainLabs/prysm/v7/testing/endtoend/types" "github.com/pkg/errors" "golang.org/x/sync/errgroup" ) diff --git a/testing/endtoend/components/BUILD.bazel b/testing/endtoend/components/BUILD.bazel index 4ef4f36620..bf8ff52171 100644 --- a/testing/endtoend/components/BUILD.bazel +++ b/testing/endtoend/components/BUILD.bazel @@ -18,7 +18,7 @@ go_library( "//testing/endtoend/static-files/eth1:eth1data", "@lighthouse//:lighthouse_bin", ], - importpath = "github.com/OffchainLabs/prysm/v6/testing/endtoend/components", + importpath = "github.com/OffchainLabs/prysm/v7/testing/endtoend/components", visibility = ["//testing/endtoend:__subpackages__"], deps = [ "//beacon-chain/state:go_default_library", diff --git a/testing/endtoend/components/beacon_node.go b/testing/endtoend/components/beacon_node.go index ebb5931a98..98f9131394 100644 
--- a/testing/endtoend/components/beacon_node.go +++ b/testing/endtoend/components/beacon_node.go @@ -12,17 +12,17 @@ import ( "strings" "syscall" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - cmdshared "github.com/OffchainLabs/prysm/v6/cmd" - "github.com/OffchainLabs/prysm/v6/cmd/beacon-chain/flags" - "github.com/OffchainLabs/prysm/v6/cmd/beacon-chain/genesis" - "github.com/OffchainLabs/prysm/v6/config/features" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/io/file" - "github.com/OffchainLabs/prysm/v6/runtime/interop" - "github.com/OffchainLabs/prysm/v6/testing/endtoend/helpers" - e2e "github.com/OffchainLabs/prysm/v6/testing/endtoend/params" - e2etypes "github.com/OffchainLabs/prysm/v6/testing/endtoend/types" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + cmdshared "github.com/OffchainLabs/prysm/v7/cmd" + "github.com/OffchainLabs/prysm/v7/cmd/beacon-chain/flags" + "github.com/OffchainLabs/prysm/v7/cmd/beacon-chain/genesis" + "github.com/OffchainLabs/prysm/v7/config/features" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/io/file" + "github.com/OffchainLabs/prysm/v7/runtime/interop" + "github.com/OffchainLabs/prysm/v7/testing/endtoend/helpers" + e2e "github.com/OffchainLabs/prysm/v7/testing/endtoend/params" + e2etypes "github.com/OffchainLabs/prysm/v7/testing/endtoend/types" "github.com/bazelbuild/rules_go/go/tools/bazel" "github.com/pkg/errors" ) diff --git a/testing/endtoend/components/boot_node.go b/testing/endtoend/components/boot_node.go index 4f8b6dda7d..5203648779 100644 --- a/testing/endtoend/components/boot_node.go +++ b/testing/endtoend/components/boot_node.go @@ -9,9 +9,9 @@ import ( "strings" "syscall" - "github.com/OffchainLabs/prysm/v6/testing/endtoend/helpers" - e2e "github.com/OffchainLabs/prysm/v6/testing/endtoend/params" - e2etypes "github.com/OffchainLabs/prysm/v6/testing/endtoend/types" + "github.com/OffchainLabs/prysm/v7/testing/endtoend/helpers" + e2e "github.com/OffchainLabs/prysm/v7/testing/endtoend/params" + e2etypes "github.com/OffchainLabs/prysm/v7/testing/endtoend/types" "github.com/bazelbuild/rules_go/go/tools/bazel" ) diff --git a/testing/endtoend/components/builder.go b/testing/endtoend/components/builder.go index f94377d3ec..467b27b8f0 100644 --- a/testing/endtoend/components/builder.go +++ b/testing/endtoend/components/builder.go @@ -9,11 +9,11 @@ import ( "strconv" "strings" - "github.com/OffchainLabs/prysm/v6/io/file" - "github.com/OffchainLabs/prysm/v6/testing/endtoend/helpers" - e2e "github.com/OffchainLabs/prysm/v6/testing/endtoend/params" - e2etypes "github.com/OffchainLabs/prysm/v6/testing/endtoend/types" - "github.com/OffchainLabs/prysm/v6/testing/middleware/builder" + "github.com/OffchainLabs/prysm/v7/io/file" + "github.com/OffchainLabs/prysm/v7/testing/endtoend/helpers" + e2e "github.com/OffchainLabs/prysm/v7/testing/endtoend/params" + e2etypes "github.com/OffchainLabs/prysm/v7/testing/endtoend/types" + "github.com/OffchainLabs/prysm/v7/testing/middleware/builder" "github.com/pkg/errors" "github.com/sirupsen/logrus" ) diff --git a/testing/endtoend/components/eth1/BUILD.bazel b/testing/endtoend/components/eth1/BUILD.bazel index 1fc870faac..fb3cfe53b5 100644 --- a/testing/endtoend/components/eth1/BUILD.bazel +++ b/testing/endtoend/components/eth1/BUILD.bazel @@ -12,7 +12,7 @@ go_library( "proxy.go", "transactions.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/testing/endtoend/components/eth1", + importpath = 
"github.com/OffchainLabs/prysm/v7/testing/endtoend/components/eth1", visibility = ["//testing/endtoend:__subpackages__"], deps = [ "//config/fieldparams:go_default_library", diff --git a/testing/endtoend/components/eth1/depositor.go b/testing/endtoend/components/eth1/depositor.go index 49ee518d01..f7dda70ab6 100644 --- a/testing/endtoend/components/eth1/depositor.go +++ b/testing/endtoend/components/eth1/depositor.go @@ -8,13 +8,13 @@ import ( "sync" "time" - "github.com/OffchainLabs/prysm/v6/config/params" - contracts "github.com/OffchainLabs/prysm/v6/contracts/deposit" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - eth "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - e2e "github.com/OffchainLabs/prysm/v6/testing/endtoend/params" - "github.com/OffchainLabs/prysm/v6/testing/endtoend/types" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/config/params" + contracts "github.com/OffchainLabs/prysm/v7/contracts/deposit" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + eth "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + e2e "github.com/OffchainLabs/prysm/v7/testing/endtoend/params" + "github.com/OffchainLabs/prysm/v7/testing/endtoend/types" + "github.com/OffchainLabs/prysm/v7/testing/util" "github.com/ethereum/go-ethereum/accounts/abi/bind" "github.com/ethereum/go-ethereum/accounts/keystore" "github.com/ethereum/go-ethereum/common" diff --git a/testing/endtoend/components/eth1/depositor_test.go b/testing/endtoend/components/eth1/depositor_test.go index 4428e9ed94..8c485f6d06 100644 --- a/testing/endtoend/components/eth1/depositor_test.go +++ b/testing/endtoend/components/eth1/depositor_test.go @@ -5,8 +5,8 @@ import ( "strings" "testing" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/testing/require" ) func TestComputeDeposits(t *testing.T) { diff --git a/testing/endtoend/components/eth1/helpers.go b/testing/endtoend/components/eth1/helpers.go index 2ba23edfd6..2cb7af1422 100644 --- a/testing/endtoend/components/eth1/helpers.go +++ b/testing/endtoend/components/eth1/helpers.go @@ -5,8 +5,8 @@ import ( "math/big" "time" - "github.com/OffchainLabs/prysm/v6/testing/endtoend/params" - e2etypes "github.com/OffchainLabs/prysm/v6/testing/endtoend/types" + "github.com/OffchainLabs/prysm/v7/testing/endtoend/params" + e2etypes "github.com/OffchainLabs/prysm/v7/testing/endtoend/types" "github.com/ethereum/go-ethereum/accounts/keystore" "github.com/ethereum/go-ethereum/core/types" "github.com/ethereum/go-ethereum/ethclient" diff --git a/testing/endtoend/components/eth1/miner.go b/testing/endtoend/components/eth1/miner.go index e5294ae789..3f09a9e866 100644 --- a/testing/endtoend/components/eth1/miner.go +++ b/testing/endtoend/components/eth1/miner.go @@ -10,13 +10,13 @@ import ( "strings" "syscall" - "github.com/OffchainLabs/prysm/v6/config/params" - contracts "github.com/OffchainLabs/prysm/v6/contracts/deposit" - "github.com/OffchainLabs/prysm/v6/io/file" - "github.com/OffchainLabs/prysm/v6/runtime/interop" - "github.com/OffchainLabs/prysm/v6/testing/endtoend/helpers" - e2e "github.com/OffchainLabs/prysm/v6/testing/endtoend/params" - e2etypes "github.com/OffchainLabs/prysm/v6/testing/endtoend/types" + "github.com/OffchainLabs/prysm/v7/config/params" + contracts "github.com/OffchainLabs/prysm/v7/contracts/deposit" + "github.com/OffchainLabs/prysm/v7/io/file" + 
"github.com/OffchainLabs/prysm/v7/runtime/interop" + "github.com/OffchainLabs/prysm/v7/testing/endtoend/helpers" + e2e "github.com/OffchainLabs/prysm/v7/testing/endtoend/params" + e2etypes "github.com/OffchainLabs/prysm/v7/testing/endtoend/types" "github.com/bazelbuild/rules_go/go/tools/bazel" "github.com/ethereum/go-ethereum/accounts/abi/bind" "github.com/ethereum/go-ethereum/common" diff --git a/testing/endtoend/components/eth1/node.go b/testing/endtoend/components/eth1/node.go index eab69165b9..50d6f73eb4 100644 --- a/testing/endtoend/components/eth1/node.go +++ b/testing/endtoend/components/eth1/node.go @@ -11,12 +11,12 @@ import ( "strings" "syscall" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/io/file" - "github.com/OffchainLabs/prysm/v6/runtime/interop" - "github.com/OffchainLabs/prysm/v6/testing/endtoend/helpers" - e2e "github.com/OffchainLabs/prysm/v6/testing/endtoend/params" - e2etypes "github.com/OffchainLabs/prysm/v6/testing/endtoend/types" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/io/file" + "github.com/OffchainLabs/prysm/v7/runtime/interop" + "github.com/OffchainLabs/prysm/v7/testing/endtoend/helpers" + e2e "github.com/OffchainLabs/prysm/v7/testing/endtoend/params" + e2etypes "github.com/OffchainLabs/prysm/v7/testing/endtoend/types" "github.com/bazelbuild/rules_go/go/tools/bazel" "github.com/pkg/errors" log "github.com/sirupsen/logrus" diff --git a/testing/endtoend/components/eth1/node_set.go b/testing/endtoend/components/eth1/node_set.go index a0a13f08b1..ccd80636ce 100644 --- a/testing/endtoend/components/eth1/node_set.go +++ b/testing/endtoend/components/eth1/node_set.go @@ -3,9 +3,9 @@ package eth1 import ( "context" - "github.com/OffchainLabs/prysm/v6/testing/endtoend/helpers" - e2e "github.com/OffchainLabs/prysm/v6/testing/endtoend/params" - e2etypes "github.com/OffchainLabs/prysm/v6/testing/endtoend/types" + "github.com/OffchainLabs/prysm/v7/testing/endtoend/helpers" + e2e "github.com/OffchainLabs/prysm/v7/testing/endtoend/params" + e2etypes "github.com/OffchainLabs/prysm/v7/testing/endtoend/types" "github.com/pkg/errors" ) diff --git a/testing/endtoend/components/eth1/proxy.go b/testing/endtoend/components/eth1/proxy.go index 0aac5da34e..1de8c1cda2 100644 --- a/testing/endtoend/components/eth1/proxy.go +++ b/testing/endtoend/components/eth1/proxy.go @@ -9,11 +9,11 @@ import ( "strconv" "strings" - "github.com/OffchainLabs/prysm/v6/io/file" - "github.com/OffchainLabs/prysm/v6/testing/endtoend/helpers" - e2e "github.com/OffchainLabs/prysm/v6/testing/endtoend/params" - e2etypes "github.com/OffchainLabs/prysm/v6/testing/endtoend/types" - proxy "github.com/OffchainLabs/prysm/v6/testing/middleware/engine-api-proxy" + "github.com/OffchainLabs/prysm/v7/io/file" + "github.com/OffchainLabs/prysm/v7/testing/endtoend/helpers" + e2e "github.com/OffchainLabs/prysm/v7/testing/endtoend/params" + e2etypes "github.com/OffchainLabs/prysm/v7/testing/endtoend/types" + proxy "github.com/OffchainLabs/prysm/v7/testing/middleware/engine-api-proxy" "github.com/pkg/errors" log "github.com/sirupsen/logrus" ) diff --git a/testing/endtoend/components/eth1/transactions.go b/testing/endtoend/components/eth1/transactions.go index cbf75fa9f3..e6b8f15679 100644 --- a/testing/endtoend/components/eth1/transactions.go +++ b/testing/endtoend/components/eth1/transactions.go @@ -12,10 +12,10 @@ import ( "github.com/MariusVanDerWijden/FuzzyVM/filler" txfuzz "github.com/MariusVanDerWijden/tx-fuzz" - fieldparams 
"github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/crypto/rand" - e2e "github.com/OffchainLabs/prysm/v6/testing/endtoend/params" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/crypto/rand" + e2e "github.com/OffchainLabs/prysm/v7/testing/endtoend/params" "github.com/ethereum/go-ethereum" "github.com/ethereum/go-ethereum/accounts/keystore" "github.com/ethereum/go-ethereum/common" diff --git a/testing/endtoend/components/lighthouse_beacon.go b/testing/endtoend/components/lighthouse_beacon.go index 295b710894..3eb5c4a6e9 100644 --- a/testing/endtoend/components/lighthouse_beacon.go +++ b/testing/endtoend/components/lighthouse_beacon.go @@ -11,11 +11,11 @@ import ( "strings" "syscall" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/io/file" - "github.com/OffchainLabs/prysm/v6/testing/endtoend/helpers" - e2e "github.com/OffchainLabs/prysm/v6/testing/endtoend/params" - e2etypes "github.com/OffchainLabs/prysm/v6/testing/endtoend/types" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/io/file" + "github.com/OffchainLabs/prysm/v7/testing/endtoend/helpers" + e2e "github.com/OffchainLabs/prysm/v7/testing/endtoend/params" + e2etypes "github.com/OffchainLabs/prysm/v7/testing/endtoend/types" "github.com/bazelbuild/rules_go/go/tools/bazel" "github.com/pkg/errors" ) diff --git a/testing/endtoend/components/lighthouse_validator.go b/testing/endtoend/components/lighthouse_validator.go index b162da0458..7cbdcd530b 100644 --- a/testing/endtoend/components/lighthouse_validator.go +++ b/testing/endtoend/components/lighthouse_validator.go @@ -11,13 +11,13 @@ import ( "strings" "syscall" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/io/file" - "github.com/OffchainLabs/prysm/v6/runtime/interop" - "github.com/OffchainLabs/prysm/v6/testing/endtoend/helpers" - e2e "github.com/OffchainLabs/prysm/v6/testing/endtoend/params" - "github.com/OffchainLabs/prysm/v6/testing/endtoend/types" - "github.com/OffchainLabs/prysm/v6/validator/keymanager" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/io/file" + "github.com/OffchainLabs/prysm/v7/runtime/interop" + "github.com/OffchainLabs/prysm/v7/testing/endtoend/helpers" + e2e "github.com/OffchainLabs/prysm/v7/testing/endtoend/params" + "github.com/OffchainLabs/prysm/v7/testing/endtoend/types" + "github.com/OffchainLabs/prysm/v7/validator/keymanager" "github.com/bazelbuild/rules_go/go/tools/bazel" "github.com/google/uuid" "github.com/pkg/errors" diff --git a/testing/endtoend/components/tracing_sink.go b/testing/endtoend/components/tracing_sink.go index 223c9075a2..b5abac7391 100644 --- a/testing/endtoend/components/tracing_sink.go +++ b/testing/endtoend/components/tracing_sink.go @@ -11,9 +11,9 @@ import ( "syscall" "time" - "github.com/OffchainLabs/prysm/v6/testing/endtoend/helpers" - e2e "github.com/OffchainLabs/prysm/v6/testing/endtoend/params" - "github.com/OffchainLabs/prysm/v6/testing/endtoend/types" + "github.com/OffchainLabs/prysm/v7/testing/endtoend/helpers" + e2e "github.com/OffchainLabs/prysm/v7/testing/endtoend/params" + "github.com/OffchainLabs/prysm/v7/testing/endtoend/types" "github.com/pkg/errors" ) diff --git a/testing/endtoend/components/validator.go b/testing/endtoend/components/validator.go index 
06a654d02b..0fa5c6aa03 100644 --- a/testing/endtoend/components/validator.go +++ b/testing/endtoend/components/validator.go @@ -11,17 +11,17 @@ import ( "strings" "syscall" - cmdshared "github.com/OffchainLabs/prysm/v6/cmd" - "github.com/OffchainLabs/prysm/v6/cmd/validator/flags" - "github.com/OffchainLabs/prysm/v6/config/features" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/io/file" - validatorpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1/validator-client" - "github.com/OffchainLabs/prysm/v6/runtime/interop" - "github.com/OffchainLabs/prysm/v6/testing/endtoend/helpers" - e2e "github.com/OffchainLabs/prysm/v6/testing/endtoend/params" - e2etypes "github.com/OffchainLabs/prysm/v6/testing/endtoend/types" + cmdshared "github.com/OffchainLabs/prysm/v7/cmd" + "github.com/OffchainLabs/prysm/v7/cmd/validator/flags" + "github.com/OffchainLabs/prysm/v7/config/features" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/io/file" + validatorpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1/validator-client" + "github.com/OffchainLabs/prysm/v7/runtime/interop" + "github.com/OffchainLabs/prysm/v7/testing/endtoend/helpers" + e2e "github.com/OffchainLabs/prysm/v7/testing/endtoend/params" + e2etypes "github.com/OffchainLabs/prysm/v7/testing/endtoend/types" "github.com/bazelbuild/rules_go/go/tools/bazel" "github.com/ethereum/go-ethereum/common" "github.com/ethereum/go-ethereum/common/hexutil" diff --git a/testing/endtoend/components/web3remotesigner.go b/testing/endtoend/components/web3remotesigner.go index f84d8bfc1a..9348b9bea0 100644 --- a/testing/endtoend/components/web3remotesigner.go +++ b/testing/endtoend/components/web3remotesigner.go @@ -16,12 +16,12 @@ import ( "syscall" "time" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/crypto/bls" - "github.com/OffchainLabs/prysm/v6/io/file" - "github.com/OffchainLabs/prysm/v6/runtime/interop" - e2e "github.com/OffchainLabs/prysm/v6/testing/endtoend/params" - e2etypes "github.com/OffchainLabs/prysm/v6/testing/endtoend/types" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/crypto/bls" + "github.com/OffchainLabs/prysm/v7/io/file" + "github.com/OffchainLabs/prysm/v7/runtime/interop" + e2e "github.com/OffchainLabs/prysm/v7/testing/endtoend/params" + e2etypes "github.com/OffchainLabs/prysm/v7/testing/endtoend/types" "github.com/bazelbuild/rules_go/go/tools/bazel" "github.com/ethereum/go-ethereum/common/hexutil" "github.com/pkg/errors" diff --git a/testing/endtoend/components/web3remotesigner_test.go b/testing/endtoend/components/web3remotesigner_test.go index 79de5b2f92..f41c9696a9 100644 --- a/testing/endtoend/components/web3remotesigner_test.go +++ b/testing/endtoend/components/web3remotesigner_test.go @@ -5,10 +5,10 @@ import ( "testing" "time" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/testing/endtoend/components" - e2eparams "github.com/OffchainLabs/prysm/v6/testing/endtoend/params" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/testing/endtoend/components" + e2eparams "github.com/OffchainLabs/prysm/v7/testing/endtoend/params" + "github.com/OffchainLabs/prysm/v7/testing/require" ) func 
TestWeb3RemoteSigner_StartsAndReturnsPublicKeys(t *testing.T) { diff --git a/testing/endtoend/endtoend_setup_test.go b/testing/endtoend/endtoend_setup_test.go index a28bab69f5..174c867e48 100644 --- a/testing/endtoend/endtoend_setup_test.go +++ b/testing/endtoend/endtoend_setup_test.go @@ -6,13 +6,13 @@ import ( "strconv" "testing" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - ev "github.com/OffchainLabs/prysm/v6/testing/endtoend/evaluators" - "github.com/OffchainLabs/prysm/v6/testing/endtoend/evaluators/beaconapi" - e2eParams "github.com/OffchainLabs/prysm/v6/testing/endtoend/params" - "github.com/OffchainLabs/prysm/v6/testing/endtoend/types" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + ev "github.com/OffchainLabs/prysm/v7/testing/endtoend/evaluators" + "github.com/OffchainLabs/prysm/v7/testing/endtoend/evaluators/beaconapi" + e2eParams "github.com/OffchainLabs/prysm/v7/testing/endtoend/params" + "github.com/OffchainLabs/prysm/v7/testing/endtoend/types" + "github.com/OffchainLabs/prysm/v7/testing/require" ) func e2eMinimal(t *testing.T, cfg *params.BeaconChainConfig, cfgo ...types.E2EConfigOpt) *testRunner { diff --git a/testing/endtoend/endtoend_test.go b/testing/endtoend/endtoend_test.go index 42330f2589..70c661ae59 100644 --- a/testing/endtoend/endtoend_test.go +++ b/testing/endtoend/endtoend_test.go @@ -16,24 +16,24 @@ import ( "testing" "time" - "github.com/OffchainLabs/prysm/v6/api/client/beacon" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/transition" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/genesis" - "github.com/OffchainLabs/prysm/v6/io/file" - enginev1 "github.com/OffchainLabs/prysm/v6/proto/engine/v1" - eth "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/endtoend/components" - "github.com/OffchainLabs/prysm/v6/testing/endtoend/components/eth1" - ev "github.com/OffchainLabs/prysm/v6/testing/endtoend/evaluators" - "github.com/OffchainLabs/prysm/v6/testing/endtoend/helpers" - e2e "github.com/OffchainLabs/prysm/v6/testing/endtoend/params" - e2etypes "github.com/OffchainLabs/prysm/v6/testing/endtoend/types" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/api/client/beacon" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/transition" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/genesis" + "github.com/OffchainLabs/prysm/v7/io/file" + enginev1 "github.com/OffchainLabs/prysm/v7/proto/engine/v1" + eth "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/endtoend/components" + "github.com/OffchainLabs/prysm/v7/testing/endtoend/components/eth1" + ev "github.com/OffchainLabs/prysm/v7/testing/endtoend/evaluators" + "github.com/OffchainLabs/prysm/v7/testing/endtoend/helpers" + e2e 
"github.com/OffchainLabs/prysm/v7/testing/endtoend/params" + e2etypes "github.com/OffchainLabs/prysm/v7/testing/endtoend/types" + "github.com/OffchainLabs/prysm/v7/testing/require" "github.com/pkg/errors" log "github.com/sirupsen/logrus" "golang.org/x/sync/errgroup" diff --git a/testing/endtoend/evaluators/BUILD.bazel b/testing/endtoend/evaluators/BUILD.bazel index 2e42203a42..d9889fcf7f 100644 --- a/testing/endtoend/evaluators/BUILD.bazel +++ b/testing/endtoend/evaluators/BUILD.bazel @@ -18,7 +18,7 @@ go_library( "slashing_helper.go", "validator.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/testing/endtoend/evaluators", + importpath = "github.com/OffchainLabs/prysm/v7/testing/endtoend/evaluators", visibility = ["//testing/endtoend:__subpackages__"], deps = [ "//api/client/beacon:go_default_library", diff --git a/testing/endtoend/evaluators/beaconapi/BUILD.bazel b/testing/endtoend/evaluators/beaconapi/BUILD.bazel index 3965f28d2d..3ac9aff0b3 100644 --- a/testing/endtoend/evaluators/beaconapi/BUILD.bazel +++ b/testing/endtoend/evaluators/beaconapi/BUILD.bazel @@ -9,7 +9,7 @@ go_library( "util.go", "verify.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/testing/endtoend/evaluators/beaconapi", + importpath = "github.com/OffchainLabs/prysm/v7/testing/endtoend/evaluators/beaconapi", visibility = ["//testing/endtoend:__subpackages__"], deps = [ "//api:go_default_library", diff --git a/testing/endtoend/evaluators/beaconapi/requests.go b/testing/endtoend/evaluators/beaconapi/requests.go index bff0f706a6..7abdaa4249 100644 --- a/testing/endtoend/evaluators/beaconapi/requests.go +++ b/testing/endtoend/evaluators/beaconapi/requests.go @@ -5,9 +5,9 @@ import ( "strconv" "strings" - "github.com/OffchainLabs/prysm/v6/api/server/structs" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/api/server/structs" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" "github.com/pkg/errors" ) diff --git a/testing/endtoend/evaluators/beaconapi/types.go b/testing/endtoend/evaluators/beaconapi/types.go index 254d945bc7..8253f79eae 100644 --- a/testing/endtoend/evaluators/beaconapi/types.go +++ b/testing/endtoend/evaluators/beaconapi/types.go @@ -1,7 +1,7 @@ package beaconapi import ( - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" ) type endpoint interface { diff --git a/testing/endtoend/evaluators/beaconapi/util.go b/testing/endtoend/evaluators/beaconapi/util.go index 565bcb22e6..43cb842537 100644 --- a/testing/endtoend/evaluators/beaconapi/util.go +++ b/testing/endtoend/evaluators/beaconapi/util.go @@ -7,8 +7,8 @@ import ( "io" "net/http" - "github.com/OffchainLabs/prysm/v6/api" - "github.com/OffchainLabs/prysm/v6/testing/endtoend/params" + "github.com/OffchainLabs/prysm/v7/api" + "github.com/OffchainLabs/prysm/v7/testing/endtoend/params" "github.com/pkg/errors" log "github.com/sirupsen/logrus" ) diff --git a/testing/endtoend/evaluators/beaconapi/verify.go b/testing/endtoend/evaluators/beaconapi/verify.go index 37b6188fcc..a47e599380 100644 --- a/testing/endtoend/evaluators/beaconapi/verify.go +++ b/testing/endtoend/evaluators/beaconapi/verify.go @@ -10,14 +10,14 @@ import ( "strings" "time" - "github.com/OffchainLabs/prysm/v6/api/server/structs" - "github.com/OffchainLabs/prysm/v6/config/params" - 
"github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - params2 "github.com/OffchainLabs/prysm/v6/testing/endtoend/params" - "github.com/OffchainLabs/prysm/v6/testing/endtoend/policies" - e2etypes "github.com/OffchainLabs/prysm/v6/testing/endtoend/types" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/api/server/structs" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + params2 "github.com/OffchainLabs/prysm/v7/testing/endtoend/params" + "github.com/OffchainLabs/prysm/v7/testing/endtoend/policies" + e2etypes "github.com/OffchainLabs/prysm/v7/testing/endtoend/types" + "github.com/OffchainLabs/prysm/v7/time/slots" "github.com/pkg/errors" "google.golang.org/grpc" ) diff --git a/testing/endtoend/evaluators/builder.go b/testing/endtoend/evaluators/builder.go index 00f8dceaaa..da974e2bb8 100644 --- a/testing/endtoend/evaluators/builder.go +++ b/testing/endtoend/evaluators/builder.go @@ -3,14 +3,14 @@ package evaluators import ( "context" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/encoding/ssz" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" - "github.com/OffchainLabs/prysm/v6/testing/endtoend/policies" - e2etypes "github.com/OffchainLabs/prysm/v6/testing/endtoend/types" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/encoding/ssz" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" + "github.com/OffchainLabs/prysm/v7/testing/endtoend/policies" + e2etypes "github.com/OffchainLabs/prysm/v7/testing/endtoend/types" + "github.com/OffchainLabs/prysm/v7/time/slots" "github.com/ethereum/go-ethereum/common/hexutil" "github.com/pkg/errors" "google.golang.org/grpc" diff --git a/testing/endtoend/evaluators/data.go b/testing/endtoend/evaluators/data.go index a0f639baa6..080c9d8ad2 100644 --- a/testing/endtoend/evaluators/data.go +++ b/testing/endtoend/evaluators/data.go @@ -4,9 +4,9 @@ import ( "context" "errors" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - eth "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - e2etypes "github.com/OffchainLabs/prysm/v6/testing/endtoend/types" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + eth "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + e2etypes "github.com/OffchainLabs/prysm/v7/testing/endtoend/types" "google.golang.org/grpc" ) diff --git a/testing/endtoend/evaluators/execution_engine.go b/testing/endtoend/evaluators/execution_engine.go index 2de16b86c0..b05202335b 100644 --- a/testing/endtoend/evaluators/execution_engine.go +++ b/testing/endtoend/evaluators/execution_engine.go @@ -6,14 +6,14 @@ import ( "net/http" "strconv" - "github.com/OffchainLabs/prysm/v6/api/server/structs" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/network/httputil" - "github.com/OffchainLabs/prysm/v6/runtime/version" - "github.com/OffchainLabs/prysm/v6/testing/endtoend/params" - "github.com/OffchainLabs/prysm/v6/testing/endtoend/policies" - 
"github.com/OffchainLabs/prysm/v6/testing/endtoend/types" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/api/server/structs" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/network/httputil" + "github.com/OffchainLabs/prysm/v7/runtime/version" + "github.com/OffchainLabs/prysm/v7/testing/endtoend/params" + "github.com/OffchainLabs/prysm/v7/testing/endtoend/policies" + "github.com/OffchainLabs/prysm/v7/testing/endtoend/types" + "github.com/OffchainLabs/prysm/v7/time/slots" "github.com/pkg/errors" "google.golang.org/grpc" ) diff --git a/testing/endtoend/evaluators/fee_recipient.go b/testing/endtoend/evaluators/fee_recipient.go index 33b5654ff7..7439218a95 100644 --- a/testing/endtoend/evaluators/fee_recipient.go +++ b/testing/endtoend/evaluators/fee_recipient.go @@ -5,14 +5,14 @@ import ( "context" "fmt" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/interop" - "github.com/OffchainLabs/prysm/v6/testing/endtoend/components" - e2e "github.com/OffchainLabs/prysm/v6/testing/endtoend/params" - "github.com/OffchainLabs/prysm/v6/testing/endtoend/policies" - "github.com/OffchainLabs/prysm/v6/testing/endtoend/types" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/interop" + "github.com/OffchainLabs/prysm/v7/testing/endtoend/components" + e2e "github.com/OffchainLabs/prysm/v7/testing/endtoend/params" + "github.com/OffchainLabs/prysm/v7/testing/endtoend/policies" + "github.com/OffchainLabs/prysm/v7/testing/endtoend/types" "github.com/ethereum/go-ethereum/common" "github.com/ethereum/go-ethereum/common/hexutil" "github.com/ethereum/go-ethereum/ethclient" diff --git a/testing/endtoend/evaluators/finality.go b/testing/endtoend/evaluators/finality.go index 8f50e53de7..d1d1cc911a 100644 --- a/testing/endtoend/evaluators/finality.go +++ b/testing/endtoend/evaluators/finality.go @@ -4,10 +4,10 @@ import ( "context" "fmt" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - eth "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/endtoend/policies" - "github.com/OffchainLabs/prysm/v6/testing/endtoend/types" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + eth "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/endtoend/policies" + "github.com/OffchainLabs/prysm/v7/testing/endtoend/types" "github.com/pkg/errors" "google.golang.org/grpc" "google.golang.org/protobuf/types/known/emptypb" diff --git a/testing/endtoend/evaluators/fork.go b/testing/endtoend/evaluators/fork.go index 44781faa1a..da167c86d6 100644 --- a/testing/endtoend/evaluators/fork.go +++ b/testing/endtoend/evaluators/fork.go @@ -4,14 +4,14 @@ import ( "context" "time" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" - "github.com/OffchainLabs/prysm/v6/testing/endtoend/policies" - "github.com/OffchainLabs/prysm/v6/testing/endtoend/types" - 
"github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" + "github.com/OffchainLabs/prysm/v7/testing/endtoend/policies" + "github.com/OffchainLabs/prysm/v7/testing/endtoend/types" + "github.com/OffchainLabs/prysm/v7/time/slots" "github.com/pkg/errors" "google.golang.org/grpc" ) diff --git a/testing/endtoend/evaluators/metrics.go b/testing/endtoend/evaluators/metrics.go index b4e3b9cf27..27e022892d 100644 --- a/testing/endtoend/evaluators/metrics.go +++ b/testing/endtoend/evaluators/metrics.go @@ -10,14 +10,14 @@ import ( "strings" "time" - "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/genesis" - eth "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - e2e "github.com/OffchainLabs/prysm/v6/testing/endtoend/params" - "github.com/OffchainLabs/prysm/v6/testing/endtoend/policies" - "github.com/OffchainLabs/prysm/v6/testing/endtoend/types" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/genesis" + eth "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + e2e "github.com/OffchainLabs/prysm/v7/testing/endtoend/params" + "github.com/OffchainLabs/prysm/v7/testing/endtoend/policies" + "github.com/OffchainLabs/prysm/v7/testing/endtoend/types" + "github.com/OffchainLabs/prysm/v7/time/slots" "github.com/pkg/errors" "google.golang.org/grpc" "google.golang.org/protobuf/types/known/emptypb" diff --git a/testing/endtoend/evaluators/node.go b/testing/endtoend/evaluators/node.go index bdbd0dbdca..fb3cb23c54 100644 --- a/testing/endtoend/evaluators/node.go +++ b/testing/endtoend/evaluators/node.go @@ -10,11 +10,11 @@ import ( "net/http" "time" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - eth "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - e2e "github.com/OffchainLabs/prysm/v6/testing/endtoend/params" - "github.com/OffchainLabs/prysm/v6/testing/endtoend/policies" - e2etypes "github.com/OffchainLabs/prysm/v6/testing/endtoend/types" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + eth "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + e2e "github.com/OffchainLabs/prysm/v7/testing/endtoend/params" + "github.com/OffchainLabs/prysm/v7/testing/endtoend/policies" + e2etypes "github.com/OffchainLabs/prysm/v7/testing/endtoend/types" "github.com/pkg/errors" "golang.org/x/sync/errgroup" "google.golang.org/grpc" diff --git a/testing/endtoend/evaluators/operations.go b/testing/endtoend/evaluators/operations.go index 0b44a9fd33..deb9a0570c 100644 --- a/testing/endtoend/evaluators/operations.go +++ b/testing/endtoend/evaluators/operations.go @@ -7,22 +7,22 @@ import ( "math" "strings" - "github.com/OffchainLabs/prysm/v6/api/client/beacon" - "github.com/OffchainLabs/prysm/v6/api/server/structs" - corehelpers "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/signing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - 
"github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/encoding/ssz/detect" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/endtoend/helpers" - e2e "github.com/OffchainLabs/prysm/v6/testing/endtoend/params" - "github.com/OffchainLabs/prysm/v6/testing/endtoend/policies" - e2etypes "github.com/OffchainLabs/prysm/v6/testing/endtoend/types" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/api/client/beacon" + "github.com/OffchainLabs/prysm/v7/api/server/structs" + corehelpers "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/signing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/encoding/ssz/detect" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/endtoend/helpers" + e2e "github.com/OffchainLabs/prysm/v7/testing/endtoend/params" + "github.com/OffchainLabs/prysm/v7/testing/endtoend/policies" + e2etypes "github.com/OffchainLabs/prysm/v7/testing/endtoend/types" + "github.com/OffchainLabs/prysm/v7/testing/util" "github.com/ethereum/go-ethereum/common/hexutil" "github.com/pkg/errors" "golang.org/x/exp/rand" diff --git a/testing/endtoend/evaluators/peers.go b/testing/endtoend/evaluators/peers.go index 9c8af37b9c..813d3705c5 100644 --- a/testing/endtoend/evaluators/peers.go +++ b/testing/endtoend/evaluators/peers.go @@ -3,9 +3,9 @@ package evaluators import ( "context" - eth "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/endtoend/policies" - "github.com/OffchainLabs/prysm/v6/testing/endtoend/types" + eth "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/endtoend/policies" + "github.com/OffchainLabs/prysm/v7/testing/endtoend/types" "github.com/pkg/errors" "google.golang.org/grpc" "google.golang.org/protobuf/types/known/emptypb" diff --git a/testing/endtoend/evaluators/slashing.go b/testing/endtoend/evaluators/slashing.go index 91d342c246..65158a0f67 100644 --- a/testing/endtoend/evaluators/slashing.go +++ b/testing/endtoend/evaluators/slashing.go @@ -4,19 +4,19 @@ import ( "context" "fmt" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/signing" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/container/slice" - "github.com/OffchainLabs/prysm/v6/crypto/bls" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - eth "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - e2e "github.com/OffchainLabs/prysm/v6/testing/endtoend/params" - "github.com/OffchainLabs/prysm/v6/testing/endtoend/policies" - e2eTypes "github.com/OffchainLabs/prysm/v6/testing/endtoend/types" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/signing" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + 
"github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/container/slice" + "github.com/OffchainLabs/prysm/v7/crypto/bls" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + eth "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + e2e "github.com/OffchainLabs/prysm/v7/testing/endtoend/params" + "github.com/OffchainLabs/prysm/v7/testing/endtoend/policies" + e2eTypes "github.com/OffchainLabs/prysm/v7/testing/endtoend/types" + "github.com/OffchainLabs/prysm/v7/testing/util" "github.com/pkg/errors" "google.golang.org/grpc" "google.golang.org/protobuf/types/known/emptypb" diff --git a/testing/endtoend/evaluators/slashing_helper.go b/testing/endtoend/evaluators/slashing_helper.go index 30d9112cd6..b6dc975bd3 100644 --- a/testing/endtoend/evaluators/slashing_helper.go +++ b/testing/endtoend/evaluators/slashing_helper.go @@ -5,13 +5,13 @@ import ( "crypto/rand" "github.com/OffchainLabs/go-bitfield" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/signing" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/crypto/bls" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - eth "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/signing" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/crypto/bls" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + eth "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/util" "github.com/pkg/errors" "google.golang.org/protobuf/types/known/emptypb" ) diff --git a/testing/endtoend/evaluators/validator.go b/testing/endtoend/evaluators/validator.go index 508cf67ee1..aed605ba59 100644 --- a/testing/endtoend/evaluators/validator.go +++ b/testing/endtoend/evaluators/validator.go @@ -7,20 +7,20 @@ import ( "net/http" "strconv" - "github.com/OffchainLabs/prysm/v6/api/server/structs" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/altair" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/network/httputil" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" - e2eparams "github.com/OffchainLabs/prysm/v6/testing/endtoend/params" - "github.com/OffchainLabs/prysm/v6/testing/endtoend/policies" - "github.com/OffchainLabs/prysm/v6/testing/endtoend/types" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/api/server/structs" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/altair" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/network/httputil" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + 
"github.com/OffchainLabs/prysm/v7/runtime/version" + e2eparams "github.com/OffchainLabs/prysm/v7/testing/endtoend/params" + "github.com/OffchainLabs/prysm/v7/testing/endtoend/policies" + "github.com/OffchainLabs/prysm/v7/testing/endtoend/types" + "github.com/OffchainLabs/prysm/v7/time/slots" "github.com/pkg/errors" "google.golang.org/grpc" "google.golang.org/protobuf/types/known/emptypb" diff --git a/testing/endtoend/helpers/BUILD.bazel b/testing/endtoend/helpers/BUILD.bazel index dedd604485..f88ff48a33 100644 --- a/testing/endtoend/helpers/BUILD.bazel +++ b/testing/endtoend/helpers/BUILD.bazel @@ -8,7 +8,7 @@ go_library( "helpers.go", "keystore.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/testing/endtoend/helpers", + importpath = "github.com/OffchainLabs/prysm/v7/testing/endtoend/helpers", visibility = ["//testing/endtoend:__subpackages__"], deps = [ "//config/params:go_default_library", diff --git a/testing/endtoend/helpers/epochTimer.go b/testing/endtoend/helpers/epochTimer.go index f87cbeac7f..cbbe8a12eb 100644 --- a/testing/endtoend/helpers/epochTimer.go +++ b/testing/endtoend/helpers/epochTimer.go @@ -3,7 +3,7 @@ package helpers import ( "time" - prysmTime "github.com/OffchainLabs/prysm/v6/time" + prysmTime "github.com/OffchainLabs/prysm/v7/time" ) // EpochTicker is a special ticker for timing epoch changes. diff --git a/testing/endtoend/helpers/helpers.go b/testing/endtoend/helpers/helpers.go index de1c9acb5c..54911dab3c 100644 --- a/testing/endtoend/helpers/helpers.go +++ b/testing/endtoend/helpers/helpers.go @@ -17,11 +17,11 @@ import ( "testing" "time" - "github.com/OffchainLabs/prysm/v6/config/params" - eth "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - e2e "github.com/OffchainLabs/prysm/v6/testing/endtoend/params" - e2etypes "github.com/OffchainLabs/prysm/v6/testing/endtoend/types" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/config/params" + eth "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + e2e "github.com/OffchainLabs/prysm/v7/testing/endtoend/params" + e2etypes "github.com/OffchainLabs/prysm/v7/testing/endtoend/types" + "github.com/OffchainLabs/prysm/v7/time/slots" "github.com/ethereum/go-ethereum/ethclient" "github.com/ethereum/go-ethereum/rpc" "github.com/pkg/errors" diff --git a/testing/endtoend/mainnet_e2e_test.go b/testing/endtoend/mainnet_e2e_test.go index 9ea73978f1..6b460e85d6 100644 --- a/testing/endtoend/mainnet_e2e_test.go +++ b/testing/endtoend/mainnet_e2e_test.go @@ -3,9 +3,9 @@ package endtoend import ( "testing" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/runtime/version" - "github.com/OffchainLabs/prysm/v6/testing/endtoend/types" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/runtime/version" + "github.com/OffchainLabs/prysm/v7/testing/endtoend/types" ) // Run mainnet e2e config with the current release validator against latest beacon node. 
diff --git a/testing/endtoend/mainnet_scenario_e2e_test.go b/testing/endtoend/mainnet_scenario_e2e_test.go index acf68d2157..6dffc3425a 100644 --- a/testing/endtoend/mainnet_scenario_e2e_test.go +++ b/testing/endtoend/mainnet_scenario_e2e_test.go @@ -3,9 +3,9 @@ package endtoend import ( "testing" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/runtime/version" - "github.com/OffchainLabs/prysm/v6/testing/endtoend/types" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/runtime/version" + "github.com/OffchainLabs/prysm/v7/testing/endtoend/types" ) func TestEndToEnd_MultiScenarioRun_Multiclient(t *testing.T) { diff --git a/testing/endtoend/minimal_builder_e2e_test.go b/testing/endtoend/minimal_builder_e2e_test.go index cafe1d40e2..80c02c01ef 100644 --- a/testing/endtoend/minimal_builder_e2e_test.go +++ b/testing/endtoend/minimal_builder_e2e_test.go @@ -3,9 +3,9 @@ package endtoend import ( "testing" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/runtime/version" - "github.com/OffchainLabs/prysm/v6/testing/endtoend/types" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/runtime/version" + "github.com/OffchainLabs/prysm/v7/testing/endtoend/types" ) func TestEndToEnd_MinimalConfig_WithBuilder(t *testing.T) { diff --git a/testing/endtoend/minimal_e2e_test.go b/testing/endtoend/minimal_e2e_test.go index 1d88516f01..f11f5970d2 100644 --- a/testing/endtoend/minimal_e2e_test.go +++ b/testing/endtoend/minimal_e2e_test.go @@ -3,9 +3,9 @@ package endtoend import ( "testing" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/runtime/version" - "github.com/OffchainLabs/prysm/v6/testing/endtoend/types" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/runtime/version" + "github.com/OffchainLabs/prysm/v7/testing/endtoend/types" ) func TestEndToEnd_MinimalConfig(t *testing.T) { diff --git a/testing/endtoend/minimal_scenario_e2e_test.go b/testing/endtoend/minimal_scenario_e2e_test.go index 1d4151dd6e..7f6b4ec4c0 100644 --- a/testing/endtoend/minimal_scenario_e2e_test.go +++ b/testing/endtoend/minimal_scenario_e2e_test.go @@ -3,9 +3,9 @@ package endtoend import ( "testing" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/runtime/version" - "github.com/OffchainLabs/prysm/v6/testing/endtoend/types" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/runtime/version" + "github.com/OffchainLabs/prysm/v7/testing/endtoend/types" ) func TestEndToEnd_MultiScenarioRun(t *testing.T) { diff --git a/testing/endtoend/minimal_slashing_e2e_test.go b/testing/endtoend/minimal_slashing_e2e_test.go index 15d54b1cf1..6029eb9cda 100644 --- a/testing/endtoend/minimal_slashing_e2e_test.go +++ b/testing/endtoend/minimal_slashing_e2e_test.go @@ -4,11 +4,11 @@ import ( "fmt" "testing" - "github.com/OffchainLabs/prysm/v6/config/params" - ev "github.com/OffchainLabs/prysm/v6/testing/endtoend/evaluators" - e2eParams "github.com/OffchainLabs/prysm/v6/testing/endtoend/params" - "github.com/OffchainLabs/prysm/v6/testing/endtoend/types" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/config/params" + ev "github.com/OffchainLabs/prysm/v7/testing/endtoend/evaluators" + e2eParams "github.com/OffchainLabs/prysm/v7/testing/endtoend/params" + "github.com/OffchainLabs/prysm/v7/testing/endtoend/types" + 
"github.com/OffchainLabs/prysm/v7/testing/require" ) func TestEndToEnd_Slasher_MinimalConfig(t *testing.T) { diff --git a/testing/endtoend/params/BUILD.bazel b/testing/endtoend/params/BUILD.bazel index f3c9a3351b..18bfd5dcfc 100644 --- a/testing/endtoend/params/BUILD.bazel +++ b/testing/endtoend/params/BUILD.bazel @@ -7,7 +7,7 @@ go_library( "const.go", "params.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/testing/endtoend/params", + importpath = "github.com/OffchainLabs/prysm/v7/testing/endtoend/params", visibility = ["//visibility:public"], deps = [ "//io/file:go_default_library", diff --git a/testing/endtoend/params/params.go b/testing/endtoend/params/params.go index 06df912c40..f794890d5d 100644 --- a/testing/endtoend/params/params.go +++ b/testing/endtoend/params/params.go @@ -14,7 +14,7 @@ import ( "testing" "time" - "github.com/OffchainLabs/prysm/v6/io/file" + "github.com/OffchainLabs/prysm/v7/io/file" "github.com/bazelbuild/rules_go/go/tools/bazel" "github.com/ethereum/go-ethereum/core/types" ) diff --git a/testing/endtoend/params/params_test.go b/testing/endtoend/params/params_test.go index 1cbbd392ee..b1b310bf0d 100644 --- a/testing/endtoend/params/params_test.go +++ b/testing/endtoend/params/params_test.go @@ -3,8 +3,8 @@ package params import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" ) func Test_port(t *testing.T) { diff --git a/testing/endtoend/policies/BUILD.bazel b/testing/endtoend/policies/BUILD.bazel index 74d006a2f2..d8c6bc87ab 100644 --- a/testing/endtoend/policies/BUILD.bazel +++ b/testing/endtoend/policies/BUILD.bazel @@ -3,7 +3,7 @@ load("@prysm//tools/go:def.bzl", "go_library") go_library( name = "go_default_library", srcs = ["policies.go"], - importpath = "github.com/OffchainLabs/prysm/v6/testing/endtoend/policies", + importpath = "github.com/OffchainLabs/prysm/v7/testing/endtoend/policies", visibility = ["//visibility:public"], deps = ["//consensus-types/primitives:go_default_library"], ) diff --git a/testing/endtoend/policies/policies.go b/testing/endtoend/policies/policies.go index d0af20b407..2bccc3a80d 100644 --- a/testing/endtoend/policies/policies.go +++ b/testing/endtoend/policies/policies.go @@ -1,6 +1,6 @@ package policies -import "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" +import "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" // AfterNthEpoch runs for every epoch after the provided epoch. 
func AfterNthEpoch(afterEpoch primitives.Epoch) func(epoch primitives.Epoch) bool { diff --git a/testing/endtoend/slasher_simulator_e2e_test.go b/testing/endtoend/slasher_simulator_e2e_test.go index bcfa6817cb..da78e0a274 100644 --- a/testing/endtoend/slasher_simulator_e2e_test.go +++ b/testing/endtoend/slasher_simulator_e2e_test.go @@ -6,18 +6,18 @@ import ( "strconv" "testing" - mock "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/testing" - dbtest "github.com/OffchainLabs/prysm/v6/beacon-chain/db/testing" - mockslashings "github.com/OffchainLabs/prysm/v6/beacon-chain/operations/slashings/mock" - "github.com/OffchainLabs/prysm/v6/beacon-chain/startup" - mockstategen "github.com/OffchainLabs/prysm/v6/beacon-chain/state/stategen/mock" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/crypto/bls" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/require" - slashersimulator "github.com/OffchainLabs/prysm/v6/testing/slasher/simulator" - "github.com/OffchainLabs/prysm/v6/testing/util" + mock "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain/testing" + dbtest "github.com/OffchainLabs/prysm/v7/beacon-chain/db/testing" + mockslashings "github.com/OffchainLabs/prysm/v7/beacon-chain/operations/slashings/mock" + "github.com/OffchainLabs/prysm/v7/beacon-chain/startup" + mockstategen "github.com/OffchainLabs/prysm/v7/beacon-chain/state/stategen/mock" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/crypto/bls" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/require" + slashersimulator "github.com/OffchainLabs/prysm/v7/testing/slasher/simulator" + "github.com/OffchainLabs/prysm/v7/testing/util" logTest "github.com/sirupsen/logrus/hooks/test" ) diff --git a/testing/endtoend/types/BUILD.bazel b/testing/endtoend/types/BUILD.bazel index 662a63091f..b34c009de0 100644 --- a/testing/endtoend/types/BUILD.bazel +++ b/testing/endtoend/types/BUILD.bazel @@ -8,7 +8,7 @@ go_library( "fork.go", "types.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/testing/endtoend/types", + importpath = "github.com/OffchainLabs/prysm/v7/testing/endtoend/types", visibility = ["//testing/endtoend:__subpackages__"], deps = [ "//api:go_default_library", diff --git a/testing/endtoend/types/fork.go b/testing/endtoend/types/fork.go index 032b262a5b..56cbc3011b 100644 --- a/testing/endtoend/types/fork.go +++ b/testing/endtoend/types/fork.go @@ -4,8 +4,8 @@ import ( "fmt" "math" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/runtime/version" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/runtime/version" ) func InitForkCfg(start, end int, c *params.BeaconChainConfig) *params.BeaconChainConfig { diff --git a/testing/endtoend/types/types.go b/testing/endtoend/types/types.go index ef263663f0..887ead02be 100644 --- a/testing/endtoend/types/types.go +++ b/testing/endtoend/types/types.go @@ -6,10 +6,10 @@ import ( "context" "os" - "github.com/OffchainLabs/prysm/v6/api" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/runtime/version" + "github.com/OffchainLabs/prysm/v7/api" + "github.com/OffchainLabs/prysm/v7/config/params" + 
"github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/runtime/version" "github.com/sirupsen/logrus" "google.golang.org/grpc" ) diff --git a/testing/fuzz/BUILD.bazel b/testing/fuzz/BUILD.bazel index 1506ca0190..614f8742ad 100644 --- a/testing/fuzz/BUILD.bazel +++ b/testing/fuzz/BUILD.bazel @@ -3,6 +3,6 @@ load("@prysm//tools/go:def.bzl", "go_library") go_library( name = "go_default_library", srcs = ["fuzz.go"], - importpath = "github.com/OffchainLabs/prysm/v6/testing/fuzz", + importpath = "github.com/OffchainLabs/prysm/v7/testing/fuzz", visibility = ["//visibility:public"], ) diff --git a/testing/middleware/builder/BUILD.bazel b/testing/middleware/builder/BUILD.bazel index 0c34a097b9..d0bb50707f 100644 --- a/testing/middleware/builder/BUILD.bazel +++ b/testing/middleware/builder/BUILD.bazel @@ -6,7 +6,7 @@ go_library( "builder.go", "options.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/testing/middleware/builder", + importpath = "github.com/OffchainLabs/prysm/v7/testing/middleware/builder", visibility = ["//visibility:public"], deps = [ "//api/client/builder:go_default_library", diff --git a/testing/middleware/builder/builder.go b/testing/middleware/builder/builder.go index 048baf54c1..6833a6d812 100644 --- a/testing/middleware/builder/builder.go +++ b/testing/middleware/builder/builder.go @@ -14,21 +14,21 @@ import ( "sync" "time" - builderAPI "github.com/OffchainLabs/prysm/v6/api/client/builder" - "github.com/OffchainLabs/prysm/v6/api/server/structs" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/signing" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - types "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/crypto/bls" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/network" - "github.com/OffchainLabs/prysm/v6/network/authorization" - v1 "github.com/OffchainLabs/prysm/v6/proto/engine/v1" - eth "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" + builderAPI "github.com/OffchainLabs/prysm/v7/api/client/builder" + "github.com/OffchainLabs/prysm/v7/api/server/structs" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/signing" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + types "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/crypto/bls" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/network" + "github.com/OffchainLabs/prysm/v7/network/authorization" + v1 "github.com/OffchainLabs/prysm/v7/proto/engine/v1" + eth "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" "github.com/ethereum/go-ethereum/beacon/engine" "github.com/ethereum/go-ethereum/common" "github.com/ethereum/go-ethereum/common/hexutil" diff --git a/testing/middleware/engine-api-proxy/BUILD.bazel b/testing/middleware/engine-api-proxy/BUILD.bazel index fdd48d6ba8..b9c202bbff 100644 --- a/testing/middleware/engine-api-proxy/BUILD.bazel +++ 
b/testing/middleware/engine-api-proxy/BUILD.bazel @@ -6,7 +6,7 @@ go_library( "options.go", "proxy.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/testing/middleware/engine-api-proxy", + importpath = "github.com/OffchainLabs/prysm/v7/testing/middleware/engine-api-proxy", visibility = ["//visibility:public"], deps = [ "//network:go_default_library", diff --git a/testing/middleware/engine-api-proxy/proxy.go b/testing/middleware/engine-api-proxy/proxy.go index 01b5fe2b3d..c01b9033f1 100644 --- a/testing/middleware/engine-api-proxy/proxy.go +++ b/testing/middleware/engine-api-proxy/proxy.go @@ -15,7 +15,7 @@ import ( "sync" "time" - "github.com/OffchainLabs/prysm/v6/network" + "github.com/OffchainLabs/prysm/v7/network" "github.com/pkg/errors" "github.com/sirupsen/logrus" ) diff --git a/testing/middleware/engine-api-proxy/proxy_test.go b/testing/middleware/engine-api-proxy/proxy_test.go index f59bae0928..38bc7fce69 100644 --- a/testing/middleware/engine-api-proxy/proxy_test.go +++ b/testing/middleware/engine-api-proxy/proxy_test.go @@ -7,9 +7,9 @@ import ( "testing" "time" - "github.com/OffchainLabs/prysm/v6/crypto/rand" - pb "github.com/OffchainLabs/prysm/v6/proto/engine/v1" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/crypto/rand" + pb "github.com/OffchainLabs/prysm/v7/proto/engine/v1" + "github.com/OffchainLabs/prysm/v7/testing/require" "github.com/ethereum/go-ethereum/common" "github.com/ethereum/go-ethereum/rpc" "github.com/sirupsen/logrus" diff --git a/testing/mock/BUILD.bazel b/testing/mock/BUILD.bazel index 3253a78485..bb668e37a1 100644 --- a/testing/mock/BUILD.bazel +++ b/testing/mock/BUILD.bazel @@ -12,7 +12,7 @@ go_library( "beacon_validator_server_mock.go", "node_service_mock.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/testing/mock", + importpath = "github.com/OffchainLabs/prysm/v7/testing/mock", visibility = ["//visibility:public"], deps = [ "//proto/prysm/v1alpha1:go_default_library", diff --git a/testing/mock/beacon_altair_validator_client_mock.go b/testing/mock/beacon_altair_validator_client_mock.go index e169f52660..ec296a5a92 100644 --- a/testing/mock/beacon_altair_validator_client_mock.go +++ b/testing/mock/beacon_altair_validator_client_mock.go @@ -8,7 +8,7 @@ import ( context "context" reflect "reflect" - v2 "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + v2 "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" gomock "go.uber.org/mock/gomock" metadata "google.golang.org/grpc/metadata" ) diff --git a/testing/mock/beacon_altair_validator_server_mock.go b/testing/mock/beacon_altair_validator_server_mock.go index 68cda095d6..2eab7dedf1 100644 --- a/testing/mock/beacon_altair_validator_server_mock.go +++ b/testing/mock/beacon_altair_validator_server_mock.go @@ -8,7 +8,7 @@ import ( context "context" reflect "reflect" - v2 "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + v2 "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" gomock "go.uber.org/mock/gomock" metadata "google.golang.org/grpc/metadata" ) diff --git a/testing/mock/beacon_service_mock.go b/testing/mock/beacon_service_mock.go index bcddcc7bff..a5dbe53efb 100644 --- a/testing/mock/beacon_service_mock.go +++ b/testing/mock/beacon_service_mock.go @@ -1,9 +1,9 @@ // Code generated by MockGen. DO NOT EDIT. 
-// Source: github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1 (interfaces: BeaconChainClient) +// Source: github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1 (interfaces: BeaconChainClient) // // Generated by this command: // -// mockgen -package=mock -destination=testing/mock/beacon_service_mock.go github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1 BeaconChainClient +// mockgen -package=mock -destination=testing/mock/beacon_service_mock.go github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1 BeaconChainClient // // Package mock is a generated GoMock package. @@ -13,7 +13,7 @@ import ( context "context" reflect "reflect" - eth "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + eth "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" gomock "go.uber.org/mock/gomock" grpc "google.golang.org/grpc" emptypb "google.golang.org/protobuf/types/known/emptypb" diff --git a/testing/mock/beacon_validator_client_mock.go b/testing/mock/beacon_validator_client_mock.go index 868011ba54..5c52f48a98 100644 --- a/testing/mock/beacon_validator_client_mock.go +++ b/testing/mock/beacon_validator_client_mock.go @@ -1,9 +1,9 @@ // Code generated by MockGen. DO NOT EDIT. -// Source: github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1 (interfaces: BeaconNodeValidatorClient,BeaconNodeValidator_WaitForChainStartClient,BeaconNodeValidator_WaitForActivationClient,BeaconNodeValidator_StreamSlotsClient) +// Source: github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1 (interfaces: BeaconNodeValidatorClient,BeaconNodeValidator_WaitForChainStartClient,BeaconNodeValidator_WaitForActivationClient,BeaconNodeValidator_StreamSlotsClient) // // Generated by this command: // -// mockgen -package=mock -destination=testing/mock/beacon_validator_client_mock.go github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1 BeaconNodeValidatorClient,BeaconNodeValidator_WaitForChainStartClient,BeaconNodeValidator_WaitForActivationClient,BeaconNodeValidator_StreamSlotsClient +// mockgen -package=mock -destination=testing/mock/beacon_validator_client_mock.go github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1 BeaconNodeValidatorClient,BeaconNodeValidator_WaitForChainStartClient,BeaconNodeValidator_WaitForActivationClient,BeaconNodeValidator_StreamSlotsClient // // Package mock is a generated GoMock package. @@ -13,7 +13,7 @@ import ( context "context" reflect "reflect" - eth "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + eth "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" gomock "go.uber.org/mock/gomock" grpc "google.golang.org/grpc" metadata "google.golang.org/grpc/metadata" diff --git a/testing/mock/beacon_validator_server_mock.go b/testing/mock/beacon_validator_server_mock.go index efedf134e9..217cf1d382 100644 --- a/testing/mock/beacon_validator_server_mock.go +++ b/testing/mock/beacon_validator_server_mock.go @@ -1,9 +1,9 @@ // Code generated by MockGen. DO NOT EDIT. 
-// Source: github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1 (interfaces: BeaconNodeValidatorServer,BeaconNodeValidator_WaitForActivationServer,BeaconNodeValidator_WaitForChainStartServer,BeaconNodeValidator_StreamSlotsServer) +// Source: github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1 (interfaces: BeaconNodeValidatorServer,BeaconNodeValidator_WaitForActivationServer,BeaconNodeValidator_WaitForChainStartServer,BeaconNodeValidator_StreamSlotsServer) // // Generated by this command: // -// mockgen -package=mock -destination=testing/mock/beacon_validator_server_mock.go github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1 BeaconNodeValidatorServer,BeaconNodeValidator_WaitForActivationServer,BeaconNodeValidator_WaitForChainStartServer,BeaconNodeValidator_StreamSlotsServer +// mockgen -package=mock -destination=testing/mock/beacon_validator_server_mock.go github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1 BeaconNodeValidatorServer,BeaconNodeValidator_WaitForActivationServer,BeaconNodeValidator_WaitForChainStartServer,BeaconNodeValidator_StreamSlotsServer // // Package mock is a generated GoMock package. @@ -13,7 +13,7 @@ import ( context "context" reflect "reflect" - eth "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + eth "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" gomock "go.uber.org/mock/gomock" metadata "google.golang.org/grpc/metadata" emptypb "google.golang.org/protobuf/types/known/emptypb" diff --git a/testing/mock/node_service_mock.go b/testing/mock/node_service_mock.go index 7f4ccd95e2..9f8fe8e456 100644 --- a/testing/mock/node_service_mock.go +++ b/testing/mock/node_service_mock.go @@ -1,9 +1,9 @@ // Code generated by MockGen. DO NOT EDIT. -// Source: github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1 (interfaces: NodeClient) +// Source: github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1 (interfaces: NodeClient) // // Generated by this command: // -// mockgen -package=mock -destination=testing/mock/node_service_mock.go github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1 NodeClient +// mockgen -package=mock -destination=testing/mock/node_service_mock.go github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1 NodeClient // // Package mock is a generated GoMock package. 
@@ -13,7 +13,7 @@ import ( context "context" reflect "reflect" - eth "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + eth "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" gomock "go.uber.org/mock/gomock" grpc "google.golang.org/grpc" emptypb "google.golang.org/protobuf/types/known/emptypb" diff --git a/testing/require/BUILD.bazel b/testing/require/BUILD.bazel index 4fdd50e5a3..d9a1bd72d6 100644 --- a/testing/require/BUILD.bazel +++ b/testing/require/BUILD.bazel @@ -4,7 +4,7 @@ go_library( name = "go_default_library", testonly = True, srcs = ["requires.go"], - importpath = "github.com/OffchainLabs/prysm/v6/testing/require", + importpath = "github.com/OffchainLabs/prysm/v7/testing/require", visibility = ["//visibility:public"], deps = [ "//testing/assertions:go_default_library", diff --git a/testing/require/requires.go b/testing/require/requires.go index 6969a72e08..98cb8d6702 100644 --- a/testing/require/requires.go +++ b/testing/require/requires.go @@ -1,7 +1,7 @@ package require import ( - "github.com/OffchainLabs/prysm/v6/testing/assertions" + "github.com/OffchainLabs/prysm/v7/testing/assertions" "github.com/sirupsen/logrus/hooks/test" ) diff --git a/testing/slasher/simulator/BUILD.bazel b/testing/slasher/simulator/BUILD.bazel index d9d85adca9..f6830fca3f 100644 --- a/testing/slasher/simulator/BUILD.bazel +++ b/testing/slasher/simulator/BUILD.bazel @@ -8,7 +8,7 @@ go_library( "block_generator.go", "simulator.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/testing/slasher/simulator", + importpath = "github.com/OffchainLabs/prysm/v7/testing/slasher/simulator", visibility = [ "//testing/endtoend:__subpackages__", ], diff --git a/testing/slasher/simulator/attestation_generator.go b/testing/slasher/simulator/attestation_generator.go index 3e944e3d0a..aa2a89f0a6 100644 --- a/testing/slasher/simulator/attestation_generator.go +++ b/testing/slasher/simulator/attestation_generator.go @@ -5,17 +5,17 @@ import ( "context" "math" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/signing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/crypto/bls" - "github.com/OffchainLabs/prysm/v6/crypto/rand" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/signing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/crypto/bls" + "github.com/OffchainLabs/prysm/v7/crypto/rand" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" + "github.com/OffchainLabs/prysm/v7/time/slots" "github.com/pkg/errors" "github.com/sirupsen/logrus" ) diff --git a/testing/slasher/simulator/attestation_generator_test.go b/testing/slasher/simulator/attestation_generator_test.go index 6bd752ba8f..46192a2dc5 100644 --- a/testing/slasher/simulator/attestation_generator_test.go +++ b/testing/slasher/simulator/attestation_generator_test.go @@ 
-3,10 +3,10 @@ package simulator import ( "testing" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1/slashings" - "github.com/OffchainLabs/prysm/v6/runtime/version" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1/slashings" + "github.com/OffchainLabs/prysm/v7/runtime/version" + "github.com/OffchainLabs/prysm/v7/testing/require" ) func TestGenerateAttestationsForSlot_Slashing(t *testing.T) { diff --git a/testing/slasher/simulator/block_generator.go b/testing/slasher/simulator/block_generator.go index d5a6f8fd1c..a2ac08f32c 100644 --- a/testing/slasher/simulator/block_generator.go +++ b/testing/slasher/simulator/block_generator.go @@ -3,14 +3,14 @@ package simulator import ( "context" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/signing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/crypto/bls" - "github.com/OffchainLabs/prysm/v6/crypto/rand" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/signing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/crypto/bls" + "github.com/OffchainLabs/prysm/v7/crypto/rand" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" ) func (s *Simulator) generateBlockHeadersForSlot( diff --git a/testing/slasher/simulator/block_generator_test.go b/testing/slasher/simulator/block_generator_test.go index 66e81c0392..12c132ddf7 100644 --- a/testing/slasher/simulator/block_generator_test.go +++ b/testing/slasher/simulator/block_generator_test.go @@ -4,7 +4,7 @@ import ( "bytes" "testing" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/require" ) func TestGenerateBlockHeadersForSlot_Slashing(t *testing.T) { diff --git a/testing/slasher/simulator/simulator.go b/testing/slasher/simulator/simulator.go index 627f222d00..01a427028c 100644 --- a/testing/slasher/simulator/simulator.go +++ b/testing/slasher/simulator/simulator.go @@ -6,22 +6,22 @@ import ( "fmt" "time" - "github.com/OffchainLabs/prysm/v6/async/event" - "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain" - statefeed "github.com/OffchainLabs/prysm/v6/beacon-chain/core/feed/state" - "github.com/OffchainLabs/prysm/v6/beacon-chain/db" - "github.com/OffchainLabs/prysm/v6/beacon-chain/operations/slashings" - "github.com/OffchainLabs/prysm/v6/beacon-chain/slasher" - slashertypes "github.com/OffchainLabs/prysm/v6/beacon-chain/slasher/types" - "github.com/OffchainLabs/prysm/v6/beacon-chain/startup" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state/stategen" - "github.com/OffchainLabs/prysm/v6/beacon-chain/sync" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/crypto/bls" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/async/event" + 
"github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain" + statefeed "github.com/OffchainLabs/prysm/v7/beacon-chain/core/feed/state" + "github.com/OffchainLabs/prysm/v7/beacon-chain/db" + "github.com/OffchainLabs/prysm/v7/beacon-chain/operations/slashings" + "github.com/OffchainLabs/prysm/v7/beacon-chain/slasher" + slashertypes "github.com/OffchainLabs/prysm/v7/beacon-chain/slasher/types" + "github.com/OffchainLabs/prysm/v7/beacon-chain/startup" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state/stategen" + "github.com/OffchainLabs/prysm/v7/beacon-chain/sync" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/crypto/bls" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" + "github.com/OffchainLabs/prysm/v7/time/slots" "github.com/sirupsen/logrus" ) diff --git a/testing/slasher/simulator/simulator_test.go b/testing/slasher/simulator/simulator_test.go index 2b95620cb1..7eb50a8b58 100644 --- a/testing/slasher/simulator/simulator_test.go +++ b/testing/slasher/simulator/simulator_test.go @@ -3,14 +3,14 @@ package simulator import ( "testing" - mock "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/testing" - dbtest "github.com/OffchainLabs/prysm/v6/beacon-chain/db/testing" - mockstategen "github.com/OffchainLabs/prysm/v6/beacon-chain/state/stategen/mock" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/crypto/bls" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + mock "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain/testing" + dbtest "github.com/OffchainLabs/prysm/v7/beacon-chain/db/testing" + mockstategen "github.com/OffchainLabs/prysm/v7/beacon-chain/state/stategen/mock" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/crypto/bls" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" ) func setupService(t *testing.T, params *Parameters) *Simulator { diff --git a/testing/spectest/general/deneb__kzg__verify_blob_kzg_proof_batch_test.go b/testing/spectest/general/deneb__kzg__verify_blob_kzg_proof_batch_test.go index 7979e3f562..a1a5568813 100644 --- a/testing/spectest/general/deneb__kzg__verify_blob_kzg_proof_batch_test.go +++ b/testing/spectest/general/deneb__kzg__verify_blob_kzg_proof_batch_test.go @@ -5,12 +5,12 @@ import ( "path" "testing" - kzgPrysm "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/kzg" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/spectest/utils" - "github.com/OffchainLabs/prysm/v6/testing/util" + kzgPrysm "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain/kzg" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/spectest/utils" + "github.com/OffchainLabs/prysm/v7/testing/util" "github.com/ghodss/yaml" ) diff --git a/testing/spectest/general/fulu__kzg__compute_cells_and_kzg_proofs_test.go 
b/testing/spectest/general/fulu__kzg__compute_cells_and_kzg_proofs_test.go index 1dc122777d..574a71aa89 100644 --- a/testing/spectest/general/fulu__kzg__compute_cells_and_kzg_proofs_test.go +++ b/testing/spectest/general/fulu__kzg__compute_cells_and_kzg_proofs_test.go @@ -4,11 +4,11 @@ import ( "path" "testing" - kzgPrysm "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/kzg" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/spectest/utils" - "github.com/OffchainLabs/prysm/v6/testing/util" + kzgPrysm "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain/kzg" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/spectest/utils" + "github.com/OffchainLabs/prysm/v7/testing/util" "github.com/ethereum/go-ethereum/common/hexutil" "github.com/ghodss/yaml" ) diff --git a/testing/spectest/general/fulu__kzg__compute_cells_test.go b/testing/spectest/general/fulu__kzg__compute_cells_test.go index 802243ba4d..2a6a58fe32 100644 --- a/testing/spectest/general/fulu__kzg__compute_cells_test.go +++ b/testing/spectest/general/fulu__kzg__compute_cells_test.go @@ -4,11 +4,11 @@ import ( "path" "testing" - kzgPrysm "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/kzg" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/spectest/utils" - "github.com/OffchainLabs/prysm/v6/testing/util" + kzgPrysm "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain/kzg" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/spectest/utils" + "github.com/OffchainLabs/prysm/v7/testing/util" "github.com/ethereum/go-ethereum/common/hexutil" "github.com/ghodss/yaml" ) diff --git a/testing/spectest/general/fulu__kzg__recover_cells_and_kzg_proofs_test.go b/testing/spectest/general/fulu__kzg__recover_cells_and_kzg_proofs_test.go index d6731ad2b3..1f613ffbb7 100644 --- a/testing/spectest/general/fulu__kzg__recover_cells_and_kzg_proofs_test.go +++ b/testing/spectest/general/fulu__kzg__recover_cells_and_kzg_proofs_test.go @@ -5,10 +5,10 @@ import ( "strconv" "testing" - kzgPrysm "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/kzg" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/spectest/utils" - "github.com/OffchainLabs/prysm/v6/testing/util" + kzgPrysm "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain/kzg" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/spectest/utils" + "github.com/OffchainLabs/prysm/v7/testing/util" "github.com/ethereum/go-ethereum/common/hexutil" "github.com/ghodss/yaml" ) diff --git a/testing/spectest/general/fulu__kzg__verify_cell_kzg_proof_batch_test.go b/testing/spectest/general/fulu__kzg__verify_cell_kzg_proof_batch_test.go index 08d8f5ee0f..e6362e9375 100644 --- a/testing/spectest/general/fulu__kzg__verify_cell_kzg_proof_batch_test.go +++ b/testing/spectest/general/fulu__kzg__verify_cell_kzg_proof_batch_test.go @@ -5,10 +5,10 @@ import ( "strconv" "testing" - kzgPrysm "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/kzg" - "github.com/OffchainLabs/prysm/v6/testing/require" - 
"github.com/OffchainLabs/prysm/v6/testing/spectest/utils" - "github.com/OffchainLabs/prysm/v6/testing/util" + kzgPrysm "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain/kzg" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/spectest/utils" + "github.com/OffchainLabs/prysm/v7/testing/util" "github.com/ethereum/go-ethereum/common/hexutil" "github.com/ghodss/yaml" ) diff --git a/testing/spectest/mainnet/altair__epoch_processing__effective_balance_updates_test.go b/testing/spectest/mainnet/altair__epoch_processing__effective_balance_updates_test.go index 481df02359..1780f4cd5e 100644 --- a/testing/spectest/mainnet/altair__epoch_processing__effective_balance_updates_test.go +++ b/testing/spectest/mainnet/altair__epoch_processing__effective_balance_updates_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/altair/epoch_processing" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/altair/epoch_processing" ) func TestMainnet_Altair_EpochProcessing_EffectiveBalanceUpdates(t *testing.T) { diff --git a/testing/spectest/mainnet/altair__epoch_processing__eth1_data_reset_test.go b/testing/spectest/mainnet/altair__epoch_processing__eth1_data_reset_test.go index 7bd93780e1..63e4617f63 100644 --- a/testing/spectest/mainnet/altair__epoch_processing__eth1_data_reset_test.go +++ b/testing/spectest/mainnet/altair__epoch_processing__eth1_data_reset_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/altair/epoch_processing" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/altair/epoch_processing" ) func TestMainnet_Altair_EpochProcessing_Eth1DataReset(t *testing.T) { diff --git a/testing/spectest/mainnet/altair__epoch_processing__historical_roots_update_test.go b/testing/spectest/mainnet/altair__epoch_processing__historical_roots_update_test.go index 5a2326c709..7edc4865dc 100644 --- a/testing/spectest/mainnet/altair__epoch_processing__historical_roots_update_test.go +++ b/testing/spectest/mainnet/altair__epoch_processing__historical_roots_update_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/altair/epoch_processing" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/altair/epoch_processing" ) func TestMainnet_Altair_EpochProcessing_HistoricalRootsUpdate(t *testing.T) { diff --git a/testing/spectest/mainnet/altair__epoch_processing__inactivity_updates_test.go b/testing/spectest/mainnet/altair__epoch_processing__inactivity_updates_test.go index cfb93aaaad..35faaa96b8 100644 --- a/testing/spectest/mainnet/altair__epoch_processing__inactivity_updates_test.go +++ b/testing/spectest/mainnet/altair__epoch_processing__inactivity_updates_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/altair/epoch_processing" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/altair/epoch_processing" ) func TestMainnet_Altair_EpochProcessing_InactivityUpdates(t *testing.T) { diff --git a/testing/spectest/mainnet/altair__epoch_processing__justification_and_finalization_test.go b/testing/spectest/mainnet/altair__epoch_processing__justification_and_finalization_test.go index 27056ab898..34c4314648 100644 --- a/testing/spectest/mainnet/altair__epoch_processing__justification_and_finalization_test.go +++ 
b/testing/spectest/mainnet/altair__epoch_processing__justification_and_finalization_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/altair/epoch_processing" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/altair/epoch_processing" ) func TestMainnet_Altair_EpochProcessing_JustificationAndFinalization(t *testing.T) { diff --git a/testing/spectest/mainnet/altair__epoch_processing__participation_flag_updates_test.go b/testing/spectest/mainnet/altair__epoch_processing__participation_flag_updates_test.go index 34ca48082e..2f07c587bb 100644 --- a/testing/spectest/mainnet/altair__epoch_processing__participation_flag_updates_test.go +++ b/testing/spectest/mainnet/altair__epoch_processing__participation_flag_updates_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/altair/epoch_processing" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/altair/epoch_processing" ) func TestMainnet_Altair_EpochProcessing_ParticipationFlag(t *testing.T) { diff --git a/testing/spectest/mainnet/altair__epoch_processing__randao_mixes_reset_test.go b/testing/spectest/mainnet/altair__epoch_processing__randao_mixes_reset_test.go index a8278b3a4c..5d87890590 100644 --- a/testing/spectest/mainnet/altair__epoch_processing__randao_mixes_reset_test.go +++ b/testing/spectest/mainnet/altair__epoch_processing__randao_mixes_reset_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/altair/epoch_processing" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/altair/epoch_processing" ) func TestMainnet_Altair_EpochProcessing_RandaoMixesReset(t *testing.T) { diff --git a/testing/spectest/mainnet/altair__epoch_processing__registry_updates_test.go b/testing/spectest/mainnet/altair__epoch_processing__registry_updates_test.go index dc282bcbf3..ea69e3ccf6 100644 --- a/testing/spectest/mainnet/altair__epoch_processing__registry_updates_test.go +++ b/testing/spectest/mainnet/altair__epoch_processing__registry_updates_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/altair/epoch_processing" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/altair/epoch_processing" ) func TestMainnet_Altair_EpochProcessing_ResetRegistryUpdates(t *testing.T) { diff --git a/testing/spectest/mainnet/altair__epoch_processing__rewards_and_penalties_test.go b/testing/spectest/mainnet/altair__epoch_processing__rewards_and_penalties_test.go index 6fcad223f5..7844683f21 100644 --- a/testing/spectest/mainnet/altair__epoch_processing__rewards_and_penalties_test.go +++ b/testing/spectest/mainnet/altair__epoch_processing__rewards_and_penalties_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/altair/epoch_processing" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/altair/epoch_processing" ) func TestMainnet_Altair_EpochProcessing_RewardsAndPenalties(t *testing.T) { diff --git a/testing/spectest/mainnet/altair__epoch_processing__slashings_reset_test.go b/testing/spectest/mainnet/altair__epoch_processing__slashings_reset_test.go index e061efefd7..72ce879c19 100644 --- a/testing/spectest/mainnet/altair__epoch_processing__slashings_reset_test.go +++ b/testing/spectest/mainnet/altair__epoch_processing__slashings_reset_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - 
"github.com/OffchainLabs/prysm/v6/testing/spectest/shared/altair/epoch_processing" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/altair/epoch_processing" ) func TestMainnet_Altair_EpochProcessing_SlashingsReset(t *testing.T) { diff --git a/testing/spectest/mainnet/altair__epoch_processing__slashings_test.go b/testing/spectest/mainnet/altair__epoch_processing__slashings_test.go index 522575a123..b46a3a6a3c 100644 --- a/testing/spectest/mainnet/altair__epoch_processing__slashings_test.go +++ b/testing/spectest/mainnet/altair__epoch_processing__slashings_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/altair/epoch_processing" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/altair/epoch_processing" ) func TestMainnet_Altair_EpochProcessing_Slashings(t *testing.T) { diff --git a/testing/spectest/mainnet/altair__finality__finality_test.go b/testing/spectest/mainnet/altair__finality__finality_test.go index 2309f6e24f..cc1699d3ee 100644 --- a/testing/spectest/mainnet/altair__finality__finality_test.go +++ b/testing/spectest/mainnet/altair__finality__finality_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/altair/finality" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/altair/finality" ) func TestMainnet_Altair_Finality(t *testing.T) { diff --git a/testing/spectest/mainnet/altair__fork_helper__upgrade_to_altair_test.go b/testing/spectest/mainnet/altair__fork_helper__upgrade_to_altair_test.go index 227e5f5d67..6c9ec57dab 100644 --- a/testing/spectest/mainnet/altair__fork_helper__upgrade_to_altair_test.go +++ b/testing/spectest/mainnet/altair__fork_helper__upgrade_to_altair_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/altair/fork" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/altair/fork" ) func TestMainnet_Altair_UpgradeToAltair(t *testing.T) { diff --git a/testing/spectest/mainnet/altair__fork_transition__transition_test.go b/testing/spectest/mainnet/altair__fork_transition__transition_test.go index 7f112fdaa8..d72aca8539 100644 --- a/testing/spectest/mainnet/altair__fork_transition__transition_test.go +++ b/testing/spectest/mainnet/altair__fork_transition__transition_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/altair/fork" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/altair/fork" ) func TestMainnet_Altair_Transition(t *testing.T) { diff --git a/testing/spectest/mainnet/altair__forkchoice__forkchoice_test.go b/testing/spectest/mainnet/altair__forkchoice__forkchoice_test.go index 793650315f..6c2e177ef8 100644 --- a/testing/spectest/mainnet/altair__forkchoice__forkchoice_test.go +++ b/testing/spectest/mainnet/altair__forkchoice__forkchoice_test.go @@ -3,8 +3,8 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/runtime/version" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/common/forkchoice" + "github.com/OffchainLabs/prysm/v7/runtime/version" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/common/forkchoice" ) func TestMainnet_Altair_Forkchoice(t *testing.T) { diff --git a/testing/spectest/mainnet/altair__light_client__single_merkle_proof_test.go b/testing/spectest/mainnet/altair__light_client__single_merkle_proof_test.go index ae47841048..89af8a8379 100644 --- 
a/testing/spectest/mainnet/altair__light_client__single_merkle_proof_test.go +++ b/testing/spectest/mainnet/altair__light_client__single_merkle_proof_test.go @@ -3,8 +3,8 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/runtime/version" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/common/light_client" + "github.com/OffchainLabs/prysm/v7/runtime/version" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/common/light_client" ) func TestMainnet_Altair_LightClient_SingleMerkleProof(t *testing.T) { diff --git a/testing/spectest/mainnet/altair__operations__attestation_test.go b/testing/spectest/mainnet/altair__operations__attestation_test.go index ca0714ff44..9c2e977a41 100644 --- a/testing/spectest/mainnet/altair__operations__attestation_test.go +++ b/testing/spectest/mainnet/altair__operations__attestation_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/altair/operations" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/altair/operations" ) func TestMainnet_Altair_Operations_Attestation(t *testing.T) { diff --git a/testing/spectest/mainnet/altair__operations__attester_slashing_test.go b/testing/spectest/mainnet/altair__operations__attester_slashing_test.go index 9795331bac..7e3eae68fc 100644 --- a/testing/spectest/mainnet/altair__operations__attester_slashing_test.go +++ b/testing/spectest/mainnet/altair__operations__attester_slashing_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/altair/operations" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/altair/operations" ) func TestMainnet_Altair_Operations_AttesterSlashing(t *testing.T) { diff --git a/testing/spectest/mainnet/altair__operations__block_header_test.go b/testing/spectest/mainnet/altair__operations__block_header_test.go index 81ffb9dc66..33cdd31083 100644 --- a/testing/spectest/mainnet/altair__operations__block_header_test.go +++ b/testing/spectest/mainnet/altair__operations__block_header_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/altair/operations" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/altair/operations" ) func TestMainnet_Altair_Operations_BlockHeader(t *testing.T) { diff --git a/testing/spectest/mainnet/altair__operations__deposit_test.go b/testing/spectest/mainnet/altair__operations__deposit_test.go index 8b511f1650..92197e3d83 100644 --- a/testing/spectest/mainnet/altair__operations__deposit_test.go +++ b/testing/spectest/mainnet/altair__operations__deposit_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/altair/operations" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/altair/operations" ) func TestMainnet_Altair_Operations_Deposit(t *testing.T) { diff --git a/testing/spectest/mainnet/altair__operations__proposer_slashing_test.go b/testing/spectest/mainnet/altair__operations__proposer_slashing_test.go index 877732f84e..26e0ebd7f2 100644 --- a/testing/spectest/mainnet/altair__operations__proposer_slashing_test.go +++ b/testing/spectest/mainnet/altair__operations__proposer_slashing_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/altair/operations" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/altair/operations" ) func 
TestMainnet_Altair_Operations_ProposerSlashing(t *testing.T) { diff --git a/testing/spectest/mainnet/altair__operations__sync_committee_test.go b/testing/spectest/mainnet/altair__operations__sync_committee_test.go index 05d4612f45..de850a2265 100644 --- a/testing/spectest/mainnet/altair__operations__sync_committee_test.go +++ b/testing/spectest/mainnet/altair__operations__sync_committee_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/altair/operations" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/altair/operations" ) func TestMainnet_Altair_Operations_SyncCommittee(t *testing.T) { diff --git a/testing/spectest/mainnet/altair__operations__voluntary_exit_test.go b/testing/spectest/mainnet/altair__operations__voluntary_exit_test.go index 3ce9542992..8ddbc76e51 100644 --- a/testing/spectest/mainnet/altair__operations__voluntary_exit_test.go +++ b/testing/spectest/mainnet/altair__operations__voluntary_exit_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/altair/operations" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/altair/operations" ) func TestMainnet_Altair_Operations_VoluntaryExit(t *testing.T) { diff --git a/testing/spectest/mainnet/altair__random__random_test.go b/testing/spectest/mainnet/altair__random__random_test.go index 6d042f3230..ca592389cf 100644 --- a/testing/spectest/mainnet/altair__random__random_test.go +++ b/testing/spectest/mainnet/altair__random__random_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/altair/sanity" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/altair/sanity" ) func TestMainnet_Altair_Random(t *testing.T) { diff --git a/testing/spectest/mainnet/altair__rewards__rewards_test.go b/testing/spectest/mainnet/altair__rewards__rewards_test.go index 3fd5f67b35..70fe22fd7a 100644 --- a/testing/spectest/mainnet/altair__rewards__rewards_test.go +++ b/testing/spectest/mainnet/altair__rewards__rewards_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/altair/rewards" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/altair/rewards" ) func TestMainnet_Altair_Rewards(t *testing.T) { diff --git a/testing/spectest/mainnet/altair__sanity__blocks_test.go b/testing/spectest/mainnet/altair__sanity__blocks_test.go index c026eeb898..115dcaafba 100644 --- a/testing/spectest/mainnet/altair__sanity__blocks_test.go +++ b/testing/spectest/mainnet/altair__sanity__blocks_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/altair/sanity" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/altair/sanity" ) func TestMainnet_Altair_Sanity_Blocks(t *testing.T) { diff --git a/testing/spectest/mainnet/altair__sanity__slots_test.go b/testing/spectest/mainnet/altair__sanity__slots_test.go index ede5ddbe77..f017bce38e 100644 --- a/testing/spectest/mainnet/altair__sanity__slots_test.go +++ b/testing/spectest/mainnet/altair__sanity__slots_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/altair/sanity" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/altair/sanity" ) func TestMainnet_Altair_Sanity_Slots(t *testing.T) { diff --git a/testing/spectest/mainnet/altair__ssz_static__ssz_static_test.go 
b/testing/spectest/mainnet/altair__ssz_static__ssz_static_test.go index 9dbb4c32ff..cf350a7748 100644 --- a/testing/spectest/mainnet/altair__ssz_static__ssz_static_test.go +++ b/testing/spectest/mainnet/altair__ssz_static__ssz_static_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/altair/ssz_static" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/altair/ssz_static" ) func TestMainnet_Altair_SSZStatic(t *testing.T) { diff --git a/testing/spectest/mainnet/bellatrix__epoch_processing__effective_balance_updates_test.go b/testing/spectest/mainnet/bellatrix__epoch_processing__effective_balance_updates_test.go index bc0c895376..d0f2ef7053 100644 --- a/testing/spectest/mainnet/bellatrix__epoch_processing__effective_balance_updates_test.go +++ b/testing/spectest/mainnet/bellatrix__epoch_processing__effective_balance_updates_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/bellatrix/epoch_processing" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/bellatrix/epoch_processing" ) func TestMainnet_Bellatrix_EpochProcessing_EffectiveBalanceUpdates(t *testing.T) { diff --git a/testing/spectest/mainnet/bellatrix__epoch_processing__eth1_data_reset_test.go b/testing/spectest/mainnet/bellatrix__epoch_processing__eth1_data_reset_test.go index be62930aa5..46851d7cb4 100644 --- a/testing/spectest/mainnet/bellatrix__epoch_processing__eth1_data_reset_test.go +++ b/testing/spectest/mainnet/bellatrix__epoch_processing__eth1_data_reset_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/bellatrix/epoch_processing" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/bellatrix/epoch_processing" ) func TestMainnet_Bellatrix_EpochProcessing_Eth1DataReset(t *testing.T) { diff --git a/testing/spectest/mainnet/bellatrix__epoch_processing__historical_roots_update_test.go b/testing/spectest/mainnet/bellatrix__epoch_processing__historical_roots_update_test.go index fa02a1a217..548f9e27e0 100644 --- a/testing/spectest/mainnet/bellatrix__epoch_processing__historical_roots_update_test.go +++ b/testing/spectest/mainnet/bellatrix__epoch_processing__historical_roots_update_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/bellatrix/epoch_processing" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/bellatrix/epoch_processing" ) func TestMainnet_Bellatrix_EpochProcessing_HistoricalRootsUpdate(t *testing.T) { diff --git a/testing/spectest/mainnet/bellatrix__epoch_processing__inactivity_updates_test.go b/testing/spectest/mainnet/bellatrix__epoch_processing__inactivity_updates_test.go index 34cc0c7047..9407cc6ca9 100644 --- a/testing/spectest/mainnet/bellatrix__epoch_processing__inactivity_updates_test.go +++ b/testing/spectest/mainnet/bellatrix__epoch_processing__inactivity_updates_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/bellatrix/epoch_processing" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/bellatrix/epoch_processing" ) func TestMainnet_Bellatrix_EpochProcessing_InactivityUpdates(t *testing.T) { diff --git a/testing/spectest/mainnet/bellatrix__epoch_processing__justification_and_finalization_test.go b/testing/spectest/mainnet/bellatrix__epoch_processing__justification_and_finalization_test.go index ed7146221d..a7eeedef7e 100644 
--- a/testing/spectest/mainnet/bellatrix__epoch_processing__justification_and_finalization_test.go +++ b/testing/spectest/mainnet/bellatrix__epoch_processing__justification_and_finalization_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/bellatrix/epoch_processing" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/bellatrix/epoch_processing" ) func TestMainnet_Bellatrix_EpochProcessing_JustificationAndFinalization(t *testing.T) { diff --git a/testing/spectest/mainnet/bellatrix__epoch_processing__participation_flag_updates_test.go b/testing/spectest/mainnet/bellatrix__epoch_processing__participation_flag_updates_test.go index 17c6ae3483..b9e14267d1 100644 --- a/testing/spectest/mainnet/bellatrix__epoch_processing__participation_flag_updates_test.go +++ b/testing/spectest/mainnet/bellatrix__epoch_processing__participation_flag_updates_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/bellatrix/epoch_processing" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/bellatrix/epoch_processing" ) func TestMainnet_Bellatrix_EpochProcessing_ParticipationFlag(t *testing.T) { diff --git a/testing/spectest/mainnet/bellatrix__epoch_processing__randao_mixes_reset_test.go b/testing/spectest/mainnet/bellatrix__epoch_processing__randao_mixes_reset_test.go index 51d4641467..6e2ecbde77 100644 --- a/testing/spectest/mainnet/bellatrix__epoch_processing__randao_mixes_reset_test.go +++ b/testing/spectest/mainnet/bellatrix__epoch_processing__randao_mixes_reset_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/bellatrix/epoch_processing" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/bellatrix/epoch_processing" ) func TestMainnet_Bellatrix_EpochProcessing_RandaoMixesReset(t *testing.T) { diff --git a/testing/spectest/mainnet/bellatrix__epoch_processing__registry_updates_test.go b/testing/spectest/mainnet/bellatrix__epoch_processing__registry_updates_test.go index 2f91ea38c3..b48cc096a6 100644 --- a/testing/spectest/mainnet/bellatrix__epoch_processing__registry_updates_test.go +++ b/testing/spectest/mainnet/bellatrix__epoch_processing__registry_updates_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/bellatrix/epoch_processing" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/bellatrix/epoch_processing" ) func TestMainnet_Bellatrix_EpochProcessing_ResetRegistryUpdates(t *testing.T) { diff --git a/testing/spectest/mainnet/bellatrix__epoch_processing__rewards_and_penalties_test.go b/testing/spectest/mainnet/bellatrix__epoch_processing__rewards_and_penalties_test.go index a3d11b2903..899a069422 100644 --- a/testing/spectest/mainnet/bellatrix__epoch_processing__rewards_and_penalties_test.go +++ b/testing/spectest/mainnet/bellatrix__epoch_processing__rewards_and_penalties_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/bellatrix/epoch_processing" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/bellatrix/epoch_processing" ) func TestMainnet_Bellatrix_EpochProcessing_RewardsAndPenalties(t *testing.T) { diff --git a/testing/spectest/mainnet/bellatrix__epoch_processing__slashings_reset_test.go b/testing/spectest/mainnet/bellatrix__epoch_processing__slashings_reset_test.go index 0739bbc274..5c4d008a15 100644 --- 
a/testing/spectest/mainnet/bellatrix__epoch_processing__slashings_reset_test.go +++ b/testing/spectest/mainnet/bellatrix__epoch_processing__slashings_reset_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/bellatrix/epoch_processing" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/bellatrix/epoch_processing" ) func TestMainnet_Bellatrix_EpochProcessing_SlashingsReset(t *testing.T) { diff --git a/testing/spectest/mainnet/bellatrix__epoch_processing__slashings_test.go b/testing/spectest/mainnet/bellatrix__epoch_processing__slashings_test.go index 99427f74ed..772f359084 100644 --- a/testing/spectest/mainnet/bellatrix__epoch_processing__slashings_test.go +++ b/testing/spectest/mainnet/bellatrix__epoch_processing__slashings_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/bellatrix/epoch_processing" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/bellatrix/epoch_processing" ) func TestMainnet_Bellatrix_EpochProcessing_Slashings(t *testing.T) { diff --git a/testing/spectest/mainnet/bellatrix__finality__finality_test.go b/testing/spectest/mainnet/bellatrix__finality__finality_test.go index 838544d5bb..cfa454b1ef 100644 --- a/testing/spectest/mainnet/bellatrix__finality__finality_test.go +++ b/testing/spectest/mainnet/bellatrix__finality__finality_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/bellatrix/finality" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/bellatrix/finality" ) func TestMainnet_Bellatrix_Finality(t *testing.T) { diff --git a/testing/spectest/mainnet/bellatrix__fork_helper__upgrade_to_altair_test.go b/testing/spectest/mainnet/bellatrix__fork_helper__upgrade_to_altair_test.go index 304b152f1f..c9897cdd37 100644 --- a/testing/spectest/mainnet/bellatrix__fork_helper__upgrade_to_altair_test.go +++ b/testing/spectest/mainnet/bellatrix__fork_helper__upgrade_to_altair_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/bellatrix/fork" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/bellatrix/fork" ) func TestMainnet_Bellatrix_UpgradeToBellatrix(t *testing.T) { diff --git a/testing/spectest/mainnet/bellatrix__fork_transition__transition_test.go b/testing/spectest/mainnet/bellatrix__fork_transition__transition_test.go index 16358c3978..6f28bd85d5 100644 --- a/testing/spectest/mainnet/bellatrix__fork_transition__transition_test.go +++ b/testing/spectest/mainnet/bellatrix__fork_transition__transition_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/bellatrix/fork" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/bellatrix/fork" ) func TestMainnet_Bellatrix_Transition(t *testing.T) { diff --git a/testing/spectest/mainnet/bellatrix__forkchoice__forkchoice_test.go b/testing/spectest/mainnet/bellatrix__forkchoice__forkchoice_test.go index decff38d7a..9ca95810b6 100644 --- a/testing/spectest/mainnet/bellatrix__forkchoice__forkchoice_test.go +++ b/testing/spectest/mainnet/bellatrix__forkchoice__forkchoice_test.go @@ -3,8 +3,8 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/runtime/version" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/common/forkchoice" + "github.com/OffchainLabs/prysm/v7/runtime/version" + 
"github.com/OffchainLabs/prysm/v7/testing/spectest/shared/common/forkchoice" ) func TestMainnet_Bellatrix_Forkchoice(t *testing.T) { diff --git a/testing/spectest/mainnet/bellatrix__light_client__single_merkle_proof_test.go b/testing/spectest/mainnet/bellatrix__light_client__single_merkle_proof_test.go index 7d6b9f4388..e04088a452 100644 --- a/testing/spectest/mainnet/bellatrix__light_client__single_merkle_proof_test.go +++ b/testing/spectest/mainnet/bellatrix__light_client__single_merkle_proof_test.go @@ -3,8 +3,8 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/runtime/version" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/common/light_client" + "github.com/OffchainLabs/prysm/v7/runtime/version" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/common/light_client" ) func TestMainnet_Bellatrix_LightClient_SingleMerkleProof(t *testing.T) { diff --git a/testing/spectest/mainnet/bellatrix__operations__attestation_test.go b/testing/spectest/mainnet/bellatrix__operations__attestation_test.go index 444e4cc629..b58d1a4d29 100644 --- a/testing/spectest/mainnet/bellatrix__operations__attestation_test.go +++ b/testing/spectest/mainnet/bellatrix__operations__attestation_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/bellatrix/operations" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/bellatrix/operations" ) func TestMainnet_Bellatrix_Operations_Attestation(t *testing.T) { diff --git a/testing/spectest/mainnet/bellatrix__operations__attester_slashing_test.go b/testing/spectest/mainnet/bellatrix__operations__attester_slashing_test.go index b66b8ea167..cfe7f64aa6 100644 --- a/testing/spectest/mainnet/bellatrix__operations__attester_slashing_test.go +++ b/testing/spectest/mainnet/bellatrix__operations__attester_slashing_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/bellatrix/operations" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/bellatrix/operations" ) func TestMainnet_Bellatrix_Operations_AttesterSlashing(t *testing.T) { diff --git a/testing/spectest/mainnet/bellatrix__operations__block_header_test.go b/testing/spectest/mainnet/bellatrix__operations__block_header_test.go index 0664c90526..0182b5e655 100644 --- a/testing/spectest/mainnet/bellatrix__operations__block_header_test.go +++ b/testing/spectest/mainnet/bellatrix__operations__block_header_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/bellatrix/operations" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/bellatrix/operations" ) func TestMainnet_Bellatrix_Operations_BlockHeader(t *testing.T) { diff --git a/testing/spectest/mainnet/bellatrix__operations__deposit_test.go b/testing/spectest/mainnet/bellatrix__operations__deposit_test.go index c2fda6680c..f77194e37e 100644 --- a/testing/spectest/mainnet/bellatrix__operations__deposit_test.go +++ b/testing/spectest/mainnet/bellatrix__operations__deposit_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/bellatrix/operations" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/bellatrix/operations" ) func TestMainnet_Bellatrix_Operations_Deposit(t *testing.T) { diff --git a/testing/spectest/mainnet/bellatrix__operations__execution_payload_test.go b/testing/spectest/mainnet/bellatrix__operations__execution_payload_test.go 
index e414826198..1b0b63d999 100644 --- a/testing/spectest/mainnet/bellatrix__operations__execution_payload_test.go +++ b/testing/spectest/mainnet/bellatrix__operations__execution_payload_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/bellatrix/operations" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/bellatrix/operations" ) func TestMainnet_Bellatrix_Operations_PayloadExecution(t *testing.T) { diff --git a/testing/spectest/mainnet/bellatrix__operations__proposer_slashing_test.go b/testing/spectest/mainnet/bellatrix__operations__proposer_slashing_test.go index 61e6fc1062..4dcd32105a 100644 --- a/testing/spectest/mainnet/bellatrix__operations__proposer_slashing_test.go +++ b/testing/spectest/mainnet/bellatrix__operations__proposer_slashing_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/bellatrix/operations" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/bellatrix/operations" ) func TestMainnet_Bellatrix_Operations_ProposerSlashing(t *testing.T) { diff --git a/testing/spectest/mainnet/bellatrix__operations__sync_committee_test.go b/testing/spectest/mainnet/bellatrix__operations__sync_committee_test.go index 24017da422..380673e990 100644 --- a/testing/spectest/mainnet/bellatrix__operations__sync_committee_test.go +++ b/testing/spectest/mainnet/bellatrix__operations__sync_committee_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/bellatrix/operations" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/bellatrix/operations" ) func TestMainnet_Bellatrix_Operations_SyncCommittee(t *testing.T) { diff --git a/testing/spectest/mainnet/bellatrix__operations__voluntary_exit_test.go b/testing/spectest/mainnet/bellatrix__operations__voluntary_exit_test.go index e8de0480d8..55b81b594c 100644 --- a/testing/spectest/mainnet/bellatrix__operations__voluntary_exit_test.go +++ b/testing/spectest/mainnet/bellatrix__operations__voluntary_exit_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/bellatrix/operations" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/bellatrix/operations" ) func TestMainnet_Bellatrix_Operations_VoluntaryExit(t *testing.T) { diff --git a/testing/spectest/mainnet/bellatrix__random__random_test.go b/testing/spectest/mainnet/bellatrix__random__random_test.go index 96733e0a7a..9710af735a 100644 --- a/testing/spectest/mainnet/bellatrix__random__random_test.go +++ b/testing/spectest/mainnet/bellatrix__random__random_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/bellatrix/sanity" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/bellatrix/sanity" ) func TestMainnet_Bellatrix_Random(t *testing.T) { diff --git a/testing/spectest/mainnet/bellatrix__rewards__rewards_test.go b/testing/spectest/mainnet/bellatrix__rewards__rewards_test.go index 86c6e0f7c1..b3208f86cf 100644 --- a/testing/spectest/mainnet/bellatrix__rewards__rewards_test.go +++ b/testing/spectest/mainnet/bellatrix__rewards__rewards_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/bellatrix/rewards" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/bellatrix/rewards" ) func TestMainnet_Bellatrix_Rewards(t *testing.T) { diff --git 
a/testing/spectest/mainnet/bellatrix__sanity__blocks_test.go b/testing/spectest/mainnet/bellatrix__sanity__blocks_test.go index cb88187866..7a621d9cfb 100644 --- a/testing/spectest/mainnet/bellatrix__sanity__blocks_test.go +++ b/testing/spectest/mainnet/bellatrix__sanity__blocks_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/bellatrix/sanity" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/bellatrix/sanity" ) func TestMainnet_Bellatrix_Sanity_Blocks(t *testing.T) { diff --git a/testing/spectest/mainnet/bellatrix__sanity__slots_test.go b/testing/spectest/mainnet/bellatrix__sanity__slots_test.go index cbc5a2e046..4c1bad3d60 100644 --- a/testing/spectest/mainnet/bellatrix__sanity__slots_test.go +++ b/testing/spectest/mainnet/bellatrix__sanity__slots_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/bellatrix/sanity" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/bellatrix/sanity" ) func TestMainnet_Bellatrix_Sanity_Slots(t *testing.T) { diff --git a/testing/spectest/mainnet/bellatrix__ssz_static__ssz_static_test.go b/testing/spectest/mainnet/bellatrix__ssz_static__ssz_static_test.go index 31d2e01cbc..6f62acb16e 100644 --- a/testing/spectest/mainnet/bellatrix__ssz_static__ssz_static_test.go +++ b/testing/spectest/mainnet/bellatrix__ssz_static__ssz_static_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/bellatrix/ssz_static" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/bellatrix/ssz_static" ) func TestMainnet_Bellatrix_SSZStatic(t *testing.T) { diff --git a/testing/spectest/mainnet/capella__epoch_processing__effective_balance_updates_test.go b/testing/spectest/mainnet/capella__epoch_processing__effective_balance_updates_test.go index 2c750c8fa1..1e4c722dd4 100644 --- a/testing/spectest/mainnet/capella__epoch_processing__effective_balance_updates_test.go +++ b/testing/spectest/mainnet/capella__epoch_processing__effective_balance_updates_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/capella/epoch_processing" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/capella/epoch_processing" ) func TestMainnet_Capella_EpochProcessing_EffectiveBalanceUpdates(t *testing.T) { diff --git a/testing/spectest/mainnet/capella__epoch_processing__eth1_data_reset_test.go b/testing/spectest/mainnet/capella__epoch_processing__eth1_data_reset_test.go index f7b4acfb57..ecdf67389b 100644 --- a/testing/spectest/mainnet/capella__epoch_processing__eth1_data_reset_test.go +++ b/testing/spectest/mainnet/capella__epoch_processing__eth1_data_reset_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/capella/epoch_processing" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/capella/epoch_processing" ) func TestMainnet_Capella_EpochProcessing_Eth1DataReset(t *testing.T) { diff --git a/testing/spectest/mainnet/capella__epoch_processing__historical_summaries_update_test.go b/testing/spectest/mainnet/capella__epoch_processing__historical_summaries_update_test.go index af1831c1ef..a9906a2c75 100644 --- a/testing/spectest/mainnet/capella__epoch_processing__historical_summaries_update_test.go +++ b/testing/spectest/mainnet/capella__epoch_processing__historical_summaries_update_test.go @@ -3,7 +3,7 @@ package mainnet import ( 
"testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/capella/epoch_processing" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/capella/epoch_processing" ) func TestMainnet_Capella_EpochProcessing_HistoricalSummariesUpdate(t *testing.T) { diff --git a/testing/spectest/mainnet/capella__epoch_processing__inactivity_updates_test.go b/testing/spectest/mainnet/capella__epoch_processing__inactivity_updates_test.go index d8840cc9d9..6c809d389f 100644 --- a/testing/spectest/mainnet/capella__epoch_processing__inactivity_updates_test.go +++ b/testing/spectest/mainnet/capella__epoch_processing__inactivity_updates_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/capella/epoch_processing" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/capella/epoch_processing" ) func TestMainnet_Capella_EpochProcessing_InactivityUpdates(t *testing.T) { diff --git a/testing/spectest/mainnet/capella__epoch_processing__justification_and_finalization_test.go b/testing/spectest/mainnet/capella__epoch_processing__justification_and_finalization_test.go index 3a42037417..4cf65afaa4 100644 --- a/testing/spectest/mainnet/capella__epoch_processing__justification_and_finalization_test.go +++ b/testing/spectest/mainnet/capella__epoch_processing__justification_and_finalization_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/capella/epoch_processing" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/capella/epoch_processing" ) func TestMainnet_Capella_EpochProcessing_JustificationAndFinalization(t *testing.T) { diff --git a/testing/spectest/mainnet/capella__epoch_processing__participation_flag_updates_test.go b/testing/spectest/mainnet/capella__epoch_processing__participation_flag_updates_test.go index 6f345ae471..90983ee15f 100644 --- a/testing/spectest/mainnet/capella__epoch_processing__participation_flag_updates_test.go +++ b/testing/spectest/mainnet/capella__epoch_processing__participation_flag_updates_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/capella/epoch_processing" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/capella/epoch_processing" ) func TestMainnet_Capella_EpochProcessing_ParticipationFlag(t *testing.T) { diff --git a/testing/spectest/mainnet/capella__epoch_processing__randao_mixes_reset_test.go b/testing/spectest/mainnet/capella__epoch_processing__randao_mixes_reset_test.go index 3f80906fc0..04459df8ff 100644 --- a/testing/spectest/mainnet/capella__epoch_processing__randao_mixes_reset_test.go +++ b/testing/spectest/mainnet/capella__epoch_processing__randao_mixes_reset_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/capella/epoch_processing" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/capella/epoch_processing" ) func TestMainnet_Capella_EpochProcessing_RandaoMixesReset(t *testing.T) { diff --git a/testing/spectest/mainnet/capella__epoch_processing__registry_updates_test.go b/testing/spectest/mainnet/capella__epoch_processing__registry_updates_test.go index e71170c73a..4136c1451e 100644 --- a/testing/spectest/mainnet/capella__epoch_processing__registry_updates_test.go +++ b/testing/spectest/mainnet/capella__epoch_processing__registry_updates_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - 
"github.com/OffchainLabs/prysm/v6/testing/spectest/shared/capella/epoch_processing" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/capella/epoch_processing" ) func TestMainnet_Capella_EpochProcessing_ResetRegistryUpdates(t *testing.T) { diff --git a/testing/spectest/mainnet/capella__epoch_processing__rewards_and_penalties_test.go b/testing/spectest/mainnet/capella__epoch_processing__rewards_and_penalties_test.go index cf272dafb7..65da23ceb5 100644 --- a/testing/spectest/mainnet/capella__epoch_processing__rewards_and_penalties_test.go +++ b/testing/spectest/mainnet/capella__epoch_processing__rewards_and_penalties_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/capella/epoch_processing" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/capella/epoch_processing" ) func TestMainnet_Capella_EpochProcessing_RewardsAndPenalties(t *testing.T) { diff --git a/testing/spectest/mainnet/capella__epoch_processing__slashings_reset_test.go b/testing/spectest/mainnet/capella__epoch_processing__slashings_reset_test.go index 1a31620535..f8bae0f666 100644 --- a/testing/spectest/mainnet/capella__epoch_processing__slashings_reset_test.go +++ b/testing/spectest/mainnet/capella__epoch_processing__slashings_reset_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/capella/epoch_processing" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/capella/epoch_processing" ) func TestMainnet_Capella_EpochProcessing_SlashingsReset(t *testing.T) { diff --git a/testing/spectest/mainnet/capella__epoch_processing__slashings_test.go b/testing/spectest/mainnet/capella__epoch_processing__slashings_test.go index 4c6d1cc000..c48bd48c91 100644 --- a/testing/spectest/mainnet/capella__epoch_processing__slashings_test.go +++ b/testing/spectest/mainnet/capella__epoch_processing__slashings_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/capella/epoch_processing" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/capella/epoch_processing" ) func TestMainnet_Capella_EpochProcessing_Slashings(t *testing.T) { diff --git a/testing/spectest/mainnet/capella__finality__finality_test.go b/testing/spectest/mainnet/capella__finality__finality_test.go index 7a7a69eda1..8ff195917b 100644 --- a/testing/spectest/mainnet/capella__finality__finality_test.go +++ b/testing/spectest/mainnet/capella__finality__finality_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/capella/finality" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/capella/finality" ) func TestMainnet_Capella_Finality(t *testing.T) { diff --git a/testing/spectest/mainnet/capella__fork_helper__upgrade_to_capella_test.go b/testing/spectest/mainnet/capella__fork_helper__upgrade_to_capella_test.go index 137d03b411..ac75899f29 100644 --- a/testing/spectest/mainnet/capella__fork_helper__upgrade_to_capella_test.go +++ b/testing/spectest/mainnet/capella__fork_helper__upgrade_to_capella_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/capella/fork" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/capella/fork" ) func TestMainnet_Capella_UpgradeToCapella(t *testing.T) { diff --git a/testing/spectest/mainnet/capella__fork_transition__transition_test.go 
b/testing/spectest/mainnet/capella__fork_transition__transition_test.go index 2f7d8e414b..9c3b7017f6 100644 --- a/testing/spectest/mainnet/capella__fork_transition__transition_test.go +++ b/testing/spectest/mainnet/capella__fork_transition__transition_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/capella/fork" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/capella/fork" ) func TestMainnet_Capella_Transition(t *testing.T) { diff --git a/testing/spectest/mainnet/capella__forkchoice__forkchoice_test.go b/testing/spectest/mainnet/capella__forkchoice__forkchoice_test.go index 2660dc96d7..25a2aa8da4 100644 --- a/testing/spectest/mainnet/capella__forkchoice__forkchoice_test.go +++ b/testing/spectest/mainnet/capella__forkchoice__forkchoice_test.go @@ -3,8 +3,8 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/runtime/version" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/common/forkchoice" + "github.com/OffchainLabs/prysm/v7/runtime/version" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/common/forkchoice" ) func TestMainnet_Capella_Forkchoice(t *testing.T) { diff --git a/testing/spectest/mainnet/capella__light_client__single_merkle_proof_test.go b/testing/spectest/mainnet/capella__light_client__single_merkle_proof_test.go index e153467c9b..8de6004e95 100644 --- a/testing/spectest/mainnet/capella__light_client__single_merkle_proof_test.go +++ b/testing/spectest/mainnet/capella__light_client__single_merkle_proof_test.go @@ -3,8 +3,8 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/runtime/version" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/common/light_client" + "github.com/OffchainLabs/prysm/v7/runtime/version" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/common/light_client" ) func TestMainnet_Capella_LightClient_SingleMerkleProof(t *testing.T) { diff --git a/testing/spectest/mainnet/capella__operations__attestation_test.go b/testing/spectest/mainnet/capella__operations__attestation_test.go index 89a0621cba..ee36a11782 100644 --- a/testing/spectest/mainnet/capella__operations__attestation_test.go +++ b/testing/spectest/mainnet/capella__operations__attestation_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/capella/operations" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/capella/operations" ) func TestMainnet_Capella_Operations_Attestation(t *testing.T) { diff --git a/testing/spectest/mainnet/capella__operations__attester_slashing_test.go b/testing/spectest/mainnet/capella__operations__attester_slashing_test.go index a6d0c3a75b..dbf15d0c4d 100644 --- a/testing/spectest/mainnet/capella__operations__attester_slashing_test.go +++ b/testing/spectest/mainnet/capella__operations__attester_slashing_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/capella/operations" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/capella/operations" ) func TestMainnet_Capella_Operations_AttesterSlashing(t *testing.T) { diff --git a/testing/spectest/mainnet/capella__operations__block_header_test.go b/testing/spectest/mainnet/capella__operations__block_header_test.go index cc0aa52fc5..d3f1b377ba 100644 --- a/testing/spectest/mainnet/capella__operations__block_header_test.go +++ b/testing/spectest/mainnet/capella__operations__block_header_test.go @@ -3,7 +3,7 @@ 
package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/capella/operations" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/capella/operations" ) func TestMainnet_Capella_Operations_BlockHeader(t *testing.T) { diff --git a/testing/spectest/mainnet/capella__operations__bls_to_execution_change_test.go b/testing/spectest/mainnet/capella__operations__bls_to_execution_change_test.go index 158d9ee0f4..d1b5512a6f 100644 --- a/testing/spectest/mainnet/capella__operations__bls_to_execution_change_test.go +++ b/testing/spectest/mainnet/capella__operations__bls_to_execution_change_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/capella/operations" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/capella/operations" ) func TestMainnet_Capella_Operations_BLSToExecutionChange(t *testing.T) { diff --git a/testing/spectest/mainnet/capella__operations__deposit_test.go b/testing/spectest/mainnet/capella__operations__deposit_test.go index fe0609bc7e..39c34b988f 100644 --- a/testing/spectest/mainnet/capella__operations__deposit_test.go +++ b/testing/spectest/mainnet/capella__operations__deposit_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/capella/operations" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/capella/operations" ) func TestMainnet_Capella_Operations_Deposit(t *testing.T) { diff --git a/testing/spectest/mainnet/capella__operations__execution_payload_test.go b/testing/spectest/mainnet/capella__operations__execution_payload_test.go index 7d53ce6356..3d21ad2dc4 100644 --- a/testing/spectest/mainnet/capella__operations__execution_payload_test.go +++ b/testing/spectest/mainnet/capella__operations__execution_payload_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/capella/operations" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/capella/operations" ) func TestMainnet_Capella_Operations_PayloadExecution(t *testing.T) { diff --git a/testing/spectest/mainnet/capella__operations__proposer_slashing_test.go b/testing/spectest/mainnet/capella__operations__proposer_slashing_test.go index e0571f2dfb..c35cb9ee7c 100644 --- a/testing/spectest/mainnet/capella__operations__proposer_slashing_test.go +++ b/testing/spectest/mainnet/capella__operations__proposer_slashing_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/capella/operations" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/capella/operations" ) func TestMainnet_Capella_Operations_ProposerSlashing(t *testing.T) { diff --git a/testing/spectest/mainnet/capella__operations__sync_committee_test.go b/testing/spectest/mainnet/capella__operations__sync_committee_test.go index 8f007dd038..c7cff4a218 100644 --- a/testing/spectest/mainnet/capella__operations__sync_committee_test.go +++ b/testing/spectest/mainnet/capella__operations__sync_committee_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/capella/operations" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/capella/operations" ) func TestMainnet_Capella_Operations_SyncCommittee(t *testing.T) { diff --git a/testing/spectest/mainnet/capella__operations__voluntary_exit_test.go b/testing/spectest/mainnet/capella__operations__voluntary_exit_test.go index 
7bda262935..17191bee3d 100644 --- a/testing/spectest/mainnet/capella__operations__voluntary_exit_test.go +++ b/testing/spectest/mainnet/capella__operations__voluntary_exit_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/capella/operations" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/capella/operations" ) func TestMainnet_Capella_Operations_VoluntaryExit(t *testing.T) { diff --git a/testing/spectest/mainnet/capella__operations__withdrawals_test.go b/testing/spectest/mainnet/capella__operations__withdrawals_test.go index 373fe8df8a..81dded3dc6 100644 --- a/testing/spectest/mainnet/capella__operations__withdrawals_test.go +++ b/testing/spectest/mainnet/capella__operations__withdrawals_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/capella/operations" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/capella/operations" ) func TestMainnet_Capella_Operations_Withdrawals(t *testing.T) { diff --git a/testing/spectest/mainnet/capella__random__random_test.go b/testing/spectest/mainnet/capella__random__random_test.go index 5bcceba1db..286411ed2c 100644 --- a/testing/spectest/mainnet/capella__random__random_test.go +++ b/testing/spectest/mainnet/capella__random__random_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/capella/sanity" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/capella/sanity" ) func TestMainnet_Capella_Random(t *testing.T) { diff --git a/testing/spectest/mainnet/capella__rewards__rewards_test.go b/testing/spectest/mainnet/capella__rewards__rewards_test.go index 920aef4369..af38f82291 100644 --- a/testing/spectest/mainnet/capella__rewards__rewards_test.go +++ b/testing/spectest/mainnet/capella__rewards__rewards_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/capella/rewards" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/capella/rewards" ) func TestMainnet_Capella_Rewards(t *testing.T) { diff --git a/testing/spectest/mainnet/capella__sanity__blocks_test.go b/testing/spectest/mainnet/capella__sanity__blocks_test.go index 9b797f395b..f0f1e6420e 100644 --- a/testing/spectest/mainnet/capella__sanity__blocks_test.go +++ b/testing/spectest/mainnet/capella__sanity__blocks_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/capella/sanity" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/capella/sanity" ) func TestMainnet_Capella_Sanity_Blocks(t *testing.T) { diff --git a/testing/spectest/mainnet/capella__sanity__slots_test.go b/testing/spectest/mainnet/capella__sanity__slots_test.go index 44bff5d8ef..5b84a38337 100644 --- a/testing/spectest/mainnet/capella__sanity__slots_test.go +++ b/testing/spectest/mainnet/capella__sanity__slots_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/capella/sanity" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/capella/sanity" ) func TestMainnet_Capella_Sanity_Slots(t *testing.T) { diff --git a/testing/spectest/mainnet/capella__ssz_static__ssz_static_test.go b/testing/spectest/mainnet/capella__ssz_static__ssz_static_test.go index 1a44eb1409..0fe47c50d0 100644 --- a/testing/spectest/mainnet/capella__ssz_static__ssz_static_test.go +++ 
b/testing/spectest/mainnet/capella__ssz_static__ssz_static_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/capella/ssz_static" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/capella/ssz_static" ) func TestMainnet_Capella_SSZStatic(t *testing.T) { diff --git a/testing/spectest/mainnet/deneb__epoch_processing__effective_balance_updates_test.go b/testing/spectest/mainnet/deneb__epoch_processing__effective_balance_updates_test.go index 004d5ec144..3f5d61fb89 100644 --- a/testing/spectest/mainnet/deneb__epoch_processing__effective_balance_updates_test.go +++ b/testing/spectest/mainnet/deneb__epoch_processing__effective_balance_updates_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/deneb/epoch_processing" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/deneb/epoch_processing" ) func TestMainnet_Deneb_EpochProcessing_EffectiveBalanceUpdates(t *testing.T) { diff --git a/testing/spectest/mainnet/deneb__epoch_processing__eth1_data_reset_test.go b/testing/spectest/mainnet/deneb__epoch_processing__eth1_data_reset_test.go index 8674fcd6b0..5424fc5b9c 100644 --- a/testing/spectest/mainnet/deneb__epoch_processing__eth1_data_reset_test.go +++ b/testing/spectest/mainnet/deneb__epoch_processing__eth1_data_reset_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/deneb/epoch_processing" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/deneb/epoch_processing" ) func TestMainnet_Deneb_EpochProcessing_Eth1DataReset(t *testing.T) { diff --git a/testing/spectest/mainnet/deneb__epoch_processing__historical_summaries_update_test.go b/testing/spectest/mainnet/deneb__epoch_processing__historical_summaries_update_test.go index 7cda081548..d0a4b3f55d 100644 --- a/testing/spectest/mainnet/deneb__epoch_processing__historical_summaries_update_test.go +++ b/testing/spectest/mainnet/deneb__epoch_processing__historical_summaries_update_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/deneb/epoch_processing" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/deneb/epoch_processing" ) func TestMainnet_Deneb_EpochProcessing_HistoricalSummariesUpdate(t *testing.T) { diff --git a/testing/spectest/mainnet/deneb__epoch_processing__inactivity_updates_test.go b/testing/spectest/mainnet/deneb__epoch_processing__inactivity_updates_test.go index f8dc106f60..fcda507543 100644 --- a/testing/spectest/mainnet/deneb__epoch_processing__inactivity_updates_test.go +++ b/testing/spectest/mainnet/deneb__epoch_processing__inactivity_updates_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/deneb/epoch_processing" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/deneb/epoch_processing" ) func TestMainnet_Deneb_EpochProcessing_InactivityUpdates(t *testing.T) { diff --git a/testing/spectest/mainnet/deneb__epoch_processing__justification_and_finalization_test.go b/testing/spectest/mainnet/deneb__epoch_processing__justification_and_finalization_test.go index 65f5b3346f..e55d25bda7 100644 --- a/testing/spectest/mainnet/deneb__epoch_processing__justification_and_finalization_test.go +++ b/testing/spectest/mainnet/deneb__epoch_processing__justification_and_finalization_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - 
"github.com/OffchainLabs/prysm/v6/testing/spectest/shared/deneb/epoch_processing" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/deneb/epoch_processing" ) func TestMainnet_Deneb_EpochProcessing_JustificationAndFinalization(t *testing.T) { diff --git a/testing/spectest/mainnet/deneb__epoch_processing__participation_flag_updates_test.go b/testing/spectest/mainnet/deneb__epoch_processing__participation_flag_updates_test.go index c17fa8c887..18dd6d06c5 100644 --- a/testing/spectest/mainnet/deneb__epoch_processing__participation_flag_updates_test.go +++ b/testing/spectest/mainnet/deneb__epoch_processing__participation_flag_updates_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/deneb/epoch_processing" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/deneb/epoch_processing" ) func TestMainnet_Deneb_EpochProcessing_ParticipationFlag(t *testing.T) { diff --git a/testing/spectest/mainnet/deneb__epoch_processing__randao_mixes_reset_test.go b/testing/spectest/mainnet/deneb__epoch_processing__randao_mixes_reset_test.go index a253044bc5..1f40c1980b 100644 --- a/testing/spectest/mainnet/deneb__epoch_processing__randao_mixes_reset_test.go +++ b/testing/spectest/mainnet/deneb__epoch_processing__randao_mixes_reset_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/deneb/epoch_processing" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/deneb/epoch_processing" ) func TestMainnet_Deneb_EpochProcessing_RandaoMixesReset(t *testing.T) { diff --git a/testing/spectest/mainnet/deneb__epoch_processing__registry_updates_test.go b/testing/spectest/mainnet/deneb__epoch_processing__registry_updates_test.go index 2d7bfaea79..99fce8b804 100644 --- a/testing/spectest/mainnet/deneb__epoch_processing__registry_updates_test.go +++ b/testing/spectest/mainnet/deneb__epoch_processing__registry_updates_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/deneb/epoch_processing" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/deneb/epoch_processing" ) func TestMainnet_Deneb_EpochProcessing_ResetRegistryUpdates(t *testing.T) { diff --git a/testing/spectest/mainnet/deneb__epoch_processing__rewards_and_penalties_test.go b/testing/spectest/mainnet/deneb__epoch_processing__rewards_and_penalties_test.go index 87fe8f4022..a4996c590a 100644 --- a/testing/spectest/mainnet/deneb__epoch_processing__rewards_and_penalties_test.go +++ b/testing/spectest/mainnet/deneb__epoch_processing__rewards_and_penalties_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/deneb/epoch_processing" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/deneb/epoch_processing" ) func TestMainnet_Deneb_EpochProcessing_RewardsAndPenalties(t *testing.T) { diff --git a/testing/spectest/mainnet/deneb__epoch_processing__slashings_reset_test.go b/testing/spectest/mainnet/deneb__epoch_processing__slashings_reset_test.go index 3746428856..094dd03498 100644 --- a/testing/spectest/mainnet/deneb__epoch_processing__slashings_reset_test.go +++ b/testing/spectest/mainnet/deneb__epoch_processing__slashings_reset_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/deneb/epoch_processing" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/deneb/epoch_processing" ) func 
TestMainnet_Deneb_EpochProcessing_SlashingsReset(t *testing.T) { diff --git a/testing/spectest/mainnet/deneb__epoch_processing__slashings_test.go b/testing/spectest/mainnet/deneb__epoch_processing__slashings_test.go index e58bff9259..d635fe8a0b 100644 --- a/testing/spectest/mainnet/deneb__epoch_processing__slashings_test.go +++ b/testing/spectest/mainnet/deneb__epoch_processing__slashings_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/deneb/epoch_processing" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/deneb/epoch_processing" ) func TestMainnet_Deneb_EpochProcessing_Slashings(t *testing.T) { diff --git a/testing/spectest/mainnet/deneb__finality__finality_test.go b/testing/spectest/mainnet/deneb__finality__finality_test.go index 7350b40f33..283e062ff7 100644 --- a/testing/spectest/mainnet/deneb__finality__finality_test.go +++ b/testing/spectest/mainnet/deneb__finality__finality_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/deneb/finality" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/deneb/finality" ) func TestMainnet_Deneb_Finality(t *testing.T) { diff --git a/testing/spectest/mainnet/deneb__fork_helper__upgrade_to_deneb_test.go b/testing/spectest/mainnet/deneb__fork_helper__upgrade_to_deneb_test.go index bfcca00179..d4485a5b82 100644 --- a/testing/spectest/mainnet/deneb__fork_helper__upgrade_to_deneb_test.go +++ b/testing/spectest/mainnet/deneb__fork_helper__upgrade_to_deneb_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/deneb/fork" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/deneb/fork" ) func TestMainnet_UpgradeToDeneb(t *testing.T) { diff --git a/testing/spectest/mainnet/deneb__fork_transition__transition_test.go b/testing/spectest/mainnet/deneb__fork_transition__transition_test.go index bf411b88a5..7acc2e37d1 100644 --- a/testing/spectest/mainnet/deneb__fork_transition__transition_test.go +++ b/testing/spectest/mainnet/deneb__fork_transition__transition_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/deneb/fork" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/deneb/fork" ) func TestMainnet_Deneb_Transition(t *testing.T) { diff --git a/testing/spectest/mainnet/deneb__forkchoice__forkchoice_test.go b/testing/spectest/mainnet/deneb__forkchoice__forkchoice_test.go index cb7c634785..b8a70331b5 100644 --- a/testing/spectest/mainnet/deneb__forkchoice__forkchoice_test.go +++ b/testing/spectest/mainnet/deneb__forkchoice__forkchoice_test.go @@ -3,8 +3,8 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/runtime/version" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/common/forkchoice" + "github.com/OffchainLabs/prysm/v7/runtime/version" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/common/forkchoice" ) func TestMainnet_Deneb_Forkchoice(t *testing.T) { diff --git a/testing/spectest/mainnet/deneb__light_client__single_merkle_proof_test.go b/testing/spectest/mainnet/deneb__light_client__single_merkle_proof_test.go index 365ad743b7..e14ded39a1 100644 --- a/testing/spectest/mainnet/deneb__light_client__single_merkle_proof_test.go +++ b/testing/spectest/mainnet/deneb__light_client__single_merkle_proof_test.go @@ -3,8 +3,8 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/runtime/version" - 
"github.com/OffchainLabs/prysm/v6/testing/spectest/shared/common/light_client" + "github.com/OffchainLabs/prysm/v7/runtime/version" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/common/light_client" ) func TestMainnet_Deneb_LightClient_SingleMerkleProof(t *testing.T) { diff --git a/testing/spectest/mainnet/deneb__merkle_proof__merkle_proof_test.go b/testing/spectest/mainnet/deneb__merkle_proof__merkle_proof_test.go index 9c6a901c57..e3d21d2a60 100644 --- a/testing/spectest/mainnet/deneb__merkle_proof__merkle_proof_test.go +++ b/testing/spectest/mainnet/deneb__merkle_proof__merkle_proof_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/deneb/merkle_proof" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/deneb/merkle_proof" ) func TestMainnet_Deneb_MerkleProof(t *testing.T) { diff --git a/testing/spectest/mainnet/deneb__operations__attestation_test.go b/testing/spectest/mainnet/deneb__operations__attestation_test.go index 25e5f37a6f..fb20f34924 100644 --- a/testing/spectest/mainnet/deneb__operations__attestation_test.go +++ b/testing/spectest/mainnet/deneb__operations__attestation_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/deneb/operations" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/deneb/operations" ) func TestMainnet_Deneb_Operations_Attestation(t *testing.T) { diff --git a/testing/spectest/mainnet/deneb__operations__attester_slashing_test.go b/testing/spectest/mainnet/deneb__operations__attester_slashing_test.go index 17bc78e474..fc640911d8 100644 --- a/testing/spectest/mainnet/deneb__operations__attester_slashing_test.go +++ b/testing/spectest/mainnet/deneb__operations__attester_slashing_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/deneb/operations" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/deneb/operations" ) func TestMainnet_Deneb_Operations_AttesterSlashing(t *testing.T) { diff --git a/testing/spectest/mainnet/deneb__operations__block_header_test.go b/testing/spectest/mainnet/deneb__operations__block_header_test.go index ffcfd90769..c958e69b7b 100644 --- a/testing/spectest/mainnet/deneb__operations__block_header_test.go +++ b/testing/spectest/mainnet/deneb__operations__block_header_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/deneb/operations" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/deneb/operations" ) func TestMainnet_Deneb_Operations_BlockHeader(t *testing.T) { diff --git a/testing/spectest/mainnet/deneb__operations__bls_to_execution_change_test.go b/testing/spectest/mainnet/deneb__operations__bls_to_execution_change_test.go index a35476a1b3..8bebf79793 100644 --- a/testing/spectest/mainnet/deneb__operations__bls_to_execution_change_test.go +++ b/testing/spectest/mainnet/deneb__operations__bls_to_execution_change_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/deneb/operations" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/deneb/operations" ) func TestMainnet_Deneb_Operations_BLSToExecutionChange(t *testing.T) { diff --git a/testing/spectest/mainnet/deneb__operations__deposit_test.go b/testing/spectest/mainnet/deneb__operations__deposit_test.go index 563d305e0d..d23f2b55c0 100644 --- 
a/testing/spectest/mainnet/deneb__operations__deposit_test.go +++ b/testing/spectest/mainnet/deneb__operations__deposit_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/deneb/operations" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/deneb/operations" ) func TestMainnet_Deneb_Operations_Deposit(t *testing.T) { diff --git a/testing/spectest/mainnet/deneb__operations__execution_payload_test.go b/testing/spectest/mainnet/deneb__operations__execution_payload_test.go index 57234a52f4..e084a1a519 100644 --- a/testing/spectest/mainnet/deneb__operations__execution_payload_test.go +++ b/testing/spectest/mainnet/deneb__operations__execution_payload_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/deneb/operations" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/deneb/operations" ) func TestMainnet_Deneb_Operations_PayloadExecution(t *testing.T) { diff --git a/testing/spectest/mainnet/deneb__operations__proposer_slashing_test.go b/testing/spectest/mainnet/deneb__operations__proposer_slashing_test.go index 6543d213e7..cbcd8ff23d 100644 --- a/testing/spectest/mainnet/deneb__operations__proposer_slashing_test.go +++ b/testing/spectest/mainnet/deneb__operations__proposer_slashing_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/deneb/operations" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/deneb/operations" ) func TestMainnet_Deneb_Operations_ProposerSlashing(t *testing.T) { diff --git a/testing/spectest/mainnet/deneb__operations__sync_committee_test.go b/testing/spectest/mainnet/deneb__operations__sync_committee_test.go index d5f5b9c967..d63b0477c3 100644 --- a/testing/spectest/mainnet/deneb__operations__sync_committee_test.go +++ b/testing/spectest/mainnet/deneb__operations__sync_committee_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/deneb/operations" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/deneb/operations" ) func TestMainnet_Deneb_Operations_SyncCommittee(t *testing.T) { diff --git a/testing/spectest/mainnet/deneb__operations__voluntary_exit_test.go b/testing/spectest/mainnet/deneb__operations__voluntary_exit_test.go index 2b1e927a44..d204a25a4e 100644 --- a/testing/spectest/mainnet/deneb__operations__voluntary_exit_test.go +++ b/testing/spectest/mainnet/deneb__operations__voluntary_exit_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/deneb/operations" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/deneb/operations" ) func TestMainnet_Deneb_Operations_VoluntaryExit(t *testing.T) { diff --git a/testing/spectest/mainnet/deneb__operations__withdrawals_test.go b/testing/spectest/mainnet/deneb__operations__withdrawals_test.go index 7f17bd1b0b..9f1c0b9a4e 100644 --- a/testing/spectest/mainnet/deneb__operations__withdrawals_test.go +++ b/testing/spectest/mainnet/deneb__operations__withdrawals_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/deneb/operations" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/deneb/operations" ) func TestMainnet_Deneb_Operations_Withdrawals(t *testing.T) { diff --git a/testing/spectest/mainnet/deneb__random__random_test.go 
b/testing/spectest/mainnet/deneb__random__random_test.go index e3bf2d6a41..970a20c767 100644 --- a/testing/spectest/mainnet/deneb__random__random_test.go +++ b/testing/spectest/mainnet/deneb__random__random_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/deneb/sanity" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/deneb/sanity" ) func TestMainnet_Deneb_Random(t *testing.T) { diff --git a/testing/spectest/mainnet/deneb__rewards__rewards_test.go b/testing/spectest/mainnet/deneb__rewards__rewards_test.go index 9cad2025a7..c271a17404 100644 --- a/testing/spectest/mainnet/deneb__rewards__rewards_test.go +++ b/testing/spectest/mainnet/deneb__rewards__rewards_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/deneb/rewards" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/deneb/rewards" ) func TestMainnet_Deneb_Rewards(t *testing.T) { diff --git a/testing/spectest/mainnet/deneb__sanity__blocks_test.go b/testing/spectest/mainnet/deneb__sanity__blocks_test.go index 409a06f419..d1fdf08621 100644 --- a/testing/spectest/mainnet/deneb__sanity__blocks_test.go +++ b/testing/spectest/mainnet/deneb__sanity__blocks_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/deneb/sanity" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/deneb/sanity" ) func TestMainnet_Deneb_Sanity_Blocks(t *testing.T) { diff --git a/testing/spectest/mainnet/deneb__sanity__slots_test.go b/testing/spectest/mainnet/deneb__sanity__slots_test.go index d8809a60aa..69d53b00fe 100644 --- a/testing/spectest/mainnet/deneb__sanity__slots_test.go +++ b/testing/spectest/mainnet/deneb__sanity__slots_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/deneb/sanity" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/deneb/sanity" ) func TestMainnet_Deneb_Sanity_Slots(t *testing.T) { diff --git a/testing/spectest/mainnet/deneb__ssz_static__ssz_static_test.go b/testing/spectest/mainnet/deneb__ssz_static__ssz_static_test.go index 7e753865ff..8f45aad3ba 100644 --- a/testing/spectest/mainnet/deneb__ssz_static__ssz_static_test.go +++ b/testing/spectest/mainnet/deneb__ssz_static__ssz_static_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/deneb/ssz_static" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/deneb/ssz_static" ) func TestMainnet_Deneb_SSZStatic(t *testing.T) { diff --git a/testing/spectest/mainnet/electra__epoch_processing__effective_balance_updates_test.go b/testing/spectest/mainnet/electra__epoch_processing__effective_balance_updates_test.go index c449423e41..d8457e9719 100644 --- a/testing/spectest/mainnet/electra__epoch_processing__effective_balance_updates_test.go +++ b/testing/spectest/mainnet/electra__epoch_processing__effective_balance_updates_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/electra/epoch_processing" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/electra/epoch_processing" ) func TestMainnet_electra_EpochProcessing_EffectiveBalanceUpdates(t *testing.T) { diff --git a/testing/spectest/mainnet/electra__epoch_processing__eth1_data_reset_test.go b/testing/spectest/mainnet/electra__epoch_processing__eth1_data_reset_test.go index 76298f1d98..bbd617e9c8 
100644 --- a/testing/spectest/mainnet/electra__epoch_processing__eth1_data_reset_test.go +++ b/testing/spectest/mainnet/electra__epoch_processing__eth1_data_reset_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/electra/epoch_processing" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/electra/epoch_processing" ) func TestMainnet_Electra_EpochProcessing_Eth1DataReset(t *testing.T) { diff --git a/testing/spectest/mainnet/electra__epoch_processing__historical_summaries_update_test.go b/testing/spectest/mainnet/electra__epoch_processing__historical_summaries_update_test.go index 7c0c5fa931..896e2227bc 100644 --- a/testing/spectest/mainnet/electra__epoch_processing__historical_summaries_update_test.go +++ b/testing/spectest/mainnet/electra__epoch_processing__historical_summaries_update_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/electra/epoch_processing" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/electra/epoch_processing" ) func TestMainnet_Electra_EpochProcessing_HistoricalSummariesUpdate(t *testing.T) { diff --git a/testing/spectest/mainnet/electra__epoch_processing__inactivity_updates_test.go b/testing/spectest/mainnet/electra__epoch_processing__inactivity_updates_test.go index d0a5dcb83b..e4b2a07885 100644 --- a/testing/spectest/mainnet/electra__epoch_processing__inactivity_updates_test.go +++ b/testing/spectest/mainnet/electra__epoch_processing__inactivity_updates_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/electra/epoch_processing" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/electra/epoch_processing" ) func TestMainnet_Electra_EpochProcessing_InactivityUpdates(t *testing.T) { diff --git a/testing/spectest/mainnet/electra__epoch_processing__justification_and_finalization_test.go b/testing/spectest/mainnet/electra__epoch_processing__justification_and_finalization_test.go index b254055e8f..e92417647f 100644 --- a/testing/spectest/mainnet/electra__epoch_processing__justification_and_finalization_test.go +++ b/testing/spectest/mainnet/electra__epoch_processing__justification_and_finalization_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/electra/epoch_processing" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/electra/epoch_processing" ) func TestMainnet_Electra_EpochProcessing_JustificationAndFinalization(t *testing.T) { diff --git a/testing/spectest/mainnet/electra__epoch_processing__participation_flag_updates_test.go b/testing/spectest/mainnet/electra__epoch_processing__participation_flag_updates_test.go index 895227d613..b69802c9a2 100644 --- a/testing/spectest/mainnet/electra__epoch_processing__participation_flag_updates_test.go +++ b/testing/spectest/mainnet/electra__epoch_processing__participation_flag_updates_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/electra/epoch_processing" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/electra/epoch_processing" ) func TestMainnet_Electra_EpochProcessing_ParticipationFlag(t *testing.T) { diff --git a/testing/spectest/mainnet/electra__epoch_processing__pending_consolidations_test.go b/testing/spectest/mainnet/electra__epoch_processing__pending_consolidations_test.go index 9c37903eba..40bf3b19ed 100644 --- 
a/testing/spectest/mainnet/electra__epoch_processing__pending_consolidations_test.go +++ b/testing/spectest/mainnet/electra__epoch_processing__pending_consolidations_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/electra/epoch_processing" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/electra/epoch_processing" ) func TestMainnet_Electra_EpochProcessing_PendingConsolidations(t *testing.T) { diff --git a/testing/spectest/mainnet/electra__epoch_processing__pending_deposits_updates_test.go b/testing/spectest/mainnet/electra__epoch_processing__pending_deposits_updates_test.go index 907e1c0cde..869b45e904 100644 --- a/testing/spectest/mainnet/electra__epoch_processing__pending_deposits_updates_test.go +++ b/testing/spectest/mainnet/electra__epoch_processing__pending_deposits_updates_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/electra/epoch_processing" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/electra/epoch_processing" ) func TestMainnet_Electra_EpochProcessing_PendingDeposits(t *testing.T) { diff --git a/testing/spectest/mainnet/electra__epoch_processing__randao_mixes_reset_test.go b/testing/spectest/mainnet/electra__epoch_processing__randao_mixes_reset_test.go index f4e627b80d..7602684b7d 100644 --- a/testing/spectest/mainnet/electra__epoch_processing__randao_mixes_reset_test.go +++ b/testing/spectest/mainnet/electra__epoch_processing__randao_mixes_reset_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/electra/epoch_processing" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/electra/epoch_processing" ) func TestMainnet_Electra_EpochProcessing_RandaoMixesReset(t *testing.T) { diff --git a/testing/spectest/mainnet/electra__epoch_processing__registry_updates_test.go b/testing/spectest/mainnet/electra__epoch_processing__registry_updates_test.go index 6eecc9f7f5..ff3e790ca0 100644 --- a/testing/spectest/mainnet/electra__epoch_processing__registry_updates_test.go +++ b/testing/spectest/mainnet/electra__epoch_processing__registry_updates_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/electra/epoch_processing" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/electra/epoch_processing" ) func TestMainnet_Electra_EpochProcessing_RegistryUpdates(t *testing.T) { diff --git a/testing/spectest/mainnet/electra__epoch_processing__rewards_and_penalties_test.go b/testing/spectest/mainnet/electra__epoch_processing__rewards_and_penalties_test.go index 1bd1368bd2..8115604411 100644 --- a/testing/spectest/mainnet/electra__epoch_processing__rewards_and_penalties_test.go +++ b/testing/spectest/mainnet/electra__epoch_processing__rewards_and_penalties_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/electra/epoch_processing" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/electra/epoch_processing" ) func TestMainnet_Electra_EpochProcessing_RewardsAndPenalties(t *testing.T) { diff --git a/testing/spectest/mainnet/electra__epoch_processing__slashings_reset_test.go b/testing/spectest/mainnet/electra__epoch_processing__slashings_reset_test.go index a29c86dd1f..27c7331422 100644 --- a/testing/spectest/mainnet/electra__epoch_processing__slashings_reset_test.go +++ 
b/testing/spectest/mainnet/electra__epoch_processing__slashings_reset_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/electra/epoch_processing" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/electra/epoch_processing" ) func TestMainnet_Electra_EpochProcessing_SlashingsReset(t *testing.T) { diff --git a/testing/spectest/mainnet/electra__epoch_processing__slashings_test.go b/testing/spectest/mainnet/electra__epoch_processing__slashings_test.go index 706f4f136a..44cabc3857 100644 --- a/testing/spectest/mainnet/electra__epoch_processing__slashings_test.go +++ b/testing/spectest/mainnet/electra__epoch_processing__slashings_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/electra/epoch_processing" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/electra/epoch_processing" ) func TestMainnet_Electra_EpochProcessing_Slashings(t *testing.T) { diff --git a/testing/spectest/mainnet/electra__finality__finality_test.go b/testing/spectest/mainnet/electra__finality__finality_test.go index 638b7f7933..e4abee2b63 100644 --- a/testing/spectest/mainnet/electra__finality__finality_test.go +++ b/testing/spectest/mainnet/electra__finality__finality_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/electra/finality" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/electra/finality" ) func TestMainnet_Electra_Finality(t *testing.T) { diff --git a/testing/spectest/mainnet/electra__fork_helper__upgrade_to_electra_test.go b/testing/spectest/mainnet/electra__fork_helper__upgrade_to_electra_test.go index 79fea1a3d5..2559ef6904 100644 --- a/testing/spectest/mainnet/electra__fork_helper__upgrade_to_electra_test.go +++ b/testing/spectest/mainnet/electra__fork_helper__upgrade_to_electra_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/electra/fork" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/electra/fork" ) func TestMainnet_UpgradeToElectra(t *testing.T) { diff --git a/testing/spectest/mainnet/electra__fork_transition__transition_test.go b/testing/spectest/mainnet/electra__fork_transition__transition_test.go index 00a0fc86ae..1c104f0926 100644 --- a/testing/spectest/mainnet/electra__fork_transition__transition_test.go +++ b/testing/spectest/mainnet/electra__fork_transition__transition_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/electra/fork" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/electra/fork" ) func TestMainnet_Electra_Transition(t *testing.T) { diff --git a/testing/spectest/mainnet/electra__forkchoice__forkchoice_test.go b/testing/spectest/mainnet/electra__forkchoice__forkchoice_test.go index 7ce9594138..915f263954 100644 --- a/testing/spectest/mainnet/electra__forkchoice__forkchoice_test.go +++ b/testing/spectest/mainnet/electra__forkchoice__forkchoice_test.go @@ -3,8 +3,8 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/runtime/version" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/common/forkchoice" + "github.com/OffchainLabs/prysm/v7/runtime/version" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/common/forkchoice" ) func TestMainnet_Electra_Forkchoice(t *testing.T) { diff --git 
a/testing/spectest/mainnet/electra__light_client__single_merkle_proof_test.go b/testing/spectest/mainnet/electra__light_client__single_merkle_proof_test.go index 18ca1761e5..ff3e0d4072 100644 --- a/testing/spectest/mainnet/electra__light_client__single_merkle_proof_test.go +++ b/testing/spectest/mainnet/electra__light_client__single_merkle_proof_test.go @@ -3,8 +3,8 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/runtime/version" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/common/light_client" + "github.com/OffchainLabs/prysm/v7/runtime/version" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/common/light_client" ) func TestMainnet_Electra_LightClient_SingleMerkleProof(t *testing.T) { diff --git a/testing/spectest/mainnet/electra__merkle_proof__merkle_proof_test.go b/testing/spectest/mainnet/electra__merkle_proof__merkle_proof_test.go index 08108b6846..36eeaeb2e7 100644 --- a/testing/spectest/mainnet/electra__merkle_proof__merkle_proof_test.go +++ b/testing/spectest/mainnet/electra__merkle_proof__merkle_proof_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/electra/merkle_proof" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/electra/merkle_proof" ) func TestMainnet_Electra_MerkleProof(t *testing.T) { diff --git a/testing/spectest/mainnet/electra__operations__attestation_test.go b/testing/spectest/mainnet/electra__operations__attestation_test.go index 895a54be7c..79f8fd3a43 100644 --- a/testing/spectest/mainnet/electra__operations__attestation_test.go +++ b/testing/spectest/mainnet/electra__operations__attestation_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/electra/operations" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/electra/operations" ) func TestMainnet_Electra_Operations_Attestation(t *testing.T) { diff --git a/testing/spectest/mainnet/electra__operations__attester_slashing_test.go b/testing/spectest/mainnet/electra__operations__attester_slashing_test.go index e00413ec6e..a287320554 100644 --- a/testing/spectest/mainnet/electra__operations__attester_slashing_test.go +++ b/testing/spectest/mainnet/electra__operations__attester_slashing_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/electra/operations" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/electra/operations" ) func TestMainnet_Electra_Operations_AttesterSlashing(t *testing.T) { diff --git a/testing/spectest/mainnet/electra__operations__block_header_test.go b/testing/spectest/mainnet/electra__operations__block_header_test.go index 11d62ad954..73950d60c6 100644 --- a/testing/spectest/mainnet/electra__operations__block_header_test.go +++ b/testing/spectest/mainnet/electra__operations__block_header_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/electra/operations" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/electra/operations" ) func TestMainnet_Electra_Operations_BlockHeader(t *testing.T) { diff --git a/testing/spectest/mainnet/electra__operations__bls_to_execution_change_test.go b/testing/spectest/mainnet/electra__operations__bls_to_execution_change_test.go index f3aa0c1cd7..65803e00dd 100644 --- a/testing/spectest/mainnet/electra__operations__bls_to_execution_change_test.go +++ 
b/testing/spectest/mainnet/electra__operations__bls_to_execution_change_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/electra/operations" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/electra/operations" ) func TestMainnet_Electra_Operations_BLSToExecutionChange(t *testing.T) { diff --git a/testing/spectest/mainnet/electra__operations__consolidation_test.go b/testing/spectest/mainnet/electra__operations__consolidation_test.go index 25d47a9825..201e1f5bc1 100644 --- a/testing/spectest/mainnet/electra__operations__consolidation_test.go +++ b/testing/spectest/mainnet/electra__operations__consolidation_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/electra/operations" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/electra/operations" ) func TestMainnet_Electra_Operations_Consolidation(t *testing.T) { diff --git a/testing/spectest/mainnet/electra__operations__deposit_requests_test.go b/testing/spectest/mainnet/electra__operations__deposit_requests_test.go index b97524d510..f9f59c1620 100644 --- a/testing/spectest/mainnet/electra__operations__deposit_requests_test.go +++ b/testing/spectest/mainnet/electra__operations__deposit_requests_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/electra/operations" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/electra/operations" ) func TestMainnet_Electra_Operations_DepositRequests(t *testing.T) { diff --git a/testing/spectest/mainnet/electra__operations__deposit_test.go b/testing/spectest/mainnet/electra__operations__deposit_test.go index dc06c5b685..f2701061c6 100644 --- a/testing/spectest/mainnet/electra__operations__deposit_test.go +++ b/testing/spectest/mainnet/electra__operations__deposit_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/electra/operations" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/electra/operations" ) func TestMainnet_Electra_Operations_Deposit(t *testing.T) { diff --git a/testing/spectest/mainnet/electra__operations__execution_layer_withdrawals_test.go b/testing/spectest/mainnet/electra__operations__execution_layer_withdrawals_test.go index 1ec0fb6a7f..9fd3cc60ad 100644 --- a/testing/spectest/mainnet/electra__operations__execution_layer_withdrawals_test.go +++ b/testing/spectest/mainnet/electra__operations__execution_layer_withdrawals_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/electra/operations" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/electra/operations" ) func TestMainnet_Electra_Operations_WithdrawalRequest(t *testing.T) { diff --git a/testing/spectest/mainnet/electra__operations__execution_payload_test.go b/testing/spectest/mainnet/electra__operations__execution_payload_test.go index a5df3129c5..9926b74488 100644 --- a/testing/spectest/mainnet/electra__operations__execution_payload_test.go +++ b/testing/spectest/mainnet/electra__operations__execution_payload_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/electra/operations" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/electra/operations" ) func TestMainnet_Electra_Operations_PayloadExecution(t *testing.T) { diff --git 
a/testing/spectest/mainnet/electra__operations__proposer_slashing_test.go b/testing/spectest/mainnet/electra__operations__proposer_slashing_test.go index 3054c692a3..6f2c25cb44 100644 --- a/testing/spectest/mainnet/electra__operations__proposer_slashing_test.go +++ b/testing/spectest/mainnet/electra__operations__proposer_slashing_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/electra/operations" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/electra/operations" ) func TestMainnet_Electra_Operations_ProposerSlashing(t *testing.T) { diff --git a/testing/spectest/mainnet/electra__operations__sync_committee_test.go b/testing/spectest/mainnet/electra__operations__sync_committee_test.go index d29612f0e2..32ab6a3a5d 100644 --- a/testing/spectest/mainnet/electra__operations__sync_committee_test.go +++ b/testing/spectest/mainnet/electra__operations__sync_committee_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/electra/operations" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/electra/operations" ) func TestMainnet_Electra_Operations_SyncCommittee(t *testing.T) { diff --git a/testing/spectest/mainnet/electra__operations__voluntary_exit_test.go b/testing/spectest/mainnet/electra__operations__voluntary_exit_test.go index 5c692cbdcf..cf9db72185 100644 --- a/testing/spectest/mainnet/electra__operations__voluntary_exit_test.go +++ b/testing/spectest/mainnet/electra__operations__voluntary_exit_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/electra/operations" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/electra/operations" ) func TestMainnet_Electra_Operations_VoluntaryExit(t *testing.T) { diff --git a/testing/spectest/mainnet/electra__operations__withdrawals_test.go b/testing/spectest/mainnet/electra__operations__withdrawals_test.go index 59a24e2ab2..852f242f8a 100644 --- a/testing/spectest/mainnet/electra__operations__withdrawals_test.go +++ b/testing/spectest/mainnet/electra__operations__withdrawals_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/electra/operations" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/electra/operations" ) func TestMainnet_Electra_Operations_Withdrawals(t *testing.T) { diff --git a/testing/spectest/mainnet/electra__random__random_test.go b/testing/spectest/mainnet/electra__random__random_test.go index cf69d5cc1e..a0722ac201 100644 --- a/testing/spectest/mainnet/electra__random__random_test.go +++ b/testing/spectest/mainnet/electra__random__random_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/electra/sanity" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/electra/sanity" ) func TestMainnet_Electra_Random(t *testing.T) { diff --git a/testing/spectest/mainnet/electra__rewards__rewards_test.go b/testing/spectest/mainnet/electra__rewards__rewards_test.go index 7267ba4563..8826c4b045 100644 --- a/testing/spectest/mainnet/electra__rewards__rewards_test.go +++ b/testing/spectest/mainnet/electra__rewards__rewards_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/electra/rewards" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/electra/rewards" ) func TestMainnet_Electra_Rewards(t 
*testing.T) { diff --git a/testing/spectest/mainnet/electra__sanity__blocks_test.go b/testing/spectest/mainnet/electra__sanity__blocks_test.go index abb6f047ff..c848754b6d 100644 --- a/testing/spectest/mainnet/electra__sanity__blocks_test.go +++ b/testing/spectest/mainnet/electra__sanity__blocks_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/electra/sanity" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/electra/sanity" ) func TestMainnet_Electra_Sanity_Blocks(t *testing.T) { diff --git a/testing/spectest/mainnet/electra__sanity__slots_test.go b/testing/spectest/mainnet/electra__sanity__slots_test.go index 50e845ee29..43a25dd3c2 100644 --- a/testing/spectest/mainnet/electra__sanity__slots_test.go +++ b/testing/spectest/mainnet/electra__sanity__slots_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/electra/sanity" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/electra/sanity" ) func TestMainnet_Electra_Sanity_Slots(t *testing.T) { diff --git a/testing/spectest/mainnet/electra__ssz_static__ssz_static_test.go b/testing/spectest/mainnet/electra__ssz_static__ssz_static_test.go index 11b4b904b9..61d65f784b 100644 --- a/testing/spectest/mainnet/electra__ssz_static__ssz_static_test.go +++ b/testing/spectest/mainnet/electra__ssz_static__ssz_static_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/electra/ssz_static" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/electra/ssz_static" ) func TestMainnet_Electra_SSZStatic(t *testing.T) { diff --git a/testing/spectest/mainnet/fulu__epoch_processing__effective_balance_updates_test.go b/testing/spectest/mainnet/fulu__epoch_processing__effective_balance_updates_test.go index 0abb667c70..0500d40dbc 100644 --- a/testing/spectest/mainnet/fulu__epoch_processing__effective_balance_updates_test.go +++ b/testing/spectest/mainnet/fulu__epoch_processing__effective_balance_updates_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/fulu/epoch_processing" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/fulu/epoch_processing" ) func TestMainnet_fulu_EpochProcessing_EffectiveBalanceUpdates(t *testing.T) { diff --git a/testing/spectest/mainnet/fulu__epoch_processing__eth1_data_reset_test.go b/testing/spectest/mainnet/fulu__epoch_processing__eth1_data_reset_test.go index 391169f029..2f547acf53 100644 --- a/testing/spectest/mainnet/fulu__epoch_processing__eth1_data_reset_test.go +++ b/testing/spectest/mainnet/fulu__epoch_processing__eth1_data_reset_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/fulu/epoch_processing" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/fulu/epoch_processing" ) func TestMainnet_Fulu_EpochProcessing_Eth1DataReset(t *testing.T) { diff --git a/testing/spectest/mainnet/fulu__epoch_processing__historical_summaries_update_test.go b/testing/spectest/mainnet/fulu__epoch_processing__historical_summaries_update_test.go index 1b6bf8f081..567a1be5d3 100644 --- a/testing/spectest/mainnet/fulu__epoch_processing__historical_summaries_update_test.go +++ b/testing/spectest/mainnet/fulu__epoch_processing__historical_summaries_update_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - 
"github.com/OffchainLabs/prysm/v6/testing/spectest/shared/fulu/epoch_processing" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/fulu/epoch_processing" ) func TestMainnet_Fulu_EpochProcessing_HistoricalSummariesUpdate(t *testing.T) { diff --git a/testing/spectest/mainnet/fulu__epoch_processing__inactivity_updates_test.go b/testing/spectest/mainnet/fulu__epoch_processing__inactivity_updates_test.go index f2bd5d419b..621000e8c3 100644 --- a/testing/spectest/mainnet/fulu__epoch_processing__inactivity_updates_test.go +++ b/testing/spectest/mainnet/fulu__epoch_processing__inactivity_updates_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/fulu/epoch_processing" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/fulu/epoch_processing" ) func TestMainnet_Fulu_EpochProcessing_InactivityUpdates(t *testing.T) { diff --git a/testing/spectest/mainnet/fulu__epoch_processing__justification_and_finalization_test.go b/testing/spectest/mainnet/fulu__epoch_processing__justification_and_finalization_test.go index a277be899f..1bdc0b291e 100644 --- a/testing/spectest/mainnet/fulu__epoch_processing__justification_and_finalization_test.go +++ b/testing/spectest/mainnet/fulu__epoch_processing__justification_and_finalization_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/fulu/epoch_processing" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/fulu/epoch_processing" ) func TestMainnet_Fulu_EpochProcessing_JustificationAndFinalization(t *testing.T) { diff --git a/testing/spectest/mainnet/fulu__epoch_processing__participation_flag_updates_test.go b/testing/spectest/mainnet/fulu__epoch_processing__participation_flag_updates_test.go index 4d93f23f59..6a109e2c6c 100644 --- a/testing/spectest/mainnet/fulu__epoch_processing__participation_flag_updates_test.go +++ b/testing/spectest/mainnet/fulu__epoch_processing__participation_flag_updates_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/fulu/epoch_processing" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/fulu/epoch_processing" ) func TestMainnet_Fulu_EpochProcessing_ParticipationFlag(t *testing.T) { diff --git a/testing/spectest/mainnet/fulu__epoch_processing__pending_consolidations_test.go b/testing/spectest/mainnet/fulu__epoch_processing__pending_consolidations_test.go index 1e774642ef..9510c2463c 100644 --- a/testing/spectest/mainnet/fulu__epoch_processing__pending_consolidations_test.go +++ b/testing/spectest/mainnet/fulu__epoch_processing__pending_consolidations_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/fulu/epoch_processing" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/fulu/epoch_processing" ) func TestMainnet_Fulu_EpochProcessing_PendingConsolidations(t *testing.T) { diff --git a/testing/spectest/mainnet/fulu__epoch_processing__pending_deposits_updates_test.go b/testing/spectest/mainnet/fulu__epoch_processing__pending_deposits_updates_test.go index dfc4123859..e1386d23b2 100644 --- a/testing/spectest/mainnet/fulu__epoch_processing__pending_deposits_updates_test.go +++ b/testing/spectest/mainnet/fulu__epoch_processing__pending_deposits_updates_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/fulu/epoch_processing" + 
"github.com/OffchainLabs/prysm/v7/testing/spectest/shared/fulu/epoch_processing" ) func TestMainnet_Fulu_EpochProcessing_PendingDeposits(t *testing.T) { diff --git a/testing/spectest/mainnet/fulu__epoch_processing__proposer_lookahead_test.go b/testing/spectest/mainnet/fulu__epoch_processing__proposer_lookahead_test.go index 3ddb6a89a6..7b77cabc9d 100644 --- a/testing/spectest/mainnet/fulu__epoch_processing__proposer_lookahead_test.go +++ b/testing/spectest/mainnet/fulu__epoch_processing__proposer_lookahead_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/fulu/epoch_processing" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/fulu/epoch_processing" ) func TestMainnet_fulu_EpochProcessing_ProposerLookahead(t *testing.T) { diff --git a/testing/spectest/mainnet/fulu__epoch_processing__randao_mixes_reset_test.go b/testing/spectest/mainnet/fulu__epoch_processing__randao_mixes_reset_test.go index b8c3c53c44..1ade5dbb3a 100644 --- a/testing/spectest/mainnet/fulu__epoch_processing__randao_mixes_reset_test.go +++ b/testing/spectest/mainnet/fulu__epoch_processing__randao_mixes_reset_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/fulu/epoch_processing" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/fulu/epoch_processing" ) func TestMainnet_Fulu_EpochProcessing_RandaoMixesReset(t *testing.T) { diff --git a/testing/spectest/mainnet/fulu__epoch_processing__registry_updates_test.go b/testing/spectest/mainnet/fulu__epoch_processing__registry_updates_test.go index 91ae44cabe..d684e857d6 100644 --- a/testing/spectest/mainnet/fulu__epoch_processing__registry_updates_test.go +++ b/testing/spectest/mainnet/fulu__epoch_processing__registry_updates_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/fulu/epoch_processing" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/fulu/epoch_processing" ) func TestMainnet_Fulu_EpochProcessing_RegistryUpdates(t *testing.T) { diff --git a/testing/spectest/mainnet/fulu__epoch_processing__rewards_and_penalties_test.go b/testing/spectest/mainnet/fulu__epoch_processing__rewards_and_penalties_test.go index 909a9a5247..d41404603e 100644 --- a/testing/spectest/mainnet/fulu__epoch_processing__rewards_and_penalties_test.go +++ b/testing/spectest/mainnet/fulu__epoch_processing__rewards_and_penalties_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/fulu/epoch_processing" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/fulu/epoch_processing" ) func TestMainnet_Fulu_EpochProcessing_RewardsAndPenalties(t *testing.T) { diff --git a/testing/spectest/mainnet/fulu__epoch_processing__slashings_reset_test.go b/testing/spectest/mainnet/fulu__epoch_processing__slashings_reset_test.go index 20d830ad19..c0d71e6b7f 100644 --- a/testing/spectest/mainnet/fulu__epoch_processing__slashings_reset_test.go +++ b/testing/spectest/mainnet/fulu__epoch_processing__slashings_reset_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/fulu/epoch_processing" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/fulu/epoch_processing" ) func TestMainnet_Fulu_EpochProcessing_SlashingsReset(t *testing.T) { diff --git a/testing/spectest/mainnet/fulu__epoch_processing__slashings_test.go 
b/testing/spectest/mainnet/fulu__epoch_processing__slashings_test.go index aff33e3d02..9a01933d14 100644 --- a/testing/spectest/mainnet/fulu__epoch_processing__slashings_test.go +++ b/testing/spectest/mainnet/fulu__epoch_processing__slashings_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/fulu/epoch_processing" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/fulu/epoch_processing" ) func TestMainnet_Fulu_EpochProcessing_Slashings(t *testing.T) { diff --git a/testing/spectest/mainnet/fulu__finality__finality_test.go b/testing/spectest/mainnet/fulu__finality__finality_test.go index 27df315db3..aef3c0d027 100644 --- a/testing/spectest/mainnet/fulu__finality__finality_test.go +++ b/testing/spectest/mainnet/fulu__finality__finality_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/fulu/finality" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/fulu/finality" ) func TestMainnet_Fulu_Finality(t *testing.T) { diff --git a/testing/spectest/mainnet/fulu__fork__upgrade_to_fulu_test.go b/testing/spectest/mainnet/fulu__fork__upgrade_to_fulu_test.go index 1378ba768f..d1ef8bd388 100644 --- a/testing/spectest/mainnet/fulu__fork__upgrade_to_fulu_test.go +++ b/testing/spectest/mainnet/fulu__fork__upgrade_to_fulu_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/fulu/fork" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/fulu/fork" ) func TestMainnet_UpgradeToFulu(t *testing.T) { diff --git a/testing/spectest/mainnet/fulu__fork_transition__transition_test.go b/testing/spectest/mainnet/fulu__fork_transition__transition_test.go index 31a4d38dd0..ee52b938d2 100644 --- a/testing/spectest/mainnet/fulu__fork_transition__transition_test.go +++ b/testing/spectest/mainnet/fulu__fork_transition__transition_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/fulu/fork" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/fulu/fork" ) func TestMainnet_Fulu_Transition(t *testing.T) { diff --git a/testing/spectest/mainnet/fulu__forkchoice__forkchoice_test.go b/testing/spectest/mainnet/fulu__forkchoice__forkchoice_test.go index 143240e465..446193bc48 100644 --- a/testing/spectest/mainnet/fulu__forkchoice__forkchoice_test.go +++ b/testing/spectest/mainnet/fulu__forkchoice__forkchoice_test.go @@ -3,8 +3,8 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/runtime/version" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/common/forkchoice" + "github.com/OffchainLabs/prysm/v7/runtime/version" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/common/forkchoice" ) func TestMainnet_Fulu_Forkchoice(t *testing.T) { diff --git a/testing/spectest/mainnet/fulu__light_client__single_merkle_proof_test.go b/testing/spectest/mainnet/fulu__light_client__single_merkle_proof_test.go index e073c02b42..aaab983205 100644 --- a/testing/spectest/mainnet/fulu__light_client__single_merkle_proof_test.go +++ b/testing/spectest/mainnet/fulu__light_client__single_merkle_proof_test.go @@ -3,8 +3,8 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/runtime/version" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/common/light_client" + "github.com/OffchainLabs/prysm/v7/runtime/version" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/common/light_client" 
) func TestMainnet_Fulu_LightClient_SingleMerkleProof(t *testing.T) { diff --git a/testing/spectest/mainnet/fulu__merkle_proof__merkle_proof_test.go b/testing/spectest/mainnet/fulu__merkle_proof__merkle_proof_test.go index f6596fa3a4..1e4558a2bd 100644 --- a/testing/spectest/mainnet/fulu__merkle_proof__merkle_proof_test.go +++ b/testing/spectest/mainnet/fulu__merkle_proof__merkle_proof_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/fulu/merkle_proof" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/fulu/merkle_proof" ) func TestMainnet_Fulu_MerkleProof(t *testing.T) { diff --git a/testing/spectest/mainnet/fulu__networking__custody_groups_test.go b/testing/spectest/mainnet/fulu__networking__custody_groups_test.go index 0116edffec..86f2ca3ecc 100644 --- a/testing/spectest/mainnet/fulu__networking__custody_groups_test.go +++ b/testing/spectest/mainnet/fulu__networking__custody_groups_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/fulu/networking" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/fulu/networking" ) func TestMainnet_Fulu_Networking_CustodyGroups(t *testing.T) { diff --git a/testing/spectest/mainnet/fulu__operations__attestation_test.go b/testing/spectest/mainnet/fulu__operations__attestation_test.go index ec09eeb7e1..a3d75c579c 100644 --- a/testing/spectest/mainnet/fulu__operations__attestation_test.go +++ b/testing/spectest/mainnet/fulu__operations__attestation_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/fulu/operations" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/fulu/operations" ) func TestMainnet_Fulu_Operations_Attestation(t *testing.T) { diff --git a/testing/spectest/mainnet/fulu__operations__attester_slashing_test.go b/testing/spectest/mainnet/fulu__operations__attester_slashing_test.go index c933f3c482..a70c29ffa5 100644 --- a/testing/spectest/mainnet/fulu__operations__attester_slashing_test.go +++ b/testing/spectest/mainnet/fulu__operations__attester_slashing_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/fulu/operations" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/fulu/operations" ) func TestMainnet_Fulu_Operations_AttesterSlashing(t *testing.T) { diff --git a/testing/spectest/mainnet/fulu__operations__block_header_test.go b/testing/spectest/mainnet/fulu__operations__block_header_test.go index a5e0a08b4f..3a08ee0ead 100644 --- a/testing/spectest/mainnet/fulu__operations__block_header_test.go +++ b/testing/spectest/mainnet/fulu__operations__block_header_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/fulu/operations" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/fulu/operations" ) func TestMainnet_Fulu_Operations_BlockHeader(t *testing.T) { diff --git a/testing/spectest/mainnet/fulu__operations__bls_to_execution_change_test.go b/testing/spectest/mainnet/fulu__operations__bls_to_execution_change_test.go index d549c2cbe2..20fe3f6a65 100644 --- a/testing/spectest/mainnet/fulu__operations__bls_to_execution_change_test.go +++ b/testing/spectest/mainnet/fulu__operations__bls_to_execution_change_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/fulu/operations" + 
"github.com/OffchainLabs/prysm/v7/testing/spectest/shared/fulu/operations" ) func TestMainnet_Fulu_Operations_BLSToExecutionChange(t *testing.T) { diff --git a/testing/spectest/mainnet/fulu__operations__consolidation_test.go b/testing/spectest/mainnet/fulu__operations__consolidation_test.go index b957d5cf90..d5e599ffc1 100644 --- a/testing/spectest/mainnet/fulu__operations__consolidation_test.go +++ b/testing/spectest/mainnet/fulu__operations__consolidation_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/fulu/operations" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/fulu/operations" ) func TestMainnet_Fulu_Operations_Consolidation(t *testing.T) { diff --git a/testing/spectest/mainnet/fulu__operations__deposit_requests_test.go b/testing/spectest/mainnet/fulu__operations__deposit_requests_test.go index c8c7de74b5..32a88fcec1 100644 --- a/testing/spectest/mainnet/fulu__operations__deposit_requests_test.go +++ b/testing/spectest/mainnet/fulu__operations__deposit_requests_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/fulu/operations" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/fulu/operations" ) func TestMainnet_Fulu_Operations_DepositRequests(t *testing.T) { diff --git a/testing/spectest/mainnet/fulu__operations__deposit_test.go b/testing/spectest/mainnet/fulu__operations__deposit_test.go index 26ec66f2a7..46b6cdf840 100644 --- a/testing/spectest/mainnet/fulu__operations__deposit_test.go +++ b/testing/spectest/mainnet/fulu__operations__deposit_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/fulu/operations" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/fulu/operations" ) func TestMainnet_Fulu_Operations_Deposit(t *testing.T) { diff --git a/testing/spectest/mainnet/fulu__operations__execution_layer_withdrawals_test.go b/testing/spectest/mainnet/fulu__operations__execution_layer_withdrawals_test.go index 712895cd82..94009ef3e8 100644 --- a/testing/spectest/mainnet/fulu__operations__execution_layer_withdrawals_test.go +++ b/testing/spectest/mainnet/fulu__operations__execution_layer_withdrawals_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/fulu/operations" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/fulu/operations" ) func TestMainnet_Fulu_Operations_WithdrawalRequest(t *testing.T) { diff --git a/testing/spectest/mainnet/fulu__operations__execution_payload_test.go b/testing/spectest/mainnet/fulu__operations__execution_payload_test.go index 84776eaffe..67be879741 100644 --- a/testing/spectest/mainnet/fulu__operations__execution_payload_test.go +++ b/testing/spectest/mainnet/fulu__operations__execution_payload_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/fulu/operations" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/fulu/operations" ) func TestMainnet_Fulu_Operations_PayloadExecution(t *testing.T) { diff --git a/testing/spectest/mainnet/fulu__operations__proposer_slashing_test.go b/testing/spectest/mainnet/fulu__operations__proposer_slashing_test.go index b31d245b7b..5adcd7d28b 100644 --- a/testing/spectest/mainnet/fulu__operations__proposer_slashing_test.go +++ b/testing/spectest/mainnet/fulu__operations__proposer_slashing_test.go @@ -3,7 +3,7 @@ package mainnet import ( 
"testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/fulu/operations" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/fulu/operations" ) func TestMainnet_Fulu_Operations_ProposerSlashing(t *testing.T) { diff --git a/testing/spectest/mainnet/fulu__operations__sync_committee_test.go b/testing/spectest/mainnet/fulu__operations__sync_committee_test.go index 03061e92d2..7ffd060635 100644 --- a/testing/spectest/mainnet/fulu__operations__sync_committee_test.go +++ b/testing/spectest/mainnet/fulu__operations__sync_committee_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/fulu/operations" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/fulu/operations" ) func TestMainnet_Fulu_Operations_SyncCommittee(t *testing.T) { diff --git a/testing/spectest/mainnet/fulu__operations__voluntary_exit_test.go b/testing/spectest/mainnet/fulu__operations__voluntary_exit_test.go index afd0aa282d..94a0fc11aa 100644 --- a/testing/spectest/mainnet/fulu__operations__voluntary_exit_test.go +++ b/testing/spectest/mainnet/fulu__operations__voluntary_exit_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/fulu/operations" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/fulu/operations" ) func TestMainnet_Fulu_Operations_VoluntaryExit(t *testing.T) { diff --git a/testing/spectest/mainnet/fulu__operations__withdrawals_test.go b/testing/spectest/mainnet/fulu__operations__withdrawals_test.go index a72cbafa69..bb78df79f1 100644 --- a/testing/spectest/mainnet/fulu__operations__withdrawals_test.go +++ b/testing/spectest/mainnet/fulu__operations__withdrawals_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/fulu/operations" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/fulu/operations" ) func TestMainnet_Fulu_Operations_Withdrawals(t *testing.T) { diff --git a/testing/spectest/mainnet/fulu__random__random_test.go b/testing/spectest/mainnet/fulu__random__random_test.go index e31ed9a46e..8095795090 100644 --- a/testing/spectest/mainnet/fulu__random__random_test.go +++ b/testing/spectest/mainnet/fulu__random__random_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/fulu/sanity" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/fulu/sanity" ) func TestMainnet_Fulu_Random(t *testing.T) { diff --git a/testing/spectest/mainnet/fulu__rewards__rewards_test.go b/testing/spectest/mainnet/fulu__rewards__rewards_test.go index 131179191e..887f6e5328 100644 --- a/testing/spectest/mainnet/fulu__rewards__rewards_test.go +++ b/testing/spectest/mainnet/fulu__rewards__rewards_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/fulu/rewards" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/fulu/rewards" ) func TestMainnet_Fulu_Rewards(t *testing.T) { diff --git a/testing/spectest/mainnet/fulu__sanity__blocks_test.go b/testing/spectest/mainnet/fulu__sanity__blocks_test.go index 191cb37273..1d3a7ba5c8 100644 --- a/testing/spectest/mainnet/fulu__sanity__blocks_test.go +++ b/testing/spectest/mainnet/fulu__sanity__blocks_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/fulu/sanity" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/fulu/sanity" ) func 
TestMainnet_Fulu_Sanity_Blocks(t *testing.T) { diff --git a/testing/spectest/mainnet/fulu__sanity__slots_test.go b/testing/spectest/mainnet/fulu__sanity__slots_test.go index 4d52c0ed76..db571de59e 100644 --- a/testing/spectest/mainnet/fulu__sanity__slots_test.go +++ b/testing/spectest/mainnet/fulu__sanity__slots_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/fulu/sanity" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/fulu/sanity" ) func TestMainnet_Fulu_Sanity_Slots(t *testing.T) { diff --git a/testing/spectest/mainnet/fulu__ssz_static__ssz_static_test.go b/testing/spectest/mainnet/fulu__ssz_static__ssz_static_test.go index 473e60094e..ce1ee67689 100644 --- a/testing/spectest/mainnet/fulu__ssz_static__ssz_static_test.go +++ b/testing/spectest/mainnet/fulu__ssz_static__ssz_static_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/fulu/ssz_static" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/fulu/ssz_static" ) func TestMainnet_Fulu_SSZStatic(t *testing.T) { diff --git a/testing/spectest/mainnet/gloas__ssz_static__ssz_static_test.go b/testing/spectest/mainnet/gloas__ssz_static__ssz_static_test.go index 653e9d340e..3d63dce958 100644 --- a/testing/spectest/mainnet/gloas__ssz_static__ssz_static_test.go +++ b/testing/spectest/mainnet/gloas__ssz_static__ssz_static_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/gloas/ssz_static" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/gloas/ssz_static" ) func TestMainnet_Gloas_SSZStatic(t *testing.T) { diff --git a/testing/spectest/mainnet/phase0__epoch_processing__effective_balance_updates_test.go b/testing/spectest/mainnet/phase0__epoch_processing__effective_balance_updates_test.go index f41a3f8927..18e03ca73c 100644 --- a/testing/spectest/mainnet/phase0__epoch_processing__effective_balance_updates_test.go +++ b/testing/spectest/mainnet/phase0__epoch_processing__effective_balance_updates_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/phase0/epoch_processing" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/phase0/epoch_processing" ) func TestMainnet_Phase0_EpochProcessing_EffectiveBalanceUpdates(t *testing.T) { diff --git a/testing/spectest/mainnet/phase0__epoch_processing__epoch_processing_test.go b/testing/spectest/mainnet/phase0__epoch_processing__epoch_processing_test.go index df0103e8e1..cc4ef4dbd7 100644 --- a/testing/spectest/mainnet/phase0__epoch_processing__epoch_processing_test.go +++ b/testing/spectest/mainnet/phase0__epoch_processing__epoch_processing_test.go @@ -4,7 +4,7 @@ import ( "os" "testing" - "github.com/OffchainLabs/prysm/v6/config/params" + "github.com/OffchainLabs/prysm/v7/config/params" ) func TestMain(m *testing.M) { diff --git a/testing/spectest/mainnet/phase0__epoch_processing__eth1_data_reset_test.go b/testing/spectest/mainnet/phase0__epoch_processing__eth1_data_reset_test.go index 0dddf4f449..79b51353a7 100644 --- a/testing/spectest/mainnet/phase0__epoch_processing__eth1_data_reset_test.go +++ b/testing/spectest/mainnet/phase0__epoch_processing__eth1_data_reset_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/phase0/epoch_processing" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/phase0/epoch_processing" ) func 
TestMainnet_Phase0_EpochProcessing_Eth1DataReset(t *testing.T) { diff --git a/testing/spectest/mainnet/phase0__epoch_processing__historical_roots_update_test.go b/testing/spectest/mainnet/phase0__epoch_processing__historical_roots_update_test.go index 0f48927225..af3ac5a57d 100644 --- a/testing/spectest/mainnet/phase0__epoch_processing__historical_roots_update_test.go +++ b/testing/spectest/mainnet/phase0__epoch_processing__historical_roots_update_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/phase0/epoch_processing" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/phase0/epoch_processing" ) func TestMainnet_Phase0_EpochProcessing_HistoricalRootsUpdate(t *testing.T) { diff --git a/testing/spectest/mainnet/phase0__epoch_processing__justification_and_finalization_test.go b/testing/spectest/mainnet/phase0__epoch_processing__justification_and_finalization_test.go index f973afb8a6..07ffbdc955 100644 --- a/testing/spectest/mainnet/phase0__epoch_processing__justification_and_finalization_test.go +++ b/testing/spectest/mainnet/phase0__epoch_processing__justification_and_finalization_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/phase0/epoch_processing" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/phase0/epoch_processing" ) func TestMainnet_Phase0_EpochProcessing_JustificationAndFinalization(t *testing.T) { diff --git a/testing/spectest/mainnet/phase0__epoch_processing__participation_record_updates_test.go b/testing/spectest/mainnet/phase0__epoch_processing__participation_record_updates_test.go index 1d51e3474f..88fee16320 100644 --- a/testing/spectest/mainnet/phase0__epoch_processing__participation_record_updates_test.go +++ b/testing/spectest/mainnet/phase0__epoch_processing__participation_record_updates_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/phase0/epoch_processing" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/phase0/epoch_processing" ) func TestMainnet_Phase0_EpochProcessing_ParticipationRecordUpdates(t *testing.T) { diff --git a/testing/spectest/mainnet/phase0__epoch_processing__randao_mixes_reset_test.go b/testing/spectest/mainnet/phase0__epoch_processing__randao_mixes_reset_test.go index 010145d932..b49b081024 100644 --- a/testing/spectest/mainnet/phase0__epoch_processing__randao_mixes_reset_test.go +++ b/testing/spectest/mainnet/phase0__epoch_processing__randao_mixes_reset_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/phase0/epoch_processing" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/phase0/epoch_processing" ) func TestMainnet_Phase0_EpochProcessing_RandaoMixesReset(t *testing.T) { diff --git a/testing/spectest/mainnet/phase0__epoch_processing__registry_updates_test.go b/testing/spectest/mainnet/phase0__epoch_processing__registry_updates_test.go index 3bf29c0bad..bb84ff97dd 100644 --- a/testing/spectest/mainnet/phase0__epoch_processing__registry_updates_test.go +++ b/testing/spectest/mainnet/phase0__epoch_processing__registry_updates_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/phase0/epoch_processing" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/phase0/epoch_processing" ) func TestMainnet_Phase0_EpochProcessing_ResetRegistryUpdates(t *testing.T) { 
diff --git a/testing/spectest/mainnet/phase0__epoch_processing__rewards_and_penalties_test.go b/testing/spectest/mainnet/phase0__epoch_processing__rewards_and_penalties_test.go index bd8ad7d45e..3d3ca1aa9f 100644 --- a/testing/spectest/mainnet/phase0__epoch_processing__rewards_and_penalties_test.go +++ b/testing/spectest/mainnet/phase0__epoch_processing__rewards_and_penalties_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/phase0/epoch_processing" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/phase0/epoch_processing" ) func TestMainnet_Phase0_EpochProcessing_RewardsAndPenalties(t *testing.T) { diff --git a/testing/spectest/mainnet/phase0__epoch_processing__slashings_reset_test.go b/testing/spectest/mainnet/phase0__epoch_processing__slashings_reset_test.go index 02786667fd..c1f04e9184 100644 --- a/testing/spectest/mainnet/phase0__epoch_processing__slashings_reset_test.go +++ b/testing/spectest/mainnet/phase0__epoch_processing__slashings_reset_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/phase0/epoch_processing" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/phase0/epoch_processing" ) func TestMainnet_Phase0_EpochProcessing_SlashingsReset(t *testing.T) { diff --git a/testing/spectest/mainnet/phase0__epoch_processing__slashings_test.go b/testing/spectest/mainnet/phase0__epoch_processing__slashings_test.go index ce8c2bbc6d..8fff3ab5cd 100644 --- a/testing/spectest/mainnet/phase0__epoch_processing__slashings_test.go +++ b/testing/spectest/mainnet/phase0__epoch_processing__slashings_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/phase0/epoch_processing" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/phase0/epoch_processing" ) func TestMainnet_Phase0_EpochProcessing_Slashings(t *testing.T) { diff --git a/testing/spectest/mainnet/phase0__finality__finality_test.go b/testing/spectest/mainnet/phase0__finality__finality_test.go index 8d712269ab..255db62812 100644 --- a/testing/spectest/mainnet/phase0__finality__finality_test.go +++ b/testing/spectest/mainnet/phase0__finality__finality_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/phase0/finality" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/phase0/finality" ) func TestMainnet_Phase0_Finality(t *testing.T) { diff --git a/testing/spectest/mainnet/phase0__operations__attestation_test.go b/testing/spectest/mainnet/phase0__operations__attestation_test.go index b1c06bde9f..43d3a6fe7a 100644 --- a/testing/spectest/mainnet/phase0__operations__attestation_test.go +++ b/testing/spectest/mainnet/phase0__operations__attestation_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/phase0/operations" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/phase0/operations" ) func TestMainnet_Phase0_Operations_Attestation(t *testing.T) { diff --git a/testing/spectest/mainnet/phase0__operations__attester_slashing_test.go b/testing/spectest/mainnet/phase0__operations__attester_slashing_test.go index e60d6af116..9a37875fda 100644 --- a/testing/spectest/mainnet/phase0__operations__attester_slashing_test.go +++ b/testing/spectest/mainnet/phase0__operations__attester_slashing_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - 
"github.com/OffchainLabs/prysm/v6/testing/spectest/shared/phase0/operations" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/phase0/operations" ) func TestMainnet_Phase0_Operations_AttesterSlashing(t *testing.T) { diff --git a/testing/spectest/mainnet/phase0__operations__block_header_test.go b/testing/spectest/mainnet/phase0__operations__block_header_test.go index ccffa92a9e..9a004bce60 100644 --- a/testing/spectest/mainnet/phase0__operations__block_header_test.go +++ b/testing/spectest/mainnet/phase0__operations__block_header_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/phase0/operations" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/phase0/operations" ) func TestMainnet_Phase0_Operations_BlockHeader(t *testing.T) { diff --git a/testing/spectest/mainnet/phase0__operations__deposit_test.go b/testing/spectest/mainnet/phase0__operations__deposit_test.go index 70898e3e2d..77cb418b94 100644 --- a/testing/spectest/mainnet/phase0__operations__deposit_test.go +++ b/testing/spectest/mainnet/phase0__operations__deposit_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/phase0/operations" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/phase0/operations" ) func TestMainnet_Phase0_Operations_Deposit(t *testing.T) { diff --git a/testing/spectest/mainnet/phase0__operations__proposer_slashing_test.go b/testing/spectest/mainnet/phase0__operations__proposer_slashing_test.go index 30fc795629..de6482ee3b 100644 --- a/testing/spectest/mainnet/phase0__operations__proposer_slashing_test.go +++ b/testing/spectest/mainnet/phase0__operations__proposer_slashing_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/phase0/operations" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/phase0/operations" ) func TestMainnet_Phase0_Operations_ProposerSlashing(t *testing.T) { diff --git a/testing/spectest/mainnet/phase0__operations__voluntary_exit_test.go b/testing/spectest/mainnet/phase0__operations__voluntary_exit_test.go index f37295e81d..003b2d3724 100644 --- a/testing/spectest/mainnet/phase0__operations__voluntary_exit_test.go +++ b/testing/spectest/mainnet/phase0__operations__voluntary_exit_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/phase0/operations" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/phase0/operations" ) func TestMainnet_Phase0_Operations_VoluntaryExit(t *testing.T) { diff --git a/testing/spectest/mainnet/phase0__random__random_test.go b/testing/spectest/mainnet/phase0__random__random_test.go index fe631bd03f..30ad28571a 100644 --- a/testing/spectest/mainnet/phase0__random__random_test.go +++ b/testing/spectest/mainnet/phase0__random__random_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/phase0/sanity" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/phase0/sanity" ) func TestMainnet_Phase0_Random(t *testing.T) { diff --git a/testing/spectest/mainnet/phase0__rewards__rewards_test.go b/testing/spectest/mainnet/phase0__rewards__rewards_test.go index 21d99009a5..d76b04e298 100644 --- a/testing/spectest/mainnet/phase0__rewards__rewards_test.go +++ b/testing/spectest/mainnet/phase0__rewards__rewards_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - 
"github.com/OffchainLabs/prysm/v6/testing/spectest/shared/phase0/rewards" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/phase0/rewards" ) func TestMainnet_Phase0_Rewards(t *testing.T) { diff --git a/testing/spectest/mainnet/phase0__sanity__blocks_test.go b/testing/spectest/mainnet/phase0__sanity__blocks_test.go index c7344cc109..9ce4bd2ad5 100644 --- a/testing/spectest/mainnet/phase0__sanity__blocks_test.go +++ b/testing/spectest/mainnet/phase0__sanity__blocks_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/phase0/sanity" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/phase0/sanity" ) func TestMainnet_Phase0_Sanity_Blocks(t *testing.T) { diff --git a/testing/spectest/mainnet/phase0__sanity__slots_test.go b/testing/spectest/mainnet/phase0__sanity__slots_test.go index 8ac09e6f97..2ff619d082 100644 --- a/testing/spectest/mainnet/phase0__sanity__slots_test.go +++ b/testing/spectest/mainnet/phase0__sanity__slots_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/phase0/sanity" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/phase0/sanity" ) func TestMainnet_Phase0_Sanity_Slots(t *testing.T) { diff --git a/testing/spectest/mainnet/phase0__ssz_static__ssz_static_test.go b/testing/spectest/mainnet/phase0__ssz_static__ssz_static_test.go index 651bed824d..34a25f36e6 100644 --- a/testing/spectest/mainnet/phase0__ssz_static__ssz_static_test.go +++ b/testing/spectest/mainnet/phase0__ssz_static__ssz_static_test.go @@ -3,7 +3,7 @@ package mainnet import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/phase0/ssz_static" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/phase0/ssz_static" ) func TestMainnet_Phase0_SSZStatic(t *testing.T) { diff --git a/testing/spectest/minimal/altair__epoch_processing__effective_balance_updates_test.go b/testing/spectest/minimal/altair__epoch_processing__effective_balance_updates_test.go index ae4fce99be..b14dacb3bb 100644 --- a/testing/spectest/minimal/altair__epoch_processing__effective_balance_updates_test.go +++ b/testing/spectest/minimal/altair__epoch_processing__effective_balance_updates_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/altair/epoch_processing" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/altair/epoch_processing" ) func TestMinimal_Altair_EpochProcessing_EffectiveBalanceUpdates(t *testing.T) { diff --git a/testing/spectest/minimal/altair__epoch_processing__eth1_data_reset_test.go b/testing/spectest/minimal/altair__epoch_processing__eth1_data_reset_test.go index 1202b906bc..947cd43239 100644 --- a/testing/spectest/minimal/altair__epoch_processing__eth1_data_reset_test.go +++ b/testing/spectest/minimal/altair__epoch_processing__eth1_data_reset_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/altair/epoch_processing" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/altair/epoch_processing" ) func TestMinimal_Altair_EpochProcessing_Eth1DataReset(t *testing.T) { diff --git a/testing/spectest/minimal/altair__epoch_processing__historical_roots_update_test.go b/testing/spectest/minimal/altair__epoch_processing__historical_roots_update_test.go index bf3498da01..26306d8112 100644 --- a/testing/spectest/minimal/altair__epoch_processing__historical_roots_update_test.go +++ 
b/testing/spectest/minimal/altair__epoch_processing__historical_roots_update_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/altair/epoch_processing" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/altair/epoch_processing" ) func TestMinimal_Altair_EpochProcessing_HistoricalRootsUpdate(t *testing.T) { diff --git a/testing/spectest/minimal/altair__epoch_processing__inactivity_updates_test.go b/testing/spectest/minimal/altair__epoch_processing__inactivity_updates_test.go index 465cfef719..8caca7daae 100644 --- a/testing/spectest/minimal/altair__epoch_processing__inactivity_updates_test.go +++ b/testing/spectest/minimal/altair__epoch_processing__inactivity_updates_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/altair/epoch_processing" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/altair/epoch_processing" ) func TestMinimal_Altair_EpochProcessing_InactivityUpdates(t *testing.T) { diff --git a/testing/spectest/minimal/altair__epoch_processing__justification_and_finalization_test.go b/testing/spectest/minimal/altair__epoch_processing__justification_and_finalization_test.go index b388691fa7..b88565ab49 100644 --- a/testing/spectest/minimal/altair__epoch_processing__justification_and_finalization_test.go +++ b/testing/spectest/minimal/altair__epoch_processing__justification_and_finalization_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/altair/epoch_processing" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/altair/epoch_processing" ) func TestMinimal_Altair_EpochProcessing_JustificationAndFinalization(t *testing.T) { diff --git a/testing/spectest/minimal/altair__epoch_processing__participation_flag_updates_test.go b/testing/spectest/minimal/altair__epoch_processing__participation_flag_updates_test.go index 59573063ee..74e16fb75c 100644 --- a/testing/spectest/minimal/altair__epoch_processing__participation_flag_updates_test.go +++ b/testing/spectest/minimal/altair__epoch_processing__participation_flag_updates_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/altair/epoch_processing" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/altair/epoch_processing" ) func TestMinimal_Altair_EpochProcessing_ParticipationFlag(t *testing.T) { diff --git a/testing/spectest/minimal/altair__epoch_processing__randao_mixes_reset_test.go b/testing/spectest/minimal/altair__epoch_processing__randao_mixes_reset_test.go index 18c3f9bec3..6a78764486 100644 --- a/testing/spectest/minimal/altair__epoch_processing__randao_mixes_reset_test.go +++ b/testing/spectest/minimal/altair__epoch_processing__randao_mixes_reset_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/altair/epoch_processing" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/altair/epoch_processing" ) func TestMinimal_Altair_EpochProcessing_RandaoMixesReset(t *testing.T) { diff --git a/testing/spectest/minimal/altair__epoch_processing__registry_updates_test.go b/testing/spectest/minimal/altair__epoch_processing__registry_updates_test.go index 262deee996..e0a3f7d9d5 100644 --- a/testing/spectest/minimal/altair__epoch_processing__registry_updates_test.go +++ b/testing/spectest/minimal/altair__epoch_processing__registry_updates_test.go @@ -3,7 +3,7 @@ package 
minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/altair/epoch_processing" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/altair/epoch_processing" ) func TestMinimal_Altair_EpochProcessing_ResetRegistryUpdates(t *testing.T) { diff --git a/testing/spectest/minimal/altair__epoch_processing__rewards_and_penalties_test.go b/testing/spectest/minimal/altair__epoch_processing__rewards_and_penalties_test.go index 6d72fd164b..788a654aa5 100644 --- a/testing/spectest/minimal/altair__epoch_processing__rewards_and_penalties_test.go +++ b/testing/spectest/minimal/altair__epoch_processing__rewards_and_penalties_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/altair/epoch_processing" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/altair/epoch_processing" ) func TestMinimal_Altair_EpochProcessing_RewardsAndPenalties(t *testing.T) { diff --git a/testing/spectest/minimal/altair__epoch_processing__slashings_reset_test.go b/testing/spectest/minimal/altair__epoch_processing__slashings_reset_test.go index 74a8b7e7ee..dcb7a88fde 100644 --- a/testing/spectest/minimal/altair__epoch_processing__slashings_reset_test.go +++ b/testing/spectest/minimal/altair__epoch_processing__slashings_reset_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/altair/epoch_processing" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/altair/epoch_processing" ) func TestMinimal_Altair_EpochProcessing_SlashingsReset(t *testing.T) { diff --git a/testing/spectest/minimal/altair__epoch_processing__slashings_test.go b/testing/spectest/minimal/altair__epoch_processing__slashings_test.go index 94d82f8a1a..1b58c2909f 100644 --- a/testing/spectest/minimal/altair__epoch_processing__slashings_test.go +++ b/testing/spectest/minimal/altair__epoch_processing__slashings_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/altair/epoch_processing" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/altair/epoch_processing" ) func TestMinimal_Altair_EpochProcessing_Slashings(t *testing.T) { diff --git a/testing/spectest/minimal/altair__finality__finality_test.go b/testing/spectest/minimal/altair__finality__finality_test.go index 7be4b3f71c..96d913b2e5 100644 --- a/testing/spectest/minimal/altair__finality__finality_test.go +++ b/testing/spectest/minimal/altair__finality__finality_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/altair/finality" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/altair/finality" ) func TestMinimal_Altair_Finality(t *testing.T) { diff --git a/testing/spectest/minimal/altair__fork__upgrade_to_altair_test.go b/testing/spectest/minimal/altair__fork__upgrade_to_altair_test.go index 998b2fbce2..7e978de066 100644 --- a/testing/spectest/minimal/altair__fork__upgrade_to_altair_test.go +++ b/testing/spectest/minimal/altair__fork__upgrade_to_altair_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/altair/fork" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/altair/fork" ) func TestMinimal_Altair_UpgradeToAltair(t *testing.T) { diff --git a/testing/spectest/minimal/altair__fork_transition__transition_test.go b/testing/spectest/minimal/altair__fork_transition__transition_test.go index 
6e9f206979..f77e14b480 100644 --- a/testing/spectest/minimal/altair__fork_transition__transition_test.go +++ b/testing/spectest/minimal/altair__fork_transition__transition_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/altair/fork" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/altair/fork" ) func TestMinimal_Altair_Transition(t *testing.T) { diff --git a/testing/spectest/minimal/altair__forkchoice__forkchoice_test.go b/testing/spectest/minimal/altair__forkchoice__forkchoice_test.go index c3af51fdf2..2e6b6b9c54 100644 --- a/testing/spectest/minimal/altair__forkchoice__forkchoice_test.go +++ b/testing/spectest/minimal/altair__forkchoice__forkchoice_test.go @@ -3,8 +3,8 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/runtime/version" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/common/forkchoice" + "github.com/OffchainLabs/prysm/v7/runtime/version" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/common/forkchoice" ) func TestMinimal_Altair_Forkchoice(t *testing.T) { diff --git a/testing/spectest/minimal/altair__light_client__single_merkle_proof_test.go b/testing/spectest/minimal/altair__light_client__single_merkle_proof_test.go index 386ae8bb49..3996c321d3 100644 --- a/testing/spectest/minimal/altair__light_client__single_merkle_proof_test.go +++ b/testing/spectest/minimal/altair__light_client__single_merkle_proof_test.go @@ -3,8 +3,8 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/runtime/version" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/common/light_client" + "github.com/OffchainLabs/prysm/v7/runtime/version" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/common/light_client" ) func TestMainnet_Altair_LightClient_SingleMerkleProof(t *testing.T) { diff --git a/testing/spectest/minimal/altair__light_client__update_ranking_test.go b/testing/spectest/minimal/altair__light_client__update_ranking_test.go index c70afd5505..0ac087ace0 100644 --- a/testing/spectest/minimal/altair__light_client__update_ranking_test.go +++ b/testing/spectest/minimal/altair__light_client__update_ranking_test.go @@ -3,8 +3,8 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/runtime/version" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/common/light_client" + "github.com/OffchainLabs/prysm/v7/runtime/version" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/common/light_client" ) func TestMainnet_Altair_LightClient_UpdateRanking(t *testing.T) { diff --git a/testing/spectest/minimal/altair__operations__attestation_test.go b/testing/spectest/minimal/altair__operations__attestation_test.go index 113f68b262..10cbc3be3f 100644 --- a/testing/spectest/minimal/altair__operations__attestation_test.go +++ b/testing/spectest/minimal/altair__operations__attestation_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/altair/operations" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/altair/operations" ) func TestMinimal_Altair_Operations_Attestation(t *testing.T) { diff --git a/testing/spectest/minimal/altair__operations__attester_slashing_test.go b/testing/spectest/minimal/altair__operations__attester_slashing_test.go index 59378a7361..0f28e1fec7 100644 --- a/testing/spectest/minimal/altair__operations__attester_slashing_test.go +++ b/testing/spectest/minimal/altair__operations__attester_slashing_test.go @@ -3,7 
+3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/altair/operations" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/altair/operations" ) func TestMinimal_Altair_Operations_AttesterSlashing(t *testing.T) { diff --git a/testing/spectest/minimal/altair__operations__block_header_test.go b/testing/spectest/minimal/altair__operations__block_header_test.go index d9b4827d6c..207f563482 100644 --- a/testing/spectest/minimal/altair__operations__block_header_test.go +++ b/testing/spectest/minimal/altair__operations__block_header_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/altair/operations" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/altair/operations" ) func TestMinimal_Altair_Operations_BlockHeader(t *testing.T) { diff --git a/testing/spectest/minimal/altair__operations__deposit_test.go b/testing/spectest/minimal/altair__operations__deposit_test.go index 42525b903c..49619f56a7 100644 --- a/testing/spectest/minimal/altair__operations__deposit_test.go +++ b/testing/spectest/minimal/altair__operations__deposit_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/altair/operations" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/altair/operations" ) func TestMinimal_Altair_Operations_Deposit(t *testing.T) { diff --git a/testing/spectest/minimal/altair__operations__proposer_slashing_test.go b/testing/spectest/minimal/altair__operations__proposer_slashing_test.go index e7808d7d6f..7e615a6c7b 100644 --- a/testing/spectest/minimal/altair__operations__proposer_slashing_test.go +++ b/testing/spectest/minimal/altair__operations__proposer_slashing_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/altair/operations" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/altair/operations" ) func TestMinimal_Altair_Operations_ProposerSlashing(t *testing.T) { diff --git a/testing/spectest/minimal/altair__operations__sync_committee_test.go b/testing/spectest/minimal/altair__operations__sync_committee_test.go index 7a865b2108..a3a64fe380 100644 --- a/testing/spectest/minimal/altair__operations__sync_committee_test.go +++ b/testing/spectest/minimal/altair__operations__sync_committee_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/altair/operations" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/altair/operations" ) func TestMinimal_Altair_Operations_SyncCommittee(t *testing.T) { diff --git a/testing/spectest/minimal/altair__operations__voluntary_exit_test.go b/testing/spectest/minimal/altair__operations__voluntary_exit_test.go index 99f8837b60..8458377b9e 100644 --- a/testing/spectest/minimal/altair__operations__voluntary_exit_test.go +++ b/testing/spectest/minimal/altair__operations__voluntary_exit_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/altair/operations" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/altair/operations" ) func TestMinimal_Altair_Operations_VoluntaryExit(t *testing.T) { diff --git a/testing/spectest/minimal/altair__random__random_test.go b/testing/spectest/minimal/altair__random__random_test.go index 7f6040201f..4b27aa2257 100644 --- a/testing/spectest/minimal/altair__random__random_test.go +++ 
b/testing/spectest/minimal/altair__random__random_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/altair/sanity" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/altair/sanity" ) func TestMinimal_Altair_Random(t *testing.T) { diff --git a/testing/spectest/minimal/altair__rewards__rewards_test.go b/testing/spectest/minimal/altair__rewards__rewards_test.go index 57edff1bdc..ea7fb70dd9 100644 --- a/testing/spectest/minimal/altair__rewards__rewards_test.go +++ b/testing/spectest/minimal/altair__rewards__rewards_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/altair/rewards" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/altair/rewards" ) func TestMinimal_Altair_Rewards(t *testing.T) { diff --git a/testing/spectest/minimal/altair__sanity__blocks_test.go b/testing/spectest/minimal/altair__sanity__blocks_test.go index 1e02d2e86a..b0c59b42aa 100644 --- a/testing/spectest/minimal/altair__sanity__blocks_test.go +++ b/testing/spectest/minimal/altair__sanity__blocks_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/altair/sanity" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/altair/sanity" ) func TestMinimal_Altair_Sanity_Blocks(t *testing.T) { diff --git a/testing/spectest/minimal/altair__sanity__slots_test.go b/testing/spectest/minimal/altair__sanity__slots_test.go index b2f666cc12..edce8d2b47 100644 --- a/testing/spectest/minimal/altair__sanity__slots_test.go +++ b/testing/spectest/minimal/altair__sanity__slots_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/altair/sanity" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/altair/sanity" ) func TestMinimal_Altair_Sanity_Slots(t *testing.T) { diff --git a/testing/spectest/minimal/altair__ssz_static__ssz_static_test.go b/testing/spectest/minimal/altair__ssz_static__ssz_static_test.go index 52e1e5be84..546cf42498 100644 --- a/testing/spectest/minimal/altair__ssz_static__ssz_static_test.go +++ b/testing/spectest/minimal/altair__ssz_static__ssz_static_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/altair/ssz_static" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/altair/ssz_static" ) func TestMinimal_Altair_SSZStatic(t *testing.T) { diff --git a/testing/spectest/minimal/bellatrix__epoch_processing__effective_balance_updates_test.go b/testing/spectest/minimal/bellatrix__epoch_processing__effective_balance_updates_test.go index bb5ef76180..33f49b6226 100644 --- a/testing/spectest/minimal/bellatrix__epoch_processing__effective_balance_updates_test.go +++ b/testing/spectest/minimal/bellatrix__epoch_processing__effective_balance_updates_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/bellatrix/epoch_processing" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/bellatrix/epoch_processing" ) func TestMinimal_Bellatrix_EpochProcessing_EffectiveBalanceUpdates(t *testing.T) { diff --git a/testing/spectest/minimal/bellatrix__epoch_processing__eth1_data_reset_test.go b/testing/spectest/minimal/bellatrix__epoch_processing__eth1_data_reset_test.go index 6504f01c5a..71117b49af 100644 --- a/testing/spectest/minimal/bellatrix__epoch_processing__eth1_data_reset_test.go +++ 
b/testing/spectest/minimal/bellatrix__epoch_processing__eth1_data_reset_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/bellatrix/epoch_processing" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/bellatrix/epoch_processing" ) func TestMinimal_Bellatrix_EpochProcessing_Eth1DataReset(t *testing.T) { diff --git a/testing/spectest/minimal/bellatrix__epoch_processing__historical_roots_update_test.go b/testing/spectest/minimal/bellatrix__epoch_processing__historical_roots_update_test.go index b9c801cab8..7dbdcad0c5 100644 --- a/testing/spectest/minimal/bellatrix__epoch_processing__historical_roots_update_test.go +++ b/testing/spectest/minimal/bellatrix__epoch_processing__historical_roots_update_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/bellatrix/epoch_processing" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/bellatrix/epoch_processing" ) func TestMinimal_Bellatrix_EpochProcessing_HistoricalRootsUpdate(t *testing.T) { diff --git a/testing/spectest/minimal/bellatrix__epoch_processing__inactivity_updates_test.go b/testing/spectest/minimal/bellatrix__epoch_processing__inactivity_updates_test.go index fb0c09e29c..693e44c705 100644 --- a/testing/spectest/minimal/bellatrix__epoch_processing__inactivity_updates_test.go +++ b/testing/spectest/minimal/bellatrix__epoch_processing__inactivity_updates_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/bellatrix/epoch_processing" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/bellatrix/epoch_processing" ) func TestMinimal_Bellatrix_EpochProcessing_InactivityUpdates(t *testing.T) { diff --git a/testing/spectest/minimal/bellatrix__epoch_processing__justification_and_finalization_test.go b/testing/spectest/minimal/bellatrix__epoch_processing__justification_and_finalization_test.go index 55758f1183..1817b48bf5 100644 --- a/testing/spectest/minimal/bellatrix__epoch_processing__justification_and_finalization_test.go +++ b/testing/spectest/minimal/bellatrix__epoch_processing__justification_and_finalization_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/bellatrix/epoch_processing" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/bellatrix/epoch_processing" ) func TestMinimal_Bellatrix_EpochProcessing_JustificationAndFinalization(t *testing.T) { diff --git a/testing/spectest/minimal/bellatrix__epoch_processing__participation_flag_updates_test.go b/testing/spectest/minimal/bellatrix__epoch_processing__participation_flag_updates_test.go index 7adaa526fa..c560343a58 100644 --- a/testing/spectest/minimal/bellatrix__epoch_processing__participation_flag_updates_test.go +++ b/testing/spectest/minimal/bellatrix__epoch_processing__participation_flag_updates_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/bellatrix/epoch_processing" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/bellatrix/epoch_processing" ) func TestMinimal_Bellatrix_EpochProcessing_ParticipationFlag(t *testing.T) { diff --git a/testing/spectest/minimal/bellatrix__epoch_processing__randao_mixes_reset_test.go b/testing/spectest/minimal/bellatrix__epoch_processing__randao_mixes_reset_test.go index 5874084a86..bfc6b23042 100644 --- 
a/testing/spectest/minimal/bellatrix__epoch_processing__randao_mixes_reset_test.go +++ b/testing/spectest/minimal/bellatrix__epoch_processing__randao_mixes_reset_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/bellatrix/epoch_processing" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/bellatrix/epoch_processing" ) func TestMinimal_Bellatrix_EpochProcessing_RandaoMixesReset(t *testing.T) { diff --git a/testing/spectest/minimal/bellatrix__epoch_processing__registry_updates_test.go b/testing/spectest/minimal/bellatrix__epoch_processing__registry_updates_test.go index 00d9e3ba82..6f5393e646 100644 --- a/testing/spectest/minimal/bellatrix__epoch_processing__registry_updates_test.go +++ b/testing/spectest/minimal/bellatrix__epoch_processing__registry_updates_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/bellatrix/epoch_processing" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/bellatrix/epoch_processing" ) func TestMinimal_Bellatrix_EpochProcessing_ResetRegistryUpdates(t *testing.T) { diff --git a/testing/spectest/minimal/bellatrix__epoch_processing__rewards_and_penalties_test.go b/testing/spectest/minimal/bellatrix__epoch_processing__rewards_and_penalties_test.go index 14b340ffe7..861d1d6a07 100644 --- a/testing/spectest/minimal/bellatrix__epoch_processing__rewards_and_penalties_test.go +++ b/testing/spectest/minimal/bellatrix__epoch_processing__rewards_and_penalties_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/bellatrix/epoch_processing" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/bellatrix/epoch_processing" ) func TestMinimal_Bellatrix_EpochProcessing_RewardsAndPenalties(t *testing.T) { diff --git a/testing/spectest/minimal/bellatrix__epoch_processing__slashings_reset_test.go b/testing/spectest/minimal/bellatrix__epoch_processing__slashings_reset_test.go index eabf8bc25c..0b973829f4 100644 --- a/testing/spectest/minimal/bellatrix__epoch_processing__slashings_reset_test.go +++ b/testing/spectest/minimal/bellatrix__epoch_processing__slashings_reset_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/bellatrix/epoch_processing" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/bellatrix/epoch_processing" ) func TestMinimal_Bellatrix_EpochProcessing_SlashingsReset(t *testing.T) { diff --git a/testing/spectest/minimal/bellatrix__epoch_processing__slashings_test.go b/testing/spectest/minimal/bellatrix__epoch_processing__slashings_test.go index ad1b5a6661..94ffd9e9a9 100644 --- a/testing/spectest/minimal/bellatrix__epoch_processing__slashings_test.go +++ b/testing/spectest/minimal/bellatrix__epoch_processing__slashings_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/bellatrix/epoch_processing" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/bellatrix/epoch_processing" ) func TestMinimal_Bellatrix_EpochProcessing_Slashings(t *testing.T) { diff --git a/testing/spectest/minimal/bellatrix__finality__finality_test.go b/testing/spectest/minimal/bellatrix__finality__finality_test.go index 0c87711ec5..1ef3a9b423 100644 --- a/testing/spectest/minimal/bellatrix__finality__finality_test.go +++ b/testing/spectest/minimal/bellatrix__finality__finality_test.go @@ -3,7 +3,7 @@ package minimal import ( 
"testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/bellatrix/finality" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/bellatrix/finality" ) func TestMinimal_Bellatrix_Finality(t *testing.T) { diff --git a/testing/spectest/minimal/bellatrix__fork__upgrade_to_altair_test.go b/testing/spectest/minimal/bellatrix__fork__upgrade_to_altair_test.go index c2dba75ae3..b11a2a0ad3 100644 --- a/testing/spectest/minimal/bellatrix__fork__upgrade_to_altair_test.go +++ b/testing/spectest/minimal/bellatrix__fork__upgrade_to_altair_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/bellatrix/fork" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/bellatrix/fork" ) func TestMinimal_Bellatrix_UpgradeToBellatrix(t *testing.T) { diff --git a/testing/spectest/minimal/bellatrix__fork_transition__transition_test.go b/testing/spectest/minimal/bellatrix__fork_transition__transition_test.go index 46670f9cb6..77c74dca13 100644 --- a/testing/spectest/minimal/bellatrix__fork_transition__transition_test.go +++ b/testing/spectest/minimal/bellatrix__fork_transition__transition_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/bellatrix/fork" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/bellatrix/fork" ) func TestMinimal_Bellatrix_Transition(t *testing.T) { diff --git a/testing/spectest/minimal/bellatrix__forkchoice__forkchoice_test.go b/testing/spectest/minimal/bellatrix__forkchoice__forkchoice_test.go index 29ecb3ba34..93e9e00cc2 100644 --- a/testing/spectest/minimal/bellatrix__forkchoice__forkchoice_test.go +++ b/testing/spectest/minimal/bellatrix__forkchoice__forkchoice_test.go @@ -3,8 +3,8 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/runtime/version" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/common/forkchoice" + "github.com/OffchainLabs/prysm/v7/runtime/version" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/common/forkchoice" ) func TestMinimal_Bellatrix_Forkchoice(t *testing.T) { diff --git a/testing/spectest/minimal/bellatrix__light_client__single_merkle_proof_test.go b/testing/spectest/minimal/bellatrix__light_client__single_merkle_proof_test.go index 13d709ed03..36960cec74 100644 --- a/testing/spectest/minimal/bellatrix__light_client__single_merkle_proof_test.go +++ b/testing/spectest/minimal/bellatrix__light_client__single_merkle_proof_test.go @@ -3,8 +3,8 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/runtime/version" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/common/light_client" + "github.com/OffchainLabs/prysm/v7/runtime/version" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/common/light_client" ) func TestMainnet_Bellatrix_LightClient_SingleMerkleProof(t *testing.T) { diff --git a/testing/spectest/minimal/bellatrix__light_client__update_ranking_test.go b/testing/spectest/minimal/bellatrix__light_client__update_ranking_test.go index 9cfd8b8897..0f73d94bd1 100644 --- a/testing/spectest/minimal/bellatrix__light_client__update_ranking_test.go +++ b/testing/spectest/minimal/bellatrix__light_client__update_ranking_test.go @@ -3,8 +3,8 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/runtime/version" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/common/light_client" + "github.com/OffchainLabs/prysm/v7/runtime/version" + 
"github.com/OffchainLabs/prysm/v7/testing/spectest/shared/common/light_client" ) func TestMainnet_Bellatrix_LightClient_UpdateRanking(t *testing.T) { diff --git a/testing/spectest/minimal/bellatrix__operations__attestation_test.go b/testing/spectest/minimal/bellatrix__operations__attestation_test.go index bcfca7dbf2..76ebb7106e 100644 --- a/testing/spectest/minimal/bellatrix__operations__attestation_test.go +++ b/testing/spectest/minimal/bellatrix__operations__attestation_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/bellatrix/operations" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/bellatrix/operations" ) func TestMinimal_Bellatrix_Operations_Attestation(t *testing.T) { diff --git a/testing/spectest/minimal/bellatrix__operations__attester_slashing_test.go b/testing/spectest/minimal/bellatrix__operations__attester_slashing_test.go index 52751df0b1..889bc27e28 100644 --- a/testing/spectest/minimal/bellatrix__operations__attester_slashing_test.go +++ b/testing/spectest/minimal/bellatrix__operations__attester_slashing_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/bellatrix/operations" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/bellatrix/operations" ) func TestMinimal_Bellatrix_Operations_AttesterSlashing(t *testing.T) { diff --git a/testing/spectest/minimal/bellatrix__operations__block_header_test.go b/testing/spectest/minimal/bellatrix__operations__block_header_test.go index 2894532694..5a8b1a7308 100644 --- a/testing/spectest/minimal/bellatrix__operations__block_header_test.go +++ b/testing/spectest/minimal/bellatrix__operations__block_header_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/bellatrix/operations" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/bellatrix/operations" ) func TestMinimal_Bellatrix_Operations_BlockHeader(t *testing.T) { diff --git a/testing/spectest/minimal/bellatrix__operations__deposit_test.go b/testing/spectest/minimal/bellatrix__operations__deposit_test.go index 75319c1e45..b84b13bbe7 100644 --- a/testing/spectest/minimal/bellatrix__operations__deposit_test.go +++ b/testing/spectest/minimal/bellatrix__operations__deposit_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/bellatrix/operations" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/bellatrix/operations" ) func TestMinimal_Bellatrix_Operations_Deposit(t *testing.T) { diff --git a/testing/spectest/minimal/bellatrix__operations__execution_payload_test.go b/testing/spectest/minimal/bellatrix__operations__execution_payload_test.go index 2e6a71d699..ecc3e7703a 100644 --- a/testing/spectest/minimal/bellatrix__operations__execution_payload_test.go +++ b/testing/spectest/minimal/bellatrix__operations__execution_payload_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/bellatrix/operations" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/bellatrix/operations" ) func TestMinimal_Bellatrix_Operations_PayloadExecution(t *testing.T) { diff --git a/testing/spectest/minimal/bellatrix__operations__proposer_slashing_test.go b/testing/spectest/minimal/bellatrix__operations__proposer_slashing_test.go index 145efaf0a5..b428ba0847 100644 --- 
a/testing/spectest/minimal/bellatrix__operations__proposer_slashing_test.go +++ b/testing/spectest/minimal/bellatrix__operations__proposer_slashing_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/bellatrix/operations" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/bellatrix/operations" ) func TestMinimal_Bellatrix_Operations_ProposerSlashing(t *testing.T) { diff --git a/testing/spectest/minimal/bellatrix__operations__sync_committee_test.go b/testing/spectest/minimal/bellatrix__operations__sync_committee_test.go index 0b583bb392..e35d7c28e0 100644 --- a/testing/spectest/minimal/bellatrix__operations__sync_committee_test.go +++ b/testing/spectest/minimal/bellatrix__operations__sync_committee_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/bellatrix/operations" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/bellatrix/operations" ) func TestMinimal_Bellatrix_Operations_SyncCommittee(t *testing.T) { diff --git a/testing/spectest/minimal/bellatrix__operations__voluntary_exit_test.go b/testing/spectest/minimal/bellatrix__operations__voluntary_exit_test.go index c8b3c4fd21..1105fd27d3 100644 --- a/testing/spectest/minimal/bellatrix__operations__voluntary_exit_test.go +++ b/testing/spectest/minimal/bellatrix__operations__voluntary_exit_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/bellatrix/operations" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/bellatrix/operations" ) func TestMinimal_Bellatrix_Operations_VoluntaryExit(t *testing.T) { diff --git a/testing/spectest/minimal/bellatrix__random__random_test.go b/testing/spectest/minimal/bellatrix__random__random_test.go index 4b668f2495..b55e4ad63b 100644 --- a/testing/spectest/minimal/bellatrix__random__random_test.go +++ b/testing/spectest/minimal/bellatrix__random__random_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/bellatrix/sanity" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/bellatrix/sanity" ) func TestMinimal_Bellatrix_Random(t *testing.T) { diff --git a/testing/spectest/minimal/bellatrix__rewards__rewards_test.go b/testing/spectest/minimal/bellatrix__rewards__rewards_test.go index 35b680291e..58efaabbcb 100644 --- a/testing/spectest/minimal/bellatrix__rewards__rewards_test.go +++ b/testing/spectest/minimal/bellatrix__rewards__rewards_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/bellatrix/rewards" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/bellatrix/rewards" ) func TestMinimal_Bellatrix_Rewards(t *testing.T) { diff --git a/testing/spectest/minimal/bellatrix__sanity__blocks_test.go b/testing/spectest/minimal/bellatrix__sanity__blocks_test.go index a602ecbadd..78329c8b23 100644 --- a/testing/spectest/minimal/bellatrix__sanity__blocks_test.go +++ b/testing/spectest/minimal/bellatrix__sanity__blocks_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/bellatrix/sanity" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/bellatrix/sanity" ) func TestMinimal_Bellatrix_Sanity_Blocks(t *testing.T) { diff --git a/testing/spectest/minimal/bellatrix__sanity__slots_test.go b/testing/spectest/minimal/bellatrix__sanity__slots_test.go index 
54a8a3e3b5..3bf39073bd 100644 --- a/testing/spectest/minimal/bellatrix__sanity__slots_test.go +++ b/testing/spectest/minimal/bellatrix__sanity__slots_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/bellatrix/sanity" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/bellatrix/sanity" ) func TestMinimal_Bellatrix_Sanity_Slots(t *testing.T) { diff --git a/testing/spectest/minimal/bellatrix__ssz_static__ssz_static_test.go b/testing/spectest/minimal/bellatrix__ssz_static__ssz_static_test.go index d8481faf90..d08691e942 100644 --- a/testing/spectest/minimal/bellatrix__ssz_static__ssz_static_test.go +++ b/testing/spectest/minimal/bellatrix__ssz_static__ssz_static_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/bellatrix/ssz_static" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/bellatrix/ssz_static" ) func TestMinimal_Bellatrix_SSZStatic(t *testing.T) { diff --git a/testing/spectest/minimal/capella__epoch_processing__effective_balance_updates_test.go b/testing/spectest/minimal/capella__epoch_processing__effective_balance_updates_test.go index f8c5b869de..3234e594a3 100644 --- a/testing/spectest/minimal/capella__epoch_processing__effective_balance_updates_test.go +++ b/testing/spectest/minimal/capella__epoch_processing__effective_balance_updates_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/capella/epoch_processing" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/capella/epoch_processing" ) func TestMinimal_Capella_EpochProcessing_EffectiveBalanceUpdates(t *testing.T) { diff --git a/testing/spectest/minimal/capella__epoch_processing__eth1_data_reset_test.go b/testing/spectest/minimal/capella__epoch_processing__eth1_data_reset_test.go index 042eb49d8c..24a4c623e0 100644 --- a/testing/spectest/minimal/capella__epoch_processing__eth1_data_reset_test.go +++ b/testing/spectest/minimal/capella__epoch_processing__eth1_data_reset_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/capella/epoch_processing" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/capella/epoch_processing" ) func TestMinimal_Capella_EpochProcessing_Eth1DataReset(t *testing.T) { diff --git a/testing/spectest/minimal/capella__epoch_processing__historical_roots_summaries_test.go b/testing/spectest/minimal/capella__epoch_processing__historical_roots_summaries_test.go index 5153b737db..f5b1652ef9 100644 --- a/testing/spectest/minimal/capella__epoch_processing__historical_roots_summaries_test.go +++ b/testing/spectest/minimal/capella__epoch_processing__historical_roots_summaries_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/capella/epoch_processing" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/capella/epoch_processing" ) func TestMinimal_Capella_EpochProcessing_HistoricalSummariesUpdate(t *testing.T) { diff --git a/testing/spectest/minimal/capella__epoch_processing__inactivity_updates_test.go b/testing/spectest/minimal/capella__epoch_processing__inactivity_updates_test.go index 02b2f48fa7..f9513bd628 100644 --- a/testing/spectest/minimal/capella__epoch_processing__inactivity_updates_test.go +++ b/testing/spectest/minimal/capella__epoch_processing__inactivity_updates_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - 
"github.com/OffchainLabs/prysm/v6/testing/spectest/shared/capella/epoch_processing" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/capella/epoch_processing" ) func TestMinimal_Capella_EpochProcessing_InactivityUpdates(t *testing.T) { diff --git a/testing/spectest/minimal/capella__epoch_processing__justification_and_finalization_test.go b/testing/spectest/minimal/capella__epoch_processing__justification_and_finalization_test.go index 861af2b79e..cff8f32567 100644 --- a/testing/spectest/minimal/capella__epoch_processing__justification_and_finalization_test.go +++ b/testing/spectest/minimal/capella__epoch_processing__justification_and_finalization_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/capella/epoch_processing" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/capella/epoch_processing" ) func TestMinimal_Capella_EpochProcessing_JustificationAndFinalization(t *testing.T) { diff --git a/testing/spectest/minimal/capella__epoch_processing__participation_flag_updates_test.go b/testing/spectest/minimal/capella__epoch_processing__participation_flag_updates_test.go index d53883c587..c15b7e8af1 100644 --- a/testing/spectest/minimal/capella__epoch_processing__participation_flag_updates_test.go +++ b/testing/spectest/minimal/capella__epoch_processing__participation_flag_updates_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/capella/epoch_processing" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/capella/epoch_processing" ) func TestMinimal_Capella_EpochProcessing_ParticipationFlag(t *testing.T) { diff --git a/testing/spectest/minimal/capella__epoch_processing__randao_mixes_reset_test.go b/testing/spectest/minimal/capella__epoch_processing__randao_mixes_reset_test.go index 48e39ee18b..65afb27525 100644 --- a/testing/spectest/minimal/capella__epoch_processing__randao_mixes_reset_test.go +++ b/testing/spectest/minimal/capella__epoch_processing__randao_mixes_reset_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/capella/epoch_processing" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/capella/epoch_processing" ) func TestMinimal_Capella_EpochProcessing_RandaoMixesReset(t *testing.T) { diff --git a/testing/spectest/minimal/capella__epoch_processing__registry_updates_test.go b/testing/spectest/minimal/capella__epoch_processing__registry_updates_test.go index 068924bfd4..b54785f85e 100644 --- a/testing/spectest/minimal/capella__epoch_processing__registry_updates_test.go +++ b/testing/spectest/minimal/capella__epoch_processing__registry_updates_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/capella/epoch_processing" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/capella/epoch_processing" ) func TestMinimal_Capella_EpochProcessing_ResetRegistryUpdates(t *testing.T) { diff --git a/testing/spectest/minimal/capella__epoch_processing__rewards_and_penalties_test.go b/testing/spectest/minimal/capella__epoch_processing__rewards_and_penalties_test.go index 7de4dd8daa..5ed7fd7835 100644 --- a/testing/spectest/minimal/capella__epoch_processing__rewards_and_penalties_test.go +++ b/testing/spectest/minimal/capella__epoch_processing__rewards_and_penalties_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - 
"github.com/OffchainLabs/prysm/v6/testing/spectest/shared/capella/epoch_processing" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/capella/epoch_processing" ) func TestMinimal_Capella_EpochProcessing_RewardsAndPenalties(t *testing.T) { diff --git a/testing/spectest/minimal/capella__epoch_processing__slashings_reset_test.go b/testing/spectest/minimal/capella__epoch_processing__slashings_reset_test.go index 3230a73752..a8dedc68be 100644 --- a/testing/spectest/minimal/capella__epoch_processing__slashings_reset_test.go +++ b/testing/spectest/minimal/capella__epoch_processing__slashings_reset_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/capella/epoch_processing" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/capella/epoch_processing" ) func TestMinimal_Capella_EpochProcessing_SlashingsReset(t *testing.T) { diff --git a/testing/spectest/minimal/capella__epoch_processing__slashings_test.go b/testing/spectest/minimal/capella__epoch_processing__slashings_test.go index fee7e7d50b..2fed223a1e 100644 --- a/testing/spectest/minimal/capella__epoch_processing__slashings_test.go +++ b/testing/spectest/minimal/capella__epoch_processing__slashings_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/capella/epoch_processing" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/capella/epoch_processing" ) func TestMinimal_Capella_EpochProcessing_Slashings(t *testing.T) { diff --git a/testing/spectest/minimal/capella__finality__finality_test.go b/testing/spectest/minimal/capella__finality__finality_test.go index ad0083b3e1..6186a06591 100644 --- a/testing/spectest/minimal/capella__finality__finality_test.go +++ b/testing/spectest/minimal/capella__finality__finality_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/capella/finality" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/capella/finality" ) func TestMinimal_Capella_Finality(t *testing.T) { diff --git a/testing/spectest/minimal/capella__fork__upgrade_to_capella_test.go b/testing/spectest/minimal/capella__fork__upgrade_to_capella_test.go index 47fdd5570e..3dd4a66ad7 100644 --- a/testing/spectest/minimal/capella__fork__upgrade_to_capella_test.go +++ b/testing/spectest/minimal/capella__fork__upgrade_to_capella_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/capella/fork" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/capella/fork" ) func TestMinimal_Capella_UpgradeToCapella(t *testing.T) { diff --git a/testing/spectest/minimal/capella__fork_transition__transition_test.go b/testing/spectest/minimal/capella__fork_transition__transition_test.go index 453f3eb319..8f161b4c1b 100644 --- a/testing/spectest/minimal/capella__fork_transition__transition_test.go +++ b/testing/spectest/minimal/capella__fork_transition__transition_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/capella/fork" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/capella/fork" ) func TestMinimal_Capella_Transition(t *testing.T) { diff --git a/testing/spectest/minimal/capella__forkchoice__forkchoice_test.go b/testing/spectest/minimal/capella__forkchoice__forkchoice_test.go index 99a7208b90..25a3ec407b 100644 --- a/testing/spectest/minimal/capella__forkchoice__forkchoice_test.go 
+++ b/testing/spectest/minimal/capella__forkchoice__forkchoice_test.go @@ -3,8 +3,8 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/runtime/version" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/common/forkchoice" + "github.com/OffchainLabs/prysm/v7/runtime/version" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/common/forkchoice" ) func TestMinimal_Capella_Forkchoice(t *testing.T) { diff --git a/testing/spectest/minimal/capella__light_client__single_merkle_proof_test.go b/testing/spectest/minimal/capella__light_client__single_merkle_proof_test.go index 4c4505c7cc..9ed8933ad9 100644 --- a/testing/spectest/minimal/capella__light_client__single_merkle_proof_test.go +++ b/testing/spectest/minimal/capella__light_client__single_merkle_proof_test.go @@ -3,8 +3,8 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/runtime/version" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/common/light_client" + "github.com/OffchainLabs/prysm/v7/runtime/version" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/common/light_client" ) func TestMainnet_Capella_LightClient_SingleMerkleProof(t *testing.T) { diff --git a/testing/spectest/minimal/capella__light_client__update_ranking_test.go b/testing/spectest/minimal/capella__light_client__update_ranking_test.go index 73857a99d9..a989c71e1e 100644 --- a/testing/spectest/minimal/capella__light_client__update_ranking_test.go +++ b/testing/spectest/minimal/capella__light_client__update_ranking_test.go @@ -3,8 +3,8 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/runtime/version" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/common/light_client" + "github.com/OffchainLabs/prysm/v7/runtime/version" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/common/light_client" ) func TestMainnet_Capella_LightClient_UpdateRanking(t *testing.T) { diff --git a/testing/spectest/minimal/capella__operations__attestation_test.go b/testing/spectest/minimal/capella__operations__attestation_test.go index bfd30f21bd..4570946ebd 100644 --- a/testing/spectest/minimal/capella__operations__attestation_test.go +++ b/testing/spectest/minimal/capella__operations__attestation_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/capella/operations" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/capella/operations" ) func TestMinimal_Capella_Operations_Attestation(t *testing.T) { diff --git a/testing/spectest/minimal/capella__operations__attester_slashing_test.go b/testing/spectest/minimal/capella__operations__attester_slashing_test.go index 61f275586b..e61e93a52a 100644 --- a/testing/spectest/minimal/capella__operations__attester_slashing_test.go +++ b/testing/spectest/minimal/capella__operations__attester_slashing_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/capella/operations" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/capella/operations" ) func TestMinimal_Capella_Operations_AttesterSlashing(t *testing.T) { diff --git a/testing/spectest/minimal/capella__operations__block_header_test.go b/testing/spectest/minimal/capella__operations__block_header_test.go index 2f8dd5bc7a..11ad77071f 100644 --- a/testing/spectest/minimal/capella__operations__block_header_test.go +++ b/testing/spectest/minimal/capella__operations__block_header_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - 
"github.com/OffchainLabs/prysm/v6/testing/spectest/shared/capella/operations" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/capella/operations" ) func TestMinimal_Capella_Operations_BlockHeader(t *testing.T) { diff --git a/testing/spectest/minimal/capella__operations__bls_to_execution_change_test.go b/testing/spectest/minimal/capella__operations__bls_to_execution_change_test.go index 224a04d8d2..9990af59ea 100644 --- a/testing/spectest/minimal/capella__operations__bls_to_execution_change_test.go +++ b/testing/spectest/minimal/capella__operations__bls_to_execution_change_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/capella/operations" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/capella/operations" ) func TestMinimal_Capella_Operations_BLSToExecutionChange(t *testing.T) { diff --git a/testing/spectest/minimal/capella__operations__deposit_test.go b/testing/spectest/minimal/capella__operations__deposit_test.go index de44d5c22f..e4dacabf68 100644 --- a/testing/spectest/minimal/capella__operations__deposit_test.go +++ b/testing/spectest/minimal/capella__operations__deposit_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/capella/operations" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/capella/operations" ) func TestMinimal_Capella_Operations_Deposit(t *testing.T) { diff --git a/testing/spectest/minimal/capella__operations__execution_payload_test.go b/testing/spectest/minimal/capella__operations__execution_payload_test.go index 44b5dde71a..fbcbb9a0f9 100644 --- a/testing/spectest/minimal/capella__operations__execution_payload_test.go +++ b/testing/spectest/minimal/capella__operations__execution_payload_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/capella/operations" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/capella/operations" ) func TestMinimal_Capella_Operations_PayloadExecution(t *testing.T) { diff --git a/testing/spectest/minimal/capella__operations__proposer_slashing_test.go b/testing/spectest/minimal/capella__operations__proposer_slashing_test.go index fe63897c58..0cec427f50 100644 --- a/testing/spectest/minimal/capella__operations__proposer_slashing_test.go +++ b/testing/spectest/minimal/capella__operations__proposer_slashing_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/capella/operations" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/capella/operations" ) func TestMinimal_Capella_Operations_ProposerSlashing(t *testing.T) { diff --git a/testing/spectest/minimal/capella__operations__sync_committee_test.go b/testing/spectest/minimal/capella__operations__sync_committee_test.go index 88846db371..4a2f92a5a3 100644 --- a/testing/spectest/minimal/capella__operations__sync_committee_test.go +++ b/testing/spectest/minimal/capella__operations__sync_committee_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/capella/operations" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/capella/operations" ) func TestMinimal_Capella_Operations_SyncCommittee(t *testing.T) { diff --git a/testing/spectest/minimal/capella__operations__voluntary_exit_test.go b/testing/spectest/minimal/capella__operations__voluntary_exit_test.go index 62ef84c8fd..55000d92c0 100644 --- 
a/testing/spectest/minimal/capella__operations__voluntary_exit_test.go +++ b/testing/spectest/minimal/capella__operations__voluntary_exit_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/capella/operations" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/capella/operations" ) func TestMinimal_Capella_Operations_VoluntaryExit(t *testing.T) { diff --git a/testing/spectest/minimal/capella__operations__withdrawals_test.go b/testing/spectest/minimal/capella__operations__withdrawals_test.go index e470bfb29d..2c7c1a0daf 100644 --- a/testing/spectest/minimal/capella__operations__withdrawals_test.go +++ b/testing/spectest/minimal/capella__operations__withdrawals_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/capella/operations" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/capella/operations" ) func TestMinimal_Capella_Operations_Withdrawals(t *testing.T) { diff --git a/testing/spectest/minimal/capella__random__random_test.go b/testing/spectest/minimal/capella__random__random_test.go index a1e9f9c6b1..a20de0681d 100644 --- a/testing/spectest/minimal/capella__random__random_test.go +++ b/testing/spectest/minimal/capella__random__random_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/capella/sanity" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/capella/sanity" ) func TestMinimal_Capella_Random(t *testing.T) { diff --git a/testing/spectest/minimal/capella__rewards__rewards_test.go b/testing/spectest/minimal/capella__rewards__rewards_test.go index 003ba6c056..35c1e15d71 100644 --- a/testing/spectest/minimal/capella__rewards__rewards_test.go +++ b/testing/spectest/minimal/capella__rewards__rewards_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/capella/rewards" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/capella/rewards" ) func TestMinimal_Capella_Rewards(t *testing.T) { diff --git a/testing/spectest/minimal/capella__sanity__blocks_test.go b/testing/spectest/minimal/capella__sanity__blocks_test.go index 5814b612bf..77f6c1ecce 100644 --- a/testing/spectest/minimal/capella__sanity__blocks_test.go +++ b/testing/spectest/minimal/capella__sanity__blocks_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/capella/sanity" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/capella/sanity" ) func TestMinimal_Capella_Sanity_Blocks(t *testing.T) { diff --git a/testing/spectest/minimal/capella__sanity__slots_test.go b/testing/spectest/minimal/capella__sanity__slots_test.go index 3462a7e4cd..0dec7eef0f 100644 --- a/testing/spectest/minimal/capella__sanity__slots_test.go +++ b/testing/spectest/minimal/capella__sanity__slots_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/capella/sanity" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/capella/sanity" ) func TestMinimal_Capella_Sanity_Slots(t *testing.T) { diff --git a/testing/spectest/minimal/capella__ssz_static__ssz_static_test.go b/testing/spectest/minimal/capella__ssz_static__ssz_static_test.go index 3003486da6..55322a4987 100644 --- a/testing/spectest/minimal/capella__ssz_static__ssz_static_test.go +++ b/testing/spectest/minimal/capella__ssz_static__ssz_static_test.go @@ 
-3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/capella/ssz_static" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/capella/ssz_static" ) func TestMinimal_Capella_SSZStatic(t *testing.T) { diff --git a/testing/spectest/minimal/deneb__epoch_processing__effective_balance_updates_test.go b/testing/spectest/minimal/deneb__epoch_processing__effective_balance_updates_test.go index f222702143..228f8fe890 100644 --- a/testing/spectest/minimal/deneb__epoch_processing__effective_balance_updates_test.go +++ b/testing/spectest/minimal/deneb__epoch_processing__effective_balance_updates_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/deneb/epoch_processing" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/deneb/epoch_processing" ) func TestMinimal_Deneb_EpochProcessing_EffectiveBalanceUpdates(t *testing.T) { diff --git a/testing/spectest/minimal/deneb__epoch_processing__eth1_data_reset_test.go b/testing/spectest/minimal/deneb__epoch_processing__eth1_data_reset_test.go index ed760f6dc0..47a3bb4f38 100644 --- a/testing/spectest/minimal/deneb__epoch_processing__eth1_data_reset_test.go +++ b/testing/spectest/minimal/deneb__epoch_processing__eth1_data_reset_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/deneb/epoch_processing" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/deneb/epoch_processing" ) func TestMinimal_Deneb_EpochProcessing_Eth1DataReset(t *testing.T) { diff --git a/testing/spectest/minimal/deneb__epoch_processing__historical_summaries_update_test.go b/testing/spectest/minimal/deneb__epoch_processing__historical_summaries_update_test.go index 9ec6945c06..25118f0d0f 100644 --- a/testing/spectest/minimal/deneb__epoch_processing__historical_summaries_update_test.go +++ b/testing/spectest/minimal/deneb__epoch_processing__historical_summaries_update_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/deneb/epoch_processing" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/deneb/epoch_processing" ) func TestMinimal_Deneb_EpochProcessing_HistoricalSummariesUpdate(t *testing.T) { diff --git a/testing/spectest/minimal/deneb__epoch_processing__inactivity_updates_test.go b/testing/spectest/minimal/deneb__epoch_processing__inactivity_updates_test.go index dcda7d23f0..0b357c5d2c 100644 --- a/testing/spectest/minimal/deneb__epoch_processing__inactivity_updates_test.go +++ b/testing/spectest/minimal/deneb__epoch_processing__inactivity_updates_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/deneb/epoch_processing" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/deneb/epoch_processing" ) func TestMinimal_Deneb_EpochProcessing_InactivityUpdates(t *testing.T) { diff --git a/testing/spectest/minimal/deneb__epoch_processing__justification_and_finalization_test.go b/testing/spectest/minimal/deneb__epoch_processing__justification_and_finalization_test.go index 968645a426..c8c83901fe 100644 --- a/testing/spectest/minimal/deneb__epoch_processing__justification_and_finalization_test.go +++ b/testing/spectest/minimal/deneb__epoch_processing__justification_and_finalization_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/deneb/epoch_processing" + 
"github.com/OffchainLabs/prysm/v7/testing/spectest/shared/deneb/epoch_processing" ) func TestMinimal_Deneb_EpochProcessing_JustificationAndFinalization(t *testing.T) { diff --git a/testing/spectest/minimal/deneb__epoch_processing__participation_flag_updates_test.go b/testing/spectest/minimal/deneb__epoch_processing__participation_flag_updates_test.go index 2fbd1e82fa..64cb390d82 100644 --- a/testing/spectest/minimal/deneb__epoch_processing__participation_flag_updates_test.go +++ b/testing/spectest/minimal/deneb__epoch_processing__participation_flag_updates_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/deneb/epoch_processing" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/deneb/epoch_processing" ) func TestMinimal_Deneb_EpochProcessing_ParticipationFlag(t *testing.T) { diff --git a/testing/spectest/minimal/deneb__epoch_processing__randao_mixes_reset_test.go b/testing/spectest/minimal/deneb__epoch_processing__randao_mixes_reset_test.go index 933a5f200f..b8df6befbe 100644 --- a/testing/spectest/minimal/deneb__epoch_processing__randao_mixes_reset_test.go +++ b/testing/spectest/minimal/deneb__epoch_processing__randao_mixes_reset_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/deneb/epoch_processing" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/deneb/epoch_processing" ) func TestMinimal_Deneb_EpochProcessing_RandaoMixesReset(t *testing.T) { diff --git a/testing/spectest/minimal/deneb__epoch_processing__registry_updates_test.go b/testing/spectest/minimal/deneb__epoch_processing__registry_updates_test.go index 91deaa467a..e76ffdb967 100644 --- a/testing/spectest/minimal/deneb__epoch_processing__registry_updates_test.go +++ b/testing/spectest/minimal/deneb__epoch_processing__registry_updates_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/deneb/epoch_processing" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/deneb/epoch_processing" ) func TestMinimal_Deneb_EpochProcessing_ResetRegistryUpdates(t *testing.T) { diff --git a/testing/spectest/minimal/deneb__epoch_processing__rewards_and_penalties_test.go b/testing/spectest/minimal/deneb__epoch_processing__rewards_and_penalties_test.go index 5d46e97ea2..ba9ade5b0c 100644 --- a/testing/spectest/minimal/deneb__epoch_processing__rewards_and_penalties_test.go +++ b/testing/spectest/minimal/deneb__epoch_processing__rewards_and_penalties_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/deneb/epoch_processing" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/deneb/epoch_processing" ) func TestMinimal_Deneb_EpochProcessing_RewardsAndPenalties(t *testing.T) { diff --git a/testing/spectest/minimal/deneb__epoch_processing__slashings_reset_test.go b/testing/spectest/minimal/deneb__epoch_processing__slashings_reset_test.go index a0a65a3f20..5457f2ef54 100644 --- a/testing/spectest/minimal/deneb__epoch_processing__slashings_reset_test.go +++ b/testing/spectest/minimal/deneb__epoch_processing__slashings_reset_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/deneb/epoch_processing" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/deneb/epoch_processing" ) func TestMinimal_Deneb_EpochProcessing_SlashingsReset(t *testing.T) { diff --git 
a/testing/spectest/minimal/deneb__epoch_processing__slashings_test.go b/testing/spectest/minimal/deneb__epoch_processing__slashings_test.go index 1528f8d8b7..5ecf125d6f 100644 --- a/testing/spectest/minimal/deneb__epoch_processing__slashings_test.go +++ b/testing/spectest/minimal/deneb__epoch_processing__slashings_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/deneb/epoch_processing" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/deneb/epoch_processing" ) func TestMinimal_Deneb_EpochProcessing_Slashings(t *testing.T) { diff --git a/testing/spectest/minimal/deneb__finality__finality_test.go b/testing/spectest/minimal/deneb__finality__finality_test.go index 1c3e995c19..8f88f071fa 100644 --- a/testing/spectest/minimal/deneb__finality__finality_test.go +++ b/testing/spectest/minimal/deneb__finality__finality_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/deneb/finality" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/deneb/finality" ) func TestMinimal_Deneb_Finality(t *testing.T) { diff --git a/testing/spectest/minimal/deneb__fork__upgrade_to_deneb_test.go b/testing/spectest/minimal/deneb__fork__upgrade_to_deneb_test.go index 28bf20b5ce..93459ae05f 100644 --- a/testing/spectest/minimal/deneb__fork__upgrade_to_deneb_test.go +++ b/testing/spectest/minimal/deneb__fork__upgrade_to_deneb_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/deneb/fork" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/deneb/fork" ) func TestMinimal_UpgradeToDeneb(t *testing.T) { diff --git a/testing/spectest/minimal/deneb__fork_transition__transition_test.go b/testing/spectest/minimal/deneb__fork_transition__transition_test.go index afeab4298f..9897494ca9 100644 --- a/testing/spectest/minimal/deneb__fork_transition__transition_test.go +++ b/testing/spectest/minimal/deneb__fork_transition__transition_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/deneb/fork" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/deneb/fork" ) func TestMinimal_Deneb_Transition(t *testing.T) { diff --git a/testing/spectest/minimal/deneb__forkchoice__forkchoice_test.go b/testing/spectest/minimal/deneb__forkchoice__forkchoice_test.go index e11fd470d4..3739b92cc7 100644 --- a/testing/spectest/minimal/deneb__forkchoice__forkchoice_test.go +++ b/testing/spectest/minimal/deneb__forkchoice__forkchoice_test.go @@ -3,8 +3,8 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/runtime/version" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/common/forkchoice" + "github.com/OffchainLabs/prysm/v7/runtime/version" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/common/forkchoice" ) func TestMinimal_Deneb_Forkchoice(t *testing.T) { diff --git a/testing/spectest/minimal/deneb__light_client__single_merkle_proof_test.go b/testing/spectest/minimal/deneb__light_client__single_merkle_proof_test.go index 40f5c119ae..ca5c83e640 100644 --- a/testing/spectest/minimal/deneb__light_client__single_merkle_proof_test.go +++ b/testing/spectest/minimal/deneb__light_client__single_merkle_proof_test.go @@ -3,8 +3,8 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/runtime/version" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/common/light_client" + 
"github.com/OffchainLabs/prysm/v7/runtime/version" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/common/light_client" ) func TestMainnet_Deneb_LightClient_SingleMerkleProof(t *testing.T) { diff --git a/testing/spectest/minimal/deneb__light_client__update_ranking_test.go b/testing/spectest/minimal/deneb__light_client__update_ranking_test.go index 1a800d0e38..22a58fd009 100644 --- a/testing/spectest/minimal/deneb__light_client__update_ranking_test.go +++ b/testing/spectest/minimal/deneb__light_client__update_ranking_test.go @@ -3,8 +3,8 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/runtime/version" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/common/light_client" + "github.com/OffchainLabs/prysm/v7/runtime/version" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/common/light_client" ) func TestMainnet_Deneb_LightClient_UpdateRanking(t *testing.T) { diff --git a/testing/spectest/minimal/deneb__merkle_proof__merkle_proof_test.go b/testing/spectest/minimal/deneb__merkle_proof__merkle_proof_test.go index 36211fa7d2..2c0ed480f4 100644 --- a/testing/spectest/minimal/deneb__merkle_proof__merkle_proof_test.go +++ b/testing/spectest/minimal/deneb__merkle_proof__merkle_proof_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/deneb/merkle_proof" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/deneb/merkle_proof" ) func TestMainnet_Deneb_MerkleProof(t *testing.T) { diff --git a/testing/spectest/minimal/deneb__operations__attestation_test.go b/testing/spectest/minimal/deneb__operations__attestation_test.go index c210b4fcdc..61dcd023fe 100644 --- a/testing/spectest/minimal/deneb__operations__attestation_test.go +++ b/testing/spectest/minimal/deneb__operations__attestation_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/deneb/operations" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/deneb/operations" ) func TestMinimal_Deneb_Operations_Attestation(t *testing.T) { diff --git a/testing/spectest/minimal/deneb__operations__attester_slashing_test.go b/testing/spectest/minimal/deneb__operations__attester_slashing_test.go index 73e16835ef..8b6684e6b7 100644 --- a/testing/spectest/minimal/deneb__operations__attester_slashing_test.go +++ b/testing/spectest/minimal/deneb__operations__attester_slashing_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/deneb/operations" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/deneb/operations" ) func TestMinimal_Deneb_Operations_AttesterSlashing(t *testing.T) { diff --git a/testing/spectest/minimal/deneb__operations__block_header_test.go b/testing/spectest/minimal/deneb__operations__block_header_test.go index ad504ddbb4..042b990440 100644 --- a/testing/spectest/minimal/deneb__operations__block_header_test.go +++ b/testing/spectest/minimal/deneb__operations__block_header_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/deneb/operations" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/deneb/operations" ) func TestMinimal_Deneb_Operations_BlockHeader(t *testing.T) { diff --git a/testing/spectest/minimal/deneb__operations__bls_to_execution_change_test.go b/testing/spectest/minimal/deneb__operations__bls_to_execution_change_test.go index a66b0e713f..73c60e3939 100644 --- 
a/testing/spectest/minimal/deneb__operations__bls_to_execution_change_test.go +++ b/testing/spectest/minimal/deneb__operations__bls_to_execution_change_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/deneb/operations" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/deneb/operations" ) func TestMinimal_Deneb_Operations_BLSToExecutionChange(t *testing.T) { diff --git a/testing/spectest/minimal/deneb__operations__deposit_test.go b/testing/spectest/minimal/deneb__operations__deposit_test.go index 6bd817c389..6e3af6346f 100644 --- a/testing/spectest/minimal/deneb__operations__deposit_test.go +++ b/testing/spectest/minimal/deneb__operations__deposit_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/deneb/operations" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/deneb/operations" ) func TestMinimal_Deneb_Operations_Deposit(t *testing.T) { diff --git a/testing/spectest/minimal/deneb__operations__execution_payload_test.go b/testing/spectest/minimal/deneb__operations__execution_payload_test.go index 1a6f36af0b..c6e7a93295 100644 --- a/testing/spectest/minimal/deneb__operations__execution_payload_test.go +++ b/testing/spectest/minimal/deneb__operations__execution_payload_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/deneb/operations" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/deneb/operations" ) func TestMinimal_Deneb_Operations_PayloadExecution(t *testing.T) { diff --git a/testing/spectest/minimal/deneb__operations__proposer_slashing_test.go b/testing/spectest/minimal/deneb__operations__proposer_slashing_test.go index 6959737558..4ffaafb6ef 100644 --- a/testing/spectest/minimal/deneb__operations__proposer_slashing_test.go +++ b/testing/spectest/minimal/deneb__operations__proposer_slashing_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/deneb/operations" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/deneb/operations" ) func TestMinimal_Deneb_Operations_ProposerSlashing(t *testing.T) { diff --git a/testing/spectest/minimal/deneb__operations__sync_committee_test.go b/testing/spectest/minimal/deneb__operations__sync_committee_test.go index 43ad00012f..c6257de541 100644 --- a/testing/spectest/minimal/deneb__operations__sync_committee_test.go +++ b/testing/spectest/minimal/deneb__operations__sync_committee_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/deneb/operations" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/deneb/operations" ) func TestMinimal_Deneb_Operations_SyncCommittee(t *testing.T) { diff --git a/testing/spectest/minimal/deneb__operations__voluntary_exit_test.go b/testing/spectest/minimal/deneb__operations__voluntary_exit_test.go index b3452f182b..6677d57e62 100644 --- a/testing/spectest/minimal/deneb__operations__voluntary_exit_test.go +++ b/testing/spectest/minimal/deneb__operations__voluntary_exit_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/deneb/operations" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/deneb/operations" ) func TestMinimal_Deneb_Operations_VoluntaryExit(t *testing.T) { diff --git a/testing/spectest/minimal/deneb__operations__withdrawals_test.go 
b/testing/spectest/minimal/deneb__operations__withdrawals_test.go index e5a18a26b3..52a607fb86 100644 --- a/testing/spectest/minimal/deneb__operations__withdrawals_test.go +++ b/testing/spectest/minimal/deneb__operations__withdrawals_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/deneb/operations" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/deneb/operations" ) func TestMinimal_Deneb_Operations_Withdrawals(t *testing.T) { diff --git a/testing/spectest/minimal/deneb__random__random_test.go b/testing/spectest/minimal/deneb__random__random_test.go index ddf51eddf9..728b195dd7 100644 --- a/testing/spectest/minimal/deneb__random__random_test.go +++ b/testing/spectest/minimal/deneb__random__random_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/deneb/sanity" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/deneb/sanity" ) func TestMinimal_Deneb_Random(t *testing.T) { diff --git a/testing/spectest/minimal/deneb__rewards__rewards_test.go b/testing/spectest/minimal/deneb__rewards__rewards_test.go index 6b1bb50d9b..a0ba37f425 100644 --- a/testing/spectest/minimal/deneb__rewards__rewards_test.go +++ b/testing/spectest/minimal/deneb__rewards__rewards_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/deneb/rewards" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/deneb/rewards" ) func TestMinimal_Deneb_Rewards(t *testing.T) { diff --git a/testing/spectest/minimal/deneb__sanity__blocks_test.go b/testing/spectest/minimal/deneb__sanity__blocks_test.go index ba9c85a758..3dc6536f50 100644 --- a/testing/spectest/minimal/deneb__sanity__blocks_test.go +++ b/testing/spectest/minimal/deneb__sanity__blocks_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/deneb/sanity" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/deneb/sanity" ) func TestMinimal_Deneb_Sanity_Blocks(t *testing.T) { diff --git a/testing/spectest/minimal/deneb__sanity__slots_test.go b/testing/spectest/minimal/deneb__sanity__slots_test.go index b4376a714e..d3b3bb4333 100644 --- a/testing/spectest/minimal/deneb__sanity__slots_test.go +++ b/testing/spectest/minimal/deneb__sanity__slots_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/deneb/sanity" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/deneb/sanity" ) func TestMinimal_Deneb_Sanity_Slots(t *testing.T) { diff --git a/testing/spectest/minimal/deneb__ssz_static__ssz_static_test.go b/testing/spectest/minimal/deneb__ssz_static__ssz_static_test.go index bcfb244f97..81e312390c 100644 --- a/testing/spectest/minimal/deneb__ssz_static__ssz_static_test.go +++ b/testing/spectest/minimal/deneb__ssz_static__ssz_static_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/deneb/ssz_static" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/deneb/ssz_static" ) func TestMinimal_Deneb_SSZStatic(t *testing.T) { diff --git a/testing/spectest/minimal/electra__epoch_processing__effective_balance_updates_test.go b/testing/spectest/minimal/electra__epoch_processing__effective_balance_updates_test.go index 5187907870..ee46b9a856 100644 --- a/testing/spectest/minimal/electra__epoch_processing__effective_balance_updates_test.go +++ 
b/testing/spectest/minimal/electra__epoch_processing__effective_balance_updates_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/electra/epoch_processing" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/electra/epoch_processing" ) func TestMinimal_electra_EpochProcessing_EffectiveBalanceUpdates(t *testing.T) { diff --git a/testing/spectest/minimal/electra__epoch_processing__eth1_data_reset_test.go b/testing/spectest/minimal/electra__epoch_processing__eth1_data_reset_test.go index 16357ec3aa..6a3c32be02 100644 --- a/testing/spectest/minimal/electra__epoch_processing__eth1_data_reset_test.go +++ b/testing/spectest/minimal/electra__epoch_processing__eth1_data_reset_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/electra/epoch_processing" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/electra/epoch_processing" ) func TestMinimal_Electra_EpochProcessing_Eth1DataReset(t *testing.T) { diff --git a/testing/spectest/minimal/electra__epoch_processing__historical_summaries_update_test.go b/testing/spectest/minimal/electra__epoch_processing__historical_summaries_update_test.go index 74a28835bf..faf98a72f1 100644 --- a/testing/spectest/minimal/electra__epoch_processing__historical_summaries_update_test.go +++ b/testing/spectest/minimal/electra__epoch_processing__historical_summaries_update_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/electra/epoch_processing" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/electra/epoch_processing" ) func TestMinimal_Electra_EpochProcessing_HistoricalSummariesUpdate(t *testing.T) { diff --git a/testing/spectest/minimal/electra__epoch_processing__inactivity_updates_test.go b/testing/spectest/minimal/electra__epoch_processing__inactivity_updates_test.go index ff214bb5ea..795f9d7b19 100644 --- a/testing/spectest/minimal/electra__epoch_processing__inactivity_updates_test.go +++ b/testing/spectest/minimal/electra__epoch_processing__inactivity_updates_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/electra/epoch_processing" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/electra/epoch_processing" ) func TestMinimal_Electra_EpochProcessing_InactivityUpdates(t *testing.T) { diff --git a/testing/spectest/minimal/electra__epoch_processing__justification_and_finalization_test.go b/testing/spectest/minimal/electra__epoch_processing__justification_and_finalization_test.go index 3daef31c57..e870968048 100644 --- a/testing/spectest/minimal/electra__epoch_processing__justification_and_finalization_test.go +++ b/testing/spectest/minimal/electra__epoch_processing__justification_and_finalization_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/electra/epoch_processing" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/electra/epoch_processing" ) func TestMinimal_Electra_EpochProcessing_JustificationAndFinalization(t *testing.T) { diff --git a/testing/spectest/minimal/electra__epoch_processing__participation_flag_updates_test.go b/testing/spectest/minimal/electra__epoch_processing__participation_flag_updates_test.go index 1d2dd09fa0..5b1ef8c712 100644 --- a/testing/spectest/minimal/electra__epoch_processing__participation_flag_updates_test.go +++ 
b/testing/spectest/minimal/electra__epoch_processing__participation_flag_updates_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/electra/epoch_processing" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/electra/epoch_processing" ) func TestMinimal_Electra_EpochProcessing_ParticipationFlag(t *testing.T) { diff --git a/testing/spectest/minimal/electra__epoch_processing__pending_consolidations_test.go b/testing/spectest/minimal/electra__epoch_processing__pending_consolidations_test.go index c471efd52c..f198f485db 100644 --- a/testing/spectest/minimal/electra__epoch_processing__pending_consolidations_test.go +++ b/testing/spectest/minimal/electra__epoch_processing__pending_consolidations_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/electra/epoch_processing" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/electra/epoch_processing" ) func TestMinimal_Electra_EpochProcessing_PendingConsolidations(t *testing.T) { diff --git a/testing/spectest/minimal/electra__epoch_processing__pending_deposits_updates_test.go b/testing/spectest/minimal/electra__epoch_processing__pending_deposits_updates_test.go index f5195158e9..4fc67bc4b4 100644 --- a/testing/spectest/minimal/electra__epoch_processing__pending_deposits_updates_test.go +++ b/testing/spectest/minimal/electra__epoch_processing__pending_deposits_updates_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/electra/epoch_processing" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/electra/epoch_processing" ) func TestMinimal_Electra_EpochProcessing_PendingDeposits(t *testing.T) { diff --git a/testing/spectest/minimal/electra__epoch_processing__randao_mixes_reset_test.go b/testing/spectest/minimal/electra__epoch_processing__randao_mixes_reset_test.go index 88bab1f907..0eb2f58f5d 100644 --- a/testing/spectest/minimal/electra__epoch_processing__randao_mixes_reset_test.go +++ b/testing/spectest/minimal/electra__epoch_processing__randao_mixes_reset_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/electra/epoch_processing" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/electra/epoch_processing" ) func TestMinimal_Electra_EpochProcessing_RandaoMixesReset(t *testing.T) { diff --git a/testing/spectest/minimal/electra__epoch_processing__registry_updates_test.go b/testing/spectest/minimal/electra__epoch_processing__registry_updates_test.go index eee834588c..9c1f119d8b 100644 --- a/testing/spectest/minimal/electra__epoch_processing__registry_updates_test.go +++ b/testing/spectest/minimal/electra__epoch_processing__registry_updates_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/electra/epoch_processing" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/electra/epoch_processing" ) func TestMinimal_Electra_EpochProcessing_RegistryUpdates(t *testing.T) { diff --git a/testing/spectest/minimal/electra__epoch_processing__rewards_and_penalties_test.go b/testing/spectest/minimal/electra__epoch_processing__rewards_and_penalties_test.go index acbeee8787..4fb944ba55 100644 --- a/testing/spectest/minimal/electra__epoch_processing__rewards_and_penalties_test.go +++ b/testing/spectest/minimal/electra__epoch_processing__rewards_and_penalties_test.go @@ -3,7 +3,7 @@ package 
minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/electra/epoch_processing" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/electra/epoch_processing" ) func TestMinimal_Electra_EpochProcessing_RewardsAndPenalties(t *testing.T) { diff --git a/testing/spectest/minimal/electra__epoch_processing__slashings_reset_test.go b/testing/spectest/minimal/electra__epoch_processing__slashings_reset_test.go index a6dd55414c..0f456314e5 100644 --- a/testing/spectest/minimal/electra__epoch_processing__slashings_reset_test.go +++ b/testing/spectest/minimal/electra__epoch_processing__slashings_reset_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/electra/epoch_processing" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/electra/epoch_processing" ) func TestMinimal_Electra_EpochProcessing_SlashingsReset(t *testing.T) { diff --git a/testing/spectest/minimal/electra__epoch_processing__slashings_test.go b/testing/spectest/minimal/electra__epoch_processing__slashings_test.go index 127ccc8a6a..09fbf022f5 100644 --- a/testing/spectest/minimal/electra__epoch_processing__slashings_test.go +++ b/testing/spectest/minimal/electra__epoch_processing__slashings_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/electra/epoch_processing" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/electra/epoch_processing" ) func TestMinimal_Electra_EpochProcessing_Slashings(t *testing.T) { diff --git a/testing/spectest/minimal/electra__epoch_processing__sync_committee_updates_test.go b/testing/spectest/minimal/electra__epoch_processing__sync_committee_updates_test.go index 9c19b4b3db..80d1af8eee 100644 --- a/testing/spectest/minimal/electra__epoch_processing__sync_committee_updates_test.go +++ b/testing/spectest/minimal/electra__epoch_processing__sync_committee_updates_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/electra/epoch_processing" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/electra/epoch_processing" ) func TestMinimal_Electra_EpochProcessing_SyncCommitteeUpdates(t *testing.T) { diff --git a/testing/spectest/minimal/electra__finality__finality_test.go b/testing/spectest/minimal/electra__finality__finality_test.go index 9c6239d795..c11e44ca15 100644 --- a/testing/spectest/minimal/electra__finality__finality_test.go +++ b/testing/spectest/minimal/electra__finality__finality_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/electra/finality" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/electra/finality" ) func TestMinimal_Electra_Finality(t *testing.T) { diff --git a/testing/spectest/minimal/electra__fork__upgrade_to_electra_test.go b/testing/spectest/minimal/electra__fork__upgrade_to_electra_test.go index 2acd3b50d7..d07bed880d 100644 --- a/testing/spectest/minimal/electra__fork__upgrade_to_electra_test.go +++ b/testing/spectest/minimal/electra__fork__upgrade_to_electra_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/electra/fork" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/electra/fork" ) func TestMinimal_UpgradeToElectra(t *testing.T) { diff --git a/testing/spectest/minimal/electra__fork_transition__transition_test.go 
b/testing/spectest/minimal/electra__fork_transition__transition_test.go index 6a6269ef77..3ab4b2ad19 100644 --- a/testing/spectest/minimal/electra__fork_transition__transition_test.go +++ b/testing/spectest/minimal/electra__fork_transition__transition_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/electra/fork" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/electra/fork" ) func TestMinimal_Electra_Transition(t *testing.T) { diff --git a/testing/spectest/minimal/electra__forkchoice__forkchoice_test.go b/testing/spectest/minimal/electra__forkchoice__forkchoice_test.go index 91beb2f5b1..330b366dc1 100644 --- a/testing/spectest/minimal/electra__forkchoice__forkchoice_test.go +++ b/testing/spectest/minimal/electra__forkchoice__forkchoice_test.go @@ -3,8 +3,8 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/runtime/version" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/common/forkchoice" + "github.com/OffchainLabs/prysm/v7/runtime/version" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/common/forkchoice" ) func TestMinimal_Electra_Forkchoice(t *testing.T) { diff --git a/testing/spectest/minimal/electra__light_client__single_merkle_proof_test.go b/testing/spectest/minimal/electra__light_client__single_merkle_proof_test.go index 78a6701f86..e0076ebe4d 100644 --- a/testing/spectest/minimal/electra__light_client__single_merkle_proof_test.go +++ b/testing/spectest/minimal/electra__light_client__single_merkle_proof_test.go @@ -3,8 +3,8 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/runtime/version" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/common/light_client" + "github.com/OffchainLabs/prysm/v7/runtime/version" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/common/light_client" ) func TestMinimal_Electra_LightClient_SingleMerkleProof(t *testing.T) { diff --git a/testing/spectest/minimal/electra__light_client__update_ranking_test.go b/testing/spectest/minimal/electra__light_client__update_ranking_test.go index 8903e774aa..4ed42d78dc 100644 --- a/testing/spectest/minimal/electra__light_client__update_ranking_test.go +++ b/testing/spectest/minimal/electra__light_client__update_ranking_test.go @@ -3,8 +3,8 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/runtime/version" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/common/light_client" + "github.com/OffchainLabs/prysm/v7/runtime/version" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/common/light_client" ) func TestMinimal_Electra_LightClient_UpdateRanking(t *testing.T) { diff --git a/testing/spectest/minimal/electra__merkle_proof__merkle_proof_test.go b/testing/spectest/minimal/electra__merkle_proof__merkle_proof_test.go index 3b4c162dff..6e8bbf2148 100644 --- a/testing/spectest/minimal/electra__merkle_proof__merkle_proof_test.go +++ b/testing/spectest/minimal/electra__merkle_proof__merkle_proof_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/electra/merkle_proof" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/electra/merkle_proof" ) func TestMinimal_Electra_MerkleProof(t *testing.T) { diff --git a/testing/spectest/minimal/electra__operations__attestation_test.go b/testing/spectest/minimal/electra__operations__attestation_test.go index ebdfbf3321..5b0db2e44e 100644 --- 
a/testing/spectest/minimal/electra__operations__attestation_test.go +++ b/testing/spectest/minimal/electra__operations__attestation_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/electra/operations" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/electra/operations" ) func TestMinimal_Electra_Operations_Attestation(t *testing.T) { diff --git a/testing/spectest/minimal/electra__operations__attester_slashing_test.go b/testing/spectest/minimal/electra__operations__attester_slashing_test.go index 7814394f44..5cc0b4a655 100644 --- a/testing/spectest/minimal/electra__operations__attester_slashing_test.go +++ b/testing/spectest/minimal/electra__operations__attester_slashing_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/electra/operations" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/electra/operations" ) func TestMinimal_Electra_Operations_AttesterSlashing(t *testing.T) { diff --git a/testing/spectest/minimal/electra__operations__block_header_test.go b/testing/spectest/minimal/electra__operations__block_header_test.go index 900c404c82..529e3045d9 100644 --- a/testing/spectest/minimal/electra__operations__block_header_test.go +++ b/testing/spectest/minimal/electra__operations__block_header_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/electra/operations" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/electra/operations" ) func TestMinimal_Electra_Operations_BlockHeader(t *testing.T) { diff --git a/testing/spectest/minimal/electra__operations__bls_to_execution_change_test.go b/testing/spectest/minimal/electra__operations__bls_to_execution_change_test.go index 98ef63da93..1ac9316e61 100644 --- a/testing/spectest/minimal/electra__operations__bls_to_execution_change_test.go +++ b/testing/spectest/minimal/electra__operations__bls_to_execution_change_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/electra/operations" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/electra/operations" ) func TestMinimal_Electra_Operations_BLSToExecutionChange(t *testing.T) { diff --git a/testing/spectest/minimal/electra__operations__consolidation_test.go b/testing/spectest/minimal/electra__operations__consolidation_test.go index 83b70a94d5..a4a5fcf4ba 100644 --- a/testing/spectest/minimal/electra__operations__consolidation_test.go +++ b/testing/spectest/minimal/electra__operations__consolidation_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/electra/operations" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/electra/operations" ) func TestMinimal_Electra_Operations_Consolidation(t *testing.T) { diff --git a/testing/spectest/minimal/electra__operations__deposit_requests_test.go b/testing/spectest/minimal/electra__operations__deposit_requests_test.go index ed33524d8b..af64358319 100644 --- a/testing/spectest/minimal/electra__operations__deposit_requests_test.go +++ b/testing/spectest/minimal/electra__operations__deposit_requests_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/electra/operations" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/electra/operations" ) func TestMainnet_Electra_Operations_DepositRequests(t 
*testing.T) { diff --git a/testing/spectest/minimal/electra__operations__deposit_test.go b/testing/spectest/minimal/electra__operations__deposit_test.go index 8e1ab20482..e697909316 100644 --- a/testing/spectest/minimal/electra__operations__deposit_test.go +++ b/testing/spectest/minimal/electra__operations__deposit_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/electra/operations" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/electra/operations" ) func TestMinimal_Electra_Operations_Deposit(t *testing.T) { diff --git a/testing/spectest/minimal/electra__operations__execution_layer_withdrawals_test.go b/testing/spectest/minimal/electra__operations__execution_layer_withdrawals_test.go index 69f2013943..a38ae3b87e 100644 --- a/testing/spectest/minimal/electra__operations__execution_layer_withdrawals_test.go +++ b/testing/spectest/minimal/electra__operations__execution_layer_withdrawals_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/electra/operations" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/electra/operations" ) func TestMinimal_Electra_Operations_WithdrawalRequest(t *testing.T) { diff --git a/testing/spectest/minimal/electra__operations__execution_payload_test.go b/testing/spectest/minimal/electra__operations__execution_payload_test.go index 242a601f01..75746dc3de 100644 --- a/testing/spectest/minimal/electra__operations__execution_payload_test.go +++ b/testing/spectest/minimal/electra__operations__execution_payload_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/electra/operations" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/electra/operations" ) func TestMinimal_Electra_Operations_PayloadExecution(t *testing.T) { diff --git a/testing/spectest/minimal/electra__operations__proposer_slashing_test.go b/testing/spectest/minimal/electra__operations__proposer_slashing_test.go index 67c45446e6..d6360a7f20 100644 --- a/testing/spectest/minimal/electra__operations__proposer_slashing_test.go +++ b/testing/spectest/minimal/electra__operations__proposer_slashing_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/electra/operations" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/electra/operations" ) func TestMinimal_Electra_Operations_ProposerSlashing(t *testing.T) { diff --git a/testing/spectest/minimal/electra__operations__sync_committee_test.go b/testing/spectest/minimal/electra__operations__sync_committee_test.go index f716db35cc..ca36953b15 100644 --- a/testing/spectest/minimal/electra__operations__sync_committee_test.go +++ b/testing/spectest/minimal/electra__operations__sync_committee_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/electra/operations" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/electra/operations" ) func TestMinimal_Electra_Operations_SyncCommittee(t *testing.T) { diff --git a/testing/spectest/minimal/electra__operations__voluntary_exit_test.go b/testing/spectest/minimal/electra__operations__voluntary_exit_test.go index e7bb9c4394..aa768ed6db 100644 --- a/testing/spectest/minimal/electra__operations__voluntary_exit_test.go +++ b/testing/spectest/minimal/electra__operations__voluntary_exit_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - 
"github.com/OffchainLabs/prysm/v6/testing/spectest/shared/electra/operations" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/electra/operations" ) func TestMinimal_Electra_Operations_VoluntaryExit(t *testing.T) { diff --git a/testing/spectest/minimal/electra__operations__withdrawals_test.go b/testing/spectest/minimal/electra__operations__withdrawals_test.go index 30d904df9f..918a87f7b8 100644 --- a/testing/spectest/minimal/electra__operations__withdrawals_test.go +++ b/testing/spectest/minimal/electra__operations__withdrawals_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/electra/operations" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/electra/operations" ) func TestMinimal_Electra_Operations_Withdrawals(t *testing.T) { diff --git a/testing/spectest/minimal/electra__random__random_test.go b/testing/spectest/minimal/electra__random__random_test.go index cb282b8a3b..30c1632e74 100644 --- a/testing/spectest/minimal/electra__random__random_test.go +++ b/testing/spectest/minimal/electra__random__random_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/electra/sanity" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/electra/sanity" ) func TestMinimal_Electra_Random(t *testing.T) { diff --git a/testing/spectest/minimal/electra__rewards__rewards_test.go b/testing/spectest/minimal/electra__rewards__rewards_test.go index d9f91928c8..e547f0820a 100644 --- a/testing/spectest/minimal/electra__rewards__rewards_test.go +++ b/testing/spectest/minimal/electra__rewards__rewards_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/electra/rewards" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/electra/rewards" ) func TestMinimal_Electra_Rewards(t *testing.T) { diff --git a/testing/spectest/minimal/electra__sanity__blocks_test.go b/testing/spectest/minimal/electra__sanity__blocks_test.go index 5fad1d6bb6..d5d216c178 100644 --- a/testing/spectest/minimal/electra__sanity__blocks_test.go +++ b/testing/spectest/minimal/electra__sanity__blocks_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/electra/sanity" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/electra/sanity" ) func TestMinimal_Electra_Sanity_Blocks(t *testing.T) { diff --git a/testing/spectest/minimal/electra__sanity__slots_test.go b/testing/spectest/minimal/electra__sanity__slots_test.go index d0bf1322c8..f979b73e46 100644 --- a/testing/spectest/minimal/electra__sanity__slots_test.go +++ b/testing/spectest/minimal/electra__sanity__slots_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/electra/sanity" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/electra/sanity" ) func TestMinimal_Electra_Sanity_Slots(t *testing.T) { diff --git a/testing/spectest/minimal/electra__ssz_static__ssz_static_test.go b/testing/spectest/minimal/electra__ssz_static__ssz_static_test.go index 197641491d..f6e4c1cbee 100644 --- a/testing/spectest/minimal/electra__ssz_static__ssz_static_test.go +++ b/testing/spectest/minimal/electra__ssz_static__ssz_static_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/electra/ssz_static" + 
"github.com/OffchainLabs/prysm/v7/testing/spectest/shared/electra/ssz_static" ) func TestMinimal_Electra_SSZStatic(t *testing.T) { diff --git a/testing/spectest/minimal/fulu__epoch_processing__effective_balance_updates_test.go b/testing/spectest/minimal/fulu__epoch_processing__effective_balance_updates_test.go index cbc83036cb..0614a4d603 100644 --- a/testing/spectest/minimal/fulu__epoch_processing__effective_balance_updates_test.go +++ b/testing/spectest/minimal/fulu__epoch_processing__effective_balance_updates_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/fulu/epoch_processing" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/fulu/epoch_processing" ) func TestMinimal_fulu_EpochProcessing_EffectiveBalanceUpdates(t *testing.T) { diff --git a/testing/spectest/minimal/fulu__epoch_processing__eth1_data_reset_test.go b/testing/spectest/minimal/fulu__epoch_processing__eth1_data_reset_test.go index 461c243bd2..3c32276c7e 100644 --- a/testing/spectest/minimal/fulu__epoch_processing__eth1_data_reset_test.go +++ b/testing/spectest/minimal/fulu__epoch_processing__eth1_data_reset_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/fulu/epoch_processing" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/fulu/epoch_processing" ) func TestMinimal_Fulu_EpochProcessing_Eth1DataReset(t *testing.T) { diff --git a/testing/spectest/minimal/fulu__epoch_processing__historical_summaries_update_test.go b/testing/spectest/minimal/fulu__epoch_processing__historical_summaries_update_test.go index d2b2c7b156..6f7c2fcd78 100644 --- a/testing/spectest/minimal/fulu__epoch_processing__historical_summaries_update_test.go +++ b/testing/spectest/minimal/fulu__epoch_processing__historical_summaries_update_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/fulu/epoch_processing" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/fulu/epoch_processing" ) func TestMinimal_Fulu_EpochProcessing_HistoricalSummariesUpdate(t *testing.T) { diff --git a/testing/spectest/minimal/fulu__epoch_processing__inactivity_updates_test.go b/testing/spectest/minimal/fulu__epoch_processing__inactivity_updates_test.go index de27dc7a30..5dedaa0190 100644 --- a/testing/spectest/minimal/fulu__epoch_processing__inactivity_updates_test.go +++ b/testing/spectest/minimal/fulu__epoch_processing__inactivity_updates_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/fulu/epoch_processing" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/fulu/epoch_processing" ) func TestMinimal_Fulu_EpochProcessing_InactivityUpdates(t *testing.T) { diff --git a/testing/spectest/minimal/fulu__epoch_processing__justification_and_finalization_test.go b/testing/spectest/minimal/fulu__epoch_processing__justification_and_finalization_test.go index 885b39e735..27d1b0959c 100644 --- a/testing/spectest/minimal/fulu__epoch_processing__justification_and_finalization_test.go +++ b/testing/spectest/minimal/fulu__epoch_processing__justification_and_finalization_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/fulu/epoch_processing" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/fulu/epoch_processing" ) func TestMinimal_Fulu_EpochProcessing_JustificationAndFinalization(t *testing.T) { diff --git 
a/testing/spectest/minimal/fulu__epoch_processing__participation_flag_updates_test.go b/testing/spectest/minimal/fulu__epoch_processing__participation_flag_updates_test.go index 1e0b99cefa..a71b1e9406 100644 --- a/testing/spectest/minimal/fulu__epoch_processing__participation_flag_updates_test.go +++ b/testing/spectest/minimal/fulu__epoch_processing__participation_flag_updates_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/fulu/epoch_processing" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/fulu/epoch_processing" ) func TestMinimal_Fulu_EpochProcessing_ParticipationFlag(t *testing.T) { diff --git a/testing/spectest/minimal/fulu__epoch_processing__pending_consolidations_test.go b/testing/spectest/minimal/fulu__epoch_processing__pending_consolidations_test.go index 4ddd52703c..19f346ac88 100644 --- a/testing/spectest/minimal/fulu__epoch_processing__pending_consolidations_test.go +++ b/testing/spectest/minimal/fulu__epoch_processing__pending_consolidations_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/fulu/epoch_processing" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/fulu/epoch_processing" ) func TestMinimal_Fulu_EpochProcessing_PendingConsolidations(t *testing.T) { diff --git a/testing/spectest/minimal/fulu__epoch_processing__pending_deposits_updates_test.go b/testing/spectest/minimal/fulu__epoch_processing__pending_deposits_updates_test.go index 5723a68611..d8b7a72448 100644 --- a/testing/spectest/minimal/fulu__epoch_processing__pending_deposits_updates_test.go +++ b/testing/spectest/minimal/fulu__epoch_processing__pending_deposits_updates_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/fulu/epoch_processing" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/fulu/epoch_processing" ) func TestMinimal_Fulu_EpochProcessing_PendingDeposits(t *testing.T) { diff --git a/testing/spectest/minimal/fulu__epoch_processing__proposer_lookahead_test.go b/testing/spectest/minimal/fulu__epoch_processing__proposer_lookahead_test.go index 1aa04028f4..54799d1f83 100644 --- a/testing/spectest/minimal/fulu__epoch_processing__proposer_lookahead_test.go +++ b/testing/spectest/minimal/fulu__epoch_processing__proposer_lookahead_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/fulu/epoch_processing" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/fulu/epoch_processing" ) func TestMinimal_fulu_EpochProcessing_ProposerLookahead(t *testing.T) { diff --git a/testing/spectest/minimal/fulu__epoch_processing__randao_mixes_reset_test.go b/testing/spectest/minimal/fulu__epoch_processing__randao_mixes_reset_test.go index 34f7faffe9..80a10a52e6 100644 --- a/testing/spectest/minimal/fulu__epoch_processing__randao_mixes_reset_test.go +++ b/testing/spectest/minimal/fulu__epoch_processing__randao_mixes_reset_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/fulu/epoch_processing" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/fulu/epoch_processing" ) func TestMinimal_Fulu_EpochProcessing_RandaoMixesReset(t *testing.T) { diff --git a/testing/spectest/minimal/fulu__epoch_processing__registry_updates_test.go b/testing/spectest/minimal/fulu__epoch_processing__registry_updates_test.go index d7025e7bd6..87a5582f6a 100644 --- 
a/testing/spectest/minimal/fulu__epoch_processing__registry_updates_test.go +++ b/testing/spectest/minimal/fulu__epoch_processing__registry_updates_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/fulu/epoch_processing" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/fulu/epoch_processing" ) func TestMinimal_Fulu_EpochProcessing_RegistryUpdates(t *testing.T) { diff --git a/testing/spectest/minimal/fulu__epoch_processing__rewards_and_penalties_test.go b/testing/spectest/minimal/fulu__epoch_processing__rewards_and_penalties_test.go index 2a0f7e27df..7578e227a4 100644 --- a/testing/spectest/minimal/fulu__epoch_processing__rewards_and_penalties_test.go +++ b/testing/spectest/minimal/fulu__epoch_processing__rewards_and_penalties_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/fulu/epoch_processing" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/fulu/epoch_processing" ) func TestMinimal_Fulu_EpochProcessing_RewardsAndPenalties(t *testing.T) { diff --git a/testing/spectest/minimal/fulu__epoch_processing__slashings_reset_test.go b/testing/spectest/minimal/fulu__epoch_processing__slashings_reset_test.go index 4d06d23ffc..a591124b25 100644 --- a/testing/spectest/minimal/fulu__epoch_processing__slashings_reset_test.go +++ b/testing/spectest/minimal/fulu__epoch_processing__slashings_reset_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/fulu/epoch_processing" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/fulu/epoch_processing" ) func TestMinimal_Fulu_EpochProcessing_SlashingsReset(t *testing.T) { diff --git a/testing/spectest/minimal/fulu__epoch_processing__slashings_test.go b/testing/spectest/minimal/fulu__epoch_processing__slashings_test.go index 8d0fcccc1a..08ee946b67 100644 --- a/testing/spectest/minimal/fulu__epoch_processing__slashings_test.go +++ b/testing/spectest/minimal/fulu__epoch_processing__slashings_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/fulu/epoch_processing" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/fulu/epoch_processing" ) func TestMinimal_Fulu_EpochProcessing_Slashings(t *testing.T) { diff --git a/testing/spectest/minimal/fulu__epoch_processing__sync_committee_updates_test.go b/testing/spectest/minimal/fulu__epoch_processing__sync_committee_updates_test.go index 5e3d85dae3..e2dcd201aa 100644 --- a/testing/spectest/minimal/fulu__epoch_processing__sync_committee_updates_test.go +++ b/testing/spectest/minimal/fulu__epoch_processing__sync_committee_updates_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/fulu/epoch_processing" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/fulu/epoch_processing" ) func TestMinimal_Fulu_EpochProcessing_SyncCommitteeUpdates(t *testing.T) { diff --git a/testing/spectest/minimal/fulu__finality__finality_test.go b/testing/spectest/minimal/fulu__finality__finality_test.go index 716718ae05..fb09b07d12 100644 --- a/testing/spectest/minimal/fulu__finality__finality_test.go +++ b/testing/spectest/minimal/fulu__finality__finality_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/fulu/finality" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/fulu/finality" ) func 
TestMinimal_Fulu_Finality(t *testing.T) { diff --git a/testing/spectest/minimal/fulu__fork__upgrade_to_fulu_test.go b/testing/spectest/minimal/fulu__fork__upgrade_to_fulu_test.go index 540ad38ec8..a7c76b80e7 100644 --- a/testing/spectest/minimal/fulu__fork__upgrade_to_fulu_test.go +++ b/testing/spectest/minimal/fulu__fork__upgrade_to_fulu_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/fulu/fork" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/fulu/fork" ) func TestMinimal_UpgradeToFulu(t *testing.T) { diff --git a/testing/spectest/minimal/fulu__fork_transition__transition_test.go b/testing/spectest/minimal/fulu__fork_transition__transition_test.go index 6e3f143302..14633b5254 100644 --- a/testing/spectest/minimal/fulu__fork_transition__transition_test.go +++ b/testing/spectest/minimal/fulu__fork_transition__transition_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/fulu/fork" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/fulu/fork" ) func TestMinimal_Fulu_Transition(t *testing.T) { diff --git a/testing/spectest/minimal/fulu__forkchoice__forkchoice_test.go b/testing/spectest/minimal/fulu__forkchoice__forkchoice_test.go index 7b657c6367..ac474586cd 100644 --- a/testing/spectest/minimal/fulu__forkchoice__forkchoice_test.go +++ b/testing/spectest/minimal/fulu__forkchoice__forkchoice_test.go @@ -3,8 +3,8 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/runtime/version" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/common/forkchoice" + "github.com/OffchainLabs/prysm/v7/runtime/version" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/common/forkchoice" ) func TestMinimal_Fulu_Forkchoice(t *testing.T) { diff --git a/testing/spectest/minimal/fulu__light_client__single_merkle_proof_test.go b/testing/spectest/minimal/fulu__light_client__single_merkle_proof_test.go index 819166b3e1..275063e6ed 100644 --- a/testing/spectest/minimal/fulu__light_client__single_merkle_proof_test.go +++ b/testing/spectest/minimal/fulu__light_client__single_merkle_proof_test.go @@ -3,8 +3,8 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/runtime/version" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/common/light_client" + "github.com/OffchainLabs/prysm/v7/runtime/version" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/common/light_client" ) func TestMinimal_Fulu_LightClient_SingleMerkleProof(t *testing.T) { diff --git a/testing/spectest/minimal/fulu__merkle_proof__merkle_proof_test.go b/testing/spectest/minimal/fulu__merkle_proof__merkle_proof_test.go index 36e861721b..526359c61f 100644 --- a/testing/spectest/minimal/fulu__merkle_proof__merkle_proof_test.go +++ b/testing/spectest/minimal/fulu__merkle_proof__merkle_proof_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/fulu/merkle_proof" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/fulu/merkle_proof" ) func TestMinimal_Fulu_MerkleProof(t *testing.T) { diff --git a/testing/spectest/minimal/fulu__networking__custody_columns_test.go b/testing/spectest/minimal/fulu__networking__custody_columns_test.go index 040c1ff31f..6d5d4c7ed2 100644 --- a/testing/spectest/minimal/fulu__networking__custody_columns_test.go +++ b/testing/spectest/minimal/fulu__networking__custody_columns_test.go @@ -3,7 +3,7 @@ package minimal import ( 
"testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/fulu/networking" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/fulu/networking" ) func TestMainnet_Fulu_Networking_CustodyGroups(t *testing.T) { diff --git a/testing/spectest/minimal/fulu__operations__attestation_test.go b/testing/spectest/minimal/fulu__operations__attestation_test.go index ebad524a16..4d82f82b8f 100644 --- a/testing/spectest/minimal/fulu__operations__attestation_test.go +++ b/testing/spectest/minimal/fulu__operations__attestation_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/fulu/operations" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/fulu/operations" ) func TestMinimal_Fulu_Operations_Attestation(t *testing.T) { diff --git a/testing/spectest/minimal/fulu__operations__attester_slashing_test.go b/testing/spectest/minimal/fulu__operations__attester_slashing_test.go index 573ef8e05f..32b35fda07 100644 --- a/testing/spectest/minimal/fulu__operations__attester_slashing_test.go +++ b/testing/spectest/minimal/fulu__operations__attester_slashing_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/fulu/operations" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/fulu/operations" ) func TestMinimal_Fulu_Operations_AttesterSlashing(t *testing.T) { diff --git a/testing/spectest/minimal/fulu__operations__block_header_test.go b/testing/spectest/minimal/fulu__operations__block_header_test.go index ffb1bd0fa6..e2da6384bc 100644 --- a/testing/spectest/minimal/fulu__operations__block_header_test.go +++ b/testing/spectest/minimal/fulu__operations__block_header_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/fulu/operations" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/fulu/operations" ) func TestMinimal_Fulu_Operations_BlockHeader(t *testing.T) { diff --git a/testing/spectest/minimal/fulu__operations__bls_to_execution_change_test.go b/testing/spectest/minimal/fulu__operations__bls_to_execution_change_test.go index 1356683739..7b358a036e 100644 --- a/testing/spectest/minimal/fulu__operations__bls_to_execution_change_test.go +++ b/testing/spectest/minimal/fulu__operations__bls_to_execution_change_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/fulu/operations" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/fulu/operations" ) func TestMinimal_Fulu_Operations_BLSToExecutionChange(t *testing.T) { diff --git a/testing/spectest/minimal/fulu__operations__consolidation_test.go b/testing/spectest/minimal/fulu__operations__consolidation_test.go index 2f6a761d8c..d2a948c619 100644 --- a/testing/spectest/minimal/fulu__operations__consolidation_test.go +++ b/testing/spectest/minimal/fulu__operations__consolidation_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/fulu/operations" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/fulu/operations" ) func TestMinimal_Fulu_Operations_Consolidation(t *testing.T) { diff --git a/testing/spectest/minimal/fulu__operations__deposit_requests_test.go b/testing/spectest/minimal/fulu__operations__deposit_requests_test.go index fb2779fc31..2d5897a6a2 100644 --- a/testing/spectest/minimal/fulu__operations__deposit_requests_test.go +++ 
b/testing/spectest/minimal/fulu__operations__deposit_requests_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/fulu/operations" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/fulu/operations" ) func TestMainnet_Fulu_Operations_DepositRequests(t *testing.T) { diff --git a/testing/spectest/minimal/fulu__operations__deposit_test.go b/testing/spectest/minimal/fulu__operations__deposit_test.go index b4974f943d..51522fd6c4 100644 --- a/testing/spectest/minimal/fulu__operations__deposit_test.go +++ b/testing/spectest/minimal/fulu__operations__deposit_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/fulu/operations" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/fulu/operations" ) func TestMinimal_Fulu_Operations_Deposit(t *testing.T) { diff --git a/testing/spectest/minimal/fulu__operations__execution_layer_withdrawals_test.go b/testing/spectest/minimal/fulu__operations__execution_layer_withdrawals_test.go index 34aa1e5c9b..e5e0f8682d 100644 --- a/testing/spectest/minimal/fulu__operations__execution_layer_withdrawals_test.go +++ b/testing/spectest/minimal/fulu__operations__execution_layer_withdrawals_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/fulu/operations" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/fulu/operations" ) func TestMinimal_Fulu_Operations_WithdrawalRequest(t *testing.T) { diff --git a/testing/spectest/minimal/fulu__operations__execution_payload_test.go b/testing/spectest/minimal/fulu__operations__execution_payload_test.go index cd84ef9a99..c47ac76dbf 100644 --- a/testing/spectest/minimal/fulu__operations__execution_payload_test.go +++ b/testing/spectest/minimal/fulu__operations__execution_payload_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/fulu/operations" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/fulu/operations" ) func TestMinimal_Fulu_Operations_PayloadExecution(t *testing.T) { diff --git a/testing/spectest/minimal/fulu__operations__proposer_slashing_test.go b/testing/spectest/minimal/fulu__operations__proposer_slashing_test.go index 3bd599c497..5bc6b37cb7 100644 --- a/testing/spectest/minimal/fulu__operations__proposer_slashing_test.go +++ b/testing/spectest/minimal/fulu__operations__proposer_slashing_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/fulu/operations" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/fulu/operations" ) func TestMinimal_Fulu_Operations_ProposerSlashing(t *testing.T) { diff --git a/testing/spectest/minimal/fulu__operations__sync_committee_test.go b/testing/spectest/minimal/fulu__operations__sync_committee_test.go index 2ec5e9495c..fd01011045 100644 --- a/testing/spectest/minimal/fulu__operations__sync_committee_test.go +++ b/testing/spectest/minimal/fulu__operations__sync_committee_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/fulu/operations" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/fulu/operations" ) func TestMinimal_Fulu_Operations_SyncCommittee(t *testing.T) { diff --git a/testing/spectest/minimal/fulu__operations__voluntary_exit_test.go b/testing/spectest/minimal/fulu__operations__voluntary_exit_test.go index 55dfdac984..423a4ebd2b 
100644 --- a/testing/spectest/minimal/fulu__operations__voluntary_exit_test.go +++ b/testing/spectest/minimal/fulu__operations__voluntary_exit_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/fulu/operations" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/fulu/operations" ) func TestMinimal_Fulu_Operations_VoluntaryExit(t *testing.T) { diff --git a/testing/spectest/minimal/fulu__operations__withdrawals_test.go b/testing/spectest/minimal/fulu__operations__withdrawals_test.go index 333fe00f5f..476eb7ea4b 100644 --- a/testing/spectest/minimal/fulu__operations__withdrawals_test.go +++ b/testing/spectest/minimal/fulu__operations__withdrawals_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/fulu/operations" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/fulu/operations" ) func TestMinimal_Fulu_Operations_Withdrawals(t *testing.T) { diff --git a/testing/spectest/minimal/fulu__random__random_test.go b/testing/spectest/minimal/fulu__random__random_test.go index 0a8de1b216..af76ac6d5f 100644 --- a/testing/spectest/minimal/fulu__random__random_test.go +++ b/testing/spectest/minimal/fulu__random__random_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/fulu/sanity" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/fulu/sanity" ) func TestMinimal_Fulu_Random(t *testing.T) { diff --git a/testing/spectest/minimal/fulu__rewards__rewards_test.go b/testing/spectest/minimal/fulu__rewards__rewards_test.go index d99d862dc5..2314604bf3 100644 --- a/testing/spectest/minimal/fulu__rewards__rewards_test.go +++ b/testing/spectest/minimal/fulu__rewards__rewards_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/fulu/rewards" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/fulu/rewards" ) func TestMinimal_Fulu_Rewards(t *testing.T) { diff --git a/testing/spectest/minimal/fulu__sanity__blocks_test.go b/testing/spectest/minimal/fulu__sanity__blocks_test.go index 941c0edd91..da50a0cd58 100644 --- a/testing/spectest/minimal/fulu__sanity__blocks_test.go +++ b/testing/spectest/minimal/fulu__sanity__blocks_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/fulu/sanity" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/fulu/sanity" ) func TestMinimal_Fulu_Sanity_Blocks(t *testing.T) { diff --git a/testing/spectest/minimal/fulu__sanity__slots_test.go b/testing/spectest/minimal/fulu__sanity__slots_test.go index bac3894e4f..cd5cf75f23 100644 --- a/testing/spectest/minimal/fulu__sanity__slots_test.go +++ b/testing/spectest/minimal/fulu__sanity__slots_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/fulu/sanity" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/fulu/sanity" ) func TestMinimal_Fulu_Sanity_Slots(t *testing.T) { diff --git a/testing/spectest/minimal/fulu__ssz_static__ssz_static_test.go b/testing/spectest/minimal/fulu__ssz_static__ssz_static_test.go index de11535379..6ea06a2d9c 100644 --- a/testing/spectest/minimal/fulu__ssz_static__ssz_static_test.go +++ b/testing/spectest/minimal/fulu__ssz_static__ssz_static_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/fulu/ssz_static" 
+ "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/fulu/ssz_static" ) func TestMinimal_Fulu_SSZStatic(t *testing.T) { diff --git a/testing/spectest/minimal/gloas__ssz_static__ssz_static_test.go b/testing/spectest/minimal/gloas__ssz_static__ssz_static_test.go index 381d6c0430..50e31d3a5f 100644 --- a/testing/spectest/minimal/gloas__ssz_static__ssz_static_test.go +++ b/testing/spectest/minimal/gloas__ssz_static__ssz_static_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/gloas/ssz_static" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/gloas/ssz_static" ) func TestMinimal_Gloas_SSZStatic(t *testing.T) { diff --git a/testing/spectest/minimal/phase0__epoch_processing__effective_balance_updates_test.go b/testing/spectest/minimal/phase0__epoch_processing__effective_balance_updates_test.go index b328a3c985..140d81a210 100644 --- a/testing/spectest/minimal/phase0__epoch_processing__effective_balance_updates_test.go +++ b/testing/spectest/minimal/phase0__epoch_processing__effective_balance_updates_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/phase0/epoch_processing" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/phase0/epoch_processing" ) func TestMinimal_Phase0_EpochProcessing_EffectiveBalanceUpdates(t *testing.T) { diff --git a/testing/spectest/minimal/phase0__epoch_processing__epoch_processing_test.go b/testing/spectest/minimal/phase0__epoch_processing__epoch_processing_test.go index 0190025328..567cab4f01 100644 --- a/testing/spectest/minimal/phase0__epoch_processing__epoch_processing_test.go +++ b/testing/spectest/minimal/phase0__epoch_processing__epoch_processing_test.go @@ -4,7 +4,7 @@ import ( "os" "testing" - "github.com/OffchainLabs/prysm/v6/config/params" + "github.com/OffchainLabs/prysm/v7/config/params" ) func TestMain(m *testing.M) { diff --git a/testing/spectest/minimal/phase0__epoch_processing__eth1_data_reset_test.go b/testing/spectest/minimal/phase0__epoch_processing__eth1_data_reset_test.go index 95e754d06a..ba92a246b7 100644 --- a/testing/spectest/minimal/phase0__epoch_processing__eth1_data_reset_test.go +++ b/testing/spectest/minimal/phase0__epoch_processing__eth1_data_reset_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/phase0/epoch_processing" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/phase0/epoch_processing" ) func TestMinimal_Phase0_EpochProcessing_Eth1DataReset(t *testing.T) { diff --git a/testing/spectest/minimal/phase0__epoch_processing__historical_roots_update_test.go b/testing/spectest/minimal/phase0__epoch_processing__historical_roots_update_test.go index 7fc16e16d8..8fc9749b52 100644 --- a/testing/spectest/minimal/phase0__epoch_processing__historical_roots_update_test.go +++ b/testing/spectest/minimal/phase0__epoch_processing__historical_roots_update_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/phase0/epoch_processing" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/phase0/epoch_processing" ) func TestMinimal_Phase0_EpochProcessing_HistoricalRootsUpdate(t *testing.T) { diff --git a/testing/spectest/minimal/phase0__epoch_processing__justification_and_finalization_test.go b/testing/spectest/minimal/phase0__epoch_processing__justification_and_finalization_test.go index 6428a5346b..9e57719739 100644 --- 
a/testing/spectest/minimal/phase0__epoch_processing__justification_and_finalization_test.go +++ b/testing/spectest/minimal/phase0__epoch_processing__justification_and_finalization_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/phase0/epoch_processing" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/phase0/epoch_processing" ) func TestMinimal_Phase0_EpochProcessing_JustificationAndFinalization(t *testing.T) { diff --git a/testing/spectest/minimal/phase0__epoch_processing__participation_record_updates_test.go b/testing/spectest/minimal/phase0__epoch_processing__participation_record_updates_test.go index 2aceb3822e..0c54237359 100644 --- a/testing/spectest/minimal/phase0__epoch_processing__participation_record_updates_test.go +++ b/testing/spectest/minimal/phase0__epoch_processing__participation_record_updates_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/phase0/epoch_processing" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/phase0/epoch_processing" ) func TestMinimal_Phase0_EpochProcessing_ParticipationRecordUpdates(t *testing.T) { diff --git a/testing/spectest/minimal/phase0__epoch_processing__randao_mixes_reset_test.go b/testing/spectest/minimal/phase0__epoch_processing__randao_mixes_reset_test.go index 789fb612e1..43f0564a9e 100644 --- a/testing/spectest/minimal/phase0__epoch_processing__randao_mixes_reset_test.go +++ b/testing/spectest/minimal/phase0__epoch_processing__randao_mixes_reset_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/phase0/epoch_processing" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/phase0/epoch_processing" ) func TestMinimal_Phase0_EpochProcessing_RandaoMixesReset(t *testing.T) { diff --git a/testing/spectest/minimal/phase0__epoch_processing__registry_updates_test.go b/testing/spectest/minimal/phase0__epoch_processing__registry_updates_test.go index d3a3738ea8..9133f5e68b 100644 --- a/testing/spectest/minimal/phase0__epoch_processing__registry_updates_test.go +++ b/testing/spectest/minimal/phase0__epoch_processing__registry_updates_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/phase0/epoch_processing" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/phase0/epoch_processing" ) func TestMinimal_Phase0_EpochProcessing_ResetRegistryUpdates(t *testing.T) { diff --git a/testing/spectest/minimal/phase0__epoch_processing__rewards_and_penalties_test.go b/testing/spectest/minimal/phase0__epoch_processing__rewards_and_penalties_test.go index 181a527a86..4756e56801 100644 --- a/testing/spectest/minimal/phase0__epoch_processing__rewards_and_penalties_test.go +++ b/testing/spectest/minimal/phase0__epoch_processing__rewards_and_penalties_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/phase0/epoch_processing" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/phase0/epoch_processing" ) func TestMinimal_Phase0_EpochProcessing_RewardsAndPenalties(t *testing.T) { diff --git a/testing/spectest/minimal/phase0__epoch_processing__slashings_reset_test.go b/testing/spectest/minimal/phase0__epoch_processing__slashings_reset_test.go index 2668354fb2..dc011f0b62 100644 --- a/testing/spectest/minimal/phase0__epoch_processing__slashings_reset_test.go +++ 
b/testing/spectest/minimal/phase0__epoch_processing__slashings_reset_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/phase0/epoch_processing" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/phase0/epoch_processing" ) func TestMinimal_Phase0_EpochProcessing_SlashingsReset(t *testing.T) { diff --git a/testing/spectest/minimal/phase0__epoch_processing__slashings_test.go b/testing/spectest/minimal/phase0__epoch_processing__slashings_test.go index 1ab33fe1d4..aa9223bd1a 100644 --- a/testing/spectest/minimal/phase0__epoch_processing__slashings_test.go +++ b/testing/spectest/minimal/phase0__epoch_processing__slashings_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/phase0/epoch_processing" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/phase0/epoch_processing" ) func TestMinimal_Phase0_EpochProcessing_Slashings(t *testing.T) { diff --git a/testing/spectest/minimal/phase0__finality__finality_test.go b/testing/spectest/minimal/phase0__finality__finality_test.go index 47d0df94a7..f1bb665e97 100644 --- a/testing/spectest/minimal/phase0__finality__finality_test.go +++ b/testing/spectest/minimal/phase0__finality__finality_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/phase0/finality" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/phase0/finality" ) func TestMinimal_Phase0_Finality(t *testing.T) { diff --git a/testing/spectest/minimal/phase0__operations__attestation_test.go b/testing/spectest/minimal/phase0__operations__attestation_test.go index 638bbeb401..d417a6a2c2 100644 --- a/testing/spectest/minimal/phase0__operations__attestation_test.go +++ b/testing/spectest/minimal/phase0__operations__attestation_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/phase0/operations" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/phase0/operations" ) func TestMinimal_Phase0_Operations_Attestation(t *testing.T) { diff --git a/testing/spectest/minimal/phase0__operations__attester_slashing_test.go b/testing/spectest/minimal/phase0__operations__attester_slashing_test.go index 13e64bb9f7..6dadd9106a 100644 --- a/testing/spectest/minimal/phase0__operations__attester_slashing_test.go +++ b/testing/spectest/minimal/phase0__operations__attester_slashing_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/phase0/operations" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/phase0/operations" ) func TestMinimal_Phase0_Operations_AttesterSlashing(t *testing.T) { diff --git a/testing/spectest/minimal/phase0__operations__block_header_test.go b/testing/spectest/minimal/phase0__operations__block_header_test.go index 4efb1b2faa..90d7430db4 100644 --- a/testing/spectest/minimal/phase0__operations__block_header_test.go +++ b/testing/spectest/minimal/phase0__operations__block_header_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/phase0/operations" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/phase0/operations" ) func TestMinimal_Phase0_Operations_BlockHeader(t *testing.T) { diff --git a/testing/spectest/minimal/phase0__operations__deposit_test.go b/testing/spectest/minimal/phase0__operations__deposit_test.go index 0b025d70a5..16a8b79211 100644 
--- a/testing/spectest/minimal/phase0__operations__deposit_test.go +++ b/testing/spectest/minimal/phase0__operations__deposit_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/phase0/operations" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/phase0/operations" ) func TestMinimal_Phase0_Operations_Deposit(t *testing.T) { diff --git a/testing/spectest/minimal/phase0__operations__proposer_slashing_test.go b/testing/spectest/minimal/phase0__operations__proposer_slashing_test.go index d460119c86..dfc33b5939 100644 --- a/testing/spectest/minimal/phase0__operations__proposer_slashing_test.go +++ b/testing/spectest/minimal/phase0__operations__proposer_slashing_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/phase0/operations" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/phase0/operations" ) func TestMinimal_Phase0_Operations_ProposerSlashing(t *testing.T) { diff --git a/testing/spectest/minimal/phase0__operations__voluntary_exit_test.go b/testing/spectest/minimal/phase0__operations__voluntary_exit_test.go index 5e301194bd..e47f08e18b 100644 --- a/testing/spectest/minimal/phase0__operations__voluntary_exit_test.go +++ b/testing/spectest/minimal/phase0__operations__voluntary_exit_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/phase0/operations" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/phase0/operations" ) func TestMinimal_Phase0_Operations_VoluntaryExit(t *testing.T) { diff --git a/testing/spectest/minimal/phase0__random__random_test.go b/testing/spectest/minimal/phase0__random__random_test.go index 89140b6850..7230b7ef75 100644 --- a/testing/spectest/minimal/phase0__random__random_test.go +++ b/testing/spectest/minimal/phase0__random__random_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/phase0/sanity" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/phase0/sanity" ) func TestMinimal_Phase0_Random(t *testing.T) { diff --git a/testing/spectest/minimal/phase0__rewards__rewards_test.go b/testing/spectest/minimal/phase0__rewards__rewards_test.go index 31d656680e..9de561bbd6 100644 --- a/testing/spectest/minimal/phase0__rewards__rewards_test.go +++ b/testing/spectest/minimal/phase0__rewards__rewards_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/phase0/rewards" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/phase0/rewards" ) func TestMinimal_Phase0_Rewards(t *testing.T) { diff --git a/testing/spectest/minimal/phase0__sanity__blocks_test.go b/testing/spectest/minimal/phase0__sanity__blocks_test.go index 1cadb77f40..c30d9beade 100644 --- a/testing/spectest/minimal/phase0__sanity__blocks_test.go +++ b/testing/spectest/minimal/phase0__sanity__blocks_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/phase0/sanity" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/phase0/sanity" ) func TestMinimal_Phase0_Sanity_Blocks(t *testing.T) { diff --git a/testing/spectest/minimal/phase0__sanity__slots_test.go b/testing/spectest/minimal/phase0__sanity__slots_test.go index bf2fb6e46d..9b54a3d637 100644 --- a/testing/spectest/minimal/phase0__sanity__slots_test.go +++ b/testing/spectest/minimal/phase0__sanity__slots_test.go 
@@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/phase0/sanity" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/phase0/sanity" ) func TestMinimal_Phase0_Sanity_Slots(t *testing.T) { diff --git a/testing/spectest/minimal/phase0__ssz_static__ssz_static_test.go b/testing/spectest/minimal/phase0__ssz_static__ssz_static_test.go index 8e7eaece32..98d0fd10f9 100644 --- a/testing/spectest/minimal/phase0__ssz_static__ssz_static_test.go +++ b/testing/spectest/minimal/phase0__ssz_static__ssz_static_test.go @@ -3,7 +3,7 @@ package minimal import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/phase0/ssz_static" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/phase0/ssz_static" ) func TestMinimal_Phase0_SSZStatic(t *testing.T) { diff --git a/testing/spectest/shared/altair/epoch_processing/BUILD.bazel b/testing/spectest/shared/altair/epoch_processing/BUILD.bazel index f1cd55a672..6de9f30ba4 100644 --- a/testing/spectest/shared/altair/epoch_processing/BUILD.bazel +++ b/testing/spectest/shared/altair/epoch_processing/BUILD.bazel @@ -17,7 +17,7 @@ go_library( "slashings.go", "slashings_reset.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/altair/epoch_processing", + importpath = "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/altair/epoch_processing", visibility = ["//testing/spectest:__subpackages__"], deps = [ "//beacon-chain/core/altair:go_default_library", diff --git a/testing/spectest/shared/altair/epoch_processing/effective_balance_updates.go b/testing/spectest/shared/altair/epoch_processing/effective_balance_updates.go index adb0fd2403..a232a14bd4 100644 --- a/testing/spectest/shared/altair/epoch_processing/effective_balance_updates.go +++ b/testing/spectest/shared/altair/epoch_processing/effective_balance_updates.go @@ -4,10 +4,10 @@ import ( "path" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/epoch" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/spectest/utils" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/epoch" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/spectest/utils" ) // RunEffectiveBalanceUpdatesTests executes "epoch_processing/effective_balance_updates" tests. diff --git a/testing/spectest/shared/altair/epoch_processing/eth1_data_reset.go b/testing/spectest/shared/altair/epoch_processing/eth1_data_reset.go index 38c015fa1b..2bb03ab414 100644 --- a/testing/spectest/shared/altair/epoch_processing/eth1_data_reset.go +++ b/testing/spectest/shared/altair/epoch_processing/eth1_data_reset.go @@ -4,10 +4,10 @@ import ( "path" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/epoch" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/spectest/utils" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/epoch" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/spectest/utils" ) // RunEth1DataResetTests executes "epoch_processing/eth1_data_reset" tests. 
diff --git a/testing/spectest/shared/altair/epoch_processing/helpers.go b/testing/spectest/shared/altair/epoch_processing/helpers.go index 1c9e3b2bbf..09c9982fea 100644 --- a/testing/spectest/shared/altair/epoch_processing/helpers.go +++ b/testing/spectest/shared/altair/epoch_processing/helpers.go @@ -6,11 +6,11 @@ import ( "strings" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" "github.com/bazelbuild/rules_go/go/tools/bazel" "github.com/golang/snappy" "github.com/google/go-cmp/cmp" diff --git a/testing/spectest/shared/altair/epoch_processing/historical_roots_update.go b/testing/spectest/shared/altair/epoch_processing/historical_roots_update.go index 606bc82c93..b4c7c3a66e 100644 --- a/testing/spectest/shared/altair/epoch_processing/historical_roots_update.go +++ b/testing/spectest/shared/altair/epoch_processing/historical_roots_update.go @@ -4,10 +4,10 @@ import ( "path" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/epoch" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/spectest/utils" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/epoch" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/spectest/utils" ) // RunHistoricalRootsUpdateTests executes "epoch_processing/historical_roots_update" tests. diff --git a/testing/spectest/shared/altair/epoch_processing/inactivity_updates.go b/testing/spectest/shared/altair/epoch_processing/inactivity_updates.go index 8d7635be2b..33f11bdd16 100644 --- a/testing/spectest/shared/altair/epoch_processing/inactivity_updates.go +++ b/testing/spectest/shared/altair/epoch_processing/inactivity_updates.go @@ -5,11 +5,11 @@ import ( "path" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/altair" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/spectest/utils" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/altair" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/spectest/utils" ) // RunInactivityUpdatesTest executes "epoch_processing/inactivity_updates" tests. 
diff --git a/testing/spectest/shared/altair/epoch_processing/justification_and_finalization.go b/testing/spectest/shared/altair/epoch_processing/justification_and_finalization.go index 370d96d83d..4cb4e6f5d2 100644 --- a/testing/spectest/shared/altair/epoch_processing/justification_and_finalization.go +++ b/testing/spectest/shared/altair/epoch_processing/justification_and_finalization.go @@ -5,11 +5,11 @@ import ( "path" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/altair" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/epoch/precompute" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/spectest/utils" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/altair" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/epoch/precompute" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/spectest/utils" ) // RunJustificationAndFinalizationTests executes "epoch_processing/justification_and_finalization" tests. diff --git a/testing/spectest/shared/altair/epoch_processing/participation_flag_updates.go b/testing/spectest/shared/altair/epoch_processing/participation_flag_updates.go index b6f4817932..c5fc600168 100644 --- a/testing/spectest/shared/altair/epoch_processing/participation_flag_updates.go +++ b/testing/spectest/shared/altair/epoch_processing/participation_flag_updates.go @@ -4,10 +4,10 @@ import ( "path" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/altair" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/spectest/utils" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/altair" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/spectest/utils" ) // RunParticipationFlagUpdatesTests executes "epoch_processing/participation_flag_updates" tests. diff --git a/testing/spectest/shared/altair/epoch_processing/randao_mixes_reset.go b/testing/spectest/shared/altair/epoch_processing/randao_mixes_reset.go index 92079a3646..e3a3eb3dcf 100644 --- a/testing/spectest/shared/altair/epoch_processing/randao_mixes_reset.go +++ b/testing/spectest/shared/altair/epoch_processing/randao_mixes_reset.go @@ -4,10 +4,10 @@ import ( "path" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/epoch" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/spectest/utils" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/epoch" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/spectest/utils" ) // RunRandaoMixesResetTests executes "epoch_processing/randao_mixes_reset" tests. 
diff --git a/testing/spectest/shared/altair/epoch_processing/registry_updates.go b/testing/spectest/shared/altair/epoch_processing/registry_updates.go index 6232386ba5..4658e52fa4 100644 --- a/testing/spectest/shared/altair/epoch_processing/registry_updates.go +++ b/testing/spectest/shared/altair/epoch_processing/registry_updates.go @@ -5,11 +5,11 @@ import ( "path" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/epoch" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/spectest/utils" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/epoch" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/spectest/utils" ) // RunRegistryUpdatesTests executes "epoch_processing/registry_updates" tests. diff --git a/testing/spectest/shared/altair/epoch_processing/rewards_and_penalties.go b/testing/spectest/shared/altair/epoch_processing/rewards_and_penalties.go index 3386947be7..22d41dd186 100644 --- a/testing/spectest/shared/altair/epoch_processing/rewards_and_penalties.go +++ b/testing/spectest/shared/altair/epoch_processing/rewards_and_penalties.go @@ -5,11 +5,11 @@ import ( "path" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/altair" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/spectest/utils" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/altair" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/spectest/utils" ) // RunRewardsAndPenaltiesTests executes "epoch_processing/rewards_and_penalties" tests. diff --git a/testing/spectest/shared/altair/epoch_processing/slashings.go b/testing/spectest/shared/altair/epoch_processing/slashings.go index 7e4867633d..6f6b2f73d6 100644 --- a/testing/spectest/shared/altair/epoch_processing/slashings.go +++ b/testing/spectest/shared/altair/epoch_processing/slashings.go @@ -4,11 +4,11 @@ import ( "path" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/epoch" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/spectest/utils" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/epoch" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/spectest/utils" ) // RunSlashingsTests executes "epoch_processing/slashings" tests. 
diff --git a/testing/spectest/shared/altair/epoch_processing/slashings_reset.go b/testing/spectest/shared/altair/epoch_processing/slashings_reset.go index 0f8babeadd..ac4d6f2a51 100644 --- a/testing/spectest/shared/altair/epoch_processing/slashings_reset.go +++ b/testing/spectest/shared/altair/epoch_processing/slashings_reset.go @@ -4,10 +4,10 @@ import ( "path" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/epoch" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/spectest/utils" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/epoch" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/spectest/utils" ) // RunSlashingsResetTests executes "epoch_processing/slashings_reset" tests. diff --git a/testing/spectest/shared/altair/finality/BUILD.bazel b/testing/spectest/shared/altair/finality/BUILD.bazel index ff7da0de80..d338089f26 100644 --- a/testing/spectest/shared/altair/finality/BUILD.bazel +++ b/testing/spectest/shared/altair/finality/BUILD.bazel @@ -4,7 +4,7 @@ go_library( name = "go_default_library", testonly = True, srcs = ["finality.go"], - importpath = "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/altair/finality", + importpath = "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/altair/finality", visibility = ["//visibility:public"], deps = [ "//beacon-chain/core/helpers:go_default_library", diff --git a/testing/spectest/shared/altair/finality/finality.go b/testing/spectest/shared/altair/finality/finality.go index 8dc8bc2e00..5c4a408bcf 100644 --- a/testing/spectest/shared/altair/finality/finality.go +++ b/testing/spectest/shared/altair/finality/finality.go @@ -5,15 +5,15 @@ import ( "fmt" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/transition" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/spectest/utils" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/transition" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/spectest/utils" + "github.com/OffchainLabs/prysm/v7/testing/util" "github.com/golang/snappy" "google.golang.org/protobuf/proto" ) diff --git a/testing/spectest/shared/altair/fork/BUILD.bazel b/testing/spectest/shared/altair/fork/BUILD.bazel index 9805afe4ec..31dd685886 100644 --- a/testing/spectest/shared/altair/fork/BUILD.bazel +++ b/testing/spectest/shared/altair/fork/BUILD.bazel @@ -7,7 +7,7 @@ go_library( "transition.go", "upgrade_to_altair.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/altair/fork", + importpath = "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/altair/fork", 
visibility = ["//visibility:public"], deps = [ "//beacon-chain/core/altair:go_default_library", diff --git a/testing/spectest/shared/altair/fork/transition.go b/testing/spectest/shared/altair/fork/transition.go index 0b3a58b6c8..49566e9fa8 100644 --- a/testing/spectest/shared/altair/fork/transition.go +++ b/testing/spectest/shared/altair/fork/transition.go @@ -5,16 +5,16 @@ import ( "fmt" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/transition" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/spectest/utils" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/transition" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/spectest/utils" + "github.com/OffchainLabs/prysm/v7/testing/util" "github.com/golang/snappy" "google.golang.org/protobuf/proto" ) diff --git a/testing/spectest/shared/altair/fork/upgrade_to_altair.go b/testing/spectest/shared/altair/fork/upgrade_to_altair.go index ccb8fefc9a..6789c05f37 100644 --- a/testing/spectest/shared/altair/fork/upgrade_to_altair.go +++ b/testing/spectest/shared/altair/fork/upgrade_to_altair.go @@ -5,13 +5,13 @@ import ( "path" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/altair" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/spectest/utils" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/altair" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/spectest/utils" + "github.com/OffchainLabs/prysm/v7/testing/util" "github.com/golang/snappy" "google.golang.org/protobuf/proto" ) diff --git a/testing/spectest/shared/altair/operations/BUILD.bazel b/testing/spectest/shared/altair/operations/BUILD.bazel index 183b84602f..d9b956d688 100644 --- a/testing/spectest/shared/altair/operations/BUILD.bazel +++ b/testing/spectest/shared/altair/operations/BUILD.bazel @@ -13,7 +13,7 @@ go_library( "sync_committee.go", "voluntary_exit.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/altair/operations", + importpath = "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/altair/operations", visibility = ["//testing/spectest:__subpackages__"], 
deps = [ "//beacon-chain/core/altair:go_default_library", diff --git a/testing/spectest/shared/altair/operations/attestation.go b/testing/spectest/shared/altair/operations/attestation.go index 8436baf9b7..13cbce4399 100644 --- a/testing/spectest/shared/altair/operations/attestation.go +++ b/testing/spectest/shared/altair/operations/attestation.go @@ -3,13 +3,13 @@ package operations import ( "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/altair" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" - common "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/common/operations" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/altair" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" + common "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/common/operations" + "github.com/OffchainLabs/prysm/v7/testing/util" ) func blockWithAttestation(attestationSSZ []byte) (interfaces.SignedBeaconBlock, error) { diff --git a/testing/spectest/shared/altair/operations/attester_slashing.go b/testing/spectest/shared/altair/operations/attester_slashing.go index 49c9922c3b..b7f9c96a3a 100644 --- a/testing/spectest/shared/altair/operations/attester_slashing.go +++ b/testing/spectest/shared/altair/operations/attester_slashing.go @@ -3,12 +3,12 @@ package operations import ( "testing" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" - common "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/common/operations" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" + common "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/common/operations" + "github.com/OffchainLabs/prysm/v7/testing/util" ) func blockWithAttesterSlashing(asSSZ []byte) (interfaces.SignedBeaconBlock, error) { diff --git a/testing/spectest/shared/altair/operations/block_header.go b/testing/spectest/shared/altair/operations/block_header.go index 58f6a18855..a9879bae4d 100644 --- a/testing/spectest/shared/altair/operations/block_header.go +++ b/testing/spectest/shared/altair/operations/block_header.go @@ -3,8 +3,8 @@ package operations import ( "testing" - "github.com/OffchainLabs/prysm/v6/runtime/version" - common "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/common/operations" + "github.com/OffchainLabs/prysm/v7/runtime/version" + common "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/common/operations" ) func RunBlockHeaderTest(t *testing.T, config string) { diff --git a/testing/spectest/shared/altair/operations/deposit.go b/testing/spectest/shared/altair/operations/deposit.go index 129043d276..8330c5c0b2 100644 --- a/testing/spectest/shared/altair/operations/deposit.go +++ b/testing/spectest/shared/altair/operations/deposit.go @@ -3,13 
+3,13 @@ package operations import ( "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/altair" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" - common "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/common/operations" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/altair" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" + common "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/common/operations" + "github.com/OffchainLabs/prysm/v7/testing/util" ) func blockWithDeposit(ssz []byte) (interfaces.SignedBeaconBlock, error) { diff --git a/testing/spectest/shared/altair/operations/helpers.go b/testing/spectest/shared/altair/operations/helpers.go index 8784d1066b..305125123d 100644 --- a/testing/spectest/shared/altair/operations/helpers.go +++ b/testing/spectest/shared/altair/operations/helpers.go @@ -1,11 +1,11 @@ package operations import ( - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" ) func sszToState(b []byte) (state.BeaconState, error) { diff --git a/testing/spectest/shared/altair/operations/proposer_slashing.go b/testing/spectest/shared/altair/operations/proposer_slashing.go index cbafc18d85..ef0bf4b2f0 100644 --- a/testing/spectest/shared/altair/operations/proposer_slashing.go +++ b/testing/spectest/shared/altair/operations/proposer_slashing.go @@ -3,12 +3,12 @@ package operations import ( "testing" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" - common "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/common/operations" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" + common "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/common/operations" + "github.com/OffchainLabs/prysm/v7/testing/util" ) func blockWithProposerSlashing(ssz []byte) (interfaces.SignedBeaconBlock, error) { diff --git a/testing/spectest/shared/altair/operations/sync_committee.go b/testing/spectest/shared/altair/operations/sync_committee.go index eb275b41d0..d25a70b801 100644 --- a/testing/spectest/shared/altair/operations/sync_committee.go +++ b/testing/spectest/shared/altair/operations/sync_committee.go @@ -3,12 
+3,12 @@ package operations import ( "testing" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" - common "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/common/operations" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" + common "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/common/operations" + "github.com/OffchainLabs/prysm/v7/testing/util" ) func blockWithSyncAggregate(ssz []byte) (interfaces.SignedBeaconBlock, error) { diff --git a/testing/spectest/shared/altair/operations/voluntary_exit.go b/testing/spectest/shared/altair/operations/voluntary_exit.go index d71f84c594..d4a5f97cfc 100644 --- a/testing/spectest/shared/altair/operations/voluntary_exit.go +++ b/testing/spectest/shared/altair/operations/voluntary_exit.go @@ -3,12 +3,12 @@ package operations import ( "testing" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" - common "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/common/operations" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" + common "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/common/operations" + "github.com/OffchainLabs/prysm/v7/testing/util" ) func blockWithVoluntaryExit(ssz []byte) (interfaces.SignedBeaconBlock, error) { diff --git a/testing/spectest/shared/altair/rewards/BUILD.bazel b/testing/spectest/shared/altair/rewards/BUILD.bazel index 74329b51af..2a35d14c7b 100644 --- a/testing/spectest/shared/altair/rewards/BUILD.bazel +++ b/testing/spectest/shared/altair/rewards/BUILD.bazel @@ -4,7 +4,7 @@ go_library( name = "go_default_library", testonly = True, srcs = ["rewards_penalties.go"], - importpath = "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/altair/rewards", + importpath = "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/altair/rewards", visibility = ["//testing/spectest:__subpackages__"], deps = [ "//beacon-chain/core/altair:go_default_library", diff --git a/testing/spectest/shared/altair/rewards/rewards_penalties.go b/testing/spectest/shared/altair/rewards/rewards_penalties.go index f25f84b405..79f468e584 100644 --- a/testing/spectest/shared/altair/rewards/rewards_penalties.go +++ b/testing/spectest/shared/altair/rewards/rewards_penalties.go @@ -9,13 +9,13 @@ import ( "strings" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/altair" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/spectest/utils" - "github.com/OffchainLabs/prysm/v6/testing/util" + 
"github.com/OffchainLabs/prysm/v7/beacon-chain/core/altair" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/spectest/utils" + "github.com/OffchainLabs/prysm/v7/testing/util" "github.com/golang/snappy" ) diff --git a/testing/spectest/shared/altair/sanity/BUILD.bazel b/testing/spectest/shared/altair/sanity/BUILD.bazel index 4800b11fa4..bd4e28257b 100644 --- a/testing/spectest/shared/altair/sanity/BUILD.bazel +++ b/testing/spectest/shared/altair/sanity/BUILD.bazel @@ -8,7 +8,7 @@ go_library( "block_processing.yaml.go", "slot_processing.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/altair/sanity", + importpath = "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/altair/sanity", visibility = ["//testing/spectest:__subpackages__"], deps = [ "//beacon-chain/core/helpers:go_default_library", diff --git a/testing/spectest/shared/altair/sanity/block_processing.go b/testing/spectest/shared/altair/sanity/block_processing.go index 70b2c1dfcb..ae7a0793ee 100644 --- a/testing/spectest/shared/altair/sanity/block_processing.go +++ b/testing/spectest/shared/altair/sanity/block_processing.go @@ -8,15 +8,15 @@ import ( "strings" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/transition" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/spectest/utils" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/transition" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/spectest/utils" + "github.com/OffchainLabs/prysm/v7/testing/util" "github.com/bazelbuild/rules_go/go/tools/bazel" "github.com/golang/snappy" "github.com/google/go-cmp/cmp" diff --git a/testing/spectest/shared/altair/sanity/slot_processing.go b/testing/spectest/shared/altair/sanity/slot_processing.go index 89ee8b5e68..150c889efb 100644 --- a/testing/spectest/shared/altair/sanity/slot_processing.go +++ b/testing/spectest/shared/altair/sanity/slot_processing.go @@ -5,12 +5,12 @@ import ( "strconv" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/transition" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/spectest/utils" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/transition" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + ethpb 
"github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/spectest/utils" + "github.com/OffchainLabs/prysm/v7/testing/util" "github.com/golang/snappy" "github.com/google/go-cmp/cmp" "google.golang.org/protobuf/proto" diff --git a/testing/spectest/shared/altair/ssz_static/BUILD.bazel b/testing/spectest/shared/altair/ssz_static/BUILD.bazel index 4e8f083364..50dd40e32c 100644 --- a/testing/spectest/shared/altair/ssz_static/BUILD.bazel +++ b/testing/spectest/shared/altair/ssz_static/BUILD.bazel @@ -4,7 +4,7 @@ go_library( name = "go_default_library", testonly = True, srcs = ["ssz_static.go"], - importpath = "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/altair/ssz_static", + importpath = "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/altair/ssz_static", visibility = ["//testing/spectest:__subpackages__"], deps = [ "//beacon-chain/state/state-native:go_default_library", diff --git a/testing/spectest/shared/altair/ssz_static/ssz_static.go b/testing/spectest/shared/altair/ssz_static/ssz_static.go index 0fce6ff12f..0924bda4dc 100644 --- a/testing/spectest/shared/altair/ssz_static/ssz_static.go +++ b/testing/spectest/shared/altair/ssz_static/ssz_static.go @@ -5,10 +5,10 @@ import ( "errors" "testing" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/require" - common "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/common/ssz_static" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/require" + common "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/common/ssz_static" fssz "github.com/prysmaticlabs/fastssz" ) diff --git a/testing/spectest/shared/bellatrix/epoch_processing/BUILD.bazel b/testing/spectest/shared/bellatrix/epoch_processing/BUILD.bazel index 9bc2a38dc4..cc2188be56 100644 --- a/testing/spectest/shared/bellatrix/epoch_processing/BUILD.bazel +++ b/testing/spectest/shared/bellatrix/epoch_processing/BUILD.bazel @@ -17,7 +17,7 @@ go_library( "slashings.go", "slashings_reset.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/bellatrix/epoch_processing", + importpath = "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/bellatrix/epoch_processing", visibility = ["//testing/spectest:__subpackages__"], deps = [ "//beacon-chain/core/altair:go_default_library", diff --git a/testing/spectest/shared/bellatrix/epoch_processing/effective_balance_updates.go b/testing/spectest/shared/bellatrix/epoch_processing/effective_balance_updates.go index 79a58fe103..ea909b674a 100644 --- a/testing/spectest/shared/bellatrix/epoch_processing/effective_balance_updates.go +++ b/testing/spectest/shared/bellatrix/epoch_processing/effective_balance_updates.go @@ -4,10 +4,10 @@ import ( "path" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/epoch" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/spectest/utils" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/epoch" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/spectest/utils" ) // 
RunEffectiveBalanceUpdatesTests executes "epoch_processing/effective_balance_updates" tests. diff --git a/testing/spectest/shared/bellatrix/epoch_processing/eth1_data_reset.go b/testing/spectest/shared/bellatrix/epoch_processing/eth1_data_reset.go index 4e6bcc5bc3..e97163238c 100644 --- a/testing/spectest/shared/bellatrix/epoch_processing/eth1_data_reset.go +++ b/testing/spectest/shared/bellatrix/epoch_processing/eth1_data_reset.go @@ -4,10 +4,10 @@ import ( "path" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/epoch" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/spectest/utils" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/epoch" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/spectest/utils" ) // RunEth1DataResetTests executes "epoch_processing/eth1_data_reset" tests. diff --git a/testing/spectest/shared/bellatrix/epoch_processing/helpers.go b/testing/spectest/shared/bellatrix/epoch_processing/helpers.go index f2ef02a9a2..ec1b0ed39b 100644 --- a/testing/spectest/shared/bellatrix/epoch_processing/helpers.go +++ b/testing/spectest/shared/bellatrix/epoch_processing/helpers.go @@ -6,11 +6,11 @@ import ( "strings" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" "github.com/bazelbuild/rules_go/go/tools/bazel" "github.com/golang/snappy" "github.com/google/go-cmp/cmp" diff --git a/testing/spectest/shared/bellatrix/epoch_processing/historical_roots_update.go b/testing/spectest/shared/bellatrix/epoch_processing/historical_roots_update.go index dab56d3845..e941f8667a 100644 --- a/testing/spectest/shared/bellatrix/epoch_processing/historical_roots_update.go +++ b/testing/spectest/shared/bellatrix/epoch_processing/historical_roots_update.go @@ -4,10 +4,10 @@ import ( "path" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/epoch" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/spectest/utils" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/epoch" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/spectest/utils" ) // RunHistoricalRootsUpdateTests executes "epoch_processing/historical_roots_update" tests. 
diff --git a/testing/spectest/shared/bellatrix/epoch_processing/inactivity_updates.go b/testing/spectest/shared/bellatrix/epoch_processing/inactivity_updates.go index a7b199f705..084fc51820 100644 --- a/testing/spectest/shared/bellatrix/epoch_processing/inactivity_updates.go +++ b/testing/spectest/shared/bellatrix/epoch_processing/inactivity_updates.go @@ -5,11 +5,11 @@ import ( "path" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/altair" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/spectest/utils" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/altair" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/spectest/utils" ) // RunInactivityUpdatesTest executes "epoch_processing/inactivity_updates" tests. diff --git a/testing/spectest/shared/bellatrix/epoch_processing/justification_and_finalization.go b/testing/spectest/shared/bellatrix/epoch_processing/justification_and_finalization.go index 3d096838e3..65fbb8f3c3 100644 --- a/testing/spectest/shared/bellatrix/epoch_processing/justification_and_finalization.go +++ b/testing/spectest/shared/bellatrix/epoch_processing/justification_and_finalization.go @@ -5,11 +5,11 @@ import ( "path" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/altair" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/epoch/precompute" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/spectest/utils" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/altair" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/epoch/precompute" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/spectest/utils" ) // RunJustificationAndFinalizationTests executes "epoch_processing/justification_and_finalization" tests. diff --git a/testing/spectest/shared/bellatrix/epoch_processing/participation_flag_updates.go b/testing/spectest/shared/bellatrix/epoch_processing/participation_flag_updates.go index 5082c957e4..b2477b96bd 100644 --- a/testing/spectest/shared/bellatrix/epoch_processing/participation_flag_updates.go +++ b/testing/spectest/shared/bellatrix/epoch_processing/participation_flag_updates.go @@ -4,10 +4,10 @@ import ( "path" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/altair" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/spectest/utils" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/altair" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/spectest/utils" ) // RunParticipationFlagUpdatesTests executes "epoch_processing/participation_flag_updates" tests. 
diff --git a/testing/spectest/shared/bellatrix/epoch_processing/randao_mixes_reset.go b/testing/spectest/shared/bellatrix/epoch_processing/randao_mixes_reset.go index 763b95f4e8..7df83b80f7 100644 --- a/testing/spectest/shared/bellatrix/epoch_processing/randao_mixes_reset.go +++ b/testing/spectest/shared/bellatrix/epoch_processing/randao_mixes_reset.go @@ -4,10 +4,10 @@ import ( "path" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/epoch" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/spectest/utils" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/epoch" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/spectest/utils" ) // RunRandaoMixesResetTests executes "epoch_processing/randao_mixes_reset" tests. diff --git a/testing/spectest/shared/bellatrix/epoch_processing/registry_updates.go b/testing/spectest/shared/bellatrix/epoch_processing/registry_updates.go index 0ab23d907e..aab27c2c5a 100644 --- a/testing/spectest/shared/bellatrix/epoch_processing/registry_updates.go +++ b/testing/spectest/shared/bellatrix/epoch_processing/registry_updates.go @@ -5,11 +5,11 @@ import ( "path" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/epoch" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/spectest/utils" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/epoch" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/spectest/utils" ) // RunRegistryUpdatesTests executes "epoch_processing/registry_updates" tests. diff --git a/testing/spectest/shared/bellatrix/epoch_processing/rewards_and_penalties.go b/testing/spectest/shared/bellatrix/epoch_processing/rewards_and_penalties.go index 32d934f484..1453622311 100644 --- a/testing/spectest/shared/bellatrix/epoch_processing/rewards_and_penalties.go +++ b/testing/spectest/shared/bellatrix/epoch_processing/rewards_and_penalties.go @@ -5,11 +5,11 @@ import ( "path" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/altair" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/spectest/utils" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/altair" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/spectest/utils" ) // RunRewardsAndPenaltiesTests executes "epoch_processing/rewards_and_penalties" tests. 
diff --git a/testing/spectest/shared/bellatrix/epoch_processing/slashings.go b/testing/spectest/shared/bellatrix/epoch_processing/slashings.go index ff92983c72..5e49ba0bf1 100644 --- a/testing/spectest/shared/bellatrix/epoch_processing/slashings.go +++ b/testing/spectest/shared/bellatrix/epoch_processing/slashings.go @@ -4,11 +4,11 @@ import ( "path" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/epoch" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/spectest/utils" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/epoch" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/spectest/utils" ) // RunSlashingsTests executes "epoch_processing/slashings" tests. diff --git a/testing/spectest/shared/bellatrix/epoch_processing/slashings_reset.go b/testing/spectest/shared/bellatrix/epoch_processing/slashings_reset.go index dc4f465e32..2a3a043e75 100644 --- a/testing/spectest/shared/bellatrix/epoch_processing/slashings_reset.go +++ b/testing/spectest/shared/bellatrix/epoch_processing/slashings_reset.go @@ -4,10 +4,10 @@ import ( "path" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/epoch" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/spectest/utils" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/epoch" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/spectest/utils" ) // RunSlashingsResetTests executes "epoch_processing/slashings_reset" tests. 
diff --git a/testing/spectest/shared/bellatrix/finality/BUILD.bazel b/testing/spectest/shared/bellatrix/finality/BUILD.bazel index 6abbf3e56c..f60e82c078 100644 --- a/testing/spectest/shared/bellatrix/finality/BUILD.bazel +++ b/testing/spectest/shared/bellatrix/finality/BUILD.bazel @@ -4,7 +4,7 @@ go_library( name = "go_default_library", testonly = True, srcs = ["finality.go"], - importpath = "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/bellatrix/finality", + importpath = "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/bellatrix/finality", visibility = ["//visibility:public"], deps = [ "//beacon-chain/core/helpers:go_default_library", diff --git a/testing/spectest/shared/bellatrix/finality/finality.go b/testing/spectest/shared/bellatrix/finality/finality.go index 5e0586ba2c..01214171a0 100644 --- a/testing/spectest/shared/bellatrix/finality/finality.go +++ b/testing/spectest/shared/bellatrix/finality/finality.go @@ -5,15 +5,15 @@ import ( "fmt" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/transition" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/spectest/utils" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/transition" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/spectest/utils" + "github.com/OffchainLabs/prysm/v7/testing/util" "github.com/golang/snappy" "google.golang.org/protobuf/proto" ) diff --git a/testing/spectest/shared/bellatrix/fork/BUILD.bazel b/testing/spectest/shared/bellatrix/fork/BUILD.bazel index 6ab79b1ee0..21be937dd1 100644 --- a/testing/spectest/shared/bellatrix/fork/BUILD.bazel +++ b/testing/spectest/shared/bellatrix/fork/BUILD.bazel @@ -7,7 +7,7 @@ go_library( "transition.go", "upgrade_to_bellatrix.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/bellatrix/fork", + importpath = "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/bellatrix/fork", visibility = ["//visibility:public"], deps = [ "//beacon-chain/core/execution:go_default_library", diff --git a/testing/spectest/shared/bellatrix/fork/transition.go b/testing/spectest/shared/bellatrix/fork/transition.go index 5fdb619987..83c4986334 100644 --- a/testing/spectest/shared/bellatrix/fork/transition.go +++ b/testing/spectest/shared/bellatrix/fork/transition.go @@ -5,16 +5,16 @@ import ( "fmt" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/transition" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - 
"github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/spectest/utils" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/transition" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/spectest/utils" + "github.com/OffchainLabs/prysm/v7/testing/util" "github.com/golang/snappy" ) diff --git a/testing/spectest/shared/bellatrix/fork/upgrade_to_bellatrix.go b/testing/spectest/shared/bellatrix/fork/upgrade_to_bellatrix.go index d35529f9ee..a671ac454d 100644 --- a/testing/spectest/shared/bellatrix/fork/upgrade_to_bellatrix.go +++ b/testing/spectest/shared/bellatrix/fork/upgrade_to_bellatrix.go @@ -4,13 +4,13 @@ import ( "path" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/execution" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/spectest/utils" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/execution" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/spectest/utils" + "github.com/OffchainLabs/prysm/v7/testing/util" "github.com/golang/snappy" "google.golang.org/protobuf/proto" ) diff --git a/testing/spectest/shared/bellatrix/operations/BUILD.bazel b/testing/spectest/shared/bellatrix/operations/BUILD.bazel index 9bb49c1b80..580080ad83 100644 --- a/testing/spectest/shared/bellatrix/operations/BUILD.bazel +++ b/testing/spectest/shared/bellatrix/operations/BUILD.bazel @@ -14,7 +14,7 @@ go_library( "sync_committee.go", "voluntary_exit.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/bellatrix/operations", + importpath = "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/bellatrix/operations", visibility = ["//testing/spectest:__subpackages__"], deps = [ "//beacon-chain/core/altair:go_default_library", diff --git a/testing/spectest/shared/bellatrix/operations/attestation.go b/testing/spectest/shared/bellatrix/operations/attestation.go index 538d76d702..bba52743b1 100644 --- a/testing/spectest/shared/bellatrix/operations/attestation.go +++ b/testing/spectest/shared/bellatrix/operations/attestation.go @@ -3,13 +3,13 @@ package operations import ( "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/altair" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" - common "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/common/operations" - 
"github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/altair" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" + common "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/common/operations" + "github.com/OffchainLabs/prysm/v7/testing/util" ) func blockWithAttestation(attestationSSZ []byte) (interfaces.SignedBeaconBlock, error) { diff --git a/testing/spectest/shared/bellatrix/operations/attester_slashing.go b/testing/spectest/shared/bellatrix/operations/attester_slashing.go index 9c637497aa..6072c6d401 100644 --- a/testing/spectest/shared/bellatrix/operations/attester_slashing.go +++ b/testing/spectest/shared/bellatrix/operations/attester_slashing.go @@ -3,12 +3,12 @@ package operations import ( "testing" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" - common "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/common/operations" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" + common "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/common/operations" + "github.com/OffchainLabs/prysm/v7/testing/util" ) func blockWithAttesterSlashing(asSSZ []byte) (interfaces.SignedBeaconBlock, error) { diff --git a/testing/spectest/shared/bellatrix/operations/block_header.go b/testing/spectest/shared/bellatrix/operations/block_header.go index 50f5cfcb13..3299be7f24 100644 --- a/testing/spectest/shared/bellatrix/operations/block_header.go +++ b/testing/spectest/shared/bellatrix/operations/block_header.go @@ -3,8 +3,8 @@ package operations import ( "testing" - "github.com/OffchainLabs/prysm/v6/runtime/version" - common "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/common/operations" + "github.com/OffchainLabs/prysm/v7/runtime/version" + common "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/common/operations" ) func RunBlockHeaderTest(t *testing.T, config string) { diff --git a/testing/spectest/shared/bellatrix/operations/deposit.go b/testing/spectest/shared/bellatrix/operations/deposit.go index adf5e4126f..3346a15054 100644 --- a/testing/spectest/shared/bellatrix/operations/deposit.go +++ b/testing/spectest/shared/bellatrix/operations/deposit.go @@ -3,13 +3,13 @@ package operations import ( "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/altair" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" - common "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/common/operations" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/altair" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + 
"github.com/OffchainLabs/prysm/v7/runtime/version" + common "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/common/operations" + "github.com/OffchainLabs/prysm/v7/testing/util" ) func blockWithDeposit(ssz []byte) (interfaces.SignedBeaconBlock, error) { diff --git a/testing/spectest/shared/bellatrix/operations/execution_payload.go b/testing/spectest/shared/bellatrix/operations/execution_payload.go index 110b4d0334..ab17b4df4c 100644 --- a/testing/spectest/shared/bellatrix/operations/execution_payload.go +++ b/testing/spectest/shared/bellatrix/operations/execution_payload.go @@ -3,8 +3,8 @@ package operations import ( "testing" - "github.com/OffchainLabs/prysm/v6/runtime/version" - common "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/common/operations" + "github.com/OffchainLabs/prysm/v7/runtime/version" + common "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/common/operations" ) func RunExecutionPayloadTest(t *testing.T, config string) { diff --git a/testing/spectest/shared/bellatrix/operations/helpers.go b/testing/spectest/shared/bellatrix/operations/helpers.go index 3acfae6809..c6b3c5fac1 100644 --- a/testing/spectest/shared/bellatrix/operations/helpers.go +++ b/testing/spectest/shared/bellatrix/operations/helpers.go @@ -1,11 +1,11 @@ package operations import ( - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" ) func sszToState(b []byte) (state.BeaconState, error) { diff --git a/testing/spectest/shared/bellatrix/operations/proposer_slashing.go b/testing/spectest/shared/bellatrix/operations/proposer_slashing.go index d7890121a1..095cd7dcd5 100644 --- a/testing/spectest/shared/bellatrix/operations/proposer_slashing.go +++ b/testing/spectest/shared/bellatrix/operations/proposer_slashing.go @@ -3,12 +3,12 @@ package operations import ( "testing" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" - common "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/common/operations" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" + common "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/common/operations" + "github.com/OffchainLabs/prysm/v7/testing/util" ) func blockWithProposerSlashing(ssz []byte) (interfaces.SignedBeaconBlock, error) { diff --git a/testing/spectest/shared/bellatrix/operations/sync_committee.go b/testing/spectest/shared/bellatrix/operations/sync_committee.go index a2696c2033..adcc2d8bcd 100644 --- a/testing/spectest/shared/bellatrix/operations/sync_committee.go +++ 
b/testing/spectest/shared/bellatrix/operations/sync_committee.go @@ -3,12 +3,12 @@ package operations import ( "testing" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" - common "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/common/operations" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" + common "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/common/operations" + "github.com/OffchainLabs/prysm/v7/testing/util" ) func blockWithSyncAggregate(ssz []byte) (interfaces.SignedBeaconBlock, error) { diff --git a/testing/spectest/shared/bellatrix/operations/voluntary_exit.go b/testing/spectest/shared/bellatrix/operations/voluntary_exit.go index 556ba8f902..e41d6bef20 100644 --- a/testing/spectest/shared/bellatrix/operations/voluntary_exit.go +++ b/testing/spectest/shared/bellatrix/operations/voluntary_exit.go @@ -3,12 +3,12 @@ package operations import ( "testing" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" - common "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/common/operations" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" + common "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/common/operations" + "github.com/OffchainLabs/prysm/v7/testing/util" ) func blockWithVoluntaryExit(ssz []byte) (interfaces.SignedBeaconBlock, error) { diff --git a/testing/spectest/shared/bellatrix/rewards/BUILD.bazel b/testing/spectest/shared/bellatrix/rewards/BUILD.bazel index 2ee840086f..d9267dbf1c 100644 --- a/testing/spectest/shared/bellatrix/rewards/BUILD.bazel +++ b/testing/spectest/shared/bellatrix/rewards/BUILD.bazel @@ -4,7 +4,7 @@ go_library( name = "go_default_library", testonly = True, srcs = ["rewards_penalties.go"], - importpath = "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/bellatrix/rewards", + importpath = "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/bellatrix/rewards", visibility = ["//testing/spectest:__subpackages__"], deps = [ "//beacon-chain/core/altair:go_default_library", diff --git a/testing/spectest/shared/bellatrix/rewards/rewards_penalties.go b/testing/spectest/shared/bellatrix/rewards/rewards_penalties.go index 3033de5b7c..53c6797f6c 100644 --- a/testing/spectest/shared/bellatrix/rewards/rewards_penalties.go +++ b/testing/spectest/shared/bellatrix/rewards/rewards_penalties.go @@ -9,13 +9,13 @@ import ( "strings" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/altair" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/require" - 
"github.com/OffchainLabs/prysm/v6/testing/spectest/utils" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/altair" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/spectest/utils" + "github.com/OffchainLabs/prysm/v7/testing/util" "github.com/golang/snappy" ) diff --git a/testing/spectest/shared/bellatrix/sanity/BUILD.bazel b/testing/spectest/shared/bellatrix/sanity/BUILD.bazel index c5a89d7cb8..9262df1e1a 100644 --- a/testing/spectest/shared/bellatrix/sanity/BUILD.bazel +++ b/testing/spectest/shared/bellatrix/sanity/BUILD.bazel @@ -8,7 +8,7 @@ go_library( "block_processing.yaml.go", "slot_processing.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/bellatrix/sanity", + importpath = "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/bellatrix/sanity", visibility = ["//testing/spectest:__subpackages__"], deps = [ "//beacon-chain/core/helpers:go_default_library", diff --git a/testing/spectest/shared/bellatrix/sanity/block_processing.go b/testing/spectest/shared/bellatrix/sanity/block_processing.go index 61f9e71d58..ff70ce953b 100644 --- a/testing/spectest/shared/bellatrix/sanity/block_processing.go +++ b/testing/spectest/shared/bellatrix/sanity/block_processing.go @@ -8,15 +8,15 @@ import ( "strings" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/transition" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/spectest/utils" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/transition" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/spectest/utils" + "github.com/OffchainLabs/prysm/v7/testing/util" "github.com/bazelbuild/rules_go/go/tools/bazel" "github.com/golang/snappy" "github.com/google/go-cmp/cmp" diff --git a/testing/spectest/shared/bellatrix/sanity/slot_processing.go b/testing/spectest/shared/bellatrix/sanity/slot_processing.go index 93ffbf04ab..080b907785 100644 --- a/testing/spectest/shared/bellatrix/sanity/slot_processing.go +++ b/testing/spectest/shared/bellatrix/sanity/slot_processing.go @@ -5,12 +5,12 @@ import ( "strconv" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/transition" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/spectest/utils" - "github.com/OffchainLabs/prysm/v6/testing/util" + 
"github.com/OffchainLabs/prysm/v7/beacon-chain/core/transition" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/spectest/utils" + "github.com/OffchainLabs/prysm/v7/testing/util" "github.com/golang/snappy" "github.com/google/go-cmp/cmp" "google.golang.org/protobuf/proto" diff --git a/testing/spectest/shared/bellatrix/ssz_static/BUILD.bazel b/testing/spectest/shared/bellatrix/ssz_static/BUILD.bazel index 9982ea6a8d..7204379bb0 100644 --- a/testing/spectest/shared/bellatrix/ssz_static/BUILD.bazel +++ b/testing/spectest/shared/bellatrix/ssz_static/BUILD.bazel @@ -4,7 +4,7 @@ go_library( name = "go_default_library", testonly = True, srcs = ["ssz_static.go"], - importpath = "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/bellatrix/ssz_static", + importpath = "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/bellatrix/ssz_static", visibility = ["//testing/spectest:__subpackages__"], deps = [ "//beacon-chain/state/state-native:go_default_library", diff --git a/testing/spectest/shared/bellatrix/ssz_static/ssz_static.go b/testing/spectest/shared/bellatrix/ssz_static/ssz_static.go index 8a62730731..718670c317 100644 --- a/testing/spectest/shared/bellatrix/ssz_static/ssz_static.go +++ b/testing/spectest/shared/bellatrix/ssz_static/ssz_static.go @@ -5,11 +5,11 @@ import ( "errors" "testing" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - enginev1 "github.com/OffchainLabs/prysm/v6/proto/engine/v1" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/require" - common "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/common/ssz_static" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + enginev1 "github.com/OffchainLabs/prysm/v7/proto/engine/v1" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/require" + common "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/common/ssz_static" fssz "github.com/prysmaticlabs/fastssz" ) diff --git a/testing/spectest/shared/capella/epoch_processing/BUILD.bazel b/testing/spectest/shared/capella/epoch_processing/BUILD.bazel index 03a08ce557..72f59c67e3 100644 --- a/testing/spectest/shared/capella/epoch_processing/BUILD.bazel +++ b/testing/spectest/shared/capella/epoch_processing/BUILD.bazel @@ -17,7 +17,7 @@ go_library( "slashings.go", "slashings_reset.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/capella/epoch_processing", + importpath = "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/capella/epoch_processing", visibility = ["//testing/spectest:__subpackages__"], deps = [ "//beacon-chain/core/altair:go_default_library", diff --git a/testing/spectest/shared/capella/epoch_processing/effective_balance_updates.go b/testing/spectest/shared/capella/epoch_processing/effective_balance_updates.go index 823ddc8f3b..ce48fabe5e 100644 --- a/testing/spectest/shared/capella/epoch_processing/effective_balance_updates.go +++ b/testing/spectest/shared/capella/epoch_processing/effective_balance_updates.go @@ -4,10 +4,10 @@ import ( "path" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/epoch" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/testing/require" - 
"github.com/OffchainLabs/prysm/v6/testing/spectest/utils" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/epoch" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/spectest/utils" ) // RunEffectiveBalanceUpdatesTests executes "epoch_processing/effective_balance_updates" tests. diff --git a/testing/spectest/shared/capella/epoch_processing/eth1_data_reset.go b/testing/spectest/shared/capella/epoch_processing/eth1_data_reset.go index 0c3b8c89c7..2ae0b1d8e8 100644 --- a/testing/spectest/shared/capella/epoch_processing/eth1_data_reset.go +++ b/testing/spectest/shared/capella/epoch_processing/eth1_data_reset.go @@ -4,10 +4,10 @@ import ( "path" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/epoch" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/spectest/utils" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/epoch" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/spectest/utils" ) // RunEth1DataResetTests executes "epoch_processing/eth1_data_reset" tests. diff --git a/testing/spectest/shared/capella/epoch_processing/helpers.go b/testing/spectest/shared/capella/epoch_processing/helpers.go index 5d9d66b6c8..f5a312e17c 100644 --- a/testing/spectest/shared/capella/epoch_processing/helpers.go +++ b/testing/spectest/shared/capella/epoch_processing/helpers.go @@ -6,11 +6,11 @@ import ( "strings" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" "github.com/bazelbuild/rules_go/go/tools/bazel" "github.com/golang/snappy" "github.com/google/go-cmp/cmp" diff --git a/testing/spectest/shared/capella/epoch_processing/historical_summaries_update.go b/testing/spectest/shared/capella/epoch_processing/historical_summaries_update.go index dac79f0cd3..6ede28c4c5 100644 --- a/testing/spectest/shared/capella/epoch_processing/historical_summaries_update.go +++ b/testing/spectest/shared/capella/epoch_processing/historical_summaries_update.go @@ -4,10 +4,10 @@ import ( "path" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/epoch" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/spectest/utils" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/epoch" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/spectest/utils" ) // RunHistoricalSummariesUpdateTests executes "epoch_processing/historical_summaries_update" tests. 
diff --git a/testing/spectest/shared/capella/epoch_processing/inactivity_updates.go b/testing/spectest/shared/capella/epoch_processing/inactivity_updates.go index f66db00d5d..f8f19a4a44 100644 --- a/testing/spectest/shared/capella/epoch_processing/inactivity_updates.go +++ b/testing/spectest/shared/capella/epoch_processing/inactivity_updates.go @@ -5,11 +5,11 @@ import ( "path" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/altair" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/spectest/utils" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/altair" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/spectest/utils" ) // RunInactivityUpdatesTest executes "epoch_processing/inactivity_updates" tests. diff --git a/testing/spectest/shared/capella/epoch_processing/justification_and_finalization.go b/testing/spectest/shared/capella/epoch_processing/justification_and_finalization.go index 8285cd2965..a737e4e336 100644 --- a/testing/spectest/shared/capella/epoch_processing/justification_and_finalization.go +++ b/testing/spectest/shared/capella/epoch_processing/justification_and_finalization.go @@ -5,11 +5,11 @@ import ( "path" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/altair" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/epoch/precompute" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/spectest/utils" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/altair" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/epoch/precompute" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/spectest/utils" ) // RunJustificationAndFinalizationTests executes "epoch_processing/justification_and_finalization" tests. diff --git a/testing/spectest/shared/capella/epoch_processing/participation_flag_updates.go b/testing/spectest/shared/capella/epoch_processing/participation_flag_updates.go index 87eff06571..5af9c4f3aa 100644 --- a/testing/spectest/shared/capella/epoch_processing/participation_flag_updates.go +++ b/testing/spectest/shared/capella/epoch_processing/participation_flag_updates.go @@ -4,10 +4,10 @@ import ( "path" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/altair" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/spectest/utils" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/altair" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/spectest/utils" ) // RunParticipationFlagUpdatesTests executes "epoch_processing/participation_flag_updates" tests. 
diff --git a/testing/spectest/shared/capella/epoch_processing/randao_mixes_reset.go b/testing/spectest/shared/capella/epoch_processing/randao_mixes_reset.go index bdca2f0fe9..c5f04ca23c 100644 --- a/testing/spectest/shared/capella/epoch_processing/randao_mixes_reset.go +++ b/testing/spectest/shared/capella/epoch_processing/randao_mixes_reset.go @@ -4,10 +4,10 @@ import ( "path" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/epoch" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/spectest/utils" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/epoch" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/spectest/utils" ) // RunRandaoMixesResetTests executes "epoch_processing/randao_mixes_reset" tests. diff --git a/testing/spectest/shared/capella/epoch_processing/registry_updates.go b/testing/spectest/shared/capella/epoch_processing/registry_updates.go index cb366cd548..d02c124842 100644 --- a/testing/spectest/shared/capella/epoch_processing/registry_updates.go +++ b/testing/spectest/shared/capella/epoch_processing/registry_updates.go @@ -5,11 +5,11 @@ import ( "path" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/epoch" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/spectest/utils" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/epoch" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/spectest/utils" ) // RunRegistryUpdatesTests executes "epoch_processing/registry_updates" tests. diff --git a/testing/spectest/shared/capella/epoch_processing/rewards_and_penalties.go b/testing/spectest/shared/capella/epoch_processing/rewards_and_penalties.go index 8b72f98301..580a5b6e96 100644 --- a/testing/spectest/shared/capella/epoch_processing/rewards_and_penalties.go +++ b/testing/spectest/shared/capella/epoch_processing/rewards_and_penalties.go @@ -5,11 +5,11 @@ import ( "path" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/altair" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/spectest/utils" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/altair" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/spectest/utils" ) // RunRewardsAndPenaltiesTests executes "epoch_processing/rewards_and_penalties" tests. 
diff --git a/testing/spectest/shared/capella/epoch_processing/slashings.go b/testing/spectest/shared/capella/epoch_processing/slashings.go index b4957f2bcd..49a5a32829 100644 --- a/testing/spectest/shared/capella/epoch_processing/slashings.go +++ b/testing/spectest/shared/capella/epoch_processing/slashings.go @@ -4,11 +4,11 @@ import ( "path" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/epoch" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/spectest/utils" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/epoch" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/spectest/utils" ) // RunSlashingsTests executes "epoch_processing/slashings" tests. diff --git a/testing/spectest/shared/capella/epoch_processing/slashings_reset.go b/testing/spectest/shared/capella/epoch_processing/slashings_reset.go index 5f4a6b7236..27fd39e745 100644 --- a/testing/spectest/shared/capella/epoch_processing/slashings_reset.go +++ b/testing/spectest/shared/capella/epoch_processing/slashings_reset.go @@ -4,10 +4,10 @@ import ( "path" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/epoch" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/spectest/utils" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/epoch" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/spectest/utils" ) // RunSlashingsResetTests executes "epoch_processing/slashings_reset" tests. 
diff --git a/testing/spectest/shared/capella/finality/BUILD.bazel b/testing/spectest/shared/capella/finality/BUILD.bazel index 83e7e0a590..d575e2f044 100644 --- a/testing/spectest/shared/capella/finality/BUILD.bazel +++ b/testing/spectest/shared/capella/finality/BUILD.bazel @@ -4,7 +4,7 @@ go_library( name = "go_default_library", testonly = True, srcs = ["finality.go"], - importpath = "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/capella/finality", + importpath = "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/capella/finality", visibility = ["//visibility:public"], deps = [ "//beacon-chain/core/helpers:go_default_library", diff --git a/testing/spectest/shared/capella/finality/finality.go b/testing/spectest/shared/capella/finality/finality.go index ac274d7f75..1db1c4d6d9 100644 --- a/testing/spectest/shared/capella/finality/finality.go +++ b/testing/spectest/shared/capella/finality/finality.go @@ -5,15 +5,15 @@ import ( "fmt" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/transition" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/spectest/utils" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/transition" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/spectest/utils" + "github.com/OffchainLabs/prysm/v7/testing/util" "github.com/golang/snappy" "google.golang.org/protobuf/proto" ) diff --git a/testing/spectest/shared/capella/fork/BUILD.bazel b/testing/spectest/shared/capella/fork/BUILD.bazel index 6f09bdbb9b..e752dd77be 100644 --- a/testing/spectest/shared/capella/fork/BUILD.bazel +++ b/testing/spectest/shared/capella/fork/BUILD.bazel @@ -7,7 +7,7 @@ go_library( "transition.go", "upgrade_to_capella.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/capella/fork", + importpath = "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/capella/fork", visibility = ["//visibility:public"], deps = [ "//beacon-chain/core/capella:go_default_library", diff --git a/testing/spectest/shared/capella/fork/transition.go b/testing/spectest/shared/capella/fork/transition.go index 13a5913d47..52f4c19098 100644 --- a/testing/spectest/shared/capella/fork/transition.go +++ b/testing/spectest/shared/capella/fork/transition.go @@ -5,16 +5,16 @@ import ( "fmt" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/transition" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - 
"github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/spectest/utils" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/transition" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/spectest/utils" + "github.com/OffchainLabs/prysm/v7/testing/util" "github.com/golang/snappy" ) diff --git a/testing/spectest/shared/capella/fork/upgrade_to_capella.go b/testing/spectest/shared/capella/fork/upgrade_to_capella.go index 1f49cd1fe3..24e4c8b0ce 100644 --- a/testing/spectest/shared/capella/fork/upgrade_to_capella.go +++ b/testing/spectest/shared/capella/fork/upgrade_to_capella.go @@ -4,13 +4,13 @@ import ( "path" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/capella" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/spectest/utils" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/capella" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/spectest/utils" + "github.com/OffchainLabs/prysm/v7/testing/util" "github.com/golang/snappy" "google.golang.org/protobuf/proto" ) diff --git a/testing/spectest/shared/capella/operations/BUILD.bazel b/testing/spectest/shared/capella/operations/BUILD.bazel index c0222939d9..53a5b7a1b9 100644 --- a/testing/spectest/shared/capella/operations/BUILD.bazel +++ b/testing/spectest/shared/capella/operations/BUILD.bazel @@ -16,7 +16,7 @@ go_library( "voluntary_exit.go", "withdrawals.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/capella/operations", + importpath = "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/capella/operations", visibility = ["//testing/spectest:__subpackages__"], deps = [ "//beacon-chain/core/altair:go_default_library", diff --git a/testing/spectest/shared/capella/operations/attestation.go b/testing/spectest/shared/capella/operations/attestation.go index c1bde9c1a1..880e15dd90 100644 --- a/testing/spectest/shared/capella/operations/attestation.go +++ b/testing/spectest/shared/capella/operations/attestation.go @@ -3,13 +3,13 @@ package operations import ( "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/altair" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" - common "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/common/operations" - "github.com/OffchainLabs/prysm/v6/testing/util" + 
"github.com/OffchainLabs/prysm/v7/beacon-chain/core/altair" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" + common "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/common/operations" + "github.com/OffchainLabs/prysm/v7/testing/util" ) func blockWithAttestation(attestationSSZ []byte) (interfaces.SignedBeaconBlock, error) { diff --git a/testing/spectest/shared/capella/operations/attester_slashing.go b/testing/spectest/shared/capella/operations/attester_slashing.go index 9b51e7167d..c6678ffaf3 100644 --- a/testing/spectest/shared/capella/operations/attester_slashing.go +++ b/testing/spectest/shared/capella/operations/attester_slashing.go @@ -3,12 +3,12 @@ package operations import ( "testing" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" - common "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/common/operations" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" + common "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/common/operations" + "github.com/OffchainLabs/prysm/v7/testing/util" ) func blockWithAttesterSlashing(asSSZ []byte) (interfaces.SignedBeaconBlock, error) { diff --git a/testing/spectest/shared/capella/operations/block_header.go b/testing/spectest/shared/capella/operations/block_header.go index 9f926b3d59..7ac2a3a814 100644 --- a/testing/spectest/shared/capella/operations/block_header.go +++ b/testing/spectest/shared/capella/operations/block_header.go @@ -3,8 +3,8 @@ package operations import ( "testing" - "github.com/OffchainLabs/prysm/v6/runtime/version" - common "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/common/operations" + "github.com/OffchainLabs/prysm/v7/runtime/version" + common "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/common/operations" ) func RunBlockHeaderTest(t *testing.T, config string) { diff --git a/testing/spectest/shared/capella/operations/bls_to_execution_changes.go b/testing/spectest/shared/capella/operations/bls_to_execution_changes.go index 50734162f0..79efba862d 100644 --- a/testing/spectest/shared/capella/operations/bls_to_execution_changes.go +++ b/testing/spectest/shared/capella/operations/bls_to_execution_changes.go @@ -3,12 +3,12 @@ package operations import ( "testing" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" - common "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/common/operations" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" + common "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/common/operations" + 
"github.com/OffchainLabs/prysm/v7/testing/util" ) func blockWithBlsChange(ssz []byte) (interfaces.SignedBeaconBlock, error) { diff --git a/testing/spectest/shared/capella/operations/deposit.go b/testing/spectest/shared/capella/operations/deposit.go index 552bec5e62..71150b5143 100644 --- a/testing/spectest/shared/capella/operations/deposit.go +++ b/testing/spectest/shared/capella/operations/deposit.go @@ -3,13 +3,13 @@ package operations import ( "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/altair" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" - common "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/common/operations" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/altair" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" + common "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/common/operations" + "github.com/OffchainLabs/prysm/v7/testing/util" ) func blockWithDeposit(ssz []byte) (interfaces.SignedBeaconBlock, error) { diff --git a/testing/spectest/shared/capella/operations/execution_payload.go b/testing/spectest/shared/capella/operations/execution_payload.go index f69a8acff9..95d0df52ab 100644 --- a/testing/spectest/shared/capella/operations/execution_payload.go +++ b/testing/spectest/shared/capella/operations/execution_payload.go @@ -3,8 +3,8 @@ package operations import ( "testing" - "github.com/OffchainLabs/prysm/v6/runtime/version" - common "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/common/operations" + "github.com/OffchainLabs/prysm/v7/runtime/version" + common "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/common/operations" ) func RunExecutionPayloadTest(t *testing.T, config string) { diff --git a/testing/spectest/shared/capella/operations/helpers.go b/testing/spectest/shared/capella/operations/helpers.go index 368c5b1a98..f270b83396 100644 --- a/testing/spectest/shared/capella/operations/helpers.go +++ b/testing/spectest/shared/capella/operations/helpers.go @@ -1,11 +1,11 @@ package operations import ( - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" ) func sszToState(b []byte) (state.BeaconState, error) { diff --git a/testing/spectest/shared/capella/operations/proposer_slashing.go b/testing/spectest/shared/capella/operations/proposer_slashing.go index 58dd3874cb..a69a8ca118 100644 --- a/testing/spectest/shared/capella/operations/proposer_slashing.go +++ b/testing/spectest/shared/capella/operations/proposer_slashing.go @@ -3,12 +3,12 @@ package operations import ( "testing" - 
"github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" - common "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/common/operations" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" + common "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/common/operations" + "github.com/OffchainLabs/prysm/v7/testing/util" ) func blockWithProposerSlashing(ssz []byte) (interfaces.SignedBeaconBlock, error) { diff --git a/testing/spectest/shared/capella/operations/sync_committee.go b/testing/spectest/shared/capella/operations/sync_committee.go index 953a1ed34e..50a39d115e 100644 --- a/testing/spectest/shared/capella/operations/sync_committee.go +++ b/testing/spectest/shared/capella/operations/sync_committee.go @@ -3,12 +3,12 @@ package operations import ( "testing" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" - common "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/common/operations" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" + common "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/common/operations" + "github.com/OffchainLabs/prysm/v7/testing/util" ) func blockWithSyncAggregate(ssz []byte) (interfaces.SignedBeaconBlock, error) { diff --git a/testing/spectest/shared/capella/operations/voluntary_exit.go b/testing/spectest/shared/capella/operations/voluntary_exit.go index 07d9c7c4bd..699f3bdc07 100644 --- a/testing/spectest/shared/capella/operations/voluntary_exit.go +++ b/testing/spectest/shared/capella/operations/voluntary_exit.go @@ -3,12 +3,12 @@ package operations import ( "testing" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" - common "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/common/operations" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" + common "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/common/operations" + "github.com/OffchainLabs/prysm/v7/testing/util" ) func blockWithVoluntaryExit(ssz []byte) (interfaces.SignedBeaconBlock, error) { diff --git a/testing/spectest/shared/capella/operations/withdrawals.go b/testing/spectest/shared/capella/operations/withdrawals.go index e6c9d9782a..5fbe2a8b68 100644 --- a/testing/spectest/shared/capella/operations/withdrawals.go +++ b/testing/spectest/shared/capella/operations/withdrawals.go @@ -3,13 +3,13 @@ 
package operations import ( "testing" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - enginev1 "github.com/OffchainLabs/prysm/v6/proto/engine/v1" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" - common "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/common/operations" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + enginev1 "github.com/OffchainLabs/prysm/v7/proto/engine/v1" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" + common "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/common/operations" + "github.com/OffchainLabs/prysm/v7/testing/util" ) func blockWithWithdrawals(ssz []byte) (interfaces.SignedBeaconBlock, error) { diff --git a/testing/spectest/shared/capella/rewards/BUILD.bazel b/testing/spectest/shared/capella/rewards/BUILD.bazel index ab14ce292d..2763b069dc 100644 --- a/testing/spectest/shared/capella/rewards/BUILD.bazel +++ b/testing/spectest/shared/capella/rewards/BUILD.bazel @@ -4,7 +4,7 @@ go_library( name = "go_default_library", testonly = True, srcs = ["rewards_penalties.go"], - importpath = "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/capella/rewards", + importpath = "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/capella/rewards", visibility = ["//testing/spectest:__subpackages__"], deps = [ "//beacon-chain/core/altair:go_default_library", diff --git a/testing/spectest/shared/capella/rewards/rewards_penalties.go b/testing/spectest/shared/capella/rewards/rewards_penalties.go index 4fbf701eb2..0a5c77e74c 100644 --- a/testing/spectest/shared/capella/rewards/rewards_penalties.go +++ b/testing/spectest/shared/capella/rewards/rewards_penalties.go @@ -9,13 +9,13 @@ import ( "strings" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/altair" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/spectest/utils" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/altair" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/spectest/utils" + "github.com/OffchainLabs/prysm/v7/testing/util" "github.com/golang/snappy" ) diff --git a/testing/spectest/shared/capella/sanity/BUILD.bazel b/testing/spectest/shared/capella/sanity/BUILD.bazel index dad1af022c..aa7f2ba200 100644 --- a/testing/spectest/shared/capella/sanity/BUILD.bazel +++ b/testing/spectest/shared/capella/sanity/BUILD.bazel @@ -8,7 +8,7 @@ go_library( "block_processing.yaml.go", "slot_processing.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/capella/sanity", + importpath = "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/capella/sanity", visibility = ["//testing/spectest:__subpackages__"], deps = [ 
"//beacon-chain/core/helpers:go_default_library", diff --git a/testing/spectest/shared/capella/sanity/block_processing.go b/testing/spectest/shared/capella/sanity/block_processing.go index d27f9333a1..c2bfb1c2d5 100644 --- a/testing/spectest/shared/capella/sanity/block_processing.go +++ b/testing/spectest/shared/capella/sanity/block_processing.go @@ -8,15 +8,15 @@ import ( "strings" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/transition" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/spectest/utils" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/transition" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/spectest/utils" + "github.com/OffchainLabs/prysm/v7/testing/util" "github.com/bazelbuild/rules_go/go/tools/bazel" "github.com/golang/snappy" "github.com/google/go-cmp/cmp" diff --git a/testing/spectest/shared/capella/sanity/slot_processing.go b/testing/spectest/shared/capella/sanity/slot_processing.go index d98efc1825..2e51f86f1f 100644 --- a/testing/spectest/shared/capella/sanity/slot_processing.go +++ b/testing/spectest/shared/capella/sanity/slot_processing.go @@ -5,12 +5,12 @@ import ( "strconv" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/transition" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/spectest/utils" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/transition" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/spectest/utils" + "github.com/OffchainLabs/prysm/v7/testing/util" "github.com/golang/snappy" "github.com/google/go-cmp/cmp" "google.golang.org/protobuf/proto" diff --git a/testing/spectest/shared/capella/ssz_static/BUILD.bazel b/testing/spectest/shared/capella/ssz_static/BUILD.bazel index 3905c77d0d..95e75f7815 100644 --- a/testing/spectest/shared/capella/ssz_static/BUILD.bazel +++ b/testing/spectest/shared/capella/ssz_static/BUILD.bazel @@ -4,7 +4,7 @@ go_library( name = "go_default_library", testonly = True, srcs = ["ssz_static.go"], - importpath = "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/capella/ssz_static", + importpath = "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/capella/ssz_static", visibility = ["//testing/spectest:__subpackages__"], deps = [ "//beacon-chain/state/state-native:go_default_library", diff --git a/testing/spectest/shared/capella/ssz_static/ssz_static.go 
b/testing/spectest/shared/capella/ssz_static/ssz_static.go index 6e7e909cc4..1976e9b3e6 100644 --- a/testing/spectest/shared/capella/ssz_static/ssz_static.go +++ b/testing/spectest/shared/capella/ssz_static/ssz_static.go @@ -5,11 +5,11 @@ import ( "errors" "testing" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - enginev1 "github.com/OffchainLabs/prysm/v6/proto/engine/v1" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/require" - common "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/common/ssz_static" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + enginev1 "github.com/OffchainLabs/prysm/v7/proto/engine/v1" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/require" + common "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/common/ssz_static" fssz "github.com/prysmaticlabs/fastssz" ) diff --git a/testing/spectest/shared/common/forkchoice/BUILD.bazel b/testing/spectest/shared/common/forkchoice/BUILD.bazel index ddc4162632..8d0aa99e4a 100644 --- a/testing/spectest/shared/common/forkchoice/BUILD.bazel +++ b/testing/spectest/shared/common/forkchoice/BUILD.bazel @@ -9,7 +9,7 @@ go_library( "service.go", "type.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/common/forkchoice", + importpath = "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/common/forkchoice", visibility = ["//testing/spectest:__subpackages__"], deps = [ "//beacon-chain/blockchain:go_default_library", diff --git a/testing/spectest/shared/common/forkchoice/builder.go b/testing/spectest/shared/common/forkchoice/builder.go index acc48af45b..af6e49f179 100644 --- a/testing/spectest/shared/common/forkchoice/builder.go +++ b/testing/spectest/shared/common/forkchoice/builder.go @@ -7,17 +7,17 @@ import ( "testing" "time" - "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain" - "github.com/OffchainLabs/prysm/v6/beacon-chain/execution" - "github.com/OffchainLabs/prysm/v6/beacon-chain/startup" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/beacon-chain/verification" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain" + "github.com/OffchainLabs/prysm/v7/beacon-chain/execution" + "github.com/OffchainLabs/prysm/v7/beacon-chain/startup" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/beacon-chain/verification" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/require" "github.com/ethereum/go-ethereum/common" ) diff --git a/testing/spectest/shared/common/forkchoice/builder_test.go b/testing/spectest/shared/common/forkchoice/builder_test.go index 30c3b8e6d7..88c96dcf9b 100644 --- a/testing/spectest/shared/common/forkchoice/builder_test.go +++ 
b/testing/spectest/shared/common/forkchoice/builder_test.go @@ -3,10 +3,10 @@ package forkchoice import ( "testing" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" ) func TestBuilderTick(t *testing.T) { diff --git a/testing/spectest/shared/common/forkchoice/runner.go b/testing/spectest/shared/common/forkchoice/runner.go index d664c06a41..92908590f4 100644 --- a/testing/spectest/shared/common/forkchoice/runner.go +++ b/testing/spectest/shared/common/forkchoice/runner.go @@ -10,20 +10,20 @@ import ( "strings" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/transition" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - "github.com/OffchainLabs/prysm/v6/beacon-chain/verification" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/spectest/utils" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/transition" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + "github.com/OffchainLabs/prysm/v7/beacon-chain/verification" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/spectest/utils" + "github.com/OffchainLabs/prysm/v7/testing/util" "github.com/ethereum/go-ethereum/common/hexutil" "github.com/golang/snappy" ) diff --git a/testing/spectest/shared/common/forkchoice/service.go b/testing/spectest/shared/common/forkchoice/service.go index 25674cdd3a..d96b8e600c 100644 --- a/testing/spectest/shared/common/forkchoice/service.go +++ b/testing/spectest/shared/common/forkchoice/service.go @@ -5,29 +5,29 @@ import ( "math/big" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain" - "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/kzg" - mock "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/cache" - "github.com/OffchainLabs/prysm/v6/beacon-chain/cache/depositsnapshot" - coreTime "github.com/OffchainLabs/prysm/v6/beacon-chain/core/time" - "github.com/OffchainLabs/prysm/v6/beacon-chain/db/filesystem" - testDB 
"github.com/OffchainLabs/prysm/v6/beacon-chain/db/testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/forkchoice" - doublylinkedtree "github.com/OffchainLabs/prysm/v6/beacon-chain/forkchoice/doubly-linked-tree" - "github.com/OffchainLabs/prysm/v6/beacon-chain/operations/attestations" - p2pTesting "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/startup" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state/stategen" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - payloadattribute "github.com/OffchainLabs/prysm/v6/consensus-types/payload-attribute" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - pb "github.com/OffchainLabs/prysm/v6/proto/engine/v1" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain" + "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain/kzg" + mock "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain/testing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/cache" + "github.com/OffchainLabs/prysm/v7/beacon-chain/cache/depositsnapshot" + coreTime "github.com/OffchainLabs/prysm/v7/beacon-chain/core/time" + "github.com/OffchainLabs/prysm/v7/beacon-chain/db/filesystem" + testDB "github.com/OffchainLabs/prysm/v7/beacon-chain/db/testing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/forkchoice" + doublylinkedtree "github.com/OffchainLabs/prysm/v7/beacon-chain/forkchoice/doubly-linked-tree" + "github.com/OffchainLabs/prysm/v7/beacon-chain/operations/attestations" + p2pTesting "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/testing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/startup" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state/stategen" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + payloadattribute "github.com/OffchainLabs/prysm/v7/consensus-types/payload-attribute" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + pb "github.com/OffchainLabs/prysm/v7/proto/engine/v1" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/require" "github.com/ethereum/go-ethereum/common" "github.com/ethereum/go-ethereum/common/hexutil" gethtypes "github.com/ethereum/go-ethereum/core/types" diff --git a/testing/spectest/shared/common/light_client/BUILD.bazel b/testing/spectest/shared/common/light_client/BUILD.bazel index 0184fadbc6..d46557af36 100644 --- a/testing/spectest/shared/common/light_client/BUILD.bazel +++ b/testing/spectest/shared/common/light_client/BUILD.bazel @@ -7,7 +7,7 @@ go_library( "single_merkle_proof.go", "update_ranking.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/common/light_client", + importpath = "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/common/light_client", visibility = ["//testing/spectest:__subpackages__"], deps = [ "//beacon-chain/core/helpers:go_default_library", diff --git a/testing/spectest/shared/common/light_client/single_merkle_proof.go b/testing/spectest/shared/common/light_client/single_merkle_proof.go index 
acec35b3a4..7a05e8baf5 100644 --- a/testing/spectest/shared/common/light_client/single_merkle_proof.go +++ b/testing/spectest/shared/common/light_client/single_merkle_proof.go @@ -8,15 +8,15 @@ import ( "strings" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - "github.com/OffchainLabs/prysm/v6/container/trie" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/spectest/utils" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + "github.com/OffchainLabs/prysm/v7/container/trie" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/spectest/utils" + "github.com/OffchainLabs/prysm/v7/testing/util" "github.com/golang/snappy" ) diff --git a/testing/spectest/shared/common/light_client/update_ranking.go b/testing/spectest/shared/common/light_client/update_ranking.go index 7b1a1774e5..9101573e9a 100644 --- a/testing/spectest/shared/common/light_client/update_ranking.go +++ b/testing/spectest/shared/common/light_client/update_ranking.go @@ -5,16 +5,16 @@ import ( "path" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - lightclient "github.com/OffchainLabs/prysm/v6/beacon-chain/light-client" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - lightclienttypes "github.com/OffchainLabs/prysm/v6/consensus-types/light-client" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/spectest/utils" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + lightclient "github.com/OffchainLabs/prysm/v7/beacon-chain/light-client" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + lightclienttypes "github.com/OffchainLabs/prysm/v7/consensus-types/light-client" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/spectest/utils" + "github.com/OffchainLabs/prysm/v7/testing/util" "github.com/golang/snappy" ) diff --git a/testing/spectest/shared/common/merkle_proof/BUILD.bazel b/testing/spectest/shared/common/merkle_proof/BUILD.bazel index 2c15c19b92..41abacb5e0 100644 --- a/testing/spectest/shared/common/merkle_proof/BUILD.bazel +++ b/testing/spectest/shared/common/merkle_proof/BUILD.bazel @@ -4,7 +4,7 @@ go_library( name = "go_default_library", testonly = True, srcs = ["single_merkle_proof.go"], - importpath = "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/common/merkle_proof", + importpath = "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/common/merkle_proof", visibility = 
["//visibility:public"], deps = [ "//config/params:go_default_library", diff --git a/testing/spectest/shared/common/merkle_proof/single_merkle_proof.go b/testing/spectest/shared/common/merkle_proof/single_merkle_proof.go index 99280f3423..ea2c77685f 100644 --- a/testing/spectest/shared/common/merkle_proof/single_merkle_proof.go +++ b/testing/spectest/shared/common/merkle_proof/single_merkle_proof.go @@ -6,13 +6,13 @@ import ( "path" "testing" - "github.com/OffchainLabs/prysm/v6/config/params" - consensus_blocks "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/container/trie" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/common/ssz_static" - "github.com/OffchainLabs/prysm/v6/testing/spectest/utils" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/config/params" + consensus_blocks "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/container/trie" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/common/ssz_static" + "github.com/OffchainLabs/prysm/v7/testing/spectest/utils" + "github.com/OffchainLabs/prysm/v7/testing/util" "github.com/bazelbuild/rules_go/go/tools/bazel" "github.com/golang/snappy" fssz "github.com/prysmaticlabs/fastssz" diff --git a/testing/spectest/shared/common/operations/BUILD.bazel b/testing/spectest/shared/common/operations/BUILD.bazel index 758f479767..0156ebb991 100644 --- a/testing/spectest/shared/common/operations/BUILD.bazel +++ b/testing/spectest/shared/common/operations/BUILD.bazel @@ -20,7 +20,7 @@ go_library( "withdrawal_request.go", "withdrawals.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/common/operations", + importpath = "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/common/operations", visibility = ["//visibility:public"], deps = [ "//beacon-chain/core/altair:go_default_library", diff --git a/testing/spectest/shared/common/operations/attestation.go b/testing/spectest/shared/common/operations/attestation.go index 410c7aa104..32e4a0e404 100644 --- a/testing/spectest/shared/common/operations/attestation.go +++ b/testing/spectest/shared/common/operations/attestation.go @@ -5,12 +5,12 @@ import ( "path" "testing" - b "github.com/OffchainLabs/prysm/v6/beacon-chain/core/blocks" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/spectest/utils" - "github.com/OffchainLabs/prysm/v6/testing/util" + b "github.com/OffchainLabs/prysm/v7/beacon-chain/core/blocks" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/spectest/utils" + "github.com/OffchainLabs/prysm/v7/testing/util" "github.com/golang/snappy" "github.com/pkg/errors" ) diff --git a/testing/spectest/shared/common/operations/attester_slashing.go b/testing/spectest/shared/common/operations/attester_slashing.go index 6919950754..7156beeab1 100644 --- a/testing/spectest/shared/common/operations/attester_slashing.go +++ b/testing/spectest/shared/common/operations/attester_slashing.go @@ -4,10 +4,10 @@ import ( "context" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/blocks" - v 
"github.com/OffchainLabs/prysm/v6/beacon-chain/core/validators" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/blocks" + v "github.com/OffchainLabs/prysm/v7/beacon-chain/core/validators" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" ) func RunAttesterSlashingTest(t *testing.T, config string, fork string, block blockWithSSZObject, sszToState SSZToState) { diff --git a/testing/spectest/shared/common/operations/block_header.go b/testing/spectest/shared/common/operations/block_header.go index 5f502949e0..041fe41810 100644 --- a/testing/spectest/shared/common/operations/block_header.go +++ b/testing/spectest/shared/common/operations/block_header.go @@ -6,12 +6,12 @@ import ( "strings" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/blocks" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/spectest/utils" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/blocks" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/spectest/utils" + "github.com/OffchainLabs/prysm/v7/testing/util" "github.com/bazelbuild/rules_go/go/tools/bazel" "github.com/golang/snappy" ) diff --git a/testing/spectest/shared/common/operations/bls_to_execution_changes.go b/testing/spectest/shared/common/operations/bls_to_execution_changes.go index 371509fd46..6d999954c8 100644 --- a/testing/spectest/shared/common/operations/bls_to_execution_changes.go +++ b/testing/spectest/shared/common/operations/bls_to_execution_changes.go @@ -5,12 +5,12 @@ import ( "path" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/blocks" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/spectest/utils" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/blocks" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/spectest/utils" + "github.com/OffchainLabs/prysm/v7/testing/util" "github.com/golang/snappy" "github.com/pkg/errors" ) diff --git a/testing/spectest/shared/common/operations/consolidations.go b/testing/spectest/shared/common/operations/consolidations.go index 2e8af689f3..34e696770d 100644 --- a/testing/spectest/shared/common/operations/consolidations.go +++ b/testing/spectest/shared/common/operations/consolidations.go @@ -5,12 +5,12 @@ import ( "path" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/electra" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/spectest/utils" - "github.com/OffchainLabs/prysm/v6/testing/util" + 
"github.com/OffchainLabs/prysm/v7/beacon-chain/core/electra" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/spectest/utils" + "github.com/OffchainLabs/prysm/v7/testing/util" "github.com/golang/snappy" ) diff --git a/testing/spectest/shared/common/operations/deposit.go b/testing/spectest/shared/common/operations/deposit.go index 53b74205d6..f5518abae7 100644 --- a/testing/spectest/shared/common/operations/deposit.go +++ b/testing/spectest/shared/common/operations/deposit.go @@ -5,12 +5,12 @@ import ( "path" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/spectest/utils" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/spectest/utils" + "github.com/OffchainLabs/prysm/v7/testing/util" "github.com/golang/snappy" ) diff --git a/testing/spectest/shared/common/operations/deposit_request.go b/testing/spectest/shared/common/operations/deposit_request.go index 4ac97f0627..ae6b875732 100644 --- a/testing/spectest/shared/common/operations/deposit_request.go +++ b/testing/spectest/shared/common/operations/deposit_request.go @@ -5,12 +5,12 @@ import ( "path" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/electra" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/spectest/utils" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/electra" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/spectest/utils" + "github.com/OffchainLabs/prysm/v7/testing/util" "github.com/golang/snappy" ) diff --git a/testing/spectest/shared/common/operations/execution_payload.go b/testing/spectest/shared/common/operations/execution_payload.go index f62041349f..bf807f8d69 100644 --- a/testing/spectest/shared/common/operations/execution_payload.go +++ b/testing/spectest/shared/common/operations/execution_payload.go @@ -5,14 +5,14 @@ import ( "strings" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/blocks" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - "github.com/OffchainLabs/prysm/v6/runtime/version" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/spectest/utils" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/blocks" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + 
"github.com/OffchainLabs/prysm/v7/runtime/version" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/spectest/utils" + "github.com/OffchainLabs/prysm/v7/testing/util" "github.com/bazelbuild/rules_go/go/tools/bazel" "github.com/golang/snappy" ) diff --git a/testing/spectest/shared/common/operations/proposer_slashing.go b/testing/spectest/shared/common/operations/proposer_slashing.go index 40f19b62ae..ca912a9154 100644 --- a/testing/spectest/shared/common/operations/proposer_slashing.go +++ b/testing/spectest/shared/common/operations/proposer_slashing.go @@ -4,10 +4,10 @@ import ( "context" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/blocks" - v "github.com/OffchainLabs/prysm/v6/beacon-chain/core/validators" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/blocks" + v "github.com/OffchainLabs/prysm/v7/beacon-chain/core/validators" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" ) func RunProposerSlashingTest(t *testing.T, config string, fork string, block blockWithSSZObject, sszToState SSZToState) { diff --git a/testing/spectest/shared/common/operations/slashing.go b/testing/spectest/shared/common/operations/slashing.go index 77f8b42aed..412183998a 100644 --- a/testing/spectest/shared/common/operations/slashing.go +++ b/testing/spectest/shared/common/operations/slashing.go @@ -4,9 +4,9 @@ import ( "path" "testing" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/spectest/utils" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/spectest/utils" + "github.com/OffchainLabs/prysm/v7/testing/util" "github.com/golang/snappy" ) diff --git a/testing/spectest/shared/common/operations/sync_aggregate.go b/testing/spectest/shared/common/operations/sync_aggregate.go index cf39a1a29b..d966f1ff8f 100644 --- a/testing/spectest/shared/common/operations/sync_aggregate.go +++ b/testing/spectest/shared/common/operations/sync_aggregate.go @@ -5,12 +5,12 @@ import ( "path" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/altair" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/spectest/utils" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/altair" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/spectest/utils" + "github.com/OffchainLabs/prysm/v7/testing/util" "github.com/golang/snappy" ) diff --git a/testing/spectest/shared/common/operations/test_runner.go b/testing/spectest/shared/common/operations/test_runner.go index 5ae73b6dec..62056d5d3d 100644 --- a/testing/spectest/shared/common/operations/test_runner.go +++ b/testing/spectest/shared/common/operations/test_runner.go @@ -7,11 +7,11 @@ import ( "strings" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - 
"github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" "github.com/bazelbuild/rules_go/go/tools/bazel" "github.com/golang/snappy" "github.com/google/go-cmp/cmp" diff --git a/testing/spectest/shared/common/operations/voluntary_exit.go b/testing/spectest/shared/common/operations/voluntary_exit.go index 5a8646b51f..31c86cf389 100644 --- a/testing/spectest/shared/common/operations/voluntary_exit.go +++ b/testing/spectest/shared/common/operations/voluntary_exit.go @@ -5,13 +5,13 @@ import ( "path" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/blocks" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/validators" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/spectest/utils" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/blocks" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/validators" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/spectest/utils" + "github.com/OffchainLabs/prysm/v7/testing/util" "github.com/golang/snappy" ) diff --git a/testing/spectest/shared/common/operations/withdrawal_request.go b/testing/spectest/shared/common/operations/withdrawal_request.go index 4a8d673f91..a9a95abc82 100644 --- a/testing/spectest/shared/common/operations/withdrawal_request.go +++ b/testing/spectest/shared/common/operations/withdrawal_request.go @@ -5,12 +5,12 @@ import ( "path" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/electra" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/spectest/utils" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/electra" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/spectest/utils" + "github.com/OffchainLabs/prysm/v7/testing/util" "github.com/golang/snappy" ) diff --git a/testing/spectest/shared/common/operations/withdrawals.go b/testing/spectest/shared/common/operations/withdrawals.go index dca1ad8bb8..3bf029a903 100644 --- a/testing/spectest/shared/common/operations/withdrawals.go +++ b/testing/spectest/shared/common/operations/withdrawals.go @@ -5,14 +5,14 @@ import ( "path" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/blocks" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - consensusblocks "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - enginev1 "github.com/OffchainLabs/prysm/v6/proto/engine/v1" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/spectest/utils" - 
"github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/blocks" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + consensusblocks "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + enginev1 "github.com/OffchainLabs/prysm/v7/proto/engine/v1" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/spectest/utils" + "github.com/OffchainLabs/prysm/v7/testing/util" "github.com/golang/snappy" ) diff --git a/testing/spectest/shared/common/ssz_static/BUILD.bazel b/testing/spectest/shared/common/ssz_static/BUILD.bazel index 057f4ef3e8..ffeeecf5cf 100644 --- a/testing/spectest/shared/common/ssz_static/BUILD.bazel +++ b/testing/spectest/shared/common/ssz_static/BUILD.bazel @@ -7,7 +7,7 @@ go_library( "ssz_static.go", "types.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/common/ssz_static", + importpath = "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/common/ssz_static", visibility = ["//testing/spectest:__subpackages__"], deps = [ "//testing/require:go_default_library", diff --git a/testing/spectest/shared/common/ssz_static/ssz_static.go b/testing/spectest/shared/common/ssz_static/ssz_static.go index 4457b6b7b6..459722c0e0 100644 --- a/testing/spectest/shared/common/ssz_static/ssz_static.go +++ b/testing/spectest/shared/common/ssz_static/ssz_static.go @@ -6,9 +6,9 @@ import ( "path" "testing" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/spectest/utils" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/spectest/utils" + "github.com/OffchainLabs/prysm/v7/testing/util" "github.com/golang/snappy" fssz "github.com/prysmaticlabs/fastssz" ) diff --git a/testing/spectest/shared/common/ssz_static/ssz_static_example_test.go b/testing/spectest/shared/common/ssz_static/ssz_static_example_test.go index 29ef6db168..7fa24b2db6 100644 --- a/testing/spectest/shared/common/ssz_static/ssz_static_example_test.go +++ b/testing/spectest/shared/common/ssz_static/ssz_static_example_test.go @@ -5,10 +5,10 @@ import ( "fmt" "testing" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/require" - common "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/common/ssz_static" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/require" + common "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/common/ssz_static" "github.com/pkg/errors" fssz "github.com/prysmaticlabs/fastssz" ) diff --git a/testing/spectest/shared/deneb/epoch_processing/BUILD.bazel b/testing/spectest/shared/deneb/epoch_processing/BUILD.bazel index 1af5a00967..ca9354a4a9 100644 --- a/testing/spectest/shared/deneb/epoch_processing/BUILD.bazel +++ b/testing/spectest/shared/deneb/epoch_processing/BUILD.bazel @@ -17,7 +17,7 @@ go_library( "slashings.go", "slashings_reset.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/deneb/epoch_processing", + importpath = "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/deneb/epoch_processing", visibility = ["//testing/spectest:__subpackages__"], deps = 
[ "//beacon-chain/core/altair:go_default_library", diff --git a/testing/spectest/shared/deneb/epoch_processing/effective_balance_updates.go b/testing/spectest/shared/deneb/epoch_processing/effective_balance_updates.go index c7e2e2cb29..ec05c420b2 100644 --- a/testing/spectest/shared/deneb/epoch_processing/effective_balance_updates.go +++ b/testing/spectest/shared/deneb/epoch_processing/effective_balance_updates.go @@ -4,10 +4,10 @@ import ( "path" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/epoch" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/spectest/utils" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/epoch" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/spectest/utils" ) // RunEffectiveBalanceUpdatesTests executes "epoch_processing/effective_balance_updates" tests. diff --git a/testing/spectest/shared/deneb/epoch_processing/eth1_data_reset.go b/testing/spectest/shared/deneb/epoch_processing/eth1_data_reset.go index fa13661ad5..394c68cdbe 100644 --- a/testing/spectest/shared/deneb/epoch_processing/eth1_data_reset.go +++ b/testing/spectest/shared/deneb/epoch_processing/eth1_data_reset.go @@ -4,10 +4,10 @@ import ( "path" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/epoch" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/spectest/utils" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/epoch" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/spectest/utils" ) // RunEth1DataResetTests executes "epoch_processing/eth1_data_reset" tests. 
diff --git a/testing/spectest/shared/deneb/epoch_processing/helpers.go b/testing/spectest/shared/deneb/epoch_processing/helpers.go index b1871fde02..3ab99b8e74 100644 --- a/testing/spectest/shared/deneb/epoch_processing/helpers.go +++ b/testing/spectest/shared/deneb/epoch_processing/helpers.go @@ -6,11 +6,11 @@ import ( "strings" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" "github.com/bazelbuild/rules_go/go/tools/bazel" "github.com/golang/snappy" "google.golang.org/protobuf/proto" diff --git a/testing/spectest/shared/deneb/epoch_processing/historical_summaries_update.go b/testing/spectest/shared/deneb/epoch_processing/historical_summaries_update.go index 14c1a237b0..aa7f57a360 100644 --- a/testing/spectest/shared/deneb/epoch_processing/historical_summaries_update.go +++ b/testing/spectest/shared/deneb/epoch_processing/historical_summaries_update.go @@ -4,10 +4,10 @@ import ( "path" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/epoch" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/spectest/utils" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/epoch" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/spectest/utils" ) // RunHistoricalSummariesUpdateTests executes "epoch_processing/historical_Summaries_update" tests. diff --git a/testing/spectest/shared/deneb/epoch_processing/inactivity_updates.go b/testing/spectest/shared/deneb/epoch_processing/inactivity_updates.go index 7893538604..6b9c4adde9 100644 --- a/testing/spectest/shared/deneb/epoch_processing/inactivity_updates.go +++ b/testing/spectest/shared/deneb/epoch_processing/inactivity_updates.go @@ -5,11 +5,11 @@ import ( "path" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/altair" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/spectest/utils" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/altair" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/spectest/utils" ) // RunInactivityUpdatesTest executes "epoch_processing/inactivity_updates" tests. 
diff --git a/testing/spectest/shared/deneb/epoch_processing/justification_and_finalization.go b/testing/spectest/shared/deneb/epoch_processing/justification_and_finalization.go index 6b1e86b435..afd1f57162 100644 --- a/testing/spectest/shared/deneb/epoch_processing/justification_and_finalization.go +++ b/testing/spectest/shared/deneb/epoch_processing/justification_and_finalization.go @@ -5,11 +5,11 @@ import ( "path" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/altair" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/epoch/precompute" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/spectest/utils" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/altair" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/epoch/precompute" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/spectest/utils" ) // RunJustificationAndFinalizationTests executes "epoch_processing/justification_and_finalization" tests. diff --git a/testing/spectest/shared/deneb/epoch_processing/participation_flag_updates.go b/testing/spectest/shared/deneb/epoch_processing/participation_flag_updates.go index 5465cdc979..85215fef01 100644 --- a/testing/spectest/shared/deneb/epoch_processing/participation_flag_updates.go +++ b/testing/spectest/shared/deneb/epoch_processing/participation_flag_updates.go @@ -4,10 +4,10 @@ import ( "path" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/altair" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/spectest/utils" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/altair" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/spectest/utils" ) // RunParticipationFlagUpdatesTests executes "epoch_processing/participation_flag_updates" tests. diff --git a/testing/spectest/shared/deneb/epoch_processing/randao_mixes_reset.go b/testing/spectest/shared/deneb/epoch_processing/randao_mixes_reset.go index 5df650eaa7..e0465416aa 100644 --- a/testing/spectest/shared/deneb/epoch_processing/randao_mixes_reset.go +++ b/testing/spectest/shared/deneb/epoch_processing/randao_mixes_reset.go @@ -4,10 +4,10 @@ import ( "path" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/epoch" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/spectest/utils" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/epoch" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/spectest/utils" ) // RunRandaoMixesResetTests executes "epoch_processing/randao_mixes_reset" tests. 
diff --git a/testing/spectest/shared/deneb/epoch_processing/registry_updates.go b/testing/spectest/shared/deneb/epoch_processing/registry_updates.go index 2f067496cf..1a5497ebe6 100644 --- a/testing/spectest/shared/deneb/epoch_processing/registry_updates.go +++ b/testing/spectest/shared/deneb/epoch_processing/registry_updates.go @@ -5,11 +5,11 @@ import ( "path" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/epoch" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/spectest/utils" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/epoch" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/spectest/utils" ) // RunRegistryUpdatesTests executes "epoch_processing/registry_updates" tests. diff --git a/testing/spectest/shared/deneb/epoch_processing/rewards_and_penalties.go b/testing/spectest/shared/deneb/epoch_processing/rewards_and_penalties.go index 28198b0be2..60fbf44c08 100644 --- a/testing/spectest/shared/deneb/epoch_processing/rewards_and_penalties.go +++ b/testing/spectest/shared/deneb/epoch_processing/rewards_and_penalties.go @@ -5,11 +5,11 @@ import ( "path" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/altair" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/spectest/utils" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/altair" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/spectest/utils" ) // RunRewardsAndPenaltiesTests executes "epoch_processing/rewards_and_penalties" tests. diff --git a/testing/spectest/shared/deneb/epoch_processing/slashings.go b/testing/spectest/shared/deneb/epoch_processing/slashings.go index 0b41b3227d..2c91603531 100644 --- a/testing/spectest/shared/deneb/epoch_processing/slashings.go +++ b/testing/spectest/shared/deneb/epoch_processing/slashings.go @@ -4,11 +4,11 @@ import ( "path" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/epoch" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/spectest/utils" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/epoch" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/spectest/utils" ) // RunSlashingsTests executes "epoch_processing/slashings" tests. 
diff --git a/testing/spectest/shared/deneb/epoch_processing/slashings_reset.go b/testing/spectest/shared/deneb/epoch_processing/slashings_reset.go index b4fc779a37..ec1da90141 100644 --- a/testing/spectest/shared/deneb/epoch_processing/slashings_reset.go +++ b/testing/spectest/shared/deneb/epoch_processing/slashings_reset.go @@ -4,10 +4,10 @@ import ( "path" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/epoch" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/spectest/utils" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/epoch" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/spectest/utils" ) // RunSlashingsResetTests executes "epoch_processing/slashings_reset" tests. diff --git a/testing/spectest/shared/deneb/finality/BUILD.bazel b/testing/spectest/shared/deneb/finality/BUILD.bazel index 0c24468b7a..00983dea5a 100644 --- a/testing/spectest/shared/deneb/finality/BUILD.bazel +++ b/testing/spectest/shared/deneb/finality/BUILD.bazel @@ -4,7 +4,7 @@ go_library( name = "go_default_library", testonly = True, srcs = ["finality.go"], - importpath = "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/deneb/finality", + importpath = "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/deneb/finality", visibility = ["//visibility:public"], deps = [ "//beacon-chain/core/helpers:go_default_library", diff --git a/testing/spectest/shared/deneb/finality/finality.go b/testing/spectest/shared/deneb/finality/finality.go index af29f52ae4..d88782d7f7 100644 --- a/testing/spectest/shared/deneb/finality/finality.go +++ b/testing/spectest/shared/deneb/finality/finality.go @@ -5,15 +5,15 @@ import ( "fmt" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/transition" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/spectest/utils" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/transition" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/spectest/utils" + "github.com/OffchainLabs/prysm/v7/testing/util" "github.com/golang/snappy" "google.golang.org/protobuf/proto" ) diff --git a/testing/spectest/shared/deneb/fork/BUILD.bazel b/testing/spectest/shared/deneb/fork/BUILD.bazel index e096661e35..8bca51e277 100644 --- a/testing/spectest/shared/deneb/fork/BUILD.bazel +++ b/testing/spectest/shared/deneb/fork/BUILD.bazel @@ -7,7 +7,7 @@ go_library( "transition.go", "upgrade_to_deneb.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/deneb/fork", + importpath = "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/deneb/fork", visibility = 
["//visibility:public"], deps = [ "//beacon-chain/core/deneb:go_default_library", diff --git a/testing/spectest/shared/deneb/fork/transition.go b/testing/spectest/shared/deneb/fork/transition.go index ae0555f560..bc1533af20 100644 --- a/testing/spectest/shared/deneb/fork/transition.go +++ b/testing/spectest/shared/deneb/fork/transition.go @@ -5,16 +5,16 @@ import ( "fmt" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/transition" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - types "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/spectest/utils" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/transition" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + types "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/spectest/utils" + "github.com/OffchainLabs/prysm/v7/testing/util" "github.com/golang/snappy" ) diff --git a/testing/spectest/shared/deneb/fork/upgrade_to_deneb.go b/testing/spectest/shared/deneb/fork/upgrade_to_deneb.go index 9195ea86bf..b639e6d175 100644 --- a/testing/spectest/shared/deneb/fork/upgrade_to_deneb.go +++ b/testing/spectest/shared/deneb/fork/upgrade_to_deneb.go @@ -4,13 +4,13 @@ import ( "path" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/deneb" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/spectest/utils" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/deneb" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/spectest/utils" + "github.com/OffchainLabs/prysm/v7/testing/util" "github.com/golang/snappy" "github.com/google/go-cmp/cmp" "google.golang.org/protobuf/proto" diff --git a/testing/spectest/shared/deneb/merkle_proof/BUILD.bazel b/testing/spectest/shared/deneb/merkle_proof/BUILD.bazel index 44b234258e..8beff8e6c9 100644 --- a/testing/spectest/shared/deneb/merkle_proof/BUILD.bazel +++ b/testing/spectest/shared/deneb/merkle_proof/BUILD.bazel @@ -4,7 +4,7 @@ go_library( name = "go_default_library", testonly = True, srcs = ["merkle_proof.go"], - importpath = "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/deneb/merkle_proof", + importpath = "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/deneb/merkle_proof", visibility = ["//visibility:public"], deps = 
[ "//testing/spectest/shared/common/merkle_proof:go_default_library", diff --git a/testing/spectest/shared/deneb/merkle_proof/merkle_proof.go b/testing/spectest/shared/deneb/merkle_proof/merkle_proof.go index 64b8c44566..1b342d7786 100644 --- a/testing/spectest/shared/deneb/merkle_proof/merkle_proof.go +++ b/testing/spectest/shared/deneb/merkle_proof/merkle_proof.go @@ -3,8 +3,8 @@ package merkle_proof import ( "testing" - common "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/common/merkle_proof" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/deneb/ssz_static" + common "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/common/merkle_proof" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/deneb/ssz_static" ) func RunMerkleProofTests(t *testing.T, config string) { diff --git a/testing/spectest/shared/deneb/operations/BUILD.bazel b/testing/spectest/shared/deneb/operations/BUILD.bazel index b66b600a39..6cb6eea050 100644 --- a/testing/spectest/shared/deneb/operations/BUILD.bazel +++ b/testing/spectest/shared/deneb/operations/BUILD.bazel @@ -16,7 +16,7 @@ go_library( "voluntary_exit.go", "withdrawals.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/deneb/operations", + importpath = "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/deneb/operations", visibility = ["//testing/spectest:__subpackages__"], deps = [ "//beacon-chain/core/altair:go_default_library", diff --git a/testing/spectest/shared/deneb/operations/attestation.go b/testing/spectest/shared/deneb/operations/attestation.go index 92cd3335a4..7f3739ca83 100644 --- a/testing/spectest/shared/deneb/operations/attestation.go +++ b/testing/spectest/shared/deneb/operations/attestation.go @@ -3,13 +3,13 @@ package operations import ( "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/altair" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" - common "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/common/operations" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/altair" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" + common "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/common/operations" + "github.com/OffchainLabs/prysm/v7/testing/util" ) func blockWithAttestation(attestationSSZ []byte) (interfaces.SignedBeaconBlock, error) { diff --git a/testing/spectest/shared/deneb/operations/attester_slashing.go b/testing/spectest/shared/deneb/operations/attester_slashing.go index ef91a8d9e5..cdae368718 100644 --- a/testing/spectest/shared/deneb/operations/attester_slashing.go +++ b/testing/spectest/shared/deneb/operations/attester_slashing.go @@ -3,12 +3,12 @@ package operations import ( "testing" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" - common "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/common/operations" - "github.com/OffchainLabs/prysm/v6/testing/util" + 
"github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" + common "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/common/operations" + "github.com/OffchainLabs/prysm/v7/testing/util" ) func blockWithAttesterSlashing(asSSZ []byte) (interfaces.SignedBeaconBlock, error) { diff --git a/testing/spectest/shared/deneb/operations/block_header.go b/testing/spectest/shared/deneb/operations/block_header.go index cb48dcf8fe..93b0f41908 100644 --- a/testing/spectest/shared/deneb/operations/block_header.go +++ b/testing/spectest/shared/deneb/operations/block_header.go @@ -3,8 +3,8 @@ package operations import ( "testing" - "github.com/OffchainLabs/prysm/v6/runtime/version" - common "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/common/operations" + "github.com/OffchainLabs/prysm/v7/runtime/version" + common "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/common/operations" ) func RunBlockHeaderTest(t *testing.T, config string) { diff --git a/testing/spectest/shared/deneb/operations/bls_to_execution_changes.go b/testing/spectest/shared/deneb/operations/bls_to_execution_changes.go index e3b07ac03d..76a78e511b 100644 --- a/testing/spectest/shared/deneb/operations/bls_to_execution_changes.go +++ b/testing/spectest/shared/deneb/operations/bls_to_execution_changes.go @@ -3,12 +3,12 @@ package operations import ( "testing" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" - common "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/common/operations" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" + common "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/common/operations" + "github.com/OffchainLabs/prysm/v7/testing/util" ) func blockWithBlsChange(ssz []byte) (interfaces.SignedBeaconBlock, error) { diff --git a/testing/spectest/shared/deneb/operations/deposit.go b/testing/spectest/shared/deneb/operations/deposit.go index 17f89b3aca..35ad1a32cf 100644 --- a/testing/spectest/shared/deneb/operations/deposit.go +++ b/testing/spectest/shared/deneb/operations/deposit.go @@ -3,13 +3,13 @@ package operations import ( "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/altair" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" - common "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/common/operations" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/altair" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" + common "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/common/operations" + 
"github.com/OffchainLabs/prysm/v7/testing/util" ) func blockWithDeposit(ssz []byte) (interfaces.SignedBeaconBlock, error) { diff --git a/testing/spectest/shared/deneb/operations/execution_payload.go b/testing/spectest/shared/deneb/operations/execution_payload.go index b22939ec28..3a778c4bd7 100644 --- a/testing/spectest/shared/deneb/operations/execution_payload.go +++ b/testing/spectest/shared/deneb/operations/execution_payload.go @@ -3,8 +3,8 @@ package operations import ( "testing" - "github.com/OffchainLabs/prysm/v6/runtime/version" - common "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/common/operations" + "github.com/OffchainLabs/prysm/v7/runtime/version" + common "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/common/operations" ) func RunExecutionPayloadTest(t *testing.T, config string) { diff --git a/testing/spectest/shared/deneb/operations/helpers.go b/testing/spectest/shared/deneb/operations/helpers.go index 5259045e3a..9199d9dc5a 100644 --- a/testing/spectest/shared/deneb/operations/helpers.go +++ b/testing/spectest/shared/deneb/operations/helpers.go @@ -1,11 +1,11 @@ package operations import ( - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" ) func sszToState(b []byte) (state.BeaconState, error) { diff --git a/testing/spectest/shared/deneb/operations/proposer_slashing.go b/testing/spectest/shared/deneb/operations/proposer_slashing.go index 1d66fe97eb..b578341d58 100644 --- a/testing/spectest/shared/deneb/operations/proposer_slashing.go +++ b/testing/spectest/shared/deneb/operations/proposer_slashing.go @@ -3,12 +3,12 @@ package operations import ( "testing" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" - common "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/common/operations" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" + common "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/common/operations" + "github.com/OffchainLabs/prysm/v7/testing/util" ) func blockWithProposerSlashing(ssz []byte) (interfaces.SignedBeaconBlock, error) { diff --git a/testing/spectest/shared/deneb/operations/sync_committee.go b/testing/spectest/shared/deneb/operations/sync_committee.go index 07155a34ac..c3d4eca7af 100644 --- a/testing/spectest/shared/deneb/operations/sync_committee.go +++ b/testing/spectest/shared/deneb/operations/sync_committee.go @@ -3,12 +3,12 @@ package operations import ( "testing" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - ethpb 
"github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" - common "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/common/operations" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" + common "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/common/operations" + "github.com/OffchainLabs/prysm/v7/testing/util" ) func blockWithSyncAggregate(ssz []byte) (interfaces.SignedBeaconBlock, error) { diff --git a/testing/spectest/shared/deneb/operations/voluntary_exit.go b/testing/spectest/shared/deneb/operations/voluntary_exit.go index 51d6c905d5..50ae80af36 100644 --- a/testing/spectest/shared/deneb/operations/voluntary_exit.go +++ b/testing/spectest/shared/deneb/operations/voluntary_exit.go @@ -3,12 +3,12 @@ package operations import ( "testing" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" - common "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/common/operations" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" + common "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/common/operations" + "github.com/OffchainLabs/prysm/v7/testing/util" ) func blockWithVoluntaryExit(ssz []byte) (interfaces.SignedBeaconBlock, error) { diff --git a/testing/spectest/shared/deneb/operations/withdrawals.go b/testing/spectest/shared/deneb/operations/withdrawals.go index 3f63fa8aef..e7d90e32e2 100644 --- a/testing/spectest/shared/deneb/operations/withdrawals.go +++ b/testing/spectest/shared/deneb/operations/withdrawals.go @@ -3,13 +3,13 @@ package operations import ( "testing" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - enginev1 "github.com/OffchainLabs/prysm/v6/proto/engine/v1" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" - common "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/common/operations" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + enginev1 "github.com/OffchainLabs/prysm/v7/proto/engine/v1" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" + common "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/common/operations" + "github.com/OffchainLabs/prysm/v7/testing/util" ) func blockWithWithdrawals(ssz []byte) (interfaces.SignedBeaconBlock, error) { diff --git a/testing/spectest/shared/deneb/rewards/BUILD.bazel b/testing/spectest/shared/deneb/rewards/BUILD.bazel index 66cb8f9368..179336538f 100644 --- a/testing/spectest/shared/deneb/rewards/BUILD.bazel +++ b/testing/spectest/shared/deneb/rewards/BUILD.bazel @@ -4,7 +4,7 @@ go_library( name = "go_default_library", testonly = True, srcs = 
["rewards_penalties.go"], - importpath = "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/deneb/rewards", + importpath = "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/deneb/rewards", visibility = ["//testing/spectest:__subpackages__"], deps = [ "//beacon-chain/core/altair:go_default_library", diff --git a/testing/spectest/shared/deneb/rewards/rewards_penalties.go b/testing/spectest/shared/deneb/rewards/rewards_penalties.go index 99bb90122c..07fcc2c5c3 100644 --- a/testing/spectest/shared/deneb/rewards/rewards_penalties.go +++ b/testing/spectest/shared/deneb/rewards/rewards_penalties.go @@ -9,13 +9,13 @@ import ( "strings" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/altair" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/spectest/utils" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/altair" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/spectest/utils" + "github.com/OffchainLabs/prysm/v7/testing/util" "github.com/golang/snappy" ) diff --git a/testing/spectest/shared/deneb/sanity/BUILD.bazel b/testing/spectest/shared/deneb/sanity/BUILD.bazel index cf137e4df1..ec76c41298 100644 --- a/testing/spectest/shared/deneb/sanity/BUILD.bazel +++ b/testing/spectest/shared/deneb/sanity/BUILD.bazel @@ -8,7 +8,7 @@ go_library( "block_processing.yaml.go", "slot_processing.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/deneb/sanity", + importpath = "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/deneb/sanity", visibility = ["//testing/spectest:__subpackages__"], deps = [ "//beacon-chain/core/helpers:go_default_library", diff --git a/testing/spectest/shared/deneb/sanity/block_processing.go b/testing/spectest/shared/deneb/sanity/block_processing.go index c49b9a080e..273d103978 100644 --- a/testing/spectest/shared/deneb/sanity/block_processing.go +++ b/testing/spectest/shared/deneb/sanity/block_processing.go @@ -8,17 +8,17 @@ import ( "strings" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/transition" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/spectest/utils" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/transition" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + "github.com/OffchainLabs/prysm/v7/config/params" + 
"github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/spectest/utils" + "github.com/OffchainLabs/prysm/v7/testing/util" "github.com/bazelbuild/rules_go/go/tools/bazel" "github.com/golang/snappy" "google.golang.org/protobuf/proto" diff --git a/testing/spectest/shared/deneb/sanity/slot_processing.go b/testing/spectest/shared/deneb/sanity/slot_processing.go index 00fc18a1c6..7161cc3e56 100644 --- a/testing/spectest/shared/deneb/sanity/slot_processing.go +++ b/testing/spectest/shared/deneb/sanity/slot_processing.go @@ -5,12 +5,12 @@ import ( "strconv" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/transition" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/spectest/utils" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/transition" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/spectest/utils" + "github.com/OffchainLabs/prysm/v7/testing/util" "github.com/golang/snappy" "google.golang.org/protobuf/proto" ) diff --git a/testing/spectest/shared/deneb/ssz_static/BUILD.bazel b/testing/spectest/shared/deneb/ssz_static/BUILD.bazel index 4252327d43..1af7f9a524 100644 --- a/testing/spectest/shared/deneb/ssz_static/BUILD.bazel +++ b/testing/spectest/shared/deneb/ssz_static/BUILD.bazel @@ -4,7 +4,7 @@ go_library( name = "go_default_library", testonly = True, srcs = ["ssz_static.go"], - importpath = "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/deneb/ssz_static", + importpath = "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/deneb/ssz_static", visibility = ["//testing/spectest:__subpackages__"], deps = [ "//beacon-chain/state/state-native:go_default_library", diff --git a/testing/spectest/shared/deneb/ssz_static/ssz_static.go b/testing/spectest/shared/deneb/ssz_static/ssz_static.go index f2a3040988..f0742bde84 100644 --- a/testing/spectest/shared/deneb/ssz_static/ssz_static.go +++ b/testing/spectest/shared/deneb/ssz_static/ssz_static.go @@ -5,11 +5,11 @@ import ( "errors" "testing" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - enginev1 "github.com/OffchainLabs/prysm/v6/proto/engine/v1" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/require" - common "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/common/ssz_static" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + enginev1 "github.com/OffchainLabs/prysm/v7/proto/engine/v1" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/require" + common "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/common/ssz_static" fssz "github.com/prysmaticlabs/fastssz" ) diff --git a/testing/spectest/shared/electra/epoch_processing/BUILD.bazel b/testing/spectest/shared/electra/epoch_processing/BUILD.bazel index e316ab426f..48e466f8a5 100644 --- 
a/testing/spectest/shared/electra/epoch_processing/BUILD.bazel +++ b/testing/spectest/shared/electra/epoch_processing/BUILD.bazel @@ -20,7 +20,7 @@ go_library( "slashings_reset.go", "sync_committee_updates.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/electra/epoch_processing", + importpath = "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/electra/epoch_processing", visibility = ["//visibility:public"], deps = [ "//beacon-chain/core/electra:go_default_library", diff --git a/testing/spectest/shared/electra/epoch_processing/effective_balance_updates.go b/testing/spectest/shared/electra/epoch_processing/effective_balance_updates.go index 2b370f2a73..a1fc4c179b 100644 --- a/testing/spectest/shared/electra/epoch_processing/effective_balance_updates.go +++ b/testing/spectest/shared/electra/epoch_processing/effective_balance_updates.go @@ -4,10 +4,10 @@ import ( "path" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/electra" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/spectest/utils" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/electra" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/spectest/utils" ) // RunEffectiveBalanceUpdatesTests executes "epoch_processing/effective_balance_updates" tests. diff --git a/testing/spectest/shared/electra/epoch_processing/eth1_data_reset.go b/testing/spectest/shared/electra/epoch_processing/eth1_data_reset.go index 108041ae85..d885dfd0cc 100644 --- a/testing/spectest/shared/electra/epoch_processing/eth1_data_reset.go +++ b/testing/spectest/shared/electra/epoch_processing/eth1_data_reset.go @@ -4,10 +4,10 @@ import ( "path" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/electra" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/spectest/utils" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/electra" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/spectest/utils" ) // RunEth1DataResetTests executes "epoch_processing/eth1_data_reset" tests. 
diff --git a/testing/spectest/shared/electra/epoch_processing/helpers.go b/testing/spectest/shared/electra/epoch_processing/helpers.go index b0e73e078f..9e59b0b3b8 100644 --- a/testing/spectest/shared/electra/epoch_processing/helpers.go +++ b/testing/spectest/shared/electra/epoch_processing/helpers.go @@ -6,11 +6,11 @@ import ( "strings" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" "github.com/bazelbuild/rules_go/go/tools/bazel" "github.com/golang/snappy" "github.com/google/go-cmp/cmp" diff --git a/testing/spectest/shared/electra/epoch_processing/historical_summaries_update.go b/testing/spectest/shared/electra/epoch_processing/historical_summaries_update.go index 654579dbe8..70262ab497 100644 --- a/testing/spectest/shared/electra/epoch_processing/historical_summaries_update.go +++ b/testing/spectest/shared/electra/epoch_processing/historical_summaries_update.go @@ -4,10 +4,10 @@ import ( "path" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/electra" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/spectest/utils" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/electra" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/spectest/utils" ) // RunHistoricalSummariesUpdateTests executes "epoch_processing/historical_Summaries_update" tests. diff --git a/testing/spectest/shared/electra/epoch_processing/inactivity_updates.go b/testing/spectest/shared/electra/epoch_processing/inactivity_updates.go index cd900b4946..55bab9506f 100644 --- a/testing/spectest/shared/electra/epoch_processing/inactivity_updates.go +++ b/testing/spectest/shared/electra/epoch_processing/inactivity_updates.go @@ -5,11 +5,11 @@ import ( "path" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/electra" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/spectest/utils" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/electra" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/spectest/utils" ) // RunInactivityUpdatesTest executes "epoch_processing/inactivity_updates" tests. 
diff --git a/testing/spectest/shared/electra/epoch_processing/justification_and_finalization.go b/testing/spectest/shared/electra/epoch_processing/justification_and_finalization.go index ba1ec6a40a..ce642b72e3 100644 --- a/testing/spectest/shared/electra/epoch_processing/justification_and_finalization.go +++ b/testing/spectest/shared/electra/epoch_processing/justification_and_finalization.go @@ -5,11 +5,11 @@ import ( "path" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/electra" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/epoch/precompute" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/spectest/utils" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/electra" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/epoch/precompute" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/spectest/utils" ) // RunJustificationAndFinalizationTests executes "epoch_processing/justification_and_finalization" tests. diff --git a/testing/spectest/shared/electra/epoch_processing/participation_flag_updates.go b/testing/spectest/shared/electra/epoch_processing/participation_flag_updates.go index 4bea7bcac6..0882e6da60 100644 --- a/testing/spectest/shared/electra/epoch_processing/participation_flag_updates.go +++ b/testing/spectest/shared/electra/epoch_processing/participation_flag_updates.go @@ -4,10 +4,10 @@ import ( "path" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/electra" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/spectest/utils" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/electra" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/spectest/utils" ) // RunParticipationFlagUpdatesTests executes "epoch_processing/participation_flag_updates" tests. 
diff --git a/testing/spectest/shared/electra/epoch_processing/pending_consolidations.go b/testing/spectest/shared/electra/epoch_processing/pending_consolidations.go index 5c31407e69..061a4c538c 100644 --- a/testing/spectest/shared/electra/epoch_processing/pending_consolidations.go +++ b/testing/spectest/shared/electra/epoch_processing/pending_consolidations.go @@ -5,10 +5,10 @@ import ( "path" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/electra" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/spectest/utils" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/electra" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/spectest/utils" ) func RunPendingConsolidationsTests(t *testing.T, config string) { diff --git a/testing/spectest/shared/electra/epoch_processing/pending_deposit_updates.go b/testing/spectest/shared/electra/epoch_processing/pending_deposit_updates.go index d27fc62b53..a603a51c51 100644 --- a/testing/spectest/shared/electra/epoch_processing/pending_deposit_updates.go +++ b/testing/spectest/shared/electra/epoch_processing/pending_deposit_updates.go @@ -5,12 +5,12 @@ import ( "path" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/electra" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/spectest/utils" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/electra" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/spectest/utils" ) func RunPendingDepositsTests(t *testing.T, config string) { diff --git a/testing/spectest/shared/electra/epoch_processing/randao_mixes_reset.go b/testing/spectest/shared/electra/epoch_processing/randao_mixes_reset.go index 4eabf28b02..cf1a7dbc85 100644 --- a/testing/spectest/shared/electra/epoch_processing/randao_mixes_reset.go +++ b/testing/spectest/shared/electra/epoch_processing/randao_mixes_reset.go @@ -4,10 +4,10 @@ import ( "path" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/electra" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/spectest/utils" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/electra" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/spectest/utils" ) // RunRandaoMixesResetTests executes "epoch_processing/randao_mixes_reset" tests. 
diff --git a/testing/spectest/shared/electra/epoch_processing/registry_updates.go b/testing/spectest/shared/electra/epoch_processing/registry_updates.go index 144276d6c6..ab9581beeb 100644 --- a/testing/spectest/shared/electra/epoch_processing/registry_updates.go +++ b/testing/spectest/shared/electra/epoch_processing/registry_updates.go @@ -5,11 +5,11 @@ import ( "path" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/electra" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/spectest/utils" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/electra" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/spectest/utils" ) // RunRegistryUpdatesTests executes "epoch_processing/registry_updates" tests. diff --git a/testing/spectest/shared/electra/epoch_processing/rewards_and_penalties.go b/testing/spectest/shared/electra/epoch_processing/rewards_and_penalties.go index 4a9de45490..a2801a757d 100644 --- a/testing/spectest/shared/electra/epoch_processing/rewards_and_penalties.go +++ b/testing/spectest/shared/electra/epoch_processing/rewards_and_penalties.go @@ -5,11 +5,11 @@ import ( "path" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/electra" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/spectest/utils" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/electra" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/spectest/utils" ) // RunRewardsAndPenaltiesTests executes "epoch_processing/rewards_and_penalties" tests. diff --git a/testing/spectest/shared/electra/epoch_processing/slashings.go b/testing/spectest/shared/electra/epoch_processing/slashings.go index 0abe6d3977..172918951e 100644 --- a/testing/spectest/shared/electra/epoch_processing/slashings.go +++ b/testing/spectest/shared/electra/epoch_processing/slashings.go @@ -4,11 +4,11 @@ import ( "path" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/electra" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/spectest/utils" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/electra" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/spectest/utils" ) // RunSlashingsTests executes "epoch_processing/slashings" tests. 
diff --git a/testing/spectest/shared/electra/epoch_processing/slashings_reset.go b/testing/spectest/shared/electra/epoch_processing/slashings_reset.go index 11fd1ad507..f248bcb102 100644 --- a/testing/spectest/shared/electra/epoch_processing/slashings_reset.go +++ b/testing/spectest/shared/electra/epoch_processing/slashings_reset.go @@ -4,10 +4,10 @@ import ( "path" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/electra" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/spectest/utils" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/electra" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/spectest/utils" ) // RunSlashingsResetTests executes "epoch_processing/slashings_reset" tests. diff --git a/testing/spectest/shared/electra/epoch_processing/sync_committee_updates.go b/testing/spectest/shared/electra/epoch_processing/sync_committee_updates.go index 08525f1863..587f1bdf00 100644 --- a/testing/spectest/shared/electra/epoch_processing/sync_committee_updates.go +++ b/testing/spectest/shared/electra/epoch_processing/sync_committee_updates.go @@ -5,11 +5,11 @@ import ( "path" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/electra" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/spectest/utils" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/electra" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/spectest/utils" ) func RunSyncCommitteeUpdatesTests(t *testing.T, config string) { diff --git a/testing/spectest/shared/electra/finality/BUILD.bazel b/testing/spectest/shared/electra/finality/BUILD.bazel index 210d7a5f98..361c93cd58 100644 --- a/testing/spectest/shared/electra/finality/BUILD.bazel +++ b/testing/spectest/shared/electra/finality/BUILD.bazel @@ -4,7 +4,7 @@ go_library( name = "go_default_library", testonly = True, srcs = ["finality.go"], - importpath = "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/electra/finality", + importpath = "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/electra/finality", visibility = ["//visibility:public"], deps = [ "//beacon-chain/core/helpers:go_default_library", diff --git a/testing/spectest/shared/electra/finality/finality.go b/testing/spectest/shared/electra/finality/finality.go index 213b2afeed..6be81820e5 100644 --- a/testing/spectest/shared/electra/finality/finality.go +++ b/testing/spectest/shared/electra/finality/finality.go @@ -5,15 +5,15 @@ import ( "fmt" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/transition" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/spectest/utils" - "github.com/OffchainLabs/prysm/v6/testing/util" + 
"github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/transition" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/spectest/utils" + "github.com/OffchainLabs/prysm/v7/testing/util" "github.com/golang/snappy" "github.com/google/go-cmp/cmp" "google.golang.org/protobuf/proto" diff --git a/testing/spectest/shared/electra/fork/BUILD.bazel b/testing/spectest/shared/electra/fork/BUILD.bazel index ca7a859574..f4dc5601cc 100644 --- a/testing/spectest/shared/electra/fork/BUILD.bazel +++ b/testing/spectest/shared/electra/fork/BUILD.bazel @@ -7,7 +7,7 @@ go_library( "transition.go", "upgrade_to_electra.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/electra/fork", + importpath = "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/electra/fork", visibility = ["//visibility:public"], deps = [ "//beacon-chain/core/electra:go_default_library", diff --git a/testing/spectest/shared/electra/fork/transition.go b/testing/spectest/shared/electra/fork/transition.go index 39eff493c3..78ab3172c3 100644 --- a/testing/spectest/shared/electra/fork/transition.go +++ b/testing/spectest/shared/electra/fork/transition.go @@ -5,16 +5,16 @@ import ( "fmt" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/transition" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - types "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/spectest/utils" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/transition" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + types "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/spectest/utils" + "github.com/OffchainLabs/prysm/v7/testing/util" "github.com/golang/snappy" ) diff --git a/testing/spectest/shared/electra/fork/upgrade_to_electra.go b/testing/spectest/shared/electra/fork/upgrade_to_electra.go index 0cbf2c6fd2..e12b8b91d9 100644 --- a/testing/spectest/shared/electra/fork/upgrade_to_electra.go +++ b/testing/spectest/shared/electra/fork/upgrade_to_electra.go @@ -4,13 +4,13 @@ import ( "path" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/electra" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/spectest/utils" 
- "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/electra" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/spectest/utils" + "github.com/OffchainLabs/prysm/v7/testing/util" "github.com/golang/snappy" "github.com/google/go-cmp/cmp" "google.golang.org/protobuf/proto" diff --git a/testing/spectest/shared/electra/merkle_proof/BUILD.bazel b/testing/spectest/shared/electra/merkle_proof/BUILD.bazel index 5bb1bed03b..b285153b22 100644 --- a/testing/spectest/shared/electra/merkle_proof/BUILD.bazel +++ b/testing/spectest/shared/electra/merkle_proof/BUILD.bazel @@ -4,7 +4,7 @@ go_library( name = "go_default_library", testonly = True, srcs = ["merkle_proof.go"], - importpath = "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/electra/merkle_proof", + importpath = "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/electra/merkle_proof", visibility = ["//visibility:public"], deps = [ "//testing/spectest/shared/common/merkle_proof:go_default_library", diff --git a/testing/spectest/shared/electra/merkle_proof/merkle_proof.go b/testing/spectest/shared/electra/merkle_proof/merkle_proof.go index 5e3314274f..4e66ce1a7d 100644 --- a/testing/spectest/shared/electra/merkle_proof/merkle_proof.go +++ b/testing/spectest/shared/electra/merkle_proof/merkle_proof.go @@ -3,8 +3,8 @@ package merkle_proof import ( "testing" - common "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/common/merkle_proof" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/electra/ssz_static" + common "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/common/merkle_proof" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/electra/ssz_static" ) func RunMerkleProofTests(t *testing.T, config string) { diff --git a/testing/spectest/shared/electra/operations/BUILD.bazel b/testing/spectest/shared/electra/operations/BUILD.bazel index 966e41af61..f379228065 100644 --- a/testing/spectest/shared/electra/operations/BUILD.bazel +++ b/testing/spectest/shared/electra/operations/BUILD.bazel @@ -19,7 +19,7 @@ go_library( "withdrawal_request.go", "withdrawals.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/electra/operations", + importpath = "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/electra/operations", visibility = ["//visibility:public"], deps = [ "//beacon-chain/core/electra:go_default_library", diff --git a/testing/spectest/shared/electra/operations/attestation.go b/testing/spectest/shared/electra/operations/attestation.go index f33f089d46..b03d8eb054 100644 --- a/testing/spectest/shared/electra/operations/attestation.go +++ b/testing/spectest/shared/electra/operations/attestation.go @@ -3,13 +3,13 @@ package operations import ( "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/electra" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" - common "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/common/operations" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/electra" + 
"github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" + common "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/common/operations" + "github.com/OffchainLabs/prysm/v7/testing/util" ) func blockWithAttestation(attestationSSZ []byte) (interfaces.SignedBeaconBlock, error) { diff --git a/testing/spectest/shared/electra/operations/attester_slashing.go b/testing/spectest/shared/electra/operations/attester_slashing.go index 87f38dc116..082783ce69 100644 --- a/testing/spectest/shared/electra/operations/attester_slashing.go +++ b/testing/spectest/shared/electra/operations/attester_slashing.go @@ -3,12 +3,12 @@ package operations import ( "testing" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" - common "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/common/operations" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" + common "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/common/operations" + "github.com/OffchainLabs/prysm/v7/testing/util" ) func blockWithAttesterSlashing(asSSZ []byte) (interfaces.SignedBeaconBlock, error) { diff --git a/testing/spectest/shared/electra/operations/block_header.go b/testing/spectest/shared/electra/operations/block_header.go index e91bcc9c4e..98fae8d1d8 100644 --- a/testing/spectest/shared/electra/operations/block_header.go +++ b/testing/spectest/shared/electra/operations/block_header.go @@ -3,8 +3,8 @@ package operations import ( "testing" - "github.com/OffchainLabs/prysm/v6/runtime/version" - common "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/common/operations" + "github.com/OffchainLabs/prysm/v7/runtime/version" + common "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/common/operations" ) func RunBlockHeaderTest(t *testing.T, config string) { diff --git a/testing/spectest/shared/electra/operations/bls_to_execution_changes.go b/testing/spectest/shared/electra/operations/bls_to_execution_changes.go index 6f047e1eed..c9c95730b4 100644 --- a/testing/spectest/shared/electra/operations/bls_to_execution_changes.go +++ b/testing/spectest/shared/electra/operations/bls_to_execution_changes.go @@ -3,12 +3,12 @@ package operations import ( "testing" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" - common "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/common/operations" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" + common "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/common/operations" + "github.com/OffchainLabs/prysm/v7/testing/util" ) func 
blockWithBlsChange(ssz []byte) (interfaces.SignedBeaconBlock, error) { diff --git a/testing/spectest/shared/electra/operations/consolidations.go b/testing/spectest/shared/electra/operations/consolidations.go index af60897f38..653961a88d 100644 --- a/testing/spectest/shared/electra/operations/consolidations.go +++ b/testing/spectest/shared/electra/operations/consolidations.go @@ -3,13 +3,13 @@ package operations import ( "testing" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - enginev1 "github.com/OffchainLabs/prysm/v6/proto/engine/v1" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" - common "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/common/operations" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + enginev1 "github.com/OffchainLabs/prysm/v7/proto/engine/v1" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" + common "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/common/operations" + "github.com/OffchainLabs/prysm/v7/testing/util" ) func blockWithConsolidation(ssz []byte) (interfaces.SignedBeaconBlock, error) { diff --git a/testing/spectest/shared/electra/operations/deposit.go b/testing/spectest/shared/electra/operations/deposit.go index 5ef916c456..0f9bc56516 100644 --- a/testing/spectest/shared/electra/operations/deposit.go +++ b/testing/spectest/shared/electra/operations/deposit.go @@ -3,13 +3,13 @@ package operations import ( "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/electra" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" - common "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/common/operations" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/electra" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" + common "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/common/operations" + "github.com/OffchainLabs/prysm/v7/testing/util" ) func blockWithDeposit(ssz []byte) (interfaces.SignedBeaconBlock, error) { diff --git a/testing/spectest/shared/electra/operations/deposit_request.go b/testing/spectest/shared/electra/operations/deposit_request.go index fab828d264..a0d705688c 100644 --- a/testing/spectest/shared/electra/operations/deposit_request.go +++ b/testing/spectest/shared/electra/operations/deposit_request.go @@ -3,13 +3,13 @@ package operations import ( "testing" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - enginev1 "github.com/OffchainLabs/prysm/v6/proto/engine/v1" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" - common "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/common/operations" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + 
"github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + enginev1 "github.com/OffchainLabs/prysm/v7/proto/engine/v1" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" + common "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/common/operations" + "github.com/OffchainLabs/prysm/v7/testing/util" ) func blockWithDepositRequest(ssz []byte) (interfaces.SignedBeaconBlock, error) { diff --git a/testing/spectest/shared/electra/operations/execution_payload.go b/testing/spectest/shared/electra/operations/execution_payload.go index ec2d6a0b8b..f5deb4b3ec 100644 --- a/testing/spectest/shared/electra/operations/execution_payload.go +++ b/testing/spectest/shared/electra/operations/execution_payload.go @@ -3,8 +3,8 @@ package operations import ( "testing" - "github.com/OffchainLabs/prysm/v6/runtime/version" - common "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/common/operations" + "github.com/OffchainLabs/prysm/v7/runtime/version" + common "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/common/operations" ) func RunExecutionPayloadTest(t *testing.T, config string) { diff --git a/testing/spectest/shared/electra/operations/helpers.go b/testing/spectest/shared/electra/operations/helpers.go index 040c818fd7..192796bf1c 100644 --- a/testing/spectest/shared/electra/operations/helpers.go +++ b/testing/spectest/shared/electra/operations/helpers.go @@ -1,11 +1,11 @@ package operations import ( - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" ) func sszToState(b []byte) (state.BeaconState, error) { diff --git a/testing/spectest/shared/electra/operations/proposer_slashing.go b/testing/spectest/shared/electra/operations/proposer_slashing.go index 4877d86c90..4aacabeeee 100644 --- a/testing/spectest/shared/electra/operations/proposer_slashing.go +++ b/testing/spectest/shared/electra/operations/proposer_slashing.go @@ -3,12 +3,12 @@ package operations import ( "testing" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" - common "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/common/operations" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" + common "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/common/operations" + "github.com/OffchainLabs/prysm/v7/testing/util" ) func blockWithProposerSlashing(ssz []byte) (interfaces.SignedBeaconBlock, error) { diff --git a/testing/spectest/shared/electra/operations/sync_committee.go 
b/testing/spectest/shared/electra/operations/sync_committee.go index d146399a12..6062fb1389 100644 --- a/testing/spectest/shared/electra/operations/sync_committee.go +++ b/testing/spectest/shared/electra/operations/sync_committee.go @@ -3,12 +3,12 @@ package operations import ( "testing" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" - common "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/common/operations" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" + common "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/common/operations" + "github.com/OffchainLabs/prysm/v7/testing/util" ) func blockWithSyncAggregate(ssz []byte) (interfaces.SignedBeaconBlock, error) { diff --git a/testing/spectest/shared/electra/operations/voluntary_exit.go b/testing/spectest/shared/electra/operations/voluntary_exit.go index 8288f81b17..0cc20578bc 100644 --- a/testing/spectest/shared/electra/operations/voluntary_exit.go +++ b/testing/spectest/shared/electra/operations/voluntary_exit.go @@ -3,12 +3,12 @@ package operations import ( "testing" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" - common "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/common/operations" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" + common "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/common/operations" + "github.com/OffchainLabs/prysm/v7/testing/util" ) func blockWithVoluntaryExit(ssz []byte) (interfaces.SignedBeaconBlock, error) { diff --git a/testing/spectest/shared/electra/operations/withdrawal_request.go b/testing/spectest/shared/electra/operations/withdrawal_request.go index b100e64689..ca551d8983 100644 --- a/testing/spectest/shared/electra/operations/withdrawal_request.go +++ b/testing/spectest/shared/electra/operations/withdrawal_request.go @@ -3,13 +3,13 @@ package operations import ( "testing" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - enginev1 "github.com/OffchainLabs/prysm/v6/proto/engine/v1" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" - common "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/common/operations" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + enginev1 "github.com/OffchainLabs/prysm/v7/proto/engine/v1" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" + common "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/common/operations" + 
"github.com/OffchainLabs/prysm/v7/testing/util" ) func blockWithWithdrawalRequest(ssz []byte) (interfaces.SignedBeaconBlock, error) { diff --git a/testing/spectest/shared/electra/operations/withdrawals.go b/testing/spectest/shared/electra/operations/withdrawals.go index 3797cc2c9f..9ac4951cc0 100644 --- a/testing/spectest/shared/electra/operations/withdrawals.go +++ b/testing/spectest/shared/electra/operations/withdrawals.go @@ -3,13 +3,13 @@ package operations import ( "testing" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - enginev1 "github.com/OffchainLabs/prysm/v6/proto/engine/v1" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" - common "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/common/operations" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + enginev1 "github.com/OffchainLabs/prysm/v7/proto/engine/v1" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" + common "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/common/operations" + "github.com/OffchainLabs/prysm/v7/testing/util" ) func blockWithWithdrawals(ssz []byte) (interfaces.SignedBeaconBlock, error) { diff --git a/testing/spectest/shared/electra/rewards/BUILD.bazel b/testing/spectest/shared/electra/rewards/BUILD.bazel index 46fcaa20b9..d3aca12ad9 100644 --- a/testing/spectest/shared/electra/rewards/BUILD.bazel +++ b/testing/spectest/shared/electra/rewards/BUILD.bazel @@ -4,7 +4,7 @@ go_library( name = "go_default_library", testonly = True, srcs = ["rewards_penalties.go"], - importpath = "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/electra/rewards", + importpath = "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/electra/rewards", visibility = ["//testing/spectest:__subpackages__"], deps = [ "//beacon-chain/core/electra:go_default_library", diff --git a/testing/spectest/shared/electra/rewards/rewards_penalties.go b/testing/spectest/shared/electra/rewards/rewards_penalties.go index f3e04ed7fc..ef4f61db0c 100644 --- a/testing/spectest/shared/electra/rewards/rewards_penalties.go +++ b/testing/spectest/shared/electra/rewards/rewards_penalties.go @@ -9,13 +9,13 @@ import ( "strings" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/electra" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/spectest/utils" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/electra" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/spectest/utils" + "github.com/OffchainLabs/prysm/v7/testing/util" "github.com/golang/snappy" ) diff --git a/testing/spectest/shared/electra/sanity/BUILD.bazel b/testing/spectest/shared/electra/sanity/BUILD.bazel index 572067f37f..1e11c3d864 100644 --- 
a/testing/spectest/shared/electra/sanity/BUILD.bazel +++ b/testing/spectest/shared/electra/sanity/BUILD.bazel @@ -8,7 +8,7 @@ go_library( "block_processing.yaml.go", "slot_processing.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/electra/sanity", + importpath = "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/electra/sanity", visibility = ["//testing/spectest:__subpackages__"], deps = [ "//beacon-chain/core/helpers:go_default_library", diff --git a/testing/spectest/shared/electra/sanity/block_processing.go b/testing/spectest/shared/electra/sanity/block_processing.go index 9a5195a147..788a3ece54 100644 --- a/testing/spectest/shared/electra/sanity/block_processing.go +++ b/testing/spectest/shared/electra/sanity/block_processing.go @@ -8,16 +8,16 @@ import ( "strings" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/transition" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/spectest/utils" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/transition" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/spectest/utils" + "github.com/OffchainLabs/prysm/v7/testing/util" "github.com/bazelbuild/rules_go/go/tools/bazel" "github.com/golang/snappy" "github.com/google/go-cmp/cmp" diff --git a/testing/spectest/shared/electra/sanity/slot_processing.go b/testing/spectest/shared/electra/sanity/slot_processing.go index f261039275..5c7b9e2c45 100644 --- a/testing/spectest/shared/electra/sanity/slot_processing.go +++ b/testing/spectest/shared/electra/sanity/slot_processing.go @@ -5,12 +5,12 @@ import ( "strconv" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/transition" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/spectest/utils" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/transition" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/spectest/utils" + "github.com/OffchainLabs/prysm/v7/testing/util" "github.com/golang/snappy" "google.golang.org/protobuf/proto" ) diff --git a/testing/spectest/shared/electra/ssz_static/BUILD.bazel b/testing/spectest/shared/electra/ssz_static/BUILD.bazel index 431ecce594..b73104c509 100644 --- a/testing/spectest/shared/electra/ssz_static/BUILD.bazel +++ 
b/testing/spectest/shared/electra/ssz_static/BUILD.bazel @@ -4,7 +4,7 @@ go_library( name = "go_default_library", testonly = True, srcs = ["ssz_static.go"], - importpath = "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/electra/ssz_static", + importpath = "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/electra/ssz_static", visibility = ["//testing/spectest:__subpackages__"], deps = [ "//beacon-chain/state/state-native:go_default_library", diff --git a/testing/spectest/shared/electra/ssz_static/ssz_static.go b/testing/spectest/shared/electra/ssz_static/ssz_static.go index 107d95b99f..a0d18e75a5 100644 --- a/testing/spectest/shared/electra/ssz_static/ssz_static.go +++ b/testing/spectest/shared/electra/ssz_static/ssz_static.go @@ -5,11 +5,11 @@ import ( "errors" "testing" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - enginev1 "github.com/OffchainLabs/prysm/v6/proto/engine/v1" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/require" - common "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/common/ssz_static" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + enginev1 "github.com/OffchainLabs/prysm/v7/proto/engine/v1" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/require" + common "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/common/ssz_static" fssz "github.com/prysmaticlabs/fastssz" ) diff --git a/testing/spectest/shared/fulu/epoch_processing/BUILD.bazel b/testing/spectest/shared/fulu/epoch_processing/BUILD.bazel index f23558ea12..ef2d97427b 100644 --- a/testing/spectest/shared/fulu/epoch_processing/BUILD.bazel +++ b/testing/spectest/shared/fulu/epoch_processing/BUILD.bazel @@ -21,7 +21,7 @@ go_library( "slashings_reset.go", "sync_committee_updates.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/fulu/epoch_processing", + importpath = "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/fulu/epoch_processing", visibility = ["//visibility:public"], deps = [ "//beacon-chain/core/electra:go_default_library", diff --git a/testing/spectest/shared/fulu/epoch_processing/effective_balance_updates.go b/testing/spectest/shared/fulu/epoch_processing/effective_balance_updates.go index c87ef2674c..384f5158dd 100644 --- a/testing/spectest/shared/fulu/epoch_processing/effective_balance_updates.go +++ b/testing/spectest/shared/fulu/epoch_processing/effective_balance_updates.go @@ -4,10 +4,10 @@ import ( "path" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/electra" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/spectest/utils" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/electra" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/spectest/utils" ) // RunEffectiveBalanceUpdatesTests executes "epoch_processing/effective_balance_updates" tests. 
diff --git a/testing/spectest/shared/fulu/epoch_processing/eth1_data_reset.go b/testing/spectest/shared/fulu/epoch_processing/eth1_data_reset.go index 8cdcb7edfd..fd547c37f2 100644 --- a/testing/spectest/shared/fulu/epoch_processing/eth1_data_reset.go +++ b/testing/spectest/shared/fulu/epoch_processing/eth1_data_reset.go @@ -4,10 +4,10 @@ import ( "path" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/electra" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/spectest/utils" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/electra" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/spectest/utils" ) // RunEth1DataResetTests executes "epoch_processing/eth1_data_reset" tests. diff --git a/testing/spectest/shared/fulu/epoch_processing/helpers.go b/testing/spectest/shared/fulu/epoch_processing/helpers.go index bc12153a1d..e7382f0446 100644 --- a/testing/spectest/shared/fulu/epoch_processing/helpers.go +++ b/testing/spectest/shared/fulu/epoch_processing/helpers.go @@ -6,11 +6,11 @@ import ( "strings" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" "github.com/bazelbuild/rules_go/go/tools/bazel" "github.com/golang/snappy" "github.com/google/go-cmp/cmp" diff --git a/testing/spectest/shared/fulu/epoch_processing/historical_summaries_update.go b/testing/spectest/shared/fulu/epoch_processing/historical_summaries_update.go index b26549e217..e97a9796f0 100644 --- a/testing/spectest/shared/fulu/epoch_processing/historical_summaries_update.go +++ b/testing/spectest/shared/fulu/epoch_processing/historical_summaries_update.go @@ -4,10 +4,10 @@ import ( "path" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/electra" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/spectest/utils" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/electra" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/spectest/utils" ) // RunHistoricalSummariesUpdateTests executes "epoch_processing/historical_Summaries_update" tests. 
diff --git a/testing/spectest/shared/fulu/epoch_processing/inactivity_updates.go b/testing/spectest/shared/fulu/epoch_processing/inactivity_updates.go index c76e8dd45b..f034af8f90 100644 --- a/testing/spectest/shared/fulu/epoch_processing/inactivity_updates.go +++ b/testing/spectest/shared/fulu/epoch_processing/inactivity_updates.go @@ -5,11 +5,11 @@ import ( "path" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/electra" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/spectest/utils" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/electra" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/spectest/utils" ) // RunInactivityUpdatesTest executes "epoch_processing/inactivity_updates" tests. diff --git a/testing/spectest/shared/fulu/epoch_processing/justification_and_finalization.go b/testing/spectest/shared/fulu/epoch_processing/justification_and_finalization.go index 73ee0533ee..63b9fe61e6 100644 --- a/testing/spectest/shared/fulu/epoch_processing/justification_and_finalization.go +++ b/testing/spectest/shared/fulu/epoch_processing/justification_and_finalization.go @@ -5,11 +5,11 @@ import ( "path" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/electra" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/epoch/precompute" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/spectest/utils" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/electra" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/epoch/precompute" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/spectest/utils" ) // RunJustificationAndFinalizationTests executes "epoch_processing/justification_and_finalization" tests. diff --git a/testing/spectest/shared/fulu/epoch_processing/participation_flag_updates.go b/testing/spectest/shared/fulu/epoch_processing/participation_flag_updates.go index 5b7e479358..fc4b5afb92 100644 --- a/testing/spectest/shared/fulu/epoch_processing/participation_flag_updates.go +++ b/testing/spectest/shared/fulu/epoch_processing/participation_flag_updates.go @@ -4,10 +4,10 @@ import ( "path" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/electra" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/spectest/utils" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/electra" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/spectest/utils" ) // RunParticipationFlagUpdatesTests executes "epoch_processing/participation_flag_updates" tests. 
diff --git a/testing/spectest/shared/fulu/epoch_processing/pending_consolidations.go b/testing/spectest/shared/fulu/epoch_processing/pending_consolidations.go index 14ef890bbe..d14efd4c67 100644 --- a/testing/spectest/shared/fulu/epoch_processing/pending_consolidations.go +++ b/testing/spectest/shared/fulu/epoch_processing/pending_consolidations.go @@ -5,10 +5,10 @@ import ( "path" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/electra" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/spectest/utils" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/electra" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/spectest/utils" ) func RunPendingConsolidationsTests(t *testing.T, config string) { diff --git a/testing/spectest/shared/fulu/epoch_processing/pending_deposit_updates.go b/testing/spectest/shared/fulu/epoch_processing/pending_deposit_updates.go index d4a61f9147..835cc2c3c3 100644 --- a/testing/spectest/shared/fulu/epoch_processing/pending_deposit_updates.go +++ b/testing/spectest/shared/fulu/epoch_processing/pending_deposit_updates.go @@ -5,12 +5,12 @@ import ( "path" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/electra" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/spectest/utils" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/electra" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/spectest/utils" ) func RunPendingDepositsTests(t *testing.T, config string) { diff --git a/testing/spectest/shared/fulu/epoch_processing/proposer_lookahead.go b/testing/spectest/shared/fulu/epoch_processing/proposer_lookahead.go index 2360b28aef..e857ff0d3a 100644 --- a/testing/spectest/shared/fulu/epoch_processing/proposer_lookahead.go +++ b/testing/spectest/shared/fulu/epoch_processing/proposer_lookahead.go @@ -4,10 +4,10 @@ import ( "path" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/fulu" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/spectest/utils" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/fulu" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/spectest/utils" ) // RunProposerLookaheadTests executes "epoch_processing/proposer_lookahead" tests. 
diff --git a/testing/spectest/shared/fulu/epoch_processing/randao_mixes_reset.go b/testing/spectest/shared/fulu/epoch_processing/randao_mixes_reset.go index a1dc070ded..0955ffb9ad 100644 --- a/testing/spectest/shared/fulu/epoch_processing/randao_mixes_reset.go +++ b/testing/spectest/shared/fulu/epoch_processing/randao_mixes_reset.go @@ -4,10 +4,10 @@ import ( "path" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/electra" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/spectest/utils" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/electra" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/spectest/utils" ) // RunRandaoMixesResetTests executes "epoch_processing/randao_mixes_reset" tests. diff --git a/testing/spectest/shared/fulu/epoch_processing/registry_updates.go b/testing/spectest/shared/fulu/epoch_processing/registry_updates.go index 1d43b52d0c..1f470461af 100644 --- a/testing/spectest/shared/fulu/epoch_processing/registry_updates.go +++ b/testing/spectest/shared/fulu/epoch_processing/registry_updates.go @@ -5,11 +5,11 @@ import ( "path" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/electra" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/spectest/utils" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/electra" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/spectest/utils" ) // RunRegistryUpdatesTests executes "epoch_processing/registry_updates" tests. diff --git a/testing/spectest/shared/fulu/epoch_processing/rewards_and_penalties.go b/testing/spectest/shared/fulu/epoch_processing/rewards_and_penalties.go index 6b593cbd51..7916362335 100644 --- a/testing/spectest/shared/fulu/epoch_processing/rewards_and_penalties.go +++ b/testing/spectest/shared/fulu/epoch_processing/rewards_and_penalties.go @@ -5,11 +5,11 @@ import ( "path" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/electra" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/spectest/utils" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/electra" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/spectest/utils" ) // RunRewardsAndPenaltiesTests executes "epoch_processing/rewards_and_penalties" tests. 
diff --git a/testing/spectest/shared/fulu/epoch_processing/slashings.go b/testing/spectest/shared/fulu/epoch_processing/slashings.go index f7435a10d0..2804dd2ea9 100644 --- a/testing/spectest/shared/fulu/epoch_processing/slashings.go +++ b/testing/spectest/shared/fulu/epoch_processing/slashings.go @@ -4,11 +4,11 @@ import ( "path" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/electra" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/spectest/utils" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/electra" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/spectest/utils" ) // RunSlashingsTests executes "epoch_processing/slashings" tests. diff --git a/testing/spectest/shared/fulu/epoch_processing/slashings_reset.go b/testing/spectest/shared/fulu/epoch_processing/slashings_reset.go index d73993a23d..8382a2efe6 100644 --- a/testing/spectest/shared/fulu/epoch_processing/slashings_reset.go +++ b/testing/spectest/shared/fulu/epoch_processing/slashings_reset.go @@ -4,10 +4,10 @@ import ( "path" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/electra" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/spectest/utils" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/electra" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/spectest/utils" ) // RunSlashingsResetTests executes "epoch_processing/slashings_reset" tests. 
diff --git a/testing/spectest/shared/fulu/epoch_processing/sync_committee_updates.go b/testing/spectest/shared/fulu/epoch_processing/sync_committee_updates.go index ba73a90ee5..faf528a453 100644 --- a/testing/spectest/shared/fulu/epoch_processing/sync_committee_updates.go +++ b/testing/spectest/shared/fulu/epoch_processing/sync_committee_updates.go @@ -5,11 +5,11 @@ import ( "path" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/electra" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/spectest/utils" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/electra" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/spectest/utils" ) func RunSyncCommitteeUpdatesTests(t *testing.T, config string) { diff --git a/testing/spectest/shared/fulu/finality/BUILD.bazel b/testing/spectest/shared/fulu/finality/BUILD.bazel index aa9f4bd972..9c49fc4e7e 100644 --- a/testing/spectest/shared/fulu/finality/BUILD.bazel +++ b/testing/spectest/shared/fulu/finality/BUILD.bazel @@ -4,7 +4,7 @@ go_library( name = "go_default_library", testonly = True, srcs = ["finality.go"], - importpath = "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/fulu/finality", + importpath = "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/fulu/finality", visibility = ["//visibility:public"], deps = [ "//beacon-chain/core/helpers:go_default_library", diff --git a/testing/spectest/shared/fulu/finality/finality.go b/testing/spectest/shared/fulu/finality/finality.go index 4e7850d9d3..a155f693aa 100644 --- a/testing/spectest/shared/fulu/finality/finality.go +++ b/testing/spectest/shared/fulu/finality/finality.go @@ -5,15 +5,15 @@ import ( "fmt" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/transition" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/spectest/utils" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/transition" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/spectest/utils" + "github.com/OffchainLabs/prysm/v7/testing/util" "github.com/golang/snappy" "github.com/google/go-cmp/cmp" "google.golang.org/protobuf/proto" diff --git a/testing/spectest/shared/fulu/fork/BUILD.bazel b/testing/spectest/shared/fulu/fork/BUILD.bazel index 18d324c503..68873db4f3 100644 --- a/testing/spectest/shared/fulu/fork/BUILD.bazel +++ b/testing/spectest/shared/fulu/fork/BUILD.bazel @@ -7,7 +7,7 @@ go_library( "transition.go", "upgrade_to_fulu.go", ], - importpath = 
"github.com/OffchainLabs/prysm/v6/testing/spectest/shared/fulu/fork", + importpath = "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/fulu/fork", visibility = ["//visibility:public"], deps = [ "//beacon-chain/core/fulu:go_default_library", diff --git a/testing/spectest/shared/fulu/fork/transition.go b/testing/spectest/shared/fulu/fork/transition.go index 858220038e..9ec47ed12e 100644 --- a/testing/spectest/shared/fulu/fork/transition.go +++ b/testing/spectest/shared/fulu/fork/transition.go @@ -5,16 +5,16 @@ import ( "fmt" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/transition" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - types "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/spectest/utils" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/transition" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + types "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/spectest/utils" + "github.com/OffchainLabs/prysm/v7/testing/util" "github.com/golang/snappy" ) diff --git a/testing/spectest/shared/fulu/fork/upgrade_to_fulu.go b/testing/spectest/shared/fulu/fork/upgrade_to_fulu.go index dd6ea89367..78e233acd3 100644 --- a/testing/spectest/shared/fulu/fork/upgrade_to_fulu.go +++ b/testing/spectest/shared/fulu/fork/upgrade_to_fulu.go @@ -4,13 +4,13 @@ import ( "path" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/fulu" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/spectest/utils" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/fulu" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/spectest/utils" + "github.com/OffchainLabs/prysm/v7/testing/util" "github.com/golang/snappy" "github.com/google/go-cmp/cmp" "google.golang.org/protobuf/proto" diff --git a/testing/spectest/shared/fulu/merkle_proof/BUILD.bazel b/testing/spectest/shared/fulu/merkle_proof/BUILD.bazel index 846d1a29cc..a1bb96eb5d 100644 --- a/testing/spectest/shared/fulu/merkle_proof/BUILD.bazel +++ b/testing/spectest/shared/fulu/merkle_proof/BUILD.bazel @@ -4,7 +4,7 @@ go_library( name = "go_default_library", testonly = True, srcs = ["merkle_proof.go"], - importpath = 
"github.com/OffchainLabs/prysm/v6/testing/spectest/shared/fulu/merkle_proof", + importpath = "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/fulu/merkle_proof", visibility = ["//visibility:public"], deps = [ "//testing/spectest/shared/common/merkle_proof:go_default_library", diff --git a/testing/spectest/shared/fulu/merkle_proof/merkle_proof.go b/testing/spectest/shared/fulu/merkle_proof/merkle_proof.go index 41c37a1c72..4257356076 100644 --- a/testing/spectest/shared/fulu/merkle_proof/merkle_proof.go +++ b/testing/spectest/shared/fulu/merkle_proof/merkle_proof.go @@ -3,8 +3,8 @@ package merkle_proof import ( "testing" - common "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/common/merkle_proof" - "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/fulu/ssz_static" + common "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/common/merkle_proof" + "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/fulu/ssz_static" ) func RunMerkleProofTests(t *testing.T, config string) { diff --git a/testing/spectest/shared/fulu/networking/BUILD.bazel b/testing/spectest/shared/fulu/networking/BUILD.bazel index bec4d77f48..3e802207a1 100644 --- a/testing/spectest/shared/fulu/networking/BUILD.bazel +++ b/testing/spectest/shared/fulu/networking/BUILD.bazel @@ -4,7 +4,7 @@ go_library( name = "go_default_library", testonly = True, srcs = ["custody_groups.go"], - importpath = "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/fulu/networking", + importpath = "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/fulu/networking", visibility = ["//visibility:public"], deps = [ "//beacon-chain/core/peerdas:go_default_library", diff --git a/testing/spectest/shared/fulu/networking/custody_groups.go b/testing/spectest/shared/fulu/networking/custody_groups.go index 91cd525366..1e2e9c8727 100644 --- a/testing/spectest/shared/fulu/networking/custody_groups.go +++ b/testing/spectest/shared/fulu/networking/custody_groups.go @@ -4,10 +4,10 @@ import ( "math/big" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/peerdas" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/spectest/utils" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/peerdas" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/spectest/utils" + "github.com/OffchainLabs/prysm/v7/testing/util" "github.com/ethereum/go-ethereum/p2p/enode" "gopkg.in/yaml.v3" ) diff --git a/testing/spectest/shared/fulu/operations/BUILD.bazel b/testing/spectest/shared/fulu/operations/BUILD.bazel index 89c22bfb38..6968c80929 100644 --- a/testing/spectest/shared/fulu/operations/BUILD.bazel +++ b/testing/spectest/shared/fulu/operations/BUILD.bazel @@ -19,7 +19,7 @@ go_library( "withdrawal_request.go", "withdrawals.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/fulu/operations", + importpath = "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/fulu/operations", visibility = ["//visibility:public"], deps = [ "//beacon-chain/core/electra:go_default_library", diff --git a/testing/spectest/shared/fulu/operations/attestation.go b/testing/spectest/shared/fulu/operations/attestation.go index cacaf60503..aa7455a6db 100644 --- a/testing/spectest/shared/fulu/operations/attestation.go +++ b/testing/spectest/shared/fulu/operations/attestation.go @@ -3,13 +3,13 @@ package operations import ( "testing" - 
"github.com/OffchainLabs/prysm/v6/beacon-chain/core/electra" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" - common "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/common/operations" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/electra" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" + common "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/common/operations" + "github.com/OffchainLabs/prysm/v7/testing/util" ) func blockWithAttestation(attestationSSZ []byte) (interfaces.SignedBeaconBlock, error) { diff --git a/testing/spectest/shared/fulu/operations/attester_slashing.go b/testing/spectest/shared/fulu/operations/attester_slashing.go index ad6f4487bb..e1972d94af 100644 --- a/testing/spectest/shared/fulu/operations/attester_slashing.go +++ b/testing/spectest/shared/fulu/operations/attester_slashing.go @@ -3,12 +3,12 @@ package operations import ( "testing" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" - common "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/common/operations" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" + common "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/common/operations" + "github.com/OffchainLabs/prysm/v7/testing/util" ) func blockWithAttesterSlashing(asSSZ []byte) (interfaces.SignedBeaconBlock, error) { diff --git a/testing/spectest/shared/fulu/operations/block_header.go b/testing/spectest/shared/fulu/operations/block_header.go index 68c624cc20..c116ccace7 100644 --- a/testing/spectest/shared/fulu/operations/block_header.go +++ b/testing/spectest/shared/fulu/operations/block_header.go @@ -3,8 +3,8 @@ package operations import ( "testing" - "github.com/OffchainLabs/prysm/v6/runtime/version" - common "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/common/operations" + "github.com/OffchainLabs/prysm/v7/runtime/version" + common "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/common/operations" ) func RunBlockHeaderTest(t *testing.T, config string) { diff --git a/testing/spectest/shared/fulu/operations/bls_to_execution_changes.go b/testing/spectest/shared/fulu/operations/bls_to_execution_changes.go index 9d8a6d1733..8ac02006f3 100644 --- a/testing/spectest/shared/fulu/operations/bls_to_execution_changes.go +++ b/testing/spectest/shared/fulu/operations/bls_to_execution_changes.go @@ -3,12 +3,12 @@ package operations import ( "testing" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" - common 
"github.com/OffchainLabs/prysm/v6/testing/spectest/shared/common/operations" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" + common "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/common/operations" + "github.com/OffchainLabs/prysm/v7/testing/util" ) func blockWithBlsChange(ssz []byte) (interfaces.SignedBeaconBlock, error) { diff --git a/testing/spectest/shared/fulu/operations/consolidations.go b/testing/spectest/shared/fulu/operations/consolidations.go index a39e759e2f..ad1bd51fcc 100644 --- a/testing/spectest/shared/fulu/operations/consolidations.go +++ b/testing/spectest/shared/fulu/operations/consolidations.go @@ -3,13 +3,13 @@ package operations import ( "testing" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - enginev1 "github.com/OffchainLabs/prysm/v6/proto/engine/v1" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" - common "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/common/operations" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + enginev1 "github.com/OffchainLabs/prysm/v7/proto/engine/v1" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" + common "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/common/operations" + "github.com/OffchainLabs/prysm/v7/testing/util" ) func blockWithConsolidation(ssz []byte) (interfaces.SignedBeaconBlock, error) { diff --git a/testing/spectest/shared/fulu/operations/deposit.go b/testing/spectest/shared/fulu/operations/deposit.go index d8065fb638..57084625fa 100644 --- a/testing/spectest/shared/fulu/operations/deposit.go +++ b/testing/spectest/shared/fulu/operations/deposit.go @@ -3,13 +3,13 @@ package operations import ( "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/electra" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" - common "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/common/operations" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/electra" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" + common "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/common/operations" + "github.com/OffchainLabs/prysm/v7/testing/util" ) func blockWithDeposit(ssz []byte) (interfaces.SignedBeaconBlock, error) { diff --git a/testing/spectest/shared/fulu/operations/deposit_request.go b/testing/spectest/shared/fulu/operations/deposit_request.go index 9277fb60bc..fee9672114 100644 --- a/testing/spectest/shared/fulu/operations/deposit_request.go +++ b/testing/spectest/shared/fulu/operations/deposit_request.go @@ -3,13 +3,13 @@ package operations import ( "testing" - 
"github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - enginev1 "github.com/OffchainLabs/prysm/v6/proto/engine/v1" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" - common "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/common/operations" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + enginev1 "github.com/OffchainLabs/prysm/v7/proto/engine/v1" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" + common "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/common/operations" + "github.com/OffchainLabs/prysm/v7/testing/util" ) func blockWithDepositRequest(ssz []byte) (interfaces.SignedBeaconBlock, error) { diff --git a/testing/spectest/shared/fulu/operations/execution_payload.go b/testing/spectest/shared/fulu/operations/execution_payload.go index 36e02f0e9e..eebfd57c96 100644 --- a/testing/spectest/shared/fulu/operations/execution_payload.go +++ b/testing/spectest/shared/fulu/operations/execution_payload.go @@ -3,8 +3,8 @@ package operations import ( "testing" - "github.com/OffchainLabs/prysm/v6/runtime/version" - common "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/common/operations" + "github.com/OffchainLabs/prysm/v7/runtime/version" + common "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/common/operations" ) func RunExecutionPayloadTest(t *testing.T, config string) { diff --git a/testing/spectest/shared/fulu/operations/helpers.go b/testing/spectest/shared/fulu/operations/helpers.go index 0421798547..4488b98ba2 100644 --- a/testing/spectest/shared/fulu/operations/helpers.go +++ b/testing/spectest/shared/fulu/operations/helpers.go @@ -1,11 +1,11 @@ package operations import ( - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" ) func sszToState(b []byte) (state.BeaconState, error) { diff --git a/testing/spectest/shared/fulu/operations/proposer_slashing.go b/testing/spectest/shared/fulu/operations/proposer_slashing.go index 07e60e6e8c..3229a81f15 100644 --- a/testing/spectest/shared/fulu/operations/proposer_slashing.go +++ b/testing/spectest/shared/fulu/operations/proposer_slashing.go @@ -3,12 +3,12 @@ package operations import ( "testing" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" - common "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/common/operations" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + ethpb 
"github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" + common "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/common/operations" + "github.com/OffchainLabs/prysm/v7/testing/util" ) func blockWithProposerSlashing(ssz []byte) (interfaces.SignedBeaconBlock, error) { diff --git a/testing/spectest/shared/fulu/operations/sync_committee.go b/testing/spectest/shared/fulu/operations/sync_committee.go index db170c0edc..8e2fad5210 100644 --- a/testing/spectest/shared/fulu/operations/sync_committee.go +++ b/testing/spectest/shared/fulu/operations/sync_committee.go @@ -3,12 +3,12 @@ package operations import ( "testing" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" - common "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/common/operations" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" + common "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/common/operations" + "github.com/OffchainLabs/prysm/v7/testing/util" ) func blockWithSyncAggregate(ssz []byte) (interfaces.SignedBeaconBlock, error) { diff --git a/testing/spectest/shared/fulu/operations/voluntary_exit.go b/testing/spectest/shared/fulu/operations/voluntary_exit.go index d505050324..2afd4895f8 100644 --- a/testing/spectest/shared/fulu/operations/voluntary_exit.go +++ b/testing/spectest/shared/fulu/operations/voluntary_exit.go @@ -3,12 +3,12 @@ package operations import ( "testing" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" - common "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/common/operations" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" + common "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/common/operations" + "github.com/OffchainLabs/prysm/v7/testing/util" ) func blockWithVoluntaryExit(ssz []byte) (interfaces.SignedBeaconBlock, error) { diff --git a/testing/spectest/shared/fulu/operations/withdrawal_request.go b/testing/spectest/shared/fulu/operations/withdrawal_request.go index fa9f119014..eea82d8206 100644 --- a/testing/spectest/shared/fulu/operations/withdrawal_request.go +++ b/testing/spectest/shared/fulu/operations/withdrawal_request.go @@ -3,13 +3,13 @@ package operations import ( "testing" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - enginev1 "github.com/OffchainLabs/prysm/v6/proto/engine/v1" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" - common "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/common/operations" - "github.com/OffchainLabs/prysm/v6/testing/util" + 
"github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + enginev1 "github.com/OffchainLabs/prysm/v7/proto/engine/v1" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" + common "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/common/operations" + "github.com/OffchainLabs/prysm/v7/testing/util" ) func blockWithWithdrawalRequest(ssz []byte) (interfaces.SignedBeaconBlock, error) { diff --git a/testing/spectest/shared/fulu/operations/withdrawals.go b/testing/spectest/shared/fulu/operations/withdrawals.go index a5193e3b8d..1eb699621b 100644 --- a/testing/spectest/shared/fulu/operations/withdrawals.go +++ b/testing/spectest/shared/fulu/operations/withdrawals.go @@ -3,13 +3,13 @@ package operations import ( "testing" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - enginev1 "github.com/OffchainLabs/prysm/v6/proto/engine/v1" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" - common "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/common/operations" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + enginev1 "github.com/OffchainLabs/prysm/v7/proto/engine/v1" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" + common "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/common/operations" + "github.com/OffchainLabs/prysm/v7/testing/util" ) func blockWithWithdrawals(ssz []byte) (interfaces.SignedBeaconBlock, error) { diff --git a/testing/spectest/shared/fulu/rewards/BUILD.bazel b/testing/spectest/shared/fulu/rewards/BUILD.bazel index 47ea13c055..c48ae6e2bf 100644 --- a/testing/spectest/shared/fulu/rewards/BUILD.bazel +++ b/testing/spectest/shared/fulu/rewards/BUILD.bazel @@ -4,7 +4,7 @@ go_library( name = "go_default_library", testonly = True, srcs = ["rewards_penalties.go"], - importpath = "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/fulu/rewards", + importpath = "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/fulu/rewards", visibility = ["//testing/spectest:__subpackages__"], deps = [ "//beacon-chain/core/electra:go_default_library", diff --git a/testing/spectest/shared/fulu/rewards/rewards_penalties.go b/testing/spectest/shared/fulu/rewards/rewards_penalties.go index d972d39913..4ed3aebbfb 100644 --- a/testing/spectest/shared/fulu/rewards/rewards_penalties.go +++ b/testing/spectest/shared/fulu/rewards/rewards_penalties.go @@ -9,13 +9,13 @@ import ( "strings" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/electra" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/spectest/utils" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/electra" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + 
"github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/spectest/utils" + "github.com/OffchainLabs/prysm/v7/testing/util" "github.com/golang/snappy" ) diff --git a/testing/spectest/shared/fulu/sanity/BUILD.bazel b/testing/spectest/shared/fulu/sanity/BUILD.bazel index f8a41d961d..b1c5c127bc 100644 --- a/testing/spectest/shared/fulu/sanity/BUILD.bazel +++ b/testing/spectest/shared/fulu/sanity/BUILD.bazel @@ -8,7 +8,7 @@ go_library( "block_processing.yaml.go", "slot_processing.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/fulu/sanity", + importpath = "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/fulu/sanity", visibility = ["//testing/spectest:__subpackages__"], deps = [ "//beacon-chain/core/helpers:go_default_library", diff --git a/testing/spectest/shared/fulu/sanity/block_processing.go b/testing/spectest/shared/fulu/sanity/block_processing.go index b7236c5481..01637af031 100644 --- a/testing/spectest/shared/fulu/sanity/block_processing.go +++ b/testing/spectest/shared/fulu/sanity/block_processing.go @@ -8,16 +8,16 @@ import ( "strings" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/transition" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/spectest/utils" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/transition" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/spectest/utils" + "github.com/OffchainLabs/prysm/v7/testing/util" "github.com/bazelbuild/rules_go/go/tools/bazel" "github.com/golang/snappy" "github.com/google/go-cmp/cmp" diff --git a/testing/spectest/shared/fulu/sanity/slot_processing.go b/testing/spectest/shared/fulu/sanity/slot_processing.go index f241dc61f8..b2c81ad97e 100644 --- a/testing/spectest/shared/fulu/sanity/slot_processing.go +++ b/testing/spectest/shared/fulu/sanity/slot_processing.go @@ -5,12 +5,12 @@ import ( "strconv" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/transition" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/spectest/utils" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/transition" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/spectest/utils" + "github.com/OffchainLabs/prysm/v7/testing/util" 
"github.com/golang/snappy" "google.golang.org/protobuf/proto" ) diff --git a/testing/spectest/shared/fulu/ssz_static/BUILD.bazel b/testing/spectest/shared/fulu/ssz_static/BUILD.bazel index 809506e029..24918c3bf7 100644 --- a/testing/spectest/shared/fulu/ssz_static/BUILD.bazel +++ b/testing/spectest/shared/fulu/ssz_static/BUILD.bazel @@ -4,7 +4,7 @@ go_library( name = "go_default_library", testonly = True, srcs = ["ssz_static.go"], - importpath = "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/fulu/ssz_static", + importpath = "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/fulu/ssz_static", visibility = ["//testing/spectest:__subpackages__"], deps = [ "//beacon-chain/state/state-native:go_default_library", diff --git a/testing/spectest/shared/fulu/ssz_static/ssz_static.go b/testing/spectest/shared/fulu/ssz_static/ssz_static.go index bfae82446c..5c65791c30 100644 --- a/testing/spectest/shared/fulu/ssz_static/ssz_static.go +++ b/testing/spectest/shared/fulu/ssz_static/ssz_static.go @@ -5,11 +5,11 @@ import ( "errors" "testing" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - enginev1 "github.com/OffchainLabs/prysm/v6/proto/engine/v1" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/require" - common "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/common/ssz_static" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + enginev1 "github.com/OffchainLabs/prysm/v7/proto/engine/v1" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/require" + common "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/common/ssz_static" fssz "github.com/prysmaticlabs/fastssz" ) diff --git a/testing/spectest/shared/gloas/ssz_static/BUILD.bazel b/testing/spectest/shared/gloas/ssz_static/BUILD.bazel index 50eca0ed7b..04a733ed15 100644 --- a/testing/spectest/shared/gloas/ssz_static/BUILD.bazel +++ b/testing/spectest/shared/gloas/ssz_static/BUILD.bazel @@ -4,7 +4,7 @@ go_library( name = "go_default_library", testonly = True, srcs = ["ssz_static.go"], - importpath = "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/gloas/ssz_static", + importpath = "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/gloas/ssz_static", visibility = ["//testing/spectest:__subpackages__"], deps = [ "//proto/engine/v1:go_default_library", diff --git a/testing/spectest/shared/gloas/ssz_static/ssz_static.go b/testing/spectest/shared/gloas/ssz_static/ssz_static.go index 0a99c14f54..db668752d7 100644 --- a/testing/spectest/shared/gloas/ssz_static/ssz_static.go +++ b/testing/spectest/shared/gloas/ssz_static/ssz_static.go @@ -4,9 +4,9 @@ import ( "errors" "testing" - enginev1 "github.com/OffchainLabs/prysm/v6/proto/engine/v1" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - common "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/common/ssz_static" + enginev1 "github.com/OffchainLabs/prysm/v7/proto/engine/v1" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + common "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/common/ssz_static" fssz "github.com/prysmaticlabs/fastssz" ) diff --git a/testing/spectest/shared/phase0/epoch_processing/BUILD.bazel b/testing/spectest/shared/phase0/epoch_processing/BUILD.bazel index c2fea38ea0..498deb816d 100644 --- a/testing/spectest/shared/phase0/epoch_processing/BUILD.bazel +++ b/testing/spectest/shared/phase0/epoch_processing/BUILD.bazel 
@@ -16,7 +16,7 @@ go_library( "slashings.go", "slashings_reset.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/phase0/epoch_processing", + importpath = "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/phase0/epoch_processing", visibility = ["//testing/spectest:__subpackages__"], deps = [ "//beacon-chain/core/epoch:go_default_library", diff --git a/testing/spectest/shared/phase0/epoch_processing/effective_balance_updates.go b/testing/spectest/shared/phase0/epoch_processing/effective_balance_updates.go index e362bf9ade..b168447420 100644 --- a/testing/spectest/shared/phase0/epoch_processing/effective_balance_updates.go +++ b/testing/spectest/shared/phase0/epoch_processing/effective_balance_updates.go @@ -4,10 +4,10 @@ import ( "path" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/epoch" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/spectest/utils" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/epoch" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/spectest/utils" ) // RunEffectiveBalanceUpdatesTests executes "epoch_processing/effective_balance_updates" tests. diff --git a/testing/spectest/shared/phase0/epoch_processing/eth1_data_reset.go b/testing/spectest/shared/phase0/epoch_processing/eth1_data_reset.go index ad0d60b70c..3b574c76c6 100644 --- a/testing/spectest/shared/phase0/epoch_processing/eth1_data_reset.go +++ b/testing/spectest/shared/phase0/epoch_processing/eth1_data_reset.go @@ -4,10 +4,10 @@ import ( "path" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/epoch" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/spectest/utils" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/epoch" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/spectest/utils" ) // RunEth1DataResetTests executes "epoch_processing/eth1_data_reset" tests. 
diff --git a/testing/spectest/shared/phase0/epoch_processing/helpers.go b/testing/spectest/shared/phase0/epoch_processing/helpers.go index 3c58bee572..9b9cdb5f8b 100644 --- a/testing/spectest/shared/phase0/epoch_processing/helpers.go +++ b/testing/spectest/shared/phase0/epoch_processing/helpers.go @@ -6,11 +6,11 @@ import ( "strings" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" "github.com/bazelbuild/rules_go/go/tools/bazel" "github.com/golang/snappy" "google.golang.org/protobuf/proto" diff --git a/testing/spectest/shared/phase0/epoch_processing/historical_roots_update.go b/testing/spectest/shared/phase0/epoch_processing/historical_roots_update.go index 28dc00f579..69b22e25c4 100644 --- a/testing/spectest/shared/phase0/epoch_processing/historical_roots_update.go +++ b/testing/spectest/shared/phase0/epoch_processing/historical_roots_update.go @@ -4,10 +4,10 @@ import ( "path" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/epoch" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/spectest/utils" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/epoch" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/spectest/utils" ) // RunHistoricalRootsUpdateTests executes "epoch_processing/historical_roots_update" tests. diff --git a/testing/spectest/shared/phase0/epoch_processing/justification_and_finalization.go b/testing/spectest/shared/phase0/epoch_processing/justification_and_finalization.go index d1de8bfa18..51d23cdce4 100644 --- a/testing/spectest/shared/phase0/epoch_processing/justification_and_finalization.go +++ b/testing/spectest/shared/phase0/epoch_processing/justification_and_finalization.go @@ -5,10 +5,10 @@ import ( "path" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/epoch/precompute" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/spectest/utils" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/epoch/precompute" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/spectest/utils" ) // RunJustificationAndFinalizationTests executes "epoch_processing/justification_and_finalization" tests. 
diff --git a/testing/spectest/shared/phase0/epoch_processing/participation_record_updates.go b/testing/spectest/shared/phase0/epoch_processing/participation_record_updates.go index 90e889d44e..05bf30af42 100644 --- a/testing/spectest/shared/phase0/epoch_processing/participation_record_updates.go +++ b/testing/spectest/shared/phase0/epoch_processing/participation_record_updates.go @@ -4,10 +4,10 @@ import ( "path" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/epoch" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/spectest/utils" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/epoch" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/spectest/utils" ) // RunParticipationRecordUpdatesTests executes "epoch_processing/participation_record_updates" tests. diff --git a/testing/spectest/shared/phase0/epoch_processing/randao_mixes_reset.go b/testing/spectest/shared/phase0/epoch_processing/randao_mixes_reset.go index c6cb43f018..666f154881 100644 --- a/testing/spectest/shared/phase0/epoch_processing/randao_mixes_reset.go +++ b/testing/spectest/shared/phase0/epoch_processing/randao_mixes_reset.go @@ -4,10 +4,10 @@ import ( "path" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/epoch" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/spectest/utils" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/epoch" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/spectest/utils" ) // RunRandaoMixesResetTests executes "epoch_processing/randao_mixes_reset" tests. diff --git a/testing/spectest/shared/phase0/epoch_processing/registry_updates.go b/testing/spectest/shared/phase0/epoch_processing/registry_updates.go index 16b4d3263b..34a17ff980 100644 --- a/testing/spectest/shared/phase0/epoch_processing/registry_updates.go +++ b/testing/spectest/shared/phase0/epoch_processing/registry_updates.go @@ -5,11 +5,11 @@ import ( "path" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/epoch" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/spectest/utils" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/epoch" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/spectest/utils" ) // RunRegistryUpdatesTests executes "epoch_processing/registry_updates" tests. 
diff --git a/testing/spectest/shared/phase0/epoch_processing/rewards_and_penalties.go b/testing/spectest/shared/phase0/epoch_processing/rewards_and_penalties.go index 42b413e8e4..6dff005de8 100644 --- a/testing/spectest/shared/phase0/epoch_processing/rewards_and_penalties.go +++ b/testing/spectest/shared/phase0/epoch_processing/rewards_and_penalties.go @@ -5,11 +5,11 @@ import ( "path" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/epoch/precompute" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/spectest/utils" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/epoch/precompute" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/spectest/utils" ) // RunRewardsAndPenaltiesTests executes "epoch_processing/rewards_and_penalties" tests. diff --git a/testing/spectest/shared/phase0/epoch_processing/slashings.go b/testing/spectest/shared/phase0/epoch_processing/slashings.go index 68f3631fad..7443675a6d 100644 --- a/testing/spectest/shared/phase0/epoch_processing/slashings.go +++ b/testing/spectest/shared/phase0/epoch_processing/slashings.go @@ -5,12 +5,12 @@ import ( "path" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/epoch" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/epoch/precompute" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/spectest/utils" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/epoch" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/epoch/precompute" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/spectest/utils" ) // RunSlashingsTests executes "epoch_processing/slashings" tests. diff --git a/testing/spectest/shared/phase0/epoch_processing/slashings_reset.go b/testing/spectest/shared/phase0/epoch_processing/slashings_reset.go index 32718d5c74..24c1717dac 100644 --- a/testing/spectest/shared/phase0/epoch_processing/slashings_reset.go +++ b/testing/spectest/shared/phase0/epoch_processing/slashings_reset.go @@ -4,10 +4,10 @@ import ( "path" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/epoch" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/spectest/utils" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/epoch" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/spectest/utils" ) // RunSlashingsResetTests executes "epoch_processing/slashings_reset" tests. 
diff --git a/testing/spectest/shared/phase0/finality/BUILD.bazel b/testing/spectest/shared/phase0/finality/BUILD.bazel index 019af791d2..4b781824d1 100644 --- a/testing/spectest/shared/phase0/finality/BUILD.bazel +++ b/testing/spectest/shared/phase0/finality/BUILD.bazel @@ -4,7 +4,7 @@ go_library( name = "go_default_library", testonly = True, srcs = ["runner.go"], - importpath = "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/phase0/finality", + importpath = "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/phase0/finality", visibility = ["//visibility:public"], deps = [ "//beacon-chain/core/helpers:go_default_library", diff --git a/testing/spectest/shared/phase0/finality/runner.go b/testing/spectest/shared/phase0/finality/runner.go index 20ec40b0f7..48c14459bf 100644 --- a/testing/spectest/shared/phase0/finality/runner.go +++ b/testing/spectest/shared/phase0/finality/runner.go @@ -5,15 +5,15 @@ import ( "fmt" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/transition" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/spectest/utils" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/transition" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/spectest/utils" + "github.com/OffchainLabs/prysm/v7/testing/util" "github.com/golang/snappy" "github.com/google/go-cmp/cmp" "google.golang.org/protobuf/proto" diff --git a/testing/spectest/shared/phase0/operations/BUILD.bazel b/testing/spectest/shared/phase0/operations/BUILD.bazel index 5ba8c17159..2c6733d3ee 100644 --- a/testing/spectest/shared/phase0/operations/BUILD.bazel +++ b/testing/spectest/shared/phase0/operations/BUILD.bazel @@ -12,7 +12,7 @@ go_library( "proposer_slashing.go", "voluntary_exit.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/phase0/operations", + importpath = "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/phase0/operations", visibility = ["//testing/spectest:__subpackages__"], deps = [ "//beacon-chain/core/altair:go_default_library", diff --git a/testing/spectest/shared/phase0/operations/attestation.go b/testing/spectest/shared/phase0/operations/attestation.go index f33e3bb345..ed166af0ae 100644 --- a/testing/spectest/shared/phase0/operations/attestation.go +++ b/testing/spectest/shared/phase0/operations/attestation.go @@ -3,13 +3,13 @@ package operations import ( "testing" - b "github.com/OffchainLabs/prysm/v6/beacon-chain/core/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" - common "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/common/operations" - 
"github.com/OffchainLabs/prysm/v6/testing/util" + b "github.com/OffchainLabs/prysm/v7/beacon-chain/core/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" + common "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/common/operations" + "github.com/OffchainLabs/prysm/v7/testing/util" ) func blockWithAttestation(attestationSSZ []byte) (interfaces.SignedBeaconBlock, error) { diff --git a/testing/spectest/shared/phase0/operations/attester_slashing.go b/testing/spectest/shared/phase0/operations/attester_slashing.go index a84b5c3745..8128f4f92d 100644 --- a/testing/spectest/shared/phase0/operations/attester_slashing.go +++ b/testing/spectest/shared/phase0/operations/attester_slashing.go @@ -3,12 +3,12 @@ package operations import ( "testing" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" - common "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/common/operations" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" + common "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/common/operations" + "github.com/OffchainLabs/prysm/v7/testing/util" ) func blockWithAttesterSlashing(asSSZ []byte) (interfaces.SignedBeaconBlock, error) { diff --git a/testing/spectest/shared/phase0/operations/block_header.go b/testing/spectest/shared/phase0/operations/block_header.go index 8ea6845628..9055fad6c1 100644 --- a/testing/spectest/shared/phase0/operations/block_header.go +++ b/testing/spectest/shared/phase0/operations/block_header.go @@ -3,8 +3,8 @@ package operations import ( "testing" - "github.com/OffchainLabs/prysm/v6/runtime/version" - common "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/common/operations" + "github.com/OffchainLabs/prysm/v7/runtime/version" + common "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/common/operations" ) func RunBlockHeaderTest(t *testing.T, config string) { diff --git a/testing/spectest/shared/phase0/operations/deposit.go b/testing/spectest/shared/phase0/operations/deposit.go index c13a413eb8..d4c7eb50d0 100644 --- a/testing/spectest/shared/phase0/operations/deposit.go +++ b/testing/spectest/shared/phase0/operations/deposit.go @@ -3,13 +3,13 @@ package operations import ( "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/altair" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" - common "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/common/operations" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/altair" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" + 
common "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/common/operations" + "github.com/OffchainLabs/prysm/v7/testing/util" ) func blockWithDeposit(ssz []byte) (interfaces.SignedBeaconBlock, error) { diff --git a/testing/spectest/shared/phase0/operations/helpers.go b/testing/spectest/shared/phase0/operations/helpers.go index 526ac739f5..c385ad6345 100644 --- a/testing/spectest/shared/phase0/operations/helpers.go +++ b/testing/spectest/shared/phase0/operations/helpers.go @@ -1,11 +1,11 @@ package operations import ( - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" ) func sszToState(b []byte) (state.BeaconState, error) { diff --git a/testing/spectest/shared/phase0/operations/proposer_slashing.go b/testing/spectest/shared/phase0/operations/proposer_slashing.go index 0704d7e1ec..9f603bb21f 100644 --- a/testing/spectest/shared/phase0/operations/proposer_slashing.go +++ b/testing/spectest/shared/phase0/operations/proposer_slashing.go @@ -3,12 +3,12 @@ package operations import ( "testing" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" - common "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/common/operations" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" + common "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/common/operations" + "github.com/OffchainLabs/prysm/v7/testing/util" ) func blockWithProposerSlashing(ssz []byte) (interfaces.SignedBeaconBlock, error) { diff --git a/testing/spectest/shared/phase0/operations/voluntary_exit.go b/testing/spectest/shared/phase0/operations/voluntary_exit.go index 3a2e81f2bb..1dd3f32d9a 100644 --- a/testing/spectest/shared/phase0/operations/voluntary_exit.go +++ b/testing/spectest/shared/phase0/operations/voluntary_exit.go @@ -3,12 +3,12 @@ package operations import ( "testing" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" - common "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/common/operations" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" + common "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/common/operations" + 
"github.com/OffchainLabs/prysm/v7/testing/util" ) func blockWithVoluntaryExit(ssz []byte) (interfaces.SignedBeaconBlock, error) { diff --git a/testing/spectest/shared/phase0/rewards/BUILD.bazel b/testing/spectest/shared/phase0/rewards/BUILD.bazel index 88d999de37..b17caf2f86 100644 --- a/testing/spectest/shared/phase0/rewards/BUILD.bazel +++ b/testing/spectest/shared/phase0/rewards/BUILD.bazel @@ -4,7 +4,7 @@ go_library( name = "go_default_library", testonly = True, srcs = ["rewards_penalties.go"], - importpath = "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/phase0/rewards", + importpath = "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/phase0/rewards", visibility = ["//testing/spectest:__subpackages__"], deps = [ "//beacon-chain/core/epoch/precompute:go_default_library", diff --git a/testing/spectest/shared/phase0/rewards/rewards_penalties.go b/testing/spectest/shared/phase0/rewards/rewards_penalties.go index 1b0e1de719..9048f2f14b 100644 --- a/testing/spectest/shared/phase0/rewards/rewards_penalties.go +++ b/testing/spectest/shared/phase0/rewards/rewards_penalties.go @@ -8,13 +8,13 @@ import ( "reflect" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/epoch/precompute" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/spectest/utils" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/epoch/precompute" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/spectest/utils" + "github.com/OffchainLabs/prysm/v7/testing/util" "github.com/golang/snappy" ) diff --git a/testing/spectest/shared/phase0/sanity/BUILD.bazel b/testing/spectest/shared/phase0/sanity/BUILD.bazel index a57e6bfe4e..ea865904f7 100644 --- a/testing/spectest/shared/phase0/sanity/BUILD.bazel +++ b/testing/spectest/shared/phase0/sanity/BUILD.bazel @@ -8,7 +8,7 @@ go_library( "block_processing.yaml.go", "slot_processing.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/phase0/sanity", + importpath = "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/phase0/sanity", visibility = ["//testing/spectest:__subpackages__"], deps = [ "//beacon-chain/core/helpers:go_default_library", diff --git a/testing/spectest/shared/phase0/sanity/block_processing.go b/testing/spectest/shared/phase0/sanity/block_processing.go index 001ce26168..6531d83f68 100644 --- a/testing/spectest/shared/phase0/sanity/block_processing.go +++ b/testing/spectest/shared/phase0/sanity/block_processing.go @@ -8,15 +8,15 @@ import ( "strings" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/transition" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/spectest/utils" - 
"github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/transition" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/spectest/utils" + "github.com/OffchainLabs/prysm/v7/testing/util" "github.com/bazelbuild/rules_go/go/tools/bazel" "github.com/golang/snappy" "github.com/google/go-cmp/cmp" diff --git a/testing/spectest/shared/phase0/sanity/slot_processing.go b/testing/spectest/shared/phase0/sanity/slot_processing.go index 84e93886d4..82a4001516 100644 --- a/testing/spectest/shared/phase0/sanity/slot_processing.go +++ b/testing/spectest/shared/phase0/sanity/slot_processing.go @@ -5,12 +5,12 @@ import ( "strconv" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/transition" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/spectest/utils" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/transition" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/spectest/utils" + "github.com/OffchainLabs/prysm/v7/testing/util" "github.com/golang/snappy" "github.com/google/go-cmp/cmp" "google.golang.org/protobuf/proto" diff --git a/testing/spectest/shared/phase0/shuffling/core/shuffle/BUILD.bazel b/testing/spectest/shared/phase0/shuffling/core/shuffle/BUILD.bazel index 5a65f871d8..a76c6adc4c 100644 --- a/testing/spectest/shared/phase0/shuffling/core/shuffle/BUILD.bazel +++ b/testing/spectest/shared/phase0/shuffling/core/shuffle/BUILD.bazel @@ -7,7 +7,7 @@ go_library( "shuffle.go", "shuffle_test_format.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/phase0/shuffling/core/shuffle", + importpath = "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/phase0/shuffling/core/shuffle", visibility = ["//testing/spectest:__subpackages__"], deps = [ "//beacon-chain/core/helpers:go_default_library", diff --git a/testing/spectest/shared/phase0/shuffling/core/shuffle/shuffle.go b/testing/spectest/shared/phase0/shuffling/core/shuffle/shuffle.go index ac25a0d429..96fb847078 100644 --- a/testing/spectest/shared/phase0/shuffling/core/shuffle/shuffle.go +++ b/testing/spectest/shared/phase0/shuffling/core/shuffle/shuffle.go @@ -7,11 +7,11 @@ import ( "path" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/spectest/utils" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/spectest/utils" + 
"github.com/OffchainLabs/prysm/v7/testing/util" "github.com/ethereum/go-ethereum/common" "github.com/go-yaml/yaml" ) diff --git a/testing/spectest/shared/phase0/shuffling/core/shuffle/shuffle_test_format.go b/testing/spectest/shared/phase0/shuffling/core/shuffle/shuffle_test_format.go index 7bbbe21d8a..f9dabd9b5e 100644 --- a/testing/spectest/shared/phase0/shuffling/core/shuffle/shuffle_test_format.go +++ b/testing/spectest/shared/phase0/shuffling/core/shuffle/shuffle_test_format.go @@ -1,6 +1,6 @@ package shuffle -import "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" +import "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" // TestCase -- type TestCase struct { diff --git a/testing/spectest/shared/phase0/ssz_static/BUILD.bazel b/testing/spectest/shared/phase0/ssz_static/BUILD.bazel index c9002f0443..7c2211e2d5 100644 --- a/testing/spectest/shared/phase0/ssz_static/BUILD.bazel +++ b/testing/spectest/shared/phase0/ssz_static/BUILD.bazel @@ -4,7 +4,7 @@ go_library( name = "go_default_library", testonly = True, srcs = ["ssz_static.go"], - importpath = "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/phase0/ssz_static", + importpath = "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/phase0/ssz_static", visibility = ["//testing/spectest:__subpackages__"], deps = [ "//beacon-chain/state/state-native:go_default_library", diff --git a/testing/spectest/shared/phase0/ssz_static/ssz_static.go b/testing/spectest/shared/phase0/ssz_static/ssz_static.go index b0bdb9b36e..5220ba08db 100644 --- a/testing/spectest/shared/phase0/ssz_static/ssz_static.go +++ b/testing/spectest/shared/phase0/ssz_static/ssz_static.go @@ -5,10 +5,10 @@ import ( "errors" "testing" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/require" - common "github.com/OffchainLabs/prysm/v6/testing/spectest/shared/common/ssz_static" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/require" + common "github.com/OffchainLabs/prysm/v7/testing/spectest/shared/common/ssz_static" fssz "github.com/prysmaticlabs/fastssz" ) diff --git a/testing/spectest/utils/BUILD.bazel b/testing/spectest/utils/BUILD.bazel index 00ee1c43b1..3cdde3db2e 100644 --- a/testing/spectest/utils/BUILD.bazel +++ b/testing/spectest/utils/BUILD.bazel @@ -7,7 +7,7 @@ go_library( "config.go", "utils.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/testing/spectest/utils", + importpath = "github.com/OffchainLabs/prysm/v7/testing/spectest/utils", visibility = ["//testing/spectest:__subpackages__"], deps = [ "//config/params:go_default_library", diff --git a/testing/spectest/utils/config.go b/testing/spectest/utils/config.go index f1e21a1bc0..c680d0420d 100644 --- a/testing/spectest/utils/config.go +++ b/testing/spectest/utils/config.go @@ -6,7 +6,7 @@ import ( "fmt" "testing" - "github.com/OffchainLabs/prysm/v6/config/params" + "github.com/OffchainLabs/prysm/v7/config/params" ) // SetConfig sets the global params for spec tests depending on the option chosen. 
diff --git a/testing/spectest/utils/config_test.go b/testing/spectest/utils/config_test.go index 1e7c221655..a27b5abe86 100644 --- a/testing/spectest/utils/config_test.go +++ b/testing/spectest/utils/config_test.go @@ -3,9 +3,9 @@ package utils import ( "testing" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/testing/require" ) func TestConfig(t *testing.T) { diff --git a/testing/spectest/utils/utils.go b/testing/spectest/utils/utils.go index cb76dc4b6b..5580936d7e 100644 --- a/testing/spectest/utils/utils.go +++ b/testing/spectest/utils/utils.go @@ -6,8 +6,8 @@ import ( "path" "testing" - "github.com/OffchainLabs/prysm/v6/io/file" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/io/file" + "github.com/OffchainLabs/prysm/v7/testing/require" "github.com/bazelbuild/rules_go/go/tools/bazel" "github.com/ghodss/yaml" jsoniter "github.com/json-iterator/go" diff --git a/testing/util/BUILD.bazel b/testing/util/BUILD.bazel index 86f4c85fc6..01f7756ba2 100644 --- a/testing/util/BUILD.bazel +++ b/testing/util/BUILD.bazel @@ -33,7 +33,7 @@ go_library( "sync_committee.go", "wait_timeout.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/testing/util", + importpath = "github.com/OffchainLabs/prysm/v7/testing/util", visibility = ["//visibility:public"], deps = [ "//beacon-chain/blockchain/kzg:go_default_library", diff --git a/testing/util/altair.go b/testing/util/altair.go index e7a9c09d67..a951c095f9 100644 --- a/testing/util/altair.go +++ b/testing/util/altair.go @@ -6,22 +6,22 @@ import ( "testing" "github.com/OffchainLabs/go-bitfield" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/altair" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/blocks" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/signing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/time" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/transition" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state/stateutil" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - consensusblocks "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/crypto/bls" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/altair" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/blocks" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/signing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/time" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/transition" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state/stateutil" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + 
"github.com/OffchainLabs/prysm/v7/config/params" + consensusblocks "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/crypto/bls" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" "github.com/pkg/errors" ) diff --git a/testing/util/attestation.go b/testing/util/attestation.go index 96208edd5b..5eb4e493b1 100644 --- a/testing/util/attestation.go +++ b/testing/util/attestation.go @@ -6,20 +6,20 @@ import ( "math" "github.com/OffchainLabs/go-bitfield" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/signing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/transition" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/crypto/bls" - "github.com/OffchainLabs/prysm/v6/crypto/rand" - attv1 "github.com/OffchainLabs/prysm/v6/proto/eth/v1" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/signing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/transition" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/crypto/bls" + "github.com/OffchainLabs/prysm/v7/crypto/rand" + attv1 "github.com/OffchainLabs/prysm/v7/proto/eth/v1" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" + "github.com/OffchainLabs/prysm/v7/time/slots" log "github.com/sirupsen/logrus" ) diff --git a/testing/util/attestation_test.go b/testing/util/attestation_test.go index 58157b3220..e823249f0a 100644 --- a/testing/util/attestation_test.go +++ b/testing/util/attestation_test.go @@ -3,11 +3,11 @@ package util import ( "testing" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - v1 "github.com/OffchainLabs/prysm/v6/proto/eth/v1" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/require" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + v1 "github.com/OffchainLabs/prysm/v7/proto/eth/v1" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/require" ) func TestHydrateAttestation(t *testing.T) { diff --git a/testing/util/bellatrix.go b/testing/util/bellatrix.go index 4b38f36240..867d664085 100644 --- a/testing/util/bellatrix.go +++ b/testing/util/bellatrix.go @@ -6,19 +6,19 @@ import ( "fmt" "github.com/OffchainLabs/go-bitfield" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - 
"github.com/OffchainLabs/prysm/v6/beacon-chain/core/time" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/transition" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/crypto/bls" - "github.com/OffchainLabs/prysm/v6/crypto/hash" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - enginev1 "github.com/OffchainLabs/prysm/v6/proto/engine/v1" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/time" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/transition" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/crypto/bls" + "github.com/OffchainLabs/prysm/v7/crypto/hash" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + enginev1 "github.com/OffchainLabs/prysm/v7/proto/engine/v1" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/time/slots" "github.com/pkg/errors" ) diff --git a/testing/util/bellatrix_state.go b/testing/util/bellatrix_state.go index c953d2ab8a..7c1f06d83e 100644 --- a/testing/util/bellatrix_state.go +++ b/testing/util/bellatrix_state.go @@ -5,16 +5,16 @@ import ( "testing" "time" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state/stateutil" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/crypto/bls" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - enginev1 "github.com/OffchainLabs/prysm/v6/proto/engine/v1" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state/stateutil" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/crypto/bls" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + enginev1 "github.com/OffchainLabs/prysm/v7/proto/engine/v1" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" "github.com/pkg/errors" ) diff --git a/testing/util/bellatrix_state_test.go b/testing/util/bellatrix_state_test.go index 4ad73a7783..0f23510519 100644 --- a/testing/util/bellatrix_state_test.go +++ b/testing/util/bellatrix_state_test.go @@ -4,8 +4,8 @@ import ( "testing" "time" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/testing/require" ) func TestDeterministicGenesisStateBellatrix(t *testing.T) { diff --git a/testing/util/blob.go b/testing/util/blob.go index 
8351e367ea..58050c6a9b 100644 --- a/testing/util/blob.go +++ b/testing/util/blob.go @@ -1,8 +1,8 @@ package util import ( - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" ) // HydrateBlobSidecar hydrates a blob sidecar with correct field length sizes diff --git a/testing/util/block.go b/testing/util/block.go index cbc5245e94..d3dfbf7de9 100644 --- a/testing/util/block.go +++ b/testing/util/block.go @@ -6,25 +6,25 @@ import ( "fmt" "math/big" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/signing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/time" - "github.com/OffchainLabs/prysm/v6/beacon-chain/db/iface" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/crypto/bls" - "github.com/OffchainLabs/prysm/v6/crypto/rand" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - enginev1 "github.com/OffchainLabs/prysm/v6/proto/engine/v1" - v1 "github.com/OffchainLabs/prysm/v6/proto/eth/v1" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" - "github.com/OffchainLabs/prysm/v6/testing/assertions" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/signing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/time" + "github.com/OffchainLabs/prysm/v7/beacon-chain/db/iface" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/crypto/bls" + "github.com/OffchainLabs/prysm/v7/crypto/rand" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + enginev1 "github.com/OffchainLabs/prysm/v7/proto/engine/v1" + v1 "github.com/OffchainLabs/prysm/v7/proto/eth/v1" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" + "github.com/OffchainLabs/prysm/v7/testing/assertions" + "github.com/OffchainLabs/prysm/v7/testing/require" "github.com/ethereum/go-ethereum/common" "github.com/pkg/errors" ) diff --git a/testing/util/block_test.go b/testing/util/block_test.go index 4a5eebb0b7..d75b76a7f9 100644 --- a/testing/util/block_test.go +++ b/testing/util/block_test.go @@ -3,17 +3,17 @@ package util import ( "testing" - coreBlock "github.com/OffchainLabs/prysm/v6/beacon-chain/core/blocks" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/transition" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/transition/stateutils" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - 
"github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - ethpbv1 "github.com/OffchainLabs/prysm/v6/proto/eth/v1" - ethpbalpha "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/require" + coreBlock "github.com/OffchainLabs/prysm/v7/beacon-chain/core/blocks" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/transition" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/transition/stateutils" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + ethpbv1 "github.com/OffchainLabs/prysm/v7/proto/eth/v1" + ethpbalpha "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/require" ) func TestGenerateFullBlock_PassesStateTransition(t *testing.T) { diff --git a/testing/util/capella_block.go b/testing/util/capella_block.go index 68cfe01e6e..49a986f41b 100644 --- a/testing/util/capella_block.go +++ b/testing/util/capella_block.go @@ -5,19 +5,19 @@ import ( "fmt" "github.com/OffchainLabs/go-bitfield" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/signing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/time" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/transition" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/crypto/bls" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - v1 "github.com/OffchainLabs/prysm/v6/proto/engine/v1" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/signing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/time" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/transition" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/crypto/bls" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + v1 "github.com/OffchainLabs/prysm/v7/proto/engine/v1" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/time/slots" "github.com/pkg/errors" ) diff --git a/testing/util/capella_block_test.go b/testing/util/capella_block_test.go index b9c55134f4..58a3a681cb 100644 --- a/testing/util/capella_block_test.go +++ b/testing/util/capella_block_test.go @@ -3,12 +3,12 @@ package util import ( "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/signing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/time" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/crypto/hash" - "github.com/OffchainLabs/prysm/v6/encoding/ssz" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/signing" + 
"github.com/OffchainLabs/prysm/v7/beacon-chain/core/time" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/crypto/hash" + "github.com/OffchainLabs/prysm/v7/encoding/ssz" + "github.com/OffchainLabs/prysm/v7/testing/require" ) func TestGenerateBLSToExecutionChange(t *testing.T) { diff --git a/testing/util/capella_state.go b/testing/util/capella_state.go index 6683fe45f6..86b6ca96e6 100644 --- a/testing/util/capella_state.go +++ b/testing/util/capella_state.go @@ -4,15 +4,15 @@ import ( "context" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state/stateutil" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/crypto/bls" - enginev1 "github.com/OffchainLabs/prysm/v6/proto/engine/v1" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state/stateutil" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/crypto/bls" + enginev1 "github.com/OffchainLabs/prysm/v7/proto/engine/v1" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" "github.com/pkg/errors" ) diff --git a/testing/util/data_column.go b/testing/util/data_column.go index a3a75079d5..99b9c46011 100644 --- a/testing/util/data_column.go +++ b/testing/util/data_column.go @@ -3,11 +3,11 @@ package util import ( "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/kzg" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain/kzg" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" ) type ( diff --git a/testing/util/deneb.go b/testing/util/deneb.go index 122f04739e..ee3882e1e9 100644 --- a/testing/util/deneb.go +++ b/testing/util/deneb.go @@ -5,18 +5,18 @@ import ( "math/big" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/signing" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/crypto/bls" - "github.com/OffchainLabs/prysm/v6/crypto/random" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - enginev1 "github.com/OffchainLabs/prysm/v6/proto/engine/v1" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/signing" + fieldparams 
"github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/crypto/bls" + "github.com/OffchainLabs/prysm/v7/crypto/random" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + enginev1 "github.com/OffchainLabs/prysm/v7/proto/engine/v1" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/time/slots" GoKZG "github.com/crate-crypto/go-kzg-4844" "github.com/ethereum/go-ethereum/common" gethTypes "github.com/ethereum/go-ethereum/core/types" diff --git a/testing/util/deneb_state.go b/testing/util/deneb_state.go index 2e6d387b0b..31879353f9 100644 --- a/testing/util/deneb_state.go +++ b/testing/util/deneb_state.go @@ -4,15 +4,15 @@ import ( "context" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state/stateutil" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/crypto/bls" - enginev1 "github.com/OffchainLabs/prysm/v6/proto/engine/v1" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state/stateutil" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/crypto/bls" + enginev1 "github.com/OffchainLabs/prysm/v7/proto/engine/v1" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" "github.com/pkg/errors" ) diff --git a/testing/util/deneb_test.go b/testing/util/deneb_test.go index 218dd2a351..65acd572c7 100644 --- a/testing/util/deneb_test.go +++ b/testing/util/deneb_test.go @@ -3,9 +3,9 @@ package util import ( "testing" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/testing/require" ) func TestInclusionProofs(t *testing.T) { diff --git a/testing/util/deposits.go b/testing/util/deposits.go index 32109c23a5..3eb567bb04 100644 --- a/testing/util/deposits.go +++ b/testing/util/deposits.go @@ -4,16 +4,16 @@ import ( "sync" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/signing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/transition" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/container/trie" - "github.com/OffchainLabs/prysm/v6/crypto/bls" - "github.com/OffchainLabs/prysm/v6/crypto/hash" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/interop" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/signing" + 
"github.com/OffchainLabs/prysm/v7/beacon-chain/core/transition" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/container/trie" + "github.com/OffchainLabs/prysm/v7/crypto/bls" + "github.com/OffchainLabs/prysm/v7/crypto/hash" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/interop" "github.com/pkg/errors" ) diff --git a/testing/util/deposits_test.go b/testing/util/deposits_test.go index 7b023b1e60..c685d8ccc3 100644 --- a/testing/util/deposits_test.go +++ b/testing/util/deposits_test.go @@ -5,9 +5,9 @@ import ( "encoding/hex" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/testing/require" "google.golang.org/protobuf/proto" ) diff --git a/testing/util/electra.go b/testing/util/electra.go index 2ba802cc01..c2d4d08be5 100644 --- a/testing/util/electra.go +++ b/testing/util/electra.go @@ -4,18 +4,18 @@ import ( "math/big" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/kzg" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/signing" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/crypto/bls" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - enginev1 "github.com/OffchainLabs/prysm/v6/proto/engine/v1" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain/kzg" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/signing" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/crypto/bls" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + enginev1 "github.com/OffchainLabs/prysm/v7/proto/engine/v1" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/time/slots" "github.com/ethereum/go-ethereum/common" gethTypes "github.com/ethereum/go-ethereum/core/types" ) diff --git a/testing/util/electra_block.go b/testing/util/electra_block.go index d61d8b32b7..62b11517e9 100644 --- a/testing/util/electra_block.go +++ b/testing/util/electra_block.go @@ -7,19 +7,19 @@ import ( "math/big" "github.com/OffchainLabs/go-bitfield" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/signing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/time" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/transition" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - 
"github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/crypto/bls" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - v1 "github.com/OffchainLabs/prysm/v6/proto/engine/v1" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/signing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/time" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/transition" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/crypto/bls" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + v1 "github.com/OffchainLabs/prysm/v7/proto/engine/v1" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/time/slots" "github.com/ethereum/go-ethereum/common" "github.com/pkg/errors" ) diff --git a/testing/util/electra_state.go b/testing/util/electra_state.go index 29c6cf8930..4dac18f127 100644 --- a/testing/util/electra_state.go +++ b/testing/util/electra_state.go @@ -4,17 +4,17 @@ import ( "context" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state/stateutil" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/crypto/bls" - enginev1 "github.com/OffchainLabs/prysm/v6/proto/engine/v1" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state/stateutil" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/crypto/bls" + enginev1 "github.com/OffchainLabs/prysm/v7/proto/engine/v1" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/time/slots" "github.com/pkg/errors" ) diff --git a/testing/util/fulu.go b/testing/util/fulu.go index 66613adf9c..9f9687f963 100644 --- a/testing/util/fulu.go +++ b/testing/util/fulu.go @@ -4,18 +4,18 @@ import ( "math/big" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/blockchain/kzg" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/peerdas" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/signing" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/crypto/bls" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - enginev1 
"github.com/OffchainLabs/prysm/v6/proto/engine/v1" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain/kzg" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/peerdas" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/signing" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/crypto/bls" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + enginev1 "github.com/OffchainLabs/prysm/v7/proto/engine/v1" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/time/slots" "github.com/ethereum/go-ethereum/common" gethTypes "github.com/ethereum/go-ethereum/core/types" ) diff --git a/testing/util/fulu_block.go b/testing/util/fulu_block.go index c9befda671..e6c8e23f7b 100644 --- a/testing/util/fulu_block.go +++ b/testing/util/fulu_block.go @@ -5,18 +5,18 @@ import ( "fmt" "github.com/OffchainLabs/go-bitfield" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/time" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/transition" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/crypto/bls" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - v1 "github.com/OffchainLabs/prysm/v6/proto/engine/v1" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/time" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/transition" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/crypto/bls" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + v1 "github.com/OffchainLabs/prysm/v7/proto/engine/v1" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/time/slots" "github.com/pkg/errors" ) diff --git a/testing/util/fulu_state.go b/testing/util/fulu_state.go index ec77d9b56e..14e3472219 100644 --- a/testing/util/fulu_state.go +++ b/testing/util/fulu_state.go @@ -4,17 +4,17 @@ import ( "context" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state/stateutil" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/crypto/bls" - enginev1 "github.com/OffchainLabs/prysm/v6/proto/engine/v1" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/time/slots" + 
"github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state/stateutil" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/crypto/bls" + enginev1 "github.com/OffchainLabs/prysm/v7/proto/engine/v1" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/time/slots" "github.com/pkg/errors" ) diff --git a/testing/util/helpers.go b/testing/util/helpers.go index db54021b81..9e55ee07ac 100644 --- a/testing/util/helpers.go +++ b/testing/util/helpers.go @@ -7,20 +7,20 @@ import ( "math" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/signing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/time" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/transition" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/crypto/bls" - "github.com/OffchainLabs/prysm/v6/crypto/rand" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/signing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/time" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/transition" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/crypto/bls" + "github.com/OffchainLabs/prysm/v7/crypto/rand" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" + "github.com/OffchainLabs/prysm/v7/testing/require" "github.com/pkg/errors" ) diff --git a/testing/util/helpers_test.go b/testing/util/helpers_test.go index 8573c83d32..a33040f3a2 100644 --- a/testing/util/helpers_test.go +++ b/testing/util/helpers_test.go @@ -5,15 +5,15 @@ import ( "encoding/binary" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/signing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/time" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/signing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/time" + fieldparams 
"github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/time/slots" ) func TestBlockSignature(t *testing.T) { diff --git a/testing/util/lightclient.go b/testing/util/lightclient.go index d40363f2d4..ddc7789bd9 100644 --- a/testing/util/lightclient.go +++ b/testing/util/lightclient.go @@ -4,20 +4,20 @@ import ( "context" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - consensus_types "github.com/OffchainLabs/prysm/v6/consensus-types" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - lightclienttypes "github.com/OffchainLabs/prysm/v6/consensus-types/light-client" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/encoding/ssz" - v11 "github.com/OffchainLabs/prysm/v6/proto/engine/v1" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + consensus_types "github.com/OffchainLabs/prysm/v7/consensus-types" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + lightclienttypes "github.com/OffchainLabs/prysm/v7/consensus-types/light-client" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/encoding/ssz" + v11 "github.com/OffchainLabs/prysm/v7/proto/engine/v1" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/time/slots" "github.com/pkg/errors" ) diff --git a/testing/util/lightclient_test.go b/testing/util/lightclient_test.go index 4aa6c8339a..ad81e70eaf 100644 --- a/testing/util/lightclient_test.go +++ b/testing/util/lightclient_test.go @@ -3,10 +3,10 @@ package util_test import ( "testing" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/runtime/version" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/runtime/version" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" ) func TestLightClientUtils(t *testing.T) { diff --git a/testing/util/logging_test.go b/testing/util/logging_test.go index 53894487cd..fb73ca5569 100644 --- a/testing/util/logging_test.go +++ b/testing/util/logging_test.go @@ -5,7 +5,7 @@ import ( "testing" "time" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/require" "github.com/pkg/errors" "github.com/sirupsen/logrus" ) diff --git a/testing/util/merge.go b/testing/util/merge.go index 136dbb6726..5878498521 100644 --- a/testing/util/merge.go +++ b/testing/util/merge.go @@ -1,7 +1,7 @@ 
package util import ( - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" ) // ---------------------------------------------------------------------------- diff --git a/testing/util/slot.go b/testing/util/slot.go index a010fa5dcf..64b133e14b 100644 --- a/testing/util/slot.go +++ b/testing/util/slot.go @@ -3,9 +3,9 @@ package util import ( "testing" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/time/slots" ) func SlotAtEpoch(t *testing.T, e primitives.Epoch) primitives.Slot { diff --git a/testing/util/state.go b/testing/util/state.go index 319cb6e5af..f537e254a7 100644 --- a/testing/util/state.go +++ b/testing/util/state.go @@ -6,16 +6,16 @@ import ( "testing" "github.com/OffchainLabs/go-bitfield" - b "github.com/OffchainLabs/prysm/v6/beacon-chain/core/blocks" - "github.com/OffchainLabs/prysm/v6/beacon-chain/db/iface" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/crypto/bls" - enginev1 "github.com/OffchainLabs/prysm/v6/proto/engine/v1" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/require" + b "github.com/OffchainLabs/prysm/v7/beacon-chain/core/blocks" + "github.com/OffchainLabs/prysm/v7/beacon-chain/db/iface" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/crypto/bls" + enginev1 "github.com/OffchainLabs/prysm/v7/proto/engine/v1" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/require" "github.com/ethereum/go-ethereum/common/hexutil" ) diff --git a/testing/util/state_test.go b/testing/util/state_test.go index 86f5b27e77..4254a21d06 100644 --- a/testing/util/state_test.go +++ b/testing/util/state_test.go @@ -3,9 +3,9 @@ package util import ( "testing" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" ) func TestNewBeaconState(t *testing.T) { diff --git a/testing/util/sync_aggregate.go b/testing/util/sync_aggregate.go index 3f2552e025..5eb120501f 100644 --- a/testing/util/sync_aggregate.go +++ b/testing/util/sync_aggregate.go @@ -2,14 +2,14 @@ package util import ( "github.com/OffchainLabs/go-bitfield" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/signing" - p2pType "github.com/OffchainLabs/prysm/v6/beacon-chain/p2p/types" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/crypto/bls" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - ethpb 
"github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/signing" + p2pType "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/types" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/crypto/bls" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/time/slots" "github.com/pkg/errors" ) diff --git a/testing/util/sync_committee.go b/testing/util/sync_committee.go index 7ef9ce148f..41346dd49c 100644 --- a/testing/util/sync_committee.go +++ b/testing/util/sync_committee.go @@ -1,10 +1,10 @@ package util import ( - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" ) // HydrateSyncCommittee hydrates the provided sync committee message. diff --git a/testing/validator-mock/BUILD.bazel b/testing/validator-mock/BUILD.bazel index b9c56d2452..95098d7fce 100644 --- a/testing/validator-mock/BUILD.bazel +++ b/testing/validator-mock/BUILD.bazel @@ -11,7 +11,7 @@ go_library( "validator_client_mock.go", "validator_mock.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/testing/validator-mock", + importpath = "github.com/OffchainLabs/prysm/v7/testing/validator-mock", visibility = ["//visibility:public"], deps = [ "//api/client/event:go_default_library", diff --git a/testing/validator-mock/chain_client_mock.go b/testing/validator-mock/chain_client_mock.go index bad85f699e..ffc4cc96b3 100644 --- a/testing/validator-mock/chain_client_mock.go +++ b/testing/validator-mock/chain_client_mock.go @@ -1,9 +1,9 @@ // Code generated by MockGen. DO NOT EDIT. -// Source: github.com/OffchainLabs/prysm/v6/validator/client/iface (interfaces: ChainClient) +// Source: github.com/OffchainLabs/prysm/v7/validator/client/iface (interfaces: ChainClient) // // Generated by this command: // -// mockgen -package=validator_mock -destination=testing/validator-mock/chain_client_mock.go github.com/OffchainLabs/prysm/v6/validator/client/iface ChainClient +// mockgen -package=validator_mock -destination=testing/validator-mock/chain_client_mock.go github.com/OffchainLabs/prysm/v7/validator/client/iface ChainClient // // Package validator_mock is a generated GoMock package. @@ -13,7 +13,7 @@ import ( context "context" reflect "reflect" - eth "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + eth "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" gomock "go.uber.org/mock/gomock" emptypb "google.golang.org/protobuf/types/known/emptypb" ) diff --git a/testing/validator-mock/node_client_mock.go b/testing/validator-mock/node_client_mock.go index d26032d379..56c0140149 100644 --- a/testing/validator-mock/node_client_mock.go +++ b/testing/validator-mock/node_client_mock.go @@ -1,9 +1,9 @@ // Code generated by MockGen. DO NOT EDIT. 
-// Source: github.com/OffchainLabs/prysm/v6/validator/client/iface (interfaces: NodeClient) +// Source: github.com/OffchainLabs/prysm/v7/validator/client/iface (interfaces: NodeClient) // // Generated by this command: // -// mockgen -package=validator_mock -destination=testing/validator-mock/node_client_mock.go github.com/OffchainLabs/prysm/v6/validator/client/iface NodeClient +// mockgen -package=validator_mock -destination=testing/validator-mock/node_client_mock.go github.com/OffchainLabs/prysm/v7/validator/client/iface NodeClient // // Package validator_mock is a generated GoMock package. @@ -13,7 +13,7 @@ import ( context "context" reflect "reflect" - eth "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + eth "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" gomock "go.uber.org/mock/gomock" emptypb "google.golang.org/protobuf/types/known/emptypb" ) diff --git a/testing/validator-mock/prysm_chain_client_mock.go b/testing/validator-mock/prysm_chain_client_mock.go index af80275c86..f2e8eea188 100644 --- a/testing/validator-mock/prysm_chain_client_mock.go +++ b/testing/validator-mock/prysm_chain_client_mock.go @@ -1,9 +1,9 @@ // Code generated by MockGen. DO NOT EDIT. -// Source: github.com/OffchainLabs/prysm/v6/validator/client/iface (interfaces: PrysmChainClient) +// Source: github.com/OffchainLabs/prysm/v7/validator/client/iface (interfaces: PrysmChainClient) // // Generated by this command: // -// mockgen -package=validator_mock -destination=testing/validator-mock/prysm_chain_client_mock.go github.com/OffchainLabs/prysm/v6/validator/client/iface PrysmChainClient +// mockgen -package=validator_mock -destination=testing/validator-mock/prysm_chain_client_mock.go github.com/OffchainLabs/prysm/v7/validator/client/iface PrysmChainClient // // Package validator_mock is a generated GoMock package. @@ -13,9 +13,9 @@ import ( context "context" reflect "reflect" - validator "github.com/OffchainLabs/prysm/v6/consensus-types/validator" - eth "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - iface "github.com/OffchainLabs/prysm/v6/validator/client/iface" + validator "github.com/OffchainLabs/prysm/v7/consensus-types/validator" + eth "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + iface "github.com/OffchainLabs/prysm/v7/validator/client/iface" gomock "go.uber.org/mock/gomock" ) diff --git a/testing/validator-mock/validator_client_mock.go b/testing/validator-mock/validator_client_mock.go index 739587af29..e269b16bec 100644 --- a/testing/validator-mock/validator_client_mock.go +++ b/testing/validator-mock/validator_client_mock.go @@ -1,9 +1,9 @@ // Code generated by MockGen. DO NOT EDIT. -// Source: github.com/OffchainLabs/prysm/v6/validator/client/iface (interfaces: ValidatorClient) +// Source: github.com/OffchainLabs/prysm/v7/validator/client/iface (interfaces: ValidatorClient) // // Generated by this command: // -// mockgen -package=validator_mock -destination=testing/validator-mock/validator_client_mock.go github.com/OffchainLabs/prysm/v6/validator/client/iface ValidatorClient +// mockgen -package=validator_mock -destination=testing/validator-mock/validator_client_mock.go github.com/OffchainLabs/prysm/v7/validator/client/iface ValidatorClient // // Package validator_mock is a generated GoMock package. 
@@ -13,10 +13,10 @@ import ( context "context" reflect "reflect" - event "github.com/OffchainLabs/prysm/v6/api/client/event" - primitives "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - eth "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - iface "github.com/OffchainLabs/prysm/v6/validator/client/iface" + event "github.com/OffchainLabs/prysm/v7/api/client/event" + primitives "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + eth "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + iface "github.com/OffchainLabs/prysm/v7/validator/client/iface" gomock "go.uber.org/mock/gomock" emptypb "google.golang.org/protobuf/types/known/emptypb" ) diff --git a/testing/validator-mock/validator_mock.go b/testing/validator-mock/validator_mock.go index 5ff3080c28..36339305cb 100644 --- a/testing/validator-mock/validator_mock.go +++ b/testing/validator-mock/validator_mock.go @@ -1,9 +1,9 @@ // Code generated by MockGen. DO NOT EDIT. -// Source: github.com/OffchainLabs/prysm/v6/validator/client/iface (interfaces: Validator) +// Source: github.com/OffchainLabs/prysm/v7/validator/client/iface (interfaces: Validator) // // Generated by this command: // -// mockgen -package=validator_mock -destination=testing/validator-mock/validator_mock.go github.com/OffchainLabs/prysm/v6/validator/client/iface Validator +// mockgen -package=validator_mock -destination=testing/validator-mock/validator_mock.go github.com/OffchainLabs/prysm/v7/validator/client/iface Validator // // Package validator_mock is a generated GoMock package. @@ -14,12 +14,12 @@ import ( reflect "reflect" time "time" - event "github.com/OffchainLabs/prysm/v6/api/client/event" - proposer "github.com/OffchainLabs/prysm/v6/config/proposer" - primitives "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - eth "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - iface "github.com/OffchainLabs/prysm/v6/validator/client/iface" - keymanager "github.com/OffchainLabs/prysm/v6/validator/keymanager" + event "github.com/OffchainLabs/prysm/v7/api/client/event" + proposer "github.com/OffchainLabs/prysm/v7/config/proposer" + primitives "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + eth "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + iface "github.com/OffchainLabs/prysm/v7/validator/client/iface" + keymanager "github.com/OffchainLabs/prysm/v7/validator/keymanager" gomock "go.uber.org/mock/gomock" ) diff --git a/time/BUILD.bazel b/time/BUILD.bazel index 0051fe07d7..a36af1adeb 100644 --- a/time/BUILD.bazel +++ b/time/BUILD.bazel @@ -3,6 +3,6 @@ load("@prysm//tools/go:def.bzl", "go_library") go_library( name = "go_default_library", srcs = ["utils.go"], - importpath = "github.com/OffchainLabs/prysm/v6/time", + importpath = "github.com/OffchainLabs/prysm/v7/time", visibility = ["//visibility:public"], ) diff --git a/time/mclock/BUILD.bazel b/time/mclock/BUILD.bazel index c0fdc50735..34eac8825b 100644 --- a/time/mclock/BUILD.bazel +++ b/time/mclock/BUILD.bazel @@ -3,7 +3,7 @@ load("@prysm//tools/go:def.bzl", "go_library") go_library( name = "go_default_library", srcs = ["mclock.go"], - importpath = "github.com/OffchainLabs/prysm/v6/time/mclock", + importpath = "github.com/OffchainLabs/prysm/v7/time/mclock", visibility = ["//visibility:public"], deps = ["@com_github_aristanetworks_goarista//monotime:go_default_library"], ) diff --git a/time/slots/BUILD.bazel b/time/slots/BUILD.bazel index 588a0f8c6d..45289a08db 100644 --- a/time/slots/BUILD.bazel +++ b/time/slots/BUILD.bazel @@ -7,7 +7,7 @@ 
go_library( "slotticker.go", "slottime.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/time/slots", + importpath = "github.com/OffchainLabs/prysm/v7/time/slots", visibility = ["//visibility:public"], deps = [ "//config/params:go_default_library", diff --git a/time/slots/countdown.go b/time/slots/countdown.go index a718183300..46a5c2a787 100644 --- a/time/slots/countdown.go +++ b/time/slots/countdown.go @@ -5,8 +5,8 @@ import ( "fmt" "time" - "github.com/OffchainLabs/prysm/v6/config/params" - prysmTime "github.com/OffchainLabs/prysm/v6/time" + "github.com/OffchainLabs/prysm/v7/config/params" + prysmTime "github.com/OffchainLabs/prysm/v7/time" "github.com/sirupsen/logrus" ) diff --git a/time/slots/countdown_test.go b/time/slots/countdown_test.go index cccb2c0c6a..6a68096bb8 100644 --- a/time/slots/countdown_test.go +++ b/time/slots/countdown_test.go @@ -5,9 +5,9 @@ import ( "testing" "time" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/testing/require" - prysmTime "github.com/OffchainLabs/prysm/v6/time" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/testing/require" + prysmTime "github.com/OffchainLabs/prysm/v7/time" "github.com/sirupsen/logrus" logTest "github.com/sirupsen/logrus/hooks/test" ) diff --git a/time/slots/slotticker.go b/time/slots/slotticker.go index 485dcf4508..fc7258f013 100644 --- a/time/slots/slotticker.go +++ b/time/slots/slotticker.go @@ -4,9 +4,9 @@ package slots import ( "time" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - prysmTime "github.com/OffchainLabs/prysm/v6/time" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + prysmTime "github.com/OffchainLabs/prysm/v7/time" ) // The Ticker interface defines a type which can expose a diff --git a/time/slots/slotticker_test.go b/time/slots/slotticker_test.go index e1d03c3d05..ae65bc2cef 100644 --- a/time/slots/slotticker_test.go +++ b/time/slots/slotticker_test.go @@ -4,8 +4,8 @@ import ( "testing" "time" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" "github.com/stretchr/testify/require" ) diff --git a/time/slots/slottime.go b/time/slots/slottime.go index f7cd983782..cfa4bd0adf 100644 --- a/time/slots/slottime.go +++ b/time/slots/slottime.go @@ -5,11 +5,11 @@ import ( "math" "time" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - mathutil "github.com/OffchainLabs/prysm/v6/math" - "github.com/OffchainLabs/prysm/v6/runtime/version" - prysmTime "github.com/OffchainLabs/prysm/v6/time" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + mathutil "github.com/OffchainLabs/prysm/v7/math" + "github.com/OffchainLabs/prysm/v7/runtime/version" + prysmTime "github.com/OffchainLabs/prysm/v7/time" "github.com/pkg/errors" "github.com/sirupsen/logrus" ) diff --git a/time/slots/slottime_test.go b/time/slots/slottime_test.go index 26feda10ea..d5594f78e4 100644 --- a/time/slots/slottime_test.go +++ b/time/slots/slottime_test.go @@ -6,12 +6,12 @@ import ( "testing" "time" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - 
"github.com/OffchainLabs/prysm/v6/runtime/version" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - prysmTime "github.com/OffchainLabs/prysm/v6/time" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/runtime/version" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + prysmTime "github.com/OffchainLabs/prysm/v7/time" ) func TestAbsoluteValueSlotDifference(t *testing.T) { diff --git a/time/slots/testing/BUILD.bazel b/time/slots/testing/BUILD.bazel index 4b198bf33b..fe691d3d11 100644 --- a/time/slots/testing/BUILD.bazel +++ b/time/slots/testing/BUILD.bazel @@ -3,7 +3,7 @@ load("@prysm//tools/go:def.bzl", "go_library", "go_test") go_library( name = "go_default_library", srcs = ["mock.go"], - importpath = "github.com/OffchainLabs/prysm/v6/time/slots/testing", + importpath = "github.com/OffchainLabs/prysm/v7/time/slots/testing", visibility = ["//visibility:public"], deps = ["//consensus-types/primitives:go_default_library"], ) diff --git a/time/slots/testing/mock.go b/time/slots/testing/mock.go index 959ccfe0ee..5576432a1e 100644 --- a/time/slots/testing/mock.go +++ b/time/slots/testing/mock.go @@ -1,7 +1,7 @@ // Package testing includes useful mocks for slot tickers in unit tests. package testing -import "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" +import "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" // MockTicker defines a useful struct for mocking the Ticker interface // from the slotutil package. diff --git a/time/slots/testing/mock_test.go b/time/slots/testing/mock_test.go index 9f9d390b0f..8e7933d407 100644 --- a/time/slots/testing/mock_test.go +++ b/time/slots/testing/mock_test.go @@ -1,7 +1,7 @@ package testing import ( - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/time/slots" ) var _ slots.Ticker = (*MockTicker)(nil) diff --git a/tools/analyzers/comparesame/BUILD.bazel b/tools/analyzers/comparesame/BUILD.bazel index 1ab0a2ef40..a6c5b020ea 100644 --- a/tools/analyzers/comparesame/BUILD.bazel +++ b/tools/analyzers/comparesame/BUILD.bazel @@ -3,7 +3,7 @@ load("@prysm//tools/go:def.bzl", "go_library", "go_test") go_library( name = "go_default_library", srcs = ["analyzer.go"], - importpath = "github.com/OffchainLabs/prysm/v6/tools/analyzers/comparesame", + importpath = "github.com/OffchainLabs/prysm/v7/tools/analyzers/comparesame", visibility = ["//visibility:public"], deps = [ "@org_golang_x_tools//go/analysis:go_default_library", diff --git a/tools/analyzers/comparesame/analyzer_test.go b/tools/analyzers/comparesame/analyzer_test.go index 6cebb01f4f..1f2c59d0f8 100644 --- a/tools/analyzers/comparesame/analyzer_test.go +++ b/tools/analyzers/comparesame/analyzer_test.go @@ -3,7 +3,7 @@ package comparesame import ( "testing" - "github.com/OffchainLabs/prysm/v6/build/bazel" + "github.com/OffchainLabs/prysm/v7/build/bazel" "golang.org/x/tools/go/analysis/analysistest" ) diff --git a/tools/analyzers/cryptorand/BUILD.bazel b/tools/analyzers/cryptorand/BUILD.bazel index 99f268d92e..1541fd902f 100644 --- a/tools/analyzers/cryptorand/BUILD.bazel +++ b/tools/analyzers/cryptorand/BUILD.bazel @@ -3,7 +3,7 @@ load("@prysm//tools/go:def.bzl", "go_library", "go_test") go_library( name = "go_default_library", srcs = ["analyzer.go"], - importpath = "github.com/OffchainLabs/prysm/v6/tools/analyzers/cryptorand", + importpath = 
"github.com/OffchainLabs/prysm/v7/tools/analyzers/cryptorand", visibility = ["//visibility:public"], deps = [ "@org_golang_x_tools//go/analysis:go_default_library", diff --git a/tools/analyzers/cryptorand/analyzer_test.go b/tools/analyzers/cryptorand/analyzer_test.go index 2afafa23f6..7597642ca3 100644 --- a/tools/analyzers/cryptorand/analyzer_test.go +++ b/tools/analyzers/cryptorand/analyzer_test.go @@ -3,7 +3,7 @@ package cryptorand import ( "testing" - "github.com/OffchainLabs/prysm/v6/build/bazel" + "github.com/OffchainLabs/prysm/v7/build/bazel" "golang.org/x/tools/go/analysis/analysistest" ) diff --git a/tools/analyzers/errcheck/BUILD.bazel b/tools/analyzers/errcheck/BUILD.bazel index ee3c1d043c..54a8b293d9 100644 --- a/tools/analyzers/errcheck/BUILD.bazel +++ b/tools/analyzers/errcheck/BUILD.bazel @@ -3,7 +3,7 @@ load("@prysm//tools/go:def.bzl", "go_library") go_library( name = "go_default_library", srcs = ["analyzer.go"], - importpath = "github.com/OffchainLabs/prysm/v6/tools/analyzers/errcheck", + importpath = "github.com/OffchainLabs/prysm/v7/tools/analyzers/errcheck", visibility = ["//visibility:public"], deps = ["@com_github_kisielk_errcheck//errcheck:go_default_library"], ) diff --git a/tools/analyzers/featureconfig/BUILD.bazel b/tools/analyzers/featureconfig/BUILD.bazel index 2c1afcd0b5..1789293ed4 100644 --- a/tools/analyzers/featureconfig/BUILD.bazel +++ b/tools/analyzers/featureconfig/BUILD.bazel @@ -3,7 +3,7 @@ load("@prysm//tools/go:def.bzl", "go_library") go_library( name = "go_default_library", srcs = ["analyzer.go"], - importpath = "github.com/OffchainLabs/prysm/v6/tools/analyzers/featureconfig", + importpath = "github.com/OffchainLabs/prysm/v7/tools/analyzers/featureconfig", visibility = ["//visibility:public"], deps = [ "@org_golang_x_tools//go/analysis:go_default_library", diff --git a/tools/analyzers/gocognit/BUILD.bazel b/tools/analyzers/gocognit/BUILD.bazel index 75e2e2aeef..90f46cdd3d 100644 --- a/tools/analyzers/gocognit/BUILD.bazel +++ b/tools/analyzers/gocognit/BUILD.bazel @@ -3,7 +3,7 @@ load("@prysm//tools/go:def.bzl", "go_library") go_library( name = "go_default_library", srcs = ["analyzer.go"], - importpath = "github.com/OffchainLabs/prysm/v6/tools/analyzers/gocognit", + importpath = "github.com/OffchainLabs/prysm/v7/tools/analyzers/gocognit", visibility = ["//visibility:public"], deps = [ "@com_github_uudashr_gocognit//:go_default_library", diff --git a/tools/analyzers/ineffassign/BUILD.bazel b/tools/analyzers/ineffassign/BUILD.bazel index bd41817bc0..6791c8fec4 100644 --- a/tools/analyzers/ineffassign/BUILD.bazel +++ b/tools/analyzers/ineffassign/BUILD.bazel @@ -6,7 +6,7 @@ go_library( "analyzer.go", "ineffassign.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/tools/analyzers/ineffassign", + importpath = "github.com/OffchainLabs/prysm/v7/tools/analyzers/ineffassign", visibility = ["//visibility:public"], deps = [ "@org_golang_x_tools//go/analysis:go_default_library", diff --git a/tools/analyzers/ineffassign/analyzer_test.go b/tools/analyzers/ineffassign/analyzer_test.go index 1283c56a9c..0ae110dd96 100644 --- a/tools/analyzers/ineffassign/analyzer_test.go +++ b/tools/analyzers/ineffassign/analyzer_test.go @@ -3,7 +3,7 @@ package ineffassign import ( "testing" - "github.com/OffchainLabs/prysm/v6/build/bazel" + "github.com/OffchainLabs/prysm/v7/build/bazel" "golang.org/x/tools/go/analysis/analysistest" ) diff --git a/tools/analyzers/interfacechecker/BUILD.bazel b/tools/analyzers/interfacechecker/BUILD.bazel index a45afb220f..80fb95f7fa 100644 --- 
a/tools/analyzers/interfacechecker/BUILD.bazel +++ b/tools/analyzers/interfacechecker/BUILD.bazel @@ -3,7 +3,7 @@ load("@prysm//tools/go:def.bzl", "go_library") go_library( name = "go_default_library", srcs = ["analyzer.go"], - importpath = "github.com/OffchainLabs/prysm/v6/tools/analyzers/interfacechecker", + importpath = "github.com/OffchainLabs/prysm/v7/tools/analyzers/interfacechecker", visibility = ["//visibility:public"], deps = [ "@org_golang_x_tools//go/analysis:go_default_library", diff --git a/tools/analyzers/logcapitalization/BUILD.bazel b/tools/analyzers/logcapitalization/BUILD.bazel index 36e347c819..f70066de24 100644 --- a/tools/analyzers/logcapitalization/BUILD.bazel +++ b/tools/analyzers/logcapitalization/BUILD.bazel @@ -3,7 +3,7 @@ load("@prysm//tools/go:def.bzl", "go_library", "go_test") go_library( name = "go_default_library", srcs = ["analyzer.go"], - importpath = "github.com/OffchainLabs/prysm/v6/tools/analyzers/logcapitalization", + importpath = "github.com/OffchainLabs/prysm/v7/tools/analyzers/logcapitalization", visibility = ["//visibility:public"], deps = [ "@org_golang_x_tools//go/analysis:go_default_library", diff --git a/tools/analyzers/logcapitalization/analyzer_test.go b/tools/analyzers/logcapitalization/analyzer_test.go index f336f1312f..ca9e98e32d 100644 --- a/tools/analyzers/logcapitalization/analyzer_test.go +++ b/tools/analyzers/logcapitalization/analyzer_test.go @@ -5,8 +5,8 @@ import ( "golang.org/x/tools/go/analysis/analysistest" - "github.com/OffchainLabs/prysm/v6/build/bazel" - "github.com/OffchainLabs/prysm/v6/tools/analyzers/logcapitalization" + "github.com/OffchainLabs/prysm/v7/build/bazel" + "github.com/OffchainLabs/prysm/v7/tools/analyzers/logcapitalization" ) func init() { diff --git a/tools/analyzers/logruswitherror/BUILD.bazel b/tools/analyzers/logruswitherror/BUILD.bazel index 53ad21b3d5..e0afc816b2 100644 --- a/tools/analyzers/logruswitherror/BUILD.bazel +++ b/tools/analyzers/logruswitherror/BUILD.bazel @@ -3,7 +3,7 @@ load("@prysm//tools/go:def.bzl", "go_library", "go_test") go_library( name = "go_default_library", srcs = ["analyzer.go"], - importpath = "github.com/OffchainLabs/prysm/v6/tools/analyzers/logruswitherror", + importpath = "github.com/OffchainLabs/prysm/v7/tools/analyzers/logruswitherror", visibility = ["//visibility:public"], deps = [ "@org_golang_x_tools//go/analysis:go_default_library", diff --git a/tools/analyzers/logruswitherror/analyzer_test.go b/tools/analyzers/logruswitherror/analyzer_test.go index a288f3e5e2..8dfbadf73b 100644 --- a/tools/analyzers/logruswitherror/analyzer_test.go +++ b/tools/analyzers/logruswitherror/analyzer_test.go @@ -3,7 +3,7 @@ package logruswitherror import ( "testing" - "github.com/OffchainLabs/prysm/v6/build/bazel" + "github.com/OffchainLabs/prysm/v7/build/bazel" "golang.org/x/tools/go/analysis/analysistest" ) diff --git a/tools/analyzers/maligned/BUILD.bazel b/tools/analyzers/maligned/BUILD.bazel index f2b99f770b..f4e837f701 100644 --- a/tools/analyzers/maligned/BUILD.bazel +++ b/tools/analyzers/maligned/BUILD.bazel @@ -6,7 +6,7 @@ go_library( "analyzer.go", "maligned.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/tools/analyzers/maligned", + importpath = "github.com/OffchainLabs/prysm/v7/tools/analyzers/maligned", visibility = ["//visibility:public"], deps = [ "@org_golang_x_tools//go/analysis:go_default_library", diff --git a/tools/analyzers/nop/BUILD.bazel b/tools/analyzers/nop/BUILD.bazel index 72863fbfe5..99cb8687c9 100644 --- a/tools/analyzers/nop/BUILD.bazel +++ 
b/tools/analyzers/nop/BUILD.bazel @@ -3,7 +3,7 @@ load("@prysm//tools/go:def.bzl", "go_library", "go_test") go_library( name = "go_default_library", srcs = ["analyzer.go"], - importpath = "github.com/OffchainLabs/prysm/v6/tools/analyzers/nop", + importpath = "github.com/OffchainLabs/prysm/v7/tools/analyzers/nop", visibility = ["//visibility:public"], deps = [ "@org_golang_x_tools//go/analysis:go_default_library", diff --git a/tools/analyzers/nop/analyzer_test.go b/tools/analyzers/nop/analyzer_test.go index 0eb0289d74..10a237f9d8 100644 --- a/tools/analyzers/nop/analyzer_test.go +++ b/tools/analyzers/nop/analyzer_test.go @@ -3,7 +3,7 @@ package nop import ( "testing" - "github.com/OffchainLabs/prysm/v6/build/bazel" + "github.com/OffchainLabs/prysm/v7/build/bazel" "golang.org/x/tools/go/analysis/analysistest" ) diff --git a/tools/analyzers/nopanic/BUILD.bazel b/tools/analyzers/nopanic/BUILD.bazel index a8aebb0e57..77c8767d05 100644 --- a/tools/analyzers/nopanic/BUILD.bazel +++ b/tools/analyzers/nopanic/BUILD.bazel @@ -3,7 +3,7 @@ load("@prysm//tools/go:def.bzl", "go_library", "go_test") go_library( name = "go_default_library", srcs = ["analyzer.go"], - importpath = "github.com/OffchainLabs/prysm/v6/tools/analyzers/nopanic", + importpath = "github.com/OffchainLabs/prysm/v7/tools/analyzers/nopanic", visibility = ["//visibility:public"], deps = [ "@org_golang_x_tools//go/analysis:go_default_library", diff --git a/tools/analyzers/nopanic/analyzer_test.go b/tools/analyzers/nopanic/analyzer_test.go index e7c3c5bd5a..7bfcb69c1d 100644 --- a/tools/analyzers/nopanic/analyzer_test.go +++ b/tools/analyzers/nopanic/analyzer_test.go @@ -3,7 +3,7 @@ package nopanic import ( "testing" - "github.com/OffchainLabs/prysm/v6/build/bazel" + "github.com/OffchainLabs/prysm/v7/build/bazel" "golang.org/x/tools/go/analysis/analysistest" ) diff --git a/tools/analyzers/properpermissions/BUILD.bazel b/tools/analyzers/properpermissions/BUILD.bazel index 61881c139e..56f655efb8 100644 --- a/tools/analyzers/properpermissions/BUILD.bazel +++ b/tools/analyzers/properpermissions/BUILD.bazel @@ -3,7 +3,7 @@ load("@prysm//tools/go:def.bzl", "go_library", "go_test") go_library( name = "go_default_library", srcs = ["analyzer.go"], - importpath = "github.com/OffchainLabs/prysm/v6/tools/analyzers/properpermissions", + importpath = "github.com/OffchainLabs/prysm/v7/tools/analyzers/properpermissions", visibility = ["//visibility:public"], deps = [ "@org_golang_x_tools//go/analysis:go_default_library", diff --git a/tools/analyzers/properpermissions/analyzer_test.go b/tools/analyzers/properpermissions/analyzer_test.go index f744eb8af7..7edd53a0f2 100644 --- a/tools/analyzers/properpermissions/analyzer_test.go +++ b/tools/analyzers/properpermissions/analyzer_test.go @@ -3,7 +3,7 @@ package properpermissions import ( "testing" - "github.com/OffchainLabs/prysm/v6/build/bazel" + "github.com/OffchainLabs/prysm/v7/build/bazel" "golang.org/x/tools/go/analysis/analysistest" ) diff --git a/tools/analyzers/recursivelock/BUILD.bazel b/tools/analyzers/recursivelock/BUILD.bazel index 00e01bb4c3..1dff0a82e7 100644 --- a/tools/analyzers/recursivelock/BUILD.bazel +++ b/tools/analyzers/recursivelock/BUILD.bazel @@ -3,7 +3,7 @@ load("@prysm//tools/go:def.bzl", "go_library", "go_test") go_library( name = "go_default_library", srcs = ["analyzer.go"], - importpath = "github.com/OffchainLabs/prysm/v6/tools/analyzers/recursivelock", + importpath = "github.com/OffchainLabs/prysm/v7/tools/analyzers/recursivelock", visibility = ["//visibility:public"], deps = [ 
"@org_golang_x_tools//go/analysis:go_default_library", diff --git a/tools/analyzers/recursivelock/analyzer_test.go b/tools/analyzers/recursivelock/analyzer_test.go index b0f2d459cc..929aca7fe3 100644 --- a/tools/analyzers/recursivelock/analyzer_test.go +++ b/tools/analyzers/recursivelock/analyzer_test.go @@ -3,7 +3,7 @@ package recursivelock import ( "testing" - "github.com/OffchainLabs/prysm/v6/build/bazel" + "github.com/OffchainLabs/prysm/v7/build/bazel" "golang.org/x/tools/go/analysis/analysistest" ) diff --git a/tools/analyzers/shadowpredecl/BUILD.bazel b/tools/analyzers/shadowpredecl/BUILD.bazel index e999e6426d..0cf4448426 100644 --- a/tools/analyzers/shadowpredecl/BUILD.bazel +++ b/tools/analyzers/shadowpredecl/BUILD.bazel @@ -3,7 +3,7 @@ load("@prysm//tools/go:def.bzl", "go_library", "go_test") go_library( name = "go_default_library", srcs = ["analyzer.go"], - importpath = "github.com/OffchainLabs/prysm/v6/tools/analyzers/shadowpredecl", + importpath = "github.com/OffchainLabs/prysm/v7/tools/analyzers/shadowpredecl", visibility = ["//visibility:public"], deps = [ "@org_golang_x_tools//go/analysis:go_default_library", diff --git a/tools/analyzers/shadowpredecl/analyzer_test.go b/tools/analyzers/shadowpredecl/analyzer_test.go index 717fc02d6b..434111de70 100644 --- a/tools/analyzers/shadowpredecl/analyzer_test.go +++ b/tools/analyzers/shadowpredecl/analyzer_test.go @@ -3,7 +3,7 @@ package shadowpredecl import ( "testing" - "github.com/OffchainLabs/prysm/v6/build/bazel" + "github.com/OffchainLabs/prysm/v7/build/bazel" "golang.org/x/tools/go/analysis/analysistest" ) diff --git a/tools/analyzers/slicedirect/BUILD.bazel b/tools/analyzers/slicedirect/BUILD.bazel index 6fb7689cc9..25fc71d08e 100644 --- a/tools/analyzers/slicedirect/BUILD.bazel +++ b/tools/analyzers/slicedirect/BUILD.bazel @@ -3,7 +3,7 @@ load("@prysm//tools/go:def.bzl", "go_library", "go_test") go_library( name = "go_default_library", srcs = ["analyzer.go"], - importpath = "github.com/OffchainLabs/prysm/v6/tools/analyzers/slicedirect", + importpath = "github.com/OffchainLabs/prysm/v7/tools/analyzers/slicedirect", visibility = ["//visibility:public"], deps = [ "@org_golang_x_tools//go/analysis:go_default_library", diff --git a/tools/analyzers/slicedirect/analyzer_test.go b/tools/analyzers/slicedirect/analyzer_test.go index 36cd3c911f..807bd202a3 100644 --- a/tools/analyzers/slicedirect/analyzer_test.go +++ b/tools/analyzers/slicedirect/analyzer_test.go @@ -3,7 +3,7 @@ package slicedirect import ( "testing" - "github.com/OffchainLabs/prysm/v6/build/bazel" + "github.com/OffchainLabs/prysm/v7/build/bazel" "golang.org/x/tools/go/analysis/analysistest" ) diff --git a/tools/analyzers/uintcast/BUILD.bazel b/tools/analyzers/uintcast/BUILD.bazel index 6c34579e39..aebb147784 100644 --- a/tools/analyzers/uintcast/BUILD.bazel +++ b/tools/analyzers/uintcast/BUILD.bazel @@ -3,7 +3,7 @@ load("@prysm//tools/go:def.bzl", "go_library", "go_test") go_library( name = "go_default_library", srcs = ["analyzer.go"], - importpath = "github.com/OffchainLabs/prysm/v6/tools/analyzers/uintcast", + importpath = "github.com/OffchainLabs/prysm/v7/tools/analyzers/uintcast", visibility = ["//visibility:public"], deps = [ "@com_github_gostaticanalysis_comment//:go_default_library", diff --git a/tools/analyzers/uintcast/analyzer_test.go b/tools/analyzers/uintcast/analyzer_test.go index 9d4aa41c42..af8cbfd463 100644 --- a/tools/analyzers/uintcast/analyzer_test.go +++ b/tools/analyzers/uintcast/analyzer_test.go @@ -3,8 +3,8 @@ package uintcast_test import ( 
"testing" - "github.com/OffchainLabs/prysm/v6/build/bazel" - "github.com/OffchainLabs/prysm/v6/tools/analyzers/uintcast" + "github.com/OffchainLabs/prysm/v7/build/bazel" + "github.com/OffchainLabs/prysm/v7/tools/analyzers/uintcast" "golang.org/x/tools/go/analysis/analysistest" ) diff --git a/tools/beacon-fuzz/BUILD.bazel b/tools/beacon-fuzz/BUILD.bazel index 9a886b328a..4701c39a11 100644 --- a/tools/beacon-fuzz/BUILD.bazel +++ b/tools/beacon-fuzz/BUILD.bazel @@ -5,7 +5,7 @@ load("@io_bazel_rules_go//go:def.bzl", "go_binary", "go_library") go_library( name = "go_default_library", srcs = ["main.go"], - importpath = "github.com/OffchainLabs/prysm/v6/tools/beacon-fuzz", + importpath = "github.com/OffchainLabs/prysm/v7/tools/beacon-fuzz", visibility = ["//visibility:private"], deps = [ "//io/file:go_default_library", diff --git a/tools/beacon-fuzz/main.go b/tools/beacon-fuzz/main.go index e7d8d69e2c..409dabad06 100644 --- a/tools/beacon-fuzz/main.go +++ b/tools/beacon-fuzz/main.go @@ -9,7 +9,7 @@ import ( "strconv" "text/template" - "github.com/OffchainLabs/prysm/v6/io/file" + "github.com/OffchainLabs/prysm/v7/io/file" ) var ( diff --git a/tools/benchmark-files-gen/BUILD.bazel b/tools/benchmark-files-gen/BUILD.bazel index 37783274cc..a8e20d2a54 100644 --- a/tools/benchmark-files-gen/BUILD.bazel +++ b/tools/benchmark-files-gen/BUILD.bazel @@ -5,7 +5,7 @@ go_library( name = "go_default_library", testonly = True, srcs = ["main.go"], - importpath = "github.com/OffchainLabs/prysm/v6/tools/benchmark-files-gen", + importpath = "github.com/OffchainLabs/prysm/v7/tools/benchmark-files-gen", visibility = ["//visibility:private"], deps = [ "//beacon-chain/core/helpers:go_default_library", diff --git a/tools/benchmark-files-gen/main.go b/tools/benchmark-files-gen/main.go index b7368edda6..840e2d8789 100644 --- a/tools/benchmark-files-gen/main.go +++ b/tools/benchmark-files-gen/main.go @@ -6,20 +6,20 @@ import ( "os" "path" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/signing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/time" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/transition" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/io/file" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/interop" - "github.com/OffchainLabs/prysm/v6/testing/benchmark" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/signing" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/time" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/transition" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/io/file" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/interop" + "github.com/OffchainLabs/prysm/v7/testing/benchmark" + 
"github.com/OffchainLabs/prysm/v7/testing/util" "github.com/pkg/errors" log "github.com/sirupsen/logrus" ) diff --git a/tools/blocktree/BUILD.bazel b/tools/blocktree/BUILD.bazel index 06425883d5..24d98ff8be 100644 --- a/tools/blocktree/BUILD.bazel +++ b/tools/blocktree/BUILD.bazel @@ -4,7 +4,7 @@ load("@io_bazel_rules_go//go:def.bzl", "go_binary") go_library( name = "go_default_library", srcs = ["main.go"], - importpath = "github.com/OffchainLabs/prysm/v6/tools/blocktree", + importpath = "github.com/OffchainLabs/prysm/v7/tools/blocktree", visibility = ["//visibility:private"], deps = [ "//beacon-chain/db/filters:go_default_library", diff --git a/tools/blocktree/main.go b/tools/blocktree/main.go index 267f602768..a5bfe2822a 100644 --- a/tools/blocktree/main.go +++ b/tools/blocktree/main.go @@ -15,9 +15,9 @@ import ( "fmt" "strconv" - "github.com/OffchainLabs/prysm/v6/beacon-chain/db/filters" - "github.com/OffchainLabs/prysm/v6/beacon-chain/db/kv" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/beacon-chain/db/filters" + "github.com/OffchainLabs/prysm/v7/beacon-chain/db/kv" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" "github.com/emicklei/dot" ) diff --git a/tools/bootnode/BUILD.bazel b/tools/bootnode/BUILD.bazel index bb04620905..32db0f785d 100644 --- a/tools/bootnode/BUILD.bazel +++ b/tools/bootnode/BUILD.bazel @@ -5,7 +5,7 @@ load("//tools:prysm_image.bzl", "prysm_image_upload") go_library( name = "go_default_library", srcs = ["bootnode.go"], - importpath = "github.com/OffchainLabs/prysm/v6/tools/bootnode", + importpath = "github.com/OffchainLabs/prysm/v7/tools/bootnode", visibility = ["//visibility:private"], deps = [ "//async:go_default_library", diff --git a/tools/bootnode/bootnode.go b/tools/bootnode/bootnode.go index c0f955e873..16af7358a3 100644 --- a/tools/bootnode/bootnode.go +++ b/tools/bootnode/bootnode.go @@ -24,16 +24,16 @@ import ( "time" "github.com/OffchainLabs/go-bitfield" - "github.com/OffchainLabs/prysm/v6/async" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/signing" - "github.com/OffchainLabs/prysm/v6/config/params" - ecdsaprysm "github.com/OffchainLabs/prysm/v6/crypto/ecdsa" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/io/logs" - "github.com/OffchainLabs/prysm/v6/network" - pb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - _ "github.com/OffchainLabs/prysm/v6/runtime/maxprocs" - "github.com/OffchainLabs/prysm/v6/runtime/version" + "github.com/OffchainLabs/prysm/v7/async" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/signing" + "github.com/OffchainLabs/prysm/v7/config/params" + ecdsaprysm "github.com/OffchainLabs/prysm/v7/crypto/ecdsa" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/io/logs" + "github.com/OffchainLabs/prysm/v7/network" + pb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + _ "github.com/OffchainLabs/prysm/v7/runtime/maxprocs" + "github.com/OffchainLabs/prysm/v7/runtime/version" gcrypto "github.com/ethereum/go-ethereum/crypto" gethlog "github.com/ethereum/go-ethereum/log" "github.com/ethereum/go-ethereum/p2p/discover" diff --git a/tools/bootnode/bootnode_test.go b/tools/bootnode/bootnode_test.go index 454b49747a..b78e4cc3bb 100644 --- a/tools/bootnode/bootnode_test.go +++ b/tools/bootnode/bootnode_test.go @@ -9,11 +9,11 @@ import ( "testing" "time" - ecdsaprysm "github.com/OffchainLabs/prysm/v6/crypto/ecdsa" - "github.com/OffchainLabs/prysm/v6/network" - _ 
"github.com/OffchainLabs/prysm/v6/runtime/maxprocs" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" + ecdsaprysm "github.com/OffchainLabs/prysm/v7/crypto/ecdsa" + "github.com/OffchainLabs/prysm/v7/network" + _ "github.com/OffchainLabs/prysm/v7/runtime/maxprocs" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" "github.com/ethereum/go-ethereum/p2p/discover" "github.com/ethereum/go-ethereum/p2p/enode" "github.com/libp2p/go-libp2p/core/crypto" diff --git a/tools/enr-calculator/BUILD.bazel b/tools/enr-calculator/BUILD.bazel index 4c09e30708..9acd7e5ee8 100644 --- a/tools/enr-calculator/BUILD.bazel +++ b/tools/enr-calculator/BUILD.bazel @@ -5,7 +5,7 @@ load("//tools:prysm_image.bzl", "prysm_image_upload") go_library( name = "go_default_library", srcs = ["main.go"], - importpath = "github.com/OffchainLabs/prysm/v6/tools/enr-calculator", + importpath = "github.com/OffchainLabs/prysm/v7/tools/enr-calculator", visibility = ["//visibility:private"], deps = [ "//crypto/ecdsa:go_default_library", diff --git a/tools/enr-calculator/main.go b/tools/enr-calculator/main.go index 3a5af8361f..dadf8fcc86 100644 --- a/tools/enr-calculator/main.go +++ b/tools/enr-calculator/main.go @@ -9,9 +9,9 @@ import ( "flag" "net" - ecdsaprysm "github.com/OffchainLabs/prysm/v6/crypto/ecdsa" - "github.com/OffchainLabs/prysm/v6/io/file" - _ "github.com/OffchainLabs/prysm/v6/runtime/maxprocs" + ecdsaprysm "github.com/OffchainLabs/prysm/v7/crypto/ecdsa" + "github.com/OffchainLabs/prysm/v7/io/file" + _ "github.com/OffchainLabs/prysm/v7/runtime/maxprocs" "github.com/ethereum/go-ethereum/p2p/enode" "github.com/ethereum/go-ethereum/p2p/enr" "github.com/libp2p/go-libp2p/core/crypto" diff --git a/tools/eth1exporter/BUILD.bazel b/tools/eth1exporter/BUILD.bazel index 40c055ab0f..976ad8b4d1 100644 --- a/tools/eth1exporter/BUILD.bazel +++ b/tools/eth1exporter/BUILD.bazel @@ -5,7 +5,7 @@ load("//tools:prysm_image.bzl", "prysm_image_upload") go_library( name = "go_default_library", srcs = ["main.go"], - importpath = "github.com/OffchainLabs/prysm/v6/tools/eth1exporter", + importpath = "github.com/OffchainLabs/prysm/v7/tools/eth1exporter", visibility = ["//visibility:private"], deps = [ "//runtime/maxprocs:go_default_library", diff --git a/tools/eth1exporter/main.go b/tools/eth1exporter/main.go index 46e29230c9..2f5c3685b6 100644 --- a/tools/eth1exporter/main.go +++ b/tools/eth1exporter/main.go @@ -14,7 +14,7 @@ import ( "strings" "time" - _ "github.com/OffchainLabs/prysm/v6/runtime/maxprocs" + _ "github.com/OffchainLabs/prysm/v7/runtime/maxprocs" "github.com/ethereum/go-ethereum/common" "github.com/ethereum/go-ethereum/ethclient" "github.com/ethereum/go-ethereum/params" diff --git a/tools/exploredb/BUILD.bazel b/tools/exploredb/BUILD.bazel index 48f6651a74..f88184ae60 100644 --- a/tools/exploredb/BUILD.bazel +++ b/tools/exploredb/BUILD.bazel @@ -4,7 +4,7 @@ load("@prysm//tools/go:def.bzl", "go_library") go_library( name = "go_default_library", srcs = ["main.go"], - importpath = "github.com/OffchainLabs/prysm/v6/tools/exploredb", + importpath = "github.com/OffchainLabs/prysm/v7/tools/exploredb", visibility = ["//visibility:private"], deps = [ "//beacon-chain/db/kv:go_default_library", diff --git a/tools/exploredb/main.go b/tools/exploredb/main.go index e4f6bf5ff3..ce410e7d57 100644 --- a/tools/exploredb/main.go +++ b/tools/exploredb/main.go @@ -17,12 +17,12 @@ import ( "sync" "time" - 
"github.com/OffchainLabs/prysm/v6/beacon-chain/db/kv" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/beacon-chain/db/kv" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" "github.com/dustin/go-humanize" log "github.com/sirupsen/logrus" "github.com/status-im/keycard-go/hexutils" diff --git a/tools/extractor/BUILD.bazel b/tools/extractor/BUILD.bazel index 2cc9bc288d..f7d25d189e 100644 --- a/tools/extractor/BUILD.bazel +++ b/tools/extractor/BUILD.bazel @@ -4,7 +4,7 @@ load("@io_bazel_rules_go//go:def.bzl", "go_binary") go_library( name = "go_default_library", srcs = ["main.go"], - importpath = "github.com/OffchainLabs/prysm/v6/tools/extractor", + importpath = "github.com/OffchainLabs/prysm/v7/tools/extractor", visibility = ["//visibility:public"], deps = [ "//beacon-chain/core/transition/interop:go_default_library", diff --git a/tools/extractor/main.go b/tools/extractor/main.go index b7609a58cc..bf961570ae 100644 --- a/tools/extractor/main.go +++ b/tools/extractor/main.go @@ -5,10 +5,10 @@ import ( "flag" "fmt" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/transition/interop" - "github.com/OffchainLabs/prysm/v6/beacon-chain/db/kv" - "github.com/OffchainLabs/prysm/v6/config/features" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/transition/interop" + "github.com/OffchainLabs/prysm/v7/beacon-chain/db/kv" + "github.com/OffchainLabs/prysm/v7/config/features" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" ) var ( diff --git a/tools/forkchecker/BUILD.bazel b/tools/forkchecker/BUILD.bazel index ca0bae477a..be01e90c80 100644 --- a/tools/forkchecker/BUILD.bazel +++ b/tools/forkchecker/BUILD.bazel @@ -4,7 +4,7 @@ load("@io_bazel_rules_go//go:def.bzl", "go_binary") go_library( name = "go_default_library", srcs = ["forkchecker.go"], - importpath = "github.com/OffchainLabs/prysm/v6/tools/forkchecker", + importpath = "github.com/OffchainLabs/prysm/v7/tools/forkchecker", visibility = ["//visibility:private"], deps = [ "//config/params:go_default_library", diff --git a/tools/forkchecker/forkchecker.go b/tools/forkchecker/forkchecker.go index 7947384dd7..dcc716e2e7 100644 --- a/tools/forkchecker/forkchecker.go +++ b/tools/forkchecker/forkchecker.go @@ -16,9 +16,9 @@ import ( "reflect" "time" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - pb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + pb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" "github.com/sirupsen/logrus" "google.golang.org/grpc" "google.golang.org/protobuf/types/known/emptypb" diff --git a/tools/gocovmerge/BUILD.bazel b/tools/gocovmerge/BUILD.bazel index b2f4f6b2ba..b73c2a9a4c 100644 --- a/tools/gocovmerge/BUILD.bazel +++ b/tools/gocovmerge/BUILD.bazel @@ -4,7 +4,7 @@ load("@prysm//tools/go:def.bzl", "go_library") go_library( name = 
"go_default_library", srcs = ["main.go"], - importpath = "github.com/OffchainLabs/prysm/v6/tools/gocovmerge", + importpath = "github.com/OffchainLabs/prysm/v7/tools/gocovmerge", visibility = ["//visibility:private"], deps = [ "@com_github_sirupsen_logrus//:go_default_library", diff --git a/tools/http-request-sink/BUILD.bazel b/tools/http-request-sink/BUILD.bazel index 627f5a6bdb..c1ab0296fd 100644 --- a/tools/http-request-sink/BUILD.bazel +++ b/tools/http-request-sink/BUILD.bazel @@ -5,7 +5,7 @@ load("//tools:prysm_image.bzl", "prysm_image_upload") go_library( name = "go_default_library", srcs = ["main.go"], - importpath = "github.com/OffchainLabs/prysm/v6/tools/http-request-sink", + importpath = "github.com/OffchainLabs/prysm/v7/tools/http-request-sink", visibility = ["//visibility:private"], deps = ["//config/params:go_default_library"], ) diff --git a/tools/http-request-sink/main.go b/tools/http-request-sink/main.go index 8e07d6fb48..7690220972 100644 --- a/tools/http-request-sink/main.go +++ b/tools/http-request-sink/main.go @@ -15,7 +15,7 @@ import ( "strconv" "time" - "github.com/OffchainLabs/prysm/v6/config/params" + "github.com/OffchainLabs/prysm/v7/config/params" ) func main() { diff --git a/tools/http-request-sink/main_test.go b/tools/http-request-sink/main_test.go index e653a5a808..b4d6474073 100644 --- a/tools/http-request-sink/main_test.go +++ b/tools/http-request-sink/main_test.go @@ -9,8 +9,8 @@ import ( "path/filepath" "testing" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/testing/require" ) type sampleRPCRequest struct { diff --git a/tools/interop/convert-keys/BUILD.bazel b/tools/interop/convert-keys/BUILD.bazel index 3d836e016a..345de67a28 100644 --- a/tools/interop/convert-keys/BUILD.bazel +++ b/tools/interop/convert-keys/BUILD.bazel @@ -4,7 +4,7 @@ load("@io_bazel_rules_go//go:def.bzl", "go_binary") go_library( name = "go_default_library", srcs = ["main.go"], - importpath = "github.com/OffchainLabs/prysm/v6/tools/interop/convert-keys", + importpath = "github.com/OffchainLabs/prysm/v7/tools/interop/convert-keys", visibility = ["//visibility:public"], deps = [ "//config/params:go_default_library", diff --git a/tools/interop/convert-keys/main.go b/tools/interop/convert-keys/main.go index 6a959bb181..8eb79ab712 100644 --- a/tools/interop/convert-keys/main.go +++ b/tools/interop/convert-keys/main.go @@ -9,8 +9,8 @@ import ( "fmt" "os" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/tools/unencrypted-keys-gen/keygen" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/tools/unencrypted-keys-gen/keygen" log "github.com/sirupsen/logrus" "gopkg.in/yaml.v2" ) diff --git a/tools/interop/export-genesis/BUILD.bazel b/tools/interop/export-genesis/BUILD.bazel index c7a89e30a5..e8dfd808bc 100644 --- a/tools/interop/export-genesis/BUILD.bazel +++ b/tools/interop/export-genesis/BUILD.bazel @@ -4,7 +4,7 @@ load("@io_bazel_rules_go//go:def.bzl", "go_binary") go_library( name = "go_default_library", srcs = ["main.go"], - importpath = "github.com/OffchainLabs/prysm/v6/tools/interop/export-genesis", + importpath = "github.com/OffchainLabs/prysm/v7/tools/interop/export-genesis", visibility = ["//visibility:private"], deps = [ "//beacon-chain/db/kv:go_default_library", diff --git a/tools/interop/export-genesis/main.go b/tools/interop/export-genesis/main.go index 
1832be686a..7c83c97423 100644 --- a/tools/interop/export-genesis/main.go +++ b/tools/interop/export-genesis/main.go @@ -5,8 +5,8 @@ import ( "fmt" "os" - "github.com/OffchainLabs/prysm/v6/beacon-chain/db/kv" - "github.com/OffchainLabs/prysm/v6/io/file" + "github.com/OffchainLabs/prysm/v7/beacon-chain/db/kv" + "github.com/OffchainLabs/prysm/v7/io/file" ) // A basic tool to extract genesis.ssz from existing beaconchain.db. diff --git a/tools/interop/split-keys/BUILD.bazel b/tools/interop/split-keys/BUILD.bazel index 07ee848a43..ad25aad0c5 100644 --- a/tools/interop/split-keys/BUILD.bazel +++ b/tools/interop/split-keys/BUILD.bazel @@ -4,7 +4,7 @@ load("@prysm//tools/go:def.bzl", "go_library", "go_test") go_library( name = "go_default_library", srcs = ["main.go"], - importpath = "github.com/OffchainLabs/prysm/v6/tools/interop/split-keys", + importpath = "github.com/OffchainLabs/prysm/v7/tools/interop/split-keys", visibility = ["//visibility:private"], deps = [ "//io/file:go_default_library", diff --git a/tools/interop/split-keys/main.go b/tools/interop/split-keys/main.go index bb1d629d9d..f035d86a32 100644 --- a/tools/interop/split-keys/main.go +++ b/tools/interop/split-keys/main.go @@ -20,11 +20,11 @@ import ( "os" "path" - "github.com/OffchainLabs/prysm/v6/io/file" - "github.com/OffchainLabs/prysm/v6/validator/accounts/wallet" - "github.com/OffchainLabs/prysm/v6/validator/keymanager" - "github.com/OffchainLabs/prysm/v6/validator/keymanager/derived" - "github.com/OffchainLabs/prysm/v6/validator/keymanager/local" + "github.com/OffchainLabs/prysm/v7/io/file" + "github.com/OffchainLabs/prysm/v7/validator/accounts/wallet" + "github.com/OffchainLabs/prysm/v7/validator/keymanager" + "github.com/OffchainLabs/prysm/v7/validator/keymanager/derived" + "github.com/OffchainLabs/prysm/v7/validator/keymanager/local" "github.com/tyler-smith/go-bip39" util "github.com/wealdtech/go-eth2-util" ) diff --git a/tools/interop/split-keys/main_test.go b/tools/interop/split-keys/main_test.go index e1bc657bec..b3e752e93f 100644 --- a/tools/interop/split-keys/main_test.go +++ b/tools/interop/split-keys/main_test.go @@ -8,10 +8,10 @@ import ( "strings" "testing" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/validator/accounts/wallet" - "github.com/OffchainLabs/prysm/v6/validator/keymanager" - "github.com/OffchainLabs/prysm/v6/validator/keymanager/local" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/validator/accounts/wallet" + "github.com/OffchainLabs/prysm/v7/validator/keymanager" + "github.com/OffchainLabs/prysm/v7/validator/keymanager/local" ) const testMnemonic = "abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon about" diff --git a/tools/keystores/BUILD.bazel b/tools/keystores/BUILD.bazel index fa0c0dc635..bf66d7c974 100644 --- a/tools/keystores/BUILD.bazel +++ b/tools/keystores/BUILD.bazel @@ -4,7 +4,7 @@ load("@prysm//tools/go:def.bzl", "go_library", "go_test") go_library( name = "go_default_library", srcs = ["main.go"], - importpath = "github.com/OffchainLabs/prysm/v6/tools/keystores", + importpath = "github.com/OffchainLabs/prysm/v7/tools/keystores", visibility = ["//visibility:private"], deps = [ "//crypto/bls:go_default_library", diff --git a/tools/keystores/main.go b/tools/keystores/main.go index 2ed0a8a06a..c35e11ecc7 100644 --- a/tools/keystores/main.go +++ b/tools/keystores/main.go @@ -13,10 +13,10 @@ import ( "path/filepath" "strings" - "github.com/OffchainLabs/prysm/v6/crypto/bls" 
- "github.com/OffchainLabs/prysm/v6/io/file" - "github.com/OffchainLabs/prysm/v6/io/prompt" - "github.com/OffchainLabs/prysm/v6/validator/keymanager" + "github.com/OffchainLabs/prysm/v7/crypto/bls" + "github.com/OffchainLabs/prysm/v7/io/file" + "github.com/OffchainLabs/prysm/v7/io/prompt" + "github.com/OffchainLabs/prysm/v7/validator/keymanager" "github.com/google/uuid" "github.com/logrusorgru/aurora" "github.com/pkg/errors" diff --git a/tools/keystores/main_test.go b/tools/keystores/main_test.go index 29882279d0..541a559eca 100644 --- a/tools/keystores/main_test.go +++ b/tools/keystores/main_test.go @@ -10,11 +10,11 @@ import ( "strings" "testing" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/crypto/bls" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/validator/keymanager" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/crypto/bls" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/validator/keymanager" "github.com/google/uuid" "github.com/urfave/cli/v2" keystorev4 "github.com/wealdtech/go-eth2-wallet-encryptor-keystorev4" diff --git a/tools/nogo_config/BUILD.bazel b/tools/nogo_config/BUILD.bazel index fdc73ab5fb..257aec6fc7 100644 --- a/tools/nogo_config/BUILD.bazel +++ b/tools/nogo_config/BUILD.bazel @@ -10,7 +10,7 @@ go_library( "config_exclusion.go", "main.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/tools/nogo_config", + importpath = "github.com/OffchainLabs/prysm/v7/tools/nogo_config", visibility = ["//visibility:private"], ) diff --git a/tools/pcli/BUILD.bazel b/tools/pcli/BUILD.bazel index 2a90ef8f4c..e1d2932781 100644 --- a/tools/pcli/BUILD.bazel +++ b/tools/pcli/BUILD.bazel @@ -5,7 +5,7 @@ load("//tools:prysm_image.bzl", "prysm_image_upload") go_library( name = "go_default_library", srcs = ["main.go"], - importpath = "github.com/OffchainLabs/prysm/v6/tools/pcli", + importpath = "github.com/OffchainLabs/prysm/v7/tools/pcli", visibility = ["//visibility:private"], deps = [ "//beacon-chain/core/epoch/precompute:go_default_library", diff --git a/tools/pcli/main.go b/tools/pcli/main.go index 460e8a1ff0..3596c7e457 100644 --- a/tools/pcli/main.go +++ b/tools/pcli/main.go @@ -10,17 +10,17 @@ import ( "strings" "time" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/epoch/precompute" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/transition" - "github.com/OffchainLabs/prysm/v6/beacon-chain/state" - state_native "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - "github.com/OffchainLabs/prysm/v6/encoding/ssz/detect" - "github.com/OffchainLabs/prysm/v6/encoding/ssz/equality" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - prefixed "github.com/OffchainLabs/prysm/v6/runtime/logging/logrus-prefixed-formatter" - "github.com/OffchainLabs/prysm/v6/runtime/version" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/epoch/precompute" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/transition" + "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + 
"github.com/OffchainLabs/prysm/v7/encoding/ssz/detect" + "github.com/OffchainLabs/prysm/v7/encoding/ssz/equality" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + prefixed "github.com/OffchainLabs/prysm/v7/runtime/logging/logrus-prefixed-formatter" + "github.com/OffchainLabs/prysm/v7/runtime/version" "github.com/kr/pretty" "github.com/pkg/errors" fssz "github.com/prysmaticlabs/fastssz" diff --git a/tools/replay-http/BUILD.bazel b/tools/replay-http/BUILD.bazel index 5554e2f394..0a898b2286 100644 --- a/tools/replay-http/BUILD.bazel +++ b/tools/replay-http/BUILD.bazel @@ -4,7 +4,7 @@ load("@prysm//tools/go:def.bzl", "go_library") go_library( name = "go_default_library", srcs = ["main.go"], - importpath = "github.com/OffchainLabs/prysm/v6/tools/replay-http", + importpath = "github.com/OffchainLabs/prysm/v7/tools/replay-http", visibility = ["//visibility:private"], deps = [ "@com_github_pkg_errors//:go_default_library", diff --git a/tools/specs-checker/BUILD.bazel b/tools/specs-checker/BUILD.bazel index df540c28fc..6faeeb3c6c 100644 --- a/tools/specs-checker/BUILD.bazel +++ b/tools/specs-checker/BUILD.bazel @@ -16,7 +16,7 @@ go_library( "data/ssz/merkle-proofs.md", "data/extra.md", ], - importpath = "github.com/OffchainLabs/prysm/v6/tools/specs-checker", + importpath = "github.com/OffchainLabs/prysm/v7/tools/specs-checker", visibility = ["//visibility:public"], deps = [ "//config/params:go_default_library", diff --git a/tools/specs-checker/download.go b/tools/specs-checker/download.go index aa537ddecd..414b58ebd9 100644 --- a/tools/specs-checker/download.go +++ b/tools/specs-checker/download.go @@ -10,7 +10,7 @@ import ( "path/filepath" "regexp" - "github.com/OffchainLabs/prysm/v6/config/params" + "github.com/OffchainLabs/prysm/v7/config/params" "github.com/urfave/cli/v2" ) diff --git a/tools/unencrypted-keys-gen/BUILD.bazel b/tools/unencrypted-keys-gen/BUILD.bazel index bd876d4d54..819f00e3f1 100644 --- a/tools/unencrypted-keys-gen/BUILD.bazel +++ b/tools/unencrypted-keys-gen/BUILD.bazel @@ -4,7 +4,7 @@ load("@io_bazel_rules_go//go:def.bzl", "go_binary") go_library( name = "go_default_library", srcs = ["main.go"], - importpath = "github.com/OffchainLabs/prysm/v6/tools/unencrypted-keys-gen", + importpath = "github.com/OffchainLabs/prysm/v7/tools/unencrypted-keys-gen", visibility = [ "//tools/interop/convert-keys:__pkg__", ], diff --git a/tools/unencrypted-keys-gen/keygen/BUILD.bazel b/tools/unencrypted-keys-gen/keygen/BUILD.bazel index cc7e41ba22..65adf6b7c7 100644 --- a/tools/unencrypted-keys-gen/keygen/BUILD.bazel +++ b/tools/unencrypted-keys-gen/keygen/BUILD.bazel @@ -3,7 +3,7 @@ load("@prysm//tools/go:def.bzl", "go_library") go_library( name = "go_default_library", srcs = ["keygen.go"], - importpath = "github.com/OffchainLabs/prysm/v6/tools/unencrypted-keys-gen/keygen", + importpath = "github.com/OffchainLabs/prysm/v7/tools/unencrypted-keys-gen/keygen", visibility = ["//visibility:public"], deps = ["@com_github_sirupsen_logrus//:go_default_library"], ) diff --git a/tools/unencrypted-keys-gen/main.go b/tools/unencrypted-keys-gen/main.go index b4a55bc632..8729621774 100644 --- a/tools/unencrypted-keys-gen/main.go +++ b/tools/unencrypted-keys-gen/main.go @@ -5,9 +5,9 @@ import ( "log" "os" - "github.com/OffchainLabs/prysm/v6/crypto/bls" - "github.com/OffchainLabs/prysm/v6/runtime/interop" - "github.com/OffchainLabs/prysm/v6/tools/unencrypted-keys-gen/keygen" + "github.com/OffchainLabs/prysm/v7/crypto/bls" + "github.com/OffchainLabs/prysm/v7/runtime/interop" + 
"github.com/OffchainLabs/prysm/v7/tools/unencrypted-keys-gen/keygen" ) var ( diff --git a/tools/unencrypted-keys-gen/main_test.go b/tools/unencrypted-keys-gen/main_test.go index a6e2d0544d..6737b194e3 100644 --- a/tools/unencrypted-keys-gen/main_test.go +++ b/tools/unencrypted-keys-gen/main_test.go @@ -5,9 +5,9 @@ import ( "encoding/json" "testing" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/tools/unencrypted-keys-gen/keygen" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/tools/unencrypted-keys-gen/keygen" ) func TestSavesUnencryptedKeys(t *testing.T) { diff --git a/validator/accounts/BUILD.bazel b/validator/accounts/BUILD.bazel index 7137f6d181..873ea7913e 100644 --- a/validator/accounts/BUILD.bazel +++ b/validator/accounts/BUILD.bazel @@ -17,7 +17,7 @@ go_library( "wallet_create.go", "wallet_recover.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/validator/accounts", + importpath = "github.com/OffchainLabs/prysm/v7/validator/accounts", visibility = [ "//cmd/validator:__subpackages__", "//validator:__pkg__", diff --git a/validator/accounts/accounts.go b/validator/accounts/accounts.go index 5bc61c9cdb..d4e90930fe 100644 --- a/validator/accounts/accounts.go +++ b/validator/accounts/accounts.go @@ -1,7 +1,7 @@ package accounts import ( - "github.com/OffchainLabs/prysm/v6/validator/keymanager" + "github.com/OffchainLabs/prysm/v7/validator/keymanager" ) var ( diff --git a/validator/accounts/accounts_backup.go b/validator/accounts/accounts_backup.go index df34255c55..e9d52a21d1 100644 --- a/validator/accounts/accounts_backup.go +++ b/validator/accounts/accounts_backup.go @@ -8,8 +8,8 @@ import ( "os" "path/filepath" - "github.com/OffchainLabs/prysm/v6/io/file" - "github.com/OffchainLabs/prysm/v6/validator/keymanager" + "github.com/OffchainLabs/prysm/v7/io/file" + "github.com/OffchainLabs/prysm/v7/validator/keymanager" "github.com/logrusorgru/aurora" "github.com/pkg/errors" ) diff --git a/validator/accounts/accounts_delete.go b/validator/accounts/accounts_delete.go index d4403bd0c9..95dc690a63 100644 --- a/validator/accounts/accounts_delete.go +++ b/validator/accounts/accounts_delete.go @@ -5,9 +5,9 @@ import ( "fmt" "strings" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/io/prompt" - "github.com/OffchainLabs/prysm/v6/validator/keymanager" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/io/prompt" + "github.com/OffchainLabs/prysm/v7/validator/keymanager" "github.com/pkg/errors" ) diff --git a/validator/accounts/accounts_delete_test.go b/validator/accounts/accounts_delete_test.go index f68bc0f282..b8af5e1d53 100644 --- a/validator/accounts/accounts_delete_test.go +++ b/validator/accounts/accounts_delete_test.go @@ -4,11 +4,11 @@ import ( "bytes" "testing" - "github.com/OffchainLabs/prysm/v6/crypto/bls" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/validator/keymanager" - "github.com/OffchainLabs/prysm/v6/validator/keymanager/local" + "github.com/OffchainLabs/prysm/v7/crypto/bls" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/validator/keymanager" + "github.com/OffchainLabs/prysm/v7/validator/keymanager/local" 
"github.com/ethereum/go-ethereum/common/hexutil" logTest "github.com/sirupsen/logrus/hooks/test" ) diff --git a/validator/accounts/accounts_exit.go b/validator/accounts/accounts_exit.go index 76240ac7bd..ac4ca3ffdb 100644 --- a/validator/accounts/accounts_exit.go +++ b/validator/accounts/accounts_exit.go @@ -8,16 +8,16 @@ import ( "path" "strings" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/blocks" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/io/file" - eth "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/validator/client" - beacon_api "github.com/OffchainLabs/prysm/v6/validator/client/beacon-api" - "github.com/OffchainLabs/prysm/v6/validator/client/iface" - "github.com/OffchainLabs/prysm/v6/validator/keymanager" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/blocks" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/io/file" + eth "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/validator/client" + beacon_api "github.com/OffchainLabs/prysm/v7/validator/client/beacon-api" + "github.com/OffchainLabs/prysm/v7/validator/client/iface" + "github.com/OffchainLabs/prysm/v7/validator/keymanager" "github.com/ethereum/go-ethereum/common/hexutil" "github.com/pkg/errors" "google.golang.org/protobuf/types/known/emptypb" diff --git a/validator/accounts/accounts_exit_test.go b/validator/accounts/accounts_exit_test.go index 5101549aa4..cd84cd1900 100644 --- a/validator/accounts/accounts_exit_test.go +++ b/validator/accounts/accounts_exit_test.go @@ -6,15 +6,15 @@ import ( "path" "testing" - "github.com/OffchainLabs/prysm/v6/api/server/structs" - "github.com/OffchainLabs/prysm/v6/build/bazel" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/io/file" - eth "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/api/server/structs" + "github.com/OffchainLabs/prysm/v7/build/bazel" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/io/file" + eth "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" "github.com/sirupsen/logrus/hooks/test" ) diff --git a/validator/accounts/accounts_helper.go b/validator/accounts/accounts_helper.go index 50fb0fd92b..9c743d6b38 100644 --- a/validator/accounts/accounts_helper.go +++ b/validator/accounts/accounts_helper.go @@ -7,13 +7,13 @@ import ( "strconv" "strings" - "github.com/OffchainLabs/prysm/v6/cmd/validator/flags" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/crypto/bls" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/io/prompt" - 
"github.com/OffchainLabs/prysm/v6/validator/accounts/petnames" - "github.com/OffchainLabs/prysm/v6/validator/accounts/userprompt" + "github.com/OffchainLabs/prysm/v7/cmd/validator/flags" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/crypto/bls" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/io/prompt" + "github.com/OffchainLabs/prysm/v7/validator/accounts/petnames" + "github.com/OffchainLabs/prysm/v7/validator/accounts/userprompt" "github.com/logrusorgru/aurora" "github.com/manifoldco/promptui" "github.com/pkg/errors" diff --git a/validator/accounts/accounts_import.go b/validator/accounts/accounts_import.go index b2fd4131eb..7970e9cee0 100644 --- a/validator/accounts/accounts_import.go +++ b/validator/accounts/accounts_import.go @@ -11,12 +11,12 @@ import ( "strconv" "strings" - "github.com/OffchainLabs/prysm/v6/crypto/bls" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/io/file" - "github.com/OffchainLabs/prysm/v6/io/prompt" - "github.com/OffchainLabs/prysm/v6/validator/accounts/wallet" - "github.com/OffchainLabs/prysm/v6/validator/keymanager" + "github.com/OffchainLabs/prysm/v7/crypto/bls" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/io/file" + "github.com/OffchainLabs/prysm/v7/io/prompt" + "github.com/OffchainLabs/prysm/v7/validator/accounts/wallet" + "github.com/OffchainLabs/prysm/v7/validator/keymanager" "github.com/google/uuid" "github.com/pkg/errors" keystorev4 "github.com/wealdtech/go-eth2-wallet-encryptor-keystorev4" diff --git a/validator/accounts/accounts_import_test.go b/validator/accounts/accounts_import_test.go index 8a38d6efef..d2d501e89e 100644 --- a/validator/accounts/accounts_import_test.go +++ b/validator/accounts/accounts_import_test.go @@ -8,14 +8,14 @@ import ( "sort" "testing" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/crypto/bls" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/validator/accounts/iface" - "github.com/OffchainLabs/prysm/v6/validator/keymanager" - "github.com/OffchainLabs/prysm/v6/validator/keymanager/local" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/crypto/bls" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/validator/accounts/iface" + "github.com/OffchainLabs/prysm/v7/validator/keymanager" + "github.com/OffchainLabs/prysm/v7/validator/keymanager/local" ) func TestImportAccounts_NoPassword(t *testing.T) { diff --git a/validator/accounts/accounts_list.go b/validator/accounts/accounts_list.go index b452be084c..a0d086982e 100644 --- a/validator/accounts/accounts_list.go +++ b/validator/accounts/accounts_list.go @@ -5,10 +5,10 @@ import ( "fmt" "math" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/validator/accounts/wallet" - "github.com/OffchainLabs/prysm/v6/validator/client/iface" - "github.com/OffchainLabs/prysm/v6/validator/keymanager" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/validator/accounts/wallet" + "github.com/OffchainLabs/prysm/v7/validator/client/iface" + 
"github.com/OffchainLabs/prysm/v7/validator/keymanager" "github.com/pkg/errors" ) diff --git a/validator/accounts/accounts_list_test.go b/validator/accounts/accounts_list_test.go index a1798a2134..c53c3bb2e4 100644 --- a/validator/accounts/accounts_list_test.go +++ b/validator/accounts/accounts_list_test.go @@ -11,18 +11,18 @@ import ( "strings" "testing" - "github.com/OffchainLabs/prysm/v6/cmd/validator/flags" - "github.com/OffchainLabs/prysm/v6/config/params" - types "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/crypto/bls" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - validatormock "github.com/OffchainLabs/prysm/v6/testing/validator-mock" - "github.com/OffchainLabs/prysm/v6/validator/keymanager" - "github.com/OffchainLabs/prysm/v6/validator/keymanager/derived" - "github.com/OffchainLabs/prysm/v6/validator/keymanager/local" - constant "github.com/OffchainLabs/prysm/v6/validator/testing" + "github.com/OffchainLabs/prysm/v7/cmd/validator/flags" + "github.com/OffchainLabs/prysm/v7/config/params" + types "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/crypto/bls" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + validatormock "github.com/OffchainLabs/prysm/v7/testing/validator-mock" + "github.com/OffchainLabs/prysm/v7/validator/keymanager" + "github.com/OffchainLabs/prysm/v7/validator/keymanager/derived" + "github.com/OffchainLabs/prysm/v7/validator/keymanager/local" + constant "github.com/OffchainLabs/prysm/v7/validator/testing" "github.com/google/uuid" "github.com/urfave/cli/v2" keystorev4 "github.com/wealdtech/go-eth2-wallet-encryptor-keystorev4" diff --git a/validator/accounts/cli_manager.go b/validator/accounts/cli_manager.go index 48f4dc5b3f..ae471bbe69 100644 --- a/validator/accounts/cli_manager.go +++ b/validator/accounts/cli_manager.go @@ -7,16 +7,16 @@ import ( "os" "time" - grpcutil "github.com/OffchainLabs/prysm/v6/api/grpc" - "github.com/OffchainLabs/prysm/v6/crypto/bls" - "github.com/OffchainLabs/prysm/v6/validator/accounts/wallet" - beaconApi "github.com/OffchainLabs/prysm/v6/validator/client/beacon-api" - iface "github.com/OffchainLabs/prysm/v6/validator/client/iface" - nodeClientFactory "github.com/OffchainLabs/prysm/v6/validator/client/node-client-factory" - validatorClientFactory "github.com/OffchainLabs/prysm/v6/validator/client/validator-client-factory" - validatorHelpers "github.com/OffchainLabs/prysm/v6/validator/helpers" - "github.com/OffchainLabs/prysm/v6/validator/keymanager" - "github.com/OffchainLabs/prysm/v6/validator/keymanager/derived" + grpcutil "github.com/OffchainLabs/prysm/v7/api/grpc" + "github.com/OffchainLabs/prysm/v7/crypto/bls" + "github.com/OffchainLabs/prysm/v7/validator/accounts/wallet" + beaconApi "github.com/OffchainLabs/prysm/v7/validator/client/beacon-api" + iface "github.com/OffchainLabs/prysm/v7/validator/client/iface" + nodeClientFactory "github.com/OffchainLabs/prysm/v7/validator/client/node-client-factory" + validatorClientFactory "github.com/OffchainLabs/prysm/v7/validator/client/validator-client-factory" + validatorHelpers "github.com/OffchainLabs/prysm/v7/validator/helpers" + "github.com/OffchainLabs/prysm/v7/validator/keymanager" + "github.com/OffchainLabs/prysm/v7/validator/keymanager/derived" 
"github.com/pkg/errors" "google.golang.org/grpc" ) diff --git a/validator/accounts/cli_options.go b/validator/accounts/cli_options.go index c87e6f1732..81c3b47662 100644 --- a/validator/accounts/cli_options.go +++ b/validator/accounts/cli_options.go @@ -4,9 +4,9 @@ import ( "io" "time" - "github.com/OffchainLabs/prysm/v6/crypto/bls" - "github.com/OffchainLabs/prysm/v6/validator/accounts/wallet" - "github.com/OffchainLabs/prysm/v6/validator/keymanager" + "github.com/OffchainLabs/prysm/v7/crypto/bls" + "github.com/OffchainLabs/prysm/v7/validator/accounts/wallet" + "github.com/OffchainLabs/prysm/v7/validator/keymanager" "google.golang.org/grpc" ) diff --git a/validator/accounts/iface/BUILD.bazel b/validator/accounts/iface/BUILD.bazel index a884a6156d..2b86a7fb54 100644 --- a/validator/accounts/iface/BUILD.bazel +++ b/validator/accounts/iface/BUILD.bazel @@ -3,7 +3,7 @@ load("@prysm//tools/go:def.bzl", "go_library") go_library( name = "go_default_library", srcs = ["wallet.go"], - importpath = "github.com/OffchainLabs/prysm/v6/validator/accounts/iface", + importpath = "github.com/OffchainLabs/prysm/v7/validator/accounts/iface", visibility = [ "//cmd:__subpackages__", "//validator:__pkg__", diff --git a/validator/accounts/iface/wallet.go b/validator/accounts/iface/wallet.go index 582ec68721..1a51774d6e 100644 --- a/validator/accounts/iface/wallet.go +++ b/validator/accounts/iface/wallet.go @@ -3,8 +3,8 @@ package iface import ( "context" - "github.com/OffchainLabs/prysm/v6/validator/keymanager" - remoteweb3signer "github.com/OffchainLabs/prysm/v6/validator/keymanager/remote-web3signer" + "github.com/OffchainLabs/prysm/v7/validator/keymanager" + remoteweb3signer "github.com/OffchainLabs/prysm/v7/validator/keymanager/remote-web3signer" ) // InitKeymanagerConfig defines configuration options for initializing a keymanager. 
diff --git a/validator/accounts/petnames/BUILD.bazel b/validator/accounts/petnames/BUILD.bazel index 2718909c07..b2b3e256fa 100644 --- a/validator/accounts/petnames/BUILD.bazel +++ b/validator/accounts/petnames/BUILD.bazel @@ -3,7 +3,7 @@ load("@prysm//tools/go:def.bzl", "go_library") go_library( name = "go_default_library", srcs = ["names.go"], - importpath = "github.com/OffchainLabs/prysm/v6/validator/accounts/petnames", + importpath = "github.com/OffchainLabs/prysm/v7/validator/accounts/petnames", visibility = [ "//cmd/validator:__subpackages__", "//validator:__subpackages__", diff --git a/validator/accounts/petnames/names.go b/validator/accounts/petnames/names.go index a007e47d95..d78b6b1d25 100644 --- a/validator/accounts/petnames/names.go +++ b/validator/accounts/petnames/names.go @@ -3,8 +3,8 @@ package petnames import ( "strings" - "github.com/OffchainLabs/prysm/v6/crypto/hash" - "github.com/OffchainLabs/prysm/v6/crypto/rand" + "github.com/OffchainLabs/prysm/v7/crypto/hash" + "github.com/OffchainLabs/prysm/v7/crypto/rand" ) var ( diff --git a/validator/accounts/testing/BUILD.bazel b/validator/accounts/testing/BUILD.bazel index 399aa3be73..65ee614c9c 100644 --- a/validator/accounts/testing/BUILD.bazel +++ b/validator/accounts/testing/BUILD.bazel @@ -4,7 +4,7 @@ go_library( name = "go_default_library", testonly = 1, srcs = ["mock.go"], - importpath = "github.com/OffchainLabs/prysm/v6/validator/accounts/testing", + importpath = "github.com/OffchainLabs/prysm/v7/validator/accounts/testing", visibility = [ "//validator:__pkg__", "//validator:__subpackages__", diff --git a/validator/accounts/testing/mock.go b/validator/accounts/testing/mock.go index ee20974afd..80a06acf8c 100644 --- a/validator/accounts/testing/mock.go +++ b/validator/accounts/testing/mock.go @@ -9,8 +9,8 @@ import ( "strings" "sync" - "github.com/OffchainLabs/prysm/v6/validator/accounts/iface" - "github.com/OffchainLabs/prysm/v6/validator/keymanager" + "github.com/OffchainLabs/prysm/v7/validator/accounts/iface" + "github.com/OffchainLabs/prysm/v7/validator/keymanager" ) // Wallet contains an in-memory, simulated wallet implementation. 
diff --git a/validator/accounts/userprompt/BUILD.bazel b/validator/accounts/userprompt/BUILD.bazel index e3cd4eb97b..878edbf036 100644 --- a/validator/accounts/userprompt/BUILD.bazel +++ b/validator/accounts/userprompt/BUILD.bazel @@ -6,7 +6,7 @@ go_library( "log.go", "prompt.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/validator/accounts/userprompt", + importpath = "github.com/OffchainLabs/prysm/v7/validator/accounts/userprompt", visibility = [ "//cmd:__subpackages__", "//validator:__subpackages__", diff --git a/validator/accounts/userprompt/prompt.go b/validator/accounts/userprompt/prompt.go index dc433af0d2..15a7345a44 100644 --- a/validator/accounts/userprompt/prompt.go +++ b/validator/accounts/userprompt/prompt.go @@ -1,9 +1,9 @@ package userprompt import ( - "github.com/OffchainLabs/prysm/v6/cmd/validator/flags" - "github.com/OffchainLabs/prysm/v6/io/file" - "github.com/OffchainLabs/prysm/v6/io/prompt" + "github.com/OffchainLabs/prysm/v7/cmd/validator/flags" + "github.com/OffchainLabs/prysm/v7/io/file" + "github.com/OffchainLabs/prysm/v7/io/prompt" "github.com/logrusorgru/aurora" "github.com/manifoldco/promptui" "github.com/pkg/errors" diff --git a/validator/accounts/wallet/BUILD.bazel b/validator/accounts/wallet/BUILD.bazel index 443857217e..017f08f40f 100644 --- a/validator/accounts/wallet/BUILD.bazel +++ b/validator/accounts/wallet/BUILD.bazel @@ -6,7 +6,7 @@ go_library( "log.go", "wallet.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/validator/accounts/wallet", + importpath = "github.com/OffchainLabs/prysm/v7/validator/accounts/wallet", visibility = [ "//cmd:__subpackages__", "//tools:__subpackages__", diff --git a/validator/accounts/wallet/wallet.go b/validator/accounts/wallet/wallet.go index 77de1bb018..ef38ae1427 100644 --- a/validator/accounts/wallet/wallet.go +++ b/validator/accounts/wallet/wallet.go @@ -8,16 +8,16 @@ import ( "path/filepath" "strings" - "github.com/OffchainLabs/prysm/v6/cmd/validator/flags" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/io/file" - "github.com/OffchainLabs/prysm/v6/io/prompt" - "github.com/OffchainLabs/prysm/v6/validator/accounts/iface" - accountsprompt "github.com/OffchainLabs/prysm/v6/validator/accounts/userprompt" - "github.com/OffchainLabs/prysm/v6/validator/keymanager" - "github.com/OffchainLabs/prysm/v6/validator/keymanager/derived" - "github.com/OffchainLabs/prysm/v6/validator/keymanager/local" - remoteweb3signer "github.com/OffchainLabs/prysm/v6/validator/keymanager/remote-web3signer" + "github.com/OffchainLabs/prysm/v7/cmd/validator/flags" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/io/file" + "github.com/OffchainLabs/prysm/v7/io/prompt" + "github.com/OffchainLabs/prysm/v7/validator/accounts/iface" + accountsprompt "github.com/OffchainLabs/prysm/v7/validator/accounts/userprompt" + "github.com/OffchainLabs/prysm/v7/validator/keymanager" + "github.com/OffchainLabs/prysm/v7/validator/keymanager/derived" + "github.com/OffchainLabs/prysm/v7/validator/keymanager/local" + remoteweb3signer "github.com/OffchainLabs/prysm/v7/validator/keymanager/remote-web3signer" "github.com/pkg/errors" "github.com/sirupsen/logrus" "github.com/urfave/cli/v2" diff --git a/validator/accounts/wallet/wallet_test.go b/validator/accounts/wallet/wallet_test.go index 8dd563a061..d1468d797e 100644 --- a/validator/accounts/wallet/wallet_test.go +++ b/validator/accounts/wallet/wallet_test.go @@ -8,14 +8,14 @@ import ( "reflect" "testing" - 
"github.com/OffchainLabs/prysm/v6/cmd/validator/flags" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/validator/accounts/iface" - "github.com/OffchainLabs/prysm/v6/validator/accounts/wallet" - "github.com/OffchainLabs/prysm/v6/validator/keymanager" - remoteweb3signer "github.com/OffchainLabs/prysm/v6/validator/keymanager/remote-web3signer" + "github.com/OffchainLabs/prysm/v7/cmd/validator/flags" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/validator/accounts/iface" + "github.com/OffchainLabs/prysm/v7/validator/accounts/wallet" + "github.com/OffchainLabs/prysm/v7/validator/keymanager" + remoteweb3signer "github.com/OffchainLabs/prysm/v7/validator/keymanager/remote-web3signer" "github.com/ethereum/go-ethereum/common/hexutil" "github.com/sirupsen/logrus" "github.com/urfave/cli/v2" diff --git a/validator/accounts/wallet_create.go b/validator/accounts/wallet_create.go index fe63380408..9fd5172c45 100644 --- a/validator/accounts/wallet_create.go +++ b/validator/accounts/wallet_create.go @@ -4,10 +4,10 @@ import ( "context" "encoding/json" - "github.com/OffchainLabs/prysm/v6/validator/accounts/wallet" - "github.com/OffchainLabs/prysm/v6/validator/keymanager" - "github.com/OffchainLabs/prysm/v6/validator/keymanager/derived" - "github.com/OffchainLabs/prysm/v6/validator/keymanager/local" + "github.com/OffchainLabs/prysm/v7/validator/accounts/wallet" + "github.com/OffchainLabs/prysm/v7/validator/keymanager" + "github.com/OffchainLabs/prysm/v7/validator/keymanager/derived" + "github.com/OffchainLabs/prysm/v7/validator/keymanager/local" "github.com/pkg/errors" ) diff --git a/validator/accounts/wallet_recover.go b/validator/accounts/wallet_recover.go index 304d318d2c..928d27d330 100644 --- a/validator/accounts/wallet_recover.go +++ b/validator/accounts/wallet_recover.go @@ -4,9 +4,9 @@ import ( "context" "strings" - "github.com/OffchainLabs/prysm/v6/validator/accounts/wallet" - "github.com/OffchainLabs/prysm/v6/validator/keymanager" - "github.com/OffchainLabs/prysm/v6/validator/keymanager/derived" + "github.com/OffchainLabs/prysm/v7/validator/accounts/wallet" + "github.com/OffchainLabs/prysm/v7/validator/keymanager" + "github.com/OffchainLabs/prysm/v7/validator/keymanager/derived" "github.com/pkg/errors" ) diff --git a/validator/accounts/wallet_recover_fuzz_test.go b/validator/accounts/wallet_recover_fuzz_test.go index 4f5ee23919..724e22fb6a 100644 --- a/validator/accounts/wallet_recover_fuzz_test.go +++ b/validator/accounts/wallet_recover_fuzz_test.go @@ -5,7 +5,7 @@ package accounts_test import ( "testing" - "github.com/OffchainLabs/prysm/v6/validator/accounts" + "github.com/OffchainLabs/prysm/v7/validator/accounts" ) func FuzzValidateMnemonic(f *testing.F) { diff --git a/validator/client/BUILD.bazel b/validator/client/BUILD.bazel index 45e96d3d97..63603c2eec 100644 --- a/validator/client/BUILD.bazel +++ b/validator/client/BUILD.bazel @@ -18,7 +18,7 @@ go_library( "validator.go", "wait_for_activation.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/validator/client", + importpath = "github.com/OffchainLabs/prysm/v7/validator/client", visibility = [ "//cmd:__subpackages__", "//validator:__subpackages__", diff --git a/validator/client/aggregate.go b/validator/client/aggregate.go index dc8e8f8777..24cc34e3cd 
100644 --- a/validator/client/aggregate.go +++ b/validator/client/aggregate.go @@ -6,19 +6,19 @@ import ( "net/http" "time" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/signing" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/crypto/bls" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing/trace" - "github.com/OffchainLabs/prysm/v6/network/httputil" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - validatorpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1/validator-client" - "github.com/OffchainLabs/prysm/v6/runtime/version" - prysmTime "github.com/OffchainLabs/prysm/v6/time" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/signing" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/crypto/bls" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing/trace" + "github.com/OffchainLabs/prysm/v7/network/httputil" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + validatorpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1/validator-client" + "github.com/OffchainLabs/prysm/v7/runtime/version" + prysmTime "github.com/OffchainLabs/prysm/v7/time" + "github.com/OffchainLabs/prysm/v7/time/slots" "github.com/pkg/errors" "google.golang.org/grpc/codes" "google.golang.org/grpc/status" diff --git a/validator/client/aggregate_test.go b/validator/client/aggregate_test.go index dd29748fef..a59d72bcf4 100644 --- a/validator/client/aggregate_test.go +++ b/validator/client/aggregate_test.go @@ -8,16 +8,16 @@ import ( "time" "github.com/OffchainLabs/go-bitfield" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/crypto/bls" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" - "github.com/OffchainLabs/prysm/v6/time/slots" - "github.com/OffchainLabs/prysm/v6/validator/client/iface" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/crypto/bls" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" + "github.com/OffchainLabs/prysm/v7/time/slots" + "github.com/OffchainLabs/prysm/v7/validator/client/iface" logTest "github.com/sirupsen/logrus/hooks/test" "go.uber.org/mock/gomock" ) diff --git a/validator/client/attest.go b/validator/client/attest.go index 87f99e9ab8..8d8aed684e 100644 --- a/validator/client/attest.go +++ b/validator/client/attest.go @@ -8,20 +8,20 @@ import ( "time" "github.com/OffchainLabs/go-bitfield" - "github.com/OffchainLabs/prysm/v6/async" - 
"github.com/OffchainLabs/prysm/v6/beacon-chain/core/signing" - "github.com/OffchainLabs/prysm/v6/config/features" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing/trace" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - validatorpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1/validator-client" - prysmTime "github.com/OffchainLabs/prysm/v6/time" - "github.com/OffchainLabs/prysm/v6/time/slots" - "github.com/OffchainLabs/prysm/v6/validator/client/iface" + "github.com/OffchainLabs/prysm/v7/async" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/signing" + "github.com/OffchainLabs/prysm/v7/config/features" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing/trace" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + validatorpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1/validator-client" + prysmTime "github.com/OffchainLabs/prysm/v7/time" + "github.com/OffchainLabs/prysm/v7/time/slots" + "github.com/OffchainLabs/prysm/v7/validator/client/iface" "github.com/pkg/errors" "github.com/sirupsen/logrus" ) diff --git a/validator/client/attest_test.go b/validator/client/attest_test.go index 0d62ae7be1..9b4dbb3f95 100644 --- a/validator/client/attest_test.go +++ b/validator/client/attest_test.go @@ -11,18 +11,18 @@ import ( "time" "github.com/OffchainLabs/go-bitfield" - "github.com/OffchainLabs/prysm/v6/async/event" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/signing" - "github.com/OffchainLabs/prysm/v6/config/features" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - validatorpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1/validator-client" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + "github.com/OffchainLabs/prysm/v7/async/event" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/signing" + "github.com/OffchainLabs/prysm/v7/config/features" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + validatorpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1/validator-client" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" logTest "github.com/sirupsen/logrus/hooks/test" "go.uber.org/mock/gomock" "gopkg.in/d4l3k/messagediff.v1" diff --git a/validator/client/beacon-api/BUILD.bazel b/validator/client/beacon-api/BUILD.bazel 
index ecfb847ff5..0816d7d63c 100644 --- a/validator/client/beacon-api/BUILD.bazel +++ b/validator/client/beacon-api/BUILD.bazel @@ -37,7 +37,7 @@ go_library( "sync_committee.go", "sync_committee_selections.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/validator/client/beacon-api", + importpath = "github.com/OffchainLabs/prysm/v7/validator/client/beacon-api", visibility = ["//validator:__subpackages__"], deps = [ "//api:go_default_library", diff --git a/validator/client/beacon-api/attestation_data.go b/validator/client/beacon-api/attestation_data.go index ab85b1986f..9d01aea16a 100644 --- a/validator/client/beacon-api/attestation_data.go +++ b/validator/client/beacon-api/attestation_data.go @@ -5,12 +5,12 @@ import ( "net/url" "strconv" - "github.com/OffchainLabs/prysm/v6/api/apiutil" - "github.com/OffchainLabs/prysm/v6/api/server/structs" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/api/apiutil" + "github.com/OffchainLabs/prysm/v7/api/server/structs" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" "github.com/pkg/errors" ) diff --git a/validator/client/beacon-api/attestation_data_test.go b/validator/client/beacon-api/attestation_data_test.go index 7e424dbac6..4841f3426b 100644 --- a/validator/client/beacon-api/attestation_data_test.go +++ b/validator/client/beacon-api/attestation_data_test.go @@ -6,11 +6,11 @@ import ( "strconv" "testing" - "github.com/OffchainLabs/prysm/v6/api/server/structs" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/validator/client/beacon-api/mock" + "github.com/OffchainLabs/prysm/v7/api/server/structs" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/validator/client/beacon-api/mock" "github.com/ethereum/go-ethereum/common/hexutil" "go.uber.org/mock/gomock" ) diff --git a/validator/client/beacon-api/beacon_api_beacon_chain_client.go b/validator/client/beacon-api/beacon_api_beacon_chain_client.go index 29ea1c4e37..c0aea27757 100644 --- a/validator/client/beacon-api/beacon_api_beacon_chain_client.go +++ b/validator/client/beacon-api/beacon_api_beacon_chain_client.go @@ -5,11 +5,11 @@ import ( "reflect" "strconv" - "github.com/OffchainLabs/prysm/v6/api/server/structs" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/time/slots" - "github.com/OffchainLabs/prysm/v6/validator/client/iface" + "github.com/OffchainLabs/prysm/v7/api/server/structs" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/time/slots" + "github.com/OffchainLabs/prysm/v7/validator/client/iface" "github.com/ethereum/go-ethereum/common/hexutil" "github.com/golang/protobuf/ptypes/empty" "github.com/pkg/errors" diff --git 
a/validator/client/beacon-api/beacon_api_beacon_chain_client_test.go b/validator/client/beacon-api/beacon_api_beacon_chain_client_test.go index 98d60e659a..f9f1218071 100644 --- a/validator/client/beacon-api/beacon_api_beacon_chain_client_test.go +++ b/validator/client/beacon-api/beacon_api_beacon_chain_client_test.go @@ -7,13 +7,13 @@ import ( "strconv" "testing" - "github.com/OffchainLabs/prysm/v6/api/server/structs" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/time/slots" - "github.com/OffchainLabs/prysm/v6/validator/client/beacon-api/mock" + "github.com/OffchainLabs/prysm/v7/api/server/structs" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/time/slots" + "github.com/OffchainLabs/prysm/v7/validator/client/beacon-api/mock" "github.com/ethereum/go-ethereum/common/hexutil" "go.uber.org/mock/gomock" "google.golang.org/protobuf/types/known/emptypb" diff --git a/validator/client/beacon-api/beacon_api_helpers.go b/validator/client/beacon-api/beacon_api_helpers.go index afe9a52ce4..4cb160f4ca 100644 --- a/validator/client/beacon-api/beacon_api_helpers.go +++ b/validator/client/beacon-api/beacon_api_helpers.go @@ -6,9 +6,9 @@ import ( "encoding/json" "strconv" - "github.com/OffchainLabs/prysm/v6/api/server/structs" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/api/server/structs" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" "github.com/pkg/errors" ) diff --git a/validator/client/beacon-api/beacon_api_helpers_test.go b/validator/client/beacon-api/beacon_api_helpers_test.go index 25b6f94b33..f25a2276f6 100644 --- a/validator/client/beacon-api/beacon_api_helpers_test.go +++ b/validator/client/beacon-api/beacon_api_helpers_test.go @@ -6,10 +6,10 @@ import ( "errors" "testing" - "github.com/OffchainLabs/prysm/v6/api/server/structs" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/validator/client/beacon-api/mock" + "github.com/OffchainLabs/prysm/v7/api/server/structs" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/validator/client/beacon-api/mock" "go.uber.org/mock/gomock" ) diff --git a/validator/client/beacon-api/beacon_api_node_client.go b/validator/client/beacon-api/beacon_api_node_client.go index 5eeb92ce03..819ff90de6 100644 --- a/validator/client/beacon-api/beacon_api_node_client.go +++ b/validator/client/beacon-api/beacon_api_node_client.go @@ -4,9 +4,9 @@ import ( "context" "strconv" - "github.com/OffchainLabs/prysm/v6/api/server/structs" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/validator/client/iface" + "github.com/OffchainLabs/prysm/v7/api/server/structs" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/validator/client/iface" 
"github.com/ethereum/go-ethereum/common/hexutil" "github.com/golang/protobuf/ptypes/empty" "github.com/pkg/errors" diff --git a/validator/client/beacon-api/beacon_api_node_client_test.go b/validator/client/beacon-api/beacon_api_node_client_test.go index f998016bf5..e0bfb313d5 100644 --- a/validator/client/beacon-api/beacon_api_node_client_test.go +++ b/validator/client/beacon-api/beacon_api_node_client_test.go @@ -4,10 +4,10 @@ import ( "errors" "testing" - "github.com/OffchainLabs/prysm/v6/api/server/structs" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/validator/client/beacon-api/mock" + "github.com/OffchainLabs/prysm/v7/api/server/structs" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/validator/client/beacon-api/mock" "github.com/ethereum/go-ethereum/common/hexutil" "go.uber.org/mock/gomock" "google.golang.org/protobuf/types/known/emptypb" diff --git a/validator/client/beacon-api/beacon_api_validator_client.go b/validator/client/beacon-api/beacon_api_validator_client.go index 784a7cae99..77e1066b66 100644 --- a/validator/client/beacon-api/beacon_api_validator_client.go +++ b/validator/client/beacon-api/beacon_api_validator_client.go @@ -5,12 +5,12 @@ import ( "net/http" "time" - "github.com/OffchainLabs/prysm/v6/api/client/event" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing/trace" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/validator/client/iface" + "github.com/OffchainLabs/prysm/v7/api/client/event" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing/trace" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/validator/client/iface" "github.com/ethereum/go-ethereum/common/hexutil" "github.com/golang/protobuf/ptypes/empty" "github.com/pkg/errors" diff --git a/validator/client/beacon-api/beacon_api_validator_client_test.go b/validator/client/beacon-api/beacon_api_validator_client_test.go index d86414fe1c..db69252ce6 100644 --- a/validator/client/beacon-api/beacon_api_validator_client_test.go +++ b/validator/client/beacon-api/beacon_api_validator_client_test.go @@ -7,16 +7,16 @@ import ( "net/http" "testing" - "github.com/OffchainLabs/prysm/v6/api/server/structs" - rpctesting "github.com/OffchainLabs/prysm/v6/beacon-chain/rpc/eth/shared/testing" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/network/httputil" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/validator/client/beacon-api/mock" + "github.com/OffchainLabs/prysm/v7/api/server/structs" + rpctesting "github.com/OffchainLabs/prysm/v7/beacon-chain/rpc/eth/shared/testing" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + 
"github.com/OffchainLabs/prysm/v7/network/httputil" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/validator/client/beacon-api/mock" "github.com/ethereum/go-ethereum/common/hexutil" "go.uber.org/mock/gomock" ) diff --git a/validator/client/beacon-api/beacon_block_converter.go b/validator/client/beacon-api/beacon_block_converter.go index 08de73c374..a8e77d64cb 100644 --- a/validator/client/beacon-api/beacon_block_converter.go +++ b/validator/client/beacon-api/beacon_block_converter.go @@ -4,11 +4,11 @@ import ( "math/big" "strconv" - "github.com/OffchainLabs/prysm/v6/api/server/structs" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - enginev1 "github.com/OffchainLabs/prysm/v6/proto/engine/v1" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/api/server/structs" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + enginev1 "github.com/OffchainLabs/prysm/v7/proto/engine/v1" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" "github.com/ethereum/go-ethereum/common/hexutil" "github.com/pkg/errors" ) diff --git a/validator/client/beacon-api/beacon_block_converter_test.go b/validator/client/beacon-api/beacon_block_converter_test.go index 735a7d78f3..925c06ee5a 100644 --- a/validator/client/beacon-api/beacon_block_converter_test.go +++ b/validator/client/beacon-api/beacon_block_converter_test.go @@ -3,10 +3,10 @@ package beacon_api import ( "testing" - "github.com/OffchainLabs/prysm/v6/api/server/structs" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - testhelpers "github.com/OffchainLabs/prysm/v6/validator/client/beacon-api/test-helpers" + "github.com/OffchainLabs/prysm/v7/api/server/structs" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + testhelpers "github.com/OffchainLabs/prysm/v7/validator/client/beacon-api/test-helpers" ) func TestGetBeaconBlockConverter_Phase0Valid(t *testing.T) { diff --git a/validator/client/beacon-api/beacon_block_json_helpers.go b/validator/client/beacon-api/beacon_block_json_helpers.go index 336f2431e8..104a62e6af 100644 --- a/validator/client/beacon-api/beacon_block_json_helpers.go +++ b/validator/client/beacon-api/beacon_block_json_helpers.go @@ -3,10 +3,10 @@ package beacon_api import ( "strconv" - "github.com/OffchainLabs/prysm/v6/api/apiutil" - "github.com/OffchainLabs/prysm/v6/api/server/structs" - enginev1 "github.com/OffchainLabs/prysm/v6/proto/engine/v1" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/api/apiutil" + "github.com/OffchainLabs/prysm/v7/api/server/structs" + enginev1 "github.com/OffchainLabs/prysm/v7/proto/engine/v1" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" "github.com/ethereum/go-ethereum/common/hexutil" ) diff --git a/validator/client/beacon-api/beacon_block_json_helpers_test.go b/validator/client/beacon-api/beacon_block_json_helpers_test.go index bd864159e9..c5ab9a3c54 100644 --- a/validator/client/beacon-api/beacon_block_json_helpers_test.go +++ b/validator/client/beacon-api/beacon_block_json_helpers_test.go @@ -3,10 +3,10 @@ package beacon_api import ( "testing" - 
"github.com/OffchainLabs/prysm/v6/api/server/structs" - enginev1 "github.com/OffchainLabs/prysm/v6/proto/engine/v1" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" + "github.com/OffchainLabs/prysm/v7/api/server/structs" + enginev1 "github.com/OffchainLabs/prysm/v7/proto/engine/v1" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" "github.com/ethereum/go-ethereum/common/hexutil" ) diff --git a/validator/client/beacon-api/beacon_block_proto_helpers.go b/validator/client/beacon-api/beacon_block_proto_helpers.go index 20b2d2d607..305734b32a 100644 --- a/validator/client/beacon-api/beacon_block_proto_helpers.go +++ b/validator/client/beacon-api/beacon_block_proto_helpers.go @@ -3,10 +3,10 @@ package beacon_api import ( "strconv" - "github.com/OffchainLabs/prysm/v6/api/server/structs" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - enginev1 "github.com/OffchainLabs/prysm/v6/proto/engine/v1" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/api/server/structs" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + enginev1 "github.com/OffchainLabs/prysm/v7/proto/engine/v1" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" "github.com/ethereum/go-ethereum/common/hexutil" "github.com/pkg/errors" ) diff --git a/validator/client/beacon-api/beacon_block_proto_helpers_test.go b/validator/client/beacon-api/beacon_block_proto_helpers_test.go index e65fdabaa2..6081b868f3 100644 --- a/validator/client/beacon-api/beacon_block_proto_helpers_test.go +++ b/validator/client/beacon-api/beacon_block_proto_helpers_test.go @@ -3,11 +3,11 @@ package beacon_api import ( "testing" - "github.com/OffchainLabs/prysm/v6/api/server/structs" - enginev1 "github.com/OffchainLabs/prysm/v6/proto/engine/v1" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/api/server/structs" + enginev1 "github.com/OffchainLabs/prysm/v7/proto/engine/v1" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" "github.com/ethereum/go-ethereum/common/hexutil" ) diff --git a/validator/client/beacon-api/beacon_committee_selections.go b/validator/client/beacon-api/beacon_committee_selections.go index 17bb0fa138..977ed72dfb 100644 --- a/validator/client/beacon-api/beacon_committee_selections.go +++ b/validator/client/beacon-api/beacon_committee_selections.go @@ -5,7 +5,7 @@ import ( "context" "encoding/json" - "github.com/OffchainLabs/prysm/v6/validator/client/iface" + "github.com/OffchainLabs/prysm/v7/validator/client/iface" "github.com/pkg/errors" ) diff --git a/validator/client/beacon-api/beacon_committee_selections_test.go b/validator/client/beacon-api/beacon_committee_selections_test.go index 91c96cac87..99232269ea 100644 --- a/validator/client/beacon-api/beacon_committee_selections_test.go +++ b/validator/client/beacon-api/beacon_committee_selections_test.go @@ -5,10 +5,10 @@ import ( "encoding/json" "testing" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/validator/client/beacon-api/mock" - testhelpers "github.com/OffchainLabs/prysm/v6/validator/client/beacon-api/test-helpers" - 
"github.com/OffchainLabs/prysm/v6/validator/client/iface" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/validator/client/beacon-api/mock" + testhelpers "github.com/OffchainLabs/prysm/v7/validator/client/beacon-api/test-helpers" + "github.com/OffchainLabs/prysm/v7/validator/client/iface" "github.com/pkg/errors" "go.uber.org/mock/gomock" ) diff --git a/validator/client/beacon-api/domain_data.go b/validator/client/beacon-api/domain_data.go index 270c58cd0b..2801df5c8c 100644 --- a/validator/client/beacon-api/domain_data.go +++ b/validator/client/beacon-api/domain_data.go @@ -3,12 +3,12 @@ package beacon_api import ( "context" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/signing" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/signing" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" "github.com/pkg/errors" ) diff --git a/validator/client/beacon-api/domain_data_test.go b/validator/client/beacon-api/domain_data_test.go index ee65efe24f..c6634777a9 100644 --- a/validator/client/beacon-api/domain_data_test.go +++ b/validator/client/beacon-api/domain_data_test.go @@ -4,12 +4,12 @@ import ( "errors" "testing" - "github.com/OffchainLabs/prysm/v6/api/server/structs" - "github.com/OffchainLabs/prysm/v6/config/params" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/validator/client/beacon-api/mock" + "github.com/OffchainLabs/prysm/v7/api/server/structs" + "github.com/OffchainLabs/prysm/v7/config/params" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/validator/client/beacon-api/mock" "github.com/ethereum/go-ethereum/common/hexutil" "go.uber.org/mock/gomock" ) diff --git a/validator/client/beacon-api/doppelganger.go b/validator/client/beacon-api/doppelganger.go index 2d154aee60..7d6a583764 100644 --- a/validator/client/beacon-api/doppelganger.go +++ b/validator/client/beacon-api/doppelganger.go @@ -6,10 +6,10 @@ import ( "fmt" "strconv" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" + "github.com/OffchainLabs/prysm/v7/time/slots" "github.com/ethereum/go-ethereum/common/hexutil" "github.com/pkg/errors" ) diff --git a/validator/client/beacon-api/doppelganger_test.go b/validator/client/beacon-api/doppelganger_test.go index 09c286ae90..ce3ea71881 100644 --- a/validator/client/beacon-api/doppelganger_test.go +++ 
b/validator/client/beacon-api/doppelganger_test.go @@ -5,11 +5,11 @@ import ( "encoding/json" "testing" - "github.com/OffchainLabs/prysm/v6/api/server/structs" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/validator/client/beacon-api/mock" + "github.com/OffchainLabs/prysm/v7/api/server/structs" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/validator/client/beacon-api/mock" "github.com/ethereum/go-ethereum/common/hexutil" "github.com/pkg/errors" "go.uber.org/mock/gomock" diff --git a/validator/client/beacon-api/duties.go b/validator/client/beacon-api/duties.go index e99f019dfd..b56c84b556 100644 --- a/validator/client/beacon-api/duties.go +++ b/validator/client/beacon-api/duties.go @@ -8,12 +8,12 @@ import ( "net/url" "strconv" - "github.com/OffchainLabs/prysm/v6/api/apiutil" - "github.com/OffchainLabs/prysm/v6/api/server/structs" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/consensus-types/validator" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/api/apiutil" + "github.com/OffchainLabs/prysm/v7/api/server/structs" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/consensus-types/validator" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" "github.com/ethereum/go-ethereum/common/hexutil" "github.com/pkg/errors" "golang.org/x/sync/errgroup" diff --git a/validator/client/beacon-api/duties_test.go b/validator/client/beacon-api/duties_test.go index 0e07168dca..df55346908 100644 --- a/validator/client/beacon-api/duties_test.go +++ b/validator/client/beacon-api/duties_test.go @@ -8,13 +8,13 @@ import ( "strconv" "testing" - "github.com/OffchainLabs/prysm/v6/api/server/structs" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/validator/client/beacon-api/mock" + "github.com/OffchainLabs/prysm/v7/api/server/structs" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/validator/client/beacon-api/mock" "github.com/ethereum/go-ethereum/common/hexutil" "go.uber.org/mock/gomock" ) diff --git a/validator/client/beacon-api/genesis.go b/validator/client/beacon-api/genesis.go index ddbf543d92..40a9e82733 100644 --- a/validator/client/beacon-api/genesis.go +++ b/validator/client/beacon-api/genesis.go @@ -7,11 +7,11 @@ import ( "sync" "time" - "github.com/OffchainLabs/prysm/v6/api/server/structs" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/network/httputil" - ethpb 
"github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/api/server/structs" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/network/httputil" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" "github.com/pkg/errors" ) diff --git a/validator/client/beacon-api/genesis_test.go b/validator/client/beacon-api/genesis_test.go index c93a432212..2795cd66c8 100644 --- a/validator/client/beacon-api/genesis_test.go +++ b/validator/client/beacon-api/genesis_test.go @@ -3,10 +3,10 @@ package beacon_api import ( "testing" - "github.com/OffchainLabs/prysm/v6/api/server/structs" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/validator/client/beacon-api/mock" + "github.com/OffchainLabs/prysm/v7/api/server/structs" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/validator/client/beacon-api/mock" "github.com/pkg/errors" "go.uber.org/mock/gomock" ) diff --git a/validator/client/beacon-api/get_beacon_block.go b/validator/client/beacon-api/get_beacon_block.go index ef17b363d8..3fa3d89521 100644 --- a/validator/client/beacon-api/get_beacon_block.go +++ b/validator/client/beacon-api/get_beacon_block.go @@ -9,12 +9,12 @@ import ( "strconv" "strings" - "github.com/OffchainLabs/prysm/v6/api" - "github.com/OffchainLabs/prysm/v6/api/apiutil" - "github.com/OffchainLabs/prysm/v6/api/server/structs" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" + "github.com/OffchainLabs/prysm/v7/api" + "github.com/OffchainLabs/prysm/v7/api/apiutil" + "github.com/OffchainLabs/prysm/v7/api/server/structs" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" "github.com/ethereum/go-ethereum/common/hexutil" "github.com/pkg/errors" ) diff --git a/validator/client/beacon-api/get_beacon_block_test.go b/validator/client/beacon-api/get_beacon_block_test.go index 9ce5fc2401..5b13b35217 100644 --- a/validator/client/beacon-api/get_beacon_block_test.go +++ b/validator/client/beacon-api/get_beacon_block_test.go @@ -7,14 +7,14 @@ import ( "net/http" "testing" - "github.com/OffchainLabs/prysm/v6/api" - "github.com/OffchainLabs/prysm/v6/api/server/structs" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/validator/client/beacon-api/mock" - testhelpers "github.com/OffchainLabs/prysm/v6/validator/client/beacon-api/test-helpers" + "github.com/OffchainLabs/prysm/v7/api" + "github.com/OffchainLabs/prysm/v7/api/server/structs" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/validator/client/beacon-api/mock" + testhelpers "github.com/OffchainLabs/prysm/v7/validator/client/beacon-api/test-helpers" 
"github.com/ethereum/go-ethereum/common/hexutil" "go.uber.org/mock/gomock" ) diff --git a/validator/client/beacon-api/index.go b/validator/client/beacon-api/index.go index 702af4f6d3..1904d603d8 100644 --- a/validator/client/beacon-api/index.go +++ b/validator/client/beacon-api/index.go @@ -5,8 +5,8 @@ import ( "fmt" "strconv" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" "github.com/ethereum/go-ethereum/common/hexutil" "github.com/pkg/errors" ) diff --git a/validator/client/beacon-api/index_test.go b/validator/client/beacon-api/index_test.go index 4f6e905bbf..23946694ab 100644 --- a/validator/client/beacon-api/index_test.go +++ b/validator/client/beacon-api/index_test.go @@ -6,13 +6,13 @@ import ( "net/url" "testing" - "github.com/OffchainLabs/prysm/v6/api/apiutil" - "github.com/OffchainLabs/prysm/v6/api/server/structs" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/validator/client/beacon-api/mock" + "github.com/OffchainLabs/prysm/v7/api/apiutil" + "github.com/OffchainLabs/prysm/v7/api/server/structs" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/validator/client/beacon-api/mock" "github.com/ethereum/go-ethereum/common/hexutil" "github.com/pkg/errors" "go.uber.org/mock/gomock" diff --git a/validator/client/beacon-api/mock/BUILD.bazel b/validator/client/beacon-api/mock/BUILD.bazel index ee02e898e1..d50d019dc8 100644 --- a/validator/client/beacon-api/mock/BUILD.bazel +++ b/validator/client/beacon-api/mock/BUILD.bazel @@ -9,7 +9,7 @@ go_library( "json_rest_handler_mock.go", "state_validators_mock.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/validator/client/beacon-api/mock", + importpath = "github.com/OffchainLabs/prysm/v7/validator/client/beacon-api/mock", visibility = ["//visibility:public"], deps = [ "//api/server/structs:go_default_library", diff --git a/validator/client/beacon-api/mock/beacon_block_converter_mock.go b/validator/client/beacon-api/mock/beacon_block_converter_mock.go index 508d1e48f3..0ca87316a9 100644 --- a/validator/client/beacon-api/mock/beacon_block_converter_mock.go +++ b/validator/client/beacon-api/mock/beacon_block_converter_mock.go @@ -12,8 +12,8 @@ package mock import ( reflect "reflect" - structs "github.com/OffchainLabs/prysm/v6/api/server/structs" - eth "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + structs "github.com/OffchainLabs/prysm/v7/api/server/structs" + eth "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" gomock "go.uber.org/mock/gomock" ) diff --git a/validator/client/beacon-api/mock/duties_mock.go b/validator/client/beacon-api/mock/duties_mock.go index c557c190d6..19886a4038 100644 --- a/validator/client/beacon-api/mock/duties_mock.go +++ b/validator/client/beacon-api/mock/duties_mock.go @@ -13,8 +13,8 @@ import ( context "context" reflect "reflect" - structs "github.com/OffchainLabs/prysm/v6/api/server/structs" - primitives 
"github.com/OffchainLabs/prysm/v6/consensus-types/primitives" + structs "github.com/OffchainLabs/prysm/v7/api/server/structs" + primitives "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" gomock "go.uber.org/mock/gomock" ) diff --git a/validator/client/beacon-api/mock/genesis_mock.go b/validator/client/beacon-api/mock/genesis_mock.go index 820f3b8424..e5740ce2af 100644 --- a/validator/client/beacon-api/mock/genesis_mock.go +++ b/validator/client/beacon-api/mock/genesis_mock.go @@ -13,7 +13,7 @@ import ( context "context" reflect "reflect" - structs "github.com/OffchainLabs/prysm/v6/api/server/structs" + structs "github.com/OffchainLabs/prysm/v7/api/server/structs" gomock "go.uber.org/mock/gomock" ) diff --git a/validator/client/beacon-api/mock/state_validators_mock.go b/validator/client/beacon-api/mock/state_validators_mock.go index 0e39e5ca1b..d71d144bd1 100644 --- a/validator/client/beacon-api/mock/state_validators_mock.go +++ b/validator/client/beacon-api/mock/state_validators_mock.go @@ -13,8 +13,8 @@ import ( context "context" reflect "reflect" - structs "github.com/OffchainLabs/prysm/v6/api/server/structs" - primitives "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" + structs "github.com/OffchainLabs/prysm/v7/api/server/structs" + primitives "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" gomock "go.uber.org/mock/gomock" ) diff --git a/validator/client/beacon-api/prepare_beacon_proposer.go b/validator/client/beacon-api/prepare_beacon_proposer.go index 209f57c1d0..f2c3e688f5 100644 --- a/validator/client/beacon-api/prepare_beacon_proposer.go +++ b/validator/client/beacon-api/prepare_beacon_proposer.go @@ -6,8 +6,8 @@ import ( "encoding/json" "strconv" - "github.com/OffchainLabs/prysm/v6/api/server/structs" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/api/server/structs" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" "github.com/ethereum/go-ethereum/common/hexutil" "github.com/pkg/errors" ) diff --git a/validator/client/beacon-api/prepare_beacon_proposer_test.go b/validator/client/beacon-api/prepare_beacon_proposer_test.go index 03f576f34b..dd1b419d53 100644 --- a/validator/client/beacon-api/prepare_beacon_proposer_test.go +++ b/validator/client/beacon-api/prepare_beacon_proposer_test.go @@ -5,11 +5,11 @@ import ( "encoding/json" "testing" - "github.com/OffchainLabs/prysm/v6/api/server/structs" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/validator/client/beacon-api/mock" + "github.com/OffchainLabs/prysm/v7/api/server/structs" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/validator/client/beacon-api/mock" "github.com/ethereum/go-ethereum/common/hexutil" "github.com/pkg/errors" "go.uber.org/mock/gomock" diff --git a/validator/client/beacon-api/propose_attestation.go b/validator/client/beacon-api/propose_attestation.go index c251f41a59..036889ad47 100644 --- a/validator/client/beacon-api/propose_attestation.go +++ b/validator/client/beacon-api/propose_attestation.go @@ -5,10 +5,10 @@ import ( "context" "encoding/json" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - 
"github.com/OffchainLabs/prysm/v6/runtime/version" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" + "github.com/OffchainLabs/prysm/v7/time/slots" "github.com/pkg/errors" ) diff --git a/validator/client/beacon-api/propose_attestation_test.go b/validator/client/beacon-api/propose_attestation_test.go index 3973d2bffe..babaeea463 100644 --- a/validator/client/beacon-api/propose_attestation_test.go +++ b/validator/client/beacon-api/propose_attestation_test.go @@ -6,16 +6,16 @@ import ( "errors" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/time/slots" - "github.com/OffchainLabs/prysm/v6/validator/client/beacon-api/mock" - testhelpers "github.com/OffchainLabs/prysm/v6/validator/client/beacon-api/test-helpers" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/time/slots" + "github.com/OffchainLabs/prysm/v7/validator/client/beacon-api/mock" + testhelpers "github.com/OffchainLabs/prysm/v7/validator/client/beacon-api/test-helpers" "go.uber.org/mock/gomock" ) diff --git a/validator/client/beacon-api/propose_beacon_block.go b/validator/client/beacon-api/propose_beacon_block.go index 7f2ed7f835..ce017c341e 100644 --- a/validator/client/beacon-api/propose_beacon_block.go +++ b/validator/client/beacon-api/propose_beacon_block.go @@ -6,9 +6,9 @@ import ( "encoding/json" "net/http" - "github.com/OffchainLabs/prysm/v6/api/server/structs" - "github.com/OffchainLabs/prysm/v6/network/httputil" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/api/server/structs" + "github.com/OffchainLabs/prysm/v7/network/httputil" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" "github.com/pkg/errors" ) diff --git a/validator/client/beacon-api/propose_beacon_block_test.go b/validator/client/beacon-api/propose_beacon_block_test.go index 8383efe058..f5671f3d07 100644 --- a/validator/client/beacon-api/propose_beacon_block_test.go +++ b/validator/client/beacon-api/propose_beacon_block_test.go @@ -7,15 +7,15 @@ import ( "net/http" "testing" - "github.com/OffchainLabs/prysm/v6/api/server/structs" - rpctesting "github.com/OffchainLabs/prysm/v6/beacon-chain/rpc/eth/shared/testing" - "github.com/OffchainLabs/prysm/v6/network/httputil" - engine "github.com/OffchainLabs/prysm/v6/proto/engine/v1" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/validator/client/beacon-api/mock" - testhelpers "github.com/OffchainLabs/prysm/v6/validator/client/beacon-api/test-helpers" + 
"github.com/OffchainLabs/prysm/v7/api/server/structs" + rpctesting "github.com/OffchainLabs/prysm/v7/beacon-chain/rpc/eth/shared/testing" + "github.com/OffchainLabs/prysm/v7/network/httputil" + engine "github.com/OffchainLabs/prysm/v7/proto/engine/v1" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/validator/client/beacon-api/mock" + testhelpers "github.com/OffchainLabs/prysm/v7/validator/client/beacon-api/test-helpers" "go.uber.org/mock/gomock" ) diff --git a/validator/client/beacon-api/propose_exit.go b/validator/client/beacon-api/propose_exit.go index 7dde0897b3..6d6f02e6f3 100644 --- a/validator/client/beacon-api/propose_exit.go +++ b/validator/client/beacon-api/propose_exit.go @@ -6,8 +6,8 @@ import ( "encoding/json" "strconv" - "github.com/OffchainLabs/prysm/v6/api/server/structs" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/api/server/structs" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" "github.com/ethereum/go-ethereum/common/hexutil" "github.com/pkg/errors" ) diff --git a/validator/client/beacon-api/propose_exit_test.go b/validator/client/beacon-api/propose_exit_test.go index 1ef545711a..5f93b76a47 100644 --- a/validator/client/beacon-api/propose_exit_test.go +++ b/validator/client/beacon-api/propose_exit_test.go @@ -5,11 +5,11 @@ import ( "encoding/json" "testing" - "github.com/OffchainLabs/prysm/v6/api/server/structs" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/validator/client/beacon-api/mock" + "github.com/OffchainLabs/prysm/v7/api/server/structs" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/validator/client/beacon-api/mock" "github.com/ethereum/go-ethereum/common/hexutil" "github.com/pkg/errors" "go.uber.org/mock/gomock" diff --git a/validator/client/beacon-api/prysm_beacon_chain_client.go b/validator/client/beacon-api/prysm_beacon_chain_client.go index 5dc9558b1c..09f7fb852b 100644 --- a/validator/client/beacon-api/prysm_beacon_chain_client.go +++ b/validator/client/beacon-api/prysm_beacon_chain_client.go @@ -9,11 +9,11 @@ import ( "strconv" "strings" - "github.com/OffchainLabs/prysm/v6/api/apiutil" - "github.com/OffchainLabs/prysm/v6/api/server/structs" - validator2 "github.com/OffchainLabs/prysm/v6/consensus-types/validator" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/validator/client/iface" + "github.com/OffchainLabs/prysm/v7/api/apiutil" + "github.com/OffchainLabs/prysm/v7/api/server/structs" + validator2 "github.com/OffchainLabs/prysm/v7/consensus-types/validator" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/validator/client/iface" "github.com/pkg/errors" ) diff --git a/validator/client/beacon-api/prysm_beacon_chain_client_test.go b/validator/client/beacon-api/prysm_beacon_chain_client_test.go index 37bd17aff5..5be8bd4069 100644 --- a/validator/client/beacon-api/prysm_beacon_chain_client_test.go +++ b/validator/client/beacon-api/prysm_beacon_chain_client_test.go @@ -6,13 +6,13 @@ import ( "errors" "testing" - 
"github.com/OffchainLabs/prysm/v6/api/server/structs" - "github.com/OffchainLabs/prysm/v6/consensus-types/validator" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/validator/client/beacon-api/mock" - "github.com/OffchainLabs/prysm/v6/validator/client/iface" + "github.com/OffchainLabs/prysm/v7/api/server/structs" + "github.com/OffchainLabs/prysm/v7/consensus-types/validator" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/validator/client/beacon-api/mock" + "github.com/OffchainLabs/prysm/v7/validator/client/iface" "go.uber.org/mock/gomock" ) diff --git a/validator/client/beacon-api/registration.go b/validator/client/beacon-api/registration.go index 2f38055b81..d6e18f44a9 100644 --- a/validator/client/beacon-api/registration.go +++ b/validator/client/beacon-api/registration.go @@ -5,8 +5,8 @@ import ( "context" "encoding/json" - "github.com/OffchainLabs/prysm/v6/api/server/structs" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/api/server/structs" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" "github.com/pkg/errors" ) diff --git a/validator/client/beacon-api/registration_test.go b/validator/client/beacon-api/registration_test.go index 88eb4f25bc..247094982c 100644 --- a/validator/client/beacon-api/registration_test.go +++ b/validator/client/beacon-api/registration_test.go @@ -5,11 +5,11 @@ import ( "encoding/json" "testing" - "github.com/OffchainLabs/prysm/v6/api/server/structs" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/validator/client/beacon-api/mock" + "github.com/OffchainLabs/prysm/v7/api/server/structs" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/validator/client/beacon-api/mock" "github.com/ethereum/go-ethereum/common/hexutil" "github.com/golang/protobuf/ptypes/empty" "github.com/pkg/errors" diff --git a/validator/client/beacon-api/rest_handler_client.go b/validator/client/beacon-api/rest_handler_client.go index 7f48af862a..f21964eab7 100644 --- a/validator/client/beacon-api/rest_handler_client.go +++ b/validator/client/beacon-api/rest_handler_client.go @@ -10,11 +10,11 @@ import ( "os" "strings" - "github.com/OffchainLabs/prysm/v6/api" - "github.com/OffchainLabs/prysm/v6/api/apiutil" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/network/httputil" - "github.com/OffchainLabs/prysm/v6/runtime/version" + "github.com/OffchainLabs/prysm/v7/api" + "github.com/OffchainLabs/prysm/v7/api/apiutil" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/network/httputil" + "github.com/OffchainLabs/prysm/v7/runtime/version" "github.com/pkg/errors" "github.com/sirupsen/logrus" ) diff --git a/validator/client/beacon-api/rest_handler_client_test.go b/validator/client/beacon-api/rest_handler_client_test.go index 476cacff29..a6ca684927 100644 --- a/validator/client/beacon-api/rest_handler_client_test.go +++ 
b/validator/client/beacon-api/rest_handler_client_test.go @@ -11,13 +11,13 @@ import ( "testing" "time" - "github.com/OffchainLabs/prysm/v6/api" - "github.com/OffchainLabs/prysm/v6/api/server/structs" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/network/httputil" - "github.com/OffchainLabs/prysm/v6/runtime/version" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/api" + "github.com/OffchainLabs/prysm/v7/api/server/structs" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/network/httputil" + "github.com/OffchainLabs/prysm/v7/runtime/version" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" "github.com/pkg/errors" "github.com/sirupsen/logrus" "github.com/sirupsen/logrus/hooks/test" diff --git a/validator/client/beacon-api/state_validators.go b/validator/client/beacon-api/state_validators.go index a7f3b07c88..5553dd2c04 100644 --- a/validator/client/beacon-api/state_validators.go +++ b/validator/client/beacon-api/state_validators.go @@ -8,9 +8,9 @@ import ( "net/url" "strconv" - "github.com/OffchainLabs/prysm/v6/api/apiutil" - "github.com/OffchainLabs/prysm/v6/api/server/structs" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/api/apiutil" + "github.com/OffchainLabs/prysm/v7/api/server/structs" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" "github.com/pkg/errors" ) diff --git a/validator/client/beacon-api/state_validators_test.go b/validator/client/beacon-api/state_validators_test.go index e05a9fdff8..02ec1a01c8 100644 --- a/validator/client/beacon-api/state_validators_test.go +++ b/validator/client/beacon-api/state_validators_test.go @@ -6,12 +6,12 @@ import ( "net/url" "testing" - "github.com/OffchainLabs/prysm/v6/api/apiutil" - "github.com/OffchainLabs/prysm/v6/api/server/structs" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/validator/client/beacon-api/mock" + "github.com/OffchainLabs/prysm/v7/api/apiutil" + "github.com/OffchainLabs/prysm/v7/api/server/structs" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/validator/client/beacon-api/mock" "github.com/pkg/errors" "go.uber.org/mock/gomock" ) diff --git a/validator/client/beacon-api/status.go b/validator/client/beacon-api/status.go index d6f2a47d5f..fc1f7456dd 100644 --- a/validator/client/beacon-api/status.go +++ b/validator/client/beacon-api/status.go @@ -4,10 +4,10 @@ import ( "context" "strconv" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/validator/client/iface" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/validator/client/iface" "github.com/ethereum/go-ethereum/common/hexutil" "github.com/pkg/errors" ) diff --git a/validator/client/beacon-api/status_test.go 
b/validator/client/beacon-api/status_test.go index 64315cd17f..4fb66b09ea 100644 --- a/validator/client/beacon-api/status_test.go +++ b/validator/client/beacon-api/status_test.go @@ -5,14 +5,14 @@ import ( "fmt" "testing" - "github.com/OffchainLabs/prysm/v6/api/server/structs" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/validator/client/beacon-api/mock" - "github.com/OffchainLabs/prysm/v6/validator/client/iface" + "github.com/OffchainLabs/prysm/v7/api/server/structs" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/validator/client/beacon-api/mock" + "github.com/OffchainLabs/prysm/v7/validator/client/iface" "github.com/ethereum/go-ethereum/common/hexutil" "go.uber.org/mock/gomock" ) diff --git a/validator/client/beacon-api/stream_blocks.go b/validator/client/beacon-api/stream_blocks.go index cfea7812a2..bac92a91ed 100644 --- a/validator/client/beacon-api/stream_blocks.go +++ b/validator/client/beacon-api/stream_blocks.go @@ -6,9 +6,9 @@ import ( "encoding/json" "time" - "github.com/OffchainLabs/prysm/v6/api/server/structs" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/api/server/structs" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" "github.com/ethereum/go-ethereum/common/hexutil" "github.com/pkg/errors" "google.golang.org/grpc" diff --git a/validator/client/beacon-api/stream_blocks_test.go b/validator/client/beacon-api/stream_blocks_test.go index eb10cfb644..0b65200136 100644 --- a/validator/client/beacon-api/stream_blocks_test.go +++ b/validator/client/beacon-api/stream_blocks_test.go @@ -6,13 +6,13 @@ import ( "testing" "time" - "github.com/OffchainLabs/prysm/v6/api/server/structs" - rpctesting "github.com/OffchainLabs/prysm/v6/beacon-chain/rpc/eth/shared/testing" - eth "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/validator/client/beacon-api/mock" - testhelpers "github.com/OffchainLabs/prysm/v6/validator/client/beacon-api/test-helpers" + "github.com/OffchainLabs/prysm/v7/api/server/structs" + rpctesting "github.com/OffchainLabs/prysm/v7/beacon-chain/rpc/eth/shared/testing" + eth "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/validator/client/beacon-api/mock" + testhelpers "github.com/OffchainLabs/prysm/v7/validator/client/beacon-api/test-helpers" "github.com/ethereum/go-ethereum/common/hexutil" "github.com/pkg/errors" "go.uber.org/mock/gomock" diff --git a/validator/client/beacon-api/submit_aggregate_selection_proof.go b/validator/client/beacon-api/submit_aggregate_selection_proof.go index ed0f89cdad..53a20d9cc9 100644 --- 
a/validator/client/beacon-api/submit_aggregate_selection_proof.go +++ b/validator/client/beacon-api/submit_aggregate_selection_proof.go @@ -6,11 +6,11 @@ import ( "net/url" "strconv" - "github.com/OffchainLabs/prysm/v6/api/apiutil" - "github.com/OffchainLabs/prysm/v6/api/server/structs" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/helpers" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/api/apiutil" + "github.com/OffchainLabs/prysm/v7/api/server/structs" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" "github.com/ethereum/go-ethereum/common/hexutil" "github.com/pkg/errors" ) diff --git a/validator/client/beacon-api/submit_aggregate_selection_proof_test.go b/validator/client/beacon-api/submit_aggregate_selection_proof_test.go index 8afcd27a6a..de9a5de317 100644 --- a/validator/client/beacon-api/submit_aggregate_selection_proof_test.go +++ b/validator/client/beacon-api/submit_aggregate_selection_proof_test.go @@ -6,13 +6,13 @@ import ( "fmt" "testing" - "github.com/OffchainLabs/prysm/v6/api/server/structs" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/validator/client/beacon-api/mock" - testhelpers "github.com/OffchainLabs/prysm/v6/validator/client/beacon-api/test-helpers" + "github.com/OffchainLabs/prysm/v7/api/server/structs" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/validator/client/beacon-api/mock" + testhelpers "github.com/OffchainLabs/prysm/v7/validator/client/beacon-api/test-helpers" "github.com/ethereum/go-ethereum/common/hexutil" "go.uber.org/mock/gomock" ) diff --git a/validator/client/beacon-api/submit_signed_aggregate_proof.go b/validator/client/beacon-api/submit_signed_aggregate_proof.go index e493610a5b..6e901787f1 100644 --- a/validator/client/beacon-api/submit_signed_aggregate_proof.go +++ b/validator/client/beacon-api/submit_signed_aggregate_proof.go @@ -5,10 +5,10 @@ import ( "context" "encoding/json" - "github.com/OffchainLabs/prysm/v6/api/server/structs" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/api/server/structs" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" + "github.com/OffchainLabs/prysm/v7/time/slots" "github.com/pkg/errors" ) diff --git a/validator/client/beacon-api/submit_signed_aggregate_proof_test.go b/validator/client/beacon-api/submit_signed_aggregate_proof_test.go index caa2967883..6243fe4b81 100644 --- a/validator/client/beacon-api/submit_signed_aggregate_proof_test.go +++ b/validator/client/beacon-api/submit_signed_aggregate_proof_test.go @@ -5,15 +5,15 @@ import ( "encoding/json" "testing" - "github.com/OffchainLabs/prysm/v6/api/server/structs" - "github.com/OffchainLabs/prysm/v6/config/params" - ethpb 
"github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/time/slots" - "github.com/OffchainLabs/prysm/v6/validator/client/beacon-api/mock" - testhelpers "github.com/OffchainLabs/prysm/v6/validator/client/beacon-api/test-helpers" + "github.com/OffchainLabs/prysm/v7/api/server/structs" + "github.com/OffchainLabs/prysm/v7/config/params" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/time/slots" + "github.com/OffchainLabs/prysm/v7/validator/client/beacon-api/mock" + testhelpers "github.com/OffchainLabs/prysm/v7/validator/client/beacon-api/test-helpers" "github.com/pkg/errors" "go.uber.org/mock/gomock" ) diff --git a/validator/client/beacon-api/submit_signed_contribution_and_proof.go b/validator/client/beacon-api/submit_signed_contribution_and_proof.go index bf220cff03..fb2d58e406 100644 --- a/validator/client/beacon-api/submit_signed_contribution_and_proof.go +++ b/validator/client/beacon-api/submit_signed_contribution_and_proof.go @@ -6,8 +6,8 @@ import ( "encoding/json" "strconv" - "github.com/OffchainLabs/prysm/v6/api/server/structs" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/api/server/structs" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" "github.com/ethereum/go-ethereum/common/hexutil" "github.com/pkg/errors" ) diff --git a/validator/client/beacon-api/submit_signed_contribution_and_proof_test.go b/validator/client/beacon-api/submit_signed_contribution_and_proof_test.go index 4bc7c1e93d..46741325d7 100644 --- a/validator/client/beacon-api/submit_signed_contribution_and_proof_test.go +++ b/validator/client/beacon-api/submit_signed_contribution_and_proof_test.go @@ -6,11 +6,11 @@ import ( "errors" "testing" - "github.com/OffchainLabs/prysm/v6/api/server/structs" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/validator/client/beacon-api/mock" + "github.com/OffchainLabs/prysm/v7/api/server/structs" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/validator/client/beacon-api/mock" "github.com/ethereum/go-ethereum/common/hexutil" "go.uber.org/mock/gomock" ) diff --git a/validator/client/beacon-api/subscribe_committee_subnets.go b/validator/client/beacon-api/subscribe_committee_subnets.go index fd4a7fd157..1098ec0ba7 100644 --- a/validator/client/beacon-api/subscribe_committee_subnets.go +++ b/validator/client/beacon-api/subscribe_committee_subnets.go @@ -6,8 +6,8 @@ import ( "encoding/json" "strconv" - "github.com/OffchainLabs/prysm/v6/api/server/structs" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/api/server/structs" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" "github.com/pkg/errors" ) diff --git a/validator/client/beacon-api/subscribe_committee_subnets_test.go b/validator/client/beacon-api/subscribe_committee_subnets_test.go index 
838df84637..cbf6d088a4 100644 --- a/validator/client/beacon-api/subscribe_committee_subnets_test.go +++ b/validator/client/beacon-api/subscribe_committee_subnets_test.go @@ -7,12 +7,12 @@ import ( "strconv" "testing" - "github.com/OffchainLabs/prysm/v6/api/server/structs" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/validator/client/beacon-api/mock" + "github.com/OffchainLabs/prysm/v7/api/server/structs" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/validator/client/beacon-api/mock" "go.uber.org/mock/gomock" ) diff --git a/validator/client/beacon-api/sync_committee.go b/validator/client/beacon-api/sync_committee.go index f41a1fd01a..5f1f616733 100644 --- a/validator/client/beacon-api/sync_committee.go +++ b/validator/client/beacon-api/sync_committee.go @@ -7,11 +7,11 @@ import ( "net/url" "strconv" - "github.com/OffchainLabs/prysm/v6/api/apiutil" - "github.com/OffchainLabs/prysm/v6/api/server/structs" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/api/apiutil" + "github.com/OffchainLabs/prysm/v7/api/server/structs" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/time/slots" "github.com/ethereum/go-ethereum/common/hexutil" "github.com/pkg/errors" ) diff --git a/validator/client/beacon-api/sync_committee_selections.go b/validator/client/beacon-api/sync_committee_selections.go index 2e8dfe556b..04034fb0e6 100644 --- a/validator/client/beacon-api/sync_committee_selections.go +++ b/validator/client/beacon-api/sync_committee_selections.go @@ -5,7 +5,7 @@ import ( "context" "encoding/json" - "github.com/OffchainLabs/prysm/v6/validator/client/iface" + "github.com/OffchainLabs/prysm/v7/validator/client/iface" "github.com/pkg/errors" ) diff --git a/validator/client/beacon-api/sync_committee_selections_test.go b/validator/client/beacon-api/sync_committee_selections_test.go index 3f1e1feca8..f5235dd446 100644 --- a/validator/client/beacon-api/sync_committee_selections_test.go +++ b/validator/client/beacon-api/sync_committee_selections_test.go @@ -5,10 +5,10 @@ import ( "encoding/json" "testing" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/validator/client/beacon-api/mock" - testhelpers "github.com/OffchainLabs/prysm/v6/validator/client/beacon-api/test-helpers" - "github.com/OffchainLabs/prysm/v6/validator/client/iface" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/validator/client/beacon-api/mock" + testhelpers "github.com/OffchainLabs/prysm/v7/validator/client/beacon-api/test-helpers" + "github.com/OffchainLabs/prysm/v7/validator/client/iface" "github.com/pkg/errors" "go.uber.org/mock/gomock" ) diff --git a/validator/client/beacon-api/sync_committee_test.go b/validator/client/beacon-api/sync_committee_test.go index 73384a140f..e23996d470 100644 --- a/validator/client/beacon-api/sync_committee_test.go 
+++ b/validator/client/beacon-api/sync_committee_test.go @@ -7,14 +7,14 @@ import ( "net/url" "testing" - "github.com/OffchainLabs/prysm/v6/api/apiutil" - "github.com/OffchainLabs/prysm/v6/api/server/structs" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/time/slots" - "github.com/OffchainLabs/prysm/v6/validator/client/beacon-api/mock" + "github.com/OffchainLabs/prysm/v7/api/apiutil" + "github.com/OffchainLabs/prysm/v7/api/server/structs" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/time/slots" + "github.com/OffchainLabs/prysm/v7/validator/client/beacon-api/mock" "github.com/ethereum/go-ethereum/common/hexutil" "github.com/golang/protobuf/ptypes/empty" "github.com/pkg/errors" diff --git a/validator/client/beacon-api/test-helpers/BUILD.bazel b/validator/client/beacon-api/test-helpers/BUILD.bazel index a8ce261dff..b89ce36376 100644 --- a/validator/client/beacon-api/test-helpers/BUILD.bazel +++ b/validator/client/beacon-api/test-helpers/BUILD.bazel @@ -12,7 +12,7 @@ go_library( "phase0_beacon_block_test_helpers.go", "test_helpers.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/validator/client/beacon-api/test-helpers", + importpath = "github.com/OffchainLabs/prysm/v7/validator/client/beacon-api/test-helpers", visibility = ["//validator:__subpackages__"], deps = [ "//api/server/structs:go_default_library", diff --git a/validator/client/beacon-api/test-helpers/altair_beacon_block_test_helpers.go b/validator/client/beacon-api/test-helpers/altair_beacon_block_test_helpers.go index dbe61215bc..26b52150c0 100644 --- a/validator/client/beacon-api/test-helpers/altair_beacon_block_test_helpers.go +++ b/validator/client/beacon-api/test-helpers/altair_beacon_block_test_helpers.go @@ -1,8 +1,8 @@ package test_helpers import ( - "github.com/OffchainLabs/prysm/v6/api/server/structs" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/api/server/structs" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" ) func GenerateProtoAltairBeaconBlock() *ethpb.BeaconBlockAltair { diff --git a/validator/client/beacon-api/test-helpers/bellatrix_beacon_block_test_helpers.go b/validator/client/beacon-api/test-helpers/bellatrix_beacon_block_test_helpers.go index d8dfe1bb02..f8f0f347a0 100644 --- a/validator/client/beacon-api/test-helpers/bellatrix_beacon_block_test_helpers.go +++ b/validator/client/beacon-api/test-helpers/bellatrix_beacon_block_test_helpers.go @@ -1,10 +1,10 @@ package test_helpers import ( - "github.com/OffchainLabs/prysm/v6/api/server/structs" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - enginev1 "github.com/OffchainLabs/prysm/v6/proto/engine/v1" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/api/server/structs" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + enginev1 "github.com/OffchainLabs/prysm/v7/proto/engine/v1" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" ) func GenerateProtoBellatrixBeaconBlock() *ethpb.BeaconBlockBellatrix { diff --git 
a/validator/client/beacon-api/test-helpers/capella_beacon_block_test_helpers.go b/validator/client/beacon-api/test-helpers/capella_beacon_block_test_helpers.go index 5aa8bb4a39..7eee7f2809 100644 --- a/validator/client/beacon-api/test-helpers/capella_beacon_block_test_helpers.go +++ b/validator/client/beacon-api/test-helpers/capella_beacon_block_test_helpers.go @@ -1,10 +1,10 @@ package test_helpers import ( - "github.com/OffchainLabs/prysm/v6/api/server/structs" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - enginev1 "github.com/OffchainLabs/prysm/v6/proto/engine/v1" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/api/server/structs" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + enginev1 "github.com/OffchainLabs/prysm/v7/proto/engine/v1" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" ) func GenerateProtoCapellaBeaconBlock() *ethpb.BeaconBlockCapella { diff --git a/validator/client/beacon-api/test-helpers/deneb_beacon_block_test_helpers.go b/validator/client/beacon-api/test-helpers/deneb_beacon_block_test_helpers.go index 3c768b286e..5e5bbc7bf2 100644 --- a/validator/client/beacon-api/test-helpers/deneb_beacon_block_test_helpers.go +++ b/validator/client/beacon-api/test-helpers/deneb_beacon_block_test_helpers.go @@ -1,10 +1,10 @@ package test_helpers import ( - "github.com/OffchainLabs/prysm/v6/api/server/structs" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - enginev1 "github.com/OffchainLabs/prysm/v6/proto/engine/v1" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/api/server/structs" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + enginev1 "github.com/OffchainLabs/prysm/v7/proto/engine/v1" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" ) func GenerateProtoDenebBeaconBlockContents() *ethpb.BeaconBlockContentsDeneb { diff --git a/validator/client/beacon-api/test-helpers/electra_beacon_block_test_helpers.go b/validator/client/beacon-api/test-helpers/electra_beacon_block_test_helpers.go index cfc8d9c4b9..b5b3125368 100644 --- a/validator/client/beacon-api/test-helpers/electra_beacon_block_test_helpers.go +++ b/validator/client/beacon-api/test-helpers/electra_beacon_block_test_helpers.go @@ -1,10 +1,10 @@ package test_helpers import ( - "github.com/OffchainLabs/prysm/v6/api/server/structs" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - enginev1 "github.com/OffchainLabs/prysm/v6/proto/engine/v1" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/api/server/structs" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + enginev1 "github.com/OffchainLabs/prysm/v7/proto/engine/v1" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" ) func GenerateProtoElectraBeaconBlockContents() *ethpb.BeaconBlockContentsElectra { diff --git a/validator/client/beacon-api/test-helpers/phase0_beacon_block_test_helpers.go b/validator/client/beacon-api/test-helpers/phase0_beacon_block_test_helpers.go index 52d9a6c9f1..6bc7f757cd 100644 --- a/validator/client/beacon-api/test-helpers/phase0_beacon_block_test_helpers.go +++ b/validator/client/beacon-api/test-helpers/phase0_beacon_block_test_helpers.go @@ -1,8 +1,8 @@ package test_helpers import ( - "github.com/OffchainLabs/prysm/v6/api/server/structs" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/api/server/structs" + ethpb 
"github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" ) func GenerateProtoPhase0BeaconBlock() *ethpb.BeaconBlock { diff --git a/validator/client/beacon-api/wait_for_chain_start_test.go b/validator/client/beacon-api/wait_for_chain_start_test.go index 5e91158b58..0ef14e323e 100644 --- a/validator/client/beacon-api/wait_for_chain_start_test.go +++ b/validator/client/beacon-api/wait_for_chain_start_test.go @@ -5,11 +5,11 @@ import ( "net/http" "testing" - "github.com/OffchainLabs/prysm/v6/api/server/structs" - "github.com/OffchainLabs/prysm/v6/network/httputil" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/validator/client/beacon-api/mock" + "github.com/OffchainLabs/prysm/v7/api/server/structs" + "github.com/OffchainLabs/prysm/v7/network/httputil" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/validator/client/beacon-api/mock" "github.com/ethereum/go-ethereum/common/hexutil" "go.uber.org/mock/gomock" "google.golang.org/protobuf/types/known/emptypb" diff --git a/validator/client/beacon-chain-client-factory/BUILD.bazel b/validator/client/beacon-chain-client-factory/BUILD.bazel index 49b11ce738..533f771de7 100644 --- a/validator/client/beacon-chain-client-factory/BUILD.bazel +++ b/validator/client/beacon-chain-client-factory/BUILD.bazel @@ -3,7 +3,7 @@ load("@prysm//tools/go:def.bzl", "go_library") go_library( name = "go_default_library", srcs = ["beacon_chain_client_factory.go"], - importpath = "github.com/OffchainLabs/prysm/v6/validator/client/beacon-chain-client-factory", + importpath = "github.com/OffchainLabs/prysm/v7/validator/client/beacon-chain-client-factory", visibility = ["//visibility:public"], deps = [ "//config/features:go_default_library", diff --git a/validator/client/beacon-chain-client-factory/beacon_chain_client_factory.go b/validator/client/beacon-chain-client-factory/beacon_chain_client_factory.go index cac3c2c9c8..9b506d8f1f 100644 --- a/validator/client/beacon-chain-client-factory/beacon_chain_client_factory.go +++ b/validator/client/beacon-chain-client-factory/beacon_chain_client_factory.go @@ -1,12 +1,12 @@ package beacon_chain_client_factory import ( - "github.com/OffchainLabs/prysm/v6/config/features" - beaconApi "github.com/OffchainLabs/prysm/v6/validator/client/beacon-api" - grpcApi "github.com/OffchainLabs/prysm/v6/validator/client/grpc-api" - "github.com/OffchainLabs/prysm/v6/validator/client/iface" - nodeClientFactory "github.com/OffchainLabs/prysm/v6/validator/client/node-client-factory" - validatorHelpers "github.com/OffchainLabs/prysm/v6/validator/helpers" + "github.com/OffchainLabs/prysm/v7/config/features" + beaconApi "github.com/OffchainLabs/prysm/v7/validator/client/beacon-api" + grpcApi "github.com/OffchainLabs/prysm/v7/validator/client/grpc-api" + "github.com/OffchainLabs/prysm/v7/validator/client/iface" + nodeClientFactory "github.com/OffchainLabs/prysm/v7/validator/client/node-client-factory" + validatorHelpers "github.com/OffchainLabs/prysm/v7/validator/helpers" ) func NewChainClient(validatorConn validatorHelpers.NodeConnection, jsonRestHandler beaconApi.RestHandler) iface.ChainClient { diff --git a/validator/client/grpc-api/BUILD.bazel b/validator/client/grpc-api/BUILD.bazel index f8eda10448..d116021ec9 100644 --- a/validator/client/grpc-api/BUILD.bazel +++ b/validator/client/grpc-api/BUILD.bazel @@ -8,7 +8,7 @@ go_library( "grpc_prysm_beacon_chain_client.go", 
"grpc_validator_client.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/validator/client/grpc-api", + importpath = "github.com/OffchainLabs/prysm/v7/validator/client/grpc-api", visibility = ["//validator:__subpackages__"], deps = [ "//api/client:go_default_library", diff --git a/validator/client/grpc-api/grpc_beacon_chain_client.go b/validator/client/grpc-api/grpc_beacon_chain_client.go index 983ba691b9..e2f29626c2 100644 --- a/validator/client/grpc-api/grpc_beacon_chain_client.go +++ b/validator/client/grpc-api/grpc_beacon_chain_client.go @@ -3,8 +3,8 @@ package grpc_api import ( "context" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/validator/client/iface" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/validator/client/iface" "github.com/golang/protobuf/ptypes/empty" "google.golang.org/grpc" ) diff --git a/validator/client/grpc-api/grpc_node_client.go b/validator/client/grpc-api/grpc_node_client.go index 8a4a3e070b..983fd4f55c 100644 --- a/validator/client/grpc-api/grpc_node_client.go +++ b/validator/client/grpc-api/grpc_node_client.go @@ -3,8 +3,8 @@ package grpc_api import ( "context" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/validator/client/iface" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/validator/client/iface" "github.com/golang/protobuf/ptypes/empty" log "github.com/sirupsen/logrus" "google.golang.org/grpc" diff --git a/validator/client/grpc-api/grpc_prysm_beacon_chain_client.go b/validator/client/grpc-api/grpc_prysm_beacon_chain_client.go index cd23e8e3f3..08034e9019 100644 --- a/validator/client/grpc-api/grpc_prysm_beacon_chain_client.go +++ b/validator/client/grpc-api/grpc_prysm_beacon_chain_client.go @@ -5,13 +5,13 @@ import ( "fmt" "sort" - "github.com/OffchainLabs/prysm/v6/beacon-chain/rpc/eth/helpers" - statenative "github.com/OffchainLabs/prysm/v6/beacon-chain/state/state-native" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/consensus-types/validator" - eth "github.com/OffchainLabs/prysm/v6/proto/eth/v1" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/validator/client/iface" + "github.com/OffchainLabs/prysm/v7/beacon-chain/rpc/eth/helpers" + statenative "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/consensus-types/validator" + eth "github.com/OffchainLabs/prysm/v7/proto/eth/v1" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/validator/client/iface" "github.com/golang/protobuf/ptypes/empty" "github.com/pkg/errors" "google.golang.org/grpc" diff --git a/validator/client/grpc-api/grpc_prysm_beacon_chain_client_test.go b/validator/client/grpc-api/grpc_prysm_beacon_chain_client_test.go index 6df8da8e04..a65b32aa0c 100644 --- a/validator/client/grpc-api/grpc_prysm_beacon_chain_client_test.go +++ b/validator/client/grpc-api/grpc_prysm_beacon_chain_client_test.go @@ -3,14 +3,14 @@ package grpc_api import ( "testing" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/consensus-types/validator" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - 
"github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" - mock "github.com/OffchainLabs/prysm/v6/testing/validator-mock" - "github.com/OffchainLabs/prysm/v6/validator/client/iface" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/consensus-types/validator" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" + mock "github.com/OffchainLabs/prysm/v7/testing/validator-mock" + "github.com/OffchainLabs/prysm/v7/validator/client/iface" "go.uber.org/mock/gomock" ) diff --git a/validator/client/grpc-api/grpc_validator_client.go b/validator/client/grpc-api/grpc_validator_client.go index e4f5086879..85663483b3 100644 --- a/validator/client/grpc-api/grpc_validator_client.go +++ b/validator/client/grpc-api/grpc_validator_client.go @@ -5,15 +5,15 @@ import ( "encoding/json" "strconv" - "github.com/OffchainLabs/prysm/v6/api/client" - eventClient "github.com/OffchainLabs/prysm/v6/api/client/event" - "github.com/OffchainLabs/prysm/v6/api/server/structs" - "github.com/OffchainLabs/prysm/v6/config/features" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing/trace" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/validator/client/iface" + "github.com/OffchainLabs/prysm/v7/api/client" + eventClient "github.com/OffchainLabs/prysm/v7/api/client/event" + "github.com/OffchainLabs/prysm/v7/api/server/structs" + "github.com/OffchainLabs/prysm/v7/config/features" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing/trace" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/validator/client/iface" "github.com/ethereum/go-ethereum/common/hexutil" "github.com/golang/protobuf/ptypes/empty" "github.com/pkg/errors" diff --git a/validator/client/grpc-api/grpc_validator_client_test.go b/validator/client/grpc-api/grpc_validator_client_test.go index 57a5e675d3..3197a77e5c 100644 --- a/validator/client/grpc-api/grpc_validator_client_test.go +++ b/validator/client/grpc-api/grpc_validator_client_test.go @@ -7,13 +7,13 @@ import ( "testing" "time" - eventClient "github.com/OffchainLabs/prysm/v6/api/client/event" - "github.com/OffchainLabs/prysm/v6/api/server/structs" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - eth "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - mock2 "github.com/OffchainLabs/prysm/v6/testing/mock" - "github.com/OffchainLabs/prysm/v6/testing/require" + eventClient "github.com/OffchainLabs/prysm/v7/api/client/event" + "github.com/OffchainLabs/prysm/v7/api/server/structs" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + eth "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + mock2 "github.com/OffchainLabs/prysm/v7/testing/mock" + "github.com/OffchainLabs/prysm/v7/testing/require" logTest "github.com/sirupsen/logrus/hooks/test" "go.uber.org/mock/gomock" "google.golang.org/protobuf/types/known/emptypb" diff --git a/validator/client/health_monitor.go 
b/validator/client/health_monitor.go index 110a63e363..9f918941c4 100644 --- a/validator/client/health_monitor.go +++ b/validator/client/health_monitor.go @@ -5,9 +5,9 @@ import ( "sync" "time" - "github.com/OffchainLabs/prysm/v6/async/event" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/validator/client/iface" + "github.com/OffchainLabs/prysm/v7/async/event" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/validator/client/iface" "github.com/sirupsen/logrus" ) diff --git a/validator/client/health_monitor_test.go b/validator/client/health_monitor_test.go index 51bb4fe85f..640b70ecde 100644 --- a/validator/client/health_monitor_test.go +++ b/validator/client/health_monitor_test.go @@ -6,13 +6,13 @@ import ( "testing" "time" - "github.com/OffchainLabs/prysm/v6/async/event" + "github.com/OffchainLabs/prysm/v7/async/event" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" "go.uber.org/mock/gomock" - "github.com/OffchainLabs/prysm/v6/config/params" - validatormock "github.com/OffchainLabs/prysm/v6/testing/validator-mock" + "github.com/OffchainLabs/prysm/v7/config/params" + validatormock "github.com/OffchainLabs/prysm/v7/testing/validator-mock" ) // TestHealthMonitor_IsHealthy_Concurrency tests thread-safety of IsHealthy. diff --git a/validator/client/iface/BUILD.bazel b/validator/client/iface/BUILD.bazel index c2e38d2b41..25d84b9326 100644 --- a/validator/client/iface/BUILD.bazel +++ b/validator/client/iface/BUILD.bazel @@ -9,7 +9,7 @@ go_library( "validator.go", "validator_client.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/validator/client/iface", + importpath = "github.com/OffchainLabs/prysm/v7/validator/client/iface", visibility = ["//visibility:public"], deps = [ "//api/client/event:go_default_library", diff --git a/validator/client/iface/chain_client.go b/validator/client/iface/chain_client.go index 01553f4601..9f72878f4d 100644 --- a/validator/client/iface/chain_client.go +++ b/validator/client/iface/chain_client.go @@ -3,7 +3,7 @@ package iface import ( "context" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" "github.com/golang/protobuf/ptypes/empty" ) diff --git a/validator/client/iface/node_client.go b/validator/client/iface/node_client.go index 2062703460..859c067c27 100644 --- a/validator/client/iface/node_client.go +++ b/validator/client/iface/node_client.go @@ -3,7 +3,7 @@ package iface import ( "context" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" "github.com/golang/protobuf/ptypes/empty" ) diff --git a/validator/client/iface/prysm_chain_client.go b/validator/client/iface/prysm_chain_client.go index 563a571f8c..d0f44db729 100644 --- a/validator/client/iface/prysm_chain_client.go +++ b/validator/client/iface/prysm_chain_client.go @@ -3,8 +3,8 @@ package iface import ( "context" - "github.com/OffchainLabs/prysm/v6/consensus-types/validator" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/consensus-types/validator" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" "github.com/pkg/errors" ) diff --git a/validator/client/iface/validator.go b/validator/client/iface/validator.go index 4a8827d6d3..db3ecc044c 100644 --- a/validator/client/iface/validator.go +++ b/validator/client/iface/validator.go @@ -4,14 +4,14 @@ import ( "context" "time" - 
"github.com/OffchainLabs/prysm/v6/api/client/event" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/proposer" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/crypto/bls" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - validatorpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1/validator-client" - "github.com/OffchainLabs/prysm/v6/validator/keymanager" + "github.com/OffchainLabs/prysm/v7/api/client/event" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/proposer" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/crypto/bls" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + validatorpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1/validator-client" + "github.com/OffchainLabs/prysm/v7/validator/keymanager" ) // ValidatorRole defines the validator role. diff --git a/validator/client/iface/validator_client.go b/validator/client/iface/validator_client.go index 3f5e6d56a2..0ffaaa3e06 100644 --- a/validator/client/iface/validator_client.go +++ b/validator/client/iface/validator_client.go @@ -5,9 +5,9 @@ import ( "encoding/json" "strconv" - "github.com/OffchainLabs/prysm/v6/api/client/event" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/api/client/event" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" "github.com/ethereum/go-ethereum/common/hexutil" "github.com/golang/protobuf/ptypes/empty" "github.com/pkg/errors" diff --git a/validator/client/key_reload.go b/validator/client/key_reload.go index 28be3be09f..a351a846a9 100644 --- a/validator/client/key_reload.go +++ b/validator/client/key_reload.go @@ -3,8 +3,8 @@ package client import ( "context" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing/trace" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing/trace" ) // HandleKeyReload makes sure the validator keeps operating correctly after a change to the underlying keys. 
diff --git a/validator/client/key_reload_test.go b/validator/client/key_reload_test.go index a39d4c6569..9cf167f778 100644 --- a/validator/client/key_reload_test.go +++ b/validator/client/key_reload_test.go @@ -4,12 +4,12 @@ import ( "testing" "time" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - validatormock "github.com/OffchainLabs/prysm/v6/testing/validator-mock" - "github.com/OffchainLabs/prysm/v6/validator/client/testutil" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + validatormock "github.com/OffchainLabs/prysm/v7/testing/validator-mock" + "github.com/OffchainLabs/prysm/v7/validator/client/testutil" "github.com/pkg/errors" logTest "github.com/sirupsen/logrus/hooks/test" "go.uber.org/mock/gomock" diff --git a/validator/client/log.go b/validator/client/log.go index 2949f3535a..952831aed1 100644 --- a/validator/client/log.go +++ b/validator/client/log.go @@ -4,9 +4,9 @@ import ( "fmt" "strconv" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" "github.com/pkg/errors" "github.com/sirupsen/logrus" ) diff --git a/validator/client/log_test.go b/validator/client/log_test.go index a0a00ecf62..3118498adc 100644 --- a/validator/client/log_test.go +++ b/validator/client/log_test.go @@ -3,12 +3,12 @@ package client import ( "testing" - field_params "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" + field_params "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" logTest "github.com/sirupsen/logrus/hooks/test" ) diff --git a/validator/client/metrics.go b/validator/client/metrics.go index 9f74a54988..ef40759be1 100644 --- a/validator/client/metrics.go +++ b/validator/client/metrics.go @@ -4,13 +4,13 @@ import ( "context" "fmt" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/time/slots" - "github.com/OffchainLabs/prysm/v6/validator/client/iface" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + 
"github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/time/slots" + "github.com/OffchainLabs/prysm/v7/validator/client/iface" "github.com/pkg/errors" "github.com/prometheus/client_golang/prometheus" "github.com/prometheus/client_golang/prometheus/promauto" diff --git a/validator/client/metrics_test.go b/validator/client/metrics_test.go index 89e99bd26f..7f1bc252e0 100644 --- a/validator/client/metrics_test.go +++ b/validator/client/metrics_test.go @@ -3,13 +3,13 @@ package client import ( "testing" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/time/slots" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/time/slots" logTest "github.com/sirupsen/logrus/hooks/test" ) diff --git a/validator/client/node-client-factory/BUILD.bazel b/validator/client/node-client-factory/BUILD.bazel index fb145daa4c..bdb885bde8 100644 --- a/validator/client/node-client-factory/BUILD.bazel +++ b/validator/client/node-client-factory/BUILD.bazel @@ -3,7 +3,7 @@ load("@prysm//tools/go:def.bzl", "go_library") go_library( name = "go_default_library", srcs = ["node_client_factory.go"], - importpath = "github.com/OffchainLabs/prysm/v6/validator/client/node-client-factory", + importpath = "github.com/OffchainLabs/prysm/v7/validator/client/node-client-factory", visibility = ["//visibility:public"], deps = [ "//config/features:go_default_library", diff --git a/validator/client/node-client-factory/node_client_factory.go b/validator/client/node-client-factory/node_client_factory.go index b933cd9116..9915266c01 100644 --- a/validator/client/node-client-factory/node_client_factory.go +++ b/validator/client/node-client-factory/node_client_factory.go @@ -1,11 +1,11 @@ package node_client_factory import ( - "github.com/OffchainLabs/prysm/v6/config/features" - beaconApi "github.com/OffchainLabs/prysm/v6/validator/client/beacon-api" - grpcApi "github.com/OffchainLabs/prysm/v6/validator/client/grpc-api" - "github.com/OffchainLabs/prysm/v6/validator/client/iface" - validatorHelpers "github.com/OffchainLabs/prysm/v6/validator/helpers" + "github.com/OffchainLabs/prysm/v7/config/features" + beaconApi "github.com/OffchainLabs/prysm/v7/validator/client/beacon-api" + grpcApi "github.com/OffchainLabs/prysm/v7/validator/client/grpc-api" + "github.com/OffchainLabs/prysm/v7/validator/client/iface" + validatorHelpers "github.com/OffchainLabs/prysm/v7/validator/helpers" ) func NewNodeClient(validatorConn validatorHelpers.NodeConnection, jsonRestHandler beaconApi.RestHandler) iface.NodeClient { diff --git a/validator/client/propose.go b/validator/client/propose.go index 36df26b2af..fb7ff1bfee 100644 --- a/validator/client/propose.go +++ b/validator/client/propose.go @@ -6,23 +6,23 @@ import ( "fmt" "time" - "github.com/OffchainLabs/prysm/v6/async" - 
"github.com/OffchainLabs/prysm/v6/beacon-chain/core/signing" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/config/proposer" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/crypto/bls" - "github.com/OffchainLabs/prysm/v6/crypto/rand" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing/trace" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - validatorpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1/validator-client" - "github.com/OffchainLabs/prysm/v6/runtime/version" - "github.com/OffchainLabs/prysm/v6/time/slots" - "github.com/OffchainLabs/prysm/v6/validator/client/iface" + "github.com/OffchainLabs/prysm/v7/async" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/signing" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/config/proposer" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/crypto/bls" + "github.com/OffchainLabs/prysm/v7/crypto/rand" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing/trace" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + validatorpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1/validator-client" + "github.com/OffchainLabs/prysm/v7/runtime/version" + "github.com/OffchainLabs/prysm/v7/time/slots" + "github.com/OffchainLabs/prysm/v7/validator/client/iface" "github.com/ethereum/go-ethereum/common/hexutil" "github.com/golang/protobuf/ptypes/timestamp" "github.com/pkg/errors" diff --git a/validator/client/propose_test.go b/validator/client/propose_test.go index 9fcd3cce95..e9a75fe68f 100644 --- a/validator/client/propose_test.go +++ b/validator/client/propose_test.go @@ -8,26 +8,26 @@ import ( "strings" "testing" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/signing" - lruwrpr "github.com/OffchainLabs/prysm/v6/cache/lru" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/config/proposer" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - blocktest "github.com/OffchainLabs/prysm/v6/consensus-types/blocks/testing" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/crypto/bls" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - validatorpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1/validator-client" - "github.com/OffchainLabs/prysm/v6/runtime/version" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" - validatormock "github.com/OffchainLabs/prysm/v6/testing/validator-mock" - testing2 "github.com/OffchainLabs/prysm/v6/validator/db/testing" - "github.com/OffchainLabs/prysm/v6/validator/graffiti" + 
"github.com/OffchainLabs/prysm/v7/beacon-chain/core/signing" + lruwrpr "github.com/OffchainLabs/prysm/v7/cache/lru" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/config/proposer" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + blocktest "github.com/OffchainLabs/prysm/v7/consensus-types/blocks/testing" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/crypto/bls" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + validatorpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1/validator-client" + "github.com/OffchainLabs/prysm/v7/runtime/version" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" + validatormock "github.com/OffchainLabs/prysm/v7/testing/validator-mock" + testing2 "github.com/OffchainLabs/prysm/v7/validator/db/testing" + "github.com/OffchainLabs/prysm/v7/validator/graffiti" "github.com/ethereum/go-ethereum/common/hexutil" logTest "github.com/sirupsen/logrus/hooks/test" "go.uber.org/mock/gomock" diff --git a/validator/client/registration.go b/validator/client/registration.go index a2043c19aa..342d7048a8 100644 --- a/validator/client/registration.go +++ b/validator/client/registration.go @@ -4,14 +4,14 @@ import ( "context" "strings" - "github.com/OffchainLabs/prysm/v6/beacon-chain/builder" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/signing" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing/trace" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - validatorpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1/validator-client" - "github.com/OffchainLabs/prysm/v6/validator/client/iface" + "github.com/OffchainLabs/prysm/v7/beacon-chain/builder" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/signing" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing/trace" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + validatorpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1/validator-client" + "github.com/OffchainLabs/prysm/v7/validator/client/iface" "github.com/ethereum/go-ethereum/common/hexutil" "github.com/pkg/errors" ) diff --git a/validator/client/registration_test.go b/validator/client/registration_test.go index 8e0f97e7dc..54ea1bdfd1 100644 --- a/validator/client/registration_test.go +++ b/validator/client/registration_test.go @@ -5,11 +5,11 @@ import ( "testing" "time" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/require" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/require" "github.com/ethereum/go-ethereum/common/hexutil" 
"github.com/pkg/errors" "go.uber.org/mock/gomock" diff --git a/validator/client/runner.go b/validator/client/runner.go index 63f6066d19..37aa7c370b 100644 --- a/validator/client/runner.go +++ b/validator/client/runner.go @@ -6,14 +6,14 @@ import ( "sync" "time" - "github.com/OffchainLabs/prysm/v6/api/client" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - prysmTrace "github.com/OffchainLabs/prysm/v6/monitoring/tracing/trace" - "github.com/OffchainLabs/prysm/v6/time/slots" - "github.com/OffchainLabs/prysm/v6/validator/client/iface" + "github.com/OffchainLabs/prysm/v7/api/client" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + prysmTrace "github.com/OffchainLabs/prysm/v7/monitoring/tracing/trace" + "github.com/OffchainLabs/prysm/v7/time/slots" + "github.com/OffchainLabs/prysm/v7/validator/client/iface" "github.com/pkg/errors" "go.opentelemetry.io/otel/trace" "google.golang.org/grpc/codes" diff --git a/validator/client/runner_test.go b/validator/client/runner_test.go index 9d332472fd..5915a23397 100644 --- a/validator/client/runner_test.go +++ b/validator/client/runner_test.go @@ -11,23 +11,23 @@ import ( "time" "github.com/OffchainLabs/go-bitfield" - "github.com/OffchainLabs/prysm/v6/async/event" - "github.com/OffchainLabs/prysm/v6/cache/lru" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/config/proposer" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" - validatormock "github.com/OffchainLabs/prysm/v6/testing/validator-mock" - "github.com/OffchainLabs/prysm/v6/time/slots" - "github.com/OffchainLabs/prysm/v6/validator/client/iface" - "github.com/OffchainLabs/prysm/v6/validator/client/testutil" - testing2 "github.com/OffchainLabs/prysm/v6/validator/db/testing" - "github.com/OffchainLabs/prysm/v6/validator/keymanager/local" + "github.com/OffchainLabs/prysm/v7/async/event" + "github.com/OffchainLabs/prysm/v7/cache/lru" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/config/proposer" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" + validatormock "github.com/OffchainLabs/prysm/v7/testing/validator-mock" + "github.com/OffchainLabs/prysm/v7/time/slots" + "github.com/OffchainLabs/prysm/v7/validator/client/iface" + "github.com/OffchainLabs/prysm/v7/validator/client/testutil" + testing2 "github.com/OffchainLabs/prysm/v7/validator/db/testing" + "github.com/OffchainLabs/prysm/v7/validator/keymanager/local" 
"github.com/ethereum/go-ethereum/common" "github.com/pkg/errors" "github.com/sirupsen/logrus" diff --git a/validator/client/service.go b/validator/client/service.go index c3d40cd886..f4e27254f5 100644 --- a/validator/client/service.go +++ b/validator/client/service.go @@ -6,28 +6,28 @@ import ( "strings" "time" - api "github.com/OffchainLabs/prysm/v6/api/client" - eventClient "github.com/OffchainLabs/prysm/v6/api/client/event" - grpcutil "github.com/OffchainLabs/prysm/v6/api/grpc" - "github.com/OffchainLabs/prysm/v6/async/event" - lruwrpr "github.com/OffchainLabs/prysm/v6/cache/lru" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/config/proposer" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/validator/accounts/wallet" - beaconApi "github.com/OffchainLabs/prysm/v6/validator/client/beacon-api" - beaconChainClientFactory "github.com/OffchainLabs/prysm/v6/validator/client/beacon-chain-client-factory" - "github.com/OffchainLabs/prysm/v6/validator/client/iface" - nodeclientfactory "github.com/OffchainLabs/prysm/v6/validator/client/node-client-factory" - validatorclientfactory "github.com/OffchainLabs/prysm/v6/validator/client/validator-client-factory" - "github.com/OffchainLabs/prysm/v6/validator/db" - "github.com/OffchainLabs/prysm/v6/validator/graffiti" - validatorHelpers "github.com/OffchainLabs/prysm/v6/validator/helpers" - "github.com/OffchainLabs/prysm/v6/validator/keymanager" - "github.com/OffchainLabs/prysm/v6/validator/keymanager/local" - remoteweb3signer "github.com/OffchainLabs/prysm/v6/validator/keymanager/remote-web3signer" + api "github.com/OffchainLabs/prysm/v7/api/client" + eventClient "github.com/OffchainLabs/prysm/v7/api/client/event" + grpcutil "github.com/OffchainLabs/prysm/v7/api/grpc" + "github.com/OffchainLabs/prysm/v7/async/event" + lruwrpr "github.com/OffchainLabs/prysm/v7/cache/lru" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/config/proposer" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/validator/accounts/wallet" + beaconApi "github.com/OffchainLabs/prysm/v7/validator/client/beacon-api" + beaconChainClientFactory "github.com/OffchainLabs/prysm/v7/validator/client/beacon-chain-client-factory" + "github.com/OffchainLabs/prysm/v7/validator/client/iface" + nodeclientfactory "github.com/OffchainLabs/prysm/v7/validator/client/node-client-factory" + validatorclientfactory "github.com/OffchainLabs/prysm/v7/validator/client/validator-client-factory" + "github.com/OffchainLabs/prysm/v7/validator/db" + "github.com/OffchainLabs/prysm/v7/validator/graffiti" + validatorHelpers "github.com/OffchainLabs/prysm/v7/validator/helpers" + "github.com/OffchainLabs/prysm/v7/validator/keymanager" + "github.com/OffchainLabs/prysm/v7/validator/keymanager/local" + remoteweb3signer "github.com/OffchainLabs/prysm/v7/validator/keymanager/remote-web3signer" "github.com/dgraph-io/ristretto/v2" middleware "github.com/grpc-ecosystem/go-grpc-middleware" grpcretry "github.com/grpc-ecosystem/go-grpc-middleware/retry" diff --git a/validator/client/service_test.go b/validator/client/service_test.go index 0cfaac6a19..9d12f8c170 100644 --- 
a/validator/client/service_test.go +++ b/validator/client/service_test.go @@ -6,9 +6,9 @@ import ( "testing" "time" - "github.com/OffchainLabs/prysm/v6/runtime" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/runtime" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" logTest "github.com/sirupsen/logrus/hooks/test" "google.golang.org/grpc/metadata" ) diff --git a/validator/client/slashing_protection_interchange_test.go b/validator/client/slashing_protection_interchange_test.go index 9e0bc5dd8c..29dcb66b0f 100644 --- a/validator/client/slashing_protection_interchange_test.go +++ b/validator/client/slashing_protection_interchange_test.go @@ -8,13 +8,13 @@ import ( "strings" "testing" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/io/file" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" - "github.com/OffchainLabs/prysm/v6/validator/helpers" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/io/file" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" + "github.com/OffchainLabs/prysm/v7/validator/helpers" "github.com/bazelbuild/rules_go/go/tools/bazel" ) diff --git a/validator/client/sync_committee.go b/validator/client/sync_committee.go index 85793c9991..b82a9fcd7e 100644 --- a/validator/client/sync_committee.go +++ b/validator/client/sync_committee.go @@ -5,18 +5,18 @@ import ( "fmt" "time" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/altair" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/signing" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing/trace" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - validatorpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1/validator-client" - "github.com/OffchainLabs/prysm/v6/time/slots" - "github.com/OffchainLabs/prysm/v6/validator/client/iface" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/altair" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/signing" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing/trace" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + validatorpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1/validator-client" + "github.com/OffchainLabs/prysm/v7/time/slots" + "github.com/OffchainLabs/prysm/v7/validator/client/iface" "github.com/ethereum/go-ethereum/common/hexutil" emptypb "github.com/golang/protobuf/ptypes/empty" "github.com/pkg/errors" diff --git 
a/validator/client/sync_committee_test.go b/validator/client/sync_committee_test.go index 156500c046..4ed80ae8a7 100644 --- a/validator/client/sync_committee_test.go +++ b/validator/client/sync_committee_test.go @@ -7,13 +7,13 @@ import ( "testing" "github.com/OffchainLabs/go-bitfield" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/crypto/bls" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/crypto/bls" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" "github.com/pkg/errors" logTest "github.com/sirupsen/logrus/hooks/test" "go.uber.org/mock/gomock" diff --git a/validator/client/testutil/BUILD.bazel b/validator/client/testutil/BUILD.bazel index 2ebc08e194..c83d11fde9 100644 --- a/validator/client/testutil/BUILD.bazel +++ b/validator/client/testutil/BUILD.bazel @@ -7,7 +7,7 @@ go_library( "helper.go", "mock_validator.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/validator/client/testutil", + importpath = "github.com/OffchainLabs/prysm/v7/validator/client/testutil", visibility = ["//validator:__subpackages__"], deps = [ "//api/client:go_default_library", diff --git a/validator/client/testutil/helper.go b/validator/client/testutil/helper.go index 73d3e96f6d..b3544178d7 100644 --- a/validator/client/testutil/helper.go +++ b/validator/client/testutil/helper.go @@ -1,9 +1,9 @@ package testutil import ( - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" ) // ActiveKey represents a public key whose status is ACTIVE. 
diff --git a/validator/client/testutil/mock_validator.go b/validator/client/testutil/mock_validator.go index bda12d2916..91b7a84a85 100644 --- a/validator/client/testutil/mock_validator.go +++ b/validator/client/testutil/mock_validator.go @@ -6,16 +6,16 @@ import ( "errors" "time" - api "github.com/OffchainLabs/prysm/v6/api/client" - "github.com/OffchainLabs/prysm/v6/api/client/event" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/config/proposer" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - prysmTime "github.com/OffchainLabs/prysm/v6/time" - "github.com/OffchainLabs/prysm/v6/validator/client/iface" - "github.com/OffchainLabs/prysm/v6/validator/keymanager" + api "github.com/OffchainLabs/prysm/v7/api/client" + "github.com/OffchainLabs/prysm/v7/api/client/event" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/config/proposer" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + prysmTime "github.com/OffchainLabs/prysm/v7/time" + "github.com/OffchainLabs/prysm/v7/validator/client/iface" + "github.com/OffchainLabs/prysm/v7/validator/keymanager" log "github.com/sirupsen/logrus" ) diff --git a/validator/client/validator-client-factory/BUILD.bazel b/validator/client/validator-client-factory/BUILD.bazel index f019b2c8f3..613140703c 100644 --- a/validator/client/validator-client-factory/BUILD.bazel +++ b/validator/client/validator-client-factory/BUILD.bazel @@ -3,7 +3,7 @@ load("@prysm//tools/go:def.bzl", "go_library") go_library( name = "go_default_library", srcs = ["validator_client_factory.go"], - importpath = "github.com/OffchainLabs/prysm/v6/validator/client/validator-client-factory", + importpath = "github.com/OffchainLabs/prysm/v7/validator/client/validator-client-factory", visibility = ["//visibility:public"], deps = [ "//config/features:go_default_library", diff --git a/validator/client/validator-client-factory/validator_client_factory.go b/validator/client/validator-client-factory/validator_client_factory.go index 57a7174eb8..671b6112a9 100644 --- a/validator/client/validator-client-factory/validator_client_factory.go +++ b/validator/client/validator-client-factory/validator_client_factory.go @@ -1,11 +1,11 @@ package validator_client_factory import ( - "github.com/OffchainLabs/prysm/v6/config/features" - beaconApi "github.com/OffchainLabs/prysm/v6/validator/client/beacon-api" - grpcApi "github.com/OffchainLabs/prysm/v6/validator/client/grpc-api" - "github.com/OffchainLabs/prysm/v6/validator/client/iface" - validatorHelpers "github.com/OffchainLabs/prysm/v6/validator/helpers" + "github.com/OffchainLabs/prysm/v7/config/features" + beaconApi "github.com/OffchainLabs/prysm/v7/validator/client/beacon-api" + grpcApi "github.com/OffchainLabs/prysm/v7/validator/client/grpc-api" + "github.com/OffchainLabs/prysm/v7/validator/client/iface" + validatorHelpers "github.com/OffchainLabs/prysm/v7/validator/helpers" ) func NewValidatorClient( diff --git a/validator/client/validator.go b/validator/client/validator.go index 2817efbdaf..ed4f8762ea 100644 --- a/validator/client/validator.go +++ b/validator/client/validator.go @@ -16,31 +16,31 @@ import ( "sync/atomic" "time" - "github.com/OffchainLabs/prysm/v6/api/client" - eventClient 
"github.com/OffchainLabs/prysm/v6/api/client/event" - "github.com/OffchainLabs/prysm/v6/api/server/structs" - "github.com/OffchainLabs/prysm/v6/async/event" - "github.com/OffchainLabs/prysm/v6/beacon-chain/core/altair" - "github.com/OffchainLabs/prysm/v6/cmd" - "github.com/OffchainLabs/prysm/v6/config/features" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/config/proposer" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/crypto/hash" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing/trace" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/time/slots" - accountsiface "github.com/OffchainLabs/prysm/v6/validator/accounts/iface" - "github.com/OffchainLabs/prysm/v6/validator/accounts/wallet" - "github.com/OffchainLabs/prysm/v6/validator/client/iface" - "github.com/OffchainLabs/prysm/v6/validator/db" - dbCommon "github.com/OffchainLabs/prysm/v6/validator/db/common" - "github.com/OffchainLabs/prysm/v6/validator/graffiti" - "github.com/OffchainLabs/prysm/v6/validator/keymanager" - "github.com/OffchainLabs/prysm/v6/validator/keymanager/local" - remoteweb3signer "github.com/OffchainLabs/prysm/v6/validator/keymanager/remote-web3signer" + "github.com/OffchainLabs/prysm/v7/api/client" + eventClient "github.com/OffchainLabs/prysm/v7/api/client/event" + "github.com/OffchainLabs/prysm/v7/api/server/structs" + "github.com/OffchainLabs/prysm/v7/async/event" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/altair" + "github.com/OffchainLabs/prysm/v7/cmd" + "github.com/OffchainLabs/prysm/v7/config/features" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/config/proposer" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/crypto/hash" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing/trace" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/time/slots" + accountsiface "github.com/OffchainLabs/prysm/v7/validator/accounts/iface" + "github.com/OffchainLabs/prysm/v7/validator/accounts/wallet" + "github.com/OffchainLabs/prysm/v7/validator/client/iface" + "github.com/OffchainLabs/prysm/v7/validator/db" + dbCommon "github.com/OffchainLabs/prysm/v7/validator/db/common" + "github.com/OffchainLabs/prysm/v7/validator/graffiti" + "github.com/OffchainLabs/prysm/v7/validator/keymanager" + "github.com/OffchainLabs/prysm/v7/validator/keymanager/local" + remoteweb3signer "github.com/OffchainLabs/prysm/v7/validator/keymanager/remote-web3signer" "github.com/dgraph-io/ristretto/v2" "github.com/ethereum/go-ethereum/common" "github.com/ethereum/go-ethereum/common/hexutil" diff --git a/validator/client/validator_test.go b/validator/client/validator_test.go index 08698fd320..bf43769bf0 100644 --- a/validator/client/validator_test.go +++ b/validator/client/validator_test.go @@ -16,30 +16,30 @@ import ( "testing" "time" - "github.com/OffchainLabs/prysm/v6/api/server/structs" - "github.com/OffchainLabs/prysm/v6/async/event" - "github.com/OffchainLabs/prysm/v6/cmd/validator/flags" - "github.com/OffchainLabs/prysm/v6/config/features" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - 
"github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/config/proposer" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - validatorType "github.com/OffchainLabs/prysm/v6/consensus-types/validator" - "github.com/OffchainLabs/prysm/v6/crypto/bls" - blsmock "github.com/OffchainLabs/prysm/v6/crypto/bls/common/mock" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - validatorpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1/validator-client" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" - validatormock "github.com/OffchainLabs/prysm/v6/testing/validator-mock" - "github.com/OffchainLabs/prysm/v6/validator/accounts/wallet" - "github.com/OffchainLabs/prysm/v6/validator/client/iface" - dbTest "github.com/OffchainLabs/prysm/v6/validator/db/testing" - "github.com/OffchainLabs/prysm/v6/validator/keymanager" - "github.com/OffchainLabs/prysm/v6/validator/keymanager/local" - remoteweb3signer "github.com/OffchainLabs/prysm/v6/validator/keymanager/remote-web3signer" + "github.com/OffchainLabs/prysm/v7/api/server/structs" + "github.com/OffchainLabs/prysm/v7/async/event" + "github.com/OffchainLabs/prysm/v7/cmd/validator/flags" + "github.com/OffchainLabs/prysm/v7/config/features" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/config/proposer" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + validatorType "github.com/OffchainLabs/prysm/v7/consensus-types/validator" + "github.com/OffchainLabs/prysm/v7/crypto/bls" + blsmock "github.com/OffchainLabs/prysm/v7/crypto/bls/common/mock" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + validatorpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1/validator-client" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" + validatormock "github.com/OffchainLabs/prysm/v7/testing/validator-mock" + "github.com/OffchainLabs/prysm/v7/validator/accounts/wallet" + "github.com/OffchainLabs/prysm/v7/validator/client/iface" + dbTest "github.com/OffchainLabs/prysm/v7/validator/db/testing" + "github.com/OffchainLabs/prysm/v7/validator/keymanager" + "github.com/OffchainLabs/prysm/v7/validator/keymanager/local" + remoteweb3signer "github.com/OffchainLabs/prysm/v7/validator/keymanager/remote-web3signer" "github.com/ethereum/go-ethereum/common" "github.com/ethereum/go-ethereum/common/hexutil" "github.com/golang/protobuf/ptypes/empty" diff --git a/validator/client/wait_for_activation.go b/validator/client/wait_for_activation.go index 6da846fc35..301a66c336 100644 --- a/validator/client/wait_for_activation.go +++ b/validator/client/wait_for_activation.go @@ -4,9 +4,9 @@ import ( "context" "time" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing/trace" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing/trace" + "github.com/OffchainLabs/prysm/v7/time/slots" "github.com/pkg/errors" octrace "go.opentelemetry.io/otel/trace" ) diff --git a/validator/client/wait_for_activation_test.go 
b/validator/client/wait_for_activation_test.go index c8b5a0290e..10a894ebfb 100644 --- a/validator/client/wait_for_activation_test.go +++ b/validator/client/wait_for_activation_test.go @@ -5,16 +5,16 @@ import ( "testing" "time" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - validatormock "github.com/OffchainLabs/prysm/v6/testing/validator-mock" - walletMock "github.com/OffchainLabs/prysm/v6/validator/accounts/testing" - "github.com/OffchainLabs/prysm/v6/validator/client/testutil" - "github.com/OffchainLabs/prysm/v6/validator/keymanager/derived" - constant "github.com/OffchainLabs/prysm/v6/validator/testing" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + validatormock "github.com/OffchainLabs/prysm/v7/testing/validator-mock" + walletMock "github.com/OffchainLabs/prysm/v7/validator/accounts/testing" + "github.com/OffchainLabs/prysm/v7/validator/client/testutil" + "github.com/OffchainLabs/prysm/v7/validator/keymanager/derived" + constant "github.com/OffchainLabs/prysm/v7/validator/testing" "github.com/pkg/errors" logTest "github.com/sirupsen/logrus/hooks/test" "github.com/tyler-smith/go-bip39" diff --git a/validator/db/BUILD.bazel b/validator/db/BUILD.bazel index e47530b280..ad4ae74a13 100644 --- a/validator/db/BUILD.bazel +++ b/validator/db/BUILD.bazel @@ -9,7 +9,7 @@ go_library( "migrate.go", "restore.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/validator/db", + importpath = "github.com/OffchainLabs/prysm/v7/validator/db", visibility = [ "//cmd/validator:__subpackages__", "//validator:__subpackages__", diff --git a/validator/db/alias.go b/validator/db/alias.go index 5c406cc7c4..cc78434ff8 100644 --- a/validator/db/alias.go +++ b/validator/db/alias.go @@ -1,6 +1,6 @@ package db -import "github.com/OffchainLabs/prysm/v6/validator/db/iface" +import "github.com/OffchainLabs/prysm/v7/validator/db/iface" // Database defines the necessary methods for Prysm's validator client backend which may be implemented by any // key-value or relational database in practice. 
This is the full database interface which should diff --git a/validator/db/common/BUILD.bazel b/validator/db/common/BUILD.bazel index e23fc9595c..835a77255c 100644 --- a/validator/db/common/BUILD.bazel +++ b/validator/db/common/BUILD.bazel @@ -6,7 +6,7 @@ go_library( "progress.go", "structs.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/validator/db/common", + importpath = "github.com/OffchainLabs/prysm/v7/validator/db/common", visibility = ["//visibility:public"], deps = [ "//config/fieldparams:go_default_library", diff --git a/validator/db/common/structs.go b/validator/db/common/structs.go index 6abdd56a57..14b04a335a 100644 --- a/validator/db/common/structs.go +++ b/validator/db/common/structs.go @@ -1,8 +1,8 @@ package common import ( - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" ) const FailedBlockSignLocalErr = "block rejected by local protection" diff --git a/validator/db/convert.go b/validator/db/convert.go index d05167a149..034d7620c4 100644 --- a/validator/db/convert.go +++ b/validator/db/convert.go @@ -4,14 +4,14 @@ import ( "context" "path/filepath" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/io/file" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/validator/db/common" - "github.com/OffchainLabs/prysm/v6/validator/db/filesystem" - "github.com/OffchainLabs/prysm/v6/validator/db/iface" - "github.com/OffchainLabs/prysm/v6/validator/db/kv" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/io/file" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/validator/db/common" + "github.com/OffchainLabs/prysm/v7/validator/db/filesystem" + "github.com/OffchainLabs/prysm/v7/validator/db/iface" + "github.com/OffchainLabs/prysm/v7/validator/db/kv" "github.com/pkg/errors" ) diff --git a/validator/db/convert_test.go b/validator/db/convert_test.go index 5627730dc9..059a717265 100644 --- a/validator/db/convert_test.go +++ b/validator/db/convert_test.go @@ -5,16 +5,16 @@ import ( "path/filepath" "testing" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/proposer" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/io/file" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/validator/db/common" - "github.com/OffchainLabs/prysm/v6/validator/db/filesystem" - "github.com/OffchainLabs/prysm/v6/validator/db/iface" - "github.com/OffchainLabs/prysm/v6/validator/db/kv" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/proposer" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/io/file" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/validator/db/common" + "github.com/OffchainLabs/prysm/v7/validator/db/filesystem" + 
"github.com/OffchainLabs/prysm/v7/validator/db/iface" + "github.com/OffchainLabs/prysm/v7/validator/db/kv" "github.com/ethereum/go-ethereum/common/hexutil" ) diff --git a/validator/db/filesystem/BUILD.bazel b/validator/db/filesystem/BUILD.bazel index 10d09510a9..7d5f1e81b3 100644 --- a/validator/db/filesystem/BUILD.bazel +++ b/validator/db/filesystem/BUILD.bazel @@ -12,7 +12,7 @@ go_library( "proposer_protection.go", "proposer_settings.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/validator/db/filesystem", + importpath = "github.com/OffchainLabs/prysm/v7/validator/db/filesystem", visibility = ["//visibility:public"], deps = [ "//config/fieldparams:go_default_library", diff --git a/validator/db/filesystem/attester_protection.go b/validator/db/filesystem/attester_protection.go index 15499963f2..327857ba6e 100644 --- a/validator/db/filesystem/attester_protection.go +++ b/validator/db/filesystem/attester_protection.go @@ -4,11 +4,11 @@ import ( "context" "strings" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing/trace" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/validator/db/common" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing/trace" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/validator/db/common" "github.com/pkg/errors" "github.com/prometheus/client_golang/prometheus" ) diff --git a/validator/db/filesystem/attester_protection_test.go b/validator/db/filesystem/attester_protection_test.go index e5bbf208a2..5a7c711e33 100644 --- a/validator/db/filesystem/attester_protection_test.go +++ b/validator/db/filesystem/attester_protection_test.go @@ -4,12 +4,12 @@ import ( "sync" "testing" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/crypto/bls" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/validator/db/common" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/crypto/bls" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/validator/db/common" ) func TestStore_EIPImportBlacklistedPublicKeys(t *testing.T) { diff --git a/validator/db/filesystem/db.go b/validator/db/filesystem/db.go index 22b424671f..1b39352842 100644 --- a/validator/db/filesystem/db.go +++ b/validator/db/filesystem/db.go @@ -10,10 +10,10 @@ import ( "sync" "time" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/io/file" - validatorpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1/validator-client" - "github.com/OffchainLabs/prysm/v6/validator/db/iface" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/io/file" + validatorpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1/validator-client" + "github.com/OffchainLabs/prysm/v7/validator/db/iface" 
"github.com/ethereum/go-ethereum/common/hexutil" "github.com/pkg/errors" "github.com/sirupsen/logrus" diff --git a/validator/db/filesystem/db_test.go b/validator/db/filesystem/db_test.go index 3118e5836e..6cf45c4101 100644 --- a/validator/db/filesystem/db_test.go +++ b/validator/db/filesystem/db_test.go @@ -6,11 +6,11 @@ import ( "path" "testing" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/proposer" - "github.com/OffchainLabs/prysm/v6/crypto/bls" - "github.com/OffchainLabs/prysm/v6/io/file" - "github.com/OffchainLabs/prysm/v6/testing/require" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/proposer" + "github.com/OffchainLabs/prysm/v7/crypto/bls" + "github.com/OffchainLabs/prysm/v7/io/file" + "github.com/OffchainLabs/prysm/v7/testing/require" "github.com/ethereum/go-ethereum/common" "github.com/ethereum/go-ethereum/common/hexutil" ) diff --git a/validator/db/filesystem/genesis_test.go b/validator/db/filesystem/genesis_test.go index 1e9ed81488..41404f7289 100644 --- a/validator/db/filesystem/genesis_test.go +++ b/validator/db/filesystem/genesis_test.go @@ -3,7 +3,7 @@ package filesystem import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/require" ) func TestStore_GenesisValidatorsRoot(t *testing.T) { diff --git a/validator/db/filesystem/graffiti_test.go b/validator/db/filesystem/graffiti_test.go index c8aad1a916..325afa23bb 100644 --- a/validator/db/filesystem/graffiti_test.go +++ b/validator/db/filesystem/graffiti_test.go @@ -3,8 +3,8 @@ package filesystem import ( "testing" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/testing/require" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/testing/require" ) func TestStore_SaveGraffitiOrderedIndex(t *testing.T) { diff --git a/validator/db/filesystem/import.go b/validator/db/filesystem/import.go index 6cd97d488f..c5c9c8134b 100644 --- a/validator/db/filesystem/import.go +++ b/validator/db/filesystem/import.go @@ -6,12 +6,12 @@ import ( "io" "strings" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/validator/db/common" - "github.com/OffchainLabs/prysm/v6/validator/db/iface" - "github.com/OffchainLabs/prysm/v6/validator/helpers" - "github.com/OffchainLabs/prysm/v6/validator/slashing-protection-history/format" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/validator/db/common" + "github.com/OffchainLabs/prysm/v7/validator/db/iface" + "github.com/OffchainLabs/prysm/v7/validator/helpers" + "github.com/OffchainLabs/prysm/v7/validator/slashing-protection-history/format" "github.com/ethereum/go-ethereum/common/hexutil" "github.com/pkg/errors" ) diff --git a/validator/db/filesystem/import_test.go b/validator/db/filesystem/import_test.go index d3eff6ad4b..bb50ae6247 100644 --- a/validator/db/filesystem/import_test.go +++ b/validator/db/filesystem/import_test.go @@ -5,13 +5,13 @@ import ( "encoding/json" "testing" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - 
"github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/validator/db/common" - "github.com/OffchainLabs/prysm/v6/validator/slashing-protection-history/format" - valtest "github.com/OffchainLabs/prysm/v6/validator/testing" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/validator/db/common" + "github.com/OffchainLabs/prysm/v7/validator/slashing-protection-history/format" + valtest "github.com/OffchainLabs/prysm/v7/validator/testing" ) func TestStore_ImportInterchangeData_BadJSON(t *testing.T) { diff --git a/validator/db/filesystem/migration_test.go b/validator/db/filesystem/migration_test.go index ac8f54db37..e8163bef29 100644 --- a/validator/db/filesystem/migration_test.go +++ b/validator/db/filesystem/migration_test.go @@ -3,7 +3,7 @@ package filesystem import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/require" ) func TestStore_RunUpMigrations(t *testing.T) { diff --git a/validator/db/filesystem/proposer_protection.go b/validator/db/filesystem/proposer_protection.go index ccd5a74e98..2e2ae2f767 100644 --- a/validator/db/filesystem/proposer_protection.go +++ b/validator/db/filesystem/proposer_protection.go @@ -4,10 +4,10 @@ import ( "context" "strings" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/validator/db/common" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/validator/db/common" "github.com/pkg/errors" "github.com/prometheus/client_golang/prometheus" ) diff --git a/validator/db/filesystem/proposer_protection_test.go b/validator/db/filesystem/proposer_protection_test.go index 46cbb4b716..0f66cf8fa5 100644 --- a/validator/db/filesystem/proposer_protection_test.go +++ b/validator/db/filesystem/proposer_protection_test.go @@ -3,14 +3,14 @@ package filesystem import ( "testing" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" - "github.com/OffchainLabs/prysm/v6/validator/db/common" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" + "github.com/OffchainLabs/prysm/v7/validator/db/common" ) func TestStore_ProposalHistoryForPubKey(t *testing.T) { diff --git a/validator/db/filesystem/proposer_settings.go b/validator/db/filesystem/proposer_settings.go index f4aa8c7118..04cfaa33bc 100644 --- 
a/validator/db/filesystem/proposer_settings.go +++ b/validator/db/filesystem/proposer_settings.go @@ -3,7 +3,7 @@ package filesystem import ( "context" - "github.com/OffchainLabs/prysm/v6/config/proposer" + "github.com/OffchainLabs/prysm/v7/config/proposer" "github.com/pkg/errors" ) diff --git a/validator/db/filesystem/proposer_settings_test.go b/validator/db/filesystem/proposer_settings_test.go index a9b370b527..1e3de64924 100644 --- a/validator/db/filesystem/proposer_settings_test.go +++ b/validator/db/filesystem/proposer_settings_test.go @@ -3,10 +3,10 @@ package filesystem import ( "testing" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/proposer" - validatorpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1/validator-client" - "github.com/OffchainLabs/prysm/v6/testing/require" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/proposer" + validatorpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1/validator-client" + "github.com/OffchainLabs/prysm/v7/testing/require" "github.com/ethereum/go-ethereum/common/hexutil" ) diff --git a/validator/db/iface/BUILD.bazel b/validator/db/iface/BUILD.bazel index ba7db1c14c..f2eeb226f8 100644 --- a/validator/db/iface/BUILD.bazel +++ b/validator/db/iface/BUILD.bazel @@ -3,7 +3,7 @@ load("@prysm//tools/go:def.bzl", "go_library") go_library( name = "go_default_library", srcs = ["interface.go"], - importpath = "github.com/OffchainLabs/prysm/v6/validator/db/iface", + importpath = "github.com/OffchainLabs/prysm/v7/validator/db/iface", visibility = [ "//cmd/validator/slashing-protection:__subpackages__", "//config:__subpackages__", diff --git a/validator/db/iface/interface.go b/validator/db/iface/interface.go index 25c44c3e7d..ade81a76f0 100644 --- a/validator/db/iface/interface.go +++ b/validator/db/iface/interface.go @@ -5,13 +5,13 @@ import ( "context" "io" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/proposer" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/monitoring/backup" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/validator/db/common" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/proposer" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/monitoring/backup" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/validator/db/common" "github.com/prometheus/client_golang/prometheus" ) diff --git a/validator/db/kv/BUILD.bazel b/validator/db/kv/BUILD.bazel index 8ea2c453e7..a8b6709065 100644 --- a/validator/db/kv/BUILD.bazel +++ b/validator/db/kv/BUILD.bazel @@ -20,7 +20,7 @@ go_library( "prune_attester_protection.go", "schema.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/validator/db/kv", + importpath = "github.com/OffchainLabs/prysm/v7/validator/db/kv", visibility = [ "//cmd:__subpackages__", "//validator:__subpackages__", diff --git a/validator/db/kv/attester_protection.go b/validator/db/kv/attester_protection.go index 36d96c3498..6084f0353b 100644 --- a/validator/db/kv/attester_protection.go +++ b/validator/db/kv/attester_protection.go @@ -7,14 
+7,14 @@ import ( "sync" "time" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing/trace" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1/slashings" - "github.com/OffchainLabs/prysm/v6/validator/db/common" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing/trace" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1/slashings" + "github.com/OffchainLabs/prysm/v7/validator/db/common" "github.com/pkg/errors" "github.com/prometheus/client_golang/prometheus" bolt "go.etcd.io/bbolt" diff --git a/validator/db/kv/attester_protection_test.go b/validator/db/kv/attester_protection_test.go index a44fa3fc6e..88b1040210 100644 --- a/validator/db/kv/attester_protection_test.go +++ b/validator/db/kv/attester_protection_test.go @@ -7,14 +7,14 @@ import ( "sync" "testing" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/crypto/bls" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/validator/db/common" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/crypto/bls" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/validator/db/common" logTest "github.com/sirupsen/logrus/hooks/test" bolt "go.etcd.io/bbolt" ) diff --git a/validator/db/kv/backup.go b/validator/db/kv/backup.go index a7ea5fe898..5ee96f39a5 100644 --- a/validator/db/kv/backup.go +++ b/validator/db/kv/backup.go @@ -6,9 +6,9 @@ import ( "path" "time" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/io/file" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing/trace" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/io/file" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing/trace" bolt "go.etcd.io/bbolt" ) diff --git a/validator/db/kv/backup_test.go b/validator/db/kv/backup_test.go index 8266133394..5cd76ba9a3 100644 --- a/validator/db/kv/backup_test.go +++ b/validator/db/kv/backup_test.go @@ -5,10 +5,10 @@ import ( "path/filepath" "testing" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/validator/db/common" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + ethpb 
"github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/validator/db/common" ) func TestStore_Backup(t *testing.T) { diff --git a/validator/db/kv/db.go b/validator/db/kv/db.go index a008932bcc..f1a9d7d618 100644 --- a/validator/db/kv/db.go +++ b/validator/db/kv/db.go @@ -8,13 +8,13 @@ import ( "path/filepath" "time" - "github.com/OffchainLabs/prysm/v6/async/abool" - "github.com/OffchainLabs/prysm/v6/async/event" - "github.com/OffchainLabs/prysm/v6/config/features" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/io/file" - "github.com/OffchainLabs/prysm/v6/validator/db/iface" + "github.com/OffchainLabs/prysm/v7/async/abool" + "github.com/OffchainLabs/prysm/v7/async/event" + "github.com/OffchainLabs/prysm/v7/config/features" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/io/file" + "github.com/OffchainLabs/prysm/v7/validator/db/iface" "github.com/pkg/errors" "github.com/prometheus/client_golang/prometheus" prombolt "github.com/prysmaticlabs/prombbolt" diff --git a/validator/db/kv/deprecated_attester_protection.go b/validator/db/kv/deprecated_attester_protection.go index b21723f740..ffe53d9e5e 100644 --- a/validator/db/kv/deprecated_attester_protection.go +++ b/validator/db/kv/deprecated_attester_protection.go @@ -3,10 +3,10 @@ package kv import ( "fmt" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" ) const ( diff --git a/validator/db/kv/deprecated_attester_protection_test.go b/validator/db/kv/deprecated_attester_protection_test.go index cdf31e498b..028eb70479 100644 --- a/validator/db/kv/deprecated_attester_protection_test.go +++ b/validator/db/kv/deprecated_attester_protection_test.go @@ -3,11 +3,11 @@ package kv import ( "testing" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" ) func TestNewAttestationHistoryArray(t *testing.T) { diff --git a/validator/db/kv/eip_blacklisted_keys.go b/validator/db/kv/eip_blacklisted_keys.go index c10deeaaf3..a013ee3607 100644 --- a/validator/db/kv/eip_blacklisted_keys.go +++ b/validator/db/kv/eip_blacklisted_keys.go @@ -3,8 +3,8 @@ package kv import ( "context" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing/trace" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing/trace" bolt "go.etcd.io/bbolt" ) 
diff --git a/validator/db/kv/eip_blacklisted_keys_test.go b/validator/db/kv/eip_blacklisted_keys_test.go index 634f4f296a..d006da34a2 100644 --- a/validator/db/kv/eip_blacklisted_keys_test.go +++ b/validator/db/kv/eip_blacklisted_keys_test.go @@ -4,9 +4,9 @@ import ( "fmt" "testing" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" ) func TestStore_EIPBlacklistedPublicKeys(t *testing.T) { diff --git a/validator/db/kv/genesis_test.go b/validator/db/kv/genesis_test.go index 7e46e2cc61..7b4211da56 100644 --- a/validator/db/kv/genesis_test.go +++ b/validator/db/kv/genesis_test.go @@ -3,9 +3,9 @@ package kv import ( "testing" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/testing/require" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/testing/require" ) func TestStore_GenesisValidatorsRoot_ReadAndWrite(t *testing.T) { diff --git a/validator/db/kv/graffiti.go b/validator/db/kv/graffiti.go index 6a131d417b..cf61e2eab0 100644 --- a/validator/db/kv/graffiti.go +++ b/validator/db/kv/graffiti.go @@ -4,7 +4,7 @@ import ( "bytes" "context" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" bolt "go.etcd.io/bbolt" ) diff --git a/validator/db/kv/graffiti_test.go b/validator/db/kv/graffiti_test.go index 53c0a05428..0ebf43219d 100644 --- a/validator/db/kv/graffiti_test.go +++ b/validator/db/kv/graffiti_test.go @@ -3,9 +3,9 @@ package kv import ( "testing" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/crypto/hash" - "github.com/OffchainLabs/prysm/v6/testing/require" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/crypto/hash" + "github.com/OffchainLabs/prysm/v7/testing/require" ) func TestStore_GraffitiOrderedIndex_ReadAndWrite(t *testing.T) { diff --git a/validator/db/kv/import.go b/validator/db/kv/import.go index 2fad216a18..d290e5e6c4 100644 --- a/validator/db/kv/import.go +++ b/validator/db/kv/import.go @@ -7,15 +7,15 @@ import ( "fmt" "io" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1/slashings" - "github.com/OffchainLabs/prysm/v6/validator/db/common" - "github.com/OffchainLabs/prysm/v6/validator/db/iface" - "github.com/OffchainLabs/prysm/v6/validator/helpers" - "github.com/OffchainLabs/prysm/v6/validator/slashing-protection-history/format" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1/slashings" + "github.com/OffchainLabs/prysm/v7/validator/db/common" + "github.com/OffchainLabs/prysm/v7/validator/db/iface" + 
"github.com/OffchainLabs/prysm/v7/validator/helpers" + "github.com/OffchainLabs/prysm/v7/validator/slashing-protection-history/format" "github.com/pkg/errors" ) diff --git a/validator/db/kv/import_test.go b/validator/db/kv/import_test.go index 12c26beb18..d9a586c7f3 100644 --- a/validator/db/kv/import_test.go +++ b/validator/db/kv/import_test.go @@ -7,14 +7,14 @@ import ( "reflect" "testing" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/validator/db/common" - "github.com/OffchainLabs/prysm/v6/validator/slashing-protection-history/format" - valtest "github.com/OffchainLabs/prysm/v6/validator/testing" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/validator/db/common" + "github.com/OffchainLabs/prysm/v7/validator/slashing-protection-history/format" + valtest "github.com/OffchainLabs/prysm/v7/validator/testing" logTest "github.com/sirupsen/logrus/hooks/test" ) diff --git a/validator/db/kv/kv_test.go b/validator/db/kv/kv_test.go index 79b8e9010d..013d52b50f 100644 --- a/validator/db/kv/kv_test.go +++ b/validator/db/kv/kv_test.go @@ -5,8 +5,8 @@ import ( "os" "testing" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/testing/require" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/testing/require" "github.com/sirupsen/logrus" ) diff --git a/validator/db/kv/migration_optimal_attester_protection.go b/validator/db/kv/migration_optimal_attester_protection.go index 2552468d34..255cc5881b 100644 --- a/validator/db/kv/migration_optimal_attester_protection.go +++ b/validator/db/kv/migration_optimal_attester_protection.go @@ -4,11 +4,11 @@ import ( "bytes" "context" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/monitoring/progress" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/monitoring/progress" bolt "go.etcd.io/bbolt" ) diff --git a/validator/db/kv/migration_optimal_attester_protection_test.go b/validator/db/kv/migration_optimal_attester_protection_test.go index 3791f8ef50..c478b7893e 100644 --- a/validator/db/kv/migration_optimal_attester_protection_test.go +++ b/validator/db/kv/migration_optimal_attester_protection_test.go @@ -4,10 +4,10 @@ import ( "fmt" "testing" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/testing/require" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + 
"github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/testing/require" bolt "go.etcd.io/bbolt" ) diff --git a/validator/db/kv/migration_source_target_epochs_bucket.go b/validator/db/kv/migration_source_target_epochs_bucket.go index 7a4841ca47..cd855b1049 100644 --- a/validator/db/kv/migration_source_target_epochs_bucket.go +++ b/validator/db/kv/migration_source_target_epochs_bucket.go @@ -4,7 +4,7 @@ import ( "bytes" "context" - "github.com/OffchainLabs/prysm/v6/monitoring/progress" + "github.com/OffchainLabs/prysm/v7/monitoring/progress" bolt "go.etcd.io/bbolt" ) diff --git a/validator/db/kv/migration_source_target_epochs_bucket_test.go b/validator/db/kv/migration_source_target_epochs_bucket_test.go index d738c71507..a10a9749a2 100644 --- a/validator/db/kv/migration_source_target_epochs_bucket_test.go +++ b/validator/db/kv/migration_source_target_epochs_bucket_test.go @@ -6,9 +6,9 @@ import ( "reflect" "testing" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/testing/require" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/testing/require" bolt "go.etcd.io/bbolt" ) diff --git a/validator/db/kv/proposer_protection.go b/validator/db/kv/proposer_protection.go index 0018e63b13..9e0187d7ce 100644 --- a/validator/db/kv/proposer_protection.go +++ b/validator/db/kv/proposer_protection.go @@ -4,14 +4,14 @@ import ( "context" "fmt" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing/trace" - "github.com/OffchainLabs/prysm/v6/time/slots" - "github.com/OffchainLabs/prysm/v6/validator/db/common" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing/trace" + "github.com/OffchainLabs/prysm/v7/time/slots" + "github.com/OffchainLabs/prysm/v7/validator/db/common" "github.com/pkg/errors" "github.com/prometheus/client_golang/prometheus" bolt "go.etcd.io/bbolt" diff --git a/validator/db/kv/proposer_protection_test.go b/validator/db/kv/proposer_protection_test.go index 48a9bc3084..c5931f7bd1 100644 --- a/validator/db/kv/proposer_protection_test.go +++ b/validator/db/kv/proposer_protection_test.go @@ -3,16 +3,16 @@ package kv import ( "testing" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/testing/util" - 
"github.com/OffchainLabs/prysm/v6/validator/db/common" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" + "github.com/OffchainLabs/prysm/v7/validator/db/common" "github.com/ethereum/go-ethereum/common/hexutil" ) diff --git a/validator/db/kv/proposer_settings.go b/validator/db/kv/proposer_settings.go index 7a45a6beb3..2cffaabcab 100644 --- a/validator/db/kv/proposer_settings.go +++ b/validator/db/kv/proposer_settings.go @@ -3,9 +3,9 @@ package kv import ( "context" - "github.com/OffchainLabs/prysm/v6/config/proposer" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing/trace" - validatorpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1/validator-client" + "github.com/OffchainLabs/prysm/v7/config/proposer" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing/trace" + validatorpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1/validator-client" "github.com/pkg/errors" bolt "go.etcd.io/bbolt" "google.golang.org/protobuf/proto" diff --git a/validator/db/kv/proposer_settings_test.go b/validator/db/kv/proposer_settings_test.go index 118ee61e20..b32a3d8726 100644 --- a/validator/db/kv/proposer_settings_test.go +++ b/validator/db/kv/proposer_settings_test.go @@ -3,12 +3,12 @@ package kv import ( "testing" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/config/proposer" - "github.com/OffchainLabs/prysm/v6/consensus-types/validator" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/testing/require" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/config/proposer" + "github.com/OffchainLabs/prysm/v7/consensus-types/validator" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/testing/require" "github.com/ethereum/go-ethereum/common" "github.com/ethereum/go-ethereum/common/hexutil" ) diff --git a/validator/db/kv/prune_attester_protection.go b/validator/db/kv/prune_attester_protection.go index 826fa2feb5..307e4b0e70 100644 --- a/validator/db/kv/prune_attester_protection.go +++ b/validator/db/kv/prune_attester_protection.go @@ -3,10 +3,10 @@ package kv import ( "context" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing/trace" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing/trace" bolt "go.etcd.io/bbolt" ) diff --git a/validator/db/kv/prune_attester_protection_test.go b/validator/db/kv/prune_attester_protection_test.go index e5b79e96bf..83dbf369c8 100644 --- a/validator/db/kv/prune_attester_protection_test.go +++ b/validator/db/kv/prune_attester_protection_test.go @@ -4,11 +4,11 @@ import 
( "fmt" "testing" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/testing/require" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/testing/require" bolt "go.etcd.io/bbolt" ) diff --git a/validator/db/migrate.go b/validator/db/migrate.go index 47849f423e..5c218d6541 100644 --- a/validator/db/migrate.go +++ b/validator/db/migrate.go @@ -4,9 +4,9 @@ import ( "context" "path" - "github.com/OffchainLabs/prysm/v6/cmd" - "github.com/OffchainLabs/prysm/v6/io/file" - "github.com/OffchainLabs/prysm/v6/validator/db/kv" + "github.com/OffchainLabs/prysm/v7/cmd" + "github.com/OffchainLabs/prysm/v7/io/file" + "github.com/OffchainLabs/prysm/v7/validator/db/kv" "github.com/pkg/errors" "github.com/urfave/cli/v2" ) diff --git a/validator/db/migrate_test.go b/validator/db/migrate_test.go index f560b30f52..5cebcd5175 100644 --- a/validator/db/migrate_test.go +++ b/validator/db/migrate_test.go @@ -4,10 +4,10 @@ import ( "flag" "testing" - "github.com/OffchainLabs/prysm/v6/cmd" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - dbtest "github.com/OffchainLabs/prysm/v6/validator/db/testing" + "github.com/OffchainLabs/prysm/v7/cmd" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + dbtest "github.com/OffchainLabs/prysm/v7/validator/db/testing" "github.com/urfave/cli/v2" ) diff --git a/validator/db/restore.go b/validator/db/restore.go index 174fc7c62c..19f37099d7 100644 --- a/validator/db/restore.go +++ b/validator/db/restore.go @@ -5,10 +5,10 @@ import ( "path" "strings" - "github.com/OffchainLabs/prysm/v6/cmd" - "github.com/OffchainLabs/prysm/v6/io/file" - "github.com/OffchainLabs/prysm/v6/io/prompt" - "github.com/OffchainLabs/prysm/v6/validator/db/kv" + "github.com/OffchainLabs/prysm/v7/cmd" + "github.com/OffchainLabs/prysm/v7/io/file" + "github.com/OffchainLabs/prysm/v7/io/prompt" + "github.com/OffchainLabs/prysm/v7/validator/db/kv" "github.com/pkg/errors" "github.com/urfave/cli/v2" ) diff --git a/validator/db/restore_test.go b/validator/db/restore_test.go index 799056f984..eec78e093c 100644 --- a/validator/db/restore_test.go +++ b/validator/db/restore_test.go @@ -6,11 +6,11 @@ import ( "path" "testing" - "github.com/OffchainLabs/prysm/v6/cmd" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/validator/db/kv" + "github.com/OffchainLabs/prysm/v7/cmd" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/validator/db/kv" logTest "github.com/sirupsen/logrus/hooks/test" "github.com/urfave/cli/v2" ) diff --git a/validator/db/testing/BUILD.bazel b/validator/db/testing/BUILD.bazel index dbd961a9c6..55550465dc 100644 --- a/validator/db/testing/BUILD.bazel +++ b/validator/db/testing/BUILD.bazel @@ -3,7 +3,7 @@ load("@prysm//tools/go:def.bzl", "go_library", "go_test") go_library( name = 
"go_default_library", srcs = ["setup_db.go"], - importpath = "github.com/OffchainLabs/prysm/v6/validator/db/testing", + importpath = "github.com/OffchainLabs/prysm/v7/validator/db/testing", visibility = [ "//cmd:__subpackages__", "//config:__subpackages__", diff --git a/validator/db/testing/setup_db.go b/validator/db/testing/setup_db.go index 072e474eb0..d2dcc0691b 100644 --- a/validator/db/testing/setup_db.go +++ b/validator/db/testing/setup_db.go @@ -3,10 +3,10 @@ package testing import ( "testing" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/validator/db/filesystem" - "github.com/OffchainLabs/prysm/v6/validator/db/iface" - "github.com/OffchainLabs/prysm/v6/validator/db/kv" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/validator/db/filesystem" + "github.com/OffchainLabs/prysm/v7/validator/db/iface" + "github.com/OffchainLabs/prysm/v7/validator/db/kv" ) // SetupDB instantiates and returns a DB instance for the validator client. diff --git a/validator/db/testing/setup_db_test.go b/validator/db/testing/setup_db_test.go index b4de0530bd..5cc03ae8f9 100644 --- a/validator/db/testing/setup_db_test.go +++ b/validator/db/testing/setup_db_test.go @@ -5,11 +5,11 @@ import ( "path/filepath" "testing" - "github.com/OffchainLabs/prysm/v6/io/file" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/validator/db/filesystem" - "github.com/OffchainLabs/prysm/v6/validator/db/iface" - "github.com/OffchainLabs/prysm/v6/validator/db/kv" + "github.com/OffchainLabs/prysm/v7/io/file" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/validator/db/filesystem" + "github.com/OffchainLabs/prysm/v7/validator/db/iface" + "github.com/OffchainLabs/prysm/v7/validator/db/kv" ) func TestClearDB(t *testing.T) { diff --git a/validator/graffiti/BUILD.bazel b/validator/graffiti/BUILD.bazel index 7d06dc7d23..80e91f34bd 100644 --- a/validator/graffiti/BUILD.bazel +++ b/validator/graffiti/BUILD.bazel @@ -6,7 +6,7 @@ go_library( "log.go", "parse_graffiti.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/validator/graffiti", + importpath = "github.com/OffchainLabs/prysm/v7/validator/graffiti", visibility = ["//validator:__subpackages__"], deps = [ "//consensus-types/primitives:go_default_library", diff --git a/validator/graffiti/parse_graffiti.go b/validator/graffiti/parse_graffiti.go index 289705a341..25136faa3f 100644 --- a/validator/graffiti/parse_graffiti.go +++ b/validator/graffiti/parse_graffiti.go @@ -5,8 +5,8 @@ import ( "os" "strings" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/crypto/hash" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/crypto/hash" "github.com/pkg/errors" "gopkg.in/yaml.v2" ) diff --git a/validator/graffiti/parse_graffiti_test.go b/validator/graffiti/parse_graffiti_test.go index f66d550802..db9d03f369 100644 --- a/validator/graffiti/parse_graffiti_test.go +++ b/validator/graffiti/parse_graffiti_test.go @@ -5,10 +5,10 @@ import ( "path/filepath" "testing" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/crypto/hash" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/crypto/hash" + 
"github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" ) func TestParseGraffitiFile_Default(t *testing.T) { diff --git a/validator/helpers/BUILD.bazel b/validator/helpers/BUILD.bazel index 4937c20a59..237f6c6673 100644 --- a/validator/helpers/BUILD.bazel +++ b/validator/helpers/BUILD.bazel @@ -7,7 +7,7 @@ go_library( "metadata.go", "node_connection.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/validator/helpers", + importpath = "github.com/OffchainLabs/prysm/v7/validator/helpers", visibility = ["//visibility:public"], deps = [ "//config/fieldparams:go_default_library", diff --git a/validator/helpers/converts.go b/validator/helpers/converts.go index 4e5a8d4d25..1a36fade63 100644 --- a/validator/helpers/converts.go +++ b/validator/helpers/converts.go @@ -6,8 +6,8 @@ import ( "strconv" "strings" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" ) // Uint64FromString converts a string into a uint64 representation. diff --git a/validator/helpers/converts_test.go b/validator/helpers/converts_test.go index 208b7e4178..7bf3443e21 100644 --- a/validator/helpers/converts_test.go +++ b/validator/helpers/converts_test.go @@ -6,8 +6,8 @@ import ( "reflect" "testing" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" ) func Test_fromString(t *testing.T) { diff --git a/validator/helpers/metadata.go b/validator/helpers/metadata.go index c35319a4af..63affd1e70 100644 --- a/validator/helpers/metadata.go +++ b/validator/helpers/metadata.go @@ -5,8 +5,8 @@ import ( "context" "fmt" - "github.com/OffchainLabs/prysm/v6/validator/db/iface" - "github.com/OffchainLabs/prysm/v6/validator/slashing-protection-history/format" + "github.com/OffchainLabs/prysm/v7/validator/db/iface" + "github.com/OffchainLabs/prysm/v7/validator/slashing-protection-history/format" "github.com/pkg/errors" ) diff --git a/validator/helpers/metadata_test.go b/validator/helpers/metadata_test.go index 0cb1169675..5543b8e363 100644 --- a/validator/helpers/metadata_test.go +++ b/validator/helpers/metadata_test.go @@ -6,15 +6,15 @@ import ( "io" "testing" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/proposer" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/validator/db/common" - "github.com/OffchainLabs/prysm/v6/validator/db/iface" - "github.com/OffchainLabs/prysm/v6/validator/slashing-protection-history/format" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/proposer" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/validator/db/common" + 
"github.com/OffchainLabs/prysm/v7/validator/db/iface" + "github.com/OffchainLabs/prysm/v7/validator/slashing-protection-history/format" "github.com/prometheus/client_golang/prometheus" ) diff --git a/validator/keymanager/BUILD.bazel b/validator/keymanager/BUILD.bazel index 9f10806445..650a074d3e 100644 --- a/validator/keymanager/BUILD.bazel +++ b/validator/keymanager/BUILD.bazel @@ -6,7 +6,7 @@ go_library( "constants.go", "types.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/validator/keymanager", + importpath = "github.com/OffchainLabs/prysm/v7/validator/keymanager", visibility = [ "//visibility:public", ], diff --git a/validator/keymanager/derived/BUILD.bazel b/validator/keymanager/derived/BUILD.bazel index 84fb096ed3..d30b6d1ae3 100644 --- a/validator/keymanager/derived/BUILD.bazel +++ b/validator/keymanager/derived/BUILD.bazel @@ -7,7 +7,7 @@ go_library( "log.go", "mnemonic.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/validator/keymanager/derived", + importpath = "github.com/OffchainLabs/prysm/v7/validator/keymanager/derived", visibility = [ "//cmd/validator:__subpackages__", "//tools:__subpackages__", diff --git a/validator/keymanager/derived/eip_test.go b/validator/keymanager/derived/eip_test.go index 4e398ab35b..0fd7a96038 100644 --- a/validator/keymanager/derived/eip_test.go +++ b/validator/keymanager/derived/eip_test.go @@ -6,9 +6,9 @@ import ( "math/big" "testing" - "github.com/OffchainLabs/prysm/v6/crypto/bls" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/crypto/bls" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" "github.com/pkg/errors" util "github.com/wealdtech/go-eth2-util" ) diff --git a/validator/keymanager/derived/keymanager.go b/validator/keymanager/derived/keymanager.go index f797150891..a912996880 100644 --- a/validator/keymanager/derived/keymanager.go +++ b/validator/keymanager/derived/keymanager.go @@ -4,13 +4,13 @@ import ( "context" "fmt" - "github.com/OffchainLabs/prysm/v6/async/event" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/crypto/bls" - validatorpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1/validator-client" - "github.com/OffchainLabs/prysm/v6/validator/accounts/iface" - "github.com/OffchainLabs/prysm/v6/validator/keymanager" - "github.com/OffchainLabs/prysm/v6/validator/keymanager/local" + "github.com/OffchainLabs/prysm/v7/async/event" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/crypto/bls" + validatorpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1/validator-client" + "github.com/OffchainLabs/prysm/v7/validator/accounts/iface" + "github.com/OffchainLabs/prysm/v7/validator/keymanager" + "github.com/OffchainLabs/prysm/v7/validator/keymanager/local" "github.com/logrusorgru/aurora" "github.com/pkg/errors" util "github.com/wealdtech/go-eth2-util" diff --git a/validator/keymanager/derived/keymanager_test.go b/validator/keymanager/derived/keymanager_test.go index a6d6c88533..94342cd5ff 100644 --- a/validator/keymanager/derived/keymanager_test.go +++ b/validator/keymanager/derived/keymanager_test.go @@ -4,14 +4,14 @@ import ( "fmt" "testing" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/crypto/bls" - "github.com/OffchainLabs/prysm/v6/crypto/rand" - validatorpb 
"github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1/validator-client" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - mock "github.com/OffchainLabs/prysm/v6/validator/accounts/testing" - constant "github.com/OffchainLabs/prysm/v6/validator/testing" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/crypto/bls" + "github.com/OffchainLabs/prysm/v7/crypto/rand" + validatorpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1/validator-client" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + mock "github.com/OffchainLabs/prysm/v7/validator/accounts/testing" + constant "github.com/OffchainLabs/prysm/v7/validator/testing" "github.com/tyler-smith/go-bip39" util "github.com/wealdtech/go-eth2-util" ) diff --git a/validator/keymanager/derived/mnemonic.go b/validator/keymanager/derived/mnemonic.go index fd4197f6fb..6e9f22d960 100644 --- a/validator/keymanager/derived/mnemonic.go +++ b/validator/keymanager/derived/mnemonic.go @@ -4,8 +4,8 @@ import ( "fmt" "os" - "github.com/OffchainLabs/prysm/v6/crypto/rand" - "github.com/OffchainLabs/prysm/v6/io/prompt" + "github.com/OffchainLabs/prysm/v7/crypto/rand" + "github.com/OffchainLabs/prysm/v7/io/prompt" "github.com/pkg/errors" "github.com/tyler-smith/go-bip39" "github.com/tyler-smith/go-bip39/wordlists" diff --git a/validator/keymanager/derived/mnemonic_test.go b/validator/keymanager/derived/mnemonic_test.go index f0a85f14ef..0f60e0976d 100644 --- a/validator/keymanager/derived/mnemonic_test.go +++ b/validator/keymanager/derived/mnemonic_test.go @@ -3,8 +3,8 @@ package derived import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" "github.com/pkg/errors" "github.com/tyler-smith/go-bip39" "github.com/tyler-smith/go-bip39/wordlists" diff --git a/validator/keymanager/local/BUILD.bazel b/validator/keymanager/local/BUILD.bazel index b8af4cb43e..7bb87c8388 100644 --- a/validator/keymanager/local/BUILD.bazel +++ b/validator/keymanager/local/BUILD.bazel @@ -12,7 +12,7 @@ go_library( "log.go", "refresh.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/validator/keymanager/local", + importpath = "github.com/OffchainLabs/prysm/v7/validator/keymanager/local", visibility = [ "//cmd/validator:__subpackages__", "//tools:__subpackages__", diff --git a/validator/keymanager/local/backup.go b/validator/keymanager/local/backup.go index 5b2390d173..d4d6314498 100644 --- a/validator/keymanager/local/backup.go +++ b/validator/keymanager/local/backup.go @@ -4,9 +4,9 @@ import ( "context" "fmt" - "github.com/OffchainLabs/prysm/v6/crypto/bls" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/validator/keymanager" + "github.com/OffchainLabs/prysm/v7/crypto/bls" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/validator/keymanager" "github.com/google/uuid" "github.com/pkg/errors" keystorev4 "github.com/wealdtech/go-eth2-wallet-encryptor-keystorev4" diff --git a/validator/keymanager/local/backup_test.go b/validator/keymanager/local/backup_test.go index 3561111cd7..3e9f6a9fc7 100644 --- a/validator/keymanager/local/backup_test.go +++ b/validator/keymanager/local/backup_test.go @@ -4,11 +4,11 @@ import ( "encoding/hex" "testing" - fieldparams 
"github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/crypto/bls" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/crypto/bls" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" ) func TestLocalKeymanager_ExtractKeystores(t *testing.T) { diff --git a/validator/keymanager/local/delete.go b/validator/keymanager/local/delete.go index 616312d6fc..2378ba672e 100644 --- a/validator/keymanager/local/delete.go +++ b/validator/keymanager/local/delete.go @@ -4,9 +4,9 @@ import ( "bytes" "context" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/validator/keymanager" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/validator/keymanager" "github.com/sirupsen/logrus" ) diff --git a/validator/keymanager/local/delete_test.go b/validator/keymanager/local/delete_test.go index 5e84e22102..7be7117ccb 100644 --- a/validator/keymanager/local/delete_test.go +++ b/validator/keymanager/local/delete_test.go @@ -6,11 +6,11 @@ import ( "strings" "testing" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/testing/require" - mock "github.com/OffchainLabs/prysm/v6/validator/accounts/testing" - "github.com/OffchainLabs/prysm/v6/validator/keymanager" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/testing/require" + mock "github.com/OffchainLabs/prysm/v7/validator/accounts/testing" + "github.com/OffchainLabs/prysm/v7/validator/keymanager" logTest "github.com/sirupsen/logrus/hooks/test" keystorev4 "github.com/wealdtech/go-eth2-wallet-encryptor-keystorev4" ) diff --git a/validator/keymanager/local/import.go b/validator/keymanager/local/import.go index 19a13e1b51..f5d3dcf0a1 100644 --- a/validator/keymanager/local/import.go +++ b/validator/keymanager/local/import.go @@ -6,8 +6,8 @@ import ( "fmt" "strings" - "github.com/OffchainLabs/prysm/v6/crypto/bls" - "github.com/OffchainLabs/prysm/v6/validator/keymanager" + "github.com/OffchainLabs/prysm/v7/crypto/bls" + "github.com/OffchainLabs/prysm/v7/validator/keymanager" "github.com/k0kubun/go-ansi" "github.com/pkg/errors" "github.com/schollz/progressbar/v3" diff --git a/validator/keymanager/local/import_test.go b/validator/keymanager/local/import_test.go index c8485af0d3..f81fca5e0e 100644 --- a/validator/keymanager/local/import_test.go +++ b/validator/keymanager/local/import_test.go @@ -5,12 +5,12 @@ import ( "strconv" "testing" - "github.com/OffchainLabs/prysm/v6/crypto/bls" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - mock "github.com/OffchainLabs/prysm/v6/validator/accounts/testing" - "github.com/OffchainLabs/prysm/v6/validator/keymanager" + "github.com/OffchainLabs/prysm/v7/crypto/bls" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + 
"github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + mock "github.com/OffchainLabs/prysm/v7/validator/accounts/testing" + "github.com/OffchainLabs/prysm/v7/validator/keymanager" "github.com/ethereum/go-ethereum/common/hexutil" "github.com/google/uuid" logTest "github.com/sirupsen/logrus/hooks/test" diff --git a/validator/keymanager/local/keymanager.go b/validator/keymanager/local/keymanager.go index 8c6ea90cbf..f0f06d6ddd 100644 --- a/validator/keymanager/local/keymanager.go +++ b/validator/keymanager/local/keymanager.go @@ -8,16 +8,16 @@ import ( "strings" "sync" - "github.com/OffchainLabs/prysm/v6/async/event" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/crypto/bls" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing/trace" - validatorpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1/validator-client" - "github.com/OffchainLabs/prysm/v6/runtime/interop" - "github.com/OffchainLabs/prysm/v6/validator/accounts/iface" - "github.com/OffchainLabs/prysm/v6/validator/accounts/petnames" - "github.com/OffchainLabs/prysm/v6/validator/keymanager" + "github.com/OffchainLabs/prysm/v7/async/event" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/crypto/bls" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing/trace" + validatorpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1/validator-client" + "github.com/OffchainLabs/prysm/v7/runtime/interop" + "github.com/OffchainLabs/prysm/v7/validator/accounts/iface" + "github.com/OffchainLabs/prysm/v7/validator/accounts/petnames" + "github.com/OffchainLabs/prysm/v7/validator/keymanager" "github.com/google/uuid" "github.com/logrusorgru/aurora" "github.com/pkg/errors" diff --git a/validator/keymanager/local/keymanager_test.go b/validator/keymanager/local/keymanager_test.go index 186ea4dd52..aa83842a27 100644 --- a/validator/keymanager/local/keymanager_test.go +++ b/validator/keymanager/local/keymanager_test.go @@ -5,14 +5,14 @@ import ( "strings" "testing" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/crypto/bls" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - validatorpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1/validator-client" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - mock "github.com/OffchainLabs/prysm/v6/validator/accounts/testing" - "github.com/OffchainLabs/prysm/v6/validator/keymanager" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/crypto/bls" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + validatorpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1/validator-client" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + mock "github.com/OffchainLabs/prysm/v7/validator/accounts/testing" + "github.com/OffchainLabs/prysm/v7/validator/keymanager" keystorev4 "github.com/wealdtech/go-eth2-wallet-encryptor-keystorev4" ) diff --git a/validator/keymanager/local/refresh.go b/validator/keymanager/local/refresh.go index 533e9560b2..5b656826fa 100644 --- a/validator/keymanager/local/refresh.go +++ b/validator/keymanager/local/refresh.go @@ -6,13 +6,13 @@ import ( "os" "path/filepath" - 
"github.com/OffchainLabs/prysm/v6/async" - "github.com/OffchainLabs/prysm/v6/config/features" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/crypto/bls" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/io/file" - "github.com/OffchainLabs/prysm/v6/validator/keymanager" + "github.com/OffchainLabs/prysm/v7/async" + "github.com/OffchainLabs/prysm/v7/config/features" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/crypto/bls" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/io/file" + "github.com/OffchainLabs/prysm/v7/validator/keymanager" "github.com/fsnotify/fsnotify" "github.com/pkg/errors" keystorev4 "github.com/wealdtech/go-eth2-wallet-encryptor-keystorev4" diff --git a/validator/keymanager/local/refresh_test.go b/validator/keymanager/local/refresh_test.go index 5e2a99b588..a4184a6208 100644 --- a/validator/keymanager/local/refresh_test.go +++ b/validator/keymanager/local/refresh_test.go @@ -4,12 +4,12 @@ import ( "encoding/json" "testing" - "github.com/OffchainLabs/prysm/v6/async/event" - "github.com/OffchainLabs/prysm/v6/crypto/bls" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - mock "github.com/OffchainLabs/prysm/v6/validator/accounts/testing" + "github.com/OffchainLabs/prysm/v7/async/event" + "github.com/OffchainLabs/prysm/v7/crypto/bls" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + mock "github.com/OffchainLabs/prysm/v7/validator/accounts/testing" "github.com/google/uuid" keystorev4 "github.com/wealdtech/go-eth2-wallet-encryptor-keystorev4" ) diff --git a/validator/keymanager/remote-web3signer/BUILD.bazel b/validator/keymanager/remote-web3signer/BUILD.bazel index 46119f746e..f8f53839fc 100644 --- a/validator/keymanager/remote-web3signer/BUILD.bazel +++ b/validator/keymanager/remote-web3signer/BUILD.bazel @@ -7,7 +7,7 @@ go_library( "log.go", "metrics.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/validator/keymanager/remote-web3signer", + importpath = "github.com/OffchainLabs/prysm/v7/validator/keymanager/remote-web3signer", visibility = [ "//cmd/validator:__subpackages__", "//validator:__subpackages__", diff --git a/validator/keymanager/remote-web3signer/internal/BUILD.bazel b/validator/keymanager/remote-web3signer/internal/BUILD.bazel index dddb6c9253..c89e136ac9 100644 --- a/validator/keymanager/remote-web3signer/internal/BUILD.bazel +++ b/validator/keymanager/remote-web3signer/internal/BUILD.bazel @@ -7,7 +7,7 @@ go_library( "log.go", "metrics.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/validator/keymanager/remote-web3signer/internal", + importpath = "github.com/OffchainLabs/prysm/v7/validator/keymanager/remote-web3signer/internal", visibility = ["//validator/keymanager/remote-web3signer:__subpackages__"], deps = [ "//config/fieldparams:go_default_library", diff --git a/validator/keymanager/remote-web3signer/internal/client.go b/validator/keymanager/remote-web3signer/internal/client.go index f4e4151529..e6a4675c2d 100644 --- a/validator/keymanager/remote-web3signer/internal/client.go +++ b/validator/keymanager/remote-web3signer/internal/client.go @@ -13,10 +13,10 @@ import ( "strings" "time" - fieldparams 
"github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/crypto/bls" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing/trace" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/crypto/bls" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing/trace" "github.com/ethereum/go-ethereum/common/hexutil" "github.com/pkg/errors" "github.com/sirupsen/logrus" diff --git a/validator/keymanager/remote-web3signer/internal/client_test.go b/validator/keymanager/remote-web3signer/internal/client_test.go index 475b382084..0e91317460 100644 --- a/validator/keymanager/remote-web3signer/internal/client_test.go +++ b/validator/keymanager/remote-web3signer/internal/client_test.go @@ -9,8 +9,8 @@ import ( "net/url" "testing" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/validator/keymanager/remote-web3signer/internal" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/validator/keymanager/remote-web3signer/internal" "github.com/ethereum/go-ethereum/common/hexutil" "github.com/stretchr/testify/assert" ) diff --git a/validator/keymanager/remote-web3signer/keymanager.go b/validator/keymanager/remote-web3signer/keymanager.go index ca6a7cbfde..b68007a43a 100644 --- a/validator/keymanager/remote-web3signer/keymanager.go +++ b/validator/keymanager/remote-web3signer/keymanager.go @@ -13,18 +13,18 @@ import ( "sync" "time" - "github.com/OffchainLabs/prysm/v6/async/event" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/crypto/bls" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/io/file" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing/trace" - validatorpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1/validator-client" - "github.com/OffchainLabs/prysm/v6/time/slots" - "github.com/OffchainLabs/prysm/v6/validator/accounts/petnames" - "github.com/OffchainLabs/prysm/v6/validator/keymanager" - "github.com/OffchainLabs/prysm/v6/validator/keymanager/remote-web3signer/internal" - "github.com/OffchainLabs/prysm/v6/validator/keymanager/remote-web3signer/types" + "github.com/OffchainLabs/prysm/v7/async/event" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/crypto/bls" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/io/file" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing/trace" + validatorpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1/validator-client" + "github.com/OffchainLabs/prysm/v7/time/slots" + "github.com/OffchainLabs/prysm/v7/validator/accounts/petnames" + "github.com/OffchainLabs/prysm/v7/validator/keymanager" + "github.com/OffchainLabs/prysm/v7/validator/keymanager/remote-web3signer/internal" + "github.com/OffchainLabs/prysm/v7/validator/keymanager/remote-web3signer/types" "github.com/ethereum/go-ethereum/common/hexutil" "github.com/fsnotify/fsnotify" "github.com/go-playground/validator/v10" diff --git a/validator/keymanager/remote-web3signer/keymanager_test.go b/validator/keymanager/remote-web3signer/keymanager_test.go index fc6fdfd04d..d27b4e984b 100644 --- a/validator/keymanager/remote-web3signer/keymanager_test.go +++ b/validator/keymanager/remote-web3signer/keymanager_test.go @@ -14,14 +14,14 @@ import ( 
"testing" "time" - "github.com/OffchainLabs/prysm/v6/crypto/bls" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/io/file" - validatorpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1/validator-client" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/validator/keymanager" - "github.com/OffchainLabs/prysm/v6/validator/keymanager/remote-web3signer/internal" - "github.com/OffchainLabs/prysm/v6/validator/keymanager/remote-web3signer/types/mock" + "github.com/OffchainLabs/prysm/v7/crypto/bls" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/io/file" + validatorpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1/validator-client" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/validator/keymanager" + "github.com/OffchainLabs/prysm/v7/validator/keymanager/remote-web3signer/internal" + "github.com/OffchainLabs/prysm/v7/validator/keymanager/remote-web3signer/types/mock" "github.com/ethereum/go-ethereum/common/hexutil" logTest "github.com/sirupsen/logrus/hooks/test" "github.com/stretchr/testify/assert" diff --git a/validator/keymanager/remote-web3signer/types/BUILD.bazel b/validator/keymanager/remote-web3signer/types/BUILD.bazel index 79cffaca88..e5ebd3091a 100644 --- a/validator/keymanager/remote-web3signer/types/BUILD.bazel +++ b/validator/keymanager/remote-web3signer/types/BUILD.bazel @@ -7,7 +7,7 @@ go_library( "requests.go", "web3signer_types.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/validator/keymanager/remote-web3signer/types", + importpath = "github.com/OffchainLabs/prysm/v7/validator/keymanager/remote-web3signer/types", visibility = ["//visibility:public"], deps = [ "//config/params:go_default_library", diff --git a/validator/keymanager/remote-web3signer/types/custom_mappers.go b/validator/keymanager/remote-web3signer/types/custom_mappers.go index 0eca3ad293..53ae09e2c3 100644 --- a/validator/keymanager/remote-web3signer/types/custom_mappers.go +++ b/validator/keymanager/remote-web3signer/types/custom_mappers.go @@ -3,10 +3,10 @@ package types import ( "fmt" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/time/slots" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/time/slots" "github.com/ethereum/go-ethereum/common/hexutil" "github.com/pkg/errors" ) diff --git a/validator/keymanager/remote-web3signer/types/custom_mappers_test.go b/validator/keymanager/remote-web3signer/types/custom_mappers_test.go index 2e1d230bcb..73de1e3c7f 100644 --- a/validator/keymanager/remote-web3signer/types/custom_mappers_test.go +++ b/validator/keymanager/remote-web3signer/types/custom_mappers_test.go @@ -5,11 +5,11 @@ import ( "testing" "github.com/OffchainLabs/go-bitfield" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/validator/keymanager/remote-web3signer/types" - "github.com/OffchainLabs/prysm/v6/validator/keymanager/remote-web3signer/types/mock" + fieldparams 
"github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/validator/keymanager/remote-web3signer/types" + "github.com/OffchainLabs/prysm/v7/validator/keymanager/remote-web3signer/types/mock" ) func TestMapAggregateAndProof(t *testing.T) { diff --git a/validator/keymanager/remote-web3signer/types/mock/BUILD.bazel b/validator/keymanager/remote-web3signer/types/mock/BUILD.bazel index 40df946d36..6f3ed4891d 100644 --- a/validator/keymanager/remote-web3signer/types/mock/BUILD.bazel +++ b/validator/keymanager/remote-web3signer/types/mock/BUILD.bazel @@ -4,7 +4,7 @@ go_library( name = "go_default_library", testonly = True, srcs = ["mocks.go"], - importpath = "github.com/OffchainLabs/prysm/v6/validator/keymanager/remote-web3signer/types/mock", + importpath = "github.com/OffchainLabs/prysm/v7/validator/keymanager/remote-web3signer/types/mock", visibility = ["//visibility:public"], deps = [ "//config/fieldparams:go_default_library", diff --git a/validator/keymanager/remote-web3signer/types/mock/mocks.go b/validator/keymanager/remote-web3signer/types/mock/mocks.go index 4c70815f53..67d1057d69 100644 --- a/validator/keymanager/remote-web3signer/types/mock/mocks.go +++ b/validator/keymanager/remote-web3signer/types/mock/mocks.go @@ -6,14 +6,14 @@ import ( "strings" "github.com/OffchainLabs/go-bitfield" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - eth "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - validatorpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1/validator-client" - "github.com/OffchainLabs/prysm/v6/runtime/version" - "github.com/OffchainLabs/prysm/v6/testing/util" - "github.com/OffchainLabs/prysm/v6/validator/keymanager/remote-web3signer/types" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + eth "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + validatorpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1/validator-client" + "github.com/OffchainLabs/prysm/v7/runtime/version" + "github.com/OffchainLabs/prysm/v7/testing/util" + "github.com/OffchainLabs/prysm/v7/validator/keymanager/remote-web3signer/types" "github.com/ethereum/go-ethereum/common/hexutil" ) diff --git a/validator/keymanager/remote-web3signer/types/requests.go b/validator/keymanager/remote-web3signer/types/requests.go index a3f99babd2..9b868e1f86 100644 --- a/validator/keymanager/remote-web3signer/types/requests.go +++ b/validator/keymanager/remote-web3signer/types/requests.go @@ -5,10 +5,10 @@ import ( "fmt" "strings" - "github.com/OffchainLabs/prysm/v6/consensus-types/blocks" - "github.com/OffchainLabs/prysm/v6/consensus-types/interfaces" - validatorpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1/validator-client" - "github.com/OffchainLabs/prysm/v6/runtime/version" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" + validatorpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1/validator-client" + "github.com/OffchainLabs/prysm/v7/runtime/version" "github.com/pkg/errors" ) diff --git a/validator/keymanager/remote-web3signer/types/requests_test.go 
b/validator/keymanager/remote-web3signer/types/requests_test.go index a12619146e..18e828043b 100644 --- a/validator/keymanager/remote-web3signer/types/requests_test.go +++ b/validator/keymanager/remote-web3signer/types/requests_test.go @@ -5,12 +5,12 @@ import ( "reflect" "testing" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - validatorpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1/validator-client" - "github.com/OffchainLabs/prysm/v6/runtime/version" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/validator/keymanager/remote-web3signer/types" - "github.com/OffchainLabs/prysm/v6/validator/keymanager/remote-web3signer/types/mock" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + validatorpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1/validator-client" + "github.com/OffchainLabs/prysm/v7/runtime/version" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/validator/keymanager/remote-web3signer/types" + "github.com/OffchainLabs/prysm/v7/validator/keymanager/remote-web3signer/types/mock" "github.com/ethereum/go-ethereum/common/hexutil" ) diff --git a/validator/keymanager/types.go b/validator/keymanager/types.go index bacb2fb1d0..c7e21b71a2 100644 --- a/validator/keymanager/types.go +++ b/validator/keymanager/types.go @@ -5,10 +5,10 @@ import ( "fmt" "strings" - "github.com/OffchainLabs/prysm/v6/async/event" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/crypto/bls" - validatorpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1/validator-client" + "github.com/OffchainLabs/prysm/v7/async/event" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/crypto/bls" + validatorpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1/validator-client" ) // IKeymanager defines a general keymanager interface for Prysm wallets. 
diff --git a/validator/keymanager/types_test.go b/validator/keymanager/types_test.go index 4e0e583303..18c7089d28 100644 --- a/validator/keymanager/types_test.go +++ b/validator/keymanager/types_test.go @@ -5,12 +5,12 @@ import ( "strings" "testing" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/validator/keymanager" - "github.com/OffchainLabs/prysm/v6/validator/keymanager/derived" - "github.com/OffchainLabs/prysm/v6/validator/keymanager/local" - remoteweb3signer "github.com/OffchainLabs/prysm/v6/validator/keymanager/remote-web3signer" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/validator/keymanager" + "github.com/OffchainLabs/prysm/v7/validator/keymanager/derived" + "github.com/OffchainLabs/prysm/v7/validator/keymanager/local" + remoteweb3signer "github.com/OffchainLabs/prysm/v7/validator/keymanager/remote-web3signer" ) var ( diff --git a/validator/node/BUILD.bazel b/validator/node/BUILD.bazel index 5fbde0d71c..d4c6f496bc 100644 --- a/validator/node/BUILD.bazel +++ b/validator/node/BUILD.bazel @@ -27,7 +27,7 @@ go_library( "log.go", "node.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/validator/node", + importpath = "github.com/OffchainLabs/prysm/v7/validator/node", visibility = [ "//cmd/validator:__subpackages__", "//validator:__subpackages__", diff --git a/validator/node/node.go b/validator/node/node.go index 84c1e2bc90..490051fab1 100644 --- a/validator/node/node.go +++ b/validator/node/node.go @@ -17,30 +17,30 @@ import ( "syscall" "time" - "github.com/OffchainLabs/prysm/v6/api/server/middleware" - "github.com/OffchainLabs/prysm/v6/async/event" - "github.com/OffchainLabs/prysm/v6/cmd" - "github.com/OffchainLabs/prysm/v6/cmd/validator/flags" - "github.com/OffchainLabs/prysm/v6/config/features" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/config/proposer" - "github.com/OffchainLabs/prysm/v6/config/proposer/loader" - "github.com/OffchainLabs/prysm/v6/io/file" - "github.com/OffchainLabs/prysm/v6/monitoring/prometheus" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing" - "github.com/OffchainLabs/prysm/v6/runtime" - "github.com/OffchainLabs/prysm/v6/runtime/prereqs" - "github.com/OffchainLabs/prysm/v6/runtime/version" - "github.com/OffchainLabs/prysm/v6/validator/accounts/wallet" - "github.com/OffchainLabs/prysm/v6/validator/client" - "github.com/OffchainLabs/prysm/v6/validator/db" - "github.com/OffchainLabs/prysm/v6/validator/db/filesystem" - "github.com/OffchainLabs/prysm/v6/validator/db/iface" - "github.com/OffchainLabs/prysm/v6/validator/db/kv" - g "github.com/OffchainLabs/prysm/v6/validator/graffiti" - "github.com/OffchainLabs/prysm/v6/validator/keymanager/local" - remoteweb3signer "github.com/OffchainLabs/prysm/v6/validator/keymanager/remote-web3signer" - "github.com/OffchainLabs/prysm/v6/validator/rpc" + "github.com/OffchainLabs/prysm/v7/api/server/middleware" + "github.com/OffchainLabs/prysm/v7/async/event" + "github.com/OffchainLabs/prysm/v7/cmd" + "github.com/OffchainLabs/prysm/v7/cmd/validator/flags" + "github.com/OffchainLabs/prysm/v7/config/features" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/config/proposer" + "github.com/OffchainLabs/prysm/v7/config/proposer/loader" + "github.com/OffchainLabs/prysm/v7/io/file" + "github.com/OffchainLabs/prysm/v7/monitoring/prometheus" + 
"github.com/OffchainLabs/prysm/v7/monitoring/tracing" + "github.com/OffchainLabs/prysm/v7/runtime" + "github.com/OffchainLabs/prysm/v7/runtime/prereqs" + "github.com/OffchainLabs/prysm/v7/runtime/version" + "github.com/OffchainLabs/prysm/v7/validator/accounts/wallet" + "github.com/OffchainLabs/prysm/v7/validator/client" + "github.com/OffchainLabs/prysm/v7/validator/db" + "github.com/OffchainLabs/prysm/v7/validator/db/filesystem" + "github.com/OffchainLabs/prysm/v7/validator/db/iface" + "github.com/OffchainLabs/prysm/v7/validator/db/kv" + g "github.com/OffchainLabs/prysm/v7/validator/graffiti" + "github.com/OffchainLabs/prysm/v7/validator/keymanager/local" + remoteweb3signer "github.com/OffchainLabs/prysm/v7/validator/keymanager/remote-web3signer" + "github.com/OffchainLabs/prysm/v7/validator/rpc" "github.com/pkg/errors" "github.com/sirupsen/logrus" "github.com/urfave/cli/v2" diff --git a/validator/node/node_test.go b/validator/node/node_test.go index c41b7a1091..c88eb70b7a 100644 --- a/validator/node/node_test.go +++ b/validator/node/node_test.go @@ -8,16 +8,16 @@ import ( "path/filepath" "testing" - "github.com/OffchainLabs/prysm/v6/cmd" - "github.com/OffchainLabs/prysm/v6/cmd/validator/flags" - "github.com/OffchainLabs/prysm/v6/io/file" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/validator/accounts" - "github.com/OffchainLabs/prysm/v6/validator/accounts/wallet" - "github.com/OffchainLabs/prysm/v6/validator/db/kv" - "github.com/OffchainLabs/prysm/v6/validator/keymanager" - remoteweb3signer "github.com/OffchainLabs/prysm/v6/validator/keymanager/remote-web3signer" + "github.com/OffchainLabs/prysm/v7/cmd" + "github.com/OffchainLabs/prysm/v7/cmd/validator/flags" + "github.com/OffchainLabs/prysm/v7/io/file" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/validator/accounts" + "github.com/OffchainLabs/prysm/v7/validator/accounts/wallet" + "github.com/OffchainLabs/prysm/v7/validator/db/kv" + "github.com/OffchainLabs/prysm/v7/validator/keymanager" + remoteweb3signer "github.com/OffchainLabs/prysm/v7/validator/keymanager/remote-web3signer" logtest "github.com/sirupsen/logrus/hooks/test" "github.com/urfave/cli/v2" ) diff --git a/validator/rpc/BUILD.bazel b/validator/rpc/BUILD.bazel index f7e4b41fe1..86df1841d2 100644 --- a/validator/rpc/BUILD.bazel +++ b/validator/rpc/BUILD.bazel @@ -17,7 +17,7 @@ go_library( "server.go", "structs.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/validator/rpc", + importpath = "github.com/OffchainLabs/prysm/v7/validator/rpc", visibility = [ "//visibility:public", ], diff --git a/validator/rpc/auth_token.go b/validator/rpc/auth_token.go index 5d7d251279..8f9ac76ada 100644 --- a/validator/rpc/auth_token.go +++ b/validator/rpc/auth_token.go @@ -12,10 +12,10 @@ import ( "path/filepath" "strings" - "github.com/OffchainLabs/prysm/v6/api" - "github.com/OffchainLabs/prysm/v6/config/features" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/io/file" + "github.com/OffchainLabs/prysm/v7/api" + "github.com/OffchainLabs/prysm/v7/config/features" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/io/file" "github.com/fsnotify/fsnotify" "github.com/golang-jwt/jwt/v4" "github.com/pkg/errors" diff --git a/validator/rpc/auth_token_test.go b/validator/rpc/auth_token_test.go index f8f023ceee..22cb744a6f 
100644 --- a/validator/rpc/auth_token_test.go +++ b/validator/rpc/auth_token_test.go @@ -11,9 +11,9 @@ import ( "testing" "time" - "github.com/OffchainLabs/prysm/v6/api" - "github.com/OffchainLabs/prysm/v6/io/file" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/api" + "github.com/OffchainLabs/prysm/v7/io/file" + "github.com/OffchainLabs/prysm/v7/testing/require" "github.com/ethereum/go-ethereum/common/hexutil" "github.com/golang-jwt/jwt/v4" logTest "github.com/sirupsen/logrus/hooks/test" diff --git a/validator/rpc/beacon.go b/validator/rpc/beacon.go index e18988e20c..01813fafd0 100644 --- a/validator/rpc/beacon.go +++ b/validator/rpc/beacon.go @@ -3,15 +3,15 @@ package rpc import ( "net/http" - api "github.com/OffchainLabs/prysm/v6/api/client" - grpcutil "github.com/OffchainLabs/prysm/v6/api/grpc" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/validator/client" - beaconApi "github.com/OffchainLabs/prysm/v6/validator/client/beacon-api" - beaconChainClientFactory "github.com/OffchainLabs/prysm/v6/validator/client/beacon-chain-client-factory" - nodeClientFactory "github.com/OffchainLabs/prysm/v6/validator/client/node-client-factory" - validatorClientFactory "github.com/OffchainLabs/prysm/v6/validator/client/validator-client-factory" - validatorHelpers "github.com/OffchainLabs/prysm/v6/validator/helpers" + api "github.com/OffchainLabs/prysm/v7/api/client" + grpcutil "github.com/OffchainLabs/prysm/v7/api/grpc" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/validator/client" + beaconApi "github.com/OffchainLabs/prysm/v7/validator/client/beacon-api" + beaconChainClientFactory "github.com/OffchainLabs/prysm/v7/validator/client/beacon-chain-client-factory" + nodeClientFactory "github.com/OffchainLabs/prysm/v7/validator/client/node-client-factory" + validatorClientFactory "github.com/OffchainLabs/prysm/v7/validator/client/validator-client-factory" + validatorHelpers "github.com/OffchainLabs/prysm/v7/validator/helpers" middleware "github.com/grpc-ecosystem/go-grpc-middleware" grpcretry "github.com/grpc-ecosystem/go-grpc-middleware/retry" grpcopentracing "github.com/grpc-ecosystem/go-grpc-middleware/tracing/opentracing" diff --git a/validator/rpc/beacon_test.go b/validator/rpc/beacon_test.go index dc436f02f0..b3842ad4a8 100644 --- a/validator/rpc/beacon_test.go +++ b/validator/rpc/beacon_test.go @@ -3,8 +3,8 @@ package rpc import ( "testing" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" "google.golang.org/grpc/metadata" ) diff --git a/validator/rpc/handler_wallet.go b/validator/rpc/handler_wallet.go index c94e05c381..129bbf78b3 100644 --- a/validator/rpc/handler_wallet.go +++ b/validator/rpc/handler_wallet.go @@ -9,14 +9,14 @@ import ( "path/filepath" "strings" - "github.com/OffchainLabs/prysm/v6/config/features" - "github.com/OffchainLabs/prysm/v6/io/file" - "github.com/OffchainLabs/prysm/v6/io/prompt" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing/trace" - "github.com/OffchainLabs/prysm/v6/network/httputil" - "github.com/OffchainLabs/prysm/v6/validator/accounts" - "github.com/OffchainLabs/prysm/v6/validator/accounts/wallet" - "github.com/OffchainLabs/prysm/v6/validator/keymanager" + "github.com/OffchainLabs/prysm/v7/config/features" + "github.com/OffchainLabs/prysm/v7/io/file" + 
"github.com/OffchainLabs/prysm/v7/io/prompt" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing/trace" + "github.com/OffchainLabs/prysm/v7/network/httputil" + "github.com/OffchainLabs/prysm/v7/validator/accounts" + "github.com/OffchainLabs/prysm/v7/validator/accounts/wallet" + "github.com/OffchainLabs/prysm/v7/validator/keymanager" "github.com/pkg/errors" "github.com/tyler-smith/go-bip39" "github.com/tyler-smith/go-bip39/wordlists" diff --git a/validator/rpc/handler_wallet_test.go b/validator/rpc/handler_wallet_test.go index 471b17ac93..c5624507f2 100644 --- a/validator/rpc/handler_wallet_test.go +++ b/validator/rpc/handler_wallet_test.go @@ -9,19 +9,19 @@ import ( "path/filepath" "testing" - "github.com/OffchainLabs/prysm/v6/async/event" - "github.com/OffchainLabs/prysm/v6/config/features" - "github.com/OffchainLabs/prysm/v6/crypto/bls" - "github.com/OffchainLabs/prysm/v6/crypto/rand" - "github.com/OffchainLabs/prysm/v6/io/file" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/validator/accounts" - "github.com/OffchainLabs/prysm/v6/validator/accounts/iface" - "github.com/OffchainLabs/prysm/v6/validator/accounts/wallet" - "github.com/OffchainLabs/prysm/v6/validator/client" - "github.com/OffchainLabs/prysm/v6/validator/client/testutil" - "github.com/OffchainLabs/prysm/v6/validator/keymanager" + "github.com/OffchainLabs/prysm/v7/async/event" + "github.com/OffchainLabs/prysm/v7/config/features" + "github.com/OffchainLabs/prysm/v7/crypto/bls" + "github.com/OffchainLabs/prysm/v7/crypto/rand" + "github.com/OffchainLabs/prysm/v7/io/file" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/validator/accounts" + "github.com/OffchainLabs/prysm/v7/validator/accounts/iface" + "github.com/OffchainLabs/prysm/v7/validator/accounts/wallet" + "github.com/OffchainLabs/prysm/v7/validator/client" + "github.com/OffchainLabs/prysm/v7/validator/client/testutil" + "github.com/OffchainLabs/prysm/v7/validator/keymanager" "github.com/google/uuid" "github.com/tyler-smith/go-bip39" keystorev4 "github.com/wealdtech/go-eth2-wallet-encryptor-keystorev4" diff --git a/validator/rpc/handlers_accounts.go b/validator/rpc/handlers_accounts.go index de9d71135a..92dc8b8ca8 100644 --- a/validator/rpc/handlers_accounts.go +++ b/validator/rpc/handlers_accounts.go @@ -10,19 +10,19 @@ import ( "net/http" "strconv" - "github.com/OffchainLabs/prysm/v6/api/pagination" - "github.com/OffchainLabs/prysm/v6/beacon-chain/rpc/eth/shared" - "github.com/OffchainLabs/prysm/v6/cmd" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/crypto/bls" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing/trace" - "github.com/OffchainLabs/prysm/v6/network/httputil" - "github.com/OffchainLabs/prysm/v6/validator/accounts" - "github.com/OffchainLabs/prysm/v6/validator/accounts/petnames" - "github.com/OffchainLabs/prysm/v6/validator/keymanager" - "github.com/OffchainLabs/prysm/v6/validator/keymanager/derived" - "github.com/OffchainLabs/prysm/v6/validator/keymanager/local" + "github.com/OffchainLabs/prysm/v7/api/pagination" + "github.com/OffchainLabs/prysm/v7/beacon-chain/rpc/eth/shared" + "github.com/OffchainLabs/prysm/v7/cmd" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/crypto/bls" + 
"github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing/trace" + "github.com/OffchainLabs/prysm/v7/network/httputil" + "github.com/OffchainLabs/prysm/v7/validator/accounts" + "github.com/OffchainLabs/prysm/v7/validator/accounts/petnames" + "github.com/OffchainLabs/prysm/v7/validator/keymanager" + "github.com/OffchainLabs/prysm/v7/validator/keymanager/derived" + "github.com/OffchainLabs/prysm/v7/validator/keymanager/local" "github.com/ethereum/go-ethereum/common/hexutil" "github.com/pkg/errors" ) diff --git a/validator/rpc/handlers_accounts_test.go b/validator/rpc/handlers_accounts_test.go index f19fb84922..7eecd4a091 100644 --- a/validator/rpc/handlers_accounts_test.go +++ b/validator/rpc/handlers_accounts_test.go @@ -13,19 +13,19 @@ import ( "testing" "time" - "github.com/OffchainLabs/prysm/v6/api" - "github.com/OffchainLabs/prysm/v6/cmd/validator/flags" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - validatormock "github.com/OffchainLabs/prysm/v6/testing/validator-mock" - "github.com/OffchainLabs/prysm/v6/validator/accounts" - "github.com/OffchainLabs/prysm/v6/validator/accounts/iface" - "github.com/OffchainLabs/prysm/v6/validator/client" - "github.com/OffchainLabs/prysm/v6/validator/client/testutil" - "github.com/OffchainLabs/prysm/v6/validator/keymanager" - "github.com/OffchainLabs/prysm/v6/validator/keymanager/derived" - constant "github.com/OffchainLabs/prysm/v6/validator/testing" + "github.com/OffchainLabs/prysm/v7/api" + "github.com/OffchainLabs/prysm/v7/cmd/validator/flags" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + validatormock "github.com/OffchainLabs/prysm/v7/testing/validator-mock" + "github.com/OffchainLabs/prysm/v7/validator/accounts" + "github.com/OffchainLabs/prysm/v7/validator/accounts/iface" + "github.com/OffchainLabs/prysm/v7/validator/client" + "github.com/OffchainLabs/prysm/v7/validator/client/testutil" + "github.com/OffchainLabs/prysm/v7/validator/keymanager" + "github.com/OffchainLabs/prysm/v7/validator/keymanager/derived" + constant "github.com/OffchainLabs/prysm/v7/validator/testing" "github.com/ethereum/go-ethereum/common/hexutil" "go.uber.org/mock/gomock" "google.golang.org/protobuf/types/known/timestamppb" diff --git a/validator/rpc/handlers_auth.go b/validator/rpc/handlers_auth.go index b564f374fa..3244f50cff 100644 --- a/validator/rpc/handlers_auth.go +++ b/validator/rpc/handlers_auth.go @@ -3,10 +3,10 @@ package rpc import ( "net/http" - "github.com/OffchainLabs/prysm/v6/io/file" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing/trace" - "github.com/OffchainLabs/prysm/v6/network/httputil" - "github.com/OffchainLabs/prysm/v6/validator/accounts/wallet" + "github.com/OffchainLabs/prysm/v7/io/file" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing/trace" + "github.com/OffchainLabs/prysm/v7/network/httputil" + "github.com/OffchainLabs/prysm/v7/validator/accounts/wallet" "github.com/pkg/errors" ) diff --git a/validator/rpc/handlers_auth_test.go b/validator/rpc/handlers_auth_test.go index b4c7d033cb..20a640fcff 100644 --- a/validator/rpc/handlers_auth_test.go +++ b/validator/rpc/handlers_auth_test.go @@ -8,10 +8,10 @@ import ( "path/filepath" "testing" - "github.com/OffchainLabs/prysm/v6/api" - "github.com/OffchainLabs/prysm/v6/testing/require" - 
"github.com/OffchainLabs/prysm/v6/validator/accounts" - "github.com/OffchainLabs/prysm/v6/validator/keymanager" + "github.com/OffchainLabs/prysm/v7/api" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/validator/accounts" + "github.com/OffchainLabs/prysm/v7/validator/keymanager" ) func TestInitialize(t *testing.T) { diff --git a/validator/rpc/handlers_beacon.go b/validator/rpc/handlers_beacon.go index 351acfd95e..c3b7c6d711 100644 --- a/validator/rpc/handlers_beacon.go +++ b/validator/rpc/handlers_beacon.go @@ -8,12 +8,12 @@ import ( "strings" "time" - "github.com/OffchainLabs/prysm/v6/beacon-chain/rpc/eth/shared" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing/trace" - "github.com/OffchainLabs/prysm/v6/network/httputil" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/beacon-chain/rpc/eth/shared" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing/trace" + "github.com/OffchainLabs/prysm/v7/network/httputil" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" "github.com/ethereum/go-ethereum/common/hexutil" "github.com/pkg/errors" "google.golang.org/protobuf/types/known/emptypb" diff --git a/validator/rpc/handlers_beacon_test.go b/validator/rpc/handlers_beacon_test.go index c410ee7b8b..88d5a6b099 100644 --- a/validator/rpc/handlers_beacon_test.go +++ b/validator/rpc/handlers_beacon_test.go @@ -9,10 +9,10 @@ import ( "testing" "time" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - validatormock "github.com/OffchainLabs/prysm/v6/testing/validator-mock" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + validatormock "github.com/OffchainLabs/prysm/v7/testing/validator-mock" "github.com/ethereum/go-ethereum/common/hexutil" "github.com/pkg/errors" "go.uber.org/mock/gomock" diff --git a/validator/rpc/handlers_health.go b/validator/rpc/handlers_health.go index 32cf39e6fd..835095f966 100644 --- a/validator/rpc/handlers_health.go +++ b/validator/rpc/handlers_health.go @@ -5,11 +5,11 @@ import ( "fmt" "net/http" - "github.com/OffchainLabs/prysm/v6/api" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing/trace" - "github.com/OffchainLabs/prysm/v6/network/httputil" - pb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/runtime/version" + "github.com/OffchainLabs/prysm/v7/api" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing/trace" + "github.com/OffchainLabs/prysm/v7/network/httputil" + pb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/runtime/version" "google.golang.org/protobuf/types/known/emptypb" ) diff --git a/validator/rpc/handlers_health_test.go b/validator/rpc/handlers_health_test.go index 47e499b337..5eef237bd2 100644 --- a/validator/rpc/handlers_health_test.go +++ b/validator/rpc/handlers_health_test.go @@ -9,11 +9,11 @@ import ( "testing" "time" - "github.com/OffchainLabs/prysm/v6/api" - "github.com/OffchainLabs/prysm/v6/io/logs/mock" - pb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - 
"github.com/OffchainLabs/prysm/v6/testing/require" - validatormock "github.com/OffchainLabs/prysm/v6/testing/validator-mock" + "github.com/OffchainLabs/prysm/v7/api" + "github.com/OffchainLabs/prysm/v7/io/logs/mock" + pb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/require" + validatormock "github.com/OffchainLabs/prysm/v7/testing/validator-mock" "github.com/golang/protobuf/ptypes/empty" "go.uber.org/mock/gomock" "google.golang.org/grpc" diff --git a/validator/rpc/handlers_keymanager.go b/validator/rpc/handlers_keymanager.go index 97c8e9838f..f9df921392 100644 --- a/validator/rpc/handlers_keymanager.go +++ b/validator/rpc/handlers_keymanager.go @@ -9,22 +9,22 @@ import ( "net/http" "strings" - "github.com/OffchainLabs/prysm/v6/api/server/structs" - "github.com/OffchainLabs/prysm/v6/beacon-chain/rpc/eth/shared" - "github.com/OffchainLabs/prysm/v6/cmd/validator/flags" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/config/proposer" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/consensus-types/validator" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing/trace" - "github.com/OffchainLabs/prysm/v6/network/httputil" - "github.com/OffchainLabs/prysm/v6/validator/client" - "github.com/OffchainLabs/prysm/v6/validator/keymanager" - "github.com/OffchainLabs/prysm/v6/validator/keymanager/derived" - slashingprotection "github.com/OffchainLabs/prysm/v6/validator/slashing-protection-history" - "github.com/OffchainLabs/prysm/v6/validator/slashing-protection-history/format" + "github.com/OffchainLabs/prysm/v7/api/server/structs" + "github.com/OffchainLabs/prysm/v7/beacon-chain/rpc/eth/shared" + "github.com/OffchainLabs/prysm/v7/cmd/validator/flags" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/config/proposer" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/consensus-types/validator" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing/trace" + "github.com/OffchainLabs/prysm/v7/network/httputil" + "github.com/OffchainLabs/prysm/v7/validator/client" + "github.com/OffchainLabs/prysm/v7/validator/keymanager" + "github.com/OffchainLabs/prysm/v7/validator/keymanager/derived" + slashingprotection "github.com/OffchainLabs/prysm/v7/validator/slashing-protection-history" + "github.com/OffchainLabs/prysm/v7/validator/slashing-protection-history/format" "github.com/ethereum/go-ethereum/common" "github.com/ethereum/go-ethereum/common/hexutil" "github.com/pkg/errors" diff --git a/validator/rpc/handlers_keymanager_test.go b/validator/rpc/handlers_keymanager_test.go index 31238cc2f7..9840a681db 100644 --- a/validator/rpc/handlers_keymanager_test.go +++ b/validator/rpc/handlers_keymanager_test.go @@ -13,32 +13,32 @@ import ( "testing" "time" - "github.com/OffchainLabs/prysm/v6/cmd/validator/flags" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/config/proposer" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/consensus-types/validator" - 
"github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - eth "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - validatormock "github.com/OffchainLabs/prysm/v6/testing/validator-mock" - "github.com/OffchainLabs/prysm/v6/validator/accounts" - "github.com/OffchainLabs/prysm/v6/validator/accounts/iface" - "github.com/OffchainLabs/prysm/v6/validator/accounts/wallet" - "github.com/OffchainLabs/prysm/v6/validator/client" - "github.com/OffchainLabs/prysm/v6/validator/client/testutil" - dbCommon "github.com/OffchainLabs/prysm/v6/validator/db/common" - "github.com/OffchainLabs/prysm/v6/validator/db/filesystem" - DBIface "github.com/OffchainLabs/prysm/v6/validator/db/iface" - "github.com/OffchainLabs/prysm/v6/validator/db/kv" - dbtest "github.com/OffchainLabs/prysm/v6/validator/db/testing" - "github.com/OffchainLabs/prysm/v6/validator/keymanager" - "github.com/OffchainLabs/prysm/v6/validator/keymanager/derived" - remoteweb3signer "github.com/OffchainLabs/prysm/v6/validator/keymanager/remote-web3signer" - "github.com/OffchainLabs/prysm/v6/validator/slashing-protection-history/format" - mocks "github.com/OffchainLabs/prysm/v6/validator/testing" + "github.com/OffchainLabs/prysm/v7/cmd/validator/flags" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/config/proposer" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/consensus-types/validator" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + eth "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + validatormock "github.com/OffchainLabs/prysm/v7/testing/validator-mock" + "github.com/OffchainLabs/prysm/v7/validator/accounts" + "github.com/OffchainLabs/prysm/v7/validator/accounts/iface" + "github.com/OffchainLabs/prysm/v7/validator/accounts/wallet" + "github.com/OffchainLabs/prysm/v7/validator/client" + "github.com/OffchainLabs/prysm/v7/validator/client/testutil" + dbCommon "github.com/OffchainLabs/prysm/v7/validator/db/common" + "github.com/OffchainLabs/prysm/v7/validator/db/filesystem" + DBIface "github.com/OffchainLabs/prysm/v7/validator/db/iface" + "github.com/OffchainLabs/prysm/v7/validator/db/kv" + dbtest "github.com/OffchainLabs/prysm/v7/validator/db/testing" + "github.com/OffchainLabs/prysm/v7/validator/keymanager" + "github.com/OffchainLabs/prysm/v7/validator/keymanager/derived" + remoteweb3signer "github.com/OffchainLabs/prysm/v7/validator/keymanager/remote-web3signer" + "github.com/OffchainLabs/prysm/v7/validator/slashing-protection-history/format" + mocks "github.com/OffchainLabs/prysm/v7/validator/testing" "github.com/ethereum/go-ethereum/common" "github.com/ethereum/go-ethereum/common/hexutil" "github.com/urfave/cli/v2" diff --git a/validator/rpc/handlers_slashing.go b/validator/rpc/handlers_slashing.go index be93c3f0f0..2d8002e297 100644 --- a/validator/rpc/handlers_slashing.go +++ b/validator/rpc/handlers_slashing.go @@ -6,9 +6,9 @@ import ( "io" "net/http" - "github.com/OffchainLabs/prysm/v6/monitoring/tracing/trace" - "github.com/OffchainLabs/prysm/v6/network/httputil" - slashing "github.com/OffchainLabs/prysm/v6/validator/slashing-protection-history" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing/trace" + 
"github.com/OffchainLabs/prysm/v7/network/httputil" + slashing "github.com/OffchainLabs/prysm/v7/validator/slashing-protection-history" "github.com/pkg/errors" ) diff --git a/validator/rpc/handlers_slashing_test.go b/validator/rpc/handlers_slashing_test.go index 9ace45a7ca..e628c1f796 100644 --- a/validator/rpc/handlers_slashing_test.go +++ b/validator/rpc/handlers_slashing_test.go @@ -8,15 +8,15 @@ import ( "net/http/httptest" "testing" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/validator/accounts" - "github.com/OffchainLabs/prysm/v6/validator/db/common" - "github.com/OffchainLabs/prysm/v6/validator/db/filesystem" - "github.com/OffchainLabs/prysm/v6/validator/db/iface" - "github.com/OffchainLabs/prysm/v6/validator/db/kv" - "github.com/OffchainLabs/prysm/v6/validator/keymanager" - "github.com/OffchainLabs/prysm/v6/validator/slashing-protection-history/format" - mocks "github.com/OffchainLabs/prysm/v6/validator/testing" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/validator/accounts" + "github.com/OffchainLabs/prysm/v7/validator/db/common" + "github.com/OffchainLabs/prysm/v7/validator/db/filesystem" + "github.com/OffchainLabs/prysm/v7/validator/db/iface" + "github.com/OffchainLabs/prysm/v7/validator/db/kv" + "github.com/OffchainLabs/prysm/v7/validator/keymanager" + "github.com/OffchainLabs/prysm/v7/validator/slashing-protection-history/format" + mocks "github.com/OffchainLabs/prysm/v7/validator/testing" ) func TestImportSlashingProtection_Preconditions(t *testing.T) { diff --git a/validator/rpc/intercepter.go b/validator/rpc/intercepter.go index 12761eff0a..c0d8d36fcf 100644 --- a/validator/rpc/intercepter.go +++ b/validator/rpc/intercepter.go @@ -5,8 +5,8 @@ import ( "net/http" "strings" - "github.com/OffchainLabs/prysm/v6/api" - "github.com/OffchainLabs/prysm/v6/network/httputil" + "github.com/OffchainLabs/prysm/v7/api" + "github.com/OffchainLabs/prysm/v7/network/httputil" "github.com/sirupsen/logrus" "google.golang.org/grpc" "google.golang.org/grpc/codes" diff --git a/validator/rpc/intercepter_test.go b/validator/rpc/intercepter_test.go index 5fca66058c..1f34a05066 100644 --- a/validator/rpc/intercepter_test.go +++ b/validator/rpc/intercepter_test.go @@ -7,9 +7,9 @@ import ( "net/http/httptest" "testing" - "github.com/OffchainLabs/prysm/v6/api" - "github.com/OffchainLabs/prysm/v6/network/httputil" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/api" + "github.com/OffchainLabs/prysm/v7/network/httputil" + "github.com/OffchainLabs/prysm/v7/testing/require" "google.golang.org/grpc" "google.golang.org/grpc/metadata" ) diff --git a/validator/rpc/server.go b/validator/rpc/server.go index 85322a910f..a718799cc8 100644 --- a/validator/rpc/server.go +++ b/validator/rpc/server.go @@ -9,18 +9,18 @@ import ( "strings" "time" - "github.com/OffchainLabs/prysm/v6/api" - "github.com/OffchainLabs/prysm/v6/api/server/httprest" - "github.com/OffchainLabs/prysm/v6/api/server/middleware" - "github.com/OffchainLabs/prysm/v6/async/event" - "github.com/OffchainLabs/prysm/v6/config/features" - "github.com/OffchainLabs/prysm/v6/io/logs" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/validator/accounts/wallet" - "github.com/OffchainLabs/prysm/v6/validator/client" - iface "github.com/OffchainLabs/prysm/v6/validator/client/iface" - "github.com/OffchainLabs/prysm/v6/validator/db" - "github.com/OffchainLabs/prysm/v6/validator/web" + 
"github.com/OffchainLabs/prysm/v7/api" + "github.com/OffchainLabs/prysm/v7/api/server/httprest" + "github.com/OffchainLabs/prysm/v7/api/server/middleware" + "github.com/OffchainLabs/prysm/v7/async/event" + "github.com/OffchainLabs/prysm/v7/config/features" + "github.com/OffchainLabs/prysm/v7/io/logs" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/validator/accounts/wallet" + "github.com/OffchainLabs/prysm/v7/validator/client" + iface "github.com/OffchainLabs/prysm/v7/validator/client/iface" + "github.com/OffchainLabs/prysm/v7/validator/db" + "github.com/OffchainLabs/prysm/v7/validator/web" "github.com/pkg/errors" ) diff --git a/validator/rpc/server_test.go b/validator/rpc/server_test.go index 200b25a34b..97c602ec13 100644 --- a/validator/rpc/server_test.go +++ b/validator/rpc/server_test.go @@ -4,7 +4,7 @@ import ( "net/http" "testing" - "github.com/OffchainLabs/prysm/v6/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/require" ) func TestServer_InitializeRoutes(t *testing.T) { diff --git a/validator/rpc/structs.go b/validator/rpc/structs.go index 9d9ecc1b67..77bfabff8f 100644 --- a/validator/rpc/structs.go +++ b/validator/rpc/structs.go @@ -4,13 +4,13 @@ import ( "fmt" "strconv" - "github.com/OffchainLabs/prysm/v6/api/server" - "github.com/OffchainLabs/prysm/v6/api/server/structs" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - eth "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/validator/keymanager" + "github.com/OffchainLabs/prysm/v7/api/server" + "github.com/OffchainLabs/prysm/v7/api/server/structs" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + eth "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/validator/keymanager" "github.com/ethereum/go-ethereum/common/hexutil" ) diff --git a/validator/slashing-protection-history/BUILD.bazel b/validator/slashing-protection-history/BUILD.bazel index da50a8e316..71909a91c1 100644 --- a/validator/slashing-protection-history/BUILD.bazel +++ b/validator/slashing-protection-history/BUILD.bazel @@ -6,7 +6,7 @@ go_library( "doc.go", "export.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/validator/slashing-protection-history", + importpath = "github.com/OffchainLabs/prysm/v7/validator/slashing-protection-history", visibility = [ "//cmd:__subpackages__", "//validator:__subpackages__", diff --git a/validator/slashing-protection-history/export.go b/validator/slashing-protection-history/export.go index 17cfa2fea7..4da0132caf 100644 --- a/validator/slashing-protection-history/export.go +++ b/validator/slashing-protection-history/export.go @@ -6,12 +6,12 @@ import ( "sort" "strings" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/monitoring/progress" - "github.com/OffchainLabs/prysm/v6/validator/db" - "github.com/OffchainLabs/prysm/v6/validator/helpers" - "github.com/OffchainLabs/prysm/v6/validator/slashing-protection-history/format" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/monitoring/progress" + 
"github.com/OffchainLabs/prysm/v7/validator/db" + "github.com/OffchainLabs/prysm/v7/validator/helpers" + "github.com/OffchainLabs/prysm/v7/validator/slashing-protection-history/format" "github.com/pkg/errors" ) diff --git a/validator/slashing-protection-history/export_test.go b/validator/slashing-protection-history/export_test.go index 81219d56c8..2cee7b00a8 100644 --- a/validator/slashing-protection-history/export_test.go +++ b/validator/slashing-protection-history/export_test.go @@ -4,13 +4,13 @@ import ( "fmt" "testing" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - ethpb "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - dbtest "github.com/OffchainLabs/prysm/v6/validator/db/testing" - "github.com/OffchainLabs/prysm/v6/validator/slashing-protection-history/format" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + dbtest "github.com/OffchainLabs/prysm/v7/validator/db/testing" + "github.com/OffchainLabs/prysm/v7/validator/slashing-protection-history/format" ) func TestExportStandardProtectionJSON_EmptyGenesisRoot(t *testing.T) { diff --git a/validator/slashing-protection-history/format/BUILD.bazel b/validator/slashing-protection-history/format/BUILD.bazel index 8f8d7197ec..41c4353f5e 100644 --- a/validator/slashing-protection-history/format/BUILD.bazel +++ b/validator/slashing-protection-history/format/BUILD.bazel @@ -3,6 +3,6 @@ load("@prysm//tools/go:def.bzl", "go_library") go_library( name = "go_default_library", srcs = ["format.go"], - importpath = "github.com/OffchainLabs/prysm/v6/validator/slashing-protection-history/format", + importpath = "github.com/OffchainLabs/prysm/v7/validator/slashing-protection-history/format", visibility = ["//visibility:public"], ) diff --git a/validator/slashing-protection-history/round_trip_test.go b/validator/slashing-protection-history/round_trip_test.go index c096f00b16..50a5d534bb 100644 --- a/validator/slashing-protection-history/round_trip_test.go +++ b/validator/slashing-protection-history/round_trip_test.go @@ -6,14 +6,14 @@ import ( "fmt" "testing" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/testing/assert" - "github.com/OffchainLabs/prysm/v6/testing/require" - "github.com/OffchainLabs/prysm/v6/validator/db/common" - dbtest "github.com/OffchainLabs/prysm/v6/validator/db/testing" - history "github.com/OffchainLabs/prysm/v6/validator/slashing-protection-history" - "github.com/OffchainLabs/prysm/v6/validator/slashing-protection-history/format" - slashtest "github.com/OffchainLabs/prysm/v6/validator/testing" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/validator/db/common" + dbtest "github.com/OffchainLabs/prysm/v7/validator/db/testing" + history "github.com/OffchainLabs/prysm/v7/validator/slashing-protection-history" + "github.com/OffchainLabs/prysm/v7/validator/slashing-protection-history/format" + slashtest "github.com/OffchainLabs/prysm/v7/validator/testing" ) // TestImportExport_RoundTrip 
tests that we can import and export slashing protection data diff --git a/validator/testing/BUILD.bazel b/validator/testing/BUILD.bazel index 99430e1d0c..61c0424cfd 100644 --- a/validator/testing/BUILD.bazel +++ b/validator/testing/BUILD.bazel @@ -8,7 +8,7 @@ go_library( "mock_protector.go", "protection_history.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/validator/testing", + importpath = "github.com/OffchainLabs/prysm/v7/validator/testing", visibility = [ "//cmd:__subpackages__", "//validator:__subpackages__", diff --git a/validator/testing/mock_protector.go b/validator/testing/mock_protector.go index d9b0c4b5e8..a276e11e97 100644 --- a/validator/testing/mock_protector.go +++ b/validator/testing/mock_protector.go @@ -3,7 +3,7 @@ package testing import ( "context" - eth "github.com/OffchainLabs/prysm/v6/proto/prysm/v1alpha1" + eth "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" ) // MockProtector mocks the protector. diff --git a/validator/testing/protection_history.go b/validator/testing/protection_history.go index 29f33704b2..39614ac964 100644 --- a/validator/testing/protection_history.go +++ b/validator/testing/protection_history.go @@ -3,14 +3,14 @@ package testing import ( "fmt" - fieldparams "github.com/OffchainLabs/prysm/v6/config/fieldparams" - "github.com/OffchainLabs/prysm/v6/config/params" - "github.com/OffchainLabs/prysm/v6/consensus-types/primitives" - "github.com/OffchainLabs/prysm/v6/crypto/bls" - "github.com/OffchainLabs/prysm/v6/crypto/rand" - "github.com/OffchainLabs/prysm/v6/encoding/bytesutil" - "github.com/OffchainLabs/prysm/v6/validator/db/common" - "github.com/OffchainLabs/prysm/v6/validator/slashing-protection-history/format" + fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/crypto/bls" + "github.com/OffchainLabs/prysm/v7/crypto/rand" + "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" + "github.com/OffchainLabs/prysm/v7/validator/db/common" + "github.com/OffchainLabs/prysm/v7/validator/slashing-protection-history/format" ) // MockSlashingProtectionJSON creates a mock, full slashing protection JSON struct diff --git a/validator/web/BUILD.bazel b/validator/web/BUILD.bazel index d9c8670636..713989b5d8 100644 --- a/validator/web/BUILD.bazel +++ b/validator/web/BUILD.bazel @@ -9,7 +9,7 @@ go_library( "log.go", "site_data.go", ], - importpath = "github.com/OffchainLabs/prysm/v6/validator/web", + importpath = "github.com/OffchainLabs/prysm/v7/validator/web", visibility = [ "//validator:__subpackages__", ], diff --git a/validator/web/handler_test.go b/validator/web/handler_test.go index cfa044655d..609d6a99c9 100644 --- a/validator/web/handler_test.go +++ b/validator/web/handler_test.go @@ -5,7 +5,7 @@ import ( "net/http/httptest" "testing" - "github.com/OffchainLabs/prysm/v6/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/assert" ) func TestHandler(t *testing.T) { From 091e868a7b762d4664537842b4a02b59819145cc Mon Sep 17 00:00:00 2001 From: Manu NALEPA Date: Thu, 6 Nov 2025 17:18:43 +0100 Subject: [PATCH 086/103] `SidecarProposerExpected`: Use the correct value of proposer index in the singleflight group. 
(#15993) --- beacon-chain/verification/data_column.go | 12 +++++++--- beacon-chain/verification/data_column_test.go | 22 +++++++++++++++---- changelog/manu-flight.md | 2 ++ testing/util/deneb.go | 7 +++--- 4 files changed, 33 insertions(+), 10 deletions(-) create mode 100644 changelog/manu-flight.md diff --git a/beacon-chain/verification/data_column.go b/beacon-chain/verification/data_column.go index c090c63af2..7feb333c86 100644 --- a/beacon-chain/verification/data_column.go +++ b/beacon-chain/verification/data_column.go @@ -481,7 +481,7 @@ func (dv *RODataColumnsVerifier) SidecarProposerExpected(ctx context.Context) (e parentRoot := dataColumn.ParentRoot() // Ensure the expensive index computation is only performed once for // concurrent requests for the same signature data. - if _, err, _ := dv.sg.Do(concatRootSlot(parentRoot, dataColumnSlot), func() (any, error) { + idxAny, err, _ := dv.sg.Do(concatRootSlot(parentRoot, dataColumnSlot), func() (any, error) { // Retrieve the parent state. parentState, err := dv.state(ctx, parentRoot) if err != nil { @@ -494,10 +494,16 @@ func (dv *RODataColumnsVerifier) SidecarProposerExpected(ctx context.Context) (e return nil, columnErrBuilder(errors.Wrap(err, "compute proposer")) } - return nil, nil - }); err != nil { + return idx, nil + }) + if err != nil { return err } + + var ok bool + if idx, ok = idxAny.(primitives.ValidatorIndex); !ok { + return columnErrBuilder(errors.New("type assertion to ValidatorIndex failed")) + } } if idx != dataColumn.ProposerIndex() { diff --git a/beacon-chain/verification/data_column_test.go b/beacon-chain/verification/data_column_test.go index 75cb6e2b16..f75342c74f 100644 --- a/beacon-chain/verification/data_column_test.go +++ b/beacon-chain/verification/data_column_test.go @@ -3,6 +3,7 @@ package verification import ( "context" "reflect" + "sync" "testing" "time" @@ -807,6 +808,8 @@ func TestDataColumnsSidecarProposerExpected(t *testing.T) { return firstColumn.ProposerIndex(), nil } + ctx := t.Context() + testCases := []struct { name string stateByRooter StateByRooter @@ -914,20 +917,31 @@ func TestDataColumnsSidecarProposerExpected(t *testing.T) { } verifier := initializer.NewDataColumnsVerifier(tc.columns, GossipDataColumnSidecarRequirements) - err := verifier.SidecarProposerExpected(t.Context()) + var wg sync.WaitGroup + + var err1, err2 error + wg.Go(func() { + err1 = verifier.SidecarProposerExpected(ctx) + }) + wg.Go(func() { + err2 = verifier.SidecarProposerExpected(ctx) + }) + wg.Wait() require.Equal(t, true, verifier.results.executed(RequireSidecarProposerExpected)) if len(tc.error) > 0 { - require.ErrorContains(t, tc.error, err) + require.ErrorContains(t, tc.error, err1) + require.ErrorContains(t, tc.error, err2) require.NotNil(t, verifier.results.result(RequireSidecarProposerExpected)) return } - require.NoError(t, err) + require.NoError(t, err1) + require.NoError(t, err2) require.NoError(t, verifier.results.result(RequireSidecarProposerExpected)) - err = verifier.SidecarProposerExpected(t.Context()) + err := verifier.SidecarProposerExpected(ctx) require.NoError(t, err) }) } diff --git a/changelog/manu-flight.md b/changelog/manu-flight.md new file mode 100644 index 0000000000..d544ce3da0 --- /dev/null +++ b/changelog/manu-flight.md @@ -0,0 +1,2 @@ +### Fixed +- `SidecarProposerExpected`: Use the correct value of proposer index in the singleflight group. 
\ No newline at end of file diff --git a/testing/util/deneb.go b/testing/util/deneb.go index ee3882e1e9..86d3b0715e 100644 --- a/testing/util/deneb.go +++ b/testing/util/deneb.go @@ -58,9 +58,10 @@ func WithDenebSlot(slot primitives.Slot) DenebBlockGeneratorOption { func GenerateTestDenebBlockWithSidecar(t *testing.T, parent [32]byte, slot primitives.Slot, nblobs int, opts ...DenebBlockGeneratorOption) (blocks.ROBlock, []blocks.ROBlob) { g := &denebBlockGenerator{ - parent: parent, - slot: slot, - nblobs: nblobs, + parent: parent, + slot: slot, + nblobs: nblobs, + proposer: 3, // Anything other than zero so we don't fall back to the default uint64 value. } for _, o := range opts { o(g) From 7794a77ae61bdceb70a019b9bb1f58651020eae9 Mon Sep 17 00:00:00 2001 From: Potuz Date: Thu, 6 Nov 2025 13:43:38 -0500 Subject: [PATCH 087/103] Use Head to validate sidecars when possible (#15977) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Use Head to validate sidecars when possible 🤖 Generated with [Claude Code](https://claude.com/claude-code) Co-Authored-By: Claude * LazyHeadStateProvider that avoids service registry * Remove extra file --------- Co-authored-by: Claude Co-authored-by: james-prysm <90280386+james-prysm@users.noreply.github.com> Co-authored-by: Kasey Kirkham --- beacon-chain/execution/BUILD.bazel | 1 + beacon-chain/execution/service_test.go | 3 +- beacon-chain/node/node.go | 5 +- .../sync/data_column_sidecars_test.go | 6 +- .../sync/initial-sync/service_test.go | 6 +- beacon-chain/sync/rpc_status_test.go | 6 +- .../validate_bls_to_execution_change_test.go | 2 +- .../validate_sync_committee_message_test.go | 2 +- .../validate_sync_contribution_proof_test.go | 2 +- beacon-chain/verification/BUILD.bazel | 1 + beacon-chain/verification/blob_test.go | 186 +++++++++++++++++- beacon-chain/verification/data_column.go | 99 +++++++--- beacon-chain/verification/data_column_test.go | 93 ++------- beacon-chain/verification/initializer.go | 22 ++- beacon-chain/verification/initializer_test.go | 2 +- changelog/potuz_use_head_datacolumn.md | 3 + .../shared/common/forkchoice/builder.go | 2 +- 17 files changed, 308 insertions(+), 133 deletions(-) create mode 100644 changelog/potuz_use_head_datacolumn.md diff --git a/beacon-chain/execution/BUILD.bazel b/beacon-chain/execution/BUILD.bazel index 7866c9049f..93d7343027 100644 --- a/beacon-chain/execution/BUILD.bazel +++ b/beacon-chain/execution/BUILD.bazel @@ -100,6 +100,7 @@ go_test( deps = [ "//async/event:go_default_library", "//beacon-chain/blockchain/kzg:go_default_library", + "//beacon-chain/blockchain/testing:go_default_library", "//beacon-chain/cache/depositsnapshot:go_default_library", "//beacon-chain/core/feed:go_default_library", "//beacon-chain/core/feed/state:go_default_library", diff --git a/beacon-chain/execution/service_test.go b/beacon-chain/execution/service_test.go index 6586c82ecf..fe4bc36e81 100644 --- a/beacon-chain/execution/service_test.go +++ b/beacon-chain/execution/service_test.go @@ -8,6 +8,7 @@ import ( "time" "github.com/OffchainLabs/prysm/v7/async/event" + chainMock "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain/testing" "github.com/OffchainLabs/prysm/v7/beacon-chain/cache/depositsnapshot" dbutil "github.com/OffchainLabs/prysm/v7/beacon-chain/db/testing" mockExecution "github.com/OffchainLabs/prysm/v7/beacon-chain/execution/testing" @@ -99,7 +100,7 @@ func TestStart_OK(t *testing.T) { c := startup.NewClockSynchronizer() require.NoError(t, c.SetClock(startup.NewClock(time.Unix(0,
0), [32]byte{}))) waiter := verification.NewInitializerWaiter( - c, forkchoice.NewROForkChoice(nil), nil) + c, forkchoice.NewROForkChoice(nil), nil, &chainMock.ChainService{}) web3Service, err := NewService(t.Context(), WithHttpEndpoint(endpoint), diff --git a/beacon-chain/node/node.go b/beacon-chain/node/node.go index 18f492a6f6..f4f3adabc2 100644 --- a/beacon-chain/node/node.go +++ b/beacon-chain/node/node.go @@ -124,6 +124,7 @@ type BeaconNode struct { DataColumnStorage *filesystem.DataColumnStorage DataColumnStorageOptions []filesystem.DataColumnStorageOption verifyInitWaiter *verification.InitializerWaiter + lhsp *verification.LazyHeadStateProvider syncChecker *initialsync.SyncChecker slasherEnabled bool lcStore *lightclient.Store @@ -230,8 +231,9 @@ func New(cliCtx *cli.Context, cancel context.CancelFunc, opts ...Option) (*Beaco return nil, errors.Wrap(err, "could not start modules") } + beacon.lhsp = &verification.LazyHeadStateProvider{} beacon.verifyInitWaiter = verification.NewInitializerWaiter( - beacon.clockWaiter, forkchoice.NewROForkChoice(beacon.forkChoicer), beacon.stateGen) + beacon.clockWaiter, forkchoice.NewROForkChoice(beacon.forkChoicer), beacon.stateGen, beacon.lhsp) beacon.BackfillOpts = append( beacon.BackfillOpts, @@ -749,6 +751,7 @@ func (b *BeaconNode) registerBlockchainService(fc forkchoice.ForkChoicer, gs *st if err != nil { return errors.Wrap(err, "could not register blockchain service") } + b.lhsp.HeadStateProvider = blockchainService return b.services.RegisterService(blockchainService) } diff --git a/beacon-chain/sync/data_column_sidecars_test.go b/beacon-chain/sync/data_column_sidecars_test.go index 1d97589fc2..4d5faa0cb0 100644 --- a/beacon-chain/sync/data_column_sidecars_test.go +++ b/beacon-chain/sync/data_column_sidecars_test.go @@ -154,7 +154,7 @@ func TestFetchDataColumnSidecars(t *testing.T) { err = gs.SetClock(startup.NewClock(time.Unix(4113849600, 0), [fieldparams.RootLength]byte{})) require.NoError(t, err) - waiter := verification.NewInitializerWaiter(gs, nil, nil) + waiter := verification.NewInitializerWaiter(gs, nil, nil, nil) initializer, err := waiter.WaitForInitializer(t.Context()) require.NoError(t, err) @@ -787,7 +787,7 @@ func TestVerifyDataColumnSidecarsByPeer(t *testing.T) { err := gs.SetClock(startup.NewClock(time.Unix(4113849600, 0), [fieldparams.RootLength]byte{})) require.NoError(t, err) - waiter := verification.NewInitializerWaiter(gs, nil, nil) + waiter := verification.NewInitializerWaiter(gs, nil, nil, nil) initializer, err := waiter.WaitForInitializer(t.Context()) require.NoError(t, err) @@ -832,7 +832,7 @@ func TestVerifyDataColumnSidecarsByPeer(t *testing.T) { err := gs.SetClock(startup.NewClock(time.Unix(4113849600, 0), [fieldparams.RootLength]byte{})) require.NoError(t, err) - waiter := verification.NewInitializerWaiter(gs, nil, nil) + waiter := verification.NewInitializerWaiter(gs, nil, nil, nil) initializer, err := waiter.WaitForInitializer(t.Context()) require.NoError(t, err) diff --git a/beacon-chain/sync/initial-sync/service_test.go b/beacon-chain/sync/initial-sync/service_test.go index d7ffa7533a..a10aca944d 100644 --- a/beacon-chain/sync/initial-sync/service_test.go +++ b/beacon-chain/sync/initial-sync/service_test.go @@ -174,7 +174,7 @@ func TestService_InitStartStop(t *testing.T) { StateNotifier: &mock.MockStateNotifier{}, InitialSyncComplete: make(chan struct{}), }) - s.verifierWaiter = verification.NewInitializerWaiter(gs, nil, nil) + s.verifierWaiter = verification.NewInitializerWaiter(gs, nil, nil, nil) 
time.Sleep(500 * time.Millisecond) assert.NotNil(t, s) if tt.setGenesis != nil { @@ -217,7 +217,7 @@ func TestService_waitForStateInitialization(t *testing.T) { counter: ratecounter.NewRateCounter(counterSeconds * time.Second), genesisChan: make(chan time.Time), } - s.verifierWaiter = verification.NewInitializerWaiter(cs, nil, nil) + s.verifierWaiter = verification.NewInitializerWaiter(cs, nil, nil, nil) return s, cs } @@ -786,7 +786,7 @@ func TestFetchOriginColumns(t *testing.T) { err = gs.SetClock(startup.NewClock(time.Unix(4113849600, 0), [fieldparams.RootLength]byte{})) require.NoError(t, err) - waiter := verification.NewInitializerWaiter(gs, nil, nil) + waiter := verification.NewInitializerWaiter(gs, nil, nil, nil) initializer, err := waiter.WaitForInitializer(t.Context()) require.NoError(t, err) diff --git a/beacon-chain/sync/rpc_status_test.go b/beacon-chain/sync/rpc_status_test.go index 576c3bfacd..26df633bde 100644 --- a/beacon-chain/sync/rpc_status_test.go +++ b/beacon-chain/sync/rpc_status_test.go @@ -332,7 +332,7 @@ func TestHandshakeHandlers_Roundtrip(t *testing.T) { markInitSyncComplete(t, r) clock := startup.NewClockSynchronizer() require.NoError(t, clock.SetClock(startup.NewClock(time.Now(), [32]byte{}))) - r.verifierWaiter = verification.NewInitializerWaiter(clock, chain.ForkChoiceStore, r.cfg.stateGen) + r.verifierWaiter = verification.NewInitializerWaiter(clock, chain.ForkChoiceStore, r.cfg.stateGen, chain) p1.Digest, err = r.currentForkDigest() require.NoError(t, err) @@ -354,7 +354,7 @@ func TestHandshakeHandlers_Roundtrip(t *testing.T) { markInitSyncComplete(t, r2) clock = startup.NewClockSynchronizer() require.NoError(t, clock.SetClock(startup.NewClock(time.Now(), [32]byte{}))) - r2.verifierWaiter = verification.NewInitializerWaiter(clock, chain2.ForkChoiceStore, r2.cfg.stateGen) + r2.verifierWaiter = verification.NewInitializerWaiter(clock, chain2.ForkChoiceStore, r2.cfg.stateGen, chain2) p2.Digest, err = r.currentForkDigest() require.NoError(t, err) @@ -948,7 +948,7 @@ func TestStatusRPCRequest_BadPeerHandshake(t *testing.T) { markInitSyncComplete(t, r) clock := startup.NewClockSynchronizer() require.NoError(t, clock.SetClock(startup.NewClock(time.Now(), [32]byte{}))) - r.verifierWaiter = verification.NewInitializerWaiter(clock, chain.ForkChoiceStore, r.cfg.stateGen) + r.verifierWaiter = verification.NewInitializerWaiter(clock, chain.ForkChoiceStore, r.cfg.stateGen, chain) go r.Start() diff --git a/beacon-chain/sync/validate_bls_to_execution_change_test.go b/beacon-chain/sync/validate_bls_to_execution_change_test.go index 401558e6a5..9dc0933ef4 100644 --- a/beacon-chain/sync/validate_bls_to_execution_change_test.go +++ b/beacon-chain/sync/validate_bls_to_execution_change_test.go @@ -433,7 +433,7 @@ func TestService_ValidateBlsToExecutionChange(t *testing.T) { tt.clock = startup.NewClock(time.Now(), [32]byte{}) } require.NoError(t, cw.SetClock(tt.clock)) - svc.verifierWaiter = verification.NewInitializerWaiter(cw, chainService.ForkChoiceStore, svc.cfg.stateGen) + svc.verifierWaiter = verification.NewInitializerWaiter(cw, chainService.ForkChoiceStore, svc.cfg.stateGen, chainService) go svc.Start() marshalledObj, err := tt.args.msg.MarshalSSZ() diff --git a/beacon-chain/sync/validate_sync_committee_message_test.go b/beacon-chain/sync/validate_sync_committee_message_test.go index 18eb40a62f..c0dbafb499 100644 --- a/beacon-chain/sync/validate_sync_committee_message_test.go +++ b/beacon-chain/sync/validate_sync_committee_message_test.go @@ -411,7 +411,7 @@ func 
TestService_ValidateSyncCommitteeMessage(t *testing.T) { svc, tt.args.topic, clock = tt.setupSvc(svc, tt.args.msg, tt.args.topic) markInitSyncComplete(t, svc) require.NoError(t, cw.SetClock(clock)) - svc.verifierWaiter = verification.NewInitializerWaiter(cw, chainService.ForkChoiceStore, svc.cfg.stateGen) + svc.verifierWaiter = verification.NewInitializerWaiter(cw, chainService.ForkChoiceStore, svc.cfg.stateGen, chainService) go svc.Start() marshalledObj, err := tt.args.msg.MarshalSSZ() diff --git a/beacon-chain/sync/validate_sync_contribution_proof_test.go b/beacon-chain/sync/validate_sync_contribution_proof_test.go index 2de858bff5..33c6ad0e12 100644 --- a/beacon-chain/sync/validate_sync_contribution_proof_test.go +++ b/beacon-chain/sync/validate_sync_contribution_proof_test.go @@ -855,7 +855,7 @@ func TestService_ValidateSyncContributionAndProof(t *testing.T) { var clock *startup.Clock svc, clock = tt.setupSvc(svc, tt.args.msg) require.NoError(t, cw.SetClock(clock)) - svc.verifierWaiter = verification.NewInitializerWaiter(cw, chainService.ForkChoiceStore, svc.cfg.stateGen) + svc.verifierWaiter = verification.NewInitializerWaiter(cw, chainService.ForkChoiceStore, svc.cfg.stateGen, chainService) markInitSyncComplete(t, svc) go svc.Start() marshalledObj, err := tt.args.msg.MarshalSSZ() diff --git a/beacon-chain/verification/BUILD.bazel b/beacon-chain/verification/BUILD.bazel index 5ac2aabbf2..3af89f8abc 100644 --- a/beacon-chain/verification/BUILD.bazel +++ b/beacon-chain/verification/BUILD.bazel @@ -69,6 +69,7 @@ go_test( "//beacon-chain/forkchoice/types:go_default_library", "//beacon-chain/startup:go_default_library", "//beacon-chain/state:go_default_library", + "//beacon-chain/state/state-native:go_default_library", "//config/fieldparams:go_default_library", "//config/params:go_default_library", "//consensus-types/blocks:go_default_library", diff --git a/beacon-chain/verification/blob_test.go b/beacon-chain/verification/blob_test.go index fe36068231..27e73428e3 100644 --- a/beacon-chain/verification/blob_test.go +++ b/beacon-chain/verification/blob_test.go @@ -11,6 +11,7 @@ import ( forkchoicetypes "github.com/OffchainLabs/prysm/v7/beacon-chain/forkchoice/types" "github.com/OffchainLabs/prysm/v7/beacon-chain/startup" "github.com/OffchainLabs/prysm/v7/beacon-chain/state" + state_native "github.com/OffchainLabs/prysm/v7/beacon-chain/state/state-native" "github.com/OffchainLabs/prysm/v7/config/params" "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" @@ -626,6 +627,45 @@ func (sbr *mockStateByRooter) StateByRoot(ctx context.Context, root [32]byte) (s var _ StateByRooter = &mockStateByRooter{} +type mockHeadStateProvider struct { + headRoot []byte + headSlot primitives.Slot + headState state.BeaconState + headStateReadOnly state.ReadOnlyBeaconState +} + +func (m *mockHeadStateProvider) HeadRoot(_ context.Context) ([]byte, error) { + if m.headRoot != nil { + return m.headRoot, nil + } + root := make([]byte, 32) + root[0] = 0xff + return root, nil +} + +func (m *mockHeadStateProvider) HeadSlot() primitives.Slot { + if m.headSlot == 0 { + return 1000 + } + return m.headSlot +} + +func (m *mockHeadStateProvider) HeadState(_ context.Context) (state.BeaconState, error) { + if m.headState == nil { + return nil, errors.New("head state not available") + } + return m.headState, nil +} + +func (m *mockHeadStateProvider) HeadStateReadOnly(_ context.Context) (state.ReadOnlyBeaconState, error) { + if m.headStateReadOnly == nil { + 
return nil, errors.New("head state read only not available") + } + return m.headStateReadOnly, nil +} + +var _ HeadStateProvider = &mockHeadStateProvider{} + func sbrErrorIfCalled(t *testing.T) sbrfunc { return func(_ context.Context, _ [32]byte) (state.BeaconState, error) { t.Error("StateByRoot should not have been called") @@ -643,15 +683,56 @@ func sbrNotFound(t *testing.T, expectedRoot [32]byte) *mockStateByRooter { } func sbrForValOverride(idx primitives.ValidatorIndex, val *ethpb.Validator) *mockStateByRooter { + return sbrForValOverrideWithT(nil, idx, val) +} + +func sbrForValOverrideWithT(t testing.TB, idx primitives.ValidatorIndex, val *ethpb.Validator) *mockStateByRooter { return &mockStateByRooter{sbr: func(_ context.Context, root [32]byte) (state.BeaconState, error) { - return &validxStateOverride{vals: map[primitives.ValidatorIndex]*ethpb.Validator{ - idx: val, - }}, nil + // Use a real deterministic state so that helpers.BeaconProposerIndexAtSlot works correctly + numValidators := uint64(idx + 1) + if numValidators < 64 { + numValidators = 64 + } + + var st state.BeaconState + var err error + if t != nil { + st, _ = util.DeterministicGenesisStateFulu(t, numValidators) + } else { + // Fallback for blob tests that don't need the full state + return &validxStateOverride{ + slot: 0, + vals: map[primitives.ValidatorIndex]*ethpb.Validator{ + idx: val, + }, + }, nil + } + + // Override the specific validator if provided + if val != nil { + vals := st.Validators() + if idx < primitives.ValidatorIndex(len(vals)) { + vals[idx] = val + // Ensure the validator is active + if vals[idx].ActivationEpoch > 0 { + vals[idx].ActivationEpoch = 0 + } + if vals[idx].ExitEpoch == 0 || vals[idx].ExitEpoch < params.BeaconConfig().FarFutureEpoch { + vals[idx].ExitEpoch = params.BeaconConfig().FarFutureEpoch + } + if vals[idx].EffectiveBalance == 0 { + vals[idx].EffectiveBalance = params.BeaconConfig().MaxEffectiveBalance + } + _ = st.SetValidators(vals) + } + } + return st, err }} } type validxStateOverride struct { state.BeaconState + slot primitives.Slot vals map[primitives.ValidatorIndex]*ethpb.Validator } @@ -665,6 +746,105 @@ func (v *validxStateOverride) ValidatorAtIndex(idx primitives.ValidatorIndex) (* return val, nil } +func (v *validxStateOverride) Slot() primitives.Slot { + return v.slot +} + +func (v *validxStateOverride) Version() int { + // Return Fulu version (6) as default for tests + return 6 +} + +func (v *validxStateOverride) Validators() []*ethpb.Validator { + // Return all validators in the map as a slice + maxIdx := primitives.ValidatorIndex(0) + for idx := range v.vals { + if idx > maxIdx { + maxIdx = idx + } + } + // Ensure we have at least 64 validators for a valid beacon state + numValidators := maxIdx + 1 + if numValidators < 64 { + numValidators = 64 + } + validators := make([]*ethpb.Validator, numValidators) + for i := range validators { + if val, ok := v.vals[primitives.ValidatorIndex(i)]; ok { + validators[i] = val + } else { + // Default validator for indices we don't care about + validators[i] = ðpb.Validator{ + ActivationEpoch: 0, + ExitEpoch: params.BeaconConfig().FarFutureEpoch, + EffectiveBalance: params.BeaconConfig().MaxEffectiveBalance, + } + } + } + return validators +} + +func (v *validxStateOverride) RandaoMixAtIndex(idx uint64) ([]byte, error) { + // Return a zero mix for simplicity in tests + return make([]byte, 32), nil +} + +func (v *validxStateOverride) NumValidators() int { + return len(v.Validators()) +} + +func (v *validxStateOverride) 
ValidatorAtIndexReadOnly(idx primitives.ValidatorIndex) (state.ReadOnlyValidator, error) { + validators := v.Validators() + if idx >= primitives.ValidatorIndex(len(validators)) { + return nil, fmt.Errorf("validator index %d out of range", idx) + } + return state_native.NewValidator(validators[idx]) +} + +func (v *validxStateOverride) IsNil() bool { + return false +} + +func (v *validxStateOverride) LatestBlockHeader() *ethpb.BeaconBlockHeader { + // Return a minimal block header for tests + return &ethpb.BeaconBlockHeader{ + Slot: v.slot, + ProposerIndex: 0, + ParentRoot: make([]byte, 32), + StateRoot: make([]byte, 32), + BodyRoot: make([]byte, 32), + } +} + +func (v *validxStateOverride) HashTreeRoot(ctx context.Context) ([32]byte, error) { + // Return a zero hash for tests + return [32]byte{}, nil +} + +func (v *validxStateOverride) UpdateStateRootAtIndex(idx uint64, stateRoot [32]byte) error { + // No-op for mock - we don't track state roots + return nil +} + +func (v *validxStateOverride) SetLatestBlockHeader(val *ethpb.BeaconBlockHeader) error { + // No-op for mock - we don't track block headers + return nil +} + +func (v *validxStateOverride) ReadFromEveryValidator(f func(idx int, val state.ReadOnlyValidator) error) error { + validators := v.Validators() + for i, val := range validators { + rov, err := state_native.NewValidator(val) + if err != nil { + return err + } + if err := f(i, rov); err != nil { + return err + } + } + return nil +} + type mockProposerCache struct { ComputeProposerCB func(ctx context.Context, root [32]byte, slot primitives.Slot, pst state.BeaconState) (primitives.ValidatorIndex, error) ProposerCB func(c *forkchoicetypes.Checkpoint, slot primitives.Slot) (primitives.ValidatorIndex, bool) diff --git a/beacon-chain/verification/data_column.go b/beacon-chain/verification/data_column.go index 7feb333c86..c6ec953f45 100644 --- a/beacon-chain/verification/data_column.go +++ b/beacon-chain/verification/data_column.go @@ -1,13 +1,16 @@ package verification import ( + "bytes" "context" "crypto/sha256" "fmt" "strings" "time" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/helpers" "github.com/OffchainLabs/prysm/v7/beacon-chain/core/peerdas" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/transition" forkchoicetypes "github.com/OffchainLabs/prysm/v7/beacon-chain/forkchoice/types" "github.com/OffchainLabs/prysm/v7/beacon-chain/state" fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" @@ -66,6 +69,12 @@ var ( errBadTopic = errors.New("topic is not of the one expected") ) +type LazyHeadStateProvider struct { + HeadStateProvider +} + +var _ HeadStateProvider = &LazyHeadStateProvider{} + type ( RODataColumnsVerifier struct { *sharedResources @@ -262,14 +271,14 @@ func (dv *RODataColumnsVerifier) ValidProposerSignature(ctx context.Context) (er if _, err, _ = dv.sg.Do(signatureData.concat(), func() (any, error) { columnVerificationProposerSignatureCache.WithLabelValues("miss").Inc() - // Retrieve the parent state. - parentState, err := dv.state(ctx, dataColumn.ParentRoot()) + // Retrieve a state compatible with the data column for verification. + verifyingState, err := dv.getVerifyingState(ctx, dataColumn) if err != nil { - return nil, columnErrBuilder(errors.Wrap(err, "parent state")) + return nil, columnErrBuilder(errors.Wrap(err, "verifying state")) } // Full verification, which will subsequently be cached for anything sharing the signature cache.
- if err = dv.sc.VerifySignature(signatureData, parentState); err != nil { + if err = dv.sc.VerifySignature(signatureData, verifyingState); err != nil { return nil, columnErrBuilder(errors.Wrap(err, "verify signature")) } @@ -282,6 +291,61 @@ func (dv *RODataColumnsVerifier) ValidProposerSignature(ctx context.Context) (er return nil } +// getVerifyingState returns a state that is compatible with the column sidecar and can be used to verify signature and proposer index. +// The returned state is guaranteed to be at the same epoch as the data column's epoch, and have the same randao mix and active +// validator indices as the data column's parent state advanced to the data column's slot. +func (dv *RODataColumnsVerifier) getVerifyingState(ctx context.Context, dataColumn blocks.RODataColumn) (state.ReadOnlyBeaconState, error) { + headRoot, err := dv.hsp.HeadRoot(ctx) + if err != nil { + return nil, err + } + parentRoot := dataColumn.ParentRoot() + dataColumnSlot := dataColumn.Slot() + dataColumnEpoch := slots.ToEpoch(dataColumnSlot) + headSlot := dv.hsp.HeadSlot() + headEpoch := slots.ToEpoch(headSlot) + + // Use head if it's the parent + if bytes.Equal(parentRoot[:], headRoot) { + // If they are in the same epoch, then we can return the head state directly + if dataColumnEpoch == headEpoch { + return dv.hsp.HeadStateReadOnly(ctx) + } + // Otherwise, we need to process the head state to the data column's slot + headState, err := dv.hsp.HeadState(ctx) + if err != nil { + return nil, err + } + return transition.ProcessSlotsUsingNextSlotCache(ctx, headState, headRoot, dataColumnSlot) + } + + // If head and data column are in the same epoch and head is compatible with the parent's target, then use head + if dataColumnEpoch == headEpoch { + headTarget, err := dv.fc.TargetRootForEpoch(bytesutil.ToBytes32(headRoot), dataColumnEpoch) + if err != nil { + return nil, err + } + parentTarget, err := dv.fc.TargetRootForEpoch(parentRoot, dataColumnEpoch) + if err != nil { + return nil, err + } + if bytes.Equal(headTarget[:], parentTarget[:]) { + return dv.hsp.HeadStateReadOnly(ctx) + } + } + + // Otherwise retrieve the parent state and advance it to the data column's slot + parentState, err := dv.sr.StateByRoot(ctx, parentRoot) + if err != nil { + return nil, err + } + parentEpoch := slots.ToEpoch(parentState.Slot()) + if dataColumnEpoch == parentEpoch { + return parentState, nil + } + return transition.ProcessSlotsUsingNextSlotCache(ctx, parentState, parentRoot[:], dataColumnSlot) +} + func (dv *RODataColumnsVerifier) SidecarParentSeen(parentSeen func([fieldparams.RootLength]byte) bool) (err error) { if ok, err := dv.results.cached(RequireSidecarParentSeen); ok { return err @@ -482,14 +546,12 @@ func (dv *RODataColumnsVerifier) SidecarProposerExpected(ctx context.Context) (e // Ensure the expensive index computation is only performed once for // concurrent requests for the same signature data. idxAny, err, _ := dv.sg.Do(concatRootSlot(parentRoot, dataColumnSlot), func() (any, error) { - // Retrieve the parent state. - parentState, err := dv.state(ctx, parentRoot) + verifyingState, err := dv.getVerifyingState(ctx, dataColumn) if err != nil { - return nil, columnErrBuilder(errors.Wrap(err, "parent state")) + return nil, columnErrBuilder(errors.Wrap(err, "verifying state")) } - // Compute the proposer index. 
- idx, err = dv.pc.ComputeProposer(ctx, parentRoot, dataColumnSlot, parentState) + idx, err = helpers.BeaconProposerIndexAtSlot(ctx, verifyingState, dataColumnSlot) if err != nil { return nil, columnErrBuilder(errors.Wrap(err, "compute proposer")) } @@ -514,25 +576,6 @@ func (dv *RODataColumnsVerifier) SidecarProposerExpected(ctx context.Context) (e return nil } -// state retrieves the state of the corresponding root from the cache if possible, else retrieves it from the state by rooter. -func (dv *RODataColumnsVerifier) state(ctx context.Context, root [fieldparams.RootLength]byte) (state.BeaconState, error) { - // If the parent root is already in the cache, return it. - if st, ok := dv.stateByRoot[root]; ok { - return st, nil - } - - // Retrieve the parent state from the state by rooter. - st, err := dv.sr.StateByRoot(ctx, root) - if err != nil { - return nil, errors.Wrap(err, "state by root") - } - - // Store the parent state in the cache. - dv.stateByRoot[root] = st - - return st, nil -} - func columnToSignatureData(d blocks.RODataColumn) signatureData { return signatureData{ Root: d.BlockRoot(), diff --git a/beacon-chain/verification/data_column_test.go b/beacon-chain/verification/data_column_test.go index f75342c74f..8c40887729 100644 --- a/beacon-chain/verification/data_column_test.go +++ b/beacon-chain/verification/data_column_test.go @@ -1,7 +1,6 @@ package verification import ( - "context" "reflect" "sync" "testing" @@ -11,7 +10,6 @@ import ( "github.com/OffchainLabs/prysm/v7/beacon-chain/core/peerdas" forkchoicetypes "github.com/OffchainLabs/prysm/v7/beacon-chain/forkchoice/types" "github.com/OffchainLabs/prysm/v7/beacon-chain/startup" - "github.com/OffchainLabs/prysm/v7/beacon-chain/state" fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" "github.com/OffchainLabs/prysm/v7/config/params" "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" @@ -330,7 +328,7 @@ func TestValidProposerSignature(t *testing.T) { svcbError: nil, vscbShouldError: false, vscbError: nil, - stateByRooter: sbrForValOverride(firstColumn.ProposerIndex(), validator), + stateByRooter: sbrForValOverrideWithT(t, firstColumn.ProposerIndex(), validator), isError: false, }, { @@ -348,7 +346,7 @@ func TestValidProposerSignature(t *testing.T) { svcbError: nil, vscbShouldError: false, vscbError: errors.New("signature, not so good!"), - stateByRooter: sbrForValOverride(firstColumn.ProposerIndex(), validator), + stateByRooter: sbrForValOverrideWithT(t, firstColumn.ProposerIndex(), validator), isError: true, }, } @@ -378,8 +376,12 @@ func TestValidProposerSignature(t *testing.T) { initializer := Initializer{ shared: &sharedResources{ - sc: signatureCache, - sr: tc.stateByRooter, + sc: signatureCache, + sr: tc.stateByRooter, + hsp: &mockHeadStateProvider{}, + fc: &mockForkchoicer{ + TargetRootForEpochCB: fcReturnsTargetRoot([fieldparams.RootLength]byte{}), + }, }, } @@ -796,20 +798,7 @@ func TestDataColumnsSidecarProposerExpected(t *testing.T) { parentRoot := [fieldparams.RootLength]byte{} columns := GenerateTestDataColumns(t, parentRoot, columnSlot, blobCount) firstColumn := columns[0] - - newColumns := GenerateTestDataColumns(t, parentRoot, 2*params.BeaconConfig().SlotsPerEpoch, blobCount) - firstNewColumn := newColumns[0] - - validator := ðpb.Validator{} - - commonComputeProposerCB := func(_ context.Context, root [fieldparams.RootLength]byte, slot primitives.Slot, _ state.BeaconState) (primitives.ValidatorIndex, error) { - require.Equal(t, firstColumn.ParentRoot(), root) - require.Equal(t, 
firstColumn.Slot(), slot) - return firstColumn.ProposerIndex(), nil - } - ctx := t.Context() - testCases := []struct { name string stateByRooter StateByRooter @@ -841,66 +830,7 @@ func TestDataColumnsSidecarProposerExpected(t *testing.T) { ProposerCB: pcReturnsNotFound(), }, columns: columns, - error: "state by root", - }, - { - name: "Not cached, proposer matches", - stateByRooter: sbrForValOverride(firstColumn.ProposerIndex(), validator), - proposerCache: &mockProposerCache{ - ProposerCB: pcReturnsNotFound(), - ComputeProposerCB: commonComputeProposerCB, - }, - columns: columns, - }, - { - name: "Not cached, proposer matches", - stateByRooter: sbrForValOverride(firstColumn.ProposerIndex(), validator), - proposerCache: &mockProposerCache{ - ProposerCB: pcReturnsNotFound(), - ComputeProposerCB: commonComputeProposerCB, - }, - columns: columns, - }, - { - name: "Not cached, proposer matches for next epoch", - stateByRooter: sbrForValOverride(firstNewColumn.ProposerIndex(), validator), - proposerCache: &mockProposerCache{ - ProposerCB: pcReturnsNotFound(), - ComputeProposerCB: func(_ context.Context, root [32]byte, slot primitives.Slot, _ state.BeaconState) (primitives.ValidatorIndex, error) { - require.Equal(t, firstNewColumn.ParentRoot(), root) - require.Equal(t, firstNewColumn.Slot(), slot) - return firstColumn.ProposerIndex(), nil - }, - }, - columns: newColumns, - }, - { - name: "Not cached, proposer does not match", - stateByRooter: sbrForValOverride(firstColumn.ProposerIndex(), validator), - proposerCache: &mockProposerCache{ - ProposerCB: pcReturnsNotFound(), - ComputeProposerCB: func(_ context.Context, root [32]byte, slot primitives.Slot, _ state.BeaconState) (primitives.ValidatorIndex, error) { - require.Equal(t, firstColumn.ParentRoot(), root) - require.Equal(t, firstColumn.Slot(), slot) - return firstColumn.ProposerIndex() + 1, nil - }, - }, - columns: columns, - error: errSidecarUnexpectedProposer.Error(), - }, - { - name: "Not cached, ComputeProposer fails", - stateByRooter: sbrForValOverride(firstColumn.ProposerIndex(), validator), - proposerCache: &mockProposerCache{ - ProposerCB: pcReturnsNotFound(), - ComputeProposerCB: func(_ context.Context, root [32]byte, slot primitives.Slot, _ state.BeaconState) (primitives.ValidatorIndex, error) { - require.Equal(t, firstColumn.ParentRoot(), root) - require.Equal(t, firstColumn.Slot(), slot) - return 0, errors.New("ComputeProposer failed") - }, - }, - columns: columns, - error: "compute proposer", + error: "verifying state", }, } @@ -908,8 +838,9 @@ func TestDataColumnsSidecarProposerExpected(t *testing.T) { t.Run(tc.name, func(t *testing.T) { initializer := Initializer{ shared: &sharedResources{ - sr: tc.stateByRooter, - pc: tc.proposerCache, + sr: tc.stateByRooter, + pc: tc.proposerCache, + hsp: &mockHeadStateProvider{}, fc: &mockForkchoicer{ TargetRootForEpochCB: fcReturnsTargetRoot([fieldparams.RootLength]byte{}), }, diff --git a/beacon-chain/verification/initializer.go b/beacon-chain/verification/initializer.go index 14aba8b1ad..bb1de673f8 100644 --- a/beacon-chain/verification/initializer.go +++ b/beacon-chain/verification/initializer.go @@ -33,6 +33,16 @@ type StateByRooter interface { StateByRoot(ctx context.Context, blockRoot [32]byte) (state.BeaconState, error) } +// HeadStateProvider describes a type that can provide access to the current head state and related methods. +// This interface matches blockchain.HeadFetcher but is defined here to avoid import cycles +// (blockchain package imports verification package). 
+type HeadStateProvider interface { + HeadRoot(ctx context.Context) ([]byte, error) + HeadSlot() primitives.Slot + HeadState(ctx context.Context) (state.BeaconState, error) + HeadStateReadOnly(ctx context.Context) (state.ReadOnlyBeaconState, error) +} + // sharedResources provides access to resources that are required by different verification types. // for example, sidecar verification and block verification share the block signature verification cache. type sharedResources struct { @@ -41,6 +51,7 @@ type sharedResources struct { sc signatureCache pc proposerCache sr StateByRooter + hsp HeadStateProvider ic *inclusionProofCache sg singleflight.Group } @@ -96,14 +107,15 @@ func WithForkLookup(fl forkLookup) InitializerOption { } // NewInitializerWaiter creates an InitializerWaiter which can be used to obtain an Initializer once async dependencies are ready. -func NewInitializerWaiter(cw startup.ClockWaiter, fc Forkchoicer, sr StateByRooter, opts ...InitializerOption) *InitializerWaiter { +func NewInitializerWaiter(cw startup.ClockWaiter, fc Forkchoicer, sr StateByRooter, hsp HeadStateProvider, opts ...InitializerOption) *InitializerWaiter { pc := newPropCache() // signature cache is initialized in WaitForInitializer, since we need the genesis validators root, which can be obtained from startup.Clock. shared := &sharedResources{ - fc: fc, - pc: pc, - sr: sr, - ic: newInclusionProofCache(defaultInclusionProofCacheSize), + fc: fc, + pc: pc, + sr: sr, + hsp: hsp, + ic: newInclusionProofCache(defaultInclusionProofCacheSize), } iw := &InitializerWaiter{cw: cw, ini: &Initializer{shared: shared}} for _, o := range opts { diff --git a/beacon-chain/verification/initializer_test.go b/beacon-chain/verification/initializer_test.go index fc63d1fe90..51560061da 100644 --- a/beacon-chain/verification/initializer_test.go +++ b/beacon-chain/verification/initializer_test.go @@ -18,7 +18,7 @@ func TestInitializerWaiter(t *testing.T) { cs := startup.NewClockSynchronizer() require.NoError(t, cs.SetClock(c)) - w := NewInitializerWaiter(cs, &mockForkchoicer{}, &mockStateByRooter{}) + w := NewInitializerWaiter(cs, &mockForkchoicer{}, &mockStateByRooter{}, &mockHeadStateProvider{}) ini, err := w.WaitForInitializer(ctx) require.NoError(t, err) csc, ok := ini.shared.sc.(*sigCache) diff --git a/changelog/potuz_use_head_datacolumn.md b/changelog/potuz_use_head_datacolumn.md new file mode 100644 index 0000000000..6b219b2329 --- /dev/null +++ b/changelog/potuz_use_head_datacolumn.md @@ -0,0 +1,3 @@ +### Changed + +- Use head state readonly when possible to validate data column sidecars. 
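For reference, a minimal sketch of how the LazyHeadStateProvider introduced in this patch is meant to be wired. Only the exported names the patch adds (verification.LazyHeadStateProvider, the new HeadStateProvider parameter of verification.NewInitializerWaiter, and the Forkchoicer and StateByRooter interfaces) plus startup.ClockWaiter are assumed to exist; the helper name buildVerifierWaiter and its package are illustrative, not part of the patch.

package example

import (
	"github.com/OffchainLabs/prysm/v7/beacon-chain/startup"
	"github.com/OffchainLabs/prysm/v7/beacon-chain/verification"
)

// buildVerifierWaiter mirrors the late-binding pattern used in node.go above:
// the verification initializer is constructed before the blockchain service
// exists, so it receives an empty LazyHeadStateProvider whose inner provider
// is assigned only after the blockchain service has been registered.
func buildVerifierWaiter(
	cw startup.ClockWaiter,
	fc verification.Forkchoicer,
	sr verification.StateByRooter,
) (*verification.InitializerWaiter, *verification.LazyHeadStateProvider) {
	lhsp := &verification.LazyHeadStateProvider{}
	w := verification.NewInitializerWaiter(cw, fc, sr, lhsp)
	return w, lhsp
}

Once registerBlockchainService has run, the caller assigns the service to the wrapper (lhsp.HeadStateProvider = blockchainService), as node.go does above; before that assignment any head-state lookup would go through a nil embedded provider, so the registration order in node.go is what keeps the late binding safe.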
diff --git a/testing/spectest/shared/common/forkchoice/builder.go b/testing/spectest/shared/common/forkchoice/builder.go index af6e49f179..a45d74fae3 100644 --- a/testing/spectest/shared/common/forkchoice/builder.go +++ b/testing/spectest/shared/common/forkchoice/builder.go @@ -41,7 +41,7 @@ func NewBuilder(t testing.TB, initialState state.BeaconState, initialBlock inter getFork := func(targetEpoch primitives.Epoch) (*ethpb.Fork, error) { return initialState.Fork(), nil } - bvw := verification.NewInitializerWaiter(cw, fc, sg, verification.WithForkLookup(getFork)) + bvw := verification.NewInitializerWaiter(cw, fc, sg, service, verification.WithForkLookup(getFork)) return &Builder{ service: service, execMock: execMock, From de0825f880f5f7c76ded5364fe6b28142f9294a0 Mon Sep 17 00:00:00 2001 From: Bastin <43618253+Inspector-Butters@users.noreply.github.com> Date: Thu, 6 Nov 2025 23:47:35 +0100 Subject: [PATCH 088/103] fulu lc (#15995) Co-authored-by: james-prysm <90280386+james-prysm@users.noreply.github.com> --- beacon-chain/blockchain/process_block_test.go | 2 +- beacon-chain/db/kv/lightclient.go | 15 + beacon-chain/db/kv/lightclient_test.go | 6 +- beacon-chain/light-client/lightclient_test.go | 344 +++++++++++++++--- .../rpc/eth/light-client/handlers_test.go | 19 +- changelog/bastin_update-lc-for-fulu.md | 3 + testing/util/lightclient.go | 180 +++++++++ 7 files changed, 507 insertions(+), 62 deletions(-) create mode 100644 changelog/bastin_update-lc-for-fulu.md diff --git a/beacon-chain/blockchain/process_block_test.go b/beacon-chain/blockchain/process_block_test.go index 7263cd4cad..0df990b01e 100644 --- a/beacon-chain/blockchain/process_block_test.go +++ b/beacon-chain/blockchain/process_block_test.go @@ -2804,7 +2804,7 @@ func TestProcessLightClientUpdate(t *testing.T) { require.NoError(t, s.cfg.BeaconDB.SaveState(ctx, headState, [32]byte{1, 2})) require.NoError(t, s.cfg.BeaconDB.SaveHeadBlockRoot(ctx, [32]byte{1, 2})) - for testVersion := version.Altair; testVersion <= version.Electra; testVersion++ { + for _, testVersion := range version.All()[1:] { t.Run(version.String(testVersion), func(t *testing.T) { l := util.NewTestLightClient(t, testVersion) diff --git a/beacon-chain/db/kv/lightclient.go b/beacon-chain/db/kv/lightclient.go index abf9695e14..212498507a 100644 --- a/beacon-chain/db/kv/lightclient.go +++ b/beacon-chain/db/kv/lightclient.go @@ -181,6 +181,13 @@ func decodeLightClientBootstrap(enc []byte) (interfaces.LightClientBootstrap, [] } m = bootstrap syncCommitteeHash = enc[len(ElectraKey) : len(ElectraKey)+32] + case hasFuluKey(enc): + bootstrap := ðpb.LightClientBootstrapElectra{} + if err := bootstrap.UnmarshalSSZ(enc[len(fuluKey)+32:]); err != nil { + return nil, nil, errors.Wrap(err, "could not unmarshal Electra light client bootstrap") + } + m = bootstrap + syncCommitteeHash = enc[len(fuluKey) : len(fuluKey)+32] default: return nil, nil, errors.New("decoding of saved light client bootstrap is unsupported") } @@ -296,6 +303,12 @@ func decodeLightClientUpdate(enc []byte) (interfaces.LightClientUpdate, error) { return nil, errors.Wrap(err, "could not unmarshal Electra light client update") } m = update + case hasFuluKey(enc): + update := ðpb.LightClientUpdateElectra{} + if err := update.UnmarshalSSZ(enc[len(fuluKey):]); err != nil { + return nil, errors.Wrap(err, "could not unmarshal Fulu light client update") + } + m = update default: return nil, errors.New("decoding of saved light client update is unsupported") } @@ -304,6 +317,8 @@ func decodeLightClientUpdate(enc []byte) 
(interfaces.LightClientUpdate, error) { func keyForLightClientUpdate(v int) ([]byte, error) { switch v { + case version.Fulu: + return fuluKey, nil case version.Electra: return ElectraKey, nil case version.Deneb: diff --git a/beacon-chain/db/kv/lightclient_test.go b/beacon-chain/db/kv/lightclient_test.go index 609471e064..cec59f2cbf 100644 --- a/beacon-chain/db/kv/lightclient_test.go +++ b/beacon-chain/db/kv/lightclient_test.go @@ -215,8 +215,7 @@ func TestStore_LightClientUpdate_CanSaveRetrieve(t *testing.T) { db := setupDB(t) ctx := t.Context() - - for testVersion := version.Altair; testVersion <= version.Electra; testVersion++ { + for _, testVersion := range version.All()[1:] { t.Run(version.String(testVersion), func(t *testing.T) { update, err := createUpdate(t, testVersion) require.NoError(t, err) @@ -572,8 +571,7 @@ func TestStore_LightClientBootstrap_CanSaveRetrieve(t *testing.T) { require.NoError(t, err) require.IsNil(t, retrievedBootstrap) }) - - for testVersion := version.Altair; testVersion <= version.Electra; testVersion++ { + for _, testVersion := range version.All()[1:] { t.Run(version.String(testVersion), func(t *testing.T) { bootstrap, err := createDefaultLightClientBootstrap(primitives.Slot(uint64(params.BeaconConfig().VersionToForkEpochMap()[testVersion]) * uint64(params.BeaconConfig().SlotsPerEpoch))) require.NoError(t, err) diff --git a/beacon-chain/light-client/lightclient_test.go b/beacon-chain/light-client/lightclient_test.go index 3fc490a9b8..09255a26af 100644 --- a/beacon-chain/light-client/lightclient_test.go +++ b/beacon-chain/light-client/lightclient_test.go @@ -29,58 +29,22 @@ func TestLightClient_NewLightClientOptimisticUpdateFromBeaconState(t *testing.T) cfg.CapellaForkEpoch = 3 cfg.DenebForkEpoch = 4 cfg.ElectraForkEpoch = 5 + cfg.FuluForkEpoch = 6 params.OverrideBeaconConfig(cfg) - t.Run("Altair", func(t *testing.T) { - l := util.NewTestLightClient(t, version.Altair) + for _, testVersion := range version.All()[1:] { + t.Run(version.String(testVersion), func(t *testing.T) { + l := util.NewTestLightClient(t, testVersion) - update, err := lightClient.NewLightClientOptimisticUpdateFromBeaconState(l.Ctx, l.State, l.Block, l.AttestedState, l.AttestedBlock) - require.NoError(t, err) - require.NotNil(t, update, "update is nil") - require.Equal(t, l.Block.Block().Slot(), update.SignatureSlot(), "Signature slot is not equal") + update, err := lightClient.NewLightClientOptimisticUpdateFromBeaconState(l.Ctx, l.State, l.Block, l.AttestedState, l.AttestedBlock) + require.NoError(t, err) + require.NotNil(t, update, "update is nil") + require.Equal(t, l.Block.Block().Slot(), update.SignatureSlot(), "Signature slot is not equal") - l.CheckSyncAggregate(update.SyncAggregate()) - l.CheckAttestedHeader(update.AttestedHeader()) - }) - - t.Run("Capella", func(t *testing.T) { - l := util.NewTestLightClient(t, version.Capella) - - update, err := lightClient.NewLightClientOptimisticUpdateFromBeaconState(l.Ctx, l.State, l.Block, l.AttestedState, l.AttestedBlock) - require.NoError(t, err) - require.NotNil(t, update, "update is nil") - - require.Equal(t, l.Block.Block().Slot(), update.SignatureSlot(), "Signature slot is not equal") - - l.CheckSyncAggregate(update.SyncAggregate()) - l.CheckAttestedHeader(update.AttestedHeader()) - }) - - t.Run("Deneb", func(t *testing.T) { - l := util.NewTestLightClient(t, version.Deneb) - - update, err := lightClient.NewLightClientOptimisticUpdateFromBeaconState(l.Ctx, l.State, l.Block, l.AttestedState, l.AttestedBlock) - require.NoError(t, err) - 
require.NotNil(t, update, "update is nil") - - require.Equal(t, l.Block.Block().Slot(), update.SignatureSlot(), "Signature slot is not equal") - - l.CheckSyncAggregate(update.SyncAggregate()) - l.CheckAttestedHeader(update.AttestedHeader()) - }) - - t.Run("Electra", func(t *testing.T) { - l := util.NewTestLightClient(t, version.Electra) - - update, err := lightClient.NewLightClientOptimisticUpdateFromBeaconState(l.Ctx, l.State, l.Block, l.AttestedState, l.AttestedBlock) - require.NoError(t, err) - require.NotNil(t, update, "update is nil") - - require.Equal(t, l.Block.Block().Slot(), update.SignatureSlot(), "Signature slot is not equal") - - l.CheckSyncAggregate(update.SyncAggregate()) - l.CheckAttestedHeader(update.AttestedHeader()) - }) + l.CheckSyncAggregate(update.SyncAggregate()) + l.CheckAttestedHeader(update.AttestedHeader()) + }) + } } func TestLightClient_NewLightClientFinalityUpdateFromBeaconState(t *testing.T) { @@ -91,6 +55,7 @@ func TestLightClient_NewLightClientFinalityUpdateFromBeaconState(t *testing.T) { cfg.CapellaForkEpoch = 3 cfg.DenebForkEpoch = 4 cfg.ElectraForkEpoch = 5 + cfg.FuluForkEpoch = 6 params.OverrideBeaconConfig(cfg) t.Run("Altair", func(t *testing.T) { @@ -538,6 +503,157 @@ func TestLightClient_NewLightClientFinalityUpdateFromBeaconState(t *testing.T) { require.DeepSSZEqual(t, execution, updateExecution.Proto(), "Finalized Block Execution is not equal") }) }) + + t.Run("Fulu", func(t *testing.T) { + t.Run("FinalizedBlock Not Nil", func(t *testing.T) { + l := util.NewTestLightClient(t, version.Fulu) + + update, err := lightClient.NewLightClientFinalityUpdateFromBeaconState(l.Ctx, l.State, l.Block, l.AttestedState, l.AttestedBlock, l.FinalizedBlock) + require.NoError(t, err) + require.NotNil(t, update, "update is nil") + + require.Equal(t, l.Block.Block().Slot(), update.SignatureSlot(), "Signature slot is not equal") + + l.CheckSyncAggregate(update.SyncAggregate()) + l.CheckAttestedHeader(update.AttestedHeader()) + + //zeroHash := params.BeaconConfig().ZeroHash[:] + finalizedBlockHeader, err := l.FinalizedBlock.Header() + require.NoError(t, err) + require.NotNil(t, update.FinalizedHeader(), "Finalized header is nil") + updateFinalizedHeaderBeacon := update.FinalizedHeader().Beacon() + require.Equal(t, finalizedBlockHeader.Header.Slot, updateFinalizedHeaderBeacon.Slot, "Finalized header slot is not equal") + require.Equal(t, finalizedBlockHeader.Header.ProposerIndex, updateFinalizedHeaderBeacon.ProposerIndex, "Finalized header proposer index is not equal") + require.DeepSSZEqual(t, finalizedBlockHeader.Header.ParentRoot, updateFinalizedHeaderBeacon.ParentRoot, "Finalized header parent root is not equal") + require.DeepSSZEqual(t, finalizedBlockHeader.Header.StateRoot, updateFinalizedHeaderBeacon.StateRoot, "Finalized header state root is not equal") + require.DeepSSZEqual(t, finalizedBlockHeader.Header.BodyRoot, updateFinalizedHeaderBeacon.BodyRoot, "Finalized header body root is not equal") + fb, err := update.FinalityBranchElectra() + require.NoError(t, err) + proof, err := l.AttestedState.FinalizedRootProof(l.Ctx) + require.NoError(t, err) + for i, leaf := range fb { + require.DeepSSZEqual(t, proof[i], leaf[:], "Leaf is not equal") + } + + // Check Execution BlockHash + payloadInterface, err := l.FinalizedBlock.Block().Body().Execution() + require.NoError(t, err) + transactionsRoot, err := payloadInterface.TransactionsRoot() + if errors.Is(err, consensustypes.ErrUnsupportedField) { + transactions, err := payloadInterface.Transactions() + require.NoError(t, 
err) + transactionsRootArray, err := ssz.TransactionsRoot(transactions) + require.NoError(t, err) + transactionsRoot = transactionsRootArray[:] + } else { + require.NoError(t, err) + } + withdrawalsRoot, err := payloadInterface.WithdrawalsRoot() + if errors.Is(err, consensustypes.ErrUnsupportedField) { + withdrawals, err := payloadInterface.Withdrawals() + require.NoError(t, err) + withdrawalsRootArray, err := ssz.WithdrawalSliceRoot(withdrawals, fieldparams.MaxWithdrawalsPerPayload) + require.NoError(t, err) + withdrawalsRoot = withdrawalsRootArray[:] + } else { + require.NoError(t, err) + } + execution := &v11.ExecutionPayloadHeaderDeneb{ + ParentHash: payloadInterface.ParentHash(), + FeeRecipient: payloadInterface.FeeRecipient(), + StateRoot: payloadInterface.StateRoot(), + ReceiptsRoot: payloadInterface.ReceiptsRoot(), + LogsBloom: payloadInterface.LogsBloom(), + PrevRandao: payloadInterface.PrevRandao(), + BlockNumber: payloadInterface.BlockNumber(), + GasLimit: payloadInterface.GasLimit(), + GasUsed: payloadInterface.GasUsed(), + Timestamp: payloadInterface.Timestamp(), + ExtraData: payloadInterface.ExtraData(), + BaseFeePerGas: payloadInterface.BaseFeePerGas(), + BlockHash: payloadInterface.BlockHash(), + TransactionsRoot: transactionsRoot, + WithdrawalsRoot: withdrawalsRoot, + } + updateExecution, err := update.FinalizedHeader().Execution() + require.NoError(t, err) + require.DeepSSZEqual(t, execution, updateExecution.Proto(), "Finalized Block Execution is not equal") + }) + + t.Run("FinalizedBlock In Previous Fork", func(t *testing.T) { + l := util.NewTestLightClient(t, version.Fulu, util.WithFinalizedCheckpointInPrevFork()) + + update, err := lightClient.NewLightClientFinalityUpdateFromBeaconState(l.Ctx, l.State, l.Block, l.AttestedState, l.AttestedBlock, l.FinalizedBlock) + require.NoError(t, err) + require.NotNil(t, update, "update is nil") + + require.Equal(t, l.Block.Block().Slot(), update.SignatureSlot(), "Signature slot is not equal") + + l.CheckSyncAggregate(update.SyncAggregate()) + l.CheckAttestedHeader(update.AttestedHeader()) + + finalizedBlockHeader, err := l.FinalizedBlock.Header() + require.NoError(t, err) + require.NotNil(t, update.FinalizedHeader(), "Finalized header is nil") + updateFinalizedHeaderBeacon := update.FinalizedHeader().Beacon() + require.Equal(t, finalizedBlockHeader.Header.Slot, updateFinalizedHeaderBeacon.Slot, "Finalized header slot is not equal") + require.Equal(t, finalizedBlockHeader.Header.ProposerIndex, updateFinalizedHeaderBeacon.ProposerIndex, "Finalized header proposer index is not equal") + require.DeepSSZEqual(t, finalizedBlockHeader.Header.ParentRoot, updateFinalizedHeaderBeacon.ParentRoot, "Finalized header parent root is not equal") + require.DeepSSZEqual(t, finalizedBlockHeader.Header.StateRoot, updateFinalizedHeaderBeacon.StateRoot, "Finalized header state root is not equal") + require.DeepSSZEqual(t, finalizedBlockHeader.Header.BodyRoot, updateFinalizedHeaderBeacon.BodyRoot, "Finalized header body root is not equal") + fb, err := update.FinalityBranchElectra() + require.NoError(t, err) + proof, err := l.AttestedState.FinalizedRootProof(l.Ctx) + require.NoError(t, err) + for i, leaf := range fb { + require.DeepSSZEqual(t, proof[i], leaf[:], "Leaf is not equal") + } + + // Check Execution BlockHash + payloadInterface, err := l.FinalizedBlock.Block().Body().Execution() + require.NoError(t, err) + transactionsRoot, err := payloadInterface.TransactionsRoot() + if errors.Is(err, consensustypes.ErrUnsupportedField) { + transactions, err 
:= payloadInterface.Transactions() + require.NoError(t, err) + transactionsRootArray, err := ssz.TransactionsRoot(transactions) + require.NoError(t, err) + transactionsRoot = transactionsRootArray[:] + } else { + require.NoError(t, err) + } + withdrawalsRoot, err := payloadInterface.WithdrawalsRoot() + if errors.Is(err, consensustypes.ErrUnsupportedField) { + withdrawals, err := payloadInterface.Withdrawals() + require.NoError(t, err) + withdrawalsRootArray, err := ssz.WithdrawalSliceRoot(withdrawals, fieldparams.MaxWithdrawalsPerPayload) + require.NoError(t, err) + withdrawalsRoot = withdrawalsRootArray[:] + } else { + require.NoError(t, err) + } + execution := &v11.ExecutionPayloadHeaderDeneb{ + ParentHash: payloadInterface.ParentHash(), + FeeRecipient: payloadInterface.FeeRecipient(), + StateRoot: payloadInterface.StateRoot(), + ReceiptsRoot: payloadInterface.ReceiptsRoot(), + LogsBloom: payloadInterface.LogsBloom(), + PrevRandao: payloadInterface.PrevRandao(), + BlockNumber: payloadInterface.BlockNumber(), + GasLimit: payloadInterface.GasLimit(), + GasUsed: payloadInterface.GasUsed(), + Timestamp: payloadInterface.Timestamp(), + ExtraData: payloadInterface.ExtraData(), + BaseFeePerGas: payloadInterface.BaseFeePerGas(), + BlockHash: payloadInterface.BlockHash(), + TransactionsRoot: transactionsRoot, + WithdrawalsRoot: withdrawalsRoot, + } + updateExecution, err := update.FinalizedHeader().Execution() + require.NoError(t, err) + require.DeepSSZEqual(t, execution, updateExecution.Proto(), "Finalized Block Execution is not equal") + }) + }) } func TestLightClient_BlockToLightClientHeader(t *testing.T) { @@ -983,6 +1099,138 @@ func TestLightClient_BlockToLightClientHeader(t *testing.T) { }) }) + t.Run("Fulu", func(t *testing.T) { + t.Run("Non-Blinded Beacon Block", func(t *testing.T) { + l := util.NewTestLightClient(t, version.Fulu) + + header, err := lightClient.BlockToLightClientHeader(l.Ctx, version.Fulu, l.Block) + require.NoError(t, err) + require.NotNil(t, header, "header is nil") + + parentRoot := l.Block.Block().ParentRoot() + stateRoot := l.Block.Block().StateRoot() + bodyRoot, err := l.Block.Block().Body().HashTreeRoot() + require.NoError(t, err) + + payload, err := l.Block.Block().Body().Execution() + require.NoError(t, err) + + transactionsRoot, err := lightClient.ComputeTransactionsRoot(payload) + require.NoError(t, err) + + withdrawalsRoot, err := lightClient.ComputeWithdrawalsRoot(payload) + require.NoError(t, err) + + blobGasUsed, err := payload.BlobGasUsed() + require.NoError(t, err) + + excessBlobGas, err := payload.ExcessBlobGas() + require.NoError(t, err) + + executionHeader := &v11.ExecutionPayloadHeaderDeneb{ + ParentHash: payload.ParentHash(), + FeeRecipient: payload.FeeRecipient(), + StateRoot: payload.StateRoot(), + ReceiptsRoot: payload.ReceiptsRoot(), + LogsBloom: payload.LogsBloom(), + PrevRandao: payload.PrevRandao(), + BlockNumber: payload.BlockNumber(), + GasLimit: payload.GasLimit(), + GasUsed: payload.GasUsed(), + Timestamp: payload.Timestamp(), + ExtraData: payload.ExtraData(), + BaseFeePerGas: payload.BaseFeePerGas(), + BlockHash: payload.BlockHash(), + TransactionsRoot: transactionsRoot, + WithdrawalsRoot: withdrawalsRoot, + BlobGasUsed: blobGasUsed, + ExcessBlobGas: excessBlobGas, + } + + executionPayloadProof, err := blocks.PayloadProof(l.Ctx, l.Block.Block()) + require.NoError(t, err) + + require.Equal(t, l.Block.Block().Slot(), header.Beacon().Slot, "Slot is not equal") + require.Equal(t, l.Block.Block().ProposerIndex(), header.Beacon().ProposerIndex, 
"Proposer index is not equal") + require.DeepSSZEqual(t, parentRoot[:], header.Beacon().ParentRoot, "Parent root is not equal") + require.DeepSSZEqual(t, stateRoot[:], header.Beacon().StateRoot, "State root is not equal") + require.DeepSSZEqual(t, bodyRoot[:], header.Beacon().BodyRoot, "Body root is not equal") + + headerExecution, err := header.Execution() + require.NoError(t, err) + require.DeepSSZEqual(t, executionHeader, headerExecution.Proto(), "Execution headers are not equal") + + headerExecutionBranch, err := header.ExecutionBranch() + require.NoError(t, err) + require.DeepSSZEqual(t, executionPayloadProof, convertArrayToSlice(headerExecutionBranch), "Execution payload proofs are not equal") + }) + + t.Run("Blinded Beacon Block", func(t *testing.T) { + l := util.NewTestLightClient(t, version.Fulu, util.WithBlinded()) + + header, err := lightClient.BlockToLightClientHeader(l.Ctx, version.Fulu, l.Block) + require.NoError(t, err) + require.NotNil(t, header, "header is nil") + + parentRoot := l.Block.Block().ParentRoot() + stateRoot := l.Block.Block().StateRoot() + bodyRoot, err := l.Block.Block().Body().HashTreeRoot() + require.NoError(t, err) + + payload, err := l.Block.Block().Body().Execution() + require.NoError(t, err) + + transactionsRoot, err := payload.TransactionsRoot() + require.NoError(t, err) + + withdrawalsRoot, err := payload.WithdrawalsRoot() + require.NoError(t, err) + + blobGasUsed, err := payload.BlobGasUsed() + require.NoError(t, err) + + excessBlobGas, err := payload.ExcessBlobGas() + require.NoError(t, err) + + executionHeader := &v11.ExecutionPayloadHeaderDeneb{ + ParentHash: payload.ParentHash(), + FeeRecipient: payload.FeeRecipient(), + StateRoot: payload.StateRoot(), + ReceiptsRoot: payload.ReceiptsRoot(), + LogsBloom: payload.LogsBloom(), + PrevRandao: payload.PrevRandao(), + BlockNumber: payload.BlockNumber(), + GasLimit: payload.GasLimit(), + GasUsed: payload.GasUsed(), + Timestamp: payload.Timestamp(), + ExtraData: payload.ExtraData(), + BaseFeePerGas: payload.BaseFeePerGas(), + BlockHash: payload.BlockHash(), + TransactionsRoot: transactionsRoot, + WithdrawalsRoot: withdrawalsRoot, + BlobGasUsed: blobGasUsed, + ExcessBlobGas: excessBlobGas, + } + + executionPayloadProof, err := blocks.PayloadProof(l.Ctx, l.Block.Block()) + require.NoError(t, err) + + require.Equal(t, l.Block.Block().Slot(), header.Beacon().Slot, "Slot is not equal") + require.Equal(t, l.Block.Block().ProposerIndex(), header.Beacon().ProposerIndex, "Proposer index is not equal") + require.DeepSSZEqual(t, parentRoot[:], header.Beacon().ParentRoot, "Parent root is not equal") + require.DeepSSZEqual(t, stateRoot[:], header.Beacon().StateRoot, "State root is not equal") + require.DeepSSZEqual(t, bodyRoot[:], header.Beacon().BodyRoot, "Body root is not equal") + + headerExecution, err := header.Execution() + require.NoError(t, err) + require.DeepSSZEqual(t, executionHeader, headerExecution.Proto(), "Execution headers are not equal") + + headerExecutionBranch, err := header.ExecutionBranch() + require.NoError(t, err) + require.DeepSSZEqual(t, executionPayloadProof, convertArrayToSlice(headerExecutionBranch), "Execution payload proofs are not equal") + }) + }) + t.Run("Capella fork with Altair block", func(t *testing.T) { l := util.NewTestLightClient(t, version.Altair) diff --git a/beacon-chain/rpc/eth/light-client/handlers_test.go b/beacon-chain/rpc/eth/light-client/handlers_test.go index 3486c49220..6e87cfb00f 100644 --- a/beacon-chain/rpc/eth/light-client/handlers_test.go +++ 
b/beacon-chain/rpc/eth/light-client/handlers_test.go @@ -46,7 +46,7 @@ func TestLightClientHandler_GetLightClientBootstrap(t *testing.T) { cfg.FuluForkEpoch = 5 params.OverrideBeaconConfig(cfg) - for testVersion := version.Altair; testVersion <= version.Electra; testVersion++ { + for _, testVersion := range version.All()[1:] { t.Run(version.String(testVersion), func(t *testing.T) { l := util.NewTestLightClient(t, testVersion) @@ -131,7 +131,7 @@ func TestLightClientHandler_GetLightClientBootstrap(t *testing.T) { resp = &pb.LightClientBootstrapCapella{} case version.Deneb: resp = &pb.LightClientBootstrapDeneb{} - case version.Electra: + case version.Electra, version.Fulu: resp = &pb.LightClientBootstrapElectra{} default: t.Fatalf("Unsupported version %s", version.String(testVersion)) @@ -173,10 +173,11 @@ func TestLightClientHandler_GetLightClientByRange(t *testing.T) { config.CapellaForkEpoch = 2 config.DenebForkEpoch = 3 config.ElectraForkEpoch = 4 + config.FuluForkEpoch = 5 params.OverrideBeaconConfig(config) t.Run("can save retrieve", func(t *testing.T) { - for testVersion := version.Altair; testVersion <= version.Electra; testVersion++ { + for _, testVersion := range version.All()[1:] { t.Run(version.String(testVersion), func(t *testing.T) { slot := primitives.Slot(params.BeaconConfig().VersionToForkEpochMap()[testVersion] * primitives.Epoch(config.SlotsPerEpoch)).Add(1) @@ -252,7 +253,7 @@ func TestLightClientHandler_GetLightClientByRange(t *testing.T) { resp = &pb.LightClientUpdateCapella{} case version.Deneb: resp = &pb.LightClientUpdateDeneb{} - case version.Electra: + case version.Electra, version.Fulu: resp = &pb.LightClientUpdateElectra{} default: t.Fatalf("Unsupported version %s", version.String(testVersion)) @@ -313,7 +314,7 @@ func TestLightClientHandler_GetLightClientByRange(t *testing.T) { resp = &pb.LightClientUpdateCapella{} case version.Deneb: resp = &pb.LightClientUpdateDeneb{} - case version.Electra: + case version.Electra, version.Fulu: resp = &pb.LightClientUpdateElectra{} default: t.Fatalf("Unsupported version %s", version.String(testVersion)) @@ -730,7 +731,7 @@ func TestLightClientHandler_GetLightClientFinalityUpdate(t *testing.T) { require.Equal(t, http.StatusNotFound, writer.Code) }) - for testVersion := 1; testVersion < 6; testVersion++ { + for _, testVersion := range version.All()[1:] { t.Run(version.String(testVersion), func(t *testing.T) { ctx := t.Context() @@ -793,7 +794,7 @@ func TestLightClientHandler_GetLightClientFinalityUpdate(t *testing.T) { resp = &pb.LightClientFinalityUpdateCapella{} case version.Deneb: resp = &pb.LightClientFinalityUpdateDeneb{} - case version.Electra: + case version.Electra, version.Fulu: resp = &pb.LightClientFinalityUpdateElectra{} default: t.Fatalf("Unsupported version %s", version.String(testVersion)) @@ -825,7 +826,7 @@ func TestLightClientHandler_GetLightClientOptimisticUpdate(t *testing.T) { require.Equal(t, http.StatusNotFound, writer.Code) }) - for testVersion := 1; testVersion < 6; testVersion++ { + for _, testVersion := range version.All()[1:] { t.Run(version.String(testVersion), func(t *testing.T) { ctx := t.Context() l := util.NewTestLightClient(t, testVersion) @@ -886,7 +887,7 @@ func TestLightClientHandler_GetLightClientOptimisticUpdate(t *testing.T) { resp = &pb.LightClientOptimisticUpdateCapella{} case version.Deneb: resp = &pb.LightClientOptimisticUpdateDeneb{} - case version.Electra: + case version.Electra, version.Fulu: resp = &pb.LightClientOptimisticUpdateDeneb{} default: t.Fatalf("Unsupported version %s", 
version.String(testVersion)) diff --git a/changelog/bastin_update-lc-for-fulu.md b/changelog/bastin_update-lc-for-fulu.md new file mode 100644 index 0000000000..234fc9fda5 --- /dev/null +++ b/changelog/bastin_update-lc-for-fulu.md @@ -0,0 +1,3 @@ +### Added + +- add fulu support to light client processing. \ No newline at end of file diff --git a/testing/util/lightclient.go b/testing/util/lightclient.go index ddc7789bd9..58bbfce572 100644 --- a/testing/util/lightclient.go +++ b/testing/util/lightclient.go @@ -62,6 +62,8 @@ func NewTestLightClient(t *testing.T, forkVersion int, options ...LightClientOpt return l.setupTestDeneb() case version.Electra: return l.setupTestElectra() + case version.Fulu: + return l.setupTestFulu() default: l.T.Fatalf("Unsupported version %s", version.String(l.version)) return nil @@ -955,6 +957,184 @@ func (l *TestLightClient) setupTestElectra() *TestLightClient { return l } +func (l *TestLightClient) setupTestFulu() *TestLightClient { + ctx := context.Background() + + attestedSlot := primitives.Slot(uint64(params.BeaconConfig().FuluForkEpoch) * uint64(params.BeaconConfig().SlotsPerEpoch)).Add(1) + if l.increaseAttestedSlotBy > 0 { + attestedSlot = attestedSlot.Add(l.increaseAttestedSlotBy) + } + + signatureSlot := attestedSlot.Add(1) + if l.increaseSignatureSlotBy > 0 { + signatureSlot = signatureSlot.Add(l.increaseSignatureSlotBy) + } + + // Attested State & Block + attestedState, err := NewBeaconStateFulu() + require.NoError(l.T, err) + require.NoError(l.T, attestedState.SetSlot(attestedSlot)) + + var signedFinalizedBlock interfaces.SignedBeaconBlock + var finalizedState state.BeaconState + // Finalized checkpoint + if !l.noFinalizedCheckpoint { + var finalizedSlot primitives.Slot + + if l.finalizedCheckpointInPrevFork { + finalizedSlot = primitives.Slot(uint64(params.BeaconConfig().ElectraForkEpoch) * uint64(params.BeaconConfig().SlotsPerEpoch)) + if l.increaseFinalizedSlotBy > 0 { + finalizedSlot = finalizedSlot.Add(l.increaseFinalizedSlotBy) + } + + finalizedState, err = NewBeaconStateElectra() + require.NoError(l.T, err) + require.NoError(l.T, finalizedState.SetSlot(finalizedSlot)) + + finalizedBlock := NewBeaconBlockElectra() + require.NoError(l.T, err) + finalizedBlock.Block.Slot = finalizedSlot + signedFinalizedBlock, err = blocks.NewSignedBeaconBlock(finalizedBlock) + require.NoError(l.T, err) + finalizedHeader, err := signedFinalizedBlock.Header() + require.NoError(l.T, err) + require.NoError(l.T, finalizedState.SetLatestBlockHeader(finalizedHeader.Header)) + finalizedStateRoot, err := finalizedState.HashTreeRoot(ctx) + require.NoError(l.T, err) + finalizedBlock.Block.StateRoot = finalizedStateRoot[:] + signedFinalizedBlock, err = blocks.NewSignedBeaconBlock(finalizedBlock) + require.NoError(l.T, err) + } else { + finalizedSlot = primitives.Slot(uint64(params.BeaconConfig().FuluForkEpoch) * uint64(params.BeaconConfig().SlotsPerEpoch)) + if l.increaseFinalizedSlotBy > 0 { + finalizedSlot = finalizedSlot.Add(l.increaseFinalizedSlotBy) + } + + finalizedState, err = NewBeaconStateFulu() + require.NoError(l.T, err) + require.NoError(l.T, finalizedState.SetSlot(finalizedSlot)) + + finalizedBlock := NewBeaconBlockFulu() + require.NoError(l.T, err) + finalizedBlock.Block.Slot = finalizedSlot + signedFinalizedBlock, err = blocks.NewSignedBeaconBlock(finalizedBlock) + require.NoError(l.T, err) + finalizedHeader, err := signedFinalizedBlock.Header() + require.NoError(l.T, err) + require.NoError(l.T, finalizedState.SetLatestBlockHeader(finalizedHeader.Header)) + 
finalizedStateRoot, err := finalizedState.HashTreeRoot(ctx) + require.NoError(l.T, err) + finalizedBlock.Block.StateRoot = finalizedStateRoot[:] + signedFinalizedBlock, err = blocks.NewSignedBeaconBlock(finalizedBlock) + require.NoError(l.T, err) + } + + // Set the finalized checkpoint + finalizedBlockRoot, err := signedFinalizedBlock.Block().HashTreeRoot() + require.NoError(l.T, err) + finalizedCheckpoint := ðpb.Checkpoint{ + Epoch: slots.ToEpoch(finalizedSlot), + Root: finalizedBlockRoot[:], + } + require.NoError(l.T, attestedState.SetFinalizedCheckpoint(finalizedCheckpoint)) + } + + // Attested Block + attestedBlock := NewBeaconBlockFulu() + attestedBlock.Block.Slot = attestedSlot + attestedBlock.Block.ParentRoot = l.attestedParentRoot[:] + signedAttestedBlock, err := blocks.NewSignedBeaconBlock(attestedBlock) + require.NoError(l.T, err) + attestedBlockHeader, err := signedAttestedBlock.Header() + require.NoError(l.T, err) + require.NoError(l.T, attestedState.SetLatestBlockHeader(attestedBlockHeader.Header)) + attestedStateRoot, err := attestedState.HashTreeRoot(ctx) + require.NoError(l.T, err) + attestedBlock.Block.StateRoot = attestedStateRoot[:] + signedAttestedBlock, err = blocks.NewSignedBeaconBlock(attestedBlock) + require.NoError(l.T, err) + + // Signature State & Block + signatureState, err := NewBeaconStateFulu() + require.NoError(l.T, err) + require.NoError(l.T, signatureState.SetSlot(signatureSlot)) + + var signedSignatureBlock interfaces.SignedBeaconBlock + if l.blinded { + signatureBlock := NewBlindedBeaconBlockFulu() + signatureBlock.Message.Slot = signatureSlot + attestedBlockRoot, err := signedAttestedBlock.Block().HashTreeRoot() + require.NoError(l.T, err) + signatureBlock.Message.ParentRoot = attestedBlockRoot[:] + + var trueBitNum uint64 + if l.supermajority { + trueBitNum = uint64((float64(params.BeaconConfig().SyncCommitteeSize) * 2.0 / 3.0) + 1 + float64(l.increaseActiveParticipantsBy)) + } else { + trueBitNum = params.BeaconConfig().MinSyncCommitteeParticipants + } + for i := uint64(0); i < trueBitNum; i++ { + signatureBlock.Message.Body.SyncAggregate.SyncCommitteeBits.SetBitAt(i, true) + } + + signedSignatureBlock, err = blocks.NewSignedBeaconBlock(signatureBlock) + require.NoError(l.T, err) + + signatureBlockHeader, err := signedSignatureBlock.Header() + require.NoError(l.T, err) + + err = signatureState.SetLatestBlockHeader(signatureBlockHeader.Header) + require.NoError(l.T, err) + stateRoot, err := signatureState.HashTreeRoot(ctx) + require.NoError(l.T, err) + + signatureBlock.Message.StateRoot = stateRoot[:] + signedSignatureBlock, err = blocks.NewSignedBeaconBlock(signatureBlock) + require.NoError(l.T, err) + } else { + signatureBlock := NewBeaconBlockFulu() + signatureBlock.Block.Slot = signatureSlot + attestedBlockRoot, err := signedAttestedBlock.Block().HashTreeRoot() + require.NoError(l.T, err) + signatureBlock.Block.ParentRoot = attestedBlockRoot[:] + + var trueBitNum uint64 + if l.supermajority { + trueBitNum = uint64((float64(params.BeaconConfig().SyncCommitteeSize) * 2.0 / 3.0) + 1 + float64(l.increaseActiveParticipantsBy)) + } else { + trueBitNum = params.BeaconConfig().MinSyncCommitteeParticipants + } + for i := uint64(0); i < trueBitNum; i++ { + signatureBlock.Block.Body.SyncAggregate.SyncCommitteeBits.SetBitAt(i, true) + } + + signedSignatureBlock, err = blocks.NewSignedBeaconBlock(signatureBlock) + require.NoError(l.T, err) + + signatureBlockHeader, err := signedSignatureBlock.Header() + require.NoError(l.T, err) + + err = 
signatureState.SetLatestBlockHeader(signatureBlockHeader.Header) + require.NoError(l.T, err) + signatureStateRoot, err := signatureState.HashTreeRoot(ctx) + require.NoError(l.T, err) + + signatureBlock.Block.StateRoot = signatureStateRoot[:] + signedSignatureBlock, err = blocks.NewSignedBeaconBlock(signatureBlock) + require.NoError(l.T, err) + } + + l.State = signatureState + l.AttestedState = attestedState + l.AttestedBlock = signedAttestedBlock + l.Block = signedSignatureBlock + l.Ctx = ctx + l.FinalizedBlock = signedFinalizedBlock + l.FinalizedState = finalizedState + + return l +} + func (l *TestLightClient) CheckAttestedHeader(header interfaces.LightClientHeader) { updateAttestedHeaderBeacon := header.Beacon() testAttestedHeader, err := l.AttestedBlock.Header() From 619334115a27bd975d76678505b3a7337254157a Mon Sep 17 00:00:00 2001 From: Daniel Briskin Date: Sat, 8 Nov 2025 06:49:03 +0900 Subject: [PATCH 089/103] update interop genesis for fulu (#15948) --- changelog/dbxe_fulu-interop.md | 2 ++ cmd/prysmctl/testnet/generate_genesis.go | 4 ++++ runtime/interop/genesis.go | 20 ++++++++++++++++++++ 3 files changed, 26 insertions(+) create mode 100644 changelog/dbxe_fulu-interop.md diff --git a/changelog/dbxe_fulu-interop.md b/changelog/dbxe_fulu-interop.md new file mode 100644 index 0000000000..58ed4cd353 --- /dev/null +++ b/changelog/dbxe_fulu-interop.md @@ -0,0 +1,2 @@ +### Ignored +- Add osaka fork timestamp derivation to interop genesis diff --git a/cmd/prysmctl/testnet/generate_genesis.go b/cmd/prysmctl/testnet/generate_genesis.go index eb081cb4f5..d491f83018 100644 --- a/cmd/prysmctl/testnet/generate_genesis.go +++ b/cmd/prysmctl/testnet/generate_genesis.go @@ -264,6 +264,7 @@ func generateGenesis(ctx context.Context) (state.BeaconState, error) { gen.Config.ShanghaiTime = interop.GethShanghaiTime(genesis, params.BeaconConfig()) gen.Config.CancunTime = interop.GethCancunTime(genesis, params.BeaconConfig()) gen.Config.PragueTime = interop.GethPragueTime(genesis, params.BeaconConfig()) + gen.Config.OsakaTime = interop.GethOsakaTime(genesis, params.BeaconConfig()) fields := logrus.Fields{} if gen.Config.ShanghaiTime != nil { @@ -275,6 +276,9 @@ func generateGenesis(ctx context.Context) (state.BeaconState, error) { if gen.Config.PragueTime != nil { fields["prague"] = fmt.Sprintf("%d", *gen.Config.PragueTime) } + if gen.Config.OsakaTime != nil { + fields["osaka"] = fmt.Sprintf("%d", *gen.Config.OsakaTime) + } log.WithFields(fields).Info("Setting fork geth times") if v > version.Altair { // set ttd to zero so EL goes post-merge immediately diff --git a/runtime/interop/genesis.go b/runtime/interop/genesis.go index 70a8a1a4ff..94cf49418b 100644 --- a/runtime/interop/genesis.go +++ b/runtime/interop/genesis.go @@ -113,6 +113,21 @@ func GethPragueTime(genesisTime time.Time, cfg *clparams.BeaconChainConfig) *uin return pragueTime } +// GethOsakaTime calculates the absolute time of the osaka (aka fulu) fork block +// by adding the relative time of the capella the fork epoch to the given genesis timestamp. 
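+// For illustration, assuming a preset with 32 slots per epoch and 12-second slots,
+// FuluForkEpoch = 2 corresponds to start slot 64, so the derived Osaka timestamp
+// would be genesisTime + 64*12 = genesisTime + 768 seconds. The real values come
+// from the active beacon chain config at runtime.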
+func GethOsakaTime(genesisTime time.Time, cfg *clparams.BeaconChainConfig) *uint64 { + var osakaTime *uint64 + if cfg.FuluForkEpoch != math.MaxUint64 { + startSlot, err := slots.EpochStart(cfg.FuluForkEpoch) + if err == nil { + startTime := slots.UnsafeStartTime(genesisTime, startSlot) + newTime := uint64(startTime.Unix()) + osakaTime = &newTime + } + } + return osakaTime +} + // GethTestnetGenesis creates a genesis.json for eth1 clients with a set of defaults suitable for ephemeral testnets, // like in an e2e test. The parameters are minimal but the full value is returned unmarshaled so that it can be // customized as desired. @@ -130,6 +145,10 @@ func GethTestnetGenesis(genesis time.Time, cfg *clparams.BeaconChainConfig) *cor if cfg.ElectraForkEpoch == 0 { pragueTime = &genesisTime } + osakaTime := GethOsakaTime(genesis, cfg) + if cfg.FuluForkEpoch == 0 { + osakaTime = &genesisTime + } cc := ¶ms.ChainConfig{ ChainID: big.NewInt(defaultTestChainId), HomesteadBlock: bigz, @@ -151,6 +170,7 @@ func GethTestnetGenesis(genesis time.Time, cfg *clparams.BeaconChainConfig) *cor ShanghaiTime: shanghaiTime, CancunTime: cancunTime, PragueTime: pragueTime, + OsakaTime: osakaTime, DepositContractAddress: common.HexToAddress(cfg.DepositContractAddress), BlobScheduleConfig: ¶ms.BlobScheduleConfig{ Cancun: ¶ms.BlobConfig{ From 02fb1534e1dca58971aa70b9079635e474117245 Mon Sep 17 00:00:00 2001 From: Muzry Date: Sat, 8 Nov 2025 06:12:40 +0800 Subject: [PATCH 090/103] Improve readability in slashing import and remove duplicated code (#15957) Co-authored-by: james-prysm <90280386+james-prysm@users.noreply.github.com> --- changelog/muzry_improving_slashing_import.md | 2 ++ cmd/validator/slashing-protection/import.go | 17 +++++------------ 2 files changed, 7 insertions(+), 12 deletions(-) create mode 100644 changelog/muzry_improving_slashing_import.md diff --git a/changelog/muzry_improving_slashing_import.md b/changelog/muzry_improving_slashing_import.md new file mode 100644 index 0000000000..69fcabd6e3 --- /dev/null +++ b/changelog/muzry_improving_slashing_import.md @@ -0,0 +1,2 @@ +### Changed +- Improve readability in slashing import and remove duplicated code diff --git a/cmd/validator/slashing-protection/import.go b/cmd/validator/slashing-protection/import.go index 6c277ca1a2..a180398316 100644 --- a/cmd/validator/slashing-protection/import.go +++ b/cmd/validator/slashing-protection/import.go @@ -61,20 +61,13 @@ func importSlashingProtectionJSON(cliCtx *cli.Context) error { if isDatabaseMinimal { databaseFileDir = filesystem.DatabaseDirName } - return fmt.Errorf("%s (validator database) was not found at path %s, so nothing to export", databaseFileDir, dataDir) - } else { - if !isDatabaseMinimal { - matchPath = filepath.Dir(matchPath) // strip the file name - } - dataDir = matchPath - log.Infof("Found validator database at path %s", dataDir) + return fmt.Errorf("%s (validator database) was not found at path %s, so nothing to import", databaseFileDir, dataDir) } - message := "Found existing database inside of %s" - if !found { - message = "Did not find existing database inside of %s, creating a new one" + if !isDatabaseMinimal { + matchPath = filepath.Dir(matchPath) // strip the file name } - - log.Infof(message, dataDir) + dataDir = matchPath + log.Infof("Found validator database at path %s", dataDir) // Open the validator database. 
if isDatabaseMinimal { From 6735c921f80ff1b78027236b880ecfebd08ee9fb Mon Sep 17 00:00:00 2001 From: Potuz Date: Fri, 7 Nov 2025 20:18:44 -0500 Subject: [PATCH 091/103] Dependent root instead of target (#15996) * Add DepdenentRootForEpoch forkchoice helper * Use dependent root in helpers to get head --- beacon-chain/blockchain/chain_info.go | 8 ++ .../blockchain/process_attestation_helpers.go | 9 +- beacon-chain/blockchain/testing/mock.go | 5 ++ .../doubly-linked-tree/forkchoice.go | 17 ++-- .../doubly-linked-tree/store_test.go | 90 +++++++++++++++++++ beacon-chain/forkchoice/interfaces.go | 1 + beacon-chain/forkchoice/ro.go | 7 ++ beacon-chain/forkchoice/ro_test.go | 7 ++ beacon-chain/sync/validate_beacon_blocks.go | 8 +- beacon-chain/verification/blob_test.go | 15 ++-- beacon-chain/verification/data_column.go | 8 +- beacon-chain/verification/initializer.go | 1 + ...uz_use_dependent_root_instead_of_target.md | 3 + 13 files changed, 157 insertions(+), 22 deletions(-) create mode 100644 changelog/potuz_use_dependent_root_instead_of_target.md diff --git a/beacon-chain/blockchain/chain_info.go b/beacon-chain/blockchain/chain_info.go index 396c5e5b21..b9b6708825 100644 --- a/beacon-chain/blockchain/chain_info.go +++ b/beacon-chain/blockchain/chain_info.go @@ -79,6 +79,7 @@ type HeadFetcher interface { HeadPublicKeyToValidatorIndex(pubKey [fieldparams.BLSPubkeyLength]byte) (primitives.ValidatorIndex, bool) HeadValidatorIndexToPublicKey(ctx context.Context, index primitives.ValidatorIndex) ([fieldparams.BLSPubkeyLength]byte, error) ChainHeads() ([][32]byte, []primitives.Slot) + DependentRootForEpoch([32]byte, primitives.Epoch) ([32]byte, error) TargetRootForEpoch([32]byte, primitives.Epoch) ([32]byte, error) HeadSyncCommitteeFetcher HeadDomainFetcher @@ -470,6 +471,13 @@ func (s *Service) IsOptimisticForRoot(ctx context.Context, root [32]byte) (bool, return !isCanonical, nil } +// DependentRootForEpoch wraps the corresponding method in forkchoice +func (s *Service) DependentRootForEpoch(root [32]byte, epoch primitives.Epoch) ([32]byte, error) { + s.cfg.ForkChoiceStore.RLock() + defer s.cfg.ForkChoiceStore.RUnlock() + return s.cfg.ForkChoiceStore.DependentRootForEpoch(root, epoch) +} + // TargetRootForEpoch wraps the corresponding method in forkchoice func (s *Service) TargetRootForEpoch(root [32]byte, epoch primitives.Epoch) ([32]byte, error) { s.cfg.ForkChoiceStore.RLock() diff --git a/beacon-chain/blockchain/process_attestation_helpers.go b/beacon-chain/blockchain/process_attestation_helpers.go index 385a1780a5..6b211372db 100644 --- a/beacon-chain/blockchain/process_attestation_helpers.go +++ b/beacon-chain/blockchain/process_attestation_helpers.go @@ -1,7 +1,6 @@ package blockchain import ( - "bytes" "context" "fmt" "strconv" @@ -34,11 +33,15 @@ func (s *Service) getRecentPreState(ctx context.Context, c *ethpb.Checkpoint) st if err != nil { return nil } - headTarget, err := s.cfg.ForkChoiceStore.TargetRootForEpoch([32]byte(headRoot), c.Epoch) + headDependent, err := s.cfg.ForkChoiceStore.DependentRootForEpoch([32]byte(headRoot), c.Epoch) if err != nil { return nil } - if !bytes.Equal(c.Root, headTarget[:]) { + targetDependent, err := s.cfg.ForkChoiceStore.DependentRootForEpoch([32]byte(c.Root), c.Epoch) + if err != nil { + return nil + } + if targetDependent != headDependent { return nil } diff --git a/beacon-chain/blockchain/testing/mock.go b/beacon-chain/blockchain/testing/mock.go index 6c7e2d1463..2d2d8904e5 100644 --- a/beacon-chain/blockchain/testing/mock.go +++ 
b/beacon-chain/blockchain/testing/mock.go @@ -758,6 +758,11 @@ func (c *ChainService) ReceiveDataColumns(dcs []blocks.VerifiedRODataColumn) err return nil } +// DependentRootForEpoch mocks the same method in the chain service +func (c *ChainService) DependentRootForEpoch(_ [32]byte, _ primitives.Epoch) ([32]byte, error) { + return c.TargetRoot, nil +} + // TargetRootForEpoch mocks the same method in the chain service func (c *ChainService) TargetRootForEpoch(_ [32]byte, _ primitives.Epoch) ([32]byte, error) { return c.TargetRoot, nil diff --git a/beacon-chain/forkchoice/doubly-linked-tree/forkchoice.go b/beacon-chain/forkchoice/doubly-linked-tree/forkchoice.go index 516a3faf0f..118d247c21 100644 --- a/beacon-chain/forkchoice/doubly-linked-tree/forkchoice.go +++ b/beacon-chain/forkchoice/doubly-linked-tree/forkchoice.go @@ -626,21 +626,26 @@ func (f *ForkChoice) Slot(root [32]byte) (primitives.Slot, error) { // DependentRoot returns the last root of the epoch prior to the requested ecoch in the canonical chain. func (f *ForkChoice) DependentRoot(epoch primitives.Epoch) ([32]byte, error) { - tr, err := f.TargetRootForEpoch(f.CachedHeadRoot(), epoch) + return f.DependentRootForEpoch(f.CachedHeadRoot(), epoch) +} + +// DependentRootForEpoch return the last root of the epoch prior to the requested ecoch for the given root. +func (f *ForkChoice) DependentRootForEpoch(root [32]byte, epoch primitives.Epoch) ([32]byte, error) { + tr, err := f.TargetRootForEpoch(root, epoch) if err != nil { return [32]byte{}, err } if tr == [32]byte{} { return [32]byte{}, nil } - n, ok := f.store.nodeByRoot[tr] - if !ok || n == nil { + node, ok := f.store.nodeByRoot[tr] + if !ok || node == nil { return [32]byte{}, ErrNilNode } - if slots.ToEpoch(n.slot) == epoch && n.parent != nil { - n = n.parent + if slots.ToEpoch(node.slot) >= epoch && node.parent != nil { + node = node.parent } - return n.root, nil + return node.root, nil } // TargetRootForEpoch returns the root of the target block for a given epoch. 
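The DependentRootForEpoch accessor added above is what the other hunks in this patch (process_attestation_helpers.go, validate_beacon_blocks.go and data_column.go) lean on to decide whether two branches agree on an epoch's duties. A minimal sketch of that calling pattern, assuming the v7 module paths used elsewhere in this series; the package, function and variable names here are illustrative only and not part of the patch:

    package example

    import (
        forkchoice "github.com/OffchainLabs/prysm/v7/beacon-chain/forkchoice"
        "github.com/OffchainLabs/prysm/v7/consensus-types/primitives"
    )

    // sameDependentRoot reports whether two block roots resolve to the same dependent
    // root for the given epoch, i.e. whether duties derived for that epoch agree on
    // both branches. Forkchoice errors are surfaced unchanged to the caller.
    func sameDependentRoot(fc forkchoice.FastGetter, a, b [32]byte, epoch primitives.Epoch) (bool, error) {
        da, err := fc.DependentRootForEpoch(a, epoch)
        if err != nil {
            return false, err
        }
        db, err := fc.DependentRootForEpoch(b, epoch)
        if err != nil {
            return false, err
        }
        return da == db, nil
    }

Comparing dependent roots is deliberately laxer than comparing target roots: two branches can disagree on the first block of an epoch (the target) while still sharing everything from before the epoch, which is what epoch-dependent duties are derived from.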
diff --git a/beacon-chain/forkchoice/doubly-linked-tree/store_test.go b/beacon-chain/forkchoice/doubly-linked-tree/store_test.go index a501a261d0..ec1f8404fa 100644 --- a/beacon-chain/forkchoice/doubly-linked-tree/store_test.go +++ b/beacon-chain/forkchoice/doubly-linked-tree/store_test.go @@ -608,6 +608,96 @@ func TestStore_TargetRootForEpoch(t *testing.T) { require.Equal(t, blk4.Root(), target) } +func TestStore_DependentRootForEpoch(t *testing.T) { + ctx := t.Context() + f := setup(1, 1) + + // Build the following tree structure: + // /------------37 + // 0<--31<---32 <---33 <--- 35 <-------- 65 <--- 66 + // \-- 36 ------------- 38 + + // Insert block at slot 31 (epoch 0) + state, blk31, err := prepareForkchoiceState(ctx, 31, [32]byte{31}, params.BeaconConfig().ZeroHash, params.BeaconConfig().ZeroHash, 1, 1) + require.NoError(t, err) + require.NoError(t, f.InsertNode(ctx, state, blk31)) + + // Insert block at slot 32 (epoch 1) + state, blk32, err := prepareForkchoiceState(ctx, 32, [32]byte{32}, blk31.Root(), params.BeaconConfig().ZeroHash, 1, 1) + require.NoError(t, err) + require.NoError(t, f.InsertNode(ctx, state, blk32)) + + // Insert block at slot 33 (epoch 1) + state, blk33, err := prepareForkchoiceState(ctx, 33, [32]byte{33}, blk32.Root(), params.BeaconConfig().ZeroHash, 1, 1) + require.NoError(t, err) + require.NoError(t, f.InsertNode(ctx, state, blk33)) + + // Insert block at slot 35 (epoch 1) + state, blk35, err := prepareForkchoiceState(ctx, 35, [32]byte{35}, blk33.Root(), params.BeaconConfig().ZeroHash, 1, 1) + require.NoError(t, err) + require.NoError(t, f.InsertNode(ctx, state, blk35)) + + // Insert fork: block at slot 36 (epoch 1) descending from block 32 + state, blk36, err := prepareForkchoiceState(ctx, 36, [32]byte{36}, blk32.Root(), params.BeaconConfig().ZeroHash, 1, 1) + require.NoError(t, err) + require.NoError(t, f.InsertNode(ctx, state, blk36)) + + // Insert block at slot 37 (epoch 1) descending from block 33 + state, blk37, err := prepareForkchoiceState(ctx, 37, [32]byte{37}, blk33.Root(), params.BeaconConfig().ZeroHash, 1, 1) + require.NoError(t, err) + require.NoError(t, f.InsertNode(ctx, state, blk37)) + + // Insert block at slot 38 (epoch 1) descending from block 36 + state, blk38, err := prepareForkchoiceState(ctx, 38, [32]byte{38}, blk36.Root(), params.BeaconConfig().ZeroHash, 1, 1) + require.NoError(t, err) + require.NoError(t, f.InsertNode(ctx, state, blk38)) + + // Insert block at slot 65 (epoch 2) descending from block 35 + state, blk65, err := prepareForkchoiceState(ctx, 65, [32]byte{65}, blk35.Root(), params.BeaconConfig().ZeroHash, 1, 1) + require.NoError(t, err) + require.NoError(t, f.InsertNode(ctx, state, blk65)) + + // Insert block at slot 66 (epoch 2) descending from block 65 + state, blk66, err := prepareForkchoiceState(ctx, 66, [32]byte{66}, blk65.Root(), params.BeaconConfig().ZeroHash, 1, 1) + require.NoError(t, err) + require.NoError(t, f.InsertNode(ctx, state, blk66)) + + // Test dependent root for block 32 at epoch 1 - should be block 31 + dependent, err := f.DependentRootForEpoch(blk32.Root(), 1) + require.NoError(t, err) + require.Equal(t, blk31.Root(), dependent) + + // Test dependent root for block 32 at epoch 2 - should be block 32 + dependent, err = f.DependentRootForEpoch(blk32.Root(), 2) + require.NoError(t, err) + require.Equal(t, blk32.Root(), dependent) + + // Test dependent root for block 33 at epoch 1 - should be block 31 + dependent, err = f.DependentRootForEpoch(blk33.Root(), 1) + require.NoError(t, err) + require.Equal(t, 
blk31.Root(), dependent) + + // Test dependent root for block 38 at epoch 1 - should be block 31 + dependent, err = f.DependentRootForEpoch(blk38.Root(), 1) + require.NoError(t, err) + require.Equal(t, blk31.Root(), dependent) + + // Test dependent root for block 36 at epoch 2 - should be block 36 + dependent, err = f.DependentRootForEpoch(blk36.Root(), 2) + require.NoError(t, err) + require.Equal(t, blk36.Root(), dependent) + + // Test dependent root for block 66 at epoch 1 - should be block 31 + dependent, err = f.DependentRootForEpoch(blk66.Root(), 1) + require.NoError(t, err) + require.Equal(t, blk31.Root(), dependent) + + // Test dependent root for block 66 at epoch 2 - should be block 35 + dependent, err = f.DependentRootForEpoch(blk66.Root(), 2) + require.NoError(t, err) + require.Equal(t, blk35.Root(), dependent) +} + func TestStore_CleanupInserting(t *testing.T) { f := setup(0, 0) ctx := t.Context() diff --git a/beacon-chain/forkchoice/interfaces.go b/beacon-chain/forkchoice/interfaces.go index 11339520f1..d3a33f20dd 100644 --- a/beacon-chain/forkchoice/interfaces.go +++ b/beacon-chain/forkchoice/interfaces.go @@ -81,6 +81,7 @@ type FastGetter interface { ShouldOverrideFCU() bool Slot([32]byte) (primitives.Slot, error) DependentRoot(primitives.Epoch) ([32]byte, error) + DependentRootForEpoch([32]byte, primitives.Epoch) ([32]byte, error) TargetRootForEpoch([32]byte, primitives.Epoch) ([32]byte, error) UnrealizedJustifiedPayloadBlockHash() [32]byte Weight(root [32]byte) (uint64, error) diff --git a/beacon-chain/forkchoice/ro.go b/beacon-chain/forkchoice/ro.go index 5f010ff22f..dd04f56c1f 100644 --- a/beacon-chain/forkchoice/ro.go +++ b/beacon-chain/forkchoice/ro.go @@ -177,6 +177,13 @@ func (ro *ROForkChoice) DependentRoot(epoch primitives.Epoch) ([32]byte, error) return ro.getter.DependentRoot(epoch) } +// DependentRootForEpoch delegates to the underlying forkchoice call, under a lock. +func (ro *ROForkChoice) DependentRootForEpoch(root [32]byte, epoch primitives.Epoch) ([32]byte, error) { + ro.l.RLock() + defer ro.l.RUnlock() + return ro.getter.DependentRootForEpoch(root, epoch) +} + // TargetRootForEpoch delegates to the underlying forkchoice call, under a lock. func (ro *ROForkChoice) TargetRootForEpoch(root [32]byte, epoch primitives.Epoch) ([32]byte, error) { ro.l.RLock() diff --git a/beacon-chain/forkchoice/ro_test.go b/beacon-chain/forkchoice/ro_test.go index 257edf658d..7e31eb401e 100644 --- a/beacon-chain/forkchoice/ro_test.go +++ b/beacon-chain/forkchoice/ro_test.go @@ -40,6 +40,7 @@ const ( targetRootForEpochCalled parentRootCalled dependentRootCalled + dependentRootForEpochCalled ) func _discard(t *testing.T, e error) { @@ -305,6 +306,12 @@ func (ro *mockROForkchoice) DependentRoot(_ primitives.Epoch) ([32]byte, error) return [32]byte{}, nil } +// DependentRootForEpoch implements FastGetter. +func (ro *mockROForkchoice) DependentRootForEpoch(_ [32]byte, _ primitives.Epoch) ([32]byte, error) { + ro.calls = append(ro.calls, dependentRootForEpochCalled) + return [32]byte{}, nil +} + // TargetRootForEpoch implements FastGetter. 
func (ro *mockROForkchoice) TargetRootForEpoch(_ [32]byte, _ primitives.Epoch) ([32]byte, error) { ro.calls = append(ro.calls, targetRootForEpochCalled) diff --git a/beacon-chain/sync/validate_beacon_blocks.go b/beacon-chain/sync/validate_beacon_blocks.go index d203411742..021a1df0ed 100644 --- a/beacon-chain/sync/validate_beacon_blocks.go +++ b/beacon-chain/sync/validate_beacon_blocks.go @@ -337,17 +337,17 @@ func (s *Service) blockVerifyingState(ctx context.Context, blk interfaces.ReadOn } return transition.ProcessSlotsUsingNextSlotCache(ctx, headState, headRoot, blockSlot) } - // If head and block are in the same epoch and head is compatible with the parent's target, then use head + // If head and block are in the same epoch and head is compatible with the parent's dependent root, then use head if blockEpoch == headEpoch { - headTarget, err := s.cfg.chain.TargetRootForEpoch([32]byte(headRoot), blockEpoch) + headDependent, err := s.cfg.chain.DependentRootForEpoch([32]byte(headRoot), blockEpoch) if err != nil { return nil, err } - parentTarget, err := s.cfg.chain.TargetRootForEpoch([32]byte(parentRoot), blockEpoch) + parentDependent, err := s.cfg.chain.DependentRootForEpoch([32]byte(parentRoot), blockEpoch) if err != nil { return nil, err } - if bytes.Equal(headTarget[:], parentTarget[:]) { + if bytes.Equal(headDependent[:], parentDependent[:]) { return s.cfg.chain.HeadStateReadOnly(ctx) } } diff --git a/beacon-chain/verification/blob_test.go b/beacon-chain/verification/blob_test.go index 27e73428e3..88f8fe8790 100644 --- a/beacon-chain/verification/blob_test.go +++ b/beacon-chain/verification/blob_test.go @@ -548,11 +548,12 @@ func TestRequirementSatisfaction(t *testing.T) { } type mockForkchoicer struct { - FinalizedCheckpointCB func() *forkchoicetypes.Checkpoint - HasNodeCB func([32]byte) bool - IsCanonicalCB func(root [32]byte) bool - SlotCB func([32]byte) (primitives.Slot, error) - TargetRootForEpochCB func([32]byte, primitives.Epoch) ([32]byte, error) + FinalizedCheckpointCB func() *forkchoicetypes.Checkpoint + HasNodeCB func([32]byte) bool + IsCanonicalCB func(root [32]byte) bool + SlotCB func([32]byte) (primitives.Slot, error) + DependentRootForEpochCB func([32]byte, primitives.Epoch) ([32]byte, error) + TargetRootForEpochCB func([32]byte, primitives.Epoch) ([32]byte, error) } var _ Forkchoicer = &mockForkchoicer{} @@ -573,6 +574,10 @@ func (m *mockForkchoicer) Slot(root [32]byte) (primitives.Slot, error) { return m.SlotCB(root) } +func (m *mockForkchoicer) DependentRootForEpoch(root [32]byte, epoch primitives.Epoch) ([32]byte, error) { + return m.DependentRootForEpochCB(root, epoch) +} + func (m *mockForkchoicer) TargetRootForEpoch(root [32]byte, epoch primitives.Epoch) ([32]byte, error) { return m.TargetRootForEpochCB(root, epoch) } diff --git a/beacon-chain/verification/data_column.go b/beacon-chain/verification/data_column.go index c6ec953f45..b27b8df8c9 100644 --- a/beacon-chain/verification/data_column.go +++ b/beacon-chain/verification/data_column.go @@ -319,17 +319,17 @@ func (dv *RODataColumnsVerifier) getVerifyingState(ctx context.Context, dataColu return transition.ProcessSlotsUsingNextSlotCache(ctx, headState, headRoot, dataColumnSlot) } - // If head and data column are in the same epoch and head is compatible with the parent's target, then use head + // If head and data column are in the same epoch and head is compatible with the parent's depdendent root, then use head if dataColumnEpoch == headEpoch { - headTarget, err := 
dv.fc.TargetRootForEpoch(bytesutil.ToBytes32(headRoot), dataColumnEpoch) + headDependent, err := dv.fc.DependentRootForEpoch(bytesutil.ToBytes32(headRoot), dataColumnEpoch) if err != nil { return nil, err } - parentTarget, err := dv.fc.TargetRootForEpoch(parentRoot, dataColumnEpoch) + parentDependent, err := dv.fc.DependentRootForEpoch(parentRoot, dataColumnEpoch) if err != nil { return nil, err } - if bytes.Equal(headTarget[:], parentTarget[:]) { + if bytes.Equal(headDependent[:], parentDependent[:]) { return dv.hsp.HeadStateReadOnly(ctx) } } diff --git a/beacon-chain/verification/initializer.go b/beacon-chain/verification/initializer.go index bb1de673f8..265e5b16db 100644 --- a/beacon-chain/verification/initializer.go +++ b/beacon-chain/verification/initializer.go @@ -25,6 +25,7 @@ type Forkchoicer interface { HasNode([32]byte) bool IsCanonical(root [32]byte) bool Slot([32]byte) (primitives.Slot, error) + DependentRootForEpoch([32]byte, primitives.Epoch) ([32]byte, error) TargetRootForEpoch([32]byte, primitives.Epoch) ([32]byte, error) } diff --git a/changelog/potuz_use_dependent_root_instead_of_target.md b/changelog/potuz_use_dependent_root_instead_of_target.md new file mode 100644 index 0000000000..8ae14f591a --- /dev/null +++ b/changelog/potuz_use_dependent_root_instead_of_target.md @@ -0,0 +1,3 @@ +### Changed + +- Use dependent root instead of target when possible. From eae15697daf0de4a32fde6480132988e585d62ea Mon Sep 17 00:00:00 2001 From: satushh Date: Tue, 11 Nov 2025 17:20:48 +0000 Subject: [PATCH 092/103] nil block check (#16006) * nil block check * check err from OriginCheckpointBlockRoot * return error if block is not found * use block.IsNil() --- beacon-chain/sync/initial-sync/service.go | 7 +++++++ changelog/satushh-fetchoriginsidecars-bug.md | 3 +++ 2 files changed, 10 insertions(+) create mode 100644 changelog/satushh-fetchoriginsidecars-bug.md diff --git a/beacon-chain/sync/initial-sync/service.go b/beacon-chain/sync/initial-sync/service.go index b82d288234..8c6aea4a08 100644 --- a/beacon-chain/sync/initial-sync/service.go +++ b/beacon-chain/sync/initial-sync/service.go @@ -217,10 +217,17 @@ func (s *Service) fetchOriginSidecars(peers []peer.ID) error { return nil } + if err != nil { + return errors.Wrap(err, "error fetching origin checkpoint blockroot") + } + block, err := s.cfg.DB.Block(s.ctx, blockRoot) if err != nil { return errors.Wrap(err, "block") } + if block.IsNil() { + return errors.Errorf("origin block for root %#x not found in database", blockRoot) + } currentSlot, blockSlot := s.clock.CurrentSlot(), block.Block().Slot() currentEpoch, blockEpoch := slots.ToEpoch(currentSlot), slots.ToEpoch(blockSlot) diff --git a/changelog/satushh-fetchoriginsidecars-bug.md b/changelog/satushh-fetchoriginsidecars-bug.md new file mode 100644 index 0000000000..a2f61e7d2d --- /dev/null +++ b/changelog/satushh-fetchoriginsidecars-bug.md @@ -0,0 +1,3 @@ +### Fixed + +- Nil check for block if it doesn't exist in the DB in fetchOriginSidecars \ No newline at end of file From e1b98a4ca19c0e48215b5c7ba898e36022524330 Mon Sep 17 00:00:00 2001 From: james-prysm <90280386+james-prysm@users.noreply.github.com> Date: Wed, 12 Nov 2025 11:53:39 -0800 Subject: [PATCH 093/103] optimize get blobs (#15902) * init * reverting some functions * rolling back a change and fixing linting * wip * wip * fixing test * breaking up proofs and cells for cleaner code * fixing test and type * fixing safe conversion * fixing test * fixing more tests * fixing even more tests * fix the 0 indices option * adding 
a test for coverage * small test update * changelog * radek's suggestions * Update beacon-chain/core/peerdas/validator.go Co-authored-by: Manu NALEPA * addressing comments on kzg package * addressing suggestions for reconstruction * more manu feedback items * removing unneeded files * removing unneeded setter --------- Co-authored-by: james-prysm Co-authored-by: Manu NALEPA --- beacon-chain/blockchain/kzg/BUILD.bazel | 1 + beacon-chain/blockchain/kzg/kzg.go | 84 ++-- beacon-chain/blockchain/kzg/kzg_test.go | 236 ++++++++++ .../blockchain/kzg/validation_test.go | 12 +- beacon-chain/core/peerdas/BUILD.bazel | 1 + .../core/peerdas/p2p_interface_test.go | 4 +- beacon-chain/core/peerdas/reconstruction.go | 445 ++++++++++++------ .../peerdas/reconstruction_helpers_test.go | 79 ++++ .../core/peerdas/reconstruction_test.go | 229 +++++++-- beacon-chain/core/peerdas/validator.go | 32 +- beacon-chain/core/peerdas/validator_test.go | 88 ++-- beacon-chain/execution/engine_client.go | 18 +- beacon-chain/rpc/eth/beacon/handlers_test.go | 4 +- beacon-chain/rpc/eth/blob/handlers.go | 19 +- beacon-chain/rpc/lookup/blocker.go | 196 ++++++-- beacon-chain/rpc/lookup/blocker_test.go | 82 ++-- .../rpc/prysm/v1alpha1/validator/proposer.go | 4 +- beacon-chain/rpc/testutil/mock_blocker.go | 7 +- beacon-chain/verification/data_column_test.go | 4 +- changelog/james-prysm_optimize-get-blobs.md | 3 + ..._kzg__compute_cells_and_kzg_proofs_test.go | 12 +- ..._kzg__recover_cells_and_kzg_proofs_test.go | 12 +- testing/util/fulu.go | 16 +- 23 files changed, 1181 insertions(+), 407 deletions(-) create mode 100644 beacon-chain/blockchain/kzg/kzg_test.go create mode 100644 beacon-chain/core/peerdas/reconstruction_helpers_test.go create mode 100644 changelog/james-prysm_optimize-get-blobs.md diff --git a/beacon-chain/blockchain/kzg/BUILD.bazel b/beacon-chain/blockchain/kzg/BUILD.bazel index 6b5cdffc7c..e1595cd8b3 100644 --- a/beacon-chain/blockchain/kzg/BUILD.bazel +++ b/beacon-chain/blockchain/kzg/BUILD.bazel @@ -23,6 +23,7 @@ go_library( go_test( name = "go_default_test", srcs = [ + "kzg_test.go", "trusted_setup_test.go", "validation_test.go", ], diff --git a/beacon-chain/blockchain/kzg/kzg.go b/beacon-chain/blockchain/kzg/kzg.go index f7831ea59d..6bd336857a 100644 --- a/beacon-chain/blockchain/kzg/kzg.go +++ b/beacon-chain/blockchain/kzg/kzg.go @@ -34,12 +34,6 @@ type Bytes48 = ckzg4844.Bytes48 // Bytes32 is a 32-byte array. type Bytes32 = ckzg4844.Bytes32 -// CellsAndProofs represents the Cells and Proofs corresponding to a single blob. -type CellsAndProofs struct { - Cells []Cell - Proofs []Proof -} - // BlobToKZGCommitment computes a KZG commitment from a given blob. func BlobToKZGCommitment(blob *Blob) (Commitment, error) { var kzgBlob kzg4844.Blob @@ -65,7 +59,7 @@ func ComputeCells(blob *Blob) ([]Cell, error) { cells := make([]Cell, len(ckzgCells)) for i := range ckzgCells { - cells[i] = Cell(ckzgCells[i]) + copy(cells[i][:], ckzgCells[i][:]) } return cells, nil @@ -78,22 +72,35 @@ func ComputeBlobKZGProof(blob *Blob, commitment Commitment) (Proof, error) { proof, err := kzg4844.ComputeBlobProof(&kzgBlob, kzg4844.Commitment(commitment)) if err != nil { - return [48]byte{}, err + return Proof{}, err } - return Proof(proof), nil + var result Proof + copy(result[:], proof[:]) + return result, nil } // ComputeCellsAndKZGProofs computes the cells and cells KZG proofs from a given blob. 
-func ComputeCellsAndKZGProofs(blob *Blob) (CellsAndProofs, error) { +func ComputeCellsAndKZGProofs(blob *Blob) ([]Cell, []Proof, error) { var ckzgBlob ckzg4844.Blob copy(ckzgBlob[:], blob[:]) ckzgCells, ckzgProofs, err := ckzg4844.ComputeCellsAndKZGProofs(&ckzgBlob) if err != nil { - return CellsAndProofs{}, err + return nil, nil, err } - return makeCellsAndProofs(ckzgCells[:], ckzgProofs[:]) + if len(ckzgCells) != len(ckzgProofs) { + return nil, nil, errors.New("mismatched cells and proofs length") + } + + cells := make([]Cell, len(ckzgCells)) + proofs := make([]Proof, len(ckzgProofs)) + for i := range ckzgCells { + copy(cells[i][:], ckzgCells[i][:]) + copy(proofs[i][:], ckzgProofs[i][:]) + } + + return cells, proofs, nil } // VerifyCellKZGProofBatch verifies the KZG proofs for a given slice of commitments, cells indices, cells and proofs. @@ -103,44 +110,57 @@ func VerifyCellKZGProofBatch(commitmentsBytes []Bytes48, cellIndices []uint64, c ckzgCells := make([]ckzg4844.Cell, len(cells)) for i := range cells { - ckzgCells[i] = ckzg4844.Cell(cells[i]) + copy(ckzgCells[i][:], cells[i][:]) } return ckzg4844.VerifyCellKZGProofBatch(commitmentsBytes, cellIndices, ckzgCells, proofsBytes) } -// RecoverCellsAndKZGProofs recovers the complete cells and KZG proofs from a given set of cell indices and partial cells. +// RecoverCells recovers the complete cells from a given set of cell indices and partial cells. // Note: `len(cellIndices)` must be equal to `len(partialCells)` and `cellIndices` must be sorted in ascending order. -func RecoverCellsAndKZGProofs(cellIndices []uint64, partialCells []Cell) (CellsAndProofs, error) { +func RecoverCells(cellIndices []uint64, partialCells []Cell) ([]Cell, error) { // Convert `Cell` type to `ckzg4844.Cell` ckzgPartialCells := make([]ckzg4844.Cell, len(partialCells)) for i := range partialCells { - ckzgPartialCells[i] = ckzg4844.Cell(partialCells[i]) + copy(ckzgPartialCells[i][:], partialCells[i][:]) + } + + ckzgCells, err := ckzg4844.RecoverCells(cellIndices, ckzgPartialCells) + if err != nil { + return nil, errors.Wrap(err, "recover cells") + } + + cells := make([]Cell, len(ckzgCells)) + for i := range ckzgCells { + copy(cells[i][:], ckzgCells[i][:]) + } + + return cells, nil +} + +// RecoverCellsAndKZGProofs recovers the complete cells and KZG proofs from a given set of cell indices and partial cells. +// Note: `len(cellIndices)` must be equal to `len(partialCells)` and `cellIndices` must be sorted in ascending order. +func RecoverCellsAndKZGProofs(cellIndices []uint64, partialCells []Cell) ([]Cell, []Proof, error) { + // Convert `Cell` type to `ckzg4844.Cell` + ckzgPartialCells := make([]ckzg4844.Cell, len(partialCells)) + for i := range partialCells { + copy(ckzgPartialCells[i][:], partialCells[i][:]) } ckzgCells, ckzgProofs, err := ckzg4844.RecoverCellsAndKZGProofs(cellIndices, ckzgPartialCells) if err != nil { - return CellsAndProofs{}, errors.Wrap(err, "recover cells and KZG proofs") + return nil, nil, errors.Wrap(err, "recover cells and KZG proofs") } - return makeCellsAndProofs(ckzgCells[:], ckzgProofs[:]) -} - -// makeCellsAndProofs converts cells/proofs to the CellsAndProofs type defined in this package. 
-func makeCellsAndProofs(ckzgCells []ckzg4844.Cell, ckzgProofs []ckzg4844.KZGProof) (CellsAndProofs, error) { if len(ckzgCells) != len(ckzgProofs) { - return CellsAndProofs{}, errors.New("different number of cells/proofs") + return nil, nil, errors.New("mismatched cells and proofs length") } - cells := make([]Cell, 0, len(ckzgCells)) - proofs := make([]Proof, 0, len(ckzgProofs)) - + cells := make([]Cell, len(ckzgCells)) + proofs := make([]Proof, len(ckzgProofs)) for i := range ckzgCells { - cells = append(cells, Cell(ckzgCells[i])) - proofs = append(proofs, Proof(ckzgProofs[i])) + copy(cells[i][:], ckzgCells[i][:]) + copy(proofs[i][:], ckzgProofs[i][:]) } - return CellsAndProofs{ - Cells: cells, - Proofs: proofs, - }, nil + return cells, proofs, nil } diff --git a/beacon-chain/blockchain/kzg/kzg_test.go b/beacon-chain/blockchain/kzg/kzg_test.go new file mode 100644 index 0000000000..a805af64bf --- /dev/null +++ b/beacon-chain/blockchain/kzg/kzg_test.go @@ -0,0 +1,236 @@ +package kzg + +import ( + "testing" + + "github.com/OffchainLabs/prysm/v7/crypto/random" + "github.com/OffchainLabs/prysm/v7/testing/require" +) + +func TestComputeCells(t *testing.T) { + require.NoError(t, Start()) + + t.Run("valid blob", func(t *testing.T) { + randBlob := random.GetRandBlob(123) + var blob Blob + copy(blob[:], randBlob[:]) + + cells, err := ComputeCells(&blob) + require.NoError(t, err) + require.Equal(t, 128, len(cells)) + }) +} + +func TestComputeBlobKZGProof(t *testing.T) { + require.NoError(t, Start()) + + t.Run("valid blob and commitment", func(t *testing.T) { + randBlob := random.GetRandBlob(123) + var blob Blob + copy(blob[:], randBlob[:]) + + commitment, err := BlobToKZGCommitment(&blob) + require.NoError(t, err) + + proof, err := ComputeBlobKZGProof(&blob, commitment) + require.NoError(t, err) + require.Equal(t, BytesPerProof, len(proof)) + require.NotEqual(t, Proof{}, proof, "proof should not be empty") + }) +} + +func TestComputeCellsAndKZGProofs(t *testing.T) { + require.NoError(t, Start()) + + t.Run("valid blob returns matching cells and proofs", func(t *testing.T) { + randBlob := random.GetRandBlob(123) + var blob Blob + copy(blob[:], randBlob[:]) + + cells, proofs, err := ComputeCellsAndKZGProofs(&blob) + require.NoError(t, err) + require.Equal(t, 128, len(cells)) + require.Equal(t, 128, len(proofs)) + require.Equal(t, len(cells), len(proofs), "cells and proofs should have matching lengths") + }) +} + +func TestVerifyCellKZGProofBatch(t *testing.T) { + require.NoError(t, Start()) + + t.Run("valid proof batch", func(t *testing.T) { + randBlob := random.GetRandBlob(123) + var blob Blob + copy(blob[:], randBlob[:]) + + commitment, err := BlobToKZGCommitment(&blob) + require.NoError(t, err) + + cells, proofs, err := ComputeCellsAndKZGProofs(&blob) + require.NoError(t, err) + + // Verify a subset of cells + cellIndices := []uint64{0, 1, 2, 3, 4} + selectedCells := make([]Cell, len(cellIndices)) + commitmentsBytes := make([]Bytes48, len(cellIndices)) + proofsBytes := make([]Bytes48, len(cellIndices)) + + for i, idx := range cellIndices { + selectedCells[i] = cells[idx] + copy(commitmentsBytes[i][:], commitment[:]) + copy(proofsBytes[i][:], proofs[idx][:]) + } + + valid, err := VerifyCellKZGProofBatch(commitmentsBytes, cellIndices, selectedCells, proofsBytes) + require.NoError(t, err) + require.Equal(t, true, valid) + }) + + t.Run("invalid proof should fail", func(t *testing.T) { + randBlob := random.GetRandBlob(123) + var blob Blob + copy(blob[:], randBlob[:]) + + commitment, err := 
BlobToKZGCommitment(&blob) + require.NoError(t, err) + + cells, _, err := ComputeCellsAndKZGProofs(&blob) + require.NoError(t, err) + + // Use invalid proofs + cellIndices := []uint64{0} + selectedCells := []Cell{cells[0]} + commitmentsBytes := make([]Bytes48, 1) + copy(commitmentsBytes[0][:], commitment[:]) + + // Create an invalid proof + invalidProof := Bytes48{} + proofsBytes := []Bytes48{invalidProof} + + valid, err := VerifyCellKZGProofBatch(commitmentsBytes, cellIndices, selectedCells, proofsBytes) + require.NotNil(t, err) + require.Equal(t, false, valid) + }) +} + +func TestRecoverCells(t *testing.T) { + require.NoError(t, Start()) + + t.Run("recover from partial cells", func(t *testing.T) { + randBlob := random.GetRandBlob(123) + var blob Blob + copy(blob[:], randBlob[:]) + + cells, err := ComputeCells(&blob) + require.NoError(t, err) + + // Use half of the cells + partialIndices := make([]uint64, 64) + partialCells := make([]Cell, 64) + for i := range 64 { + partialIndices[i] = uint64(i) + partialCells[i] = cells[i] + } + + recoveredCells, err := RecoverCells(partialIndices, partialCells) + require.NoError(t, err) + require.Equal(t, 128, len(recoveredCells)) + + // Verify recovered cells match original + for i := range cells { + require.Equal(t, cells[i], recoveredCells[i]) + } + }) + + t.Run("insufficient cells should fail", func(t *testing.T) { + randBlob := random.GetRandBlob(123) + var blob Blob + copy(blob[:], randBlob[:]) + + cells, err := ComputeCells(&blob) + require.NoError(t, err) + + // Use only 32 cells (less than 50% required) + partialIndices := make([]uint64, 32) + partialCells := make([]Cell, 32) + for i := range 32 { + partialIndices[i] = uint64(i) + partialCells[i] = cells[i] + } + + _, err = RecoverCells(partialIndices, partialCells) + require.NotNil(t, err) + }) +} + +func TestRecoverCellsAndKZGProofs(t *testing.T) { + require.NoError(t, Start()) + + t.Run("recover cells and proofs from partial cells", func(t *testing.T) { + randBlob := random.GetRandBlob(123) + var blob Blob + copy(blob[:], randBlob[:]) + + cells, proofs, err := ComputeCellsAndKZGProofs(&blob) + require.NoError(t, err) + + // Use half of the cells + partialIndices := make([]uint64, 64) + partialCells := make([]Cell, 64) + for i := range 64 { + partialIndices[i] = uint64(i) + partialCells[i] = cells[i] + } + + recoveredCells, recoveredProofs, err := RecoverCellsAndKZGProofs(partialIndices, partialCells) + require.NoError(t, err) + require.Equal(t, 128, len(recoveredCells)) + require.Equal(t, 128, len(recoveredProofs)) + require.Equal(t, len(recoveredCells), len(recoveredProofs), "recovered cells and proofs should have matching lengths") + + // Verify recovered cells match original + for i := range cells { + require.Equal(t, cells[i], recoveredCells[i]) + require.Equal(t, proofs[i], recoveredProofs[i]) + } + }) + + t.Run("insufficient cells should fail", func(t *testing.T) { + randBlob := random.GetRandBlob(123) + var blob Blob + copy(blob[:], randBlob[:]) + + cells, err := ComputeCells(&blob) + require.NoError(t, err) + + // Use only 32 cells (less than 50% required) + partialIndices := make([]uint64, 32) + partialCells := make([]Cell, 32) + for i := range 32 { + partialIndices[i] = uint64(i) + partialCells[i] = cells[i] + } + + _, _, err = RecoverCellsAndKZGProofs(partialIndices, partialCells) + require.NotNil(t, err) + }) +} + +func TestBlobToKZGCommitment(t *testing.T) { + require.NoError(t, Start()) + + t.Run("valid blob", func(t *testing.T) { + randBlob := random.GetRandBlob(123) + var 
blob Blob + copy(blob[:], randBlob[:]) + + commitment, err := BlobToKZGCommitment(&blob) + require.NoError(t, err) + require.Equal(t, 48, len(commitment)) + + // Verify commitment is deterministic + commitment2, err := BlobToKZGCommitment(&blob) + require.NoError(t, err) + require.Equal(t, commitment, commitment2) + }) +} diff --git a/beacon-chain/blockchain/kzg/validation_test.go b/beacon-chain/blockchain/kzg/validation_test.go index fdfcd67297..734e056121 100644 --- a/beacon-chain/blockchain/kzg/validation_test.go +++ b/beacon-chain/blockchain/kzg/validation_test.go @@ -203,13 +203,13 @@ func TestVerifyCellKZGProofBatchFromBlobData(t *testing.T) { require.NoError(t, err) // Compute cells and proofs - cellsAndProofs, err := ComputeCellsAndKZGProofs(&blob) + _, proofs, err := ComputeCellsAndKZGProofs(&blob) require.NoError(t, err) // Create flattened cell proofs (like execution client format) cellProofs := make([][]byte, numberOfColumns) for i := range numberOfColumns { - cellProofs[i] = cellsAndProofs.Proofs[i][:] + cellProofs[i] = proofs[i][:] } blobs := [][]byte{blob[:]} @@ -236,7 +236,7 @@ func TestVerifyCellKZGProofBatchFromBlobData(t *testing.T) { require.NoError(t, err) // Compute cells and proofs - cellsAndProofs, err := ComputeCellsAndKZGProofs(&blob) + _, proofs, err := ComputeCellsAndKZGProofs(&blob) require.NoError(t, err) blobs[i] = blob[:] @@ -244,7 +244,7 @@ func TestVerifyCellKZGProofBatchFromBlobData(t *testing.T) { // Add cell proofs for this blob for j := range numberOfColumns { - allCellProofs = append(allCellProofs, cellsAndProofs.Proofs[j][:]) + allCellProofs = append(allCellProofs, proofs[j][:]) } } @@ -319,7 +319,7 @@ func TestVerifyCellKZGProofBatchFromBlobData(t *testing.T) { randBlob := random.GetRandBlob(123) var blob Blob copy(blob[:], randBlob[:]) - cellsAndProofs, err := ComputeCellsAndKZGProofs(&blob) + _, proofs, err := ComputeCellsAndKZGProofs(&blob) require.NoError(t, err) // Generate wrong commitment from different blob @@ -331,7 +331,7 @@ func TestVerifyCellKZGProofBatchFromBlobData(t *testing.T) { cellProofs := make([][]byte, numberOfColumns) for i := range numberOfColumns { - cellProofs[i] = cellsAndProofs.Proofs[i][:] + cellProofs[i] = proofs[i][:] } blobs := [][]byte{blob[:]} diff --git a/beacon-chain/core/peerdas/BUILD.bazel b/beacon-chain/core/peerdas/BUILD.bazel index 82dfdf3c5d..c81e541516 100644 --- a/beacon-chain/core/peerdas/BUILD.bazel +++ b/beacon-chain/core/peerdas/BUILD.bazel @@ -43,6 +43,7 @@ go_test( "das_core_test.go", "info_test.go", "p2p_interface_test.go", + "reconstruction_helpers_test.go", "reconstruction_test.go", "utils_test.go", "validator_test.go", diff --git a/beacon-chain/core/peerdas/p2p_interface_test.go b/beacon-chain/core/peerdas/p2p_interface_test.go index 559556e266..b47a07e0fc 100644 --- a/beacon-chain/core/peerdas/p2p_interface_test.go +++ b/beacon-chain/core/peerdas/p2p_interface_test.go @@ -387,10 +387,10 @@ func generateRandomSidecars(t testing.TB, seed, blobCount int64) []blocks.ROData sBlock, err := blocks.NewSignedBeaconBlock(dbBlock) require.NoError(t, err) - cellsAndProofs := util.GenerateCellsAndProofs(t, blobs) + cellsPerBlob, proofsPerBlob := util.GenerateCellsAndProofs(t, blobs) rob, err := blocks.NewROBlock(sBlock) require.NoError(t, err) - sidecars, err := peerdas.DataColumnSidecars(cellsAndProofs, peerdas.PopulateFromBlock(rob)) + sidecars, err := peerdas.DataColumnSidecars(cellsPerBlob, proofsPerBlob, peerdas.PopulateFromBlock(rob)) require.NoError(t, err) return sidecars diff --git 
a/beacon-chain/core/peerdas/reconstruction.go b/beacon-chain/core/peerdas/reconstruction.go index 455d892b71..18daee6123 100644 --- a/beacon-chain/core/peerdas/reconstruction.go +++ b/beacon-chain/core/peerdas/reconstruction.go @@ -2,6 +2,7 @@ package peerdas import ( "sort" + "sync" "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain/kzg" fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" @@ -28,6 +29,80 @@ func MinimumColumnCountToReconstruct() uint64 { return (params.BeaconConfig().NumberOfColumns + 1) / 2 } +// recoverCellsForBlobs reconstructs cells for specified blobs from the given data column sidecars. +// This is optimized to only recover cells without computing proofs. +// Returns a map from blob index to recovered cells. +func recoverCellsForBlobs(verifiedRoSidecars []blocks.VerifiedRODataColumn, blobIndices []int) (map[int][]kzg.Cell, error) { + sidecarCount := len(verifiedRoSidecars) + var wg errgroup.Group + + cellsPerBlob := make(map[int][]kzg.Cell, len(blobIndices)) + var mu sync.Mutex + + for _, blobIndex := range blobIndices { + wg.Go(func() error { + cellsIndices := make([]uint64, 0, sidecarCount) + cells := make([]kzg.Cell, 0, sidecarCount) + + for _, sidecar := range verifiedRoSidecars { + cell := sidecar.Column[blobIndex] + cells = append(cells, kzg.Cell(cell)) + cellsIndices = append(cellsIndices, sidecar.Index) + } + + recoveredCells, err := kzg.RecoverCells(cellsIndices, cells) + if err != nil { + return errors.Wrapf(err, "recover cells for blob %d", blobIndex) + } + + mu.Lock() + cellsPerBlob[blobIndex] = recoveredCells + mu.Unlock() + return nil + }) + } + + if err := wg.Wait(); err != nil { + return nil, errors.Wrap(err, "wait for RecoverCells") + } + return cellsPerBlob, nil +} + +// recoverCellsAndProofsForBlobs reconstructs both cells and proofs for specified blobs from the given data column sidecars. +func recoverCellsAndProofsForBlobs(verifiedRoSidecars []blocks.VerifiedRODataColumn, blobIndices []int) ([][]kzg.Cell, [][]kzg.Proof, error) { + sidecarCount := len(verifiedRoSidecars) + var wg errgroup.Group + + cellsPerBlob := make([][]kzg.Cell, len(blobIndices)) + proofsPerBlob := make([][]kzg.Proof, len(blobIndices)) + + for i, blobIndex := range blobIndices { + wg.Go(func() error { + cellsIndices := make([]uint64, 0, sidecarCount) + cells := make([]kzg.Cell, 0, sidecarCount) + + for _, sidecar := range verifiedRoSidecars { + cell := sidecar.Column[blobIndex] + cells = append(cells, kzg.Cell(cell)) + cellsIndices = append(cellsIndices, sidecar.Index) + } + + recoveredCells, recoveredProofs, err := kzg.RecoverCellsAndKZGProofs(cellsIndices, cells) + if err != nil { + return errors.Wrapf(err, "recover cells and KZG proofs for blob %d", blobIndex) + } + cellsPerBlob[i] = recoveredCells + proofsPerBlob[i] = recoveredProofs + return nil + }) + } + + if err := wg.Wait(); err != nil { + return nil, nil, errors.Wrap(err, "wait for RecoverCellsAndKZGProofs") + } + return cellsPerBlob, proofsPerBlob, nil +} + // ReconstructDataColumnSidecars reconstructs all the data column sidecars from the given input data column sidecars. // All input sidecars must be committed to the same block. // `inVerifiedRoSidecars` should contain enough sidecars to reconstruct the missing columns, and should not contain any duplicate. @@ -66,38 +141,16 @@ func ReconstructDataColumnSidecars(verifiedRoSidecars []blocks.VerifiedRODataCol }) // Recover cells and compute proofs in parallel. 
- var wg errgroup.Group - cellsAndProofs := make([]kzg.CellsAndProofs, blobCount) - for blobIndex := range uint64(blobCount) { - wg.Go(func() error { - cellsIndices := make([]uint64, 0, sidecarCount) - cells := make([]kzg.Cell, 0, sidecarCount) - - for _, sidecar := range verifiedRoSidecars { - cell := sidecar.Column[blobIndex] - cells = append(cells, kzg.Cell(cell)) - cellsIndices = append(cellsIndices, sidecar.Index) - } - - // Recover the cells and proofs for the corresponding blob - cellsAndProofsForBlob, err := kzg.RecoverCellsAndKZGProofs(cellsIndices, cells) - - if err != nil { - return errors.Wrapf(err, "recover cells and KZG proofs for blob %d", blobIndex) - } - - // It is safe for multiple goroutines to concurrently write to the same slice, - // as long as they are writing to different indices, which is the case here. - cellsAndProofs[blobIndex] = cellsAndProofsForBlob - return nil - }) + blobIndices := make([]int, blobCount) + for i := range blobIndices { + blobIndices[i] = i + } + cellsPerBlob, proofsPerBlob, err := recoverCellsAndProofsForBlobs(verifiedRoSidecars, blobIndices) + if err != nil { + return nil, errors.Wrap(err, "recover cells and proofs for blobs") } - if err := wg.Wait(); err != nil { - return nil, errors.Wrap(err, "wait for RecoverCellsAndKZGProofs") - } - - outSidecars, err := DataColumnSidecars(cellsAndProofs, PopulateFromSidecar(referenceSidecar)) + outSidecars, err := DataColumnSidecars(cellsPerBlob, proofsPerBlob, PopulateFromSidecar(referenceSidecar)) if err != nil { return nil, errors.Wrap(err, "data column sidecars from items") } @@ -113,18 +166,192 @@ func ReconstructDataColumnSidecars(verifiedRoSidecars []blocks.VerifiedRODataCol return reconstructedVerifiedRoSidecars, nil } -// ReconstructBlobs constructs verified read only blobs sidecars from verified read only blob sidecars. +// reconstructIfNeeded validates the input data column sidecars and returns the prepared sidecars +// (reconstructed if necessary). This function performs common validation and reconstruction logic used by +// both ReconstructBlobs and ReconstructBlobSidecars. +func reconstructIfNeeded(verifiedDataColumnSidecars []blocks.VerifiedRODataColumn) ([]blocks.VerifiedRODataColumn, error) { + if len(verifiedDataColumnSidecars) == 0 { + return nil, ErrNotEnoughDataColumnSidecars + } + + // Check if the sidecars are sorted by index and do not contain duplicates. + previousColumnIndex := verifiedDataColumnSidecars[0].Index + for _, dataColumnSidecar := range verifiedDataColumnSidecars[1:] { + columnIndex := dataColumnSidecar.Index + if columnIndex <= previousColumnIndex { + return nil, ErrDataColumnSidecarsNotSortedByIndex + } + + previousColumnIndex = columnIndex + } + + // Check if we have enough columns. + cellsPerBlob := fieldparams.CellsPerBlob + if len(verifiedDataColumnSidecars) < cellsPerBlob { + return nil, ErrNotEnoughDataColumnSidecars + } + + // If all column sidecars corresponding to (non-extended) blobs are present, no need to reconstruct. + if verifiedDataColumnSidecars[cellsPerBlob-1].Index == uint64(cellsPerBlob-1) { + return verifiedDataColumnSidecars, nil + } + + // We need to reconstruct the data column sidecars. + return ReconstructDataColumnSidecars(verifiedDataColumnSidecars) +} + +// ReconstructBlobSidecars constructs verified read only blobs sidecars from verified read only blob sidecars. 
// The following constraints must be satisfied: // - All `dataColumnSidecars` has to be committed to the same block, and // - `dataColumnSidecars` must be sorted by index and should not contain duplicates. // - `dataColumnSidecars` must contain either all sidecars corresponding to (non-extended) blobs, -// or either enough sidecars to reconstruct the blobs. -func ReconstructBlobs(block blocks.ROBlock, verifiedDataColumnSidecars []blocks.VerifiedRODataColumn, indices []int) ([]*blocks.VerifiedROBlob, error) { +// - either enough sidecars to reconstruct the blobs. +func ReconstructBlobSidecars(block blocks.ROBlock, verifiedDataColumnSidecars []blocks.VerifiedRODataColumn, indices []int) ([]*blocks.VerifiedROBlob, error) { // Return early if no blobs are requested. if len(indices) == 0 { return nil, nil } + // Validate and prepare data columns (reconstruct if necessary). + // This also checks if input is empty. + preparedDataColumnSidecars, err := reconstructIfNeeded(verifiedDataColumnSidecars) + if err != nil { + return nil, err + } + + // Check if the blob index is too high. + commitments, err := block.Block().Body().BlobKzgCommitments() + if err != nil { + return nil, errors.Wrap(err, "blob KZG commitments") + } + + for _, blobIndex := range indices { + if blobIndex >= len(commitments) { + return nil, ErrBlobIndexTooHigh + } + } + + // Check if the data column sidecars are aligned with the block. + dataColumnSidecars := make([]blocks.RODataColumn, 0, len(preparedDataColumnSidecars)) + for _, verifiedDataColumnSidecar := range preparedDataColumnSidecars { + dataColumnSidecar := verifiedDataColumnSidecar.RODataColumn + dataColumnSidecars = append(dataColumnSidecars, dataColumnSidecar) + } + + if err := DataColumnsAlignWithBlock(block, dataColumnSidecars); err != nil { + return nil, errors.Wrap(err, "data columns align with block") + } + + // Convert verified data column sidecars to verified blob sidecars. + blobSidecars, err := blobSidecarsFromDataColumnSidecars(block, preparedDataColumnSidecars, indices) + if err != nil { + return nil, errors.Wrap(err, "blob sidecars from data column sidecars") + } + + return blobSidecars, nil +} + +// ComputeCellsAndProofsFromFlat computes the cells and proofs from blobs and cell flat proofs. +func ComputeCellsAndProofsFromFlat(blobs [][]byte, cellProofs [][]byte) ([][]kzg.Cell, [][]kzg.Proof, error) { + numberOfColumns := params.BeaconConfig().NumberOfColumns + blobCount := uint64(len(blobs)) + cellProofsCount := uint64(len(cellProofs)) + + cellsCount := blobCount * numberOfColumns + if cellsCount != cellProofsCount { + return nil, nil, ErrBlobsCellsProofsMismatch + } + + cellsPerBlob := make([][]kzg.Cell, 0, blobCount) + proofsPerBlob := make([][]kzg.Proof, 0, blobCount) + for i, blob := range blobs { + var kzgBlob kzg.Blob + if copy(kzgBlob[:], blob) != len(kzgBlob) { + return nil, nil, errors.New("wrong blob size - should never happen") + } + + // Compute the extended cells from the (non-extended) blob. 
+ cells, err := kzg.ComputeCells(&kzgBlob) + if err != nil { + return nil, nil, errors.Wrap(err, "compute cells") + } + + var proofs []kzg.Proof + for idx := uint64(i) * numberOfColumns; idx < (uint64(i)+1)*numberOfColumns; idx++ { + var kzgProof kzg.Proof + if copy(kzgProof[:], cellProofs[idx]) != len(kzgProof) { + return nil, nil, errors.New("wrong KZG proof size - should never happen") + } + + proofs = append(proofs, kzgProof) + } + + cellsPerBlob = append(cellsPerBlob, cells) + proofsPerBlob = append(proofsPerBlob, proofs) + } + + return cellsPerBlob, proofsPerBlob, nil +} + +// ComputeCellsAndProofsFromStructured computes the cells and proofs from blobs and cell proofs. +func ComputeCellsAndProofsFromStructured(blobsAndProofs []*pb.BlobAndProofV2) ([][]kzg.Cell, [][]kzg.Proof, error) { + numberOfColumns := params.BeaconConfig().NumberOfColumns + + cellsPerBlob := make([][]kzg.Cell, 0, len(blobsAndProofs)) + proofsPerBlob := make([][]kzg.Proof, 0, len(blobsAndProofs)) + for _, blobAndProof := range blobsAndProofs { + if blobAndProof == nil { + return nil, nil, ErrNilBlobAndProof + } + + var kzgBlob kzg.Blob + if copy(kzgBlob[:], blobAndProof.Blob) != len(kzgBlob) { + return nil, nil, errors.New("wrong blob size - should never happen") + } + + // Compute the extended cells from the (non-extended) blob. + cells, err := kzg.ComputeCells(&kzgBlob) + if err != nil { + return nil, nil, errors.Wrap(err, "compute cells") + } + + kzgProofs := make([]kzg.Proof, 0, numberOfColumns) + for _, kzgProofBytes := range blobAndProof.KzgProofs { + if len(kzgProofBytes) != kzg.BytesPerProof { + return nil, nil, errors.New("wrong KZG proof size - should never happen") + } + + var kzgProof kzg.Proof + if copy(kzgProof[:], kzgProofBytes) != len(kzgProof) { + return nil, nil, errors.New("wrong copied KZG proof size - should never happen") + } + + kzgProofs = append(kzgProofs, kzgProof) + } + + cellsPerBlob = append(cellsPerBlob, cells) + proofsPerBlob = append(proofsPerBlob, kzgProofs) + } + + return cellsPerBlob, proofsPerBlob, nil +} + +// ReconstructBlobs reconstructs blobs from data column sidecars without computing KZG proofs or creating sidecars. +// This is an optimized version for when only the blob data is needed (e.g., for the GetBlobs endpoint). +// The following constraints must be satisfied: +// - All `dataColumnSidecars` must be committed to the same block, and +// - `dataColumnSidecars` must be sorted by index and should not contain duplicates. +// - `dataColumnSidecars` must contain either all sidecars corresponding to (non-extended) blobs, +// - or enough sidecars to reconstruct the blobs. +func ReconstructBlobs(verifiedDataColumnSidecars []blocks.VerifiedRODataColumn, indices []int, blobCount int) ([][]byte, error) { + // If no specific indices are requested, populate with all blob indices. + if len(indices) == 0 { + indices = make([]int, blobCount) + for i := range indices { + indices[i] = i + } + } + if len(verifiedDataColumnSidecars) == 0 { return nil, ErrNotEnoughDataColumnSidecars } @@ -146,136 +373,70 @@ func ReconstructBlobs(block blocks.ROBlock, verifiedDataColumnSidecars []blocks. return nil, ErrNotEnoughDataColumnSidecars } - // Check if the blob index is too high. 
- commitments, err := block.Block().Body().BlobKzgCommitments() - if err != nil { - return nil, errors.Wrap(err, "blob KZG commitments") + // Verify that the actual blob count from the first sidecar matches the expected count + referenceSidecar := verifiedDataColumnSidecars[0] + actualBlobCount := len(referenceSidecar.Column) + if actualBlobCount != blobCount { + return nil, errors.Errorf("blob count mismatch: expected %d, got %d", blobCount, actualBlobCount) } + // Check if the blob index is too high. for _, blobIndex := range indices { - if blobIndex >= len(commitments) { + if blobIndex >= blobCount { return nil, ErrBlobIndexTooHigh } } - // Check if the data column sidecars are aligned with the block. - dataColumnSidecars := make([]blocks.RODataColumn, 0, len(verifiedDataColumnSidecars)) - for _, verifiedDataColumnSidecar := range verifiedDataColumnSidecars { - dataColumnSidecar := verifiedDataColumnSidecar.RODataColumn - dataColumnSidecars = append(dataColumnSidecars, dataColumnSidecar) + // Check if all columns have the same length and are committed to the same block. + blockRoot := referenceSidecar.BlockRoot() + for _, sidecar := range verifiedDataColumnSidecars[1:] { + if len(sidecar.Column) != blobCount { + return nil, ErrColumnLengthsDiffer + } + + if sidecar.BlockRoot() != blockRoot { + return nil, ErrBlockRootMismatch + } } - if err := DataColumnsAlignWithBlock(block, dataColumnSidecars); err != nil { - return nil, errors.Wrap(err, "data columns align with block") - } + // Check if we have all non-extended columns (0..63) - if so, no reconstruction needed. + hasAllNonExtendedColumns := verifiedDataColumnSidecars[cellsPerBlob-1].Index == uint64(cellsPerBlob-1) - // If all column sidecars corresponding to (non-extended) blobs are present, no need to reconstruct. - if verifiedDataColumnSidecars[cellsPerBlob-1].Index == uint64(cellsPerBlob-1) { - // Convert verified data column sidecars to verified blob sidecars. - blobSidecars, err := blobSidecarsFromDataColumnSidecars(block, verifiedDataColumnSidecars, indices) + var reconstructedCells map[int][]kzg.Cell + if !hasAllNonExtendedColumns { + // Need to reconstruct cells (but NOT proofs) for the requested blobs only. + var err error + reconstructedCells, err = recoverCellsForBlobs(verifiedDataColumnSidecars, indices) if err != nil { - return nil, errors.Wrap(err, "blob sidecars from data column sidecars") + return nil, errors.Wrap(err, "recover cells") } - - return blobSidecars, nil } - // We need to reconstruct the data column sidecars. - reconstructedDataColumnSidecars, err := ReconstructDataColumnSidecars(verifiedDataColumnSidecars) - if err != nil { - return nil, errors.Wrap(err, "reconstruct data column sidecars") - } + // Extract blob data without computing proofs. + blobs := make([][]byte, 0, len(indices)) + for _, blobIndex := range indices { + var blob kzg.Blob - // Convert verified data column sidecars to verified blob sidecars. - blobSidecars, err := blobSidecarsFromDataColumnSidecars(block, reconstructedDataColumnSidecars, indices) - if err != nil { - return nil, errors.Wrap(err, "blob sidecars from data column sidecars") - } - - return blobSidecars, nil -} - -// ComputeCellsAndProofsFromFlat computes the cells and proofs from blobs and cell flat proofs. 
-func ComputeCellsAndProofsFromFlat(blobs [][]byte, cellProofs [][]byte) ([]kzg.CellsAndProofs, error) { - numberOfColumns := params.BeaconConfig().NumberOfColumns - blobCount := uint64(len(blobs)) - cellProofsCount := uint64(len(cellProofs)) - - cellsCount := blobCount * numberOfColumns - if cellsCount != cellProofsCount { - return nil, ErrBlobsCellsProofsMismatch - } - - cellsAndProofs := make([]kzg.CellsAndProofs, 0, blobCount) - for i, blob := range blobs { - var kzgBlob kzg.Blob - if copy(kzgBlob[:], blob) != len(kzgBlob) { - return nil, errors.New("wrong blob size - should never happen") - } - - // Compute the extended cells from the (non-extended) blob. - cells, err := kzg.ComputeCells(&kzgBlob) - if err != nil { - return nil, errors.Wrap(err, "compute cells") - } - - var proofs []kzg.Proof - for idx := uint64(i) * numberOfColumns; idx < (uint64(i)+1)*numberOfColumns; idx++ { - var kzgProof kzg.Proof - if copy(kzgProof[:], cellProofs[idx]) != len(kzgProof) { - return nil, errors.New("wrong KZG proof size - should never happen") + // Compute the content of the blob. + for columnIndex := range cellsPerBlob { + var cell []byte + if hasAllNonExtendedColumns { + // Use existing cells from sidecars + cell = verifiedDataColumnSidecars[columnIndex].Column[blobIndex] + } else { + // Use reconstructed cells + cell = reconstructedCells[blobIndex][columnIndex][:] } - proofs = append(proofs, kzgProof) + if copy(blob[kzg.BytesPerCell*columnIndex:], cell) != kzg.BytesPerCell { + return nil, errors.New("wrong cell size - should never happen") + } } - cellsProofs := kzg.CellsAndProofs{Cells: cells, Proofs: proofs} - cellsAndProofs = append(cellsAndProofs, cellsProofs) + blobs = append(blobs, blob[:]) } - return cellsAndProofs, nil -} - -// ComputeCellsAndProofs computes the cells and proofs from blobs and cell proofs. -func ComputeCellsAndProofsFromStructured(blobsAndProofs []*pb.BlobAndProofV2) ([]kzg.CellsAndProofs, error) { - numberOfColumns := params.BeaconConfig().NumberOfColumns - - cellsAndProofs := make([]kzg.CellsAndProofs, 0, len(blobsAndProofs)) - for _, blobAndProof := range blobsAndProofs { - if blobAndProof == nil { - return nil, ErrNilBlobAndProof - } - - var kzgBlob kzg.Blob - if copy(kzgBlob[:], blobAndProof.Blob) != len(kzgBlob) { - return nil, errors.New("wrong blob size - should never happen") - } - - // Compute the extended cells from the (non-extended) blob. - cells, err := kzg.ComputeCells(&kzgBlob) - if err != nil { - return nil, errors.Wrap(err, "compute cells") - } - - kzgProofs := make([]kzg.Proof, 0, numberOfColumns) - for _, kzgProofBytes := range blobAndProof.KzgProofs { - if len(kzgProofBytes) != kzg.BytesPerProof { - return nil, errors.New("wrong KZG proof size - should never happen") - } - - var kzgProof kzg.Proof - if copy(kzgProof[:], kzgProofBytes) != len(kzgProof) { - return nil, errors.New("wrong copied KZG proof size - should never happen") - } - - kzgProofs = append(kzgProofs, kzgProof) - } - - cellsProofs := kzg.CellsAndProofs{Cells: cells, Proofs: kzgProofs} - cellsAndProofs = append(cellsAndProofs, cellsProofs) - } - - return cellsAndProofs, nil + return blobs, nil } // blobSidecarsFromDataColumnSidecars converts verified data column sidecars to verified blob sidecars. 
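The reconstruction.go changes above split blob reconstruction into two entry points: ReconstructBlobSidecars returns full verified blob sidecars (blob, KZG proof, inclusion proof), while the new ReconstructBlobs returns raw blob bytes and skips proof recovery. Below is a minimal usage sketch, assuming the caller already holds the block, its verified data column sidecars, and the blob count; serveBlobs, wantProofs and the local variable names are illustrative only and not part of this patch.

package example

import (
	"github.com/OffchainLabs/prysm/v7/beacon-chain/core/peerdas"
	"github.com/OffchainLabs/prysm/v7/consensus-types/blocks"
)

// serveBlobs is a hypothetical caller showing the two reconstruction paths
// introduced above. Only the peerdas function signatures come from the patch.
func serveBlobs(roBlock blocks.ROBlock, columns []blocks.VerifiedRODataColumn, blobCount int, wantProofs bool) error {
	indices := []int{0, 2} // blob indices requested by the client (example values)

	if wantProofs {
		// Full verified blob sidecars, e.g. for the blob_sidecars endpoint.
		sidecars, err := peerdas.ReconstructBlobSidecars(roBlock, columns, indices)
		if err != nil {
			return err
		}
		_ = sidecars
		return nil
	}

	// Raw blob bytes only: cells are recovered, KZG proof recovery is skipped.
	blobs, err := peerdas.ReconstructBlobs(columns, indices, blobCount)
	if err != nil {
		return err
	}
	_ = blobs
	return nil
}

The proof-free path backs the GetBlobs endpoint, where only blob bytes are needed, which is why recoverCellsForBlobs recovers cells for the requested blob indices without computing any proofs.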
diff --git a/beacon-chain/core/peerdas/reconstruction_helpers_test.go b/beacon-chain/core/peerdas/reconstruction_helpers_test.go new file mode 100644 index 0000000000..1a3bc93f69 --- /dev/null +++ b/beacon-chain/core/peerdas/reconstruction_helpers_test.go @@ -0,0 +1,79 @@ +package peerdas_test + +// Test helpers for reconstruction tests + +import ( + "testing" + + "github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain/kzg" + "github.com/OffchainLabs/prysm/v7/beacon-chain/core/peerdas" + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/testing/util" +) + +// testBlobSetup holds common test data for blob reconstruction tests. +type testBlobSetup struct { + blobCount int + blobs []kzg.Blob + roBlock blocks.ROBlock + roDataColumnSidecars []blocks.RODataColumn + verifiedRoDataColumnSidecars []blocks.VerifiedRODataColumn +} + +// setupTestBlobs creates a complete test setup with blobs, cells, proofs, and data column sidecars. +func setupTestBlobs(t *testing.T, blobCount int) *testBlobSetup { + _, roBlobSidecars := util.GenerateTestElectraBlockWithSidecar(t, [32]byte{}, 42, blobCount) + + blobs := make([]kzg.Blob, blobCount) + for i := range blobCount { + copy(blobs[i][:], roBlobSidecars[i].Blob) + } + + cellsPerBlob, proofsPerBlob := util.GenerateCellsAndProofs(t, blobs) + + fs := util.SlotAtEpoch(t, params.BeaconConfig().FuluForkEpoch) + roBlock, _, _ := util.GenerateTestFuluBlockWithSidecars(t, blobCount, util.WithSlot(fs)) + + roDataColumnSidecars, err := peerdas.DataColumnSidecars(cellsPerBlob, proofsPerBlob, peerdas.PopulateFromBlock(roBlock)) + require.NoError(t, err) + + verifiedRoSidecars := toVerifiedSidecars(roDataColumnSidecars) + + return &testBlobSetup{ + blobCount: blobCount, + blobs: blobs, + roBlock: roBlock, + roDataColumnSidecars: roDataColumnSidecars, + verifiedRoDataColumnSidecars: verifiedRoSidecars, + } +} + +// toVerifiedSidecars converts a slice of RODataColumn to VerifiedRODataColumn. +func toVerifiedSidecars(roDataColumnSidecars []blocks.RODataColumn) []blocks.VerifiedRODataColumn { + verifiedRoSidecars := make([]blocks.VerifiedRODataColumn, 0, len(roDataColumnSidecars)) + for _, roDataColumnSidecar := range roDataColumnSidecars { + verifiedRoSidecar := blocks.NewVerifiedRODataColumn(roDataColumnSidecar) + verifiedRoSidecars = append(verifiedRoSidecars, verifiedRoSidecar) + } + return verifiedRoSidecars +} + +// filterEvenIndexedSidecars returns only the even-indexed sidecars (0, 2, 4, ...). +// This is useful for forcing reconstruction in tests. +func filterEvenIndexedSidecars(sidecars []blocks.VerifiedRODataColumn) []blocks.VerifiedRODataColumn { + filtered := make([]blocks.VerifiedRODataColumn, 0, len(sidecars)/2) + for i := 0; i < len(sidecars); i += 2 { + filtered = append(filtered, sidecars[i]) + } + return filtered +} + +// setupFuluForkEpoch sets up the test configuration with Fulu fork after Electra. 
+func setupFuluForkEpoch(t *testing.T) primitives.Slot { + params.SetupTestConfigCleanup(t) + params.BeaconConfig().FuluForkEpoch = params.BeaconConfig().ElectraForkEpoch + 4096*2 + return util.SlotAtEpoch(t, params.BeaconConfig().FuluForkEpoch) +} diff --git a/beacon-chain/core/peerdas/reconstruction_test.go b/beacon-chain/core/peerdas/reconstruction_test.go index b1d7525125..474e82aa04 100644 --- a/beacon-chain/core/peerdas/reconstruction_test.go +++ b/beacon-chain/core/peerdas/reconstruction_test.go @@ -124,7 +124,7 @@ func TestReconstructDataColumnSidecars(t *testing.T) { }) } -func TestReconstructBlobs(t *testing.T) { +func TestReconstructBlobSidecars(t *testing.T) { params.SetupTestConfigCleanup(t) params.BeaconConfig().FuluForkEpoch = params.BeaconConfig().ElectraForkEpoch + 4096*2 @@ -133,13 +133,13 @@ func TestReconstructBlobs(t *testing.T) { fs := util.SlotAtEpoch(t, params.BeaconConfig().FuluForkEpoch) t.Run("no index", func(t *testing.T) { - actual, err := peerdas.ReconstructBlobs(emptyBlock, nil, nil) + actual, err := peerdas.ReconstructBlobSidecars(emptyBlock, nil, nil) require.NoError(t, err) require.IsNil(t, actual) }) t.Run("empty input", func(t *testing.T) { - _, err := peerdas.ReconstructBlobs(emptyBlock, nil, []int{0}) + _, err := peerdas.ReconstructBlobSidecars(emptyBlock, nil, []int{0}) require.ErrorIs(t, err, peerdas.ErrNotEnoughDataColumnSidecars) }) @@ -149,7 +149,7 @@ func TestReconstructBlobs(t *testing.T) { // Arbitrarily change the order of the sidecars. verifiedRoSidecars[3], verifiedRoSidecars[2] = verifiedRoSidecars[2], verifiedRoSidecars[3] - _, err := peerdas.ReconstructBlobs(emptyBlock, verifiedRoSidecars, []int{0}) + _, err := peerdas.ReconstructBlobSidecars(emptyBlock, verifiedRoSidecars, []int{0}) require.ErrorIs(t, err, peerdas.ErrDataColumnSidecarsNotSortedByIndex) }) @@ -159,7 +159,7 @@ func TestReconstructBlobs(t *testing.T) { // [0, 1, 1, 3, 4, ...] verifiedRoSidecars[2] = verifiedRoSidecars[1] - _, err := peerdas.ReconstructBlobs(emptyBlock, verifiedRoSidecars, []int{0}) + _, err := peerdas.ReconstructBlobSidecars(emptyBlock, verifiedRoSidecars, []int{0}) require.ErrorIs(t, err, peerdas.ErrDataColumnSidecarsNotSortedByIndex) }) @@ -169,7 +169,7 @@ func TestReconstructBlobs(t *testing.T) { // [0, 1, 2, 1, 4, ...] 
verifiedRoSidecars[3] = verifiedRoSidecars[1] - _, err := peerdas.ReconstructBlobs(emptyBlock, verifiedRoSidecars, []int{0}) + _, err := peerdas.ReconstructBlobSidecars(emptyBlock, verifiedRoSidecars, []int{0}) require.ErrorIs(t, err, peerdas.ErrDataColumnSidecarsNotSortedByIndex) }) @@ -177,7 +177,7 @@ func TestReconstructBlobs(t *testing.T) { _, _, verifiedRoSidecars := util.GenerateTestFuluBlockWithSidecars(t, 3) inputSidecars := verifiedRoSidecars[:fieldparams.CellsPerBlob-1] - _, err := peerdas.ReconstructBlobs(emptyBlock, inputSidecars, []int{0}) + _, err := peerdas.ReconstructBlobSidecars(emptyBlock, inputSidecars, []int{0}) require.ErrorIs(t, err, peerdas.ErrNotEnoughDataColumnSidecars) }) @@ -186,7 +186,7 @@ func TestReconstructBlobs(t *testing.T) { roBlock, _, verifiedRoSidecars := util.GenerateTestFuluBlockWithSidecars(t, blobCount) - _, err := peerdas.ReconstructBlobs(roBlock, verifiedRoSidecars, []int{1, blobCount}) + _, err := peerdas.ReconstructBlobSidecars(roBlock, verifiedRoSidecars, []int{1, blobCount}) require.ErrorIs(t, err, peerdas.ErrBlobIndexTooHigh) }) @@ -194,7 +194,7 @@ func TestReconstructBlobs(t *testing.T) { _, _, verifiedRoSidecars := util.GenerateTestFuluBlockWithSidecars(t, 3, util.WithParentRoot([fieldparams.RootLength]byte{1}), util.WithSlot(fs)) roBlock, _, _ := util.GenerateTestFuluBlockWithSidecars(t, 3, util.WithParentRoot([fieldparams.RootLength]byte{2}), util.WithSlot(fs)) - _, err := peerdas.ReconstructBlobs(roBlock, verifiedRoSidecars, []int{0}) + _, err := peerdas.ReconstructBlobSidecars(roBlock, verifiedRoSidecars, []int{0}) require.ErrorContains(t, peerdas.ErrRootMismatch.Error(), err) }) @@ -207,7 +207,8 @@ func TestReconstructBlobs(t *testing.T) { // Compute cells and proofs from blob sidecars. var wg errgroup.Group blobs := make([][]byte, blobCount) - inputCellsAndProofs := make([]kzg.CellsAndProofs, blobCount) + inputCellsPerBlob := make([][]kzg.Cell, blobCount) + inputProofsPerBlob := make([][]kzg.Proof, blobCount) for i := range blobCount { blob := roBlobSidecars[i].Blob blobs[i] = blob @@ -217,14 +218,15 @@ func TestReconstructBlobs(t *testing.T) { count := copy(kzgBlob[:], blob) require.Equal(t, len(kzgBlob), count) - cp, err := kzg.ComputeCellsAndKZGProofs(&kzgBlob) + cells, proofs, err := kzg.ComputeCellsAndKZGProofs(&kzgBlob) if err != nil { return errors.Wrapf(err, "compute cells and kzg proofs for blob %d", i) } // It is safe for multiple goroutines to concurrently write to the same slice, // as long as they are writing to different indices, which is the case here. - inputCellsAndProofs[i] = cp + inputCellsPerBlob[i] = cells + inputProofsPerBlob[i] = proofs return nil }) @@ -235,18 +237,18 @@ func TestReconstructBlobs(t *testing.T) { // Flatten proofs. cellProofs := make([][]byte, 0, blobCount*numberOfColumns) - for _, cp := range inputCellsAndProofs { - for _, proof := range cp.Proofs { + for _, proofs := range inputProofsPerBlob { + for _, proof := range proofs { cellProofs = append(cellProofs, proof[:]) } } // Compute celles and proofs from the blobs and cell proofs. - cellsAndProofs, err := peerdas.ComputeCellsAndProofsFromFlat(blobs, cellProofs) + cellsPerBlob, proofsPerBlob, err := peerdas.ComputeCellsAndProofsFromFlat(blobs, cellProofs) require.NoError(t, err) // Construct data column sidears from the signed block and cells and proofs. 
- roDataColumnSidecars, err := peerdas.DataColumnSidecars(cellsAndProofs, peerdas.PopulateFromBlock(roBlock)) + roDataColumnSidecars, err := peerdas.DataColumnSidecars(cellsPerBlob, proofsPerBlob, peerdas.PopulateFromBlock(roBlock)) require.NoError(t, err) // Convert to verified data column sidecars. @@ -260,7 +262,7 @@ func TestReconstructBlobs(t *testing.T) { t.Run("no reconstruction needed", func(t *testing.T) { // Reconstruct blobs. - reconstructedVerifiedRoBlobSidecars, err := peerdas.ReconstructBlobs(roBlock, verifiedRoSidecars, indices) + reconstructedVerifiedRoBlobSidecars, err := peerdas.ReconstructBlobSidecars(roBlock, verifiedRoSidecars, indices) require.NoError(t, err) // Compare blobs. @@ -280,7 +282,7 @@ func TestReconstructBlobs(t *testing.T) { } // Reconstruct blobs. - reconstructedVerifiedRoBlobSidecars, err := peerdas.ReconstructBlobs(roBlock, filteredSidecars, indices) + reconstructedVerifiedRoBlobSidecars, err := peerdas.ReconstructBlobSidecars(roBlock, filteredSidecars, indices) require.NoError(t, err) // Compare blobs. @@ -296,6 +298,135 @@ func TestReconstructBlobs(t *testing.T) { } +func TestReconstructBlobs(t *testing.T) { + setupFuluForkEpoch(t) + require.NoError(t, kzg.Start()) + + t.Run("empty indices with blobCount > 0", func(t *testing.T) { + setup := setupTestBlobs(t, 3) + + // Call with empty indices - should return all blobs + reconstructedBlobs, err := peerdas.ReconstructBlobs(setup.verifiedRoDataColumnSidecars, []int{}, setup.blobCount) + require.NoError(t, err) + require.Equal(t, setup.blobCount, len(reconstructedBlobs)) + + // Verify each blob matches + for i := 0; i < setup.blobCount; i++ { + require.DeepEqual(t, setup.blobs[i][:], reconstructedBlobs[i]) + } + }) + + t.Run("specific indices", func(t *testing.T) { + setup := setupTestBlobs(t, 3) + + // Request only blobs at indices 0 and 2 + indices := []int{0, 2} + reconstructedBlobs, err := peerdas.ReconstructBlobs(setup.verifiedRoDataColumnSidecars, indices, setup.blobCount) + require.NoError(t, err) + require.Equal(t, len(indices), len(reconstructedBlobs)) + + // Verify requested blobs match + for i, blobIndex := range indices { + require.DeepEqual(t, setup.blobs[blobIndex][:], reconstructedBlobs[i]) + } + }) + + t.Run("blob count mismatch", func(t *testing.T) { + setup := setupTestBlobs(t, 3) + + // Pass wrong blob count + wrongBlobCount := 5 + _, err := peerdas.ReconstructBlobs(setup.verifiedRoDataColumnSidecars, []int{0}, wrongBlobCount) + require.ErrorContains(t, "blob count mismatch", err) + }) + + t.Run("empty data columns", func(t *testing.T) { + _, err := peerdas.ReconstructBlobs([]blocks.VerifiedRODataColumn{}, []int{0}, 1) + require.ErrorIs(t, err, peerdas.ErrNotEnoughDataColumnSidecars) + }) + + t.Run("index too high", func(t *testing.T) { + setup := setupTestBlobs(t, 3) + + // Request blob index that's too high + _, err := peerdas.ReconstructBlobs(setup.verifiedRoDataColumnSidecars, []int{setup.blobCount}, setup.blobCount) + require.ErrorIs(t, err, peerdas.ErrBlobIndexTooHigh) + }) + + t.Run("not enough columns", func(t *testing.T) { + setup := setupTestBlobs(t, 3) + + // Only provide 63 columns (need at least 64) + inputSidecars := setup.verifiedRoDataColumnSidecars[:fieldparams.CellsPerBlob-1] + _, err := peerdas.ReconstructBlobs(inputSidecars, []int{0}, setup.blobCount) + require.ErrorIs(t, err, peerdas.ErrNotEnoughDataColumnSidecars) + }) + + t.Run("not sorted", func(t *testing.T) { + setup := setupTestBlobs(t, 3) + + // Swap two sidecars to make them unsorted + 
setup.verifiedRoDataColumnSidecars[3], setup.verifiedRoDataColumnSidecars[2] = setup.verifiedRoDataColumnSidecars[2], setup.verifiedRoDataColumnSidecars[3] + + _, err := peerdas.ReconstructBlobs(setup.verifiedRoDataColumnSidecars, []int{0}, setup.blobCount) + require.ErrorIs(t, err, peerdas.ErrDataColumnSidecarsNotSortedByIndex) + }) + + t.Run("with reconstruction needed", func(t *testing.T) { + setup := setupTestBlobs(t, 3) + + // Keep only even-indexed columns (will need reconstruction) + filteredSidecars := filterEvenIndexedSidecars(setup.verifiedRoDataColumnSidecars) + + // Reconstruct all blobs + reconstructedBlobs, err := peerdas.ReconstructBlobs(filteredSidecars, []int{}, setup.blobCount) + require.NoError(t, err) + require.Equal(t, setup.blobCount, len(reconstructedBlobs)) + + // Verify all blobs match + for i := range setup.blobCount { + require.DeepEqual(t, setup.blobs[i][:], reconstructedBlobs[i]) + } + }) + + t.Run("no reconstruction needed - all non-extended columns present", func(t *testing.T) { + setup := setupTestBlobs(t, 3) + + // Use all columns (no reconstruction needed since we have all non-extended columns 0-63) + reconstructedBlobs, err := peerdas.ReconstructBlobs(setup.verifiedRoDataColumnSidecars, []int{1}, setup.blobCount) + require.NoError(t, err) + require.Equal(t, 1, len(reconstructedBlobs)) + + // Verify blob matches + require.DeepEqual(t, setup.blobs[1][:], reconstructedBlobs[0]) + }) + + t.Run("reconstruct only requested blob indices", func(t *testing.T) { + // This test verifies the optimization: when reconstruction is needed and specific + // blob indices are requested, we only reconstruct those blobs, not all of them. + setup := setupTestBlobs(t, 6) + + // Keep only even-indexed columns (will need reconstruction) + // This ensures we don't have all non-extended columns (0-63) + filteredSidecars := filterEvenIndexedSidecars(setup.verifiedRoDataColumnSidecars) + + // Request only specific blob indices (not all of them) + requestedIndices := []int{1, 3, 5} + reconstructedBlobs, err := peerdas.ReconstructBlobs(filteredSidecars, requestedIndices, setup.blobCount) + require.NoError(t, err) + + // Should only get the requested blobs back (not all 6) + require.Equal(t, len(requestedIndices), len(reconstructedBlobs), + "should only reconstruct requested blobs, not all blobs") + + // Verify each requested blob matches the original + for i, blobIndex := range requestedIndices { + require.DeepEqual(t, setup.blobs[blobIndex][:], reconstructedBlobs[i], + "blob at index %d should match", blobIndex) + } + }) +} + func TestComputeCellsAndProofsFromFlat(t *testing.T) { // Start the trusted setup. 
err := kzg.Start() @@ -310,7 +441,7 @@ func TestComputeCellsAndProofsFromFlat(t *testing.T) { // Create proofs for 2 blobs worth of columns cellProofs := make([][]byte, 2*numberOfColumns) - _, err := peerdas.ComputeCellsAndProofsFromFlat(blobs, cellProofs) + _, _, err := peerdas.ComputeCellsAndProofsFromFlat(blobs, cellProofs) require.ErrorIs(t, err, peerdas.ErrBlobsCellsProofsMismatch) }) @@ -323,7 +454,8 @@ func TestComputeCellsAndProofsFromFlat(t *testing.T) { // Extract blobs and compute expected cells and proofs blobs := make([][]byte, blobCount) - expectedCellsAndProofs := make([]kzg.CellsAndProofs, blobCount) + expectedCellsPerBlob := make([][]kzg.Cell, blobCount) + expectedProofsPerBlob := make([][]kzg.Proof, blobCount) var wg errgroup.Group for i := range blobCount { @@ -335,12 +467,13 @@ func TestComputeCellsAndProofsFromFlat(t *testing.T) { count := copy(kzgBlob[:], blob) require.Equal(t, len(kzgBlob), count) - cp, err := kzg.ComputeCellsAndKZGProofs(&kzgBlob) + cells, proofs, err := kzg.ComputeCellsAndKZGProofs(&kzgBlob) if err != nil { return errors.Wrapf(err, "compute cells and kzg proofs for blob %d", i) } - expectedCellsAndProofs[i] = cp + expectedCellsPerBlob[i] = cells + expectedProofsPerBlob[i] = proofs return nil }) } @@ -350,30 +483,30 @@ func TestComputeCellsAndProofsFromFlat(t *testing.T) { // Flatten proofs cellProofs := make([][]byte, 0, blobCount*numberOfColumns) - for _, cp := range expectedCellsAndProofs { - for _, proof := range cp.Proofs { + for _, proofs := range expectedProofsPerBlob { + for _, proof := range proofs { cellProofs = append(cellProofs, proof[:]) } } // Test ComputeCellsAndProofs - actualCellsAndProofs, err := peerdas.ComputeCellsAndProofsFromFlat(blobs, cellProofs) + actualCellsPerBlob, actualProofsPerBlob, err := peerdas.ComputeCellsAndProofsFromFlat(blobs, cellProofs) require.NoError(t, err) - require.Equal(t, blobCount, len(actualCellsAndProofs)) + require.Equal(t, blobCount, len(actualCellsPerBlob)) // Verify the results match expected for i := range blobCount { - require.Equal(t, len(expectedCellsAndProofs[i].Cells), len(actualCellsAndProofs[i].Cells)) - require.Equal(t, len(expectedCellsAndProofs[i].Proofs), len(actualCellsAndProofs[i].Proofs)) + require.Equal(t, len(expectedCellsPerBlob[i]), len(actualCellsPerBlob[i])) + require.Equal(t, len(expectedProofsPerBlob[i]), len(actualProofsPerBlob[i])) // Compare cells - for j, expectedCell := range expectedCellsAndProofs[i].Cells { - require.Equal(t, expectedCell, actualCellsAndProofs[i].Cells[j]) + for j, expectedCell := range expectedCellsPerBlob[i] { + require.Equal(t, expectedCell, actualCellsPerBlob[i][j]) } // Compare proofs - for j, expectedProof := range expectedCellsAndProofs[i].Proofs { - require.Equal(t, expectedProof, actualCellsAndProofs[i].Proofs[j]) + for j, expectedProof := range expectedProofsPerBlob[i] { + require.Equal(t, expectedProof, actualProofsPerBlob[i][j]) } } }) @@ -381,7 +514,7 @@ func TestComputeCellsAndProofsFromFlat(t *testing.T) { func TestComputeCellsAndProofsFromStructured(t *testing.T) { t.Run("nil blob and proof", func(t *testing.T) { - _, err := peerdas.ComputeCellsAndProofsFromStructured([]*pb.BlobAndProofV2{nil}) + _, _, err := peerdas.ComputeCellsAndProofsFromStructured([]*pb.BlobAndProofV2{nil}) require.ErrorIs(t, err, peerdas.ErrNilBlobAndProof) }) @@ -397,7 +530,8 @@ func TestComputeCellsAndProofsFromStructured(t *testing.T) { // Extract blobs and compute expected cells and proofs blobsAndProofs := make([]*pb.BlobAndProofV2, blobCount) - 
expectedCellsAndProofs := make([]kzg.CellsAndProofs, blobCount) + expectedCellsPerBlob := make([][]kzg.Cell, blobCount) + expectedProofsPerBlob := make([][]kzg.Proof, blobCount) var wg errgroup.Group for i := range blobCount { @@ -408,14 +542,15 @@ func TestComputeCellsAndProofsFromStructured(t *testing.T) { count := copy(kzgBlob[:], blob) require.Equal(t, len(kzgBlob), count) - cellsAndProofs, err := kzg.ComputeCellsAndKZGProofs(&kzgBlob) + cells, proofs, err := kzg.ComputeCellsAndKZGProofs(&kzgBlob) if err != nil { return errors.Wrapf(err, "compute cells and kzg proofs for blob %d", i) } - expectedCellsAndProofs[i] = cellsAndProofs + expectedCellsPerBlob[i] = cells + expectedProofsPerBlob[i] = proofs - kzgProofs := make([][]byte, 0, len(cellsAndProofs.Proofs)) - for _, proof := range cellsAndProofs.Proofs { + kzgProofs := make([][]byte, 0, len(proofs)) + for _, proof := range proofs { kzgProofs = append(kzgProofs, proof[:]) } @@ -433,24 +568,24 @@ func TestComputeCellsAndProofsFromStructured(t *testing.T) { require.NoError(t, err) // Test ComputeCellsAndProofs - actualCellsAndProofs, err := peerdas.ComputeCellsAndProofsFromStructured(blobsAndProofs) + actualCellsPerBlob, actualProofsPerBlob, err := peerdas.ComputeCellsAndProofsFromStructured(blobsAndProofs) require.NoError(t, err) - require.Equal(t, blobCount, len(actualCellsAndProofs)) + require.Equal(t, blobCount, len(actualCellsPerBlob)) // Verify the results match expected for i := range blobCount { - require.Equal(t, len(expectedCellsAndProofs[i].Cells), len(actualCellsAndProofs[i].Cells)) - require.Equal(t, len(expectedCellsAndProofs[i].Proofs), len(actualCellsAndProofs[i].Proofs)) - require.Equal(t, len(expectedCellsAndProofs[i].Proofs), cap(actualCellsAndProofs[i].Proofs)) + require.Equal(t, len(expectedCellsPerBlob[i]), len(actualCellsPerBlob[i])) + require.Equal(t, len(expectedProofsPerBlob[i]), len(actualProofsPerBlob[i])) + require.Equal(t, len(expectedProofsPerBlob[i]), cap(actualProofsPerBlob[i])) // Compare cells - for j, expectedCell := range expectedCellsAndProofs[i].Cells { - require.Equal(t, expectedCell, actualCellsAndProofs[i].Cells[j]) + for j, expectedCell := range expectedCellsPerBlob[i] { + require.Equal(t, expectedCell, actualCellsPerBlob[i][j]) } // Compare proofs - for j, expectedProof := range expectedCellsAndProofs[i].Proofs { - require.Equal(t, expectedProof, actualCellsAndProofs[i].Proofs[j]) + for j, expectedProof := range expectedProofsPerBlob[i] { + require.Equal(t, expectedProof, actualProofsPerBlob[i][j]) } } }) diff --git a/beacon-chain/core/peerdas/validator.go b/beacon-chain/core/peerdas/validator.go index a83aa574a6..d5331524d4 100644 --- a/beacon-chain/core/peerdas/validator.go +++ b/beacon-chain/core/peerdas/validator.go @@ -93,19 +93,20 @@ func ValidatorsCustodyRequirement(state beaconState.ReadOnlyBeaconState, validat return min(max(count, validatorCustodyRequirement), numberOfCustodyGroups), nil } -// DataColumnSidecars, given ConstructionPopulator and the cells/proofs associated with each blob in the +// DataColumnSidecars given ConstructionPopulator and the cells/proofs associated with each blob in the // block, assembles sidecars which can be distributed to peers. +// cellsPerBlob and proofsPerBlob are parallel slices where each index represents a blob sidecar. 
// This is an adapted version of // https://github.com/ethereum/consensus-specs/blob/master/specs/fulu/validator.md#get_data_column_sidecars, // which is designed to be used both when constructing sidecars from a block and from a sidecar, replacing // https://github.com/ethereum/consensus-specs/blob/master/specs/fulu/validator.md#get_data_column_sidecars_from_block and // https://github.com/ethereum/consensus-specs/blob/master/specs/fulu/validator.md#get_data_column_sidecars_from_column_sidecar -func DataColumnSidecars(rows []kzg.CellsAndProofs, src ConstructionPopulator) ([]blocks.RODataColumn, error) { - if len(rows) == 0 { +func DataColumnSidecars(cellsPerBlob [][]kzg.Cell, proofsPerBlob [][]kzg.Proof, src ConstructionPopulator) ([]blocks.RODataColumn, error) { + if len(cellsPerBlob) == 0 { return nil, nil } start := time.Now() - cells, proofs, err := rotateRowsToCols(rows, params.BeaconConfig().NumberOfColumns) + cells, proofs, err := rotateRowsToCols(cellsPerBlob, proofsPerBlob, params.BeaconConfig().NumberOfColumns) if err != nil { return nil, errors.Wrap(err, "rotate cells and proofs") } @@ -197,26 +198,31 @@ func (b *BlockReconstructionSource) extract() (*blockInfo, error) { // rotateRowsToCols takes a 2D slice of cells and proofs, where the x is rows (blobs) and y is columns, // and returns a 2D slice where x is columns and y is rows. -func rotateRowsToCols(rows []kzg.CellsAndProofs, numCols uint64) ([][][]byte, [][][]byte, error) { - if len(rows) == 0 { +func rotateRowsToCols(cellsPerBlob [][]kzg.Cell, proofsPerBlob [][]kzg.Proof, numCols uint64) ([][][]byte, [][][]byte, error) { + if len(cellsPerBlob) == 0 { return nil, nil, nil } + if len(cellsPerBlob) != len(proofsPerBlob) { + return nil, nil, errors.New("cells and proofs length mismatch") + } cellCols := make([][][]byte, numCols) proofCols := make([][][]byte, numCols) - for i, cp := range rows { - if uint64(len(cp.Cells)) != numCols { + for i := range cellsPerBlob { + cells := cellsPerBlob[i] + proofs := proofsPerBlob[i] + if uint64(len(cells)) != numCols { return nil, nil, errors.Wrap(ErrNotEnoughDataColumnSidecars, "not enough cells") } - if len(cp.Cells) != len(cp.Proofs) { + if len(cells) != len(proofs) { return nil, nil, errors.Wrap(ErrNotEnoughDataColumnSidecars, "not enough proofs") } for j := uint64(0); j < numCols; j++ { if i == 0 { - cellCols[j] = make([][]byte, len(rows)) - proofCols[j] = make([][]byte, len(rows)) + cellCols[j] = make([][]byte, len(cellsPerBlob)) + proofCols[j] = make([][]byte, len(cellsPerBlob)) } - cellCols[j][i] = cp.Cells[j][:] - proofCols[j][i] = cp.Proofs[j][:] + cellCols[j][i] = cells[j][:] + proofCols[j][i] = proofs[j][:] } } return cellCols, proofCols, nil diff --git a/beacon-chain/core/peerdas/validator_test.go b/beacon-chain/core/peerdas/validator_test.go index 9c474400a9..99994a7dfb 100644 --- a/beacon-chain/core/peerdas/validator_test.go +++ b/beacon-chain/core/peerdas/validator_test.go @@ -68,16 +68,16 @@ func TestDataColumnSidecars(t *testing.T) { require.NoError(t, err) // Create cells and proofs. 
- cellsAndProofs := []kzg.CellsAndProofs{ - { - Cells: make([]kzg.Cell, params.BeaconConfig().NumberOfColumns), - Proofs: make([]kzg.Proof, params.BeaconConfig().NumberOfColumns), - }, + cellsPerBlob := [][]kzg.Cell{ + make([]kzg.Cell, params.BeaconConfig().NumberOfColumns), + } + proofsPerBlob := [][]kzg.Proof{ + make([]kzg.Proof, params.BeaconConfig().NumberOfColumns), } rob, err := blocks.NewROBlock(signedBeaconBlock) require.NoError(t, err) - _, err = peerdas.DataColumnSidecars(cellsAndProofs, peerdas.PopulateFromBlock(rob)) + _, err = peerdas.DataColumnSidecars(cellsPerBlob, proofsPerBlob, peerdas.PopulateFromBlock(rob)) require.ErrorIs(t, err, peerdas.ErrSizeMismatch) }) @@ -92,18 +92,18 @@ func TestDataColumnSidecars(t *testing.T) { // Create cells and proofs with insufficient cells for the number of columns. // This simulates a scenario where cellsAndProofs has fewer cells than expected columns. - cellsAndProofs := []kzg.CellsAndProofs{ - { - Cells: make([]kzg.Cell, 10), // Only 10 cells - Proofs: make([]kzg.Proof, 10), // Only 10 proofs - }, + cellsPerBlob := [][]kzg.Cell{ + make([]kzg.Cell, 10), // Only 10 cells + } + proofsPerBlob := [][]kzg.Proof{ + make([]kzg.Proof, 10), // Only 10 proofs } // This should fail because the function will try to access columns up to NumberOfColumns // but we only have 10 cells/proofs. rob, err := blocks.NewROBlock(signedBeaconBlock) require.NoError(t, err) - _, err = peerdas.DataColumnSidecars(cellsAndProofs, peerdas.PopulateFromBlock(rob)) + _, err = peerdas.DataColumnSidecars(cellsPerBlob, proofsPerBlob, peerdas.PopulateFromBlock(rob)) require.ErrorIs(t, err, peerdas.ErrNotEnoughDataColumnSidecars) }) @@ -118,17 +118,17 @@ func TestDataColumnSidecars(t *testing.T) { // Create cells and proofs with sufficient cells but insufficient proofs. numberOfColumns := params.BeaconConfig().NumberOfColumns - cellsAndProofs := []kzg.CellsAndProofs{ - { - Cells: make([]kzg.Cell, numberOfColumns), - Proofs: make([]kzg.Proof, 5), // Only 5 proofs, less than columns - }, + cellsPerBlob := [][]kzg.Cell{ + make([]kzg.Cell, numberOfColumns), + } + proofsPerBlob := [][]kzg.Proof{ + make([]kzg.Proof, 5), // Only 5 proofs, less than columns } // This should fail when trying to access proof beyond index 4. rob, err := blocks.NewROBlock(signedBeaconBlock) require.NoError(t, err) - _, err = peerdas.DataColumnSidecars(cellsAndProofs, peerdas.PopulateFromBlock(rob)) + _, err = peerdas.DataColumnSidecars(cellsPerBlob, proofsPerBlob, peerdas.PopulateFromBlock(rob)) require.ErrorIs(t, err, peerdas.ErrNotEnoughDataColumnSidecars) require.ErrorContains(t, "not enough proofs", err) }) @@ -150,28 +150,26 @@ func TestDataColumnSidecars(t *testing.T) { // Create cells and proofs with correct dimensions. 
numberOfColumns := params.BeaconConfig().NumberOfColumns - cellsAndProofs := []kzg.CellsAndProofs{ - { - Cells: make([]kzg.Cell, numberOfColumns), - Proofs: make([]kzg.Proof, numberOfColumns), - }, - { - Cells: make([]kzg.Cell, numberOfColumns), - Proofs: make([]kzg.Proof, numberOfColumns), - }, + cellsPerBlob := [][]kzg.Cell{ + make([]kzg.Cell, numberOfColumns), + make([]kzg.Cell, numberOfColumns), + } + proofsPerBlob := [][]kzg.Proof{ + make([]kzg.Proof, numberOfColumns), + make([]kzg.Proof, numberOfColumns), } // Set distinct values in cells and proofs for testing for i := range numberOfColumns { - cellsAndProofs[0].Cells[i][0] = byte(i) - cellsAndProofs[0].Proofs[i][0] = byte(i) - cellsAndProofs[1].Cells[i][0] = byte(i + 128) - cellsAndProofs[1].Proofs[i][0] = byte(i + 128) + cellsPerBlob[0][i][0] = byte(i) + proofsPerBlob[0][i][0] = byte(i) + cellsPerBlob[1][i][0] = byte(i + 128) + proofsPerBlob[1][i][0] = byte(i + 128) } rob, err := blocks.NewROBlock(signedBeaconBlock) require.NoError(t, err) - sidecars, err := peerdas.DataColumnSidecars(cellsAndProofs, peerdas.PopulateFromBlock(rob)) + sidecars, err := peerdas.DataColumnSidecars(cellsPerBlob, proofsPerBlob, peerdas.PopulateFromBlock(rob)) require.NoError(t, err) require.NotNil(t, sidecars) require.Equal(t, int(numberOfColumns), len(sidecars)) @@ -215,28 +213,26 @@ func TestReconstructionSource(t *testing.T) { // Create cells and proofs with correct dimensions. numberOfColumns := params.BeaconConfig().NumberOfColumns - cellsAndProofs := []kzg.CellsAndProofs{ - { - Cells: make([]kzg.Cell, numberOfColumns), - Proofs: make([]kzg.Proof, numberOfColumns), - }, - { - Cells: make([]kzg.Cell, numberOfColumns), - Proofs: make([]kzg.Proof, numberOfColumns), - }, + cellsPerBlob := [][]kzg.Cell{ + make([]kzg.Cell, numberOfColumns), + make([]kzg.Cell, numberOfColumns), + } + proofsPerBlob := [][]kzg.Proof{ + make([]kzg.Proof, numberOfColumns), + make([]kzg.Proof, numberOfColumns), } // Set distinct values in cells and proofs for testing for i := range numberOfColumns { - cellsAndProofs[0].Cells[i][0] = byte(i) - cellsAndProofs[0].Proofs[i][0] = byte(i) - cellsAndProofs[1].Cells[i][0] = byte(i + 128) - cellsAndProofs[1].Proofs[i][0] = byte(i + 128) + cellsPerBlob[0][i][0] = byte(i) + proofsPerBlob[0][i][0] = byte(i) + cellsPerBlob[1][i][0] = byte(i + 128) + proofsPerBlob[1][i][0] = byte(i + 128) } rob, err := blocks.NewROBlock(signedBeaconBlock) require.NoError(t, err) - sidecars, err := peerdas.DataColumnSidecars(cellsAndProofs, peerdas.PopulateFromBlock(rob)) + sidecars, err := peerdas.DataColumnSidecars(cellsPerBlob, proofsPerBlob, peerdas.PopulateFromBlock(rob)) require.NoError(t, err) require.NotNil(t, sidecars) require.Equal(t, int(numberOfColumns), len(sidecars)) diff --git a/beacon-chain/execution/engine_client.go b/beacon-chain/execution/engine_client.go index 73e41e32e0..84bce36665 100644 --- a/beacon-chain/execution/engine_client.go +++ b/beacon-chain/execution/engine_client.go @@ -660,18 +660,18 @@ func (s *Service) ConstructDataColumnSidecars(ctx context.Context, populator pee return nil, wrapWithBlockRoot(err, root, "commitments") } - cellsAndProofs, err := s.fetchCellsAndProofsFromExecution(ctx, commitments) + cellsPerBlob, proofsPerBlob, err := s.fetchCellsAndProofsFromExecution(ctx, commitments) if err != nil { return nil, wrapWithBlockRoot(err, root, "fetch cells and proofs from execution client") } // Return early if nothing is returned from the EL. 
- if len(cellsAndProofs) == 0 { + if len(cellsPerBlob) == 0 { return nil, nil } // Construct data column sidears from the signed block and cells and proofs. - roSidecars, err := peerdas.DataColumnSidecars(cellsAndProofs, populator) + roSidecars, err := peerdas.DataColumnSidecars(cellsPerBlob, proofsPerBlob, populator) if err != nil { return nil, wrapWithBlockRoot(err, populator.Root(), "data column sidcars from column sidecar") } @@ -684,7 +684,7 @@ func (s *Service) ConstructDataColumnSidecars(ctx context.Context, populator pee } // fetchCellsAndProofsFromExecution fetches cells and proofs from the execution client (using engine_getBlobsV2 execution API method) -func (s *Service) fetchCellsAndProofsFromExecution(ctx context.Context, kzgCommitments [][]byte) ([]kzg.CellsAndProofs, error) { +func (s *Service) fetchCellsAndProofsFromExecution(ctx context.Context, kzgCommitments [][]byte) ([][]kzg.Cell, [][]kzg.Proof, error) { // Collect KZG hashes for all blobs. versionedHashes := make([]common.Hash, 0, len(kzgCommitments)) for _, commitment := range kzgCommitments { @@ -695,21 +695,21 @@ func (s *Service) fetchCellsAndProofsFromExecution(ctx context.Context, kzgCommi // Fetch all blobsAndCellsProofs from the execution client. blobAndProofV2s, err := s.GetBlobsV2(ctx, versionedHashes) if err != nil { - return nil, errors.Wrapf(err, "get blobs V2") + return nil, nil, errors.Wrapf(err, "get blobs V2") } // Return early if nothing is returned from the EL. if len(blobAndProofV2s) == 0 { - return nil, nil + return nil, nil, nil } // Compute cells and proofs from the blobs and cell proofs. - cellsAndProofs, err := peerdas.ComputeCellsAndProofsFromStructured(blobAndProofV2s) + cellsPerBlob, proofsPerBlob, err := peerdas.ComputeCellsAndProofsFromStructured(blobAndProofV2s) if err != nil { - return nil, errors.Wrap(err, "compute cells and proofs") + return nil, nil, errors.Wrap(err, "compute cells and proofs") } - return cellsAndProofs, nil + return cellsPerBlob, proofsPerBlob, nil } // upgradeSidecarsToVerifiedSidecars upgrades a list of data column sidecars into verified data column sidecars. 
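With the CellsAndProofs struct removed, DataColumnSidecars now takes two parallel slices, and the engine client above threads them straight through from ComputeCellsAndProofsFromStructured. A minimal sketch of the new calling convention follows, assuming blobs are already available as kzg.Blob values; sidecarsFromBlobs is an illustrative helper and not part of this patch.

package example

import (
	"github.com/OffchainLabs/prysm/v7/beacon-chain/blockchain/kzg"
	"github.com/OffchainLabs/prysm/v7/beacon-chain/core/peerdas"
	"github.com/OffchainLabs/prysm/v7/consensus-types/blocks"
)

// sidecarsFromBlobs shows how cellsPerBlob and proofsPerBlob are built and
// passed side by side; index i in both slices must describe blob i.
func sidecarsFromBlobs(blobs []kzg.Blob, roBlock blocks.ROBlock) ([]blocks.RODataColumn, error) {
	cellsPerBlob := make([][]kzg.Cell, 0, len(blobs))
	proofsPerBlob := make([][]kzg.Proof, 0, len(blobs))

	for i := range blobs {
		// One extended row of cells and its matching proofs per blob.
		cells, proofs, err := kzg.ComputeCellsAndKZGProofs(&blobs[i])
		if err != nil {
			return nil, err
		}
		cellsPerBlob = append(cellsPerBlob, cells)
		proofsPerBlob = append(proofsPerBlob, proofs)
	}

	// The two slices replace the old []kzg.CellsAndProofs argument.
	return peerdas.DataColumnSidecars(cellsPerBlob, proofsPerBlob, peerdas.PopulateFromBlock(roBlock))
}

Keeping the two slices index-aligned replaces the invariant the old struct carried implicitly, which is why rotateRowsToCols now rejects inputs whose cell and proof slices have different lengths.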
diff --git a/beacon-chain/rpc/eth/beacon/handlers_test.go b/beacon-chain/rpc/eth/beacon/handlers_test.go index 344b535f5a..c11ec05807 100644 --- a/beacon-chain/rpc/eth/beacon/handlers_test.go +++ b/beacon-chain/rpc/eth/beacon/handlers_test.go @@ -3786,12 +3786,12 @@ func Test_validateBlobs(t *testing.T) { numberOfColumns := params.BeaconConfig().NumberOfColumns cellProofs := make([][]byte, uint64(blobCount)*numberOfColumns) for blobIdx := 0; blobIdx < blobCount; blobIdx++ { - cellsAndProofs, err := kzg.ComputeCellsAndKZGProofs(&kzgBlobs[blobIdx]) + _, proofs, err := kzg.ComputeCellsAndKZGProofs(&kzgBlobs[blobIdx]) require.NoError(t, err) for colIdx := uint64(0); colIdx < numberOfColumns; colIdx++ { cellProofIdx := uint64(blobIdx)*numberOfColumns + colIdx - cellProofs[cellProofIdx] = cellsAndProofs.Proofs[colIdx][:] + cellProofs[cellProofIdx] = proofs[colIdx][:] } } diff --git a/beacon-chain/rpc/eth/blob/handlers.go b/beacon-chain/rpc/eth/blob/handlers.go index afd1570a74..a970d7e61b 100644 --- a/beacon-chain/rpc/eth/blob/handlers.go +++ b/beacon-chain/rpc/eth/blob/handlers.go @@ -38,7 +38,7 @@ func (s *Server) Blobs(w http.ResponseWriter, r *http.Request) { segments := strings.Split(r.URL.Path, "/") blockId := segments[len(segments)-1] - verifiedBlobs, rpcErr := s.Blocker.Blobs(ctx, blockId, options.WithIndices(indices)) + verifiedBlobs, rpcErr := s.Blocker.BlobSidecars(ctx, blockId, options.WithIndices(indices)) if rpcErr != nil { code := core.ErrorReasonToHTTP(rpcErr.Reason) switch code { @@ -134,9 +134,6 @@ func (s *Server) GetBlobs(w http.ResponseWriter, r *http.Request) { segments := strings.Split(r.URL.Path, "/") blockId := segments[len(segments)-1] - var verifiedBlobs []*blocks.VerifiedROBlob - var rpcErr *core.RpcError - // Check if versioned_hashes parameter is provided versionedHashesStr := r.URL.Query()["versioned_hashes"] versionedHashes := make([][]byte, len(versionedHashesStr)) @@ -149,7 +146,7 @@ func (s *Server) GetBlobs(w http.ResponseWriter, r *http.Request) { versionedHashes[i] = hash } } - verifiedBlobs, rpcErr = s.Blocker.Blobs(ctx, blockId, options.WithVersionedHashes(versionedHashes)) + blobsData, rpcErr := s.Blocker.Blobs(ctx, blockId, options.WithVersionedHashes(versionedHashes)) if rpcErr != nil { code := core.ErrorReasonToHTTP(rpcErr.Reason) switch code { @@ -175,9 +172,9 @@ func (s *Server) GetBlobs(w http.ResponseWriter, r *http.Request) { if httputil.RespondWithSsz(r) { sszLen := fieldparams.BlobSize - sszData := make([]byte, len(verifiedBlobs)*sszLen) - for i := range verifiedBlobs { - copy(sszData[i*sszLen:(i+1)*sszLen], verifiedBlobs[i].Blob) + sszData := make([]byte, len(blobsData)*sszLen) + for i := range blobsData { + copy(sszData[i*sszLen:(i+1)*sszLen], blobsData[i]) } w.Header().Set(api.VersionHeader, version.String(blk.Version())) @@ -196,9 +193,9 @@ func (s *Server) GetBlobs(w http.ResponseWriter, r *http.Request) { return } - data := make([]string, len(verifiedBlobs)) - for i, v := range verifiedBlobs { - data[i] = hexutil.Encode(v.Blob) + data := make([]string, len(blobsData)) + for i, blob := range blobsData { + data[i] = hexutil.Encode(blob) } resp := &structs.GetBlobsResponse{ Data: data, diff --git a/beacon-chain/rpc/lookup/blocker.go b/beacon-chain/rpc/lookup/blocker.go index 958cbad67b..16178e9110 100644 --- a/beacon-chain/rpc/lookup/blocker.go +++ b/beacon-chain/rpc/lookup/blocker.go @@ -60,7 +60,8 @@ func (e BlockIdParseError) Error() string { // Blocker is responsible for retrieving blocks. 
type Blocker interface { Block(ctx context.Context, id []byte) (interfaces.ReadOnlySignedBeaconBlock, error) - Blobs(ctx context.Context, id string, opts ...options.BlobsOption) ([]*blocks.VerifiedROBlob, *core.RpcError) + BlobSidecars(ctx context.Context, id string, opts ...options.BlobsOption) ([]*blocks.VerifiedROBlob, *core.RpcError) + Blobs(ctx context.Context, id string, opts ...options.BlobsOption) ([][]byte, *core.RpcError) DataColumns(ctx context.Context, id string, indices []int) ([]blocks.VerifiedRODataColumn, *core.RpcError) } @@ -224,23 +225,18 @@ func (p *BeaconDbBlocker) Block(ctx context.Context, id []byte) (interfaces.Read return blk, nil } -// Blobs returns the fetched blobs for a given block ID with configurable options. -// Options can specify either blob indices or versioned hashes for retrieval. -// The identifier can be one of: -// - "head" (canonical head in node's view) -// - "genesis" -// - "finalized" -// - "justified" -// - -// - -// - -// -// cases: -// - no block, 404 -// - block exists, has commitments, inside retention period (greater of protocol- or user-specified) serve then w/ 200 unless we hit an error reading them. -// we are technically not supposed to import a block to forkchoice unless we have the blobs, so the nuance here is if we can't find the file and we are inside the protocol-defined retention period, then it's actually a 500. -// - block exists, has commitments, outside retention period (greater of protocol- or user-specified) - ie just like block exists, no commitment -func (p *BeaconDbBlocker) Blobs(ctx context.Context, id string, opts ...options.BlobsOption) ([]*blocks.VerifiedROBlob, *core.RpcError) { +// blobsContext holds common information needed for blob retrieval +type blobsContext struct { + root [fieldparams.RootLength]byte + roBlock blocks.ROBlock + commitments [][]byte + indices []int + postFulu bool +} + +// resolveBlobsContext extracts common blob retrieval logic including block resolution, +// validation, and index conversion from versioned hashes. +func (p *BeaconDbBlocker) resolveBlobsContext(ctx context.Context, id string, opts ...options.BlobsOption) (*blobsContext, *core.RpcError) { // Apply options cfg := &options.BlobsConfig{} for _, opt := range opts { @@ -279,11 +275,6 @@ func (p *BeaconDbBlocker) Blobs(ctx context.Context, id string, opts ...options. return nil, &core.RpcError{Err: errors.Wrapf(err, "failed to retrieve kzg commitments from block %#x", root), Reason: core.Internal} } - // If there are no commitments return 200 w/ empty list - if len(commitments) == 0 { - return make([]*blocks.VerifiedROBlob, 0), nil - } - // Compute the first Fulu slot. fuluForkSlot := primitives.Slot(math.MaxUint64) if fuluForkEpoch := params.BeaconConfig().FuluForkEpoch; fuluForkEpoch != primitives.Epoch(math.MaxUint64) { @@ -333,16 +324,156 @@ func (p *BeaconDbBlocker) Blobs(ctx context.Context, id string, opts ...options. 
} } + isPostFulu := false + // Create ROBlock with root for post-Fulu blocks + var roBlockWithRoot blocks.ROBlock if roBlock.Slot() >= fuluForkSlot { - roBlock, err := blocks.NewROBlockWithRoot(roSignedBlock, root) + roBlockWithRoot, err = blocks.NewROBlockWithRoot(roSignedBlock, root) if err != nil { return nil, &core.RpcError{Err: errors.Wrapf(err, "failed to create roBlock with root %#x", root), Reason: core.Internal} } - - return p.blobsFromStoredDataColumns(roBlock, indices) + isPostFulu = true } - return p.blobsFromStoredBlobs(commitments, root, indices) + return &blobsContext{ + root: root, + roBlock: roBlockWithRoot, + commitments: commitments, + indices: indices, + postFulu: isPostFulu, + }, nil +} + +// BlobSidecars returns the fetched blob sidecars (with full KZG proofs) for a given block ID. +// Options can specify either blob indices or versioned hashes for retrieval. +// The identifier can be one of: +// - "head" (canonical head in node's view) +// - "genesis" +// - "finalized" +// - "justified" +// - +// - +func (p *BeaconDbBlocker) BlobSidecars(ctx context.Context, id string, opts ...options.BlobsOption) ([]*blocks.VerifiedROBlob, *core.RpcError) { + bctx, rpcErr := p.resolveBlobsContext(ctx, id, opts...) + if rpcErr != nil { + return nil, rpcErr + } + + // If there are no commitments return 200 w/ empty list + if len(bctx.commitments) == 0 { + return make([]*blocks.VerifiedROBlob, 0), nil + } + + // Check if this is a post-Fulu block (uses data columns) + if bctx.postFulu { + return p.blobSidecarsFromStoredDataColumns(bctx.roBlock, bctx.indices) + } + + // Pre-Fulu block (uses blob sidecars) + return p.blobsFromStoredBlobs(bctx.commitments, bctx.root, bctx.indices) +} + +// Blobs returns just the blob data without computing KZG proofs or creating full sidecars. +// This is an optimized endpoint for when only blob data is needed (e.g., GetBlobs endpoint). +// The identifier can be one of: +// - "head" (canonical head in node's view) +// - "genesis" +// - "finalized" +// - "justified" +// - +// - +func (p *BeaconDbBlocker) Blobs(ctx context.Context, id string, opts ...options.BlobsOption) ([][]byte, *core.RpcError) { + bctx, rpcErr := p.resolveBlobsContext(ctx, id, opts...) + if rpcErr != nil { + return nil, rpcErr + } + + // If there are no commitments return 200 w/ empty list + if len(bctx.commitments) == 0 { + return make([][]byte, 0), nil + } + + // Check if this is a post-Fulu block (uses data columns) + if bctx.postFulu { + return p.blobsDataFromStoredDataColumns(bctx.root, bctx.indices, len(bctx.commitments)) + } + + // Pre-Fulu block (uses blob sidecars) + return p.blobsDataFromStoredBlobs(bctx.root, bctx.indices) +} + +// blobsDataFromStoredBlobs retrieves just blob data (without proofs) from stored blob sidecars. +func (p *BeaconDbBlocker) blobsDataFromStoredBlobs(root [fieldparams.RootLength]byte, indices []int) ([][]byte, *core.RpcError) { + summary := p.BlobStorage.Summary(root) + + // If no indices are provided, use all indices that are available in the summary. + if len(indices) == 0 { + maxBlobCount := summary.MaxBlobsForEpoch() + for index := 0; uint64(index) < maxBlobCount; index++ { // needed for safe conversion + if summary.HasIndex(uint64(index)) { + indices = append(indices, index) + } + } + } + + // Retrieve blob sidecars from the store and extract just the blob data. 
+ blobsData := make([][]byte, 0, len(indices)) + for _, index := range indices { + if !summary.HasIndex(uint64(index)) { + return nil, &core.RpcError{ + Err: fmt.Errorf("requested index %d not found", index), + Reason: core.NotFound, + } + } + + blobSidecar, err := p.BlobStorage.Get(root, uint64(index)) + if err != nil { + return nil, &core.RpcError{ + Err: fmt.Errorf("could not retrieve blob for block root %#x at index %d", root, index), + Reason: core.Internal, + } + } + + blobsData = append(blobsData, blobSidecar.Blob) + } + + return blobsData, nil +} + +// blobsDataFromStoredDataColumns retrieves blob data from stored data columns without computing KZG proofs. +func (p *BeaconDbBlocker) blobsDataFromStoredDataColumns(root [fieldparams.RootLength]byte, indices []int, blobCount int) ([][]byte, *core.RpcError) { + // Count how many columns we have in the store. + summary := p.DataColumnStorage.Summary(root) + stored := summary.Stored() + count := uint64(len(stored)) + + if count < peerdas.MinimumColumnCountToReconstruct() { + // There is no way to reconstruct the data columns. + return nil, &core.RpcError{ + Err: errors.Errorf("the node does not custody enough data columns to reconstruct blobs - please start the beacon node with the `--%s` flag to ensure this call to succeed, or retry later if it is already the case", flags.SubscribeAllDataSubnets.Name), + Reason: core.NotFound, + } + } + + // Retrieve from the database needed data columns. + verifiedRoDataColumnSidecars, err := p.neededDataColumnSidecars(root, stored) + if err != nil { + return nil, &core.RpcError{ + Err: errors.Wrap(err, "needed data column sidecars"), + Reason: core.Internal, + } + } + + // Use optimized path to get just blob data without computing proofs. + blobsData, err := peerdas.ReconstructBlobs(verifiedRoDataColumnSidecars, indices, blobCount) + if err != nil { + return nil, &core.RpcError{ + Err: errors.Wrap(err, "reconstruct blobs data"), + Reason: core.Internal, + } + } + + return blobsData, nil } // blobsFromStoredBlobs retrieves blob sidercars corresponding to `indices` and `root` from the store. @@ -393,13 +524,12 @@ func (p *BeaconDbBlocker) blobsFromStoredBlobs(commitments [][]byte, root [field return blobs, nil } -// blobsFromStoredDataColumns retrieves data column sidecars from the store, -// reconstructs the whole matrix if needed, converts the matrix to blobs, -// and then returns converted blobs corresponding to `indices` and `root`. +// blobSidecarsFromStoredDataColumns retrieves data column sidecars from the store, +// reconstructs the whole matrix if needed, converts the matrix to blob sidecars with full KZG proofs. // This function expects data column sidecars to be stored (aka. no blob sidecars). // If not enough data column sidecars are available to convert blobs from them // (either directly or after reconstruction), an error is returned. -func (p *BeaconDbBlocker) blobsFromStoredDataColumns(block blocks.ROBlock, indices []int) ([]*blocks.VerifiedROBlob, *core.RpcError) { +func (p *BeaconDbBlocker) blobSidecarsFromStoredDataColumns(block blocks.ROBlock, indices []int) ([]*blocks.VerifiedROBlob, *core.RpcError) { root := block.Root() // Use all indices if none are provided. @@ -439,8 +569,8 @@ func (p *BeaconDbBlocker) blobsFromStoredDataColumns(block blocks.ROBlock, indic } } - // Reconstruct blob sidecars from data column sidecars. - verifiedRoBlobSidecars, err := peerdas.ReconstructBlobs(block, verifiedRoDataColumnSidecars, indices) + // Reconstruct blob sidecars with full KZG proofs. 
+ verifiedRoBlobSidecars, err := peerdas.ReconstructBlobSidecars(block, verifiedRoDataColumnSidecars, indices) if err != nil { return nil, &core.RpcError{ Err: errors.Wrap(err, "blobs from data columns"), diff --git a/beacon-chain/rpc/lookup/blocker_test.go b/beacon-chain/rpc/lookup/blocker_test.go index 0828e56bdb..2da59cc282 100644 --- a/beacon-chain/rpc/lookup/blocker_test.go +++ b/beacon-chain/rpc/lookup/blocker_test.go @@ -182,7 +182,7 @@ func TestBlobsErrorHandling(t *testing.T) { BeaconDB: db, } - _, rpcErr := blocker.Blobs(ctx, "0x1234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef") + _, rpcErr := blocker.BlobSidecars(ctx, "0x1234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef") require.NotNil(t, rpcErr) require.Equal(t, core.ErrorReason(core.NotFound), rpcErr.Reason) require.StringContains(t, "not found", rpcErr.Err.Error()) @@ -194,7 +194,7 @@ func TestBlobsErrorHandling(t *testing.T) { ChainInfoFetcher: &mockChain.ChainService{}, } - _, rpcErr := blocker.Blobs(ctx, "999999") + _, rpcErr := blocker.BlobSidecars(ctx, "999999") require.NotNil(t, rpcErr) require.Equal(t, core.ErrorReason(core.NotFound), rpcErr.Reason) require.StringContains(t, "no blocks found at slot", rpcErr.Err.Error()) @@ -206,7 +206,7 @@ func TestBlobsErrorHandling(t *testing.T) { } // Note: genesis blocks don't support blobs, so this returns BadRequest - _, rpcErr := blocker.Blobs(ctx, "genesis") + _, rpcErr := blocker.BlobSidecars(ctx, "genesis") require.NotNil(t, rpcErr) require.Equal(t, core.ErrorReason(core.BadRequest), rpcErr.Reason) require.StringContains(t, "not supported for Phase 0", rpcErr.Err.Error()) @@ -222,7 +222,7 @@ func TestBlobsErrorHandling(t *testing.T) { }, } - _, rpcErr := blocker.Blobs(ctx, "finalized") + _, rpcErr := blocker.BlobSidecars(ctx, "finalized") require.NotNil(t, rpcErr) require.Equal(t, core.ErrorReason(core.NotFound), rpcErr.Reason) require.StringContains(t, "finalized block", rpcErr.Err.Error()) @@ -239,7 +239,7 @@ func TestBlobsErrorHandling(t *testing.T) { }, } - _, rpcErr := blocker.Blobs(ctx, "justified") + _, rpcErr := blocker.BlobSidecars(ctx, "justified") require.NotNil(t, rpcErr) require.Equal(t, core.ErrorReason(core.NotFound), rpcErr.Reason) require.StringContains(t, "justified block", rpcErr.Err.Error()) @@ -251,7 +251,7 @@ func TestBlobsErrorHandling(t *testing.T) { BeaconDB: db, } - _, rpcErr := blocker.Blobs(ctx, "invalid-hex") + _, rpcErr := blocker.BlobSidecars(ctx, "invalid-hex") require.NotNil(t, rpcErr) require.Equal(t, core.ErrorReason(core.BadRequest), rpcErr.Reason) require.StringContains(t, "could not parse block ID", rpcErr.Err.Error()) @@ -268,7 +268,7 @@ func TestBlobsErrorHandling(t *testing.T) { BeaconDB: db, } - _, rpcErr := blocker.Blobs(ctx, "100") + _, rpcErr := blocker.BlobSidecars(ctx, "100") require.NotNil(t, rpcErr) require.Equal(t, core.ErrorReason(core.Internal), rpcErr.Reason) }) @@ -306,16 +306,18 @@ func TestGetBlob(t *testing.T) { fuluBlock, fuluBlobSidecars := util.GenerateTestElectraBlockWithSidecar(t, [fieldparams.RootLength]byte{}, fs, blobCount) fuluBlockRoot := fuluBlock.Root() - cellsAndProofsList := make([]kzg.CellsAndProofs, 0, len(fuluBlobSidecars)) + cellsPerBlobList := make([][]kzg.Cell, 0, len(fuluBlobSidecars)) + proofsPerBlobList := make([][]kzg.Proof, 0, len(fuluBlobSidecars)) for _, blob := range fuluBlobSidecars { var kzgBlob kzg.Blob copy(kzgBlob[:], blob.Blob) - cellsAndProofs, err := kzg.ComputeCellsAndKZGProofs(&kzgBlob) + cells, proofs, err := kzg.ComputeCellsAndKZGProofs(&kzgBlob) 
require.NoError(t, err) - cellsAndProofsList = append(cellsAndProofsList, cellsAndProofs) + cellsPerBlobList = append(cellsPerBlobList, cells) + proofsPerBlobList = append(proofsPerBlobList, proofs) } - roDataColumnSidecars, err := peerdas.DataColumnSidecars(cellsAndProofsList, peerdas.PopulateFromBlock(fuluBlock)) + roDataColumnSidecars, err := peerdas.DataColumnSidecars(cellsPerBlobList, proofsPerBlobList, peerdas.PopulateFromBlock(fuluBlock)) require.NoError(t, err) verifiedRoDataColumnSidecars := make([]blocks.VerifiedRODataColumn, 0, len(roDataColumnSidecars)) @@ -329,7 +331,7 @@ func TestGetBlob(t *testing.T) { t.Run("genesis", func(t *testing.T) { blocker := &BeaconDbBlocker{} - _, rpcErr := blocker.Blobs(ctx, "genesis") + _, rpcErr := blocker.BlobSidecars(ctx, "genesis") require.Equal(t, http.StatusBadRequest, core.ErrorReasonToHTTP(rpcErr.Reason)) require.StringContains(t, "not supported for Phase 0 fork", rpcErr.Err.Error()) }) @@ -347,7 +349,7 @@ func TestGetBlob(t *testing.T) { BlobStorage: blobStorage, } - retrievedVerifiedSidecars, rpcErr := blocker.Blobs(ctx, "head") + retrievedVerifiedSidecars, rpcErr := blocker.BlobSidecars(ctx, "head") require.IsNil(t, rpcErr) require.Equal(t, blobCount, len(retrievedVerifiedSidecars)) @@ -374,7 +376,7 @@ func TestGetBlob(t *testing.T) { BlobStorage: blobStorage, } - verifiedSidecars, rpcErr := blocker.Blobs(ctx, "finalized") + verifiedSidecars, rpcErr := blocker.BlobSidecars(ctx, "finalized") require.IsNil(t, rpcErr) require.Equal(t, blobCount, len(verifiedSidecars)) }) @@ -389,7 +391,7 @@ func TestGetBlob(t *testing.T) { BlobStorage: blobStorage, } - verifiedSidecars, rpcErr := blocker.Blobs(ctx, "justified") + verifiedSidecars, rpcErr := blocker.BlobSidecars(ctx, "justified") require.IsNil(t, rpcErr) require.Equal(t, blobCount, len(verifiedSidecars)) }) @@ -403,7 +405,7 @@ func TestGetBlob(t *testing.T) { BlobStorage: blobStorage, } - verifiedBlobs, rpcErr := blocker.Blobs(ctx, hexutil.Encode(denebBlockRoot[:])) + verifiedBlobs, rpcErr := blocker.BlobSidecars(ctx, hexutil.Encode(denebBlockRoot[:])) require.IsNil(t, rpcErr) require.Equal(t, blobCount, len(verifiedBlobs)) }) @@ -418,7 +420,7 @@ func TestGetBlob(t *testing.T) { BlobStorage: blobStorage, } - verifiedBlobs, rpcErr := blocker.Blobs(ctx, dsStr) + verifiedBlobs, rpcErr := blocker.BlobSidecars(ctx, dsStr) require.IsNil(t, rpcErr) require.Equal(t, blobCount, len(verifiedBlobs)) }) @@ -435,7 +437,7 @@ func TestGetBlob(t *testing.T) { BlobStorage: blobStorage, } - retrievedVerifiedSidecars, rpcErr := blocker.Blobs(ctx, dsStr, options.WithIndices([]int{index})) + retrievedVerifiedSidecars, rpcErr := blocker.BlobSidecars(ctx, dsStr, options.WithIndices([]int{index})) require.IsNil(t, rpcErr) require.Equal(t, 1, len(retrievedVerifiedSidecars)) @@ -459,7 +461,7 @@ func TestGetBlob(t *testing.T) { BlobStorage: filesystem.NewEphemeralBlobStorage(t), } - verifiedBlobs, rpcErr := blocker.Blobs(ctx, dsStr) + verifiedBlobs, rpcErr := blocker.BlobSidecars(ctx, dsStr) require.IsNil(t, rpcErr) require.Equal(t, 0, len(verifiedBlobs)) }) @@ -475,7 +477,7 @@ func TestGetBlob(t *testing.T) { } noBlobIndex := len(storedBlobSidecars) + 1 - _, rpcErr := blocker.Blobs(ctx, dsStr, options.WithIndices([]int{0, noBlobIndex})) + _, rpcErr := blocker.BlobSidecars(ctx, dsStr, options.WithIndices([]int{0, noBlobIndex})) require.NotNil(t, rpcErr) require.Equal(t, core.ErrorReason(core.NotFound), rpcErr.Reason) }) @@ -489,7 +491,7 @@ func TestGetBlob(t *testing.T) { BeaconDB: db, BlobStorage: blobStorage, } - 
_, rpcErr := blocker.Blobs(ctx, dsStr, options.WithIndices([]int{0, math.MaxInt})) + _, rpcErr := blocker.BlobSidecars(ctx, dsStr, options.WithIndices([]int{0, math.MaxInt})) require.NotNil(t, rpcErr) require.Equal(t, core.ErrorReason(core.BadRequest), rpcErr.Reason) }) @@ -508,7 +510,7 @@ func TestGetBlob(t *testing.T) { DataColumnStorage: dataColumnStorage, } - _, rpcErr := blocker.Blobs(ctx, hexutil.Encode(fuluBlockRoot[:])) + _, rpcErr := blocker.BlobSidecars(ctx, hexutil.Encode(fuluBlockRoot[:])) require.NotNil(t, rpcErr) require.Equal(t, core.ErrorReason(core.NotFound), rpcErr.Reason) }) @@ -527,7 +529,7 @@ func TestGetBlob(t *testing.T) { DataColumnStorage: dataColumnStorage, } - retrievedVerifiedRoBlobs, rpcErr := blocker.Blobs(ctx, hexutil.Encode(fuluBlockRoot[:])) + retrievedVerifiedRoBlobs, rpcErr := blocker.BlobSidecars(ctx, hexutil.Encode(fuluBlockRoot[:])) require.IsNil(t, rpcErr) require.Equal(t, len(fuluBlobSidecars), len(retrievedVerifiedRoBlobs)) @@ -552,7 +554,7 @@ func TestGetBlob(t *testing.T) { DataColumnStorage: dataColumnStorage, } - retrievedVerifiedRoBlobs, rpcErr := blocker.Blobs(ctx, hexutil.Encode(fuluBlockRoot[:])) + retrievedVerifiedRoBlobs, rpcErr := blocker.BlobSidecars(ctx, hexutil.Encode(fuluBlockRoot[:])) require.IsNil(t, rpcErr) require.Equal(t, len(fuluBlobSidecars), len(retrievedVerifiedRoBlobs)) @@ -581,7 +583,7 @@ func TestGetBlob(t *testing.T) { BeaconDB: db, } - _, rpcErr := blocker.Blobs(ctx, hexutil.Encode(predenebBlockRoot[:])) + _, rpcErr := blocker.BlobSidecars(ctx, hexutil.Encode(predenebBlockRoot[:])) require.NotNil(t, rpcErr) require.Equal(t, core.ErrorReason(core.BadRequest), rpcErr.Reason) require.Equal(t, http.StatusBadRequest, core.ErrorReasonToHTTP(rpcErr.Reason)) @@ -621,7 +623,7 @@ func TestGetBlob(t *testing.T) { } // Should successfully retrieve blobs even when FuluForkEpoch is not set - retrievedBlobs, rpcErr := blocker.Blobs(ctx, hexutil.Encode(denebBlockRoot[:])) + retrievedBlobs, rpcErr := blocker.BlobSidecars(ctx, hexutil.Encode(denebBlockRoot[:])) require.IsNil(t, rpcErr) require.Equal(t, 2, len(retrievedBlobs)) @@ -665,16 +667,18 @@ func TestBlobs_CommitmentOrdering(t *testing.T) { require.Equal(t, 3, len(commitments)) // Convert blob sidecars to data column sidecars for Fulu - cellsAndProofsList := make([]kzg.CellsAndProofs, 0, len(fuluBlobs)) + cellsPerBlobList := make([][]kzg.Cell, 0, len(fuluBlobs)) + proofsPerBlobList := make([][]kzg.Proof, 0, len(fuluBlobs)) for _, blob := range fuluBlobs { var kzgBlob kzg.Blob copy(kzgBlob[:], blob.Blob) - cellsAndProofs, err := kzg.ComputeCellsAndKZGProofs(&kzgBlob) + cells, proofs, err := kzg.ComputeCellsAndKZGProofs(&kzgBlob) require.NoError(t, err) - cellsAndProofsList = append(cellsAndProofsList, cellsAndProofs) + cellsPerBlobList = append(cellsPerBlobList, cells) + proofsPerBlobList = append(proofsPerBlobList, proofs) } - dataColumnSidecarPb, err := peerdas.DataColumnSidecars(cellsAndProofsList, peerdas.PopulateFromBlock(fuluBlock)) + dataColumnSidecarPb, err := peerdas.DataColumnSidecars(cellsPerBlobList, proofsPerBlobList, peerdas.PopulateFromBlock(fuluBlock)) require.NoError(t, err) verifiedRoDataColumnSidecars := make([]blocks.VerifiedRODataColumn, 0, len(dataColumnSidecarPb)) @@ -713,7 +717,7 @@ func TestBlobs_CommitmentOrdering(t *testing.T) { // Request versioned hashes in reverse order: 2, 1, 0 requestedHashes := [][]byte{hash2[:], hash1[:], hash0[:]} - verifiedBlobs, rpcErr := blocker.Blobs(ctx, "finalized", options.WithVersionedHashes(requestedHashes)) + 
verifiedBlobs, rpcErr := blocker.BlobSidecars(ctx, "finalized", options.WithVersionedHashes(requestedHashes)) if rpcErr != nil { t.Errorf("RPC Error: %v (reason: %v)", rpcErr.Err, rpcErr.Reason) return @@ -738,7 +742,7 @@ func TestBlobs_CommitmentOrdering(t *testing.T) { // Request hashes for indices 1 and 0 (out of order) requestedHashes := [][]byte{hash1[:], hash0[:]} - verifiedBlobs, rpcErr := blocker.Blobs(ctx, "finalized", options.WithVersionedHashes(requestedHashes)) + verifiedBlobs, rpcErr := blocker.BlobSidecars(ctx, "finalized", options.WithVersionedHashes(requestedHashes)) if rpcErr != nil { t.Errorf("RPC Error: %v (reason: %v)", rpcErr.Err, rpcErr.Reason) return @@ -764,7 +768,7 @@ func TestBlobs_CommitmentOrdering(t *testing.T) { // Request only the fake hash requestedHashes := [][]byte{fakeHash} - _, rpcErr := blocker.Blobs(ctx, "finalized", options.WithVersionedHashes(requestedHashes)) + _, rpcErr := blocker.BlobSidecars(ctx, "finalized", options.WithVersionedHashes(requestedHashes)) require.NotNil(t, rpcErr) require.Equal(t, core.ErrorReason(core.NotFound), rpcErr.Reason) require.StringContains(t, "versioned hash(es) not found in block", rpcErr.Err.Error()) @@ -784,7 +788,7 @@ func TestBlobs_CommitmentOrdering(t *testing.T) { // Request valid hash with two fake hashes requestedHashes := [][]byte{fakeHash1, hash0[:], fakeHash2} - _, rpcErr := blocker.Blobs(ctx, "finalized", options.WithVersionedHashes(requestedHashes)) + _, rpcErr := blocker.BlobSidecars(ctx, "finalized", options.WithVersionedHashes(requestedHashes)) require.NotNil(t, rpcErr) require.Equal(t, core.ErrorReason(core.NotFound), rpcErr.Reason) require.StringContains(t, "versioned hash(es) not found in block", rpcErr.Err.Error()) @@ -829,16 +833,18 @@ func TestGetDataColumns(t *testing.T) { fuluBlock, fuluBlobSidecars := util.GenerateTestElectraBlockWithSidecar(t, [fieldparams.RootLength]byte{}, fuluForkSlot, blobCount) fuluBlockRoot := fuluBlock.Root() - cellsAndProofsList := make([]kzg.CellsAndProofs, 0, len(fuluBlobSidecars)) + cellsPerBlobList := make([][]kzg.Cell, 0, len(fuluBlobSidecars)) + proofsPerBlobList := make([][]kzg.Proof, 0, len(fuluBlobSidecars)) for _, blob := range fuluBlobSidecars { var kzgBlob kzg.Blob copy(kzgBlob[:], blob.Blob) - cellsAndProofs, err := kzg.ComputeCellsAndKZGProofs(&kzgBlob) + cells, proofs, err := kzg.ComputeCellsAndKZGProofs(&kzgBlob) require.NoError(t, err) - cellsAndProofsList = append(cellsAndProofsList, cellsAndProofs) + cellsPerBlobList = append(cellsPerBlobList, cells) + proofsPerBlobList = append(proofsPerBlobList, proofs) } - roDataColumnSidecars, err := peerdas.DataColumnSidecars(cellsAndProofsList, peerdas.PopulateFromBlock(fuluBlock)) + roDataColumnSidecars, err := peerdas.DataColumnSidecars(cellsPerBlobList, proofsPerBlobList, peerdas.PopulateFromBlock(fuluBlock)) require.NoError(t, err) verifiedRoDataColumnSidecars := make([]blocks.VerifiedRODataColumn, 0, len(roDataColumnSidecars)) diff --git a/beacon-chain/rpc/prysm/v1alpha1/validator/proposer.go b/beacon-chain/rpc/prysm/v1alpha1/validator/proposer.go index fef20a9e72..a06a1a5b6c 100644 --- a/beacon-chain/rpc/prysm/v1alpha1/validator/proposer.go +++ b/beacon-chain/rpc/prysm/v1alpha1/validator/proposer.go @@ -413,13 +413,13 @@ func (vs *Server) handleUnblindedBlock( if block.Version() >= version.Fulu { // Compute cells and proofs from the blobs and cell proofs. 
- cellsAndProofs, err := peerdas.ComputeCellsAndProofsFromFlat(rawBlobs, proofs) + cellsPerBlob, proofsPerBlob, err := peerdas.ComputeCellsAndProofsFromFlat(rawBlobs, proofs) if err != nil { return nil, nil, errors.Wrap(err, "compute cells and proofs") } // Construct data column sidecars from the signed block and cells and proofs. - roDataColumnSidecars, err := peerdas.DataColumnSidecars(cellsAndProofs, peerdas.PopulateFromBlock(block)) + roDataColumnSidecars, err := peerdas.DataColumnSidecars(cellsPerBlob, proofsPerBlob, peerdas.PopulateFromBlock(block)) if err != nil { return nil, nil, errors.Wrap(err, "data column sidcars") } diff --git a/beacon-chain/rpc/testutil/mock_blocker.go b/beacon-chain/rpc/testutil/mock_blocker.go index bfa4e3fbe7..b0938c6b71 100644 --- a/beacon-chain/rpc/testutil/mock_blocker.go +++ b/beacon-chain/rpc/testutil/mock_blocker.go @@ -39,8 +39,13 @@ func (m *MockBlocker) Block(_ context.Context, b []byte) (interfaces.ReadOnlySig return m.SlotBlockMap[primitives.Slot(slotNumber)], nil } +// BlobSidecars -- +func (*MockBlocker) BlobSidecars(_ context.Context, _ string, _ ...options.BlobsOption) ([]*blocks.VerifiedROBlob, *core.RpcError) { + return nil, &core.RpcError{} +} + // Blobs -- -func (*MockBlocker) Blobs(_ context.Context, _ string, _ ...options.BlobsOption) ([]*blocks.VerifiedROBlob, *core.RpcError) { +func (*MockBlocker) Blobs(_ context.Context, _ string, _ ...options.BlobsOption) ([][]byte, *core.RpcError) { return nil, &core.RpcError{} } diff --git a/beacon-chain/verification/data_column_test.go b/beacon-chain/verification/data_column_test.go index 8c40887729..ab943520c9 100644 --- a/beacon-chain/verification/data_column_test.go +++ b/beacon-chain/verification/data_column_test.go @@ -28,8 +28,8 @@ func GenerateTestDataColumns(t *testing.T, parent [fieldparams.RootLength]byte, blobs = append(blobs, kzg.Blob(roBlobs[i].Blob)) } - cellsAndProofs := util.GenerateCellsAndProofs(t, blobs) - roDataColumnSidecars, err := peerdas.DataColumnSidecars(cellsAndProofs, peerdas.PopulateFromBlock(roBlock)) + cellsPerBlob, proofsPerBlob := util.GenerateCellsAndProofs(t, blobs) + roDataColumnSidecars, err := peerdas.DataColumnSidecars(cellsPerBlob, proofsPerBlob, peerdas.PopulateFromBlock(roBlock)) require.NoError(t, err) return roDataColumnSidecars diff --git a/changelog/james-prysm_optimize-get-blobs.md b/changelog/james-prysm_optimize-get-blobs.md new file mode 100644 index 0000000000..6071fb2997 --- /dev/null +++ b/changelog/james-prysm_optimize-get-blobs.md @@ -0,0 +1,3 @@ +### Ignored + +- optimization to remove cell and blob proof computation on blob rest api. 
\ No newline at end of file diff --git a/testing/spectest/general/fulu__kzg__compute_cells_and_kzg_proofs_test.go b/testing/spectest/general/fulu__kzg__compute_cells_and_kzg_proofs_test.go index 574a71aa89..1cc1927706 100644 --- a/testing/spectest/general/fulu__kzg__compute_cells_and_kzg_proofs_test.go +++ b/testing/spectest/general/fulu__kzg__compute_cells_and_kzg_proofs_test.go @@ -42,18 +42,16 @@ func TestComputeCellsAndKzgProofs(t *testing.T) { } b := kzgPrysm.Blob(blob) - cellsAndProofsForBlob, err := kzgPrysm.ComputeCellsAndKZGProofs(&b) + cells, proofs, err := kzgPrysm.ComputeCellsAndKZGProofs(&b) if test.Output != nil { require.NoError(t, err) var combined [][]string - cs := cellsAndProofsForBlob.Cells - csRaw := make([]string, 0, len(cs)) - for _, c := range cs { + csRaw := make([]string, 0, len(cells)) + for _, c := range cells { csRaw = append(csRaw, hexutil.Encode(c[:])) } - ps := cellsAndProofsForBlob.Proofs - psRaw := make([]string, 0, len(ps)) - for _, p := range ps { + psRaw := make([]string, 0, len(proofs)) + for _, p := range proofs { psRaw = append(psRaw, hexutil.Encode(p[:])) } combined = append(combined, csRaw) diff --git a/testing/spectest/general/fulu__kzg__recover_cells_and_kzg_proofs_test.go b/testing/spectest/general/fulu__kzg__recover_cells_and_kzg_proofs_test.go index 1f613ffbb7..4d5da8ba15 100644 --- a/testing/spectest/general/fulu__kzg__recover_cells_and_kzg_proofs_test.go +++ b/testing/spectest/general/fulu__kzg__recover_cells_and_kzg_proofs_test.go @@ -69,18 +69,16 @@ func TestRecoverCellsAndKzgProofs(t *testing.T) { } // Recover the cells and proofs for the corresponding blob - cellsAndProofsForBlob, err := kzgPrysm.RecoverCellsAndKZGProofs(cellIndices, cells) + recoveredCells, recoveredProofs, err := kzgPrysm.RecoverCellsAndKZGProofs(cellIndices, cells) if test.Output != nil { require.NoError(t, err) var combined [][]string - cs := cellsAndProofsForBlob.Cells - csRaw := make([]string, 0, len(cs)) - for _, c := range cs { + csRaw := make([]string, 0, len(recoveredCells)) + for _, c := range recoveredCells { csRaw = append(csRaw, hexutil.Encode(c[:])) } - ps := cellsAndProofsForBlob.Proofs - psRaw := make([]string, 0, len(ps)) - for _, p := range ps { + psRaw := make([]string, 0, len(recoveredProofs)) + for _, p := range recoveredProofs { psRaw = append(psRaw, hexutil.Encode(p[:])) } combined = append(combined, csRaw) diff --git a/testing/util/fulu.go b/testing/util/fulu.go index 9f9687f963..d9ec558c8b 100644 --- a/testing/util/fulu.go +++ b/testing/util/fulu.go @@ -146,11 +146,11 @@ func GenerateTestFuluBlockWithSidecars(t *testing.T, blobCount int, options ...F signedBeaconBlock, err := blocks.NewSignedBeaconBlock(block) require.NoError(t, err) - cellsAndProofs := GenerateCellsAndProofs(t, blobs) + cellsPerBlob, proofsPerBlob := GenerateCellsAndProofs(t, blobs) rob, err := blocks.NewROBlockWithRoot(signedBeaconBlock, root) require.NoError(t, err) - roSidecars, err := peerdas.DataColumnSidecars(cellsAndProofs, peerdas.PopulateFromBlock(rob)) + roSidecars, err := peerdas.DataColumnSidecars(cellsPerBlob, proofsPerBlob, peerdas.PopulateFromBlock(rob)) require.NoError(t, err) verifiedRoSidecars := make([]blocks.VerifiedRODataColumn, 0, len(roSidecars)) @@ -167,12 +167,14 @@ func GenerateTestFuluBlockWithSidecars(t *testing.T, blobCount int, options ...F return roBlock, roSidecars, verifiedRoSidecars } -func GenerateCellsAndProofs(t testing.TB, blobs []kzg.Blob) []kzg.CellsAndProofs { - cellsAndProofs := make([]kzg.CellsAndProofs, len(blobs)) +func 
GenerateCellsAndProofs(t testing.TB, blobs []kzg.Blob) ([][]kzg.Cell, [][]kzg.Proof) { + cellsPerBlob := make([][]kzg.Cell, len(blobs)) + proofsPerBlob := make([][]kzg.Proof, len(blobs)) for i := range blobs { - cp, err := kzg.ComputeCellsAndKZGProofs(&blobs[i]) + cells, proofs, err := kzg.ComputeCellsAndKZGProofs(&blobs[i]) require.NoError(t, err) - cellsAndProofs[i] = cp + cellsPerBlob[i] = cells + proofsPerBlob[i] = proofs } - return cellsAndProofs + return cellsPerBlob, proofsPerBlob } From 1af12d841db72e3a23d0d36ab51b840bdd9e50d7 Mon Sep 17 00:00:00 2001 From: satushh Date: Thu, 13 Nov 2025 10:59:09 +0000 Subject: [PATCH 094/103] Metrics for eas (#16008) * metrics for eas * changelog --- beacon-chain/sync/custody.go | 9 ++++++++- beacon-chain/sync/metrics.go | 11 +++++++++++ changelog/satushh-eas-metric.md | 3 +++ 3 files changed, 22 insertions(+), 1 deletion(-) create mode 100644 changelog/satushh-eas-metric.md diff --git a/beacon-chain/sync/custody.go b/beacon-chain/sync/custody.go index 70534d3bfc..473b0a2ce9 100644 --- a/beacon-chain/sync/custody.go +++ b/beacon-chain/sync/custody.go @@ -80,10 +80,17 @@ func (s *Service) updateCustodyInfoIfNeeded() error { return errors.Wrap(err, "p2p update custody info") } - if _, _, err := s.cfg.beaconDB.UpdateCustodyInfo(s.ctx, storedEarliestSlot, storedGroupCount); err != nil { + // Update the p2p earliest available slot metric + earliestAvailableSlotP2P.Set(float64(storedEarliestSlot)) + + dbEarliestSlot, _, err := s.cfg.beaconDB.UpdateCustodyInfo(s.ctx, storedEarliestSlot, storedGroupCount) + if err != nil { return errors.Wrap(err, "beacon db update custody info") } + // Update the DB earliest available slot metric + earliestAvailableSlotDB.Set(float64(dbEarliestSlot)) + return nil } diff --git a/beacon-chain/sync/metrics.go b/beacon-chain/sync/metrics.go index 99a828ae7f..2f12deedba 100644 --- a/beacon-chain/sync/metrics.go +++ b/beacon-chain/sync/metrics.go @@ -230,6 +230,17 @@ var ( Buckets: []float64{100, 250, 500, 750, 1000, 1500, 2000, 4000, 8000, 12000, 16000}, }, ) + + // Custody earliest available slot metrics + earliestAvailableSlotP2P = promauto.NewGauge(prometheus.GaugeOpts{ + Name: "custody_earliest_available_slot_p2p", + Help: "The earliest available slot tracked by the p2p service for custody purposes", + }) + + earliestAvailableSlotDB = promauto.NewGauge(prometheus.GaugeOpts{ + Name: "custody_earliest_available_slot_db", + Help: "The earliest available slot tracked by the database for custody purposes", + }) ) func (s *Service) updateMetrics() { diff --git a/changelog/satushh-eas-metric.md b/changelog/satushh-eas-metric.md new file mode 100644 index 0000000000..c68477b13d --- /dev/null +++ b/changelog/satushh-eas-metric.md @@ -0,0 +1,3 @@ +### Added + +- Metrics to track earliest available slot \ No newline at end of file From b94904b784aed1d8b45fb6a3e17ed8b60e5fb9ba Mon Sep 17 00:00:00 2001 From: Preston Van Loon Date: Thu, 13 Nov 2025 08:25:29 -0600 Subject: [PATCH 095/103] Update CHANGELOG.md for v7.0.0 release (#16004) * Unclog for v7.0.0 * Changelog notes * Changelog fraagment --- CHANGELOG.md | 83 ++++++++++++++++++- changelog/bastin_lc-prefix.md | 3 - changelog/bastin_upgrade-v6-to-v7.md | 3 - ...ho_ssz-ql-calculate-generalized-indices.md | 3 - ...fernantho_ssz-ql-update-path-processing.md | 3 - changelog/james-prysm_fulu-fork-epoch.md | 8 -- ...mes-prysm_remove-deposit-keymanager-log.md | 3 - changelog/james-prysm_v6.1.4.md | 3 - changelog/kasey_default-layout-by-epoch.md | 2 - 
changelog/kasey_disable-backfill-if-fulu.md | 2 - changelog/kasey_ignore-readdir-errors.md | 2 - changelog/manu-by-root-sidecars.md | 3 - changelog/manu-column-slot.md | 2 - changelog/manu-factory.md | 2 - changelog/manu-flight.md | 2 - changelog/manu-fulu-log.md | 2 - changelog/manu-go-netroute-2.md | 2 - changelog/manu-go-netroute.md | 2 - changelog/manu-metric.md | 2 - changelog/manu-metrics.md | 4 - changelog/manu-rate-limit.md | 2 - changelog/manu-singleflight.md | 2 - changelog/marius-builder-gas-limit.md | 2 - .../muzry_fix_attestation_send_on_fulu.md | 2 - changelog/muzry_fix_filepath_on_windows.md | 3 - changelog/potuz_head_target_compat.md | 3 - changelog/potuz_use_head_block_validation.md | 3 - changelog/potuz_use_head_datacolumn.md | 3 - changelog/pvl-active-val-count-lock.md | 4 - changelog/pvl-fulu-test-fix.md | 3 - changelog/pvl-v7-notes.md | 3 + changelog/radek_rest-custom-headers.md | 3 - changelog/radek_v7-remove-apis.md | 3 - changelog/remove-deprecated-flags.md | 21 ----- changelog/rocksload_use_slices_contains.md | 3 - .../syjn99-attestation-epoch-overflow.md | 2 - changelog/ttsao-go_bitfield.md | 3 - changelog/ttsao-v1.6.0-beta.2.md | 3 - .../ttsao_add-columns-recovery-metric.md | 3 - changelog/ttsao_add-gloas-protobufs.md | 3 - .../ttsao_fix-optimistic-blinded-blocks.md | 3 - changelog/ttsao_update-spec-tests.md | 3 - changelog/ttsao_use-slot-ticker-pruning.md | 3 - 43 files changed, 85 insertions(+), 134 deletions(-) delete mode 100644 changelog/bastin_lc-prefix.md delete mode 100644 changelog/bastin_upgrade-v6-to-v7.md delete mode 100644 changelog/fernantho_ssz-ql-calculate-generalized-indices.md delete mode 100644 changelog/fernantho_ssz-ql-update-path-processing.md delete mode 100644 changelog/james-prysm_fulu-fork-epoch.md delete mode 100644 changelog/james-prysm_remove-deposit-keymanager-log.md delete mode 100644 changelog/james-prysm_v6.1.4.md delete mode 100644 changelog/kasey_default-layout-by-epoch.md delete mode 100644 changelog/kasey_disable-backfill-if-fulu.md delete mode 100644 changelog/kasey_ignore-readdir-errors.md delete mode 100644 changelog/manu-by-root-sidecars.md delete mode 100644 changelog/manu-column-slot.md delete mode 100644 changelog/manu-factory.md delete mode 100644 changelog/manu-flight.md delete mode 100644 changelog/manu-fulu-log.md delete mode 100644 changelog/manu-go-netroute-2.md delete mode 100644 changelog/manu-go-netroute.md delete mode 100644 changelog/manu-metric.md delete mode 100644 changelog/manu-metrics.md delete mode 100644 changelog/manu-rate-limit.md delete mode 100644 changelog/manu-singleflight.md delete mode 100644 changelog/marius-builder-gas-limit.md delete mode 100644 changelog/muzry_fix_attestation_send_on_fulu.md delete mode 100644 changelog/muzry_fix_filepath_on_windows.md delete mode 100644 changelog/potuz_head_target_compat.md delete mode 100644 changelog/potuz_use_head_block_validation.md delete mode 100644 changelog/potuz_use_head_datacolumn.md delete mode 100644 changelog/pvl-active-val-count-lock.md delete mode 100644 changelog/pvl-fulu-test-fix.md create mode 100644 changelog/pvl-v7-notes.md delete mode 100644 changelog/radek_rest-custom-headers.md delete mode 100644 changelog/radek_v7-remove-apis.md delete mode 100644 changelog/remove-deprecated-flags.md delete mode 100644 changelog/rocksload_use_slices_contains.md delete mode 100644 changelog/syjn99-attestation-epoch-overflow.md delete mode 100644 changelog/ttsao-go_bitfield.md delete mode 100644 changelog/ttsao-v1.6.0-beta.2.md delete mode 100644 
changelog/ttsao_add-columns-recovery-metric.md delete mode 100644 changelog/ttsao_add-gloas-protobufs.md delete mode 100644 changelog/ttsao_fix-optimistic-blinded-blocks.md delete mode 100644 changelog/ttsao_update-spec-tests.md delete mode 100644 changelog/ttsao_use-slot-ticker-pruning.md diff --git a/CHANGELOG.md b/CHANGELOG.md index 4687007fb4..c5b8d73181 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,6 +4,87 @@ All notable changes to this project will be documented in this file. The format is based on Keep a Changelog, and this project adheres to Semantic Versioning. +## [v7.0.0](https://github.com/prysmaticlabs/prysm/compare/v6.1.4...v7.0.0) - 2025-11-10 + +This is our initial mainnet release for the Ethereum mainnet Fulu fork on December 3rd, 2025. All operators MUST update to v7.0.0 or later release prior to the fulu fork epoch `411392`. See the [Ethereum Foundation blog post](https://blog.ethereum.org/2025/11/06/fusaka-mainnet-announcement) for more information on Fulu. + +Other than the mainnet fulu fork schedule, there are a few callouts in this release: +- `by-epoch` blob storage format is the default for new installations. Users that haven't migrated will see a warning to migrate to the new format. Existing deployments may set `--blob-storage-layout=by-epoch` to perform the migration. +- Several deprecated flags have been deleted! Please review the "removed" section of this changelog carefully. If you are referencing a removed flag, Prysm will not start! All of these flags had no effect for at least one release. +- Several deprecated API endpoints have been deleted. Please review the "removed" section of this changelog carefully. +- Backfill is not supported in Fulu. This is expected to be fixed in the next release and should be delivered prior to the mainnet activation fork. +- The builder default gas limit is raised from `45000000` (45 MGas) to `60000000` (60 MGas). +- Several bug fixes and improvements. + +### Added + +- Allow custom headers in validator client HTTP requests. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15884) +- Metric to track data columns recovered from execution layer. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15924) +- Metrics: Add count of peers per direction and type (inbound/outbound), (TCP/QUIC). [[PR]](https://github.com/prysmaticlabs/prysm/pull/15922) +- `p2p_subscribed_topic_peer_total`: Reset to avoid dangling values. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15922) +- Add `p2p_minimum_peers_per_subnet` metric. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15922) +- Added GeneralizedIndicesFromPath function to calculate the GIs for a given sszInfo object and a PathElement. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15873) +- Add Gloas protobuf definitions with spec tests and SSZ serialization support. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15601) +- Fulu fork epoch for mainnet configurations set for December 3, 2025, 09:49:11pm UTC. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15975) +- Added BPO schedules for December 9, 2025, 02:21:11pm UTC and January 7, 2026, 01:01:11am UTC. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15975) + +### Changed + +- Updated consensus spec tests to v1.6.0-beta.1 with new hashes and URL template. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15918) +- Use the `by-epoch' blob storage layout by default and log a warning to users who continue to use the flat layout, encouraging them to switch. 
[[PR]](https://github.com/prysmaticlabs/prysm/pull/15904) +- Update go-netroute to `v0.3.0`. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15934) +- Introduced Path type for SSZ-QL queries and updated PathElement (removed Length field, kept Index) enforcing that len queries are terminal (at most one per path). [[PR]](https://github.com/prysmaticlabs/prysm/pull/15935) +- Changed length query syntax from `block.payload.len(transactions)` to `len(block.payload.transactions)`. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15935) +- Update `go-netroute` to `v0.4.0`. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15949) +- Updated consensus spec tests to v1.6.0-beta.2. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15960) +- Updated go bitfield from prysmaticlabs to offchainlabs. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15968) +- Bump builder default gas limit from `45000000` (45 MGas) to `60000000` (60 MGas). [[PR]](https://github.com/prysmaticlabs/prysm/pull/15979) +- Use head state for block pubsub validation when possible. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15972) +- updated consensus spec to 1.6.0 from 1.6.0-beta.2. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15975) +- Upgrade Prysm v6 to v7. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15989) +- Use head state readonly when possible to validate data column sidecars. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15977) + +### Removed + +- log mentioning removed flag `--show-deposit-data`. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15926) +- Remove Beacon API endpoints that were deprecated in Electra: `GET /eth/v1/beacon/deposit_snapshot`, `GET /eth/v1/beacon/blocks/{block_id}/attestations`, `GET /eth/v1/beacon/pool/attestations`, `POST /eth/v1/beacon/pool/attestations`, `GET /eth/v1/beacon/pool/attester_slashings`, `POST /eth/v1/beacon/pool/attester_slashings`, `GET /eth/v1/validator/aggregate_attestation`, `POST /eth/v1/validator/aggregate_and_proofs`, `POST /eth/v1/beacon/blocks`, `POST /eth/v1/beacon/blinded_blocks`, `GET /eth/v1/builder/states/{state_id}/expected_withdrawals`. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15962) +- Deprecated flag `--enable-optional-engine-methods` has been removed. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15986) +- Deprecated flag `--disable-build-block-parallel` has been removed. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15986) +- Deprecated flag `--disable-reorg-late-blocks` has been removed. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15986) +- Deprecated flag `--disable-optional-engine-methods` has been removed. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15986) +- Deprecated flag `--disable-aggregate-parallel` has been removed. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15986) +- Deprecated flag `--enable-eip-4881` has been removed. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15986) +- Deprecated flag `--disable-eip-4881` has been removed. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15986) +- Deprecated flag `--enable-verbose-sig-verification` has been removed. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15986) +- Deprecated flag `--enable-debug-rpc-endpoints` has been removed. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15986) +- Deprecated flag `--beacon-rpc-gateway-provider` has been removed. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15986) +- Deprecated flag `--disable-grpc-gateway` has been removed. 
[[PR]](https://github.com/prysmaticlabs/prysm/pull/15986) +- Deprecated flag `--enable-experimental-state` has been removed. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15986) +- Deprecated flag `--enable-committee-aware-packing` has been removed. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15986) +- Deprecated flag `--interop-genesis-time` has been removed. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15986) +- Deprecated flag `--interop-num-validators` has been removed (from beacon-chain only; still available in validator client). [[PR]](https://github.com/prysmaticlabs/prysm/pull/15986) +- Deprecated flag `--enable-quic` has been removed. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15986) +- Deprecated flag `--attest-timely` has been removed. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15986) +- Deprecated flag `--disable-experimental-state` has been removed. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15986) +- Deprecated flag `--p2p-metadata` has been removed. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15986) + +### Fixed + +- Remove `Reading static P2P private key from a file.` log if Fulu is enabled. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15913) +- `blobSidecarByRootRPCHandler`: Do not serve a sidecar if the corresponding block is not available. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15933) +- `dataColumnSidecarByRootRPCHandler`: Do not serve a sidecar if the corresponding block is not available. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15933) +- Fix incorrect version used when sending attestation version in Fulu. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15950) +- Changed the behavior of topic subscriptions such that only topics that require the active validator count will compute that value. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15955) +- Added a Mutex to the computation of active validator count during topic subscription to avoid a race condition where multiple goroutines are computing the same work. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15955) +- `RODataColumnsVerifier.ValidProposerSignature`: Ensure the expensive signature verification is only performed once for concurrent requests for the same signature data. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15954) +- use filepath for path operations (clean, join, etc.) to ensure correct behavior on Windows. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15953) +- Fix #15969: Handle addition overflow in `/eth/v1/beacon/rewards/attestations/{epoch}`. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15970) +- `SidecarProposerExpected`: Add the slot in the single flight key. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15976) +- Ensures the rate limitation is respected for by root blob and data column sidecars requests. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15981) +- Use head only if its compatible with target for attestation validation. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15965) +- Backfill disabled if checkpoint sync origin is after fulu fork due to lack of DataColumnSidecar support in backfill. To track the availability of fulu-compatible backfill please watch https://github.com/OffchainLabs/prysm/issues/15982. [[PR]](https://github.com/prysmaticlabs/prysm/pull/15987) +- `SidecarProposerExpected`: Use the correct value of proposer index in the singleflight group. 
[[PR]](https://github.com/prysmaticlabs/prysm/pull/15993) + ## [v6.1.4](https://github.com/prysmaticlabs/prysm/compare/v6.1.3...v6.1.4) - 2025-10-24 This release includes a bug fix affecting block proposals in rare cases, along with an important update for Windows users running post-Fusaka fork. @@ -3820,4 +3901,4 @@ There are no security updates in this release. # Older than v2.0.0 -For changelog history for releases older than v2.0.0, please refer to https://github.com/prysmaticlabs/prysm/releases \ No newline at end of file +For changelog history for releases older than v2.0.0, please refer to https://github.com/prysmaticlabs/prysm/releases diff --git a/changelog/bastin_lc-prefix.md b/changelog/bastin_lc-prefix.md deleted file mode 100644 index 3ee434cbdc..0000000000 --- a/changelog/bastin_lc-prefix.md +++ /dev/null @@ -1,3 +0,0 @@ -### Ignored - -- Add log prefix to the light-client package. \ No newline at end of file diff --git a/changelog/bastin_upgrade-v6-to-v7.md b/changelog/bastin_upgrade-v6-to-v7.md deleted file mode 100644 index 9d2ea8313a..0000000000 --- a/changelog/bastin_upgrade-v6-to-v7.md +++ /dev/null @@ -1,3 +0,0 @@ -### Changed - -- Upgrade Prysm v6 to v7. \ No newline at end of file diff --git a/changelog/fernantho_ssz-ql-calculate-generalized-indices.md b/changelog/fernantho_ssz-ql-calculate-generalized-indices.md deleted file mode 100644 index 737e26a617..0000000000 --- a/changelog/fernantho_ssz-ql-calculate-generalized-indices.md +++ /dev/null @@ -1,3 +0,0 @@ -### Added - -- Added GeneralizedIndicesFromPath function to calculate the GIs for a given sszInfo object and a PathElement diff --git a/changelog/fernantho_ssz-ql-update-path-processing.md b/changelog/fernantho_ssz-ql-update-path-processing.md deleted file mode 100644 index de626041a8..0000000000 --- a/changelog/fernantho_ssz-ql-update-path-processing.md +++ /dev/null @@ -1,3 +0,0 @@ -## Changed -- Introduced Path type for SSZ-QL queries and updated PathElement (removed Length field, kept Index) enforcing that len queries are terminal (at most one per path). 
-- Changed length query syntax from `block.payload.len(transactions)` to `len(block.payload.transactions)` diff --git a/changelog/james-prysm_fulu-fork-epoch.md b/changelog/james-prysm_fulu-fork-epoch.md deleted file mode 100644 index d2864ec640..0000000000 --- a/changelog/james-prysm_fulu-fork-epoch.md +++ /dev/null @@ -1,8 +0,0 @@ -### Added - -- Fulu fork epoch for mainnet configurations set for December 3, 2025, 09:49:11pm UTC -- Added BPO schedules for December 9, 2025, 02:21:11pm UTC and January 7, 2026, 01:01:11am UTC - -### Changed - -- updated consensus spec to 1.6.0 from 1.6.0-beta.2 \ No newline at end of file diff --git a/changelog/james-prysm_remove-deposit-keymanager-log.md b/changelog/james-prysm_remove-deposit-keymanager-log.md deleted file mode 100644 index ff2662c82e..0000000000 --- a/changelog/james-prysm_remove-deposit-keymanager-log.md +++ /dev/null @@ -1,3 +0,0 @@ -### Removed - -- log mentioning removed flag `--show-deposit-data` \ No newline at end of file diff --git a/changelog/james-prysm_v6.1.4.md b/changelog/james-prysm_v6.1.4.md deleted file mode 100644 index 8a66bd68f1..0000000000 --- a/changelog/james-prysm_v6.1.4.md +++ /dev/null @@ -1,3 +0,0 @@ -### Ignored - -- Changelog entries for v6.1.4 through v6.1.3 \ No newline at end of file diff --git a/changelog/kasey_default-layout-by-epoch.md b/changelog/kasey_default-layout-by-epoch.md deleted file mode 100644 index f467dbf4f0..0000000000 --- a/changelog/kasey_default-layout-by-epoch.md +++ /dev/null @@ -1,2 +0,0 @@ -### Changed -- Use the `by-epoch' blob storage layout by default and log a warning to users who continue to use the flat layout, encouraging them to switch. diff --git a/changelog/kasey_disable-backfill-if-fulu.md b/changelog/kasey_disable-backfill-if-fulu.md deleted file mode 100644 index b512f44118..0000000000 --- a/changelog/kasey_disable-backfill-if-fulu.md +++ /dev/null @@ -1,2 +0,0 @@ -### Fixed -- Backfill disabled if checkpoint sync origin is after fulu fork due to lack of DataColumnSidecar support in backfill. To track the availability of fulu-compatible backfill please watch https://github.com/OffchainLabs/prysm/issues/15982 diff --git a/changelog/kasey_ignore-readdir-errors.md b/changelog/kasey_ignore-readdir-errors.md deleted file mode 100644 index 882415cd82..0000000000 --- a/changelog/kasey_ignore-readdir-errors.md +++ /dev/null @@ -1,2 +0,0 @@ -### Ignored -- Fix bug with layout detection when readdirnames returns io.EOF. diff --git a/changelog/manu-by-root-sidecars.md b/changelog/manu-by-root-sidecars.md deleted file mode 100644 index 390077824f..0000000000 --- a/changelog/manu-by-root-sidecars.md +++ /dev/null @@ -1,3 +0,0 @@ -### Fixed -- `blobSidecarByRootRPCHandler`: Do not serve a sidecar if the corresponding block is not available. -- `dataColumnSidecarByRootRPCHandler`: Do not serve a sidecar if the corresponding block is not available. \ No newline at end of file diff --git a/changelog/manu-column-slot.md b/changelog/manu-column-slot.md deleted file mode 100644 index a7e1a484d8..0000000000 --- a/changelog/manu-column-slot.md +++ /dev/null @@ -1,2 +0,0 @@ -### Fixed -- `SidecarProposerExpected`: Add the slot in the single flight key. \ No newline at end of file diff --git a/changelog/manu-factory.md b/changelog/manu-factory.md deleted file mode 100644 index 765a26c571..0000000000 --- a/changelog/manu-factory.md +++ /dev/null @@ -1,2 +0,0 @@ -### Ignored -- `BeaconBlockContainerToSignedBeaconBlock`: Add Fulu. 
\ No newline at end of file diff --git a/changelog/manu-flight.md b/changelog/manu-flight.md deleted file mode 100644 index d544ce3da0..0000000000 --- a/changelog/manu-flight.md +++ /dev/null @@ -1,2 +0,0 @@ -### Fixed -- `SidecarProposerExpected`: Use the correct value of proposer index in the singleflight group. \ No newline at end of file diff --git a/changelog/manu-fulu-log.md b/changelog/manu-fulu-log.md deleted file mode 100644 index eb64046805..0000000000 --- a/changelog/manu-fulu-log.md +++ /dev/null @@ -1,2 +0,0 @@ -### Fixed -- Remove `Reading static P2P private key from a file.` log if Fulu is enabled. \ No newline at end of file diff --git a/changelog/manu-go-netroute-2.md b/changelog/manu-go-netroute-2.md deleted file mode 100644 index e1f10e0466..0000000000 --- a/changelog/manu-go-netroute-2.md +++ /dev/null @@ -1,2 +0,0 @@ -### Changed -- Update `go-netroute` to `v0.4.0` \ No newline at end of file diff --git a/changelog/manu-go-netroute.md b/changelog/manu-go-netroute.md deleted file mode 100644 index 45ec7db1da..0000000000 --- a/changelog/manu-go-netroute.md +++ /dev/null @@ -1,2 +0,0 @@ -### Changed -- Update go-netroute to `v0.3.0` \ No newline at end of file diff --git a/changelog/manu-metric.md b/changelog/manu-metric.md deleted file mode 100644 index afa1fd2245..0000000000 --- a/changelog/manu-metric.md +++ /dev/null @@ -1,2 +0,0 @@ -### Ignored -- `beacon_data_column_sidecar_gossip_verification_milliseconds`: Divide by 10. \ No newline at end of file diff --git a/changelog/manu-metrics.md b/changelog/manu-metrics.md deleted file mode 100644 index edd98236e5..0000000000 --- a/changelog/manu-metrics.md +++ /dev/null @@ -1,4 +0,0 @@ -### Added -- Metrics: Add count of peers per direction and type (inbound/outbound), (TCP/QUIC). -- `p2p_subscribed_topic_peer_total`: Reset to avoid dangling values. -- Add `p2p_minimum_peers_per_subnet` metric. \ No newline at end of file diff --git a/changelog/manu-rate-limit.md b/changelog/manu-rate-limit.md deleted file mode 100644 index b2e16c5cc5..0000000000 --- a/changelog/manu-rate-limit.md +++ /dev/null @@ -1,2 +0,0 @@ -### Fixed -- Ensures the rate limitation is respected for by root blob and data column sidecars requests. \ No newline at end of file diff --git a/changelog/manu-singleflight.md b/changelog/manu-singleflight.md deleted file mode 100644 index 0b0e7174e1..0000000000 --- a/changelog/manu-singleflight.md +++ /dev/null @@ -1,2 +0,0 @@ -### Fixed -- `RODataColumnsVerifier.ValidProposerSignature`: Ensure the expensive signature verification is only performed once for concurrent requests for the same signature data. 
\ No newline at end of file diff --git a/changelog/marius-builder-gas-limit.md b/changelog/marius-builder-gas-limit.md deleted file mode 100644 index 5c871d19e8..0000000000 --- a/changelog/marius-builder-gas-limit.md +++ /dev/null @@ -1,2 +0,0 @@ -### Changed -- Bump builder default gas limit from `45000000` (45 MGas) to `60000000` (60 MGas) \ No newline at end of file diff --git a/changelog/muzry_fix_attestation_send_on_fulu.md b/changelog/muzry_fix_attestation_send_on_fulu.md deleted file mode 100644 index 035e24b761..0000000000 --- a/changelog/muzry_fix_attestation_send_on_fulu.md +++ /dev/null @@ -1,2 +0,0 @@ -### Fixed -- Fix incorrect version used when sending attestation version in Fulu diff --git a/changelog/muzry_fix_filepath_on_windows.md b/changelog/muzry_fix_filepath_on_windows.md deleted file mode 100644 index a15408aa6d..0000000000 --- a/changelog/muzry_fix_filepath_on_windows.md +++ /dev/null @@ -1,3 +0,0 @@ -### Fixed - -- use filepath for path operations (clean, join, etc.) to ensure correct behavior on Windows diff --git a/changelog/potuz_head_target_compat.md b/changelog/potuz_head_target_compat.md deleted file mode 100644 index fc87b72dcd..0000000000 --- a/changelog/potuz_head_target_compat.md +++ /dev/null @@ -1,3 +0,0 @@ -### Fixed - -- Use head only if its compatible with target for attestation validation. diff --git a/changelog/potuz_use_head_block_validation.md b/changelog/potuz_use_head_block_validation.md deleted file mode 100644 index 29988d3891..0000000000 --- a/changelog/potuz_use_head_block_validation.md +++ /dev/null @@ -1,3 +0,0 @@ -### Changed - -- Use head state for block pubsub validation when possible. diff --git a/changelog/potuz_use_head_datacolumn.md b/changelog/potuz_use_head_datacolumn.md deleted file mode 100644 index 6b219b2329..0000000000 --- a/changelog/potuz_use_head_datacolumn.md +++ /dev/null @@ -1,3 +0,0 @@ -### Changed - -- Use head state readonly when possible to validate data column sidecars. diff --git a/changelog/pvl-active-val-count-lock.md b/changelog/pvl-active-val-count-lock.md deleted file mode 100644 index 68a9b3b546..0000000000 --- a/changelog/pvl-active-val-count-lock.md +++ /dev/null @@ -1,4 +0,0 @@ -### Fixed - -- Changed the behavior of topic subscriptions such that only topics that require the active validator count will compute that value. -- Added a Mutex to the computation of active validator count during topic subscription to avoid a race condition where multiple goroutines are computing the same work. diff --git a/changelog/pvl-fulu-test-fix.md b/changelog/pvl-fulu-test-fix.md deleted file mode 100644 index 3bee1e008e..0000000000 --- a/changelog/pvl-fulu-test-fix.md +++ /dev/null @@ -1,3 +0,0 @@ -### Ignored - -- Fix test setup to properly reference electra rather than unset the fulu epoch diff --git a/changelog/pvl-v7-notes.md b/changelog/pvl-v7-notes.md new file mode 100644 index 0000000000..83dfbfc0db --- /dev/null +++ b/changelog/pvl-v7-notes.md @@ -0,0 +1,3 @@ +### Ignored + +- Updated CHANGELOG.md with release notes from v7.0.0 diff --git a/changelog/radek_rest-custom-headers.md b/changelog/radek_rest-custom-headers.md deleted file mode 100644 index e0ca1b18da..0000000000 --- a/changelog/radek_rest-custom-headers.md +++ /dev/null @@ -1,3 +0,0 @@ -### Added - -- Allow custom headers in validator client HTTP requests. 
\ No newline at end of file diff --git a/changelog/radek_v7-remove-apis.md b/changelog/radek_v7-remove-apis.md deleted file mode 100644 index 18f215d070..0000000000 --- a/changelog/radek_v7-remove-apis.md +++ /dev/null @@ -1,3 +0,0 @@ -### Removed - -- Remove Beacon API endpoints that were deprecated in Electra: `GET /eth/v1/beacon/deposit_snapshot`, `GET /eth/v1/beacon/blocks/{block_id}/attestations`, `GET /eth/v1/beacon/pool/attestations`, `POST /eth/v1/beacon/pool/attestations`, `GET /eth/v1/beacon/pool/attester_slashings`, `POST /eth/v1/beacon/pool/attester_slashings`, `GET /eth/v1/validator/aggregate_attestation`, `POST /eth/v1/validator/aggregate_and_proofs`, `POST /eth/v1/beacon/blocks`, `POST /eth/v1/beacon/blinded_blocks`, `GET /eth/v1/builder/states/{state_id}/expected_withdrawals`. \ No newline at end of file diff --git a/changelog/remove-deprecated-flags.md b/changelog/remove-deprecated-flags.md deleted file mode 100644 index 92412e4172..0000000000 --- a/changelog/remove-deprecated-flags.md +++ /dev/null @@ -1,21 +0,0 @@ -### Removed - -- Deprecated flag `--enable-optional-engine-methods` has been removed. -- Deprecated flag `--disable-build-block-parallel` has been removed. -- Deprecated flag `--disable-reorg-late-blocks` has been removed. -- Deprecated flag `--disable-optional-engine-methods` has been removed. -- Deprecated flag `--disable-aggregate-parallel` has been removed. -- Deprecated flag `--enable-eip-4881` has been removed. -- Deprecated flag `--disable-eip-4881` has been removed. -- Deprecated flag `--enable-verbose-sig-verification` has been removed. -- Deprecated flag `--enable-debug-rpc-endpoints` has been removed. -- Deprecated flag `--beacon-rpc-gateway-provider` has been removed. -- Deprecated flag `--disable-grpc-gateway` has been removed. -- Deprecated flag `--enable-experimental-state` has been removed. -- Deprecated flag `--enable-committee-aware-packing` has been removed. -- Deprecated flag `--interop-genesis-time` has been removed. -- Deprecated flag `--interop-num-validators` has been removed (from beacon-chain only; still available in validator client). -- Deprecated flag `--enable-quic` has been removed. -- Deprecated flag `--attest-timely` has been removed. -- Deprecated flag `--disable-experimental-state` has been removed. -- Deprecated flag `--p2p-metadata` has been removed. diff --git a/changelog/rocksload_use_slices_contains.md b/changelog/rocksload_use_slices_contains.md deleted file mode 100644 index 74c4d3210b..0000000000 --- a/changelog/rocksload_use_slices_contains.md +++ /dev/null @@ -1,3 +0,0 @@ -### Ignored - -- Use slices.Contains to simplify code \ No newline at end of file diff --git a/changelog/syjn99-attestation-epoch-overflow.md b/changelog/syjn99-attestation-epoch-overflow.md deleted file mode 100644 index 90f9b08ed3..0000000000 --- a/changelog/syjn99-attestation-epoch-overflow.md +++ /dev/null @@ -1,2 +0,0 @@ -### Fixed -- Fix #15969: Handle addition overflow in `/eth/v1/beacon/rewards/attestations/{epoch}`. 
diff --git a/changelog/ttsao-go_bitfield.md b/changelog/ttsao-go_bitfield.md deleted file mode 100644 index 391e6a4b77..0000000000 --- a/changelog/ttsao-go_bitfield.md +++ /dev/null @@ -1,3 +0,0 @@ -### Changed - -- Updated go bitfield from prysmaticlabs to offchainlabs diff --git a/changelog/ttsao-v1.6.0-beta.2.md b/changelog/ttsao-v1.6.0-beta.2.md deleted file mode 100644 index 94b777e0f4..0000000000 --- a/changelog/ttsao-v1.6.0-beta.2.md +++ /dev/null @@ -1,3 +0,0 @@ -### Changed - -- Updated consensus spec tests to v1.6.0-beta.2 \ No newline at end of file diff --git a/changelog/ttsao_add-columns-recovery-metric.md b/changelog/ttsao_add-columns-recovery-metric.md deleted file mode 100644 index b4e4212e3a..0000000000 --- a/changelog/ttsao_add-columns-recovery-metric.md +++ /dev/null @@ -1,3 +0,0 @@ -### Added - -- Metric to track data columns recovered from execution layer diff --git a/changelog/ttsao_add-gloas-protobufs.md b/changelog/ttsao_add-gloas-protobufs.md deleted file mode 100644 index 85a288ef91..0000000000 --- a/changelog/ttsao_add-gloas-protobufs.md +++ /dev/null @@ -1,3 +0,0 @@ -### Added - -- Add Gloas protobuf definitions with spec tests and SSZ serialization support diff --git a/changelog/ttsao_fix-optimistic-blinded-blocks.md b/changelog/ttsao_fix-optimistic-blinded-blocks.md deleted file mode 100644 index e125004b9d..0000000000 --- a/changelog/ttsao_fix-optimistic-blinded-blocks.md +++ /dev/null @@ -1,3 +0,0 @@ -### Ignored - -- Return optimistic response only when handling blinded blocks in proposer diff --git a/changelog/ttsao_update-spec-tests.md b/changelog/ttsao_update-spec-tests.md deleted file mode 100644 index ca6b5849fa..0000000000 --- a/changelog/ttsao_update-spec-tests.md +++ /dev/null @@ -1,3 +0,0 @@ -### Changed - -- Updated consensus spec tests to v1.6.0-beta.1 with new hashes and URL template diff --git a/changelog/ttsao_use-slot-ticker-pruning.md b/changelog/ttsao_use-slot-ticker-pruning.md deleted file mode 100644 index 80a6d2caf0..0000000000 --- a/changelog/ttsao_use-slot-ticker-pruning.md +++ /dev/null @@ -1,3 +0,0 @@ -### Ignored - -- Use SlotTicker with offset instead of time.Ticker for attestation pool pruning to avoid conflicts with slot boundary operations From 7ba60d93f22e7acad88e2350e0329adc971ecbef Mon Sep 17 00:00:00 2001 From: james-prysm <90280386+james-prysm@users.noreply.github.com> Date: Thu, 13 Nov 2025 10:51:00 -0800 Subject: [PATCH 096/103] Changed `subscribe-all-data-subnets` to `supernode` (#16012) * adding alias * kasey's suggestion * updating description * Update changelog/james-prysm_supernode-alias.md Co-authored-by: kasey <489222+kasey@users.noreply.github.com> --------- Co-authored-by: kasey <489222+kasey@users.noreply.github.com> --- changelog/james-prysm_supernode-alias.md | 3 +++ cmd/beacon-chain/flags/base.go | 5 +++-- 2 files changed, 6 insertions(+), 2 deletions(-) create mode 100644 changelog/james-prysm_supernode-alias.md diff --git a/changelog/james-prysm_supernode-alias.md b/changelog/james-prysm_supernode-alias.md new file mode 100644 index 0000000000..76be28b380 --- /dev/null +++ b/changelog/james-prysm_supernode-alias.md @@ -0,0 +1,3 @@ +### Changed + +- Changed `--subscribe-all-data-subnets` flag to `--supernode` and aliased `--subscribe-all-data-subnets` for existing users. 
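For context on the rename below, here is a minimal, self-contained sketch of how the alias behaves, assuming the github.com/urfave/cli/v2 API that these flag definitions appear to use; the standalone app is hypothetical and only illustrates that either spelling toggles the same flag value:

package main

import (
	"fmt"
	"log"
	"os"

	"github.com/urfave/cli/v2"
)

func main() {
	app := &cli.App{
		Flags: []cli.Flag{
			&cli.BoolFlag{
				Name:    "supernode",
				Aliases: []string{"subscribe-all-data-subnets"},
				Usage:   "Enable subscription to all data subnets.",
			},
		},
		Action: func(c *cli.Context) error {
			// Passing either --supernode or --subscribe-all-data-subnets
			// sets the same flag, so existing users keep working.
			fmt.Println("supernode:", c.Bool("supernode"))
			return nil
		},
	}
	if err := app.Run(os.Args); err != nil {
		log.Fatal(err)
	}
}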
\ No newline at end of file diff --git a/cmd/beacon-chain/flags/base.go b/cmd/beacon-chain/flags/base.go index e7a35d8f58..7f27b27ccf 100644 --- a/cmd/beacon-chain/flags/base.go +++ b/cmd/beacon-chain/flags/base.go @@ -335,8 +335,9 @@ var ( } // SubscribeAllDataSubnets enables subscription to all data subnets. SubscribeAllDataSubnets = &cli.BoolFlag{ - Name: "subscribe-all-data-subnets", - Usage: "Enable subscription to all data subnets. Once set, unsetting this flag won't have any effect.", + Name: "supernode", + Aliases: []string{"subscribe-all-data-subnets"}, + Usage: "Enable subscription to all data subnets and store all blob columns, serving them over RPC. Required post-Fusaka for full blob reconstruction. This is effectively one-way: once enabled, the node keeps storing and serving all columns even if the flag is later unset.", } // BatchVerifierLimit sets the maximum number of signatures to batch verify at once. BatchVerifierLimit = &cli.IntFlag{ From f77b78943a37ce4bdb5baa9e9d4bec5c55b3dd42 Mon Sep 17 00:00:00 2001 From: terence Date: Thu, 13 Nov 2025 16:55:32 -0500 Subject: [PATCH 097/103] Use explicit slot component timing configs (#15999) * Use new timing configs (due BPS) * Bastin's feedback --- .../peerdas/reconstruction_helpers_test.go | 10 +- beacon-chain/db/kv/genesis_test.go | 2 +- .../forkchoice/doubly-linked-tree/node.go | 2 +- .../reorg_late_blocks_test.go | 4 +- .../forkchoice/doubly-linked-tree/store.go | 2 +- beacon-chain/p2p/broadcaster.go | 4 +- beacon-chain/p2p/broadcaster_test.go | 5 +- beacon-chain/p2p/service_test.go | 2 +- beacon-chain/rpc/eth/config/handlers_test.go | 17 ++- beacon-chain/sync/rpc_goodbye_test.go | 2 +- beacon-chain/sync/subscriber_test.go | 9 +- beacon-chain/sync/validate_light_client.go | 5 +- .../sync/validate_light_client_test.go | 6 +- changelog/ttsao_use-timing-configs.md | 3 + config/params/basis_points.go | 4 +- config/params/config.go | 30 ++++- config/params/config_test.go | 122 ++++++++++++++++++ config/params/configset_test.go | 1 + config/params/loader.go | 6 + config/params/loader_test.go | 10 +- config/params/mainnet_config.go | 10 +- config/params/minimal_config.go | 1 + config/params/testdata/e2e_config.yaml | 4 +- config/params/testnet_config_test.go | 1 + config/params/testnet_e2e_config.go | 2 + time/slots/slottime.go | 30 ++--- validator/client/BUILD.bazel | 2 + validator/client/aggregate.go | 30 +---- validator/client/aggregate_test.go | 10 +- validator/client/attest.go | 6 +- validator/client/runner_test.go | 2 +- validator/client/sync_committee.go | 2 +- validator/client/wait_for_activation_test.go | 4 +- validator/client/wait_helpers.go | 65 ++++++++++ validator/client/wait_helpers_test.go | 87 +++++++++++++ 35 files changed, 402 insertions(+), 100 deletions(-) create mode 100644 changelog/ttsao_use-timing-configs.md create mode 100644 validator/client/wait_helpers.go create mode 100644 validator/client/wait_helpers_test.go diff --git a/beacon-chain/core/peerdas/reconstruction_helpers_test.go b/beacon-chain/core/peerdas/reconstruction_helpers_test.go index 1a3bc93f69..ed8b108d49 100644 --- a/beacon-chain/core/peerdas/reconstruction_helpers_test.go +++ b/beacon-chain/core/peerdas/reconstruction_helpers_test.go @@ -16,11 +16,11 @@ import ( // testBlobSetup holds common test data for blob reconstruction tests. 
type testBlobSetup struct { - blobCount int - blobs []kzg.Blob - roBlock blocks.ROBlock - roDataColumnSidecars []blocks.RODataColumn - verifiedRoDataColumnSidecars []blocks.VerifiedRODataColumn + blobCount int + blobs []kzg.Blob + roBlock blocks.ROBlock + roDataColumnSidecars []blocks.RODataColumn + verifiedRoDataColumnSidecars []blocks.VerifiedRODataColumn } // setupTestBlobs creates a complete test setup with blobs, cells, proofs, and data column sidecars. diff --git a/beacon-chain/db/kv/genesis_test.go b/beacon-chain/db/kv/genesis_test.go index fb340174f3..06356cc3f0 100644 --- a/beacon-chain/db/kv/genesis_test.go +++ b/beacon-chain/db/kv/genesis_test.go @@ -146,7 +146,7 @@ func TestEnsureEmbeddedGenesis(t *testing.T) { params.SetupTestConfigCleanup(t) // Embedded Genesis works with Mainnet config cfg := params.MainnetConfig() - cfg.SecondsPerSlot = 1 + cfg.SlotDurationMilliseconds = 1000 undo, err := params.SetActiveWithUndo(cfg) require.NoError(t, err) defer func() { diff --git a/beacon-chain/forkchoice/doubly-linked-tree/node.go b/beacon-chain/forkchoice/doubly-linked-tree/node.go index 8ac6884d1b..3b36157f18 100644 --- a/beacon-chain/forkchoice/doubly-linked-tree/node.go +++ b/beacon-chain/forkchoice/doubly-linked-tree/node.go @@ -134,7 +134,7 @@ func (n *Node) setNodeAndParentValidated(ctx context.Context) error { // slot will have secs = 3 below. func (n *Node) arrivedEarly(genesis time.Time) (bool, error) { sss, err := slots.SinceSlotStart(n.slot, genesis, n.timestamp.Truncate(time.Second)) // Truncate such that 3.9999 seconds will have a value of 3. - votingWindow := time.Duration(params.BeaconConfig().SecondsPerSlot/params.BeaconConfig().IntervalsPerSlot) * time.Second + votingWindow := params.BeaconConfig().SlotComponentDuration(params.BeaconConfig().AttestationDueBPS) return sss < votingWindow, err } diff --git a/beacon-chain/forkchoice/doubly-linked-tree/reorg_late_blocks_test.go b/beacon-chain/forkchoice/doubly-linked-tree/reorg_late_blocks_test.go index a828e10a3b..34c10cf9d6 100644 --- a/beacon-chain/forkchoice/doubly-linked-tree/reorg_late_blocks_test.go +++ b/beacon-chain/forkchoice/doubly-linked-tree/reorg_late_blocks_test.go @@ -134,8 +134,8 @@ func TestForkChoice_GetProposerHead(t *testing.T) { headRoot, err := f.Head(ctx) require.NoError(t, err) require.Equal(t, blk.Root(), headRoot) - orphanLateBlockFirstThreshold := params.BeaconConfig().SecondsPerSlot / params.BeaconConfig().IntervalsPerSlot - f.store.headNode.timestamp.Add(-1 * time.Duration(params.BeaconConfig().SecondsPerSlot-orphanLateBlockFirstThreshold) * time.Second) + orphanLateBlockFirstThreshold := params.BeaconConfig().SlotComponentDuration(params.BeaconConfig().AttestationDueBPS) + f.store.headNode.timestamp.Add(-1 * (params.BeaconConfig().SlotDuration() - orphanLateBlockFirstThreshold)) t.Run("head is weak", func(t *testing.T) { require.Equal(t, parentRoot, f.GetProposerHead()) }) diff --git a/beacon-chain/forkchoice/doubly-linked-tree/store.go b/beacon-chain/forkchoice/doubly-linked-tree/store.go index c0d314df73..d645ab9f35 100644 --- a/beacon-chain/forkchoice/doubly-linked-tree/store.go +++ b/beacon-chain/forkchoice/doubly-linked-tree/store.go @@ -137,7 +137,7 @@ func (s *Store) insert(ctx context.Context, if err != nil { return nil, fmt.Errorf("could not determine time since current slot started: %w", err) } - boostThreshold := time.Duration(params.BeaconConfig().SecondsPerSlot/params.BeaconConfig().IntervalsPerSlot) * time.Second + boostThreshold := 
params.BeaconConfig().SlotComponentDuration(params.BeaconConfig().AttestationDueBPS) isFirstBlock := s.proposerBoostRoot == [32]byte{} if currentSlot == slot && sss < boostThreshold && isFirstBlock { s.proposerBoostRoot = root diff --git a/beacon-chain/p2p/broadcaster.go b/beacon-chain/p2p/broadcaster.go index 6066a4e052..6e638071ee 100644 --- a/beacon-chain/p2p/broadcaster.go +++ b/beacon-chain/p2p/broadcaster.go @@ -286,7 +286,7 @@ func (s *Service) BroadcastLightClientOptimisticUpdate(ctx context.Context, upda return err } timeSinceSlotStart := time.Since(slotStart) - expectedDelay := slots.ComponentDuration(primitives.BP(params.BeaconConfig().SyncMessageDueBPS)) + expectedDelay := params.BeaconConfig().SlotComponentDuration(params.BeaconConfig().SyncMessageDueBPS) if timeSinceSlotStart < expectedDelay { waitDuration := expectedDelay - timeSinceSlotStart <-time.After(waitDuration) @@ -320,7 +320,7 @@ func (s *Service) BroadcastLightClientFinalityUpdate(ctx context.Context, update return err } timeSinceSlotStart := time.Since(slotStart) - expectedDelay := slots.ComponentDuration(primitives.BP(params.BeaconConfig().SyncMessageDueBPS)) + expectedDelay := params.BeaconConfig().SlotComponentDuration(params.BeaconConfig().SyncMessageDueBPS) if timeSinceSlotStart < expectedDelay { waitDuration := expectedDelay - timeSinceSlotStart <-time.After(waitDuration) diff --git a/beacon-chain/p2p/broadcaster_test.go b/beacon-chain/p2p/broadcaster_test.go index 5ea0c13bcd..81041b0b3a 100644 --- a/beacon-chain/p2p/broadcaster_test.go +++ b/beacon-chain/p2p/broadcaster_test.go @@ -22,7 +22,6 @@ import ( "github.com/OffchainLabs/prysm/v7/config/params" "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" - "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" "github.com/OffchainLabs/prysm/v7/consensus-types/wrapper" "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" @@ -598,7 +597,7 @@ func TestService_BroadcastLightClientOptimisticUpdate(t *testing.T) { slotStartTime, err := slots.StartTime(p.genesisTime, msg.SignatureSlot()) require.NoError(t, err) - expectedDelay := slots.ComponentDuration(primitives.BP(params.BeaconConfig().SyncMessageDueBPS)) + expectedDelay := params.BeaconConfig().SlotComponentDuration(params.BeaconConfig().SyncMessageDueBPS) if time.Now().Before(slotStartTime.Add(expectedDelay)) { tt.Errorf("Message received too early, now %v, expected at least %v", time.Now(), slotStartTime.Add(expectedDelay)) } @@ -674,7 +673,7 @@ func TestService_BroadcastLightClientFinalityUpdate(t *testing.T) { slotStartTime, err := slots.StartTime(p.genesisTime, msg.SignatureSlot()) require.NoError(t, err) - expectedDelay := slots.ComponentDuration(primitives.BP(params.BeaconConfig().SyncMessageDueBPS)) + expectedDelay := params.BeaconConfig().SlotComponentDuration(params.BeaconConfig().SyncMessageDueBPS) if time.Now().Before(slotStartTime.Add(expectedDelay)) { tt.Errorf("Message received too early, now %v, expected at least %v", time.Now(), slotStartTime.Add(expectedDelay)) } diff --git a/beacon-chain/p2p/service_test.go b/beacon-chain/p2p/service_test.go index 2596681ca7..f7b180beec 100644 --- a/beacon-chain/p2p/service_test.go +++ b/beacon-chain/p2p/service_test.go @@ -128,7 +128,7 @@ func TestService_Start_NoDiscoverFlag(t *testing.T) { beaconCfg.AltairForkEpoch = 0 beaconCfg.BellatrixForkEpoch = 0 beaconCfg.CapellaForkEpoch = 0 - beaconCfg.SecondsPerSlot = 1 + 
beaconCfg.SlotDurationMilliseconds = 1000 params.OverrideBeaconConfig(beaconCfg) exitRoutine := make(chan bool) diff --git a/beacon-chain/rpc/eth/config/handlers_test.go b/beacon-chain/rpc/eth/config/handlers_test.go index 8f174d80d6..04d11cf9dd 100644 --- a/beacon-chain/rpc/eth/config/handlers_test.go +++ b/beacon-chain/rpc/eth/config/handlers_test.go @@ -87,6 +87,7 @@ func TestGetSpec(t *testing.T) { config.ETH1AddressWithdrawalPrefixByte = byte('c') config.GenesisDelay = 24 config.SecondsPerSlot = 25 + config.SlotDurationMilliseconds = 120 config.MinAttestationInclusionDelay = 26 config.SlotsPerEpoch = 27 config.MinSeedLookahead = 28 @@ -129,6 +130,10 @@ func TestGetSpec(t *testing.T) { config.ProportionalSlashingMultiplierAltair = 69 config.InactivityScoreRecoveryRate = 70 config.MinSyncCommitteeParticipants = 71 + config.ProposerReorgCutoffBPS = primitives.BP(121) + config.AttestationDueBPS = primitives.BP(122) + config.AggregrateDueBPS = primitives.BP(123) + config.ContributionDueBPS = primitives.BP(124) config.TerminalBlockHash = common.HexToHash("TerminalBlockHash") config.TerminalBlockHashActivationEpoch = 72 config.TerminalTotalDifficulty = "73" @@ -201,7 +206,7 @@ func TestGetSpec(t *testing.T) { require.NoError(t, json.Unmarshal(writer.Body.Bytes(), &resp)) data, ok := resp.Data.(map[string]interface{}) require.Equal(t, true, ok) - assert.Equal(t, 171, len(data)) + assert.Equal(t, 176, len(data)) for k, v := range data { t.Run(k, func(t *testing.T) { switch k { @@ -291,6 +296,8 @@ func TestGetSpec(t *testing.T) { assert.Equal(t, "24", v) case "SECONDS_PER_SLOT": assert.Equal(t, "25", v) + case "SLOT_DURATION_MS": + assert.Equal(t, "120", v) case "MIN_ATTESTATION_INCLUSION_DELAY": assert.Equal(t, "26", v) case "SLOTS_PER_EPOCH": @@ -447,6 +454,14 @@ func TestGetSpec(t *testing.T) { assert.Equal(t, "20", v) case "REORG_PARENT_WEIGHT_THRESHOLD": assert.Equal(t, "160", v) + case "PROPOSER_REORG_CUTOFF_BPS": + assert.Equal(t, "121", v) + case "ATTESTATION_DUE_BPS": + assert.Equal(t, "122", v) + case "AGGREGRATE_DUE_BPS": + assert.Equal(t, "123", v) + case "CONTRIBUTION_DUE_BPS": + assert.Equal(t, "124", v) case "MAX_PER_EPOCH_ACTIVATION_CHURN_LIMIT": assert.Equal(t, "8", v) case "MAX_REQUEST_LIGHT_CLIENT_UPDATES": diff --git a/beacon-chain/sync/rpc_goodbye_test.go b/beacon-chain/sync/rpc_goodbye_test.go index 17b2f8210c..6977e48851 100644 --- a/beacon-chain/sync/rpc_goodbye_test.go +++ b/beacon-chain/sync/rpc_goodbye_test.go @@ -23,7 +23,7 @@ import ( func TestGoodByeRPCHandler_Disconnects_With_Peer(t *testing.T) { params.SetupTestConfigCleanup(t) cfg := params.MainnetConfig() - cfg.SecondsPerSlot = 1 + cfg.SlotDurationMilliseconds = 1000 params.OverrideBeaconConfig(cfg) p1 := p2ptest.NewTestP2P(t) diff --git a/beacon-chain/sync/subscriber_test.go b/beacon-chain/sync/subscriber_test.go index 7fc3bf0883..3875dec4ef 100644 --- a/beacon-chain/sync/subscriber_test.go +++ b/beacon-chain/sync/subscriber_test.go @@ -130,7 +130,7 @@ func TestSubscribe_UnsubscribeTopic(t *testing.T) { func TestSubscribe_ReceivesAttesterSlashing(t *testing.T) { params.SetupTestConfigCleanup(t) cfg := params.MainnetConfig() - cfg.SecondsPerSlot = 1 + cfg.SlotDurationMilliseconds = 1000 params.OverrideBeaconConfig(cfg) p2pService := p2ptest.NewTestP2P(t) @@ -443,7 +443,7 @@ func Test_wrapAndReportValidation(t *testing.T) { func TestFilterSubnetPeers(t *testing.T) { params.SetupTestConfigCleanup(t) cfg := params.MainnetConfig() - cfg.SecondsPerSlot = 1 + cfg.SlotDurationMilliseconds = 1000 
params.OverrideBeaconConfig(cfg) gFlags := new(flags.GlobalFlags) @@ -457,8 +457,9 @@ func TestFilterSubnetPeers(t *testing.T) { currSlot := primitives.Slot(100) gt := time.Now() + slotDuration := params.BeaconConfig().SlotDuration() genPlus100 := func() time.Time { - return gt.Add(time.Second * time.Duration(uint64(currSlot)*params.BeaconConfig().SecondsPerSlot)) + return gt.Add(time.Duration(uint64(currSlot)) * slotDuration) } chain := &mockChain.ChainService{ Genesis: gt, @@ -525,7 +526,7 @@ func TestFilterSubnetPeers(t *testing.T) { func TestSubscribeWithSyncSubnets_DynamicOK(t *testing.T) { params.SetupTestConfigCleanup(t) cfg := params.MainnetConfig() - cfg.SecondsPerSlot = 1 + cfg.SlotDurationMilliseconds = 1000 params.OverrideBeaconConfig(cfg) p := p2ptest.NewTestP2P(t) diff --git a/beacon-chain/sync/validate_light_client.go b/beacon-chain/sync/validate_light_client.go index d8b7ad2117..80b235c695 100644 --- a/beacon-chain/sync/validate_light_client.go +++ b/beacon-chain/sync/validate_light_client.go @@ -6,7 +6,6 @@ import ( "github.com/OffchainLabs/prysm/v7/config/params" "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" - "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" "github.com/OffchainLabs/prysm/v7/monitoring/tracing" "github.com/OffchainLabs/prysm/v7/monitoring/tracing/trace" "github.com/OffchainLabs/prysm/v7/time/slots" @@ -60,7 +59,7 @@ func (s *Service) validateLightClientOptimisticUpdate(ctx context.Context, pid p return pubsub.ValidationReject, nil } earliestValidTime := slotStart. - Add(slots.ComponentDuration(primitives.BP(params.BeaconConfig().SyncMessageDueBPS))). + Add(params.BeaconConfig().SlotComponentDuration(params.BeaconConfig().SyncMessageDueBPS)). Add(-params.BeaconConfig().MaximumGossipClockDisparityDuration()) if s.cfg.clock.Now().Before(earliestValidTime) { log.Debug("Newly received light client optimistic update ignored. not enough time passed for block to propagate") @@ -130,7 +129,7 @@ func (s *Service) validateLightClientFinalityUpdate(ctx context.Context, pid pee return pubsub.ValidationReject, nil } earliestValidTime := slotStart. - Add(slots.ComponentDuration(primitives.BP(params.BeaconConfig().SyncMessageDueBPS))). + Add(params.BeaconConfig().SlotComponentDuration(params.BeaconConfig().SyncMessageDueBPS)). Add(-params.BeaconConfig().MaximumGossipClockDisparityDuration()) if s.cfg.clock.Now().Before(earliestValidTime) { log.Debug("Newly received light client finality update ignored. 
not enough time passed for block to propagate") diff --git a/beacon-chain/sync/validate_light_client_test.go b/beacon-chain/sync/validate_light_client_test.go index 080ed30b4c..c979ddff47 100644 --- a/beacon-chain/sync/validate_light_client_test.go +++ b/beacon-chain/sync/validate_light_client_test.go @@ -16,11 +16,9 @@ import ( mockSync "github.com/OffchainLabs/prysm/v7/beacon-chain/sync/initial-sync/testing" "github.com/OffchainLabs/prysm/v7/config/params" "github.com/OffchainLabs/prysm/v7/consensus-types/interfaces" - "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" "github.com/OffchainLabs/prysm/v7/runtime/version" "github.com/OffchainLabs/prysm/v7/testing/require" "github.com/OffchainLabs/prysm/v7/testing/util" - "github.com/OffchainLabs/prysm/v7/time/slots" pubsub "github.com/libp2p/go-libp2p-pubsub" pb "github.com/libp2p/go-libp2p-pubsub/pb" ) @@ -83,7 +81,7 @@ func TestValidateLightClientOptimisticUpdate(t *testing.T) { }, { name: "not enough time passed", - genesisDrift: -int(math.Ceil(float64(slots.ComponentDuration(primitives.BP(params.BeaconConfig().SyncMessageDueBPS))) / float64(time.Second))), + genesisDrift: -int(math.Ceil(float64(params.BeaconConfig().SlotComponentDuration(params.BeaconConfig().SyncMessageDueBPS)) / float64(time.Second))), oldUpdateOptions: []util.LightClientOption{}, newUpdateOptions: []util.LightClientOption{}, expectedResult: pubsub.ValidationIgnore, @@ -209,7 +207,7 @@ func TestValidateLightClientFinalityUpdate(t *testing.T) { }, { name: "not enough time passed", - genesisDrift: -int(math.Ceil(float64(slots.ComponentDuration(primitives.BP(params.BeaconConfig().SyncMessageDueBPS))) / float64(time.Second))), + genesisDrift: -int(math.Ceil(float64(params.BeaconConfig().SlotComponentDuration(params.BeaconConfig().SyncMessageDueBPS)) / float64(time.Second))), oldUpdateOptions: []util.LightClientOption{}, newUpdateOptions: []util.LightClientOption{}, expectedResult: pubsub.ValidationIgnore, diff --git a/changelog/ttsao_use-timing-configs.md b/changelog/ttsao_use-timing-configs.md new file mode 100644 index 0000000000..eebd16db99 --- /dev/null +++ b/changelog/ttsao_use-timing-configs.md @@ -0,0 +1,3 @@ +### Changed + +- Use explicit slot component timing configs \ No newline at end of file diff --git a/config/params/basis_points.go b/config/params/basis_points.go index f829bd21ed..9ff3ba0327 100644 --- a/config/params/basis_points.go +++ b/config/params/basis_points.go @@ -4,7 +4,7 @@ import "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" const BasisPoints = primitives.BP(10000) -// SlotBP returns the basis points for a given slot. +// SlotBP returns the duration of a slot expressed in milliseconds, represented as basis points of a slot. func SlotBP() primitives.BP { - return primitives.BP(12000) + return primitives.BP(BeaconConfig().SlotDurationMillis()) } diff --git a/config/params/config.go b/config/params/config.go index a561164124..17d7a0b4a3 100644 --- a/config/params/config.go +++ b/config/params/config.go @@ -66,6 +66,7 @@ type BeaconChainConfig struct { GenesisDelay uint64 `yaml:"GENESIS_DELAY" spec:"true"` // GenesisDelay is the minimum number of seconds to delay starting the Ethereum Beacon Chain genesis. Must be at least 1 second. MinAttestationInclusionDelay primitives.Slot `yaml:"MIN_ATTESTATION_INCLUSION_DELAY" spec:"true"` // MinAttestationInclusionDelay defines how many slots validator has to wait to include attestation for beacon block. 
SecondsPerSlot uint64 `yaml:"SECONDS_PER_SLOT" spec:"true"` // SecondsPerSlot is how many seconds are in a single slot. + SlotDurationMilliseconds uint64 `yaml:"SLOT_DURATION_MS" spec:"true"` // SlotDurationMilliseconds is the slot time expressed in milliseconds. SlotsPerEpoch primitives.Slot `yaml:"SLOTS_PER_EPOCH" spec:"true"` // SlotsPerEpoch is the number of slots in an epoch. SqrRootSlotsPerEpoch primitives.Slot // SqrRootSlotsPerEpoch is a hard coded value where we take the square root of `SlotsPerEpoch` and round down. MinSeedLookahead primitives.Epoch `yaml:"MIN_SEED_LOOKAHEAD" spec:"true"` // MinSeedLookahead is the duration of randao look ahead seed. @@ -84,6 +85,11 @@ type BeaconChainConfig struct { ReorgParentWeightThreshold uint64 `yaml:"REORG_PARENT_WEIGHT_THRESHOLD" spec:"true"` // ReorgParentWeightThreshold defines a value that is a % of the committee weight to consider a parent block strong and subject its child to being orphaned. ReorgMaxEpochsSinceFinalization primitives.Epoch `yaml:"REORG_MAX_EPOCHS_SINCE_FINALIZATION" spec:"true"` // This defines a limit to consider safe to orphan a block if the network is finalizing IntervalsPerSlot uint64 `yaml:"INTERVALS_PER_SLOT"` // IntervalsPerSlot defines the number of fork choice intervals in a slot defined in the fork choice spec. + ProposerReorgCutoffBPS primitives.BP `yaml:"PROPOSER_REORG_CUTOFF_BPS" spec:"true"` // ProposerReorgCutoffBPS defines the proposer reorg deadline in basis points of the slot. + AttestationDueBPS primitives.BP `yaml:"ATTESTATION_DUE_BPS" spec:"true"` // AttestationDueBPS defines the attestation due time in basis points of the slot. + AggregrateDueBPS primitives.BP `yaml:"AGGREGRATE_DUE_BPS" spec:"true"` // AggregrateDueBPS defines the aggregate due time in basis points of the slot. + SyncMessageDueBPS primitives.BP `yaml:"SYNC_MESSAGE_DUE_BPS" spec:"true"` // SyncMessageDueBPS defines the sync message due time in basis points of the slot. + ContributionDueBPS primitives.BP `yaml:"CONTRIBUTION_DUE_BPS" spec:"true"` // ContributionDueBPS defines the contribution due time in basis points of the slot. // Ethereum PoW parameters. DepositChainID uint64 `yaml:"DEPOSIT_CHAIN_ID" spec:"true"` // DepositChainID of the eth1 network. This used for replay protection. @@ -221,7 +227,6 @@ type BeaconChainConfig struct { // Light client MinSyncCommitteeParticipants uint64 `yaml:"MIN_SYNC_COMMITTEE_PARTICIPANTS" spec:"true"` // MinSyncCommitteeParticipants defines the minimum amount of sync committee participants for which the light client acknowledges the signature. MaxRequestLightClientUpdates uint64 `yaml:"MAX_REQUEST_LIGHT_CLIENT_UPDATES" spec:"true"` // MaxRequestLightClientUpdates defines the maximum amount of light client updates that can be requested in a single request. - SyncMessageDueBPS uint64 `yaml:"SYNC_MESSAGE_DUE_BPS" spec:"true"` // SyncMessageDueBPS defines the due time for a sync message. // Bellatrix TerminalBlockHash common.Hash `yaml:"TERMINAL_BLOCK_HASH" spec:"true"` // TerminalBlockHash of beacon chain. @@ -741,10 +746,29 @@ func SlotsForEpochs(count primitives.Epoch, b *BeaconChainConfig) primitives.Slo // SlotsDuration returns the time duration of the given number of slots. func SlotsDuration(count primitives.Slot, b *BeaconChainConfig) time.Duration { - return time.Duration(count) * SecondsPerSlot(b) + return time.Duration(count) * b.SlotDuration() } // SecondsPerSlot returns the time duration of a single slot. 
func SecondsPerSlot(b *BeaconChainConfig) time.Duration { - return time.Duration(b.SecondsPerSlot) * time.Second + return b.SlotDuration() +} + +// SlotDuration returns the configured slot duration as a time.Duration. +func (b *BeaconChainConfig) SlotDuration() time.Duration { + return time.Duration(b.SlotDurationMillis()) * time.Millisecond +} + +// SlotDurationMillis returns the configured slot duration in milliseconds. +func (b *BeaconChainConfig) SlotDurationMillis() uint64 { + if b.SlotDurationMilliseconds > 0 { + return b.SlotDurationMilliseconds + } + return b.SecondsPerSlot * 1000 +} + +// SlotComponentDuration returns the duration representing the given portion (in basis points) of a slot. +func (b *BeaconChainConfig) SlotComponentDuration(bp primitives.BP) time.Duration { + ms := uint64(bp) * b.SlotDurationMillis() / uint64(BasisPoints) + return time.Duration(ms) * time.Millisecond } diff --git a/config/params/config_test.go b/config/params/config_test.go index 0968fef2b6..c7102955b1 100644 --- a/config/params/config_test.go +++ b/config/params/config_test.go @@ -6,6 +6,7 @@ import ( "math" "sync" "testing" + "time" "github.com/OffchainLabs/prysm/v7/config/params" "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" @@ -202,6 +203,127 @@ func fillGVR(value byte) [32]byte { return gvr } +func TestBeaconChainConfigSlotDuration(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + cfg params.BeaconChainConfig + want time.Duration + }{ + { + name: "explicit duration", + cfg: params.BeaconChainConfig{SlotDurationMilliseconds: 12_000}, + want: 12 * time.Second, + }, + { + name: "fallback to seconds per slot", + cfg: params.BeaconChainConfig{SecondsPerSlot: 8}, + want: 8 * time.Second, + }, + { + name: "milliseconds override seconds per slot", + cfg: params.BeaconChainConfig{ + SlotDurationMilliseconds: 7_000, + SecondsPerSlot: 4, + }, + want: 7 * time.Second, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + require.Equal(t, tt.want, tt.cfg.SlotDuration()) + }) + } +} + +func TestBeaconChainConfigSlotDurationMillis(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + cfg params.BeaconChainConfig + want uint64 + }{ + { + name: "uses slot duration milliseconds when set", + cfg: params.BeaconChainConfig{SlotDurationMilliseconds: 4_800}, + want: 4_800, + }, + { + name: "derives from seconds per slot when unset", + cfg: params.BeaconChainConfig{SecondsPerSlot: 6}, + want: 6_000, + }, + { + name: "returns zero when no duration configured", + cfg: params.BeaconChainConfig{}, + want: 0, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + require.Equal(t, tt.want, tt.cfg.SlotDurationMillis()) + }) + } +} + +func TestBeaconChainConfigSlotComponentDuration(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + cfg params.BeaconChainConfig + bp primitives.BP + want time.Duration + }{ + { + name: "zero basis points produces zero duration", + cfg: params.BeaconChainConfig{SlotDurationMilliseconds: 12_000}, + bp: 0, + want: 0, + }, + { + name: "full slot basis points matches slot duration", + cfg: params.BeaconChainConfig{SlotDurationMilliseconds: 12_000}, + bp: params.BasisPoints, + want: 12 * time.Second, + }, + { + name: "quarter slot with explicit milliseconds", + cfg: params.BeaconChainConfig{SlotDurationMilliseconds: 12_000}, + bp: params.BasisPoints / 4, + want: 3 * time.Second, + }, + { + name: "fractional slot rounds down", 
+ cfg: params.BeaconChainConfig{SlotDurationMilliseconds: 1_001}, + bp: params.BasisPoints / 3, + want: 333 * time.Millisecond, + }, + { + name: "uses seconds per slot fallback", + cfg: params.BeaconChainConfig{SecondsPerSlot: 9}, + bp: params.BasisPoints / 2, + want: 4500 * time.Millisecond, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + require.Equal(t, tt.want, tt.cfg.SlotComponentDuration(tt.bp)) + }) + } +} + func TestEntryWithForkDigest(t *testing.T) { var zero [32]byte one := fillGVR(byte(1)) diff --git a/config/params/configset_test.go b/config/params/configset_test.go index 773ef9ef73..0a724f1e8d 100644 --- a/config/params/configset_test.go +++ b/config/params/configset_test.go @@ -102,6 +102,7 @@ func compareConfigs(t *testing.T, expected, actual *BeaconChainConfig) { require.DeepEqual(t, expected.GenesisDelay, actual.GenesisDelay) require.DeepEqual(t, expected.MinAttestationInclusionDelay, actual.MinAttestationInclusionDelay) require.DeepEqual(t, expected.SecondsPerSlot, actual.SecondsPerSlot) + require.DeepEqual(t, expected.SlotDurationMilliseconds, actual.SlotDurationMilliseconds) require.DeepEqual(t, expected.SlotsPerEpoch, actual.SlotsPerEpoch) require.DeepEqual(t, expected.SqrRootSlotsPerEpoch, actual.SqrRootSlotsPerEpoch) require.DeepEqual(t, expected.MinSeedLookahead, actual.MinSeedLookahead) diff --git a/config/params/loader.go b/config/params/loader.go index 25fc18f237..767e17d17e 100644 --- a/config/params/loader.go +++ b/config/params/loader.go @@ -187,6 +187,7 @@ func ConfigToYaml(cfg *BeaconChainConfig) []byte { fmt.Sprintf("GENESIS_FORK_VERSION: %#x", cfg.GenesisForkVersion), fmt.Sprintf("CHURN_LIMIT_QUOTIENT: %d", cfg.ChurnLimitQuotient), fmt.Sprintf("SECONDS_PER_SLOT: %d", cfg.SecondsPerSlot), + fmt.Sprintf("SLOT_DURATION_MS: %d", cfg.SlotDurationMilliseconds), fmt.Sprintf("SLOTS_PER_EPOCH: %d", cfg.SlotsPerEpoch), fmt.Sprintf("SECONDS_PER_ETH1_BLOCK: %d", cfg.SecondsPerETH1Block), fmt.Sprintf("ETH1_FOLLOW_DISTANCE: %d", cfg.Eth1FollowDistance), @@ -241,6 +242,11 @@ func ConfigToYaml(cfg *BeaconChainConfig) []byte { fmt.Sprintf("MIN_EPOCHS_FOR_BLOCK_REQUESTS: %d", int(cfg.MinEpochsForBlockRequests)), fmt.Sprintf("MIN_PER_EPOCH_CHURN_LIMIT_ELECTRA: %d", cfg.MinPerEpochChurnLimitElectra), fmt.Sprintf("MAX_BLOBS_PER_BLOCK: %d", cfg.DeprecatedMaxBlobsPerBlock), + fmt.Sprintf("PROPOSER_REORG_CUTOFF_BPS: %d", cfg.ProposerReorgCutoffBPS), + fmt.Sprintf("ATTESTATION_DUE_BPS: %d", cfg.AttestationDueBPS), + fmt.Sprintf("AGGREGRATE_DUE_BPS: %d", cfg.AggregrateDueBPS), + fmt.Sprintf("SYNC_MESSAGE_DUE_BPS: %d", cfg.SyncMessageDueBPS), + fmt.Sprintf("CONTRIBUTION_DUE_BPS: %d", cfg.ContributionDueBPS), } if len(cfg.BlobSchedule) > 0 { diff --git a/config/params/loader_test.go b/config/params/loader_test.go index 2158702e4b..fd75ed892d 100644 --- a/config/params/loader_test.go +++ b/config/params/loader_test.go @@ -27,11 +27,9 @@ var placeholderFields = []string{ "AGGREGATE_DUE_BPS", "AGGREGATE_DUE_BPS_GLOAS", "ATTESTATION_DEADLINE", - "ATTESTATION_DUE_BPS", "ATTESTATION_DUE_BPS_GLOAS", "BLOB_SIDECAR_SUBNET_COUNT_FULU", "CELLS_PER_EXT_BLOB", - "CONTRIBUTION_DUE_BPS", "CONTRIBUTION_DUE_BPS_GLOAS", "EIP6110_FORK_EPOCH", "EIP6110_FORK_VERSION", @@ -60,10 +58,7 @@ var placeholderFields = []string{ "PAYLOAD_ATTESTATION_DUE_BPS", "PROPOSER_INCLUSION_LIST_CUTOFF", "PROPOSER_INCLUSION_LIST_CUTOFF_BPS", - "PROPOSER_REORG_CUTOFF_BPS", - "PROPOSER_SCORE_BOOST_EIP7732", "PROPOSER_SELECTION_GAP", - "SLOT_DURATION_MS", 
"SYNC_MESSAGE_DUE_BPS_GLOAS", "TARGET_NUMBER_OF_PEERS", "UPDATE_TIMEOUT", @@ -101,6 +96,10 @@ func assertEqualConfigs(t *testing.T, name string, fields []string, expected, ac assert.Equal(t, expected.HysteresisQuotient, actual.HysteresisQuotient, "%s: HysteresisQuotient", name) assert.Equal(t, expected.HysteresisDownwardMultiplier, actual.HysteresisDownwardMultiplier, "%s: HysteresisDownwardMultiplier", name) assert.Equal(t, expected.HysteresisUpwardMultiplier, actual.HysteresisUpwardMultiplier, "%s: HysteresisUpwardMultiplier", name) + assert.Equal(t, expected.AttestationDueBPS, actual.AttestationDueBPS, "%s: AttestationDueBPS", name) + assert.Equal(t, expected.AggregrateDueBPS, actual.AggregrateDueBPS, "%s: AggregrateDueBPS", name) + assert.Equal(t, expected.ContributionDueBPS, actual.ContributionDueBPS, "%s: ContributionDueBPS", name) + assert.Equal(t, expected.ProposerReorgCutoffBPS, actual.ProposerReorgCutoffBPS, "%s: ProposerReorgCutoffBPS", name) assert.Equal(t, expected.SyncMessageDueBPS, actual.SyncMessageDueBPS, "%s: SyncMessageDueBPS", name) // Validator params. @@ -129,6 +128,7 @@ func assertEqualConfigs(t *testing.T, name string, fields []string, expected, ac // Time parameters. assert.Equal(t, expected.GenesisDelay, actual.GenesisDelay, "%s: GenesisDelay", name) assert.Equal(t, expected.SecondsPerSlot, actual.SecondsPerSlot, "%s: SecondsPerSlot", name) + assert.Equal(t, expected.SlotDurationMilliseconds, actual.SlotDurationMilliseconds, "%s: SlotDurationMilliseconds", name) assert.Equal(t, expected.MinAttestationInclusionDelay, actual.MinAttestationInclusionDelay, "%s: MinAttestationInclusionDelay", name) assert.Equal(t, expected.SlotsPerEpoch, actual.SlotsPerEpoch, "%s: SlotsPerEpoch", name) assert.Equal(t, expected.MinSeedLookahead, actual.MinSeedLookahead, "%s: MinSeedLookahead", name) diff --git a/config/params/mainnet_config.go b/config/params/mainnet_config.go index 98ce3a0832..af6c47dbe1 100644 --- a/config/params/mainnet_config.go +++ b/config/params/mainnet_config.go @@ -5,6 +5,7 @@ import ( "time" fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" ) @@ -98,6 +99,7 @@ var mainnetBeaconConfig = &BeaconChainConfig{ // Time parameter constants. MinAttestationInclusionDelay: 1, SecondsPerSlot: 12, + SlotDurationMilliseconds: 12000, SlotsPerEpoch: 32, SqrRootSlotsPerEpoch: 5, MinSeedLookahead: 1, @@ -116,6 +118,13 @@ var mainnetBeaconConfig = &BeaconChainConfig{ ReorgMaxEpochsSinceFinalization: 2, IntervalsPerSlot: 3, + // Time-based protocol parameters. + ProposerReorgCutoffBPS: primitives.BP(1667), + AttestationDueBPS: primitives.BP(3333), + AggregrateDueBPS: primitives.BP(6667), + SyncMessageDueBPS: primitives.BP(3333), + ContributionDueBPS: primitives.BP(6667), + // Ethereum PoW parameters. DepositChainID: 1, // Chain ID of eth1 mainnet. DepositNetworkID: 1, // Network ID of eth1 mainnet. 
@@ -257,7 +266,6 @@ var mainnetBeaconConfig = &BeaconChainConfig{ // Light client MinSyncCommitteeParticipants: 1, MaxRequestLightClientUpdates: 128, - SyncMessageDueBPS: 3333, // Bellatrix TerminalBlockHashActivationEpoch: 18446744073709551615, diff --git a/config/params/minimal_config.go b/config/params/minimal_config.go index d030cffb29..36851d1258 100644 --- a/config/params/minimal_config.go +++ b/config/params/minimal_config.go @@ -34,6 +34,7 @@ func MinimalSpecConfig() *BeaconChainConfig { // Time parameters minimalConfig.SecondsPerSlot = 6 + minimalConfig.SlotDurationMilliseconds = 6000 minimalConfig.MinAttestationInclusionDelay = 1 minimalConfig.SlotsPerEpoch = 8 minimalConfig.SqrRootSlotsPerEpoch = 2 diff --git a/config/params/testdata/e2e_config.yaml b/config/params/testdata/e2e_config.yaml index c2c57c07b5..c8d0df45fa 100644 --- a/config/params/testdata/e2e_config.yaml +++ b/config/params/testdata/e2e_config.yaml @@ -56,6 +56,8 @@ FULU_FORK_EPOCH: 18446744073709551615 # --------------------------------------------------------------- # [customized] Faster for testing purposes SECONDS_PER_SLOT: 10 # Override for e2e tests +# 10000 milliseconds, 10 seconds +SLOT_DURATION_MS: 10000 # 14 (estimate from Eth1 mainnet) SECONDS_PER_ETH1_BLOCK: 2 # Override for e2e tests # [customized] faster time for withdrawals @@ -133,4 +135,4 @@ BLOB_SCHEDULE: MAX_BLOBS_PER_BLOCK: 6 # Electra - EPOCH: 14 - MAX_BLOBS_PER_BLOCK: 9 \ No newline at end of file + MAX_BLOBS_PER_BLOCK: 9 diff --git a/config/params/testnet_config_test.go b/config/params/testnet_config_test.go index 70ab4995d6..bdd0285016 100644 --- a/config/params/testnet_config_test.go +++ b/config/params/testnet_config_test.go @@ -58,6 +58,7 @@ func compareConfigs(t *testing.T, expected, actual *params.BeaconChainConfig) { require.DeepEqual(t, expected.GenesisDelay, actual.GenesisDelay) require.DeepEqual(t, expected.MinAttestationInclusionDelay, actual.MinAttestationInclusionDelay) require.DeepEqual(t, expected.SecondsPerSlot, actual.SecondsPerSlot) + require.DeepEqual(t, expected.SlotDurationMilliseconds, actual.SlotDurationMilliseconds) require.DeepEqual(t, expected.SlotsPerEpoch, actual.SlotsPerEpoch) require.DeepEqual(t, expected.SqrRootSlotsPerEpoch, actual.SqrRootSlotsPerEpoch) require.DeepEqual(t, expected.MinSeedLookahead, actual.MinSeedLookahead) diff --git a/config/params/testnet_e2e_config.go b/config/params/testnet_e2e_config.go index addc427ca2..ad5ce74a76 100644 --- a/config/params/testnet_e2e_config.go +++ b/config/params/testnet_e2e_config.go @@ -27,6 +27,7 @@ func E2ETestConfig() *BeaconChainConfig { // Time parameters. e2eConfig.SecondsPerSlot = 10 + e2eConfig.SlotDurationMilliseconds = 10000 e2eConfig.SlotsPerEpoch = 6 e2eConfig.SqrRootSlotsPerEpoch = 2 e2eConfig.SecondsPerETH1Block = 2 @@ -81,6 +82,7 @@ func E2EMainnetTestConfig() *BeaconChainConfig { // Time parameters. e2eConfig.SecondsPerSlot = 6 + e2eConfig.SlotDurationMilliseconds = 6000 e2eConfig.SqrRootSlotsPerEpoch = 5 e2eConfig.SecondsPerETH1Block = 2 e2eConfig.ShardCommitteePeriod = 4 diff --git a/time/slots/slottime.go b/time/slots/slottime.go index cfa4bd0adf..6d6bf2f814 100644 --- a/time/slots/slottime.go +++ b/time/slots/slottime.go @@ -40,14 +40,17 @@ func EpochsSinceGenesis(genesis time.Time) primitives.Epoch { // in milliseconds, useful for dividing values such as 1 second into // millisecond-based durations. 
func DivideSlotBy(timesPerSlot int64) time.Duration { - return time.Duration(int64(params.BeaconConfig().SecondsPerSlot*1000)/timesPerSlot) * time.Millisecond + if timesPerSlot == 0 { + return 0 + } + return params.BeaconConfig().SlotDuration() / time.Duration(timesPerSlot) } // MultiplySlotBy multiplies the SECONDS_PER_SLOT configuration // parameter by a specified number. It returns a value of time.Duration // in millisecond-based durations. func MultiplySlotBy(times int64) time.Duration { - return time.Duration(int64(params.BeaconConfig().SecondsPerSlot)*times) * time.Second + return params.BeaconConfig().SlotDuration() * time.Duration(times) } // AbsoluteValueSlotDifference between two slots. @@ -175,12 +178,11 @@ func VerifyTime(genesis time.Time, slot primitives.Slot, timeTolerance time.Dura // StartTime takes the given slot and genesis time to determine the start time of the slot. // This method returns an error if the product of the slot duration * slot overflows int64. func StartTime(genesis time.Time, slot primitives.Slot) (time.Time, error) { - _, err := slot.SafeMul(params.BeaconConfig().SecondsPerSlot) + ms, err := slot.SafeMul(params.BeaconConfig().SlotDurationMillis()) if err != nil { return time.Unix(0, 0), fmt.Errorf("slot (%d) is in the far distant future: %w", slot, err) } - sd := time.Second * time.Duration(params.BeaconConfig().SecondsPerSlot) * time.Duration(slot) - return genesis.Add(sd), nil + return genesis.Add(time.Duration(ms) * time.Millisecond), nil } // CurrentSlot returns the current slot as determined by the local clock and @@ -194,7 +196,7 @@ func At(genesis, tm time.Time) primitives.Slot { if tm.Before(genesis) { return 0 } - return primitives.Slot(tm.Sub(genesis) / time.Second / time.Duration(params.BeaconConfig().SecondsPerSlot)) + return primitives.Slot(tm.Sub(genesis) / params.BeaconConfig().SlotDuration()) } // Duration computes the span of time between two instants, represented as Slots. @@ -202,7 +204,7 @@ func Duration(start, end time.Time) primitives.Slot { if end.Before(start) { return 0 } - return primitives.Slot(uint64(end.Unix()-start.Unix()) / params.BeaconConfig().SecondsPerSlot) + return primitives.Slot((end.Sub(start)) / params.BeaconConfig().SlotDuration()) } // ValidateClock validates a provided slot against the local @@ -231,7 +233,7 @@ func RoundUpToNearestEpoch(slot primitives.Slot) primitives.Slot { // depending on the provided genesis and current slot. func VotingPeriodStartTime(genesis uint64, slot primitives.Slot) uint64 { slots := params.BeaconConfig().SlotsPerEpoch.Mul(uint64(params.BeaconConfig().EpochsPerEth1VotingPeriod)) - startTime := uint64((slot - slot.ModSlot(slots)).Mul(params.BeaconConfig().SecondsPerSlot)) + startTime := uint64((slot - slot.ModSlot(slots)).Mul(params.BeaconConfig().SlotDurationMillis())) / 1000 return genesis + startTime } @@ -267,7 +269,7 @@ func SyncCommitteePeriodStartEpoch(e primitives.Epoch) (primitives.Epoch, error) // given slot start time. This method returns an error if the timestamp happens // before the given slot start time. 
func SinceSlotStart(s primitives.Slot, genesis time.Time, timestamp time.Time) (time.Duration, error) { - limit := genesis.Add(time.Duration(uint64(s)*params.BeaconConfig().SecondsPerSlot) * time.Second) + limit := genesis.Add(time.Duration(uint64(s)) * params.BeaconConfig().SlotDuration()) if timestamp.Before(limit) { return 0, fmt.Errorf("could not compute seconds since slot %d start: invalid timestamp, got %s < want %s", s, timestamp, limit) } @@ -277,8 +279,8 @@ func SinceSlotStart(s primitives.Slot, genesis time.Time, timestamp time.Time) ( // WithinVotingWindow returns whether the current time is within the voting window // (eg. 4 seconds on mainnet) of the current slot. func WithinVotingWindow(genesis time.Time, slot primitives.Slot) bool { - votingWindow := params.BeaconConfig().SecondsPerSlot / params.BeaconConfig().IntervalsPerSlot - return time.Since(UnsafeStartTime(genesis, slot)) < time.Duration(votingWindow)*time.Second + votingWindow := params.BeaconConfig().SlotComponentDuration(params.BeaconConfig().AttestationDueBPS) + return time.Since(UnsafeStartTime(genesis, slot)) < votingWindow } // MaxSafeEpoch gives the largest epoch value that can be safely converted to a slot. @@ -307,9 +309,3 @@ func SecondsUntilNextEpochStart(genesis time.Time) (uint64, error) { }).Debugf("%d seconds until next epoch", waitTime) return waitTime, nil } - -// ComponentDuration calculates the duration of a slot component in milliseconds. -func ComponentDuration(component primitives.BP) time.Duration { - ms := (component * params.SlotBP()) / params.BasisPoints - return time.Duration(ms) * time.Millisecond -} diff --git a/validator/client/BUILD.bazel b/validator/client/BUILD.bazel index 63603c2eec..ea938b1ea6 100644 --- a/validator/client/BUILD.bazel +++ b/validator/client/BUILD.bazel @@ -17,6 +17,7 @@ go_library( "sync_committee.go", "validator.go", "wait_for_activation.go", + "wait_helpers.go", ], importpath = "github.com/OffchainLabs/prysm/v7/validator/client", visibility = [ @@ -115,6 +116,7 @@ go_test( "sync_committee_test.go", "validator_test.go", "wait_for_activation_test.go", + "wait_helpers_test.go", ], data = [ "@eip3076_spec_tests//:test_data", diff --git a/validator/client/aggregate.go b/validator/client/aggregate.go index 24cc34e3cd..a261a86209 100644 --- a/validator/client/aggregate.go +++ b/validator/client/aggregate.go @@ -4,20 +4,17 @@ import ( "context" "fmt" "net/http" - "time" "github.com/OffchainLabs/prysm/v7/beacon-chain/core/signing" fieldparams "github.com/OffchainLabs/prysm/v7/config/fieldparams" "github.com/OffchainLabs/prysm/v7/config/params" "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" "github.com/OffchainLabs/prysm/v7/crypto/bls" - "github.com/OffchainLabs/prysm/v7/monitoring/tracing" "github.com/OffchainLabs/prysm/v7/monitoring/tracing/trace" "github.com/OffchainLabs/prysm/v7/network/httputil" ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" validatorpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1/validator-client" "github.com/OffchainLabs/prysm/v7/runtime/version" - prysmTime "github.com/OffchainLabs/prysm/v7/time" "github.com/OffchainLabs/prysm/v7/time/slots" "github.com/pkg/errors" "google.golang.org/grpc/codes" @@ -210,32 +207,7 @@ func (v *validator) signSlotWithSelectionProof(ctx context.Context, pubKey [fiel // such that any attestations from this slot have time to reach the beacon node // before creating the aggregated attestation. 
func (v *validator) waitToSlotTwoThirds(ctx context.Context, slot primitives.Slot) { - ctx, span := trace.StartSpan(ctx, "validator.waitToSlotTwoThirds") - defer span.End() - - oneThird := slots.DivideSlotBy(3 /* one third of slot duration */) - twoThird := oneThird + oneThird - delay := twoThird - - startTime, err := slots.StartTime(v.genesisTime, slot) - if err != nil { - log.WithError(err).WithField("slot", slot).Error("Slot overflows, unable to wait for slot two thirds!") - return - } - finalTime := startTime.Add(delay) - wait := prysmTime.Until(finalTime) - if wait <= 0 { - return - } - t := time.NewTimer(wait) - defer t.Stop() - select { - case <-ctx.Done(): - tracing.AnnotateError(span, ctx.Err()) - return - case <-t.C: - return - } + v.waitUntilSlotComponent(ctx, slot, params.BeaconConfig().AggregrateDueBPS) } // This returns the signature of validator signing over aggregate and diff --git a/validator/client/aggregate_test.go b/validator/client/aggregate_test.go index a59d72bcf4..f50b2fefa4 100644 --- a/validator/client/aggregate_test.go +++ b/validator/client/aggregate_test.go @@ -16,7 +16,6 @@ import ( "github.com/OffchainLabs/prysm/v7/testing/assert" "github.com/OffchainLabs/prysm/v7/testing/require" "github.com/OffchainLabs/prysm/v7/testing/util" - "github.com/OffchainLabs/prysm/v7/time/slots" "github.com/OffchainLabs/prysm/v7/validator/client/iface" logTest "github.com/sirupsen/logrus/hooks/test" "go.uber.org/mock/gomock" @@ -256,9 +255,9 @@ func TestWaitForSlotTwoThird_WaitCorrectly(t *testing.T) { defer finish() currentTime := time.Now() numOfSlots := primitives.Slot(4) - validator.genesisTime = currentTime.Add(-1 * time.Duration(numOfSlots.Mul(params.BeaconConfig().SecondsPerSlot)) * time.Second) - oneThird := slots.DivideSlotBy(3 /* one third of slot duration */) - timeToSleep := oneThird + oneThird + slotDuration := params.BeaconConfig().SlotDuration() + validator.genesisTime = currentTime.Add(-slotDuration * time.Duration(numOfSlots)) + timeToSleep := params.BeaconConfig().SlotComponentDuration(params.BeaconConfig().AggregrateDueBPS) twoThirdTime := currentTime.Add(timeToSleep) validator.waitToSlotTwoThirds(t.Context(), numOfSlots) @@ -275,7 +274,8 @@ func TestWaitForSlotTwoThird_DoneContext_ReturnsImmediately(t *testing.T) { defer finish() currentTime := time.Now() numOfSlots := primitives.Slot(4) - validator.genesisTime = currentTime.Add(-1 * time.Duration(numOfSlots.Mul(params.BeaconConfig().SecondsPerSlot)) * time.Second) + slotDuration := params.BeaconConfig().SlotDuration() + validator.genesisTime = currentTime.Add(-slotDuration * time.Duration(numOfSlots)) expectedTime := time.Now() ctx, cancel := context.WithCancel(t.Context()) diff --git a/validator/client/attest.go b/validator/client/attest.go index 8d8aed684e..df35f8d712 100644 --- a/validator/client/attest.go +++ b/validator/client/attest.go @@ -280,13 +280,11 @@ func (v *validator) waitOneThirdOrValidBlock(ctx context.Context, slot primitive return } - delay := slots.DivideSlotBy(3 /* a third of the slot duration */) - startTime, err := slots.StartTime(v.genesisTime, slot) + finalTime, err := v.slotComponentDeadline(slot, params.BeaconConfig().AttestationDueBPS) if err != nil { - log.WithError(err).WithField("slot", slot).Error("Slot overflows, unable to wait for slot two thirds!") + log.WithError(err).WithField("slot", slot).Error("Slot overflows, unable to wait for attestation deadline") return } - finalTime := startTime.Add(delay) wait := prysmTime.Until(finalTime) if wait <= 0 { return diff --git 
a/validator/client/runner_test.go b/validator/client/runner_test.go index 5915a23397..76fc64afea 100644 --- a/validator/client/runner_test.go +++ b/validator/client/runner_test.go @@ -373,7 +373,7 @@ func TestRunnerPushesProposerSettings_ValidContext(t *testing.T) { logrus.SetOutput(tlogger{t}) cfg := params.BeaconConfig() - cfg.SecondsPerSlot = 1 + cfg.SlotDurationMilliseconds = 1000 params.SetActiveTestCleanup(t, cfg) timedCtx, cancel := context.WithTimeout(t.Context(), 1*time.Minute) diff --git a/validator/client/sync_committee.go b/validator/client/sync_committee.go index b82a9fcd7e..6c1a761726 100644 --- a/validator/client/sync_committee.go +++ b/validator/client/sync_committee.go @@ -127,7 +127,7 @@ func (v *validator) SubmitSignedContributionAndProof(ctx context.Context, slot p return } - v.waitToSlotTwoThirds(ctx, slot) + v.waitUntilSlotComponent(ctx, slot, params.BeaconConfig().ContributionDueBPS) coveredSubnets := make(map[uint64]bool) for i, comIdx := range indexRes.Indices { diff --git a/validator/client/wait_for_activation_test.go b/validator/client/wait_for_activation_test.go index 10a894ebfb..3311102f14 100644 --- a/validator/client/wait_for_activation_test.go +++ b/validator/client/wait_for_activation_test.go @@ -54,7 +54,7 @@ func TestWaitForActivation_RefetchKeys(t *testing.T) { params.SetupTestConfigCleanup(t) cfg := params.MainnetConfig() cfg.ConfigName = "test" - cfg.SecondsPerSlot = 1 + cfg.SlotDurationMilliseconds = 1000 params.OverrideBeaconConfig(cfg) hook := logTest.NewGlobal() ctrl := gomock.NewController(t) @@ -247,7 +247,7 @@ func TestWaitForActivation_AttemptsReconnectionOnFailure(t *testing.T) { params.SetupTestConfigCleanup(t) cfg := params.MainnetConfig() cfg.ConfigName = "test" - cfg.SecondsPerSlot = 1 + cfg.SlotDurationMilliseconds = 1000 params.OverrideBeaconConfig(cfg) ctrl := gomock.NewController(t) defer ctrl.Finish() diff --git a/validator/client/wait_helpers.go b/validator/client/wait_helpers.go new file mode 100644 index 0000000000..d0e7696533 --- /dev/null +++ b/validator/client/wait_helpers.go @@ -0,0 +1,65 @@ +package client + +import ( + "context" + "time" + + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing" + "github.com/OffchainLabs/prysm/v7/monitoring/tracing/trace" + prysmTime "github.com/OffchainLabs/prysm/v7/time" + "github.com/OffchainLabs/prysm/v7/time/slots" +) + +// slotComponentDeadline returns the absolute time corresponding to the provided slot component. 
+func (v *validator) slotComponentDeadline(slot primitives.Slot, component primitives.BP) (time.Time, error) { + startTime, err := slots.StartTime(v.genesisTime, slot) + if err != nil { + return time.Time{}, err + } + delay := params.BeaconConfig().SlotComponentDuration(component) + return startTime.Add(delay), nil +} + +func (v *validator) waitUntilSlotComponent(ctx context.Context, slot primitives.Slot, component primitives.BP) { + ctx, span := trace.StartSpan(ctx, v.slotComponentSpanName(component)) + defer span.End() + + finalTime, err := v.slotComponentDeadline(slot, component) + if err != nil { + log.WithError(err).WithField("slot", slot).Error("Slot overflows, unable to wait for slot component deadline") + return + } + wait := prysmTime.Until(finalTime) + if wait <= 0 { + return + } + t := time.NewTimer(wait) + defer t.Stop() + select { + case <-ctx.Done(): + tracing.AnnotateError(span, ctx.Err()) + return + case <-t.C: + return + } +} + +func (v *validator) slotComponentSpanName(component primitives.BP) string { + cfg := params.BeaconConfig() + switch component { + case cfg.AttestationDueBPS: + return "validator.waitAttestationWindow" + case cfg.AggregrateDueBPS: + return "validator.waitAggregateWindow" + case cfg.SyncMessageDueBPS: + return "validator.waitSyncMessageWindow" + case cfg.ContributionDueBPS: + return "validator.waitContributionWindow" + case cfg.ProposerReorgCutoffBPS: + return "validator.waitProposerReorgWindow" + default: + return "validator.waitSlotComponent" + } +} diff --git a/validator/client/wait_helpers_test.go b/validator/client/wait_helpers_test.go new file mode 100644 index 0000000000..e62ac053e4 --- /dev/null +++ b/validator/client/wait_helpers_test.go @@ -0,0 +1,87 @@ +package client + +import ( + "context" + "testing" + "time" + + "github.com/OffchainLabs/prysm/v7/config/params" + "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" + "github.com/OffchainLabs/prysm/v7/testing/assert" + "github.com/OffchainLabs/prysm/v7/testing/require" + "github.com/OffchainLabs/prysm/v7/time/slots" +) + +func TestSlotComponentDeadline(t *testing.T) { + params.SetupTestConfigCleanup(t) + + cfg := params.BeaconConfig() + v := &validator{genesisTime: time.Unix(1700000000, 0)} + slot := primitives.Slot(5) + component := cfg.AttestationDueBPS + + got, err := v.slotComponentDeadline(slot, component) + require.NoError(t, err) + + startTime, err := slots.StartTime(v.genesisTime, slot) + require.NoError(t, err) + expected := startTime.Add(cfg.SlotComponentDuration(component)) + + require.Equal(t, expected, got) +} + +func TestSlotComponentSpanName(t *testing.T) { + params.SetupTestConfigCleanup(t) + + cfg := params.BeaconConfig() + v := &validator{} + tests := []struct { + name string + component primitives.BP + expected string + }{ + { + name: "attestation", + component: cfg.AttestationDueBPS, + expected: "validator.waitAttestationWindow", + }, + { + name: "aggregate", + component: cfg.AggregrateDueBPS, + expected: "validator.waitAggregateWindow", + }, + { + name: "default", + component: cfg.AttestationDueBPS + 7, + expected: "validator.waitSlotComponent", + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + assert.Equal(t, tt.expected, v.slotComponentSpanName(tt.component)) + }) + } +} + +func TestWaitUntilSlotComponent_ContextCancelReturnsImmediately(t *testing.T) { + params.SetupTestConfigCleanup(t) + cfg := params.BeaconConfig().Copy() + cfg.SlotDurationMilliseconds = 10000 + params.OverrideBeaconConfig(cfg) + + v := &validator{genesisTime: 
time.Now()} + ctx, cancel := context.WithCancel(context.Background()) + cancel() + + done := make(chan struct{}) + go func() { + v.waitUntilSlotComponent(ctx, 1, cfg.AttestationDueBPS) + close(done) + }() + + select { + case <-done: + case <-time.After(2 * time.Second): + t.Fatal("waitUntilSlotComponent did not return after context cancellation") + } +} From 2fd6bd81509d972ced02565fff12286b918ce1fc Mon Sep 17 00:00:00 2001 From: Preston Van Loon Date: Thu, 13 Nov 2025 19:27:22 -0600 Subject: [PATCH 098/103] Add golang.org/x/tools modernize static analyzer and fix violations (#15946) * Ran gopls modernize to fix everything go run golang.org/x/tools/gopls/internal/analysis/modernize/cmd/modernize@latest -fix -test ./... * Override rules_go provided dependency for golang.org/x/tools to v0.38.0. To update this, checked out rules_go, then ran `bazel run //go/tools/releaser -- upgrade-dep -mirror=false org_golang_x_tools` and copied the patches. * Fix buildtag violations and ignore buildtag violations in external * Introduce modernize analyzer package. * Add modernize "any" analyzer. * Fix violations of any analyzer * Add modernize "appendclipped" analyzer. * Fix violations of appendclipped * Add modernize "bloop" analyzer. * Add modernize "fmtappendf" analyzer. * Add modernize "forvar" analyzer. * Add modernize "mapsloop" analyzer. * Add modernize "minmax" analyzer. * Fix violations of minmax analyzer * Add modernize "omitzero" analyzer. * Add modernize "rangeint" analyzer. * Fix violations of rangeint. * Add modernize "reflecttypefor" analyzer. * Fix violations of reflecttypefor analyzer. * Add modernize "slicescontains" analyzer. * Add modernize "slicessort" analyzer. * Add modernize "slicesdelete" analyzer. This is disabled by default for now. See https://go.dev/issue/73686. * Add modernize "stringscutprefix" analyzer. * Add modernize "stringsbuilder" analyzer. * Fix violations of stringsbuilder analyzer. * Add modernize "stringsseq" analyzer. * Add modernize "testingcontext" analyzer. * Add modernize "waitgroup" analyzer. * Changelog fragment * gofmt * gazelle * Add modernize "newexpr" analyzer. 
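As a concrete illustration of the kind of rewrites these analyzers enforce, here is a minimal
hypothetical before/after sketch (names are illustrative only, not code from this patch):

    package modernizedemo

    import "strings"

    // Before: empty interface, counted loop, strings.Split.
    func before(csv string) []interface{} {
        var out []interface{}
        for _, f := range strings.Split(csv, ",") {
            out = append(out, f)
        }
        for i := 0; i < 3; i++ {
            out = append(out, i)
        }
        return out
    }

    // After: any, strings.SplitSeq (Go 1.24+), range-over-int (Go 1.22+).
    func after(csv string) []any {
        var out []any
        for f := range strings.SplitSeq(csv, ",") {
            out = append(out, f)
        }
        for i := range 3 {
            out = append(out, i)
        }
        return out
    }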
* Disable newexpr until go1.26 * Add more details in WORKSPACE on how to update the override * @nalepae feedback on min() * gofmt * Fix violations of forvar --- BUILD.bazel | 19 + WORKSPACE | 20 + api/apiutil/header.go | 2 +- api/client/builder/client.go | 2 +- api/client/builder/types.go | 2 +- api/grpc/grpcutils.go | 2 +- api/server/structs/endpoints_config.go | 2 +- async/abool/abool_test.go | 52 +- async/benchmark_test.go | 10 +- async/debounce.go | 2 +- async/debounce_test.go | 20 +- async/event/example_scope_test.go | 6 +- async/event/example_subscription_test.go | 2 +- async/event/interface.go | 4 +- async/event/subscription_test.go | 4 +- async/multilock_test.go | 6 +- async/scatter.go | 4 +- async/scatter_test.go | 12 +- .../blockchain/kzg/validation_test.go | 10 +- .../blockchain/process_attestation_test.go | 8 +- beacon-chain/blockchain/process_block.go | 2 +- beacon-chain/blockchain/process_block_test.go | 8 +- beacon-chain/blockchain/receive_block_test.go | 6 +- beacon-chain/blockchain/service_test.go | 6 +- beacon-chain/blockchain/testing/mock.go | 4 +- beacon-chain/builder/service.go | 2 +- beacon-chain/cache/active_balance_test.go | 4 +- beacon-chain/cache/committee.go | 2 +- beacon-chain/cache/committee_fuzz_test.go | 6 +- beacon-chain/cache/common.go | 2 +- .../depositsnapshot/deposit_cache_test.go | 18 +- .../depositsnapshot/deposit_tree_snapshot.go | 2 +- beacon-chain/cache/skip_slot_cache_test.go | 6 +- beacon-chain/cache/sync_committee.go | 2 +- beacon-chain/cache/sync_subnet_ids_test.go | 8 +- beacon-chain/core/altair/attestation_test.go | 2 +- beacon-chain/core/altair/block_test.go | 2 +- beacon-chain/core/altair/deposit.go | 5 +- beacon-chain/core/altair/deposit_fuzz_test.go | 10 +- beacon-chain/core/altair/epoch_precompute.go | 7 +- .../core/altair/sync_committee_test.go | 8 +- beacon-chain/core/altair/upgrade_test.go | 2 +- beacon-chain/core/blocks/attestation.go | 2 +- beacon-chain/core/blocks/attestation_test.go | 14 +- .../core/blocks/block_operations_fuzz_test.go | 42 +- beacon-chain/core/blocks/eth1_data_test.go | 4 +- beacon-chain/core/blocks/header_test.go | 12 +- beacon-chain/core/blocks/payload_test.go | 3 +- beacon-chain/core/electra/churn_test.go | 2 +- .../core/electra/deposit_fuzz_test.go | 4 +- beacon-chain/core/electra/deposits_test.go | 8 +- .../core/electra/registry_updates_test.go | 8 +- beacon-chain/core/epoch/epoch_processing.go | 5 +- .../core/epoch/epoch_processing_fuzz_test.go | 2 +- .../core/epoch/epoch_processing_test.go | 10 +- .../justification_finalization_test.go | 8 +- .../core/epoch/precompute/reward_penalty.go | 2 +- .../epoch/precompute/reward_penalty_test.go | 16 +- beacon-chain/core/feed/event.go | 2 +- beacon-chain/core/helpers/attestation_test.go | 2 +- beacon-chain/core/helpers/beacon_committee.go | 6 +- .../core/helpers/beacon_committee_test.go | 52 +- beacon-chain/core/helpers/randao_test.go | 6 +- .../core/helpers/rewards_penalties_test.go | 8 +- beacon-chain/core/helpers/shuffle.go | 7 +- beacon-chain/core/helpers/shuffle_test.go | 14 +- .../core/helpers/sync_committee_test.go | 28 +- beacon-chain/core/helpers/validators_test.go | 12 +- .../core/helpers/weak_subjectivity_test.go | 2 +- .../core/peerdas/p2p_interface_test.go | 2 +- beacon-chain/core/peerdas/validator.go | 2 +- .../core/signing/signing_root_test.go | 2 +- .../core/transition/benchmarks_test.go | 25 +- .../core/transition/skip_slot_cache_test.go | 2 +- .../core/transition/state-bellatrix.go | 10 +- beacon-chain/core/transition/state.go | 8 +- 
.../core/transition/state_fuzz_test.go | 6 +- .../core/transition/transition_fuzz_test.go | 18 +- .../core/transition/transition_test.go | 9 +- beacon-chain/core/validators/validator.go | 4 +- .../core/validators/validator_test.go | 4 +- beacon-chain/das/availability_blobs_test.go | 2 +- beacon-chain/das/blob_cache_test.go | 2 +- beacon-chain/db/filesystem/blob_test.go | 8 +- beacon-chain/db/filesystem/cache.go | 2 +- beacon-chain/db/filters/filter.go | 6 +- beacon-chain/db/kv/blocks.go | 6 +- beacon-chain/db/kv/blocks_test.go | 16 +- beacon-chain/db/kv/encoding.go | 2 +- .../db/kv/finalized_block_roots_test.go | 2 +- beacon-chain/db/kv/lightclient_test.go | 14 +- beacon-chain/db/kv/state.go | 2 +- beacon-chain/db/kv/state_test.go | 20 +- beacon-chain/db/kv/utils_test.go | 2 +- .../db/kv/validated_checkpoint_test.go | 3 +- beacon-chain/db/pruner/pruner.go | 2 +- beacon-chain/db/slasherkv/pruning_test.go | 8 +- beacon-chain/db/slasherkv/slasher.go | 8 +- beacon-chain/db/slasherkv/slasher_test.go | 42 +- beacon-chain/execution/block_cache.go | 6 +- beacon-chain/execution/block_reader.go | 2 +- beacon-chain/execution/block_reader_test.go | 6 +- beacon-chain/execution/engine_client.go | 2 +- .../execution/engine_client_fuzz_test.go | 25 +- beacon-chain/execution/engine_client_test.go | 70 +- beacon-chain/execution/log_processing_test.go | 12 +- beacon-chain/execution/mock_test.go | 12 +- beacon-chain/execution/payload_body.go | 2 +- beacon-chain/execution/service.go | 10 +- beacon-chain/execution/service_test.go | 16 +- .../execution/testing/mock_execution_chain.go | 2 +- .../doubly-linked-tree/proposer_boost_test.go | 4 +- beacon-chain/light-client/helpers.go | 4 +- beacon-chain/light-client/lightclient.go | 4 +- beacon-chain/light-client/lightclient_test.go | 8 +- beacon-chain/light-client/store.go | 5 +- beacon-chain/light-client/store_test.go | 8 +- beacon-chain/monitor/service.go | 4 +- beacon-chain/monitor/service_test.go | 6 +- beacon-chain/node/config_test.go | 4 +- .../operations/attestations/kv/aggregated.go | 2 +- beacon-chain/operations/blstoexec/pool.go | 5 +- .../operations/blstoexec/pool_test.go | 6 +- .../slashings/service_attester_test.go | 12 +- .../slashings/service_proposer_test.go | 8 +- .../operations/voluntaryexits/pool_test.go | 2 +- beacon-chain/p2p/BUILD.bazel | 1 + beacon-chain/p2p/broadcaster_test.go | 12 +- beacon-chain/p2p/connection_gater_test.go | 4 +- beacon-chain/p2p/discovery_test.go | 14 +- beacon-chain/p2p/encoder/varint.go | 2 +- beacon-chain/p2p/encoder/varint_test.go | 2 +- beacon-chain/p2p/gossip_scoring_params.go | 4 +- .../p2p/gossip_scoring_params_test.go | 4 +- beacon-chain/p2p/gossip_topic_mappings.go | 32 +- .../p2p/gossip_topic_mappings_test.go | 3 +- beacon-chain/p2p/interfaces.go | 2 +- beacon-chain/p2p/peers/assigner.go | 5 +- beacon-chain/p2p/peers/assigner_test.go | 3 +- beacon-chain/p2p/peers/benchmark_test.go | 4 +- .../p2p/peers/scorers/bad_responses_test.go | 12 +- .../p2p/peers/scorers/block_providers.go | 2 +- .../p2p/peers/scorers/block_providers_test.go | 4 +- beacon-chain/p2p/peers/status.go | 12 +- beacon-chain/p2p/peers/status_test.go | 40 +- beacon-chain/p2p/pubsub_filter.go | 4 +- beacon-chain/p2p/pubsub_filter_test.go | 2 +- beacon-chain/p2p/pubsub_test.go | 6 +- beacon-chain/p2p/rpc_topic_mappings.go | 14 +- beacon-chain/p2p/rpc_topic_mappings_test.go | 2 +- beacon-chain/p2p/sender.go | 2 +- beacon-chain/p2p/service_test.go | 2 +- beacon-chain/p2p/subnets.go | 11 +- beacon-chain/p2p/subnets_test.go | 2 +- 
beacon-chain/p2p/testing/fuzz_p2p.go | 2 +- beacon-chain/p2p/testing/p2p.go | 2 +- beacon-chain/p2p/types/types.go | 6 +- beacon-chain/p2p/types/types_test.go | 8 +- beacon-chain/rpc/core/validator.go | 10 +- beacon-chain/rpc/core/validator_test.go | 4 +- beacon-chain/rpc/eth/beacon/handlers.go | 4 +- beacon-chain/rpc/eth/beacon/handlers_pool.go | 11 +- beacon-chain/rpc/eth/beacon/handlers_state.go | 2 +- .../rpc/eth/beacon/handlers_state_test.go | 15 +- beacon-chain/rpc/eth/beacon/handlers_test.go | 24 +- .../eth/beacon/handlers_validators_test.go | 6 +- beacon-chain/rpc/eth/config/handlers.go | 14 +- beacon-chain/rpc/eth/config/handlers_test.go | 14 +- beacon-chain/rpc/eth/debug/handlers.go | 2 +- beacon-chain/rpc/eth/events/events_test.go | 2 +- .../rpc/eth/light-client/handlers_test.go | 14 +- .../rpc/eth/node/handlers_peers_test.go | 3 +- beacon-chain/rpc/eth/rewards/handlers.go | 2 +- beacon-chain/rpc/eth/rewards/handlers_test.go | 26 +- beacon-chain/rpc/eth/validator/handlers.go | 5 +- .../rpc/eth/validator/handlers_test.go | 18 +- beacon-chain/rpc/lookup/blocker_test.go | 6 +- beacon-chain/rpc/prysm/node/handlers_test.go | 2 +- .../prysm/v1alpha1/beacon/assignments_test.go | 8 +- .../rpc/prysm/v1alpha1/beacon/attestations.go | 2 +- .../v1alpha1/beacon/attestations_test.go | 22 +- .../rpc/prysm/v1alpha1/beacon/blocks_test.go | 4 +- .../prysm/v1alpha1/beacon/committees_test.go | 4 +- .../rpc/prysm/v1alpha1/beacon/config.go | 2 +- .../rpc/prysm/v1alpha1/beacon/config_test.go | 4 +- .../prysm/v1alpha1/beacon/validators_test.go | 28 +- .../prysm/v1alpha1/validator/attester_test.go | 9 +- .../prysm/v1alpha1/validator/blocks_test.go | 16 +- .../prysm/v1alpha1/validator/duties_test.go | 16 +- .../v1alpha1/validator/duties_v2_test.go | 10 +- .../rpc/prysm/v1alpha1/validator/proposer.go | 6 +- .../v1alpha1/validator/proposer_altair.go | 8 +- .../validator/proposer_altair_test.go | 4 +- .../validator/proposer_attestations.go | 2 +- .../validator/proposer_attestations_test.go | 9 +- .../validator/proposer_deneb_bench_test.go | 40 +- .../prysm/v1alpha1/validator/proposer_test.go | 16 +- .../validator/proposer_utils_bench_test.go | 2 +- .../prysm/v1alpha1/validator/server_test.go | 6 +- .../prysm/v1alpha1/validator/status_test.go | 12 +- .../rpc/prysm/validator/handlers_test.go | 6 +- .../validator/validator_performance_test.go | 2 +- beacon-chain/rpc/service.go | 6 +- beacon-chain/rpc/testutil/db.go | 2 +- beacon-chain/slasher/chunks.go | 4 +- beacon-chain/slasher/detect_attestations.go | 18 +- .../slasher/detect_attestations_test.go | 19 +- beacon-chain/slasher/service.go | 2 +- beacon-chain/state/fieldtrie/field_trie.go | 10 +- .../state/fieldtrie/field_trie_helpers.go | 14 +- beacon-chain/state/fieldtrie/helpers_test.go | 2 +- beacon-chain/state/interfaces.go | 4 +- .../state/state-native/getters_attestation.go | 4 +- .../state/state-native/getters_eth1.go | 2 +- .../getters_participation_test.go | 2 +- .../state/state-native/getters_state.go | 18 +- .../state/state-native/getters_validator.go | 8 +- .../state/state-native/getters_withdrawal.go | 2 +- .../state-native/getters_withdrawal_test.go | 4 +- .../state-native/setters_attestation_test.go | 14 +- .../state/state-native/setters_eth1_test.go | 2 +- .../state/state-native/setters_misc_test.go | 8 +- .../setters_participation_test.go | 3 +- .../setters_payload_header_test.go | 4 +- .../state/state-native/setters_validator.go | 2 +- .../state-native/setters_validator_test.go | 6 +- beacon-chain/state/state-native/state_test.go | 60 +- 
beacon-chain/state/state-native/state_trie.go | 10 +- beacon-chain/state/state-native/types_test.go | 32 +- .../stategen/epoch_boundary_state_cache.go | 6 +- beacon-chain/state/stategen/history.go | 2 +- beacon-chain/state/stategen/history_test.go | 6 +- beacon-chain/state/stategen/migrate.go | 2 +- .../state/stateutil/benchmark_test.go | 5 +- beacon-chain/state/stateutil/eth1_root.go | 4 +- .../state/stateutil/field_root_attestation.go | 2 +- .../state/stateutil/field_root_validator.go | 4 +- .../stateutil/field_root_validator_test.go | 6 +- .../state/stateutil/participation_bit_root.go | 10 +- .../state/stateutil/reference_bench_test.go | 2 +- .../state/stateutil/state_root_test.go | 20 +- beacon-chain/state/stateutil/trie_helpers.go | 2 +- .../state/stateutil/trie_helpers_test.go | 14 +- .../stateutil/unrealized_justification.go | 2 +- .../unrealized_justification_test.go | 4 +- .../state/stateutil/validator_root_test.go | 4 +- beacon-chain/sync/backfill/batch.go | 2 +- beacon-chain/sync/backfill/batcher_test.go | 2 +- beacon-chain/sync/backfill/blobs_test.go | 2 +- beacon-chain/sync/backfill/pool.go | 2 +- beacon-chain/sync/backfill/pool_test.go | 2 +- beacon-chain/sync/backfill/service.go | 5 +- beacon-chain/sync/backfill/service_test.go | 4 +- beacon-chain/sync/backfill/verify_test.go | 2 +- beacon-chain/sync/batch_verifier.go | 2 +- beacon-chain/sync/blobs_test.go | 4 +- beacon-chain/sync/block_batcher.go | 2 +- beacon-chain/sync/block_batcher_test.go | 4 +- .../sync/checkpoint/weak-subjectivity_test.go | 8 +- beacon-chain/sync/data_column_sidecars.go | 17 +- beacon-chain/sync/data_columns_reconstruct.go | 2 +- beacon-chain/sync/decode_pubsub.go | 8 +- beacon-chain/sync/decode_pubsub_test.go | 4 +- beacon-chain/sync/fork_watcher_test.go | 6 +- .../sync/initial-sync/blocks_fetcher.go | 6 +- .../sync/initial-sync/blocks_fetcher_peers.go | 10 +- .../initial-sync/blocks_fetcher_peers_test.go | 2 +- .../sync/initial-sync/blocks_fetcher_test.go | 2 +- .../sync/initial-sync/blocks_fetcher_utils.go | 2 +- .../initial-sync/blocks_fetcher_utils_test.go | 8 +- .../sync/initial-sync/blocks_queue.go | 10 +- .../sync/initial-sync/blocks_queue_test.go | 4 +- beacon-chain/sync/initial-sync/fsm.go | 10 +- .../sync/initial-sync/fsm_benchmark_test.go | 5 +- beacon-chain/sync/initial-sync/fsm_test.go | 18 +- .../sync/initial-sync/initial_sync_test.go | 4 +- beacon-chain/sync/initial-sync/service.go | 5 +- .../sync/initial-sync/service_test.go | 24 +- beacon-chain/sync/kzg_batch_verifier_test.go | 18 +- beacon-chain/sync/metrics.go | 4 +- .../sync/pending_attestations_queue_test.go | 22 +- beacon-chain/sync/pending_blocks_queue.go | 6 +- beacon-chain/sync/rate_limiter_test.go | 4 +- beacon-chain/sync/rpc.go | 2 +- .../sync/rpc_beacon_blocks_by_range.go | 7 +- .../sync/rpc_beacon_blocks_by_root.go | 2 +- .../sync/rpc_blob_sidecars_by_range.go | 7 +- .../sync/rpc_blob_sidecars_by_range_test.go | 17 +- .../sync/rpc_blob_sidecars_by_root.go | 2 +- .../sync/rpc_blob_sidecars_by_root_test.go | 6 +- .../sync/rpc_data_column_sidecars_by_range.go | 2 +- .../sync/rpc_data_column_sidecars_by_root.go | 2 +- beacon-chain/sync/rpc_goodbye.go | 2 +- beacon-chain/sync/rpc_handler_test.go | 2 +- beacon-chain/sync/rpc_light_client.go | 8 +- beacon-chain/sync/rpc_light_client_test.go | 2 +- beacon-chain/sync/rpc_metadata.go | 4 +- beacon-chain/sync/rpc_metadata_test.go | 8 +- beacon-chain/sync/rpc_ping.go | 2 +- beacon-chain/sync/rpc_send_request.go | 2 +- beacon-chain/sync/rpc_send_request_test.go | 13 +- 
beacon-chain/sync/rpc_status.go | 6 +- beacon-chain/sync/rpc_test.go | 4 +- beacon-chain/sync/service.go | 2 +- beacon-chain/sync/service_test.go | 4 +- beacon-chain/sync/slot_aware_cache.go | 4 +- beacon-chain/sync/slot_aware_cache_test.go | 2 +- beacon-chain/sync/subscriber.go | 2 +- beacon-chain/sync/subscriber_beacon_blocks.go | 2 +- beacon-chain/sync/subscriber_test.go | 2 +- beacon-chain/sync/sync_fuzz_test.go | 6 +- .../sync/validate_aggregate_proof_test.go | 14 +- .../sync/validate_attester_slashing_test.go | 12 +- .../sync/validate_beacon_attestation.go | 4 +- .../sync/validate_beacon_blocks_test.go | 42 +- beacon-chain/sync/validate_blob_test.go | 8 +- .../sync/validate_proposer_slashing_test.go | 12 +- .../sync/validate_sync_committee_message.go | 2 +- .../validate_sync_committee_message_test.go | 4 +- .../validate_sync_contribution_proof_test.go | 2 +- .../sync/validate_voluntary_exit_test.go | 6 +- beacon-chain/verification/blob_test.go | 10 +- cache/lru/lru_wrpr.go | 2 +- cache/lru/lru_wrpr_test.go | 6 +- cache/nonblocking/lru.go | 7 +- cache/nonblocking/lru_test.go | 4 +- changelog/pvl-modernize.md | 4 + cmd/beacon-chain/flags/api_module.go | 2 +- cmd/beacon-chain/storage/options.go | 6 +- cmd/beacon-chain/usage.go | 4 +- cmd/client-stats/usage.go | 4 +- cmd/prysmctl/db/span.go | 10 +- cmd/prysmctl/p2p/client.go | 4 +- cmd/prysmctl/p2p/handler.go | 2 +- cmd/prysmctl/p2p/handshake.go | 6 +- cmd/prysmctl/p2p/request_blobs.go | 2 +- cmd/prysmctl/p2p/request_blocks.go | 4 +- cmd/prysmctl/testnet/generate_genesis.go | 6 +- cmd/prysmctl/testnet/generate_genesis_test.go | 4 +- cmd/prysmctl/validator/withdraw.go | 2 +- cmd/validator/flags/flags_test.go | 4 +- .../slashing-protection/import_export_test.go | 2 +- cmd/validator/usage.go | 4 +- config/params/config_test.go | 7 +- config/params/loader_test.go | 4 +- config/util.go | 4 +- consensus-types/blocks/factory.go | 12 +- consensus-types/blocks/kzg_test.go | 4 +- consensus-types/hdiff/fuzz_test.go | 6 +- consensus-types/hdiff/property_test.go | 9 +- consensus-types/hdiff/security_test.go | 10 +- consensus-types/hdiff/state_diff.go | 2 +- consensus-types/hdiff/state_diff_test.go | 40 +- consensus-types/payload-attribute/types.go | 2 +- consensus-types/validator/custom_types.go | 2 +- consensus-types/wrapper/metadata.go | 6 +- container/leaky-bucket/heap.go | 4 +- container/leaky-bucket/heap_test.go | 8 +- container/leaky-bucket/leakybucket_test.go | 2 +- .../multi_value_slice_test.go | 54 +- container/queue/priority_queue.go | 6 +- container/slice/slice_test.go | 32 +- container/thread-safe/map_test.go | 14 +- container/trie/sparse_merkle.go | 2 +- container/trie/sparse_merkle_test.go | 23 +- contracts/deposit/deposit_tree_test.go | 4 +- crypto/bls/blst/bls_benchmark_test.go | 11 +- crypto/bls/blst/public_key_test.go | 48 +- crypto/bls/blst/signature.go | 8 +- crypto/bls/blst/signature_test.go | 10 +- crypto/bls/signature_batch.go | 14 +- crypto/bls/signature_batch_test.go | 8 +- crypto/hash/hash.go | 4 +- crypto/hash/hash_test.go | 8 +- crypto/hash/htr/hashtree.go | 2 +- crypto/hash/htr/hashtree_test.go | 6 +- crypto/keystore/key.go | 12 +- crypto/keystore/keystore.go | 2 +- crypto/keystore/utils.go | 2 +- crypto/random/random_test.go | 2 +- deps.bzl | 42 +- encoding/bytesutil/bytes_go120.go | 1 - encoding/bytesutil/bytes_legacy.go | 1 - encoding/bytesutil/bytes_test.go | 8 +- encoding/bytesutil/integers.go | 5 +- encoding/bytesutil/integers_test.go | 2 +- encoding/ssz/equality/deep_equal.go | 4 +- 
encoding/ssz/equality/deep_equal_test.go | 2 +- encoding/ssz/helpers.go | 10 +- encoding/ssz/htrutils_test.go | 4 +- encoding/ssz/merkleize.go | 4 +- encoding/ssz/slice_root.go | 2 +- genesis/initialize_test.go | 2 +- genesis/internal/embedded/mainnet.go | 1 - go.mod | 18 +- go.sum | 36 +- io/file/fileutil.go | 3 +- math/math_helper_test.go | 12 +- monitoring/journald/journalhook_linux.go | 2 +- monitoring/journald/journalhook_linux_test.go | 2 +- monitoring/prometheus/content_negotiation.go | 2 +- .../prometheus/logrus_collector_test.go | 6 +- .../tracing/recovery_interceptor_option.go | 2 +- network/auth_test.go | 6 +- nogo_config.json | 87 +- proto/engine/v1/execution_engine.go | 2 +- proto/engine/v1/execution_engine_fuzz_test.go | 2 +- proto/engine/v1/json_marshal_unmarshal.go | 8 +- .../engine/v1/json_marshal_unmarshal_test.go | 10 +- .../aggregation/attestations/attestations.go | 2 +- .../aggregation/attestations/maxcover.go | 6 +- .../aggregation/maxcover_bench_test.go | 4 +- .../aggregation/testing/bitlistutils.go | 10 +- .../v1alpha1/attestation/attestation_utils.go | 5 +- .../attestation/attestation_utils_test.go | 13 +- proto/prysm/v1alpha1/cloners.go | 2 +- proto/prysm/v1alpha1/cloners_test.go | 26 +- proto/prysm/v1alpha1/fuzz_test.go | 2 +- .../v1alpha1/metadata/metadata_interfaces.go | 2 +- proto/testing/tags_test.go | 4 +- runtime/interop/generate_genesis_state.go | 6 +- runtime/interop/generate_keys.go | 2 +- runtime/interop/premine-state.go | 10 +- .../logrus-prefixed-formatter/formatter.go | 4 +- runtime/service_registry.go | 2 +- runtime/service_registry_test.go | 12 +- testing/assert/assertions.go | 28 +- testing/assertions/assertions.go | 50 +- testing/assertions/assertions_test.go | 88 +- testing/endtoend/component_handler_test.go | 2 +- testing/endtoend/components/beacon_node.go | 2 +- testing/endtoend/components/builder.go | 2 +- testing/endtoend/components/eth1/depositor.go | 2 +- testing/endtoend/components/eth1/miner.go | 2 +- testing/endtoend/components/eth1/node.go | 2 +- testing/endtoend/components/eth1/node_set.go | 2 +- testing/endtoend/components/eth1/proxy.go | 4 +- .../endtoend/components/eth1/transactions.go | 9 +- .../endtoend/components/lighthouse_beacon.go | 2 +- .../components/lighthouse_validator.go | 4 +- testing/endtoend/components/validator.go | 2 +- testing/endtoend/endtoend_test.go | 11 +- .../endtoend/evaluators/beaconapi/requests.go | 8 +- .../endtoend/evaluators/beaconapi/types.go | 32 +- testing/endtoend/evaluators/beaconapi/util.go | 14 +- .../endtoend/evaluators/beaconapi/verify.go | 8 +- testing/endtoend/evaluators/data.go | 2 +- testing/endtoend/evaluators/metrics.go | 2 +- testing/endtoend/evaluators/node.go | 6 +- testing/endtoend/evaluators/peers.go | 2 +- testing/endtoend/helpers/helpers.go | 4 +- testing/endtoend/types/types.go | 2 +- testing/middleware/builder/builder.go | 20 +- testing/middleware/engine-api-proxy/proxy.go | 14 +- .../middleware/engine-api-proxy/proxy_test.go | 10 +- testing/require/requires.go | 30 +- .../simulator/attestation_generator.go | 5 +- .../shared/altair/ssz_static/ssz_static.go | 8 +- .../shared/bellatrix/ssz_static/ssz_static.go | 8 +- .../shared/capella/ssz_static/ssz_static.go | 8 +- .../shared/common/forkchoice/runner.go | 2 +- .../shared/common/ssz_static/ssz_static.go | 2 +- .../ssz_static/ssz_static_example_test.go | 8 +- .../shared/common/ssz_static/types.go | 6 +- .../shared/deneb/ssz_static/ssz_static.go | 8 +- .../shared/electra/ssz_static/ssz_static.go | 8 +- 
.../shared/fulu/ssz_static/ssz_static.go | 8 +- .../shared/gloas/ssz_static/ssz_static.go | 6 +- .../shared/phase0/ssz_static/ssz_static.go | 8 +- testing/spectest/utils/utils.go | 2 +- testing/util/altair.go | 8 +- testing/util/bellatrix_state.go | 10 +- testing/util/block.go | 8 +- testing/util/capella_state.go | 8 +- testing/util/deneb.go | 2 +- testing/util/deneb_state.go | 8 +- testing/util/deposits.go | 8 +- testing/util/deposits_test.go | 4 +- testing/util/electra_block.go | 6 +- testing/util/electra_state.go | 8 +- testing/util/fulu_state.go | 8 +- testing/util/helpers.go | 2 +- testing/util/lightclient.go | 2 +- testing/util/state.go | 6 +- third_party/hid/usb.go | 2 +- third_party/hid/usb_test.go | 12 +- .../org_golang_x_tools-deletegopls.patch | 203162 +++++++++++++++ third_party/org_golang_x_tools-gazelle.patch | 11931 + time/slots/slottime_test.go | 2 +- tools/analyzers/comparesame/analyzer.go | 2 +- tools/analyzers/cryptorand/analyzer.go | 2 +- tools/analyzers/featureconfig/analyzer.go | 2 +- tools/analyzers/gocognit/analyzer.go | 2 +- tools/analyzers/ineffassign/analyzer.go | 2 +- tools/analyzers/interfacechecker/analyzer.go | 2 +- tools/analyzers/logcapitalization/analyzer.go | 2 +- tools/analyzers/logruswitherror/analyzer.go | 4 +- tools/analyzers/maligned/analyzer.go | 2 +- tools/analyzers/maligned/maligned.go | 6 +- tools/analyzers/modernize/README.md | 5 + tools/analyzers/modernize/any/BUILD.bazel | 9 + tools/analyzers/modernize/any/analyzer.go | 5 + .../modernize/appendclipped/BUILD.bazel | 9 + .../modernize/appendclipped/analyzer.go | 5 + tools/analyzers/modernize/bloop/BUILD.bazel | 9 + tools/analyzers/modernize/bloop/analyzer.go | 5 + .../modernize/fmtappendf/BUILD.bazel | 9 + .../modernize/fmtappendf/analyzer.go | 5 + tools/analyzers/modernize/forvar/BUILD.bazel | 9 + tools/analyzers/modernize/forvar/analyzer.go | 5 + .../analyzers/modernize/mapsloop/BUILD.bazel | 9 + .../analyzers/modernize/mapsloop/analyzer.go | 5 + tools/analyzers/modernize/minmax/BUILD.bazel | 9 + tools/analyzers/modernize/minmax/analyzer.go | 5 + tools/analyzers/modernize/newexpr/BUILD.bazel | 9 + tools/analyzers/modernize/newexpr/analyzer.go | 5 + .../analyzers/modernize/omitzero/BUILD.bazel | 9 + .../analyzers/modernize/omitzero/analyzer.go | 5 + .../analyzers/modernize/rangeint/BUILD.bazel | 9 + .../analyzers/modernize/rangeint/analyzer.go | 5 + .../modernize/reflecttypefor/BUILD.bazel | 9 + .../modernize/reflecttypefor/analyzer.go | 5 + .../modernize/slicescontains/BUILD.bazel | 9 + .../modernize/slicescontains/analyzer.go | 5 + .../modernize/slicesdelete/BUILD.bazel | 9 + .../modernize/slicesdelete/analyzer.go | 5 + .../modernize/slicessort/BUILD.bazel | 9 + .../modernize/slicessort/analyzer.go | 5 + .../modernize/stringsbuilder/BUILD.bazel | 9 + .../modernize/stringsbuilder/analyzer.go | 5 + .../modernize/stringscutprefix/BUILD.bazel | 9 + .../modernize/stringscutprefix/analyzer.go | 5 + .../modernize/stringsseq/BUILD.bazel | 9 + .../modernize/stringsseq/analyzer.go | 5 + .../modernize/testingcontext/BUILD.bazel | 9 + .../modernize/testingcontext/analyzer.go | 5 + .../analyzers/modernize/waitgroup/BUILD.bazel | 9 + .../analyzers/modernize/waitgroup/analyzer.go | 5 + tools/analyzers/nop/analyzer.go | 2 +- tools/analyzers/nopanic/analyzer.go | 2 +- tools/analyzers/properpermissions/analyzer.go | 2 +- tools/analyzers/recursivelock/analyzer.go | 4 +- tools/analyzers/shadowpredecl/analyzer.go | 2 +- tools/analyzers/slicedirect/analyzer.go | 2 +- tools/analyzers/uintcast/analyzer.go | 2 +- 
tools/beacon-fuzz/main.go | 9 +- tools/blocktree/main.go | 2 +- tools/bootnode/bootnode.go | 2 +- tools/exploredb/main.go | 6 +- tools/http-request-sink/main.go | 6 +- tools/http-request-sink/main_test.go | 4 +- tools/interop/split-keys/main.go | 4 +- tools/interop/split-keys/main_test.go | 2 +- tools/nogo_config/main.go | 2 +- tools/specs-checker/check.go | 2 +- tools/unencrypted-keys-gen/main.go | 2 +- validator/accounts/accounts_delete_test.go | 2 +- validator/accounts/accounts_exit.go | 2 +- validator/accounts/accounts_helper.go | 2 +- validator/accounts/accounts_list_test.go | 2 +- validator/client/attest_test.go | 8 +- .../beacon_api_beacon_chain_client.go | 10 +- .../beacon_api_beacon_chain_client_test.go | 4 +- validator/client/beacon-api/duties_test.go | 8 +- .../beacon-api/get_beacon_block_test.go | 2 +- .../client/beacon-api/rest_handler_client.go | 10 +- validator/client/health_monitor_test.go | 16 +- validator/client/runner_test.go | 10 +- validator/client/testutil/mock_validator.go | 4 +- validator/client/validator_test.go | 8 +- validator/client/wait_for_activation_test.go | 4 +- .../db/filesystem/attester_protection_test.go | 3 +- validator/db/filesystem/import_test.go | 4 +- validator/db/kv/attester_protection_test.go | 8 +- validator/db/kv/eip_blacklisted_keys_test.go | 2 +- validator/db/kv/import_test.go | 4 +- ...ration_optimal_attester_protection_test.go | 2 +- ...ration_source_target_epochs_bucket_test.go | 4 +- .../db/kv/prune_attester_protection_test.go | 9 +- validator/keymanager/derived/keymanager.go | 4 +- .../keymanager/derived/keymanager_test.go | 4 +- validator/keymanager/local/backup_test.go | 2 +- validator/keymanager/local/delete_test.go | 2 +- validator/keymanager/local/import.go | 2 +- validator/keymanager/local/import_test.go | 6 +- validator/keymanager/local/keymanager.go | 22 +- validator/keymanager/local/keymanager_test.go | 6 +- validator/keymanager/local/refresh.go | 4 +- validator/keymanager/local/refresh_test.go | 2 +- .../remote-web3signer/internal/client.go | 2 +- .../remote-web3signer/keymanager.go | 2 +- validator/keymanager/types.go | 14 +- validator/node/node.go | 4 +- validator/rpc/auth_token_test.go | 2 +- validator/rpc/handler_wallet_test.go | 2 +- validator/rpc/handlers_accounts.go | 2 +- validator/rpc/handlers_health_test.go | 2 +- validator/rpc/handlers_keymanager.go | 6 +- validator/rpc/handlers_keymanager_test.go | 12 +- validator/rpc/handlers_slashing_test.go | 4 +- validator/rpc/intercepter.go | 4 +- validator/rpc/intercepter_test.go | 4 +- .../slashing-protection-history/export.go | 2 +- .../round_trip_test.go | 10 +- validator/testing/protection_history.go | 8 +- 605 files changed, 217475 insertions(+), 2228 deletions(-) create mode 100644 changelog/pvl-modernize.md create mode 100644 third_party/org_golang_x_tools-deletegopls.patch create mode 100644 third_party/org_golang_x_tools-gazelle.patch create mode 100644 tools/analyzers/modernize/README.md create mode 100644 tools/analyzers/modernize/any/BUILD.bazel create mode 100644 tools/analyzers/modernize/any/analyzer.go create mode 100644 tools/analyzers/modernize/appendclipped/BUILD.bazel create mode 100644 tools/analyzers/modernize/appendclipped/analyzer.go create mode 100644 tools/analyzers/modernize/bloop/BUILD.bazel create mode 100644 tools/analyzers/modernize/bloop/analyzer.go create mode 100644 tools/analyzers/modernize/fmtappendf/BUILD.bazel create mode 100644 tools/analyzers/modernize/fmtappendf/analyzer.go create mode 100644 tools/analyzers/modernize/forvar/BUILD.bazel 
create mode 100644 tools/analyzers/modernize/forvar/analyzer.go create mode 100644 tools/analyzers/modernize/mapsloop/BUILD.bazel create mode 100644 tools/analyzers/modernize/mapsloop/analyzer.go create mode 100644 tools/analyzers/modernize/minmax/BUILD.bazel create mode 100644 tools/analyzers/modernize/minmax/analyzer.go create mode 100644 tools/analyzers/modernize/newexpr/BUILD.bazel create mode 100644 tools/analyzers/modernize/newexpr/analyzer.go create mode 100644 tools/analyzers/modernize/omitzero/BUILD.bazel create mode 100644 tools/analyzers/modernize/omitzero/analyzer.go create mode 100644 tools/analyzers/modernize/rangeint/BUILD.bazel create mode 100644 tools/analyzers/modernize/rangeint/analyzer.go create mode 100644 tools/analyzers/modernize/reflecttypefor/BUILD.bazel create mode 100644 tools/analyzers/modernize/reflecttypefor/analyzer.go create mode 100644 tools/analyzers/modernize/slicescontains/BUILD.bazel create mode 100644 tools/analyzers/modernize/slicescontains/analyzer.go create mode 100644 tools/analyzers/modernize/slicesdelete/BUILD.bazel create mode 100644 tools/analyzers/modernize/slicesdelete/analyzer.go create mode 100644 tools/analyzers/modernize/slicessort/BUILD.bazel create mode 100644 tools/analyzers/modernize/slicessort/analyzer.go create mode 100644 tools/analyzers/modernize/stringsbuilder/BUILD.bazel create mode 100644 tools/analyzers/modernize/stringsbuilder/analyzer.go create mode 100644 tools/analyzers/modernize/stringscutprefix/BUILD.bazel create mode 100644 tools/analyzers/modernize/stringscutprefix/analyzer.go create mode 100644 tools/analyzers/modernize/stringsseq/BUILD.bazel create mode 100644 tools/analyzers/modernize/stringsseq/analyzer.go create mode 100644 tools/analyzers/modernize/testingcontext/BUILD.bazel create mode 100644 tools/analyzers/modernize/testingcontext/analyzer.go create mode 100644 tools/analyzers/modernize/waitgroup/BUILD.bazel create mode 100644 tools/analyzers/modernize/waitgroup/analyzer.go diff --git a/BUILD.bazel b/BUILD.bazel index 0036873cc5..aeb3e553b9 100644 --- a/BUILD.bazel +++ b/BUILD.bazel @@ -197,6 +197,25 @@ nogo( "//tools/analyzers/logcapitalization:go_default_library", "//tools/analyzers/logruswitherror:go_default_library", "//tools/analyzers/maligned:go_default_library", + "//tools/analyzers/modernize/any:go_default_library", + "//tools/analyzers/modernize/appendclipped:go_default_library", + "//tools/analyzers/modernize/bloop:go_default_library", + "//tools/analyzers/modernize/fmtappendf:go_default_library", + "//tools/analyzers/modernize/forvar:go_default_library", + "//tools/analyzers/modernize/mapsloop:go_default_library", + "//tools/analyzers/modernize/minmax:go_default_library", + #"//tools/analyzers/modernize/newexpr:go_default_library", # Disabled until go 1.26. 
+ "//tools/analyzers/modernize/omitzero:go_default_library", + "//tools/analyzers/modernize/rangeint:go_default_library", + "//tools/analyzers/modernize/reflecttypefor:go_default_library", + "//tools/analyzers/modernize/slicescontains:go_default_library", + #"//tools/analyzers/modernize/slicesdelete:go_default_library", # Disabled, see https://go.dev/issue/73686 + "//tools/analyzers/modernize/slicessort:go_default_library", + "//tools/analyzers/modernize/stringsbuilder:go_default_library", + "//tools/analyzers/modernize/stringscutprefix:go_default_library", + "//tools/analyzers/modernize/stringsseq:go_default_library", + "//tools/analyzers/modernize/testingcontext:go_default_library", + "//tools/analyzers/modernize/waitgroup:go_default_library", "//tools/analyzers/nop:go_default_library", "//tools/analyzers/nopanic:go_default_library", "//tools/analyzers/properpermissions:go_default_library", diff --git a/WORKSPACE b/WORKSPACE index 9c774b7358..db1aa9daa9 100644 --- a/WORKSPACE +++ b/WORKSPACE @@ -205,6 +205,26 @@ prysm_image_deps() load("@io_bazel_rules_go//go:deps.bzl", "go_register_toolchains", "go_rules_dependencies") +# Override golang.org/x/tools to use v0.38.0 instead of v0.30.0 +# This is necessary as this dependency is required by rules_go and they do not accept dependency +# update PRs. Instead, they ask downstream projects to override the dependency. To generate the +# patches or update this dependency again, check out the rules_go repo then run the releaser tool. +# bazel run //go/tools/releaser -- upgrade-dep -mirror=false org_golang_x_tools +# Copy the patches and http_archive updates from rules_go here. +http_archive( + name = "org_golang_x_tools", + patch_args = ["-p1"], + patches = [ + "//third_party:org_golang_x_tools-deletegopls.patch", + "//third_party:org_golang_x_tools-gazelle.patch", + ], + sha256 = "8509908cd7fc35aa09ff49d8494e4fd25bab9e6239fbf57e0d8344f6bec5802b", + strip_prefix = "tools-0.38.0", + urls = [ + "https://github.com/golang/tools/archive/refs/tags/v0.38.0.zip", + ], +) + go_rules_dependencies() go_register_toolchains( diff --git a/api/apiutil/header.go b/api/apiutil/header.go index 4ef6ca3cb2..37fdf988e0 100644 --- a/api/apiutil/header.go +++ b/api/apiutil/header.go @@ -56,7 +56,7 @@ func ParseAccept(header string) []mediaRange { } var out []mediaRange - for _, field := range strings.Split(header, ",") { + for field := range strings.SplitSeq(header, ",") { if r, ok := parseMediaRange(field); ok { out = append(out, r) } diff --git a/api/client/builder/client.go b/api/client/builder/client.go index ca32e11bb6..6139a95629 100644 --- a/api/client/builder/client.go +++ b/api/client/builder/client.go @@ -421,7 +421,7 @@ func (c *Client) RegisterValidator(ctx context.Context, svr []*ethpb.SignedValid func jsonValidatorRegisterRequest(svr []*ethpb.SignedValidatorRegistrationV1) ([]byte, error) { vs := make([]*structs.SignedValidatorRegistration, len(svr)) - for i := 0; i < len(svr); i++ { + for i := range svr { vs[i] = structs.SignedValidatorRegistrationFromConsensus(svr[i]) } body, err := json.Marshal(vs) diff --git a/api/client/builder/types.go b/api/client/builder/types.go index e0d6316298..d0175e48ff 100644 --- a/api/client/builder/types.go +++ b/api/client/builder/types.go @@ -121,7 +121,7 @@ func (s *Uint64String) UnmarshalText(t []byte) error { // MarshalText returns a byte representation of the text from Uint64String. 
func (s Uint64String) MarshalText() ([]byte, error) { - return []byte(fmt.Sprintf("%d", s)), nil + return fmt.Appendf(nil, "%d", s), nil } // VersionResponse is a JSON representation of a field in the builder API header response. diff --git a/api/grpc/grpcutils.go b/api/grpc/grpcutils.go index d0dd8be5e4..54ac97958c 100644 --- a/api/grpc/grpcutils.go +++ b/api/grpc/grpcutils.go @@ -15,7 +15,7 @@ import ( func LogRequests( ctx context.Context, method string, req, - reply interface{}, + reply any, cc *grpc.ClientConn, invoker grpc.UnaryInvoker, opts ...grpc.CallOption, diff --git a/api/server/structs/endpoints_config.go b/api/server/structs/endpoints_config.go index 12e9adf650..1b40522a71 100644 --- a/api/server/structs/endpoints_config.go +++ b/api/server/structs/endpoints_config.go @@ -14,5 +14,5 @@ type GetForkScheduleResponse struct { } type GetSpecResponse struct { - Data interface{} `json:"data"` + Data any `json:"data"` } diff --git a/async/abool/abool_test.go b/async/abool/abool_test.go index f870206abd..8447ed5e51 100644 --- a/async/abool/abool_test.go +++ b/async/abool/abool_test.go @@ -93,9 +93,9 @@ func TestToggleMultipleTimes(t *testing.T) { v := New() pre := !v.IsSet() - for i := 0; i < 100; i++ { + for i := range 100 { v.SetTo(false) - for j := 0; j < i; j++ { + for range i { pre = v.Toggle() } @@ -149,7 +149,7 @@ func TestRace(t *testing.T) { // Writer go func() { - for i := 0; i < repeat; i++ { + for range repeat { v.Set() wg.Done() } @@ -157,7 +157,7 @@ func TestRace(t *testing.T) { // Reader go func() { - for i := 0; i < repeat; i++ { + for range repeat { v.IsSet() wg.Done() } @@ -165,7 +165,7 @@ func TestRace(t *testing.T) { // Writer go func() { - for i := 0; i < repeat; i++ { + for range repeat { v.UnSet() wg.Done() } @@ -173,7 +173,7 @@ func TestRace(t *testing.T) { // Reader And Writer go func() { - for i := 0; i < repeat; i++ { + for range repeat { v.Toggle() wg.Done() } @@ -198,8 +198,8 @@ func ExampleAtomicBool() { func BenchmarkMutexRead(b *testing.B) { var m sync.RWMutex var v bool - b.ResetTimer() - for i := 0; i < b.N; i++ { + + for b.Loop() { m.RLock() _ = v m.RUnlock() @@ -208,16 +208,16 @@ func BenchmarkMutexRead(b *testing.B) { func BenchmarkAtomicValueRead(b *testing.B) { var v atomic.Value - b.ResetTimer() - for i := 0; i < b.N; i++ { + + for b.Loop() { _ = v.Load() != nil } } func BenchmarkAtomicBoolRead(b *testing.B) { v := New() - b.ResetTimer() - for i := 0; i < b.N; i++ { + + for b.Loop() { _ = v.IsSet() } } @@ -227,8 +227,8 @@ func BenchmarkAtomicBoolRead(b *testing.B) { func BenchmarkMutexWrite(b *testing.B) { var m sync.RWMutex var v bool - b.ResetTimer() - for i := 0; i < b.N; i++ { + + for b.Loop() { m.RLock() v = true m.RUnlock() @@ -239,16 +239,16 @@ func BenchmarkMutexWrite(b *testing.B) { func BenchmarkAtomicValueWrite(b *testing.B) { var v atomic.Value - b.ResetTimer() - for i := 0; i < b.N; i++ { + + for b.Loop() { v.Store(true) } } func BenchmarkAtomicBoolWrite(b *testing.B) { v := New() - b.ResetTimer() - for i := 0; i < b.N; i++ { + + for b.Loop() { v.Set() } } @@ -258,8 +258,8 @@ func BenchmarkAtomicBoolWrite(b *testing.B) { func BenchmarkMutexCAS(b *testing.B) { var m sync.RWMutex var v bool - b.ResetTimer() - for i := 0; i < b.N; i++ { + + for b.Loop() { m.Lock() if !v { v = true @@ -270,8 +270,8 @@ func BenchmarkMutexCAS(b *testing.B) { func BenchmarkAtomicBoolCAS(b *testing.B) { v := New() - b.ResetTimer() - for i := 0; i < b.N; i++ { + + for b.Loop() { v.SetToIf(false, true) } } @@ -281,8 +281,8 @@ func BenchmarkAtomicBoolCAS(b 
*testing.B) { func BenchmarkMutexToggle(b *testing.B) { var m sync.RWMutex var v bool - b.ResetTimer() - for i := 0; i < b.N; i++ { + + for b.Loop() { m.Lock() v = !v m.Unlock() @@ -291,8 +291,8 @@ func BenchmarkMutexToggle(b *testing.B) { func BenchmarkAtomicBoolToggle(b *testing.B) { v := New() - b.ResetTimer() - for i := 0; i < b.N; i++ { + + for b.Loop() { v.Toggle() } } diff --git a/async/benchmark_test.go b/async/benchmark_test.go index 83530091b8..6ce1589b4e 100644 --- a/async/benchmark_test.go +++ b/async/benchmark_test.go @@ -21,7 +21,7 @@ const ( func init() { input = make([][]byte, benchmarkElements) - for i := 0; i < benchmarkElements; i++ { + for i := range benchmarkElements { input[i] = make([]byte, benchmarkElementSize) _, err := rand.Read(input[i]) if err != nil { @@ -35,7 +35,7 @@ func hash(input [][]byte) [][]byte { output := make([][]byte, len(input)) for i := range input { copy(output, input) - for j := 0; j < benchmarkHashRuns; j++ { + for range benchmarkHashRuns { hash := sha256.Sum256(output[i]) output[i] = hash[:] } @@ -44,15 +44,15 @@ func hash(input [][]byte) [][]byte { } func BenchmarkHash(b *testing.B) { - for i := 0; i < b.N; i++ { + for b.Loop() { hash(input) } } func BenchmarkHashMP(b *testing.B) { output := make([][]byte, len(input)) - for i := 0; i < b.N; i++ { - workerResults, err := async.Scatter(len(input), func(offset int, entries int, _ *sync.RWMutex) (interface{}, error) { + for b.Loop() { + workerResults, err := async.Scatter(len(input), func(offset int, entries int, _ *sync.RWMutex) (any, error) { return hash(input[offset : offset+entries]), nil }) require.NoError(b, err) diff --git a/async/debounce.go b/async/debounce.go index 338fbb3eac..4d27acb972 100644 --- a/async/debounce.go +++ b/async/debounce.go @@ -7,7 +7,7 @@ import ( // Debounce events fired over a channel by a specified duration, ensuring no events // are handled until a certain interval of time has passed. 
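// Illustrative usage (hypothetical caller, not part of this change): rapid bursts of events on
// the channel are debounced so the handler does not run for every single event.
//
//	events := make(chan any, 16)
//	go Debounce(ctx, time.Second, events, func(e any) {
//		process(e) // process is a hypothetical consumer of the debounced event
//	})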
-func Debounce(ctx context.Context, interval time.Duration, eventsChan <-chan interface{}, handler func(interface{})) { +func Debounce(ctx context.Context, interval time.Duration, eventsChan <-chan any, handler func(any)) { var timer *time.Timer defer func() { if timer != nil { diff --git a/async/debounce_test.go b/async/debounce_test.go index f9272d646a..2e3738bf72 100644 --- a/async/debounce_test.go +++ b/async/debounce_test.go @@ -14,7 +14,7 @@ import ( ) func TestDebounce_NoEvents(t *testing.T) { - eventsChan := make(chan interface{}, 100) + eventsChan := make(chan any, 100) ctx, cancel := context.WithCancel(t.Context()) interval := time.Second timesHandled := int32(0) @@ -26,7 +26,7 @@ func TestDebounce_NoEvents(t *testing.T) { }) }() go func() { - async.Debounce(ctx, interval, eventsChan, func(event interface{}) { + async.Debounce(ctx, interval, eventsChan, func(event any) { atomic.AddInt32(×Handled, 1) }) wg.Done() @@ -38,7 +38,7 @@ func TestDebounce_NoEvents(t *testing.T) { } func TestDebounce_CtxClosing(t *testing.T) { - eventsChan := make(chan interface{}, 100) + eventsChan := make(chan any, 100) ctx, cancel := context.WithCancel(t.Context()) interval := time.Second timesHandled := int32(0) @@ -62,7 +62,7 @@ func TestDebounce_CtxClosing(t *testing.T) { }) }() go func() { - async.Debounce(ctx, interval, eventsChan, func(event interface{}) { + async.Debounce(ctx, interval, eventsChan, func(event any) { atomic.AddInt32(×Handled, 1) }) wg.Done() @@ -74,14 +74,14 @@ func TestDebounce_CtxClosing(t *testing.T) { } func TestDebounce_SingleHandlerInvocation(t *testing.T) { - eventsChan := make(chan interface{}, 100) + eventsChan := make(chan any, 100) ctx, cancel := context.WithCancel(t.Context()) interval := time.Second timesHandled := int32(0) - go async.Debounce(ctx, interval, eventsChan, func(event interface{}) { + go async.Debounce(ctx, interval, eventsChan, func(event any) { atomic.AddInt32(×Handled, 1) }) - for i := 0; i < 100; i++ { + for range 100 { eventsChan <- struct{}{} } // We should expect 100 rapid fire changes to only have caused @@ -92,14 +92,14 @@ func TestDebounce_SingleHandlerInvocation(t *testing.T) { } func TestDebounce_MultipleHandlerInvocation(t *testing.T) { - eventsChan := make(chan interface{}, 100) + eventsChan := make(chan any, 100) ctx, cancel := context.WithCancel(t.Context()) interval := time.Second timesHandled := int32(0) - go async.Debounce(ctx, interval, eventsChan, func(event interface{}) { + go async.Debounce(ctx, interval, eventsChan, func(event any) { atomic.AddInt32(×Handled, 1) }) - for i := 0; i < 100; i++ { + for range 100 { eventsChan <- struct{}{} } require.Equal(t, int32(0), atomic.LoadInt32(×Handled), "Events must prevent from handler execution") diff --git a/async/event/example_scope_test.go b/async/event/example_scope_test.go index 510da227be..6df4a09065 100644 --- a/async/event/example_scope_test.go +++ b/async/event/example_scope_test.go @@ -93,9 +93,7 @@ func ExampleSubscriptionScope() { // Run a subscriber in the background. divsub := app.SubscribeResults('/', divs) mulsub := app.SubscribeResults('*', muls) - wg.Add(1) - go func() { - defer wg.Done() + wg.Go(func() { defer fmt.Println("subscriber exited") defer divsub.Unsubscribe() defer mulsub.Unsubscribe() @@ -111,7 +109,7 @@ func ExampleSubscriptionScope() { return } } - }() + }) // Interact with the app. 
app.Calc('/', 22, 11) diff --git a/async/event/example_subscription_test.go b/async/event/example_subscription_test.go index fe8c65eb68..ed04c1c169 100644 --- a/async/event/example_subscription_test.go +++ b/async/event/example_subscription_test.go @@ -26,7 +26,7 @@ func ExampleNewSubscription() { // Create a subscription that sends 10 integers on ch. ch := make(chan int) sub := event.NewSubscription(func(quit <-chan struct{}) error { - for i := 0; i < 10; i++ { + for i := range 10 { select { case ch <- i: case <-quit: diff --git a/async/event/interface.go b/async/event/interface.go index d54f9fd321..ed482544eb 100644 --- a/async/event/interface.go +++ b/async/event/interface.go @@ -3,6 +3,6 @@ package event // SubscriberSender is an abstract representation of an *event.Feed // to use in describing types that accept or return an *event.Feed. type SubscriberSender interface { - Subscribe(channel interface{}) Subscription - Send(value interface{}) (nsent int) + Subscribe(channel any) Subscription + Send(value any) (nsent int) } diff --git a/async/event/subscription_test.go b/async/event/subscription_test.go index e3f04d1661..58da7ada87 100644 --- a/async/event/subscription_test.go +++ b/async/event/subscription_test.go @@ -30,7 +30,7 @@ var errInts = errors.New("error in subscribeInts") func subscribeInts(max, fail int, c chan<- int) Subscription { return NewSubscription(func(quit <-chan struct{}) error { - for i := 0; i < max; i++ { + for i := range max { if i >= fail { return errInts } @@ -50,7 +50,7 @@ func TestNewSubscriptionError(t *testing.T) { channel := make(chan int) sub := subscribeInts(10, 2, channel) loop: - for want := 0; want < 10; want++ { + for want := range 10 { select { case got := <-channel: require.Equal(t, want, got) diff --git a/async/multilock_test.go b/async/multilock_test.go index 571460a373..6a64d6fa70 100644 --- a/async/multilock_test.go +++ b/async/multilock_test.go @@ -107,15 +107,13 @@ func TestLockUnlock(_ *testing.T) { func TestLockUnlock_CleansUnused(t *testing.T) { var wg sync.WaitGroup - wg.Add(1) - go func() { + wg.Go(func() { lock := NewMultilock("dog", "cat", "owl") lock.Lock() assert.Equal(t, 3, len(locks.list)) lock.Unlock() - wg.Done() - }() + }) wg.Wait() // We expect that unlocking completely cleared the locks list // given all 3 lock keys were unused at time of unlock. diff --git a/async/scatter.go b/async/scatter.go index ac3b743d80..8293965a16 100644 --- a/async/scatter.go +++ b/async/scatter.go @@ -9,14 +9,14 @@ import ( // WorkerResults are the results of a scatter worker. type WorkerResults struct { Offset int - Extent interface{} + Extent any } // Scatter scatters a computation across multiple goroutines. // This breaks the task in to a number of chunks and executes those chunks in parallel with the function provided. // Results returned are collected and presented as a set of WorkerResults, which can be reassembled by the calling function. // Any error that occurs in the workers will be passed back to the calling function. 
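// Illustrative usage (hypothetical caller, mirroring the pattern exercised in scatter_test.go):
//
//	data := []int{1, 2, 3, 4}
//	results, err := Scatter(len(data), func(offset, entries int, _ *sync.RWMutex) (any, error) {
//		out := make([]int, entries)
//		for i := range entries {
//			out[i] = data[offset+i] * 2
//		}
//		return out, nil
//	})
//	// On success, each WorkerResults.Extent holds the doubled values for its offset range.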
-func Scatter(inputLen int, sFunc func(int, int, *sync.RWMutex) (interface{}, error)) ([]*WorkerResults, error) { +func Scatter(inputLen int, sFunc func(int, int, *sync.RWMutex) (any, error)) ([]*WorkerResults, error) { if inputLen <= 0 { return nil, errors.New("input length must be greater than 0") } diff --git a/async/scatter_test.go b/async/scatter_test.go index 72d8eb2d50..76f0d3e356 100644 --- a/async/scatter_test.go +++ b/async/scatter_test.go @@ -46,9 +46,9 @@ func TestDouble(t *testing.T) { inValues[i] = i } outValues := make([]int, test.inValues) - workerResults, err := async.Scatter(len(inValues), func(offset int, entries int, _ *sync.RWMutex) (interface{}, error) { + workerResults, err := async.Scatter(len(inValues), func(offset int, entries int, _ *sync.RWMutex) (any, error) { extent := make([]int, entries) - for i := 0; i < entries; i++ { + for i := range entries { extent[i] = inValues[offset+i] * 2 } return extent, nil @@ -72,8 +72,8 @@ func TestDouble(t *testing.T) { func TestMutex(t *testing.T) { totalRuns := 1048576 val := 0 - _, err := async.Scatter(totalRuns, func(offset int, entries int, mu *sync.RWMutex) (interface{}, error) { - for i := 0; i < entries; i++ { + _, err := async.Scatter(totalRuns, func(offset int, entries int, mu *sync.RWMutex) (any, error) { + for range entries { mu.Lock() val++ mu.Unlock() @@ -90,8 +90,8 @@ func TestMutex(t *testing.T) { func TestError(t *testing.T) { totalRuns := 1024 val := 0 - _, err := async.Scatter(totalRuns, func(offset int, entries int, mu *sync.RWMutex) (interface{}, error) { - for i := 0; i < entries; i++ { + _, err := async.Scatter(totalRuns, func(offset int, entries int, mu *sync.RWMutex) (any, error) { + for range entries { mu.Lock() val++ if val == 1011 { diff --git a/beacon-chain/blockchain/kzg/validation_test.go b/beacon-chain/blockchain/kzg/validation_test.go index 734e056121..6f91f7bb87 100644 --- a/beacon-chain/blockchain/kzg/validation_test.go +++ b/beacon-chain/blockchain/kzg/validation_test.go @@ -70,7 +70,7 @@ func TestVerifyBlobKZGProofBatch(t *testing.T) { commitments := make([][]byte, blobCount) proofs := make([][]byte, blobCount) - for i := 0; i < blobCount; i++ { + for i := range blobCount { blob := random.GetRandBlob(int64(i)) commitment, proof, err := GenerateCommitmentAndProof(blob) require.NoError(t, err) @@ -432,8 +432,8 @@ func TestVerifyCellKZGProofBatchFromBlobData(t *testing.T) { commitments[1] = make([]byte, 32) // Wrong size // Add cell proofs for both blobs - for i := 0; i < blobCount; i++ { - for j := uint64(0); j < numberOfColumns; j++ { + for range blobCount { + for range numberOfColumns { allCellProofs = append(allCellProofs, make([]byte, 48)) } } @@ -450,7 +450,7 @@ func TestVerifyCellKZGProofBatchFromBlobData(t *testing.T) { commitments := make([][]byte, blobCount) var allCellProofs [][]byte - for i := 0; i < blobCount; i++ { + for i := range blobCount { randBlob := random.GetRandBlob(int64(i)) var blob Blob copy(blob[:], randBlob[:]) @@ -461,7 +461,7 @@ func TestVerifyCellKZGProofBatchFromBlobData(t *testing.T) { commitments[i] = commitment[:] // Add cell proofs - make some invalid in the second blob - for j := uint64(0); j < numberOfColumns; j++ { + for j := range numberOfColumns { if i == 1 && j == 64 { // Invalid proof size in middle of second blob's proofs allCellProofs = append(allCellProofs, make([]byte, 20)) diff --git a/beacon-chain/blockchain/process_attestation_test.go b/beacon-chain/blockchain/process_attestation_test.go index dbccecf0ec..1d78b10e37 100644 --- 
a/beacon-chain/blockchain/process_attestation_test.go +++ b/beacon-chain/blockchain/process_attestation_test.go @@ -209,16 +209,14 @@ func TestService_GetAttPreState_Concurrency(t *testing.T) { var wg sync.WaitGroup errChan := make(chan error, 1000) - for i := 0; i < 1000; i++ { - wg.Add(1) - go func() { - defer wg.Done() + for range 1000 { + wg.Go(func() { cp1 := ðpb.Checkpoint{Epoch: 1, Root: ckRoot} _, err := service.getAttPreState(ctx, cp1) if err != nil { errChan <- err } - }() + }) } go func() { diff --git a/beacon-chain/blockchain/process_block.go b/beacon-chain/blockchain/process_block.go index 44d098a8c2..01aae56f94 100644 --- a/beacon-chain/blockchain/process_block.go +++ b/beacon-chain/blockchain/process_block.go @@ -817,7 +817,7 @@ func (s *Service) areDataColumnsAvailable( } case <-ctx.Done(): - var missingIndices interface{} = "all" + var missingIndices any = "all" numberOfColumns := params.BeaconConfig().NumberOfColumns missingIndicesCount := uint64(len(missing)) diff --git a/beacon-chain/blockchain/process_block_test.go b/beacon-chain/blockchain/process_block_test.go index 0df990b01e..4306d16c8b 100644 --- a/beacon-chain/blockchain/process_block_test.go +++ b/beacon-chain/blockchain/process_block_test.go @@ -147,7 +147,7 @@ func TestStore_OnBlockBatch(t *testing.T) { bState := st.Copy() var blks []consensusblocks.ROBlock - for i := 0; i < 97; i++ { + for i := range 97 { b, err := util.GenerateFullBlock(bState, keys, util.DefaultBlockGenConfig(), primitives.Slot(i)) require.NoError(t, err) wsb, err := consensusblocks.NewSignedBeaconBlock(b) @@ -1323,7 +1323,7 @@ func TestOnBlock_ProcessBlocksParallel(t *testing.T) { require.NoError(t, err) logHook := logTest.NewGlobal() - for i := 0; i < 10; i++ { + for range 10 { fc := ðpb.Checkpoint{} st, blkRoot, err := prepareForkchoiceState(ctx, 0, wsb1.Block().ParentRoot(), [32]byte{}, [32]byte{}, fc, fc) require.NoError(t, err) @@ -1949,7 +1949,7 @@ func TestStore_NoViableHead_Liveness(t *testing.T) { require.Equal(t, true, optimistic) // Check that the invalid blocks are not in database - for i := 0; i < 19-13; i++ { + for i := range 19 - 13 { require.Equal(t, false, service.cfg.BeaconDB.HasBlock(ctx, invalidRoots[i])) } @@ -2879,7 +2879,7 @@ func TestProcessLightClientUpdate(t *testing.T) { // set a better sync aggregate scb := make([]byte, 64) - for i := 0; i < 5; i++ { + for i := range 5 { scb[i] = 0x01 } oldUpdate.SetSyncAggregate(ðpb.SyncAggregate{ diff --git a/beacon-chain/blockchain/receive_block_test.go b/beacon-chain/blockchain/receive_block_test.go index daf9d97a47..95577054f2 100644 --- a/beacon-chain/blockchain/receive_block_test.go +++ b/beacon-chain/blockchain/receive_block_test.go @@ -216,13 +216,11 @@ func TestService_ReceiveBlockUpdateHead(t *testing.T) { root, err := b.Block.HashTreeRoot() require.NoError(t, err) wg := sync.WaitGroup{} - wg.Add(1) - go func() { + wg.Go(func() { wsb, err := blocks.NewSignedBeaconBlock(b) require.NoError(t, err) require.NoError(t, s.ReceiveBlock(ctx, wsb, root, nil)) - wg.Done() - }() + }) wg.Wait() time.Sleep(100 * time.Millisecond) if recvd := len(s.cfg.StateNotifier.(*blockchainTesting.MockStateNotifier).ReceivedEvents()); recvd < 1 { diff --git a/beacon-chain/blockchain/service_test.go b/beacon-chain/blockchain/service_test.go index 03148af5d6..f1df2e9474 100644 --- a/beacon-chain/blockchain/service_test.go +++ b/beacon-chain/blockchain/service_test.go @@ -412,8 +412,7 @@ func BenchmarkHasBlockDB(b *testing.B) { r, err := blk.Block.HashTreeRoot() require.NoError(b, err) - 
b.ResetTimer() - for i := 0; i < b.N; i++ { + for b.Loop() { require.Equal(b, true, s.cfg.BeaconDB.HasBlock(ctx, r), "Block is not in DB") } } @@ -432,8 +431,7 @@ func BenchmarkHasBlockForkChoiceStore_DoublyLinkedTree(b *testing.B) { require.NoError(b, err) require.NoError(b, s.cfg.ForkChoiceStore.InsertNode(ctx, beaconState, roblock)) - b.ResetTimer() - for i := 0; i < b.N; i++ { + for b.Loop() { require.Equal(b, true, s.cfg.ForkChoiceStore.HasNode(r), "Block is not in fork choice store") } } diff --git a/beacon-chain/blockchain/testing/mock.go b/beacon-chain/blockchain/testing/mock.go index 2d2d8904e5..f9a406bb1d 100644 --- a/beacon-chain/blockchain/testing/mock.go +++ b/beacon-chain/blockchain/testing/mock.go @@ -106,7 +106,7 @@ type EventFeedWrapper struct { subscribed chan struct{} // this channel is closed once a subscription is made } -func (w *EventFeedWrapper) Subscribe(channel interface{}) event.Subscription { +func (w *EventFeedWrapper) Subscribe(channel any) event.Subscription { select { case <-w.subscribed: break // already closed @@ -116,7 +116,7 @@ func (w *EventFeedWrapper) Subscribe(channel interface{}) event.Subscription { return w.feed.Subscribe(channel) } -func (w *EventFeedWrapper) Send(value interface{}) int { +func (w *EventFeedWrapper) Send(value any) int { return w.feed.Send(value) } diff --git a/beacon-chain/builder/service.go b/beacon-chain/builder/service.go index 70d5001fce..8d6aa8a34e 100644 --- a/beacon-chain/builder/service.go +++ b/beacon-chain/builder/service.go @@ -166,7 +166,7 @@ func (s *Service) RegisterValidator(ctx context.Context, reg []*ethpb.SignedVali indexToRegistration := make(map[primitives.ValidatorIndex]*ethpb.ValidatorRegistrationV1) valid := make([]*ethpb.SignedValidatorRegistrationV1, 0) - for i := 0; i < len(reg); i++ { + for i := range reg { r := reg[i] nx, exists := s.cfg.headFetcher.HeadPublicKeyToValidatorIndex(bytesutil.ToBytes48(r.Message.Pubkey)) if !exists { diff --git a/beacon-chain/cache/active_balance_test.go b/beacon-chain/cache/active_balance_test.go index 591dfea4be..857ff01916 100644 --- a/beacon-chain/cache/active_balance_test.go +++ b/beacon-chain/cache/active_balance_test.go @@ -17,7 +17,7 @@ import ( func TestBalanceCache_AddGetBalance(t *testing.T) { blockRoots := make([][]byte, params.BeaconConfig().SlotsPerHistoricalRoot) - for i := 0; i < len(blockRoots); i++ { + for i := range blockRoots { b := make([]byte, 8) binary.LittleEndian.PutUint64(b, uint64(i)) blockRoots[i] = b @@ -61,7 +61,7 @@ func TestBalanceCache_AddGetBalance(t *testing.T) { func TestBalanceCache_BalanceKey(t *testing.T) { blockRoots := make([][]byte, params.BeaconConfig().SlotsPerHistoricalRoot) - for i := 0; i < len(blockRoots); i++ { + for i := range blockRoots { b := make([]byte, 8) binary.LittleEndian.PutUint64(b, uint64(i)) blockRoots[i] = b diff --git a/beacon-chain/cache/committee.go b/beacon-chain/cache/committee.go index e3db8b6f22..3f0430833c 100644 --- a/beacon-chain/cache/committee.go +++ b/beacon-chain/cache/committee.go @@ -51,7 +51,7 @@ type CommitteeCache struct { } // committeeKeyFn takes the seed as the key to retrieve shuffled indices of a committee in a given epoch. 
-func committeeKeyFn(obj interface{}) (string, error) { +func committeeKeyFn(obj any) (string, error) { info, ok := obj.(*Committees) if !ok { return "", ErrNotCommittee diff --git a/beacon-chain/cache/committee_fuzz_test.go b/beacon-chain/cache/committee_fuzz_test.go index e2a79a0b46..fb3e7280ad 100644 --- a/beacon-chain/cache/committee_fuzz_test.go +++ b/beacon-chain/cache/committee_fuzz_test.go @@ -14,7 +14,7 @@ func TestCommitteeKeyFuzz_OK(t *testing.T) { fuzzer := fuzz.NewWithSeed(0) c := &Committees{} - for i := 0; i < 100000; i++ { + for range 100000 { fuzzer.Fuzz(c) k, err := committeeKeyFn(c) require.NoError(t, err) @@ -27,7 +27,7 @@ func TestCommitteeCache_FuzzCommitteesByEpoch(t *testing.T) { fuzzer := fuzz.NewWithSeed(0) c := &Committees{} - for i := 0; i < 100000; i++ { + for range 100000 { fuzzer.Fuzz(c) require.NoError(t, cache.AddCommitteeShuffledList(t.Context(), c)) _, err := cache.Committee(t.Context(), 0, c.Seed, 0) @@ -42,7 +42,7 @@ func TestCommitteeCache_FuzzActiveIndices(t *testing.T) { fuzzer := fuzz.NewWithSeed(0) c := &Committees{} - for i := 0; i < 100000; i++ { + for range 100000 { fuzzer.Fuzz(c) require.NoError(t, cache.AddCommitteeShuffledList(t.Context(), c)) diff --git a/beacon-chain/cache/common.go b/beacon-chain/cache/common.go index 5eff08f49e..ad26f77bff 100644 --- a/beacon-chain/cache/common.go +++ b/beacon-chain/cache/common.go @@ -17,6 +17,6 @@ func trim(queue *cache.FIFO, maxSize uint64) { } // popProcessNoopFunc is a no-op function that never returns an error. -func popProcessNoopFunc(_ interface{}, _ bool) error { +func popProcessNoopFunc(_ any, _ bool) error { return nil } diff --git a/beacon-chain/cache/depositsnapshot/deposit_cache_test.go b/beacon-chain/cache/depositsnapshot/deposit_cache_test.go index fbf486ed93..6ef00f2beb 100644 --- a/beacon-chain/cache/depositsnapshot/deposit_cache_test.go +++ b/beacon-chain/cache/depositsnapshot/deposit_cache_test.go @@ -769,7 +769,7 @@ func TestFinalizedDeposits_ReturnsTrieCorrectly(t *testing.T) { } var ctrs []*ethpb.DepositContainer - for i := 0; i < 2000; i++ { + for i := range 2000 { ctrs = append(ctrs, generateCtr(uint64(10+(i/2)), int64(i))) } @@ -948,9 +948,9 @@ func rootCreator(rn byte) []byte { func BenchmarkDepositTree_InsertNewImplementation(b *testing.B) { totalDeposits := 10000 input := bytesutil.ToBytes32([]byte("foo")) - for i := 0; i < b.N; i++ { + for b.Loop() { dt := NewDepositTree() - for j := 0; j < totalDeposits; j++ { + for range totalDeposits { err := dt.Insert(input[:], 0) require.NoError(b, err) } @@ -959,10 +959,10 @@ func BenchmarkDepositTree_InsertNewImplementation(b *testing.B) { func BenchmarkDepositTree_InsertOldImplementation(b *testing.B) { totalDeposits := 10000 input := bytesutil.ToBytes32([]byte("foo")) - for i := 0; i < b.N; i++ { + for b.Loop() { dt, err := trie.NewTrie(33) require.NoError(b, err) - for j := 0; j < totalDeposits; j++ { + for range totalDeposits { err := dt.Insert(input[:], 0) require.NoError(b, err) } @@ -980,8 +980,8 @@ func BenchmarkDepositTree_HashTreeRootNewImplementation(b *testing.B) { } b.ReportAllocs() - b.ResetTimer() - for i := 0; i < b.N; i++ { + + for b.Loop() { _, err = tr.HashTreeRoot() require.NoError(b, err) } @@ -999,8 +999,8 @@ func BenchmarkDepositTree_HashTreeRootOldImplementation(b *testing.B) { } b.ReportAllocs() - b.ResetTimer() - for i := 0; i < b.N; i++ { + + for b.Loop() { _, err = dt.HashTreeRoot() require.NoError(b, err) } diff --git a/beacon-chain/cache/depositsnapshot/deposit_tree_snapshot.go 
b/beacon-chain/cache/depositsnapshot/deposit_tree_snapshot.go index eefb0330cf..a9bba4b7d2 100644 --- a/beacon-chain/cache/depositsnapshot/deposit_tree_snapshot.go +++ b/beacon-chain/cache/depositsnapshot/deposit_tree_snapshot.go @@ -20,7 +20,7 @@ func (ds *DepositTreeSnapshot) CalculateRoot() ([32]byte, error) { size := ds.depositCount index := len(ds.finalized) root := trie.ZeroHashes[0] - for i := 0; i < DepositContractDepth; i++ { + for i := range DepositContractDepth { if (size & 1) == 1 { if index == 0 { break diff --git a/beacon-chain/cache/skip_slot_cache_test.go b/beacon-chain/cache/skip_slot_cache_test.go index 6e364d1116..f8cf9c680d 100644 --- a/beacon-chain/cache/skip_slot_cache_test.go +++ b/beacon-chain/cache/skip_slot_cache_test.go @@ -47,15 +47,13 @@ func TestSkipSlotCache_DisabledAndEnabled(t *testing.T) { c.Enable() wg := new(sync.WaitGroup) - wg.Add(1) - go func() { + wg.Go(func() { // Get call will only terminate when // it is not longer in progress. obj, err := c.Get(ctx, r) require.NoError(t, err) require.IsNil(t, obj) - wg.Done() - }() + }) c.MarkNotInProgress(r) wg.Wait() diff --git a/beacon-chain/cache/sync_committee.go b/beacon-chain/cache/sync_committee.go index 8991a0022e..4ba9f5730e 100644 --- a/beacon-chain/cache/sync_committee.go +++ b/beacon-chain/cache/sync_committee.go @@ -236,7 +236,7 @@ func (s *SyncCommitteeCache) UpdatePositionsInCommittee(syncCommitteeBoundaryRoo // Given the `syncCommitteeIndexPosition` object, this returns the key of the object. // The key is the `currentSyncCommitteeRoot` within the field. // Error gets returned if input does not comply with `currentSyncCommitteeRoot` object. -func keyFn(obj interface{}) (string, error) { +func keyFn(obj any) (string, error) { info, ok := obj.(*syncCommitteeIndexPosition) if !ok { return "", errNotSyncCommitteeIndexPosition diff --git a/beacon-chain/cache/sync_subnet_ids_test.go b/beacon-chain/cache/sync_subnet_ids_test.go index 07122a4bef..bace4a20fb 100644 --- a/beacon-chain/cache/sync_subnet_ids_test.go +++ b/beacon-chain/cache/sync_subnet_ids_test.go @@ -12,12 +12,12 @@ import ( func TestSyncSubnetIDsCache_Roundtrip(t *testing.T) { c := newSyncSubnetIDs() - for i := 0; i < 20; i++ { + for i := range 20 { pubkey := [fieldparams.BLSPubkeyLength]byte{byte(i)} c.AddSyncCommitteeSubnets(pubkey[:], 100, []uint64{uint64(i)}, 0) } - for i := uint64(0); i < 20; i++ { + for i := range uint64(20) { pubkey := [fieldparams.BLSPubkeyLength]byte{byte(i)} idxs, _, ok, _ := c.GetSyncCommitteeSubnets(pubkey[:], 100) @@ -34,7 +34,7 @@ func TestSyncSubnetIDsCache_Roundtrip(t *testing.T) { func TestSyncSubnetIDsCache_ValidateCurrentEpoch(t *testing.T) { c := newSyncSubnetIDs() - for i := 0; i < 20; i++ { + for i := range 20 { pubkey := [fieldparams.BLSPubkeyLength]byte{byte(i)} c.AddSyncCommitteeSubnets(pubkey[:], 100, []uint64{uint64(i)}, 0) } @@ -42,7 +42,7 @@ func TestSyncSubnetIDsCache_ValidateCurrentEpoch(t *testing.T) { coms := c.GetAllSubnets(50) assert.Equal(t, 0, len(coms)) - for i := uint64(0); i < 20; i++ { + for i := range uint64(20) { pubkey := [fieldparams.BLSPubkeyLength]byte{byte(i)} _, jEpoch, ok, _ := c.GetSyncCommitteeSubnets(pubkey[:], 100) diff --git a/beacon-chain/core/altair/attestation_test.go b/beacon-chain/core/altair/attestation_test.go index 4b3f4916e7..8ceb34810f 100644 --- a/beacon-chain/core/altair/attestation_test.go +++ b/beacon-chain/core/altair/attestation_test.go @@ -461,7 +461,7 @@ func TestFuzzProcessAttestationsNoVerify_10000(t *testing.T) { fuzzer := gofuzz.NewWithSeed(0) st 
:= ðpb.BeaconStateAltair{} b := ðpb.SignedBeaconBlockAltair{Block: ðpb.BeaconBlockAltair{}} - for i := 0; i < 10000; i++ { + for i := range 10000 { fuzzer.Fuzz(st) fuzzer.Fuzz(b) if b.Block == nil { diff --git a/beacon-chain/core/altair/block_test.go b/beacon-chain/core/altair/block_test.go index 51867fefd9..188a9dbb22 100644 --- a/beacon-chain/core/altair/block_test.go +++ b/beacon-chain/core/altair/block_test.go @@ -240,7 +240,7 @@ func TestProcessSyncCommittee_processSyncAggregate(t *testing.T) { proposerIndex, err := helpers.BeaconProposerIndex(t.Context(), beaconState) require.NoError(t, err) - for i := 0; i < len(syncBits); i++ { + for i := range syncBits { if syncBits.BitAt(uint64(i)) { pk := bytesutil.ToBytes48(committeeKeys[i]) require.DeepEqual(t, true, votedMap[pk]) diff --git a/beacon-chain/core/altair/deposit.go b/beacon-chain/core/altair/deposit.go index 44fa4e7ff7..c23c879b8c 100644 --- a/beacon-chain/core/altair/deposit.go +++ b/beacon-chain/core/altair/deposit.go @@ -195,10 +195,7 @@ func AddValidatorToRegistry(beaconState state.BeaconState, pubKey []byte, withdr // withdrawable_epoch=FAR_FUTURE_EPOCH, // ) func GetValidatorFromDeposit(pubKey []byte, withdrawalCredentials []byte, amount uint64) *ethpb.Validator { - effectiveBalance := amount - (amount % params.BeaconConfig().EffectiveBalanceIncrement) - if params.BeaconConfig().MaxEffectiveBalance < effectiveBalance { - effectiveBalance = params.BeaconConfig().MaxEffectiveBalance - } + effectiveBalance := min(params.BeaconConfig().MaxEffectiveBalance, amount-(amount%params.BeaconConfig().EffectiveBalanceIncrement)) return ðpb.Validator{ PublicKey: pubKey, diff --git a/beacon-chain/core/altair/deposit_fuzz_test.go b/beacon-chain/core/altair/deposit_fuzz_test.go index b45b3347ba..588b5a6f7c 100644 --- a/beacon-chain/core/altair/deposit_fuzz_test.go +++ b/beacon-chain/core/altair/deposit_fuzz_test.go @@ -16,7 +16,7 @@ func TestFuzzProcessDeposits_10000(t *testing.T) { state := ðpb.BeaconStateAltair{} deposits := make([]*ethpb.Deposit, 100) ctx := t.Context() - for i := 0; i < 10000; i++ { + for i := range 10000 { fuzzer.Fuzz(state) for i := range deposits { fuzzer.Fuzz(deposits[i]) @@ -37,7 +37,7 @@ func TestFuzzProcessPreGenesisDeposit_10000(t *testing.T) { deposit := ðpb.Deposit{} ctx := t.Context() - for i := 0; i < 10000; i++ { + for i := range 10000 { fuzzer.Fuzz(state) fuzzer.Fuzz(deposit) s, err := state_native.InitializeFromProtoUnsafeAltair(state) @@ -56,7 +56,7 @@ func TestFuzzProcessPreGenesisDeposit_Phase0_10000(t *testing.T) { deposit := ðpb.Deposit{} ctx := t.Context() - for i := 0; i < 10000; i++ { + for i := range 10000 { fuzzer.Fuzz(state) fuzzer.Fuzz(deposit) s, err := state_native.InitializeFromProtoUnsafePhase0(state) @@ -74,7 +74,7 @@ func TestFuzzProcessDeposit_Phase0_10000(t *testing.T) { state := ðpb.BeaconState{} deposit := ðpb.Deposit{} - for i := 0; i < 10000; i++ { + for i := range 10000 { fuzzer.Fuzz(state) fuzzer.Fuzz(deposit) s, err := state_native.InitializeFromProtoUnsafePhase0(state) @@ -92,7 +92,7 @@ func TestFuzzProcessDeposit_10000(t *testing.T) { state := ðpb.BeaconStateAltair{} deposit := ðpb.Deposit{} - for i := 0; i < 10000; i++ { + for i := range 10000 { fuzzer.Fuzz(state) fuzzer.Fuzz(deposit) s, err := state_native.InitializeFromProtoUnsafeAltair(state) diff --git a/beacon-chain/core/altair/epoch_precompute.go b/beacon-chain/core/altair/epoch_precompute.go index 55ce28bbd1..aa70b8f8a8 100644 --- a/beacon-chain/core/altair/epoch_precompute.go +++ 
b/beacon-chain/core/altair/epoch_precompute.go @@ -122,11 +122,8 @@ func ProcessInactivityScores( } if !helpers.IsInInactivityLeak(prevEpoch, finalizedEpoch) { - score := recoveryRate // Prevents underflow below 0. - if score > v.InactivityScore { - score = v.InactivityScore - } + score := min(recoveryRate, v.InactivityScore) v.InactivityScore -= score } inactivityScores[i] = v.InactivityScore @@ -242,7 +239,7 @@ func ProcessRewardsAndPenaltiesPrecompute( } balances := beaconState.Balances() - for i := 0; i < numOfVals; i++ { + for i := range numOfVals { vals[i].BeforeEpochTransitionBalance = balances[i] // Compute the post balance of the validator after accounting for the diff --git a/beacon-chain/core/altair/sync_committee_test.go b/beacon-chain/core/altair/sync_committee_test.go index 096f4712b6..71b5e3646c 100644 --- a/beacon-chain/core/altair/sync_committee_test.go +++ b/beacon-chain/core/altair/sync_committee_test.go @@ -21,7 +21,7 @@ import ( func TestSyncCommitteeIndices_CanGet(t *testing.T) { getState := func(t *testing.T, count uint64, vers int) state.BeaconState { validators := make([]*ethpb.Validator, count) - for i := 0; i < len(validators); i++ { + for i := range validators { validators[i] = ðpb.Validator{ ExitEpoch: params.BeaconConfig().FarFutureEpoch, EffectiveBalance: params.BeaconConfig().MinDepositAmount, @@ -113,7 +113,7 @@ func TestSyncCommitteeIndices_DifferentPeriods(t *testing.T) { helpers.ClearCache() getState := func(t *testing.T, count uint64) state.BeaconState { validators := make([]*ethpb.Validator, count) - for i := 0; i < len(validators); i++ { + for i := range validators { validators[i] = ðpb.Validator{ ExitEpoch: params.BeaconConfig().FarFutureEpoch, EffectiveBalance: params.BeaconConfig().MinDepositAmount, @@ -147,7 +147,7 @@ func TestSyncCommitteeIndices_DifferentPeriods(t *testing.T) { func TestSyncCommittee_CanGet(t *testing.T) { getState := func(t *testing.T, count uint64) state.BeaconState { validators := make([]*ethpb.Validator, count) - for i := 0; i < len(validators); i++ { + for i := range validators { blsKey, err := bls.RandKey() require.NoError(t, err) validators[i] = ðpb.Validator{ @@ -394,7 +394,7 @@ func Test_ValidateSyncMessageTime(t *testing.T) { func getState(t *testing.T, count uint64) state.BeaconState { validators := make([]*ethpb.Validator, count) - for i := 0; i < len(validators); i++ { + for i := range validators { blsKey, err := bls.RandKey() require.NoError(t, err) validators[i] = ðpb.Validator{ diff --git a/beacon-chain/core/altair/upgrade_test.go b/beacon-chain/core/altair/upgrade_test.go index cb9e3e9cd9..836f777145 100644 --- a/beacon-chain/core/altair/upgrade_test.go +++ b/beacon-chain/core/altair/upgrade_test.go @@ -33,7 +33,7 @@ func TestTranslateParticipation(t *testing.T) { r, err := helpers.BlockRootAtSlot(s, 0) require.NoError(t, err) var pendingAtts []*ethpb.PendingAttestation - for i := 0; i < 3; i++ { + for i := range 3 { pendingAtts = append(pendingAtts, ðpb.PendingAttestation{ Data: ðpb.AttestationData{ CommitteeIndex: primitives.CommitteeIndex(i), diff --git a/beacon-chain/core/blocks/attestation.go b/beacon-chain/core/blocks/attestation.go index 6ad6a0f965..858464603d 100644 --- a/beacon-chain/core/blocks/attestation.go +++ b/beacon-chain/core/blocks/attestation.go @@ -257,7 +257,7 @@ func VerifyIndexedAttestation(ctx context.Context, beaconState state.ReadOnlyBea } indices := indexedAtt.GetAttestingIndices() var pubkeys []bls.PublicKey - for i := 0; i < len(indices); i++ { + for i := range indices { pubkeyAtIdx 
:= beaconState.PubkeyAtIndex(primitives.ValidatorIndex(indices[i])) pk, err := bls.PublicKeyFromBytes(pubkeyAtIdx[:]) if err != nil { diff --git a/beacon-chain/core/blocks/attestation_test.go b/beacon-chain/core/blocks/attestation_test.go index 7d3af6f02e..24a068ef91 100644 --- a/beacon-chain/core/blocks/attestation_test.go +++ b/beacon-chain/core/blocks/attestation_test.go @@ -317,7 +317,7 @@ func TestVerifyAttestationNoVerifySignature_Electra(t *testing.T) { func TestConvertToIndexed_OK(t *testing.T) { helpers.ClearCache() validators := make([]*ethpb.Validator, 2*params.BeaconConfig().SlotsPerEpoch) - for i := 0; i < len(validators); i++ { + for i := range validators { validators[i] = ðpb.Validator{ ExitEpoch: params.BeaconConfig().FarFutureEpoch, } @@ -373,7 +373,7 @@ func TestVerifyIndexedAttestation_OK(t *testing.T) { validators := make([]*ethpb.Validator, numOfValidators) _, keys, err := util.DeterministicDepositsAndKeys(numOfValidators) require.NoError(t, err) - for i := 0; i < len(validators); i++ { + for i := range validators { validators[i] = ðpb.Validator{ ExitEpoch: params.BeaconConfig().FarFutureEpoch, PublicKey: keys[i].PublicKey().Marshal(), @@ -481,7 +481,7 @@ func TestValidateIndexedAttestation_BadAttestationsSignatureSet(t *testing.T) { sig := keys[0].Sign([]byte{'t', 'e', 's', 't'}) list := bitfield.Bitlist{0b11111} var atts []ethpb.Att - for i := uint64(0); i < 1000; i++ { + for range uint64(1000) { atts = append(atts, ðpb.Attestation{ Data: ðpb.AttestationData{ CommitteeIndex: 1, @@ -498,7 +498,7 @@ func TestValidateIndexedAttestation_BadAttestationsSignatureSet(t *testing.T) { atts = []ethpb.Att{} list = bitfield.Bitlist{0b10000} - for i := uint64(0); i < 1000; i++ { + for range uint64(1000) { atts = append(atts, ðpb.Attestation{ Data: ðpb.AttestationData{ CommitteeIndex: 1, @@ -524,7 +524,7 @@ func TestVerifyAttestations_HandlesPlannedFork(t *testing.T) { validators := make([]*ethpb.Validator, numOfValidators) _, keys, err := util.DeterministicDepositsAndKeys(numOfValidators) require.NoError(t, err) - for i := 0; i < len(validators); i++ { + for i := range validators { validators[i] = ðpb.Validator{ ExitEpoch: params.BeaconConfig().FarFutureEpoch, PublicKey: keys[i].PublicKey().Marshal(), @@ -588,7 +588,7 @@ func TestRetrieveAttestationSignatureSet_VerifiesMultipleAttestations(t *testing validators := make([]*ethpb.Validator, numOfValidators) _, keys, err := util.DeterministicDepositsAndKeys(numOfValidators) require.NoError(t, err) - for i := 0; i < len(validators); i++ { + for i := range validators { validators[i] = ðpb.Validator{ ExitEpoch: params.BeaconConfig().FarFutureEpoch, PublicKey: keys[i].PublicKey().Marshal(), @@ -707,7 +707,7 @@ func TestRetrieveAttestationSignatureSet_AcrossFork(t *testing.T) { validators := make([]*ethpb.Validator, numOfValidators) _, keys, err := util.DeterministicDepositsAndKeys(numOfValidators) require.NoError(t, err) - for i := 0; i < len(validators); i++ { + for i := range validators { validators[i] = ðpb.Validator{ ExitEpoch: params.BeaconConfig().FarFutureEpoch, PublicKey: keys[i].PublicKey().Marshal(), diff --git a/beacon-chain/core/blocks/block_operations_fuzz_test.go b/beacon-chain/core/blocks/block_operations_fuzz_test.go index d65fb4c3a0..e12b1217e1 100644 --- a/beacon-chain/core/blocks/block_operations_fuzz_test.go +++ b/beacon-chain/core/blocks/block_operations_fuzz_test.go @@ -21,7 +21,7 @@ func TestFuzzProcessAttestationNoVerify_10000(t *testing.T) { state := ðpb.BeaconState{} att := ðpb.Attestation{} - for i := 0; i < 
10000; i++ { + for i := range 10000 { fuzzer.Fuzz(state) fuzzer.Fuzz(att) s, err := state_native.InitializeFromProtoUnsafePhase0(state) @@ -37,7 +37,7 @@ func TestFuzzProcessBlockHeader_10000(t *testing.T) { state := ðpb.BeaconState{} block := ðpb.SignedBeaconBlock{} - for i := 0; i < 10000; i++ { + for i := range 10000 { fuzzer.Fuzz(state) fuzzer.Fuzz(block) @@ -63,7 +63,7 @@ func TestFuzzverifyDepositDataSigningRoot_10000(_ *testing.T) { var p []byte var s []byte var d []byte - for i := 0; i < 10000; i++ { + for range 10000 { fuzzer.Fuzz(&ba) fuzzer.Fuzz(&pubkey) fuzzer.Fuzz(&sig) @@ -83,7 +83,7 @@ func TestFuzzProcessEth1DataInBlock_10000(t *testing.T) { e := ðpb.Eth1Data{} state, err := state_native.InitializeFromProtoUnsafePhase0(ðpb.BeaconState{}) require.NoError(t, err) - for i := 0; i < 10000; i++ { + for range 10000 { fuzzer.Fuzz(state) fuzzer.Fuzz(e) s, err := ProcessEth1DataInBlock(t.Context(), state, e) @@ -98,7 +98,7 @@ func TestFuzzareEth1DataEqual_10000(_ *testing.T) { eth1data := ðpb.Eth1Data{} eth1data2 := ðpb.Eth1Data{} - for i := 0; i < 10000; i++ { + for range 10000 { fuzzer.Fuzz(eth1data) fuzzer.Fuzz(eth1data2) AreEth1DataEqual(eth1data, eth1data2) @@ -110,7 +110,7 @@ func TestFuzzEth1DataHasEnoughSupport_10000(t *testing.T) { fuzzer := gofuzz.NewWithSeed(0) eth1data := ðpb.Eth1Data{} var stateVotes []*ethpb.Eth1Data - for i := 0; i < 100000; i++ { + for i := range 100000 { fuzzer.Fuzz(eth1data) fuzzer.Fuzz(&stateVotes) s, err := state_native.InitializeFromProtoPhase0(ðpb.BeaconState{ @@ -129,7 +129,7 @@ func TestFuzzProcessBlockHeaderNoVerify_10000(t *testing.T) { state := ðpb.BeaconState{} block := ðpb.BeaconBlock{} - for i := 0; i < 10000; i++ { + for i := range 10000 { fuzzer.Fuzz(state) fuzzer.Fuzz(block) s, err := state_native.InitializeFromProtoUnsafePhase0(state) @@ -145,7 +145,7 @@ func TestFuzzProcessRandao_10000(t *testing.T) { state := ðpb.BeaconState{} b := ðpb.SignedBeaconBlock{} - for i := 0; i < 10000; i++ { + for i := range 10000 { fuzzer.Fuzz(state) fuzzer.Fuzz(b) s, err := state_native.InitializeFromProtoUnsafePhase0(state) @@ -168,7 +168,7 @@ func TestFuzzProcessRandaoNoVerify_10000(t *testing.T) { state := ðpb.BeaconState{} blockBody := ðpb.BeaconBlockBody{} - for i := 0; i < 10000; i++ { + for i := range 10000 { fuzzer.Fuzz(state) fuzzer.Fuzz(blockBody) s, err := state_native.InitializeFromProtoUnsafePhase0(state) @@ -186,7 +186,7 @@ func TestFuzzProcessProposerSlashings_10000(t *testing.T) { state := ðpb.BeaconState{} p := ðpb.ProposerSlashing{} ctx := t.Context() - for i := 0; i < 10000; i++ { + for i := range 10000 { fuzzer.Fuzz(state) fuzzer.Fuzz(p) s, err := state_native.InitializeFromProtoUnsafePhase0(state) @@ -203,7 +203,7 @@ func TestFuzzVerifyProposerSlashing_10000(t *testing.T) { fuzzer := gofuzz.NewWithSeed(0) state := ðpb.BeaconState{} proposerSlashing := ðpb.ProposerSlashing{} - for i := 0; i < 10000; i++ { + for i := range 10000 { fuzzer.Fuzz(state) fuzzer.Fuzz(proposerSlashing) s, err := state_native.InitializeFromProtoUnsafePhase0(state) @@ -219,7 +219,7 @@ func TestFuzzProcessAttesterSlashings_10000(t *testing.T) { state := ðpb.BeaconState{} a := ðpb.AttesterSlashing{} ctx := t.Context() - for i := 0; i < 10000; i++ { + for i := range 10000 { fuzzer.Fuzz(state) fuzzer.Fuzz(a) s, err := state_native.InitializeFromProtoUnsafePhase0(state) @@ -237,7 +237,7 @@ func TestFuzzVerifyAttesterSlashing_10000(t *testing.T) { state := ðpb.BeaconState{} attesterSlashing := ðpb.AttesterSlashing{} ctx := t.Context() - for i := 0; i < 10000; i++ 
{ + for i := range 10000 { fuzzer.Fuzz(state) fuzzer.Fuzz(attesterSlashing) s, err := state_native.InitializeFromProtoUnsafePhase0(state) @@ -253,7 +253,7 @@ func TestFuzzIsSlashableAttestationData_10000(_ *testing.T) { attestationData := ðpb.AttestationData{} attestationData2 := ðpb.AttestationData{} - for i := 0; i < 10000; i++ { + for range 10000 { fuzzer.Fuzz(attestationData) fuzzer.Fuzz(attestationData2) IsSlashableAttestationData(attestationData, attestationData2) @@ -264,7 +264,7 @@ func TestFuzzslashableAttesterIndices_10000(_ *testing.T) { fuzzer := gofuzz.NewWithSeed(0) attesterSlashing := ðpb.AttesterSlashing{} - for i := 0; i < 10000; i++ { + for range 10000 { fuzzer.Fuzz(attesterSlashing) SlashableAttesterIndices(attesterSlashing) } @@ -275,7 +275,7 @@ func TestFuzzProcessAttestationsNoVerify_10000(t *testing.T) { state := ðpb.BeaconState{} b := ðpb.SignedBeaconBlock{} ctx := t.Context() - for i := 0; i < 10000; i++ { + for i := range 10000 { fuzzer.Fuzz(state) fuzzer.Fuzz(b) s, err := state_native.InitializeFromProtoUnsafePhase0(state) @@ -298,7 +298,7 @@ func TestFuzzVerifyIndexedAttestationn_10000(t *testing.T) { state := ðpb.BeaconState{} idxAttestation := ðpb.IndexedAttestation{} ctx := t.Context() - for i := 0; i < 10000; i++ { + for i := range 10000 { fuzzer.Fuzz(state) fuzzer.Fuzz(idxAttestation) s, err := state_native.InitializeFromProtoUnsafePhase0(state) @@ -313,7 +313,7 @@ func TestFuzzverifyDeposit_10000(t *testing.T) { fuzzer := gofuzz.NewWithSeed(0) state := ðpb.BeaconState{} deposit := ðpb.Deposit{} - for i := 0; i < 10000; i++ { + for i := range 10000 { fuzzer.Fuzz(state) fuzzer.Fuzz(deposit) s, err := state_native.InitializeFromProtoUnsafePhase0(state) @@ -329,7 +329,7 @@ func TestFuzzProcessVoluntaryExits_10000(t *testing.T) { state := ðpb.BeaconState{} e := ðpb.SignedVoluntaryExit{} ctx := t.Context() - for i := 0; i < 10000; i++ { + for i := range 10000 { fuzzer.Fuzz(state) fuzzer.Fuzz(e) s, err := state_native.InitializeFromProtoUnsafePhase0(state) @@ -346,7 +346,7 @@ func TestFuzzProcessVoluntaryExitsNoVerify_10000(t *testing.T) { fuzzer := gofuzz.NewWithSeed(0) state := ðpb.BeaconState{} e := ðpb.SignedVoluntaryExit{} - for i := 0; i < 10000; i++ { + for i := range 10000 { fuzzer.Fuzz(state) fuzzer.Fuzz(e) s, err := state_native.InitializeFromProtoUnsafePhase0(state) @@ -366,7 +366,7 @@ func TestFuzzVerifyExit_10000(t *testing.T) { fork := ðpb.Fork{} var slot primitives.Slot - for i := 0; i < 10000; i++ { + for i := range 10000 { fuzzer.Fuzz(ve) fuzzer.Fuzz(rawVal) fuzzer.Fuzz(fork) diff --git a/beacon-chain/core/blocks/eth1_data_test.go b/beacon-chain/core/blocks/eth1_data_test.go index b2ef805225..0cafe3cd46 100644 --- a/beacon-chain/core/blocks/eth1_data_test.go +++ b/beacon-chain/core/blocks/eth1_data_test.go @@ -19,7 +19,7 @@ import ( func FakeDeposits(n uint64) []*ethpb.Eth1Data { deposits := make([]*ethpb.Eth1Data, n) - for i := uint64(0); i < n; i++ { + for i := range n { deposits[i] = ðpb.Eth1Data{ DepositCount: 1, DepositRoot: bytesutil.PadTo([]byte("root"), 32), @@ -175,7 +175,7 @@ func TestProcessEth1Data_SetsCorrectly(t *testing.T) { } period := uint64(params.BeaconConfig().SlotsPerEpoch.Mul(uint64(params.BeaconConfig().EpochsPerEth1VotingPeriod))) - for i := uint64(0); i < period; i++ { + for range period { processedState, err := blocks.ProcessEth1DataInBlock(t.Context(), beaconState, b.Block.Body.Eth1Data) require.NoError(t, err) require.Equal(t, true, processedState.Version() == version.Phase0) diff --git 
a/beacon-chain/core/blocks/header_test.go b/beacon-chain/core/blocks/header_test.go index ac70b853d9..cf4480bcf6 100644 --- a/beacon-chain/core/blocks/header_test.go +++ b/beacon-chain/core/blocks/header_test.go @@ -27,7 +27,7 @@ func init() { func TestProcessBlockHeader_ImproperBlockSlot(t *testing.T) { validators := make([]*ethpb.Validator, params.BeaconConfig().MinGenesisActiveValidatorCount) - for i := 0; i < len(validators); i++ { + for i := range validators { validators[i] = ðpb.Validator{ PublicKey: make([]byte, 32), WithdrawalCredentials: make([]byte, 32), @@ -104,7 +104,7 @@ func TestProcessBlockHeader_WrongProposerSig(t *testing.T) { func TestProcessBlockHeader_DifferentSlots(t *testing.T) { validators := make([]*ethpb.Validator, params.BeaconConfig().MinGenesisActiveValidatorCount) - for i := 0; i < len(validators); i++ { + for i := range validators { validators[i] = ðpb.Validator{ PublicKey: make([]byte, 32), WithdrawalCredentials: make([]byte, 32), @@ -148,7 +148,7 @@ func TestProcessBlockHeader_DifferentSlots(t *testing.T) { func TestProcessBlockHeader_PreviousBlockRootNotSignedRoot(t *testing.T) { validators := make([]*ethpb.Validator, params.BeaconConfig().MinGenesisActiveValidatorCount) - for i := 0; i < len(validators); i++ { + for i := range validators { validators[i] = ðpb.Validator{ PublicKey: make([]byte, 48), WithdrawalCredentials: make([]byte, 32), @@ -189,7 +189,7 @@ func TestProcessBlockHeader_PreviousBlockRootNotSignedRoot(t *testing.T) { func TestProcessBlockHeader_SlashedProposer(t *testing.T) { validators := make([]*ethpb.Validator, params.BeaconConfig().MinGenesisActiveValidatorCount) - for i := 0; i < len(validators); i++ { + for i := range validators { validators[i] = ðpb.Validator{ PublicKey: make([]byte, 48), WithdrawalCredentials: make([]byte, 32), @@ -233,7 +233,7 @@ func TestProcessBlockHeader_SlashedProposer(t *testing.T) { func TestProcessBlockHeader_OK(t *testing.T) { validators := make([]*ethpb.Validator, params.BeaconConfig().MinGenesisActiveValidatorCount) - for i := 0; i < len(validators); i++ { + for i := range validators { validators[i] = ðpb.Validator{ PublicKey: make([]byte, 32), WithdrawalCredentials: make([]byte, 32), @@ -293,7 +293,7 @@ func TestProcessBlockHeader_OK(t *testing.T) { func TestBlockSignatureSet_OK(t *testing.T) { validators := make([]*ethpb.Validator, params.BeaconConfig().MinGenesisActiveValidatorCount) - for i := 0; i < len(validators); i++ { + for i := range validators { validators[i] = ðpb.Validator{ PublicKey: make([]byte, 32), WithdrawalCredentials: make([]byte, 32), diff --git a/beacon-chain/core/blocks/payload_test.go b/beacon-chain/core/blocks/payload_test.go index 0e9cb067f6..d5119e1299 100644 --- a/beacon-chain/core/blocks/payload_test.go +++ b/beacon-chain/core/blocks/payload_test.go @@ -851,8 +851,7 @@ func BenchmarkBellatrixComplete(b *testing.B) { require.NoError(b, err) require.NoError(b, st.SetLatestExecutionPayloadHeader(h)) - b.ResetTimer() - for i := 0; i < b.N; i++ { + for b.Loop() { _, err := blocks.IsMergeTransitionComplete(st) require.NoError(b, err) } diff --git a/beacon-chain/core/electra/churn_test.go b/beacon-chain/core/electra/churn_test.go index 2c28c11726..def4464ffd 100644 --- a/beacon-chain/core/electra/churn_test.go +++ b/beacon-chain/core/electra/churn_test.go @@ -28,7 +28,7 @@ func createValidatorsWithTotalActiveBalance(totalBal primitives.Gwei) []*eth.Val ActivationEpoch: primitives.Epoch(0), EffectiveBalance: params.BeaconConfig().MinActivationBalance, ExitEpoch: 
params.BeaconConfig().FarFutureEpoch, - PublicKey: []byte(fmt.Sprintf("val_%d", i)), + PublicKey: fmt.Appendf(nil, "val_%d", i), WithdrawableEpoch: params.BeaconConfig().FarFutureEpoch, WithdrawalCredentials: wd, } diff --git a/beacon-chain/core/electra/deposit_fuzz_test.go b/beacon-chain/core/electra/deposit_fuzz_test.go index 04a7ac2dd1..59343e3675 100644 --- a/beacon-chain/core/electra/deposit_fuzz_test.go +++ b/beacon-chain/core/electra/deposit_fuzz_test.go @@ -16,7 +16,7 @@ func TestFuzzProcessDeposits_10000(t *testing.T) { state := ðpb.BeaconStateElectra{} deposits := make([]*ethpb.Deposit, 100) ctx := t.Context() - for i := 0; i < 10000; i++ { + for i := range 10000 { fuzzer.Fuzz(state) for i := range deposits { fuzzer.Fuzz(deposits[i]) @@ -36,7 +36,7 @@ func TestFuzzProcessDeposit_10000(t *testing.T) { state := ðpb.BeaconStateElectra{} deposit := ðpb.Deposit{} - for i := 0; i < 10000; i++ { + for i := range 10000 { fuzzer.Fuzz(state) fuzzer.Fuzz(deposit) s, err := state_native.InitializeFromProtoUnsafeElectra(state) diff --git a/beacon-chain/core/electra/deposits_test.go b/beacon-chain/core/electra/deposits_test.go index 0c4d4658df..2c4a856452 100644 --- a/beacon-chain/core/electra/deposits_test.go +++ b/beacon-chain/core/electra/deposits_test.go @@ -95,7 +95,7 @@ func TestProcessPendingDeposits(t *testing.T) { require.NoError(t, err) require.Equal(t, primitives.Gwei(100), res) // Validators 0..9 should have their balance increased - for i := primitives.ValidatorIndex(0); i < 10; i++ { + for i := range primitives.ValidatorIndex(10) { b, err := st.BalanceAtIndex(i) require.NoError(t, err) require.Equal(t, params.BeaconConfig().MinActivationBalance+uint64(amountAvailForProcessing)/10, b) @@ -122,7 +122,7 @@ func TestProcessPendingDeposits(t *testing.T) { check: func(t *testing.T, st state.BeaconState) { amountAvailForProcessing := helpers.ActivationExitChurnLimit(1_000 * 1e9) // Validators 0..9 should have their balance increased - for i := primitives.ValidatorIndex(0); i < 2; i++ { + for i := range primitives.ValidatorIndex(2) { b, err := st.BalanceAtIndex(i) require.NoError(t, err) require.Equal(t, params.BeaconConfig().MinActivationBalance+uint64(amountAvailForProcessing), b) @@ -149,7 +149,7 @@ func TestProcessPendingDeposits(t *testing.T) { require.NoError(t, err) require.Equal(t, primitives.Gwei(0), res) // Validators 0..4 should have their balance increased - for i := primitives.ValidatorIndex(0); i < 4; i++ { + for i := range primitives.ValidatorIndex(4) { b, err := st.BalanceAtIndex(i) require.NoError(t, err) require.Equal(t, params.BeaconConfig().MinActivationBalance+uint64(amountAvailForProcessing)/5, b) @@ -528,7 +528,7 @@ func stateWithActiveBalanceETH(t *testing.T, balETH uint64) state.BeaconState { vals := make([]*eth.Validator, numVals) bals := make([]uint64, numVals) - for i := uint64(0); i < numVals; i++ { + for i := range numVals { wc := make([]byte, 32) wc[0] = params.BeaconConfig().ETH1AddressWithdrawalPrefixByte wc[31] = byte(i) diff --git a/beacon-chain/core/electra/registry_updates_test.go b/beacon-chain/core/electra/registry_updates_test.go index af393feac0..03883a072d 100644 --- a/beacon-chain/core/electra/registry_updates_test.go +++ b/beacon-chain/core/electra/registry_updates_test.go @@ -56,7 +56,7 @@ func TestProcessRegistryUpdates(t *testing.T) { Slot: 5 * params.BeaconConfig().SlotsPerEpoch, FinalizedCheckpoint: ð.Checkpoint{Epoch: finalizedEpoch, Root: make([]byte, fieldparams.RootLength)}, } - for i := uint64(0); i < 10; i++ { + for range uint64(10) 
{ base.Validators = append(base.Validators, ð.Validator{ ActivationEligibilityEpoch: finalizedEpoch, EffectiveBalance: params.BeaconConfig().MaxEffectiveBalance, @@ -82,7 +82,7 @@ func TestProcessRegistryUpdates(t *testing.T) { Slot: 5 * params.BeaconConfig().SlotsPerEpoch, FinalizedCheckpoint: ð.Checkpoint{Epoch: finalizedEpoch, Root: make([]byte, fieldparams.RootLength)}, } - for i := uint64(0); i < 10; i++ { + for range uint64(10) { base.Validators = append(base.Validators, ð.Validator{ EffectiveBalance: params.BeaconConfig().EjectionBalance - 1, ExitEpoch: params.BeaconConfig().FarFutureEpoch, @@ -108,7 +108,7 @@ func TestProcessRegistryUpdates(t *testing.T) { Slot: 5 * params.BeaconConfig().SlotsPerEpoch, FinalizedCheckpoint: ð.Checkpoint{Epoch: finalizedEpoch, Root: make([]byte, fieldparams.RootLength)}, } - for i := uint64(0); i < 10; i++ { + for range uint64(10) { base.Validators = append(base.Validators, ð.Validator{ EffectiveBalance: params.BeaconConfig().EjectionBalance - 1, ExitEpoch: 10, @@ -157,7 +157,7 @@ func Benchmark_ProcessRegistryUpdates_MassEjection(b *testing.B) { st, err := util.NewBeaconStateElectra() require.NoError(b, err) - for i := 0; i < b.N; i++ { + for b.Loop() { b.StopTimer() if err := st.SetValidators(genValidators(100000)); err != nil { panic(err) diff --git a/beacon-chain/core/epoch/epoch_processing.go b/beacon-chain/core/epoch/epoch_processing.go index a5b097e5c4..dc246242c0 100644 --- a/beacon-chain/core/epoch/epoch_processing.go +++ b/beacon-chain/core/epoch/epoch_processing.go @@ -329,10 +329,7 @@ func ProcessEffectiveBalanceUpdates(st state.BeaconState) (state.BeaconState, er balance := bals[idx] if balance+downwardThreshold < val.EffectiveBalance() || val.EffectiveBalance()+upwardThreshold < balance { - effectiveBal := maxEffBalance - if effectiveBal > balance-balance%effBalanceInc { - effectiveBal = balance - balance%effBalanceInc - } + effectiveBal := min(maxEffBalance, balance-balance%effBalanceInc) if effectiveBal != val.EffectiveBalance() { newVal = val.Copy() newVal.EffectiveBalance = effectiveBal diff --git a/beacon-chain/core/epoch/epoch_processing_fuzz_test.go b/beacon-chain/core/epoch/epoch_processing_fuzz_test.go index de84b7b14e..a4ceb91f2d 100644 --- a/beacon-chain/core/epoch/epoch_processing_fuzz_test.go +++ b/beacon-chain/core/epoch/epoch_processing_fuzz_test.go @@ -14,7 +14,7 @@ func TestFuzzFinalUpdates_10000(t *testing.T) { fuzzer := gofuzz.NewWithSeed(0) base := ðpb.BeaconState{} - for i := 0; i < 10000; i++ { + for i := range 10000 { fuzzer.Fuzz(base) s, err := state_native.InitializeFromProtoUnsafePhase0(base) require.NoError(t, err) diff --git a/beacon-chain/core/epoch/epoch_processing_test.go b/beacon-chain/core/epoch/epoch_processing_test.go index dd2739a421..0c82afc087 100644 --- a/beacon-chain/core/epoch/epoch_processing_test.go +++ b/beacon-chain/core/epoch/epoch_processing_test.go @@ -218,7 +218,7 @@ func TestProcessRegistryUpdates_EligibleToActivate_Cancun(t *testing.T) { cfg.ChurnLimitQuotient = 1 params.OverrideBeaconConfig(cfg) - for i := uint64(0); i < 10; i++ { + for range uint64(10) { base.Validators = append(base.Validators, ðpb.Validator{ ActivationEligibilityEpoch: finalizedEpoch, EffectiveBalance: params.BeaconConfig().MaxEffectiveBalance, @@ -314,28 +314,28 @@ func TestProcessRegistryUpdates_CanExits(t *testing.T) { func buildState(t testing.TB, slot primitives.Slot, validatorCount uint64) state.BeaconState { validators := make([]*ethpb.Validator, validatorCount) - for i := 0; i < len(validators); i++ { + for 
i := range validators { validators[i] = ðpb.Validator{ ExitEpoch: params.BeaconConfig().FarFutureEpoch, EffectiveBalance: params.BeaconConfig().MaxEffectiveBalance, } } validatorBalances := make([]uint64, len(validators)) - for i := 0; i < len(validatorBalances); i++ { + for i := range validatorBalances { validatorBalances[i] = params.BeaconConfig().MaxEffectiveBalance } latestActiveIndexRoots := make( [][]byte, params.BeaconConfig().EpochsPerHistoricalVector, ) - for i := 0; i < len(latestActiveIndexRoots); i++ { + for i := range latestActiveIndexRoots { latestActiveIndexRoots[i] = params.BeaconConfig().ZeroHash[:] } latestRandaoMixes := make( [][]byte, params.BeaconConfig().EpochsPerHistoricalVector, ) - for i := 0; i < len(latestRandaoMixes); i++ { + for i := range latestRandaoMixes { latestRandaoMixes[i] = params.BeaconConfig().ZeroHash[:] } s, err := util.NewBeaconState() diff --git a/beacon-chain/core/epoch/precompute/justification_finalization_test.go b/beacon-chain/core/epoch/precompute/justification_finalization_test.go index a0222d7f31..0c2cece56b 100644 --- a/beacon-chain/core/epoch/precompute/justification_finalization_test.go +++ b/beacon-chain/core/epoch/precompute/justification_finalization_test.go @@ -19,7 +19,7 @@ func TestProcessJustificationAndFinalizationPreCompute_ConsecutiveEpochs(t *test e := params.BeaconConfig().FarFutureEpoch a := params.BeaconConfig().MaxEffectiveBalance blockRoots := make([][]byte, params.BeaconConfig().SlotsPerEpoch*2+1) - for i := 0; i < len(blockRoots); i++ { + for i := range blockRoots { blockRoots[i] = []byte{byte(i)} } base := ðpb.BeaconState{ @@ -56,7 +56,7 @@ func TestProcessJustificationAndFinalizationPreCompute_JustifyCurrentEpoch(t *te e := params.BeaconConfig().FarFutureEpoch a := params.BeaconConfig().MaxEffectiveBalance blockRoots := make([][]byte, params.BeaconConfig().SlotsPerEpoch*2+1) - for i := 0; i < len(blockRoots); i++ { + for i := range blockRoots { blockRoots[i] = []byte{byte(i)} } base := ðpb.BeaconState{ @@ -93,7 +93,7 @@ func TestProcessJustificationAndFinalizationPreCompute_JustifyPrevEpoch(t *testi e := params.BeaconConfig().FarFutureEpoch a := params.BeaconConfig().MaxEffectiveBalance blockRoots := make([][]byte, params.BeaconConfig().SlotsPerEpoch*2+1) - for i := 0; i < len(blockRoots); i++ { + for i := range blockRoots { blockRoots[i] = []byte{byte(i)} } base := ðpb.BeaconState{ @@ -128,7 +128,7 @@ func TestProcessJustificationAndFinalizationPreCompute_JustifyPrevEpoch(t *testi func TestUnrealizedCheckpoints(t *testing.T) { validators := make([]*ethpb.Validator, params.BeaconConfig().MinGenesisActiveValidatorCount) balances := make([]uint64, len(validators)) - for i := 0; i < len(validators); i++ { + for i := range validators { validators[i] = ðpb.Validator{ ExitEpoch: params.BeaconConfig().FarFutureEpoch, EffectiveBalance: params.BeaconConfig().MaxEffectiveBalance, diff --git a/beacon-chain/core/epoch/precompute/reward_penalty.go b/beacon-chain/core/epoch/precompute/reward_penalty.go index 6643719a04..7c674e5ed4 100644 --- a/beacon-chain/core/epoch/precompute/reward_penalty.go +++ b/beacon-chain/core/epoch/precompute/reward_penalty.go @@ -42,7 +42,7 @@ func ProcessRewardsAndPenaltiesPrecompute( return nil, errors.Wrap(err, "could not get proposer attestation delta") } validatorBals := state.Balances() - for i := 0; i < numOfVals; i++ { + for i := range numOfVals { vp[i].BeforeEpochTransitionBalance = validatorBals[i] // Compute the post balance of the validator after accounting for the diff --git 
a/beacon-chain/core/epoch/precompute/reward_penalty_test.go b/beacon-chain/core/epoch/precompute/reward_penalty_test.go index 83f600ff5d..7cc0762ace 100644 --- a/beacon-chain/core/epoch/precompute/reward_penalty_test.go +++ b/beacon-chain/core/epoch/precompute/reward_penalty_test.go @@ -24,7 +24,7 @@ func TestProcessRewardsAndPenaltiesPrecompute(t *testing.T) { validatorCount := uint64(2048) base := buildState(e+3, validatorCount) atts := make([]*ethpb.PendingAttestation, 3) - for i := 0; i < len(atts); i++ { + for i := range atts { atts[i] = ðpb.PendingAttestation{ Data: ðpb.AttestationData{ Target: ðpb.Checkpoint{Root: make([]byte, fieldparams.RootLength)}, @@ -63,7 +63,7 @@ func TestAttestationDeltas_ZeroEpoch(t *testing.T) { base := buildState(e+2, validatorCount) atts := make([]*ethpb.PendingAttestation, 3) var emptyRoot [32]byte - for i := 0; i < len(atts); i++ { + for i := range atts { atts[i] = ðpb.PendingAttestation{ Data: ðpb.AttestationData{ Target: ðpb.Checkpoint{ @@ -99,7 +99,7 @@ func TestAttestationDeltas_ZeroInclusionDelay(t *testing.T) { base := buildState(e+2, validatorCount) atts := make([]*ethpb.PendingAttestation, 3) var emptyRoot [32]byte - for i := 0; i < len(atts); i++ { + for i := range atts { atts[i] = ðpb.PendingAttestation{ Data: ðpb.AttestationData{ Target: ðpb.Checkpoint{ @@ -131,7 +131,7 @@ func TestProcessRewardsAndPenaltiesPrecompute_SlashedInactivePenalty(t *testing. validatorCount := uint64(2048) base := buildState(e+3, validatorCount) atts := make([]*ethpb.PendingAttestation, 3) - for i := 0; i < len(atts); i++ { + for i := range atts { atts[i] = ðpb.PendingAttestation{ Data: ðpb.AttestationData{ Target: ðpb.Checkpoint{Root: make([]byte, fieldparams.RootLength)}, @@ -176,28 +176,28 @@ func TestProcessRewardsAndPenaltiesPrecompute_SlashedInactivePenalty(t *testing. func buildState(slot primitives.Slot, validatorCount uint64) *ethpb.BeaconState { validators := make([]*ethpb.Validator, validatorCount) - for i := 0; i < len(validators); i++ { + for i := range validators { validators[i] = ðpb.Validator{ ExitEpoch: params.BeaconConfig().FarFutureEpoch, EffectiveBalance: params.BeaconConfig().MaxEffectiveBalance, } } validatorBalances := make([]uint64, len(validators)) - for i := 0; i < len(validatorBalances); i++ { + for i := range validatorBalances { validatorBalances[i] = params.BeaconConfig().MaxEffectiveBalance } latestActiveIndexRoots := make( [][]byte, params.BeaconConfig().EpochsPerHistoricalVector, ) - for i := 0; i < len(latestActiveIndexRoots); i++ { + for i := range latestActiveIndexRoots { latestActiveIndexRoots[i] = params.BeaconConfig().ZeroHash[:] } latestRandaoMixes := make( [][]byte, params.BeaconConfig().EpochsPerHistoricalVector, ) - for i := 0; i < len(latestRandaoMixes); i++ { + for i := range latestRandaoMixes { latestRandaoMixes[i] = params.BeaconConfig().ZeroHash[:] } return ðpb.BeaconState{ diff --git a/beacon-chain/core/feed/event.go b/beacon-chain/core/feed/event.go index 871a3d562e..eadaa8c756 100644 --- a/beacon-chain/core/feed/event.go +++ b/beacon-chain/core/feed/event.go @@ -17,5 +17,5 @@ type Event struct { // Type is the type of event. Type EventType // Data is event-specific data. 
- Data interface{} + Data any } diff --git a/beacon-chain/core/helpers/attestation_test.go b/beacon-chain/core/helpers/attestation_test.go index 16d86f8e1e..8d861f2e5c 100644 --- a/beacon-chain/core/helpers/attestation_test.go +++ b/beacon-chain/core/helpers/attestation_test.go @@ -54,7 +54,7 @@ func TestAttestation_ComputeSubnetForAttestation(t *testing.T) { validatorCount := committeeCount * params.BeaconConfig().TargetCommitteeSize validators := make([]*ethpb.Validator, validatorCount) - for i := 0; i < len(validators); i++ { + for i := range validators { k := make([]byte, 48) copy(k, strconv.Itoa(i)) validators[i] = ðpb.Validator{ diff --git a/beacon-chain/core/helpers/beacon_committee.go b/beacon-chain/core/helpers/beacon_committee.go index fd04aef284..83565aa570 100644 --- a/beacon-chain/core/helpers/beacon_committee.go +++ b/beacon-chain/core/helpers/beacon_committee.go @@ -5,7 +5,7 @@ package helpers import ( "context" "fmt" - "sort" + "slices" "github.com/OffchainLabs/go-bitfield" "github.com/OffchainLabs/prysm/v7/beacon-chain/cache" @@ -515,9 +515,7 @@ func UpdateCommitteeCache(ctx context.Context, state state.ReadOnlyBeaconState, // used for failing verify signature fallback. sortedIndices := make([]primitives.ValidatorIndex, len(shuffledIndices)) copy(sortedIndices, shuffledIndices) - sort.Slice(sortedIndices, func(i, j int) bool { - return sortedIndices[i] < sortedIndices[j] - }) + slices.Sort(sortedIndices) if err := committeeCache.AddCommitteeShuffledList(ctx, &cache.Committees{ ShuffledIndices: shuffledIndices, diff --git a/beacon-chain/core/helpers/beacon_committee_test.go b/beacon-chain/core/helpers/beacon_committee_test.go index b805b63a56..2d83d19d53 100644 --- a/beacon-chain/core/helpers/beacon_committee_test.go +++ b/beacon-chain/core/helpers/beacon_committee_test.go @@ -29,7 +29,7 @@ func TestComputeCommittee_WithoutCache(t *testing.T) { validatorCount := committeeCount * params.BeaconConfig().TargetCommitteeSize validators := make([]*ethpb.Validator, validatorCount) - for i := 0; i < len(validators); i++ { + for i := range validators { k := make([]byte, 48) copy(k, strconv.Itoa(i)) validators[i] = ðpb.Validator{ @@ -122,7 +122,7 @@ func TestCommitteeAssignments_NoProposerForSlot0(t *testing.T) { helpers.ClearCache() validators := make([]*ethpb.Validator, 4*params.BeaconConfig().SlotsPerEpoch) - for i := 0; i < len(validators); i++ { + for i := range validators { var activationEpoch primitives.Epoch if i >= len(validators)/2 { activationEpoch = 3 @@ -151,7 +151,7 @@ func TestCommitteeAssignments_CanRetrieve(t *testing.T) { // Initialize test with 256 validators, each slot and each index gets 4 validators. validators := make([]*ethpb.Validator, 4*params.BeaconConfig().SlotsPerEpoch) validatorIndices := make([]primitives.ValidatorIndex, len(validators)) - for i := 0; i < len(validators); i++ { + for i := range validators { // First 2 epochs only half validators are activated. var activationEpoch primitives.Epoch if i >= len(validators)/2 { @@ -234,7 +234,7 @@ func TestCommitteeAssignments_CannotRetrieveFuture(t *testing.T) { // Initialize test with 256 validators, each slot and each index gets 4 validators. validators := make([]*ethpb.Validator, 4*params.BeaconConfig().SlotsPerEpoch) - for i := 0; i < len(validators); i++ { + for i := range validators { // First 2 epochs only half validators are activated. 
var activationEpoch primitives.Epoch if i >= len(validators)/2 { @@ -266,7 +266,7 @@ func TestCommitteeAssignments_CannotRetrieveOlderThanSlotsPerHistoricalRoot(t *t // Initialize test with 256 validators, each slot and each index gets 4 validators. validators := make([]*ethpb.Validator, 4*params.BeaconConfig().SlotsPerEpoch) - for i := 0; i < len(validators); i++ { + for i := range validators { validators[i] = ðpb.Validator{ ExitEpoch: params.BeaconConfig().FarFutureEpoch, } @@ -287,7 +287,7 @@ func TestCommitteeAssignments_EverySlotHasMin1Proposer(t *testing.T) { // Initialize test with 256 validators, each slot and each index gets 4 validators. validators := make([]*ethpb.Validator, 4*params.BeaconConfig().SlotsPerEpoch) - for i := 0; i < len(validators); i++ { + for i := range validators { validators[i] = ðpb.Validator{ ActivationEpoch: 0, ExitEpoch: params.BeaconConfig().FarFutureEpoch, @@ -323,7 +323,7 @@ func TestCommitteeAssignments_EverySlotHasMin1Proposer(t *testing.T) { func TestVerifyAttestationBitfieldLengths_OK(t *testing.T) { validators := make([]*ethpb.Validator, 2*params.BeaconConfig().SlotsPerEpoch) activeRoots := make([][]byte, params.BeaconConfig().EpochsPerHistoricalVector) - for i := 0; i < len(validators); i++ { + for i := range validators { validators[i] = ðpb.Validator{ ExitEpoch: params.BeaconConfig().FarFutureEpoch, } @@ -489,7 +489,7 @@ func TestUpdateCommitteeCache_CanUpdateAcrossEpochs(t *testing.T) { func BenchmarkComputeCommittee300000_WithPreCache(b *testing.B) { validators := make([]*ethpb.Validator, 300000) - for i := 0; i < len(validators); i++ { + for i := range validators { validators[i] = ðpb.Validator{ ExitEpoch: params.BeaconConfig().FarFutureEpoch, } @@ -512,8 +512,7 @@ func BenchmarkComputeCommittee300000_WithPreCache(b *testing.B) { panic(err) } - b.ResetTimer() - for n := 0; n < b.N; n++ { + for b.Loop() { _, err := helpers.ComputeCommittee(indices, seed, index, params.BeaconConfig().MaxCommitteesPerSlot) if err != nil { panic(err) @@ -523,7 +522,7 @@ func BenchmarkComputeCommittee300000_WithPreCache(b *testing.B) { func BenchmarkComputeCommittee3000000_WithPreCache(b *testing.B) { validators := make([]*ethpb.Validator, 3000000) - for i := 0; i < len(validators); i++ { + for i := range validators { validators[i] = ðpb.Validator{ ExitEpoch: params.BeaconConfig().FarFutureEpoch, } @@ -546,8 +545,7 @@ func BenchmarkComputeCommittee3000000_WithPreCache(b *testing.B) { panic(err) } - b.ResetTimer() - for n := 0; n < b.N; n++ { + for b.Loop() { _, err := helpers.ComputeCommittee(indices, seed, index, params.BeaconConfig().MaxCommitteesPerSlot) if err != nil { panic(err) @@ -557,7 +555,7 @@ func BenchmarkComputeCommittee3000000_WithPreCache(b *testing.B) { func BenchmarkComputeCommittee128000_WithOutPreCache(b *testing.B) { validators := make([]*ethpb.Validator, 128000) - for i := 0; i < len(validators); i++ { + for i := range validators { validators[i] = ðpb.Validator{ ExitEpoch: params.BeaconConfig().FarFutureEpoch, } @@ -576,8 +574,8 @@ func BenchmarkComputeCommittee128000_WithOutPreCache(b *testing.B) { i := uint64(0) index := uint64(0) - b.ResetTimer() - for n := 0; n < b.N; n++ { + + for b.Loop() { i++ _, err := helpers.ComputeCommittee(indices, seed, index, params.BeaconConfig().MaxCommitteesPerSlot) if err != nil { @@ -592,7 +590,7 @@ func BenchmarkComputeCommittee128000_WithOutPreCache(b *testing.B) { func BenchmarkComputeCommittee1000000_WithOutCache(b *testing.B) { validators := make([]*ethpb.Validator, 1000000) - for i := 0; i < 
len(validators); i++ { + for i := range validators { validators[i] = ðpb.Validator{ ExitEpoch: params.BeaconConfig().FarFutureEpoch, } @@ -611,8 +609,8 @@ func BenchmarkComputeCommittee1000000_WithOutCache(b *testing.B) { i := uint64(0) index := uint64(0) - b.ResetTimer() - for n := 0; n < b.N; n++ { + + for b.Loop() { i++ _, err := helpers.ComputeCommittee(indices, seed, index, params.BeaconConfig().MaxCommitteesPerSlot) if err != nil { @@ -627,7 +625,7 @@ func BenchmarkComputeCommittee1000000_WithOutCache(b *testing.B) { func BenchmarkComputeCommittee4000000_WithOutCache(b *testing.B) { validators := make([]*ethpb.Validator, 4000000) - for i := 0; i < len(validators); i++ { + for i := range validators { validators[i] = ðpb.Validator{ ExitEpoch: params.BeaconConfig().FarFutureEpoch, } @@ -646,8 +644,8 @@ func BenchmarkComputeCommittee4000000_WithOutCache(b *testing.B) { i := uint64(0) index := uint64(0) - b.ResetTimer() - for n := 0; n < b.N; n++ { + + for b.Loop() { i++ _, err := helpers.ComputeCommittee(indices, seed, index, params.BeaconConfig().MaxCommitteesPerSlot) if err != nil { @@ -663,7 +661,7 @@ func BenchmarkComputeCommittee4000000_WithOutCache(b *testing.B) { func TestBeaconCommitteeFromState_UpdateCacheForPreviousEpoch(t *testing.T) { committeeSize := uint64(16) validators := make([]*ethpb.Validator, params.BeaconConfig().SlotsPerEpoch.Mul(committeeSize)) - for i := 0; i < len(validators); i++ { + for i := range validators { validators[i] = ðpb.Validator{ ExitEpoch: params.BeaconConfig().FarFutureEpoch, } @@ -688,7 +686,7 @@ func TestBeaconCommitteeFromState_UpdateCacheForPreviousEpoch(t *testing.T) { func TestPrecomputeProposerIndices_Ok(t *testing.T) { validators := make([]*ethpb.Validator, params.BeaconConfig().MinGenesisActiveValidatorCount) - for i := 0; i < len(validators); i++ { + for i := range validators { validators[i] = ðpb.Validator{ ExitEpoch: params.BeaconConfig().FarFutureEpoch, } @@ -732,7 +730,7 @@ func TestAttestationCommitteesFromState(t *testing.T) { ctx := t.Context() validators := make([]*ethpb.Validator, params.BeaconConfig().SlotsPerEpoch.Mul(params.BeaconConfig().TargetCommitteeSize)) - for i := 0; i < len(validators); i++ { + for i := range validators { validators[i] = ðpb.Validator{ ExitEpoch: params.BeaconConfig().FarFutureEpoch, } @@ -768,7 +766,7 @@ func TestAttestationCommitteesFromCache(t *testing.T) { ctx := t.Context() validators := make([]*ethpb.Validator, params.BeaconConfig().SlotsPerEpoch.Mul(params.BeaconConfig().TargetCommitteeSize)) - for i := 0; i < len(validators); i++ { + for i := range validators { validators[i] = ðpb.Validator{ ExitEpoch: params.BeaconConfig().FarFutureEpoch, } @@ -934,7 +932,7 @@ func TestInitializeProposerLookahead_RegressionTest(t *testing.T) { proposerLookahead, err := helpers.InitializeProposerLookahead(ctx, state, epoch) require.NoError(t, err) slotsPerEpoch := int(params.BeaconConfig().SlotsPerEpoch) - for epochOffset := primitives.Epoch(0); epochOffset < 2; epochOffset++ { + for epochOffset := range primitives.Epoch(2) { targetEpoch := epoch + epochOffset activeIndices, err := helpers.ActiveValidatorIndices(ctx, state, targetEpoch) diff --git a/beacon-chain/core/helpers/randao_test.go b/beacon-chain/core/helpers/randao_test.go index d9476ff49e..88cfaec82c 100644 --- a/beacon-chain/core/helpers/randao_test.go +++ b/beacon-chain/core/helpers/randao_test.go @@ -16,7 +16,7 @@ import ( func TestRandaoMix_OK(t *testing.T) { randaoMixes := make([][]byte, params.BeaconConfig().EpochsPerHistoricalVector) - for i := 
0; i < len(randaoMixes); i++ { + for i := range randaoMixes { intInBytes := make([]byte, 32) binary.LittleEndian.PutUint64(intInBytes, uint64(i)) randaoMixes[i] = intInBytes @@ -52,7 +52,7 @@ func TestRandaoMix_OK(t *testing.T) { func TestRandaoMix_CopyOK(t *testing.T) { randaoMixes := make([][]byte, params.BeaconConfig().EpochsPerHistoricalVector) - for i := 0; i < len(randaoMixes); i++ { + for i := range randaoMixes { intInBytes := make([]byte, 32) binary.LittleEndian.PutUint64(intInBytes, uint64(i)) randaoMixes[i] = intInBytes @@ -96,7 +96,7 @@ func TestGenerateSeed_OK(t *testing.T) { helpers.ClearCache() randaoMixes := make([][]byte, params.BeaconConfig().EpochsPerHistoricalVector) - for i := 0; i < len(randaoMixes); i++ { + for i := range randaoMixes { intInBytes := make([]byte, 32) binary.LittleEndian.PutUint64(intInBytes, uint64(i)) randaoMixes[i] = intInBytes diff --git a/beacon-chain/core/helpers/rewards_penalties_test.go b/beacon-chain/core/helpers/rewards_penalties_test.go index 5cedac6ef2..7d17cefd79 100644 --- a/beacon-chain/core/helpers/rewards_penalties_test.go +++ b/beacon-chain/core/helpers/rewards_penalties_test.go @@ -239,28 +239,28 @@ func TestIsInInactivityLeak(t *testing.T) { func buildState(slot primitives.Slot, validatorCount uint64) *ethpb.BeaconState { validators := make([]*ethpb.Validator, validatorCount) - for i := 0; i < len(validators); i++ { + for i := range validators { validators[i] = ðpb.Validator{ ExitEpoch: params.BeaconConfig().FarFutureEpoch, EffectiveBalance: params.BeaconConfig().MaxEffectiveBalance, } } validatorBalances := make([]uint64, len(validators)) - for i := 0; i < len(validatorBalances); i++ { + for i := range validatorBalances { validatorBalances[i] = params.BeaconConfig().MaxEffectiveBalance } latestActiveIndexRoots := make( [][]byte, params.BeaconConfig().EpochsPerHistoricalVector, ) - for i := 0; i < len(latestActiveIndexRoots); i++ { + for i := range latestActiveIndexRoots { latestActiveIndexRoots[i] = params.BeaconConfig().ZeroHash[:] } latestRandaoMixes := make( [][]byte, params.BeaconConfig().EpochsPerHistoricalVector, ) - for i := 0; i < len(latestRandaoMixes); i++ { + for i := range latestRandaoMixes { latestRandaoMixes[i] = params.BeaconConfig().ZeroHash[:] } return ðpb.BeaconState{ diff --git a/beacon-chain/core/helpers/shuffle.go b/beacon-chain/core/helpers/shuffle.go index e10053bdbe..2b3df4f0e1 100644 --- a/beacon-chain/core/helpers/shuffle.go +++ b/beacon-chain/core/helpers/shuffle.go @@ -23,7 +23,7 @@ var maxShuffleListSize uint64 = 1 << 40 func SplitIndices(l []uint64, n uint64) [][]uint64 { var divided [][]uint64 var lSize = uint64(len(l)) - for i := uint64(0); i < n; i++ { + for i := range n { start := slice.SplitOffset(lSize, n, i) end := slice.SplitOffset(lSize, n, i+1) divided = append(divided, l[start:end]) @@ -103,10 +103,7 @@ func ComputeShuffledIndex(index primitives.ValidatorIndex, indexCount uint64, se pivot := hash8Int % indexCount flip := (pivot + indexCount - uint64(index)) % indexCount // Consider every pair only once by picking the highest pair index to retrieve randomness. - position := uint64(index) - if flip > position { - position = flip - } + position := max(flip, uint64(index)) // Add position except its last byte to []buf for randomness, // it will be used later to select a bit from the resulting hash. 
binary.LittleEndian.PutUint64(posBuffer[:8], position>>8) diff --git a/beacon-chain/core/helpers/shuffle_test.go b/beacon-chain/core/helpers/shuffle_test.go index d1e985b38f..7e3c1103ef 100644 --- a/beacon-chain/core/helpers/shuffle_test.go +++ b/beacon-chain/core/helpers/shuffle_test.go @@ -30,7 +30,7 @@ func TestShuffleList_OK(t *testing.T) { var list1 []primitives.ValidatorIndex seed1 := [32]byte{1, 128, 12} seed2 := [32]byte{2, 128, 12} - for i := 0; i < 10; i++ { + for i := range 10 { list1 = append(list1, primitives.ValidatorIndex(i)) } @@ -55,7 +55,7 @@ func TestSplitIndices_OK(t *testing.T) { var l []uint64 numValidators := uint64(64000) - for i := uint64(0); i < numValidators; i++ { + for i := range numValidators { l = append(l, i) } split := SplitIndices(l, uint64(params.BeaconConfig().SlotsPerEpoch)) @@ -104,7 +104,7 @@ func BenchmarkIndexComparison(b *testing.B) { seed := [32]byte{123, 42} for _, listSize := range listSizes { b.Run(fmt.Sprintf("Indexwise_ShuffleList_%d", listSize), func(ib *testing.B) { - for i := 0; i < ib.N; i++ { + for ib.Loop() { // Simulate a list-shuffle by running shuffle-index listSize times. for j := primitives.ValidatorIndex(0); uint64(j) < listSize; j++ { _, err := ShuffledIndex(j, listSize, seed) @@ -120,11 +120,11 @@ func BenchmarkShuffleList(b *testing.B) { seed := [32]byte{123, 42} for _, listSize := range listSizes { testIndices := make([]primitives.ValidatorIndex, listSize) - for i := uint64(0); i < listSize; i++ { + for i := range listSize { testIndices[i] = primitives.ValidatorIndex(i) } b.Run(fmt.Sprintf("ShuffleList_%d", listSize), func(ib *testing.B) { - for i := 0; i < ib.N; i++ { + for ib.Loop() { _, err := ShuffleList(testIndices, seed) assert.NoError(b, err) } @@ -161,12 +161,12 @@ func TestSplitIndicesAndOffset_OK(t *testing.T) { var l []uint64 validators := uint64(64000) - for i := uint64(0); i < validators; i++ { + for i := range validators { l = append(l, i) } chunks := uint64(6) split := SplitIndices(l, chunks) - for i := uint64(0); i < chunks; i++ { + for i := range chunks { if !reflect.DeepEqual(split[i], l[slice.SplitOffset(uint64(len(l)), chunks, i):slice.SplitOffset(uint64(len(l)), chunks, i+1)]) { t.Errorf("Want: %v got: %v", l[slice.SplitOffset(uint64(len(l)), chunks, i):slice.SplitOffset(uint64(len(l)), chunks, i+1)], split[i]) break diff --git a/beacon-chain/core/helpers/sync_committee_test.go b/beacon-chain/core/helpers/sync_committee_test.go index 8b328d90c1..0d51eb1c56 100644 --- a/beacon-chain/core/helpers/sync_committee_test.go +++ b/beacon-chain/core/helpers/sync_committee_test.go @@ -24,7 +24,7 @@ func TestCurrentPeriodPositions(t *testing.T) { syncCommittee := &ethpb.SyncCommittee{ Pubkeys: make([][]byte, params.BeaconConfig().SyncCommitteeSize), } - for i := 0; i < len(validators); i++ { + for i := range validators { k := make([]byte, 48) copy(k, strconv.Itoa(i)) validators[i] = &ethpb.Validator{ @@ -56,7 +56,7 @@ func TestIsCurrentEpochSyncCommittee_UsingCache(t *testing.T) { syncCommittee := &ethpb.SyncCommittee{ AggregatePubkey: bytesutil.PadTo([]byte{}, params.BeaconConfig().BLSPubkeyLength), } - for i := 0; i < len(validators); i++ { + for i := range validators { k := make([]byte, 48) copy(k, strconv.Itoa(i)) validators[i] = &ethpb.Validator{ @@ -87,7 +87,7 @@ func TestIsCurrentEpochSyncCommittee_UsingCommittee(t *testing.T) { syncCommittee := &ethpb.SyncCommittee{ AggregatePubkey: bytesutil.PadTo([]byte{}, params.BeaconConfig().BLSPubkeyLength), } - for i := 0; i < len(validators); i++ { + for i := range validators { k :=
make([]byte, 48) copy(k, strconv.Itoa(i)) validators[i] = &ethpb.Validator{ @@ -116,7 +116,7 @@ func TestIsCurrentEpochSyncCommittee_DoesNotExist(t *testing.T) { syncCommittee := &ethpb.SyncCommittee{ AggregatePubkey: bytesutil.PadTo([]byte{}, params.BeaconConfig().BLSPubkeyLength), } - for i := 0; i < len(validators); i++ { + for i := range validators { k := make([]byte, 48) copy(k, strconv.Itoa(i)) validators[i] = &ethpb.Validator{ @@ -144,7 +144,7 @@ func TestIsNextEpochSyncCommittee_UsingCache(t *testing.T) { syncCommittee := &ethpb.SyncCommittee{ AggregatePubkey: bytesutil.PadTo([]byte{}, params.BeaconConfig().BLSPubkeyLength), } - for i := 0; i < len(validators); i++ { + for i := range validators { k := make([]byte, 48) copy(k, strconv.Itoa(i)) validators[i] = &ethpb.Validator{ @@ -175,7 +175,7 @@ func TestIsNextEpochSyncCommittee_UsingCommittee(t *testing.T) { syncCommittee := &ethpb.SyncCommittee{ AggregatePubkey: bytesutil.PadTo([]byte{}, params.BeaconConfig().BLSPubkeyLength), } - for i := 0; i < len(validators); i++ { + for i := range validators { k := make([]byte, 48) copy(k, strconv.Itoa(i)) validators[i] = &ethpb.Validator{ @@ -203,7 +203,7 @@ func TestIsNextEpochSyncCommittee_DoesNotExist(t *testing.T) { syncCommittee := &ethpb.SyncCommittee{ AggregatePubkey: bytesutil.PadTo([]byte{}, params.BeaconConfig().BLSPubkeyLength), } - for i := 0; i < len(validators); i++ { + for i := range validators { k := make([]byte, 48) copy(k, strconv.Itoa(i)) validators[i] = &ethpb.Validator{ @@ -231,7 +231,7 @@ func TestCurrentEpochSyncSubcommitteeIndices_UsingCache(t *testing.T) { syncCommittee := &ethpb.SyncCommittee{ AggregatePubkey: bytesutil.PadTo([]byte{}, params.BeaconConfig().BLSPubkeyLength), } - for i := 0; i < len(validators); i++ { + for i := range validators { k := make([]byte, 48) copy(k, strconv.Itoa(i)) validators[i] = &ethpb.Validator{ @@ -262,7 +262,7 @@ func TestCurrentEpochSyncSubcommitteeIndices_UsingCommittee(t *testing.T) { syncCommittee := &ethpb.SyncCommittee{ AggregatePubkey: bytesutil.PadTo([]byte{}, params.BeaconConfig().BLSPubkeyLength), } - for i := 0; i < len(validators); i++ { + for i := range validators { k := make([]byte, 48) copy(k, strconv.Itoa(i)) validators[i] = &ethpb.Validator{ @@ -304,7 +304,7 @@ func TestCurrentEpochSyncSubcommitteeIndices_DoesNotExist(t *testing.T) { syncCommittee := &ethpb.SyncCommittee{ AggregatePubkey: bytesutil.PadTo([]byte{}, params.BeaconConfig().BLSPubkeyLength), } - for i := 0; i < len(validators); i++ { + for i := range validators { k := make([]byte, 48) copy(k, strconv.Itoa(i)) validators[i] = &ethpb.Validator{ @@ -332,7 +332,7 @@ func TestNextEpochSyncSubcommitteeIndices_UsingCache(t *testing.T) { syncCommittee := &ethpb.SyncCommittee{ AggregatePubkey: bytesutil.PadTo([]byte{}, params.BeaconConfig().BLSPubkeyLength), } - for i := 0; i < len(validators); i++ { + for i := range validators { k := make([]byte, 48) copy(k, strconv.Itoa(i)) validators[i] = &ethpb.Validator{ @@ -363,7 +363,7 @@ func TestNextEpochSyncSubcommitteeIndices_UsingCommittee(t *testing.T) { syncCommittee := &ethpb.SyncCommittee{ AggregatePubkey: bytesutil.PadTo([]byte{}, params.BeaconConfig().BLSPubkeyLength), } - for i := 0; i < len(validators); i++ { + for i := range validators { k := make([]byte, 48) copy(k, strconv.Itoa(i)) validators[i] = &ethpb.Validator{ @@ -391,7 +391,7 @@ func TestNextEpochSyncSubcommitteeIndices_DoesNotExist(t *testing.T) { syncCommittee := &ethpb.SyncCommittee{ AggregatePubkey: bytesutil.PadTo([]byte{}, params.BeaconConfig().BLSPubkeyLength), } - for i := 0; i < len(validators); i++ {
+ for i := range validators { k := make([]byte, 48) copy(k, strconv.Itoa(i)) validators[i] = &ethpb.Validator{ @@ -449,7 +449,7 @@ func TestIsCurrentEpochSyncCommittee_SameBlockRoot(t *testing.T) { syncCommittee := &ethpb.SyncCommittee{ AggregatePubkey: bytesutil.PadTo([]byte{}, params.BeaconConfig().BLSPubkeyLength), } - for i := 0; i < len(validators); i++ { + for i := range validators { k := make([]byte, 48) copy(k, strconv.Itoa(i)) validators[i] = &ethpb.Validator{ diff --git a/beacon-chain/core/helpers/validators_test.go b/beacon-chain/core/helpers/validators_test.go index a449bb2a42..9a18899c53 100644 --- a/beacon-chain/core/helpers/validators_test.go +++ b/beacon-chain/core/helpers/validators_test.go @@ -184,7 +184,7 @@ func TestBeaconProposerIndex_OK(t *testing.T) { c.MinGenesisActiveValidatorCount = 16384 params.OverrideBeaconConfig(c) validators := make([]*ethpb.Validator, params.BeaconConfig().MinGenesisActiveValidatorCount/8) - for i := 0; i < len(validators); i++ { + for i := range validators { validators[i] = &ethpb.Validator{ ExitEpoch: params.BeaconConfig().FarFutureEpoch, } @@ -241,7 +241,7 @@ func TestBeaconProposerIndex_BadState(t *testing.T) { c.MinGenesisActiveValidatorCount = 16384 params.OverrideBeaconConfig(c) validators := make([]*ethpb.Validator, params.BeaconConfig().MinGenesisActiveValidatorCount/8) - for i := 0; i < len(validators); i++ { + for i := range validators { validators[i] = &ethpb.Validator{ ExitEpoch: params.BeaconConfig().FarFutureEpoch, } @@ -270,7 +270,7 @@ func TestComputeProposerIndex_Compatibility(t *testing.T) { helpers.ClearCache() validators := make([]*ethpb.Validator, params.BeaconConfig().MinGenesisActiveValidatorCount) - for i := 0; i < len(validators); i++ { + for i := range validators { validators[i] = &ethpb.Validator{ ExitEpoch: params.BeaconConfig().FarFutureEpoch, } @@ -322,7 +322,7 @@ func TestActiveValidatorCount_Genesis(t *testing.T) { c := 1000 validators := make([]*ethpb.Validator, c) - for i := 0; i < len(validators); i++ { + for i := range validators { validators[i] = &ethpb.Validator{ ExitEpoch: params.BeaconConfig().FarFutureEpoch, } @@ -357,7 +357,7 @@ func TestChurnLimit_OK(t *testing.T) { helpers.ClearCache() validators := make([]*ethpb.Validator, test.validatorCount) - for i := 0; i < len(validators); i++ { + for i := range validators { validators[i] = &ethpb.Validator{ ExitEpoch: params.BeaconConfig().FarFutureEpoch, } @@ -861,7 +861,7 @@ func TestLastActivatedValidatorIndex_OK(t *testing.T) { validators := make([]*ethpb.Validator, 4) balances := make([]uint64, len(validators)) - for i := uint64(0); i < 4; i++ { + for i := range uint64(4) { validators[i] = &ethpb.Validator{ PublicKey: make([]byte, params.BeaconConfig().BLSPubkeyLength), WithdrawalCredentials: make([]byte, 32), diff --git a/beacon-chain/core/helpers/weak_subjectivity_test.go b/beacon-chain/core/helpers/weak_subjectivity_test.go index 582271f328..cb11a6e8b8 100644 --- a/beacon-chain/core/helpers/weak_subjectivity_test.go +++ b/beacon-chain/core/helpers/weak_subjectivity_test.go @@ -270,7 +270,7 @@ func genState(t *testing.T, valCount, avgBalance uint64) state.BeaconState { validators := make([]*ethpb.Validator, valCount) balances := make([]uint64, len(validators)) - for i := uint64(0); i < valCount; i++ { + for i := range valCount { validators[i] = &ethpb.Validator{ PublicKey: make([]byte, params.BeaconConfig().BLSPubkeyLength), WithdrawalCredentials: make([]byte, 32), diff --git a/beacon-chain/core/peerdas/p2p_interface_test.go b/beacon-chain/core/peerdas/p2p_interface_test.go index
b47a07e0fc..38ec58f778 100644 --- a/beacon-chain/core/peerdas/p2p_interface_test.go +++ b/beacon-chain/core/peerdas/p2p_interface_test.go @@ -100,7 +100,7 @@ func Test_VerifyKZGInclusionProofColumn(t *testing.T) { // Generate random KZG commitments `blobCount` blobs. kzgCommitments := make([][]byte, blobCount) - for i := 0; i < blobCount; i++ { + for i := range blobCount { kzgCommitments[i] = make([]byte, 48) _, err := rand.Read(kzgCommitments[i]) require.NoError(t, err) diff --git a/beacon-chain/core/peerdas/validator.go b/beacon-chain/core/peerdas/validator.go index d5331524d4..aa679db806 100644 --- a/beacon-chain/core/peerdas/validator.go +++ b/beacon-chain/core/peerdas/validator.go @@ -216,7 +216,7 @@ func rotateRowsToCols(cellsPerBlob [][]kzg.Cell, proofsPerBlob [][]kzg.Proof, nu if len(cells) != len(proofs) { return nil, nil, errors.Wrap(ErrNotEnoughDataColumnSidecars, "not enough proofs") } - for j := uint64(0); j < numCols; j++ { + for j := range numCols { if i == 0 { cellCols[j] = make([][]byte, len(cellsPerBlob)) proofCols[j] = make([][]byte, len(cellsPerBlob)) diff --git a/beacon-chain/core/signing/signing_root_test.go b/beacon-chain/core/signing/signing_root_test.go index ba42b1f777..b3b93d733a 100644 --- a/beacon-chain/core/signing/signing_root_test.go +++ b/beacon-chain/core/signing/signing_root_test.go @@ -119,7 +119,7 @@ func TestFuzzverifySigningRoot_10000(_ *testing.T) { var p []byte var s []byte var d []byte - for i := 0; i < 10000; i++ { + for range 10000 { fuzzer.Fuzz(st) fuzzer.Fuzz(&pubkey) fuzzer.Fuzz(&sig) diff --git a/beacon-chain/core/transition/benchmarks_test.go b/beacon-chain/core/transition/benchmarks_test.go index 0422390963..d78b6c39f7 100644 --- a/beacon-chain/core/transition/benchmarks_test.go +++ b/beacon-chain/core/transition/benchmarks_test.go @@ -28,8 +28,7 @@ func BenchmarkExecuteStateTransition_FullBlock(b *testing.B) { block, err := benchmark.PreGenFullBlock() require.NoError(b, err) - b.ResetTimer() - for i := 0; i < b.N; i++ { + for i := 0; b.Loop(); i++ { wsb, err := blocks.NewSignedBeaconBlock(block) require.NoError(b, err) _, err = coreState.ExecuteStateTransition(b.Context(), cleanStates[i], wsb) @@ -60,8 +59,7 @@ func BenchmarkExecuteStateTransition_WithCache(b *testing.B) { _, err = coreState.ExecuteStateTransition(b.Context(), beaconState, wsb) require.NoError(b, err, "Failed to process block, benchmarks will fail") - b.ResetTimer() - for i := 0; i < b.N; i++ { + for i := 0; b.Loop(); i++ { wsb, err := blocks.NewSignedBeaconBlock(block) require.NoError(b, err) _, err = coreState.ExecuteStateTransition(b.Context(), cleanStates[i], wsb) @@ -83,8 +81,7 @@ func BenchmarkProcessEpoch_2FullEpochs(b *testing.B) { require.NoError(b, helpers.UpdateCommitteeCache(b.Context(), beaconState, time.CurrentEpoch(beaconState))) require.NoError(b, beaconState.SetSlot(currentSlot)) - b.ResetTimer() - for i := 0; i < b.N; i++ { + for b.Loop() { // ProcessEpochPrecompute is the optimized version of process epoch. It's enabled by default // at run time. 
_, err := coreState.ProcessEpochPrecompute(b.Context(), beaconState.Copy()) @@ -96,8 +93,7 @@ func BenchmarkHashTreeRoot_FullState(b *testing.B) { beaconState, err := benchmark.PreGenstateFullEpochs() require.NoError(b, err) - b.ResetTimer() - for i := 0; i < b.N; i++ { + for b.Loop() { _, err := beaconState.HashTreeRoot(b.Context()) require.NoError(b, err) } @@ -113,8 +109,7 @@ func BenchmarkHashTreeRootState_FullState(b *testing.B) { _, err = beaconState.HashTreeRoot(ctx) require.NoError(b, err) - b.ResetTimer() - for i := 0; i < b.N; i++ { + for b.Loop() { _, err := beaconState.HashTreeRoot(ctx) require.NoError(b, err) } @@ -128,7 +123,7 @@ func BenchmarkMarshalState_FullState(b *testing.B) { b.Run("Proto_Marshal", func(b *testing.B) { b.ResetTimer() b.ReportAllocs() - for i := 0; i < b.N; i++ { + for b.Loop() { _, err := proto.Marshal(natState) require.NoError(b, err) } @@ -137,7 +132,7 @@ b.Run("Fast_SSZ_Marshal", func(b *testing.B) { b.ResetTimer() b.ReportAllocs() - for i := 0; i < b.N; i++ { + for b.Loop() { _, err := natState.MarshalSSZ() require.NoError(b, err) } @@ -157,7 +152,7 @@ func BenchmarkUnmarshalState_FullState(b *testing.B) { b.Run("Proto_Unmarshal", func(b *testing.B) { b.ResetTimer() b.ReportAllocs() - for i := 0; i < b.N; i++ { + for b.Loop() { require.NoError(b, proto.Unmarshal(protoObject, &ethpb.BeaconState{})) } }) @@ -165,7 +160,7 @@ b.Run("Fast_SSZ_Unmarshal", func(b *testing.B) { b.ResetTimer() b.ReportAllocs() - for i := 0; i < b.N; i++ { + for b.Loop() { sszState := &ethpb.BeaconState{} require.NoError(b, sszState.UnmarshalSSZ(sszObject)) } @@ -174,7 +169,7 @@ func clonedStates(beaconState state.BeaconState) []state.BeaconState { clonedStates := make([]state.BeaconState, runAmount) - for i := 0; i < runAmount; i++ { + for i := range runAmount { clonedStates[i] = beaconState.Copy() } return clonedStates diff --git a/beacon-chain/core/transition/skip_slot_cache_test.go b/beacon-chain/core/transition/skip_slot_cache_test.go index f4458eba88..4ddf30d194 100644 --- a/beacon-chain/core/transition/skip_slot_cache_test.go +++ b/beacon-chain/core/transition/skip_slot_cache_test.go @@ -108,7 +108,7 @@ func TestSkipSlotCache_ConcurrentMixup(t *testing.T) { // prepare copies for both states var setups []state.BeaconState - for i := uint64(0); i < 300; i++ { + for i := range uint64(300) { var st state.BeaconState if i%2 == 0 { st = s1 diff --git a/beacon-chain/core/transition/state-bellatrix.go b/beacon-chain/core/transition/state-bellatrix.go index 55b106449a..ea444bb489 100644 --- a/beacon-chain/core/transition/state-bellatrix.go +++ b/beacon-chain/core/transition/state-bellatrix.go @@ -95,7 +95,7 @@ func OptimizedGenesisBeaconStateBellatrix(genesisTime uint64, preState state.Bea } randaoMixes := make([][]byte, params.BeaconConfig().EpochsPerHistoricalVector) - for i := 0; i < len(randaoMixes); i++ { + for i := range randaoMixes { h := make([]byte, 32) copy(h, eth1Data.BlockHash) randaoMixes[i] = h @@ -104,17 +104,17 @@ func OptimizedGenesisBeaconStateBellatrix(genesisTime uint64, preState state.Bea zeroHash := params.BeaconConfig().ZeroHash[:] activeIndexRoots := make([][]byte, params.BeaconConfig().EpochsPerHistoricalVector) - for i := 0; i < len(activeIndexRoots); i++ { + for i := range activeIndexRoots { activeIndexRoots[i] = zeroHash } blockRoots := make([][]byte, params.BeaconConfig().SlotsPerHistoricalRoot)
- for i := 0; i < len(blockRoots); i++ { + for i := range blockRoots { blockRoots[i] = zeroHash } stateRoots := make([][]byte, params.BeaconConfig().SlotsPerHistoricalRoot) - for i := 0; i < len(stateRoots); i++ { + for i := range stateRoots { stateRoots[i] = zeroHash } @@ -131,7 +131,7 @@ func OptimizedGenesisBeaconStateBellatrix(genesisTime uint64, preState state.Bea } scoresMissing := len(preState.Validators()) - len(scores) if scoresMissing > 0 { - for i := 0; i < scoresMissing; i++ { + for range scoresMissing { scores = append(scores, 0) } } diff --git a/beacon-chain/core/transition/state.go b/beacon-chain/core/transition/state.go index bad69cd22e..8248515087 100644 --- a/beacon-chain/core/transition/state.go +++ b/beacon-chain/core/transition/state.go @@ -122,7 +122,7 @@ func OptimizedGenesisBeaconState(genesisTime uint64, preState state.BeaconState, } randaoMixes := make([][]byte, params.BeaconConfig().EpochsPerHistoricalVector) - for i := 0; i < len(randaoMixes); i++ { + for i := range randaoMixes { h := make([]byte, 32) copy(h, eth1Data.BlockHash) randaoMixes[i] = h @@ -131,17 +131,17 @@ func OptimizedGenesisBeaconState(genesisTime uint64, preState state.BeaconState, zeroHash := params.BeaconConfig().ZeroHash[:] activeIndexRoots := make([][]byte, params.BeaconConfig().EpochsPerHistoricalVector) - for i := 0; i < len(activeIndexRoots); i++ { + for i := range activeIndexRoots { activeIndexRoots[i] = zeroHash } blockRoots := make([][]byte, params.BeaconConfig().SlotsPerHistoricalRoot) - for i := 0; i < len(blockRoots); i++ { + for i := range blockRoots { blockRoots[i] = zeroHash } stateRoots := make([][]byte, params.BeaconConfig().SlotsPerHistoricalRoot) - for i := 0; i < len(stateRoots); i++ { + for i := range stateRoots { stateRoots[i] = zeroHash } diff --git a/beacon-chain/core/transition/state_fuzz_test.go b/beacon-chain/core/transition/state_fuzz_test.go index da29755479..6bc1aba27c 100644 --- a/beacon-chain/core/transition/state_fuzz_test.go +++ b/beacon-chain/core/transition/state_fuzz_test.go @@ -17,7 +17,7 @@ func TestGenesisBeaconState_1000(t *testing.T) { deposits := make([]*ethpb.Deposit, 300000) var genesisTime uint64 eth1Data := &ethpb.Eth1Data{} - for i := 0; i < 1000; i++ { + for range 1000 { fuzzer.Fuzz(&deposits) fuzzer.Fuzz(&genesisTime) fuzzer.Fuzz(eth1Data) @@ -40,7 +40,7 @@ func TestOptimizedGenesisBeaconState_1000(t *testing.T) { preState, err := state_native.InitializeFromProtoUnsafePhase0(&ethpb.BeaconState{}) require.NoError(t, err) eth1Data := &ethpb.Eth1Data{} - for i := 0; i < 1000; i++ { + for range 1000 { fuzzer.Fuzz(&genesisTime) fuzzer.Fuzz(eth1Data) fuzzer.Fuzz(preState) @@ -60,7 +60,7 @@ func TestIsValidGenesisState_100000(_ *testing.T) { fuzzer := fuzz.NewWithSeed(0) fuzzer.NilChance(0.1) var chainStartDepositCount, currentTime uint64 - for i := 0; i < 100000; i++ { + for range 100000 { fuzzer.Fuzz(&chainStartDepositCount) fuzzer.Fuzz(&currentTime) IsValidGenesisState(chainStartDepositCount, currentTime) diff --git a/beacon-chain/core/transition/transition_fuzz_test.go b/beacon-chain/core/transition/transition_fuzz_test.go index 78a688c6d6..652c90e01b 100644 --- a/beacon-chain/core/transition/transition_fuzz_test.go +++ b/beacon-chain/core/transition/transition_fuzz_test.go @@ -21,7 +21,7 @@ func TestFuzzExecuteStateTransition_1000(t *testing.T) { sb := &ethpb.SignedBeaconBlock{} fuzzer := fuzz.NewWithSeed(0) fuzzer.NilChance(0.1) - for i := 0; i < 1000; i++ { + for range 1000 { fuzzer.Fuzz(state) fuzzer.Fuzz(sb) if sb.Block == nil || sb.Block.Body == nil { @@ -45,7
+45,7 @@ func TestFuzzCalculateStateRoot_1000(t *testing.T) { sb := &ethpb.SignedBeaconBlock{} fuzzer := fuzz.NewWithSeed(0) fuzzer.NilChance(0.1) - for i := 0; i < 1000; i++ { + for range 1000 { fuzzer.Fuzz(state) fuzzer.Fuzz(sb) if sb.Block == nil || sb.Block.Body == nil { @@ -68,7 +68,7 @@ func TestFuzzProcessSlot_1000(t *testing.T) { require.NoError(t, err) fuzzer := fuzz.NewWithSeed(0) fuzzer.NilChance(0.1) - for i := 0; i < 1000; i++ { + for range 1000 { fuzzer.Fuzz(state) s, err := ProcessSlot(ctx, state) if err != nil && s != nil { @@ -86,7 +86,7 @@ func TestFuzzProcessSlots_1000(t *testing.T) { slot := primitives.Slot(0) fuzzer := fuzz.NewWithSeed(0) fuzzer.NilChance(0.1) - for i := 0; i < 1000; i++ { + for range 1000 { fuzzer.Fuzz(state) fuzzer.Fuzz(&slot) s, err := ProcessSlots(ctx, state, slot) @@ -105,7 +105,7 @@ func TestFuzzprocessOperationsNoVerify_1000(t *testing.T) { bb := &ethpb.BeaconBlock{} fuzzer := fuzz.NewWithSeed(0) fuzzer.NilChance(0.1) - for i := 0; i < 1000; i++ { + for range 1000 { fuzzer.Fuzz(state) fuzzer.Fuzz(bb) if bb.Body == nil { @@ -128,7 +128,7 @@ func TestFuzzverifyOperationLengths_10000(t *testing.T) { bb := &ethpb.BeaconBlock{} fuzzer := fuzz.NewWithSeed(0) fuzzer.NilChance(0.1) - for i := 0; i < 10000; i++ { + for range 10000 { fuzzer.Fuzz(state) fuzzer.Fuzz(bb) if bb.Body == nil { @@ -148,7 +148,7 @@ func TestFuzzCanProcessEpoch_10000(t *testing.T) { require.NoError(t, err) fuzzer := fuzz.NewWithSeed(0) fuzzer.NilChance(0.1) - for i := 0; i < 10000; i++ { + for range 10000 { fuzzer.Fuzz(state) time.CanProcessEpoch(state) } @@ -162,7 +162,7 @@ func TestFuzzProcessEpochPrecompute_1000(t *testing.T) { require.NoError(t, err) fuzzer := fuzz.NewWithSeed(0) fuzzer.NilChance(0.1) - for i := 0; i < 1000; i++ { + for range 1000 { fuzzer.Fuzz(state) s, err := ProcessEpochPrecompute(ctx, state) if err != nil && s != nil { @@ -180,7 +180,7 @@ func TestFuzzProcessBlockForStateRoot_1000(t *testing.T) { sb := &ethpb.SignedBeaconBlock{} fuzzer := fuzz.NewWithSeed(0) fuzzer.NilChance(0.1) - for i := 0; i < 1000; i++ { + for range 1000 { fuzzer.Fuzz(state) fuzzer.Fuzz(sb) if sb.Block == nil || sb.Block.Body == nil || sb.Block.Body.Eth1Data == nil { diff --git a/beacon-chain/core/transition/transition_test.go b/beacon-chain/core/transition/transition_test.go index 908e392465..04f8263d0d 100644 --- a/beacon-chain/core/transition/transition_test.go +++ b/beacon-chain/core/transition/transition_test.go @@ -754,8 +754,7 @@ func BenchmarkProcessSlots_Capella(b *testing.B) { var err error - b.ResetTimer() - for i := 0; i < b.N; i++ { + for b.Loop() { st, err = transition.ProcessSlots(b.Context(), st, st.Slot()+1) if err != nil { b.Fatalf("Failed to process slot %v", err) @@ -768,8 +767,7 @@ func BenchmarkProcessSlots_Deneb(b *testing.B) { var err error - b.ResetTimer() - for i := 0; i < b.N; i++ { + for b.Loop() { st, err = transition.ProcessSlots(b.Context(), st, st.Slot()+1) if err != nil { b.Fatalf("Failed to process slot %v", err) @@ -782,8 +780,7 @@ func BenchmarkProcessSlots_Electra(b *testing.B) { var err error - b.ResetTimer() - for i := 0; i < b.N; i++ { + for b.Loop() { st, err = transition.ProcessSlots(b.Context(), st, st.Slot()+1) if err != nil { b.Fatalf("Failed to process slot %v", err) diff --git a/beacon-chain/core/validators/validator.go b/beacon-chain/core/validators/validator.go index 9e1079eaf1..9673400d75 100644 --- a/beacon-chain/core/validators/validator.go +++ b/beacon-chain/core/validators/validator.go @@ -307,7 +307,7 @@ func SlashValidator( //
ActivatedValidatorIndices determines the indices activated during the given epoch. func ActivatedValidatorIndices(epoch primitives.Epoch, validators []*ethpb.Validator) []primitives.ValidatorIndex { activations := make([]primitives.ValidatorIndex, 0) - for i := 0; i < len(validators); i++ { + for i := range validators { val := validators[i] if val.ActivationEpoch <= epoch && epoch < val.ExitEpoch { activations = append(activations, primitives.ValidatorIndex(i)) @@ -319,7 +319,7 @@ func ActivatedValidatorIndices(epoch primitives.Epoch, validators []*ethpb.Valid // SlashedValidatorIndices determines the indices slashed during the given epoch. func SlashedValidatorIndices(epoch primitives.Epoch, validators []*ethpb.Validator) []primitives.ValidatorIndex { slashed := make([]primitives.ValidatorIndex, 0) - for i := 0; i < len(validators); i++ { + for i := range validators { val := validators[i] maxWithdrawableEpoch := primitives.MaxEpoch(val.WithdrawableEpoch, epoch+params.BeaconConfig().EpochsPerSlashingsVector) if val.WithdrawableEpoch == maxWithdrawableEpoch && val.Slashed { diff --git a/beacon-chain/core/validators/validator_test.go b/beacon-chain/core/validators/validator_test.go index 8e2edde42a..e2e5488ce3 100644 --- a/beacon-chain/core/validators/validator_test.go +++ b/beacon-chain/core/validators/validator_test.go @@ -172,7 +172,7 @@ func TestSlashValidator_OK(t *testing.T) { validatorCount := 100 registry := make([]*ethpb.Validator, 0, validatorCount) balances := make([]uint64, 0, validatorCount) - for i := 0; i < validatorCount; i++ { + for range validatorCount { registry = append(registry, &ethpb.Validator{ ActivationEpoch: 0, ExitEpoch: params.BeaconConfig().FarFutureEpoch, @@ -226,7 +226,7 @@ func TestSlashValidator_Electra(t *testing.T) { validatorCount := 100 registry := make([]*ethpb.Validator, 0, validatorCount) balances := make([]uint64, 0, validatorCount) - for i := 0; i < validatorCount; i++ { + for range validatorCount { registry = append(registry, &ethpb.Validator{ ActivationEpoch: 0, ExitEpoch: params.BeaconConfig().FarFutureEpoch, diff --git a/beacon-chain/das/availability_blobs_test.go b/beacon-chain/das/availability_blobs_test.go index 1f41e96aba..c5d6cc03f2 100644 --- a/beacon-chain/das/availability_blobs_test.go +++ b/beacon-chain/das/availability_blobs_test.go @@ -26,7 +26,7 @@ func Test_commitmentsToCheck(t *testing.T) { windowSlots = windowSlots + primitives.Slot(params.BeaconConfig().FuluForkEpoch) maxBlobs := params.LastNetworkScheduleEntry().MaxBlobsPerBlock commits := make([][]byte, maxBlobs+1) - for i := 0; i < len(commits); i++ { + for i := range commits { commits[i] = bytesutil.PadTo([]byte{byte(i)}, 48) } cases := []struct { diff --git a/beacon-chain/das/blob_cache_test.go b/beacon-chain/das/blob_cache_test.go index 8b8e41d434..e21b56f853 100644 --- a/beacon-chain/das/blob_cache_test.go +++ b/beacon-chain/das/blob_cache_test.go @@ -44,7 +44,7 @@ func filterTestCaseSetup(slot primitives.Slot, nBlobs int, onDisk []int, numExpe entry.setDiskSummary(sum) } expected := make([]blocks.ROBlob, 0, nBlobs) - for i := 0; i < len(commits); i++ { + for i := range commits { if entry.diskSummary.HasIndex(uint64(i)) { continue } diff --git a/beacon-chain/db/filesystem/blob_test.go b/beacon-chain/db/filesystem/blob_test.go index b8356cd9cd..f1ebcff356 100644 --- a/beacon-chain/db/filesystem/blob_test.go +++ b/beacon-chain/db/filesystem/blob_test.go @@ -112,12 +112,10 @@ func TestBlobStorage_SaveBlobData(t *testing.T) { blob := testSidecars[0] var wg sync.WaitGroup - for i :=
0; i < 100; i++ { - wg.Add(1) - go func() { - defer wg.Done() + for range 100 { + wg.Go(func() { require.NoError(t, b.Save(blob)) - }() + }) } wg.Wait() diff --git a/beacon-chain/db/filesystem/cache.go b/beacon-chain/db/filesystem/cache.go index fa5f56f03d..5c238b91a1 100644 --- a/beacon-chain/db/filesystem/cache.go +++ b/beacon-chain/db/filesystem/cache.go @@ -32,7 +32,7 @@ func (s BlobStorageSummary) AllAvailable(count int) bool { if count > len(s.mask) { return false } - for i := 0; i < count; i++ { + for i := range count { if !s.mask[i] { return false } diff --git a/beacon-chain/db/filters/filter.go b/beacon-chain/db/filters/filter.go index 132a702603..c072c326ca 100644 --- a/beacon-chain/db/filters/filter.go +++ b/beacon-chain/db/filters/filter.go @@ -74,7 +74,7 @@ func (aq AncestryQuery) Span() primitives.Slot { // QueryFilter defines a generic interface for type-asserting // specific filters to use in querying DB objects. type QueryFilter struct { - queries map[FilterType]interface{} + queries map[FilterType]any ancestry AncestryQuery } @@ -82,14 +82,14 @@ type QueryFilter struct { // certain Ethereum data types by attribute. func NewFilter() *QueryFilter { return &QueryFilter{ - queries: make(map[FilterType]interface{}), + queries: make(map[FilterType]any), } } // Filters returns and underlying map of FilterType to interface{}, giving us // a copy of the currently set filters which can then be iterated over and type // asserted for use anywhere. -func (q *QueryFilter) Filters() map[FilterType]interface{} { +func (q *QueryFilter) Filters() map[FilterType]any { return q.queries } diff --git a/beacon-chain/db/kv/blocks.go b/beacon-chain/db/kv/blocks.go index d10c3aa5c5..818ad0571f 100644 --- a/beacon-chain/db/kv/blocks.go +++ b/beacon-chain/db/kv/blocks.go @@ -215,7 +215,7 @@ func (s *Store) Blocks(ctx context.Context, f *filters.QueryFilter) ([]interface return err } - for i := 0; i < len(keys); i++ { + for i := range keys { encoded := bkt.Get(keys[i]) blk, err := unmarshalBlock(ctx, encoded) if err != nil { @@ -307,7 +307,7 @@ func (s *Store) BlockRoots(ctx context.Context, f *filters.QueryFilter) ([][32]b return err } - for i := 0; i < len(keys); i++ { + for i := range keys { blockRoots = append(blockRoots, bytesutil.ToBytes32(keys[i])) } return nil @@ -1063,7 +1063,7 @@ func blockRootsByFilter(ctx context.Context, tx *bolt.Tx, f *filters.QueryFilter func blockRootsBySlotRange( ctx context.Context, bkt *bolt.Bucket, - startSlotEncoded, endSlotEncoded, startEpochEncoded, endEpochEncoded, slotStepEncoded interface{}, + startSlotEncoded, endSlotEncoded, startEpochEncoded, endEpochEncoded, slotStepEncoded any, ) ([][]byte, error) { _, span := trace.StartSpan(ctx, "BeaconDB.blockRootsBySlotRange") defer span.End() diff --git a/beacon-chain/db/kv/blocks_test.go b/beacon-chain/db/kv/blocks_test.go index 94f1ceafb5..88511c10f4 100644 --- a/beacon-chain/db/kv/blocks_test.go +++ b/beacon-chain/db/kv/blocks_test.go @@ -172,7 +172,7 @@ func TestStore_SaveBlock_NoDuplicates(t *testing.T) { // Even with a full cache, saving new blocks should not cause // duplicated blocks in the DB. 
- for i := 0; i < 100; i++ { + for range 100 { require.NoError(t, db.SaveBlock(ctx, blk)) } @@ -255,7 +255,7 @@ func TestStore_BlocksHandleZeroCase(t *testing.T) { ctx := t.Context() numBlocks := 10 totalBlocks := make([]interfaces.ReadOnlySignedBeaconBlock, numBlocks) - for i := 0; i < len(totalBlocks); i++ { + for i := range totalBlocks { b, err := tt.newBlock(primitives.Slot(i), bytesutil.PadTo([]byte("parent"), 32)) require.NoError(t, err) totalBlocks[i] = b @@ -279,7 +279,7 @@ func TestStore_BlocksHandleInvalidEndSlot(t *testing.T) { numBlocks := 10 totalBlocks := make([]interfaces.ReadOnlySignedBeaconBlock, numBlocks) // Save blocks from slot 1 onwards. - for i := 0; i < len(totalBlocks); i++ { + for i := range totalBlocks { b, err := tt.newBlock(primitives.Slot(i+1), bytesutil.PadTo([]byte("parent"), 32)) require.NoError(t, err) totalBlocks[i] = b @@ -927,7 +927,7 @@ func TestStore_Blocks_Retrieve_SlotRange(t *testing.T) { t.Run(tt.name, func(t *testing.T) { db := setupDB(t) totalBlocks := make([]interfaces.ReadOnlySignedBeaconBlock, 500) - for i := 0; i < 500; i++ { + for i := range 500 { b, err := tt.newBlock(primitives.Slot(i), bytesutil.PadTo([]byte("parent"), 32)) require.NoError(t, err) totalBlocks[i] = b @@ -947,7 +947,7 @@ func TestStore_Blocks_Retrieve_Epoch(t *testing.T) { db := setupDB(t) slots := params.BeaconConfig().SlotsPerEpoch.Mul(7) totalBlocks := make([]interfaces.ReadOnlySignedBeaconBlock, slots) - for i := primitives.Slot(0); i < slots; i++ { + for i := range slots { b, err := tt.newBlock(i, bytesutil.PadTo([]byte("parent"), 32)) require.NoError(t, err) totalBlocks[i] = b @@ -971,7 +971,7 @@ func TestStore_Blocks_Retrieve_SlotRangeWithStep(t *testing.T) { t.Run(tt.name, func(t *testing.T) { db := setupDB(t) totalBlocks := make([]interfaces.ReadOnlySignedBeaconBlock, 500) - for i := 0; i < 500; i++ { + for i := range 500 { b, err := tt.newBlock(primitives.Slot(i), bytesutil.PadTo([]byte("parent"), 32)) require.NoError(t, err) totalBlocks[i] = b @@ -1140,7 +1140,7 @@ func TestStore_SaveBlocks_HasCachedBlocks(t *testing.T) { ctx := t.Context() b := make([]interfaces.ReadOnlySignedBeaconBlock, 500) - for i := 0; i < 500; i++ { + for i := range 500 { blk, err := tt.newBlock(primitives.Slot(i), bytesutil.PadTo([]byte("parent"), 32)) require.NoError(t, err) b[i] = blk @@ -1164,7 +1164,7 @@ func TestStore_SaveBlocks_HasRootsMatched(t *testing.T) { ctx := t.Context() b := make([]interfaces.ReadOnlySignedBeaconBlock, 500) - for i := 0; i < 500; i++ { + for i := range 500 { blk, err := tt.newBlock(primitives.Slot(i), bytesutil.PadTo([]byte("parent"), 32)) require.NoError(t, err) b[i] = blk diff --git a/beacon-chain/db/kv/encoding.go b/beacon-chain/db/kv/encoding.go index d1a5c9d5b9..d894913ba0 100644 --- a/beacon-chain/db/kv/encoding.go +++ b/beacon-chain/db/kv/encoding.go @@ -58,7 +58,7 @@ func encode(ctx context.Context, msg proto.Message) ([]byte, error) { } // isSSZStorageFormat returns true if the object type should be saved in SSZ encoded format. 
-func isSSZStorageFormat(obj interface{}) bool { +func isSSZStorageFormat(obj any) bool { switch obj.(type) { case *ethpb.BeaconState: return true diff --git a/beacon-chain/db/kv/finalized_block_roots_test.go b/beacon-chain/db/kv/finalized_block_roots_test.go index 91e2c057a6..01d3dff1cd 100644 --- a/beacon-chain/db/kv/finalized_block_roots_test.go +++ b/beacon-chain/db/kv/finalized_block_roots_test.go @@ -161,7 +161,7 @@ func TestStore_IsFinalizedChildBlock(t *testing.T) { require.NoError(t, db.SaveFinalizedCheckpoint(ctx, cp)) // All blocks up to slotsPerEpoch should have a finalized child block. - for i := uint64(0); i < slotsPerEpoch; i++ { + for i := range slotsPerEpoch { root, err := blks[i].Block().HashTreeRoot() require.NoError(t, err) assert.Equal(t, true, db.IsFinalizedBlock(ctx, root), "Block at index %d was not considered finalized in the index", i) diff --git a/beacon-chain/db/kv/lightclient_test.go b/beacon-chain/db/kv/lightclient_test.go index cec59f2cbf..bb49e67b57 100644 --- a/beacon-chain/db/kv/lightclient_test.go +++ b/beacon-chain/db/kv/lightclient_test.go @@ -29,14 +29,14 @@ func createUpdate(t *testing.T, v int) (interfaces.LightClientUpdate, error) { var err error sampleRoot := make([]byte, 32) - for i := 0; i < 32; i++ { + for i := range 32 { sampleRoot[i] = byte(i) } sampleExecutionBranch := make([][]byte, fieldparams.ExecutionBranchDepth) - for i := 0; i < 4; i++ { + for i := range 4 { sampleExecutionBranch[i] = make([]byte, 32) - for j := 0; j < 32; j++ { + for j := range 32 { sampleExecutionBranch[i][j] = byte(i + j) } } @@ -460,7 +460,7 @@ func createDefaultLightClientUpdate(currentSlot primitives.Slot, attestedState s syncCommitteeSize := params.BeaconConfig().SyncCommitteeSize pubKeys := make([][]byte, syncCommitteeSize) - for i := uint64(0); i < syncCommitteeSize; i++ { + for i := range syncCommitteeSize { pubKeys[i] = make([]byte, fieldparams.BLSPubkeyLength) } nextSyncCommittee := &pb.SyncCommittee{ @@ -479,7 +479,7 @@ func createDefaultLightClientUpdate(currentSlot primitives.Slot, attestedState s } executionBranch := make([][]byte, fieldparams.ExecutionBranchDepth) - for i := 0; i < fieldparams.ExecutionBranchDepth; i++ { + for i := range fieldparams.ExecutionBranchDepth { executionBranch[i] = make([]byte, 32) } @@ -731,7 +731,7 @@ func createDefaultLightClientBootstrap(currentSlot primitives.Slot) (interfaces. currentEpoch := slots.ToEpoch(currentSlot) syncCommitteeSize := params.BeaconConfig().SyncCommitteeSize pubKeys := make([][]byte, syncCommitteeSize) - for i := uint64(0); i < syncCommitteeSize; i++ { + for i := range syncCommitteeSize { pubKeys[i] = make([]byte, fieldparams.BLSPubkeyLength) } currentSyncCommittee := &pb.SyncCommittee{ @@ -750,7 +750,7 @@ func createDefaultLightClientBootstrap(currentSlot primitives.Slot) (interfaces. 
} executionBranch := make([][]byte, fieldparams.ExecutionBranchDepth) - for i := 0; i < fieldparams.ExecutionBranchDepth; i++ { + for i := range fieldparams.ExecutionBranchDepth { executionBranch[i] = make([]byte, 32) } diff --git a/beacon-chain/db/kv/state.go b/beacon-chain/db/kv/state.go index 485c5bcadd..a84890fcf6 100644 --- a/beacon-chain/db/kv/state.go +++ b/beacon-chain/db/kv/state.go @@ -922,7 +922,7 @@ func createStateIndicesFromStateSlot(ctx context.Context, slot primitives.Slot) indices := [][]byte{ bytesutil.SlotToBytesBigEndian(slot), } - for i := 0; i < len(buckets); i++ { + for i := range buckets { indicesByBucket[string(buckets[i])] = indices[i] } return indicesByBucket diff --git a/beacon-chain/db/kv/state_test.go b/beacon-chain/db/kv/state_test.go index 38af8b225e..ca4ec52d27 100644 --- a/beacon-chain/db/kv/state_test.go +++ b/beacon-chain/db/kv/state_test.go @@ -318,7 +318,7 @@ func TestState_CanSaveRetrieveValidatorEntriesFromCache(t *testing.T) { assert.Equal(t, true, db.HasState(t.Context(), r)) // check if the state is in cache - for i := 0; i < len(stateValidators); i++ { + for i := range stateValidators { hash, hashErr := stateValidators[i].HashTreeRoot() assert.NoError(t, hashErr) @@ -505,7 +505,7 @@ func TestStore_StatesBatchDelete(t *testing.T) { totalBlocks := make([]interfaces.ReadOnlySignedBeaconBlock, numBlocks) blockRoots := make([][32]byte, 0) evenBlockRoots := make([][32]byte, 0) - for i := 0; i < len(totalBlocks); i++ { + for i := range totalBlocks { b := util.NewBeaconBlock() b.Block.Slot = primitives.Slot(i) var err error @@ -874,7 +874,7 @@ func TestAltairState_CanDelete(t *testing.T) { func validators(limit int) []*ethpb.Validator { var vals []*ethpb.Validator - for i := 0; i < limit; i++ { + for i := range limit { pubKey := make([]byte, params.BeaconConfig().BLSPubkeyLength) binary.LittleEndian.PutUint64(pubKey, mathRand.Uint64()) val := &ethpb.Validator{ @@ -893,13 +893,12 @@ func validators(limit int) []*ethpb.Validator { } func checkStateSaveTime(b *testing.B, saveCount int) { - b.StopTimer() db := setupDB(b) initialSetOfValidators := validators(100000) // construct some states and save to randomize benchmark. - for i := 0; i < saveCount; i++ { + for range saveCount { key := make([]byte, 32) _, err := rand.Read(key) require.NoError(b, err) @@ -925,14 +924,13 @@ func checkStateSaveTime(b *testing.B, saveCount int) { require.NoError(b, st.SetValidators(initialSetOfValidators)) b.ReportAllocs() - b.StartTimer() - for i := 0; i < b.N; i++ { + + for b.Loop() { require.NoError(b, db.SaveState(b.Context(), st, r)) } } func checkStateReadTime(b *testing.B, saveCount int) { - b.StopTimer() db := setupDB(b) initialSetOfValidators := validators(100000) @@ -945,7 +943,7 @@ func checkStateReadTime(b *testing.B, saveCount int) { require.NoError(b, db.SaveState(b.Context(), st, r)) // construct some states and save to randomize benchmark.
- for i := 0; i < saveCount; i++ { + for range saveCount { key := make([]byte, 32) _, err := rand.Read(key) require.NoError(b, err) @@ -965,8 +963,8 @@ func checkStateReadTime(b *testing.B, saveCount int) { } b.ReportAllocs() - b.StartTimer() - for i := 0; i < b.N; i++ { + + for b.Loop() { _, err := db.State(b.Context(), r) require.NoError(b, err) } diff --git a/beacon-chain/db/kv/utils_test.go b/beacon-chain/db/kv/utils_test.go index cafa836337..2f1542baea 100644 --- a/beacon-chain/db/kv/utils_test.go +++ b/beacon-chain/db/kv/utils_test.go @@ -151,7 +151,7 @@ func TestSplitRoots(t *testing.T) { bt := make([][32]byte, 0) for _, x := range []byte{0, 1, 2, 3, 4, 5, 6, 7, 8, 9} { var b [32]byte - for i := 0; i < 32; i++ { + for i := range 32 { b[i] = x } bt = append(bt, b) diff --git a/beacon-chain/db/kv/validated_checkpoint_test.go b/beacon-chain/db/kv/validated_checkpoint_test.go index 895ea6bbdf..387da4f8e1 100644 --- a/beacon-chain/db/kv/validated_checkpoint_test.go +++ b/beacon-chain/db/kv/validated_checkpoint_test.go @@ -64,8 +64,7 @@ func BenchmarkStore_SaveLastValidatedCheckpoint(b *testing.B) { require.NoError(b, db.SaveState(ctx, st, root)) db.stateSummaryCache.clear() - b.ResetTimer() - for i := 0; i < b.N; i++ { + for b.Loop() { require.NoError(b, db.SaveLastValidatedCheckpoint(ctx, cp)) } } diff --git a/beacon-chain/db/pruner/pruner.go b/beacon-chain/db/pruner/pruner.go index 7573435214..a078b1aab1 100644 --- a/beacon-chain/db/pruner/pruner.go +++ b/beacon-chain/db/pruner/pruner.go @@ -221,7 +221,7 @@ func (p *Service) pruneBatches(pruneUpto primitives.Slot) (int, error) { case <-ctx.Done(): return numBatches, nil default: - for i := 0; i < defaultNumBatchesToPrune; i++ { + for range defaultNumBatchesToPrune { slotsDeleted, err := p.db.DeleteHistoricalDataBeforeSlot(ctx, pruneUpto, defaultPrunableBatchSize) if err != nil { return 0, errors.Wrapf(err, "could not delete upto slot %d", pruneUpto) diff --git a/beacon-chain/db/slasherkv/pruning_test.go b/beacon-chain/db/slasherkv/pruning_test.go index 3e4912fd9a..cc4eee2e1d 100644 --- a/beacon-chain/db/slasherkv/pruning_test.go +++ b/beacon-chain/db/slasherkv/pruning_test.go @@ -66,7 +66,7 @@ func TestStore_PruneProposalsAtEpoch(t *testing.T) { expectedNumPruned := 2 * uint(pruningLimitEpoch+1) * uint(slotsPerEpoch) proposals := make([]*slashertypes.SignedBlockHeaderWrapper, 0, uint64(currentEpoch)*uint64(slotsPerEpoch)*2) - for i := primitives.Epoch(0); i < currentEpoch; i++ { + for i := range currentEpoch { startSlot, err := slots.EpochStart(i) require.NoError(t, err) endSlot, err := slots.EpochStart(i + 1) @@ -86,7 +86,7 @@ func TestStore_PruneProposalsAtEpoch(t *testing.T) { require.Equal(t, expectedNumPruned, actualNumPruned) // Everything before epoch 10 should be deleted. 
- for i := primitives.Epoch(0); i < pruningLimitEpoch; i++ { + for i := range pruningLimitEpoch { err = beaconDB.db.View(func(tx *bolt.Tx) error { bkt := tx.Bucket(proposalRecordsBucket) startSlot, err := slots.EpochStart(i) @@ -164,7 +164,7 @@ func TestStore_PruneAttestations_OK(t *testing.T) { expectedNumPruned := 2 * uint(pruningLimitEpoch+1) * uint(slotsPerEpoch) attestations := make([]*slashertypes.IndexedAttestationWrapper, 0, uint64(currentEpoch)*uint64(slotsPerEpoch)*2) - for i := primitives.Epoch(0); i < currentEpoch; i++ { + for i := range currentEpoch { startSlot, err := slots.EpochStart(i) require.NoError(t, err) endSlot, err := slots.EpochStart(i + 1) @@ -191,7 +191,7 @@ func TestStore_PruneAttestations_OK(t *testing.T) { require.Equal(t, expectedNumPruned, actualNumPruned) // Everything before epoch 10 should be deleted. - for i := primitives.Epoch(0); i < pruningLimitEpoch; i++ { + for i := range pruningLimitEpoch { err = beaconDB.db.View(func(tx *bolt.Tx) error { bkt := tx.Bucket(attestationDataRootsBucket) startSlot, err := slots.EpochStart(i) diff --git a/beacon-chain/db/slasherkv/slasher.go b/beacon-chain/db/slasherkv/slasher.go index 355861e746..3246c501bd 100644 --- a/beacon-chain/db/slasherkv/slasher.go +++ b/beacon-chain/db/slasherkv/slasher.go @@ -429,7 +429,7 @@ func (s *Store) SaveSlasherChunks( encodedKeys := make([][]byte, chunksCount) encodedChunks := make([][]byte, chunksCount) - for i := 0; i < chunksCount; i++ { + for i := range chunksCount { chunkKey, chunk := chunkKeys[i], chunks[i] encodedKey := append(encodedKind, chunkKey...) @@ -452,7 +452,7 @@ func (s *Store) SaveSlasherChunks( if err := s.db.Update(func(tx *bolt.Tx) error { bkt := tx.Bucket(slasherChunksBucket) - for i := 0; i < batchSize; i++ { + for i := range batchSize { if err := bkt.Put(encodedKeysBatch[i], encodedChunksBatch[i]); err != nil { return err } @@ -617,7 +617,7 @@ func (s *Store) HighestAttestations( err = s.db.View(func(tx *bolt.Tx) error { signingRootsBkt := tx.Bucket(attestationDataRootsBucket) attRecordsBkt := tx.Bucket(attestationRecordsBucket) - for i := 0; i < len(encodedIndices); i++ { + for i := range encodedIndices { c := signingRootsBkt.Cursor() for k, v := c.Last(); k != nil; k, v = c.Prev() { if suffixForAttestationRecordsKey(k, encodedIndices[i]) { @@ -659,7 +659,7 @@ func keyForValidatorProposal(slot primitives.Slot, proposerIndex primitives.Vali func encodeSlasherChunk(chunk []uint16) ([]byte, error) { val := make([]byte, 0) - for i := 0; i < len(chunk); i++ { + for i := range chunk { val = append(val, ssz.MarshalUint16(make([]byte, 0), chunk[i])...) } if len(val) == 0 { diff --git a/beacon-chain/db/slasherkv/slasher_test.go b/beacon-chain/db/slasherkv/slasher_test.go index a76128e5ee..ced8939beb 100644 --- a/beacon-chain/db/slasherkv/slasher_test.go +++ b/beacon-chain/db/slasherkv/slasher_test.go @@ -26,7 +26,7 @@ func TestStore_AttestationRecordForValidator_SaveRetrieve(t *testing.T) { // Defines attestations to save and retrieve. 
attWrappers := make([]*slashertypes.IndexedAttestationWrapper, attestationsCount) - for i := 0; i < attestationsCount; i++ { + for i := range attestationsCount { var dataRoot [32]byte binary.LittleEndian.PutUint64(dataRoot[:], uint64(i)) @@ -41,7 +41,7 @@ func TestStore_AttestationRecordForValidator_SaveRetrieve(t *testing.T) { attWrappers[i] = attWrapper } attWrappersElectra := make([]*slashertypes.IndexedAttestationWrapper, attestationsCount) - for i := 0; i < attestationsCount; i++ { + for i := range attestationsCount { var dataRoot [32]byte binary.LittleEndian.PutUint64(dataRoot[:], uint64(i)) @@ -107,13 +107,13 @@ func TestStore_LastEpochWrittenForValidators(t *testing.T) { indices := make([]primitives.ValidatorIndex, validatorsCount) epochs := make([]primitives.Epoch, validatorsCount) - for i := 0; i < validatorsCount; i++ { + for i := range validatorsCount { indices[i] = primitives.ValidatorIndex(i) epochs[i] = primitives.Epoch(i) } epochsByValidator := make(map[primitives.ValidatorIndex]primitives.Epoch, validatorsCount) - for i := 0; i < validatorsCount; i++ { + for i := range validatorsCount { epochsByValidator[indices[i]] = epochs[i] } @@ -215,7 +215,7 @@ func TestStore_SlasherChunk_SaveRetrieve(t *testing.T) { minChunkKeys := make([][]byte, totalChunks) minChunks := make([][]uint16, totalChunks) - for i := 0; i < totalChunks; i++ { + for i := range totalChunks { // Create chunk key. chunkKey := ssz.MarshalUint64(make([]byte, 0), uint64(i)) minChunkKeys[i] = chunkKey @@ -223,7 +223,7 @@ func TestStore_SlasherChunk_SaveRetrieve(t *testing.T) { // Create chunk. chunk := make([]uint16, elemsPerChunk) - for j := 0; j < len(chunk); j++ { + for j := range chunk { chunk[j] = uint16(i + j) } @@ -234,7 +234,7 @@ func TestStore_SlasherChunk_SaveRetrieve(t *testing.T) { maxChunkKeys := make([][]byte, totalChunks) maxChunks := make([][]uint16, totalChunks) - for i := 0; i < totalChunks; i++ { + for i := range totalChunks { // Create chunk key. chunkKey := ssz.MarshalUint64(make([]byte, 0), uint64(i+1)) maxChunkKeys[i] = chunkKey @@ -242,7 +242,7 @@ func TestStore_SlasherChunk_SaveRetrieve(t *testing.T) { // Create chunk. 
chunk := make([]uint16, elemsPerChunk) - for j := 0; j < len(chunk); j++ { + for j := range chunk { chunk[j] = uint16(i + j + 1) } @@ -315,7 +315,7 @@ func TestStore_SlasherChunk_PreventsSavingWrongLength(t *testing.T) { totalChunks := 64 chunkKeys := make([][]byte, totalChunks) chunks := make([][]uint16, totalChunks) - for i := 0; i < totalChunks; i++ { + for i := range totalChunks { chunks[i] = []uint16{} chunkKeys[i] = ssz.MarshalUint64(make([]byte, 0), uint64(i)) } @@ -566,11 +566,11 @@ func TestStore_HighestAttestations(t *testing.T) { } func BenchmarkHighestAttestations(b *testing.B) { - b.StopTimer() + count := 10000 valsPerAtt := 100 indicesPerAtt := make([][]uint64, count) - for i := 0; i < count; i++ { + for i := range count { indicesForAtt := make([]uint64, valsPerAtt) for r := i * count; r < valsPerAtt*(i+1); r++ { indicesForAtt[i] = uint64(r) @@ -578,7 +578,7 @@ func BenchmarkHighestAttestations(b *testing.B) { indicesPerAtt[i] = indicesForAtt } atts := make([]*slashertypes.IndexedAttestationWrapper, count) - for i := 0; i < count; i++ { + for i := range count { atts[i] = createAttestationWrapper(version.Phase0, primitives.Epoch(i), primitives.Epoch(i+2), indicesPerAtt[i], []byte{}) } @@ -587,27 +587,27 @@ func BenchmarkHighestAttestations(b *testing.B) { require.NoError(b, beaconDB.SaveAttestationRecordsForValidators(ctx, atts)) allIndices := make([]primitives.ValidatorIndex, 0, valsPerAtt*count) - for i := 0; i < count; i++ { + for i := range count { indicesForAtt := make([]primitives.ValidatorIndex, valsPerAtt) - for r := 0; r < valsPerAtt; r++ { + for r := range valsPerAtt { indicesForAtt[r] = primitives.ValidatorIndex(atts[i].IndexedAttestation.GetAttestingIndices()[r]) } allIndices = append(allIndices, indicesForAtt...) } b.ReportAllocs() - b.StartTimer() - for i := 0; i < b.N; i++ { + + for b.Loop() { _, err := beaconDB.HighestAttestations(ctx, allIndices) require.NoError(b, err) } } func BenchmarkStore_CheckDoubleBlockProposals(b *testing.B) { - b.StopTimer() + count := 10000 valsPerAtt := 100 indicesPerAtt := make([][]uint64, count) - for i := 0; i < count; i++ { + for i := range count { indicesForAtt := make([]uint64, valsPerAtt) for r := i * count; r < valsPerAtt*(i+1); r++ { indicesForAtt[i] = uint64(r) @@ -615,7 +615,7 @@ func BenchmarkStore_CheckDoubleBlockProposals(b *testing.B) { indicesPerAtt[i] = indicesForAtt } atts := make([]*slashertypes.IndexedAttestationWrapper, count) - for i := 0; i < count; i++ { + for i := range count { atts[i] = createAttestationWrapper(version.Phase0, primitives.Epoch(i), primitives.Epoch(i+2), indicesPerAtt[i], []byte{}) } @@ -627,8 +627,8 @@ func BenchmarkStore_CheckDoubleBlockProposals(b *testing.B) { rand.Shuffle(count, func(i, j int) { atts[i], atts[j] = atts[j], atts[i] }) b.ReportAllocs() - b.StartTimer() - for i := 0; i < b.N; i++ { + + for b.Loop() { _, err := beaconDB.CheckAttesterDoubleVotes(ctx, atts) require.NoError(b, err) } diff --git a/beacon-chain/execution/block_cache.go b/beacon-chain/execution/block_cache.go index 3762e9acce..7ae86c7d83 100644 --- a/beacon-chain/execution/block_cache.go +++ b/beacon-chain/execution/block_cache.go @@ -39,7 +39,7 @@ var ( ) // hashKeyFn takes the hex string representation as the key for a headerInfo. 
-func hashKeyFn(obj interface{}) (string, error) { +func hashKeyFn(obj any) (string, error) { hInfo, ok := obj.(*types.HeaderInfo) if !ok { return "", ErrNotAHeaderInfo @@ -50,7 +50,7 @@ func hashKeyFn(obj interface{}) (string, error) { // heightKeyFn takes the string representation of the block header number as the key // for a headerInfo. -func heightKeyFn(obj interface{}) (string, error) { +func heightKeyFn(obj any) (string, error) { hInfo, ok := obj.(*types.HeaderInfo) if !ok { return "", ErrNotAHeaderInfo @@ -164,6 +164,6 @@ func trim(queue *cache.FIFO, maxSize uint64) { } // popProcessNoopFunc is a no-op function that never returns an error. -func popProcessNoopFunc(_ interface{}, _ bool) error { +func popProcessNoopFunc(_ any, _ bool) error { return nil } diff --git a/beacon-chain/execution/block_reader.go b/beacon-chain/execution/block_reader.go index f3a558dd46..9f09f98189 100644 --- a/beacon-chain/execution/block_reader.go +++ b/beacon-chain/execution/block_reader.go @@ -118,7 +118,7 @@ func (s *Service) BlockByTimestamp(ctx context.Context, time uint64) (*types.Hea maxTimeBuffer := searchThreshold * params.BeaconConfig().SecondsPerETH1Block // Terminate if we can't find an acceptable block after // repeated searches. - for i := 0; i < repeatedSearches; i++ { + for range repeatedSearches { if ctx.Err() != nil { return nil, ctx.Err() } diff --git a/beacon-chain/execution/block_reader_test.go b/beacon-chain/execution/block_reader_test.go index 11cbca4fc8..7d7e8bf3a7 100644 --- a/beacon-chain/execution/block_reader_test.go +++ b/beacon-chain/execution/block_reader_test.go @@ -226,7 +226,7 @@ func TestService_BlockNumberByTimestamp(t *testing.T) { params.OverrideBeaconConfig(conf) initialHead, err := testAcc.Backend.Client().HeaderByNumber(ctx, nil) require.NoError(t, err) - for i := 0; i < 200; i++ { + for range 200 { testAcc.Backend.Commit() } @@ -258,7 +258,7 @@ func TestService_BlockNumberByTimestampLessTargetTime(t *testing.T) { web3Service = setDefaultMocks(web3Service) web3Service.rpcClient = &mockExecution.RPCClient{Backend: testAcc.Backend} - for i := 0; i < 200; i++ { + for range 200 { testAcc.Backend.Commit() } ctx := t.Context() @@ -296,7 +296,7 @@ func TestService_BlockNumberByTimestampMoreTargetTime(t *testing.T) { web3Service = setDefaultMocks(web3Service) web3Service.rpcClient = &mockExecution.RPCClient{Backend: testAcc.Backend} - for i := 0; i < 200; i++ { + for range 200 { testAcc.Backend.Commit() } ctx := t.Context() diff --git a/beacon-chain/execution/engine_client.go b/beacon-chain/execution/engine_client.go index 84bce36665..43702a9596 100644 --- a/beacon-chain/execution/engine_client.go +++ b/beacon-chain/execution/engine_client.go @@ -475,7 +475,7 @@ func (s *Service) ExecutionBlocksByHashes(ctx context.Context, hashes []common.H newH := h elems = append(elems, gethRPC.BatchElem{ Method: BlockByHashMethod, - Args: []interface{}{newH, withTxs}, + Args: []any{newH, withTxs}, Result: blk, Error: error(nil), }) diff --git a/beacon-chain/execution/engine_client_fuzz_test.go b/beacon-chain/execution/engine_client_fuzz_test.go index 21e8b6bdb3..64187607ef 100644 --- a/beacon-chain/execution/engine_client_fuzz_test.go +++ b/beacon-chain/execution/engine_client_fuzz_test.go @@ -1,6 +1,3 @@ -//go:build go1.18 -// +build go1.18 - package execution_test import ( @@ -164,7 +161,7 @@ func FuzzExecutionBlock(f *testing.F) { f.Add(output) f.Fuzz(func(t *testing.T, jsonBlob []byte) { - gethResp := make(map[string]interface{}) + gethResp := make(map[string]any) prysmResp := 
&pb.ExecutionBlock{} gethErr := json.Unmarshal(jsonBlob, &gethResp) prysmErr := json.Unmarshal(jsonBlob, prysmResp) @@ -187,10 +184,10 @@ func FuzzExecutionBlock(f *testing.F) { gethBlob, gethErr := json.Marshal(gethResp) prysmBlob, prysmErr := json.Marshal(prysmResp) assert.Equal(t, gethErr != nil, prysmErr != nil, "geth and prysm unmarshaller return inconsistent errors") - newGethResp := make(map[string]interface{}) + newGethResp := make(map[string]any) newGethErr := json.Unmarshal(prysmBlob, &newGethResp) assert.NoError(t, newGethErr) - newGethResp2 := make(map[string]interface{}) + newGethResp2 := make(map[string]any) newGethErr = json.Unmarshal(gethBlob, &newGethResp2) assert.NoError(t, newGethErr) @@ -199,13 +196,13 @@ func FuzzExecutionBlock(f *testing.F) { }) } -func isBogusTransactionHash(blk *pb.ExecutionBlock, jsonMap map[string]interface{}) bool { +func isBogusTransactionHash(blk *pb.ExecutionBlock, jsonMap map[string]any) bool { if blk.Transactions == nil { return false } for i, tx := range blk.Transactions { - jsonTx, ok := jsonMap["transactions"].([]interface{})[i].(map[string]interface{}) + jsonTx, ok := jsonMap["transactions"].([]any)[i].(map[string]any) if !ok { return true } @@ -244,13 +241,13 @@ func compareHeaders(t *testing.T, jsonBlob []byte) { assert.DeepEqual(t, newGethResp, newGethResp2) } -func validateBlockConsistency(execBlock *pb.ExecutionBlock, jsonMap map[string]interface{}) error { +func validateBlockConsistency(execBlock *pb.ExecutionBlock, jsonMap map[string]any) error { blockVal := reflect.ValueOf(execBlock).Elem() - bType := reflect.TypeOf(execBlock).Elem() + bType := reflect.TypeFor[pb.ExecutionBlock]() fieldnum := bType.NumField() - for i := 0; i < fieldnum; i++ { + for i := range fieldnum { field := bType.Field(i) fName := field.Tag.Get("json") if field.Name == "Header" { @@ -278,12 +275,12 @@ func validateBlockConsistency(execBlock *pb.ExecutionBlock, jsonMap map[string]i return nil } -func jsonFieldsAreValid(execBlock *pb.ExecutionBlock, jsonMap map[string]interface{}) (bool, error) { - bType := reflect.TypeOf(execBlock).Elem() +func jsonFieldsAreValid(execBlock *pb.ExecutionBlock, jsonMap map[string]any) (bool, error) { + bType := reflect.TypeFor[pb.ExecutionBlock]() fieldnum := bType.NumField() - for i := 0; i < fieldnum; i++ { + for i := range fieldnum { field := bType.Field(i) fName := field.Tag.Get("json") if field.Name == "Header" { diff --git a/beacon-chain/execution/engine_client_test.go b/beacon-chain/execution/engine_client_test.go index b7368e81c4..727d345469 100644 --- a/beacon-chain/execution/engine_client_test.go +++ b/beacon-chain/execution/engine_client_test.go @@ -56,7 +56,7 @@ func (RPCClientBad) BatchCall([]rpc.BatchElem) error { return errors.New("rpc client is not initialized") } -func (RPCClientBad) CallContext(context.Context, interface{}, string, ...interface{}) error { +func (RPCClientBad) CallContext(context.Context, any, string, ...any) error { return ethereum.NotFound } @@ -192,7 +192,7 @@ func TestClient_HTTP(t *testing.T) { require.Equal(t, true, strings.Contains( jsonRequestString, string(reqArg), )) - resp := map[string]interface{}{ + resp := map[string]any{ "jsonrpc": "2.0", "id": 1, "result": want, @@ -238,7 +238,7 @@ func TestClient_HTTP(t *testing.T) { require.Equal(t, true, strings.Contains( jsonRequestString, string(reqArg), )) - resp := map[string]interface{}{ + resp := map[string]any{ "jsonrpc": "2.0", "id": 1, "result": want, @@ -291,7 +291,7 @@ func TestClient_HTTP(t *testing.T) { require.Equal(t, true, 
strings.Contains( jsonRequestString, string(reqArg), )) - resp := map[string]interface{}{ + resp := map[string]any{ "jsonrpc": "2.0", "id": 1, "result": want, @@ -346,7 +346,7 @@ func TestClient_HTTP(t *testing.T) { require.Equal(t, true, strings.Contains( jsonRequestString, string(reqArg), )) - resp := map[string]interface{}{ + resp := map[string]any{ "jsonrpc": "2.0", "id": 1, "result": want, @@ -428,7 +428,7 @@ func TestClient_HTTP(t *testing.T) { require.Equal(t, true, strings.Contains( jsonRequestString, string(reqArg), )) - resp := map[string]interface{}{ + resp := map[string]any{ "jsonrpc": "2.0", "id": 1, "result": want, @@ -939,7 +939,7 @@ func TestClient_HTTP(t *testing.T) { defer func() { require.NoError(t, r.Body.Close()) }() - resp := map[string]interface{}{ + resp := map[string]any{ "jsonrpc": "2.0", "id": 1, "result": want, @@ -977,7 +977,7 @@ func TestClient_HTTP(t *testing.T) { require.Equal(t, true, strings.Contains( jsonRequestString, fmt.Sprintf("%#x", arg), )) - resp := map[string]interface{}{ + resp := map[string]any{ "jsonrpc": "2.0", "id": 1, "result": want, @@ -1039,7 +1039,7 @@ func TestReconstructFullBellatrixBlock(t *testing.T) { payload, ok := fix["ExecutionPayload"].(*pb.ExecutionPayload) require.Equal(t, true, ok) - jsonPayload := make(map[string]interface{}) + jsonPayload := make(map[string]any) tx := gethtypes.NewTransaction( 0, common.HexToAddress("095e7baea6a6c7c4c2dfeb977efac326af552d87"), @@ -1064,10 +1064,10 @@ func TestReconstructFullBellatrixBlock(t *testing.T) { defer func() { require.NoError(t, r.Body.Close()) }() - respJSON := map[string]interface{}{ + respJSON := map[string]any{ "jsonrpc": "2.0", "id": 1, - "result": []map[string]interface{}{jsonPayload}, + "result": []map[string]any{jsonPayload}, } require.NoError(t, json.NewEncoder(w).Encode(respJSON)) })) @@ -1131,7 +1131,7 @@ func TestReconstructFullBellatrixBlockBatch(t *testing.T) { payload, ok := fix["ExecutionPayload"].(*pb.ExecutionPayload) require.Equal(t, true, ok) - jsonPayload := make(map[string]interface{}) + jsonPayload := make(map[string]any) tx := gethtypes.NewTransaction( 0, common.HexToAddress("095e7baea6a6c7c4c2dfeb977efac326af552d87"), @@ -1168,10 +1168,10 @@ func TestReconstructFullBellatrixBlockBatch(t *testing.T) { require.NoError(t, r.Body.Close()) }() - respJSON := map[string]interface{}{ + respJSON := map[string]any{ "jsonrpc": "2.0", "id": 1, - "result": []map[string]interface{}{jsonPayload}, + "result": []map[string]any{jsonPayload}, } require.NoError(t, json.NewEncoder(w).Encode(respJSON)) @@ -1206,7 +1206,7 @@ func TestReconstructFullBellatrixBlockBatch(t *testing.T) { payload, ok := fix["ExecutionPayload"].(*pb.ExecutionPayload) require.Equal(t, true, ok) - jsonPayload := make(map[string]interface{}) + jsonPayload := make(map[string]any) tx := gethtypes.NewTransaction( 0, common.HexToAddress("095e7baea6a6c7c4c2dfeb977efac326af552d87"), @@ -1241,10 +1241,10 @@ func TestReconstructFullBellatrixBlockBatch(t *testing.T) { require.NoError(t, r.Body.Close()) }() - respJSON := map[string]interface{}{ + respJSON := map[string]any{ "jsonrpc": "2.0", "id": 1, - "result": []map[string]interface{}{}, + "result": []map[string]any{}, } require.NoError(t, json.NewEncoder(w).Encode(respJSON)) @@ -1473,7 +1473,7 @@ func (c *customError) Timeout() bool { type dataError struct { code int - data interface{} + data any } func (c *dataError) ErrorCode() int { @@ -1484,7 +1484,7 @@ func (*dataError) Error() string { return "something went wrong" } -func (c *dataError) ErrorData() 
interface{} { +func (c *dataError) ErrorData() any { return c.data } @@ -1576,9 +1576,9 @@ func newTestIPCServer(t *testing.T) *rpc.Server { return server } -func fixtures() map[string]interface{} { +func fixtures() map[string]any { s := fixturesStruct() - return map[string]interface{}{ + return map[string]any{ "ExecutionBlock": s.ExecutionBlock, "ExecutionPayloadBody": s.ExecutionPayloadBody, "ExecutionPayload": s.ExecutionPayload, @@ -2173,7 +2173,7 @@ func forkchoiceUpdateSetup(t *testing.T, fcs *pb.ForkchoiceState, att *pb.Payloa require.Equal(t, true, strings.Contains( jsonRequestString, string(payloadAttrsReq), )) - resp := map[string]interface{}{ + resp := map[string]any{ "jsonrpc": "2.0", "id": 1, "result": res, @@ -2212,7 +2212,7 @@ func forkchoiceUpdateSetupV2(t *testing.T, fcs *pb.ForkchoiceState, att *pb.Payl require.Equal(t, true, strings.Contains( jsonRequestString, string(payloadAttrsReq), )) - resp := map[string]interface{}{ + resp := map[string]any{ "jsonrpc": "2.0", "id": 1, "result": res, @@ -2246,7 +2246,7 @@ func newPayloadSetup(t *testing.T, status *pb.PayloadStatus, payload *pb.Executi require.Equal(t, true, strings.Contains( jsonRequestString, string(reqArg), )) - resp := map[string]interface{}{ + resp := map[string]any{ "jsonrpc": "2.0", "id": 1, "result": status, @@ -2280,7 +2280,7 @@ func newPayloadV2Setup(t *testing.T, status *pb.PayloadStatus, payload *pb.Execu require.Equal(t, true, strings.Contains( jsonRequestString, string(reqArg), )) - resp := map[string]interface{}{ + resp := map[string]any{ "jsonrpc": "2.0", "id": 1, "result": status, @@ -2314,7 +2314,7 @@ func newPayloadV3Setup(t *testing.T, status *pb.PayloadStatus, payload *pb.Execu require.Equal(t, true, strings.Contains( jsonRequestString, string(reqArg), )) - resp := map[string]interface{}{ + resp := map[string]any{ "jsonrpc": "2.0", "id": 1, "result": status, @@ -2362,7 +2362,7 @@ func newPayloadV4Setup(t *testing.T, status *pb.PayloadStatus, payload *pb.Execu jsonRequestString, string(jsonRequests), )) - resp := map[string]interface{}{ + resp := map[string]any{ "jsonrpc": "2.0", "id": 1, "result": status, @@ -2418,7 +2418,7 @@ func Test_ExchangeCapabilities(t *testing.T) { defer func() { require.NoError(t, r.Body.Close()) }() - resp := map[string]interface{}{ + resp := map[string]any{ "jsonrpc": "2.0", "id": 1, "result": []string{}, @@ -2451,7 +2451,7 @@ func Test_ExchangeCapabilities(t *testing.T) { require.NoError(t, r.Body.Close()) }() - resp := map[string]interface{}{ + resp := map[string]any{ "jsonrpc": "2.0", "id": 1, "result": []string{"A", "B", "C"}, @@ -2509,7 +2509,7 @@ func TestReconstructBlobSidecars(t *testing.T) { require.Equal(t, 0, len(verifiedBlobs)) }) - client.capabilityCache = &capabilityCache{capabilities: map[string]interface{}{GetBlobsV1: nil}} + client.capabilityCache = &capabilityCache{capabilities: map[string]any{GetBlobsV1: nil}} t.Run("recovered 6 missing blobs", func(t *testing.T) { srv := createBlobServer(t, 6) @@ -2652,10 +2652,10 @@ func createBlobServer(t *testing.T, numBlobs int, callbackFuncs ...func()) *http blobs := make([]pb.BlobAndProofJson, numBlobs) for i := range blobs { - blobs[i] = pb.BlobAndProofJson{Blob: []byte(fmt.Sprintf("blob%d", i+1)), KzgProof: []byte(fmt.Sprintf("proof%d", i+1))} + blobs[i] = pb.BlobAndProofJson{Blob: fmt.Appendf(nil, "blob%d", i+1), KzgProof: fmt.Appendf(nil, "proof%d", i+1)} } - respJSON := map[string]interface{}{ + respJSON := map[string]any{ "jsonrpc": "2.0", "id": 1, "result": blobs, @@ -2689,7 +2689,7 @@ func 
createBlobServerV2(t *testing.T, numBlobs int, blobMasks []bool) *httptest. } } - respJSON := map[string]interface{}{ + respJSON := map[string]any{ "jsonrpc": "2.0", "id": 1, "result": blobAndCellProofs, @@ -2705,7 +2705,7 @@ func setupRpcClient(t *testing.T, url string, client *Service) (*rpc.Client, *Se require.NoError(t, err) client.rpcClient = rpcClient - client.capabilityCache = &capabilityCache{capabilities: map[string]interface{}{GetBlobsV1: nil}} + client.capabilityCache = &capabilityCache{capabilities: map[string]any{GetBlobsV1: nil}} client.blobVerifier = testNewBlobVerifier() return rpcClient, client @@ -2713,7 +2713,7 @@ func setupRpcClient(t *testing.T, url string, client *Service) (*rpc.Client, *Se func setupRpcClientV2(t *testing.T, url string, client *Service) (*rpc.Client, *Service) { rpcClient, client := setupRpcClient(t, url, client) - client.capabilityCache = &capabilityCache{capabilities: map[string]interface{}{GetBlobsV2: nil}} + client.capabilityCache = &capabilityCache{capabilities: map[string]any{GetBlobsV2: nil}} return rpcClient, client } diff --git a/beacon-chain/execution/log_processing_test.go b/beacon-chain/execution/log_processing_test.go index a3673bc797..11bd290a8a 100644 --- a/beacon-chain/execution/log_processing_test.go +++ b/beacon-chain/execution/log_processing_test.go @@ -254,7 +254,7 @@ func TestProcessETH2GenesisLog_8DuplicatePubkeys(t *testing.T) { // 64 Validators are used as size required for beacon-chain to start. This number // is defined in the deposit contract as the number required for the testnet. The actual number // is 2**14 - for i := 0; i < depositsReqForChainStart; i++ { + for range depositsReqForChainStart { testAcc.TxOpts.Value = mock.Amount32Eth() _, err = testAcc.Contract.Deposit(testAcc.TxOpts, data.PublicKey, data.WithdrawalCredentials, data.Signature, depositRoots[0]) require.NoError(t, err, "Could not deposit to deposit contract") @@ -325,7 +325,7 @@ func TestProcessETH2GenesisLog(t *testing.T) { // 64 Validators are used as size required for beacon-chain to start. This number // is defined in the deposit contract as the number required for the testnet. The actual number // is 2**14 - for i := 0; i < depositsReqForChainStart; i++ { + for i := range depositsReqForChainStart { data := deposits[i].Data testAcc.TxOpts.Value = mock.Amount32Eth() testAcc.TxOpts.GasLimit = 1000000 @@ -429,7 +429,7 @@ func TestProcessETH2GenesisLog_CorrectNumOfDeposits(t *testing.T) { // 64 Validators are used as size required for beacon-chain to start. This number // is defined in the deposit contract as the number required for the testnet. The actual number // is 2**14 - for i := 0; i < totalNumOfDeposits; i++ { + for i := range totalNumOfDeposits { data := deposits[i].Data testAcc.TxOpts.Value = mock.Amount32Eth() testAcc.TxOpts.GasLimit = 1000000 @@ -530,7 +530,7 @@ func TestProcessLogs_DepositRequestsStarted(t *testing.T) { // 64 Validators are used as size required for beacon-chain to start. This number // is defined in the deposit contract as the number required for the testnet. The actual number // is 2**14 - for i := 0; i < totalNumOfDeposits; i++ { + for i := range totalNumOfDeposits { data := deposits[i].Data testAcc.TxOpts.Value = mock.Amount32Eth() testAcc.TxOpts.GasLimit = 1000000 @@ -616,7 +616,7 @@ func TestProcessETH2GenesisLog_LargePeriodOfNoLogs(t *testing.T) { // 64 Validators are used as size required for beacon-chain to start. This number // is defined in the deposit contract as the number required for the testnet. 
The actual number // is 2**14 - for i := 0; i < totalNumOfDeposits; i++ { + for i := range totalNumOfDeposits { data := deposits[i].Data testAcc.TxOpts.Value = mock.Amount32Eth() testAcc.TxOpts.GasLimit = 1000000 @@ -629,7 +629,7 @@ func TestProcessETH2GenesisLog_LargePeriodOfNoLogs(t *testing.T) { } } // Forward the chain to 'mine' blocks without logs - for i := uint64(0); i < 1500; i++ { + for range uint64(1500) { testAcc.Backend.Commit() } genesisBlock, err := testAcc.Backend.Client().BlockByNumber(t.Context(), nil) diff --git a/beacon-chain/execution/mock_test.go b/beacon-chain/execution/mock_test.go index 500f71c529..c39b7ed979 100644 --- a/beacon-chain/execution/mock_test.go +++ b/beacon-chain/execution/mock_test.go @@ -15,9 +15,9 @@ import ( var mockHandlerDefaultName = "__default__" type jsonError struct { - Code int `json:"code"` - Message string `json:"message"` - Data interface{} `json:"data,omitempty"` + Code int `json:"code"` + Message string `json:"message"` + Data any `json:"data,omitempty"` } type jsonrpcMessage struct { @@ -154,12 +154,12 @@ func TestParseRequest(t *testing.T) { }) result := make([]*pb.ExecutionPayloadBody, 0) - var args []interface{} + var args []any if len(c.byteArgs) > 0 { - args = []interface{}{c.byteArgs} + args = []any{c.byteArgs} } if len(c.hexArgs) > 0 { - args = make([]interface{}, len(c.hexArgs)) + args = make([]any, len(c.hexArgs)) for i := range c.hexArgs { args[i] = c.hexArgs[i] } diff --git a/beacon-chain/execution/payload_body.go b/beacon-chain/execution/payload_body.go index bda68063f5..fe10bbdd8d 100644 --- a/beacon-chain/execution/payload_body.go +++ b/beacon-chain/execution/payload_body.go @@ -142,7 +142,7 @@ func computeRanges(hbns []hashBlockNumber) []byRangeReq { ranges := make([]byRangeReq, 0) start := hbns[0].n count := uint64(0) - for i := 0; i < len(hbns); i++ { + for i := range hbns { if hbns[i].n == start+count { count++ continue diff --git a/beacon-chain/execution/service.go b/beacon-chain/execution/service.go index f621fd1d3c..f9d35fd7a5 100644 --- a/beacon-chain/execution/service.go +++ b/beacon-chain/execution/service.go @@ -103,7 +103,7 @@ type Chain interface { type RPCClient interface { Close() BatchCall(b []gethRPC.BatchElem) error - CallContext(ctx context.Context, result interface{}, method string, args ...interface{}) error + CallContext(ctx context.Context, result any, method string, args ...any) error } type RPCClientEmpty struct { @@ -114,7 +114,7 @@ func (RPCClientEmpty) BatchCall([]gethRPC.BatchElem) error { return errors.New("rpc client is not initialized") } -func (RPCClientEmpty) CallContext(context.Context, interface{}, string, ...interface{}) error { +func (RPCClientEmpty) CallContext(context.Context, any, string, ...any) error { return errors.New("rpc client is not initialized") } @@ -426,7 +426,7 @@ func (s *Service) batchRequestHeaders(startBlock, endBlock uint64) ([]*types.Hea header := &types.HeaderInfo{} elems = append(elems, gethRPC.BatchElem{ Method: "eth_getBlockByNumber", - Args: []interface{}{hexutil.EncodeBig(new(big.Int).SetUint64(i)), false}, + Args: []any{hexutil.EncodeBig(new(big.Int).SetUint64(i)), false}, Result: header, Error: error(nil), }) @@ -922,7 +922,7 @@ func newBlobVerifierFromInitializer(ini *verification.Initializer) verification. 
} type capabilityCache struct { - capabilities map[string]interface{} + capabilities map[string]any capabilitiesLock sync.RWMutex } @@ -931,7 +931,7 @@ func (c *capabilityCache) save(cs []string) { defer c.capabilitiesLock.Unlock() if c.capabilities == nil { - c.capabilities = make(map[string]interface{}) + c.capabilities = make(map[string]any) } for _, capability := range cs { diff --git a/beacon-chain/execution/service_test.go b/beacon-chain/execution/service_test.go index fe4bc36e81..e7be8bd9cb 100644 --- a/beacon-chain/execution/service_test.go +++ b/beacon-chain/execution/service_test.go @@ -61,7 +61,7 @@ func (g *goodLogger) SubscribeFilterLogs(ctx context.Context, q ethereum.FilterQ func (g *goodLogger) FilterLogs(ctx context.Context, q ethereum.FilterQuery) ([]gethTypes.Log, error) { if g.backend == nil { logs := make([]gethTypes.Log, 3) - for i := 0; i < len(logs); i++ { + for i := range logs { logs[i].Address = common.Address{} logs[i].Topics = make([]common.Hash, 5) logs[i].Topics[0] = common.Hash{'a'} @@ -246,7 +246,7 @@ func TestFollowBlock_OK(t *testing.T) { numToForward := uint64(2) expectedHeight := numToForward + baseHeight // forward 2 blocks - for i := uint64(0); i < numToForward; i++ { + for range numToForward { testAcc.Backend.Commit() } @@ -344,7 +344,7 @@ func TestLogTillGenesis_OK(t *testing.T) { web3Service.rpcClient = &mockExecution.RPCClient{Backend: testAcc.Backend} web3Service.httpLogger = testAcc.Backend.Client() - for i := 0; i < 30; i++ { + for range 30 { testAcc.Backend.Commit() } web3Service.latestEth1Data = ðpb.LatestETH1Data{LastRequestedBlock: 0} @@ -498,7 +498,7 @@ func TestNewService_EarliestVotingBlock(t *testing.T) { numToForward := 1500 // forward 1500 blocks - for i := 0; i < numToForward; i++ { + for range numToForward { testAcc.Backend.Commit() } currHeader, err := testAcc.Backend.Client().HeaderByNumber(t.Context(), nil) @@ -695,7 +695,7 @@ func TestService_ValidateDepositContainers(t *testing.T) { name: "ordered containers", ctrsFunc: func() []*ethpb.DepositContainer { ctrs := make([]*ethpb.DepositContainer, 0) - for i := 0; i < 10; i++ { + for i := range 10 { ctrs = append(ctrs, ðpb.DepositContainer{Index: int64(i), Eth1BlockHeight: uint64(i + 10)}) } return ctrs @@ -717,7 +717,7 @@ func TestService_ValidateDepositContainers(t *testing.T) { name: "skipped containers", ctrsFunc: func() []*ethpb.DepositContainer { ctrs := make([]*ethpb.DepositContainer, 0) - for i := 0; i < 10; i++ { + for i := range 10 { if i == 5 || i == 7 { continue } @@ -833,7 +833,7 @@ func (s *slowRPCClient) BatchCall(b []rpc.BatchElem) error { return nil } -func (s *slowRPCClient) CallContext(_ context.Context, _ interface{}, _ string, _ ...interface{}) error { +func (s *slowRPCClient) CallContext(_ context.Context, _ any, _ string, _ ...any) error { panic("implement me") } @@ -869,7 +869,7 @@ func TestService_migrateOldDepositTree(t *testing.T) { dt, err := trie.NewTrie(32) require.NoError(t, err) - for i := 0; i < totalDeposits; i++ { + for i := range totalDeposits { err := dt.Insert(input[:], i) require.NoError(t, err) } diff --git a/beacon-chain/execution/testing/mock_execution_chain.go b/beacon-chain/execution/testing/mock_execution_chain.go index c1eb76d369..bdbc40e88e 100644 --- a/beacon-chain/execution/testing/mock_execution_chain.go +++ b/beacon-chain/execution/testing/mock_execution_chain.go @@ -147,7 +147,7 @@ type RPCClient struct { func (*RPCClient) Close() {} -func (r *RPCClient) CallContext(ctx context.Context, obj interface{}, methodName string, args 
...interface{}) error { +func (r *RPCClient) CallContext(ctx context.Context, obj any, methodName string, args ...any) error { if r.BlockNumMap != nil && methodName == "eth_getBlockByNumber" { val, ok := args[0].(string) if !ok { diff --git a/beacon-chain/forkchoice/doubly-linked-tree/proposer_boost_test.go b/beacon-chain/forkchoice/doubly-linked-tree/proposer_boost_test.go index fe8e12e18c..80f88ac8c6 100644 --- a/beacon-chain/forkchoice/doubly-linked-tree/proposer_boost_test.go +++ b/beacon-chain/forkchoice/doubly-linked-tree/proposer_boost_test.go @@ -31,7 +31,7 @@ func TestForkChoice_BoostProposerRoot_PreventsExAnteAttack(t *testing.T) { jEpoch, fEpoch := primitives.Epoch(0), primitives.Epoch(0) zeroHash := params.BeaconConfig().ZeroHash balances := make([]uint64, 64) // 64 active validators. - for i := 0; i < len(balances); i++ { + for i := range balances { balances[i] = 10 } t.Run("back-propagates boost score to ancestors after proposer boosting", func(t *testing.T) { @@ -482,7 +482,7 @@ func TestForkChoice_missingProposerBoostRoots(t *testing.T) { ctx := t.Context() f := setup(1, 1) balances := make([]uint64, 64) // 64 active validators. - for i := 0; i < len(balances); i++ { + for i := range balances { balances[i] = 10 } f.justifiedBalances = balances diff --git a/beacon-chain/light-client/helpers.go b/beacon-chain/light-client/helpers.go index ef37bd082a..f23202a5cd 100644 --- a/beacon-chain/light-client/helpers.go +++ b/beacon-chain/light-client/helpers.go @@ -23,7 +23,7 @@ func createDefaultLightClientBootstrap(currentSlot primitives.Slot) (interfaces. currentEpoch := slots.ToEpoch(currentSlot) syncCommitteeSize := params.BeaconConfig().SyncCommitteeSize pubKeys := make([][]byte, syncCommitteeSize) - for i := uint64(0); i < syncCommitteeSize; i++ { + for i := range syncCommitteeSize { pubKeys[i] = make([]byte, fieldparams.BLSPubkeyLength) } currentSyncCommittee := &pb.SyncCommittee{ @@ -42,7 +42,7 @@ func createDefaultLightClientBootstrap(currentSlot primitives.Slot) (interfaces. 
} executionBranch := make([][]byte, fieldparams.ExecutionBranchDepth) - for i := 0; i < fieldparams.ExecutionBranchDepth; i++ { + for i := range fieldparams.ExecutionBranchDepth { executionBranch[i] = make([]byte, 32) } diff --git a/beacon-chain/light-client/lightclient.go b/beacon-chain/light-client/lightclient.go index 7e6063c5b4..29cdde2cc3 100644 --- a/beacon-chain/light-client/lightclient.go +++ b/beacon-chain/light-client/lightclient.go @@ -243,7 +243,7 @@ func NewLightClientUpdateFromBeaconState( func CreateDefaultLightClientUpdate(attestedBlock interfaces.ReadOnlySignedBeaconBlock) (interfaces.LightClientUpdate, error) { syncCommitteeSize := params.BeaconConfig().SyncCommitteeSize pubKeys := make([][]byte, syncCommitteeSize) - for i := uint64(0); i < syncCommitteeSize; i++ { + for i := range syncCommitteeSize { pubKeys[i] = make([]byte, fieldparams.BLSPubkeyLength) } nextSyncCommittee := &pb.SyncCommittee{ @@ -262,7 +262,7 @@ func CreateDefaultLightClientUpdate(attestedBlock interfaces.ReadOnlySignedBeaco } executionBranch := make([][]byte, fieldparams.ExecutionBranchDepth) - for i := 0; i < fieldparams.ExecutionBranchDepth; i++ { + for i := range fieldparams.ExecutionBranchDepth { executionBranch[i] = make([]byte, 32) } diff --git a/beacon-chain/light-client/lightclient_test.go b/beacon-chain/light-client/lightclient_test.go index 09255a26af..8c1e5a544c 100644 --- a/beacon-chain/light-client/lightclient_test.go +++ b/beacon-chain/light-client/lightclient_test.go @@ -76,7 +76,7 @@ func TestLightClient_NewLightClientFinalityUpdateFromBeaconState(t *testing.T) { //zeroHash := params.BeaconConfig().ZeroHash[:] require.NotNil(t, update.FinalizedHeader(), "Finalized header is nil") - require.Equal(t, reflect.TypeOf(update.FinalizedHeader().Proto()), reflect.TypeOf(&pb.LightClientHeaderAltair{}), "Finalized header is not Altair") + require.Equal(t, reflect.TypeOf(update.FinalizedHeader().Proto()), reflect.TypeFor[*pb.LightClientHeaderAltair](), "Finalized header is not Altair") updateFinalizedHeaderBeacon := update.FinalizedHeader().Beacon() require.Equal(t, finalizedBlockHeader.Header.Slot, updateFinalizedHeaderBeacon.Slot, "Finalized header slot is not equal") require.Equal(t, finalizedBlockHeader.Header.ProposerIndex, updateFinalizedHeaderBeacon.ProposerIndex, "Finalized header proposer index is not equal") @@ -109,7 +109,7 @@ func TestLightClient_NewLightClientFinalityUpdateFromBeaconState(t *testing.T) { finalizedBlockHeader, err := l.FinalizedBlock.Header() require.NoError(t, err) require.NotNil(t, update.FinalizedHeader(), "Finalized header is nil") - require.Equal(t, reflect.TypeOf(update.FinalizedHeader().Proto()), reflect.TypeOf(&pb.LightClientHeaderCapella{}), "Finalized header is not Capella") + require.Equal(t, reflect.TypeOf(update.FinalizedHeader().Proto()), reflect.TypeFor[*pb.LightClientHeaderCapella](), "Finalized header is not Capella") updateFinalizedHeaderBeacon := update.FinalizedHeader().Beacon() require.Equal(t, finalizedBlockHeader.Header.Slot, updateFinalizedHeaderBeacon.Slot, "Finalized header slot is not equal") require.Equal(t, finalizedBlockHeader.Header.ProposerIndex, updateFinalizedHeaderBeacon.ProposerIndex, "Finalized header proposer index is not equal") @@ -183,7 +183,7 @@ func TestLightClient_NewLightClientFinalityUpdateFromBeaconState(t *testing.T) { finalizedBlockHeader, err := l.FinalizedBlock.Header() require.NoError(t, err) require.NotNil(t, update.FinalizedHeader(), "Finalized header is nil") - require.Equal(t, 
reflect.TypeOf(update.FinalizedHeader().Proto()), reflect.TypeOf(&pb.LightClientHeaderCapella{}), "Finalized header is not Capella") + require.Equal(t, reflect.TypeOf(update.FinalizedHeader().Proto()), reflect.TypeFor[*pb.LightClientHeaderCapella](), "Finalized header is not Capella") updateFinalizedHeaderBeacon := update.FinalizedHeader().Beacon() require.Equal(t, finalizedBlockHeader.Header.Slot, updateFinalizedHeaderBeacon.Slot, "Finalized header slot is not equal") require.Equal(t, finalizedBlockHeader.Header.ProposerIndex, updateFinalizedHeaderBeacon.ProposerIndex, "Finalized header proposer index is not equal") @@ -293,7 +293,7 @@ func TestLightClient_NewLightClientFinalityUpdateFromBeaconState(t *testing.T) { require.NoError(t, err) require.NotNil(t, update.FinalizedHeader(), "Finalized header is nil") updateFinalizedHeaderBeacon := update.FinalizedHeader().Beacon() - require.Equal(t, reflect.TypeOf(update.FinalizedHeader().Proto()), reflect.TypeOf(&pb.LightClientHeaderDeneb{}), "Finalized header is not Deneb") + require.Equal(t, reflect.TypeOf(update.FinalizedHeader().Proto()), reflect.TypeFor[*pb.LightClientHeaderDeneb](), "Finalized header is not Deneb") require.Equal(t, finalizedBlockHeader.Header.Slot, updateFinalizedHeaderBeacon.Slot, "Finalized header slot is not equal") require.Equal(t, finalizedBlockHeader.Header.ProposerIndex, updateFinalizedHeaderBeacon.ProposerIndex, "Finalized header proposer index is not equal") require.DeepSSZEqual(t, finalizedBlockHeader.Header.ParentRoot, updateFinalizedHeaderBeacon.ParentRoot, "Finalized header parent root is not equal") diff --git a/beacon-chain/light-client/store.go b/beacon-chain/light-client/store.go index 74da9f343f..d2ce7e77bd 100644 --- a/beacon-chain/light-client/store.go +++ b/beacon-chain/light-client/store.go @@ -2,6 +2,7 @@ package light_client import ( "context" + "maps" "sync" "github.com/OffchainLabs/prysm/v7/async/event" @@ -202,9 +203,7 @@ func (s *Store) LightClientUpdates(ctx context.Context, startPeriod, endPeriod u return nil, errors.Wrapf(err, "failed to get updates from cache") } - for period, update := range cacheUpdatesByPeriod { - updatesMap[period] = update - } + maps.Copy(updatesMap, cacheUpdatesByPeriod) var updates []interfaces.LightClientUpdate diff --git a/beacon-chain/light-client/store_test.go b/beacon-chain/light-client/store_test.go index 06bd48f9d3..2f399ecaa6 100644 --- a/beacon-chain/light-client/store_test.go +++ b/beacon-chain/light-client/store_test.go @@ -366,7 +366,7 @@ func TestLightClientStore_MigrateToCold(t *testing.T) { s := NewLightClientStore(&p2pTesting.FakeP2P{}, new(event.Feed), beaconDB) require.NotNil(t, s) - for i := 0; i < 3; i++ { + for i := range 3 { newBlock := util.NewBeaconBlock() newBlock.Block.Slot = primitives.Slot(32 + uint64(i)) newBlock.Block.ParentRoot = finalizedBlockRoot[:] @@ -394,7 +394,7 @@ func TestLightClientStore_MigrateToCold(t *testing.T) { s := NewLightClientStore(&p2pTesting.FakeP2P{}, new(event.Feed), beaconDB) require.NotNil(t, s) - for i := 0; i < 3; i++ { + for i := range 3 { newBlock := util.NewBeaconBlock() newBlock.Block.Slot = primitives.Slot(32 + uint64(i)) newBlock.Block.ParentRoot = finalizedBlockRoot[:] @@ -844,7 +844,7 @@ func TestLightClientStore_LightClientUpdatesByRange(t *testing.T) { updates, err := s.LightClientUpdates(ctx, 1, 5, headBlock) require.NoError(t, err) require.Equal(t, 5, len(updates)) - for i := 0; i < 5; i++ { + for i := range 5 { require.DeepEqual(t, update, updates[i], "Expected to find the update in the store") } }) 
@@ -899,7 +899,7 @@ func TestLightClientStore_LightClientUpdatesByRange(t *testing.T) { require.NoError(t, err) require.Equal(t, 5, len(updates)) // first two updates should be update1 - for i := 0; i < 2; i++ { + for i := range 2 { require.DeepEqual(t, update1, updates[i], "Expected to find the update in the store") } // next three updates should be update2 - as cache overrides db diff --git a/beacon-chain/monitor/service.go b/beacon-chain/monitor/service.go index 7674bb2d76..0771f201c0 100644 --- a/beacon-chain/monitor/service.go +++ b/beacon-chain/monitor/service.go @@ -3,7 +3,7 @@ package monitor import ( "context" "errors" - "sort" + "slices" "sync" "github.com/OffchainLabs/prysm/v7/async/event" @@ -108,7 +108,7 @@ func (s *Service) Start() { for idx := range s.TrackedValidators { tracked = append(tracked, idx) } - sort.Slice(tracked, func(i, j int) bool { return tracked[i] < tracked[j] }) + slices.Sort(tracked) log.WithFields(logrus.Fields{ "validatorIndices": tracked, diff --git a/beacon-chain/monitor/service_test.go b/beacon-chain/monitor/service_test.go index e4c4d84243..0e83bd46c2 100644 --- a/beacon-chain/monitor/service_test.go +++ b/beacon-chain/monitor/service_test.go @@ -204,12 +204,10 @@ func TestMonitorRoutine(t *testing.T) { stateSub := s.config.StateNotifier.StateFeed().Subscribe(stateChannel) wg := &sync.WaitGroup{} - wg.Add(1) - go func() { + wg.Go(func() { s.monitorRoutine(stateChannel, stateSub) - wg.Done() - }() + }) genesis, keys := util.DeterministicGenesisStateAltair(t, 64) c, err := altair.NextSyncCommittee(ctx, genesis) diff --git a/beacon-chain/node/config_test.go b/beacon-chain/node/config_test.go index f03e106a62..0dc9d39c86 100644 --- a/beacon-chain/node/config_test.go +++ b/beacon-chain/node/config_test.go @@ -138,9 +138,9 @@ func TestConfigureNetwork_ConfigFile(t *testing.T) { set := flag.NewFlagSet("test", 0) context := cli.NewContext(&app, set, nil) - require.NoError(t, os.WriteFile("flags_test.yaml", []byte(fmt.Sprintf("%s:\n - %s\n - %s\n", cmd.BootstrapNode.Name, + require.NoError(t, os.WriteFile("flags_test.yaml", fmt.Appendf(nil, "%s:\n - %s\n - %s\n", cmd.BootstrapNode.Name, "node1", - "node2")), 0666)) + "node2"), 0666)) require.NoError(t, set.Parse([]string{"test-command", "--" + cmd.ConfigFileFlag.Name, "flags_test.yaml"})) comFlags := cmd.WrapFlags([]cli.Flag{ diff --git a/beacon-chain/operations/attestations/kv/aggregated.go b/beacon-chain/operations/attestations/kv/aggregated.go index e15e439b11..3339f36d89 100644 --- a/beacon-chain/operations/attestations/kv/aggregated.go +++ b/beacon-chain/operations/attestations/kv/aggregated.go @@ -72,7 +72,7 @@ func (c *AttCaches) aggregateParallel(atts map[attestation.Id][]ethpb.Att, leftO n := runtime.GOMAXPROCS(0) // defaults to the value of runtime.NumCPU ch := make(chan []ethpb.Att, n) wg.Add(n) - for i := 0; i < n; i++ { + for range n { go func() { defer wg.Done() for as := range ch { diff --git a/beacon-chain/operations/blstoexec/pool.go b/beacon-chain/operations/blstoexec/pool.go index 447b0f9a14..809a6d7647 100644 --- a/beacon-chain/operations/blstoexec/pool.go +++ b/beacon-chain/operations/blstoexec/pool.go @@ -1,6 +1,7 @@ package blstoexec import ( + "maps" "sync" "github.com/OffchainLabs/prysm/v7/beacon-chain/core/blocks" @@ -54,9 +55,7 @@ func NewPool() *Pool { // Copies the internal map and returns a new one. 
func (p *Pool) cycleMap() { newMap := make(map[primitives.ValidatorIndex]*doublylinkedlist.Node[*ethpb.SignedBLSToExecutionChange]) - for k, v := range p.m { - newMap[k] = v - } + maps.Copy(newMap, p.m) p.m = newMap } diff --git a/beacon-chain/operations/blstoexec/pool_test.go b/beacon-chain/operations/blstoexec/pool_test.go index 76ad873dce..a54867ca42 100644 --- a/beacon-chain/operations/blstoexec/pool_test.go +++ b/beacon-chain/operations/blstoexec/pool_test.go @@ -122,7 +122,7 @@ func TestBLSToExecChangesForInclusion(t *testing.T) { }) t.Run("more than MaxBlsToExecutionChanges in pool", func(t *testing.T) { pool := NewPool() - for i := uint64(0); i < numValidators; i++ { + for i := range numValidators { pool.InsertBLSToExecChange(signedChanges[i]) } changes, err := pool.BLSToExecChangesForInclusion(st) @@ -137,7 +137,7 @@ func TestBLSToExecChangesForInclusion(t *testing.T) { pool := NewPool() saveByte := signedChanges[1].Message.FromBlsPubkey[5] signedChanges[1].Message.FromBlsPubkey[5] = 0xff - for i := uint64(0); i < numValidators; i++ { + for i := range numValidators { pool.InsertBLSToExecChange(signedChanges[i]) } changes, err := pool.BLSToExecChangesForInclusion(st) @@ -149,7 +149,7 @@ func TestBLSToExecChangesForInclusion(t *testing.T) { t.Run("One Bad Signature", func(t *testing.T) { pool := NewPool() copy(signedChanges[30].Signature, signedChanges[31].Signature) - for i := uint64(0); i < numValidators; i++ { + for i := range numValidators { pool.InsertBLSToExecChange(signedChanges[i]) } changes, err := pool.BLSToExecChangesForInclusion(st) diff --git a/beacon-chain/operations/slashings/service_attester_test.go b/beacon-chain/operations/slashings/service_attester_test.go index 6f74036cb8..3758d41fc9 100644 --- a/beacon-chain/operations/slashings/service_attester_test.go +++ b/beacon-chain/operations/slashings/service_attester_test.go @@ -105,7 +105,7 @@ func TestPool_InsertAttesterSlashing(t *testing.T) { setupFunc := func(beaconState state.BeaconState, privKeys []bls.SecretKey) []testCase { pendingSlashings := make([]*PendingAttesterSlashing, 20) slashings := make([]ethpb.AttSlashing, 20) - for i := 0; i < len(pendingSlashings); i++ { + for i := range pendingSlashings { generatedSl, err := util.GenerateAttesterSlashingForValidator(beaconState, privKeys[i], primitives.ValidatorIndex(i)) require.NoError(t, err) pendingSlashings[i] = &PendingAttesterSlashing{ @@ -341,7 +341,7 @@ func TestPool_InsertAttesterSlashing_SigFailsVerify_ClearPool(t *testing.T) { beaconState, privKeys := util.DeterministicGenesisState(t, 64) pendingSlashings := make([]*PendingAttesterSlashing, 2) slashings := make([]*ethpb.AttesterSlashing, 2) - for i := 0; i < 2; i++ { + for i := range 2 { generatedSl, err := util.GenerateAttesterSlashingForValidator(beaconState, privKeys[i], primitives.ValidatorIndex(i)) require.NoError(t, err) pendingSlashings[i] = &PendingAttesterSlashing{ @@ -522,7 +522,7 @@ func TestPool_PendingAttesterSlashings(t *testing.T) { beaconState, privKeys := util.DeterministicGenesisState(t, 64) pendingSlashings := make([]*PendingAttesterSlashing, 20) slashings := make([]ethpb.AttSlashing, 20) - for i := 0; i < len(pendingSlashings); i++ { + for i := range pendingSlashings { sl, err := util.GenerateAttesterSlashingForValidator(beaconState, privKeys[i], primitives.ValidatorIndex(i)) require.NoError(t, err) pendingSlashings[i] = &PendingAttesterSlashing{ @@ -586,7 +586,7 @@ func TestPool_PendingAttesterSlashings_AfterElectra(t *testing.T) { pendingSlashings := 
make([]*PendingAttesterSlashing, 20) slashings := make([]ethpb.AttSlashing, 20) - for i := 0; i < len(pendingSlashings); i++ { + for i := range pendingSlashings { sl, err := util.GenerateAttesterSlashingForValidator(beaconState, privKeys[i], primitives.ValidatorIndex(i)) require.NoError(t, err) pendingSlashings[i] = &PendingAttesterSlashing{ @@ -661,7 +661,7 @@ func TestPool_PendingAttesterSlashings_Slashed(t *testing.T) { pendingSlashings := make([]*PendingAttesterSlashing, 20) pendingSlashings2 := make([]*PendingAttesterSlashing, 20) slashings := make([]ethpb.AttSlashing, 20) - for i := 0; i < len(pendingSlashings); i++ { + for i := range pendingSlashings { sl, err := util.GenerateAttesterSlashingForValidator(beaconState, privKeys[i], primitives.ValidatorIndex(i)) require.NoError(t, err) pendingSlashings[i] = &PendingAttesterSlashing{ @@ -719,7 +719,7 @@ func TestPool_PendingAttesterSlashings_NoDuplicates(t *testing.T) { beaconState, privKeys := util.DeterministicGenesisState(t, 64) pendingSlashings := make([]*PendingAttesterSlashing, 3) slashings := make([]ethpb.AttSlashing, 3) - for i := 0; i < 2; i++ { + for i := range 2 { sl, err := util.GenerateAttesterSlashingForValidator(beaconState, privKeys[i], primitives.ValidatorIndex(i)) require.NoError(t, err) pendingSlashings[i] = &PendingAttesterSlashing{ diff --git a/beacon-chain/operations/slashings/service_proposer_test.go b/beacon-chain/operations/slashings/service_proposer_test.go index d8e043d740..2c3ffd41cb 100644 --- a/beacon-chain/operations/slashings/service_proposer_test.go +++ b/beacon-chain/operations/slashings/service_proposer_test.go @@ -35,7 +35,7 @@ func TestPool_InsertProposerSlashing(t *testing.T) { beaconState, privKeys := util.DeterministicGenesisState(t, 64) slashings := make([]*ethpb.ProposerSlashing, 20) - for i := 0; i < len(slashings); i++ { + for i := range slashings { sl, err := util.GenerateProposerSlashingForValidator(beaconState, privKeys[i], primitives.ValidatorIndex(i)) require.NoError(t, err) slashings[i] = sl @@ -185,7 +185,7 @@ func TestPool_InsertProposerSlashing_SigFailsVerify_ClearPool(t *testing.T) { params.OverrideBeaconConfig(conf) beaconState, privKeys := util.DeterministicGenesisState(t, 64) slashings := make([]*ethpb.ProposerSlashing, 2) - for i := 0; i < 2; i++ { + for i := range 2 { sl, err := util.GenerateProposerSlashingForValidator(beaconState, privKeys[i], primitives.ValidatorIndex(i)) require.NoError(t, err) slashings[i] = sl @@ -328,7 +328,7 @@ func TestPool_PendingProposerSlashings(t *testing.T) { } beaconState, privKeys := util.DeterministicGenesisState(t, 64) slashings := make([]*ethpb.ProposerSlashing, 20) - for i := 0; i < len(slashings); i++ { + for i := range slashings { sl, err := util.GenerateProposerSlashingForValidator(beaconState, privKeys[i], primitives.ValidatorIndex(i)) require.NoError(t, err) slashings[i] = sl @@ -395,7 +395,7 @@ func TestPool_PendingProposerSlashings_Slashed(t *testing.T) { slashings := make([]*ethpb.ProposerSlashing, 32) slashings2 := make([]*ethpb.ProposerSlashing, 32) result := make([]*ethpb.ProposerSlashing, 32) - for i := 0; i < len(slashings); i++ { + for i := range slashings { sl, err := util.GenerateProposerSlashingForValidator(beaconState, privKeys[i], primitives.ValidatorIndex(i)) require.NoError(t, err) slashings[i] = sl diff --git a/beacon-chain/operations/voluntaryexits/pool_test.go b/beacon-chain/operations/voluntaryexits/pool_test.go index 29f89fc034..2eb3318d17 100644 --- a/beacon-chain/operations/voluntaryexits/pool_test.go +++ 
b/beacon-chain/operations/voluntaryexits/pool_test.go @@ -124,7 +124,7 @@ func TestExitsForInclusion(t *testing.T) { }) t.Run("more than MaxVoluntaryExits in pool", func(t *testing.T) { pool := NewPool() - for i := uint64(0); i < numValidators; i++ { + for i := range numValidators { pool.InsertVoluntaryExit(signedExits[i]) } exits, err := pool.ExitsForInclusion(st, stateSlot) diff --git a/beacon-chain/p2p/BUILD.bazel b/beacon-chain/p2p/BUILD.bazel index 8d27988d82..714af413e5 100644 --- a/beacon-chain/p2p/BUILD.bazel +++ b/beacon-chain/p2p/BUILD.bazel @@ -142,6 +142,7 @@ go_test( "topics_test.go", "utils_test.go", ], + data = glob(["testdata/**"]), embed = [":go_default_library"], flaky = True, tags = ["requires-network"], diff --git a/beacon-chain/p2p/broadcaster_test.go b/beacon-chain/p2p/broadcaster_test.go index 81041b0b3a..88b26ca032 100644 --- a/beacon-chain/p2p/broadcaster_test.go +++ b/beacon-chain/p2p/broadcaster_test.go @@ -60,7 +60,7 @@ func TestService_Broadcast(t *testing.T) { topic := "/eth2/%x/testing" // Set a test gossip mapping for testpb.TestSimpleMessage. - GossipTypeMapping[reflect.TypeOf(msg)] = topic + GossipTypeMapping[reflect.TypeFor[*ethpb.Fork]()] = topic digest, err := p.currentForkDigest() require.NoError(t, err) topic = fmt.Sprintf(topic, digest) @@ -106,7 +106,7 @@ func TestService_Broadcast_ReturnsErr_TopicNotMapped(t *testing.T) { } func TestService_Attestation_Subnet(t *testing.T) { - if gtm := GossipTypeMapping[reflect.TypeOf(ðpb.Attestation{})]; gtm != AttestationSubnetTopicFormat { + if gtm := GossipTypeMapping[reflect.TypeFor[*ethpb.Attestation]()]; gtm != AttestationSubnetTopicFormat { t.Errorf("Constant is out of date. Wanted %s, got %s", AttestationSubnetTopicFormat, gtm) } @@ -174,7 +174,7 @@ func TestService_BroadcastAttestation(t *testing.T) { subnet := uint64(5) topic := AttestationSubnetTopicFormat - GossipTypeMapping[reflect.TypeOf(msg)] = topic + GossipTypeMapping[reflect.TypeFor[*ethpb.Attestation]()] = topic digest, err := p.currentForkDigest() require.NoError(t, err) topic = fmt.Sprintf(topic, digest, subnet) @@ -354,7 +354,7 @@ func TestService_BroadcastAttestationWithDiscoveryAttempts(t *testing.T) { msg := util.HydrateAttestation(ðpb.Attestation{AggregationBits: bitfield.NewBitlist(7)}) topic := AttestationSubnetTopicFormat - GossipTypeMapping[reflect.TypeOf(msg)] = topic + GossipTypeMapping[reflect.TypeFor[*ethpb.Attestation]()] = topic digest, err := p.currentForkDigest() require.NoError(t, err) topic = fmt.Sprintf(topic, digest, subnet) @@ -432,7 +432,7 @@ func TestService_BroadcastSyncCommittee(t *testing.T) { subnet := uint64(5) topic := SyncCommitteeSubnetTopicFormat - GossipTypeMapping[reflect.TypeOf(msg)] = topic + GossipTypeMapping[reflect.TypeFor[*ethpb.SyncCommitteeMessage]()] = topic digest, err := p.currentForkDigest() require.NoError(t, err) topic = fmt.Sprintf(topic, digest, subnet) @@ -509,7 +509,7 @@ func TestService_BroadcastBlob(t *testing.T) { subnet := uint64(0) topic := BlobSubnetTopicFormat - GossipTypeMapping[reflect.TypeOf(blobSidecar)] = topic + GossipTypeMapping[reflect.TypeFor[*ethpb.BlobSidecar]()] = topic digest, err := p.currentForkDigest() require.NoError(t, err) topic = fmt.Sprintf(topic, digest, subnet) diff --git a/beacon-chain/p2p/connection_gater_test.go b/beacon-chain/p2p/connection_gater_test.go index b40a55e7a4..166796b9ef 100644 --- a/beacon-chain/p2p/connection_gater_test.go +++ b/beacon-chain/p2p/connection_gater_test.go @@ -48,7 +48,7 @@ func TestPeer_AtMaxLimit(t *testing.T) { 
require.NoError(t, err) }() - for i := 0; i < highWatermarkBuffer; i++ { + for range highWatermarkBuffer { addPeer(t, s.peers, peers.Connected, false) } @@ -85,7 +85,7 @@ func TestService_InterceptBannedIP(t *testing.T) { require.NoError(t, err) s.started = true - for i := 0; i < ipBurst; i++ { + for range ipBurst { valid := s.validateDial(multiAddress) if !valid { t.Errorf("Expected multiaddress with ip %s to not be rejected", ip) diff --git a/beacon-chain/p2p/discovery_test.go b/beacon-chain/p2p/discovery_test.go index fc73c6d075..a18a649618 100644 --- a/beacon-chain/p2p/discovery_test.go +++ b/beacon-chain/p2p/discovery_test.go @@ -536,14 +536,14 @@ func TestInboundPeerLimit(t *testing.T) { host: fakePeer.BHost, } - for i := 0; i < 30; i++ { + for range 30 { _ = addPeer(t, s.peers, peerdata.ConnectionState(ethpb.ConnectionState_CONNECTED), false) } require.Equal(t, true, s.isPeerAtLimit(all), "not at limit for outbound peers") require.Equal(t, false, s.isPeerAtLimit(inbound), "at limit for inbound peers") - for i := 0; i < highWatermarkBuffer; i++ { + for range highWatermarkBuffer { _ = addPeer(t, s.peers, peerdata.ConnectionState(ethpb.ConnectionState_CONNECTED), false) } @@ -562,13 +562,13 @@ func TestOutboundPeerThreshold(t *testing.T) { host: fakePeer.BHost, } - for i := 0; i < 2; i++ { + for range 2 { _ = addPeer(t, s.peers, peerdata.ConnectionState(ethpb.ConnectionState_CONNECTED), true) } require.Equal(t, true, s.isBelowOutboundPeerThreshold(), "not at outbound peer threshold") - for i := 0; i < 3; i++ { + for range 3 { _ = addPeer(t, s.peers, peerdata.ConnectionState(ethpb.ConnectionState_CONNECTED), true) } @@ -697,7 +697,7 @@ func addPeer(t *testing.T, p *peers.Status, state peerdata.ConnectionState, outb func createAndConnectPeer(t *testing.T, p2pService *testp2p.TestP2P, offset int) { // Create the private key. privateKeyBytes := make([]byte, 32) - for i := 0; i < 32; i++ { + for i := range 32 { privateKeyBytes[i] = byte(offset + i) } @@ -932,7 +932,7 @@ func TestRefreshPersistentSubnets(t *testing.T) { // Create the private key. privateKeyBytes := make([]byte, 32) - for i := 0; i < 32; i++ { + for i := range 32 { privateKeyBytes[i] = byte(i) } @@ -1279,7 +1279,7 @@ func TestFindPeers_received_bad_existing_node(t *testing.T) { if peerData != nil { service.peers.Add(node1_seq2.Record(), peerData.ID, nil, network.DirUnknown) // Mark as bad peer - need enough increments to exceed threshold (6) - for i := 0; i < 10; i++ { + for range 10 { service.peers.Scorers().BadResponsesScorer().Increment(peerData.ID) } } diff --git a/beacon-chain/p2p/encoder/varint.go b/beacon-chain/p2p/encoder/varint.go index f1554c83b1..98c24089ff 100644 --- a/beacon-chain/p2p/encoder/varint.go +++ b/beacon-chain/p2p/encoder/varint.go @@ -15,7 +15,7 @@ var errExcessMaxLength = errors.Errorf("provided header exceeds the max varint l // the length of the remaining bytes in the reader. func readVarint(r io.Reader) (uint64, error) { b := make([]byte, 0, maxVarintLength) - for i := 0; i < maxVarintLength; i++ { + for i := range maxVarintLength { b1 := make([]byte, 1) n, err := r.Read(b1) if err != nil { diff --git a/beacon-chain/p2p/encoder/varint_test.go b/beacon-chain/p2p/encoder/varint_test.go index 65fa6bfbba..ed35d73ff2 100644 --- a/beacon-chain/p2p/encoder/varint_test.go +++ b/beacon-chain/p2p/encoder/varint_test.go @@ -23,7 +23,7 @@ func TestReadVarint_ExceedsMaxLength(t *testing.T) { // Terminating byte. 
tByte := byte(1 << 6) var header []byte - for i := 0; i < 9; i++ { + for range 9 { header = append(header, fByte) } header = append(header, tByte) diff --git a/beacon-chain/p2p/gossip_scoring_params.go b/beacon-chain/p2p/gossip_scoring_params.go index d48d578eef..374be0cdb1 100644 --- a/beacon-chain/p2p/gossip_scoring_params.go +++ b/beacon-chain/p2p/gossip_scoring_params.go @@ -651,8 +651,8 @@ func logGossipParameters(topic string, params *pubsub.TopicScoreParams) { numOfFields := rawParams.NumField() fields := make(logrus.Fields, numOfFields) - for i := 0; i < numOfFields; i++ { - fields[reflect.TypeOf(params).Elem().Field(i).Name] = rawParams.Field(i).Interface() + for i := range numOfFields { + fields[reflect.TypeFor[pubsub.TopicScoreParams]().Field(i).Name] = rawParams.Field(i).Interface() } log.WithFields(fields).Debugf("Topic Parameters for %s", topic) } diff --git a/beacon-chain/p2p/gossip_scoring_params_test.go b/beacon-chain/p2p/gossip_scoring_params_test.go index 31a9f4f645..af96ae89c6 100644 --- a/beacon-chain/p2p/gossip_scoring_params_test.go +++ b/beacon-chain/p2p/gossip_scoring_params_test.go @@ -26,7 +26,7 @@ func TestCorrect_ActiveValidatorsCount(t *testing.T) { } bState, err := util.NewBeaconState(func(state *ethpb.BeaconState) error { validators := make([]*ethpb.Validator, params.BeaconConfig().MinGenesisActiveValidatorCount) - for i := 0; i < len(validators); i++ { + for i := range validators { validators[i] = ðpb.Validator{ PublicKey: make([]byte, 48), WithdrawalCredentials: make([]byte, 32), @@ -43,7 +43,7 @@ func TestCorrect_ActiveValidatorsCount(t *testing.T) { vals, err := s.retrieveActiveValidators() assert.NoError(t, err, "genesis state not retrieved") assert.Equal(t, int(params.BeaconConfig().MinGenesisActiveValidatorCount), int(vals), "mainnet genesis active count isn't accurate") - for i := 0; i < 100; i++ { + for range 100 { require.NoError(t, bState.AppendValidator(ðpb.Validator{ PublicKey: make([]byte, 48), WithdrawalCredentials: make([]byte, 32), diff --git a/beacon-chain/p2p/gossip_topic_mappings.go b/beacon-chain/p2p/gossip_topic_mappings.go index fbb9fcd605..cbfb7a0cf5 100644 --- a/beacon-chain/p2p/gossip_topic_mappings.go +++ b/beacon-chain/p2p/gossip_topic_mappings.go @@ -118,30 +118,30 @@ func init() { } // Specially handle Altair objects. - GossipTypeMapping[reflect.TypeOf(ðpb.SignedBeaconBlockAltair{})] = BlockSubnetTopicFormat - GossipTypeMapping[reflect.TypeOf(ðpb.LightClientFinalityUpdateAltair{})] = LightClientFinalityUpdateTopicFormat - GossipTypeMapping[reflect.TypeOf(ðpb.LightClientOptimisticUpdateAltair{})] = LightClientOptimisticUpdateTopicFormat + GossipTypeMapping[reflect.TypeFor[*ethpb.SignedBeaconBlockAltair]()] = BlockSubnetTopicFormat + GossipTypeMapping[reflect.TypeFor[*ethpb.LightClientFinalityUpdateAltair]()] = LightClientFinalityUpdateTopicFormat + GossipTypeMapping[reflect.TypeFor[*ethpb.LightClientOptimisticUpdateAltair]()] = LightClientOptimisticUpdateTopicFormat // Specially handle Bellatrix objects. - GossipTypeMapping[reflect.TypeOf(ðpb.SignedBeaconBlockBellatrix{})] = BlockSubnetTopicFormat + GossipTypeMapping[reflect.TypeFor[*ethpb.SignedBeaconBlockBellatrix]()] = BlockSubnetTopicFormat // Specially handle Capella objects. 
- GossipTypeMapping[reflect.TypeOf(ðpb.SignedBeaconBlockCapella{})] = BlockSubnetTopicFormat - GossipTypeMapping[reflect.TypeOf(ðpb.LightClientOptimisticUpdateCapella{})] = LightClientOptimisticUpdateTopicFormat - GossipTypeMapping[reflect.TypeOf(ðpb.LightClientFinalityUpdateCapella{})] = LightClientFinalityUpdateTopicFormat + GossipTypeMapping[reflect.TypeFor[*ethpb.SignedBeaconBlockCapella]()] = BlockSubnetTopicFormat + GossipTypeMapping[reflect.TypeFor[*ethpb.LightClientOptimisticUpdateCapella]()] = LightClientOptimisticUpdateTopicFormat + GossipTypeMapping[reflect.TypeFor[*ethpb.LightClientFinalityUpdateCapella]()] = LightClientFinalityUpdateTopicFormat // Specially handle Deneb objects. - GossipTypeMapping[reflect.TypeOf(ðpb.SignedBeaconBlockDeneb{})] = BlockSubnetTopicFormat - GossipTypeMapping[reflect.TypeOf(ðpb.LightClientOptimisticUpdateDeneb{})] = LightClientOptimisticUpdateTopicFormat - GossipTypeMapping[reflect.TypeOf(ðpb.LightClientFinalityUpdateDeneb{})] = LightClientFinalityUpdateTopicFormat + GossipTypeMapping[reflect.TypeFor[*ethpb.SignedBeaconBlockDeneb]()] = BlockSubnetTopicFormat + GossipTypeMapping[reflect.TypeFor[*ethpb.LightClientOptimisticUpdateDeneb]()] = LightClientOptimisticUpdateTopicFormat + GossipTypeMapping[reflect.TypeFor[*ethpb.LightClientFinalityUpdateDeneb]()] = LightClientFinalityUpdateTopicFormat // Specially handle Electra objects. - GossipTypeMapping[reflect.TypeOf(ðpb.SignedBeaconBlockElectra{})] = BlockSubnetTopicFormat - GossipTypeMapping[reflect.TypeOf(ðpb.SingleAttestation{})] = AttestationSubnetTopicFormat - GossipTypeMapping[reflect.TypeOf(ðpb.AttesterSlashingElectra{})] = AttesterSlashingSubnetTopicFormat - GossipTypeMapping[reflect.TypeOf(ðpb.SignedAggregateAttestationAndProofElectra{})] = AggregateAndProofSubnetTopicFormat - GossipTypeMapping[reflect.TypeOf(ðpb.LightClientFinalityUpdateElectra{})] = LightClientFinalityUpdateTopicFormat + GossipTypeMapping[reflect.TypeFor[*ethpb.SignedBeaconBlockElectra]()] = BlockSubnetTopicFormat + GossipTypeMapping[reflect.TypeFor[*ethpb.SingleAttestation]()] = AttestationSubnetTopicFormat + GossipTypeMapping[reflect.TypeFor[*ethpb.AttesterSlashingElectra]()] = AttesterSlashingSubnetTopicFormat + GossipTypeMapping[reflect.TypeFor[*ethpb.SignedAggregateAttestationAndProofElectra]()] = AggregateAndProofSubnetTopicFormat + GossipTypeMapping[reflect.TypeFor[*ethpb.LightClientFinalityUpdateElectra]()] = LightClientFinalityUpdateTopicFormat // Specially handle Fulu objects. 
- GossipTypeMapping[reflect.TypeOf(ðpb.SignedBeaconBlockFulu{})] = BlockSubnetTopicFormat + GossipTypeMapping[reflect.TypeFor[*ethpb.SignedBeaconBlockFulu]()] = BlockSubnetTopicFormat } diff --git a/beacon-chain/p2p/gossip_topic_mappings_test.go b/beacon-chain/p2p/gossip_topic_mappings_test.go index 441a56f811..1fdb4db2a3 100644 --- a/beacon-chain/p2p/gossip_topic_mappings_test.go +++ b/beacon-chain/p2p/gossip_topic_mappings_test.go @@ -9,6 +9,7 @@ import ( "github.com/OffchainLabs/prysm/v7/encoding/bytesutil" ethpb "github.com/OffchainLabs/prysm/v7/proto/prysm/v1alpha1" "github.com/OffchainLabs/prysm/v7/testing/assert" + "google.golang.org/protobuf/proto" ) func TestMappingHasNoDuplicates(t *testing.T) { @@ -18,7 +19,7 @@ func TestMappingHasNoDuplicates(t *testing.T) { if _, ok := m[reflect.TypeOf(v())]; ok { t.Errorf("%T is duplicated in the topic mapping", v) } - m[reflect.TypeOf(v)] = true + m[reflect.TypeFor[func() proto.Message]()] = true } } diff --git a/beacon-chain/p2p/interfaces.go b/beacon-chain/p2p/interfaces.go index bb77b5b4f5..ae9dad63d1 100644 --- a/beacon-chain/p2p/interfaces.go +++ b/beacon-chain/p2p/interfaces.go @@ -107,7 +107,7 @@ type ( // Sender abstracts the sending functionality from libp2p. Sender interface { - Send(context.Context, interface{}, string, peer.ID) (network.Stream, error) + Send(context.Context, any, string, peer.ID) (network.Stream, error) } // PeersProvider abstracts obtaining our current list of known peers status. diff --git a/beacon-chain/p2p/peers/assigner.go b/beacon-chain/p2p/peers/assigner.go index 45a27b4888..6c0f944e2c 100644 --- a/beacon-chain/p2p/peers/assigner.go +++ b/beacon-chain/p2p/peers/assigner.go @@ -38,10 +38,7 @@ type Assigner struct { var ErrInsufficientSuitable = errors.New("no suitable peers") func (a *Assigner) freshPeers() ([]peer.ID, error) { - required := params.BeaconConfig().MaxPeersToSync - if flags.Get().MinimumSyncPeers < required { - required = flags.Get().MinimumSyncPeers - } + required := min(flags.Get().MinimumSyncPeers, params.BeaconConfig().MaxPeersToSync) _, peers := a.ps.BestFinalized(params.BeaconConfig().MaxPeersToSync, a.fc.FinalizedCheckpoint().Epoch) if len(peers) < required { log.WithFields(logrus.Fields{ diff --git a/beacon-chain/p2p/peers/assigner_test.go b/beacon-chain/p2p/peers/assigner_test.go index 8fd064dc1e..816b8f3414 100644 --- a/beacon-chain/p2p/peers/assigner_test.go +++ b/beacon-chain/p2p/peers/assigner_test.go @@ -2,6 +2,7 @@ package peers import ( "fmt" + "slices" "testing" "github.com/OffchainLabs/prysm/v7/testing/require" @@ -35,7 +36,7 @@ func TestPickBest(t *testing.T) { { name: "all busy except i=5", n: 1, - busy: testBusyMap(append(append([]peer.ID{}, best[0:5]...), best[6:]...)), + busy: testBusyMap(slices.Concat(best[0:5], best[6:])), expected: []peer.ID{best[5]}, }, { diff --git a/beacon-chain/p2p/peers/benchmark_test.go b/beacon-chain/p2p/peers/benchmark_test.go index e756a65baa..42ad7734a9 100644 --- a/beacon-chain/p2p/peers/benchmark_test.go +++ b/beacon-chain/p2p/peers/benchmark_test.go @@ -11,8 +11,8 @@ func Benchmark_retrieveIndicesFromBitfield(b *testing.B) { for i := uint64(0); i < bv.Len(); i++ { bv.SetBitAt(i, true) } - b.ResetTimer() - for i := 0; i < b.N; i++ { + + for b.Loop() { indicesFromBitfield(bv) } } diff --git a/beacon-chain/p2p/peers/scorers/bad_responses_test.go b/beacon-chain/p2p/peers/scorers/bad_responses_test.go index 45fb515ec5..89033dcd5a 100644 --- a/beacon-chain/p2p/peers/scorers/bad_responses_test.go +++ 
b/beacon-chain/p2p/peers/scorers/bad_responses_test.go @@ -1,7 +1,7 @@ package scorers_test import ( - "sort" + "slices" "testing" "github.com/OffchainLabs/prysm/v7/beacon-chain/p2p/peers" @@ -151,7 +151,7 @@ func TestScorers_BadResponses_IsBadPeer(t *testing.T) { peerStatuses.Add(nil, pid, nil, network.DirUnknown) assert.NoError(t, scorer.IsBadPeer(pid)) - for i := 0; i < scorers.DefaultBadResponsesThreshold; i++ { + for i := range scorers.DefaultBadResponsesThreshold { scorer.Increment(pid) if i == scorers.DefaultBadResponsesThreshold-1 { assert.NotNil(t, scorer.IsBadPeer(pid), "Unexpected peer status") @@ -170,10 +170,10 @@ func TestScorers_BadResponses_BadPeers(t *testing.T) { }) scorer := peerStatuses.Scorers().BadResponsesScorer() pids := []peer.ID{peer.ID("peer1"), peer.ID("peer2"), peer.ID("peer3"), peer.ID("peer4"), peer.ID("peer5")} - for i := 0; i < len(pids); i++ { + for i := range pids { peerStatuses.Add(nil, pids[i], nil, network.DirUnknown) } - for i := 0; i < scorers.DefaultBadResponsesThreshold; i++ { + for range scorers.DefaultBadResponsesThreshold { scorer.Increment(pids[1]) scorer.Increment(pids[2]) scorer.Increment(pids[4]) @@ -185,8 +185,6 @@ func TestScorers_BadResponses_BadPeers(t *testing.T) { assert.NotNil(t, scorer.IsBadPeer(pids[4]), "Invalid peer status") want := []peer.ID{pids[1], pids[2], pids[4]} badPeers := scorer.BadPeers() - sort.Slice(badPeers, func(i, j int) bool { - return badPeers[i] < badPeers[j] - }) + slices.Sort(badPeers) assert.DeepEqual(t, want, badPeers, "Unexpected list of bad peers") } diff --git a/beacon-chain/p2p/peers/scorers/block_providers.go b/beacon-chain/p2p/peers/scorers/block_providers.go index 9fea1b12bb..451c83ef4a 100644 --- a/beacon-chain/p2p/peers/scorers/block_providers.go +++ b/beacon-chain/p2p/peers/scorers/block_providers.go @@ -239,7 +239,7 @@ func (s *BlockProviderScorer) WeightSorted( scores, _ := s.mapScoresAndPeers(pids, scoreFn) peers := make([]peer.ID, 0) - for i := 0; i < len(pids); i++ { + for range pids { if pid := nextPID(scores); pid != "" { peers = append(peers, pid) delete(scores, pid) diff --git a/beacon-chain/p2p/peers/scorers/block_providers_test.go b/beacon-chain/p2p/peers/scorers/block_providers_test.go index f4a9b51cd5..50e90d321e 100644 --- a/beacon-chain/p2p/peers/scorers/block_providers_test.go +++ b/beacon-chain/p2p/peers/scorers/block_providers_test.go @@ -178,7 +178,7 @@ func TestScorers_BlockProvider_WeightSorted(t *testing.T) { } var pids []peer.ID - for i := uint64(0); i < 10; i++ { + for i := range uint64(10) { pid := peer.ID(strconv.FormatUint(i, 10)) scorer.IncrementProcessedBlocks(pid, i*batchSize) pids = append(pids, pid) @@ -190,7 +190,7 @@ func TestScorers_BlockProvider_WeightSorted(t *testing.T) { // Run weighted sort lots of time, to get accurate statistics of whether more heavy items // are indeed preferred when sorting. scores := make(map[peer.ID]int, len(pids)) - for i := 0; i < 1000; i++ { + for range 1000 { score := len(pids) - 1 // The earlier in the list the item is, the more of a score will it get. 
for _, pid := range scorer.WeightSorted(r, shuffle(pids), nil) { diff --git a/beacon-chain/p2p/peers/status.go b/beacon-chain/p2p/peers/status.go index 7dafeb574c..0d2e959947 100644 --- a/beacon-chain/p2p/peers/status.go +++ b/beacon-chain/p2p/peers/status.go @@ -646,10 +646,7 @@ func (p *Status) Prune() { return peersToPrune[i].score > peersToPrune[j].score }) - limitDiff := len(p.store.Peers()) - p.store.Config().MaxPeers - if limitDiff > len(peersToPrune) { - limitDiff = len(peersToPrune) - } + limitDiff := min(len(p.store.Peers())-p.store.Config().MaxPeers, len(peersToPrune)) peersToPrune = peersToPrune[:limitDiff] @@ -698,10 +695,7 @@ func (p *Status) deprecatedPrune() { return peersToPrune[i].badResp < peersToPrune[j].badResp }) - limitDiff := len(p.store.Peers()) - p.store.Config().MaxPeers - if limitDiff > len(peersToPrune) { - limitDiff = len(peersToPrune) - } + limitDiff := min(len(p.store.Peers())-p.store.Config().MaxPeers, len(peersToPrune)) peersToPrune = peersToPrune[:limitDiff] // Delete peers from map. for _, peerData := range peersToPrune { @@ -1129,7 +1123,7 @@ func sameIP(firstAddr, secondAddr ma.Multiaddr) bool { func indicesFromBitfield(bitV bitfield.Bitvector64) []uint64 { committeeIdxs := make([]uint64, 0, bitV.Count()) - for i := uint64(0); i < 64; i++ { + for i := range uint64(64) { if bitV.BitAt(i) { committeeIdxs = append(committeeIdxs, i) } diff --git a/beacon-chain/p2p/peers/status_test.go b/beacon-chain/p2p/peers/status_test.go index 97bd4b7a90..6412bbbff3 100644 --- a/beacon-chain/p2p/peers/status_test.go +++ b/beacon-chain/p2p/peers/status_test.go @@ -183,7 +183,7 @@ func TestPeerCommitteeIndices(t *testing.T) { record.Set(enr.WithEntry("test", []byte{'a'})) p.Add(record, id, address, direction) bitV := bitfield.NewBitvector64() - for i := 0; i < 64; i++ { + for i := range 64 { if i == 2 || i == 8 || i == 9 { bitV.SetBitAt(uint64(i), true) } @@ -218,7 +218,7 @@ func TestPeerSubscribedToSubnet(t *testing.T) { } expectedPeer := p.All()[1] bitV := bitfield.NewBitvector64() - for i := 0; i < 64; i++ { + for i := range 64 { if i == 2 || i == 8 || i == 9 { bitV.SetBitAt(uint64(i), true) } @@ -391,7 +391,7 @@ func TestAddMetaData(t *testing.T) { // Add some peers with different states numPeers := 5 - for i := 0; i < numPeers; i++ { + for range numPeers { addPeer(t, p, peers.Connected) } newPeer := p.All()[2] @@ -420,19 +420,19 @@ func TestPeerConnectionStatuses(t *testing.T) { // Add some peers with different states numPeersDisconnected := 11 - for i := 0; i < numPeersDisconnected; i++ { + for range numPeersDisconnected { addPeer(t, p, peers.Disconnected) } numPeersConnecting := 7 - for i := 0; i < numPeersConnecting; i++ { + for range numPeersConnecting { addPeer(t, p, peers.Connecting) } numPeersConnected := 43 - for i := 0; i < numPeersConnected; i++ { + for range numPeersConnected { addPeer(t, p, peers.Connected) } numPeersDisconnecting := 4 - for i := 0; i < numPeersDisconnecting; i++ { + for range numPeersDisconnecting { addPeer(t, p, peers.Disconnecting) } @@ -461,7 +461,7 @@ func TestPeerValidTime(t *testing.T) { }) numPeersConnected := 6 - for i := 0; i < numPeersConnected; i++ { + for range numPeersConnected { addPeer(t, p, peers.Connected) } @@ -564,7 +564,7 @@ func TestPeerIPTracker(t *testing.T) { badIP := "211.227.218.116" var badPeers []peer.ID - for i := 0; i < peers.CollocationLimit+10; i++ { + for i := range peers.CollocationLimit + 10 { port := strconv.Itoa(3000 + i) addr, err := ma.NewMultiaddr("/ip4/" + badIP + "/tcp/" + port) if err != nil { 
@@ -685,12 +685,12 @@ func TestAtInboundPeerLimit(t *testing.T) { }, }, }) - for i := 0; i < 15; i++ { + for range 15 { // Peer added to peer handler. createPeer(t, p, nil, network.DirOutbound, peerdata.ConnectionState(ethpb.ConnectionState_CONNECTED)) } assert.Equal(t, false, p.IsAboveInboundLimit(), "Inbound limit exceeded") - for i := 0; i < 31; i++ { + for range 31 { // Peer added to peer handler. createPeer(t, p, nil, network.DirInbound, peerdata.ConnectionState(ethpb.ConnectionState_CONNECTED)) } @@ -710,7 +710,7 @@ func TestPrunePeers(t *testing.T) { }, }, }) - for i := 0; i < 15; i++ { + for range 15 { // Peer added to peer handler. createPeer(t, p, nil, network.DirOutbound, peerdata.ConnectionState(ethpb.ConnectionState_CONNECTED)) } @@ -718,7 +718,7 @@ func TestPrunePeers(t *testing.T) { peersToPrune := p.PeersToPrune() assert.Equal(t, 0, len(peersToPrune)) - for i := 0; i < 18; i++ { + for range 18 { // Peer added to peer handler. createPeer(t, p, nil, network.DirInbound, peerdata.ConnectionState(ethpb.ConnectionState_CONNECTED)) } @@ -728,7 +728,7 @@ func TestPrunePeers(t *testing.T) { assert.Equal(t, 3, len(peersToPrune)) // Add in more peers. - for i := 0; i < 13; i++ { + for range 13 { // Peer added to peer handler. createPeer(t, p, nil, network.DirInbound, peerdata.ConnectionState(ethpb.ConnectionState_CONNECTED)) } @@ -738,7 +738,7 @@ func TestPrunePeers(t *testing.T) { for i, pid := range inboundPeers { modulo := i % 5 // Increment bad scores for peers. - for j := 0; j < modulo; j++ { + for range modulo { p.Scorers().BadResponsesScorer().Increment(pid) } } @@ -772,7 +772,7 @@ func TestPrunePeers_TrustedPeers(t *testing.T) { }, }) - for i := 0; i < 15; i++ { + for range 15 { // Peer added to peer handler. createPeer(t, p, nil, network.DirOutbound, peerdata.ConnectionState(ethpb.ConnectionState_CONNECTED)) } @@ -780,7 +780,7 @@ func TestPrunePeers_TrustedPeers(t *testing.T) { peersToPrune := p.PeersToPrune() assert.Equal(t, 0, len(peersToPrune)) - for i := 0; i < 18; i++ { + for range 18 { // Peer added to peer handler. createPeer(t, p, nil, network.DirInbound, peerdata.ConnectionState(ethpb.ConnectionState_CONNECTED)) } @@ -790,7 +790,7 @@ func TestPrunePeers_TrustedPeers(t *testing.T) { assert.Equal(t, 3, len(peersToPrune)) // Add in more peers. - for i := 0; i < 13; i++ { + for range 13 { // Peer added to peer handler. createPeer(t, p, nil, network.DirInbound, peerdata.ConnectionState(ethpb.ConnectionState_CONNECTED)) } @@ -801,7 +801,7 @@ func TestPrunePeers_TrustedPeers(t *testing.T) { for i, pid := range inboundPeers { modulo := i % 5 // Increment bad scores for peers. - for j := 0; j < modulo; j++ { + for range modulo { p.Scorers().BadResponsesScorer().Increment(pid) } if modulo == 4 { @@ -826,7 +826,7 @@ func TestPrunePeers_TrustedPeers(t *testing.T) { } // Add more peers to check if trusted peers can be pruned after they are deleted from trusted peer set. - for i := 0; i < 9; i++ { + for range 9 { // Peer added to peer handler. createPeer(t, p, nil, network.DirInbound, peerdata.ConnectionState(ethpb.ConnectionState_CONNECTED)) } diff --git a/beacon-chain/p2p/pubsub_filter.go b/beacon-chain/p2p/pubsub_filter.go index ae0cf816a0..cf7af2a721 100644 --- a/beacon-chain/p2p/pubsub_filter.go +++ b/beacon-chain/p2p/pubsub_filter.go @@ -148,8 +148,8 @@ func scanfcheck(input, format string) (int, error) { // This method only checks that the input conforms to the format, the arguments are not used and // therefore we can reuse the same integer pointer. 
var cnt = strings.Count(format, "%") - var args []interface{} - for i := 0; i < cnt; i++ { + var args []any + for range cnt { args = append(args, &t) } return fmt.Sscanf(input, format, args...) diff --git a/beacon-chain/p2p/pubsub_filter_test.go b/beacon-chain/p2p/pubsub_filter_test.go index 20bb5638ad..06fd63fab0 100644 --- a/beacon-chain/p2p/pubsub_filter_test.go +++ b/beacon-chain/p2p/pubsub_filter_test.go @@ -85,7 +85,7 @@ func TestService_CanSubscribe(t *testing.T) { // Ensure all gossip topic mappings pass validation. for _, topic := range AllTopics() { - formatting := []interface{}{digest} + formatting := []any{digest} // Special case for attestation subnets which have a second formatting placeholder. topics := map[string]bool{ diff --git a/beacon-chain/p2p/pubsub_test.go b/beacon-chain/p2p/pubsub_test.go index 0ddb7f91c4..876d875478 100644 --- a/beacon-chain/p2p/pubsub_test.go +++ b/beacon-chain/p2p/pubsub_test.go @@ -50,7 +50,7 @@ func TestService_PublishToTopicConcurrentMapWrite(t *testing.T) { wg := sync.WaitGroup{} wg.Add(10) - for i := 0; i < 10; i++ { + for i := range 10 { go func(i int) { assert.NoError(t, s.PublishToTopic(ctx, topic, []byte{})) wg.Done() @@ -131,8 +131,8 @@ func TestExtractGossipDigest(t *testing.T) { func BenchmarkExtractGossipDigest(b *testing.B) { topic := fmt.Sprintf(BlockSubnetTopicFormat, []byte{0xb5, 0x30, 0x3f, 0x2a}) + "/" + encoder.ProtocolSuffixSSZSnappy - b.ResetTimer() - for i := 0; i < b.N; i++ { + + for b.Loop() { _, err := ExtractGossipDigest(topic) if err != nil { b.Fatal(err) diff --git a/beacon-chain/p2p/rpc_topic_mappings.go b/beacon-chain/p2p/rpc_topic_mappings.go index cb206f3765..313e0a090d 100644 --- a/beacon-chain/p2p/rpc_topic_mappings.go +++ b/beacon-chain/p2p/rpc_topic_mappings.go @@ -129,7 +129,7 @@ const ( // RPCTopicMappings map the base message type to the rpc request. var ( - RPCTopicMappings = map[string]interface{}{ + RPCTopicMappings = map[string]any{ // RPC Status Message RPCStatusTopicV1: new(pb.Status), RPCStatusTopicV2: new(pb.StatusV2), @@ -149,9 +149,9 @@ var ( RPCPingTopicV1: new(primitives.SSZUint64), // RPC Metadata Message - RPCMetaDataTopicV1: new(interface{}), - RPCMetaDataTopicV2: new(interface{}), - RPCMetaDataTopicV3: new(interface{}), + RPCMetaDataTopicV1: new(any), + RPCMetaDataTopicV2: new(any), + RPCMetaDataTopicV3: new(any), // BlobSidecarsByRange v1 Message RPCBlobSidecarsByRangeTopicV1: new(pb.BlobSidecarsByRangeRequest), @@ -162,8 +162,8 @@ var ( // Light client RPCLightClientBootstrapTopicV1: new([fieldparams.RootLength]byte), RPCLightClientUpdatesByRangeTopicV1: new(pb.LightClientUpdatesByRangeRequest), - RPCLightClientFinalityUpdateTopicV1: new(interface{}), - RPCLightClientOptimisticUpdateTopicV1: new(interface{}), + RPCLightClientFinalityUpdateTopicV1: new(any), + RPCLightClientOptimisticUpdateTopicV1: new(any), // DataColumnSidecarsByRange v1 Message RPCDataColumnSidecarsByRangeTopicV1: new(pb.DataColumnSidecarsByRangeRequest), @@ -230,7 +230,7 @@ var ( // VerifyTopicMapping verifies that the topic and its accompanying // message type is correct. 
-func VerifyTopicMapping(topic string, msg interface{}) error { +func VerifyTopicMapping(topic string, msg any) error { msgType, ok := RPCTopicMappings[topic] if !ok { return errors.New("rpc topic is not registered currently") diff --git a/beacon-chain/p2p/rpc_topic_mappings_test.go b/beacon-chain/p2p/rpc_topic_mappings_test.go index 9b14e93839..435e2fc6e5 100644 --- a/beacon-chain/p2p/rpc_topic_mappings_test.go +++ b/beacon-chain/p2p/rpc_topic_mappings_test.go @@ -19,7 +19,7 @@ func TestVerifyRPCMappings(t *testing.T) { assert.NoError(t, VerifyTopicMapping(RPCStatusTopicV1, &pb.Status{}), "Failed to verify status rpc topic") assert.NotNil(t, VerifyTopicMapping(RPCStatusTopicV1, new([]byte)), "Incorrect message type verified for status rpc topic") - assert.NoError(t, VerifyTopicMapping(RPCMetaDataTopicV1, new(interface{})), "Failed to verify metadata rpc topic") + assert.NoError(t, VerifyTopicMapping(RPCMetaDataTopicV1, new(any)), "Failed to verify metadata rpc topic") assert.NotNil(t, VerifyTopicMapping(RPCStatusTopicV1, new([]byte)), "Incorrect message type verified for metadata rpc topic") assert.NoError(t, VerifyTopicMapping(RPCBlocksByRootTopicV1, new(types.BeaconBlockByRootsReq)), "Failed to verify blocks by root rpc topic") diff --git a/beacon-chain/p2p/sender.go b/beacon-chain/p2p/sender.go index abcc5377c0..6a65ca2640 100644 --- a/beacon-chain/p2p/sender.go +++ b/beacon-chain/p2p/sender.go @@ -18,7 +18,7 @@ import ( // closed for writing. // // When done, the caller must Close or Reset on the stream. -func (s *Service) Send(ctx context.Context, message interface{}, baseTopic string, pid peer.ID) (network.Stream, error) { +func (s *Service) Send(ctx context.Context, message any, baseTopic string, pid peer.ID) (network.Stream, error) { ctx, span := trace.StartSpan(ctx, "p2p.Send") defer span.End() if err := VerifyTopicMapping(baseTopic, message); err != nil { diff --git a/beacon-chain/p2p/service_test.go b/beacon-chain/p2p/service_test.go index f7b180beec..c6d68dd508 100644 --- a/beacon-chain/p2p/service_test.go +++ b/beacon-chain/p2p/service_test.go @@ -369,7 +369,7 @@ func TestService_connectWithPeer(t *testing.T) { ps := peers.NewStatus(t.Context(), &peers.StatusConfig{ ScorerParams: &scorers.Config{}, }) - for i := 0; i < 10; i++ { + for range 10 { ps.Scorers().BadResponsesScorer().Increment("bad") } return ps diff --git a/beacon-chain/p2p/subnets.go b/beacon-chain/p2p/subnets.go index 8629da03c1..d2e448cd05 100644 --- a/beacon-chain/p2p/subnets.go +++ b/beacon-chain/p2p/subnets.go @@ -3,6 +3,7 @@ package p2p import ( "context" "fmt" + "maps" "math" "strings" "sync" @@ -165,9 +166,7 @@ func (s *Service) findPeersWithSubnets( ) ([]*enode.Node, error) { // Copy the defective subnets map to avoid modifying the original map. defectiveSubnets := make(map[uint64]int, len(defectiveSubnetsOrigin)) - for k, v := range defectiveSubnetsOrigin { - defectiveSubnets[k] = v - } + maps.Copy(defectiveSubnets, defectiveSubnetsOrigin) // Create an discovery iterator to find new peers. 
iterator := s.dv5Listener.RandomNodes() @@ -302,9 +301,7 @@ func (s *Service) dialPeers(ctx context.Context, maxConcurrentDials int, nodes [ continue } - wg.Add(1) - go func() { - defer wg.Done() + wg.Go(func() { if err := s.connectWithPeer(ctx, *info); err != nil { log.WithError(err).WithField("info", info.String()).Debug("Could not connect with peer") return @@ -313,7 +310,7 @@ func (s *Service) dialPeers(ctx context.Context, maxConcurrentDials int, nodes [ mut.Lock() defer mut.Unlock() counter++ - }() + }) } wg.Wait() diff --git a/beacon-chain/p2p/subnets_test.go b/beacon-chain/p2p/subnets_test.go index 8a22e608a4..dc9def338c 100644 --- a/beacon-chain/p2p/subnets_test.go +++ b/beacon-chain/p2p/subnets_test.go @@ -1094,7 +1094,7 @@ func TestFindPeersWithSubnets_received_bad_existing_node(t *testing.T) { if peerData != nil { service.peers.Add(node1_seq2.Record(), peerData.ID, nil, network.DirUnknown) // Mark as bad peer - need enough increments to exceed threshold (6) - for i := 0; i < 10; i++ { + for range 10 { service.peers.Scorers().BadResponsesScorer().Increment(peerData.ID) } } diff --git a/beacon-chain/p2p/testing/fuzz_p2p.go b/beacon-chain/p2p/testing/fuzz_p2p.go index 130fc9f8d7..e0881600b6 100644 --- a/beacon-chain/p2p/testing/fuzz_p2p.go +++ b/beacon-chain/p2p/testing/fuzz_p2p.go @@ -99,7 +99,7 @@ func (*FakeP2P) PublishToTopic(_ context.Context, _ string, _ []byte, _ ...pubsu } // Send -- fake. -func (*FakeP2P) Send(_ context.Context, _ interface{}, _ string, _ peer.ID) (network.Stream, error) { +func (*FakeP2P) Send(_ context.Context, _ any, _ string, _ peer.ID) (network.Stream, error) { return nil, nil } diff --git a/beacon-chain/p2p/testing/p2p.go b/beacon-chain/p2p/testing/p2p.go index 208eb248ff..f31fb31f30 100644 --- a/beacon-chain/p2p/testing/p2p.go +++ b/beacon-chain/p2p/testing/p2p.go @@ -371,7 +371,7 @@ func (p *TestP2P) AddDisconnectionHandler(f func(ctx context.Context, id peer.ID } // Send a message to a specific peer. -func (p *TestP2P) Send(ctx context.Context, msg interface{}, topic string, pid peer.ID) (network.Stream, error) { +func (p *TestP2P) Send(ctx context.Context, msg any, topic string, pid peer.ID) (network.Stream, error) { metadataTopics := map[string]bool{metadataV1Topic: true, metadataV2Topic: true, metadataV3Topic: true} t := topic diff --git a/beacon-chain/p2p/types/types.go b/beacon-chain/p2p/types/types.go index c82d6487ee..9a6e46217f 100644 --- a/beacon-chain/p2p/types/types.go +++ b/beacon-chain/p2p/types/types.go @@ -78,7 +78,7 @@ func (r *BeaconBlockByRootsReq) UnmarshalSSZ(buf []byte) error { } numOfRoots := bufLen / fieldparams.RootLength roots := make([][fieldparams.RootLength]byte, 0, numOfRoots) - for i := 0; i < numOfRoots; i++ { + for i := range numOfRoots { var rt [fieldparams.RootLength]byte copy(rt[:], buf[i*fieldparams.RootLength:(i+1)*fieldparams.RootLength]) roots = append(roots, rt) @@ -175,7 +175,7 @@ func (b *BlobSidecarsByRootReq) UnmarshalSSZ(buf []byte) error { } count := bufLen / blobIdSize *b = make([]*eth.BlobIdentifier, count) - for i := 0; i < count; i++ { + for i := range count { id := &eth.BlobIdentifier{} err := id.UnmarshalSSZ(buf[i*blobIdSize : (i+1)*blobIdSize]) if err != nil { @@ -323,7 +323,7 @@ func (d DataColumnsByRootIdentifiers) MarshalSSZTo(dst []byte) ([]byte, error) { // SizeSSZ implements ssz.Marshaler. It returns the size of the serialized representation.
func (d DataColumnsByRootIdentifiers) SizeSSZ() int { size := 0 - for i := 0; i < len(d); i++ { + for i := range d { size += 4 size += (d)[i].SizeSSZ() } diff --git a/beacon-chain/p2p/types/types_test.go b/beacon-chain/p2p/types/types_test.go index 945feb1185..681b2de7b1 100644 --- a/beacon-chain/p2p/types/types_test.go +++ b/beacon-chain/p2p/types/types_test.go @@ -16,7 +16,7 @@ import ( func generateBlobIdentifiers(n int) []*eth.BlobIdentifier { r := make([]*eth.BlobIdentifier, n) - for i := 0; i < n; i++ { + for i := range n { r[i] = &eth.BlobIdentifier{ BlockRoot: bytesutil.PadTo([]byte{byte(i)}, 32), Index: 0, @@ -111,7 +111,7 @@ func TestBeaconBlockByRootsReq_Limit(t *testing.T) { func TestErrorResponse_Limit(t *testing.T) { errorMessage := make([]byte, 0) // Provide a message of size 6400 bytes. - for i := uint64(0); i < 200; i++ { + for i := range uint64(200) { byteArr := [32]byte{byte(i)} errorMessage = append(errorMessage, byteArr[:]...) } @@ -126,7 +126,7 @@ func TestRoundTripSerialization(t *testing.T) { func roundTripTestBlocksByRootReq(t *testing.T) { fixedRoots := make([][32]byte, 0) - for i := 0; i < 200; i++ { + for i := range 200 { fixedRoots = append(fixedRoots, [32]byte{byte(i)}) } req := BeaconBlockByRootsReq(fixedRoots) @@ -210,7 +210,7 @@ func hexDecodeOrDie(t *testing.T, str string) []byte { // ==================================== func generateDataColumnIdentifiers(n int) []*eth.DataColumnsByRootIdentifier { r := make([]*eth.DataColumnsByRootIdentifier, n) - for i := 0; i < n; i++ { + for i := range n { r[i] = &eth.DataColumnsByRootIdentifier{ BlockRoot: bytesutil.PadTo([]byte{byte(i)}, 32), Columns: []uint64{uint64(i)}, diff --git a/beacon-chain/rpc/core/validator.go b/beacon-chain/rpc/core/validator.go index 18e9f29b7d..ef24f61415 100644 --- a/beacon-chain/rpc/core/validator.go +++ b/beacon-chain/rpc/core/validator.go @@ -4,7 +4,7 @@ import ( "bytes" "context" "fmt" - "sort" + "slices" "time" "github.com/OffchainLabs/prysm/v7/api/server" @@ -133,9 +133,7 @@ func (s *Service) ComputeValidatorPerformance( } } // Depending on the indices and public keys given, results might not be sorted.
- sort.Slice(validatorIndices, func(i, j int) bool { - return validatorIndices[i] < validatorIndices[j] - }) + slices.Sort(validatorIndices) currentEpoch := coreTime.CurrentEpoch(headState) responseCap = len(validatorIndices) @@ -239,9 +237,7 @@ func (s *Service) IndividualVotes( filteredIndices = append(filteredIndices, index) } } - sort.Slice(filteredIndices, func(i, j int) bool { - return filteredIndices[i] < filteredIndices[j] - }) + slices.Sort(filteredIndices) var v []*precompute.Validator var bal *precompute.Balance diff --git a/beacon-chain/rpc/core/validator_test.go b/beacon-chain/rpc/core/validator_test.go index 849beef301..f017e1bdb0 100644 --- a/beacon-chain/rpc/core/validator_test.go +++ b/beacon-chain/rpc/core/validator_test.go @@ -22,7 +22,7 @@ func TestRegisterSyncSubnetProto(t *testing.T) { k := pubKey(3) committee := make([][]byte, 0) - for i := 0; i < 100; i++ { + for i := range 100 { committee = append(committee, pubKey(uint64(i))) } sCommittee := &ethpb.SyncCommittee{ @@ -44,7 +44,7 @@ func TestRegisterSyncSubnet(t *testing.T) { k := pubKey(3) committee := make([][]byte, 0) - for i := 0; i < 100; i++ { + for i := range 100 { committee = append(committee, pubKey(uint64(i))) } sCommittee := &ethpb.SyncCommittee{ diff --git a/beacon-chain/rpc/eth/beacon/handlers.go b/beacon-chain/rpc/eth/beacon/handlers.go index 58eff169c2..bfd9352e59 100644 --- a/beacon-chain/rpc/eth/beacon/handlers.go +++ b/beacon-chain/rpc/eth/beacon/handlers.go @@ -304,7 +304,7 @@ func (s *Server) GetBlockAttestationsV2(w http.ResponseWriter, r *http.Request) consensusAtts := blk.Block().Body().Attestations() v := blk.Block().Version() - attStructs := make([]interface{}, len(consensusAtts)) + attStructs := make([]any, len(consensusAtts)) if v >= version.Electra { for index, att := range consensusAtts { a, ok := att.(*eth.AttestationElectra) @@ -887,7 +887,7 @@ func (s *Server) proposeBlock(ctx context.Context, w http.ResponseWriter, blk *e } } -func unmarshalStrict(data []byte, v interface{}) error { +func unmarshalStrict(data []byte, v any) error { dec := json.NewDecoder(bytes.NewReader(data)) dec.DisallowUnknownFields() return dec.Decode(v) diff --git a/beacon-chain/rpc/eth/beacon/handlers_pool.go b/beacon-chain/rpc/eth/beacon/handlers_pool.go index e6adb7b1e1..09d7eeaa23 100644 --- a/beacon-chain/rpc/eth/beacon/handlers_pool.go +++ b/beacon-chain/rpc/eth/beacon/handlers_pool.go @@ -62,7 +62,7 @@ func (s *Server) ListAttestationsV2(w http.ResponseWriter, r *http.Request) { attestations = append(attestations, unaggAtts...) } - filteredAtts := make([]interface{}, 0, len(attestations)) + filteredAtts := make([]any, 0, len(attestations)) for _, att := range attestations { var includeAttestation bool if v >= version.Electra && att.Version() >= version.Electra { @@ -594,10 +594,7 @@ func (s *Server) SubmitBLSToExecutionChanges(w http.ResponseWriter, r *http.Requ // It validates the messages again because they could have been invalidated by being included in blocks since the last validation. // It removes the messages from the slice and modifies it in place. 
func (s *Server) broadcastBLSBatch(ctx context.Context, ptr *[]*eth.SignedBLSToExecutionChange) { - limit := broadcastBLSChangesRateLimit - if len(*ptr) < broadcastBLSChangesRateLimit { - limit = len(*ptr) - } + limit := min(len(*ptr), broadcastBLSChangesRateLimit) st, err := s.ChainInfoFetcher.HeadStateReadOnly(ctx) if err != nil { log.WithError(err).Error("Could not get head state") @@ -668,9 +665,9 @@ func (s *Server) GetAttesterSlashingsV2(w http.ResponseWriter, r *http.Request) } sourceSlashings := s.SlashingsPool.PendingAttesterSlashings(ctx, headState, true /* return unlimited slashings */) - attStructs := make([]interface{}, 0, len(sourceSlashings)) + attStructs := make([]any, 0, len(sourceSlashings)) for _, slashing := range sourceSlashings { - var attStruct interface{} + var attStruct any if v >= version.Electra && slashing.Version() >= version.Electra { a, ok := slashing.(*eth.AttesterSlashingElectra) if !ok { diff --git a/beacon-chain/rpc/eth/beacon/handlers_state.go b/beacon-chain/rpc/eth/beacon/handlers_state.go index 579cd93dfa..310a513e24 100644 --- a/beacon-chain/rpc/eth/beacon/handlers_state.go +++ b/beacon-chain/rpc/eth/beacon/handlers_state.go @@ -289,7 +289,7 @@ func nextCommitteeIndicesFromState(st state.BeaconState) ([]string, *ethpbalpha. func extractSyncSubcommittees(st state.BeaconState, committee *ethpbalpha.SyncCommittee) ([][]string, error) { subcommitteeCount := params.BeaconConfig().SyncCommitteeSubnetCount subcommittees := make([][]string, subcommitteeCount) - for i := uint64(0); i < subcommitteeCount; i++ { + for i := range subcommitteeCount { pubkeys, err := altair.SyncSubCommitteePubkeys(committee, primitives.CommitteeIndex(i)) if err != nil { return nil, fmt.Errorf( diff --git a/beacon-chain/rpc/eth/beacon/handlers_state_test.go b/beacon-chain/rpc/eth/beacon/handlers_state_test.go index f73d3f9f75..190a4d6c11 100644 --- a/beacon-chain/rpc/eth/beacon/handlers_state_test.go +++ b/beacon-chain/rpc/eth/beacon/handlers_state_test.go @@ -384,7 +384,7 @@ func Test_currentCommitteeIndicesFromState(t *testing.T) { vals := st.Validators() wantedCommittee := make([][]byte, params.BeaconConfig().SyncCommitteeSize) wantedIndices := make([]string, len(wantedCommittee)) - for i := 0; i < len(wantedCommittee); i++ { + for i := range wantedCommittee { wantedIndices[i] = strconv.FormatUint(uint64(i), 10) wantedCommittee[i] = vals[i].PublicKey } @@ -415,7 +415,7 @@ func Test_nextCommitteeIndicesFromState(t *testing.T) { vals := st.Validators() wantedCommittee := make([][]byte, params.BeaconConfig().SyncCommitteeSize) wantedIndices := make([]string, len(wantedCommittee)) - for i := 0; i < len(wantedCommittee); i++ { + for i := range wantedCommittee { wantedIndices[i] = strconv.FormatUint(uint64(i), 10) wantedCommittee[i] = vals[i].PublicKey } @@ -445,7 +445,7 @@ func Test_extractSyncSubcommittees(t *testing.T) { st, _ := util.DeterministicGenesisStateAltair(t, params.BeaconConfig().SyncCommitteeSize) vals := st.Validators() syncCommittee := make([][]byte, params.BeaconConfig().SyncCommitteeSize) - for i := 0; i < len(syncCommittee); i++ { + for i := range syncCommittee { syncCommittee[i] = vals[i].PublicKey } require.NoError(t, st.SetCurrentSyncCommittee(&ethpbalpha.SyncCommittee{ @@ -460,10 +460,7 @@ func Test_extractSyncSubcommittees(t *testing.T) { for i := uint64(0); i < commSize; i += subCommSize { sub := make([]string, 0) start := i - end := i + subCommSize - if end > commSize { - end = commSize - } + end := min(i+subCommSize, commSize) for j := start; j < end; j++ { sub
= append(sub, strconv.FormatUint(j, 10)) } @@ -498,7 +495,7 @@ func TestGetSyncCommittees(t *testing.T) { st, _ := util.DeterministicGenesisStateAltair(t, params.BeaconConfig().SyncCommitteeSize) syncCommittee := make([][]byte, params.BeaconConfig().SyncCommitteeSize) vals := st.Validators() - for i := 0; i < len(syncCommittee); i++ { + for i := range syncCommittee { syncCommittee[i] = vals[i].PublicKey } require.NoError(t, st.SetCurrentSyncCommittee(&ethpbalpha.SyncCommittee{ @@ -661,7 +658,7 @@ func TestGetSyncCommittees_Future(t *testing.T) { st, _ := util.DeterministicGenesisStateAltair(t, params.BeaconConfig().SyncCommitteeSize) syncCommittee := make([][]byte, params.BeaconConfig().SyncCommitteeSize) vals := st.Validators() - for i := 0; i < len(syncCommittee); i++ { + for i := range syncCommittee { syncCommittee[i] = vals[i].PublicKey } require.NoError(t, st.SetNextSyncCommittee(&ethpbalpha.SyncCommittee{ diff --git a/beacon-chain/rpc/eth/beacon/handlers_test.go b/beacon-chain/rpc/eth/beacon/handlers_test.go index c11ec05807..3711deffb9 100644 --- a/beacon-chain/rpc/eth/beacon/handlers_test.go +++ b/beacon-chain/rpc/eth/beacon/handlers_test.go @@ -61,7 +61,7 @@ func fillDBTestBlocks(ctx context.Context, t *testing.T, beaconDB db.Database) ( count := primitives.Slot(100) blks := make([]interfaces.ReadOnlySignedBeaconBlock, count) blkContainers := make([]*eth.BeaconBlockContainer, count) - for i := primitives.Slot(0); i < count; i++ { + for i := range count { b := util.NewBeaconBlock() b.Block.Slot = i b.Block.ParentRoot = bytesutil.PadTo([]byte{uint8(i)}, 32) @@ -3765,7 +3765,7 @@ func Test_validateBlobs(t *testing.T) { fuluBlobs := make([][]byte, blobCount) var kzgBlobs []kzg.Blob - for i := 0; i < blobCount; i++ { + for i := range blobCount { blob := util.GetRandBlob(int64(i)) fuluBlobs[i] = blob[:] var kzgBlob kzg.Blob @@ -3785,11 +3785,11 @@ func Test_validateBlobs(t *testing.T) { // Generate cell proofs for the blobs (flattened format like execution client) numberOfColumns := params.BeaconConfig().NumberOfColumns cellProofs := make([][]byte, uint64(blobCount)*numberOfColumns) - for blobIdx := 0; blobIdx < blobCount; blobIdx++ { + for blobIdx := range blobCount { _, proofs, err := kzg.ComputeCellsAndKZGProofs(&kzgBlobs[blobIdx]) require.NoError(t, err) - for colIdx := uint64(0); colIdx < numberOfColumns; colIdx++ { + for colIdx := range numberOfColumns { cellProofIdx := uint64(blobIdx)*numberOfColumns + colIdx cellProofs[cellProofIdx] = proofs[colIdx][:] } @@ -3808,7 +3808,7 @@ func Test_validateBlobs(t *testing.T) { blobCount := 2 commitments := make([][]byte, blobCount) fuluBlobs := make([][]byte, blobCount) - for i := 0; i < blobCount; i++ { + for i := range blobCount { blob := util.GetRandBlob(int64(i)) fuluBlobs[i] = blob[:] @@ -3977,7 +3977,7 @@ func TestGetPendingConsolidations(t *testing.T) { consolidationSize := (&eth.PendingConsolidation{}).SizeSSZ() require.Equal(t, len(responseBytes), consolidationSize*len(cs)) - for i := 0; i < len(cs); i++ { + for i := range cs { start := i * consolidationSize end := start + consolidationSize @@ -4103,7 +4103,7 @@ func TestGetPendingDeposits(t *testing.T) { validators := st.Validators() dummySig := make([]byte, 96) - for j := 0; j < 96; j++ { + for j := range 96 { dummySig[j] = byte(j) } deps := make([]*eth.PendingDeposit, 10) @@ -4170,7 +4170,7 @@ func TestGetPendingDeposits(t *testing.T) { depositSize := (&eth.PendingDeposit{}).SizeSSZ() require.Equal(t, len(responseBytes), depositSize*len(deps)) - for i := 0; i < len(deps); i++ { + for i := 
range deps { start := i * depositSize end := start + depositSize @@ -4357,7 +4357,7 @@ func TestGetPendingPartialWithdrawals(t *testing.T) { withdrawalSize := (&eth.PendingPartialWithdrawal{}).SizeSSZ() require.Equal(t, len(responseBytes), withdrawalSize*len(withdrawals)) - for i := 0; i < len(withdrawals); i++ { + for i := range withdrawals { start := i * withdrawalSize end := start + withdrawalSize @@ -4487,7 +4487,7 @@ func TestGetProposerLookahead(t *testing.T) { st, _ := util.DeterministicGenesisStateFulu(t, uint64(numValidators)) lookaheadSize := int(params.BeaconConfig().MinSeedLookahead+1) * int(params.BeaconConfig().SlotsPerEpoch) lookahead := make([]primitives.ValidatorIndex, lookaheadSize) - for i := 0; i < lookaheadSize; i++ { + for i := range lookaheadSize { lookahead[i] = primitives.ValidatorIndex(i % numValidators) // Cycle through validators } @@ -4525,7 +4525,7 @@ func TestGetProposerLookahead(t *testing.T) { // Verify the data require.Equal(t, lookaheadSize, len(resp.Data)) - for i := 0; i < lookaheadSize; i++ { + for i := range lookaheadSize { expectedIdx := strconv.FormatUint(uint64(i%numValidators), 10) require.Equal(t, expectedIdx, resp.Data[i]) } @@ -4546,7 +4546,7 @@ func TestGetProposerLookahead(t *testing.T) { require.Equal(t, len(responseBytes), validatorIndexSize*lookaheadSize) recoveredIndices := make([]primitives.ValidatorIndex, lookaheadSize) - for i := 0; i < lookaheadSize; i++ { + for i := range lookaheadSize { start := i * validatorIndexSize end := start + validatorIndexSize diff --git a/beacon-chain/rpc/eth/beacon/handlers_validators_test.go b/beacon-chain/rpc/eth/beacon/handlers_validators_test.go index 8ff8389b49..a0263afe7c 100644 --- a/beacon-chain/rpc/eth/beacon/handlers_validators_test.go +++ b/beacon-chain/rpc/eth/beacon/handlers_validators_test.go @@ -842,7 +842,7 @@ func TestGetValidatorBalances(t *testing.T) { count := uint64(4) st, _ = util.DeterministicGenesisState(t, count) balances := make([]uint64, count) - for i := uint64(0); i < count; i++ { + for i := range count { balances[i] = i } require.NoError(t, st.SetBalances(balances)) @@ -1189,7 +1189,7 @@ func TestGetValidatorIdentities(t *testing.T) { count := uint64(4) genesisState, _ := util.DeterministicGenesisState(t, count) st := genesisState.ToProtoUnsafe().(*eth.BeaconState) - for i := uint64(0); i < count; i++ { + for i := range count { st.Validators[i].ActivationEpoch = primitives.Epoch(i) } @@ -1218,7 +1218,7 @@ func TestGetValidatorIdentities(t *testing.T) { resp := &structs.GetValidatorIdentitiesResponse{} require.NoError(t, json.Unmarshal(writer.Body.Bytes(), resp)) require.Equal(t, 4, len(resp.Data)) - for i := uint64(0); i < count; i++ { + for i := range count { assert.Equal(t, fmt.Sprintf("%d", i), resp.Data[i].Index) assert.DeepEqual(t, hexutil.Encode(st.Validators[i].PublicKey), resp.Data[i].Pubkey) assert.Equal(t, fmt.Sprintf("%d", st.Validators[i].ActivationEpoch), resp.Data[i].ActivationEpoch) diff --git a/beacon-chain/rpc/eth/config/handlers.go b/beacon-chain/rpc/eth/config/handlers.go index 44185dd94c..6ef4da6e17 100644 --- a/beacon-chain/rpc/eth/config/handlers.go +++ b/beacon-chain/rpc/eth/config/handlers.go @@ -72,7 +72,7 @@ func GetSpec(w http.ResponseWriter, r *http.Request) { httputil.WriteJson(w, &structs.GetSpecResponse{Data: data}) } -func convertValueForJSON(v reflect.Value, tag string) interface{} { +func convertValueForJSON(v reflect.Value, tag string) any { // Unwrap pointers / interfaces for v.Kind() == reflect.Interface || v.Kind() == reflect.Ptr { if
v.IsNil() { @@ -109,8 +109,8 @@ func convertValueForJSON(v reflect.Value, tag string) interface{} { } // Generic slice/array handling n := v.Len() - out := make([]interface{}, n) - for i := 0; i < n; i++ { + out := make([]any, n) + for i := range n { out[i] = convertValueForJSON(v.Index(i), tag) } return out @@ -118,7 +118,7 @@ func convertValueForJSON(v reflect.Value, tag string) interface{} { // ===== Struct ===== case reflect.Struct: t := v.Type() - m := make(map[string]interface{}, v.NumField()) + m := make(map[string]any, v.NumField()) for i := 0; i < v.NumField(); i++ { f := t.Field(i) if !v.Field(i).CanInterface() { @@ -158,11 +158,11 @@ func convertValueForJSON(v reflect.Value, tag string) interface{} { } } -func prepareConfigSpec() (map[string]interface{}, error) { - data := make(map[string]interface{}) +func prepareConfigSpec() (map[string]any, error) { + data := make(map[string]any) config := *params.BeaconConfig() - t := reflect.TypeOf(config) + t := reflect.TypeFor[params.BeaconChainConfig]() v := reflect.ValueOf(config) for i := 0; i < t.NumField(); i++ { diff --git a/beacon-chain/rpc/eth/config/handlers_test.go b/beacon-chain/rpc/eth/config/handlers_test.go index 04d11cf9dd..1c7386f7b3 100644 --- a/beacon-chain/rpc/eth/config/handlers_test.go +++ b/beacon-chain/rpc/eth/config/handlers_test.go @@ -204,7 +204,7 @@ func TestGetSpec(t *testing.T) { require.Equal(t, http.StatusOK, writer.Code) resp := structs.GetSpecResponse{} require.NoError(t, json.Unmarshal(writer.Body.Bytes(), &resp)) - data, ok := resp.Data.(map[string]interface{}) + data, ok := resp.Data.(map[string]any) require.Equal(t, true, ok) assert.Equal(t, 176, len(data)) for k, v := range data { @@ -583,7 +583,7 @@ func TestGetSpec(t *testing.T) { case "SYNC_MESSAGE_DUE_BPS": assert.Equal(t, "104", v) case "BLOB_SCHEDULE": - blobSchedule, ok := v.([]interface{}) + blobSchedule, ok := v.([]any) assert.Equal(t, true, ok) assert.Equal(t, 2, len(blobSchedule)) default: @@ -664,7 +664,7 @@ func TestGetSpec_BlobSchedule(t *testing.T) { require.Equal(t, http.StatusOK, writer.Code) resp := structs.GetSpecResponse{} require.NoError(t, json.Unmarshal(writer.Body.Bytes(), &resp)) - data, ok := resp.Data.(map[string]interface{}) + data, ok := resp.Data.(map[string]any) require.Equal(t, true, ok) // Verify BLOB_SCHEDULE is present and properly formatted @@ -673,13 +673,13 @@ func TestGetSpec_BlobSchedule(t *testing.T) { // Verify it's a slice of maps (actual JSON object, not string) // The JSON unmarshaling converts it to []interface{} with map[string]interface{} entries - blobScheduleSlice, ok := blobScheduleValue.([]interface{}) + blobScheduleSlice, ok := blobScheduleValue.([]any) require.Equal(t, true, ok) // Convert to generic interface for easier testing - var blobSchedule []map[string]interface{} + var blobSchedule []map[string]any for _, entry := range blobScheduleSlice { - entryMap, ok := entry.(map[string]interface{}) + entryMap, ok := entry.(map[string]any) require.Equal(t, true, ok) blobSchedule = append(blobSchedule, entryMap) } @@ -735,7 +735,7 @@ func TestGetSpec_BlobSchedule_NotFulu(t *testing.T) { require.Equal(t, http.StatusOK, writer.Code) resp := structs.GetSpecResponse{} require.NoError(t, json.Unmarshal(writer.Body.Bytes(), &resp)) - data, ok := resp.Data.(map[string]interface{}) + data, ok := resp.Data.(map[string]any) require.Equal(t, true, ok) _, exists := data["BLOB_SCHEDULE"] diff --git a/beacon-chain/rpc/eth/debug/handlers.go b/beacon-chain/rpc/eth/debug/handlers.go index 7e279bdfaf..a0c20c4a83 100644 
--- a/beacon-chain/rpc/eth/debug/handlers.go +++ b/beacon-chain/rpc/eth/debug/handlers.go @@ -67,7 +67,7 @@ func (s *Server) getBeaconStateV2(ctx context.Context, w http.ResponseWriter, id return } isFinalized := s.FinalizationFetcher.IsFinalized(ctx, blockRoot) - var respSt interface{} + var respSt any switch st.Version() { case version.Phase0: diff --git a/beacon-chain/rpc/eth/events/events_test.go b/beacon-chain/rpc/eth/events/events_test.go index 329140a4e3..f34181f003 100644 --- a/beacon-chain/rpc/eth/events/events_test.go +++ b/beacon-chain/rpc/eth/events/events_test.go @@ -677,7 +677,7 @@ func TestFillEventData(t *testing.T) { func setActiveValidators(t *testing.T, st state.BeaconState, count int) { balances := make([]uint64, count) validators := make([]*eth.Validator, 0, count) - for i := 0; i < count; i++ { + for i := range count { pubKey := make([]byte, params.BeaconConfig().BLSPubkeyLength) binary.LittleEndian.PutUint64(pubKey, uint64(i)) balances[i] = uint64(i) diff --git a/beacon-chain/rpc/eth/light-client/handlers_test.go b/beacon-chain/rpc/eth/light-client/handlers_test.go index 6e87cfb00f..bc36785542 100644 --- a/beacon-chain/rpc/eth/light-client/handlers_test.go +++ b/beacon-chain/rpc/eth/light-client/handlers_test.go @@ -493,7 +493,7 @@ func TestLightClientHandler_GetLightClientByRange(t *testing.T) { updatePeriod := slot.Div(uint64(config.EpochsPerSyncCommitteePeriod)).Div(uint64(config.SlotsPerEpoch)) - for i := 0; i < 3; i++ { + for i := range 3 { updates[i], err = createUpdate(t, version.Altair) require.NoError(t, err) @@ -551,7 +551,7 @@ func TestLightClientHandler_GetLightClientByRange(t *testing.T) { updatePeriod := slot.Div(uint64(config.EpochsPerSyncCommitteePeriod)).Div(uint64(config.SlotsPerEpoch)) - for i := 0; i < 3; i++ { + for i := range 3 { updates[i], err = createUpdate(t, version.Altair) require.NoError(t, err) @@ -633,7 +633,7 @@ func TestLightClientHandler_GetLightClientByRange(t *testing.T) { updatePeriod := slot.Div(uint64(config.EpochsPerSyncCommitteePeriod)).Div(uint64(config.SlotsPerEpoch)) - for i := 0; i < 3; i++ { + for i := range 3 { if i == 1 { // skip this update updatePeriod++ continue @@ -687,7 +687,7 @@ func TestLightClientHandler_GetLightClientByRange(t *testing.T) { updatePeriod := slot.Div(uint64(config.EpochsPerSyncCommitteePeriod)).Div(uint64(config.SlotsPerEpoch)) - for i := 0; i < 3; i++ { + for i := range 3 { if i == 0 { // skip this update updatePeriod++ continue @@ -910,14 +910,14 @@ func createUpdate(t *testing.T, v int) (interfaces.LightClientUpdate, error) { var err error sampleRoot := make([]byte, 32) - for i := 0; i < 32; i++ { + for i := range 32 { sampleRoot[i] = byte(i) } sampleExecutionBranch := make([][]byte, fieldparams.ExecutionBranchDepth) - for i := 0; i < 4; i++ { + for i := range 4 { sampleExecutionBranch[i] = make([]byte, 32) - for j := 0; j < 32; j++ { + for j := range 32 { sampleExecutionBranch[i][j] = byte(i + j) } } diff --git a/beacon-chain/rpc/eth/node/handlers_peers_test.go b/beacon-chain/rpc/eth/node/handlers_peers_test.go index a9b89c0ad1..036ef20ea3 100644 --- a/beacon-chain/rpc/eth/node/handlers_peers_test.go +++ b/beacon-chain/rpc/eth/node/handlers_peers_test.go @@ -341,8 +341,7 @@ func BenchmarkGetPeers(b *testing.B) { writer.Body = &bytes.Buffer{} s.GetPeers(writer, request) - b.ResetTimer() - for i := 0; i < b.N; i++ { + for b.Loop() { s.GetPeers(writer, request) } } diff --git a/beacon-chain/rpc/eth/rewards/handlers.go b/beacon-chain/rpc/eth/rewards/handlers.go index a30e843553..87be7a5a60 100644 
--- a/beacon-chain/rpc/eth/rewards/handlers.go +++ b/beacon-chain/rpc/eth/rewards/handlers.go @@ -436,7 +436,7 @@ func requestedValIndices(w http.ResponseWriter, r *http.Request, st state.Beacon } if len(valIndices) == 0 { valIndices = make([]primitives.ValidatorIndex, len(allVals)) - for i := 0; i < len(allVals); i++ { + for i := range allVals { valIndices[i] = primitives.ValidatorIndex(i) } } diff --git a/beacon-chain/rpc/eth/rewards/handlers_test.go b/beacon-chain/rpc/eth/rewards/handlers_test.go index 55d76f8c1a..e2d05e939b 100644 --- a/beacon-chain/rpc/eth/rewards/handlers_test.go +++ b/beacon-chain/rpc/eth/rewards/handlers_test.go @@ -85,7 +85,7 @@ func BlockRewardTestSetup(t *testing.T, ver int) (state.BeaconState, interfaces. validators := make([]*eth.Validator, 0, valCount) balances := make([]uint64, 0, valCount) secretKeys := make([]bls.SecretKey, 0, valCount) - for i := 0; i < valCount; i++ { + for range valCount { blsKey, err := bls.RandKey() require.NoError(t, err) secretKeys = append(secretKeys, blsKey) @@ -475,7 +475,7 @@ func TestAttestationRewards(t *testing.T) { validators := make([]*eth.Validator, 0, valCount) balances := make([]uint64, 0, valCount) secretKeys := make([]bls.SecretKey, 0, valCount) - for i := 0; i < valCount; i++ { + for i := range valCount { blsKey, err := bls.RandKey() require.NoError(t, err) secretKeys = append(secretKeys, blsKey) @@ -778,7 +778,7 @@ func TestSyncCommiteeRewards(t *testing.T) { require.NoError(t, st.SetSlot(params.BeaconConfig().SlotsPerEpoch-1)) validators := make([]*eth.Validator, 0, valCount) secretKeys := make([]bls.SecretKey, 0, valCount) - for i := 0; i < valCount; i++ { + for range valCount { blsKey, err := bls.RandKey() require.NoError(t, err) secretKeys = append(secretKeys, blsKey) @@ -792,7 +792,7 @@ func TestSyncCommiteeRewards(t *testing.T) { require.NoError(t, st.SetValidators(validators)) require.NoError(t, st.SetInactivityScores(make([]uint64, len(validators)))) syncCommitteePubkeys := make([][]byte, fieldparams.SyncCommitteeLength) - for i := 0; i < fieldparams.SyncCommitteeLength; i++ { + for i := range fieldparams.SyncCommitteeLength { syncCommitteePubkeys[i] = secretKeys[i].PublicKey().Marshal() } aggPubkey, err := bls.AggregatePublicKeys(syncCommitteePubkeys) @@ -807,7 +807,7 @@ func TestSyncCommiteeRewards(t *testing.T) { b.Block.ProposerIndex = proposerIndex scBits := bitfield.NewBitvector512() // last 10 sync committee members didn't perform their duty - for i := uint64(0); i < fieldparams.SyncCommitteeLength-10; i++ { + for i := range uint64(fieldparams.SyncCommitteeLength - 10) { scBits.SetBitAt(i, true) } domain, err := signing.Domain(st.Fork(), 0, params.BeaconConfig().DomainSyncCommittee, st.GenesisValidatorsRoot()) @@ -845,7 +845,7 @@ func TestSyncCommiteeRewards(t *testing.T) { t.Run("ok - filtered vals", func(t *testing.T) { balances := make([]uint64, 0, valCount) - for i := 0; i < valCount; i++ { + for range valCount { balances = append(balances, params.BeaconConfig().MaxEffectiveBalance) } require.NoError(t, st.SetBalances(balances)) @@ -878,7 +878,7 @@ func TestSyncCommiteeRewards(t *testing.T) { }) t.Run("ok - all vals", func(t *testing.T) { balances := make([]uint64, 0, valCount) - for i := 0; i < valCount; i++ { + for range valCount { balances = append(balances, params.BeaconConfig().MaxEffectiveBalance) } require.NoError(t, st.SetBalances(balances)) @@ -903,7 +903,7 @@ func TestSyncCommiteeRewards(t *testing.T) { }) t.Run("ok - validator outside sync committee is ignored", func(t *testing.T) { 
balances := make([]uint64, 0, valCount) - for i := 0; i < valCount; i++ { + for range valCount { balances = append(balances, params.BeaconConfig().MaxEffectiveBalance) } require.NoError(t, st.SetBalances(balances)) @@ -934,7 +934,7 @@ func TestSyncCommiteeRewards(t *testing.T) { }) t.Run("ok - proposer reward is deducted", func(t *testing.T) { balances := make([]uint64, 0, valCount) - for i := 0; i < valCount; i++ { + for range valCount { balances = append(balances, params.BeaconConfig().MaxEffectiveBalance) } require.NoError(t, st.SetBalances(balances)) @@ -965,7 +965,7 @@ func TestSyncCommiteeRewards(t *testing.T) { }) t.Run("invalid validator index/pubkey", func(t *testing.T) { balances := make([]uint64, 0, valCount) - for i := 0; i < valCount; i++ { + for range valCount { balances = append(balances, params.BeaconConfig().MaxEffectiveBalance) } require.NoError(t, st.SetBalances(balances)) @@ -989,7 +989,7 @@ func TestSyncCommiteeRewards(t *testing.T) { }) t.Run("unknown validator pubkey", func(t *testing.T) { balances := make([]uint64, 0, valCount) - for i := 0; i < valCount; i++ { + for range valCount { balances = append(balances, params.BeaconConfig().MaxEffectiveBalance) } require.NoError(t, st.SetBalances(balances)) @@ -1016,7 +1016,7 @@ func TestSyncCommiteeRewards(t *testing.T) { }) t.Run("validator index too large", func(t *testing.T) { balances := make([]uint64, 0, valCount) - for i := 0; i < valCount; i++ { + for range valCount { balances = append(balances, params.BeaconConfig().MaxEffectiveBalance) } require.NoError(t, st.SetBalances(balances)) @@ -1040,7 +1040,7 @@ func TestSyncCommiteeRewards(t *testing.T) { }) t.Run("phase 0", func(t *testing.T) { balances := make([]uint64, 0, valCount) - for i := 0; i < valCount; i++ { + for range valCount { balances = append(balances, params.BeaconConfig().MaxEffectiveBalance) } require.NoError(t, st.SetBalances(balances)) diff --git a/beacon-chain/rpc/eth/validator/handlers.go b/beacon-chain/rpc/eth/validator/handlers.go index 24b1924891..2377a5fe9e 100644 --- a/beacon-chain/rpc/eth/validator/handlers.go +++ b/beacon-chain/rpc/eth/validator/handlers.go @@ -1168,10 +1168,7 @@ func (s *Server) GetSyncCommitteeDuties(w http.ResponseWriter, r *http.Request) return } - startingEpoch := requestedEpoch - if startingEpoch > currentEpoch { - startingEpoch = currentEpoch - } + startingEpoch := min(requestedEpoch, currentEpoch) slot, err := slots.EpochStart(startingEpoch) if err != nil { httputil.HandleError(w, "Could not get sync committee slot: "+err.Error(), http.StatusInternalServerError) diff --git a/beacon-chain/rpc/eth/validator/handlers_test.go b/beacon-chain/rpc/eth/validator/handlers_test.go index 56ec6b0b04..51498b0c62 100644 --- a/beacon-chain/rpc/eth/validator/handlers_test.go +++ b/beacon-chain/rpc/eth/validator/handlers_test.go @@ -786,7 +786,7 @@ func TestSubmitSyncCommitteeSubscription(t *testing.T) { require.NoError(t, bs.SetBlockRoots(roots)) pubkeys := make([][]byte, len(deposits)) - for i := 0; i < len(deposits); i++ { + for i := range deposits { pubkeys[i] = deposits[i].Data.PublicKey } @@ -958,7 +958,7 @@ func TestSubmitBeaconCommitteeSubscription(t *testing.T) { require.NoError(t, bs.SetBlockRoots(roots)) pubkeys := make([][]byte, len(deposits)) - for i := 0; i < len(deposits); i++ { + for i := range deposits { pubkeys[i] = deposits[i].Data.PublicKey } @@ -1979,7 +1979,7 @@ func TestGetAttesterDuties(t *testing.T) { require.NoError(t, bs.SetValidators(vals)) pubKeys := make([][]byte, len(deposits)) - for i := 0; i < 
len(deposits); i++ { + for i := range deposits { pubKeys[i] = deposits[i].Data.PublicKey } @@ -2242,7 +2242,7 @@ func TestGetProposerDuties(t *testing.T) { roots[31] = []byte("next_epoch_dependent_root") pubKeys := make([][]byte, len(deposits)) - for i := 0; i < len(deposits); i++ { + for i := range deposits { pubKeys[i] = deposits[i].Data.PublicKey } @@ -2441,7 +2441,7 @@ func TestGetSyncCommitteeDuties(t *testing.T) { require.NoError(t, st.SetGenesisTime(genesisTime)) vals := st.Validators() currCommittee := &ethpbalpha.SyncCommittee{} - for i := 0; i < 5; i++ { + for i := range 5 { currCommittee.Pubkeys = append(currCommittee.Pubkeys, vals[i].PublicKey) currCommittee.AggregatePubkey = make([]byte, 48) } @@ -2633,7 +2633,7 @@ func TestGetSyncCommitteeDuties(t *testing.T) { } require.NoError(t, newSyncPeriodSt.SetCurrentSyncCommittee(currCommittee)) nextCommittee := &ethpbalpha.SyncCommittee{} - for i := 0; i < 5; i++ { + for i := range 5 { nextCommittee.Pubkeys = append(nextCommittee.Pubkeys, vals[i].PublicKey) nextCommittee.AggregatePubkey = make([]byte, 48) @@ -2949,14 +2949,14 @@ func BenchmarkServer_PrepareBeaconProposer(b *testing.B) { } f := bytesutil.PadTo([]byte{0xFF, 0x01, 0xFF, 0x01, 0xFF, 0x01, 0xFF, 0x01, 0xFF, 0xFF, 0x01, 0xFF, 0x01, 0xFF, 0x01, 0xFF, 0x01, 0xFF}, fieldparams.FeeRecipientLength) recipients := make([]*structs.FeeRecipient, 0) - for i := 0; i < 10000; i++ { + for i := range 10000 { recipients = append(recipients, &structs.FeeRecipient{FeeRecipient: hexutil.Encode(f), ValidatorIndex: fmt.Sprint(i)}) } byt, err := json.Marshal(recipients) require.NoError(b, err) var body bytes.Buffer - b.ResetTimer() - for i := 0; i < b.N; i++ { + + for b.Loop() { _, err = body.WriteString(string(byt)) require.NoError(b, err) url := "http://example.com/eth/v1/validator/prepare_beacon_proposer" diff --git a/beacon-chain/rpc/lookup/blocker_test.go b/beacon-chain/rpc/lookup/blocker_test.go index 2da59cc282..2b1f980a6e 100644 --- a/beacon-chain/rpc/lookup/blocker_test.go +++ b/beacon-chain/rpc/lookup/blocker_test.go @@ -96,7 +96,7 @@ func TestGetBlock(t *testing.T) { }, { name: "non canonical", - blockID: []byte(fmt.Sprintf("%d", nextSlot)), + blockID: fmt.Appendf(nil, "%d", nextSlot), want: nil, }, { @@ -761,7 +761,7 @@ func TestBlobs_CommitmentOrdering(t *testing.T) { t.Run("request non-existent hash", func(t *testing.T) { // Create a fake versioned hash fakeHash := make([]byte, 32) - for i := 0; i < 32; i++ { + for i := range 32 { fakeHash[i] = 0xFF } @@ -780,7 +780,7 @@ func TestBlobs_CommitmentOrdering(t *testing.T) { // Create two fake versioned hashes fakeHash1 := make([]byte, 32) fakeHash2 := make([]byte, 32) - for i := 0; i < 32; i++ { + for i := range 32 { fakeHash1[i] = 0xAA fakeHash2[i] = 0xBB } diff --git a/beacon-chain/rpc/prysm/node/handlers_test.go b/beacon-chain/rpc/prysm/node/handlers_test.go index 7e6875438a..42fa7c2942 100644 --- a/beacon-chain/rpc/prysm/node/handlers_test.go +++ b/beacon-chain/rpc/prysm/node/handlers_test.go @@ -91,7 +91,7 @@ func TestListTrustedPeer(t *testing.T) { // assert number of trusted peer is right assert.Equal(t, 9, len(peers)) - for i := 0; i < 9; i++ { + for i := range 9 { pid, err := peer.Decode(peers[i].PeerId) require.NoError(t, err) if pid == ids[8] { diff --git a/beacon-chain/rpc/prysm/v1alpha1/beacon/assignments_test.go b/beacon-chain/rpc/prysm/v1alpha1/beacon/assignments_test.go index a114919742..f7a56cea21 100644 --- a/beacon-chain/rpc/prysm/v1alpha1/beacon/assignments_test.go +++ 
b/beacon-chain/rpc/prysm/v1alpha1/beacon/assignments_test.go @@ -49,7 +49,7 @@ func TestServer_ListAssignments_Pagination_InputOutOfRange(t *testing.T) { ctx := t.Context() count := 100 validators := make([]*ethpb.Validator, 0, count) - for i := 0; i < count; i++ { + for i := range count { pubKey := make([]byte, params.BeaconConfig().BLSPubkeyLength) withdrawalCred := make([]byte, 32) binary.LittleEndian.PutUint64(pubKey, uint64(i)) @@ -114,7 +114,7 @@ func TestServer_ListAssignments_Pagination_DefaultPageSize_NoArchive(t *testing. ctx := t.Context() count := 500 validators := make([]*ethpb.Validator, 0, count) - for i := 0; i < count; i++ { + for i := range count { pubKey := make([]byte, params.BeaconConfig().BLSPubkeyLength) withdrawalCred := make([]byte, 32) binary.LittleEndian.PutUint64(pubKey, uint64(i)) @@ -200,7 +200,7 @@ func TestServer_ListAssignments_FilterPubkeysIndices_NoPagination(t *testing.T) count := 100 validators := make([]*ethpb.Validator, 0, count) withdrawCreds := make([]byte, 32) - for i := 0; i < count; i++ { + for i := range count { pubKey := make([]byte, params.BeaconConfig().BLSPubkeyLength) binary.LittleEndian.PutUint64(pubKey, uint64(i)) val := &ethpb.Validator{ @@ -272,7 +272,7 @@ func TestServer_ListAssignments_CanFilterPubkeysIndices_WithPagination(t *testin count := 100 validators := make([]*ethpb.Validator, 0, count) withdrawCred := make([]byte, 32) - for i := 0; i < count; i++ { + for i := range count { pubKey := make([]byte, params.BeaconConfig().BLSPubkeyLength) binary.LittleEndian.PutUint64(pubKey, uint64(i)) val := &ethpb.Validator{ diff --git a/beacon-chain/rpc/prysm/v1alpha1/beacon/attestations.go b/beacon-chain/rpc/prysm/v1alpha1/beacon/attestations.go index 1290130067..38e31626f5 100644 --- a/beacon-chain/rpc/prysm/v1alpha1/beacon/attestations.go +++ b/beacon-chain/rpc/prysm/v1alpha1/beacon/attestations.go @@ -446,7 +446,7 @@ func blockIndexedAttestations[T ethpb.IndexedAtt]( err, ) } - for i := 0; i < len(atts); i++ { + for i := range atts { att := atts[i] committee, err := helpers.BeaconCommitteeFromState(ctx, attState, att.GetData().Slot, att.GetData().CommitteeIndex) if err != nil { diff --git a/beacon-chain/rpc/prysm/v1alpha1/beacon/attestations_test.go b/beacon-chain/rpc/prysm/v1alpha1/beacon/attestations_test.go index fe72b7d89a..b330d4d796 100644 --- a/beacon-chain/rpc/prysm/v1alpha1/beacon/attestations_test.go +++ b/beacon-chain/rpc/prysm/v1alpha1/beacon/attestations_test.go @@ -111,7 +111,7 @@ func TestServer_ListAttestations_NoPagination(t *testing.T) { count := primitives.Slot(8) atts := make([]*ethpb.Attestation, 0, count) - for i := primitives.Slot(0); i < count; i++ { + for i := range count { blockExample := util.NewBeaconBlock() blockExample.Block.Body.Attestations = []*ethpb.Attestation{ { @@ -264,7 +264,7 @@ func TestServer_ListAttestations_Pagination_CustomPageParameters(t *testing.T) { count := params.BeaconConfig().SlotsPerEpoch * 4 atts := make([]ethpb.Att, 0, count) for i := primitives.Slot(0); i < params.BeaconConfig().SlotsPerEpoch; i++ { - for s := primitives.CommitteeIndex(0); s < 4; s++ { + for s := range primitives.CommitteeIndex(4) { blockExample := util.NewBeaconBlock() blockExample.Block.Slot = i blockExample.Block.Body.Attestations = []*ethpb.Attestation{ @@ -370,7 +370,7 @@ func TestServer_ListAttestations_Pagination_OutOfRange(t *testing.T) { util.NewBeaconBlock() count := primitives.Slot(1) atts := make([]*ethpb.Attestation, 0, count) - for i := primitives.Slot(0); i < count; i++ { + for i := range count { blockExample
:= util.HydrateSignedBeaconBlock(&ethpb.SignedBeaconBlock{ Block: &ethpb.BeaconBlock{ Body: &ethpb.BeaconBlockBody{ @@ -426,7 +426,7 @@ func TestServer_ListAttestations_Pagination_DefaultPageSize(t *testing.T) { count := primitives.Slot(params.BeaconConfig().DefaultPageSize) atts := make([]*ethpb.Attestation, 0, count) - for i := primitives.Slot(0); i < count; i++ { + for i := range count { blockExample := util.NewBeaconBlock() blockExample.Block.Body.Attestations = []*ethpb.Attestation{ { @@ -520,7 +520,7 @@ func TestServer_mapAttestationToTargetRoot(t *testing.T) { targetRoot1 := bytesutil.ToBytes32([]byte("root1")) targetRoot2 := bytesutil.ToBytes32([]byte("root2")) - for i := primitives.Slot(0); i < count; i++ { + for i := range count { var targetRoot [32]byte if i%2 == 0 { targetRoot = targetRoot1 @@ -556,7 +556,7 @@ func TestServer_ListIndexedAttestations_GenesisEpoch(t *testing.T) { atts := make([]*ethpb.Attestation, 0, count) atts2 := make([]*ethpb.Attestation, 0, count) - for i := primitives.Slot(0); i < count; i++ { + for i := range count { var targetRoot [32]byte if i%2 == 0 { targetRoot = targetRoot1 @@ -697,7 +697,7 @@ func TestServer_ListIndexedAttestations_OldEpoch(t *testing.T) { state, _ := util.DeterministicGenesisState(t, numValidators) randaoMixes := make([][]byte, params.BeaconConfig().EpochsPerHistoricalVector) - for i := 0; i < len(randaoMixes); i++ { + for i := range randaoMixes { randaoMixes[i] = make([]byte, fieldparams.RootLength) } require.NoError(t, state.SetRandaoMixes(randaoMixes)) @@ -754,7 +754,7 @@ func TestServer_ListIndexedAttestationsElectra(t *testing.T) { atts := make([]*ethpb.AttestationElectra, 0, count) atts2 := make([]*ethpb.AttestationElectra, 0, count) - for i := primitives.Slot(0); i < count; i++ { + for i := range count { var targetRoot [32]byte if i%2 == 0 { targetRoot = targetRoot1 @@ -924,7 +924,7 @@ func TestServer_AttestationPool_Pagination_DefaultPageSize(t *testing.T) { } atts := make([]ethpb.Att, params.BeaconConfig().DefaultPageSize+1) - for i := 0; i < len(atts); i++ { + for i := range atts { att := util.NewAttestation() att.Data.Slot = primitives.Slot(i) atts[i] = att @@ -946,7 +946,7 @@ func TestServer_AttestationPool_Pagination_CustomPageSize(t *testing.T) { numAtts := 100 atts := make([]ethpb.Att, numAtts) - for i := 0; i < len(atts); i++ { + for i := range atts { att := util.NewAttestation() att.Data.Slot = primitives.Slot(i) atts[i] = att @@ -1002,7 +1002,7 @@ func TestServer_AttestationPoolElectra(t *testing.T) { } atts := make([]ethpb.Att, params.BeaconConfig().DefaultPageSize+1) - for i := 0; i < len(atts); i++ { + for i := range atts { att := util.NewAttestationElectra() att.Data.Slot = primitives.Slot(i) atts[i] = att diff --git a/beacon-chain/rpc/prysm/v1alpha1/beacon/blocks_test.go b/beacon-chain/rpc/prysm/v1alpha1/beacon/blocks_test.go index a9df700a31..9703fafe03 100644 --- a/beacon-chain/rpc/prysm/v1alpha1/beacon/blocks_test.go +++ b/beacon-chain/rpc/prysm/v1alpha1/beacon/blocks_test.go @@ -419,7 +419,7 @@ func runListBeaconBlocksGenesisMultiBlocks(t *testing.T, genBlock interfaces.Rea count := primitives.Slot(100) blks := make([]interfaces.ReadOnlySignedBeaconBlock, count) - for i := primitives.Slot(0); i < count; i++ { + for i := range count { blks[i] = blockCreator(i) } require.NoError(t, db.SaveBlocks(ctx, blks)) @@ -554,7 +554,7 @@ func runListBeaconBlocksPagination(t *testing.T, orphanedBlk interfaces.ReadOnly count := primitives.Slot(100) blks := make([]interfaces.ReadOnlySignedBeaconBlock, count) blkContainers := 
make([]*ethpb.BeaconBlockContainer, count) - for i := primitives.Slot(0); i < count; i++ { + for i := range count { b := blockCreator(i) root, err := b.Block().HashTreeRoot() require.NoError(t, err) diff --git a/beacon-chain/rpc/prysm/v1alpha1/beacon/committees_test.go b/beacon-chain/rpc/prysm/v1alpha1/beacon/committees_test.go index 0c746cc3fb..7fde903463 100644 --- a/beacon-chain/rpc/prysm/v1alpha1/beacon/committees_test.go +++ b/beacon-chain/rpc/prysm/v1alpha1/beacon/committees_test.go @@ -95,7 +95,7 @@ func TestServer_ListBeaconCommittees_PreviousEpoch(t *testing.T) { headState := setupActiveValidators(t, numValidators) mixes := make([][]byte, params.BeaconConfig().EpochsPerHistoricalVector) - for i := 0; i < len(mixes); i++ { + for i := range mixes { mixes[i] = make([]byte, fieldparams.RootLength) } require.NoError(t, headState.SetRandaoMixes(mixes)) @@ -216,7 +216,7 @@ func TestRetrieveCommitteesForRoot(t *testing.T) { func setupActiveValidators(t *testing.T, count int) state.BeaconState { balances := make([]uint64, count) validators := make([]*ethpb.Validator, 0, count) - for i := 0; i < count; i++ { + for i := range count { pubKey := make([]byte, params.BeaconConfig().BLSPubkeyLength) binary.LittleEndian.PutUint64(pubKey, uint64(i)) balances[i] = uint64(i) diff --git a/beacon-chain/rpc/prysm/v1alpha1/beacon/config.go b/beacon-chain/rpc/prysm/v1alpha1/beacon/config.go index 98a7184950..4bab546ca8 100644 --- a/beacon-chain/rpc/prysm/v1alpha1/beacon/config.go +++ b/beacon-chain/rpc/prysm/v1alpha1/beacon/config.go @@ -18,7 +18,7 @@ func (_ *Server) GetBeaconConfig(_ context.Context, _ *emptypb.Empty) (*ethpb.Be val := reflect.ValueOf(conf).Elem() numFields := val.Type().NumField() res := make(map[string]string, numFields) - for i := 0; i < numFields; i++ { + for i := range numFields { field := val.Type().Field(i) if field.IsExported() { res[field.Name] = fmt.Sprintf("%v", val.Field(i).Interface()) diff --git a/beacon-chain/rpc/prysm/v1alpha1/beacon/config_test.go b/beacon-chain/rpc/prysm/v1alpha1/beacon/config_test.go index 799b27e54d..851b03fdad 100644 --- a/beacon-chain/rpc/prysm/v1alpha1/beacon/config_test.go +++ b/beacon-chain/rpc/prysm/v1alpha1/beacon/config_test.go @@ -17,12 +17,12 @@ func TestServer_GetBeaconConfig(t *testing.T) { res, err := bs.GetBeaconConfig(ctx, &emptypb.Empty{}) require.NoError(t, err) conf := params.BeaconConfig() - confType := reflect.TypeOf(conf).Elem() + confType := reflect.TypeFor[params.BeaconChainConfig]() numFields := confType.NumField() // Count only exported fields, as unexported fields are not included in the config exportedFields := 0 - for i := 0; i < numFields; i++ { + for i := range numFields { if confType.Field(i).IsExported() { exportedFields++ } diff --git a/beacon-chain/rpc/prysm/v1alpha1/beacon/validators_test.go b/beacon-chain/rpc/prysm/v1alpha1/beacon/validators_test.go index e7e5583f50..cfe78de180 100644 --- a/beacon-chain/rpc/prysm/v1alpha1/beacon/validators_test.go +++ b/beacon-chain/rpc/prysm/v1alpha1/beacon/validators_test.go @@ -151,7 +151,7 @@ func TestServer_ListValidatorBalances_DefaultResponse_NoArchive(t *testing.T) { validators := make([]*ethpb.Validator, numItems) balances := make([]uint64, numItems) balancesResponse := make([]*ethpb.ValidatorBalances_Balance, numItems) - for i := 0; i < numItems; i++ { + for i := range numItems { validators[i] = &ethpb.Validator{ PublicKey: pubKey(uint64(i)), WithdrawalCredentials: make([]byte, 32), @@ -531,7 +531,7 @@ func TestServer_ListValidators_OnlyActiveValidators(t *testing.T) {
balances := make([]uint64, count) validators := make([]*ethpb.Validator, count) activeValidators := make([]*ethpb.Validators_ValidatorContainer, 0) - for i := 0; i < count; i++ { + for i := range count { pubKey := pubKey(uint64(i)) balances[i] = params.BeaconConfig().MaxEffectiveBalance @@ -594,7 +594,7 @@ func TestServer_ListValidators_InactiveInTheMiddle(t *testing.T) { balances := make([]uint64, count) validators := make([]*ethpb.Validator, count) activeValidators := make([]*ethpb.Validators_ValidatorContainer, 0) - for i := 0; i < count; i++ { + for i := range count { pubKey := pubKey(uint64(i)) balances[i] = params.BeaconConfig().MaxEffectiveBalance @@ -708,7 +708,7 @@ func TestServer_ListValidators_NoPagination(t *testing.T) { validators, _, headState := setupValidators(t, beaconDB, 100) want := make([]*ethpb.Validators_ValidatorContainer, len(validators)) - for i := 0; i < len(validators); i++ { + for i := range validators { want[i] = &ethpb.Validators_ValidatorContainer{ Index: primitives.ValidatorIndex(i), Validator: validators[i], @@ -741,7 +741,7 @@ func TestServer_ListValidators_StategenNotUsed(t *testing.T) { validators, _, headState := setupValidators(t, beaconDB, 100) want := make([]*ethpb.Validators_ValidatorContainer, len(validators)) - for i := 0; i < len(validators); i++ { + for i := range validators { want[i] = &ethpb.Validators_ValidatorContainer{ Index: primitives.ValidatorIndex(i), Validator: validators[i], @@ -991,7 +991,7 @@ func TestServer_ListValidators_DefaultPageSize(t *testing.T) { validators, _, headState := setupValidators(t, beaconDB, 1000) want := make([]*ethpb.Validators_ValidatorContainer, len(validators)) - for i := 0; i < len(validators); i++ { + for i := range validators { want[i] = &ethpb.Validators_ValidatorContainer{ Index: primitives.ValidatorIndex(i), Validator: validators[i], @@ -1099,7 +1099,7 @@ func TestServer_ListValidators_ProcessHeadStateSlots(t *testing.T) { numValidators := params.BeaconConfig().MinGenesisActiveValidatorCount validators := make([]*ethpb.Validator, numValidators) balances := make([]uint64, numValidators) - for i := uint64(0); i < numValidators; i++ { + for i := range numValidators { validators[i] = &ethpb.Validator{ ActivationEpoch: 0, PublicKey: make([]byte, 48), @@ -1109,7 +1109,7 @@ func TestServer_ListValidators_ProcessHeadStateSlots(t *testing.T) { balances[i] = params.BeaconConfig().MaxEffectiveBalance } want := make([]*ethpb.Validators_ValidatorContainer, len(validators)) - for i := 0; i < len(validators); i++ { + for i := range validators { want[i] = &ethpb.Validators_ValidatorContainer{ Index: primitives.ValidatorIndex(i), Validator: validators[i], @@ -1154,7 +1154,7 @@ func TestServer_ListValidators_ProcessHeadStateSlots(t *testing.T) { func TestServer_GetValidator(t *testing.T) { count := primitives.Epoch(30) validators := make([]*ethpb.Validator, count) - for i := primitives.Epoch(0); i < count; i++ { + for i := range count { validators[i] = &ethpb.Validator{ ActivationEpoch: i, PublicKey: pubKey(uint64(i)), @@ -1241,7 +1241,7 @@ func TestServer_GetValidatorActiveSetChanges(t *testing.T) { require.NoError(t, err) require.NoError(t, headState.SetSlot(0)) require.NoError(t, headState.SetValidators(validators)) - for i := 0; i < len(validators); i++ { + for i := range validators { activationEpoch := params.BeaconConfig().FarFutureEpoch withdrawableEpoch := params.BeaconConfig().FarFutureEpoch exitEpoch := params.BeaconConfig().FarFutureEpoch @@ -1513,7 +1513,7 @@ func TestServer_GetValidatorParticipation_CurrentAndPrevEpoch(t
*testing.T) { validators := make([]*ethpb.Validator, validatorCount) balances := make([]uint64, validatorCount) - for i := 0; i < len(validators); i++ { + for i := range validators { validators[i] = ðpb.Validator{ PublicKey: bytesutil.ToBytes(uint64(i), 48), WithdrawalCredentials: make([]byte, 32), @@ -1597,7 +1597,7 @@ func TestServer_GetValidatorParticipation_OrphanedUntilGenesis(t *testing.T) { validators := make([]*ethpb.Validator, validatorCount) balances := make([]uint64, validatorCount) - for i := 0; i < len(validators); i++ { + for i := range validators { validators[i] = ðpb.Validator{ PublicKey: bytesutil.ToBytes(uint64(i), 48), WithdrawalCredentials: make([]byte, 32), @@ -1823,7 +1823,7 @@ func TestGetValidatorPerformance_OK(t *testing.T) { require.NoError(t, err) require.NoError(t, headState.SetSlot(params.BeaconConfig().SlotsPerEpoch.Mul(uint64(epoch+1)))) atts := make([]*ethpb.PendingAttestation, 3) - for i := 0; i < len(atts); i++ { + for i := range atts { atts[i] = ðpb.PendingAttestation{ Data: ðpb.AttestationData{ Target: ðpb.Checkpoint{Root: make([]byte, 32)}, @@ -2249,7 +2249,7 @@ func TestGetValidatorPerformanceCapella_OK(t *testing.T) { func setupValidators(t testing.TB, _ db.Database, count int) ([]*ethpb.Validator, []uint64, state.BeaconState) { balances := make([]uint64, count) validators := make([]*ethpb.Validator, 0, count) - for i := 0; i < count; i++ { + for i := range count { pubKey := pubKey(uint64(i)) balances[i] = uint64(i) validators = append(validators, ðpb.Validator{ diff --git a/beacon-chain/rpc/prysm/v1alpha1/validator/attester_test.go b/beacon-chain/rpc/prysm/v1alpha1/validator/attester_test.go index 7d2faf7302..70916c4ac7 100644 --- a/beacon-chain/rpc/prysm/v1alpha1/validator/attester_test.go +++ b/beacon-chain/rpc/prysm/v1alpha1/validator/attester_test.go @@ -46,7 +46,7 @@ func TestProposeAttestation(t *testing.T) { require.NoError(t, err) validators := make([]*ethpb.Validator, 64) - for i := 0; i < len(validators); i++ { + for i := range validators { validators[i] = ðpb.Validator{ PublicKey: make([]byte, 48), WithdrawalCredentials: make([]byte, 32), @@ -253,12 +253,11 @@ func BenchmarkGetAttestationDataConcurrent(b *testing.B) { Slot: 3*params.BeaconConfig().SlotsPerEpoch + 1, } - b.ResetTimer() - for i := 0; i < b.N; i++ { + for b.Loop() { var wg sync.WaitGroup wg.Add(5000) // for 5000 concurrent accesses - for j := 0; j < 5000; j++ { + for range 5000 { go func() { defer wg.Done() _, err := attesterServer.GetAttestationData(b.Context(), req) @@ -577,7 +576,7 @@ func TestServer_SubscribeCommitteeSubnets_MultipleSlots(t *testing.T) { randGen := rand.New(s) validators := make([]*ethpb.Validator, 64) - for i := 0; i < len(validators); i++ { + for i := range validators { validators[i] = ðpb.Validator{ ExitEpoch: params.BeaconConfig().FarFutureEpoch, EffectiveBalance: params.BeaconConfig().MaxEffectiveBalance, diff --git a/beacon-chain/rpc/prysm/v1alpha1/validator/blocks_test.go b/beacon-chain/rpc/prysm/v1alpha1/validator/blocks_test.go index 25a2f1904e..8bba2a5704 100644 --- a/beacon-chain/rpc/prysm/v1alpha1/validator/blocks_test.go +++ b/beacon-chain/rpc/prysm/v1alpha1/validator/blocks_test.go @@ -94,7 +94,7 @@ func TestServer_StreamAltairBlocks_OnHeadUpdated(t *testing.T) { defer ctrl.Finish() mockStream := mock.NewMockBeaconNodeValidatorAltair_StreamBlocksServer(ctrl) - mockStream.EXPECT().Send(ðpb.StreamBlocksResponse{Block: ðpb.StreamBlocksResponse_AltairBlock{AltairBlock: b}}).Do(func(arg0 interface{}) { + 
mockStream.EXPECT().Send(ðpb.StreamBlocksResponse{Block: ðpb.StreamBlocksResponse_AltairBlock{AltairBlock: b}}).Do(func(arg0 any) { exitRoutine <- true }) mockStream.EXPECT().Context().Return(ctx).AnyTimes() @@ -139,7 +139,7 @@ func TestServer_StreamCapellaBlocks_OnHeadUpdated(t *testing.T) { defer ctrl.Finish() mockStream := mock.NewMockBeaconNodeValidatorAltair_StreamBlocksServer(ctrl) - mockStream.EXPECT().Send(ðpb.StreamBlocksResponse{Block: ðpb.StreamBlocksResponse_CapellaBlock{CapellaBlock: b}}).Do(func(arg0 interface{}) { + mockStream.EXPECT().Send(ðpb.StreamBlocksResponse{Block: ðpb.StreamBlocksResponse_CapellaBlock{CapellaBlock: b}}).Do(func(arg0 any) { exitRoutine <- true }) mockStream.EXPECT().Context().Return(ctx).AnyTimes() @@ -185,7 +185,7 @@ func TestServer_StreamAltairBlocksVerified_OnHeadUpdated(t *testing.T) { ctrl := gomock.NewController(t) defer ctrl.Finish() mockStream := mock.NewMockBeaconNodeValidatorAltair_StreamBlocksServer(ctrl) - mockStream.EXPECT().Send(ðpb.StreamBlocksResponse{Block: ðpb.StreamBlocksResponse_AltairBlock{AltairBlock: b}}).Do(func(arg0 interface{}) { + mockStream.EXPECT().Send(ðpb.StreamBlocksResponse{Block: ðpb.StreamBlocksResponse_AltairBlock{AltairBlock: b}}).Do(func(arg0 any) { exitRoutine <- true }) mockStream.EXPECT().Context().Return(ctx).AnyTimes() @@ -229,7 +229,7 @@ func TestServer_StreamCapellaBlocksVerified_OnHeadUpdated(t *testing.T) { ctrl := gomock.NewController(t) defer ctrl.Finish() mockStream := mock.NewMockBeaconNodeValidatorAltair_StreamBlocksServer(ctrl) - mockStream.EXPECT().Send(ðpb.StreamBlocksResponse{Block: ðpb.StreamBlocksResponse_CapellaBlock{CapellaBlock: b}}).Do(func(arg0 interface{}) { + mockStream.EXPECT().Send(ðpb.StreamBlocksResponse{Block: ðpb.StreamBlocksResponse_CapellaBlock{CapellaBlock: b}}).Do(func(arg0 any) { exitRoutine <- true }) mockStream.EXPECT().Context().Return(ctx).AnyTimes() @@ -320,7 +320,7 @@ func TestServer_StreamSlots_OnHeadUpdated(t *testing.T) { Slot: 123, PreviousDutyDependentRoot: params.BeaconConfig().ZeroHash[:], CurrentDutyDependentRoot: params.BeaconConfig().ZeroHash[:], - }).Do(func(arg0 interface{}) { + }).Do(func(arg0 any) { exitRoutine <- true }) mockStream.EXPECT().Context().Return(ctx).AnyTimes() @@ -359,7 +359,7 @@ func TestServer_StreamSlotsVerified_OnHeadUpdated(t *testing.T) { Slot: 123, PreviousDutyDependentRoot: params.BeaconConfig().ZeroHash[:], CurrentDutyDependentRoot: params.BeaconConfig().ZeroHash[:], - }).Do(func(arg0 interface{}) { + }).Do(func(arg0 any) { exitRoutine <- true }) mockStream.EXPECT().Context().Return(ctx).AnyTimes() @@ -405,7 +405,7 @@ func TestServer_StreamBlocksVerified_FuluBlock(t *testing.T) { ctrl := gomock.NewController(t) defer ctrl.Finish() mockStream := mock.NewMockBeaconNodeValidatorAltair_StreamBlocksServer(ctrl) - mockStream.EXPECT().Send(ðpb.StreamBlocksResponse{Block: ðpb.StreamBlocksResponse_FuluBlock{FuluBlock: b}}).Do(func(arg0 interface{}) { + mockStream.EXPECT().Send(ðpb.StreamBlocksResponse{Block: ðpb.StreamBlocksResponse_FuluBlock{FuluBlock: b}}).Do(func(arg0 any) { exitRoutine <- true }) mockStream.EXPECT().Context().Return(ctx).AnyTimes() @@ -448,7 +448,7 @@ func TestServer_StreamBlocks_FuluBlock(t *testing.T) { defer ctrl.Finish() mockStream := mock.NewMockBeaconNodeValidatorAltair_StreamBlocksServer(ctrl) - mockStream.EXPECT().Send(ðpb.StreamBlocksResponse{Block: ðpb.StreamBlocksResponse_FuluBlock{FuluBlock: b}}).Do(func(arg0 interface{}) { + mockStream.EXPECT().Send(ðpb.StreamBlocksResponse{Block: 
ðpb.StreamBlocksResponse_FuluBlock{FuluBlock: b}}).Do(func(arg0 any) { exitRoutine <- true }) mockStream.EXPECT().Context().Return(ctx).AnyTimes() diff --git a/beacon-chain/rpc/prysm/v1alpha1/validator/duties_test.go b/beacon-chain/rpc/prysm/v1alpha1/validator/duties_test.go index 65eb5b45bd..f19532516a 100644 --- a/beacon-chain/rpc/prysm/v1alpha1/validator/duties_test.go +++ b/beacon-chain/rpc/prysm/v1alpha1/validator/duties_test.go @@ -45,7 +45,7 @@ func TestGetDuties_OK(t *testing.T) { pubKeys := make([][]byte, len(deposits)) indices := make([]uint64, len(deposits)) - for i := 0; i < len(deposits); i++ { + for i := range deposits { pubKeys[i] = deposits[i].Data.PublicKey indices[i] = uint64(i) } @@ -123,7 +123,7 @@ func TestGetAltairDuties_SyncCommitteeOK(t *testing.T) { require.NoError(t, bs.SetCurrentSyncCommittee(syncCommittee)) pubKeys := make([][]byte, len(deposits)) indices := make([]uint64, len(deposits)) - for i := 0; i < len(deposits); i++ { + for i := range deposits { pubKeys[i] = deposits[i].Data.PublicKey indices[i] = uint64(i) } @@ -227,7 +227,7 @@ func TestGetBellatrixDuties_SyncCommitteeOK(t *testing.T) { require.NoError(t, bs.SetCurrentSyncCommittee(syncCommittee)) pubKeys := make([][]byte, len(deposits)) indices := make([]uint64, len(deposits)) - for i := 0; i < len(deposits); i++ { + for i := range deposits { pubKeys[i] = deposits[i].Data.PublicKey indices[i] = uint64(i) } @@ -391,7 +391,7 @@ func TestGetDuties_CurrentEpoch_ShouldNotFail(t *testing.T) { pubKeys := make([][fieldparams.BLSPubkeyLength]byte, len(deposits)) indices := make([]uint64, len(deposits)) - for i := 0; i < len(deposits); i++ { + for i := range deposits { pubKeys[i] = bytesutil.ToBytes48(deposits[i].Data.PublicKey) indices[i] = uint64(i) } @@ -431,7 +431,7 @@ func TestGetDuties_MultipleKeys_OK(t *testing.T) { pubKeys := make([][fieldparams.BLSPubkeyLength]byte, len(deposits)) indices := make([]uint64, len(deposits)) - for i := 0; i < len(deposits); i++ { + for i := range deposits { pubKeys[i] = bytesutil.ToBytes48(deposits[i].Data.PublicKey) indices[i] = uint64(i) } @@ -484,7 +484,7 @@ func BenchmarkCommitteeAssignment(b *testing.B) { pubKeys := make([][fieldparams.BLSPubkeyLength]byte, len(deposits)) indices := make([]uint64, len(deposits)) - for i := 0; i < len(deposits); i++ { + for i := range deposits { pubKeys[i] = bytesutil.ToBytes48(deposits[i].Data.PublicKey) indices[i] = uint64(i) } @@ -505,8 +505,8 @@ func BenchmarkCommitteeAssignment(b *testing.B) { PublicKeys: pks, Epoch: 0, } - b.ResetTimer() - for i := 0; i < b.N; i++ { + + for b.Loop() { _, err := vs.GetDuties(b.Context(), req) assert.NoError(b, err) } diff --git a/beacon-chain/rpc/prysm/v1alpha1/validator/duties_v2_test.go b/beacon-chain/rpc/prysm/v1alpha1/validator/duties_v2_test.go index 09e58e7a10..73a386104c 100644 --- a/beacon-chain/rpc/prysm/v1alpha1/validator/duties_v2_test.go +++ b/beacon-chain/rpc/prysm/v1alpha1/validator/duties_v2_test.go @@ -39,7 +39,7 @@ func TestGetDutiesV2_OK(t *testing.T) { pubKeys := make([][]byte, len(deposits)) indices := make([]uint64, len(deposits)) - for i := 0; i < len(deposits); i++ { + for i := range deposits { pubKeys[i] = deposits[i].Data.PublicKey indices[i] = uint64(i) } @@ -117,7 +117,7 @@ func TestGetAltairDutiesV2_SyncCommitteeOK(t *testing.T) { require.NoError(t, bs.SetCurrentSyncCommittee(syncCommittee)) pubKeys := make([][]byte, len(deposits)) indices := make([]uint64, len(deposits)) - for i := 0; i < len(deposits); i++ { + for i := range deposits { pubKeys[i] = 
deposits[i].Data.PublicKey indices[i] = uint64(i) } @@ -221,7 +221,7 @@ func TestGetBellatrixDutiesV2_SyncCommitteeOK(t *testing.T) { require.NoError(t, bs.SetCurrentSyncCommittee(syncCommittee)) pubKeys := make([][]byte, len(deposits)) indices := make([]uint64, len(deposits)) - for i := 0; i < len(deposits); i++ { + for i := range deposits { pubKeys[i] = deposits[i].Data.PublicKey indices[i] = uint64(i) } @@ -428,7 +428,7 @@ func TestGetDutiesV2_CurrentEpoch_ShouldNotFail(t *testing.T) { pubKeys := make([][fieldparams.BLSPubkeyLength]byte, len(deposits)) indices := make([]uint64, len(deposits)) - for i := 0; i < len(deposits); i++ { + for i := range deposits { pubKeys[i] = bytesutil.ToBytes48(deposits[i].Data.PublicKey) indices[i] = uint64(i) } @@ -468,7 +468,7 @@ func TestGetDutiesV2_MultipleKeys_OK(t *testing.T) { pubKeys := make([][fieldparams.BLSPubkeyLength]byte, len(deposits)) indices := make([]uint64, len(deposits)) - for i := 0; i < len(deposits); i++ { + for i := range deposits { pubKeys[i] = bytesutil.ToBytes48(deposits[i].Data.PublicKey) indices[i] = uint64(i) } diff --git a/beacon-chain/rpc/prysm/v1alpha1/validator/proposer.go b/beacon-chain/rpc/prysm/v1alpha1/validator/proposer.go index a06a1a5b6c..d3bb406952 100644 --- a/beacon-chain/rpc/prysm/v1alpha1/validator/proposer.go +++ b/beacon-chain/rpc/prysm/v1alpha1/validator/proposer.go @@ -191,9 +191,7 @@ func (vs *Server) getParentState(ctx context.Context, slot primitives.Slot) (sta func (vs *Server) BuildBlockParallel(ctx context.Context, sBlk interfaces.SignedBeaconBlock, head state.BeaconState, skipMevBoost bool, builderBoostFactor primitives.Gwei) (*ethpb.GenericBeaconBlock, error) { // Build consensus fields in background var wg sync.WaitGroup - wg.Add(1) - go func() { - defer wg.Done() + wg.Go(func() { // Set eth1 data. eth1Data, err := vs.eth1DataMajorityVote(ctx, head) @@ -233,7 +231,7 @@ func (vs *Server) BuildBlockParallel(ctx context.Context, sBlk interfaces.Signed // Set bls to execution change. New in Capella. 
vs.setBlsToExecData(sBlk, head) - }() + }) winningBid := primitives.ZeroWei() var bundle enginev1.BlobsBundler diff --git a/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_altair.go b/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_altair.go index 7894c49261..e2ea14693a 100644 --- a/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_altair.go +++ b/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_altair.go @@ -71,12 +71,12 @@ func (vs *Server) getSyncAggregate(ctx context.Context, slot primitives.Slot, ro subcommitteeCount := params.BeaconConfig().SyncCommitteeSubnetCount var bitsHolder [][]byte - for i := uint64(0); i < subcommitteeCount; i++ { + for range subcommitteeCount { bitsHolder = append(bitsHolder, ethpb.NewSyncCommitteeAggregationBits()) } sigsHolder := make([]bls.Signature, 0, params.BeaconConfig().SyncCommitteeSize/subcommitteeCount) - for i := uint64(0); i < subcommitteeCount; i++ { + for i := range subcommitteeCount { cs := proposerContributions.filterBySubIndex(i) aggregates, err := synccontribution.Aggregate(cs) if err != nil { @@ -130,7 +130,7 @@ func (vs *Server) aggregatedSyncCommitteeMessages( subcommitteeSize := params.BeaconConfig().SyncCommitteeSize / subcommitteeCount sigsPerSubcommittee := make([][][]byte, subcommitteeCount) bitsPerSubcommittee := make([]bitfield.Bitfield, subcommitteeCount) - for i := uint64(0); i < subcommitteeCount; i++ { + for i := range subcommitteeCount { sigsPerSubcommittee[i] = make([][]byte, 0, subcommitteeSize) bitsPerSubcommittee[i] = ethpb.NewSyncCommitteeAggregationBits() } @@ -180,7 +180,7 @@ func (vs *Server) aggregatedSyncCommitteeMessages( // Aggregate. result := make([]*ethpb.SyncCommitteeContribution, 0, subcommitteeCount) - for i := uint64(0); i < subcommitteeCount; i++ { + for i := range subcommitteeCount { aggregatedSig := make([]byte, 96) aggregatedSig[0] = 0xC0 if len(sigsPerSubcommittee[i]) != 0 { diff --git a/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_altair_test.go b/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_altair_test.go index b97366263a..a73d41ac02 100644 --- a/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_altair_test.go +++ b/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_altair_test.go @@ -271,14 +271,14 @@ func TestGetSyncAggregate_CorrectStateAtSyncCommitteePeriodBoundary(t *testing.T wrongStateHasValidators := false correctStateHasValidators := false - for i := 0; i < len(wrongStateBits); i++ { + for i := range wrongStateBits { if wrongStateBits[i] != 0 { wrongStateHasValidators = true break } } - for i := 0; i < len(correctStateBits); i++ { + for i := range correctStateBits { if correctStateBits[i] != 0 { correctStateHasValidators = true break diff --git a/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_attestations.go b/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_attestations.go index ad8a57f701..7dbd2fcab9 100644 --- a/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_attestations.go +++ b/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_attestations.go @@ -289,7 +289,7 @@ func (a proposerAtts) sortByProfitabilityUsingMaxCover_committeeAwarePacking() ( return a, nil } candidates := make([]*bitfield.Bitlist64, len(a)) - for i := 0; i < len(a); i++ { + for i := range a { var err error candidates[i], err = a[i].GetAggregationBits().ToBitlist64() if err != nil { diff --git a/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_attestations_test.go b/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_attestations_test.go index 8ae2978674..20a9434216 100644 --- 
a/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_attestations_test.go +++ b/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_attestations_test.go @@ -703,7 +703,7 @@ func Benchmark_packAttestations_Electra(b *testing.B) { r := rand.New(rand.NewSource(123)) var atts []ethpb.Att - for c := uint64(0); c < committeeCount; c++ { + for c := range committeeCount { for a := uint64(0); a < params.BeaconConfig().TargetAggregatorsPerCommittee; a++ { cb := primitives.NewAttestationCommitteeBits() cb.SetBitAt(c, true) @@ -718,7 +718,7 @@ func Benchmark_packAttestations_Electra(b *testing.B) { CommitteeBits: cb, Signature: sig.Marshal(), } - for bit := uint64(0); bit < valsPerCommittee; bit++ { + for bit := range valsPerCommittee { att.AggregationBits.SetBitAt(bit, r.Intn(100) < 2) // 2% that the bit is set } } else { @@ -728,7 +728,7 @@ func Benchmark_packAttestations_Electra(b *testing.B) { CommitteeBits: cb, Signature: sig.Marshal(), } - for bit := uint64(0); bit < valsPerCommittee; bit++ { + for bit := range valsPerCommittee { att.AggregationBits.SetBitAt(bit, r.Intn(100) < 98) // 98% that the bit is set } } @@ -745,8 +745,7 @@ func Benchmark_packAttestations_Electra(b *testing.B) { require.NoError(b, st.SetSlot(params.BeaconConfig().SlotsPerEpoch)) - b.ResetTimer() - for i := 0; i < b.N; i++ { + for b.Loop() { _, err = s.packAttestations(ctx, st, params.BeaconConfig().SlotsPerEpoch+1) require.NoError(b, err) } diff --git a/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_deneb_bench_test.go b/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_deneb_bench_test.go index 7427dd46b9..770d6da939 100644 --- a/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_deneb_bench_test.go +++ b/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_deneb_bench_test.go @@ -55,7 +55,7 @@ func setupBenchmarkData(b *testing.B, numBlobs int) (interfaces.SignedBeaconBloc // Create KZG commitments kzgCommitments := make([][]byte, numBlobs) - for i := 0; i < numBlobs; i++ { + for i := range numBlobs { kzgCommitments[i] = bytesutil.PadTo([]byte{byte(i)}, 48) } @@ -70,7 +70,7 @@ func setupBenchmarkData(b *testing.B, numBlobs int) (interfaces.SignedBeaconBloc // Create blobs blobs := make([][]byte, numBlobs) - for i := 0; i < numBlobs; i++ { + for i := range numBlobs { blobs[i] = make([]byte, fieldparams.BlobLength) // Add some variation to the blob data blobs[i][0] = byte(i) @@ -82,7 +82,7 @@ func setupBenchmarkData(b *testing.B, numBlobs int) (interfaces.SignedBeaconBloc b.Fatal(err) } kzgProofs := make([][]byte, numBlobs) - for i := 0; i < numBlobs; i++ { + for i := range numBlobs { kzgProofs[i] = proof } @@ -92,8 +92,7 @@ func setupBenchmarkData(b *testing.B, numBlobs int) (interfaces.SignedBeaconBloc func BenchmarkBuildBlobSidecars_Original_1Blob(b *testing.B) { blk, blobs, kzgProofs := setupBenchmarkData(b, 1) - b.ResetTimer() - for i := 0; i < b.N; i++ { + for b.Loop() { _, err := BuildBlobSidecarsOriginal(blk, blobs, kzgProofs) if err != nil { b.Fatal(err) @@ -104,8 +103,7 @@ func BenchmarkBuildBlobSidecars_Original_1Blob(b *testing.B) { func BenchmarkBuildBlobSidecars_Optimized_1Blob(b *testing.B) { blk, blobs, kzgProofs := setupBenchmarkData(b, 1) - b.ResetTimer() - for i := 0; i < b.N; i++ { + for b.Loop() { _, err := BuildBlobSidecars(blk, blobs, kzgProofs) if err != nil { b.Fatal(err) @@ -116,8 +114,7 @@ func BenchmarkBuildBlobSidecars_Optimized_1Blob(b *testing.B) { func BenchmarkBuildBlobSidecars_Original_2Blobs(b *testing.B) { blk, blobs, kzgProofs := setupBenchmarkData(b, 2) - b.ResetTimer() - for i := 
0; i < b.N; i++ { + for b.Loop() { _, err := BuildBlobSidecarsOriginal(blk, blobs, kzgProofs) if err != nil { b.Fatal(err) @@ -128,8 +125,7 @@ func BenchmarkBuildBlobSidecars_Original_2Blobs(b *testing.B) { func BenchmarkBuildBlobSidecars_Optimized_3Blobs(b *testing.B) { blk, blobs, kzgProofs := setupBenchmarkData(b, 3) - b.ResetTimer() - for i := 0; i < b.N; i++ { + for b.Loop() { _, err := BuildBlobSidecars(blk, blobs, kzgProofs) if err != nil { b.Fatal(err) @@ -140,8 +136,7 @@ func BenchmarkBuildBlobSidecars_Optimized_3Blobs(b *testing.B) { func BenchmarkBuildBlobSidecars_Original_3Blobs(b *testing.B) { blk, blobs, kzgProofs := setupBenchmarkData(b, 3) - b.ResetTimer() - for i := 0; i < b.N; i++ { + for b.Loop() { _, err := BuildBlobSidecarsOriginal(blk, blobs, kzgProofs) if err != nil { b.Fatal(err) @@ -152,8 +147,7 @@ func BenchmarkBuildBlobSidecars_Original_3Blobs(b *testing.B) { func BenchmarkBuildBlobSidecars_Optimized_4Blobs(b *testing.B) { blk, blobs, kzgProofs := setupBenchmarkData(b, 4) - b.ResetTimer() - for i := 0; i < b.N; i++ { + for b.Loop() { _, err := BuildBlobSidecars(blk, blobs, kzgProofs) if err != nil { b.Fatal(err) @@ -164,8 +158,7 @@ func BenchmarkBuildBlobSidecars_Optimized_4Blobs(b *testing.B) { func BenchmarkBuildBlobSidecars_Original_9Blobs(b *testing.B) { blk, blobs, kzgProofs := setupBenchmarkData(b, 9) - b.ResetTimer() - for i := 0; i < b.N; i++ { + for b.Loop() { _, err := BuildBlobSidecarsOriginal(blk, blobs, kzgProofs) if err != nil { b.Fatal(err) @@ -176,8 +169,7 @@ func BenchmarkBuildBlobSidecars_Original_9Blobs(b *testing.B) { func BenchmarkBuildBlobSidecars_Optimized_9Blobs(b *testing.B) { blk, blobs, kzgProofs := setupBenchmarkData(b, 9) - b.ResetTimer() - for i := 0; i < b.N; i++ { + for b.Loop() { _, err := BuildBlobSidecars(blk, blobs, kzgProofs) if err != nil { b.Fatal(err) @@ -190,9 +182,8 @@ func BenchmarkMerkleProofKZGCommitment_Original(b *testing.B) { blk, _, _ := setupBenchmarkData(b, 4) body := blk.Block().Body() - b.ResetTimer() - for i := 0; i < b.N; i++ { - for j := 0; j < 4; j++ { + for b.Loop() { + for j := range 4 { _, err := blocks.MerkleProofKZGCommitment(body, j) if err != nil { b.Fatal(err) @@ -205,8 +196,7 @@ func BenchmarkMerkleProofKZGCommitment_Optimized(b *testing.B) { blk, _, _ := setupBenchmarkData(b, 4) body := blk.Block().Body() - b.ResetTimer() - for i := 0; i < b.N; i++ { + for b.Loop() { // Pre-compute components once components, err := blocks.PrecomputeMerkleProofComponents(body) if err != nil { @@ -214,7 +204,7 @@ func BenchmarkMerkleProofKZGCommitment_Optimized(b *testing.B) { } // Generate proofs for each index - for j := 0; j < 4; j++ { + for j := range 4 { _, err := blocks.MerkleProofKZGCommitmentFromComponents(components, j) if err != nil { b.Fatal(err) diff --git a/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_test.go b/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_test.go index 7365b6a501..11681e5ef8 100644 --- a/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_test.go +++ b/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_test.go @@ -1121,7 +1121,7 @@ func TestProposer_ProposeBlock_OK(t *testing.T) { numberOfColumns := uint64(128) // For Fulu, we have cell proofs (blobs * numberOfColumns) cellProofs := make([][]byte, numberOfColumns) - for i := uint64(0); i < numberOfColumns; i++ { + for i := range numberOfColumns { cellProofs[i] = bytesutil.PadTo([]byte{byte(i)}, 48) } // Blob must be exactly 131072 bytes @@ -1155,7 +1155,7 @@ func TestProposer_ProposeBlock_OK(t *testing.T) { } // Create 
properly sized blobs (131072 bytes each) blobs := make([][]byte, blobCount) - for i := 0; i < blobCount; i++ { + for i := range blobCount { blob := make([]byte, 131072) blob[0] = byte(i + 1) blobs[i] = blob @@ -1244,7 +1244,7 @@ func TestProposer_ProposeBlock_OK(t *testing.T) { // Create cell proofs for Fulu blocks (128 proofs per blob) numberOfColumns := uint64(128) cellProofs := make([][]byte, numberOfColumns) - for i := uint64(0); i < numberOfColumns; i++ { + for i := range numberOfColumns { cellProofs[i] = bytesutil.PadTo([]byte{byte(i)}, 48) } // Create properly sized blob for mock builder @@ -2908,7 +2908,7 @@ func TestProposer_FilterAttestation(t *testing.T) { name: "invalid attestations", inputAtts: func() []ethpb.Att { atts := make([]ethpb.Att, 10) - for i := 0; i < len(atts); i++ { + for i := range atts { atts[i] = util.HydrateAttestation(ðpb.Attestation{ Data: ðpb.AttestationData{ CommitteeIndex: primitives.CommitteeIndex(i), @@ -2925,7 +2925,7 @@ func TestProposer_FilterAttestation(t *testing.T) { name: "filter aggregates ok", inputAtts: func() []ethpb.Att { atts := make([]ethpb.Att, 10) - for i := 0; i < len(atts); i++ { + for i := range atts { atts[i] = util.HydrateAttestation(ðpb.Attestation{ Data: ðpb.AttestationData{ CommitteeIndex: primitives.CommitteeIndex(i), @@ -3227,15 +3227,15 @@ func BenchmarkServer_PrepareBeaconProposer(b *testing.B) { } f := bytesutil.PadTo([]byte{0xFF, 0x01, 0xFF, 0x01, 0xFF, 0x01, 0xFF, 0x01, 0xFF, 0xFF, 0x01, 0xFF, 0x01, 0xFF, 0x01, 0xFF, 0x01, 0xFF}, fieldparams.FeeRecipientLength) recipients := make([]*ethpb.PrepareBeaconProposerRequest_FeeRecipientContainer, 0) - for i := 0; i < 10000; i++ { + for i := range 10000 { recipients = append(recipients, ðpb.PrepareBeaconProposerRequest_FeeRecipientContainer{FeeRecipient: f, ValidatorIndex: primitives.ValidatorIndex(i)}) } req := ðpb.PrepareBeaconProposerRequest{ Recipients: recipients, } - b.ResetTimer() - for i := 0; i < b.N; i++ { + + for b.Loop() { _, err := proposerServer.PrepareBeaconProposer(ctx, req) if err != nil { b.Fatal(err) diff --git a/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_utils_bench_test.go b/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_utils_bench_test.go index 2bc3b57da1..2d493b3368 100644 --- a/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_utils_bench_test.go +++ b/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_utils_bench_test.go @@ -58,7 +58,7 @@ func BenchmarkProposerAtts_sortByProfitability(b *testing.B) { b.StopTimer() atts := aggtesting.MakeAttestationsFromBitlists(tt.inputs) b.StartTimer() - for i := 0; i < b.N; i++ { + for b.Loop() { runner(atts) } }) diff --git a/beacon-chain/rpc/prysm/v1alpha1/validator/server_test.go b/beacon-chain/rpc/prysm/v1alpha1/validator/server_test.go index 8d8f20c616..6a1e68629b 100644 --- a/beacon-chain/rpc/prysm/v1alpha1/validator/server_test.go +++ b/beacon-chain/rpc/prysm/v1alpha1/validator/server_test.go @@ -268,14 +268,12 @@ func TestWaitForChainStart_HeadStateDoesNotExist(t *testing.T) { mockStream.EXPECT().Context().Return(t.Context()) wg := new(sync.WaitGroup) - wg.Add(1) - go func() { + wg.Go(func() { err := Server.WaitForChainStart(&emptypb.Empty{}, mockStream) if s, _ := status.FromError(err); s.Code() != codes.Canceled { assert.NoError(t, err) } - wg.Done() - }() + }) util.WaitTimeout(wg, time.Second) } diff --git a/beacon-chain/rpc/prysm/v1alpha1/validator/status_test.go b/beacon-chain/rpc/prysm/v1alpha1/validator/status_test.go index b38b96bad2..baa6105c23 100644 --- 
a/beacon-chain/rpc/prysm/v1alpha1/validator/status_test.go +++ b/beacon-chain/rpc/prysm/v1alpha1/validator/status_test.go @@ -661,7 +661,7 @@ func TestValidatorStatus_CorrectActivationQueue(t *testing.T) { depositCache, err := depositsnapshot.New() require.NoError(t, err) - for i := 0; i < 6; i++ { + for i := range 6 { depData := ðpb.Deposit_Data{ PublicKey: pubKey(uint64(i)), Signature: bytesutil.PadTo([]byte("hi"), 96), @@ -975,7 +975,7 @@ func TestServer_CheckDoppelGanger(t *testing.T) { ValidatorRequests: make([]*ethpb.DoppelGangerRequest_ValidatorRequest, 0), } response := ðpb.DoppelGangerResponse{Responses: make([]*ethpb.DoppelGangerResponse_ValidatorResponse, 0)} - for i := 0; i < 3; i++ { + for i := range 3 { request.ValidatorRequests = append(request.ValidatorRequests, ðpb.DoppelGangerRequest_ValidatorRequest{ PublicKey: keys[i].PublicKey().Marshal(), Epoch: 1, @@ -1011,7 +1011,7 @@ func TestServer_CheckDoppelGanger(t *testing.T) { ValidatorRequests: make([]*ethpb.DoppelGangerRequest_ValidatorRequest, 0), } response := ðpb.DoppelGangerResponse{Responses: make([]*ethpb.DoppelGangerResponse_ValidatorResponse, 0)} - for i := 0; i < 2; i++ { + for i := range 2 { request.ValidatorRequests = append(request.ValidatorRequests, ðpb.DoppelGangerRequest_ValidatorRequest{ PublicKey: keys[i].PublicKey().Marshal(), Epoch: 1, @@ -1058,7 +1058,7 @@ func TestServer_CheckDoppelGanger(t *testing.T) { ValidatorRequests: make([]*ethpb.DoppelGangerRequest_ValidatorRequest, 0), } response := ðpb.DoppelGangerResponse{Responses: make([]*ethpb.DoppelGangerResponse_ValidatorResponse, 0)} - for i := 0; i < 2; i++ { + for i := range 2 { request.ValidatorRequests = append(request.ValidatorRequests, ðpb.DoppelGangerRequest_ValidatorRequest{ PublicKey: keys[i].PublicKey().Marshal(), Epoch: 1, @@ -1161,7 +1161,7 @@ func TestServer_CheckDoppelGanger(t *testing.T) { ValidatorRequests: make([]*ethpb.DoppelGangerRequest_ValidatorRequest, 0), } response := ðpb.DoppelGangerResponse{Responses: make([]*ethpb.DoppelGangerResponse_ValidatorResponse, 0)} - for i := 0; i < 15; i++ { + for i := range 15 { request.ValidatorRequests = append(request.ValidatorRequests, ðpb.DoppelGangerRequest_ValidatorRequest{ PublicKey: keys[i].PublicKey().Marshal(), Epoch: 2, @@ -1198,7 +1198,7 @@ func TestServer_CheckDoppelGanger(t *testing.T) { ValidatorRequests: make([]*ethpb.DoppelGangerRequest_ValidatorRequest, 0), } response := ðpb.DoppelGangerResponse{Responses: make([]*ethpb.DoppelGangerResponse_ValidatorResponse, 0)} - for i := 0; i < 15; i++ { + for i := range 15 { request.ValidatorRequests = append(request.ValidatorRequests, ðpb.DoppelGangerRequest_ValidatorRequest{ PublicKey: keys[i].PublicKey().Marshal(), Epoch: 1, diff --git a/beacon-chain/rpc/prysm/validator/handlers_test.go b/beacon-chain/rpc/prysm/validator/handlers_test.go index 730f144348..0900e7226f 100644 --- a/beacon-chain/rpc/prysm/validator/handlers_test.go +++ b/beacon-chain/rpc/prysm/validator/handlers_test.go @@ -85,7 +85,7 @@ func TestServer_GetValidatorParticipation_CurrentAndPrevEpoch(t *testing.T) { validators := make([]*ethpb.Validator, validatorCount) balances := make([]uint64, validatorCount) - for i := 0; i < len(validators); i++ { + for i := range validators { validators[i] = ðpb.Validator{ PublicKey: bytesutil.ToBytes(uint64(i), 48), WithdrawalCredentials: make([]byte, 32), @@ -189,7 +189,7 @@ func TestServer_GetValidatorParticipation_OrphanedUntilGenesis(t *testing.T) { validators := make([]*ethpb.Validator, validatorCount) balances := make([]uint64, 
validatorCount) - for i := 0; i < len(validators); i++ { + for i := range validators { validators[i] = ðpb.Validator{ PublicKey: bytesutil.ToBytes(uint64(i), 48), WithdrawalCredentials: make([]byte, 32), @@ -446,7 +446,7 @@ func TestServer_GetValidatorActiveSetChanges(t *testing.T) { require.NoError(t, err) require.NoError(t, headState.SetSlot(0)) require.NoError(t, headState.SetValidators(validators)) - for i := 0; i < len(validators); i++ { + for i := range validators { activationEpoch := params.BeaconConfig().FarFutureEpoch withdrawableEpoch := params.BeaconConfig().FarFutureEpoch exitEpoch := params.BeaconConfig().FarFutureEpoch diff --git a/beacon-chain/rpc/prysm/validator/validator_performance_test.go b/beacon-chain/rpc/prysm/validator/validator_performance_test.go index 5d055b3769..795e2ae285 100644 --- a/beacon-chain/rpc/prysm/validator/validator_performance_test.go +++ b/beacon-chain/rpc/prysm/validator/validator_performance_test.go @@ -425,7 +425,7 @@ func setHeadState(t *testing.T, headState state.BeaconState, publicKeys [][48]by require.NoError(t, headState.SetSlot(params.BeaconConfig().SlotsPerEpoch.Mul(uint64(epoch+1)))) if headState.Version() < version.Altair { atts := make([]*ethpb.PendingAttestation, 3) - for i := 0; i < len(atts); i++ { + for i := range atts { atts[i] = ðpb.PendingAttestation{ Data: ðpb.AttestationData{ Target: ðpb.Checkpoint{Root: make([]byte, 32)}, diff --git a/beacon-chain/rpc/service.go b/beacon-chain/rpc/service.go index 1aeeb18448..8953780f8a 100644 --- a/beacon-chain/rpc/service.go +++ b/beacon-chain/rpc/service.go @@ -377,7 +377,7 @@ func (s *Service) Status() error { // Stream interceptor for new validator client connections to the beacon node. func (s *Service) validatorStreamConnectionInterceptor( - srv interface{}, + srv any, ss grpc.ServerStream, _ *grpc.StreamServerInfo, handler grpc.StreamHandler, @@ -389,10 +389,10 @@ func (s *Service) validatorStreamConnectionInterceptor( // Unary interceptor for new validator client connections to the beacon node. 
func (s *Service) validatorUnaryConnectionInterceptor( ctx context.Context, - req interface{}, + req any, _ *grpc.UnaryServerInfo, handler grpc.UnaryHandler, -) (interface{}, error) { +) (any, error) { s.logNewClientConnection(ctx) return handler(ctx, req) } diff --git a/beacon-chain/rpc/testutil/db.go b/beacon-chain/rpc/testutil/db.go index 5eb563bc5d..41871680c4 100644 --- a/beacon-chain/rpc/testutil/db.go +++ b/beacon-chain/rpc/testutil/db.go @@ -26,7 +26,7 @@ func FillDBWithBlocks(ctx context.Context, t *testing.T, beaconDB db.Database) ( count := primitives.Slot(100) blks := make([]interfaces.ReadOnlySignedBeaconBlock, count) blkContainers := make([]*ethpbalpha.BeaconBlockContainer, count) - for i := primitives.Slot(0); i < count; i++ { + for i := range count { b := util.NewBeaconBlock() b.Block.Slot = i b.Block.ParentRoot = bytesutil.PadTo([]byte{uint8(i)}, 32) diff --git a/beacon-chain/slasher/chunks.go b/beacon-chain/slasher/chunks.go index aa979a32a8..074c3cb2d1 100644 --- a/beacon-chain/slasher/chunks.go +++ b/beacon-chain/slasher/chunks.go @@ -101,7 +101,7 @@ func EmptyMinSpanChunksSlice(params *Parameters) *MinSpanChunksSlice { params: params, } data := make([]uint16, params.chunkSize*params.validatorChunkSize) - for i := 0; i < len(data); i++ { + for i := range data { data[i] = m.NeutralElement() } m.data = data @@ -116,7 +116,7 @@ func EmptyMaxSpanChunksSlice(params *Parameters) *MaxSpanChunksSlice { params: params, } data := make([]uint16, params.chunkSize*params.validatorChunkSize) - for i := 0; i < len(data); i++ { + for i := range data { data[i] = m.NeutralElement() } m.data = data diff --git a/beacon-chain/slasher/detect_attestations.go b/beacon-chain/slasher/detect_attestations.go index 28cb94e4c8..24c83634aa 100644 --- a/beacon-chain/slasher/detect_attestations.go +++ b/beacon-chain/slasher/detect_attestations.go @@ -29,9 +29,7 @@ func (s *Service) checkSlashableAttestations( return nil, errors.Wrap(err, "could not check slashable double votes") } - for root, slashing := range doubleVoteSlashings { - slashings[root] = slashing - } + maps.Copy(slashings, doubleVoteSlashings) // Save the attestation records to our database. // If multiple attestations are provided for the same validator index + target epoch combination, @@ -46,9 +44,7 @@ func (s *Service) checkSlashableAttestations( return nil, errors.Wrap(err, "could not check slashable surround votes") } - for root, slashing := range surroundSlashings { - slashings[root] = slashing - } + maps.Copy(slashings, surroundSlashings) return slashings, nil } @@ -97,9 +93,7 @@ func (s *Service) checkSurroundVotes( return nil, errors.Wrapf(err, "could not update min attestation spans for validator chunk index %d", validatorChunkIndex) } - for root, slashing := range surroundingSlashings { - slashings[root] = slashing - } + maps.Copy(slashings, surroundingSlashings) // Check for surrounded votes. surroundedSlashings, err := s.updateSpans(ctx, maxChunkByChunkIndex, attWrappersByChunkIndex, slashertypes.MaxSpan, validatorChunkIndex, currentEpoch) @@ -107,9 +101,7 @@ func (s *Service) checkSurroundVotes( return nil, errors.Wrapf(err, "could not update max attestation spans for validator chunk index %d", validatorChunkIndex) } - for root, slashing := range surroundedSlashings { - slashings[root] = slashing - } + maps.Copy(slashings, surroundedSlashings) // Memoize the updated chunks for the current validator chunk index. 
minChunkByChunkIndexByValidatorChunkIndex[validatorChunkIndex] = minChunkByChunkIndex @@ -755,7 +747,7 @@ func (s *Service) loadChunksFromDisk( // Initialize the chunks. chunksByChunkIdx := make(map[uint64]Chunker, chunksCount) - for i := 0; i < len(rawChunks); i++ { + for i := range rawChunks { // If the chunk exists in the database, we initialize it from the raw bytes data. // If it does not exist, we initialize an empty chunk. var ( diff --git a/beacon-chain/slasher/detect_attestations_test.go b/beacon-chain/slasher/detect_attestations_test.go index ee5c9d2c3a..c1690aa34b 100644 --- a/beacon-chain/slasher/detect_attestations_test.go +++ b/beacon-chain/slasher/detect_attestations_test.go @@ -659,7 +659,7 @@ func Test_processAttestations(t *testing.T) { validators := make([]*ethpb.Validator, numVals) privateKeys := make([]bls.SecretKey, numVals) - for i := uint64(0); i < numVals; i++ { + for i := range numVals { // Create a random private key. privateKey, err := bls.RandKey() require.NoError(t, err) @@ -1463,7 +1463,7 @@ func Benchmark_saveChunksToDisk(b *testing.B) { chunkByChunkIndexByValidatorChunkIndex := make(map[uint64]map[uint64]Chunker, validatorsChunksCount) // Populate the chunkers. - for i := 0; i < validatorsChunksCount; i++ { + for i := range validatorsChunksCount { data := make([]uint16, params.chunkSize) for j := 0; j < int(params.chunkSize); j++ { data[j] = uint16(rand.Intn(1 << 16)) @@ -1481,10 +1481,9 @@ func Benchmark_saveChunksToDisk(b *testing.B) { require.NoError(b, err) // Reset the benchmark timer. - b.ResetTimer() // Run the benchmark. - for i := 0; i < b.N; i++ { + for b.Loop() { b.StartTimer() err = service.saveChunksToDisk(ctx, slashertypes.MinSpan, chunkByChunkIndexByValidatorChunkIndex) b.StopTimer() @@ -1553,11 +1552,11 @@ func BenchmarkCheckSlashableAttestations(b *testing.B) { func runAttestationsBenchmark(b *testing.B, s *Service, numAtts, numValidators uint64) { indices := make([]uint64, numValidators) - for i := uint64(0); i < numValidators; i++ { + for i := range numValidators { indices[i] = i } atts := make([]*slashertypes.IndexedAttestationWrapper, numAtts) - for i := uint64(0); i < numAtts; i++ { + for i := range numAtts { source := primitives.Epoch(i) target := primitives.Epoch(i + 1) var signingRoot [32]byte @@ -1571,7 +1570,7 @@ func runAttestationsBenchmark(b *testing.B, s *Service, numAtts, numValidators u signingRoot[:], /* signingRoot */ ) } - for i := 0; i < b.N; i++ { + for b.Loop() { numEpochs := numAtts totalSeconds := numEpochs * uint64(params.BeaconConfig().SlotsPerEpoch) * params.BeaconConfig().SecondsPerSlot genesisTime := time.Now().Add(-time.Second * time.Duration(totalSeconds)) @@ -1623,7 +1622,7 @@ func Benchmark_checkSurroundVotes(b *testing.B) { attestingValidatorsCount := validatorsCount / slotsPerEpoch validatorIndexes := make([]uint64, attestingValidatorsCount) - for i := 0; i < attestingValidatorsCount; i++ { + for i := range attestingValidatorsCount { validatorIndexes[i] = 32 * uint64(i) } @@ -1633,8 +1632,8 @@ func Benchmark_checkSurroundVotes(b *testing.B) { attWrappers := []*slashertypes.IndexedAttestationWrapper{attWrapper} // Run the benchmark. 
- b.ResetTimer() - for i := 0; i < b.N; i++ { + + for b.Loop() { b.StartTimer() _, err = service.checkSurroundVotes(ctx, attWrappers, currentEpoch) b.StopTimer() diff --git a/beacon-chain/slasher/service.go b/beacon-chain/slasher/service.go index 870fcb5c37..f6b2cc426a 100644 --- a/beacon-chain/slasher/service.go +++ b/beacon-chain/slasher/service.go @@ -99,7 +99,7 @@ func (s *Service) run() { } numVals := headState.NumValidators() validatorIndices := make([]primitives.ValidatorIndex, numVals) - for i := 0; i < numVals; i++ { + for i := range numVals { validatorIndices[i] = primitives.ValidatorIndex(i) } start := time.Now() diff --git a/beacon-chain/state/fieldtrie/field_trie.go b/beacon-chain/state/fieldtrie/field_trie.go index abcb25b5cc..953f49eae2 100644 --- a/beacon-chain/state/fieldtrie/field_trie.go +++ b/beacon-chain/state/fieldtrie/field_trie.go @@ -40,7 +40,7 @@ type FieldTrie struct { // NewFieldTrie is the constructor for the field trie data structure. It creates the corresponding // trie according to the given parameters. Depending on whether the field is a basic/composite array // which is either fixed/variable length, it will appropriately determine the trie. -func NewFieldTrie(field types.FieldIndex, fieldInfo types.DataType, elements interface{}, length uint64) (*FieldTrie, error) { +func NewFieldTrie(field types.FieldIndex, fieldInfo types.DataType, elements any, length uint64) (*FieldTrie, error) { if elements == nil { return &FieldTrie{ field: field, @@ -92,14 +92,14 @@ func NewFieldTrie(field types.FieldIndex, fieldInfo types.DataType, elements int numOfElems: numOfElems, }, nil default: - return nil, errors.Errorf("unrecognized data type in field map: %v", reflect.TypeOf(fieldInfo).Name()) + return nil, errors.Errorf("unrecognized data type in field map: %v", reflect.TypeFor[types.DataType]().Name()) } } // RecomputeTrie rebuilds the affected branches in the trie according to the provided // changed indices and elements. This recomputes the trie according to the particular // field the trie is based on. 
-func (f *FieldTrie) RecomputeTrie(indices []uint64, elements interface{}) ([32]byte, error) { +func (f *FieldTrie) RecomputeTrie(indices []uint64, elements any) ([32]byte, error) { f.Lock() defer f.Unlock() var fieldRoot [32]byte @@ -162,7 +162,7 @@ func (f *FieldTrie) RecomputeTrie(indices []uint64, elements interface{}) ([32]b } return stateutil.AddInMixin(fieldRoot, uint64(f.numOfElems)) default: - return [32]byte{}, errors.Errorf("unrecognized data type in field map: %v", reflect.TypeOf(f.dataType).Name()) + return [32]byte{}, errors.Errorf("unrecognized data type in field map: %v", reflect.TypeFor[types.DataType]().Name()) } } @@ -251,7 +251,7 @@ func (f *FieldTrie) TrieRoot() ([32]byte, error) { trieRoot := *f.fieldLayers[len(f.fieldLayers)-1][0] return stateutil.AddInMixin(trieRoot, uint64(f.numOfElems)) default: - return [32]byte{}, errors.Errorf("unrecognized data type in field map: %v", reflect.TypeOf(f.dataType).Name()) + return [32]byte{}, errors.Errorf("unrecognized data type in field map: %v", reflect.TypeFor[types.DataType]().Name()) } } diff --git a/beacon-chain/state/fieldtrie/field_trie_helpers.go b/beacon-chain/state/fieldtrie/field_trie_helpers.go index 07667eb0eb..dad12a85da 100644 --- a/beacon-chain/state/fieldtrie/field_trie_helpers.go +++ b/beacon-chain/state/fieldtrie/field_trie_helpers.go @@ -31,7 +31,7 @@ func (f *FieldTrie) validateIndices(idxs []uint64) error { return nil } -func validateElements(field types.FieldIndex, fieldInfo types.DataType, elements interface{}, length uint64) error { +func validateElements(field types.FieldIndex, fieldInfo types.DataType, elements any, length uint64) error { if fieldInfo == types.CompressedArray { comLength, err := field.ElemsInChunk() if err != nil { @@ -54,7 +54,7 @@ func validateElements(field types.FieldIndex, fieldInfo types.DataType, elements } // fieldConverters converts the corresponding field and the provided elements to the appropriate roots. 
-func fieldConverters(field types.FieldIndex, indices []uint64, elements interface{}, convertAll bool) ([][32]byte, error) { +func fieldConverters(field types.FieldIndex, indices []uint64, elements any, convertAll bool) ([][32]byte, error) { switch field { case types.BlockRoots, types.StateRoots, types.RandaoMixes: return convertRoots(indices, elements, convertAll) @@ -71,7 +71,7 @@ func fieldConverters(field types.FieldIndex, indices []uint64, elements interfac } } -func convertRoots(indices []uint64, elements interface{}, convertAll bool) ([][32]byte, error) { +func convertRoots(indices []uint64, elements any, convertAll bool) ([][32]byte, error) { switch castedType := elements.(type) { case customtypes.BlockRoots: return handle32ByteMVslice(multi_value_slice.BuildEmptyCompositeSlice[[32]byte](castedType), indices, convertAll) @@ -86,7 +86,7 @@ func convertRoots(indices []uint64, elements interface{}, convertAll bool) ([][3 } } -func convertEth1DataVotes(indices []uint64, elements interface{}, convertAll bool) ([][32]byte, error) { +func convertEth1DataVotes(indices []uint64, elements any, convertAll bool) ([][32]byte, error) { val, ok := elements.([]*ethpb.Eth1Data) if !ok { return nil, errors.Errorf("Wanted type of %T but got %T", []*ethpb.Eth1Data{}, elements) @@ -94,7 +94,7 @@ func convertEth1DataVotes(indices []uint64, elements interface{}, convertAll boo return handleEth1DataSlice(val, indices, convertAll) } -func convertValidators(indices []uint64, elements interface{}, convertAll bool) ([][32]byte, error) { +func convertValidators(indices []uint64, elements any, convertAll bool) ([][32]byte, error) { switch casted := elements.(type) { case []*ethpb.Validator: return handleValidatorMVSlice(multi_value_slice.BuildEmptyCompositeSlice[*ethpb.Validator](casted), indices, convertAll) @@ -105,7 +105,7 @@ func convertValidators(indices []uint64, elements interface{}, convertAll bool) } } -func convertAttestations(indices []uint64, elements interface{}, convertAll bool) ([][32]byte, error) { +func convertAttestations(indices []uint64, elements any, convertAll bool) ([][32]byte, error) { val, ok := elements.([]*ethpb.PendingAttestation) if !ok { return nil, errors.Errorf("Wanted type of %T but got %T", []*ethpb.PendingAttestation{}, elements) @@ -113,7 +113,7 @@ func convertAttestations(indices []uint64, elements interface{}, convertAll bool return handlePendingAttestationSlice(val, indices, convertAll) } -func convertBalances(indices []uint64, elements interface{}, convertAll bool) ([][32]byte, error) { +func convertBalances(indices []uint64, elements any, convertAll bool) ([][32]byte, error) { switch casted := elements.(type) { case []uint64: return handleBalanceMVSlice(multi_value_slice.BuildEmptyCompositeSlice[uint64](casted), indices, convertAll) diff --git a/beacon-chain/state/fieldtrie/helpers_test.go b/beacon-chain/state/fieldtrie/helpers_test.go index 3ba766e9dd..0a97f3c4b3 100644 --- a/beacon-chain/state/fieldtrie/helpers_test.go +++ b/beacon-chain/state/fieldtrie/helpers_test.go @@ -94,7 +94,7 @@ func TestFieldTrie_NativeState_fieldConvertersNative(t *testing.T) { type args struct { field types.FieldIndex indices []uint64 - elements interface{} + elements any convertAll bool } tests := []struct { diff --git a/beacon-chain/state/interfaces.go b/beacon-chain/state/interfaces.go index 51511fe9f5..c884a92d50 100644 --- a/beacon-chain/state/interfaces.go +++ b/beacon-chain/state/interfaces.go @@ -63,8 +63,8 @@ type ReadOnlyBeaconState interface { ReadOnlyDeposits 
ReadOnlyConsolidations ReadOnlyProposerLookahead - ToProtoUnsafe() interface{} - ToProto() interface{} + ToProtoUnsafe() any + ToProto() any GenesisTime() time.Time GenesisValidatorsRoot() []byte Slot() primitives.Slot diff --git a/beacon-chain/state/state-native/getters_attestation.go b/beacon-chain/state/state-native/getters_attestation.go index ee3a24e2ca..15a01a99fd 100644 --- a/beacon-chain/state/state-native/getters_attestation.go +++ b/beacon-chain/state/state-native/getters_attestation.go @@ -29,7 +29,7 @@ func (b *BeaconState) previousEpochAttestationsVal() []*ethpb.PendingAttestation } res := make([]*ethpb.PendingAttestation, len(b.previousEpochAttestations)) - for i := 0; i < len(res); i++ { + for i := range res { res[i] = b.previousEpochAttestations[i].Copy() } return res @@ -59,7 +59,7 @@ func (b *BeaconState) currentEpochAttestationsVal() []*ethpb.PendingAttestation } res := make([]*ethpb.PendingAttestation, len(b.currentEpochAttestations)) - for i := 0; i < len(res); i++ { + for i := range res { res[i] = b.currentEpochAttestations[i].Copy() } return res diff --git a/beacon-chain/state/state-native/getters_eth1.go b/beacon-chain/state/state-native/getters_eth1.go index 6abf95bc28..9884597805 100644 --- a/beacon-chain/state/state-native/getters_eth1.go +++ b/beacon-chain/state/state-native/getters_eth1.go @@ -48,7 +48,7 @@ func (b *BeaconState) eth1DataVotesVal() []*ethpb.Eth1Data { } res := make([]*ethpb.Eth1Data, len(b.eth1DataVotes)) - for i := 0; i < len(res); i++ { + for i := range res { res[i] = b.eth1DataVotes[i].Copy() } return res diff --git a/beacon-chain/state/state-native/getters_participation_test.go b/beacon-chain/state/state-native/getters_participation_test.go index 7e18caad65..96098de404 100644 --- a/beacon-chain/state/state-native/getters_participation_test.go +++ b/beacon-chain/state/state-native/getters_participation_test.go @@ -11,7 +11,7 @@ import ( func TestState_UnrealizedCheckpointBalances(t *testing.T) { validators := make([]*ethpb.Validator, params.BeaconConfig().MinGenesisActiveValidatorCount) balances := make([]uint64, params.BeaconConfig().MinGenesisActiveValidatorCount) - for i := 0; i < len(validators); i++ { + for i := range validators { validators[i] = ðpb.Validator{ ExitEpoch: params.BeaconConfig().FarFutureEpoch, EffectiveBalance: params.BeaconConfig().MaxEffectiveBalance, diff --git a/beacon-chain/state/state-native/getters_state.go b/beacon-chain/state/state-native/getters_state.go index bb00922978..86a503fdc2 100644 --- a/beacon-chain/state/state-native/getters_state.go +++ b/beacon-chain/state/state-native/getters_state.go @@ -9,7 +9,7 @@ import ( // ToProtoUnsafe returns the pointer value of the underlying // beacon state proto object, bypassing immutability. Use with care. -func (b *BeaconState) ToProtoUnsafe() interface{} { +func (b *BeaconState) ToProtoUnsafe() any { if b == nil { return nil } @@ -265,7 +265,7 @@ func (b *BeaconState) ToProtoUnsafe() interface{} { } // ToProto the beacon state into a protobuf for usage. -func (b *BeaconState) ToProto() interface{} { +func (b *BeaconState) ToProto() any { if b == nil { return nil } @@ -552,7 +552,7 @@ func (b *BeaconState) StateRootAtIndex(idx uint64) ([]byte, error) { // ProtobufBeaconStatePhase0 transforms an input into beacon state in the form of protobuf. // Error is returned if the input is not type protobuf beacon state. 
-func ProtobufBeaconStatePhase0(s interface{}) (*ethpb.BeaconState, error) { +func ProtobufBeaconStatePhase0(s any) (*ethpb.BeaconState, error) { pbState, ok := s.(*ethpb.BeaconState) if !ok { return nil, errors.New("input is not type ethpb.BeaconState") @@ -562,7 +562,7 @@ func ProtobufBeaconStatePhase0(s interface{}) (*ethpb.BeaconState, error) { // ProtobufBeaconStateAltair transforms an input into beacon state Altair in the form of protobuf. // Error is returned if the input is not type protobuf beacon state. -func ProtobufBeaconStateAltair(s interface{}) (*ethpb.BeaconStateAltair, error) { +func ProtobufBeaconStateAltair(s any) (*ethpb.BeaconStateAltair, error) { pbState, ok := s.(*ethpb.BeaconStateAltair) if !ok { return nil, errors.New("input is not type pb.BeaconStateAltair") @@ -572,7 +572,7 @@ func ProtobufBeaconStateAltair(s interface{}) (*ethpb.BeaconStateAltair, error) // ProtobufBeaconStateBellatrix transforms an input into beacon state Bellatrix in the form of protobuf. // Error is returned if the input is not type protobuf beacon state. -func ProtobufBeaconStateBellatrix(s interface{}) (*ethpb.BeaconStateBellatrix, error) { +func ProtobufBeaconStateBellatrix(s any) (*ethpb.BeaconStateBellatrix, error) { pbState, ok := s.(*ethpb.BeaconStateBellatrix) if !ok { return nil, errors.New("input is not type pb.BeaconStateBellatrix") @@ -582,7 +582,7 @@ func ProtobufBeaconStateBellatrix(s interface{}) (*ethpb.BeaconStateBellatrix, e // ProtobufBeaconStateCapella transforms an input into beacon state Capella in the form of protobuf. // Error is returned if the input is not type protobuf beacon state. -func ProtobufBeaconStateCapella(s interface{}) (*ethpb.BeaconStateCapella, error) { +func ProtobufBeaconStateCapella(s any) (*ethpb.BeaconStateCapella, error) { pbState, ok := s.(*ethpb.BeaconStateCapella) if !ok { return nil, errors.New("input is not type pb.BeaconStateCapella") @@ -592,7 +592,7 @@ func ProtobufBeaconStateCapella(s interface{}) (*ethpb.BeaconStateCapella, error // ProtobufBeaconStateDeneb transforms an input into beacon state Deneb in the form of protobuf. // Error is returned if the input is not type protobuf beacon state. -func ProtobufBeaconStateDeneb(s interface{}) (*ethpb.BeaconStateDeneb, error) { +func ProtobufBeaconStateDeneb(s any) (*ethpb.BeaconStateDeneb, error) { pbState, ok := s.(*ethpb.BeaconStateDeneb) if !ok { return nil, errors.New("input is not type pb.BeaconStateDeneb") @@ -602,7 +602,7 @@ func ProtobufBeaconStateDeneb(s interface{}) (*ethpb.BeaconStateDeneb, error) { // ProtobufBeaconStateElectra transforms an input into beacon state Electra in the form of protobuf. // Error is returned if the input is not type protobuf beacon state. -func ProtobufBeaconStateElectra(s interface{}) (*ethpb.BeaconStateElectra, error) { +func ProtobufBeaconStateElectra(s any) (*ethpb.BeaconStateElectra, error) { pbState, ok := s.(*ethpb.BeaconStateElectra) if !ok { return nil, errors.New("input is not type pb.BeaconStateElectra") @@ -612,7 +612,7 @@ func ProtobufBeaconStateElectra(s interface{}) (*ethpb.BeaconStateElectra, error // ProtobufBeaconStateFulu transforms an input into beacon state Fulu in the form of protobuf. // Error is returned if the input is not type protobuf beacon state. 
-func ProtobufBeaconStateFulu(s interface{}) (*ethpb.BeaconStateFulu, error) { +func ProtobufBeaconStateFulu(s any) (*ethpb.BeaconStateFulu, error) { pbState, ok := s.(*ethpb.BeaconStateFulu) if !ok { return nil, errors.New("input is not type pb.BeaconStateFulu") diff --git a/beacon-chain/state/state-native/getters_validator.go b/beacon-chain/state/state-native/getters_validator.go index 037418e167..5029887059 100644 --- a/beacon-chain/state/state-native/getters_validator.go +++ b/beacon-chain/state/state-native/getters_validator.go @@ -34,7 +34,7 @@ func (b *BeaconState) validatorsVal() []*ethpb.Validator { v = b.validatorsMultiValue.Value(b) res := make([]*ethpb.Validator, len(v)) - for i := 0; i < len(res); i++ { + for i := range res { val := v[i] if val == nil { continue @@ -52,7 +52,7 @@ func (b *BeaconState) validatorsReadOnlyVal() []state.ReadOnlyValidator { res := make([]state.ReadOnlyValidator, len(v)) var err error - for i := 0; i < len(res); i++ { + for i := range res { val := v[i] if val == nil { continue @@ -172,7 +172,7 @@ func (b *BeaconState) PublicKeys() ([][fieldparams.BLSPubkeyLength]byte, error) l := b.validatorsLen() res := make([][fieldparams.BLSPubkeyLength]byte, l) - for i := 0; i < l; i++ { + for i := range l { val, err := b.validatorsMultiValue.At(b, uint64(i)) if err != nil { return nil, err @@ -201,7 +201,7 @@ func (b *BeaconState) ReadFromEveryValidator(f func(idx int, val state.ReadOnlyV return state.ErrNilValidatorsInState } l := b.validatorsMultiValue.Len(b) - for i := 0; i < l; i++ { + for i := range l { v, err := b.validatorsMultiValue.At(b, uint64(i)) if err != nil { return err diff --git a/beacon-chain/state/state-native/getters_withdrawal.go b/beacon-chain/state/state-native/getters_withdrawal.go index 3b6ea2a3d0..fad6beeafd 100644 --- a/beacon-chain/state/state-native/getters_withdrawal.go +++ b/beacon-chain/state/state-native/getters_withdrawal.go @@ -161,7 +161,7 @@ func (b *BeaconState) ExpectedWithdrawals() ([]*enginev1.Withdrawal, uint64, err validatorsLen := b.validatorsLen() bound := min(uint64(validatorsLen), params.BeaconConfig().MaxValidatorsPerWithdrawalsSweep) - for i := uint64(0); i < bound; i++ { + for range bound { val, err := b.validatorAtIndexReadOnly(validatorIndex) if err != nil { return nil, 0, errors.Wrapf(err, "could not retrieve validator at index %d", validatorIndex) diff --git a/beacon-chain/state/state-native/getters_withdrawal_test.go b/beacon-chain/state/state-native/getters_withdrawal_test.go index 4154504dc2..b2f64c702c 100644 --- a/beacon-chain/state/state-native/getters_withdrawal_test.go +++ b/beacon-chain/state/state-native/getters_withdrawal_test.go @@ -92,7 +92,7 @@ func TestExpectedWithdrawals(t *testing.T) { vals := make([]*ethpb.Validator, 100) balances := make([]uint64, 100) - for i := 0; i < 100; i++ { + for i := range 100 { balances[i] = params.BeaconConfig().MaxEffectiveBalance val := ðpb.Validator{ WithdrawalCredentials: make([]byte, 32), @@ -124,7 +124,7 @@ func TestExpectedWithdrawals(t *testing.T) { vals := make([]*ethpb.Validator, 100) balances := make([]uint64, 100) - for i := 0; i < 100; i++ { + for i := range 100 { balances[i] = params.BeaconConfig().MaxEffectiveBalance val := ðpb.Validator{ WithdrawalCredentials: make([]byte, 32), diff --git a/beacon-chain/state/state-native/setters_attestation_test.go b/beacon-chain/state/state-native/setters_attestation_test.go index ee16f0c622..d591dda47a 100644 --- a/beacon-chain/state/state-native/setters_attestation_test.go +++ 
b/beacon-chain/state/state-native/setters_attestation_test.go @@ -29,16 +29,16 @@ func TestBeaconState_RotateAttestations(t *testing.T) { func TestAppendBeyondIndicesLimit(t *testing.T) { zeroHash := params.BeaconConfig().ZeroHash mockblockRoots := make([][]byte, params.BeaconConfig().SlotsPerHistoricalRoot) - for i := 0; i < len(mockblockRoots); i++ { + for i := range mockblockRoots { mockblockRoots[i] = zeroHash[:] } mockstateRoots := make([][]byte, params.BeaconConfig().SlotsPerHistoricalRoot) - for i := 0; i < len(mockstateRoots); i++ { + for i := range mockstateRoots { mockstateRoots[i] = zeroHash[:] } mockrandaoMixes := make([][]byte, params.BeaconConfig().EpochsPerHistoricalVector) - for i := 0; i < len(mockrandaoMixes); i++ { + for i := range mockrandaoMixes { mockrandaoMixes[i] = zeroHash[:] } st, err := InitializeFromProtoPhase0(ðpb.BeaconState{ @@ -61,13 +61,13 @@ func TestAppendBeyondIndicesLimit(t *testing.T) { } _, err = st.HashTreeRoot(t.Context()) require.NoError(t, err) - for i := 0; i < 10; i++ { + for range 10 { assert.NoError(t, st.AppendValidator(ðpb.Validator{})) } assert.Equal(t, false, s.rebuildTrie[types.Validators]) assert.NotEqual(t, len(s.dirtyIndices[types.Validators]), 0) - for i := 0; i < indicesLimit; i++ { + for range indicesLimit { assert.NoError(t, st.AppendValidator(ðpb.Validator{})) } assert.Equal(t, true, s.rebuildTrie[types.Validators]) @@ -88,10 +88,8 @@ func BenchmarkAppendPreviousEpochAttestations(b *testing.B) { require.NoError(b, err) } - b.ResetTimer() - ref := st.Copy() - for i := 0; i < b.N; i++ { + for i := 0; b.Loop(); i++ { err := ref.AppendPreviousEpochAttestations(ðpb.PendingAttestation{Data: ðpb.AttestationData{Slot: primitives.Slot(i)}}) require.NoError(b, err) ref = st.Copy() diff --git a/beacon-chain/state/state-native/setters_eth1_test.go b/beacon-chain/state/state-native/setters_eth1_test.go index 5908abc159..ffa8454c58 100644 --- a/beacon-chain/state/state-native/setters_eth1_test.go +++ b/beacon-chain/state/state-native/setters_eth1_test.go @@ -30,7 +30,7 @@ func BenchmarkAppendEth1DataVotes(b *testing.B) { ref := st.Copy() - for i := 0; i < b.N; i++ { + for i := 0; b.Loop(); i++ { err := ref.AppendEth1DataVotes(ðpb.Eth1Data{DepositCount: uint64(i)}) require.NoError(b, err) ref = st.Copy() diff --git a/beacon-chain/state/state-native/setters_misc_test.go b/beacon-chain/state/state-native/setters_misc_test.go index 594c4a295b..ffda120523 100644 --- a/beacon-chain/state/state-native/setters_misc_test.go +++ b/beacon-chain/state/state-native/setters_misc_test.go @@ -27,9 +27,7 @@ func BenchmarkAppendHistoricalRoots(b *testing.B) { ref := st.Copy() - b.ResetTimer() - - for i := 0; i < b.N; i++ { + for b.Loop() { err := ref.AppendHistoricalRoots(root) require.NoError(b, err) ref = st.Copy() @@ -52,9 +50,7 @@ func BenchmarkAppendHistoricalSummaries(b *testing.B) { ref := st.Copy() - b.ResetTimer() - - for i := 0; i < b.N; i++ { + for b.Loop() { err := ref.AppendHistoricalSummaries(ðpb.HistoricalSummary{}) require.NoError(b, err) ref = st.Copy() diff --git a/beacon-chain/state/state-native/setters_participation_test.go b/beacon-chain/state/state-native/setters_participation_test.go index 3d51816cab..4b65ac26b4 100644 --- a/beacon-chain/state/state-native/setters_participation_test.go +++ b/beacon-chain/state/state-native/setters_participation_test.go @@ -18,9 +18,8 @@ func BenchmarkParticipationBits(b *testing.B) { } ref := st.Copy() - b.ResetTimer() - for i := 0; i < b.N; i++ { + for b.Loop() { require.NoError(b, 
ref.AppendCurrentParticipationBits(byte(2))) ref = st.Copy() } diff --git a/beacon-chain/state/state-native/setters_payload_header_test.go b/beacon-chain/state/state-native/setters_payload_header_test.go index 51d552e42b..4fe4ac6cd2 100644 --- a/beacon-chain/state/state-native/setters_payload_header_test.go +++ b/beacon-chain/state/state-native/setters_payload_header_test.go @@ -88,8 +88,8 @@ func TestSetLatestExecutionPayloadHeader(t *testing.T) { t.Run("mismatched type version returns error", func(t *testing.T) { require.Equal(t, len(payloads), len(payloadHeaders), "This test will fail if the payloads and payload headers are not same length") - for i := 0; i < len(payloads); i++ { - for j := 0; j < len(payloads); j++ { + for i := range payloads { + for j := range payloads { if i == j { continue } diff --git a/beacon-chain/state/state-native/setters_validator.go b/beacon-chain/state/state-native/setters_validator.go index 70d10e23e2..01f79bb7fd 100644 --- a/beacon-chain/state/state-native/setters_validator.go +++ b/beacon-chain/state/state-native/setters_validator.go @@ -33,7 +33,7 @@ func (b *BeaconState) SetValidators(val []*ethpb.Validator) error { func (b *BeaconState) ApplyToEveryValidator(f func(idx int, val state.ReadOnlyValidator) (*ethpb.Validator, error)) error { var changedVals []uint64 l := b.validatorsMultiValue.Len(b) - for i := 0; i < l; i++ { + for i := range l { v, err := b.validatorsMultiValue.At(b, uint64(i)) if err != nil { return err diff --git a/beacon-chain/state/state-native/setters_validator_test.go b/beacon-chain/state/state-native/setters_validator_test.go index 83b19766cd..3d8b598361 100644 --- a/beacon-chain/state/state-native/setters_validator_test.go +++ b/beacon-chain/state/state-native/setters_validator_test.go @@ -18,9 +18,8 @@ func BenchmarkAppendBalance(b *testing.B) { } ref := st.Copy() - b.ResetTimer() - for i := 0; i < b.N; i++ { + for i := 0; b.Loop(); i++ { require.NoError(b, ref.AppendBalance(uint64(i))) ref = st.Copy() } @@ -36,9 +35,8 @@ func BenchmarkAppendInactivityScore(b *testing.B) { } ref := st.Copy() - b.ResetTimer() - for i := 0; i < b.N; i++ { + for i := 0; b.Loop(); i++ { require.NoError(b, ref.AppendInactivityScore(uint64(i))) ref = st.Copy() } diff --git a/beacon-chain/state/state-native/state_test.go b/beacon-chain/state/state-native/state_test.go index 56f1f4a632..e6e2626674 100644 --- a/beacon-chain/state/state-native/state_test.go +++ b/beacon-chain/state/state-native/state_test.go @@ -47,11 +47,10 @@ func TestBeaconState_NoDeadlock_Phase0(t *testing.T) { wg := new(sync.WaitGroup) - wg.Add(1) - go func() { + wg.Go(func() { // Continuously lock and unlock the state // by acquiring the lock. - for i := 0; i < 1000; i++ { + for range 1000 { for _, f := range st.stateFieldLeaves { f.Lock() if f.Empty() { @@ -61,12 +60,11 @@ func TestBeaconState_NoDeadlock_Phase0(t *testing.T) { f.FieldReference().AddRef() } } - wg.Done() - }() + }) // Constantly read from the offending portion // of the code to ensure there is no possible // recursive read locking. - for i := 0; i < 1000; i++ { + for range 1000 { go func() { _ = st.FieldReferencesCount() }() @@ -103,11 +101,10 @@ func TestBeaconState_NoDeadlock_Altair(t *testing.T) { wg := new(sync.WaitGroup) - wg.Add(1) - go func() { + wg.Go(func() { // Continuously lock and unlock the state // by acquiring the lock. 
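Editorial note: the state_test.go deadlock tests above (and several later hunks) replace the manual wg.Add(1) / go func() { ...; wg.Done() }() pattern with (*sync.WaitGroup).Go, added in Go 1.25. A minimal standalone sketch of that helper, illustrative only and not part of the patch:

package main

import (
	"fmt"
	"sync"
)

func main() {
	var wg sync.WaitGroup
	for i := range 3 {
		// wg.Go increments the counter, runs the closure in a new
		// goroutine, and marks it done when the closure returns.
		wg.Go(func() {
			fmt.Println("worker", i) // each iteration captures its own i (Go 1.22+)
		})
	}
	wg.Wait()
}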
- for i := 0; i < 1000; i++ { + for range 1000 { for _, f := range s.stateFieldLeaves { f.Lock() if f.Empty() { @@ -117,12 +114,11 @@ func TestBeaconState_NoDeadlock_Altair(t *testing.T) { f.FieldReference().AddRef() } } - wg.Done() - }() + }) // Constantly read from the offending portion // of the code to ensure there is no possible // recursive read locking. - for i := 0; i < 1000; i++ { + for range 1000 { go func() { _ = st.FieldReferencesCount() }() @@ -159,11 +155,10 @@ func TestBeaconState_NoDeadlock_Bellatrix(t *testing.T) { wg := new(sync.WaitGroup) - wg.Add(1) - go func() { + wg.Go(func() { // Continuously lock and unlock the state // by acquiring the lock. - for i := 0; i < 1000; i++ { + for range 1000 { for _, f := range s.stateFieldLeaves { f.Lock() if f.Empty() { @@ -173,12 +168,11 @@ func TestBeaconState_NoDeadlock_Bellatrix(t *testing.T) { f.FieldReference().AddRef() } } - wg.Done() - }() + }) // Constantly read from the offending portion // of the code to ensure there is no possible // recursive read locking. - for i := 0; i < 1000; i++ { + for range 1000 { go func() { _ = st.FieldReferencesCount() }() @@ -215,11 +209,10 @@ func TestBeaconState_NoDeadlock_Capella(t *testing.T) { wg := new(sync.WaitGroup) - wg.Add(1) - go func() { + wg.Go(func() { // Continuously lock and unlock the state // by acquiring the lock. - for i := 0; i < 1000; i++ { + for range 1000 { for _, f := range s.stateFieldLeaves { f.Lock() if f.Empty() { @@ -229,12 +222,11 @@ func TestBeaconState_NoDeadlock_Capella(t *testing.T) { f.FieldReference().AddRef() } } - wg.Done() - }() + }) // Constantly read from the offending portion // of the code to ensure there is no possible // recursive read locking. - for i := 0; i < 1000; i++ { + for range 1000 { go func() { _ = st.FieldReferencesCount() }() @@ -271,11 +263,10 @@ func TestBeaconState_NoDeadlock_Deneb(t *testing.T) { wg := new(sync.WaitGroup) - wg.Add(1) - go func() { + wg.Go(func() { // Continuously lock and unlock the state // by acquiring the lock. - for i := 0; i < 1000; i++ { + for range 1000 { for _, f := range s.stateFieldLeaves { f.Lock() if f.Empty() { @@ -285,12 +276,11 @@ func TestBeaconState_NoDeadlock_Deneb(t *testing.T) { f.FieldReference().AddRef() } } - wg.Done() - }() + }) // Constantly read from the offending portion // of the code to ensure there is no possible // recursive read locking. 
- for i := 0; i < 1000; i++ { + for range 1000 { go func() { _ = st.FieldReferencesCount() }() @@ -307,7 +297,7 @@ func TestBeaconState_AppendBalanceWithTrie(t *testing.T) { _, err := st.HashTreeRoot(t.Context()) assert.NoError(t, err) - for i := 0; i < 100; i++ { + for i := range 100 { if i%2 == 0 { assert.NoError(t, st.UpdateBalancesAtIndex(primitives.ValidatorIndex(i), 1000)) } @@ -392,7 +382,7 @@ func TestDuplicateDirtyIndices(t *testing.T) { rebuildTrie: make(map[types.FieldIndex]bool), dirtyIndices: make(map[types.FieldIndex][]uint64), } - for i := uint64(0); i < indicesLimit-5; i++ { + for i := range uint64(indicesLimit - 5) { newState.dirtyIndices[types.Balances] = append(newState.dirtyIndices[types.Balances], i) } // Append duplicates @@ -430,16 +420,16 @@ func generateState(t *testing.T) state.BeaconState { } zeroHash := params.BeaconConfig().ZeroHash mockblockRoots := make([][]byte, params.BeaconConfig().SlotsPerHistoricalRoot) - for i := 0; i < len(mockblockRoots); i++ { + for i := range mockblockRoots { mockblockRoots[i] = zeroHash[:] } mockstateRoots := make([][]byte, params.BeaconConfig().SlotsPerHistoricalRoot) - for i := 0; i < len(mockstateRoots); i++ { + for i := range mockstateRoots { mockstateRoots[i] = zeroHash[:] } mockrandaoMixes := make([][]byte, params.BeaconConfig().EpochsPerHistoricalVector) - for i := 0; i < len(mockrandaoMixes); i++ { + for i := range mockrandaoMixes { mockrandaoMixes[i] = zeroHash[:] } newState, err := InitializeFromProtoPhase0(ðpb.BeaconState{ diff --git a/beacon-chain/state/state-native/state_trie.go b/beacon-chain/state/state-native/state_trie.go index b4e6011fd2..d9eca3866e 100644 --- a/beacon-chain/state/state-native/state_trie.go +++ b/beacon-chain/state/state-native/state_trie.go @@ -4,7 +4,7 @@ import ( "context" "fmt" "runtime" - "sort" + "slices" "github.com/OffchainLabs/prysm/v7/beacon-chain/state" "github.com/OffchainLabs/prysm/v7/beacon-chain/state/fieldtrie" @@ -1204,7 +1204,7 @@ func (b *BeaconState) CopyAllTries() { } } -func (b *BeaconState) recomputeFieldTrie(index types.FieldIndex, elements interface{}) ([32]byte, error) { +func (b *BeaconState) recomputeFieldTrie(index types.FieldIndex, elements any) ([32]byte, error) { fTrie := b.stateFieldLeaves[index] fTrieMutex := fTrie.RWMutex // We can't lock the trie directly because the trie's variable gets reassigned, @@ -1241,9 +1241,7 @@ func (b *BeaconState) recomputeFieldTrie(index types.FieldIndex, elements interf // remove duplicate indexes b.dirtyIndices[index] = slice.SetUint64(b.dirtyIndices[index]) // sort indexes again - sort.Slice(b.dirtyIndices[index], func(i int, j int) bool { - return b.dirtyIndices[index][i] < b.dirtyIndices[index][j] - }) + slices.Sort(b.dirtyIndices[index]) root, err := fTrie.RecomputeTrie(b.dirtyIndices[index], elements) if err != nil { return [32]byte{}, err @@ -1252,7 +1250,7 @@ func (b *BeaconState) recomputeFieldTrie(index types.FieldIndex, elements interf return root, nil } -func (b *BeaconState) resetFieldTrie(index types.FieldIndex, elements interface{}, length uint64) error { +func (b *BeaconState) resetFieldTrie(index types.FieldIndex, elements any, length uint64) error { fTrie, err := fieldtrie.NewFieldTrie(index, fieldMap[index], elements, length) if err != nil { return err diff --git a/beacon-chain/state/state-native/types_test.go b/beacon-chain/state/state-native/types_test.go index a749c8455c..8ead7cbe49 100644 --- a/beacon-chain/state/state-native/types_test.go +++ b/beacon-chain/state/state-native/types_test.go @@ -74,11 +74,11 
@@ func setupGenesisState(t testing.TB, count uint64) *ethpb.BeaconState { } func BenchmarkCloneValidators_Proto(b *testing.B) { - b.StopTimer() + validators := make([]*ethpb.Validator, 16384) somePubKey := [fieldparams.BLSPubkeyLength]byte{1, 2, 3} someRoot := [32]byte{3, 4, 5} - for i := 0; i < len(validators); i++ { + for i := range validators { validators[i] = ðpb.Validator{ PublicKey: somePubKey[:], WithdrawalCredentials: someRoot[:], @@ -90,18 +90,18 @@ func BenchmarkCloneValidators_Proto(b *testing.B) { WithdrawableEpoch: 5, } } - b.StartTimer() - for i := 0; i < b.N; i++ { + + for b.Loop() { cloneValidatorsWithProto(validators) } } func BenchmarkCloneValidators_Manual(b *testing.B) { - b.StopTimer() + validators := make([]*ethpb.Validator, 16384) somePubKey := [fieldparams.BLSPubkeyLength]byte{1, 2, 3} someRoot := [32]byte{3, 4, 5} - for i := 0; i < len(validators); i++ { + for i := range validators { validators[i] = ðpb.Validator{ PublicKey: somePubKey[:], WithdrawalCredentials: someRoot[:], @@ -113,33 +113,33 @@ func BenchmarkCloneValidators_Manual(b *testing.B) { WithdrawableEpoch: 5, } } - b.StartTimer() - for i := 0; i < b.N; i++ { + + for b.Loop() { cloneValidatorsManually(validators) } } func BenchmarkStateClone_Proto(b *testing.B) { - b.StopTimer() + params.SetupTestConfigCleanup(b) params.OverrideBeaconConfig(params.MinimalSpecConfig()) genesis := setupGenesisState(b, 64) - b.StartTimer() - for i := 0; i < b.N; i++ { + + for b.Loop() { _, ok := proto.Clone(genesis).(*ethpb.BeaconState) assert.Equal(b, true, ok, "Entity is not of type *ethpb.BeaconState") } } func BenchmarkStateClone_Manual(b *testing.B) { - b.StopTimer() + params.SetupTestConfigCleanup(b) params.OverrideBeaconConfig(params.MinimalSpecConfig()) genesis := setupGenesisState(b, 64) st, err := statenative.InitializeFromProtoPhase0(genesis) require.NoError(b, err) - b.StartTimer() - for i := 0; i < b.N; i++ { + + for b.Loop() { _ = st.ToProto() } } @@ -147,7 +147,7 @@ func BenchmarkStateClone_Manual(b *testing.B) { func cloneValidatorsWithProto(vals []*ethpb.Validator) []*ethpb.Validator { var ok bool res := make([]*ethpb.Validator, len(vals)) - for i := 0; i < len(res); i++ { + for i := range res { res[i], ok = proto.Clone(vals[i]).(*ethpb.Validator) if !ok { log.Debug("Entity is not of type *ethpb.Validator") @@ -158,7 +158,7 @@ func cloneValidatorsWithProto(vals []*ethpb.Validator) []*ethpb.Validator { func cloneValidatorsManually(vals []*ethpb.Validator) []*ethpb.Validator { res := make([]*ethpb.Validator, len(vals)) - for i := 0; i < len(res); i++ { + for i := range res { val := vals[i] res[i] = ðpb.Validator{ PublicKey: val.PublicKey, diff --git a/beacon-chain/state/stategen/epoch_boundary_state_cache.go b/beacon-chain/state/stategen/epoch_boundary_state_cache.go index 83043152b8..c429da939b 100644 --- a/beacon-chain/state/stategen/epoch_boundary_state_cache.go +++ b/beacon-chain/state/stategen/epoch_boundary_state_cache.go @@ -26,7 +26,7 @@ type slotRootInfo struct { // slotKeyFn takes the string representation of the slot to be used as key // to retrieve root. -func slotKeyFn(obj interface{}) (string, error) { +func slotKeyFn(obj any) (string, error) { s, ok := obj.(*slotRootInfo) if !ok { return "", errNotSlotRootInfo @@ -42,7 +42,7 @@ type rootStateInfo struct { // rootKeyFn takes the string representation of the block root to be used as key // to retrieve epoch boundary state. 
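Editorial note: the benchmark rewrites above (types_test.go, and the earlier setters_*_test.go files) drop the manual b.StopTimer/b.StartTimer/b.ResetTimer bookkeeping and loop with testing.B.Loop, added in Go 1.24. A minimal sketch with a hypothetical workload (fib and input are placeholders, not code from the repository):

package bench

import "testing"

func fib(n int) int {
	if n < 2 {
		return n
	}
	return fib(n-1) + fib(n-2)
}

func BenchmarkFib(b *testing.B) {
	input := 20 // setup before the first Loop call is excluded from timing
	// b.Loop replaces `for i := 0; i < b.N; i++` and makes explicit timer
	// control unnecessary for simple setup/teardown.
	for b.Loop() {
		fib(input)
	}
}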
-func rootKeyFn(obj interface{}) (string, error) { +func rootKeyFn(obj any) (string, error) { s, ok := obj.(*rootStateInfo) if !ok { return "", errNotRootStateInfo @@ -184,7 +184,7 @@ func trim(queue *cache.FIFO, maxSize uint64) { } // popProcessNoopFunc is a no-op function that never returns an error. -func popProcessNoopFunc(_ interface{}, _ bool) error { +func popProcessNoopFunc(_ any, _ bool) error { return nil } diff --git a/beacon-chain/state/stategen/history.go b/beacon-chain/state/stategen/history.go index e5858dd0ab..f161d43ab8 100644 --- a/beacon-chain/state/stategen/history.go +++ b/beacon-chain/state/stategen/history.go @@ -188,7 +188,7 @@ func (c *CanonicalHistory) ancestorChain(ctx context.Context, tail interfaces.Re func reverseChain(c []interfaces.ReadOnlySignedBeaconBlock) { last := len(c) - 1 swaps := (last + 1) / 2 - for i := 0; i < swaps; i++ { + for i := range swaps { c[i], c[last-i] = c[last-i], c[i] } } diff --git a/beacon-chain/state/stategen/history_test.go b/beacon-chain/state/stategen/history_test.go index b586d1d472..b58c32e0c4 100644 --- a/beacon-chain/state/stategen/history_test.go +++ b/beacon-chain/state/stategen/history_test.go @@ -530,7 +530,7 @@ func (m *mockCanonicalChecker) IsCanonical(_ context.Context, root [32]byte) (bo func TestReverseChain(t *testing.T) { // test 0,1,2,3 elements to handle: zero case; single element; even number; odd number - for i := 0; i < 4; i++ { + for i := range 4 { t.Run(fmt.Sprintf("reverseChain with %d elements", i), func(t *testing.T) { actual := mockBlocks(i, incrFwd) expected := mockBlocks(i, incrBwd) @@ -538,7 +538,7 @@ func TestReverseChain(t *testing.T) { if len(actual) != len(expected) { t.Errorf("different list lengths") } - for i := 0; i < len(actual); i++ { + for i := range actual { sblockA, ok := actual[i].(*mock.SignedBeaconBlock) require.Equal(t, true, ok) blockA, ok := sblockA.BeaconBlock.(*mock.BeaconBlock) @@ -561,7 +561,7 @@ func incrBwd(n int, c chan uint32) { } func incrFwd(n int, c chan uint32) { - for i := 0; i < n; i++ { + for i := range n { c <- uint32(i) } close(c) diff --git a/beacon-chain/state/stategen/migrate.go b/beacon-chain/state/stategen/migrate.go index 498caa8e50..25407bc969 100644 --- a/beacon-chain/state/stategen/migrate.go +++ b/beacon-chain/state/stategen/migrate.go @@ -86,7 +86,7 @@ func (s *State) MigrateToCold(ctx context.Context, fRoot [32]byte) error { // you can just remove it from the hot state cache as it becomes redundant. s.saveHotStateDB.lock.Lock() roots := s.saveHotStateDB.blockRootsOfSavedStates - for i := 0; i < len(roots); i++ { + for i := range roots { if aRoot == roots[i] { s.saveHotStateDB.blockRootsOfSavedStates = append(roots[:i], roots[i+1:]...) // There shouldn't be duplicated roots in `blockRootsOfSavedStates`. 
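Editorial note: most of the loop rewrites in this patch, including the migrate.go hunk above, rely on Go 1.22's range-over-integer form. A self-contained sketch of its semantics, illustrative only:

package main

import "fmt"

func main() {
	// Ranging over an integer yields 0..n-1, replacing the classic
	// three-clause counting loop.
	for i := range 3 {
		fmt.Println("index", i) // 0, 1, 2
	}
	// The index can be dropped entirely when only the count matters.
	for range 2 {
		fmt.Println("tick")
	}
}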
diff --git a/beacon-chain/state/stateutil/benchmark_test.go b/beacon-chain/state/stateutil/benchmark_test.go index a97ea609a9..9d73381a15 100644 --- a/beacon-chain/state/stateutil/benchmark_test.go +++ b/beacon-chain/state/stateutil/benchmark_test.go @@ -10,7 +10,7 @@ import ( func BenchmarkMerkleize_Buffered(b *testing.B) { roots := make([][32]byte, 8192) - for i := 0; i < 8192; i++ { + for i := range 8192 { roots[0] = [32]byte{byte(i)} } @@ -21,9 +21,8 @@ func BenchmarkMerkleize_Buffered(b *testing.B) { return ssz.Merkleize(ssz.NewHasherFunc(hash.CustomSHA256Hasher()), count, limit, leafIndexer), nil } - b.ResetTimer() b.ReportAllocs() - for i := 0; i < b.N; i++ { + for b.Loop() { _, err := newMerkleize(roots, 8192, 8192) require.NoError(b, err) } diff --git a/beacon-chain/state/stateutil/eth1_root.go b/beacon-chain/state/stateutil/eth1_root.go index 4189b6d616..4ed2ee5db5 100644 --- a/beacon-chain/state/stateutil/eth1_root.go +++ b/beacon-chain/state/stateutil/eth1_root.go @@ -18,7 +18,7 @@ func Eth1DataRootWithHasher(eth1Data *ethpb.Eth1Data) ([32]byte, error) { } fieldRoots := make([][32]byte, 3) - for i := 0; i < len(fieldRoots); i++ { + for i := range fieldRoots { fieldRoots[i] = [32]byte{} } @@ -42,7 +42,7 @@ func Eth1DataRootWithHasher(eth1Data *ethpb.Eth1Data) ([32]byte, error) { // Eth1DatasRoot returns the hash tree root of input `eth1Datas`. func Eth1DatasRoot(eth1Datas []*ethpb.Eth1Data) ([32]byte, error) { eth1VotesRoots := make([][32]byte, 0, len(eth1Datas)) - for i := 0; i < len(eth1Datas); i++ { + for i := range eth1Datas { eth1, err := Eth1DataRootWithHasher(eth1Datas[i]) if err != nil { return [32]byte{}, errors.Wrap(err, "could not compute eth1data merkleization") diff --git a/beacon-chain/state/stateutil/field_root_attestation.go b/beacon-chain/state/stateutil/field_root_attestation.go index 0c26460f77..d17504b780 100644 --- a/beacon-chain/state/stateutil/field_root_attestation.go +++ b/beacon-chain/state/stateutil/field_root_attestation.go @@ -24,7 +24,7 @@ func EpochAttestationsRoot(atts []*ethpb.PendingAttestation) ([32]byte, error) { } roots := make([][32]byte, len(atts)) - for i := 0; i < len(atts); i++ { + for i := range atts { pendingRoot, err := pendingAttestationRoot(atts[i]) if err != nil { return [32]byte{}, errors.Wrap(err, "could not attestation merkleization") diff --git a/beacon-chain/state/stateutil/field_root_validator.go b/beacon-chain/state/stateutil/field_root_validator.go index b1772043ca..fbc6bd905f 100644 --- a/beacon-chain/state/stateutil/field_root_validator.go +++ b/beacon-chain/state/stateutil/field_root_validator.go @@ -56,7 +56,7 @@ func validatorRegistryRoot(validators []*ethpb.Validator) ([32]byte, error) { func hashValidatorHelper(validators []*ethpb.Validator, roots [][32]byte, j int, groupSize int, wg *sync.WaitGroup) { defer wg.Done() - for i := 0; i < groupSize; i++ { + for i := range groupSize { fRoots, err := ValidatorFieldRoots(validators[j*groupSize+i]) if err != nil { logrus.WithError(err).Error("Could not get validator field roots") @@ -98,7 +98,7 @@ func OptimizedValidatorRoots(validators []*ethpb.Validator) ([][32]byte, error) // A validator's tree can represented with a depth of 3. As log2(8) = 3 // Using this property we can lay out all the individual fields of a // validator and hash them in single level using our vectorized routine. - for i := 0; i < validatorTreeDepth; i++ { + for range validatorTreeDepth { // Overwrite input lists as we are hashing by level // and only need the highest level to proceed. 
roots = htr.VectorizedSha256(roots) diff --git a/beacon-chain/state/stateutil/field_root_validator_test.go b/beacon-chain/state/stateutil/field_root_validator_test.go index 7e4c1b86e6..cc2a23b3bb 100644 --- a/beacon-chain/state/stateutil/field_root_validator_test.go +++ b/beacon-chain/state/stateutil/field_root_validator_test.go @@ -18,7 +18,7 @@ func TestValidatorConstants(t *testing.T) { numFields := refV.NumField() numOfValFields := 0 - for i := 0; i < numFields; i++ { + for i := range numFields { if strings.Contains(refV.Type().Field(i).Name, "state") || strings.Contains(refV.Type().Field(i).Name, "sizeCache") || strings.Contains(refV.Type().Field(i).Name, "unknownFields") { @@ -43,13 +43,13 @@ func TestHashValidatorHelper(t *testing.T) { } roots := make([][32]byte, len(valList)) hashValidatorHelper(valList, roots, 2, 2, &wg) - for i := 0; i < 4*validatorFieldRoots; i++ { + for i := range 4 * validatorFieldRoots { require.Equal(t, [32]byte{}, roots[i]) } emptyValRoots, err := ValidatorFieldRoots(v) require.NoError(t, err) for i := 4; i < 6; i++ { - for j := 0; j < validatorFieldRoots; j++ { + for j := range validatorFieldRoots { require.Equal(t, emptyValRoots[j], roots[i*validatorFieldRoots+j]) } } diff --git a/beacon-chain/state/stateutil/participation_bit_root.go b/beacon-chain/state/stateutil/participation_bit_root.go index fc0166805e..202b5f9e72 100644 --- a/beacon-chain/state/stateutil/participation_bit_root.go +++ b/beacon-chain/state/stateutil/participation_bit_root.go @@ -34,12 +34,10 @@ func packParticipationBits(bytes []byte) ([][32]byte, error) { numItems := len(bytes) chunks := make([][32]byte, 0, numItems/32) for i := 0; i < numItems; i += 32 { - j := i + 32 - // We create our upper bound index of the chunk, if it is greater than numItems, - // we set it as numItems itself. - if j > numItems { - j = numItems - } + j := min( + // We create our upper bound index of the chunk, if it is greater than numItems, + // we set it as numItems itself. + i+32, numItems) // We create chunks from the list of items based on the // indices determined above. 
var chunk [32]byte diff --git a/beacon-chain/state/stateutil/reference_bench_test.go b/beacon-chain/state/stateutil/reference_bench_test.go index 1dcad8e012..9aa2af5ae0 100644 --- a/beacon-chain/state/stateutil/reference_bench_test.go +++ b/beacon-chain/state/stateutil/reference_bench_test.go @@ -9,7 +9,7 @@ func BenchmarkReference_MinusRef(b *testing.B) { ref := &Reference{ refs: math.MaxUint64, } - for i := 0; i < b.N; i++ { + for b.Loop() { ref.MinusRef() } } diff --git a/beacon-chain/state/stateutil/state_root_test.go b/beacon-chain/state/stateutil/state_root_test.go index 04ed135884..78392023b7 100644 --- a/beacon-chain/state/stateutil/state_root_test.go +++ b/beacon-chain/state/stateutil/state_root_test.go @@ -15,7 +15,7 @@ import ( func TestState_FieldCount(t *testing.T) { count := params.BeaconConfig().BeaconStateFieldCount - typ := reflect.TypeOf(ethpb.BeaconState{}) + typ := reflect.TypeFor[ethpb.BeaconState]() numFields := 0 for i := 0; i < typ.NumField(); i++ { if typ.Field(i).Name == "state" || @@ -29,30 +29,30 @@ func TestState_FieldCount(t *testing.T) { } func BenchmarkHashTreeRoot_Generic_512(b *testing.B) { - b.StopTimer() + genesisState := setupGenesisState(b, 512) - b.StartTimer() - for i := 0; i < b.N; i++ { + + for b.Loop() { _, err := genesisState.HashTreeRoot() require.NoError(b, err) } } func BenchmarkHashTreeRoot_Generic_16384(b *testing.B) { - b.StopTimer() + genesisState := setupGenesisState(b, 16384) - b.StartTimer() - for i := 0; i < b.N; i++ { + + for b.Loop() { _, err := genesisState.HashTreeRoot() require.NoError(b, err) } } func BenchmarkHashTreeRoot_Generic_300000(b *testing.B) { - b.StopTimer() + genesisState := setupGenesisState(b, 300000) - b.StartTimer() - for i := 0; i < b.N; i++ { + + for b.Loop() { _, err := genesisState.HashTreeRoot() require.NoError(b, err) } diff --git a/beacon-chain/state/stateutil/trie_helpers.go b/beacon-chain/state/stateutil/trie_helpers.go index 4cd2fd5b19..5e3e25336e 100644 --- a/beacon-chain/state/stateutil/trie_helpers.go +++ b/beacon-chain/state/stateutil/trie_helpers.go @@ -61,7 +61,7 @@ func ReturnTrieLayerVariable(elements [][32]byte, length uint64) [][]*[32]byte { buffer := bytes.NewBuffer([]byte{}) buffer.Grow(64) - for i := uint8(0); i < depth; i++ { + for i := range depth { layerLen := len(layers[i]) oddNodeLength := layerLen%2 == 1 if oddNodeLength { diff --git a/beacon-chain/state/stateutil/trie_helpers_test.go b/beacon-chain/state/stateutil/trie_helpers_test.go index 90a3642b59..8b1ed17aad 100644 --- a/beacon-chain/state/stateutil/trie_helpers_test.go +++ b/beacon-chain/state/stateutil/trie_helpers_test.go @@ -36,8 +36,7 @@ func BenchmarkReturnTrieLayer_NormalAlgorithm(b *testing.B) { require.NoError(b, err) roots := retrieveBlockRoots(newState) - b.ResetTimer() - for i := 0; i < b.N; i++ { + for b.Loop() { layers, err := stateutil.ReturnTrieLayer(roots, uint64(len(roots))) assert.NoError(b, err) newRoot := *layers[len(layers)-1][0] @@ -51,8 +50,7 @@ func BenchmarkReturnTrieLayer_VectorizedAlgorithm(b *testing.B) { require.NoError(b, err) roots := retrieveBlockRoots(newState) - b.ResetTimer() - for i := 0; i < b.N; i++ { + for b.Loop() { layers, err := stateutil.ReturnTrieLayer(roots, uint64(len(roots))) assert.NoError(b, err) newRoot := *layers[len(layers)-1][0] @@ -96,8 +94,8 @@ func BenchmarkReturnTrieLayerVariable_NormalAlgorithm(b *testing.B) { require.NoError(b, err) roots = append(roots, rt) } - b.ResetTimer() - for i := 0; i < b.N; i++ { + + for b.Loop() { layers := 
stateutil.ReturnTrieLayerVariable(roots, params.BeaconConfig().ValidatorRegistryLimit) newRoot := *layers[len(layers)-1][0] newRoot, err = stateutil.AddInMixin(newRoot, uint64(len(validators))) @@ -118,8 +116,8 @@ func BenchmarkReturnTrieLayerVariable_VectorizedAlgorithm(b *testing.B) { require.NoError(b, err) roots = append(roots, rt) } - b.ResetTimer() - for i := 0; i < b.N; i++ { + + for b.Loop() { layers := stateutil.ReturnTrieLayerVariable(roots, params.BeaconConfig().ValidatorRegistryLimit) newRoot := *layers[len(layers)-1][0] newRoot, err = stateutil.AddInMixin(newRoot, uint64(len(validators))) diff --git a/beacon-chain/state/stateutil/unrealized_justification.go b/beacon-chain/state/stateutil/unrealized_justification.go index 9500ed739a..9e79a21991 100644 --- a/beacon-chain/state/stateutil/unrealized_justification.go +++ b/beacon-chain/state/stateutil/unrealized_justification.go @@ -22,7 +22,7 @@ func UnrealizedCheckpointBalances(cp, pp []byte, validators ValReader, currentEp } valLength := validators.Len() - for i := 0; i < valLength; i++ { + for i := range valLength { v, err := validators.At(i) if err != nil { return 0, 0, 0, err diff --git a/beacon-chain/state/stateutil/unrealized_justification_test.go b/beacon-chain/state/stateutil/unrealized_justification_test.go index dabb95452b..36d27b7675 100644 --- a/beacon-chain/state/stateutil/unrealized_justification_test.go +++ b/beacon-chain/state/stateutil/unrealized_justification_test.go @@ -15,7 +15,7 @@ func TestState_UnrealizedCheckpointBalances(t *testing.T) { expectedActive := params.BeaconConfig().MinGenesisActiveValidatorCount * params.BeaconConfig().MaxEffectiveBalance balances := make([]uint64, params.BeaconConfig().MinGenesisActiveValidatorCount) - for i := 0; i < len(validators); i++ { + for i := range validators { validators[i] = ðpb.Validator{ ExitEpoch: params.BeaconConfig().FarFutureEpoch, EffectiveBalance: params.BeaconConfig().MaxEffectiveBalance, @@ -100,7 +100,7 @@ func TestState_MVSlice_UnrealizedCheckpointBalances(t *testing.T) { expectedActive := params.BeaconConfig().MinGenesisActiveValidatorCount * params.BeaconConfig().MaxEffectiveBalance balances := make([]uint64, params.BeaconConfig().MinGenesisActiveValidatorCount) - for i := 0; i < len(validators); i++ { + for i := range validators { validators[i] = ðpb.Validator{ ExitEpoch: params.BeaconConfig().FarFutureEpoch, EffectiveBalance: params.BeaconConfig().MaxEffectiveBalance, diff --git a/beacon-chain/state/stateutil/validator_root_test.go b/beacon-chain/state/stateutil/validator_root_test.go index 144f458163..34d51543e9 100644 --- a/beacon-chain/state/stateutil/validator_root_test.go +++ b/beacon-chain/state/stateutil/validator_root_test.go @@ -8,11 +8,11 @@ import ( func BenchmarkUint64ListRootWithRegistryLimit(b *testing.B) { balances := make([]uint64, 100000) - for i := 0; i < len(balances); i++ { + for i := range balances { balances[i] = uint64(i) } b.Run("100k balances", func(b *testing.B) { - for i := 0; i < b.N; i++ { + for b.Loop() { _, err := stateutil.Uint64ListRootWithRegistryLimit(balances) if err != nil { b.Fatal(err) diff --git a/beacon-chain/sync/backfill/batch.go b/beacon-chain/sync/backfill/batch.go index a8874d7410..871f82790b 100644 --- a/beacon-chain/sync/backfill/batch.go +++ b/beacon-chain/sync/backfill/batch.go @@ -78,7 +78,7 @@ type batch struct { } func (b batch) logFields() logrus.Fields { - f := map[string]interface{}{ + f := map[string]any{ "batchId": b.id(), "state": b.state.String(), "scheduled": b.scheduled.String(), diff 
--git a/beacon-chain/sync/backfill/batcher_test.go b/beacon-chain/sync/backfill/batcher_test.go index 52667dfb31..1f0670ecdf 100644 --- a/beacon-chain/sync/backfill/batcher_test.go +++ b/beacon-chain/sync/backfill/batcher_test.go @@ -112,7 +112,7 @@ func TestBatchSequencer(t *testing.T) { } got, err := seq.sequence() require.Equal(t, seqLen, len(got)) - for i := 0; i < seqLen; i++ { + for i := range seqLen { g := got[i] exp := expected[i] require.NoError(t, err) diff --git a/beacon-chain/sync/backfill/blobs_test.go b/beacon-chain/sync/backfill/blobs_test.go index 19eab84ebb..a0022d51b0 100644 --- a/beacon-chain/sync/backfill/blobs_test.go +++ b/beacon-chain/sync/backfill/blobs_test.go @@ -16,7 +16,7 @@ import ( func testBlobGen(t *testing.T, start primitives.Slot, n int) ([]blocks.ROBlock, [][]blocks.ROBlob) { blks := make([]blocks.ROBlock, n) blobs := make([][]blocks.ROBlob, n) - for i := 0; i < n; i++ { + for i := range n { bk, bl := util.GenerateTestDenebBlockWithSidecar(t, [32]byte{}, start+primitives.Slot(i), 3) blks[i] = bk blobs[i] = bl diff --git a/beacon-chain/sync/backfill/pool.go b/beacon-chain/sync/backfill/pool.go index 95bdb6b757..477ef466b2 100644 --- a/beacon-chain/sync/backfill/pool.go +++ b/beacon-chain/sync/backfill/pool.go @@ -65,7 +65,7 @@ func newP2PBatchWorkerPool(p p2p.P2P, maxBatches int) *p2pBatchWorkerPool { func (p *p2pBatchWorkerPool) spawn(ctx context.Context, n int, c *startup.Clock, a PeerAssigner, v *verifier, cm sync.ContextByteVersions, nbv verification.NewBlobVerifier, bfs *filesystem.BlobStorage) { p.ctx, p.cancel = context.WithCancel(ctx) go p.batchRouter(a) - for i := 0; i < n; i++ { + for i := range n { go p.newWorker(workerId(i), p.toWorkers, p.fromWorkers, c, v, cm, nbv, bfs).run(p.ctx) } } diff --git a/beacon-chain/sync/backfill/pool_test.go b/beacon-chain/sync/backfill/pool_test.go index 54c184376f..ae2b131155 100644 --- a/beacon-chain/sync/backfill/pool_test.go +++ b/beacon-chain/sync/backfill/pool_test.go @@ -57,7 +57,7 @@ func TestPoolDetectAllEnded(t *testing.T) { br := batcher{min: 10, size: 10} endSeq := br.before(0) require.Equal(t, batchEndSequence, endSeq.state) - for i := 0; i < nw; i++ { + for range nw { pool.todo(endSeq) } b, err := pool.complete() diff --git a/beacon-chain/sync/backfill/service.go b/beacon-chain/sync/backfill/service.go index 2229cc908d..18d4c1ebde 100644 --- a/beacon-chain/sync/backfill/service.go +++ b/beacon-chain/sync/backfill/service.go @@ -367,10 +367,7 @@ func (*Service) Status() error { // minimumBackfillSlot determines the lowest slot that backfill needs to download based on looking back // MIN_EPOCHS_FOR_BLOCK_REQUESTS from the current slot. func minimumBackfillSlot(current primitives.Slot) primitives.Slot { - oe := primitives.Epoch(params.BeaconConfig().MinEpochsForBlockRequests) - if oe > slots.MaxSafeEpoch() { - oe = slots.MaxSafeEpoch() - } + oe := min(primitives.Epoch(params.BeaconConfig().MinEpochsForBlockRequests), slots.MaxSafeEpoch()) offset := slots.UnsafeEpochStart(oe) if offset >= current { // Slot 0 is the genesis block, therefore the signature in it is invalid. 
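Editorial note: the minimumBackfillSlot change above, like the later waitForMinimumPeers and trimPeers hunks, collapses the "clamp if smaller" pattern into the min built-in from Go 1.21. A standalone sketch with hypothetical values:

package main

import "fmt"

func main() {
	const maxPeersToSync = 64 // hypothetical values for illustration only
	minimumSyncPeers := 3

	// Equivalent to:
	//   required := maxPeersToSync
	//   if minimumSyncPeers < required {
	//       required = minimumSyncPeers
	//   }
	required := min(minimumSyncPeers, maxPeersToSync)
	fmt.Println(required) // 3
}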
diff --git a/beacon-chain/sync/backfill/service_test.go b/beacon-chain/sync/backfill/service_test.go index 9636b76daa..f7e708a546 100644 --- a/beacon-chain/sync/backfill/service_test.go +++ b/beacon-chain/sync/backfill/service_test.go @@ -69,7 +69,7 @@ func TestServiceInit(t *testing.T) { todo := make([]batch, 0) todo = testReadN(ctx, t, pool.todoChan, nWorkers, todo) require.Equal(t, nWorkers, len(todo)) - for i := 0; i < remaining; i++ { + for i := range remaining { b := todo[i] if b.state == batchSequenced { b.state = batchImportable @@ -96,7 +96,7 @@ func TestMinimumBackfillSlot(t *testing.T) { } func testReadN(ctx context.Context, t *testing.T, c chan batch, n int, into []batch) []batch { - for i := 0; i < n; i++ { + for range n { select { case b := <-c: into = append(into, b) diff --git a/beacon-chain/sync/backfill/verify_test.go b/beacon-chain/sync/backfill/verify_test.go index 0f3466d43e..12ec04d986 100644 --- a/beacon-chain/sync/backfill/verify_test.go +++ b/beacon-chain/sync/backfill/verify_test.go @@ -51,7 +51,7 @@ func testBlocksWithKeys(t *testing.T, nBlocks uint64, nBlobs int, vr []byte) ([] sks, pks, err := interop.DeterministicallyGenerateKeys(0, nBlocks) require.NoError(t, err) prevRoot := [32]byte{} - for i := uint64(0); i < nBlocks; i++ { + for i := range nBlocks { block, blobs := util.GenerateTestDenebBlockWithSidecar(t, prevRoot, primitives.Slot(i), nBlobs, util.WithProposerSigning(primitives.ValidatorIndex(i), sks[i], vr)) prevRoot = block.Root() blks[i] = block diff --git a/beacon-chain/sync/batch_verifier.go b/beacon-chain/sync/batch_verifier.go index 4e6a517993..0f1e61f35d 100644 --- a/beacon-chain/sync/batch_verifier.go +++ b/beacon-chain/sync/batch_verifier.go @@ -130,7 +130,7 @@ func verifyBatch(verifierBatch []*signatureVerifier) { verificationErr = errors.New("batch signature verification failed") } } - for i := 0; i < len(verifierBatch); i++ { + for i := range verifierBatch { verifierBatch[i].resChan <- verificationErr } } diff --git a/beacon-chain/sync/blobs_test.go b/beacon-chain/sync/blobs_test.go index 7bf450f4ee..e2ef055a8c 100644 --- a/beacon-chain/sync/blobs_test.go +++ b/beacon-chain/sync/blobs_test.go @@ -53,8 +53,8 @@ type blobsTestCase struct { } type testHandler func(s *Service) rpcHandler -type expectedDefiner func(t *testing.T, scs []blocks.ROBlob, req interface{}) []*expectedBlobChunk -type requestFromSidecars func([]blocks.ROBlob) interface{} +type expectedDefiner func(t *testing.T, scs []blocks.ROBlob, req any) []*expectedBlobChunk +type requestFromSidecars func([]blocks.ROBlob) any type oldestSlotCallback func(t *testing.T) types.Slot type expectedRequirer func(*testing.T, *Service, []*expectedBlobChunk) func(network.Stream) diff --git a/beacon-chain/sync/block_batcher.go b/beacon-chain/sync/block_batcher.go index aa1963d425..ca7a2a1146 100644 --- a/beacon-chain/sync/block_batcher.go +++ b/beacon-chain/sync/block_batcher.go @@ -100,7 +100,7 @@ func (bb *blockRangeBatcher) next(ctx context.Context, stream libp2pcore.Stream) } rob = append(rob, gb) } - for i := 0; i < len(blks); i++ { + for i := range blks { rb, err := blocks.NewROBlockWithRoot(blks[i], roots[i]) if err != nil { return blockBatch{err: errors.Wrap(err, "Could not initialize ROBlock")}, false diff --git a/beacon-chain/sync/block_batcher_test.go b/beacon-chain/sync/block_batcher_test.go index 7cdce247da..74de2bce23 100644 --- a/beacon-chain/sync/block_batcher_test.go +++ b/beacon-chain/sync/block_batcher_test.go @@ -19,7 +19,7 @@ func TestSortedObj_SortBlocksRoots(t *testing.T) { 
} var blks []blocks.ROBlock - for i := 0; i < 10; i++ { + for range 10 { slot := primitives.Slot(randFunc()) newBlk, err := blocks.NewSignedBeaconBlock(ðpb.SignedBeaconBlock{Block: ðpb.BeaconBlock{Slot: slot, Body: ðpb.BeaconBlockBody{}}}) require.NoError(t, err) @@ -47,7 +47,7 @@ func TestSortedObj_NoDuplicates(t *testing.T) { return randGen.Int63n(50) } - for i := 0; i < 10; i++ { + for range 10 { slot := primitives.Slot(randFunc()) newBlk := ðpb.SignedBeaconBlock{Block: ðpb.BeaconBlock{Slot: slot, Body: ðpb.BeaconBlockBody{}}} // append twice diff --git a/beacon-chain/sync/checkpoint/weak-subjectivity_test.go b/beacon-chain/sync/checkpoint/weak-subjectivity_test.go index e7e2548419..9f577beea8 100644 --- a/beacon-chain/sync/checkpoint/weak-subjectivity_test.go +++ b/beacon-chain/sync/checkpoint/weak-subjectivity_test.go @@ -24,7 +24,7 @@ import ( "github.com/pkg/errors" ) -func marshalToEnvelope(val interface{}) ([]byte, error) { +func marshalToEnvelope(val any) ([]byte, error) { raw, err := json.Marshal(val) if err != nil { return nil, errors.Wrap(err, "error marshaling value to place in data envelope") @@ -136,8 +136,8 @@ func TestDownloadWeakSubjectivityCheckpoint(t *testing.T) { Root: fmt.Sprintf("%#x", bRoot), } wsr := struct { - Checkpoint interface{} `json:"ws_checkpoint"` - StateRoot string `json:"state_root"` + Checkpoint any `json:"ws_checkpoint"` + StateRoot string `json:"state_root"` }{ Checkpoint: cp, StateRoot: fmt.Sprintf("%#x", wRoot), @@ -306,7 +306,7 @@ func defaultTestHeadState(t *testing.T, cfg *params.BeaconChainConfig) (state.Be func populateValidators(cfg *params.BeaconChainConfig, st state.BeaconState, valCount, avgBalance uint64) error { validators := make([]*ethpb.Validator, valCount) balances := make([]uint64, len(validators)) - for i := uint64(0); i < valCount; i++ { + for i := range valCount { validators[i] = ðpb.Validator{ PublicKey: make([]byte, cfg.BLSPubkeyLength), WithdrawalCredentials: make([]byte, 32), diff --git a/beacon-chain/sync/data_column_sidecars.go b/beacon-chain/sync/data_column_sidecars.go index 2e9c895244..9206025c5b 100644 --- a/beacon-chain/sync/data_column_sidecars.go +++ b/beacon-chain/sync/data_column_sidecars.go @@ -3,6 +3,7 @@ package sync import ( "bytes" "context" + "maps" "slices" "sync" "time" @@ -129,9 +130,7 @@ func FetchDataColumnSidecars( return nil, nil, errors.Wrap(err, "try merge storage and mandatory inputs") } - for root, sidecars := range mergedSidecarsByRoot { - result[root] = sidecars - } + maps.Copy(result, mergedSidecarsByRoot) if len(incompleteRoots) == 0 { log.WithField("finalMissingRootCount", 0).Debug("Fetched data column sidecars from storage and peers") @@ -150,9 +149,7 @@ func FetchDataColumnSidecars( return nil, nil, errors.Wrap(err, "try merge storage and all inputs") } - for root, sidecars := range mergedSidecarsByRoot { - result[root] = sidecars - } + maps.Copy(result, mergedSidecarsByRoot) if len(incompleteRoots) == 0 { log.WithField("finalMissingRootCount", 0).Debug("Fetched data column sidecars from storage and peers using rescue mode") @@ -165,9 +162,7 @@ func FetchDataColumnSidecars( return nil, nil, errors.Wrap(err, "assemble available sidecars for incomplete roots") } - for root, sidecars := range incompleteSidecarsByRoot { - result[root] = sidecars - } + maps.Copy(result, incompleteSidecarsByRoot) log.WithField("finalMissingRootCount", len(incompleteRoots)).Warning("Failed to fetch data column sidecars") return result, missingByRoot, nil @@ -1159,9 +1154,7 @@ func copyIndicesByRoot(original 
map[[fieldparams.RootLength]byte]map[uint64]bool copied := make(map[[fieldparams.RootLength]byte]map[uint64]bool, len(original)) for root, indexMap := range original { copied[root] = make(map[uint64]bool, len(indexMap)) - for index, value := range indexMap { - copied[root][index] = value - } + maps.Copy(copied[root], indexMap) } return copied } diff --git a/beacon-chain/sync/data_columns_reconstruct.go b/beacon-chain/sync/data_columns_reconstruct.go index dd81b2de77..47b4704f06 100644 --- a/beacon-chain/sync/data_columns_reconstruct.go +++ b/beacon-chain/sync/data_columns_reconstruct.go @@ -21,7 +21,7 @@ import ( // https:github.com/ethereum/consensus-specs/blob/master/specs/fulu/das-core.md#reconstruction-and-cross-seeding func (s *Service) processDataColumnSidecarsFromReconstruction(ctx context.Context, sidecar blocks.VerifiedRODataColumn) error { key := fmt.Sprintf("%#x", sidecar.BlockRoot()) - if _, err, _ := s.reconstructionSingleFlight.Do(key, func() (interface{}, error) { + if _, err, _ := s.reconstructionSingleFlight.Do(key, func() (any, error) { var wg sync.WaitGroup root := sidecar.BlockRoot() diff --git a/beacon-chain/sync/decode_pubsub.go b/beacon-chain/sync/decode_pubsub.go index 050798ff57..c2c6c31820 100644 --- a/beacon-chain/sync/decode_pubsub.go +++ b/beacon-chain/sync/decode_pubsub.go @@ -35,15 +35,15 @@ func (s *Service) decodePubsubMessage(msg *pubsub.Message) (ssz.Unmarshaler, err // Specially handle subnet messages. switch { case strings.Contains(topic, p2p.GossipAttestationMessage): - topic = p2p.GossipTypeMapping[reflect.TypeOf(ðpb.Attestation{})] + topic = p2p.GossipTypeMapping[reflect.TypeFor[*ethpb.Attestation]()] // Given that both sync message related subnets have the same message name, we have to // differentiate them below. 
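Editorial note: the data_column_sidecars.go hunks above replace hand-written map copy loops with maps.Copy from the standard library (Go 1.21). A minimal sketch of the equivalence, with illustrative values only:

package main

import (
	"fmt"
	"maps"
)

func main() {
	dst := map[string]int{"a": 1}
	src := map[string]int{"b": 2, "c": 3}

	// maps.Copy inserts every key/value from src into dst, overwriting
	// existing keys -- the same behaviour as the removed
	// `for k, v := range src { dst[k] = v }` loops.
	maps.Copy(dst, src)
	fmt.Println(len(dst)) // 3
}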
case strings.Contains(topic, p2p.GossipSyncCommitteeMessage) && !strings.Contains(topic, p2p.SyncContributionAndProofSubnetTopicFormat): - topic = p2p.GossipTypeMapping[reflect.TypeOf(ðpb.SyncCommitteeMessage{})] + topic = p2p.GossipTypeMapping[reflect.TypeFor[*ethpb.SyncCommitteeMessage]()] case strings.Contains(topic, p2p.GossipBlobSidecarMessage): - topic = p2p.GossipTypeMapping[reflect.TypeOf(ðpb.BlobSidecar{})] + topic = p2p.GossipTypeMapping[reflect.TypeFor[*ethpb.BlobSidecar]()] case strings.Contains(topic, p2p.GossipDataColumnSidecarMessage): - topic = p2p.GossipTypeMapping[reflect.TypeOf(ðpb.DataColumnSidecar{})] + topic = p2p.GossipTypeMapping[reflect.TypeFor[*ethpb.DataColumnSidecar]()] } base := p2p.GossipTopicMappings(topic, 0) diff --git a/beacon-chain/sync/decode_pubsub_test.go b/beacon-chain/sync/decode_pubsub_test.go index 0d5c2e0209..8bd263c4b3 100644 --- a/beacon-chain/sync/decode_pubsub_test.go +++ b/beacon-chain/sync/decode_pubsub_test.go @@ -35,7 +35,7 @@ func TestService_decodePubsubMessage(t *testing.T) { name string topic string input *pubsub.Message - want interface{} + want any wantErr error }{ { @@ -64,7 +64,7 @@ func TestService_decodePubsubMessage(t *testing.T) { }, { name: "valid message -- beacon block", - topic: fmt.Sprintf(p2p.GossipTypeMapping[reflect.TypeOf(ðpb.SignedBeaconBlock{})], entry.ForkDigest), + topic: fmt.Sprintf(p2p.GossipTypeMapping[reflect.TypeFor[*ethpb.SignedBeaconBlock]()], entry.ForkDigest), input: &pubsub.Message{ Message: &pb.Message{ Data: func() []byte { diff --git a/beacon-chain/sync/fork_watcher_test.go b/beacon-chain/sync/fork_watcher_test.go index 2d81bb37a5..d9c5756893 100644 --- a/beacon-chain/sync/fork_watcher_test.go +++ b/beacon-chain/sync/fork_watcher_test.go @@ -233,11 +233,9 @@ func TestService_CheckForNextEpochFork(t *testing.T) { func attachSpawner(s *Service) *sync.WaitGroup { wg := new(sync.WaitGroup) s.subscriptionSpawner = func(f func()) { - wg.Add(1) - go func() { - defer wg.Done() + wg.Go(func() { f() - }() + }) } return wg } diff --git a/beacon-chain/sync/initial-sync/blocks_fetcher.go b/beacon-chain/sync/initial-sync/blocks_fetcher.go index cbb5c62acd..3d32afcfe4 100644 --- a/beacon-chain/sync/initial-sync/blocks_fetcher.go +++ b/beacon-chain/sync/initial-sync/blocks_fetcher.go @@ -267,14 +267,12 @@ func (f *blocksFetcher) loop() { log.Debug("Context closed, exiting goroutine (blocks fetcher)") return case req := <-f.fetchRequests: - wg.Add(1) - go func() { - defer wg.Done() + wg.Go(func() { select { case <-f.ctx.Done(): case f.fetchResponses <- f.handleRequest(req.ctx, req.start, req.count): } - }() + }) } } } diff --git a/beacon-chain/sync/initial-sync/blocks_fetcher_peers.go b/beacon-chain/sync/initial-sync/blocks_fetcher_peers.go index a6f1edcd01..9796f3ee2f 100644 --- a/beacon-chain/sync/initial-sync/blocks_fetcher_peers.go +++ b/beacon-chain/sync/initial-sync/blocks_fetcher_peers.go @@ -62,10 +62,7 @@ func (f *blocksFetcher) selectFailOverPeer(excludedPID peer.ID, peers []peer.ID) // waitForMinimumPeers spins and waits up until enough peers are available. 
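Editorial note: the decode_pubsub.go lookups above move from reflect.TypeOf on a freshly allocated value to reflect.TypeFor, the generic form added in Go 1.22. A standalone sketch; Attestation here is a stand-in struct, not the generated ethpb type:

package main

import (
	"fmt"
	"reflect"
)

type Attestation struct{} // placeholder for illustration

func main() {
	// TypeFor[T]() returns the reflect.Type for T without constructing a
	// throwaway value, unlike TypeOf(&Attestation{}).
	byValue := reflect.TypeOf(&Attestation{})
	byType := reflect.TypeFor[*Attestation]()
	fmt.Println(byValue == byType) // true
}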
func (f *blocksFetcher) waitForMinimumPeers(ctx context.Context) ([]peer.ID, error) { - required := params.BeaconConfig().MaxPeersToSync - if flags.Get().MinimumSyncPeers < required { - required = flags.Get().MinimumSyncPeers - } + required := min(flags.Get().MinimumSyncPeers, params.BeaconConfig().MaxPeersToSync) for { if ctx.Err() != nil { return nil, ctx.Err() @@ -123,10 +120,7 @@ func (f *blocksFetcher) filterPeers(ctx context.Context, peers []peer.ID, peersP // trimPeers limits peer list, returning only specified percentage of peers. // Takes system constraints into account (min/max peers to sync). func trimPeers(peers []peer.ID, peersPercentage float64) []peer.ID { - required := params.BeaconConfig().MaxPeersToSync - if flags.Get().MinimumSyncPeers < required { - required = flags.Get().MinimumSyncPeers - } + required := min(flags.Get().MinimumSyncPeers, params.BeaconConfig().MaxPeersToSync) // Weak/slow peers will be pushed down the list and trimmed since only percentage of peers is selected. limit := uint64(math.Round(float64(len(peers)) * peersPercentage)) // Limit cannot be less that minimum peers required by sync mechanism. diff --git a/beacon-chain/sync/initial-sync/blocks_fetcher_peers_test.go b/beacon-chain/sync/initial-sync/blocks_fetcher_peers_test.go index 8e41e65df0..13100bc1a2 100644 --- a/beacon-chain/sync/initial-sync/blocks_fetcher_peers_test.go +++ b/beacon-chain/sync/initial-sync/blocks_fetcher_peers_test.go @@ -252,7 +252,7 @@ func TestBlocksFetcher_filterPeers(t *testing.T) { peerStats := make(map[peer.ID]int, len(tt.want)) var filteredPIDs []peer.ID var err error - for i := 0; i < 1000; i++ { + for range 1000 { filteredPIDs = fetcher.filterPeers(t.Context(), peerIDs, tt.args.peersPercentage) if len(filteredPIDs) <= 1 { break diff --git a/beacon-chain/sync/initial-sync/blocks_fetcher_test.go b/beacon-chain/sync/initial-sync/blocks_fetcher_test.go index 1e4be4fa5e..e7fef8674c 100644 --- a/beacon-chain/sync/initial-sync/blocks_fetcher_test.go +++ b/beacon-chain/sync/initial-sync/blocks_fetcher_test.go @@ -388,7 +388,7 @@ func TestBlocksFetcher_scheduleRequest(t *testing.T) { t.Run("unblock on context cancellation", func(t *testing.T) { fetcher := newBlocksFetcher(t.Context(), &blocksFetcherConfig{}) - for i := 0; i < maxPendingRequests; i++ { + for range maxPendingRequests { assert.NoError(t, fetcher.scheduleRequest(t.Context(), 1, blockBatchLimit)) } diff --git a/beacon-chain/sync/initial-sync/blocks_fetcher_utils.go b/beacon-chain/sync/initial-sync/blocks_fetcher_utils.go index b44b1fad7e..3db193aa50 100644 --- a/beacon-chain/sync/initial-sync/blocks_fetcher_utils.go +++ b/beacon-chain/sync/initial-sync/blocks_fetcher_utils.go @@ -297,7 +297,7 @@ func (f *blocksFetcher) findForkWithPeer(ctx context.Context, pid peer.ID, slot // findAncestor tries to figure out common ancestor slot that connects a given root to known block. func (f *blocksFetcher) findAncestor(ctx context.Context, pid peer.ID, b interfaces.ReadOnlySignedBeaconBlock) (*forkData, error) { outBlocks := []interfaces.ReadOnlySignedBeaconBlock{b} - for i := uint64(0); i < backtrackingMaxHops; i++ { + for range uint64(backtrackingMaxHops) { parentRoot := outBlocks[len(outBlocks)-1].Block().ParentRoot() if f.chain.HasBlock(ctx, parentRoot) { // Common ancestor found, forward blocks back to processor. 
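Editorial note: throughout the patch, interface{} is respelled as any, including the eventHandlerFn signatures in the blocks_queue.go hunks below. Since any has been a predeclared alias for interface{} since Go 1.18, this is a purely textual change; a tiny illustrative sketch:

package main

import "fmt"

// The two declarations below are identical types; only the spelling differs.
func describeOld(v interface{}) string { return fmt.Sprintf("%T", v) }
func describeNew(v any) string         { return fmt.Sprintf("%T", v) }

func main() {
	fmt.Println(describeOld(42), describeNew("hi")) // int string
}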
diff --git a/beacon-chain/sync/initial-sync/blocks_fetcher_utils_test.go b/beacon-chain/sync/initial-sync/blocks_fetcher_utils_test.go index 456c8a5036..9e526d2b6e 100644 --- a/beacon-chain/sync/initial-sync/blocks_fetcher_utils_test.go +++ b/beacon-chain/sync/initial-sync/blocks_fetcher_utils_test.go @@ -35,7 +35,7 @@ func TestBlocksFetcher_nonSkippedSlotAfter(t *testing.T) { blocks = append(blocks, 55000) blocks = append(blocks, makeSequence(57000, 57256)...) var peersData []*peerData - for i := 0; i < size; i++ { + for range size { peersData = append(peersData, &peerData{ blocks: blocks, finalizedEpoch: 1800, @@ -177,7 +177,7 @@ func TestBlocksFetcher_findFork(t *testing.T) { DB: beaconDB, FinalizedCheckPoint: ðpb.Checkpoint{ Epoch: finalizedEpoch, - Root: []byte(fmt.Sprintf("finalized_root %d", finalizedEpoch)), + Root: fmt.Appendf(nil, "finalized_root %d", finalizedEpoch), }, Genesis: time.Now(), ValidatorsRoot: [32]byte{}, @@ -199,7 +199,7 @@ func TestBlocksFetcher_findFork(t *testing.T) { // Consume all chain1 blocks from many peers (alternative fork will be featured by a single peer, // and should still be enough to explore alternative paths). peers := make([]peer.ID, 0) - for i := 0; i < 5; i++ { + for range 5 { peers = append(peers, connectPeerHavingBlocks(t, p2p, chain1, finalizedSlot, p2p.Peers())) } @@ -487,7 +487,7 @@ func TestBlocksFetcher_findAncestor(t *testing.T) { DB: beaconDB, FinalizedCheckPoint: ðpb.Checkpoint{ Epoch: finalizedEpoch, - Root: []byte(fmt.Sprintf("finalized_root %d", finalizedEpoch)), + Root: fmt.Appendf(nil, "finalized_root %d", finalizedEpoch), }, Genesis: time.Now(), ValidatorsRoot: [32]byte{}, diff --git a/beacon-chain/sync/initial-sync/blocks_queue.go b/beacon-chain/sync/initial-sync/blocks_queue.go index 8701885c18..c8be8e6d97 100644 --- a/beacon-chain/sync/initial-sync/blocks_queue.go +++ b/beacon-chain/sync/initial-sync/blocks_queue.go @@ -303,7 +303,7 @@ func waitHighestExpectedSlot(q *blocksQueue) bool { // onScheduleEvent is an event called on newly arrived epochs. Transforms state to scheduled. func (q *blocksQueue) onScheduleEvent(ctx context.Context) eventHandlerFn { - return func(m *stateMachine, in interface{}) (stateID, error) { + return func(m *stateMachine, in any) (stateID, error) { if m.state != stateNew { return m.state, errInvalidInitialState } @@ -321,7 +321,7 @@ func (q *blocksQueue) onScheduleEvent(ctx context.Context) eventHandlerFn { // onDataReceivedEvent is an event called when data is received from fetcher. func (q *blocksQueue) onDataReceivedEvent(ctx context.Context) eventHandlerFn { - return func(m *stateMachine, in interface{}) (stateID, error) { + return func(m *stateMachine, in any) (stateID, error) { if ctx.Err() != nil { return m.state, ctx.Err() } @@ -359,7 +359,7 @@ func (q *blocksQueue) onDataReceivedEvent(ctx context.Context) eventHandlerFn { // onReadyToSendEvent is an event called to allow epochs with available blocks to send them downstream. func (q *blocksQueue) onReadyToSendEvent(ctx context.Context) eventHandlerFn { - return func(m *stateMachine, in interface{}) (stateID, error) { + return func(m *stateMachine, in any) (stateID, error) { if ctx.Err() != nil { return m.state, ctx.Err() } @@ -404,7 +404,7 @@ func (q *blocksQueue) onReadyToSendEvent(ctx context.Context) eventHandlerFn { // onProcessSkippedEvent is an event triggered on skipped machines, allowing handlers to // extend lookahead window, in case where progress is not possible otherwise. 
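Editorial note: several test hunks around this point (blocks_fetcher_utils_test.go above, blocks_queue_test.go and initial_sync_test.go below) replace []byte(fmt.Sprintf(...)) with fmt.Appendf(nil, ...), which formats directly into a byte slice (Go 1.19) and skips the intermediate string. A minimal sketch with a hypothetical epoch value:

package main

import "fmt"

func main() {
	finalizedEpoch := 1800 // hypothetical value for illustration

	// Passing nil as the destination allocates a fresh slice, so this is a
	// drop-in replacement for []byte(fmt.Sprintf(...)).
	root := fmt.Appendf(nil, "finalized_root %d", finalizedEpoch)
	fmt.Println(string(root)) // finalized_root 1800
}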
func (q *blocksQueue) onProcessSkippedEvent(ctx context.Context) eventHandlerFn { - return func(m *stateMachine, in interface{}) (stateID, error) { + return func(m *stateMachine, in any) (stateID, error) { if ctx.Err() != nil { return m.state, ctx.Err() } @@ -468,7 +468,7 @@ func (q *blocksQueue) downscorePeer(peerID peer.ID, reason string) { // onCheckStaleEvent is an event that allows to mark stale epochs, // so that they can be re-processed. func onCheckStaleEvent(ctx context.Context) eventHandlerFn { - return func(m *stateMachine, in interface{}) (stateID, error) { + return func(m *stateMachine, in any) (stateID, error) { if ctx.Err() != nil { return m.state, ctx.Err() } diff --git a/beacon-chain/sync/initial-sync/blocks_queue_test.go b/beacon-chain/sync/initial-sync/blocks_queue_test.go index 6d058f0375..3050341c8d 100644 --- a/beacon-chain/sync/initial-sync/blocks_queue_test.go +++ b/beacon-chain/sync/initial-sync/blocks_queue_test.go @@ -1049,7 +1049,7 @@ func TestBlocksQueue_stuckInUnfavourableFork(t *testing.T) { DB: beaconDB, FinalizedCheckPoint: ð.Checkpoint{ Epoch: finalizedEpoch, - Root: []byte(fmt.Sprintf("finalized_root %d", finalizedEpoch)), + Root: fmt.Appendf(nil, "finalized_root %d", finalizedEpoch), }, Genesis: time.Now(), ValidatorsRoot: [32]byte{}, @@ -1252,7 +1252,7 @@ func TestBlocksQueue_stuckWhenHeadIsSetToOrphanedBlock(t *testing.T) { DB: beaconDB, FinalizedCheckPoint: ð.Checkpoint{ Epoch: finalizedEpoch, - Root: []byte(fmt.Sprintf("finalized_root %d", finalizedEpoch)), + Root: fmt.Appendf(nil, "finalized_root %d", finalizedEpoch), }, Genesis: time.Now(), ValidatorsRoot: [32]byte{}, diff --git a/beacon-chain/sync/initial-sync/fsm.go b/beacon-chain/sync/initial-sync/fsm.go index 284b1e5d5a..a59375fe69 100644 --- a/beacon-chain/sync/initial-sync/fsm.go +++ b/beacon-chain/sync/initial-sync/fsm.go @@ -3,7 +3,7 @@ package initialsync import ( "errors" "fmt" - "sort" + "slices" "time" "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" @@ -48,7 +48,7 @@ type stateMachine struct { } // eventHandlerFn is an event handler function's signature. -type eventHandlerFn func(m *stateMachine, data interface{}) (newState stateID, err error) +type eventHandlerFn func(m *stateMachine, data any) (newState stateID, err error) // newStateMachineManager returns fully initialized state machine manager. func newStateMachineManager() *stateMachineManager { @@ -110,9 +110,7 @@ func (smm *stateMachineManager) recalculateMachineAttribs() { for key := range smm.machines { keys = append(keys, key) } - sort.Slice(keys, func(i, j int) bool { - return keys[i] < keys[j] - }) + slices.Sort(keys) smm.keys = keys } @@ -159,7 +157,7 @@ func (m *stateMachine) setState(name stateID) { } // trigger invokes the event handler on a given state machine. 
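Editorial note: the fsm.go hunk above, like the earlier state_trie.go change, drops the sort.Slice comparator in favour of slices.Sort (Go 1.21), which sorts any slice of an ordered element type using the natural < ordering. A standalone sketch, illustrative only:

package main

import (
	"fmt"
	"slices"
)

func main() {
	keys := []uint64{42, 7, 19}

	// Replaces:
	//   sort.Slice(keys, func(i, j int) bool { return keys[i] < keys[j] })
	slices.Sort(keys)
	fmt.Println(keys) // [7 19 42]
}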
-func (m *stateMachine) trigger(event eventID, data interface{}) error { +func (m *stateMachine) trigger(event eventID, data any) error { handlers, ok := m.smm.handlers[m.state] if !ok { return fmt.Errorf("no event handlers registered for event: %v, state: %v", event, m.state) diff --git a/beacon-chain/sync/initial-sync/fsm_benchmark_test.go b/beacon-chain/sync/initial-sync/fsm_benchmark_test.go index 684985fcdc..66f7c828fa 100644 --- a/beacon-chain/sync/initial-sync/fsm_benchmark_test.go +++ b/beacon-chain/sync/initial-sync/fsm_benchmark_test.go @@ -9,7 +9,7 @@ import ( func BenchmarkStateMachine_trigger(b *testing.B) { sm := newStateMachineManager() - handlerFn := func(m *stateMachine, in interface{}) (id stateID, err error) { + handlerFn := func(m *stateMachine, in any) (id stateID, err error) { response, ok := in.(*fetchRequestParams) if !ok { return 0, errInputNotFetchRequestParams @@ -26,9 +26,8 @@ func BenchmarkStateMachine_trigger(b *testing.B) { sm.addStateMachine(64) b.ReportAllocs() - b.ResetTimer() - for i := 0; i < b.N; i++ { + for b.Loop() { data := &fetchRequestParams{ start: 23, count: 32, diff --git a/beacon-chain/sync/initial-sync/fsm_test.go b/beacon-chain/sync/initial-sync/fsm_test.go index a9965bd2af..40262d6195 100644 --- a/beacon-chain/sync/initial-sync/fsm_test.go +++ b/beacon-chain/sync/initial-sync/fsm_test.go @@ -58,7 +58,7 @@ func TestStateMachine_EventIDString(t *testing.T) { func TestStateMachineManager_addEventHandler(t *testing.T) { smm := newStateMachineManager() - smm.addEventHandler(eventTick, stateNew, func(m *stateMachine, i interface{}) (id stateID, err error) { + smm.addEventHandler(eventTick, stateNew, func(m *stateMachine, i any) (id stateID, err error) { return stateScheduled, nil }) assert.Equal(t, 1, len(smm.handlers[stateNew]), "Unexpected size") @@ -67,7 +67,7 @@ func TestStateMachineManager_addEventHandler(t *testing.T) { assert.Equal(t, stateScheduled, state, "Unexpected state") // Add second handler to the same event - smm.addEventHandler(eventTick, stateSent, func(m *stateMachine, i interface{}) (id stateID, err error) { + smm.addEventHandler(eventTick, stateSent, func(m *stateMachine, i any) (id stateID, err error) { return stateDataParsed, nil }) assert.Equal(t, 1, len(smm.handlers[stateSent]), "Unexpected size") @@ -76,7 +76,7 @@ func TestStateMachineManager_addEventHandler(t *testing.T) { assert.Equal(t, stateDataParsed, state, "Unexpected state") // Add another handler to existing event/state pair. Should have no effect. 
- smm.addEventHandler(eventTick, stateSent, func(m *stateMachine, i interface{}) (id stateID, err error) { + smm.addEventHandler(eventTick, stateSent, func(m *stateMachine, i any) (id stateID, err error) { return stateSkipped, nil }) assert.Equal(t, 1, len(smm.handlers[stateSent]), "Unexpected size") @@ -97,7 +97,7 @@ func TestStateMachine_trigger(t *testing.T) { name eventID returnState stateID epoch primitives.Epoch - data interface{} + data any } tests := []struct { name string @@ -157,7 +157,7 @@ func TestStateMachine_trigger(t *testing.T) { }, } fn := func(e event) eventHandlerFn { - return func(m *stateMachine, in interface{}) (stateID, error) { + return func(m *stateMachine, in any) (stateID, error) { if e.err { return m.state, errors.New("invalid") } @@ -198,19 +198,19 @@ func TestStateMachine_trigger(t *testing.T) { func TestStateMachineManager_QueueLoop(t *testing.T) { smm := newStateMachineManager() - smm.addEventHandler(eventTick, stateNew, func(m *stateMachine, data interface{}) (stateID, error) { + smm.addEventHandler(eventTick, stateNew, func(m *stateMachine, data any) (stateID, error) { return stateScheduled, nil }) - smm.addEventHandler(eventTick, stateScheduled, func(m *stateMachine, data interface{}) (stateID, error) { + smm.addEventHandler(eventTick, stateScheduled, func(m *stateMachine, data any) (stateID, error) { if m.start < 256 { return stateDataParsed, nil } return stateSkipped, nil }) - smm.addEventHandler(eventTick, stateDataParsed, func(m *stateMachine, data interface{}) (stateID, error) { + smm.addEventHandler(eventTick, stateDataParsed, func(m *stateMachine, data any) (stateID, error) { return stateSent, nil }) - smm.addEventHandler(eventTick, stateSkipped, func(m *stateMachine, data interface{}) (stateID, error) { + smm.addEventHandler(eventTick, stateSkipped, func(m *stateMachine, data any) (stateID, error) { dataParsed, ok := data.(int) if !ok { return m.state, errors.New("invalid data type") diff --git a/beacon-chain/sync/initial-sync/initial_sync_test.go b/beacon-chain/sync/initial-sync/initial_sync_test.go index 08460cf85f..27f4a84a41 100644 --- a/beacon-chain/sync/initial-sync/initial_sync_test.go +++ b/beacon-chain/sync/initial-sync/initial_sync_test.go @@ -234,7 +234,7 @@ func connectPeer(t *testing.T, host *p2pt.TestP2P, datum *peerData, peerStatus * peerStatus.SetConnectionState(p.PeerID(), peers.Connected) peerStatus.SetChainState(p.PeerID(), ðpb.StatusV2{ ForkDigest: params.BeaconConfig().GenesisForkVersion, - FinalizedRoot: []byte(fmt.Sprintf("finalized_root %d", datum.finalizedEpoch)), + FinalizedRoot: fmt.Appendf(nil, "finalized_root %d", datum.finalizedEpoch), FinalizedEpoch: datum.finalizedEpoch, HeadRoot: bytesutil.PadTo([]byte("head_root"), 32), HeadSlot: datum.headSlot, @@ -333,7 +333,7 @@ func connectPeerHavingBlocks( peerStatus.SetConnectionState(p.PeerID(), peers.Connected) peerStatus.SetChainState(p.PeerID(), ðpb.StatusV2{ ForkDigest: params.BeaconConfig().GenesisForkVersion, - FinalizedRoot: []byte(fmt.Sprintf("finalized_root %d", finalizedEpoch)), + FinalizedRoot: fmt.Appendf(nil, "finalized_root %d", finalizedEpoch), FinalizedEpoch: finalizedEpoch, HeadRoot: headRoot[:], HeadSlot: blks[len(blks)-1].Block.Slot, diff --git a/beacon-chain/sync/initial-sync/service.go b/beacon-chain/sync/initial-sync/service.go index 8c6aea4a08..90de787e9a 100644 --- a/beacon-chain/sync/initial-sync/service.go +++ b/beacon-chain/sync/initial-sync/service.go @@ -312,10 +312,7 @@ func (s *Service) Resync() error { } func (s *Service) 
waitForMinimumPeers() ([]peer.ID, error) { - required := params.BeaconConfig().MaxPeersToSync - if flags.Get().MinimumSyncPeers < required { - required = flags.Get().MinimumSyncPeers - } + required := min(flags.Get().MinimumSyncPeers, params.BeaconConfig().MaxPeersToSync) for { if s.ctx.Err() != nil { return nil, s.ctx.Err() diff --git a/beacon-chain/sync/initial-sync/service_test.go b/beacon-chain/sync/initial-sync/service_test.go index a10aca944d..88175537b9 100644 --- a/beacon-chain/sync/initial-sync/service_test.go +++ b/beacon-chain/sync/initial-sync/service_test.go @@ -182,11 +182,9 @@ func TestService_InitStartStop(t *testing.T) { } wg := &sync.WaitGroup{} - wg.Add(1) - go func() { + wg.Go(func() { s.Start() - wg.Done() - }() + }) go func() { // Allow to exit from test (on no head loop waiting for head is started). @@ -228,11 +226,9 @@ func TestService_waitForStateInitialization(t *testing.T) { s, _ := newService(ctx, &mock.ChainService{Genesis: time.Now(), ValidatorsRoot: [32]byte{}}) wg := &sync.WaitGroup{} - wg.Add(1) - go func() { + wg.Go(func() { s.Start() - wg.Done() - }() + }) go func() { time.AfterFunc(500*time.Millisecond, func() { cancel() @@ -259,11 +255,9 @@ func TestService_waitForStateInitialization(t *testing.T) { expectedGenesisTime := gt wg := &sync.WaitGroup{} - wg.Add(1) - go func() { + wg.Go(func() { s.Start() - wg.Done() - }() + }) rg := func() time.Time { return gt.Add(time.Second * 12) } go func() { time.AfterFunc(200*time.Millisecond, func() { @@ -290,15 +284,13 @@ func TestService_waitForStateInitialization(t *testing.T) { expectedGenesisTime := time.Now().Add(60 * time.Second) wg := &sync.WaitGroup{} - wg.Add(1) - go func() { + wg.Go(func() { time.AfterFunc(500*time.Millisecond, func() { var vr [32]byte require.NoError(t, gs.SetClock(startup.NewClock(expectedGenesisTime, vr))) }) s.Start() - wg.Done() - }() + }) if util.WaitTimeout(wg, time.Second*5) { t.Fatalf("Test should have exited by now, timed out") diff --git a/beacon-chain/sync/kzg_batch_verifier_test.go b/beacon-chain/sync/kzg_batch_verifier_test.go index 5929b139bd..554ab34567 100644 --- a/beacon-chain/sync/kzg_batch_verifier_test.go +++ b/beacon-chain/sync/kzg_batch_verifier_test.go @@ -53,8 +53,7 @@ func TestValidateWithKzgBatchVerifier(t *testing.T) { for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - ctx, cancel := context.WithCancel(context.Background()) - defer cancel() + ctx := t.Context() service := &Service{ ctx: ctx, @@ -79,8 +78,7 @@ func TestVerifierRoutine(t *testing.T) { require.NoError(t, err) t.Run("processes single request", func(t *testing.T) { - ctx, cancel := context.WithCancel(context.Background()) - defer cancel() + ctx := t.Context() service := &Service{ ctx: ctx, @@ -101,8 +99,7 @@ func TestVerifierRoutine(t *testing.T) { }) t.Run("batches multiple requests", func(t *testing.T) { - ctx, cancel := context.WithCancel(context.Background()) - defer cancel() + ctx := t.Context() service := &Service{ ctx: ctx, @@ -200,8 +197,7 @@ func TestKzgBatchVerifierConcurrency(t *testing.T) { err := kzg.Start() require.NoError(t, err) - ctx, cancel := context.WithCancel(context.Background()) - defer cancel() + ctx := t.Context() service := &Service{ ctx: ctx, @@ -237,8 +233,7 @@ func TestKzgBatchVerifierFallback(t *testing.T) { require.NoError(t, err) t.Run("fallback handles mixed valid/invalid batch correctly", func(t *testing.T) { - ctx, cancel := context.WithCancel(context.Background()) - defer cancel() + ctx := t.Context() service := &Service{ ctx: ctx, @@ -259,8 +254,7 @@ 
func TestKzgBatchVerifierFallback(t *testing.T) { }) t.Run("empty data columns fallback", func(t *testing.T) { - ctx, cancel := context.WithCancel(context.Background()) - defer cancel() + ctx := t.Context() service := &Service{ ctx: ctx, diff --git a/beacon-chain/sync/metrics.go b/beacon-chain/sync/metrics.go index 2f12deedba..226f3df42a 100644 --- a/beacon-chain/sync/metrics.go +++ b/beacon-chain/sync/metrics.go @@ -256,8 +256,8 @@ func (s *Service) updateMetrics() { } indices := aggregatorSubnetIndices(s.cfg.clock.CurrentSlot()) syncIndices := cache.SyncSubnetIDs.GetAllSubnets(slots.ToEpoch(s.cfg.clock.CurrentSlot())) - attTopic := p2p.GossipTypeMapping[reflect.TypeOf(&pb.Attestation{})] - syncTopic := p2p.GossipTypeMapping[reflect.TypeOf(&pb.SyncCommitteeMessage{})] + attTopic := p2p.GossipTypeMapping[reflect.TypeFor[*pb.Attestation]()] + syncTopic := p2p.GossipTypeMapping[reflect.TypeFor[*pb.SyncCommitteeMessage]()] attTopic += s.cfg.p2p.Encoding().ProtocolSuffix() syncTopic += s.cfg.p2p.Encoding().ProtocolSuffix() if flags.Get().SubscribeToAllSubnets { diff --git a/beacon-chain/sync/pending_attestations_queue_test.go b/beacon-chain/sync/pending_attestations_queue_test.go index 89db1a4803..19fc363494 100644 --- a/beacon-chain/sync/pending_attestations_queue_test.go +++ b/beacon-chain/sync/pending_attestations_queue_test.go @@ -175,9 +175,7 @@ func TestProcessPendingAtts_HasBlockSaveUnaggregatedAtt(t *testing.T) { require.NoError(t, r.processPendingAttsForBlock(t.Context(), root)) var wg sync.WaitGroup - wg.Add(1) - go func() { - defer wg.Done() + wg.Go(func() { for { select { case received := <-done: @@ -188,7 +186,7 @@ func TestProcessPendingAtts_HasBlockSaveUnaggregatedAtt(t *testing.T) { return } } - }() + }) atts := r.cfg.attPool.UnaggregatedAttestations() assert.Equal(t, 1, len(atts), "Did not save unaggregated att") assert.DeepEqual(t, att, atts[0], "Incorrect saved att") @@ -268,9 +266,7 @@ func TestProcessPendingAtts_HasBlockSaveUnaggregatedAttElectra(t *testing.T) { r.blkRootToPendingAtts[root] = []any{att} require.NoError(t, r.processPendingAttsForBlock(t.Context(), root)) var wg sync.WaitGroup - wg.Add(1) - go func() { - defer wg.Done() + wg.Go(func() { for { select { case received := <-done: @@ -281,7 +277,7 @@ func TestProcessPendingAtts_HasBlockSaveUnaggregatedAttElectra(t *testing.T) { return } } - }() + }) atts := r.cfg.attPool.UnaggregatedAttestations() require.Equal(t, 1, len(atts), "Did not save unaggregated att") assert.DeepEqual(t, att.ToAttestationElectra(committee), atts[0], "Incorrect saved att") @@ -395,9 +391,7 @@ func TestProcessPendingAtts_HasBlockSaveUnAggregatedAttElectra_VerifyAlreadySeen // Verify that the event feed receives the expected attestation. var wg sync.WaitGroup - wg.Add(1) - go func() { - defer wg.Done() + wg.Go(func() { for { select { case received := <-done: @@ -408,7 +402,7 @@ func TestProcessPendingAtts_HasBlockSaveUnAggregatedAttElectra_VerifyAlreadySeen return } } - }() + }) // Verify unaggregated attestations are saved correctly. 
atts := r.cfg.attPool.UnaggregatedAttestations() @@ -831,7 +825,7 @@ func TestValidatePendingAtts_CanPruneOldAtts(t *testing.T) { r2 := [32]byte{'B'} r3 := [32]byte{'C'} - for i := primitives.Slot(0); i < 100; i++ { + for i := range primitives.Slot(100) { s.savePendingAtt(ðpb.Attestation{Data: ðpb.AttestationData{Slot: i, BeaconBlockRoot: r1[:]}}) s.savePendingAtt(ðpb.Attestation{Data: ðpb.AttestationData{Slot: i, BeaconBlockRoot: r2[:]}}) s.savePendingAtt(ðpb.Attestation{Data: ðpb.AttestationData{Slot: i, BeaconBlockRoot: r3[:]}}) @@ -877,7 +871,7 @@ func TestSavePendingAtts_BeyondLimit(t *testing.T) { blkRootToPendingAtts: make(map[[32]byte][]any), } - for i := 0; i < pendingAttsLimit; i++ { + for i := range pendingAttsLimit { s.savePendingAtt(ðpb.Attestation{Data: ðpb.AttestationData{Slot: 1, BeaconBlockRoot: bytesutil.Bytes32(uint64(i))}}) } r1 := [32]byte(bytesutil.Bytes32(0)) diff --git a/beacon-chain/sync/pending_blocks_queue.go b/beacon-chain/sync/pending_blocks_queue.go index 213a97e6c9..af3f5bfa1c 100644 --- a/beacon-chain/sync/pending_blocks_queue.go +++ b/beacon-chain/sync/pending_blocks_queue.go @@ -4,7 +4,7 @@ import ( "context" "encoding/hex" "fmt" - "sort" + "slices" "sync" "time" @@ -452,9 +452,7 @@ func (s *Service) sortedPendingSlots() []primitives.Slot { slot := cacheKeyToSlot(k) ss = append(ss, slot) } - sort.Slice(ss, func(i, j int) bool { - return ss[i] < ss[j] - }) + slices.Sort(ss) return ss } diff --git a/beacon-chain/sync/rate_limiter_test.go b/beacon-chain/sync/rate_limiter_test.go index 9d10390a10..f0400e846a 100644 --- a/beacon-chain/sync/rate_limiter_test.go +++ b/beacon-chain/sync/rate_limiter_test.go @@ -84,7 +84,7 @@ func TestRateLimiter_ExceedRawCapacity(t *testing.T) { stream, err := p1.BHost.NewStream(t.Context(), p2.PeerID(), protocol.ID(topic)) require.NoError(t, err, "could not create stream") - for i := 0; i < 2*defaultBurstLimit; i++ { + for range 2 * defaultBurstLimit { err = rlimiter.validateRawRpcRequest(stream, 1) rlimiter.addRawStream(stream) require.NoError(t, err, "could not validate incoming request") @@ -93,7 +93,7 @@ func TestRateLimiter_ExceedRawCapacity(t *testing.T) { assert.ErrorContains(t, p2ptypes.ErrRateLimited.Error(), rlimiter.validateRawRpcRequest(stream, 1)) // Make Peer bad. - for i := 0; i < defaultBurstLimit; i++ { + for range defaultBurstLimit { assert.ErrorContains(t, p2ptypes.ErrRateLimited.Error(), rlimiter.validateRawRpcRequest(stream, 1)) } assert.NotNil(t, p1.Peers().IsBad(p2.PeerID()), "peer is not marked as a bad peer") diff --git a/beacon-chain/sync/rpc.go b/beacon-chain/sync/rpc.go index 66156895c1..669bf7fd2e 100644 --- a/beacon-chain/sync/rpc.go +++ b/beacon-chain/sync/rpc.go @@ -34,7 +34,7 @@ var ( // rpcHandler is responsible for handling and responding to any incoming message. // This method may return an error to internal monitoring, but the error will // not be relayed to the peer. -type rpcHandler func(context.Context, interface{}, libp2pcore.Stream) error +type rpcHandler func(context.Context, any, libp2pcore.Stream) error // rpcHandlerByTopicFromFork returns the RPC handlers for a given fork index. 
func (s *Service) rpcHandlerByTopicFromFork(forkIndex int) (map[string]rpcHandler, error) { diff --git a/beacon-chain/sync/rpc_beacon_blocks_by_range.go b/beacon-chain/sync/rpc_beacon_blocks_by_range.go index fd668fe405..9281dadae1 100644 --- a/beacon-chain/sync/rpc_beacon_blocks_by_range.go +++ b/beacon-chain/sync/rpc_beacon_blocks_by_range.go @@ -21,7 +21,7 @@ import ( ) // beaconBlocksByRangeRPCHandler looks up the request blocks from the database from a given start block. -func (s *Service) beaconBlocksByRangeRPCHandler(ctx context.Context, msg interface{}, stream libp2pcore.Stream) error { +func (s *Service) beaconBlocksByRangeRPCHandler(ctx context.Context, msg any, stream libp2pcore.Stream) error { ctx, span := trace.StartSpan(ctx, "sync.BeaconBlocksByRangeHandler") defer span.End() ctx, cancel := context.WithTimeout(ctx, respTimeout) @@ -144,10 +144,7 @@ func validateRangeRequest(r *pb.BeaconBlocksByRangeRequest, current primitives.S return rangeParams{}, p2ptypes.ErrInvalidRequest } - limit := uint64(flags.Get().BlockBatchLimit) - if limit > maxRequest { - limit = maxRequest - } + limit := min(uint64(flags.Get().BlockBatchLimit), maxRequest) if rp.size > limit { rp.size = limit } diff --git a/beacon-chain/sync/rpc_beacon_blocks_by_root.go b/beacon-chain/sync/rpc_beacon_blocks_by_root.go index c9afe0a9b7..d2ca52c879 100644 --- a/beacon-chain/sync/rpc_beacon_blocks_by_root.go +++ b/beacon-chain/sync/rpc_beacon_blocks_by_root.go @@ -174,7 +174,7 @@ func (s *Service) requestAndSaveMissingBlobSidecars(block interfaces.ReadOnlySig } // beaconBlocksRootRPCHandler looks up the request blocks from the database from the given block roots. -func (s *Service) beaconBlocksRootRPCHandler(ctx context.Context, msg interface{}, stream libp2pcore.Stream) error { +func (s *Service) beaconBlocksRootRPCHandler(ctx context.Context, msg any, stream libp2pcore.Stream) error { ctx, cancel := context.WithTimeout(ctx, ttfbTimeout) defer cancel() SetRPCStreamDeadlines(stream) diff --git a/beacon-chain/sync/rpc_blob_sidecars_by_range.go b/beacon-chain/sync/rpc_blob_sidecars_by_range.go index 7b4f0f3838..041fe5a5b5 100644 --- a/beacon-chain/sync/rpc_blob_sidecars_by_range.go +++ b/beacon-chain/sync/rpc_blob_sidecars_by_range.go @@ -60,7 +60,7 @@ func (s *Service) streamBlobBatch(ctx context.Context, batch blockBatch, wQuota var blobRpcThrottleInterval = time.Second // blobsSidecarsByRangeRPCHandler looks up the request blobs from the database from a given start slot index -func (s *Service) blobSidecarsByRangeRPCHandler(ctx context.Context, msg interface{}, stream libp2pcore.Stream) error { +func (s *Service) blobSidecarsByRangeRPCHandler(ctx context.Context, msg any, stream libp2pcore.Stream) error { var err error ctx, span := trace.StartSpan(ctx, "sync.BlobsSidecarsByRangeHandler") defer span.End() @@ -209,10 +209,7 @@ func validateBlobsByRange(r *pb.BlobSidecarsByRangeRequest, current primitives.S rp.end = rp.start } - limit := blobBatchLimit(current) - if limit > maxRequest { - limit = maxRequest - } + limit := min(blobBatchLimit(current), maxRequest) if rp.size > limit { rp.size = limit } diff --git a/beacon-chain/sync/rpc_blob_sidecars_by_range_test.go b/beacon-chain/sync/rpc_blob_sidecars_by_range_test.go index 637d37bb03..5655f5916a 100644 --- a/beacon-chain/sync/rpc_blob_sidecars_by_range_test.go +++ b/beacon-chain/sync/rpc_blob_sidecars_by_range_test.go @@ -16,15 +16,12 @@ import ( func (c *blobsTestCase) defaultOldestSlotByRange(t *testing.T) types.Slot { currentEpoch := c.clock.CurrentEpoch() - 
oldestEpoch := currentEpoch - params.BeaconConfig().MinEpochsForBlobsSidecarsRequest - if oldestEpoch < params.BeaconConfig().DenebForkEpoch { - oldestEpoch = params.BeaconConfig().DenebForkEpoch - } + oldestEpoch := max(currentEpoch-params.BeaconConfig().MinEpochsForBlobsSidecarsRequest, params.BeaconConfig().DenebForkEpoch) oldestSlot := util.SlotAtEpoch(t, oldestEpoch) return oldestSlot } -func blobRangeRequestFromSidecars(scs []blocks.ROBlob) interface{} { +func blobRangeRequestFromSidecars(scs []blocks.ROBlob) any { maxBlobs := params.BeaconConfig().MaxBlobsPerBlock(scs[0].Slot()) count := uint64(len(scs) / maxBlobs) return ðpb.BlobSidecarsByRangeRequest{ @@ -33,7 +30,7 @@ func blobRangeRequestFromSidecars(scs []blocks.ROBlob) interface{} { } } -func (c *blobsTestCase) filterExpectedByRange(t *testing.T, scs []blocks.ROBlob, req interface{}) []*expectedBlobChunk { +func (c *blobsTestCase) filterExpectedByRange(t *testing.T, scs []blocks.ROBlob, req any) []*expectedBlobChunk { var expect []*expectedBlobChunk blockOffset := 0 lastRoot := scs[0].BlockRoot() @@ -103,7 +100,7 @@ func TestBlobByRangeOK(t *testing.T) { { name: "10 slots before window, 10 slots after, count = 20", nblocks: 10, - requestFromSidecars: func(scs []blocks.ROBlob) interface{} { + requestFromSidecars: func(scs []blocks.ROBlob) any { return ðpb.BlobSidecarsByRangeRequest{ StartSlot: scs[0].Slot() - 10, Count: 20, @@ -113,7 +110,7 @@ func TestBlobByRangeOK(t *testing.T) { { name: "request before window, empty response", nblocks: 10, - requestFromSidecars: func(scs []blocks.ROBlob) interface{} { + requestFromSidecars: func(scs []blocks.ROBlob) any { return ðpb.BlobSidecarsByRangeRequest{ StartSlot: scs[0].Slot() - 10, Count: 10, @@ -124,7 +121,7 @@ func TestBlobByRangeOK(t *testing.T) { { name: "10 blocks * 4 blobs = 40", nblocks: 10, - requestFromSidecars: func(scs []blocks.ROBlob) interface{} { + requestFromSidecars: func(scs []blocks.ROBlob) any { return ðpb.BlobSidecarsByRangeRequest{ StartSlot: scs[0].Slot() - 10, Count: 20, @@ -135,7 +132,7 @@ func TestBlobByRangeOK(t *testing.T) { { name: "when request count > MAX_REQUEST_BLOCKS_DENEB, MAX_REQUEST_BLOBS_SIDECARS sidecars in response", nblocks: int(params.BeaconConfig().MaxRequestBlocksDeneb) + 1, - requestFromSidecars: func(scs []blocks.ROBlob) interface{} { + requestFromSidecars: func(scs []blocks.ROBlob) any { return ðpb.BlobSidecarsByRangeRequest{ StartSlot: scs[0].Slot(), Count: params.BeaconConfig().MaxRequestBlocksDeneb + 1, diff --git a/beacon-chain/sync/rpc_blob_sidecars_by_root.go b/beacon-chain/sync/rpc_blob_sidecars_by_root.go index a8415d8849..e401250119 100644 --- a/beacon-chain/sync/rpc_blob_sidecars_by_root.go +++ b/beacon-chain/sync/rpc_blob_sidecars_by_root.go @@ -24,7 +24,7 @@ import ( // blobSidecarByRootRPCHandler handles the /eth2/beacon_chain/req/blob_sidecars_by_root/1/ RPC request. 
// spec: https://github.com/ethereum/consensus-specs/blob/a7e45db9ac2b60a33e144444969ad3ac0aae3d4c/specs/deneb/p2p-interface.md#blobsidecarsbyroot-v1 -func (s *Service) blobSidecarByRootRPCHandler(ctx context.Context, msg interface{}, stream libp2pcore.Stream) error { +func (s *Service) blobSidecarByRootRPCHandler(ctx context.Context, msg any, stream libp2pcore.Stream) error { ctx, span := trace.StartSpan(ctx, "sync.blobSidecarByRootRPCHandler") defer span.End() ctx, cancel := context.WithTimeout(ctx, ttfbTimeout) diff --git a/beacon-chain/sync/rpc_blob_sidecars_by_root_test.go b/beacon-chain/sync/rpc_blob_sidecars_by_root_test.go index e1888f101b..ac75e45722 100644 --- a/beacon-chain/sync/rpc_blob_sidecars_by_root_test.go +++ b/beacon-chain/sync/rpc_blob_sidecars_by_root_test.go @@ -25,7 +25,7 @@ func (c *blobsTestCase) defaultOldestSlotByRoot(t *testing.T) types.Slot { return oldest } -func blobRootRequestFromSidecars(scs []blocks.ROBlob) interface{} { +func blobRootRequestFromSidecars(scs []blocks.ROBlob) any { req := make(p2pTypes.BlobSidecarsByRootReq, 0) for i := range scs { sc := scs[i] @@ -34,7 +34,7 @@ func blobRootRequestFromSidecars(scs []blocks.ROBlob) interface{} { return &req } -func (c *blobsTestCase) filterExpectedByRoot(t *testing.T, scs []blocks.ROBlob, r interface{}) []*expectedBlobChunk { +func (c *blobsTestCase) filterExpectedByRoot(t *testing.T, scs []blocks.ROBlob, r any) []*expectedBlobChunk { rp, ok := r.(*p2pTypes.BlobSidecarsByRootReq) if !ok { panic("unexpected request type in filterExpectedByRoot") @@ -278,7 +278,7 @@ func TestValidateBlobByRootRequest(t *testing.T) { // Helper function to create blob identifiers createBlobIdents := func(count int) p2pTypes.BlobSidecarsByRootReq { idents := make([]*ethpb.BlobIdentifier, count) - for i := 0; i < count; i++ { + for i := range count { idents[i] = ðpb.BlobIdentifier{ BlockRoot: make([]byte, 32), Index: uint64(i), diff --git a/beacon-chain/sync/rpc_data_column_sidecars_by_range.go b/beacon-chain/sync/rpc_data_column_sidecars_by_range.go index 90e9eee2a4..4d1e42cab6 100644 --- a/beacon-chain/sync/rpc_data_column_sidecars_by_range.go +++ b/beacon-chain/sync/rpc_data_column_sidecars_by_range.go @@ -24,7 +24,7 @@ const rateLimitingAmount = 1 var notDataColumnsByRangeIdentifiersError = errors.New("not data columns by range identifiers") // dataColumnSidecarsByRangeRPCHandler looks up the request data columns from the database from a given start slot index -func (s *Service) dataColumnSidecarsByRangeRPCHandler(ctx context.Context, msg interface{}, stream libp2pcore.Stream) error { +func (s *Service) dataColumnSidecarsByRangeRPCHandler(ctx context.Context, msg any, stream libp2pcore.Stream) error { ctx, span := trace.StartSpan(ctx, "sync.DataColumnSidecarsByRangeHandler") defer span.End() diff --git a/beacon-chain/sync/rpc_data_column_sidecars_by_root.go b/beacon-chain/sync/rpc_data_column_sidecars_by_root.go index 28fb61f233..7d198d21f7 100644 --- a/beacon-chain/sync/rpc_data_column_sidecars_by_root.go +++ b/beacon-chain/sync/rpc_data_column_sidecars_by_root.go @@ -29,7 +29,7 @@ var ( // dataColumnSidecarByRootRPCHandler handles the data column sidecars by root RPC request. 
// https://github.com/ethereum/consensus-specs/blob/master/specs/fulu/p2p-interface.md#datacolumnsidecarsbyroot-v1 -func (s *Service) dataColumnSidecarByRootRPCHandler(ctx context.Context, msg interface{}, stream libp2pcore.Stream) error { +func (s *Service) dataColumnSidecarByRootRPCHandler(ctx context.Context, msg any, stream libp2pcore.Stream) error { ctx, span := trace.StartSpan(ctx, "sync.dataColumnSidecarByRootRPCHandler") defer span.End() diff --git a/beacon-chain/sync/rpc_goodbye.go b/beacon-chain/sync/rpc_goodbye.go index 63c2f0f920..a61286ba31 100644 --- a/beacon-chain/sync/rpc_goodbye.go +++ b/beacon-chain/sync/rpc_goodbye.go @@ -35,7 +35,7 @@ var backOffTime = map[primitives.SSZUint64]time.Duration{ } // goodbyeRPCHandler reads the incoming goodbye rpc message from the peer. -func (s *Service) goodbyeRPCHandler(_ context.Context, msg interface{}, stream libp2pcore.Stream) error { +func (s *Service) goodbyeRPCHandler(_ context.Context, msg any, stream libp2pcore.Stream) error { const amount = 1 SetRPCStreamDeadlines(stream) peerID := stream.Conn().RemotePeer() diff --git a/beacon-chain/sync/rpc_handler_test.go b/beacon-chain/sync/rpc_handler_test.go index d020a8514b..b83467ae42 100644 --- a/beacon-chain/sync/rpc_handler_test.go +++ b/beacon-chain/sync/rpc_handler_test.go @@ -20,7 +20,7 @@ type rpcHandlerTest struct { s *Service } -func (rt *rpcHandlerTest) testHandler(streamHandler network.StreamHandler, rpcHandler rpcHandler, message interface{}) { +func (rt *rpcHandlerTest) testHandler(streamHandler network.StreamHandler, rpcHandler rpcHandler, message any) { ctx, cancel := context.WithTimeout(context.Background(), rt.timeout) defer func() { cancel() diff --git a/beacon-chain/sync/rpc_light_client.go b/beacon-chain/sync/rpc_light_client.go index c31f7d845f..39f7091d26 100644 --- a/beacon-chain/sync/rpc_light_client.go +++ b/beacon-chain/sync/rpc_light_client.go @@ -16,7 +16,7 @@ import ( ) // lightClientBootstrapRPCHandler handles the /eth2/beacon_chain/req/light_client_bootstrap/1/ RPC request. -func (s *Service) lightClientBootstrapRPCHandler(ctx context.Context, msg interface{}, stream libp2pcore.Stream) error { +func (s *Service) lightClientBootstrapRPCHandler(ctx context.Context, msg any, stream libp2pcore.Stream) error { ctx, span := trace.StartSpan(ctx, "sync.lightClientBootstrapRPCHandler") defer span.End() ctx, cancel := context.WithTimeout(ctx, ttfbTimeout) @@ -67,7 +67,7 @@ func (s *Service) lightClientBootstrapRPCHandler(ctx context.Context, msg interf } // lightClientUpdatesByRangeRPCHandler handles the /eth2/beacon_chain/req/light_client_updates_by_range/1/ RPC request. -func (s *Service) lightClientUpdatesByRangeRPCHandler(ctx context.Context, msg interface{}, stream libp2pcore.Stream) error { +func (s *Service) lightClientUpdatesByRangeRPCHandler(ctx context.Context, msg any, stream libp2pcore.Stream) error { ctx, span := trace.StartSpan(ctx, "sync.lightClientUpdatesByRangeRPCHandler") defer span.End() ctx, cancel := context.WithTimeout(ctx, ttfbTimeout) @@ -153,7 +153,7 @@ func (s *Service) lightClientUpdatesByRangeRPCHandler(ctx context.Context, msg i } // lightClientFinalityUpdateRPCHandler handles the /eth2/beacon_chain/req/light_client_finality_update/1/ RPC request. 
-func (s *Service) lightClientFinalityUpdateRPCHandler(ctx context.Context, _ interface{}, stream libp2pcore.Stream) error { +func (s *Service) lightClientFinalityUpdateRPCHandler(ctx context.Context, _ any, stream libp2pcore.Stream) error { ctx, span := trace.StartSpan(ctx, "sync.lightClientFinalityUpdateRPCHandler") defer span.End() _, cancel := context.WithTimeout(ctx, ttfbTimeout) @@ -189,7 +189,7 @@ func (s *Service) lightClientFinalityUpdateRPCHandler(ctx context.Context, _ int } // lightClientOptimisticUpdateRPCHandler handles the /eth2/beacon_chain/req/light_client_optimistic_update/1/ RPC request. -func (s *Service) lightClientOptimisticUpdateRPCHandler(ctx context.Context, _ interface{}, stream libp2pcore.Stream) error { +func (s *Service) lightClientOptimisticUpdateRPCHandler(ctx context.Context, _ any, stream libp2pcore.Stream) error { ctx, span := trace.StartSpan(ctx, "sync.lightClientOptimisticUpdateRPCHandler") defer span.End() _, cancel := context.WithTimeout(ctx, ttfbTimeout) diff --git a/beacon-chain/sync/rpc_light_client_test.go b/beacon-chain/sync/rpc_light_client_test.go index e8cc016821..b85aa9a1a1 100644 --- a/beacon-chain/sync/rpc_light_client_test.go +++ b/beacon-chain/sync/rpc_light_client_test.go @@ -431,7 +431,7 @@ func TestRPC_LightClientUpdatesByRange(t *testing.T) { for i := 1; i <= 5; i++ { t.Run(version.String(i), func(t *testing.T) { - for j := 0; j < 5; j++ { + for j := range 5 { l := util.NewTestLightClient(t, i, util.WithIncreasedAttestedSlot(uint64(j))) update, err := lightClient.NewLightClientUpdateFromBeaconState(ctx, l.State, l.Block, l.AttestedState, l.AttestedBlock, l.FinalizedBlock) require.NoError(t, err) diff --git a/beacon-chain/sync/rpc_metadata.go b/beacon-chain/sync/rpc_metadata.go index 52b7eefa05..56550f48bf 100644 --- a/beacon-chain/sync/rpc_metadata.go +++ b/beacon-chain/sync/rpc_metadata.go @@ -18,7 +18,7 @@ import ( ) // metaDataHandler reads the incoming metadata RPC request from the peer. -func (s *Service) metaDataHandler(_ context.Context, _ interface{}, stream libp2pcore.Stream) error { +func (s *Service) metaDataHandler(_ context.Context, _ any, stream libp2pcore.Stream) error { SetRPCStreamDeadlines(stream) // Validate the incoming request regarding rate limiting. @@ -161,7 +161,7 @@ func (s *Service) sendMetaDataRequest(ctx context.Context, peerID peer.ID) (meta } // Send the METADATA request to the peer. 
- message := new(interface{}) + message := new(any) stream, err := s.cfg.p2p.Send(ctx, message, topic, peerID) if err != nil { return nil, errors.Wrap(err, "send metadata request") diff --git a/beacon-chain/sync/rpc_metadata_test.go b/beacon-chain/sync/rpc_metadata_test.go index 3c1192a579..f6b8b3453b 100644 --- a/beacon-chain/sync/rpc_metadata_test.go +++ b/beacon-chain/sync/rpc_metadata_test.go @@ -67,7 +67,7 @@ func TestMetaDataRPCHandler_ReceivesMetadata(t *testing.T) { stream1, err := p1.BHost.NewStream(t.Context(), p2.BHost.ID(), pcl) require.NoError(t, err) - assert.NoError(t, r.metaDataHandler(t.Context(), new(interface{}), stream1)) + assert.NoError(t, r.metaDataHandler(t.Context(), new(any), stream1)) if util.WaitTimeout(&wg, 1*time.Second) { t.Fatal("Did not receive stream within 1 sec") @@ -295,7 +295,7 @@ func TestMetadataRPCHandler_SendMetadataRequest(t *testing.T) { wg.Add(1) peer2.BHost.SetStreamHandler(protocolID, func(stream network.Stream) { defer wg.Done() - err := servicePeer2.metaDataHandler(ctx, new(interface{}), stream) + err := servicePeer2.metaDataHandler(ctx, new(any), stream) require.NoError(t, err) }) @@ -368,7 +368,7 @@ func TestMetadataRPCHandler_SendsMetadataQUIC(t *testing.T) { wg.Add(1) p2.BHost.SetStreamHandler(pcl, func(stream network.Stream) { defer wg.Done() - err := r2.metaDataHandler(t.Context(), new(interface{}), stream) + err := r2.metaDataHandler(t.Context(), new(any), stream) assert.NoError(t, err) }) @@ -389,7 +389,7 @@ func TestMetadataRPCHandler_SendsMetadataQUIC(t *testing.T) { wg.Add(1) p2.BHost.SetStreamHandler(pcl, func(stream network.Stream) { defer wg.Done() - assert.NoError(t, r2.metaDataHandler(t.Context(), new(interface{}), stream)) + assert.NoError(t, r2.metaDataHandler(t.Context(), new(any), stream)) }) md, err := r.sendMetaDataRequest(t.Context(), p2.BHost.ID()) diff --git a/beacon-chain/sync/rpc_ping.go b/beacon-chain/sync/rpc_ping.go index 7a4eb51831..01af32bb64 100644 --- a/beacon-chain/sync/rpc_ping.go +++ b/beacon-chain/sync/rpc_ping.go @@ -20,7 +20,7 @@ import ( // If the peer's sequence number is higher than the one stored locally, // a METADATA request is sent to the peer to retrieve and update the latest metadata. // Note: This function is misnamed, as it performs more than just reading a ping message. -func (s *Service) pingHandler(_ context.Context, msg interface{}, stream libp2pcore.Stream) error { +func (s *Service) pingHandler(_ context.Context, msg any, stream libp2pcore.Stream) error { SetRPCStreamDeadlines(stream) // Convert the message to SSW Uint64 type. diff --git a/beacon-chain/sync/rpc_send_request.go b/beacon-chain/sync/rpc_send_request.go index be44e4f881..cbed387b37 100644 --- a/beacon-chain/sync/rpc_send_request.go +++ b/beacon-chain/sync/rpc_send_request.go @@ -485,7 +485,7 @@ func SendDataColumnSidecarsByRangeRequest( } // Build the logs. 
- var columnsLog interface{} = "all" + var columnsLog any = "all" if columnsCount < numberOfColumns { columns := request.Columns slices.Sort(columns) diff --git a/beacon-chain/sync/rpc_send_request_test.go b/beacon-chain/sync/rpc_send_request_test.go index 67c6359556..2325cfae75 100644 --- a/beacon-chain/sync/rpc_send_request_test.go +++ b/beacon-chain/sync/rpc_send_request_test.go @@ -5,6 +5,7 @@ import ( "errors" "fmt" "io" + "slices" "sync" "testing" "time" @@ -51,7 +52,7 @@ func TestSendRequest_SendBeaconBlocksByRangeRequest(t *testing.T) { genesisBlkRoot, err := genesisBlk.Block.HashTreeRoot() require.NoError(t, err) parentRoot := genesisBlkRoot - for i := 0; i < 255; i++ { + for i := range 255 { blk := util.NewBeaconBlock() blk.Block.Slot = primitives.Slot(i) blk.Block.ParentRoot = parentRoot[:] @@ -309,7 +310,7 @@ func TestSendRequest_SendBeaconBlocksByRootRequest(t *testing.T) { knownBlocks := make(map[[32]byte]*ethpb.SignedBeaconBlock) knownRoots := make([][32]byte, 0) - for i := 0; i < 5; i++ { + for range 5 { blk := util.NewBeaconBlock() blkRoot, err := blk.Block.HashTreeRoot() require.NoError(t, err) @@ -635,7 +636,7 @@ func TestSeqBlobValid(t *testing.T) { }{ { name: "all valid", - seq: append(append([]blocks.ROBlob{}, oneBlobs...), twoBlobs...), + seq: slices.Concat(oneBlobs, twoBlobs), }, { name: "idx out of bounds", @@ -660,7 +661,7 @@ func TestSeqBlobValid(t *testing.T) { }, { name: "slots not ascending", - seq: append(append([]blocks.ROBlob{}, twoBlobs...), oops...), + seq: slices.Concat(twoBlobs, oops), err: errChunkResponseSlotNotAsc, errAt: len(twoBlobs), }, @@ -790,7 +791,7 @@ func TestSendBlobsByRangeRequest(t *testing.T) { require.NoError(t, err) prevRoot = bRoot // Send the maximum possible blobs per slot. - for j := 0; j < maxBlobsForSlot; j++ { + for j := range maxBlobsForSlot { b := util.HydrateBlobSidecar(ðpb.BlobSidecar{}) b.SignedBlockHeader = header b.Index = uint64(j) @@ -858,7 +859,7 @@ func TestSendBlobsByRangeRequest(t *testing.T) { require.NoError(t, err) prevRoot = bRoot // Send the maximum possible blobs per slot. - for j := 0; j < maxBlobsForSlot; j++ { + for j := range maxBlobsForSlot { b := util.HydrateBlobSidecar(ðpb.BlobSidecar{}) b.SignedBlockHeader = header b.Index = uint64(j) diff --git a/beacon-chain/sync/rpc_status.go b/beacon-chain/sync/rpc_status.go index fdb031a558..518eb3b956 100644 --- a/beacon-chain/sync/rpc_status.go +++ b/beacon-chain/sync/rpc_status.go @@ -236,7 +236,7 @@ func (s *Service) reValidatePeer(ctx context.Context, id peer.ID) error { // statusRPCHandler reads the incoming Status RPC from the peer and responds with our version of a status message. // This handler will disconnect any peer that does not match our fork version. 
-func (s *Service) statusRPCHandler(ctx context.Context, msg interface{}, stream libp2pcore.Stream) error { +func (s *Service) statusRPCHandler(ctx context.Context, msg any, stream libp2pcore.Stream) error { ctx, cancel := context.WithTimeout(ctx, ttfbTimeout) defer cancel() SetRPCStreamDeadlines(stream) @@ -424,7 +424,7 @@ func (s *Service) buildStatusFromEpoch( return status, nil } -func (s *Service) validateStatusMessage(ctx context.Context, genericMsg interface{}) error { +func (s *Service) validateStatusMessage(ctx context.Context, genericMsg any) error { msg, err := statusV2(genericMsg) if err != nil { return errors.Wrap(err, "status data") @@ -500,7 +500,7 @@ func (s *Service) validateStatusMessage(ctx context.Context, genericMsg interfac return p2ptypes.ErrInvalidEpoch } -func statusV2(msg interface{}) (*pb.StatusV2, error) { +func statusV2(msg any) (*pb.StatusV2, error) { if status, ok := msg.(*pb.StatusV2); ok { return status, nil } diff --git a/beacon-chain/sync/rpc_test.go b/beacon-chain/sync/rpc_test.go index 242c09dc1b..85567d7113 100644 --- a/beacon-chain/sync/rpc_test.go +++ b/beacon-chain/sync/rpc_test.go @@ -59,7 +59,7 @@ func TestRegisterRPC_ReceivesValidMessage(t *testing.T) { var wg sync.WaitGroup wg.Add(1) topic := "/testing/foobar/1" - handler := func(ctx context.Context, msg interface{}, stream libp2pcore.Stream) error { + handler := func(ctx context.Context, msg any, stream libp2pcore.Stream) error { m, ok := msg.(*ethpb.Fork) if !ok { t.Error("Object is not of type *pb.TestSimpleMessage") @@ -95,7 +95,7 @@ func TestRPC_ReceivesInvalidMessage(t *testing.T) { } topic := "/testing/foobar/1" - handler := func(ctx context.Context, msg interface{}, stream libp2pcore.Stream) error { + handler := func(ctx context.Context, msg any, stream libp2pcore.Stream) error { m, ok := msg.(*ethpb.Fork) if !ok { t.Error("Object is not of type *pb.Fork") diff --git a/beacon-chain/sync/service.go b/beacon-chain/sync/service.go index 1b389382e4..b4b74f71d3 100644 --- a/beacon-chain/sync/service.go +++ b/beacon-chain/sync/service.go @@ -212,7 +212,7 @@ func NewService(ctx context.Context, opts ...Option) *Service { r.kzgChan = make(chan *kzgVerifier, 100) // Correctly remove it from our seen pending block map. // The eviction method always assumes that the mutex is held. - r.slotToPendingBlocks.OnEvicted(func(s string, i interface{}) { + r.slotToPendingBlocks.OnEvicted(func(s string, i any) { if !mutexasserts.RWMutexLocked(&r.pendingQueueLock) { log.Errorf("Mutex is not locked during cache eviction of values") // Continue on to allow elements to be properly removed. 
diff --git a/beacon-chain/sync/service_test.go b/beacon-chain/sync/service_test.go index 3bf8583c83..fefc0dd7f2 100644 --- a/beacon-chain/sync/service_test.go +++ b/beacon-chain/sync/service_test.go @@ -381,7 +381,7 @@ func TestService_Stop_ConcurrentGoodbyeMessages(t *testing.T) { testPeers := make([]*p2ptest.TestP2P, numPeers) // Create and connect multiple peers - for i := 0; i < numPeers; i++ { + for i := range numPeers { testPeers[i] = p2ptest.NewTestP2P(t) p1.Connect(testPeers[i]) // Register peer in the peer status @@ -419,7 +419,7 @@ func TestService_Stop_ConcurrentGoodbyeMessages(t *testing.T) { var wg sync.WaitGroup wg.Add(numPeers) - for i := 0; i < numPeers; i++ { + for i := range numPeers { idx := i // capture loop variable testPeers[idx].BHost.SetStreamHandler(pcl, func(stream network.Stream) { defer wg.Done() diff --git a/beacon-chain/sync/slot_aware_cache.go b/beacon-chain/sync/slot_aware_cache.go index d3d2b52a3d..4e55662ede 100644 --- a/beacon-chain/sync/slot_aware_cache.go +++ b/beacon-chain/sync/slot_aware_cache.go @@ -28,14 +28,14 @@ func newSlotAwareCache(size int) *slotAwareCache { } // Get retrieves a value from the cache. -func (c *slotAwareCache) Get(key string) (interface{}, bool) { +func (c *slotAwareCache) Get(key string) (any, bool) { c.mu.RLock() defer c.mu.RUnlock() return c.cache.Get(key) } // Add adds a value to the cache associated with a specific slot. -func (c *slotAwareCache) Add(slot primitives.Slot, key string, value interface{}) { +func (c *slotAwareCache) Add(slot primitives.Slot, key string, value any) { c.mu.Lock() defer c.mu.Unlock() diff --git a/beacon-chain/sync/slot_aware_cache_test.go b/beacon-chain/sync/slot_aware_cache_test.go index d1101486fe..45e75cc4ce 100644 --- a/beacon-chain/sync/slot_aware_cache_test.go +++ b/beacon-chain/sync/slot_aware_cache_test.go @@ -109,7 +109,7 @@ func TestSlotAwareCache(t *testing.T) { cache := newSlotAwareCache(200000) // Large cache to avoid LRU eviction // Add entries for 1005 slots, each with one key - for i := 0; i < 1005; i++ { + for i := range 1005 { slot := primitives.Slot(i) key := fmt.Sprintf("key%d", i) cache.Add(slot, key, fmt.Sprintf("value%d", i)) diff --git a/beacon-chain/sync/subscriber.go b/beacon-chain/sync/subscriber.go index 9b53ae4e57..7882a81cb5 100644 --- a/beacon-chain/sync/subscriber.go +++ b/beacon-chain/sync/subscriber.go @@ -749,7 +749,7 @@ func (s *Service) filterNeededPeers(pids []peer.ID) []peer.ID { wantedSubnets[subnet] = true } - topic := p2p.GossipTypeMapping[reflect.TypeOf(ðpb.Attestation{})] + topic := p2p.GossipTypeMapping[reflect.TypeFor[*ethpb.Attestation]()] // Map of peers in subnets peerMap := make(map[peer.ID]bool) diff --git a/beacon-chain/sync/subscriber_beacon_blocks.go b/beacon-chain/sync/subscriber_beacon_blocks.go index 44fde992f2..533d34b346 100644 --- a/beacon-chain/sync/subscriber_beacon_blocks.go +++ b/beacon-chain/sync/subscriber_beacon_blocks.go @@ -165,7 +165,7 @@ func (s *Service) processBlobSidecarsFromExecution(ctx context.Context, block in // builds corresponding sidecars, save them to the storage, and broadcasts them over P2P if necessary. 
func (s *Service) processDataColumnSidecarsFromExecution(ctx context.Context, source peerdas.ConstructionPopulator) error { key := fmt.Sprintf("%#x", source.Root()) - if _, err, _ := s.columnSidecarsExecSingleFlight.Do(key, func() (interface{}, error) { + if _, err, _ := s.columnSidecarsExecSingleFlight.Do(key, func() (any, error) { const delay = 250 * time.Millisecond secondsPerHalfSlot := time.Duration(params.BeaconConfig().SecondsPerSlot/2) * time.Second diff --git a/beacon-chain/sync/subscriber_test.go b/beacon-chain/sync/subscriber_test.go index 3875dec4ef..9123958bd6 100644 --- a/beacon-chain/sync/subscriber_test.go +++ b/beacon-chain/sync/subscriber_test.go @@ -262,7 +262,7 @@ func TestSubscribe_HandlesPanic(t *testing.T) { nse := params.GetNetworkScheduleEntry(r.cfg.clock.CurrentEpoch()) p.Digest = nse.ForkDigest - topic := p2p.GossipTypeMapping[reflect.TypeOf(&pb.SignedVoluntaryExit{})] + topic := p2p.GossipTypeMapping[reflect.TypeFor[*pb.SignedVoluntaryExit]()] var wg sync.WaitGroup wg.Add(1) diff --git a/beacon-chain/sync/sync_fuzz_test.go b/beacon-chain/sync/sync_fuzz_test.go index db23ebc154..6d11ce41bb 100644 --- a/beacon-chain/sync/sync_fuzz_test.go +++ b/beacon-chain/sync/sync_fuzz_test.go @@ -79,7 +79,7 @@ func FuzzValidateBeaconBlockPubSub_Phase0(f *testing.F) { buf := new(bytes.Buffer) _, err = p.Encoding().EncodeGossip(buf, msg) require.NoError(f, err) - topic := p2p.GossipTypeMapping[reflect.TypeOf(msg)] + topic := p2p.GossipTypeMapping[reflect.TypeFor[*ethpb.SignedBeaconBlock]()] digest, err := r.currentForkDigest() assert.NoError(f, err) topic = r.addDigestToTopic(topic, digest) @@ -163,7 +163,7 @@ func FuzzValidateBeaconBlockPubSub_Altair(f *testing.F) { buf := new(bytes.Buffer) _, err = p.Encoding().EncodeGossip(buf, msg) require.NoError(f, err) - topic := p2p.GossipTypeMapping[reflect.TypeOf(msg)] + topic := p2p.GossipTypeMapping[reflect.TypeFor[*ethpb.SignedBeaconBlock]()] digest, err := r.currentForkDigest() assert.NoError(f, err) topic = r.addDigestToTopic(topic, digest) @@ -247,7 +247,7 @@ func FuzzValidateBeaconBlockPubSub_Bellatrix(f *testing.F) { buf := new(bytes.Buffer) _, err = p.Encoding().EncodeGossip(buf, msg) require.NoError(f, err) - topic := p2p.GossipTypeMapping[reflect.TypeOf(msg)] + topic := p2p.GossipTypeMapping[reflect.TypeFor[*ethpb.SignedBeaconBlock]()] digest, err := r.currentForkDigest() assert.NoError(f, err) topic = r.addDigestToTopic(topic, digest) diff --git a/beacon-chain/sync/validate_aggregate_proof_test.go b/beacon-chain/sync/validate_aggregate_proof_test.go index bdb82a549a..7fa4fdddea 100644 --- a/beacon-chain/sync/validate_aggregate_proof_test.go +++ b/beacon-chain/sync/validate_aggregate_proof_test.go @@ -230,7 +230,7 @@ func TestValidateAggregateAndProof_NoBlock(t *testing.T) { _, err := p.Encoding().EncodeGossip(buf, signedAggregateAndProof) require.NoError(t, err) - topic := p2p.GossipTypeMapping[reflect.TypeOf(signedAggregateAndProof)] + topic := p2p.GossipTypeMapping[reflect.TypeFor[*ethpb.SignedAggregateAttestationAndProof]()] msg := &pubsub.Message{ Message: &pubsubpb.Message{ Data: buf.Bytes(), @@ -300,7 +300,7 @@ func TestValidateAggregateAndProof_NotWithinSlotRange(t *testing.T) { _, err = p.Encoding().EncodeGossip(buf, signedAggregateAndProof) require.NoError(t, err) - topic := p2p.GossipTypeMapping[reflect.TypeOf(signedAggregateAndProof)] + topic := p2p.GossipTypeMapping[reflect.TypeFor[*ethpb.SignedAggregateAttestationAndProof]()] msg := &pubsub.Message{ Message: &pubsubpb.Message{ Data: buf.Bytes(), @@ -382,7 
+382,7 @@ func TestValidateAggregateAndProof_ExistedInPool(t *testing.T) { _, err = p.Encoding().EncodeGossip(buf, signedAggregateAndProof) require.NoError(t, err) - topic := p2p.GossipTypeMapping[reflect.TypeOf(signedAggregateAndProof)] + topic := p2p.GossipTypeMapping[reflect.TypeFor[*ethpb.SignedAggregateAttestationAndProof]()] msg := &pubsub.Message{ Message: &pubsubpb.Message{ Data: buf.Bytes(), @@ -485,7 +485,7 @@ func TestValidateAggregateAndProof_CanValidate(t *testing.T) { _, err = p.Encoding().EncodeGossip(buf, signedAggregateAndProof) require.NoError(t, err) - topic := p2p.GossipTypeMapping[reflect.TypeOf(signedAggregateAndProof)] + topic := p2p.GossipTypeMapping[reflect.TypeFor[*ethpb.SignedAggregateAttestationAndProof]()] d, err := r.currentForkDigest() assert.NoError(t, err) topic = r.addDigestToTopic(topic, d) @@ -588,7 +588,7 @@ func TestVerifyIndexInCommittee_SeenAggregatorEpoch(t *testing.T) { _, err = p.Encoding().EncodeGossip(buf, signedAggregateAndProof) require.NoError(t, err) - topic := p2p.GossipTypeMapping[reflect.TypeOf(signedAggregateAndProof)] + topic := p2p.GossipTypeMapping[reflect.TypeFor[*ethpb.SignedAggregateAttestationAndProof]()] d, err := r.currentForkDigest() assert.NoError(t, err) topic = r.addDigestToTopic(topic, d) @@ -700,7 +700,7 @@ func TestValidateAggregateAndProof_BadBlock(t *testing.T) { _, err = p.Encoding().EncodeGossip(buf, signedAggregateAndProof) require.NoError(t, err) - topic := p2p.GossipTypeMapping[reflect.TypeOf(signedAggregateAndProof)] + topic := p2p.GossipTypeMapping[reflect.TypeFor[*ethpb.SignedAggregateAttestationAndProof]()] msg := &pubsub.Message{ Message: &pubsubpb.Message{ Data: buf.Bytes(), @@ -790,7 +790,7 @@ func TestValidateAggregateAndProof_RejectWhenAttEpochDoesntEqualTargetEpoch(t *t _, err = p.Encoding().EncodeGossip(buf, signedAggregateAndProof) require.NoError(t, err) - topic := p2p.GossipTypeMapping[reflect.TypeOf(signedAggregateAndProof)] + topic := p2p.GossipTypeMapping[reflect.TypeFor[*ethpb.SignedAggregateAttestationAndProof]()] msg := &pubsub.Message{ Message: &pubsubpb.Message{ Data: buf.Bytes(), diff --git a/beacon-chain/sync/validate_attester_slashing_test.go b/beacon-chain/sync/validate_attester_slashing_test.go index 48a8de8e88..64839764bb 100644 --- a/beacon-chain/sync/validate_attester_slashing_test.go +++ b/beacon-chain/sync/validate_attester_slashing_test.go @@ -98,7 +98,7 @@ func TestValidateAttesterSlashing_ValidSlashing(t *testing.T) { _, err := p.Encoding().EncodeGossip(buf, slashing) require.NoError(t, err) - topic := p2p.GossipTypeMapping[reflect.TypeOf(slashing)] + topic := p2p.GossipTypeMapping[reflect.TypeFor[*ethpb.AttesterSlashing]()] d, err := r.currentForkDigest() assert.NoError(t, err) topic = r.addDigestToTopic(topic, d) @@ -143,7 +143,7 @@ func TestValidateAttesterSlashing_ValidOldSlashing(t *testing.T) { _, err := p.Encoding().EncodeGossip(buf, slashing) require.NoError(t, err) - topic := p2p.GossipTypeMapping[reflect.TypeOf(slashing)] + topic := p2p.GossipTypeMapping[reflect.TypeFor[*ethpb.AttesterSlashing]()] d, err := r.currentForkDigest() assert.NoError(t, err) topic = r.addDigestToTopic(topic, d) @@ -188,7 +188,7 @@ func TestValidateAttesterSlashing_InvalidSlashing_WithdrawableEpoch(t *testing.T _, err := p.Encoding().EncodeGossip(buf, slashing) require.NoError(t, err) - topic := p2p.GossipTypeMapping[reflect.TypeOf(slashing)] + topic := p2p.GossipTypeMapping[reflect.TypeFor[*ethpb.AttesterSlashing]()] d, err := r.currentForkDigest() assert.NoError(t, err) topic = 
r.addDigestToTopic(topic, d) @@ -237,7 +237,7 @@ func TestValidateAttesterSlashing_CanFilter(t *testing.T) { r.setAttesterSlashingIndicesSeen([]uint64{1, 2, 3, 4}, []uint64{3, 4, 5, 6}) // The below attestations should be filtered hence bad signature is ok. - topic := p2p.GossipTypeMapping[reflect.TypeOf(ðpb.AttesterSlashing{})] + topic := p2p.GossipTypeMapping[reflect.TypeFor[*ethpb.AttesterSlashing]()] d, err := r.currentForkDigest() assert.NoError(t, err) topic = r.addDigestToTopic(topic, d) @@ -308,7 +308,7 @@ func TestValidateAttesterSlashing_ContextTimeout(t *testing.T) { _, err := p.Encoding().EncodeGossip(buf, slashing) require.NoError(t, err) - topic := p2p.GossipTypeMapping[reflect.TypeOf(slashing)] + topic := p2p.GossipTypeMapping[reflect.TypeFor[*ethpb.AttesterSlashing]()] msg := &pubsub.Message{ Message: &pubsubpb.Message{ Data: buf.Bytes(), @@ -339,7 +339,7 @@ func TestValidateAttesterSlashing_Syncing(t *testing.T) { _, err := p.Encoding().EncodeGossip(buf, slashing) require.NoError(t, err) - topic := p2p.GossipTypeMapping[reflect.TypeOf(slashing)] + topic := p2p.GossipTypeMapping[reflect.TypeFor[*ethpb.AttesterSlashing]()] msg := &pubsub.Message{ Message: &pubsubpb.Message{ Data: buf.Bytes(), diff --git a/beacon-chain/sync/validate_beacon_attestation.go b/beacon-chain/sync/validate_beacon_attestation.go index 2384bd7773..e8daa29b62 100644 --- a/beacon-chain/sync/validate_beacon_attestation.go +++ b/beacon-chain/sync/validate_beacon_attestation.go @@ -156,7 +156,7 @@ func (s *Service) validateCommitteeIndexBeaconAttestation( var ( attForValidation eth.Att // what we'll pass to further validation eventType feed.EventType - eventData interface{} + eventData any ) if att.Version() >= version.Electra { @@ -240,7 +240,7 @@ func (s *Service) validateUnaggregatedAttTopic(ctx context.Context, a eth.Att, b return result, err } subnet := helpers.ComputeSubnetForAttestation(valCount, a) - format := p2p.GossipTypeMapping[reflect.TypeOf(ð.Attestation{})] + format := p2p.GossipTypeMapping[reflect.TypeFor[*eth.Attestation]()] digest, err := s.currentForkDigest() if err != nil { tracing.AnnotateError(span, err) diff --git a/beacon-chain/sync/validate_beacon_blocks_test.go b/beacon-chain/sync/validate_beacon_blocks_test.go index 00db86133c..b01e965451 100644 --- a/beacon-chain/sync/validate_beacon_blocks_test.go +++ b/beacon-chain/sync/validate_beacon_blocks_test.go @@ -94,7 +94,7 @@ func TestValidateBeaconBlockPubSub_InvalidSignature(t *testing.T) { buf := new(bytes.Buffer) _, err = p.Encoding().EncodeGossip(buf, msg) require.NoError(t, err) - topic := p2p.GossipTypeMapping[reflect.TypeOf(msg)] + topic := p2p.GossipTypeMapping[reflect.TypeFor[*ethpb.SignedBeaconBlock]()] digest, err := r.currentForkDigest() assert.NoError(t, err) topic = r.addDigestToTopic(topic, digest) @@ -166,7 +166,7 @@ func TestValidateBeaconBlockPubSub_InvalidSignature_MarksBlockAsBad(t *testing.T buf := new(bytes.Buffer) _, err = p.Encoding().EncodeGossip(buf, msg) require.NoError(t, err) - topic := p2p.GossipTypeMapping[reflect.TypeOf(msg)] + topic := p2p.GossipTypeMapping[reflect.TypeFor[*ethpb.SignedBeaconBlock]()] digest, err := r.currentForkDigest() assert.NoError(t, err) topic = r.addDigestToTopic(topic, digest) @@ -213,7 +213,7 @@ func TestValidateBeaconBlockPubSub_BlockAlreadyPresentInDB(t *testing.T) { _, err := p.Encoding().EncodeGossip(buf, msg) require.NoError(t, err) - topic := p2p.GossipTypeMapping[reflect.TypeOf(msg)] + topic := p2p.GossipTypeMapping[reflect.TypeFor[*ethpb.SignedBeaconBlock]()] digest, 
err := r.currentForkDigest() assert.NoError(t, err) topic = r.addDigestToTopic(topic, digest) @@ -276,7 +276,7 @@ func TestValidateBeaconBlockPubSub_CanRecoverStateSummary(t *testing.T) { buf := new(bytes.Buffer) _, err = p.Encoding().EncodeGossip(buf, msg) require.NoError(t, err) - topic := p2p.GossipTypeMapping[reflect.TypeOf(msg)] + topic := p2p.GossipTypeMapping[reflect.TypeFor[*ethpb.SignedBeaconBlock]()] digest, err := r.currentForkDigest() assert.NoError(t, err) topic = r.addDigestToTopic(topic, digest) @@ -342,7 +342,7 @@ func TestValidateBeaconBlockPubSub_IsInCache(t *testing.T) { buf := new(bytes.Buffer) _, err = p.Encoding().EncodeGossip(buf, msg) require.NoError(t, err) - topic := p2p.GossipTypeMapping[reflect.TypeOf(msg)] + topic := p2p.GossipTypeMapping[reflect.TypeFor[*ethpb.SignedBeaconBlock]()] digest, err := r.currentForkDigest() assert.NoError(t, err) topic = r.addDigestToTopic(topic, digest) @@ -408,7 +408,7 @@ func TestValidateBeaconBlockPubSub_ValidProposerSignature(t *testing.T) { buf := new(bytes.Buffer) _, err = p.Encoding().EncodeGossip(buf, msg) require.NoError(t, err) - topic := p2p.GossipTypeMapping[reflect.TypeOf(msg)] + topic := p2p.GossipTypeMapping[reflect.TypeFor[*ethpb.SignedBeaconBlock]()] digest, err := r.currentForkDigest() assert.NoError(t, err) topic = r.addDigestToTopic(topic, digest) @@ -477,7 +477,7 @@ func TestValidateBeaconBlockPubSub_WithLookahead(t *testing.T) { buf := new(bytes.Buffer) _, err = p.Encoding().EncodeGossip(buf, msg) require.NoError(t, err) - topic := p2p.GossipTypeMapping[reflect.TypeOf(msg)] + topic := p2p.GossipTypeMapping[reflect.TypeFor[*ethpb.SignedBeaconBlock]()] digest, err := r.currentForkDigest() assert.NoError(t, err) topic = r.addDigestToTopic(topic, digest) @@ -545,7 +545,7 @@ func TestValidateBeaconBlockPubSub_AdvanceEpochsForState(t *testing.T) { buf := new(bytes.Buffer) _, err = p.Encoding().EncodeGossip(buf, msg) require.NoError(t, err) - topic := p2p.GossipTypeMapping[reflect.TypeOf(msg)] + topic := p2p.GossipTypeMapping[reflect.TypeFor[*ethpb.SignedBeaconBlock]()] digest, err := r.currentForkDigest() assert.NoError(t, err) topic = r.addDigestToTopic(topic, digest) @@ -591,7 +591,7 @@ func TestValidateBeaconBlockPubSub_Syncing(t *testing.T) { buf := new(bytes.Buffer) _, err = p.Encoding().EncodeGossip(buf, msg) require.NoError(t, err) - topic := p2p.GossipTypeMapping[reflect.TypeOf(msg)] + topic := p2p.GossipTypeMapping[reflect.TypeFor[*ethpb.SignedBeaconBlock]()] m := &pubsub.Message{ Message: &pubsubpb.Message{ Data: buf.Bytes(), @@ -654,7 +654,7 @@ func TestValidateBeaconBlockPubSub_IgnoreAndQueueBlocksFromNearFuture(t *testing buf := new(bytes.Buffer) _, err = p.Encoding().EncodeGossip(buf, msg) require.NoError(t, err) - topic := p2p.GossipTypeMapping[reflect.TypeOf(msg)] + topic := p2p.GossipTypeMapping[reflect.TypeFor[*ethpb.SignedBeaconBlock]()] digest, err := r.currentForkDigest() assert.NoError(t, err) topic = r.addDigestToTopic(topic, digest) @@ -705,7 +705,7 @@ func TestValidateBeaconBlockPubSub_RejectBlocksFromFuture(t *testing.T) { buf := new(bytes.Buffer) _, err = p.Encoding().EncodeGossip(buf, msg) require.NoError(t, err) - topic := p2p.GossipTypeMapping[reflect.TypeOf(msg)] + topic := p2p.GossipTypeMapping[reflect.TypeFor[*ethpb.SignedBeaconBlock]()] digest, err := r.currentForkDigest() assert.NoError(t, err) topic = r.addDigestToTopic(topic, digest) @@ -756,7 +756,7 @@ func TestValidateBeaconBlockPubSub_RejectBlocksFromThePast(t *testing.T) { buf := new(bytes.Buffer) _, err = 
p.Encoding().EncodeGossip(buf, msg) require.NoError(t, err) - topic := p2p.GossipTypeMapping[reflect.TypeOf(msg)] + topic := p2p.GossipTypeMapping[reflect.TypeFor[*ethpb.SignedBeaconBlock]()] digest, err := r.currentForkDigest() assert.NoError(t, err) topic = r.addDigestToTopic(topic, digest) @@ -835,7 +835,7 @@ func TestValidateBeaconBlockPubSub_SeenProposerSlot(t *testing.T) { buf := new(bytes.Buffer) _, err = p.Encoding().EncodeGossip(buf, msgClone) require.NoError(t, err) - topic := p2p.GossipTypeMapping[reflect.TypeOf(msgClone)] + topic := p2p.GossipTypeMapping[reflect.TypeFor[*ethpb.SignedBeaconBlock]()] digest, err := r.currentForkDigest() assert.NoError(t, err) topic = r.addDigestToTopic(topic, digest) @@ -895,7 +895,7 @@ func TestValidateBeaconBlockPubSub_FilterByFinalizedEpoch(t *testing.T) { require.NoError(t, err) digest, err := signing.ComputeForkDigest(params.BeaconConfig().GenesisForkVersion, params.BeaconConfig().GenesisValidatorsRoot[:]) assert.NoError(t, err) - topic := fmt.Sprintf(p2p.GossipTypeMapping[reflect.TypeOf(b)], digest) + topic := fmt.Sprintf(p2p.GossipTypeMapping[reflect.TypeFor[*ethpb.SignedBeaconBlock]()], digest) m := &pubsub.Message{ Message: &pubsubpb.Message{ Data: buf.Bytes(), @@ -976,7 +976,7 @@ func TestValidateBeaconBlockPubSub_ParentNotFinalizedDescendant(t *testing.T) { buf := new(bytes.Buffer) _, err = p.Encoding().EncodeGossip(buf, msg) require.NoError(t, err) - topic := p2p.GossipTypeMapping[reflect.TypeOf(msg)] + topic := p2p.GossipTypeMapping[reflect.TypeFor[*ethpb.SignedBeaconBlock]()] digest, err := r.currentForkDigest() assert.NoError(t, err) topic = r.addDigestToTopic(topic, digest) @@ -1042,7 +1042,7 @@ func TestValidateBeaconBlockPubSub_InvalidParentBlock(t *testing.T) { buf := new(bytes.Buffer) _, err = p.Encoding().EncodeGossip(buf, msg) require.NoError(t, err) - topic := p2p.GossipTypeMapping[reflect.TypeOf(msg)] + topic := p2p.GossipTypeMapping[reflect.TypeFor[*ethpb.SignedBeaconBlock]()] digest, err := r.currentForkDigest() assert.NoError(t, err) topic = r.addDigestToTopic(topic, digest) @@ -1136,7 +1136,7 @@ func TestValidateBeaconBlockPubSub_InsertValidPendingBlock(t *testing.T) { buf := new(bytes.Buffer) _, err = p.Encoding().EncodeGossip(buf, msg) require.NoError(t, err) - topic := p2p.GossipTypeMapping[reflect.TypeOf(msg)] + topic := p2p.GossipTypeMapping[reflect.TypeFor[*ethpb.SignedBeaconBlock]()] digest, err := r.currentForkDigest() assert.NoError(t, err) topic = r.addDigestToTopic(topic, digest) @@ -1221,7 +1221,7 @@ func TestValidateBeaconBlockPubSub_RejectBlocksFromBadParent(t *testing.T) { buf := new(bytes.Buffer) _, err = p.Encoding().EncodeGossip(buf, msg) require.NoError(t, err) - topic := p2p.GossipTypeMapping[reflect.TypeOf(msg)] + topic := p2p.GossipTypeMapping[reflect.TypeFor[*ethpb.SignedBeaconBlock]()] digest, err := r.currentForkDigest() assert.NoError(t, err) topic = r.addDigestToTopic(topic, digest) @@ -1326,7 +1326,7 @@ func TestValidateBeaconBlockPubSub_ValidExecutionPayload(t *testing.T) { buf := new(bytes.Buffer) _, err = p.Encoding().EncodeGossip(buf, msg) require.NoError(t, err) - topic := p2p.GossipTypeMapping[reflect.TypeOf(msg)] + topic := p2p.GossipTypeMapping[reflect.TypeFor[*ethpb.SignedBeaconBlockBellatrix]()] genesisValidatorsRoot := r.cfg.clock.GenesisValidatorsRoot() BellatrixDigest, err := signing.ComputeForkDigest(params.BeaconConfig().BellatrixForkVersion, genesisValidatorsRoot[:]) require.NoError(t, err) @@ -1398,7 +1398,7 @@ func TestValidateBeaconBlockPubSub_InvalidPayloadTimestamp(t 
*testing.T) { buf := new(bytes.Buffer) _, err = p.Encoding().EncodeGossip(buf, msg) require.NoError(t, err) - topic := p2p.GossipTypeMapping[reflect.TypeOf(msg)] + topic := p2p.GossipTypeMapping[reflect.TypeFor[*ethpb.SignedBeaconBlockBellatrix]()] genesisValidatorsRoot := r.cfg.clock.GenesisValidatorsRoot() BellatrixDigest, err := signing.ComputeForkDigest(params.BeaconConfig().BellatrixForkVersion, genesisValidatorsRoot[:]) assert.NoError(t, err) @@ -1564,7 +1564,7 @@ func Test_validateBeaconBlockProcessingWhenParentIsOptimistic(t *testing.T) { buf := new(bytes.Buffer) _, err = p.Encoding().EncodeGossip(buf, msg) require.NoError(t, err) - topic := p2p.GossipTypeMapping[reflect.TypeOf(msg)] + topic := p2p.GossipTypeMapping[reflect.TypeFor[*ethpb.SignedBeaconBlockBellatrix]()] genesisValidatorsRoot := r.cfg.clock.GenesisValidatorsRoot() BellatrixDigest, err := signing.ComputeForkDigest(params.BeaconConfig().BellatrixForkVersion, genesisValidatorsRoot[:]) require.NoError(t, err) diff --git a/beacon-chain/sync/validate_blob_test.go b/beacon-chain/sync/validate_blob_test.go index db1ccccbcd..aa8f98ceb2 100644 --- a/beacon-chain/sync/validate_blob_test.go +++ b/beacon-chain/sync/validate_blob_test.go @@ -67,7 +67,7 @@ func TestValidateBlob_InvalidMessageType(t *testing.T) { _, err := p.Encoding().EncodeGossip(buf, msg) require.NoError(t, err) - topic := p2p.GossipTypeMapping[reflect.TypeOf(msg)] + topic := p2p.GossipTypeMapping[reflect.TypeFor[*eth.SignedBeaconBlock]()] digest, err := s.currentForkDigest() require.NoError(t, err) topic = s.addDigestToTopic(topic, digest) @@ -126,7 +126,7 @@ func TestValidateBlob_AlreadySeenInCache(t *testing.T) { _, err = p.Encoding().EncodeGossip(buf, b) require.NoError(t, err) - topic := p2p.GossipTypeMapping[reflect.TypeOf(b)] + topic := p2p.GossipTypeMapping[reflect.TypeFor[*eth.BlobSidecar]()] digest, err := s.currentForkDigest() require.NoError(t, err) topic = s.addDigestAndIndexToTopic(topic, digest, 0) @@ -156,7 +156,7 @@ func TestValidateBlob_InvalidTopicIndex(t *testing.T) { _, err := p.Encoding().EncodeGossip(buf, msg) require.NoError(t, err) - topic := p2p.GossipTypeMapping[reflect.TypeOf(msg)] + topic := p2p.GossipTypeMapping[reflect.TypeFor[*eth.BlobSidecar]()] digest, err := s.currentForkDigest() require.NoError(t, err) topic = s.addDigestAndIndexToTopic(topic, digest, 1) @@ -271,7 +271,7 @@ func TestValidateBlob_ErrorPathsWithMock(t *testing.T) { _, err := p.Encoding().EncodeGossip(buf, msg) require.NoError(t, err) - topic := p2p.GossipTypeMapping[reflect.TypeOf(msg)] + topic := p2p.GossipTypeMapping[reflect.TypeFor[*eth.BlobSidecar]()] digest, err := s.currentForkDigest() require.NoError(t, err) topic = s.addDigestAndIndexToTopic(topic, digest, 0) diff --git a/beacon-chain/sync/validate_proposer_slashing_test.go b/beacon-chain/sync/validate_proposer_slashing_test.go index 0d794435b0..70d0827874 100644 --- a/beacon-chain/sync/validate_proposer_slashing_test.go +++ b/beacon-chain/sync/validate_proposer_slashing_test.go @@ -31,7 +31,7 @@ import ( func setupValidProposerSlashing(t *testing.T) (*ethpb.ProposerSlashing, state.BeaconState) { validators := make([]*ethpb.Validator, 100) - for i := 0; i < len(validators); i++ { + for i := range validators { validators[i] = ðpb.Validator{ EffectiveBalance: params.BeaconConfig().MaxEffectiveBalance, Slashed: false, @@ -41,7 +41,7 @@ func setupValidProposerSlashing(t *testing.T) (*ethpb.ProposerSlashing, state.Be } } validatorBalances := make([]uint64, len(validators)) - for i := 0; i < 
len(validatorBalances); i++ { + for i := range validatorBalances { validatorBalances[i] = params.BeaconConfig().MaxEffectiveBalance } @@ -129,7 +129,7 @@ func TestValidateProposerSlashing_ValidSlashing(t *testing.T) { buf := new(bytes.Buffer) _, err := p.Encoding().EncodeGossip(buf, slashing) require.NoError(t, err) - topic := p2p.GossipTypeMapping[reflect.TypeOf(slashing)] + topic := p2p.GossipTypeMapping[reflect.TypeFor[*ethpb.ProposerSlashing]()] d, err := r.currentForkDigest() assert.NoError(t, err) topic = r.addDigestToTopic(topic, d) @@ -172,7 +172,7 @@ func TestValidateProposerSlashing_ValidOldSlashing(t *testing.T) { buf := new(bytes.Buffer) _, err = p.Encoding().EncodeGossip(buf, slashing) require.NoError(t, err) - topic := p2p.GossipTypeMapping[reflect.TypeOf(slashing)] + topic := p2p.GossipTypeMapping[reflect.TypeFor[*ethpb.ProposerSlashing]()] d, err := r.currentForkDigest() assert.NoError(t, err) topic = r.addDigestToTopic(topic, d) @@ -213,7 +213,7 @@ func TestValidateProposerSlashing_ContextTimeout(t *testing.T) { buf := new(bytes.Buffer) _, err = p.Encoding().EncodeGossip(buf, slashing) require.NoError(t, err) - topic := p2p.GossipTypeMapping[reflect.TypeOf(slashing)] + topic := p2p.GossipTypeMapping[reflect.TypeFor[*ethpb.ProposerSlashing]()] m := &pubsub.Message{ Message: &pubsubpb.Message{ Data: buf.Bytes(), @@ -243,7 +243,7 @@ func TestValidateProposerSlashing_Syncing(t *testing.T) { buf := new(bytes.Buffer) _, err := p.Encoding().EncodeGossip(buf, slashing) require.NoError(t, err) - topic := p2p.GossipTypeMapping[reflect.TypeOf(slashing)] + topic := p2p.GossipTypeMapping[reflect.TypeFor[*ethpb.ProposerSlashing]()] m := &pubsub.Message{ Message: &pubsubpb.Message{ Data: buf.Bytes(), diff --git a/beacon-chain/sync/validate_sync_committee_message.go b/beacon-chain/sync/validate_sync_committee_message.go index 9c0ba0dff6..a8990ccb86 100644 --- a/beacon-chain/sync/validate_sync_committee_message.go +++ b/beacon-chain/sync/validate_sync_committee_message.go @@ -185,7 +185,7 @@ func (s *Service) rejectIncorrectSyncCommittee( return pubsub.ValidationIgnore, err } - format := p2p.GossipTypeMapping[reflect.TypeOf(&ethpb.SyncCommitteeMessage{})] + format := p2p.GossipTypeMapping[reflect.TypeFor[*ethpb.SyncCommitteeMessage]()] // Validate that the validator is in the correct committee.
subCommitteeSize := params.BeaconConfig().SyncCommitteeSize / params.BeaconConfig().SyncCommitteeSubnetCount for _, idx := range committeeIndices { diff --git a/beacon-chain/sync/validate_sync_committee_message_test.go b/beacon-chain/sync/validate_sync_committee_message_test.go index c0dbafb499..b9ebe73247 100644 --- a/beacon-chain/sync/validate_sync_committee_message_test.go +++ b/beacon-chain/sync/validate_sync_committee_message_test.go @@ -425,7 +425,7 @@ func TestService_ValidateSyncCommitteeMessage(t *testing.T) { ReceivedFrom: "", ValidatorData: nil, } - for i := 0; i < 10; i++ { + for range 10 { if !svc.chainIsStarted() { time.Sleep(100 * time.Millisecond) } @@ -522,7 +522,7 @@ func TestService_rejectIncorrectSyncCommittee(t *testing.T) { }, committeeIndices: []primitives.CommitteeIndex{0}, setupTopic: func(s *Service) string { - format := p2p.GossipTypeMapping[reflect.TypeOf(&ethpb.SyncCommitteeMessage{})] + format := p2p.GossipTypeMapping[reflect.TypeFor[*ethpb.SyncCommitteeMessage]()] digest, err := s.currentForkDigest() require.NoError(t, err) diff --git a/beacon-chain/sync/validate_sync_contribution_proof_test.go b/beacon-chain/sync/validate_sync_contribution_proof_test.go index 33c6ad0e12..7e01b7f7b8 100644 --- a/beacon-chain/sync/validate_sync_contribution_proof_test.go +++ b/beacon-chain/sync/validate_sync_contribution_proof_test.go @@ -872,7 +872,7 @@ func TestService_ValidateSyncContributionAndProof(t *testing.T) { // a lot happens in the chain service after SetClock is called, // give it a moment before calling internal methods that would typically // only execute after waitFor - for i := 0; i < 10; i++ { + for range 10 { if !svc.chainIsStarted() { time.Sleep(100 * time.Millisecond) } diff --git a/beacon-chain/sync/validate_voluntary_exit_test.go b/beacon-chain/sync/validate_voluntary_exit_test.go index 55cc06f1f5..1b6b15efc4 100644 --- a/beacon-chain/sync/validate_voluntary_exit_test.go +++ b/beacon-chain/sync/validate_voluntary_exit_test.go @@ -101,7 +101,7 @@ func TestValidateVoluntaryExit_ValidExit(t *testing.T) { buf := new(bytes.Buffer) _, err := p.Encoding().EncodeGossip(buf, exit) require.NoError(t, err) - topic := p2p.GossipTypeMapping[reflect.TypeOf(exit)] + topic := p2p.GossipTypeMapping[reflect.TypeFor[*ethpb.SignedVoluntaryExit]()] d, err := r.currentForkDigest() assert.NoError(t, err) topic = r.addDigestToTopic(topic, d) @@ -158,7 +158,7 @@ func TestValidateVoluntaryExit_InvalidExitSlot(t *testing.T) { buf := new(bytes.Buffer) _, err := p.Encoding().EncodeGossip(buf, exit) require.NoError(t, err) - topic := p2p.GossipTypeMapping[reflect.TypeOf(exit)] + topic := p2p.GossipTypeMapping[reflect.TypeFor[*ethpb.SignedVoluntaryExit]()] m := &pubsub.Message{ Message: &pubsubpb.Message{ Data: buf.Bytes(), @@ -189,7 +189,7 @@ func TestValidateVoluntaryExit_ValidExit_Syncing(t *testing.T) { buf := new(bytes.Buffer) _, err := p.Encoding().EncodeGossip(buf, exit) require.NoError(t, err) - topic := p2p.GossipTypeMapping[reflect.TypeOf(exit)] + topic := p2p.GossipTypeMapping[reflect.TypeFor[*ethpb.SignedVoluntaryExit]()] m := &pubsub.Message{ Message: &pubsubpb.Message{ Data: buf.Bytes(), diff --git a/beacon-chain/verification/blob_test.go b/beacon-chain/verification/blob_test.go index 88f8fe8790..441dda9a54 100644 --- a/beacon-chain/verification/blob_test.go +++ b/beacon-chain/verification/blob_test.go @@ -694,10 +694,7 @@ func sbrForValOverride(idx primitives.ValidatorIndex, val *ethpb.Validator) *moc func sbrForValOverrideWithT(t testing.TB, idx primitives.ValidatorIndex,
val *ethpb.Validator) *mockStateByRooter { return &mockStateByRooter{sbr: func(_ context.Context, root [32]byte) (state.BeaconState, error) { // Use a real deterministic state so that helpers.BeaconProposerIndexAtSlot works correctly - numValidators := uint64(idx + 1) - if numValidators < 64 { - numValidators = 64 - } + numValidators := max(uint64(idx+1), 64) var st state.BeaconState var err error @@ -769,10 +766,7 @@ func (v *validxStateOverride) Validators() []*ethpb.Validator { } } // Ensure we have at least 64 validators for a valid beacon state - numValidators := maxIdx + 1 - if numValidators < 64 { - numValidators = 64 - } + numValidators := max(maxIdx+1, 64) validators := make([]*ethpb.Validator, numValidators) for i := range validators { if val, ok := v.vals[primitives.ValidatorIndex(i)]; ok { diff --git a/cache/lru/lru_wrpr.go b/cache/lru/lru_wrpr.go index 5f261a2384..2cf965ef0e 100644 --- a/cache/lru/lru_wrpr.go +++ b/cache/lru/lru_wrpr.go @@ -17,7 +17,7 @@ func New(size int) *lru.Cache { // NewWithEvict constructs a fixed size cache with the given eviction // callback. -func NewWithEvict(size int, onEvicted func(key interface{}, value interface{})) *lru.Cache { +func NewWithEvict(size int, onEvicted func(key any, value any)) *lru.Cache { cache, err := lru.NewWithEvict(size, onEvicted) if err != nil { panic(fmt.Errorf("lru new with evict failed: %w", err)) // lint:nopanic -- This should never panic. diff --git a/cache/lru/lru_wrpr_test.go b/cache/lru/lru_wrpr_test.go index 7e484e746d..a1ce199fa7 100644 --- a/cache/lru/lru_wrpr_test.go +++ b/cache/lru/lru_wrpr_test.go @@ -23,15 +23,15 @@ func TestNew_ZeroOrNegativeSize(t *testing.T) { func TestNewWithEvict(t *testing.T) { assert.NotPanics(t, func() { - NewWithEvict(10, func(key interface{}, value interface{}) {}) + NewWithEvict(10, func(key any, value any) {}) }) } func TestNewWithEvict_ZeroOrNegativeSize(t *testing.T) { assert.Panics(t, func() { - NewWithEvict(0, func(key interface{}, value interface{}) {}) + NewWithEvict(0, func(key any, value any) {}) }) assert.Panics(t, func() { - NewWithEvict(-1, func(key interface{}, value interface{}) {}) + NewWithEvict(-1, func(key any, value any) {}) }) } diff --git a/cache/nonblocking/lru.go b/cache/nonblocking/lru.go index e37f1022e7..79e5856a55 100644 --- a/cache/nonblocking/lru.go +++ b/cache/nonblocking/lru.go @@ -100,11 +100,8 @@ func (c *LRU[K, V]) Len() int { // Resize changes the cache size. 
func (c *LRU[K, V]) Resize(size int) (evicted int) { - diff := c.Len() - size - if diff < 0 { - diff = 0 - } - for i := 0; i < diff; i++ { + diff := max(c.Len()-size, 0) + for range diff { c.removeOldest() } c.size = size diff --git a/cache/nonblocking/lru_test.go b/cache/nonblocking/lru_test.go index f909bf04c8..b5f56a9fd1 100644 --- a/cache/nonblocking/lru_test.go +++ b/cache/nonblocking/lru_test.go @@ -18,7 +18,7 @@ func TestLRU_Concurrency(t *testing.T) { } ctx, cancel := context.WithTimeout(t.Context(), time.Second*2) defer cancel() - for i := 0; i < 100; i++ { + for i := range 100 { go func(j int) { for { if ctx.Err() != nil { @@ -43,7 +43,7 @@ func TestLRU_Eviction(t *testing.T) { if err != nil { t.Fatalf("err: %v", err) } - for i := 0; i < 20; i++ { + for i := range 20 { cache.Add(i, i) cache.Get(i) } diff --git a/changelog/pvl-modernize.md b/changelog/pvl-modernize.md new file mode 100644 index 0000000000..029ccdd1cd --- /dev/null +++ b/changelog/pvl-modernize.md @@ -0,0 +1,4 @@ +### Ignored + +- Updated golang.org/x/tools +- Introduced modernize static analyzers to nogo diff --git a/cmd/beacon-chain/flags/api_module.go b/cmd/beacon-chain/flags/api_module.go index 31dfe70765..2afad7f5d1 100644 --- a/cmd/beacon-chain/flags/api_module.go +++ b/cmd/beacon-chain/flags/api_module.go @@ -14,7 +14,7 @@ func EnableHTTPEthAPI(httpModules string) bool { } func enableAPI(httpModules, api string) bool { - for _, m := range strings.Split(httpModules, ",") { + for m := range strings.SplitSeq(httpModules, ",") { if strings.EqualFold(m, api) { return true } diff --git a/cmd/beacon-chain/storage/options.go b/cmd/beacon-chain/storage/options.go index a1134d444c..416ea30206 100644 --- a/cmd/beacon-chain/storage/options.go +++ b/cmd/beacon-chain/storage/options.go @@ -146,10 +146,8 @@ func detectLayout(dir string, c stringFlagGetter) (string, error) { } return "", errors.Wrap(err, "reading blob storage directory") } - for _, entry := range entries { - if filesystem.IsBlockRootDir(entry) { - return filesystem.LayoutNameFlat, nil - } + if slices.ContainsFunc(entries, filesystem.IsBlockRootDir) { + return filesystem.LayoutNameFlat, nil } return filesystem.LayoutNameByEpoch, nil } diff --git a/cmd/beacon-chain/usage.go b/cmd/beacon-chain/usage.go index af8b89400f..97c05c1efe 100644 --- a/cmd/beacon-chain/usage.go +++ b/cmd/beacon-chain/usage.go @@ -235,12 +235,12 @@ func init() { cli.AppHelpTemplate = appHelpTemplate type helpData struct { - App interface{} + App any FlagGroups []flagGroup } originalHelpPrinter := cli.HelpPrinter - cli.HelpPrinter = func(w io.Writer, tmpl string, data interface{}) { + cli.HelpPrinter = func(w io.Writer, tmpl string, data any) { if tmpl == appHelpTemplate { for _, group := range appHelpFlagGroups { sort.Sort(cli.FlagsByName(group.Flags)) diff --git a/cmd/client-stats/usage.go b/cmd/client-stats/usage.go index eefb511f71..062c5cda54 100644 --- a/cmd/client-stats/usage.go +++ b/cmd/client-stats/usage.go @@ -63,12 +63,12 @@ func init() { cli.AppHelpTemplate = appHelpTemplate type helpData struct { - App interface{} + App any FlagGroups []flagGroup } originalHelpPrinter := cli.HelpPrinter - cli.HelpPrinter = func(w io.Writer, tmpl string, data interface{}) { + cli.HelpPrinter = func(w io.Writer, tmpl string, data any) { if tmpl == appHelpTemplate { for _, group := range appHelpFlagGroups { sort.Sort(cli.FlagsByName(group.Flags)) diff --git a/cmd/prysmctl/db/span.go b/cmd/prysmctl/db/span.go index a855ea7043..87c84f7a65 100644 --- a/cmd/prysmctl/db/span.go +++ 
b/cmd/prysmctl/db/span.go @@ -164,10 +164,7 @@ func spanAction(cliCtx *cli.Context) error { b := chunk.Chunk() c := uint64(0) for z := uint64(0); z < uint64(len(b)); z += params.ChunkSize() { - end := z + params.ChunkSize() - if end > uint64(len(b)) { - end = uint64(len(b)) - } + end := min(z+params.ChunkSize(), uint64(len(b))) subChunk := b[z:end] row := make(table.Row, params.ChunkSize()+1) @@ -191,10 +188,7 @@ func spanAction(cliCtx *cli.Context) error { b := chunk.Chunk() c := uint64(0) for z := uint64(0); z < uint64(len(b)); z += params.ChunkSize() { - end := z + params.ChunkSize() - if end > uint64(len(b)) { - end = uint64(len(b)) - } + end := min(z+params.ChunkSize(), uint64(len(b))) subChunk := b[z:end] row := make(table.Row, 2) diff --git a/cmd/prysmctl/p2p/client.go b/cmd/prysmctl/p2p/client.go index 808bc8d95e..b6c4833837 100644 --- a/cmd/prysmctl/p2p/client.go +++ b/cmd/prysmctl/p2p/client.go @@ -107,7 +107,7 @@ func (c *client) MetadataSeq() uint64 { // When done, the caller must Close() or Reset() on the stream. func (c *client) Send( ctx context.Context, - message interface{}, + message any, baseTopic string, pid peer.ID, ) (corenet.Stream, error) { @@ -154,7 +154,7 @@ func (c *client) retrievePeerAddressesViaRPC(ctx context.Context, beaconEndpoint return nil, errors.New("no beacon RPC endpoints specified") } peers := make([]string, 0) - for i := 0; i < len(beaconEndpoints); i++ { + for i := range beaconEndpoints { conn, err := grpc.Dial(beaconEndpoints[i], grpc.WithInsecure()) if err != nil { return nil, err diff --git a/cmd/prysmctl/p2p/handler.go b/cmd/prysmctl/p2p/handler.go index 3ad525de7d..668b713ed9 100644 --- a/cmd/prysmctl/p2p/handler.go +++ b/cmd/prysmctl/p2p/handler.go @@ -15,7 +15,7 @@ import ( ssz "github.com/prysmaticlabs/fastssz" ) -type rpcHandler func(context.Context, interface{}, libp2pcore.Stream) error +type rpcHandler func(context.Context, any, libp2pcore.Stream) error // registerRPC for a given topic with an expected protobuf message type. func (c *client) registerRPCHandler(baseTopic string, handle rpcHandler) { diff --git a/cmd/prysmctl/p2p/handshake.go b/cmd/prysmctl/p2p/handshake.go index 7d38e7f0f9..a9d05f3480 100644 --- a/cmd/prysmctl/p2p/handshake.go +++ b/cmd/prysmctl/p2p/handshake.go @@ -22,7 +22,7 @@ func (c *client) registerHandshakeHandlers() { } // pingHandler reads the incoming ping rpc message from the peer. -func (c *client) pingHandler(_ context.Context, _ interface{}, stream libp2pcore.Stream) error { +func (c *client) pingHandler(_ context.Context, _ any, stream libp2pcore.Stream) error { defer closeStream(stream) if _, err := stream.Write([]byte{responseCodeSuccess}); err != nil { return err @@ -34,13 +34,13 @@ func (c *client) pingHandler(_ context.Context, _ interface{}, stream libp2pcore return nil } -func (c *client) goodbyeHandler(_ context.Context, _ interface{}, _ libp2pcore.Stream) error { +func (c *client) goodbyeHandler(_ context.Context, _ any, _ libp2pcore.Stream) error { return nil } // statusRPCHandler reads the incoming Status RPC from the peer and responds with our version of a status message. // This handler will disconnect any peer that does not match our fork version. 
-func (c *client) statusRPCHandler(ctx context.Context, _ interface{}, stream libp2pcore.Stream) error { +func (c *client) statusRPCHandler(ctx context.Context, _ any, stream libp2pcore.Stream) error { defer closeStream(stream) chainHead, err := c.beaconClient.GetChainHead(ctx, &emptypb.Empty{}) if err != nil { diff --git a/cmd/prysmctl/p2p/request_blobs.go b/cmd/prysmctl/p2p/request_blobs.go index 065f2c8b54..c90e0cfd45 100644 --- a/cmd/prysmctl/p2p/request_blobs.go +++ b/cmd/prysmctl/p2p/request_blobs.go @@ -125,7 +125,7 @@ func cliActionRequestBlobs(cliCtx *cli.Context) error { c.registerHandshakeHandlers() c.registerRPCHandler(p2p.RPCBlobSidecarsByRangeTopicV1, func( - ctx context.Context, i interface{}, stream libp2pcore.Stream, + ctx context.Context, i any, stream libp2pcore.Stream, ) error { return nil }) diff --git a/cmd/prysmctl/p2p/request_blocks.go b/cmd/prysmctl/p2p/request_blocks.go index a153664290..566374215e 100644 --- a/cmd/prysmctl/p2p/request_blocks.go +++ b/cmd/prysmctl/p2p/request_blocks.go @@ -159,12 +159,12 @@ func cliActionRequestBlocks(cliCtx *cli.Context) error { c.registerHandshakeHandlers() c.registerRPCHandler(p2p.RPCBlocksByRangeTopicV1, func( - ctx context.Context, i interface{}, stream libp2pcore.Stream, + ctx context.Context, i any, stream libp2pcore.Stream, ) error { return nil }) c.registerRPCHandler(p2p.RPCBlocksByRangeTopicV2, func( - ctx context.Context, i interface{}, stream libp2pcore.Stream, + ctx context.Context, i any, stream libp2pcore.Stream, ) error { return nil }) diff --git a/cmd/prysmctl/testnet/generate_genesis.go b/cmd/prysmctl/testnet/generate_genesis.go index d491f83018..3508c7ab01 100644 --- a/cmd/prysmctl/testnet/generate_genesis.go +++ b/cmd/prysmctl/testnet/generate_genesis.go @@ -188,7 +188,7 @@ func cliActionGenerateGenesisState(cliCtx *cli.Context) error { type MinimumSSZMarshal interface { MarshalSSZ() ([]byte, error) } - marshalFn := func(o interface{}) ([]byte, error) { + marshalFn := func(o any) ([]byte, error) { marshaler, ok := o.(MinimumSSZMarshal) if !ok { return nil, errors.New("not a marshaler") @@ -397,8 +397,8 @@ func depositJSONToDepositData(input *depositDataJSON) ([]byte, *ethpb.Deposit_Da func writeToOutputFile( fPath string, - data interface{}, - marshalFn func(o interface{}) ([]byte, error), + data any, + marshalFn func(o any) ([]byte, error), ) error { encoded, err := marshalFn(data) if err != nil { diff --git a/cmd/prysmctl/testnet/generate_genesis_test.go b/cmd/prysmctl/testnet/generate_genesis_test.go index b0d7312b9c..87763b538d 100644 --- a/cmd/prysmctl/testnet/generate_genesis_test.go +++ b/cmd/prysmctl/testnet/generate_genesis_test.go @@ -33,7 +33,7 @@ func Test_genesisStateFromJSONValidators(t *testing.T) { func createGenesisDepositData(t *testing.T, numKeys int) []*depositDataJSON { pubKeys := make([]bls.PublicKey, numKeys) privKeys := make([]bls.SecretKey, numKeys) - for i := 0; i < numKeys; i++ { + for i := range numKeys { randKey, err := bls.RandKey() require.NoError(t, err) privKeys[i] = randKey @@ -42,7 +42,7 @@ func createGenesisDepositData(t *testing.T, numKeys int) []*depositDataJSON { dataList, _, err := interop.DepositDataFromKeys(privKeys, pubKeys) require.NoError(t, err) jsonData := make([]*depositDataJSON, numKeys) - for i := 0; i < numKeys; i++ { + for i := range numKeys { dataRoot, err := dataList[i].HashTreeRoot() require.NoError(t, err) jsonData[i] = &depositDataJSON{ diff --git a/cmd/prysmctl/validator/withdraw.go b/cmd/prysmctl/validator/withdraw.go index e5161e8fe3..eebf761146 100644 
--- a/cmd/prysmctl/validator/withdraw.go +++ b/cmd/prysmctl/validator/withdraw.go @@ -96,7 +96,7 @@ func callWithdrawalEndpoints(ctx context.Context, host string, request []*struct if err != nil { return err } - data, ok := spec.Data.(map[string]interface{}) + data, ok := spec.Data.(map[string]any) if !ok { return errors.New("config has incorrect structure") } diff --git a/cmd/validator/flags/flags_test.go b/cmd/validator/flags/flags_test.go index a4a3b57876..ab3860d8d3 100644 --- a/cmd/validator/flags/flags_test.go +++ b/cmd/validator/flags/flags_test.go @@ -20,9 +20,9 @@ func TestLoadFlagsFromConfig_PreProcessing_Web3signer(t *testing.T) { pubkey1 := "0xbd36226746676565cd40141a7f0fe1445b9a3fbeb222288b226392c4b230ed0b" pubkey2 := "0xbd36226746676565cd40141a7f0fe1445b9a3fbeb222288b226392c4b230ed0a" - require.NoError(t, os.WriteFile("flags_test.yaml", []byte(fmt.Sprintf("%s:\n - %s\n - %s\n", Web3SignerPublicValidatorKeysFlag.Name, + require.NoError(t, os.WriteFile("flags_test.yaml", fmt.Appendf(nil, "%s:\n - %s\n - %s\n", Web3SignerPublicValidatorKeysFlag.Name, pubkey1, - pubkey2)), 0666)) + pubkey2), 0666)) require.NoError(t, set.Parse([]string{"test-command", "--" + cmd.ConfigFileFlag.Name, "flags_test.yaml"})) comFlags := cmd.WrapFlags([]cli.Flag{ diff --git a/cmd/validator/slashing-protection/import_export_test.go b/cmd/validator/slashing-protection/import_export_test.go index a25b490250..c4aca04cc2 100644 --- a/cmd/validator/slashing-protection/import_export_test.go +++ b/cmd/validator/slashing-protection/import_export_test.go @@ -155,7 +155,7 @@ func TestImportExportSlashingProtectionCli_EmptyData(t *testing.T) { require.NoError(t, err) attestingHistory := make([][]*common.AttestationRecord, 0) proposalHistory := make([]common.ProposalHistoryForPubkey, len(pubKeys)) - for i := 0; i < len(pubKeys); i++ { + for i := range pubKeys { proposalHistory[i].Proposals = make([]common.Proposal, 0) } mockJSON, err := mocks.MockSlashingProtectionJSON(pubKeys, attestingHistory, proposalHistory) diff --git a/cmd/validator/usage.go b/cmd/validator/usage.go index 5ad08d0fe6..8e2f4fed1b 100644 --- a/cmd/validator/usage.go +++ b/cmd/validator/usage.go @@ -166,12 +166,12 @@ func init() { cli.AppHelpTemplate = appHelpTemplate type helpData struct { - App interface{} + App any FlagGroups []flagGroup } originalHelpPrinter := cli.HelpPrinter - cli.HelpPrinter = func(w io.Writer, tmpl string, data interface{}) { + cli.HelpPrinter = func(w io.Writer, tmpl string, data any) { if tmpl == appHelpTemplate { for _, group := range appHelpFlagGroups { sort.Sort(cli.FlagsByName(group.Flags)) diff --git a/config/params/config_test.go b/config/params/config_test.go index c7102955b1..15d9dbdffa 100644 --- a/config/params/config_test.go +++ b/config/params/config_test.go @@ -46,7 +46,7 @@ func TestConfig_OverrideBeaconConfigTestTeardown(t *testing.T) { func TestConfig_DataRace(t *testing.T) { params.SetupTestConfigCleanup(t) wg := new(sync.WaitGroup) - for i := 0; i < 10; i++ { + for range 10 { wg.Add(2) go func() { defer wg.Done() @@ -197,7 +197,7 @@ func Test_TargetBlobCount(t *testing.T) { func fillGVR(value byte) [32]byte { var gvr [32]byte - for i := 0; i < len(gvr); i++ { + for i := range len(gvr) { gvr[i] = value } return gvr @@ -232,7 +232,6 @@ func TestBeaconChainConfigSlotDuration(t *testing.T) { } for _, tt := range tests { - tt := tt t.Run(tt.name, func(t *testing.T) { t.Parallel() require.Equal(t, tt.want, tt.cfg.SlotDuration()) @@ -266,7 +265,6 @@ func TestBeaconChainConfigSlotDurationMillis(t *testing.T) { 
} for _, tt := range tests { - tt := tt t.Run(tt.name, func(t *testing.T) { t.Parallel() require.Equal(t, tt.want, tt.cfg.SlotDurationMillis()) @@ -316,7 +314,6 @@ func TestBeaconChainConfigSlotComponentDuration(t *testing.T) { } for _, tt := range tests { - tt := tt t.Run(tt.name, func(t *testing.T) { t.Parallel() require.Equal(t, tt.want, tt.cfg.SlotComponentDuration(tt.bp)) diff --git a/config/params/loader_test.go b/config/params/loader_test.go index fd75ed892d..476c46069b 100644 --- a/config/params/loader_test.go +++ b/config/params/loader_test.go @@ -401,7 +401,7 @@ func fieldsFromYamls(t *testing.T, fps []string) []string { for _, fp := range fps { yamlFile, err := os.ReadFile(fp) require.NoError(t, err) - m := make(map[string]interface{}) + m := make(map[string]any) require.NoError(t, yaml.Unmarshal(yamlFile, &m)) for k := range m { @@ -421,7 +421,7 @@ func fieldsFromYamls(t *testing.T, fps []string) []string { func assertYamlFieldsMatch(t *testing.T, name string, fields []string, c1, c2 *params.BeaconChainConfig) { // Ensure all fields from the yaml file exist, were set, and correctly match the expected value. - ft1 := reflect.TypeOf(*c1) + ft1 := reflect.TypeFor[params.BeaconChainConfig]() for _, field := range fields { var found bool for i := 0; i < ft1.NumField(); i++ { diff --git a/config/util.go b/config/util.go index 6c69aecfe8..54eb90d16a 100644 --- a/config/util.go +++ b/config/util.go @@ -16,7 +16,7 @@ import ( "k8s.io/apimachinery/pkg/util/yaml" ) -func UnmarshalFromURL(ctx context.Context, from string, to interface{}) error { +func UnmarshalFromURL(ctx context.Context, from string, to any) error { u, err := url.ParseRequestURI(from) if err != nil { return err @@ -48,7 +48,7 @@ func UnmarshalFromURL(ctx context.Context, from string, to interface{}) error { return nil } -func UnmarshalFromFile(from string, to interface{}) error { +func UnmarshalFromFile(from string, to any) error { cleanpath := filepath.Clean(from) b, err := os.ReadFile(cleanpath) if err != nil { diff --git a/consensus-types/blocks/factory.go b/consensus-types/blocks/factory.go index c518845459..74a21d970a 100644 --- a/consensus-types/blocks/factory.go +++ b/consensus-types/blocks/factory.go @@ -30,7 +30,7 @@ var ( ) // NewSignedBeaconBlock creates a signed beacon block from a protobuf signed beacon block. -func NewSignedBeaconBlock(i interface{}) (interfaces.SignedBeaconBlock, error) { +func NewSignedBeaconBlock(i any) (interfaces.SignedBeaconBlock, error) { switch b := i.(type) { case nil: return nil, ErrNilObject @@ -88,7 +88,7 @@ func NewSignedBeaconBlock(i interface{}) (interfaces.SignedBeaconBlock, error) { } // NewBeaconBlock creates a beacon block from a protobuf beacon block. -func NewBeaconBlock(i interface{}) (interfaces.ReadOnlyBeaconBlock, error) { +func NewBeaconBlock(i any) (interfaces.ReadOnlyBeaconBlock, error) { switch b := i.(type) { case nil: return nil, ErrNilObject @@ -144,7 +144,7 @@ func NewBeaconBlock(i interface{}) (interfaces.ReadOnlyBeaconBlock, error) { } // NewBeaconBlockBody creates a beacon block body from a protobuf beacon block body. 
-func NewBeaconBlockBody(i interface{}) (interfaces.ReadOnlyBeaconBlockBody, error) { +func NewBeaconBlockBody(i any) (interfaces.ReadOnlyBeaconBlockBody, error) { switch b := i.(type) { case nil: return nil, ErrNilObject @@ -265,7 +265,7 @@ func BuildSignedBeaconBlock(blk interfaces.ReadOnlyBeaconBlock, signature []byte } } -func getWrappedPayload(payload interface{}) (wrappedPayload interfaces.ExecutionData, wrapErr error) { +func getWrappedPayload(payload any) (wrappedPayload interfaces.ExecutionData, wrapErr error) { switch p := payload.(type) { case *enginev1.ExecutionPayload: wrappedPayload, wrapErr = WrappedExecutionPayload(p) @@ -308,7 +308,7 @@ func checkPayloadAgainstHeader(wrappedPayload, payloadHeader interfaces.Executio // BuildSignedBeaconBlockFromExecutionPayload takes a signed, blinded beacon block and converts into // a full, signed beacon block by specifying an execution payload. // nolint:gocognit -func BuildSignedBeaconBlockFromExecutionPayload(blk interfaces.ReadOnlySignedBeaconBlock, payload interface{}) (interfaces.SignedBeaconBlock, error) { +func BuildSignedBeaconBlockFromExecutionPayload(blk interfaces.ReadOnlySignedBeaconBlock, payload any) (interfaces.SignedBeaconBlock, error) { if err := BeaconBlockIsNil(blk); err != nil { return nil, err } @@ -338,7 +338,7 @@ func BuildSignedBeaconBlockFromExecutionPayload(blk interfaces.ReadOnlySignedBea graffiti := b.Body().Graffiti() sig := blk.Signature() - var fullBlock interface{} + var fullBlock any switch blk.Version() { case version.Bellatrix: p, ok := payload.(*enginev1.ExecutionPayload) diff --git a/consensus-types/blocks/kzg_test.go b/consensus-types/blocks/kzg_test.go index 2349b379a3..982c6ff84b 100644 --- a/consensus-types/blocks/kzg_test.go +++ b/consensus-types/blocks/kzg_test.go @@ -230,8 +230,8 @@ func Benchmark_MerkleProofKZGCommitment(b *testing.B) { body, err := NewBeaconBlockBody(pbBody) require.NoError(b, err) index := 1 - b.ResetTimer() - for i := 0; i < b.N; i++ { + + for b.Loop() { _, err := MerkleProofKZGCommitment(body, index) require.NoError(b, err) } diff --git a/consensus-types/hdiff/fuzz_test.go b/consensus-types/hdiff/fuzz_test.go index a0f692f681..b387e0a512 100644 --- a/consensus-types/hdiff/fuzz_test.go +++ b/consensus-types/hdiff/fuzz_test.go @@ -444,14 +444,14 @@ func FuzzKmpIndex(f *testing.F) { // Parse comma-separated strings into int slices var source, target []int if sourceStr != "" { - for _, s := range strings.Split(sourceStr, ",") { + for s := range strings.SplitSeq(sourceStr, ",") { if val, err := strconv.Atoi(strings.TrimSpace(s)); err == nil { source = append(source, val) } } } if targetStr != "" { - for _, s := range strings.Split(targetStr, ",") { + for s := range strings.SplitSeq(targetStr, ",") { if val, err := strconv.Atoi(strings.TrimSpace(s)); err == nil { target = append(target, val) } @@ -508,7 +508,7 @@ func FuzzComputeLPS(f *testing.F) { // Parse comma-separated string into int slice var pattern []int if patternStr != "" { - for _, s := range strings.Split(patternStr, ",") { + for s := range strings.SplitSeq(patternStr, ",") { if val, err := strconv.Atoi(strings.TrimSpace(s)); err == nil { pattern = append(pattern, val) } diff --git a/consensus-types/hdiff/property_test.go b/consensus-types/hdiff/property_test.go index 1130e77ecc..3e15d90d26 100644 --- a/consensus-types/hdiff/property_test.go +++ b/consensus-types/hdiff/property_test.go @@ -47,10 +47,11 @@ func FuzzPropertyRoundTrip(f *testing.F) { ctx := t.Context() // Create source state with reasonable size - 
validatorCount := uint64(len(validatorChanges) + 8) // Minimum 8 validators - if validatorCount > 64 { - validatorCount = 64 // Cap at 64 for performance - } + validatorCount := min( + // Minimum 8 validators + uint64(len(validatorChanges)+8), + // Cap at 64 for performance + 64) source, _ := util.DeterministicGenesisStateElectra(t, validatorCount) // Create target state with modifications diff --git a/consensus-types/hdiff/security_test.go b/consensus-types/hdiff/security_test.go index b108e78507..effb01ec70 100644 --- a/consensus-types/hdiff/security_test.go +++ b/consensus-types/hdiff/security_test.go @@ -63,7 +63,7 @@ func TestReasonablePerformance(t *testing.T) { // Make realistic changes _ = target.SetSlot(source.Slot() + 32) // One epoch validators := target.Validators() - for i := 0; i < 100; i++ { // 10% of validators changed + for i := range 100 { // 10% of validators changed validators[i].EffectiveBalance += 1000000000 // 1 ETH change } _ = target.SetValidators(validators) @@ -129,7 +129,7 @@ func TestStateTransitionValidation(t *testing.T) { // Some validators get rewards, others get penalties balances := target.Balances() - for i := 0; i < len(balances); i++ { + for i := range balances { if i%2 == 0 { balances[i] += 100000000 // 0.1 ETH reward } else { @@ -331,12 +331,12 @@ func TestConcurrencySafety(t *testing.T) { var wg sync.WaitGroup errors := make(chan error, numGoroutines*iterations) - for i := 0; i < numGoroutines; i++ { + for i := range numGoroutines { wg.Add(1) go func(workerID int) { defer wg.Done() - for j := 0; j < iterations; j++ { + for j := range iterations { _, err := Diff(source, target) if err != nil { errors <- fmt.Errorf("worker %d iteration %d: %v", workerID, j, err) @@ -367,7 +367,7 @@ func TestConcurrencySafety(t *testing.T) { var wg sync.WaitGroup errors := make(chan error, numGoroutines) - for i := 0; i < numGoroutines; i++ { + for i := range numGoroutines { wg.Add(1) go func(workerID int) { defer wg.Done() diff --git a/consensus-types/hdiff/state_diff.go b/consensus-types/hdiff/state_diff.go index 3d88541576..2ae3b59718 100644 --- a/consensus-types/hdiff/state_diff.go +++ b/consensus-types/hdiff/state_diff.go @@ -752,7 +752,7 @@ func (ret *stateDiff) readProposerLookahead(data *[]byte) error { // Read the proposer lookahead (2 * SlotsPerEpoch uint64 values) numProposers := 2 * fieldparams.SlotsPerEpoch ret.proposerLookahead = make([]uint64, numProposers) - for i := 0; i < numProposers; i++ { + for i := range numProposers { ret.proposerLookahead[i] = binary.LittleEndian.Uint64((*data)[i*8 : (i+1)*8]) } *data = (*data)[proposerLookaheadLength:] diff --git a/consensus-types/hdiff/state_diff_test.go b/consensus-types/hdiff/state_diff_test.go index 082425ee10..083db65e43 100644 --- a/consensus-types/hdiff/state_diff_test.go +++ b/consensus-types/hdiff/state_diff_test.go @@ -42,7 +42,7 @@ func Test_diffToState(t *testing.T) { func Test_kmpIndex(t *testing.T) { intSlice := make([]*int, 10) - for i := 0; i < len(intSlice); i++ { + for i := range intSlice { intSlice[i] = new(int) *intSlice[i] = i } @@ -544,7 +544,7 @@ func Test_diffToBalances(t *testing.T) { } targetBals := target.Balances() - for i := 0; i < len(sourceBals); i++ { + for i := range sourceBals { require.Equal(t, targetBals[i], sourceBals[i], "balance mismatch at index %d", i) } }) @@ -665,7 +665,7 @@ func Test_applyStateDiff(t *testing.T) { // Test_computeLPS tests the LPS array computation for KMP algorithm func Test_computeLPS(t *testing.T) { intSlice := make([]*int, 10) - for i := 0; i < 
len(intSlice); i++ { + for i := range intSlice { intSlice[i] = new(int) *intSlice[i] = i } @@ -955,8 +955,7 @@ func BenchmarkGetDiff(b *testing.B) { source, target, err := getMainnetStates() require.NoError(b, err) - b.ResetTimer() - for i := 0; i < b.N; i++ { + for b.Loop() { hdiff, err := Diff(source, target) b.Log("Diff size:", len(hdiff.StateDiff)+len(hdiff.BalancesDiff)+len(hdiff.ValidatorDiffs)) require.NoError(b, err) @@ -971,8 +970,8 @@ func BenchmarkApplyDiff(b *testing.B) { require.NoError(b, err) hdiff, err := Diff(source, target) require.NoError(b, err) - b.ResetTimer() - for i := 0; i < b.N; i++ { + + for b.Loop() { source, err = ApplyDiff(b.Context(), source, hdiff) require.NoError(b, err) } @@ -998,7 +997,7 @@ func BenchmarkDiffCreation(b *testing.B) { _ = target.SetValidators(validators) b.ResetTimer() - for i := 0; i < b.N; i++ { + for b.Loop() { _, err := Diff(source, target) if err != nil { b.Fatal(err) @@ -1026,7 +1025,7 @@ func BenchmarkDiffApplication(b *testing.B) { } b.ResetTimer() - for i := 0; i < b.N; i++ { + for b.Loop() { // Need fresh source for each iteration freshSource := source.Copy() _, err := ApplyDiff(ctx, freshSource, diff) @@ -1049,8 +1048,7 @@ func BenchmarkSerialization(b *testing.B) { b.Fatal(err) } - b.ResetTimer() - for i := 0; i < b.N; i++ { + for b.Loop() { _ = hdiff.serialize() } } @@ -1067,8 +1065,7 @@ func BenchmarkDeserialization(b *testing.B) { b.Fatal(err) } - b.ResetTimer() - for i := 0; i < b.N; i++ { + for b.Loop() { _, err := newHdiff(diff) if err != nil { b.Fatal(err) @@ -1093,7 +1090,7 @@ func BenchmarkBalanceDiff(b *testing.B) { _ = target.SetBalances(balances) b.ResetTimer() - for i := 0; i < b.N; i++ { + for b.Loop() { _, err := diffToBalances(source, target) if err != nil { b.Fatal(err) @@ -1123,7 +1120,7 @@ func BenchmarkValidatorDiff(b *testing.B) { _ = target.SetValidators(validators) b.ResetTimer() - for i := 0; i < b.N; i++ { + for b.Loop() { _, err := diffToVals(source, target) if err != nil { b.Fatal(err) @@ -1172,7 +1169,7 @@ func BenchmarkKMPAlgorithm(b *testing.B) { } b.ResetTimer() - for i := 0; i < b.N; i++ { + for b.Loop() { _ = kmpIndex(len(pattern), text, intEquals) } }) @@ -1201,7 +1198,7 @@ func BenchmarkCompressionRatio(b *testing.B) { name: "balance_changes", modifier: func(target state.BeaconState) { balances := target.Balances() - for i := 0; i < 10; i++ { + for i := range 10 { if i < len(balances) { balances[i] += 1000 } @@ -1213,7 +1210,7 @@ func BenchmarkCompressionRatio(b *testing.B) { name: "validator_changes", modifier: func(target state.BeaconState) { validators := target.Validators() - for i := 0; i < 10; i++ { + for i := range 10 { if i < len(validators) { validators[i].EffectiveBalance += 1000 } @@ -1235,7 +1232,7 @@ func BenchmarkCompressionRatio(b *testing.B) { } b.ResetTimer() - for i := 0; i < b.N; i++ { + for i := 0; b.Loop(); i++ { diff, err := Diff(source, testTarget) if err != nil { b.Fatal(err) @@ -1262,7 +1259,7 @@ func BenchmarkMemoryUsage(b *testing.B) { // Modify some data validators := target.Validators() - for i := 0; i < 25; i++ { + for i := range 25 { if i < len(validators) { validators[i].EffectiveBalance += 1000 } @@ -1270,9 +1267,8 @@ func BenchmarkMemoryUsage(b *testing.B) { _ = target.SetValidators(validators) b.ReportAllocs() - b.ResetTimer() - for i := 0; i < b.N; i++ { + for b.Loop() { diff, err := Diff(source, target) if err != nil { b.Fatal(err) diff --git a/consensus-types/payload-attribute/types.go b/consensus-types/payload-attribute/types.go index 
563f7dff8b..da2b5730b4 100644 --- a/consensus-types/payload-attribute/types.go +++ b/consensus-types/payload-attribute/types.go @@ -30,7 +30,7 @@ var ( ) // New returns a new payload attribute with the given input object. -func New(i interface{}) (Attributer, error) { +func New(i any) (Attributer, error) { switch a := i.(type) { case nil: return nil, blocks.ErrNilObject diff --git a/consensus-types/validator/custom_types.go b/consensus-types/validator/custom_types.go index 8f00ed9583..b29e89e4de 100644 --- a/consensus-types/validator/custom_types.go +++ b/consensus-types/validator/custom_types.go @@ -25,7 +25,7 @@ func (u *Uint64) UnmarshalJSON(bs []byte) error { } // UnmarshalYAML custom unmarshal function for yaml -func (u *Uint64) UnmarshalYAML(unmarshal func(interface{}) error) error { +func (u *Uint64) UnmarshalYAML(unmarshal func(any) error) error { var str string err := unmarshal(&str) if err != nil { diff --git a/consensus-types/wrapper/metadata.go b/consensus-types/wrapper/metadata.go index 08c5fce8f6..5de5137fe3 100644 --- a/consensus-types/wrapper/metadata.go +++ b/consensus-types/wrapper/metadata.go @@ -42,7 +42,7 @@ func (m MetadataV0) CustodyGroupCount() uint64 { } // InnerObject returns the underlying metadata protobuf structure. -func (m MetadataV0) InnerObject() interface{} { +func (m MetadataV0) InnerObject() any { return m.md } @@ -136,7 +136,7 @@ func (m MetadataV1) CustodyGroupCount() uint64 { } // InnerObject returns the underlying metadata protobuf structure. -func (m MetadataV1) InnerObject() interface{} { +func (m MetadataV1) InnerObject() any { return m.md } @@ -230,7 +230,7 @@ func (m MetadataV2) CustodyGroupCount() uint64 { } // InnerObject returns the underlying metadata protobuf structure. -func (m MetadataV2) InnerObject() interface{} { +func (m MetadataV2) InnerObject() any { return m.md } diff --git a/container/leaky-bucket/heap.go b/container/leaky-bucket/heap.go index 7f36db23f4..29289a4ba1 100644 --- a/container/leaky-bucket/heap.go +++ b/container/leaky-bucket/heap.go @@ -28,7 +28,7 @@ func (pq priorityQueue) Swap(i, j int) { } // Push a LeakyBucket to priorityQueue -func (pq *priorityQueue) Push(x interface{}) { +func (pq *priorityQueue) Push(x any) { n := len(*pq) b, ok := x.(*LeakyBucket) if !ok { @@ -38,7 +38,7 @@ func (pq *priorityQueue) Push(x interface{}) { *pq = append(*pq, b) } -func (pq *priorityQueue) Pop() interface{} { +func (pq *priorityQueue) Pop() any { old := *pq n := len(old) b := old[n-1] diff --git a/container/leaky-bucket/heap_test.go b/container/leaky-bucket/heap_test.go index f153d6d598..4a8888f20f 100644 --- a/container/leaky-bucket/heap_test.go +++ b/container/leaky-bucket/heap_test.go @@ -34,7 +34,7 @@ func TestLen(t *testing.T) { func TestPeak(t *testing.T) { q := make(priorityQueue, 0, 4096) - for i := 0; i < 5; i++ { + for range 5 { b := NewLeakyBucket(1.0, 5, time.Second) q.Push(b) } @@ -43,7 +43,7 @@ func TestPeak(t *testing.T) { func TestLess(t *testing.T) { q := make(priorityQueue, 0, 4096) - for i := 0; i < 5; i++ { + for i := range 5 { b := NewLeakyBucket(1.0, 5, time.Second) b.p = now().Add(time.Duration(i)) q.Push(b) @@ -59,7 +59,7 @@ func TestLess(t *testing.T) { func TestSwap(t *testing.T) { q := make(priorityQueue, 0, 4096) - for i := 0; i < 5; i++ { + for range 5 { b := NewLeakyBucket(1.0, 5, time.Second) q.Push(b) } @@ -80,7 +80,7 @@ func TestSwap(t *testing.T) { func TestPush(t *testing.T) { q := make(priorityQueue, 0, 4096) - for i := 0; i < 5; i++ { + for range 5 { b := NewLeakyBucket(1.0, 5, time.Second) 
q.Push(b) diff --git a/container/leaky-bucket/leakybucket_test.go b/container/leaky-bucket/leakybucket_test.go index d3d7e6cf4f..9eb0f0eaa9 100644 --- a/container/leaky-bucket/leakybucket_test.go +++ b/container/leaky-bucket/leakybucket_test.go @@ -48,7 +48,7 @@ func TestNewLeakyBucket(t *testing.T) { type actionSet struct { count int64 action string - value interface{} + value any } type testSet struct { diff --git a/container/multi-value-slice/multi_value_slice_test.go b/container/multi-value-slice/multi_value_slice_test.go index e967b12b36..79559137b7 100644 --- a/container/multi-value-slice/multi_value_slice_test.go +++ b/container/multi-value-slice/multi_value_slice_test.go @@ -632,7 +632,7 @@ func BenchmarkValue(b *testing.B) { b.Run("100,000 shared items", func(b *testing.B) { s := &Slice[int]{} s.Init(make([]int, _100k)) - for i := 0; i < b.N; i++ { + for b.Loop() { s.Value(&testObject{}) } }) @@ -641,11 +641,11 @@ func BenchmarkValue(b *testing.B) { s.Init(make([]int, _100k)) s.individualItems[0] = &MultiValueItem[int]{Values: []*Value[int]{{val: 999, ids: []uint64{}}}} objs := make([]*testObject, _100k) - for i := 0; i < len(objs); i++ { + for i := range objs { objs[i] = &testObject{id: uint64(i)} s.individualItems[0].Values[0].ids = append(s.individualItems[0].Values[0].ids, uint64(i)) } - for i := 0; i < b.N; i++ { + for b.Loop() { s.Value(objs[rand.Intn(_100k)]) } }) @@ -653,11 +653,11 @@ func BenchmarkValue(b *testing.B) { s := &Slice[int]{} s.Init(make([]int, _100k)) objs := make([]*testObject, _100k) - for i := 0; i < len(objs); i++ { + for i := range objs { objs[i] = &testObject{id: uint64(i)} s.individualItems[uint64(i)] = &MultiValueItem[int]{Values: []*Value[int]{{val: i, ids: []uint64{uint64(i)}}}} } - for i := 0; i < b.N; i++ { + for b.Loop() { s.Value(objs[rand.Intn(_100k)]) } }) @@ -666,11 +666,11 @@ func BenchmarkValue(b *testing.B) { s.Init(make([]int, _100k)) s.appendedItems = []*MultiValueItem[int]{{Values: []*Value[int]{{val: 999, ids: []uint64{}}}}} objs := make([]*testObject, _100k) - for i := 0; i < len(objs); i++ { + for i := range objs { objs[i] = &testObject{id: uint64(i)} s.appendedItems[0].Values[0].ids = append(s.appendedItems[0].Values[0].ids, uint64(i)) } - for i := 0; i < b.N; i++ { + for b.Loop() { s.Value(objs[rand.Intn(_100k)]) } }) @@ -679,18 +679,18 @@ func BenchmarkValue(b *testing.B) { s.Init(make([]int, _100k)) s.appendedItems = []*MultiValueItem[int]{} objs := make([]*testObject, _100k) - for i := 0; i < len(objs); i++ { + for i := range objs { objs[i] = &testObject{id: uint64(i)} s.appendedItems = append(s.appendedItems, &MultiValueItem[int]{Values: []*Value[int]{{val: i, ids: []uint64{uint64(i)}}}}) } - for i := 0; i < b.N; i++ { + for b.Loop() { s.Value(objs[rand.Intn(_100k)]) } }) b.Run("1,000,000 shared items", func(b *testing.B) { s := &Slice[int]{} s.Init(make([]int, _1m)) - for i := 0; i < b.N; i++ { + for b.Loop() { s.Value(&testObject{}) } }) @@ -699,11 +699,11 @@ func BenchmarkValue(b *testing.B) { s.Init(make([]int, _1m)) s.individualItems[0] = &MultiValueItem[int]{Values: []*Value[int]{{val: 999, ids: []uint64{}}}} objs := make([]*testObject, _1m) - for i := 0; i < len(objs); i++ { + for i := range objs { objs[i] = &testObject{id: uint64(i)} s.individualItems[0].Values[0].ids = append(s.individualItems[0].Values[0].ids, uint64(i)) } - for i := 0; i < b.N; i++ { + for b.Loop() { s.Value(objs[rand.Intn(_1m)]) } }) @@ -711,11 +711,11 @@ func BenchmarkValue(b *testing.B) { s := &Slice[int]{} s.Init(make([]int, _1m)) objs := 
make([]*testObject, _1m) - for i := 0; i < len(objs); i++ { + for i := range objs { objs[i] = &testObject{id: uint64(i)} s.individualItems[uint64(i)] = &MultiValueItem[int]{Values: []*Value[int]{{val: i, ids: []uint64{uint64(i)}}}} } - for i := 0; i < b.N; i++ { + for b.Loop() { s.Value(objs[rand.Intn(_1m)]) } }) @@ -724,11 +724,11 @@ func BenchmarkValue(b *testing.B) { s.Init(make([]int, _1m)) s.appendedItems = []*MultiValueItem[int]{{Values: []*Value[int]{{val: 999, ids: []uint64{}}}}} objs := make([]*testObject, _1m) - for i := 0; i < len(objs); i++ { + for i := range objs { objs[i] = &testObject{id: uint64(i)} s.appendedItems[0].Values[0].ids = append(s.appendedItems[0].Values[0].ids, uint64(i)) } - for i := 0; i < b.N; i++ { + for b.Loop() { s.Value(objs[rand.Intn(_1m)]) } }) @@ -737,18 +737,18 @@ func BenchmarkValue(b *testing.B) { s.Init(make([]int, _1m)) s.appendedItems = []*MultiValueItem[int]{} objs := make([]*testObject, _1m) - for i := 0; i < len(objs); i++ { + for i := range objs { objs[i] = &testObject{id: uint64(i)} s.appendedItems = append(s.appendedItems, &MultiValueItem[int]{Values: []*Value[int]{{val: i, ids: []uint64{uint64(i)}}}}) } - for i := 0; i < b.N; i++ { + for b.Loop() { s.Value(objs[rand.Intn(_1m)]) } }) b.Run("10,000,000 shared items", func(b *testing.B) { s := &Slice[int]{} s.Init(make([]int, _10m)) - for i := 0; i < b.N; i++ { + for b.Loop() { s.Value(&testObject{}) } }) @@ -757,11 +757,11 @@ func BenchmarkValue(b *testing.B) { s.Init(make([]int, _10m)) s.individualItems[0] = &MultiValueItem[int]{Values: []*Value[int]{{val: 999, ids: []uint64{}}}} objs := make([]*testObject, _10m) - for i := 0; i < len(objs); i++ { + for i := range objs { objs[i] = &testObject{id: uint64(i)} s.individualItems[0].Values[0].ids = append(s.individualItems[0].Values[0].ids, uint64(i)) } - for i := 0; i < b.N; i++ { + for b.Loop() { s.Value(objs[rand.Intn(_10m)]) } }) @@ -769,11 +769,11 @@ func BenchmarkValue(b *testing.B) { s := &Slice[int]{} s.Init(make([]int, _10m)) objs := make([]*testObject, _10m) - for i := 0; i < len(objs); i++ { + for i := range objs { objs[i] = &testObject{id: uint64(i)} s.individualItems[uint64(i)] = &MultiValueItem[int]{Values: []*Value[int]{{val: i, ids: []uint64{uint64(i)}}}} } - for i := 0; i < b.N; i++ { + for b.Loop() { s.Value(objs[rand.Intn(_10m)]) } }) @@ -782,11 +782,11 @@ func BenchmarkValue(b *testing.B) { s.Init(make([]int, _10m)) s.appendedItems = []*MultiValueItem[int]{{Values: []*Value[int]{{val: 999, ids: []uint64{}}}}} objs := make([]*testObject, _10m) - for i := 0; i < len(objs); i++ { + for i := range objs { objs[i] = &testObject{id: uint64(i)} s.appendedItems[0].Values[0].ids = append(s.appendedItems[0].Values[0].ids, uint64(i)) } - for i := 0; i < b.N; i++ { + for b.Loop() { s.Value(objs[rand.Intn(_10m)]) } }) @@ -795,11 +795,11 @@ func BenchmarkValue(b *testing.B) { s.Init(make([]int, _10m)) s.appendedItems = []*MultiValueItem[int]{} objs := make([]*testObject, _10m) - for i := 0; i < len(objs); i++ { + for i := range objs { objs[i] = &testObject{id: uint64(i)} s.appendedItems = append(s.appendedItems, &MultiValueItem[int]{Values: []*Value[int]{{val: i, ids: []uint64{uint64(i)}}}}) } - for i := 0; i < b.N; i++ { + for b.Loop() { s.Value(objs[rand.Intn(_10m)]) } }) diff --git a/container/queue/priority_queue.go b/container/queue/priority_queue.go index 3c3d339fc0..e5a9ceca31 100644 --- a/container/queue/priority_queue.go +++ b/container/queue/priority_queue.go @@ -64,7 +64,7 @@ type Item struct { Key string // Value is an 
unspecified type that implementations can use to store // information - Value interface{} + Value any // Priority determines ordering in the queue, with the lowest value being the // highest priority @@ -185,7 +185,7 @@ func (q queue) Swap(i, j int) { // Push is used by heap.Interface to push items onto the heap. This method is // invoked by container/heap, and should not be used directly. // See: https://golang.org/pkg/container/heap/#Interface -func (q *queue) Push(x interface{}) { +func (q *queue) Push(x any) { n := len(*q) item, ok := x.(*Item) if !ok { @@ -198,7 +198,7 @@ func (q *queue) Push(x interface{}) { // Pop is used by heap.Interface to pop items off of the heap. This method is // invoked by container/heap, and should not be used directly. // See: https://golang.org/pkg/container/heap/#Interface -func (q *queue) Pop() interface{} { +func (q *queue) Pop() any { old := *q n := len(old) item := old[n-1] diff --git a/container/slice/slice_test.go b/container/slice/slice_test.go index 56aae7a9b3..14e49ac12c 100644 --- a/container/slice/slice_test.go +++ b/container/slice/slice_test.go @@ -2,7 +2,7 @@ package slice_test import ( "reflect" - "sort" + "slices" "testing" "github.com/OffchainLabs/prysm/v7/consensus-types/primitives" @@ -55,13 +55,11 @@ func TestIntersectionUint64(t *testing.T) { {[]uint64{1, 1, 1}, []uint64{1, 1}, []uint64{1, 2, 3}, []uint64{1}}, } for _, tt := range testCases { - setA := append([]uint64{}, tt.setA...) - setB := append([]uint64{}, tt.setB...) - setC := append([]uint64{}, tt.setC...) + setA := slices.Clone(tt.setA) + setB := slices.Clone(tt.setB) + setC := slices.Clone(tt.setC) result := slice.IntersectionUint64(setA, setB, setC) - sort.Slice(result, func(i, j int) bool { - return result[i] < result[j] - }) + slices.Sort(result) if !reflect.DeepEqual(result, tt.out) { t.Errorf("got %d, want %d", result, tt.out) } @@ -119,13 +117,11 @@ func TestIntersectionInt64(t *testing.T) { {[]int64{1, 1, 1}, []int64{1, 1}, []int64{1, 2, 3}, []int64{1}}, } for _, tt := range testCases { - setA := append([]int64{}, tt.setA...) - setB := append([]int64{}, tt.setB...) - setC := append([]int64{}, tt.setC...) + setA := slices.Clone(tt.setA) + setB := slices.Clone(tt.setB) + setC := slices.Clone(tt.setC) result := slice.IntersectionInt64(setA, setB, setC) - sort.Slice(result, func(i, j int) bool { - return result[i] < result[j] - }) + slices.Sort(result) if !reflect.DeepEqual(result, tt.out) { t.Errorf("got %d, want %d", result, tt.out) } @@ -525,13 +521,11 @@ func TestIntersectionSlot(t *testing.T) { {[]primitives.Slot{1, 1, 1}, []primitives.Slot{1, 1}, []primitives.Slot{1, 2, 3}, []primitives.Slot{1}}, } for _, tt := range testCases { - setA := append([]primitives.Slot{}, tt.setA...) - setB := append([]primitives.Slot{}, tt.setB...) - setC := append([]primitives.Slot{}, tt.setC...) 
+ setA := slices.Clone(tt.setA) + setB := slices.Clone(tt.setB) + setC := slices.Clone(tt.setC) result := slice.IntersectionSlot(setA, setB, setC) - sort.Slice(result, func(i, j int) bool { - return result[i] < result[j] - }) + slices.Sort(result) if !reflect.DeepEqual(result, tt.out) { t.Errorf("got %d, want %d", result, tt.out) } diff --git a/container/thread-safe/map_test.go b/container/thread-safe/map_test.go index a3956318f3..ec0f55a67f 100644 --- a/container/thread-safe/map_test.go +++ b/container/thread-safe/map_test.go @@ -36,8 +36,8 @@ func BenchmarkMap_Concrete(b *testing.B) { mm := &safeMap{ items: make(map[int]string), } - for i := 0; i < b.N; i++ { - for j := 0; j < 1000; j++ { + for b.Loop() { + for j := range 1000 { mm.Put(j, "foo") mm.Get(j) mm.Delete(j) @@ -48,8 +48,8 @@ func BenchmarkMap_Concrete(b *testing.B) { func BenchmarkMap_Generic(b *testing.B) { items := make(map[int]string) mm := NewThreadSafeMap(items) - for i := 0; i < b.N; i++ { - for j := 0; j < 1000; j++ { + for b.Loop() { + for j := range 1000 { mm.Put(j, "foo") mm.Get(j) mm.Delete(j) @@ -59,8 +59,8 @@ func BenchmarkMap_Generic(b *testing.B) { func BenchmarkMap_GenericTx(b *testing.B) { items := make(map[int]string) mm := NewThreadSafeMap(items) - for i := 0; i < b.N; i++ { - for j := 0; j < 1000; j++ { + for b.Loop() { + for j := range 1000 { mm.Do(func(mp map[int]string) { mp[j] = "foo" _ = mp[j] @@ -85,7 +85,7 @@ func TestMap(t *testing.T) { require.Equal(t, 3, tMap.Len()) var wg sync.WaitGroup - for i := 0; i < 100; i++ { + for range 100 { wg.Add(1) go func(w *sync.WaitGroup, scopedMap *Map[int, string]) { defer w.Done() diff --git a/container/trie/sparse_merkle.go b/container/trie/sparse_merkle.go index 2c95f367f1..02a67ed5a6 100644 --- a/container/trie/sparse_merkle.go +++ b/container/trie/sparse_merkle.go @@ -82,7 +82,7 @@ func GenerateTrieFromItems(items [][]byte, depth uint64) (*SparseMerkleTrie, err transformedLeaves[i] = arr[:] } layers[0] = transformedLeaves - for i := uint64(0); i < depth; i++ { + for i := range depth { if len(layers[i])%2 == 1 { layers[i] = append(layers[i], ZeroHashes[i][:]) } diff --git a/container/trie/sparse_merkle_test.go b/container/trie/sparse_merkle_test.go index dfc13413ff..fb1f11811a 100644 --- a/container/trie/sparse_merkle_test.go +++ b/container/trie/sparse_merkle_test.go @@ -20,7 +20,7 @@ func TestCreateTrieFromProto_Validation(t *testing.T) { h := hash.Hash([]byte("hi")) genValidLayers := func(num int) []*ethpb.TrieLayer { l := make([]*ethpb.TrieLayer, num) - for i := 0; i < num; i++ { + for i := range num { l[i] = ðpb.TrieLayer{ Layer: [][]byte{h[:]}, } @@ -339,17 +339,17 @@ func BenchmarkGenerateTrieFromItems(b *testing.B) { []byte("FFFFFF"), []byte("GGGGGGG"), } - for i := 0; i < b.N; i++ { + for b.Loop() { _, err := trie.GenerateTrieFromItems(items, params.BeaconConfig().DepositContractTreeDepth) require.NoError(b, err, "Could not generate Merkle trie from items") } } func BenchmarkInsertTrie_Optimized(b *testing.B) { - b.StopTimer() + numDeposits := 16000 items := make([][]byte, numDeposits) - for i := 0; i < numDeposits; i++ { + for i := range numDeposits { someRoot := bytesutil.ToBytes32([]byte(strconv.Itoa(i))) items[i] = someRoot[:] } @@ -357,14 +357,14 @@ func BenchmarkInsertTrie_Optimized(b *testing.B) { require.NoError(b, err) someItem := bytesutil.ToBytes32([]byte("hello-world")) - b.StartTimer() - for i := 0; i < b.N; i++ { + + for i := 0; b.Loop(); i++ { require.NoError(b, tr.Insert(someItem[:], i%numDeposits)) } } func BenchmarkGenerateProof(b 
*testing.B) { - b.StopTimer() + items := [][]byte{ []byte("A"), []byte("BB"), @@ -377,15 +377,14 @@ func BenchmarkGenerateProof(b *testing.B) { normalTrie, err := trie.GenerateTrieFromItems(items, params.BeaconConfig().DepositContractTreeDepth) require.NoError(b, err) - b.StartTimer() - for i := 0; i < b.N; i++ { + for b.Loop() { _, err := normalTrie.MerkleProof(3) require.NoError(b, err) } } func BenchmarkVerifyMerkleProofWithDepth(b *testing.B) { - b.StopTimer() + items := [][]byte{ []byte("A"), []byte("BB"), @@ -402,8 +401,8 @@ func BenchmarkVerifyMerkleProofWithDepth(b *testing.B) { root, err := m.HashTreeRoot() require.NoError(b, err) - b.StartTimer() - for i := 0; i < b.N; i++ { + + for b.Loop() { if ok := trie.VerifyMerkleProofWithDepth(root[:], items[2], 2, proof, params.BeaconConfig().DepositContractTreeDepth); !ok { b.Error("Merkle proof did not verify") } diff --git a/contracts/deposit/deposit_tree_test.go b/contracts/deposit/deposit_tree_test.go index fd228b8561..388e3c8983 100644 --- a/contracts/deposit/deposit_tree_test.go +++ b/contracts/deposit/deposit_tree_test.go @@ -34,7 +34,7 @@ func TestDepositTrieRoot_OK(t *testing.T) { testAcc.TxOpts.Value = depositcontract.Amount32Eth() - for i := 0; i < 100; i++ { + for i := range 100 { data := depositDataItems[i] var dataRoot [32]byte copy(dataRoot[:], depositDataRoots[i]) @@ -75,7 +75,7 @@ func TestDepositTrieRoot_Fail(t *testing.T) { require.NoError(t, err) testAcc.TxOpts.Value = depositcontract.Amount32Eth() - for i := 0; i < 100; i++ { + for i := range 100 { data := depositDataItems[i] var dataRoot [32]byte copy(dataRoot[:], depositDataRoots[i]) diff --git a/crypto/bls/blst/bls_benchmark_test.go b/crypto/bls/blst/bls_benchmark_test.go index 6f63a623ea..7b22129e53 100644 --- a/crypto/bls/blst/bls_benchmark_test.go +++ b/crypto/bls/blst/bls_benchmark_test.go @@ -17,8 +17,7 @@ func BenchmarkSignature_Verify(b *testing.B) { msg := []byte("Some msg") sig := sk.Sign(msg) - b.ResetTimer() - for i := 0; i < b.N; i++ { + for b.Loop() { if !sig.Verify(sk.PublicKey(), msg) { b.Fatal("could not verify sig") } @@ -31,7 +30,7 @@ func BenchmarkSignature_AggregateVerify(b *testing.B) { var pks []common.PublicKey var sigs []common.Signature var msgs [][32]byte - for i := 0; i < sigN; i++ { + for i := range sigN { msg := [32]byte{'s', 'i', 'g', 'n', 'e', 'd', byte(i)} sk, err := blst.RandKey() require.NoError(b, err) @@ -42,9 +41,8 @@ func BenchmarkSignature_AggregateVerify(b *testing.B) { } aggregated := blst.AggregateSignatures(sigs) - b.ResetTimer() b.ReportAllocs() - for i := 0; i < b.N; i++ { + for b.Loop() { if !aggregated.AggregateVerify(pks, msgs) { b.Fatal("could not verify aggregate sig") } @@ -56,8 +54,7 @@ func BenchmarkSecretKey_Marshal(b *testing.B) { require.NoError(b, err) d := key.Marshal() - b.ResetTimer() - for i := 0; i < b.N; i++ { + for b.Loop() { _, err := blst.SecretKeyFromBytes(d) _ = err } diff --git a/crypto/bls/blst/public_key_test.go b/crypto/bls/blst/public_key_test.go index f1eea07a1b..978331a441 100644 --- a/crypto/bls/blst/public_key_test.go +++ b/crypto/bls/blst/public_key_test.go @@ -95,7 +95,7 @@ func TestPublicKey_Aggregate(t *testing.T) { func TestPublicKey_Aggregation_NoCorruption(t *testing.T) { var pubkeys []common.PublicKey - for i := 0; i < 100; i++ { + for range 100 { priv, err := blst.RandKey() require.NoError(t, err) pubkey := priv.PublicKey() @@ -113,54 +113,40 @@ func TestPublicKey_Aggregation_NoCorruption(t *testing.T) { wg := new(sync.WaitGroup) // Aggregate different sets of keys. 
- wg.Add(1) - go func() { + wg.Go(func() { _, err := blst.AggregatePublicKeys(compressedKeys) require.NoError(t, err) - wg.Done() - }() + }) - wg.Add(1) - go func() { + wg.Go(func() { _, err := blst.AggregatePublicKeys(compressedKeys[:10]) require.NoError(t, err) - wg.Done() - }() + }) - wg.Add(1) - go func() { + wg.Go(func() { _, err := blst.AggregatePublicKeys(compressedKeys[:40]) require.NoError(t, err) - wg.Done() - }() + }) - wg.Add(1) - go func() { + wg.Go(func() { _, err := blst.AggregatePublicKeys(compressedKeys[20:60]) require.NoError(t, err) - wg.Done() - }() + }) - wg.Add(1) - go func() { + wg.Go(func() { _, err := blst.AggregatePublicKeys(compressedKeys[80:]) require.NoError(t, err) - wg.Done() - }() + }) - wg.Add(1) - go func() { + wg.Go(func() { _, err := blst.AggregatePublicKeys(compressedKeys[60:90]) require.NoError(t, err) - wg.Done() - }() + }) - wg.Add(1) - go func() { + wg.Go(func() { _, err := blst.AggregatePublicKeys(compressedKeys[40:99]) require.NoError(t, err) - wg.Done() - }() + }) wg.Wait() @@ -185,7 +171,7 @@ func BenchmarkPublicKeyFromBytes(b *testing.B) { b.Run("cache on", func(b *testing.B) { blst.EnableCaches() - for i := 0; i < b.N; i++ { + for b.Loop() { _, err := blst.PublicKeyFromBytes(pubkeyBytes) require.NoError(b, err) } @@ -193,7 +179,7 @@ func BenchmarkPublicKeyFromBytes(b *testing.B) { b.Run("cache off", func(b *testing.B) { blst.DisableCaches() - for i := 0; i < b.N; i++ { + for b.Loop() { _, err := blst.PublicKeyFromBytes(pubkeyBytes) require.NoError(b, err) } diff --git a/crypto/bls/blst/signature.go b/crypto/bls/blst/signature.go index d6e789adab..8983963995 100644 --- a/crypto/bls/blst/signature.go +++ b/crypto/bls/blst/signature.go @@ -144,7 +144,7 @@ func (s *Signature) AggregateVerify(pubKeys []common.PublicKey, msgs [][32]byte) } msgSlices := make([][]byte, len(msgs)) rawKeys := make([]*blstPublicKey, len(msgs)) - for i := 0; i < size; i++ { + for i := range size { msgSlices[i] = msgs[i][:] rawKeys[i] = pubKeys[i].(*PublicKey).p } @@ -168,7 +168,7 @@ func (s *Signature) FastAggregateVerify(pubKeys []common.PublicKey, msg [32]byte return false } rawKeys := make([]*blstPublicKey, len(pubKeys)) - for i := 0; i < len(pubKeys); i++ { + for i := range pubKeys { rawKeys[i] = pubKeys[i].(*PublicKey).p } return s.s.FastAggregateVerify(true, rawKeys, msg[:], dst) @@ -206,7 +206,7 @@ func AggregateSignatures(sigs []common.Signature) common.Signature { } rawSigs := make([]*blstSignature, len(sigs)) - for i := 0; i < len(sigs); i++ { + for i := range sigs { rawSigs[i] = sigs[i].(*Signature).s } @@ -246,7 +246,7 @@ func VerifyMultipleSignatures(sigs [][]byte, msgs [][32]byte, pubKeys []common.P mulP1Aff := make([]*blstPublicKey, length) rawMsgs := make([]blst.Message, length) - for i := 0; i < length; i++ { + for i := range length { mulP1Aff[i] = pubKeys[i].(*PublicKey).p rawMsgs[i] = msgs[i][:] } diff --git a/crypto/bls/blst/signature_test.go b/crypto/bls/blst/signature_test.go index 2890bf7161..7d5965fa22 100644 --- a/crypto/bls/blst/signature_test.go +++ b/crypto/bls/blst/signature_test.go @@ -25,7 +25,7 @@ func TestAggregateVerify(t *testing.T) { pubkeys := make([]common.PublicKey, 0, 100) sigs := make([]common.Signature, 0, 100) var msgs [][32]byte - for i := 0; i < 100; i++ { + for i := range 100 { msg := [32]byte{'h', 'e', 'l', 'l', 'o', byte(i)} priv, err := RandKey() require.NoError(t, err) @@ -45,7 +45,7 @@ func TestAggregateVerify_CompressedSignatures(t *testing.T) { sigs := make([]common.Signature, 0, 100) var sigBytes [][]byte var msgs 
[][32]byte - for i := 0; i < 100; i++ { + for i := range 100 { msg := [32]byte{'h', 'e', 'l', 'l', 'o', byte(i)} priv, err := RandKey() require.NoError(t, err) @@ -69,7 +69,7 @@ func TestFastAggregateVerify(t *testing.T) { pubkeys := make([]common.PublicKey, 0, 100) sigs := make([]common.Signature, 0, 100) msg := [32]byte{'h', 'e', 'l', 'l', 'o'} - for i := 0; i < 100; i++ { + for range 100 { priv, err := RandKey() require.NoError(t, err) pub := priv.PublicKey() @@ -119,7 +119,7 @@ func TestMultipleSignatureVerification(t *testing.T) { pubkeys := make([]common.PublicKey, 0, 100) sigs := make([][]byte, 0, 100) var msgs [][32]byte - for i := 0; i < 100; i++ { + for i := range 100 { msg := [32]byte{'h', 'e', 'l', 'l', 'o', byte(i)} priv, err := RandKey() require.NoError(t, err) @@ -146,7 +146,7 @@ func TestEth2FastAggregateVerify(t *testing.T) { pubkeys := make([]common.PublicKey, 0, 100) sigs := make([]common.Signature, 0, 100) msg := [32]byte{'h', 'e', 'l', 'l', 'o'} - for i := 0; i < 100; i++ { + for range 100 { priv, err := RandKey() require.NoError(t, err) pub := priv.PublicKey() diff --git a/crypto/bls/signature_batch.go b/crypto/bls/signature_batch.go index 607616e595..c13c8652b0 100644 --- a/crypto/bls/signature_batch.go +++ b/crypto/bls/signature_batch.go @@ -3,6 +3,7 @@ package bls import ( "encoding/hex" "fmt" + "strings" "github.com/pkg/errors" ) @@ -54,7 +55,8 @@ func (s *SignatureBatch) VerifyVerbosely() (bool, error) { // if signature batch is invalid, we then verify signatures one by one. - errmsg := "some signatures are invalid. details:" + var errmsg strings.Builder + errmsg.WriteString("some signatures are invalid. details:") for i := 0; i < len(s.Signatures); i++ { sig := s.Signatures[i] msg := s.Messages[i] @@ -64,20 +66,20 @@ func (s *SignatureBatch) VerifyVerbosely() (bool, error) { if !valid { desc := s.Descriptions[i] if err != nil { - errmsg += fmt.Sprintf("\nsignature '%s' is invalid."+ + errmsg.WriteString(fmt.Sprintf("\nsignature '%s' is invalid."+ " signature: 0x%s, public key: 0x%s, message: 0x%v, error: %v", desc, hex.EncodeToString(sig), hex.EncodeToString(pubKey.Marshal()), - hex.EncodeToString(msg[:]), err) + hex.EncodeToString(msg[:]), err)) } else { - errmsg += fmt.Sprintf("\nsignature '%s' is invalid."+ + errmsg.WriteString(fmt.Sprintf("\nsignature '%s' is invalid."+ " signature: 0x%s, public key: 0x%s, message: 0x%v", desc, hex.EncodeToString(sig), hex.EncodeToString(pubKey.Marshal()), - hex.EncodeToString(msg[:])) + hex.EncodeToString(msg[:]))) } } } - return false, errors.New(errmsg) + return false, errors.New(errmsg.String()) } // Copy the attached signature batch and return it diff --git a/crypto/bls/signature_batch_test.go b/crypto/bls/signature_batch_test.go index a33c0b4a00..2a235c41ff 100644 --- a/crypto/bls/signature_batch_test.go +++ b/crypto/bls/signature_batch_test.go @@ -90,7 +90,7 @@ func TestVerifyVerbosely_VerificationThrowsError(t *testing.T) { func TestSignatureBatch_RemoveDuplicates(t *testing.T) { var keys []SecretKey - for i := 0; i < 100; i++ { + for range 100 { key, err := RandKey() assert.NoError(t, err) keys = append(keys, key) @@ -379,7 +379,7 @@ func TestSignatureBatch_RemoveDuplicates(t *testing.T) { func TestSignatureBatch_AggregateBatch(t *testing.T) { var keys []SecretKey - for i := 0; i < 100; i++ { + for range 100 { key, err := RandKey() assert.NoError(t, err) keys = append(keys, key) @@ -654,7 +654,7 @@ func NewValidSignatureSet(t *testing.T, msgBody string, num int) *SignatureBatch Descriptions: make([]string, num), } - 
for i := 0; i < num; i++ { + for i := range num { priv, err := RandKey() require.NoError(t, err) pubkey := priv.PublicKey() @@ -679,7 +679,7 @@ func NewInvalidSignatureSet(t *testing.T, msgBody string, num int, throwErr bool Descriptions: make([]string, num), } - for i := 0; i < num; i++ { + for i := range num { priv, err := RandKey() require.NoError(t, err) pubkey := priv.PublicKey() diff --git a/crypto/hash/hash.go b/crypto/hash/hash.go index 972f006032..d0473c0d49 100644 --- a/crypto/hash/hash.go +++ b/crypto/hash/hash.go @@ -19,7 +19,7 @@ import ( // or has nil objects within lists. var ErrNilProto = errors.New("cannot hash a nil protobuf message") -var sha256Pool = sync.Pool{New: func() interface{} { +var sha256Pool = sync.Pool{New: func() any { return sha256.New() }} @@ -75,7 +75,7 @@ func CustomSHA256Hasher() func([]byte) [32]byte { } } -var keccak256Pool = sync.Pool{New: func() interface{} { +var keccak256Pool = sync.Pool{New: func() any { return sha3.NewLegacyKeccak256() }} diff --git a/crypto/hash/hash_test.go b/crypto/hash/hash_test.go index afd15a35e8..9d8ae01cf3 100644 --- a/crypto/hash/hash_test.go +++ b/crypto/hash/hash_test.go @@ -26,7 +26,7 @@ func TestHash(t *testing.T) { } func BenchmarkHash(b *testing.B) { - for i := 0; i < b.N; i++ { + for b.Loop() { hash.Hash([]byte("abc")) } } @@ -50,7 +50,7 @@ func TestHashKeccak256(t *testing.T) { } func BenchmarkHashKeccak256(b *testing.B) { - for i := 0; i < b.N; i++ { + for b.Loop() { hash.Keccak256([]byte("abc")) } } @@ -77,7 +77,7 @@ func TestHashProtoFuzz(t *testing.T) { } }(t) - for i := 0; i < 1000; i++ { + for range 1000 { msg := &pb.AddressBook{} f.Fuzz(msg) _, err := hash.Proto(msg) @@ -98,7 +98,7 @@ func BenchmarkHashProto(b *testing.B) { Signature: bls.NewAggregateSignature().Marshal(), } - for i := 0; i < b.N; i++ { + for b.Loop() { if _, err := hash.Proto(att); err != nil { b.Log(err) } diff --git a/crypto/hash/htr/hashtree.go b/crypto/hash/htr/hashtree.go index 6a052226e1..5ff9d33b53 100644 --- a/crypto/hash/htr/hashtree.go +++ b/crypto/hash/htr/hashtree.go @@ -35,7 +35,7 @@ func VectorizedSha256(inputList [][32]byte) [][32]byte { wg := sync.WaitGroup{} wg.Add(n) groupSize := len(inputList) / (2 * (n + 1)) - for j := 0; j < n; j++ { + for j := range n { go hashParallel(inputList[j*2*groupSize:(j+1)*2*groupSize], outputList[j*groupSize:], &wg) } err := gohashtree.Hash(outputList[n*groupSize:], inputList[n*2*groupSize:]) diff --git a/crypto/hash/htr/hashtree_test.go b/crypto/hash/htr/hashtree_test.go index 57f1e12ea2..d57f355597 100644 --- a/crypto/hash/htr/hashtree_test.go +++ b/crypto/hash/htr/hashtree_test.go @@ -12,12 +12,10 @@ func Test_VectorizedSha256(t *testing.T) { secondLargeSlice := make([][32]byte, 32*minSliceSizeToParallelize) hash1 := make([][32]byte, 16*minSliceSizeToParallelize) wg := sync.WaitGroup{} - wg.Add(1) - go func() { - defer wg.Done() + wg.Go(func() { tempHash := VectorizedSha256(largeSlice) copy(hash1, tempHash) - }() + }) wg.Wait() hash2 := VectorizedSha256(secondLargeSlice) require.Equal(t, len(hash1), len(hash2)) diff --git a/crypto/keystore/key.go b/crypto/keystore/key.go index e4d538ef84..3dae549b58 100644 --- a/crypto/keystore/key.go +++ b/crypto/keystore/key.go @@ -75,12 +75,12 @@ type encryptedKeyJSON struct { } type cryptoJSON struct { - Cipher string `json:"cipher"` - CipherText string `json:"ciphertext"` - CipherParams cipherparamsJSON `json:"cipherparams"` - KDF string `json:"kdf"` - KDFParams map[string]interface{} `json:"kdfparams"` - MAC string `json:"mac"` + Cipher string 
`json:"cipher"` + CipherText string `json:"ciphertext"` + CipherParams cipherparamsJSON `json:"cipherparams"` + KDF string `json:"kdf"` + KDFParams map[string]any `json:"kdfparams"` + MAC string `json:"mac"` } type cipherparamsJSON struct { diff --git a/crypto/keystore/keystore.go b/crypto/keystore/keystore.go index e4fbf98ad1..9fb01d9efb 100644 --- a/crypto/keystore/keystore.go +++ b/crypto/keystore/keystore.go @@ -153,7 +153,7 @@ func EncryptKey(key *Key, password string, scryptN, scryptP int) ([]byte, error) mac := Keccak256(derivedKey[16:32], cipherText) - scryptParamsJSON := make(map[string]interface{}, 5) + scryptParamsJSON := make(map[string]any, 5) scryptParamsJSON["n"] = scryptN scryptParamsJSON["r"] = scryptR scryptParamsJSON["p"] = scryptP diff --git a/crypto/keystore/utils.go b/crypto/keystore/utils.go index 6b518438b1..05e9021a68 100644 --- a/crypto/keystore/utils.go +++ b/crypto/keystore/utils.go @@ -41,7 +41,7 @@ func aesCTRXOR(key, inText, iv []byte) ([]byte, error) { return outText, err } -func ensureInt(x interface{}) int { +func ensureInt(x any) int { res, ok := x.(int) if !ok { res = int(x.(float64)) diff --git a/crypto/random/random_test.go b/crypto/random/random_test.go index 3d61f3e241..676b52480c 100644 --- a/crypto/random/random_test.go +++ b/crypto/random/random_test.go @@ -48,7 +48,7 @@ func TestGetRandBlobElements(t *testing.T) { blob := GetRandBlob(seed) // Check that each field element in the blob matches what we'd get from GetRandFieldElement - for i := 0; i < GoKZG.ScalarsPerBlob; i++ { + for i := range GoKZG.ScalarsPerBlob { start := i * GoKZG.SerializedScalarSize end := start + GoKZG.SerializedScalarSize diff --git a/deps.bzl b/deps.bzl index 8082c17f83..1b41264eb3 100644 --- a/deps.bzl +++ b/deps.bzl @@ -4784,8 +4784,8 @@ def prysm_deps(): go_repository( name = "org_golang_x_crypto", importpath = "golang.org/x/crypto", - sum = "h1:AnAEvhDddvBdpY+uR+MyHmuZzzNqXSe/GvuDeob5L34=", - version = "v0.36.0", + sum = "h1:dduJYIi3A3KOfdGOHX8AVZ/jGiyPa3IbBozJ5kNuE04=", + version = "v0.43.0", ) go_repository( name = "org_golang_x_exp", @@ -4820,14 +4820,14 @@ def prysm_deps(): go_repository( name = "org_golang_x_mod", importpath = "golang.org/x/mod", - sum = "h1:Zb7khfcRGKk+kqfxFaP5tZqCnDZMjC5VtUBs87Hr6QM=", - version = "v0.23.0", + sum = "h1:HV8lRxZC4l2cr3Zq1LvtOsi/ThTgWnUk/y64QSs8GwA=", + version = "v0.29.0", ) go_repository( name = "org_golang_x_net", importpath = "golang.org/x/net", - sum = "h1:vRMAPTMaeGqVhG5QyLJHqNDwecKTomGeqbnfZyKlBI8=", - version = "v0.38.0", + sum = "h1:giFlY12I07fugqwPuWJi68oOnpfqFnJIJzaIIm2JVV4=", + version = "v0.46.0", ) go_repository( name = "org_golang_x_oauth2", @@ -4844,32 +4844,32 @@ def prysm_deps(): go_repository( name = "org_golang_x_sync", importpath = "golang.org/x/sync", - sum = "h1:MHc5BpPuC30uJk597Ri8TV3CNZcTLu6B6z4lJy+g6Jw=", - version = "v0.12.0", + sum = "h1:l60nONMj9l5drqw6jlhIELNv9I0A4OFgRsG9k2oT9Ug=", + version = "v0.17.0", ) go_repository( name = "org_golang_x_sys", importpath = "golang.org/x/sys", - sum = "h1:ioabZlmFYtWhL+TRYpcnNlLwhyxaM9kWTDEmfnprqik=", - version = "v0.31.0", + sum = "h1:fdNQudmxPjkdUTPnLn5mdQv7Zwvbvpaxqs831goi9kQ=", + version = "v0.37.0", ) go_repository( name = "org_golang_x_telemetry", importpath = "golang.org/x/telemetry", - sum = "h1:zf5N6UOrA487eEFacMePxjXAJctxKmyjKUsjA11Uzuk=", - version = "v0.0.0-20240521205824-bda55230c457", + sum = "h1:LvzTn0GQhWuvKH/kVRS3R3bVAsdQWI7hvfLHGgh9+lU=", + version = "v0.0.0-20251008203120-078029d740a8", ) go_repository( name = "org_golang_x_term", importpath = 
"golang.org/x/term", - sum = "h1:PQ39fJZ+mfadBm0y5WlL4vlM7Sx1Hgf13sMIY2+QS9Y=", - version = "v0.30.0", + sum = "h1:zMPR+aF8gfksFprF/Nc/rd1wRS1EI6nDBGyWAvDzx2Q=", + version = "v0.36.0", ) go_repository( name = "org_golang_x_text", importpath = "golang.org/x/text", - sum = "h1:D71I7dUrlY+VX0gQShAThNGHFxZ13dGLBHQLVl1mJlY=", - version = "v0.23.0", + sum = "h1:yznKA/E9zq54KzlzBEAWn1NXSQ8DIp/NYMy88xJjl4k=", + version = "v0.30.0", ) go_repository( name = "org_golang_x_time", @@ -4880,8 +4880,14 @@ def prysm_deps(): go_repository( name = "org_golang_x_tools", importpath = "golang.org/x/tools", - sum = "h1:BgcpHewrV5AUp2G9MebG4XPFI1E2W41zU1SaqVA9vJY=", - version = "v0.30.0", + sum = "h1:Hx2Xv8hISq8Lm16jvBZ2VQf+RLmbd7wVUsALibYI/IQ=", + version = "v0.38.0", + ) + go_repository( + name = "org_golang_x_tools_go_expect", + importpath = "golang.org/x/tools/go/expect", + sum = "h1:jpBZDwmgPhXsKZC6WhL20P4b/wmnpsEAGHaNy0n/rJM=", + version = "v0.1.1-deprecated", ) go_repository( name = "org_golang_x_xerrors", diff --git a/encoding/bytesutil/bytes_go120.go b/encoding/bytesutil/bytes_go120.go index e35dfa1791..7bc7acdbad 100644 --- a/encoding/bytesutil/bytes_go120.go +++ b/encoding/bytesutil/bytes_go120.go @@ -1,5 +1,4 @@ //go:build go1.20 -// +build go1.20 package bytesutil diff --git a/encoding/bytesutil/bytes_legacy.go b/encoding/bytesutil/bytes_legacy.go index 7ede8a934f..53b3f16916 100644 --- a/encoding/bytesutil/bytes_legacy.go +++ b/encoding/bytesutil/bytes_legacy.go @@ -1,5 +1,4 @@ //go:build !go1.20 -// +build !go1.20 package bytesutil diff --git a/encoding/bytesutil/bytes_test.go b/encoding/bytesutil/bytes_test.go index e086831dce..81956d5262 100644 --- a/encoding/bytesutil/bytes_test.go +++ b/encoding/bytesutil/bytes_test.go @@ -235,7 +235,7 @@ func BenchmarkUnsafeCastToString(b *testing.B) { var nilData []byte b.Run("string(b)", func(b *testing.B) { - for i := 0; i < b.N; i++ { + for b.Loop() { _ = string(data) _ = string(empty) _ = string(nilData) @@ -243,7 +243,7 @@ func BenchmarkUnsafeCastToString(b *testing.B) { }) b.Run("bytesutil.UnsafeCastToString(b)", func(b *testing.B) { - for i := 0; i < b.N; i++ { + for b.Loop() { _ = bytesutil.UnsafeCastToString(data) _ = bytesutil.UnsafeCastToString(empty) _ = bytesutil.UnsafeCastToString(nilData) @@ -263,7 +263,7 @@ func FuzzUnsafeCastToString(f *testing.F) { func BenchmarkToBytes32(b *testing.B) { x := []byte{0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31} - for i := 0; i < b.N; i++ { + for b.Loop() { bytesutil.ToBytes32(x) } } @@ -318,7 +318,7 @@ func TestSafeCopyBytes_Copy(t *testing.T) { func BenchmarkSafeCopyBytes(b *testing.B) { dSlice := make([][]byte, 900000) - for i := 0; i < 900000; i++ { + for i := range 900000 { slice := make([]byte, 32) slice[0] = 'A' dSlice[i] = slice diff --git a/encoding/bytesutil/integers.go b/encoding/bytesutil/integers.go index 1e91542b4c..4c62474a4d 100644 --- a/encoding/bytesutil/integers.go +++ b/encoding/bytesutil/integers.go @@ -15,10 +15,7 @@ func ToBytes(x uint64, length int) []byte { if length < 0 { length = 0 } - makeLength := length - if length < 8 { - makeLength = 8 - } + makeLength := max(length, 8) bytes := make([]byte, makeLength) binary.LittleEndian.PutUint64(bytes, x) return bytes[:length] diff --git a/encoding/bytesutil/integers_test.go b/encoding/bytesutil/integers_test.go index 968a390dd0..b9cc86994f 100644 --- a/encoding/bytesutil/integers_test.go +++ b/encoding/bytesutil/integers_test.go @@ -254,7 +254,7 @@ func 
TestUint32ToBytes4(t *testing.T) { } func TestUint64ToBytes_RoundTrip(t *testing.T) { - for i := uint64(0); i < 10000; i++ { + for i := range uint64(10000) { b := bytesutil.Uint64ToBytesBigEndian(i) if got := bytesutil.BytesToUint64BigEndian(b); got != i { t.Error("Round trip did not match original value") diff --git a/encoding/ssz/equality/deep_equal.go b/encoding/ssz/equality/deep_equal.go index d7e6714510..744cdb004b 100644 --- a/encoding/ssz/equality/deep_equal.go +++ b/encoding/ssz/equality/deep_equal.go @@ -296,7 +296,7 @@ func deepValueBaseTypeEqual(v1, v2 reflect.Value) bool { // // Credits go to the Go team as this is an extension of the official Go source code's // reflect.DeepEqual function to handle special SSZ edge cases. -func DeepEqual(x, y interface{}) bool { +func DeepEqual(x, y any) bool { if x == nil || y == nil { return x == y } @@ -312,7 +312,7 @@ func DeepEqual(x, y interface{}) bool { return deepValueEqual(v1, v2, make(map[visit]bool), 0) } -func IsProto(item interface{}) bool { +func IsProto(item any) bool { typ := reflect.TypeOf(item) kind := typ.Kind() if kind != reflect.Slice && kind != reflect.Array && kind != reflect.Map { diff --git a/encoding/ssz/equality/deep_equal_test.go b/encoding/ssz/equality/deep_equal_test.go index 31b7357954..cff6ccec35 100644 --- a/encoding/ssz/equality/deep_equal_test.go +++ b/encoding/ssz/equality/deep_equal_test.go @@ -89,7 +89,7 @@ func TestDeepEqualProto(t *testing.T) { func Test_IsProto(t *testing.T) { tests := []struct { name string - item interface{} + item any want bool }{ { diff --git a/encoding/ssz/helpers.go b/encoding/ssz/helpers.go index c5c9553037..73a218bae5 100644 --- a/encoding/ssz/helpers.go +++ b/encoding/ssz/helpers.go @@ -83,12 +83,10 @@ func PackByChunk(serializedItems [][]byte) ([][BytesPerChunk]byte, error) { numItems := len(orderedItems) var chunks [][BytesPerChunk]byte for i := 0; i < numItems; i += BytesPerChunk { - j := i + BytesPerChunk - // We create our upper bound index of the chunk, if it is greater than numItems, - // we set it as numItems itself. - if j > numItems { - j = numItems - } + j := min( + // We create our upper bound index of the chunk, if it is greater than numItems, + // we set it as numItems itself. + i+BytesPerChunk, numItems) // We create chunks from the list of items based on the // indices determined above. // Right-pad the last chunk with zero bytes if it does not diff --git a/encoding/ssz/htrutils_test.go b/encoding/ssz/htrutils_test.go index 0188fafca1..8529e03a81 100644 --- a/encoding/ssz/htrutils_test.go +++ b/encoding/ssz/htrutils_test.go @@ -124,7 +124,7 @@ func TestTransactionsRoot(t *testing.T) { name: "max txs", txs: func() [][]byte { var txs [][]byte - for i := 0; i < fieldparams.MaxTxsPerPayloadLength; i++ { + for range fieldparams.MaxTxsPerPayloadLength { txs = append(txs, []byte{}) } return txs @@ -135,7 +135,7 @@ func TestTransactionsRoot(t *testing.T) { name: "exceed max txs", txs: func() [][]byte { var txs [][]byte - for i := 0; i < fieldparams.MaxTxsPerPayloadLength+1; i++ { + for range fieldparams.MaxTxsPerPayloadLength + 1 { txs = append(txs, []byte{}) } return txs diff --git a/encoding/ssz/merkleize.go b/encoding/ssz/merkleize.go index 76feec0469..72845204ef 100644 --- a/encoding/ssz/merkleize.go +++ b/encoding/ssz/merkleize.go @@ -113,7 +113,7 @@ func Merkleize(hasher Hasher, count, limit uint64, leaf func(i uint64) []byte) ( } // merge in leaf by leaf. 
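The PackByChunk and ToBytes hunks above replace hand-rolled clamping with the min and max builtins introduced in Go 1.21. A small sketch of the same idea, using illustrative constants rather than the real BytesPerChunk:

package main

import "fmt"

func main() {
	const bytesPerChunk = 32
	numItems := 70

	// Chunk upper bound clamped to the number of items, as in PackByChunk.
	for i := 0; i < numItems; i += bytesPerChunk {
		j := min(i+bytesPerChunk, numItems)
		fmt.Printf("chunk [%d:%d)\n", i, j)
	}

	// Minimum allocation size, as in ToBytes: never fewer than 8 bytes.
	length := 3
	makeLength := max(length, 8)
	fmt.Println("allocating", makeLength, "bytes")
}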
- for i := uint64(0); i < count; i++ { + for i := range count { copy(h, leaf(i)) merge(i) } @@ -141,7 +141,7 @@ func MerkleizeVector(elements [][32]byte, length uint64) [32]byte { if len(elements) == 0 { return trie.ZeroHashes[depth] } - for i := uint8(0); i < depth; i++ { + for i := range depth { layerLen := len(elements) oddNodeLength := layerLen%2 == 1 if oddNodeLength { diff --git a/encoding/ssz/slice_root.go b/encoding/ssz/slice_root.go index 85575f03d3..fe116fe6c8 100644 --- a/encoding/ssz/slice_root.go +++ b/encoding/ssz/slice_root.go @@ -16,7 +16,7 @@ func SliceRoot[T Hashable](slice []T, limit uint64) ([32]byte, error) { } roots := make([][32]byte, len(slice)) - for i := 0; i < len(slice); i++ { + for i := range slice { r, err := slice[i].HashTreeRoot() if err != nil { return [32]byte{}, errors.Wrap(err, "could not merkleize object") diff --git a/genesis/initialize_test.go b/genesis/initialize_test.go index af4fef1aba..db4d89cf4c 100644 --- a/genesis/initialize_test.go +++ b/genesis/initialize_test.go @@ -98,7 +98,7 @@ func createTestGenesisState(t *testing.T, numValidators uint64, slot primitives. } // Initialize validators and balances - for i := uint64(0); i < numValidators; i++ { + for i := range numValidators { pb.Validators[i] = ðpb.Validator{ PublicKey: deposits[i].Data.PublicKey, WithdrawalCredentials: deposits[i].Data.WithdrawalCredentials, diff --git a/genesis/internal/embedded/mainnet.go b/genesis/internal/embedded/mainnet.go index ca855c7313..730402a62f 100644 --- a/genesis/internal/embedded/mainnet.go +++ b/genesis/internal/embedded/mainnet.go @@ -1,5 +1,4 @@ //go:build !noMainnetGenesis -// +build !noMainnetGenesis package embedded diff --git a/go.mod b/go.mod index d829269912..d0abcd3c1d 100644 --- a/go.mod +++ b/go.mod @@ -88,10 +88,10 @@ require ( go.opentelemetry.io/otel/trace v1.35.0 go.uber.org/automaxprocs v1.5.2 go.uber.org/mock v0.5.2 - golang.org/x/crypto v0.36.0 + golang.org/x/crypto v0.43.0 golang.org/x/exp v0.0.0-20250128182459-e0ece0dbea4c - golang.org/x/sync v0.12.0 - golang.org/x/tools v0.30.0 + golang.org/x/sync v0.17.0 + golang.org/x/tools v0.38.0 google.golang.org/genproto v0.0.0-20230410155749-daa745c078e1 google.golang.org/grpc v1.71.0 google.golang.org/protobuf v1.36.5 @@ -263,12 +263,14 @@ require ( go.uber.org/multierr v1.11.0 // indirect go.uber.org/zap v1.27.0 // indirect golang.org/x/exp/typeparams v0.0.0-20231108232855-2478ac86f678 // indirect - golang.org/x/mod v0.23.0 // indirect - golang.org/x/net v0.38.0 // indirect + golang.org/x/mod v0.29.0 // indirect + golang.org/x/net v0.46.0 // indirect golang.org/x/oauth2 v0.25.0 // indirect - golang.org/x/term v0.30.0 // indirect - golang.org/x/text v0.23.0 // indirect + golang.org/x/telemetry v0.0.0-20251008203120-078029d740a8 // indirect + golang.org/x/term v0.36.0 // indirect + golang.org/x/text v0.30.0 // indirect golang.org/x/time v0.9.0 // indirect + golang.org/x/tools/go/expect v0.1.1-deprecated // indirect gopkg.in/cenkalti/backoff.v1 v1.1.0 // indirect gopkg.in/inf.v0 v0.9.1 // indirect gopkg.in/natefinch/lumberjack.v2 v2.2.1 // indirect @@ -288,7 +290,7 @@ require ( github.com/go-playground/validator/v10 v10.13.0 github.com/peterh/liner v1.2.0 // indirect github.com/prysmaticlabs/gohashtree v0.0.5-beta - golang.org/x/sys v0.31.0 // indirect + golang.org/x/sys v0.37.0 // indirect k8s.io/klog/v2 v2.120.1 // indirect k8s.io/utils v0.0.0-20230726121419-3b25d923346b // indirect ) diff --git a/go.sum b/go.sum index 46d405baa0..b413fb263a 100644 --- a/go.sum +++ b/go.sum @@ -1173,8 +1173,8 
@@ golang.org/x/crypto v0.0.0-20220722155217-630584e8d5aa/go.mod h1:IxCIyHEi3zRg3s0 golang.org/x/crypto v0.8.0/go.mod h1:mRqEX+O9/h5TFCrQhkgjo2yKi0yYA+9ecGkdQoHrywE= golang.org/x/crypto v0.12.0/go.mod h1:NF0Gs7EO5K4qLn+Ylc+fih8BSTeIjAP05siRnAh98yw= golang.org/x/crypto v0.18.0/go.mod h1:R0j02AL6hcrfOiy9T4ZYp/rcWeMxM3L6QYxlOuEG1mg= -golang.org/x/crypto v0.36.0 h1:AnAEvhDddvBdpY+uR+MyHmuZzzNqXSe/GvuDeob5L34= -golang.org/x/crypto v0.36.0/go.mod h1:Y4J0ReaxCR1IMaabaSMugxJES1EpwhBHhv2bDHklZvc= +golang.org/x/crypto v0.43.0 h1:dduJYIi3A3KOfdGOHX8AVZ/jGiyPa3IbBozJ5kNuE04= +golang.org/x/crypto v0.43.0/go.mod h1:BFbav4mRNlXJL4wNeejLpWxB7wMbc79PdRGhWKncxR0= golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8= @@ -1217,8 +1217,8 @@ golang.org/x/mod v0.4.1/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.4.2/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4= golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= -golang.org/x/mod v0.23.0 h1:Zb7khfcRGKk+kqfxFaP5tZqCnDZMjC5VtUBs87Hr6QM= -golang.org/x/mod v0.23.0/go.mod h1:6SkKJ3Xj0I0BrPOZoBy3bdMptDDU9oJrpohJ3eWZ1fY= +golang.org/x/mod v0.29.0 h1:HV8lRxZC4l2cr3Zq1LvtOsi/ThTgWnUk/y64QSs8GwA= +golang.org/x/mod v0.29.0/go.mod h1:NyhrlYXJ2H4eJiRy/WDBO6HMqZQ6q9nk4JzS3NuCK+w= golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180906233101-161cd47e91fd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= @@ -1274,8 +1274,8 @@ golang.org/x/net v0.9.0/go.mod h1:d48xBJpPfHeWQsugry2m+kC02ZBRGRgulfHnEXEuWns= golang.org/x/net v0.10.0/go.mod h1:0qNGK6F8kojg2nk9dLZ2mShWaEBan6FAoqfSigmmuDg= golang.org/x/net v0.14.0/go.mod h1:PpSgVXXLK0OxS0F31C1/tv6XNguvCrnXIDrFMspZIUI= golang.org/x/net v0.20.0/go.mod h1:z8BVo6PvndSri0LbOE3hAn0apkU+1YvI6E70E9jsnvY= -golang.org/x/net v0.38.0 h1:vRMAPTMaeGqVhG5QyLJHqNDwecKTomGeqbnfZyKlBI8= -golang.org/x/net v0.38.0/go.mod h1:ivrbrMbzFq5J41QOQh0siUuly180yBYtLp+CKbEaFx8= +golang.org/x/net v0.46.0 h1:giFlY12I07fugqwPuWJi68oOnpfqFnJIJzaIIm2JVV4= +golang.org/x/net v0.46.0/go.mod h1:Q9BGdFy1y4nkUwiLvT5qtyhAnEHgnQ/zd8PfU6nc210= golang.org/x/oauth2 v0.0.0-20170912212905-13449ad91cb2/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= golang.org/x/oauth2 v0.0.0-20181017192945-9dcd33a902f4/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= @@ -1305,8 +1305,8 @@ golang.org/x/sync v0.0.0-20201207232520-09787c993a3a/go.mod h1:RxMgew5VJxzue5/jJ golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.12.0 h1:MHc5BpPuC30uJk597Ri8TV3CNZcTLu6B6z4lJy+g6Jw= -golang.org/x/sync v0.12.0/go.mod h1:1dzgHSNfp02xaA81J2MS99Qcpr2w7fw1gpm99rleRqA= +golang.org/x/sync v0.17.0 
h1:l60nONMj9l5drqw6jlhIELNv9I0A4OFgRsG9k2oT9Ug= +golang.org/x/sync v0.17.0/go.mod h1:9KTHXmSnoGruLpwFjVSX0lNNA75CykiMECbovNTZqGI= golang.org/x/sys v0.0.0-20180810173357-98c5dad5d1a0/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20180823144017-11551d06cbcc/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= @@ -1395,8 +1395,10 @@ golang.org/x/sys v0.9.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.11.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.14.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/sys v0.16.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= -golang.org/x/sys v0.31.0 h1:ioabZlmFYtWhL+TRYpcnNlLwhyxaM9kWTDEmfnprqik= -golang.org/x/sys v0.31.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k= +golang.org/x/sys v0.37.0 h1:fdNQudmxPjkdUTPnLn5mdQv7Zwvbvpaxqs831goi9kQ= +golang.org/x/sys v0.37.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks= +golang.org/x/telemetry v0.0.0-20251008203120-078029d740a8 h1:LvzTn0GQhWuvKH/kVRS3R3bVAsdQWI7hvfLHGgh9+lU= +golang.org/x/telemetry v0.0.0-20251008203120-078029d740a8/go.mod h1:Pi4ztBfryZoJEkyFTI5/Ocsu2jXyDr6iSdgJiYE/uwE= golang.org/x/term v0.0.0-20201117132131-f5c789dd3221/go.mod h1:Nr5EML6q2oocZ2LXRh80K7BxOlk5/8JxuGnuhpl+muw= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= @@ -1405,8 +1407,8 @@ golang.org/x/term v0.7.0/go.mod h1:P32HKFT3hSsZrRxla30E9HqToFYAQPCMs/zFMBUFqPY= golang.org/x/term v0.8.0/go.mod h1:xPskH00ivmX89bAKVGSKKtLOWNx2+17Eiy94tnKShWo= golang.org/x/term v0.11.0/go.mod h1:zC9APTIj3jG3FdV/Ons+XE1riIZXG4aZ4GTHiPZJPIU= golang.org/x/term v0.16.0/go.mod h1:yn7UURbUtPyrVJPGPq404EukNFxcm/foM+bV/bfcDsY= -golang.org/x/term v0.30.0 h1:PQ39fJZ+mfadBm0y5WlL4vlM7Sx1Hgf13sMIY2+QS9Y= -golang.org/x/term v0.30.0/go.mod h1:NYYFdzHoI5wRh/h5tDMdMqCqPJZEuNqVR5xJLd/n67g= +golang.org/x/term v0.36.0 h1:zMPR+aF8gfksFprF/Nc/rd1wRS1EI6nDBGyWAvDzx2Q= +golang.org/x/term v0.36.0/go.mod h1:Qu394IJq6V6dCBRgwqshf3mPF85AqzYEzofzRdZkWss= golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= @@ -1421,8 +1423,8 @@ golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8= golang.org/x/text v0.12.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE= golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= -golang.org/x/text v0.23.0 h1:D71I7dUrlY+VX0gQShAThNGHFxZ13dGLBHQLVl1mJlY= -golang.org/x/text v0.23.0/go.mod h1:/BLNzu4aZCJ1+kcD0DNRotWKage4q2rGVAg4o22unh4= +golang.org/x/text v0.30.0 h1:yznKA/E9zq54KzlzBEAWn1NXSQ8DIp/NYMy88xJjl4k= +golang.org/x/text v0.30.0/go.mod h1:yDdHFIX9t+tORqspjENWgzaCVXgk0yYnYuSZ8UzzBVM= golang.org/x/time v0.0.0-20170424234030-8be79e1e0910/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20180412165947-fbb02b2291d2/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod 
h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= @@ -1495,8 +1497,10 @@ golang.org/x/tools v0.1.4/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= golang.org/x/tools v0.1.5/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc= golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU= -golang.org/x/tools v0.30.0 h1:BgcpHewrV5AUp2G9MebG4XPFI1E2W41zU1SaqVA9vJY= -golang.org/x/tools v0.30.0/go.mod h1:c347cR/OJfw5TI+GfX7RUPNMdDRRbjvYTS0jPyvsVtY= +golang.org/x/tools v0.38.0 h1:Hx2Xv8hISq8Lm16jvBZ2VQf+RLmbd7wVUsALibYI/IQ= +golang.org/x/tools v0.38.0/go.mod h1:yEsQ/d/YK8cjh0L6rZlY8tgtlKiBNTL14pGDJPJpYQs= +golang.org/x/tools/go/expect v0.1.1-deprecated h1:jpBZDwmgPhXsKZC6WhL20P4b/wmnpsEAGHaNy0n/rJM= +golang.org/x/tools/go/expect v0.1.1-deprecated/go.mod h1:eihoPOH+FgIqa3FpoTwguz/bVUSGBlGQU67vpBeOrBY= golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= diff --git a/io/file/fileutil.go b/io/file/fileutil.go index 004d162f33..9996e9529c 100644 --- a/io/file/fileutil.go +++ b/io/file/fileutil.go @@ -9,6 +9,7 @@ import ( "os/user" "path" "path/filepath" + "slices" "sort" "strings" @@ -326,7 +327,7 @@ func HashDir(dir string) (string, error) { } h := sha256.New() - files = append([]string(nil), files...) + files = slices.Clone(files) sort.Strings(files) for _, file := range files { hf, err := HashFile(filepath.Join(dir, file)) diff --git a/math/math_helper_test.go b/math/math_helper_test.go index f90c740125..5aff499652 100644 --- a/math/math_helper_test.go +++ b/math/math_helper_test.go @@ -156,21 +156,21 @@ func TestMath_Mod(t *testing.T) { func BenchmarkIntegerSquareRootBelow52Bits(b *testing.B) { val := uint64(1 << 33) - for i := 0; i < b.N; i++ { + for b.Loop() { require.Equal(b, uint64(92681), math.IntegerSquareRoot(val)) } } func BenchmarkIntegerSquareRootAbove52Bits(b *testing.B) { val := uint64(1 << 62) - for i := 0; i < b.N; i++ { + for b.Loop() { require.Equal(b, uint64(1<<31), math.IntegerSquareRoot(val)) } } func BenchmarkSquareRootEffectiveBalance(b *testing.B) { val := uint64(1 << 62) - for i := 0; i < b.N; i++ { + for b.Loop() { require.Equal(b, uint64(1<<31), math.CachedSquareRoot(val)) } } @@ -178,7 +178,7 @@ func BenchmarkSquareRootEffectiveBalance(b *testing.B) { func BenchmarkSquareRootBabylonian(b *testing.B) { //Start with 700K validators' effective balance val := uint64(22400000000000000) - for i := 0; i < b.N; i++ { + for b.Loop() { sqr := math.CachedSquareRoot(val) require.Equal(b, true, sqr^2 <= val) require.Equal(b, true, (sqr+1)*(sqr+1) > val) @@ -189,7 +189,7 @@ func BenchmarkSquareRootBabylonian(b *testing.B) { func BenchmarkSquareRootOldWay(b *testing.B) { //Start with 700K validators' effective balance val := uint64(22400000000000000) - for i := 0; i < b.N; i++ { + for b.Loop() { sqr := math.IntegerSquareRoot(val) require.Equal(b, true, sqr^2 <= val) require.Equal(b, true, (sqr+1)*(sqr+1) > val) @@ -199,7 +199,7 @@ func BenchmarkSquareRootOldWay(b *testing.B) { func BenchmarkIntegerSquareRoot_WithDatatable(b *testing.B) { val := uint64(1024) - for i := 0; i < b.N; i++ { + for b.Loop() { require.Equal(b, uint64(32), math.IntegerSquareRoot(val)) } } diff --git 
a/monitoring/journald/journalhook_linux.go b/monitoring/journald/journalhook_linux.go index 091c645e43..7f136687d6 100644 --- a/monitoring/journald/journalhook_linux.go +++ b/monitoring/journald/journalhook_linux.go @@ -49,7 +49,7 @@ func stringifyKey(key string) string { } // Journal wants strings but logrus takes anything. -func stringifyEntries(data map[string]interface{}) map[string]string { +func stringifyEntries(data map[string]any) map[string]string { entries := make(map[string]string) for k, v := range data { key := stringifyKey(k) diff --git a/monitoring/journald/journalhook_linux_test.go b/monitoring/journald/journalhook_linux_test.go index 4d2832f202..44ddf7476d 100644 --- a/monitoring/journald/journalhook_linux_test.go +++ b/monitoring/journald/journalhook_linux_test.go @@ -3,7 +3,7 @@ package journald import "testing" func TestStringifyEntries(t *testing.T) { - input := map[string]interface{}{ + input := map[string]any{ "foo": "bar", "baz": 123, "foo-foo": "x", diff --git a/monitoring/prometheus/content_negotiation.go b/monitoring/prometheus/content_negotiation.go index 0b49e6ab3d..74677f0cf7 100644 --- a/monitoring/prometheus/content_negotiation.go +++ b/monitoring/prometheus/content_negotiation.go @@ -20,7 +20,7 @@ type generatedResponse struct { Err string `json:"error"` // Data is response output, if any. - Data interface{} `json:"data"` + Data any `json:"data"` } // negotiateContentType parses "Accept:" header and returns preferred content type string. diff --git a/monitoring/prometheus/logrus_collector_test.go b/monitoring/prometheus/logrus_collector_test.go index 5e4e57852a..fa41e56632 100644 --- a/monitoring/prometheus/logrus_collector_test.go +++ b/monitoring/prometheus/logrus_collector_test.go @@ -17,9 +17,9 @@ import ( ) type logger interface { - Info(args ...interface{}) - Warn(args ...interface{}) - Error(args ...interface{}) + Info(args ...any) + Warn(args ...any) + Error(args ...any) } func TestLogrusCollector(t *testing.T) { diff --git a/monitoring/tracing/recovery_interceptor_option.go b/monitoring/tracing/recovery_interceptor_option.go index 756440a63a..e7d8c05bc5 100644 --- a/monitoring/tracing/recovery_interceptor_option.go +++ b/monitoring/tracing/recovery_interceptor_option.go @@ -13,7 +13,7 @@ import ( // RecoveryHandlerFunc is a function that recovers from the panic `p` by returning an `error`. // The context can be used to extract request scoped metadata and context values. -func RecoveryHandlerFunc(ctx context.Context, p interface{}) error { +func RecoveryHandlerFunc(ctx context.Context, p any) error { span := trace.FromContext(ctx) if span != nil { span.SetAttributes(trace.StringAttribute("stack", string(debug.Stack()))) diff --git a/network/auth_test.go b/network/auth_test.go index 2b7c6327ab..00a3c60b68 100644 --- a/network/auth_test.go +++ b/network/auth_test.go @@ -28,7 +28,7 @@ func TestJWTAuthTransport(t *testing.T) { // The format should be `Bearer ${token}`. require.Equal(t, 2, len(splitToken)) reqToken = strings.TrimSpace(splitToken[1]) - token, err := jwt.Parse(reqToken, func(token *jwt.Token) (interface{}, error) { + token, err := jwt.Parse(reqToken, func(token *jwt.Token) (any, error) { // We should be doing HMAC signing. _, ok := token.Method.(*jwt.SigningMethodHMAC) require.Equal(t, true, ok) @@ -70,7 +70,7 @@ func TestJWTWithId(t *testing.T) { // The format should be `Bearer ${token}`. 
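The interface{} to any rewrites in these hunks are purely mechanical: any has been a predeclared alias for interface{} since Go 1.18, so the two spellings are interchangeable everywhere, including map values and function parameters. A tiny sketch:

package main

import "fmt"

// describe accepts any value; the parameter type is identical to interface{}.
func describe(v any) string {
	return fmt.Sprintf("%T: %v", v, v)
}

func main() {
	var legacy interface{} = 42
	var modern any = 42
	fmt.Println(describe(legacy), describe(modern)) // both print "int: 42"

	// The alias also reads better in composite types, as in stringifyEntries.
	fields := map[string]any{"foo": "bar", "baz": 123}
	fmt.Println(fields)
}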
require.Equal(t, 2, len(splitToken)) reqToken = strings.TrimSpace(splitToken[1]) - token, err := jwt.Parse(reqToken, func(token *jwt.Token) (interface{}, error) { + token, err := jwt.Parse(reqToken, func(token *jwt.Token) (any, error) { // We should be doing HMAC signing. _, ok := token.Method.(*jwt.SigningMethodHMAC) require.Equal(t, true, ok) @@ -114,7 +114,7 @@ func TestJWTWithoutId(t *testing.T) { // The format should be `Bearer ${token}`. require.Equal(t, 2, len(splitToken)) reqToken = strings.TrimSpace(splitToken[1]) - token, err := jwt.Parse(reqToken, func(token *jwt.Token) (interface{}, error) { + token, err := jwt.Parse(reqToken, func(token *jwt.Token) (any, error) { // We should be doing HMAC signing. _, ok := token.Method.(*jwt.SigningMethodHMAC) require.Equal(t, true, ok) diff --git a/nogo_config.json b/nogo_config.json index 01a3b69333..f0dbf3e83f 100644 --- a/nogo_config.json +++ b/nogo_config.json @@ -215,7 +215,92 @@ "external/com_github_ethereum_go_ethereum/.*": "Unsafe third party code", "rules_go_work-.*": "Third party code" } - }, + }, + "buildtag": { + "exclude_files": { + "external/.*": "Third party code" + } + }, + "any": { + "exclude_files": { + "external/.*": "Third party code" + } + }, + "appendclipped": { + "exclude_files": { + "external/.*": "Third party code" + } + }, + "fmtappendf": { + "exclude_files": { + "external/.*": "Third party code" + } + }, + "forvar": { + "exclude_files": { + "external/.*": "Third party code" + } + }, + "mapsloop": { + "exclude_files": { + "external/.*": "Third party code" + } + }, + "minmax": { + "exclude_files": { + "external/.*": "Third party code" + } + }, + "newexpr": { + "exclude_files": { + "external/.*": "Third party code" + } + }, + "omitzero": { + "exclude_files": { + "external/.*": "Third party code" + } + }, + "rangeint": { + "exclude_files": { + "external/.*": "Third party code" + } + }, + "reflecttypefor": { + "exclude_files": { + "external/.*": "Third party code" + } + }, + "slicescontains": { + "exclude_files": { + "external/.*": "Third party code" + } + }, + "slicessort": { + "exclude_files": { + "external/.*": "Third party code" + } + }, + "stringsbuilder": { + "exclude_files": { + "external/.*": "Third party code" + } + }, + "stringscutprefix": { + "exclude_files": { + "external/.*": "Third party code" + } + }, + "stringsseq": { + "exclude_files": { + "external/.*": "Third party code" + } + }, + "waitgroup": { + "exclude_files": { + "external/.*": "Third party code" + } + }, "nopanic": { "exclude_files": { "validator/web/site_data.go": "generated code", diff --git a/proto/engine/v1/execution_engine.go b/proto/engine/v1/execution_engine.go index 00b542b863..d10e249a99 100644 --- a/proto/engine/v1/execution_engine.go +++ b/proto/engine/v1/execution_engine.go @@ -9,7 +9,7 @@ type copier[T any] interface { func copySlice[T any, C copier[T]](original []C) []T { // Create a new slice with the same length as the original newSlice := make([]T, len(original)) - for i := 0; i < len(newSlice); i++ { + for i := range newSlice { newSlice[i] = original[i].Copy() } return newSlice diff --git a/proto/engine/v1/execution_engine_fuzz_test.go b/proto/engine/v1/execution_engine_fuzz_test.go index 43f9bc2f99..b4d0fdf1ec 100644 --- a/proto/engine/v1/execution_engine_fuzz_test.go +++ b/proto/engine/v1/execution_engine_fuzz_test.go @@ -25,7 +25,7 @@ func fuzzCopies[T any, C enginev1.Copier[T]](t *testing.T, obj C) { fuzzer := fuzz.NewWithSeed(0) amount := 1000 t.Run(fmt.Sprintf("%T", obj), func(t *testing.T) { - for i := 0; i < amount; 
i++ { + for range amount { fuzzer.Fuzz(obj) // Populate thing with random values got := obj.Copy() require.DeepEqual(t, obj, got) diff --git a/proto/engine/v1/json_marshal_unmarshal.go b/proto/engine/v1/json_marshal_unmarshal.go index d89c9988ec..34a7db3700 100644 --- a/proto/engine/v1/json_marshal_unmarshal.go +++ b/proto/engine/v1/json_marshal_unmarshal.go @@ -80,7 +80,7 @@ type ExecutionBlock struct { } func (e *ExecutionBlock) MarshalJSON() ([]byte, error) { - decoded := make(map[string]interface{}) + decoded := make(map[string]any) encodedHeader, err := e.Header.MarshalJSON() if err != nil { return nil, err @@ -110,7 +110,7 @@ func (e *ExecutionBlock) UnmarshalJSON(enc []byte) error { if err := e.Header.UnmarshalJSON(enc); err != nil { return err } - decoded := make(map[string]interface{}) + decoded := make(map[string]any) if err := json.Unmarshal(enc, &decoded); err != nil { return err } @@ -162,7 +162,7 @@ func (e *ExecutionBlock) UnmarshalJSON(enc []byte) error { // Exit early if there are no transactions stored in the json payload. return nil } - txsList, ok := rawTxList.([]interface{}) + txsList, ok := rawTxList.([]any) if !ok { return errors.Errorf("expected transaction list to be of a slice interface type.") } @@ -186,7 +186,7 @@ func (e *ExecutionBlock) UnmarshalJSON(enc []byte) error { // UnmarshalJSON -- func (b *PayloadIDBytes) UnmarshalJSON(enc []byte) error { var res [8]byte - if err := hexutil.UnmarshalFixedJSON(reflect.TypeOf(b), enc, res[:]); err != nil { + if err := hexutil.UnmarshalFixedJSON(reflect.TypeFor[*PayloadIDBytes](), enc, res[:]); err != nil { return err } *b = res diff --git a/proto/engine/v1/json_marshal_unmarshal_test.go b/proto/engine/v1/json_marshal_unmarshal_test.go index a5bee9e040..e6c1c7df71 100644 --- a/proto/engine/v1/json_marshal_unmarshal_test.go +++ b/proto/engine/v1/json_marshal_unmarshal_test.go @@ -296,7 +296,7 @@ func TestJsonMarshalUnmarshal(t *testing.T) { enc, err := json.Marshal(want) require.NoError(t, err) - payloadItems := make(map[string]interface{}) + payloadItems := make(map[string]any) require.NoError(t, json.Unmarshal(enc, &payloadItems)) blockHash := want.Hash() @@ -351,7 +351,7 @@ func TestJsonMarshalUnmarshal(t *testing.T) { enc, err := json.Marshal(want) require.NoError(t, err) - payloadItems := make(map[string]interface{}) + payloadItems := make(map[string]any) require.NoError(t, json.Unmarshal(enc, &payloadItems)) blockHash := want.Hash() @@ -410,7 +410,7 @@ func TestJsonMarshalUnmarshal(t *testing.T) { enc, err := json.Marshal(want) require.NoError(t, err) - payloadItems := make(map[string]interface{}) + payloadItems := make(map[string]any) require.NoError(t, json.Unmarshal(enc, &payloadItems)) tx := gethtypes.NewTransaction( @@ -478,7 +478,7 @@ func TestJsonMarshalUnmarshal(t *testing.T) { enc, err := json.Marshal(want) require.NoError(t, err) - payloadItems := make(map[string]interface{}) + payloadItems := make(map[string]any) require.NoError(t, json.Unmarshal(enc, &payloadItems)) blockHash := want.Hash() @@ -569,7 +569,7 @@ func TestJsonMarshalUnmarshal(t *testing.T) { enc, err := json.Marshal(want) require.NoError(t, err) - payloadItems := make(map[string]interface{}) + payloadItems := make(map[string]any) require.NoError(t, json.Unmarshal(enc, &payloadItems)) blockHash := want.Hash() diff --git a/proto/prysm/v1alpha1/attestation/aggregation/attestations/attestations.go b/proto/prysm/v1alpha1/attestation/aggregation/attestations/attestations.go index 78889dbb13..0c0aa043ae 100644 --- 
a/proto/prysm/v1alpha1/attestation/aggregation/attestations/attestations.go +++ b/proto/prysm/v1alpha1/attestation/aggregation/attestations/attestations.go @@ -60,7 +60,7 @@ func AggregateDisjointOneBitAtts(atts []ethpb.Att) (ethpb.Att, error) { } } keys := make([]int, len(atts)) - for i := 0; i < len(atts); i++ { + for i := range atts { keys[i] = i } idx, err := aggregateAttestations(atts, keys, coverage) diff --git a/proto/prysm/v1alpha1/attestation/aggregation/attestations/maxcover.go b/proto/prysm/v1alpha1/attestation/aggregation/attestations/maxcover.go index 4b8c078cbf..ad454a5f2a 100644 --- a/proto/prysm/v1alpha1/attestation/aggregation/attestations/maxcover.go +++ b/proto/prysm/v1alpha1/attestation/aggregation/attestations/maxcover.go @@ -27,7 +27,7 @@ func MaxCoverAttestationAggregation(atts []ethpb.Att) ([]ethpb.Att, error) { // In the future this conversion will be redundant, as attestation bitlist will be of a Bitlist64 // type, so incoming `atts` parameters can be used as candidates list directly. candidates := make([]*bitfield.Bitlist64, len(atts)) - for i := 0; i < len(atts); i++ { + for i := range atts { var err error candidates[i], err = atts[i].GetAggregationBits().ToBitlist64() if err != nil { @@ -114,7 +114,7 @@ func MaxCoverAttestationAggregation(atts []ethpb.Att) ([]ethpb.Att, error) { // NewMaxCover returns initialized Maximum Coverage problem for attestations aggregation. func NewMaxCover(atts []*ethpb.Attestation) *aggregation.MaxCoverProblem { candidates := make([]*aggregation.MaxCoverCandidate, len(atts)) - for i := 0; i < len(atts); i++ { + for i := range atts { candidates[i] = aggregation.NewMaxCoverCandidate(i, &atts[i].AggregationBits) } return &aggregation.MaxCoverProblem{Candidates: candidates} @@ -126,7 +126,7 @@ func (al attList) aggregate(coverage bitfield.Bitlist) (*ethpb.Attestation, erro return nil, errors.Wrap(ErrInvalidAttestationCount, "cannot aggregate") } signs := make([]bls.Signature, len(al)) - for i := 0; i < len(al); i++ { + for i := range al { sig, err := signatureFromBytes(al[i].GetSignature()) if err != nil { return nil, err diff --git a/proto/prysm/v1alpha1/attestation/aggregation/maxcover_bench_test.go b/proto/prysm/v1alpha1/attestation/aggregation/maxcover_bench_test.go index 8d47200eb1..f65e572b53 100644 --- a/proto/prysm/v1alpha1/attestation/aggregation/maxcover_bench_test.go +++ b/proto/prysm/v1alpha1/attestation/aggregation/maxcover_bench_test.go @@ -78,7 +78,7 @@ func BenchmarkMaxCoverProblem_MaxCover(b *testing.B) { } b.StartTimer() - for i := 0; i < b.N; i++ { + for b.Loop() { candidates := make([]*aggregation.MaxCoverCandidate, len(bitlists)) for i := 0; i < len(bitlists); i++ { candidates[i] = aggregation.NewMaxCoverCandidate(i, &bitlists[i]) @@ -98,7 +98,7 @@ func BenchmarkMaxCoverProblem_MaxCover(b *testing.B) { } b.StartTimer() - for i := 0; i < b.N; i++ { + for b.Loop() { _, _, err := aggregation.MaxCover(bitlists, len(bitlists), tt.allowOverlaps) _ = err } diff --git a/proto/prysm/v1alpha1/attestation/aggregation/testing/bitlistutils.go b/proto/prysm/v1alpha1/attestation/aggregation/testing/bitlistutils.go index 48c6392d91..7872fa4bb5 100644 --- a/proto/prysm/v1alpha1/attestation/aggregation/testing/bitlistutils.go +++ b/proto/prysm/v1alpha1/attestation/aggregation/testing/bitlistutils.go @@ -14,7 +14,7 @@ import ( // BitlistWithAllBitsSet creates list of bitlists with all bits set. 
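The benchmark hunks throughout this patch, including BenchmarkMaxCoverProblem_MaxCover just above, move from for i := 0; i < b.N; i++ to testing.B.Loop, added in Go 1.24. b.Loop resets the timer the first time it is called, so setup performed before the loop is not measured and most explicit b.ResetTimer / b.StopTimer / b.StartTimer calls become unnecessary. A hedged sketch with an illustrative function under test:

package fib_test

import "testing"

// fib is an illustrative function under test; it is not part of the patch.
func fib(n int) int {
	if n < 2 {
		return n
	}
	return fib(n-1) + fib(n-2)
}

func BenchmarkFib(b *testing.B) {
	warm := fib(20) // setup: excluded from timing, since b.Loop resets the timer on first call

	for b.Loop() {
		if fib(10) < 0 {
			b.Fatal("unexpected result", warm)
		}
	}
}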
func BitlistWithAllBitsSet(length uint64) bitfield.Bitlist { b := bitfield.NewBitlist(length) - for i := uint64(0); i < length; i++ { + for i := range length { b.SetBitAt(i, true) } return b @@ -23,7 +23,7 @@ func BitlistWithAllBitsSet(length uint64) bitfield.Bitlist { // BitlistsWithSingleBitSet creates list of bitlists with a single bit set in each. func BitlistsWithSingleBitSet(n, length uint64) []bitfield.Bitlist { lists := make([]bitfield.Bitlist, n) - for i := uint64(0); i < n; i++ { + for i := range n { b := bitfield.NewBitlist(length) b.SetBitAt(i%length, true) lists[i] = b @@ -34,7 +34,7 @@ func BitlistsWithSingleBitSet(n, length uint64) []bitfield.Bitlist { // Bitlists64WithSingleBitSet creates list of bitlists with a single bit set in each. func Bitlists64WithSingleBitSet(n, length uint64) []*bitfield.Bitlist64 { lists := make([]*bitfield.Bitlist64, n) - for i := uint64(0); i < n; i++ { + for i := range n { b := bitfield.NewBitlist64(length) b.SetBitAt(i%length, true) lists[i] = b @@ -48,7 +48,7 @@ func BitlistsWithMultipleBitSet(t testing.TB, n, length, count uint64) []bitfiel t.Logf("bitlistsWithMultipleBitSet random seed: %v", seed) r := rand.New(rand.NewSource(seed)) // #nosec G404 lists := make([]bitfield.Bitlist, n) - for i := uint64(0); i < n; i++ { + for i := range n { b := bitfield.NewBitlist(length) keys := r.Perm(int(length)) // lint:ignore uintcast -- This is safe in test code. for _, key := range keys[:count] { @@ -65,7 +65,7 @@ func Bitlists64WithMultipleBitSet(t testing.TB, n, length, count uint64) []*bitf t.Logf("Bitlists64WithMultipleBitSet random seed: %v", seed) r := rand.New(rand.NewSource(seed)) // #nosec G404 lists := make([]*bitfield.Bitlist64, n) - for i := uint64(0); i < n; i++ { + for i := range n { b := bitfield.NewBitlist64(length) keys := r.Perm(int(length)) // lint:ignore uintcast -- This is safe in test code. 
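The bitlist helpers above show the most recurrent change in this patch: Go 1.22 permits ranging over an integer, so for i := uint64(0); i < n; i++ becomes for i := range n, with i running from 0 to n-1 and taking the type of n. A minimal sketch with illustrative values:

package main

import "fmt"

func main() {
	var n uint64 = 4

	// Classic counted loop.
	for i := uint64(0); i < n; i++ {
		fmt.Println("old", i)
	}

	// Go 1.22 range-over-int: i has the same type as n (uint64 here).
	for i := range n {
		fmt.Println("new", i)
	}

	// When the index is unused, drop the variable entirely.
	for range 3 {
		fmt.Println("tick")
	}
}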
for _, key := range keys[:count] { diff --git a/proto/prysm/v1alpha1/attestation/attestation_utils.go b/proto/prysm/v1alpha1/attestation/attestation_utils.go index effec69855..086eede8e9 100644 --- a/proto/prysm/v1alpha1/attestation/attestation_utils.go +++ b/proto/prysm/v1alpha1/attestation/attestation_utils.go @@ -8,7 +8,6 @@ import ( "fmt" "runtime/debug" "slices" - "sort" "github.com/OffchainLabs/go-bitfield" "github.com/OffchainLabs/prysm/v7/beacon-chain/core/signing" @@ -45,9 +44,7 @@ func ConvertToIndexed(_ context.Context, attestation ethpb.Att, committees ...[] return nil, err } - sort.Slice(attIndices, func(i, j int) bool { - return attIndices[i] < attIndices[j] - }) + slices.Sort(attIndices) if attestation.Version() >= version.Electra { return ðpb.IndexedAttestationElectra{ diff --git a/proto/prysm/v1alpha1/attestation/attestation_utils_test.go b/proto/prysm/v1alpha1/attestation/attestation_utils_test.go index 565d9132df..a2ec72fe87 100644 --- a/proto/prysm/v1alpha1/attestation/attestation_utils_test.go +++ b/proto/prysm/v1alpha1/attestation/attestation_utils_test.go @@ -217,8 +217,7 @@ func BenchmarkAttestingIndices_PartialCommittee(b *testing.B) { bf := bitfield.Bitlist{0b11111111, 0b11111111, 0b10000111, 0b11111111, 0b100} committee := []primitives.ValidatorIndex{0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33} - b.ResetTimer() - for i := 0; i < b.N; i++ { + for b.Loop() { _, err := attestation.AttestingIndices(ð.Attestation{AggregationBits: bf}, committee) require.NoError(b, err) } @@ -226,7 +225,7 @@ func BenchmarkAttestingIndices_PartialCommittee(b *testing.B) { func BenchmarkIsValidAttestationIndices(b *testing.B) { indices := make([]uint64, params.BeaconConfig().MaxValidatorsPerCommittee) - for i := 0; i < len(indices); i++ { + for i := range indices { indices[i] = uint64(i) } att := ð.IndexedAttestation{ @@ -237,8 +236,8 @@ func BenchmarkIsValidAttestationIndices(b *testing.B) { }, Signature: make([]byte, fieldparams.BLSSignatureLength), } - b.ResetTimer() - for i := 0; i < b.N; i++ { + + for b.Loop() { if err := attestation.IsValidAttestationIndices(b.Context(), att, params.BeaconConfig().MaxValidatorsPerCommittee, params.BeaconConfig().MaxCommitteesPerSlot); err != nil { require.NoError(b, err) } @@ -460,14 +459,14 @@ func BenchmarkAttDataIsEqual(b *testing.B) { b.Run("fast", func(b *testing.B) { b.ReportAllocs() - for i := 0; i < b.N; i++ { + for b.Loop() { assert.Equal(b, true, attestation.AttDataIsEqual(attData1, attData2)) } }) b.Run("proto.Equal", func(b *testing.B) { b.ReportAllocs() - for i := 0; i < b.N; i++ { + for b.Loop() { assert.Equal(b, true, attestation.AttDataIsEqual(attData1, attData2)) } }) diff --git a/proto/prysm/v1alpha1/cloners.go b/proto/prysm/v1alpha1/cloners.go index 2d76721dd0..436196cc2f 100644 --- a/proto/prysm/v1alpha1/cloners.go +++ b/proto/prysm/v1alpha1/cloners.go @@ -11,7 +11,7 @@ type copier[T any] interface { func CopySlice[T any, C copier[T]](original []C) []T { // Create a new slice with the same length as the original newSlice := make([]T, len(original)) - for i := 0; i < len(newSlice); i++ { + for i := range newSlice { newSlice[i] = original[i].Copy() } return newSlice diff --git a/proto/prysm/v1alpha1/cloners_test.go b/proto/prysm/v1alpha1/cloners_test.go index 1697134968..a6e9975bc9 100644 --- a/proto/prysm/v1alpha1/cloners_test.go +++ b/proto/prysm/v1alpha1/cloners_test.go @@ -312,7 +312,7 @@ func TestCopyBlindedBeaconBlockBodyDeneb(t *testing.T) { 
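The ConvertToIndexed hunk above replaces a sort.Slice call with slices.Sort, and an earlier hunk in io/file/fileutil.go swaps append([]string(nil), files...) for slices.Clone; both helpers come from the standard slices package added in Go 1.21 and avoid hand-written comparison closures and copy idioms for ordered element types. A short sketch:

package main

import (
	"fmt"
	"slices"
	"sort"
)

func main() {
	indices := []uint64{9, 3, 7, 1}

	// Old: a comparison closure over the slice.
	sort.Slice(indices, func(i, j int) bool { return indices[i] < indices[j] })

	// New: slices.Sort works directly on slices of ordered types.
	slices.Sort(indices)
	fmt.Println(indices) // [1 3 7 9]

	// slices.Clone replaces the append([]T(nil), s...) copy idiom.
	files := []string{"b.txt", "a.txt"}
	fmt.Println(slices.Clone(files))
}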
 func bytes(length int) []byte {
 	b := make([]byte, length)
-	for i := 0; i < length; i++ {
+	for i := range length {
 		b[i] = uint8(rand.Int31n(255) + 1)
 	}
 	return b
@@ -382,7 +382,7 @@ func genAttestation() *v1alpha1.Attestation {

 func genAttestations(num int) []*v1alpha1.Attestation {
 	atts := make([]*v1alpha1.Attestation, num)
-	for i := 0; i < num; i++ {
+	for i := range num {
 		atts[i] = genAttestation()
 	}
 	return atts
@@ -461,7 +461,7 @@ func genProposerSlashing() *v1alpha1.ProposerSlashing {

 func genProposerSlashings(num int) []*v1alpha1.ProposerSlashing {
 	ps := make([]*v1alpha1.ProposerSlashing, num)
-	for i := 0; i < num; i++ {
+	for i := range num {
 		ps[i] = genProposerSlashing()
 	}
 	return ps
@@ -484,7 +484,7 @@ func genIndexedAttestation() *v1alpha1.IndexedAttestation {

 func genAttesterSlashings(num int) []*v1alpha1.AttesterSlashing {
 	as := make([]*v1alpha1.AttesterSlashing, num)
-	for i := 0; i < num; i++ {
+	for i := range num {
 		as[i] = genAttesterSlashing()
 	}
 	return as
@@ -525,7 +525,7 @@ func genDeposit() *v1alpha1.Deposit {

 func genDeposits(num int) []*v1alpha1.Deposit {
 	d := make([]*v1alpha1.Deposit, num)
-	for i := 0; i < num; i++ {
+	for i := range num {
 		d[i] = genDeposit()
 	}
 	return d
@@ -547,7 +547,7 @@ func genSignedVoluntaryExit() *v1alpha1.SignedVoluntaryExit {

 func genSignedVoluntaryExits(num int) []*v1alpha1.SignedVoluntaryExit {
 	sv := make([]*v1alpha1.SignedVoluntaryExit, num)
-	for i := 0; i < num; i++ {
+	for i := range num {
 		sv[i] = genSignedVoluntaryExit()
 	}
 	return sv
@@ -765,7 +765,7 @@ func genBlindedBeaconBlockBodyDeneb() *v1alpha1.BlindedBeaconBlockBodyDeneb {

 func getKZGCommitments(n int) [][]byte {
 	kzgs := make([][]byte, n)
-	for i := 0; i < n; i++ {
+	for i := range n {
 		kzgs[i] = bytes(48)
 	}
 	return kzgs
@@ -941,7 +941,7 @@ func genWithdrawal() *enginev1.Withdrawal {

 func genBLSToExecutionChanges(num int) []*v1alpha1.SignedBLSToExecutionChange {
 	changes := make([]*v1alpha1.SignedBLSToExecutionChange, num)
-	for i := 0; i < num; i++ {
+	for i := range num {
 		changes[i] = genBLSToExecutionChange()
 	}
 	return changes
@@ -969,7 +969,7 @@ func genAttestationElectra() *v1alpha1.AttestationElectra {

 func genAttesterSlashingsElectra(num int) []*v1alpha1.AttesterSlashingElectra {
 	as := make([]*v1alpha1.AttesterSlashingElectra, num)
-	for i := 0; i < num; i++ {
+	for i := range num {
 		as[i] = genAttesterSlashingElectra()
 	}
 	return as
@@ -992,7 +992,7 @@ func genIndexedAttestationElectra() *v1alpha1.IndexedAttestationElectra {

 func genAttestationsElectra(num int) []*v1alpha1.AttestationElectra {
 	atts := make([]*v1alpha1.AttestationElectra, num)
-	for i := 0; i < num; i++ {
+	for i := range num {
 		atts[i] = genAttestationElectra()
 	}
 	return atts
@@ -1078,7 +1078,7 @@ func genExecutionRequests() *enginev1.ExecutionRequests {

 func genDepositRequests(num int) []*enginev1.DepositRequest {
 	drs := make([]*enginev1.DepositRequest, num)
-	for i := 0; i < num; i++ {
+	for i := range num {
 		drs[i] = genDepositRequest()
 	}
 	return drs
@@ -1096,7 +1096,7 @@ func genDepositRequest() *enginev1.DepositRequest {

 func genWithdrawalRequests(num int) []*enginev1.WithdrawalRequest {
 	wrs := make([]*enginev1.WithdrawalRequest, num)
-	for i := 0; i < num; i++ {
+	for i := range num {
 		wrs[i] = genWithdrawalRequest()
 	}
 	return wrs
@@ -1112,7 +1112,7 @@ func genWithdrawalRequest() *enginev1.WithdrawalRequest {

 func genConsolidationRequests(num int) []*enginev1.ConsolidationRequest {
 	crs := make([]*enginev1.ConsolidationRequest, num)
-	for i := 0; i < num; i++ {
+	for i := range num {
 		crs[i] = genConsolidationRequest()
 	}
 	return crs
diff --git a/proto/prysm/v1alpha1/fuzz_test.go b/proto/prysm/v1alpha1/fuzz_test.go
index b8b8c138db..bcfbf10e05 100644
--- a/proto/prysm/v1alpha1/fuzz_test.go
+++ b/proto/prysm/v1alpha1/fuzz_test.go
@@ -13,7 +13,7 @@ func fuzzCopies[T any, C eth.Copier[T]](t *testing.T, obj C) {
 	fuzzer := fuzz.NewWithSeed(0)
 	amount := 1000
 	t.Run(fmt.Sprintf("%T", obj), func(t *testing.T) {
-		for i := 0; i < amount; i++ {
+		for range amount {
 			fuzzer.Fuzz(obj) // Populate thing with random values
 			got := obj.Copy()

diff --git a/proto/prysm/v1alpha1/metadata/metadata_interfaces.go b/proto/prysm/v1alpha1/metadata/metadata_interfaces.go
index dec759f4ce..d2a04e5cf0 100644
--- a/proto/prysm/v1alpha1/metadata/metadata_interfaces.go
+++ b/proto/prysm/v1alpha1/metadata/metadata_interfaces.go
@@ -12,7 +12,7 @@ type Metadata interface {
 	AttnetsBitfield() bitfield.Bitvector64
 	SyncnetsBitfield() bitfield.Bitvector4
 	CustodyGroupCount() uint64
-	InnerObject() interface{}
+	InnerObject() any
 	IsNil() bool
 	Copy() Metadata
 	ssz.Marshaler
diff --git a/proto/testing/tags_test.go b/proto/testing/tags_test.go
index a164347019..fc8d96bf08 100644
--- a/proto/testing/tags_test.go
+++ b/proto/testing/tags_test.go
@@ -35,7 +35,7 @@ func TestSSZTagSize(t *testing.T) {
 	assert.Equal(t, pubKeySize, sizes[0], "Unexpected signature size")
 }

-func sszTagSizes(i interface{}, fName string) ([]int, error) {
+func sszTagSizes(i any, fName string) ([]int, error) {
 	v := reflect.ValueOf(i)
 	field, exists := v.Type().FieldByName(fName)
 	if !exists {
@@ -49,7 +49,7 @@ func sszTagSizes(i interface{}, fName string) ([]int, error) {
 	items := strings.Split(tag[start+1:], ",")
 	sizes := make([]int, len(items))
 	var err error
-	for i := 0; i < len(items); i++ {
+	for i := range items {
 		if items[i] == "?" {
 			sizes[i] = 0
 			continue
diff --git a/runtime/interop/generate_genesis_state.go b/runtime/interop/generate_genesis_state.go
index 83e5705edf..e2cfcbd841 100644
--- a/runtime/interop/generate_genesis_state.go
+++ b/runtime/interop/generate_genesis_state.go
@@ -79,7 +79,7 @@ func GenerateGenesisStateFromDepositData(
 // GenerateDepositsFromData a list of deposit items by creating proofs for each of them from a sparse Merkle trie.
 func GenerateDepositsFromData(depositDataItems []*ethpb.Deposit_Data, trie *trie.SparseMerkleTrie) ([]*ethpb.Deposit, error) {
 	deposits := make([]*ethpb.Deposit, len(depositDataItems))
-	results, err := async.Scatter(len(depositDataItems), func(offset int, entries int, _ *sync.RWMutex) (interface{}, error) {
+	results, err := async.Scatter(len(depositDataItems), func(offset int, entries int, _ *sync.RWMutex) (any, error) {
 		return generateDepositsFromData(depositDataItems[offset:offset+entries], offset, trie)
 	})
 	if err != nil {
@@ -119,7 +119,7 @@ func DepositDataFromKeys(privKeys []bls.SecretKey, pubKeys []bls.PublicKey) ([]*
 	}
 	depositDataItems := make([]*ethpb.Deposit_Data, len(privKeys))
 	depositDataRoots := make([][]byte, len(privKeys))
-	results, err := async.Scatter(len(privKeys), func(offset int, entries int, _ *sync.RWMutex) (interface{}, error) {
+	results, err := async.Scatter(len(privKeys), func(offset int, entries int, _ *sync.RWMutex) (any, error) {
 		items, roots, err := depositDataFromKeys(privKeys[offset:offset+entries], pubKeys[offset:offset+entries], 0)
 		return &depositData{items: items, roots: roots}, err
 	})
@@ -145,7 +145,7 @@ func DepositDataFromKeysWithExecCreds(privKeys []bls.SecretKey, pubKeys []bls.Pu
 func depositDataFromKeys(privKeys []bls.SecretKey, pubKeys []bls.PublicKey, numOfCreds uint64) ([]*ethpb.Deposit_Data, [][]byte, error) {
 	dataRoots := make([][]byte, len(privKeys))
 	depositDataItems := make([]*ethpb.Deposit_Data, len(privKeys))
-	for i := 0; i < len(privKeys); i++ {
+	for i := range privKeys {
 		withCred := uint64(i) < numOfCreds
 		data, err := createDepositData(privKeys[i], pubKeys[i], withCred)
 		if err != nil {
diff --git a/runtime/interop/generate_keys.go b/runtime/interop/generate_keys.go
index 655dad5abc..84ac022b6f 100644
--- a/runtime/interop/generate_keys.go
+++ b/runtime/interop/generate_keys.go
@@ -27,7 +27,7 @@ func DeterministicallyGenerateKeys(startIndex, numKeys uint64) ([]bls.SecretKey,
 		publics []bls.PublicKey
 	}
 	// lint:ignore uintcast -- this is safe because we can reasonably expect that the number of keys is less than max int64.
-	results, err := async.Scatter(int(numKeys), func(offset int, entries int, _ *sync.RWMutex) (interface{}, error) {
+	results, err := async.Scatter(int(numKeys), func(offset int, entries int, _ *sync.RWMutex) (any, error) {
 		secs, pubs, err := deterministicallyGenerateKeys(uint64(offset)+startIndex, uint64(entries))
 		return &keys{secrets: secs, publics: pubs}, err
 	})
diff --git a/runtime/interop/premine-state.go b/runtime/interop/premine-state.go
index be38b420a2..292ef1a907 100644
--- a/runtime/interop/premine-state.go
+++ b/runtime/interop/premine-state.go
@@ -372,7 +372,7 @@ func (s *PremineGenesisConfig) setInactivityScores(g state.BeaconState) error {
 	}
 	scoresMissing := len(g.Validators()) - len(scores)
 	if scoresMissing > 0 {
-		for i := 0; i < scoresMissing; i++ {
+		for range scoresMissing {
 			scores = append(scores, 0)
 		}
 	}
@@ -390,7 +390,7 @@ func (s *PremineGenesisConfig) setCurrentEpochParticipation(g state.BeaconState)
 	}
 	missing := len(g.Validators()) - len(p)
 	if missing > 0 {
-		for i := 0; i < missing; i++ {
+		for range missing {
 			p = append(p, 0)
 		}
 	}
@@ -408,7 +408,7 @@ func (s *PremineGenesisConfig) setPrevEpochParticipation(g state.BeaconState) er
 	}
 	missing := len(g.Validators()) - len(p)
 	if missing > 0 {
-		for i := 0; i < missing; i++ {
+		for range missing {
 			p = append(p, 0)
 		}
 	}
@@ -755,7 +755,7 @@ func unwrapUint64Ptr(u *uint64) uint64 {
 func nZeroRoots(n uint64) [][]byte {
 	roots := make([][]byte, n)
 	zh := params.BeaconConfig().ZeroHash[:]
-	for i := uint64(0); i < n; i++ {
+	for i := range n {
 		roots[i] = zh
 	}
 	return roots
@@ -763,7 +763,7 @@ func nZeroRoots(n uint64) [][]byte {

 func nSetRoots(n uint64, r []byte) [][]byte {
 	roots := make([][]byte, n)
-	for i := uint64(0); i < n; i++ {
+	for i := range n {
 		h := make([]byte, 32)
 		copy(h, r)
 		roots[i] = h
diff --git a/runtime/logging/logrus-prefixed-formatter/formatter.go b/runtime/logging/logrus-prefixed-formatter/formatter.go
index 9f43579ca7..241802b654 100644
--- a/runtime/logging/logrus-prefixed-formatter/formatter.go
+++ b/runtime/logging/logrus-prefixed-formatter/formatter.go
@@ -343,7 +343,7 @@ func extractPrefix(msg string) (string, string) {
 	return prefix, msg
 }

-func (f *TextFormatter) appendKeyValue(b *bytes.Buffer, key string, value interface{}, appendSpace bool) error {
+func (f *TextFormatter) appendKeyValue(b *bytes.Buffer, key string, value any, appendSpace bool) error {
 	b.WriteString(key)
 	b.WriteByte('=')
 	if err := f.appendValue(b, value); err != nil {
@@ -356,7 +356,7 @@ func (f *TextFormatter) appendKeyValue(b *bytes.Buffer, key string, value interf
 	return nil
 }

-func (f *TextFormatter) appendValue(b *bytes.Buffer, value interface{}) (err error) {
+func (f *TextFormatter) appendValue(b *bytes.Buffer, value any) (err error) {
 	switch value := value.(type) {
 	case string:
 		if !f.needsQuoting(value) {
diff --git a/runtime/service_registry.go b/runtime/service_registry.go
index 84fcba0a04..1c51802a8e 100644
--- a/runtime/service_registry.go
+++ b/runtime/service_registry.go
@@ -83,7 +83,7 @@ func (s *ServiceRegistry) RegisterService(service Service) error {
 // FetchService takes in a struct pointer and sets the value of that pointer
 // to a service currently stored in the service registry. This ensures the input argument is
 // set to the right pointer that refers to the originally registered service.
-func (s *ServiceRegistry) FetchService(service interface{}) error { +func (s *ServiceRegistry) FetchService(service any) error { if reflect.TypeOf(service).Kind() != reflect.Ptr { return fmt.Errorf("input must be of pointer type, received value type instead: %T", service) } diff --git a/runtime/service_registry_test.go b/runtime/service_registry_test.go index 1fbf0173c7..266956ed17 100644 --- a/runtime/service_registry_test.go +++ b/runtime/service_registry_test.go @@ -63,11 +63,11 @@ func TestRegisterService_Different(t *testing.T) { require.Equal(t, 2, len(registry.serviceTypes)) - _, exists := registry.services[reflect.TypeOf(m)] - assert.Equal(t, true, exists, "service of type %v not registered", reflect.TypeOf(m)) + _, exists := registry.services[reflect.TypeFor[*mockService]()] + assert.Equal(t, true, exists, "service of type %v not registered", reflect.TypeFor[*mockService]()) - _, exists = registry.services[reflect.TypeOf(s)] - assert.Equal(t, true, exists, "service of type %v not registered", reflect.TypeOf(s)) + _, exists = registry.services[reflect.TypeFor[*secondMockService]()] + assert.Equal(t, true, exists, "service of type %v not registered", reflect.TypeFor[*secondMockService]()) } func TestFetchService_OK(t *testing.T) { @@ -104,6 +104,6 @@ func TestServiceStatus_OK(t *testing.T) { statuses := registry.Statuses() - assert.ErrorContains(t, "something bad has happened", statuses[reflect.TypeOf(m)]) - assert.ErrorContains(t, "woah, horsee", statuses[reflect.TypeOf(s)]) + assert.ErrorContains(t, "something bad has happened", statuses[reflect.TypeFor[*mockService]()]) + assert.ErrorContains(t, "woah, horsee", statuses[reflect.TypeFor[*secondMockService]()]) } diff --git a/testing/assert/assertions.go b/testing/assert/assertions.go index 434bdc6b4a..79620a4733 100644 --- a/testing/assert/assertions.go +++ b/testing/assert/assertions.go @@ -6,12 +6,12 @@ import ( ) // Equal compares values using comparison operator. -func Equal(tb assertions.AssertionTestingTB, expected, actual interface{}, msg ...interface{}) { +func Equal(tb assertions.AssertionTestingTB, expected, actual any, msg ...any) { assertions.Equal(tb.Errorf, expected, actual, msg...) } // NotEqual compares values using comparison operator. -func NotEqual(tb assertions.AssertionTestingTB, expected, actual interface{}, msg ...interface{}) { +func NotEqual(tb assertions.AssertionTestingTB, expected, actual any, msg ...any) { assertions.NotEqual(tb.Errorf, expected, actual, msg...) } @@ -19,7 +19,7 @@ func NotEqual(tb assertions.AssertionTestingTB, expected, actual interface{}, ms // NOTE: this function does not work for checking arrays/slices or maps of protobuf messages. // For arrays/slices, please use DeepSSZEqual. // For maps, please iterate through and compare the individual keys and values. -func DeepEqual(tb assertions.AssertionTestingTB, expected, actual interface{}, msg ...interface{}) { +func DeepEqual(tb assertions.AssertionTestingTB, expected, actual any, msg ...any) { assertions.DeepEqual(tb.Errorf, expected, actual, msg...) } @@ -27,57 +27,57 @@ func DeepEqual(tb assertions.AssertionTestingTB, expected, actual interface{}, m // NOTE: this function does not work for checking arrays/slices or maps of protobuf messages. // For arrays/slices, please use DeepNotSSZEqual. // For maps, please iterate through and compare the individual keys and values. 
-func DeepNotEqual(tb assertions.AssertionTestingTB, expected, actual interface{}, msg ...interface{}) { +func DeepNotEqual(tb assertions.AssertionTestingTB, expected, actual any, msg ...any) { assertions.DeepNotEqual(tb.Errorf, expected, actual, msg...) } // DeepSSZEqual compares values using ssz.DeepEqual. -func DeepSSZEqual(tb assertions.AssertionTestingTB, expected, actual interface{}, msg ...interface{}) { +func DeepSSZEqual(tb assertions.AssertionTestingTB, expected, actual any, msg ...any) { assertions.DeepSSZEqual(tb.Errorf, expected, actual, msg...) } // DeepNotSSZEqual compares values using ssz.DeepEqual. -func DeepNotSSZEqual(tb assertions.AssertionTestingTB, expected, actual interface{}, msg ...interface{}) { +func DeepNotSSZEqual(tb assertions.AssertionTestingTB, expected, actual any, msg ...any) { assertions.DeepNotSSZEqual(tb.Errorf, expected, actual, msg...) } // StringContains asserts a string contains specified substring. -func StringContains(tb assertions.AssertionTestingTB, expected, actual string, msg ...interface{}) { +func StringContains(tb assertions.AssertionTestingTB, expected, actual string, msg ...any) { assertions.StringContains(tb.Errorf, expected, actual, true, msg...) } // StringNotContains asserts a string does not contain specified substring. -func StringNotContains(tb assertions.AssertionTestingTB, expected, actual string, msg ...interface{}) { +func StringNotContains(tb assertions.AssertionTestingTB, expected, actual string, msg ...any) { assertions.StringContains(tb.Errorf, expected, actual, false, msg...) } // NoError asserts that error is nil. -func NoError(tb assertions.AssertionTestingTB, err error, msg ...interface{}) { +func NoError(tb assertions.AssertionTestingTB, err error, msg ...any) { assertions.NoError(tb.Errorf, err, msg...) } // ErrorContains asserts that actual error contains wanted message. -func ErrorContains(tb assertions.AssertionTestingTB, want string, err error, msg ...interface{}) { +func ErrorContains(tb assertions.AssertionTestingTB, want string, err error, msg ...any) { assertions.ErrorContains(tb.Errorf, want, err, msg...) } // NotNil asserts that passed value is not nil. -func NotNil(tb assertions.AssertionTestingTB, obj interface{}, msg ...interface{}) { +func NotNil(tb assertions.AssertionTestingTB, obj any, msg ...any) { assertions.NotNil(tb.Errorf, obj, msg...) } // LogsContain checks that the desired string is a subset of the current log output. -func LogsContain(tb assertions.AssertionTestingTB, hook *test.Hook, want string, msg ...interface{}) { +func LogsContain(tb assertions.AssertionTestingTB, hook *test.Hook, want string, msg ...any) { assertions.LogsContain(tb.Errorf, hook, want, true, msg...) } // LogsDoNotContain is the inverse check of LogsContain. -func LogsDoNotContain(tb assertions.AssertionTestingTB, hook *test.Hook, want string, msg ...interface{}) { +func LogsDoNotContain(tb assertions.AssertionTestingTB, hook *test.Hook, want string, msg ...any) { assertions.LogsContain(tb.Errorf, hook, want, false, msg...) } // NotEmpty checks that the object fields are not empty. This method also checks all of the // pointer fields to ensure none of those fields are empty. -func NotEmpty(tb assertions.AssertionTestingTB, obj interface{}, msg ...interface{}) { +func NotEmpty(tb assertions.AssertionTestingTB, obj any, msg ...any) { assertions.NotEmpty(tb.Errorf, obj, msg...) 
} diff --git a/testing/assertions/assertions.go b/testing/assertions/assertions.go index 9aa2f11800..27a2570e3f 100644 --- a/testing/assertions/assertions.go +++ b/testing/assertions/assertions.go @@ -19,20 +19,20 @@ import ( // AssertionTestingTB exposes enough testing.TB methods for assertions. type AssertionTestingTB interface { - Errorf(format string, args ...interface{}) - Fatalf(format string, args ...interface{}) + Errorf(format string, args ...any) + Fatalf(format string, args ...any) } -type assertionLoggerFn func(string, ...interface{}) +type assertionLoggerFn func(string, ...any) func SprintfAssertionLoggerFn(s *string) assertionLoggerFn { - return func(ef string, eargs ...interface{}) { + return func(ef string, eargs ...any) { *s = fmt.Sprintf(ef, eargs...) } } // Equal compares values using comparison operator. -func Equal(loggerFn assertionLoggerFn, expected, actual interface{}, msg ...interface{}) { +func Equal(loggerFn assertionLoggerFn, expected, actual any, msg ...any) { if expected != actual { errMsg := parseMsg("Values are not equal", msg...) _, file, line, _ := runtime.Caller(2) @@ -41,7 +41,7 @@ func Equal(loggerFn assertionLoggerFn, expected, actual interface{}, msg ...inte } // NotEqual compares values using comparison operator. -func NotEqual(loggerFn assertionLoggerFn, expected, actual interface{}, msg ...interface{}) { +func NotEqual(loggerFn assertionLoggerFn, expected, actual any, msg ...any) { if expected == actual { errMsg := parseMsg("Values are equal", msg...) _, file, line, _ := runtime.Caller(2) @@ -50,7 +50,7 @@ func NotEqual(loggerFn assertionLoggerFn, expected, actual interface{}, msg ...i } // DeepEqual compares values using DeepEqual. -func DeepEqual(loggerFn assertionLoggerFn, expected, actual interface{}, msg ...interface{}) { +func DeepEqual(loggerFn assertionLoggerFn, expected, actual any, msg ...any) { if !isDeepEqual(expected, actual) { errMsg := parseMsg("Values are not equal", msg...) _, file, line, _ := runtime.Caller(2) @@ -68,7 +68,7 @@ var protobufPrivateFields = map[string]bool{ "state": true, } -func ProtobufPrettyDiff(a, b interface{}) string { +func ProtobufPrettyDiff(a, b any) string { d, _ := messagediff.DeepDiff(a, b) var dstr []string appendNotProto := func(path, str string) { @@ -92,7 +92,7 @@ func ProtobufPrettyDiff(a, b interface{}) string { } // DeepNotEqual compares values using DeepEqual. -func DeepNotEqual(loggerFn assertionLoggerFn, expected, actual interface{}, msg ...interface{}) { +func DeepNotEqual(loggerFn assertionLoggerFn, expected, actual any, msg ...any) { if isDeepEqual(expected, actual) { errMsg := parseMsg("Values are equal", msg...) _, file, line, _ := runtime.Caller(2) @@ -101,7 +101,7 @@ func DeepNotEqual(loggerFn assertionLoggerFn, expected, actual interface{}, msg } // DeepSSZEqual compares values using ssz.DeepEqual. -func DeepSSZEqual(loggerFn assertionLoggerFn, expected, actual interface{}, msg ...interface{}) { +func DeepSSZEqual(loggerFn assertionLoggerFn, expected, actual any, msg ...any) { if !equality.DeepEqual(expected, actual) { errMsg := parseMsg("Values are not equal", msg...) _, file, line, _ := runtime.Caller(2) @@ -111,7 +111,7 @@ func DeepSSZEqual(loggerFn assertionLoggerFn, expected, actual interface{}, msg } // DeepNotSSZEqual compares values using ssz.DeepEqual. 
-func DeepNotSSZEqual(loggerFn assertionLoggerFn, expected, actual interface{}, msg ...interface{}) { +func DeepNotSSZEqual(loggerFn assertionLoggerFn, expected, actual any, msg ...any) { if equality.DeepEqual(expected, actual) { errMsg := parseMsg("Values are equal", msg...) _, file, line, _ := runtime.Caller(2) @@ -120,7 +120,7 @@ func DeepNotSSZEqual(loggerFn assertionLoggerFn, expected, actual interface{}, m } // StringContains checks whether a string contains specified substring. If flag is false, inverse is checked. -func StringContains(loggerFn assertionLoggerFn, expected, actual string, flag bool, msg ...interface{}) { +func StringContains(loggerFn assertionLoggerFn, expected, actual string, flag bool, msg ...any) { if flag { if !strings.Contains(actual, expected) { errMsg := parseMsg("Expected substring is not found", msg...) @@ -137,7 +137,7 @@ func StringContains(loggerFn assertionLoggerFn, expected, actual string, flag bo } // NoError asserts that error is nil. -func NoError(loggerFn assertionLoggerFn, err error, msg ...interface{}) { +func NoError(loggerFn assertionLoggerFn, err error, msg ...any) { // reflect.ValueOf is needed for nil instances of custom types implementing Error if err != nil && !reflect.ValueOf(err).IsNil() { errMsg := parseMsg("Unexpected error", msg...) @@ -148,7 +148,7 @@ func NoError(loggerFn assertionLoggerFn, err error, msg ...interface{}) { // ErrorIs uses Errors.Is to recursively unwrap err looking for target in the chain. // If any error in the chain matches target, the assertion will pass. -func ErrorIs(loggerFn assertionLoggerFn, err, target error, msg ...interface{}) { +func ErrorIs(loggerFn assertionLoggerFn, err, target error, msg ...any) { if !errors.Is(err, target) { errMsg := parseMsg(fmt.Sprintf("error %s", target), msg...) _, file, line, _ := runtime.Caller(2) @@ -157,7 +157,7 @@ func ErrorIs(loggerFn assertionLoggerFn, err, target error, msg ...interface{}) } // ErrorContains asserts that actual error contains wanted message. -func ErrorContains(loggerFn assertionLoggerFn, want string, err error, msg ...interface{}) { +func ErrorContains(loggerFn assertionLoggerFn, want string, err error, msg ...any) { if want == "" { loggerFn("Want string can't be empty") } @@ -169,7 +169,7 @@ func ErrorContains(loggerFn assertionLoggerFn, want string, err error, msg ...in } // NotNil asserts that passed value is not nil. -func NotNil(loggerFn assertionLoggerFn, obj interface{}, msg ...interface{}) { +func NotNil(loggerFn assertionLoggerFn, obj any, msg ...any) { if deepNil(obj) { errMsg := parseMsg("Unexpected nil value", msg...) _, file, line, _ := runtime.Caller(2) @@ -178,7 +178,7 @@ func NotNil(loggerFn assertionLoggerFn, obj interface{}, msg ...interface{}) { } // IsNil asserts that observed value is nil. -func IsNil(loggerFn assertionLoggerFn, got interface{}, msg ...interface{}) { +func IsNil(loggerFn assertionLoggerFn, got any, msg ...any) { if !deepNil(got) { errMsg := parseMsg("Value is unexpectedly not nil", msg...) _, file, line, _ := runtime.Caller(2) @@ -187,7 +187,7 @@ func IsNil(loggerFn assertionLoggerFn, got interface{}, msg ...interface{}) { } // deepNil checks that underlying value of obj is nil. -func deepNil(got interface{}) bool { +func deepNil(got any) bool { if got == nil { return true } @@ -200,7 +200,7 @@ func deepNil(got interface{}) bool { } // LogsContain checks whether a given substring is a part of logs. If flag=false, inverse is checked. 
-func LogsContain(loggerFn assertionLoggerFn, hook *test.Hook, want string, flag bool, msg ...interface{}) { +func LogsContain(loggerFn assertionLoggerFn, hook *test.Hook, want string, flag bool, msg ...any) { _, file, line, _ := runtime.Caller(2) entries := hook.AllEntries() logs := make([]string, 0, len(entries)) @@ -236,7 +236,7 @@ func LogsContain(loggerFn assertionLoggerFn, hook *test.Hook, want string, flag } } -func parseMsg(defaultMsg string, msg ...interface{}) string { +func parseMsg(defaultMsg string, msg ...any) string { if len(msg) >= 1 { msgFormat, ok := msg[0].(string) if !ok { @@ -247,7 +247,7 @@ func parseMsg(defaultMsg string, msg ...interface{}) string { return defaultMsg } -func isDeepEqual(expected, actual interface{}) bool { +func isDeepEqual(expected, actual any) bool { _, isProto := expected.(proto.Message) if isProto { return proto.Equal(expected.(proto.Message), actual.(proto.Message)) @@ -257,7 +257,7 @@ func isDeepEqual(expected, actual interface{}) bool { // NotEmpty asserts that an object's fields are not empty. This function recursively checks each // pointer / struct field. -func NotEmpty(loggerFn assertionLoggerFn, obj interface{}, msg ...interface{}) { +func NotEmpty(loggerFn assertionLoggerFn, obj any, msg ...any) { _, ignoreFieldsWithoutTags := obj.(proto.Message) notEmpty(loggerFn, obj, ignoreFieldsWithoutTags, []string{} /*fields*/, 0 /*stackSize*/, msg...) } @@ -265,7 +265,7 @@ func NotEmpty(loggerFn assertionLoggerFn, obj interface{}, msg ...interface{}) { // notEmpty checks all fields are not zero, including pointer field references to other structs. // This method has the option to ignore fields without struct tags, which is helpful for checking // protobuf messages that have internal fields. -func notEmpty(loggerFn assertionLoggerFn, obj interface{}, ignoreFieldsWithoutTags bool, fields []string, stackSize int, msg ...interface{}) { +func notEmpty(loggerFn assertionLoggerFn, obj any, ignoreFieldsWithoutTags bool, fields []string, stackSize int, msg ...any) { var v reflect.Value if vo, ok := obj.(reflect.Value); ok { v = reflect.Indirect(vo) @@ -333,11 +333,11 @@ type TBMock struct { } // Errorf writes testing logs to ErrorfMsg. -func (tb *TBMock) Errorf(format string, args ...interface{}) { +func (tb *TBMock) Errorf(format string, args ...any) { tb.ErrorfMsg = fmt.Sprintf(format, args...) } // Fatalf writes testing logs to FatalfMsg. -func (tb *TBMock) Fatalf(format string, args ...interface{}) { +func (tb *TBMock) Fatalf(format string, args ...any) { tb.FatalfMsg = fmt.Sprintf(format, args...) 
} diff --git a/testing/assertions/assertions_test.go b/testing/assertions/assertions_test.go index c81034c90a..0cd40cb0d1 100644 --- a/testing/assertions/assertions_test.go +++ b/testing/assertions/assertions_test.go @@ -20,9 +20,9 @@ import ( func Test_Equal(t *testing.T) { type args struct { tb *assertions.TBMock - expected interface{} - actual interface{} - msgs []interface{} + expected any + actual any + msgs []any } tests := []struct { name string @@ -61,7 +61,7 @@ func Test_Equal(t *testing.T) { tb: &assertions.TBMock{}, expected: 42, actual: 41, - msgs: []interface{}{"Custom values are not equal"}, + msgs: []any{"Custom values are not equal"}, }, expectedErr: "Custom values are not equal, want: 42 (int), got: 41 (int)", }, @@ -71,7 +71,7 @@ func Test_Equal(t *testing.T) { tb: &assertions.TBMock{}, expected: 42, actual: 41, - msgs: []interface{}{"Custom values are not equal (for slot %d)", 12}, + msgs: []any{"Custom values are not equal (for slot %d)", 12}, }, expectedErr: "Custom values are not equal (for slot 12), want: 42 (int), got: 41 (int)", }, @@ -98,9 +98,9 @@ func Test_Equal(t *testing.T) { func Test_NotEqual(t *testing.T) { type args struct { tb *assertions.TBMock - expected interface{} - actual interface{} - msgs []interface{} + expected any + actual any + msgs []any } tests := []struct { name string @@ -138,7 +138,7 @@ func Test_NotEqual(t *testing.T) { tb: &assertions.TBMock{}, expected: 42, actual: 42, - msgs: []interface{}{"Custom values are equal"}, + msgs: []any{"Custom values are equal"}, }, expectedErr: "Custom values are equal, both values are equal", }, @@ -165,9 +165,9 @@ func Test_NotEqual(t *testing.T) { func TestAssert_DeepEqual(t *testing.T) { type args struct { tb *assertions.TBMock - expected interface{} - actual interface{} - msgs []interface{} + expected any + actual any + msgs []any } tests := []struct { name string @@ -197,7 +197,7 @@ func TestAssert_DeepEqual(t *testing.T) { tb: &assertions.TBMock{}, expected: struct{ i int }{42}, actual: struct{ i int }{41}, - msgs: []interface{}{"Custom values are not equal"}, + msgs: []any{"Custom values are not equal"}, }, expectedErr: "Custom values are not equal, expected != actual, diff: struct{ i int }{\n- \ti: 42,\n+ \ti: 41,\n }", }, @@ -207,7 +207,7 @@ func TestAssert_DeepEqual(t *testing.T) { tb: &assertions.TBMock{}, expected: struct{ i int }{42}, actual: struct{ i int }{41}, - msgs: []interface{}{"Custom values are not equal (for slot %d)", 12}, + msgs: []any{"Custom values are not equal (for slot %d)", 12}, }, expectedErr: "Custom values are not equal (for slot 12), expected != actual, diff: struct{ i int }{\n- \ti: 42,\n+ \ti: 41,\n }\n", }, @@ -249,9 +249,9 @@ func TestAssert_DeepEqual(t *testing.T) { func TestAssert_DeepNotEqual(t *testing.T) { type args struct { tb *assertions.TBMock - expected interface{} - actual interface{} - msgs []interface{} + expected any + actual any + msgs []any } tests := []struct { name string @@ -281,7 +281,7 @@ func TestAssert_DeepNotEqual(t *testing.T) { tb: &assertions.TBMock{}, expected: struct{ i int }{42}, actual: struct{ i int }{42}, - msgs: []interface{}{"Custom values are equal"}, + msgs: []any{"Custom values are equal"}, }, expectedErr: "Custom values are equal, want: struct { i int }{i:42}, got: struct { i int }{i:42}", }, @@ -291,7 +291,7 @@ func TestAssert_DeepNotEqual(t *testing.T) { tb: &assertions.TBMock{}, expected: struct{ i int }{42}, actual: struct{ i int }{42}, - msgs: []interface{}{"Custom values are equal (for slot %d)", 12}, + msgs: []any{"Custom 
values are equal (for slot %d)", 12}, }, expectedErr: "Custom values are equal (for slot 12), want: struct { i int }{i:42}, got: struct { i int }{i:42}", }, @@ -318,8 +318,8 @@ func TestAssert_DeepNotEqual(t *testing.T) { func TestAssert_DeepSSZEqual(t *testing.T) { type args struct { tb *assertions.TBMock - expected interface{} - actual interface{} + expected any + actual any } tests := []struct { name string @@ -380,8 +380,8 @@ func TestAssert_DeepSSZEqual(t *testing.T) { func TestAssert_DeepNotSSZEqual(t *testing.T) { type args struct { tb *assertions.TBMock - expected interface{} - actual interface{} + expected any + actual any } tests := []struct { name string @@ -443,7 +443,7 @@ func TestAssert_NoError(t *testing.T) { type args struct { tb *assertions.TBMock err error - msgs []interface{} + msgs []any } tests := []struct { name string @@ -469,7 +469,7 @@ func TestAssert_NoError(t *testing.T) { args: args{ tb: &assertions.TBMock{}, err: errors.New("failed"), - msgs: []interface{}{"Custom error message"}, + msgs: []any{"Custom error message"}, }, expectedErr: "Custom error message: failed", }, @@ -478,7 +478,7 @@ func TestAssert_NoError(t *testing.T) { args: args{ tb: &assertions.TBMock{}, err: errors.New("failed"), - msgs: []interface{}{"Custom error message (for slot %d)", 12}, + msgs: []any{"Custom error message (for slot %d)", 12}, }, expectedErr: "Custom error message (for slot 12): failed", }, @@ -507,7 +507,7 @@ func TestAssert_ErrorContains(t *testing.T) { tb *assertions.TBMock want string err error - msgs []interface{} + msgs []any } tests := []struct { name string @@ -546,7 +546,7 @@ func TestAssert_ErrorContains(t *testing.T) { tb: &assertions.TBMock{}, want: "another error", err: errors.New("failed"), - msgs: []interface{}{"Something wrong"}, + msgs: []any{"Something wrong"}, }, expectedErr: "Something wrong, got: failed, want: another error", }, @@ -556,7 +556,7 @@ func TestAssert_ErrorContains(t *testing.T) { tb: &assertions.TBMock{}, want: "failed", err: errors.New("failed"), - msgs: []interface{}{"Something wrong"}, + msgs: []any{"Something wrong"}, }, expectedErr: "", }, @@ -566,7 +566,7 @@ func TestAssert_ErrorContains(t *testing.T) { tb: &assertions.TBMock{}, want: "another error", err: errors.New("failed"), - msgs: []interface{}{"Something wrong (for slot %d)", 12}, + msgs: []any{"Something wrong (for slot %d)", 12}, }, expectedErr: "Something wrong (for slot 12), got: failed, want: another error", }, @@ -576,7 +576,7 @@ func TestAssert_ErrorContains(t *testing.T) { tb: &assertions.TBMock{}, want: "failed", err: errors.New("failed"), - msgs: []interface{}{"Something wrong (for slot %d)", 12}, + msgs: []any{"Something wrong (for slot %d)", 12}, }, expectedErr: "", }, @@ -586,7 +586,7 @@ func TestAssert_ErrorContains(t *testing.T) { tb: &assertions.TBMock{}, want: "", err: errors.New("failed"), - msgs: []interface{}{"Something wrong (for slot %d)", 12}, + msgs: []any{"Something wrong (for slot %d)", 12}, }, expectedErr: "Want string can't be empty", }, @@ -613,8 +613,8 @@ func TestAssert_ErrorContains(t *testing.T) { func Test_NotNil(t *testing.T) { type args struct { tb *assertions.TBMock - obj interface{} - msgs []interface{} + obj any + msgs []any } var nilBlock *eth.SignedBeaconBlock = nil tests := []struct { @@ -633,7 +633,7 @@ func Test_NotNil(t *testing.T) { name: "nil custom message", args: args{ tb: &assertions.TBMock{}, - msgs: []interface{}{"This should not be nil"}, + msgs: []any{"This should not be nil"}, }, expectedErr: "This should not be nil", }, @@ 
-641,7 +641,7 @@ func Test_NotNil(t *testing.T) { name: "nil custom message with params", args: args{ tb: &assertions.TBMock{}, - msgs: []interface{}{"This should not be nil (for slot %d)", 12}, + msgs: []any{"This should not be nil (for slot %d)", 12}, }, expectedErr: "This should not be nil (for slot 12)", }, @@ -693,7 +693,7 @@ func Test_LogsContainDoNotContain(t *testing.T) { tb *assertions.TBMock want string flag bool - msgs []interface{} + msgs []any } tests := []struct { name string @@ -726,7 +726,7 @@ func Test_LogsContainDoNotContain(t *testing.T) { name: "should contain not found custom message", args: args{ tb: &assertions.TBMock{}, - msgs: []interface{}{"Waited for logs"}, + msgs: []any{"Waited for logs"}, want: "here goes some expected log string", flag: true, }, @@ -736,7 +736,7 @@ func Test_LogsContainDoNotContain(t *testing.T) { name: "should contain not found custom message with params", args: args{ tb: &assertions.TBMock{}, - msgs: []interface{}{"Waited for %d logs", 10}, + msgs: []any{"Waited for %d logs", 10}, want: "here goes some expected log string", flag: true, }, @@ -765,7 +765,7 @@ func Test_LogsContainDoNotContain(t *testing.T) { name: "should not contain but found custom message", args: args{ tb: &assertions.TBMock{}, - msgs: []interface{}{"Dit not expect logs"}, + msgs: []any{"Dit not expect logs"}, want: "here goes some unexpected log string", }, updateLogs: func(log *logrus.Logger) { @@ -777,7 +777,7 @@ func Test_LogsContainDoNotContain(t *testing.T) { name: "should not contain but found custom message with params", args: args{ tb: &assertions.TBMock{}, - msgs: []interface{}{"Dit not expect %d logs", 10}, + msgs: []any{"Dit not expect %d logs", 10}, want: "here goes some unexpected log string", }, updateLogs: func(log *logrus.Logger) { @@ -824,9 +824,9 @@ func Test_LogsContainDoNotContain(t *testing.T) { func TestAssert_NotEmpty(t *testing.T) { type args struct { tb *assertions.TBMock - input interface{} - actual interface{} - msgs []interface{} + input any + actual any + msgs []any } tests := []struct { name string diff --git a/testing/endtoend/component_handler_test.go b/testing/endtoend/component_handler_test.go index 9743268791..48a67947f4 100644 --- a/testing/endtoend/component_handler_test.go +++ b/testing/endtoend/component_handler_test.go @@ -256,7 +256,7 @@ func (c *componentHandler) required() []e2etypes.ComponentRunner { return requiredComponents } -func (c *componentHandler) printPIDs(logger func(string, ...interface{})) { +func (c *componentHandler) printPIDs(logger func(string, ...any)) { msg := "\nPID of components. Attach a debugger... if you dare!\n\n" msg += "This test PID: " + strconv.Itoa(os.Getpid()) + " (parent=" + strconv.Itoa(os.Getppid()) + ")\n" diff --git a/testing/endtoend/components/beacon_node.go b/testing/endtoend/components/beacon_node.go index 98f9131394..ad827b86be 100644 --- a/testing/endtoend/components/beacon_node.go +++ b/testing/endtoend/components/beacon_node.go @@ -72,7 +72,7 @@ func (s *BeaconNodeSet) Start(ctx context.Context) error { // Once nodes are ready passed in handler function will be called. 
return helpers.WaitOnNodes(ctx, nodes, func() { if s.config.UseFixedPeerIDs { - for i := 0; i < len(nodes); i++ { + for i := range nodes { s.ids = append(s.ids, nodes[i].(*BeaconNode).peerID) } s.config.PeerIDs = s.ids diff --git a/testing/endtoend/components/builder.go b/testing/endtoend/components/builder.go index 467b27b8f0..d3e8aaa482 100644 --- a/testing/endtoend/components/builder.go +++ b/testing/endtoend/components/builder.go @@ -36,7 +36,7 @@ func NewBuilderSet() *BuilderSet { func (s *BuilderSet) Start(ctx context.Context) error { totalNodeCount := e2e.TestParams.BeaconNodeCount + e2e.TestParams.LighthouseBeaconNodeCount nodes := make([]e2etypes.ComponentRunner, totalNodeCount) - for i := 0; i < totalNodeCount; i++ { + for i := range totalNodeCount { nodes[i] = NewBuilder(i) } s.builders = nodes diff --git a/testing/endtoend/components/eth1/depositor.go b/testing/endtoend/components/eth1/depositor.go index f7dda70ab6..73993f3655 100644 --- a/testing/endtoend/components/eth1/depositor.go +++ b/testing/endtoend/components/eth1/depositor.go @@ -198,7 +198,7 @@ func (d *Depositor) SendAndMineByBatch(ctx context.Context, offset, nvals, batch } numBatch := len(deposits) / batchSize log.WithField("numDeposits", len(deposits)).WithField("batchSize", batchSize).WithField("numBatches", numBatch).WithField("balance", balance.String()).WithField("account", d.Key.Address.Hex()).Info("SendAndMineByBatch check") - for i := 0; i < numBatch; i++ { + for i := range numBatch { txo, err := d.txops(ctx) if err != nil { return err diff --git a/testing/endtoend/components/eth1/miner.go b/testing/endtoend/components/eth1/miner.go index 3f09a9e866..a8cb378e5e 100644 --- a/testing/endtoend/components/eth1/miner.go +++ b/testing/endtoend/components/eth1/miner.go @@ -191,7 +191,7 @@ func (m *Miner) Start(ctx context.Context) error { // give the miner start a couple of tries, since the p2p networking check is flaky var retryErr error var minerLog *os.File - for attempt := 0; attempt < 3; attempt++ { + for attempt := range 3 { minerLog, retryErr = m.initAttempt(ctx, attempt) if retryErr == nil { log.Infof("Miner started after %d retries", attempt) diff --git a/testing/endtoend/components/eth1/node.go b/testing/endtoend/components/eth1/node.go index 50d6f73eb4..ef9d9b5bea 100644 --- a/testing/endtoend/components/eth1/node.go +++ b/testing/endtoend/components/eth1/node.go @@ -116,7 +116,7 @@ func (node *Node) Start(ctx context.Context) error { // give the miner start a couple of tries, since the p2p networking check is flaky var retryErr error - for retries := 0; retries < 3; retries++ { + for retries := range 3 { retryErr = nil log.Infof("Starting eth1 node %d, attempt %d with flags: %s", node.index, retries, strings.Join(args[2:], " ")) runCmd := exec.CommandContext(ctx, binaryPath, args...) // #nosec G204 -- Safe diff --git a/testing/endtoend/components/eth1/node_set.go b/testing/endtoend/components/eth1/node_set.go index ccd80636ce..0cdab45d24 100644 --- a/testing/endtoend/components/eth1/node_set.go +++ b/testing/endtoend/components/eth1/node_set.go @@ -37,7 +37,7 @@ func (s *NodeSet) Start(ctx context.Context) error { // beacon node will connect to the already existing Eth1 miner. totalNodeCount := e2e.TestParams.BeaconNodeCount + e2e.TestParams.LighthouseBeaconNodeCount - 1 nodes := make([]e2etypes.ComponentRunner, totalNodeCount) - for i := 0; i < totalNodeCount; i++ { + for i := range totalNodeCount { // We start indexing nodes from 1 because the miner has an implicit 0 index. 
node := NewNode(i+1, s.enr) nodes[i] = node diff --git a/testing/endtoend/components/eth1/proxy.go b/testing/endtoend/components/eth1/proxy.go index 1de8c1cda2..8ab3144a34 100644 --- a/testing/endtoend/components/eth1/proxy.go +++ b/testing/endtoend/components/eth1/proxy.go @@ -36,7 +36,7 @@ func NewProxySet() *ProxySet { func (s *ProxySet) Start(ctx context.Context) error { totalNodeCount := e2e.TestParams.BeaconNodeCount + e2e.TestParams.LighthouseBeaconNodeCount nodes := make([]e2etypes.ComponentRunner, totalNodeCount) - for i := 0; i < totalNodeCount; i++ { + for i := range totalNodeCount { nodes[i] = NewProxy(i) } s.proxies = nodes @@ -194,7 +194,7 @@ func (node *Proxy) Stop() error { } // AddRequestInterceptor adds in a json-rpc request interceptor. -func (node *Proxy) AddRequestInterceptor(rpcMethodName string, responseGen func() interface{}, trigger func() bool) { +func (node *Proxy) AddRequestInterceptor(rpcMethodName string, responseGen func() any, trigger func() bool) { node.engineProxy.AddRequestInterceptor(rpcMethodName, responseGen, trigger) } diff --git a/testing/endtoend/components/eth1/transactions.go b/testing/endtoend/components/eth1/transactions.go index e6b8f15679..0ff409dcb4 100644 --- a/testing/endtoend/components/eth1/transactions.go +++ b/testing/endtoend/components/eth1/transactions.go @@ -138,7 +138,7 @@ func SendTransaction(client *rpc.Client, key *ecdsa.PrivateKey, f *filler.Filler } g, _ := errgroup.WithContext(context.Background()) txs := make([]*types.Transaction, 10) - for i := uint64(0); i < 10; i++ { + for i := range uint64(10) { index := i g.Go(func() error { tx, err := RandomBlobTx(client, f, fundedAccount.Address, nonce+index, gasPrice, chainid, al) @@ -182,7 +182,7 @@ func SendTransaction(client *rpc.Client, key *ecdsa.PrivateKey, f *filler.Filler } txs = make([]*types.Transaction, N) - for i := uint64(0); i < N; i++ { + for i := range N { index := i g.Go(func() error { tx, err := txfuzz.RandomValidTx(client, f, sender, nonce+index, gasPrice, chainid, al) @@ -359,10 +359,7 @@ func encodeBlobs(data []byte) []kzg4844.Blob { blobIndex++ fieldIndex = 0 } - max := i + 31 - if max > len(data) { - max = len(data) - } + max := min(i+31, len(data)) copy(blobs[blobIndex][fieldIndex*32+1:], data[i:max]) } return blobs diff --git a/testing/endtoend/components/lighthouse_beacon.go b/testing/endtoend/components/lighthouse_beacon.go index 3eb5c4a6e9..6e3be4dc93 100644 --- a/testing/endtoend/components/lighthouse_beacon.go +++ b/testing/endtoend/components/lighthouse_beacon.go @@ -261,7 +261,7 @@ func (node *LighthouseBeaconNode) createTestnetDir(ctx context.Context, index in return "", err } bootPath := filepath.Join(testNetDir, "boot_enr.yaml") - enrYaml := []byte(fmt.Sprintf("[%s]", node.enr)) + enrYaml := fmt.Appendf(nil, "[%s]", node.enr) if err := file.WriteFile(bootPath, enrYaml); err != nil { return "", err } diff --git a/testing/endtoend/components/lighthouse_validator.go b/testing/endtoend/components/lighthouse_validator.go index 7cbdcd530b..da9e7054ce 100644 --- a/testing/endtoend/components/lighthouse_validator.go +++ b/testing/endtoend/components/lighthouse_validator.go @@ -59,7 +59,7 @@ func (s *LighthouseValidatorNodeSet) Start(ctx context.Context) error { // Create validator nodes. 
nodes := make([]types.ComponentRunner, lighthouseBeaconNum) - for i := 0; i < lighthouseBeaconNum; i++ { + for i := range lighthouseBeaconNum { offsetIdx := i + prysmBeaconNum nodes[i] = NewLighthouseValidatorNode(s.config, validatorsPerNode, i, validatorsPerNode*offsetIdx) } @@ -260,7 +260,7 @@ func (k *KeystoreGenerator) Start(_ context.Context) error { } validatorsPerNode := validatorNum / beaconNodeNum - for i := 0; i < lighthouseBeaconNum; i++ { + for i := range lighthouseBeaconNum { offsetIdx := i + prysmBeaconNum _, err := setupKeystores(i, validatorsPerNode*offsetIdx, validatorsPerNode) if err != nil { diff --git a/testing/endtoend/components/validator.go b/testing/endtoend/components/validator.go index 0fa5c6aa03..bf50868d37 100644 --- a/testing/endtoend/components/validator.go +++ b/testing/endtoend/components/validator.go @@ -62,7 +62,7 @@ func (s *ValidatorNodeSet) Start(ctx context.Context) error { validatorsPerNode := validatorNum / beaconNodeNum // Create validator nodes. nodes := make([]e2etypes.ComponentRunner, prysmBeaconNodeNum) - for i := 0; i < prysmBeaconNodeNum; i++ { + for i := range prysmBeaconNodeNum { nodes[i] = NewValidatorNode(s.config, validatorsPerNode, i, validatorsPerNode*i) } s.nodes = nodes diff --git a/testing/endtoend/endtoend_test.go b/testing/endtoend/endtoend_test.go index 70c661ae59..bf08bbaa64 100644 --- a/testing/endtoend/endtoend_test.go +++ b/testing/endtoend/endtoend_test.go @@ -11,6 +11,7 @@ import ( "math/big" "os" "path" + "slices" "strings" "sync" "testing" @@ -322,7 +323,7 @@ func (r *testRunner) testCheckpointSync(ctx context.Context, g *errgroup.Group, return err } - flags := append([]string{}, r.config.BeaconFlags...) + flags := slices.Clone(r.config.BeaconFlags) flags = append(flags, fmt.Sprintf("--checkpoint-sync-url=%s", bnAPI)) flags = append(flags, fmt.Sprintf("--genesis-beacon-api-url=%s", bnAPI)) @@ -689,7 +690,7 @@ func (r *testRunner) multiScenarioMulticlient(ec *e2etypes.EvaluationContext, ep // Set it for prysm beacon node. component, err := r.comHandler.eth1Proxy.ComponentAtIndex(0) require.NoError(r.t, err) - component.(e2etypes.EngineProxy).AddRequestInterceptor(newPayloadMethod, func() interface{} { + component.(e2etypes.EngineProxy).AddRequestInterceptor(newPayloadMethod, func() any { return &enginev1.PayloadStatus{ Status: enginev1.PayloadStatus_SYNCING, LatestValidHash: make([]byte, 32), @@ -700,7 +701,7 @@ func (r *testRunner) multiScenarioMulticlient(ec *e2etypes.EvaluationContext, ep // Set it for lighthouse beacon node. 
component, err = r.comHandler.eth1Proxy.ComponentAtIndex(2) require.NoError(r.t, err) - component.(e2etypes.EngineProxy).AddRequestInterceptor(newPayloadMethod, func() interface{} { + component.(e2etypes.EngineProxy).AddRequestInterceptor(newPayloadMethod, func() any { return &enginev1.PayloadStatus{ Status: enginev1.PayloadStatus_SYNCING, LatestValidHash: make([]byte, 32), @@ -709,7 +710,7 @@ func (r *testRunner) multiScenarioMulticlient(ec *e2etypes.EvaluationContext, ep return true }) - component.(e2etypes.EngineProxy).AddRequestInterceptor(forkChoiceUpdatedMethod, func() interface{} { + component.(e2etypes.EngineProxy).AddRequestInterceptor(forkChoiceUpdatedMethod, func() any { return &ForkchoiceUpdatedResponse{ Status: &enginev1.PayloadStatus{ Status: enginev1.PayloadStatus_SYNCING, @@ -814,7 +815,7 @@ func (r *testRunner) multiScenario(ec *e2etypes.EvaluationContext, epoch uint64, case optimisticStartEpoch: component, err := r.comHandler.eth1Proxy.ComponentAtIndex(0) require.NoError(r.t, err) - component.(e2etypes.EngineProxy).AddRequestInterceptor(newPayloadMethod, func() interface{} { + component.(e2etypes.EngineProxy).AddRequestInterceptor(newPayloadMethod, func() any { return &enginev1.PayloadStatus{ Status: enginev1.PayloadStatus_SYNCING, LatestValidHash: make([]byte, 32), diff --git a/testing/endtoend/evaluators/beaconapi/requests.go b/testing/endtoend/evaluators/beaconapi/requests.go index 7abdaa4249..854dd65019 100644 --- a/testing/endtoend/evaluators/beaconapi/requests.go +++ b/testing/endtoend/evaluators/beaconapi/requests.go @@ -149,7 +149,7 @@ var getRequests = map[string]endpoint{ withSanityCheckOnly()), "/config/fork_schedule": newMetadata[structs.GetForkScheduleResponse]( v1PathTemplate, - withCustomEval(func(p interface{}, lh interface{}) error { + withCustomEval(func(p any, lh any) error { pResp, ok := p.(*structs.GetForkScheduleResponse) if !ok { return fmt.Errorf(msgWrongJSON, &structs.GetForkScheduleResponse{}, p) @@ -199,7 +199,7 @@ var getRequests = map[string]endpoint{ withSanityCheckOnly()), "/node/version": newMetadata[structs.GetVersionResponse]( v1PathTemplate, - withCustomEval(func(p interface{}, _ interface{}) error { + withCustomEval(func(p any, _ any) error { pResp, ok := p.(*structs.GetVersionResponse) if !ok { return fmt.Errorf(msgWrongJSON, &structs.GetVersionResponse{}, p) @@ -218,7 +218,7 @@ var getRequests = map[string]endpoint{ withParams(func(currentEpoch primitives.Epoch) []string { return []string{fmt.Sprintf("%v", currentEpoch)} }), - withCustomEval(func(p interface{}, lh interface{}) error { + withCustomEval(func(p any, lh any) error { pResp, ok := p.(*structs.GetProposerDutiesResponse) if !ok { return fmt.Errorf(msgWrongJSON, &structs.GetProposerDutiesResponse{}, p) @@ -259,7 +259,7 @@ var ( withParams(func(_ primitives.Epoch) []string { return []string{"head"} }), - withPOSTObj(func() interface{} { + withPOSTObj(func() any { return struct { Ids []string `json:"ids"` Statuses []string `json:"statuses"` diff --git a/testing/endtoend/evaluators/beaconapi/types.go b/testing/endtoend/evaluators/beaconapi/types.go index 8253f79eae..d9118de700 100644 --- a/testing/endtoend/evaluators/beaconapi/types.go +++ b/testing/endtoend/evaluators/beaconapi/types.go @@ -14,16 +14,16 @@ type endpoint interface { setSszResp(resp []byte) // sets the Prysm SSZ response getStart() primitives.Epoch setStart(start primitives.Epoch) - getPOSTObj() interface{} - setPOSTObj(obj interface{}) - getPResp() interface{} // retrieves the Prysm JSON response - getLHResp() 
interface{} // retrieves the Lighthouse JSON response + getPOSTObj() any + setPOSTObj(obj any) + getPResp() any // retrieves the Prysm JSON response + getLHResp() any // retrieves the Lighthouse JSON response getParams(currentEpoch primitives.Epoch) []string setParams(f func(currentEpoch primitives.Epoch) []string) getQueryParams(currentEpoch primitives.Epoch) []string setQueryParams(f func(currentEpoch primitives.Epoch) []string) - getCustomEval() func(interface{}, interface{}) error - setCustomEval(f func(interface{}, interface{}) error) + getCustomEval() func(any, any) error + setCustomEval(f func(any, any) error) } type apiEndpoint[Resp any] struct { @@ -31,13 +31,13 @@ type apiEndpoint[Resp any] struct { sanity bool ssz bool start primitives.Epoch - postObj interface{} + postObj any pResp *Resp // Prysm JSON response lhResp *Resp // Lighthouse JSON response sszResp []byte // Prysm SSZ response params func(currentEpoch primitives.Epoch) []string queryParams func(currentEpoch primitives.Epoch) []string - customEval func(interface{}, interface{}) error + customEval func(any, any) error } func (e *apiEndpoint[Resp]) getBasePath() string { @@ -76,19 +76,19 @@ func (e *apiEndpoint[Resp]) setStart(start primitives.Epoch) { e.start = start } -func (e *apiEndpoint[Resp]) getPOSTObj() interface{} { +func (e *apiEndpoint[Resp]) getPOSTObj() any { return e.postObj } -func (e *apiEndpoint[Resp]) setPOSTObj(obj interface{}) { +func (e *apiEndpoint[Resp]) setPOSTObj(obj any) { e.postObj = obj } -func (e *apiEndpoint[Resp]) getPResp() interface{} { +func (e *apiEndpoint[Resp]) getPResp() any { return e.pResp } -func (e *apiEndpoint[Resp]) getLHResp() interface{} { +func (e *apiEndpoint[Resp]) getLHResp() any { return e.lhResp } @@ -114,11 +114,11 @@ func (e *apiEndpoint[Resp]) setQueryParams(f func(currentEpoch primitives.Epoch) e.queryParams = f } -func (e *apiEndpoint[Resp]) getCustomEval() func(interface{}, interface{}) error { +func (e *apiEndpoint[Resp]) getCustomEval() func(any, any) error { return e.customEval } -func (e *apiEndpoint[Resp]) setCustomEval(f func(interface{}, interface{}) error) { +func (e *apiEndpoint[Resp]) setCustomEval(f func(any, any) error) { e.customEval = f } @@ -158,7 +158,7 @@ func withStart(start primitives.Epoch) endpointOpt { } // We perform a POST instead of GET, sending an object. -func withPOSTObj(obj interface{}) endpointOpt { +func withPOSTObj(obj any) endpointOpt { return func(e endpoint) { e.setPOSTObj(obj) } @@ -179,7 +179,7 @@ func withQueryParams(f func(currentEpoch primitives.Epoch) []string) endpointOpt } // We perform custom evaluation on responses. 
-func withCustomEval(f func(interface{}, interface{}) error) endpointOpt { +func withCustomEval(f func(any, any) error) endpointOpt { return func(e endpoint) { e.setCustomEval(f) } diff --git a/testing/endtoend/evaluators/beaconapi/util.go b/testing/endtoend/evaluators/beaconapi/util.go index 43cb842537..93001b23da 100644 --- a/testing/endtoend/evaluators/beaconapi/util.go +++ b/testing/endtoend/evaluators/beaconapi/util.go @@ -25,7 +25,7 @@ const ( msgSSZUnmarshalFailed = "failed to unmarshal SSZ" ) -func doJSONGETRequest(template, requestPath string, beaconNodeIdx int, resp interface{}, bnType ...string) error { +func doJSONGETRequest(template, requestPath string, beaconNodeIdx int, resp any, bnType ...string) error { if len(bnType) == 0 { bnType = []string{"Prysm"} } @@ -46,7 +46,7 @@ func doJSONGETRequest(template, requestPath string, beaconNodeIdx int, resp inte return errors.Wrap(err, "request failed") } - var body interface{} + var body any if httpResp.StatusCode != http.StatusOK { if httpResp.Header.Get("Content-Type") == api.JsonMediaType { if err = json.NewDecoder(httpResp.Body).Decode(&body); err != nil { @@ -95,7 +95,7 @@ func doSSZGETRequest(template, requestPath string, beaconNodeIdx int, bnType ... return nil, errors.Wrap(err, "request failed") } if resp.StatusCode != http.StatusOK { - var body interface{} + var body any if err := json.NewDecoder(resp.Body).Decode(&body); err != nil { return nil, errors.Wrap(err, "failed to decode response body") } @@ -110,7 +110,7 @@ func doSSZGETRequest(template, requestPath string, beaconNodeIdx int, bnType ... return body, nil } -func doJSONPOSTRequest(template, requestPath string, beaconNodeIdx int, postObj, resp interface{}, bnType ...string) error { +func doJSONPOSTRequest(template, requestPath string, beaconNodeIdx int, postObj, resp any, bnType ...string) error { if len(bnType) == 0 { bnType = []string{"Prysm"} } @@ -139,7 +139,7 @@ func doJSONPOSTRequest(template, requestPath string, beaconNodeIdx int, postObj, return errors.Wrap(err, "request failed") } - var body interface{} + var body any if httpResp.StatusCode != http.StatusOK { if httpResp.Header.Get("Content-Type") == api.JsonMediaType { if err = json.NewDecoder(httpResp.Body).Decode(&body); err != nil { @@ -161,7 +161,7 @@ func doJSONPOSTRequest(template, requestPath string, beaconNodeIdx int, postObj, return nil } -func doSSZPOSTRequest(template, requestPath string, beaconNodeIdx int, postObj interface{}, bnType ...string) ([]byte, error) { +func doSSZPOSTRequest(template, requestPath string, beaconNodeIdx int, postObj any, bnType ...string) ([]byte, error) { if len(bnType) == 0 { bnType = []string{"Prysm"} } @@ -193,7 +193,7 @@ func doSSZPOSTRequest(template, requestPath string, beaconNodeIdx int, postObj i return nil, errors.Wrap(err, "request failed") } if resp.StatusCode != http.StatusOK { - var body interface{} + var body any if err := json.NewDecoder(resp.Body).Decode(&body); err != nil { return nil, errors.Wrap(err, "failed to decode response body") } diff --git a/testing/endtoend/evaluators/beaconapi/verify.go b/testing/endtoend/evaluators/beaconapi/verify.go index a47e599380..2cf97b51c2 100644 --- a/testing/endtoend/evaluators/beaconapi/verify.go +++ b/testing/endtoend/evaluators/beaconapi/verify.go @@ -244,7 +244,7 @@ func postEvaluation(nodeIdx int, requests map[string]endpoint, epoch primitives. 
return nil } -func compareGETJSON(nodeIdx int, base, path string, pResp, lhResp interface{}, customEval func(interface{}, interface{}) error) error { +func compareGETJSON(nodeIdx int, base, path string, pResp, lhResp any, customEval func(any, any) error) error { if err := doJSONGETRequest(base, path, nodeIdx, pResp); err != nil { return errors.Wrapf(err, "issue during Prysm JSON GET request for path %s", path) } @@ -264,7 +264,7 @@ func compareGETJSON(nodeIdx int, base, path string, pResp, lhResp interface{}, c } } -func comparePOSTJSON(nodeIdx int, base, path string, postObj, pResp, lhResp interface{}, customEval func(interface{}, interface{}) error) error { +func comparePOSTJSON(nodeIdx int, base, path string, postObj, pResp, lhResp any, customEval func(any, any) error) error { if err := doJSONPOSTRequest(base, path, nodeIdx, postObj, pResp); err != nil { return errors.Wrapf(err, "issue during Prysm JSON POST request for path %s", path) } @@ -299,7 +299,7 @@ func compareGETSSZ(nodeIdx int, base, path string) ([]byte, error) { return pResp, nil } -func comparePOSTSSZ(nodeIdx int, base, path string, postObj interface{}) ([]byte, error) { +func comparePOSTSSZ(nodeIdx int, base, path string, postObj any) ([]byte, error) { pResp, err := doSSZPOSTRequest(base, path, nodeIdx, postObj) if err != nil { return nil, errors.Wrapf(err, "issue during Prysm SSZ POST request for path %s", path) @@ -314,7 +314,7 @@ func comparePOSTSSZ(nodeIdx int, base, path string, postObj interface{}) ([]byte return pResp, nil } -func compareJSON(pResp, lhResp interface{}) error { +func compareJSON(pResp, lhResp any) error { if !reflect.DeepEqual(pResp, lhResp) { p, err := json.Marshal(pResp) if err != nil { diff --git a/testing/endtoend/evaluators/data.go b/testing/endtoend/evaluators/data.go index 080c9d8ad2..370ebaea42 100644 --- a/testing/endtoend/evaluators/data.go +++ b/testing/endtoend/evaluators/data.go @@ -26,7 +26,7 @@ func checkColdStateCheckpoint(_ *e2etypes.EvaluationContext, conns ...*grpc.Clie ctx := context.Background() client := eth.NewBeaconChainClient(conns[0]) - for i := primitives.Epoch(0); i < epochToCheck; i++ { + for i := range primitives.Epoch(epochToCheck) { res, err := client.ListValidatorAssignments(ctx, ð.ListValidatorAssignmentsRequest{ QueryFilter: ð.ListValidatorAssignmentsRequest_Epoch{Epoch: i}, }) diff --git a/testing/endtoend/evaluators/metrics.go b/testing/endtoend/evaluators/metrics.go index 27e022892d..29d057b9bf 100644 --- a/testing/endtoend/evaluators/metrics.go +++ b/testing/endtoend/evaluators/metrics.go @@ -90,7 +90,7 @@ func metricsTest(_ *types.EvaluationContext, conns ...*grpc.ClientConn) error { currentSlot := slots.CurrentSlot(genesis.Time()) currentEpoch := slots.ToEpoch(currentSlot) forkDigest := params.ForkDigest(currentEpoch) - for i := 0; i < len(conns); i++ { + for i := range conns { response, err := http.Get(fmt.Sprintf("http://localhost:%d/metrics", e2e.TestParams.Ports.PrysmBeaconNodeMetricsPort+i)) if err != nil { // Continue if the connection fails, regular flake. 
diff --git a/testing/endtoend/evaluators/node.go b/testing/endtoend/evaluators/node.go index fb3cb23c54..1d3fe98bab 100644 --- a/testing/endtoend/evaluators/node.go +++ b/testing/endtoend/evaluators/node.go @@ -55,7 +55,7 @@ var AllNodesHaveSameHead = e2etypes.Evaluator{ func healthzCheck(_ *e2etypes.EvaluationContext, conns ...*grpc.ClientConn) error { count := len(conns) - for i := 0; i < count; i++ { + for i := range count { resp, err := http.Get(fmt.Sprintf("http://localhost:%d/healthz", e2e.TestParams.Ports.PrysmBeaconNodeMetricsPort+i)) if err != nil { // Continue if the connection fails, regular flake. @@ -74,7 +74,7 @@ func healthzCheck(_ *e2etypes.EvaluationContext, conns ...*grpc.ClientConn) erro time.Sleep(connTimeDelay) } - for i := 0; i < count; i++ { + for i := range count { resp, err := http.Get(fmt.Sprintf("http://localhost:%d/healthz", e2e.TestParams.Ports.ValidatorMetricsPort+i)) if err != nil { // Continue if the connection fails, regular flake. @@ -157,7 +157,7 @@ func allNodesHaveSameHead(_ *e2etypes.EvaluationContext, conns ...*grpc.ClientCo return err } - for i := 0; i < len(conns); i++ { + for i := range conns { if headEpochs[0] != headEpochs[i] { return fmt.Errorf( "received conflicting head epochs on node %d, expected %d, received %d", diff --git a/testing/endtoend/evaluators/peers.go b/testing/endtoend/evaluators/peers.go index 813d3705c5..de963e1c88 100644 --- a/testing/endtoend/evaluators/peers.go +++ b/testing/endtoend/evaluators/peers.go @@ -55,7 +55,7 @@ func peersTest(_ *types.EvaluationContext, conns ...*grpc.ClientConn) error { return baseErr } -func wrapError(err error, format string, args ...interface{}) error { +func wrapError(err error, format string, args ...any) error { if err == nil { err = errors.New("") } diff --git a/testing/endtoend/helpers/helpers.go b/testing/endtoend/helpers/helpers.go index 54911dab3c..2c075fc3ea 100644 --- a/testing/endtoend/helpers/helpers.go +++ b/testing/endtoend/helpers/helpers.go @@ -303,7 +303,7 @@ func NewLocalConnection(ctx context.Context, port int) (*grpc.ClientConn, error) // NewLocalConnections returns number of GRPC connections, along with function to close all of them. func NewLocalConnections(ctx context.Context, numConns int) ([]*grpc.ClientConn, func(), error) { conns := make([]*grpc.ClientConn, numConns) - for i := 0; i < len(conns); i++ { + for i := range conns { conn, err := NewLocalConnection(ctx, e2e.TestParams.Ports.PrysmBeaconNodeRPCPort+i) if err != nil { return nil, nil, err @@ -322,7 +322,7 @@ func NewLocalConnections(ctx context.Context, numConns int) ([]*grpc.ClientConn, // BeaconAPIHostnames constructs a hostname:port string for the func BeaconAPIHostnames(numConns int) []string { hostnames := make([]string, 0) - for i := 0; i < numConns; i++ { + for i := range numConns { port := e2e.TestParams.Ports.PrysmBeaconNodeHTTPPort + i hostnames = append(hostnames, net.JoinHostPort("127.0.0.1", strconv.Itoa(port))) } diff --git a/testing/endtoend/types/types.go b/testing/endtoend/types/types.go index 887ead02be..d68494aeda 100644 --- a/testing/endtoend/types/types.go +++ b/testing/endtoend/types/types.go @@ -184,7 +184,7 @@ type MultipleComponentRunners interface { type EngineProxy interface { ComponentRunner // AddRequestInterceptor adds in a json-rpc request interceptor. 
- AddRequestInterceptor(rpcMethodName string, responseGen func() interface{}, trigger func() bool) + AddRequestInterceptor(rpcMethodName string, responseGen func() any, trigger func() bool) // RemoveRequestInterceptor removes the request interceptor for the provided method. RemoveRequestInterceptor(rpcMethodName string) // ReleaseBackedUpRequests releases backed up http requests. diff --git a/testing/middleware/builder/builder.go b/testing/middleware/builder/builder.go index 6833a6d812..2d5c43ef18 100644 --- a/testing/middleware/builder/builder.go +++ b/testing/middleware/builder/builder.go @@ -67,18 +67,18 @@ var ( ) type jsonRPCObject struct { - Jsonrpc string `json:"jsonrpc"` - Method string `json:"method"` - Params []interface{} `json:"params"` - ID uint64 `json:"id"` - Result interface{} `json:"result"` + Jsonrpc string `json:"jsonrpc"` + Method string `json:"method"` + Params []any `json:"params"` + ID uint64 `json:"id"` + Result any `json:"result"` } type ForkchoiceUpdatedResponse struct { - Jsonrpc string `json:"jsonrpc"` - Method string `json:"method"` - Params []interface{} `json:"params"` - ID uint64 `json:"id"` + Jsonrpc string `json:"jsonrpc"` + Method string `json:"method"` + Params []any `json:"params"` + ID uint64 `json:"id"` Result struct { Status *v1.PayloadStatus `json:"payloadStatus"` PayloadId *v1.PayloadIDBytes `json:"payloadId"` @@ -728,7 +728,7 @@ var errInvalidTypeConversion = errors.New("unable to translate between api and f // This involves serializing the execution payload value so that the abstract payload envelope can be used. func ExecutionPayloadResponseFromData(v int, ed interfaces.ExecutionData, bundle *v1.BlobsBundle) (*builderAPI.ExecutionPayloadResponse, error) { pb := ed.Proto() - var data interface{} + var data any var err error ver := version.String(v) switch pbStruct := pb.(type) { diff --git a/testing/middleware/engine-api-proxy/proxy.go b/testing/middleware/engine-api-proxy/proxy.go index c01b9033f1..56ebfef580 100644 --- a/testing/middleware/engine-api-proxy/proxy.go +++ b/testing/middleware/engine-api-proxy/proxy.go @@ -26,15 +26,15 @@ var ( ) type jsonRPCObject struct { - Jsonrpc string `json:"jsonrpc"` - Method string `json:"method"` - Params []interface{} `json:"params"` - ID uint64 `json:"id"` - Result interface{} `json:"result"` + Jsonrpc string `json:"jsonrpc"` + Method string `json:"method"` + Params []any `json:"params"` + ID uint64 `json:"id"` + Result any `json:"result"` } type interceptorConfig struct { - responseGen func() interface{} + responseGen func() any trigger func() bool } @@ -128,7 +128,7 @@ func (p *Proxy) ServeHTTP(w http.ResponseWriter, r *http.Request) { // AddRequestInterceptor for a desired json-rpc method by specifying a custom response // and a function that checks if the interceptor should be triggered. -func (p *Proxy) AddRequestInterceptor(rpcMethodName string, response func() interface{}, trigger func() bool) { +func (p *Proxy) AddRequestInterceptor(rpcMethodName string, response func() any, trigger func() bool) { p.lock.Lock() defer p.lock.Unlock() p.cfg.logger.Infof("Adding in interceptor for method %s", rpcMethodName) diff --git a/testing/middleware/engine-api-proxy/proxy_test.go b/testing/middleware/engine-api-proxy/proxy_test.go index 38bc7fce69..1c20ac7a39 100644 --- a/testing/middleware/engine-api-proxy/proxy_test.go +++ b/testing/middleware/engine-api-proxy/proxy_test.go @@ -113,7 +113,7 @@ func TestProxy_CustomInterceptors(t *testing.T) { // RPC method to intercept. 
proxy.AddRequestInterceptor( method, - func() interface{} { + func() any { return &syncingResponse{Syncing: false} }, // Custom response. func() bool { @@ -162,7 +162,7 @@ func TestProxy_CustomInterceptors(t *testing.T) { method := "engine_newPayloadV1" // RPC method to intercept. - wantInterceptedResponse := func() interface{} { + wantInterceptedResponse := func() any { return &engineResponse{BlockHash: common.BytesToHash([]byte("bar"))} } conditional := false @@ -229,7 +229,7 @@ func TestProxy_CustomInterceptors(t *testing.T) { method := "engine_newPayloadV1" // RPC method to intercept. - wantInterceptedResponse := func() interface{} { + wantInterceptedResponse := func() any { return &engineResponse{BlockHash: common.BytesToHash([]byte("bar"))} } proxy.AddRequestInterceptor( @@ -295,13 +295,13 @@ func Test_isEngineAPICall(t *testing.T) { } } -func destinationServerSetup(t *testing.T, response interface{}) *httptest.Server { +func destinationServerSetup(t *testing.T, response any) *httptest.Server { return httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { w.Header().Set("Content-Type", "application/json") defer func() { require.NoError(t, r.Body.Close()) }() - resp := map[string]interface{}{ + resp := map[string]any{ "jsonrpc": "2.0", "id": 1, "result": response, diff --git a/testing/require/requires.go b/testing/require/requires.go index 98cb8d6702..a4f5caa2d3 100644 --- a/testing/require/requires.go +++ b/testing/require/requires.go @@ -6,12 +6,12 @@ import ( ) // Equal compares values using comparison operator. -func Equal(tb assertions.AssertionTestingTB, expected, actual interface{}, msg ...interface{}) { +func Equal(tb assertions.AssertionTestingTB, expected, actual any, msg ...any) { assertions.Equal(tb.Fatalf, expected, actual, msg...) } // NotEqual compares values using comparison operator. -func NotEqual(tb assertions.AssertionTestingTB, expected, actual interface{}, msg ...interface{}) { +func NotEqual(tb assertions.AssertionTestingTB, expected, actual any, msg ...any) { assertions.NotEqual(tb.Fatalf, expected, actual, msg...) } @@ -19,7 +19,7 @@ func NotEqual(tb assertions.AssertionTestingTB, expected, actual interface{}, ms // NOTE: this function does not work for checking arrays/slices or maps of protobuf messages. // For arrays/slices, please use DeepSSZEqual. // For maps, please iterate through and compare the individual keys and values. -func DeepEqual(tb assertions.AssertionTestingTB, expected, actual interface{}, msg ...interface{}) { +func DeepEqual(tb assertions.AssertionTestingTB, expected, actual any, msg ...any) { assertions.DeepEqual(tb.Fatalf, expected, actual, msg...) } @@ -27,63 +27,63 @@ func DeepEqual(tb assertions.AssertionTestingTB, expected, actual interface{}, m // NOTE: this function does not work for checking arrays/slices or maps of protobuf messages. // For arrays/slices, please use DeepNotSSZEqual. // For maps, please iterate through and compare the individual keys and values. -func DeepNotEqual(tb assertions.AssertionTestingTB, expected, actual interface{}, msg ...interface{}) { +func DeepNotEqual(tb assertions.AssertionTestingTB, expected, actual any, msg ...any) { assertions.DeepNotEqual(tb.Fatalf, expected, actual, msg...) } // DeepSSZEqual compares values using DeepEqual. 
-func DeepSSZEqual(tb assertions.AssertionTestingTB, expected, actual interface{}, msg ...interface{}) { +func DeepSSZEqual(tb assertions.AssertionTestingTB, expected, actual any, msg ...any) { assertions.DeepSSZEqual(tb.Fatalf, expected, actual, msg...) } // DeepNotSSZEqual compares values using DeepEqual. -func DeepNotSSZEqual(tb assertions.AssertionTestingTB, expected, actual interface{}, msg ...interface{}) { +func DeepNotSSZEqual(tb assertions.AssertionTestingTB, expected, actual any, msg ...any) { assertions.DeepNotSSZEqual(tb.Fatalf, expected, actual, msg...) } // NoError asserts that error is nil. -func NoError(tb assertions.AssertionTestingTB, err error, msg ...interface{}) { +func NoError(tb assertions.AssertionTestingTB, err error, msg ...any) { assertions.NoError(tb.Fatalf, err, msg...) } // ErrorContains asserts that actual error contains wanted message. -func ErrorContains(tb assertions.AssertionTestingTB, want string, err error, msg ...interface{}) { +func ErrorContains(tb assertions.AssertionTestingTB, want string, err error, msg ...any) { assertions.ErrorContains(tb.Fatalf, want, err, msg...) } // IsNil asserts that the observed value is nil. -func IsNil(tb assertions.AssertionTestingTB, got interface{}, msg ...interface{}) { +func IsNil(tb assertions.AssertionTestingTB, got any, msg ...any) { assertions.IsNil(tb.Fatalf, got, msg...) } // NotNil asserts that passed value is not nil. -func NotNil(tb assertions.AssertionTestingTB, obj interface{}, msg ...interface{}) { +func NotNil(tb assertions.AssertionTestingTB, obj any, msg ...any) { assertions.NotNil(tb.Fatalf, obj, msg...) } // LogsContain checks that the desired string is a subset of the current log output. -func LogsContain(tb assertions.AssertionTestingTB, hook *test.Hook, want string, msg ...interface{}) { +func LogsContain(tb assertions.AssertionTestingTB, hook *test.Hook, want string, msg ...any) { assertions.LogsContain(tb.Fatalf, hook, want, true, msg...) } // LogsDoNotContain is the inverse check of LogsContain. -func LogsDoNotContain(tb assertions.AssertionTestingTB, hook *test.Hook, want string, msg ...interface{}) { +func LogsDoNotContain(tb assertions.AssertionTestingTB, hook *test.Hook, want string, msg ...any) { assertions.LogsContain(tb.Fatalf, hook, want, false, msg...) } // NotEmpty checks that the object fields are not empty. This method also checks all of the // pointer fields to ensure none of those fields are empty. -func NotEmpty(tb assertions.AssertionTestingTB, obj interface{}, msg ...interface{}) { +func NotEmpty(tb assertions.AssertionTestingTB, obj any, msg ...any) { assertions.NotEmpty(tb.Fatalf, obj, msg...) } // ErrorIs uses Errors.Is to recursively unwrap err looking for target in the chain. // If any error in the chain matches target, the assertion will pass. -func ErrorIs(tb assertions.AssertionTestingTB, err, target error, msg ...interface{}) { +func ErrorIs(tb assertions.AssertionTestingTB, err, target error, msg ...any) { assertions.ErrorIs(tb.Fatalf, err, target, msg) } // StringContains asserts that actual string contains expected message. 
-func StringContains(tb assertions.AssertionTestingTB, expected, actual string, msg ...interface{}) { +func StringContains(tb assertions.AssertionTestingTB, expected, actual string, msg ...any) { assertions.StringContains(tb.Fatalf, expected, actual, true, msg) } diff --git a/testing/slasher/simulator/attestation_generator.go b/testing/slasher/simulator/attestation_generator.go index aa2a89f0a6..05ceadc3b3 100644 --- a/testing/slasher/simulator/attestation_generator.go +++ b/testing/slasher/simulator/attestation_generator.go @@ -55,10 +55,7 @@ func (s *Simulator) generateAttestationsForSlot(ctx context.Context, ver int, sl valsPerAttestation := uint64(math.Floor(s.srvConfig.Params.AggregationPercent * float64(valsPerCommittee))) for i := startIdx; i < endIdx; i += valsPerAttestation { - attEndIdx := i + valsPerAttestation - if attEndIdx >= endIdx { - attEndIdx = endIdx - } + attEndIdx := min(i+valsPerAttestation, endIdx) indices := make([]uint64, 0, valsPerAttestation) for idx := i; idx < attEndIdx; idx++ { indices = append(indices, idx) } diff --git a/testing/spectest/shared/altair/ssz_static/ssz_static.go b/testing/spectest/shared/altair/ssz_static/ssz_static.go index 0924bda4dc..4a135fcddb 100644 --- a/testing/spectest/shared/altair/ssz_static/ssz_static.go +++ b/testing/spectest/shared/altair/ssz_static/ssz_static.go @@ -17,10 +17,10 @@ func RunSSZStaticTests(t *testing.T, config string) { common.RunSSZStaticTests(t, config, "altair", unmarshalledSSZ, customHtr) } -func customHtr(t *testing.T, htrs []common.HTR, object interface{}) []common.HTR { +func customHtr(t *testing.T, htrs []common.HTR, object any) []common.HTR { switch object.(type) { case *ethpb.BeaconStateAltair: - htrs = append(htrs, func(s interface{}) ([32]byte, error) { + htrs = append(htrs, func(s any) ([32]byte, error) { beaconState, err := state_native.InitializeFromProtoAltair(s.(*ethpb.BeaconStateAltair)) require.NoError(t, err) return beaconState.HashTreeRoot(context.Background()) @@ -30,8 +30,8 @@ func customHtr(t *testing.T, htrs []common.HTR, object interface{}) []common.HTR } // unmarshalledSSZ unmarshalls serialized input. -func unmarshalledSSZ(t *testing.T, serializedBytes []byte, folderName string) (interface{}, error) { - var obj interface{} +func unmarshalledSSZ(t *testing.T, serializedBytes []byte, folderName string) (any, error) { + var obj any switch folderName { case "Attestation": obj = &ethpb.Attestation{} diff --git a/testing/spectest/shared/bellatrix/ssz_static/ssz_static.go b/testing/spectest/shared/bellatrix/ssz_static/ssz_static.go index 718670c317..dac59f8450 100644 --- a/testing/spectest/shared/bellatrix/ssz_static/ssz_static.go +++ b/testing/spectest/shared/bellatrix/ssz_static/ssz_static.go @@ -18,10 +18,10 @@ func RunSSZStaticTests(t *testing.T, config string) { common.RunSSZStaticTests(t, config, "bellatrix", unmarshalledSSZ, customHtr) } -func customHtr(t *testing.T, htrs []common.HTR, object interface{}) []common.HTR { +func customHtr(t *testing.T, htrs []common.HTR, object any) []common.HTR { switch object.(type) { case *ethpb.BeaconStateBellatrix: - htrs = append(htrs, func(s interface{}) ([32]byte, error) { + htrs = append(htrs, func(s any) ([32]byte, error) { beaconState, err := state_native.InitializeFromProtoBellatrix(s.(*ethpb.BeaconStateBellatrix)) require.NoError(t, err) return beaconState.HashTreeRoot(context.Background()) @@ -31,8 +31,8 @@ func customHtr(t *testing.T, htrs []common.HTR, object interface{}) []common.HTR } // unmarshalledSSZ unmarshalls serialized input. 
-func unmarshalledSSZ(t *testing.T, serializedBytes []byte, folderName string) (interface{}, error) { - var obj interface{} +func unmarshalledSSZ(t *testing.T, serializedBytes []byte, folderName string) (any, error) { + var obj any switch folderName { case "ExecutionPayload": obj = &enginev1.ExecutionPayload{} diff --git a/testing/spectest/shared/capella/ssz_static/ssz_static.go b/testing/spectest/shared/capella/ssz_static/ssz_static.go index 1976e9b3e6..28ef7fc1aa 100644 --- a/testing/spectest/shared/capella/ssz_static/ssz_static.go +++ b/testing/spectest/shared/capella/ssz_static/ssz_static.go @@ -18,10 +18,10 @@ func RunSSZStaticTests(t *testing.T, config string) { common.RunSSZStaticTests(t, config, "capella", unmarshalledSSZ, customHtr) } -func customHtr(t *testing.T, htrs []common.HTR, object interface{}) []common.HTR { +func customHtr(t *testing.T, htrs []common.HTR, object any) []common.HTR { switch object.(type) { case *ethpb.BeaconStateCapella: - htrs = append(htrs, func(s interface{}) ([32]byte, error) { + htrs = append(htrs, func(s any) ([32]byte, error) { beaconState, err := state_native.InitializeFromProtoCapella(s.(*ethpb.BeaconStateCapella)) require.NoError(t, err) return beaconState.HashTreeRoot(context.Background()) @@ -31,8 +31,8 @@ func customHtr(t *testing.T, htrs []common.HTR, object interface{}) []common.HTR } // unmarshalledSSZ unmarshalls serialized input. -func unmarshalledSSZ(t *testing.T, serializedBytes []byte, folderName string) (interface{}, error) { - var obj interface{} +func unmarshalledSSZ(t *testing.T, serializedBytes []byte, folderName string) (any, error) { + var obj any switch folderName { case "ExecutionPayload": obj = &enginev1.ExecutionPayloadCapella{} diff --git a/testing/spectest/shared/common/forkchoice/runner.go b/testing/spectest/shared/common/forkchoice/runner.go index 92908590f4..4cb9561f1d 100644 --- a/testing/spectest/shared/common/forkchoice/runner.go +++ b/testing/spectest/shared/common/forkchoice/runner.go @@ -352,7 +352,7 @@ func runDataColumnStep(t *testing.T, } else { numCells := len(kzgs) column := make([][]byte, numCells) - for cellIndex := 0; cellIndex < numCells; cellIndex++ { + for cellIndex := range numCells { cell := make([]byte, 2048) cellStart := cellIndex * 2048 cellEnd := cellStart + 2048 diff --git a/testing/spectest/shared/common/ssz_static/ssz_static.go b/testing/spectest/shared/common/ssz_static/ssz_static.go index 459722c0e0..74954a3fb8 100644 --- a/testing/spectest/shared/common/ssz_static/ssz_static.go +++ b/testing/spectest/shared/common/ssz_static/ssz_static.go @@ -56,7 +56,7 @@ func RunSSZStaticTests(t *testing.T, config, forkOrPhase string, unmarshaller Un // All types support fastssz generated code, but may also include a custom HTR method. 
var htrs []HTR - htrs = append(htrs, func(s interface{}) ([32]byte, error) { + htrs = append(htrs, func(s any) ([32]byte, error) { sszObj, ok := s.(fssz.HashRoot) if !ok { return [32]byte{}, errors.New("could not get hash root, not compatible object") } diff --git a/testing/spectest/shared/common/ssz_static/ssz_static_example_test.go b/testing/spectest/shared/common/ssz_static/ssz_static_example_test.go index 7fa24b2db6..4ae469f961 100644 --- a/testing/spectest/shared/common/ssz_static/ssz_static_example_test.go +++ b/testing/spectest/shared/common/ssz_static/ssz_static_example_test.go @@ -16,8 +16,8 @@ import ( func ExampleRunSSZStaticTests() { // Define an unmarshaller to select the correct go type based on the string // name provided in spectests and then populate it with the serialized bytes. - unmarshaller := func(t *testing.T, serializedBytes []byte, objectName string) (interface{}, error) { - var obj interface{} + unmarshaller := func(t *testing.T, serializedBytes []byte, objectName string) (any, error) { + var obj any switch objectName { case "Attestation": obj = &ethpb.Attestation{} @@ -45,10 +45,10 @@ func ExampleRunSSZStaticTests() { // This argument may be nil if your test does not require custom HTR methods. // Most commonly, this is used when a handwritten HTR method with specialized caching // is used and you want to ensure it passes spectests. - customHTR := func(t *testing.T, htrs []common.HTR, object interface{}) []common.HTR { + customHTR := func(t *testing.T, htrs []common.HTR, object any) []common.HTR { switch object.(type) { case *ethpb.BeaconState: - htrs = append(htrs, func(s interface{}) ([32]byte, error) { + htrs = append(htrs, func(s any) ([32]byte, error) { beaconState, err := state_native.InitializeFromProtoPhase0(s.(*ethpb.BeaconState)) require.NoError(t, err) return beaconState.HashTreeRoot(context.TODO()) diff --git a/testing/spectest/shared/common/ssz_static/types.go b/testing/spectest/shared/common/ssz_static/types.go index 7d509a36f5..ca70c9fac3 100644 --- a/testing/spectest/shared/common/ssz_static/types.go +++ b/testing/spectest/shared/common/ssz_static/types.go @@ -5,7 +5,7 @@ import ( ) // HTR is the HashTreeRoot function signature. -type HTR func(interface{}) ([32]byte, error) +type HTR func(any) ([32]byte, error) // SSZRoots is the format used to read spectest test data. type SSZRoots struct { @@ -15,8 +15,8 @@ type SSZRoots struct { // Unmarshaller determines the correct type per ObjectName and then hydrates the object from the // serializedBytes. This method may call t.Skip if the type is not supported. -type Unmarshaller func(t *testing.T, serializedBytes []byte, objectName string) (interface{}, error) +type Unmarshaller func(t *testing.T, serializedBytes []byte, objectName string) (any, error) // CustomHTRAdder adds any custom HTR methods for the given object. This method should return a HTR // slice with the custom HTR method applied. 
-type CustomHTRAdder func(t *testing.T, htrs []HTR, object interface{}) []HTR +type CustomHTRAdder func(t *testing.T, htrs []HTR, object any) []HTR diff --git a/testing/spectest/shared/deneb/ssz_static/ssz_static.go b/testing/spectest/shared/deneb/ssz_static/ssz_static.go index f0742bde84..347fc69f37 100644 --- a/testing/spectest/shared/deneb/ssz_static/ssz_static.go +++ b/testing/spectest/shared/deneb/ssz_static/ssz_static.go @@ -18,13 +18,13 @@ func RunSSZStaticTests(t *testing.T, config string) { common.RunSSZStaticTests(t, config, "deneb", UnmarshalledSSZ, customHtr) } -func customHtr(t *testing.T, htrs []common.HTR, object interface{}) []common.HTR { +func customHtr(t *testing.T, htrs []common.HTR, object any) []common.HTR { _, ok := object.(*ethpb.BeaconStateDeneb) if !ok { return htrs } - htrs = append(htrs, func(s interface{}) ([32]byte, error) { + htrs = append(htrs, func(s any) ([32]byte, error) { beaconState, err := state_native.InitializeFromProtoDeneb(s.(*ethpb.BeaconStateDeneb)) require.NoError(t, err) return beaconState.HashTreeRoot(context.Background()) @@ -33,8 +33,8 @@ func customHtr(t *testing.T, htrs []common.HTR, object interface{}) []common.HTR } // UnmarshalledSSZ unmarshalls serialized input. -func UnmarshalledSSZ(t *testing.T, serializedBytes []byte, folderName string) (interface{}, error) { - var obj interface{} +func UnmarshalledSSZ(t *testing.T, serializedBytes []byte, folderName string) (any, error) { + var obj any switch folderName { case "ExecutionPayload": obj = &enginev1.ExecutionPayloadDeneb{} diff --git a/testing/spectest/shared/electra/ssz_static/ssz_static.go b/testing/spectest/shared/electra/ssz_static/ssz_static.go index a0d18e75a5..6fc77f552f 100644 --- a/testing/spectest/shared/electra/ssz_static/ssz_static.go +++ b/testing/spectest/shared/electra/ssz_static/ssz_static.go @@ -18,13 +18,13 @@ func RunSSZStaticTests(t *testing.T, config string) { common.RunSSZStaticTests(t, config, "electra", UnmarshalledSSZ, customHtr) } -func customHtr(t *testing.T, htrs []common.HTR, object interface{}) []common.HTR { +func customHtr(t *testing.T, htrs []common.HTR, object any) []common.HTR { _, ok := object.(*ethpb.BeaconStateElectra) if !ok { return htrs } - htrs = append(htrs, func(s interface{}) ([32]byte, error) { + htrs = append(htrs, func(s any) ([32]byte, error) { beaconState, err := state_native.InitializeFromProtoElectra(s.(*ethpb.BeaconStateElectra)) require.NoError(t, err) return beaconState.HashTreeRoot(context.Background()) @@ -33,8 +33,8 @@ func customHtr(t *testing.T, htrs []common.HTR, object interface{}) []common.HTR } // UnmarshalledSSZ unmarshalls serialized input. 
-func UnmarshalledSSZ(t *testing.T, serializedBytes []byte, folderName string) (interface{}, error) { - var obj interface{} +func UnmarshalledSSZ(t *testing.T, serializedBytes []byte, folderName string) (any, error) { + var obj any switch folderName { case "ExecutionPayload": obj = &enginev1.ExecutionPayloadDeneb{} diff --git a/testing/spectest/shared/fulu/ssz_static/ssz_static.go b/testing/spectest/shared/fulu/ssz_static/ssz_static.go index 5c65791c30..b13ce5686b 100644 --- a/testing/spectest/shared/fulu/ssz_static/ssz_static.go +++ b/testing/spectest/shared/fulu/ssz_static/ssz_static.go @@ -18,13 +18,13 @@ func RunSSZStaticTests(t *testing.T, config string) { common.RunSSZStaticTests(t, config, "fulu", UnmarshalledSSZ, customHtr) } -func customHtr(t *testing.T, htrs []common.HTR, object interface{}) []common.HTR { +func customHtr(t *testing.T, htrs []common.HTR, object any) []common.HTR { _, ok := object.(*ethpb.BeaconStateFulu) if !ok { return htrs } - htrs = append(htrs, func(s interface{}) ([32]byte, error) { + htrs = append(htrs, func(s any) ([32]byte, error) { beaconState, err := state_native.InitializeFromProtoFulu(s.(*ethpb.BeaconStateFulu)) require.NoError(t, err) return beaconState.HashTreeRoot(context.Background()) @@ -33,8 +33,8 @@ func customHtr(t *testing.T, htrs []common.HTR, object interface{}) []common.HTR } // UnmarshalledSSZ unmarshalls serialized input. -func UnmarshalledSSZ(t *testing.T, serializedBytes []byte, folderName string) (interface{}, error) { - var obj interface{} +func UnmarshalledSSZ(t *testing.T, serializedBytes []byte, folderName string) (any, error) { + var obj any switch folderName { case "ExecutionPayload": obj = &enginev1.ExecutionPayloadDeneb{} diff --git a/testing/spectest/shared/gloas/ssz_static/ssz_static.go b/testing/spectest/shared/gloas/ssz_static/ssz_static.go index db668752d7..0fea08594a 100644 --- a/testing/spectest/shared/gloas/ssz_static/ssz_static.go +++ b/testing/spectest/shared/gloas/ssz_static/ssz_static.go @@ -15,15 +15,15 @@ func RunSSZStaticTests(t *testing.T, config string) { common.RunSSZStaticTests(t, config, "gloas", unmarshalledSSZ, customHtr) } -func customHtr(t *testing.T, htrs []common.HTR, object interface{}) []common.HTR { +func customHtr(t *testing.T, htrs []common.HTR, object any) []common.HTR { // TODO: Add custom HTR for BeaconStateGloas when state-native support is implemented // For now, only use the default fastssz HTR methods return htrs } // unmarshalledSSZ unmarshalls serialized input. 
-func unmarshalledSSZ(t *testing.T, serializedBytes []byte, folderName string) (interface{}, error) { - var obj interface{} +func unmarshalledSSZ(t *testing.T, serializedBytes []byte, folderName string) (any, error) { + var obj any switch folderName { // Gloas specific types diff --git a/testing/spectest/shared/phase0/ssz_static/ssz_static.go b/testing/spectest/shared/phase0/ssz_static/ssz_static.go index 5220ba08db..793e6447a5 100644 --- a/testing/spectest/shared/phase0/ssz_static/ssz_static.go +++ b/testing/spectest/shared/phase0/ssz_static/ssz_static.go @@ -17,10 +17,10 @@ func RunSSZStaticTests(t *testing.T, config string) { common.RunSSZStaticTests(t, config, "phase0", unmarshalledSSZ, customHtr) } -func customHtr(t *testing.T, htrs []common.HTR, object interface{}) []common.HTR { +func customHtr(t *testing.T, htrs []common.HTR, object any) []common.HTR { switch object.(type) { case *ethpb.BeaconState: - htrs = append(htrs, func(s interface{}) ([32]byte, error) { + htrs = append(htrs, func(s any) ([32]byte, error) { beaconState, err := state_native.InitializeFromProtoPhase0(s.(*ethpb.BeaconState)) require.NoError(t, err) return beaconState.HashTreeRoot(context.TODO()) @@ -30,8 +30,8 @@ func customHtr(t *testing.T, htrs []common.HTR, object interface{}) []common.HTR } // unmarshalledSSZ unmarshalls serialized input. -func unmarshalledSSZ(t *testing.T, serializedBytes []byte, objectName string) (interface{}, error) { - var obj interface{} +func unmarshalledSSZ(t *testing.T, serializedBytes []byte, objectName string) (any, error) { + var obj any switch objectName { case "Attestation": obj = &ethpb.Attestation{} diff --git a/testing/spectest/utils/utils.go b/testing/spectest/utils/utils.go index 5580936d7e..4ef52a4d68 100644 --- a/testing/spectest/utils/utils.go +++ b/testing/spectest/utils/utils.go @@ -22,7 +22,7 @@ var json = jsoniter.Config{ // UnmarshalYaml using a customized json encoder that supports "spec-name" // override tag. 
-func UnmarshalYaml(y []byte, dest interface{}) error { +func UnmarshalYaml(y []byte, dest any) error { j, err := yaml.YAMLToJSON(y) if err != nil { return err diff --git a/testing/util/altair.go b/testing/util/altair.go index a951c095f9..0d4fce1d99 100644 --- a/testing/util/altair.go +++ b/testing/util/altair.go @@ -88,7 +88,7 @@ func buildGenesisBeaconState(genesisTime uint64, preState state.BeaconState, eth } randaoMixes := make([][]byte, params.BeaconConfig().EpochsPerHistoricalVector) - for i := 0; i < len(randaoMixes); i++ { + for i := range randaoMixes { h := make([]byte, 32) copy(h, eth1Data.BlockHash) randaoMixes[i] = h @@ -97,17 +97,17 @@ func buildGenesisBeaconState(genesisTime uint64, preState state.BeaconState, eth zeroHash := params.BeaconConfig().ZeroHash[:] activeIndexRoots := make([][]byte, params.BeaconConfig().EpochsPerHistoricalVector) - for i := 0; i < len(activeIndexRoots); i++ { + for i := range activeIndexRoots { activeIndexRoots[i] = zeroHash } blockRoots := make([][]byte, params.BeaconConfig().SlotsPerHistoricalRoot) - for i := 0; i < len(blockRoots); i++ { + for i := range blockRoots { blockRoots[i] = zeroHash } stateRoots := make([][]byte, params.BeaconConfig().SlotsPerHistoricalRoot) - for i := 0; i < len(stateRoots); i++ { + for i := range stateRoots { stateRoots[i] = zeroHash } diff --git a/testing/util/bellatrix_state.go b/testing/util/bellatrix_state.go index 7c1f06d83e..1e034d0054 100644 --- a/testing/util/bellatrix_state.go +++ b/testing/util/bellatrix_state.go @@ -93,7 +93,7 @@ func buildGenesisBeaconStateBellatrix(genesisTime time.Time, preState state.Beac } randaoMixes := make([][]byte, params.BeaconConfig().EpochsPerHistoricalVector) - for i := 0; i < len(randaoMixes); i++ { + for i := range randaoMixes { h := make([]byte, 32) copy(h, eth1Data.BlockHash) randaoMixes[i] = h @@ -102,17 +102,17 @@ func buildGenesisBeaconStateBellatrix(genesisTime time.Time, preState state.Beac zeroHash := params.BeaconConfig().ZeroHash[:] activeIndexRoots := make([][]byte, params.BeaconConfig().EpochsPerHistoricalVector) - for i := 0; i < len(activeIndexRoots); i++ { + for i := range activeIndexRoots { activeIndexRoots[i] = zeroHash } blockRoots := make([][]byte, params.BeaconConfig().SlotsPerHistoricalRoot) - for i := 0; i < len(blockRoots); i++ { + for i := range blockRoots { blockRoots[i] = zeroHash } stateRoots := make([][]byte, params.BeaconConfig().SlotsPerHistoricalRoot) - for i := 0; i < len(stateRoots); i++ { + for i := range stateRoots { stateRoots[i] = zeroHash } @@ -137,7 +137,7 @@ func buildGenesisBeaconStateBellatrix(genesisTime time.Time, preState state.Beac } scoresMissing := len(preState.Validators()) - len(scores) if scoresMissing > 0 { - for i := 0; i < scoresMissing; i++ { + for range scoresMissing { scores = append(scores, 0) } } diff --git a/testing/util/block.go b/testing/util/block.go index d3dfbf7de9..b4eaa59d3a 100644 --- a/testing/util/block.go +++ b/testing/util/block.go @@ -279,7 +279,7 @@ func generateProposerSlashings( numSlashings uint64, ) ([]*ethpb.ProposerSlashing, error) { proposerSlashings := make([]*ethpb.ProposerSlashing, numSlashings) - for i := uint64(0); i < numSlashings; i++ { + for i := range numSlashings { proposerIndex, err := randValIndex(bState) if err != nil { return nil, err @@ -407,7 +407,7 @@ func generateAttesterSlashings( ) ([]ethpb.AttSlashing, error) { attesterSlashings := make([]ethpb.AttSlashing, numSlashings) randGen := rand.NewDeterministicGenerator() - for i := uint64(0); i < numSlashings; i++ { + for i := 
range numSlashings { committeeIndex := randGen.Uint64() % helpers.SlotCommitteeCount(uint64(bState.NumValidators())) committee, err := helpers.BeaconCommitteeFromState(context.Background(), bState, bState.Slot(), primitives.CommitteeIndex(committeeIndex)) if err != nil { @@ -625,7 +625,7 @@ func HydrateV1BeaconBlockBody(b *v1.BeaconBlockBody) *v1.BeaconBlockBody { return b } -func SaveBlock(tb assertions.AssertionTestingTB, ctx context.Context, db iface.NoHeadAccessDatabase, b interface{}) interfaces.SignedBeaconBlock { +func SaveBlock(tb assertions.AssertionTestingTB, ctx context.Context, db iface.NoHeadAccessDatabase, b any) interfaces.SignedBeaconBlock { wsb, err := blocks.NewSignedBeaconBlock(b) require.NoError(tb, err) require.NoError(tb, db.SaveBlock(ctx, wsb)) @@ -1293,7 +1293,7 @@ func generateWithdrawals( numWithdrawals uint64, ) ([]*enginev1.Withdrawal, error) { withdrawalRequests := make([]*enginev1.Withdrawal, numWithdrawals) - for i := uint64(0); i < numWithdrawals; i++ { + for i := range numWithdrawals { valIndex, err := randValIndex(bState) if err != nil { return nil, err diff --git a/testing/util/capella_state.go b/testing/util/capella_state.go index 86b6ca96e6..363a25a845 100644 --- a/testing/util/capella_state.go +++ b/testing/util/capella_state.go @@ -91,7 +91,7 @@ func buildGenesisBeaconStateCapella(genesisTime uint64, preState state.BeaconSta } randaoMixes := make([][]byte, params.BeaconConfig().EpochsPerHistoricalVector) - for i := 0; i < len(randaoMixes); i++ { + for i := range randaoMixes { h := make([]byte, 32) copy(h, eth1Data.BlockHash) randaoMixes[i] = h @@ -100,17 +100,17 @@ func buildGenesisBeaconStateCapella(genesisTime uint64, preState state.BeaconSta zeroHash := params.BeaconConfig().ZeroHash[:] activeIndexRoots := make([][]byte, params.BeaconConfig().EpochsPerHistoricalVector) - for i := 0; i < len(activeIndexRoots); i++ { + for i := range activeIndexRoots { activeIndexRoots[i] = zeroHash } blockRoots := make([][]byte, params.BeaconConfig().SlotsPerHistoricalRoot) - for i := 0; i < len(blockRoots); i++ { + for i := range blockRoots { blockRoots[i] = zeroHash } stateRoots := make([][]byte, params.BeaconConfig().SlotsPerHistoricalRoot) - for i := 0; i < len(stateRoots); i++ { + for i := range stateRoots { stateRoots[i] = zeroHash } diff --git a/testing/util/deneb.go b/testing/util/deneb.go index 86d3b0715e..787fbce359 100644 --- a/testing/util/deneb.go +++ b/testing/util/deneb.go @@ -194,7 +194,7 @@ func ExtendBlocksPlusBlobs(t *testing.T, blks []blocks.ROBlock, size int) ([]blo blks = append(blks, blk) } - for i := 0; i < size; i++ { + for range size { prev := blks[len(blks)-1] blk, blb := GenerateTestDenebBlockWithSidecar(t, prev.Root(), prev.Block().Slot()+1, 6) blobs = append(blobs, blb...) 
diff --git a/testing/util/deneb_state.go b/testing/util/deneb_state.go index 31879353f9..c41a68d4c1 100644 --- a/testing/util/deneb_state.go +++ b/testing/util/deneb_state.go @@ -91,7 +91,7 @@ func buildGenesisBeaconStateDeneb(genesisTime uint64, preState state.BeaconState } randaoMixes := make([][]byte, params.BeaconConfig().EpochsPerHistoricalVector) - for i := 0; i < len(randaoMixes); i++ { + for i := range randaoMixes { h := make([]byte, 32) copy(h, eth1Data.BlockHash) randaoMixes[i] = h @@ -100,17 +100,17 @@ func buildGenesisBeaconStateDeneb(genesisTime uint64, preState state.BeaconState zeroHash := params.BeaconConfig().ZeroHash[:] activeIndexRoots := make([][]byte, params.BeaconConfig().EpochsPerHistoricalVector) - for i := 0; i < len(activeIndexRoots); i++ { + for i := range activeIndexRoots { activeIndexRoots[i] = zeroHash } blockRoots := make([][]byte, params.BeaconConfig().SlotsPerHistoricalRoot) - for i := 0; i < len(blockRoots); i++ { + for i := range blockRoots { blockRoots[i] = zeroHash } stateRoots := make([][]byte, params.BeaconConfig().SlotsPerHistoricalRoot) - for i := 0; i < len(stateRoots); i++ { + for i := range stateRoots { stateRoots[i] = zeroHash } diff --git a/testing/util/deposits.go b/testing/util/deposits.go index 3eb567bb04..cc85653139 100644 --- a/testing/util/deposits.go +++ b/testing/util/deposits.go @@ -55,7 +55,7 @@ func DeterministicDepositsAndKeys(numDeposits uint64) ([]*ethpb.Deposit, []bls.S privKeys = append(privKeys, secretKeys[:len(secretKeys)-1]...) // Create the new deposits and add them to the trie. - for i := uint64(0); i < numRequired; i++ { + for i := range numRequired { balance := params.BeaconConfig().MaxEffectiveBalance deposit, err := signedDeposit(secretKeys[i], publicKeys[i].Marshal(), publicKeys[i+1].Marshal(), balance) if err != nil { @@ -123,7 +123,7 @@ func DepositsWithBalance(balances []uint64) ([]*ethpb.Deposit, *trie.SparseMerkl deposits := make([]*ethpb.Deposit, numDeposits) // Create the new deposits and add them to the trie. - for i := uint64(0); i < numDeposits; i++ { + for i := range numDeposits { balance := params.BeaconConfig().MaxEffectiveBalance // lint:ignore uintcast -- test code if len(balances) == int(numDeposits) { @@ -275,7 +275,7 @@ func DeterministicGenesisState(t testing.TB, numValidators uint64) (state.Beacon func DepositTrieFromDeposits(deposits []*ethpb.Deposit) (*trie.SparseMerkleTrie, [][32]byte, error) { encodedDeposits := make([][]byte, len(deposits)) roots := make([][32]byte, len(deposits)) - for i := 0; i < len(encodedDeposits); i++ { + for i := range encodedDeposits { hashedDeposit, err := deposits[i].Data.HashTreeRoot() if err != nil { return nil, [][32]byte{}, errors.Wrap(err, "could not tree hash deposit data") @@ -330,7 +330,7 @@ func DeterministicDepositsAndKeysSameValidator(numDeposits uint64) ([]*ethpb.Dep privKeys = append(privKeys, secretKeys[:len(secretKeys)-1]...) // Create the new deposits and add them to the trie. 
Always use the first validator to create deposit - for i := uint64(0); i < numRequired; i++ { + for i := range numRequired { withdrawalCreds := hash.Hash(publicKeys[1].Marshal()) withdrawalCreds[0] = params.BeaconConfig().BLSWithdrawalPrefixByte diff --git a/testing/util/deposits_test.go b/testing/util/deposits_test.go index c685d8ccc3..51ce54971e 100644 --- a/testing/util/deposits_test.go +++ b/testing/util/deposits_test.go @@ -103,7 +103,7 @@ func TestDepositsWithBalance_MatchesDeterministic(t *testing.T) { entries := 64 resetCache() balances := make([]uint64, entries) - for i := 0; i < entries; i++ { + for i := range entries { balances[i] = params.BeaconConfig().MaxEffectiveBalance } deposits, depositTrie, err := DepositsWithBalance(balances) @@ -116,7 +116,7 @@ func TestDepositsWithBalance_MatchesDeterministic(t *testing.T) { _, determDepositDataRoots, err := DeterministicDepositTrie(entries) require.NoError(t, err) - for i := 0; i < entries; i++ { + for i := range entries { if !proto.Equal(deposits[i], determDeposits[i]) { t.Errorf("Expected deposit %d to match", i) } diff --git a/testing/util/electra_block.go b/testing/util/electra_block.go index 62b11517e9..0d046b060c 100644 --- a/testing/util/electra_block.go +++ b/testing/util/electra_block.go @@ -270,7 +270,7 @@ func generateWithdrawalRequests( numRequests uint64, ) ([]*v1.WithdrawalRequest, error) { withdrawalRequests := make([]*v1.WithdrawalRequest, numRequests) - for i := uint64(0); i < numRequests; i++ { + for i := range numRequests { valIndex, err := randValIndex(bState) if err != nil { return nil, err @@ -310,7 +310,7 @@ func generateDepositRequests( numRequests uint64, ) ([]*v1.DepositRequest, error) { depositRequests := make([]*v1.DepositRequest, numRequests) - for i := uint64(0); i < numRequests; i++ { + for i := range numRequests { valIndex, err := randValIndex(bState) if err != nil { return nil, err @@ -362,7 +362,7 @@ func generateConsolidationRequests( numRequests uint64, ) ([]*v1.ConsolidationRequest, error) { consolidationRequests := make([]*v1.ConsolidationRequest, numRequests) - for i := uint64(0); i < numRequests; i++ { + for i := range numRequests { valIndex, err := randValIndex(bState) if err != nil { return nil, err diff --git a/testing/util/electra_state.go b/testing/util/electra_state.go index 4dac18f127..72077e609c 100644 --- a/testing/util/electra_state.go +++ b/testing/util/electra_state.go @@ -120,7 +120,7 @@ func buildGenesisBeaconStateElectra(genesisTime uint64, preState state.BeaconSta } randaoMixes := make([][]byte, params.BeaconConfig().EpochsPerHistoricalVector) - for i := 0; i < len(randaoMixes); i++ { + for i := range randaoMixes { h := make([]byte, 32) copy(h, eth1Data.BlockHash) randaoMixes[i] = h @@ -129,17 +129,17 @@ func buildGenesisBeaconStateElectra(genesisTime uint64, preState state.BeaconSta zeroHash := params.BeaconConfig().ZeroHash[:] activeIndexRoots := make([][]byte, params.BeaconConfig().EpochsPerHistoricalVector) - for i := 0; i < len(activeIndexRoots); i++ { + for i := range activeIndexRoots { activeIndexRoots[i] = zeroHash } blockRoots := make([][]byte, params.BeaconConfig().SlotsPerHistoricalRoot) - for i := 0; i < len(blockRoots); i++ { + for i := range blockRoots { blockRoots[i] = zeroHash } stateRoots := make([][]byte, params.BeaconConfig().SlotsPerHistoricalRoot) - for i := 0; i < len(stateRoots); i++ { + for i := range stateRoots { stateRoots[i] = zeroHash } diff --git a/testing/util/fulu_state.go b/testing/util/fulu_state.go index 14e3472219..968f27fefa 100644 --- 
a/testing/util/fulu_state.go +++ b/testing/util/fulu_state.go @@ -115,7 +115,7 @@ func buildGenesisBeaconStateFulu(genesisTime uint64, preState state.BeaconState, } randaoMixes := make([][]byte, params.BeaconConfig().EpochsPerHistoricalVector) - for i := 0; i < len(randaoMixes); i++ { + for i := range randaoMixes { h := make([]byte, 32) copy(h, eth1Data.BlockHash) randaoMixes[i] = h @@ -124,17 +124,17 @@ func buildGenesisBeaconStateFulu(genesisTime uint64, preState state.BeaconState, zeroHash := params.BeaconConfig().ZeroHash[:] activeIndexRoots := make([][]byte, params.BeaconConfig().EpochsPerHistoricalVector) - for i := 0; i < len(activeIndexRoots); i++ { + for i := range activeIndexRoots { activeIndexRoots[i] = zeroHash } blockRoots := make([][]byte, params.BeaconConfig().SlotsPerHistoricalRoot) - for i := 0; i < len(blockRoots); i++ { + for i := range blockRoots { blockRoots[i] = zeroHash } stateRoots := make([][]byte, params.BeaconConfig().SlotsPerHistoricalRoot) - for i := 0; i < len(stateRoots); i++ { + for i := range stateRoots { stateRoots[i] = zeroHash } diff --git a/testing/util/helpers.go b/testing/util/helpers.go index 9e55ee07ac..c6161f3d94 100644 --- a/testing/util/helpers.go +++ b/testing/util/helpers.go @@ -42,7 +42,7 @@ func RandaoReveal(beaconState state.ReadOnlyBeaconState, epoch primitives.Epoch, // BlockSignature calculates the post-state root of the block and returns the signature. func BlockSignature( bState state.BeaconState, - block interface{}, + block any, privKeys []bls.SecretKey, ) (bls.Signature, error) { var wsb interfaces.ReadOnlySignedBeaconBlock diff --git a/testing/util/lightclient.go b/testing/util/lightclient.go index 58bbfce572..49ee757c14 100644 --- a/testing/util/lightclient.go +++ b/testing/util/lightclient.go @@ -1285,7 +1285,7 @@ func MockOptimisticUpdate() (interfaces.LightClientOptimisticUpdate, error) { func MockFinalityUpdate() (interfaces.LightClientFinalityUpdate, error) { finalityBranch := make([][]byte, fieldparams.FinalityBranchDepth) - for i := 0; i < len(finalityBranch); i++ { + for i := range finalityBranch { finalityBranch[i] = make([]byte, 32) } diff --git a/testing/util/state.go b/testing/util/state.go index f537e254a7..1118407490 100644 --- a/testing/util/state.go +++ b/testing/util/state.go @@ -514,7 +514,7 @@ func NewBeaconStateFulu(options ...func(state *ethpb.BeaconStateFulu) error) (st // trip testing. func filledByteSlice2D(length, innerLen uint64) [][]byte { b := make([][]byte, length) - for i := uint64(0); i < length; i++ { + for i := range length { b[i] = make([]byte, innerLen) } return b @@ -524,10 +524,10 @@ func filledByteSlice2D(length, innerLen uint64) [][]byte { // Example: 16 becomes 0x00...0f. func PrepareRoots(size int) ([][]byte, error) { roots := make([][]byte, size) - for i := 0; i < size; i++ { + for i := range size { roots[i] = make([]byte, fieldparams.RootLength) } - for j := 0; j < len(roots); j++ { + for j := range roots { // Remove '0x' prefix and left-pad '0' to have 64 chars in total. 
s := fmt.Sprintf("%064s", hexutil.EncodeUint64(uint64(j))[2:]) h, err := hexutil.Decode("0x" + s) diff --git a/third_party/hid/usb.go b/third_party/hid/usb.go index 4aa13353fe..9f54c80022 100644 --- a/third_party/hid/usb.go +++ b/third_party/hid/usb.go @@ -46,7 +46,7 @@ type DeviceInfo struct { Interface int // Raw low level libusb endpoint data for simplified communication - rawDevice interface{} + rawDevice any rawPort *uint8 // Pointer to differentiate between unset and port 0 rawReader *uint8 // Pointer to differentiate between unset and endpoint 0 rawWriter *uint8 // Pointer to differentiate between unset and endpoint 0 diff --git a/third_party/hid/usb_test.go b/third_party/hid/usb_test.go index c05e21785c..7162e9887a 100644 --- a/third_party/hid/usb_test.go +++ b/third_party/hid/usb_test.go @@ -25,12 +25,12 @@ import ( // Tests that HID enumeration can be called concurrently from multiple threads. func TestThreadedEnumerateHid(t *testing.T) { var pend sync.WaitGroup - for i := 0; i < 8; i++ { + for i := range 8 { pend.Add(1) go func(index int) { defer pend.Done() - for j := 0; j < 512; j++ { + for j := range 512 { if _, err := EnumerateHid(uint16(index), 0); err != nil { t.Errorf("thread %d, iter %d: failed to enumerate: %v", index, j, err) } @@ -48,12 +48,12 @@ func TestThreadedEnumerateRaw(t *testing.T) { } // Yay, we can actually test this var pend sync.WaitGroup - for i := 0; i < 8; i++ { + for i := range 8 { pend.Add(1) go func(index int) { defer pend.Done() - for j := 0; j < 512; j++ { + for j := range 512 { if _, err := EnumerateRaw(uint16(index), 0); err != nil { t.Errorf("thread %d, iter %d: failed to enumerate: %v", index, j, err) } @@ -70,12 +70,12 @@ func TestThreadedEnumerate(t *testing.T) { t.Skip("Linux on Travis doesn't have usbfs, skipping test") } var pend sync.WaitGroup - for i := 0; i < 8; i++ { + for i := range 8 { pend.Add(1) go func(index int) { defer pend.Done() - for j := 0; j < 512; j++ { + for j := range 512 { if _, err := Enumerate(uint16(index), 0); err != nil { t.Errorf("thread %d, iter %d: failed to enumerate: %v", index, j, err) } diff --git a/third_party/org_golang_x_tools-deletegopls.patch b/third_party/org_golang_x_tools-deletegopls.patch new file mode 100644 index 0000000000..98512faa8e --- /dev/null +++ b/third_party/org_golang_x_tools-deletegopls.patch @@ -0,0 +1,203162 @@ +diff -urN a/gopls/contributors.txt b/gopls/contributors.txt +--- a/gopls/contributors.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/contributors.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,41 +0,0 @@ +-# This list is used to help generate the GitHub handles in the +-# contributor shout-out of each gopls release note. +-# +-# Use a command such as this to convert a range of git commits: +-# +-# $ join -o 2.2 <(git log --format="format: %aE" gopls/v0.18.1..gopls/v0.19.0-pre.3 | sort -u) \ +-# <(grep '^[^#]' ~/w/xtools/gopls/contributors.txt | sort) +-# +-# Use -v 1 instead of -o 2.2 to detect any missing entries. 
+- +-acehinnnqru@gmail.com acehinnnqru +-adonovan@google.com adonovan +-albertofanjul@gmail.com albfan +-alessandro.arzilli@gmail.com aarzilli +-ashurbekovz23@gmail.com ashurbekovz +-cuong.manhle.vn@gmail.com cuonglm +-dmitshur@golang.org dmitshur +-dneil@google.com neild +-egonelbre@gmail.com egonelbre +-email@ssnk.in shashank-priyadarshi +-ethan.reesor@gmail.com firelizzard18 +-gobot@golang.org gopherbot +-hxjiang@golang.org h9jiang +-imcusg@gmail.com cuishuang +-jacob.b.bailey@gmail.com jakebailey +-jba@google.com jba +-mkalil@google.com madelinekalil +-mrnk3078@gmail.com karamaru-alpha +-nguyenbao1917@gmail.com danztran +-nick.ripley@datadoghq.com nsrip-dd +-pjw@google.com pjweinb +-rfindley@google.com findleyr +-samthanawalla@google.com samthanawalla +-sean@liao.dev seankhliao +-tklauser@distanz.ch tklauser +-veblomqvist@gmail.com vikblom +-weingartner@google.com kwjw +-xieyuschen@gmail.com xieyuschen +- +-# GitHub ID unknown: +-# - shibinxf@gmail.com +diff -urN a/gopls/doc/advanced.md b/gopls/doc/advanced.md +--- a/gopls/doc/advanced.md 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/doc/advanced.md 1969-12-31 18:00:00.000000000 -0600 +@@ -1,59 +0,0 @@ +---- +-title: "Gopls: Advanced topics" +---- +- +-This documentation is for advanced `gopls` users, who may want to test +-unreleased versions or try out special features. +- +-## Installing unreleased versions +- +-To get a specific version of `gopls` (for example, to test a prerelease +-version), run: +- +-```sh +-$ go install golang.org/x/tools/gopls@vX.Y.Z +-``` +- +-Where `vX.Y.Z` is the desired version. +- +-### Unstable versions +- +-To update `gopls` to the latest **unstable** version, use the following +-commands. +- +-```sh +-# Create an empty go.mod file, only for tracking requirements. +-cd $(mktemp -d) +-go mod init gopls-unstable +- +-# Use 'go get' to add requirements and to ensure they work together. +-go get -d golang.org/x/tools/gopls@master golang.org/x/tools@master +- +-go install golang.org/x/tools/gopls +-``` +- +-## Working on the Go source distribution +- +-If you are working on the [Go project] itself, the `go` command that `gopls` +-invokes will have to correspond to the version of the source you are working +-on. That is, if you have checked out the Go project to `$HOME/go`, your `go` +-command should be the `$HOME/go/bin/go` executable that you built with +-`make.bash` or equivalent. +- +-You can achieve this by adding the right version of `go` to your `PATH` +-(`export PATH=$HOME/go/bin:$PATH` on Unix systems) or by configuring your +-editor. +- +-To work on both `std` and `cmd` simultaneously, add a `go.work` file to +-`GOROOT/src`: +- +-``` +-cd $(go env GOROOT)/src +-go work init . cmd +-``` +- +-Note that you must work inside the `GOROOT/src` subdirectory, as the `go` +-command does not recognize `go.work` files in a parent of `GOROOT/src` +-(https://go.dev/issue/59429). +- +-[Go project]: https://go.googlesource.com/go +diff -urN a/gopls/doc/analyzers.md b/gopls/doc/analyzers.md +--- a/gopls/doc/analyzers.md 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/doc/analyzers.md 1969-12-31 18:00:00.000000000 -0600 +@@ -1,4358 +0,0 @@ +---- +-title: "Gopls: Analyzers" +---- +- +- +- +-Gopls contains a driver for pluggable, modular static +-[analyzers](https://pkg.go.dev/golang.org/x/tools/go/analysis#hdr-Analyzer), +-such as those used by [go vet](https://pkg.go.dev/cmd/vet). +- +-Most analyzers report mistakes in your code; +-some suggest "quick fixes" that can be directly applied in your editor. 
+-Every time you edit your code, gopls re-runs its analyzers. +-Analyzer diagnostics help you detect bugs sooner, +-before you run your tests, or even before you save your files. +- +-This document describes the suite of analyzers available in gopls, +-which aggregates analyzers from a variety of sources: +- +-- all the usual bug-finding analyzers from the `go vet` suite (e.g. `printf`; see [`go tool vet help`](https://pkg.go.dev/cmd/vet) for the complete list); +-- a number of analyzers with more substantial dependencies that prevent them from being used in `go vet` (e.g. `nilness`); +-- analyzers that augment compilation errors by suggesting quick fixes to common mistakes (e.g. `fillreturns`); and +-- a handful of analyzers that suggest possible style improvements (e.g. `simplifyrange`). +- +-To enable or disable analyzers, use the [analyses](settings.md#analyses) setting. +- +-In addition, gopls includes the [`staticcheck` suite](https://staticcheck.dev/docs/checks). +-When the [`staticcheck`](settings.md#staticcheck`) boolean option is +-unset, slightly more than half of these analyzers are enabled by +-default; this subset has been chosen for precision and efficiency. Set +-`staticcheck` to `true` to enable the complete set, or to `false` to +-disable the complete set. +- +-Staticcheck analyzers, like all other analyzers, can be explicitly +-enabled or disabled using the `analyzers` configuration setting; this +-setting takes precedence over the `staticcheck` setting, so, +-regardless of what value of `staticcheck` you use (true/false/unset), +-you can make adjustments to your preferred set of analyzers. +- +- +- +- +-## `QF1001`: Apply De Morgan's law +- +-Available since +- +- 2021.1 +- +- +-Default: off. Enable by setting `"analyses": {"QF1001": true}`. +- +-Package documentation: [QF1001](https://staticcheck.dev/docs/checks/#QF1001) +- +- +-## `QF1002`: Convert untagged switch to tagged switch +- +-An untagged switch that compares a single variable against a series of values can be replaced with a tagged switch. +- +-Before: +- +- switch { +- case x == 1 || x == 2, x == 3: +- ... +- case x == 4: +- ... +- default: +- ... +- } +- +-After: +- +- switch x { +- case 1, 2, 3: +- ... +- case 4: +- ... +- default: +- ... +- } +- +-Available since +- +- 2021.1 +- +- +-Default: on. +- +-Package documentation: [QF1002](https://staticcheck.dev/docs/checks/#QF1002) +- +- +-## `QF1003`: Convert if/else-if chain to tagged switch +- +-A series of if/else-if checks comparing the same variable against values can be replaced with a tagged switch. +- +-Before: +- +- if x == 1 || x == 2 { +- ... +- } else if x == 3 { +- ... +- } else { +- ... +- } +- +-After: +- +- switch x { +- case 1, 2: +- ... +- case 3: +- ... +- default: +- ... +- } +- +-Available since +- +- 2021.1 +- +- +-Default: on. +- +-Package documentation: [QF1003](https://staticcheck.dev/docs/checks/#QF1003) +- +- +-## `QF1004`: Use strings.ReplaceAll instead of strings.Replace with n == -1 +- +-Available since +- +- 2021.1 +- +- +-Default: on. +- +-Package documentation: [QF1004](https://staticcheck.dev/docs/checks/#QF1004) +- +- +-## `QF1005`: Expand call to math.Pow +- +-Some uses of math.Pow can be simplified to basic multiplication. +- +-Before: +- +- math.Pow(x, 2) +- +-After: +- +- x * x +- +-Available since +- +- 2021.1 +- +- +-Default: off. Enable by setting `"analyses": {"QF1005": true}`. 
+- +-Package documentation: [QF1005](https://staticcheck.dev/docs/checks/#QF1005) +- +- +-## `QF1006`: Lift if+break into loop condition +- +-Before: +- +- for { +- if done { +- break +- } +- ... +- } +- +-After: +- +- for !done { +- ... +- } +- +-Available since +- +- 2021.1 +- +- +-Default: off. Enable by setting `"analyses": {"QF1006": true}`. +- +-Package documentation: [QF1006](https://staticcheck.dev/docs/checks/#QF1006) +- +- +-## `QF1007`: Merge conditional assignment into variable declaration +- +-Before: +- +- x := false +- if someCondition { +- x = true +- } +- +-After: +- +- x := someCondition +- +-Available since +- +- 2021.1 +- +- +-Default: off. Enable by setting `"analyses": {"QF1007": true}`. +- +-Package documentation: [QF1007](https://staticcheck.dev/docs/checks/#QF1007) +- +- +-## `QF1008`: Omit embedded fields from selector expression +- +-Available since +- +- 2021.1 +- +- +-Default: off. Enable by setting `"analyses": {"QF1008": true}`. +- +-Package documentation: [QF1008](https://staticcheck.dev/docs/checks/#QF1008) +- +- +-## `QF1009`: Use time.Time.Equal instead of == operator +- +-Available since +- +- 2021.1 +- +- +-Default: on. +- +-Package documentation: [QF1009](https://staticcheck.dev/docs/checks/#QF1009) +- +- +-## `QF1010`: Convert slice of bytes to string when printing it +- +-Available since +- +- 2021.1 +- +- +-Default: on. +- +-Package documentation: [QF1010](https://staticcheck.dev/docs/checks/#QF1010) +- +- +-## `QF1011`: Omit redundant type from variable declaration +- +-Available since +- +- 2021.1 +- +- +-Default: off. Enable by setting `"analyses": {"QF1011": true}`. +- +-Package documentation: [QF1011](https://staticcheck.dev/docs/checks/#) +- +- +-## `QF1012`: Use fmt.Fprintf(x, ...) instead of x.Write(fmt.Sprintf(...)) +- +-Available since +- +- 2022.1 +- +- +-Default: on. +- +-Package documentation: [QF1012](https://staticcheck.dev/docs/checks/#QF1012) +- +- +-## `S1000`: Use plain channel send or receive instead of single-case select +- +-Select statements with a single case can be replaced with a simple send or receive. +- +-Before: +- +- select { +- case x := <-ch: +- fmt.Println(x) +- } +- +-After: +- +- x := <-ch +- fmt.Println(x) +- +-Available since +- +- 2017.1 +- +- +-Default: on. +- +-Package documentation: [S1000](https://staticcheck.dev/docs/checks/#S1000) +- +- +-## `S1001`: Replace for loop with call to copy +- +-Use copy() for copying elements from one slice to another. For arrays of identical size, you can use simple assignment. +- +-Before: +- +- for i, x := range src { +- dst[i] = x +- } +- +-After: +- +- copy(dst, src) +- +-Available since +- +- 2017.1 +- +- +-Default: on. +- +-Package documentation: [S1001](https://staticcheck.dev/docs/checks/#S1001) +- +- +-## `S1002`: Omit comparison with boolean constant +- +-Before: +- +- if x == true {} +- +-After: +- +- if x {} +- +-Available since +- +- 2017.1 +- +- +-Default: off. Enable by setting `"analyses": {"S1002": true}`. +- +-Package documentation: [S1002](https://staticcheck.dev/docs/checks/#S1002) +- +- +-## `S1003`: Replace call to strings.Index with strings.Contains +- +-Before: +- +- if strings.Index(x, y) != -1 {} +- +-After: +- +- if strings.Contains(x, y) {} +- +-Available since +- +- 2017.1 +- +- +-Default: on. 
+- +-Package documentation: [S1003](https://staticcheck.dev/docs/checks/#S1003) +- +- +-## `S1004`: Replace call to bytes.Compare with bytes.Equal +- +-Before: +- +- if bytes.Compare(x, y) == 0 {} +- +-After: +- +- if bytes.Equal(x, y) {} +- +-Available since +- +- 2017.1 +- +- +-Default: on. +- +-Package documentation: [S1004](https://staticcheck.dev/docs/checks/#S1004) +- +- +-## `S1005`: Drop unnecessary use of the blank identifier +- +-In many cases, assigning to the blank identifier is unnecessary. +- +-Before: +- +- for _ = range s {} +- x, _ = someMap[key] +- _ = <-ch +- +-After: +- +- for range s{} +- x = someMap[key] +- <-ch +- +-Available since +- +- 2017.1 +- +- +-Default: off. Enable by setting `"analyses": {"S1005": true}`. +- +-Package documentation: [S1005](https://staticcheck.dev/docs/checks/#S1005) +- +- +-## `S1006`: Use 'for { ... }' for infinite loops +- +-For infinite loops, using for { ... } is the most idiomatic choice. +- +-Available since +- +- 2017.1 +- +- +-Default: off. Enable by setting `"analyses": {"S1006": true}`. +- +-Package documentation: [S1006](https://staticcheck.dev/docs/checks/#S1006) +- +- +-## `S1007`: Simplify regular expression by using raw string literal +- +-Raw string literals use backticks instead of quotation marks and do not support any escape sequences. This means that the backslash can be used freely, without the need of escaping. +- +-Since regular expressions have their own escape sequences, raw strings can improve their readability. +- +-Before: +- +- regexp.Compile("\\A(\\w+) profile: total \\d+\\n\\z") +- +-After: +- +- regexp.Compile(`\A(\w+) profile: total \d+\n\z`) +- +-Available since +- +- 2017.1 +- +- +-Default: on. +- +-Package documentation: [S1007](https://staticcheck.dev/docs/checks/#S1007) +- +- +-## `S1008`: Simplify returning boolean expression +- +-Before: +- +- if { +- return true +- } +- return false +- +-After: +- +- return +- +-Available since +- +- 2017.1 +- +- +-Default: off. Enable by setting `"analyses": {"S1008": true}`. +- +-Package documentation: [S1008](https://staticcheck.dev/docs/checks/#S1008) +- +- +-## `S1009`: Omit redundant nil check on slices, maps, and channels +- +-The len function is defined for all slices, maps, and channels, even nil ones, which have a length of zero. It is not necessary to check for nil before checking that their length is not zero. +- +-Before: +- +- if x != nil && len(x) != 0 {} +- +-After: +- +- if len(x) != 0 {} +- +-Available since +- +- 2017.1 +- +- +-Default: on. +- +-Package documentation: [S1009](https://staticcheck.dev/docs/checks/#S1009) +- +- +-## `S1010`: Omit default slice index +- +-When slicing, the second index defaults to the length of the value, making s\[n:len(s)] and s\[n:] equivalent. +- +-Available since +- +- 2017.1 +- +- +-Default: on. +- +-Package documentation: [S1010](https://staticcheck.dev/docs/checks/#S1010) +- +- +-## `S1011`: Use a single append to concatenate two slices +- +-Before: +- +- for _, e := range y { +- x = append(x, e) +- } +- +- for i := range y { +- x = append(x, y[i]) +- } +- +- for i := range y { +- v := y[i] +- x = append(x, v) +- } +- +-After: +- +- x = append(x, y...) +- x = append(x, y...) +- x = append(x, y...) +- +-Available since +- +- 2017.1 +- +- +-Default: off. Enable by setting `"analyses": {"S1011": true}`. 
+- +-Package documentation: [S1011](https://staticcheck.dev/docs/checks/#S1011) +- +- +-## `S1012`: Replace time.Now().Sub(x) with time.Since(x) +- +-The time.Since helper has the same effect as using time.Now().Sub(x) but is easier to read. +- +-Before: +- +- time.Now().Sub(x) +- +-After: +- +- time.Since(x) +- +-Available since +- +- 2017.1 +- +- +-Default: on. +- +-Package documentation: [S1012](https://staticcheck.dev/docs/checks/#S1012) +- +- +-## `S1016`: Use a type conversion instead of manually copying struct fields +- +-Two struct types with identical fields can be converted between each other. In older versions of Go, the fields had to have identical struct tags. Since Go 1.8, however, struct tags are ignored during conversions. It is thus not necessary to manually copy every field individually. +- +-Before: +- +- var x T1 +- y := T2{ +- Field1: x.Field1, +- Field2: x.Field2, +- } +- +-After: +- +- var x T1 +- y := T2(x) +- +-Available since +- +- 2017.1 +- +- +-Default: off. Enable by setting `"analyses": {"S1016": true}`. +- +-Package documentation: [S1016](https://staticcheck.dev/docs/checks/#S1016) +- +- +-## `S1017`: Replace manual trimming with strings.TrimPrefix +- +-Instead of using strings.HasPrefix and manual slicing, use the strings.TrimPrefix function. If the string doesn't start with the prefix, the original string will be returned. Using strings.TrimPrefix reduces complexity, and avoids common bugs, such as off-by-one mistakes. +- +-Before: +- +- if strings.HasPrefix(str, prefix) { +- str = str[len(prefix):] +- } +- +-After: +- +- str = strings.TrimPrefix(str, prefix) +- +-Available since +- +- 2017.1 +- +- +-Default: on. +- +-Package documentation: [S1017](https://staticcheck.dev/docs/checks/#S1017) +- +- +-## `S1018`: Use 'copy' for sliding elements +- +-copy() permits using the same source and destination slice, even with overlapping ranges. This makes it ideal for sliding elements in a slice. +- +-Before: +- +- for i := 0; i < n; i++ { +- bs[i] = bs[offset+i] +- } +- +-After: +- +- copy(bs[:n], bs[offset:]) +- +-Available since +- +- 2017.1 +- +- +-Default: on. +- +-Package documentation: [S1018](https://staticcheck.dev/docs/checks/#S1018) +- +- +-## `S1019`: Simplify 'make' call by omitting redundant arguments +- +-The 'make' function has default values for the length and capacity arguments. For channels, the length defaults to zero, and for slices, the capacity defaults to the length. +- +-Available since +- +- 2017.1 +- +- +-Default: on. +- +-Package documentation: [S1019](https://staticcheck.dev/docs/checks/#S1019) +- +- +-## `S1020`: Omit redundant nil check in type assertion +- +-Before: +- +- if _, ok := i.(T); ok && i != nil {} +- +-After: +- +- if _, ok := i.(T); ok {} +- +-Available since +- +- 2017.1 +- +- +-Default: on. +- +-Package documentation: [S1020](https://staticcheck.dev/docs/checks/#S1020) +- +- +-## `S1021`: Merge variable declaration and assignment +- +-Before: +- +- var x uint +- x = 1 +- +-After: +- +- var x uint = 1 +- +-Available since +- +- 2017.1 +- +- +-Default: off. Enable by setting `"analyses": {"S1021": true}`. +- +-Package documentation: [S1021](https://staticcheck.dev/docs/checks/#S1021) +- +- +-## `S1023`: Omit redundant control flow +- +-Functions that have no return value do not need a return statement as the final statement of the function. +- +-Switches in Go do not have automatic fallthrough, unlike languages like C. It is not necessary to have a break statement as the final statement in a case block. 
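Since the S1023 entry above describes both redundant patterns but shows neither, here is an illustrative sketch (function names are invented):

    package main

    import "fmt"

    // S1023: a final return in a function without results is redundant.
    func greet(name string) {
        fmt.Println("hello,", name)
        return
    }

    // S1023: Go switch cases do not fall through, so a trailing break is redundant.
    func describe(n int) {
        switch {
        case n%2 == 0:
            fmt.Println("even")
            break
        default:
            fmt.Println("odd")
        }
    }

    func main() {
        greet("world")
        describe(4)
    }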
+- +-Available since +- +- 2017.1 +- +- +-Default: on. +- +-Package documentation: [S1023](https://staticcheck.dev/docs/checks/#S1023) +- +- +-## `S1024`: Replace x.Sub(time.Now()) with time.Until(x) +- +-The time.Until helper has the same effect as using x.Sub(time.Now()) but is easier to read. +- +-Before: +- +- x.Sub(time.Now()) +- +-After: +- +- time.Until(x) +- +-Available since +- +- 2017.1 +- +- +-Default: on. +- +-Package documentation: [S1024](https://staticcheck.dev/docs/checks/#S1024) +- +- +-## `S1025`: Don't use fmt.Sprintf("%s", x) unnecessarily +- +-In many instances, there are easier and more efficient ways of getting a value's string representation. Whenever a value's underlying type is a string already, or the type has a String method, they should be used directly. +- +-Given the following shared definitions +- +- type T1 string +- type T2 int +- +- func (T2) String() string { return "Hello, world" } +- +- var x string +- var y T1 +- var z T2 +- +-we can simplify +- +- fmt.Sprintf("%s", x) +- fmt.Sprintf("%s", y) +- fmt.Sprintf("%s", z) +- +-to +- +- x +- string(y) +- z.String() +- +-Available since +- +- 2017.1 +- +- +-Default: off. Enable by setting `"analyses": {"S1025": true}`. +- +-Package documentation: [S1025](https://staticcheck.dev/docs/checks/#S1025) +- +- +-## `S1028`: Simplify error construction with fmt.Errorf +- +-Before: +- +- errors.New(fmt.Sprintf(...)) +- +-After: +- +- fmt.Errorf(...) +- +-Available since +- +- 2017.1 +- +- +-Default: on. +- +-Package documentation: [S1028](https://staticcheck.dev/docs/checks/#S1028) +- +- +-## `S1029`: Range over the string directly +- +-Ranging over a string will yield byte offsets and runes. If the offset isn't used, this is functionally equivalent to converting the string to a slice of runes and ranging over that. Ranging directly over the string will be more performant, however, as it avoids allocating a new slice, the size of which depends on the length of the string. +- +-Before: +- +- for _, r := range []rune(s) {} +- +-After: +- +- for _, r := range s {} +- +-Available since +- +- 2017.1 +- +- +-Default: off. Enable by setting `"analyses": {"S1029": true}`. +- +-Package documentation: [S1029](https://staticcheck.dev/docs/checks/#S1029) +- +- +-## `S1030`: Use bytes.Buffer.String or bytes.Buffer.Bytes +- +-bytes.Buffer has both a String and a Bytes method. It is almost never necessary to use string(buf.Bytes()) or \[]byte(buf.String()) – simply use the other method. +- +-The only exception to this are map lookups. Due to a compiler optimization, m\[string(buf.Bytes())] is more efficient than m\[buf.String()]. +- +-Available since +- +- 2017.1 +- +- +-Default: on. +- +-Package documentation: [S1030](https://staticcheck.dev/docs/checks/#S1030) +- +- +-## `S1031`: Omit redundant nil check around loop +- +-You can use range on nil slices and maps, the loop will simply never execute. This makes an additional nil check around the loop unnecessary. +- +-Before: +- +- if s != nil { +- for _, x := range s { +- ... +- } +- } +- +-After: +- +- for _, x := range s { +- ... +- } +- +-Available since +- +- 2017.1 +- +- +-Default: on. +- +-Package documentation: [S1031](https://staticcheck.dev/docs/checks/#S1031) +- +- +-## `S1032`: Use sort.Ints(x), sort.Float64s(x), and sort.Strings(x) +- +-The sort.Ints, sort.Float64s and sort.Strings functions are easier to read than sort.Sort(sort.IntSlice(x)), sort.Sort(sort.Float64Slice(x)) and sort.Sort(sort.StringSlice(x)). 
+- +-Before: +- +- sort.Sort(sort.StringSlice(x)) +- +-After: +- +- sort.Strings(x) +- +-Available since +- +- 2019.1 +- +- +-Default: on. +- +-Package documentation: [S1032](https://staticcheck.dev/docs/checks/#S1032) +- +- +-## `S1033`: Unnecessary guard around call to 'delete' +- +-Calling delete on a nil map is a no-op. +- +-Available since +- +- 2019.2 +- +- +-Default: on. +- +-Package documentation: [S1033](https://staticcheck.dev/docs/checks/#S1033) +- +- +-## `S1034`: Use result of type assertion to simplify cases +- +-Available since +- +- 2019.2 +- +- +-Default: on. +- +-Package documentation: [S1034](https://staticcheck.dev/docs/checks/#S1034) +- +- +-## `S1035`: Redundant call to net/http.CanonicalHeaderKey in method call on net/http.Header +- +-The methods on net/http.Header, namely Add, Del, Get and Set, already canonicalize the given header name. +- +-Available since +- +- 2020.1 +- +- +-Default: on. +- +-Package documentation: [S1035](https://staticcheck.dev/docs/checks/#S1035) +- +- +-## `S1036`: Unnecessary guard around map access +- +-When accessing a map key that doesn't exist yet, one receives a zero value. Often, the zero value is a suitable value, for example when using append or doing integer math. +- +-The following +- +- if _, ok := m["foo"]; ok { +- m["foo"] = append(m["foo"], "bar") +- } else { +- m["foo"] = []string{"bar"} +- } +- +-can be simplified to +- +- m["foo"] = append(m["foo"], "bar") +- +-and +- +- if _, ok := m2["k"]; ok { +- m2["k"] += 4 +- } else { +- m2["k"] = 4 +- } +- +-can be simplified to +- +- m["k"] += 4 +- +-Available since +- +- 2020.1 +- +- +-Default: on. +- +-Package documentation: [S1036](https://staticcheck.dev/docs/checks/#S1036) +- +- +-## `S1037`: Elaborate way of sleeping +- +-Using a select statement with a single case receiving from the result of time.After is a very elaborate way of sleeping that can much simpler be expressed with a simple call to time.Sleep. +- +-Available since +- +- 2020.1 +- +- +-Default: on. +- +-Package documentation: [S1037](https://staticcheck.dev/docs/checks/#S1037) +- +- +-## `S1038`: Unnecessarily complex way of printing formatted string +- +-Instead of using fmt.Print(fmt.Sprintf(...)), one can use fmt.Printf(...). +- +-Available since +- +- 2020.1 +- +- +-Default: on. +- +-Package documentation: [S1038](https://staticcheck.dev/docs/checks/#S1038) +- +- +-## `S1039`: Unnecessary use of fmt.Sprint +- +-Calling fmt.Sprint with a single string argument is unnecessary and identical to using the string directly. +- +-Available since +- +- 2020.1 +- +- +-Default: on. +- +-Package documentation: [S1039](https://staticcheck.dev/docs/checks/#S1039) +- +- +-## `S1040`: Type assertion to current type +- +-The type assertion x.(SomeInterface), when x already has type SomeInterface, can only fail if x is nil. Usually, this is left-over code from when x had a different type and you can safely delete the type assertion. If you want to check that x is not nil, consider being explicit and using an actual if x == nil comparison instead of relying on the type assertion panicking. +- +-Available since +- +- 2021.1 +- +- +-Default: on. +- +-Package documentation: [S1040](https://staticcheck.dev/docs/checks/#S1040) +- +- +-## `SA1000`: Invalid regular expression +- +-Available since +- +- 2017.1 +- +- +-Default: off. Enable by setting `"analyses": {"SA1000": true}`. 
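The SA1000 entry above has no example; the sketch below shows the kind of constant pattern it would report (the pattern is deliberately broken for illustration):

    package main

    import (
        "fmt"
        "regexp"
    )

    func main() {
        // `(\d+` is not a valid regular expression: the group is never closed.
        // SA1000 reports such constant patterns statically; at run time Compile
        // returns an error (MustCompile would panic instead).
        if _, err := regexp.Compile(`(\d+`); err != nil {
            fmt.Println("invalid pattern:", err)
        }
    }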
+- +-Package documentation: [SA1000](https://staticcheck.dev/docs/checks/#SA1000) +- +- +-## `SA1001`: Invalid template +- +-Available since +- +- 2017.1 +- +- +-Default: on. +- +-Package documentation: [SA1001](https://staticcheck.dev/docs/checks/#SA1001) +- +- +-## `SA1002`: Invalid format in time.Parse +- +-Available since +- +- 2017.1 +- +- +-Default: off. Enable by setting `"analyses": {"SA1002": true}`. +- +-Package documentation: [SA1002](https://staticcheck.dev/docs/checks/#SA1002) +- +- +-## `SA1003`: Unsupported argument to functions in encoding/binary +- +-The encoding/binary package can only serialize types with known sizes. This precludes the use of the int and uint types, as their sizes differ on different architectures. Furthermore, it doesn't support serializing maps, channels, strings, or functions. +- +-Before Go 1.8, bool wasn't supported, either. +- +-Available since +- +- 2017.1 +- +- +-Default: off. Enable by setting `"analyses": {"SA1003": true}`. +- +-Package documentation: [SA1003](https://staticcheck.dev/docs/checks/#SA1003) +- +- +-## `SA1004`: Suspiciously small untyped constant in time.Sleep +- +-The time.Sleep function takes a time.Duration as its only argument. Durations are expressed in nanoseconds. Thus, calling time.Sleep(1) will sleep for 1 nanosecond. This is a common source of bugs, as sleep functions in other languages often accept seconds or milliseconds. +- +-The time package provides constants such as time.Second to express large durations. These can be combined with arithmetic to express arbitrary durations, for example 5 \* time.Second for 5 seconds. +- +-If you truly meant to sleep for a tiny amount of time, use n \* time.Nanosecond to signal to Staticcheck that you did mean to sleep for some amount of nanoseconds. +- +-Available since +- +- 2017.1 +- +- +-Default: on. +- +-Package documentation: [SA1004](https://staticcheck.dev/docs/checks/#SA1004) +- +- +-## `SA1005`: Invalid first argument to exec.Command +- +-os/exec runs programs directly (using variants of the fork and exec system calls on Unix systems). This shouldn't be confused with running a command in a shell. The shell will allow for features such as input redirection, pipes, and general scripting. The shell is also responsible for splitting the user's input into a program name and its arguments. For example, the equivalent to +- +- ls / /tmp +- +-would be +- +- exec.Command("ls", "/", "/tmp") +- +-If you want to run a command in a shell, consider using something like the following – but be aware that not all systems, particularly Windows, will have a /bin/sh program: +- +- exec.Command("/bin/sh", "-c", "ls | grep Awesome") +- +-Available since +- +- 2017.1 +- +- +-Default: on. +- +-Package documentation: [SA1005](https://staticcheck.dev/docs/checks/#SA1005) +- +- +-## `SA1007`: Invalid URL in net/url.Parse +- +-Available since +- +- 2017.1 +- +- +-Default: off. Enable by setting `"analyses": {"SA1007": true}`. +- +-Package documentation: [SA1007](https://staticcheck.dev/docs/checks/#SA1007) +- +- +-## `SA1008`: Non-canonical key in http.Header map +- +-Keys in http.Header maps are canonical, meaning they follow a specific combination of uppercase and lowercase letters. Methods such as http.Header.Add and http.Header.Del convert inputs into this canonical form before manipulating the map. +- +-When manipulating http.Header maps directly, as opposed to using the provided methods, care should be taken to stick to canonical form in order to avoid inconsistencies. 
The following piece of code demonstrates one such inconsistency: +- +- h := http.Header{} +- h["etag"] = []string{"1234"} +- h.Add("etag", "5678") +- fmt.Println(h) +- +- // Output: +- // map[Etag:[5678] etag:[1234]] +- +-The easiest way of obtaining the canonical form of a key is to use http.CanonicalHeaderKey. +- +-Available since +- +- 2017.1 +- +- +-Default: on. +- +-Package documentation: [SA1008](https://staticcheck.dev/docs/checks/#SA1008) +- +- +-## `SA1010`: (*regexp.Regexp).FindAll called with n == 0, which will always return zero results +- +-If n >= 0, the function returns at most n matches/submatches. To return all results, specify a negative number. +- +-Available since +- +- 2017.1 +- +- +-Default: off. Enable by setting `"analyses": {"SA1010": true}`. +- +-Package documentation: [SA1010](https://staticcheck.dev/docs/checks/#SA1010) +- +- +-## `SA1011`: Various methods in the 'strings' package expect valid UTF-8, but invalid input is provided +- +-Available since +- +- 2017.1 +- +- +-Default: off. Enable by setting `"analyses": {"SA1011": true}`. +- +-Package documentation: [SA1011](https://staticcheck.dev/docs/checks/#SA1011) +- +- +-## `SA1012`: A nil context.Context is being passed to a function, consider using context.TODO instead +- +-Available since +- +- 2017.1 +- +- +-Default: on. +- +-Package documentation: [SA1012](https://staticcheck.dev/docs/checks/#SA1012) +- +- +-## `SA1013`: io.Seeker.Seek is being called with the whence constant as the first argument, but it should be the second +- +-Available since +- +- 2017.1 +- +- +-Default: on. +- +-Package documentation: [SA1013](https://staticcheck.dev/docs/checks/#SA1013) +- +- +-## `SA1014`: Non-pointer value passed to Unmarshal or Decode +- +-Available since +- +- 2017.1 +- +- +-Default: off. Enable by setting `"analyses": {"SA1014": true}`. +- +-Package documentation: [SA1014](https://staticcheck.dev/docs/checks/#SA1014) +- +- +-## `SA1015`: Using time.Tick in a way that will leak. Consider using time.NewTicker, and only use time.Tick in tests, commands and endless functions +- +-Before Go 1.23, time.Tickers had to be closed to be able to be garbage collected. Since time.Tick doesn't make it possible to close the underlying ticker, using it repeatedly would leak memory. +- +-Go 1.23 fixes this by allowing tickers to be collected even if they weren't closed. +- +-Available since +- +- 2017.1 +- +- +-Default: off. Enable by setting `"analyses": {"SA1015": true}`. +- +-Package documentation: [SA1015](https://staticcheck.dev/docs/checks/#SA1015) +- +- +-## `SA1016`: Trapping a signal that cannot be trapped +- +-Not all signals can be intercepted by a process. Specifically, on UNIX-like systems, the syscall.SIGKILL and syscall.SIGSTOP signals are never passed to the process, but instead handled directly by the kernel. It is therefore pointless to try and handle these signals. +- +-Available since +- +- 2017.1 +- +- +-Default: on. +- +-Package documentation: [SA1016](https://staticcheck.dev/docs/checks/#SA1016) +- +- +-## `SA1017`: Channels used with os/signal.Notify should be buffered +- +-The os/signal package uses non-blocking channel sends when delivering signals. If the receiving end of the channel isn't ready and the channel is either unbuffered or full, the signal will be dropped. To avoid missing signals, the channel should be buffered and of the appropriate size. For a channel used for notification of just one signal value, a buffer of size 1 is sufficient. +- +-Available since +- +- 2017.1 +- +- +-Default: off. 
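Supplementing the SA1017 entry above (its configuration note continues below), a minimal sketch of the buffered-channel pattern it asks for; the program simply waits for an interrupt:

    package main

    import (
        "fmt"
        "os"
        "os/signal"
    )

    func main() {
        // A buffer of size 1 is sufficient when waiting for a single signal value.
        // An unbuffered channel could miss the signal, since the signal package
        // uses a non-blocking send (this is what SA1017 warns about).
        c := make(chan os.Signal, 1)
        signal.Notify(c, os.Interrupt)

        fmt.Println("waiting for interrupt (Ctrl-C)...")
        <-c
        fmt.Println("got signal, shutting down")
    }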
Enable by setting `"analyses": {"SA1017": true}`. +- +-Package documentation: [SA1017](https://staticcheck.dev/docs/checks/#SA1017) +- +- +-## `SA1018`: strings.Replace called with n == 0, which does nothing +- +-With n == 0, zero instances will be replaced. To replace all instances, use a negative number, or use strings.ReplaceAll. +- +-Available since +- +- 2017.1 +- +- +-Default: off. Enable by setting `"analyses": {"SA1018": true}`. +- +-Package documentation: [SA1018](https://staticcheck.dev/docs/checks/#SA1018) +- +- +-## `SA1020`: Using an invalid host:port pair with a net.Listen-related function +- +-Available since +- +- 2017.1 +- +- +-Default: off. Enable by setting `"analyses": {"SA1020": true}`. +- +-Package documentation: [SA1020](https://staticcheck.dev/docs/checks/#SA1020) +- +- +-## `SA1021`: Using bytes.Equal to compare two net.IP +- +-A net.IP stores an IPv4 or IPv6 address as a slice of bytes. The length of the slice for an IPv4 address, however, can be either 4 or 16 bytes long, using different ways of representing IPv4 addresses. In order to correctly compare two net.IPs, the net.IP.Equal method should be used, as it takes both representations into account. +- +-Available since +- +- 2017.1 +- +- +-Default: off. Enable by setting `"analyses": {"SA1021": true}`. +- +-Package documentation: [SA1021](https://staticcheck.dev/docs/checks/#SA1021) +- +- +-## `SA1023`: Modifying the buffer in an io.Writer implementation +- +-Write must not modify the slice data, even temporarily. +- +-Available since +- +- 2017.1 +- +- +-Default: off. Enable by setting `"analyses": {"SA1023": true}`. +- +-Package documentation: [SA1023](https://staticcheck.dev/docs/checks/#SA1023) +- +- +-## `SA1024`: A string cutset contains duplicate characters +- +-The strings.TrimLeft and strings.TrimRight functions take cutsets, not prefixes. A cutset is treated as a set of characters to remove from a string. For example, +- +- strings.TrimLeft("42133word", "1234") +- +-will result in the string "word" – any characters that are 1, 2, 3 or 4 are cut from the left of the string. +- +-In order to remove one string from another, use strings.TrimPrefix instead. +- +-Available since +- +- 2017.1 +- +- +-Default: off. Enable by setting `"analyses": {"SA1024": true}`. +- +-Package documentation: [SA1024](https://staticcheck.dev/docs/checks/#SA1024) +- +- +-## `SA1025`: It is not possible to use (*time.Timer).Reset's return value correctly +- +-Available since +- +- 2019.1 +- +- +-Default: off. Enable by setting `"analyses": {"SA1025": true}`. +- +-Package documentation: [SA1025](https://staticcheck.dev/docs/checks/#SA1025) +- +- +-## `SA1026`: Cannot marshal channels or functions +- +-Available since +- +- 2019.2 +- +- +-Default: off. Enable by setting `"analyses": {"SA1026": true}`. +- +-Package documentation: [SA1026](https://staticcheck.dev/docs/checks/#SA1026) +- +- +-## `SA1027`: Atomic access to 64-bit variable must be 64-bit aligned +- +-On ARM, x86-32, and 32-bit MIPS, it is the caller's responsibility to arrange for 64-bit alignment of 64-bit words accessed atomically. The first word in a variable or in an allocated struct, array, or slice can be relied upon to be 64-bit aligned. +- +-You can use the structlayout tool to inspect the alignment of fields in a struct. +- +-Available since +- +- 2019.2 +- +- +-Default: off. Enable by setting `"analyses": {"SA1027": true}`. 
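To make the SA1027 description above concrete, a sketch of a layout that keeps the atomically accessed 64-bit word aligned (the struct and field names are invented):

    package main

    import (
        "fmt"
        "sync/atomic"
    )

    // On ARM, x86-32 and 32-bit MIPS only the first word of an allocated struct is
    // guaranteed to be 64-bit aligned, so the uint64 that is accessed atomically is
    // placed first. Putting hits after flag could trip SA1027 and fault on those platforms.
    type counters struct {
        hits uint64
        flag uint32
    }

    func main() {
        c := &counters{}
        atomic.AddUint64(&c.hits, 1)
        fmt.Println(atomic.LoadUint64(&c.hits))
    }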
+- +-Package documentation: [SA1027](https://staticcheck.dev/docs/checks/#SA1027) +- +- +-## `SA1028`: sort.Slice can only be used on slices +- +-The first argument of sort.Slice must be a slice. +- +-Available since +- +- 2020.1 +- +- +-Default: off. Enable by setting `"analyses": {"SA1028": true}`. +- +-Package documentation: [SA1028](https://staticcheck.dev/docs/checks/#SA1028) +- +- +-## `SA1029`: Inappropriate key in call to context.WithValue +- +-The provided key must be comparable and should not be of type string or any other built-in type to avoid collisions between packages using context. Users of WithValue should define their own types for keys. +- +-To avoid allocating when assigning to an interface{}, context keys often have concrete type struct{}. Alternatively, exported context key variables' static type should be a pointer or interface. +- +-Available since +- +- 2020.1 +- +- +-Default: off. Enable by setting `"analyses": {"SA1029": true}`. +- +-Package documentation: [SA1029](https://staticcheck.dev/docs/checks/#SA1029) +- +- +-## `SA1030`: Invalid argument in call to a strconv function +- +-This check validates the format, number base and bit size arguments of the various parsing and formatting functions in strconv. +- +-Available since +- +- 2021.1 +- +- +-Default: off. Enable by setting `"analyses": {"SA1030": true}`. +- +-Package documentation: [SA1030](https://staticcheck.dev/docs/checks/#SA1030) +- +- +-## `SA1031`: Overlapping byte slices passed to an encoder +- +-In an encoding function of the form Encode(dst, src), dst and src were found to reference the same memory. This can result in src bytes being overwritten before they are read, when the encoder writes more than one byte per src byte. +- +-Available since +- +- 2024.1 +- +- +-Default: off. Enable by setting `"analyses": {"SA1031": true}`. +- +-Package documentation: [SA1031](https://staticcheck.dev/docs/checks/#SA1031) +- +- +-## `SA1032`: Wrong order of arguments to errors.Is +- +-The first argument of the function errors.Is is the error that we have and the second argument is the error we're trying to match against. For example: +- +- if errors.Is(err, io.EOF) { ... } +- +-This check detects some cases where the two arguments have been swapped. It flags any calls where the first argument is referring to a package-level error variable, such as +- +- if errors.Is(io.EOF, err) { /* this is wrong */ } +- +-Available since +- +- 2024.1 +- +- +-Default: off. Enable by setting `"analyses": {"SA1032": true}`. +- +-Package documentation: [SA1032](https://staticcheck.dev/docs/checks/#SA1032) +- +- +-## `SA2001`: Empty critical section, did you mean to defer the unlock? +- +-Empty critical sections of the kind +- +- mu.Lock() +- mu.Unlock() +- +-are very often a typo, and the following was intended instead: +- +- mu.Lock() +- defer mu.Unlock() +- +-Do note that sometimes empty critical sections can be useful, as a form of signaling to wait on another goroutine. Many times, there are simpler ways of achieving the same effect. When that isn't the case, the code should be amply commented to avoid confusion. Combining such comments with a //lint:ignore directive can be used to suppress this rare false positive. +- +-Available since +- +- 2017.1 +- +- +-Default: on. +- +-Package documentation: [SA2001](https://staticcheck.dev/docs/checks/#SA2001) +- +- +-## `SA2002`: Called testing.T.FailNow or SkipNow in a goroutine, which isn't allowed +- +-Available since +- +- 2017.1 +- +- +-Default: off. 
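The SA2002 entry above (its configuration note continues below) has no example; an illustrative test sketch, with invented names, of reporting a goroutine's outcome back to the test goroutine instead of failing from inside it:

    package mypkg

    import "testing"

    func TestWorker(t *testing.T) {
        done := make(chan error, 1)
        go func() {
            // Calling t.Fatal (which uses FailNow) here is what SA2002 flags:
            // FailNow must run on the goroutine executing the test function.
            // Report the outcome back to the test goroutine instead.
            done <- nil
        }()
        if err := <-done; err != nil {
            t.Fatal(err)
        }
    }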
Enable by setting `"analyses": {"SA2002": true}`. +- +-Package documentation: [SA2002](https://staticcheck.dev/docs/checks/#SA2002) +- +- +-## `SA2003`: Deferred Lock right after locking, likely meant to defer Unlock instead +- +-Available since +- +- 2017.1 +- +- +-Default: off. Enable by setting `"analyses": {"SA2003": true}`. +- +-Package documentation: [SA2003](https://staticcheck.dev/docs/checks/#SA2003) +- +- +-## `SA3000`: TestMain doesn't call os.Exit, hiding test failures +- +-Test executables (and in turn 'go test') exit with a non-zero status code if any tests failed. When specifying your own TestMain function, it is your responsibility to arrange for this, by calling os.Exit with the correct code. The correct code is returned by (\*testing.M).Run, so the usual way of implementing TestMain is to end it with os.Exit(m.Run()). +- +-Available since +- +- 2017.1 +- +- +-Default: on. +- +-Package documentation: [SA3000](https://staticcheck.dev/docs/checks/#SA3000) +- +- +-## `SA3001`: Assigning to b.N in benchmarks distorts the results +- +-The testing package dynamically sets b.N to improve the reliability of benchmarks and uses it in computations to determine the duration of a single operation. Benchmark code must not alter b.N as this would falsify results. +- +-Available since +- +- 2017.1 +- +- +-Default: on. +- +-Package documentation: [SA3001](https://staticcheck.dev/docs/checks/#SA3001) +- +- +-## `SA4000`: Binary operator has identical expressions on both sides +- +-Available since +- +- 2017.1 +- +- +-Default: on. +- +-Package documentation: [SA4000](https://staticcheck.dev/docs/checks/#SA4000) +- +- +-## `SA4001`: &*x gets simplified to x, it does not copy x +- +-Available since +- +- 2017.1 +- +- +-Default: on. +- +-Package documentation: [SA4001](https://staticcheck.dev/docs/checks/#SA4001) +- +- +-## `SA4003`: Comparing unsigned values against negative values is pointless +- +-Available since +- +- 2017.1 +- +- +-Default: on. +- +-Package documentation: [SA4003](https://staticcheck.dev/docs/checks/#SA4003) +- +- +-## `SA4004`: The loop exits unconditionally after one iteration +- +-Available since +- +- 2017.1 +- +- +-Default: on. +- +-Package documentation: [SA4004](https://staticcheck.dev/docs/checks/#SA4004) +- +- +-## `SA4005`: Field assignment that will never be observed. Did you mean to use a pointer receiver? +- +-Available since +- +- 2021.1 +- +- +-Default: off. Enable by setting `"analyses": {"SA4005": true}`. +- +-Package documentation: [SA4005](https://staticcheck.dev/docs/checks/#SA4005) +- +- +-## `SA4006`: A value assigned to a variable is never read before being overwritten. Forgotten error check or dead code? +- +-Available since +- +- 2017.1 +- +- +-Default: off. Enable by setting `"analyses": {"SA4006": true}`. +- +-Package documentation: [SA4006](https://staticcheck.dev/docs/checks/#SA4006) +- +- +-## `SA4008`: The variable in the loop condition never changes, are you incrementing the wrong variable? +- +-For example: +- +- for i := 0; i < 10; j++ { ... } +- +-This may also occur when a loop can only execute once because of unconditional control flow that terminates the loop. For example, when a loop body contains an unconditional break, return, or panic: +- +- func f() { +- panic("oops") +- } +- func g() { +- for i := 0; i < 10; i++ { +- // f unconditionally calls panic, which means "i" is +- // never incremented. +- f() +- } +- } +- +-Available since +- +- 2017.1 +- +- +-Default: off. Enable by setting `"analyses": {"SA4008": true}`. 
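Supplementing the SA3000 entry above, a minimal TestMain that follows the os.Exit(m.Run()) convention it describes (the package name is invented; any setup and teardown would surround the Run call):

    package mypkg

    import (
        "os"
        "testing"
    )

    func TestMain(m *testing.M) {
        // hypothetical setup would go here
        code := m.Run()
        // hypothetical teardown would go here
        os.Exit(code)
    }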
+- +-Package documentation: [SA4008](https://staticcheck.dev/docs/checks/#SA4008) +- +- +-## `SA4009`: A function argument is overwritten before its first use +- +-Available since +- +- 2017.1 +- +- +-Default: off. Enable by setting `"analyses": {"SA4009": true}`. +- +-Package documentation: [SA4009](https://staticcheck.dev/docs/checks/#SA4009) +- +- +-## `SA4010`: The result of append will never be observed anywhere +- +-Available since +- +- 2017.1 +- +- +-Default: off. Enable by setting `"analyses": {"SA4010": true}`. +- +-Package documentation: [SA4010](https://staticcheck.dev/docs/checks/#SA4010) +- +- +-## `SA4011`: Break statement with no effect. Did you mean to break out of an outer loop? +- +-Available since +- +- 2017.1 +- +- +-Default: on. +- +-Package documentation: [SA4011](https://staticcheck.dev/docs/checks/#SA4011) +- +- +-## `SA4012`: Comparing a value against NaN even though no value is equal to NaN +- +-Available since +- +- 2017.1 +- +- +-Default: off. Enable by setting `"analyses": {"SA4012": true}`. +- +-Package documentation: [SA4012](https://staticcheck.dev/docs/checks/#SA4012) +- +- +-## `SA4013`: Negating a boolean twice (!!b) is the same as writing b. This is either redundant, or a typo. +- +-Available since +- +- 2017.1 +- +- +-Default: on. +- +-Package documentation: [SA4013](https://staticcheck.dev/docs/checks/#SA4013) +- +- +-## `SA4014`: An if/else if chain has repeated conditions and no side-effects; if the condition didn't match the first time, it won't match the second time, either +- +-Available since +- +- 2017.1 +- +- +-Default: on. +- +-Package documentation: [SA4014](https://staticcheck.dev/docs/checks/#SA4014) +- +- +-## `SA4015`: Calling functions like math.Ceil on floats converted from integers doesn't do anything useful +- +-Available since +- +- 2017.1 +- +- +-Default: off. Enable by setting `"analyses": {"SA4015": true}`. +- +-Package documentation: [SA4015](https://staticcheck.dev/docs/checks/#SA4015) +- +- +-## `SA4016`: Certain bitwise operations, such as x ^ 0, do not do anything useful +- +-Available since +- +- 2017.1 +- +- +-Default: on. +- +-Package documentation: [SA4016](https://staticcheck.dev/docs/checks/#SA4016) +- +- +-## `SA4017`: Discarding the return values of a function without side effects, making the call pointless +- +-Available since +- +- 2017.1 +- +- +-Default: off. Enable by setting `"analyses": {"SA4017": true}`. +- +-Package documentation: [SA4017](https://staticcheck.dev/docs/checks/#SA4017) +- +- +-## `SA4018`: Self-assignment of variables +- +-Available since +- +- 2017.1 +- +- +-Default: off. Enable by setting `"analyses": {"SA4018": true}`. +- +-Package documentation: [SA4018](https://staticcheck.dev/docs/checks/#SA4018) +- +- +-## `SA4019`: Multiple, identical build constraints in the same file +- +-Available since +- +- 2017.1 +- +- +-Default: on. +- +-Package documentation: [SA4019](https://staticcheck.dev/docs/checks/#SA4019) +- +- +-## `SA4020`: Unreachable case clause in a type switch +- +-In a type switch like the following +- +- type T struct{} +- func (T) Read(b []byte) (int, error) { return 0, nil } +- +- var v any = T{} +- +- switch v.(type) { +- case io.Reader: +- // ... +- case T: +- // unreachable +- } +- +-the second case clause can never be reached because T implements io.Reader and case clauses are evaluated in source order. 
+- +-Another example: +- +- type T struct{} +- func (T) Read(b []byte) (int, error) { return 0, nil } +- func (T) Close() error { return nil } +- +- var v any = T{} +- +- switch v.(type) { +- case io.Reader: +- // ... +- case io.ReadCloser: +- // unreachable +- } +- +-Even though T has a Close method and thus implements io.ReadCloser, io.Reader will always match first. The method set of io.Reader is a subset of io.ReadCloser. Thus it is impossible to match the second case without matching the first case. +- +-### Structurally equivalent interfaces {#hdr-Structurally_equivalent_interfaces} +- +-A special case of the previous example are structurally identical interfaces. Given these declarations +- +- type T error +- type V error +- +- func doSomething() error { +- err, ok := doAnotherThing() +- if ok { +- return T(err) +- } +- +- return U(err) +- } +- +-the following type switch will have an unreachable case clause: +- +- switch doSomething().(type) { +- case T: +- // ... +- case V: +- // unreachable +- } +- +-T will always match before V because they are structurally equivalent and therefore doSomething()'s return value implements both. +- +-Available since +- +- 2019.2 +- +- +-Default: on. +- +-Package documentation: [SA4020](https://staticcheck.dev/docs/checks/#SA4020) +- +- +-## `SA4022`: Comparing the address of a variable against nil +- +-Code such as 'if &x == nil' is meaningless, because taking the address of a variable always yields a non-nil pointer. +- +-Available since +- +- 2020.1 +- +- +-Default: on. +- +-Package documentation: [SA4022](https://staticcheck.dev/docs/checks/#SA4022) +- +- +-## `SA4023`: Impossible comparison of interface value with untyped nil +- +-Under the covers, interfaces are implemented as two elements, a type T and a value V. V is a concrete value such as an int, struct or pointer, never an interface itself, and has type T. For instance, if we store the int value 3 in an interface, the resulting interface value has, schematically, (T=int, V=3). The value V is also known as the interface's dynamic value, since a given interface variable might hold different values V (and corresponding types T) during the execution of the program. +- +-An interface value is nil only if the V and T are both unset, (T=nil, V is not set), In particular, a nil interface will always hold a nil type. If we store a nil pointer of type \*int inside an interface value, the inner type will be \*int regardless of the value of the pointer: (T=\*int, V=nil). Such an interface value will therefore be non-nil even when the pointer value V inside is nil. +- +-This situation can be confusing, and arises when a nil value is stored inside an interface value such as an error return: +- +- func returnsError() error { +- var p *MyError = nil +- if bad() { +- p = ErrBad +- } +- return p // Will always return a non-nil error. +- } +- +-If all goes well, the function returns a nil p, so the return value is an error interface value holding (T=\*MyError, V=nil). This means that if the caller compares the returned error to nil, it will always look as if there was an error even if nothing bad happened. To return a proper nil error to the caller, the function must return an explicit nil: +- +- func returnsError() error { +- if bad() { +- return ErrBad +- } +- return nil +- } +- +-It's a good idea for functions that return errors always to use the error type in their signature (as we did above) rather than a concrete type such as \*MyError, to help guarantee the error is created correctly. 
As an example, os.Open returns an error even though, if not nil, it's always of concrete type \*os.PathError. +- +-Similar situations to those described here can arise whenever interfaces are used. Just keep in mind that if any concrete value has been stored in the interface, the interface will not be nil. For more information, see The Laws of Reflection at [https://golang.org/doc/articles/laws\_of\_reflection.html](https://golang.org/doc/articles/laws_of_reflection.html). +- +-This text has been copied from [https://golang.org/doc/faq#nil\_error](https://golang.org/doc/faq#nil_error), licensed under the Creative Commons Attribution 3.0 License. +- +-Available since +- +- 2020.2 +- +- +-Default: off. Enable by setting `"analyses": {"SA4023": true}`. +- +-Package documentation: [SA4023](https://staticcheck.dev/docs/checks/#SA4023) +- +- +-## `SA4024`: Checking for impossible return value from a builtin function +- +-Return values of the len and cap builtins cannot be negative. +- +-See [https://golang.org/pkg/builtin/#len](https://golang.org/pkg/builtin/#len) and [https://golang.org/pkg/builtin/#cap](https://golang.org/pkg/builtin/#cap). +- +-Example: +- +- if len(slice) < 0 { +- fmt.Println("unreachable code") +- } +- +-Available since +- +- 2021.1 +- +- +-Default: on. +- +-Package documentation: [SA4024](https://staticcheck.dev/docs/checks/#SA4024) +- +- +-## `SA4025`: Integer division of literals that results in zero +- +-When dividing two integer constants, the result will also be an integer. Thus, a division such as 2 / 3 results in 0. This is true for all of the following examples: +- +- _ = 2 / 3 +- const _ = 2 / 3 +- const _ float64 = 2 / 3 +- _ = float64(2 / 3) +- +-Staticcheck will flag such divisions if both sides of the division are integer literals, as it is highly unlikely that the division was intended to truncate to zero. Staticcheck will not flag integer division involving named constants, to avoid noisy positives. +- +-Available since +- +- 2021.1 +- +- +-Default: on. +- +-Package documentation: [SA4025](https://staticcheck.dev/docs/checks/#SA4025) +- +- +-## `SA4026`: Go constants cannot express negative zero +- +-In IEEE 754 floating point math, zero has a sign and can be positive or negative. This can be useful in certain numerical code. +- +-Go constants, however, cannot express negative zero. This means that the literals -0.0 and 0.0 have the same ideal value (zero) and will both represent positive zero at runtime. +- +-To explicitly and reliably create a negative zero, you can use the math.Copysign function: math.Copysign(0, -1). +- +-Available since +- +- 2021.1 +- +- +-Default: on. +- +-Package documentation: [SA4026](https://staticcheck.dev/docs/checks/#SA4026) +- +- +-## `SA4027`: (*net/url.URL).Query returns a copy, modifying it doesn't change the URL +- +-(\*net/url.URL).Query parses the current value of net/url.URL.RawQuery and returns it as a map of type net/url.Values. Subsequent changes to this map will not affect the URL unless the map gets encoded and assigned to the URL's RawQuery. +- +-As a consequence, the following code pattern is an expensive no-op: u.Query().Add(key, value). +- +-Available since +- +- 2021.1 +- +- +-Default: on. +- +-Package documentation: [SA4027](https://staticcheck.dev/docs/checks/#SA4027) +- +- +-## `SA4028`: x % 1 is always zero +- +-Available since +- +- 2022.1 +- +- +-Default: on. 
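Supplementing the SA4027 entry above, a sketch showing both the no-op it flags and the effective way to add a query parameter (the URL is arbitrary):

    package main

    import (
        "fmt"
        "net/url"
    )

    func main() {
        u, err := url.Parse("https://example.com/search?q=go")
        if err != nil {
            panic(err)
        }

        // What SA4027 flags: Query() returns a copy, so this Add changes nothing.
        u.Query().Add("page", "2")

        // To change the URL, modify the copy and encode it back into RawQuery.
        q := u.Query()
        q.Add("page", "2")
        u.RawQuery = q.Encode()

        fmt.Println(u.String()) // https://example.com/search?page=2&q=go
    }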
+- +-Package documentation: [SA4028](https://staticcheck.dev/docs/checks/#SA4028) +- +- +-## `SA4029`: Ineffective attempt at sorting slice +- +-sort.Float64Slice, sort.IntSlice, and sort.StringSlice are types, not functions. Doing x = sort.StringSlice(x) does nothing, especially not sort any values. The correct usage is sort.Sort(sort.StringSlice(x)) or sort.StringSlice(x).Sort(), but there are more convenient helpers, namely sort.Float64s, sort.Ints, and sort.Strings. +- +-Available since +- +- 2022.1 +- +- +-Default: on. +- +-Package documentation: [SA4029](https://staticcheck.dev/docs/checks/#SA4029) +- +- +-## `SA4030`: Ineffective attempt at generating random number +- +-Functions in the math/rand package that accept upper limits, such as Intn, generate random numbers in the half-open interval \[0,n). In other words, the generated numbers will be >= 0 and \< n – they don't include n. rand.Intn(1) therefore doesn't generate 0 or 1, it always generates 0. +- +-Available since +- +- 2022.1 +- +- +-Default: on. +- +-Package documentation: [SA4030](https://staticcheck.dev/docs/checks/#SA4030) +- +- +-## `SA4031`: Checking never-nil value against nil +- +-Available since +- +- 2022.1 +- +- +-Default: off. Enable by setting `"analyses": {"SA4031": true}`. +- +-Package documentation: [SA4031](https://staticcheck.dev/docs/checks/#SA4031) +- +- +-## `SA4032`: Comparing runtime.GOOS or runtime.GOARCH against impossible value +- +-Available since +- +- 2024.1 +- +- +-Default: on. +- +-Package documentation: [SA4032](https://staticcheck.dev/docs/checks/#SA4032) +- +- +-## `SA5000`: Assignment to nil map +- +-Available since +- +- 2017.1 +- +- +-Default: off. Enable by setting `"analyses": {"SA5000": true}`. +- +-Package documentation: [SA5000](https://staticcheck.dev/docs/checks/#SA5000) +- +- +-## `SA5001`: Deferring Close before checking for a possible error +- +-Available since +- +- 2017.1 +- +- +-Default: on. +- +-Package documentation: [SA5001](https://staticcheck.dev/docs/checks/#SA5001) +- +- +-## `SA5002`: The empty for loop ('for {}') spins and can block the scheduler +- +-Available since +- +- 2017.1 +- +- +-Default: off. Enable by setting `"analyses": {"SA5002": true}`. +- +-Package documentation: [SA5002](https://staticcheck.dev/docs/checks/#SA5002) +- +- +-## `SA5003`: Defers in infinite loops will never execute +- +-Defers are scoped to the surrounding function, not the surrounding block. In a function that never returns, i.e. one containing an infinite loop, defers will never execute. +- +-Available since +- +- 2017.1 +- +- +-Default: on. +- +-Package documentation: [SA5003](https://staticcheck.dev/docs/checks/#SA5003) +- +- +-## `SA5004`: 'for { select { ...' with an empty default branch spins +- +-Available since +- +- 2017.1 +- +- +-Default: on. +- +-Package documentation: [SA5004](https://staticcheck.dev/docs/checks/#SA5004) +- +- +-## `SA5005`: The finalizer references the finalized object, preventing garbage collection +- +-A finalizer is a function associated with an object that runs when the garbage collector is ready to collect said object, that is when the object is no longer referenced by anything. +- +-If the finalizer references the object, however, it will always remain as the final reference to that object, preventing the garbage collector from collecting the object. The finalizer will never run, and the object will never be collected, leading to a memory leak. That is why the finalizer should instead use its first argument to operate on the object. 
That way, the number of references can temporarily go to zero before the object is being passed to the finalizer. +- +-Available since +- +- 2017.1 +- +- +-Default: off. Enable by setting `"analyses": {"SA5005": true}`. +- +-Package documentation: [SA5005](https://staticcheck.dev/docs/checks/#SA5005) +- +- +-## `SA5007`: Infinite recursive call +- +-A function that calls itself recursively needs to have an exit condition. Otherwise it will recurse forever, until the system runs out of memory. +- +-This issue can be caused by simple bugs such as forgetting to add an exit condition. It can also happen "on purpose". Some languages have tail call optimization which makes certain infinite recursive calls safe to use. Go, however, does not implement TCO, and as such a loop should be used instead. +- +-Available since +- +- 2017.1 +- +- +-Default: off. Enable by setting `"analyses": {"SA5007": true}`. +- +-Package documentation: [SA5007](https://staticcheck.dev/docs/checks/#SA5007) +- +- +-## `SA5008`: Invalid struct tag +- +-Available since +- +- 2019.2 +- +- +-Default: on. +- +-Package documentation: [SA5008](https://staticcheck.dev/docs/checks/#SA5008) +- +- +-## `SA5010`: Impossible type assertion +- +-Some type assertions can be statically proven to be impossible. This is the case when the method sets of both arguments of the type assertion conflict with each other, for example by containing the same method with different signatures. +- +-The Go compiler already applies this check when asserting from an interface value to a concrete type. If the concrete type misses methods from the interface, or if function signatures don't match, then the type assertion can never succeed. +- +-This check applies the same logic when asserting from one interface to another. If both interface types contain the same method but with different signatures, then the type assertion can never succeed, either. +- +-Available since +- +- 2020.1 +- +- +-Default: off. Enable by setting `"analyses": {"SA5010": true}`. +- +-Package documentation: [SA5010](https://staticcheck.dev/docs/checks/#SA5010) +- +- +-## `SA5011`: Possible nil pointer dereference +- +-A pointer is being dereferenced unconditionally, while also being checked against nil in another place. This suggests that the pointer may be nil and dereferencing it may panic. This is commonly a result of improperly ordered code or missing return statements. Consider the following examples: +- +- func fn(x *int) { +- fmt.Println(*x) +- +- // This nil check is equally important for the previous dereference +- if x != nil { +- foo(*x) +- } +- } +- +- func TestFoo(t *testing.T) { +- x := compute() +- if x == nil { +- t.Errorf("nil pointer received") +- } +- +- // t.Errorf does not abort the test, so if x is nil, the next line will panic. +- foo(*x) +- } +- +-Staticcheck tries to deduce which functions abort control flow. For example, it is aware that a function will not continue execution after a call to panic or log.Fatal. However, sometimes this detection fails, in particular in the presence of conditionals. Consider the following example: +- +- func Log(msg string, level int) { +- fmt.Println(msg) +- if level == levelFatal { +- os.Exit(1) +- } +- } +- +- func Fatal(msg string) { +- Log(msg, levelFatal) +- } +- +- func fn(x *int) { +- if x == nil { +- Fatal("unexpected nil pointer") +- } +- fmt.Println(*x) +- } +- +-Staticcheck will flag the dereference of x, even though it is perfectly safe. 
Staticcheck is not able to deduce that a call to Fatal will exit the program. For the time being, the easiest workaround is to modify the definition of Fatal like so: +- +- func Fatal(msg string) { +- Log(msg, levelFatal) +- panic("unreachable") +- } +- +-We also hard-code functions from common logging packages such as logrus. Please file an issue if we're missing support for a popular package. +- +-Available since +- +- 2020.1 +- +- +-Default: off. Enable by setting `"analyses": {"SA5011": true}`. +- +-Package documentation: [SA5011](https://staticcheck.dev/docs/checks/#SA5011) +- +- +-## `SA5012`: Passing odd-sized slice to function expecting even size +- +-Some functions that take slices as parameters expect the slices to have an even number of elements. Often, these functions treat elements in a slice as pairs. For example, strings.NewReplacer takes pairs of old and new strings, and calling it with an odd number of elements would be an error. +- +-Available since +- +- 2020.2 +- +- +-Default: off. Enable by setting `"analyses": {"SA5012": true}`. +- +-Package documentation: [SA5012](https://staticcheck.dev/docs/checks/#SA5012) +- +- +-## `SA6000`: Using regexp.Match or related in a loop, should use regexp.Compile +- +-Available since +- +- 2017.1 +- +- +-Default: off. Enable by setting `"analyses": {"SA6000": true}`. +- +-Package documentation: [SA6000](https://staticcheck.dev/docs/checks/#SA6000) +- +- +-## `SA6001`: Missing an optimization opportunity when indexing maps by byte slices +- +-Map keys must be comparable, which precludes the use of byte slices. This usually leads to using string keys and converting byte slices to strings. +- +-Normally, a conversion of a byte slice to a string needs to copy the data and causes allocations. The compiler, however, recognizes m\[string(b)] and uses the data of b directly, without copying it, because it knows that the data can't change during the map lookup. This leads to the counter-intuitive situation that +- +- k := string(b) +- println(m[k]) +- println(m[k]) +- +-will be less efficient than +- +- println(m[string(b)]) +- println(m[string(b)]) +- +-because the first version needs to copy and allocate, while the second one does not. +- +-For some history on this optimization, check out commit f5f5a8b6209f84961687d993b93ea0d397f5d5bf in the Go repository. +- +-Available since +- +- 2017.1 +- +- +-Default: off. Enable by setting `"analyses": {"SA6001": true}`. +- +-Package documentation: [SA6001](https://staticcheck.dev/docs/checks/#SA6001) +- +- +-## `SA6002`: Storing non-pointer values in sync.Pool allocates memory +- +-A sync.Pool is used to avoid unnecessary allocations and reduce the amount of work the garbage collector has to do. +- +-When passing a value that is not a pointer to a function that accepts an interface, the value needs to be placed on the heap, which means an additional allocation. Slices are a common thing to put in sync.Pools, and they're structs with 3 fields (length, capacity, and a pointer to an array). In order to avoid the extra allocation, one should store a pointer to the slice instead. +- +-See the comments on [https://go-review.googlesource.com/c/go/+/24371](https://go-review.googlesource.com/c/go/+/24371) that discuss this problem. +- +-Available since +- +- 2017.1 +- +- +-Default: off. Enable by setting `"analyses": {"SA6002": true}`. 
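A sketch of the pointer-to-slice pattern the SA6002 entry above recommends (the pool, buffer size, and contents are invented for illustration):

    package main

    import (
        "fmt"
        "sync"
    )

    // Storing *[]byte instead of []byte avoids the extra heap allocation SA6002
    // describes: a non-pointer value put into the pool's interface value escapes.
    var bufPool = sync.Pool{
        New: func() any {
            b := make([]byte, 0, 1024)
            return &b
        },
    }

    func main() {
        bp := bufPool.Get().(*[]byte)
        *bp = append((*bp)[:0], "hello"...)
        fmt.Println(string(*bp))
        bufPool.Put(bp)
    }

Callers must remember to Put the pointer back; the pattern trades a little indirection for fewer allocations.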
+- +-Package documentation: [SA6002](https://staticcheck.dev/docs/checks/#SA6002) +- +- +-## `SA6003`: Converting a string to a slice of runes before ranging over it +- +-You may want to loop over the runes in a string. Instead of converting the string to a slice of runes and looping over that, you can loop over the string itself. That is, +- +- for _, r := range s {} +- +-and +- +- for _, r := range []rune(s) {} +- +-will yield the same values. The first version, however, will be faster and avoid unnecessary memory allocations. +- +-Do note that if you are interested in the indices, ranging over a string and over a slice of runes will yield different indices. The first one yields byte offsets, while the second one yields indices in the slice of runes. +- +-Available since +- +- 2017.1 +- +- +-Default: off. Enable by setting `"analyses": {"SA6003": true}`. +- +-Package documentation: [SA6003](https://staticcheck.dev/docs/checks/#SA6003) +- +- +-## `SA6005`: Inefficient string comparison with strings.ToLower or strings.ToUpper +- +-Converting two strings to the same case and comparing them like so +- +- if strings.ToLower(s1) == strings.ToLower(s2) { +- ... +- } +- +-is significantly more expensive than comparing them with strings.EqualFold(s1, s2). This is due to memory usage as well as computational complexity. +- +-strings.ToLower will have to allocate memory for the new strings, as well as convert both strings fully, even if they differ on the very first byte. strings.EqualFold, on the other hand, compares the strings one character at a time. It doesn't need to create two intermediate strings and can return as soon as the first non-matching character has been found. +- +-For a more in-depth explanation of this issue, see [https://blog.digitalocean.com/how-to-efficiently-compare-strings-in-go/](https://blog.digitalocean.com/how-to-efficiently-compare-strings-in-go/) +- +-Available since +- +- 2019.2 +- +- +-Default: on. +- +-Package documentation: [SA6005](https://staticcheck.dev/docs/checks/#SA6005) +- +- +-## `SA6006`: Using io.WriteString to write []byte +- +-Using io.WriteString to write a slice of bytes, as in +- +- io.WriteString(w, string(b)) +- +-is both unnecessary and inefficient. Converting from \[]byte to string has to allocate and copy the data, and we could simply use w.Write(b) instead. +- +-Available since +- +- 2024.1 +- +- +-Default: on. +- +-Package documentation: [SA6006](https://staticcheck.dev/docs/checks/#SA6006) +- +- +-## `SA9001`: Defers in range loops may not run when you expect them to +- +-Available since +- +- 2017.1 +- +- +-Default: off. Enable by setting `"analyses": {"SA9001": true}`. +- +-Package documentation: [SA9001](https://staticcheck.dev/docs/checks/#SA9001) +- +- +-## `SA9002`: Using a non-octal os.FileMode that looks like it was meant to be in octal. +- +-Available since +- +- 2017.1 +- +- +-Default: on. +- +-Package documentation: [SA9002](https://staticcheck.dev/docs/checks/#SA9002) +- +- +-## `SA9003`: Empty body in an if or else branch +- +-Available since +- +- 2017.1, non-default +- +- +-Default: off. Enable by setting `"analyses": {"SA9003": true}`. +- +-Package documentation: [SA9003](https://staticcheck.dev/docs/checks/#SA9003) +- +- +-## `SA9004`: Only the first constant has an explicit type +- +-In a constant declaration such as the following: +- +- const ( +- First byte = 1 +- Second = 2 +- ) +- +-the constant Second does not have the same type as the constant First. 
This construct shouldn't be confused with +- +- const ( +- First byte = iota +- Second +- ) +- +-where First and Second do indeed have the same type. The type is only passed on when no explicit value is assigned to the constant. +- +-When declaring enumerations with explicit values it is therefore important not to write +- +- const ( +- EnumFirst EnumType = 1 +- EnumSecond = 2 +- EnumThird = 3 +- ) +- +-This discrepancy in types can cause various confusing behaviors and bugs. +- +-### Wrong type in variable declarations {#hdr-Wrong_type_in_variable_declarations} +- +-The most obvious issue with such incorrect enumerations expresses itself as a compile error: +- +- package pkg +- +- const ( +- EnumFirst uint8 = 1 +- EnumSecond = 2 +- ) +- +- func fn(useFirst bool) { +- x := EnumSecond +- if useFirst { +- x = EnumFirst +- } +- } +- +-fails to compile with +- +- ./const.go:11:5: cannot use EnumFirst (type uint8) as type int in assignment +- +-### Losing method sets {#hdr-Losing_method_sets} +- +-A more subtle issue occurs with types that have methods and optional interfaces. Consider the following: +- +- package main +- +- import "fmt" +- +- type Enum int +- +- func (e Enum) String() string { +- return "an enum" +- } +- +- const ( +- EnumFirst Enum = 1 +- EnumSecond = 2 +- ) +- +- func main() { +- fmt.Println(EnumFirst) +- fmt.Println(EnumSecond) +- } +- +-This code will output +- +- an enum +- 2 +- +-as EnumSecond has no explicit type, and thus defaults to int. +- +-Available since +- +- 2019.1 +- +- +-Default: on. +- +-Package documentation: [SA9004](https://staticcheck.dev/docs/checks/#SA9004) +- +- +-## `SA9005`: Trying to marshal a struct with no public fields nor custom marshaling +- +-The encoding/json and encoding/xml packages only operate on exported fields in structs, not unexported ones. It is usually an error to try to (un)marshal structs that only consist of unexported fields. +- +-This check will not flag calls involving types that define custom marshaling behavior, e.g. via MarshalJSON methods. It will also not flag empty structs. +- +-Available since +- +- 2019.2 +- +- +-Default: off. Enable by setting `"analyses": {"SA9005": true}`. +- +-Package documentation: [SA9005](https://staticcheck.dev/docs/checks/#SA9005) +- +- +-## `SA9006`: Dubious bit shifting of a fixed size integer value +- +-Bit shifting a value past its size will always clear the value. +- +-For instance: +- +- v := int8(42) +- v >>= 8 +- +-will always result in 0. +- +-This check flags bit shifting operations on fixed size integer values only. That is, int, uint and uintptr are never flagged to avoid potential false positives in somewhat exotic but valid bit twiddling tricks: +- +- // Clear any value above 32 bits if integers are more than 32 bits. +- func f(i int) int { +- v := i >> 32 +- v = v << 32 +- return i-v +- } +- +-Available since +- +- 2020.2 +- +- +-Default: on. +- +-Package documentation: [SA9006](https://staticcheck.dev/docs/checks/#SA9006) +- +- +-## `SA9007`: Deleting a directory that shouldn't be deleted +- +-It is virtually never correct to delete system directories such as /tmp or the user's home directory. However, it can be fairly easy to do by mistake, for example by mistakenly using os.TempDir instead of ioutil.TempDir, or by forgetting to add a suffix to the result of os.UserHomeDir. +- +-Writing +- +- d := os.TempDir() +- defer os.RemoveAll(d) +- +-in your unit tests will have a devastating effect on the stability of your system. 
+- +-This check flags attempts at deleting the following directories: +- +-\- os.TempDir - os.UserCacheDir - os.UserConfigDir - os.UserHomeDir +- +-Available since +- +- 2022.1 +- +- +-Default: off. Enable by setting `"analyses": {"SA9007": true}`. +- +-Package documentation: [SA9007](https://staticcheck.dev/docs/checks/#SA9007) +- +- +-## `SA9008`: else branch of a type assertion is probably not reading the right value +- +-When declaring variables as part of an if statement (like in 'if foo := ...; foo {'), the same variables will also be in the scope of the else branch. This means that in the following example +- +- if x, ok := x.(int); ok { +- // ... +- } else { +- fmt.Printf("unexpected type %T", x) +- } +- +-x in the else branch will refer to the x from x, ok :=; it will not refer to the x that is being type-asserted. The result of a failed type assertion is the zero value of the type that is being asserted to, so x in the else branch will always have the value 0 and the type int. +- +-Available since +- +- 2022.1 +- +- +-Default: off. Enable by setting `"analyses": {"SA9008": true}`. +- +-Package documentation: [SA9008](https://staticcheck.dev/docs/checks/#SA9008) +- +- +-## `SA9009`: Ineffectual Go compiler directive +- +-A potential Go compiler directive was found, but is ineffectual as it begins with whitespace. +- +-Available since +- +- 2024.1 +- +- +-Default: on. +- +-Package documentation: [SA9009](https://staticcheck.dev/docs/checks/#SA9009) +- +- +-## `ST1000`: Incorrect or missing package comment +- +-Packages must have a package comment that is formatted according to the guidelines laid out in [https://go.dev/wiki/CodeReviewComments#package-comments](https://go.dev/wiki/CodeReviewComments#package-comments). +- +-Available since +- +- 2019.1, non-default +- +- +-Default: off. Enable by setting `"analyses": {"ST1000": true}`. +- +-Package documentation: [ST1000](https://staticcheck.dev/docs/checks/#ST1000) +- +- +-## `ST1001`: Dot imports are discouraged +- +-Dot imports that aren't in external test packages are discouraged. +- +-The dot\_import\_whitelist option can be used to whitelist certain imports. +- +-Quoting Go Code Review Comments: +- +-> The import . form can be useful in tests that, due to circular > dependencies, cannot be made part of the package being tested: > > package foo\_test > > import ( > "bar/testutil" // also imports "foo" > . "foo" > ) > > In this case, the test file cannot be in package foo because it > uses bar/testutil, which imports foo. So we use the import . > form to let the file pretend to be part of package foo even though > it is not. Except for this one case, do not use import . in your > programs. It makes the programs much harder to read because it is > unclear whether a name like Quux is a top-level identifier in the > current package or in an imported package. +- +-Available since +- +- 2019.1 +- +-Options +- +- dot_import_whitelist +- +- +-Default: off. Enable by setting `"analyses": {"ST1001": true}`. +- +-Package documentation: [ST1001](https://staticcheck.dev/docs/checks/#ST1001) +- +- +-## `ST1003`: Poorly chosen identifier +- +-Identifiers, such as variable and package names, follow certain rules. 
+- +-See the following links for details: +- +-\- [https://go.dev/doc/effective\_go#package-names](https://go.dev/doc/effective_go#package-names) - [https://go.dev/doc/effective\_go#mixed-caps](https://go.dev/doc/effective_go#mixed-caps) - [https://go.dev/wiki/CodeReviewComments#initialisms](https://go.dev/wiki/CodeReviewComments#initialisms) - [https://go.dev/wiki/CodeReviewComments#variable-names](https://go.dev/wiki/CodeReviewComments#variable-names) +- +-Available since +- +- 2019.1, non-default +- +-Options +- +- initialisms +- +- +-Default: off. Enable by setting `"analyses": {"ST1003": true}`. +- +-Package documentation: [ST1003](https://staticcheck.dev/docs/checks/#ST1003) +- +- +-## `ST1005`: Incorrectly formatted error string +- +-Error strings follow a set of guidelines to ensure uniformity and good composability. +- +-Quoting Go Code Review Comments: +- +-> Error strings should not be capitalized (unless beginning with > proper nouns or acronyms) or end with punctuation, since they are > usually printed following other context. That is, use > fmt.Errorf("something bad") not fmt.Errorf("Something bad"), so > that log.Printf("Reading %s: %v", filename, err) formats without a > spurious capital letter mid-message. +- +-Available since +- +- 2019.1 +- +- +-Default: off. Enable by setting `"analyses": {"ST1005": true}`. +- +-Package documentation: [ST1005](https://staticcheck.dev/docs/checks/#ST1005) +- +- +-## `ST1006`: Poorly chosen receiver name +- +-Quoting Go Code Review Comments: +- +-> The name of a method's receiver should be a reflection of its > identity; often a one or two letter abbreviation of its type > suffices (such as "c" or "cl" for "Client"). Don't use generic > names such as "me", "this" or "self", identifiers typical of > object-oriented languages that place more emphasis on methods as > opposed to functions. The name need not be as descriptive as that > of a method argument, as its role is obvious and serves no > documentary purpose. It can be very short as it will appear on > almost every line of every method of the type; familiarity admits > brevity. Be consistent, too: if you call the receiver "c" in one > method, don't call it "cl" in another. +- +-Available since +- +- 2019.1 +- +- +-Default: off. Enable by setting `"analyses": {"ST1006": true}`. +- +-Package documentation: [ST1006](https://staticcheck.dev/docs/checks/#ST1006) +- +- +-## `ST1008`: A function's error value should be its last return value +- +-A function's error value should be its last return value. +- +-Available since +- +- 2019.1 +- +- +-Default: off. Enable by setting `"analyses": {"ST1008": true}`. +- +-Package documentation: [ST1008](https://staticcheck.dev/docs/checks/#ST1008) +- +- +-## `ST1011`: Poorly chosen name for variable of type time.Duration +- +-time.Duration values represent an amount of time, which is represented as a count of nanoseconds. An expression like 5 \* time.Microsecond yields the value 5000. It is therefore not appropriate to suffix a variable of type time.Duration with any time unit, such as Msec or Milli. +- +-Available since +- +- 2019.1 +- +- +-Default: off. Enable by setting `"analyses": {"ST1011": true}`. +- +-Package documentation: [ST1011](https://staticcheck.dev/docs/checks/#ST1011) +- +- +-## `ST1012`: Poorly chosen name for error variable +- +-Error variables that are part of an API should be called errFoo or ErrFoo. +- +-Available since +- +- 2019.1 +- +- +-Default: off. Enable by setting `"analyses": {"ST1012": true}`. 
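+-
+-As a brief illustrative sketch of the convention ST1012 expects (hypothetical names):
+-
+-    var ErrNotFound = errors.New("user not found")  // ok: exported error named ErrFoo
+-    var errClosed = errors.New("connection closed") // ok: unexported error named errFoo
+-    var InvalidInput = errors.New("invalid input")  // flagged: should be ErrInvalidInput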
+- +-Package documentation: [ST1012](https://staticcheck.dev/docs/checks/#ST1012) +- +- +-## `ST1013`: Should use constants for HTTP error codes, not magic numbers +- +-HTTP has a tremendous number of status codes. While some of those are well known (200, 400, 404, 500), most of them are not. The net/http package provides constants for all status codes that are part of the various specifications. It is recommended to use these constants instead of hard-coding magic numbers, to vastly improve the readability of your code. +- +-Available since +- +- 2019.1 +- +-Options +- +- http_status_code_whitelist +- +- +-Default: off. Enable by setting `"analyses": {"ST1013": true}`. +- +-Package documentation: [ST1013](https://staticcheck.dev/docs/checks/#ST1013) +- +- +-## `ST1015`: A switch's default case should be the first or last case +- +-Available since +- +- 2019.1 +- +- +-Default: off. Enable by setting `"analyses": {"ST1015": true}`. +- +-Package documentation: [ST1015](https://staticcheck.dev/docs/checks/#ST1015) +- +- +-## `ST1016`: Use consistent method receiver names +- +-Available since +- +- 2019.1, non-default +- +- +-Default: off. Enable by setting `"analyses": {"ST1016": true}`. +- +-Package documentation: [ST1016](https://staticcheck.dev/docs/checks/#ST1016) +- +- +-## `ST1017`: Don't use Yoda conditions +- +-Yoda conditions are conditions of the kind 'if 42 == x', where the literal is on the left side of the comparison. These are a common idiom in languages in which assignment is an expression, to avoid bugs of the kind 'if (x = 42)'. In Go, which doesn't allow for this kind of bug, we prefer the more idiomatic 'if x == 42'. +- +-Available since +- +- 2019.2 +- +- +-Default: off. Enable by setting `"analyses": {"ST1017": true}`. +- +-Package documentation: [ST1017](https://staticcheck.dev/docs/checks/#ST1017) +- +- +-## `ST1018`: Avoid zero-width and control characters in string literals +- +-Available since +- +- 2019.2 +- +- +-Default: off. Enable by setting `"analyses": {"ST1018": true}`. +- +-Package documentation: [ST1018](https://staticcheck.dev/docs/checks/#ST1018) +- +- +-## `ST1019`: Importing the same package multiple times +- +-Go allows importing the same package multiple times, as long as different import aliases are being used. That is, the following bit of code is valid: +- +- import ( +- "fmt" +- fumpt "fmt" +- format "fmt" +- _ "fmt" +- ) +- +-However, this is very rarely done on purpose. Usually, it is a sign of code that got refactored, accidentally adding duplicate import statements. It is also a rarely known feature, which may contribute to confusion. +- +-Do note that sometimes, this feature may be used intentionally (see for example [https://github.com/golang/go/commit/3409ce39bfd7584523b7a8c150a310cea92d879d](https://github.com/golang/go/commit/3409ce39bfd7584523b7a8c150a310cea92d879d)) – if you want to allow this pattern in your code base, you're advised to disable this check. +- +-Available since +- +- 2020.1 +- +- +-Default: off. Enable by setting `"analyses": {"ST1019": true}`. +- +-Package documentation: [ST1019](https://staticcheck.dev/docs/checks/#ST1019) +- +- +-## `ST1020`: The documentation of an exported function should start with the function's name +- +-Doc comments work best as complete sentences, which allow a wide variety of automated presentations. The first sentence should be a one-sentence summary that starts with the name being declared. 
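+-
+-For instance, a doc comment following this convention might look like the sketch below (the function and type names are hypothetical):
+-
+-    // ParseConfig reads a configuration from r and returns the parsed result.
+-    func ParseConfig(r io.Reader) (*Config, error) { ... }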
+- +-If every doc comment begins with the name of the item it describes, you can use the doc subcommand of the go tool and run the output through grep. +- +-See [https://go.dev/doc/effective\_go#commentary](https://go.dev/doc/effective_go#commentary) for more information on how to write good documentation. +- +-Available since +- +- 2020.1, non-default +- +- +-Default: off. Enable by setting `"analyses": {"ST1020": true}`. +- +-Package documentation: [ST1020](https://staticcheck.dev/docs/checks/#ST1020) +- +- +-## `ST1021`: The documentation of an exported type should start with type's name +- +-Doc comments work best as complete sentences, which allow a wide variety of automated presentations. The first sentence should be a one-sentence summary that starts with the name being declared. +- +-If every doc comment begins with the name of the item it describes, you can use the doc subcommand of the go tool and run the output through grep. +- +-See [https://go.dev/doc/effective\_go#commentary](https://go.dev/doc/effective_go#commentary) for more information on how to write good documentation. +- +-Available since +- +- 2020.1, non-default +- +- +-Default: off. Enable by setting `"analyses": {"ST1021": true}`. +- +-Package documentation: [ST1021](https://staticcheck.dev/docs/checks/#ST1021) +- +- +-## `ST1022`: The documentation of an exported variable or constant should start with variable's name +- +-Doc comments work best as complete sentences, which allow a wide variety of automated presentations. The first sentence should be a one-sentence summary that starts with the name being declared. +- +-If every doc comment begins with the name of the item it describes, you can use the doc subcommand of the go tool and run the output through grep. +- +-See [https://go.dev/doc/effective\_go#commentary](https://go.dev/doc/effective_go#commentary) for more information on how to write good documentation. +- +-Available since +- +- 2020.1, non-default +- +- +-Default: off. Enable by setting `"analyses": {"ST1022": true}`. +- +-Package documentation: [ST1022](https://staticcheck.dev/docs/checks/#ST1022) +- +- +-## `ST1023`: Redundant type in variable declaration +- +-Available since +- +- 2021.1, non-default +- +- +-Default: off. Enable by setting `"analyses": {"ST1023": true}`. +- +-Package documentation: [ST1023](https://staticcheck.dev/docs/checks/#) +- +- +-## `any`: replace interface{} with any +- +-The any analyzer suggests replacing uses of the empty interface type, \`interface{}\`, with the \`any\` alias, which was introduced in Go 1.18. This is a purely stylistic change that makes code more readable. +- +- +-Default: on. +- +-Package documentation: [any](https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/modernize#any) +- +- +-## `appendclipped`: simplify append chains using slices.Concat +- +-The appendclipped analyzer suggests replacing chains of append calls with a single call to slices.Concat, which was added in Go 1.21. For example, append(append(s, s1...), s2...) would be simplified to slices.Concat(s, s1, s2). +- +-In the simple case of appending to a newly allocated slice, such as append(\[]T(nil), s...), the analyzer suggests the more concise slices.Clone(s). For byte slices, it will prefer bytes.Clone if the "bytes" package is already imported. +- +-This fix is only applied when the base of the append tower is a "clipped" slice, meaning its length and capacity are equal (e.g. x\[:0:0] or \[]T{}). 
This is to avoid changing program behavior by eliminating intended side effects on the base slice's underlying array. +- +-This analyzer is currently disabled by default as the transformation does not preserve the nilness of the base slice in all cases; see [https://go.dev/issue/73557](https://go.dev/issue/73557). +- +- +-Default: off. Enable by setting `"analyses": {"appendclipped": true}`. +- +-Package documentation: [appendclipped](https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/modernize#appendclipped) +- +- +-## `appends`: check for missing values after append +- +-This checker reports calls to append that pass no values to be appended to the slice. +- +- s := []string{"a", "b", "c"} +- _ = append(s) +- +-Such calls are always no-ops and often indicate an underlying mistake. +- +- +-Default: on. +- +-Package documentation: [appends](https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/appends) +- +- +-## `asmdecl`: report mismatches between assembly files and Go declarations +- +- +- +-Default: on. +- +-Package documentation: [asmdecl](https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/asmdecl) +- +- +-## `assign`: check for useless assignments +- +-This checker reports assignments of the form x = x or a\[i] = a\[i]. These are almost always useless, and even when they aren't they are usually a mistake. +- +- +-Default: on. +- +-Package documentation: [assign](https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/assign) +- +- +-## `atomic`: check for common mistakes using the sync/atomic package +- +-The atomic checker looks for assignment statements of the form: +- +- x = atomic.AddUint64(&x, 1) +- +-which are not atomic. +- +- +-Default: on. +- +-Package documentation: [atomic](https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/atomic) +- +- +-## `atomicalign`: check for non-64-bits-aligned arguments to sync/atomic functions +- +- +- +-Default: on. +- +-Package documentation: [atomicalign](https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/atomicalign) +- +- +-## `bloop`: replace for-range over b.N with b.Loop +- +-The bloop analyzer suggests replacing benchmark loops of the form \`for i := 0; i \< b.N; i++\` or \`for range b.N\` with the more modern \`for b.Loop()\`, which was added in Go 1.24. +- +-This change makes benchmark code more readable and also removes the need for manual timer control, so any preceding calls to b.StartTimer, b.StopTimer, or b.ResetTimer within the same function will also be removed. +- +-Caveats: The b.Loop() method is designed to prevent the compiler from optimizing away the benchmark loop, which can occasionally result in slower execution due to increased allocations in some specific cases. +- +- +-Default: on. +- +-Package documentation: [bloop](https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/modernize#bloop) +- +- +-## `bools`: check for common mistakes involving boolean operators +- +- +- +-Default: on. +- +-Package documentation: [bools](https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/bools) +- +- +-## `buildtag`: check //go:build and // +build directives +- +- +- +-Default: on. +- +-Package documentation: [buildtag](https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/buildtag) +- +- +-## `cgocall`: detect some violations of the cgo pointer passing rules +- +-Check for invalid cgo pointer passing. This looks for code that uses cgo to call C code passing values whose types are almost always invalid according to the cgo pointer sharing rules. 
Specifically, it warns about attempts to pass a Go chan, map, func, or slice to C, either directly, or via a pointer, array, or struct. +- +- +-Default: on. +- +-Package documentation: [cgocall](https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/cgocall) +- +- +-## `composites`: check for unkeyed composite literals +- +-This analyzer reports a diagnostic for composite literals of struct types imported from another package that do not use the field-keyed syntax. Such literals are fragile because the addition of a new field (even if unexported) to the struct will cause compilation to fail. +- +-As an example, +- +- err = &net.DNSConfigError{err} +- +-should be replaced by: +- +- err = &net.DNSConfigError{Err: err} +- +- +-Default: on. +- +-Package documentation: [composites](https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/composite) +- +- +-## `copylocks`: check for locks erroneously passed by value +- +-Inadvertently copying a value containing a lock, such as sync.Mutex or sync.WaitGroup, may cause both copies to malfunction. Generally such values should be referred to through a pointer. +- +- +-Default: on. +- +-Package documentation: [copylocks](https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/copylock) +- +- +-## `deepequalerrors`: check for calls of reflect.DeepEqual on error values +- +-The deepequalerrors checker looks for calls of the form: +- +- reflect.DeepEqual(err1, err2) +- +-where err1 and err2 are errors. Using reflect.DeepEqual to compare errors is discouraged. +- +- +-Default: on. +- +-Package documentation: [deepequalerrors](https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/deepequalerrors) +- +- +-## `defers`: report common mistakes in defer statements +- +-The defers analyzer reports a diagnostic when a defer statement would result in a non-deferred call to time.Since, as experience has shown that this is nearly always a mistake. +- +-For example: +- +- start := time.Now() +- ... +- defer recordLatency(time.Since(start)) // error: call to time.Since is not deferred +- +-The correct code is: +- +- defer func() { recordLatency(time.Since(start)) }() +- +- +-Default: on. +- +-Package documentation: [defers](https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/defers) +- +- +-## `deprecated`: check for use of deprecated identifiers +- +-The deprecated analyzer looks for deprecated symbols and package imports. +- +-See [https://go.dev/wiki/Deprecated](https://go.dev/wiki/Deprecated) to learn about Go's convention for documenting and signaling deprecated identifiers. +- +- +-Default: on. +- +-Package documentation: [deprecated](https://pkg.go.dev/golang.org/x/tools/gopls/internal/analysis/deprecated) +- +- +-## `directive`: check Go toolchain directives such as //go:debug +- +-This analyzer checks for problems with known Go toolchain directives in all Go source files in a package directory, even those excluded by //go:build constraints, and all non-Go source files too. +- +-For //go:debug (see [https://go.dev/doc/godebug](https://go.dev/doc/godebug)), the analyzer checks that the directives are placed only in Go source files, only above the package comment, and only in package main or \*\_test.go files. +- +-Support for other known directives may be added in the future. +- +-This analyzer does not check //go:build, which is handled by the buildtag analyzer. +- +- +-Default: on. 
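+-
+-As a minimal sketch of a placement this analyzer accepts, a //go:debug directive sits above the package comment of a main package (http2client is one of the documented GODEBUG settings; the command name is hypothetical):
+-
+-    //go:debug http2client=0
+-
+-    // Command server starts the demo HTTP server.
+-    package main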
+- +-Package documentation: [directive](https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/directive) +- +- +-## `embed`: check //go:embed directive usage +- +-This analyzer checks that the embed package is imported if //go:embed directives are present, providing a suggested fix to add the import if it is missing. +- +-This analyzer also checks that //go:embed directives precede the declaration of a single variable. +- +- +-Default: on. +- +-Package documentation: [embed](https://pkg.go.dev/golang.org/x/tools/gopls/internal/analysis/embeddirective) +- +- +-## `errorsas`: report passing non-pointer or non-error values to errors.As +- +-The errorsas analyzer reports calls to errors.As where the type of the second argument is not a pointer to a type implementing error. +- +- +-Default: on. +- +-Package documentation: [errorsas](https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/errorsas) +- +- +-## `errorsastype`: replace errors.As with errors.AsType[T] +- +-This analyzer suggests fixes to simplify uses of [errors.As](/errors#As) of this form: +- +- var myerr *MyErr +- if errors.As(err, &myerr) { +- handle(myerr) +- } +- +-by using the less error-prone generic [errors.AsType](/errors#AsType) function, introduced in Go 1.26: +- +- if myerr, ok := errors.AsType[*MyErr](err); ok { +- handle(myerr) +- } +- +-The fix is only offered if the var declaration has the form shown and there are no uses of myerr outside the if statement. +- +- +-Default: on. +- +-Package documentation: [errorsastype](https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/modernize#errorsastype) +- +- +-## `fillreturns`: suggest fixes for errors due to an incorrect number of return values +- +-This checker provides suggested fixes for type errors of the type "wrong number of return values (want %d, got %d)". For example: +- +- func m() (int, string, *bool, error) { +- return +- } +- +-will turn into +- +- func m() (int, string, *bool, error) { +- return 0, "", nil, nil +- } +- +-This functionality is similar to [https://github.com/sqs/goreturns](https://github.com/sqs/goreturns). +- +- +-Default: on. +- +-Package documentation: [fillreturns](https://pkg.go.dev/golang.org/x/tools/gopls/internal/analysis/fillreturns) +- +- +-## `fmtappendf`: replace []byte(fmt.Sprintf) with fmt.Appendf +- +-The fmtappendf analyzer suggests replacing \`\[]byte(fmt.Sprintf(...))\` with \`fmt.Appendf(nil, ...)\`. This avoids the intermediate allocation of a string by Sprintf, making the code more efficient. The suggestion also applies to fmt.Sprint and fmt.Sprintln. +- +- +-Default: on. +- +-Package documentation: [fmtappendf](https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/modernize#fmtappendf) +- +- +-## `forvar`: remove redundant re-declaration of loop variables +- +-The forvar analyzer removes unnecessary shadowing of loop variables. Before Go 1.22, it was common to write \`for \_, x := range s { x := x ... }\` to create a fresh variable for each iteration. Go 1.22 changed the semantics of \`for\` loops, making this pattern redundant. This analyzer removes the unnecessary \`x := x\` statement. +- +-This fix only applies to \`range\` loops. +- +- +-Default: on. +- +-Package documentation: [forvar](https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/modernize#forvar) +- +- +-## `framepointer`: report assembly that clobbers the frame pointer before saving it +- +- +- +-Default: on. 
+- +-Package documentation: [framepointer](https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/framepointer) +- +- +-## `hostport`: check format of addresses passed to net.Dial +- +-This analyzer flags code that produce network address strings using fmt.Sprintf, as in this example: +- +- addr := fmt.Sprintf("%s:%d", host, 12345) // "will not work with IPv6" +- ... +- conn, err := net.Dial("tcp", addr) // "when passed to dial here" +- +-The analyzer suggests a fix to use the correct approach, a call to net.JoinHostPort: +- +- addr := net.JoinHostPort(host, "12345") +- ... +- conn, err := net.Dial("tcp", addr) +- +-A similar diagnostic and fix are produced for a format string of "%s:%s". +- +- +-Default: on. +- +-Package documentation: [hostport](https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/hostport) +- +- +-## `httpresponse`: check for mistakes using HTTP responses +- +-A common mistake when using the net/http package is to defer a function call to close the http.Response Body before checking the error that determines whether the response is valid: +- +- resp, err := http.Head(url) +- defer resp.Body.Close() +- if err != nil { +- log.Fatal(err) +- } +- // (defer statement belongs here) +- +-This checker helps uncover latent nil dereference bugs by reporting a diagnostic for such mistakes. +- +- +-Default: on. +- +-Package documentation: [httpresponse](https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/httpresponse) +- +- +-## `ifaceassert`: detect impossible interface-to-interface type assertions +- +-This checker flags type assertions v.(T) and corresponding type-switch cases in which the static type V of v is an interface that cannot possibly implement the target interface T. This occurs when V and T contain methods with the same name but different signatures. Example: +- +- var v interface { +- Read() +- } +- _ = v.(io.Reader) +- +-The Read method in v has a different signature than the Read method in io.Reader, so this assertion cannot succeed. +- +- +-Default: on. +- +-Package documentation: [ifaceassert](https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/ifaceassert) +- +- +-## `infertypeargs`: check for unnecessary type arguments in call expressions +- +-Explicit type arguments may be omitted from call expressions if they can be inferred from function arguments, or from other type arguments: +- +- func f[T any](T) {} +- +- func _() { +- f[string]("foo") // string could be inferred +- } +- +- +-Default: on. +- +-Package documentation: [infertypeargs](https://pkg.go.dev/golang.org/x/tools/gopls/internal/analysis/infertypeargs) +- +- +-## `inline`: apply fixes based on 'go:fix inline' comment directives +- +-The inline analyzer inlines functions and constants that are marked for inlining. +- +-\## Functions +- +-Given a function that is marked for inlining, like this one: +- +- //go:fix inline +- func Square(x int) int { return Pow(x, 2) } +- +-this analyzer will recommend that calls to the function elsewhere, in the same or other packages, should be inlined. +- +-Inlining can be used to move off of a deprecated function: +- +- // Deprecated: prefer Pow(x, 2). 
+- //go:fix inline +- func Square(x int) int { return Pow(x, 2) } +- +-It can also be used to move off of an obsolete package, as when the import path has changed or a higher major version is available: +- +- package pkg +- +- import pkg2 "pkg/v2" +- +- //go:fix inline +- func F() { pkg2.F(nil) } +- +-Replacing a call pkg.F() by pkg2.F(nil) can have no effect on the program, so this mechanism provides a low-risk way to update large numbers of calls. We recommend, where possible, expressing the old API in terms of the new one to enable automatic migration. +- +-The inliner takes care to avoid behavior changes, even subtle ones, such as changes to the order in which argument expressions are evaluated. When it cannot safely eliminate all parameter variables, it may introduce a "binding declaration" of the form +- +- var params = args +- +-to evaluate argument expressions in the correct order and bind them to parameter variables. Since the resulting code transformation may be stylistically suboptimal, such inlinings may be disabled by specifying the -inline.allow\_binding\_decl=false flag to the analyzer driver. +- +-(In cases where it is not safe to "reduce" a call—that is, to replace a call f(x) by the body of function f, suitably substituted—the inliner machinery is capable of replacing f by a function literal, func(){...}(). However, the inline analyzer discards all such "literalizations" unconditionally, again on grounds of style.) +- +-\## Constants +- +-Given a constant that is marked for inlining, like this one: +- +- //go:fix inline +- const Ptr = Pointer +- +-this analyzer will recommend that uses of Ptr should be replaced with Pointer. +- +-As with functions, inlining can be used to replace deprecated constants and constants in obsolete packages. +- +-A constant definition can be marked for inlining only if it refers to another named constant. +- +-The "//go:fix inline" comment must appear before a single const declaration on its own, as above; before a const declaration that is part of a group, as in this case: +- +- const ( +- C = 1 +- //go:fix inline +- Ptr = Pointer +- ) +- +-or before a group, applying to every constant in the group: +- +- //go:fix inline +- const ( +- Ptr = Pointer +- Val = Value +- ) +- +-The proposal [https://go.dev/issue/32816](https://go.dev/issue/32816) introduces the "//go:fix inline" directives. +- +-You can use this command to apply inline fixes en masse: +- +- $ go run golang.org/x/tools/go/analysis/passes/inline/cmd/inline@latest -fix ./... +- +- +-Default: on. +- +-Package documentation: [inline](https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/inline) +- +- +-## `loopclosure`: check references to loop variables from within nested functions +- +-This analyzer reports places where a function literal references the iteration variable of an enclosing loop, and the loop calls the function in such a way (e.g. with go or defer) that it may outlive the loop iteration and possibly observe the wrong value of the variable. +- +-Note: An iteration variable can only outlive a loop iteration in Go versions \<=1.21. In Go 1.22 and later, the loop variable lifetimes changed to create a new iteration variable per loop iteration. (See go.dev/issue/60078.) 
+- +-In this example, all the deferred functions run after the loop has completed, so all observe the final value of v \[\ +-## `lostcancel`: check cancel func returned by context.WithCancel is called +- +-The cancellation function returned by context.WithCancel, WithTimeout, WithDeadline and variants such as WithCancelCause must be called, or the new context will remain live until its parent context is cancelled. (The background context is never cancelled.) +- +- +-Default: on. +- +-Package documentation: [lostcancel](https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/lostcancel) +- +- +-## `maprange`: checks for unnecessary calls to maps.Keys and maps.Values in range statements +- +-Consider a loop written like this: +- +- for val := range maps.Values(m) { +- fmt.Println(val) +- } +- +-This should instead be written without the call to maps.Values: +- +- for _, val := range m { +- fmt.Println(val) +- } +- +-golang.org/x/exp/maps returns slices for Keys/Values instead of iterators, but unnecessary calls should similarly be removed: +- +- for _, key := range maps.Keys(m) { +- fmt.Println(key) +- } +- +-should be rewritten as: +- +- for key := range m { +- fmt.Println(key) +- } +- +- +-Default: on. +- +-Package documentation: [maprange](https://pkg.go.dev/golang.org/x/tools/gopls/internal/analysis/maprange) +- +- +-## `mapsloop`: replace explicit loops over maps with calls to maps package +- +-The mapsloop analyzer replaces loops of the form +- +- for k, v := range x { m[k] = v } +- +-with a single call to a function from the \`maps\` package, added in Go 1.23. Depending on the context, this could be \`maps.Copy\`, \`maps.Insert\`, \`maps.Clone\`, or \`maps.Collect\`. +- +-The transformation to \`maps.Clone\` is applied conservatively, as it preserves the nilness of the source map, which may be a subtle change in behavior if the original code did not handle a nil map in the same way. +- +- +-Default: on. +- +-Package documentation: [mapsloop](https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/modernize#mapsloop) +- +- +-## `minmax`: replace if/else statements with calls to min or max +- +-The minmax analyzer simplifies conditional assignments by suggesting the use of the built-in \`min\` and \`max\` functions, introduced in Go 1.21. For example, +- +- if a < b { x = a } else { x = b } +- +-is replaced by +- +- x = min(a, b). +- +-This analyzer avoids making suggestions for floating-point types, as the behavior of \`min\` and \`max\` with NaN values can differ from the original if/else statement. +- +- +-Default: on. +- +-Package documentation: [minmax](https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/modernize#minmax) +- +- +-## `newexpr`: simplify code by using go1.26's new(expr) +- +-This analyzer finds declarations of functions of this form: +- +- func varOf(x int) *int { return &x } +- +-and suggests a fix to turn them into inlinable wrappers around go1.26's built-in new(expr) function: +- +- func varOf(x int) *int { return new(x) } +- +-In addition, this analyzer suggests a fix for each call to one of the functions before it is transformed, so that +- +- use(varOf(123)) +- +-is replaced by: +- +- use(new(123)) +- +-(Wrapper functions such as varOf are common when working with Go serialization packages such as for JSON or protobuf, where pointers are often used to express optionality.) +- +- +-Default: on. 
+- +-Package documentation: [newexpr](https://pkg.go.dev/golang.org/x/tools/gopls/internal/analysis/modernize#newexpr) +- +- +-## `nilfunc`: check for useless comparisons between functions and nil +- +-A useless comparison is one like f == nil as opposed to f() == nil. +- +- +-Default: on. +- +-Package documentation: [nilfunc](https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/nilfunc) +- +- +-## `nilness`: check for redundant or impossible nil comparisons +- +-The nilness checker inspects the control-flow graph of each function in a package and reports nil pointer dereferences, degenerate nil pointers, and panics with nil values. A degenerate comparison is of the form x==nil or x!=nil where x is statically known to be nil or non-nil. These are often a mistake, especially in control flow related to errors. Panics with nil values are checked because they are not detectable by +- +- if r := recover(); r != nil { +- +-This check reports conditions such as: +- +- if f == nil { // impossible condition (f is a function) +- } +- +-and: +- +- p := &v +- ... +- if p != nil { // tautological condition +- } +- +-and: +- +- if p == nil { +- print(*p) // nil dereference +- } +- +-and: +- +- if p == nil { +- panic(p) +- } +- +-Sometimes the control flow may be quite complex, making bugs hard to spot. In the example below, the err.Error expression is guaranteed to panic because, after the first return, err must be nil. The intervening loop is just a distraction. +- +- ... +- err := g.Wait() +- if err != nil { +- return err +- } +- partialSuccess := false +- for _, err := range errs { +- if err == nil { +- partialSuccess = true +- break +- } +- } +- if partialSuccess { +- reportStatus(StatusMessage{ +- Code: code.ERROR, +- Detail: err.Error(), // "nil dereference in dynamic method call" +- }) +- return nil +- } +- +-... +- +- +-Default: on. +- +-Package documentation: [nilness](https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/nilness) +- +- +-## `nonewvars`: suggested fixes for "no new vars on left side of :=" +- +-This checker provides suggested fixes for type errors of the type "no new vars on left side of :=". For example: +- +- z := 1 +- z := 2 +- +-will turn into +- +- z := 1 +- z = 2 +- +- +-Default: on. +- +-Package documentation: [nonewvars](https://pkg.go.dev/golang.org/x/tools/gopls/internal/analysis/nonewvars) +- +- +-## `noresultvalues`: suggested fixes for unexpected return values +- +-This checker provides suggested fixes for type errors of the type "no result values expected" or "too many return values". For example: +- +- func z() { return nil } +- +-will turn into +- +- func z() { return } +- +- +-Default: on. +- +-Package documentation: [noresultvalues](https://pkg.go.dev/golang.org/x/tools/gopls/internal/analysis/noresultvalues) +- +- +-## `omitzero`: suggest replacing omitempty with omitzero for struct fields +- +-The omitzero analyzer identifies uses of the \`omitempty\` JSON struct tag on fields that are themselves structs. The \`omitempty\` tag has no effect on struct-typed fields. The analyzer offers two suggestions: either remove the tag, or replace it with \`omitzero\` (added in Go 1.24), which correctly omits the field if the struct value is zero. +- +-Replacing \`omitempty\` with \`omitzero\` is a change in behavior. The original code would always encode the struct field, whereas the modified code will omit it if it is a zero-value. +- +- +-Default: on. 
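+-
+-A small sketch of the change this analyzer suggests (the struct and field names are hypothetical):
+-
+-    type Profile struct {
+-        Address Address `json:"address,omitempty"` // omitempty has no effect: Address is a struct
+-    }
+-
+-    // suggested replacement (Go 1.24+):
+-    type Profile struct {
+-        Address Address `json:"address,omitzero"` // omitted when Address is the zero value
+-    }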
+- +-Package documentation: [omitzero](https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/modernize#omitzero) +- +- +-## `printf`: check consistency of Printf format strings and arguments +- +-The check applies to calls of the formatting functions such as [fmt.Printf](/fmt#Printf) and [fmt.Sprintf](/fmt#Sprintf), as well as any detected wrappers of those functions such as [log.Printf](/log#Printf). It reports a variety of mistakes such as syntax errors in the format string and mismatches (of number and type) between the verbs and their arguments. +- +-See the documentation of the fmt package for the complete set of format operators and their operand types. +- +- +-Default: on. +- +-Package documentation: [printf](https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/printf) +- +- +-## `rangeint`: replace 3-clause for loops with for-range over integers +- +-The rangeint analyzer suggests replacing traditional for loops such as +- +- for i := 0; i < n; i++ { ... } +- +-with the more idiomatic Go 1.22 style: +- +- for i := range n { ... } +- +-This transformation is applied only if (a) the loop variable is not modified within the loop body and (b) the loop's limit expression is not modified within the loop, as \`for range\` evaluates its operand only once. +- +- +-Default: on. +- +-Package documentation: [rangeint](https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/modernize#rangeint) +- +- +-## `recursiveiter`: check for inefficient recursive iterators +- +-This analyzer reports when a function that returns an iterator (iter.Seq or iter.Seq2) calls itself as the operand of a range statement, as this is inefficient. +- +-When implementing an iterator (e.g. iter.Seq\[T]) for a recursive data type such as a tree or linked list, it is tempting to recursively range over the iterator for each child element. +- +-Here's an example of a naive iterator over a binary tree: +- +- type tree struct { +- value int +- left, right *tree +- } +- +- func (t *tree) All() iter.Seq[int] { +- return func(yield func(int) bool) { +- if t != nil { +- for elem := range t.left.All() { // "inefficient recursive iterator" +- if !yield(elem) { +- return +- } +- } +- if !yield(t.value) { +- return +- } +- for elem := range t.right.All() { // "inefficient recursive iterator" +- if !yield(elem) { +- return +- } +- } +- } +- } +- } +- +-Though it correctly enumerates the elements of the tree, it hides a significant performance problem--two, in fact. Consider a balanced tree of N nodes. Iterating the root node will cause All to be called once on every node of the tree. This results in a chain of nested active range-over-func statements when yield(t.value) is called on a leaf node. +- +-The first performance problem is that each range-over-func statement must typically heap-allocate a variable, so iteration of the tree allocates as many variables as there are elements in the tree, for a total of O(N) allocations, all unnecessary. +- +-The second problem is that each call to yield for a leaf of the tree causes each of the enclosing range loops to receive a value, which they then immediately pass on to their respective yield function. This results in a chain of log(N) dynamic yield calls per element, a total of O(N\*log N) dynamic calls overall, when only O(N) are necessary. +- +-A better implementation strategy for recursive iterators is to first define the "every" operator for your recursive data type, where every(f) reports whether an arbitrary predicate f(x) is true for every element x in the data type. 
For our tree, the every function would be: +- +- func (t *tree) every(f func(int) bool) bool { +- return t == nil || +- t.left.every(f) && f(t.value) && t.right.every(f) +- } +- +-For example, this use of the every operator prints whether every element in the tree is an even number: +- +- even := func(x int) bool { return x&1 == 0 } +- println(t.every(even)) +- +-Then the iterator can be simply expressed as a trivial wrapper around the every operator: +- +- func (t *tree) All() iter.Seq[int] { +- return func(yield func(int) bool) { +- _ = t.every(yield) +- } +- } +- +-In effect, tree.All computes whether yield returns true for each element, short-circuiting if it ever returns false, then discards the final boolean result. +- +-This has much better performance characteristics: it makes one dynamic call per element of the tree, and it doesn't heap-allocate anything. It is also clearer. +- +- +-Default: on. +- +-Package documentation: [recursiveiter](https://pkg.go.dev/golang.org/x/tools/gopls/internal/analysis/recursiveiter) +- +- +-## `reflecttypefor`: replace reflect.TypeOf(x) with TypeFor[T]() +- +-This analyzer suggests fixes to replace uses of reflect.TypeOf(x) with reflect.TypeFor, introduced in go1.22, when the desired runtime type is known at compile time, for example: +- +- reflect.TypeOf(uint32(0)) -> reflect.TypeFor[uint32]() +- reflect.TypeOf((*ast.File)(nil)) -> reflect.TypeFor[*ast.File]() +- +-It also offers a fix to simplify the construction below, which uses reflect.TypeOf to return the runtime type for an interface type, +- +- reflect.TypeOf((*io.Reader)(nil)).Elem() +- +-to: +- +- reflect.TypeFor[io.Reader]() +- +-No fix is offered in cases when the runtime type is dynamic, such as: +- +- var r io.Reader = ... +- reflect.TypeOf(r) +- +-or when the operand has potential side effects. +- +- +-Default: on. +- +-Package documentation: [reflecttypefor](https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/modernize#reflecttypefor) +- +- +-## `shadow`: check for possible unintended shadowing of variables +- +-This analyzer check for shadowed variables. A shadowed variable is a variable declared in an inner scope with the same name and type as a variable in an outer scope, and where the outer variable is mentioned after the inner one is declared. +- +-(This definition can be refined; the module generates too many false positives and is not yet enabled by default.) +- +-For example: +- +- func BadRead(f *os.File, buf []byte) error { +- var err error +- for { +- n, err := f.Read(buf) // shadows the function variable 'err' +- if err != nil { +- break // causes return of wrong value +- } +- foo(buf) +- } +- return err +- } +- +- +-Default: off. Enable by setting `"analyses": {"shadow": true}`. +- +-Package documentation: [shadow](https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/shadow) +- +- +-## `shift`: check for shifts that equal or exceed the width of the integer +- +- +- +-Default: on. +- +-Package documentation: [shift](https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/shift) +- +- +-## `sigchanyzer`: check for unbuffered channel of os.Signal +- +-This checker reports call expression of the form +- +- signal.Notify(c <-chan os.Signal, sig ...os.Signal), +- +-where c is an unbuffered channel, which can be at risk of missing the signal. +- +- +-Default: on. 
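+-
+-The conventional fix, sketched here, is to give the channel a buffer of at least 1 so that a signal delivered before the receiver is ready is not dropped:
+-
+-    c := make(chan os.Signal, 1)
+-    signal.Notify(c, os.Interrupt)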
+- +-Package documentation: [sigchanyzer](https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/sigchanyzer) +- +- +-## `simplifycompositelit`: check for composite literal simplifications +- +-An array, slice, or map composite literal of the form: +- +- []T{T{}, T{}} +- +-will be simplified to: +- +- []T{{}, {}} +- +-This is one of the simplifications that "gofmt -s" applies. +- +-This analyzer ignores generated code. +- +- +-Default: on. +- +-Package documentation: [simplifycompositelit](https://pkg.go.dev/golang.org/x/tools/gopls/internal/analysis/simplifycompositelit) +- +- +-## `simplifyrange`: check for range statement simplifications +- +-A range of the form: +- +- for x, _ = range v {...} +- +-will be simplified to: +- +- for x = range v {...} +- +-A range of the form: +- +- for _ = range v {...} +- +-will be simplified to: +- +- for range v {...} +- +-This is one of the simplifications that "gofmt -s" applies. +- +-This analyzer ignores generated code. +- +- +-Default: on. +- +-Package documentation: [simplifyrange](https://pkg.go.dev/golang.org/x/tools/gopls/internal/analysis/simplifyrange) +- +- +-## `simplifyslice`: check for slice simplifications +- +-A slice expression of the form: +- +- s[a:len(s)] +- +-will be simplified to: +- +- s[a:] +- +-This is one of the simplifications that "gofmt -s" applies. +- +-This analyzer ignores generated code. +- +- +-Default: on. +- +-Package documentation: [simplifyslice](https://pkg.go.dev/golang.org/x/tools/gopls/internal/analysis/simplifyslice) +- +- +-## `slicescontains`: replace loops with slices.Contains or slices.ContainsFunc +- +-The slicescontains analyzer simplifies loops that check for the existence of an element in a slice. It replaces them with calls to \`slices.Contains\` or \`slices.ContainsFunc\`, which were added in Go 1.21. +- +-If the expression for the target element has side effects, this transformation will cause those effects to occur only once, not once per tested slice element. +- +- +-Default: on. +- +-Package documentation: [slicescontains](https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/modernize#slicescontains) +- +- +-## `slicesdelete`: replace append-based slice deletion with slices.Delete +- +-The slicesdelete analyzer suggests replacing the idiom +- +- s = append(s[:i], s[j:]...) +- +-with the more explicit +- +- s = slices.Delete(s, i, j) +- +-introduced in Go 1.21. +- +-This analyzer is disabled by default. The \`slices.Delete\` function zeros the elements between the new length and the old length of the slice to prevent memory leaks, which is a subtle difference in behavior compared to the append-based idiom; see [https://go.dev/issue/73686](https://go.dev/issue/73686). +- +- +-Default: off. Enable by setting `"analyses": {"slicesdelete": true}`. +- +-Package documentation: [slicesdelete](https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/modernize#slicesdelete) +- +- +-## `slicessort`: replace sort.Slice with slices.Sort for basic types +- +-The slicessort analyzer simplifies sorting slices of basic ordered types. It replaces +- +- sort.Slice(s, func(i, j int) bool { return s[i] < s[j] }) +- +-with the simpler \`slices.Sort(s)\`, which was added in Go 1.21. +- +- +-Default: on. +- +-Package documentation: [slicessort](https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/modernize#slicessort) +- +- +-## `slog`: check for invalid structured logging calls +- +-The slog checker looks for calls to functions from the log/slog package that take alternating key-value pairs. 
It reports calls where an argument in a key position is neither a string nor a slog.Attr, and where a final key is missing its value. For example, it would report
+-
+-    slog.Warn("message", 11, "k") // slog.Warn arg "11" should be a string or a slog.Attr
+-
+-and
+-
+-    slog.Info("message", "k1", v1, "k2") // call to slog.Info missing a final value
+-
+-
+-Default: on.
+-
+-Package documentation: [slog](https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/slog)
+-
+-
+-## `sortslice`: check the argument type of sort.Slice
+-
+-sort.Slice requires an argument of a slice type. Check that the interface{} value passed to sort.Slice is actually a slice.
+-
+-
+-Default: on.
+-
+-Package documentation: [sortslice](https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/sortslice)
+-
+-
+-## `stditerators`: use iterators instead of Len/At-style APIs
+-
+-This analyzer suggests a fix to replace each loop of the form:
+-
+-    for i := 0; i < x.Len(); i++ {
+-        use(x.At(i))
+-    }
+-
+-or its "for elem := range x.Len()" equivalent by a range loop over an iterator offered by the same data type:
+-
+-    for elem := range x.All() {
+-        use(elem)
+-    }
+-
+-where x is one of various well-known types in the standard library.
+-
+-
+-Default: on.
+-
+-Package documentation: [stditerators](https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/modernize#stditerators)
+-
+-
+-## `stdmethods`: check signature of methods of well-known interfaces
+-
+-Sometimes a type may be intended to satisfy an interface but may fail to do so because of a mistake in its method signature. For example, the result of this WriteTo method should be (int64, error), not error, to satisfy io.WriterTo:
+-
+-    type myWriterTo struct{...}
+-    func (myWriterTo) WriteTo(w io.Writer) error { ... }
+-
+-This check ensures that each method whose name matches one of several well-known interface methods from the standard library has the correct signature for that interface.
+-
+-Checked method names include:
+-
+-    Format GobEncode GobDecode MarshalJSON MarshalXML
+-    Peek ReadByte ReadFrom ReadRune Scan Seek
+-    UnmarshalJSON UnreadByte UnreadRune WriteByte
+-    WriteTo
+-
+-
+-Default: on.
+-
+-Package documentation: [stdmethods](https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/stdmethods)
+-
+-
+-## `stdversion`: report uses of too-new standard library symbols
+-
+-The stdversion analyzer reports references to symbols in the standard library that were introduced by a Go release higher than the one in force in the referring file. (Recall that the file's Go version is defined by the 'go' directive in its module's go.mod file, or by a "//go:build go1.X" build tag at the top of the file.)
+-
+-The analyzer does not report a diagnostic for a reference to a "too new" field or method of a type that is itself "too new", as this may have false positives, for example if fields or methods are accessed through a type alias that is guarded by a Go version constraint.
+-
+-
+-Default: on.
+-
+-Package documentation: [stdversion](https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/stdversion)
+-
+-
+-## `stringintconv`: check for string(int) conversions
+-
+-This checker flags conversions of the form string(x) where x is an integer (but not byte or rune) type. Such conversions are discouraged because they return the UTF-8 representation of the Unicode code point x, and not a decimal string representation of x as one might expect. Furthermore, if x denotes an invalid code point, the conversion cannot be statically rejected.
+- +-For conversions that intend on using the code point, consider replacing them with string(rune(x)). Otherwise, strconv.Itoa and its equivalents return the string representation of the value in the desired base. +- +- +-Default: on. +- +-Package documentation: [stringintconv](https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/stringintconv) +- +- +-## `stringsbuilder`: replace += with strings.Builder +- +-This analyzer replaces repeated string += string concatenation operations with calls to Go 1.10's strings.Builder. +- +-For example: +- +- var s = "[" +- for x := range seq { +- s += x +- s += "." +- } +- s += "]" +- use(s) +- +-is replaced by: +- +- var s strings.Builder +- s.WriteString("[") +- for x := range seq { +- s.WriteString(x) +- s.WriteString(".") +- } +- s.WriteString("]") +- use(s.String()) +- +-This avoids quadratic memory allocation and improves performance. +- +-The analyzer requires that all references to s except the final one are += operations. To avoid warning about trivial cases, at least one must appear within a loop. The variable s must be a local variable, not a global or parameter. +- +-The sole use of the finished string must be the last reference to the variable s. (It may appear within an intervening loop or function literal, since even s.String() is called repeatedly, it does not allocate memory.) +- +- +-Default: on. +- +-Package documentation: [stringsbuilder](https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/modernize#stringbuilder) +- +- +-## `stringscutprefix`: replace HasPrefix/TrimPrefix with CutPrefix +- +-The stringscutprefix analyzer simplifies a common pattern where code first checks for a prefix with \`strings.HasPrefix\` and then removes it with \`strings.TrimPrefix\`. It replaces this two-step process with a single call to \`strings.CutPrefix\`, introduced in Go 1.20. The analyzer also handles the equivalent functions in the \`bytes\` package. +- +-For example, this input: +- +- if strings.HasPrefix(s, prefix) { +- use(strings.TrimPrefix(s, prefix)) +- } +- +-is fixed to: +- +- if after, ok := strings.CutPrefix(s, prefix); ok { +- use(after) +- } +- +-The analyzer also offers fixes to use CutSuffix in a similar way. This input: +- +- if strings.HasSuffix(s, suffix) { +- use(strings.TrimSuffix(s, suffix)) +- } +- +-is fixed to: +- +- if before, ok := strings.CutSuffix(s, suffix); ok { +- use(before) +- } +- +- +-Default: on. +- +-Package documentation: [stringscutprefix](https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/modernize#stringscutprefix) +- +- +-## `stringsseq`: replace ranging over Split/Fields with SplitSeq/FieldsSeq +- +-The stringsseq analyzer improves the efficiency of iterating over substrings. It replaces +- +- for range strings.Split(...) +- +-with the more efficient +- +- for range strings.SplitSeq(...) +- +-which was added in Go 1.24 and avoids allocating a slice for the substrings. The analyzer also handles strings.Fields and the equivalent functions in the bytes package. +- +- +-Default: on. +- +-Package documentation: [stringsseq](https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/modernize#stringsseq) +- +- +-## `structtag`: check that struct field tags conform to reflect.StructTag.Get +- +-Also report certain struct tags (json, xml) used with unexported fields. +- +- +-Default: on. 
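+-
+-A short illustrative sketch of tags this check reports (the struct and field names are hypothetical):
+-
+-    type User struct {
+-        Name string `json:"name,omitempty"` // ok
+-        Age  int    `json:age`              // flagged: tag value is not quoted, so it does not conform to reflect.StructTag.Get
+-        note string `xml:"note"`            // flagged: xml tag on an unexported field has no effect
+-    }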
+- +-Package documentation: [structtag](https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/structtag) +- +- +-## `testingcontext`: replace context.WithCancel with t.Context in tests +- +-The testingcontext analyzer simplifies context management in tests. It replaces the manual creation of a cancellable context, +- +- ctx, cancel := context.WithCancel(context.Background()) +- defer cancel() +- +-with a single call to t.Context(), which was added in Go 1.24. +- +-This change is only suggested if the \`cancel\` function is not used for any other purpose. +- +- +-Default: on. +- +-Package documentation: [testingcontext](https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/modernize#testingcontext) +- +- +-## `testinggoroutine`: report calls to (*testing.T).Fatal from goroutines started by a test +- +-Functions that abruptly terminate a test, such as the Fatal, Fatalf, FailNow, and Skip{,f,Now} methods of \*testing.T, must be called from the test goroutine itself. This checker detects calls to these functions that occur within a goroutine started by the test. For example: +- +- func TestFoo(t *testing.T) { +- go func() { +- t.Fatal("oops") // error: (*T).Fatal called from non-test goroutine +- }() +- } +- +- +-Default: on. +- +-Package documentation: [testinggoroutine](https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/testinggoroutine) +- +- +-## `tests`: check for common mistaken usages of tests and examples +- +-The tests checker walks Test, Benchmark, Fuzzing and Example functions checking malformed names, wrong signatures and examples documenting non-existent identifiers. +- +-Please see the documentation for package testing in golang.org/pkg/testing for the conventions that are enforced for Tests, Benchmarks, and Examples. +- +- +-Default: on. +- +-Package documentation: [tests](https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/tests) +- +- +-## `timeformat`: check for calls of (time.Time).Format or time.Parse with 2006-02-01 +- +-The timeformat checker looks for time formats with the 2006-02-01 (yyyy-dd-mm) format. Internationally, "yyyy-dd-mm" does not occur in common calendar date standards, and so it is more likely that 2006-01-02 (yyyy-mm-dd) was intended. +- +- +-Default: on. +- +-Package documentation: [timeformat](https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/timeformat) +- +- +-## `unmarshal`: report passing non-pointer or non-interface values to unmarshal +- +-The unmarshal analysis reports calls to functions such as json.Unmarshal in which the argument type is not a pointer or an interface. +- +- +-Default: on. +- +-Package documentation: [unmarshal](https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/unmarshal) +- +- +-## `unreachable`: check for unreachable code +- +-The unreachable analyzer finds statements that execution can never reach because they are preceded by a return statement, a call to panic, an infinite loop, or similar constructs. +- +- +-Default: on. +- +-Package documentation: [unreachable](https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/unreachable) +- +- +-## `unsafeptr`: check for invalid conversions of uintptr to unsafe.Pointer +- +-The unsafeptr analyzer reports likely incorrect uses of unsafe.Pointer to convert integers to pointers. A conversion from uintptr to unsafe.Pointer is invalid if it implies that there is a uintptr-typed word in memory that holds a pointer value, because that word will be invisible to stack copying and to the garbage collector. +- +- +-Default: on. 
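+-
+-A sketch of the kind of pattern the description above refers to (illustrative only, not taken from the check's documentation):
+-
+-    u := uintptr(unsafe.Pointer(&x)) // the object is now held only as a uintptr-typed word
+-    // ... the garbage collector or stack copying may move or free x here ...
+-    p := (*int)(unsafe.Pointer(u))   // likely reported: converting a stored uintptr back to a pointer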
+- +-Package documentation: [unsafeptr](https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/unsafeptr) +- +- +-## `unusedfunc`: check for unused functions, methods, etc +- +-The unusedfunc analyzer reports functions and methods that are never referenced outside of their own declaration. +- +-A function is considered unused if it is unexported and not referenced (except within its own declaration). +- +-A method is considered unused if it is unexported, not referenced (except within its own declaration), and its name does not match that of any method of an interface type declared within the same package. +- +-The tool may report false positives in some situations, for example: +- +- - for a declaration of an unexported function that is referenced from another package using the go:linkname mechanism, if the declaration's doc comment does not also have a go:linkname comment. +- +- (Such code is in any case strongly discouraged: linkname annotations, if they must be used at all, should be used on both the declaration and the alias.) +- +- - for compiler intrinsics in the "runtime" package that, though never referenced, are known to the compiler and are called indirectly by compiled object code. +- +- - for functions called only from assembly. +- +- - for functions called only from files whose build tags are not selected in the current build configuration. +- +-Since these situations are relatively common in the low-level parts of the runtime, this analyzer ignores the standard library. See [https://go.dev/issue/71686](https://go.dev/issue/71686) and [https://go.dev/issue/74130](https://go.dev/issue/74130) for further discussion of these limitations. +- +-The unusedfunc algorithm is not as precise as the golang.org/x/tools/cmd/deadcode tool, but it has the advantage that it runs within the modular analysis framework, enabling near real-time feedback within gopls. +- +-The unusedfunc analyzer also reports unused types, vars, and constants. Enums--constants defined with iota--are ignored since even the unused values must remain present to preserve the logical ordering. +- +- +-Default: on. +- +-Package documentation: [unusedfunc](https://pkg.go.dev/golang.org/x/tools/gopls/internal/analysis/unusedfunc) +- +- +-## `unusedparams`: check for unused parameters of functions +- +-The unusedparams analyzer checks functions to see if there are any parameters that are not being used. +- +-To ensure soundness, it ignores: +- +- - "address-taken" functions, that is, functions that are used as a value rather than being called directly; their signatures may be required to conform to a func type. +- - exported functions or methods, since they may be address-taken in another package. +- - unexported methods whose name matches an interface method declared in the same package, since the method's signature may be required to conform to the interface type. +- - functions with empty bodies, or containing just a call to panic. +- - parameters that are unnamed, or named "\_", the blank identifier. +- +-The analyzer suggests a fix of replacing the parameter name by "\_", but in such cases a deeper fix can be obtained by invoking the "Refactor: remove unused parameter" code action, which will eliminate the parameter entirely, along with all corresponding arguments at call sites, while taking care to preserve any side effects in the argument expressions; see [https://github.com/golang/tools/releases/tag/gopls%2Fv0.14](https://github.com/golang/tools/releases/tag/gopls%2Fv0.14). 
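+-
+-For example (a hypothetical sketch), a parameter like retries below would be reported, and the suggested fix renames it to _:
+-
+-    func send(conn net.Conn, msg string, retries int) error { // retries is never used
+-        _, err := conn.Write([]byte(msg))
+-        return err
+-    }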
+- +-This analyzer ignores generated code. +- +- +-Default: on. +- +-Package documentation: [unusedparams](https://pkg.go.dev/golang.org/x/tools/gopls/internal/analysis/unusedparams) +- +- +-## `unusedresult`: check for unused results of calls to some functions +- +-Some functions like fmt.Errorf return a result and have no side effects, so it is always a mistake to discard the result. Other functions may return an error that must not be ignored, or a cleanup operation that must be called. This analyzer reports calls to functions like these when the result of the call is ignored. +- +-The set of functions may be controlled using flags. +- +- +-Default: on. +- +-Package documentation: [unusedresult](https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/unusedresult) +- +- +-## `unusedvariable`: check for unused variables and suggest fixes +- +- +- +-Default: on. +- +-Package documentation: [unusedvariable](https://pkg.go.dev/golang.org/x/tools/gopls/internal/analysis/unusedvariable) +- +- +-## `unusedwrite`: checks for unused writes +- +-The analyzer reports instances of writes to struct fields and arrays that are never read. Specifically, when a struct object or an array is copied, its elements are copied implicitly by the compiler, and any element write to this copy does nothing with the original object. +- +-For example: +- +- type T struct { x int } +- +- func f(input []T) { +- for i, v := range input { // v is a copy +- v.x = i // unused write to field x +- } +- } +- +-Another example is about non-pointer receiver: +- +- type T struct { x int } +- +- func (t T) f() { // t is a copy +- t.x = i // unused write to field x +- } +- +- +-Default: on. +- +-Package documentation: [unusedwrite](https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/unusedwrite) +- +- +-## `waitgroup`: check for misuses of sync.WaitGroup +- +-This analyzer detects mistaken calls to the (\*sync.WaitGroup).Add method from inside a new goroutine, causing Add to race with Wait: +- +- // WRONG +- var wg sync.WaitGroup +- go func() { +- wg.Add(1) // "WaitGroup.Add called from inside new goroutine" +- defer wg.Done() +- ... +- }() +- wg.Wait() // (may return prematurely before new goroutine starts) +- +-The correct code calls Add before starting the goroutine: +- +- // RIGHT +- var wg sync.WaitGroup +- wg.Add(1) +- go func() { +- defer wg.Done() +- ... +- }() +- wg.Wait() +- +- +-Default: on. +- +-Package documentation: [waitgroup](https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/waitgroup) +- +- +-## `waitgroup`: replace wg.Add(1)/go/wg.Done() with wg.Go +- +-The waitgroup analyzer simplifies goroutine management with \`sync.WaitGroup\`. It replaces the common pattern +- +- wg.Add(1) +- go func() { +- defer wg.Done() +- ... +- }() +- +-with a single call to +- +- wg.Go(func(){ ... }) +- +-which was added in Go 1.25. +- +- +-Default: on. +- +-Package documentation: [waitgroup](https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/modernize#waitgroup) +- +- +-## `yield`: report calls to yield where the result is ignored +- +-After a yield function returns false, the caller should not call the yield function again; generally the iterator should return promptly. 
+- +-This example fails to check the result of the call to yield, causing this analyzer to report a diagnostic: +- +- yield(1) // yield may be called again (on L2) after returning false +- yield(2) +- +-The corrected code is either this: +- +- if yield(1) { yield(2) } +- +-or simply: +- +- _ = yield(1) && yield(2) +- +-It is not always a mistake to ignore the result of yield. For example, this is a valid single-element iterator: +- +- yield(1) // ok to ignore result +- return +- +-It is only a mistake when the yield call that returned false may be followed by another call. +- +- +-Default: on. +- +-Package documentation: [yield](https://pkg.go.dev/golang.org/x/tools/gopls/internal/analysis/yield) +- +- +Binary files a/gopls/doc/assets/add-test-for-func.png and b/gopls/doc/assets/add-test-for-func.png differ +diff -urN a/gopls/doc/assets/assets.go b/gopls/doc/assets/assets.go +--- a/gopls/doc/assets/assets.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/doc/assets/assets.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,7 +0,0 @@ +-// Copyright 2024 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-// Package assets is an empty package to appease "go test ./...", +-// as run by our CI builders, which doesn't like an empty module. +-package assets +Binary files a/gopls/doc/assets/browse-assembly.png and b/gopls/doc/assets/browse-assembly.png differ +Binary files a/gopls/doc/assets/browse-free-symbols.png and b/gopls/doc/assets/browse-free-symbols.png differ +Binary files a/gopls/doc/assets/browse-pkg-doc.png and b/gopls/doc/assets/browse-pkg-doc.png differ +Binary files a/gopls/doc/assets/code-action-doc.png and b/gopls/doc/assets/code-action-doc.png differ +Binary files a/gopls/doc/assets/convert-string-interpreted.png and b/gopls/doc/assets/convert-string-interpreted.png differ +Binary files a/gopls/doc/assets/convert-string-raw.png and b/gopls/doc/assets/convert-string-raw.png differ +Binary files a/gopls/doc/assets/diagnostic-analysis.png and b/gopls/doc/assets/diagnostic-analysis.png differ +Binary files a/gopls/doc/assets/diagnostic-typeerror.png and b/gopls/doc/assets/diagnostic-typeerror.png differ +Binary files a/gopls/doc/assets/document-highlight.png and b/gopls/doc/assets/document-highlight.png differ +Binary files a/gopls/doc/assets/documentlink.png and b/gopls/doc/assets/documentlink.png differ +Binary files a/gopls/doc/assets/extract-function-after.png and b/gopls/doc/assets/extract-function-after.png differ +Binary files a/gopls/doc/assets/extract-function-before.png and b/gopls/doc/assets/extract-function-before.png differ +Binary files a/gopls/doc/assets/extract-to-new-file-after.png and b/gopls/doc/assets/extract-to-new-file-after.png differ +Binary files a/gopls/doc/assets/extract-to-new-file-before.png and b/gopls/doc/assets/extract-to-new-file-before.png differ +Binary files a/gopls/doc/assets/extract-val-all-before.png and b/gopls/doc/assets/extract-val-all-before.png differ +Binary files a/gopls/doc/assets/extract-var-after.png and b/gopls/doc/assets/extract-var-after.png differ +Binary files a/gopls/doc/assets/extract-var-all-after.png and b/gopls/doc/assets/extract-var-all-after.png differ +Binary files a/gopls/doc/assets/extract-var-before.png and b/gopls/doc/assets/extract-var-before.png differ +Binary files a/gopls/doc/assets/fill-struct-after.png and b/gopls/doc/assets/fill-struct-after.png differ +Binary files a/gopls/doc/assets/fill-struct-before.png and 
b/gopls/doc/assets/fill-struct-before.png differ +Binary files a/gopls/doc/assets/fill-switch-after.png and b/gopls/doc/assets/fill-switch-after.png differ +Binary files a/gopls/doc/assets/fill-switch-before.png and b/gopls/doc/assets/fill-switch-before.png differ +Binary files a/gopls/doc/assets/fill-switch-enum-after.png and b/gopls/doc/assets/fill-switch-enum-after.png differ +Binary files a/gopls/doc/assets/fill-switch-enum-before.png and b/gopls/doc/assets/fill-switch-enum-before.png differ +Binary files a/gopls/doc/assets/foldingrange.png and b/gopls/doc/assets/foldingrange.png differ +diff -urN a/gopls/doc/assets/go.mod b/gopls/doc/assets/go.mod +--- a/gopls/doc/assets/go.mod 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/doc/assets/go.mod 1969-12-31 18:00:00.000000000 -0600 +@@ -1,7 +0,0 @@ +-// This module contains no Go code, but serves to carve out a hole in +-// its parent module to avoid bloating it with large image files that +-// would otherwise be downloaded by "go install golang.org/x/tools/gopls@latest". +- +-module golang.org/x/tools/gopls/doc/assets +- +-go 1.24.0 +Binary files a/gopls/doc/assets/hover-basic.png and b/gopls/doc/assets/hover-basic.png differ +Binary files a/gopls/doc/assets/hover-doclink.png and b/gopls/doc/assets/hover-doclink.png differ +Binary files a/gopls/doc/assets/hover-embed.png and b/gopls/doc/assets/hover-embed.png differ +Binary files a/gopls/doc/assets/hover-field-tag.png and b/gopls/doc/assets/hover-field-tag.png differ +Binary files a/gopls/doc/assets/hover-linkname.png and b/gopls/doc/assets/hover-linkname.png differ +Binary files a/gopls/doc/assets/hover-size-field.png and b/gopls/doc/assets/hover-size-field.png differ +Binary files a/gopls/doc/assets/hover-size-struct.png and b/gopls/doc/assets/hover-size-struct.png differ +Binary files a/gopls/doc/assets/hover-size-wasteful.png and b/gopls/doc/assets/hover-size-wasteful.png differ +Binary files a/gopls/doc/assets/inlayhint-parameternames.png and b/gopls/doc/assets/inlayhint-parameternames.png differ +Binary files a/gopls/doc/assets/inline-after.png and b/gopls/doc/assets/inline-after.png differ +Binary files a/gopls/doc/assets/inline-before.png and b/gopls/doc/assets/inline-before.png differ +Binary files a/gopls/doc/assets/invert-if-after.png and b/gopls/doc/assets/invert-if-after.png differ +Binary files a/gopls/doc/assets/invert-if-before.png and b/gopls/doc/assets/invert-if-before.png differ +Binary files a/gopls/doc/assets/outgoingcalls.png and b/gopls/doc/assets/outgoingcalls.png differ +Binary files a/gopls/doc/assets/remove-unusedparam-after.png and b/gopls/doc/assets/remove-unusedparam-after.png differ +Binary files a/gopls/doc/assets/remove-unusedparam-before.png and b/gopls/doc/assets/remove-unusedparam-before.png differ +Binary files a/gopls/doc/assets/rename-conflict.png and b/gopls/doc/assets/rename-conflict.png differ +Binary files a/gopls/doc/assets/signature-help.png and b/gopls/doc/assets/signature-help.png differ +Binary files a/gopls/doc/assets/splitpkg-deps.png and b/gopls/doc/assets/splitpkg-deps.png differ +Binary files a/gopls/doc/assets/splitpkg.png and b/gopls/doc/assets/splitpkg.png differ +Binary files a/gopls/doc/assets/subtypes.png and b/gopls/doc/assets/subtypes.png differ +Binary files a/gopls/doc/assets/supertypes.png and b/gopls/doc/assets/supertypes.png differ +Binary files a/gopls/doc/assets/zeroconfig.png and b/gopls/doc/assets/zeroconfig.png differ +diff -urN a/gopls/doc/codelenses.md b/gopls/doc/codelenses.md +--- a/gopls/doc/codelenses.md 
2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/doc/codelenses.md 1969-12-31 18:00:00.000000000 -0600 +@@ -1,155 +0,0 @@ +---- +-title: "Gopls: Code lenses" +---- +- +-A "code lens" is a command associated with a range of a source file. +-The VS Code manual describes code lenses as +-"[actionable, contextual information, interspersed in your source +-code](https://code.visualstudio.com/blogs/2017/02/12/code-lens-roundup)". +-The LSP [`textDocument/codeLens`](https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#textDocument_codeLens) operation requests the +-current set of code lenses for a file. +- +-Gopls generates code lenses from a number of sources. +-This document describes them. +- +-They can be enabled and disabled using the +-[`codelenses`](settings.md#codelenses) setting. +-Their features are subject to change. +- +-Client support: +-- **VS Code**: Code Lenses appear as small text links above a line of source code. +-- **Emacs + eglot**: Not supported, but prototype exists at https://github.com/joaotavora/eglot/pull/71. +-- **Vim + coc.nvim**: ?? +-- **CLI**: `gopls codelens`. For example, `gopls codelens -exec file.go:123 "run test"` runs the test at the specified line. +- +- +- +- +-## `generate`: Run `go generate` +- +- +-This codelens source annotates any `//go:generate` comments +-with commands to run `go generate` in this directory, on +-all directories recursively beneath this one. +- +-See [Generating code](https://go.dev/blog/generate) for +-more details. +- +- +-Default: on +- +-File type: Go +- +-## `regenerate_cgo`: Re-generate cgo declarations +- +- +-This codelens source annotates an `import "C"` declaration +-with a command to re-run the [cgo +-command](https://pkg.go.dev/cmd/cgo) to regenerate the +-corresponding Go declarations. +- +-Use this after editing the C code in comments attached to +-the import, or in C header files included by it. +- +- +-Default: on +- +-File type: Go +- +-## `test`: Run tests and benchmarks +- +- +-This codelens source annotates each `Test` and `Benchmark` +-function in a `*_test.go` file with a command to run it. +- +-This source is off by default because VS Code has +-a client-side custom UI for testing, and because progress +-notifications are not a great UX for streamed test output. +-See: +-- golang/go#67400 for a discussion of this feature. +-- https://github.com/joaotavora/eglot/discussions/1402 +- for an alternative approach. +- +- +-Default: off +- +-File type: Go +- +-## `run_govulncheck`: Run govulncheck (legacy) +- +- +-This codelens source annotates the `module` directive in a go.mod file +-with a command to run Govulncheck asynchronously. +- +-[Govulncheck](https://go.dev/blog/vuln) is a static analysis tool that +-computes the set of functions reachable within your application, including +-dependencies; queries a database of known security vulnerabilities; and +-reports any potential problems it finds. +- +- +-Default: on +- +-File type: go.mod +- +-## `tidy`: Tidy go.mod file +- +- +-This codelens source annotates the `module` directive in a +-go.mod file with a command to run [`go mod +-tidy`](https://go.dev/ref/mod#go-mod-tidy), which ensures +-that the go.mod file matches the source code in the module. 
+- +- +-Default: on +- +-File type: go.mod +- +-## `upgrade_dependency`: Update dependencies +- +- +-This codelens source annotates the `module` directive in a +-go.mod file with commands to: +- +-- check for available upgrades, +-- upgrade direct dependencies, and +-- upgrade all dependencies transitively. +- +- +-Default: on +- +-File type: go.mod +- +-## `vendor`: Update vendor directory +- +- +-This codelens source annotates the `module` directive in a +-go.mod file with a command to run [`go mod +-vendor`](https://go.dev/ref/mod#go-mod-vendor), which +-creates or updates the directory named `vendor` in the +-module root so that it contains an up-to-date copy of all +-necessary package dependencies. +- +- +-Default: on +- +-File type: go.mod +- +-## `vulncheck`: Run govulncheck +- +-**This setting is experimental and may be deleted.** +- +- +-This codelens source annotates the `module` directive in a go.mod file +-with a command to run govulncheck synchronously. +- +-[Govulncheck](https://go.dev/blog/vuln) is a static analysis tool that +-computes the set of functions reachable within your application, including +-dependencies; queries a database of known security vulnerabilities; and +-reports any potential problems it finds. +- +- +-Default: off +- +-File type: go.mod +- +- +diff -urN a/gopls/doc/command-line.md b/gopls/doc/command-line.md +--- a/gopls/doc/command-line.md 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/doc/command-line.md 1969-12-31 18:00:00.000000000 -0600 +@@ -1,37 +0,0 @@ +---- +-title: "Gopls: Command-line interface" +---- +- +-The `gopls` command provides a number of subcommands that expose much +-of the server's functionality. However, the interface is currently +-**experimental** and **subject to change at any point.** +-It is not efficient, complete, flexible, or officially supported. +- +-Its primary use is as a debugging aid. +-For example, this command reports the location of references to the +-symbol at the specified file/line/column: +- +-``` +-$ gopls references ./gopls/main.go:35:8 +-Log: Loading packages... +-Info: Finished loading packages. +-/home/gopher/xtools/go/packages/gopackages/main.go:27:7-11 +-/home/gopher/xtools/gopls/internal/cmd/integration_test.go:1062:7-11 +-/home/gopher/xtools/gopls/internal/test/integration/bench/bench_test.go:59:8-12 +-/home/gopher/xtools/gopls/internal/test/integration/regtest.go:140:8-12 +-/home/gopher/xtools/gopls/main.go:35:7-11 +-``` +- +-See https://go.dev/issue/63693 for a discussion of its future. +- +-Learn about available commands and flags by running `gopls help`. +- +-Positions within files are specified as `file.go:line:column` triples, +-where the line and column start at 1, and columns are measured in +-bytes of the UTF-8 encoding. +-Alternatively, positions may be specified by the byte offset within +-the UTF-8 encoding of the file, starting from zero, for example +-`file.go:#1234`. +-(When working in non-ASCII files, beware that your editor may report a +-position's offset within its file using a different measure such as +-UTF-16 codes, Unicode code points, or graphemes). +diff -urN a/gopls/doc/contributing.md b/gopls/doc/contributing.md +--- a/gopls/doc/contributing.md 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/doc/contributing.md 1969-12-31 18:00:00.000000000 -0600 +@@ -1,227 +0,0 @@ +---- +-title: "Gopls: Contributing" +---- +- +-Contributions are welcome! However, development is fast moving, +-and we are limited in our capacity to review contributions. 
+-So, before sending a CL, please please please: +- +-- **file an issue** for a bug or feature request, if one does not +- exist already. This allows us to identify redundant requests, or to +- merge a specific problem into a more general one, and to assess the +- importance of the problem. +-- **claim it for yourself** by commenting on the issue or, if you are +- able, by assigning the issue to yourself. This helps us avoid two +- people working on the same problem. +-- **propose an implementation plan** in the issue tracker for CLs of +- any complexity. It is much more efficient to discuss the plan at a +- high level before we start getting bogged down in the details of +- a code review. +- +-When you send a CL, it should include: +- +-- a **CL description** that summarizes the change, +- motivates why it is necessary, +- explains it at a high level, +- contrasts it with more obvious or simpler approaches, and +- links to relevant issues; +-- **tests** (integration tests or marker tests); +-- **documentation**, for new or modified features; and +-- **release notes**, for new features or significant changes. +- +-During code review, please address all reviewer comments. +-Some comments result in straightforward code changes; +-others demand a more complex response. +-When a reviewer asks a question, the best response is +-often not to respond to it directly, but to change the +-code to avoid raising the question, +-for example by making the code self-explanatory. +-It's fine to disagree with a comment, +-point out a reviewer's mistake, +-or offer to address a comment in a follow-up change, +-leaving a `TODO` comment in the current CL. +-But please don't dismiss or quietly ignore a comment without action, +-as it may lead reviewers to repeat themselves, +-or to serious problems being neglected. +- +-For more detail, see the Go project's +-[contribution guidelines](https://golang.org/doc/contribute.html). +- +-## Finding issues +- +-All `gopls` issues are labeled as such (see the [`gopls` label][issue-gopls]). +-Issues that are suitable for contributors are additionally tagged with the +-[`help-wanted` label][issue-wanted]. +- +-Before you begin working on an issue, please leave a comment that you are +-claiming it. +- +-## Getting started +- +-[![PkgGoDev](https://pkg.go.dev/badge/golang.org/x/tools/gopls/internal)](https://pkg.go.dev/golang.org/x/tools/gopls/internal) +- +-Most of the `gopls` logic is in the `golang.org/x/tools/gopls/internal` directory. +-See [design/implementation.md](./design/implementation.md) for an overview of the code organization. +- +-## Build +- +-To build a version of `gopls` with your changes applied: +- +-```bash +-cd /path/to/tools/gopls +-go install +-``` +- +-To confirm that you are testing with the correct `gopls` version, check that +-your `gopls` version looks like this: +- +-```bash +-$ gopls version +-golang.org/x/tools/gopls master +- golang.org/x/tools/gopls@(devel) +-``` +- +-## Getting help +- +-The best way to contact the gopls team directly is via the +-[#gopls-dev](https://app.slack.com/client/T029RQSE6/CRWSN9NCD) channel on the +-gophers slack. Please feel free to ask any questions about your contribution or +-about contributing in general. +- +- +-## Error handling +- +-It is important for the user experience that, whenever practical, +-minor logic errors in a particular feature don't cause the server to +-crash. +- +-The representation of a Go program is complex. 
The import graph of +-package metadata, the syntax trees of parsed files, and their +-associated type information together form a huge API surface area. +-Even when the input is valid, there are many edge cases to consider, +-and this grows by an order of magnitude when you consider missing +-imports, parse errors, and type errors. +- +-What should you do when your logic must handle an error that you +-believe "can't happen"? +- +-- If it's possible to return an error, then use the `bug.Errorf` +- function to return an error to the user, but also record the bug in +- gopls' cache so that it is less likely to be ignored. +- +-- If it's safe to proceed, you can call `bug.Reportf` to record the +- error and continue as normal. +- +-- If there's no way to proceed, call `bug.Fatalf` to record the error +- and then stop the program with `log.Fatalf`. You can also use +- `bug.Panicf` if there's a chance that a recover handler might save +- the situation. +- +-- Only if you can prove locally that an error is impossible should you +- call `log.Fatal`. If the error may happen for some input, however +- unlikely, then you should use one of the approaches above. Also, if +- the proof of safety depends on invariants broadly distributed across +- the code base, then you should instead use `bug.Panicf`. +- +-Note also that panicking is preferable to `log.Fatal` because it +-allows VS Code's crash reporting to recognize and capture the stack. +- +-Bugs reported through `bug.Errorf` and friends are retrieved using the +-`gopls bug` command, which opens a GitHub Issue template and populates +-it with a summary of each bug and its frequency. +-The text of the bug is rather fastidiously printed to stdout to avoid +-sharing user names and error message strings (which could contain +-project identifiers) with GitHub. +-Users are invited to share it if they are willing. +- +-## Testing +- +-The normal command you should use to run the tests after a change is: +- +-```bash +-gopls$ go test -short ./... +-``` +- +-(The `-short` flag skips some slow-running ones. The trybot builders +-run the complete set, on a wide range of platforms.) +- +-Gopls tests are a mix of two kinds. +- +-- [Marker tests](https://golang.org/x/tools/gopls/internal/test/marker) express each test scenario +- in a standalone text file that contains the target .go, go.mod, and +- go.work files, in which special annotations embedded in comments +- drive the test. These tests are generally easy to write and fast +- to iterate, but have limitations on what they can express. +- +-- [Integration tests](https://golang.org/x/tools/gopls/internal/test/integration) are regular Go +- `func Test(*testing.T)` functions that make a series of calls to an +- API for a fake LSP-enabled client editor. The API allows you to open +- and edit a file, navigate to a definition, invoke other LSP +- operations, and assert properties about the state. +- +- Due to the asynchronous nature of the LSP, integration tests make +- assertions about states that the editor must achieve eventually, +- even when the program goes wrong quickly, it may take a while before +- the error is reported as a failure to achieve the desired state +- within several minutes. We recommend that you set +- `GOPLS_INTEGRATION_TEST_TIMEOUT=10s` to reduce the timeout for +- integration tests when debugging. +- +- When they fail, the integration tests print the log of the LSP +- session between client and server. Though verbose, they are very +- helpful for debugging once you know how to read them. 
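To make the shape of an integration test concrete, here is a rough sketch. The import path comes from the link above, but the `Run` helper, `Env` type, and `OpenFile`/`Await` methods are assumptions about the harness and may not match its current exported API; a real test package also needs whatever TestMain hook the harness requires, so treat this only as an outline of the structure described above:

    package mytests

    import (
        "testing"

        // Assumed import; check gopls/internal/test/integration for the
        // actual exported names and signatures.
        . "golang.org/x/tools/gopls/internal/test/integration"
    )

    // A txtar-style archive describing the fake workspace the editor opens.
    const files = `
    -- go.mod --
    module example.com/demo

    go 1.21
    -- main.go --
    package main

    func main() {}
    `

    func TestOpenFile(t *testing.T) {
        Run(t, files, func(t *testing.T, env *Env) {
            // Drive the fake editor; assertions about state the server must
            // eventually reach would follow (for example via env.Await with
            // an expectation).
            env.OpenFile("main.go")
        })
    }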
+- +-Don't hesitate to [reach out](#getting-help) to the gopls team if you +-need help. +- +-### CI +- +-When you mail your CL and you or a fellow contributor assigns the +-`Run-TryBot=1` label in Gerrit, the +-[TryBots](https://golang.org/doc/contribute.html#trybots) will run tests in +-both the `golang.org/x/tools` and `golang.org/x/tools/gopls` modules, as +-described above. +- +-Furthermore, an additional "gopls-CI" pass will be run by _Kokoro_, which is a +-Jenkins-like Google infrastructure for running Dockerized tests. This allows us +-to run gopls tests in various environments that would be difficult to add to +-the TryBots. Notably, Kokoro runs tests on +-[older Go versions](index.md#supported-go-versions) that are no longer supported +-by the TryBots. Per that policy, support for these older Go versions is +-best-effort, and test failures may be skipped rather than fixed. +- +-Kokoro runs are triggered by the `Run-TryBot=1` label, just like TryBots, but +-unlike TryBots they do not automatically re-run if the "gopls-CI" result is +-removed in Gerrit. To force a re-run of the Kokoro CI on a CL containing the +-`Run-TryBot=1` label, you can reply in Gerrit with the comment "kokoro rerun". +- +-## Debugging +- +-The easiest way to debug your change is to run a single `gopls` test with a +-debugger. +- +-See also [Troubleshooting](troubleshooting.md#troubleshooting). +- +- +- +-[issue-gopls]: https://github.com/golang/go/issues?utf8=%E2%9C%93&q=is%3Aissue+is%3Aopen+label%3Agopls "gopls issues" +-[issue-wanted]: https://github.com/golang/go/issues?utf8=✓&q=is%3Aissue+is%3Aopen+label%3Agopls+label%3A"help+wanted" "help wanted" +- +-## Documentation +- +-Each CL that adds or changes a feature should include, in addition to +-a test that exercises the new behavior: +- +-- a **release note** that briefly explains the change, and +-- **comprehensive documentation** in the [index of features](features/). +- +-The release note should go in the file named for the forthcoming +-release, for example [release/v0.16.0.md](release/v0.16.0.md). (Create +-the file if your feature is the first to be added after a release.) +- +-## Design documentation +- +-* [Integrating `gopls` with an editor](design/integrating.md) +-* [Design requirements and decisions](design/design.md) +-* [Implementation overview](design/implementation.md) +diff -urN a/gopls/doc/daemon.md b/gopls/doc/daemon.md +--- a/gopls/doc/daemon.md 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/doc/daemon.md 1969-12-31 18:00:00.000000000 -0600 +@@ -1,185 +0,0 @@ +---- +-title: "Gopls: Running as a daemon" +---- +- +-**Note: this feature is new. If you encounter bugs, please [file an +-issue](troubleshooting.md#file-an-issue).** +- +-If you just want to try this out, skip ahead to the [quickstart](#quickstart). +- +-## Background: gopls execution modes +- +-Gopls was originally implemented as an LSP sidecar: a process started by +-editors or editor plugins, and communicated with using jsonrpc 2.0 over +-stdin/stdout. By executing as a stateful process, gopls can maintain a +-significant amount of cache and can eagerly perform analysis on the source code +-being edited. +- +-This execution mode does not work as well when there are many separate editor +-processes or when editor processes are short-lived, as is often the case for +-users of non-IDE editors such as Vim or Emacs. Having many processes means +-having many caches, consuming a significant amount of system resources. 
Using +-short-lived sessions means paying a start-up cost each time a session is +-created. +- +-To support these types of workflows, a new mode of gopls execution is supported +-wherein a single, persistent, shared gopls "daemon" process is responsible for +-managing all gopls sessions. In this mode, editors still start a gopls sidecar, +-but this sidecar merely acts as a thin "forwarder", responsible for forwarding +-the LSP to the shared gopls instance and recording metrics, logs, and rpc +-traces. +- +-## Quickstart +- +-To use a shared gopls instance you must either manage the daemon process +-yourself, or let the gopls forwarder processes start the shared daemon as +-needed. +- +-### Running with `-remote=auto` +- +-Automatic management of the daemon is easiest, and can be done by passing the +-flag `-remote=auto` to the gopls process started by your editor. This will +-cause this process to auto-start the gopls daemon if needed, connect to it, and +-forward the LSP. For example, here is a reasonable gopls invocation, that sets +-some additional flags for easier [debugging](#debugging): +- +-```bash +-gopls -remote=auto -logfile=auto -debug=:0 -remote.debug=:0 -rpc.trace +-``` +- +-Note that the shared gopls process will automatically shut down after one +-minute with no connected clients. +- +-### Managing the daemon manually +- +-To manage the gopls daemon process via external means rather than having the +-forwarders manage it, you must start a gopls daemon process with the +-`-listen=` flag, and then pass `-remote=` to the gopls processes +-started by your editor. +- +-For example, to host the daemon on the TCP port `37374`, do: +- +-```bash +-gopls -listen=:37374 -logfile=auto -debug=:0 +-``` +- +-And then from the editor, run +- +-```bash +-gopls -remote=:37374 -logfile=auto -debug=:0 -rpc.trace +-``` +- +-If you are on a POSIX system, you can also use unix domain sockets by prefixing +-the flag values with `unix;`. For example: +- +-```bash +-gopls -listen="unix;/tmp/gopls-daemon-socket" -logfile=auto -debug=:0 +-``` +- +-And connect via: +- +-```bash +-gopls -remote="unix;/tmp/gopls-daemon-socket" -logfile=auto -debug=:0 -rpc.trace +-``` +- +-(Note that these flag values MUST be enclosed in quotes, because ';' is a +-special shell character. For this reason, this syntax is subject to change in +-the future.) +- +-## Debugging +- +-Debugging a shared gopls session is more complicated than a singleton session, +-because there are now two gopls processes involved with handling the LSP. Here +-are some tips: +- +-### Finding logfiles and debug addresses +- +-When running in daemon mode, you can use the `gopls inspect sessions` command +-to find the logfile and debug port for your gopls daemon instance (as well as +-for all its connected clients). By default, this inspects the default daemon +-(i.e. `-remote=auto`). To inspect a different daemon, use the `-remote` flag +-explicitly: `gopls -remote=localhost:12345 inspect sessions`. +- +-This works whether or not you have enabled `-remote.debug`. +- +-### Traversing debug pages +- +-When `-debug=:0` is passed to gopls, it runs a webserver that serves stateful +-debug pages (see [troubleshooting.md](troubleshooting.md)). You can find the +-actual port hosting these pages by either using the `gopls inspect sessions` +-command, or by checking the start of the logfile -- it will be one of the first +-log messages. For example, if using `-logfile=auto`, find the debug address by +-checking `head /tmp/gopls-.log`. 
+- +-By default, the gopls daemon is not started with `-debug`. To enable it, set +-the `-remote.debug` flag on the forwarder instance, so that it invokes gopls +-with `-debug` when starting the daemon. +- +-The debug pages of the forwarder process will have a link to the debug pages of +-the daemon server process. Correspondingly, the debug pages of the daemon +-process will have a link to each of its clients. +- +-This can help you find metrics, traces, and log files for all of the various +-servers and clients. +- +-### Using logfiles +- +-The gopls daemon is started with logging disabled by default. To customize +-this, pass `-remote.logfile` to the gopls forwarder. Using +-`-remote.logfile=auto`, the daemon will log to a default location (on posix +-systems: `/tmp/gopls-daemon-.log`). +- +-The gopls daemon does not log session-scoped messages: those are instead +-reflected back to the forwarder so that they can be accessed by the editor. +-Daemon logs will only contain global messages, for example logs when sessions +-connect and disconnect. +- +-It is recommended to start the forwarder gopls process with `-rpc.trace`, so +-that its logfile will contain rpc trace logs specific to the LSP session. +- +-## Using multiple shared gopls instances +- +-There may be environments where it is desirable to have more than one shared +-gopls instance. If managing the daemon manually, this can be done by simply +-choosing different `-listen` addresses for each distinct daemon process. +- +-On POSIX systems, there is also support for automatic management of distinct +-shared gopls processes: distinct daemons can be selected by passing +-`-remote="auto;"`. Any gopls forwarder passing the same value for `` +-will use the same shared daemon. +- +-## FAQ +- +-**Q: Why am I not saving as much memory as I expected when using a shared gopls?** +- +-A: As described in [implementation.md](design/implementation.md), gopls has a +-concept of view/session/cache. Each session and view map onto exactly one +-editor session (because they contain things like edited but unsaved buffers). +-The cache contains things that are independent of any editor session, and can +-therefore be shared. +- +-When, for example, three editor session are sharing a single gopls process, +-they will share the cache but will each have their own session and view. The +-memory savings in this mode, when compared to three separate gopls processes, +-corresponds to the amount of cache overlap across sessions. +- +-Because this hasn't mattered much in the past, it is likely that there is state +-that can be moved out of the session/view, and into the cache, thereby +-increasing the amount of memory savings in the shared mode. +- +-**Q: How do I customize the daemon instance when using `-remote=auto`?** +- +-The daemon may be customized using flags of the form `-remote.*` on the +-forwarder gopls. This causes the forwarder to invoke gopls with these settings +-when starting the daemon. As of writing, we expose the following configuration: +- +-* `-remote.logfile`: the location of the daemon logfile +-* `-remote.debug`: the daemon's debug address +-* `-remote.listen.timeout`: the amount of time the daemon should wait for new +- connections while there are no current connections, before shutting down. +- Must be set to a valid `time.Duration` (e.g. `30s` or `5m`). If `0`, listen +- indefinitely. Default: `1m`. +- +-Note that once the daemon is already running, setting these flags will not +-change its configuration. 
These flags only matter for the forwarder process +-that actually starts the daemon. +diff -urN a/gopls/doc/default.tmpl b/gopls/doc/default.tmpl +--- a/gopls/doc/default.tmpl 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/doc/default.tmpl 1969-12-31 18:00:00.000000000 -0600 +@@ -1,4 +0,0 @@ +-{{- /* For golang.org/x/website/cmd/golangorg */ -}} +-{{define "layout"}} +-{{doclayout .}} +-{{end}} +diff -urN a/gopls/doc/design/architecture.svg b/gopls/doc/design/architecture.svg +--- a/gopls/doc/design/architecture.svg 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/doc/design/architecture.svg 1969-12-31 18:00:00.000000000 -0600 +@@ -1 +0,0 @@ +- +\ No newline at end of file +diff -urN a/gopls/doc/design/design.md b/gopls/doc/design/design.md +--- a/gopls/doc/design/design.md 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/doc/design/design.md 1969-12-31 18:00:00.000000000 -0600 +@@ -1,437 +0,0 @@ +---- +-title: "Gopls: Design" +---- +- +-## _A note from the future_ +- +-What follows below is the original design document for gopls, aggregated from +-various sources spanning 2018 and 2019. Since then, all of the features listed +-below have been implemented, along with many others. The first two goals have +-been achieved: gopls is a full implementation of the LSP, and the default +-backend for VS Code Go and many other editors. The third goal has only been +-partially realized: while gopls has gained many features, it is not extensible +-in the sense used in this document: the only way to extend gopls is to modify +-gopls. The fourth goal is not achieved: while some notable companies are able +-to use gopls with Bazel, the experience is subpar, and the Go command is the +-only officially supported build system. +- +-On the other hand, two of the explicit non-goals have been reconsidered. One is +-minor: syntax highlighting is now supported in the LSP by way of semantic +-tokens. The other is major: as gopls gained popularity, it became apparent that +-its memory footprint was a problem. The size of developer workspaces was +-increasing faster than the RAM available in typically development environments +-(particularly with containerized development). Gopls now uses a hybrid of +-on-disk indexes and in-memory caches, described in more detail in our +-[blog post on scalability](https://go.dev/blog/gopls-scalability). +- +-Notably, in anticipating difficulties this doc turned out to be prescient. +-Gopls has indeed struggled against the core standary library packages upon +-which it is built, and its user experience is still limited by the LSP. +-Nevertheless, sticking with the standard library and LSP was the right +-approach, as despite our small team these decisions have helped gopls keep up +-with the evolving Go language (i.e. generics), and to integrate with many new +-text editors. +- +-Gopls development continues, more than four years later, with a focus on +-simplicity, reliability, and extensibility. The new, opt-in +-[Go telemetry](https://github.com/golang/tools/releases/tag/gopls%2Fv0.14.0) +-will help us attain a higher standard of stability in our releases than we've +-been able to achieve through Github issues alone. Furthermore, telemetry will +-allow us to focus on high-priority features, and deprecate historical +-workarounds that burden the codebase. With greater velocity, we look forward +-to working with the community on improved refactoring, static analysis, and +-whatever else the future brings. 
+- +-- _Rob Findley (rfindley@google.com), 2023_ +- +-## Goals +- +-* `gopls` should **become the default editor backend** for the major editors used by Go programmers, fully supported by the Go team. +-* `gopls` will be a **full implementation of LSP**, as described in the [LSP specification], to standardize as many of its features as possible. +-* `gopls` will be **clean and extensible** so that it can encompass additional features in the future, allowing Go tooling to become best in class once more. +-* `gopls` will **support alternate build systems and file layouts**, allowing Go development to be simpler and more powerful in any environment. +- +-## Context +- +-While Go has a number of excellent and useful command-line tools that enhance the developer experience, it has become clear that integrating these tools with IDEs can pose challenges. +- +-Support of these tools has relied on the goodwill of community members, and they have been put under a large burden of support at times as the language, toolchain and environments change. As a result many tools have ceased to work, have had support problems, or become confusing with forks and replacements, or provided an experience that is not as good as it could be. +-See the section below on [existing solutions](#existing-solutions) for more problems and details. +- +-This is fine for tools used occasionally, but for core IDE features, this is not acceptable. +-Autocompletion, jump to definition, formatting, and other such features should always work, as they are key for Go development. +- +-The Go team will create an editor backend that works in any build system. +-It will also be able to improve upon the latency of Go tools, since each tool will no longer have to individually run the type-checker on each invocation, instead there will be a long-running process and data can be shared between the definitions, completions, diagnostics, and other features. +- +-By taking ownership of these tools and packaging them together in the form of gopls, the Go team will ensure that the Go development experience isn’t unnecessarily complicated for Go users. +-Having one editor backend will simplify the lives of Go developers, the Go team, and the maintainers of Go editor plugins. +- +-See Rebecca's excellent GopherCon keynote [talk] and [slides] for some more context. +- +-## Non-Goals +- +-* Command line speed +- +- Although gopls will have a command line mode, it will be optimized for long running and not command responsiveness, as such it may not be the right tool for things like CI systems. +- For such cases there will have to be an alternate tool using the same underlying libraries for consistency. +- +-* Low memory environments +- +- In order to do a good job of processing large projects with very low latencies gopls will be holding a lot of information in memory. +- It is presumed that developers are normally working on systems with significant RAM and this will not be a problem. +- In general this is upheld by the large memory usage of existing IDE solutions (like IntelliJ) +- +-* Syntax highlighting +- +- At the moment there is no editor that delegates this functionality to a separate binary, and no standard way of doing it. +- +-## Existing solutions +- +-Every year the Go team conducts a survey, asking developers about their experiences with the language. +- +-One question that is asked is “How do you feel about your editor?”. +- +-The responses told a very negative story. 
Some categorized quotes: +- +-* Setup +- * "Hard to install and configure" +- * "Inadequate documentation" +-* Performance +- * "Performance is very poor" +- * "Pretty slow in large projects" +-* Reliability +- * "Features work one day, but not the next" +- * "Tooling is not updated with new language features" +- +-Each editor has its own plugin that shells out to a variety of tools, many of which break with new Go releases or because they are no longer maintained. +- +-The individual tools each have to do the work to understand the code and all its transitive dependencies. +- +-Each feature is a different tool, with a different set of patterns for its command line, a different way to accept input and parse output, a different way of specifying source code locations. +-To support its existing feature set, VSCode installed 24 different command line tools, many of which have options or forks to configure. When looking at the set of tools that needed to be migrated to modules, across all the editors, there were 63 separate tools. +- +-All these tools need to understand the code, and they use the same standard libraries to do it. Those libraries are optimized for these kinds of tools, but even so processing that much code takes a lot of time time. Almost none of the tools are capable of returning results within 100ms. +-As developers type in their editor, multiple of these features need to activate, which means they are not just paying the cost once, but many times. The overall effect is an editing experience that feels sluggish, and features that are either not enabled or sometimes produce results that appear so slowly they are no longer useful when they arrive. This is a problem that increases with the size of the code base, which means it is getting worse over time, and is especially bad for the kinds of large code bases companies are dealing with as they use Go for more major tasks. +- +-## Requirements +- +-### Complete feature set +- +-For gopls to be considered a success it has to implement the full feature set discussed [below](#Features). +-This is the set of features that users need in order to feel as productive as they were with the tooling it is replacing. It does not include every feature of previous implementations, there are some features that are almost never used that should be dropped (like guru's pointer analysis) and some other features that do not easily fit and will have to be worked around (replacing the save hook/linter). +- +-### Equivalent or better experience +- +-For all of those features, the user experience must match or exceed the current one available in all editors. +-This is an easy statement to make, but a hard one to validate or measure. Many of the possible measures fail to capture the experience. +- +-For instance, if an attempt was made to measure the latency of a jump to definition call, the results would be fairly consistent from the old godef tool. From the gopls implementation there may be a much larger range of latencies, with the best being orders of magnitude faster, and the worse slightly worse, because gopls attempts to do far more work, but manages to cache it across calls. +- +-Or for a completion call, it might be slower but produce a better first match such that users accept it more often, resulting in an overall better experience. +- +-For the most part this has to rely on user reports. 
If users are refusing to switch because the experience is not better, it is clearly not done, if they are switching but most people are complaining, there are probably enough areas that are better to make the switch compelling but other areas which are worse. If most people are switching and either staying silent or being positive, it is probably done. When writing tools, the user is all that matters. +- +-### Solid community of contributors +- +-The scope and scale of the problem gopls is trying to solve is untenable for the core Go team, it is going to require a strong community to make it all happen. +- +-This implies the code must be easy to contribute to, and easy for many developers to work on in parallel. The functionality needs to be well decoupled, and have a thorough testing story. +- +-### Latencies that fall within user tolerance +- +-There has been a lot of research on acceptable latencies for user actions. +- +-The main result that affects gopls is that feedback in direct response to continuous user actions needs to be under 100ms to be imperceptible, and anything above 200ms aggravates the user. +-This means in general the aim has to be <100ms for anything that happens as the developer types. +-There will always be cases where gopls fails to meet this deadline, and there needs to be ways to make the user experience okay in those cases, but in general the point of this deadline is to inform the basic architecture design, any solution that cannot theoretically meet this goal in the long term is the wrong answer. +- +-### Easy to configure +- +-Developers are very particular, and have very differing desires in their coding experience. gopls is going to have to support a significant amount of flexibility, in order to meet those desires. +-The default settings however with no configuration at all must be the one that is best experience for most users, and where possible the features must be flexible without configuration so that the client can easily make the choices about treatment without changing its communication with gopls. +- +-## Difficulties +- +-### Volume of data +- +- +-* Small: +-* Medium: +-* Large: +-* Corporate mono-repo: Much much bigger +- +-Parsing and type checking large amounts of code is quite expensive, and the converted forms use a lot of space. As gopls has to keep updating this information while the developer types, it needs to manage how it caches the converted forms very carefully to balance memory use vs speed. +- +-### Cache invalidation +- +-The basic unit of operation for the type checking is the package, but the basic unit of operation for an editor is the file. +-gopls needs to be able to map files to packages efficiently, so that when files change it knows which packages need to be updated (along with any other packages that transitively depended on them). +-This is made especially difficult by the fact that changing the content of a file can modify which packages it is considered part of (either by changing the package declaration or the build tags), a file can be in more than one package, and changes can be made to files without using the editor, in which case it will not notify us of the changes. +- +-### Inappropriate core functionality +- +-The base libraries for Go (things like [go/token], [go/ast] and [go/types]) are all designed for compiler-like applications. 
+-They tend to worry more about throughput than memory use, they have structures that are intended to grow and then be thrown away at program exit, and they are not designed to keep going in the presence of errors in the source they are handling. +-They also have no abilities to do incremental changes. +- +-Making a long running service work well with those libraries is a very large challenge, but writing new libraries would be far more work, and cause a significant long term cost as both sets of libraries would have to be maintained. Right now it is more important to get a working tool into the hands of users. In the long term this decision may have to be revisited, new low level libraries may be the only way to keep pushing the capabilities forwards. +- +-### Build system capabilities +- +-gopls is supposed to be build system agnostic, but it must use the build system to discover how files map to packages. When it tries to do so, even when the functionality is the same, the costs (in time, CPU and memory) are very different, and can significantly impact the user experience. Designing how gopls interacts with the build system to try to minimize or hide these differences is hard. +- +-### Build tags +- +-The build tag system in Go is quite powerful, and has many use cases. Source files can exclude themselves using powerful boolean logic on the set of active tags. +-It is however designed for specifying the set of active tags on the command line, and the libraries are all designed to cope with only one valid combination at a time. There is also no way to work out the set of valid combinations. +- +-Type checking a file requires knowledge of all the other files in the same package, and that set of files is modified by the build tags. The set of exported identifiers of a package is also affected by which files are in the package, and thus its build tags. +- +-This means that even for files or packages that have no build tag controls it is not possible to produce correct results without knowing the set of build tags to consider. +-This makes it very hard to produce useful results when viewing a file. +- +-### Features not supported by LSP +- +-There are some things it would be good to be able to do that do not fit easily into the existing LSP protocol. +-For instance, displaying control flow information, automatic struct tags, complex refactoring... +- +-Each feature will have to be considered carefully, and either propose a change to LSP, or add a way to have gopls specific extensions to the protocol that are still easy to use in all the editor plugins. +- +-To avoid these at the start, only core LSP features will be implemented, as they are sufficient to meet the baseline requirements anyway, but the potential features need to be kept in mind in the core architecture. +- +-### Distribution +- +-Making sure that users are using the right version of gopls is going to be a problem. Each editor plugin is probably going to install the tools in its own way, some will choose to install it system wide, some will keep their own copy. +- +-Because it is a brand new tool, it will be changing rapidly. If users are not informed they are on an old version they will be experiencing problems that have already been fixed, which is worse for them, and then probably reporting them, which wastes time for the gopls team. There needs to be a mechanism for gopls to check if is up to date, and a recommended way to install an up to date version. 
+- +-### Debugging user problems +- +-gopls is essentially a very stateful long running server on the developer's machine. Its basic operation is affected by many things, from the users environment to the contents of the local build cache. The data it is operating on is often a confidential code base that cannot be shared. +-All of these things make it hard for users to report a bug usefully, or create a minimal reproduction. +- +-There needs to be easy ways for users to report what information they can, and ways to attempt to reproduce problems without their entire state. This is also needed to produce regression tests. +- +-## Basic design decisions +- +-There are some fundamental architecture decisions that affect much of the rest of the design of the tool, making fundamental trade offs that impact the user experience. +- +-### Process lifetime: *managed by the editor* +- +-Processing a large code base to fully type check and then analyze it within the latency requirements is not feasible, and is one of the primary problems with the existing solutions. This remains true even if the computed information was cached on disk, as running analyzers and type checkers ends up requiring the full AST of all files in the dependency graph. +-It is theoretically possible to do better, but only with a major re-write of the existing parsing and type checking libraries, something that is not feasible at this time. +- +-This implies that gopls should be a long running process, that is able to cache and pre-calculate results in memory so that when a request arrives it can produce the answer much faster. +- +-It could run as a daemon on the user's machine, but there are a lot of issues with managing a daemon. It may well be the right choice in the long term, and it should be allowed for in the fundamental architecture design, but to start with it will instead have a process that lasts as long as the editor that starts it, and that can easily be restarted. +- +-### Caching: *in memory* +- +-Persistent disk caches are very expensive to maintain, and require solving a lot of extra problems. +-Although building the information required is expensive compared to the latencies required of the requests, it is fairly minor compared to the startup times of an editor, so it is expected that rebuilding the information when gopls is restarted will be acceptable. +- +-The advantage gained from this is that gopls becomes stateless across restarts which means if it has issues or gets its state confused, a simple restart will often fix the problem. +-It also means that when users report problems, the entire state of the on disk cache is not needed to diagnose and reproduce the issue. +- +-### Communication: *stdin/stdout JSON* +- +-The LSP specification defines the JSON messages that are normally used, but it does not define how those message should be sent, and there are implementations of the LSP that do not use JSON (for instance, Protocol buffers are an option). +- +-The constraints on gopls are that it must be easy to integrate into *every editor* on *all operating systems*, and that it should not have large external dependencies. +- +-JSON is part of the Go standard library, and is also the native language of LSP, so it makes the most sense. By far the best supported communication mechanism is the standard input and output of a process, and the common client implementations all have ways of using [JSON rpc 2] in this mode. 
There were no complete and low dependency implementations of this protocol in Go, but it is a fairly small protocol on top of the JSON library that can be implemented with a moderate effort, and would be a generally useful library to have anyway. +- +-In the future it is expected to run in separated client server mode, so writing it in a way that could use sockets instead of stdin/stdout from the start was the best way to make sure it remained possible. It was also a huge debugging aid to be able to run the gopls server by hand and watch/debug it outside the editor. +- +-### Running other tools: *no* +- +- +- +-## Features +- +- +- +-There is a set of features that gopls needs to expose to be a comprehensive IDE solution. +-The following is the minimum set of features, along with their existing solutions and how they should map to the LSP. +- +-### Introspection +- +-Introspection features tell developers information about their code while they work. They do not make or suggest changes. +- +---- +-Diagnostics | Static analysis results of the code, including compilation and lint errors +------------ | --- +-Requires | Full go/analysis run, which needs full AST, type and SSA information +-LSP | [`textDocument/publishDiagnostics`] +-Previous | `go build`, `go vet`, `golint`, [errcheck], [staticcheck] +-| | This is one of the most important IDE features, allowing fast turn around without having to run compilers and checkers in the shell. Often used to power problem lists, gutter markers and squiggle underlines in the IDE.
There is some complicated design work to do in order to let users customize the set of checks being run, preferably without having to recompile the main LSP binary. +- +---- +-Hover | Information about the code under the cursor. +--------- | --- +-Requires | AST and type information for the file and all dependencies +-LSP | [`textDocument/hover`] +-Previous | [godoc], [gogetdoc] +-| | Used when reading code to display information known to the compiler but not always obvious from the code. For instance it may return the types of identifiers, or the documentation. +- +---- +-Signature help | Function parameter information and documentation +--------------- | --- +-Requires | AST and type information for the file and all dependencies +-LSP | [`textDocument/signatureHelp`] +-Previous | [gogetdoc] +-| | As a function call is being typed into code, it is helpful to know the parameters of that call to enable the developer to call it correctly. +- +-### Navigation +- +-Navigation features are designed to make it easier for a developer to find their way round a code base. +- +---- +-Definition | Select an identifier, and jump to the code where that identifier was defined. +----------- | --- +-Requires | Full type information for file and all dependencies +-LSP | [`textDocument/declaration`] +-| | [`textDocument/definition`] +-| | [`textDocument/typeDefinition`] +-Previous | [godef] | +-| | Asking the editor to open the place where a symbol was defined is one of the most commonly used code navigation tools inside an IDE when available. It is especially valuable when exploring an unfamiliar code base.
Due to a limitation of the compiler output, it is not possible to use the binary data for this task (specifically it does not know column information) and thus it must parse from source. +- +---- +-Implementation | Reports the types that implement an interface +--------------- | --- +-Requires | Full workspace type knowledge +-LSP | [`textDocument/implementation`] +-Previous | [impl] +-| | This feature is hard to scale up to large code bases, and is going to take thought to get right. It may be feasible to implement a more limited form in the meantime. +- +---- +-Document symbols | Provides the set of top level symbols in the current file. +----------------- | --- +-Requires | AST of the current file only +-LSP | [`textDocument/documentSymbol`] +-Previous | [go-outline], [go-symbols] +-| | Used to drive things like outline mode. +- +---- +-References | Find all references to the symbol under the cursor. +----------- | --- +-Requires | AST and type information for the **reverse** transitive closure +-LSP | [`textDocument/references`] +-Previous | [guru] +-| | This requires knowledge of every package that could possibly depend on any packages the current file is part of. In the past this has been implemented either by global knowledge, which does not scale, or by specifying a "scope" which confused users to the point where they just did not use the tools. gopls is probably going to need a more powerful solution in the long term, but to start with automatically limiting the scope may produce acceptable results. This would probably be the module if known, or some sensible parent directory otherwise. +- +---- +-Folding | Report logical hierarchies of blocks +--------- | --- +-Requires | AST of the current file only +-LSP | [`textDocument/foldingRange`] +-Previous | [go-outline] +-| | This is normally used to provide expand and collapse behavior in editors. +- +---- +-Selection | Report regions of logical selection around the cursor +---------- | --- +-Requires | AST of the current file only +-LSP | [`textDocument/selectionRange`] +-Previous | [guru] +-| | Used in editor features like expand selection. +- +- +-### Edit assistance +- +-These features suggest or apply edits to the code for the user, including refactoring features, for which there are many potential use cases. +-Refactoring is one of the places where Go tools could potentially be very strong, but have not been so far, and thus there is huge potential for improvements in the developer experience. +-There is not yet a clear understanding of the kinds of refactoring people need or how they should express them, however, and there are weaknesses in the LSP protocol around this. +-This means it may be much more of a research project. +- +- +---- +-Format | Fix the formatting of the file +--------- | --- +-Requires | AST of current file +-LSP | [`textDocument/formatting`] +-| | [`textDocument/rangeFormatting`] +-| | [`textDocument/onTypeFormatting`] +-Previous | [gofmt], [goimports], [goreturns] +-| | It will use the standard format package.
Current limitations are that it does not work on malformed code. It may need some very careful changes to the formatter to allow for formatting an invalid AST or changes to force the AST to a valid mode. These changes would improve range and file mode as well, but are basically vital to onTypeFormatting +- +---- +-Imports | Rewrite the imports block automatically to match the symbols used. +--------- | --- +-Requires | AST of the current file and full symbol knowledge for all candidate packages. +-LSP | [`textDocument/codeAction`] +-Previous | [goimports], [goreturns] +-| | This needs knowledge of packages that are not yet in use, and the ability to find those packages by name.
It also needs exported symbol information for all the packages it discovers.
It should be implemented using the standard imports package, but there may need to be exposed a more fine grained API than just a file rewrite for some of the interactions. +- +---- +-Autocompletion | Makes suggestions to complete the entity currently being typed. +--------------- | --- +-Requires | AST and type information for the file and all dependencies
Also full exported symbol knowledge for all packages. +-LSP | [`textDocument/completion`] +-| | [`completionItem/resolve`] +-Previous | [gocode] +-| | Autocomplete is one of the most complicated features, and the more it knows the better its suggestions can be. For instance it can autocomplete into packages that are not yet being imported if it has their public symbols. It can make better suggestions of options if it knows what kind of program you are writing. It can suggest better arguments if it knows how you normally call a function. It can suggest entire patterns of code if it knows they are common. Unlike many other features, which have a specific task, and once it is doing that task the feature is done, autocomplete will never be finished. Balancing and improving both the candidates and how they are ranked will be a research problem for a long time to come. +- +---- +-Rename | Rename an identifier +--------- | --- +-Requires | AST and type information for the **reverse** transitive closure +-LSP | [`textDocument/rename`] +-| | [`textDocument/prepareRename`] +-Previous | golang.org/x/tools/cmd/gorename +-| | This uses the same information that find references does, with all the same problems and limitations. It is slightly worse because the changes it suggests make it intolerant of incorrect results. It is also dangerous using it to change the public API of a package. +- +---- +-Suggested fixes | Suggestions that can be manually or automatically accepted to change the code +---------------- | --- +-Requires | Full go/analysis run, which needs full AST, type and SSA information +-LSP | [`textDocument/codeAction`] +-Previous | N/A +-| | This is a brand new feature powered by the new go/analysis engine, and it should allow a huge amount of automated refactoring. +- +-[LSP specification]: https://microsoft.github.io/language-server-protocol/specifications/specification-3-14/ +-[talk]: https://www.youtube.com/watch?v=EFJfdWzBHwE +-[slides]: https://github.com/gophercon/2019-talks/blob/master/RebeccaStambler-GoPleaseStopBreakingMyEditor/slides.pdf "Go, please stop breaking my editor!" 
+-[JSON rpc 2]: https://www.jsonrpc.org/specification +- +-[errcheck]: https://github.com/kisielk/errcheck +-[go-outline]: https://github.com/lukehoban/go-outline +-[go-symbols]: https://github.com/acroca/go-symbols +-[gocode]: https://github.com/stamblerre/gocode +-[godef]: https://github.com/rogpeppe/godef +-[godoc]: https://golang.org/cmd/godoc +-[gofmt]: https://golang.org/cmd/gofmt +-[gogetdoc]: https://github.com/zmb3/gogetdoc +-[goimports]: https://pkg.go.dev/golang.org/x/tools/cmd/goimports +-[goreturns]: https://github.com/sqs/goreturns +-[gotags]: https://github.com/jstemmer/gotags +-[guru]: https://pkg.go.dev/golang.org/x/tools/cmd/guru +-[impl]: https://github.com/josharian/impl +-[staticcheck]: https://staticcheck.io/docs/ +-[go/types]: https://golang.org/pkg/go/types/ +-[go/ast]: https://golang.org/pkg/go/ast/ +-[go/token]: https://golang.org/pkg/go/token/ +- +-[`completionItem/resolve`]:https://github.com/Microsoft/language-server-protocol/blob/gh-pages/_specifications/specification-3-14.md#completionItem_resolve +-[`textDocument/codeAction`]: https://github.com/Microsoft/language-server-protocol/blob/gh-pages/_specifications/specification-3-14.md#textDocument_codeAction +-[`textDocument/completion`]: https://github.com/Microsoft/language-server-protocol/blob/gh-pages/_specifications/specification-3-14.md#textDocument_completion +-[`textDocument/declaration`]: https://github.com/Microsoft/language-server-protocol/blob/gh-pages/_specifications/specification-3-14.md#textDocument_declaration +-[`textDocument/definition`]: https://github.com/Microsoft/language-server-protocol/blob/gh-pages/_specifications/specification-3-14.md#textDocument_definition +-[`textDocument/documentLink`]: https://github.com/Microsoft/language-server-protocol/blob/gh-pages/_specifications/specification-3-14.md#textDocument_documentLink +-[`textDocument/documentSymbol`]: https://github.com/Microsoft/language-server-protocol/blob/gh-pages/_specifications/specification-3-14.md#textDocument_documentSymbol +-[`textDocument/foldingRange`]: https://github.com/Microsoft/language-server-protocol/blob/gh-pages/_specifications/specification-3-14.md#textDocument_foldingRange +-[`textDocument/formatting`]: https://github.com/Microsoft/language-server-protocol/blob/gh-pages/_specifications/specification-3-14.md#textDocument_formatting +-[`textDocument/highlight`]: https://github.com/Microsoft/language-server-protocol/blob/gh-pages/_specifications/specification-3-14.md#textDocument_highlight +-[`textDocument/hover`]: https://github.com/Microsoft/language-server-protocol/blob/gh-pages/_specifications/specification-3-14.md#textDocument_hover +-[`textDocument/implementation`]: https://github.com/Microsoft/language-server-protocol/blob/gh-pages/_specifications/specification-3-14.md#textDocument_implementation +-[`textDocument/onTypeFormatting`]:https://github.com/Microsoft/language-server-protocol/blob/gh-pages/_specifications/specification-3-14.md#textDocument_onTypeFormatting +-[`textDocument/prepareRename`]:https://github.com/Microsoft/language-server-protocol/blob/gh-pages/_specifications/specification-3-14.md#textDocument_prepareRename +-[`textDocument/publishDiagnostics`]: https://github.com/Microsoft/language-server-protocol/blob/gh-pages/_specifications/specification-3-14.md#textDocument_publishDiagnostics +-[`textDocument/rangeFormatting`]: https://github.com/Microsoft/language-server-protocol/blob/gh-pages/_specifications/specification-3-14.md#textDocument_rangeFormatting +-[`textDocument/references`]: 
https://github.com/Microsoft/language-server-protocol/blob/gh-pages/_specifications/specification-3-14.md#textDocument_references +-[`textDocument/rename`]: https://github.com/Microsoft/language-server-protocol/blob/gh-pages/_specifications/specification-3-14.md#textDocument_rename +-[`textDocument/selectionRange`]:https://github.com/Microsoft/language-server-protocol/blob/gh-pages/_specifications/specification-3-14.md#textDocument_selectionRange +-[`textDocument/signatureHelp`]: https://github.com/Microsoft/language-server-protocol/blob/gh-pages/_specifications/specification-3-14.md#textDocument_signatureHelp +-[`textDocument/typeDefinition`]: https://github.com/Microsoft/language-server-protocol/blob/gh-pages/_specifications/specification-3-14.md#textDocument_typeDefinition +-[`workspace/didChangeWatchedFiles`]: https://github.com/Microsoft/language-server-protocol/blob/gh-pages/_specifications/specification-3-14.md#workspace_didChangeWatchedFiles +diff -urN a/gopls/doc/design/implementation.md b/gopls/doc/design/implementation.md +--- a/gopls/doc/design/implementation.md 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/doc/design/implementation.md 1969-12-31 18:00:00.000000000 -0600 +@@ -1,172 +0,0 @@ +---- +-title: "Gopls: Implementation" +---- +- +-Last major update: Jan 16 2024 +- +-This doc presents a high-level overview of the structure of gopls to +-help new contributors find their way. It is not intended to be a +-complete description of the implementation, nor even of any key +-components; for that, the package documentation (linked below) and +-other comments within the code are a better guide. +- +-The diagram below shows selected components of the gopls module and +-their relationship to each other according to the Go import graph. +-Tests and test infrastructure are not shown, nor are utility packages, +-nor packages from the [x/tools] module. For brevity, packages are +-referred to by their last segment, which is usually unambiguous. +- +-The height of each blob corresponds loosely to its technical depth. +-Some blocks are wide and shallow, such as [protocol], which declares +-Go types for the entire LSP protocol. Others are deep, such as [cache] +-and [golang], as they contain a lot of dense logic and algorithms. +- +- +-![Gopls architecture](architecture.svg) +- +-Starting from the bottom, we'll describe the various components. +- +-The lowest layer defines the request and response types of the +-Language Server Protocol: +- +-- The [protocol] package defines the standard protocol; it is mostly +- generated mechanically from the schema definition provided by +- Microsoft. +- The most important type is DocumentURI, which represents a `file:` +- URL that identifies a client editor document. It also provides +- `Mapper`, which maps between the different coordinate systems used +- for source positions: UTF-8, UTF-16, and token.Pos. +- +-- The [command] package defines Gopls's non-standard commands, which +- are all invoked through the `workspace/executeCommand` extension +- mechanism. These commands are typically returned by the server as +- continuations of Code Actions or Code Lenses; most clients do not +- construct calls to them directly. +- +-The next layer defines a number of important and very widely used data structures: +- +-- The [file] package defines the primary abstractions of a client +- file: its `Identity` (URI and content hash), and its `Handle` (which +- additionally provides the version and content of a particular +- snapshot of the file. 
+- +-- The [parsego] package defines `File`, the parsed form of a Go source +- file, including its content, syntax tree, and coordinate mappings +- (Mapper and token.File). The package performs various kinds of tree +- repair to work around error-recovery shortcomings of the Go parser. +- +-- The [metadata] package defines `Package`, an abstraction of the +- metadata of a Go package, similar to the output of `go list -json`. +- Metadata is produced from [go/packages], which takes +- care of invoking `go list`. (Users report that it works to some extent +- with a GOPACKAGESDRIVER for Bazel, though we maintain no tests for this +- scenario.) +- +- The package also provides `Graph`, the complete import graph for a +- workspace; each graph node is a `Package`. +- +-The [settings] layer defines the data structure (effectively a large +-tree) for gopls configuration options, along with its JSON encoding. +- +-The [cache] layer is the largest and most complex component of gopls. +-It is concerned with state management, dependency analysis, and invalidation: +-the `Session` of communication with the client; +-the `Folder`s that the client has opened; +-the `View` of a particular workspace tree with particular build +-options; +-the `Snapshot` of the state of all files in the workspace after a +-particular edit operation; +-the contents of all files, whether saved to disk (`DiskFile`) or +-edited and unsaved (`Overlay`); +-the `Cache` of in-memory memoized computations, +-such as parsing go.mod files or building the symbol index; +-and the `Package`, which holds the results of type checking a package +-from Go syntax. +- +-The cache layer depends on various auxiliary packages, including: +- +-- The [filecache] package, which manages gopls' persistent, transactional, +- file-based key/value store. +- +-- The [xrefs], [methodsets], and [typerefs] packages define algorithms +- for constructing indexes of information derived from type-checking, +- and for encoding and decoding these serializable indexes in the file +- cache. +- +- Together these packages enable the fast restart, reduced memory +- consumption, and synergy across processes that were delivered by the +- v0.12 redesign and described in ["Scaling gopls for the growing Go +- ecosystem"](https://go.dev/blog/gopls-scalability). +- +-The cache also defines gopls's [go/analysis] driver, which runs +-modular analysis (similar to `go vet`) across the workspace. +-Gopls also includes a number of analysis passes that are not part of vet. +- +-The next layer defines four packages, each for handling files in a +-particular language: +-[mod] for go.mod files; +-[work] for go.work files; +-[template] for files in `text/template` syntax; and +-[golang], for files in Go itself. +-This package, by far the largest, provides the main features of gopls: +-navigation, analysis, and refactoring of Go code. +-As most users imagine it, this package _is_ gopls. +- +-The [server] package defines the LSP service implementation, with one +-handler method per LSP request type. Each handler switches on the type +-of the file and dispatches to one of the four language-specific +-packages. +- +-The [lsprpc] package connects the service interface to our [jsonrpc2](https://www.jsonrpc.org/specification) server. +- +-Bear in mind that the diagram is a dependency graph, a "static" +-viewpoint of the program's structure.
A more dynamic viewpoint would +-order the packages based on the sequence in which they are encountered +-during processing of a particular request; in such a view, the bottom +-layer would represent the "wire" (protocol and command), the next +-layer up would hold the RPC-related packages (lsprpc and server), and +-features (e.g. golang, mod, work, template) would be at the top. +- +- +- +-The [cmd] package defines the command-line interface of the `gopls` +-command, around which gopls's main package is just a trivial wrapper. +-It is usually run without arguments, causing it to start a server and +-listen indefinitely. +-It also provides a number of subcommands that start a server, make a +-single request to it, and exit, providing traditional batch-command +-access to server functionality. These subcommands are primarily +-provided as a debugging aid; but see https://go.dev/issue/63693. +- +-[cache]: https://pkg.go.dev/golang.org/x/tools/gopls@master/internal/cache +-[cmd]: https://pkg.go.dev/golang.org/x/tools/gopls@master/internal/cmd +-[command]: https://pkg.go.dev/golang.org/x/tools/gopls@master/internal/protocol/command +-[debug]: https://pkg.go.dev/golang.org/x/tools/gopls@master/internal/debug +-[file]: https://pkg.go.dev/golang.org/x/tools/gopls@master/internal/file +-[filecache]: https://pkg.go.dev/golang.org/x/tools/gopls@master/internal/filecache +-[go/analysis]: https://pkg.go.dev/golang.org/x/tools@master/go/analysis +-[go/packages]: https://pkg.go.dev/golang.org/x/tools@master/go/packages +-[gopls]: https://pkg.go.dev/golang.org/x/tools/gopls@master +-[jsonrpc]: https://pkg.go.dev/golang.org/x/tools@master/internal/jsonrpc +-[jsonrpc2]: https://pkg.go.dev/golang.org/x/tools@master/internal/jsonrpc2 +-[lsprpc]: https://pkg.go.dev/golang.org/x/tools/gopls@master/internal/lsprpc +-[memoize]: https://github.com/golang/tools/tree/master/internal/memoize +-[metadata]: https://pkg.go.dev/golang.org/x/tools/gopls@master/internal/cache/metadata +-[methodsets]: https://pkg.go.dev/golang.org/x/tools/gopls@master/internal/cache/methodsets +-[mod]: https://pkg.go.dev/golang.org/x/tools/gopls@master/internal/mod +-[parsego]: https://pkg.go.dev/golang.org/x/tools/gopls@master/internal/cache/parsego +-[protocol]: https://pkg.go.dev/golang.org/x/tools/gopls@master/internal/protocol +-[server]: https://pkg.go.dev/golang.org/x/tools/gopls@master/internal/server +-[settings]: https://pkg.go.dev/golang.org/x/tools/gopls@master/internal/settings +-[golang]: https://pkg.go.dev/golang.org/x/tools/gopls@master/internal/golang +-[template]: https://pkg.go.dev/golang.org/x/tools/gopls@master/internal/template +-[typerefs]: https://pkg.go.dev/golang.org/x/tools/gopls@master/internal/cache/typerefs +-[work]: https://pkg.go.dev/golang.org/x/tools/gopls@master/internal/work +-[x/tools]: https://github.com/golang/tools@master +-[xrefs]: https://pkg.go.dev/golang.org/x/tools/gopls@master/internal/cache/xrefs +diff -urN a/gopls/doc/design/integrating.md b/gopls/doc/design/integrating.md +--- a/gopls/doc/design/integrating.md 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/doc/design/integrating.md 1969-12-31 18:00:00.000000000 -0600 +@@ -1,93 +0,0 @@ +---- +-title: "Documentation for plugin authors" +---- +- +-If you are integrating `gopls` into an editor by writing an editor plugin, there are quite a few semantics of the communication between the editor and `gopls` that are not specified by the [LSP specification]. 
+- +-We attempt to document those details along with any other information that has been helpful to other plugin authors here. +- +-If you are implementing a plugin yourself and have questions this page does not answer, please reach out to us to ask, and then also contribute your findings back to this page. +- +-## Supported features +- +-For the most part you should look at the [Index of features](../features/) +-to know whether gopls supports a feature. +- +-For a truly authoritative answer you should check the [result][InitializeResult] of the [initialize] request, where gopls enumerates its support in the [ServerCapabilities]. +- +- +-## Positions and ranges +- +-Many LSP requests pass position or range information. This is described in the [LSP specification][lsp-text-documents]: +- +-> A position inside a document (see Position definition below) is expressed as a zero-based line and character offset. The offsets are based on a UTF-16 string representation. So a string of the form a𐐀b the character offset of the character a is 0, the character offset of 𐐀 is 1 and the character offset of b is 3 since 𐐀 is represented using two code units in UTF-16. +- +-This means that integrators will need to calculate UTF-16 based column offsets. +-Use `protocol.Mapper` for all the conversions. +- +-## Edits +- +-In order to deliver changes from gopls to the editor, the LSP supports arrays of [`TextEdit`][lsp-textedit]s in responses. +-The spec specifies exactly how these should be applied: +- +-> All text edits ranges refer to positions in the original document. Text edits ranges must never overlap, that means no part of the original document must be manipulated by more than one edit. However, it is possible that multiple edits have the same start position: multiple inserts, or any number of inserts followed by a single remove or replace edit. If multiple inserts have the same position, the order in the array defines the order in which the inserted strings appear in the resulting text. +- +-All `[]TextEdit` are sorted such that applying the array of deltas received in reverse order achieves the desired result that holds with the spec. +- +-## Errors +- +-Various error codes are described in the [LSP specification][lsp-response]. We are still determining what it means for a method to return an error; are errors only for low-level LSP/transport issues or can other conditions cause errors to be returned? See some of this discussion on [#31526]. +- +-The method chosen is currently influenced by the exact treatment in the currently popular editor integrations. It may well change, and ideally would become more coherent across requests. +- +-* [`textDocument/codeAction`]: Return error if there was an error computing code actions. +-* [`textDocument/completion`]: Log errors, return empty result list. +-* [`textDocument/definition`]: Return error if there was an error computing the definition for the position. +-* [`textDocument/typeDefinition`]: Return error if there was an error computing the type definition for the position. +-* [`textDocument/formatting`]: Return error if there was an error formatting the file. +-* [`textDocument/highlight`]: Log errors, return empty result. +-* [`textDocument/hover`]: Return empty result. +-* [`textDocument/documentLink`]: Log errors, return nil result. +-* [`textDocument/publishDiagnostics`]: Log errors if there were any while computing diagnostics. +-* [`textDocument/references`]: Log errors, return empty result. 
+-* [`textDocument/rename`]: Return error if there was an error computing renames. +-* [`textDocument/signatureHelp`]: Log errors, return nil result. +-* [`textDocument/documentSymbols`]: Return error if there was an error computing document symbols. +- +-## Watching files +- +-It is fairly normal for files that affect `gopls` to be modified outside of the editor it is associated with. +- +-For instance, files that are needed to do correct type checking are modified by switching branches in git, or updated by a code generator. +- +-Monitoring files inside gopls directly has a lot of awkward problems, but the [LSP specification] has methods that allow gopls to request that the client notify it of file system changes, specifically [`workspace/didChangeWatchedFiles`]. +-This is currently being added to gopls by a community member, and tracked in [#31553] +- +-[InitializeResult]: https://pkg.go.dev/golang.org/x/tools/gopls/internal/protocol#InitializeResult +-[ServerCapabilities]: https://pkg.go.dev/golang.org/x/tools/gopls/internal/protocol#ServerCapabilities +-[`golang.org/x/tools/gopls/internal/protocol`]: https://pkg.go.dev/golang.org/x/tools/internal/protocol#NewPoint +- +-[LSP specification]: https://microsoft.github.io/language-server-protocol/specifications/specification-3-14/ +-[lsp-response]: https://github.com/Microsoft/language-server-protocol/blob/gh-pages/_specifications/specification-3-14.md#response-message +-[initialize]: https://microsoft.github.io/language-server-protocol/specifications/specification-3-14/#initialize +-[lsp-text-documents]: https://github.com/Microsoft/language-server-protocol/blob/gh-pages/_specifications/specification-3-14.md#text-documents +-[lsp-textedit]: https://github.com/Microsoft/language-server-protocol/blob/gh-pages/_specifications/specification-3-14.md#textedit +- +-[`textDocument/codeAction`]: https://github.com/Microsoft/language-server-protocol/blob/gh-pages/_specifications/specification-3-14.md#textDocument_codeAction +-[`textDocument/completion`]: https://github.com/Microsoft/language-server-protocol/blob/gh-pages/_specifications/specification-3-14.md#textDocument_completion +-[`textDocument/definition`]: https://github.com/Microsoft/language-server-protocol/blob/gh-pages/_specifications/specification-3-14.md#textDocument_definition +-[`textDocument/typeDefinition`]: https://github.com/Microsoft/language-server-protocol/blob/gh-pages/_specifications/specification-3-14.md#textDocument_typeDefinition +-[`textDocument/formatting`]: https://github.com/Microsoft/language-server-protocol/blob/gh-pages/_specifications/specification-3-14.md#textDocument_formatting +-[`textDocument/highlight`]: https://github.com/Microsoft/language-server-protocol/blob/gh-pages/_specifications/specification-3-14.md#textDocument_highlight +-[`textDocument/hover`]: https://github.com/Microsoft/language-server-protocol/blob/gh-pages/_specifications/specification-3-14.md#textDocument_hover +-[`textDocument/documentLink`]: https://github.com/Microsoft/language-server-protocol/blob/gh-pages/_specifications/specification-3-14.md#textDocument_documentLink +-[`textDocument/publishDiagnostics`]: https://github.com/Microsoft/language-server-protocol/blob/gh-pages/_specifications/specification-3-14.md#textDocument_publishDiagnostics +-[`textDocument/references`]: https://github.com/Microsoft/language-server-protocol/blob/gh-pages/_specifications/specification-3-14.md#textDocument_references +-[`textDocument/rename`]: 
https://github.com/Microsoft/language-server-protocol/blob/gh-pages/_specifications/specification-3-14.md#textDocument_rename +-[`textDocument/signatureHelp`]: https://github.com/Microsoft/language-server-protocol/blob/gh-pages/_specifications/specification-3-14.md#textDocument_signatureHelp +-[`textDocument/documentSymbols`]: https://github.com/Microsoft/language-server-protocol/blob/gh-pages/_specifications/specification-3-14.md#textDocument_documentSymbols +-[`workspace/didChangeWatchedFiles`]: https://github.com/Microsoft/language-server-protocol/blob/gh-pages/_specifications/specification-3-14.md#workspace_didChangeWatchedFiles +- +-[#31080]: https://github.com/golang/go/issues/31080 +-[#31553]: https://github.com/golang/go/issues/31553 +-[#31526]: https://github.com/golang/go/issues/31526 +diff -urN a/gopls/doc/editor/emacs.md b/gopls/doc/editor/emacs.md +--- a/gopls/doc/editor/emacs.md 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/doc/editor/emacs.md 1969-12-31 18:00:00.000000000 -0600 +@@ -1,190 +0,0 @@ +---- +-title: "Gopls: Using Emacs" +---- +- +-## Installing `gopls` +- +-To use `gopls` with Emacs, you must first +-[install the `gopls` executable](../index.md#installation) and ensure that the directory +-containing the resulting binary (either `$(go env GOBIN)` or `$(go env +-GOPATH)/bin`) is in your `PATH`. +- +-## Choosing an Emacs LSP client +- +-To use `gopls` with Emacs, you will need to choose and install an Emacs LSP +-client package. Two popular client packages are [LSP Mode] and [Eglot]. +- +-LSP Mode takes a batteries-included approach, with many integrations enabled +-“out of the box” and several additional behaviors provided by `lsp-mode` itself. +- +-Eglot takes a minimally-intrusive approach, focusing on smooth integration with +-other established packages. It provides a few of its own `eglot-` commands but +-no additional keybindings by default. +- +-Once you have selected which client you want to use, install it per the packages +-instructions: see [Eglot 1-2-3](https://github.com/joaotavora/eglot#1-2-3) or +-[LSP Mode Installation](https://emacs-lsp.github.io/lsp-mode/page/installation/). +- +-## Common configuration +- +-Both Eglot and LSP Mode can integrate with popular packages in the Emacs +-ecosystem: +- +-* The built-in [`xref`] package provides cross-references. +-* The built-in [Flymake] package provides an on-the-fly diagnostic overlay. +-* [Company] mode displays code completion candidates (with a richer UI than +- the built-in [`completion-at-point`]). +- +-Eglot provides documentation using the built-in [ElDoc] minor mode, while LSP +-Mode by default provides documentation using its own [`lsp-ui`] mode. +- +-Eglot by default locates the project root using the [`project`] package. In LSP +-Mode, this behavior can be configured using the `lsp-auto-guess-root` setting. +- +-## Configuring LSP Mode +- +-### Loading LSP Mode in `.emacs` +- +-```elisp +-(require 'lsp-mode) +-(add-hook 'go-mode-hook #'lsp-deferred) +- +-;; Set up before-save hooks to format buffer and add/delete imports. +-;; Make sure you don't have other gofmt/goimports hooks enabled. +-(defun lsp-go-install-save-hooks () +- (add-hook 'before-save-hook #'lsp-format-buffer t t) +- (add-hook 'before-save-hook #'lsp-organize-imports t t)) +-(add-hook 'go-mode-hook #'lsp-go-install-save-hooks) +-``` +- +-### Configuring `gopls` via LSP Mode +- +-See [settings](../settings) for information about available gopls settings. 
+- +-Stable gopls settings have corresponding configuration variables in `lsp-mode`. +-For example, `(setq lsp-gopls-use-placeholders nil)` will disable placeholders +-in completion snippets. See [`lsp-go`] for a list of available variables. +- +-Experimental settings can be configured via `lsp-register-custom-settings`: +- +-```lisp +-(lsp-register-custom-settings +- '(("gopls.completeUnimported" t t) +- ("gopls.staticcheck" t t))) +-``` +- +-Note that after changing settings you must restart gopls using e.g. `M-x +-lsp-restart-workspace`. +- +-## Configuring Eglot +- +-### Configuring `project` for Go modules in `.emacs` +- +-Eglot uses the built-in `project` package to identify the LSP workspace for a +-newly-opened buffer. The `project` package does not natively know about `GOPATH` +-or Go modules. Fortunately, you can give it a custom hook to tell it to look for +-the nearest parent `go.mod` file (that is, the root of the Go module) as the +-project root. +- +-```elisp +-(require 'project) +- +-(defun project-find-go-module (dir) +- (when-let ((root (locate-dominating-file dir "go.mod"))) +- (cons 'go-module root))) +- +-(cl-defmethod project-root ((project (head go-module))) +- (cdr project)) +- +-(add-hook 'project-find-functions #'project-find-go-module) +-``` +- +-### Loading Eglot in `.emacs` +- +-```elisp +-;; Optional: load other packages before eglot to enable eglot integrations. +-(require 'company) +-(require 'yasnippet) +- +-(require 'go-mode) +-(require 'eglot) +-(add-hook 'go-mode-hook 'eglot-ensure) +- +-;; Optional: install eglot-format-buffer as a save hook. +-;; The depth of -10 places this before eglot's willSave notification, +-;; so that notification reports the actual contents that will be saved. +-(defun eglot-format-buffer-before-save () +- (add-hook 'before-save-hook #'eglot-format-buffer -10 t)) +-(add-hook 'go-mode-hook #'eglot-format-buffer-before-save) +-``` +- +-Use `M-x eglot-upgrade-eglot` to upgrade to the latest version of +-Eglot. +- +-### Configuring `gopls` via Eglot +- +-See [settings](../settings) for information about available gopls settings. +- +-LSP server settings are controlled by the `eglot-workspace-configuration` +-variable, which can be set either globally in `.emacs` or in a `.dir-locals.el` file in the project root. +- +-`.emacs`: +-```elisp +-(setq-default eglot-workspace-configuration +- '((:gopls . +- ((staticcheck . t) +- (matcher . "CaseSensitive"))))) +-``` +- +-`.dir-locals.el`: +-```elisp +-((nil (eglot-workspace-configuration . ((gopls . ((staticcheck . t) +- (matcher . "CaseSensitive"))))))) +-``` +- +-### Organizing imports with Eglot +- +-`gopls` provides the import-organizing functionality of `goimports` as an LSP +-code action, which you can invoke as needed by running `M-x eglot-code-actions` +-(or a key of your choice bound to the `eglot-code-actions` function) and +-selecting `Organize Imports` at the prompt. +- +-To automatically organize imports before saving, add a hook: +- +-```elisp +-(add-hook 'before-save-hook +- (lambda () +- (call-interactively 'eglot-code-action-organize-imports)) +- nil t) +-``` +- +-## Troubleshooting +- +-Common errors: +- +-* When prompted by Emacs for your project folder, if you are using modules you +- must select the module's root folder (i.e. the directory with the "go.mod"). +- If you are using GOPATH, select your $GOPATH as your folder. +-* Emacs must have your environment set properly (PATH, GOPATH, etc). You can +- run `M-x getenv PATH ` to see if your PATH is set in Emacs. 
If +- not, you can try starting Emacs from your terminal, using [this +- package][exec-path-from-shell], or moving your shell config from `.bashrc` +- into `.profile` and logging out and back in. +-* Make sure only one LSP client mode is installed. (For example, if using +- `lsp-mode`, ensure that you are not _also_ enabling `eglot`.) +-* Look for errors in the `*lsp-log*` buffer or run `M-x eglot-events-buffer`. +-* Ask for help in the `#emacs` channel on the [Gophers slack]. +- +-[LSP Mode]: https://emacs-lsp.github.io/lsp-mode/ +-[Eglot]: https://github.com/joaotavora/eglot/blob/master/README.md +-[`xref`]: https://www.gnu.org/software/emacs/manual/html_node/emacs/Xref.html +-[Flymake]: https://www.gnu.org/software/emacs/manual/html_node/flymake/Using-Flymake.html#Using-Flymake +-[Company]: https://company-mode.github.io/ +-[`completion-at-point`]: https://www.gnu.org/software/emacs/manual/html_node/elisp/Completion-in-Buffers.html +-[ElDoc]: https://elpa.gnu.org/packages/eldoc.html +-[`lsp-ui`]: https://emacs-lsp.github.io/lsp-ui/ +-[`lsp-go`]: https://github.com/emacs-lsp/lsp-mode/blob/master/clients/lsp-go.el +-[`use-package`]: https://github.com/jwiegley/use-package +-[`exec-path-from-shell`]: https://github.com/purcell/exec-path-from-shell +-[settings]: settings.md +-[Gophers slack]: https://invite.slack.golangbridge.org/ +diff -urN a/gopls/doc/editor/helix.md b/gopls/doc/editor/helix.md +--- a/gopls/doc/editor/helix.md 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/doc/editor/helix.md 1969-12-31 18:00:00.000000000 -0600 +@@ -1,53 +0,0 @@ +---- +-title: "Gopls: Using Helix" +---- +- +-Configuring `gopls` to work with Helix is rather straightforward. Install `gopls`, and then add it to the `PATH` variable. If it is in the `PATH` variable, Helix will be able to detect it automatically. +- +-The documentation explaining how to install the default language servers for Helix can be found [here](https://github.com/helix-editor/helix/wiki/How-to-install-the-default-language-servers) +- +-## Installing `gopls` +- +-The first step is to install `gopls` on your machine. +-You can follow installation instructions [here](https://github.com/golang/tools/tree/master/gopls#installation). +- +-## Setting your path to include `gopls` +- +-Set your `PATH` environment variable to point to `gopls`. +-If you used `go install` to download `gopls`, it should be in `$GOPATH/bin`. +-If you don't have `GOPATH` set, you can use `go env GOPATH` to find it. +- +-## Additional information +- +-You can find more information about how to set up the LSP formatter [here](https://github.com/helix-editor/helix/wiki/How-to-install-the-default-language-servers#autoformatting). +- +-It is possible to use `hx --health go` to see that the language server is properly set up. +- +-### Configuration +- +-The settings for `gopls` can be configured in the `languages.toml` file. +-The official Helix documentation for this can be found [here](https://docs.helix-editor.com/languages.html) +- +-Configuration pertaining to `gopls` should be in the table `language-server.gopls`. +- +-#### How to set flags +- +-To set flags, add them to the `args` array in the `language-server.gopls` section of the `languages.toml` file. +- +-#### How to set LSP configuration +- +-Configuration options can be set in the `language-server.gopls.config` section of the `languages.toml` file, or in the `config` key of the `language-server.gopls` section of the `languages.toml` file. 
+- +-#### A minimal config example +- +-In the `~/.config/helix/languages.toml` file, the following snippet would set up `gopls` with a logfile located at `/tmp/gopls.log` and enable staticcheck. +- +-```toml +-[language-server.gopls] +-command = "gopls" +-args = ["-logfile=/tmp/gopls.log", "serve"] +-[language-server.gopls.config] +-"ui.diagnostic.staticcheck" = true +-``` +- +- +diff -urN a/gopls/doc/editor/sublime.md b/gopls/doc/editor/sublime.md +--- a/gopls/doc/editor/sublime.md 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/doc/editor/sublime.md 1969-12-31 18:00:00.000000000 -0600 +@@ -1,83 +0,0 @@ +---- +-title: "Gopls: Using Sublime Text" +---- +- +-Use the [LSP] package. After installing it using Package Control, do the following: +- +-* Open the **Command Palette** +-* Find and run the command **LSP: Enable Language Server Globally** +-* Select the **gopls** item. Be careful not to select the similarly named *golsp* by mistake. +- +-Finally, you should familiarise yourself with the LSP package's *Settings* and *Key Bindings*. Find them under the menu item **Preferences > Package Settings > LSP**. +- +-## Examples +-Minimal global LSP settings, that assume **gopls** and **go** appear on the PATH seen by Sublime Text:
+-``` +-{ +- "clients": { +- "gopls": { +- "enabled": true, +- } +- } +-} +-``` +- +-Global LSP settings that supply a specific PATH for finding **gopls** and **go**, as well as some settings for Sublime LSP itself: +-``` +-{ +- "clients": { +- "gopls": { +- "enabled": true, +- "env": { +- "PATH": "/path/to/your/go/bin", +- } +- } +- }, +- // Recommended by https://agniva.me/gopls/2021/01/02/setting-up-gopls-sublime.html +- // except log_stderr mentioned there is no longer recognized. +- "show_references_in_quick_panel": true, +- "log_debug": true, +- // These two are recommended by LSP-json as replacement for deprecated only_show_lsp_completions +- "inhibit_snippet_completions": true, +- "inhibit_word_completions": true, +- } +- ``` +- +-LSP and gopls settings can also be adjusted on a per-project basis to override global settings. +-``` +-{ +- "folders": [ +- { +- "path": "/path/to/a/folder/one" +- }, +- { +- // If you happen to be working on Go itself, this can be helpful; go-dev/bin should be on PATH. +- "path": "/path/to/your/go-dev/src/cmd" +- } +- ], +- "settings": { +- "LSP": { +- "gopls": { +- // To use a specific version of gopls with Sublime Text LSP (e.g., to try new features in development) +- "command": [ +- "/path/to/your/go/bin/gopls" +- ], +- "env": { +- "PATH": "/path/to/your/go-dev/bin:/path/to/your/go/bin", +- "GOPATH": "", +- }, +- "settings": { +- "experimentalWorkspaceModule": true +- } +- } +- }, +- // This will apply for all languages in this project that have +- // LSP servers, not just Go, however cannot enable just for Go. +- "lsp_format_on_save": true, +- } +-} +-``` +- +-Usually changes to these settings are recognized after saving the project file, but it may sometimes be necessary to either restart the server(s) (**Tools > LSP > Restart Servers**) or quit and restart Sublime Text itself. +- +-[LSP]: https://packagecontrol.io/packages/LSP +diff -urN a/gopls/doc/editor/vim.md b/gopls/doc/editor/vim.md +--- a/gopls/doc/editor/vim.md 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/doc/editor/vim.md 1969-12-31 18:00:00.000000000 -0600 +@@ -1,236 +0,0 @@ +---- +-title: "Gopls: Using Vim or Neovim" +---- +- +-* [vim-go](#vimgo) +-* [LanguageClient-neovim](#lcneovim) +-* [Ale](#ale) +-* [vim-lsp](#vimlsp) +-* [vim-lsc](#vimlsc) +-* [coc.nvim](#cocnvim) +-* [govim](#govim) +-* [Neovim v0.5.0+](#neovim) +- * [Installation](#neovim-install) +- * [Custom Configuration](#neovim-config) +- * [Imports](#neovim-imports) +- * [Omnifunc](#neovim-omnifunc) +- * [Additional Links](#neovim-links) +- +-## vim-go +- +-Use [vim-go] ver 1.20+, with the following configuration: +- +-```vim +-let g:go_def_mode='gopls' +-let g:go_info_mode='gopls' +-``` +- +-## LanguageClient-neovim +- +-Use [LanguageClient-neovim], with the following configuration: +- +-```vim +-" Launch gopls when Go files are in use +-let g:LanguageClient_serverCommands = { +- \ 'go': ['gopls'] +- \ } +-" Run gofmt on save +-autocmd BufWritePre *.go :call LanguageClient#textDocument_formatting_sync() +-``` +- +-## Ale +- +-Use [ale]: +- +-```vim +-let g:ale_linters = { +- \ 'go': ['gopls'], +- \} +-``` +- +-see [this issue][ale-issue-2179] +- +-## vim-lsp +- +-Use [prabirshrestha/vim-lsp], with the following configuration: +- +-```vim +-augroup LspGo +- au! 
+- autocmd User lsp_setup call lsp#register_server({ +- \ 'name': 'gopls', +- \ 'cmd': {server_info->['gopls']}, +- \ 'whitelist': ['go'], +- \ }) +- autocmd FileType go setlocal omnifunc=lsp#complete +- "autocmd FileType go nmap gd (lsp-definition) +- "autocmd FileType go nmap ,n (lsp-next-error) +- "autocmd FileType go nmap ,p (lsp-previous-error) +-augroup END +-``` +- +-## vim-lsc +- +-Use [natebosch/vim-lsc], with the following configuration: +- +-```vim +-let g:lsc_server_commands = { +-\ "go": { +-\ "command": "gopls serve", +-\ "log_level": -1, +-\ "suppress_stderr": v:true, +-\ }, +-\} +-``` +- +-The `log_level` and `suppress_stderr` parts are needed to prevent breakage from logging. See +-issues [#180](https://github.com/natebosch/vim-lsc/issues/180) and +-[#213](https://github.com/natebosch/vim-lsc/issues/213). +- +-## coc.nvim +- +-Use [coc.nvim], with the following `coc-settings.json` configuration: +- +-```json +- "languageserver": { +- "go": { +- "command": "gopls", +- "rootPatterns": ["go.work", "go.mod", ".vim/", ".git/", ".hg/"], +- "filetypes": ["go"], +- "initializationOptions": { +- "usePlaceholders": true +- } +- } +- } +-``` +- +-If you use `go.work` files, you may want to set the +-`workspace.workspaceFolderCheckCwd` option. This will force coc.nvim to search +-parent directories for `go.work` files, even if the current open directory has +-a `go.mod` file. See the +-[coc.nvim documentation](https://github.com/neoclide/coc.nvim/wiki/Using-workspaceFolders) +-for more details. +- +-Other [settings](../settings) can be added in `initializationOptions` too. +- +-The `editor.action.organizeImport` code action will auto-format code and add missing imports. To run this automatically on save, add the following line to your `init.vim`: +- +-```vim +-autocmd BufWritePre *.go :call CocAction('runCommand', 'editor.action.organizeImport') +-``` +- +-## govim +- +-In vim classic only, use the experimental [`govim`], simply follow the [install steps][govim-install]. +- +-## Neovim v0.5.0+ +- +-To use the new native LSP client in Neovim, make sure you +-[install][nvim-install] Neovim v.0.5.0+, +-the `nvim-lspconfig` configuration helper plugin, and check the +-[`gopls` configuration section][nvim-lspconfig] there. +- +-### Installation +- +-You can use Neovim's native plugin system. On a Unix system, you can do that by +-cloning the `nvim-lspconfig` repository into the correct directory: +- +-```sh +-dir="${HOME}/.local/share/nvim/site/pack/nvim-lspconfig/opt/nvim-lspconfig/" +-mkdir -p "$dir" +-cd "$dir" +-git clone 'https://github.com/neovim/nvim-lspconfig.git' . +-``` +- +-### Configuration +- +-nvim-lspconfig aims to provide reasonable defaults, so your setup can be very +-brief. +- +-```lua +-local lspconfig = require("lspconfig") +-lspconfig.gopls.setup({}) +-``` +- +-However, you can also configure `gopls` for your preferences. Here's an +-example that enables `unusedparams`, `staticcheck`, and `gofumpt`. +- +-```lua +-local lspconfig = require("lspconfig") +-lspconfig.gopls.setup({ +- settings = { +- gopls = { +- analyses = { +- unusedparams = true, +- }, +- staticcheck = true, +- gofumpt = true, +- }, +- }, +-}) +-``` +- +-### Imports and Formatting +- +-Use the following configuration to have your imports organized on save using +-the logic of `goimports` and your code formatted. 
+- +-```lua +-autocmd("BufWritePre", { +- pattern = "*.go", +- callback = function() +- local params = vim.lsp.util.make_range_params() +- params.context = {only = {"source.organizeImports"}} +- -- buf_request_sync defaults to a 1000ms timeout. Depending on your +- -- machine and codebase, you may want longer. Add an additional +- -- argument after params if you find that you have to write the file +- -- twice for changes to be saved. +- -- E.g., vim.lsp.buf_request_sync(0, "textDocument/codeAction", params, 3000) +- local result = vim.lsp.buf_request_sync(0, "textDocument/codeAction", params) +- for cid, res in pairs(result or {}) do +- for _, r in pairs(res.result or {}) do +- if r.edit then +- local enc = (vim.lsp.get_client_by_id(cid) or {}).offset_encoding or "utf-16" +- vim.lsp.util.apply_workspace_edit(r.edit, enc) +- end +- end +- end +- vim.lsp.buf.format({async = false}) +- end +-}) +-``` +- +-### Omnifunc +- +-In Neovim v0.8.1 and later if you don't set the option `omnifunc`, it will auto +-set to `v:lua.vim.lsp.omnifunc`. If you are using an earlier version, you can +-configure it manually: +- +-```lua +-local on_attach = function(client, bufnr) +- -- Enable completion triggered by +- vim.api.nvim_buf_set_option(bufnr, 'omnifunc', 'v:lua.vim.lsp.omnifunc') +-end +-require('lspconfig').gopls.setup({ +- on_attach = on_attach +-}) +-``` +- +-### Additional Links +- +-* [Neovim's official LSP documentation][nvim-docs]. +- +-[vim-go]: https://github.com/fatih/vim-go +-[LanguageClient-neovim]: https://github.com/autozimu/LanguageClient-neovim +-[ale]: https://github.com/w0rp/ale +-[ale-issue-2179]: https://github.com/w0rp/ale/issues/2179 +-[prabirshrestha/vim-lsp]: https://github.com/prabirshrestha/vim-lsp/ +-[natebosch/vim-lsc]: https://github.com/natebosch/vim-lsc/ +-[natebosch/vim-lsc#180]: https://github.com/natebosch/vim-lsc/issues/180 +-[coc.nvim]: https://github.com/neoclide/coc.nvim/ +-[`govim`]: https://github.com/myitcv/govim +-[govim-install]: https://github.com/myitcv/govim/blob/master/README.md#govim---go-development-plugin-for-vim8 +-[nvim-docs]: https://neovim.io/doc/user/lsp.html +-[nvim-install]: https://github.com/neovim/neovim/wiki/Installing-Neovim +-[nvim-lspconfig]: https://github.com/neovim/nvim-lspconfig/blob/master/doc/configs.md#gopls +-[nvim-lspconfig-imports]: https://github.com/neovim/nvim-lspconfig/issues/115 +diff -urN a/gopls/doc/editor/zed.md b/gopls/doc/editor/zed.md +--- a/gopls/doc/editor/zed.md 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/doc/editor/zed.md 1969-12-31 18:00:00.000000000 -0600 +@@ -1,19 +0,0 @@ +---- +-title: "Gopls: Using Zed" +---- +- +-## Install `gopls` +- +-To use `gopls` with [Zed](https://zed.dev/), first +-[install the `gopls` executable](../index.md#installation) and ensure that the directory +-containing the resulting binary (either `$(go env GOBIN)` or `$(go env +-GOPATH)/bin`) is in your `PATH`. +- +-## That's it +- +-Zed has a built-in LSP client and knows to run `gopls` when visiting a +-Go source file, so most features work right out of the box. +- +-Zed does not yet support external `window/showDocument` requests, +-so web-based features will not work; +-see [Zed issue 24852](https://github.com/zed-industries/zed/discussions/24852). 
+diff -urN a/gopls/doc/features/assembly.md b/gopls/doc/features/assembly.md +--- a/gopls/doc/features/assembly.md 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/doc/features/assembly.md 1969-12-31 18:00:00.000000000 -0600 +@@ -1,31 +0,0 @@ +---- +-title: "Gopls: Support for Go *.s assembly files" +---- +- +-Gopls has rudimentary support for LSP operations in Go assembly files. +- +-Go assembly files have a `.s` file name extension. LSP clients need to +-be configured to recognize `.s` files as Go assembly files, since this +-file name extension is also used for assembly files in other +-languages. A good heuristic is that if a file named `*.s` belongs to a +-directory containing at least one `*.go` file, then the `.s` file is +-Go assembly, and its appropriate language server is gopls. +- +-Only Definition (`textDocument/definition`) requests are currently +-supported. For example, a Definition request on the `sigpanic` +-symbol in this file in GOROOT/src/runtime/asm.s: +- +-```asm +- JMP ·sigpanic(SB) +-``` +- +-returns the location of the function declaration in +-GOROOT/src/runtime/signal_unix.go: +- +-```go +-//go:linkname sigpanic +-func sigpanic() { +-``` +- +-See also issue https://go.dev/issue/71754, which tracks the development of LSP +-features in Go assembly files. +\ No newline at end of file +diff -urN a/gopls/doc/features/completion.md b/gopls/doc/features/completion.md +--- a/gopls/doc/features/completion.md 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/doc/features/completion.md 1969-12-31 18:00:00.000000000 -0600 +@@ -1,5 +0,0 @@ +---- +-title: "Gopls: Completion" +---- +- +-TODO(https://go.dev/issue/62022): document +diff -urN a/gopls/doc/features/diagnostics.md b/gopls/doc/features/diagnostics.md +--- a/gopls/doc/features/diagnostics.md 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/doc/features/diagnostics.md 1969-12-31 18:00:00.000000000 -0600 +@@ -1,328 +0,0 @@ +---- +-title: "Gopls: Diagnostics" +---- +- +-Gopls continuously annotates all your open files of source code with a +-variety of diagnostics. Every time you edit a file or make a +-configuration change, gopls asynchronously recomputes these +-diagnostics and sends them to the client using the LSP +-[`publishDiagnostics`](https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#textDocument_publishDiagnostics) +-notification, giving you real-time feedback that reduces the cost of +-common mistakes. +- +-Diagnostics come from two main sources: compilation errors and analysis findings. +- +-- **Compilation errors** are those that you would obtain from running `go +-build`. Gopls doesn't actually run the compiler; that would be too +- slow. Instead it runs `go list` (when needed) to compute the +- metadata of the compilation, then processes those packages in a similar +- manner to the compiler front-end: reading, scanning, and parsing the +- source files, then type-checking them. Each of these steps can +- produce errors that gopls will surface as a diagnostic. +- +- The `source` field of the LSP `Diagnostic` record indicates where +- the diagnostic came from: those with source `"go list"` come from +- the `go list` command, and those with source `"compiler"` come from +- gopls' parsing or type checking phases, which are similar to those +- used in the Go compiler. +- +- ![A diagnostic due to a type error](../assets/diagnostic-typeerror.png) +- +- The example above shows a `string + int` addition, causes the type +- checker to report a `MismatchedTypes` error. 
The diagnostic contains +- a link to the documentation about this class of type error. +- +-- **Analysis findings** come from the [**Go analysis +- framework**](https://golang.org/x/tools/go/analysis), the system +- used by `go vet` to apply a variety of additional static checks to +- your Go code. The best-known example is the [`printf` +- analyzer](https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/printf), +- which reports calls to [`fmt.Printf`](https://pkg.go.dev/fmt#Printf) +- where the format "verb" doesn't match the argument, such as +- `fmt.Printf("%d", "three")`. +- +- Gopls provides dozens of analyzers aggregated from a variety of +- suites; see [Analyzers](../analyzers.md) for the complete list. The +- `source` field of each diagnostic produced by an analyzer records +- the name of the analyzer that produced it. +- +- ![A diagnostic due to an analysis finding](../assets/diagnostic-analysis.png) +- +- The example above shows a `printf` formatting mistake. The diagnostic contains +- a link to the documentation for the `printf` analyzer. +- +-There is an optional third source of diagnostics: +- +- +- +-- **Compiler optimization details** are diagnostics that report +- details relevant to optimization decisions made by the Go +- compiler, such as whether a variable escapes or a slice index +- requires a bounds check. +- +- Optimization decisions include: +- whether a variable escapes, and how escape is inferred; +- whether a nil-pointer check is implied or eliminated; and +- whether a function can be inlined. +- +- This source is disabled by default but can be enabled on a +- package-by-package basis by invoking the +- `source.toggleCompilerOptDetails` ("{Show,Hide} compiler optimization +- details") code action. +- +- Remember that the compiler's optimizer runs only on packages that +- are transitively free from errors, so optimization diagnostics +- will not be shown on packages that do not build. +- +- +-## Recomputation of diagnostics +- +-By default, diagnostics are automatically recomputed each time the source files +-are edited. +- +-Compilation errors in open files are updated after a very short delay +-(tens of milliseconds) after each file change, potentially after every keystroke. +-This ensures rapid feedback of syntax and type errors while editing. +- +-Compilation and analysis diagnostics for the whole workspace are much +-more expensive to compute, so they are usually recomputed after a +-short idle period (around 1s) following an edit. +- +-The [`diagnosticsDelay`](../settings.md#diagnosticsDelay) setting determines +-this period. +-Alternatively, diagnostics may be triggered only after an edited file +-is saved, using the +-[`diagnosticsTrigger`](../settings.md#diagnosticsTrigger) setting. +- +-When initialized with `"pullDiagnostics": true`, gopls also supports +-["pull diagnostics"](https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#textDocument_pullDiagnostics), +-an alternative mechanism for recomputing diagnostics in which the client +-requests diagnostics from gopls explicitly using the `textDocument/diagnostic` +-request. This feature is off by default until the performance of pull +-diagnostics is comparable to push diagnostics. +- +-## Quick fixes +- +-Each analyzer diagnostic may suggest one or more alternative +-ways to fix the problem by editing the code. 
+-For example, when a `return` statement has too few operands, +-the [`fillreturns`](../analyzers.md#fillreturns) analyzer +-suggests a fix that heuristically fills in the missing ones +-with suitable values. Applying the fix eliminates the compilation error. +- +-![An analyzer diagnostic with two alternative fixes](../assets/remove-unusedparam-before.png) +- +-The screenshot above shows VS Code's Quick Fix menu for an "unused +-parameter" analysis diagnostic with two alternative fixes. +-(See [Remove unused parameter](transformation.md#remove-unused-parameter) for more detail.) +- +-Suggested fixes that are indisputably safe are [code +-actions](transformation.md#code-actions) whose kind is +-`"source.fixAll"`. +-Many client editors have a shortcut to apply all such fixes. +- +- +- +-TODO(adonovan): audit all the analyzers to ensure that their +-documentation is up-to-date w.r.t. any fixes they suggest. +- +-Settings: +- +-- The [`diagnosticsDelay`](../settings.md#diagnosticsDelay) setting determines +- the idle period after an edit before diagnostics are recomputed. +-- The [`diagnosticsTriggerr`](../settings.md#diagnosticsTrigger) setting determines +- what events cause recomputation of diagnostics. +-- The [`linkTarget`](../settings.md#linkTarget) setting specifies +- the base URI for Go package links in the Diagnostic.CodeDescription field. +- +-Client support: +- +-- **VS Code**: Each diagnostic appears as a squiggly underline. +- Hovering reveals the details, along with any suggested fixes. +-- **Emacs + eglot**: Each diagnostic appears as a squiggly underline. +- Hovering reveals the details. Use `M-x eglot-code-action-quickfix` +- to apply available fixes; it will prompt if there are more than one. +-- **Vim + coc.nvim**: ?? +-- **CLI**: `gopls check file.go` +- +- +- +-### `stubMissingInterfaceMethods`: Declare missing methods of I +- +-When a value of a concrete type is assigned to a variable of an +-interface type, but the concrete type does not possess all the +-necessary methods, the type checker will report a "missing method" +-error. +- +-In this situation, gopls offers a quick fix to add stub declarations +-of all the missing methods to the concrete type so that it implements +-the interface. +- +-For example, this function will not compile because the value +-`NegativeErr{}` does not implement the "error" interface: +- +-```go +-func sqrt(x float64) (float64, error) { +- if x < 0 { +- return 0, NegativeErr{} // error: missing method +- } +- ... +-} +- +-type NegativeErr struct{} +-``` +- +-Gopls will offer a quick fix to declare this method: +- +-```go +- +-// Error implements [error.Error]. +-func (NegativeErr) Error() string { +- panic("unimplemented") +-} +-``` +- +-Beware that the new declarations appear alongside the concrete type, +-which may be in a different file or even package from the cursor +-position. +-(Perhaps gopls should send a `showDocument` request to navigate the +-client there, or a progress notification indicating that something +-happened.) +- +-### `StubMissingCalledFunction`: Declare missing method T.f +- +-When you attempt to call a method on a type that does not have that method, +-the compiler will report an error such as "type X has no field or method Y". +-In this scenario, gopls now offers a quick fix to generate a stub declaration of +-the missing method, inferring its type from the call. 
+- +-Consider the following code where `Foo` does not have a method `bar`: +- +-```go +-type Foo struct{} +- +-func main() { +- var s string +- f := Foo{} +- s = f.bar("str", 42) // error: f.bar undefined (type Foo has no field or method bar) +-} +-``` +- +-Gopls will offer a quick fix, "Declare missing method Foo.bar". +-When invoked, it creates the following declaration: +- +-```go +-func (f Foo) bar(s string, i int) string { +- panic("unimplemented") +-} +-``` +- +-### `CreateUndeclared`: Create missing declaration for "undeclared name: X" +- +-A Go compiler error "undeclared name: X" indicates that a variable or function is being used before +-it has been declared in the current scope. In this scenario, gopls offers a quick fix to create the declaration. +- +-#### Declare a new variable +- +-When you reference a variable that hasn't been declared: +- +-```go +-func main() { +- x := 42 +- min(x, y) // error: undefined: y +-} +-``` +- +-The quick fix would insert a declaration with a default +-value inferring its type from the context: +- +-```go +-func main() { +- x := 42 +- y := 0 +- min(x, y) +-} +-``` +- +-#### Declare a new function +- +-Similarly, if you call a function that hasn't been declared: +- +-```go +-func main() { +- var s string +- s = doSomething(42) // error: undefined: doSomething +-} +-``` +- +-Gopls will insert a new function declaration below, +-inferring its type from the call: +- +-```go +-func main() { +- var s string +- s = doSomething(42) +-} +- +-func doSomething(i int) string { +- panic("unimplemented") +-} +-``` +- +diff -urN a/gopls/doc/features/index.md b/gopls/doc/features/index.md +--- a/gopls/doc/features/index.md 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/doc/features/index.md 1969-12-31 18:00:00.000000000 -0600 +@@ -1,72 +0,0 @@ +---- +-title: "Gopls: Index of features" +---- +- +-This page provides an index of all supported features of gopls that +-are accessible through the [language server protocol](https://microsoft.github.io/language-server-protocol/) (LSP). +-It is intended for: +-- **users of gopls** learning its capabilities so that they get the most out of their editor; +-- **editor maintainers** adding or improving Go support in an LSP-capable editor; and +-- **contributors to gopls** trying to understand how it works. +- +-In an ideal world, Go users would not need to know that gopls or even +-LSP exists, as their LSP-enabled editors would implement every facet +-of the protocol and expose each feature in a natural and discoverable +-way. In reality, editors vary widely in their support for LSP, so +-unfortunately these documents necessarily involve many details of the +-protocol. +- +-We also list [settings](../settings.md) that affect each feature. +- +-Most features are illustrated with reference to VS Code, but we will +-briefly mention whether each feature is supported in other popular +-clients, and if so, how to find it. We welcome contributions, edits, +-and updates from users of any editor. +- +-Contributors should [update this documentation](../contributing.md#documentation) +-when making significant changes to existing features or when adding new ones. 
+- +-- [Passive](passive.md): features that are always on and require no special action +- - [Hover](passive.md#hover): information about the symbol under the cursor +- - [Signature Help](passive.md#signature-help): type information about the enclosing function call +- - [Document Highlight](passive.md#document-highlight): highlight identifiers referring to the same symbol +- - [Inlay Hint](passive.md#inlay-hint): show implicit names of struct fields and parameter names +- - [Semantic Tokens](passive.md#semantic-tokens): report syntax information used by editors to color the text +- - [Folding Range](passive.md#folding-range): report text regions that can be "folded" (expanded/collapsed) in an editor +- - [Document Link](passive.md#document-link): extracts URLs from doc comments, strings in current file so client can linkify +-- [Diagnostics](diagnostics.md): compile errors and static analysis findings +-- [Navigation](navigation.md): navigation of cross-references, types, and symbols +- - [Definition](navigation.md#definition): go to definition of selected symbol +- - [Type Definition](navigation.md#type-definition): go to definition of type of selected symbol +- - [References](navigation.md#references): list references to selected symbol +- - [Implementation](navigation.md#implementation): show "implements" relationships of selected type +- - [Document Symbol](navigation.md#document-symbol): outline of symbols defined in current file +- - [Symbol](navigation.md#symbol): fuzzy search for symbol by name +- - [Selection Range](navigation.md#selection-range): select enclosing unit of syntax +- - [Call Hierarchy](navigation.md#call-hierarchy): show outgoing/incoming calls to the current function +- - [Type Hierarchy](navigation.md#type-hierarchy): show interfaces/implementations of the current type +-- [Completion](completion.md): context-aware completion of identifiers, statements +-- [Code transformation](transformation.md): fixes and refactorings +- - [Formatting](transformation.md#formatting): format the source code +- - [Rename](transformation.md#rename): rename a symbol or package +- - [Organize imports](transformation.md#source.organizeImports): organize the import declaration +- - [Extract](transformation.md#refactor.extract): extract selection to a new file/function/variable +- - [Inline](transformation.md#refactor.inline.call): inline a call to a function or method +- - [Miscellaneous rewrites](transformation.md#refactor.rewrite): various Go-specific refactorings +- - [Add test for func](transformation.md#source.addTest): create a test for the selected function +-- [Web-based queries](web.md): commands that open a browser page +- - [Package documentation](web.md#doc): browse documentation for current Go package +- - [Free symbols](web.md#freesymbols): show symbols used by a selected block of code +- - [Assembly](web.md#assembly): show listing of assembly code for selected function +- - [Split package](web.md#splitpkg): split a package into two or more components +-- Support for non-Go files: +- - [Template files](templates.md): files parsed by `text/template` and `html/template` +- - [go.mod and go.work files](modfiles.md): Go module and workspace manifests +- - [Go *.s assembly files](assembly.md): Go assembly files +-- [Command-line interface](../command-line.md): CLI for debugging and scripting (unstable) +-- [Model Context Protocol (MCP)](mcp.md): use some features in AI-assisted environments +- +-You can find this page from within your editor by executing the 
+-`gopls.doc.features` [code action](transformation.md#code-actions), +-which opens it in a web browser. +-In VS Code, you can find it on the "Quick fix" menu. +diff -urN a/gopls/doc/features/mcp.md b/gopls/doc/features/mcp.md +--- a/gopls/doc/features/mcp.md 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/doc/features/mcp.md 1969-12-31 18:00:00.000000000 -0600 +@@ -1,74 +0,0 @@ +---- +-title: "Gopls: Model Context Protocol support" +---- +- +-Gopls includes an experimental built-in server for the [Model Context +-Protocol](https://modelcontextprotocol.io/introduction) (MCP), allowing it to +-expose a subset of its functionality to AI assistants in the form of MCP tools. +- +-## Running the MCP server +- +-There are two modes for running this server: 'attached' and 'detached'. In +-attached mode, the MCP server operates in the context of an active gopls LSP +-session, and so is able to share memory with your LSP session and observe the +-current unsaved buffer state. In detached mode, gopls interacts with a headless +-LSP session, and therefore only sees saved files on disk. +- +-### Attached mode +- +-To use the 'attached' mode, run gopls with the `-mcp.listen` flag. For +-example: +- +-``` +-gopls serve -mcp.listen=localhost:8092 +-``` +- +-This exposes an HTTP based MCP server using the server-sent event transport +-(SSE), available at `http://localhost:8092/sessions/1` (assuming you have only +-one [session](../daemon.md) on your gopls instance). +- +-### Detached mode +- +-To use the 'detached' mode, run the `mcp` subcommand: +- +-``` +-gopls mcp +-``` +- +-This runs a standalone gopls instance that speaks MCP over stdin/stdout. +- +-## Instructions to the model +- +-This gopls MCP server includes model instructions for its usage, describing +-workflows for interacting with Go code using its available tools. These +-instructions are automatically published during the MCP server initialization, +-but you may want to also load them as additional context in your AI-assisted +-session, to emphasize their importance. The `-instructions` flag causes them to +-be printed, so that you can do, for example: +- +-``` +-gopls mcp -instructions > /path/to/contextFile.md +-``` +- +-## Security considerations +- +-The gopls MCP server is a wrapper around the functionality ordinarily exposed +-by gopls through the Language Server Protocol (LSP). As such, gopls' tools +-may perform any of the operations gopls normally performs, including: +- +-- reading files from the file system, and returning their contents in tool +- results (such as when providing context); +-- executing the `go` command to load package information, which may result in +- calls to https://proxy.golang.org to download Go modules, and writes to go +- caches; +-- writing to gopls' cache or persistant configuration files; and +-- uploading weekly telemetry data **if you have opted in** to [Go telemetry](https://go.dev/doc/telemetry). +- +-The gopls MCP server does not perform any operations not already performed by +-gopls in an ordinary IDE session. Like most LSP servers, gopls does not +-generally write directly to your source tree, though it may instruct the client +-to apply edits. Nor does it make arbitrary requests over the network, though it +-may make narrowly scoped requests to certain services such as the Go module +-mirror or the Go vulnerability database, which can't readily be exploited as a +-vehicle for exfiltration by a confused agent. 
Nevertheless, these capabilities +-may require additional consideration when used as part of an AI-enabled system. +diff -urN a/gopls/doc/features/modfiles.md b/gopls/doc/features/modfiles.md +--- a/gopls/doc/features/modfiles.md 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/doc/features/modfiles.md 1969-12-31 18:00:00.000000000 -0600 +@@ -1,11 +0,0 @@ +---- +-title: "Gopls: Support for go.mod and go.work files" +---- +- +-TODO: document these features for go.{mod,work} files: +-- hover +-- vulncheck +-- add dependency +-- update dependency +-- diagnostics +- +diff -urN a/gopls/doc/features/navigation.md b/gopls/doc/features/navigation.md +--- a/gopls/doc/features/navigation.md 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/doc/features/navigation.md 1969-12-31 18:00:00.000000000 -0600 +@@ -1,337 +0,0 @@ +---- +-title: "Gopls: Navigation features" +---- +- +-This page documents gopls features for navigating your source code. +- +- +- +-## Definition +- +-The LSP [`textDocument/definition`](https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#textDocument_definition) +-request returns the location of the declaration of the symbol under the cursor. +-Most editors provide a command to navigate directly to that location. +- +-A definition query also works in these unexpected places: +- +-- On an **import path**, it returns the list of locations, of +- each package declaration in the files of the imported package. +-- On a **package declaration**, it returns the location of +- the package declaration that provides the documentation of that package. +-- On a symbol in a **[`go:linkname` directive](https://pkg.go.dev/cmd/compile)**, +- it returns the location of that symbol's declaration. +-- On a **[doc link](https://tip.golang.org/doc/comment#doclinks)**, it returns +- (like [`hover`](passive.md#hover)) the location of the linked symbol. +-- On a file name in a **[`go:embed` directive](https://pkg.go.dev/embed)**, +- it returns the location of the embedded file. +-- On the declaration of a non-Go function (a `func` with no body), +- it returns the location of the assembly implementation, if any, +-- On a **return statement**, it returns the location of the function's result variables. +-- On a **goto**, **break**, or **continue** statement, it returns the +- location of the label, the closing brace of the relevant block statement, or the +- start of the relevant loop, respectively. +- +- +- +-Client support: +-- **VS Code**: Use [Go to Definition](https://code.visualstudio.com/docs/editor/editingevolved#_go-to-definition) (`F12` or `⌘`-click). +- If the cursor is already at the declaration, the request is instead interpreted as "Go to References". +-- **Emacs + eglot**: use [`M-x xref-find-definitions`](https://www.gnu.org/software/emacs/manual/html_node/emacs/Xref.html). +-- **Vim + coc.nvim**: ?? +-- **CLI**: `gopls definition file.go:#offset` +- +-## References +- +-The LSP [`textDocument/references`](https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#textDocument_references) +-request returns the locations of all identifiers that refer to the symbol under the cursor. +- +-The references algorithm handles various parts of syntax as follows: +- +-- The references to a **symbol** report all uses of that symbol. +- In the case of exported symbols this may include locations in other packages. 
+-- The references to a **package declaration** are all the +- direct imports of the package, along with all the other package +- declarations in the same package. +-- It is an error to request the references to a **built-in symbol** +- such as `int` or `append`, +- as they are presumed too numerous to be of interest. +-- The references to an **interface method** include references to +- concrete types that implement the interface. Similarly, the +- references to a **method of a concrete type** include references to +- corresponding interface methods. +-- An **embedded field** `T` in a struct type such as `struct{T}` is +- unique in Go in that it is both a reference (to a type) and a +- definition (of a field). +- The `references` operation reports only the references to it [as a field](https://go.dev/issue/63521). +- To find references to the type, jump to the type declararation first. +- +-Be aware that a references query returns information only about the +-build configuration used to analyze the selected file, so if you ask +-for the references to a symbol defined in `foo_windows.go`, the result +-will never include the file `bar_linux.go`, even if that file refers +-to a symbol of the same name; see https://go.dev/issue/65755. +- +-Clients can request that the declaration be included among the +-references; most do. +- +-Client support: +-- **VS Code**: Use [`Go to References`](https://code.visualstudio.com/docs/editor/editingevolved#_peek) to quickly "peek" at the references, +- or `Find all References` to open the references panel. +-- **Emacs + eglot**: Via [`xref` package](https://www.gnu.org/software/emacs/manual/html_node/emacs/Xref.html): use `M-x xref-find-references`. +-- **Vim + coc.nvim**: ?? +-- **CLI**: `gopls references file.go:#offset` +- +-## Implementation +- +-The LSP +-[`textDocument/implementation`](https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#textDocument_implementation) +-request queries the relation between abstract and concrete types and +-their methods. +- +-Interfaces and concrete types are matched using method sets: +- +-- When invoked on a reference to an **interface type**, it returns the +- location of the declaration of each type that implements +- the interface. +-- When invoked on a **concrete type**, +- it returns the locations of the matching interface types. +-- When invoked on an **interface method**, it returns the corresponding +- methods of the types that satisfy the interface. +-- When invoked on a **concrete method**, +- it returns the locations of the matching interface methods. +- +-For example: +-- `implementation(io.Reader)` includes subinterfaces such as `io.ReadCloser`, +- and concrete implementations such as `*os.File`. It also includes +- other declarations equivalent to `io.Reader`. +-- `implementation(os.File)` includes only interfaces, such as +- `io.Reader` and `io.ReadCloser`. +- +-The LSP's Implementation feature has a built-in bias towards subtypes, +-possibly because in languages such as Java and C++ the relationship +-between a type and its supertypes is explicit in the syntax, so the +-corresponding "Go to interfaces" operation can be achieved as sequence +-of two or more "Go to definition" steps: the first to visit the type +-declaration, and the rest to sequentially visit ancestors. +-(See https://github.com/microsoft/language-server-protocol/issues/2037.) 
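+-
+-For example (an illustrative sketch with made-up names, not taken from
+-the original page), the interface and concrete type below are related
+-only by their method sets, which is exactly the relationship an
+-Implementation query has to discover:
+-
+-```go
+-package main
+-
+-// Appender is satisfied implicitly: nothing declares that *Buffer
+-// implements it, so the relationship exists only in the method sets.
+-type Appender interface {
+-	Append(p []byte) (n int, err error)
+-}
+-
+-type Buffer struct{ data []byte }
+-
+-// Append gives *Buffer the same method set as Appender.
+-func (b *Buffer) Append(p []byte) (int, error) {
+-	b.data = append(b.data, p...)
+-	return len(p), nil
+-}
+-
+-func main() {
+-	var _ Appender = &Buffer{} // an Implementation query relates Appender and *Buffer
+-}
+-```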
+- +-In Go, where there is no syntactic relationship between two types, a +-search is required when navigating in either direction between +-subtypes and supertypes. The heuristic above works well in many cases, +-but it is not possible to ask for the superinterfaces of +-`io.ReadCloser`. For more explicit navigation between subtypes and +-supertypes, use the [Type Hierarchy](#Type Hierarchy) feature. +- +-Only non-trivial interfaces are considered; no implementations are +-reported for type `any`. +- +-Within the same package, all matching types/methods are reported. +-However, across packages, only exported package-level types and their +-methods are reported, so local types (whether interfaces, or struct +-types with methods due to embedding) may be missing from the results. +- +- +-Functions, `func` types, and dynamic function calls are matched using signatures: +- +-- When invoked on the `func` token of a **function definition**, +- it returns the locations of the matching signature types +- and dynamic call expressions. +-- When invoked on the `func` token of a **signature type**, +- it returns the locations of the matching concrete function definitions. +-- When invoked on the `(` token of a **dynamic function call**, +- it returns the locations of the matching concrete function +- definitions. +- +-If either the target type or the candidate type are generic, the +-results will include the candidate type if there is any instantiation +-of the two types that would allow one to implement the other. +-(Note: the matcher doesn't current implement full unification, so type +-parameters are treated like wildcards that may match arbitrary +-types, without regard to consistency of substitutions across the +-method set or even within a single method. +-This may lead to occasional spurious matches.) +- +-Since a type may be both a function type and a named type with methods +-(for example, `http.HandlerFunc`), it may participate in both kinds of +-implementation queries (by method-sets and function signatures). +-Queries using method-sets should be invoked on the type or method name, +-and queries using signatures should be invoked on a `func` or `(` token. +- +-Client support: +-- **VS Code**: Use [Go to Implementations](https://code.visualstudio.com/docs/editor/editingevolved#_go-to-implementation) (`⌘F12`). +-- **Emacs + eglot**: Use `M-x eglot-find-implementation`. +-- **Vim + coc.nvim**: ?? +-- **CLI**: `gopls implementation file.go:#offset` +- +- +-## Type Definition +- +-The LSP +-[`textDocument/typeDefinition`](https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#textDocument_typeDefinition) +-request returns the location of the type of the selected symbol. +- +-For example, if the selection is the name `buf` of a local variable of +-type `*bytes.Buffer`, a `typeDefinition` query will return the +-location of the type `bytes.Buffer`. +-Clients typically navigate to that location. +- +-Type constructors such as pointer, array, slice, channel, and map are +-stripped off the selected type in the search for a named type. For +-example, if x is of type `chan []*T`, the reported type definition +-will be that of `T`. +-Similarly, if the symbol's type is a function with one "interesting" +-(named, non-`error`) result type, the function's result type is used. +- +-Gopls currently requires that a `typeDefinition` query be applied to a +-symbol, not to an arbitrary expression; see https://go.dev/issue/67890 for +-potential extensions of this functionality. 
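+-
+-A small hypothetical sketch of the stripping rule (the names are
+-illustrative only):
+-
+-```go
+-package main
+-
+-import "bytes"
+-
+-type T struct{}
+-
+-func main() {
+-	var buf *bytes.Buffer // typeDefinition on "buf" reports bytes.Buffer (pointer stripped)
+-	var ch chan []*T      // typeDefinition on "ch" strips chan, slice, and pointer, reporting T
+-	_, _ = buf, ch
+-}
+-```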
+- +- +-Client support: +-- **VS Code**: Use [Go to Type Definition](https://code.visualstudio.com/docs/editor/editingevolved#_go-to-implementation). +-- **Emacs + eglot**: Use `M-x eglot-find-typeDefinition`. +-- **Vim + coc.nvim**: ?? +-- **CLI**: not supported +- +-## Document Symbol +- +-The `textDocument/documentSymbol` LSP query reports the list of +-top-level declarations in this file. Clients may use this information +-to present an overview of the file, and an index for faster navigation. +- +-Gopls responds with the +-[`DocumentSymbol`](https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#documentSymbol) +-type if the client indicates +-[`hierarchicalDocumentSymbolSupport`](https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#documentSymbolClientCapabilities); +-otherwise it returns a +-[`SymbolInformation`](https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#symbolInformation). +- +-Client support: +-- **VS Code**: Use the [Outline view](https://code.visualstudio.com/docs/getstarted/userinterface#_outline-view) for navigation. +-- **Emacs + eglot**: Use [`M-x imenu`](https://www.gnu.org/software/emacs/manual/html_node/emacs/Imenu.html#Imenu) to jump to a symbol. +-- **Vim + coc.nvim**: ?? +-- **CLI**: `gopls links file.go` +- +- +-## Symbol +- +-The +-[`workspace/symbol`](https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#workspace_symbol) +-LSP query searches an index of all the symbols in the workspace. +- +-The default symbol matching algorithm (`fastFuzzy`), inspired by the +-popular fuzzy matcher [FZF](https://github.com/junegunn/fzf), attempts +-a variety of inexact matches to correct for misspellings or abbreviations in your +-query. For example, it considers `DocSym` a match for `DocumentSymbol`. +- +- +- +-Settings: +-- The [`symbolMatcher`](../settings.md#symbolMatcher) setting controls the algorithm used for symbol matching. +-- The [`symbolStyle`](../settings.md#symbolStyle) setting controls how symbols are qualified in symbol responses. +-- The [`symbolScope`](../settings.md#symbolScope) setting determines the scope of the query. +-- The [`directoryFilters`](../settings.md#directoryFilters) setting specifies directories to be excluded from the search. +- +-Client support: +-- **VS Code**: Use ⌘T to open [Go to Symbol](https://code.visualstudio.com/docs/editor/editingevolved#_go-to-symbol) with workspace scope. (Alternatively, use Ctrl-Shift-O, and add a `@` prefix to search within the file or a `#` prefix to search throughout the workspace.) +-- **Emacs + eglot**: Use [`M-x xref-find-apropos`](https://www.gnu.org/software/emacs/manual/html_node/emacs/Looking-Up-Identifiers.html) to show symbols that match a search term. +-- **Vim + coc.nvim**: ?? +-- **CLI**: `gopls links file.go` +- +- +-## Selection Range +- +-The +-[`textDocument/selectionRange`](https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#textDocument_selectionRange) +-LSP query returns information about the lexical extent of each piece +-of syntax enclosing the current selection. +-Clients may use it to provide an operation to expand the selection +-to successively larger expressions. +- +-Client support: +-- **VSCode**: Use `⌘⇧^→` to expand the selection or `⌘⇧^←` to contract it again; watch this [video](https://www.youtube.com/watch?v=dO4SGAMl7uQ). +-- **Emacs + eglot**: Not standard. 
Use `M-x eglot-expand-selection` defined in [this configuration snippet](https://github.com/joaotavora/eglot/discussions/1220#discussioncomment-9321061).
+-- **Vim + coc.nvim**: ??
+-- **CLI**: not supported
+-
+-## Call Hierarchy
+-
+-The LSP CallHierarchy mechanism consists of three queries that
+-together enable clients to present a hierarchical view of a portion of
+-the static call graph:
+-
+-- [`textDocument/prepareCallHierarchy`](https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#textDocument_prepareCallHierarchy) returns a list of [items](https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#callHierarchyItem) for a given position, each representing a named function or method enclosing the position;
+-- [`callHierarchyItem/incomingCalls`](https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#callHierarchy_incomingCalls) returns the set of call sites that call the selected item; and
+-- [`callHierarchy/outgoingCalls`](https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#callHierarchy_outgoingCalls) returns the set of functions called by the selected item.
+-
+-Invoke the command while selecting the name in a function declaration.
+-
+-Dynamic calls are not included, because it is not analytically
+-practical to detect them. So, beware that the results may not be
+-exhaustive, and perform a [References](#references) query if necessary.
+-
+-The hierarchy does not consider a nested function distinct from its
+-enclosing named function. (Without the ability to detect dynamic
+-calls, it would make little sense to do so.)
+-
+-The screenshot below shows the outgoing call tree rooted at `f`. The
+-tree has been expanded to show a path from `f` to the `String` method
+-of `fmt.Stringer` through the guts of `fmt.Sprint`:
+-
+-
+-
+-Client support:
+-- **VS Code**: `Show Call Hierarchy` menu item (`⌥⇧H`) opens [Call hierarchy view](https://code.visualstudio.com/docs/cpp/cpp-ide#_call-hierarchy) (note: docs refer to C++ but the idea is the same for Go).
+-- **Emacs + eglot**: Not standard; install with `(package-vc-install "https://github.com/dolmens/eglot-hierarchy")`. Use `M-x eglot-hierarchy-call-hierarchy` to show the direct incoming calls to the selected function; use a prefix argument (`C-u`) to show the direct outgoing calls. There is no way to expand the tree.
+-- **CLI**: `gopls call_hierarchy file.go:#offset` shows outgoing and incoming calls.
+-
+-
+-## Type Hierarchy
+-
+-The LSP TypeHierarchy mechanism consists of three queries that
+-together enable clients to present a hierarchical view of a portion of
+-the subtyping relation over named types.
+-
+-- [`textDocument/prepareTypeHierarchy`](https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#textDocument_prepareTypeHierarchy) returns an [item](https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#typeHierarchyItem) describing the named type at the current position;
+-- [`typeHierarchyItem/subtypes`](https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#typeHierarchy_subtypes) returns the set of subtypes of the selected (interface) type; and
+-- [`typeHierarchy/supertypes`](https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#typeHierarchy_supertypes) returns the set of supertypes (interface types) of the selected type.
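+-
+-For instance (a hypothetical sketch, not from the original page), given
+-the declarations below, a supertypes query on `File` could report
+-`Reader`, `Closer`, and `ReadCloser`, while a subtypes query on `Reader`
+-could report `ReadCloser` and `File`:
+-
+-```go
+-package main
+-
+-type Reader interface{ Read(p []byte) (int, error) }
+-type Closer interface{ Close() error }
+-
+-// ReadCloser embeds both interfaces, so it is a subtype of each.
+-type ReadCloser interface {
+-	Reader
+-	Closer
+-}
+-
+-// File is assignable to all three interfaces above.
+-type File struct{}
+-
+-func (File) Read(p []byte) (int, error) { return 0, nil }
+-func (File) Close() error               { return nil }
+-
+-func main() {
+-	var _ ReadCloser = File{} // compiles: File satisfies ReadCloser
+-}
+-```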
+- +-Invoke the command while selecting the name of a type. +- +-As with an Implementation query, a type hierarchy query reports +-function-local types only within the same package as the query type. +-Also the result does not include alias types, only defined types. +- +- +- +- +- +-Caveats: +- +-- The type hierarchy supports only named types and their assignability +- relation. By contrast, the Implementations request also reports the +- relation between unnamed `func` types and function declarations, +- function literals, and dynamic calls of values of those types. +- +-Client support: +-- **VS Code**: `Show Type Hierarchy` menu item opens [Type hierarchy view](https://code.visualstudio.com/docs/java/java-editing#_type-hierarchy) (note: docs refer to Java but the idea is the same for Go). +-- **Emacs + eglot**: Support added in March 2025. Use `M-x eglot-show-call-hierarchy`. +-- **CLI**: not yet supported. +diff -urN a/gopls/doc/features/passive.md b/gopls/doc/features/passive.md +--- a/gopls/doc/features/passive.md 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/doc/features/passive.md 1969-12-31 18:00:00.000000000 -0600 +@@ -1,321 +0,0 @@ +---- +-title: "Gopls: Passive features" +---- +- +-This page documents the fundamental LSP features of gopls that may be +-described as "passive", since many editors use them to continuously +-provide information about your source files without requiring any +-special action. +- +-See also [Code Lenses](../codelenses.md), some of which annotate your +-source code with additional information and may thus also be +-considered passive features. +- +- +-## Hover +- +-The LSP [`textDocument/hover`](https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#textDocument_hover) +-query returns a description of the code currently under the cursor, such +-as its name, kind, type, value (for a constant), abbreviated +-declaration (for a type), doc comment (if any), and a link to the +-symbol's documentation on `pkg.go.dev`. The client may request either +-plain text or Markdown. +- +- +- +-Depending on the selection, the response may include additional information. +-For example, hovering over a type shows its declared methods, +-plus any methods promoted from embedded fields. +- +-**Doc links**: A doc comment may refer to another symbol using square +-brackets, for example `[fmt.Printf]`. Hovering over one of these +-[doc links](https://go.dev/doc/comment#doclinks) reveals +-information about the referenced symbol. +- +- +- +-**Struct size/offset info**: for declarations of struct types, +-hovering over the name reveals the struct's size in bytes: +- +- +- +-And hovering over each field name shows the size and offset of that field: +- +- +- +-This information may be useful when optimizing the layout of your data +-structures, or when reading assembly files or stack traces that refer +-to each field by its cryptic byte offset. +- +-In addition, Hover reports: +-- the struct's size class, which is the number of bytes actually +- allocated by the Go runtime for a single object of this type; and +-- the percentage of wasted space due to suboptimal ordering of struct +- fields, if this figure is 20% or higher: +- +- +- +-In the struct above, alignment rules require each of the two boolean +-fields (1 byte) to occupy a complete word (8 bytes), leading to (7 + +-7) / (3 * 8) = 58% waste. +-Placing the two booleans together would save a word. 
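+-
+-A struct with the layout described above might look like this
+-hypothetical sketch (sizes assume a 64-bit platform):
+-
+-```go
+-package main
+-
+-import (
+-	"fmt"
+-	"unsafe"
+-)
+-
+-// Sparse occupies 3 words (24 bytes): each bool is padded out to a full
+-// 8-byte word, so 14 of 24 bytes (58%) are wasted.
+-type Sparse struct {
+-	Enabled bool   // 1 byte + 7 bytes padding
+-	Count   uint64 // 8 bytes
+-	Dirty   bool   // 1 byte + 7 bytes padding
+-}
+-
+-// Packed places the two booleans together and needs only 2 words (16 bytes).
+-type Packed struct {
+-	Count   uint64
+-	Enabled bool
+-	Dirty   bool
+-}
+-
+-func main() {
+-	fmt.Println(unsafe.Sizeof(Sparse{}), unsafe.Sizeof(Packed{})) // 24 16 on 64-bit platforms
+-}
+-```
+-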
+-(In most structures clarity is more important than compactness, so you +-should reorder fields to save space only in data structures that have +-been shown by profiling to be very frequently allocated.) +- +-**Embed directives**: hovering over the file name pattern in +-[`//go:embed` directive](https://pkg.go.dev/embed), for example +-`*.html`, reveals the list of file names to which the wildcard +-expands. +- +- +- +- +-**Linkname directives**: a [`//go:linkname` directive](https://pkg.go.dev/cmd/compile#hdr-Compiler_Directives) creates a linker-level alias for another symbol. +-Hovering over the directive shows information about the other symbol. +- +- +- +-The hover information for symbols from the standard library added +-after Go 1.0 states the Go release that added the symbol. +- +-Settings: +-- The [`hoverKind`](../settings.md#hoverKind) setting controls the verbosity of documentation. +-- The [`linkTarget`](../settings.md#linkTarget) setting specifies +- the base URI for Go package links +- +-Caveats: +-- It is an unfortunate limitation of the LSP that a `Hover` request +- currently includes only a position but not a selection, as this +- means it is impossible to request information about the type and +- methods of, say, the `f(x)` portion of the larger expression +- `f(x).y`. Please upvote microsoft/language-server-protocol#1466 if +- you would like to see this addressed. +- +-Client support: +-- **VS Code**: enabled by default. Displays rendered Markdown in a panel near the cursor. +-- **Emacs + eglot**: enabled by default. Displays a one-line summary in the echo area. +-- **Vim + coc.nvim**: ?? +-- **CLI**: `gopls definition file.go:#start-#end` includes information from a Hover query. +- +- +-## Signature Help +- +-The LSP [`textDocument/signatureHelp`](https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#textDocument_signatureHelp) +-query returns information about the innermost function call enclosing +-the cursor or selection, including the signature of the function and +-the names, types, and documentation of each parameter. +- +-Clients may provide this information to help remind the user of the +-purpose of each parameter and their order, while reading or editing a +-function call. +- +- +- +-Call parens are not necessary if the cursor is within an identifier +-that denotes a function or method. For example, Signature Help at +-`once.Do(initialize‸)` will describe `initialize`, not `once.Do`. +- +-Client support: +-- **VS Code**: enabled by default. +- Also known as "[parameter hints](https://code.visualstudio.com/api/references/vscode-api#SignatureHelpProvider)" in the [IntelliSense settings](https://code.visualstudio.com/docs/editor/intellisense#_settings). +- Displays signature and doc comment alongside Hover information. +-- **Emacs + eglot**: enabled by default. Displays signature in the echo area. +-- **Vim + coc.nvim**: ?? +-- **CLI**: `gopls signature file.go:#start-#end` +- +- +-## Document Highlight +- +-The LSP [`textDocument/documentHighlight`](https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#textDocument_documentHighlight) +-query reports a set of source ranges that should be highlighted based +-on the current cursor position or selection, to emphasize the +-relationship between them. 
+- +-Each of the following parts of syntax forms a set so that if you +-select any one member, gopls will highlight the complete set: +- +-- each identifier that refers to the same symbol (as in the screenshot below); +-- a named result variable and all its corresponding operands of `return` statements; +-- the `for`, `break`, and `continue` tokens of the same loop; +-- the `switch` and `break` tokens of the same switch statement; +-- the `func` keyword of a function and all of its `return` statements. +- +-More than one of these rules may be activated by a single selection, +-for example, by an identifier that is also a return operand. +- +-Different occurrences of the same identifier may be color-coded to distinguish +-"read" from "write" references to a given variable symbol. +- +- +- +-Client support: +-- **VS Code**: enabled by default. Triggered by cursor motion, or single click. +- (Note: double clicking activates a simple syntax-oblivious textual match.) +-- **Emacs + eglot**: enabled by default. Triggered by cursor motion or selection. +-- **Vim + coc.nvim**: ?? +-- **CLI**: `gopls signature file.go:#start-#end` +- +- +-## Inlay Hint +- +-The LSP [`textDocument/inlayHint`](https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#textDocument_inlayHint) +-query returns a set of annotations to be spliced into the current file +-that reveal implicit information. +- +- +- +-Examples: +- +-- In a function call `f(1, 2)`, hints provide the +- names of the parameters (`parameterNames`), as in the screenshot above. +-- In a call to a generic function, hints provide the type arguments +- (`functionTypeParameters`). +-- In an assignment `x, y = 1, 2`, hints provide the types of the +- variables (`assignVariableTypes`). +-- In a struct literal such as `Point2D{1, 2}`, hints provide the field +- names (`compositeLiteralFields`). +-- In a nested composite literal `T{{...}}`, a hint provides the type of +- the inner literal, `{...}` (`compositeLiteralTypes`). +-- In a `for k, v := range x {}` loop, hints provide the types of the +- variables k and v (`rangeVariableTypes`). +-- For a constant expression (perhaps using `iota`), a hint provides +- its computed value (`constantValues`). +- +-See [Inlay hints](../inlayHints.md) for a complete list with examples. +- +- +- +-Settings: +-- The [`hints`](../settings.md#hints) setting indicates the desired set of hints. +- To reduce distractions, its default value is empty. +- To enable hints, add one or more of the identifiers above to the hints +- map. For example: +- ```json5 +- "hints": {"parameterNames": true} +- ``` +- +-Client support: +-- **VS Code**: in addition to the `hints` configuration value, VS Code provides a graphical +- configuration menu ("Preferences: Open Settings (UI)" the search for "Go Inlay Hints") +- for each supported kind of inlay hint. +-- **Emacs + eglot**: disabled by default. Needs `M-x eglot-inlay-hints-mode` plus the configuration [described here](https://www.reddit.com/r/emacs/comments/11bqzvk/emacs29_and_eglot_inlay_hints/) +-- **Vim + coc.nvim**: ?? +-- **CLI**: not supported +- +-## Semantic Tokens +- +-The LSP [`textDocument/semanticTokens`](https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#textDocument_semanticTokens) +-query reports information about all the tokens in the current file, or +-a portion of it. 
+-The client may use this information to provide syntax highlighting +-that conveys semantic distinctions between, for example, functions and +-types, constants and variables, or library functions and built-ins. +- +-The client must specify the sets of types and modifiers it is interested in. +- +-Gopls reports the following token types: +- +-- `"comment"`: a comment +-- `"function"`: a function +-- `"keyword"`: a keyword +-- `"label"`: a control label (not an LSP standard type) +-- `"macro"`: text/template tokens +-- `"method"`: a method +-- `"namespace"`: an imported package name +-- `"number"`: a numeric literal +-- `"operator"`: an operator +-- `"parameter"`: a parameter variable +-- `"string"`: a string literal +-- `"type"`: a type name (plus other uses) +-- `"typeParameter"`: a type parameter +-- `"variable"`: a var or const (see `readonly` modifier) +- +-Gopls also reports the following standard modifiers: +- +-- `"defaultLibrary"`: predeclared symbols +-- `"definition"`: the declaring identifier of a symbol +-- `"readonly"`: for constants +- +-plus these non-standard modifiers each representing the top-level +-constructor of each symbols's type: +- +-- `"array"` +-- `"bool"` +-- `"chan"` +-- `"interface"` +-- `"map"` +-- `"number"` +-- `"pointer"` +-- `"signature"` +-- `"slice"` +-- `"string"` +-- `"struct"` +- +-Settings: +-- The [`semanticTokens`](../settings.md#semanticTokens) setting determines whether +- gopls responds to semantic token requests. This option allows users to disable +- semantic tokens even when their client provides no client-side control over the +- feature. Because gopls' semantic-tokens algorithm depends on type checking, +- which adds a tangible latency, this feature is currently disabled by default +- to avoid any delay in syntax highlighting; see https://go.dev/issue/#45313, https://go.dev/issue/#47465. +-- The experimental [`noSemanticString`](../settings.md#noSemanticString) and +- [`noSemanticNumber`](../settings.md#noSemanticNumber) settings cause the server +- to exclude the `string` and `number` kinds from the response, as some clients +- may do a more colorful job highlighting these tokens; see https://go.dev/issue/45753. +- +-Client Support: +-- **VS Code**: See [Semantic Highlighting Guide](https://code.visualstudio.com/api/language-extensions/semantic-highlight-guide). +-- **Emacs + eglot**: Not supported; see joaotavora/eglot#615. +-- **Vim + coc.nvim**: ?? +-- **CLI**: `gopls semtok file.go` +- +-## Folding Range +- +-The LSP [`textDocument/foldingRange`](https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#textDocument_foldingRange) +-query reports the list of regions in the current file that may be +-independently collapsed or expanded. For example, it may be convenient +-to collapse large comments or functions when studying some code so +-that more of it fits in a single screen. +- +- +- +-The protocol [allows](https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#foldingRangeClientCapabilities) clients to indicate whether they prefer +-fine-grained ranges such as matched pairs of brackets, or only ranges +-consisting of complete lines. +- +-Client support: +-- **VS Code**: displayed in left margin. Toggle the chevrons (`∨` and `>`) to collapse or expand. +-- **Emacs + eglot**: not supported. +-- **Vim + coc.nvim**: ?? 
+-- **CLI**: `gopls folding_ranges file.go` +- +-## Document Link +- +-The LSP [`textDocument/documentLink`](https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#textDocument_documentLink) +-query uses heuristics to extracts URLs from doc comments and string +-literals in the current file so that the client can present them as +-clickable links. +- +- +- +-In addition to explicit URLs, gopls also turns string literals in +-import declarations into links to the pkg.go.dev documentation for the +-imported package. +- +-Settings: +-- The [`importShortcut`](../settings.md#importShortcut) setting determines +- what kind of link is returned for an `import` declaration. +-- The [`linkTarget`](../settings.md#linkTarget) setting specifies +- the base URI for Go package links. +- +-Client support: +-- **VS Code**: Hovering over a link displays a "Follow link (cmd+click)" popup. +-- **Emacs + eglot**: not currently used. +-- **Vim + coc.nvim**: ?? +-- **CLI**: `gopls links file.go` +diff -urN a/gopls/doc/features/README.md b/gopls/doc/features/README.md +--- a/gopls/doc/features/README.md 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/doc/features/README.md 1969-12-31 18:00:00.000000000 -0600 +@@ -1 +0,0 @@ +-See [index.md](index.md). +diff -urN a/gopls/doc/features/templates.md b/gopls/doc/features/templates.md +--- a/gopls/doc/features/templates.md 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/doc/features/templates.md 1969-12-31 18:00:00.000000000 -0600 +@@ -1,51 +0,0 @@ +---- +-title: "Gopls: Support for template files" +---- +- +-Gopls provides some support for Go template files, that is, files that +-are parsed by [`text/template`](https://pkg.go.dev/text/template) or +-[`html/template`](https://pkg.go.dev/html/template). +- +-## Enabling template support +- +-Gopls recognizes template files based on their file extension, which +-may be configured by the +-[`templateExtensions`](../settings.md#templateExtensions) setting. If +-this list is empty, template support is disabled. (This is the default +-value, since Go templates don't have a canonical file extension.) +- +-Additional configuration may be necessary to ensure that your client +-chooses the correct language kind when opening template files. +-Gopls recognizes both `"tmpl"` and `"gotmpl"` for template files. +-For example, in `VS Code` you will also need to add an +-entry to the +-[`files.associations`](https://code.visualstudio.com/docs/languages/identifiers) +-mapping: +-```json +-"files.associations": { +- ".mytemplate": "gotmpl" +-}, +-``` +- +- +-## Features +-In template files, template support works inside +-the default `{{` delimiters. (Go template parsing +-allows the user to specify other delimiters, but +-gopls does not know how to do that.) +- +-Gopls template support includes the following features: +-+ **Diagnostics**: if template parsing returns an error, +-it is presented as a diagnostic. (Missing functions do not produce errors.) +-+ **Syntax Highlighting**: syntax highlighting is provided for template files. +-+ **Definitions**: gopls provides jump-to-definition inside templates, though it does not understand scoping (all templates are considered to be in one global scope). +-+ **References**: gopls provides find-references, with the same scoping limitation as definitions. +-+ **Completions**: gopls will attempt to suggest completions inside templates. 
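+-
+-As a minimal illustration (an assumed example, not from the original
+-page), the actions gopls understands are the default `{{ }}` ones
+-produced by `text/template`; custom delimiters are not recognized:
+-
+-```go
+-package main
+-
+-import (
+-	"os"
+-	"text/template"
+-)
+-
+-func main() {
+-	// The {{.Name}} action uses the default delimiters, which is what
+-	// gopls' template support understands; templates parsed with custom
+-	// Delims get no such support.
+-	t := template.Must(template.New("greet").Parse("Hello, {{.Name}}!\n"))
+-	if err := t.Execute(os.Stdout, struct{ Name string }{"gopher"}); err != nil {
+-		panic(err)
+-	}
+-}
+-```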
+- +-TODO: also +-+ Hover +-+ SemanticTokens +-+ Symbol search +-+ DocumentHighlight +- +- +diff -urN a/gopls/doc/features/transformation.md b/gopls/doc/features/transformation.md +--- a/gopls/doc/features/transformation.md 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/doc/features/transformation.md 1969-12-31 18:00:00.000000000 -0600 +@@ -1,913 +0,0 @@ +---- +-title: "Gopls: Code transformation features" +---- +- +-This document describes gopls' features for code transformation, which +-include a range of behavior-preserving changes (refactorings, +-formatting, simplifications), code repair (fixes), and editing support +-(filling in struct literals and switch statements). +- +-Code transformations are not a single category in the LSP: +- +-- A few, such as Formatting and Rename, are primary operations in the +- protocol. +-- Some transformations are exposed through [Code Lenses](../codelenses.md), +- which return _commands_, arbitrary server +- operations invoked for their side effects through a +- [`workspace/executeCommand`](https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#workspace_executeCommand) request; +- however, no current code lenses are transformations of Go syntax. +- +-- Most transformations are defined as *code actions*. +- +-## Code Actions +- +-A **code action** is an action associated with a portion of the file. +-Each time the selection changes, a typical client makes a +-[`textDocument/codeAction`](https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#textDocument_codeAction) +-request for the set of available actions, then updates its UI +-elements (menus, icons, tooltips) to reflect them. +-The VS Code manual describes code actions as +-"[Quick fixes + Refactorings](https://code.visualstudio.com/docs/editor/refactoring#_code-actions-quick-fixes-and-refactorings)". +- +-A `codeAction` request delivers the menu, so to speak, but it does +-not order the meal. Once the user chooses an action, one of two things happens. +-In trivial cases, the action itself contains an edit that the +-client can directly apply to the file. +-But in most cases the action contains a command, +-similar to the command associated with a code lens. +-This allows the work of computing the patch to be done lazily, only +-when actually needed. (Most aren't.) +-The server may then compute the edit and send the client a +-[`workspace/applyEdit`](https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#workspace_applyEdit) +-request to patch the files. +-Not all code actions' commands have an `applyEdit` side effect: some +-may change the state of the server, for example to toggle a variable +-or to cause the server to send other requests to the client, +-such as a `showDocument` request to open a report in a web browser. +- +-The main difference between code lenses and code actions is this: +- +-- a `codeLens` request obtains commands for the entire file. +- Each command specifies its applicable source range, +- and typically appears as an annotation on that source range. +-- a `codeAction` request obtains commands only for a particular range: the current selection. +- All the commands are presented together in a menu at that location. +- +-Each action has a _kind_, +-which is a hierarchical identifier such as `refactor.inline.call`. +-Clients may filter actions based on their kind. +-For example, VS Code has: +-two menus, "Refactor..." 
and "Source action...", each populated by +-different kinds of code actions (`refactor` and `source`); +-a lightbulb icon that triggers a menu of "quick fixes" (of kind `quickfix`); +-and a "Fix All" command that executes all code actions of +-kind `source.fixAll`, which are those deemed unambiguously safe to apply. +- +-Gopls supports the following code actions: +- +-- `quickfix`, which applies unambiguously safe fixes +-- [`source.organizeImports`](#source.organizeImports) +-- [`source.assembly`](web.md#assembly) +-- [`source.doc`](web.md#doc) +-- [`source.freesymbols`](web.md#freesymbols) +-- `source.test` (undocumented) +-- [`source.addTest`](#source.addTest) +-- [`source.toggleCompilerOptDetails`](diagnostics.md#toggleCompilerOptDetails) +-- [`gopls.doc.features`](README.md), which opens gopls' index of features in a browser +-- [`refactor.extract.constant`](#extract) +-- [`refactor.extract.function`](#extract) +-- [`refactor.extract.method`](#extract) +-- [`refactor.extract.toNewFile`](#extract.toNewFile) +-- [`refactor.extract.variable`](#extract) +-- [`refactor.extract.variable-all`](#extract) +-- [`refactor.inline.call`](#refactor.inline.call) +-- [`refactor.inline.variable`](#refactor.inline.variable) +-- [`refactor.rewrite.addTags`](#refactor.rewrite.addTags) +-- [`refactor.rewrite.changeQuote`](#refactor.rewrite.changeQuote) +-- [`refactor.rewrite.fillStruct`](#refactor.rewrite.fillStruct) +-- [`refactor.rewrite.fillSwitch`](#refactor.rewrite.fillSwitch) +-- [`refactor.rewrite.invertIf`](#refactor.rewrite.invertIf) +-- [`refactor.rewrite.joinLines`](#refactor.rewrite.joinLines) +-- [`refactor.rewrite.moveParamLeft`](#refactor.rewrite.moveParamLeft) +-- [`refactor.rewrite.moveParamRight`](#refactor.rewrite.moveParamRight) +-- [`refactor.rewrite.removeTags`](#refactor.rewrite.removeTags) +-- [`refactor.rewrite.removeUnusedParam`](#refactor.rewrite.removeUnusedParam) +-- [`refactor.rewrite.splitLines`](#refactor.rewrite.splitLines) +- +-Gopls reports some code actions twice, with two different kinds, so +-that they appear in multiple UI elements: simplifications, +-for example from `for _ = range m` to `for range m`, +-have kinds `quickfix` and `source.fixAll`, +-so they appear in the "Quick Fix" menu and +-are activated by the "Fix All" command. +- +- +- +-Many transformations are computed by [analyzers](../analyzers.md) +-that, in the course of reporting a diagnostic about a problem, +-also suggest a fix. +-A `codeActions` request will return any fixes accompanying diagnostics +-for the current selection. +- +- +- +- +- +-Caveats: +- +-- Many of gopls code transformations are limited by Go's syntax tree +- representation, which currently records comments not in the tree +- but in a side table; consequently, transformations such as Extract +- and Inline are prone to losing comments. This is issue +- https://go.dev/issue/20744, and it is a priority for us to fix in 2024. +- +-- Generated files, as identified by the conventional +- [DO NOT EDIT](https://go.dev/s/generatedcode) comment, +- are not offered code actions for transformations. +- +- +-Client support for code actions: +- +-- **VS Code**: Depending on their kind, code actions are found in +- the "Refactor..." menu (`^⇧R`), +- the "Source action..." menu, +- the 💡 (light bulb) icon's menu, or +- the "Quick fix" (`⌘.`) menu. +- The "Fix All" command applies all actions of kind `source.fixAll`. +-- **Emacs + eglot**: Code actions are invisible. 
+- Use `M-x eglot-code-actions` to select one from those that are +- available (if there are multiple) and execute it. +- Some action kinds have filtering shortcuts, +- e.g. [`M-x eglot-code-action-{inline,extract,rewrite}`](https://joaotavora.github.io/eglot/#index-M_002dx-eglot_002dcode_002daction_002dinline). +-- **CLI**: `gopls codeaction -exec -kind k,... -diff file.go:#123-#456` executes code actions of the specified +- kinds (e.g. `refactor.inline`) on the selected range, specified using zero-based byte offsets, and displays the diff. +- +- +-## Formatting +- +-The LSP +-[`textDocument/formatting`](https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#textDocument_formatting) +-request returns edits that format a file. +-Gopls applies Go's canonical formatting algorithm, +-[`go fmt`](https://pkg.go.dev/cmd/gofmt). +-LSP formatting options are ignored. +- +-Most clients are configured to format files and organize imports +-whenever a file is saved. +- +-Settings: +- +-- The [`gofumpt`](../settings.md#gofumpt) setting causes gopls to use an +- alternative formatter, [`github.com/mvdan/gofumpt`](https://pkg.go.dev/mvdan.cc/gofumpt). +- +-Client support: +- +-- **VS Code**: Formats on save by default. Use `Format document` menu item (`⌥⇧F`) to invoke manually. +-- **Emacs + eglot**: Use `M-x eglot-format-buffer` to format. Attach it to `before-save-hook` to format on save. For formatting combined with organize-imports, many users take the legacy approach of setting `"goimports"` as their `gofmt-command` using [go-mode](https://github.com/dominikh/go-mode.el), and adding `gofmt-before-save` to `before-save-hook`. An LSP-based solution requires code such as https://github.com/joaotavora/eglot/discussions/1409. +-- **CLI**: `gopls format file.go` +- +- +-## `source.organizeImports`: Organize imports +- +-A `codeActions` request in a file whose imports are not organized will +-return an action of the standard kind `source.organizeImports`. +-Its command has the effect of organizing the imports: +-deleting existing imports that are duplicate or unused, +-adding new ones for undefined symbols, +-and sorting them into the conventional order. +- +-The addition of new imports is based on heuristics that depend on +-your workspace and the contents of your GOMODCACHE directory; they may +-sometimes make surprising choices. +- +-Many editors automatically organize imports and format the code before +-saving any edited file. +- +-Some users dislike the automatic removal of imports that are +-unreferenced because, for example, the sole line that refers to the +-import is temporarily commented out for debugging; see https://go.dev/issue/54362. +- +-Settings: +- +-- The [`local`](../settings.md#local) setting is a comma-separated list of +- prefixes of import paths that are "local" to the current file and +- should appear after standard and third-party packages in the sort order. +- +-Client support: +- +-- **VS Code**: automatically invokes `source.organizeImports` before save. +- To disable it, use the snippet below, and invoke the "Organize Imports" command manually as needed. +- ``` +- "[go]": { +- "editor.codeActionsOnSave": { "source.organizeImports": false } +- } +- ``` +-- **Emacs + eglot**: Use `M-x eglot-code-action-organize-imports` to invoke manually. 
+- Many users of [go-mode](https://github.com/dominikh/go-mode.el) use these lines to +- organize imports and reformat each modified file before saving it, but this +- approach is based on the legacy +- [`goimports`](https://pkg.go.dev/golang.org/x/tools/cmd/goimports) tool, not gopls: +- ```lisp +- (setq gofmt-command "goimports") +- (add-hook 'before-save-hook 'gofmt-before-save) +- ``` +-- **CLI**: `gopls fix -a file.go:#offset source.organizeImports` +- +- +-## `source.addTest`: Add test for function or method +- +-If the selected chunk of code is part of a function or method declaration F, +-gopls will offer the "Add test for F" code action, which adds a new test for the +-selected function in the corresponding `_test.go` file. The generated test takes +-into account its signature, including input parameters and results. +- +-**Test file**: if the `_test.go` file does not exist, gopls creates it, based on +-the name of the current file (`a.go` -> `a_test.go`), copying any copyright and +-build constraint comments from the original file. +- +-**Test package**: for new files that test code in package `p`, the test file +-uses `p_test` package name whenever possible, to encourage testing only exported +-functions. (If the test file already exists, the new test is added to that file.) +- +-**Parameters**: each of the function's non-blank parameters becomes an item in +-the struct used for the table-driven test. (For each blank `_` parameter, the +-value has no effect, so the test provides a zero-valued argument.) +- +-**Contexts**: If the first parameter is `context.Context`, the test passes +-`context.Background()`. +- +-**Results**: the function's results are assigned to variables (`got`, `got2`, +-and so on) and compared with expected values (`want`, `want2`, etc.`) defined in +-the test case struct. The user should edit the logic to perform the appropriate +-comparison. If the final result is an `error`, the test case defines a `wantErr` +-boolean. +- +-**Method receivers**: When testing a method `T.F` or `(*T).F`, the test must +-construct an instance of T to pass as the receiver. Gopls searches the package +-for a suitable function that constructs a value of type T or \*T, optionally with +-an error, preferring a function named `NewT`. +- +-**Imports**: Gopls adds missing imports to the test file, using the last +-corresponding import specifier from the original file. It avoids duplicate +-imports, preserving any existing imports in the test file. +- +- +- +- +-## Rename +- +-The LSP +-[`textDocument/rename`](https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#textDocument_rename) +-request renames a symbol. +- +-Renaming is a two-stage process. The first step, a +-[`prepareRename`](https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#textDocument_prepareRename) query, returns the current +-name of the identifier under the cursor (if indeed there is one). +-The client then displays a dialog prompting the user to choose a new +-name by editing the old one. The second step, `rename` proper, applies +-the changes. (This simple dialog support is unique among LSP +-refactoring operations; see microsoft/language-server-protocol#1164.) +- +-Gopls' renaming algorithm takes great care to detect situations in +-which renaming might introduce a compilation error. +-For example, changing a name may cause a symbol to become "shadowed", +-so that some existing references are no longer in scope. 
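+-
+-For instance, in the hypothetical snippet below (invented for
+-illustration, not taken from the gopls documentation), renaming `y` to
+-`x` would introduce a new `x` that shadows the outer one, silently
+-changing what the marked reference means:
+-
+-```go
+-func f() {
+-    x := 1
+-    {
+-        y := 2 // renaming y to x declares a new x here...
+-        _ = y
+-        _ = x // ...so this reference would switch to the inner x
+-    }
+-    _ = x
+-}
+-```
+-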
Gopls will +-report an error, stating the pair of symbols and the shadowed reference: +- +- +- +-As another example, consider renaming a method of a concrete type. +-Renaming may cause the type to no longer satisfy the same interfaces +-as before, which could cause the program to fail to compile. +-To avoid this, gopls inspects each conversion (explicit or implicit) +-from the affected type to an interface type, and checks whether it +-would remain valid after the renaming. If not, it aborts the renaming +-with an error. +- +-If you intend to rename both the original method and the corresponding +-methods of any matching interface types (as well as any methods of +-types matching them in turn), you can indicate this by invoking the +-rename operation on the interface method. +- +-Similarly, gopls will report an error if you rename a field of a +-struct that happens to be an "anonymous" field that embeds a type, +-since that would require a larger renaming involving the type as well. +-If that is what you intend, you can again indicate this by +-invoking the rename operation on the type. +- +-Renaming should never introduce a compilation error, but it may +-introduce dynamic errors. For example, in a method renaming, if there +-is no direct conversion of the affected type to the interface type, +-but there is an intermediate conversion to a broader type (such as `any`) followed by a +-type assertion to the interface type, then gopls may proceed to rename +-the method, causing the type assertion to fail at run time. +-Similar problems may arise with packages that use reflection, such as +-`encoding/json` or `text/template`. There is no substitute for good +-judgment and testing. +- +-Special cases: +- +-- When renaming the declaration of a method receiver, the tool also +- attempts to rename the receivers of all other methods associated +- with the same named type. Each other receiver that cannot be fully +- renamed is quietly skipped. Renaming any _use_ of a receiver affects +- only that variable. +- +- ```go +- type Counter struct { x int } +- +- Rename here to affect only this method +- ↓ +- func (c *Counter) Inc() { c.x++ } +- func (c *Counter) Dec() { c.x++ } +- ↑ +- Rename here to affect all methods +- ``` +- +-- Renaming a package declaration additionally causes the package's +- directory to be renamed. +- +-Some tips for best results: +- +-- The safety checks performed by the Rename algorithm require type +- information. If the program is grossly malformed, there may be +- insufficient information for it to run (https://go.dev/issue/41870), +- and renaming cannot generally be used to fix a type error (https://go.dev/issue/41851). +- When refactoring, we recommend working in small steps, repairing any +- problems as you go, so that as much as possible of the program +- compiles at each step. +-- Sometimes it may be desirable for a renaming operation to change the +- reference structure of the program, for example to intentionally +- combine two variables x and y by renaming y to x. +- The renaming tool is too strict to help in this case (https://go.dev/issue/41852). +- +- +- +-For the gory details of gopls' rename algorithm, you may be interested +-in the latter half of this 2015 GothamGo talk: +-[Using go/types for Code Comprehension and Refactoring Tools](https://www.youtube.com/watch?v=p_cz7AxVdfg). +- +-Client support: +- +-- **VS Code**: Use "[Rename symbol](https://code.visualstudio.com/docs/editor/editingevolved#_rename-symbol)" menu item (`F2`). 
+-- **Emacs + eglot**: Use `M-x eglot-rename`, or `M-x go-rename` from [go-mode](https://github.com/dominikh/go-mode.el).
+-- **Vim + coc.nvim**: Use the `coc-rename` command.
+-- **CLI**: `gopls rename file.go:#offset newname`
+-
+-
+-## `refactor.extract`: Extract function/method/variable
+-
+-The `refactor.extract` family of code actions all return commands that
+-replace the selected expression or statements with a reference to a
+-newly created declaration that contains the selected code:
+-
+-- **`refactor.extract.function`** replaces one or more complete statements by a
+- call to a new function named `newFunction` whose body contains the
+- statements. The selection must enclose fewer statements than the
+- entire body of the existing function.
+-
+- ![Before extracting a function](../assets/extract-function-before.png)
+- ![After extracting a function](../assets/extract-function-after.png)
+-
+-- **`refactor.extract.method`** is a variant of "Extract function" offered when
+- the selected statements belong to a method. The newly created function
+- will be a method of the same receiver type.
+-
+-- **`refactor.extract.variable`** replaces an expression by a reference to a new
+- local variable named `newVar` initialized by the expression:
+-
+- ![Before extracting a var](../assets/extract-var-before.png)
+- ![After extracting a var](../assets/extract-var-after.png)
+-
+-- **`refactor.extract.constant`** does the same thing for a constant
+- expression, introducing a local const declaration.
+-- **`refactor.extract.variable-all`** replaces all occurrences of the selected expression
+-within the function with a reference to a new local variable named `newVar`.
+-This extracts the expression once and reuses it wherever it appears in the function.
+-
+- ![Before extracting all occurrences of EXPR](../assets/extract-var-all-before.png)
+- ![After extracting all occurrences of EXPR](../assets/extract-var-all-after.png)
+-
+-- **`refactor.extract.constant-all`** does the same thing for a constant
+- expression, introducing a local const declaration.
+-
+-If the default name for the new declaration is already in use, gopls
+-generates a fresh name.
+-
+-Extraction is a challenging problem requiring consideration of
+-identifier scope and shadowing, control
+-flow such as `break`/`continue` in a loop or `return` in a
+-function, cardinality of variables, and even subtle issues of style.
+-In each case, the tool will try to update the extracted statements
+-as needed to avoid build breakage or behavior changes.
+-Unfortunately, gopls' Extract algorithms are considerably less
+-rigorous than the Rename and Inline operations, and we are aware of a
+-number of cases where it falls short, including:
+-
+-- https://github.com/golang/go/issues/66289
+-- https://github.com/golang/go/issues/65944
+-- https://github.com/golang/go/issues/63394
+-- https://github.com/golang/go/issues/61496
+-
+-The following Extract features are planned for 2024 but not yet supported:
+-
+-- **Extract parameter struct** will replace two or more parameters of a
+- function by a struct type with one field per parameter; see https://go.dev/issue/65552.
+-
+-
+-- **Extract interface for type** will create a declaration of an
+- interface type with all the methods of the selected concrete type;
+- see https://go.dev/issue/65721 and https://go.dev/issue/46665.
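+-
+-To make the `refactor.extract.variable` action described above
+-concrete, here is a hedged sketch (the code and names are illustrative,
+-not taken from the gopls documentation); selecting the expression
+-`strings.TrimSpace(name)` and extracting it introduces a local variable
+-with the default name `newVar`:
+-
+-```go
+-// Before: the expression to extract is selected.
+-func greet(name string) string {
+-    return "Hello, " + strings.TrimSpace(name) + "!"
+-}
+-
+-// After "Extract variable": the expression is bound to a new local.
+-func greet(name string) string {
+-    newVar := strings.TrimSpace(name)
+-    return "Hello, " + newVar + "!"
+-}
+-```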
+- +- +-## `refactor.extract.toNewFile`: Extract declarations to new file +- +-(Available from gopls/v0.17.0) +- +-If you select one or more top-level declarations, gopls will offer an +-"Extract declarations to new file" code action that moves the selected +-declarations into a new file whose name is based on the first declared +-symbol. +-Import declarations are created as needed. +-Gopls also offers this code action when the selection is just the +-first token of the declaration, such as `func` or `type`. +- +-![Before: select the declarations to move](../assets/extract-to-new-file-before.png) +-![After: the new file is based on the first symbol name](../assets/extract-to-new-file-after.png) +- +- +- +-## `refactor.inline.call`: Inline call to function +- +-For a `codeActions` request where the selection is (or is within) a +-call of a function or method, gopls will return a command of kind +-`refactor.inline.call`, whose effect is to inline the function call. +- +-The screenshots below show a call to `sum` before and after inlining: +- +- +- +-![Before: select Refactor... Inline call to sum](../assets/inline-before.png) +-![After: the call has been replaced by the sum logic](../assets/inline-after.png) +- +-Inlining replaces the call expression by a copy of the function body, +-with parameters replaced by arguments. +-Inlining is useful for a number of reasons. +-Perhaps you want to eliminate a call to a deprecated +-function such as `ioutil.ReadFile` by replacing it with a call to the +-newer `os.ReadFile`; inlining will do that for you. +-Or perhaps you want to copy and modify an existing function in some +-way; inlining can provide a starting point. +-The inlining logic also provides a building block for +-other refactorings, such as "change signature". +- +-Not every call can be inlined. +-Of course, the tool needs to know which function is being called, so +-you can't inline a dynamic call through a function value or interface +-method; but static calls to methods are fine. +-Nor can you inline a call if the callee is declared in another package +-and refers to non-exported parts of that package, or to [internal +-packages](https://go.dev/doc/go1.4#internalpackages) that are +-inaccessible to the caller. +-Calls to generic functions are not yet supported +-(https://go.dev/issue/63352), though we plan to fix that. +- +-When inlining is possible, it's critical that the tool preserve +-the original behavior of the program. +-We don't want refactoring to break the build, or, worse, to introduce +-subtle latent bugs. +-This is especially important when inlining tools are used to perform +-automated clean-ups in large code bases; +-we must be able to trust the tool. +-Our inliner is very careful not to make guesses or unsound +-assumptions about the behavior of the code. +-However, that does mean it sometimes produces a change that differs +-from what someone with expert knowledge of the same code might have +-written by hand. +- +-In the most difficult cases, especially with complex control flow, it +-may not be safe to eliminate the function call at all. +-For example, the behavior of a `defer` statement is intimately tied to +-its enclosing function call, and `defer` is the only control +-construct that can be used to handle panics, so it cannot be reduced +-into simpler constructs. 
+-So, for example, given a function f defined as: +- +-```go +-func f(s string) { +- defer fmt.Println("goodbye") +- fmt.Println(s) +-} +-``` +- +-a call `f("hello")` will be inlined to: +- +-```go +- func() { +- defer fmt.Println("goodbye") +- fmt.Println("hello") +- }() +-``` +- +-Although the parameter was eliminated, the function call remains. +- +-An inliner is a bit like an optimizing compiler. +-A compiler is considered "correct" if it doesn't change the meaning of +-the program in translation from source language to target language. +-An _optimizing_ compiler exploits the particulars of the input to +-generate better code, where "better" usually means more efficient. +-As users report inputs that cause the compiler to emit suboptimal +-code, the compiler is improved to recognize more cases, or more rules, +-and more exceptions to rules---but this process has no end. +-Inlining is similar, except that "better" code means tidier code. +-The most conservative translation provides a simple but (hopefully) +-correct foundation, on top of which endless rules, and exceptions to +-rules, can embellish and improve the quality of the output. +- +-Here are some of the technical challenges involved in sound inlining: +- +-- **Effects:** When replacing a parameter by its argument expression, +- we must be careful not to change the effects of the call. For +- example, if we call a function `func twice(x int) int { return x + x }` +- with `twice(g())`, we do not want to see `g() + g()`, which would +- cause g's effects to occur twice, and potentially each call might +- return a different value. All effects must occur the same number of +- times, and in the same order. This requires analyzing both the +- arguments and the callee function to determine whether they are +- "pure", whether they read variables, or whether (and when) they +- update them too. The inliner will introduce a declaration such as +- `var x int = g()` when it cannot prove that it is safe to substitute +- the argument throughout. +- +-- **Constants:** If inlining always replaced a parameter by its argument +- when the value is constant, some programs would no longer build +- because checks previously done at run time would happen at compile time. +- For example `func index(s string, i int) byte { return s[i] }` +- is a valid function, but if inlining were to replace the call `index("abc", 3)` +- by the expression `"abc"[3]`, the compiler will report that the +- index `3` is out of bounds for the string `"abc"`. +- The inliner will prevent substitution of parameters by problematic +- constant arguments, again introducing a `var` declaration instead. +- +-- **Referential integrity:** When a parameter variable is replaced by +- its argument expression, we must ensure that any names in the +- argument expression continue to refer to the same thing---not to a +- different declaration in the callee function body that happens to +- use the same name. The inliner must replace local references such as +- `Printf` by qualified references such as `fmt.Printf`, and add an +- import of package `fmt` as needed. +- +-- **Implicit conversions:** When passing an argument to a function, it is +- implicitly converted to the parameter type. If we eliminate the parameter +- variable, we don't want to lose the conversion as it may be important. For +- example, in `func f(x any) { y := x; fmt.Printf("%T", &y) }` the type of +- variable y is `any`, so the program prints `"*interface{}"`. 
But if inlining +- the call `f(1)` were to produce the statement `y := 1`, then the type of y +- would have changed to `int`, which could cause a compile error or, as in this +- case, a bug, as the program now prints `"*int"`. When the inliner substitutes +- a parameter variable by its argument value, it may need to introduce explicit +- conversions of each value to the original parameter type, such as `y := +- any(1)`. +- +-- **Last reference:** When an argument expression has no effects +- and its corresponding parameter is never used, the expression +- may be eliminated. However, if the expression contains the last +- reference to a local variable at the caller, this may cause a compile +- error because the variable is now unused. So the inliner must be +- cautious about eliminating references to local variables. +- +-This is just a taste of the problem domain. If you're curious, the +-documentation for [golang.org/x/tools/internal/refactor/inline](https://pkg.go.dev/golang.org/x/tools/internal/refactor/inline) has +-more detail. All of this is to say, it's a complex problem, and we aim +-for correctness first of all. We've already implemented a number of +-important "tidiness optimizations" and we expect more to follow. +- +- +- +-## `refactor.inline.variable`: Inline local variable +- +-For a `codeActions` request where the selection is (or is within) an +-identifier that is a use of a local variable whose declaration has an +-initializer expression, gopls will return a code action of kind +-`refactor.inline.variable`, whose effect is to inline the variable: +-that is, to replace the reference by the variable's initializer +-expression. +- +-For example, if invoked on the identifier `s` in the call `println(s)`: +-```go +-func f(x int) { +- s := fmt.Sprintf("+%d", x) +- println(s) +-} +-``` +-the code action transforms the code to: +- +-```go +-func f(x int) { +- s := fmt.Sprintf("+%d", x) +- println(fmt.Sprintf("+%d", x)) +-} +-``` +- +-(In this instance, `s` becomes an unreferenced variable which you will +-need to remove.) +- +-The code action always replaces the reference by the initializer +-expression, even if there are later assignments to the variable (such +-as `s = ""`). +- +-The code action reports an error if it is not possible to make the +-transformation because one of the identifiers within the initializer +-expression (e.g. `x` in the example above) is shadowed by an +-intervening declaration, as in this example: +- +-```go +-func f(x int) { +- s := fmt.Sprintf("+%d", x) +- { +- x := 123 +- println(s, x) // error: cannot replace s with fmt.Sprintf(...) since x is shadowed +- } +-} +-``` +- +- +-## `refactor.rewrite`: Miscellaneous rewrites +- +-This section covers a number of transformations that are accessible as +-code actions whose kinds are children of `refactor.rewrite`. +- +- +-### `refactor.rewrite.removeUnusedParam`: Remove unused parameter +- +-The [`unusedparams` analyzer](../analyzers.md#unusedparams) reports a +-diagnostic for each parameter that is not used within the function body. +-For example: +- +-```go +-func f(x, y int) { // "unused parameter: x" +- fmt.Println(y) +-} +-``` +- +-It does _not_ report diagnostics for address-taken functions, which +-may need all their parameters, even unused ones, in order to conform +-to a particular function signature. +-Nor does it report diagnostics for exported functions, +-which may be address-taken by another package. +-(A function is _address-taken_ if it is used other than in call position, `f(...)`.) 
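+-
+-To make "address-taken" concrete, here is a hedged sketch (the
+-identifiers are invented for illustration): because `printStatus` is
+-stored in a map rather than only being called, it must keep the
+-signature demanded by the map's function type, so its unused `verbose`
+-parameter is not reported:
+-
+-```go
+-var commands = map[string]func(w io.Writer, verbose bool){
+-    "status": printStatus, // used outside call position: address-taken
+-}
+-
+-func printStatus(w io.Writer, verbose bool) { // verbose is unused but kept
+-    fmt.Fprintln(w, "ok")
+-}
+-```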
+-
+-In addition to the diagnostic, it suggests two possible fixes:
+-
+-1. rename the parameter to `_` to emphasize that it is unreferenced (an immediate edit); or
+-2. delete the parameter altogether, using a `ChangeSignature` command, updating all callers.
+-
+-Fix \#2 uses the same machinery as "Inline function call" (see above)
+-to ensure that the behavior of all existing calls is preserved, even
+-when the argument expression for the deleted parameter has side
+-effects, as in the example below.
+-
+-![The parameter x is unused](../assets/remove-unusedparam-before.png)
+-![The parameter x has been deleted](../assets/remove-unusedparam-after.png)
+-
+-Observe that in the first call, the argument `chargeCreditCard()` was
+-not deleted because of potential side effects, whereas in the second
+-call, the argument 2, a constant, was safely deleted.
+-
+-
+-
+-### `refactor.rewrite.moveParam{Left,Right}`: Move function parameters
+-
+-When the selection is a parameter in a function or method signature, gopls
+-offers a code action to move the parameter left or right (if feasible),
+-updating all callers accordingly.
+-
+-For example:
+-
+-```go
+-func Foo(x, y int) int {
+- return x + y
+-}
+-
+-func _() {
+- _ = Foo(0, 1)
+-}
+-```
+-
+-becomes
+-
+-```go
+-func Foo(y, x int) int {
+- return x + y
+-}
+-
+-func _() {
+- _ = Foo(1, 0)
+-}
+-```
+-
+-following a request to move `x` right, or `y` left.
+-
+-This is a primitive building block of more general "Change signature"
+-operations. We plan to generalize this to arbitrary signature rewriting, but
+-the language server protocol does not currently offer good support for user
+-input into refactoring operations (see
+-[microsoft/language-server-protocol#1164](https://github.com/microsoft/language-server-protocol/issues/1164)).
+-Therefore, any such refactoring will require custom client-side logic. (As a
+-very hacky workaround, you can express arbitrary parameter movement by invoking
+-Rename on the `func` keyword of a function declaration, but this interface is
+-just a temporary stopgap.)
+-
+-
+-### `refactor.rewrite.changeQuote`: Convert string literal between raw and interpreted
+-
+-When the selection is a string literal, gopls offers a code action
+-to convert the string between raw form (`` `abc` ``) and interpreted
+-form (`"abc"`) where this is possible:
+-
+-![Convert to interpreted](../assets/convert-string-interpreted.png)
+-![Convert to raw](../assets/convert-string-raw.png)
+-
+-Applying the code action a second time reverts to the original
+-form.
+-
+-
+-### `refactor.rewrite.invertIf`: Invert 'if' condition
+-
+-When the selection is within an `if`/`else` statement that is not
+-followed by `else if`, gopls offers a code action to invert the
+-statement, negating the condition and swapping the `if` and `else`
+-blocks.
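+-
+-A minimal sketch of the transformation (illustrative code, not taken
+-from the gopls documentation):
+-
+-```go
+-// Before: the selection is inside this if/else statement.
+-if err != nil {
+-    return err
+-} else {
+-    process(v)
+-}
+-
+-// After "Invert if condition": the condition is negated and the
+-// branches are swapped.
+-if err == nil {
+-    process(v)
+-} else {
+-    return err
+-}
+-```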
+- +-![Before "Invert if condition"](../assets/invert-if-before.png) +-![After "Invert if condition"](../assets/invert-if-after.png) +- +- +- +- +- +-### `refactor.rewrite.{split,join}Lines`: Split elements into separate lines +- +-When the selection is within a bracketed list of items such as: +- +-- the **elements** of a composite literal, `[]T{a, b, c}`, +-- the **arguments** of a function call, `f(a, b, c)`, +-- the **groups of parameters** of a function signature, `func(a, b, c int, d, e bool)`, or +-- its **groups of results**, `func() (x, y string, z rune)`, +- +-gopls will offer the "Split [items] into separate lines" code +-action, which would transform the forms above into these forms: +- +-```go +-[]T{ +- a, +- b, +- c, +-} +- +-f( +- a, +- b, +- c, +-) +- +-func( +- a, b, c int, +- d, e bool, +-) +- +-func() ( +- x, y string, +- z rune, +-) +-``` +- +-Observe that in the last two cases, each +-[group](https://pkg.go.dev/go/ast#Field) of parameters or results is +-treated as a single item. +- +-The opposite code action, "Join [items] into one line", undoes the operation. +-Neither action is offered if the list is already full split or joined, +-respectively, or trivial (fewer than two items). +- +-These code actions are not offered for lists containing `//`-style +-comments, which run to the end of the line. +- +- +- +- +-### `refactor.rewrite.fillStruct`: Fill struct literal +- +-When the cursor is within a struct literal `S{}`, gopls offers the +-"Fill S" code action, which populates each missing field of the +-literal that is accessible. +- +-It uses the following heuristic to choose the value assigned to each +-field: it finds candidate variables, constants, and functions that are +-assignable to the field, and picks the one whose name is the closest +-match to the field name. +-If there are none, it uses the zero value (such as `0`, `""`, or +-`nil`) of the field's type. +- +-In the example below, a +-[`slog.HandlerOptions`](https://pkg.go.dev/golang.org/x/exp/slog#HandlerOptions) +-struct literal is filled in using two local variables (`level` and +-`add`) and a function (`replace`): +- +-![Before "Fill slog.HandlerOptions"](../assets/fill-struct-before.png) +-![After "Fill slog.HandlerOptions"](../assets/fill-struct-after.png) +- +-Caveats: +- +-- This code action requires type information for the struct type, so +- if it is defined in another package that is not yet imported, you +- may need to "organize imports" first, for example by saving the +- file. +-- Candidate declarations are sought only in the current file, and only +- above the current point. Symbols declared beneath the current point, +- or in other files in the package, are not considered; see +- https://go.dev/issue/68224. +- +- +-### `refactor.rewrite.fillSwitch`: Fill switch +- +-When the cursor is within a switch statement whose operand type is an +-_enum_ (a finite set of named constants), or within a type switch, +-gopls offers the "Add cases for T" code action, which populates the +-switch statement by adding a case for each accessible named constant +-of the enum type, or, for a type switch, by adding a case for each +-accessible named non-interface type that implements the interface. +-Only missing cases are added. +- +-The screenshots below show a type switch whose operand has the +-[`net.Addr`](https://pkg.go.dev/net#Addr) interface type. 
The code +-action adds one case per concrete network address type, plus a default +-case that panics with an informative message if an unexpected operand +-is encountered. +- +-![Before "Add cases for Addr"](../assets/fill-switch-before.png) +-![After "Add cases for Addr"](../assets/fill-switch-after.png) +- +-And these screenshots illustrate the code action adding cases for each +-value of the +-[`html.TokenType`](https://pkg.go.dev/golang.org/x/net/html#TokenType) +-enum type, which represents the various types of token from +-which HTML documents are composed: +- +-![Before "Add cases for Addr"](../assets/fill-switch-enum-before.png) +-![After "Add cases for Addr"](../assets/fill-switch-enum-after.png) +- +- +- +-### `refactor.rewrite.eliminateDotImport`: Eliminate dot import +- +-When the cursor is on a dot import gopls can offer the "Eliminate dot import" +-code action, which removes the dot from the import and qualifies uses of the +-package throughout the file. This code action is offered only if +-each use of the package can be qualified without collisions with existing names. +- +- +-### `refactor.rewrite.addTags`: Add struct tags +- +-When the cursor is within a struct, this code action adds to each field a `json` +-struct tag that specifies its JSON name, using lower case with underscores +-(e.g. LinkTarget becomes link_target). For a highlighted selection, it only +-adds tags on selected fields. +- +- +-### `refactor.rewrite.removeTags`: Remove struct tags +- +-When the cursor is within a struct, this code action clears struct tags on +-all struct fields. For a highlighted selection, it removes tags from only +-the selected fields. +diff -urN a/gopls/doc/features/web.md b/gopls/doc/features/web.md +--- a/gopls/doc/features/web.md 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/doc/features/web.md 1969-12-31 18:00:00.000000000 -0600 +@@ -1,185 +0,0 @@ +---- +-title: "Gopls: Web-based features" +---- +- +-The LSP +-[`window.showDocument`](https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#window_showDocument) request +-allows the server to instruct the client to open a file in the editor +-or a web page in a browser. It is the basis for a number of gopls +-features that report information about your program through a web +-interface. +- +-We recognize that a web interface is not ideal for everyone: some +-users prefer a full-screen editor layout and dislike switching +-windows; others may work in a text-only terminal without a window +-system, perhaps over remote ssh or on the Linux console. +-Unfortunately, the LSP lacks several natural kinds of extensibility, +-including the ability for servers to define: +- +-- queries that [generalize a References +- query](https://github.com/microsoft/language-server-protocol/issues/1911), +- displaying results using similar UI elements; +-- commands that [produce a stream of +- text](https://github.com/joaotavora/eglot/discussions/1402), like a +- typical shell command or compiler, that the client can redirect to +- the editor's usual terminal-like UI element; or +-- refactoring operations that, like Rename, [prompt the +- user](https://github.com/microsoft/language-server-protocol/issues/1164) +- for additional information. +- +-The web-based UI can help fill these gaps until such time as the LSP +-provides standard ways of implementing these features. +- +-Gopls' web server listens on a `localhost` port. 
For security, all its +-endpoints include a random string that serves as an authentication +-token. The client, provided authenticated URLs by the server, will be +-able to access your source code, but arbitrary processes running on +-your machine will not. +-Restarting the gopls process causes this secret to change, rendering +-all existing previous URLs invalid; existing pages will display a banner +-indicating that they have become disconnected. +- +-TODO: combine the web server and the debug server; see https://go.dev/issue/68229. +- +-Gopls supports two-way communication between the web browser and the +-client editor. All of the web-based reports contain links to +-declarations in your source code. Clicking on one of these links +-causes gopls to send a `showDocument` request to your editor to open +-the relevant source file at the appropriate line. This works even when +-your source code has been modified but not saved. +-(VS Code users: please upvote microsoft/vscode#208093 if you would +-like your editor to raise its window when handling this event.) +- +- +-## `source.doc`: Browse package documentation +- +-In any Go source file, a code action request returns a command to +-"Browse package documentation". This command opens a browser window +-showing the documentation for the current Go package, presented using +-a similar design to https://pkg.go.dev. +- +-This allows you to preview the documentation for your packages, even +-internal ones that may be unpublished externally. Reloading the page +-updates the documentation to reflect your changes. It is not necessary +-to save modified Go source files. +- +- +- +-Clicking on the link for a package-level symbol or method, which in +-`pkg.go.dev` would ordinarily take you to a source-code viewer such as +-GitHub or Google Code Search, causes your editor to navigate to the +-relevant source file and line. +- +-Client support: +-- **VS Code**: Use the "Source Action... > Browse documentation for package P" menu. +-- **Emacs + eglot**: Use `M-x go-browse-doc` in [go-mode](https://github.com/dominikh/go-mode.el). +-- **Vim + coc.nvim**: ?? +- +- +- +-## `source.freesymbols`: Browse free symbols +- +-When studying code, either to understand it or to evaluate a different +-organization or factoring, it is common to need to know what the +-"inputs" are to a given chunk of code, either because you are +-considering extracting it into its own function and want to know what +-parameters it would take, or just to understand how one piece of a long +-function relates to the preceding pieces. +- +-If you select a chunk of code, and invoke the "Browse free symbols" +-[code action](transformation.md#code-actions), your editor will +-open a browser displaying a report on the free symbols of the +-selection. A symbol is "free" if it is referenced from within the +-selection but defined outside of it. In essence, these are the inputs +-to the selected chunk. +- +- +- +-The report classifies the symbols into imported, local, and +-package-level symbols. The imported symbols are grouped by package, +-and link to the documentation for the package, as described above. +-Each of the remaining symbols is presented as a link that causes your +-editor to navigate to its declaration. +- +-TODO: explain dotted paths. +- +-Client support: +-- **VS Code**: Use the "Source Action... > Browse free symbols" menu. +-- **Emacs + eglot**: Use `M-x go-browse-freesymbols` in [go-mode](https://github.com/dominikh/go-mode.el). +-- **Vim + coc.nvim**: ?? 
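+-
+-To make the notion of free symbols concrete, here is a hedged sketch
+-(the code and the `Item` type are invented for illustration; the exact
+-grouping of the report may differ). Selecting just the two statements
+-inside the loop body and invoking "Browse free symbols" would report
+-the local variables `total` and `out`, the dotted paths `item.Price`
+-and `item.Name` rooted at the free loop variable `item`, and
+-`fmt.Fprintln` grouped under the imported `fmt` package:
+-
+-```go
+-func report(items []Item, out io.Writer) {
+-    total := 0
+-    for _, item := range items {
+-        // selection starts here
+-        total += item.Price
+-        fmt.Fprintln(out, item.Name)
+-        // selection ends here
+-    }
+-    fmt.Fprintln(out, "total:", total)
+-}
+-```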
+- +- +- +-## `source.assembly`: Browse assembly +- +-When you're optimizing the performance of your code or investigating +-an unexpected crash, it may sometimes be helpful to inspect the +-assembly code produced by the compiler for a given Go function. +- +-If you position the cursor or selection within a function f, +-gopls offers the "Browse assembly for f" [code action](transformation.md#code-actions). +-This opens a web-based listing of the assembly for the function, plus +-any functions nested within it. +- +-Each time you edit your source and reload the page, the current +-package is recompiled and the listing is updated. It is not necessary +-to save your modified files. +- +-The compiler's target architecture is the same as the one gopls uses +-when analyzing the file: typically, this is your machine's GOARCH, but +-when viewing a file with a build tag, such as one named `foo_amd64.go` +-or containing the comment `//go:build amd64`, the tags determine the +-architecture. +- +-Each instruction is displayed with a link that causes your editor to +-navigate to the source line responsible for the instruction, according +-to the debug information. +- +- +- +-The example above shows the arm64 assembly listing of +-[`time.NewTimer`](https://pkg.go.dev/time#NewTimer). +-Observe that the indicated instruction links to a source location +-inside a different function, `syncTimer`, because the compiler +-inlined the call from `NewTimer` to `syncTimer`. +- +-Browsing assembly is not yet supported for generic functions, package +-initializers (`func init`), or functions in test packages. +-(Contributions welcome!) +- +-Client support: +-- **VS Code**: Use the "Source Action... > Browse GOARCH assembly for f" menu. +-- **Emacs + eglot**: Use `M-x go-browse-assembly` in [go-mode](https://github.com/dominikh/go-mode.el). +-- **Vim + coc.nvim**: ?? +- +- +- +-## `source.splitPackage`: Split package into components +- +-The web-based "Split package" tool can help you split a complex +-package into two or more components, ensuring that the dependencies +-among those components are acyclic. +- +-Follow the instructions on the page to choose a set of named components, +-assign each declaration to the most appropriate component, and then +-visualize the dependencies between those components created by references +-from one symbol to another. +- +-The figure below shows the tool operating on the `fmt` package, which +-could (in principle) be split into three subpackages, one for +-formatting (`Printf` and friends), one for scanning (`Scanf`), and one +-for their common dependencies. +- +- +- +-(Try playing with the tool on this package: it's an instructive +-exercise. The figure below shows the solution.) +- +- +- +-The tool does not currently perform the code transformation (moving +-declarations to new packages, renaming symbols to export them as +-needed), but we hope to add that in a future release. +- +-Client support: +-- **VS Code**: Use the "Source Action... > Split package P" menu. +diff -urN a/gopls/doc/index.md b/gopls/doc/index.md +--- a/gopls/doc/index.md 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/doc/index.md 1969-12-31 18:00:00.000000000 -0600 +@@ -1,196 +0,0 @@ +---- +-title: "Gopls: The language server for Go" +---- +- +- +-`gopls` (pronounced "Go please") is the official [language +-server](https://langserver.org) for Go, developed by the Go team. 
It
+-provides a wide variety of [IDE features](features/) to any
+-[LSP](https://microsoft.github.io/language-server-protocol/)-compatible
+-editor.
+-
+-
+-
+-You should not need to interact with `gopls` directly--it will be automatically
+-integrated into your editor. The specific features and settings vary slightly
+-by editor, so we recommend that you proceed to the
+-[documentation for your editor](#editors) below.
+-Also, the gopls documentation for each feature describes whether it is
+-supported in each client editor.
+-
+-This documentation (https://go.dev/gopls) describes the most recent release of gopls.
+-To preview documentation for the release under development, visit https://tip.golang.org/gopls.
+-
+-## Features
+-
+-Gopls supports a wide range of standard LSP features for navigation,
+-completion, diagnostics, analysis, and refactoring, and a number of
+-additional features not found in other language servers.
+-
+-See the [Index of features](features/) for complete
+-documentation on what Gopls can do for you.
+-
+-## Editors
+-
+-To get started with `gopls`, install an LSP plugin in your editor of choice.
+-
+-
+-
+-* [Acme](https://github.com/fhs/acme-lsp/blob/master/README.md)
+-* [Atom](https://github.com/MordFustang21/ide-gopls/blob/master/README.md)
+-* [Emacs](editor/emacs.md)
+-* [Helix](editor/helix.md)
+-* [Lapce](https://github.com/lapce-community/lapce-go/blob/master/README.md)
+-* [Sublime Text](editor/sublime.md)
+-* [VS Code](https://github.com/golang/vscode-go/blob/master/README.md)
+-* [Vim or Neovim](editor/vim.md)
+-* [Zed](editor/zed.md)
+-
+-If you use `gopls` with an editor that is not on this list, please send us a CL
+-[updating this documentation](contributing.md).
+-
+-## Installation
+-
+-To install the latest stable release of `gopls`, run the following command:
+-
+-```sh
+-go install golang.org/x/tools/gopls@latest
+-```
+-
+-Some editors, such as VS Code, will handle this step for you, and
+-ensure that Gopls is updated when a new stable version is released.
+-
+-After updating, you may need to restart running Gopls processes to
+-observe the effect. Each client has its own way to restart the server.
+-(On a UNIX machine, you can use the command `killall gopls`.)
+-
+-Learn more in the
+-[advanced installation instructions](advanced.md#installing-unreleased-versions).
+-
+-## Releases
+-
+-Gopls [releases](release/) follow [semantic versioning](http://semver.org), with
+-major changes and new features introduced only in new minor versions
+-(i.e. versions of the form `v*.N.0` for some N). Subsequent patch
+-releases contain only cherry-picked fixes or superficial updates.
+-
+-In order to align with the
+-[Go release timeline](https://github.com/golang/go/wiki/Go-Release-Cycle#timeline),
+-we aim to release a new minor version of Gopls approximately every three
+-months, with patch releases approximately every month, according to the
+-following table:
+-
+-| Month | Version(s) |
+-| ---- | ------- |
+-| Jan | `v*.<N>.0` |
+-| Jan-Mar | `v*.<N>.*` |
+-| Apr | `v*.<N+1>.0` |
+-| Apr-Jun | `v*.<N+1>.*` |
+-| Jul | `v*.<N+2>.0` |
+-| Jul-Sep | `v*.<N+2>.*` |
+-| Oct | `v*.<N+3>.0` |
+-| Oct-Dec | `v*.<N+3>.*` |
+-
+-For more background on this policy, see https://go.dev/issue/55267.
+-
+-## Setting up your workspace
+-
+-`gopls` supports Go module, multi-module, and GOPATH modes. See the
+-[workspace documentation](workspace.md) for information on supported
+-workspace layouts.
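+-
+-For instance, a multi-module workspace is typically described by a
+-`go.work` file at the workspace root (a hedged sketch; the module paths
+-are invented, and the workspace documentation linked above is the
+-authoritative reference):
+-
+-```
+-go 1.23
+-
+-use (
+-    ./tools
+-    ./service
+-)
+-```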
+- +-## Configuration +- +-You can configure `gopls` to change your editor experience or view additional +-debugging information. Configuration options will be made available by your +-editor, so see your [editor's instructions](#editors) for specific details. A +-full list of `gopls` settings can be found in the [settings documentation](settings.md). +- +-### Environment variables +- +-`gopls` inherits your editor's environment, so be aware of any environment +-variables you configure. Some editors, such as VS Code, allow users to +-selectively override the values of some environment variables. +- +-## Support policy +- +-Gopls is maintained by engineers on the +-[Go tools team](https://github.com/orgs/golang/teams/tools-team/members), +-who actively monitor the +-[Go](https://github.com/golang/go/issues?q=is%3Aissue+is%3Aopen+label%3Agopls) +-and +-[VS Code Go](https://github.com/golang/vscode-go/issues) issue trackers. +- +-### Supported Go versions +- +-`gopls` follows the +-[Go Release Policy](https://go.dev/doc/devel/release#policy), meaning +-that it officially supports only the two most recent major Go releases. +- +-When using gopls, there are three versions to be aware of: +-1. The _gopls build go version_: the version of Go used to build gopls. +-2. The _go command version_: the version of the go list command executed by +- gopls to load information about your workspace. +-3. The _language version_: the version in the go directive of the current +- file's enclosing go.mod file, which determines the file's Go language +- semantics. +- +-Starting with the release of Go 1.23.0 and gopls@v0.17.0 in August 2024, we +-will only support the most recent Go version as the _gopls build go version_. +-However, due to the [forward compatibility](https://go.dev/blog/toolchain) +-support added in Go 1.21, as long as Go 1.21 or later are used to install +-gopls, any necessary toolchain upgrade will be handled automatically, just like +-any other dependency. +- +-Additionally, starting with gopls@v0.17.0, the _go command version_ will narrow +-from 4 versions to 3. This is more consistent with the Go Release Policy. +- +-Gopls supports **all** Go versions as its _language version_, by providing +-compiler errors based on the language version and filtering available standard +-library symbols based on the standard library APIs available at that Go +-version. +- +-Maintaining support for building gopls with legacy versions of Go caused +-[significant friction](https://go.dev/issue/50825) for gopls maintainers and +-held back other improvements. If you are unable to install a supported version +-of Go on your system, you can still install an older version of gopls. The +-following table shows the final gopls version that supports a given Go version. +-Go releases more recent than those in the table can be used with any version of +-gopls. 
+- +-| Go Version | Final gopls version with support (without warnings) | +-| ----------- | --------------------------------------------------- | +-| Go 1.12 | [gopls@v0.7.5](https://github.com/golang/tools/releases/tag/gopls%2Fv0.7.5) | +-| Go 1.15 | [gopls@v0.9.5](https://github.com/golang/tools/releases/tag/gopls%2Fv0.9.5) | +-| Go 1.17 | [gopls@v0.11.0](https://github.com/golang/tools/releases/tag/gopls%2Fv0.11.0) | +-| Go 1.18 | [gopls@v0.14.2](https://github.com/golang/tools/releases/tag/gopls%2Fv0.14.2) | +-| Go 1.20 | [gopls@v0.15.3](https://github.com/golang/tools/releases/tag/gopls%2Fv0.15.3) | +- +-### Supported build systems +- +-`gopls` currently only supports the `go` command, so if you are using +-a different build system, `gopls` will not work well. Bazel is not officially +-supported, but may be made to work with an appropriately configured +-[go/packages driver](https://pkg.go.dev/golang.org/x/tools/go/packages#hdr-The_driver_protocol). +-See [bazelbuild/rules_go#512](https://github.com/bazelbuild/rules_go/issues/512) +-for more information. +-You can follow [these instructions](https://github.com/bazelbuild/rules_go/wiki/Editor-setup) +-to configure your `gopls` to work with Bazel. +- +-### Troubleshooting +- +-If you are having issues with `gopls`, please follow the steps described in the +-[troubleshooting guide](troubleshooting.md). +- +-## Additional information +- +-* [Command-line interface](command-line.md) +-* [Advanced topics](advanced.md) +-* [Open issues](https://github.com/golang/go/issues?q=is%3Aissue+is%3Aopen+label%3Agopls) +-* [Contributing to `gopls`](contributing.md) +diff -urN a/gopls/doc/inlayHints.md b/gopls/doc/inlayHints.md +--- a/gopls/doc/inlayHints.md 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/doc/inlayHints.md 1969-12-31 18:00:00.000000000 -0600 +@@ -1,112 +0,0 @@ +---- +-title: "Gopls: Inlay hints" +---- +- +-Inlay hints are helpful annotations that the editor can optionally +-display in-line in the source code, such as the names of parameters in +-a function call. This document describes the inlay hints available +-from `gopls`. +- +- +- +-## **assignVariableTypes** +- +-`"assignVariableTypes"` controls inlay hints for variable types in assign statements: +-```go +- i/* int*/, j/* int*/ := 0, len(r)-1 +-``` +- +- +-**Disabled by default. Enable it by setting `"hints": {"assignVariableTypes": true}`.** +- +-## **compositeLiteralFields** +- +-`"compositeLiteralFields"` inlay hints for composite literal field names: +-```go +- {/*in: */"Hello, world", /*want: */"dlrow ,olleH"} +-``` +- +- +-**Disabled by default. Enable it by setting `"hints": {"compositeLiteralFields": true}`.** +- +-## **compositeLiteralTypes** +- +-`"compositeLiteralTypes"` controls inlay hints for composite literal types: +-```go +- for _, c := range []struct { +- in, want string +- }{ +- /*struct{ in string; want string }*/{"Hello, world", "dlrow ,olleH"}, +- } +-``` +- +- +-**Disabled by default. Enable it by setting `"hints": {"compositeLiteralTypes": true}`.** +- +-## **constantValues** +- +-`"constantValues"` controls inlay hints for constant values: +-```go +- const ( +- KindNone Kind = iota/* = 0*/ +- KindPrint/* = 1*/ +- KindPrintf/* = 2*/ +- KindErrorf/* = 3*/ +- ) +-``` +- +- +-**Disabled by default. 
Enable it by setting `"hints": {"constantValues": true}`.**
+-
+-## **functionTypeParameters**
+-
+-`"functionTypeParameters"` controls inlay hints for implicit type parameters on generic functions:
+-```go
+- myFoo/*[int, string]*/(1, "hello")
+-```
+-
+-
+-**Disabled by default. Enable it by setting `"hints": {"functionTypeParameters": true}`.**
+-
+-## **ignoredError**
+-
+-`"ignoredError"` controls inlay hints for implicitly discarded errors:
+-```go
+- f.Close() // ignore error
+-```
+-This check inserts an `// ignore error` hint following any
+-statement that is a function call whose error result is
+-implicitly ignored.
+-
+-To suppress the hint, write an actual comment containing
+-"ignore error" following the call statement, or explicitly
+-assign the result to a blank variable. A handful of common
+-functions such as `fmt.Println` are excluded from the
+-check.
+-
+-
+-**Disabled by default. Enable it by setting `"hints": {"ignoredError": true}`.**
+-
+-## **parameterNames**
+-
+-`"parameterNames"` controls inlay hints for parameter names:
+-```go
+- parseInt(/* str: */ "123", /* radix: */ 8)
+-```
+-
+-
+-**Disabled by default. Enable it by setting `"hints": {"parameterNames": true}`.**
+-
+-## **rangeVariableTypes**
+-
+-`"rangeVariableTypes"` controls inlay hints for variable types in range statements:
+-```go
+- for k/* int*/, v/* string*/ := range []string{} {
+- fmt.Println(k, v)
+- }
+-```
+-
+-
+-**Disabled by default. Enable it by setting `"hints": {"rangeVariableTypes": true}`.**
+-
+-
+diff -urN a/gopls/doc/README.md b/gopls/doc/README.md
+--- a/gopls/doc/README.md 2000-01-01 00:00:00.000000000 -0000
++++ b/gopls/doc/README.md 1969-12-31 18:00:00.000000000 -0600
+@@ -1 +0,0 @@
+-See [index.md](index.md).
+diff -urN a/gopls/doc/release/README b/gopls/doc/release/README
+--- a/gopls/doc/release/README 2000-01-01 00:00:00.000000000 -0000
++++ b/gopls/doc/release/README 1969-12-31 18:00:00.000000000 -0600
+@@ -1,10 +0,0 @@
+-This directory contains the draft release notes for each upcoming release.
+-
+-Be sure to update the file for the forthcoming release in the same CL
+-that you add new features or fix noteworthy bugs.
+-
+-See https://github.com/golang/tools/releases for all past releases.
+-
+-Tip: when reviewing edits to markdown files in Gerrit, to see the
+-rendered form, click the "Open in Code Search" link (magnifying glass
+-in blue square) then click "View in > gitiles" (shortcut: `v g`).
+diff -urN a/gopls/doc/release/v0.16.0.md b/gopls/doc/release/v0.16.0.md
+--- a/gopls/doc/release/v0.16.0.md 2000-01-01 00:00:00.000000000 -0000
++++ b/gopls/doc/release/v0.16.0.md 1969-12-31 18:00:00.000000000 -0600
+@@ -1,288 +0,0 @@
+----
+-title: "Gopls release v0.16.0"
+----
+-
+-This release includes several features and bug fixes, and is the first
+-version of gopls to support Go 1.23. To install it, run:
+-
+-```
+-go install golang.org/x/tools/gopls@v0.16.2
+-```
+-
+-## New support policy; end of support for Go 1.19 and Go 1.20
+-
+-**TL;DR: We are narrowing gopls' support window, but this is unlikely to
+-affect you as long as you use at least Go 1.21 to build gopls. This doesn't
+-affect gopls' support for the code you are writing.**
+-
+-This is the last release of gopls that may be built with Go 1.19 or Go 1.20,
+-and also the last to support integrating with go command versions 1.19 and
+-1.20. If built or used with either of these Go versions, it will display
+-a message advising the user to upgrade.
+-
+-When using gopls, there are three versions to be aware of:
+-
+-1.
The _gopls build go version_: the version of Go used to build gopls. +-2. The _go command version_: the version of the go list command executed by +- gopls to load information about your workspace. +-3. The _language version_: the version in the go directive of the current +- file's enclosing go.mod file, which determines the file's Go language +- semantics. +- +-This gopls release, v0.16.0, is the final release to support Go 1.19 and Go +-1.20 as the _gopls build go version_ or _go command version_. There is no +-change to gopls' support for all _language versions_--in fact this support has +-somewhat improved with the addition of the `stdversion` analyzer (see below). +- +-Starting with gopls@v0.17.0, which will be released after Go 1.23.0 is released +-in August, gopls will only support the latest version of Go as the +-_gopls build go version_. +-However, thanks to the [forward compatibility](https://go.dev/blog/toolchain) +-added to Go 1.21, any necessary toolchain upgrade should be handled +-automatically for users of Go 1.21 or later, just like any other dependency. +-Additionally, we are reducing our _go command version_ support window from +-4 versions to 3. Note that this means if you have at least Go 1.21 installed on +-your system, you should still be able to `go install` and use gopls@v0.17.0. +- +-We have no plans to ever change our _language version_ support: we expect that +-gopls will always support developing programs that target _any_ Go version. +- +-By focusing on building gopls with the latest Go version, we can significantly +-reduce our maintenance burden and help improve the stability of future gopls +-releases. See the newly updated +-[support policy](https://github.com/golang/tools/tree/master/gopls#support-policy) +-for details. Please comment on golang/go#65917 if +-you have concerns about this change. +- +-## Configuration changes +- +-- The experimental `allowImplicitNetworkAccess` setting is deprecated (but not +- yet removed). Please comment on golang/go#66861 if you use this +- setting and would be impacted by its removal. +- +-## New features +- +-### Go 1.23 support +- +-This version of gopls is the first to support the new language features of Go 1.23, +-including +-[range-over-func](https://go.dev/wiki/RangefuncExperiment) iterators +-and support for the +-[`godebug` directive](https://go.dev/ref/mod#go-mod-file-godebug) +-in go.mod files. +- +-### Integrated documentation viewer +- +-Gopls now offers a "Browse documentation" code action that opens a +-local web page displaying the generated documentation for Go packages +-and symbols in a form similar to https://pkg.go.dev. +-The package or symbol is chosen based on the current selection. +- +-Use this feature to preview the marked-up documentation as you prepare API +-changes, or to read the documentation for locally edited packages, +-even ones that have not yet been saved. Reload the page after an edit +-to see updated documentation. +- +- +- +-As in `pkg.go.dev`, the heading for each symbol contains a link to the +-source code of its declaration. In `pkg.go.dev`, these links would refer +-to a source code page on a site such as GitHub or Google Code Search. +-However, in gopls' internal viewer, clicking on one of these links will +-cause your editor to navigate to the declaration. +-(This feature requires that your LSP client honors the `showDocument` downcall.) +- +- +- +-Editor support: +- +-- VS Code: use the "Source action > Browse documentation for func fmt.Println" menu item. 
+- Note: source links navigate the editor but don't yet raise the window yet. +- Please upvote microsoft/vscode#208093 and microsoft/vscode#207634 (temporarily closed). +-- Emacs: requires eglot v1.17. Use `M-x go-browse-doc` from github.com/dominikh/go-mode.el. +- +-The `linksInHover` setting now supports a new value, `"gopls"`, +-that causes documentation links in the Markdown output +-of the Hover operation to link to gopls' internal doc viewer. +- +-### Browse free symbols +- +-Gopls offers another web-based code action, "Browse free symbols", +-which displays the free symbols referenced by the selected code. +- +-A symbol is "free" if it is referenced within the selection but +-declared outside of it. The free symbols that are variables are +-approximately the set of parameters that would be needed if the block +-were extracted into its own function. +- +-Even when you don't intend to extract a block into a new function, +-this information can help you to tell at a glance what names a block +-of code depends on. +- +-Each dotted path of identifiers (such as `file.Name.Pos`) is reported +-as a separate item, so that you can see which parts of a complex +-type are actually needed. +- +-The free symbols of the body of a function may reveal that +-only a small part (a single field of a struct, say) of one of the +-function's parameters is used, allowing you to simplify and generalize +-the function by choosing a different type for that parameter. +- +- +- +-Editor support: +- +-- VS Code: use the `Source action > Browse free symbols` menu item. +-- Emacs: requires eglot v1.17. Use `M-x go-browse-freesymbols` from github.com/dominikh/go-mode.el. +- +-### Browse assembly +- +-Gopls offers a third web-based code action, "Browse assembly for f", +-which displays an assembly listing of the declaration of the function +-f enclosing the selected code, plus any nested functions such as +-function literals or deferred calls. +- +-Gopls invokes the compiler to generate the report; +-reloading the page updates the report. +- +-The machine architecture is determined by the build +-configuration that gopls selects for the current file. +-This is usually the same as your machine's GOARCH unless you are +-working in a file with `go:build` tags for a different architecture. +- +- +- +-Gopls cannot yet display assembly for generic functions: +-generic functions are not fully compiled until they are instantiated, +-but any function declaration enclosing the selection cannot be an +-instantiated generic function. +- +- +- +-Editor support: +- +-- VS Code: use the "Source action > Browse assembly for f" menu item. +-- Emacs: requires eglot v1.17. Use `M-x go-browse-assembly` from github.com/dominikh/go-mode.el. 
+- +-### `unusedwrite` analyzer +- +-The new +-[unusedwrite](https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/unusedwrite) +-analyzer reports assignments, often to fields of structs, that have no +-effect because, for example, the struct is never used again: +- +-```go +-func scheme(host string) string { +- u := &url.URL{ +- Host: host, // "unused write to field Host" (no need to construct a URL) +- Scheme: "https:", +- } +- return u.Scheme +-} +-``` +- +-This is at best an indication that the code is unnecessarily complex +-(for instance, some dead code could be removed), but often indicates a +-bug, as in this example: +- +-```go +-type S struct { x int } +- +-func (s S) set(x int) { +- s.x = x // "unused write to field x" (s should be a *S pointer) +-} +-``` +- +-### `stdversion` analyzer +- +-The new +-[`stdversion`](https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/stdversion) +-analyzer warns about the use of too-new standard library symbols based on the +-version of the `go` directive in your `go.mod` file. This improves our support +-for older _language versions_ (see above), even when gopls is built with +-a recent Go version. +- +-Consider the go.mod file and Go file below. +-The declaration of `var `alias refers to a type, `types.Alias`, +-introduced in go1.22, but the file belongs to a module that requires +-only go1.21, so the analyzer reports a diagnostic: +- +-``` +-module example.com +-go 1.21 +-``` +- +-```go +-package p +- +-import "go/types" +- +-var alias types.Alias // types.Alias requires go1.22 or later (module is go1.21) +-``` +- +-When an individual file is build-tagged for a release of Go other than +-than module's version, the analyzer will apply appropriate checks for +-the file's version. +- +-### Two more vet analyzers +- +-The [framepointer](https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/framepointer) +-and [sigchanyzer](https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/sigchanyzer) +-analyzers have long been part of go vet's suite, +-but had been overlooked in previous versions of gopls. +- +-Henceforth, gopls will always include any analyzers run by vet. +- +-### Hover shows size/offset info, and struct tags +- +-Hovering over the identifier that declares a type or struct field now +-displays the size information for the type: +- +- +- +-and the offset information for the field: +- +- +- +-In addition, it reports the percentage of wasted space due to +-suboptimal ordering of struct fields, if this figure is 20% or higher: +- +- +- +-In the struct above, alignment rules require each of the two boolean +-fields (1 byte) to occupy a complete word (8 bytes), leading to (7 + 7) / (3 \* 8) = 58% waste. +-Placing the two booleans together would save a word. +- +-This information may be helpful when making space optimizations to +-your data structures, or when reading assembly code. +- +-Also, hovering over a reference to a field with a struct tag now also +-display the tag: +- +- +- +-### Hover and "Go to Definition" work on symbols in doc comments +- +-Go 1.19 added support for [doc links](https://go.dev/doc/comment#links), +-allowing the doc comment for one symbol to reference another. +- +-Gopls' Hover and Definition operations now treat these links just +-like identifiers, so hovering over one will display information about +-the symbol: +- +- +- +-Similarly, "Go to definition" will navigate to its declaration. +-Thanks to @rogeryk for contributing this feature. +- +-## Bugs fixed +- +-## Thank you to our contributors! 
+- +-@guodongli-google for the `unusedwrite` analyzer. +-TODO: they're a xoogler; is there a more current GH account? +- +-@rogeryk +diff -urN a/gopls/doc/release/v0.17.0.md b/gopls/doc/release/v0.17.0.md +--- a/gopls/doc/release/v0.17.0.md 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/doc/release/v0.17.0.md 1969-12-31 18:00:00.000000000 -0600 +@@ -1,200 +0,0 @@ +---- +-title: "Gopls release v0.17.0" +---- +- +- +- +-``` +-go install golang.org/x/tools/gopls@v0.17.0-pre.4 +-``` +- +-## New support policies +- +-With this release, we are narrowing our official support window to align with +-the [Go support policy](https://go.dev/doc/devel/release#policy). This will +-reduce the considerable costs to us of testing against older Go versions, +-allowing us to spend more time fixing bugs and adding features that benefit the +-majority of gopls users who run recent versions of Go. +- +-This narrowing is occurring in two dimensions: **build compatibility** refers to +-the versions of the Go toolchain that can be used to build gopls, and **go +-command compatibility** refers to the versions of the `go` command that can be +-used by gopls to list information about packages and modules in your workspace. +- +-### Build compatibility: the most recent major Go version +- +-As described in the [v0.16.0 release +-notes](https://github.com/golang/tools/releases/tag/gopls%2Fv0.16.0), building the +-latest version of gopls will now require the latest major version of the Go +-toolchain. Therefore this release (gopls@v0.17.0) must be built with Go 1.23.0 +-or later. Thanks to [automatic toolchain +-upgrades](https://go.dev/blog/toolchain), if your system Go version is at least +-Go 1.21.0 and you have `GOTOOLCHAIN=auto` set (the default), the `go` command +-will automatically download the new Go toolchain as needed, similar to +-upgrading a module dependency. +- +-### Go command compatibility: the 2 most recent major Go versions +- +-The gopls@v0.17.x releases will be the final versions of gopls to nominally +-support integrating with more than the 2 most recent Go releases. In the past, +-we implied "best effort" support for up to 4 versions, though in practice we +-did not have resources to fix bugs that were present only with older Go +-versions. With gopls@v0.17.0, we narrowed this best effort support to 3 +-versions, primarily because users need at least Go 1.21 to benefit from +-automatic toolchain upgrades (see above). +- +-Starting with gopls@v0.18.0, we will officially support integrating with only +-the 2 most recent major versions of the `go` command. This is consistent with +-the Go support policy. See golang/go#69321 (or [this +-comment](https://github.com/golang/go/issues/69321#issuecomment-2344996677) +-specifically) for details. +- +-We won't prevent gopls from being used with older Go versions (just as we +-don't disallow integration with arbitrary +-[`go/packages`](https://pkg.go.dev/golang.org/x/tools/go/packages) drivers), +-but we won't run integration tests against older Go versions, and won't fix +-bugs that are only present when used with old Go versions. +- +-## Configuration Changes +- +-- The `fieldalignment` analyzer, previously disabled by default, has +- been removed: it is redundant with the hover size/offset information +- displayed by v0.16.0 and its diagnostics were confusing. +-- The `undeclaredname` analyzer has been replaced with an ordinary code action. +-- The kind (identifiers) of all of gopls' code actions have changed +- to use more specific hierarchical names. 
For example, "Inline call" +- has changed from `refactor.inline` to `refactor.inline.call`. +- This allows clients to request particular code actions more precisely. +- The user manual now includes the identifier in the documentation for each code action. +-- The experimental `allowImplicitNetworkAccess` setting is removed, following +- its deprecation in gopls@v0.16.0. See golang/go#66861 for details. +- +-## New features +- +-### Refactoring +- +-This release contains a number of new features related to refactoring. +-Additionally, it fixes [many +-bugs](https://github.com/golang/go/issues?q=is%3Aissue+milestone%3Agopls%2Fv0.17.0+label%3ARefactoring+is%3Aclosed) +-in existing refactoring operations, primarily related to **extract**, and **inline**. +- +-These improvements move us toward a longer term goal of offering a more robust +-and complete set of refactoring tools. We still have [much to +-do](https://github.com/golang/go/issues?q=is%3Aissue+label%3Agopls+label%3ARefactoring+is%3Aopen+), +-and this effort will continue into 2025. +- +-#### Move parameter refactorings +- +-Gopls now offers code actions to move function and method parameters left or +-right in the function signature, updating all callers. +- +-Unfortunately, there is no native LSP operation that provides a good user +-interface for arbitrary "change signature" refactoring. We plan to build such +-an interface within VS Code. In the short term, we have made it possible to +-express more complicated parameter transformations by invoking 'rename' on the +-'func' keyword. This user interface is a temporary stop-gap until a better +-mechanism is available for LSP commands that enable client-side dialogs. +- +-#### Extract declarations to new file +- +-Gopls now offers another code action, +-"Extract declarations to new file" (`refactor.extract.toNewFile`), +-which moves selected code sections to a newly created file within the +-same package. The created filename is chosen as the first {function, type, +-const, var} name encountered. In addition, import declarations are added or +-removed as needed. +- +-The user can invoke this code action by selecting a function name, the keywords +-`func`, `const`, `var`, `type`, or by placing the caret on them without selecting, +-or by selecting a whole declaration or multiple declarations. +- +-In order to avoid ambiguity and surprise about what to extract, some kinds +-of partial selection of a declaration cannot invoke this code action. +- +-#### Extract constant +- +-When the selection is a constant expression, gopls now offers "Extract +-constant" instead of "Extract variable", and generates a `const` +-declaration instead of a local variable. +- +-Also, extraction of a constant or variable now works at top-level, +-outside of any function. +- +-#### Generate missing method from function call +- +-When you attempt to call a method on a type that lacks that method, the +-compiler will report an error like “type T has no field or method f”. Gopls now +-offers a new code action, “Declare missing method of T.f”, where T is the +-concrete type and f is the undefined method. The stub method's signature is +-inferred from the context of the call. +- +-#### Generate a test for a function or method +- +-If the selected chunk of code is part of a function or method declaration F, +-gopls will offer the "Add test for F" code action, which adds a new test for the +-selected function in the corresponding `_test.go` file. 
The generated test takes +-into account its signature, including input parameters and results. +- +-Since this feature is implemented by the server (gopls), it is compatible with +-all LSP-compliant editors. VS Code users may continue to use the client-side +-`Go: Generate Unit Tests For file/function/package` command, which runs the +-[gotests](https://github.com/cweill/gotests) tool. +- +-### Initial support for pull diagnostics +- +-When initialized with the option `"pullDiagnostics": true`, gopls will advertise support for the +-`textDocument.diagnostic` +-[client capability](https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#textDocument_pullDiagnostics), +-which allows editors to request diagnostics directly from gopls using a +-`textDocument/diagnostic` request, rather than wait for a +-`textDocument/publishDiagnostics` notification. This feature is off by default +-until the feature set of pull diagnostics is comparable to push diagnostics. +- +-### Hover improvements +- +-The `textDocument/hover` response has slightly tweaked markdown rendering, and +-includes the following additional information: +- +-- Hovering over a standard library symbol now displays information about the +- first Go release containing the symbol. For example, hovering over +- `errors.As` shows "Added in go1.13". +-- Hovering over the package name in a package declaration includes additional +- package metadata. +- +-### Semantic token modifiers of top-level constructor of types +- +-The semantic tokens response now includes additional modifiers for the top-level +-constructor of the type of each symbol: +-`interface`, `struct`, `signature`, `pointer`, `array`, `map`, `slice`, `chan`, `string`, `number`, `bool`, and `invalid`. +-Editors may use this for syntax coloring. +- +-### SignatureHelp for ident and values. +- +-Now, function signature help can be used on any identifier with a function +-signature, not just within the parentheses of a function being called. +- +-### Jump to assembly definition +- +-A Definition query on a reference to a function jumps to the +-function's Go `func` declaration. If the function is implemented in C +-or assembly, the function has no body. Executing a second Definition +-query (while already at the Go declaration) will navigate you to the +-assembly implementation. +- +-### `yield` analyzer +- +-The new `yield` analyzer detects mistakes using the `yield` function +-in a Go 1.23 iterator, such as failure to check its boolean result and +-break out of a loop. +- +-### `waitgroup` analyzer +- +-The new `waitgroup` analyzer detects calls to the `Add` method of +-`sync.WaitGroup` that are (mistakenly) made within the new goroutine, +-causing `Add` to race with `Wait`. +-(This check is equivalent to +-[staticcheck's SA2000](https://staticcheck.dev/docs/checks#SA2000), +-but is enabled by default.) +diff -urN a/gopls/doc/release/v0.18.0.md b/gopls/doc/release/v0.18.0.md +--- a/gopls/doc/release/v0.18.0.md 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/doc/release/v0.18.0.md 1969-12-31 18:00:00.000000000 -0600 +@@ -1,184 +0,0 @@ +---- +-title: "Gopls release v0.18.0" +---- +- +-## Configuration Changes +- +- +- +-- The experimental `Structured` value for the `hoverKind` option is no longer +- supported. +- +-- The `gc_details` code lens has been deleted. (It was previously disabled by +- default.) 
This functionality is now available through the +- `toggleCompilerOptDetails` code action (documented below), as code +- actions are better supported than code lenses across a range of clients. +- +- VS Code's special "Go: Toggle GC details" command continues to work. +- +-- The experimental `semanticTokenTypes` and `semanticTokenModifiers` options +- allow selectively disabling certain types of tokens or token modifiers in +- `textDocument/semanticTokens` responses. +- +- These options supersede the `noSemanticString` and `noSemanticTokenNumber` +- options, which are now deprecated. Users can instead set +- `"semanticTokenTypes": {"string": false, "number": false}` to achieve the +- same result. For now, gopls still honors `noSemanticTokenString` and +- `noSemanticToken`, but will stop supporting them in a future release. +- +-- The new `workspaceFiles` option allows configuring glob patterns matching +- files that define the logical build of the workspace. This option is only +- needed in environments that use a custom golang.org/x/tools/go/packages +- driver. +- +-## New features +- +-### "{Show,Hide} compiler optimization details" code action +- +-This code action, accessible through the "Source Action" menu in VS +-Code, toggles a per-directory flag that causes Go compiler optimization +-details to be reported as diagnostics. For example, it indicates which +-variables escape to the heap, and which array accesses require bounds +-checks. +- +-TODO: add links to the complete manual for each item. +- +-### New `modernize` analyzer +- +-Gopls now reports when code could be simplified or clarified by +-using more modern features of Go, and provides a quick fix to apply +-the change. +- +-For example, a conditional assignment using an if/else statement may +-be replaced by a call to the `min` or `max` built-in functions added +-in Go 1.18. +- +-Use this command to apply modernization fixes en masse: +-``` +-$ go run golang.org/x/tools/go/analysis/passes/modernize/cmd/modernize@latest -fix ./... +-``` +- +-### New `unusedfunc` analyzer +- +-Gopls now reports unused functions and methods, giving you near +-real-time feedback about dead code that may be safely deleted. +-Because the analysis is local to each package, only unexported +-functions and methods are candidates. +-(For a more precise analysis that may report unused exported +-functions too, use the `golang.org/x/tools/cmd/deadcode` command.) +- +-### New `hostport` analyzer +- +-With the growing use of IPv6, forming a "host:port" string using +-`fmt.Sprintf("%s:%d")` is no longer appropriate because host names may +-contain colons. Gopls now reports places where a string constructed in +-this fashion (or with `%s` for the port) is passed to `net.Dial` or a +-related function, and offers a fix to use `net.JoinHostPort` +-instead. +- +-### Other analyzer changes +- +-- The `unusedvariable` quickfix is now on by default. +-- The `unusedparams` analyzer no longer reports finding for generated files. +- +-### New `gofix` analyzer +- +-Gopls now reports when a function call or a use of a constant should be inlined. +-These diagnostics and the associated code actions are triggered by "//go:fix inline" +-directives at the function and constant definitions. +-(See [the go:fix proposal](https://go.dev/issue/32816).) +- +-For example, consider a package `intmath` with a function `Square(int) int`. +-Later the more general `Pow(int, int) int` is introduced, and `Square` is deprecated +-in favor of calling `Pow` with a second argument of 2. 
The author of `intmath` +-can write this: +-``` +-//go:fix inline +-func Square(x int) int { return Pow(x, 2) } +-``` +-If gopls sees a call to `intmath.Square` in your code, it will suggest inlining +-it, and will offer a code action to do so. +- +-The same feature works for constants. +-With a constant definition like this: +-``` +-//go:fix inline +-const Ptr = Pointer +-``` +-gopls will suggest replacing `Ptr` in your code with `Pointer`. +- +-Use this command to apply such fixes en masse: +- +-``` +-$ go run golang.org/x/tools/go/analysis/passes/inline/cmd/inline@latest -fix ./... +-``` +- +-### "Implementations" supports generics +- +-At long last, the "Go to Implementations" feature now fully supports +-generic types and functions (#59224). +- +-For example, invoking the feature on the interface method `Stack.Push` +-below will report the concrete method `C[T].Push`, and vice versa. +- +-```go +-package p +- +-type Stack[T any] interface { +- Push(T) error +- Pop() (T, bool) +-} +- +-type C[T any] struct{} +- +-func (C[T]) Push(t T) error { ... } +-func (C[T]) Pop() (T, bool) { ... } +- +-var _ Stack[int] = C[int]{} +-``` +- +-### Extract all occurrences of the same expression under selection +- +-When you have multiple instances of the same expression in a function, +-you can use this code action to extract it into a variable. +-All occurrences of the expression will be replaced with a reference to the new variable. +- +-### Improvements to "Definition" +- +-The Definition query now supports additional locations: +- +-- When invoked on a return statement, it reports the location +- of the function's result variables. +-- When invoked on a break, goto, or continue statement, it reports +- the location of the label, the closing brace of the relevant +- block statement, or the start of the relevant loop, respectively. +- +-### Improvements to "Hover" +- +-When invoked on a return statement, hover reports the types of +-the function's result variables. +- +-### UX improvements to format strings +- +-#### "DocumentHighlight" +- +-When your cursor is inside a printf-like function, gopls now highlights the relationship between +-formatting verbs and arguments as visual cues to differentiate how operands are used in the format string. +- +-```go +-fmt.Printf("Hello %s, you scored %d", name, score) +-``` +- +-If the cursor is either on `%s` or `name`, gopls will highlight `%s` as a write operation, +-and `name` as a read operation. +- +-#### "SemanticHighlight" +- +-Similar to the improvements to DocumentHighlight, gopls also reports formatting verbs +-as "format" modifier for token type "string" to better distinguish them with other parts of the format string. +- +-```go +-fmt.Printf("Hello %s, you scored %d", name, score) +-``` +- +-`%s` and `%d` will have token type "string" and modifier "format". +diff -urN a/gopls/doc/release/v0.19.0.md b/gopls/doc/release/v0.19.0.md +--- a/gopls/doc/release/v0.19.0.md 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/doc/release/v0.19.0.md 1969-12-31 18:00:00.000000000 -0600 +@@ -1,249 +0,0 @@ +---- +-title: "Gopls release v0.19.0" +---- +- +-## Configuration Changes +- +-- The `gopls check` subcommand now accepts a `-severity` flag to set a minimum +- severity for the diagnostics it reports. By default, the minimum severity +- is "warning", so `gopls check` may report fewer diagnostics than before. Set +- `-severity=hint` to reproduce the previous behavior. 
+- +-## Navigation features +- +-### "Implementations" supports signature types (within same package) +- +- +- +-The Implementations query reports the correspondence between abstract +-and concrete types and their methods based on their method sets. +-Now, it also reports the correspondence between function types, +-dynamic function calls, and function definitions, based on their signatures. +- +-To use it, invoke an Implementations query on the `func` token of the +-definition of a named function, named method, or function literal. +-Gopls reports the set of function signature types that abstract this +-function, and the set of dynamic calls through values of such types. +- +-Conversely, an Implementations query on the `func` token of a +-signature type, or on the `(` paren of a dynamic function call, +-reports the set of concrete functions that the signature abstracts +-or that the call dispatches to. +- +-Since a type may be both a function type and a named type with methods +-(for example, `http.HandlerFunc`), it may participate in both kinds of +-Implements queries (method-sets and function signatures). +-Queries using method-sets should be invoked on the type or method name, +-and queries using signatures should be invoked on a `func` or `(` token. +- +-Only the local (same-package) algorithm is currently supported. +-(https://go.dev/issue/56572 tracks the global algorithm.) +- +-### "Go to Implementation" reports interface-to-interface relations +- +- +- +-The "Go to Implementation" operation now reports relationships between +-interfaces. Gopls now uses the concreteness of the query type to +-determine whether a query is "downwards" (from an interface to the +-types that implement it) or "upwards" (from a concrete type to the +-interfaces to which it may be assigned). So, for example: +- +-- `implementation(io.Reader)` subinterfaces such as `io.ReadCloser`, +- and concrete implementations such as `*os.File`. +- +-- `implementation(os.File)` includes only interfaces, such as +- `io.Reader` and `io.ReadCloser`. +- +-To request an "upwards" query starting from an interface, for example +-to find the superinterfaces of `io.ReadCloser`, use the Type Hierarchy +-feature described below. +-(See https://github.com/microsoft/language-server-protocol/issues/2037.) +- +-### Support for Type Hierarchy +- +- +- +-Gopls now implements the three LSP methods related to the Type +-Hierarchy viewer: `textDocument/prepareTypeHierarchy`, +-`typeHierarchy/supertypes`, `typeHierarchy/subtypes`. +- +-In VS Code, select "Show Type Hierarchy" from the context menu +-to see a tree widget displaying all the supertypes or subtypes +-of the selected named type. +- +- +- +- +- +- +-## Editing features +- +-### Completion: auto-complete package clause for new Go files +- +-Gopls now automatically adds the appropriate `package` clause to newly created Go files, +-so that you can immediately get started writing the interesting part. +- +-It requires client support for `workspace/didCreateFiles` +- +-### New GOMODCACHE index for faster Organize Imports and unimported completions +- +-By default, gopls now builds and maintains a persistent index of +-packages in the module cache (GOMODCACHE). The operations of Organize +-Imports and completion of symbols from unimported pacakges are an +-order of magnitude faster. +- +-To revert to the old behavior, set the `importsSource` option (whose +-new default is `"gopls"`) to `"goimports"`. 
Users who don't want the +-module cache used at all for imports or completions can change the +-option to "off". +- +-## Analysis features +- +-### Most `staticcheck` analyzers are enabled by default +- +-Slightly more than half of the analyzers in the +-[Staticcheck](https://staticcheck.dev/docs/checks) suite are now +-enabled by default. This subset has been chosen for precision and +-efficiency. +- +-Previously, Staticcheck analyzers (all of them) would be run only if +-the experimental `staticcheck` boolean option was set to `true`. This +-value continues to enable the complete set, and a value of `false` +-continues to disable the complete set. Leaving the option unspecified +-enables the preferred subset of analyzers. +- +-Staticcheck analyzers, like all other analyzers, can be explicitly +-enabled or disabled using the `analyzers` configuration setting; this +-setting now takes precedence over the `staticcheck` setting, so, +-regardless of what value of `staticcheck` you use (true/false/unset), +-you can make adjustments to your preferred set of analyzers. +- +-### `recursiveiter`: "inefficient recursive iterator" +- +-A common pitfall when writing a function that returns an iterator +-(`iter.Seq`) for a recursive data type is to recursively call the +-function from its own implementation, leading to a stack of nested +-coroutines, which is inefficient. +- +-The new `recursiveiter` analyzer detects such mistakes; see +-[its documentation](https://golang.org/x/tools/gopls/internal/analysis/recursiveiter) +-for details, including tips on how to define simple and efficient +-recursive iterators. +- +-### `maprange`: "inefficient range over maps.Keys/Values" +- +-The new `maprange` analyzer detects redundant calls to `maps.Keys` or +-`maps.Values` as the operand of a range loop; maps can of course be +-ranged over directly. See +-[its documentation](https://pkg.go.dev/golang.org/x/tools/gopls/internal/analysis/maprange) +-for details). +- +-## Code transformation features +- +-### Rename method receivers +- +- +- +-The Rename operation, when applied to the declaration of a method +-receiver, now also attempts to rename the receivers of all other +-methods associated with the same named type. Each other receiver that +-cannot be fully renamed is quietly skipped. +- +-Renaming a _use_ of a method receiver continues to affect only that +-variable. +- +-```go +-type Counter struct { x int } +- +- Rename here to affect only this method +- ↓ +-func (c *Counter) Inc() { c.x++ } +-func (c *Counter) Dec() { c.x++ } +- ↑ +- Rename here to affect all methods +-``` +- +-### "Eliminate dot import" code action +- +- +- +-This code action, available on a dotted import, will offer to replace +-the import with a regular one and qualify each use of the package +-with its name. +- +-### Add/remove tags from struct fields +- +-Gopls now provides two new code actions, available on an entire struct +-or some of its fields, that allow you to add and remove struct tags. +-It adds only 'json' tags with a snakecase naming format, or clears all +-tags within the selection. +- +-Add tags example: +-```go +-type Info struct { +- LinkTarget string -> LinkTarget string `json:"link_target"` +- ... +-} +-``` +- +-### Inline local variable +- +- +- +-The new `refactor.inline.variable` code action replaces a reference to +-a local variable by that variable's initializer expression. 
For
+-example, when applied to `s` in `println(s)`:
+-
+-```go
+-func f(x int) {
+-	s := fmt.Sprintf("+%d", x)
+-	println(s)
+-}
+-```
+-it transforms the code to:
+-```go
+-func f(x int) {
+-	s := fmt.Sprintf("+%d", x)
+-	println(fmt.Sprintf("+%d", x))
+-}
+-```
+-
+-Only a single reference is replaced; issue https://go.dev/issue/70085
+-tracks the feature to "inline all" uses of the variable and eliminate
+-it.
+-
+-
+-## Thank you to our contributors!
+-
+-[@acehinnnqru](https://github.com/acehinnnqru)
+-[@adonovan](https://github.com/adonovan)
+-[@albfan](https://github.com/albfan)
+-[@aarzilli](https://github.com/aarzilli)
+-[@ashurbekovz](https://github.com/ashurbekovz)
+-[@cuonglm](https://github.com/cuonglm)
+-[@dmitshur](https://github.com/dmitshur)
+-[@neild](https://github.com/neild)
+-[@egonelbre](https://github.com/egonelbre)
+-[@shashank-priyadarshi](https://github.com/shashank-priyadarshi)
+-[@firelizzard18](https://github.com/firelizzard18)
+-[@gopherbot](https://github.com/gopherbot)
+-[@h9jiang](https://github.com/h9jiang)
+-[@cuishuang](https://github.com/cuishuang)
+-[@jakebailey](https://github.com/jakebailey)
+-[@jba](https://github.com/jba)
+-[@madelinekalil](https://github.com/madelinekalil)
+-[@karamaru-alpha](https://github.com/karamaru-alpha)
+-[@danztran](https://github.com/danztran)
+-[@nsrip-dd](https://github.com/nsrip-dd)
+-[@pjweinb](https://github.com/pjweinb)
+-[@findleyr](https://github.com/findleyr)
+-[@samthanawalla](https://github.com/samthanawalla)
+-[@seankhliao](https://github.com/seankhliao)
+-[@tklauser](https://github.com/tklauser)
+-[@vikblom](https://github.com/vikblom)
+-[@kwjw](https://github.com/kwjw)
+-[@xieyuschen](https://github.com/xieyuschen)
+diff -urN a/gopls/doc/release/v0.20.0.md b/gopls/doc/release/v0.20.0.md
+--- a/gopls/doc/release/v0.20.0.md	2000-01-01 00:00:00.000000000 -0000
++++ b/gopls/doc/release/v0.20.0.md	1969-12-31 18:00:00.000000000 -0600
+@@ -1,99 +0,0 @@
+----
+-title: "Gopls release v0.20.0"
+----
+-
+-This release contains a new experimental Model Context Protocol (MCP)
+-server for gopls, which may be used to integrate a subset of gopls'
+-features in AI-assisted environments.
+-
+-Gopls' documentation is now available on the Go project's website at
+-https://go.dev/gopls. (This link reflects the latest gopls release;
+-use https://tip.golang.org/gopls to see docs for the latest commit.)
+-Unlike Markdown files in GitHub, these pages are crawled by Google's
+-web search index.
+-
+-## Configuration changes
+-
+-This release enables by default the new persistent index of packages
+-in the module cache. This was first attempted in [v0.19](./v0.19.0.md) but reverted
+-due to problems that have since been fixed.
+-
+-## Web-based features
+-
+-### "Split package" tool
+-
+-The `source.splitPackage` code action opens a web-based tool that
+-helps you split a package into two or more components whose
+-dependencies are acyclic.
+-
+-To use it, name a set of components, assign each declaration to a
+-component, then visualize the dependencies among the components
+-(including whether they form a cycle).
+-Refresh the page each time you edit your code to see the latest
+-information.
+-

+- +-The tool makes it easy to iterate over potential decompositions +-until you find one you are happy with. A future version of +-the tool will automate the code transformation, but for now +-you must do that step by hand. +- +- +-## Editing features +- +-### Model Context Protocol server +- +-Gopls now includes an experimental built-in server for the Model Context +-Protocol (MCP), allowing it to expose a subset of its functionality to +-AI assistants in the form of MCP tools. +- +-See the [documentation](../features/mcp.md) for more information. +- +-**Caveats:** This is a brand new mode of operation for gopls, and so we're +-still experimenting with the best set of tools and instructions to provide. +-Please let us know how well it works for you. Also, please be aware that +-allowing LLMs to execute operations in your workspace entails additional +-security considerations, as discussed in the documentation above. +- +-## Analysis features +- +-### `ignoredError` inlay hint +- +-The new `ignoredError` inlay hint helps catch mistakenly discarded +-errors. It inserts an `// ignore error` hint following any statement +-that is a function call whose error result is implicitly ignored. For +-example, this code: +- +-```go +-f.Close() +-``` +-will appear as: +-```go +-f.Close() // ignore error +-``` +- +-To suppress the hint, write an actual comment containing `ignore +-error` following the call statement, or explictly assign the result +-to a blank `_` variable. A handful of common functions such as +-`fmt.Println` are excluded from the check. +- +-Enable it using this configuration: `{"hints": {"ignoredError": true}}`. +- +-### `unusedfunc` reports unused `type`, `var`, and `const` declarations too +- +- +- +-The +-[unusedfunc](https://pkg.go.dev/golang.org/x/tools/gopls/internal/analysis/unusedfunc/) +-analyzer now reports any unexported types, variables, and constants +-that are unreferenced within their declaring package. +-(The problem of unreferenced exported symbols can't be expressed in +-the analysis framework since it depends on the entire workspace.) +- +-## Code transformation features +- +- +-The Rename operation now allows you to rename an embedded field, such +-as T in `struct{ T }`, so long as the operation is requested at the +-field declaration (T). Both the field and its type will be renamed. +diff -urN a/gopls/doc/release/v0.21.0.md b/gopls/doc/release/v0.21.0.md +--- a/gopls/doc/release/v0.21.0.md 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/doc/release/v0.21.0.md 1969-12-31 18:00:00.000000000 -0600 +@@ -1,76 +0,0 @@ +---- +-title: "Gopls release v0.21.0 (forthcoming)" +---- +- +-## Configuration changes +- +-- The new `newGoFileHeader` option allows toggling automatic insertion of the copyright comment +- and package declaration in a newly created Go file. +- +-## Web-based features +-## Editing features +-## Analysis features +- +-### `reflecttypefor` analyzer +- +- +- +-The new `reflecttypefor` modernizer simplifies calls to +-`reflect.TypeOf` to use `reflect.TypeFor` when the runtime type is +-known at compile time. For example, `reflect.TypeOf(uint32(0))` +-becomes `reflect.TypeFor[uint32]()`. 
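+-
+-The following sketch is not part of the original note; it simply spells
+-out the rewrite above, assuming Go 1.22 or later (when `reflect.TypeFor`
+-was added). Both forms yield the same `reflect.Type` value; the newer
+-one just names the type directly instead of constructing a throwaway
+-value.
+-
+-```go
+-package main
+-
+-import (
+-	"fmt"
+-	"reflect"
+-)
+-
+-func main() {
+-	// Before: build a uint32 value only to ask for its type.
+-	before := reflect.TypeOf(uint32(0))
+-
+-	// After (the form the reflecttypefor modernizer suggests):
+-	// name the type directly, no value needed.
+-	after := reflect.TypeFor[uint32]()
+-
+-	fmt.Println(before == after, after) // true uint32
+-}
+-```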
+-
+-### `newexpr` analyzer
+-
+-
+-
+-The `newexpr` modernizer finds declarations of and calls to functions
+-of this form:
+-```go
+-func varOf(x int) *int { return &x }
+-
+-use(varOf(123))
+-```
+-so that they are transformed to:
+-```go
+-//go:fix inline
+-func varOf(x int) *int { return new(x) }
+-
+-use(new(123))
+-```
+-(Such wrapper functions are widely used in serialization packages,
+-for instance the proto.{Int64,String,Bool} helpers used with
+-protobufs.)
+-
+-### `iterators` analyzer
+-
+-
+-
+-The `iterators` modernizer replaces loops of this form,
+-
+-    for i := 0; i < x.Len(); i++ {
+-        use(x.At(i))
+-    }
+-
+-or their "range x.Len()" equivalent, by
+-
+-    for elem := range x.All() {
+-        use(elem)
+-    }
+-
+-for various types in the standard library that now offer an
+-iterator-based API.
+-
+-## Code transformation features
+-
+-
+-The Rename operation now treats [Doc Links](https://tip.golang.org/doc/comment#doclinks)
+-like identifiers, so you can initiate a renaming from a Doc Link.
+-
+-
+diff -urN a/gopls/doc/settings.md b/gopls/doc/settings.md
+--- a/gopls/doc/settings.md	2000-01-01 00:00:00.000000000 -0000
++++ b/gopls/doc/settings.md	1969-12-31 18:00:00.000000000 -0600
+@@ -1,635 +0,0 @@
+----
+-title: "Gopls: Settings"
+----
+-
+-This document describes gopls' configuration settings.
+-
+-Gopls settings are defined by a JSON object whose valid fields are
+-described below. These fields are gopls-specific, and generic LSP
+-clients have no knowledge of them.
+-
+-Different clients present configuration settings in their user
+-interfaces in a wide variety of ways.
+-For example, some expect the user to edit the raw JSON object while
+-others use a data structure in the editor's configuration language;
+-still others (such as VS Code) have a graphical configuration system.
+-Be sure to consult the documentation for how to express configuration
+-settings in your client.
+-Some clients also permit settings to be configured differently for
+-each workspace folder.
+-
+-Any settings that are experimental or for debugging purposes are
+-marked as such.
+-
+-
+-
+-
+-
+-
+-* [Build](#build)
+-* [Formatting](#formatting)
+-* [UI](#ui)
+-  * [Completion](#completion)
+-  * [Diagnostic](#diagnostic)
+-  * [Documentation](#documentation)
+-  * [Inlayhint](#inlayhint)
+-  * [Navigation](#navigation)
+-
+-
+-## Build
+-
+-
+-### `buildFlags []string`
+-
+-buildFlags is the set of flags passed on to the build system when invoked.
+-It is applied to queries like `go list`, which is used when discovering files.
+-The most common use is to set `-tags`.
+-
+-Default: `[]`.
+-
+-
+-### `env map[string]string`
+-
+-env adds environment variables to external commands run by `gopls`, most notably `go list`.
+-
+-Default: `{}`.
+-
+-
+-### `directoryFilters []string`
+-
+-directoryFilters can be used to exclude unwanted directories from the
+-workspace. By default, all directories are included. Filters are an
+-operator, `+` to include and `-` to exclude, followed by a path prefix
+-relative to the workspace folder. They are evaluated in order, and
+-the last filter that applies to a path controls whether it is included.
+-The path prefix can be empty, so an initial `-` excludes everything.
+-
+-DirectoryFilters also supports the `**` operator to match 0 or more directories.
+- +-Examples: +- +-Exclude node_modules at current depth: `-node_modules` +- +-Exclude node_modules at any depth: `-**/node_modules` +- +-Include only project_a: `-` (exclude everything), `+project_a` +- +-Include only project_a, but not node_modules inside it: `-`, `+project_a`, `-project_a/node_modules` +- +-Default: `["-**/node_modules"]`. +- +- +-### `templateExtensions []string` +- +-templateExtensions gives the extensions of file names that are treated +-as template files. (The extension +-is the part of the file name after the final dot.) +- +-Default: `[]`. +- +- +-### `memoryMode string` +- +-**This setting is experimental and may be deleted.** +- +-obsolete, no effect +- +-Default: `""`. +- +- +-### `expandWorkspaceToModule bool` +- +-**This setting is experimental and may be deleted.** +- +-expandWorkspaceToModule determines which packages are considered +-"workspace packages" when the workspace is using modules. +- +-Workspace packages affect the scope of workspace-wide operations. Notably, +-gopls diagnoses all packages considered to be part of the workspace after +-every keystroke, so by setting "ExpandWorkspaceToModule" to false, and +-opening a nested workspace directory, you can reduce the amount of work +-gopls has to do to keep your workspace up to date. +- +-Default: `true`. +- +- +-### `standaloneTags []string` +- +-standaloneTags specifies a set of build constraints that identify +-individual Go source files that make up the entire main package of an +-executable. +- +-A common example of standalone main files is the convention of using the +-directive `//go:build ignore` to denote files that are not intended to be +-included in any package, for example because they are invoked directly by +-the developer using `go run`. +- +-Gopls considers a file to be a standalone main file if and only if it has +-package name "main" and has a build directive of the exact form +-"//go:build tag" or "// +build tag", where tag is among the list of tags +-configured by this setting. Notably, if the build constraint is more +-complicated than a simple tag (such as the composite constraint +-`//go:build tag && go1.18`), the file is not considered to be a standalone +-main file. +- +-This setting is only supported when gopls is built with Go 1.16 or later. +- +-Default: `["ignore"]`. +- +- +-### `workspaceFiles []string` +- +-workspaceFiles configures the set of globs that match files defining the +-logical build of the current workspace. Any on-disk changes to any files +-matching a glob specified here will trigger a reload of the workspace. +- +-This setting need only be customized in environments with a custom +-GOPACKAGESDRIVER. +- +-Default: `[]`. +- +- +-## Formatting +- +- +-### `local string` +- +-local is the equivalent of the `goimports -local` flag, which puts +-imports beginning with this string after third-party packages. It should +-be the prefix of the import path whose imports should be grouped +-separately. +- +-It is used when tidying imports (during an LSP Organize +-Imports request) or when inserting new ones (for example, +-during completion); an LSP Formatting request merely sorts the +-existing imports. +- +-Default: `""`. +- +- +-### `gofumpt bool` +- +-gofumpt indicates if we should run gofumpt formatting. +- +-Default: `false`. +- +- +-## UI +- +- +-### `codelenses map[enum]bool` +- +-codelenses overrides the enabled/disabled state of each of gopls' +-sources of [Code Lenses](codelenses.md). +- +-Example Usage: +- +-```json5 +-"gopls": { +-... 
+- "codelenses": { +- "generate": false, // Don't show the `go generate` lens. +- } +-... +-} +-``` +- +-Default: `{"generate":true,"regenerate_cgo":true,"run_govulncheck":true,"tidy":true,"upgrade_dependency":true,"vendor":true}`. +- +- +-### `semanticTokens bool` +- +-**This setting is experimental and may be deleted.** +- +-semanticTokens controls whether the LSP server will send +-semantic tokens to the client. +- +-Default: `false`. +- +- +-### `noSemanticString bool` +- +-**This setting is experimental and may be deleted.** +- +-noSemanticString turns off the sending of the semantic token 'string' +- +-Deprecated: Use SemanticTokenTypes["string"] = false instead. See +-golang/vscode-go#3632 +- +-Default: `false`. +- +- +-### `noSemanticNumber bool` +- +-**This setting is experimental and may be deleted.** +- +-noSemanticNumber turns off the sending of the semantic token 'number' +- +-Deprecated: Use SemanticTokenTypes["number"] = false instead. See +-golang/vscode-go#3632. +- +-Default: `false`. +- +- +-### `semanticTokenTypes map[string]bool` +- +-**This setting is experimental and may be deleted.** +- +-semanticTokenTypes configures the semantic token types. It allows +-disabling types by setting each value to false. +-By default, all types are enabled. +- +-Default: `{}`. +- +- +-### `semanticTokenModifiers map[string]bool` +- +-**This setting is experimental and may be deleted.** +- +-semanticTokenModifiers configures the semantic token modifiers. It allows +-disabling modifiers by setting each value to false. +-By default, all modifiers are enabled. +- +-Default: `{}`. +- +- +-### `newGoFileHeader bool` +- +-newGoFileHeader enables automatic insertion of the copyright comment +-and package declaration in a newly created Go file. +- +-Default: `true`. +- +- +-### `packageMove bool` +- +-**This setting is experimental and may be deleted.** +- +-packageMove enables PrepareRename to send the full package path +-and allows users to move a package via renaming. +- +-Default: `false`. +- +- +-## Completion +- +- +-### `usePlaceholders bool` +- +-placeholders enables placeholders for function parameters or struct +-fields in completion responses. +- +-Default: `false`. +- +- +-### `completionBudget time.Duration` +- +-**This setting is for debugging purposes only.** +- +-completionBudget is the soft latency goal for completion requests. Most +-requests finish in a couple milliseconds, but in some cases deep +-completions can take much longer. As we use up our budget we +-dynamically reduce the search scope to ensure we return timely +-results. Zero means unlimited. +- +-Default: `"100ms"`. +- +- +-### `matcher enum` +- +-**This is an advanced setting and should not be configured by most `gopls` users.** +- +-matcher sets the algorithm that is used when calculating completion +-candidates. +- +-Must be one of: +- +-* `"CaseInsensitive"` +-* `"CaseSensitive"` +-* `"Fuzzy"` +- +-Default: `"Fuzzy"`. +- +- +-### `experimentalPostfixCompletions bool` +- +-**This setting is experimental and may be deleted.** +- +-experimentalPostfixCompletions enables artificial method snippets +-such as "someSlice.sort!". +- +-Default: `true`. +- +- +-### `completeFunctionCalls bool` +- +-completeFunctionCalls enables function call completion. +- +-When completing a statement, or when a function return type matches the +-expected of the expression being completed, completion may suggest call +-expressions (i.e. may include parentheses). +- +-Default: `true`. 
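+-
+-As a hypothetical illustration of `completeFunctionCalls` (this snippet
+-is not part of the settings reference): when the expected type of the
+-expression being completed matches a function's result type, accepting
+-the completion inserts a call expression rather than a bare identifier.
+-
+-```go
+-package main
+-
+-import (
+-	"fmt"
+-	"strconv"
+-)
+-
+-func main() {
+-	n := 42
+-	// With completeFunctionCalls enabled (the default), accepting the
+-	// "Itoa" completion while typing the right-hand side below inserts
+-	// "strconv.Itoa()" with the cursor between the parentheses, because
+-	// the expected type of the expression is string, which matches
+-	// Itoa's result type.
+-	s := strconv.Itoa(n)
+-	fmt.Println(s)
+-}
+-```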
+- +- +-## Diagnostic +- +- +-### `analyses map[string]bool` +- +-analyses specify analyses that the user would like to enable or disable. +-A map of the names of analysis passes that should be enabled/disabled. +-A full list of analyzers that gopls uses can be found in +-[analyzers.md](https://github.com/golang/tools/blob/master/gopls/doc/analyzers.md). +- +-Example Usage: +- +-```json5 +-... +-"analyses": { +- "unreachable": false, // Disable the unreachable analyzer. +- "unusedvariable": true // Enable the unusedvariable analyzer. +-} +-... +-``` +- +-Default: `{}`. +- +- +-### `staticcheck bool` +- +-**This setting is experimental and may be deleted.** +- +-staticcheck configures the default set of analyses staticcheck.io. +-These analyses are documented on +-[Staticcheck's website](https://staticcheck.io/docs/checks/). +- +-The "staticcheck" option has three values: +-- false: disable all staticcheck analyzers +-- true: enable all staticcheck analyzers +-- unset: enable a subset of staticcheck analyzers +- selected by gopls maintainers for runtime efficiency +- and analytic precision. +- +-Regardless of this setting, individual analyzers can be +-selectively enabled or disabled using the `analyses` setting. +- +-Default: `false`. +- +- +-### `staticcheckProvided bool` +- +-**This setting is experimental and may be deleted.** +- +- +-Default: `false`. +- +- +-### `annotations map[enum]bool` +- +-annotations specifies the various kinds of compiler +-optimization details that should be reported as diagnostics +-when enabled for a package by the "Toggle compiler +-optimization details" (`gopls.gc_details`) command. +- +-(Some users care only about one kind of annotation in their +-profiling efforts. More importantly, in large packages, the +-number of annotations can sometimes overwhelm the user +-interface and exceed the per-file diagnostic limit.) +- +-TODO(adonovan): rename this field to CompilerOptDetail. +- +-Each enum must be one of: +- +-* `"bounds"` controls bounds checking diagnostics. +-* `"escape"` controls diagnostics about escape choices. +-* `"inline"` controls diagnostics about inlining choices. +-* `"nil"` controls nil checks. +- +-Default: `{"bounds":true,"escape":true,"inline":true,"nil":true}`. +- +- +-### `vulncheck enum` +- +-**This setting is experimental and may be deleted.** +- +-vulncheck enables vulnerability scanning. +- +-Must be one of: +- +-* `"Imports"`: In Imports mode, `gopls` will report vulnerabilities that affect packages +-directly and indirectly used by the analyzed main module. +-* `"Off"`: Disable vulnerability analysis. +- +-Default: `"Off"`. +- +- +-### `diagnosticsDelay time.Duration` +- +-**This is an advanced setting and should not be configured by most `gopls` users.** +- +-diagnosticsDelay controls the amount of time that gopls waits +-after the most recent file modification before computing deep diagnostics. +-Simple diagnostics (parsing and type-checking) are always run immediately +-on recently modified packages. +- +-This option must be set to a valid duration string, for example `"250ms"`. +- +-Default: `"1s"`. +- +- +-### `diagnosticsTrigger enum` +- +-**This setting is experimental and may be deleted.** +- +-diagnosticsTrigger controls when to run diagnostics. +- +-Must be one of: +- +-* `"Edit"`: Trigger diagnostics on file edit and save. (default) +-* `"Save"`: Trigger diagnostics only on file save. Events like initial workspace load +-or configuration change will still trigger diagnostics. +- +-Default: `"Edit"`. 
+- +- +-### `analysisProgressReporting bool` +- +-analysisProgressReporting controls whether gopls sends progress +-notifications when construction of its index of analysis facts is taking a +-long time. Cancelling these notifications will cancel the indexing task, +-though it will restart after the next change in the workspace. +- +-When a package is opened for the first time and heavyweight analyses such as +-staticcheck are enabled, it can take a while to construct the index of +-analysis facts for all its dependencies. The index is cached in the +-filesystem, so subsequent analysis should be faster. +- +-Default: `true`. +- +- +-## Documentation +- +- +-### `hoverKind enum` +- +-hoverKind controls the information that appears in the hover text. +-SingleLine is intended for use only by authors of editor plugins. +- +-Must be one of: +- +-* `"FullDocumentation"` +-* `"NoDocumentation"` +-* `"SingleLine"` +-* `"Structured"` is a misguided experimental setting that returns a JSON +-hover format. This setting should not be used, as it will be removed in a +-future release of gopls. +-* `"SynopsisDocumentation"` +- +-Default: `"FullDocumentation"`. +- +- +-### `linkTarget string` +- +-linkTarget is the base URL for links to Go package +-documentation returned by LSP operations such as Hover and +-DocumentLinks and in the CodeDescription field of each +-Diagnostic. +- +-It might be one of: +- +-* `"godoc.org"` +-* `"pkg.go.dev"` +- +-If company chooses to use its own `godoc.org`, its address can be used as well. +- +-Modules matching the GOPRIVATE environment variable will not have +-documentation links in hover. +- +-Default: `"pkg.go.dev"`. +- +- +-### `linksInHover enum` +- +-linksInHover controls the presence of documentation links in hover markdown. +- +-Must be one of: +- +-* false: do not show links +-* true: show links to the `linkTarget` domain +-* `"gopls"`: show links to gopls' internal documentation viewer +- +-Default: `true`. +- +- +-## Inlayhint +- +- +-### `hints map[enum]bool` +- +-**This setting is experimental and may be deleted.** +- +-hints specify inlay hints that users want to see. A full list of hints +-that gopls uses can be found in +-[inlayHints.md](https://github.com/golang/tools/blob/master/gopls/doc/inlayHints.md). +- +-Default: `{}`. +- +- +-## Navigation +- +- +-### `importShortcut enum` +- +-importShortcut specifies whether import statements should link to +-documentation or go to definitions. +- +-Must be one of: +- +-* `"Both"` +-* `"Definition"` +-* `"Link"` +- +-Default: `"Both"`. +- +- +-### `symbolMatcher enum` +- +-**This is an advanced setting and should not be configured by most `gopls` users.** +- +-symbolMatcher sets the algorithm that is used when finding workspace symbols. +- +-Must be one of: +- +-* `"CaseInsensitive"` +-* `"CaseSensitive"` +-* `"FastFuzzy"` +-* `"Fuzzy"` +- +-Default: `"FastFuzzy"`. +- +- +-### `symbolStyle enum` +- +-**This is an advanced setting and should not be configured by most `gopls` users.** +- +-symbolStyle controls how symbols are qualified in symbol responses. +- +-Example Usage: +- +-```json5 +-"gopls": { +-... +- "symbolStyle": "Dynamic", +-... +-} +-``` +- +-Must be one of: +- +-* `"Dynamic"` uses whichever qualifier results in the highest scoring +-match for the given symbol query. Here a "qualifier" is any "/" or "." +-delimited suffix of the fully qualified symbol. i.e. "to/pkg.Foo.Field" or +-just "Foo.Field". +-* `"Full"` is fully qualified symbols, i.e. +-"path/to/pkg.Foo.Field". 
+-* `"Package"` is package qualified symbols i.e. +-"pkg.Foo.Field". +- +-Default: `"Dynamic"`. +- +- +-### `symbolScope enum` +- +-symbolScope controls which packages are searched for workspace/symbol +-requests. When the scope is "workspace", gopls searches only workspace +-packages. When the scope is "all", gopls searches all loaded packages, +-including dependencies and the standard library. +- +-Must be one of: +- +-* `"all"` matches symbols in any loaded package, including +-dependencies. +-* `"workspace"` matches symbols in workspace packages only. +- +-Default: `"all"`. +- +- +-### `verboseOutput bool` +- +-**This setting is for debugging purposes only.** +- +-verboseOutput enables additional debug logging. +- +-Default: `false`. +- +- +diff -urN a/gopls/doc/troubleshooting.md b/gopls/doc/troubleshooting.md +--- a/gopls/doc/troubleshooting.md 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/doc/troubleshooting.md 1969-12-31 18:00:00.000000000 -0600 +@@ -1,50 +0,0 @@ +---- +-title: "Gopls: Troubleshooting" +---- +- +-If you suspect that `gopls` is crashing or not working correctly, please follow the troubleshooting steps below. +- +-If `gopls` is using too much memory, please follow the steps under [Memory usage](#debug-memory-usage). +- +-## Steps +- +-VS Code users should follow [their troubleshooting guide](https://github.com/golang/vscode-go/blob/master/docs/troubleshooting.md), which has more a more specific version of these instructions. +- +-1. Verify that your project is in good shape by working with it outside of your editor. Running a command like `go build ./...` in the workspace directory will compile everything. For modules, `go mod tidy` is another good check, though it may modify your `go.mod`. +-1. Check that your editor isn't showing any diagnostics that indicate a problem with your workspace. They may appear as diagnostics on a Go file's package declaration, diagnostics in a go.mod file, or as a status or progress message. Problems in the workspace configuration can cause many different symptoms. See the [workspace setup instructions](workspace.md) for help. +-1. Make sure `gopls` is up to date by following the [installation instructions](index.md#installation), then [restarting gopls](#restart-gopls). +-1. Optionally, [ask for help](#ask-for-help) on Gophers Slack. +-1. Finally, [report the issue](#file-an-issue) to the `gopls` developers. +- +-## Restart `gopls` +- +-`gopls` has no persistent state, so restarting it will fix transient problems. This is good and bad: good, because you can keep working, and bad, because you won't be able to debug the issue until it recurs. +- +-In most cases, closing all your open editors will guarantee that `gopls` is killed and restarted. If you don't want to do that, there may be an editor command you can use to restart only `gopls`. Note that some `vim` configurations keep the server alive for a while after the editor exits; you may need to explicitly kill `gopls` if you use `vim`. +- +-## Ask for help +- +-Gophers Slack has active editor-specific channels like [#emacs](https://gophers.slack.com/archives/C0HKHULEM), [#vim](https://gophers.slack.com/archives/C07GBR52P), and [#vscode](https://gophers.slack.com/archives/C2B4L99RS) that can help debug further. If you're confident the problem is with `gopls`, you can go straight to [#gopls](https://gophers.slack.com/archives/CJZH85XCZ). Invites are [available to everyone](https://invite.slack.golangbridge.org). 
Come prepared with a short description of the issue, and try to be available to answer questions for a while afterward. +- +-## File an issue +- +-We can't diagnose a problem from just a description. When filing an issue, please include as much as possible of the following information: +- +-1. Your editor and any settings you have configured (for example, your VSCode `settings.json` file). +-1. A sample program that reproduces the issue, if possible. +-1. The output of `gopls version` on the command line. +-1. A complete gopls log file from a session where the issue occurred. It should have a `go env for ` log line near the beginning. It's also helpful to tell us the timestamp the problem occurred, so we can find it the log. See the [instructions](#capture-logs) for information on how to capture gopls logs. +- +-Your editor may have a command that fills out some of the necessary information, such as `:GoReportGitHubIssue` in `vim-go`. Otherwise, you can use `gopls bug` on the command line. If neither of those work you can start from scratch directly on the [Go issue tracker](https://github.com/golang/go/issues/new?title=x%2Ftools%2Fgopls%3A%20%3Cfill%20this%20in%3E). +- +-## Capture logs +- +-You may have to change your editor's configuration to pass a `-logfile` flag to gopls. +- +-To increase the level of detail in your logs, start `gopls` with the `-rpc.trace` flag. To start a debug server that will allow you to see profiles and memory usage, start `gopls` with `serve --debug=localhost:6060`. You will then be able to view debug information by navigating to `localhost:6060`. +- +-If you are unsure of how to pass a flag to `gopls` through your editor, please see the [documentation for your editor](index.md#editors). +- +-## Debug memory usage +- +-`gopls` automatically writes out memory debug information when your usage exceeds 1GB. This information can be found in your temporary directory with names like `gopls.1234-5GiB-withnames.zip`. On Windows, your temporary directory will be located at `%TMP%`, and on Unixes, it will be `$TMPDIR`, which is usually `/tmp`. Please [file an issue](#file-an-issue) with this memory debug information attached. If you are uncomfortable sharing the package names of your code, you can share the `-nonames` zip instead, but it's much less useful. +diff -urN a/gopls/doc/workspace.md b/gopls/doc/workspace.md +--- a/gopls/doc/workspace.md 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/doc/workspace.md 1969-12-31 18:00:00.000000000 -0600 +@@ -1,141 +0,0 @@ +---- +-title: "Gopls: Setting up your workspace" +---- +- +-In the language server protocol, a "workspace" consists of a folder along with +-per-folder configuration. Some LSP clients such as VS Code allow configuring +-workspaces explicitly, while others do so automatically by looking for special +-files defining a workspace root (such as a `.git` directory or `go.mod` file). +- +-In order to function, gopls needs a defined scope in which language features +-like references, rename, and implementation should operate. Put differently, +-gopls needs to infer from the LSP workspace which `go build` invocations you +-would use to build your workspace, including the working directory, +-environment, and build flags. +- +-In the past, it could be tricky to set up your workspace so that gopls would +-infer the correct build information. 
It required opening the correct directory +-or using a `go.work` file to tell gopls about the modules you're working on, +-and configuring the correct operating system and architecture in advance. +-When this didn't work as expected, gopls would often fail in mysterious +-ways--the dreaded "No packages found" error. +- +-Starting with gopls v0.15.0, workspace configuration is much simpler, and gopls +-will typically work when you open a Go file anywhere in your workspace. If it +-isn't working for you, or if you want to better understand how gopls models +-your workspace, please read on. +- +-## Workspace builds +- +-Starting with gopls v0.15.0, gopls will guess the builds you are working on +-based on the set of open files. When you open a file in a workspace folder, +-gopls checks whether the file is contained in a module, `go.work` workspace, or +-GOPATH directory, and configures the build accordingly. Additionally, if you +-open a file that is constrained to a different operating system or +-architecture, for example opening `foo_windows.go` when working on Linux, gopls +-will create a scope with `GOOS` and `GOARCH` set to a value that matches the +-file. +- +-For example, suppose we had a repository with three modules: `moda`, `modb`, +-and `modc`, and a `go.work` file using modules `moda` and `modb`. If we open +-the files `moda/a.go`, `modb/b.go`, `moda/a_windows.go`, and `modc/c.go`, gopls +-will automatically create three builds: +- +-![Zero Config gopls](assets/zeroconfig.png) +- +-This allows gopls to _just work_ when you open a Go file, but it does come with +-several caveats: +- +-- It causes gopls to do more work, since it is now tracking three builds +- instead of one. However, the recent +- [scalability redesign](https://go.dev/blog/gopls-scalability) +- allows much of this work to be avoided through efficient caching. +-- For operations invoked from a given file, such as "References" +- or "Implementations", gopls executes the operation in +- _the default build for that file_. For example, finding references to +- a symbol `S` from `foo_linux.go` will return references from the Linux build, +- and finding references to the same symbol `S` from `foo_windows.go` will +- return references from the Windows build. Gopls searches the default build +- for the file, but it doesn't search all the other possible builds (even +- though that would be nice) because it is liable to be too expensive. +- Issues [#65757](https://go.dev/issue/65757) and +- [#65755](https://go.dev/issue/65755) propose improvements to this behavior. +-- When selecting a `GOOS/GOARCH` combination to match a build-constrained file, +- gopls will choose the first matching combination from +- [this list](https://cs.opensource.google/go/x/tools/+/master:gopls/internal/cache/port.go;l=30;drc=f872b3d6f05822d290bc7bdd29db090fd9d89f5c). +- In some cases, that may be surprising. +-- When working in a `GOOS/GOARCH` constrained file that does not match your +- default toolchain, `CGO_ENABLED=0` is implicitly set, since a C toolchain for +- that target is unlikely to be available. This means that gopls will not +- work in files including `import "C"`. Issue +- [#65758](https://go.dev/issue/65758) may lead to improvements in this +- behavior. +-- Gopls is currently unable to guess build flags that include arbitrary +- user-defined build constraints, such as a file with the build directive +- `//go:build mytag`. 
Issue [#65089](https://go.dev/issue/65089) proposes +- a heuristic by which gopls could handle this automatically. +- +-Please provide feedback on this behavior by upvoting or commenting the issues +-mentioned above, or opening a [new issue](https://go.dev/issue/new) for other +-improvements you'd like to see. +- +-## When to use a `go.work` file for development +- +-Starting with Go 1.18, the `go` command has built-in support for multi-module +-workspaces specified by [`go.work`](https://go.dev/ref/mod#workspaces) files. +-Gopls will recognize these files if they are present in your workspace. +- +-Use a `go.work` file when: +- +-- you want to work on multiple modules simultaneously in a single logical +- build, for example if you want changes to one module to be reflected in +- another. +-- you want to improve gopls' memory usage or performance by reducing the number +- of builds it must track. +-- you want gopls to know which modules you are working on in a multi-module +- workspace, without opening any files. For example, it may be convenient to use +- `workspace/symbol` queries before any files are open. +-- you are using gopls v0.14.2 or earlier, and want to work on multiple +- modules. +- +-For example, suppose this repo is checked out into the `$WORK/tools` directory, +-and [`x/mod`](https://pkg.go.dev/golang.org/x/mod) is checked out into +-`$WORK/mod`, and you are working on a new `x/mod` API for editing `go.mod` +-files that you want to simultaneously integrate into gopls. +- +-You can work on both `golang.org/x/tools/gopls` and `golang.org/x/mod` +-simultaneously by creating a `go.work` file: +- +-```sh +-cd $WORK +-go work init +-go work use tools/gopls mod +-``` +- +-then opening the `$WORK` directory in your editor. +- +-## When to manually configure `GOOS`, `GOARCH`, or `-tags` +- +-As described in the first section, gopls v0.15.0 and later will try to +-configure a new build scope automatically when you open a file that doesn't +-match the system default operating system (`GOOS`) or architecture (`GOARCH`). +- +-However, per the caveats listed in that section, this automatic behavior comes +-with limitations. Customize your gopls environment by setting `GOOS` or +-`GOARCH` in your +-[`"build.env"`](https://github.com/golang/tools/blob/master/gopls/doc/settings.md#env) +-or `-tags=...` in your" +-["build.buildFlags"](https://github.com/golang/tools/blob/master/gopls/doc/settings.md#buildflags) +-when: +- +-- You want to modify the default build environment. +-- Gopls is not guessing the `GOOS/GOARCH` combination you want to use for +- cross platform development. +-- You need to work on a file that is constrained by a user-defined build tags, +- such as the build directive `//go:build mytag`. +- +-## GOPATH mode +- +-When opening a directory within a `GOPATH` directory, the workspace scope will +-be just that directory and all directories contained within it. Note that +-opening a large GOPATH directory can make gopls very slow to start. 
+diff -urN a/gopls/go.mod b/gopls/go.mod +--- a/gopls/go.mod 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/go.mod 1969-12-31 18:00:00.000000000 -0600 +@@ -1,37 +0,0 @@ +-module golang.org/x/tools/gopls +- +-go 1.25 +- +-require ( +- github.com/fatih/gomodifytags v1.17.1-0.20250423142747-f3939df9aa3c +- github.com/fsnotify/fsnotify v1.9.0 +- github.com/google/go-cmp v0.7.0 +- github.com/jba/templatecheck v0.7.1 +- github.com/modelcontextprotocol/go-sdk v0.8.0 +- golang.org/x/mod v0.29.0 +- golang.org/x/sync v0.17.0 +- golang.org/x/telemetry v0.0.0-20251008203120-078029d740a8 +- golang.org/x/text v0.30.0 +- golang.org/x/tools v0.37.0 +- golang.org/x/vuln v1.1.4 +- gopkg.in/yaml.v3 v3.0.1 +- honnef.co/go/tools v0.7.0-0.dev.0.20250523013057-bbc2f4dd71ea +- mvdan.cc/gofumpt v0.8.0 +- mvdan.cc/xurls/v2 v2.6.0 +-) +- +-require ( +- github.com/BurntSushi/toml v1.5.0 // indirect +- github.com/fatih/camelcase v1.0.0 // indirect +- github.com/fatih/structtag v1.2.0 // indirect +- github.com/google/jsonschema-go v0.3.0 // indirect +- github.com/google/safehtml v0.1.0 // indirect +- github.com/yosida95/uritemplate/v3 v3.0.2 // indirect +- golang.org/x/exp/typeparams v0.0.0-20251002181428-27f1f14c8bb9 // indirect +- golang.org/x/sys v0.37.0 // indirect +- golang.org/x/tools/go/expect v0.1.1-deprecated // indirect +- golang.org/x/tools/go/packages/packagestest v0.1.1-deprecated // indirect +- gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15 // indirect +-) +- +-replace golang.org/x/tools => ../ +diff -urN a/gopls/go.sum b/gopls/go.sum +--- a/gopls/go.sum 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/go.sum 1969-12-31 18:00:00.000000000 -0600 +@@ -1,102 +0,0 @@ +-github.com/BurntSushi/toml v1.5.0 h1:W5quZX/G/csjUnuI8SUYlsHs9M38FC7znL0lIO+DvMg= +-github.com/BurntSushi/toml v1.5.0/go.mod h1:ukJfTF/6rtPPRCnwkur4qwRxa8vTRFBF0uk2lLoLwho= +-github.com/fatih/camelcase v1.0.0 h1:hxNvNX/xYBp0ovncs8WyWZrOrpBNub/JfaMvbURyft8= +-github.com/fatih/camelcase v1.0.0/go.mod h1:yN2Sb0lFhZJUdVvtELVWefmrXpuZESvPmqwoZc+/fpc= +-github.com/fatih/gomodifytags v1.17.1-0.20250423142747-f3939df9aa3c h1:dDSgAjoOMp8da3egfz0t2S+t8RGOpEmEXZubcGuc0Bg= +-github.com/fatih/gomodifytags v1.17.1-0.20250423142747-f3939df9aa3c/go.mod h1:YVLagR57bBxMai8IAEc7V4E/MWUYi0oUutLrZcTcnI8= +-github.com/fatih/structtag v1.2.0 h1:/OdNE99OxoI/PqaW/SuSK9uxxT3f/tcSZgon/ssNSx4= +-github.com/fatih/structtag v1.2.0/go.mod h1:mBJUNpUnHmRKrKlQQlmCrh5PuhftFbNv8Ys4/aAZl94= +-github.com/fsnotify/fsnotify v1.9.0 h1:2Ml+OJNzbYCTzsxtv8vKSFD9PbJjmhYF14k/jKC7S9k= +-github.com/fsnotify/fsnotify v1.9.0/go.mod h1:8jBTzvmWwFyi3Pb8djgCCO5IBqzKJ/Jwo8TRcHyHii0= +-github.com/go-quicktest/qt v1.101.0 h1:O1K29Txy5P2OK0dGo59b7b0LR6wKfIhttaAhHUyn7eI= +-github.com/go-quicktest/qt v1.101.0/go.mod h1:14Bz/f7NwaXPtdYEgzsx46kqSxVwTbzVZsDC26tQJow= +-github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= +-github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8= +-github.com/google/go-cmp v0.7.0/go.mod h1:pXiqmnSA92OHEEa9HXL2W4E7lf9JzCmGVUdgjX3N/iU= +-github.com/google/jsonschema-go v0.3.0 h1:6AH2TxVNtk3IlvkkhjrtbUc4S8AvO0Xii0DxIygDg+Q= +-github.com/google/jsonschema-go v0.3.0/go.mod h1:r5quNTdLOYEz95Ru18zA0ydNbBuYoo9tgaYcxEYhJVE= +-github.com/google/safehtml v0.1.0 h1:EwLKo8qawTKfsi0orxcQAZzu07cICaBeFMegAU9eaT8= +-github.com/google/safehtml v0.1.0/go.mod h1:L4KWwDsUJdECRAEpZoBn3O64bQaywRscowZjJAzjHnU= +-github.com/jba/templatecheck v0.7.1 h1:yOEIFazBEwzdTPYHZF3Pm81NF1ksxx1+vJncSEwvjKc= +-github.com/jba/templatecheck 
v0.7.1/go.mod h1:n1Etw+Rrw1mDDD8dDRsEKTwMZsJ98EkktgNJC6wLUGo= +-github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE= +-github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk= +-github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= +-github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= +-github.com/modelcontextprotocol/go-sdk v0.8.0 h1:jdsBtGzBLY287WKSIjYovOXAqtJkP+HtFQFKrZd4a6c= +-github.com/modelcontextprotocol/go-sdk v0.8.0/go.mod h1:nYtYQroQ2KQiM0/SbyEPUWQ6xs4B95gJjEalc9AQyOs= +-github.com/rogpeppe/go-internal v1.14.1 h1:UQB4HGPB6osV0SQTLymcB4TgvyWu6ZyliaW0tI/otEQ= +-github.com/rogpeppe/go-internal v1.14.1/go.mod h1:MaRKkUm5W0goXpeCfT7UZI6fk/L7L7so1lCWt35ZSgc= +-github.com/yosida95/uritemplate/v3 v3.0.2 h1:Ed3Oyj9yrmi9087+NczuL5BwkIc4wvTb5zIM+UJPGz4= +-github.com/yosida95/uritemplate/v3 v3.0.2/go.mod h1:ILOh0sOhIJR3+L/8afwt/kE++YT040gmv5BQTMR2HP4= +-github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= +-golang.org/x/crypto v0.19.0/go.mod h1:Iy9bg/ha4yyC70EfRS8jz+B6ybOBKMaSxLj6P6oBDfU= +-golang.org/x/crypto v0.39.0/go.mod h1:L+Xg3Wf6HoL4Bn4238Z6ft6KfEpN0tJGo53AAPC632U= +-golang.org/x/crypto v0.40.0/go.mod h1:Qr1vMER5WyS2dfPHAlsOj01wgLbsyWtFn/aY+5+ZdxY= +-golang.org/x/crypto v0.41.0/go.mod h1:pO5AFd7FA68rFak7rOAGVuygIISepHftHnr8dr6+sUc= +-golang.org/x/crypto v0.42.0/go.mod h1:4+rDnOTJhQCx2q7/j6rAN5XDw8kPjeaXEUR2eL94ix8= +-golang.org/x/crypto v0.43.0/go.mod h1:BFbav4mRNlXJL4wNeejLpWxB7wMbc79PdRGhWKncxR0= +-golang.org/x/exp/typeparams v0.0.0-20251002181428-27f1f14c8bb9 h1:EvjuVHWMoRaAxH402KMgrQpGUjoBy/OWvZjLOqQnwNk= +-golang.org/x/exp/typeparams v0.0.0-20251002181428-27f1f14c8bb9/go.mod h1:4Mzdyp/6jzw9auFDJ3OMF5qksa7UvPnzKqTVGcb04ms= +-golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= +-golang.org/x/mod v0.25.0/go.mod h1:IXM97Txy2VM4PJ3gI61r1YEk/gAj6zAHN3AdZt6S9Ww= +-golang.org/x/mod v0.26.0/go.mod h1:/j6NAhSk8iQ723BGAUyoAcn7SlD7s15Dp9Nd/SfeaFQ= +-golang.org/x/mod v0.27.0/go.mod h1:rWI627Fq0DEoudcK+MBkNkCe0EetEaDSwJJkCcjpazc= +-golang.org/x/mod v0.28.0/go.mod h1:yfB/L0NOf/kmEbXjzCPOx1iK1fRutOydrCMsqRhEBxI= +-golang.org/x/mod v0.29.0 h1:HV8lRxZC4l2cr3Zq1LvtOsi/ThTgWnUk/y64QSs8GwA= +-golang.org/x/mod v0.29.0/go.mod h1:NyhrlYXJ2H4eJiRy/WDBO6HMqZQ6q9nk4JzS3NuCK+w= +-golang.org/x/net v0.10.0/go.mod h1:0qNGK6F8kojg2nk9dLZ2mShWaEBan6FAoqfSigmmuDg= +-golang.org/x/net v0.21.0/go.mod h1:bIjVDfnllIU7BJ2DNgfnXvpSvtn8VRwhlsaeUTyUS44= +-golang.org/x/net v0.41.0/go.mod h1:B/K4NNqkfmg07DQYrbwvSluqCJOOXwUjeb/5lOisjbA= +-golang.org/x/net v0.42.0/go.mod h1:FF1RA5d3u7nAYA4z2TkclSCKh68eSXtiFwcWQpPXdt8= +-golang.org/x/net v0.43.0/go.mod h1:vhO1fvI4dGsIjh73sWfUVjj3N7CA9WkKJNQm2svM6Jg= +-golang.org/x/net v0.45.0/go.mod h1:ECOoLqd5U3Lhyeyo/QDCEVQ4sNgYsqvCZ722XogGieY= +-golang.org/x/net v0.46.0/go.mod h1:Q9BGdFy1y4nkUwiLvT5qtyhAnEHgnQ/zd8PfU6nc210= +-golang.org/x/sync v0.15.0/go.mod h1:1dzgHSNfp02xaA81J2MS99Qcpr2w7fw1gpm99rleRqA= +-golang.org/x/sync v0.16.0/go.mod h1:1dzgHSNfp02xaA81J2MS99Qcpr2w7fw1gpm99rleRqA= +-golang.org/x/sync v0.17.0 h1:l60nONMj9l5drqw6jlhIELNv9I0A4OFgRsG9k2oT9Ug= +-golang.org/x/sync v0.17.0/go.mod h1:9KTHXmSnoGruLpwFjVSX0lNNA75CykiMECbovNTZqGI= +-golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +-golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +-golang.org/x/sys v0.17.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +-golang.org/x/sys v0.33.0/go.mod 
h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k= +-golang.org/x/sys v0.34.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k= +-golang.org/x/sys v0.35.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k= +-golang.org/x/sys v0.36.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks= +-golang.org/x/sys v0.37.0 h1:fdNQudmxPjkdUTPnLn5mdQv7Zwvbvpaxqs831goi9kQ= +-golang.org/x/sys v0.37.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks= +-golang.org/x/telemetry v0.0.0-20251008203120-078029d740a8 h1:LvzTn0GQhWuvKH/kVRS3R3bVAsdQWI7hvfLHGgh9+lU= +-golang.org/x/telemetry v0.0.0-20251008203120-078029d740a8/go.mod h1:Pi4ztBfryZoJEkyFTI5/Ocsu2jXyDr6iSdgJiYE/uwE= +-golang.org/x/term v0.8.0/go.mod h1:xPskH00ivmX89bAKVGSKKtLOWNx2+17Eiy94tnKShWo= +-golang.org/x/term v0.17.0/go.mod h1:lLRBjIVuehSbZlaOtGMbcMncT+aqLLLmKrsjNrUguwk= +-golang.org/x/term v0.32.0/go.mod h1:uZG1FhGx848Sqfsq4/DlJr3xGGsYMu/L5GW4abiaEPQ= +-golang.org/x/term v0.33.0/go.mod h1:s18+ql9tYWp1IfpV9DmCtQDDSRBUjKaw9M1eAv5UeF0= +-golang.org/x/term v0.34.0/go.mod h1:5jC53AEywhIVebHgPVeg0mj8OD3VO9OzclacVrqpaAw= +-golang.org/x/term v0.35.0/go.mod h1:TPGtkTLesOwf2DE8CgVYiZinHAOuy5AYUYT1lENIZnA= +-golang.org/x/term v0.36.0/go.mod h1:Qu394IJq6V6dCBRgwqshf3mPF85AqzYEzofzRdZkWss= +-golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +-golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8= +-golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= +-golang.org/x/text v0.26.0/go.mod h1:QK15LZJUUQVJxhz7wXgxSy/CJaTFjd0G+YLonydOVQA= +-golang.org/x/text v0.27.0/go.mod h1:1D28KMCvyooCX9hBiosv5Tz/+YLxj0j7XhWjpSUF7CU= +-golang.org/x/text v0.28.0/go.mod h1:U8nCwOR8jO/marOQ0QbDiOngZVEBB7MAiitBuMjXiNU= +-golang.org/x/text v0.29.0/go.mod h1:7MhJOA9CD2qZyOKYazxdYMF85OwPdEr9jTtBpO7ydH4= +-golang.org/x/text v0.30.0 h1:yznKA/E9zq54KzlzBEAWn1NXSQ8DIp/NYMy88xJjl4k= +-golang.org/x/text v0.30.0/go.mod h1:yDdHFIX9t+tORqspjENWgzaCVXgk0yYnYuSZ8UzzBVM= +-golang.org/x/tools/go/expect v0.1.1-deprecated h1:jpBZDwmgPhXsKZC6WhL20P4b/wmnpsEAGHaNy0n/rJM= +-golang.org/x/tools/go/expect v0.1.1-deprecated/go.mod h1:eihoPOH+FgIqa3FpoTwguz/bVUSGBlGQU67vpBeOrBY= +-golang.org/x/tools/go/packages/packagestest v0.1.1-deprecated h1:1h2MnaIAIXISqTFKdENegdpAgUXz6NrPEsbIeWaBRvM= +-golang.org/x/tools/go/packages/packagestest v0.1.1-deprecated/go.mod h1:RVAQXBGNv1ib0J382/DPCRS/BPnsGebyM1Gj5VSDpG8= +-golang.org/x/vuln v1.1.4 h1:Ju8QsuyhX3Hk8ma3CesTbO8vfJD9EvUBgHvkxHBzj0I= +-golang.org/x/vuln v1.1.4/go.mod h1:F+45wmU18ym/ca5PLTPLsSzr2KppzswxPP603ldA67s= +-gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +-gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15 h1:YR8cESwS4TdDjEe65xsg0ogRM/Nc3DYOhEAlW+xobZo= +-gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +-gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= +-gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +-honnef.co/go/tools v0.7.0-0.dev.0.20250523013057-bbc2f4dd71ea h1:fj8r9irJSpolAGUdZBxJIRY3lLc4jH2Dt4lwnWyWwpw= +-honnef.co/go/tools v0.7.0-0.dev.0.20250523013057-bbc2f4dd71ea/go.mod h1:EPDDhEZqVHhWuPI5zPAsjU0U7v9xNIWjoOVyZ5ZcniQ= +-mvdan.cc/gofumpt v0.8.0 h1:nZUCeC2ViFaerTcYKstMmfysj6uhQrA2vJe+2vwGU6k= +-mvdan.cc/gofumpt v0.8.0/go.mod h1:vEYnSzyGPmjvFkqJWtXkh79UwPWP9/HMxQdGEXZHjpg= +-mvdan.cc/xurls/v2 v2.6.0 h1:3NTZpeTxYVWNSokW3MKeyVkz/j7uYXYiMtXRUfmjbgI= +-mvdan.cc/xurls/v2 v2.6.0/go.mod 
h1:bCvEZ1XvdA6wDnxY7jPPjEmigDtvtvPXAD/Exa9IMSk= +diff -urN a/gopls/integration/govim/artifacts.go b/gopls/integration/govim/artifacts.go +--- a/gopls/integration/govim/artifacts.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/integration/govim/artifacts.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,67 +0,0 @@ +-// Copyright 2020 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package main +- +-import ( +- "flag" +- "fmt" +- "io" +- "net/http" +- "os" +- "path" +-) +- +-var bucket = flag.String("bucket", "golang-gopls_integration_tests", "GCS bucket holding test artifacts.") +- +-const usage = ` +-artifacts [--bucket=] +- +-Fetch artifacts from an integration test run. Evaluation ID should be extracted +-from the cloud build notification. +- +-In order for this to work, the GCS bucket that artifacts were written to must +-be publicly readable. By default, this fetches from the +-golang-gopls_integration_tests bucket. +-` +- +-func main() { +- flag.Usage = func() { +- fmt.Fprint(flag.CommandLine.Output(), usage) +- } +- flag.Parse() +- if flag.NArg() != 1 { +- flag.Usage() +- os.Exit(2) +- } +- evalID := flag.Arg(0) +- logURL := fmt.Sprintf("https://storage.googleapis.com/%s/log-%s.txt", *bucket, evalID) +- if err := download(logURL); err != nil { +- fmt.Fprintf(os.Stderr, "downloading logs: %v", err) +- } +- tarURL := fmt.Sprintf("https://storage.googleapis.com/%s/govim/%s/artifacts.tar.gz", *bucket, evalID) +- if err := download(tarURL); err != nil { +- fmt.Fprintf(os.Stderr, "downloading artifact tarball: %v", err) +- } +-} +- +-func download(artifactURL string) error { +- name := path.Base(artifactURL) +- resp, err := http.Get(artifactURL) +- if err != nil { +- return fmt.Errorf("fetching from GCS: %v", err) +- } +- defer resp.Body.Close() +- if resp.StatusCode != http.StatusOK { +- return fmt.Errorf("got status code %d from GCS", resp.StatusCode) +- } +- data, err := io.ReadAll(resp.Body) +- if err != nil { +- return fmt.Errorf("reading result: %v", err) +- } +- if err := os.WriteFile(name, data, 0644); err != nil { +- return fmt.Errorf("writing artifact: %v", err) +- } +- return nil +-} +diff -urN a/gopls/integration/govim/cloudbuild.harness.yaml b/gopls/integration/govim/cloudbuild.harness.yaml +--- a/gopls/integration/govim/cloudbuild.harness.yaml 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/integration/govim/cloudbuild.harness.yaml 1969-12-31 18:00:00.000000000 -0600 +@@ -1,21 +0,0 @@ +-# Copyright 2019 The Go Authors. All rights reserved. +-# Use of this source code is governed by a BSD-style +-# license that can be found in the LICENSE file. +- +-# Build the govim test harness that will be used to run govim integration tests +-# for gopls. See README.md for instructions on how to use this. +-steps: +- - name: 'gcr.io/cloud-builders/docker' +- args: ['build', +- # To allow for breaking changes to this test harness, tag with a major +- # version number. +- '-t', 'gcr.io/$PROJECT_ID/govim-harness:latest', +- '-t', 'gcr.io/$PROJECT_ID/govim-harness:3', +- # It is assumed that this build is running from the root directory of the +- # tools repository. +- '-f', 'gopls/integration/govim/Dockerfile', +- # Use the integration test directory as build context: the test harness +- # doesn't actually require any local files. 
+- 'gopls/integration/govim'] +-images: +- - gcr.io/$PROJECT_ID/govim-harness +diff -urN a/gopls/integration/govim/cloudbuild.yaml b/gopls/integration/govim/cloudbuild.yaml +--- a/gopls/integration/govim/cloudbuild.yaml 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/integration/govim/cloudbuild.yaml 1969-12-31 18:00:00.000000000 -0600 +@@ -1,51 +0,0 @@ +-# Copyright 2019 The Go Authors. All rights reserved. +-# Use of this source code is governed by a BSD-style +-# license that can be found in the LICENSE file. +- +-# Build gopls, and run the govim integration tests. See README.md for +-# instructions on how to use this. +- +-substitutions: +- # This bucket must be owned by the GCP project executing the build. If +- # you are running this from your own project, override using --substitutions. +- _RESULT_BUCKET: 'golang-gopls_integration_tests' +- +-steps: +- # Build gopls from source, to use with the govim integration tests. +- - name: 'golang:1.14' +- env: ['GOPROXY=https://proxy.golang.org'] +- dir: 'gopls' +- args: ['go', 'build'] +- +- # Run the tests. Note that the script in this step does not return the exit +- # code from `go test`, but rather saves it for use in the final step after +- # uploading artifacts. +- - name: 'gcr.io/$PROJECT_ID/govim-harness:3' +- dir: '/src/govim' +- volumes: +- - name: artifacts +- path: /artifacts +- env: +- - GOVIM_TESTSCRIPT_WORKDIR_ROOT=/artifacts +- - VIM_FLAVOR=vim +- args: ['/workspace/gopls/integration/govim/run_tests_for_cloudbuild.sh'] +- +- # The govim tests produce a large number of artifacts; tarball/gzip to reduce +- # roundtrips and save space. +- - name: 'ubuntu' +- volumes: +- - name: artifacts +- path: /artifacts +- args: ['tar', '-czf', 'artifacts.tar.gz', '/artifacts'] +- +- # Upload artifacts to GCS. +- - name: 'gcr.io/cloud-builders/gsutil' +- args: ['cp', 'artifacts.tar.gz', 'gs://${_RESULT_BUCKET}/govim/${BUILD_ID}/artifacts.tar.gz'] +- +- # Exit with the actual exit code of the integration tests. +- - name: 'ubuntu' +- args: ['bash', 'govim_test_result.sh'] +- +-# Write build logs to the same bucket as artifacts, so they can be more easily +-# shared. +-logsBucket: 'gs://${_RESULT_BUCKET}' +diff -urN a/gopls/integration/govim/Dockerfile b/gopls/integration/govim/Dockerfile +--- a/gopls/integration/govim/Dockerfile 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/integration/govim/Dockerfile 1969-12-31 18:00:00.000000000 -0600 +@@ -1,16 +0,0 @@ +-# Copyright 2019 The Go Authors. All rights reserved. +-# Use of this source code is governed by a BSD-style +-# license that can be found in the LICENSE file. +- +-# govim requires a more recent version of vim than is available in most +-# distros, so we build from their base image. +-FROM govim/govim:latest-vim +-ARG GOVIM_REF +- +-ENV GOPROXY=https://proxy.golang.org GOPATH=/go VIM_FLAVOR=vim +-WORKDIR /src +- +-# Clone govim. In order to use the go command for resolving latest, we download +-# a redundant copy of govim to the build cache using `go mod download`. +-RUN git clone https://github.com/govim/govim /src/govim && cd /src/govim && \ +- git checkout $GOVIM_REF +diff -urN a/gopls/integration/govim/README.md b/gopls/integration/govim/README.md +--- a/gopls/integration/govim/README.md 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/integration/govim/README.md 1969-12-31 18:00:00.000000000 -0600 +@@ -1,47 +0,0 @@ +-# govim integration tests +- +-Files in this directory configure Cloud Build to run [govim] integration tests +-against a gopls binary built from source. 
+- +-## Running on GCP +- +-To run these integration tests in Cloud Build, use the following steps. Here +-we assume that `$PROJECT_ID` is a valid GCP project and `$BUCKET` is a cloud +-storage bucket owned by that project. +- +-- `cd` to the root directory of the tools project. +-- (at least once per GCP project) Build the test harness: +-``` +-$ gcloud builds submit \ +- --project="${PROJECT_ID}" \ +- --config=gopls/integration/govim/cloudbuild.harness.yaml +-``` +-- Run the integration tests: +-``` +-$ gcloud builds submit \ +- --project="${PROJECT_ID}" \ +- --config=gopls/integration/govim/cloudbuild.yaml \ +- --substitutions=_RESULT_BUCKET="${BUCKET}" +-``` +- +-## Fetching Artifacts +- +-Assuming the artifacts bucket is world readable, you can fetch integration from +-GCS. They are located at: +- +-- logs: `https://storage.googleapis.com/${BUCKET}/log-${EVALUATION_ID}.txt` +-- artifact tarball: `https://storage.googleapis.com/${BUCKET}/govim/${EVALUATION_ID}/artifacts.tar.gz` +- +-The `artifacts.go` command can be used to fetch both artifacts using an +-evaluation id. +- +-## Running locally +- +-Run `gopls/integration/govim/run_local.sh`. This may take a while the first +-time it is run, as it will require building the test harness. This script +-accepts two flags to modify its behavior: +- +-**--sudo**: run docker with `sudo` +-**--short**: run `go test -short` +- +-[govim]: https://github.com/govim/govim +diff -urN a/gopls/integration/govim/run_local.sh b/gopls/integration/govim/run_local.sh +--- a/gopls/integration/govim/run_local.sh 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/integration/govim/run_local.sh 1969-12-31 18:00:00.000000000 -0600 +@@ -1,96 +0,0 @@ +-#!/bin/bash -e +- +-# Copyright 2019 The Go Authors. All rights reserved. +-# Use of this source code is governed by a BSD-style +-# license that can be found in the LICENSE file. +- +-# Run govim integration tests against a local gopls. +- +-usage() { +- cat < /workspace/govim_test_result.sh +- +-# Clean up unnecessary artifacts. This is based on govim/_scripts/tidyUp.bash. +-# Since we're fetching govim using the go command, we won't have this non-go +-# source directory available to us. +-if [[ -n "$GOVIM_TESTSCRIPT_WORKDIR_ROOT" ]]; then +- echo "Cleaning up build artifacts..." +- # Make artifacts writable so that rm -rf doesn't complain. +- chmod -R u+w "$GOVIM_TESTSCRIPT_WORKDIR_ROOT" +- +- # Remove directories we don't care about. +- find "$GOVIM_TESTSCRIPT_WORKDIR_ROOT" -type d \( -name .vim -o -name gopath \) -prune -exec rm -rf '{}' \; +-fi +diff -urN a/gopls/internal/analysis/deprecated/deprecated.go b/gopls/internal/analysis/deprecated/deprecated.go +--- a/gopls/internal/analysis/deprecated/deprecated.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/analysis/deprecated/deprecated.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,252 +0,0 @@ +-// Copyright 2023 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. 
+- +-package deprecated +- +-import ( +- "bytes" +- "go/ast" +- "go/format" +- "go/token" +- "go/types" +- "strconv" +- "strings" +- +- _ "embed" +- +- "golang.org/x/tools/go/analysis" +- "golang.org/x/tools/go/analysis/passes/inspect" +- "golang.org/x/tools/go/ast/inspector" +- "golang.org/x/tools/internal/analysisinternal" +- internalastutil "golang.org/x/tools/internal/astutil" +-) +- +-//go:embed doc.go +-var doc string +- +-var Analyzer = &analysis.Analyzer{ +- Name: "deprecated", +- Doc: analysisinternal.MustExtractDoc(doc, "deprecated"), +- Requires: []*analysis.Analyzer{inspect.Analyzer}, +- Run: checkDeprecated, +- FactTypes: []analysis.Fact{(*deprecationFact)(nil)}, +- RunDespiteErrors: true, +- URL: "https://pkg.go.dev/golang.org/x/tools/gopls/internal/analysis/deprecated", +-} +- +-// checkDeprecated is a simplified copy of staticcheck.CheckDeprecated. +-func checkDeprecated(pass *analysis.Pass) (any, error) { +- inspector := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) +- +- deprs, err := collectDeprecatedNames(pass, inspector) +- if err != nil || (len(deprs.packages) == 0 && len(deprs.objects) == 0) { +- return nil, err +- } +- +- reportDeprecation := func(depr *deprecationFact, node ast.Node) { +- // TODO(hyangah): staticcheck.CheckDeprecated has more complex logic. Do we need it here? +- // TODO(hyangah): Scrub depr.Msg. depr.Msg may contain Go comments +- // markdown syntaxes but LSP diagnostics do not support markdown syntax. +- +- buf := new(bytes.Buffer) +- if err := format.Node(buf, pass.Fset, node); err != nil { +- // This shouldn't happen but let's be conservative. +- buf.Reset() +- buf.WriteString("declaration") +- } +- pass.ReportRangef(node, "%s is deprecated: %s", buf, depr.Msg) +- } +- +- nodeFilter := []ast.Node{(*ast.SelectorExpr)(nil)} +- inspector.Preorder(nodeFilter, func(node ast.Node) { +- // Caveat: this misses dot-imported objects +- sel, ok := node.(*ast.SelectorExpr) +- if !ok { +- return +- } +- +- obj := pass.TypesInfo.ObjectOf(sel.Sel) +- if fn, ok := obj.(*types.Func); ok { +- obj = fn.Origin() +- } +- if obj == nil || obj.Pkg() == nil { +- // skip invalid sel.Sel. +- return +- } +- +- if obj.Pkg() == pass.Pkg { +- // A package is allowed to use its own deprecated objects +- return +- } +- +- // A package "foo" has two related packages "foo_test" and "foo.test", for external tests and the package main +- // generated by 'go test' respectively. "foo_test" can import and use "foo", "foo.test" imports and uses "foo" +- // and "foo_test". +- +- if strings.TrimSuffix(pass.Pkg.Path(), "_test") == obj.Pkg().Path() { +- // foo_test (the external tests of foo) can use objects from foo. +- return +- } +- if strings.TrimSuffix(pass.Pkg.Path(), ".test") == obj.Pkg().Path() { +- // foo.test (the main package of foo's tests) can use objects from foo. +- return +- } +- if strings.TrimSuffix(pass.Pkg.Path(), ".test") == strings.TrimSuffix(obj.Pkg().Path(), "_test") { +- // foo.test (the main package of foo's tests) can use objects from foo's external tests. 
+- return +- } +- +- if depr, ok := deprs.objects[obj]; ok { +- reportDeprecation(depr, sel) +- } +- }) +- +- for _, f := range pass.Files { +- for _, spec := range f.Imports { +- var imp *types.Package +- var obj types.Object +- if spec.Name != nil { +- obj = pass.TypesInfo.ObjectOf(spec.Name) +- } else { +- obj = pass.TypesInfo.Implicits[spec] +- } +- pkgName, ok := obj.(*types.PkgName) +- if !ok { +- continue +- } +- imp = pkgName.Imported() +- +- path, err := strconv.Unquote(spec.Path.Value) +- if err != nil { +- continue +- } +- pkgPath := pass.Pkg.Path() +- if strings.TrimSuffix(pkgPath, "_test") == path { +- // foo_test can import foo +- continue +- } +- if strings.TrimSuffix(pkgPath, ".test") == path { +- // foo.test can import foo +- continue +- } +- if strings.TrimSuffix(pkgPath, ".test") == strings.TrimSuffix(path, "_test") { +- // foo.test can import foo_test +- continue +- } +- if depr, ok := deprs.packages[imp]; ok { +- reportDeprecation(depr, spec.Path) +- } +- } +- } +- return nil, nil +-} +- +-type deprecationFact struct{ Msg string } +- +-func (*deprecationFact) AFact() {} +-func (d *deprecationFact) String() string { return "Deprecated: " + d.Msg } +- +-type deprecatedNames struct { +- objects map[types.Object]*deprecationFact +- packages map[*types.Package]*deprecationFact +-} +- +-// collectDeprecatedNames collects deprecated identifiers and publishes +-// them both as Facts and the return value. This is a simplified copy +-// of staticcheck's fact_deprecated analyzer. +-func collectDeprecatedNames(pass *analysis.Pass, ins *inspector.Inspector) (deprecatedNames, error) { +- doDocs := func(names []*ast.Ident, docs *ast.CommentGroup) { +- alt := strings.TrimPrefix(internalastutil.Deprecation(docs), "Deprecated: ") +- if alt == "" { +- return +- } +- +- for _, name := range names { +- obj := pass.TypesInfo.ObjectOf(name) +- pass.ExportObjectFact(obj, &deprecationFact{alt}) +- } +- } +- +- // Is package deprecated? +- // +- // Don't mark package syscall as deprecated, even though +- // it is. A lot of people still use it for simple +- // constants like SIGKILL, and I am not comfortable +- // telling them to use x/sys for that. +- if pass.Pkg.Path() != "syscall" { +- for _, f := range pass.Files { +- if depr := internalastutil.Deprecation(f.Doc); depr != "" { +- pass.ExportPackageFact(&deprecationFact{depr}) +- break +- } +- } +- } +- +- nodeFilter := []ast.Node{ +- (*ast.GenDecl)(nil), +- (*ast.FuncDecl)(nil), +- (*ast.TypeSpec)(nil), +- (*ast.ValueSpec)(nil), +- (*ast.File)(nil), +- (*ast.StructType)(nil), +- (*ast.InterfaceType)(nil), +- } +- ins.Preorder(nodeFilter, func(node ast.Node) { +- var names []*ast.Ident +- var docs *ast.CommentGroup +- switch node := node.(type) { +- case *ast.GenDecl: +- switch node.Tok { +- case token.TYPE, token.CONST, token.VAR: +- docs = node.Doc +- for i := range node.Specs { +- switch n := node.Specs[i].(type) { +- case *ast.ValueSpec: +- names = append(names, n.Names...) 
+- case *ast.TypeSpec: +- names = append(names, n.Name) +- } +- } +- default: +- return +- } +- case *ast.FuncDecl: +- docs = node.Doc +- names = []*ast.Ident{node.Name} +- case *ast.TypeSpec: +- docs = node.Doc +- names = []*ast.Ident{node.Name} +- case *ast.ValueSpec: +- docs = node.Doc +- names = node.Names +- case *ast.StructType: +- for _, field := range node.Fields.List { +- doDocs(field.Names, field.Doc) +- } +- case *ast.InterfaceType: +- for _, field := range node.Methods.List { +- doDocs(field.Names, field.Doc) +- } +- } +- if docs != nil && len(names) > 0 { +- doDocs(names, docs) +- } +- }) +- +- // Every identifier is potentially deprecated, so we will need +- // to look up facts a lot. Construct maps of all facts propagated +- // to this pass for fast lookup. +- out := deprecatedNames{ +- objects: map[types.Object]*deprecationFact{}, +- packages: map[*types.Package]*deprecationFact{}, +- } +- for _, fact := range pass.AllObjectFacts() { +- out.objects[fact.Object] = fact.Fact.(*deprecationFact) +- } +- for _, fact := range pass.AllPackageFacts() { +- out.packages[fact.Package] = fact.Fact.(*deprecationFact) +- } +- +- return out, nil +-} +diff -urN a/gopls/internal/analysis/deprecated/deprecated_test.go b/gopls/internal/analysis/deprecated/deprecated_test.go +--- a/gopls/internal/analysis/deprecated/deprecated_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/analysis/deprecated/deprecated_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,16 +0,0 @@ +-// Copyright 2023 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package deprecated +- +-import ( +- "testing" +- +- "golang.org/x/tools/go/analysis/analysistest" +-) +- +-func Test(t *testing.T) { +- testdata := analysistest.TestData() +- analysistest.Run(t, testdata, Analyzer, "a") +-} +diff -urN a/gopls/internal/analysis/deprecated/doc.go b/gopls/internal/analysis/deprecated/doc.go +--- a/gopls/internal/analysis/deprecated/doc.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/analysis/deprecated/doc.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,16 +0,0 @@ +-// Copyright 2023 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-// Package deprecated defines an Analyzer that marks deprecated symbols and package imports. +-// +-// # Analyzer deprecated +-// +-// deprecated: check for use of deprecated identifiers +-// +-// The deprecated analyzer looks for deprecated symbols and package +-// imports. +-// +-// See https://go.dev/wiki/Deprecated to learn about Go's convention +-// for documenting and signaling deprecated identifiers. +-package deprecated +diff -urN a/gopls/internal/analysis/deprecated/testdata/src/a/a.go b/gopls/internal/analysis/deprecated/testdata/src/a/a.go +--- a/gopls/internal/analysis/deprecated/testdata/src/a/a.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/analysis/deprecated/testdata/src/a/a.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,17 +0,0 @@ +-// Copyright 2023 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package usedeprecated +- +-import "io/ioutil" // want "\"io/ioutil\" is deprecated: .*" +- +-func x() { +- _, _ = ioutil.ReadFile("") // want "ioutil.ReadFile is deprecated: As of Go 1.16, .*" +- Legacy() // expect no deprecation notice. 
+-} +- +-// Legacy is deprecated. +-// +-// Deprecated: use X instead. +-func Legacy() {} // want Legacy:"Deprecated: use X instead." +diff -urN a/gopls/internal/analysis/deprecated/testdata/src/a/a_test.go b/gopls/internal/analysis/deprecated/testdata/src/a/a_test.go +--- a/gopls/internal/analysis/deprecated/testdata/src/a/a_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/analysis/deprecated/testdata/src/a/a_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,12 +0,0 @@ +-// Copyright 2023 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package usedeprecated +- +-import "testing" +- +-func TestF(t *testing.T) { +- Legacy() // expect no deprecation notice. +- x() +-} +diff -urN a/gopls/internal/analysis/embeddirective/doc.go b/gopls/internal/analysis/embeddirective/doc.go +--- a/gopls/internal/analysis/embeddirective/doc.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/analysis/embeddirective/doc.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,18 +0,0 @@ +-// Copyright 2023 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-// Package embeddirective defines an Analyzer that validates //go:embed directives. +-// The analyzer defers fixes to its parent golang.Analyzer. +-// +-// # Analyzer embed +-// +-// embed: check //go:embed directive usage +-// +-// This analyzer checks that the embed package is imported if //go:embed +-// directives are present, providing a suggested fix to add the import if +-// it is missing. +-// +-// This analyzer also checks that //go:embed directives precede the +-// declaration of a single variable. +-package embeddirective +diff -urN a/gopls/internal/analysis/embeddirective/embeddirective.go b/gopls/internal/analysis/embeddirective/embeddirective.go +--- a/gopls/internal/analysis/embeddirective/embeddirective.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/analysis/embeddirective/embeddirective.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,167 +0,0 @@ +-// Copyright 2022 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package embeddirective +- +-import ( +- _ "embed" +- "go/ast" +- "go/token" +- "go/types" +- "strings" +- +- "golang.org/x/tools/go/analysis" +- "golang.org/x/tools/internal/analysisinternal" +- "golang.org/x/tools/internal/refactor" +-) +- +-//go:embed doc.go +-var doc string +- +-var Analyzer = &analysis.Analyzer{ +- Name: "embed", +- Doc: analysisinternal.MustExtractDoc(doc, "embed"), +- Run: run, +- RunDespiteErrors: true, +- URL: "https://pkg.go.dev/golang.org/x/tools/gopls/internal/analysis/embeddirective", +-} +- +-func run(pass *analysis.Pass) (any, error) { +- for _, f := range pass.Files { +- comments := embedDirectiveComments(f) +- if len(comments) == 0 { +- continue // nothing to check +- } +- +- hasEmbedImport := false +- for _, imp := range f.Imports { +- if imp.Path.Value == `"embed"` { +- hasEmbedImport = true +- break +- } +- } +- +- for _, c := range comments { +- pos, end := c.Pos(), c.Pos()+token.Pos(len("//go:embed")) +- +- if !hasEmbedImport { +- // Add blank import of "embed". 
+- _, edits := refactor.AddImport(pass.TypesInfo, f, "_", "embed", "", c.Pos()) +- if len(edits) > 0 { +- pass.Report(analysis.Diagnostic{ +- Pos: pos, +- End: end, +- Message: `must import "embed" when using go:embed directives`, +- SuggestedFixes: []analysis.SuggestedFix{{ +- Message: `Add missing "embed" import`, +- TextEdits: edits, +- }}, +- }) +- } +- } +- +- var msg string +- spec := nextVarSpec(c, f) +- switch { +- case spec == nil: +- msg = `go:embed directives must precede a "var" declaration` +- case len(spec.Names) != 1: +- msg = "declarations following go:embed directives must define a single variable" +- case len(spec.Values) > 0: +- msg = "declarations following go:embed directives must not specify a value" +- case !embeddableType(pass.TypesInfo.Defs[spec.Names[0]]): +- msg = "declarations following go:embed directives must be of type string, []byte or embed.FS" +- } +- if msg != "" { +- pass.Report(analysis.Diagnostic{ +- Pos: pos, +- End: end, +- Message: msg, +- }) +- } +- } +- } +- return nil, nil +-} +- +-// embedDirectiveComments returns all comments in f that contains a //go:embed directive. +-func embedDirectiveComments(f *ast.File) []*ast.Comment { +- comments := []*ast.Comment{} +- for _, cg := range f.Comments { +- for _, c := range cg.List { +- if strings.HasPrefix(c.Text, "//go:embed ") { +- comments = append(comments, c) +- } +- } +- } +- return comments +-} +- +-// nextVarSpec returns the ValueSpec for the variable declaration immediately following +-// the go:embed comment, or nil if the next declaration is not a variable declaration. +-func nextVarSpec(com *ast.Comment, f *ast.File) *ast.ValueSpec { +- // Embed directives must be followed by a declaration of one variable with no value. +- // There may be comments and empty lines between the directive and the declaration. +- var nextDecl ast.Decl +- for _, d := range f.Decls { +- if com.End() < d.End() { +- nextDecl = d +- break +- } +- } +- if nextDecl == nil || nextDecl.Pos() == token.NoPos { +- return nil +- } +- decl, ok := nextDecl.(*ast.GenDecl) +- if !ok { +- return nil +- } +- if decl.Tok != token.VAR { +- return nil +- } +- +- // var declarations can be both freestanding and blocks (with parenthesis). +- // Only the first variable spec following the directive is interesting. +- var nextSpec ast.Spec +- for _, s := range decl.Specs { +- if com.End() < s.End() { +- nextSpec = s +- break +- } +- } +- if nextSpec == nil { +- return nil +- } +- spec, ok := nextSpec.(*ast.ValueSpec) +- if !ok { +- // Invalid AST, but keep going. +- return nil +- } +- return spec +-} +- +-// embeddableType in go:embed directives are string, []byte or embed.FS. +-func embeddableType(o types.Object) bool { +- if o == nil { +- return false +- } +- +- // For embed.FS the underlying type is an implementation detail. +- // As long as the named type resolves to embed.FS, it is OK. 
+- if named, ok := types.Unalias(o.Type()).(*types.Named); ok { +- obj := named.Obj() +- if obj.Pkg() != nil && obj.Pkg().Path() == "embed" && obj.Name() == "FS" { +- return true +- } +- } +- +- switch v := o.Type().Underlying().(type) { +- case *types.Basic: +- return types.Identical(v, types.Typ[types.String]) +- case *types.Slice: +- return types.Identical(v.Elem(), types.Typ[types.Byte]) +- } +- +- return false +-} +diff -urN a/gopls/internal/analysis/embeddirective/embeddirective_test.go b/gopls/internal/analysis/embeddirective/embeddirective_test.go +--- a/gopls/internal/analysis/embeddirective/embeddirective_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/analysis/embeddirective/embeddirective_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,16 +0,0 @@ +-// Copyright 2022 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package embeddirective +- +-import ( +- "testing" +- +- "golang.org/x/tools/go/analysis/analysistest" +-) +- +-func Test(t *testing.T) { +- testdata := analysistest.TestData() +- analysistest.RunWithSuggestedFixes(t, testdata, Analyzer, "a") +-} +diff -urN a/gopls/internal/analysis/embeddirective/testdata/src/a/embedText b/gopls/internal/analysis/embeddirective/testdata/src/a/embedText +--- a/gopls/internal/analysis/embeddirective/testdata/src/a/embedText 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/analysis/embeddirective/testdata/src/a/embedText 1969-12-31 18:00:00.000000000 -0600 +@@ -1 +0,0 @@ +-Hello World +\ No newline at end of file +diff -urN a/gopls/internal/analysis/embeddirective/testdata/src/a/import_missing.go b/gopls/internal/analysis/embeddirective/testdata/src/a/import_missing.go +--- a/gopls/internal/analysis/embeddirective/testdata/src/a/import_missing.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/analysis/embeddirective/testdata/src/a/import_missing.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,17 +0,0 @@ +-// Copyright 2022 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package a +- +-import ( +- "fmt" +-) +- +-//go:embed embedtext // want "must import \"embed\" when using go:embed directives" +-var s string +- +-// This is main function +-func main() { +- fmt.Println(s) +-} +diff -urN a/gopls/internal/analysis/embeddirective/testdata/src/a/import_missing.go.golden b/gopls/internal/analysis/embeddirective/testdata/src/a/import_missing.go.golden +--- a/gopls/internal/analysis/embeddirective/testdata/src/a/import_missing.go.golden 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/analysis/embeddirective/testdata/src/a/import_missing.go.golden 1969-12-31 18:00:00.000000000 -0600 +@@ -1,18 +0,0 @@ +-// Copyright 2022 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. 
+- +-package a +- +-import ( +- _ "embed" +- "fmt" +-) +- +-//go:embed embedtext // want "must import \"embed\" when using go:embed directives" +-var s string +- +-// This is main function +-func main() { +- fmt.Println(s) +-} +diff -urN a/gopls/internal/analysis/embeddirective/testdata/src/a/import_present.go b/gopls/internal/analysis/embeddirective/testdata/src/a/import_present.go +--- a/gopls/internal/analysis/embeddirective/testdata/src/a/import_present.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/analysis/embeddirective/testdata/src/a/import_present.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,129 +0,0 @@ +-// Copyright 2022 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package a +- +-// Misplaced, above imports. +-//go:embed embedText // want "go:embed directives must precede a \"var\" declaration" +- +-import ( +- "embed" +- embedPkg "embed" +- "fmt" +- +- _ "embed" +-) +- +-//go:embed embedText // ok +-var e1 string +- +-// The analyzer does not check for many directives using the same var. +-// +-//go:embed embedText // ok +-//go:embed embedText // ok +-var e2 string +- +-// Comments and blank lines between are OK. All types OK. +-// +-//go:embed embedText // ok +-// +-// foo +- +-var e3 string +- +-//go:embed embedText //ok +-var e4 []byte +- +-//go:embed embedText //ok +-var e5 embed.FS +- +-// Followed by wrong kind of decl. +-// +-//go:embed embedText // want "go:embed directives must precede a \"var\" declaration" +-func fooFunc() {} +- +-// Multiple variable specs. +-// +-//go:embed embedText // want "declarations following go:embed directives must define a single variable" +-var e6, e7 []byte +- +-// Specifying a value is not allowed. +-// +-//go:embed embedText // want "declarations following go:embed directives must not specify a value" +-var e8 string = "foo" +- +-// TODO: This should not be OK, misplaced according to compiler. +-// +-//go:embed embedText // ok +-var ( +- e9 string +- e10 string +-) +- +-// Type definition. +-type fooType []byte +- +-//go:embed embedText //ok +-var e11 fooType +- +-// Type alias. +-type barType = string +- +-//go:embed embedText //ok +-var e12 barType +- +-// Renamed embed package. +- +-//go:embed embedText //ok +-var e13 embedPkg.FS +- +-// Renamed embed package alias. +-type embedAlias = embedPkg.FS +- +-//go:embed embedText //ok +-var e14 embedAlias +- +-// var blocks are OK as long as the variable following the directive is OK. +-var ( +- x, y, z string +- //go:embed embedText // ok +- e20 string +- q, r, t string +-) +- +-//go:embed embedText // want "go:embed directives must precede a \"var\" declaration" +-var () +- +-// Incorrect types. 
+- +-//go:embed embedText // want `declarations following go:embed directives must be of type string, \[\]byte or embed.FS` +-var e16 byte +- +-//go:embed embedText // want `declarations following go:embed directives must be of type string, \[\]byte or embed.FS` +-var e17 []string +- +-//go:embed embedText // want `declarations following go:embed directives must be of type string, \[\]byte or embed.FS` +-var e18 embed.Foo +- +-//go:embed embedText // want `declarations following go:embed directives must be of type string, \[\]byte or embed.FS` +-var e19 foo.FS +- +-type byteAlias byte +- +-//go:embed embedText // want `declarations following go:embed directives must be of type string, \[\]byte or embed.FS` +-var e15 byteAlias +- +-// A type declaration of embed.FS is not accepted by the compiler, in contrast to an alias. +-type embedDecl embed.FS +- +-//go:embed embedText // want `declarations following go:embed directives must be of type string, \[\]byte or embed.FS` +-var e16 embedDecl +- +-// This is main function +-func main() { +- fmt.Println(s) +-} +- +-// No declaration following. +-//go:embed embedText // want "go:embed directives must precede a \"var\" declaration" +diff -urN a/gopls/internal/analysis/embeddirective/testdata/src/a/import_present_go120.go b/gopls/internal/analysis/embeddirective/testdata/src/a/import_present_go120.go +--- a/gopls/internal/analysis/embeddirective/testdata/src/a/import_present_go120.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/analysis/embeddirective/testdata/src/a/import_present_go120.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,26 +0,0 @@ +-// Copyright 2022 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-//go:build go1.20 +-// +build go1.20 +- +-package a +- +-var ( +- // Okay directive wise but the compiler will complain that +- // imports must appear before other declarations. +- //go:embed embedText // ok +- foo string +-) +- +-import ( +- "fmt" +- +- _ "embed" +-) +- +-// This is main function +-func main() { +- fmt.Println(s) +-} +diff -urN a/gopls/internal/analysis/fillreturns/doc.go b/gopls/internal/analysis/fillreturns/doc.go +--- a/gopls/internal/analysis/fillreturns/doc.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/analysis/fillreturns/doc.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,27 +0,0 @@ +-// Copyright 2023 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-// Package fillreturns defines an Analyzer that will attempt to +-// automatically fill in a return statement that has missing +-// values with zero value elements. +-// +-// # Analyzer fillreturns +-// +-// fillreturns: suggest fixes for errors due to an incorrect number of return values +-// +-// This checker provides suggested fixes for type errors of the +-// type "wrong number of return values (want %d, got %d)". For example: +-// +-// func m() (int, string, *bool, error) { +-// return +-// } +-// +-// will turn into +-// +-// func m() (int, string, *bool, error) { +-// return 0, "", nil, nil +-// } +-// +-// This functionality is similar to https://github.com/sqs/goreturns. 
+-package fillreturns +diff -urN a/gopls/internal/analysis/fillreturns/fillreturns.go b/gopls/internal/analysis/fillreturns/fillreturns.go +--- a/gopls/internal/analysis/fillreturns/fillreturns.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/analysis/fillreturns/fillreturns.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,244 +0,0 @@ +-// Copyright 2020 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package fillreturns +- +-import ( +- "bytes" +- _ "embed" +- "fmt" +- "go/ast" +- "go/format" +- "go/types" +- "regexp" +- "slices" +- "strings" +- +- "golang.org/x/tools/go/analysis" +- "golang.org/x/tools/go/analysis/passes/inspect" +- "golang.org/x/tools/go/ast/inspector" +- "golang.org/x/tools/gopls/internal/fuzzy" +- "golang.org/x/tools/gopls/internal/util/cursorutil" +- "golang.org/x/tools/internal/analysisinternal" +- "golang.org/x/tools/internal/moreiters" +- "golang.org/x/tools/internal/typesinternal" +-) +- +-//go:embed doc.go +-var doc string +- +-var Analyzer = &analysis.Analyzer{ +- Name: "fillreturns", +- Doc: analysisinternal.MustExtractDoc(doc, "fillreturns"), +- Requires: []*analysis.Analyzer{inspect.Analyzer}, +- Run: run, +- RunDespiteErrors: true, +- URL: "https://pkg.go.dev/golang.org/x/tools/gopls/internal/analysis/fillreturns", +-} +- +-func run(pass *analysis.Pass) (any, error) { +- inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) +- info := pass.TypesInfo +- +-outer: +- for _, typeErr := range pass.TypeErrors { +- if !fixesError(typeErr) { +- continue // irrelevant type error +- } +- _, start, end, ok := typesinternal.ErrorCodeStartEnd(typeErr) +- if !ok { +- continue // no position information +- } +- curErr, ok := inspect.Root().FindByPos(start, end) +- if !ok { +- continue // can't find node +- } +- +- // Find cursor for enclosing return statement (which may be curErr itself). +- ret, curRet := cursorutil.FirstEnclosing[*ast.ReturnStmt](curErr) +- if ret == nil { +- continue // no enclosing return +- } +- +- // Skip if any return argument is a tuple-valued function call. +- for _, expr := range ret.Results { +- e, ok := expr.(*ast.CallExpr) +- if !ok { +- continue +- } +- if tup, ok := info.TypeOf(e).(*types.Tuple); ok && tup.Len() > 1 { +- continue outer +- } +- } +- +- // Get type of innermost enclosing function. +- var funcType *ast.FuncType +- curFunc, _ := enclosingFunc(curRet) // can't fail +- switch fn := curFunc.Node().(type) { +- case *ast.FuncLit: +- funcType = fn.Type +- case *ast.FuncDecl: +- funcType = fn.Type +- +- // Skip generic functions since type parameters don't have zero values. +- // TODO(rfindley): We should be able to handle this if the return +- // values are all concrete types. +- if funcType.TypeParams.NumFields() > 0 { +- continue +- } +- } +- if funcType.Results == nil { +- continue +- } +- +- // Duplicate the return values to track which values have been matched. +- remaining := make([]ast.Expr, len(ret.Results)) +- copy(remaining, ret.Results) +- +- fixed := make([]ast.Expr, len(funcType.Results.List)) +- +- // For each value in the return function declaration, find the leftmost element +- // in the return statement that has the desired type. If no such element exists, +- // fill in the missing value with the appropriate "zero" value. +- // Beware that type information may be incomplete. 
+- var retTyps []types.Type +- for _, ret := range funcType.Results.List { +- retTyp := info.TypeOf(ret.Type) +- if retTyp == nil { +- return nil, nil +- } +- retTyps = append(retTyps, retTyp) +- } +- +- file, _ := cursorutil.FirstEnclosing[*ast.File](curRet) +- matches := analysisinternal.MatchingIdents(retTyps, file, ret.Pos(), info, pass.Pkg) +- qual := typesinternal.FileQualifier(file, pass.Pkg) +- for i, retTyp := range retTyps { +- var match ast.Expr +- var idx int +- for j, val := range remaining { +- if t := info.TypeOf(val); t == nil || !matchingTypes(t, retTyp) { +- continue +- } +- if !isZeroExpr(val) { +- match, idx = val, j +- break +- } +- // If the current match is a "zero" value, we keep searching in +- // case we find a non-"zero" value match. If we do not find a +- // non-"zero" value, we will use the "zero" value. +- match, idx = val, j +- } +- +- if match != nil { +- fixed[i] = match +- remaining = slices.Delete(remaining, idx, idx+1) +- } else { +- names, ok := matches[retTyp] +- if !ok { +- return nil, fmt.Errorf("invalid return type: %v", retTyp) +- } +- // Find the identifier most similar to the return type. +- // If no identifier matches the pattern, generate a zero value. +- if best := fuzzy.BestMatch(retTyp.String(), names); best != "" { +- fixed[i] = ast.NewIdent(best) +- } else if zero, isValid := typesinternal.ZeroExpr(retTyp, qual); isValid { +- fixed[i] = zero +- } else { +- return nil, nil +- } +- } +- } +- +- // Remove any non-matching "zero values" from the leftover values. +- var nonZeroRemaining []ast.Expr +- for _, expr := range remaining { +- if !isZeroExpr(expr) { +- nonZeroRemaining = append(nonZeroRemaining, expr) +- } +- } +- // Append leftover return values to end of new return statement. +- fixed = append(fixed, nonZeroRemaining...) +- +- newRet := &ast.ReturnStmt{ +- Return: ret.Pos(), +- Results: fixed, +- } +- +- // Convert the new return statement AST to text. +- var newBuf bytes.Buffer +- if err := format.Node(&newBuf, pass.Fset, newRet); err != nil { +- return nil, err +- } +- +- pass.Report(analysis.Diagnostic{ +- Pos: start, +- End: end, +- Message: typeErr.Msg, +- SuggestedFixes: []analysis.SuggestedFix{{ +- Message: "Fill in return values", +- TextEdits: []analysis.TextEdit{{ +- Pos: ret.Pos(), +- End: ret.End(), +- NewText: newBuf.Bytes(), +- }}, +- }}, +- }) +- } +- return nil, nil +-} +- +-func matchingTypes(want, got types.Type) bool { +- if want == got || types.Identical(want, got) { +- return true +- } +- // Code segment to help check for untyped equality from (golang/go#32146). +- if rhs, ok := want.(*types.Basic); ok && rhs.Info()&types.IsUntyped > 0 { +- if lhs, ok := got.Underlying().(*types.Basic); ok { +- return rhs.Info()&types.IsConstType == lhs.Info()&types.IsConstType +- } +- } +- return types.AssignableTo(want, got) || types.ConvertibleTo(want, got) +-} +- +-// Error messages have changed across Go versions. These regexps capture recent +-// incarnations. +-// +-// TODO(rfindley): once error codes are exported and exposed via go/packages, +-// use error codes rather than string matching here. 
+-var wrongReturnNumRegexes = []*regexp.Regexp{ +- regexp.MustCompile(`wrong number of return values \(want (\d+), got (\d+)\)`), +- regexp.MustCompile(`too many return values`), +- regexp.MustCompile(`not enough return values`), +-} +- +-func fixesError(err types.Error) bool { +- msg := strings.TrimSpace(err.Msg) +- for _, rx := range wrongReturnNumRegexes { +- if rx.MatchString(msg) { +- return true +- } +- } +- return false +-} +- +-// enclosingFunc returns the cursor for the innermost Func{Decl,Lit} +-// that encloses c, if any. +-func enclosingFunc(c inspector.Cursor) (inspector.Cursor, bool) { +- return moreiters.First(c.Enclosing((*ast.FuncDecl)(nil), (*ast.FuncLit)(nil))) +-} +- +-// isZeroExpr uses simple syntactic heuristics to report whether expr +-// is a obvious zero value, such as 0, "", nil, or false. +-// It cannot do better without type information. +-func isZeroExpr(expr ast.Expr) bool { +- switch e := expr.(type) { +- case *ast.BasicLit: +- return e.Value == "0" || e.Value == `""` +- case *ast.Ident: +- return e.Name == "nil" || e.Name == "false" +- default: +- return false +- } +-} +diff -urN a/gopls/internal/analysis/fillreturns/fillreturns_test.go b/gopls/internal/analysis/fillreturns/fillreturns_test.go +--- a/gopls/internal/analysis/fillreturns/fillreturns_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/analysis/fillreturns/fillreturns_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,17 +0,0 @@ +-// Copyright 2020 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package fillreturns_test +- +-import ( +- "testing" +- +- "golang.org/x/tools/go/analysis/analysistest" +- "golang.org/x/tools/gopls/internal/analysis/fillreturns" +-) +- +-func Test(t *testing.T) { +- testdata := analysistest.TestData() +- analysistest.RunWithSuggestedFixes(t, testdata, fillreturns.Analyzer, "a", "typeparams") +-} +diff -urN a/gopls/internal/analysis/fillreturns/testdata/src/a/a.go b/gopls/internal/analysis/fillreturns/testdata/src/a/a.go +--- a/gopls/internal/analysis/fillreturns/testdata/src/a/a.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/analysis/fillreturns/testdata/src/a/a.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,139 +0,0 @@ +-// Copyright 2020 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package fillreturns +- +-import ( +- "errors" +- "go/ast" +- ast2 "go/ast" +- "io" +- "net/http" +- . "net/http" +- "net/url" +- "strconv" +-) +- +-type T struct{} +-type T1 = T +-type I interface{} +-type I1 = I +-type z func(string, http.Handler) error +- +-func x() error { +- return errors.New("foo") +-} +- +-// The error messages below changed in 1.18; "return values" covers both forms. 
+- +-func b() (string, int, error) { +- return "", errors.New("foo") // want "return values" +-} +- +-func c() (string, int, error) { +- return 7, errors.New("foo") // want "return values" +-} +- +-func d() (string, int, error) { +- return "", 7 // want "return values" +-} +- +-func e() (T, error, *bool) { +- return (z(http.ListenAndServe))("", nil) // want "return values" +-} +- +-func preserveLeft() (int, int, error) { +- return 1, errors.New("foo") // want "return values" +-} +- +-func matchValues() (int, error, string) { +- return errors.New("foo"), 3 // want "return values" +-} +- +-func preventDataOverwrite() (int, string) { +- return errors.New("foo") // want "return values" +-} +- +-func closure() (string, error) { +- _ = func() (int, error) { +- return // want "return values" +- } +- return // want "return values" +-} +- +-func basic() (uint8, uint16, uint32, uint64, int8, int16, int32, int64, float32, float64, complex64, complex128, byte, rune, uint, int, uintptr, string, bool, error) { +- return // want "return values" +-} +- +-func complex() (*int, []int, [2]int, map[int]int) { +- return // want "return values" +-} +- +-func structsAndInterfaces() (T, url.URL, T1, I, I1, io.Reader, Client, ast2.Stmt) { +- return // want "return values" +-} +- +-func m() (int, error) { +- if 1 == 2 { +- return // want "return values" +- } else if 1 == 3 { +- return errors.New("foo") // want "return values" +- } else { +- return 1 // want "return values" +- } +- return // want "return values" +-} +- +-func convertibleTypes() (ast2.Expr, int) { +- return &ast2.ArrayType{} // want "return values" +-} +- +-func assignableTypes() (map[string]int, int) { +- type X map[string]int +- var x X +- return x // want "return values" +-} +- +-func interfaceAndError() (I, int) { +- return errors.New("foo") // want "return values" +-} +- +-func funcOneReturn() (string, error) { +- return strconv.Itoa(1) // want "return values" +-} +- +-func funcMultipleReturn() (int, error, string) { +- return strconv.Atoi("1") +-} +- +-func localFuncMultipleReturn() (string, int, error, string) { +- return b() +-} +- +-func multipleUnused() (int, string, string, string) { +- return 3, 4, 5 // want "return values" +-} +- +-func gotTooMany() int { +- if true { +- return 0, "" // want "return values" +- } else { +- return 1, 0, nil // want "return values" +- } +- return 0, 5, false // want "return values" +-} +- +-func fillVars() (int, string, ast.Node, bool, error) { +- eint := 0 +- s := "a" +- var t bool +- if true { +- err := errors.New("fail") +- return // want "return values" +- } +- n := ast.NewIdent("ident") +- int := 3 +- var b bool +- return "" // want "return values" +-} +diff -urN a/gopls/internal/analysis/fillreturns/testdata/src/a/a.go.golden b/gopls/internal/analysis/fillreturns/testdata/src/a/a.go.golden +--- a/gopls/internal/analysis/fillreturns/testdata/src/a/a.go.golden 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/analysis/fillreturns/testdata/src/a/a.go.golden 1969-12-31 18:00:00.000000000 -0600 +@@ -1,139 +0,0 @@ +-// Copyright 2020 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package fillreturns +- +-import ( +- "errors" +- "go/ast" +- ast2 "go/ast" +- "io" +- "net/http" +- . 
"net/http" +- "net/url" +- "strconv" +-) +- +-type T struct{} +-type T1 = T +-type I interface{} +-type I1 = I +-type z func(string, http.Handler) error +- +-func x() error { +- return errors.New("foo") +-} +- +-// The error messages below changed in 1.18; "return values" covers both forms. +- +-func b() (string, int, error) { +- return "", 0, errors.New("foo") // want "return values" +-} +- +-func c() (string, int, error) { +- return "", 7, errors.New("foo") // want "return values" +-} +- +-func d() (string, int, error) { +- return "", 7, nil // want "return values" +-} +- +-func e() (T, error, *bool) { +- return T{}, (z(http.ListenAndServe))("", nil), nil // want "return values" +-} +- +-func preserveLeft() (int, int, error) { +- return 1, 0, errors.New("foo") // want "return values" +-} +- +-func matchValues() (int, error, string) { +- return 3, errors.New("foo"), "" // want "return values" +-} +- +-func preventDataOverwrite() (int, string) { +- return 0, "", errors.New("foo") // want "return values" +-} +- +-func closure() (string, error) { +- _ = func() (int, error) { +- return 0, nil // want "return values" +- } +- return "", nil // want "return values" +-} +- +-func basic() (uint8, uint16, uint32, uint64, int8, int16, int32, int64, float32, float64, complex64, complex128, byte, rune, uint, int, uintptr, string, bool, error) { +- return 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, "", false, nil // want "return values" +-} +- +-func complex() (*int, []int, [2]int, map[int]int) { +- return nil, nil, [2]int{}, nil // want "return values" +-} +- +-func structsAndInterfaces() (T, url.URL, T1, I, I1, io.Reader, Client, ast2.Stmt) { +- return T{}, url.URL{}, T1{}, nil, nil, nil, Client{}, nil // want "return values" +-} +- +-func m() (int, error) { +- if 1 == 2 { +- return 0, nil // want "return values" +- } else if 1 == 3 { +- return 0, errors.New("foo") // want "return values" +- } else { +- return 1, nil // want "return values" +- } +- return 0, nil // want "return values" +-} +- +-func convertibleTypes() (ast2.Expr, int) { +- return &ast2.ArrayType{}, 0 // want "return values" +-} +- +-func assignableTypes() (map[string]int, int) { +- type X map[string]int +- var x X +- return x, 0 // want "return values" +-} +- +-func interfaceAndError() (I, int) { +- return errors.New("foo"), 0 // want "return values" +-} +- +-func funcOneReturn() (string, error) { +- return strconv.Itoa(1), nil // want "return values" +-} +- +-func funcMultipleReturn() (int, error, string) { +- return strconv.Atoi("1") +-} +- +-func localFuncMultipleReturn() (string, int, error, string) { +- return b() +-} +- +-func multipleUnused() (int, string, string, string) { +- return 3, "", "", "", 4, 5 // want "return values" +-} +- +-func gotTooMany() int { +- if true { +- return 0 // want "return values" +- } else { +- return 1 // want "return values" +- } +- return 5 // want "return values" +-} +- +-func fillVars() (int, string, ast.Node, bool, error) { +- eint := 0 +- s := "a" +- var t bool +- if true { +- err := errors.New("fail") +- return eint, s, nil, false, err // want "return values" +- } +- n := ast.NewIdent("ident") +- int := 3 +- var b bool +- return int, "", n, b, nil // want "return values" +-} +diff -urN a/gopls/internal/analysis/fillreturns/testdata/src/typeparams/a.go b/gopls/internal/analysis/fillreturns/testdata/src/typeparams/a.go +--- a/gopls/internal/analysis/fillreturns/testdata/src/typeparams/a.go 2000-01-01 00:00:00.000000000 -0000 ++++ 
b/gopls/internal/analysis/fillreturns/testdata/src/typeparams/a.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,5 +0,0 @@ +-package fillreturns +- +-func hello[T any]() int { +- return +-} +diff -urN a/gopls/internal/analysis/fillreturns/testdata/src/typeparams/a.go.golden b/gopls/internal/analysis/fillreturns/testdata/src/typeparams/a.go.golden +--- a/gopls/internal/analysis/fillreturns/testdata/src/typeparams/a.go.golden 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/analysis/fillreturns/testdata/src/typeparams/a.go.golden 1969-12-31 18:00:00.000000000 -0600 +@@ -1,5 +0,0 @@ +-package fillreturns +- +-func hello[T any]() int { +- return +-} +diff -urN a/gopls/internal/analysis/fillstruct/fillstruct.go b/gopls/internal/analysis/fillstruct/fillstruct.go +--- a/gopls/internal/analysis/fillstruct/fillstruct.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/analysis/fillstruct/fillstruct.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,444 +0,0 @@ +-// Copyright 2020 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-// Package fillstruct defines an Analyzer that automatically +-// fills in a struct declaration with zero value elements for each field. +-// +-// The analyzer's diagnostic is merely a prompt. +-// The actual fix is created by a separate direct call from gopls to +-// the SuggestedFixes function. +-// Tests of Analyzer.Run can be found in ./testdata/src. +-// Tests of the SuggestedFixes logic live in ../../testdata/fillstruct. +-package fillstruct +- +-import ( +- "bytes" +- "fmt" +- "go/ast" +- "go/format" +- "go/printer" +- "go/token" +- "go/types" +- "strings" +- "unicode" +- +- "golang.org/x/tools/go/analysis" +- "golang.org/x/tools/gopls/internal/cache" +- "golang.org/x/tools/gopls/internal/cache/parsego" +- "golang.org/x/tools/gopls/internal/fuzzy" +- "golang.org/x/tools/gopls/internal/util/cursorutil" +- "golang.org/x/tools/gopls/internal/util/safetoken" +- "golang.org/x/tools/internal/analysisinternal" +- "golang.org/x/tools/internal/typeparams" +- "golang.org/x/tools/internal/typesinternal" +-) +- +-// Diagnose computes diagnostics for fillable struct literals overlapping with +-// the provided start and end position of file f. +-// +-// The diagnostic contains a lazy fix; the actual patch is computed +-// (via the ApplyFix command) by a call to [SuggestedFix]. +-// +-// If either start or end is invalid, the entire file is inspected. +-func Diagnose(f *ast.File, start, end token.Pos, pkg *types.Package, info *types.Info) []analysis.Diagnostic { +- var diags []analysis.Diagnostic +- ast.Inspect(f, func(n ast.Node) bool { +- if n == nil { +- return true // pop +- } +- if start.IsValid() && n.End() < start || end.IsValid() && n.Pos() > end { +- return false // skip non-overlapping subtree +- } +- expr, ok := n.(*ast.CompositeLit) +- if !ok { +- return true +- } +- typ := info.TypeOf(expr) +- if typ == nil { +- return true +- } +- +- // Find reference to the type declaration of the struct being initialized. +- typ = typeparams.Deref(typ) +- tStruct, ok := typeparams.CoreType(typ).(*types.Struct) +- if !ok { +- return true +- } +- // Inv: typ is the possibly-named struct type. +- +- fieldCount := tStruct.NumFields() +- +- // Skip any struct that is already populated or that has no fields. +- if fieldCount == 0 || fieldCount == len(expr.Elts) { +- return true +- } +- +- // Are any fields in need of filling? 
+- var fillableFields []string +- for i := range fieldCount { +- field := tStruct.Field(i) +- // Ignore fields that are not accessible in the current package. +- if field.Pkg() != nil && field.Pkg() != pkg && !field.Exported() { +- continue +- } +- fillableFields = append(fillableFields, fmt.Sprintf("%s: %s", field.Name(), field.Type().String())) +- } +- if len(fillableFields) == 0 { +- return true +- } +- +- // Derive a name for the struct type. +- var name string +- if typ != tStruct { +- // named struct type (e.g. pkg.S[T]) +- name = types.TypeString(typ, typesinternal.NameRelativeTo(pkg)) +- } else { +- // anonymous struct type +- totalFields := len(fillableFields) +- const maxLen = 20 +- // Find the index to cut off printing of fields. +- var i, fieldLen int +- for i = range fillableFields { +- if fieldLen > maxLen { +- break +- } +- fieldLen += len(fillableFields[i]) +- } +- fillableFields = fillableFields[:i] +- if i < totalFields { +- fillableFields = append(fillableFields, "...") +- } +- name = fmt.Sprintf("anonymous struct{ %s }", strings.Join(fillableFields, ", ")) +- } +- diags = append(diags, analysis.Diagnostic{ +- Message: fmt.Sprintf("%s literal has missing fields", name), +- Pos: expr.Pos(), +- End: expr.End(), +- Category: FixCategory, +- SuggestedFixes: []analysis.SuggestedFix{{ +- Message: fmt.Sprintf("Fill %s", name), +- // No TextEdits => computed later by gopls. +- }}, +- }) +- return true +- }) +- +- return diags +-} +- +-const FixCategory = "fillstruct" // recognized by gopls ApplyFix +- +-// SuggestedFix computes the suggested fix for the kinds of +-// diagnostics produced by the Analyzer above. +-func SuggestedFix(cpkg *cache.Package, pgf *parsego.File, start, end token.Pos) (*token.FileSet, *analysis.SuggestedFix, error) { +- var ( +- file = pgf.File +- fset = cpkg.FileSet() +- pkg = cpkg.Types() +- info = cpkg.TypesInfo() +- pos = start // don't use end +- ) +- cur, ok := pgf.Cursor.FindByPos(pos, pos) +- if !ok { +- return nil, nil, fmt.Errorf("no enclosing ast.Node") +- } +- expr, _ := cursorutil.FirstEnclosing[*ast.CompositeLit](cur) +- typ := info.TypeOf(expr) +- if typ == nil { +- return nil, nil, fmt.Errorf("no composite literal") +- } +- +- // Find reference to the type declaration of the struct being initialized. +- typ = typeparams.Deref(typ) +- tStruct, ok := typ.Underlying().(*types.Struct) +- if !ok { +- return nil, nil, fmt.Errorf("%s is not a (pointer to) struct type", +- types.TypeString(typ, typesinternal.NameRelativeTo(pkg))) +- } +- // Inv: typ is the possibly-named struct type. +- +- fieldCount := tStruct.NumFields() +- +- // Check which types have already been filled in. (we only want to fill in +- // the unfilled types, or else we'll blat user-supplied details) +- prefilledFields := map[string]ast.Expr{} +- var elts []ast.Expr +- for _, e := range expr.Elts { +- if kv, ok := e.(*ast.KeyValueExpr); ok { +- if key, ok := kv.Key.(*ast.Ident); ok { +- prefilledFields[key.Name] = kv.Value +- elts = append(elts, kv) +- } +- } +- } +- +- var fieldTyps []types.Type +- for i := range fieldCount { +- field := tStruct.Field(i) +- // Ignore fields that are not accessible in the current package. 
+- if field.Pkg() != nil && field.Pkg() != pkg && !field.Exported() { +- fieldTyps = append(fieldTyps, nil) +- continue +- } +- fieldTyps = append(fieldTyps, field.Type()) +- } +- matches := analysisinternal.MatchingIdents(fieldTyps, file, start, info, pkg) +- qual := typesinternal.FileQualifier(file, pkg) +- +- for i, fieldTyp := range fieldTyps { +- if fieldTyp == nil { +- continue // TODO(adonovan): is this reachable? +- } +- fieldName := tStruct.Field(i).Name() +- if _, ok := prefilledFields[fieldName]; ok { +- // We already stored these when looping over expr.Elt. +- // Want to preserve the original order of prefilled fields +- continue +- } +- +- kv := &ast.KeyValueExpr{ +- Key: &ast.Ident{ +- Name: fieldName, +- }, +- } +- +- names, ok := matches[fieldTyp] +- if !ok { +- return nil, nil, fmt.Errorf("invalid struct field type: %v", fieldTyp) +- } +- +- // Find the name most similar to the field name. +- // If no name matches the pattern, generate a zero value. +- // NOTE: We currently match on the name of the field key rather than the field type. +- if best := fuzzy.BestMatch(fieldName, names); best != "" { +- kv.Value = ast.NewIdent(best) +- } else if expr, isValid := populateValue(fieldTyp, qual); isValid { +- kv.Value = expr +- } else { +- return nil, nil, nil // no fix to suggest +- } +- +- elts = append(elts, kv) +- } +- +- // If all of the struct's fields are unexported, we have nothing to do. +- if len(elts) == 0 { +- return nil, nil, fmt.Errorf("no elements to fill") +- } +- +- // Find the line on which the composite literal is declared. +- split := bytes.Split(pgf.Src, []byte("\n")) +- lineNumber := safetoken.StartPosition(fset, expr.Lbrace).Line +- firstLine := split[lineNumber-1] // lines are 1-indexed +- +- // Trim the whitespace from the left of the line, and use the index +- // to get the amount of whitespace on the left. +- trimmed := bytes.TrimLeftFunc(firstLine, unicode.IsSpace) +- index := bytes.Index(firstLine, trimmed) +- whitespace := firstLine[:index] +- +- // Write a new composite literal "_{...}" composed of all prefilled and new elements, +- // preserving existing formatting and comments. +- // An alternative would be to only format the new fields, +- // but by printing the entire composite literal, we ensure +- // that the result is gofmt'ed. +- var buf bytes.Buffer +- buf.WriteString("_{\n") +- fcmap := ast.NewCommentMap(fset, file, file.Comments) +- comments := fcmap.Filter(expr).Comments() // comments inside the expr, in source order +- for _, elt := range elts { +- // Print comments before the current elt +- for len(comments) > 0 && comments[0].Pos() < elt.Pos() { +- for _, co := range comments[0].List { +- fmt.Fprintln(&buf, co.Text) +- } +- comments = comments[1:] +- } +- +- // Print the current elt with comments +- eltcomments := fcmap.Filter(elt).Comments() +- if err := format.Node(&buf, fset, &printer.CommentedNode{Node: elt, Comments: eltcomments}); err != nil { +- return nil, nil, err +- } +- buf.WriteString(",") +- +- // Prune comments up to the end of the elt +- for len(comments) > 0 && comments[0].Pos() < elt.End() { +- comments = comments[1:] +- } +- +- // Write comments associated with the current elt that appear after it +- // printer.CommentedNode only prints comments inside the elt. 
+- for _, cg := range eltcomments { +- for _, co := range cg.List { +- if co.Pos() >= elt.End() { +- fmt.Fprintln(&buf, co.Text) +- if len(comments) > 0 { +- comments = comments[1:] +- } +- } +- } +- } +- buf.WriteString("\n") +- } +- buf.WriteString("}") +- formatted, err := format.Source(buf.Bytes()) +- if err != nil { +- return nil, nil, err +- } +- +- sug := indent(formatted, whitespace) +- // Remove _ +- idx := bytes.IndexByte(sug, '{') // cannot fail +- sug = sug[idx:] +- +- return fset, &analysis.SuggestedFix{ +- TextEdits: []analysis.TextEdit{ +- { +- Pos: expr.Lbrace, +- End: expr.Rbrace + token.Pos(len("}")), +- NewText: sug, +- }, +- }, +- }, nil +-} +- +-// indent works line by line through str, indenting (prefixing) each line with +-// ind. +-func indent(str, ind []byte) []byte { +- split := bytes.Split(str, []byte("\n")) +- newText := bytes.NewBuffer(nil) +- for i, s := range split { +- if len(s) == 0 { +- continue +- } +- // Don't add the extra indentation to the first line. +- if i != 0 { +- newText.Write(ind) +- } +- newText.Write(s) +- if i < len(split)-1 { +- newText.WriteByte('\n') +- } +- } +- return newText.Bytes() +-} +- +-// populateValue constructs an expression to fill the value of a struct field. +-// +-// When the type of a struct field is a basic literal or interface, we return +-// default values. For other types, such as maps, slices, and channels, we create +-// empty expressions such as []T{} or make(chan T) rather than using default values. +-// +-// The reasoning here is that users will call fillstruct with the intention of +-// initializing the struct, in which case setting these fields to nil has no effect. +-// +-// If the input contains an invalid type, populateValue may panic or return +-// expression that may not compile. +-func populateValue(typ types.Type, qual types.Qualifier) (_ ast.Expr, isValid bool) { +- switch t := typ.(type) { +- case *types.TypeParam, *types.Interface, *types.Struct, *types.Basic: +- return typesinternal.ZeroExpr(t, qual) +- +- case *types.Alias, *types.Named: +- switch t.Underlying().(type) { +- // Avoid typesinternal.ZeroExpr here as we don't want to return nil. +- case *types.Map, *types.Slice: +- return &ast.CompositeLit{ +- Type: typesinternal.TypeExpr(t, qual), +- }, true +- default: +- return typesinternal.ZeroExpr(t, qual) +- } +- +- // Avoid typesinternal.ZeroExpr here as we don't want to return nil. +- case *types.Map, *types.Slice: +- return &ast.CompositeLit{ +- Type: typesinternal.TypeExpr(t, qual), +- }, true +- +- case *types.Array: +- return &ast.CompositeLit{ +- Type: &ast.ArrayType{ +- Elt: typesinternal.TypeExpr(t.Elem(), qual), +- Len: &ast.BasicLit{ +- Kind: token.INT, Value: fmt.Sprintf("%v", t.Len()), +- }, +- }, +- }, true +- +- case *types.Chan: +- dir := ast.ChanDir(t.Dir()) +- if t.Dir() == types.SendRecv { +- dir = ast.SEND | ast.RECV +- } +- return &ast.CallExpr{ +- Fun: ast.NewIdent("make"), +- Args: []ast.Expr{ +- &ast.ChanType{ +- Dir: dir, +- Value: typesinternal.TypeExpr(t.Elem(), qual), +- }, +- }, +- }, true +- +- case *types.Signature: +- return &ast.FuncLit{ +- Type: typesinternal.TypeExpr(t, qual).(*ast.FuncType), +- // The body of the function literal contains a panic statement to +- // avoid type errors. 
+- Body: &ast.BlockStmt{ +- List: []ast.Stmt{ +- &ast.ExprStmt{ +- X: &ast.CallExpr{ +- Fun: ast.NewIdent("panic"), +- Args: []ast.Expr{ +- &ast.BasicLit{ +- Kind: token.STRING, +- Value: `"TODO"`, +- }, +- }, +- }, +- }, +- }, +- }, +- }, true +- +- case *types.Pointer: +- switch tt := types.Unalias(t.Elem()).(type) { +- case *types.Basic: +- return &ast.CallExpr{ +- Fun: &ast.Ident{ +- Name: "new", +- }, +- Args: []ast.Expr{ +- &ast.Ident{ +- Name: t.Elem().String(), +- }, +- }, +- }, true +- // Pointer to type parameter should return new(T) instead of &*new(T). +- case *types.TypeParam: +- return &ast.CallExpr{ +- Fun: &ast.Ident{ +- Name: "new", +- }, +- Args: []ast.Expr{ +- &ast.Ident{ +- Name: tt.Obj().Name(), +- }, +- }, +- }, true +- default: +- // TODO(hxjiang): & prefix only works if populateValue returns a +- // composite literal T{} or the expression new(T). +- expr, isValid := populateValue(t.Elem(), qual) +- return &ast.UnaryExpr{ +- Op: token.AND, +- X: expr, +- }, isValid +- } +- } +- return nil, false +-} +diff -urN a/gopls/internal/analysis/fillstruct/fillstruct_test.go b/gopls/internal/analysis/fillstruct/fillstruct_test.go +--- a/gopls/internal/analysis/fillstruct/fillstruct_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/analysis/fillstruct/fillstruct_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,36 +0,0 @@ +-// Copyright 2020 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package fillstruct_test +- +-import ( +- "go/token" +- "testing" +- +- "golang.org/x/tools/go/analysis" +- "golang.org/x/tools/go/analysis/analysistest" +- "golang.org/x/tools/gopls/internal/analysis/fillstruct" +-) +- +-// analyzer allows us to test the fillstruct code action using the analysistest +-// harness. (fillstruct used to be a gopls analyzer.) +-var analyzer = &analysis.Analyzer{ +- Name: "fillstruct", +- Doc: "test only", +- Run: func(pass *analysis.Pass) (any, error) { +- for _, f := range pass.Files { +- for _, diag := range fillstruct.Diagnose(f, token.NoPos, token.NoPos, pass.Pkg, pass.TypesInfo) { +- pass.Report(diag) +- } +- } +- return nil, nil +- }, +- URL: "https://pkg.go.dev/golang.org/x/tools/gopls/internal/analysis/fillstruct", +- RunDespiteErrors: true, +-} +- +-func Test(t *testing.T) { +- testdata := analysistest.TestData() +- analysistest.Run(t, testdata, analyzer, "a", "typeparams") +-} +diff -urN a/gopls/internal/analysis/fillstruct/testdata/src/a/a.go b/gopls/internal/analysis/fillstruct/testdata/src/a/a.go +--- a/gopls/internal/analysis/fillstruct/testdata/src/a/a.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/analysis/fillstruct/testdata/src/a/a.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,112 +0,0 @@ +-// Copyright 2020 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. 
+- +-package fillstruct +- +-import ( +- data "b" +- "go/ast" +- "go/token" +- "unsafe" +-) +- +-type emptyStruct struct{} +- +-var _ = emptyStruct{} +- +-type basicStruct struct { +- foo int +-} +- +-var _ = basicStruct{} // want `basicStruct literal has missing fields` +- +-type twoArgStruct struct { +- foo int +- bar string +-} +- +-var _ = twoArgStruct{} // want `twoArgStruct literal has missing fields` +- +-var _ = twoArgStruct{ // want `twoArgStruct literal has missing fields` +- bar: "bar", +-} +- +-type nestedStruct struct { +- bar string +- basic basicStruct +-} +- +-var _ = nestedStruct{} // want `nestedStruct literal has missing fields` +- +-var _ = data.B{} // want `fillstruct.B literal has missing fields` +- +-type typedStruct struct { +- m map[string]int +- s []int +- c chan int +- c1 <-chan int +- a [2]string +-} +- +-var _ = typedStruct{} // want `typedStruct literal has missing fields` +- +-type funStruct struct { +- fn func(i int) int +-} +- +-var _ = funStruct{} // want `funStruct literal has missing fields` +- +-type funStructComplex struct { +- fn func(i int, s string) (string, int) +-} +- +-var _ = funStructComplex{} // want `funStructComplex literal has missing fields` +- +-type funStructEmpty struct { +- fn func() +-} +- +-var _ = funStructEmpty{} // want `funStructEmpty literal has missing fields` +- +-type Foo struct { +- A int +-} +- +-type Bar struct { +- X *Foo +- Y *Foo +-} +- +-var _ = Bar{} // want `Bar literal has missing fields` +- +-type importedStruct struct { +- m map[*ast.CompositeLit]ast.Field +- s []ast.BadExpr +- a [3]token.Token +- c chan ast.EmptyStmt +- fn func(ast_decl ast.DeclStmt) ast.Ellipsis +- st ast.CompositeLit +-} +- +-var _ = importedStruct{} // want `importedStruct literal has missing fields` +- +-type pointerBuiltinStruct struct { +- b *bool +- s *string +- i *int +-} +- +-var _ = pointerBuiltinStruct{} // want `pointerBuiltinStruct literal has missing fields` +- +-var _ = []ast.BasicLit{ +- {}, // want `ast.BasicLit literal has missing fields` +-} +- +-var _ = []ast.BasicLit{{}} // want "ast.BasicLit literal has missing fields" +- +-type unsafeStruct struct { +- foo unsafe.Pointer +-} +- +-var _ = unsafeStruct{} // want `unsafeStruct literal has missing fields` +diff -urN a/gopls/internal/analysis/fillstruct/testdata/src/b/b.go b/gopls/internal/analysis/fillstruct/testdata/src/b/b.go +--- a/gopls/internal/analysis/fillstruct/testdata/src/b/b.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/analysis/fillstruct/testdata/src/b/b.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,6 +0,0 @@ +-package fillstruct +- +-type B struct { +- ExportedInt int +- unexportedInt int +-} +diff -urN a/gopls/internal/analysis/fillstruct/testdata/src/typeparams/typeparams.go b/gopls/internal/analysis/fillstruct/testdata/src/typeparams/typeparams.go +--- a/gopls/internal/analysis/fillstruct/testdata/src/typeparams/typeparams.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/analysis/fillstruct/testdata/src/typeparams/typeparams.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,54 +0,0 @@ +-// Copyright 2020 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. 
+- +-package fillstruct +- +-type emptyStruct[A any] struct{} +- +-var _ = emptyStruct[int]{} +- +-type basicStruct[T any] struct { +- foo T +-} +- +-var _ = basicStruct[int]{} // want `basicStruct\[int\] literal has missing fields` +- +-type twoArgStruct[F, B any] struct { +- foo F +- bar B +-} +- +-var _ = twoArgStruct[string, int]{} // want `twoArgStruct\[string, int\] literal has missing fields` +- +-var _ = twoArgStruct[int, string]{ // want `twoArgStruct\[int, string\] literal has missing fields` +- bar: "bar", +-} +- +-type nestedStruct struct { +- bar string +- basic basicStruct[int] +-} +- +-var _ = nestedStruct{} // want "nestedStruct literal has missing fields" +- +-func _[T any]() { +- type S struct{ t T } +- x := S{} // want "S" +- _ = x +-} +- +-func Test() { +- var tests = []struct { +- a, b, c, d, e, f, g, h, i, j, k, l, m, n, o, p string +- }{ +- {}, // want "anonymous struct{ a: string, b: string, c: string, ... } literal has missing fields" +- } +- for _, test := range tests { +- _ = test +- } +-} +- +-func _[T twoArgStruct[int, int]]() { +- _ = T{} // want "T literal has missing fields" +-} +diff -urN a/gopls/internal/analysis/fillswitch/doc.go b/gopls/internal/analysis/fillswitch/doc.go +--- a/gopls/internal/analysis/fillswitch/doc.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/analysis/fillswitch/doc.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,66 +0,0 @@ +-// Copyright 2024 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-// Package fillswitch identifies switches with missing cases. +-// +-// It reports a diagnostic for each type switch or 'enum' switch that +-// has missing cases, and suggests a fix to fill them in. +-// +-// The possible cases are: for a type switch, each accessible named +-// type T or pointer *T that is assignable to the interface type; and +-// for an 'enum' switch, each accessible named constant of the same +-// type as the switch value. +-// +-// For an 'enum' switch, it will suggest cases for all possible values of the +-// type. +-// +-// type Suit int8 +-// const ( +-// Spades Suit = iota +-// Hearts +-// Diamonds +-// Clubs +-// ) +-// +-// var s Suit +-// switch s { +-// case Spades: +-// } +-// +-// It will report a diagnostic with a suggested fix to fill in the remaining +-// cases: +-// +-// var s Suit +-// switch s { +-// case Spades: +-// case Hearts: +-// case Diamonds: +-// case Clubs: +-// default: +-// panic(fmt.Sprintf("unexpected Suit: %v", s)) +-// } +-// +-// For a type switch, it will suggest cases for all types that implement the +-// interface. +-// +-// var stmt ast.Stmt +-// switch stmt.(type) { +-// case *ast.IfStmt: +-// } +-// +-// It will report a diagnostic with a suggested fix to fill in the remaining +-// cases: +-// +-// var stmt ast.Stmt +-// switch stmt.(type) { +-// case *ast.IfStmt: +-// case *ast.ForStmt: +-// case *ast.RangeStmt: +-// case *ast.AssignStmt: +-// case *ast.GoStmt: +-// ... +-// default: +-// panic(fmt.Sprintf("unexpected ast.Stmt: %T", stmt)) +-// } +-package fillswitch +diff -urN a/gopls/internal/analysis/fillswitch/fillswitch.go b/gopls/internal/analysis/fillswitch/fillswitch.go +--- a/gopls/internal/analysis/fillswitch/fillswitch.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/analysis/fillswitch/fillswitch.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,300 +0,0 @@ +-// Copyright 2024 The Go Authors. All rights reserved. 
+-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package fillswitch +- +-import ( +- "bytes" +- "fmt" +- "go/ast" +- "go/token" +- "go/types" +- +- "golang.org/x/tools/go/analysis" +- "golang.org/x/tools/internal/typesinternal" +-) +- +-// Diagnose computes diagnostics for switch statements with missing cases +-// overlapping with the provided start and end position of file f. +-// +-// If either start or end is invalid, the entire file is inspected. +-func Diagnose(f *ast.File, start, end token.Pos, pkg *types.Package, info *types.Info) []analysis.Diagnostic { +- var diags []analysis.Diagnostic +- ast.Inspect(f, func(n ast.Node) bool { +- if n == nil { +- return true // pop +- } +- if start.IsValid() && n.End() < start || +- end.IsValid() && n.Pos() > end { +- return false // skip non-overlapping subtree +- } +- var fix *analysis.SuggestedFix +- switch n := n.(type) { +- case *ast.SwitchStmt: +- fix = suggestedFixSwitch(n, pkg, info) +- case *ast.TypeSwitchStmt: +- fix = suggestedFixTypeSwitch(n, pkg, info) +- } +- if fix != nil { +- diags = append(diags, analysis.Diagnostic{ +- Message: fix.Message, +- Pos: n.Pos(), +- End: n.Pos() + token.Pos(len("switch")), +- SuggestedFixes: []analysis.SuggestedFix{*fix}, +- }) +- } +- return true +- }) +- +- return diags +-} +- +-func suggestedFixTypeSwitch(stmt *ast.TypeSwitchStmt, pkg *types.Package, info *types.Info) *analysis.SuggestedFix { +- if hasDefaultCase(stmt.Body) { +- return nil +- } +- +- namedType := namedTypeFromTypeSwitch(stmt, info) +- if namedType == nil { +- return nil +- } +- +- existingCases := caseTypes(stmt.Body, info) +- // Gather accessible package-level concrete types +- // that implement the switch interface type. +- scope := namedType.Obj().Pkg().Scope() +- var buf bytes.Buffer +- for _, name := range scope.Names() { +- obj := scope.Lookup(name) +- if tname, ok := obj.(*types.TypeName); !ok || tname.IsAlias() { +- continue // not a defined type +- } +- +- if types.IsInterface(obj.Type()) { +- continue +- } +- +- samePkg := obj.Pkg() == pkg +- if !samePkg && !obj.Exported() { +- continue // inaccessible +- } +- +- var key caseType +- if types.AssignableTo(obj.Type(), namedType.Obj().Type()) { +- key.named = obj.Type().(*types.Named) +- } else if ptr := types.NewPointer(obj.Type()); types.AssignableTo(ptr, namedType.Obj().Type()) { +- key.named = obj.Type().(*types.Named) +- key.ptr = true +- } +- +- if key.named != nil { +- if existingCases[key] { +- continue +- } +- +- if buf.Len() > 0 { +- buf.WriteString("\t") +- } +- +- buf.WriteString("case ") +- if key.ptr { +- buf.WriteByte('*') +- } +- +- if p := key.named.Obj().Pkg(); p != pkg { +- // TODO: use the correct package name when the import is renamed +- buf.WriteString(p.Name()) +- buf.WriteByte('.') +- } +- buf.WriteString(key.named.Obj().Name()) +- buf.WriteString(":\n") +- } +- } +- +- if buf.Len() == 0 { +- return nil +- } +- +- switch assign := stmt.Assign.(type) { +- case *ast.AssignStmt: +- addDefaultCase(&buf, namedType, assign.Lhs[0]) +- case *ast.ExprStmt: +- if assert, ok := assign.X.(*ast.TypeAssertExpr); ok { +- addDefaultCase(&buf, namedType, assert.X) +- } +- } +- +- return &analysis.SuggestedFix{ +- Message: "Add cases for " + types.TypeString(namedType, typesinternal.NameRelativeTo(pkg)), +- TextEdits: []analysis.TextEdit{{ +- Pos: stmt.End() - token.Pos(len("}")), +- End: stmt.End() - token.Pos(len("}")), +- NewText: buf.Bytes(), +- }}, +- } +-} +- +-func suggestedFixSwitch(stmt 
*ast.SwitchStmt, pkg *types.Package, info *types.Info) *analysis.SuggestedFix { +- if hasDefaultCase(stmt.Body) { +- return nil +- } +- +- namedType, ok := info.TypeOf(stmt.Tag).(*types.Named) +- if !ok { +- return nil +- } +- +- existingCases := caseConsts(stmt.Body, info) +- // Gather accessible named constants of the same type as the switch value. +- scope := namedType.Obj().Pkg().Scope() +- var buf bytes.Buffer +- for _, name := range scope.Names() { +- obj := scope.Lookup(name) +- if c, ok := obj.(*types.Const); ok && +- (obj.Pkg() == pkg || obj.Exported()) && // accessible +- types.Identical(obj.Type(), namedType.Obj().Type()) && +- !existingCases[c] { +- +- if buf.Len() > 0 { +- buf.WriteString("\t") +- } +- +- buf.WriteString("case ") +- if c.Pkg() != pkg { +- buf.WriteString(c.Pkg().Name()) +- buf.WriteByte('.') +- } +- buf.WriteString(c.Name()) +- buf.WriteString(":\n") +- } +- } +- +- if buf.Len() == 0 { +- return nil +- } +- +- addDefaultCase(&buf, namedType, stmt.Tag) +- +- return &analysis.SuggestedFix{ +- Message: "Add cases for " + types.TypeString(namedType, typesinternal.NameRelativeTo(pkg)), +- TextEdits: []analysis.TextEdit{{ +- Pos: stmt.End() - token.Pos(len("}")), +- End: stmt.End() - token.Pos(len("}")), +- NewText: buf.Bytes(), +- }}, +- } +-} +- +-func addDefaultCase(buf *bytes.Buffer, named *types.Named, expr ast.Expr) { +- var dottedBuf bytes.Buffer +- // writeDotted emits a dotted path a.b.c. +- var writeDotted func(e ast.Expr) bool +- writeDotted = func(e ast.Expr) bool { +- switch e := e.(type) { +- case *ast.SelectorExpr: +- if !writeDotted(e.X) { +- return false +- } +- dottedBuf.WriteByte('.') +- dottedBuf.WriteString(e.Sel.Name) +- return true +- case *ast.Ident: +- dottedBuf.WriteString(e.Name) +- return true +- } +- return false +- } +- +- buf.WriteString("\tdefault:\n") +- typeName := fmt.Sprintf("%s.%s", named.Obj().Pkg().Name(), named.Obj().Name()) +- if writeDotted(expr) { +- // Switch tag expression is a dotted path. +- // It is safe to re-evaluate it in the default case. +- format := fmt.Sprintf("unexpected %s: %%#v", typeName) +- fmt.Fprintf(buf, "\t\tpanic(fmt.Sprintf(%q, %s))\n\t", format, dottedBuf.String()) +- } else { +- // Emit simpler message, without re-evaluating tag expression. 
+- fmt.Fprintf(buf, "\t\tpanic(%q)\n\t", "unexpected "+typeName) +- } +-} +- +-func namedTypeFromTypeSwitch(stmt *ast.TypeSwitchStmt, info *types.Info) *types.Named { +- switch assign := stmt.Assign.(type) { +- case *ast.ExprStmt: +- if typ, ok := assign.X.(*ast.TypeAssertExpr); ok { +- if named, ok := info.TypeOf(typ.X).(*types.Named); ok { +- return named +- } +- } +- +- case *ast.AssignStmt: +- if typ, ok := assign.Rhs[0].(*ast.TypeAssertExpr); ok { +- if named, ok := info.TypeOf(typ.X).(*types.Named); ok { +- return named +- } +- } +- } +- +- return nil +-} +- +-func hasDefaultCase(body *ast.BlockStmt) bool { +- for _, clause := range body.List { +- if len(clause.(*ast.CaseClause).List) == 0 { +- return true +- } +- } +- +- return false +-} +- +-func caseConsts(body *ast.BlockStmt, info *types.Info) map[*types.Const]bool { +- out := map[*types.Const]bool{} +- for _, stmt := range body.List { +- for _, e := range stmt.(*ast.CaseClause).List { +- if info.Types[e].Value == nil { +- continue // not a constant +- } +- +- if sel, ok := e.(*ast.SelectorExpr); ok { +- e = sel.Sel // replace pkg.C with C +- } +- +- if e, ok := e.(*ast.Ident); ok { +- if c, ok := info.Uses[e].(*types.Const); ok { +- out[c] = true +- } +- } +- } +- } +- +- return out +-} +- +-type caseType struct { +- named *types.Named +- ptr bool +-} +- +-func caseTypes(body *ast.BlockStmt, info *types.Info) map[caseType]bool { +- out := map[caseType]bool{} +- for _, stmt := range body.List { +- for _, e := range stmt.(*ast.CaseClause).List { +- if tv, ok := info.Types[e]; ok && tv.IsType() { +- t := tv.Type +- ptr := false +- if p, ok := t.(*types.Pointer); ok { +- t = p.Elem() +- ptr = true +- } +- +- if named, ok := t.(*types.Named); ok { +- out[caseType{named, ptr}] = true +- } +- } +- } +- } +- +- return out +-} +diff -urN a/gopls/internal/analysis/fillswitch/fillswitch_test.go b/gopls/internal/analysis/fillswitch/fillswitch_test.go +--- a/gopls/internal/analysis/fillswitch/fillswitch_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/analysis/fillswitch/fillswitch_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,36 +0,0 @@ +-// Copyright 2020 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package fillswitch_test +- +-import ( +- "go/token" +- "testing" +- +- "golang.org/x/tools/go/analysis" +- "golang.org/x/tools/go/analysis/analysistest" +- "golang.org/x/tools/gopls/internal/analysis/fillswitch" +-) +- +-// analyzer allows us to test the fillswitch code action using the analysistest +-// harness. 
+-var analyzer = &analysis.Analyzer{ +- Name: "fillswitch", +- Doc: "test only", +- Run: func(pass *analysis.Pass) (any, error) { +- for _, f := range pass.Files { +- for _, diag := range fillswitch.Diagnose(f, token.NoPos, token.NoPos, pass.Pkg, pass.TypesInfo) { +- pass.Report(diag) +- } +- } +- return nil, nil +- }, +- URL: "https://pkg.go.dev/golang.org/x/tools/gopls/internal/analysis/fillswitch", +- RunDespiteErrors: true, +-} +- +-func Test(t *testing.T) { +- testdata := analysistest.TestData() +- analysistest.Run(t, testdata, analyzer, "a") +-} +diff -urN a/gopls/internal/analysis/fillswitch/testdata/src/a/a.go b/gopls/internal/analysis/fillswitch/testdata/src/a/a.go +--- a/gopls/internal/analysis/fillswitch/testdata/src/a/a.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/analysis/fillswitch/testdata/src/a/a.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,76 +0,0 @@ +-// Copyright 2020 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package fillswitch +- +-import altb "b" +- +-type typeA int +- +-const ( +- typeAOne typeA = iota +- typeATwo +- typeAThree +-) +- +-func doSwitch() { +- var a typeA +- switch a { // want `Add cases for typeA` +- } +- +- switch a { // want `Add cases for typeA` +- case typeAOne: +- } +- +- switch a { +- case typeAOne: +- default: +- } +- +- switch a { +- case typeAOne: +- case typeATwo: +- case typeAThree: +- } +- +- var b altb.TypeB +- switch b { // want `Add cases for b.TypeB` +- case altb.TypeBOne: +- } +-} +- +-type notification interface { +- isNotification() +-} +- +-type notificationOne struct{} +- +-func (notificationOne) isNotification() {} +- +-type notificationTwo struct{} +- +-func (notificationTwo) isNotification() {} +- +-func doTypeSwitch() { +- var not notification +- switch not.(type) { // want `Add cases for notification` +- } +- +- switch not.(type) { // want `Add cases for notification` +- case notificationOne: +- } +- +- switch not.(type) { +- case notificationOne: +- case notificationTwo: +- } +- +- switch not.(type) { +- default: +- } +- +- var t data.ExportedInterface +- switch t { +- } +-} +diff -urN a/gopls/internal/analysis/fillswitch/testdata/src/b/b.go b/gopls/internal/analysis/fillswitch/testdata/src/b/b.go +--- a/gopls/internal/analysis/fillswitch/testdata/src/b/b.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/analysis/fillswitch/testdata/src/b/b.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,21 +0,0 @@ +-// Copyright 2020 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package b +- +-type TypeB int +- +-const ( +- TypeBOne TypeB = iota +- TypeBTwo +- TypeBThree +-) +- +-type ExportedInterface interface { +- isExportedInterface() +-} +- +-type notExportedType struct{} +- +-func (notExportedType) isExportedInterface() {} +diff -urN a/gopls/internal/analysis/infertypeargs/infertypeargs.go b/gopls/internal/analysis/infertypeargs/infertypeargs.go +--- a/gopls/internal/analysis/infertypeargs/infertypeargs.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/analysis/infertypeargs/infertypeargs.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,148 +0,0 @@ +-// Copyright 2021 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. 
+- +-package infertypeargs +- +-import ( +- "go/ast" +- "go/token" +- "go/types" +- +- "golang.org/x/tools/go/analysis" +- "golang.org/x/tools/go/analysis/passes/inspect" +- "golang.org/x/tools/go/ast/inspector" +- "golang.org/x/tools/internal/typeparams" +-) +- +-const Doc = `check for unnecessary type arguments in call expressions +- +-Explicit type arguments may be omitted from call expressions if they can be +-inferred from function arguments, or from other type arguments: +- +- func f[T any](T) {} +- +- func _() { +- f[string]("foo") // string could be inferred +- } +-` +- +-var Analyzer = &analysis.Analyzer{ +- Name: "infertypeargs", +- Doc: Doc, +- Requires: []*analysis.Analyzer{inspect.Analyzer}, +- Run: run, +- URL: "https://pkg.go.dev/golang.org/x/tools/gopls/internal/analysis/infertypeargs", +-} +- +-func run(pass *analysis.Pass) (any, error) { +- inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) +- for _, diag := range diagnose(pass.Fset, inspect, token.NoPos, token.NoPos, pass.Pkg, pass.TypesInfo) { +- pass.Report(diag) +- } +- return nil, nil +-} +- +-// Diagnose reports diagnostics describing simplifications to type +-// arguments overlapping with the provided start and end position. +-// +-// If start or end is token.NoPos, the corresponding bound is not checked +-// (i.e. if both start and end are NoPos, all call expressions are considered). +-func diagnose(fset *token.FileSet, inspect *inspector.Inspector, start, end token.Pos, pkg *types.Package, info *types.Info) []analysis.Diagnostic { +- var diags []analysis.Diagnostic +- +- nodeFilter := []ast.Node{(*ast.CallExpr)(nil)} +- inspect.Preorder(nodeFilter, func(node ast.Node) { +- call := node.(*ast.CallExpr) +- x, lbrack, indices, rbrack := typeparams.UnpackIndexExpr(call.Fun) +- ident := calledIdent(x) +- if ident == nil || len(indices) == 0 { +- return // no explicit args, nothing to do +- } +- +- if (start.IsValid() && call.End() < start) || (end.IsValid() && call.Pos() > end) { +- return // non-overlapping +- } +- +- // Confirm that instantiation actually occurred at this ident. +- idata, ok := info.Instances[ident] +- if !ok { +- return // something went wrong, but fail open +- } +- instance := idata.Type +- +- // Start removing argument expressions from the right, and check if we can +- // still infer the call expression. +- required := len(indices) // number of type expressions that are required +- for i := len(indices) - 1; i >= 0; i-- { +- var fun ast.Expr +- if i == 0 { +- // No longer an index expression: just use the parameterized operand. +- fun = x +- } else { +- fun = typeparams.PackIndexExpr(x, lbrack, indices[:i], indices[i-1].End()) +- } +- newCall := &ast.CallExpr{ +- Fun: fun, +- Lparen: call.Lparen, +- Args: call.Args, +- Ellipsis: call.Ellipsis, +- Rparen: call.Rparen, +- } +- info := &types.Info{ +- Instances: make(map[*ast.Ident]types.Instance), +- FileVersions: make(map[*ast.File]string), +- } +- if err := types.CheckExpr(fset, pkg, call.Pos(), newCall, info); err != nil { +- // Most likely inference failed. +- break +- } +- newIData := info.Instances[ident] +- newInstance := newIData.Type +- if !types.Identical(instance, newInstance) { +- // The inferred result type does not match the original result type, so +- // this simplification is not valid. 
+- break +- } +- required = i +- } +- if required < len(indices) { +- var s, e token.Pos +- var edit analysis.TextEdit +- if required == 0 { +- s, e = lbrack, rbrack+1 // erase the entire index +- edit = analysis.TextEdit{Pos: s, End: e} +- } else { +- s = indices[required].Pos() +- e = rbrack +- // erase from end of last arg to include last comma & white-spaces +- edit = analysis.TextEdit{Pos: indices[required-1].End(), End: e} +- } +- // Recheck that our (narrower) fixes overlap with the requested range. +- if (start.IsValid() && e < start) || (end.IsValid() && s > end) { +- return // non-overlapping +- } +- diags = append(diags, analysis.Diagnostic{ +- Pos: s, +- End: e, +- Message: "unnecessary type arguments", +- SuggestedFixes: []analysis.SuggestedFix{{ +- Message: "Simplify type arguments", +- TextEdits: []analysis.TextEdit{edit}, +- }}, +- }) +- } +- }) +- +- return diags +-} +- +-func calledIdent(x ast.Expr) *ast.Ident { +- switch x := x.(type) { +- case *ast.Ident: +- return x +- case *ast.SelectorExpr: +- return x.Sel +- } +- return nil +-} +diff -urN a/gopls/internal/analysis/infertypeargs/infertypeargs_test.go b/gopls/internal/analysis/infertypeargs/infertypeargs_test.go +--- a/gopls/internal/analysis/infertypeargs/infertypeargs_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/analysis/infertypeargs/infertypeargs_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,17 +0,0 @@ +-// Copyright 2021 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package infertypeargs_test +- +-import ( +- "testing" +- +- "golang.org/x/tools/go/analysis/analysistest" +- "golang.org/x/tools/gopls/internal/analysis/infertypeargs" +-) +- +-func Test(t *testing.T) { +- testdata := analysistest.TestData() +- analysistest.RunWithSuggestedFixes(t, testdata, infertypeargs.Analyzer, "a") +-} +diff -urN a/gopls/internal/analysis/infertypeargs/testdata/src/a/basic.go b/gopls/internal/analysis/infertypeargs/testdata/src/a/basic.go +--- a/gopls/internal/analysis/infertypeargs/testdata/src/a/basic.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/analysis/infertypeargs/testdata/src/a/basic.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,20 +0,0 @@ +-// Copyright 2021 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-// This file contains tests for the infertyepargs checker. +- +-package a +- +-func f[T any](T) {} +- +-func g[T any]() T { var x T; return x } +- +-func h[P interface{ ~*T }, T any]() {} +- +-func _() { +- f[string]("hello") // want "unnecessary type arguments" +- f[int](2) // want "unnecessary type arguments" +- _ = g[int]() +- h[*int, int]() // want "unnecessary type arguments" +-} +diff -urN a/gopls/internal/analysis/infertypeargs/testdata/src/a/basic.go.golden b/gopls/internal/analysis/infertypeargs/testdata/src/a/basic.go.golden +--- a/gopls/internal/analysis/infertypeargs/testdata/src/a/basic.go.golden 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/analysis/infertypeargs/testdata/src/a/basic.go.golden 1969-12-31 18:00:00.000000000 -0600 +@@ -1,20 +0,0 @@ +-// Copyright 2021 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-// This file contains tests for the infertyepargs checker. 
+- +-package a +- +-func f[T any](T) {} +- +-func g[T any]() T { var x T; return x } +- +-func h[P interface{ ~*T }, T any]() {} +- +-func _() { +- f("hello") // want "unnecessary type arguments" +- f(2) // want "unnecessary type arguments" +- _ = g[int]() +- h[*int]() // want "unnecessary type arguments" +-} +diff -urN a/gopls/internal/analysis/infertypeargs/testdata/src/a/imported/imported.go b/gopls/internal/analysis/infertypeargs/testdata/src/a/imported/imported.go +--- a/gopls/internal/analysis/infertypeargs/testdata/src/a/imported/imported.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/analysis/infertypeargs/testdata/src/a/imported/imported.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,7 +0,0 @@ +-// Copyright 2021 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package imported +- +-func F[T any](T) {} +diff -urN a/gopls/internal/analysis/infertypeargs/testdata/src/a/imported.go b/gopls/internal/analysis/infertypeargs/testdata/src/a/imported.go +--- a/gopls/internal/analysis/infertypeargs/testdata/src/a/imported.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/analysis/infertypeargs/testdata/src/a/imported.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,12 +0,0 @@ +-// Copyright 2021 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package a +- +-import "a/imported" +- +-func _() { +- var x int +- imported.F[int](x) // want "unnecessary type arguments" +-} +diff -urN a/gopls/internal/analysis/infertypeargs/testdata/src/a/imported.go.golden b/gopls/internal/analysis/infertypeargs/testdata/src/a/imported.go.golden +--- a/gopls/internal/analysis/infertypeargs/testdata/src/a/imported.go.golden 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/analysis/infertypeargs/testdata/src/a/imported.go.golden 1969-12-31 18:00:00.000000000 -0600 +@@ -1,12 +0,0 @@ +-// Copyright 2021 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package a +- +-import "a/imported" +- +-func _() { +- var x int +- imported.F(x) // want "unnecessary type arguments" +-} +diff -urN a/gopls/internal/analysis/infertypeargs/testdata/src/a/notypechange.go b/gopls/internal/analysis/infertypeargs/testdata/src/a/notypechange.go +--- a/gopls/internal/analysis/infertypeargs/testdata/src/a/notypechange.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/analysis/infertypeargs/testdata/src/a/notypechange.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,26 +0,0 @@ +-// Copyright 2021 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-// We should not suggest removing type arguments if doing so would change the +-// resulting type. 
+- +-package a +- +-func id[T any](t T) T { return t } +- +-var _ = id[int](1) // want "unnecessary type arguments" +-var _ = id[string]("foo") // want "unnecessary type arguments" +-var _ = id[int64](2) +- +-func pair[T any](t T) (T, T) { return t, t } +- +-var _, _ = pair[int](3) // want "unnecessary type arguments" +-var _, _ = pair[int64](3) +- +-func noreturn[T any](t T) {} +- +-func _() { +- noreturn[int64](4) +- noreturn[int](4) // want "unnecessary type arguments" +-} +diff -urN a/gopls/internal/analysis/infertypeargs/testdata/src/a/notypechange.go.golden b/gopls/internal/analysis/infertypeargs/testdata/src/a/notypechange.go.golden +--- a/gopls/internal/analysis/infertypeargs/testdata/src/a/notypechange.go.golden 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/analysis/infertypeargs/testdata/src/a/notypechange.go.golden 1969-12-31 18:00:00.000000000 -0600 +@@ -1,26 +0,0 @@ +-// Copyright 2021 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-// We should not suggest removing type arguments if doing so would change the +-// resulting type. +- +-package a +- +-func id[T any](t T) T { return t } +- +-var _ = id(1) // want "unnecessary type arguments" +-var _ = id("foo") // want "unnecessary type arguments" +-var _ = id[int64](2) +- +-func pair[T any](t T) (T, T) { return t, t } +- +-var _, _ = pair(3) // want "unnecessary type arguments" +-var _, _ = pair[int64](3) +- +-func noreturn[T any](t T) {} +- +-func _() { +- noreturn[int64](4) +- noreturn(4) // want "unnecessary type arguments" +-} +diff -urN a/gopls/internal/analysis/maprange/cmd/maprange/main.go b/gopls/internal/analysis/maprange/cmd/maprange/main.go +--- a/gopls/internal/analysis/maprange/cmd/maprange/main.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/analysis/maprange/cmd/maprange/main.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,14 +0,0 @@ +-// Copyright 2025 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-// The maprange command applies the golang.org/x/tools/gopls/internal/analysis/maprange +-// analysis to the specified packages of Go source code. +-package main +- +-import ( +- "golang.org/x/tools/go/analysis/singlechecker" +- "golang.org/x/tools/gopls/internal/analysis/maprange" +-) +- +-func main() { singlechecker.Main(maprange.Analyzer) } +diff -urN a/gopls/internal/analysis/maprange/doc.go b/gopls/internal/analysis/maprange/doc.go +--- a/gopls/internal/analysis/maprange/doc.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/analysis/maprange/doc.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,37 +0,0 @@ +-// Copyright 2025 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-// Package maprange defines an Analyzer that checks for redundant use +-// of the functions maps.Keys and maps.Values in "for" statements with +-// "range" clauses. 
+-// +-// # Analyzer maprange +-// +-// maprange: checks for unnecessary calls to maps.Keys and maps.Values in range statements +-// +-// Consider a loop written like this: +-// +-// for val := range maps.Values(m) { +-// fmt.Println(val) +-// } +-// +-// This should instead be written without the call to maps.Values: +-// +-// for _, val := range m { +-// fmt.Println(val) +-// } +-// +-// golang.org/x/exp/maps returns slices for Keys/Values instead of iterators, +-// but unnecessary calls should similarly be removed: +-// +-// for _, key := range maps.Keys(m) { +-// fmt.Println(key) +-// } +-// +-// should be rewritten as: +-// +-// for key := range m { +-// fmt.Println(key) +-// } +-package maprange +diff -urN a/gopls/internal/analysis/maprange/main.go b/gopls/internal/analysis/maprange/main.go +--- a/gopls/internal/analysis/maprange/main.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/analysis/maprange/main.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,15 +0,0 @@ +-// Copyright 2024 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-//go:build ignore +- +-// The unusedfunc command runs the maprange analyzer. +-package main +- +-import ( +- "golang.org/x/tools/go/analysis/singlechecker" +- "golang.org/x/tools/gopls/internal/analysis/maprange" +-) +- +-func main() { singlechecker.Main(maprange.Analyzer) } +diff -urN a/gopls/internal/analysis/maprange/maprange.go b/gopls/internal/analysis/maprange/maprange.go +--- a/gopls/internal/analysis/maprange/maprange.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/analysis/maprange/maprange.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,158 +0,0 @@ +-// Copyright 2025 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package maprange +- +-import ( +- _ "embed" +- "fmt" +- "go/ast" +- "go/types" +- +- "golang.org/x/tools/go/analysis" +- "golang.org/x/tools/go/ast/edge" +- "golang.org/x/tools/go/ast/inspector" +- "golang.org/x/tools/gopls/internal/util/cursorutil" +- "golang.org/x/tools/internal/analysisinternal" +- typeindexanalyzer "golang.org/x/tools/internal/analysisinternal/typeindex" +- "golang.org/x/tools/internal/astutil" +- "golang.org/x/tools/internal/typesinternal/typeindex" +- "golang.org/x/tools/internal/versions" +-) +- +-//go:embed doc.go +-var doc string +- +-var Analyzer = &analysis.Analyzer{ +- Name: "maprange", +- Doc: analysisinternal.MustExtractDoc(doc, "maprange"), +- URL: "https://pkg.go.dev/golang.org/x/tools/gopls/internal/analysis/maprange", +- Requires: []*analysis.Analyzer{typeindexanalyzer.Analyzer}, +- Run: run, +-} +- +-// This is a variable because the package name is different in Google's code base. +-var xmaps = "golang.org/x/exp/maps" +- +-func run(pass *analysis.Pass) (any, error) { +- switch pass.Pkg.Path() { +- case "maps", xmaps: +- // These packages know how to use their own APIs. 
+- return nil, nil +- } +- var ( +- index = pass.ResultOf[typeindexanalyzer.Analyzer].(*typeindex.Index) +- mapsKeys = index.Object("maps", "Keys") +- mapsValues = index.Object("maps", "Values") +- xmapsKeys = index.Object(xmaps, "Keys") +- xmapsValues = index.Object(xmaps, "Values") +- ) +- for _, callee := range []types.Object{mapsKeys, mapsValues, xmapsKeys, xmapsValues} { +- for curCall := range index.Calls(callee) { +- if astutil.IsChildOf(curCall, edge.RangeStmt_X) { +- analyzeRangeStmt(pass, callee, curCall) +- } +- } +- } +- return nil, nil +-} +- +-// analyzeRangeStmt analyzes range statements iterating over calls to maps.Keys +-// or maps.Values (from the standard library "maps" or "golang.org/x/exp/maps"). +-// +-// It reports a diagnostic with a suggested fix to simplify the loop by removing +-// the unnecessary function call and adjusting range variables, if possible. +-// For certain patterns involving x/exp/maps.Keys before Go 1.22, it reports +-// a diagnostic about potential incorrect usage without a suggested fix. +-// No diagnostic is reported if the range statement doesn't require changes. +-func analyzeRangeStmt(pass *analysis.Pass, callee types.Object, curCall inspector.Cursor) { +- var ( +- call = curCall.Node().(*ast.CallExpr) +- rangeStmt = curCall.Parent().Node().(*ast.RangeStmt) +- pkg = callee.Pkg().Path() +- fn = callee.Name() +- ) +- var edits []analysis.TextEdit +- +- // Check if the call to maps.Keys or maps.Values can be removed/replaced. +- // Example: +- // for range maps.Keys(m) +- // ^^^^^^^^^ removeCall +- // for i, _ := range maps.Keys(m) +- // ^^^^^^^^^ replace with `len` +- // +- // If we have: for i, k := range maps.Keys(m) (only possible using x/exp/maps) +- // or: for i, v = range maps.Values(m) +- // do not remove the call. +- removeCall := !isSet(rangeStmt.Key) || !isSet(rangeStmt.Value) +- replace := "" +- if pkg == xmaps && isSet(rangeStmt.Key) && rangeStmt.Value == nil { +- // If we have: for i := range maps.Keys(m) (using x/exp/maps), +- // Replace with: for i := range len(m) +- replace = "len" +- canRangeOverInt := fileUses(pass.TypesInfo, curCall, "go1.22") +- if !canRangeOverInt { +- pass.Report(analysis.Diagnostic{ +- Pos: call.Pos(), +- End: call.End(), +- Message: fmt.Sprintf("likely incorrect use of %s.%s (returns a slice)", pkg, fn), +- }) +- return +- } +- } +- if removeCall { +- edits = append(edits, analysis.TextEdit{ +- Pos: call.Fun.Pos(), +- End: call.Fun.End(), +- NewText: []byte(replace)}) +- } +- // Check if the key of the range statement should be removed. +- // Example: +- // for _, k := range maps.Keys(m) +- // ^^^ removeKey ^^^^^^^^^ removeCall +- removeKey := pkg == xmaps && fn == "Keys" && !isSet(rangeStmt.Key) && isSet(rangeStmt.Value) +- if removeKey { +- edits = append(edits, analysis.TextEdit{ +- Pos: rangeStmt.Key.Pos(), +- End: rangeStmt.Value.Pos(), +- }) +- } +- // Check if a key should be inserted to the range statement. 
+- // Example: +- // for _, v := range maps.Values(m) +- // ^^^ addKey ^^^^^^^^^^^ removeCall +- addKey := pkg == "maps" && fn == "Values" && isSet(rangeStmt.Key) +- if addKey { +- edits = append(edits, analysis.TextEdit{ +- Pos: rangeStmt.Key.Pos(), +- End: rangeStmt.Key.Pos(), +- NewText: []byte("_, "), +- }) +- } +- +- if len(edits) > 0 { +- pass.Report(analysis.Diagnostic{ +- Pos: call.Pos(), +- End: call.End(), +- Message: fmt.Sprintf("unnecessary and inefficient call of %s.%s", pkg, fn), +- SuggestedFixes: []analysis.SuggestedFix{{ +- Message: fmt.Sprintf("Remove unnecessary call to %s.%s", pkg, fn), +- TextEdits: edits, +- }}, +- }) +- } +-} +- +-// isSet reports whether an ast.Expr is a non-nil expression that is not the blank identifier. +-func isSet(expr ast.Expr) bool { +- ident, ok := expr.(*ast.Ident) +- return expr != nil && (!ok || ident.Name != "_") +-} +- +-// fileUses reports whether the file containing the specified cursor +-// uses at least the specified version of Go (e.g. "go1.24"). +-func fileUses(info *types.Info, c inspector.Cursor, version string) bool { +- file, _ := cursorutil.FirstEnclosing[*ast.File](c) +- return !versions.Before(info.FileVersions[file], version) +-} +diff -urN a/gopls/internal/analysis/maprange/maprange_test.go b/gopls/internal/analysis/maprange/maprange_test.go +--- a/gopls/internal/analysis/maprange/maprange_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/analysis/maprange/maprange_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,23 +0,0 @@ +-// Copyright 2025 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package maprange_test +- +-import ( +- "golang.org/x/tools/go/analysis/analysistest" +- "golang.org/x/tools/gopls/internal/analysis/maprange" +- "golang.org/x/tools/internal/testfiles" +- "path/filepath" +- "testing" +-) +- +-func TestBasic(t *testing.T) { +- dir := testfiles.ExtractTxtarFileToTmp(t, filepath.Join(analysistest.TestData(), "basic.txtar")) +- analysistest.RunWithSuggestedFixes(t, dir, maprange.Analyzer, "maprange") +-} +- +-func TestOld(t *testing.T) { +- dir := testfiles.ExtractTxtarFileToTmp(t, filepath.Join(analysistest.TestData(), "old.txtar")) +- analysistest.RunWithSuggestedFixes(t, dir, maprange.Analyzer, "maprange") +-} +diff -urN a/gopls/internal/analysis/maprange/testdata/basic.txtar b/gopls/internal/analysis/maprange/testdata/basic.txtar +--- a/gopls/internal/analysis/maprange/testdata/basic.txtar 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/analysis/maprange/testdata/basic.txtar 1969-12-31 18:00:00.000000000 -0600 +@@ -1,209 +0,0 @@ +-Test of fixing redundant calls to maps.Keys and maps.Values +-(both stdlib "maps" and "golang.org/x/exp/maps") for Go 1.24. 
+- +--- go.mod -- +-module maprange +- +-require golang.org/x/exp v0.0.0 +- +-replace golang.org/x/exp => ./exp +- +-go 1.24 +- +--- basic.go -- +-package basic +- +-import "maps" +- +-func _() { +- m := make(map[int]int) +- +- for range maps.Keys(m) { // want `unnecessary and inefficient call of maps.Keys` +- } +- +- for range maps.Values(m) { // want `unnecessary and inefficient call of maps.Values` +- } +- +- var x struct { +- Map map[int]int +- } +- x.Map = make(map[int]int) +- for x.Map[1] = range maps.Keys(m) { // want `unnecessary and inefficient call of maps.Keys` +- } +- +- for x.Map[2] = range maps.Values(m) { // want `unnecessary and inefficient call of maps.Values` +- } +- +- for k := range maps.Keys(m) { // want `unnecessary and inefficient call of maps.Keys` +- _ = k +- } +- +- for v := range maps.Values(m) { // want `unnecessary and inefficient call of maps.Values` +- _ = v +- } +- +- for range maps.Keys(x.Map) { // want `unnecessary and inefficient call of maps.Keys` +- } +- +- for /* comment */ k := range /* comment */ maps.Keys(/* comment */ m) { // want `unnecessary and inefficient call of maps.Keys` +- _ = k +- } +-} +- +--- basic.go.golden -- +-package basic +- +-import "maps" +- +-func _() { +- m := make(map[int]int) +- +- for range m { // want `unnecessary and inefficient call of maps.Keys` +- } +- +- for range m { // want `unnecessary and inefficient call of maps.Values` +- } +- +- var x struct { +- Map map[int]int +- } +- x.Map = make(map[int]int) +- for x.Map[1] = range m { // want `unnecessary and inefficient call of maps.Keys` +- } +- +- for _, x.Map[2] = range m { // want `unnecessary and inefficient call of maps.Values` +- } +- +- for k := range m { // want `unnecessary and inefficient call of maps.Keys` +- _ = k +- } +- +- for _, v := range m { // want `unnecessary and inefficient call of maps.Values` +- _ = v +- } +- +- for range x.Map { // want `unnecessary and inefficient call of maps.Keys` +- } +- +- for /* comment */ k := range /* comment */ /* comment */ m { // want `unnecessary and inefficient call of maps.Keys` +- _ = k +- } +-} +- +--- xmaps.go -- +-package basic +- +-import "golang.org/x/exp/maps" +- +-func _() { +- m := make(map[int]int) +- +- for range maps.Keys(m) { // want `unnecessary and inefficient call of golang.org/x/exp/maps.Keys` +- } +- +- for range maps.Values(m) { // want `unnecessary and inefficient call of golang.org/x/exp/maps.Values` +- } +- +- for i := range maps.Values(m) { // want `unnecessary and inefficient call of golang.org/x/exp/maps.Values` +- _ = i +- } +- +- var x struct { +- Map map[int]int +- } +- x.Map = make(map[int]int) +- for _, x.Map[1] = range maps.Keys(m) { // want `unnecessary and inefficient call of golang.org/x/exp/maps.Keys` +- } +- +- for _, x.Map[2] = range maps.Values(m) { // want `unnecessary and inefficient call of golang.org/x/exp/maps.Values` +- } +- +- for _, k := range maps.Keys(m) { // want `unnecessary and inefficient call of golang.org/x/exp/maps.Keys` +- _ = k +- } +- +- for _, v := range maps.Values(m) { // want `unnecessary and inefficient call of golang.org/x/exp/maps.Values` +- _ = v +- } +- +- for range maps.Keys(x.Map) { // want `unnecessary and inefficient call of golang.org/x/exp/maps.Keys` +- } +- +- for i, k := range maps.Keys(m) { // ok: this can't be straightforwardly rewritten +- _, _ = i, k +- } +- +- for _, _ = range maps.Values(m) { // want `unnecessary and inefficient call of golang.org/x/exp/maps.Values` +- } +-} +- +--- xmaps.go.golden -- +-package basic +- +-import 
"golang.org/x/exp/maps" +- +-func _() { +- m := make(map[int]int) +- +- for range m { // want `unnecessary and inefficient call of golang.org/x/exp/maps.Keys` +- } +- +- for range m { // want `unnecessary and inefficient call of golang.org/x/exp/maps.Values` +- } +- +- for i := range len(m) { // want `unnecessary and inefficient call of golang.org/x/exp/maps.Values` +- _ = i +- } +- +- var x struct { +- Map map[int]int +- } +- x.Map = make(map[int]int) +- for x.Map[1] = range m { // want `unnecessary and inefficient call of golang.org/x/exp/maps.Keys` +- } +- +- for _, x.Map[2] = range m { // want `unnecessary and inefficient call of golang.org/x/exp/maps.Values` +- } +- +- for k := range m { // want `unnecessary and inefficient call of golang.org/x/exp/maps.Keys` +- _ = k +- } +- +- for _, v := range m { // want `unnecessary and inefficient call of golang.org/x/exp/maps.Values` +- _ = v +- } +- +- for range x.Map { // want `unnecessary and inefficient call of golang.org/x/exp/maps.Keys` +- } +- +- for i, k := range maps.Keys(m) { // ok: this can't be straightforwardly rewritten +- _, _ = i, k +- } +- +- for _, _ = range m { // want `unnecessary and inefficient call of golang.org/x/exp/maps.Values` +- } +-} +- +--- exp/go.mod -- +-module golang.org/x/exp +- +-go 1.24 +- +--- exp/maps/maps.go -- +-package maps +- +-func Keys[M ~map[K]V, K comparable, V any](m M) []K { +- r := make([]K, 0, len(m)) +- for k := range m { +- r = append(r, k) +- } +- return r +-} +- +-func Values[M ~map[K]V, K comparable, V any](m M) []V { +- r := make([]V, 0, len(m)) +- for _, v := range m { +- r = append(r, v) +- } +- return r +-} +\ No newline at end of file +diff -urN a/gopls/internal/analysis/maprange/testdata/old.txtar b/gopls/internal/analysis/maprange/testdata/old.txtar +--- a/gopls/internal/analysis/maprange/testdata/old.txtar 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/analysis/maprange/testdata/old.txtar 1969-12-31 18:00:00.000000000 -0600 +@@ -1,62 +0,0 @@ +-Test of fixing redundant calls to maps.Keys and maps.Values +-(both stdlib "maps" and "golang.org/x/exp/maps") for Go 1.21, +-before range over int made suggesting a fix for a rare case easier. 
+- +--- go.mod -- +-module maprange +- +-require golang.org/x/exp v0.0.0 +- +-replace golang.org/x/exp => ./exp +- +-go 1.21 +- +--- old.go -- +-package old +- +-import "golang.org/x/exp/maps" +- +-func _() { +- m := make(map[int]int) +- +- for i := range maps.Keys(m) { // want `likely incorrect use of golang.org/x/exp/maps.Keys \(returns a slice\)` +- _ = i +- } +-} +- +--- old.go.golden -- +-package old +- +-import "golang.org/x/exp/maps" +- +-func _() { +- m := make(map[int]int) +- +- for i := range maps.Keys(m) { // want `likely incorrect use of golang.org/x/exp/maps.Keys \(returns a slice\)` +- _ = i +- } +-} +- +--- exp/go.mod -- +-module golang.org/x/exp +- +-go 1.21 +- +--- exp/maps/maps.go -- +-package maps +- +-func Keys[M ~map[K]V, K comparable, V any](m M) []K { +- r := make([]K, 0, len(m)) +- for k := range m { +- r = append(r, k) +- } +- return r +-} +- +-func Values[M ~map[K]V, K comparable, V any](m M) []V { +- r := make([]V, 0, len(m)) +- for _, v := range m { +- r = append(r, v) +- } +- return r +-} +\ No newline at end of file +diff -urN a/gopls/internal/analysis/modernize/cmd/modernize/main.go b/gopls/internal/analysis/modernize/cmd/modernize/main.go +--- a/gopls/internal/analysis/modernize/cmd/modernize/main.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/analysis/modernize/cmd/modernize/main.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,21 +0,0 @@ +-// Copyright 2024 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-// The modernize command suggests (or, with -fix, applies) fixes that +-// clarify Go code by using more modern features. +-// +-// See [golang.org/x/tools/go/analysis/passes/modernize] for details. +-// +-// Deprecated: use 'go run +-// golang.org/x/tools/go/passes/modernize/cmd/modernize' instead. In +-// due course the modernizer suite will be accessed through "go fix"; +-// see https://go.dev/issue/71859. +-package main +- +-import ( +- "golang.org/x/tools/go/analysis/multichecker" +- "golang.org/x/tools/go/analysis/passes/modernize" +-) +- +-func main() { multichecker.Main(modernize.Suite...) } +diff -urN a/gopls/internal/analysis/nonewvars/doc.go b/gopls/internal/analysis/nonewvars/doc.go +--- a/gopls/internal/analysis/nonewvars/doc.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/analysis/nonewvars/doc.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,22 +0,0 @@ +-// Copyright 2020 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-// Package nonewvars defines an Analyzer that applies suggested fixes +-// to errors of the type "no new variables on left side of :=". +-// +-// # Analyzer nonewvars +-// +-// nonewvars: suggested fixes for "no new vars on left side of :=" +-// +-// This checker provides suggested fixes for type errors of the +-// type "no new vars on left side of :=". For example: +-// +-// z := 1 +-// z := 2 +-// +-// will turn into +-// +-// z := 1 +-// z = 2 +-package nonewvars +diff -urN a/gopls/internal/analysis/nonewvars/nonewvars.go b/gopls/internal/analysis/nonewvars/nonewvars.go +--- a/gopls/internal/analysis/nonewvars/nonewvars.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/analysis/nonewvars/nonewvars.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,73 +0,0 @@ +-// Copyright 2020 The Go Authors. All rights reserved. 
+-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-// Package nonewvars defines an Analyzer that applies suggested fixes +-// to errors of the type "no new variables on left side of :=". +-package nonewvars +- +-import ( +- _ "embed" +- "go/ast" +- "go/token" +- +- "golang.org/x/tools/go/analysis" +- "golang.org/x/tools/go/analysis/passes/inspect" +- "golang.org/x/tools/go/ast/inspector" +- "golang.org/x/tools/gopls/internal/util/cursorutil" +- "golang.org/x/tools/internal/analysisinternal" +- "golang.org/x/tools/internal/typesinternal" +-) +- +-//go:embed doc.go +-var doc string +- +-var Analyzer = &analysis.Analyzer{ +- Name: "nonewvars", +- Doc: analysisinternal.MustExtractDoc(doc, "nonewvars"), +- Requires: []*analysis.Analyzer{inspect.Analyzer}, +- Run: run, +- RunDespiteErrors: true, +- URL: "https://pkg.go.dev/golang.org/x/tools/gopls/internal/analysis/nonewvars", +-} +- +-func run(pass *analysis.Pass) (any, error) { +- inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) +- +- for _, typeErr := range pass.TypeErrors { +- if typeErr.Msg != "no new variables on left side of :=" { +- continue // irrelevant error +- } +- _, start, end, ok := typesinternal.ErrorCodeStartEnd(typeErr) +- if !ok { +- continue // can't get position info +- } +- curErr, ok := inspect.Root().FindByPos(start, end) +- if !ok { +- continue // can't find errant node +- } +- +- // Find enclosing assignment (which may be curErr itself). +- assign, _ := cursorutil.FirstEnclosing[*ast.AssignStmt](curErr) +- if assign == nil { +- continue // no enclosing assignment +- } +- if assign.Tok != token.DEFINE { +- continue // not a := statement +- } +- +- pass.Report(analysis.Diagnostic{ +- Pos: assign.TokPos, +- End: assign.TokPos + token.Pos(len(":=")), +- Message: typeErr.Msg, +- SuggestedFixes: []analysis.SuggestedFix{{ +- Message: "Change ':=' to '='", +- TextEdits: []analysis.TextEdit{{ +- Pos: assign.TokPos, +- End: assign.TokPos + token.Pos(len(":")), +- }}, +- }}, +- }) +- } +- return nil, nil +-} +diff -urN a/gopls/internal/analysis/nonewvars/nonewvars_test.go b/gopls/internal/analysis/nonewvars/nonewvars_test.go +--- a/gopls/internal/analysis/nonewvars/nonewvars_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/analysis/nonewvars/nonewvars_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,17 +0,0 @@ +-// Copyright 2020 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package nonewvars_test +- +-import ( +- "testing" +- +- "golang.org/x/tools/go/analysis/analysistest" +- "golang.org/x/tools/gopls/internal/analysis/nonewvars" +-) +- +-func Test(t *testing.T) { +- testdata := analysistest.TestData() +- analysistest.RunWithSuggestedFixes(t, testdata, nonewvars.Analyzer, "a", "typeparams") +-} +diff -urN a/gopls/internal/analysis/nonewvars/testdata/src/a/a.go b/gopls/internal/analysis/nonewvars/testdata/src/a/a.go +--- a/gopls/internal/analysis/nonewvars/testdata/src/a/a.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/analysis/nonewvars/testdata/src/a/a.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,16 +0,0 @@ +-// Copyright 2020 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. 
+- +-package nonewvars +- +-import "log" +- +-func x() { +- z := 1 +- z := 2 // want "no new variables on left side of :=" +- +- _, z := 3, 100 // want "no new variables on left side of :=" +- +- log.Println(z) +-} +diff -urN a/gopls/internal/analysis/nonewvars/testdata/src/a/a.go.golden b/gopls/internal/analysis/nonewvars/testdata/src/a/a.go.golden +--- a/gopls/internal/analysis/nonewvars/testdata/src/a/a.go.golden 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/analysis/nonewvars/testdata/src/a/a.go.golden 1969-12-31 18:00:00.000000000 -0600 +@@ -1,16 +0,0 @@ +-// Copyright 2020 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package nonewvars +- +-import "log" +- +-func x() { +- z := 1 +- z = 2 // want "no new variables on left side of :=" +- +- _, z = 3, 100 // want "no new variables on left side of :=" +- +- log.Println(z) +-} +diff -urN a/gopls/internal/analysis/nonewvars/testdata/src/typeparams/a.go b/gopls/internal/analysis/nonewvars/testdata/src/typeparams/a.go +--- a/gopls/internal/analysis/nonewvars/testdata/src/typeparams/a.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/analysis/nonewvars/testdata/src/typeparams/a.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,6 +0,0 @@ +-package nonewvars +- +-func hello[T any]() int { +- var z T +- z := 1 // want "no new variables on left side of :=" +-} +diff -urN a/gopls/internal/analysis/nonewvars/testdata/src/typeparams/a.go.golden b/gopls/internal/analysis/nonewvars/testdata/src/typeparams/a.go.golden +--- a/gopls/internal/analysis/nonewvars/testdata/src/typeparams/a.go.golden 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/analysis/nonewvars/testdata/src/typeparams/a.go.golden 1969-12-31 18:00:00.000000000 -0600 +@@ -1,6 +0,0 @@ +-package nonewvars +- +-func hello[T any]() int { +- var z T +- z = 1 // want "no new variables on left side of :=" +-} +diff -urN a/gopls/internal/analysis/noresultvalues/doc.go b/gopls/internal/analysis/noresultvalues/doc.go +--- a/gopls/internal/analysis/noresultvalues/doc.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/analysis/noresultvalues/doc.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,21 +0,0 @@ +-// Copyright 2023 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-// Package noresultvalues defines an Analyzer that applies suggested fixes +-// to errors of the type "no result values expected". +-// +-// # Analyzer noresultvalues +-// +-// noresultvalues: suggested fixes for unexpected return values +-// +-// This checker provides suggested fixes for type errors of the +-// type "no result values expected" or "too many return values". +-// For example: +-// +-// func z() { return nil } +-// +-// will turn into +-// +-// func z() { return } +-package noresultvalues +diff -urN a/gopls/internal/analysis/noresultvalues/noresultvalues.go b/gopls/internal/analysis/noresultvalues/noresultvalues.go +--- a/gopls/internal/analysis/noresultvalues/noresultvalues.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/analysis/noresultvalues/noresultvalues.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,72 +0,0 @@ +-// Copyright 2020 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. 
+- +-package noresultvalues +- +-import ( +- "go/ast" +- "go/token" +- "strings" +- +- _ "embed" +- +- "golang.org/x/tools/go/analysis" +- "golang.org/x/tools/go/analysis/passes/inspect" +- "golang.org/x/tools/go/ast/inspector" +- "golang.org/x/tools/gopls/internal/util/cursorutil" +- "golang.org/x/tools/internal/analysisinternal" +- "golang.org/x/tools/internal/typesinternal" +-) +- +-//go:embed doc.go +-var doc string +- +-var Analyzer = &analysis.Analyzer{ +- Name: "noresultvalues", +- Doc: analysisinternal.MustExtractDoc(doc, "noresultvalues"), +- Requires: []*analysis.Analyzer{inspect.Analyzer}, +- Run: run, +- RunDespiteErrors: true, +- URL: "https://pkg.go.dev/golang.org/x/tools/gopls/internal/analysis/noresultvalues", +-} +- +-func run(pass *analysis.Pass) (any, error) { +- inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) +- +- for _, typErr := range pass.TypeErrors { +- if !fixesError(typErr.Msg) { +- continue // irrelevant error +- } +- _, start, end, ok := typesinternal.ErrorCodeStartEnd(typErr) +- if !ok { +- continue // can't get position info +- } +- curErr, ok := inspect.Root().FindByPos(start, end) +- if !ok { +- continue // can't find errant node +- } +- // Find first enclosing return statement, if any. +- ret, _ := cursorutil.FirstEnclosing[*ast.ReturnStmt](curErr) +- if ret != nil { +- pass.Report(analysis.Diagnostic{ +- Pos: start, +- End: end, +- Message: typErr.Msg, +- SuggestedFixes: []analysis.SuggestedFix{{ +- Message: "Delete return values", +- TextEdits: []analysis.TextEdit{{ +- Pos: ret.Pos() + token.Pos(len("return")), +- End: ret.End(), +- }}, +- }}, +- }) +- } +- } +- return nil, nil +-} +- +-func fixesError(msg string) bool { +- return msg == "no result values expected" || +- strings.HasPrefix(msg, "too many return values") && strings.Contains(msg, "want ()") +-} +diff -urN a/gopls/internal/analysis/noresultvalues/noresultvalues_test.go b/gopls/internal/analysis/noresultvalues/noresultvalues_test.go +--- a/gopls/internal/analysis/noresultvalues/noresultvalues_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/analysis/noresultvalues/noresultvalues_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,17 +0,0 @@ +-// Copyright 2020 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package noresultvalues_test +- +-import ( +- "testing" +- +- "golang.org/x/tools/go/analysis/analysistest" +- "golang.org/x/tools/gopls/internal/analysis/noresultvalues" +-) +- +-func Test(t *testing.T) { +- testdata := analysistest.TestData() +- analysistest.RunWithSuggestedFixes(t, testdata, noresultvalues.Analyzer, "a", "typeparams") +-} +diff -urN a/gopls/internal/analysis/noresultvalues/testdata/src/a/a.go b/gopls/internal/analysis/noresultvalues/testdata/src/a/a.go +--- a/gopls/internal/analysis/noresultvalues/testdata/src/a/a.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/analysis/noresultvalues/testdata/src/a/a.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,9 +0,0 @@ +-// Copyright 2020 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. 
+- +-package noresultvalues +- +-func x() { return nil } // want `no result values expected|too many return values` +- +-func y() { return nil, "hello" } // want `no result values expected|too many return values` +diff -urN a/gopls/internal/analysis/noresultvalues/testdata/src/a/a.go.golden b/gopls/internal/analysis/noresultvalues/testdata/src/a/a.go.golden +--- a/gopls/internal/analysis/noresultvalues/testdata/src/a/a.go.golden 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/analysis/noresultvalues/testdata/src/a/a.go.golden 1969-12-31 18:00:00.000000000 -0600 +@@ -1,9 +0,0 @@ +-// Copyright 2020 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package noresultvalues +- +-func x() { return } // want `no result values expected|too many return values` +- +-func y() { return } // want `no result values expected|too many return values` +diff -urN a/gopls/internal/analysis/noresultvalues/testdata/src/typeparams/a.go b/gopls/internal/analysis/noresultvalues/testdata/src/typeparams/a.go +--- a/gopls/internal/analysis/noresultvalues/testdata/src/typeparams/a.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/analysis/noresultvalues/testdata/src/typeparams/a.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,6 +0,0 @@ +-package noresult +- +-func hello[T any]() { +- var z T +- return z // want `no result values expected|too many return values` +-} +diff -urN a/gopls/internal/analysis/noresultvalues/testdata/src/typeparams/a.go.golden b/gopls/internal/analysis/noresultvalues/testdata/src/typeparams/a.go.golden +--- a/gopls/internal/analysis/noresultvalues/testdata/src/typeparams/a.go.golden 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/analysis/noresultvalues/testdata/src/typeparams/a.go.golden 1969-12-31 18:00:00.000000000 -0600 +@@ -1,6 +0,0 @@ +-package noresult +- +-func hello[T any]() { +- var z T +- return // want `no result values expected|too many return values` +-} +diff -urN a/gopls/internal/analysis/recursiveiter/doc.go b/gopls/internal/analysis/recursiveiter/doc.go +--- a/gopls/internal/analysis/recursiveiter/doc.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/analysis/recursiveiter/doc.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,99 +0,0 @@ +-// Copyright 2025 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-// Package recursiveiter defines an Analyzer that checks for mistakes +-// in iterators for recursive data structures. +-// +-// # Analyzer recursiveiter +-// +-// recursiveiter: check for inefficient recursive iterators +-// +-// This analyzer reports when a function that returns an iterator +-// (iter.Seq or iter.Seq2) calls itself as the operand of a range +-// statement, as this is inefficient. +-// +-// When implementing an iterator (e.g. iter.Seq[T]) for a recursive +-// data type such as a tree or linked list, it is tempting to +-// recursively range over the iterator for each child element. 
+-// +-// Here's an example of a naive iterator over a binary tree: +-// +-// type tree struct { +-// value int +-// left, right *tree +-// } +-// +-// func (t *tree) All() iter.Seq[int] { +-// return func(yield func(int) bool) { +-// if t != nil { +-// for elem := range t.left.All() { // "inefficient recursive iterator" +-// if !yield(elem) { +-// return +-// } +-// } +-// if !yield(t.value) { +-// return +-// } +-// for elem := range t.right.All() { // "inefficient recursive iterator" +-// if !yield(elem) { +-// return +-// } +-// } +-// } +-// } +-// } +-// +-// Though it correctly enumerates the elements of the tree, it hides a +-// significant performance problem--two, in fact. Consider a balanced +-// tree of N nodes. Iterating the root node will cause All to be +-// called once on every node of the tree. This results in a chain of +-// nested active range-over-func statements when yield(t.value) is +-// called on a leaf node. +-// +-// The first performance problem is that each range-over-func +-// statement must typically heap-allocate a variable, so iteration of +-// the tree allocates as many variables as there are elements in the +-// tree, for a total of O(N) allocations, all unnecessary. +-// +-// The second problem is that each call to yield for a leaf of the +-// tree causes each of the enclosing range loops to receive a value, +-// which they then immediately pass on to their respective yield +-// function. This results in a chain of log(N) dynamic yield calls per +-// element, a total of O(N*log N) dynamic calls overall, when only +-// O(N) are necessary. +-// +-// A better implementation strategy for recursive iterators is to +-// first define the "every" operator for your recursive data type, +-// where every(f) reports whether an arbitrary predicate f(x) is true +-// for every element x in the data type. For our tree, the every +-// function would be: +-// +-// func (t *tree) every(f func(int) bool) bool { +-// return t == nil || +-// t.left.every(f) && f(t.value) && t.right.every(f) +-// } +-// +-// For example, this use of the every operator prints whether every +-// element in the tree is an even number: +-// +-// even := func(x int) bool { return x&1 == 0 } +-// println(t.every(even)) +-// +-// Then the iterator can be simply expressed as a trivial wrapper +-// around the every operator: +-// +-// func (t *tree) All() iter.Seq[int] { +-// return func(yield func(int) bool) { +-// _ = t.every(yield) +-// } +-// } +-// +-// In effect, tree.All computes whether yield returns true for each +-// element, short-circuiting if it ever returns false, then discards +-// the final boolean result. +-// +-// This has much better performance characteristics: it makes one +-// dynamic call per element of the tree, and it doesn't heap-allocate +-// anything. It is also clearer. +-package recursiveiter +diff -urN a/gopls/internal/analysis/recursiveiter/main.go b/gopls/internal/analysis/recursiveiter/main.go +--- a/gopls/internal/analysis/recursiveiter/main.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/analysis/recursiveiter/main.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,16 +0,0 @@ +-// Copyright 2025 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-//go:build ignore +- +-// The recursiveiter command applies the yield analyzer to the +-// specified packages of Go source code. 
+-package main +- +-import ( +- "golang.org/x/tools/go/analysis/singlechecker" +- "golang.org/x/tools/gopls/internal/analysis/recursiveiter" +-) +- +-func main() { singlechecker.Main(recursiveiter.Analyzer) } +diff -urN a/gopls/internal/analysis/recursiveiter/recursiveiter.go b/gopls/internal/analysis/recursiveiter/recursiveiter.go +--- a/gopls/internal/analysis/recursiveiter/recursiveiter.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/analysis/recursiveiter/recursiveiter.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,99 +0,0 @@ +-// Copyright 2025 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package recursiveiter +- +-import ( +- _ "embed" +- "fmt" +- "go/ast" +- "go/types" +- +- "golang.org/x/tools/go/analysis" +- "golang.org/x/tools/go/analysis/passes/inspect" +- "golang.org/x/tools/go/ast/inspector" +- "golang.org/x/tools/go/types/typeutil" +- "golang.org/x/tools/internal/analysisinternal" +- typeindexanalyzer "golang.org/x/tools/internal/analysisinternal/typeindex" +- "golang.org/x/tools/internal/typesinternal/typeindex" +-) +- +-//go:embed doc.go +-var doc string +- +-var Analyzer = &analysis.Analyzer{ +- Name: "recursiveiter", +- Doc: analysisinternal.MustExtractDoc(doc, "recursiveiter"), +- Requires: []*analysis.Analyzer{inspect.Analyzer, typeindexanalyzer.Analyzer}, +- Run: run, +- URL: "https://pkg.go.dev/golang.org/x/tools/gopls/internal/analysis/recursiveiter", +-} +- +-func run(pass *analysis.Pass) (any, error) { +- var ( +- inspector = pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) +- index = pass.ResultOf[typeindexanalyzer.Analyzer].(*typeindex.Index) +- info = pass.TypesInfo +- iterSeq = index.Object("iter", "Seq") +- iterSeq2 = index.Object("iter", "Seq2") +- ) +- if iterSeq == nil || iterSeq2 == nil { +- return nil, nil // fast path: no iterators +- } +- +- // Search for a function or method f that returns an iter.Seq +- // or Seq2 and calls itself recursively within a range stmt: +- // +- // func f(...) iter.Seq[E] { +- // return func(yield func(E) bool) { +- // ... +- // for range f(...) { ... } +- // } +- // } +- for curDecl := range inspector.Root().Preorder((*ast.FuncDecl)(nil)) { +- decl := curDecl.Node().(*ast.FuncDecl) +- fn := info.Defs[decl.Name].(*types.Func) +- results := fn.Signature().Results() +- if results.Len() != 1 { +- continue // result not a singleton +- } +- retType, ok := results.At(0).Type().(*types.Named) +- if !ok { +- continue // result not a named type +- } +- switch retType.Origin().Obj() { +- case iterSeq, iterSeq2: +- default: +- continue // result not iter.Seq{,2} +- } +- // Have: a FuncDecl that returns an iterator. 
+- for curRet := range curDecl.Preorder((*ast.ReturnStmt)(nil)) { +- ret := curRet.Node().(*ast.ReturnStmt) +- if len(ret.Results) != 1 || !is[*ast.FuncLit](ret.Results[0]) { +- continue // not "return func(){...}" +- } +- for curRange := range curRet.Preorder((*ast.RangeStmt)(nil)) { +- rng := curRange.Node().(*ast.RangeStmt) +- call, ok := rng.X.(*ast.CallExpr) +- if !ok { +- continue +- } +- if typeutil.StaticCallee(info, call) == fn { +- pass.Report(analysis.Diagnostic{ +- Pos: rng.Range, +- End: rng.X.End(), +- Message: fmt.Sprintf("inefficient recursion in iterator %s", fn.Name()), +- }) +- } +- } +- } +- } +- +- return nil, nil +-} +- +-func is[T any](x any) bool { +- _, ok := x.(T) +- return ok +-} +diff -urN a/gopls/internal/analysis/recursiveiter/recursiveiter_test.go b/gopls/internal/analysis/recursiveiter/recursiveiter_test.go +--- a/gopls/internal/analysis/recursiveiter/recursiveiter_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/analysis/recursiveiter/recursiveiter_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,17 +0,0 @@ +-// Copyright 2025 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package recursiveiter_test +- +-import ( +- "testing" +- +- "golang.org/x/tools/go/analysis/analysistest" +- "golang.org/x/tools/gopls/internal/analysis/recursiveiter" +-) +- +-func Test(t *testing.T) { +- testdata := analysistest.TestData() +- analysistest.Run(t, testdata, recursiveiter.Analyzer, "a") +-} +diff -urN a/gopls/internal/analysis/recursiveiter/testdata/src/a/a.go b/gopls/internal/analysis/recursiveiter/testdata/src/a/a.go +--- a/gopls/internal/analysis/recursiveiter/testdata/src/a/a.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/analysis/recursiveiter/testdata/src/a/a.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,30 +0,0 @@ +-// Copyright 2025 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package recursiveiter +- +-import "iter" +- +-type cons struct { +- car int +- cdr *cons +-} +- +-func (cons *cons) All() iter.Seq[int] { +- return func(yield func(int) bool) { +- // The correct recursion is: +- // func (cons *cons) all(f func(int) bool) { +- // return cons == nil || yield(cons.car) && cons.cdr.all() +- // } +- // then: +- // _ = cons.all(yield) +- if cons != nil && yield(cons.car) { +- for elem := range cons.All() { // want "inefficient recursion in iterator All" +- if !yield(elem) { +- break +- } +- } +- } +- } +-} +diff -urN a/gopls/internal/analysis/simplifycompositelit/doc.go b/gopls/internal/analysis/simplifycompositelit/doc.go +--- a/gopls/internal/analysis/simplifycompositelit/doc.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/analysis/simplifycompositelit/doc.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,24 +0,0 @@ +-// Copyright 2023 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-// Package simplifycompositelit defines an Analyzer that simplifies composite literals. 
+-// https://github.com/golang/go/blob/master/src/cmd/gofmt/simplify.go +-// https://golang.org/cmd/gofmt/#hdr-The_simplify_command +-// +-// # Analyzer simplifycompositelit +-// +-// simplifycompositelit: check for composite literal simplifications +-// +-// An array, slice, or map composite literal of the form: +-// +-// []T{T{}, T{}} +-// +-// will be simplified to: +-// +-// []T{{}, {}} +-// +-// This is one of the simplifications that "gofmt -s" applies. +-// +-// This analyzer ignores generated code. +-package simplifycompositelit +diff -urN a/gopls/internal/analysis/simplifycompositelit/simplifycompositelit.go b/gopls/internal/analysis/simplifycompositelit/simplifycompositelit.go +--- a/gopls/internal/analysis/simplifycompositelit/simplifycompositelit.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/analysis/simplifycompositelit/simplifycompositelit.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,205 +0,0 @@ +-// Copyright 2020 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-// Package simplifycompositelit defines an Analyzer that simplifies composite literals. +-// https://github.com/golang/go/blob/master/src/cmd/gofmt/simplify.go +-// https://golang.org/cmd/gofmt/#hdr-The_simplify_command +-package simplifycompositelit +- +-import ( +- "bytes" +- _ "embed" +- "fmt" +- "go/ast" +- "go/printer" +- "go/token" +- "reflect" +- +- "golang.org/x/tools/go/analysis" +- "golang.org/x/tools/go/analysis/passes/inspect" +- "golang.org/x/tools/go/ast/inspector" +- "golang.org/x/tools/internal/analysisinternal" +-) +- +-//go:embed doc.go +-var doc string +- +-var Analyzer = &analysis.Analyzer{ +- Name: "simplifycompositelit", +- Doc: analysisinternal.MustExtractDoc(doc, "simplifycompositelit"), +- Requires: []*analysis.Analyzer{inspect.Analyzer}, +- Run: run, +- URL: "https://pkg.go.dev/golang.org/x/tools/gopls/internal/analysis/simplifycompositelit", +-} +- +-func run(pass *analysis.Pass) (any, error) { +- // Gather information whether file is generated or not +- generated := make(map[*token.File]bool) +- for _, file := range pass.Files { +- if ast.IsGenerated(file) { +- generated[pass.Fset.File(file.FileStart)] = true +- } +- } +- +- inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) +- nodeFilter := []ast.Node{(*ast.CompositeLit)(nil)} +- inspect.Preorder(nodeFilter, func(n ast.Node) { +- if _, ok := generated[pass.Fset.File(n.Pos())]; ok { +- return // skip checking if it's generated code +- } +- +- expr := n.(*ast.CompositeLit) +- +- outer := expr +- var keyType, eltType ast.Expr +- switch typ := outer.Type.(type) { +- case *ast.ArrayType: +- eltType = typ.Elt +- case *ast.MapType: +- keyType = typ.Key +- eltType = typ.Value +- } +- +- if eltType == nil { +- return +- } +- var ktyp reflect.Value +- if keyType != nil { +- ktyp = reflect.ValueOf(keyType) +- } +- typ := reflect.ValueOf(eltType) +- for _, x := range outer.Elts { +- // look at value of indexed/named elements +- if t, ok := x.(*ast.KeyValueExpr); ok { +- if keyType != nil { +- simplifyLiteral(pass, ktyp, keyType, t.Key) +- } +- x = t.Value +- } +- simplifyLiteral(pass, typ, eltType, x) +- } +- }) +- return nil, nil +-} +- +-func simplifyLiteral(pass *analysis.Pass, typ reflect.Value, astType, x ast.Expr) { +- // if the element is a composite literal and its literal type +- // matches the outer literal's element type exactly, the inner +- // literal type may be omitted +- if inner, ok := 
x.(*ast.CompositeLit); ok && match(typ, reflect.ValueOf(inner.Type)) { +- var b bytes.Buffer +- printer.Fprint(&b, pass.Fset, inner.Type) // ignore error +- createDiagnostic(pass, inner.Type.Pos(), inner.Type.End(), b.String()) +- } +- // if the outer literal's element type is a pointer type *T +- // and the element is & of a composite literal of type T, +- // the inner &T may be omitted. +- if ptr, ok := astType.(*ast.StarExpr); ok { +- if addr, ok := x.(*ast.UnaryExpr); ok && addr.Op == token.AND { +- if inner, ok := addr.X.(*ast.CompositeLit); ok { +- if match(reflect.ValueOf(ptr.X), reflect.ValueOf(inner.Type)) { +- var b bytes.Buffer +- printer.Fprint(&b, pass.Fset, inner.Type) // ignore error +- // Account for the & by subtracting 1 from typ.Pos(). +- createDiagnostic(pass, inner.Type.Pos()-1, inner.Type.End(), "&"+b.String()) +- } +- } +- } +- } +-} +- +-func createDiagnostic(pass *analysis.Pass, start, end token.Pos, typ string) { +- pass.Report(analysis.Diagnostic{ +- Pos: start, +- End: end, +- Message: "redundant type from array, slice, or map composite literal", +- SuggestedFixes: []analysis.SuggestedFix{{ +- Message: fmt.Sprintf("Remove '%s'", typ), +- TextEdits: []analysis.TextEdit{{ +- Pos: start, +- End: end, +- NewText: []byte{}, +- }}, +- }}, +- }) +-} +- +-// match reports whether pattern matches val, +-// recording wildcard submatches in m. +-// If m == nil, match checks whether pattern == val. +-// from https://github.com/golang/go/blob/26154f31ad6c801d8bad5ef58df1e9263c6beec7/src/cmd/gofmt/rewrite.go#L160 +-func match(pattern, val reflect.Value) bool { +- // Otherwise, pattern and val must match recursively. +- if !pattern.IsValid() || !val.IsValid() { +- return !pattern.IsValid() && !val.IsValid() +- } +- if pattern.Type() != val.Type() { +- return false +- } +- +- // Special cases. +- switch pattern.Type() { +- case identType: +- // For identifiers, only the names need to match +- // (and none of the other *ast.Object information). +- // This is a common case, handle it all here instead +- // of recursing down any further via reflection. +- p := pattern.Interface().(*ast.Ident) +- v := val.Interface().(*ast.Ident) +- return p == nil && v == nil || p != nil && v != nil && p.Name == v.Name +- case objectPtrType, positionType: +- // object pointers and token positions always match +- return true +- case callExprType: +- // For calls, the Ellipsis fields (token.Position) must +- // match since that is how f(x) and f(x...) are different. +- // Check them here but fall through for the remaining fields. +- p := pattern.Interface().(*ast.CallExpr) +- v := val.Interface().(*ast.CallExpr) +- if p.Ellipsis.IsValid() != v.Ellipsis.IsValid() { +- return false +- } +- } +- +- p := reflect.Indirect(pattern) +- v := reflect.Indirect(val) +- if !p.IsValid() || !v.IsValid() { +- return !p.IsValid() && !v.IsValid() +- } +- +- switch p.Kind() { +- case reflect.Slice: +- if p.Len() != v.Len() { +- return false +- } +- for i := 0; i < p.Len(); i++ { +- if !match(p.Index(i), v.Index(i)) { +- return false +- } +- } +- return true +- +- case reflect.Struct: +- for i := 0; i < p.NumField(); i++ { +- if !match(p.Field(i), v.Field(i)) { +- return false +- } +- } +- return true +- +- case reflect.Interface: +- return match(p.Elem(), v.Elem()) +- } +- +- // Handle token integers, etc. +- return p.Interface() == v.Interface() +-} +- +-// Values/types for special cases. 
+-var ( +- identType = reflect.TypeOf((*ast.Ident)(nil)) +- objectPtrType = reflect.TypeOf((*ast.Object)(nil)) +- positionType = reflect.TypeOf(token.NoPos) +- callExprType = reflect.TypeOf((*ast.CallExpr)(nil)) +-) +diff -urN a/gopls/internal/analysis/simplifycompositelit/simplifycompositelit_test.go b/gopls/internal/analysis/simplifycompositelit/simplifycompositelit_test.go +--- a/gopls/internal/analysis/simplifycompositelit/simplifycompositelit_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/analysis/simplifycompositelit/simplifycompositelit_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,17 +0,0 @@ +-// Copyright 2020 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package simplifycompositelit_test +- +-import ( +- "testing" +- +- "golang.org/x/tools/go/analysis/analysistest" +- "golang.org/x/tools/gopls/internal/analysis/simplifycompositelit" +-) +- +-func Test(t *testing.T) { +- testdata := analysistest.TestData() +- analysistest.RunWithSuggestedFixes(t, testdata, simplifycompositelit.Analyzer, "a", "generatedcode") +-} +diff -urN a/gopls/internal/analysis/simplifycompositelit/testdata/src/a/a.go b/gopls/internal/analysis/simplifycompositelit/testdata/src/a/a.go +--- a/gopls/internal/analysis/simplifycompositelit/testdata/src/a/a.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/analysis/simplifycompositelit/testdata/src/a/a.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,234 +0,0 @@ +-// Copyright 2020 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package testdata +- +-type T struct { +- x, y int +-} +- +-type T2 struct { +- w, z int +-} +- +-var _ = [42]T{ +- T{}, // want "redundant type from array, slice, or map composite literal" +- T{1, 2}, // want "redundant type from array, slice, or map composite literal" +- T{3, 4}, // want "redundant type from array, slice, or map composite literal" +-} +- +-var _ = [...]T{ +- T{}, // want "redundant type from array, slice, or map composite literal" +- T{1, 2}, // want "redundant type from array, slice, or map composite literal" +- T{3, 4}, // want "redundant type from array, slice, or map composite literal" +-} +- +-var _ = []T{ +- T{}, // want "redundant type from array, slice, or map composite literal" +- T{1, 2}, // want "redundant type from array, slice, or map composite literal" +- T{3, 4}, // want "redundant type from array, slice, or map composite literal" +-} +- +-var _ = []T{ +- T{}, // want "redundant type from array, slice, or map composite literal" +- 10: T{1, 2}, // want "redundant type from array, slice, or map composite literal" +- 20: T{3, 4}, // want "redundant type from array, slice, or map composite literal" +-} +- +-var _ = []struct { +- x, y int +-}{ +- struct{ x, y int }{}, // want "redundant type from array, slice, or map composite literal" +- 10: struct{ x, y int }{1, 2}, // want "redundant type from array, slice, or map composite literal" +- 20: struct{ x, y int }{3, 4}, // want "redundant type from array, slice, or map composite literal" +-} +- +-var _ = []interface{}{ +- T{}, +- 10: T{1, 2}, +- 20: T{3, 4}, +-} +- +-var _ = [][]int{ +- []int{}, // want "redundant type from array, slice, or map composite literal" +- []int{1, 2}, // want "redundant type from array, slice, or map composite literal" +- []int{3, 4}, // want "redundant type from array, slice, or map composite literal" 
+-} +- +-var _ = [][]int{ +- ([]int{}), +- ([]int{1, 2}), +- []int{3, 4}, // want "redundant type from array, slice, or map composite literal" +-} +- +-var _ = [][][]int{ +- [][]int{}, // want "redundant type from array, slice, or map composite literal" +- [][]int{ // want "redundant type from array, slice, or map composite literal" +- []int{}, // want "redundant type from array, slice, or map composite literal" +- []int{0, 1, 2, 3}, // want "redundant type from array, slice, or map composite literal" +- []int{4, 5}, // want "redundant type from array, slice, or map composite literal" +- }, +-} +- +-var _ = map[string]T{ +- "foo": T{}, // want "redundant type from array, slice, or map composite literal" +- "bar": T{1, 2}, // want "redundant type from array, slice, or map composite literal" +- "bal": T{3, 4}, // want "redundant type from array, slice, or map composite literal" +-} +- +-var _ = map[string]struct { +- x, y int +-}{ +- "foo": struct{ x, y int }{}, // want "redundant type from array, slice, or map composite literal" +- "bar": struct{ x, y int }{1, 2}, // want "redundant type from array, slice, or map composite literal" +- "bal": struct{ x, y int }{3, 4}, // want "redundant type from array, slice, or map composite literal" +-} +- +-var _ = map[string]interface{}{ +- "foo": T{}, +- "bar": T{1, 2}, +- "bal": T{3, 4}, +-} +- +-var _ = map[string][]int{ +- "foo": []int{}, // want "redundant type from array, slice, or map composite literal" +- "bar": []int{1, 2}, // want "redundant type from array, slice, or map composite literal" +- "bal": []int{3, 4}, // want "redundant type from array, slice, or map composite literal" +-} +- +-var _ = map[string][]int{ +- "foo": ([]int{}), +- "bar": ([]int{1, 2}), +- "bal": []int{3, 4}, // want "redundant type from array, slice, or map composite literal" +-} +- +-type Point struct { +- a int +- b int +-} +- +-type Piece struct { +- a int +- b int +- c Point +- d []Point +- e *Point +- f *Point +-} +- +-// from exp/4s/data.go +-var pieces3 = []Piece{ +- Piece{0, 0, Point{4, 1}, []Point{Point{0, 0}, Point{1, 0}, Point{1, 0}, Point{1, 0}}, nil, nil}, // want "redundant type from array, slice, or map composite literal" "redundant type from array, slice, or map composite literal" "redundant type from array, slice, or map composite literal" "redundant type from array, slice, or map composite literal" "redundant type from array, slice, or map composite literal" +- Piece{1, 0, Point{1, 4}, []Point{Point{0, 0}, Point{0, 1}, Point{0, 1}, Point{0, 1}}, nil, nil}, // want "redundant type from array, slice, or map composite literal" "redundant type from array, slice, or map composite literal" "redundant type from array, slice, or map composite literal" "redundant type from array, slice, or map composite literal" "redundant type from array, slice, or map composite literal" +- Piece{2, 0, Point{4, 1}, []Point{Point{0, 0}, Point{1, 0}, Point{1, 0}, Point{1, 0}}, nil, nil}, // want "redundant type from array, slice, or map composite literal" "redundant type from array, slice, or map composite literal" "redundant type from array, slice, or map composite literal" "redundant type from array, slice, or map composite literal" "redundant type from array, slice, or map composite literal" +- Piece{3, 0, Point{1, 4}, []Point{Point{0, 0}, Point{0, 1}, Point{0, 1}, Point{0, 1}}, nil, nil}, // want "redundant type from array, slice, or map composite literal" "redundant type from array, slice, or map composite literal" "redundant type from array, slice, or map composite 
literal" "redundant type from array, slice, or map composite literal" "redundant type from array, slice, or map composite literal" +-} +- +-var _ = [42]*T{ +- &T{}, // want "redundant type from array, slice, or map composite literal" +- &T{1, 2}, // want "redundant type from array, slice, or map composite literal" +- &T{3, 4}, // want "redundant type from array, slice, or map composite literal" +-} +- +-var _ = [...]*T{ +- &T{}, // want "redundant type from array, slice, or map composite literal" +- &T{1, 2}, // want "redundant type from array, slice, or map composite literal" +- &T{3, 4}, // want "redundant type from array, slice, or map composite literal" +-} +- +-var _ = []*T{ +- &T{}, // want "redundant type from array, slice, or map composite literal" +- &T{1, 2}, // want "redundant type from array, slice, or map composite literal" +- &T{3, 4}, // want "redundant type from array, slice, or map composite literal" +-} +- +-var _ = []*T{ +- &T{}, // want "redundant type from array, slice, or map composite literal" +- 10: &T{1, 2}, // want "redundant type from array, slice, or map composite literal" +- 20: &T{3, 4}, // want "redundant type from array, slice, or map composite literal" +-} +- +-var _ = []*struct { +- x, y int +-}{ +- &struct{ x, y int }{}, // want "redundant type from array, slice, or map composite literal" +- 10: &struct{ x, y int }{1, 2}, // want "redundant type from array, slice, or map composite literal" +- 20: &struct{ x, y int }{3, 4}, // want "redundant type from array, slice, or map composite literal" +-} +- +-var _ = []interface{}{ +- &T{}, +- 10: &T{1, 2}, +- 20: &T{3, 4}, +-} +- +-var _ = []*[]int{ +- &[]int{}, // want "redundant type from array, slice, or map composite literal" +- &[]int{1, 2}, // want "redundant type from array, slice, or map composite literal" +- &[]int{3, 4}, // want "redundant type from array, slice, or map composite literal" +-} +- +-var _ = []*[]int{ +- (&[]int{}), +- (&[]int{1, 2}), +- &[]int{3, 4}, // want "redundant type from array, slice, or map composite literal" +-} +- +-var _ = []*[]*[]int{ +- &[]*[]int{}, // want "redundant type from array, slice, or map composite literal" +- &[]*[]int{ // want "redundant type from array, slice, or map composite literal" +- &[]int{}, // want "redundant type from array, slice, or map composite literal" +- &[]int{0, 1, 2, 3}, // want "redundant type from array, slice, or map composite literal" +- &[]int{4, 5}, // want "redundant type from array, slice, or map composite literal" +- }, +-} +- +-var _ = map[string]*T{ +- "foo": &T{}, // want "redundant type from array, slice, or map composite literal" +- "bar": &T{1, 2}, // want "redundant type from array, slice, or map composite literal" +- "bal": &T{3, 4}, // want "redundant type from array, slice, or map composite literal" +-} +- +-var _ = map[string]*struct { +- x, y int +-}{ +- "foo": &struct{ x, y int }{}, // want "redundant type from array, slice, or map composite literal" +- "bar": &struct{ x, y int }{1, 2}, // want "redundant type from array, slice, or map composite literal" +- "bal": &struct{ x, y int }{3, 4}, // want "redundant type from array, slice, or map composite literal" +-} +- +-var _ = map[string]interface{}{ +- "foo": &T{}, +- "bar": &T{1, 2}, +- "bal": &T{3, 4}, +-} +- +-var _ = map[string]*[]int{ +- "foo": &[]int{}, // want "redundant type from array, slice, or map composite literal" +- "bar": &[]int{1, 2}, // want "redundant type from array, slice, or map composite literal" +- "bal": &[]int{3, 4}, // want "redundant type from 
array, slice, or map composite literal" +-} +- +-var _ = map[string]*[]int{ +- "foo": (&[]int{}), +- "bar": (&[]int{1, 2}), +- "bal": &[]int{3, 4}, // want "redundant type from array, slice, or map composite literal" +-} +- +-var pieces4 = []*Piece{ +- &Piece{0, 0, Point{4, 1}, []Point{Point{0, 0}, Point{1, 0}, Point{1, 0}, Point{1, 0}}, nil, nil}, // want "redundant type from array, slice, or map composite literal" "redundant type from array, slice, or map composite literal" "redundant type from array, slice, or map composite literal" "redundant type from array, slice, or map composite literal" "redundant type from array, slice, or map composite literal" +- &Piece{1, 0, Point{1, 4}, []Point{Point{0, 0}, Point{0, 1}, Point{0, 1}, Point{0, 1}}, nil, nil}, // want "redundant type from array, slice, or map composite literal" "redundant type from array, slice, or map composite literal" "redundant type from array, slice, or map composite literal" "redundant type from array, slice, or map composite literal" "redundant type from array, slice, or map composite literal" +- &Piece{2, 0, Point{4, 1}, []Point{Point{0, 0}, Point{1, 0}, Point{1, 0}, Point{1, 0}}, nil, nil}, // want "redundant type from array, slice, or map composite literal" "redundant type from array, slice, or map composite literal" "redundant type from array, slice, or map composite literal" "redundant type from array, slice, or map composite literal" "redundant type from array, slice, or map composite literal" +- &Piece{3, 0, Point{1, 4}, []Point{Point{0, 0}, Point{0, 1}, Point{0, 1}, Point{0, 1}}, nil, nil}, // want "redundant type from array, slice, or map composite literal" "redundant type from array, slice, or map composite literal" "redundant type from array, slice, or map composite literal" "redundant type from array, slice, or map composite literal" "redundant type from array, slice, or map composite literal" +-} +- +-var _ = map[T]T2{ +- T{1, 2}: T2{3, 4}, // want "redundant type from array, slice, or map composite literal" "redundant type from array, slice, or map composite literal" +- T{5, 6}: T2{7, 8}, // want "redundant type from array, slice, or map composite literal" "redundant type from array, slice, or map composite literal" +-} +- +-var _ = map[*T]*T2{ +- &T{1, 2}: &T2{3, 4}, // want "redundant type from array, slice, or map composite literal" "redundant type from array, slice, or map composite literal" +- &T{5, 6}: &T2{7, 8}, // want "redundant type from array, slice, or map composite literal" "redundant type from array, slice, or map composite literal" +-} +diff -urN a/gopls/internal/analysis/simplifycompositelit/testdata/src/a/a.go.golden b/gopls/internal/analysis/simplifycompositelit/testdata/src/a/a.go.golden +--- a/gopls/internal/analysis/simplifycompositelit/testdata/src/a/a.go.golden 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/analysis/simplifycompositelit/testdata/src/a/a.go.golden 1969-12-31 18:00:00.000000000 -0600 +@@ -1,234 +0,0 @@ +-// Copyright 2020 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. 
+- +-package testdata +- +-type T struct { +- x, y int +-} +- +-type T2 struct { +- w, z int +-} +- +-var _ = [42]T{ +- {}, // want "redundant type from array, slice, or map composite literal" +- {1, 2}, // want "redundant type from array, slice, or map composite literal" +- {3, 4}, // want "redundant type from array, slice, or map composite literal" +-} +- +-var _ = [...]T{ +- {}, // want "redundant type from array, slice, or map composite literal" +- {1, 2}, // want "redundant type from array, slice, or map composite literal" +- {3, 4}, // want "redundant type from array, slice, or map composite literal" +-} +- +-var _ = []T{ +- {}, // want "redundant type from array, slice, or map composite literal" +- {1, 2}, // want "redundant type from array, slice, or map composite literal" +- {3, 4}, // want "redundant type from array, slice, or map composite literal" +-} +- +-var _ = []T{ +- {}, // want "redundant type from array, slice, or map composite literal" +- 10: {1, 2}, // want "redundant type from array, slice, or map composite literal" +- 20: {3, 4}, // want "redundant type from array, slice, or map composite literal" +-} +- +-var _ = []struct { +- x, y int +-}{ +- {}, // want "redundant type from array, slice, or map composite literal" +- 10: {1, 2}, // want "redundant type from array, slice, or map composite literal" +- 20: {3, 4}, // want "redundant type from array, slice, or map composite literal" +-} +- +-var _ = []interface{}{ +- T{}, +- 10: T{1, 2}, +- 20: T{3, 4}, +-} +- +-var _ = [][]int{ +- {}, // want "redundant type from array, slice, or map composite literal" +- {1, 2}, // want "redundant type from array, slice, or map composite literal" +- {3, 4}, // want "redundant type from array, slice, or map composite literal" +-} +- +-var _ = [][]int{ +- ([]int{}), +- ([]int{1, 2}), +- {3, 4}, // want "redundant type from array, slice, or map composite literal" +-} +- +-var _ = [][][]int{ +- {}, // want "redundant type from array, slice, or map composite literal" +- { // want "redundant type from array, slice, or map composite literal" +- {}, // want "redundant type from array, slice, or map composite literal" +- {0, 1, 2, 3}, // want "redundant type from array, slice, or map composite literal" +- {4, 5}, // want "redundant type from array, slice, or map composite literal" +- }, +-} +- +-var _ = map[string]T{ +- "foo": {}, // want "redundant type from array, slice, or map composite literal" +- "bar": {1, 2}, // want "redundant type from array, slice, or map composite literal" +- "bal": {3, 4}, // want "redundant type from array, slice, or map composite literal" +-} +- +-var _ = map[string]struct { +- x, y int +-}{ +- "foo": {}, // want "redundant type from array, slice, or map composite literal" +- "bar": {1, 2}, // want "redundant type from array, slice, or map composite literal" +- "bal": {3, 4}, // want "redundant type from array, slice, or map composite literal" +-} +- +-var _ = map[string]interface{}{ +- "foo": T{}, +- "bar": T{1, 2}, +- "bal": T{3, 4}, +-} +- +-var _ = map[string][]int{ +- "foo": {}, // want "redundant type from array, slice, or map composite literal" +- "bar": {1, 2}, // want "redundant type from array, slice, or map composite literal" +- "bal": {3, 4}, // want "redundant type from array, slice, or map composite literal" +-} +- +-var _ = map[string][]int{ +- "foo": ([]int{}), +- "bar": ([]int{1, 2}), +- "bal": {3, 4}, // want "redundant type from array, slice, or map composite literal" +-} +- +-type Point struct { +- a int +- b int +-} +- +-type Piece struct { 
+- a int +- b int +- c Point +- d []Point +- e *Point +- f *Point +-} +- +-// from exp/4s/data.go +-var pieces3 = []Piece{ +- {0, 0, Point{4, 1}, []Point{{0, 0}, {1, 0}, {1, 0}, {1, 0}}, nil, nil}, // want "redundant type from array, slice, or map composite literal" "redundant type from array, slice, or map composite literal" "redundant type from array, slice, or map composite literal" "redundant type from array, slice, or map composite literal" "redundant type from array, slice, or map composite literal" +- {1, 0, Point{1, 4}, []Point{{0, 0}, {0, 1}, {0, 1}, {0, 1}}, nil, nil}, // want "redundant type from array, slice, or map composite literal" "redundant type from array, slice, or map composite literal" "redundant type from array, slice, or map composite literal" "redundant type from array, slice, or map composite literal" "redundant type from array, slice, or map composite literal" +- {2, 0, Point{4, 1}, []Point{{0, 0}, {1, 0}, {1, 0}, {1, 0}}, nil, nil}, // want "redundant type from array, slice, or map composite literal" "redundant type from array, slice, or map composite literal" "redundant type from array, slice, or map composite literal" "redundant type from array, slice, or map composite literal" "redundant type from array, slice, or map composite literal" +- {3, 0, Point{1, 4}, []Point{{0, 0}, {0, 1}, {0, 1}, {0, 1}}, nil, nil}, // want "redundant type from array, slice, or map composite literal" "redundant type from array, slice, or map composite literal" "redundant type from array, slice, or map composite literal" "redundant type from array, slice, or map composite literal" "redundant type from array, slice, or map composite literal" +-} +- +-var _ = [42]*T{ +- {}, // want "redundant type from array, slice, or map composite literal" +- {1, 2}, // want "redundant type from array, slice, or map composite literal" +- {3, 4}, // want "redundant type from array, slice, or map composite literal" +-} +- +-var _ = [...]*T{ +- {}, // want "redundant type from array, slice, or map composite literal" +- {1, 2}, // want "redundant type from array, slice, or map composite literal" +- {3, 4}, // want "redundant type from array, slice, or map composite literal" +-} +- +-var _ = []*T{ +- {}, // want "redundant type from array, slice, or map composite literal" +- {1, 2}, // want "redundant type from array, slice, or map composite literal" +- {3, 4}, // want "redundant type from array, slice, or map composite literal" +-} +- +-var _ = []*T{ +- {}, // want "redundant type from array, slice, or map composite literal" +- 10: {1, 2}, // want "redundant type from array, slice, or map composite literal" +- 20: {3, 4}, // want "redundant type from array, slice, or map composite literal" +-} +- +-var _ = []*struct { +- x, y int +-}{ +- {}, // want "redundant type from array, slice, or map composite literal" +- 10: {1, 2}, // want "redundant type from array, slice, or map composite literal" +- 20: {3, 4}, // want "redundant type from array, slice, or map composite literal" +-} +- +-var _ = []interface{}{ +- &T{}, +- 10: &T{1, 2}, +- 20: &T{3, 4}, +-} +- +-var _ = []*[]int{ +- {}, // want "redundant type from array, slice, or map composite literal" +- {1, 2}, // want "redundant type from array, slice, or map composite literal" +- {3, 4}, // want "redundant type from array, slice, or map composite literal" +-} +- +-var _ = []*[]int{ +- (&[]int{}), +- (&[]int{1, 2}), +- {3, 4}, // want "redundant type from array, slice, or map composite literal" +-} +- +-var _ = []*[]*[]int{ +- {}, // want "redundant type 
from array, slice, or map composite literal" +- { // want "redundant type from array, slice, or map composite literal" +- {}, // want "redundant type from array, slice, or map composite literal" +- {0, 1, 2, 3}, // want "redundant type from array, slice, or map composite literal" +- {4, 5}, // want "redundant type from array, slice, or map composite literal" +- }, +-} +- +-var _ = map[string]*T{ +- "foo": {}, // want "redundant type from array, slice, or map composite literal" +- "bar": {1, 2}, // want "redundant type from array, slice, or map composite literal" +- "bal": {3, 4}, // want "redundant type from array, slice, or map composite literal" +-} +- +-var _ = map[string]*struct { +- x, y int +-}{ +- "foo": {}, // want "redundant type from array, slice, or map composite literal" +- "bar": {1, 2}, // want "redundant type from array, slice, or map composite literal" +- "bal": {3, 4}, // want "redundant type from array, slice, or map composite literal" +-} +- +-var _ = map[string]interface{}{ +- "foo": &T{}, +- "bar": &T{1, 2}, +- "bal": &T{3, 4}, +-} +- +-var _ = map[string]*[]int{ +- "foo": {}, // want "redundant type from array, slice, or map composite literal" +- "bar": {1, 2}, // want "redundant type from array, slice, or map composite literal" +- "bal": {3, 4}, // want "redundant type from array, slice, or map composite literal" +-} +- +-var _ = map[string]*[]int{ +- "foo": (&[]int{}), +- "bar": (&[]int{1, 2}), +- "bal": {3, 4}, // want "redundant type from array, slice, or map composite literal" +-} +- +-var pieces4 = []*Piece{ +- {0, 0, Point{4, 1}, []Point{{0, 0}, {1, 0}, {1, 0}, {1, 0}}, nil, nil}, // want "redundant type from array, slice, or map composite literal" "redundant type from array, slice, or map composite literal" "redundant type from array, slice, or map composite literal" "redundant type from array, slice, or map composite literal" "redundant type from array, slice, or map composite literal" +- {1, 0, Point{1, 4}, []Point{{0, 0}, {0, 1}, {0, 1}, {0, 1}}, nil, nil}, // want "redundant type from array, slice, or map composite literal" "redundant type from array, slice, or map composite literal" "redundant type from array, slice, or map composite literal" "redundant type from array, slice, or map composite literal" "redundant type from array, slice, or map composite literal" +- {2, 0, Point{4, 1}, []Point{{0, 0}, {1, 0}, {1, 0}, {1, 0}}, nil, nil}, // want "redundant type from array, slice, or map composite literal" "redundant type from array, slice, or map composite literal" "redundant type from array, slice, or map composite literal" "redundant type from array, slice, or map composite literal" "redundant type from array, slice, or map composite literal" +- {3, 0, Point{1, 4}, []Point{{0, 0}, {0, 1}, {0, 1}, {0, 1}}, nil, nil}, // want "redundant type from array, slice, or map composite literal" "redundant type from array, slice, or map composite literal" "redundant type from array, slice, or map composite literal" "redundant type from array, slice, or map composite literal" "redundant type from array, slice, or map composite literal" +-} +- +-var _ = map[T]T2{ +- {1, 2}: {3, 4}, // want "redundant type from array, slice, or map composite literal" "redundant type from array, slice, or map composite literal" +- {5, 6}: {7, 8}, // want "redundant type from array, slice, or map composite literal" "redundant type from array, slice, or map composite literal" +-} +- +-var _ = map[*T]*T2{ +- {1, 2}: {3, 4}, // want "redundant type from array, slice, or map composite 
literal" "redundant type from array, slice, or map composite literal" +- {5, 6}: {7, 8}, // want "redundant type from array, slice, or map composite literal" "redundant type from array, slice, or map composite literal" +-} +diff -urN a/gopls/internal/analysis/simplifycompositelit/testdata/src/generatedcode/generatedcode.go b/gopls/internal/analysis/simplifycompositelit/testdata/src/generatedcode/generatedcode.go +--- a/gopls/internal/analysis/simplifycompositelit/testdata/src/generatedcode/generatedcode.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/analysis/simplifycompositelit/testdata/src/generatedcode/generatedcode.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,17 +0,0 @@ +-// Copyright 2024 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-// Code generated with somegen DO NOT EDIT. +- +-package testdata +- +-type T struct { +- x, y int +-} +- +-var _ = [42]T{ +- T{}, // No simplification fix is offered in generated code. +- T{1, 2}, // No simplification fix is offered in generated code. +- T{3, 4}, // No simplification fix is offered in generated code. +-} +diff -urN a/gopls/internal/analysis/simplifycompositelit/testdata/src/generatedcode/generatedcode.go.golden b/gopls/internal/analysis/simplifycompositelit/testdata/src/generatedcode/generatedcode.go.golden +--- a/gopls/internal/analysis/simplifycompositelit/testdata/src/generatedcode/generatedcode.go.golden 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/analysis/simplifycompositelit/testdata/src/generatedcode/generatedcode.go.golden 1969-12-31 18:00:00.000000000 -0600 +@@ -1,17 +0,0 @@ +-// Copyright 2024 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-// Code generated with somegen DO NOT EDIT. +- +-package testdata +- +-type T struct { +- x, y int +-} +- +-var _ = [42]T{ +- T{}, // No simplification fix is offered in generated code. +- T{1, 2}, // No simplification fix is offered in generated code. +- T{3, 4}, // No simplification fix is offered in generated code. +-} +diff -urN a/gopls/internal/analysis/simplifyrange/doc.go b/gopls/internal/analysis/simplifyrange/doc.go +--- a/gopls/internal/analysis/simplifyrange/doc.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/analysis/simplifyrange/doc.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,32 +0,0 @@ +-// Copyright 2023 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-// Package simplifyrange defines an Analyzer that simplifies range statements. +-// https://golang.org/cmd/gofmt/#hdr-The_simplify_command +-// https://github.com/golang/go/blob/master/src/cmd/gofmt/simplify.go +-// +-// # Analyzer simplifyrange +-// +-// simplifyrange: check for range statement simplifications +-// +-// A range of the form: +-// +-// for x, _ = range v {...} +-// +-// will be simplified to: +-// +-// for x = range v {...} +-// +-// A range of the form: +-// +-// for _ = range v {...} +-// +-// will be simplified to: +-// +-// for range v {...} +-// +-// This is one of the simplifications that "gofmt -s" applies. +-// +-// This analyzer ignores generated code. 
+-package simplifyrange +diff -urN a/gopls/internal/analysis/simplifyrange/simplifyrange.go b/gopls/internal/analysis/simplifyrange/simplifyrange.go +--- a/gopls/internal/analysis/simplifyrange/simplifyrange.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/analysis/simplifyrange/simplifyrange.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,87 +0,0 @@ +-// Copyright 2020 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package simplifyrange +- +-import ( +- _ "embed" +- "go/ast" +- "go/token" +- +- "golang.org/x/tools/go/analysis" +- "golang.org/x/tools/go/analysis/passes/inspect" +- "golang.org/x/tools/go/ast/inspector" +- "golang.org/x/tools/internal/analysisinternal" +-) +- +-//go:embed doc.go +-var doc string +- +-var Analyzer = &analysis.Analyzer{ +- Name: "simplifyrange", +- Doc: analysisinternal.MustExtractDoc(doc, "simplifyrange"), +- Requires: []*analysis.Analyzer{inspect.Analyzer}, +- Run: run, +- URL: "https://pkg.go.dev/golang.org/x/tools/gopls/internal/analysis/simplifyrange", +-} +- +-func run(pass *analysis.Pass) (any, error) { +- // Gather information whether file is generated or not +- generated := make(map[*token.File]bool) +- for _, file := range pass.Files { +- if ast.IsGenerated(file) { +- generated[pass.Fset.File(file.FileStart)] = true +- } +- } +- +- inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) +- nodeFilter := []ast.Node{ +- (*ast.RangeStmt)(nil), +- } +- inspect.Preorder(nodeFilter, func(n ast.Node) { +- rng := n.(*ast.RangeStmt) +- +- kblank := isBlank(rng.Key) +- vblank := isBlank(rng.Value) +- var start, end token.Pos +- switch { +- case kblank && (rng.Value == nil || vblank): +- // for _ = range x {} +- // for _, _ = range x {} +- // ^^^^^^^ +- start, end = rng.Key.Pos(), rng.Range +- +- case vblank: +- // for k, _ := range x {} +- // ^^^ +- start, end = rng.Key.End(), rng.Value.End() +- +- default: +- return +- } +- +- if generated[pass.Fset.File(n.Pos())] { +- return +- } +- +- pass.Report(analysis.Diagnostic{ +- Pos: start, +- End: end, +- Message: "simplify range expression", +- SuggestedFixes: []analysis.SuggestedFix{{ +- Message: "Remove empty value", +- TextEdits: []analysis.TextEdit{{ +- Pos: start, +- End: end, +- }}, +- }}, +- }) +- }) +- return nil, nil +-} +- +-func isBlank(e ast.Expr) bool { +- id, ok := e.(*ast.Ident) +- return ok && id.Name == "_" +-} +diff -urN a/gopls/internal/analysis/simplifyrange/simplifyrange_test.go b/gopls/internal/analysis/simplifyrange/simplifyrange_test.go +--- a/gopls/internal/analysis/simplifyrange/simplifyrange_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/analysis/simplifyrange/simplifyrange_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,19 +0,0 @@ +-// Copyright 2020 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. 
+- +-package simplifyrange_test +- +-import ( +- "testing" +- +- "golang.org/x/tools/go/analysis/analysistest" +- "golang.org/x/tools/gopls/internal/analysis/simplifyrange" +-) +- +-func Test(t *testing.T) { +- analysistest.RunWithSuggestedFixes(t, analysistest.TestData(), simplifyrange.Analyzer, +- "a", +- "generatedcode", +- "rangeoverfunc") +-} +diff -urN a/gopls/internal/analysis/simplifyrange/testdata/src/a/a.go b/gopls/internal/analysis/simplifyrange/testdata/src/a/a.go +--- a/gopls/internal/analysis/simplifyrange/testdata/src/a/a.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/analysis/simplifyrange/testdata/src/a/a.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,23 +0,0 @@ +-// Copyright 2020 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package testdata +- +-import "log" +- +-func m() { +- maps := make(map[string]string) +- for k, _ := range maps { // want "simplify range expression" +- log.Println(k) +- } +- for _ = range maps { // want "simplify range expression" +- } +- for _, _ = range maps { // want "simplify range expression" +- } +- for _, v := range maps { // nope +- println(v) +- } +- for range maps { // nope +- } +-} +diff -urN a/gopls/internal/analysis/simplifyrange/testdata/src/a/a.go.golden b/gopls/internal/analysis/simplifyrange/testdata/src/a/a.go.golden +--- a/gopls/internal/analysis/simplifyrange/testdata/src/a/a.go.golden 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/analysis/simplifyrange/testdata/src/a/a.go.golden 1969-12-31 18:00:00.000000000 -0600 +@@ -1,23 +0,0 @@ +-// Copyright 2020 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package testdata +- +-import "log" +- +-func m() { +- maps := make(map[string]string) +- for k := range maps { // want "simplify range expression" +- log.Println(k) +- } +- for range maps { // want "simplify range expression" +- } +- for range maps { // want "simplify range expression" +- } +- for _, v := range maps { // nope +- println(v) +- } +- for range maps { // nope +- } +-} +diff -urN a/gopls/internal/analysis/simplifyrange/testdata/src/generatedcode/generatedcode.go b/gopls/internal/analysis/simplifyrange/testdata/src/generatedcode/generatedcode.go +--- a/gopls/internal/analysis/simplifyrange/testdata/src/generatedcode/generatedcode.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/analysis/simplifyrange/testdata/src/generatedcode/generatedcode.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,18 +0,0 @@ +-// Copyright 2024 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-// Code generated with somegen DO NOT EDIT. +- +-package testdata +- +-import "log" +- +-func mgeneratedcode() { +- maps := make(map[string]string) +- for k, _ := range maps { // No simplification fix is offered in generated code. +- log.Println(k) +- } +- for _ = range maps { // No simplification fix is offered in generated code. 
+- } +-} +diff -urN a/gopls/internal/analysis/simplifyrange/testdata/src/generatedcode/generatedcode.go.golden b/gopls/internal/analysis/simplifyrange/testdata/src/generatedcode/generatedcode.go.golden +--- a/gopls/internal/analysis/simplifyrange/testdata/src/generatedcode/generatedcode.go.golden 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/analysis/simplifyrange/testdata/src/generatedcode/generatedcode.go.golden 1969-12-31 18:00:00.000000000 -0600 +@@ -1,18 +0,0 @@ +-// Copyright 2024 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-// Code generated with somegen DO NOT EDIT. +- +-package testdata +- +-import "log" +- +-func mgeneratedcode() { +- maps := make(map[string]string) +- for k, _ := range maps { // No simplification fix is offered in generated code. +- log.Println(k) +- } +- for _ = range maps { // No simplification fix is offered in generated code. +- } +-} +diff -urN a/gopls/internal/analysis/simplifyrange/testdata/src/rangeoverfunc/rangeoverfunc.go b/gopls/internal/analysis/simplifyrange/testdata/src/rangeoverfunc/rangeoverfunc.go +--- a/gopls/internal/analysis/simplifyrange/testdata/src/rangeoverfunc/rangeoverfunc.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/analysis/simplifyrange/testdata/src/rangeoverfunc/rangeoverfunc.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,20 +0,0 @@ +-// Copyright 2024 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package testdata +- +-import "iter" +- +-func _(seq1 iter.Seq[int], seq2 iter.Seq2[int, int]) { +- // range-over-func is (once again) consistent with other types (#65236) +- for _ = range "" { // want "simplify range expression" +- } +- for _ = range seq1 { // want `simplify range expression` +- } +- for _, v := range seq2 { // silence +- _ = v +- } +- for _, _ = range seq2 { // want `simplify range expression` +- } +-} +diff -urN a/gopls/internal/analysis/simplifyrange/testdata/src/rangeoverfunc/rangeoverfunc.go.golden b/gopls/internal/analysis/simplifyrange/testdata/src/rangeoverfunc/rangeoverfunc.go.golden +--- a/gopls/internal/analysis/simplifyrange/testdata/src/rangeoverfunc/rangeoverfunc.go.golden 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/analysis/simplifyrange/testdata/src/rangeoverfunc/rangeoverfunc.go.golden 1969-12-31 18:00:00.000000000 -0600 +@@ -1,20 +0,0 @@ +-// Copyright 2024 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package testdata +- +-import "iter" +- +-func _(seq1 iter.Seq[int], seq2 iter.Seq2[int, int]) { +- // range-over-func is (once again) consistent with other types (#65236) +- for range "" { // want "simplify range expression" +- } +- for range seq1 { // want `simplify range expression` +- } +- for _, v := range seq2 { // silence +- _ = v +- } +- for range seq2 { // want `simplify range expression` +- } +-} +diff -urN a/gopls/internal/analysis/simplifyslice/doc.go b/gopls/internal/analysis/simplifyslice/doc.go +--- a/gopls/internal/analysis/simplifyslice/doc.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/analysis/simplifyslice/doc.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,24 +0,0 @@ +-// Copyright 2023 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. 
+- +-// Package simplifyslice defines an Analyzer that simplifies slice statements. +-// https://github.com/golang/go/blob/master/src/cmd/gofmt/simplify.go +-// https://golang.org/cmd/gofmt/#hdr-The_simplify_command +-// +-// # Analyzer simplifyslice +-// +-// simplifyslice: check for slice simplifications +-// +-// A slice expression of the form: +-// +-// s[a:len(s)] +-// +-// will be simplified to: +-// +-// s[a:] +-// +-// This is one of the simplifications that "gofmt -s" applies. +-// +-// This analyzer ignores generated code. +-package simplifyslice +diff -urN a/gopls/internal/analysis/simplifyslice/simplifyslice.go b/gopls/internal/analysis/simplifyslice/simplifyslice.go +--- a/gopls/internal/analysis/simplifyslice/simplifyslice.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/analysis/simplifyslice/simplifyslice.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,101 +0,0 @@ +-// Copyright 2023 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package simplifyslice +- +-import ( +- "bytes" +- _ "embed" +- "fmt" +- "go/ast" +- "go/printer" +- "go/token" +- +- "golang.org/x/tools/go/analysis" +- "golang.org/x/tools/go/analysis/passes/inspect" +- "golang.org/x/tools/go/ast/inspector" +- "golang.org/x/tools/internal/analysisinternal" +-) +- +-//go:embed doc.go +-var doc string +- +-var Analyzer = &analysis.Analyzer{ +- Name: "simplifyslice", +- Doc: analysisinternal.MustExtractDoc(doc, "simplifyslice"), +- Requires: []*analysis.Analyzer{inspect.Analyzer}, +- Run: run, +- URL: "https://pkg.go.dev/golang.org/x/tools/gopls/internal/analysis/simplifyslice", +-} +- +-// Note: We could also simplify slice expressions of the form s[0:b] to s[:b] +-// but we leave them as is since sometimes we want to be very explicit +-// about the lower bound. 
+-// An example where the 0 helps: +-// x, y, z := b[0:2], b[2:4], b[4:6] +-// An example where it does not: +-// x, y := b[:n], b[n:] +- +-func run(pass *analysis.Pass) (any, error) { +- // Gather information whether file is generated or not +- generated := make(map[*token.File]bool) +- for _, file := range pass.Files { +- if ast.IsGenerated(file) { +- generated[pass.Fset.File(file.FileStart)] = true +- } +- } +- +- inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) +- nodeFilter := []ast.Node{ +- (*ast.SliceExpr)(nil), +- } +- inspect.Preorder(nodeFilter, func(n ast.Node) { +- if _, ok := generated[pass.Fset.File(n.Pos())]; ok { +- return // skip checking if it's generated code +- } +- +- expr := n.(*ast.SliceExpr) +- // - 3-index slices always require the 2nd and 3rd index +- if expr.Max != nil { +- return +- } +- s, ok := expr.X.(*ast.Ident) +- // the array/slice object is a single, resolved identifier +- if !ok || s.Obj == nil { +- return +- } +- call, ok := expr.High.(*ast.CallExpr) +- // the high expression is a function call with a single argument +- if !ok || len(call.Args) != 1 || call.Ellipsis.IsValid() { +- return +- } +- fun, ok := call.Fun.(*ast.Ident) +- // the function called is "len" and it is not locally defined; and +- // because we don't have dot imports, it must be the predefined len() +- if !ok || fun.Name != "len" || fun.Obj != nil { +- return +- } +- arg, ok := call.Args[0].(*ast.Ident) +- // the len argument is the array/slice object +- if !ok || arg.Obj != s.Obj { +- return +- } +- var b bytes.Buffer +- printer.Fprint(&b, pass.Fset, expr.High) // ignore error +- pass.Report(analysis.Diagnostic{ +- Pos: expr.High.Pos(), +- End: expr.High.End(), +- Message: fmt.Sprintf("unneeded: %s", b.String()), +- SuggestedFixes: []analysis.SuggestedFix{{ +- Message: fmt.Sprintf("Remove '%s'", b.String()), +- TextEdits: []analysis.TextEdit{{ +- Pos: expr.High.Pos(), +- End: expr.High.End(), +- NewText: []byte{}, +- }}, +- }}, +- }) +- }) +- return nil, nil +-} +diff -urN a/gopls/internal/analysis/simplifyslice/simplifyslice_test.go b/gopls/internal/analysis/simplifyslice/simplifyslice_test.go +--- a/gopls/internal/analysis/simplifyslice/simplifyslice_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/analysis/simplifyslice/simplifyslice_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,17 +0,0 @@ +-// Copyright 2020 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package simplifyslice_test +- +-import ( +- "testing" +- +- "golang.org/x/tools/go/analysis/analysistest" +- "golang.org/x/tools/gopls/internal/analysis/simplifyslice" +-) +- +-func Test(t *testing.T) { +- testdata := analysistest.TestData() +- analysistest.RunWithSuggestedFixes(t, testdata, simplifyslice.Analyzer, "a", "generatedcode", "typeparams") +-} +diff -urN a/gopls/internal/analysis/simplifyslice/testdata/src/a/a.go b/gopls/internal/analysis/simplifyslice/testdata/src/a/a.go +--- a/gopls/internal/analysis/simplifyslice/testdata/src/a/a.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/analysis/simplifyslice/testdata/src/a/a.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,70 +0,0 @@ +-// Copyright 2020 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. 
+- +-package testdata +- +-var ( +- a [10]byte +- b [20]float32 +- s []int +- t struct { +- s []byte +- } +- +- _ = a[0:] +- _ = a[1:10] +- _ = a[2:len(a)] // want "unneeded: len\\(a\\)" +- _ = a[3:(len(a))] +- _ = a[len(a)-1 : len(a)] // want "unneeded: len\\(a\\)" +- _ = a[2:len(a):len(a)] +- +- _ = a[:] +- _ = a[:10] +- _ = a[:len(a)] // want "unneeded: len\\(a\\)" +- _ = a[:(len(a))] +- _ = a[:len(a)-1] +- _ = a[:len(a):len(a)] +- +- _ = s[0:] +- _ = s[1:10] +- _ = s[2:len(s)] // want "unneeded: len\\(s\\)" +- _ = s[3:(len(s))] +- _ = s[len(a) : len(s)-1] +- _ = s[0:len(b)] +- _ = s[2:len(s):len(s)] +- +- _ = s[:] +- _ = s[:10] +- _ = s[:len(s)] // want "unneeded: len\\(s\\)" +- _ = s[:(len(s))] +- _ = s[:len(s)-1] +- _ = s[:len(b)] +- _ = s[:len(s):len(s)] +- +- _ = t.s[0:] +- _ = t.s[1:10] +- _ = t.s[2:len(t.s)] +- _ = t.s[3:(len(t.s))] +- _ = t.s[len(a) : len(t.s)-1] +- _ = t.s[0:len(b)] +- _ = t.s[2:len(t.s):len(t.s)] +- +- _ = t.s[:] +- _ = t.s[:10] +- _ = t.s[:len(t.s)] +- _ = t.s[:(len(t.s))] +- _ = t.s[:len(t.s)-1] +- _ = t.s[:len(b)] +- _ = t.s[:len(t.s):len(t.s)] +-) +- +-func _() { +- s := s[0:len(s)] // want "unneeded: len\\(s\\)" +- _ = s +-} +- +-func m() { +- maps := []int{} +- _ = maps[1:len(maps)] // want "unneeded: len\\(maps\\)" +-} +diff -urN a/gopls/internal/analysis/simplifyslice/testdata/src/a/a.go.golden b/gopls/internal/analysis/simplifyslice/testdata/src/a/a.go.golden +--- a/gopls/internal/analysis/simplifyslice/testdata/src/a/a.go.golden 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/analysis/simplifyslice/testdata/src/a/a.go.golden 1969-12-31 18:00:00.000000000 -0600 +@@ -1,70 +0,0 @@ +-// Copyright 2020 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package testdata +- +-var ( +- a [10]byte +- b [20]float32 +- s []int +- t struct { +- s []byte +- } +- +- _ = a[0:] +- _ = a[1:10] +- _ = a[2:] // want "unneeded: len\\(a\\)" +- _ = a[3:(len(a))] +- _ = a[len(a)-1:] // want "unneeded: len\\(a\\)" +- _ = a[2:len(a):len(a)] +- +- _ = a[:] +- _ = a[:10] +- _ = a[:] // want "unneeded: len\\(a\\)" +- _ = a[:(len(a))] +- _ = a[:len(a)-1] +- _ = a[:len(a):len(a)] +- +- _ = s[0:] +- _ = s[1:10] +- _ = s[2:] // want "unneeded: len\\(s\\)" +- _ = s[3:(len(s))] +- _ = s[len(a) : len(s)-1] +- _ = s[0:len(b)] +- _ = s[2:len(s):len(s)] +- +- _ = s[:] +- _ = s[:10] +- _ = s[:] // want "unneeded: len\\(s\\)" +- _ = s[:(len(s))] +- _ = s[:len(s)-1] +- _ = s[:len(b)] +- _ = s[:len(s):len(s)] +- +- _ = t.s[0:] +- _ = t.s[1:10] +- _ = t.s[2:len(t.s)] +- _ = t.s[3:(len(t.s))] +- _ = t.s[len(a) : len(t.s)-1] +- _ = t.s[0:len(b)] +- _ = t.s[2:len(t.s):len(t.s)] +- +- _ = t.s[:] +- _ = t.s[:10] +- _ = t.s[:len(t.s)] +- _ = t.s[:(len(t.s))] +- _ = t.s[:len(t.s)-1] +- _ = t.s[:len(b)] +- _ = t.s[:len(t.s):len(t.s)] +-) +- +-func _() { +- s := s[0:] // want "unneeded: len\\(s\\)" +- _ = s +-} +- +-func m() { +- maps := []int{} +- _ = maps[1:] // want "unneeded: len\\(maps\\)" +-} +diff -urN a/gopls/internal/analysis/simplifyslice/testdata/src/generatedcode/generatedcode.go b/gopls/internal/analysis/simplifyslice/testdata/src/generatedcode/generatedcode.go +--- a/gopls/internal/analysis/simplifyslice/testdata/src/generatedcode/generatedcode.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/analysis/simplifyslice/testdata/src/generatedcode/generatedcode.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,72 +0,0 @@ +-// Copyright 2024 The Go Authors. All rights reserved. 
+-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-// Code generated with somegen DO NOT EDIT. +- +-package testdata +- +-var ( +- a [10]byte +- b [20]float32 +- s []int +- t struct { +- s []byte +- } +- +- _ = a[0:] +- _ = a[1:10] +- _ = a[2:len(a)] // No simplification fix is offered in generated code. +- _ = a[3:(len(a))] +- _ = a[len(a)-1 : len(a)] // No simplification fix is offered in generated code. +- _ = a[2:len(a):len(a)] +- +- _ = a[:] +- _ = a[:10] +- _ = a[:len(a)] // No simplification fix is offered in generated code. +- _ = a[:(len(a))] +- _ = a[:len(a)-1] +- _ = a[:len(a):len(a)] +- +- _ = s[0:] +- _ = s[1:10] +- _ = s[2:len(s)] // No simplification fix is offered in generated code. +- _ = s[3:(len(s))] +- _ = s[len(a) : len(s)-1] +- _ = s[0:len(b)] +- _ = s[2:len(s):len(s)] +- +- _ = s[:] +- _ = s[:10] +- _ = s[:len(s)] // No simplification fix is offered in generated code. +- _ = s[:(len(s))] +- _ = s[:len(s)-1] +- _ = s[:len(b)] +- _ = s[:len(s):len(s)] +- +- _ = t.s[0:] +- _ = t.s[1:10] +- _ = t.s[2:len(t.s)] +- _ = t.s[3:(len(t.s))] +- _ = t.s[len(a) : len(t.s)-1] +- _ = t.s[0:len(b)] +- _ = t.s[2:len(t.s):len(t.s)] +- +- _ = t.s[:] +- _ = t.s[:10] +- _ = t.s[:len(t.s)] +- _ = t.s[:(len(t.s))] +- _ = t.s[:len(t.s)-1] +- _ = t.s[:len(b)] +- _ = t.s[:len(t.s):len(t.s)] +-) +- +-func _() { +- s := s[0:len(s)] // No simplification fix is offered in generated code. +- _ = s +-} +- +-func m() { +- maps := []int{} +- _ = maps[1:len(maps)] // No simplification fix is offered in generated code. +-} +diff -urN a/gopls/internal/analysis/simplifyslice/testdata/src/generatedcode/generatedcode.go.golden b/gopls/internal/analysis/simplifyslice/testdata/src/generatedcode/generatedcode.go.golden +--- a/gopls/internal/analysis/simplifyslice/testdata/src/generatedcode/generatedcode.go.golden 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/analysis/simplifyslice/testdata/src/generatedcode/generatedcode.go.golden 1969-12-31 18:00:00.000000000 -0600 +@@ -1,72 +0,0 @@ +-// Copyright 2024 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-// Code generated with somegen DO NOT EDIT. +- +-package testdata +- +-var ( +- a [10]byte +- b [20]float32 +- s []int +- t struct { +- s []byte +- } +- +- _ = a[0:] +- _ = a[1:10] +- _ = a[2:len(a)] // No simplification fix is offered in generated code. +- _ = a[3:(len(a))] +- _ = a[len(a)-1 : len(a)] // No simplification fix is offered in generated code. +- _ = a[2:len(a):len(a)] +- +- _ = a[:] +- _ = a[:10] +- _ = a[:len(a)] // No simplification fix is offered in generated code. +- _ = a[:(len(a))] +- _ = a[:len(a)-1] +- _ = a[:len(a):len(a)] +- +- _ = s[0:] +- _ = s[1:10] +- _ = s[2:len(s)] // No simplification fix is offered in generated code. +- _ = s[3:(len(s))] +- _ = s[len(a) : len(s)-1] +- _ = s[0:len(b)] +- _ = s[2:len(s):len(s)] +- +- _ = s[:] +- _ = s[:10] +- _ = s[:len(s)] // No simplification fix is offered in generated code. 
+- _ = s[:(len(s))] +- _ = s[:len(s)-1] +- _ = s[:len(b)] +- _ = s[:len(s):len(s)] +- +- _ = t.s[0:] +- _ = t.s[1:10] +- _ = t.s[2:len(t.s)] +- _ = t.s[3:(len(t.s))] +- _ = t.s[len(a) : len(t.s)-1] +- _ = t.s[0:len(b)] +- _ = t.s[2:len(t.s):len(t.s)] +- +- _ = t.s[:] +- _ = t.s[:10] +- _ = t.s[:len(t.s)] +- _ = t.s[:(len(t.s))] +- _ = t.s[:len(t.s)-1] +- _ = t.s[:len(b)] +- _ = t.s[:len(t.s):len(t.s)] +-) +- +-func _() { +- s := s[0:len(s)] // No simplification fix is offered in generated code. +- _ = s +-} +- +-func m() { +- maps := []int{} +- _ = maps[1:len(maps)] // No simplification fix is offered in generated code. +-} +diff -urN a/gopls/internal/analysis/simplifyslice/testdata/src/typeparams/typeparams.go b/gopls/internal/analysis/simplifyslice/testdata/src/typeparams/typeparams.go +--- a/gopls/internal/analysis/simplifyslice/testdata/src/typeparams/typeparams.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/analysis/simplifyslice/testdata/src/typeparams/typeparams.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,36 +0,0 @@ +-// Copyright 2021 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package testdata +- +-type List[E any] []E +- +-// TODO(suzmue): add a test for generic slice expressions when https://github.com/golang/go/issues/48618 is closed. +-// type S interface{ ~[]int } +- +-var ( +- a [10]byte +- b [20]float32 +- p List[int] +- +- _ = p[0:] +- _ = p[1:10] +- _ = p[2:len(p)] // want "unneeded: len\\(p\\)" +- _ = p[3:(len(p))] +- _ = p[len(a) : len(p)-1] +- _ = p[0:len(b)] +- _ = p[2:len(p):len(p)] +- +- _ = p[:] +- _ = p[:10] +- _ = p[:len(p)] // want "unneeded: len\\(p\\)" +- _ = p[:(len(p))] +- _ = p[:len(p)-1] +- _ = p[:len(b)] +- _ = p[:len(p):len(p)] +-) +- +-func foo[E any](a List[E]) { +- _ = a[0:len(a)] // want "unneeded: len\\(a\\)" +-} +diff -urN a/gopls/internal/analysis/simplifyslice/testdata/src/typeparams/typeparams.go.golden b/gopls/internal/analysis/simplifyslice/testdata/src/typeparams/typeparams.go.golden +--- a/gopls/internal/analysis/simplifyslice/testdata/src/typeparams/typeparams.go.golden 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/analysis/simplifyslice/testdata/src/typeparams/typeparams.go.golden 1969-12-31 18:00:00.000000000 -0600 +@@ -1,36 +0,0 @@ +-// Copyright 2021 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package testdata +- +-type List[E any] []E +- +-// TODO(suzmue): add a test for generic slice expressions when https://github.com/golang/go/issues/48618 is closed. +-// type S interface{ ~[]int } +- +-var ( +- a [10]byte +- b [20]float32 +- p List[int] +- +- _ = p[0:] +- _ = p[1:10] +- _ = p[2:] // want "unneeded: len\\(p\\)" +- _ = p[3:(len(p))] +- _ = p[len(a) : len(p)-1] +- _ = p[0:len(b)] +- _ = p[2:len(p):len(p)] +- +- _ = p[:] +- _ = p[:10] +- _ = p[:] // want "unneeded: len\\(p\\)" +- _ = p[:(len(p))] +- _ = p[:len(p)-1] +- _ = p[:len(b)] +- _ = p[:len(p):len(p)] +-) +- +-func foo[E any](a List[E]) { +- _ = a[0:] // want "unneeded: len\\(a\\)" +-} +diff -urN a/gopls/internal/analysis/unusedfunc/doc.go b/gopls/internal/analysis/unusedfunc/doc.go +--- a/gopls/internal/analysis/unusedfunc/doc.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/analysis/unusedfunc/doc.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,58 +0,0 @@ +-// Copyright 2024 The Go Authors. All rights reserved. 
+-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-// Package unusedfunc defines an analyzer that checks for unused +-// functions and methods +-// +-// # Analyzer unusedfunc +-// +-// unusedfunc: check for unused functions, methods, etc +-// +-// The unusedfunc analyzer reports functions and methods that are +-// never referenced outside of their own declaration. +-// +-// A function is considered unused if it is unexported and not +-// referenced (except within its own declaration). +-// +-// A method is considered unused if it is unexported, not referenced +-// (except within its own declaration), and its name does not match +-// that of any method of an interface type declared within the same +-// package. +-// +-// The tool may report false positives in some situations, for +-// example: +-// +-// - for a declaration of an unexported function that is referenced +-// from another package using the go:linkname mechanism, if the +-// declaration's doc comment does not also have a go:linkname +-// comment. +-// +-// (Such code is in any case strongly discouraged: linkname +-// annotations, if they must be used at all, should be used on both +-// the declaration and the alias.) +-// +-// - for compiler intrinsics in the "runtime" package that, though +-// never referenced, are known to the compiler and are called +-// indirectly by compiled object code. +-// +-// - for functions called only from assembly. +-// +-// - for functions called only from files whose build tags are not +-// selected in the current build configuration. +-// +-// Since these situations are relatively common in the low-level parts +-// of the runtime, this analyzer ignores the standard library. +-// See https://go.dev/issue/71686 and https://go.dev/issue/74130 for +-// further discussion of these limitations. +-// +-// The unusedfunc algorithm is not as precise as the +-// golang.org/x/tools/cmd/deadcode tool, but it has the advantage that +-// it runs within the modular analysis framework, enabling near +-// real-time feedback within gopls. +-// +-// The unusedfunc analyzer also reports unused types, vars, and +-// constants. Enums--constants defined with iota--are ignored since +-// even the unused values must remain present to preserve the logical +-// ordering. +-package unusedfunc +diff -urN a/gopls/internal/analysis/unusedfunc/main.go b/gopls/internal/analysis/unusedfunc/main.go +--- a/gopls/internal/analysis/unusedfunc/main.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/analysis/unusedfunc/main.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,15 +0,0 @@ +-// Copyright 2024 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-//go:build ignore +- +-// The unusedfunc command runs the unusedfunc analyzer. +-package main +- +-import ( +- "golang.org/x/tools/go/analysis/singlechecker" +- "golang.org/x/tools/gopls/internal/analysis/unusedfunc" +-) +- +-func main() { singlechecker.Main(unusedfunc.Analyzer) } +diff -urN a/gopls/internal/analysis/unusedfunc/testdata/basic.txtar b/gopls/internal/analysis/unusedfunc/testdata/basic.txtar +--- a/gopls/internal/analysis/unusedfunc/testdata/basic.txtar 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/analysis/unusedfunc/testdata/basic.txtar 1969-12-31 18:00:00.000000000 -0600 +@@ -1,137 +0,0 @@ +-Basic test of unusedfunc. 
+- +--- go.mod -- +-module example.com +- +-go 1.21 +- +--- a/a.go -- +-package a +- +-func main() { +- _ = live +-} +- +-// -- functions -- +- +-func Exported() {} +- +-func dead() { // want `function "dead" is unused` +-} +- +-func deadRecursive() int { // want `function "deadRecursive" is unused` +- return deadRecursive() +-} +- +-func live() {} +- +-//go:linkname foo +-func apparentlyDeadButHasPrecedingLinknameComment() {} +- +-// -- methods -- +- +-type ExportedType int +-type unexportedType int +- +-func (ExportedType) Exported() {} +-func (unexportedType) Exported() {} +- +-func (x ExportedType) dead() { // want `method "dead" is unused` +- x.dead() +-} +- +-func (u unexportedType) dead() { // want `method "dead" is unused` +- u.dead() +-} +- +-func (x ExportedType) dynamic() {} // matches name of interface method => live +- +-type _ interface{ dynamic() } +- +-// -- types without methods -- +- +-type ExportedType2 int +- +-// self-references don't count +-type unusedUnexportedType2 struct{ *unusedUnexportedType2 } // want `type "unusedUnexportedType2" is unused` +- +-type ( +- one int +- two one // want `type "two" is unused` +-) +- +-// -- generic methods -- +- +-type g[T any] int +- +-func (g[T]) method() {} // want `method "method" is unused` +- +-// -- constants -- +- +-const unusedConst = 1 // want `const "unusedConst" is unused` +- +-const ( +- unusedEnum = iota +-) +- +-const ( +- constOne = 1 +- unusedConstTwo = constOne // want `const "unusedConstTwo" is unused` +-) +- +--- a/a.go.golden -- +-package a +- +-func main() { +- _ = live +-} +- +-// -- functions -- +- +-func Exported() {} +- +-func live() {} +- +-//go:linkname foo +-func apparentlyDeadButHasPrecedingLinknameComment() {} +- +-// -- methods -- +- +-type ExportedType int +-type unexportedType int +- +-func (ExportedType) Exported() {} +-func (unexportedType) Exported() {} +- +-func (x ExportedType) dynamic() {} // matches name of interface method => live +- +-type _ interface{ dynamic() } +- +- +-// -- types without methods -- +- +-type ExportedType2 int +- +-// want `type "unusedUnexportedType2" is unused` +- +-type ( +- one int +-) +- +-// -- generic methods -- +- +-type g[T any] int +- +-// want `method "method" is unused` +- +-// -- constants -- +- +-// want `const "unusedConst" is unused` +- +-const ( +- unusedEnum = iota +-) +- +-const ( +- constOne = 1 +-) +diff -urN a/gopls/internal/analysis/unusedfunc/unusedfunc.go b/gopls/internal/analysis/unusedfunc/unusedfunc.go +--- a/gopls/internal/analysis/unusedfunc/unusedfunc.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/analysis/unusedfunc/unusedfunc.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,264 +0,0 @@ +-// Copyright 2024 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. 
+- +-package unusedfunc +- +-import ( +- _ "embed" +- "fmt" +- "go/ast" +- "go/token" +- "go/types" +- "strings" +- +- "golang.org/x/tools/go/analysis" +- "golang.org/x/tools/go/analysis/passes/inspect" +- "golang.org/x/tools/go/ast/edge" +- "golang.org/x/tools/go/ast/inspector" +- "golang.org/x/tools/internal/analysisinternal" +- typeindexanalyzer "golang.org/x/tools/internal/analysisinternal/typeindex" +- "golang.org/x/tools/internal/astutil" +- "golang.org/x/tools/internal/typesinternal/typeindex" +-) +- +-// Assumptions +-// +-// Like unusedparams, this analyzer depends on the invariant of the +-// gopls analysis driver that only the "widest" package (the one with +-// the most files) for a given file is analyzed. This invariant allows +-// the algorithm to make "closed world" assumptions about the target +-// package. (In general, analysis of Go test packages cannot make that +-// assumption because in-package tests add new files to existing +-// packages, potentially invalidating results.) Consequently, running +-// this analyzer in, say, unitchecker or multichecker may produce +-// incorrect results. +-// +-// A function is unreferenced if it is never referenced except within +-// its own declaration, and it is unexported. (Exported functions must +-// be assumed to be referenced from other packages.) +-// +-// For methods, we assume that the receiver type is "live" (variables +-// of that type are created) and "address taken" (its rtype ends up in +-// an at least one interface value). This means exported methods may +-// be called via reflection or by interfaces defined in other +-// packages, so again we are concerned only with unexported methods. +-// +-// To discount the possibility of a method being called via an +-// interface, we must additionally ensure that no literal interface +-// type within the package has a method of the same name. +-// (Unexported methods cannot be called through interfaces declared +-// in other packages because each package has a private namespace +-// for unexported identifiers.) +-// +-// Types (sans methods), constants, and vars are more straightforward. +-// For now we ignore enums (const decls using iota) since it is +-// common for at least some values to be unused when they are added +-// for symmetry, future use, or to conform to some external pattern. +- +-//go:embed doc.go +-var doc string +- +-var Analyzer = &analysis.Analyzer{ +- Name: "unusedfunc", +- Doc: analysisinternal.MustExtractDoc(doc, "unusedfunc"), +- Requires: []*analysis.Analyzer{inspect.Analyzer, typeindexanalyzer.Analyzer}, +- Run: run, +- URL: "https://pkg.go.dev/golang.org/x/tools/gopls/internal/analysis/unusedfunc", +-} +- +-func run(pass *analysis.Pass) (any, error) { +- // The standard library makes heavy use of intrinsics, linknames, etc, +- // that confuse this algorithm; so skip it (#74130). +- if analysisinternal.IsStdPackage(pass.Pkg.Path()) { +- return nil, nil +- } +- +- var ( +- inspect = pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) +- index = pass.ResultOf[typeindexanalyzer.Analyzer].(*typeindex.Index) +- ) +- +- // Gather names of unexported interface methods declared in this package. 
+- localIfaceMethods := make(map[string]bool) +- nodeFilter := []ast.Node{(*ast.InterfaceType)(nil)} +- inspect.Preorder(nodeFilter, func(n ast.Node) { +- iface := n.(*ast.InterfaceType) +- for _, field := range iface.Methods.List { +- if len(field.Names) > 0 { +- id := field.Names[0] +- if !id.IsExported() { +- // TODO(adonovan): check not just name but signature too. +- localIfaceMethods[id.Name] = true +- } +- } +- } +- }) +- +- // checkUnused reports a diagnostic if the object declared at id +- // is unexported and unused. References within curSelf are ignored. +- checkUnused := func(noun string, id *ast.Ident, node ast.Node, curSelf inspector.Cursor) { +- // Exported functions may be called from other packages. +- if id.IsExported() { +- return +- } +- +- // Blank functions are exempt from diagnostics. +- if id.Name == "_" { +- return +- } +- +- // Check for uses (including selections). +- obj := pass.TypesInfo.Defs[id] +- for curId := range index.Uses(obj) { +- // Ignore self references. +- if !curSelf.Contains(curId) { +- return // symbol is referenced +- } +- } +- +- // Expand to include leading doc comment. +- pos := node.Pos() +- if doc := astutil.DocComment(node); doc != nil { +- pos = doc.Pos() +- } +- +- // Expand to include trailing line comment. +- end := node.End() +- if doc := eolComment(node); doc != nil { +- end = doc.End() +- } +- +- pass.Report(analysis.Diagnostic{ +- Pos: id.Pos(), +- End: id.End(), +- Message: fmt.Sprintf("%s %q is unused", noun, id.Name), +- SuggestedFixes: []analysis.SuggestedFix{{ +- Message: fmt.Sprintf("Delete %s %q", noun, id.Name), +- TextEdits: []analysis.TextEdit{{ +- Pos: pos, +- End: end, +- }}, +- }}, +- }) +- } +- +- // Gather the set of enums (const GenDecls that use iota). +- enums := make(map[inspector.Cursor]bool) +- for curId := range index.Uses(types.Universe.Lookup("iota")) { +- for curDecl := range curId.Enclosing((*ast.GenDecl)(nil)) { +- enums[curDecl] = true +- break +- } +- } +- +- // Check each package-level declaration (and method) for uses. +- for curFile := range inspect.Root().Preorder((*ast.File)(nil)) { +- file := curFile.Node().(*ast.File) +- if ast.IsGenerated(file) { +- continue // skip generated files +- } +- +- nextDecl: +- for i := range file.Decls { +- curDecl := curFile.ChildAt(edge.File_Decls, i) +- decl := curDecl.Node().(ast.Decl) +- +- // Skip if there's a preceding //go:linkname directive. +- // (This is relevant only to func and var decls.) +- // +- // (A program can link fine without such a directive, +- // but it is bad style; and the directive may +- // appear anywhere, not just on the preceding line, +- // but again that is poor form.) +- if doc := astutil.DocComment(decl); doc != nil { +- for _, comment := range doc.List { +- // TODO(adonovan): use ast.ParseDirective when #68021 lands. +- if strings.HasPrefix(comment.Text, "//go:linkname ") { +- continue nextDecl +- } +- } +- } +- +- switch decl := decl.(type) { +- case *ast.FuncDecl: +- id := decl.Name +- // An (unexported) method whose name matches an +- // interface method declared in the same package +- // may be dynamically called via that interface. 
+- if decl.Recv != nil && localIfaceMethods[id.Name] { +- continue +- } +- +- // main and init functions are implicitly always used +- if decl.Recv == nil && (id.Name == "init" || id.Name == "main") { +- continue +- } +- +- noun := cond(decl.Recv == nil, "function", "method") +- checkUnused(noun, decl.Name, decl, curDecl) +- +- case *ast.GenDecl: +- // Instead of deleting a spec in a singleton decl, +- // delete the whole decl. +- singleton := len(decl.Specs) == 1 +- +- switch decl.Tok { +- case token.TYPE: +- for i, spec := range decl.Specs { +- var ( +- spec = spec.(*ast.TypeSpec) +- id = spec.Name +- curSelf = curDecl.ChildAt(edge.GenDecl_Specs, i) +- ) +- checkUnused("type", id, cond[ast.Node](singleton, decl, spec), curSelf) +- } +- +- case token.CONST, token.VAR: +- // Skip enums: values are often unused. +- if enums[curDecl] { +- continue +- } +- for i, spec := range decl.Specs { +- spec := spec.(*ast.ValueSpec) +- curSpec := curDecl.ChildAt(edge.GenDecl_Specs, i) +- +- // Ignore n:n and n:1 assignments for now. +- // TODO(adonovan): support these cases. +- if len(spec.Names) != 1 { +- continue +- } +- id := spec.Names[0] +- checkUnused(decl.Tok.String(), id, cond[ast.Node](singleton, decl, spec), curSpec) +- } +- } +- } +- } +- } +- +- return nil, nil +-} +- +-func eolComment(n ast.Node) *ast.CommentGroup { +- // TODO(adonovan): support: +- // func f() {...} // comment +- switch n := n.(type) { +- case *ast.GenDecl: +- if !n.TokPos.IsValid() && len(n.Specs) == 1 { +- return eolComment(n.Specs[0]) +- } +- case *ast.ValueSpec: +- return n.Comment +- case *ast.TypeSpec: +- return n.Comment +- } +- return nil +-} +- +-func cond[T any](cond bool, t, f T) T { +- if cond { +- return t +- } else { +- return f +- } +-} +diff -urN a/gopls/internal/analysis/unusedfunc/unusedfunc_test.go b/gopls/internal/analysis/unusedfunc/unusedfunc_test.go +--- a/gopls/internal/analysis/unusedfunc/unusedfunc_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/analysis/unusedfunc/unusedfunc_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,19 +0,0 @@ +-// Copyright 2024 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package unusedfunc_test +- +-import ( +- "path/filepath" +- "testing" +- +- "golang.org/x/tools/go/analysis/analysistest" +- "golang.org/x/tools/gopls/internal/analysis/unusedfunc" +- "golang.org/x/tools/internal/testfiles" +-) +- +-func Test(t *testing.T) { +- dir := testfiles.ExtractTxtarFileToTmp(t, filepath.Join(analysistest.TestData(), "basic.txtar")) +- analysistest.RunWithSuggestedFixes(t, dir, unusedfunc.Analyzer, "example.com/a") +-} +diff -urN a/gopls/internal/analysis/unusedparams/cmd/main.go b/gopls/internal/analysis/unusedparams/cmd/main.go +--- a/gopls/internal/analysis/unusedparams/cmd/main.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/analysis/unusedparams/cmd/main.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,13 +0,0 @@ +-// Copyright 2023 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-// The unusedparams command runs the unusedparams analyzer. 
+-package main +- +-import ( +- "golang.org/x/tools/go/analysis/singlechecker" +- "golang.org/x/tools/gopls/internal/analysis/unusedparams" +-) +- +-func main() { singlechecker.Main(unusedparams.Analyzer) } +diff -urN a/gopls/internal/analysis/unusedparams/doc.go b/gopls/internal/analysis/unusedparams/doc.go +--- a/gopls/internal/analysis/unusedparams/doc.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/analysis/unusedparams/doc.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,36 +0,0 @@ +-// Copyright 2023 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-// Package unusedparams defines an analyzer that checks for unused +-// parameters of functions. +-// +-// # Analyzer unusedparams +-// +-// unusedparams: check for unused parameters of functions +-// +-// The unusedparams analyzer checks functions to see if there are +-// any parameters that are not being used. +-// +-// To ensure soundness, it ignores: +-// - "address-taken" functions, that is, functions that are used as +-// a value rather than being called directly; their signatures may +-// be required to conform to a func type. +-// - exported functions or methods, since they may be address-taken +-// in another package. +-// - unexported methods whose name matches an interface method +-// declared in the same package, since the method's signature +-// may be required to conform to the interface type. +-// - functions with empty bodies, or containing just a call to panic. +-// - parameters that are unnamed, or named "_", the blank identifier. +-// +-// The analyzer suggests a fix of replacing the parameter name by "_", +-// but in such cases a deeper fix can be obtained by invoking the +-// "Refactor: remove unused parameter" code action, which will +-// eliminate the parameter entirely, along with all corresponding +-// arguments at call sites, while taking care to preserve any side +-// effects in the argument expressions; see +-// https://github.com/golang/tools/releases/tag/gopls%2Fv0.14. +-// +-// This analyzer ignores generated code. +-package unusedparams +diff -urN a/gopls/internal/analysis/unusedparams/testdata/src/a/a.go b/gopls/internal/analysis/unusedparams/testdata/src/a/a.go +--- a/gopls/internal/analysis/unusedparams/testdata/src/a/a.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/analysis/unusedparams/testdata/src/a/a.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,87 +0,0 @@ +-// Copyright 2022 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. 
+- +-package a +- +-import ( +- "bytes" +- "fmt" +- "net/http" +-) +- +-type parent interface { +- n(f bool) +-} +- +-type yuh struct { +- a int +-} +- +-func (y *yuh) n(f bool) { +- for i := 0; i < 10; i++ { +- fmt.Println(i) +- } +-} +- +-func a(i1 int, i2 int, i3 int) int { // want "unused parameter: i2" +- i3 += i1 +- _ = func(z int) int { // want "unused parameter: z" +- _ = 1 +- return 1 +- } +- return i3 +-} +- +-func b(c bytes.Buffer) { // want "unused parameter: c" +- _ = 1 +-} +- +-func z(h http.ResponseWriter, _ *http.Request) { // no report: func z is address-taken +- fmt.Println("Before") +-} +- +-func l(h http.Handler) http.Handler { // want "unused parameter: h" +- return http.HandlerFunc(z) +-} +- +-func mult(a, b int) int { // want "unused parameter: b" +- a += 1 +- return a +-} +- +-func y(a int) { +- panic("yo") +-} +- +-var _ = func(x int) {} // empty body: no diagnostic +- +-var _ = func(x int) { println() } // want "unused parameter: x" +- +-var ( +- calledGlobal = func(x int) { println() } // want "unused parameter: x" +- addressTakenGlobal = func(x int) { println() } // no report: function is address-taken +-) +- +-func _() { +- calledGlobal(1) +- println(addressTakenGlobal) +-} +- +-func Exported(unused int) {} // no finding: an exported function may be address-taken +- +-type T int +- +-func (T) m(f bool) { println() } // want "unused parameter: f" +-func (T) n(f bool) { println() } // no finding: n may match the interface method parent.n +- +-func _() { +- var fib func(x, y int) int +- fib = func(x, y int) int { // want "unused parameter: y" +- if x < 2 { +- return x +- } +- return fib(x-1, 123) + fib(x-2, 456) +- } +- fib(10, 42) +-} +diff -urN a/gopls/internal/analysis/unusedparams/testdata/src/a/a.go.golden b/gopls/internal/analysis/unusedparams/testdata/src/a/a.go.golden +--- a/gopls/internal/analysis/unusedparams/testdata/src/a/a.go.golden 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/analysis/unusedparams/testdata/src/a/a.go.golden 1969-12-31 18:00:00.000000000 -0600 +@@ -1,87 +0,0 @@ +-// Copyright 2022 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. 
+- +-package a +- +-import ( +- "bytes" +- "fmt" +- "net/http" +-) +- +-type parent interface { +- n(f bool) +-} +- +-type yuh struct { +- a int +-} +- +-func (y *yuh) n(f bool) { +- for i := 0; i < 10; i++ { +- fmt.Println(i) +- } +-} +- +-func a(i1 int, _ int, i3 int) int { // want "unused parameter: i2" +- i3 += i1 +- _ = func(_ int) int { // want "unused parameter: z" +- _ = 1 +- return 1 +- } +- return i3 +-} +- +-func b(_ bytes.Buffer) { // want "unused parameter: c" +- _ = 1 +-} +- +-func z(h http.ResponseWriter, _ *http.Request) { // no report: func z is address-taken +- fmt.Println("Before") +-} +- +-func l(_ http.Handler) http.Handler { // want "unused parameter: h" +- return http.HandlerFunc(z) +-} +- +-func mult(a, _ int) int { // want "unused parameter: b" +- a += 1 +- return a +-} +- +-func y(a int) { +- panic("yo") +-} +- +-var _ = func(x int) {} // empty body: no diagnostic +- +-var _ = func(_ int) { println() } // want "unused parameter: x" +- +-var ( +- calledGlobal = func(_ int) { println() } // want "unused parameter: x" +- addressTakenGlobal = func(x int) { println() } // no report: function is address-taken +-) +- +-func _() { +- calledGlobal(1) +- println(addressTakenGlobal) +-} +- +-func Exported(unused int) {} // no finding: an exported function may be address-taken +- +-type T int +- +-func (T) m(_ bool) { println() } // want "unused parameter: f" +-func (T) n(f bool) { println() } // no finding: n may match the interface method parent.n +- +-func _() { +- var fib func(x, y int) int +- fib = func(x, _ int) int { // want "unused parameter: y" +- if x < 2 { +- return x +- } +- return fib(x-1, 123) + fib(x-2, 456) +- } +- fib(10, 42) +-} +diff -urN a/gopls/internal/analysis/unusedparams/testdata/src/generatedcode/generatedcode.go b/gopls/internal/analysis/unusedparams/testdata/src/generatedcode/generatedcode.go +--- a/gopls/internal/analysis/unusedparams/testdata/src/generatedcode/generatedcode.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/analysis/unusedparams/testdata/src/generatedcode/generatedcode.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,15 +0,0 @@ +-// Code generated with somegen DO NOT EDIT. +-// +-// Because this file is generated, there should be no diagnostics +-// reported for any unused parameters. +- +-package generatedcode +- +-// generatedInterface exists to ensure that the generated code +-// is considered when determining whether parameters are used +-// in non-generated code. +-type generatedInterface interface{ n(f bool) } +- +-func a(x bool) { println() } +- +-var v = func(x bool) { println() } +diff -urN a/gopls/internal/analysis/unusedparams/testdata/src/generatedcode/generatedcode.go.golden b/gopls/internal/analysis/unusedparams/testdata/src/generatedcode/generatedcode.go.golden +--- a/gopls/internal/analysis/unusedparams/testdata/src/generatedcode/generatedcode.go.golden 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/analysis/unusedparams/testdata/src/generatedcode/generatedcode.go.golden 1969-12-31 18:00:00.000000000 -0600 +@@ -1,15 +0,0 @@ +-// Code generated with somegen DO NOT EDIT. +-// +-// Because this file is generated, there should be no diagnostics +-// reported for any unused parameters. +- +-package generatedcode +- +-// generatedInterface exists to ensure that the generated code +-// is considered when determining whether parameters are used +-// in non-generated code. 
+-type generatedInterface interface{ n(f bool) } +- +-func a(x bool) { println() } +- +-var v = func(x bool) { println() } +diff -urN a/gopls/internal/analysis/unusedparams/testdata/src/generatedcode/nongeneratedcode.go b/gopls/internal/analysis/unusedparams/testdata/src/generatedcode/nongeneratedcode.go +--- a/gopls/internal/analysis/unusedparams/testdata/src/generatedcode/nongeneratedcode.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/analysis/unusedparams/testdata/src/generatedcode/nongeneratedcode.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,20 +0,0 @@ +-package generatedcode +- +-// This file does not have the generated code comment. +-// It exists to ensure that generated code is considered +-// when determining whether or not function parameters +-// are used. +- +-type implementsGeneratedInterface struct{} +- +-// The f parameter should not be reported as unused, +-// because this method implements the parent interface defined +-// in the generated code. +-func (implementsGeneratedInterface) n(f bool) { +- // The body must not be empty, otherwise unusedparams will +- // not report the unused parameter regardless of the +- // interface. +- println() +-} +- +-func b(x bool) { println() } // want "unused parameter: x" +diff -urN a/gopls/internal/analysis/unusedparams/testdata/src/generatedcode/nongeneratedcode.go.golden b/gopls/internal/analysis/unusedparams/testdata/src/generatedcode/nongeneratedcode.go.golden +--- a/gopls/internal/analysis/unusedparams/testdata/src/generatedcode/nongeneratedcode.go.golden 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/analysis/unusedparams/testdata/src/generatedcode/nongeneratedcode.go.golden 1969-12-31 18:00:00.000000000 -0600 +@@ -1,20 +0,0 @@ +-package generatedcode +- +-// This file does not have the generated code comment. +-// It exists to ensure that generated code is considered +-// when determining whether or not function parameters +-// are used. +- +-type implementsGeneratedInterface struct{} +- +-// The f parameter should not be reported as unused, +-// because this method implements the parent interface defined +-// in the generated code. +-func (implementsGeneratedInterface) n(f bool) { +- // The body must not be empty, otherwise unusedparams will +- // not report the unused parameter regardless of the +- // interface. +- println() +-} +- +-func b(_ bool) { println() } // want "unused parameter: x" +diff -urN a/gopls/internal/analysis/unusedparams/testdata/src/typeparams/typeparams.go b/gopls/internal/analysis/unusedparams/testdata/src/typeparams/typeparams.go +--- a/gopls/internal/analysis/unusedparams/testdata/src/typeparams/typeparams.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/analysis/unusedparams/testdata/src/typeparams/typeparams.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,55 +0,0 @@ +-// Copyright 2022 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. 
+- +-package typeparams +- +-import ( +- "bytes" +- "fmt" +- "net/http" +-) +- +-type parent[T any] interface { +- n(f T) +-} +- +-type yuh[T any] struct { +- a T +-} +- +-func (y *yuh[int]) n(f bool) { +- for i := 0; i < 10; i++ { +- fmt.Println(i) +- } +-} +- +-func a[T comparable](i1 int, i2 T, i3 int) int { // want "unused parameter: i2" +- i3 += i1 +- _ = func(z int) int { // want "unused parameter: z" +- _ = 1 +- return 1 +- } +- return i3 +-} +- +-func b[T any](c bytes.Buffer) { // want "unused parameter: c" +- _ = 1 +-} +- +-func z[T http.ResponseWriter](h T, _ *http.Request) { // no report: func z is address-taken +- fmt.Println("Before") +-} +- +-func l(h http.Handler) http.Handler { // want "unused parameter: h" +- return http.HandlerFunc(z[http.ResponseWriter]) +-} +- +-func mult(a, b int) int { // want "unused parameter: b" +- a += 1 +- return a +-} +- +-func y[T any](a T) { +- panic("yo") +-} +diff -urN a/gopls/internal/analysis/unusedparams/testdata/src/typeparams/typeparams.go.golden b/gopls/internal/analysis/unusedparams/testdata/src/typeparams/typeparams.go.golden +--- a/gopls/internal/analysis/unusedparams/testdata/src/typeparams/typeparams.go.golden 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/analysis/unusedparams/testdata/src/typeparams/typeparams.go.golden 1969-12-31 18:00:00.000000000 -0600 +@@ -1,55 +0,0 @@ +-// Copyright 2022 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package typeparams +- +-import ( +- "bytes" +- "fmt" +- "net/http" +-) +- +-type parent[T any] interface { +- n(f T) +-} +- +-type yuh[T any] struct { +- a T +-} +- +-func (y *yuh[int]) n(f bool) { +- for i := 0; i < 10; i++ { +- fmt.Println(i) +- } +-} +- +-func a[T comparable](i1 int, _ T, i3 int) int { // want "unused parameter: i2" +- i3 += i1 +- _ = func(_ int) int { // want "unused parameter: z" +- _ = 1 +- return 1 +- } +- return i3 +-} +- +-func b[T any](_ bytes.Buffer) { // want "unused parameter: c" +- _ = 1 +-} +- +-func z[T http.ResponseWriter](h T, _ *http.Request) { // no report: func z is address-taken +- fmt.Println("Before") +-} +- +-func l(_ http.Handler) http.Handler { // want "unused parameter: h" +- return http.HandlerFunc(z[http.ResponseWriter]) +-} +- +-func mult(a, _ int) int { // want "unused parameter: b" +- a += 1 +- return a +-} +- +-func y[T any](a T) { +- panic("yo") +-} +diff -urN a/gopls/internal/analysis/unusedparams/unusedparams.go b/gopls/internal/analysis/unusedparams/unusedparams.go +--- a/gopls/internal/analysis/unusedparams/unusedparams.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/analysis/unusedparams/unusedparams.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,293 +0,0 @@ +-// Copyright 2020 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. 
+- +-package unusedparams +- +-import ( +- _ "embed" +- "fmt" +- "go/ast" +- "go/types" +- +- "golang.org/x/tools/go/analysis" +- "golang.org/x/tools/go/analysis/passes/inspect" +- "golang.org/x/tools/go/ast/edge" +- "golang.org/x/tools/go/ast/inspector" +- "golang.org/x/tools/gopls/internal/util/moreslices" +- "golang.org/x/tools/internal/analysisinternal" +- "golang.org/x/tools/internal/typesinternal" +-) +- +-//go:embed doc.go +-var doc string +- +-var Analyzer = &analysis.Analyzer{ +- Name: "unusedparams", +- Doc: analysisinternal.MustExtractDoc(doc, "unusedparams"), +- Requires: []*analysis.Analyzer{inspect.Analyzer}, +- Run: run, +- URL: "https://pkg.go.dev/golang.org/x/tools/gopls/internal/analysis/unusedparams", +-} +- +-const FixCategory = "unusedparams" // recognized by gopls ApplyFix +- +-func run(pass *analysis.Pass) (any, error) { +- inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) +- +- // First find all "address-taken" functions. +- // We must conservatively assume that their parameters +- // are all required to conform to some signature. +- // +- // A named function is address-taken if it is somewhere +- // used not in call position: +- // +- // f(...) // not address-taken +- // use(f) // address-taken +- // +- // A literal function is address-taken if it is not +- // immediately bound to a variable, or if that variable is +- // used not in call position: +- // +- // f := func() { ... }; f() used only in call position +- // var f func(); f = func() { ...f()... }; f() ditto +- // use(func() { ... }) address-taken +- // +- +- // Note: this algorithm relies on the assumption that the +- // analyzer is called only for the "widest" package for a +- // given file: that is, p_test in preference to p, if both +- // exist. Analyzing only package p may produce diagnostics +- // that would be falsified based on declarations in p_test.go +- // files. The gopls analysis driver does this, but most +- // drivers to not, so running this command in, say, +- // unitchecker or multichecker may produce incorrect results. +- +- // Gather global information: +- // - uses of functions not in call position +- // - unexported interface methods +- // - all referenced variables +- +- usesOutsideCall := make(map[types.Object][]*ast.Ident) +- unexportedIMethodNames := make(map[string]bool) +- { +- callPosn := make(map[*ast.Ident]bool) // all idents f appearing in f() calls +- filter := []ast.Node{ +- (*ast.CallExpr)(nil), +- (*ast.InterfaceType)(nil), +- } +- inspect.Preorder(filter, func(n ast.Node) { +- switch n := n.(type) { +- case *ast.CallExpr: +- id := typesinternal.UsedIdent(pass.TypesInfo, n.Fun) +- // Find object: +- // record non-exported function, method, or func-typed var. +- if id != nil && !id.IsExported() { +- switch pass.TypesInfo.Uses[id].(type) { +- case *types.Func, *types.Var: +- callPosn[id] = true +- } +- } +- +- case *ast.InterfaceType: +- // Record the set of names of unexported interface methods. +- // (It would be more precise to record signatures but +- // generics makes it tricky, and this conservative +- // heuristic is close enough.) +- t := pass.TypesInfo.TypeOf(n).(*types.Interface) +- for i := 0; i < t.NumExplicitMethods(); i++ { +- m := t.ExplicitMethod(i) +- if !m.Exported() && m.Name() != "_" { +- unexportedIMethodNames[m.Name()] = true +- } +- } +- } +- }) +- +- for id, obj := range pass.TypesInfo.Uses { +- if !callPosn[id] { +- // This includes "f = func() {...}", which we deal with below. 
+- usesOutsideCall[obj] = append(usesOutsideCall[obj], id) +- } +- } +- } +- +- // Find all vars (notably parameters) that are used. +- usedVars := make(map[*types.Var]bool) +- for _, obj := range pass.TypesInfo.Uses { +- if v, ok := obj.(*types.Var); ok { +- if v.IsField() { +- continue // no point gathering these +- } +- usedVars[v] = true +- } +- } +- +- // Check each non-address-taken function's parameters are all used. +-funcloop: +- for c := range inspect.Root().Preorder((*ast.FuncDecl)(nil), (*ast.FuncLit)(nil)) { +- var ( +- fn types.Object // function symbol (*Func, possibly *Var for a FuncLit) +- ftype *ast.FuncType +- body *ast.BlockStmt +- ) +- switch n := c.Node().(type) { +- case *ast.FuncDecl: +- // We can't analyze non-Go functions. +- if n.Body == nil { +- continue +- } +- +- // Ignore exported functions and methods: we +- // must assume they may be address-taken in +- // another package. +- if n.Name.IsExported() { +- continue +- } +- +- // Ignore methods that match the name of any +- // interface method declared in this package, +- // as the method's signature may need to conform +- // to the interface. +- if n.Recv != nil && unexportedIMethodNames[n.Name.Name] { +- continue +- } +- +- fn = pass.TypesInfo.Defs[n.Name].(*types.Func) +- ftype, body = n.Type, n.Body +- +- case *ast.FuncLit: +- // Find the symbol for the variable (if any) +- // to which the FuncLit is bound. +- // (We don't bother to allow ParenExprs.) +- switch parent := c.Parent().Node().(type) { +- case *ast.AssignStmt: +- // f = func() {...} +- // f := func() {...} +- if ek, idx := c.ParentEdge(); ek == edge.AssignStmt_Rhs { +- // Inv: n == AssignStmt.Rhs[idx] +- if id, ok := parent.Lhs[idx].(*ast.Ident); ok { +- fn = pass.TypesInfo.ObjectOf(id) +- +- // Edge case: f = func() {...} +- // should not count as a use. +- if pass.TypesInfo.Uses[id] != nil { +- usesOutsideCall[fn] = moreslices.Remove(usesOutsideCall[fn], id) +- } +- +- if fn == nil && id.Name == "_" { +- // Edge case: _ = func() {...} +- // has no local var. Fake one. +- v := types.NewVar(id.Pos(), pass.Pkg, id.Name, pass.TypesInfo.TypeOf(n)) +- v.SetKind(types.LocalVar) +- fn = v +- } +- } +- } +- +- case *ast.ValueSpec: +- // var f = func() { ... } +- // (unless f is an exported package-level var) +- for i, val := range parent.Values { +- if val == n { +- v := pass.TypesInfo.Defs[parent.Names[i]] +- if !(v.Parent() == pass.Pkg.Scope() && v.Exported()) { +- fn = v +- } +- break +- } +- } +- } +- +- ftype, body = n.Type, n.Body +- } +- +- // Ignore address-taken functions and methods: unused +- // parameters may be needed to conform to a func type. +- if fn == nil || len(usesOutsideCall[fn]) > 0 { +- continue +- } +- +- // If there are no parameters, there are no unused parameters. +- if ftype.Params.NumFields() == 0 { +- continue +- } +- +- // To reduce false positives, ignore functions with an +- // empty or panic body. +- // +- // We choose not to ignore functions whose body is a +- // single return statement (as earlier versions did) +- // func f() { return } +- // func f() { return g(...) } +- // as we suspect that was just heuristic to reduce +- // false positives in the earlier unsound algorithm. +- switch len(body.List) { +- case 0: +- // Empty body. Although the parameter is +- // unnecessary, it's pretty obvious to the +- // reader that that's the case, so we allow it. 
+- continue // func f() {} +- case 1: +- if stmt, ok := body.List[0].(*ast.ExprStmt); ok { +- // We allow a panic body, as it is often a +- // placeholder for a future implementation: +- // func f() { panic(...) } +- if call, ok := stmt.X.(*ast.CallExpr); ok { +- if fun, ok := call.Fun.(*ast.Ident); ok && fun.Name == "panic" { +- continue +- } +- } +- } +- } +- +- // Don't report diagnostics on generated files. +- // (We can't skip analysis of generated files, though.) +- for curFile := range c.Enclosing((*ast.File)(nil)) { +- if ast.IsGenerated(curFile.Node().(*ast.File)) { +- continue funcloop +- } +- } +- +- // Report each unused parameter. +- for _, field := range ftype.Params.List { +- for _, id := range field.Names { +- if id.Name == "_" { +- continue +- } +- param := pass.TypesInfo.Defs[id].(*types.Var) +- if !usedVars[param] { +- start, end := field.Pos(), field.End() +- if len(field.Names) > 1 { +- start, end = id.Pos(), id.End() +- } +- +- // This diagnostic carries both an edit-based fix to +- // rename the unused parameter, and a command-based fix +- // to remove it (see golang.RemoveUnusedParameter). +- pass.Report(analysis.Diagnostic{ +- Pos: start, +- End: end, +- Message: fmt.Sprintf("unused parameter: %s", id.Name), +- Category: FixCategory, +- SuggestedFixes: []analysis.SuggestedFix{ +- { +- Message: `Rename parameter to "_"`, +- TextEdits: []analysis.TextEdit{{ +- Pos: id.Pos(), +- End: id.End(), +- NewText: []byte("_"), +- }}, +- }, +- { +- Message: fmt.Sprintf("Remove unused parameter %q", id.Name), +- // No TextEdits => computed by gopls command +- }, +- }, +- }) +- } +- } +- } +- } +- return nil, nil +-} +diff -urN a/gopls/internal/analysis/unusedparams/unusedparams_test.go b/gopls/internal/analysis/unusedparams/unusedparams_test.go +--- a/gopls/internal/analysis/unusedparams/unusedparams_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/analysis/unusedparams/unusedparams_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,17 +0,0 @@ +-// Copyright 2020 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package unusedparams_test +- +-import ( +- "testing" +- +- "golang.org/x/tools/go/analysis/analysistest" +- "golang.org/x/tools/gopls/internal/analysis/unusedparams" +-) +- +-func Test(t *testing.T) { +- testdata := analysistest.TestData() +- analysistest.RunWithSuggestedFixes(t, testdata, unusedparams.Analyzer, "a", "generatedcode", "typeparams") +-} +diff -urN a/gopls/internal/analysis/unusedvariable/testdata/src/assign/a.go b/gopls/internal/analysis/unusedvariable/testdata/src/assign/a.go +--- a/gopls/internal/analysis/unusedvariable/testdata/src/assign/a.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/analysis/unusedvariable/testdata/src/assign/a.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,87 +0,0 @@ +-// Copyright 2022 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. 
+- +-package a +- +-import ( +- "fmt" +- "os" +-) +- +-type A struct { +- b int +-} +- +-func singleAssignment() { +- v := "s" // want `declared (and|but) not used` +- +- s := []int{ // want `declared (and|but) not used` +- 1, +- 2, +- } +- +- a := func(s string) bool { // want `declared (and|but) not used` +- return false +- } +- +- if 1 == 1 { +- s := "v" // want `declared (and|but) not used` +- } +- +- panic("I should survive") +-} +- +-func noOtherStmtsInBlock() { +- v := "s" // want `declared (and|but) not used` +-} +- +-func partOfMultiAssignment() { +- f, err := os.Open("file") // want `declared (and|but) not used` +- panic(err) +-} +- +-func sideEffects(cBool chan bool, cInt chan int) { +- b := <-c // want `declared (and|but) not used` +- s := fmt.Sprint("") // want `declared (and|but) not used` +- a := A{ // want `declared (and|but) not used` +- b: func() int { +- return 1 +- }(), +- } +- c := A{<-cInt} // want `declared (and|but) not used` +- d := fInt() + <-cInt // want `declared (and|but) not used` +- e := fBool() && <-cBool // want `declared (and|but) not used` +- f := map[int]int{ // want `declared (and|but) not used` +- fInt(): <-cInt, +- } +- g := []int{<-cInt} // want `declared (and|but) not used` +- h := func(s string) {} // want `declared (and|but) not used` +- +- // (ill-typed) +- i := func(s string) {}() // want `declared (and|but) not used` +-} +- +-func commentAbove() { +- // v is a variable +- v := "s" // want `declared (and|but) not used` +-} +- +-func commentBelow() { +- v := "s" // want `declared (and|but) not used` +- // v is a variable +-} +- +-func commentSpaceBelow() { +- v := "s" // want `declared (and|but) not used` +- +- // v is a variable +-} +- +-func fBool() bool { +- return true +-} +- +-func fInt() int { +- return 1 +-} +diff -urN a/gopls/internal/analysis/unusedvariable/testdata/src/assign/a.go.golden b/gopls/internal/analysis/unusedvariable/testdata/src/assign/a.go.golden +--- a/gopls/internal/analysis/unusedvariable/testdata/src/assign/a.go.golden 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/analysis/unusedvariable/testdata/src/assign/a.go.golden 1969-12-31 18:00:00.000000000 -0600 +@@ -1,78 +0,0 @@ +-// Copyright 2022 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. 
+- +-package a +- +-import ( +- "fmt" +- "os" +-) +- +-type A struct { +- b int +-} +- +-func singleAssignment() { +- // want `declared (and|but) not used` +- +- if 1 == 1 { +- // want `declared (and|but) not used` +- } +- +- panic("I should survive") +-} +- +-func noOtherStmtsInBlock() { +- // want `declared (and|but) not used` +-} +- +-func partOfMultiAssignment() { +- _, err := os.Open("file") // want `declared (and|but) not used` +- panic(err) +-} +- +-func sideEffects(cBool chan bool, cInt chan int) { +- _ = <-c // want `declared (and|but) not used` +- _ = fmt.Sprint("") // want `declared (and|but) not used` +- _ = A{ // want `declared (and|but) not used` +- b: func() int { +- return 1 +- }(), +- } +- _ = A{<-cInt} // want `declared (and|but) not used` +- _ = fInt() + <-cInt // want `declared (and|but) not used` +- _ = fBool() && <-cBool // want `declared (and|but) not used` +- _ = map[int]int{ // want `declared (and|but) not used` +- fInt(): <-cInt, +- } +- _ = []int{<-cInt} // want `declared (and|but) not used` +- // want `declared (and|but) not used` +- +- // (ill-typed) +- _ = func(s string) {}() // want `declared (and|but) not used` +-} +- +-func commentAbove() { +- // v is a variable +- // want `declared (and|but) not used` +-} +- +-func commentBelow() { +- // want `declared (and|but) not used` +- // v is a variable +-} +- +-func commentSpaceBelow() { +- // want `declared (and|but) not used` +- +- // v is a variable +-} +- +-func fBool() bool { +- return true +-} +- +-func fInt() int { +- return 1 +-} +diff -urN a/gopls/internal/analysis/unusedvariable/testdata/src/decl/a.go b/gopls/internal/analysis/unusedvariable/testdata/src/decl/a.go +--- a/gopls/internal/analysis/unusedvariable/testdata/src/decl/a.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/analysis/unusedvariable/testdata/src/decl/a.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,30 +0,0 @@ +-// Copyright 2022 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package decl +- +-func a() { +- var b, c bool // want `declared (and|but) not used` +- panic(c) +- +- if 1 == 1 { +- var s string // want `declared (and|but) not used` +- } +-} +- +-func b() { +- // b is a variable +- var b bool // want `declared (and|but) not used` +-} +- +-func c() { +- var ( +- d string +- +- // some comment for c +- c bool // want `declared (and|but) not used` +- ) +- +- panic(d) +-} +diff -urN a/gopls/internal/analysis/unusedvariable/testdata/src/decl/a.go.golden b/gopls/internal/analysis/unusedvariable/testdata/src/decl/a.go.golden +--- a/gopls/internal/analysis/unusedvariable/testdata/src/decl/a.go.golden 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/analysis/unusedvariable/testdata/src/decl/a.go.golden 1969-12-31 18:00:00.000000000 -0600 +@@ -1,27 +0,0 @@ +-// Copyright 2022 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. 
+- +-package decl +- +-func a() { +- var c bool // want `declared (and|but) not used` +- panic(c) +- +- if 1 == 1 { +- // want `declared (and|but) not used` +- } +-} +- +-func b() { +- // b is a variable +- // want `declared (and|but) not used` +-} +- +-func c() { +- var ( +- d string +- ) +- +- panic(d) +-} +diff -urN a/gopls/internal/analysis/unusedvariable/unusedvariable.go b/gopls/internal/analysis/unusedvariable/unusedvariable.go +--- a/gopls/internal/analysis/unusedvariable/unusedvariable.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/analysis/unusedvariable/unusedvariable.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,75 +0,0 @@ +-// Copyright 2020 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-// Package unusedvariable defines an analyzer that checks for unused variables. +-package unusedvariable +- +-import ( +- "fmt" +- "go/ast" +- "regexp" +- "strings" +- +- "golang.org/x/tools/go/analysis" +- "golang.org/x/tools/go/analysis/passes/inspect" +- "golang.org/x/tools/go/ast/inspector" +- "golang.org/x/tools/internal/refactor" +-) +- +-const Doc = `check for unused variables and suggest fixes` +- +-var Analyzer = &analysis.Analyzer{ +- Name: "unusedvariable", +- Doc: Doc, +- Requires: []*analysis.Analyzer{inspect.Analyzer}, +- Run: run, +- RunDespiteErrors: true, // an unusedvariable diagnostic is a compile error +- URL: "https://pkg.go.dev/golang.org/x/tools/gopls/internal/analysis/unusedvariable", +-} +- +-// The suffix for this error message changed in Go 1.20 and Go 1.23. +-var unusedVariableRegexp = []*regexp.Regexp{ +- regexp.MustCompile("^(.*) declared and not used$"), // Go 1.20+ +- regexp.MustCompile("^declared and not used: (.*)$"), // Go 1.23+ +-} +- +-func run(pass *analysis.Pass) (any, error) { +- inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) +- +- for _, typeErr := range pass.TypeErrors { +- for _, re := range unusedVariableRegexp { +- match := re.FindStringSubmatch(typeErr.Msg) +- if len(match) == 0 { +- continue +- } +- // Since Go 1.23, go/types' error messages quote vars as `v'. +- varName := strings.Trim(match[1], "`'") +- +- curId, ok := inspect.Root().FindByPos(typeErr.Pos, typeErr.Pos) +- if !ok { +- continue // can't find error node +- } +- ident, ok := curId.Node().(*ast.Ident) +- if !ok || ident.Name != varName { +- continue // not the right identifier +- } +- +- tokFile := pass.Fset.File(ident.Pos()) +- edits := refactor.DeleteVar(tokFile, pass.TypesInfo, curId) +- if len(edits) > 0 { +- pass.Report(analysis.Diagnostic{ +- Pos: ident.Pos(), +- End: ident.End(), +- Message: typeErr.Msg, +- SuggestedFixes: []analysis.SuggestedFix{{ +- Message: fmt.Sprintf("Remove variable %s", ident.Name), +- TextEdits: edits, +- }}, +- }) +- } +- } +- } +- +- return nil, nil +-} +diff -urN a/gopls/internal/analysis/unusedvariable/unusedvariable_test.go b/gopls/internal/analysis/unusedvariable/unusedvariable_test.go +--- a/gopls/internal/analysis/unusedvariable/unusedvariable_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/analysis/unusedvariable/unusedvariable_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,24 +0,0 @@ +-// Copyright 2020 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. 
+- +-package unusedvariable_test +- +-import ( +- "testing" +- +- "golang.org/x/tools/go/analysis/analysistest" +- "golang.org/x/tools/gopls/internal/analysis/unusedvariable" +-) +- +-func Test(t *testing.T) { +- testdata := analysistest.TestData() +- +- t.Run("decl", func(t *testing.T) { +- analysistest.RunWithSuggestedFixes(t, testdata, unusedvariable.Analyzer, "decl") +- }) +- +- t.Run("assign", func(t *testing.T) { +- analysistest.RunWithSuggestedFixes(t, testdata, unusedvariable.Analyzer, "assign") +- }) +-} +diff -urN a/gopls/internal/analysis/yield/doc.go b/gopls/internal/analysis/yield/doc.go +--- a/gopls/internal/analysis/yield/doc.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/analysis/yield/doc.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,38 +0,0 @@ +-// Copyright 2024 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-// Package yield defines an Analyzer that checks for mistakes related +-// to the yield function used in iterators. +-// +-// # Analyzer yield +-// +-// yield: report calls to yield where the result is ignored +-// +-// After a yield function returns false, the caller should not call +-// the yield function again; generally the iterator should return +-// promptly. +-// +-// This example fails to check the result of the call to yield, +-// causing this analyzer to report a diagnostic: +-// +-// yield(1) // yield may be called again (on L2) after returning false +-// yield(2) +-// +-// The corrected code is either this: +-// +-// if yield(1) { yield(2) } +-// +-// or simply: +-// +-// _ = yield(1) && yield(2) +-// +-// It is not always a mistake to ignore the result of yield. +-// For example, this is a valid single-element iterator: +-// +-// yield(1) // ok to ignore result +-// return +-// +-// It is only a mistake when the yield call that returned false may be +-// followed by another call. +-package yield +diff -urN a/gopls/internal/analysis/yield/main.go b/gopls/internal/analysis/yield/main.go +--- a/gopls/internal/analysis/yield/main.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/analysis/yield/main.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,16 +0,0 @@ +-// Copyright 2024 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-//go:build ignore +- +-// The yield command applies the yield analyzer to the specified +-// packages of Go source code. +-package main +- +-import ( +- "golang.org/x/tools/go/analysis/singlechecker" +- "golang.org/x/tools/gopls/internal/analysis/yield" +-) +- +-func main() { singlechecker.Main(yield.Analyzer) } +diff -urN a/gopls/internal/analysis/yield/testdata/src/a/a.go b/gopls/internal/analysis/yield/testdata/src/a/a.go +--- a/gopls/internal/analysis/yield/testdata/src/a/a.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/analysis/yield/testdata/src/a/a.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,120 +0,0 @@ +-// Copyright 2024 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package yield +- +-import ( +- "bufio" +- "io" +-) +- +-// +-// +-// Modify this block of comment lines as needed when changing imports +-// to avoid perturbing subsequent line numbers (and thus error messages). +-// +-// This is L16. 
+- +-func goodIter(yield func(int) bool) { +- _ = yield(1) && yield(2) && yield(3) // ok +-} +- +-func badIterOR(yield func(int) bool) { +- _ = yield(1) || // want `yield may be called again \(on L25\) after returning false` +- yield(2) || // want `yield may be called again \(on L26\) after returning false` +- yield(3) +-} +- +-func badIterSeq(yield func(int) bool) { +- yield(1) // want `yield may be called again \(on L31\) after returning false` +- yield(2) // want `yield may be called again \(on L32\) after returning false` +- yield(3) // ok +-} +- +-func badIterLoop(yield func(int) bool) { +- for { +- yield(1) // want `yield may be called again after returning false` +- } +-} +- +-func goodIterLoop(yield func(int) bool) { +- for { +- if !yield(1) { +- break +- } +- } +-} +- +-func badIterIf(yield func(int) bool) { +- ok := yield(1) // want `yield may be called again \(on L52\) after returning false` +- if !ok { +- yield(2) +- } else { +- yield(3) +- } +-} +- +-func singletonIter(yield func(int) bool) { +- yield(1) // ok +-} +- +-func twoArgumentYield(yield func(int, int) bool) { +- _ = yield(1, 1) || // want `yield may be called again \(on L64\) after returning false` +- yield(2, 2) +-} +- +-func zeroArgumentYield(yield func() bool) { +- _ = yield() || // want `yield may be called again \(on L69\) after returning false` +- yield() +-} +- +-func tricky(in io.ReadCloser) func(yield func(string, error) bool) { +- return func(yield func(string, error) bool) { +- scan := bufio.NewScanner(in) +- for scan.Scan() { +- if !yield(scan.Text(), nil) { // want `yield may be called again \(on L82\) after returning false` +- _ = in.Close() +- break +- } +- } +- if err := scan.Err(); err != nil { +- yield("", err) +- } +- } +-} +- +-// Regression test for issue #70598. +-func shortCircuitAND(yield func(int) bool) { +- ok := yield(1) +- ok = ok && yield(2) +- ok = ok && yield(3) +- ok = ok && yield(4) +-} +- +-// This example has a bug because a false yield(2) may be followed by yield(3). +-func tricky2(yield func(int) bool) { +- cleanup := func() {} +- ok := yield(1) // want "yield may be called again .on L104" +- stop := !ok || yield(2) // want "yield may be called again .on L104" +- if stop { +- cleanup() +- } else { +- // dominated by !stop => !(!ok || yield(2)) => yield(1) && !yield(2): bad. +- yield(3) +- } +-} +- +-// This example is sound, but the analyzer reports a false positive. +-// TODO(adonovan): prune infeasible paths more carefully. +-func tricky3(yield func(int) bool) { +- cleanup := func() {} +- ok := yield(1) // want "yield may be called again .on L118" +- stop := !ok || !yield(2) // want "yield may be called again .on L118" +- if stop { +- cleanup() +- } else { +- // dominated by !stop => !(!ok || !yield(2)) => yield(1) && yield(2): good. +- yield(3) +- } +-} +diff -urN a/gopls/internal/analysis/yield/yield.go b/gopls/internal/analysis/yield/yield.go +--- a/gopls/internal/analysis/yield/yield.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/analysis/yield/yield.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,193 +0,0 @@ +-// Copyright 2024 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package yield +- +-// TODO(adonovan): also check for this pattern: +-// +-// for x := range seq { +-// yield(x) +-// } +-// +-// which should be entirely rewritten as +-// +-// seq(yield) +-// +-// to avoid unnecessary range desugaring and chains of dynamic calls. 
+- +-import ( +- _ "embed" +- "fmt" +- "go/ast" +- "go/constant" +- "go/token" +- "go/types" +- +- "golang.org/x/tools/go/analysis" +- "golang.org/x/tools/go/analysis/passes/buildssa" +- "golang.org/x/tools/go/analysis/passes/inspect" +- "golang.org/x/tools/go/ast/inspector" +- "golang.org/x/tools/go/ssa" +- "golang.org/x/tools/gopls/internal/util/safetoken" +- "golang.org/x/tools/internal/analysisinternal" +-) +- +-//go:embed doc.go +-var doc string +- +-var Analyzer = &analysis.Analyzer{ +- Name: "yield", +- Doc: analysisinternal.MustExtractDoc(doc, "yield"), +- Requires: []*analysis.Analyzer{inspect.Analyzer, buildssa.Analyzer}, +- Run: run, +- URL: "https://pkg.go.dev/golang.org/x/tools/gopls/internal/analysis/yield", +-} +- +-func run(pass *analysis.Pass) (any, error) { +- inspector := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) +- +- // Find all calls to yield of the right type. +- yieldCalls := make(map[token.Pos]*ast.CallExpr) // keyed by CallExpr.Lparen. +- nodeFilter := []ast.Node{(*ast.CallExpr)(nil)} +- inspector.Preorder(nodeFilter, func(n ast.Node) { +- call := n.(*ast.CallExpr) +- if id, ok := call.Fun.(*ast.Ident); ok && id.Name == "yield" { +- if sig, ok := pass.TypesInfo.TypeOf(id).(*types.Signature); ok && +- sig.Params().Len() < 3 && +- sig.Results().Len() == 1 && +- types.Identical(sig.Results().At(0).Type(), types.Typ[types.Bool]) { +- yieldCalls[call.Lparen] = call +- } +- } +- }) +- +- // Common case: nothing to do. +- if len(yieldCalls) == 0 { +- return nil, nil +- } +- +- // Study the control flow using SSA. +- buildssa := pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA) +- for _, fn := range buildssa.SrcFuncs { +- // TODO(adonovan): opt: skip functions that don't contain any yield calls. +- +- // Find the yield calls in SSA. +- type callInfo struct { +- syntax *ast.CallExpr +- index int // index of instruction within its block +- reported bool +- } +- ssaYieldCalls := make(map[*ssa.Call]*callInfo) +- for _, b := range fn.Blocks { +- for i, instr := range b.Instrs { +- if call, ok := instr.(*ssa.Call); ok { +- if syntax, ok := yieldCalls[call.Pos()]; ok { +- ssaYieldCalls[call] = &callInfo{syntax: syntax, index: i} +- } +- } +- } +- } +- +- // Now search for a control path from the instruction after a +- // yield call to another yield call--possible the same one, +- // following all block successors except "if yield() { ... }"; +- // in such cases we know that yield returned true. +- // +- // Note that this is a "may" dataflow analysis: it +- // reports when a yield function _may_ be called again +- // without a positive intervening check, but it is +- // possible that the check is beyond the ability of +- // the representation to detect, perhaps involving +- // sophisticated use of booleans, indirect state (not +- // in SSA registers), or multiple flow paths some of +- // which are infeasible. +- // +- // A "must" analysis (which would report when a second +- // yield call can only be reached after failing the +- // boolean check) would be too conservative. +- // In particular, the most common mistake is to +- // forget to check the boolean at all. +- for call, info := range ssaYieldCalls { +- visited := make([]bool, len(fn.Blocks)) // visited BasicBlock.Indexes +- +- // visit visits the instructions of a block (or a suffix if start > 0). 
+- var visit func(b *ssa.BasicBlock, start int) +- visit = func(b *ssa.BasicBlock, start int) { +- if !visited[b.Index] { +- if start == 0 { +- visited[b.Index] = true +- } +- for _, instr := range b.Instrs[start:] { +- switch instr := instr.(type) { +- case *ssa.Call: +- if !info.reported && ssaYieldCalls[instr] != nil { +- info.reported = true +- where := "" // "" => same yield call (a loop) +- if instr != call { +- otherLine := safetoken.StartPosition(pass.Fset, instr.Pos()).Line +- where = fmt.Sprintf("(on L%d) ", otherLine) +- } +- pass.Reportf(call.Pos(), "yield may be called again %safter returning false", where) +- } +- case *ssa.If: +- // Visit both successors, unless cond is yield() or its negation. +- // In that case visit only the "if !yield()" block. +- cond := instr.Cond +- t, f := b.Succs[0], b.Succs[1] +- +- // Strip off any NOT operator. +- cond, t, f = unnegate(cond, t, f) +- +- // As a peephole optimization for this special case: +- // ok := yield() +- // ok = ok && yield() +- // ok = ok && yield() +- // which in SSA becomes: +- // yield() +- // phi(false, yield()) +- // phi(false, yield()) +- // we reduce a cond of phi(false, x) to just x. +- if phi, ok := cond.(*ssa.Phi); ok { +- var nonFalse []ssa.Value +- for _, v := range phi.Edges { +- if c, ok := v.(*ssa.Const); ok && +- !constant.BoolVal(c.Value) { +- continue // constant false +- } +- nonFalse = append(nonFalse, v) +- } +- if len(nonFalse) == 1 { +- cond = nonFalse[0] +- cond, t, f = unnegate(cond, t, f) +- } +- } +- +- if cond, ok := cond.(*ssa.Call); ok && ssaYieldCalls[cond] != nil { +- // Skip the successor reached by "if yield() { ... }". +- } else { +- visit(t, 0) +- } +- visit(f, 0) +- +- case *ssa.Jump: +- visit(b.Succs[0], 0) +- } +- } +- } +- } +- +- // Start at the instruction after the yield call. +- visit(call.Block(), info.index+1) +- } +- } +- +- return nil, nil +-} +- +-func unnegate(cond ssa.Value, t, f *ssa.BasicBlock) (_ ssa.Value, _, _ *ssa.BasicBlock) { +- if unop, ok := cond.(*ssa.UnOp); ok && unop.Op == token.NOT { +- return unop.X, f, t +- } +- return cond, t, f +-} +diff -urN a/gopls/internal/analysis/yield/yield_test.go b/gopls/internal/analysis/yield/yield_test.go +--- a/gopls/internal/analysis/yield/yield_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/analysis/yield/yield_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,17 +0,0 @@ +-// Copyright 2024 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package yield_test +- +-import ( +- "testing" +- +- "golang.org/x/tools/go/analysis/analysistest" +- "golang.org/x/tools/gopls/internal/analysis/yield" +-) +- +-func Test(t *testing.T) { +- testdata := analysistest.TestData() +- analysistest.Run(t, testdata, yield.Analyzer, "a") +-} +diff -urN a/gopls/internal/bloom/filter.go b/gopls/internal/bloom/filter.go +--- a/gopls/internal/bloom/filter.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/bloom/filter.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,105 +0,0 @@ +-// Copyright 2024 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package bloom +- +-import ( +- "hash/maphash" +- "math" +-) +- +-// block is the element type of the filter bitfield. +-type block = uint8 +- +-const blockBits = 8 +- +-// Filter is a bloom filter for a set of strings. 
+-type Filter struct { +- seeds []maphash.Seed +- blocks []block +-} +- +-// NewFilter constructs a new Filter with the given elements. +-func NewFilter(elems []string) *Filter { +- // Tolerate a 5% false positive rate. +- nblocks, nseeds := calibrate(0.05, len(elems)) +- f := &Filter{ +- blocks: make([]block, nblocks), +- seeds: make([]maphash.Seed, nseeds), +- } +- for i := range nseeds { +- f.seeds[i] = maphash.MakeSeed() +- } +- for _, elem := range elems { +- for _, seed := range f.seeds { +- index, bit := f.locate(seed, elem) +- f.blocks[index] |= bit +- } +- } +- return f +-} +- +-// locate returns the block index and bit corresponding to the given hash seed and +-// string. +-func (f *Filter) locate(seed maphash.Seed, s string) (index int, bit block) { +- h := uint(maphash.String(seed, s)) +- blk := h / blockBits % uint(len(f.blocks)) +- bit = block(1 << (h % blockBits)) +- return int(blk), bit +-} +- +-func assert(cond bool, msg string) { +- if !cond { +- panic(msg) +- } +-} +- +-// calibrate approximates the number of blocks and seeds to use for a bloom +-// filter with desired false positive rate fpRate, given n elements. +-func calibrate(fpRate float64, n int) (blocks, seeds int) { +- // We following the terms of https://en.wikipedia.org/wiki/Bloom_filter: +- // - k is the number of hash functions, +- // - m is the size of the bit field; +- // - n is the number of set bits. +- +- assert(0 < fpRate && fpRate < 1, "invalid false positive rate") +- assert(n >= 0, "invalid set size") +- +- if n == 0 { +- // degenerate case; use the simplest filter +- return 1, 1 +- } +- +- // Calibrate the number of blocks based on the optimal number of bits per +- // element. In this case we round up, as more bits leads to fewer false +- // positives. +- logFpRate := math.Log(fpRate) // reused for k below +- m := -(float64(n) * logFpRate) / (math.Ln2 * math.Ln2) +- blocks = int(m) / blockBits +- if float64(blocks*blockBits) < m { +- blocks += 1 +- } +- +- // Estimate the number of hash functions (=seeds). This is imprecise, not +- // least since the formula in the article above assumes that the number of +- // bits per element is not rounded. +- // +- // Here we round to the nearest integer (not unconditionally round up), since +- // more hash functions do not always lead to better results. +- k := -logFpRate / math.Ln2 +- seeds = max(int(math.Round(k)), 1) +- +- return blocks, seeds +-} +- +-// MayContain reports whether the filter may contain s. +-func (f *Filter) MayContain(s string) bool { +- for _, seed := range f.seeds { +- index, bit := f.locate(seed, s) +- if f.blocks[index]&bit == 0 { +- return false +- } +- } +- return true +-} +diff -urN a/gopls/internal/bloom/filter_test.go b/gopls/internal/bloom/filter_test.go +--- a/gopls/internal/bloom/filter_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/bloom/filter_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,93 +0,0 @@ +-// Copyright 2024 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package bloom +- +-import ( +- "math" +- "math/rand/v2" +- "testing" +-) +- +-func TestFilter(t *testing.T) { +- elems := []string{ +- "a", "apple", "b", "banana", "an arbitrarily long string", "", "世界", +- } +- +- // First, sanity check that the filter contains all the given elements. 
+- f := NewFilter(elems) +- for _, elem := range elems { +- if got := f.MayContain(elem); !got { +- t.Errorf("MayContain(%q) = %t, want true", elem, got) +- } +- } +- +- // Measure the false positives rate. +- // +- // Of course, we can't assert on the results, since they are probabilistic, +- // but this can be useful for interactive use. +- +- fpRate := falsePositiveRate(len(f.blocks), len(f.seeds), len(elems)) +- t.Logf("%d blocks, %d seeds, %.2g%% expected false positives", len(f.blocks), len(f.seeds), 100*fpRate) +- +- // In practice, all positives below will be false, but be precise anyway. +- truePositive := make(map[string]bool) +- for _, e := range elems { +- truePositive[e] = true +- } +- +- // Generate a large number of random strings to measure the false positive +- // rate. +- g := newStringGenerator() +- const samples = 1000 +- falsePositives := 0 +- for range samples { +- s := g.next() +- got := f.MayContain(s) +- if false { +- t.Logf("MayContain(%q) = %t", s, got) +- } +- if got && !truePositive[s] { +- falsePositives++ +- } +- } +- t.Logf("false positives: %.1f%% (%d/%d)", 100*float64(falsePositives)/float64(samples), falsePositives, samples) +-} +- +-// falsePositiveRate estimates the expected false positive rate for a filter +-// with the given number of blocks, seeds, and elements. +-func falsePositiveRate(block, seeds, elems int) float64 { +- k, m, n := float64(seeds), float64(block*blockBits), float64(elems) +- return math.Pow(1-math.Exp(-k*n/m), k) +-} +- +-type stringGenerator struct { +- r *rand.Rand +-} +- +-func newStringGenerator() *stringGenerator { +- return &stringGenerator{rand.New(rand.NewPCG(1, 2))} +-} +- +-func (g *stringGenerator) next() string { +- l := g.r.IntN(50) // length +- var runes []rune +- for range l { +- runes = append(runes, rune(' '+rand.IntN('~'-' '))) +- } +- return string(runes) +-} +- +-// TestDegenerateFilter checks that the degenerate filter with no elements +-// results in no false positives. +-func TestDegenerateFilter(t *testing.T) { +- f := NewFilter(nil) +- g := newStringGenerator() +- for range 100 { +- s := g.next() +- if f.MayContain(s) { +- t.Errorf("MayContain(%q) = true, want false", s) +- } +- } +-} +diff -urN a/gopls/internal/cache/analysis.go b/gopls/internal/cache/analysis.go +--- a/gopls/internal/cache/analysis.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/cache/analysis.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,1450 +0,0 @@ +-// Copyright 2019 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package cache +- +-// This file defines gopls' driver for modular static analysis (go/analysis). 
+- +-import ( +- "bytes" +- "context" +- "crypto/sha256" +- "encoding/gob" +- "encoding/json" +- "errors" +- "fmt" +- "go/ast" +- "go/token" +- "go/types" +- "log" +- urlpkg "net/url" +- "path/filepath" +- "reflect" +- "runtime" +- "runtime/debug" +- "slices" +- "sort" +- "strings" +- "sync" +- "sync/atomic" +- "time" +- +- "golang.org/x/sync/errgroup" +- "golang.org/x/tools/go/analysis" +- "golang.org/x/tools/gopls/internal/cache/metadata" +- "golang.org/x/tools/gopls/internal/file" +- "golang.org/x/tools/gopls/internal/filecache" +- "golang.org/x/tools/gopls/internal/label" +- "golang.org/x/tools/gopls/internal/progress" +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/gopls/internal/settings" +- "golang.org/x/tools/gopls/internal/util/bug" +- "golang.org/x/tools/gopls/internal/util/frob" +- "golang.org/x/tools/gopls/internal/util/moremaps" +- "golang.org/x/tools/gopls/internal/util/persistent" +- "golang.org/x/tools/gopls/internal/util/safetoken" +- "golang.org/x/tools/internal/analysisinternal" +- "golang.org/x/tools/internal/astutil" +- "golang.org/x/tools/internal/event" +- "golang.org/x/tools/internal/facts" +-) +- +-/* +- +- DESIGN +- +- An analysis request ([Snapshot.Analyze]) computes diagnostics for the +- requested packages using the set of analyzers enabled in this view. Each +- request constructs a transitively closed DAG of nodes, each representing a +- package, then works bottom up in parallel postorder calling +- [analysisNode.runCached] to ensure that each node's analysis summary is up +- to date. The summary contains the analysis diagnostics and serialized facts. +- +- The entire DAG is ephemeral. Each node in the DAG records the set of +- analyzers to run: the complete set for the root packages, and the "facty" +- subset for dependencies. Each package is thus analyzed at most once. +- +- Each node has a cryptographic key, which is either memoized in the Snapshot +- or computed by [analysisNode.cacheKey]. This key is a hash of the "recipe" +- for the analysis step, including the inputs into the type checked package +- (and its reachable dependencies), the set of analyzers, and importable +- facts. +- +- The key is sought in a machine-global persistent file-system based cache. If +- this gopls process, or another gopls process on the same machine, has +- already performed this analysis step, runCached will make a cache hit and +- load the serialized summary of the results. If not, it will have to proceed +- to run() to parse and type-check the package and then apply a set of +- analyzers to it. (The set of analyzers applied to a single package itself +- forms a graph of "actions", and it too is evaluated in parallel postorder; +- these dependency edges within the same package are called "horizontal".) +- Finally it writes a new cache entry containing serialized diagnostics and +- analysis facts. +- +- The summary must record whether a package is transitively error-free +- (whether it would compile) because many analyzers are not safe to run on +- packages with inconsistent types. +- +- For fact encoding, we use the same fact set as the unitchecker (vet) to +- record and serialize analysis facts. The fact serialization mechanism is +- analogous to "deep" export data. +- +-*/ +- +-// TODO(adonovan): +-// - Add a (white-box) test of pruning when a change doesn't affect export data. +-// - Optimise pruning based on subset of packages mentioned in exportdata. 
+-// - Better logging so that it is possible to deduce why an analyzer is not +-// being run--often due to very indirect failures. Even if the ultimate +-// consumer decides to ignore errors, tests and other situations want to be +-// assured of freedom from errors, not just missing results. This should be +-// recorded. +- +-// AnalysisProgressTitle is the title of the progress report for ongoing +-// analysis. It is sought by regression tests for the progress reporting +-// feature. +-const AnalysisProgressTitle = "Analyzing Dependencies" +- +-// Analyze applies the set of enabled analyzers to the packages in the pkgs +-// map, and returns their diagnostics. +-// +-// Notifications of progress may be sent to the optional reporter. +-func (s *Snapshot) Analyze(ctx context.Context, pkgs map[PackageID]*metadata.Package, reporter *progress.Tracker) ([]*Diagnostic, error) { +- start := time.Now() // for progress reporting +- +- var tagStr string // sorted comma-separated list of PackageIDs +- { +- keys := make([]string, 0, len(pkgs)) +- for id := range pkgs { +- keys = append(keys, string(id)) +- } +- sort.Strings(keys) +- tagStr = strings.Join(keys, ",") +- } +- ctx, done := event.Start(ctx, "snapshot.Analyze", label.Package.Of(tagStr)) +- defer done() +- +- // Filter and sort enabled root analyzers. +- // A disabled analyzer may still be run if required by another. +- var ( +- toSrc = make(map[*analysis.Analyzer]*settings.Analyzer) +- enabledAnalyzers []*analysis.Analyzer // enabled subset + transitive requirements +- ) +- for _, a := range settings.AllAnalyzers { +- if a.Enabled(s.Options()) { +- toSrc[a.Analyzer()] = a +- enabledAnalyzers = append(enabledAnalyzers, a.Analyzer()) +- } +- } +- sort.Slice(enabledAnalyzers, func(i, j int) bool { +- return enabledAnalyzers[i].Name < enabledAnalyzers[j].Name +- }) +- +- enabledAnalyzers = requiredAnalyzers(enabledAnalyzers) +- +- // Perform basic sanity checks. +- // (Ideally we would do this only once.) +- if err := analysis.Validate(enabledAnalyzers); err != nil { +- return nil, fmt.Errorf("invalid analyzer configuration: %v", err) +- } +- +- stableNames := make(map[*analysis.Analyzer]string) +- +- var facty []*analysis.Analyzer // facty subset of enabled + transitive requirements +- for _, a := range enabledAnalyzers { +- // TODO(adonovan): reject duplicate stable names (very unlikely). +- stableNames[a] = stableName(a) +- +- // Register fact types of all required analyzers. +- if len(a.FactTypes) > 0 { +- facty = append(facty, a) +- for _, f := range a.FactTypes { +- gob.Register(f) // <2us +- } +- } +- } +- facty = requiredAnalyzers(facty) +- +- batch, release := s.acquireTypeChecking() +- defer release() +- +- ids := moremaps.KeySlice(pkgs) +- handles, err := s.getPackageHandles(ctx, ids) +- if err != nil { +- return nil, err +- } +- batch.addHandles(handles) +- +- // Starting from the root packages and following DepsByPkgPath, +- // build the DAG of packages we're going to analyze. +- // +- // Root nodes will run the enabled set of analyzers, +- // whereas dependencies will run only the facty set. +- // Because (by construction) enabled is a superset of facty, +- // we can analyze each node with exactly one set of analyzers. 
+- nodes := make(map[PackageID]*analysisNode) +- var leaves []*analysisNode // nodes with no unfinished successors +- var makeNode func(from *analysisNode, id PackageID) (*analysisNode, error) +- makeNode = func(from *analysisNode, id PackageID) (*analysisNode, error) { +- an, ok := nodes[id] +- if !ok { +- ph := handles[id] +- if ph == nil { +- return nil, bug.Errorf("no metadata for %s", id) +- } +- +- // -- preorder -- +- +- an = &analysisNode{ +- parseCache: s.view.parseCache, +- fsource: s, // expose only ReadFile +- batch: batch, +- ph: ph, +- analyzers: facty, // all nodes run at least the facty analyzers +- stableNames: stableNames, +- } +- nodes[id] = an +- +- // -- recursion -- +- +- // Build subgraphs for dependencies. +- an.succs = make(map[PackageID]*analysisNode, len(ph.mp.DepsByPkgPath)) +- for _, depID := range ph.mp.DepsByPkgPath { +- dep, err := makeNode(an, depID) +- if err != nil { +- return nil, err +- } +- an.succs[depID] = dep +- } +- +- // -- postorder -- +- +- // Add leaf nodes (no successors) directly to queue. +- if len(an.succs) == 0 { +- leaves = append(leaves, an) +- } +- } +- // Add edge from predecessor. +- if from != nil { +- from.unfinishedSuccs.Add(+1) // incref +- an.preds = append(an.preds, from) +- } +- // Increment unfinishedPreds even for root nodes (from==nil), so that their +- // Action summaries are never cleared. +- an.unfinishedPreds.Add(+1) +- return an, nil +- } +- +- // For root packages, we run the enabled set of analyzers. +- var roots []*analysisNode +- for id := range pkgs { +- root, err := makeNode(nil, id) +- if err != nil { +- return nil, err +- } +- root.analyzers = enabledAnalyzers +- roots = append(roots, root) +- } +- +- // Progress reporting. If supported, gopls reports progress on analysis +- // passes that are taking a long time. +- maybeReport := func(completed int64) {} +- +- // Enable progress reporting if enabled by the user +- // and we have a capable reporter. +- if reporter != nil && reporter.SupportsWorkDoneProgress() && s.Options().AnalysisProgressReporting { +- var reportAfter = s.Options().ReportAnalysisProgressAfter // tests may set this to 0 +- const reportEvery = 1 * time.Second +- +- ctx, cancel := context.WithCancel(ctx) +- defer cancel() +- +- var ( +- reportMu sync.Mutex +- lastReport time.Time +- wd *progress.WorkDone +- ) +- defer func() { +- reportMu.Lock() +- defer reportMu.Unlock() +- +- if wd != nil { +- wd.End(ctx, "Done.") // ensure that the progress report exits +- } +- }() +- maybeReport = func(completed int64) { +- now := time.Now() +- if now.Sub(start) < reportAfter { +- return +- } +- +- reportMu.Lock() +- defer reportMu.Unlock() +- +- if wd == nil { +- wd = reporter.Start(ctx, AnalysisProgressTitle, "", nil, cancel) +- } +- +- if now.Sub(lastReport) > reportEvery { +- lastReport = now +- // Trailing space is intentional: some LSP clients strip newlines. +- msg := fmt.Sprintf(`Indexed %d/%d packages. (Set "analysisProgressReporting" to false to disable notifications.)`, +- completed, len(nodes)) +- wd.Report(ctx, msg, float64(completed)/float64(len(nodes))) +- } +- } +- } +- +- // Execute phase: run leaves first, adding +- // new nodes to the queue as they become leaves. +- var g errgroup.Group +- +- // Analysis is CPU-bound. +- // +- // Note: avoid g.SetLimit here: it makes g.Go stop accepting work, which +- // prevents workers from enqeuing, and thus finishing, and thus allowing the +- // group to make progress: deadlock. 
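// A minimal sketch of the scheduling pattern described above, assuming only
// golang.org/x/sync/errgroup, sync/atomic, and runtime: each completed node
// decrements its predecessors' count of unfinished successors and enqueues any
// predecessor that becomes a leaf. The buffered channel plays the role of
// g.SetLimit without the deadlock noted above, because g.Go never blocks; the
// new goroutine waits for a token instead, so running workers can always finish
// and release theirs. The node type and runDAG helper are hypothetical, not
// part of this file.
//
//	type node struct {
//		preds           []*node
//		unfinishedSuccs atomic.Int32
//	}
//
//	func runDAG(leaves []*node, work func(*node) error) error {
//		var g errgroup.Group
//		limiter := make(chan struct{}, runtime.GOMAXPROCS(0))
//		var enqueue func(*node)
//		enqueue = func(n *node) {
//			g.Go(func() error {
//				limiter <- struct{}{}        // acquire a CPU token
//				defer func() { <-limiter }() // release it
//				if err := work(n); err != nil {
//					return err
//				}
//				for _, p := range n.preds {
//					if p.unfinishedSuccs.Add(-1) == 0 {
//						enqueue(p) // predecessor became a leaf
//					}
//				}
//				return nil
//			})
//		}
//		for _, leaf := range leaves {
//			enqueue(leaf)
//		}
//		return g.Wait()
//	}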
+- limiter := make(chan unit, runtime.GOMAXPROCS(0)) +- var completed atomic.Int64 +- +- var enqueue func(*analysisNode) +- enqueue = func(an *analysisNode) { +- g.Go(func() error { +- limiter <- unit{} +- defer func() { <-limiter }() +- +- // Check to see if we already have a valid cache key. If not, compute it. +- // +- // The snapshot field that memoizes keys depends on whether this key is +- // for the analysis result including all enabled analyzer, or just facty analyzers. +- var keys *persistent.Map[PackageID, file.Hash] +- if _, root := pkgs[an.ph.mp.ID]; root { +- keys = s.fullAnalysisKeys +- } else { +- keys = s.factyAnalysisKeys +- } +- +- // As keys is referenced by a snapshot field, it's guarded by s.mu. +- s.mu.Lock() +- key, keyFound := keys.Get(an.ph.mp.ID) +- s.mu.Unlock() +- +- if !keyFound { +- key = an.cacheKey() +- s.mu.Lock() +- keys.Set(an.ph.mp.ID, key, nil) +- s.mu.Unlock() +- } +- +- summary, err := an.runCached(ctx, key) +- if err != nil { +- return err // cancelled, or failed to produce a package +- } +- +- maybeReport(completed.Add(1)) +- an.summary = summary +- +- // Notify each waiting predecessor, +- // and enqueue it when it becomes a leaf. +- for _, pred := range an.preds { +- if pred.unfinishedSuccs.Add(-1) == 0 { // decref +- enqueue(pred) +- } +- } +- +- // Notify each successor that we no longer need +- // its action summaries, which hold Result values. +- // After the last one, delete it, so that we +- // free up large results such as SSA. +- for _, succ := range an.succs { +- succ.decrefPreds() +- } +- return nil +- }) +- } +- for _, leaf := range leaves { +- enqueue(leaf) +- } +- if err := g.Wait(); err != nil { +- return nil, err // cancelled, or failed to produce a package +- } +- +- // Inv: all root nodes now have a summary (#66732). +- // +- // We know this is falsified empirically. This means either +- // the summary was "successfully" set to nil (above), or there +- // is a problem with the graph such the enqueuing leaves does +- // not lead to completion of roots (or an error). +- for _, root := range roots { +- if root.summary == nil { +- bug.Report("root analysisNode has nil summary") +- } +- } +- +- // Report diagnostics only from enabled actions that succeeded. +- // Errors from creating or analyzing packages are ignored. +- // Diagnostics are reported in the order of the analyzers argument. +- // +- // TODO(adonovan): ignoring action errors gives the caller no way +- // to distinguish "there are no problems in this code" from +- // "the code (or analyzers!) are so broken that we couldn't even +- // begin the analysis you asked for". +- // Even if current callers choose to discard the +- // results, we should propagate the per-action errors. +- var results []*Diagnostic +- for _, root := range roots { +- for _, a := range enabledAnalyzers { +- // Skip analyzers that were added only to +- // fulfil requirements of the original set. +- srcAnalyzer, ok := toSrc[a] +- if !ok { +- // Although this 'skip' operation is logically sound, +- // it is nonetheless surprising that its absence should +- // cause #60909 since none of the analyzers currently added for +- // requirements (e.g. ctrlflow, inspect, buildssa) +- // is capable of reporting diagnostics. +- if summary := root.summary.Actions[stableNames[a]]; summary != nil { +- if n := len(summary.Diagnostics); n > 0 { +- bug.Reportf("Internal error: got %d unexpected diagnostics from analyzer %s. 
This analyzer was added only to fulfil the requirements of the requested set of analyzers, and it is not expected that such analyzers report diagnostics. Please report this in issue #60909.", n, a) +- } +- } +- continue +- } +- +- // Inv: root.summary is the successful result of run (via runCached). +- // TODO(adonovan): fix: root.summary is sometimes nil! (#66732). +- summary, ok := root.summary.Actions[stableNames[a]] +- if summary == nil { +- panic(fmt.Sprintf("analyzeSummary.Actions[%q] = (nil, %t); got %v (#60551)", +- stableNames[a], ok, root.summary.Actions)) +- } +- if summary.Err != "" { +- continue // action failed +- } +- for _, gobDiag := range summary.Diagnostics { +- results = append(results, toSourceDiagnostic(srcAnalyzer, &gobDiag)) +- } +- } +- } +- return results, nil +-} +- +-func (an *analysisNode) decrefPreds() { +- if an.unfinishedPreds.Add(-1) == 0 { +- an.summary.Actions = nil +- } +-} +- +-// An analysisNode is a node in a doubly-linked DAG isomorphic to the +-// import graph. Each node represents a single package, and the DAG +-// represents a batch of analysis work done at once using a single +-// realm of token.Pos or types.Object values. +-// +-// A complete DAG is created anew for each batch of analysis; +-// subgraphs are not reused over time. +-// TODO(rfindley): with cached keys we can typically avoid building the full +-// DAG, so as an optimization we should rewrite this using a top-down +-// traversal, rather than bottom-up. +-// +-// Each node's run method is called in parallel postorder. On success, +-// its summary field is populated, either from the cache (hit), or by +-// type-checking and analyzing syntax (miss). +-type analysisNode struct { +- parseCache *parseCache // shared parse cache +- fsource file.Source // Snapshot.ReadFile, for use by Pass.ReadFile +- batch *typeCheckBatch // type checking batch, for shared type checking +- ph *packageHandle // package handle, for key and reachability analysis +- analyzers []*analysis.Analyzer // set of analyzers to run +- preds []*analysisNode // graph edges: +- succs map[PackageID]*analysisNode // (preds -> self -> succs) +- unfinishedSuccs atomic.Int32 +- unfinishedPreds atomic.Int32 // effectively a summary.Actions refcount +- summary *analyzeSummary // serializable result of analyzing this package +- stableNames map[*analysis.Analyzer]string // cross-process stable names for Analyzers +- +- summaryHashOnce sync.Once +- _summaryHash file.Hash // memoized hash of data affecting dependents +-} +- +-func (an *analysisNode) String() string { return string(an.ph.mp.ID) } +- +-// summaryHash computes the hash of the node summary, which may affect other +-// nodes depending on this node. +-// +-// The result is memoized to avoid redundant work when analyzing multiple +-// dependents. +-func (an *analysisNode) summaryHash() file.Hash { +- an.summaryHashOnce.Do(func() { +- hasher := sha256.New() +- fmt.Fprintf(hasher, "dep: %s\n", an.ph.mp.PkgPath) +- fmt.Fprintf(hasher, "compiles: %t\n", an.summary.Compiles) +- +- // action results: errors and facts +- for name, summary := range moremaps.Sorted(an.summary.Actions) { +- fmt.Fprintf(hasher, "action %s\n", name) +- if summary.Err != "" { +- fmt.Fprintf(hasher, "error %s\n", summary.Err) +- } else { +- fmt.Fprintf(hasher, "facts %s\n", summary.FactsHash) +- // We can safely omit summary.diagnostics +- // from the key since they have no downstream effect. 
+- } +- } +- hasher.Sum(an._summaryHash[:0]) +- }) +- return an._summaryHash +-} +- +-// analyzeSummary is a gob-serializable summary of successfully +-// applying a list of analyzers to a package. +-type analyzeSummary struct { +- Compiles bool // transitively free of list/parse/type errors +- Actions actionMap // maps analyzer stablename to analysis results (*actionSummary) +-} +- +-// actionMap defines a stable Gob encoding for a map. +-// TODO(adonovan): generalize and move to a library when we can use generics. +-type actionMap map[string]*actionSummary +- +-var ( +- _ gob.GobEncoder = (actionMap)(nil) +- _ gob.GobDecoder = (*actionMap)(nil) +-) +- +-type actionsMapEntry struct { +- K string +- V *actionSummary +-} +- +-func (m actionMap) GobEncode() ([]byte, error) { +- entries := make([]actionsMapEntry, 0, len(m)) +- for k, v := range m { +- entries = append(entries, actionsMapEntry{k, v}) +- } +- sort.Slice(entries, func(i, j int) bool { +- return entries[i].K < entries[j].K +- }) +- var buf bytes.Buffer +- err := gob.NewEncoder(&buf).Encode(entries) +- return buf.Bytes(), err +-} +- +-func (m *actionMap) GobDecode(data []byte) error { +- var entries []actionsMapEntry +- if err := gob.NewDecoder(bytes.NewReader(data)).Decode(&entries); err != nil { +- return err +- } +- *m = make(actionMap, len(entries)) +- for _, e := range entries { +- (*m)[e.K] = e.V +- } +- return nil +-} +- +-// actionSummary is a gob-serializable summary of one possibly failed analysis action. +-// If Err is non-empty, the other fields are undefined. +-type actionSummary struct { +- Facts []byte // the encoded facts.Set +- FactsHash file.Hash // hash(Facts) +- Diagnostics []gobDiagnostic +- Err string // "" => success +-} +- +-var ( +- // inFlightAnalyses records active analysis operations so that later requests +- // can be satisfied by joining onto earlier requests that are still active. +- // +- // Note that persistent=false, so results are cleared once they are delivered +- // to awaiting goroutines. +- inFlightAnalyses = newFutureCache[file.Hash, *analyzeSummary](false) +- +- // cacheLimit reduces parallelism of filecache updates. +- // We allow more than typical GOMAXPROCS as it's a mix of CPU and I/O. +- cacheLimit = make(chan unit, 32) +-) +- +-// runCached applies a list of analyzers (plus any others +-// transitively required by them) to a package. It succeeds as long +-// as it could produce a types.Package, even if there were direct or +-// indirect list/parse/type errors, and even if all the analysis +-// actions failed. It usually fails only if the package was unknown, +-// a file was missing, or the operation was cancelled. +-// +-// The provided key is the cache key for this package. +-func (an *analysisNode) runCached(ctx context.Context, key file.Hash) (*analyzeSummary, error) { +- // At this point we have the action results (serialized packages and facts) +- // of our immediate dependencies, and the metadata and content of this +- // package. +- // +- // We now consult a global cache of promised results. If nothing material has +- // changed, we'll make a hit in the shared cache. +- +- // Access the cache. 
+- var summary *analyzeSummary +- const cacheKind = "analysis" +- if data, err := filecache.Get(cacheKind, key); err == nil { +- // cache hit +- analyzeSummaryCodec.Decode(data, &summary) +- if summary == nil { // debugging #66732 +- bug.Reportf("analyzeSummaryCodec.Decode yielded nil *analyzeSummary") +- } +- } else if err != filecache.ErrNotFound { +- return nil, bug.Errorf("internal error reading shared cache: %v", err) +- } else { +- // Cache miss: do the work. +- cachedSummary, err := inFlightAnalyses.get(ctx, key, func(ctx context.Context) (*analyzeSummary, error) { +- summary, err := an.run(ctx) +- if err != nil { +- return nil, err +- } +- if summary == nil { // debugging #66732 (can't happen) +- bug.Reportf("analyzeNode.run returned nil *analyzeSummary") +- } +- go func() { +- cacheLimit <- unit{} // acquire token +- defer func() { <-cacheLimit }() // release token +- +- data := analyzeSummaryCodec.Encode(summary) +- if false { +- log.Printf("Set key=%d value=%d id=%s\n", len(key), len(data), an.ph.mp.ID) +- } +- if err := filecache.Set(cacheKind, key, data); err != nil { +- event.Error(ctx, "internal error updating analysis shared cache", err) +- } +- }() +- return summary, nil +- }) +- if err != nil { +- return nil, err +- } +- +- // Copy the computed summary. In decrefPreds, we may zero out +- // summary.actions, but can't mutate a shared result. +- copy := *cachedSummary +- summary = © +- } +- +- return summary, nil +-} +- +-// cacheKey returns a cache key that is a cryptographic digest +-// of the all the values that might affect type checking and analysis: +-// the analyzer names, package metadata, names and contents of +-// compiled Go files, and vdeps (successor) information +-// (export data and facts). +-func (an *analysisNode) cacheKey() file.Hash { +- hasher := sha256.New() +- +- // In principle, a key must be the hash of an +- // unambiguous encoding of all the relevant data. +- // If it's ambiguous, we risk collisions. +- +- // analyzers +- fmt.Fprintf(hasher, "analyzers: %d\n", len(an.analyzers)) +- for _, a := range an.analyzers { +- fmt.Fprintln(hasher, a.Name) +- } +- +- // type checked package +- fmt.Fprintf(hasher, "package: %s\n", an.ph.key) +- +- // metadata errors: used for 'compiles' field +- fmt.Fprintf(hasher, "errors: %d", len(an.ph.mp.Errors)) +- +- // vdeps, in PackageID order +- for _, vdep := range moremaps.Sorted(an.succs) { +- hash := vdep.summaryHash() +- hasher.Write(hash[:]) +- } +- +- var hash file.Hash +- hasher.Sum(hash[:0]) +- return hash +-} +- +-// run implements the cache-miss case. +-// This function does not access the snapshot. +-// +-// Postcondition: on success, the analyzeSummary.Actions +-// key set is {a.Name for a in analyzers}. +-func (an *analysisNode) run(ctx context.Context) (*analyzeSummary, error) { +- // Type-check the package syntax. +- pkg, err := an.typeCheck(ctx) +- if err != nil { +- return nil, err +- } +- +- // Poll cancellation state. +- if err := ctx.Err(); err != nil { +- return nil, err +- } +- +- // -- analysis -- +- +- // Build action graph for this package. +- // Each graph node (action) is one unit of analysis. 
+- actions := make(map[*analysis.Analyzer]*action) +- var mkAction func(a *analysis.Analyzer) *action +- mkAction = func(a *analysis.Analyzer) *action { +- act, ok := actions[a] +- if !ok { +- var hdeps []*action +- for _, req := range a.Requires { +- hdeps = append(hdeps, mkAction(req)) +- } +- act = &action{ +- a: a, +- fsource: an.fsource, +- stableName: an.stableNames[a], +- pkg: pkg, +- vdeps: an.succs, +- hdeps: hdeps, +- } +- actions[a] = act +- } +- return act +- } +- +- // Build actions for initial package. +- var roots []*action +- for _, a := range an.analyzers { +- roots = append(roots, mkAction(a)) +- } +- +- // Execute the graph in parallel. +- execActions(ctx, roots) +- // Inv: each root's summary is set (whether success or error). +- +- // Don't return (or cache) the result in case of cancellation. +- if err := ctx.Err(); err != nil { +- return nil, err // cancelled +- } +- +- // Return summaries only for the requested actions. +- summaries := make(map[string]*actionSummary) +- for _, root := range roots { +- if root.summary == nil { +- panic("root has nil action.summary (#60551)") +- } +- summaries[root.stableName] = root.summary +- } +- +- return &analyzeSummary{ +- Compiles: pkg.compiles, +- Actions: summaries, +- }, nil +-} +- +-func (an *analysisNode) typeCheck(ctx context.Context) (*analysisPackage, error) { +- ppkg, err := an.batch.getPackage(ctx, an.ph) +- if err != nil { +- return nil, err +- } +- +- compiles := len(an.ph.mp.Errors) == 0 && len(ppkg.TypeErrors()) == 0 +- +- // The go/analysis framework implicitly promises to deliver +- // trees with legacy ast.Object resolution. Do that now. +- files := make([]*ast.File, len(ppkg.CompiledGoFiles())) +- for i, p := range ppkg.CompiledGoFiles() { +- p.Resolve() +- files[i] = p.File +- if p.ParseErr != nil { +- compiles = false // parse error +- } +- } +- +- // The fact decoder needs a means to look up a Package by path. +- pkgLookup := typesLookup(ppkg.Types()) +- factsDecoder := facts.NewDecoderFunc(ppkg.Types(), func(path string) *types.Package { +- // Note: Decode is called concurrently, and thus so is this function. +- +- // Does the fact relate to a package reachable through imports? +- if !an.ph.reachable.MayContain(path) { +- return nil +- } +- +- return pkgLookup(path) +- }) +- +- var typeErrors []types.Error +-filterErrors: +- for _, typeError := range ppkg.TypeErrors() { +- // Suppress type errors in files with parse errors +- // as parser recovery can be quite lossy (#59888). +- for _, p := range ppkg.CompiledGoFiles() { +- if p.ParseErr != nil && astutil.NodeContains(p.File, typeError.Pos) { +- continue filterErrors +- } +- } +- typeErrors = append(typeErrors, typeError) +- } +- +- for _, vdep := range an.succs { +- if !vdep.summary.Compiles { +- compiles = false // transitive error +- } +- } +- +- return &analysisPackage{ +- pkg: ppkg, +- files: files, +- typeErrors: typeErrors, +- compiles: compiles, +- factsDecoder: factsDecoder, +- }, nil +-} +- +-// typesLookup implements a concurrency safe depth-first traversal searching +-// imports of pkg for a given package path. +-func typesLookup(pkg *types.Package) func(string) *types.Package { +- var ( +- mu sync.Mutex // guards impMap and pending +- +- // impMap memoizes the lookup of package paths. +- impMap = map[string]*types.Package{ +- pkg.Path(): pkg, +- } +- // pending is a FIFO queue of packages that have yet to have their +- // dependencies fully scanned. +- // Invariant: all entries in pending are already mapped in impMap. 
+- pending = []*types.Package{pkg} +- ) +- +- // search scans children the next package in pending, looking for pkgPath. +- search := func(pkgPath string) (sought *types.Package, numPending int) { +- mu.Lock() +- defer mu.Unlock() +- +- if p, ok := impMap[pkgPath]; ok { +- return p, len(pending) +- } +- +- if len(pending) == 0 { +- return nil, 0 +- } +- +- pkg := pending[0] +- pending = pending[1:] +- for _, dep := range pkg.Imports() { +- depPath := dep.Path() +- if _, ok := impMap[depPath]; ok { +- continue +- } +- impMap[depPath] = dep +- +- pending = append(pending, dep) +- if depPath == pkgPath { +- // Don't return early; finish processing pkg's deps. +- sought = dep +- } +- } +- return sought, len(pending) +- } +- +- return func(pkgPath string) *types.Package { +- p, np := (*types.Package)(nil), 1 +- for p == nil && np > 0 { +- p, np = search(pkgPath) +- } +- return p +- } +-} +- +-// analysisPackage contains information about a package, including +-// syntax trees, used transiently during its type-checking and analysis. +-type analysisPackage struct { +- pkg *Package +- files []*ast.File // same as parsed[i].File +- typeErrors []types.Error // filtered type checker errors +- compiles bool // package is transitively free of list/parse/type errors +- factsDecoder *facts.Decoder +-} +- +-// An action represents one unit of analysis work: the application of +-// one analysis to one package. Actions form a DAG, both within a +-// package (as different analyzers are applied, either in sequence or +-// parallel), and across packages (as dependencies are analyzed). +-type action struct { +- once sync.Once +- a *analysis.Analyzer +- fsource file.Source // Snapshot.ReadFile, for Pass.ReadFile +- stableName string // cross-process stable name of analyzer +- pkg *analysisPackage +- hdeps []*action // horizontal dependencies +- vdeps map[PackageID]*analysisNode // vertical dependencies +- +- // results of action.exec(): +- result any // result of Run function, of type a.ResultType +- summary *actionSummary +- err error +-} +- +-func (act *action) String() string { +- return fmt.Sprintf("%s@%s", act.a.Name, act.pkg.pkg.metadata.ID) +-} +- +-// execActions executes a set of action graph nodes in parallel. +-// Postcondition: each action.summary is set, even in case of error. +-func execActions(ctx context.Context, actions []*action) { +- var wg sync.WaitGroup +- for _, act := range actions { +- wg.Go(func() { +- act.once.Do(func() { +- execActions(ctx, act.hdeps) // analyze "horizontal" dependencies +- act.result, act.summary, act.err = act.exec(ctx) +- if act.err != nil { +- act.summary = &actionSummary{Err: act.err.Error()} +- // TODO(adonovan): suppress logging. But +- // shouldn't the root error's causal chain +- // include this information? +- if false { // debugging +- log.Printf("act.exec(%v) failed: %v", act, act.err) +- } +- } +- }) +- if act.summary == nil { +- panic("nil action.summary (#60551)") +- } +- }) +- } +- wg.Wait() +-} +- +-// exec defines the execution of a single action. +-// It returns the (ephemeral) result of the analyzer's Run function, +-// along with its (serializable) facts and diagnostics. +-// Or it returns an error if the analyzer did not run to +-// completion and deliver a valid result. +-func (act *action) exec(ctx context.Context) (any, *actionSummary, error) { +- analyzer := act.a +- apkg := act.pkg +- +- hasFacts := len(analyzer.FactTypes) > 0 +- +- // Report an error if any action dependency (vertical or horizontal) failed. 
+- // To avoid long error messages describing chains of failure, +- // we return the dependencies' error' unadorned. +- if hasFacts { +- // TODO(adonovan): use deterministic order. +- for _, vdep := range act.vdeps { +- if summ := vdep.summary.Actions[act.stableName]; summ.Err != "" { +- return nil, nil, errors.New(summ.Err) +- } +- } +- } +- for _, dep := range act.hdeps { +- if dep.err != nil { +- return nil, nil, dep.err +- } +- } +- // Inv: all action dependencies succeeded. +- +- // Were there list/parse/type errors that might prevent analysis? +- if !apkg.compiles && !analyzer.RunDespiteErrors { +- return nil, nil, fmt.Errorf("skipping analysis %q because package %q does not compile", analyzer.Name, apkg.pkg.metadata.ID) +- } +- // Inv: package is well-formed enough to proceed with analysis. +- +- if false { // debugging +- log.Println("action.exec", act) +- } +- +- // Gather analysis Result values from horizontal dependencies. +- inputs := make(map[*analysis.Analyzer]any) +- for _, dep := range act.hdeps { +- inputs[dep.a] = dep.result +- } +- +- // TODO(adonovan): opt: facts.Set works but it may be more +- // efficient to fork and tailor it to our precise needs. +- // +- // We've already sharded the fact encoding by action +- // so that it can be done in parallel. +- // We could eliminate locking. +- // We could also dovetail more closely with the export data +- // decoder to obtain a more compact representation of +- // packages and objects (e.g. its internal IDs, instead +- // of PkgPaths and objectpaths.) +- // More importantly, we should avoid re-export of +- // facts that related to objects that are discarded +- // by "deep" export data. Better still, use a "shallow" approach. +- +- // Read and decode analysis facts for each direct import. +- factset, err := apkg.factsDecoder.Decode(func(pkgPath string) ([]byte, error) { +- if !hasFacts { +- return nil, nil // analyzer doesn't use facts, so no vdeps +- } +- +- // Package.Imports() may contain a fake "C" package. Ignore it. +- if pkgPath == "C" { +- return nil, nil +- } +- +- id, ok := apkg.pkg.metadata.DepsByPkgPath[PackagePath(pkgPath)] +- if !ok { +- // This may mean imp was synthesized by the type +- // checker because it failed to import it for any reason +- // (e.g. bug processing export data; metadata ignoring +- // a cycle-forming import). +- // In that case, the fake package's imp.Path +- // is set to the failed importPath (and thus +- // it may lack a "vendor/" prefix). +- // +- // For now, silently ignore it on the assumption +- // that the error is already reported elsewhere. +- // return nil, fmt.Errorf("missing metadata") +- return nil, nil +- } +- +- vdep := act.vdeps[id] +- if vdep == nil { +- return nil, bug.Errorf("internal error in %s: missing vdep for id=%s", apkg.pkg.Types().Path(), id) +- } +- +- return vdep.summary.Actions[act.stableName].Facts, nil +- }) +- if err != nil { +- return nil, nil, fmt.Errorf("internal error decoding analysis facts: %w", err) +- } +- +- // TODO(adonovan): make Export*Fact panic rather than discarding +- // undeclared fact types, so that we discover bugs in analyzers. +- factFilter := make(map[reflect.Type]bool) +- for _, f := range analyzer.FactTypes { +- factFilter[reflect.TypeOf(f)] = true +- } +- +- // Now run the (pkg, analyzer) action. +- var diagnostics []gobDiagnostic +- +- pass := &analysis.Pass{ +- Analyzer: analyzer, +- Fset: apkg.pkg.FileSet(), +- Files: apkg.files, +- OtherFiles: nil, // since gopls doesn't handle non-Go (e.g. 
asm) files +- IgnoredFiles: nil, // zero-config gopls should analyze these files in another view +- Pkg: apkg.pkg.Types(), +- TypesInfo: apkg.pkg.TypesInfo(), +- TypesSizes: apkg.pkg.TypesSizes(), +- TypeErrors: apkg.typeErrors, +- ResultOf: inputs, +- Report: func(d analysis.Diagnostic) { +- // Assert that SuggestedFixes are well formed. +- // +- // ValidateFixes allows a fix.End to be slightly beyond +- // EOF to avoid spurious assertions when reporting +- // fixes as the end of truncated files; see #71659. +- if err := analysisinternal.ValidateFixes(apkg.pkg.FileSet(), analyzer, d.SuggestedFixes); err != nil { +- bug.Reportf("invalid SuggestedFixes: %v", err) +- d.SuggestedFixes = nil +- } +- diagnostic, err := toGobDiagnostic(apkg.pkg, analyzer, d) +- if err != nil { +- // Don't bug.Report here: these errors all originate in +- // posToLocation, and we can more accurately discriminate +- // severe errors from benign ones in that function. +- event.Error(ctx, fmt.Sprintf("internal error converting diagnostic from analyzer %q", analyzer.Name), err) +- return +- } +- diagnostics = append(diagnostics, diagnostic) +- }, +- ImportObjectFact: factset.ImportObjectFact, +- ExportObjectFact: factset.ExportObjectFact, +- ImportPackageFact: factset.ImportPackageFact, +- ExportPackageFact: factset.ExportPackageFact, +- AllObjectFacts: func() []analysis.ObjectFact { return factset.AllObjectFacts(factFilter) }, +- AllPackageFacts: func() []analysis.PackageFact { return factset.AllPackageFacts(factFilter) }, +- } +- +- pass.ReadFile = func(filename string) ([]byte, error) { +- // Read file from snapshot, to ensure reads are consistent. +- // +- // TODO(adonovan): make the dependency analysis sound by +- // incorporating these additional files into the analysis +- // hash. This requires either (a) preemptively reading and +- // hashing a potentially large number of mostly irrelevant +- // files; or (b) some kind of dynamic dependency discovery +- // system like used in Bazel for C++ headers. Neither entices. +- if err := analysisinternal.CheckReadable(pass, filename); err != nil { +- return nil, err +- } +- h, err := act.fsource.ReadFile(ctx, protocol.URIFromPath(filename)) +- if err != nil { +- return nil, err +- } +- content, err := h.Content() +- if err != nil { +- return nil, err // file doesn't exist +- } +- return slices.Clone(content), nil // follow ownership of os.ReadFile +- } +- +- // Recover from panics (only) within the analyzer logic. +- // (Use an anonymous function to limit the recover scope.) +- var result any +- func() { +- start := time.Now() +- defer func() { +- if r := recover(); r != nil { +- // An Analyzer panicked, likely due to a bug. +- // +- // In general we want to discover and fix such panics quickly, +- // so we don't suppress them, but some bugs in third-party +- // analyzers cannot be quickly fixed, so we use an allowlist +- // to suppress panics. +- const strict = true +- if strict && bug.PanicOnBugs && +- analyzer.Name != "buildir" { // see https://github.com/dominikh/go-tools/issues/1343 +- // Uncomment this when debugging suspected failures +- // in the driver, not the analyzer. +- if false { +- debug.SetTraceback("all") // show all goroutines +- } +- panic(r) +- } else { +- // In production, suppress the panic and press on. +- err = fmt.Errorf("analysis %s for package %s panicked: %v", analyzer.Name, pass.Pkg.Path(), r) +- } +- } +- +- // Accumulate running time for each checker. 
+- analyzerRunTimesMu.Lock() +- analyzerRunTimes[analyzer] += time.Since(start) +- analyzerRunTimesMu.Unlock() +- }() +- +- result, err = pass.Analyzer.Run(pass) +- }() +- if err != nil { +- return nil, nil, err +- } +- +- if got, want := reflect.TypeOf(result), pass.Analyzer.ResultType; got != want { +- return nil, nil, bug.Errorf( +- "internal error: on package %s, analyzer %s returned a result of type %v, but declared ResultType %v", +- pass.Pkg.Path(), pass.Analyzer, got, want) +- } +- +- // Disallow Export*Fact calls after Run. +- // (A panic means the Analyzer is abusing concurrency.) +- pass.ExportObjectFact = func(obj types.Object, fact analysis.Fact) { +- panic(fmt.Sprintf("%v: Pass.ExportObjectFact(%s, %T) called after Run", act, obj, fact)) +- } +- pass.ExportPackageFact = func(fact analysis.Fact) { +- panic(fmt.Sprintf("%v: Pass.ExportPackageFact(%T) called after Run", act, fact)) +- } +- +- factsdata := factset.Encode() +- return result, &actionSummary{ +- Diagnostics: diagnostics, +- Facts: factsdata, +- FactsHash: file.HashOf(factsdata), +- }, nil +-} +- +-var ( +- analyzerRunTimesMu sync.Mutex +- analyzerRunTimes = make(map[*analysis.Analyzer]time.Duration) +-) +- +-type LabelDuration struct { +- Label string +- Duration time.Duration +-} +- +-// AnalyzerRunTimes returns the accumulated time spent in each Analyzer's +-// Run function since process start, in descending order. +-func AnalyzerRunTimes() []LabelDuration { +- analyzerRunTimesMu.Lock() +- defer analyzerRunTimesMu.Unlock() +- +- slice := make([]LabelDuration, 0, len(analyzerRunTimes)) +- for a, t := range analyzerRunTimes { +- slice = append(slice, LabelDuration{Label: a.Name, Duration: t}) +- } +- sort.Slice(slice, func(i, j int) bool { +- return slice[i].Duration > slice[j].Duration +- }) +- return slice +-} +- +-// requiredAnalyzers returns the transitive closure of required analyzers in preorder. +-func requiredAnalyzers(analyzers []*analysis.Analyzer) []*analysis.Analyzer { +- var result []*analysis.Analyzer +- seen := make(map[*analysis.Analyzer]bool) +- var visitAll func([]*analysis.Analyzer) +- visitAll = func(analyzers []*analysis.Analyzer) { +- for _, a := range analyzers { +- if !seen[a] { +- seen[a] = true +- result = append(result, a) +- visitAll(a.Requires) +- } +- } +- } +- visitAll(analyzers) +- return result +-} +- +-var analyzeSummaryCodec = frob.CodecFor[*analyzeSummary]() +- +-// -- data types for serialization of analysis.Diagnostic and golang.Diagnostic -- +- +-// (The name says gob but we use frob.) +-var diagnosticsCodec = frob.CodecFor[[]gobDiagnostic]() +- +-type gobDiagnostic struct { +- Location protocol.Location +- Severity protocol.DiagnosticSeverity +- Code string +- CodeHref string +- Source string +- Message string +- SuggestedFixes []gobSuggestedFix +- Related []gobRelatedInformation +- Tags []protocol.DiagnosticTag +-} +- +-type gobRelatedInformation struct { +- Location protocol.Location +- Message string +-} +- +-type gobSuggestedFix struct { +- Message string +- TextEdits []gobTextEdit +- Command *gobCommand +- ActionKind protocol.CodeActionKind +-} +- +-type gobCommand struct { +- Title string +- Command string +- Arguments []json.RawMessage +-} +- +-type gobTextEdit struct { +- Location protocol.Location +- NewText []byte +-} +- +-// toGobDiagnostic converts an analysis.Diagnosic to a serializable gobDiagnostic, +-// which requires expanding token.Pos positions into protocol.Location form. 
+-func toGobDiagnostic(pkg *Package, a *analysis.Analyzer, diag analysis.Diagnostic) (gobDiagnostic, error) { +- var fixes []gobSuggestedFix +- for _, fix := range diag.SuggestedFixes { +- var gobEdits []gobTextEdit +- for _, textEdit := range fix.TextEdits { +- loc, err := diagnosticPosToLocation(pkg, false, textEdit.Pos, textEdit.End) +- if err != nil { +- return gobDiagnostic{}, fmt.Errorf("in SuggestedFixes: %w", err) +- } +- gobEdits = append(gobEdits, gobTextEdit{ +- Location: loc, +- NewText: textEdit.NewText, +- }) +- } +- fixes = append(fixes, gobSuggestedFix{ +- Message: fix.Message, +- TextEdits: gobEdits, +- }) +- } +- +- var related []gobRelatedInformation +- for _, r := range diag.Related { +- // The position of RelatedInformation may be +- // within another (dependency) package. +- const allowDeps = true +- loc, err := diagnosticPosToLocation(pkg, allowDeps, r.Pos, r.End) +- if err != nil { +- return gobDiagnostic{}, fmt.Errorf("in Related: %w", err) +- } +- related = append(related, gobRelatedInformation{ +- Location: loc, +- Message: r.Message, +- }) +- } +- +- loc, err := diagnosticPosToLocation(pkg, false, diag.Pos, diag.End) +- if err != nil { +- return gobDiagnostic{}, err +- } +- +- // The Code column of VSCode's Problems table renders this +- // information as "Source(Code)" where code is a link to CodeHref. +- // (The code field must be nonempty for anything to appear.) +- diagURL := effectiveURL(a, diag) +- code := "default" +- if diag.Category != "" { +- code = diag.Category +- } +- +- return gobDiagnostic{ +- Location: loc, +- // Severity for analysis diagnostics is dynamic, +- // based on user configuration per analyzer. +- Code: code, +- CodeHref: diagURL, +- Source: a.Name, +- Message: diag.Message, +- SuggestedFixes: fixes, +- Related: related, +- // Analysis diagnostics do not contain tags. +- }, nil +-} +- +-// diagnosticPosToLocation converts from token.Pos to protocol form, in the +-// context of the specified package and, optionally, its dependencies. +-func diagnosticPosToLocation(pkg *Package, allowDeps bool, start, end token.Pos) (protocol.Location, error) { +- if end == token.NoPos { +- end = start +- } +- +- fset := pkg.FileSet() +- tokFile := fset.File(start) +- +- // Find existing mapper by file name. +- // (Don't require an exact token.File match +- // as the analyzer may have re-parsed the file.) +- var ( +- mapper *protocol.Mapper +- fixed bool +- ) +- for _, p := range pkg.CompiledGoFiles() { +- if p.Tok.Name() == tokFile.Name() { +- mapper = p.Mapper +- fixed = p.Fixed() // suppress some assertions after parser recovery +- break +- } +- } +- // TODO(adonovan): search pkg.AsmFiles too; see #71754. +- if mapper != nil { +- // debugging #64547 +- fileStart := token.Pos(tokFile.Base()) +- fileEnd := fileStart + token.Pos(tokFile.Size()) +- if start < fileStart { +- if !fixed { +- bug.Reportf("start < start of file") +- } +- start = fileStart +- } +- if end < start { +- // This can happen if End is zero (#66683) +- // or a small positive displacement from zero +- // due to recursive Node.End() computation. +- // This usually arises from poor parser recovery +- // of an incomplete term at EOF. +- if !fixed { +- bug.Reportf("end < start of file") +- } +- end = fileEnd +- } +- if end > fileEnd+1 { +- if !fixed { +- bug.Reportf("end > end of file + 1") +- } +- end = fileEnd +- } +- +- return mapper.PosLocation(tokFile, start, end) +- } +- +- // Inv: the positions are not within this package. 
+- +- if allowDeps { +- // Positions in Diagnostic.RelatedInformation may belong to a +- // dependency package. We cannot accurately map them to +- // protocol.Location coordinates without a Mapper for the +- // relevant file, but none exists if the file was loaded from +- // export data, and we have no means (Snapshot) of loading it. +- // +- // So, fall back to approximate conversion to UTF-16: +- // for non-ASCII text, the column numbers may be wrong. +- var ( +- startPosn = safetoken.StartPosition(fset, start) +- endPosn = safetoken.EndPosition(fset, end) +- ) +- return protocol.Location{ +- URI: protocol.URIFromPath(startPosn.Filename), +- Range: protocol.Range{ +- Start: protocol.Position{ +- Line: uint32(startPosn.Line - 1), +- Character: uint32(startPosn.Column - 1), +- }, +- End: protocol.Position{ +- Line: uint32(endPosn.Line - 1), +- Character: uint32(endPosn.Column - 1), +- }, +- }, +- }, nil +- } +- +- // The start position was not among the package's parsed +- // Go files, indicating that the analyzer added new files +- // to the FileSet. +- // +- // For example, the cgocall analyzer re-parses and +- // type-checks some of the files in a special environment; +- // and asmdecl and other low-level runtime analyzers call +- // ReadFile to parse non-Go files. +- // (This is a supported feature, documented at go/analysis.) +- // +- // In principle these files could be: +- // +- // - OtherFiles (non-Go files such as asm). +- // However, we set Pass.OtherFiles=[] because +- // gopls won't service "diagnose" requests +- // for non-Go files, so there's no point +- // reporting diagnostics in them. +- // +- // - IgnoredFiles (files tagged for other configs). +- // However, we set Pass.IgnoredFiles=[] because, +- // in most cases, zero-config gopls should create +- // another view that covers these files. +- // +- // - Referents of //line directives, as in cgo packages. +- // The file names in this case are not known a priori. +- // gopls generally tries to avoid honoring line directives, +- // but analyzers such as cgocall may honor them. +- // +- // In short, it's unclear how this can be reached +- // other than due to an analyzer bug. +- +- return protocol.Location{}, bug.Errorf("diagnostic location is not among files of package: %s", tokFile.Name()) +-} +- +-// effectiveURL computes the effective URL of diag, +-// using the algorithm specified at Diagnostic.URL. +-func effectiveURL(a *analysis.Analyzer, diag analysis.Diagnostic) string { +- u := diag.URL +- if u == "" && diag.Category != "" { +- u = "#" + diag.Category +- } +- if base, err := urlpkg.Parse(a.URL); err == nil { +- if rel, err := urlpkg.Parse(u); err == nil { +- u = base.ResolveReference(rel).String() +- } +- } +- return u +-} +- +-// stableName returns a name for the analyzer that is unique and +-// stable across address spaces. +-// +-// Analyzer names are not unique. For example, gopls includes +-// both x/tools/passes/nilness and staticcheck/nilness. +-// For serialization, we must assign each analyzer a unique identifier +-// that two gopls processes accessing the cache can agree on. +-func stableName(a *analysis.Analyzer) string { +- // Incorporate the file and line of the analyzer's Run function. +- addr := reflect.ValueOf(a.Run).Pointer() +- fn := runtime.FuncForPC(addr) +- file, line := fn.FileLine(addr) +- +- // It is tempting to use just a.Name as the stable name when +- // it is unique, but making them always differ helps avoid +- // name/stablename confusion. 
+- return fmt.Sprintf("%s(%s:%d)", a.Name, filepath.Base(file), line) +-} +diff -urN a/gopls/internal/cache/cache.go b/gopls/internal/cache/cache.go +--- a/gopls/internal/cache/cache.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/cache/cache.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,122 +0,0 @@ +-// Copyright 2019 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package cache +- +-import ( +- "reflect" +- "strconv" +- "sync/atomic" +- +- "golang.org/x/tools/gopls/internal/protocol/command" +- "golang.org/x/tools/gopls/internal/util/memoize" +- "golang.org/x/tools/internal/imports" +-) +- +-// ballast is a 100MB unused byte slice that exists only to reduce garbage +-// collector CPU in small workspaces and at startup. +-// +-// The redesign of gopls described at https://go.dev/blog/gopls-scalability +-// moved gopls to a model where it has a significantly smaller heap, yet still +-// allocates many short-lived data structures during parsing and type checking. +-// As a result, for some workspaces, particularly when opening a low-level +-// package, the steady-state heap may be a small fraction of total allocation +-// while rechecking the workspace, paradoxically causing the GC to consume much +-// more CPU. For example, in one benchmark that analyzes the starlark +-// repository, the steady-state heap was ~10MB, and the process of diagnosing +-// the workspace allocated 100-200MB. +-// +-// The reason for this paradoxical behavior is that GC pacing +-// (https://tip.golang.org/doc/gc-guide#GOGC) causes the collector to trigger +-// at some multiple of the steady-state heap size, so a small steady-state heap +-// causes GC to trigger sooner and more often when allocating the ephemeral +-// structures. +-// +-// Allocating a 100MB ballast avoids this problem by ensuring a minimum heap +-// size. The value of 100MB was chosen to be proportional to the in-memory +-// cache in front the filecache package, and the throughput of type checking. +-// Gopls already requires hundreds of megabytes of RAM to function. +-// +-// Note that while other use cases for a ballast were made obsolete by +-// GOMEMLIMIT, ours is not. GOMEMLIMIT helps in cases where you have a +-// containerized service and want to optimize its latency and throughput by +-// taking advantage of available memory. However, in our case gopls is running +-// on the developer's machine alongside other applications, and can have a wide +-// range of memory footprints depending on the size of the user's workspace. +-// Setting GOMEMLIMIT to too low a number would make gopls perform poorly on +-// large repositories, and setting it to too high a number would make gopls a +-// badly behaved tenant. Short of calibrating GOMEMLIMIT based on the user's +-// workspace (an intractible problem), there is no way for gopls to use +-// GOMEMLIMIT to solve its GC CPU problem. +-// +-// Because this allocation is large and occurs early, there is a good chance +-// that rather than being recycled, it comes directly from the OS already +-// zeroed, and since it is never accessed, the memory region may avoid being +-// backed by pages of RAM. 
But see +-// https://groups.google.com/g/golang-nuts/c/66d0cItfkjY/m/3NvgzL_sAgAJ +-// +-// For more details on this technique, see: +-// https://blog.twitch.tv/en/2019/04/10/go-memory-ballast-how-i-learnt-to-stop-worrying-and-love-the-heap/ +-var ballast = make([]byte, 100*1e6) +- +-// New Creates a new cache for gopls operation results, using the given file +-// set, shared store, and session options. +-// +-// Both the fset and store may be nil, but if store is non-nil so must be fset +-// (and they must always be used together), otherwise it may be possible to get +-// cached data referencing token.Pos values not mapped by the FileSet. +-func New(store *memoize.Store) *Cache { +- index := atomic.AddInt64(&cacheIndex, 1) +- +- if store == nil { +- store = &memoize.Store{} +- } +- +- c := &Cache{ +- id: strconv.FormatInt(index, 10), +- store: store, +- memoizedFS: newMemoizedFS(), +- modCache: &sharedModCache{ +- caches: make(map[string]*imports.DirInfoCache), +- timers: make(map[string]*refreshTimer), +- }, +- } +- return c +-} +- +-// A Cache holds content that is shared across multiple gopls sessions. +-type Cache struct { +- id string +- +- // store holds cached calculations. +- // +- // TODO(rfindley): at this point, these are not important, as we've moved our +- // content-addressable cache to the file system (the filecache package). It +- // is unlikely that this shared cache provides any shared value. We should +- // consider removing it, replacing current uses with a simpler futures cache, +- // as we've done for e.g. type-checked packages. +- store *memoize.Store +- +- // memoizedFS holds a shared file.Source that caches reads. +- // +- // Reads are invalidated when *any* session gets a didChangeWatchedFile +- // notification. This is fine: it is the responsibility of memoizedFS to hold +- // our best knowledge of the current file system state. +- *memoizedFS +- +- // modCache holds the shared goimports state for GOMODCACHE directories +- modCache *sharedModCache +-} +- +-var cacheIndex, sessionIndex, viewIndex int64 +- +-func (c *Cache) ID() string { return c.id } +-func (c *Cache) MemStats() map[reflect.Type]int { return c.store.Stats() } +- +-// FileStats returns information about the set of files stored in the cache. +-// It is intended for debugging only. +-func (c *Cache) FileStats() (stats command.FileStats) { +- stats.Total, stats.Largest, stats.Errs = c.fileStats() +- return +-} +diff -urN a/gopls/internal/cache/check.go b/gopls/internal/cache/check.go +--- a/gopls/internal/cache/check.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/cache/check.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,2214 +0,0 @@ +-// Copyright 2019 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. 
+- +-package cache +- +-import ( +- "bytes" +- "context" +- "crypto/sha256" +- "fmt" +- "go/ast" +- "go/build" +- "go/parser" +- "go/scanner" +- "go/token" +- "go/types" +- "regexp" +- "runtime" +- "slices" +- "sort" +- "strings" +- "sync" +- "sync/atomic" +- +- "golang.org/x/mod/module" +- "golang.org/x/sync/errgroup" +- "golang.org/x/tools/go/ast/astutil" +- "golang.org/x/tools/gopls/internal/bloom" +- "golang.org/x/tools/gopls/internal/cache/metadata" +- "golang.org/x/tools/gopls/internal/cache/parsego" +- "golang.org/x/tools/gopls/internal/cache/typerefs" +- "golang.org/x/tools/gopls/internal/file" +- "golang.org/x/tools/gopls/internal/filecache" +- "golang.org/x/tools/gopls/internal/label" +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/gopls/internal/util/bug" +- "golang.org/x/tools/gopls/internal/util/moremaps" +- "golang.org/x/tools/gopls/internal/util/safetoken" +- "golang.org/x/tools/gopls/internal/util/tokeninternal" +- "golang.org/x/tools/internal/event" +- "golang.org/x/tools/internal/gcimporter" +- "golang.org/x/tools/internal/packagesinternal" +- "golang.org/x/tools/internal/typesinternal" +- "golang.org/x/tools/internal/versions" +-) +- +-type unit = struct{} +- +-// A typeCheckBatch holds data for a logical type-checking operation, which may +-// type check many unrelated packages. +-// +-// It shares state such as parsed files and imports, to optimize type-checking +-// for packages with overlapping dependency graphs. +-type typeCheckBatch struct { +- // handleMu guards _handles, which must only be accessed via addHandles or +- // getHandle. +- // +- // An alternative would be to simply verify that package handles are present +- // on the Snapshot, and access them directly, rather than copying maps for +- // each caller. However, handles are accessed very frequently during type +- // checking, and ordinary go maps are measurably faster than the +- // persistent.Map used to store handles on the snapshot. +- handleMu sync.Mutex +- _handles map[PackageID]*packageHandle +- +- parseCache *parseCache +- fset *token.FileSet // describes all parsed or imported files +- cpulimit chan unit // concurrency limiter for CPU-bound operations +- syntaxPackages *futureCache[PackageID, *Package] // transient cache of in-progress syntax futures +- importPackages *futureCache[PackageID, *types.Package] // persistent cache of imports +- gopackagesdriver bool // for bug reporting: were packages loaded with a driver? +-} +- +-// addHandles is called by each goroutine joining the type check batch, to +-// ensure that the batch has all inputs necessary for type checking. +-func (b *typeCheckBatch) addHandles(handles map[PackageID]*packageHandle) { +- b.handleMu.Lock() +- defer b.handleMu.Unlock() +- for id, ph := range handles { +- assert(ph.state >= validKey, "invalid handle") +- +- if alt, ok := b._handles[id]; !ok || alt.state < ph.state { +- b._handles[id] = ph +- } +- } +-} +- +-// getHandle retrieves the packageHandle for the given id. +-func (b *typeCheckBatch) getHandle(id PackageID) *packageHandle { +- b.handleMu.Lock() +- defer b.handleMu.Unlock() +- return b._handles[id] +-} +- +-// TypeCheck parses and type-checks the specified packages, +-// and returns them in the same order as the ids. +-// The resulting packages' types may belong to different importers, +-// so types from different packages are incommensurable. 
+-// +-// The resulting packages slice always contains len(ids) entries, though some +-// of them may be nil if (and only if) the resulting error is non-nil. +-// +-// An error is returned if any of the requested packages fail to type-check. +-// This is different from having type-checking errors: a failure to type-check +-// indicates context cancellation or otherwise significant failure to perform +-// the type-checking operation. +-// +-// In general, clients should never need to type-checked syntax for an +-// intermediate test variant (ITV) package. Callers should apply +-// RemoveIntermediateTestVariants (or equivalent) before this method, or any +-// of the potentially type-checking methods below. +-func (s *Snapshot) TypeCheck(ctx context.Context, ids ...PackageID) ([]*Package, error) { +- pkgs := make([]*Package, len(ids)) +- post := func(i int, pkg *Package) { +- pkgs[i] = pkg +- } +- return pkgs, s.forEachPackage(ctx, ids, nil, post) +-} +- +-// Package visiting functions used by forEachPackage; see the documentation of +-// forEachPackage for details. +-type ( +- preTypeCheck = func(int, *packageHandle) bool // false => don't type check +- postTypeCheck = func(int, *Package) +-) +- +-// forEachPackage does a pre- and post- order traversal of the packages +-// specified by ids using the provided pre and post functions. +-// +-// The pre func is optional. If set, pre is evaluated after the package +-// handle has been constructed, but before type-checking. If pre returns false, +-// type-checking is skipped for this package handle. +-// +-// post is called with a syntax package after type-checking completes +-// successfully. It is only called if pre returned true. +-// +-// Both pre and post may be called concurrently. +-func (s *Snapshot) forEachPackage(ctx context.Context, ids []PackageID, pre preTypeCheck, post postTypeCheck) error { +- ctx, done := event.Start(ctx, "cache.forEachPackage", label.PackageCount.Of(len(ids))) +- defer done() +- +- var ( +- needIDs []PackageID // ids to type-check +- indexes []int // original index of requested ids +- ) +- +- // Check for existing active packages. +- // +- // Since gopls can't depend on package identity, any instance of the +- // requested package must be ok to return. +- // +- // This is an optimization to avoid redundant type-checking: following +- // changes to an open package many LSP clients send several successive +- // requests for package information for the modified package (semantic +- // tokens, code lens, inlay hints, etc.) +- for i, id := range ids { +- s.mu.Lock() +- ph, ok := s.packages.Get(id) +- s.mu.Unlock() +- if ok && ph.state >= validPackage { +- post(i, ph.pkgData.pkg) +- } else { +- needIDs = append(needIDs, id) +- indexes = append(indexes, i) +- } +- } +- +- if len(needIDs) == 0 { +- return nil // short cut: many call sites do not handle empty ids +- } +- +- b, release := s.acquireTypeChecking() +- defer release() +- +- handles, err := s.getPackageHandles(ctx, needIDs) +- if err != nil { +- return err +- } +- +- // Wrap the pre- and post- funcs to translate indices. +- var pre2 preTypeCheck +- if pre != nil { +- pre2 = func(i int, ph *packageHandle) bool { +- return pre(indexes[i], ph) +- } +- } +- post2 := func(i int, pkg *Package) { +- id := pkg.metadata.ID +- if ph := handles[id]; ph.isOpen && ph.state < validPackage { +- // Cache open type checked packages. 
+- ph = ph.clone() +- ph.pkgData = &packageData{ +- fset: pkg.FileSet(), +- imports: pkg.Types().Imports(), +- pkg: pkg, +- } +- ph.state = validPackage +- +- s.mu.Lock() +- if alt, ok := s.packages.Get(id); !ok || alt.state < ph.state { +- s.packages.Set(id, ph, nil) +- } +- s.mu.Unlock() +- } +- +- post(indexes[i], pkg) +- } +- +- return b.query(ctx, needIDs, pre2, post2, handles) +-} +- +-// acquireTypeChecking joins or starts a concurrent type checking batch. +-// +-// The batch may be queried for package information using [typeCheckBatch.query]. +-// The second result must be called when the batch is no longer needed, to +-// release the resource. +-func (s *Snapshot) acquireTypeChecking() (*typeCheckBatch, func()) { +- s.typeCheckMu.Lock() +- defer s.typeCheckMu.Unlock() +- +- if s.batch == nil { +- assert(s.batchRef == 0, "miscounted type checking") +- s.batch = newTypeCheckBatch(s.view.parseCache, s.view.typ == GoPackagesDriverView) +- } +- s.batchRef++ +- +- return s.batch, func() { +- s.typeCheckMu.Lock() +- defer s.typeCheckMu.Unlock() +- assert(s.batchRef > 0, "miscounted type checking 2") +- s.batchRef-- +- if s.batchRef == 0 { +- s.batch = nil +- } +- } +-} +- +-// newTypeCheckBatch creates a new type checking batch using the provided +-// shared parseCache. +-// +-// If a non-nil importGraph is provided, imports in this graph will be reused. +-func newTypeCheckBatch(parseCache *parseCache, gopackagesdriver bool) *typeCheckBatch { +- return &typeCheckBatch{ +- _handles: make(map[PackageID]*packageHandle), +- parseCache: parseCache, +- fset: fileSetWithBase(reservedForParsing), +- cpulimit: make(chan unit, runtime.GOMAXPROCS(0)), +- syntaxPackages: newFutureCache[PackageID, *Package](false), // don't persist syntax packages +- importPackages: newFutureCache[PackageID, *types.Package](true), // ...but DO persist imports +- gopackagesdriver: gopackagesdriver, +- } +-} +- +-// query executes a traversal of package information in the given typeCheckBatch. +-// For each package in importIDs, the package will be loaded "for import" (sans +-// syntax). +-// +-// For each package in syntaxIDs, the package will be handled following the +-// pre- and post- traversal logic of [Snapshot.forEachPackage]. +-// +-// Package handles must be provided for each package in the forward transitive +-// closure of either importIDs or syntaxIDs. +-// +-// TODO(rfindley): simplify this API by clarifying shared import graph and +-// package handle logic. +-func (b *typeCheckBatch) query(ctx context.Context, syntaxIDs []PackageID, pre preTypeCheck, post postTypeCheck, handles map[PackageID]*packageHandle) error { +- b.addHandles(handles) +- +- // Start a single goroutine for each requested package. +- // +- // Other packages are reached recursively, and will not be evaluated if they +- // are not needed. +- var g errgroup.Group +- for i, id := range syntaxIDs { +- g.Go(func() error { +- if ctx.Err() != nil { +- return ctx.Err() +- } +- return b.handleSyntaxPackage(ctx, i, id, pre, post) +- }) +- } +- return g.Wait() +-} +- +-// TODO(rfindley): re-order the declarations below to read better from top-to-bottom. +- +-// getImportPackage returns the *types.Package to use for importing the +-// package referenced by id. +-// +-// This may be the package produced by type-checking syntax (as in the case +-// where id is in the set of requested IDs), a package loaded from export data, +-// or a package type-checked for import only. 
+-func (b *typeCheckBatch) getImportPackage(ctx context.Context, id PackageID) (pkg *types.Package, err error) { +- return b.importPackages.get(ctx, id, func(ctx context.Context) (*types.Package, error) { +- ph := b.getHandle(id) +- +- // "unsafe" cannot be imported or type-checked. +- // +- // We check PkgPath, not id, as the structure of the ID +- // depends on the build system (in particular, +- // Bazel+gopackagesdriver appears to use something other than +- // "unsafe", though we aren't sure what; even 'go list' can +- // use "p [q.test]" for testing or if PGO is enabled. +- // See golang/go#60890. +- if ph.mp.PkgPath == "unsafe" { +- return types.Unsafe, nil +- } +- +- data, err := filecache.Get(exportDataKind, ph.key) +- if err == filecache.ErrNotFound { +- // No cached export data: type-check as fast as possible. +- return b.checkPackageForImport(ctx, ph) +- } +- if err != nil { +- return nil, fmt.Errorf("failed to read cache data for %s: %v", ph.mp.ID, err) +- } +- return b.importPackage(ctx, ph.mp, data) +- }) +-} +- +-// handleSyntaxPackage handles one package from the ids slice. +-// +-// If type checking occurred while handling the package, it returns the +-// resulting types.Package so that it may be used for importing. +-// +-// handleSyntaxPackage returns (nil, nil) if pre returned false. +-func (b *typeCheckBatch) handleSyntaxPackage(ctx context.Context, i int, id PackageID, pre preTypeCheck, post postTypeCheck) error { +- ph := b.getHandle(id) +- if pre != nil && !pre(i, ph) { +- return nil // skip: not needed +- } +- +- // Check if we have a syntax package stored on ph. +- // +- // This was checked in [Snapshot.forEachPackage], but may have since changed. +- if ph.state >= validPackage { +- post(i, ph.pkgData.pkg) +- return nil +- } +- +- pkg, err := b.getPackage(ctx, ph) +- if err != nil { +- return err +- } +- +- post(i, pkg) +- return nil +-} +- +-// getPackage type checks one [Package] in the batch. +-func (b *typeCheckBatch) getPackage(ctx context.Context, ph *packageHandle) (*Package, error) { +- return b.syntaxPackages.get(ctx, ph.mp.ID, func(ctx context.Context) (*Package, error) { +- // Wait for predecessors. +- // Record imports of this package to avoid redundant work in typesConfig. +- imports := make(map[PackagePath]*types.Package) +- fset := b.fset +- if ph.state >= validImports { +- for _, imp := range ph.pkgData.imports { +- imports[PackagePath(imp.Path())] = imp +- } +- // Reusing imports requires that their positions are mapped by the FileSet. +- fset = tokeninternal.CloneFileSet(ph.pkgData.fset) +- } else { +- var impMu sync.Mutex +- var g errgroup.Group +- for depPath, depID := range ph.mp.DepsByPkgPath { +- g.Go(func() error { +- imp, err := b.getImportPackage(ctx, depID) +- if err == nil { +- impMu.Lock() +- imports[depPath] = imp +- impMu.Unlock() +- } +- return err +- }) +- } +- if err := g.Wait(); err != nil { +- // Failure to import a package should not abort the whole operation. +- // Stop only if the context was cancelled, a likely cause. +- // Import errors will be reported as type diagnostics. +- if ctx.Err() != nil { +- return nil, ctx.Err() +- } +- } +- } +- +- // Wait to acquire a CPU token. +- // +- // Note: it is important to acquire this token only after awaiting +- // predecessors, to avoid starvation. +- select { +- case <-ctx.Done(): +- return nil, ctx.Err() +- case b.cpulimit <- unit{}: +- defer func() { +- <-b.cpulimit // release CPU token +- }() +- } +- +- // Compute the syntax package. 
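
The cpulimit channel above is a counting semaphore sized to GOMAXPROCS, and the token is deliberately taken only after dependencies have been awaited, so waiters do not hold CPU slots while blocked on other work. A rough stand-alone sketch of that scheme, with a made-up cpuSem helper standing in for the real field:

package main

import (
	"context"
	"fmt"
	"runtime"
	"sync"
)

// cpuSem is a buffered-channel semaphore: sending takes a token, receiving
// returns it.
type cpuSem chan struct{}

func (s cpuSem) acquire(ctx context.Context) error {
	select {
	case <-ctx.Done():
		return ctx.Err()
	case s <- struct{}{}:
		return nil
	}
}

func (s cpuSem) release() { <-s }

func main() {
	sem := make(cpuSem, runtime.GOMAXPROCS(0))
	ctx := context.Background()
	var wg sync.WaitGroup
	for i := 0; i < 8; i++ {
		if err := sem.acquire(ctx); err != nil {
			fmt.Println("cancelled:", err)
			break
		}
		wg.Add(1)
		go func(i int) {
			defer wg.Done()
			defer sem.release()
			fmt.Println("type-checking package", i) // stand-in for CPU-bound work
		}(i)
	}
	wg.Wait()
}
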
+- p, err := b.checkPackage(ctx, fset, ph, imports) +- if err != nil { +- return nil, err // e.g. I/O error, cancelled +- } +- +- // Update caches. +- go storePackageResults(ctx, ph, p) // ...and write all packages to disk +- return p, nil +- }) +-} +- +-// storePackageResults serializes and writes information derived from p to the +-// file cache. +-// The context is used only for logging; cancellation does not affect the operation. +-func storePackageResults(ctx context.Context, ph *packageHandle, p *Package) { +- toCache := map[string][]byte{ +- xrefsKind: p.pkg.xrefs(), +- methodSetsKind: p.pkg.methodsets().Encode(), +- testsKind: p.pkg.tests().Encode(), +- diagnosticsKind: encodeDiagnostics(p.pkg.diagnostics), +- } +- +- if p.metadata.PkgPath != "unsafe" { // unsafe cannot be exported +- exportData, err := gcimporter.IExportShallow(p.pkg.fset, p.pkg.types, bug.Reportf) +- if err != nil { +- bug.Reportf("exporting package %v: %v", p.metadata.ID, err) +- } else { +- toCache[exportDataKind] = exportData +- } +- } +- +- for kind, data := range toCache { +- if err := filecache.Set(kind, ph.key, data); err != nil { +- event.Error(ctx, fmt.Sprintf("storing %s data for %s", kind, ph.mp.ID), err) +- } +- } +-} +- +-// Metadata implements the [metadata.Source] interface. +-func (b *typeCheckBatch) Metadata(id PackageID) *metadata.Package { +- ph := b.getHandle(id) +- if ph == nil { +- return nil +- } +- return ph.mp +-} +- +-// importPackage loads the given package from its export data in p.exportData +-// (which must already be populated). +-func (b *typeCheckBatch) importPackage(ctx context.Context, mp *metadata.Package, data []byte) (*types.Package, error) { +- ctx, done := event.Start(ctx, "cache.typeCheckBatch.importPackage", label.Package.Of(string(mp.ID))) +- defer done() +- +- importLookup := importLookup(mp, b) +- +- thisPackage := types.NewPackage(string(mp.PkgPath), string(mp.Name)) +- getPackages := func(items []gcimporter.GetPackagesItem) error { +- for i, item := range items { +- var id PackageID +- var pkg *types.Package +- if item.Path == string(mp.PkgPath) { +- id = mp.ID +- pkg = thisPackage +- +- // debugging issues #60904, #64235 +- if pkg.Name() != item.Name { +- // This would mean that mp.Name != item.Name, so the +- // manifest in the export data of mp.PkgPath is +- // inconsistent with mp.Name. Or perhaps there +- // are duplicate PkgPath items in the manifest? +- if b.gopackagesdriver { +- return bug.Errorf("internal error: package name is %q, want %q (id=%q, path=%q) (see issue #60904) (using GOPACKAGESDRIVER)", +- pkg.Name(), item.Name, id, item.Path) +- } else { +- // There's a package in the export data with the same path as the +- // imported package, but a different name. +- // +- // This is observed to occur (very frequently!) in telemetry, yet +- // we don't yet have a plausible explanation: any self import or +- // circular import should have resulted in a broken import, which +- // can't be referenced by export data. (Any type qualified by the +- // broken import name will be invalid.) +- // +- // However, there are some mechanisms that could potentially be +- // involved: +- // 1. go/types will synthesize package names based on the import +- // path for fake packages (but as mentioned above, I don't think +- // these can be referenced by export data.) +- // 2. Test variants have the same path as non-test variant. Could +- // that somehow be involved? (I don't see how, particularly using +- // the go list driver, but nevertheless it's worth considering.) 
+- // 3. Command-line arguments and main packages may have special +- // handling that we don't fully understand. +- // Try to sort these potential causes into unique stacks, as well +- // as a few other pathological scenarios. +- report := func() error { +- return bug.Errorf("internal error: package name is %q, want %q (id=%q, path=%q) (see issue #60904)", +- pkg.Name(), item.Name, id, item.Path) +- } +- impliedName := "" +- if i := strings.LastIndex(item.Path, "/"); i >= 0 { +- impliedName = item.Path[i+1:] +- } +- switch { +- case pkg.Name() == "": +- return report() +- case item.Name == "": +- return report() +- case metadata.IsCommandLineArguments(mp.ID): +- return report() +- case mp.ForTest != "": +- return report() +- case len(mp.CompiledGoFiles) == 0: +- return report() +- case len(mp.Errors) > 0: +- return report() +- case impliedName != "" && impliedName != string(mp.Name): +- return report() +- case len(mp.CompiledGoFiles) != len(mp.GoFiles): +- return report() +- case mp.Module == nil: +- return report() +- case mp.Name == "main": +- return report() +- default: +- return report() +- } +- } +- } +- } else { +- var alt PackageID +- id, alt = importLookup(PackagePath(item.Path)) +- if alt != "" { +- // Any bug leading to this scenario would have already been reported +- // in importLookup. +- return fmt.Errorf("inconsistent metadata during import: for package path %q, found both IDs %q and %q", item.Path, id, alt) +- } +- var err error +- pkg, err = b.getImportPackage(ctx, id) +- if err != nil { +- return err +- } +- +- // We intentionally duplicate the bug.Errorf calls because +- // telemetry tells us only the program counter, not the message. +- +- // debugging issues #60904, #64235 +- if pkg.Name() != item.Name { +- // This means that, while reading the manifest of the +- // export data of mp.PkgPath, one of its indirect +- // dependencies had a name that differs from the +- // Metadata.Name +- return bug.Errorf("internal error: package name is %q, want %q (id=%q, path=%q) (see issue #60904)", +- pkg.Name(), item.Name, id, item.Path) +- } +- } +- items[i].Pkg = pkg +- +- } +- return nil +- } +- +- // Importing is potentially expensive, and might not encounter cancellations +- // via dependencies (e.g. if they have already been evaluated). +- if ctx.Err() != nil { +- return nil, ctx.Err() +- } +- +- imported, err := gcimporter.IImportShallow(b.fset, getPackages, data, string(mp.PkgPath), bug.Reportf) +- if err != nil { +- return nil, fmt.Errorf("import failed for %q: %v", mp.ID, err) +- } +- return imported, nil +-} +- +-// checkPackageForImport type checks, but skips function bodies and does not +-// record syntax information. +-func (b *typeCheckBatch) checkPackageForImport(ctx context.Context, ph *packageHandle) (*types.Package, error) { +- ctx, done := event.Start(ctx, "cache.typeCheckBatch.checkPackageForImport", label.Package.Of(string(ph.mp.ID))) +- defer done() +- +- onError := func(e error) { +- // Ignore errors for exporting. +- } +- cfg := b.typesConfig(ctx, ph.localInputs, nil, onError) +- cfg.IgnoreFuncBodies = true +- +- // Parse the compiled go files, bypassing the parse cache as packages checked +- // for import are unlikely to get cache hits. Additionally, we can optimize +- // parsing slightly by not passing parser.ParseComments. 
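
checkPackageForImport above boils down to running go/types with IgnoreFuncBodies and a no-op error handler, since importers only need the exported API. The sketch below shows that core idea with the standard library alone; checkForImport and the demo source are illustrative and omit gopls' parse cache, concurrency, and export-data write-back.

package main

import (
	"fmt"
	"go/ast"
	"go/importer"
	"go/parser"
	"go/token"
	"go/types"
)

// checkForImport type-checks files with IgnoreFuncBodies and a discarding
// error handler, which is enough to produce a *types.Package that other
// packages can be checked against.
func checkForImport(fset *token.FileSet, files []*ast.File, path string) (*types.Package, error) {
	cfg := &types.Config{
		IgnoreFuncBodies: true,
		Error:            func(err error) {}, // ignore errors, as with onError above
		Importer:         importer.Default(),
	}
	return cfg.Check(path, fset, files, nil)
}

func main() {
	const src = `package demo

type Point struct{ X, Y int }

func (p Point) Norm2() int { return p.X*p.X + p.Y*p.Y }
`
	fset := token.NewFileSet()
	f, err := parser.ParseFile(fset, "demo.go", src, parser.SkipObjectResolution)
	if err != nil {
		panic(err)
	}
	pkg, err := checkForImport(fset, []*ast.File{f}, "example.com/demo")
	fmt.Println(pkg.Name(), pkg.Scope().Names(), err) // demo [Point] <nil>
}
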
+- pgfs := make([]*parsego.File, len(ph.localInputs.compiledGoFiles)) +- { +- var group errgroup.Group +- // Set an arbitrary concurrency limit; we want some parallelism but don't +- // need GOMAXPROCS, as there is already a lot of concurrency among calls to +- // checkPackageForImport. +- // +- // TODO(rfindley): is there a better way to limit parallelism here? We could +- // have a global limit on the type-check batch, but would have to be very +- // careful to avoid starvation. +- group.SetLimit(4) +- for i, fh := range ph.localInputs.compiledGoFiles { +- group.Go(func() error { +- pgf, err := parseGoImpl(ctx, b.fset, fh, parser.SkipObjectResolution, false) +- pgfs[i] = pgf +- return err +- }) +- } +- if err := group.Wait(); err != nil { +- return nil, err // cancelled, or catastrophic error (e.g. missing file) +- } +- } +- pkg := types.NewPackage(string(ph.localInputs.pkgPath), string(ph.localInputs.name)) +- check := types.NewChecker(cfg, b.fset, pkg, nil) +- +- files := make([]*ast.File, len(pgfs)) +- for i, pgf := range pgfs { +- files[i] = pgf.File +- } +- +- // Type checking is expensive, and we may not have encountered cancellations +- // via parsing (e.g. if we got nothing but cache hits for parsed files). +- if ctx.Err() != nil { +- return nil, ctx.Err() +- } +- +- _ = check.Files(files) // ignore errors +- +- // If the context was cancelled, we may have returned a ton of transient +- // errors to the type checker. Swallow them. +- if ctx.Err() != nil { +- return nil, ctx.Err() +- } +- +- // Asynchronously record export data. +- go func() { +- exportData, err := gcimporter.IExportShallow(b.fset, pkg, bug.Reportf) +- if err != nil { +- // Internal error; the stack will have been reported via +- // bug.Reportf within IExportShallow, so there's not much +- // to do here (issue #71067). +- event.Error(ctx, "IExportShallow failed", err, label.Package.Of(string(ph.mp.ID))) +- return +- } +- if err := filecache.Set(exportDataKind, ph.key, exportData); err != nil { +- event.Error(ctx, fmt.Sprintf("storing export data for %s", ph.mp.ID), err) +- } +- }() +- return pkg, nil +-} +- +-// importLookup returns a function that may be used to look up a package ID for +-// a given package path, based on the forward transitive closure of the initial +-// package (id). +-// +-// If the second result is non-empty, it is another ID discovered in the import +-// graph for the same package path. This means the import graph is +-// incoherent--see #63822 and the long comment below. +-// +-// The resulting function is not concurrency safe. +-func importLookup(mp *metadata.Package, source metadata.Source) func(PackagePath) (id, altID PackageID) { +- assert(mp != nil, "nil metadata") +- +- // This function implements an incremental depth first scan through the +- // package imports. Previous implementations of import mapping built the +- // entire PackagePath->PackageID mapping eagerly, but that resulted in a +- // large amount of unnecessary work: most imports are either directly +- // imported, or found through a shallow scan. +- +- // impMap memoizes the lookup of package paths. +- impMap := map[PackagePath]PackageID{ +- mp.PkgPath: mp.ID, +- } +- +- // altIDs records alternative IDs for the given path, to report inconsistent +- // metadata. +- var altIDs map[PackagePath]PackageID +- +- // pending is a FIFO queue of package metadata that has yet to have its +- // dependencies fully scanned. +- // Invariant: all entries in pending are already mapped in impMap. 
+- pending := []*metadata.Package{mp} +- +- // search scans children the next package in pending, looking for pkgPath. +- // Invariant: whenever search is called, pkgPath is not yet mapped. +- search := func(pkgPath PackagePath) (id PackageID, found bool) { +- pkg := pending[0] +- pending = pending[1:] +- for depPath, depID := range pkg.DepsByPkgPath { +- if prevID, ok := impMap[depPath]; ok { +- // debugging #63822 +- if prevID != depID { +- if altIDs == nil { +- altIDs = make(map[PackagePath]PackageID) +- } +- if _, ok := altIDs[depPath]; !ok { +- altIDs[depPath] = depID +- } +- prev := source.Metadata(prevID) +- curr := source.Metadata(depID) +- switch { +- case prev == nil || curr == nil: +- bug.Reportf("inconsistent view of dependencies (missing dep)") +- case prev.ForTest != curr.ForTest: +- // This case is unfortunately understood to be possible. +- // +- // To explain this, consider a package a_test testing the package +- // a, and for brevity denote by b' the intermediate test variant of +- // the package b, which is created for the import graph of a_test, +- // if b imports a. +- // +- // Now imagine that we have the following import graph, where +- // higher packages import lower ones. +- // +- // a_test +- // / \ +- // b' c +- // / \ / +- // a d +- // +- // In this graph, there is one intermediate test variant b', +- // because b imports a and so b' must hold the test variant import. +- // +- // Now, imagine that an on-disk change (perhaps due to a branch +- // switch) affects the above import graph such that d imports a. +- // +- // a_test +- // / \ +- // b' c* +- // / \ / +- // / d* +- // a---/ +- // +- // In this case, c and d should really be intermediate test +- // variants, because they reach a. However, suppose that gopls does +- // not know this yet (as indicated by '*'). +- // +- // Now suppose that the metadata of package c is invalidated, for +- // example due to a change in an unrelated import or an added file. +- // This will invalidate the metadata of c and a_test (but NOT b), +- // and now gopls observes this graph: +- // +- // a_test +- // / \ +- // b' c' +- // /| | +- // / d d' +- // a-----/ +- // +- // That is: a_test now sees c', which sees d', but since b was not +- // invalidated, gopls still thinks that b' imports d (not d')! +- // +- // The problem, of course, is that gopls never observed the change +- // to d, which would have invalidated b. This may be due to racing +- // file watching events, in which case the problem should +- // self-correct when gopls sees the change to d, or it may be due +- // to d being outside the coverage of gopls' file watching glob +- // patterns, or it may be due to buggy or entirely absent +- // client-side file watching. +- // +- // TODO(rfindley): fix this, one way or another. It would be hard +- // or impossible to repair gopls' state here, during type checking. +- // However, we could perhaps reload metadata in Snapshot.load until +- // we achieve a consistent state, or better, until the loaded state +- // is consistent with our view of the filesystem, by making the Go +- // command report digests of the files it reads. Both of those are +- // tricker than they may seem, and have significant performance +- // implications. 
+- default: +- bug.Reportf("inconsistent view of dependencies") +- } +- } +- continue +- } +- impMap[depPath] = depID +- +- dep := source.Metadata(depID) +- assert(dep != nil, "missing dep metadata") +- +- pending = append(pending, dep) +- if depPath == pkgPath { +- // Don't return early; finish processing pkg's deps. +- id = depID +- found = true +- } +- } +- return id, found +- } +- +- return func(pkgPath PackagePath) (id, altID PackageID) { +- if id, ok := impMap[pkgPath]; ok { +- return id, altIDs[pkgPath] +- } +- for len(pending) > 0 { +- if id, found := search(pkgPath); found { +- return id, altIDs[pkgPath] +- } +- } +- return "", "" +- } +-} +- +-// A packageState is the state of a [packageHandle]; see below for details. +-type packageState uint8 +- +-const ( +- validMetadata packageState = iota // the package has valid metadata +- validLocalData // local package files have been analyzed +- validKey // dependencies have been analyzed, and key produced +- validImports // pkgData.fset and pkgData.imports are valid +- validPackage // pkgData.pkg is valid +-) +- +-// A packageHandle holds information derived from a metadata.Package, and +-// records its degree of validity as state changes occur: successful analysis +-// causes the state to progress; invalidation due to changes causes it to +-// regress. +-// +-// In the initial state (validMetadata), all we know is the metadata for the +-// package itself. This is the lowest state, and it cannot become invalid +-// because the metadata for a given snapshot never changes. (Each handle is +-// implicitly associated with a Snapshot.) +-// +-// After the files of the package have been read (validLocalData), we can +-// perform computations that are local to that package, such as parsing, or +-// building the symbol reference graph (SRG). This information is invalidated +-// by a change to any file in the package. The local information is thus +-// sufficient to form a cache key for saved parsed trees or the SRG. +-// +-// Once all dependencies have been analyzed (validKey), we can type-check the +-// package. This information is invalidated by any change to the package +-// itself, or to any dependency that is transitively reachable through the SRG. +-// The cache key for saved type information must thus incorporate information +-// from all reachable dependencies. This reachability analysis implements what +-// we sometimes refer to as "precise pruning", or fine-grained invalidation: +-// https://go.dev/blog/gopls-scalability#invalidation +-// +-// After type checking, package information for open packages is cached in the +-// pkgData field (validPackage), to optimize subsequent requests oriented +-// around open files. +-// +-// Following a change, the packageHandle is cloned in the new snapshot with a +-// new state set to its least known valid state, as described above: if package +-// files changed, it is reset to validMetadata; if dependencies changed, it is +-// reset to validLocalData. However, the derived data from its previous state +-// is not yet removed, as keys may not have changed after they are reevaluated, +-// in which case we can avoid recomputing the derived data. In particular, if +-// the cache key did not change, the pkgData field (if set) remains valid. As a +-// special case, if the cache key did change, but none of the keys of +-// dependencies changed, the pkgData.fset and pkgData.imports fields are still +-// valid, though the pkgData.pkg field is not (validImports). 
+-// +-// See [packageHandleBuilder.evaluatePackageHandle] for more details of the +-// reevaluation algorithm. +-// +-// packageHandles are immutable once they are stored in the Snapshot.packages +-// map: any changes to packageHandle fields evaluatePackageHandle must be made +-// to a cloned packageHandle, and inserted back into Snapshot.packages. Data +-// referred to by the packageHandle may be shared by multiple clones, and so +-// referents must not be mutated. +-type packageHandle struct { +- mp *metadata.Package +- +- // state indicates which data below are still valid. +- state packageState +- +- // Local data: +- +- // loadDiagnostics memoizes the result of processing error messages from +- // go/packages (i.e. `go list`). +- // +- // These are derived from metadata using a snapshot. Since they depend on +- // file contents (for translating positions), they should theoretically be +- // invalidated by file changes, but historically haven't been. In practice +- // they are rare and indicate a fundamental error that needs to be corrected +- // before development can continue, so it may not be worth significant +- // engineering effort to implement accurate invalidation here. +- // +- // TODO(rfindley): loadDiagnostics are out of place here, as they don't +- // directly relate to type checking. We should perhaps move the caching of +- // load diagnostics to an entirely separate component, so that Packages need +- // only be concerned with parsing and type checking. +- // (Nevertheless, since the lifetime of load diagnostics matches that of the +- // Metadata, it is convenient to memoize them here.) +- loadDiagnostics []*Diagnostic +- // localInputs holds all local type-checking localInputs, excluding +- // dependencies. +- localInputs *typeCheckInputs +- // isOpen reports whether the package has any open files. +- isOpen bool +- // localKey is a hash of localInputs. +- localKey file.Hash +- // refs is the result of syntactic dependency analysis produced by the +- // typerefs package. Derived from localInputs. +- refs map[string][]typerefs.Symbol +- +- // Keys, computed through reachability analysis of dependencies. +- +- // depKeys records the key of each dependency that was used to calculate the +- // key below. If state < validKey, we must re-check that each still matches. +- depKeys map[PackageID]file.Hash +- +- // reachable is used to filter reachable package paths for go/analysis fact +- // importing. +- reachable *bloom.Filter +- +- // key is the hashed key for the package. +- // +- // It includes the all bits of the transitive closure of +- // dependencies's sources. +- key file.Hash +- +- // pkgData caches data derived from type checking the package. +- // This data is set during [Snapshot.forEachPackage], and may be partially +- // invalidated in [packageHandleBuilder.evaluatePackageHandle]. +- // +- // If state == validPackage, all fields of pkgData are valid. If state == +- // validImports, only fset and imports are valid. +- pkgData *packageData +-} +- +-// packageData holds the (possibly partial) result of type checking this +-// package. See the pkgData field of [packageHandle]. +-// +-// packageData instances are immutable. +-type packageData struct { +- fset *token.FileSet // pkg.FileSet() +- imports []*types.Package // pkg.Types().Imports() +- pkg *Package // pkg, if state==validPackage; nil in lower states +-} +- +-// clone returns a shallow copy of the receiver. 
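
The state ladder described above only ever moves forward under successful analysis and is pushed back down by invalidation, never the reverse. A compact sketch of that monotonic progression, using a simplified handle type rather than the real packageHandle:

package main

import "fmt"

type state uint8

const (
	validMetadata state = iota
	validLocalData
	validKey
	validImports
	validPackage
)

type handle struct{ state state }

// advance raises the state after successful analysis; it never lowers it.
func (h *handle) advance(to state) {
	if to > h.state {
		h.state = to
	}
}

// invalidate lowers the state in response to a change; it never raises it.
func (h *handle) invalidate(to state) {
	if to < h.state {
		h.state = to
	}
}

func main() {
	h := &handle{state: validMetadata}
	h.advance(validPackage)      // fully type-checked and cached
	h.invalidate(validLocalData) // a dependency changed: key must be recomputed
	fmt.Println(h.state == validLocalData) // true
}
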
+-func (ph *packageHandle) clone() *packageHandle { +- clone := *ph +- return &clone +-} +- +-// getPackageHandles gets package handles for all given ids and their +-// dependencies, recursively. The resulting [packageHandle] values are fully +-// evaluated (their state will be at least validKey). +-func (s *Snapshot) getPackageHandles(ctx context.Context, ids []PackageID) (map[PackageID]*packageHandle, error) { +- // perform a two-pass traversal. +- // +- // On the first pass, build up a bidirectional graph of handle nodes, and collect leaves. +- // Then build package handles from bottom up. +- b := &packageHandleBuilder{ +- s: s, +- transitiveRefs: make(map[typerefs.IndexID]*partialRefs), +- nodes: make(map[typerefs.IndexID]*handleNode), +- } +- +- meta := s.MetadataGraph() +- +- var leaves []*handleNode +- var makeNode func(*handleNode, PackageID) *handleNode +- makeNode = func(from *handleNode, id PackageID) *handleNode { +- idxID := s.view.pkgIndex.IndexID(id) +- n, ok := b.nodes[idxID] +- if !ok { +- mp := meta.Packages[id] +- if mp == nil { +- panic(fmt.Sprintf("nil metadata for %q", id)) +- } +- n = &handleNode{ +- mp: mp, +- idxID: idxID, +- unfinishedSuccs: int32(len(mp.DepsByPkgPath)), +- } +- if n.unfinishedSuccs == 0 { +- leaves = append(leaves, n) +- } else { +- n.succs = make(map[PackageID]*handleNode, n.unfinishedSuccs) +- } +- b.nodes[idxID] = n +- for _, depID := range mp.DepsByPkgPath { +- n.succs[depID] = makeNode(n, depID) +- } +- } +- // Add edge from predecessor. +- if from != nil { +- n.preds = append(n.preds, from) +- } +- return n +- } +- for _, id := range ids { +- if ctx.Err() != nil { +- return nil, ctx.Err() +- } +- makeNode(nil, id) +- } +- +- g, ctx := errgroup.WithContext(ctx) +- +- // files are preloaded, so building package handles is CPU-bound. +- // +- // Note that we can't use g.SetLimit, as that could result in starvation: +- // g.Go blocks until a slot is available, and so all existing goroutines +- // could be blocked trying to enqueue a predecessor. +- limiter := make(chan unit, runtime.GOMAXPROCS(0)) +- +- var enqueue func(*handleNode) +- enqueue = func(n *handleNode) { +- g.Go(func() error { +- limiter <- unit{} +- defer func() { <-limiter }() +- +- if ctx.Err() != nil { +- return ctx.Err() +- } +- +- if err := b.evaluatePackageHandle(ctx, n); err != nil { +- return err +- } +- +- for _, pred := range n.preds { +- if atomic.AddInt32(&pred.unfinishedSuccs, -1) == 0 { +- enqueue(pred) +- } +- } +- return nil +- }) +- } +- for _, leaf := range leaves { +- enqueue(leaf) +- } +- +- if err := g.Wait(); err != nil { +- return nil, err +- } +- +- // Copy handles into the result map. +- handles := make(map[PackageID]*packageHandle, len(b.nodes)) +- for _, v := range b.nodes { +- assert(v.ph != nil, "nil handle") +- handles[v.mp.ID] = v.ph +- } +- +- return handles, nil +-} +- +-// A packageHandleBuilder computes a batch of packageHandles concurrently, +-// sharing computed transitive reachability sets used to compute package keys. +-type packageHandleBuilder struct { +- s *Snapshot +- +- // nodes are assembled synchronously. +- nodes map[typerefs.IndexID]*handleNode +- +- // transitiveRefs is incrementally evaluated as package handles are built. +- transitiveRefsMu sync.Mutex +- transitiveRefs map[typerefs.IndexID]*partialRefs // see getTransitiveRefs +-} +- +-// A handleNode represents a to-be-computed packageHandle within a graph of +-// predecessors and successors. +-// +-// It is used to implement a bottom-up construction of packageHandles. 
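
getPackageHandles above evaluates the dependency graph bottom-up: each node counts its unfinished successors, leaves start first, and a predecessor is scheduled once its last dependency finishes. The stand-alone sketch below captures only that scheduling skeleton; node, evaluateAll, and the three-package graph are invented, and the real code additionally bounds concurrency and propagates errors via errgroup.

package main

import (
	"fmt"
	"sync"
	"sync/atomic"
)

type node struct {
	id              string
	preds           []*node // packages that import this one
	unfinishedSuccs int32   // dependencies not yet evaluated
}

// evaluateAll runs evaluate on every node, dependencies first.
func evaluateAll(leaves []*node, evaluate func(*node)) {
	var wg sync.WaitGroup
	var enqueue func(*node)
	enqueue = func(n *node) {
		wg.Add(1)
		go func() {
			defer wg.Done()
			evaluate(n)
			for _, p := range n.preds {
				if atomic.AddInt32(&p.unfinishedSuccs, -1) == 0 {
					enqueue(p)
				}
			}
		}()
	}
	for _, leaf := range leaves {
		enqueue(leaf)
	}
	wg.Wait()
}

func main() {
	// a -> b -> c ("->" means "imports"), so c is the only leaf.
	c := &node{id: "c"}
	b := &node{id: "b", unfinishedSuccs: 1}
	a := &node{id: "a", unfinishedSuccs: 1}
	c.preds = []*node{b}
	b.preds = []*node{a}

	var mu sync.Mutex
	var order []string
	evaluateAll([]*node{c}, func(n *node) {
		mu.Lock()
		order = append(order, n.id)
		mu.Unlock()
	})
	fmt.Println(order) // [c b a]: dependencies are always evaluated first
}
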
+-type handleNode struct { +- mp *metadata.Package +- idxID typerefs.IndexID +- ph *packageHandle +- preds []*handleNode +- succs map[PackageID]*handleNode +- unfinishedSuccs int32 +-} +- +-// partialRefs maps names declared by a given package to their set of +-// transitive references. +-// +-// If complete is set, refs is known to be complete for the package in +-// question. Otherwise, it may only map a subset of all names declared by the +-// package. +-type partialRefs struct { +- refs map[string]*typerefs.PackageSet +- complete bool +-} +- +-// getTransitiveRefs gets or computes the set of transitively reachable +-// packages for each exported name in the package specified by id. +-// +-// The operation may fail if building a predecessor failed. If and only if this +-// occurs, the result will be nil. +-func (b *packageHandleBuilder) getTransitiveRefs(pkgID PackageID) map[string]*typerefs.PackageSet { +- b.transitiveRefsMu.Lock() +- defer b.transitiveRefsMu.Unlock() +- +- idxID := b.s.view.pkgIndex.IndexID(pkgID) +- trefs, ok := b.transitiveRefs[idxID] +- if !ok { +- trefs = &partialRefs{ +- refs: make(map[string]*typerefs.PackageSet), +- } +- b.transitiveRefs[idxID] = trefs +- } +- +- if !trefs.complete { +- trefs.complete = true +- node := b.nodes[idxID] +- for name := range node.ph.refs { +- if ('A' <= name[0] && name[0] <= 'Z') || token.IsExported(name) { +- if _, ok := trefs.refs[name]; !ok { +- pkgs := b.s.view.pkgIndex.NewSet() +- for _, sym := range node.ph.refs[name] { +- pkgs.Add(sym.Package) +- otherSet := b.getOneTransitiveRefLocked(sym) +- pkgs.Union(otherSet) +- } +- trefs.refs[name] = pkgs +- } +- } +- } +- } +- +- return trefs.refs +-} +- +-// getOneTransitiveRefLocked computes the full set packages transitively +-// reachable through the given sym reference. +-// +-// It may return nil if the reference is invalid (i.e. the referenced name does +-// not exist). +-func (b *packageHandleBuilder) getOneTransitiveRefLocked(sym typerefs.Symbol) *typerefs.PackageSet { +- assert(token.IsExported(sym.Name), "expected exported symbol") +- +- trefs := b.transitiveRefs[sym.Package] +- if trefs == nil { +- trefs = &partialRefs{ +- refs: make(map[string]*typerefs.PackageSet), +- complete: false, +- } +- b.transitiveRefs[sym.Package] = trefs +- } +- +- pkgs, ok := trefs.refs[sym.Name] +- if ok && pkgs == nil { +- // See below, where refs is set to nil before recursing. +- bug.Reportf("cycle detected to %q in reference graph", sym.Name) +- } +- +- // Note that if (!ok && trefs.complete), the name does not exist in the +- // referenced package, and we should not write to trefs as that may introduce +- // a race. +- if !ok && !trefs.complete { +- n := b.nodes[sym.Package] +- if n == nil { +- // We should always have IndexID in our node set, because symbol references +- // should only be recorded for packages that actually exist in the import graph. +- // +- // However, it is not easy to prove this (typerefs are serialized and +- // deserialized), so make this code temporarily defensive while we are on a +- // point release. +- // +- // TODO(rfindley): in the future, we should turn this into an assertion. +- bug.Reportf("missing reference to package %s", b.s.view.pkgIndex.PackageID(sym.Package)) +- return nil +- } +- +- // Break cycles. This is perhaps overly defensive as cycles should not +- // exist at this point: metadata cycles should have been broken at load +- // time, and intra-package reference cycles should have been contracted by +- // the typerefs algorithm. 
+- // +- // See the "cycle detected" bug report above. +- trefs.refs[sym.Name] = nil +- +- pkgs := b.s.view.pkgIndex.NewSet() +- for _, sym2 := range n.ph.refs[sym.Name] { +- pkgs.Add(sym2.Package) +- otherSet := b.getOneTransitiveRefLocked(sym2) +- pkgs.Union(otherSet) +- } +- trefs.refs[sym.Name] = pkgs +- } +- +- return pkgs +-} +- +-// evaluatePackageHandle recomputes the derived information in the package handle. +-// On success, the handle's state is validKey. +-// +-// evaluatePackageHandle must only be called from getPackageHandles. +-func (b *packageHandleBuilder) evaluatePackageHandle(ctx context.Context, n *handleNode) (err error) { +- b.s.mu.Lock() +- ph, hit := b.s.packages.Get(n.mp.ID) +- b.s.mu.Unlock() +- +- defer func() { +- if err == nil { +- assert(ph.state >= validKey, "invalid handle") +- +- // Record the now valid key in the snapshot. +- // There may be a race, so avoid the write if the recorded handle is +- // already valid. +- b.s.mu.Lock() +- if alt, ok := b.s.packages.Get(n.mp.ID); !ok || alt.state < ph.state { +- b.s.packages.Set(n.mp.ID, ph, nil) +- } else { +- ph = alt +- } +- b.s.mu.Unlock() +- +- // Initialize n.ph. +- n.ph = ph +- } +- }() +- +- if hit && ph.state >= validKey { +- return nil // already valid +- } else { +- // We'll need to update the package handle. Since this could happen +- // concurrently, make a copy. +- if hit { +- ph = ph.clone() // state < validKey +- } else { +- ph = &packageHandle{ +- mp: n.mp, +- state: validMetadata, +- } +- } +- } +- +- // Invariant: ph is either +- // - a new handle in state validMetadata, or +- // - a clone of an existing handle in state validMetadata or validLocalData. +- +- // State transition: validMetadata -> validLocalInputs. +- localKeyChanged := false +- if ph.state < validLocalData { +- prevLocalKey := ph.localKey // may be zero +- // No package handle: read and analyze the package syntax. +- inputs, err := b.s.typeCheckInputs(ctx, n.mp) +- if err != nil { +- return err +- } +- refs, err := b.s.typerefs(ctx, n.mp, inputs.compiledGoFiles) +- if err != nil { +- return err +- } +- ph.loadDiagnostics = computeLoadDiagnostics(ctx, b.s, n.mp) +- ph.localInputs = inputs +- +- checkOpen: +- for _, files := range [][]file.Handle{inputs.goFiles, inputs.compiledGoFiles} { +- for _, fh := range files { +- if _, ok := fh.(*overlay); ok { +- ph.isOpen = true +- break checkOpen +- } +- } +- } +- if !ph.isOpen { +- // ensure we don't hold data for closed packages +- ph.pkgData = nil +- } +- ph.localKey = localPackageKey(inputs) +- ph.refs = refs +- ph.state = validLocalData +- localKeyChanged = ph.localKey != prevLocalKey +- } +- +- assert(ph.state == validLocalData, "unexpected handle state") +- +- // State transition: validLocalInputs -> validKey +- +- // Check if any dependencies have actually changed. +- depsChanged := true +- if ph.depKeys != nil { // ph was previously evaluated +- depsChanged = len(ph.depKeys) != len(n.succs) +- if !depsChanged { +- for id, succ := range n.succs { +- oldKey, ok := ph.depKeys[id] +- assert(ok, "missing dep") +- if oldKey != succ.ph.key { +- depsChanged = true +- break +- } +- } +- } +- } +- +- // Optimization: if the local package information did not change, nor did any +- // of the dependencies, we don't need to re-run the reachability algorithm. +- // +- // Concretely: suppose A -> B -> C -> D, where '->' means "imports". 
If I +- // type in a function body of D, I will probably invalidate types in D that C +- // uses, because positions change, and therefore the package key of C will +- // change. But B probably doesn't reach any types in D, and therefore the +- // package key of B will not change. We still need to re-run the reachability +- // algorithm on B to confirm. But if the key of B did not change, we don't +- // even need to run the reachability algorithm on A. +- if !localKeyChanged && !depsChanged { +- ph.state = validKey +- } +- +- keyChanged := false +- if ph.state < validKey { +- prevKey := ph.key +- +- // If we get here, it must be the case that deps have changed, so we must +- // run the reachability algorithm. +- ph.depKeys = make(map[PackageID]file.Hash) +- +- // See the typerefs package: the reachable set of packages is defined to be +- // the set of packages containing syntax that is reachable through the +- // symbol reference graph starting at the exported symbols in the +- // dependencies of ph. +- reachable := b.s.view.pkgIndex.NewSet() +- for depID, succ := range n.succs { +- ph.depKeys[depID] = succ.ph.key +- reachable.Add(succ.idxID) +- trefs := b.getTransitiveRefs(succ.mp.ID) +- assert(trefs != nil, "nil trefs") +- for _, set := range trefs { +- reachable.Union(set) +- } +- } +- +- // Collect reachable nodes. +- var reachableNodes []*handleNode +- // In the presence of context cancellation, any package may be missing. +- // We need all dependencies to produce a key. +- reachable.Elems(func(id typerefs.IndexID) { +- dh := b.nodes[id] +- if dh == nil { +- // Previous code reported an error (not a bug) here. +- bug.Reportf("missing reachable node for %q", id) +- } else { +- reachableNodes = append(reachableNodes, dh) +- } +- }) +- +- // Sort for stability. +- sort.Slice(reachableNodes, func(i, j int) bool { +- return reachableNodes[i].mp.ID < reachableNodes[j].mp.ID +- }) +- +- // Key is the hash of the local key of this package, and the local key of +- // all reachable packages. +- depHasher := sha256.New() +- depHasher.Write(ph.localKey[:]) +- reachablePaths := make([]string, len(reachableNodes)) +- for i, dh := range reachableNodes { +- depHasher.Write(dh.ph.localKey[:]) +- reachablePaths[i] = string(dh.ph.mp.PkgPath) +- } +- depHasher.Sum(ph.key[:0]) +- ph.reachable = bloom.NewFilter(reachablePaths) +- ph.state = validKey +- keyChanged = ph.key != prevKey +- } +- +- assert(ph.state == validKey, "unexpected handle state") +- +- // Validate ph.pkgData, upgrading state if the package or its imports are +- // still valid. +- if ph.pkgData != nil { +- pkgData := *ph.pkgData // make a copy +- ph.pkgData = &pkgData +- ph.state = validPackage +- if keyChanged || ph.pkgData.pkg == nil { +- ph.pkgData.pkg = nil // ensure we don't hold on to stale packages +- ph.state = validImports +- } +- if depsChanged { +- ph.pkgData = nil +- ph.state = validKey +- } +- } +- +- // Postcondition: state >= validKey +- +- return nil +-} +- +-// typerefs returns typerefs for the package described by m and cgfs, after +-// either computing it or loading it from the file cache. 
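
The key computation at the end of evaluatePackageHandle hashes the package's own local key together with the local keys of everything reachable through the symbol reference graph, in a stable order, so the key changes exactly when a relevant input changes. A minimal sketch of that derivation with made-up inputs:

package main

import (
	"crypto/sha256"
	"fmt"
	"sort"
)

type hash = [sha256.Size]byte

// packageKey hashes this package's local key together with the local keys
// of every reachable package, visited in sorted order, mirroring the
// depHasher logic above.
func packageKey(local hash, reachable map[string]hash) hash {
	ids := make([]string, 0, len(reachable))
	for id := range reachable {
		ids = append(ids, id)
	}
	sort.Strings(ids) // stable ordering, like the sort over reachableNodes

	h := sha256.New()
	h.Write(local[:])
	for _, id := range ids {
		k := reachable[id]
		h.Write(k[:])
	}
	var key hash
	h.Sum(key[:0])
	return key
}

func main() {
	local := sha256.Sum256([]byte("local inputs of pkg a"))
	reachable := map[string]hash{
		"b": sha256.Sum256([]byte("local inputs of pkg b")),
		"c": sha256.Sum256([]byte("local inputs of pkg c")),
	}
	key := packageKey(local, reachable)
	fmt.Printf("%x\n", key[:8]) // changes iff any local key above changes
}
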
+-func (s *Snapshot) typerefs(ctx context.Context, mp *metadata.Package, cgfs []file.Handle) (map[string][]typerefs.Symbol, error) { +- imports := make(map[ImportPath]*metadata.Package) +- for impPath, id := range mp.DepsByImpPath { +- if id != "" { +- imports[impPath] = s.Metadata(id) +- } +- } +- +- data, err := s.typerefData(ctx, mp.ID, imports, cgfs) +- if err != nil { +- return nil, err +- } +- classes := typerefs.Decode(s.view.pkgIndex, data) +- refs := make(map[string][]typerefs.Symbol) +- for _, class := range classes { +- for _, decl := range class.Decls { +- refs[decl] = class.Refs +- } +- } +- return refs, nil +-} +- +-// typerefData retrieves encoded typeref data from the filecache, or computes it on +-// a cache miss. +-func (s *Snapshot) typerefData(ctx context.Context, id PackageID, imports map[ImportPath]*metadata.Package, cgfs []file.Handle) ([]byte, error) { +- key := typerefsKey(id, imports, cgfs) +- if data, err := filecache.Get(typerefsKind, key); err == nil { +- return data, nil +- } else if err != filecache.ErrNotFound { +- bug.Reportf("internal error reading typerefs data: %v", err) +- // Unexpected error: treat as cache miss, and fall through. +- } +- +- pgfs, err := s.view.parseCache.parseFiles(ctx, token.NewFileSet(), parsego.Full&^parser.ParseComments, true, cgfs...) +- if err != nil { +- return nil, err +- } +- data := typerefs.Encode(pgfs, imports) +- +- // Store the resulting data in the cache. +- go func() { +- if err := filecache.Set(typerefsKind, key, data); err != nil { +- event.Error(ctx, fmt.Sprintf("storing typerefs data for %s", id), err) +- } +- }() +- +- return data, nil +-} +- +-// typerefsKey produces a key for the reference information produced by the +-// typerefs package. +-func typerefsKey(id PackageID, imports map[ImportPath]*metadata.Package, compiledGoFiles []file.Handle) file.Hash { +- hasher := sha256.New() +- +- fmt.Fprintf(hasher, "typerefs: %s\n", id) +- +- for importPath, imp := range moremaps.Sorted(imports) { +- // TODO(rfindley): strength reduce the typerefs.Export API to guarantee +- // that it only depends on these attributes of dependencies. +- fmt.Fprintf(hasher, "import %s %s %s", importPath, imp.ID, imp.Name) +- } +- +- fmt.Fprintf(hasher, "compiledGoFiles: %d\n", len(compiledGoFiles)) +- for _, fh := range compiledGoFiles { +- fmt.Fprintln(hasher, fh.Identity()) +- } +- +- var hash [sha256.Size]byte +- hasher.Sum(hash[:0]) +- return hash +-} +- +-// typeCheckInputs contains the inputs of a call to typeCheckImpl, which +-// type-checks a package. +-// +-// Part of the purpose of this type is to keep type checking in-sync with the +-// package handle key, by explicitly identifying the inputs to type checking. +-type typeCheckInputs struct { +- id PackageID +- +- // Used for type checking: +- pkgPath PackagePath +- name PackageName +- goFiles, compiledGoFiles []file.Handle +- sizes types.Sizes +- depsByImpPath map[ImportPath]PackageID +- goVersion string // packages.Module.GoVersion, e.g. "1.18" +- +- // Used for type check diagnostics: +- // TODO(rfindley): consider storing less data in gobDiagnostics, and +- // interpreting each diagnostic in the context of a fixed set of options. +- // Then these fields need not be part of the type checking inputs. +- supportsRelatedInformation bool +- linkTarget string +- viewType ViewType +-} +- +-func (s *Snapshot) typeCheckInputs(ctx context.Context, mp *metadata.Package) (*typeCheckInputs, error) { +- // Read both lists of files of this package. 
+- // +- // Parallelism is not necessary here as the files will have already been +- // pre-read at load time. +- // +- // goFiles aren't presented to the type checker--nor +- // are they included in the key, unsoundly--but their +- // syntax trees are available from (*pkg).File(URI). +- // TODO(adonovan): consider parsing them on demand? +- // The need should be rare. +- goFiles, err := readFiles(ctx, s, mp.GoFiles) +- if err != nil { +- return nil, err +- } +- compiledGoFiles, err := readFiles(ctx, s, mp.CompiledGoFiles) +- if err != nil { +- return nil, err +- } +- +- goVersion := "" +- if mp.Module != nil && mp.Module.GoVersion != "" { +- goVersion = mp.Module.GoVersion +- } else { +- // Fall back on the go version implied by the ambient Go command. +- goVersion = fmt.Sprintf("1.%d", s.View().GoVersion()) +- } +- +- return &typeCheckInputs{ +- id: mp.ID, +- pkgPath: mp.PkgPath, +- name: mp.Name, +- goFiles: goFiles, +- compiledGoFiles: compiledGoFiles, +- sizes: mp.TypesSizes, +- depsByImpPath: mp.DepsByImpPath, +- goVersion: goVersion, +- +- supportsRelatedInformation: s.Options().RelatedInformationSupported, +- linkTarget: s.Options().LinkTarget, +- viewType: s.view.typ, +- }, nil +-} +- +-// readFiles reads the content of each file URL from the source +-// (e.g. snapshot or cache). +-func readFiles(ctx context.Context, fs file.Source, uris []protocol.DocumentURI) (_ []file.Handle, err error) { +- fhs := make([]file.Handle, len(uris)) +- for i, uri := range uris { +- fhs[i], err = fs.ReadFile(ctx, uri) +- if err != nil { +- return nil, err +- } +- } +- return fhs, nil +-} +- +-// localPackageKey returns a key for local inputs into type-checking, excluding +-// dependency information: files, metadata, and configuration. +-func localPackageKey(inputs *typeCheckInputs) file.Hash { +- hasher := sha256.New() +- +- // In principle, a key must be the hash of an +- // unambiguous encoding of all the relevant data. +- // If it's ambiguous, we risk collisions. +- +- // package identifiers +- fmt.Fprintf(hasher, "package: %s %s %s\n", inputs.id, inputs.name, inputs.pkgPath) +- +- // module Go version +- fmt.Fprintf(hasher, "go %s\n", inputs.goVersion) +- +- // import map +- for impPath, depID := range moremaps.Sorted(inputs.depsByImpPath) { +- fmt.Fprintf(hasher, "import %s %s", impPath, depID) +- } +- +- // file names and contents +- fmt.Fprintf(hasher, "compiledGoFiles: %d\n", len(inputs.compiledGoFiles)) +- for _, fh := range inputs.compiledGoFiles { +- fmt.Fprintln(hasher, fh.Identity()) +- } +- fmt.Fprintf(hasher, "goFiles: %d\n", len(inputs.goFiles)) +- for _, fh := range inputs.goFiles { +- fmt.Fprintln(hasher, fh.Identity()) +- } +- +- // types sizes +- wordSize := inputs.sizes.Sizeof(types.Typ[types.Int]) +- maxAlign := inputs.sizes.Alignof(types.NewPointer(types.Typ[types.Int64])) +- fmt.Fprintf(hasher, "sizes: %d %d\n", wordSize, maxAlign) +- +- fmt.Fprintf(hasher, "relatedInformation: %t\n", inputs.supportsRelatedInformation) +- fmt.Fprintf(hasher, "linkTarget: %s\n", inputs.linkTarget) +- fmt.Fprintf(hasher, "viewType: %d\n", inputs.viewType) +- +- var hash [sha256.Size]byte +- hasher.Sum(hash[:0]) +- return hash +-} +- +-// checkPackage type checks the parsed source files in compiledGoFiles. +-// (The resulting pkg also holds the parsed but not type-checked goFiles.) +-// deps holds the future results of type-checking the direct dependencies. 
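
typerefData above follows a get-or-compute pattern against the file cache, storing the computed result asynchronously so the caller is not blocked on the write. The sketch below shows the same shape with an in-memory stand-in; cache, getOrCompute, and errNotFound are illustrative, whereas the real code uses filecache.Get/Set keyed by a content hash.

package main

import (
	"errors"
	"fmt"
	"sync"
)

var errNotFound = errors.New("not found")

type cache struct {
	mu sync.Mutex
	m  map[string][]byte
}

func (c *cache) get(key string) ([]byte, error) {
	c.mu.Lock()
	defer c.mu.Unlock()
	if v, ok := c.m[key]; ok {
		return v, nil
	}
	return nil, errNotFound
}

func (c *cache) set(key string, v []byte) {
	c.mu.Lock()
	defer c.mu.Unlock()
	c.m[key] = v
}

// getOrCompute returns the cached value if present, and otherwise computes
// it and writes it back in the background, like the filecache.Set goroutine
// above.
func (c *cache) getOrCompute(key string, compute func() []byte) []byte {
	if data, err := c.get(key); err == nil {
		return data
	}
	data := compute()
	go c.set(key, data)
	return data
}

func main() {
	c := &cache{m: make(map[string][]byte)}
	v := c.getOrCompute("typerefs:pkg/a", func() []byte { return []byte("encoded refs") })
	fmt.Println(string(v))
}
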
+-func (b *typeCheckBatch) checkPackage(ctx context.Context, fset *token.FileSet, ph *packageHandle, imports map[PackagePath]*types.Package) (*Package, error) { +- inputs := ph.localInputs +- ctx, done := event.Start(ctx, "cache.typeCheckBatch.checkPackage", label.Package.Of(string(inputs.id))) +- defer done() +- +- pkg := &syntaxPackage{ +- id: inputs.id, +- fset: fset, // must match parse call below +- types: types.NewPackage(string(inputs.pkgPath), string(inputs.name)), +- typesSizes: inputs.sizes, +- typesInfo: &types.Info{ +- Types: make(map[ast.Expr]types.TypeAndValue), +- Defs: make(map[*ast.Ident]types.Object), +- Uses: make(map[*ast.Ident]types.Object), +- Implicits: make(map[ast.Node]types.Object), +- Instances: make(map[*ast.Ident]types.Instance), +- Selections: make(map[*ast.SelectorExpr]*types.Selection), +- Scopes: make(map[ast.Node]*types.Scope), +- FileVersions: make(map[*ast.File]string), +- }, +- } +- +- // Collect parsed files from the type check pass, capturing parse errors from +- // compiled files. +- var err error +- pkg.goFiles, err = b.parseCache.parseFiles(ctx, pkg.fset, parsego.Full, false, inputs.goFiles...) +- if err != nil { +- return nil, err +- } +- pkg.compiledGoFiles, err = b.parseCache.parseFiles(ctx, pkg.fset, parsego.Full, false, inputs.compiledGoFiles...) +- if err != nil { +- return nil, err +- } +- for _, pgf := range pkg.compiledGoFiles { +- if pgf.ParseErr != nil { +- pkg.parseErrors = append(pkg.parseErrors, pgf.ParseErr) +- } +- } +- +- // Use the default type information for the unsafe package. +- if inputs.pkgPath == "unsafe" { +- // Don't type check Unsafe: it's unnecessary, and doing so exposes a data +- // race to Unsafe.completed. +- pkg.types = types.Unsafe +- } else { +- +- if len(pkg.compiledGoFiles) == 0 { +- // No files most likely means go/packages failed. +- // +- // TODO(rfindley): in the past, we would capture go list errors in this +- // case, to present go list errors to the user. However we had no tests for +- // this behavior. It is unclear if anything better can be done here. +- return nil, fmt.Errorf("no parsed files for package %s", inputs.pkgPath) +- } +- +- onError := func(e error) { +- pkg.typeErrors = append(pkg.typeErrors, e.(types.Error)) +- } +- cfg := b.typesConfig(ctx, inputs, imports, onError) +- check := types.NewChecker(cfg, pkg.fset, pkg.types, pkg.typesInfo) +- +- var files []*ast.File +- for _, cgf := range pkg.compiledGoFiles { +- files = append(files, cgf.File) +- } +- +- // Type checking is expensive, and we may not have encountered cancellations +- // via parsing (e.g. if we got nothing but cache hits for parsed files). +- if ctx.Err() != nil { +- return nil, ctx.Err() +- } +- +- // Type checking errors are handled via the config, so ignore them here. +- _ = check.Files(files) // 50us-15ms, depending on size of package +- +- // If the context was cancelled, we may have returned a ton of transient +- // errors to the type checker. Swallow them. +- if ctx.Err() != nil { +- return nil, ctx.Err() +- } +- +- // Collect imports by package path for the DependencyTypes API. 
+- pkg.importMap = make(map[PackagePath]*types.Package) +- var collectDeps func(*types.Package) +- collectDeps = func(p *types.Package) { +- pkgPath := PackagePath(p.Path()) +- if _, ok := pkg.importMap[pkgPath]; ok { +- return +- } +- pkg.importMap[pkgPath] = p +- for _, imp := range p.Imports() { +- collectDeps(imp) +- } +- } +- collectDeps(pkg.types) +- +- // Work around golang/go#61561: interface instances aren't concurrency-safe +- // as they are not completed by the type checker. +- for _, inst := range pkg.typesInfo.Instances { +- if iface, _ := inst.Type.Underlying().(*types.Interface); iface != nil { +- iface.Complete() +- } +- } +- } +- +- // Our heuristic for whether to show type checking errors is: +- // + If there is a parse error _in the current file_, suppress type +- // errors in that file. +- // + Otherwise, show type errors even in the presence of parse errors in +- // other package files. go/types attempts to suppress follow-on errors +- // due to bad syntax, so on balance type checking errors still provide +- // a decent signal/noise ratio as long as the file in question parses. +- +- // Track URIs with parse errors so that we can suppress type errors for these +- // files. +- unparsable := map[protocol.DocumentURI]bool{} +- for _, e := range pkg.parseErrors { +- diags, err := parseErrorDiagnostics(pkg, e) +- if err != nil { +- event.Error(ctx, "unable to compute positions for parse errors", err, label.Package.Of(string(inputs.id))) +- continue +- } +- for _, diag := range diags { +- unparsable[diag.URI] = true +- pkg.diagnostics = append(pkg.diagnostics, diag) +- } +- } +- +- diags := typeErrorsToDiagnostics(pkg, inputs, pkg.typeErrors) +- for _, diag := range diags { +- // If the file didn't parse cleanly, it is highly likely that type +- // checking errors will be confusing or redundant. But otherwise, type +- // checking usually provides a good enough signal to include. +- if !unparsable[diag.URI] { +- pkg.diagnostics = append(pkg.diagnostics, diag) +- } +- } +- +- return &Package{ph.mp, ph.loadDiagnostics, pkg}, nil +-} +- +-// e.g. "go1" or "go1.2" or "go1.2.3" +-var goVersionRx = regexp.MustCompile(`^go[1-9][0-9]*(?:\.(0|[1-9][0-9]*)){0,2}$`) +- +-func (b *typeCheckBatch) typesConfig(ctx context.Context, inputs *typeCheckInputs, imports map[PackagePath]*types.Package, onError func(e error)) *types.Config { +- cfg := &types.Config{ +- Sizes: inputs.sizes, +- Error: onError, +- Importer: importerFunc(func(path string) (*types.Package, error) { +- // While all of the import errors could be reported +- // based on the metadata before we start type checking, +- // reporting them via types.Importer places the errors +- // at the correct source location. +- id, ok := inputs.depsByImpPath[ImportPath(path)] +- if !ok { +- // If the import declaration is broken, +- // go list may fail to report metadata about it. +- // See TestFixImportDecl for an example. +- return nil, fmt.Errorf("missing metadata for import of %q", path) +- } +- depPH := b.getHandle(id) +- if depPH == nil { +- // e.g. missing metadata for dependencies in buildPackageHandle +- return nil, missingPkgError(inputs.id, path, inputs.viewType) +- } +- if !metadata.IsValidImport(inputs.pkgPath, depPH.mp.PkgPath, inputs.viewType != GoPackagesDriverView) { +- return nil, fmt.Errorf("invalid use of internal package %q", path) +- } +- // For syntax packages, the set of required imports is known and +- // precomputed. 
For import packages (checkPackageForImport), imports are +- // constructed lazily, because they may not have been needed if we could +- // have imported from export data. +- // +- // TODO(rfindley): refactor to move this logic to the callsite. +- if imports != nil { +- imp, ok := imports[depPH.mp.PkgPath] +- if !ok { +- return nil, fmt.Errorf("missing import %s", id) +- } +- return imp, nil +- } +- return b.getImportPackage(ctx, id) +- }), +- } +- +- if inputs.goVersion != "" { +- goVersion := "go" + inputs.goVersion +- if validGoVersion(goVersion) { +- cfg.GoVersion = goVersion +- } +- } +- +- // We want to type check cgo code if go/types supports it. +- // We passed typecheckCgo to go/packages when we Loaded. +- typesinternal.SetUsesCgo(cfg) +- return cfg +-} +- +-// validGoVersion reports whether goVersion is a valid Go version for go/types. +-// types.NewChecker panics if GoVersion is invalid. +-// +-// Note that, prior to go1.21, go/types required exactly two components to the +-// version number. For example, go types would panic with the Go version +-// go1.21.1. validGoVersion handles this case when built with go1.20 or earlier. +-func validGoVersion(goVersion string) bool { +- if !goVersionRx.MatchString(goVersion) { +- return false // malformed version string +- } +- +- if relVer := releaseVersion(); relVer != "" && versions.Before(versions.Lang(relVer), versions.Lang(goVersion)) { +- return false // 'go list' is too new for go/types +- } +- +- // TODO(rfindley): remove once we no longer support building gopls with Go +- // 1.20 or earlier. +- if !slices.Contains(build.Default.ReleaseTags, "go1.21") && strings.Count(goVersion, ".") >= 2 { +- return false // unsupported patch version +- } +- +- return true +-} +- +-// releaseVersion reports the Go language version used to compile gopls, or "" +-// if it cannot be determined. +-func releaseVersion() string { +- if len(build.Default.ReleaseTags) > 0 { +- v := build.Default.ReleaseTags[len(build.Default.ReleaseTags)-1] +- var dummy int +- if _, err := fmt.Sscanf(v, "go1.%d", &dummy); err == nil { +- return v +- } +- } +- return "" +-} +- +-// depsErrors creates diagnostics for each metadata error (e.g. import cycle). +-// These may be attached to import declarations in the transitive source files +-// of pkg, or to 'requires' declarations in the package's go.mod file. +-// +-// TODO(rfindley): move this to load.go +-func depsErrors(ctx context.Context, snapshot *Snapshot, mp *metadata.Package) ([]*Diagnostic, error) { +- // Select packages that can't be found, and were imported in non-workspace packages. +- // Workspace packages already show their own errors. +- var relevantErrors []*packagesinternal.PackageError +- for _, depsError := range mp.DepsErrors { +- // Up to Go 1.15, the missing package was included in the stack, which +- // was presumably a bug. We want the next one up. +- directImporterIdx := len(depsError.ImportStack) - 1 +- if directImporterIdx < 0 { +- continue +- } +- +- directImporter := depsError.ImportStack[directImporterIdx] +- if snapshot.IsWorkspacePackage(PackageID(directImporter)) { +- continue +- } +- relevantErrors = append(relevantErrors, depsError) +- } +- +- // Don't build the import index for nothing. +- if len(relevantErrors) == 0 { +- return nil, nil +- } +- +- // Subsequent checks require Go files. +- if len(mp.CompiledGoFiles) == 0 { +- return nil, nil +- } +- +- // Build an index of all imports in the package. 
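
typesConfig above wires import resolution through importerFunc, the usual "function type implements a single-method interface" idiom for types.Importer. Its definition is not part of this hunk; the sketch below guesses at its shape purely to illustrate the idiom.

package main

import (
	"fmt"
	"go/types"
)

// importerFunc adapts a closure to types.Importer, in the same spirit as
// http.HandlerFunc. The exact definition used above may differ.
type importerFunc func(path string) (*types.Package, error)

func (f importerFunc) Import(path string) (*types.Package, error) { return f(path) }

func main() {
	var imp types.Importer = importerFunc(func(path string) (*types.Package, error) {
		return nil, fmt.Errorf("missing metadata for import of %q", path)
	})
	_, err := imp.Import("example.com/missing")
	fmt.Println(err)
}
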
+- type fileImport struct { +- cgf *parsego.File +- imp *ast.ImportSpec +- } +- allImports := map[string][]fileImport{} +- for _, uri := range mp.CompiledGoFiles { +- pgf, err := parseGoURI(ctx, snapshot, uri, parsego.Header) +- if err != nil { +- return nil, err +- } +- fset := tokeninternal.FileSetFor(pgf.Tok) +- // TODO(adonovan): modify Imports() to accept a single token.File (cgf.Tok). +- for _, group := range astutil.Imports(fset, pgf.File) { +- for _, imp := range group { +- if imp.Path == nil { +- continue +- } +- path := strings.Trim(imp.Path.Value, `"`) +- allImports[path] = append(allImports[path], fileImport{pgf, imp}) +- } +- } +- } +- +- // Apply a diagnostic to any import involved in the error, stopping once +- // we reach the workspace. +- var errors []*Diagnostic +- for _, depErr := range relevantErrors { +- for i := len(depErr.ImportStack) - 1; i >= 0; i-- { +- item := depErr.ImportStack[i] +- if snapshot.IsWorkspacePackage(PackageID(item)) { +- break +- } +- +- for _, imp := range allImports[item] { +- rng, err := imp.cgf.NodeRange(imp.imp) +- if err != nil { +- return nil, err +- } +- diag := &Diagnostic{ +- URI: imp.cgf.URI, +- Range: rng, +- Severity: protocol.SeverityError, +- Source: TypeError, +- Message: fmt.Sprintf("error while importing %v: %v", item, depErr.Err), +- SuggestedFixes: goGetQuickFixes(mp.Module != nil, imp.cgf.URI, item), +- } +- if !bundleLazyFixes(diag) { +- bug.Reportf("failed to bundle fixes for diagnostic %q", diag.Message) +- } +- errors = append(errors, diag) +- } +- } +- } +- +- modFile, err := findRootPattern(ctx, mp.CompiledGoFiles[0].Dir(), "go.mod", snapshot) +- if err != nil { +- return nil, err +- } +- pm, err := parseModURI(ctx, snapshot, modFile) +- if err != nil { +- return nil, err +- } +- +- // Add a diagnostic to the module that contained the lowest-level import of +- // the missing package. +- for _, depErr := range relevantErrors { +- for i := len(depErr.ImportStack) - 1; i >= 0; i-- { +- item := depErr.ImportStack[i] +- mp := snapshot.Metadata(PackageID(item)) +- if mp == nil || mp.Module == nil { +- continue +- } +- modVer := module.Version{Path: mp.Module.Path, Version: mp.Module.Version} +- reference := findModuleReference(pm.File, modVer) +- if reference == nil { +- continue +- } +- rng, err := pm.Mapper.OffsetRange(reference.Start.Byte, reference.End.Byte) +- if err != nil { +- return nil, err +- } +- diag := &Diagnostic{ +- URI: pm.URI, +- Range: rng, +- Severity: protocol.SeverityError, +- Source: TypeError, +- Message: fmt.Sprintf("error while importing %v: %v", item, depErr.Err), +- SuggestedFixes: goGetQuickFixes(true, pm.URI, item), +- } +- if !bundleLazyFixes(diag) { +- bug.Reportf("failed to bundle fixes for diagnostic %q", diag.Message) +- } +- errors = append(errors, diag) +- break +- } +- } +- return errors, nil +-} +- +-// missingPkgError returns an error message for a missing package that varies +-// based on the user's workspace mode. +-func missingPkgError(from PackageID, pkgPath string, viewType ViewType) error { +- switch viewType { +- case GoModView, GoWorkView: +- if metadata.IsCommandLineArguments(from) { +- return fmt.Errorf("current file is not included in a workspace module") +- } else { +- // Previously, we would present the initialization error here. 
+- return fmt.Errorf("no required module provides package %q", pkgPath) +- } +- case AdHocView: +- return fmt.Errorf("cannot find package %q in GOROOT", pkgPath) +- case GoPackagesDriverView: +- return fmt.Errorf("go/packages driver could not load %q", pkgPath) +- case GOPATHView: +- return fmt.Errorf("cannot find package %q in GOROOT or GOPATH", pkgPath) +- default: +- return fmt.Errorf("unable to load package") +- } +-} +- +-// typeErrorsToDiagnostics translates a slice of types.Errors into a slice of +-// Diagnostics. +-// +-// In addition to simply mapping data such as position information and error +-// codes, this function interprets related go/types "continuation" errors as +-// protocol.DiagnosticRelatedInformation. Continuation errors are go/types +-// errors whose messages starts with "\t". By convention, these errors relate +-// to the previous error in the errs slice (such as if they were printed in +-// sequence to a terminal). +-// +-// Fields in typeCheckInputs may affect the resulting diagnostics. +-func typeErrorsToDiagnostics(pkg *syntaxPackage, inputs *typeCheckInputs, errs []types.Error) []*Diagnostic { +- var result []*Diagnostic +- +- // batch records diagnostics for a set of related types.Errors. +- // (related[0] is the primary error.) +- batch := func(related []types.Error) { +- var diags []*Diagnostic +- for i, e := range related { +- code, start, end, ok := typesinternal.ErrorCodeStartEnd(e) +- if !ok || !start.IsValid() || !end.IsValid() { +- start, end = e.Pos, e.Pos +- code = 0 +- } +- if !start.IsValid() { +- // Type checker errors may be missing position information if they +- // relate to synthetic syntax, such as if the file were fixed. In that +- // case, we should have a parse error anyway, so skipping the type +- // checker error is likely benign. +- // +- // TODO(golang/go#64335): we should eventually verify that all type +- // checked syntax has valid positions, and promote this skip to a bug +- // report. +- continue +- } +- +- // Invariant: both start and end are IsValid. +- if !end.IsValid() { +- panic("end is invalid") +- } +- +- posn := safetoken.StartPosition(e.Fset, start) +- if !posn.IsValid() { +- // All valid positions produced by the type checker should described by +- // its fileset, yet since type checker errors are associated with +- // positions in the AST, and AST nodes can overflow the file +- // (golang/go#48300), we can't rely on this. +- // +- // We should fix the parser, but in the meantime type errors are not +- // significant if there are parse errors, so we can safely ignore this +- // case. +- if len(pkg.parseErrors) == 0 { +- bug.Reportf("internal error: type checker error %q outside its Fset", e) +- } +- continue +- } +- pgf, err := pkg.File(protocol.URIFromPath(posn.Filename)) +- if err != nil { +- // Sometimes type-checker errors refer to positions in other packages, +- // such as when a declaration duplicates a dot-imported name. +- // +- // In these cases, we don't want to report an error in the other +- // package (the message would be rather confusing), but we do want to +- // report an error in the current package (golang/go#59005). 
+- if i == 0 { +- if pkg.hasFixedFiles() { +- bug.Reportf("internal error: could not locate file for primary type checker error %v: %v (fixed files)", e, err) +- } else { +- bug.Reportf("internal error: could not locate file for primary type checker error %v: %v", e, err) +- } +- } +- continue +- } +- +- // debugging golang/go#65960 +- // +- // At this point, we know 'start' IsValid, and +- // StartPosition(start) worked (with e.Fset). +- // +- // If the asserted condition is true, 'start' +- // is also in range for pgf.Tok, which means +- // the PosRange failure must be caused by 'end'. +- if pgf.Tok != e.Fset.File(start) { +- if pkg.hasFixedFiles() { +- bug.Reportf("internal error: inconsistent token.Files for pos (fixed files)") +- } else { +- bug.Reportf("internal error: inconsistent token.Files for pos") +- } +- } +- +- if end == start { +- // Expand the end position to a more meaningful span. +- // +- // TODO(adonovan): It is the type checker's responsibility +- // to ensure that (start, end) are meaningful; see #71803. +- end = typeErrorEndPos(pgf.Tok, pgf.Src, start) +- +- // debugging golang/go#65960 +- if _, err := safetoken.Offset(pgf.Tok, end); err != nil { +- if pkg.hasFixedFiles() { +- bug.Reportf("TypeErrorEndPos returned invalid end: %v (fixed files)", err) +- } else { +- bug.Reportf("TypeErrorEndPos returned invalid end: %v", err) +- } +- } +- } else { +- // TODO(adonovan): check File(start)==File(end). +- +- // debugging golang/go#65960 +- if _, err := safetoken.Offset(pgf.Tok, end); err != nil { +- if pkg.hasFixedFiles() { +- bug.Reportf("ErrorCodeStartEnd returned invalid end: %v (fixed files)", err) +- } else { +- bug.Reportf("ErrorCodeStartEnd returned invalid end: %v", err) +- } +- } +- } +- +- rng, err := pgf.Mapper.PosRange(pgf.Tok, start, end) +- if err != nil { +- bug.Reportf("internal error: could not compute pos to range for %v: %v", e, err) +- continue +- } +- msg := related[0].Msg // primary +- if i > 0 { +- if inputs.supportsRelatedInformation { +- msg += " (see details)" +- } else { +- msg += fmt.Sprintf(" (this error: %v)", e.Msg) +- } +- } +- diag := &Diagnostic{ +- URI: pgf.URI, +- Range: rng, +- Severity: protocol.SeverityError, +- Source: TypeError, +- Message: msg, +- } +- if code != 0 { +- diag.Code = code.String() +- diag.CodeHref = typesCodeHref(inputs.linkTarget, code) +- } +- if code == typesinternal.UnusedVar || code == typesinternal.UnusedImport { +- diag.Tags = append(diag.Tags, protocol.Unnecessary) +- } +- if match := importErrorRe.FindStringSubmatch(e.Msg); match != nil { +- diag.SuggestedFixes = append(diag.SuggestedFixes, goGetQuickFixes(inputs.viewType.usesModules(), pgf.URI, match[1])...) +- } +- if match := unsupportedFeatureRe.FindStringSubmatch(e.Msg); match != nil { +- diag.SuggestedFixes = append(diag.SuggestedFixes, editGoDirectiveQuickFix(inputs.viewType.usesModules(), pgf.URI, match[1])...) +- } +- +- // Link up related information. For the primary error, all related errors +- // are treated as related information. For secondary errors, only the +- // primary is related. +- // +- // This is because go/types assumes that errors are read top-down, such as +- // in the cycle error "A refers to...". The structure of the secondary +- // error set likely only makes sense for the primary error. +- // +- // NOTE: len(diags) == 0 if the primary diagnostic has invalid positions. +- // See also golang/go#66731. 
+- if i > 0 && len(diags) > 0 { +- primary := diags[0] +- primary.Related = append(primary.Related, protocol.DiagnosticRelatedInformation{ +- Location: diag.URI.Location(diag.Range), +- Message: related[i].Msg, // use the unmodified secondary error for related errors. +- }) +- diag.Related = []protocol.DiagnosticRelatedInformation{{ +- Location: primary.URI.Location(primary.Range), +- }} +- } +- diags = append(diags, diag) +- } +- result = append(result, diags...) +- } +- +- // Process batches of related errors. +- for len(errs) > 0 { +- related := []types.Error{errs[0]} +- for i := 1; i < len(errs); i++ { +- spl := errs[i] +- if len(spl.Msg) == 0 || spl.Msg[0] != '\t' { +- break +- } +- spl.Msg = spl.Msg[len("\t"):] +- related = append(related, spl) +- } +- batch(related) +- errs = errs[len(related):] +- } +- +- return result +-} +- +-// This heuristic is ill-defined. +-func typeErrorEndPos(tokFile *token.File, src []byte, start token.Pos) token.Pos { +- // Get the end position for the type error. +- offset, err := safetoken.Offset(tokFile, start) +- if err != nil || offset > len(src) { +- return start +- } +- src = src[offset:] +- +- // Attempt to find a reasonable end position for the type error. +- // +- // TODO(rfindley): the heuristic implemented here is unclear. It looks like +- // it seeks the end of the primary operand starting at start, but that is not +- // quite implemented (for example, given a func literal this heuristic will +- // return the range of the func keyword). +- // +- // We should formalize this heuristic, or deprecate it by finally proposing +- // to add end position to all type checker errors. +- // +- // Nevertheless, ensure that the end position at least spans the current +- // token at the cursor (this was golang/go#69505). +- end := start +- { +- var s scanner.Scanner +- fset := token.NewFileSet() +- f := fset.AddFile("", fset.Base(), len(src)) +- s.Init(f, src, nil /* no error handler */, scanner.ScanComments) +- pos, tok, lit := s.Scan() +- if tok != token.SEMICOLON { +- if off, err := safetoken.Offset(f, pos); err == nil { +- off += len(lit) +- src = src[off:] +- end += token.Pos(off) +- } +- } +- } +- +- // Look for bytes that might terminate the current operand. See note above: +- // this is imprecise. +- if width := bytes.IndexAny(src, " \n,():;[]+-*/"); width > 0 { +- end += token.Pos(width) +- } +- return end +-} +- +-// An importFunc is an implementation of the single-method +-// types.Importer interface based on a function value. +-type importerFunc func(path string) (*types.Package, error) +- +-func (f importerFunc) Import(path string) (*types.Package, error) { return f(path) } +diff -urN a/gopls/internal/cache/constraints.go b/gopls/internal/cache/constraints.go +--- a/gopls/internal/cache/constraints.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/cache/constraints.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,60 +0,0 @@ +-// Copyright 2022 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package cache +- +-import ( +- "go/ast" +- "go/build/constraint" +- "go/parser" +- "go/token" +- "slices" +-) +- +-// isStandaloneFile reports whether a file with the given contents should be +-// considered a 'standalone main file', meaning a package that consists of only +-// a single file. 
+-func isStandaloneFile(src []byte, standaloneTags []string) bool { +- f, err := parser.ParseFile(token.NewFileSet(), "", src, parser.PackageClauseOnly|parser.ParseComments) +- if err != nil { +- return false +- } +- +- if f.Name == nil || f.Name.Name != "main" { +- return false +- } +- +- found := false +- walkConstraints(f, func(c constraint.Expr) bool { +- if tag, ok := c.(*constraint.TagExpr); ok { +- if slices.Contains(standaloneTags, tag.Tag) { +- found = true +- return false +- } +- } +- return true +- }) +- +- return found +-} +- +-// walkConstraints calls f for each constraint expression in the file, until +-// all constraints are exhausted or f returns false. +-func walkConstraints(file *ast.File, f func(constraint.Expr) bool) { +- for _, cg := range file.Comments { +- // Even with PackageClauseOnly the parser consumes the semicolon following +- // the package clause, so we must guard against comments that come after +- // the package name. +- if cg.Pos() > file.Name.Pos() { +- continue +- } +- for _, comment := range cg.List { +- if c, err := constraint.Parse(comment.Text); err == nil { +- if !f(c) { +- return +- } +- } +- } +- } +-} +diff -urN a/gopls/internal/cache/constraints_test.go b/gopls/internal/cache/constraints_test.go +--- a/gopls/internal/cache/constraints_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/cache/constraints_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,126 +0,0 @@ +-// Copyright 2022 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-//go:build go1.16 +-// +build go1.16 +- +-package cache +- +-import ( +- "testing" +-) +- +-func TestIsStandaloneFile(t *testing.T) { +- tests := []struct { +- desc string +- contents string +- standaloneTags []string +- want bool +- }{ +- { +- "new syntax", +- "//go:build ignore\n\npackage main\n", +- []string{"ignore"}, +- true, +- }, +- { +- "legacy syntax", +- "// +build ignore\n\npackage main\n", +- []string{"ignore"}, +- true, +- }, +- { +- "multiple tags", +- "//go:build ignore\n\npackage main\n", +- []string{"exclude", "ignore"}, +- true, +- }, +- { +- "invalid tag", +- "// +build ignore\n\npackage main\n", +- []string{"script"}, +- false, +- }, +- { +- "non-main package", +- "//go:build ignore\n\npackage p\n", +- []string{"ignore"}, +- false, +- }, +- { +- "alternate tag", +- "// +build script\n\npackage main\n", +- []string{"script"}, +- true, +- }, +- { +- "both syntax", +- "//go:build ignore\n// +build ignore\n\npackage main\n", +- []string{"ignore"}, +- true, +- }, +- { +- "after comments", +- "// A non-directive comment\n//go:build ignore\n\npackage main\n", +- []string{"ignore"}, +- true, +- }, +- { +- "after package decl", +- "package main //go:build ignore\n", +- []string{"ignore"}, +- false, +- }, +- { +- "on line after package decl", +- "package main\n\n//go:build ignore\n", +- []string{"ignore"}, +- false, +- }, +- { +- "combined with other expressions", +- "\n\n//go:build ignore || darwin\n\npackage main\n", +- []string{"ignore"}, +- false, +- }, +- } +- +- for _, test := range tests { +- t.Run(test.desc, func(t *testing.T) { +- if got := isStandaloneFile([]byte(test.contents), test.standaloneTags); got != test.want { +- t.Errorf("isStandaloneFile(%q, %v) = %t, want %t", test.contents, test.standaloneTags, got, test.want) +- } +- }) +- } +-} +- +-func TestVersionRegexp(t *testing.T) { +- // good +- for _, s := range []string{ +- "go1", +- "go1.2", +- "go1.2.3", +- "go1.0.33", +- } { 
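Editor's note: isStandaloneFile and walkConstraints above decide whether a file is a "standalone main" by parsing only the package clause plus comments and scanning build constraints for a configured tag. A compact sketch of that check follows; it handles only a bare //go:build tag (the original walks the whole constraint expression), and the tag name is arbitrary.

```go
package main

import (
	"fmt"
	"go/build/constraint"
	"go/parser"
	"go/token"
	"slices"
	"strings"
)

// isStandalone reports whether src is a main package guarded by one of
// the given build tags, in the spirit of isStandaloneFile above.
func isStandalone(src string, tags []string) bool {
	f, err := parser.ParseFile(token.NewFileSet(), "", src,
		parser.PackageClauseOnly|parser.ParseComments)
	if err != nil || f.Name == nil || f.Name.Name != "main" {
		return false
	}
	for _, cg := range f.Comments {
		if cg.Pos() > f.Name.Pos() {
			continue // ignore comments after the package name
		}
		for _, c := range cg.List {
			expr, err := constraint.Parse(c.Text)
			if err != nil {
				continue
			}
			// Simplified: only a single top-level tag is recognized here.
			if tag, ok := expr.(*constraint.TagExpr); ok && slices.Contains(tags, tag.Tag) {
				return true
			}
		}
	}
	return false
}

func main() {
	src := "//go:build ignore\n\npackage main\n"
	fmt.Println(isStandalone(src, []string{"ignore"})) // true
	fmt.Println(isStandalone(strings.ReplaceAll(src, "main", "p"), []string{"ignore"})) // false
}
```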
+- if !goVersionRx.MatchString(s) { +- t.Errorf("Valid Go version %q does not match the regexp", s) +- } +- } +- +- // bad +- for _, s := range []string{ +- "go", // missing numbers +- "go0", // Go starts at 1 +- "go01", // leading zero +- "go1.π", // non-decimal +- "go1.-1", // negative +- "go1.02.3", // leading zero +- "go1.2.3.4", // too many segments +- "go1.2.3-pre", // textual suffix +- } { +- if goVersionRx.MatchString(s) { +- t.Errorf("Invalid Go version %q unexpectedly matches the regexp", s) +- } +- } +-} +diff -urN a/gopls/internal/cache/debug.go b/gopls/internal/cache/debug.go +--- a/gopls/internal/cache/debug.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/cache/debug.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,12 +0,0 @@ +-// Copyright 2022 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package cache +- +-// assert panics with the given msg if cond is not true. +-func assert(cond bool, msg string) { +- if !cond { +- panic(msg) +- } +-} +diff -urN a/gopls/internal/cache/diagnostics.go b/gopls/internal/cache/diagnostics.go +--- a/gopls/internal/cache/diagnostics.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/cache/diagnostics.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,247 +0,0 @@ +-// Copyright 2023 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package cache +- +-import ( +- "crypto/sha256" +- "encoding/json" +- "fmt" +- "strings" +- +- "golang.org/x/tools/gopls/internal/file" +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/gopls/internal/util/bug" +-) +- +-// A InitializationError is an error that causes snapshot initialization to fail. +-// It is either the error returned from go/packages.Load, or an error parsing a +-// workspace go.work or go.mod file. +-// +-// Such an error generally indicates that the View is malformed, and will never +-// be usable. +-type InitializationError struct { +- // MainError is the primary error. Must be non-nil. +- MainError error +- +- // Diagnostics contains any supplemental (structured) diagnostics extracted +- // from the load error. +- Diagnostics map[protocol.DocumentURI][]*Diagnostic +-} +- +-func byURI(d *Diagnostic) protocol.DocumentURI { return d.URI } // For use in maps.Group. +- +-// A Diagnostic corresponds to an LSP Diagnostic. +-// https://microsoft.github.io/language-server-protocol/specification#diagnostic +-// +-// It is (effectively) gob-serializable; see {encode,decode}Diagnostics. +-type Diagnostic struct { +- URI protocol.DocumentURI // of diagnosed file (not diagnostic documentation) +- Range protocol.Range +- Severity protocol.DiagnosticSeverity +- Code string // analysis.Diagnostic.Category (or "default" if empty) or hidden go/types error code +- CodeHref string +- +- // Source is a human-readable description of the source of the error. +- // Diagnostics generated by an analysis.Analyzer set it to Analyzer.Name. +- Source DiagnosticSource +- +- Message string +- +- Tags []protocol.DiagnosticTag +- Related []protocol.DiagnosticRelatedInformation +- +- // Fields below are used internally to generate lazy fixes. They aren't +- // part of the LSP spec and historically didn't leave the server. 
+- // +- // Update(2023-05): version 3.16 of the LSP spec included support for the +- // Diagnostic.data field, which holds arbitrary data preserved in the +- // diagnostic for codeAction requests. This field allows bundling additional +- // information for lazy fixes, and gopls can (and should) use this +- // information to avoid re-evaluating diagnostics in code-action handlers. +- // +- // In order to stage this transition incrementally, the 'BundledFixes' field +- // may store a 'bundled' (=json-serialized) form of the associated +- // SuggestedFixes. Not all diagnostics have their fixes bundled. +- BundledFixes *json.RawMessage +- SuggestedFixes []SuggestedFix +-} +- +-func (d *Diagnostic) String() string { +- return fmt.Sprintf("%v: %s", d.Range, d.Message) +-} +- +-// Hash computes a hash to identify the diagnostic. +-// The hash is for deduplicating within a file, so does not incorporate d.URI. +-func (d *Diagnostic) Hash() file.Hash { +- h := sha256.New() +- for _, t := range d.Tags { +- fmt.Fprintf(h, "tag: %s\n", t) +- } +- for _, r := range d.Related { +- fmt.Fprintf(h, "related: %s %s %s\n", r.Location.URI, r.Message, r.Location.Range) +- } +- fmt.Fprintf(h, "code: %s\n", d.Code) +- fmt.Fprintf(h, "codeHref: %s\n", d.CodeHref) +- fmt.Fprintf(h, "message: %s\n", d.Message) +- fmt.Fprintf(h, "range: %s\n", d.Range) +- fmt.Fprintf(h, "severity: %s\n", d.Severity) +- fmt.Fprintf(h, "source: %s\n", d.Source) +- if d.BundledFixes != nil { +- fmt.Fprintf(h, "fixes: %s\n", *d.BundledFixes) +- } +- var hash [sha256.Size]byte +- h.Sum(hash[:0]) +- return hash +-} +- +-func ToProtocolDiagnostics(diagnostics ...*Diagnostic) []protocol.Diagnostic { +- // TODO(rfindley): support bundling edits, and bundle all suggested fixes here. +- // (see cache.bundleLazyFixes). +- +- reports := []protocol.Diagnostic{} +- for _, diag := range diagnostics { +- pdiag := protocol.Diagnostic{ +- // diag.Message might start with \n or \t +- Message: strings.TrimSpace(diag.Message), +- Range: diag.Range, +- Severity: diag.Severity, +- Source: string(diag.Source), +- Tags: protocol.NonNilSlice(diag.Tags), +- RelatedInformation: diag.Related, +- Data: diag.BundledFixes, +- } +- if diag.Code != "" { +- pdiag.Code = diag.Code +- } +- if diag.CodeHref != "" { +- pdiag.CodeDescription = &protocol.CodeDescription{Href: diag.CodeHref} +- } +- reports = append(reports, pdiag) +- } +- return reports +-} +- +-// A DiagnosticSource identifies the source of a diagnostic. +-// +-// Its value may be one of the distinguished string values below, or +-// the Name of an [analysis.Analyzer]. +-type DiagnosticSource string +- +-const ( +- UnknownError DiagnosticSource = "" +- ListError DiagnosticSource = "go list" +- ParseError DiagnosticSource = "syntax" +- TypeError DiagnosticSource = "compiler" +- ModTidyError DiagnosticSource = "go mod tidy" +- CompilerOptDetailsInfo DiagnosticSource = "optimizer details" // cmd/compile -json=0,dir +- UpgradeNotification DiagnosticSource = "upgrade available" +- Vulncheck DiagnosticSource = "vulncheck imports" +- Govulncheck DiagnosticSource = "govulncheck" +- TemplateError DiagnosticSource = "template" +- WorkFileError DiagnosticSource = "go.work file" +-) +- +-// A SuggestedFix represents a suggested fix (for a diagnostic) +-// produced by analysis, in protocol form. +-// +-// The fixes are reported to the client as a set of code actions in +-// response to a CodeAction query for a set of diagnostics. 
Multiple +-// SuggestedFixes may be produced for the same logical fix, varying +-// only in ActionKind. For example, a fix may be both a Refactor +-// (which should appear on the refactoring menu) and a SourceFixAll (a +-// clear fix that can be safely applied without explicit consent). +-type SuggestedFix struct { +- Title string +- Edits map[protocol.DocumentURI][]protocol.TextEdit +- Command *protocol.Command +- ActionKind protocol.CodeActionKind +-} +- +-// SuggestedFixFromCommand returns a suggested fix to run the given command. +-func SuggestedFixFromCommand(cmd *protocol.Command, kind protocol.CodeActionKind) SuggestedFix { +- return SuggestedFix{ +- Title: cmd.Title, +- Command: cmd, +- ActionKind: kind, +- } +-} +- +-// lazyFixesJSON is a JSON-serializable list of code actions (arising +-// from "lazy" SuggestedFixes with no Edits) to be saved in the +-// protocol.Diagnostic.Data field. Computation of the edits is thus +-// deferred until the action's command is invoked. +-type lazyFixesJSON struct { +- // TODO(rfindley): pack some sort of identifier here for later +- // lookup/validation? +- Actions []protocol.CodeAction +-} +- +-// bundleLazyFixes attempts to bundle sd.SuggestedFixes into the +-// sd.BundledFixes field, so that it can be round-tripped through the client. +-// It returns false if the fixes cannot be bundled. +-func bundleLazyFixes(sd *Diagnostic) bool { +- if len(sd.SuggestedFixes) == 0 { +- return true +- } +- var actions []protocol.CodeAction +- for _, fix := range sd.SuggestedFixes { +- if fix.Edits != nil { +- // For now, we only support bundled code actions that execute commands. +- // +- // In order to cleanly support bundled edits, we'd have to guarantee that +- // the edits were generated on the current snapshot. But this naively +- // implies that every fix would have to include a snapshot ID, which +- // would require us to republish all diagnostics on each new snapshot. +- // +- // TODO(rfindley): in order to avoid this additional chatter, we'd need +- // to build some sort of registry or other mechanism on the snapshot to +- // check whether a diagnostic is still valid. +- return false +- } +- action := protocol.CodeAction{ +- Title: fix.Title, +- Kind: fix.ActionKind, +- Command: fix.Command, +- } +- actions = append(actions, action) +- } +- fixes := lazyFixesJSON{ +- Actions: actions, +- } +- data, err := json.Marshal(fixes) +- if err != nil { +- bug.Reportf("marshalling lazy fixes: %v", err) +- return false +- } +- msg := json.RawMessage(data) +- sd.BundledFixes = &msg +- return true +-} +- +-// BundledLazyFixes extracts any bundled codeActions from the +-// diag.Data field. +-func BundledLazyFixes(diag protocol.Diagnostic) ([]protocol.CodeAction, error) { +- var fix lazyFixesJSON +- if diag.Data != nil { +- err := protocol.UnmarshalJSON(*diag.Data, &fix) +- if err != nil { +- return nil, fmt.Errorf("unmarshalling fix from diagnostic data: %v", err) +- } +- } +- +- var actions []protocol.CodeAction +- for _, action := range fix.Actions { +- // See bundleLazyFixes: for now we only support bundling commands. +- if action.Edit != nil { +- return nil, fmt.Errorf("bundled fix %q includes workspace edits", action.Title) +- } +- // associate the action with the incoming diagnostic +- // (Note that this does not mutate the fix.Fixes slice). 
+- action.Diagnostics = []protocol.Diagnostic{diag} +- actions = append(actions, action) +- } +- +- return actions, nil +-} +diff -urN a/gopls/internal/cache/errors.go b/gopls/internal/cache/errors.go +--- a/gopls/internal/cache/errors.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/cache/errors.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,494 +0,0 @@ +-// Copyright 2019 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package cache +- +-// This file defines routines to convert diagnostics from go list, go +-// get, go/packages, parsing, type checking, and analysis into +-// golang.Diagnostic form, and suggesting quick fixes. +- +-import ( +- "context" +- "fmt" +- "go/parser" +- "go/scanner" +- "go/token" +- "path/filepath" +- "regexp" +- "strconv" +- "strings" +- +- "golang.org/x/tools/go/packages" +- "golang.org/x/tools/gopls/internal/cache/metadata" +- "golang.org/x/tools/gopls/internal/cache/parsego" +- "golang.org/x/tools/gopls/internal/file" +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/gopls/internal/protocol/command" +- "golang.org/x/tools/gopls/internal/settings" +- "golang.org/x/tools/gopls/internal/util/bug" +- "golang.org/x/tools/internal/typesinternal" +-) +- +-// goPackagesErrorDiagnostics translates the given go/packages Error into a +-// diagnostic, using the provided metadata and filesource. +-// +-// The slice of diagnostics may be empty. +-func goPackagesErrorDiagnostics(ctx context.Context, e packages.Error, mp *metadata.Package, fs file.Source) ([]*Diagnostic, error) { +- if diag, err := parseGoListImportCycleError(ctx, e, mp, fs); err != nil { +- return nil, err +- } else if diag != nil { +- return []*Diagnostic{diag}, nil +- } +- +- // Parse error location and attempt to convert to protocol form. +- loc, err := func() (protocol.Location, error) { +- filename, line, col8 := parseGoListError(e, mp.LoadDir) +- uri := protocol.URIFromPath(filename) +- +- fh, err := fs.ReadFile(ctx, uri) +- if err != nil { +- return protocol.Location{}, err +- } +- content, err := fh.Content() +- if err != nil { +- return protocol.Location{}, err +- } +- mapper := protocol.NewMapper(uri, content) +- posn, err := mapper.LineCol8Position(line, col8) +- if err != nil { +- return protocol.Location{}, err +- } +- return protocol.Location{ +- URI: uri, +- Range: protocol.Range{ +- Start: posn, +- End: posn, +- }, +- }, nil +- }() +- +- // TODO(rfindley): in some cases the go command outputs invalid spans, for +- // example (from TestGoListErrors): +- // +- // package a +- // import +- // +- // In this case, the go command will complain about a.go:2:8, which is after +- // the trailing newline but still considered to be on the second line, most +- // likely because *token.File lacks information about newline termination. +- // +- // We could do better here by handling that case. +- if err != nil { +- // Unable to parse a valid position. +- // Apply the error to all files to be safe. 
+- var diags []*Diagnostic +- for _, uri := range mp.CompiledGoFiles { +- diags = append(diags, &Diagnostic{ +- URI: uri, +- Severity: protocol.SeverityError, +- Source: ListError, +- Message: e.Msg, +- }) +- } +- return diags, nil +- } +- return []*Diagnostic{{ +- URI: loc.URI, +- Range: loc.Range, +- Severity: protocol.SeverityError, +- Source: ListError, +- Message: e.Msg, +- }}, nil +-} +- +-func parseErrorDiagnostics(pkg *syntaxPackage, errList scanner.ErrorList) ([]*Diagnostic, error) { +- // The first parser error is likely the root cause of the problem. +- if errList.Len() <= 0 { +- return nil, fmt.Errorf("no errors in %v", errList) +- } +- e := errList[0] +- pgf, err := pkg.File(protocol.URIFromPath(e.Pos.Filename)) +- if err != nil { +- return nil, err +- } +- rng, err := pgf.Mapper.OffsetRange(e.Pos.Offset, e.Pos.Offset) +- if err != nil { +- return nil, err +- } +- return []*Diagnostic{{ +- URI: pgf.URI, +- Range: rng, +- Severity: protocol.SeverityError, +- Source: ParseError, +- Message: e.Msg, +- }}, nil +-} +- +-var importErrorRe = regexp.MustCompile(`could not import ([^\s]+)`) +-var unsupportedFeatureRe = regexp.MustCompile(`.*require.* go(\d+\.\d+) or later`) +- +-func goGetQuickFixes(haveModule bool, uri protocol.DocumentURI, pkg string) []SuggestedFix { +- // Go get only supports module mode for now. +- if !haveModule { +- return nil +- } +- title := fmt.Sprintf("go get package %v", pkg) +- cmd := command.NewGoGetPackageCommand(title, command.GoGetPackageArgs{ +- URI: uri, +- AddRequire: true, +- Pkg: pkg, +- }) +- return []SuggestedFix{SuggestedFixFromCommand(cmd, protocol.QuickFix)} +-} +- +-func editGoDirectiveQuickFix(haveModule bool, uri protocol.DocumentURI, version string) []SuggestedFix { +- // Go mod edit only supports module mode. +- if !haveModule { +- return nil +- } +- title := fmt.Sprintf("go mod edit -go=%s", version) +- cmd := command.NewEditGoDirectiveCommand(title, command.EditGoDirectiveArgs{ +- URI: uri, +- Version: version, +- }) +- return []SuggestedFix{SuggestedFixFromCommand(cmd, protocol.QuickFix)} +-} +- +-// encodeDiagnostics gob-encodes the given diagnostics. 
+-func encodeDiagnostics(srcDiags []*Diagnostic) []byte { +- var gobDiags []gobDiagnostic +- for _, srcDiag := range srcDiags { +- var gobFixes []gobSuggestedFix +- for _, srcFix := range srcDiag.SuggestedFixes { +- gobFix := gobSuggestedFix{ +- Message: srcFix.Title, +- ActionKind: srcFix.ActionKind, +- } +- for uri, srcEdits := range srcFix.Edits { +- for _, srcEdit := range srcEdits { +- gobFix.TextEdits = append(gobFix.TextEdits, gobTextEdit{ +- Location: uri.Location(srcEdit.Range), +- NewText: []byte(srcEdit.NewText), +- }) +- } +- } +- if srcCmd := srcFix.Command; srcCmd != nil { +- gobFix.Command = &gobCommand{ +- Title: srcCmd.Title, +- Command: srcCmd.Command, +- Arguments: srcCmd.Arguments, +- } +- } +- gobFixes = append(gobFixes, gobFix) +- } +- var gobRelated []gobRelatedInformation +- for _, srcRel := range srcDiag.Related { +- gobRel := gobRelatedInformation(srcRel) +- gobRelated = append(gobRelated, gobRel) +- } +- gobDiag := gobDiagnostic{ +- Location: srcDiag.URI.Location(srcDiag.Range), +- Severity: srcDiag.Severity, +- Code: srcDiag.Code, +- CodeHref: srcDiag.CodeHref, +- Source: string(srcDiag.Source), +- Message: srcDiag.Message, +- SuggestedFixes: gobFixes, +- Related: gobRelated, +- Tags: srcDiag.Tags, +- } +- gobDiags = append(gobDiags, gobDiag) +- } +- return diagnosticsCodec.Encode(gobDiags) +-} +- +-// decodeDiagnostics decodes the given gob-encoded diagnostics. +-func decodeDiagnostics(data []byte) []*Diagnostic { +- var gobDiags []gobDiagnostic +- diagnosticsCodec.Decode(data, &gobDiags) +- var srcDiags []*Diagnostic +- for _, gobDiag := range gobDiags { +- var srcFixes []SuggestedFix +- for _, gobFix := range gobDiag.SuggestedFixes { +- srcFix := SuggestedFix{ +- Title: gobFix.Message, +- ActionKind: gobFix.ActionKind, +- } +- for _, gobEdit := range gobFix.TextEdits { +- if srcFix.Edits == nil { +- srcFix.Edits = make(map[protocol.DocumentURI][]protocol.TextEdit) +- } +- srcEdit := protocol.TextEdit{ +- Range: gobEdit.Location.Range, +- NewText: string(gobEdit.NewText), +- } +- uri := gobEdit.Location.URI +- srcFix.Edits[uri] = append(srcFix.Edits[uri], srcEdit) +- } +- if gobCmd := gobFix.Command; gobCmd != nil { +- srcFix.Command = &protocol.Command{ +- Title: gobCmd.Title, +- Command: gobCmd.Command, +- Arguments: gobCmd.Arguments, +- } +- } +- srcFixes = append(srcFixes, srcFix) +- } +- var srcRelated []protocol.DiagnosticRelatedInformation +- for _, gobRel := range gobDiag.Related { +- srcRel := protocol.DiagnosticRelatedInformation(gobRel) +- srcRelated = append(srcRelated, srcRel) +- } +- srcDiag := &Diagnostic{ +- URI: gobDiag.Location.URI, +- Range: gobDiag.Location.Range, +- Severity: gobDiag.Severity, +- Code: gobDiag.Code, +- CodeHref: gobDiag.CodeHref, +- Source: DiagnosticSource(gobDiag.Source), +- Message: gobDiag.Message, +- Tags: gobDiag.Tags, +- Related: srcRelated, +- SuggestedFixes: srcFixes, +- } +- srcDiags = append(srcDiags, srcDiag) +- } +- return srcDiags +-} +- +-// toSourceDiagnostic converts a gobDiagnostic to "source" form. 
+-func toSourceDiagnostic(srcAnalyzer *settings.Analyzer, gobDiag *gobDiagnostic) *Diagnostic { +- var related []protocol.DiagnosticRelatedInformation +- for _, gobRelated := range gobDiag.Related { +- related = append(related, protocol.DiagnosticRelatedInformation(gobRelated)) +- } +- +- diag := &Diagnostic{ +- URI: gobDiag.Location.URI, +- Range: gobDiag.Location.Range, +- Severity: srcAnalyzer.Severity(), +- Code: gobDiag.Code, +- CodeHref: gobDiag.CodeHref, +- Source: DiagnosticSource(gobDiag.Source), +- Message: gobDiag.Message, +- Related: related, +- Tags: srcAnalyzer.Tags(), +- } +- +- // We cross the set of fixes (whether edit- or command-based) +- // with the set of kinds, as a single fix may represent more +- // than one kind of action (e.g. refactor, quickfix, fixall), +- // each corresponding to a distinct client UI element +- // or operation. +- kinds := srcAnalyzer.ActionKinds() +- if len(kinds) == 0 { +- kinds = []protocol.CodeActionKind{protocol.QuickFix} +- } +- +- var fixes []SuggestedFix +- for _, fix := range gobDiag.SuggestedFixes { +- if len(fix.TextEdits) > 0 { +- // Accumulate edit-based fixes supplied by the diagnostic itself. +- edits := make(map[protocol.DocumentURI][]protocol.TextEdit) +- for _, e := range fix.TextEdits { +- uri := e.Location.URI +- edits[uri] = append(edits[uri], protocol.TextEdit{ +- Range: e.Location.Range, +- NewText: string(e.NewText), +- }) +- } +- for _, kind := range kinds { +- fixes = append(fixes, SuggestedFix{ +- Title: fix.Message, +- Edits: edits, +- ActionKind: kind, +- }) +- } +- +- } else { +- // Accumulate command-based fixes, whose edits +- // are not provided by the analyzer but are computed on demand +- // by logic "adjacent to" the analyzer. +- // +- // The analysis.Diagnostic.Category is used as the fix name. +- cmd := command.NewApplyFixCommand(fix.Message, command.ApplyFixArgs{ +- Fix: diag.Code, +- Location: gobDiag.Location, +- }) +- for _, kind := range kinds { +- fixes = append(fixes, SuggestedFixFromCommand(cmd, kind)) +- } +- +- // Ensure that the analyzer specifies a category for all its no-edit fixes. +- // This is asserted by analysistest.RunWithSuggestedFixes, but there +- // may be gaps in test coverage. +- if diag.Code == "" || diag.Code == "default" { +- bug.Reportf("missing Diagnostic.Code: %#v", *diag) +- } +- } +- } +- diag.SuggestedFixes = fixes +- +- // If the fixes only delete code, assume that the diagnostic is reporting dead code. +- if onlyDeletions(diag.SuggestedFixes) { +- diag.Tags = append(diag.Tags, protocol.Unnecessary) +- } +- return diag +-} +- +-// onlyDeletions returns true if fixes is non-empty and all of the suggested +-// fixes are deletions. +-func onlyDeletions(fixes []SuggestedFix) bool { +- for _, fix := range fixes { +- if fix.Command != nil { +- return false +- } +- for _, edits := range fix.Edits { +- for _, edit := range edits { +- if edit.NewText != "" { +- return false +- } +- if protocol.ComparePosition(edit.Range.Start, edit.Range.End) == 0 { +- return false +- } +- } +- } +- } +- return len(fixes) > 0 +-} +- +-func typesCodeHref(linkTarget string, code typesinternal.ErrorCode) string { +- return BuildLink(linkTarget, "golang.org/x/tools/internal/typesinternal", code.String()) +-} +- +-// BuildLink constructs a URL with the given target, path, and anchor. 
+-func BuildLink(target, path, anchor string) protocol.URI { +- link := fmt.Sprintf("https://%s/%s", target, path) +- if anchor == "" { +- return link +- } +- return link + "#" + anchor +-} +- +-func parseGoListError(e packages.Error, dir string) (filename string, line, col8 int) { +- input := e.Pos +- if input == "" { +- // No position. Attempt to parse one out of a +- // go list error of the form "file:line:col: +- // message" by stripping off the message. +- input = strings.TrimSpace(e.Msg) +- if i := strings.Index(input, ": "); i >= 0 { +- input = input[:i] +- } +- } +- +- filename, line, col8 = splitFileLineCol(input) +- if !filepath.IsAbs(filename) { +- filename = filepath.Join(dir, filename) +- } +- return filename, line, col8 +-} +- +-// splitFileLineCol splits s into "filename:line:col", +-// where line and col consist of decimal digits. +-func splitFileLineCol(s string) (file string, line, col8 int) { +- // Beware that the filename may contain colon on Windows. +- +- // stripColonDigits removes a ":%d" suffix, if any. +- stripColonDigits := func(s string) (rest string, num int) { +- if i := strings.LastIndex(s, ":"); i >= 0 { +- if v, err := strconv.ParseInt(s[i+1:], 10, 32); err == nil { +- return s[:i], int(v) +- } +- } +- return s, -1 +- } +- +- // strip col ":%d" +- s, n1 := stripColonDigits(s) +- if n1 < 0 { +- return s, 1, 1 // "filename" +- } +- +- // strip line ":%d" +- s, n2 := stripColonDigits(s) +- if n2 < 0 { +- return s, n1, 1 // "filename:line" +- } +- +- return s, n2, n1 // "filename:line:col" +-} +- +-// parseGoListImportCycleError attempts to parse the given go/packages error as +-// an import cycle, returning a diagnostic if successful. +-// +-// If the error is not detected as an import cycle error, it returns nil, nil. +-func parseGoListImportCycleError(ctx context.Context, e packages.Error, mp *metadata.Package, fs file.Source) (*Diagnostic, error) { +- re := regexp.MustCompile(`(.*): import stack: \[(.+)\]`) +- matches := re.FindStringSubmatch(strings.TrimSpace(e.Msg)) +- if len(matches) < 3 { +- return nil, nil +- } +- msg := matches[1] +- importList := strings.Split(matches[2], " ") +- // Since the error is relative to the current package. The import that is causing +- // the import cycle error is the second one in the list. +- if len(importList) < 2 { +- return nil, nil +- } +- // Imports have quotation marks around them. +- circImp := strconv.Quote(importList[1]) +- for _, uri := range mp.CompiledGoFiles { +- pgf, err := parseGoURI(ctx, fs, uri, parsego.Header) +- if err != nil { +- return nil, err +- } +- // Search file imports for the import that is causing the import cycle. +- for _, imp := range pgf.File.Imports { +- if imp.Path.Value == circImp { +- rng, err := pgf.NodeRange(imp) +- if err != nil { +- return nil, nil +- } +- +- return &Diagnostic{ +- URI: pgf.URI, +- Range: rng, +- Severity: protocol.SeverityError, +- Source: ListError, +- Message: msg, +- }, nil +- } +- } +- } +- return nil, nil +-} +- +-// parseGoURI is a helper to parse the Go file at the given URI from the file +-// source fs. The resulting syntax and token.File belong to an ephemeral, +-// encapsulated FileSet, so this file stands only on its own: it's not suitable +-// to use in a list of file of a package, for example. +-// +-// It returns an error if the file could not be read. +-// +-// TODO(rfindley): eliminate this helper. 
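Editor's note: parseGoListError and splitFileLineCol above recover a position from a go list message of the form file:line:col, stripping numeric suffixes from the right so that a Windows drive-letter colon is not mistaken for a separator. A small sketch of that right-to-left split, with fabricated paths:

```go
package main

import (
	"fmt"
	"strconv"
	"strings"
)

// splitFileLineCol peels ":col" and ":line" off the right of s, which
// keeps a leading "C:" drive letter intact in Windows-style paths.
func splitFileLineCol(s string) (file string, line, col int) {
	strip := func(s string) (string, int) {
		if i := strings.LastIndex(s, ":"); i >= 0 {
			if n, err := strconv.Atoi(s[i+1:]); err == nil {
				return s[:i], n
			}
		}
		return s, -1
	}
	s, n1 := strip(s)
	if n1 < 0 {
		return s, 1, 1
	}
	s, n2 := strip(s)
	if n2 < 0 {
		return s, n1, 1
	}
	return s, n2, n1
}

func main() {
	fmt.Println(splitFileLineCol(`attributes.go:13:1`)) // attributes.go 13 1
	fmt.Println(splitFileLineCol(`C:\foo\bar.go:13`))   // C:\foo\bar.go 13 1
}
```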
+-func parseGoURI(ctx context.Context, fs file.Source, uri protocol.DocumentURI, mode parser.Mode) (*parsego.File, error) { +- fh, err := fs.ReadFile(ctx, uri) +- if err != nil { +- return nil, err +- } +- return parseGoImpl(ctx, token.NewFileSet(), fh, mode, false) +-} +- +-// parseModURI is a helper to parse the Mod file at the given URI from the file +-// source fs. +-// +-// It returns an error if the file could not be read. +-func parseModURI(ctx context.Context, fs file.Source, uri protocol.DocumentURI) (*ParsedModule, error) { +- fh, err := fs.ReadFile(ctx, uri) +- if err != nil { +- return nil, err +- } +- return parseModImpl(ctx, fh) +-} +diff -urN a/gopls/internal/cache/errors_test.go b/gopls/internal/cache/errors_test.go +--- a/gopls/internal/cache/errors_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/cache/errors_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,128 +0,0 @@ +-// Copyright 2019 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package cache +- +-import ( +- "encoding/json" +- "strings" +- "testing" +- +- "github.com/google/go-cmp/cmp" +- "golang.org/x/tools/go/packages" +- "golang.org/x/tools/gopls/internal/protocol" +-) +- +-func TestParseErrorMessage(t *testing.T) { +- tests := []struct { +- name string +- in string +- expectedFileName string +- expectedLine int // (missing => 1) +- expectedColumn int // (missing => 1) +- }{ +- { +- name: "from go list output", +- in: "\nattributes.go:13:1: expected 'package', found 'type'", +- expectedFileName: "attributes.go", +- expectedLine: 13, +- expectedColumn: 1, +- }, +- { +- name: "windows driver letter", +- in: "C:\\foo\\bar.go:13: message", +- expectedFileName: "bar.go", +- expectedLine: 13, +- expectedColumn: 1, +- }, +- } +- +- for _, tt := range tests { +- t.Run(tt.name, func(t *testing.T) { +- fn, line, col8 := parseGoListError(packages.Error{Msg: tt.in}, ".") +- +- if !strings.HasSuffix(fn, tt.expectedFileName) { +- t.Errorf("expected filename with suffix %v but got %v", tt.expectedFileName, fn) +- } +- if line != tt.expectedLine { +- t.Errorf("expected line %v but got %v", tt.expectedLine, line) +- } +- if col8 != tt.expectedColumn { +- t.Errorf("expected col %v but got %v", tt.expectedLine, col8) +- } +- }) +- } +-} +- +-func TestDiagnosticEncoding(t *testing.T) { +- diags := []*Diagnostic{ +- {}, // empty +- { +- URI: "file///foo", +- Range: protocol.Range{ +- Start: protocol.Position{Line: 4, Character: 2}, +- End: protocol.Position{Line: 6, Character: 7}, +- }, +- Severity: protocol.SeverityWarning, +- Code: "red", +- CodeHref: "https://go.dev", +- Source: "test", +- Message: "something bad happened", +- Tags: []protocol.DiagnosticTag{81}, +- Related: []protocol.DiagnosticRelatedInformation{ +- { +- Location: protocol.Location{ +- URI: "file:///other", +- Range: protocol.Range{ +- Start: protocol.Position{Line: 3, Character: 6}, +- End: protocol.Position{Line: 4, Character: 9}, +- }, +- }, +- Message: "psst, over here", +- }, +- }, +- +- // Fields below are used internally to generate quick fixes. They aren't +- // part of the LSP spec and don't leave the server. 
+- SuggestedFixes: []SuggestedFix{ +- { +- Title: "fix it!", +- Edits: map[protocol.DocumentURI][]protocol.TextEdit{ +- "file:///foo": {{ +- Range: protocol.Range{ +- Start: protocol.Position{Line: 4, Character: 2}, +- End: protocol.Position{Line: 6, Character: 7}, +- }, +- NewText: "abc", +- }}, +- "file:///other": {{ +- Range: protocol.Range{ +- Start: protocol.Position{Line: 4, Character: 2}, +- End: protocol.Position{Line: 6, Character: 7}, +- }, +- NewText: "!@#!", +- }}, +- }, +- Command: &protocol.Command{ +- Title: "run a command", +- Command: "gopls.fix", +- Arguments: []json.RawMessage{json.RawMessage(`{"a":1}`)}, +- }, +- ActionKind: protocol.QuickFix, +- }, +- }, +- }, +- { +- URI: "file//bar", +- // other fields tested above +- }, +- } +- +- data := encodeDiagnostics(diags) +- diags2 := decodeDiagnostics(data) +- +- if diff := cmp.Diff(diags, diags2); diff != "" { +- t.Errorf("decoded diagnostics do not match (-original +decoded):\n%s", diff) +- } +-} +diff -urN a/gopls/internal/cache/filemap.go b/gopls/internal/cache/filemap.go +--- a/gopls/internal/cache/filemap.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/cache/filemap.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,152 +0,0 @@ +-// Copyright 2022 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package cache +- +-import ( +- "iter" +- "path/filepath" +- +- "golang.org/x/tools/gopls/internal/file" +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/gopls/internal/util/persistent" +-) +- +-// A fileMap maps files in the snapshot, with some additional bookkeeping: +-// It keeps track of overlays as well as directories containing any observed +-// file. +-type fileMap struct { +- files *persistent.Map[protocol.DocumentURI, file.Handle] +- overlays *persistent.Map[protocol.DocumentURI, *overlay] // the subset of files that are overlays +- dirs *persistent.Set[string] // all dirs containing files; if nil, dirs have not been initialized +-} +- +-func newFileMap() *fileMap { +- return &fileMap{ +- files: new(persistent.Map[protocol.DocumentURI, file.Handle]), +- overlays: new(persistent.Map[protocol.DocumentURI, *overlay]), +- dirs: new(persistent.Set[string]), +- } +-} +- +-// clone creates a copy of the fileMap, incorporating the changes specified by +-// the changes map. +-func (m *fileMap) clone(changes map[protocol.DocumentURI]file.Handle) *fileMap { +- m2 := &fileMap{ +- files: m.files.Clone(), +- overlays: m.overlays.Clone(), +- } +- if m.dirs != nil { +- m2.dirs = m.dirs.Clone() +- } +- +- // Handle file changes. +- // +- // Note, we can't simply delete the file unconditionally and let it be +- // re-read by the snapshot, as (1) the snapshot must always observe all +- // overlays, and (2) deleting a file forces directories to be reevaluated, as +- // it may be the last file in a directory. We want to avoid that work in the +- // common case where a file has simply changed. +- // +- // For that reason, we also do this in two passes, processing deletions +- // first, as a set before a deletion would result in pointless work. 
+- for uri, fh := range changes { +- if !fileExists(fh) { +- m2.delete(uri) +- } +- } +- for uri, fh := range changes { +- if fileExists(fh) { +- m2.set(uri, fh) +- } +- } +- return m2 +-} +- +-func (m *fileMap) destroy() { +- m.files.Destroy() +- m.overlays.Destroy() +- if m.dirs != nil { +- m.dirs.Destroy() +- } +-} +- +-// get returns the file handle mapped by the given key, or (nil, false) if the +-// key is not present. +-func (m *fileMap) get(key protocol.DocumentURI) (file.Handle, bool) { +- return m.files.Get(key) +-} +- +-// all returns the sequence of (uri, fh) entries in the map. +-func (m *fileMap) all() iter.Seq2[protocol.DocumentURI, file.Handle] { +- return m.files.All() +-} +- +-// set stores the given file handle for key, updating overlays and directories +-// accordingly. +-func (m *fileMap) set(key protocol.DocumentURI, fh file.Handle) { +- m.files.Set(key, fh, nil) +- +- // update overlays +- if o, ok := fh.(*overlay); ok { +- m.overlays.Set(key, o, nil) +- } else { +- // Setting a non-overlay must delete the corresponding overlay, to preserve +- // the accuracy of the overlay set. +- m.overlays.Delete(key) +- } +- +- // update dirs, if they have been computed +- if m.dirs != nil { +- m.addDirs(key) +- } +-} +- +-// addDirs adds all directories containing u to the dirs set. +-func (m *fileMap) addDirs(u protocol.DocumentURI) { +- dir := u.DirPath() +- for dir != "" && !m.dirs.Contains(dir) { +- m.dirs.Add(dir) +- dir = filepath.Dir(dir) +- } +-} +- +-// delete removes a file from the map, and updates overlays and dirs +-// accordingly. +-func (m *fileMap) delete(key protocol.DocumentURI) { +- m.files.Delete(key) +- m.overlays.Delete(key) +- +- // Deleting a file may cause the set of dirs to shrink; therefore we must +- // re-evaluate the dir set. +- // +- // Do this lazily, to avoid work if there are multiple deletions in a row. +- if m.dirs != nil { +- m.dirs.Destroy() +- m.dirs = nil +- } +-} +- +-// getOverlays returns a new unordered array of overlay files. +-func (m *fileMap) getOverlays() []*overlay { +- var overlays []*overlay +- for _, o := range m.overlays.All() { +- overlays = append(overlays, o) +- } +- return overlays +-} +- +-// getDirs reports returns the set of dirs observed by the fileMap. +-// +-// This operation mutates the fileMap. +-// The result must not be mutated by the caller. +-func (m *fileMap) getDirs() *persistent.Set[string] { +- if m.dirs == nil { +- m.dirs = new(persistent.Set[string]) +- for uri := range m.files.All() { +- m.addDirs(uri) +- } +- } +- return m.dirs +-} +diff -urN a/gopls/internal/cache/filemap_test.go b/gopls/internal/cache/filemap_test.go +--- a/gopls/internal/cache/filemap_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/cache/filemap_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,112 +0,0 @@ +-// Copyright 2023 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. 
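Editor's note: the fileMap above keeps a directory index that is cheap to extend on set but may shrink on delete, so deletes simply invalidate it and getDirs rebuilds it lazily. A standalone sketch of that lazy-invalidation shape; it is simplified, non-concurrent, and does not use the gopls persistent types.

```go
package main

import (
	"fmt"
	"path/filepath"
)

// dirIndex lazily tracks the set of directories containing known files:
// adds update the index in place, while deletes drop it so it is
// rebuilt on the next query.
type dirIndex struct {
	files map[string]bool
	dirs  map[string]bool // nil means "needs rebuilding"
}

func (d *dirIndex) set(path string) {
	d.files[path] = true
	if d.dirs != nil {
		d.addDirs(path)
	}
}

func (d *dirIndex) delete(path string) {
	delete(d.files, path)
	d.dirs = nil // a delete may shrink the dir set; recompute lazily
}

func (d *dirIndex) addDirs(path string) {
	for dir := filepath.Dir(path); dir != "" && !d.dirs[dir]; dir = filepath.Dir(dir) {
		d.dirs[dir] = true
	}
}

func (d *dirIndex) getDirs() map[string]bool {
	if d.dirs == nil {
		d.dirs = map[string]bool{}
		for f := range d.files {
			d.addDirs(f)
		}
	}
	return d.dirs
}

func main() {
	d := &dirIndex{files: map[string]bool{}}
	d.set("/a/b/x.go")
	d.set("/c/y.go")
	fmt.Println(len(d.getDirs())) // "/", "/a", "/a/b", "/c" -> 4
	d.delete("/a/b/x.go")
	fmt.Println(len(d.getDirs())) // rebuilt lazily: "/", "/c" -> 2
}
```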
+- +-package cache +- +-import ( +- "path/filepath" +- "sort" +- "testing" +- +- "github.com/google/go-cmp/cmp" +- "golang.org/x/tools/gopls/internal/file" +- "golang.org/x/tools/gopls/internal/protocol" +-) +- +-func TestFileMap(t *testing.T) { +- const ( +- set = iota +- del +- ) +- type op struct { +- op int // set or remove +- path string +- overlay bool +- } +- tests := []struct { +- label string +- ops []op +- wantFiles []string +- wantOverlays []string +- wantDirs []string +- }{ +- {"empty", nil, nil, nil, nil}, +- {"singleton", []op{ +- {set, "/a/b", false}, +- }, []string{"/a/b"}, nil, []string{"/", "/a"}}, +- {"overlay", []op{ +- {set, "/a/b", true}, +- }, []string{"/a/b"}, []string{"/a/b"}, []string{"/", "/a"}}, +- {"replace overlay", []op{ +- {set, "/a/b", true}, +- {set, "/a/b", false}, +- }, []string{"/a/b"}, nil, []string{"/", "/a"}}, +- {"multi dir", []op{ +- {set, "/a/b", false}, +- {set, "/c/d", false}, +- }, []string{"/a/b", "/c/d"}, nil, []string{"/", "/a", "/c"}}, +- {"empty dir", []op{ +- {set, "/a/b", false}, +- {set, "/c/d", false}, +- {del, "/a/b", false}, +- }, []string{"/c/d"}, nil, []string{"/", "/c"}}, +- } +- +- // Normalize paths for windows compatibility. +- normalize := func(path string) string { +- y := filepath.ToSlash(path) +- // Windows paths may start with a drive letter +- if len(y) > 2 && y[1] == ':' && y[0] >= 'A' && y[0] <= 'Z' { +- y = y[2:] +- } +- return y +- } +- +- for _, test := range tests { +- t.Run(test.label, func(t *testing.T) { +- m := newFileMap() +- for _, op := range test.ops { +- uri := protocol.URIFromPath(filepath.FromSlash(op.path)) +- switch op.op { +- case set: +- var fh file.Handle +- if op.overlay { +- fh = &overlay{uri: uri} +- } else { +- fh = &diskFile{uri: uri} +- } +- m.set(uri, fh) +- case del: +- m.delete(uri) +- } +- } +- +- var gotFiles []string +- for uri := range m.all() { +- gotFiles = append(gotFiles, normalize(uri.Path())) +- } +- sort.Strings(gotFiles) +- if diff := cmp.Diff(test.wantFiles, gotFiles); diff != "" { +- t.Errorf("Files mismatch (-want +got):\n%s", diff) +- } +- +- var gotOverlays []string +- for _, o := range m.getOverlays() { +- gotOverlays = append(gotOverlays, normalize(o.URI().Path())) +- } +- if diff := cmp.Diff(test.wantOverlays, gotOverlays); diff != "" { +- t.Errorf("Overlays mismatch (-want +got):\n%s", diff) +- } +- +- var gotDirs []string +- for dir := range m.getDirs().All() { +- gotDirs = append(gotDirs, normalize(dir)) +- } +- sort.Strings(gotDirs) +- if diff := cmp.Diff(test.wantDirs, gotDirs); diff != "" { +- t.Errorf("Dirs mismatch (-want +got):\n%s", diff) +- } +- }) +- } +-} +diff -urN a/gopls/internal/cache/filterer.go b/gopls/internal/cache/filterer.go +--- a/gopls/internal/cache/filterer.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/cache/filterer.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,93 +0,0 @@ +-// Copyright 2023 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package cache +- +-import ( +- "path" +- "path/filepath" +- "regexp" +- "strings" +-) +- +-// PathIncludeFunc creates a function that determines if a given file path +-// should be included based on a set of inclusion/exclusion rules. +-// +-// The `rules` parameter is a slice of strings, where each string represents a +-// filtering rule. Each rule consists of an operator (`+` for inclusion, `-` +-// for exclusion) followed by a path pattern. 
See more detail of rules syntax +-// at [settings.BuildOptions.DirectoryFilters]. +-// +-// Rules are evaluated in order, and the last matching rule determines +-// whether a path is included or excluded. +-// +-// Examples: +-// - []{"-foo"}: Exclude "foo" at the current depth. +-// - []{"-**foo"}: Exclude "foo" at any depth. +-// - []{"+bar"}: Include "bar" at the current depth. +-// - []{"-foo", "+foo/**/bar"}: Exclude all "foo" at current depth except +-// directory "bar" under "foo" at any depth. +-func PathIncludeFunc(rules []string) func(string) bool { +- var matchers []*regexp.Regexp +- var included []bool +- for _, filter := range rules { +- filter = path.Clean(filepath.ToSlash(filter)) +- // TODO(dungtuanle): fix: validate [+-] prefix. +- op, prefix := filter[0], filter[1:] +- // convertFilterToRegexp adds "/" at the end of prefix to handle cases +- // where a filter is a prefix of another filter. +- // For example, it prevents [+foobar, -foo] from excluding "foobar". +- matchers = append(matchers, convertFilterToRegexp(filepath.ToSlash(prefix))) +- included = append(included, op == '+') +- } +- +- return func(path string) bool { +- // Ensure leading and trailing slashes. +- if !strings.HasPrefix(path, "/") { +- path = "/" + path +- } +- if !strings.HasSuffix(path, "/") { +- path += "/" +- } +- +- // TODO(adonovan): opt: iterate in reverse and break at first match. +- include := true +- for i, filter := range matchers { +- if filter.MatchString(path) { +- include = included[i] // last match wins +- } +- } +- return include +- } +-} +- +-// convertFilterToRegexp replaces glob-like operator substrings in a string file path to their equivalent regex forms. +-// Supporting glob-like operators: +-// - **: match zero or more complete path segments +-func convertFilterToRegexp(filter string) *regexp.Regexp { +- if filter == "" { +- return regexp.MustCompile(".*") +- } +- var ret strings.Builder +- ret.WriteString("^/") +- segs := strings.SplitSeq(filter, "/") +- for seg := range segs { +- // Inv: seg != "" since path is clean. +- if seg == "**" { +- ret.WriteString(".*") +- } else { +- ret.WriteString(regexp.QuoteMeta(seg)) +- } +- ret.WriteString("/") +- } +- pattern := ret.String() +- +- // Remove unnecessary "^.*" prefix, which increased +- // BenchmarkWorkspaceSymbols time by ~20% (even though +- // filter CPU time increased by only by ~2.5%) when the +- // default filter was changed to "**/node_modules". +- pattern = strings.TrimPrefix(pattern, "^/.*") +- +- return regexp.MustCompile(pattern) +-} +diff -urN a/gopls/internal/cache/fs_memoized.go b/gopls/internal/cache/fs_memoized.go +--- a/gopls/internal/cache/fs_memoized.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/cache/fs_memoized.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,174 +0,0 @@ +-// Copyright 2023 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package cache +- +-import ( +- "context" +- "os" +- "sync" +- "time" +- +- "golang.org/x/tools/gopls/internal/file" +- "golang.org/x/tools/gopls/internal/label" +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/internal/event" +- "golang.org/x/tools/internal/robustio" +-) +- +-// A memoizedFS is a file source that memoizes reads, to reduce IO. +-type memoizedFS struct { +- mu sync.Mutex +- +- // filesByID maps existing file inodes to the result of a read. +- // (The read may have failed, e.g. due to EACCES or a delete between stat+read.) 
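Editor's note: PathIncludeFunc above compiles each +/- rule to a regexp and evaluates them in order with last-match-wins semantics. A reduced sketch of the same evaluation, using plain prefix matching instead of the ** glob translation; the rule set is invented for illustration.

```go
package main

import (
	"fmt"
	"strings"
)

// includeFunc builds a path predicate from ordered "+prefix"/"-prefix"
// rules; the last rule that matches a path decides its fate.
func includeFunc(rules []string) func(string) bool {
	return func(path string) bool {
		include := true // paths matching no rule are included
		for _, r := range rules {
			op, prefix := r[0], r[1:]
			if strings.HasPrefix(path, prefix) {
				include = op == '+' // last match wins
			}
		}
		return include
	}
}

func main() {
	f := includeFunc([]string{"-foo", "+foo/bar"})
	fmt.Println(f("foo/baz.go"))   // false: excluded by -foo
	fmt.Println(f("foo/bar/x.go")) // true: re-included by +foo/bar
	fmt.Println(f("other/y.go"))   // true: no rule matched
}
```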
+- // Each slice is a non-empty list of aliases: different URIs. +- filesByID map[robustio.FileID][]*diskFile +-} +- +-func newMemoizedFS() *memoizedFS { +- return &memoizedFS{filesByID: make(map[robustio.FileID][]*diskFile)} +-} +- +-// A diskFile is a file in the filesystem, or a failure to read one. +-// It implements the file.Source interface. +-type diskFile struct { +- uri protocol.DocumentURI +- modTime time.Time +- content []byte +- hash file.Hash +- err error +-} +- +-func (h *diskFile) String() string { return h.uri.Path() } +- +-func (h *diskFile) URI() protocol.DocumentURI { return h.uri } +- +-func (h *diskFile) Identity() file.Identity { +- return file.Identity{ +- URI: h.uri, +- Hash: h.hash, +- } +-} +- +-func (h *diskFile) SameContentsOnDisk() bool { return true } +-func (h *diskFile) Version() int32 { return 0 } +-func (h *diskFile) Content() ([]byte, error) { return h.content, h.err } +-func (h *diskFile) ModTime() (time.Time, error) { return h.modTime, h.err } +- +-// ReadFile stats and (maybe) reads the file, updates the cache, and returns it. +-func (fs *memoizedFS) ReadFile(ctx context.Context, uri protocol.DocumentURI) (file.Handle, error) { +- id, mtime, err := robustio.GetFileID(uri.Path()) +- if err != nil { +- // file does not exist +- return &diskFile{ +- err: err, +- uri: uri, +- }, nil +- } +- +- // We check if the file has changed by comparing modification times. Notably, +- // this is an imperfect heuristic as various systems have low resolution +- // mtimes (as much as 1s on WSL or s390x builders), so we only cache +- // filehandles if mtime is old enough to be reliable, meaning that we don't +- // expect a subsequent write to have the same mtime. +- // +- // The coarsest mtime precision we've seen in practice is 1s, so consider +- // mtime to be unreliable if it is less than 2s old. Capture this before +- // doing anything else. +- recentlyModified := time.Since(mtime) < 2*time.Second +- +- fs.mu.Lock() +- fhs, ok := fs.filesByID[id] +- if ok && fhs[0].modTime.Equal(mtime) { +- var fh *diskFile +- // We have already seen this file and it has not changed. +- for _, h := range fhs { +- if h.uri == uri { +- fh = h +- break +- } +- } +- // No file handle for this exact URI. Create an alias, but share content. +- if fh == nil { +- newFH := *fhs[0] +- newFH.uri = uri +- fh = &newFH +- fhs = append(fhs, fh) +- fs.filesByID[id] = fhs +- } +- fs.mu.Unlock() +- return fh, nil +- } +- fs.mu.Unlock() +- +- // Unknown file, or file has changed. Read (or re-read) it. +- fh, err := readFile(ctx, uri, mtime) // ~25us +- if err != nil { +- return nil, err // e.g. cancelled (not: read failed) +- } +- +- fs.mu.Lock() +- if !recentlyModified { +- fs.filesByID[id] = []*diskFile{fh} +- } else { +- delete(fs.filesByID, id) +- } +- fs.mu.Unlock() +- return fh, nil +-} +- +-// fileStats returns information about the set of files stored in fs. It is +-// intended for debugging only. +-func (fs *memoizedFS) fileStats() (files, largest, errs int) { +- fs.mu.Lock() +- defer fs.mu.Unlock() +- +- files = len(fs.filesByID) +- largest = 0 +- errs = 0 +- +- for _, files := range fs.filesByID { +- rep := files[0] +- if len(rep.content) > largest { +- largest = len(rep.content) +- } +- if rep.err != nil { +- errs++ +- } +- } +- return files, largest, errs +-} +- +-// ioLimit limits the number of parallel file reads per process. 
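Editor's note: memoizedFS.ReadFile above only memoizes a read when the file's mtime is at least a couple of seconds old, because coarse mtime resolution makes very recent timestamps unreliable change detectors. A stripped-down sketch of that guard; the threshold and types are simplified and not the gopls implementation.

```go
package main

import (
	"fmt"
	"os"
	"time"
)

type cachedRead struct {
	modTime time.Time
	content []byte
}

var cache = map[string]cachedRead{}

// readMemoized re-reads path when the cached mtime differs, and refuses
// to cache at all while the mtime is too fresh to be trustworthy.
func readMemoized(path string) ([]byte, error) {
	fi, err := os.Stat(path)
	if err != nil {
		return nil, err
	}
	if c, ok := cache[path]; ok && c.modTime.Equal(fi.ModTime()) {
		return c.content, nil // unchanged since last read
	}
	content, err := os.ReadFile(path)
	if err != nil {
		return nil, err
	}
	// Coarse filesystems may report 1s mtime resolution, so a very recent
	// mtime could hide a subsequent write; skip caching in that window.
	if time.Since(fi.ModTime()) >= 2*time.Second {
		cache[path] = cachedRead{modTime: fi.ModTime(), content: content}
	} else {
		delete(cache, path)
	}
	return content, nil
}

func main() {
	f, _ := os.CreateTemp("", "demo-*.txt")
	defer os.Remove(f.Name())
	f.WriteString("hello")
	f.Close()

	b, err := readMemoized(f.Name())
	fmt.Println(string(b), err, len(cache)) // hello <nil> 0 (too fresh to cache)
}
```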
+-var ioLimit = make(chan struct{}, 128) +- +-func readFile(ctx context.Context, uri protocol.DocumentURI, mtime time.Time) (*diskFile, error) { +- select { +- case ioLimit <- struct{}{}: +- case <-ctx.Done(): +- return nil, ctx.Err() +- } +- defer func() { <-ioLimit }() +- +- ctx, done := event.Start(ctx, "cache.readFile", label.File.Of(uri.Path())) +- _ = ctx +- defer done() +- +- // It is possible that a race causes us to read a file with different file +- // ID, or whose mtime differs from the given mtime. However, in these cases +- // we expect the client to notify of a subsequent file change, and the file +- // content should be eventually consistent. +- content, err := os.ReadFile(uri.Path()) // ~20us +- if err != nil { +- content = nil // just in case +- } +- return &diskFile{ +- modTime: mtime, +- uri: uri, +- content: content, +- hash: file.HashOf(content), +- err: err, +- }, nil +-} +diff -urN a/gopls/internal/cache/fs_overlay.go b/gopls/internal/cache/fs_overlay.go +--- a/gopls/internal/cache/fs_overlay.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/cache/fs_overlay.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,84 +0,0 @@ +-// Copyright 2023 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package cache +- +-import ( +- "context" +- "sync" +- "time" +- +- "golang.org/x/tools/gopls/internal/file" +- "golang.org/x/tools/gopls/internal/protocol" +-) +- +-// An overlayFS is a file.Source that keeps track of overlays on top of a +-// delegate FileSource. +-type overlayFS struct { +- delegate file.Source +- +- mu sync.Mutex +- overlays map[protocol.DocumentURI]*overlay +-} +- +-func newOverlayFS(delegate file.Source) *overlayFS { +- return &overlayFS{ +- delegate: delegate, +- overlays: make(map[protocol.DocumentURI]*overlay), +- } +-} +- +-// Overlays returns a new unordered array of overlays. +-func (fs *overlayFS) Overlays() []*overlay { +- fs.mu.Lock() +- defer fs.mu.Unlock() +- overlays := make([]*overlay, 0, len(fs.overlays)) +- for _, overlay := range fs.overlays { +- overlays = append(overlays, overlay) +- } +- return overlays +-} +- +-func (fs *overlayFS) ReadFile(ctx context.Context, uri protocol.DocumentURI) (file.Handle, error) { +- fs.mu.Lock() +- overlay, ok := fs.overlays[uri] +- fs.mu.Unlock() +- if ok { +- return overlay, nil +- } +- return fs.delegate.ReadFile(ctx, uri) +-} +- +-// An overlay is a file open in the editor. It may have unsaved edits. +-// It implements the file.Handle interface, and the implicit contract +-// of the debug.FileTmpl template. +-type overlay struct { +- uri protocol.DocumentURI +- content []byte +- modTime time.Time +- hash file.Hash +- version int32 +- kind file.Kind +- +- // saved is true if a file matches the state on disk, +- // and therefore does not need to be part of the overlay sent to go/packages. 
+- saved bool
+-}
+-
+-func (o *overlay) String() string { return o.uri.Path() }
+-
+-func (o *overlay) URI() protocol.DocumentURI { return o.uri }
+-
+-func (o *overlay) Identity() file.Identity {
+- return file.Identity{
+- URI: o.uri,
+- Hash: o.hash,
+- }
+-}
+-
+-func (o *overlay) Content() ([]byte, error) { return o.content, nil }
+-func (o *overlay) ModTime() (time.Time, error) { return o.modTime, nil }
+-func (o *overlay) Version() int32 { return o.version }
+-func (o *overlay) SameContentsOnDisk() bool { return o.saved }
+-func (o *overlay) Kind() file.Kind { return o.kind }
+diff -urN a/gopls/internal/cache/future.go b/gopls/internal/cache/future.go
+--- a/gopls/internal/cache/future.go 2000-01-01 00:00:00.000000000 -0000
++++ b/gopls/internal/cache/future.go 1969-12-31 18:00:00.000000000 -0600
+@@ -1,136 +0,0 @@
+-// Copyright 2024 The Go Authors. All rights reserved.
+-// Use of this source code is governed by a BSD-style
+-// license that can be found in the LICENSE file.
+-
+-package cache
+-
+-import (
+- "context"
+- "sync"
+-)
+-
+-// A futureCache is a key-value store of "futures", which are values that might
+-// not yet be processed. By accessing values using [futureCache.get], the
+-// caller may share work with other goroutines that require the same key.
+-//
+-// This is a relatively common pattern, though this implementation includes the
+-// following two non-standard additions:
+-//
+-// 1. futures are cancellable and retryable. If the context being used to
+-// compute the future is cancelled, it will abort the computation. If other
+-// goroutines are awaiting the future, they will acquire the right to compute
+-// it, and start anew.
+-// 2. futures may be either persistent or transient. Persistent futures are
+-// the standard pattern: the results of the computation are preserved for
+-// the lifetime of the cache. However, if the cache is transient
+-// (persistent=false), the futures will be discarded once their value has
+-// been passed to all awaiting goroutines.
+-//
+-// These specific extensions are used to implement the concurrency model of the
+-// [typeCheckBatch], which allows multiple operations to piggy-back on top of
+-// an ongoing type checking operation, requesting new packages asynchronously
+-// without unduly increasing the in-use memory required by the type checking
+-// pass.
+-type futureCache[K comparable, V any] struct {
+- persistent bool
+-
+- mu sync.Mutex
+- cache map[K]*future[V]
+-}
+-
+-// newFutureCache returns a futureCache that is ready to coordinate
+-// computations via [futureCache.get].
+-//
+-// If persistent is true, the results of these computations are stored for the
+-// lifecycle of cache. Otherwise, results are discarded after they have been
+-// passed to all awaiting goroutines.
+-func newFutureCache[K comparable, V any](persistent bool) *futureCache[K, V] {
+- return &futureCache[K, V]{
+- persistent: persistent,
+- cache: make(map[K]*future[V]),
+- }
+-}
+-
+-type future[V any] struct {
+- // refs is the number of goroutines awaiting this future, to be used for
+- // cleaning up transient cache entries.
+- //
+- // Guarded by futureCache.mu.
+- refs int
+-
+- // done is closed when the future has been fully computed.
+- done chan unit
+-
+- // acquire used to select an awaiting goroutine to run the computation.
+- // acquire is 1-buffered, and initialized with one unit, so that the first
+- // requester starts a computation. If that computation is cancelled, the
+- // requester pushes the unit back to acquire, so that another goroutine may
+- // execute the computation.
+- acquire chan unit
+-
+- // v and err store the result of the computation, guarded by done.
+- v V
+- err error
+-}
+-
+-// cacheFunc is the type of a future computation function.
+-type cacheFunc[V any] func(context.Context) (V, error)
+-
+-// get retrieves or computes the value corresponding to k.
+-//
+-// If the cache is persistent and the value has already been computed, get
+-// returns the result of the previous computation. Otherwise, get either starts
+-// a computation or joins an ongoing computation. If that computation is
+-// cancelled, get will reassign the computation to a new goroutine as long as
+-// there are awaiters.
+-//
+-// Once the computation completes, the result is passed to all awaiting
+-// goroutines. If the cache is transient (persistent=false), the corresponding
+-// cache entry is removed, and the next call to get will execute a new
+-// computation.
+-//
+-// It is therefore the responsibility of the caller to ensure that the given
+-// compute function is safely retryable, and always returns the same value.
+-func (c *futureCache[K, V]) get(ctx context.Context, k K, compute cacheFunc[V]) (V, error) {
+- c.mu.Lock()
+- f, ok := c.cache[k]
+- if !ok {
+- f = &future[V]{
+- done: make(chan unit),
+- acquire: make(chan unit, 1),
+- }
+- f.acquire <- unit{} // make available for computation
+- c.cache[k] = f
+- }
+- f.refs++
+- c.mu.Unlock()
+-
+- defer func() {
+- c.mu.Lock()
+- defer c.mu.Unlock()
+- f.refs--
+- if f.refs == 0 && !c.persistent {
+- delete(c.cache, k)
+- }
+- }()
+-
+- var zero V
+- select {
+- case <-ctx.Done():
+- return zero, ctx.Err()
+- case <-f.done:
+- return f.v, f.err
+- case <-f.acquire:
+- }
+-
+- v, err := compute(ctx)
+- if err := ctx.Err(); err != nil {
+- f.acquire <- unit{} // hand off work to the next requester
+- return zero, err
+- }
+-
+- f.v = v
+- f.err = err
+- close(f.done)
+- return v, err
+-}
+diff -urN a/gopls/internal/cache/future_test.go b/gopls/internal/cache/future_test.go
+--- a/gopls/internal/cache/future_test.go 2000-01-01 00:00:00.000000000 -0000
++++ b/gopls/internal/cache/future_test.go 1969-12-31 18:00:00.000000000 -0600
+@@ -1,156 +0,0 @@
+-// Copyright 2024 The Go Authors. All rights reserved.
+-// Use of this source code is governed by a BSD-style
+-// license that can be found in the LICENSE file.
+-
+-package cache
+-
+-import (
+- "context"
+- "fmt"
+- "sync/atomic"
+- "testing"
+- "time"
+-
+- "golang.org/x/sync/errgroup"
+-)
+-
+-func TestFutureCache_Persistent(t *testing.T) {
+- c := newFutureCache[int, int](true)
+- ctx := context.Background()
+-
+- var computed atomic.Int32
+- compute := func(i int) cacheFunc[int] {
+- return func(context.Context) (int, error) {
+- computed.Add(1)
+- return i, ctx.Err()
+- }
+- }
+-
+- testFutureCache(t, ctx, c, compute)
+-
+- // Since this cache is persistent, we should get exactly 10 computations,
+- // since there are 10 distinct keys in [testFutureCache].
+- if got := computed.Load(); got != 10 { +- t.Errorf("computed %d times, want 10", got) +- } +-} +- +-func TestFutureCache_Ephemeral(t *testing.T) { +- c := newFutureCache[int, int](false) +- ctx := context.Background() +- +- var computed atomic.Int32 +- compute := func(i int) cacheFunc[int] { +- return func(context.Context) (int, error) { +- time.Sleep(1 * time.Millisecond) +- computed.Add(1) +- return i, ctx.Err() +- } +- } +- +- testFutureCache(t, ctx, c, compute) +- +- // Since this cache is ephemeral, we should get at least 30 computations, +- // since there are 10 distinct keys and three synchronous passes in +- // [testFutureCache]. +- if got := computed.Load(); got < 30 { +- t.Errorf("computed %d times, want at least 30", got) +- } else { +- t.Logf("compute ran %d times", got) +- } +-} +- +-// testFutureCache starts 100 goroutines concurrently, indexed by j, each +-// getting key j%10 from the cache. It repeats this three times, synchronizing +-// after each. +-// +-// This is designed to exercise both concurrent and synchronous access to the +-// cache. +-func testFutureCache(t *testing.T, ctx context.Context, c *futureCache[int, int], compute func(int) cacheFunc[int]) { +- for range 3 { +- var g errgroup.Group +- for j := range 100 { +- mod := j % 10 +- compute := compute(mod) +- g.Go(func() error { +- got, err := c.get(ctx, mod, compute) +- if err == nil && got != mod { +- t.Errorf("get() = %d, want %d", got, mod) +- } +- return err +- }) +- } +- if err := g.Wait(); err != nil { +- t.Fatal(err) +- } +- } +-} +- +-func TestFutureCache_Retrying(t *testing.T) { +- // This test verifies the retry behavior of cache entries, +- // by checking that cancelled work is handed off to the next awaiter. +- // +- // The setup is a little tricky: 10 goroutines are started, and the first 9 +- // are cancelled whereas the 10th is allowed to finish. As a result, the +- // computation should always succeed with value 9. +- +- ctx := context.Background() +- +- for _, persistent := range []bool{true, false} { +- t.Run(fmt.Sprintf("persistent=%t", persistent), func(t *testing.T) { +- c := newFutureCache[int, int](persistent) +- +- var started atomic.Int32 +- +- // compute returns a new cacheFunc that produces the value i, after the +- // provided done channel is closed. +- compute := func(i int, done <-chan struct{}) cacheFunc[int] { +- return func(ctx context.Context) (int, error) { +- started.Add(1) +- select { +- case <-ctx.Done(): +- return 0, ctx.Err() +- case <-done: +- return i, nil +- } +- } +- } +- +- // goroutines are either cancelled, or allowed to complete, +- // as controlled by cancels and dones. 
+- var ( +- cancels = make([]func(), 10) +- dones = make([]chan struct{}, 10) +- ) +- +- var g errgroup.Group +- var lastValue atomic.Int32 // keep track of the last successfully computed value +- for i := range 10 { +- ctx, cancel := context.WithCancel(ctx) +- done := make(chan struct{}) +- cancels[i] = cancel +- dones[i] = done +- compute := compute(i, done) +- g.Go(func() error { +- v, err := c.get(ctx, 0, compute) +- if err == nil { +- lastValue.Store(int32(v)) +- } +- return nil +- }) +- } +- for _, cancel := range cancels[:9] { +- cancel() +- } +- defer cancels[9]() +- +- dones[9] <- struct{}{} +- _ = g.Wait() // can't fail +- +- t.Logf("started %d computations", started.Load()) +- if got := lastValue.Load(); got != 9 { +- t.Errorf("after cancelling computation 0-8, got %d, want 9", got) +- } +- }) +- } +-} +diff -urN a/gopls/internal/cache/imports.go b/gopls/internal/cache/imports.go +--- a/gopls/internal/cache/imports.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/cache/imports.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,310 +0,0 @@ +-// Copyright 2020 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package cache +- +-import ( +- "context" +- "fmt" +- "log" +- "sync" +- "time" +- +- "golang.org/x/tools/gopls/internal/file" +- "golang.org/x/tools/gopls/internal/label" +- "golang.org/x/tools/internal/event" +- "golang.org/x/tools/internal/event/keys" +- "golang.org/x/tools/internal/imports" +- "golang.org/x/tools/internal/modindex" +-) +- +-// refreshTimer implements delayed asynchronous refreshing of state. +-// +-// See the [refreshTimer.schedule] documentation for more details. +-type refreshTimer struct { +- mu sync.Mutex +- duration time.Duration +- timer *time.Timer +- refreshFn func() +-} +- +-// newRefreshTimer constructs a new refresh timer which schedules refreshes +-// using the given function. +-func newRefreshTimer(refresh func()) *refreshTimer { +- return &refreshTimer{ +- refreshFn: refresh, +- } +-} +- +-// stop stops any future scheduled refresh. +-func (t *refreshTimer) stop() { +- t.mu.Lock() +- defer t.mu.Unlock() +- +- if t.timer != nil { +- t.timer.Stop() +- t.timer = nil +- t.refreshFn = nil // release resources +- } +-} +- +-// schedule schedules the refresh function to run at some point in the future, +-// if no existing refresh is already scheduled. +-// +-// At a minimum, scheduled refreshes are delayed by 30s, but they may be +-// delayed longer to keep their expected execution time under 2% of wall clock +-// time. +-func (t *refreshTimer) schedule() { +- t.mu.Lock() +- defer t.mu.Unlock() +- +- if t.timer == nil { +- // Don't refresh more than twice per minute. +- // Don't spend more than ~2% of the time refreshing. +- delay := max(30*time.Second, 50*t.duration) +- t.timer = time.AfterFunc(delay, func() { +- start := time.Now() +- t.mu.Lock() +- refreshFn := t.refreshFn +- t.mu.Unlock() +- if refreshFn != nil { // timer may be stopped. +- refreshFn() +- t.mu.Lock() +- t.duration = time.Since(start) +- t.timer = nil +- t.mu.Unlock() +- } +- }) +- } +-} +- +-// A sharedModCache tracks goimports state for GOMODCACHE directories +-// (each session may have its own GOMODCACHE). +-// +-// This state is refreshed independently of view-specific imports state. 
+-type sharedModCache struct { +- mu sync.Mutex +- caches map[string]*imports.DirInfoCache // GOMODCACHE -> cache content; never invalidated +- // TODO(rfindley): consider stopping these timers when the session shuts down. +- timers map[string]*refreshTimer // GOMODCACHE -> timer +-} +- +-func (c *sharedModCache) dirCache(dir string) *imports.DirInfoCache { +- c.mu.Lock() +- defer c.mu.Unlock() +- +- cache, ok := c.caches[dir] +- if !ok { +- cache = imports.NewDirInfoCache() +- c.caches[dir] = cache +- } +- return cache +-} +- +-// refreshDir schedules a refresh of the given directory, which must be a +-// module cache. +-func (c *sharedModCache) refreshDir(ctx context.Context, dir string, logf func(string, ...any)) { +- cache := c.dirCache(dir) +- +- c.mu.Lock() +- defer c.mu.Unlock() +- timer, ok := c.timers[dir] +- if !ok { +- timer = newRefreshTimer(func() { +- _, done := event.Start(ctx, "cache.sharedModCache.refreshDir", label.Directory.Of(dir)) +- defer done() +- imports.ScanModuleCache(dir, cache, logf) +- }) +- c.timers[dir] = timer +- } +- +- timer.schedule() +-} +- +-// importsState tracks view-specific imports state. +-type importsState struct { +- ctx context.Context +- modCache *sharedModCache +- refreshTimer *refreshTimer +- +- mu sync.Mutex +- processEnv *imports.ProcessEnv +- cachedModFileHash file.Hash +-} +- +-// newImportsState constructs a new imports state for running goimports +-// functions via [runProcessEnvFunc]. +-// +-// The returned state will automatically refresh itself following a delay. +-func newImportsState(backgroundCtx context.Context, modCache *sharedModCache, env *imports.ProcessEnv) *importsState { +- s := &importsState{ +- ctx: backgroundCtx, +- modCache: modCache, +- processEnv: env, +- } +- s.refreshTimer = newRefreshTimer(s.refreshProcessEnv) +- s.refreshTimer.schedule() +- return s +-} +- +-// modcacheState holds a modindex.Index and controls its updates +-type modcacheState struct { +- gomodcache string +- refreshTimer *refreshTimer +- +- // (index, indexErr) is zero in the initial state. +- // Thereafter they hold the memoized result pair for getIndex. +- mu sync.Mutex +- index *modindex.Index +- indexErr error +-} +- +-// newModcacheState constructs a new modcacheState for goimports. +-// The returned state is automatically updated until [modcacheState.stopTimer] is called. +-func newModcacheState(gomodcache string) *modcacheState { +- s := &modcacheState{ +- gomodcache: gomodcache, +- } +- s.refreshTimer = newRefreshTimer(s.refreshIndex) +- go s.refreshIndex() +- return s +-} +- +-// getIndex reads the module cache index. +-func (s *modcacheState) getIndex() (*modindex.Index, error) { +- s.mu.Lock() +- defer s.mu.Unlock() +- +- if s.index == nil && s.indexErr == nil { +- // getIndex was called before refreshIndex finished. +- // Read, but don't update, whatever index is present. +- s.index, s.indexErr = modindex.Read(s.gomodcache) +- } +- +- return s.index, s.indexErr +-} +- +-func (s *modcacheState) refreshIndex() { +- index, err := modindex.Update(s.gomodcache) +- s.mu.Lock() +- if err != nil { +- if s.indexErr != nil { +- s.indexErr = err // prefer most recent error +- } else { +- // Keep using stale s.index (if any). +- log.Printf("modcacheState.refreshIndex: %v", err) +- } +- } else { +- s.index, s.indexErr = index, nil // success +- } +- s.mu.Unlock() +-} +- +-func (s *modcacheState) stopTimer() { +- s.refreshTimer.stop() +-} +- +-// stopTimer stops scheduled refreshes of this imports state. 
+-func (s *importsState) stopTimer() { +- s.refreshTimer.stop() +-} +- +-// runProcessEnvFunc runs goimports. +-// +-// Any call to runProcessEnvFunc will schedule a refresh of the imports state +-// at some point in the future, if such a refresh is not already scheduled. See +-// [refreshTimer] for more details. +-func (s *importsState) runProcessEnvFunc(ctx context.Context, snapshot *Snapshot, fn func(context.Context, *imports.Options) error) error { +- ctx, done := event.Start(ctx, "cache.importsState.runProcessEnvFunc") +- defer done() +- +- s.mu.Lock() +- defer s.mu.Unlock() +- +- // Find the hash of active mod files, if any. Using the unsaved content +- // is slightly wasteful, since we'll drop caches a little too often, but +- // the mod file shouldn't be changing while people are autocompleting. +- // +- // TODO(rfindley): consider instead hashing on-disk modfiles here. +- var modFileHash file.Hash +- for m := range snapshot.view.workspaceModFiles { +- fh, err := snapshot.ReadFile(ctx, m) +- if err != nil { +- return err +- } +- modFileHash.XORWith(fh.Identity().Hash) +- } +- +- // If anything relevant to imports has changed, clear caches and +- // update the processEnv. Clearing caches blocks on any background +- // scans. +- if modFileHash != s.cachedModFileHash { +- s.processEnv.ClearModuleInfo() +- s.cachedModFileHash = modFileHash +- } +- +- // Run the user function. +- opts := &imports.Options{ +- // Defaults. +- AllErrors: true, +- Comments: true, +- Fragment: true, +- FormatOnly: false, +- TabIndent: true, +- TabWidth: 8, +- Env: s.processEnv, +- LocalPrefix: snapshot.Options().Local, +- } +- +- if err := fn(ctx, opts); err != nil { +- return err +- } +- +- // Refresh the imports resolver after usage. This may seem counterintuitive, +- // since it means the first ProcessEnvFunc after a long period of inactivity +- // may be stale, but in practice we run ProcessEnvFuncs frequently during +- // active development (e.g. during completion), and so this mechanism will be +- // active while gopls is in use, and inactive when gopls is idle. +- s.refreshTimer.schedule() +- +- // TODO(rfindley): the GOMODCACHE value used here isn't directly tied to the +- // ProcessEnv.Env["GOMODCACHE"], though they should theoretically always +- // agree. It would be better if we guaranteed this, possibly by setting all +- // required environment variables in ProcessEnv.Env, to avoid the redundant +- // Go command invocation. +- gomodcache := snapshot.view.folder.Env.GOMODCACHE +- s.modCache.refreshDir(s.ctx, gomodcache, s.processEnv.Logf) +- +- return nil +-} +- +-func (s *importsState) refreshProcessEnv() { +- ctx, done := event.Start(s.ctx, "cache.importsState.refreshProcessEnv") +- defer done() +- +- start := time.Now() +- +- s.mu.Lock() +- resolver, err := s.processEnv.GetResolver() +- s.mu.Unlock() +- if err != nil { +- event.Error(s.ctx, "failed to get import resolver", err) +- return +- } +- +- event.Log(s.ctx, "background imports cache refresh starting") +- resolver2 := resolver.ClearForNewScan() +- +- // Prime the new resolver before updating the processEnv, so that gopls +- // doesn't wait on an unprimed cache. 
+- if err := imports.PrimeCache(context.Background(), resolver2); err == nil { +- event.Log(ctx, fmt.Sprintf("background refresh finished after %v", time.Since(start))) +- } else { +- event.Log(ctx, fmt.Sprintf("background refresh finished after %v", time.Since(start)), keys.Err.Of(err)) +- } +- +- s.mu.Lock() +- s.processEnv.UpdateResolver(resolver2) +- s.mu.Unlock() +-} +diff -urN a/gopls/internal/cache/keys.go b/gopls/internal/cache/keys.go +--- a/gopls/internal/cache/keys.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/cache/keys.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,54 +0,0 @@ +-// Copyright 2020 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package cache +- +-// session event tracing +- +-import ( +- "io" +- +- "golang.org/x/tools/internal/event/label" +-) +- +-var ( +- KeyCreateSession = NewSessionKey("create_session", "A new session was added") +- KeyUpdateSession = NewSessionKey("update_session", "Updated information about a session") +- KeyShutdownSession = NewSessionKey("shutdown_session", "A session was shut down") +-) +- +-// SessionKey represents an event label key that has a *Session value. +-type SessionKey struct { +- name string +- description string +-} +- +-// NewSessionKey creates a new Key for *Session values. +-func NewSessionKey(name, description string) *SessionKey { +- return &SessionKey{name: name, description: description} +-} +- +-func (k *SessionKey) Name() string { return k.name } +-func (k *SessionKey) Description() string { return k.description } +- +-func (k *SessionKey) Format(w io.Writer, buf []byte, l label.Label) { +- io.WriteString(w, k.From(l).ID()) +-} +- +-// Of creates a new Label with this key and the supplied session. +-func (k *SessionKey) Of(v *Session) label.Label { return label.OfValue(k, v) } +- +-// Get can be used to get the session for the key from a label.Map. +-func (k *SessionKey) Get(lm label.Map) *Session { +- if t := lm.Find(k); t.Valid() { +- return k.From(t) +- } +- return nil +-} +- +-// From can be used to get the session value from a Label. +-func (k *SessionKey) From(t label.Label) *Session { +- err, _ := t.UnpackValue().(*Session) +- return err +-} +diff -urN a/gopls/internal/cache/load.go b/gopls/internal/cache/load.go +--- a/gopls/internal/cache/load.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/cache/load.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,822 +0,0 @@ +-// Copyright 2019 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package cache +- +-import ( +- "bytes" +- "context" +- "errors" +- "fmt" +- "go/types" +- "path/filepath" +- "slices" +- "sort" +- "strings" +- "sync/atomic" +- "time" +- +- "golang.org/x/tools/go/packages" +- "golang.org/x/tools/gopls/internal/cache/metadata" +- "golang.org/x/tools/gopls/internal/file" +- "golang.org/x/tools/gopls/internal/label" +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/gopls/internal/util/bug" +- "golang.org/x/tools/gopls/internal/util/immutable" +- "golang.org/x/tools/gopls/internal/util/pathutil" +- "golang.org/x/tools/internal/event" +- "golang.org/x/tools/internal/packagesinternal" +- "golang.org/x/tools/internal/typesinternal" +- "golang.org/x/tools/internal/xcontext" +-) +- +-var loadID uint64 // atomic identifier for loads +- +-// errNoPackages indicates that a load query matched no packages. 
+-var errNoPackages = errors.New("no packages returned") +- +-// load calls packages.Load for the given scopes, updating package metadata, +-// import graph, and mapped files with the result. +-// +-// The resulting error may wrap the moduleErrorMap error type, representing +-// errors associated with specific modules. +-// +-// If scopes contains a file scope there must be exactly one scope. +-func (s *Snapshot) load(ctx context.Context, allowNetwork AllowNetwork, scopes ...loadScope) (err error) { +- if ctx.Err() != nil { +- // Check context cancellation before incrementing id below: a load on a +- // cancelled context should be a no-op. +- return ctx.Err() +- } +- id := atomic.AddUint64(&loadID, 1) +- eventName := fmt.Sprintf("go/packages.Load #%d", id) // unique name for logging +- +- var query []string +- var standalone bool // whether this is a load of a standalone file +- +- // Keep track of module query -> module path so that we can later correlate query +- // errors with errors. +- moduleQueries := make(map[string]string) +- +- for _, scope := range scopes { +- switch scope := scope.(type) { +- case packageLoadScope: +- // The only time we pass package paths is when we're doing a +- // partial workspace load. In those cases, the paths came back from +- // go list and should already be GOPATH-vendorized when appropriate. +- query = append(query, string(scope)) +- +- case fileLoadScope: +- // Given multiple scopes, the resulting load might contain inaccurate +- // information. For example go/packages returns at most one command-line +- // arguments package, and does not handle a combination of standalone +- // files and packages. +- uri := protocol.DocumentURI(scope) +- if len(scopes) > 1 { +- panic(fmt.Sprintf("internal error: load called with multiple scopes when a file scope is present (file: %s)", uri)) +- } +- fh := s.FindFile(uri) +- if fh == nil || s.FileKind(fh) != file.Go { +- // Don't try to load a file that doesn't exist, or isn't a go file. +- continue +- } +- contents, err := fh.Content() +- if err != nil { +- continue +- } +- if isStandaloneFile(contents, s.Options().StandaloneTags) { +- standalone = true +- query = append(query, uri.Path()) +- } else { +- query = append(query, fmt.Sprintf("file=%s", uri.Path())) +- } +- +- case moduleLoadScope: +- modQuery := fmt.Sprintf("%s%c...", scope.dir, filepath.Separator) +- query = append(query, modQuery) +- moduleQueries[modQuery] = scope.modulePath +- +- case viewLoadScope: +- // If we are outside of GOPATH, a module, or some other known +- // build system, don't load subdirectories. +- if s.view.typ == AdHocView { +- query = append(query, "./") +- } else { +- query = append(query, "./...") +- } +- +- default: +- panic(fmt.Sprintf("unknown scope type %T", scope)) +- } +- } +- if len(query) == 0 { +- return nil +- } +- sort.Strings(query) // for determinism +- +- ctx, done := event.Start(ctx, "cache.snapshot.load", label.Query.Of(query)) +- defer done() +- +- startTime := time.Now() +- +- // Set a last resort deadline on packages.Load since it calls the go +- // command, which may hang indefinitely if it has a bug. golang/go#42132 +- // and golang/go#42255 have more context. +- ctx, cancel := context.WithTimeout(ctx, 10*time.Minute) +- defer cancel() +- +- cfg := s.config(ctx, allowNetwork) +- pkgs, err := packages.Load(cfg, query...) +- +- // If the context was canceled, return early. Otherwise, we might be +- // type-checking an incomplete result. 
Check the context directly, +- // because go/packages adds extra information to the error. +- if ctx.Err() != nil { +- return ctx.Err() +- } +- +- // This log message is sought for by TestReloadOnlyOnce. +- { +- lbls := append(s.Labels(), +- label.Query.Of(query), +- label.PackageCount.Of(len(pkgs)), +- label.Duration.Of(time.Since(startTime)), +- ) +- if err != nil { +- event.Error(ctx, eventName, err, lbls...) +- } else { +- event.Log(ctx, eventName, lbls...) +- } +- } +- +- if err != nil { +- return fmt.Errorf("packages.Load error: %w", err) +- } +- +- if standalone { +- // Handle standalone package result. +- // +- // In general, this should just be a single "command-line-arguments" +- // package containing the requested file. However, if the file is a test +- // file, go/packages may return test variants of the command-line-arguments +- // package. We don't support this; theoretically we could, but it seems +- // unnecessarily complicated. +- // +- // It's possible that we get no packages here, for example if the file is a +- // cgo file and cgo is not enabled. +- var standalonePkg *packages.Package +- for _, pkg := range pkgs { +- if pkg.ID == "command-line-arguments" { +- if standalonePkg != nil { +- return fmt.Errorf("go/packages returned multiple standalone packages") +- } +- standalonePkg = pkg +- } else if pkg.ForTest == "" && !strings.HasSuffix(pkg.ID, ".test") { +- return fmt.Errorf("go/packages returned unexpected package %q for standalone file", pkg.ID) +- } +- } +- if standalonePkg == nil { +- return fmt.Errorf("go/packages failed to return non-test standalone package") +- } +- if len(standalonePkg.CompiledGoFiles) > 0 { +- pkgs = []*packages.Package{standalonePkg} +- } else { +- pkgs = nil +- } +- } +- +- if len(pkgs) == 0 { +- return fmt.Errorf("packages.Load error: %w", errNoPackages) +- } +- +- moduleErrs := make(map[string][]packages.Error) // module path -> errors +- filterFunc := s.view.filterFunc() +- newMetadata := make(map[PackageID]*metadata.Package) +- for _, pkg := range pkgs { +- if pkg.Module != nil && strings.Contains(pkg.Module.Path, "command-line-arguments") { +- // golang/go#61543: modules containing "command-line-arguments" cause +- // gopls to get all sorts of confused, because anything containing the +- // string "command-line-arguments" is treated as a script. And yes, this +- // happened in practice! (https://xkcd.com/327). Rather than try to work +- // around this very rare edge case, just fail loudly. +- return fmt.Errorf(`load failed: module name in %s contains "command-line-arguments", which is disallowed`, pkg.Module.GoMod) +- } +- // The Go command returns synthetic list results for module queries that +- // encountered module errors. +- // +- // For example, given a module path a.mod, we'll query for "a.mod/..." and +- // the go command will return a package named "a.mod/..." holding this +- // error. Save it for later interpretation. +- // +- // See golang/go#50862 for more details. +- if mod := moduleQueries[pkg.PkgPath]; mod != "" { // a synthetic result for the unloadable module +- if len(pkg.Errors) > 0 { +- moduleErrs[mod] = pkg.Errors +- } +- continue +- } +- +- if s.Options().VerboseOutput { +- event.Log(ctx, eventName, append( +- s.Labels(), +- label.Package.Of(pkg.ID), +- label.Files.Of(pkg.CompiledGoFiles))...) +- } +- +- // Ignore packages with no sources, since we will never be able to +- // correctly invalidate that metadata. 
+- if len(pkg.GoFiles) == 0 && len(pkg.CompiledGoFiles) == 0 { +- continue +- } +- // Special case for the builtin package, as it has no dependencies. +- if pkg.PkgPath == "builtin" { +- if len(pkg.GoFiles) != 1 { +- return fmt.Errorf("only expected 1 file for builtin, got %v", len(pkg.GoFiles)) +- } +- s.setBuiltin(pkg.GoFiles[0]) +- continue +- } +- if pkg.ForTest == "builtin" { +- // We don't care about test variants of builtin. This caused test +- // failures in https://go.dev/cl/620196, when a test file was added to +- // builtin. +- continue +- } +- // Skip test main packages. +- if isTestMain(pkg, s.view.folder.Env.GOCACHE) { +- continue +- } +- // Skip filtered packages. They may be added anyway if they're +- // dependencies of non-filtered packages. +- // +- // TODO(rfindley): why exclude metadata arbitrarily here? It should be safe +- // to capture all metadata. +- // TODO(rfindley): what about compiled go files? +- if allFilesExcluded(pkg.GoFiles, filterFunc) { +- continue +- } +- buildMetadata(newMetadata, cfg.Dir, standalone, pkg) +- } +- +- s.mu.Lock() +- +- // Assert the invariant s.packages.Get(id).m == s.meta.metadata[id]. +- for id, ph := range s.packages.All() { +- if s.meta.Packages[id] != ph.mp { +- panic("inconsistent metadata") +- } +- } +- +- // Compute the minimal metadata updates (for Clone) +- // required to preserve the above invariant. +- var files []protocol.DocumentURI // files to preload +- seenFiles := make(map[protocol.DocumentURI]bool) +- updates := make(map[PackageID]*metadata.Package) +- for _, mp := range newMetadata { +- if existing := s.meta.Packages[mp.ID]; existing == nil { +- // Record any new files we should pre-load. +- for _, uri := range mp.CompiledGoFiles { +- if !seenFiles[uri] { +- seenFiles[uri] = true +- files = append(files, uri) +- } +- } +- updates[mp.ID] = mp +- s.shouldLoad.Delete(mp.ID) +- } +- } +- +- if s.Options().VerboseOutput { +- event.Log(ctx, fmt.Sprintf("%s: updating metadata for %d packages", eventName, len(updates))) +- } +- +- meta := s.meta.Update(updates) +- workspacePackages := computeWorkspacePackagesLocked(ctx, s, meta) +- s.meta = meta +- s.workspacePackages = workspacePackages +- +- s.mu.Unlock() +- +- // Opt: preLoad files in parallel. +- // +- // Requesting files in batch optimizes the underlying filesystem reads. +- // However, this is also currently necessary for correctness: populating all +- // files in the snapshot is necessary for certain operations that rely on the +- // completeness of the file map, e.g. computing the set of directories to +- // watch. +- // +- // TODO(rfindley, golang/go#57558): determine the set of directories based on +- // loaded packages, so that reading files here is not necessary for +- // correctness. 
+- s.preloadFiles(ctx, files) +- +- if len(moduleErrs) > 0 { +- return &moduleErrorMap{moduleErrs} +- } +- +- return nil +-} +- +-type moduleErrorMap struct { +- errs map[string][]packages.Error // module path -> errors +-} +- +-func (m *moduleErrorMap) Error() string { +- var paths []string // sort for stability +- for path, errs := range m.errs { +- if len(errs) > 0 { // should always be true, but be cautious +- paths = append(paths, path) +- } +- } +- sort.Strings(paths) +- +- var buf bytes.Buffer +- fmt.Fprintf(&buf, "%d modules have errors:\n", len(paths)) +- for _, path := range paths { +- fmt.Fprintf(&buf, "\t%s:%s\n", path, m.errs[path][0].Msg) +- } +- +- return buf.String() +-} +- +-// config returns the configuration used for the snapshot's interaction with +-// the go/packages API. It uses the given working directory. +-// +-// TODO(rstambler): go/packages requires that we do not provide overlays for +-// multiple modules in one config, so buildOverlay needs to filter overlays by +-// module. +-// TODO(rfindley): ^^ is this still true? +-func (s *Snapshot) config(ctx context.Context, allowNetwork AllowNetwork) *packages.Config { +- cfg := &packages.Config{ +- Context: ctx, +- Dir: s.view.root.Path(), +- Env: s.view.Env(), +- BuildFlags: slices.Clone(s.view.folder.Options.BuildFlags), +- Mode: packages.NeedName | +- packages.NeedFiles | +- packages.NeedCompiledGoFiles | +- packages.NeedImports | +- packages.NeedDeps | +- packages.NeedTypesSizes | +- packages.NeedModule | +- packages.NeedEmbedFiles | +- packages.LoadMode(packagesinternal.DepsErrors) | +- packages.NeedForTest, +- Fset: nil, // we do our own parsing +- Overlay: s.buildOverlays(), +- Logf: func(format string, args ...any) { +- if s.view.folder.Options.VerboseOutput { +- event.Log(ctx, fmt.Sprintf(format, args...)) +- } +- }, +- Tests: true, +- } +- if !allowNetwork { +- cfg.Env = append(cfg.Env, "GOPROXY=off") +- } +- // We want to type check cgo code if go/types supports it. +- if typesinternal.SetUsesCgo(&types.Config{}) { +- cfg.Mode |= packages.LoadMode(packagesinternal.TypecheckCgo) +- } +- return cfg +-} +- +-// buildMetadata populates the updates map with metadata updates to +-// apply, based on the given pkg. It recurs through pkg.Imports to ensure that +-// metadata exists for all dependencies. +-// +-// Returns the metadata.Package that was built (or which was already present in +-// updates), or nil if the package could not be built. Notably, the resulting +-// metadata.Package may have an ID that differs from pkg.ID. +-func buildMetadata(updates map[PackageID]*metadata.Package, loadDir string, standalone bool, pkg *packages.Package) *metadata.Package { +- // Allow for multiple ad-hoc packages in the workspace (see #47584). +- pkgPath := PackagePath(pkg.PkgPath) +- id := PackageID(pkg.ID) +- +- if metadata.IsCommandLineArguments(id) { +- var f string // file to use as disambiguating suffix +- if len(pkg.GoFiles) > 0 { +- f = pkg.GoFiles[0] +- +- // If there are multiple files, we can't use only the first. Note that we +- // consider GoFiles, rather than CompiledGoFiles, as there can be +- // multiple CompiledGoFiles in the presence of cgo processing, whereas a +- // command-line-arguments package should always have exactly one nominal +- // Go source file. (See golang/go#64557.) 
+- if len(pkg.GoFiles) > 1 { +- bug.Reportf("unexpected files in command-line-arguments package: %v", pkg.GoFiles) +- return nil +- } +- } else if len(pkg.IgnoredFiles) > 0 { +- // A file=empty.go query results in IgnoredFiles=[empty.go]. +- f = pkg.IgnoredFiles[0] +- } else { +- bug.Reportf("command-line-arguments package has neither GoFiles nor IgnoredFiles") +- return nil +- } +- id = PackageID(pkg.ID + f) +- pkgPath = PackagePath(pkg.PkgPath + f) +- } +- +- // Duplicate? +- if existing, ok := updates[id]; ok { +- // A package was encountered twice due to shared +- // subgraphs (common) or cycles (rare). Although "go +- // list" usually breaks cycles, we don't rely on it. +- // breakImportCycles in metadataGraph.Clone takes care +- // of it later. +- return existing +- } +- +- if pkg.TypesSizes == nil { +- panic(id + ".TypeSizes is nil") +- } +- +- // Recreate the metadata rather than reusing it to avoid locking. +- mp := &metadata.Package{ +- ID: id, +- PkgPath: pkgPath, +- Name: PackageName(pkg.Name), +- ForTest: PackagePath(pkg.ForTest), +- TypesSizes: pkg.TypesSizes, +- LoadDir: loadDir, +- Module: pkg.Module, +- Errors: pkg.Errors, +- DepsErrors: packagesinternal.GetDepsErrors(pkg), +- Standalone: standalone, +- } +- +- updates[id] = mp +- +- copyURIs := func(dst *[]protocol.DocumentURI, src []string) { +- for _, filename := range src { +- *dst = append(*dst, protocol.URIFromPath(filename)) +- } +- } +- copyURIs(&mp.CompiledGoFiles, pkg.CompiledGoFiles) +- copyURIs(&mp.GoFiles, pkg.GoFiles) +- copyURIs(&mp.IgnoredFiles, pkg.IgnoredFiles) +- copyURIs(&mp.OtherFiles, pkg.OtherFiles) +- +- depsByImpPath := make(map[ImportPath]PackageID) +- depsByPkgPath := make(map[PackagePath]PackageID) +- for importPath, imported := range pkg.Imports { +- importPath := ImportPath(importPath) +- +- // It is not an invariant that importPath == imported.PkgPath. +- // For example, package "net" imports "golang.org/x/net/dns/dnsmessage" +- // which refers to the package whose ID and PkgPath are both +- // "vendor/golang.org/x/net/dns/dnsmessage". Notice the ImportMap, +- // which maps ImportPaths to PackagePaths: +- // +- // $ go list -json net vendor/golang.org/x/net/dns/dnsmessage +- // { +- // "ImportPath": "net", +- // "Name": "net", +- // "Imports": [ +- // "C", +- // "vendor/golang.org/x/net/dns/dnsmessage", +- // "vendor/golang.org/x/net/route", +- // ... +- // ], +- // "ImportMap": { +- // "golang.org/x/net/dns/dnsmessage": "vendor/golang.org/x/net/dns/dnsmessage", +- // "golang.org/x/net/route": "vendor/golang.org/x/net/route" +- // }, +- // ... +- // } +- // { +- // "ImportPath": "vendor/golang.org/x/net/dns/dnsmessage", +- // "Name": "dnsmessage", +- // ... +- // } +- // +- // (Beware that, for historical reasons, go list uses +- // the JSON field "ImportPath" for the package's +- // path--effectively the linker symbol prefix.) +- // +- // The example above is slightly special to go list +- // because it's in the std module. Otherwise, +- // vendored modules are simply modules whose directory +- // is vendor/ instead of GOMODCACHE, and the +- // import path equals the package path. +- // +- // But in GOPATH (non-module) mode, it's possible for +- // package vendoring to cause a non-identity ImportMap, +- // as in this example: +- // +- // $ cd $HOME/src +- // $ find . 
-type f +- // ./b/b.go +- // ./vendor/example.com/a/a.go +- // $ cat ./b/b.go +- // package b +- // import _ "example.com/a" +- // $ cat ./vendor/example.com/a/a.go +- // package a +- // $ GOPATH=$HOME GO111MODULE=off go list -json ./b | grep -A2 ImportMap +- // "ImportMap": { +- // "example.com/a": "vendor/example.com/a" +- // }, +- +- // Don't remember any imports with significant errors. +- // +- // The len=0 condition is a heuristic check for imports of +- // non-existent packages (for which go/packages will create +- // an edge to a synthesized node). The heuristic is unsound +- // because some valid packages have zero files, for example, +- // a directory containing only the file p_test.go defines an +- // empty package p. +- // TODO(adonovan): clarify this. Perhaps go/packages should +- // report which nodes were synthesized. +- if importPath != "unsafe" && len(imported.CompiledGoFiles) == 0 { +- depsByImpPath[importPath] = "" // missing +- continue +- } +- +- // Don't record self-import edges. +- // (This simplifies metadataGraph's cycle check.) +- if PackageID(imported.ID) == id { +- if len(pkg.Errors) == 0 { +- bug.Reportf("self-import without error in package %s", id) +- } +- continue +- } +- +- dep := buildMetadata(updates, loadDir, false, imported) // only top level packages can be standalone +- +- // Don't record edges to packages with no name, as they cause trouble for +- // the importer (golang/go#60952). +- // +- // Also don't record edges to packages whose ID was modified (i.e. +- // command-line-arguments packages), as encountered in golang/go#66109. In +- // this case, we could theoretically keep the edge through dep.ID, but +- // since this import doesn't make any sense in the first place, we instead +- // choose to consider it invalid. +- // +- // However, we do want to insert these packages into the update map +- // (buildMetadata above), so that we get type-checking diagnostics for the +- // invalid packages. +- if dep == nil || dep.ID != PackageID(imported.ID) || imported.Name == "" { +- depsByImpPath[importPath] = "" // missing +- continue +- } +- +- depsByImpPath[importPath] = PackageID(imported.ID) +- depsByPkgPath[PackagePath(imported.PkgPath)] = PackageID(imported.ID) +- } +- mp.DepsByImpPath = depsByImpPath +- mp.DepsByPkgPath = depsByPkgPath +- return mp +- +- // m.Diagnostics is set later in the loading pass, using +- // computeLoadDiagnostics. +-} +- +-// computeLoadDiagnostics computes and sets m.Diagnostics for the given metadata m. +-// +-// It should only be called during package handle construction in buildPackageHandle. +-func computeLoadDiagnostics(ctx context.Context, snapshot *Snapshot, mp *metadata.Package) []*Diagnostic { +- var diags []*Diagnostic +- for _, packagesErr := range mp.Errors { +- // Filter out parse errors from go list. We'll get them when we +- // actually parse, and buggy overlay support may generate spurious +- // errors. (See TestNewModule_Issue38207.) +- if strings.Contains(packagesErr.Msg, "expected '") { +- continue +- } +- pkgDiags, err := goPackagesErrorDiagnostics(ctx, packagesErr, mp, snapshot) +- if err != nil { +- // There are certain cases where the go command returns invalid +- // positions, so we cannot panic or even bug.Reportf here. +- event.Error(ctx, "unable to compute positions for list errors", err, label.Package.Of(string(mp.ID))) +- continue +- } +- diags = append(diags, pkgDiags...) 
+- } +- +- // TODO(rfindley): this is buggy: an insignificant change to a modfile +- // (or an unsaved modfile) could affect the position of deps errors, +- // without invalidating the package. +- depsDiags, err := depsErrors(ctx, snapshot, mp) +- if err != nil { +- if ctx.Err() == nil { +- // TODO(rfindley): consider making this a bug.Reportf. depsErrors should +- // not normally fail. +- event.Error(ctx, "unable to compute deps errors", err, label.Package.Of(string(mp.ID))) +- } +- } else { +- diags = append(diags, depsDiags...) +- } +- return diags +-} +- +-// isWorkspacePackageLocked reports whether p is a workspace package for the +-// snapshot s. +-// +-// Workspace packages are packages that we consider the user to be actively +-// working on. As such, they are re-diagnosed on every keystroke, and searched +-// for various workspace-wide queries such as references or workspace symbols. +-// +-// See the commentary inline for a description of the workspace package +-// heuristics. +-// +-// s.mu must be held while calling this function. +-// +-// TODO(rfindley): remove 'meta' from this function signature. Whether or not a +-// package is a workspace package should depend only on the package, view +-// definition, and snapshot file source. While useful, the heuristic +-// "allFilesHaveRealPackages" does not add that much value and is path +-// dependent as it depends on the timing of loads. +-func isWorkspacePackageLocked(ctx context.Context, s *Snapshot, meta *metadata.Graph, pkg *metadata.Package) bool { +- if metadata.IsCommandLineArguments(pkg.ID) { +- // Ad-hoc command-line-arguments packages aren't workspace packages. +- // With zero-config gopls (golang/go#57979) they should be very rare, as +- // they should only arise when the user opens a file outside the workspace +- // which isn't present in the import graph of a workspace package. +- // +- // Considering them as workspace packages tends to be racy, as they don't +- // deterministically belong to any view. +- if !pkg.Standalone { +- return false +- } +- +- // If all the files contained in pkg have a real package, we don't need to +- // keep pkg as a workspace package. +- if allFilesHaveRealPackages(meta, pkg) { +- return false +- } +- +- // For now, allow open standalone packages (i.e. go:build ignore) to be +- // workspace packages, but this means they could belong to multiple views. +- return containsOpenFileLocked(s, pkg) +- } +- +- // If a real package is open, consider it to be part of the workspace. +- // +- // TODO(rfindley): reconsider this. In golang/go#66145, we saw that even if a +- // View sees a real package for a file, it doesn't mean that View is able to +- // cleanly diagnose the package. Yet, we do want to show diagnostics for open +- // packages outside the workspace. Is there a better way to ensure that only +- // the 'best' View gets a workspace package for the open file? +- if containsOpenFileLocked(s, pkg) { +- return true +- } +- +- // Apply filtering logic. +- // +- // Workspace packages must contain at least one non-filtered file. 
+- filterFunc := s.view.filterFunc() +- uris := make(map[protocol.DocumentURI]unit) // filtered package URIs +- for _, uri := range slices.Concat(pkg.CompiledGoFiles, pkg.GoFiles) { +- if !strings.Contains(string(uri), "/vendor/") && !filterFunc(uri) { +- uris[uri] = struct{}{} +- } +- } +- if len(uris) == 0 { +- return false // no non-filtered files +- } +- +- // For non-module views (of type GOPATH or AdHoc), or if +- // expandWorkspaceToModule is unset, workspace packages must be contained in +- // the workspace folder. +- // +- // For module views (of type GoMod or GoWork), packages must in any case be +- // in a workspace module (enforced below). +- if !s.view.typ.usesModules() || !s.Options().ExpandWorkspaceToModule { +- folder := s.view.folder.Dir.Path() +- inFolder := false +- for uri := range uris { +- if pathutil.InDir(folder, uri.Path()) { +- inFolder = true +- break +- } +- } +- if !inFolder { +- return false +- } +- } +- +- // In module mode, a workspace package must be contained in a workspace +- // module. +- if s.view.typ.usesModules() { +- var modURI protocol.DocumentURI +- if pkg.Module != nil { +- modURI = protocol.URIFromPath(pkg.Module.GoMod) +- } else { +- // golang/go#65816: for std and cmd, Module is nil. +- // Fall back to an inferior heuristic. +- if len(pkg.CompiledGoFiles) == 0 { +- return false // need at least one file to guess the go.mod file +- } +- dir := pkg.CompiledGoFiles[0].Dir() +- var err error +- modURI, err = findRootPattern(ctx, dir, "go.mod", lockedSnapshot{s}) +- if err != nil || modURI == "" { +- // err != nil implies context cancellation, in which case the result of +- // this query does not matter. +- return false +- } +- } +- _, ok := s.view.workspaceModFiles[modURI] +- return ok +- } +- +- return true // an ad-hoc package or GOPATH package +-} +- +-// containsOpenFileLocked reports whether any file referenced by m is open in +-// the snapshot s. +-// +-// s.mu must be held while calling this function. +-func containsOpenFileLocked(s *Snapshot, mp *metadata.Package) bool { +- uris := map[protocol.DocumentURI]struct{}{} +- for _, uri := range mp.CompiledGoFiles { +- uris[uri] = struct{}{} +- } +- for _, uri := range mp.GoFiles { +- uris[uri] = struct{}{} +- } +- +- for uri := range uris { +- fh, _ := s.files.get(uri) +- if _, open := fh.(*overlay); open { +- return true +- } +- } +- return false +-} +- +-// computeWorkspacePackagesLocked computes workspace packages in the +-// snapshot s for the given metadata graph. The result does not +-// contain intermediate test variants. +-// +-// s.mu must be held while calling this function. +-func computeWorkspacePackagesLocked(ctx context.Context, s *Snapshot, meta *metadata.Graph) immutable.Map[PackageID, PackagePath] { +- // The provided context is used for reading snapshot files, which can only +- // fail due to context cancellation. Don't let this happen as it could lead +- // to inconsistent results. +- ctx = xcontext.Detach(ctx) +- workspacePackages := make(map[PackageID]PackagePath) +- for _, mp := range meta.Packages { +- if !isWorkspacePackageLocked(ctx, s, meta, mp) { +- continue +- } +- +- switch { +- case mp.ForTest == "": +- // A normal package. +- workspacePackages[mp.ID] = mp.PkgPath +- case mp.ForTest == mp.PkgPath, mp.ForTest+"_test" == mp.PkgPath: +- // The test variant of some workspace package or its x_test. +- // To load it, we need to load the non-test variant with -test. +- // +- // Notably, this excludes intermediate test variants from workspace +- // packages. 
+- assert(!mp.IsIntermediateTestVariant(), "unexpected ITV") +- workspacePackages[mp.ID] = mp.ForTest +- } +- } +- return immutable.MapOf(workspacePackages) +-} +- +-// allFilesHaveRealPackages reports whether all files referenced by m are +-// contained in a "real" package (not command-line-arguments). +-// +-// If m is valid but all "real" packages containing any file are invalid, this +-// function returns false. +-// +-// If m is not a command-line-arguments package, this is trivially true. +-func allFilesHaveRealPackages(g *metadata.Graph, mp *metadata.Package) bool { +- n := len(mp.CompiledGoFiles) +-checkURIs: +- for _, uri := range slices.Concat(mp.CompiledGoFiles[0:n:n], mp.GoFiles) { +- for _, pkg := range g.ForFile[uri] { +- if !metadata.IsCommandLineArguments(pkg.ID) { +- continue checkURIs +- } +- } +- return false +- } +- return true +-} +- +-func isTestMain(pkg *packages.Package, gocache string) bool { +- // Test mains must have an import path that ends with ".test". +- if !strings.HasSuffix(pkg.PkgPath, ".test") { +- return false +- } +- // Test main packages are always named "main". +- if pkg.Name != "main" { +- return false +- } +- // Test mains always have exactly one GoFile that is in the build cache. +- if len(pkg.GoFiles) > 1 { +- return false +- } +- if !pathutil.InDir(gocache, pkg.GoFiles[0]) { +- return false +- } +- return true +-} +diff -urN a/gopls/internal/cache/metadata/cycle_test.go b/gopls/internal/cache/metadata/cycle_test.go +--- a/gopls/internal/cache/metadata/cycle_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/cache/metadata/cycle_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,145 +0,0 @@ +-// Copyright 2023 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package metadata +- +-import ( +- "maps" +- "sort" +- "strings" +- "testing" +- +- "golang.org/x/tools/gopls/internal/util/bug" +-) +- +-func init() { +- bug.PanicOnBugs = true +-} +- +-// This is an internal test of the breakImportCycles logic. +-func TestBreakImportCycles(t *testing.T) { +- +- // parse parses an import dependency graph. +- // The input is a semicolon-separated list of node descriptions. +- // Each node description is a package ID, optionally followed by +- // "->" and a comma-separated list of successor IDs. +- // Thus "a->b;b->c,d;e" represents the set of nodes {a,b,e} +- // and the set of edges {a->b, b->c, b->d}. +- parse := func(s string) map[PackageID]*Package { +- m := make(map[PackageID]*Package) +- makeNode := func(name string) *Package { +- id := PackageID(name) +- n, ok := m[id] +- if !ok { +- n = &Package{ +- ID: id, +- DepsByPkgPath: make(map[PackagePath]PackageID), +- } +- m[id] = n +- } +- return n +- } +- if s != "" { +- for item := range strings.SplitSeq(s, ";") { +- nodeID, succIDs, ok := strings.Cut(item, "->") +- node := makeNode(nodeID) +- if ok { +- for succID := range strings.SplitSeq(succIDs, ",") { +- node.DepsByPkgPath[PackagePath(succID)] = PackageID(succID) +- } +- } +- } +- } +- return m +- } +- +- // Sanity check of cycle detector. +- { +- got := cyclic(parse("a->b;b->c;c->a,d")) +- has := func(s string) bool { return strings.Contains(got, s) } +- if !(has("a->b") && has("b->c") && has("c->a") && !has("d")) { +- t.Fatalf("cyclic: got %q, want a->b->c->a or equivalent", got) +- } +- } +- +- // format formats an import graph, in lexicographic order, +- // in the notation of parse, but with a "!" 
after the name +- // of each node that has errors. +- format := func(graph map[PackageID]*Package) string { +- var items []string +- for _, mp := range graph { +- item := string(mp.ID) +- if len(mp.Errors) > 0 { +- item += "!" +- } +- var succs []string +- for _, depID := range mp.DepsByPkgPath { +- succs = append(succs, string(depID)) +- } +- if succs != nil { +- sort.Strings(succs) +- item += "->" + strings.Join(succs, ",") +- } +- items = append(items, item) +- } +- sort.Strings(items) +- return strings.Join(items, ";") +- } +- +- // We needn't test self-cycles as they are eliminated at Metadata construction. +- for _, test := range []struct { +- metadata, updates, want string +- }{ +- // Simple 2-cycle. +- {"a->b", "b->a", +- "a->b;b!"}, // broke b->a +- +- {"a->b;b->c;c", "b->a,c", +- "a->b;b!->c;c"}, // broke b->a +- +- // Reversing direction of p->s edge creates pqrs cycle. +- {"a->p,q,r,s;p->q,s,z;q->r,z;r->s,z;s->z", "p->q,z;s->p,z", +- "a->p,q,r,s;p!->z;q->r,z;r->s,z;s!->z"}, // broke p->q, s->p +- +- // We break all intra-SCC edges from updated nodes, +- // which may be more than necessary (e.g. a->b). +- {"a->b;b->c;c;d->a", "a->b,e;c->d", +- "a!->e;b->c;c!;d->a"}, // broke a->b, c->d +- } { +- metadata := parse(test.metadata) +- updates := parse(test.updates) +- +- if cycle := cyclic(metadata); cycle != "" { +- t.Errorf("initial metadata %s has cycle %s: ", format(metadata), cycle) +- continue +- } +- +- t.Log("initial", format(metadata)) +- +- // Apply updates. +- // (parse doesn't have a way to express node deletions, +- // but they aren't very interesting.) +- maps.Copy(metadata, updates) +- +- t.Log("updated", format(metadata)) +- +- // breakImportCycles accesses only these fields of Metadata: +- // DepsByImpPath, ID - read +- // DepsByPkgPath - read, updated +- // Errors - updated +- breakImportCycles(metadata, updates) +- +- t.Log("acyclic", format(metadata)) +- +- if cycle := cyclic(metadata); cycle != "" { +- t.Errorf("resulting metadata %s has cycle %s: ", format(metadata), cycle) +- } +- +- got := format(metadata) +- if got != test.want { +- t.Errorf("test.metadata=%s test.updates=%s: got=%s want=%s", +- test.metadata, test.updates, got, test.want) +- } +- } +-} +diff -urN a/gopls/internal/cache/metadata/graph.go b/gopls/internal/cache/metadata/graph.go +--- a/gopls/internal/cache/metadata/graph.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/cache/metadata/graph.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,483 +0,0 @@ +-// Copyright 2022 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package metadata +- +-import ( +- "cmp" +- "iter" +- "maps" +- "slices" +- "sort" +- "strings" +- +- "golang.org/x/tools/go/packages" +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/gopls/internal/util/bug" +-) +- +-// A Graph is an immutable and transitively closed graph of [Package] data. +-type Graph struct { +- // Packages maps package IDs to their associated Packages. +- Packages map[PackageID]*Package +- +- // Each of the three maps below is an index of the pointer values held +- // by the Packages map. However, Package pointers are not generally canonical. +- +- // ImportedBy maps package IDs to the list of packages that import them. +- ImportedBy map[PackageID][]*Package +- +- // ForPackagePath maps package by their package path to their package ID. 
+- // Non-test packages appear before test packages, and within each of those +- // categories, packages with fewer CompiledGoFiles appear first. +- ForPackagePath map[PackagePath][]*Package +- +- // ForFile maps file URIs to packages, sorted by (!valid, cli, packageID). +- // A single file may belong to multiple packages due to tests packages. +- ForFile map[protocol.DocumentURI][]*Package +-} +- +-// Metadata implements the [Source] interface +-func (g *Graph) Metadata(id PackageID) *Package { +- return g.Packages[id] +-} +- +-// Update creates a new Graph containing the result of applying the given +-// updates to the receiver, though the receiver is not itself mutated. As a +-// special case, if updates is empty, Update just returns the receiver. +-// +-// A nil map value is used to indicate a deletion. +-func (g *Graph) Update(updates map[PackageID]*Package) *Graph { +- if len(updates) == 0 { +- // Optimization: since the graph is immutable, we can return the receiver. +- return g +- } +- +- // Debugging golang/go#64227, golang/vscode-go#3126: +- // Assert that the existing metadata graph is acyclic. +- if cycle := cyclic(g.Packages); cycle != "" { +- bug.Reportf("metadata is cyclic even before updates: %s", cycle) +- } +- // Assert that the updates contain no self-cycles. +- for id, mp := range updates { +- if mp != nil { +- for _, depID := range mp.DepsByPkgPath { +- if depID == id { +- bug.Reportf("self-cycle in metadata update: %s", id) +- } +- } +- } +- } +- +- // Copy pkgs map then apply updates. +- pkgs := make(map[PackageID]*Package, len(g.Packages)) +- maps.Copy(pkgs, g.Packages) +- for id, mp := range updates { +- if mp == nil { +- delete(pkgs, id) +- } else { +- pkgs[id] = mp +- } +- } +- +- // Break import cycles involving updated nodes. +- breakImportCycles(pkgs, updates) +- +- return newGraph(pkgs) +-} +- +-// newGraph returns a new metadataGraph, +-// deriving relations from the specified metadata. +-func newGraph(pkgs map[PackageID]*Package) *Graph { +- // Build the import graph. +- importedBy := make(map[PackageID][]*Package) +- byPackagePath := make(map[PackagePath][]*Package) +- for _, mp := range pkgs { +- for _, depID := range mp.DepsByPkgPath { +- importedBy[depID] = append(importedBy[depID], mp) +- } +- byPackagePath[mp.PkgPath] = append(byPackagePath[mp.PkgPath], mp) +- } +- +- // Collect file associations. +- uriPkgs := make(map[protocol.DocumentURI][]*Package) +- for _, mp := range pkgs { +- uris := map[protocol.DocumentURI]struct{}{} +- for _, uri := range mp.CompiledGoFiles { +- uris[uri] = struct{}{} +- } +- for _, uri := range mp.GoFiles { +- uris[uri] = struct{}{} +- } +- for _, uri := range mp.OtherFiles { +- if strings.HasSuffix(string(uri), ".s") { // assembly +- uris[uri] = struct{}{} +- } +- } +- for uri := range uris { +- uriPkgs[uri] = append(uriPkgs[uri], mp) +- } +- } +- +- // Sort and filter file associations. +- for uri, pkgs := range uriPkgs { +- sort.Slice(pkgs, func(i, j int) bool { +- cli := IsCommandLineArguments(pkgs[i].ID) +- clj := IsCommandLineArguments(pkgs[j].ID) +- if cli != clj { +- return clj +- } +- +- // 2. packages appear in name order. +- return pkgs[i].ID < pkgs[j].ID +- }) +- +- // Choose the best packages for each URI, according to the following rules: +- // - If there are any valid real packages, choose them. +- // - Else, choose the first valid command-line-argument package, if it exists. 
+- // +- // TODO(rfindley): it might be better to track all packages here, and exclude +- // them later when type checking, but this is the existing behavior. +- for i, pkg := range pkgs { +- // If we've seen *anything* prior to command-line arguments package, take +- // it. Note that pkgs[0] may itself be command-line-arguments. +- if i > 0 && IsCommandLineArguments(pkg.ID) { +- uriPkgs[uri] = pkgs[:i] +- break +- } +- } +- } +- +- for _, mps := range byPackagePath { +- slices.SortFunc(mps, func(a, b *Package) int { +- if (a.ForTest == "") != (b.ForTest == "") { +- if a.ForTest == "" { +- return -1 +- } +- return 1 +- } +- if c := cmp.Compare(len(a.CompiledGoFiles), len(b.CompiledGoFiles)); c != 0 { +- return c +- } +- return cmp.Compare(a.ID, b.ID) +- }) +- } +- +- return &Graph{ +- Packages: pkgs, +- ImportedBy: importedBy, +- ForPackagePath: byPackagePath, +- ForFile: uriPkgs, +- } +-} +- +-// ReverseReflexiveTransitiveClosure returns a new mapping containing the +-// metadata for the specified packages along with any package that +-// transitively imports one of them, keyed by ID, including all the initial packages. +-func (g *Graph) ReverseReflexiveTransitiveClosure(ids ...PackageID) map[PackageID]*Package { +- seen := make(map[PackageID]*Package) +- var visitAll func([]*Package) +- visitAll = func(pkgs []*Package) { +- for _, pkg := range pkgs { +- if seen[pkg.ID] == nil { +- seen[pkg.ID] = pkg +- visitAll(g.ImportedBy[pkg.ID]) +- } +- } +- } +- var initial []*Package +- for _, id := range ids { +- if pkg := g.Packages[id]; pkg != nil { +- initial = append(initial, pkg) +- } +- } +- visitAll(initial) +- return seen +-} +- +-// ForwardReflexiveTransitiveClosure returns an iterator over the +-// specified nodes and all their forward dependencies, in an arbitrary +-// topological (dependencies-first) order. The order may vary. +-func (g *Graph) ForwardReflexiveTransitiveClosure(ids ...PackageID) iter.Seq[*Package] { +- return func(yield func(*Package) bool) { +- seen := make(map[PackageID]bool) +- var visit func(PackageID) bool +- visit = func(id PackageID) bool { +- if !seen[id] { +- seen[id] = true +- if mp := g.Packages[id]; mp != nil { +- for _, depID := range mp.DepsByPkgPath { +- if !visit(depID) { +- return false +- } +- } +- if !yield(mp) { +- return false +- } +- } +- } +- return true +- } +- for _, id := range ids { +- visit(id) +- } +- } +-} +- +-// breakImportCycles breaks import cycles in the metadata by deleting +-// Deps* edges. It modifies only metadata present in the 'updates' +-// subset. This function has an internal test. +-func breakImportCycles(metadata, updates map[PackageID]*Package) { +- // 'go list' should never report a cycle without flagging it +- // as such, but we're extra cautious since we're combining +- // information from multiple runs of 'go list'. Also, Bazel +- // may silently report cycles. +- cycles := detectImportCycles(metadata, updates) +- if len(cycles) > 0 { +- // There were cycles (uncommon). Break them. +- // +- // The naive way to break cycles would be to perform a +- // depth-first traversal and to detect and delete +- // cycle-forming edges as we encounter them. +- // However, we're not allowed to modify the existing +- // Metadata records, so we can only break edges out of +- // the 'updates' subset. +- // +- // Another possibility would be to delete not the +- // cycle forming edge but the topmost edge on the +- // stack whose tail is an updated node. 
+- // However, this would require that we retroactively +- // undo all the effects of the traversals that +- // occurred since that edge was pushed on the stack. +- // +- // We use a simpler scheme: we compute the set of cycles. +- // All cyclic paths necessarily involve at least one +- // updated node, so it is sufficient to break all +- // edges from each updated node to other members of +- // the strong component. +- // +- // This may result in the deletion of dominating +- // edges, causing some dependencies to appear +- // spuriously unreachable. Consider A <-> B -> C +- // where updates={A,B}. The cycle is {A,B} so the +- // algorithm will break both A->B and B->A, causing +- // A to no longer depend on B or C. +- // +- // But that's ok: any error in Metadata.Errors is +- // conservatively assumed by snapshot.clone to be a +- // potential import cycle error, and causes special +- // invalidation so that if B later drops its +- // cycle-forming import of A, both A and B will be +- // invalidated. +- for _, cycle := range cycles { +- cyclic := make(map[PackageID]bool) +- for _, mp := range cycle { +- cyclic[mp.ID] = true +- } +- for id := range cyclic { +- if mp := updates[id]; mp != nil { +- for path, depID := range mp.DepsByImpPath { +- if cyclic[depID] { +- delete(mp.DepsByImpPath, path) +- } +- } +- for path, depID := range mp.DepsByPkgPath { +- if cyclic[depID] { +- delete(mp.DepsByPkgPath, path) +- } +- } +- +- // Set m.Errors to enable special +- // invalidation logic in snapshot.clone. +- if len(mp.Errors) == 0 { +- mp.Errors = []packages.Error{{ +- Msg: "detected import cycle", +- Kind: packages.ListError, +- }} +- } +- } +- } +- } +- +- // double-check when debugging +- if false { +- if cycles := detectImportCycles(metadata, updates); len(cycles) > 0 { +- bug.Reportf("unbroken cycle: %v", cycles) +- } +- } +- } +-} +- +-// cyclic returns a description of a cycle, +-// if the graph is cyclic, otherwise "". +-func cyclic(graph map[PackageID]*Package) string { +- const ( +- unvisited = 0 +- visited = 1 +- onstack = 2 +- ) +- color := make(map[PackageID]int) +- var visit func(id PackageID) string +- visit = func(id PackageID) string { +- switch color[id] { +- case unvisited: +- color[id] = onstack +- case onstack: +- return string(id) // cycle! +- case visited: +- return "" +- } +- if mp := graph[id]; mp != nil { +- for _, depID := range mp.DepsByPkgPath { +- if cycle := visit(depID); cycle != "" { +- return string(id) + "->" + cycle +- } +- } +- } +- color[id] = visited +- return "" +- } +- for id := range graph { +- if cycle := visit(id); cycle != "" { +- return cycle +- } +- } +- return "" +-} +- +-// detectImportCycles reports cycles in the metadata graph. It returns a new +-// unordered array of all cycles (nontrivial strong components) in the +-// metadata graph reachable from a non-nil 'updates' value. +-func detectImportCycles(metadata, updates map[PackageID]*Package) [][]*Package { +- // We use the depth-first algorithm of Tarjan. +- // https://doi.org/10.1137/0201010 +- // +- // TODO(adonovan): when we can use generics, consider factoring +- // in common with the other implementation of Tarjan (in typerefs), +- // abstracting over the node and edge representation. +- +- // A node wraps a Metadata with its working state. +- // (Unfortunately we can't intrude on shared Metadata.) 
+- type node struct { +- rep *node +- mp *Package +- index, lowlink int32 +- scc int8 // TODO(adonovan): opt: cram these 1.5 bits into previous word +- } +- nodes := make(map[PackageID]*node, len(metadata)) +- nodeOf := func(id PackageID) *node { +- n, ok := nodes[id] +- if !ok { +- mp := metadata[id] +- if mp == nil { +- // Dangling import edge. +- // Not sure whether a go/packages driver ever +- // emits this, but create a dummy node in case. +- // Obviously it won't be part of any cycle. +- mp = &Package{ID: id} +- } +- n = &node{mp: mp} +- n.rep = n +- nodes[id] = n +- } +- return n +- } +- +- // find returns the canonical node decl. +- // (The nodes form a disjoint set forest.) +- var find func(*node) *node +- find = func(n *node) *node { +- rep := n.rep +- if rep != n { +- rep = find(rep) +- n.rep = rep // simple path compression (no union-by-rank) +- } +- return rep +- } +- +- // global state +- var ( +- index int32 = 1 +- stack []*node +- sccs [][]*Package // set of nontrivial strongly connected components +- ) +- +- // visit implements the depth-first search of Tarjan's SCC algorithm +- // Precondition: x is canonical. +- var visit func(*node) +- visit = func(x *node) { +- x.index = index +- x.lowlink = index +- index++ +- +- stack = append(stack, x) // push +- x.scc = -1 +- +- for _, yid := range x.mp.DepsByPkgPath { +- y := nodeOf(yid) +- // Loop invariant: x is canonical. +- y = find(y) +- if x == y { +- continue // nodes already combined (self-edges are impossible) +- } +- +- switch { +- case y.scc > 0: +- // y is already a collapsed SCC +- +- case y.scc < 0: +- // y is on the stack, and thus in the current SCC. +- if y.index < x.lowlink { +- x.lowlink = y.index +- } +- +- default: +- // y is unvisited; visit it now. +- visit(y) +- // Note: x and y are now non-canonical. +- x = find(x) +- if y.lowlink < x.lowlink { +- x.lowlink = y.lowlink +- } +- } +- } +- +- // Is x the root of an SCC? +- if x.lowlink == x.index { +- // Gather all metadata in the SCC (if nontrivial). +- var scc []*Package +- for { +- // Pop y from stack. +- i := len(stack) - 1 +- y := stack[i] +- stack = stack[:i] +- if x != y || scc != nil { +- scc = append(scc, y.mp) +- } +- if x == y { +- break // complete +- } +- // x becomes y's canonical representative. +- y.rep = x +- } +- if scc != nil { +- sccs = append(sccs, scc) +- } +- x.scc = 1 +- } +- } +- +- // Visit only the updated nodes: +- // the existing metadata graph has no cycles, +- // so any new cycle must involve an updated node. +- for id, mp := range updates { +- if mp != nil { +- if n := nodeOf(id); n.index == 0 { // unvisited +- visit(n) +- } +- } +- } +- +- return sccs +-} +diff -urN a/gopls/internal/cache/metadata/metadata.go b/gopls/internal/cache/metadata/metadata.go +--- a/gopls/internal/cache/metadata/metadata.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/cache/metadata/metadata.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,256 +0,0 @@ +-// Copyright 2023 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-// The metadata package defines types and functions for working with package +-// metadata, which describes Go packages and their relationships. +-// +-// Package metadata is loaded by gopls using go/packages, and the [Package] +-// type is itself a projection and translation of data from +-// go/packages.Package. 
+-// +-// Packages are assembled into an immutable [Graph] +-package metadata +- +-import ( +- "go/ast" +- "go/types" +- "sort" +- "strconv" +- "strings" +- +- "golang.org/x/tools/go/packages" +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/internal/packagesinternal" +-) +- +-// Declare explicit types for package paths, names, and IDs to ensure that we +-// never use an ID where a path belongs, and vice versa. If we confused these, +-// it would result in confusing errors because package IDs often look like +-// package paths. +-type ( +- PackageID string // go list's unique identifier for a package (e.g. "vendor/example.com/foo [vendor/example.com/bar.test]") +- PackagePath string // name used to prefix linker symbols (e.g. "vendor/example.com/foo") +- PackageName string // identifier in 'package' declaration (e.g. "foo") +- ImportPath string // path that appears in an import declaration (e.g. "example.com/foo") +-) +- +-// Package represents package metadata retrieved from go/packages. +-// The DepsBy{Imp,Pkg}Path maps do not contain self-import edges. +-// +-// An ad-hoc package (without go.mod or GOPATH) has its ID, PkgPath, +-// and LoadDir equal to the absolute path of its directory. +-type Package struct { +- ID PackageID +- PkgPath PackagePath +- Name PackageName +- +- // These fields are as defined by go/packages.Package +- GoFiles []protocol.DocumentURI +- CompiledGoFiles []protocol.DocumentURI +- IgnoredFiles []protocol.DocumentURI +- OtherFiles []protocol.DocumentURI +- +- ForTest PackagePath // q in a "p [q.test]" package, else "" +- TypesSizes types.Sizes +- Errors []packages.Error // must be set for packages in import cycles +- DepsByImpPath map[ImportPath]PackageID // may contain dups; empty ID => missing +- DepsByPkgPath map[PackagePath]PackageID // values are unique and non-empty +- Module *packages.Module +- DepsErrors []*packagesinternal.PackageError +- LoadDir string // directory from which go/packages was run +- Standalone bool // package synthesized for a standalone file (e.g. ignore-tagged) +-} +- +-func (mp *Package) String() string { return string(mp.ID) } +- +-// IsIntermediateTestVariant reports whether the given package is an +-// intermediate test variant (ITV), e.g. "net/http [net/url.test]". +-// +-// An ITV has identical syntax to the regular variant, but different +-// import metadata (DepsBy{Imp,Pkg}Path). +-// +-// Such test variants arise when an x_test package (in this case net/url_test) +-// imports a package (in this case net/http) that itself imports the +-// non-x_test package (in this case net/url). +-// +-// This is done so that the forward transitive closure of net/url_test has +-// only one package for the "net/url" import. +-// The ITV exists to hold the test variant import: +-// +-// net/url_test [net/url.test] +-// +-// | "net/http" -> net/http [net/url.test] +-// | "net/url" -> net/url [net/url.test] +-// | ... +-// +-// net/http [net/url.test] +-// +-// | "net/url" -> net/url [net/url.test] +-// | ... +-// +-// This restriction propagates throughout the import graph of net/http: for +-// every package imported by net/http that imports net/url, there must be an +-// intermediate test variant that instead imports "net/url [net/url.test]". +-// +-// As one can see from the example of net/url and net/http, intermediate test +-// variants can result in many additional packages that are essentially (but +-// not quite) identical. For this reason, we filter these variants wherever +-// possible. 
+-// +-// # Why we mostly ignore intermediate test variants +-// +-// In projects with complicated tests, there may be a very large +-// number of ITVs--asymptotically more than the number of ordinary +-// variants. Since they have identical syntax, it is fine in most +-// cases to ignore them since the results of analyzing the ordinary +-// variant suffice. However, this is not entirely sound. +-// +-// Consider this package: +-// +-// // p/p.go -- in all variants of p +-// package p +-// type T struct { io.Closer } +-// +-// // p/p_test.go -- in test variant of p +-// package p +-// func (T) Close() error { ... } +-// +-// The ordinary variant "p" defines T with a Close method promoted +-// from io.Closer. But its test variant "p [p.test]" defines a type T +-// with a Close method from p_test.go. +-// +-// Now consider a package q that imports p, perhaps indirectly. Within +-// it, T.Close will resolve to the first Close method: +-// +-// // q/q.go -- in all variants of q +-// package q +-// import "p" +-// var _ = new(p.T).Close +-// +-// Let's assume p also contains this file defining an external test (xtest): +-// +-// // p/p_x_test.go -- external test of p +-// package p_test +-// import ( "q"; "testing" ) +-// func Test(t *testing.T) { ... } +-// +-// Note that q imports p, but p's xtest imports q. Now, in "q +-// [p.test]", the intermediate test variant of q built for p's +-// external test, T.Close resolves not to the io.Closer.Close +-// interface method, but to the concrete method of T.Close +-// declared in p_test.go. +-// +-// If we now request all references to the T.Close declaration in +-// p_test.go, the result should include the reference from q's ITV. +-// (It's not just methods that can be affected; fields can too, though +-// it requires bizarre code to achieve.) +-// +-// As a matter of policy, gopls mostly ignores this subtlety, +-// because to account for it would require that we type-check every +-// intermediate test variant of p, of which there could be many. +-// Good code doesn't rely on such trickery. +-// +-// Most callers of MetadataForFile set removeIntermediateTestVariants parameter +-// to discard them before requesting type checking, or the products of +-// type-checking such as the cross-reference index or method set index. +-// +-// MetadataForFile doesn't do this filtering itself because in some +-// cases we need to make a reverse dependency query on the metadata +-// graph, and it's important to include the rdeps of ITVs in that +-// query. But the filtering of ITVs should be applied after that step, +-// before type checking. +-// +-// In general, we should never type check an ITV. +-func (mp *Package) IsIntermediateTestVariant() bool { +- return mp.ForTest != "" && mp.ForTest != mp.PkgPath && mp.ForTest+"_test" != mp.PkgPath +-} +- +-// A Source maps package IDs to metadata for the packages. +-type Source interface { +- // Metadata returns the [Package] for the given package ID, or nil if it does +- // not exist. +- // TODO(rfindley): consider returning (*Metadata, bool) +- // TODO(rfindley): consider renaming this method. +- Metadata(PackageID) *Package +-} +- +-// TODO(rfindley): move the utility functions below to a util.go file. +- +-// IsCommandLineArguments reports whether a given value denotes +-// "command-line-arguments" package, which is a package with an unknown ID +-// created by the go command. It can have a test variant, which is why callers +-// should not check that a value equals "command-line-arguments" directly. 
+-func IsCommandLineArguments(id PackageID) bool { +- return strings.Contains(string(id), "command-line-arguments") +-} +- +-// SortPostOrder sorts the IDs so that if x depends on y, then y appears before x. +-func SortPostOrder(meta Source, ids []PackageID) { +- postorder := make(map[PackageID]int) +- order := 0 +- var visit func(PackageID) +- visit = func(id PackageID) { +- if _, ok := postorder[id]; !ok { +- postorder[id] = -1 // break recursion +- if mp := meta.Metadata(id); mp != nil { +- for _, depID := range mp.DepsByPkgPath { +- visit(depID) +- } +- } +- order++ +- postorder[id] = order +- } +- } +- for _, id := range ids { +- visit(id) +- } +- sort.Slice(ids, func(i, j int) bool { +- return postorder[ids[i]] < postorder[ids[j]] +- }) +-} +- +-// UnquoteImportPath returns the unquoted import path of s, +-// or "" if the path is not properly quoted. +-func UnquoteImportPath(spec *ast.ImportSpec) ImportPath { +- path, err := strconv.Unquote(spec.Path.Value) +- if err != nil { +- return "" +- } +- return ImportPath(path) +-} +- +-// RemoveIntermediateTestVariants removes intermediate test variants, modifying +-// the array. We use a pointer to a slice make it impossible to forget to use +-// the result. +-func RemoveIntermediateTestVariants(pmetas *[]*Package) { +- metas := *pmetas +- res := metas[:0] +- for _, mp := range metas { +- if !mp.IsIntermediateTestVariant() { +- res = append(res, mp) +- } +- } +- *pmetas = res +-} +- +-// IsValidImport returns whether from may import to. +-func IsValidImport(from, to PackagePath, goList bool) bool { +- // If the metadata came from a build system other than go list +- // (e.g. bazel) it is beyond our means to compute visibility. +- if !goList { +- return true +- } +- i := strings.LastIndex(string(to), "/internal/") +- if i == -1 { +- return true +- } +- // TODO(rfindley): this looks wrong: IsCommandLineArguments is meant to +- // operate on package IDs, not package paths. +- if IsCommandLineArguments(PackageID(from)) { +- return true +- } +- // TODO(rfindley): this is wrong. mod.testx/p should not be able to +- // import mod.test/internal: https://go.dev/play/p/-Ca6P-E4V4q +- return strings.HasPrefix(string(from), string(to[:i])) +-} +diff -urN a/gopls/internal/cache/methodsets/methodsets.go b/gopls/internal/cache/methodsets/methodsets.go +--- a/gopls/internal/cache/methodsets/methodsets.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/cache/methodsets/methodsets.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,446 +0,0 @@ +-// Copyright 2023 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-// Package methodsets defines an incremental, serializable index of +-// method-set information that allows efficient 'implements' queries +-// across packages of the workspace without using the type checker. +-// +-// This package provides only the "global" (all workspace) search; the +-// "local" search within a given package uses a different +-// implementation based on type-checker data structures for a single +-// package plus variants; see ../implementation.go. +-// The local algorithm is more precise as it tests function-local types too. +-// +-// A global index of function-local types is challenging since they +-// may reference other local types, for which we would need to invent +-// stable names, an unsolved problem described in passing in Go issue +-// 57497. 
The global algorithm also does not index anonymous interface +-// types, even outside function bodies. +-// +-// Consequently, global results are not symmetric: applying the +-// operation twice may not get you back where you started. +-package methodsets +- +-// DESIGN +-// +-// See https://go.dev/cl/452060 for a minimal exposition of the algorithm. +-// +-// For each method, we compute a fingerprint: a string representing +-// the method name and type such that equal fingerprint strings mean +-// identical method types. +-// +-// For efficiency, the fingerprint is reduced to a single bit +-// of a uint64, so that the method set can be represented as +-// the union of those method bits (a uint64 bitmask). +-// Assignability thus reduces to a subset check on bitmasks +-// followed by equality checks on fingerprints. +-// +-// In earlier experiments, using 128-bit masks instead of 64 reduced +-// the number of candidates by about 2x. Using (like a Bloom filter) a +-// different hash function to compute a second 64-bit mask and +-// performing a second mask test reduced it by about 4x. +-// Neither had much effect on the running time, presumably because a +-// single 64-bit mask is quite effective. See CL 452060 for details. +- +-import ( +- "go/token" +- "go/types" +- "hash/crc32" +- "slices" +- "sync/atomic" +- +- "golang.org/x/tools/go/types/objectpath" +- "golang.org/x/tools/gopls/internal/cache/metadata" +- "golang.org/x/tools/gopls/internal/util/bug" +- "golang.org/x/tools/gopls/internal/util/fingerprint" +- "golang.org/x/tools/gopls/internal/util/frob" +- "golang.org/x/tools/gopls/internal/util/safetoken" +- "golang.org/x/tools/internal/typesinternal" +-) +- +-// An Index records the non-empty method sets of all package-level +-// types in a package in a form that permits assignability queries +-// without the type checker. +-type Index struct { +- pkg gobPackage +- PkgPath metadata.PackagePath +-} +- +-// Decode decodes the given gob-encoded data as an Index. +-func Decode(pkgpath metadata.PackagePath, data []byte) *Index { +- var pkg gobPackage +- packageCodec.Decode(data, &pkg) +- return &Index{pkg: pkg, PkgPath: pkgpath} +-} +- +-// Encode encodes the receiver as gob-encoded data. +-func (index *Index) Encode() []byte { +- return packageCodec.Encode(index.pkg) +-} +- +-// NewIndex returns a new index of method-set information for all +-// package-level types in the specified package. +-func NewIndex(fset *token.FileSet, pkg *types.Package) *Index { +- return new(indexBuilder).build(fset, pkg) +-} +- +-// A Location records the extent of an identifier in byte-offset form. +-// +-// Conversion to protocol (UTF-16) form is done by the caller after a +-// search, not during index construction. +-type Location struct { +- Filename string +- Start, End int // byte offsets +-} +- +-// A Key represents the method set of a given type in a form suitable +-// to pass to the (*Index).Search method of many different Indexes. +-type Key struct { +- mset *gobMethodSet // note: lacks position information +-} +- +-// KeyOf returns the search key for the method sets of a given type. +-// It returns false if the type has no methods. +-func KeyOf(t types.Type) (Key, bool) { +- mset := methodSetInfo(t, nil) +- if mset.Mask == 0 { +- return Key{}, false // no methods +- } +- return Key{mset}, true +-} +- +-// A Result reports a matching type or method in a method-set search. 
+-type Result struct { +- TypeName string // name of the named type +- IsInterface bool // matched type (or method) is abstract +- Location Location // location of the type or method +- +- // methods only: +- PkgPath string // path of declaring package (may differ due to embedding) +- ObjectPath objectpath.Path // path of method within declaring package +-} +- +-// TypeRelation indicates the direction of subtyping relation, +-// if any, between two types. +-// +-// It is a bitset, so that clients of Implementations may use +-// Supertype|Subtype to request an undirected match. +-type TypeRelation int8 +- +-const ( +- Supertype TypeRelation = 0x1 +- Subtype TypeRelation = 0x2 +-) +- +-// Search reports each type that implements (Supertype ∈ want) or is +-// implemented by (Subtype ∈ want) the type that produced the search key. +-// +-// If method is non-nil, only that method of each type is reported. +-// +-// The result does not include the error.Error method. +-// TODO(adonovan): give this special case a more systematic treatment. +-func (index *Index) Search(key Key, want TypeRelation, method *types.Func) []Result { +- var results []Result +- for _, candidate := range index.pkg.MethodSets { +- // Test the direction of the relation. +- // The client may request either direction or both +- // (e.g. when the client is References), +- // and the Result reports each test independently; +- // both tests succeed when comparing identical +- // interface types. +- var got TypeRelation +- if want&Subtype != 0 && implements(candidate, key.mset) { +- got |= Subtype +- } +- if want&Supertype != 0 && implements(key.mset, candidate) { +- got |= Supertype +- } +- if got == 0 { +- continue +- } +- +- typeName := index.pkg.Strings[candidate.TypeName] +- if method == nil { +- results = append(results, Result{ +- TypeName: typeName, +- IsInterface: candidate.IsInterface, +- Location: index.location(candidate.Posn), +- }) +- } else { +- for _, m := range candidate.Methods { +- if m.ID == method.Id() { +- // Don't report error.Error among the results: +- // it has no true source location, no package, +- // and is excluded from the xrefs index. +- if m.PkgPath == 0 || m.ObjectPath == 0 { +- if m.ID != "Error" { +- panic("missing info for" + m.ID) +- } +- continue +- } +- +- results = append(results, Result{ +- TypeName: typeName, +- IsInterface: candidate.IsInterface, +- Location: index.location(m.Posn), +- PkgPath: index.pkg.Strings[m.PkgPath], +- ObjectPath: objectpath.Path(index.pkg.Strings[m.ObjectPath]), +- }) +- break +- } +- } +- } +- } +- return results +-} +- +-// implements reports whether x implements y. +-func implements(x, y *gobMethodSet) bool { +- if !y.IsInterface { +- return false +- } +- +- // Fast path: neither method set is tricky, so all methods can +- // be compared by equality of ID and Fingerprint, and the +- // entire subset check can be done using the bit mask. +- if !x.Tricky && !y.Tricky { +- if x.Mask&y.Mask != y.Mask { +- return false // x lacks a method of interface y +- } +- } +- +- // At least one operand is tricky (e.g. contains a type parameter), +- // so we must used tree-based matching (unification). +- +- // nonmatching reports whether interface method 'my' lacks +- // a matching method in set x. (The sense is inverted for use +- // with slice.ContainsFunc below.) 
+- nonmatching := func(my *gobMethod) bool { +- for _, mx := range x.Methods { +- if mx.ID == my.ID { +- var match bool +- if !mx.Tricky && !my.Tricky { +- // Fast path: neither method is tricky, +- // so a string match is sufficient. +- match = mx.Sum&my.Sum == my.Sum && mx.Fingerprint == my.Fingerprint +- } else { +- match = fingerprint.Matches(mx.parse(), my.parse()) +- } +- return !match +- } +- } +- return true // method of y not found in x +- } +- +- // Each interface method must have a match. +- // (This would be more readable with a DeMorganized +- // variant of ContainsFunc.) +- return !slices.ContainsFunc(y.Methods, nonmatching) +-} +- +-func (index *Index) location(posn gobPosition) Location { +- return Location{ +- Filename: index.pkg.Strings[posn.File], +- Start: posn.Offset, +- End: posn.Offset + posn.Len, +- } +-} +- +-// An indexBuilder builds an index for a single package. +-type indexBuilder struct { +- gobPackage +- stringIndex map[string]int +-} +- +-// build adds to the index all package-level named types of the specified package. +-func (b *indexBuilder) build(fset *token.FileSet, pkg *types.Package) *Index { +- _ = b.string("") // 0 => "" +- +- objectPos := func(obj types.Object) gobPosition { +- posn := safetoken.StartPosition(fset, obj.Pos()) +- return gobPosition{b.string(posn.Filename), posn.Offset, len(obj.Name())} +- } +- +- objectpathFor := new(objectpath.Encoder).For +- +- // setindexInfo sets the (Posn, PkgPath, ObjectPath) fields for each method declaration. +- setIndexInfo := func(m *gobMethod, method *types.Func) { +- // error.Error has empty Position, PkgPath, and ObjectPath. +- if method.Pkg() == nil { +- return +- } +- +- // Instantiations of generic methods don't have an +- // object path, so we use the generic. +- p, err := objectpathFor(method.Origin()) +- if err != nil { +- // This should never happen for a method of a package-level type. +- // ...but it does (golang/go#70418). +- // Refine the crash into various bug reports. +- report := func() { +- bug.Reportf("missing object path for %s", method.FullName()) +- } +- sig := method.Signature() +- if sig.Recv() == nil { +- report() +- return +- } +- _, named := typesinternal.ReceiverNamed(sig.Recv()) +- switch { +- case named == nil: +- report() +- case sig.TypeParams().Len() > 0: +- report() +- case method.Origin() != method: +- report() // instantiated? +- case sig.RecvTypeParams().Len() > 0: +- report() // generic? +- default: +- report() +- } +- return +- } +- +- m.Posn = objectPos(method) +- m.PkgPath = b.string(method.Pkg().Path()) +- m.ObjectPath = b.string(string(p)) +- } +- +- // We ignore aliases, though in principle they could define a +- // struct{...} or interface{...} type, or an instantiation of +- // a generic, that has a novel method set. +- scope := pkg.Scope() +- for _, name := range scope.Names() { +- if tname, ok := scope.Lookup(name).(*types.TypeName); ok && !tname.IsAlias() { +- if mset := methodSetInfo(tname.Type(), setIndexInfo); mset.Mask != 0 { +- mset.TypeName = b.string(name) +- mset.Posn = objectPos(tname) +- // Only record types with non-trivial method sets. +- b.MethodSets = append(b.MethodSets, mset) +- } +- } +- } +- +- return &Index{ +- pkg: b.gobPackage, +- PkgPath: metadata.PackagePath(pkg.Path()), +- } +-} +- +-// string returns a small integer that encodes the string. 
+-func (b *indexBuilder) string(s string) int { +- i, ok := b.stringIndex[s] +- if !ok { +- i = len(b.Strings) +- if b.stringIndex == nil { +- b.stringIndex = make(map[string]int) +- } +- b.stringIndex[s] = i +- b.Strings = append(b.Strings, s) +- } +- return i +-} +- +-// methodSetInfo returns the method-set fingerprint of a type. +-// It calls the optional setIndexInfo function for each gobMethod. +-// This is used during index construction, but not search (KeyOf), +-// to store extra information. +-func methodSetInfo(t types.Type, setIndexInfo func(*gobMethod, *types.Func)) *gobMethodSet { +- // For non-interface types, use *T +- // (if T is not already a pointer) +- // since it may have more methods. +- mset := types.NewMethodSet(EnsurePointer(t)) +- +- // Convert the method set into a compact summary. +- var mask uint64 +- tricky := false +- var buf []byte +- methods := make([]*gobMethod, mset.Len()) +- for i := 0; i < mset.Len(); i++ { +- m := mset.At(i).Obj().(*types.Func) +- id := m.Id() +- fp, isTricky := fingerprint.Encode(m.Signature()) +- if isTricky { +- tricky = true +- } +- buf = append(append(buf[:0], id...), fp...) +- sum := crc32.ChecksumIEEE(buf) +- methods[i] = &gobMethod{ID: id, Fingerprint: fp, Sum: sum, Tricky: isTricky} +- if setIndexInfo != nil { +- setIndexInfo(methods[i], m) // set Position, PkgPath, ObjectPath +- } +- mask |= 1 << uint64(((sum>>24)^(sum>>16)^(sum>>8)^sum)&0x3f) +- } +- return &gobMethodSet{ +- IsInterface: types.IsInterface(t), +- Tricky: tricky, +- Mask: mask, +- Methods: methods, +- } +-} +- +-// EnsurePointer wraps T in a types.Pointer if T is a named, non-interface type. +-// This is useful to make sure you consider a named type's full method set. +-func EnsurePointer(T types.Type) types.Type { +- if _, ok := types.Unalias(T).(*types.Named); ok && !types.IsInterface(T) { +- return types.NewPointer(T) +- } +- +- return T +-} +- +-// -- serial format of index -- +- +-// (The name says gob but in fact we use frob.) +-var packageCodec = frob.CodecFor[gobPackage]() +- +-// A gobPackage records the method set of each package-level type for a single package. +-type gobPackage struct { +- Strings []string // index of strings used by gobPosition.File, gobMethod.{Pkg,Object}Path +- MethodSets []*gobMethodSet +-} +- +-// A gobMethodSet records the method set of a single type. +-type gobMethodSet struct { +- TypeName int // name (string index) of the package-level type +- Posn gobPosition +- IsInterface bool +- Tricky bool // at least one method is tricky; fingerprint must be parsed + unified +- Mask uint64 // mask with 1 bit from each of methods[*].sum +- Methods []*gobMethod +-} +- +-// A gobMethod records the name, type, and position of a single method. +-type gobMethod struct { +- ID string // (*types.Func).Id() value of method +- Fingerprint string // encoding of types as string of form "(params)(results)" +- Sum uint32 // checksum of ID + fingerprint +- Tricky bool // method type contains tricky features (type params, interface types) +- +- // index records only (zero in KeyOf; also for index of error.Error). +- Posn gobPosition // location of method declaration +- PkgPath int // path of package containing method declaration +- ObjectPath int // object path of method relative to PkgPath +- +- // internal fields (not serialized) +- tree atomic.Pointer[fingerprint.Tree] // fingerprint tree, parsed on demand +-} +- +-// A gobPosition records the file, offset, and length of an identifier. 
+-type gobPosition struct { +- File int // index into gobPackage.Strings +- Offset, Len int // in bytes +-} +- +-// parse returns the method's parsed fingerprint tree. +-// It may return a new instance or a cached one. +-func (m *gobMethod) parse() fingerprint.Tree { +- ptr := m.tree.Load() +- if ptr == nil { +- tree := fingerprint.Parse(m.Fingerprint) +- ptr = &tree +- m.tree.Store(ptr) // may race; that's ok +- } +- return *ptr +-} +diff -urN a/gopls/internal/cache/mod.go b/gopls/internal/cache/mod.go +--- a/gopls/internal/cache/mod.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/cache/mod.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,528 +0,0 @@ +-// Copyright 2019 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package cache +- +-import ( +- "context" +- "errors" +- "fmt" +- "regexp" +- "strings" +- +- "golang.org/x/mod/modfile" +- "golang.org/x/mod/module" +- "golang.org/x/tools/go/packages" +- "golang.org/x/tools/gopls/internal/file" +- "golang.org/x/tools/gopls/internal/label" +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/gopls/internal/protocol/command" +- "golang.org/x/tools/gopls/internal/util/memoize" +- "golang.org/x/tools/internal/event" +-) +- +-// A ParsedModule contains the results of parsing a go.mod file. +-type ParsedModule struct { +- URI protocol.DocumentURI +- File *modfile.File +- ReplaceMap map[module.Version]module.Version +- Mapper *protocol.Mapper +- ParseErrors []*Diagnostic +-} +- +-// ParseMod parses a go.mod file, using a cache. It may return partial results and an error. +-func (s *Snapshot) ParseMod(ctx context.Context, fh file.Handle) (*ParsedModule, error) { +- uri := fh.URI() +- +- s.mu.Lock() +- entry, hit := s.parseModHandles.Get(uri) +- s.mu.Unlock() +- +- type parseModKey file.Identity +- type parseModResult struct { +- parsed *ParsedModule +- err error +- } +- +- // cache miss? +- if !hit { +- promise, release := s.store.Promise(parseModKey(fh.Identity()), func(ctx context.Context, _ any) any { +- parsed, err := parseModImpl(ctx, fh) +- return parseModResult{parsed, err} +- }) +- +- entry = promise +- s.mu.Lock() +- s.parseModHandles.Set(uri, entry, func(_, _ any) { release() }) +- s.mu.Unlock() +- } +- +- // Await result. +- v, err := s.awaitPromise(ctx, entry) +- if err != nil { +- return nil, err +- } +- res := v.(parseModResult) +- return res.parsed, res.err +-} +- +-// parseModImpl parses the go.mod file whose name and contents are in fh. +-// It may return partial results and an error. +-func parseModImpl(ctx context.Context, fh file.Handle) (*ParsedModule, error) { +- _, done := event.Start(ctx, "cache.ParseMod", label.URI.Of(fh.URI())) +- defer done() +- +- contents, err := fh.Content() +- if err != nil { +- return nil, err +- } +- m := protocol.NewMapper(fh.URI(), contents) +- file, parseErr := modfile.Parse(fh.URI().Path(), contents, nil) +- // Attempt to convert the error to a standardized parse error. 
+- var parseErrors []*Diagnostic +- if parseErr != nil { +- mfErrList, ok := parseErr.(modfile.ErrorList) +- if !ok { +- return nil, fmt.Errorf("unexpected parse error type %v", parseErr) +- } +- for _, mfErr := range mfErrList { +- rng, err := m.OffsetRange(mfErr.Pos.Byte, mfErr.Pos.Byte) +- if err != nil { +- return nil, err +- } +- parseErrors = append(parseErrors, &Diagnostic{ +- URI: fh.URI(), +- Range: rng, +- Severity: protocol.SeverityError, +- Source: ParseError, +- Message: mfErr.Err.Error(), +- }) +- } +- } +- +- replaceMap := make(map[module.Version]module.Version) +- if parseErr == nil { +- for _, rep := range file.Replace { +- replaceMap[rep.Old] = rep.New +- } +- } +- +- return &ParsedModule{ +- URI: fh.URI(), +- Mapper: m, +- File: file, +- ReplaceMap: replaceMap, +- ParseErrors: parseErrors, +- }, parseErr +-} +- +-// A ParsedWorkFile contains the results of parsing a go.work file. +-type ParsedWorkFile struct { +- URI protocol.DocumentURI +- File *modfile.WorkFile +- Mapper *protocol.Mapper +- ParseErrors []*Diagnostic +-} +- +-// ParseWork parses a go.work file, using a cache. It may return partial results and an error. +-// TODO(adonovan): move to new work.go file. +-func (s *Snapshot) ParseWork(ctx context.Context, fh file.Handle) (*ParsedWorkFile, error) { +- uri := fh.URI() +- +- s.mu.Lock() +- entry, hit := s.parseWorkHandles.Get(uri) +- s.mu.Unlock() +- +- type parseWorkKey file.Identity +- type parseWorkResult struct { +- parsed *ParsedWorkFile +- err error +- } +- +- // cache miss? +- if !hit { +- handle, release := s.store.Promise(parseWorkKey(fh.Identity()), func(ctx context.Context, _ any) any { +- parsed, err := parseWorkImpl(ctx, fh) +- return parseWorkResult{parsed, err} +- }) +- +- entry = handle +- s.mu.Lock() +- s.parseWorkHandles.Set(uri, entry, func(_, _ any) { release() }) +- s.mu.Unlock() +- } +- +- // Await result. +- v, err := s.awaitPromise(ctx, entry) +- if err != nil { +- return nil, err +- } +- res := v.(parseWorkResult) +- return res.parsed, res.err +-} +- +-// parseWorkImpl parses a go.work file. It may return partial results and an error. +-func parseWorkImpl(ctx context.Context, fh file.Handle) (*ParsedWorkFile, error) { +- _, done := event.Start(ctx, "cache.ParseWork", label.URI.Of(fh.URI())) +- defer done() +- +- content, err := fh.Content() +- if err != nil { +- return nil, err +- } +- m := protocol.NewMapper(fh.URI(), content) +- file, parseErr := modfile.ParseWork(fh.URI().Path(), content, nil) +- // Attempt to convert the error to a standardized parse error. +- var parseErrors []*Diagnostic +- if parseErr != nil { +- mfErrList, ok := parseErr.(modfile.ErrorList) +- if !ok { +- return nil, fmt.Errorf("unexpected parse error type %v", parseErr) +- } +- for _, mfErr := range mfErrList { +- rng, err := m.OffsetRange(mfErr.Pos.Byte, mfErr.Pos.Byte) +- if err != nil { +- return nil, err +- } +- parseErrors = append(parseErrors, &Diagnostic{ +- URI: fh.URI(), +- Range: rng, +- Severity: protocol.SeverityError, +- Source: ParseError, +- Message: mfErr.Err.Error(), +- }) +- } +- } +- return &ParsedWorkFile{ +- URI: fh.URI(), +- Mapper: m, +- File: file, +- ParseErrors: parseErrors, +- }, parseErr +-} +- +-// ModWhy returns the "go mod why" result for each module named in a +-// require statement in the go.mod file. +-// TODO(adonovan): move to new mod_why.go file. 
+-func (s *Snapshot) ModWhy(ctx context.Context, fh file.Handle) (map[string]string, error) { +- uri := fh.URI() +- +- if s.FileKind(fh) != file.Mod { +- return nil, fmt.Errorf("%s is not a go.mod file", uri) +- } +- +- s.mu.Lock() +- entry, hit := s.modWhyHandles.Get(uri) +- s.mu.Unlock() +- +- type modWhyResult struct { +- why map[string]string +- err error +- } +- +- // cache miss? +- if !hit { +- handle := memoize.NewPromise("modWhy", func(ctx context.Context, arg any) any { +- why, err := modWhyImpl(ctx, arg.(*Snapshot), fh) +- return modWhyResult{why, err} +- }) +- +- entry = handle +- s.mu.Lock() +- s.modWhyHandles.Set(uri, entry, nil) +- s.mu.Unlock() +- } +- +- // Await result. +- v, err := s.awaitPromise(ctx, entry) +- if err != nil { +- return nil, err +- } +- res := v.(modWhyResult) +- return res.why, res.err +-} +- +-// modWhyImpl returns the result of "go mod why -m" on the specified go.mod file. +-func modWhyImpl(ctx context.Context, snapshot *Snapshot, fh file.Handle) (map[string]string, error) { +- ctx, done := event.Start(ctx, "cache.ModWhy", label.URI.Of(fh.URI())) +- defer done() +- +- pm, err := snapshot.ParseMod(ctx, fh) +- if err != nil { +- return nil, err +- } +- // No requires to explain. +- if len(pm.File.Require) == 0 { +- return nil, nil // empty result +- } +- // Run `go mod why` on all the dependencies. +- args := []string{"why", "-m"} +- for _, req := range pm.File.Require { +- args = append(args, req.Mod.Path) +- } +- inv, cleanupInvocation, err := snapshot.GoCommandInvocation(NoNetwork, fh.URI().DirPath(), "mod", args) +- if err != nil { +- return nil, err +- } +- defer cleanupInvocation() +- stdout, err := snapshot.View().GoCommandRunner().Run(ctx, *inv) +- if err != nil { +- return nil, err +- } +- whyList := strings.Split(stdout.String(), "\n\n") +- if len(whyList) != len(pm.File.Require) { +- return nil, fmt.Errorf("mismatched number of results: got %v, want %v", len(whyList), len(pm.File.Require)) +- } +- why := make(map[string]string, len(pm.File.Require)) +- for i, req := range pm.File.Require { +- why[req.Mod.Path] = whyList[i] +- } +- return why, nil +-} +- +-// extractGoCommandErrors tries to parse errors that come from the go command +-// and shape them into go.mod diagnostics. +-// TODO: rename this to 'load errors' +-func (s *Snapshot) extractGoCommandErrors(ctx context.Context, goCmdError error) []*Diagnostic { +- if goCmdError == nil { +- return nil +- } +- +- type locatedErr struct { +- loc protocol.Location +- msg string +- } +- diagLocations := map[*ParsedModule]locatedErr{} +- backupDiagLocations := map[*ParsedModule]locatedErr{} +- +- // If moduleErrs is non-nil, go command errors are scoped to specific +- // modules. +- var moduleErrs *moduleErrorMap +- _ = errors.As(goCmdError, &moduleErrs) +- +- // Match the error against all the mod files in the workspace. 
+- for _, uri := range s.View().ModFiles() { +- fh, err := s.ReadFile(ctx, uri) +- if err != nil { +- event.Error(ctx, "getting modfile for Go command error", err) +- continue +- } +- pm, err := s.ParseMod(ctx, fh) +- if err != nil { +- // Parsing errors are reported elsewhere +- return nil +- } +- var msgs []string // error messages to consider +- if moduleErrs != nil { +- if pm.File.Module != nil { +- for _, mes := range moduleErrs.errs[pm.File.Module.Mod.Path] { +- msgs = append(msgs, mes.Error()) +- } +- } +- } else { +- msgs = append(msgs, goCmdError.Error()) +- } +- for _, msg := range msgs { +- if strings.Contains(goCmdError.Error(), "errors parsing go.mod") { +- // The go command emits parse errors for completely invalid go.mod files. +- // Those are reported by our own diagnostics and can be ignored here. +- // As of writing, we are not aware of any other errors that include +- // file/position information, so don't even try to find it. +- continue +- } +- loc, found, err := s.matchErrorToModule(pm, msg) +- if err != nil { +- event.Error(ctx, "matching error to module", err) +- continue +- } +- le := locatedErr{ +- loc: loc, +- msg: msg, +- } +- if found { +- diagLocations[pm] = le +- } else { +- backupDiagLocations[pm] = le +- } +- } +- } +- +- // If we didn't find any good matches, assign diagnostics to all go.mod files. +- if len(diagLocations) == 0 { +- diagLocations = backupDiagLocations +- } +- +- var srcErrs []*Diagnostic +- for pm, le := range diagLocations { +- diag, err := s.goCommandDiagnostic(pm, le.loc, le.msg) +- if err != nil { +- event.Error(ctx, "building go command diagnostic", err) +- continue +- } +- srcErrs = append(srcErrs, diag) +- } +- return srcErrs +-} +- +-var moduleVersionInErrorRe = regexp.MustCompile(`[:\s]([+-._~0-9A-Za-z]+)@([+-._~0-9A-Za-z]+)[:\s]`) +- +-// matchErrorToModule matches a go command error message to a go.mod file. +-// Some examples: +-// +-// example.com@v1.2.2: reading example.com/@v/v1.2.2.mod: no such file or directory +-// go: github.com/cockroachdb/apd/v2@v2.0.72: reading github.com/cockroachdb/apd/go.mod at revision v2.0.72: unknown revision v2.0.72 +-// go: example.com@v1.2.3 requires\n\trandom.org@v1.2.3: parsing go.mod:\n\tmodule declares its path as: bob.org\n\tbut was required as: random.org +-// +-// It returns the location of a reference to the one of the modules and true +-// if one exists. If none is found it returns a fallback location and false. +-func (s *Snapshot) matchErrorToModule(pm *ParsedModule, goCmdError string) (protocol.Location, bool, error) { +- var reference *modfile.Line +- matches := moduleVersionInErrorRe.FindAllStringSubmatch(goCmdError, -1) +- +- for i := len(matches) - 1; i >= 0; i-- { +- ver := module.Version{Path: matches[i][1], Version: matches[i][2]} +- if err := module.Check(ver.Path, ver.Version); err != nil { +- continue +- } +- reference = findModuleReference(pm.File, ver) +- if reference != nil { +- break +- } +- } +- +- if reference == nil { +- // No match for the module path was found in the go.mod file. +- // Show the error on the module declaration, if one exists, or +- // just the first line of the file. 
+- var start, end int +- if pm.File.Module != nil && pm.File.Module.Syntax != nil { +- syntax := pm.File.Module.Syntax +- start, end = syntax.Start.Byte, syntax.End.Byte +- } +- loc, err := pm.Mapper.OffsetLocation(start, end) +- return loc, false, err +- } +- +- loc, err := pm.Mapper.OffsetLocation(reference.Start.Byte, reference.End.Byte) +- return loc, true, err +-} +- +-// goCommandDiagnostic creates a diagnostic for a given go command error. +-func (s *Snapshot) goCommandDiagnostic(pm *ParsedModule, loc protocol.Location, goCmdError string) (*Diagnostic, error) { +- matches := moduleVersionInErrorRe.FindAllStringSubmatch(goCmdError, -1) +- var innermost *module.Version +- for i := len(matches) - 1; i >= 0; i-- { +- ver := module.Version{Path: matches[i][1], Version: matches[i][2]} +- if err := module.Check(ver.Path, ver.Version); err != nil { +- continue +- } +- innermost = &ver +- break +- } +- +- switch { +- case strings.Contains(goCmdError, "inconsistent vendoring"): +- cmd := command.NewVendorCommand("Run go mod vendor", command.URIArg{URI: pm.URI}) +- return &Diagnostic{ +- URI: pm.URI, +- Range: loc.Range, +- Severity: protocol.SeverityError, +- Source: ListError, +- Message: `Inconsistent vendoring detected. Please re-run "go mod vendor". +-See https://github.com/golang/go/issues/39164 for more detail on this issue.`, +- SuggestedFixes: []SuggestedFix{SuggestedFixFromCommand(cmd, protocol.QuickFix)}, +- }, nil +- +- case strings.Contains(goCmdError, "updates to go.sum needed"), strings.Contains(goCmdError, "missing go.sum entry"): +- var args []protocol.DocumentURI +- args = append(args, s.View().ModFiles()...) +- tidyCmd := command.NewTidyCommand("Run go mod tidy", command.URIArgs{URIs: args}) +- updateCmd := command.NewUpdateGoSumCommand("Update go.sum", command.URIArgs{URIs: args}) +- msg := "go.sum is out of sync with go.mod. Please update it by applying the quick fix." +- if innermost != nil { +- msg = fmt.Sprintf("go.sum is out of sync with go.mod: entry for %v is missing. 
Please updating it by applying the quick fix.", innermost) +- } +- return &Diagnostic{ +- URI: pm.URI, +- Range: loc.Range, +- Severity: protocol.SeverityError, +- Source: ListError, +- Message: msg, +- SuggestedFixes: []SuggestedFix{ +- SuggestedFixFromCommand(tidyCmd, protocol.QuickFix), +- SuggestedFixFromCommand(updateCmd, protocol.QuickFix), +- }, +- }, nil +- case strings.Contains(goCmdError, "disabled by GOPROXY=off") && innermost != nil: +- title := fmt.Sprintf("Download %v@%v", innermost.Path, innermost.Version) +- cmd := command.NewAddDependencyCommand(title, command.DependencyArgs{ +- URI: pm.URI, +- AddRequire: false, +- GoCmdArgs: []string{fmt.Sprintf("%v@%v", innermost.Path, innermost.Version)}, +- }) +- return &Diagnostic{ +- URI: pm.URI, +- Range: loc.Range, +- Severity: protocol.SeverityError, +- Message: fmt.Sprintf("%v@%v has not been downloaded", innermost.Path, innermost.Version), +- Source: ListError, +- SuggestedFixes: []SuggestedFix{SuggestedFixFromCommand(cmd, protocol.QuickFix)}, +- }, nil +- default: +- return &Diagnostic{ +- URI: pm.URI, +- Range: loc.Range, +- Severity: protocol.SeverityError, +- Source: ListError, +- Message: goCmdError, +- }, nil +- } +-} +- +-func findModuleReference(mf *modfile.File, ver module.Version) *modfile.Line { +- for _, req := range mf.Require { +- if req.Mod == ver { +- return req.Syntax +- } +- } +- for _, ex := range mf.Exclude { +- if ex.Mod == ver { +- return ex.Syntax +- } +- } +- for _, rep := range mf.Replace { +- if rep.New == ver || rep.Old == ver { +- return rep.Syntax +- } +- } +- return nil +-} +- +-// ResolvedVersion returns the version used for a module, which considers replace directive. +-func ResolvedVersion(module *packages.Module) string { +- // don't visit replace recursively as src/cmd/go/internal/modinfo/info.go +- // visits replace field only once. +- if module.Replace != nil { +- return module.Replace.Version +- } +- return module.Version +-} +- +-// ResolvedPath returns the module path, which considers replace directive. +-func ResolvedPath(module *packages.Module) string { +- if module.Replace != nil { +- return module.Replace.Path +- } +- return module.Path +-} +- +-// ResolvedString returns a representation of the Version suitable for logging +-// (Path@Version, or just Path if Version is empty), +-// which considers replace directive. +-func ResolvedString(module *packages.Module) string { +- if ResolvedVersion(module) == "" { +- ResolvedPath(module) +- } +- return ResolvedPath(module) + "@" + ResolvedVersion(module) +-} +diff -urN a/gopls/internal/cache/mod_tidy.go b/gopls/internal/cache/mod_tidy.go +--- a/gopls/internal/cache/mod_tidy.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/cache/mod_tidy.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,498 +0,0 @@ +-// Copyright 2020 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. 
+- +-package cache +- +-import ( +- "context" +- "errors" +- "fmt" +- "go/ast" +- "go/token" +- "os" +- "path/filepath" +- "strconv" +- "strings" +- +- "golang.org/x/mod/modfile" +- "golang.org/x/tools/gopls/internal/cache/parsego" +- "golang.org/x/tools/gopls/internal/file" +- "golang.org/x/tools/gopls/internal/label" +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/gopls/internal/protocol/command" +- "golang.org/x/tools/gopls/internal/util/memoize" +- "golang.org/x/tools/internal/diff" +- "golang.org/x/tools/internal/event" +-) +- +-// This error is sought by mod diagnostics. +-var ErrNoModOnDisk = errors.New("go.mod file is not on disk") +- +-// A TidiedModule contains the results of running `go mod tidy` on a module. +-type TidiedModule struct { +- // Diagnostics representing changes made by `go mod tidy`. +- Diagnostics []*Diagnostic +- // The bytes of the go.mod file after it was tidied. +- TidiedContent []byte +-} +- +-// ModTidy returns the go.mod file that would be obtained by running +-// "go mod tidy". Concurrent requests are combined into a single command. +-func (s *Snapshot) ModTidy(ctx context.Context, pm *ParsedModule) (*TidiedModule, error) { +- ctx, done := event.Start(ctx, "cache.snapshot.ModTidy") +- defer done() +- +- uri := pm.URI +- if pm.File == nil { +- return nil, fmt.Errorf("cannot tidy unparsable go.mod file: %v", uri) +- } +- +- s.mu.Lock() +- entry, hit := s.modTidyHandles.Get(uri) +- s.mu.Unlock() +- +- type modTidyResult struct { +- tidied *TidiedModule +- err error +- } +- +- // Cache miss? +- if !hit { +- // If the file handle is an overlay, it may not be written to disk. +- // The go.mod file has to be on disk for `go mod tidy` to work. +- // TODO(rfindley): is this still true with Go 1.16 overlay support? +- fh, err := s.ReadFile(ctx, pm.URI) +- if err != nil { +- return nil, err +- } +- if _, ok := fh.(*overlay); ok { +- if info, _ := os.Stat(uri.Path()); info == nil { +- return nil, ErrNoModOnDisk +- } +- } +- +- if err := s.awaitLoaded(ctx); err != nil { +- return nil, err +- } +- +- handle := memoize.NewPromise("modTidy", func(ctx context.Context, arg any) any { +- tidied, err := modTidyImpl(ctx, arg.(*Snapshot), pm) +- return modTidyResult{tidied, err} +- }) +- +- entry = handle +- s.mu.Lock() +- s.modTidyHandles.Set(uri, entry, nil) +- s.mu.Unlock() +- } +- +- // Await result. +- v, err := s.awaitPromise(ctx, entry) +- if err != nil { +- return nil, err +- } +- res := v.(modTidyResult) +- return res.tidied, res.err +-} +- +-// modTidyImpl runs "go mod tidy" on a go.mod file. +-func modTidyImpl(ctx context.Context, snapshot *Snapshot, pm *ParsedModule) (*TidiedModule, error) { +- ctx, done := event.Start(ctx, "cache.ModTidy", label.URI.Of(pm.URI)) +- defer done() +- +- tempDir, cleanup, err := TempModDir(ctx, snapshot, pm.URI) +- if err != nil { +- return nil, err +- } +- defer cleanup() +- +- args := []string{"tidy", "-modfile=" + filepath.Join(tempDir, "go.mod")} +- inv, cleanupInvocation, err := snapshot.GoCommandInvocation(NoNetwork, pm.URI.DirPath(), "mod", args, "GOWORK=off") +- if err != nil { +- return nil, err +- } +- defer cleanupInvocation() +- if _, err := snapshot.view.gocmdRunner.Run(ctx, *inv); err != nil { +- return nil, err +- } +- +- // Go directly to disk to get the temporary mod file, +- // since it is always on disk. 
+- tempMod := filepath.Join(tempDir, "go.mod") +- tempContents, err := os.ReadFile(tempMod) +- if err != nil { +- return nil, err +- } +- ideal, err := modfile.Parse(tempMod, tempContents, nil) +- if err != nil { +- // We do not need to worry about the temporary file's parse errors +- // since it has been "tidied". +- return nil, err +- } +- +- // Compare the original and tidied go.mod files to compute errors and +- // suggested fixes. +- diagnostics, err := modTidyDiagnostics(ctx, snapshot, pm, ideal) +- if err != nil { +- return nil, err +- } +- +- return &TidiedModule{ +- Diagnostics: diagnostics, +- TidiedContent: tempContents, +- }, nil +-} +- +-// modTidyDiagnostics computes the differences between the original and tidied +-// go.mod files to produce diagnostic and suggested fixes. Some diagnostics +-// may appear on the Go files that import packages from missing modules. +-func modTidyDiagnostics(ctx context.Context, snapshot *Snapshot, pm *ParsedModule, ideal *modfile.File) (diagnostics []*Diagnostic, err error) { +- // First, determine which modules are unused and which are missing from the +- // original go.mod file. +- var ( +- unused = make(map[string]*modfile.Require, len(pm.File.Require)) +- missing = make(map[string]*modfile.Require, len(ideal.Require)) +- wrongDirectness = make(map[string]*modfile.Require, len(pm.File.Require)) +- ) +- for _, req := range pm.File.Require { +- unused[req.Mod.Path] = req +- } +- for _, req := range ideal.Require { +- origReq := unused[req.Mod.Path] +- if origReq == nil { +- missing[req.Mod.Path] = req +- continue +- } else if origReq.Indirect != req.Indirect { +- wrongDirectness[req.Mod.Path] = origReq +- } +- delete(unused, req.Mod.Path) +- } +- for _, req := range wrongDirectness { +- // Handle dependencies that are incorrectly labeled indirect and +- // vice versa. +- srcDiag, err := directnessDiagnostic(pm.Mapper, req) +- if err != nil { +- // We're probably in a bad state if we can't compute a +- // directnessDiagnostic, but try to keep going so as to not suppress +- // other, valid diagnostics. +- event.Error(ctx, "computing directness diagnostic", err) +- continue +- } +- diagnostics = append(diagnostics, srcDiag) +- } +- // Next, compute any diagnostics for modules that are missing from the +- // go.mod file. The fixes will be for the go.mod file, but the +- // diagnostics should also appear in both the go.mod file and the import +- // statements in the Go files in which the dependencies are used. +- // Finally, add errors for any unused dependencies. +- if len(missing) > 0 { +- missingModuleDiagnostics, err := missingModuleDiagnostics(ctx, snapshot, pm, ideal, missing) +- if err != nil { +- return nil, err +- } +- diagnostics = append(diagnostics, missingModuleDiagnostics...) +- } +- +- // Opt: if this is the only diagnostic, we can avoid textual edits and just +- // run the Go command. +- // +- // See also the documentation for command.RemoveDependencyArgs.OnlyDiagnostic. 
+- onlyDiagnostic := len(diagnostics) == 0 && len(unused) == 1 +- for _, req := range unused { +- srcErr, err := unusedDiagnostic(pm.Mapper, req, onlyDiagnostic) +- if err != nil { +- return nil, err +- } +- diagnostics = append(diagnostics, srcErr) +- } +- return diagnostics, nil +-} +- +-func missingModuleDiagnostics(ctx context.Context, snapshot *Snapshot, pm *ParsedModule, ideal *modfile.File, missing map[string]*modfile.Require) ([]*Diagnostic, error) { +- missingModuleFixes := map[*modfile.Require][]SuggestedFix{} +- var diagnostics []*Diagnostic +- for _, req := range missing { +- srcDiag, err := missingModuleDiagnostic(pm, req) +- if err != nil { +- return nil, err +- } +- missingModuleFixes[req] = srcDiag.SuggestedFixes +- diagnostics = append(diagnostics, srcDiag) +- } +- +- // Add diagnostics for missing modules anywhere they are imported in the +- // workspace. +- metas, err := snapshot.WorkspaceMetadata(ctx) +- if err != nil { +- return nil, err +- } +- // TODO(adonovan): opt: opportunities for parallelism abound. +- for _, mp := range metas { +- // Read both lists of files of this package. +- // +- // Parallelism is not necessary here as the files will have already been +- // pre-read at load time. +- goFiles, err := readFiles(ctx, snapshot, mp.GoFiles) +- if err != nil { +- return nil, err +- } +- compiledGoFiles, err := readFiles(ctx, snapshot, mp.CompiledGoFiles) +- if err != nil { +- return nil, err +- } +- +- missingImports := map[string]*modfile.Require{} +- +- // If -mod=readonly is not set we may have successfully imported +- // packages from missing modules. Otherwise they'll be in +- // MissingDependencies. Combine both. +- imps, err := parseImports(ctx, snapshot, goFiles) +- if err != nil { +- return nil, err +- } +- for imp := range imps { +- if req, ok := missing[imp]; ok { +- missingImports[imp] = req +- break +- } +- // If the import is a package of the dependency, then add the +- // package to the map, this will eliminate the need to do this +- // prefix package search on each import for each file. +- // Example: +- // +- // import ( +- // "golang.org/x/tools/internal/expect" +- // "golang.org/x/tools/go/packages" +- // ) +- // They both are related to the same module: "golang.org/x/tools". +- var match string +- for _, req := range ideal.Require { +- if strings.HasPrefix(imp, req.Mod.Path) && len(req.Mod.Path) > len(match) { +- match = req.Mod.Path +- } +- } +- if req, ok := missing[match]; ok { +- missingImports[imp] = req +- } +- } +- // None of this package's imports are from missing modules. 
+- if len(missingImports) == 0 { +- continue +- } +- for _, goFile := range compiledGoFiles { +- pgf, err := snapshot.ParseGo(ctx, goFile, parsego.Header) +- if err != nil { +- continue +- } +- file, m := pgf.File, pgf.Mapper +- if file == nil || m == nil { +- continue +- } +- imports := make(map[string]*ast.ImportSpec) +- for _, imp := range file.Imports { +- if imp.Path == nil { +- continue +- } +- if target, err := strconv.Unquote(imp.Path.Value); err == nil { +- imports[target] = imp +- } +- } +- if len(imports) == 0 { +- continue +- } +- for importPath, req := range missingImports { +- imp, ok := imports[importPath] +- if !ok { +- continue +- } +- fixes, ok := missingModuleFixes[req] +- if !ok { +- return nil, fmt.Errorf("no missing module fix for %q (%q)", importPath, req.Mod.Path) +- } +- srcErr, err := missingModuleForImport(pgf, imp, req, fixes) +- if err != nil { +- return nil, err +- } +- diagnostics = append(diagnostics, srcErr) +- } +- } +- } +- return diagnostics, nil +-} +- +-// unusedDiagnostic returns a Diagnostic for an unused require. +-func unusedDiagnostic(m *protocol.Mapper, req *modfile.Require, onlyDiagnostic bool) (*Diagnostic, error) { +- rng, err := m.OffsetRange(req.Syntax.Start.Byte, req.Syntax.End.Byte) +- if err != nil { +- return nil, err +- } +- title := fmt.Sprintf("Remove dependency: %s", req.Mod.Path) +- cmd := command.NewRemoveDependencyCommand(title, command.RemoveDependencyArgs{ +- URI: m.URI, +- OnlyDiagnostic: onlyDiagnostic, +- ModulePath: req.Mod.Path, +- }) +- return &Diagnostic{ +- URI: m.URI, +- Range: rng, +- Severity: protocol.SeverityWarning, +- Source: ModTidyError, +- Message: fmt.Sprintf("%s is not used in this module", req.Mod.Path), +- SuggestedFixes: []SuggestedFix{SuggestedFixFromCommand(cmd, protocol.QuickFix)}, +- }, nil +-} +- +-// directnessDiagnostic extracts errors when a dependency is labeled indirect when +-// it should be direct and vice versa. +-func directnessDiagnostic(m *protocol.Mapper, req *modfile.Require) (*Diagnostic, error) { +- rng, err := m.OffsetRange(req.Syntax.Start.Byte, req.Syntax.End.Byte) +- if err != nil { +- return nil, err +- } +- direction := "indirect" +- if req.Indirect { +- direction = "direct" +- +- // If the dependency should be direct, just highlight the // indirect. +- if comments := req.Syntax.Comment(); comments != nil && len(comments.Suffix) > 0 { +- end := comments.Suffix[0].Start +- end.LineRune += len(comments.Suffix[0].Token) +- end.Byte += len(comments.Suffix[0].Token) +- rng, err = m.OffsetRange(comments.Suffix[0].Start.Byte, end.Byte) +- if err != nil { +- return nil, err +- } +- } +- } +- // If the dependency should be indirect, add the // indirect. +- edits, err := switchDirectness(req, m) +- if err != nil { +- return nil, err +- } +- return &Diagnostic{ +- URI: m.URI, +- Range: rng, +- Severity: protocol.SeverityWarning, +- Source: ModTidyError, +- Message: fmt.Sprintf("%s should be %s", req.Mod.Path, direction), +- SuggestedFixes: []SuggestedFix{{ +- Title: fmt.Sprintf("Change %s to %s", req.Mod.Path, direction), +- Edits: map[protocol.DocumentURI][]protocol.TextEdit{ +- m.URI: edits, +- }, +- ActionKind: protocol.QuickFix, +- }}, +- }, nil +-} +- +-func missingModuleDiagnostic(pm *ParsedModule, req *modfile.Require) (*Diagnostic, error) { +- var rng protocol.Range +- // Default to the start of the file if there is no module declaration. 
+- if pm.File != nil && pm.File.Module != nil && pm.File.Module.Syntax != nil { +- start, end := pm.File.Module.Syntax.Span() +- var err error +- rng, err = pm.Mapper.OffsetRange(start.Byte, end.Byte) +- if err != nil { +- return nil, err +- } +- } +- title := fmt.Sprintf("Add %s to your go.mod file", req.Mod.Path) +- cmd := command.NewAddDependencyCommand(title, command.DependencyArgs{ +- URI: pm.Mapper.URI, +- AddRequire: !req.Indirect, +- GoCmdArgs: []string{req.Mod.Path + "@" + req.Mod.Version}, +- }) +- return &Diagnostic{ +- URI: pm.Mapper.URI, +- Range: rng, +- Severity: protocol.SeverityError, +- Source: ModTidyError, +- Message: fmt.Sprintf("%s is not in your go.mod file", req.Mod.Path), +- SuggestedFixes: []SuggestedFix{SuggestedFixFromCommand(cmd, protocol.QuickFix)}, +- }, nil +-} +- +-// switchDirectness gets the edits needed to change an indirect dependency to +-// direct and vice versa. +-func switchDirectness(req *modfile.Require, m *protocol.Mapper) ([]protocol.TextEdit, error) { +- // We need a private copy of the parsed go.mod file, since we're going to +- // modify it. +- copied, err := modfile.Parse("", m.Content, nil) +- if err != nil { +- return nil, err +- } +- // Change the directness in the matching require statement. To avoid +- // reordering the require statements, rewrite all of them. +- var requires []*modfile.Require +- seenVersions := make(map[string]string) +- for _, r := range copied.Require { +- if seen := seenVersions[r.Mod.Path]; seen != "" && seen != r.Mod.Version { +- // Avoid a panic in SetRequire below, which panics on conflicting +- // versions. +- return nil, fmt.Errorf("%q has conflicting versions: %q and %q", r.Mod.Path, seen, r.Mod.Version) +- } +- seenVersions[r.Mod.Path] = r.Mod.Version +- if r.Mod.Path == req.Mod.Path { +- requires = append(requires, &modfile.Require{ +- Mod: r.Mod, +- Syntax: r.Syntax, +- Indirect: !r.Indirect, +- }) +- continue +- } +- requires = append(requires, r) +- } +- copied.SetRequire(requires) +- newContent, err := copied.Format() +- if err != nil { +- return nil, err +- } +- // Calculate the edits to be made due to the change. +- edits := diff.Bytes(m.Content, newContent) +- return protocol.EditsFromDiffEdits(m, edits) +-} +- +-// missingModuleForImport creates an error for a given import path that comes +-// from a missing module. +-func missingModuleForImport(pgf *parsego.File, imp *ast.ImportSpec, req *modfile.Require, fixes []SuggestedFix) (*Diagnostic, error) { +- if req.Syntax == nil { +- return nil, fmt.Errorf("no syntax for %v", req) +- } +- rng, err := pgf.NodeRange(imp.Path) +- if err != nil { +- return nil, err +- } +- return &Diagnostic{ +- URI: pgf.URI, +- Range: rng, +- Severity: protocol.SeverityError, +- Source: ModTidyError, +- Message: fmt.Sprintf("%s is not in your go.mod file", req.Mod.Path), +- SuggestedFixes: fixes, +- }, nil +-} +- +-// parseImports parses the headers of the specified files and returns +-// the set of strings that appear in import declarations within +-// GoFiles. Errors are ignored. +-// +-// (We can't simply use Metadata.Imports because it is based on +-// CompiledGoFiles, after cgo processing.) +-// +-// TODO(rfindley): this should key off ImportPath. +-func parseImports(ctx context.Context, s *Snapshot, files []file.Handle) (map[string]bool, error) { +- pgfs, err := s.view.parseCache.parseFiles(ctx, token.NewFileSet(), parsego.Header, false, files...) +- if err != nil { // e.g. 
context cancellation +- return nil, err +- } +- +- seen := make(map[string]bool) +- for _, pgf := range pgfs { +- for _, spec := range pgf.File.Imports { +- path, _ := strconv.Unquote(spec.Path.Value) +- seen[path] = true +- } +- } +- return seen, nil +-} +diff -urN a/gopls/internal/cache/mod_vuln.go b/gopls/internal/cache/mod_vuln.go +--- a/gopls/internal/cache/mod_vuln.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/cache/mod_vuln.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,388 +0,0 @@ +-// Copyright 2022 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package cache +- +-import ( +- "context" +- "fmt" +- "io" +- "os" +- "sort" +- "strings" +- "sync" +- +- "golang.org/x/mod/semver" +- "golang.org/x/sync/errgroup" +- "golang.org/x/tools/go/packages" +- "golang.org/x/tools/gopls/internal/cache/metadata" +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/gopls/internal/util/memoize" +- "golang.org/x/tools/gopls/internal/vulncheck" +- "golang.org/x/tools/gopls/internal/vulncheck/govulncheck" +- "golang.org/x/tools/gopls/internal/vulncheck/osv" +- isem "golang.org/x/tools/gopls/internal/vulncheck/semver" +- "golang.org/x/vuln/scan" +-) +- +-// ModVuln returns import vulnerability analysis for the given go.mod URI. +-// Concurrent requests are combined into a single command. +-func (s *Snapshot) ModVuln(ctx context.Context, modURI protocol.DocumentURI) (*vulncheck.Result, error) { +- s.mu.Lock() +- entry, hit := s.modVulnHandles.Get(modURI) +- s.mu.Unlock() +- +- type modVuln struct { +- result *vulncheck.Result +- err error +- } +- +- // Cache miss? +- if !hit { +- handle := memoize.NewPromise("modVuln", func(ctx context.Context, arg any) any { +- result, err := modVulnImpl(ctx, arg.(*Snapshot)) +- return modVuln{result, err} +- }) +- +- entry = handle +- s.mu.Lock() +- s.modVulnHandles.Set(modURI, entry, nil) +- s.mu.Unlock() +- } +- +- // Await result. +- v, err := s.awaitPromise(ctx, entry) +- if err != nil { +- return nil, err +- } +- res := v.(modVuln) +- return res.result, res.err +-} +- +-// GoVersionForVulnTest is an internal environment variable used in gopls +-// testing to examine govulncheck behavior with a go version different +-// than what `go version` returns in the system. +-const GoVersionForVulnTest = "_GOPLS_TEST_VULNCHECK_GOVERSION" +- +-// modVulnImpl queries the vulndb and reports which vulnerabilities +-// apply to this snapshot. The result contains a set of packages, +-// grouped by vuln ID and by module. This implements the "import-based" +-// vulnerability report on go.mod files. +-func modVulnImpl(ctx context.Context, snapshot *Snapshot) (*vulncheck.Result, error) { +- // TODO(hyangah): can we let 'govulncheck' take a package list +- // used in the workspace and implement this function? +- +- // We want to report the intersection of vulnerable packages in the vulndb +- // and packages transitively imported by this module ('go list -deps all'). +- // We use snapshot.AllMetadata to retrieve the list of packages +- // as an approximation. +- // +- // TODO(hyangah): snapshot.AllMetadata is a superset of +- // `go list all` - e.g. when the workspace has multiple main modules +- // (multiple go.mod files), that can include packages that are not +- // used by this module. Vulncheck behavior with go.work is not well +- // defined. 
Figure out the meaning, and if we decide to present +- // the result as if each module is analyzed independently, make +- // gopls track a separate build list for each module and use that +- // information instead of snapshot.AllMetadata. +- allMeta, err := snapshot.AllMetadata(ctx) +- if err != nil { +- return nil, err +- } +- +- // TODO(hyangah): handle vulnerabilities in the standard library. +- +- // Group packages by modules since vuln db is keyed by module. +- packagesByModule := map[metadata.PackagePath][]*metadata.Package{} +- for _, mp := range allMeta { +- modulePath := metadata.PackagePath(osv.GoStdModulePath) +- if mi := mp.Module; mi != nil { +- modulePath = metadata.PackagePath(mi.Path) +- } +- packagesByModule[modulePath] = append(packagesByModule[modulePath], mp) +- } +- +- var ( +- mu sync.Mutex +- // Keys are osv.Entry.ID +- osvs = map[string]*osv.Entry{} +- findings []*govulncheck.Finding +- ) +- +- goVersion := snapshot.Options().Env[GoVersionForVulnTest] +- if goVersion == "" { +- goVersion = snapshot.GoVersionString() +- } +- +- stdlibModule := &packages.Module{ +- Path: osv.GoStdModulePath, +- Version: goVersion, +- } +- +- // GOVULNDB may point the test db URI. +- db := GetEnv(snapshot, "GOVULNDB") +- +- var group errgroup.Group +- group.SetLimit(10) // limit govulncheck api runs +- for _, mps := range packagesByModule { +- group.Go(func() error { +- effectiveModule := stdlibModule +- if m := mps[0].Module; m != nil { +- effectiveModule = m +- } +- for effectiveModule.Replace != nil { +- effectiveModule = effectiveModule.Replace +- } +- ver := effectiveModule.Version +- if ver == "" || !isem.Valid(ver) { +- // skip invalid version strings. the underlying scan api is strict. +- return nil +- } +- +- // TODO(hyangah): batch these requests and add in-memory cache for efficiency. +- vulns, err := osvsByModule(ctx, db, effectiveModule.Path+"@"+ver) +- if err != nil { +- return err +- } +- if len(vulns) == 0 { // No known vulnerability. +- return nil +- } +- +- // set of packages in this module known to gopls. +- // This will be lazily initialized when we need it. +- var knownPkgs map[metadata.PackagePath]bool +- +- // Report vulnerabilities that affect packages of this module. +- for _, entry := range vulns { +- var vulnerablePkgs []*govulncheck.Finding +- fixed := fixedVersion(effectiveModule.Path, entry.Affected) +- +- for _, a := range entry.Affected { +- if a.Module.Ecosystem != osv.GoEcosystem || a.Module.Path != effectiveModule.Path { +- continue +- } +- for _, imp := range a.EcosystemSpecific.Packages { +- if knownPkgs == nil { +- knownPkgs = toPackagePathSet(mps) +- } +- if knownPkgs[metadata.PackagePath(imp.Path)] { +- vulnerablePkgs = append(vulnerablePkgs, &govulncheck.Finding{ +- OSV: entry.ID, +- FixedVersion: fixed, +- Trace: []*govulncheck.Frame{ +- { +- Module: effectiveModule.Path, +- Version: effectiveModule.Version, +- Package: imp.Path, +- }, +- }, +- }) +- } +- } +- } +- if len(vulnerablePkgs) == 0 { +- continue +- } +- mu.Lock() +- osvs[entry.ID] = entry +- findings = append(findings, vulnerablePkgs...) +- mu.Unlock() +- } +- return nil +- }) +- } +- if err := group.Wait(); err != nil { +- return nil, err +- } +- +- // Sort so the results are deterministic. 
+- sort.Slice(findings, func(i, j int) bool { +- x, y := findings[i], findings[j] +- if x.OSV != y.OSV { +- return x.OSV < y.OSV +- } +- return x.Trace[0].Package < y.Trace[0].Package +- }) +- ret := &vulncheck.Result{ +- Entries: osvs, +- Findings: findings, +- Mode: vulncheck.ModeImports, +- } +- return ret, nil +-} +- +-// TODO(rfindley): this function was exposed during refactoring. Reconsider it. +-func GetEnv(snapshot *Snapshot, key string) string { +- val, ok := snapshot.Options().Env[key] +- if ok { +- return val +- } +- return os.Getenv(key) +-} +- +-// toPackagePathSet transforms the metadata to a set of package paths. +-func toPackagePathSet(mds []*metadata.Package) map[metadata.PackagePath]bool { +- pkgPaths := make(map[metadata.PackagePath]bool, len(mds)) +- for _, md := range mds { +- pkgPaths[md.PkgPath] = true +- } +- return pkgPaths +-} +- +-func fixedVersion(modulePath string, affected []osv.Affected) string { +- fixed := latestFixed(modulePath, affected) +- if fixed != "" { +- fixed = versionString(modulePath, fixed) +- } +- return fixed +-} +- +-// latestFixed returns the latest fixed version in the list of affected ranges, +-// or the empty string if there are no fixed versions. +-func latestFixed(modulePath string, as []osv.Affected) string { +- v := "" +- for _, a := range as { +- if a.Module.Path != modulePath { +- continue +- } +- for _, r := range a.Ranges { +- if r.Type == osv.RangeTypeSemver { +- for _, e := range r.Events { +- if e.Fixed != "" && (v == "" || +- semver.Compare(isem.CanonicalizeSemverPrefix(e.Fixed), isem.CanonicalizeSemverPrefix(v)) > 0) { +- v = e.Fixed +- } +- } +- } +- } +- } +- return v +-} +- +-// versionString prepends a version string prefix (`v` or `go` +-// depending on the modulePath) to the given semver-style version string. +-func versionString(modulePath, version string) string { +- if version == "" { +- return "" +- } +- v := "v" + version +- // These are internal Go module paths used by the vuln DB +- // when listing vulns in standard library and the go command. +- if modulePath == "stdlib" || modulePath == "toolchain" { +- return semverToGoTag(v) +- } +- return v +-} +- +-// semverToGoTag returns the Go standard library repository tag corresponding +-// to semver, a version string without the initial "v". +-// Go tags differ from standard semantic versions in a few ways, +-// such as beginning with "go" instead of "v". +-func semverToGoTag(v string) string { +- if strings.HasPrefix(v, "v0.0.0") { +- return "master" +- } +- // Special case: v1.0.0 => go1. +- if v == "v1.0.0" { +- return "go1" +- } +- if !semver.IsValid(v) { +- return fmt.Sprintf("", v) +- } +- goVersion := semver.Canonical(v) +- prerelease := semver.Prerelease(goVersion) +- versionWithoutPrerelease := strings.TrimSuffix(goVersion, prerelease) +- patch := strings.TrimPrefix(versionWithoutPrerelease, semver.MajorMinor(goVersion)+".") +- if patch == "0" { +- versionWithoutPrerelease = strings.TrimSuffix(versionWithoutPrerelease, ".0") +- } +- goVersion = fmt.Sprintf("go%s", strings.TrimPrefix(versionWithoutPrerelease, "v")) +- if prerelease != "" { +- // Go prereleases look like "beta1" instead of "beta.1". +- // "beta1" is bad for sorting (since beta10 comes before beta9), so +- // require the dot form. +- i := finalDigitsIndex(prerelease) +- if i >= 1 { +- if prerelease[i-1] != '.' { +- return fmt.Sprintf("", v) +- } +- // Remove the dot. 
+- prerelease = prerelease[:i-1] + prerelease[i:] +- } +- goVersion += strings.TrimPrefix(prerelease, "-") +- } +- return goVersion +-} +- +-// finalDigitsIndex returns the index of the first digit in the sequence of digits ending s. +-// If s doesn't end in digits, it returns -1. +-func finalDigitsIndex(s string) int { +- // Assume ASCII (since the semver package does anyway). +- var i int +- for i = len(s) - 1; i >= 0; i-- { +- if s[i] < '0' || s[i] > '9' { +- break +- } +- } +- if i == len(s)-1 { +- return -1 +- } +- return i + 1 +-} +- +-// osvsByModule runs a govulncheck database query. +-func osvsByModule(ctx context.Context, db, moduleVersion string) ([]*osv.Entry, error) { +- var args []string +- args = append(args, "-mode=query", "-json") +- if db != "" { +- args = append(args, "-db="+db) +- } +- args = append(args, moduleVersion) +- +- ir, iw := io.Pipe() +- handler := &osvReader{} +- +- var g errgroup.Group +- g.Go(func() error { +- defer iw.Close() // scan API doesn't close cmd.Stderr/cmd.Stdout. +- cmd := scan.Command(ctx, args...) +- cmd.Stdout = iw +- // TODO(hakim): Do we need to set cmd.Env = getEnvSlices(), +- // or is the process environment good enough? +- if err := cmd.Start(); err != nil { +- return err +- } +- return cmd.Wait() +- }) +- g.Go(func() error { +- return govulncheck.HandleJSON(ir, handler) +- }) +- +- if err := g.Wait(); err != nil { +- return nil, err +- } +- return handler.entry, nil +-} +- +-// osvReader implements govulncheck.Handler. +-type osvReader struct { +- entry []*osv.Entry +-} +- +-func (h *osvReader) OSV(entry *osv.Entry) error { +- h.entry = append(h.entry, entry) +- return nil +-} +- +-func (h *osvReader) Config(config *govulncheck.Config) error { +- return nil +-} +- +-func (h *osvReader) Finding(finding *govulncheck.Finding) error { +- return nil +-} +- +-func (h *osvReader) Progress(progress *govulncheck.Progress) error { +- return nil +-} +diff -urN a/gopls/internal/cache/os_darwin.go b/gopls/internal/cache/os_darwin.go +--- a/gopls/internal/cache/os_darwin.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/cache/os_darwin.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,59 +0,0 @@ +-// Copyright 2020 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package cache +- +-import ( +- "bytes" +- "fmt" +- "os" +- "path/filepath" +- "strings" +- "syscall" +- "unsafe" +-) +- +-func init() { +- checkPathValid = darwinCheckPathValid +-} +- +-func darwinCheckPathValid(path string) error { +- // Darwin provides fcntl(F_GETPATH) to get a path for an arbitrary FD. +- // Conveniently for our purposes, it gives the canonical case back. But +- // there's no guarantee that it will follow the same route through the +- // filesystem that the original path did. +- +- path, err := filepath.Abs(path) +- if err != nil { +- return err +- } +- fd, err := syscall.Open(path, os.O_RDONLY, 0) +- if err != nil { +- return err +- } +- defer syscall.Close(fd) +- buf := make([]byte, 4096) // No MAXPATHLEN in syscall, I think it's 1024, this is bigger. +- +- // Wheeee! syscall doesn't expose a way to call Fcntl except FcntlFlock. +- // As of writing, it just passes the pointer through, so we can just lie. 
+- if err := syscall.FcntlFlock(uintptr(fd), syscall.F_GETPATH, (*syscall.Flock_t)(unsafe.Pointer(&buf[0]))); err != nil { +- return err +- } +- buf = buf[:bytes.IndexByte(buf, 0)] +- +- isRoot := func(p string) bool { +- return p[len(p)-1] == filepath.Separator +- } +- // Darwin seems to like having multiple names for the same folder. Match as much of the suffix as we can. +- for got, want := path, string(buf); !isRoot(got) && !isRoot(want); got, want = filepath.Dir(got), filepath.Dir(want) { +- g, w := filepath.Base(got), filepath.Base(want) +- if !strings.EqualFold(g, w) { +- break +- } +- if g != w { +- return fmt.Errorf("invalid path %q: component %q is listed by macOS as %q", path, g, w) +- } +- } +- return nil +-} +diff -urN a/gopls/internal/cache/os_windows.go b/gopls/internal/cache/os_windows.go +--- a/gopls/internal/cache/os_windows.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/cache/os_windows.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,56 +0,0 @@ +-// Copyright 2020 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package cache +- +-import ( +- "fmt" +- "path/filepath" +- "syscall" +-) +- +-func init() { +- checkPathValid = windowsCheckPathValid +-} +- +-func windowsCheckPathValid(path string) error { +- // Back in the day, Windows used to have short and long filenames, and +- // it still supports those APIs. GetLongPathName gets the real case for a +- // path, so we can use it here. Inspired by +- // http://stackoverflow.com/q/2113822. +- +- // Short paths can be longer than long paths, and unicode, so be generous. +- buflen := 4 * len(path) +- namep, err := syscall.UTF16PtrFromString(path) +- if err != nil { +- return err +- } +- short := make([]uint16, buflen) +- n, err := syscall.GetShortPathName(namep, &short[0], uint32(len(short)*2)) // buflen is in bytes. +- if err != nil { +- return err +- } +- if int(n) > len(short)*2 { +- return fmt.Errorf("short buffer too short: %v vs %v*2", n, len(short)) +- } +- long := make([]uint16, buflen) +- n, err = syscall.GetLongPathName(&short[0], &long[0], uint32(len(long)*2)) +- if err != nil { +- return err +- } +- if int(n) > len(long)*2 { +- return fmt.Errorf("long buffer too short: %v vs %v*2", n, len(long)) +- } +- longstr := syscall.UTF16ToString(long) +- +- isRoot := func(p string) bool { +- return p[len(p)-1] == filepath.Separator +- } +- for got, want := path, longstr; !isRoot(got) && !isRoot(want); got, want = filepath.Dir(got), filepath.Dir(want) { +- if g, w := filepath.Base(got), filepath.Base(want); g != w { +- return fmt.Errorf("invalid path %q: component %q is listed by Windows as %q", path, g, w) +- } +- } +- return nil +-} +diff -urN a/gopls/internal/cache/package.go b/gopls/internal/cache/package.go +--- a/gopls/internal/cache/package.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/cache/package.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,214 +0,0 @@ +-// Copyright 2019 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. 
+- +-package cache +- +-import ( +- "fmt" +- "go/ast" +- "go/scanner" +- "go/token" +- "go/types" +- "slices" +- "sync" +- +- "golang.org/x/tools/gopls/internal/cache/metadata" +- "golang.org/x/tools/gopls/internal/cache/methodsets" +- "golang.org/x/tools/gopls/internal/cache/parsego" +- "golang.org/x/tools/gopls/internal/cache/testfuncs" +- "golang.org/x/tools/gopls/internal/cache/xrefs" +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/gopls/internal/util/safetoken" +-) +- +-// Convenient aliases for very heavily used types. +-type ( +- PackageID = metadata.PackageID +- PackagePath = metadata.PackagePath +- PackageName = metadata.PackageName +- ImportPath = metadata.ImportPath +-) +- +-// A Package is the union of package metadata and type checking results. +-// +-// TODO(rfindley): for now, we do not persist the post-processing of +-// loadDiagnostics, because the value of the snapshot.packages map is just the +-// package handle. Fix this. +-type Package struct { +- metadata *metadata.Package +- loadDiagnostics []*Diagnostic +- pkg *syntaxPackage +-} +- +-// syntaxPackage contains parse trees and type information for a package. +-type syntaxPackage struct { +- // -- identifiers -- +- id PackageID +- +- // -- outputs -- +- fset *token.FileSet // for now, same as the snapshot's FileSet +- goFiles []*parsego.File +- compiledGoFiles []*parsego.File +- diagnostics []*Diagnostic +- parseErrors []scanner.ErrorList +- typeErrors []types.Error +- types *types.Package +- typesInfo *types.Info +- typesSizes types.Sizes +- importMap map[PackagePath]*types.Package +- +- xrefsOnce sync.Once +- _xrefs []byte // only used by the xrefs method +- +- methodsetsOnce sync.Once +- _methodsets *methodsets.Index // only used by the methodsets method +- +- testsOnce sync.Once +- _tests *testfuncs.Index // only used by the tests method +-} +- +-func (p *syntaxPackage) xrefs() []byte { +- p.xrefsOnce.Do(func() { +- p._xrefs = xrefs.Index(p.compiledGoFiles, p.types, p.typesInfo) +- }) +- return p._xrefs +-} +- +-func (p *syntaxPackage) methodsets() *methodsets.Index { +- p.methodsetsOnce.Do(func() { +- p._methodsets = methodsets.NewIndex(p.fset, p.types) +- }) +- return p._methodsets +-} +- +-func (p *syntaxPackage) tests() *testfuncs.Index { +- p.testsOnce.Do(func() { +- p._tests = testfuncs.NewIndex(p.compiledGoFiles, p.typesInfo) +- }) +- return p._tests +-} +- +-// hasFixedFiles reports whether there are any 'fixed' compiled go files in the +-// package. +-// +-// Intended to be used to refine bug reports. +-func (p *syntaxPackage) hasFixedFiles() bool { +- return slices.ContainsFunc(p.compiledGoFiles, (*parsego.File).Fixed) +-} +- +-func (p *Package) String() string { return string(p.metadata.ID) } +- +-func (p *Package) Metadata() *metadata.Package { return p.metadata } +- +-// A loadScope defines a package loading scope for use with go/packages. +-// +-// TODO(rfindley): move this to load.go. +-type loadScope interface { +- aScope() +-} +- +-// TODO(rfindley): move to load.go +-type ( +- fileLoadScope protocol.DocumentURI // load packages containing a file (including command-line-arguments) +- packageLoadScope string // load a specific package (the value is its PackageID) +- moduleLoadScope struct { +- dir string // dir containing the go.mod file +- modulePath string // parsed module path +- } +- viewLoadScope struct{} // load the workspace +-) +- +-// Implement the loadScope interface. 
+-func (fileLoadScope) aScope() {} +-func (packageLoadScope) aScope() {} +-func (moduleLoadScope) aScope() {} +-func (viewLoadScope) aScope() {} +- +-func (p *Package) CompiledGoFiles() []*parsego.File { +- return p.pkg.compiledGoFiles +-} +- +-func (p *Package) File(uri protocol.DocumentURI) (*parsego.File, error) { +- return p.pkg.File(uri) +-} +- +-// FileEnclosing returns the file of pkg that encloses the specified position, +-// which must be mapped by p.FileSet(). +-func (p *Package) FileEnclosing(pos token.Pos) (*parsego.File, error) { +- for _, files := range [...][]*parsego.File{p.pkg.compiledGoFiles, p.pkg.goFiles} { +- for _, pgf := range files { +- if pgf.File.FileStart <= pos && pos <= pgf.File.FileEnd { +- return pgf, nil +- } +- } +- } +- return nil, fmt.Errorf("no parsed file for position %d (%s) in %v", +- pos, safetoken.StartPosition(p.FileSet(), pos), p.pkg.id) +-} +- +-func (pkg *syntaxPackage) File(uri protocol.DocumentURI) (*parsego.File, error) { +- for _, files := range [...][]*parsego.File{pkg.compiledGoFiles, pkg.goFiles} { +- for _, pgf := range files { +- if pgf.URI == uri { +- return pgf, nil +- } +- } +- } +- return nil, fmt.Errorf("no parsed file for %s in %v", uri, pkg.id) +-} +- +-// Syntax returns parsed compiled Go files contained in this package. +-func (p *Package) Syntax() []*ast.File { +- var syntax []*ast.File +- for _, pgf := range p.pkg.compiledGoFiles { +- syntax = append(syntax, pgf.File) +- } +- return syntax +-} +- +-// FileSet returns the FileSet describing this package's positions. +-// +-// The returned FileSet is guaranteed to describe all Syntax, but may also +-// describe additional files. +-func (p *Package) FileSet() *token.FileSet { +- return p.pkg.fset +-} +- +-// Types returns the type checked go/types.Package. +-func (p *Package) Types() *types.Package { +- return p.pkg.types +-} +- +-// TypesInfo returns the go/types.Info annotating the Syntax of this package +-// with type information. +-// +-// All fields in the resulting Info are populated. +-func (p *Package) TypesInfo() *types.Info { +- return p.pkg.typesInfo +-} +- +-// TypesSizes returns the sizing function used for types in this package. +-func (p *Package) TypesSizes() types.Sizes { +- return p.pkg.typesSizes +-} +- +-// DependencyTypes returns the type checker's symbol for the specified +-// package. It returns nil if path is not among the transitive +-// dependencies of p, or if no symbols from that package were +-// referenced during the type-checking of p. +-func (p *Package) DependencyTypes(path PackagePath) *types.Package { +- return p.pkg.importMap[path] +-} +- +-// ParseErrors returns a slice containing all non-empty parse errors produces +-// while parsing p.Syntax, or nil if the package contains no parse errors. +-func (p *Package) ParseErrors() []scanner.ErrorList { +- return p.pkg.parseErrors +-} +- +-// TypeErrors returns the go/types.Errors produced during type checking this +-// package, if any. +-func (p *Package) TypeErrors() []types.Error { +- return p.pkg.typeErrors +-} +diff -urN a/gopls/internal/cache/parse_cache.go b/gopls/internal/cache/parse_cache.go +--- a/gopls/internal/cache/parse_cache.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/cache/parse_cache.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,418 +0,0 @@ +-// Copyright 2023 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. 
+- +-package cache +- +-import ( +- "bytes" +- "container/heap" +- "context" +- "fmt" +- "go/parser" +- "go/token" +- "math/bits" +- "runtime" +- "sync" +- "time" +- +- "golang.org/x/sync/errgroup" +- "golang.org/x/tools/gopls/internal/cache/parsego" +- "golang.org/x/tools/gopls/internal/file" +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/gopls/internal/util/memoize" +-) +- +-// This file contains an implementation of an LRU parse cache, that offsets the +-// base token.Pos value of each cached file so that they may be later described +-// by a single dedicated FileSet. +-// +-// This is achieved by tracking a monotonic offset in the token.Pos space, that +-// is incremented before parsing allow room for the resulting parsed file. +- +-// reservedForParsing defines the room in the token.Pos space reserved for +-// cached parsed files. +-// +-// Files parsed through the parseCache are guaranteed not to have overlapping +-// spans: the parseCache tracks a monotonic base for newly parsed files. +-// +-// By offsetting the initial base of a FileSet, we can allow other operations +-// accepting the FileSet (such as the gcimporter) to add new files using the +-// normal FileSet APIs without overlapping with cached parsed files. +-// +-// Note that 1<<60 represents an exabyte of parsed data, more than any gopls +-// process can ever parse. +-// +-// On 32-bit systems we don't cache parse results (see parseFiles). +-const reservedForParsing = 1 << (bits.UintSize - 4) +- +-// fileSetWithBase returns a new token.FileSet with Base() equal to the +-// requested base. +-// +-// If base < 1, fileSetWithBase panics. +-// (1 is the smallest permitted FileSet base). +-func fileSetWithBase(base int) *token.FileSet { +- fset := token.NewFileSet() +- if base > 1 { +- // Add a dummy file to set the base of fset. We won't ever use the +- // resulting FileSet, so it doesn't matter how we achieve this. +- // +- // FileSets leave a 1-byte padding between files, so we set the base by +- // adding a zero-length file at base-1. +- fset.AddFile("", base-1, 0) +- } +- if fset.Base() != base { +- panic("unexpected FileSet.Base") +- } +- return fset +-} +- +-const ( +- // Always keep 100 recent files, independent of their wall-clock age, to +- // optimize the case where the user resumes editing after a delay. +- parseCacheMinFiles = 100 +-) +- +-// parsePadding is additional padding allocated to allow for increases in +-// length (such as appending missing braces) caused by fixAST. +-// +-// This is used to mitigate a chicken and egg problem: we must know the base +-// offset of the file we're about to parse, before we start parsing, and yet +-// src fixups may affect the actual size of the parsed content (and therefore +-// the offsets of subsequent files). +-// +-// When we encounter a file that no longer fits in its allocated space in the +-// fileset, we have no choice but to re-parse it. Leaving a generous padding +-// reduces the likelihood of this "slow path". +-// +-// This value is mutable for testing, so that we can exercise the slow path. +-var parsePadding = 1000 // mutable for testing +- +-// A parseCache holds recently accessed parsed Go files. After new files are +-// stored, older files may be evicted from the cache via garbage collection. +-// +-// The parseCache.parseFiles method exposes a batch API for parsing (and +-// caching) multiple files. This is necessary for type-checking, where files +-// must be parsed in a common fileset. 
+-type parseCache struct { +- expireAfter time.Duration // interval at which to collect expired cache entries +- done chan struct{} // closed when GC is stopped +- +- mu sync.Mutex +- m map[parseKey]*parseCacheEntry +- lru queue // min-atime priority queue of *parseCacheEntry +- clock uint64 // clock time, incremented when the cache is updated +- nextBase int // base offset for the next parsed file +-} +- +-// newParseCache creates a new parse cache and starts a goroutine to garbage +-// collect entries whose age is at least expireAfter. +-// +-// Callers must call parseCache.stop when the parse cache is no longer in use. +-func newParseCache(expireAfter time.Duration) *parseCache { +- c := &parseCache{ +- expireAfter: expireAfter, +- m: make(map[parseKey]*parseCacheEntry), +- done: make(chan struct{}), +- } +- go c.gc() +- return c +-} +- +-// stop causes the GC goroutine to exit. +-func (c *parseCache) stop() { +- close(c.done) +-} +- +-// parseKey uniquely identifies a parsed Go file. +-type parseKey struct { +- uri protocol.DocumentURI +- mode parser.Mode +- purgeFuncBodies bool +-} +- +-type parseCacheEntry struct { +- key parseKey +- hash file.Hash +- promise *memoize.Promise // memoize.Promise[*parsego.File] +- atime uint64 // clock time of last access, for use in LRU sorting +- walltime time.Time // actual time of last access, for use in time-based eviction; too coarse for LRU on some systems +- lruIndex int // owned by the queue implementation +-} +- +-// startParse prepares a parsing pass, creating new promises in the cache for +-// any cache misses. +-// +-// The resulting slice has an entry for every given file handle, though some +-// entries may be nil if there was an error reading the file (in which case the +-// resulting error will be non-nil). +-func (c *parseCache) startParse(mode parser.Mode, purgeFuncBodies bool, fhs ...file.Handle) ([]*memoize.Promise, error) { +- c.mu.Lock() +- defer c.mu.Unlock() +- +- // Any parsing pass increments the clock, as we'll update access times. +- // (technically, if fhs is empty this isn't necessary, but that's a degenerate case). +- // +- // All entries parsed from a single call get the same access time. +- c.clock++ +- walltime := time.Now() +- +- // Read file data and collect cacheable files. +- var ( +- data = make([][]byte, len(fhs)) // file content for each readable file +- promises = make([]*memoize.Promise, len(fhs)) +- firstReadError error // first error from fh.Read, or nil +- ) +- for i, fh := range fhs { +- content, err := fh.Content() +- if err != nil { +- if firstReadError == nil { +- firstReadError = err +- } +- continue +- } +- data[i] = content +- +- key := parseKey{ +- uri: fh.URI(), +- mode: mode, +- purgeFuncBodies: purgeFuncBodies, +- } +- +- if e, ok := c.m[key]; ok { +- if e.hash == fh.Identity().Hash { // cache hit +- e.atime = c.clock +- e.walltime = walltime +- heap.Fix(&c.lru, e.lruIndex) +- promises[i] = e.promise +- continue +- } else { +- // A cache hit, for a different version. Delete it. +- delete(c.m, e.key) +- heap.Remove(&c.lru, e.lruIndex) +- } +- } +- +- uri := fh.URI() +- promise := memoize.NewPromise("parseCache.parse", func(ctx context.Context, _ any) any { +- // Allocate 2*len(content)+parsePadding to allow for re-parsing once +- // inside of parseGoSrc without exceeding the allocated space. 
+- base, nextBase := c.allocateSpace(2*len(content) + parsePadding) +- +- pgf, fixes1 := parsego.Parse(ctx, fileSetWithBase(base), uri, content, mode, purgeFuncBodies) +- file := pgf.Tok +- if file.Base()+file.Size()+1 > nextBase { +- // The parsed file exceeds its allocated space, likely due to multiple +- // passes of src fixing. In this case, we have no choice but to re-do +- // the operation with the correct size. +- // +- // Even though the final successful parse requires only file.Size() +- // bytes of Pos space, we need to accommodate all the missteps to get +- // there, as parseGoSrc will repeat them. +- actual := file.Base() + file.Size() - base // actual size consumed, after re-parsing +- base2, nextBase2 := c.allocateSpace(actual) +- pgf2, fixes2 := parsego.Parse(ctx, fileSetWithBase(base2), uri, content, mode, purgeFuncBodies) +- +- // In golang/go#59097 we observed that this panic condition was hit. +- // One bug was found and fixed, but record more information here in +- // case there is still a bug here. +- if end := pgf2.Tok.Base() + pgf2.Tok.Size(); end != nextBase2-1 { +- var errBuf bytes.Buffer +- fmt.Fprintf(&errBuf, "internal error: non-deterministic parsing result:\n") +- fmt.Fprintf(&errBuf, "\t%q (%d-%d) does not span %d-%d\n", uri, pgf2.Tok.Base(), base2, end, nextBase2-1) +- fmt.Fprintf(&errBuf, "\tfirst %q (%d-%d)\n", pgf.URI, pgf.Tok.Base(), pgf.Tok.Base()+pgf.Tok.Size()) +- fmt.Fprintf(&errBuf, "\tfirst space: (%d-%d), second space: (%d-%d)\n", base, nextBase, base2, nextBase2) +- fmt.Fprintf(&errBuf, "\tfirst mode: %v, second mode: %v", pgf.Mode, pgf2.Mode) +- fmt.Fprintf(&errBuf, "\tfirst err: %v, second err: %v", pgf.ParseErr, pgf2.ParseErr) +- fmt.Fprintf(&errBuf, "\tfirst fixes: %v, second fixes: %v", fixes1, fixes2) +- panic(errBuf.String()) +- } +- pgf = pgf2 +- } +- return pgf +- }) +- promises[i] = promise +- +- // add new entry; entries are gc'ed asynchronously +- e := &parseCacheEntry{ +- key: key, +- hash: fh.Identity().Hash, +- promise: promise, +- atime: c.clock, +- walltime: walltime, +- } +- c.m[e.key] = e +- heap.Push(&c.lru, e) +- } +- +- if len(c.m) != len(c.lru) { +- panic("map and LRU are inconsistent") +- } +- +- return promises, firstReadError +-} +- +-func (c *parseCache) gc() { +- const period = 10 * time.Second // gc period +- timer := time.NewTicker(period) +- defer timer.Stop() +- +- for { +- select { +- case <-c.done: +- return +- case <-timer.C: +- } +- +- c.gcOnce() +- } +-} +- +-func (c *parseCache) gcOnce() { +- now := time.Now() +- c.mu.Lock() +- defer c.mu.Unlock() +- +- for len(c.m) > parseCacheMinFiles { +- e := heap.Pop(&c.lru).(*parseCacheEntry) +- if now.Sub(e.walltime) >= c.expireAfter { +- delete(c.m, e.key) +- } else { +- heap.Push(&c.lru, e) +- break +- } +- } +-} +- +-// allocateSpace reserves the next n bytes of token.Pos space in the +-// cache. +-// +-// It returns the resulting file base, next base, and an offset FileSet to use +-// for parsing. +-func (c *parseCache) allocateSpace(size int) (int, int) { +- c.mu.Lock() +- defer c.mu.Unlock() +- +- if c.nextBase == 0 { +- // FileSet base values must be at least 1. +- c.nextBase = 1 +- } +- base := c.nextBase +- c.nextBase += size + 1 +- return base, c.nextBase +-} +- +-// parseFiles returns a parsego.File for each file handle in fhs, in the +-// requested parse mode. +-// +-// For parsed files that already exists in the cache, access time will be +-// updated. For others, parseFiles will parse and store as many results in the +-// cache as space allows. 
+-// +-// The token.File for each resulting parsed file will be added to the provided +-// FileSet, using the tokeninternal.AddExistingFiles API. Consequently, the +-// given fset should only be used in other APIs if its base is >= +-// reservedForParsing. +-// +-// If parseFiles returns an error, it still returns a slice, +-// but with a nil entry for each file that could not be parsed. +-func (c *parseCache) parseFiles(ctx context.Context, fset *token.FileSet, mode parser.Mode, purgeFuncBodies bool, fhs ...file.Handle) ([]*parsego.File, error) { +- pgfs := make([]*parsego.File, len(fhs)) +- +- // Temporary fall-back for 32-bit systems, where reservedForParsing is too +- // small to be viable. We don't actually support 32-bit systems, so this +- // workaround is only for tests and can be removed when we stop running +- // 32-bit TryBots for gopls. +- if bits.UintSize == 32 { +- for i, fh := range fhs { +- var err error +- pgfs[i], err = parseGoImpl(ctx, fset, fh, mode, purgeFuncBodies) +- if err != nil { +- return pgfs, err +- } +- } +- return pgfs, nil +- } +- +- promises, firstErr := c.startParse(mode, purgeFuncBodies, fhs...) +- +- // Await all parsing. +- var g errgroup.Group +- g.SetLimit(runtime.GOMAXPROCS(-1)) // parsing is CPU-bound. +- for i, promise := range promises { +- if promise == nil { +- continue +- } +- i := i +- promise := promise +- g.Go(func() error { +- result, err := promise.Get(ctx, nil) +- if err != nil { +- return err +- } +- pgfs[i] = result.(*parsego.File) +- return nil +- }) +- } +- +- if err := g.Wait(); err != nil && firstErr == nil { +- firstErr = err +- } +- +- // Augment the FileSet to map all parsed files. +- var tokenFiles []*token.File +- for _, pgf := range pgfs { +- if pgf == nil { +- continue +- } +- tokenFiles = append(tokenFiles, pgf.Tok) +- } +- fset.AddExistingFiles(tokenFiles...) +- +- const debugIssue59080 = true +- if debugIssue59080 { +- for _, f := range tokenFiles { +- pos := token.Pos(f.Base()) +- f2 := fset.File(pos) +- if f2 != f { +- panic(fmt.Sprintf("internal error: File(%d (start)) = %v, not %v", pos, f2, f)) +- } +- pos = token.Pos(f.Base() + f.Size()) +- f2 = fset.File(pos) +- if f2 != f { +- panic(fmt.Sprintf("internal error: File(%d (end)) = %v, not %v", pos, f2, f)) +- } +- } +- } +- +- return pgfs, firstErr +-} +- +-// -- priority queue boilerplate -- +- +-// queue is a min-atime priority queue of cache entries. +-type queue []*parseCacheEntry +- +-func (q queue) Len() int { return len(q) } +- +-func (q queue) Less(i, j int) bool { return q[i].atime < q[j].atime } +- +-func (q queue) Swap(i, j int) { +- q[i], q[j] = q[j], q[i] +- q[i].lruIndex = i +- q[j].lruIndex = j +-} +- +-func (q *queue) Push(x any) { +- e := x.(*parseCacheEntry) +- e.lruIndex = len(*q) +- *q = append(*q, e) +-} +- +-func (q *queue) Pop() any { +- last := len(*q) - 1 +- e := (*q)[last] +- (*q)[last] = nil // aid GC +- *q = (*q)[:last] +- return e +-} +diff -urN a/gopls/internal/cache/parse_cache_test.go b/gopls/internal/cache/parse_cache_test.go +--- a/gopls/internal/cache/parse_cache_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/cache/parse_cache_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,238 +0,0 @@ +-// Copyright 2023 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. 
+- +-package cache +- +-import ( +- "context" +- "fmt" +- "go/token" +- "math/bits" +- "testing" +- "time" +- +- "golang.org/x/tools/gopls/internal/cache/parsego" +- "golang.org/x/tools/gopls/internal/file" +- "golang.org/x/tools/gopls/internal/protocol" +-) +- +-func skipIfNoParseCache(t *testing.T) { +- if bits.UintSize == 32 { +- t.Skip("the parse cache is not supported on 32-bit systems") +- } +-} +- +-func TestParseCache(t *testing.T) { +- skipIfNoParseCache(t) +- +- ctx := context.Background() +- uri := protocol.DocumentURI("file:///myfile") +- fh := makeFakeFileHandle(uri, []byte("package p\n\nconst _ = \"foo\"")) +- fset := token.NewFileSet() +- +- cache := newParseCache(0) +- pgfs1, err := cache.parseFiles(ctx, fset, parsego.Full, false, fh) +- if err != nil { +- t.Fatal(err) +- } +- pgf1 := pgfs1[0] +- pgfs2, err := cache.parseFiles(ctx, fset, parsego.Full, false, fh) +- pgf2 := pgfs2[0] +- if err != nil { +- t.Fatal(err) +- } +- if pgf1 != pgf2 { +- t.Errorf("parseFiles(%q): unexpected cache miss on repeated call", uri) +- } +- +- // Fill up the cache with other files, but don't evict the file above. +- cache.gcOnce() +- files := []file.Handle{fh} +- files = append(files, dummyFileHandles(parseCacheMinFiles-1)...) +- +- pgfs3, err := cache.parseFiles(ctx, fset, parsego.Full, false, files...) +- if err != nil { +- t.Fatal(err) +- } +- pgf3 := pgfs3[0] +- if pgf3 != pgf1 { +- t.Errorf("parseFiles(%q, ...): unexpected cache miss", uri) +- } +- if pgf3.Tok.Base() != pgf1.Tok.Base() || pgf3.Tok.Size() != pgf1.Tok.Size() { +- t.Errorf("parseFiles(%q, ...): result.Tok has base: %d, size: %d, want (%d, %d)", uri, pgf3.Tok.Base(), pgf3.Tok.Size(), pgf1.Tok.Base(), pgf1.Tok.Size()) +- } +- if tok := fset.File(token.Pos(pgf3.Tok.Base())); tok != pgf3.Tok { +- t.Errorf("parseFiles(%q, ...): result.Tok not contained in FileSet", uri) +- } +- +- // Now overwrite the cache, after which we should get new results. +- cache.gcOnce() +- files = dummyFileHandles(parseCacheMinFiles) +- _, err = cache.parseFiles(ctx, fset, parsego.Full, false, files...) +- if err != nil { +- t.Fatal(err) +- } +- // force a GC, which should collect the recently parsed files +- cache.gcOnce() +- pgfs4, err := cache.parseFiles(ctx, fset, parsego.Full, false, fh) +- if err != nil { +- t.Fatal(err) +- } +- if pgfs4[0] == pgf1 { +- t.Errorf("parseFiles(%q): unexpected cache hit after overwriting cache", uri) +- } +-} +- +-func TestParseCache_Reparsing(t *testing.T) { +- skipIfNoParseCache(t) +- +- defer func(padding int) { +- parsePadding = padding +- }(parsePadding) +- parsePadding = 0 +- +- files := dummyFileHandles(parseCacheMinFiles) +- danglingSelector := []byte("package p\nfunc _() {\n\tx.\n}") +- files = append(files, makeFakeFileHandle("file:///bad1", danglingSelector)) +- files = append(files, makeFakeFileHandle("file:///bad2", danglingSelector)) +- +- // Parsing should succeed even though we overflow the padding. +- cache := newParseCache(0) +- _, err := cache.parseFiles(context.Background(), token.NewFileSet(), parsego.Full, false, files...) +- if err != nil { +- t.Fatal(err) +- } +-} +- +-// Re-parsing the first file should not panic. 
+-func TestParseCache_Issue59097(t *testing.T) { +- skipIfNoParseCache(t) +- +- defer func(padding int) { +- parsePadding = padding +- }(parsePadding) +- parsePadding = 0 +- +- danglingSelector := []byte("package p\nfunc _() {\n\tx.\n}") +- files := []file.Handle{makeFakeFileHandle("file:///bad", danglingSelector)} +- +- // Parsing should succeed even though we overflow the padding. +- cache := newParseCache(0) +- _, err := cache.parseFiles(context.Background(), token.NewFileSet(), parsego.Full, false, files...) +- if err != nil { +- t.Fatal(err) +- } +-} +- +-func TestParseCache_TimeEviction(t *testing.T) { +- skipIfNoParseCache(t) +- +- ctx := context.Background() +- fset := token.NewFileSet() +- uri := protocol.DocumentURI("file:///myfile") +- fh := makeFakeFileHandle(uri, []byte("package p\n\nconst _ = \"foo\"")) +- +- const gcDuration = 10 * time.Millisecond +- cache := newParseCache(gcDuration) +- cache.stop() // we'll manage GC manually, for testing. +- +- pgfs0, err := cache.parseFiles(ctx, fset, parsego.Full, false, fh, fh) +- if err != nil { +- t.Fatal(err) +- } +- +- files := dummyFileHandles(parseCacheMinFiles) +- _, err = cache.parseFiles(ctx, fset, parsego.Full, false, files...) +- if err != nil { +- t.Fatal(err) +- } +- +- // Even after filling up the 'min' files, we get a cache hit for our original file. +- pgfs1, err := cache.parseFiles(ctx, fset, parsego.Full, false, fh, fh) +- if err != nil { +- t.Fatal(err) +- } +- +- if pgfs0[0] != pgfs1[0] { +- t.Errorf("before GC, got unexpected cache miss") +- } +- +- // But after GC, we get a cache miss. +- _, err = cache.parseFiles(ctx, fset, parsego.Full, false, files...) // mark dummy files as newer +- if err != nil { +- t.Fatal(err) +- } +- time.Sleep(gcDuration) +- cache.gcOnce() +- +- pgfs2, err := cache.parseFiles(ctx, fset, parsego.Full, false, fh, fh) +- if err != nil { +- t.Fatal(err) +- } +- +- if pgfs0[0] == pgfs2[0] { +- t.Errorf("after GC, got unexpected cache hit for %s", pgfs0[0].URI) +- } +-} +- +-func TestParseCache_Duplicates(t *testing.T) { +- skipIfNoParseCache(t) +- +- ctx := context.Background() +- uri := protocol.DocumentURI("file:///myfile") +- fh := makeFakeFileHandle(uri, []byte("package p\n\nconst _ = \"foo\"")) +- +- cache := newParseCache(0) +- pgfs, err := cache.parseFiles(ctx, token.NewFileSet(), parsego.Full, false, fh, fh) +- if err != nil { +- t.Fatal(err) +- } +- if pgfs[0] != pgfs[1] { +- t.Errorf("parseFiles(fh, fh): = [%p, %p], want duplicate files", pgfs[0].File, pgfs[1].File) +- } +-} +- +-func dummyFileHandles(n int) []file.Handle { +- var fhs []file.Handle +- for i := range n { +- uri := protocol.DocumentURI(fmt.Sprintf("file:///_%d", i)) +- src := fmt.Appendf(nil, "package p\nvar _ = %d", i) +- fhs = append(fhs, makeFakeFileHandle(uri, src)) +- } +- return fhs +-} +- +-func makeFakeFileHandle(uri protocol.DocumentURI, src []byte) fakeFileHandle { +- return fakeFileHandle{ +- uri: uri, +- data: src, +- hash: file.HashOf(src), +- } +-} +- +-type fakeFileHandle struct { +- file.Handle +- uri protocol.DocumentURI +- data []byte +- hash file.Hash +-} +- +-func (h fakeFileHandle) String() string { +- return h.uri.Path() +-} +- +-func (h fakeFileHandle) URI() protocol.DocumentURI { +- return h.uri +-} +- +-func (h fakeFileHandle) Content() ([]byte, error) { +- return h.data, nil +-} +- +-func (h fakeFileHandle) Identity() file.Identity { +- return file.Identity{ +- URI: h.uri, +- Hash: h.hash, +- } +-} +diff -urN a/gopls/internal/cache/parse.go b/gopls/internal/cache/parse.go +--- 
a/gopls/internal/cache/parse.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/cache/parse.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,45 +0,0 @@ +-// Copyright 2019 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package cache +- +-import ( +- "context" +- "fmt" +- "go/parser" +- "go/token" +- "path/filepath" +- +- "golang.org/x/tools/gopls/internal/cache/parsego" +- "golang.org/x/tools/gopls/internal/file" +-) +- +-// ParseGo parses the file whose contents are provided by fh. +-// The resulting tree may have been fixed up. +-// If the file is not available, returns nil and an error. +-func (s *Snapshot) ParseGo(ctx context.Context, fh file.Handle, mode parser.Mode) (*parsego.File, error) { +- pgfs, err := s.view.parseCache.parseFiles(ctx, token.NewFileSet(), mode, false, fh) +- if err != nil { +- return nil, err +- } +- return pgfs[0], nil +-} +- +-// parseGoImpl parses the Go source file whose content is provided by fh. +-func parseGoImpl(ctx context.Context, fset *token.FileSet, fh file.Handle, mode parser.Mode, purgeFuncBodies bool) (*parsego.File, error) { +- ext := filepath.Ext(fh.URI().Path()) +- if ext != ".go" && ext != "" { // files generated by cgo have no extension +- return nil, fmt.Errorf("cannot parse non-Go file %s", fh.URI()) +- } +- content, err := fh.Content() +- if err != nil { +- return nil, err +- } +- // Check for context cancellation before actually doing the parse. +- if ctx.Err() != nil { +- return nil, ctx.Err() +- } +- pgf, _ := parsego.Parse(ctx, fset, fh.URI(), content, mode, purgeFuncBodies) // ignore 'fixes' +- return pgf, nil +-} +diff -urN a/gopls/internal/cache/parsego/file.go b/gopls/internal/cache/parsego/file.go +--- a/gopls/internal/cache/parsego/file.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/cache/parsego/file.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,186 +0,0 @@ +-// Copyright 2023 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package parsego +- +-import ( +- "go/ast" +- "go/parser" +- "go/scanner" +- "go/token" +- "sync" +- "unicode" +- +- "golang.org/x/tools/go/ast/inspector" +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/gopls/internal/util/bug" +- "golang.org/x/tools/gopls/internal/util/safetoken" +-) +- +-// A File contains the results of parsing a Go file. +-type File struct { +- URI protocol.DocumentURI +- Mode parser.Mode +- +- // File is the file resulting from parsing. It is always non-nil. +- // +- // Clients must not access the AST's legacy ast.Object-related +- // fields without first ensuring that [File.Resolve] was +- // already called. +- File *ast.File +- Tok *token.File +- // Source code used to build the AST. It may be different from the +- // actual content of the file if we have fixed the AST. +- Src []byte +- +- Cursor inspector.Cursor // cursor of *ast.File, sans sibling files +- +- // fixedSrc and fixedAST report on "fixing" that occurred during parsing of +- // this file. +- // +- // fixedSrc means Src holds file content that was modified to improve parsing. +- // fixedAST means File was modified after parsing, so AST positions may not +- // reflect the content of Src. +- // +- // TODO(rfindley): there are many places where we haphazardly use the Src or +- // positions without checking these fields. Audit these places and guard +- // accordingly. 
After doing so, we may find that we don't need to +- // differentiate fixedSrc and fixedAST. +- fixedSrc bool +- fixedAST bool +- Mapper *protocol.Mapper // may map fixed Src, not file content +- ParseErr scanner.ErrorList +- +- // resolveOnce guards the lazy ast.Object resolution. See [File.Resolve]. +- resolveOnce sync.Once +-} +- +-func (pgf *File) String() string { return string(pgf.URI) } +- +-// Fixed reports whether p was "Fixed", meaning that its source or positions +-// may not correlate with the original file. +-func (pgf *File) Fixed() bool { +- return pgf.fixedSrc || pgf.fixedAST +-} +- +-// -- go/token domain convenience helpers -- +- +-// PositionPos returns the token.Pos of protocol position p within the file. +-func (pgf *File) PositionPos(p protocol.Position) (token.Pos, error) { +- offset, err := pgf.Mapper.PositionOffset(p) +- if err != nil { +- return token.NoPos, err +- } +- return safetoken.Pos(pgf.Tok, offset) +-} +- +-// PosPosition returns a protocol Position for the token.Pos in this file. +-func (pgf *File) PosPosition(pos token.Pos) (protocol.Position, error) { +- return pgf.Mapper.PosPosition(pgf.Tok, pos) +-} +- +-// PosRange returns a protocol Range for the token.Pos interval in this file. +-func (pgf *File) PosRange(start, end token.Pos) (protocol.Range, error) { +- return pgf.Mapper.PosRange(pgf.Tok, start, end) +-} +- +-// PosLocation returns a protocol Location for the token.Pos interval in this file. +-func (pgf *File) PosLocation(start, end token.Pos) (protocol.Location, error) { +- return pgf.Mapper.PosLocation(pgf.Tok, start, end) +-} +- +-// PosText returns the source text for the token.Pos interval in this file. +-func (pgf *File) PosText(start, end token.Pos) ([]byte, error) { +- return pgf.Mapper.PosText(pgf.Tok, start, end) +-} +- +-// NodeRange returns a protocol Range for the ast.Node interval in this file. +-func (pgf *File) NodeRange(node ast.Node) (protocol.Range, error) { +- return pgf.Mapper.NodeRange(pgf.Tok, node) +-} +- +-// NodeOffsets returns offsets for the ast.Node. +-func (pgf *File) NodeOffsets(node ast.Node) (start int, end int, _ error) { +- return safetoken.Offsets(pgf.Tok, node.Pos(), node.End()) +-} +- +-// NodeLocation returns a protocol Location for the ast.Node interval in this file. +-func (pgf *File) NodeLocation(node ast.Node) (protocol.Location, error) { +- return pgf.Mapper.PosLocation(pgf.Tok, node.Pos(), node.End()) +-} +- +-// NodeText returns the source text for the ast.Node interval in this file. +-func (pgf *File) NodeText(node ast.Node) ([]byte, error) { +- return pgf.Mapper.NodeText(pgf.Tok, node) +-} +- +-// RangePos parses a protocol Range back into the go/token domain. +-func (pgf *File) RangePos(r protocol.Range) (token.Pos, token.Pos, error) { +- start, end, err := pgf.Mapper.RangeOffsets(r) +- if err != nil { +- return token.NoPos, token.NoPos, err +- } +- return pgf.Tok.Pos(start), pgf.Tok.Pos(end), nil +-} +- +-// CheckNode asserts that the Node's positions are valid w.r.t. pgf.Tok. +-func (pgf *File) CheckNode(node ast.Node) { +- // Avoid safetoken.Offsets, and put each assertion on its own source line. +- pgf.CheckPos(node.Pos()) +- pgf.CheckPos(node.End()) +-} +- +-// CheckPos asserts that the position is valid w.r.t. pgf.Tok. 
+-func (pgf *File) CheckPos(pos token.Pos) { +- if !pos.IsValid() { +- bug.Report("invalid token.Pos") +- } else if _, err := safetoken.Offset(pgf.Tok, pos); err != nil { +- bug.Report("token.Pos out of range") +- } +-} +- +-// Resolve lazily resolves ast.Ident.Objects in the enclosed syntax tree. +-// +-// Resolve must be called before accessing any of: +-// - pgf.File.Scope +-// - pgf.File.Unresolved +-// - Ident.Obj, for any Ident in pgf.File +-func (pgf *File) Resolve() { +- pgf.resolveOnce.Do(func() { +- if pgf.File.Scope != nil { +- return // already resolved by parsing without SkipObjectResolution. +- } +- defer func() { +- // (panic handler duplicated from go/parser) +- if e := recover(); e != nil { +- // A bailout indicates the resolution stack has exceeded max depth. +- if _, ok := e.(bailout); !ok { +- panic(e) +- } +- } +- }() +- declErr := func(token.Pos, string) {} +- resolveFile(pgf.File, pgf.Tok, declErr) +- }) +-} +- +-// Indentation returns the string of spaces representing the indentation +-// of the line containing the specified position. +-// This can be used to ensure that inserted code maintains consistent indentation +-// and column alignment. +-func (pgf *File) Indentation(pos token.Pos) (string, error) { +- line := safetoken.Line(pgf.Tok, pos) +- start, end, err := safetoken.Offsets(pgf.Tok, pgf.Tok.LineStart(line), pos) +- if err != nil { +- return "", err +- } +- +- s := string(pgf.Src[start:end]) +- for i, r := range s { +- if !unicode.IsSpace(r) { +- return s[:i], nil // prefix of spaces +- } +- } +- return s, nil +-} +diff -urN a/gopls/internal/cache/parsego/parse.go b/gopls/internal/cache/parsego/parse.go +--- a/gopls/internal/cache/parsego/parse.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/cache/parsego/parse.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,941 +0,0 @@ +-// Copyright 2023 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-//go:generate go run resolver_gen.go +- +-// The parsego package defines the [File] type, a wrapper around a go/ast.File +-// that is useful for answering LSP queries. Notably, it bundles the +-// *token.File and *protocol.Mapper necessary for token.Pos locations to and +-// from UTF-16 LSP positions. +-// +-// Run `go generate` to update resolver.go from GOROOT. +-package parsego +- +-import ( +- "bytes" +- "context" +- "fmt" +- "go/ast" +- "go/parser" +- "go/scanner" +- "go/token" +- "reflect" +- "slices" +- +- "golang.org/x/tools/go/ast/inspector" +- "golang.org/x/tools/gopls/internal/label" +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/gopls/internal/util/safetoken" +- "golang.org/x/tools/internal/astutil" +- "golang.org/x/tools/internal/diff" +- "golang.org/x/tools/internal/event" +-) +- +-// Common parse modes; these should be reused wherever possible to increase +-// cache hits. +-const ( +- // Header specifies that the main package declaration and imports are needed. +- // This is the mode used when attempting to examine the package graph structure. +- Header = parser.AllErrors | parser.ParseComments | parser.ImportsOnly | parser.SkipObjectResolution +- +- // Full specifies the full AST is needed. +- // This is used for files of direct interest where the entire contents must +- // be considered. +- Full = parser.AllErrors | parser.ParseComments | parser.SkipObjectResolution +-) +- +-// Parse parses a buffer of Go source, repairing the tree if necessary. 
+-// +-// The provided ctx is used only for logging. +-func Parse(ctx context.Context, fset *token.FileSet, uri protocol.DocumentURI, src []byte, mode parser.Mode, purgeFuncBodies bool) (res *File, fixes []FixType) { +- if purgeFuncBodies { +- src = astutil.PurgeFuncBodies(src) +- } +- ctx, done := event.Start(ctx, "cache.ParseGoSrc", label.File.Of(uri.Path())) +- defer done() +- +- file, err := parser.ParseFile(fset, uri.Path(), src, mode) +- var parseErr scanner.ErrorList +- if err != nil { +- // We passed a byte slice, so the only possible error is a parse error. +- parseErr = err.(scanner.ErrorList) +- } +- // Inv: file != nil. +- +- tokenFile := func(file *ast.File) *token.File { +- return fset.File(file.FileStart) +- } +- +- tok := tokenFile(file) +- +- fixedSrc := false +- fixedAST := false +- // If there were parse errors, attempt to fix them up. +- if parseErr != nil { +- // Fix any badly parsed parts of the AST. +- astFixes := fixAST(file, tok, src) +- fixedAST = len(astFixes) > 0 +- if fixedAST { +- fixes = append(fixes, astFixes...) +- } +- +- for i := range 10 { +- // Fix certain syntax errors that render the file unparsable. +- newSrc, srcFix := fixSrc(file, tok, src) +- if newSrc == nil { +- break +- } +- +- // If we thought there was something to fix 10 times in a row, +- // it is likely we got stuck in a loop somehow. Log out a diff +- // of the last changes we made to aid in debugging. +- if i == 9 { +- unified := diff.Unified("before", "after", string(src), string(newSrc)) +- event.Log(ctx, fmt.Sprintf("fixSrc loop - last diff:\n%v", unified), label.File.Of(tok.Name())) +- } +- +- newFile, newErr := parser.ParseFile(fset, uri.Path(), newSrc, mode) +- assert(newFile != nil, "ParseFile returned nil") // I/O error can't happen +- +- // Maintain the original parseError so we don't try formatting the +- // doctored file. +- file = newFile +- src = newSrc +- tok = tokenFile(file) +- +- // Only now that we accept the fix do we record the src fix from above. +- fixes = append(fixes, srcFix) +- fixedSrc = true +- +- if newErr == nil { +- break // nothing to fix +- } +- +- // Note that fixedAST is reset after we fix src. +- astFixes = fixAST(file, tok, src) +- fixedAST = len(astFixes) > 0 +- if fixedAST { +- fixes = append(fixes, astFixes...) +- } +- } +- } +- assert(file != nil, "nil *ast.File") +- +- // Provide a cursor for fast and convenient navigation. +- inspect := inspector.New([]*ast.File{file}) +- curFile, _ := inspect.Root().FirstChild() +- _ = curFile.Node().(*ast.File) +- +- return &File{ +- URI: uri, +- Mode: mode, +- Src: src, +- fixedSrc: fixedSrc, +- fixedAST: fixedAST, +- File: file, +- Tok: tok, +- Cursor: curFile, +- Mapper: protocol.NewMapper(uri, src), +- ParseErr: parseErr, +- }, fixes +-} +- +-// fixAST inspects the AST and potentially modifies any *ast.BadStmts so that it can be +-// type-checked more effectively. +-// +-// If fixAST returns true, the resulting AST is considered "fixed", meaning +-// positions have been mangled, and type checker errors may not make sense. +-func fixAST(n ast.Node, tok *token.File, src []byte) (fixes []FixType) { +- var err error +- ast.PreorderStack(n, nil, func(n ast.Node, stack []ast.Node) bool { +- var parent ast.Node +- if len(stack) > 0 { +- parent = stack[len(stack)-1] +- } +- +- switch n := n.(type) { +- case *ast.BadStmt: +- if fixDeferOrGoStmt(n, parent, tok, src) { +- fixes = append(fixes, FixedDeferOrGo) +- // Recursively fix in our fixed node. 
+- moreFixes := fixAST(parent, tok, src) +- fixes = append(fixes, moreFixes...) +- } else { +- err = fmt.Errorf("unable to parse defer or go from *ast.BadStmt: %v", err) +- } +- return false +- case *ast.BadExpr: +- if fixArrayType(n, parent, tok, src) { +- fixes = append(fixes, FixedArrayType) +- // Recursively fix in our fixed node. +- moreFixes := fixAST(parent, tok, src) +- fixes = append(fixes, moreFixes...) +- return false +- } +- +- // Fix cases where parser interprets if/for/switch "init" +- // statement as "cond" expression, e.g.: +- // +- // // "i := foo" is init statement, not condition. +- // for i := foo +- // +- if fixInitStmt(n, parent, tok, src) { +- fixes = append(fixes, FixedInit) +- } +- return false +- case *ast.SelectorExpr: +- // Fix cases where a keyword prefix results in a phantom "_" selector, e.g.: +- // +- // foo.var<> // want to complete to "foo.variance" +- // +- if fixPhantomSelector(n, tok, src) { +- fixes = append(fixes, FixedPhantomSelector) +- } +- return true +- +- case *ast.BlockStmt: +- switch parent.(type) { +- case *ast.SwitchStmt, *ast.TypeSwitchStmt, *ast.SelectStmt: +- // Adjust closing curly brace of empty switch/select +- // statements so we can complete inside them. +- if fixEmptySwitch(n, tok, src) { +- fixes = append(fixes, FixedEmptySwitch) +- } +- } +- +- return true +- default: +- return true +- } +- }) +- return fixes +-} +- +-// TODO(rfindley): revert this instrumentation once we're certain the crash in +-// #59097 is fixed. +-type FixType int +- +-const ( +- noFix FixType = iota +- FixedCurlies +- FixedDanglingSelector +- FixedDeferOrGo +- FixedArrayType +- FixedInit +- FixedPhantomSelector +- FixedEmptySwitch +-) +- +-// fixSrc attempts to modify the file's source code to fix certain +-// syntax errors that leave the rest of the file unparsed. +-// +-// fixSrc returns a non-nil result if and only if a fix was applied. +-func fixSrc(f *ast.File, tf *token.File, src []byte) (newSrc []byte, fix FixType) { +- ast.PreorderStack(f, nil, func(n ast.Node, stack []ast.Node) bool { +- if newSrc != nil { +- return false +- } +- +- switch n := n.(type) { +- case *ast.BlockStmt: +- parent := stack[len(stack)-1] +- newSrc = fixMissingCurlies(f, n, parent, tf, src) +- if newSrc != nil { +- fix = FixedCurlies +- } +- case *ast.SelectorExpr: +- newSrc = fixDanglingSelector(n, tf, src) +- if newSrc != nil { +- fix = FixedDanglingSelector +- } +- } +- +- return newSrc == nil +- }) +- +- return newSrc, fix +-} +- +-// fixMissingCurlies adds in curly braces for block statements that +-// are missing curly braces. For example: +-// +-// if foo +-// +-// becomes +-// +-// if foo {} +-func fixMissingCurlies(f *ast.File, b *ast.BlockStmt, parent ast.Node, tok *token.File, src []byte) []byte { +- // If the "{" is already in the source code, there isn't anything to +- // fix since we aren't missing curlies. +- if b.Lbrace.IsValid() { +- braceOffset, err := safetoken.Offset(tok, b.Lbrace) +- if err != nil { +- return nil +- } +- if braceOffset < len(src) && src[braceOffset] == '{' { +- return nil +- } +- } +- +- parentLine := safetoken.Line(tok, parent.Pos()) +- +- if parentLine >= tok.LineCount() { +- // If we are the last line in the file, no need to fix anything. +- return nil +- } +- +- // Insert curlies at the end of parent's starting line. The parent +- // is the statement that contains the block, e.g. *ast.IfStmt. The +- // block's Pos()/End() can't be relied upon because they are based +- // on the (missing) curly braces. 
We assume the statement is a +- // single line for now and try sticking the curly braces at the end. +- insertPos := tok.LineStart(parentLine+1) - 1 +- +- // Scootch position backwards until it's not in a comment. For example: +- // +- // if foo<> // some amazing comment | +- // someOtherCode() +- // +- // insertPos will be located at "|", so we back it out of the comment. +- didSomething := true +- for didSomething { +- didSomething = false +- for _, c := range f.Comments { +- if c.Pos() < insertPos && insertPos <= c.End() { +- insertPos = c.Pos() +- didSomething = true +- } +- } +- } +- +- // Bail out if line doesn't end in an ident or ".". This is to avoid +- // cases like below where we end up making things worse by adding +- // curlies: +- // +- // if foo && +- // bar<> +- switch precedingToken(insertPos, tok, src) { +- case token.IDENT, token.PERIOD: +- // ok +- default: +- return nil +- } +- +- var buf bytes.Buffer +- buf.Grow(len(src) + 3) +- offset, err := safetoken.Offset(tok, insertPos) +- if err != nil { +- return nil +- } +- buf.Write(src[:offset]) +- +- // Detect if we need to insert a semicolon to fix "for" loop situations like: +- // +- // for i := foo(); foo<> +- // +- // Just adding curlies is not sufficient to make things parse well. +- if fs, ok := parent.(*ast.ForStmt); ok { +- if _, ok := fs.Cond.(*ast.BadExpr); !ok { +- if xs, ok := fs.Post.(*ast.ExprStmt); ok { +- if _, ok := xs.X.(*ast.BadExpr); ok { +- buf.WriteByte(';') +- } +- } +- } +- } +- +- // Insert "{}" at insertPos. +- buf.WriteByte('{') +- buf.WriteByte('}') +- buf.Write(src[offset:]) +- return buf.Bytes() +-} +- +-// fixEmptySwitch moves empty switch/select statements' closing curly +-// brace down one line. This allows us to properly detect incomplete +-// "case" and "default" keywords as inside the switch statement. For +-// example: +-// +-// switch { +-// def<> +-// } +-// +-// gets parsed like: +-// +-// switch { +-// } +-// +-// Later we manually pull out the "def" token, but we need to detect +-// that our "<>" position is inside the switch block. To do that we +-// move the curly brace so it looks like: +-// +-// switch { +-// +-// } +-// +-// The resulting bool reports whether any fixing occurred. +-func fixEmptySwitch(body *ast.BlockStmt, tok *token.File, src []byte) bool { +- // We only care about empty switch statements. +- if len(body.List) > 0 || !body.Rbrace.IsValid() { +- return false +- } +- +- // If the right brace is actually in the source code at the +- // specified position, don't mess with it. +- braceOffset, err := safetoken.Offset(tok, body.Rbrace) +- if err != nil { +- return false +- } +- if braceOffset < len(src) && src[braceOffset] == '}' { +- return false +- } +- +- braceLine := safetoken.Line(tok, body.Rbrace) +- if braceLine >= tok.LineCount() { +- // If we are the last line in the file, no need to fix anything. +- return false +- } +- +- // Move the right brace down one line. +- body.Rbrace = tok.LineStart(braceLine + 1) +- return true +-} +- +-// fixDanglingSelector inserts a real "_" selector expression in place +-// of a phantom parser-inserted "_" selector so that the parser will +-// not consume the following non-identifier token. +-// For example: +-// +-// func _() { +-// x.<> +-// } +-// +-// var x struct { i int } +-// +-// To fix completion at "<>", we insert a real "_" after the "." so the +-// following declaration of "x" can be parsed and type checked +-// normally. 
+-func fixDanglingSelector(s *ast.SelectorExpr, tf *token.File, src []byte) []byte { +- if !isPhantomUnderscore(s.Sel, tf, src) { +- return nil +- } +- +- if !s.X.End().IsValid() { +- return nil +- } +- +- insertOffset, err := safetoken.Offset(tf, s.X.End()) +- if err != nil { +- return nil +- } +- // Insert directly after the selector's ".". +- insertOffset++ +- if src[insertOffset-1] != '.' { +- return nil +- } +- +- return slices.Concat(src[:insertOffset], []byte("_"), src[insertOffset:]) +-} +- +-// fixPhantomSelector tries to fix selector expressions whose Sel is a +-// phantom (parser-invented) "_". If the text after the '.' is a +-// keyword, it updates Sel to a fake ast.Ident of that name. For +-// example: +-// +-// foo.var +-// +-// yields a "_" selector instead of "var" since "var" is a keyword. +-// +-// TODO(rfindley): should this constitute an ast 'fix'? +-// +-// The resulting bool reports whether any fixing occurred. +-func fixPhantomSelector(sel *ast.SelectorExpr, tf *token.File, src []byte) bool { +- if !isPhantomUnderscore(sel.Sel, tf, src) { +- return false +- } +- +- // Only consider selectors directly abutting the selector ".". This +- // avoids false positives in cases like: +- // +- // foo. // don't think "var" is our selector +- // var bar = 123 +- // +- if sel.Sel.Pos() != sel.X.End()+1 { +- return false +- } +- +- maybeKeyword := readKeyword(sel.Sel.Pos(), tf, src) +- if maybeKeyword == "" { +- return false +- } +- +- return replaceNode(sel, sel.Sel, &ast.Ident{ +- Name: maybeKeyword, +- NamePos: sel.Sel.Pos(), +- }) +-} +- +-// isPhantomUnderscore reports whether the given ident from a +-// SelectorExpr.Sel was invented by the parser and is not present in +-// source text. The parser creates a blank "_" identifier when the +-// syntax (e.g. a selector) demands one but none is present. The fixer +-// also inserts them. +-func isPhantomUnderscore(id *ast.Ident, tok *token.File, src []byte) bool { +- switch id.Name { +- case "_": // go1.24 parser +- offset, err := safetoken.Offset(tok, id.Pos()) +- return err == nil && offset < len(src) && src[offset] != '_' +- } +- return false // real +-} +- +-// fixInitStmt fixes cases where the parser misinterprets an +-// if/for/switch "init" statement as the "cond" conditional. In cases +-// like "if i := 0" the user hasn't typed the semicolon yet so the +-// parser is looking for the conditional expression. However, "i := 0" +-// are not valid expressions, so we get a BadExpr. +-// +-// The resulting bool reports whether any fixing occurred. +-func fixInitStmt(bad *ast.BadExpr, parent ast.Node, tok *token.File, src []byte) bool { +- if !bad.Pos().IsValid() || !bad.End().IsValid() { +- return false +- } +- +- // Try to extract a statement from the BadExpr. +- start, end, err := safetoken.Offsets(tok, bad.Pos(), bad.End()) +- if err != nil { +- return false +- } +- assert(end <= len(src), "offset overflow") // golang/go#72026 +- stmtBytes := src[start:end] +- stmt, err := parseStmt(tok, bad.Pos(), stmtBytes) +- if err != nil { +- return false +- } +- +- // If the parent statement doesn't already have an "init" statement, +- // move the extracted statement into the "init" field and insert a +- // dummy expression into the required "cond" field. 
+- switch p := parent.(type) { +- case *ast.IfStmt: +- if p.Init != nil { +- return false +- } +- p.Init = stmt +- p.Cond = &ast.Ident{ +- Name: "_", +- NamePos: stmt.End(), +- } +- return true +- case *ast.ForStmt: +- if p.Init != nil { +- return false +- } +- p.Init = stmt +- p.Cond = &ast.Ident{ +- Name: "_", +- NamePos: stmt.End(), +- } +- return true +- case *ast.SwitchStmt: +- if p.Init != nil { +- return false +- } +- p.Init = stmt +- p.Tag = nil +- return true +- } +- return false +-} +- +-// readKeyword reads the keyword starting at pos, if any. +-func readKeyword(pos token.Pos, tok *token.File, src []byte) string { +- var kwBytes []byte +- offset, err := safetoken.Offset(tok, pos) +- if err != nil { +- return "" +- } +- for i := offset; i < len(src); i++ { +- // Use a simplified identifier check since keywords are always lowercase ASCII. +- if src[i] < 'a' || src[i] > 'z' { +- break +- } +- kwBytes = append(kwBytes, src[i]) +- +- // Stop search at arbitrarily chosen too-long-for-a-keyword length. +- if len(kwBytes) > 15 { +- return "" +- } +- } +- +- if kw := string(kwBytes); token.Lookup(kw).IsKeyword() { +- return kw +- } +- +- return "" +-} +- +-// fixArrayType tries to parse an *ast.BadExpr into an *ast.ArrayType. +-// go/parser often turns lone array types like "[]int" into BadExprs +-// if it isn't expecting a type. +-func fixArrayType(bad *ast.BadExpr, parent ast.Node, tok *token.File, src []byte) bool { +- // Our expected input is a bad expression that looks like "[]someExpr". +- +- from, to := bad.Pos(), bad.End() +- fromOffset, toOffset, err := safetoken.Offsets(tok, from, to) +- if err != nil { +- return false +- } +- +- exprBytes := bytes.TrimSpace(slices.Clone(src[fromOffset:toOffset])) +- +- // If our expression ends in "]" (e.g. "[]"), add a phantom selector +- // so we can complete directly after the "[]". +- if bytes.HasSuffix(exprBytes, []byte("]")) { +- exprBytes = append(exprBytes, '_') +- } +- +- // Add "{}" to turn our ArrayType into a CompositeLit. This is to +- // handle the case of "[...]int" where we must make it a composite +- // literal to be parseable. +- exprBytes = append(exprBytes, '{', '}') +- +- expr, err := parseExpr(tok, from, exprBytes) +- if err != nil { +- return false +- } +- +- cl, _ := expr.(*ast.CompositeLit) +- if cl == nil { +- return false +- } +- +- at, _ := cl.Type.(*ast.ArrayType) +- if at == nil { +- return false +- } +- +- return replaceNode(parent, bad, at) +-} +- +-// precedingToken scans src to find the token preceding pos. +-func precedingToken(pos token.Pos, tok *token.File, src []byte) token.Token { +- s := &scanner.Scanner{} +- s.Init(tok, src, nil, 0) +- +- var lastTok token.Token +- for { +- p, t, _ := s.Scan() +- if t == token.EOF || p >= pos { +- break +- } +- +- lastTok = t +- } +- return lastTok +-} +- +-// fixDeferOrGoStmt tries to parse an *ast.BadStmt into a defer or a go statement. +-// +-// go/parser packages a statement of the form "defer x." as an *ast.BadStmt because +-// it does not include a call expression. This means that go/types skips type-checking +-// this statement entirely, and we can't use the type information when completing. +-// Here, we try to generate a fake *ast.DeferStmt or *ast.GoStmt to put into the AST, +-// instead of the *ast.BadStmt. +-func fixDeferOrGoStmt(bad *ast.BadStmt, parent ast.Node, tok *token.File, src []byte) bool { +- // Check if we have a bad statement containing either a "go" or "defer". 
+- s := &scanner.Scanner{} +- s.Init(tok, src, nil, 0) +- +- var ( +- pos token.Pos +- tkn token.Token +- ) +- for { +- if tkn == token.EOF { +- return false +- } +- if pos >= bad.From { +- break +- } +- pos, tkn, _ = s.Scan() +- } +- +- var stmt ast.Stmt +- switch tkn { +- case token.DEFER: +- stmt = &ast.DeferStmt{ +- Defer: pos, +- } +- case token.GO: +- stmt = &ast.GoStmt{ +- Go: pos, +- } +- default: +- return false +- } +- +- var ( +- from, to, last token.Pos +- lastToken token.Token +- braceDepth int +- phantomSelectors []token.Pos +- ) +-FindTo: +- for { +- to, tkn, _ = s.Scan() +- +- if from == token.NoPos { +- from = to +- } +- +- switch tkn { +- case token.EOF: +- break FindTo +- case token.SEMICOLON: +- // If we aren't in nested braces, end of statement means +- // end of expression. +- if braceDepth == 0 { +- break FindTo +- } +- case token.LBRACE: +- braceDepth++ +- } +- +- // This handles the common dangling selector case. For example in +- // +- // defer fmt. +- // y := 1 +- // +- // we notice the dangling period and end our expression. +- // +- // If the previous token was a "." and we are looking at a "}", +- // the period is likely a dangling selector and needs a phantom +- // "_". Likewise if the current token is on a different line than +- // the period, the period is likely a dangling selector. +- if lastToken == token.PERIOD && (tkn == token.RBRACE || safetoken.Line(tok, to) > safetoken.Line(tok, last)) { +- // Insert phantom "_" selector after the dangling ".". +- phantomSelectors = append(phantomSelectors, last+1) +- // If we aren't in a block then end the expression after the ".". +- if braceDepth == 0 { +- to = last + 1 +- break +- } +- } +- +- lastToken = tkn +- last = to +- +- switch tkn { +- case token.RBRACE: +- braceDepth-- +- if braceDepth <= 0 { +- if braceDepth == 0 { +- // +1 to include the "}" itself. +- to += 1 +- } +- break FindTo +- } +- } +- } +- +- fromOffset, toOffset, err := safetoken.Offsets(tok, from, to) +- if err != nil { +- return false +- } +- if !from.IsValid() || fromOffset >= len(src) { +- return false +- } +- if !to.IsValid() || toOffset >= len(src) { +- return false +- } +- +- // Insert any phantom selectors needed to prevent dangling "." from messing +- // up the AST. +- exprBytes := make([]byte, 0, int(to-from)+len(phantomSelectors)) +- for i, b := range src[fromOffset:toOffset] { +- if len(phantomSelectors) > 0 && from+token.Pos(i) == phantomSelectors[0] { +- exprBytes = append(exprBytes, '_') +- phantomSelectors = phantomSelectors[1:] +- } +- exprBytes = append(exprBytes, b) +- } +- +- if len(phantomSelectors) > 0 { +- exprBytes = append(exprBytes, '_') +- } +- +- expr, err := parseExpr(tok, from, exprBytes) +- if err != nil { +- return false +- } +- +- // Package the expression into a fake *ast.CallExpr and re-insert +- // into the function. +- call := &ast.CallExpr{ +- Fun: expr, +- Lparen: to, +- Rparen: to, +- } +- +- switch stmt := stmt.(type) { +- case *ast.DeferStmt: +- stmt.Call = call +- case *ast.GoStmt: +- stmt.Call = call +- } +- +- return replaceNode(parent, bad, stmt) +-} +- +-// parseStmt parses the statement in src and updates its position to +-// start at pos. +-// +-// tok is the original file containing pos. Used to ensure that all adjusted +-// positions are valid. +-func parseStmt(tok *token.File, pos token.Pos, src []byte) (ast.Stmt, error) { +- // Wrap our expression to make it a valid Go file we can pass to ParseFile. 
+- fileSrc := slices.Concat([]byte("package fake;func _(){"), src, []byte("}")) +- +- // Use ParseFile instead of ParseExpr because ParseFile has +- // best-effort behavior, whereas ParseExpr fails hard on any error. +- fakeFile, err := parser.ParseFile(token.NewFileSet(), "", fileSrc, parser.SkipObjectResolution) +- if fakeFile == nil { +- return nil, fmt.Errorf("error reading fake file source: %v", err) +- } +- +- // Extract our expression node from inside the fake file. +- if len(fakeFile.Decls) == 0 { +- return nil, fmt.Errorf("error parsing fake file: %v", err) +- } +- +- fakeDecl, _ := fakeFile.Decls[0].(*ast.FuncDecl) +- if fakeDecl == nil || len(fakeDecl.Body.List) == 0 { +- return nil, fmt.Errorf("no statement in %s: %v", src, err) +- } +- +- stmt := fakeDecl.Body.List[0] +- +- // parser.ParseFile returns undefined positions. +- // Adjust them for the current file. +- offsetPositions(tok, stmt, pos-1-(stmt.Pos()-1)) +- +- return stmt, nil +-} +- +-// parseExpr parses the expression in src and updates its position to +-// start at pos. +-func parseExpr(tok *token.File, pos token.Pos, src []byte) (ast.Expr, error) { +- stmt, err := parseStmt(tok, pos, src) +- if err != nil { +- return nil, err +- } +- +- exprStmt, ok := stmt.(*ast.ExprStmt) +- if !ok { +- return nil, fmt.Errorf("no expr in %s: %v", src, err) +- } +- +- return exprStmt.X, nil +-} +- +-var tokenPosType = reflect.TypeOf(token.NoPos) +- +-// offsetPositions applies an offset to the positions in an ast.Node. +-func offsetPositions(tok *token.File, n ast.Node, offset token.Pos) { +- fileBase := token.Pos(tok.Base()) +- fileEnd := fileBase + token.Pos(tok.Size()) +- ast.Inspect(n, func(n ast.Node) bool { +- if n == nil { +- return false +- } +- +- v := reflect.ValueOf(n).Elem() +- +- switch v.Kind() { +- case reflect.Struct: +- for i := 0; i < v.NumField(); i++ { +- f := v.Field(i) +- if f.Type() != tokenPosType { +- continue +- } +- +- if !f.CanSet() { +- continue +- } +- +- pos := token.Pos(f.Int()) +- +- // Don't offset invalid positions: they should stay invalid. +- if !pos.IsValid() { +- continue +- } +- +- // Clamp value to valid range; see #64335. +- // +- // TODO(golang/go#64335): this is a hack, because our fixes should not +- // produce positions that overflow (but they do; see golang/go#64488, +- // #73438, #66790, #66683, #67704). +- pos = min(max(pos+offset, fileBase), fileEnd) +- +- f.SetInt(int64(pos)) +- } +- } +- +- return true +- }) +-} +- +-// replaceNode updates parent's child oldChild to be newChild. It +-// returns whether it replaced successfully. +-func replaceNode(parent, oldChild, newChild ast.Node) bool { +- if parent == nil || oldChild == nil || newChild == nil { +- return false +- } +- +- parentVal := reflect.ValueOf(parent).Elem() +- if parentVal.Kind() != reflect.Struct { +- return false +- } +- +- newChildVal := reflect.ValueOf(newChild) +- +- tryReplace := func(v reflect.Value) bool { +- if !v.CanSet() || !v.CanInterface() { +- return false +- } +- +- // If the existing value is oldChild, we found our child. Make +- // sure our newChild is assignable and then make the swap. +- if v.Interface() == oldChild && newChildVal.Type().AssignableTo(v.Type()) { +- v.Set(newChildVal) +- return true +- } +- +- return false +- } +- +- // Loop over parent's struct fields. +- for i := 0; i < parentVal.NumField(); i++ { +- f := parentVal.Field(i) +- +- switch f.Kind() { +- // Check interface and pointer fields. 
+- case reflect.Interface, reflect.Pointer: +- if tryReplace(f) { +- return true +- } +- +- // Search through any slice fields. +- case reflect.Slice: +- for i := 0; i < f.Len(); i++ { +- if tryReplace(f.Index(i)) { +- return true +- } +- } +- } +- } +- +- return false +-} +diff -urN a/gopls/internal/cache/parsego/parse_test.go b/gopls/internal/cache/parsego/parse_test.go +--- a/gopls/internal/cache/parsego/parse_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/cache/parsego/parse_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,367 +0,0 @@ +-// Copyright 2023 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package parsego_test +- +-import ( +- "context" +- "fmt" +- "go/ast" +- "go/parser" +- "go/token" +- "reflect" +- "slices" +- "testing" +- +- "golang.org/x/tools/gopls/internal/cache/parsego" +- "golang.org/x/tools/gopls/internal/util/safetoken" +- "golang.org/x/tools/gopls/internal/util/tokeninternal" +- "golang.org/x/tools/internal/astutil" +-) +- +-// TODO(golang/go#64335): we should have many more tests for fixed syntax. +- +-func TestFixPosition_Issue64488(t *testing.T) { +- // This test reproduces the conditions of golang/go#64488, where a type error +- // on fixed syntax overflows the token.File. +- const src = ` +-package foo +- +-func _() { +- type myThing struct{} +- var foo []myThing +- for ${1:}, ${2:} := range foo { +- $0 +-} +-} +-` +- +- pgf, _ := parsego.Parse(context.Background(), token.NewFileSet(), "file://foo.go", []byte(src), parsego.Full, false) +- fset := tokeninternal.FileSetFor(pgf.Tok) +- ast.Inspect(pgf.File, func(n ast.Node) bool { +- if n != nil { +- posn := safetoken.StartPosition(fset, n.Pos()) +- if !posn.IsValid() { +- t.Fatalf("invalid position for %T (%v): %v not in [%d, %d]", n, n, n.Pos(), pgf.Tok.Base(), pgf.Tok.Base()+pgf.Tok.Size()) +- } +- } +- return true +- }) +-} +- +-func TestFixGoAndDefer(t *testing.T) { +- var testCases = []struct { +- source string +- fixes []parsego.FixType +- wantFix string +- }{ +- {source: "", fixes: nil}, // keyword alone +- {source: "a.b(", fixes: nil}, +- {source: "a.b()", fixes: nil}, +- {source: "func {", fixes: nil}, +- { +- source: "f", +- fixes: []parsego.FixType{parsego.FixedDeferOrGo}, +- wantFix: "f()", +- }, +- { +- source: "func", +- fixes: []parsego.FixType{parsego.FixedDeferOrGo}, +- wantFix: "(func())()", +- }, +- { +- source: "func {}", +- fixes: []parsego.FixType{parsego.FixedDeferOrGo}, +- wantFix: "(func())()", +- }, +- { +- source: "func {}(", +- fixes: []parsego.FixType{parsego.FixedDeferOrGo}, +- wantFix: "(func())()", +- }, +- { +- source: "func {}()", +- fixes: []parsego.FixType{parsego.FixedDeferOrGo}, +- wantFix: "(func())()", +- }, +- { +- source: "a.", +- fixes: []parsego.FixType{parsego.FixedDeferOrGo, parsego.FixedDanglingSelector, parsego.FixedDeferOrGo}, +- wantFix: "a._()", +- }, +- { +- source: "a.b", +- fixes: []parsego.FixType{parsego.FixedDeferOrGo}, +- wantFix: "a.b()", +- }, +- } +- +- for _, keyword := range []string{"go", "defer"} { +- for _, tc := range testCases { +- source := fmt.Sprintf("%s %s", keyword, tc.source) +- t.Run(source, func(t *testing.T) { +- src := filesrc(source) +- pgf, fixes := parsego.Parse(context.Background(), token.NewFileSet(), "file://foo.go", src, parsego.Full, false) +- if !slices.Equal(fixes, tc.fixes) { +- t.Fatalf("got %v want %v", fixes, tc.fixes) +- } +- if tc.fixes == nil { +- return +- } +- +- fset := 
tokeninternal.FileSetFor(pgf.Tok) +- inspect(t, pgf, func(stmt ast.Stmt) { +- var call *ast.CallExpr +- switch stmt := stmt.(type) { +- case *ast.DeferStmt: +- call = stmt.Call +- case *ast.GoStmt: +- call = stmt.Call +- default: +- return +- } +- +- if got := astutil.Format(fset, call); got != tc.wantFix { +- t.Fatalf("got %v want %v", got, tc.wantFix) +- } +- }) +- }) +- } +- } +-} +- +-// TestFixInit tests the init stmt after if/for/switch which is put under cond after parsing +-// will be fixed and moved to Init. +-func TestFixInit(t *testing.T) { +- var testCases = []struct { +- name string +- source string +- fixes []parsego.FixType +- wantInitFix string +- }{ +- { +- name: "simple define", +- source: "i := 0", +- fixes: []parsego.FixType{parsego.FixedInit}, +- wantInitFix: "i := 0", +- }, +- { +- name: "simple assign", +- source: "i = 0", +- fixes: []parsego.FixType{parsego.FixedInit}, +- wantInitFix: "i = 0", +- }, +- { +- name: "define with function call", +- source: "i := f()", +- fixes: []parsego.FixType{parsego.FixedInit}, +- wantInitFix: "i := f()", +- }, +- { +- name: "assign with function call", +- source: "i = f()", +- fixes: []parsego.FixType{parsego.FixedInit}, +- wantInitFix: "i = f()", +- }, +- { +- name: "assign with receiving chan", +- source: "i = <-ch", +- fixes: []parsego.FixType{parsego.FixedInit}, +- wantInitFix: "i = <-ch", +- }, +- +- // fixInitStmt won't fix the following cases. +- { +- name: "call in if", +- source: `fmt.Println("helloworld")`, +- fixes: nil, +- }, +- { +- name: "receive chan", +- source: `<- ch`, +- fixes: nil, +- }, +- } +- +- // currently, switch will leave its Tag empty after fix because it allows empty, +- // and if and for will leave an underscore in Cond. +- getWantCond := func(keyword string) string { +- if keyword == "switch" { +- return "" +- } +- return "_" +- } +- +- for _, keyword := range []string{"if", "for", "switch"} { +- for _, tc := range testCases { +- caseName := fmt.Sprintf("%s %s", keyword, tc.name) +- t.Run(caseName, func(t *testing.T) { +- // the init stmt is treated as a cond. +- src := filesrc(fmt.Sprintf("%s %s {}", keyword, tc.source)) +- pgf, fixes := parsego.Parse(context.Background(), token.NewFileSet(), "file://foo.go", src, parsego.Full, false) +- if !slices.Equal(fixes, tc.fixes) { +- t.Fatalf("TestFixArrayType(): got %v want %v", fixes, tc.fixes) +- } +- if tc.fixes == nil { +- return +- } +- +- // ensure the init stmt is parsed to a BadExpr. 
+- ensureSource(t, src, func(bad *ast.BadExpr) {}) +- +- info := func(n ast.Node, wantStmt string) (init ast.Stmt, cond ast.Expr, has bool) { +- switch wantStmt { +- case "if": +- if e, ok := n.(*ast.IfStmt); ok { +- return e.Init, e.Cond, true +- } +- case "switch": +- if e, ok := n.(*ast.SwitchStmt); ok { +- return e.Init, e.Tag, true +- } +- case "for": +- if e, ok := n.(*ast.ForStmt); ok { +- return e.Init, e.Cond, true +- } +- } +- return nil, nil, false +- } +- fset := tokeninternal.FileSetFor(pgf.Tok) +- inspect(t, pgf, func(n ast.Stmt) { +- if init, cond, ok := info(n, keyword); ok { +- if got := astutil.Format(fset, init); got != tc.wantInitFix { +- t.Fatalf("%s: Init got %v want %v", tc.source, got, tc.wantInitFix) +- } +- +- wantCond := getWantCond(keyword) +- if got := astutil.Format(fset, cond); got != wantCond { +- t.Fatalf("%s: Cond got %v want %v", tc.source, got, wantCond) +- } +- } +- }) +- }) +- } +- } +-} +- +-func TestFixPhantomSelector(t *testing.T) { +- wantFixes := []parsego.FixType{parsego.FixedPhantomSelector} +- var testCases = []struct { +- source string +- fixes []parsego.FixType +- }{ +- {source: "a.break", fixes: wantFixes}, +- {source: "_.break", fixes: wantFixes}, +- {source: "a.case", fixes: wantFixes}, +- {source: "a.chan", fixes: wantFixes}, +- {source: "a.const", fixes: wantFixes}, +- {source: "a.continue", fixes: wantFixes}, +- {source: "a.default", fixes: wantFixes}, +- {source: "a.defer", fixes: wantFixes}, +- {source: "a.else", fixes: wantFixes}, +- {source: "a.fallthrough", fixes: wantFixes}, +- {source: "a.for", fixes: wantFixes}, +- {source: "a.func", fixes: wantFixes}, +- {source: "a.go", fixes: wantFixes}, +- {source: "a.goto", fixes: wantFixes}, +- {source: "a.if", fixes: wantFixes}, +- {source: "a.import", fixes: wantFixes}, +- {source: "a.interface", fixes: wantFixes}, +- {source: "a.map", fixes: wantFixes}, +- {source: "a.package", fixes: wantFixes}, +- {source: "a.range", fixes: wantFixes}, +- {source: "a.return", fixes: wantFixes}, +- {source: "a.select", fixes: wantFixes}, +- {source: "a.struct", fixes: wantFixes}, +- {source: "a.switch", fixes: wantFixes}, +- {source: "a.type", fixes: wantFixes}, +- {source: "a.var", fixes: wantFixes}, +- +- {source: "break.break"}, +- {source: "a.BREAK"}, +- {source: "a.break_"}, +- {source: "a.breaka"}, +- } +- +- for _, tc := range testCases { +- t.Run(tc.source, func(t *testing.T) { +- src := filesrc(tc.source) +- pgf, fixes := parsego.Parse(context.Background(), token.NewFileSet(), "file://foo.go", src, parsego.Full, false) +- if !slices.Equal(fixes, tc.fixes) { +- t.Fatalf("got %v want %v", fixes, tc.fixes) +- } +- +- // some fixes don't fit the fix scenario, but we want to confirm it. +- if fixes == nil { +- return +- } +- +- // ensure the selector has been converted to underscore by parser. +- ensureSource(t, src, func(sel *ast.SelectorExpr) { +- if sel.Sel.Name != "_" { +- t.Errorf("%s: selector name is %q, want _", tc.source, sel.Sel.Name) +- } +- }) +- +- fset := tokeninternal.FileSetFor(pgf.Tok) +- inspect(t, pgf, func(sel *ast.SelectorExpr) { +- // the fix should restore the selector as is. +- if got, want := astutil.Format(fset, sel), tc.source; got != want { +- t.Fatalf("got %v want %v", got, want) +- } +- }) +- }) +- } +-} +- +-// inspect helps to go through each node of pgf and trigger checkFn if the type matches T. 
+-func inspect[T ast.Node](t *testing.T, pgf *parsego.File, checkFn func(n T)) { +- fset := tokeninternal.FileSetFor(pgf.Tok) +- var visited bool +- ast.Inspect(pgf.File, func(node ast.Node) bool { +- if node != nil { +- posn := safetoken.StartPosition(fset, node.Pos()) +- if !posn.IsValid() { +- t.Fatalf("invalid position for %T (%v): %v not in [%d, %d]", node, node, node.Pos(), pgf.Tok.Base(), pgf.Tok.Base()+pgf.Tok.Size()) +- } +- if n, ok := node.(T); ok { +- visited = true +- checkFn(n) +- } +- } +- return true +- }) +- if !visited { +- var n T +- t.Fatalf("got no %s node but want at least one", reflect.TypeOf(n)) +- } +-} +- +-// ensureSource helps to parse src into an ast.File by go/parser and trigger checkFn if the type matches T. +-func ensureSource[T ast.Node](t *testing.T, src []byte, checkFn func(n T)) { +- // tolerate error as usually the src is problematic. +- originFile, _ := parser.ParseFile(token.NewFileSet(), "file://foo.go", src, parsego.Full) +- var visited bool +- ast.Inspect(originFile, func(node ast.Node) bool { +- if n, ok := node.(T); ok { +- visited = true +- checkFn(n) +- } +- return true +- }) +- +- if !visited { +- var n T +- t.Fatalf("got no %s node but want at least one", reflect.TypeOf(n)) +- } +-} +- +-func filesrc(expressions string) []byte { +- const srcTmpl = `package foo +- +-func _() { +- %s +-}` +- return fmt.Appendf(nil, srcTmpl, expressions) +-} +diff -urN a/gopls/internal/cache/parsego/resolver_compat.go b/gopls/internal/cache/parsego/resolver_compat.go +--- a/gopls/internal/cache/parsego/resolver_compat.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/cache/parsego/resolver_compat.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,24 +0,0 @@ +-// Copyright 2024 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-// This file contains declarations needed for compatibility with resolver.go +-// copied from GOROOT. +- +-package parsego +- +-import "go/token" +- +-// assert panics with the given msg if cond is not true. +-func assert(cond bool, msg string) { +- if !cond { +- panic(msg) +- } +-} +- +-// A bailout panic is raised to indicate early termination. pos and msg are +-// only populated when bailing out of object resolution. +-type bailout struct { +- pos token.Pos +- msg string +-} +diff -urN a/gopls/internal/cache/parsego/resolver_gen.go b/gopls/internal/cache/parsego/resolver_gen.go +--- a/gopls/internal/cache/parsego/resolver_gen.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/cache/parsego/resolver_gen.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,32 +0,0 @@ +-// Copyright 2024 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-//go:build ignore +- +-package main +- +-import ( +- "bytes" +- "log" +- "os" +- "os/exec" +- "path/filepath" +- "strings" +-) +- +-func main() { +- output, err := exec.Command("go", "env", "GOROOT").Output() +- if err != nil { +- log.Fatalf("resolving GOROOT: %v", err) +- } +- goroot := strings.TrimSpace(string(output)) +- data, err := os.ReadFile(filepath.Join(goroot, "src/go/parser/resolver.go")) +- if err != nil { +- log.Fatalf("reading resolver.go: %v", err) +- } +- data = bytes.Replace(data, []byte("\npackage parser"), []byte("\n// Code generated by resolver_gen.go. 
DO NOT EDIT.\n\npackage parsego"), 1) +- if err := os.WriteFile("resolver.go", data, 0666); err != nil { +- log.Fatalf("writing resolver.go: %v", err) +- } +-} +diff -urN a/gopls/internal/cache/parsego/resolver.go b/gopls/internal/cache/parsego/resolver.go +--- a/gopls/internal/cache/parsego/resolver.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/cache/parsego/resolver.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,614 +0,0 @@ +-// Copyright 2021 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-// Code generated by resolver_gen.go. DO NOT EDIT. +- +-package parsego +- +-import ( +- "fmt" +- "go/ast" +- "go/token" +- "strings" +-) +- +-const debugResolve = false +- +-// resolveFile walks the given file to resolve identifiers within the file +-// scope, updating ast.Ident.Obj fields with declaration information. +-// +-// If declErr is non-nil, it is used to report declaration errors during +-// resolution. tok is used to format position in error messages. +-func resolveFile(file *ast.File, handle *token.File, declErr func(token.Pos, string)) { +- pkgScope := ast.NewScope(nil) +- r := &resolver{ +- handle: handle, +- declErr: declErr, +- topScope: pkgScope, +- pkgScope: pkgScope, +- depth: 1, +- } +- +- for _, decl := range file.Decls { +- ast.Walk(r, decl) +- } +- +- r.closeScope() +- assert(r.topScope == nil, "unbalanced scopes") +- assert(r.labelScope == nil, "unbalanced label scopes") +- +- // resolve global identifiers within the same file +- i := 0 +- for _, ident := range r.unresolved { +- // i <= index for current ident +- assert(ident.Obj == unresolved, "object already resolved") +- ident.Obj = r.pkgScope.Lookup(ident.Name) // also removes unresolved sentinel +- if ident.Obj == nil { +- r.unresolved[i] = ident +- i++ +- } else if debugResolve { +- pos := ident.Obj.Decl.(interface{ Pos() token.Pos }).Pos() +- r.trace("resolved %s@%v to package object %v", ident.Name, ident.Pos(), pos) +- } +- } +- file.Scope = r.pkgScope +- file.Unresolved = r.unresolved[0:i] +-} +- +-const maxScopeDepth int = 1e3 +- +-type resolver struct { +- handle *token.File +- declErr func(token.Pos, string) +- +- // Ordinary identifier scopes +- pkgScope *ast.Scope // pkgScope.Outer == nil +- topScope *ast.Scope // top-most scope; may be pkgScope +- unresolved []*ast.Ident // unresolved identifiers +- depth int // scope depth +- +- // Label scopes +- // (maintained by open/close LabelScope) +- labelScope *ast.Scope // label scope for current function +- targetStack [][]*ast.Ident // stack of unresolved labels +-} +- +-func (r *resolver) trace(format string, args ...any) { +- fmt.Println(strings.Repeat(". ", r.depth) + r.sprintf(format, args...)) +-} +- +-func (r *resolver) sprintf(format string, args ...any) string { +- for i, arg := range args { +- switch arg := arg.(type) { +- case token.Pos: +- args[i] = r.handle.Position(arg) +- } +- } +- return fmt.Sprintf(format, args...) 
+-} +- +-func (r *resolver) openScope(pos token.Pos) { +- r.depth++ +- if r.depth > maxScopeDepth { +- panic(bailout{pos: pos, msg: "exceeded max scope depth during object resolution"}) +- } +- if debugResolve { +- r.trace("opening scope @%v", pos) +- } +- r.topScope = ast.NewScope(r.topScope) +-} +- +-func (r *resolver) closeScope() { +- r.depth-- +- if debugResolve { +- r.trace("closing scope") +- } +- r.topScope = r.topScope.Outer +-} +- +-func (r *resolver) openLabelScope() { +- r.labelScope = ast.NewScope(r.labelScope) +- r.targetStack = append(r.targetStack, nil) +-} +- +-func (r *resolver) closeLabelScope() { +- // resolve labels +- n := len(r.targetStack) - 1 +- scope := r.labelScope +- for _, ident := range r.targetStack[n] { +- ident.Obj = scope.Lookup(ident.Name) +- if ident.Obj == nil && r.declErr != nil { +- r.declErr(ident.Pos(), fmt.Sprintf("label %s undefined", ident.Name)) +- } +- } +- // pop label scope +- r.targetStack = r.targetStack[0:n] +- r.labelScope = r.labelScope.Outer +-} +- +-func (r *resolver) declare(decl, data any, scope *ast.Scope, kind ast.ObjKind, idents ...*ast.Ident) { +- for _, ident := range idents { +- if ident.Obj != nil { +- panic(fmt.Sprintf("%v: identifier %s already declared or resolved", ident.Pos(), ident.Name)) +- } +- obj := ast.NewObj(kind, ident.Name) +- // remember the corresponding declaration for redeclaration +- // errors and global variable resolution/typechecking phase +- obj.Decl = decl +- obj.Data = data +- // Identifiers (for receiver type parameters) are written to the scope, but +- // never set as the resolved object. See go.dev/issue/50956. +- if _, ok := decl.(*ast.Ident); !ok { +- ident.Obj = obj +- } +- if ident.Name != "_" { +- if debugResolve { +- r.trace("declaring %s@%v", ident.Name, ident.Pos()) +- } +- if alt := scope.Insert(obj); alt != nil && r.declErr != nil { +- prevDecl := "" +- if pos := alt.Pos(); pos.IsValid() { +- prevDecl = r.sprintf("\n\tprevious declaration at %v", pos) +- } +- r.declErr(ident.Pos(), fmt.Sprintf("%s redeclared in this block%s", ident.Name, prevDecl)) +- } +- } +- } +-} +- +-func (r *resolver) shortVarDecl(decl *ast.AssignStmt) { +- // Go spec: A short variable declaration may redeclare variables +- // provided they were originally declared in the same block with +- // the same type, and at least one of the non-blank variables is new. +- n := 0 // number of new variables +- for _, x := range decl.Lhs { +- if ident, isIdent := x.(*ast.Ident); isIdent { +- assert(ident.Obj == nil, "identifier already declared or resolved") +- obj := ast.NewObj(ast.Var, ident.Name) +- // remember corresponding assignment for other tools +- obj.Decl = decl +- ident.Obj = obj +- if ident.Name != "_" { +- if debugResolve { +- r.trace("declaring %s@%v", ident.Name, ident.Pos()) +- } +- if alt := r.topScope.Insert(obj); alt != nil { +- ident.Obj = alt // redeclaration +- } else { +- n++ // new declaration +- } +- } +- } +- } +- if n == 0 && r.declErr != nil { +- r.declErr(decl.Lhs[0].Pos(), "no new variables on left side of :=") +- } +-} +- +-// The unresolved object is a sentinel to mark identifiers that have been added +-// to the list of unresolved identifiers. The sentinel is only used for verifying +-// internal consistency. +-var unresolved = new(ast.Object) +- +-// If x is an identifier, resolve attempts to resolve x by looking up +-// the object it denotes. If no object is found and collectUnresolved is +-// set, x is marked as unresolved and collected in the list of unresolved +-// identifiers. 
+-func (r *resolver) resolve(ident *ast.Ident, collectUnresolved bool) { +- if ident.Obj != nil { +- panic(r.sprintf("%v: identifier %s already declared or resolved", ident.Pos(), ident.Name)) +- } +- // '_' should never refer to existing declarations, because it has special +- // handling in the spec. +- if ident.Name == "_" { +- return +- } +- for s := r.topScope; s != nil; s = s.Outer { +- if obj := s.Lookup(ident.Name); obj != nil { +- if debugResolve { +- r.trace("resolved %v:%s to %v", ident.Pos(), ident.Name, obj) +- } +- assert(obj.Name != "", "obj with no name") +- // Identifiers (for receiver type parameters) are written to the scope, +- // but never set as the resolved object. See go.dev/issue/50956. +- if _, ok := obj.Decl.(*ast.Ident); !ok { +- ident.Obj = obj +- } +- return +- } +- } +- // all local scopes are known, so any unresolved identifier +- // must be found either in the file scope, package scope +- // (perhaps in another file), or universe scope --- collect +- // them so that they can be resolved later +- if collectUnresolved { +- ident.Obj = unresolved +- r.unresolved = append(r.unresolved, ident) +- } +-} +- +-func (r *resolver) walkExprs(list []ast.Expr) { +- for _, node := range list { +- ast.Walk(r, node) +- } +-} +- +-func (r *resolver) walkLHS(list []ast.Expr) { +- for _, expr := range list { +- expr := ast.Unparen(expr) +- if _, ok := expr.(*ast.Ident); !ok && expr != nil { +- ast.Walk(r, expr) +- } +- } +-} +- +-func (r *resolver) walkStmts(list []ast.Stmt) { +- for _, stmt := range list { +- ast.Walk(r, stmt) +- } +-} +- +-func (r *resolver) Visit(node ast.Node) ast.Visitor { +- if debugResolve && node != nil { +- r.trace("node %T@%v", node, node.Pos()) +- } +- +- switch n := node.(type) { +- +- // Expressions. +- case *ast.Ident: +- r.resolve(n, true) +- +- case *ast.FuncLit: +- r.openScope(n.Pos()) +- defer r.closeScope() +- r.walkFuncType(n.Type) +- r.walkBody(n.Body) +- +- case *ast.SelectorExpr: +- ast.Walk(r, n.X) +- // Note: don't try to resolve n.Sel, as we don't support qualified +- // resolution. +- +- case *ast.StructType: +- r.openScope(n.Pos()) +- defer r.closeScope() +- r.walkFieldList(n.Fields, ast.Var) +- +- case *ast.FuncType: +- r.openScope(n.Pos()) +- defer r.closeScope() +- r.walkFuncType(n) +- +- case *ast.CompositeLit: +- if n.Type != nil { +- ast.Walk(r, n.Type) +- } +- for _, e := range n.Elts { +- if kv, _ := e.(*ast.KeyValueExpr); kv != nil { +- // See go.dev/issue/45160: try to resolve composite lit keys, but don't +- // collect them as unresolved if resolution failed. This replicates +- // existing behavior when resolving during parsing. 
+- if ident, _ := kv.Key.(*ast.Ident); ident != nil { +- r.resolve(ident, false) +- } else { +- ast.Walk(r, kv.Key) +- } +- ast.Walk(r, kv.Value) +- } else { +- ast.Walk(r, e) +- } +- } +- +- case *ast.InterfaceType: +- r.openScope(n.Pos()) +- defer r.closeScope() +- r.walkFieldList(n.Methods, ast.Fun) +- +- // Statements +- case *ast.LabeledStmt: +- r.declare(n, nil, r.labelScope, ast.Lbl, n.Label) +- ast.Walk(r, n.Stmt) +- +- case *ast.AssignStmt: +- r.walkExprs(n.Rhs) +- if n.Tok == token.DEFINE { +- r.shortVarDecl(n) +- } else { +- r.walkExprs(n.Lhs) +- } +- +- case *ast.BranchStmt: +- // add to list of unresolved targets +- if n.Tok != token.FALLTHROUGH && n.Label != nil { +- depth := len(r.targetStack) - 1 +- r.targetStack[depth] = append(r.targetStack[depth], n.Label) +- } +- +- case *ast.BlockStmt: +- r.openScope(n.Pos()) +- defer r.closeScope() +- r.walkStmts(n.List) +- +- case *ast.IfStmt: +- r.openScope(n.Pos()) +- defer r.closeScope() +- if n.Init != nil { +- ast.Walk(r, n.Init) +- } +- ast.Walk(r, n.Cond) +- ast.Walk(r, n.Body) +- if n.Else != nil { +- ast.Walk(r, n.Else) +- } +- +- case *ast.CaseClause: +- r.walkExprs(n.List) +- r.openScope(n.Pos()) +- defer r.closeScope() +- r.walkStmts(n.Body) +- +- case *ast.SwitchStmt: +- r.openScope(n.Pos()) +- defer r.closeScope() +- if n.Init != nil { +- ast.Walk(r, n.Init) +- } +- if n.Tag != nil { +- // The scope below reproduces some unnecessary behavior of the parser, +- // opening an extra scope in case this is a type switch. It's not needed +- // for expression switches. +- // TODO: remove this once we've matched the parser resolution exactly. +- if n.Init != nil { +- r.openScope(n.Tag.Pos()) +- defer r.closeScope() +- } +- ast.Walk(r, n.Tag) +- } +- if n.Body != nil { +- r.walkStmts(n.Body.List) +- } +- +- case *ast.TypeSwitchStmt: +- if n.Init != nil { +- r.openScope(n.Pos()) +- defer r.closeScope() +- ast.Walk(r, n.Init) +- } +- r.openScope(n.Assign.Pos()) +- defer r.closeScope() +- ast.Walk(r, n.Assign) +- // s.Body consists only of case clauses, so does not get its own +- // scope. +- if n.Body != nil { +- r.walkStmts(n.Body.List) +- } +- +- case *ast.CommClause: +- r.openScope(n.Pos()) +- defer r.closeScope() +- if n.Comm != nil { +- ast.Walk(r, n.Comm) +- } +- r.walkStmts(n.Body) +- +- case *ast.SelectStmt: +- // as for switch statements, select statement bodies don't get their own +- // scope. +- if n.Body != nil { +- r.walkStmts(n.Body.List) +- } +- +- case *ast.ForStmt: +- r.openScope(n.Pos()) +- defer r.closeScope() +- if n.Init != nil { +- ast.Walk(r, n.Init) +- } +- if n.Cond != nil { +- ast.Walk(r, n.Cond) +- } +- if n.Post != nil { +- ast.Walk(r, n.Post) +- } +- ast.Walk(r, n.Body) +- +- case *ast.RangeStmt: +- r.openScope(n.Pos()) +- defer r.closeScope() +- ast.Walk(r, n.X) +- var lhs []ast.Expr +- if n.Key != nil { +- lhs = append(lhs, n.Key) +- } +- if n.Value != nil { +- lhs = append(lhs, n.Value) +- } +- if len(lhs) > 0 { +- if n.Tok == token.DEFINE { +- // Note: we can't exactly match the behavior of object resolution +- // during the parsing pass here, as it uses the position of the RANGE +- // token for the RHS OpPos. That information is not contained within +- // the AST. +- as := &ast.AssignStmt{ +- Lhs: lhs, +- Tok: token.DEFINE, +- TokPos: n.TokPos, +- Rhs: []ast.Expr{&ast.UnaryExpr{Op: token.RANGE, X: n.X}}, +- } +- // TODO(rFindley): this walkLHS reproduced the parser resolution, but +- // is it necessary? 
By comparison, for a normal AssignStmt we don't +- // walk the LHS in case there is an invalid identifier list. +- r.walkLHS(lhs) +- r.shortVarDecl(as) +- } else { +- r.walkExprs(lhs) +- } +- } +- ast.Walk(r, n.Body) +- +- // Declarations +- case *ast.GenDecl: +- switch n.Tok { +- case token.CONST, token.VAR: +- for i, spec := range n.Specs { +- spec := spec.(*ast.ValueSpec) +- kind := ast.Con +- if n.Tok == token.VAR { +- kind = ast.Var +- } +- r.walkExprs(spec.Values) +- if spec.Type != nil { +- ast.Walk(r, spec.Type) +- } +- r.declare(spec, i, r.topScope, kind, spec.Names...) +- } +- case token.TYPE: +- for _, spec := range n.Specs { +- spec := spec.(*ast.TypeSpec) +- // Go spec: The scope of a type identifier declared inside a function begins +- // at the identifier in the TypeSpec and ends at the end of the innermost +- // containing block. +- r.declare(spec, nil, r.topScope, ast.Typ, spec.Name) +- if spec.TypeParams != nil { +- r.openScope(spec.Pos()) +- defer r.closeScope() +- r.walkTParams(spec.TypeParams) +- } +- ast.Walk(r, spec.Type) +- } +- } +- +- case *ast.FuncDecl: +- // Open the function scope. +- r.openScope(n.Pos()) +- defer r.closeScope() +- +- r.walkRecv(n.Recv) +- +- // Type parameters are walked normally: they can reference each other, and +- // can be referenced by normal parameters. +- if n.Type.TypeParams != nil { +- r.walkTParams(n.Type.TypeParams) +- // TODO(rFindley): need to address receiver type parameters. +- } +- +- // Resolve and declare parameters in a specific order to get duplicate +- // declaration errors in the correct location. +- r.resolveList(n.Type.Params) +- r.resolveList(n.Type.Results) +- r.declareList(n.Recv, ast.Var) +- r.declareList(n.Type.Params, ast.Var) +- r.declareList(n.Type.Results, ast.Var) +- +- r.walkBody(n.Body) +- if n.Recv == nil && n.Name.Name != "init" { +- r.declare(n, nil, r.pkgScope, ast.Fun, n.Name) +- } +- +- default: +- return r +- } +- +- return nil +-} +- +-func (r *resolver) walkFuncType(typ *ast.FuncType) { +- // typ.TypeParams must be walked separately for FuncDecls. +- r.resolveList(typ.Params) +- r.resolveList(typ.Results) +- r.declareList(typ.Params, ast.Var) +- r.declareList(typ.Results, ast.Var) +-} +- +-func (r *resolver) resolveList(list *ast.FieldList) { +- if list == nil { +- return +- } +- for _, f := range list.List { +- if f.Type != nil { +- ast.Walk(r, f.Type) +- } +- } +-} +- +-func (r *resolver) declareList(list *ast.FieldList, kind ast.ObjKind) { +- if list == nil { +- return +- } +- for _, f := range list.List { +- r.declare(f, nil, r.topScope, kind, f.Names...) +- } +-} +- +-func (r *resolver) walkRecv(recv *ast.FieldList) { +- // If our receiver has receiver type parameters, we must declare them before +- // trying to resolve the rest of the receiver, and avoid re-resolving the +- // type parameter identifiers. 
+- if recv == nil || len(recv.List) == 0 { +- return // nothing to do +- } +- typ := recv.List[0].Type +- if ptr, ok := typ.(*ast.StarExpr); ok { +- typ = ptr.X +- } +- +- var declareExprs []ast.Expr // exprs to declare +- var resolveExprs []ast.Expr // exprs to resolve +- switch typ := typ.(type) { +- case *ast.IndexExpr: +- declareExprs = []ast.Expr{typ.Index} +- resolveExprs = append(resolveExprs, typ.X) +- case *ast.IndexListExpr: +- declareExprs = typ.Indices +- resolveExprs = append(resolveExprs, typ.X) +- default: +- resolveExprs = append(resolveExprs, typ) +- } +- for _, expr := range declareExprs { +- if id, _ := expr.(*ast.Ident); id != nil { +- r.declare(expr, nil, r.topScope, ast.Typ, id) +- } else { +- // The receiver type parameter expression is invalid, but try to resolve +- // it anyway for consistency. +- resolveExprs = append(resolveExprs, expr) +- } +- } +- for _, expr := range resolveExprs { +- if expr != nil { +- ast.Walk(r, expr) +- } +- } +- // The receiver is invalid, but try to resolve it anyway for consistency. +- for _, f := range recv.List[1:] { +- if f.Type != nil { +- ast.Walk(r, f.Type) +- } +- } +-} +- +-func (r *resolver) walkFieldList(list *ast.FieldList, kind ast.ObjKind) { +- if list == nil { +- return +- } +- r.resolveList(list) +- r.declareList(list, kind) +-} +- +-// walkTParams is like walkFieldList, but declares type parameters eagerly so +-// that they may be resolved in the constraint expressions held in the field +-// Type. +-func (r *resolver) walkTParams(list *ast.FieldList) { +- r.declareList(list, ast.Typ) +- r.resolveList(list) +-} +- +-func (r *resolver) walkBody(body *ast.BlockStmt) { +- if body == nil { +- return +- } +- r.openLabelScope() +- defer r.closeLabelScope() +- r.walkStmts(body.List) +-} +diff -urN a/gopls/internal/cache/parsego/resolver_test.go b/gopls/internal/cache/parsego/resolver_test.go +--- a/gopls/internal/cache/parsego/resolver_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/cache/parsego/resolver_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,158 +0,0 @@ +-// Copyright 2024 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package parsego +- +-import ( +- "go/ast" +- "go/types" +- "os" +- "strings" +- "testing" +- +- "golang.org/x/tools/go/ast/astutil" +- "golang.org/x/tools/go/packages" +- "golang.org/x/tools/gopls/internal/util/safetoken" +- "golang.org/x/tools/internal/testenv" +-) +- +-// TestGoplsSourceDoesNotUseObjectResolution verifies that gopls does not +-// read fields that are set during syntactic object resolution, except in +-// locations where we can guarantee that object resolution has occurred. This +-// is achieved via static analysis of gopls source code to find references to +-// the legacy Object symbols, checking the results against an allowlist +-// +-// Reading these fields would introduce a data race, due to the lazy +-// resolution implemented by File.Resolve. 
+-func TestGoplsSourceDoesNotUseObjectResolution(t *testing.T) { +- +- testenv.NeedsGoPackages(t) +- testenv.NeedsLocalXTools(t) +- +- cfg := &packages.Config{ +- Mode: packages.NeedName | packages.NeedModule | packages.NeedCompiledGoFiles | packages.NeedTypes | packages.NeedTypesInfo | packages.NeedSyntax | packages.NeedImports | packages.NeedDeps, +- } +- cfg.Env = os.Environ() +- cfg.Env = append(cfg.Env, +- "GOPACKAGESDRIVER=off", +- "GOWORK=off", // necessary for -mod=mod below +- "GOFLAGS=-mod=mod", +- ) +- +- pkgs, err := packages.Load(cfg, +- "go/ast", +- "golang.org/x/tools/go/ast/astutil", +- "golang.org/x/tools/gopls/...") +- +- if err != nil { +- t.Fatal(err) +- } +- var astPkg, astutilPkg *packages.Package +- for _, pkg := range pkgs { +- switch pkg.PkgPath { +- case "go/ast": +- astPkg = pkg +- case "golang.org/x/tools/go/ast/astutil": +- astutilPkg = pkg +- } +- } +- if astPkg == nil { +- t.Fatal("missing package go/ast") +- } +- if astutilPkg == nil { +- t.Fatal("missing package golang.org/x/tools/go/ast/astutil") +- } +- +- File := astPkg.Types.Scope().Lookup("File").Type() +- Ident := astPkg.Types.Scope().Lookup("Ident").Type() +- +- Scope, _, _ := types.LookupFieldOrMethod(File, true, astPkg.Types, "Scope") +- assert(Scope != nil, "nil Scope") +- Unresolved, _, _ := types.LookupFieldOrMethod(File, true, astPkg.Types, "Unresolved") +- assert(Unresolved != nil, "nil unresolved") +- Obj, _, _ := types.LookupFieldOrMethod(Ident, true, astPkg.Types, "Obj") +- assert(Obj != nil, "nil Obj") +- UsesImport := astutilPkg.Types.Scope().Lookup("UsesImport") +- assert(UsesImport != nil, "nil UsesImport") +- +- disallowed := map[types.Object]bool{ +- Scope: true, +- Unresolved: true, +- Obj: true, +- UsesImport: true, +- } +- +- // exceptions catalogues packages or declarations that are allowed to use +- // forbidden symbols, with a rationale. +- // +- // - If the exception ends with '/', it is a prefix. +- // - If it ends with a qualified name, it is a declaration. +- // - Otherwise, it is an exact package path. +- // +- // TODO(rfindley): some sort of callgraph analysis would make these +- // exceptions much easier to maintain. 
+- exceptions := []string{ +- "golang.org/x/tools/go/analysis/passes/", // analyzers may rely on object resolution +- "golang.org/x/tools/gopls/internal/analysis/simplifyslice", // restrict ourselves to one blessed analyzer +- "golang.org/x/tools/gopls/internal/cache/parsego", // used by parsego.File.Resolve, of course +- "golang.org/x/tools/gopls/internal/golang.builtinDecl", // the builtin file is resolved +- "golang.org/x/tools/gopls/internal/golang.NewBuiltinSignature", // ditto +- "golang.org/x/tools/gopls/internal/golang/completion.builtinArgKind", // ditto +- "golang.org/x/tools/internal/imports", // goimports does its own parsing +- "golang.org/x/tools/go/ast/astutil.UsesImport", // disallowed +- "golang.org/x/tools/go/ast/astutil.isTopName", // only reached from astutil.UsesImport +- "go/ast", +- "go/parser", +- "go/doc", // manually verified that our usage is safe +- } +- +- packages.Visit(pkgs, nil, func(pkg *packages.Package) { +- for _, exception := range exceptions { +- if strings.HasSuffix(exception, "/") { +- if strings.HasPrefix(pkg.PkgPath, exception) { +- return +- } +- } else if pkg.PkgPath == exception { +- return +- } +- } +- +- searchUses: +- for ident, obj := range pkg.TypesInfo.Uses { +- if disallowed[obj] { +- decl := findEnclosingFuncDecl(ident, pkg) +- if decl == "" { +- posn := safetoken.Position(pkg.Fset.File(ident.Pos()), ident.Pos()) +- t.Fatalf("%s: couldn't find enclosing decl for use of %s", posn, ident.Name) +- } +- qualified := pkg.PkgPath + "." + decl +- for _, exception := range exceptions { +- if exception == qualified { +- continue searchUses +- } +- } +- posn := safetoken.StartPosition(pkg.Fset, ident.Pos()) +- t.Errorf("%s: forbidden use of %v in %s", posn, obj, qualified) +- } +- } +- }) +-} +- +-// findEnclosingFuncDecl finds the name of the func decl enclosing the usage, +-// or "". +-// +-// (Usage could theoretically exist in e.g. var initializers, but that would be +-// odd.) +-func findEnclosingFuncDecl(ident *ast.Ident, pkg *packages.Package) string { +- for _, file := range pkg.Syntax { +- if file.FileStart <= ident.Pos() && ident.Pos() < file.FileEnd { +- path, _ := astutil.PathEnclosingInterval(file, ident.Pos(), ident.End()) +- decl, ok := path[len(path)-2].(*ast.FuncDecl) +- if ok { +- return decl.Name.Name +- } +- } +- } +- return "" +-} +diff -urN a/gopls/internal/cache/port.go b/gopls/internal/cache/port.go +--- a/gopls/internal/cache/port.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/cache/port.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,205 +0,0 @@ +-// Copyright 2023 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package cache +- +-import ( +- "bytes" +- "go/build" +- "go/build/constraint" +- "go/parser" +- "go/token" +- "io" +- "path/filepath" +- "strings" +- +- "golang.org/x/tools/gopls/internal/util/bug" +-) +- +-type port struct{ GOOS, GOARCH string } +- +-var ( +- // preferredPorts holds GOOS/GOARCH combinations for which we dynamically +- // create new Views, by setting GOOS=... and GOARCH=... on top of +- // user-provided configuration when we detect that the default build +- // configuration does not match an open file. Ports are matched in the order +- // defined below, so that when multiple ports match a file we use the port +- // occurring at a lower index in the slice. 
For that reason, we sort first +- // class ports ahead of secondary ports, and (among first class ports) 64-bit +- // ports ahead of the less common 32-bit ports. +- preferredPorts = []port{ +- // First class ports, from https://go.dev/wiki/PortingPolicy. +- {"darwin", "amd64"}, +- {"darwin", "arm64"}, +- {"linux", "amd64"}, +- {"linux", "arm64"}, +- {"windows", "amd64"}, +- {"linux", "arm"}, +- {"linux", "386"}, +- {"windows", "386"}, +- +- // Secondary ports, from GOROOT/src/internal/platform/zosarch.go. +- // (First class ports are commented out.) +- {"aix", "ppc64"}, +- {"dragonfly", "amd64"}, +- {"freebsd", "386"}, +- {"freebsd", "amd64"}, +- {"freebsd", "arm"}, +- {"freebsd", "arm64"}, +- {"illumos", "amd64"}, +- {"linux", "ppc64"}, +- {"linux", "ppc64le"}, +- {"linux", "mips"}, +- {"linux", "mipsle"}, +- {"linux", "mips64"}, +- {"linux", "mips64le"}, +- {"linux", "riscv64"}, +- {"linux", "s390x"}, +- {"android", "386"}, +- {"android", "amd64"}, +- {"android", "arm"}, +- {"android", "arm64"}, +- {"ios", "arm64"}, +- {"ios", "amd64"}, +- {"js", "wasm"}, +- {"netbsd", "386"}, +- {"netbsd", "amd64"}, +- {"netbsd", "arm"}, +- {"netbsd", "arm64"}, +- {"openbsd", "386"}, +- {"openbsd", "amd64"}, +- {"openbsd", "arm"}, +- {"openbsd", "arm64"}, +- {"openbsd", "mips64"}, +- {"plan9", "386"}, +- {"plan9", "amd64"}, +- {"plan9", "arm"}, +- {"solaris", "amd64"}, +- {"windows", "arm"}, +- {"windows", "arm64"}, +- +- {"aix", "ppc64"}, +- {"android", "386"}, +- {"android", "amd64"}, +- {"android", "arm"}, +- {"android", "arm64"}, +- // {"darwin", "amd64"}, +- // {"darwin", "arm64"}, +- {"dragonfly", "amd64"}, +- {"freebsd", "386"}, +- {"freebsd", "amd64"}, +- {"freebsd", "arm"}, +- {"freebsd", "arm64"}, +- {"freebsd", "riscv64"}, +- {"illumos", "amd64"}, +- {"ios", "amd64"}, +- {"ios", "arm64"}, +- {"js", "wasm"}, +- // {"linux", "386"}, +- // {"linux", "amd64"}, +- // {"linux", "arm"}, +- // {"linux", "arm64"}, +- {"linux", "loong64"}, +- {"linux", "mips"}, +- {"linux", "mips64"}, +- {"linux", "mips64le"}, +- {"linux", "mipsle"}, +- {"linux", "ppc64"}, +- {"linux", "ppc64le"}, +- {"linux", "riscv64"}, +- {"linux", "s390x"}, +- {"linux", "sparc64"}, +- {"netbsd", "386"}, +- {"netbsd", "amd64"}, +- {"netbsd", "arm"}, +- {"netbsd", "arm64"}, +- {"openbsd", "386"}, +- {"openbsd", "amd64"}, +- {"openbsd", "arm"}, +- {"openbsd", "arm64"}, +- {"openbsd", "mips64"}, +- {"openbsd", "ppc64"}, +- {"openbsd", "riscv64"}, +- {"plan9", "386"}, +- {"plan9", "amd64"}, +- {"plan9", "arm"}, +- {"solaris", "amd64"}, +- {"wasip1", "wasm"}, +- // {"windows", "386"}, +- // {"windows", "amd64"}, +- {"windows", "arm"}, +- {"windows", "arm64"}, +- } +-) +- +-// matches reports whether the port matches a file with the given absolute path +-// and content. +-// +-// Note that this function accepts content rather than e.g. a file.Handle, +-// because we trim content before matching for performance reasons, and +-// therefore need to do this outside of matches when considering multiple ports. +-func (p port) matches(path string, content []byte) bool { +- ctxt := build.Default // make a copy +- ctxt.UseAllFiles = false +- path = filepath.Clean(path) +- if !filepath.IsAbs(path) { +- bug.Reportf("non-abs file path %q", path) +- return false // fail closed +- } +- dir, name := filepath.Split(path) +- +- // The only virtualized operation called by MatchFile is OpenFile. 
+- ctxt.OpenFile = func(p string) (io.ReadCloser, error) { +- if p != path { +- return nil, bug.Errorf("unexpected file %q", p) +- } +- return io.NopCloser(bytes.NewReader(content)), nil +- } +- +- ctxt.GOOS = p.GOOS +- ctxt.GOARCH = p.GOARCH +- ok, err := ctxt.MatchFile(dir, name) +- return err == nil && ok +-} +- +-// trimContentForPortMatch trims the given Go file content to a minimal file +-// containing the same build constraints, if any. +-// +-// This is an unfortunate but necessary optimization, as matching build +-// constraints using go/build has significant overhead, and involves parsing +-// more than just the build constraint. +-// +-// TestMatchingPortsConsistency enforces consistency by comparing results +-// without trimming content. +-func trimContentForPortMatch(content []byte) []byte { +- buildComment := buildComment(content) +- // The package name does not matter, but +build lines +- // require a blank line before the package declaration. +- return []byte(buildComment + "\n\npackage p") +-} +- +-// buildComment returns the first matching //go:build comment in the given +-// content, or "" if none exists. +-func buildComment(content []byte) string { +- var lines []string +- +- f, err := parser.ParseFile(token.NewFileSet(), "", content, parser.PackageClauseOnly|parser.ParseComments) +- if err != nil { +- return "" +- } +- +- for _, cg := range f.Comments { +- for _, c := range cg.List { +- if constraint.IsGoBuild(c.Text) { +- // A file must have only one //go:build line. +- return c.Text +- } +- if constraint.IsPlusBuild(c.Text) { +- // A file may have several // +build lines. +- lines = append(lines, c.Text) +- } +- } +- } +- return strings.Join(lines, "\n") +-} +diff -urN a/gopls/internal/cache/port_test.go b/gopls/internal/cache/port_test.go +--- a/gopls/internal/cache/port_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/cache/port_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,123 +0,0 @@ +-// Copyright 2023 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package cache +- +-import ( +- "os" +- "testing" +- +- "github.com/google/go-cmp/cmp" +- "golang.org/x/sync/errgroup" +- "golang.org/x/tools/go/packages" +- "golang.org/x/tools/gopls/internal/file" +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/gopls/internal/util/bug" +- "golang.org/x/tools/internal/testenv" +-) +- +-func TestMain(m *testing.M) { +- bug.PanicOnBugs = true +- os.Exit(m.Run()) +-} +- +-func TestMatchingPortsStdlib(t *testing.T) { +- // This test checks that we don't encounter a bug when matching ports, and +- // sanity checks that the optimization to use trimmed/fake file content +- // before delegating to go/build.Context.MatchFile does not affect +- // correctness. +- if testing.Short() { +- t.Skip("skipping in short mode: takes to long on slow file systems") +- } +- +- testenv.NeedsTool(t, "go") +- +- // Load, parse and type-check the program. +- cfg := &packages.Config{ +- Mode: packages.LoadFiles, +- Tests: true, +- } +- pkgs, err := packages.Load(cfg, "std", "cmd") +- if err != nil { +- t.Fatal(err) +- } +- +- var g errgroup.Group +- packages.Visit(pkgs, nil, func(pkg *packages.Package) { +- for _, f := range pkg.CompiledGoFiles { +- g.Go(func() error { +- content, err := os.ReadFile(f) +- // We report errors via t.Error, not by returning, +- // so that a single test can report multiple test failures. 
+- if err != nil { +- t.Errorf("failed to read %s: %v", f, err) +- return nil +- } +- fh := makeFakeFileHandle(protocol.URIFromPath(f), content) +- fastPorts := matchingPreferredPorts(t, fh, true) +- slowPorts := matchingPreferredPorts(t, fh, false) +- if diff := cmp.Diff(fastPorts, slowPorts); diff != "" { +- t.Errorf("%s: ports do not match (-trimmed +untrimmed):\n%s", f, diff) +- return nil +- } +- return nil +- }) +- } +- }) +- _ = g.Wait() // can't fail +-} +- +-func matchingPreferredPorts(tb testing.TB, fh file.Handle, trimContent bool) map[port]unit { +- content, err := fh.Content() +- if err != nil { +- tb.Fatal(err) +- } +- if trimContent { +- content = trimContentForPortMatch(content) +- } +- path := fh.URI().Path() +- matching := make(map[port]unit) +- for _, port := range preferredPorts { +- if port.matches(path, content) { +- matching[port] = unit{} +- } +- } +- return matching +-} +- +-func BenchmarkMatchingPreferredPorts(b *testing.B) { +- // Copy of robustio_posix.go +- const src = ` +-// Copyright 2022 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-//go:build unix +-// +build unix +- +-package robustio +- +-import ( +- "os" +- "syscall" +- "time" +-) +- +-func getFileID(filename string) (FileID, time.Time, error) { +- fi, err := os.Stat(filename) +- if err != nil { +- return FileID{}, time.Time{}, err +- } +- stat := fi.Sys().(*syscall.Stat_t) +- return FileID{ +- device: uint64(stat.Dev), // (int32 on darwin, uint64 on linux) +- inode: stat.Ino, +- }, fi.ModTime(), nil +-} +-` +- fh := makeFakeFileHandle("file:///path/to/test/file.go", []byte(src)) +- for b.Loop() { +- _ = matchingPreferredPorts(b, fh, true) +- } +-} +diff -urN a/gopls/internal/cache/session.go b/gopls/internal/cache/session.go +--- a/gopls/internal/cache/session.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/cache/session.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,1244 +0,0 @@ +-// Copyright 2019 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package cache +- +-import ( +- "context" +- "errors" +- "fmt" +- "maps" +- "os" +- "path/filepath" +- "slices" +- "strconv" +- "strings" +- "sync" +- "sync/atomic" +- "time" +- +- "golang.org/x/tools/gopls/internal/cache/metadata" +- "golang.org/x/tools/gopls/internal/cache/typerefs" +- "golang.org/x/tools/gopls/internal/file" +- "golang.org/x/tools/gopls/internal/label" +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/gopls/internal/settings" +- "golang.org/x/tools/gopls/internal/util/bug" +- "golang.org/x/tools/gopls/internal/util/memoize" +- "golang.org/x/tools/gopls/internal/util/persistent" +- "golang.org/x/tools/gopls/internal/vulncheck" +- "golang.org/x/tools/internal/event" +- "golang.org/x/tools/internal/event/keys" +- "golang.org/x/tools/internal/gocommand" +- "golang.org/x/tools/internal/imports" +- "golang.org/x/tools/internal/xcontext" +-) +- +-// NewSession creates a new gopls session with the given cache. 
+-func NewSession(ctx context.Context, c *Cache) *Session { +- index := atomic.AddInt64(&sessionIndex, 1) +- s := &Session{ +- id: strconv.FormatInt(index, 10), +- cache: c, +- gocmdRunner: &gocommand.Runner{}, +- overlayFS: newOverlayFS(c), +- parseCache: newParseCache(1 * time.Minute), // keep recently parsed files for a minute, to optimize typing CPU +- viewMap: make(map[protocol.DocumentURI]*View), +- } +- event.Log(ctx, "New session", KeyCreateSession.Of(s)) +- return s +-} +- +-// A Session holds the state (views, file contents, parse cache, +-// memoized computations) of a gopls server process. +-// +-// It implements the file.Source interface. +-type Session struct { +- // Unique identifier for this session. +- id string +- +- // Immutable attributes shared across views. +- cache *Cache // shared cache +- gocmdRunner *gocommand.Runner // limits go command concurrency +- +- viewMu sync.Mutex +- views []*View +- viewMap map[protocol.DocumentURI]*View // file->best view or nil; nil after shutdown; the key must be a clean uri. +- +- // snapshots is a counting semaphore that records the number +- // of unreleased snapshots associated with this session. +- // Shutdown waits for it to fall to zero. +- snapshotWG sync.WaitGroup +- +- parseCache *parseCache +- +- *overlayFS +-} +- +-// ID returns the unique identifier for this session on this server. +-func (s *Session) ID() string { return s.id } +-func (s *Session) String() string { return s.id } +- +-// GoCommandRunner returns the gocommand Runner for this session. +-func (s *Session) GoCommandRunner() *gocommand.Runner { +- return s.gocmdRunner +-} +- +-// Shutdown the session and all views it has created. +-func (s *Session) Shutdown(ctx context.Context) { +- var views []*View +- s.viewMu.Lock() +- views = append(views, s.views...) +- s.views = nil +- s.viewMap = nil +- s.viewMu.Unlock() +- for _, view := range views { +- view.shutdown() +- } +- s.parseCache.stop() +- s.snapshotWG.Wait() // wait for all work on associated snapshots to finish +- event.Log(ctx, "Shutdown session", KeyShutdownSession.Of(s)) +-} +- +-// Cache returns the cache that created this session, for debugging only. +-func (s *Session) Cache() *Cache { +- return s.cache +-} +- +-// TODO(rfindley): is the logic surrounding this error actually necessary? +-var ErrViewExists = errors.New("view already exists for session") +- +-// NewView creates a new View, returning it and its first snapshot. If a +-// non-empty tempWorkspace directory is provided, the View will record a copy +-// of its gopls workspace module in that directory, so that client tooling +-// can execute in the same main module. On success it also returns a release +-// function that must be called when the Snapshot is no longer needed. +-func (s *Session) NewView(ctx context.Context, folder *Folder) (*View, *Snapshot, func(), error) { +- s.viewMu.Lock() +- defer s.viewMu.Unlock() +- +- if s.viewMap == nil { +- return nil, nil, nil, fmt.Errorf("session is shut down") +- } +- +- // Querying the file system to check whether +- // two folders denote the same existing directory. 
+- if inode1, err := os.Stat(filepath.FromSlash(folder.Dir.Path())); err == nil { +- for _, view := range s.views { +- inode2, err := os.Stat(filepath.FromSlash(view.folder.Dir.Path())) +- if err == nil && os.SameFile(inode1, inode2) { +- return nil, nil, nil, ErrViewExists +- } +- } +- } +- +- def, err := defineView(ctx, s, folder, nil) +- if err != nil { +- return nil, nil, nil, err +- } +- view, snapshot, release := s.createView(ctx, def) +- s.views = append(s.views, view) +- s.viewMap[folder.Dir.Clean()] = view +- return view, snapshot, release, nil +-} +- +-// HasView checks whether the uri's view exists. +-func (s *Session) HasView(uri protocol.DocumentURI) bool { +- uri = uri.Clean() +- s.viewMu.Lock() +- defer s.viewMu.Unlock() +- _, ok := s.viewMap[uri] +- return ok +-} +- +-// createView creates a new view, with an initial snapshot that retains the +-// supplied context, detached from events and cancellation. +-// +-// The caller is responsible for calling the release function once. +-func (s *Session) createView(ctx context.Context, def *viewDefinition) (*View, *Snapshot, func()) { +- index := atomic.AddInt64(&viewIndex, 1) +- +- // We want a true background context and not a detached context here +- // the spans need to be unrelated and no tag values should pollute it. +- baseCtx := event.Detach(xcontext.Detach(ctx)) +- backgroundCtx, cancel := context.WithCancel(baseCtx) +- +- // Compute a skip function to use for module cache scanning. +- // +- // Note that unlike other filtering operations, we definitely don't want to +- // exclude the gomodcache here, even if it is contained in the workspace +- // folder. +- // +- // TODO(rfindley): consolidate with relPathExcludedByFilter(Func), Filterer, +- // View.filterFunc. +- var skipPath func(string) bool +- { +- // Compute a prefix match, respecting segment boundaries, by ensuring +- // the pattern (dir) has a trailing slash. +- dirPrefix := strings.TrimSuffix(string(def.folder.Dir), "/") + "/" +- pathIncluded := PathIncludeFunc(def.folder.Options.DirectoryFilters) +- skipPath = func(dir string) bool { +- uri := strings.TrimSuffix(string(protocol.URIFromPath(dir)), "/") +- // Note that the logic below doesn't handle the case where uri == +- // v.folder.Dir, because there is no point in excluding the entire +- // workspace folder! +- if rel, ok := strings.CutPrefix(uri, dirPrefix); ok { +- return !pathIncluded(rel) +- } +- return false +- } +- } +- +- var ignoreFilter *ignoreFilter +- { +- var dirs []string +- if len(def.workspaceModFiles) == 0 { +- for _, entry := range filepath.SplitList(def.folder.Env.GOPATH) { +- dirs = append(dirs, filepath.Join(entry, "src")) +- } +- } else { +- dirs = append(dirs, def.folder.Env.GOMODCACHE) +- for m := range def.workspaceModFiles { +- dirs = append(dirs, m.DirPath()) +- } +- } +- ignoreFilter = newIgnoreFilter(dirs) +- } +- +- var pe *imports.ProcessEnv +- { +- env := make(map[string]string) +- envSlice := slices.Concat(os.Environ(), def.folder.Options.EnvSlice(), []string{"GO111MODULE=" + def.adjustedGO111MODULE()}) +- for _, kv := range envSlice { +- if k, v, ok := strings.Cut(kv, "="); ok { +- env[k] = v +- } +- } +- pe = &imports.ProcessEnv{ +- GocmdRunner: s.gocmdRunner, +- BuildFlags: slices.Clone(def.folder.Options.BuildFlags), +- // TODO(rfindley): an old comment said "processEnv operations should not mutate the modfile" +- // But shouldn't we honor the default behavior of mod vendoring? 
+- ModFlag: "readonly", +- SkipPathInScan: skipPath, +- Env: env, +- WorkingDir: def.root.Path(), +- ModCache: s.cache.modCache.dirCache(def.folder.Env.GOMODCACHE), +- } +- if def.folder.Options.VerboseOutput { +- pe.Logf = func(format string, args ...any) { +- event.Log(ctx, fmt.Sprintf(format, args...)) +- } +- } +- } +- +- v := &View{ +- id: strconv.FormatInt(index, 10), +- gocmdRunner: s.gocmdRunner, +- initialWorkspaceLoad: make(chan struct{}), +- initializationSema: make(chan struct{}, 1), +- baseCtx: baseCtx, +- pkgIndex: typerefs.NewPackageIndex(), +- parseCache: s.parseCache, +- ignoreFilter: ignoreFilter, +- fs: s.overlayFS, +- viewDefinition: def, +- importsState: newImportsState(backgroundCtx, s.cache.modCache, pe), +- } +- +- // Keep this in sync with golang.computeImportEdits. +- // +- // TODO(rfindley): encapsulate the imports state logic so that the handling +- // for Options.ImportsSource is in a single location. +- if def.folder.Options.ImportsSource == settings.ImportsSourceGopls { +- v.modcacheState = newModcacheState(def.folder.Env.GOMODCACHE) +- } +- +- s.snapshotWG.Add(1) +- v.snapshot = &Snapshot{ +- view: v, +- backgroundCtx: backgroundCtx, +- cancel: cancel, +- store: s.cache.store, +- refcount: 1, // Snapshots are born referenced. +- done: s.snapshotWG.Done, +- packages: new(persistent.Map[PackageID, *packageHandle]), +- fullAnalysisKeys: new(persistent.Map[PackageID, file.Hash]), +- factyAnalysisKeys: new(persistent.Map[PackageID, file.Hash]), +- meta: new(metadata.Graph), +- files: newFileMap(), +- shouldLoad: new(persistent.Map[PackageID, []PackagePath]), +- unloadableFiles: new(persistent.Set[protocol.DocumentURI]), +- parseModHandles: new(persistent.Map[protocol.DocumentURI, *memoize.Promise]), +- parseWorkHandles: new(persistent.Map[protocol.DocumentURI, *memoize.Promise]), +- modTidyHandles: new(persistent.Map[protocol.DocumentURI, *memoize.Promise]), +- modVulnHandles: new(persistent.Map[protocol.DocumentURI, *memoize.Promise]), +- modWhyHandles: new(persistent.Map[protocol.DocumentURI, *memoize.Promise]), +- moduleUpgrades: new(persistent.Map[protocol.DocumentURI, map[string]string]), +- vulns: new(persistent.Map[protocol.DocumentURI, *vulncheck.Result]), +- } +- +- // Snapshots must observe all open files, as there are some caching +- // heuristics that change behavior depending on open files. +- for _, o := range s.overlayFS.Overlays() { +- _, _ = v.snapshot.ReadFile(ctx, o.URI()) +- } +- +- // Record the environment of the newly created view in the log. +- event.Log(ctx, fmt.Sprintf("Created View (#%s)", v.id), +- label.Directory.Of(v.folder.Dir.Path()), +- viewTypeKey.Of(v.typ.String()), +- rootDirKey.Of(string(v.root)), +- goVersionKey.Of(strings.TrimRight(v.folder.Env.GoVersionOutput, "\n")), +- buildFlagsKey.Of(fmt.Sprint(v.folder.Options.BuildFlags)), +- envKey.Of(fmt.Sprintf("%+v", v.folder.Env)), +- envOverlayKey.Of(v.EnvOverlay()), +- ) +- +- // Initialize the view without blocking. +- initCtx, initCancel := context.WithCancel(xcontext.Detach(ctx)) +- v.cancelInitialWorkspaceLoad = initCancel +- snapshot := v.snapshot +- +- // Pass a second reference to the background goroutine. +- bgRelease := snapshot.Acquire() +- go func() { +- defer bgRelease() +- snapshot.initialize(initCtx, true) +- }() +- +- // Return a third reference to the caller. +- return v, snapshot, snapshot.Acquire() +-} +- +-// These keys are used to log view metadata in createView. 
+-var ( +- viewTypeKey = keys.NewString("view_type", "") +- rootDirKey = keys.NewString("root_dir", "") +- goVersionKey = keys.NewString("go_version", "") +- buildFlagsKey = keys.New("build_flags", "") +- envKey = keys.New("env", "") +- envOverlayKey = keys.New("env_overlay", "") +-) +- +-// RemoveView removes from the session the view rooted at the specified directory. +-// It reports whether a view of that directory was removed. +-func (s *Session) RemoveView(ctx context.Context, dir protocol.DocumentURI) bool { +- s.viewMu.Lock() +- defer s.viewMu.Unlock() +- +- if s.viewMap == nil { +- return false // Session is shutdown. +- } +- s.viewMap = make(map[protocol.DocumentURI]*View) // reset view associations +- +- var newViews []*View +- for _, view := range s.views { +- if view.folder.Dir == dir { +- view.shutdown() +- } else { +- newViews = append(newViews, view) +- } +- } +- removed := len(s.views) - len(newViews) +- if removed != 1 { +- // This isn't a bug report, because it could be a client-side bug. +- event.Error(ctx, "removing view", fmt.Errorf("removed %d views, want exactly 1", removed)) +- } +- s.views = newViews +- return removed > 0 +-} +- +-// View returns the view with a matching id, if present. +-func (s *Session) View(id string) (*View, error) { +- s.viewMu.Lock() +- defer s.viewMu.Unlock() +- for _, view := range s.views { +- if view.ID() == id { +- return view, nil +- } +- } +- return nil, fmt.Errorf("no view with ID %q", id) +-} +- +-// SnapshotOf returns a Snapshot corresponding to the given URI. +-// +-// In the case where the file can be can be associated with a View by +-// [RelevantViews] (based on directory information alone, without package +-// metadata), SnapshotOf returns the current Snapshot for that View. Otherwise, +-// it awaits loading package metadata and returns a Snapshot for the first View +-// containing a real (=not command-line-arguments) package for the file. +-// +-// If that also fails to find a View, SnapshotOf returns a Snapshot for the +-// first view in s.views that is not shut down (i.e. s.views[0] unless we lose +-// a race), for determinism in tests and so that we tend to aggregate the +-// resulting command-line-arguments packages into a single view. +-// +-// SnapshotOf returns an error if a failure occurs along the way (most likely due +-// to context cancellation), or if there are no Views in the Session. +-// +-// On success, the caller must call the returned function to release the snapshot. +-func (s *Session) SnapshotOf(ctx context.Context, uri protocol.DocumentURI) (*Snapshot, func(), error) { +- uri = uri.Clean() +- // Fast path: if the uri has a static association with a view, return it. +- s.viewMu.Lock() +- v, err := s.viewOfLocked(ctx, uri) +- s.viewMu.Unlock() +- +- if err != nil { +- return nil, nil, err +- } +- +- if v != nil { +- snapshot, release, err := v.Snapshot() +- if err == nil { +- return snapshot, release, nil +- } +- // View is shut down. Forget this association. +- s.viewMu.Lock() +- if s.viewMap[uri] == v { +- delete(s.viewMap, uri) +- } +- s.viewMu.Unlock() +- } +- +- // Fall-back: none of the views could be associated with uri based on +- // directory information alone. +- // +- // Don't memoize the view association in viewMap, as it is not static: Views +- // may change as metadata changes. +- // +- // TODO(rfindley): we could perhaps optimize this case by peeking at existing +- // metadata before awaiting the load (after all, a load only adds metadata). 
+- // But that seems potentially tricky, when in the common case no loading +- // should be required. +- views := s.Views() +- for _, v := range views { +- snapshot, release, err := v.Snapshot() +- if err != nil { +- continue // view was shut down +- } +- // We don't check the error from awaitLoaded, because a load failure (that +- // doesn't result from context cancellation) should not prevent us from +- // continuing to search for the best view. +- _ = snapshot.awaitLoaded(ctx) +- g := snapshot.MetadataGraph() +- if ctx.Err() != nil { +- release() +- return nil, nil, ctx.Err() +- } +- // Special handling for the builtin file, since it doesn't have packages. +- if snapshot.IsBuiltin(uri) { +- return snapshot, release, nil +- } +- // Only match this view if it loaded a real package for the file. +- // +- // Any view can load a command-line-arguments package; aggregate those into +- // views[0] below. +- for _, pkg := range g.ForFile[uri] { +- if !metadata.IsCommandLineArguments(pkg.ID) || pkg.Standalone { +- return snapshot, release, nil +- } +- } +- release() +- } +- +- for _, v := range views { +- snapshot, release, err := v.Snapshot() +- if err == nil { +- return snapshot, release, nil // first valid snapshot +- } +- } +- return nil, nil, errNoViews +-} +- +-// FileOf returns the file for a given URI and its snapshot. +-// On success, the returned function must be called to release the snapshot. +-func (s *Session) FileOf(ctx context.Context, uri protocol.DocumentURI) (file.Handle, *Snapshot, func(), error) { +- snapshot, release, err := s.SnapshotOf(ctx, uri) +- if err != nil { +- return nil, nil, nil, err +- } +- fh, err := snapshot.ReadFile(ctx, uri) +- if err != nil { +- release() +- return nil, nil, nil, err +- } +- return fh, snapshot, release, nil +-} +- +-// errNoViews is sought by orphaned file diagnostics, to detect the case where +-// we have no view containing a file. +-var errNoViews = errors.New("no views") +- +-// viewOfLocked evaluates the best view for uri, memoizing its result in +-// s.viewMap. +-// +-// Precondition: caller holds s.viewMu lock; uri must be clean. +-// +-// May return (nil, nil) if no best view can be determined. +-func (s *Session) viewOfLocked(ctx context.Context, uri protocol.DocumentURI) (*View, error) { +- if s.viewMap == nil { +- return nil, errors.New("session is shut down") +- } +- v, hit := s.viewMap[uri] +- if !hit { +- // Cache miss: compute (and memoize) the best view. +- fh, err := s.ReadFile(ctx, uri) +- if err != nil { +- return nil, err +- } +- relevantViews, err := RelevantViews(ctx, s, fh.URI(), s.views) +- if err != nil { +- return nil, err +- } +- v = matchingView(fh, relevantViews) +- if v == nil && len(relevantViews) > 0 { +- // If we have relevant views, but none of them matched the file's build +- // constraints, then we are still better off using one of them here. +- // Otherwise, logic may fall back to an inferior view, which lacks +- // relevant module information, leading to misleading diagnostics. +- // (as in golang/go#60776). +- v = relevantViews[0] +- } +- s.viewMap[uri] = v // may be nil +- } +- return v, nil +-} +- +-func (s *Session) Views() []*View { +- s.viewMu.Lock() +- defer s.viewMu.Unlock() +- result := make([]*View, len(s.views)) +- copy(result, s.views) +- return result +-} +- +-// selectViewDefs constructs the best set of views covering the provided workspace +-// folders and open files. +-// +-// This implements the zero-config algorithm of golang/go#57979. 
+-func selectViewDefs(ctx context.Context, fs file.Source, folders []*Folder, openFiles []protocol.DocumentURI) ([]*viewDefinition, error) { +- var defs []*viewDefinition +- +- // First, compute a default view for each workspace folder. +- // TODO(golang/go#57979): technically, this is path dependent, since +- // DidChangeWorkspaceFolders could introduce a path-dependent ordering on +- // folders. We should keep folders sorted, or sort them here. +- for _, folder := range folders { +- def, err := defineView(ctx, fs, folder, nil) +- if err != nil { +- return nil, err +- } +- defs = append(defs, def) +- } +- +- // Next, ensure that the set of views covers all open files contained in a +- // workspace folder. +- // +- // We only do this for files contained in a workspace folder, because other +- // open files are most likely the result of jumping to a definition from a +- // workspace file; we don't want to create additional views in those cases: +- // they should be resolved after initialization. +- +- folderForFile := func(uri protocol.DocumentURI) *Folder { +- var longest *Folder +- for _, folder := range folders { +- // Check that this is a better match than longest, but not through a +- // vendor directory. Count occurrences of "/vendor/" as a quick check +- // that the vendor directory is between the folder and the file. Note the +- // addition of a trailing "/" to handle the odd case where the folder is named +- // vendor (which I hope is exceedingly rare in any case). +- // +- // Vendored packages are, by definition, part of an existing view. +- if (longest == nil || len(folder.Dir) > len(longest.Dir)) && +- folder.Dir.Encloses(uri) && +- strings.Count(string(uri), "/vendor/") == strings.Count(string(folder.Dir)+"/", "/vendor/") { +- +- longest = folder +- } +- } +- return longest +- } +- +-checkFiles: +- for _, uri := range openFiles { +- folder := folderForFile(uri) +- if folder == nil || !folder.Options.ZeroConfig { +- continue // only guess views for open files +- } +- fh, err := fs.ReadFile(ctx, uri) +- if err != nil { +- return nil, err +- } +- relevantViews, err := RelevantViews(ctx, fs, fh.URI(), defs) +- if err != nil { +- // We should never call selectViewDefs with a cancellable context, so +- // this should never fail. +- return nil, bug.Errorf("failed to find best view for open file: %v", err) +- } +- def := matchingView(fh, relevantViews) +- if def != nil { +- continue // file covered by an existing view +- } +- def, err = defineView(ctx, fs, folder, fh) +- if err != nil { +- // e.g. folder path is invalid? +- return nil, fmt.Errorf("failed to define view for open file: %v", err) +- } +- // It need not strictly be the case that the best view for a file is +- // distinct from other views, as the logic of getViewDefinition and +- // [RelevantViews] does not align perfectly. This is not necessarily a bug: +- // there may be files for which we can't construct a valid view. +- // +- // Nevertheless, we should not create redundant views. +- for _, alt := range defs { +- if viewDefinitionsEqual(alt, def) { +- continue checkFiles +- } +- } +- defs = append(defs, def) +- } +- +- return defs, nil +-} +- +-// The viewDefiner interface allows the [RelevantViews] algorithm to operate on both +-// Views and viewDefinitions. +-type viewDefiner interface{ definition() *viewDefinition } +- +-// RelevantViews returns the views that may contain the given URI, or nil if +-// none exist. A view is "relevant" if, ignoring build constraints, it may have +-// a workspace package containing uri. 
Therefore, the definition of relevance +-// depends on the view type. +-func RelevantViews[V viewDefiner](ctx context.Context, fs file.Source, uri protocol.DocumentURI, views []V) ([]V, error) { +- if len(views) == 0 { +- return nil, nil // avoid the call to findRootPattern +- } +- dir := uri.Dir() +- modURI, err := findRootPattern(ctx, dir, "go.mod", fs) +- if err != nil { +- return nil, err +- } +- +- // Prefer GoWork > GoMod > GOPATH > GoPackages > AdHoc. +- var ( +- goPackagesViews []V // prefer longest +- workViews []V // prefer longest +- modViews []V // exact match +- gopathViews []V // prefer longest +- adHocViews []V // exact match +- ) +- +- // pushView updates the views slice with the matching view v, using the +- // heuristic that views with a longer root are preferable. Accordingly, +- // pushView may be a no op if v's root is shorter than the roots in the views +- // slice. +- // +- // Invariant: the length of all roots in views is the same. +- pushView := func(views *[]V, v V) { +- if len(*views) == 0 { +- *views = []V{v} +- return +- } +- better := func(l, r V) bool { +- return len(l.definition().root) > len(r.definition().root) +- } +- existing := (*views)[0] +- switch { +- case better(existing, v): +- case better(v, existing): +- *views = []V{v} +- default: +- *views = append(*views, v) +- } +- } +- +- for _, view := range views { +- switch def := view.definition(); def.Type() { +- case GoPackagesDriverView: +- if def.root.Encloses(dir) { +- pushView(&goPackagesViews, view) +- } +- case GoWorkView: +- if _, ok := def.workspaceModFiles[modURI]; ok || uri == def.gowork { +- pushView(&workViews, view) +- } +- case GoModView: +- if _, ok := def.workspaceModFiles[modURI]; ok { +- modViews = append(modViews, view) +- } +- case GOPATHView: +- if def.root.Encloses(dir) { +- pushView(&gopathViews, view) +- } +- case AdHocView: +- if def.root == dir { +- adHocViews = append(adHocViews, view) +- } +- } +- } +- +- // Now that we've collected matching views, choose the best match, +- // considering ports. +- // +- // We only consider one type of view, since the matching view created by +- // defineView should be of the best type. +- var relevantViews []V +- switch { +- case len(workViews) > 0: +- relevantViews = workViews +- case len(modViews) > 0: +- relevantViews = modViews +- case len(gopathViews) > 0: +- relevantViews = gopathViews +- case len(goPackagesViews) > 0: +- relevantViews = goPackagesViews +- case len(adHocViews) > 0: +- relevantViews = adHocViews +- } +- +- return relevantViews, nil +-} +- +-// matchingView returns the View or viewDefinition out of relevantViews that +-// matches the given file's build constraints, or nil if no match is found. +-// +-// Making this function generic is convenient so that we can avoid mapping view +-// definitions back to views inside Session.DidModifyFiles, where performance +-// matters. It is, however, not the cleanest application of generics. +-// +-// Note: keep this function in sync with defineView. +-func matchingView[V viewDefiner](fh file.Handle, relevantViews []V) V { +- var zero V +- +- if len(relevantViews) == 0 { +- return zero +- } +- +- content, err := fh.Content() +- +- // Port matching doesn't apply to non-go files, or files that no longer exist. +- // Note that the behavior here on non-existent files shouldn't matter much, +- // since there will be a subsequent failure. +- if fileKind(fh) != file.Go || err != nil { +- return relevantViews[0] +- } +- +- // Find the first view that matches constraints. 
+- // Content trimming is nontrivial, so do this outside of the loop below. +- path := fh.URI().Path() +- content = trimContentForPortMatch(content) +- for _, v := range relevantViews { +- def := v.definition() +- viewPort := port{def.GOOS(), def.GOARCH()} +- if viewPort.matches(path, content) { +- return v +- } +- } +- +- return zero // no view found +-} +- +-// ResetView resets the best view for the given URI. +-func (s *Session) ResetView(ctx context.Context, uri protocol.DocumentURI) (*View, error) { +- s.viewMu.Lock() +- defer s.viewMu.Unlock() +- +- if s.viewMap == nil { +- return nil, fmt.Errorf("session is shut down") +- } +- +- view, err := s.viewOfLocked(ctx, uri.Clean()) +- if err != nil { +- return nil, err +- } +- if view == nil { +- return nil, fmt.Errorf("no view for %s", uri) +- } +- +- s.viewMap = make(map[protocol.DocumentURI]*View) +- for i, v := range s.views { +- if v == view { +- v2, _, release := s.createView(ctx, view.viewDefinition) +- release() // don't need the snapshot +- v.shutdown() +- s.views[i] = v2 +- return v2, nil +- } +- } +- +- return nil, bug.Errorf("missing view") // can't happen... +-} +- +-// DidModifyFiles reports a file modification to the session. It returns +-// the new snapshots after the modifications have been applied, paired with +-// the affected file URIs for those snapshots. +-// On success, it returns a release function that +-// must be called when the snapshots are no longer needed. +-// +-// TODO(rfindley): what happens if this function fails? It must leave us in a +-// broken state, which we should surface to the user, probably as a request to +-// restart gopls. +-func (s *Session) DidModifyFiles(ctx context.Context, modifications []file.Modification) (map[*View][]protocol.DocumentURI, error) { +- s.viewMu.Lock() +- defer s.viewMu.Unlock() +- +- // Short circuit the logic below if s is shut down. +- if s.viewMap == nil { +- return nil, fmt.Errorf("session is shut down") +- } +- +- // Update overlays. +- // +- // This is done while holding viewMu because the set of open files affects +- // the set of views, and to prevent views from seeing updated file content +- // before they have processed invalidations. +- replaced, err := s.updateOverlays(ctx, modifications) +- if err != nil { +- return nil, err +- } +- +- // checkViews controls whether the set of views needs to be recomputed, for +- // example because a go.mod file was created or deleted, or a go.work file +- // changed on disk. +- checkViews := false +- +- // Hack: collect folders from existing views. +- // TODO(golang/go#57979): we really should track folders independent of +- // Views, but since we always have a default View for each folder, this +- // works for now. +- var folders []*Folder // preserve folder order +- workspaceFileGlobsSet := make(map[string]bool) +- seen := make(map[*Folder]unit) +- for _, v := range s.views { +- if _, ok := seen[v.folder]; ok { +- continue +- } +- seen[v.folder] = unit{} +- folders = append(folders, v.folder) +- for _, glob := range v.folder.Options.WorkspaceFiles { +- workspaceFileGlobsSet[glob] = true +- } +- } +- workspaceFileGlobs := slices.Collect(maps.Keys(workspaceFileGlobsSet)) +- +- changed := make(map[protocol.DocumentURI]file.Handle) +- for _, c := range modifications { +- fh := mustReadFile(ctx, s, c.URI) +- changed[c.URI] = fh +- +- // Any change to the set of open files causes views to be recomputed. 
+- if c.Action == file.Open || c.Action == file.Close { +- checkViews = true +- } +- +- // Any on-disk change to a go.work or go.mod file causes recomputing views. +- // +- // TODO(rfindley): go.work files need not be named "go.work" -- we need to +- // check each view's source to handle the case of an explicit GOWORK value. +- // Write a test that fails, and fix this. +- if (isGoWork(c.URI) || isGoMod(c.URI) || isWorkspaceFile(c.URI, workspaceFileGlobs)) && (c.Action == file.Save || c.OnDisk) { +- checkViews = true +- } +- +- // Any change to the set of supported ports in a file may affect view +- // selection. This is perhaps more subtle than it first seems: since the +- // algorithm for selecting views considers open files in a deterministic +- // order, a change in supported ports may cause a different port to be +- // chosen, even if all open files still match an existing View! +- // +- // We endeavor to avoid that sort of path dependence, so must re-run the +- // view selection algorithm whenever any input changes. +- // +- // However, extracting the build comment is nontrivial, so we don't want to +- // pay this cost when e.g. processing a bunch of on-disk changes due to a +- // branch change. Be careful to only do this if both files are open Go +- // files. +- if old, ok := replaced[c.URI]; ok && !checkViews && fileKind(fh) == file.Go { +- if new, ok := fh.(*overlay); ok { +- if buildComment(old.content) != buildComment(new.content) { +- checkViews = true +- } +- } +- } +- } +- +- if checkViews { +- var openFiles []protocol.DocumentURI +- for _, o := range s.Overlays() { +- openFiles = append(openFiles, o.URI()) +- } +- // Sort for determinism. +- slices.Sort(openFiles) +- +- // TODO(rfindley): can we avoid running the go command (go env) +- // synchronously to change processing? Can we assume that the env did not +- // change, and derive go.work using a combination of the configured +- // GOWORK value and filesystem? +- defs, err := selectViewDefs(ctx, s, folders, openFiles) +- if err != nil { +- // Catastrophic failure, equivalent to a failure of session +- // initialization and therefore should almost never happen. One +- // scenario where this failure mode could occur is if some file +- // permissions have changed preventing us from reading go.mod +- // files. +- // +- // TODO(rfindley): consider surfacing this error more loudly. We +- // could report a bug, but it's not really a bug. +- event.Error(ctx, "selecting new views", err) +- } else { +- kept := make(map[*View]unit) +- var newViews []*View +- for _, def := range defs { +- var newView *View +- // Reuse existing view? +- for _, v := range s.views { +- if viewDefinitionsEqual(def, v.viewDefinition) { +- newView = v +- kept[v] = unit{} +- break +- } +- } +- if newView == nil { +- v, _, release := s.createView(ctx, def) +- release() +- newView = v +- } +- newViews = append(newViews, newView) +- } +- for _, v := range s.views { +- if _, ok := kept[v]; !ok { +- v.shutdown() +- } +- } +- s.views = newViews +- s.viewMap = make(map[protocol.DocumentURI]*View) +- } +- } +- +- // We only want to run fast-path diagnostics (i.e. diagnoseChangedFiles) once +- // for each changed file, in its best view. +- viewsToDiagnose := map[*View][]protocol.DocumentURI{} +- for _, mod := range modifications { +- v, err := s.viewOfLocked(ctx, mod.URI) +- if err != nil { +- // viewOfLocked only returns an error in the event of context +- // cancellation, or if the session is shut down. 
Since state changes +- // should occur on an uncancellable context, and s.viewMap was checked at +- // the top of this function, an error here is a bug. +- bug.Reportf("finding best view for change: %v", err) +- continue +- } +- if v != nil { +- viewsToDiagnose[v] = append(viewsToDiagnose[v], mod.URI) +- } +- } +- +- // ...but changes may be relevant to other views, for example if they are +- // changes to a shared package. +- for _, v := range s.views { +- _, release, needsDiagnosis := s.invalidateViewLocked(ctx, v, StateChange{Modifications: modifications, Files: changed}) +- release() +- +- if needsDiagnosis || checkViews { +- if _, ok := viewsToDiagnose[v]; !ok { +- viewsToDiagnose[v] = nil +- } +- } +- } +- +- return viewsToDiagnose, nil +-} +- +-// ExpandModificationsToDirectories returns the set of changes with the +-// directory changes removed and expanded to include all of the files in +-// the directory. +-func (s *Session) ExpandModificationsToDirectories(ctx context.Context, changes []file.Modification) []file.Modification { +- var snapshots []*Snapshot +- s.viewMu.Lock() +- for _, v := range s.views { +- snapshot, release, err := v.Snapshot() +- if err != nil { +- continue // view is shut down; continue with others +- } +- defer release() +- snapshots = append(snapshots, snapshot) +- } +- s.viewMu.Unlock() +- +- // Expand the modification to any file we could care about, which we define +- // to be any file observed by any of the snapshots. +- // +- // There may be other files in the directory, but if we haven't read them yet +- // we don't need to invalidate them. +- var result []file.Modification +- for _, c := range changes { +- expanded := make(map[protocol.DocumentURI]bool) +- for _, snapshot := range snapshots { +- for _, uri := range snapshot.filesInDir(c.URI) { +- expanded[uri] = true +- } +- } +- if len(expanded) == 0 { +- result = append(result, c) +- } else { +- for uri := range expanded { +- result = append(result, file.Modification{ +- URI: uri, +- Action: c.Action, +- LanguageID: "", +- OnDisk: c.OnDisk, +- // changes to directories cannot include text or versions +- }) +- } +- } +- } +- return result +-} +- +-// updateOverlays updates the set of overlays and returns a map of any existing +-// overlay values that were replaced. +-// +-// Precondition: caller holds s.viewMu lock. +-// TODO(rfindley): move this to fs_overlay.go. +-func (fs *overlayFS) updateOverlays(ctx context.Context, changes []file.Modification) (map[protocol.DocumentURI]*overlay, error) { +- fs.mu.Lock() +- defer fs.mu.Unlock() +- +- replaced := make(map[protocol.DocumentURI]*overlay) +- for _, c := range changes { +- o, ok := fs.overlays[c.URI] +- if ok { +- replaced[c.URI] = o +- } +- +- // If the file is not opened in an overlay and the change is on disk, +- // there's no need to update an overlay. If there is an overlay, we +- // may need to update the overlay's saved value. +- if !ok && c.OnDisk { +- continue +- } +- +- // Determine the file kind on open, otherwise, assume it has been cached. +- var kind file.Kind +- switch c.Action { +- case file.Open: +- kind = file.KindForLang(c.LanguageID) +- default: +- if !ok { +- return nil, fmt.Errorf("updateOverlays: modifying unopened overlay %v", c.URI) +- } +- kind = o.kind +- } +- +- // Closing a file just deletes its overlay. +- if c.Action == file.Close { +- delete(fs.overlays, c.URI) +- continue +- } +- +- // If the file is on disk, check if its content is the same as in the +- // overlay. 
Saves and on-disk file changes don't come with the file's +- // content. +- text := c.Text +- if text == nil && (c.Action == file.Save || c.OnDisk) { +- if !ok { +- return nil, fmt.Errorf("no known content for overlay for %s", c.Action) +- } +- text = o.content +- } +- // On-disk changes don't come with versions. +- version := c.Version +- if c.OnDisk || c.Action == file.Save { +- version = o.version +- } +- hash := file.HashOf(text) +- var sameContentOnDisk bool +- switch c.Action { +- case file.Delete: +- // Do nothing. sameContentOnDisk should be false. +- case file.Save: +- // Make sure the version and content (if present) is the same. +- if false && o.version != version { // Client no longer sends the version +- return nil, fmt.Errorf("updateOverlays: saving %s at version %v, currently at %v", c.URI, c.Version, o.version) +- } +- if c.Text != nil && o.hash != hash { +- return nil, fmt.Errorf("updateOverlays: overlay %s changed on save", c.URI) +- } +- sameContentOnDisk = true +- default: +- fh := mustReadFile(ctx, fs.delegate, c.URI) +- _, readErr := fh.Content() +- sameContentOnDisk = (readErr == nil && fh.Identity().Hash == hash) +- } +- o = &overlay{ +- uri: c.URI, +- version: version, +- content: text, +- modTime: time.Now(), +- kind: kind, +- hash: hash, +- saved: sameContentOnDisk, +- } +- +- // NOTE: previous versions of this code checked here that the overlay had a +- // view and file kind (but we don't know why). +- +- fs.overlays[c.URI] = o +- } +- +- return replaced, nil +-} +- +-func mustReadFile(ctx context.Context, fs file.Source, uri protocol.DocumentURI) file.Handle { +- ctx = xcontext.Detach(ctx) +- fh, err := fs.ReadFile(ctx, uri) +- if err != nil { +- // ReadFile cannot fail with an uncancellable context. +- bug.Reportf("reading file failed unexpectedly: %v", err) +- return brokenFile{uri, err} +- } +- return fh +-} +- +-// A brokenFile represents an unexpected failure to read a file. +-type brokenFile struct { +- uri protocol.DocumentURI +- err error +-} +- +-func (b brokenFile) String() string { return b.uri.Path() } +-func (b brokenFile) URI() protocol.DocumentURI { return b.uri } +-func (b brokenFile) Identity() file.Identity { return file.Identity{URI: b.uri} } +-func (b brokenFile) SameContentsOnDisk() bool { return false } +-func (b brokenFile) Version() int32 { return 0 } +-func (b brokenFile) Content() ([]byte, error) { return nil, b.err } +-func (b brokenFile) ModTime() (time.Time, error) { return time.Time{}, b.err } +- +-// FileWatchingGlobPatterns returns a set of glob patterns that the client is +-// required to watch for changes, and notify the server of them, in order to +-// keep the server's state up to date. +-// +-// This set includes +-// 1. all go.mod and go.work files in the workspace; and +-// 2. all files defined by the WorkspaceFiles option in BuildOptions (to support custom GOPACKAGESDRIVERS); and +-// 3. for each Snapshot, its modules (or directory for ad-hoc views). In +-// module mode, this is the set of active modules (and for VS Code, all +-// workspace directories within them, due to golang/go#42348). +-// +-// The watch for workspace files in (1) is sufficient to +-// capture changes to the repo structure that may affect the set of views. +-// Whenever this set changes, we reload the workspace and invalidate memoized +-// files. +-// +-// The watch for workspace directories in (2) should keep each View up to date, +-// as it should capture any newly added/modified/deleted Go files. 
+-// +-// Patterns are returned as a set of protocol.RelativePatterns, since they can +-// always be later translated to glob patterns (i.e. strings) if the client +-// lacks relative pattern support. By convention, any pattern returned with +-// empty baseURI should be served as a glob pattern. +-// +-// In general, we prefer to serve relative patterns, as they work better on +-// most clients that support both, and do not have issues with Windows driver +-// letter casing: +-// https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#relativePattern +-// +-// TODO(golang/go#57979): we need to reset the memoizedFS when a view changes. +-// Consider the case where we incidentally read a file, then it moved outside +-// of an active module, and subsequently changed: we would still observe the +-// original file state. +-func (s *Session) FileWatchingGlobPatterns(ctx context.Context) map[protocol.RelativePattern]unit { +- s.viewMu.Lock() +- defer s.viewMu.Unlock() +- +- // Always watch files that may change the set of views. +- patterns := map[protocol.RelativePattern]unit{ +- {Pattern: "**/*.{mod,work}"}: {}, +- } +- +- for _, view := range s.views { +- snapshot, release, err := view.Snapshot() +- if err != nil { +- continue // view is shut down; continue with others +- } +- maps.Copy(patterns, snapshot.fileWatchingGlobPatterns()) +- release() +- } +- return patterns +-} +- +-// OrphanedFileDiagnostics reports diagnostics describing why open files have +-// no packages or have only command-line-arguments packages. +-// +-// If the resulting diagnostic is nil, the file is either not orphaned or we +-// can't produce a good diagnostic. +-// +-// The caller must not mutate the result. +-func (s *Session) OrphanedFileDiagnostics(ctx context.Context) (map[protocol.DocumentURI][]*Diagnostic, error) { +- if err := ctx.Err(); err != nil { +- // Avoid collecting diagnostics if the context is cancelled. +- // (Previously, it was possible to get all the way to packages.Load on a cancelled context) +- return nil, err +- } +- // Note: diagnostics holds a slice for consistency with other diagnostic +- // funcs. +- diagnostics := make(map[protocol.DocumentURI][]*Diagnostic) +- +- byView := make(map[*View][]*overlay) +- for _, o := range s.Overlays() { +- uri := o.URI() +- snapshot, release, err := s.SnapshotOf(ctx, uri) +- if err != nil { +- // TODO(golang/go#57979): we have to use the .go suffix as an approximation for +- // file kind here, because we don't have access to Options if no View was +- // matched. +- // +- // But Options are really a property of Folder, not View, and we could +- // match a folder here. +- // +- // Refactor so that Folders are tracked independently of Views, and use +- // the correct options here to get the most accurate file kind. +- // +- // TODO(golang/go#57979): once we switch entirely to the zeroconfig +- // logic, we should use this diagnostic for the fallback case of +- // s.views[0] in the ViewOf logic. 
+- if errors.Is(err, errNoViews) { +- if strings.HasSuffix(string(uri), ".go") { +- if _, rng, ok := orphanedFileDiagnosticRange(ctx, s.parseCache, o); ok { +- diagnostics[uri] = []*Diagnostic{{ +- URI: uri, +- Range: rng, +- Severity: protocol.SeverityWarning, +- Source: ListError, +- Message: fmt.Sprintf("No active builds contain %s: consider opening a new workspace folder containing it", uri.Path()), +- }} +- } +- } +- continue +- } +- return nil, err +- } +- v := snapshot.View() +- release() +- byView[v] = append(byView[v], o) +- } +- +- for view, overlays := range byView { +- snapshot, release, err := view.Snapshot() +- if err != nil { +- continue // view is shutting down +- } +- defer release() +- diags, err := snapshot.orphanedFileDiagnostics(ctx, overlays) +- if err != nil { +- return nil, err +- } +- for _, d := range diags { +- diagnostics[d.URI] = append(diagnostics[d.URI], d) +- } +- } +- return diagnostics, nil +-} +diff -urN a/gopls/internal/cache/session_test.go b/gopls/internal/cache/session_test.go +--- a/gopls/internal/cache/session_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/cache/session_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,406 +0,0 @@ +-// Copyright 2023 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package cache +- +-import ( +- "context" +- "os" +- "path" +- "path/filepath" +- "testing" +- +- "github.com/google/go-cmp/cmp" +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/gopls/internal/settings" +- "golang.org/x/tools/gopls/internal/test/integration/fake" +- "golang.org/x/tools/internal/testenv" +-) +- +-func TestZeroConfigAlgorithm(t *testing.T) { +- testenv.NeedsExec(t) // executes the Go command +- t.Setenv("GOPACKAGESDRIVER", "off") +- +- type viewSummary struct { +- // fields exported for cmp.Diff +- Type ViewType +- Root string +- Env []string +- } +- +- type folderSummary struct { +- dir string +- options func(dir string) map[string]any // options may refer to the temp dir +- } +- +- includeReplaceInWorkspace := func(string) map[string]any { +- return map[string]any{ +- "includeReplaceInWorkspace": true, +- } +- } +- +- type test struct { +- name string +- files map[string]string // use a map rather than txtar as file content is tiny +- folders []folderSummary +- open []string // open files +- want []viewSummary +- } +- +- tests := []test{ +- // TODO(rfindley): add a test for GOPACKAGESDRIVER. +- // Doing so doesn't yet work using options alone (user env is not honored) +- +- // TODO(rfindley): add a test for degenerate cases, such as missing +- // workspace folders (once we decide on the correct behavior). 
+- { +- "basic go.work workspace", +- map[string]string{ +- "go.work": "go 1.18\nuse (\n\t./a\n\t./b\n)\n", +- "a/go.mod": "module golang.org/a\ngo 1.18\n", +- "b/go.mod": "module golang.org/b\ngo 1.18\n", +- }, +- []folderSummary{{dir: "."}}, +- nil, +- []viewSummary{{GoWorkView, ".", nil}}, +- }, +- { +- "basic go.mod workspace", +- map[string]string{ +- "go.mod": "module golang.org/a\ngo 1.18\n", +- }, +- []folderSummary{{dir: "."}}, +- nil, +- []viewSummary{{GoModView, ".", nil}}, +- }, +- { +- "basic GOPATH workspace", +- map[string]string{ +- "src/golang.org/a/a.go": "package a", +- "src/golang.org/b/b.go": "package b", +- }, +- []folderSummary{{ +- dir: "src", +- options: func(dir string) map[string]any { +- return map[string]any{ +- "env": map[string]any{ +- "GO111MODULE": "", // golang/go#70196: must be unset +- "GOPATH": dir, +- }, +- } +- }, +- }}, +- []string{"src/golang.org/a//a.go", "src/golang.org/b/b.go"}, +- []viewSummary{{GOPATHView, "src", nil}}, +- }, +- { +- "basic AdHoc workspace", +- map[string]string{ +- "foo.go": "package foo", +- }, +- []folderSummary{{dir: "."}}, +- nil, +- []viewSummary{{AdHocView, ".", nil}}, +- }, +- { +- "multi-folder workspace", +- map[string]string{ +- "a/go.mod": "module golang.org/a\ngo 1.18\n", +- "b/go.mod": "module golang.org/b\ngo 1.18\n", +- }, +- []folderSummary{{dir: "a"}, {dir: "b"}}, +- nil, +- []viewSummary{{GoModView, "a", nil}, {GoModView, "b", nil}}, +- }, +- { +- "multi-module workspace", +- map[string]string{ +- "a/go.mod": "module golang.org/a\ngo 1.18\n", +- "b/go.mod": "module golang.org/b\ngo 1.18\n", +- }, +- []folderSummary{{dir: "."}}, +- nil, +- []viewSummary{{AdHocView, ".", nil}}, +- }, +- { +- "zero-config open module", +- map[string]string{ +- "a/go.mod": "module golang.org/a\ngo 1.18\n", +- "a/a.go": "package a", +- "b/go.mod": "module golang.org/b\ngo 1.18\n", +- "b/b.go": "package b", +- }, +- []folderSummary{{dir: "."}}, +- []string{"a/a.go"}, +- []viewSummary{ +- {AdHocView, ".", nil}, +- {GoModView, "a", nil}, +- }, +- }, +- { +- "zero-config open modules", +- map[string]string{ +- "a/go.mod": "module golang.org/a\ngo 1.18\n", +- "a/a.go": "package a", +- "b/go.mod": "module golang.org/b\ngo 1.18\n", +- "b/b.go": "package b", +- }, +- []folderSummary{{dir: "."}}, +- []string{"a/a.go", "b/b.go"}, +- []viewSummary{ +- {AdHocView, ".", nil}, +- {GoModView, "a", nil}, +- {GoModView, "b", nil}, +- }, +- }, +- { +- "unified workspace", +- map[string]string{ +- "go.work": "go 1.18\nuse (\n\t./a\n\t./b\n)\n", +- "a/go.mod": "module golang.org/a\ngo 1.18\n", +- "a/a.go": "package a", +- "b/go.mod": "module golang.org/b\ngo 1.18\n", +- "b/b.go": "package b", +- }, +- []folderSummary{{dir: "."}}, +- []string{"a/a.go", "b/b.go"}, +- []viewSummary{{GoWorkView, ".", nil}}, +- }, +- { +- "go.work from env", +- map[string]string{ +- "nested/go.work": "go 1.18\nuse (\n\t../a\n\t../b\n)\n", +- "a/go.mod": "module golang.org/a\ngo 1.18\n", +- "a/a.go": "package a", +- "b/go.mod": "module golang.org/b\ngo 1.18\n", +- "b/b.go": "package b", +- }, +- []folderSummary{{ +- dir: ".", +- options: func(dir string) map[string]any { +- return map[string]any{ +- "env": map[string]any{ +- "GOWORK": filepath.Join(dir, "nested", "go.work"), +- }, +- } +- }, +- }}, +- []string{"a/a.go", "b/b.go"}, +- []viewSummary{{GoWorkView, ".", nil}}, +- }, +- { +- "independent module view", +- map[string]string{ +- "go.work": "go 1.18\nuse (\n\t./a\n)\n", // not using b +- "a/go.mod": "module golang.org/a\ngo 1.18\n", +- "a/a.go": "package a", +- 
"b/go.mod": "module golang.org/a\ngo 1.18\n", +- "b/b.go": "package b", +- }, +- []folderSummary{{dir: "."}}, +- []string{"a/a.go", "b/b.go"}, +- []viewSummary{ +- {GoWorkView, ".", nil}, +- {GoModView, "b", []string{"GOWORK=off"}}, +- }, +- }, +- { +- "multiple go.work", +- map[string]string{ +- "go.work": "go 1.18\nuse (\n\t./a\n\t./b\n)\n", +- "a/go.mod": "module golang.org/a\ngo 1.18\n", +- "a/a.go": "package a", +- "b/go.work": "go 1.18\nuse (\n\t.\n\t./c\n)\n", +- "b/go.mod": "module golang.org/b\ngo 1.18\n", +- "b/b.go": "package b", +- "b/c/go.mod": "module golang.org/c\ngo 1.18\n", +- }, +- []folderSummary{{dir: "."}}, +- []string{"a/a.go", "b/b.go", "b/c/c.go"}, +- []viewSummary{{GoWorkView, ".", nil}, {GoWorkView, "b", nil}}, +- }, +- { +- "multiple go.work, c unused", +- map[string]string{ +- "go.work": "go 1.18\nuse (\n\t./a\n\t./b\n)\n", +- "a/go.mod": "module golang.org/a\ngo 1.18\n", +- "a/a.go": "package a", +- "b/go.work": "go 1.18\nuse (\n\t.\n)\n", +- "b/go.mod": "module golang.org/b\ngo 1.18\n", +- "b/b.go": "package b", +- "b/c/go.mod": "module golang.org/c\ngo 1.18\n", +- }, +- []folderSummary{{dir: "."}}, +- []string{"a/a.go", "b/b.go", "b/c/c.go"}, +- []viewSummary{{GoWorkView, ".", nil}, {GoModView, "b/c", []string{"GOWORK=off"}}}, +- }, +- { +- "go.mod with nested replace", +- map[string]string{ +- "go.mod": "module golang.org/a\n require golang.org/b v1.2.3\nreplace example.com/b => ./b", +- "a.go": "package a", +- "b/go.mod": "module golang.org/b\ngo 1.18\n", +- "b/b.go": "package b", +- }, +- []folderSummary{{dir: ".", options: includeReplaceInWorkspace}}, +- []string{"a/a.go", "b/b.go"}, +- []viewSummary{{GoModView, ".", nil}}, +- }, +- { +- "go.mod with parent replace, parent folder", +- map[string]string{ +- "go.mod": "module golang.org/a", +- "a.go": "package a", +- "b/go.mod": "module golang.org/b\ngo 1.18\nrequire golang.org/a v1.2.3\nreplace golang.org/a => ../", +- "b/b.go": "package b", +- }, +- []folderSummary{{dir: ".", options: includeReplaceInWorkspace}}, +- []string{"a/a.go", "b/b.go"}, +- []viewSummary{{GoModView, ".", nil}, {GoModView, "b", nil}}, +- }, +- { +- "go.mod with multiple replace", +- map[string]string{ +- "go.mod": ` +-module golang.org/root +- +-require ( +- golang.org/a v1.2.3 +- golang.org/b v1.2.3 +- golang.org/c v1.2.3 +-) +- +-replace ( +- golang.org/b => ./b +- golang.org/c => ./c +- // Note: d is not replaced +-) +-`, +- "a.go": "package a", +- "b/go.mod": "module golang.org/b\ngo 1.18", +- "b/b.go": "package b", +- "c/go.mod": "module golang.org/c\ngo 1.18", +- "c/c.go": "package c", +- "d/go.mod": "module golang.org/d\ngo 1.18", +- "d/d.go": "package d", +- }, +- []folderSummary{{dir: ".", options: includeReplaceInWorkspace}}, +- []string{"b/b.go", "c/c.go", "d/d.go"}, +- []viewSummary{{GoModView, ".", nil}, {GoModView, "d", nil}}, +- }, +- { +- "go.mod with replace outside the workspace", +- map[string]string{ +- "go.mod": "module golang.org/a\ngo 1.18", +- "a.go": "package a", +- "b/go.mod": "module golang.org/b\ngo 1.18\nrequire golang.org/a v1.2.3\nreplace golang.org/a => ../", +- "b/b.go": "package b", +- }, +- []folderSummary{{dir: "b"}}, +- []string{"a.go", "b/b.go"}, +- []viewSummary{{GoModView, "b", nil}}, +- }, +- { +- "go.mod with replace directive; workspace replace off", +- map[string]string{ +- "go.mod": "module golang.org/a\n require golang.org/b v1.2.3\nreplace example.com/b => ./b", +- "a.go": "package a", +- "b/go.mod": "module golang.org/b\ngo 1.18\n", +- "b/b.go": "package b", +- }, +- []folderSummary{{ 
+- dir: ".", +- options: func(string) map[string]any { +- return map[string]any{ +- "includeReplaceInWorkspace": false, +- } +- }, +- }}, +- []string{"a/a.go", "b/b.go"}, +- []viewSummary{{GoModView, ".", nil}, {GoModView, "b", nil}}, +- }, +- } +- +- for _, test := range tests { +- ctx := context.Background() +- t.Run(test.name, func(t *testing.T) { +- dir := writeFiles(t, test.files) +- rel := fake.RelativeTo(dir) +- fs := newMemoizedFS() +- +- toURI := func(path string) protocol.DocumentURI { +- return protocol.URIFromPath(rel.AbsPath(path)) +- } +- +- var folders []*Folder +- for _, f := range test.folders { +- opts := settings.DefaultOptions() +- if f.options != nil { +- _, errs := opts.Set(f.options(dir)) +- for _, err := range errs { +- t.Fatal(err) +- } +- } +- uri := toURI(f.dir) +- env, err := FetchGoEnv(ctx, uri, opts) +- if err != nil { +- t.Fatalf("FetchGoEnv failed: %v", err) +- } +- t.Logf("FetchGoEnv(%q) = %+v", uri, env) +- folders = append(folders, &Folder{ +- Dir: uri, +- Name: path.Base(f.dir), +- Options: opts, +- Env: *env, +- }) +- } +- +- var openFiles []protocol.DocumentURI +- for _, path := range test.open { +- openFiles = append(openFiles, toURI(path)) +- } +- +- defs, err := selectViewDefs(ctx, fs, folders, openFiles) +- if err != nil { +- t.Fatal(err) +- } +- var got []viewSummary +- for _, def := range defs { +- got = append(got, viewSummary{ +- Type: def.Type(), +- Root: rel.RelPath(def.root.Path()), +- Env: def.EnvOverlay(), +- }) +- } +- if diff := cmp.Diff(test.want, got); diff != "" { +- t.Errorf("selectViews() mismatch (-want +got):\n%s", diff) +- } +- }) +- } +-} +- +-// TODO(rfindley): this function could be meaningfully factored with the +-// various other test helpers of this nature. +-func writeFiles(t *testing.T, files map[string]string) string { +- root := t.TempDir() +- +- // This unfortunate step is required because gopls output +- // expands symbolic links in its input file names (arguably it +- // should not), and on macOS the temp dir is in /var -> private/var. +- root, err := filepath.EvalSymlinks(root) +- if err != nil { +- t.Fatal(err) +- } +- +- for name, content := range files { +- filename := filepath.Join(root, name) +- if err := os.MkdirAll(filepath.Dir(filename), 0777); err != nil { +- t.Fatal(err) +- } +- if err := os.WriteFile(filename, []byte(content), 0666); err != nil { +- t.Fatal(err) +- } +- } +- return root +-} +diff -urN a/gopls/internal/cache/snapshot.go b/gopls/internal/cache/snapshot.go +--- a/gopls/internal/cache/snapshot.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/cache/snapshot.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,2218 +0,0 @@ +-// Copyright 2019 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. 
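The writeFiles helper removed above materializes a map of relative paths to file contents under a temp directory and resolves symlinks so that paths compare cleanly on macOS. An equivalent self-contained sketch (names are illustrative, not gopls API):

package demo

import (
	"os"
	"path/filepath"
	"testing"
)

// writeTree writes a map of relative path -> content under a fresh temp
// directory, resolving symlinks first so that later path comparisons work on
// macOS, where TempDir lives under /var -> /private/var.
func writeTree(t *testing.T, files map[string]string) string {
	t.Helper()
	root, err := filepath.EvalSymlinks(t.TempDir())
	if err != nil {
		t.Fatal(err)
	}
	for name, content := range files {
		p := filepath.Join(root, name)
		if err := os.MkdirAll(filepath.Dir(p), 0o777); err != nil {
			t.Fatal(err)
		}
		if err := os.WriteFile(p, []byte(content), 0o666); err != nil {
			t.Fatal(err)
		}
	}
	return root
}

func TestWriteTree(t *testing.T) {
	root := writeTree(t, map[string]string{
		"go.mod": "module example.com/demo\ngo 1.21\n",
		"a/a.go": "package a\n",
	})
	if _, err := os.Stat(filepath.Join(root, "a", "a.go")); err != nil {
		t.Fatal(err)
	}
}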
+- +-package cache +- +-import ( +- "bytes" +- "cmp" +- "context" +- "errors" +- "fmt" +- "go/ast" +- "go/build/constraint" +- "go/parser" +- "go/token" +- "os" +- "path" +- "path/filepath" +- "regexp" +- "slices" +- "strconv" +- "strings" +- "sync" +- +- "golang.org/x/tools/go/types/objectpath" +- "golang.org/x/tools/gopls/internal/cache/metadata" +- "golang.org/x/tools/gopls/internal/cache/methodsets" +- "golang.org/x/tools/gopls/internal/cache/parsego" +- "golang.org/x/tools/gopls/internal/cache/testfuncs" +- "golang.org/x/tools/gopls/internal/cache/xrefs" +- "golang.org/x/tools/gopls/internal/file" +- "golang.org/x/tools/gopls/internal/filecache" +- label1 "golang.org/x/tools/gopls/internal/label" +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/gopls/internal/protocol/command" +- "golang.org/x/tools/gopls/internal/settings" +- "golang.org/x/tools/gopls/internal/util/bug" +- "golang.org/x/tools/gopls/internal/util/constraints" +- "golang.org/x/tools/gopls/internal/util/immutable" +- "golang.org/x/tools/gopls/internal/util/memoize" +- "golang.org/x/tools/gopls/internal/util/moremaps" +- "golang.org/x/tools/gopls/internal/util/pathutil" +- "golang.org/x/tools/gopls/internal/util/persistent" +- "golang.org/x/tools/gopls/internal/vulncheck" +- "golang.org/x/tools/internal/event" +- "golang.org/x/tools/internal/event/label" +- "golang.org/x/tools/internal/gocommand" +-) +- +-// A Snapshot represents the current state for a given view. +-// +-// It is first and foremost an idempotent implementation of file.Source whose +-// ReadFile method returns consistent information about the existence and +-// content of each file throughout its lifetime. +-// +-// However, the snapshot also manages additional state (such as parsed files +-// and packages) that are derived from file content. +-// +-// Snapshots are responsible for bookkeeping and invalidation of this state, +-// implemented in Snapshot.clone. +-type Snapshot struct { +- // sequenceID is the monotonically increasing ID of this snapshot within its View. +- // +- // Sequence IDs for Snapshots from different Views cannot be compared. +- sequenceID uint64 +- +- // TODO(rfindley): the snapshot holding a reference to the view poses +- // lifecycle problems: a view may be shut down and waiting for work +- // associated with this snapshot to complete. While most accesses of the view +- // are benign (options or workspace information), this is not formalized and +- // it is wrong for the snapshot to use a shutdown view. +- // +- // Fix this by passing options and workspace information to the snapshot, +- // both of which should be immutable for the snapshot. +- view *View +- +- cancel func() +- backgroundCtx context.Context +- +- store *memoize.Store // cache of handles shared by all snapshots +- +- refMu sync.Mutex +- +- // refcount holds the number of outstanding references to the current +- // Snapshot. When refcount is decremented to 0, the Snapshot maps are +- // destroyed and the done function is called. +- // +- // TODO(rfindley): use atomic.Int32 on Go 1.19+. +- refcount int +- done func() // for implementing Session.Shutdown +- +- // mu guards all of the maps in the snapshot, as well as the builtin URI and +- // initialized. +- mu sync.Mutex +- +- // initialized reports whether the snapshot has been initialized. Concurrent +- // initialization is guarded by the view.initializationSema. Each snapshot is +- // initialized at most once: concurrent initialization is guarded by +- // view.initializationSema. 
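The refcount field documented above keeps a snapshot alive until every outstanding reference is released; Acquire and decref further down in this file implement it. A minimal sketch of the same acquire/release pattern (illustrative only), with destroy standing in for tearing down the snapshot's maps:

package main

import (
	"fmt"
	"sync"
)

// refCounted sketches the Snapshot reference-counting scheme: Acquire bumps
// the count and returns a release function; when the count drops to zero,
// destroy runs exactly once.
type refCounted struct {
	mu      sync.Mutex
	count   int
	destroy func()
}

func newRefCounted(destroy func()) *refCounted {
	return &refCounted{count: 1, destroy: destroy} // creator holds the first reference
}

func (r *refCounted) Acquire() (release func()) {
	r.mu.Lock()
	defer r.mu.Unlock()
	if r.count <= 0 {
		panic("acquire of destroyed object")
	}
	r.count++
	return r.release
}

func (r *refCounted) release() {
	r.mu.Lock()
	defer r.mu.Unlock()
	r.count--
	if r.count == 0 {
		r.destroy()
	}
}

func main() {
	r := newRefCounted(func() { fmt.Println("destroyed") })
	done := r.Acquire()
	done()      // still one reference held by the creator
	r.release() // last reference gone: destroy runs
}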
+- initialized bool +- +- // initialErr holds the last error resulting from initialization. If +- // initialization fails, we only retry when the workspace modules change, +- // to avoid too many go/packages calls. +- // If initialized is false, initialErr still holds the error resulting from +- // the previous initialization. +- // TODO(rfindley): can we unify the lifecycle of initialized and initialErr. +- initialErr *InitializationError +- +- // builtin is the location of builtin.go in GOROOT. +- // +- // TODO(rfindley): would it make more sense to eagerly parse builtin, and +- // instead store a *parsego.File here? +- builtin protocol.DocumentURI +- +- // meta holds loaded metadata. +- // +- // meta is guarded by mu, but the Graph itself is immutable. +- // +- // TODO(rfindley): in many places we hold mu while operating on meta, even +- // though we only need to hold mu while reading the pointer. +- meta *metadata.Graph +- +- // files maps file URIs to their corresponding FileHandles. +- // It may invalidated when a file's content changes. +- files *fileMap +- +- // packages maps a packageKey to a *packageHandle. +- // It may be invalidated when a file's content changes. +- // +- // Invariants to preserve: +- // - packages.Get(id).meta == meta.metadata[id] for all ids +- // - if a package is in packages, then all of its dependencies should also +- // be in packages, unless there is a missing import +- packages *persistent.Map[PackageID, *packageHandle] +- +- // fullAnalysisKeys and factyAnalysisKeys hold memoized cache keys for +- // analysis packages. "full" refers to the cache key including all enabled +- // analyzers, whereas "facty" is the key including only the subset of enabled +- // analyzers that produce facts, such as is required for transitively +- // imported packages. +- // +- // These keys are memoized because they can be quite expensive to compute. +- fullAnalysisKeys *persistent.Map[PackageID, file.Hash] +- factyAnalysisKeys *persistent.Map[PackageID, file.Hash] +- +- // workspacePackages contains the workspace's packages, which are loaded +- // when the view is created. It does not contain intermediate test variants. +- workspacePackages immutable.Map[PackageID, PackagePath] +- +- // shouldLoad tracks packages that need to be reloaded, mapping a PackageID +- // to the package paths that should be used to reload it +- // +- // When we try to load a package, we clear it from the shouldLoad map +- // regardless of whether the load succeeded, to prevent endless loads. +- shouldLoad *persistent.Map[PackageID, []PackagePath] +- +- // unloadableFiles keeps track of files that we've failed to load. +- unloadableFiles *persistent.Set[protocol.DocumentURI] +- +- // TODO(rfindley): rename the handles below to "promises". A promise is +- // different from a handle (we mutate the package handle.) +- +- // parseModHandles keeps track of any parseModHandles for the snapshot. +- // The handles need not refer to only the view's go.mod file. +- parseModHandles *persistent.Map[protocol.DocumentURI, *memoize.Promise] // *memoize.Promise[parseModResult] +- +- // parseWorkHandles keeps track of any parseWorkHandles for the snapshot. +- // The handles need not refer to only the view's go.work file. +- parseWorkHandles *persistent.Map[protocol.DocumentURI, *memoize.Promise] // *memoize.Promise[parseWorkResult] +- +- // Preserve go.mod-related handles to avoid garbage-collecting the results +- // of various calls to the go command. 
The handles need not refer to only +- // the view's go.mod file. +- modTidyHandles *persistent.Map[protocol.DocumentURI, *memoize.Promise] // *memoize.Promise[modTidyResult] +- modWhyHandles *persistent.Map[protocol.DocumentURI, *memoize.Promise] // *memoize.Promise[modWhyResult] +- modVulnHandles *persistent.Map[protocol.DocumentURI, *memoize.Promise] // *memoize.Promise[modVulnResult] +- +- // moduleUpgrades tracks known upgrades for module paths in each modfile. +- // Each modfile has a map of module name to upgrade version. +- moduleUpgrades *persistent.Map[protocol.DocumentURI, map[string]string] +- +- // vulns maps each go.mod file's URI to its known vulnerabilities. +- vulns *persistent.Map[protocol.DocumentURI, *vulncheck.Result] +- +- // compilerOptDetails is the set of directories whose packages +- // and tests need compiler optimization details in the diagnostics. +- compilerOptDetails map[protocol.DocumentURI]unit +- +- // Concurrent type checking: +- // typeCheckMu guards the ongoing type checking batch, and reference count of +- // ongoing type checking operations. +- // When the batch is no longer needed (batchRef=0), it is discarded. +- typeCheckMu sync.Mutex +- batchRef int +- batch *typeCheckBatch +-} +- +-var _ memoize.RefCounted = (*Snapshot)(nil) // snapshots are reference-counted +- +-func (s *Snapshot) awaitPromise(ctx context.Context, p *memoize.Promise) (any, error) { +- return p.Get(ctx, s) +-} +- +-// Acquire prevents the snapshot from being destroyed until the returned +-// function is called. +-// +-// (s.Acquire().release() could instead be expressed as a pair of +-// method calls s.IncRef(); s.DecRef(). The latter has the advantage +-// that the DecRefs are fungible and don't require holding anything in +-// addition to the refcounted object s, but paradoxically that is also +-// an advantage of the current approach, which forces the caller to +-// consider the release function at every stage, making a reference +-// leak more obvious.) +-func (s *Snapshot) Acquire() func() { +- s.refMu.Lock() +- defer s.refMu.Unlock() +- assert(s.refcount > 0, "non-positive refs") +- s.refcount++ +- +- return s.decref +-} +- +-// decref should only be referenced by Acquire, and by View when it frees its +-// reference to View.snapshot. +-func (s *Snapshot) decref() { +- s.refMu.Lock() +- defer s.refMu.Unlock() +- +- assert(s.refcount > 0, "non-positive refs") +- s.refcount-- +- if s.refcount == 0 { +- s.packages.Destroy() +- s.files.destroy() +- s.parseModHandles.Destroy() +- s.parseWorkHandles.Destroy() +- s.modTidyHandles.Destroy() +- s.modVulnHandles.Destroy() +- s.modWhyHandles.Destroy() +- s.unloadableFiles.Destroy() +- s.moduleUpgrades.Destroy() +- s.vulns.Destroy() +- s.done() +- } +-} +- +-// SequenceID is the sequence id of this snapshot within its containing +-// view. +-// +-// Relative to their view sequence ids are monotonically increasing, but this +-// does not hold globally: when new views are created their initial snapshot +-// has sequence ID 0. +-func (s *Snapshot) SequenceID() uint64 { +- return s.sequenceID +-} +- +-// SnapshotLabels returns a new slice of labels that should be used for events +-// related to a snapshot. +-func (s *Snapshot) Labels() []label.Label { +- return []label.Label{ +- label1.ViewID.Of(s.view.id), +- label1.Snapshot.Of(s.SequenceID()), +- label1.Directory.Of(s.Folder().Path()), +- } +-} +- +-// Folder returns the folder at the base of this snapshot. 
+-func (s *Snapshot) Folder() protocol.DocumentURI { +- return s.view.folder.Dir +-} +- +-// View returns the View associated with this snapshot. +-func (s *Snapshot) View() *View { +- return s.view +-} +- +-// FileKind returns the kind of a file. +-// +-// We can't reliably deduce the kind from the file name alone, +-// as some editors can be told to interpret a buffer as +-// language different from the file name heuristic, e.g. that +-// an .html file actually contains Go "html/template" syntax, +-// or even that a .go file contains Python. +-func (s *Snapshot) FileKind(fh file.Handle) file.Kind { +- if k := fileKind(fh); k != file.UnknownKind { +- return k +- } +- fext := filepath.Ext(fh.URI().Path()) +- exts := s.Options().TemplateExtensions +- for _, ext := range exts { +- if fext == ext || fext == "."+ext { +- return file.Tmpl +- } +- } +- +- // and now what? This should never happen, but it does for cgo before go1.15 +- // +- // TODO(rfindley): this doesn't look right. We should default to UnknownKind. +- // Also, I don't understand the comment above, though I'd guess before go1.15 +- // we encountered cgo files without the .go extension. +- return file.Go +-} +- +-// fileKind returns the default file kind for a file, before considering +-// template file extensions. See [Snapshot.FileKind]. +-func fileKind(fh file.Handle) file.Kind { +- // The kind of an unsaved buffer comes from the +- // TextDocumentItem.LanguageID field in the didChange event, +- // not from the file name. They may differ. +- if o, ok := fh.(*overlay); ok { +- if o.kind != file.UnknownKind { +- return o.kind +- } +- } +- +- fext := filepath.Ext(fh.URI().Path()) +- switch fext { +- case ".go": +- return file.Go +- case ".mod": +- return file.Mod +- case ".sum": +- return file.Sum +- case ".work": +- return file.Work +- case ".s": +- return file.Asm +- } +- return file.UnknownKind +-} +- +-// Options returns the options associated with this snapshot. +-func (s *Snapshot) Options() *settings.Options { +- return s.view.folder.Options +-} +- +-// BackgroundContext returns a context used for all background processing +-// on behalf of this snapshot. +-func (s *Snapshot) BackgroundContext() context.Context { +- return s.backgroundCtx +-} +- +-// Templates returns the .tmpl files. +-func (s *Snapshot) Templates() map[protocol.DocumentURI]file.Handle { +- s.mu.Lock() +- defer s.mu.Unlock() +- +- tmpls := map[protocol.DocumentURI]file.Handle{} +- for k, fh := range s.files.all() { +- if s.FileKind(fh) == file.Tmpl { +- tmpls[k] = fh +- } +- } +- return tmpls +-} +- +-// RunGoModUpdateCommands runs a series of `go` commands that updates the go.mod +-// and go.sum file for wd, and returns their updated contents. +-// +-// TODO(rfindley): the signature of RunGoModUpdateCommands is very confusing, +-// and is the only thing forcing the ModFlag and ModFile indirection. +-// Simplify it. +-func (s *Snapshot) RunGoModUpdateCommands(ctx context.Context, modURI protocol.DocumentURI, run func(invoke func(...string) (*bytes.Buffer, error)) error) ([]byte, []byte, error) { +- tempDir, cleanupModDir, err := TempModDir(ctx, s, modURI) +- if err != nil { +- return nil, nil, err +- } +- defer cleanupModDir() +- +- // TODO(rfindley): we must use ModFlag and ModFile here (rather than simply +- // setting Args), because without knowing the verb, we can't know whether +- // ModFlag is appropriate. Refactor so that args can be set by the caller. 
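FileKind above honors the kind reported by the editor for open buffers and otherwise falls back to the file extension. A standalone sketch of just the extension fallback, using a local kind type rather than gopls' file.Kind:

package main

import (
	"fmt"
	"path/filepath"
)

type kind int

const (
	unknownKind kind = iota
	goKind
	modKind
	sumKind
	workKind
	asmKind
)

// kindByExtension mirrors the extension switch in fileKind; the real code
// additionally honors the LanguageID of open buffers and configured
// template extensions.
func kindByExtension(path string) kind {
	switch filepath.Ext(path) {
	case ".go":
		return goKind
	case ".mod":
		return modKind
	case ".sum":
		return sumKind
	case ".work":
		return workKind
	case ".s":
		return asmKind
	}
	return unknownKind
}

func main() {
	fmt.Println(kindByExtension("a/b/main.go") == goKind) // true
	fmt.Println(kindByExtension("go.work") == workKind)   // true
}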
+- inv, cleanupInvocation, err := s.GoCommandInvocation(NetworkOK, modURI.DirPath(), "", nil, "GOWORK=off") +- if err != nil { +- return nil, nil, err +- } +- defer cleanupInvocation() +- +- inv.ModFlag = "mod" +- inv.ModFile = filepath.Join(tempDir, "go.mod") +- invoke := func(args ...string) (*bytes.Buffer, error) { +- inv.Verb = args[0] +- inv.Args = args[1:] +- return s.view.gocmdRunner.Run(ctx, *inv) +- } +- if err := run(invoke); err != nil { +- return nil, nil, err +- } +- var modBytes, sumBytes []byte +- modBytes, err = os.ReadFile(filepath.Join(tempDir, "go.mod")) +- if err != nil && !os.IsNotExist(err) { +- return nil, nil, err +- } +- sumBytes, err = os.ReadFile(filepath.Join(tempDir, "go.sum")) +- if err != nil && !os.IsNotExist(err) { +- return nil, nil, err +- } +- return modBytes, sumBytes, nil +-} +- +-// TempModDir creates a temporary directory with the contents of the provided +-// modURI, as well as its corresponding go.sum file, if it exists. On success, +-// it is the caller's responsibility to call the cleanup function to remove the +-// directory when it is no longer needed. +-func TempModDir(ctx context.Context, fs file.Source, modURI protocol.DocumentURI) (dir string, _ func(), rerr error) { +- dir, err := os.MkdirTemp("", "gopls-tempmod") +- if err != nil { +- return "", nil, err +- } +- cleanup := func() { +- if err := os.RemoveAll(dir); err != nil { +- event.Error(ctx, "cleaning temp dir", err) +- } +- } +- defer func() { +- if rerr != nil { +- cleanup() +- } +- }() +- +- // If go.mod exists, write it. +- modFH, err := fs.ReadFile(ctx, modURI) +- if err != nil { +- return "", nil, err // context cancelled +- } +- if data, err := modFH.Content(); err == nil { +- if err := os.WriteFile(filepath.Join(dir, "go.mod"), data, 0666); err != nil { +- return "", nil, err +- } +- } +- +- // If go.sum exists, write it. +- sumURI := protocol.DocumentURI(strings.TrimSuffix(string(modURI), ".mod") + ".sum") +- sumFH, err := fs.ReadFile(ctx, sumURI) +- if err != nil { +- return "", nil, err // context cancelled +- } +- if data, err := sumFH.Content(); err == nil { +- if err := os.WriteFile(filepath.Join(dir, "go.sum"), data, 0666); err != nil { +- return "", nil, err +- } +- } +- +- return dir, cleanup, nil +-} +- +-// AllowNetwork determines whether Go commands are permitted to use the +-// network. (Controlled via GOPROXY=off.) +-type AllowNetwork bool +- +-const ( +- NoNetwork AllowNetwork = false +- NetworkOK AllowNetwork = true +-) +- +-// GoCommandInvocation populates inv with configuration for running go commands +-// on the snapshot. +-// +-// On success, the caller must call the cleanup function exactly once +-// when the invocation is no longer needed. +-// +-// TODO(rfindley): although this function has been simplified significantly, +-// additional refactoring is still required: the responsibility for Env and +-// BuildFlags should be more clearly expressed in the API. +-// +-// If allowNetwork is NoNetwork, set GOPROXY=off. +-func (s *Snapshot) GoCommandInvocation(allowNetwork AllowNetwork, dir, verb string, args []string, env ...string) (_ *gocommand.Invocation, cleanup func(), _ error) { +- inv := &gocommand.Invocation{ +- Verb: verb, +- Args: args, +- WorkingDir: dir, +- Env: append(s.view.Env(), env...), +- BuildFlags: slices.Clone(s.Options().BuildFlags), +- } +- if !allowNetwork { +- inv.Env = append(inv.Env, "GOPROXY=off") +- } +- +- // Write overlay files for unsaved editor buffers. 
+- overlay, cleanup, err := gocommand.WriteOverlays(s.buildOverlays()) +- if err != nil { +- return nil, nil, err +- } +- inv.Overlay = overlay +- return inv, cleanup, nil +-} +- +-// buildOverlays returns a new mapping from logical file name to +-// effective content, for each unsaved editor buffer, in the same form +-// as [packages.Cfg]'s Overlay field. +-func (s *Snapshot) buildOverlays() map[string][]byte { +- overlays := make(map[string][]byte) +- for _, overlay := range s.Overlays() { +- if overlay.saved { +- continue +- } +- // TODO(rfindley): previously, there was a todo here to make sure we don't +- // send overlays outside of the current view. IMO we should instead make +- // sure this doesn't matter. +- overlays[overlay.URI().Path()] = overlay.content +- } +- return overlays +-} +- +-// Overlays returns the set of overlays at this snapshot. +-// +-// Note that this may differ from the set of overlays on the server, if the +-// snapshot observed a historical state. +-func (s *Snapshot) Overlays() []*overlay { +- s.mu.Lock() +- defer s.mu.Unlock() +- +- return s.files.getOverlays() +-} +- +-// Package data kinds, identifying various package data that may be stored in +-// the file cache. +-const ( +- xrefsKind = "xrefs" +- methodSetsKind = "methodsets" +- testsKind = "tests" +- exportDataKind = "export" +- diagnosticsKind = "diagnostics" +- typerefsKind = "typerefs" +- symbolsKind = "symbols" +-) +- +-// PackageDiagnostics returns diagnostics for files contained in specified +-// packages. +-// +-// If these diagnostics cannot be loaded from cache, the requested packages +-// may be type-checked. +-func (s *Snapshot) PackageDiagnostics(ctx context.Context, ids ...PackageID) (map[protocol.DocumentURI][]*Diagnostic, error) { +- ctx, done := event.Start(ctx, "cache.snapshot.PackageDiagnostics") +- defer done() +- +- var mu sync.Mutex +- perFile := make(map[protocol.DocumentURI][]*Diagnostic) +- collect := func(diags []*Diagnostic) { +- mu.Lock() +- defer mu.Unlock() +- for _, diag := range diags { +- perFile[diag.URI] = append(perFile[diag.URI], diag) +- } +- } +- pre := func(_ int, ph *packageHandle) bool { +- data, err := filecache.Get(diagnosticsKind, ph.key) +- if err == nil { // hit +- collect(ph.loadDiagnostics) +- collect(decodeDiagnostics(data)) +- return false +- } else if err != filecache.ErrNotFound { +- event.Error(ctx, "reading diagnostics from filecache", err) +- } +- return true +- } +- post := func(_ int, pkg *Package) { +- collect(pkg.loadDiagnostics) +- collect(pkg.pkg.diagnostics) +- } +- return perFile, s.forEachPackage(ctx, ids, pre, post) +-} +- +-// References returns cross-reference indexes for the specified packages. +-// +-// If these indexes cannot be loaded from cache, the requested packages may +-// be type-checked. 
+-func (s *Snapshot) References(ctx context.Context, ids ...PackageID) ([]xrefIndex, error) { +- ctx, done := event.Start(ctx, "cache.snapshot.References") +- defer done() +- +- indexes := make([]xrefIndex, len(ids)) +- pre := func(i int, ph *packageHandle) bool { +- data, err := filecache.Get(xrefsKind, ph.key) +- if err == nil { // hit +- indexes[i] = xrefIndex{mp: ph.mp, data: data} +- return false +- } else if err != filecache.ErrNotFound { +- event.Error(ctx, "reading xrefs from filecache", err) +- } +- return true +- } +- post := func(i int, pkg *Package) { +- indexes[i] = xrefIndex{mp: pkg.metadata, data: pkg.pkg.xrefs()} +- } +- return indexes, s.forEachPackage(ctx, ids, pre, post) +-} +- +-// An xrefIndex is a helper for looking up references in a given package. +-type xrefIndex struct { +- mp *metadata.Package +- data []byte +-} +- +-func (index xrefIndex) Lookup(targets map[PackagePath]map[objectpath.Path]struct{}) []protocol.Location { +- return xrefs.Lookup(index.mp, index.data, targets) +-} +- +-// MethodSets returns method-set indexes for the specified packages. +-// +-// If these indexes cannot be loaded from cache, the requested packages may +-// be type-checked. +-func (s *Snapshot) MethodSets(ctx context.Context, ids ...PackageID) ([]*methodsets.Index, error) { +- ctx, done := event.Start(ctx, "cache.snapshot.MethodSets") +- defer done() +- +- indexes := make([]*methodsets.Index, len(ids)) +- pre := func(i int, ph *packageHandle) bool { +- data, err := filecache.Get(methodSetsKind, ph.key) +- if err == nil { // hit +- indexes[i] = methodsets.Decode(ph.mp.PkgPath, data) +- return false +- } else if err != filecache.ErrNotFound { +- event.Error(ctx, "reading methodsets from filecache", err) +- } +- return true +- } +- post := func(i int, pkg *Package) { +- indexes[i] = pkg.pkg.methodsets() +- } +- return indexes, s.forEachPackage(ctx, ids, pre, post) +-} +- +-// Tests returns test-set indexes for the specified packages. There is a +-// one-to-one correspondence between ID and Index. +-// +-// If these indexes cannot be loaded from cache, the requested packages may be +-// type-checked. +-func (s *Snapshot) Tests(ctx context.Context, ids ...PackageID) ([]*testfuncs.Index, error) { +- ctx, done := event.Start(ctx, "cache.snapshot.Tests") +- defer done() +- +- indexes := make([]*testfuncs.Index, len(ids)) +- pre := func(i int, ph *packageHandle) bool { +- data, err := filecache.Get(testsKind, ph.key) +- if err == nil { // hit +- indexes[i] = testfuncs.Decode(data) +- return false +- } else if err != filecache.ErrNotFound { +- event.Error(ctx, "reading tests from filecache", err) +- } +- return true +- } +- post := func(i int, pkg *Package) { +- indexes[i] = pkg.pkg.tests() +- } +- return indexes, s.forEachPackage(ctx, ids, pre, post) +-} +- +-// NarrowestMetadataForFile returns metadata for the narrowest package +-// (the one with the fewest files) that encloses the specified file. +-// The result may be a test variant, but never an intermediate test variant. +-func (snapshot *Snapshot) NarrowestMetadataForFile(ctx context.Context, uri protocol.DocumentURI) (*metadata.Package, error) { +- mps, err := snapshot.MetadataForFile(ctx, uri, true) +- if err != nil { +- return nil, err +- } +- if len(mps) == 0 { +- return nil, fmt.Errorf("no package metadata for file %s", uri) +- } +- return mps[0], nil +-} +- +-// MetadataForFile returns a new slice containing metadata for each +-// package containing the Go file identified by uri, ordered by the +-// number of CompiledGoFiles (i.e. 
"narrowest" to "widest" package), +-// and secondarily by IsIntermediateTestVariant (false < true). +-// The result may include tests and intermediate test variants of +-// importable packages. If removeIntermediateTestVariants is provided, +-// intermediate test variants will be excluded. +-// It returns an error if the context was cancelled. +-func (s *Snapshot) MetadataForFile(ctx context.Context, uri protocol.DocumentURI, removeIntermediateTestVariants bool) ([]*metadata.Package, error) { +- if s.view.typ == AdHocView { +- // As described in golang/go#57209, in ad-hoc workspaces (where we load ./ +- // rather than ./...), preempting the directory load with file loads can +- // lead to an inconsistent outcome, where certain files are loaded with +- // command-line-arguments packages and others are loaded only in the ad-hoc +- // package. Therefore, ensure that the workspace is loaded before doing any +- // file loads. +- if err := s.awaitLoaded(ctx); err != nil { +- return nil, err +- } +- } +- +- s.mu.Lock() +- +- // Start with the set of package associations derived from the last load. +- pkgs := s.meta.ForFile[uri] +- +- shouldLoad := false // whether any packages containing uri are marked 'shouldLoad' +- for _, pkg := range pkgs { +- if p, _ := s.shouldLoad.Get(pkg.ID); len(p) > 0 { +- shouldLoad = true +- } +- } +- +- // Check if uri is known to be unloadable. +- unloadable := s.unloadableFiles.Contains(uri) +- +- s.mu.Unlock() +- +- // Reload if loading is likely to improve the package associations for uri: +- // - uri is not contained in any valid packages +- // - ...or one of the packages containing uri is marked 'shouldLoad' +- // - ...but uri is not unloadable +- if (shouldLoad || len(pkgs) == 0) && !unloadable { +- scope := fileLoadScope(uri) +- err := s.load(ctx, NoNetwork, scope) +- +- // Return the context error here as the current operation is no longer +- // valid. +- if err != nil { +- // Guard against failed loads due to context cancellation. We don't want +- // to mark loads as completed if they failed due to context cancellation. +- if ctx.Err() != nil { +- return nil, ctx.Err() +- } +- +- // Don't return an error here, as we may still return stale IDs. +- // Furthermore, the result of MetadataForFile should be consistent upon +- // subsequent calls, even if the file is marked as unloadable. +- if !errors.Is(err, errNoPackages) { +- event.Error(ctx, "MetadataForFile", err) +- } +- } +- +- // We must clear scopes after loading. +- // +- // TODO(rfindley): unlike reloadWorkspace, this is simply marking loaded +- // packages as loaded. We could do this from snapshot.load and avoid +- // raciness. +- s.clearShouldLoad(scope) +- } +- +- // Retrieve the metadata. +- s.mu.Lock() +- defer s.mu.Unlock() +- // TODO(rfindley): is there any reason not to make the sorting below the +- // canonical sorting, so that we don't need to mutate this slice? +- metas := slices.Clone(s.meta.ForFile[uri]) +- +- // Metadata is only ever added by loading, +- // so if we get here and still have +- // no packages, uri is unloadable. +- if !unloadable && len(metas) == 0 { +- s.unloadableFiles.Add(uri) +- } +- +- if removeIntermediateTestVariants { +- metadata.RemoveIntermediateTestVariants(&metas) +- } +- +- // Sort packages "narrowest" to "widest" (in practice: +- // non-tests before tests), and regular packages before +- // their intermediate test variants (which have the same +- // files but different imports). 
+- slices.SortFunc(metas, func(x, y *metadata.Package) int { +- if sign := cmp.Compare(len(x.CompiledGoFiles), len(y.CompiledGoFiles)); sign != 0 { +- return sign +- } +- // Skip ITV-specific ordering if they were removed. +- if removeIntermediateTestVariants { +- return 0 +- } +- return boolCompare(x.IsIntermediateTestVariant(), y.IsIntermediateTestVariant()) +- }) +- +- return metas, nil +-} +- +-// btoi returns int(b) as proposed in #64825. +-func btoi(b bool) int { +- if b { +- return 1 +- } else { +- return 0 +- } +-} +- +-// boolCompare is a comparison function for booleans, returning -1 if x < y, 0 +-// if x == y, and 1 if x > y, where false < true. +-func boolCompare(x, y bool) int { +- return btoi(x) - btoi(y) +-} +- +-// ReverseDependencies returns a new mapping whose entries are +-// the ID and Metadata of each package in the workspace that +-// directly or transitively depend on the package denoted by id, +-// excluding id itself. +-func (s *Snapshot) ReverseDependencies(ctx context.Context, id PackageID, transitive bool) (map[PackageID]*metadata.Package, error) { +- if err := s.awaitLoaded(ctx); err != nil { +- return nil, err +- } +- +- meta := s.MetadataGraph() +- var rdeps map[PackageID]*metadata.Package +- if transitive { +- rdeps = meta.ReverseReflexiveTransitiveClosure(id) +- +- // Remove the original package ID from the map. +- // (Callers all want irreflexivity but it's easier +- // to compute reflexively then subtract.) +- delete(rdeps, id) +- +- } else { +- // direct reverse dependencies +- rdeps = make(map[PackageID]*metadata.Package) +- for _, rdep := range meta.ImportedBy[id] { +- rdeps[rdep.ID] = rdep +- } +- } +- +- return rdeps, nil +-} +- +-// See Session.FileWatchingGlobPatterns for a description of gopls' file +-// watching heuristic. +-func (s *Snapshot) fileWatchingGlobPatterns() map[protocol.RelativePattern]unit { +- // Always watch files that may change the view definition. +- patterns := make(map[protocol.RelativePattern]unit) +- +- // If GOWORK is outside the folder, ensure we are watching it. +- if s.view.gowork != "" && !s.view.folder.Dir.Encloses(s.view.gowork) { +- workPattern := protocol.RelativePattern{ +- BaseURI: s.view.gowork.Dir(), +- Pattern: path.Base(string(s.view.gowork)), +- } +- patterns[workPattern] = unit{} +- } +- +- for _, glob := range s.Options().WorkspaceFiles { +- patterns[protocol.RelativePattern{Pattern: glob}] = unit{} +- } +- +- var extensions strings.Builder +- extensions.WriteString("go,mod,sum,work") +- for _, ext := range s.Options().TemplateExtensions { +- extensions.WriteString(",") +- extensions.WriteString(ext) +- } +- watchGoFiles := fmt.Sprintf("**/*.{%s}", extensions.String()) +- +- var dirs []string +- if s.view.typ.usesModules() { +- if s.view.typ == GoWorkView { +- workVendorDir := filepath.Join(s.view.gowork.DirPath(), "vendor") +- workVendorURI := protocol.URIFromPath(workVendorDir) +- patterns[protocol.RelativePattern{BaseURI: workVendorURI, Pattern: watchGoFiles}] = unit{} +- } +- +- // In module mode, watch directories containing active modules, and collect +- // these dirs for later filtering the set of known directories. +- // +- // The assumption is that the user is not actively editing non-workspace +- // modules, so don't pay the price of file watching. +- for modFile := range s.view.workspaceModFiles { +- dirs = append(dirs, modFile.DirPath()) +- +- // TODO(golang/go#64724): thoroughly test these patterns, particularly on +- // on Windows. 
+- // +- // Note that glob patterns should use '/' on Windows: +- // https://code.visualstudio.com/docs/editor/glob-patterns +- patterns[protocol.RelativePattern{BaseURI: modFile.Dir(), Pattern: watchGoFiles}] = unit{} +- } +- } else { +- // In non-module modes (GOPATH or AdHoc), we just watch the workspace root. +- dirs = []string{s.view.root.Path()} +- patterns[protocol.RelativePattern{Pattern: watchGoFiles}] = unit{} +- } +- +- if s.watchSubdirs() { +- // Some clients (e.g. VS Code) do not send notifications for changes to +- // directories that contain Go code (golang/go#42348). To handle this, +- // explicitly watch all of the directories in the workspace. We find them +- // by adding the directories of every file in the snapshot's workspace +- // directories. There may be thousands of patterns, each a single +- // directory. +- // +- // We compute this set by looking at files that we've previously observed. +- // This may miss changed to directories that we haven't observed, but that +- // shouldn't matter as there is nothing to invalidate (if a directory falls +- // in forest, etc). +- // +- // (A previous iteration created a single glob pattern holding a union of +- // all the directories, but this was found to cause VS Code to get stuck +- // for several minutes after a buffer was saved twice in a workspace that +- // had >8000 watched directories.) +- // +- // Some clients (notably coc.nvim, which uses watchman for globs) perform +- // poorly with a large list of individual directories. +- s.addKnownSubdirs(patterns, dirs) +- } +- +- return patterns +-} +- +-func (s *Snapshot) addKnownSubdirs(patterns map[protocol.RelativePattern]unit, wsDirs []string) { +- s.mu.Lock() +- defer s.mu.Unlock() +- +- for dir := range s.files.getDirs().All() { +- for _, wsDir := range wsDirs { +- if pathutil.InDir(wsDir, dir) { +- patterns[protocol.RelativePattern{Pattern: filepath.ToSlash(dir)}] = unit{} +- } +- } +- } +-} +- +-// watchSubdirs reports whether gopls should request separate file watchers for +-// each relevant subdirectory. This is necessary only for clients (namely VS +-// Code) that do not send notifications for individual files in a directory +-// when the entire directory is deleted. +-func (s *Snapshot) watchSubdirs() bool { +- switch p := s.Options().SubdirWatchPatterns; p { +- case settings.SubdirWatchPatternsOn: +- return true +- case settings.SubdirWatchPatternsOff: +- return false +- case settings.SubdirWatchPatternsAuto: +- // See the documentation of InternalOptions.SubdirWatchPatterns for an +- // explanation of why VS Code gets a different default value here. +- // +- // Unfortunately, there is no authoritative list of client names, nor any +- // requirements that client names do not change. We should update the VS +- // Code extension to set a default value of "subdirWatchPatterns" to "on", +- // so that this workaround is only temporary. +- switch s.Options().ClientInfo.Name { +- case "Visual Studio Code", "Visual Studio Code - Insiders": +- return true +- default: +- return false +- } +- default: +- bug.Reportf("invalid subdirWatchPatterns: %q", p) +- return false +- } +-} +- +-// filesInDir returns all files observed by the snapshot that are contained in +-// a directory with the provided URI. 
+-func (s *Snapshot) filesInDir(uri protocol.DocumentURI) []protocol.DocumentURI { +- s.mu.Lock() +- defer s.mu.Unlock() +- +- dir := uri.Path() +- if !s.files.getDirs().Contains(dir) { +- return nil +- } +- var files []protocol.DocumentURI +- for uri := range s.files.all() { +- if pathutil.InDir(dir, uri.Path()) { +- files = append(files, uri) +- } +- } +- return files +-} +- +-// WorkspaceMetadata returns a new, unordered slice containing +-// metadata for all ordinary and test packages (but not +-// intermediate test variants) in the workspace. +-// +-// The workspace is the set of modules typically defined by a +-// go.work file. It is not transitively closed: for example, +-// the standard library is not usually part of the workspace +-// even though every module in the workspace depends on it. +-// +-// Operations that must inspect all the dependencies of the +-// workspace packages should instead use AllMetadata. +-func (s *Snapshot) WorkspaceMetadata(ctx context.Context) ([]*metadata.Package, error) { +- if err := s.awaitLoaded(ctx); err != nil { +- return nil, err +- } +- +- s.mu.Lock() +- defer s.mu.Unlock() +- +- meta := make([]*metadata.Package, 0, s.workspacePackages.Len()) +- for id, _ := range s.workspacePackages.All() { +- meta = append(meta, s.meta.Packages[id]) +- } +- return meta, nil +-} +- +-// WorkspacePackages returns the map of workspace package to package path. +-// +-// The set of workspace packages is updated after every load. A package is a +-// workspace package if and only if it is present in this map. +-func (s *Snapshot) WorkspacePackages() immutable.Map[PackageID, PackagePath] { +- s.mu.Lock() +- defer s.mu.Unlock() +- return s.workspacePackages +-} +- +-// IsWorkspacePackage reports whether the given package ID refers to a +-// workspace package for the Snapshot. It is equivalent to looking up the +-// package in [Snapshot.WorkspacePackages]. +-func (s *Snapshot) IsWorkspacePackage(id PackageID) bool { +- s.mu.Lock() +- defer s.mu.Unlock() +- _, ok := s.workspacePackages.Value(id) +- return ok +-} +- +-// AllMetadata returns a new unordered array of metadata for +-// all packages known to this snapshot, which includes the +-// packages of all workspace modules plus their transitive +-// import dependencies. +-// +-// It may also contain ad-hoc packages for standalone files. +-// It includes all test variants. +-// +-// TODO(rfindley): Replace usage of function this with s.LoadMetadataGraph(). +-func (s *Snapshot) AllMetadata(ctx context.Context) ([]*metadata.Package, error) { +- g, err := s.LoadMetadataGraph(ctx) +- if err != nil { +- return nil, err +- } +- return moremaps.ValueSlice(g.Packages), nil +-} +- +-// GoModForFile returns the URI of the go.mod file for the given URI. +-// +-// TODO(rfindley): clarify that this is only active modules. Or update to just +-// use findRootPattern. +-func (s *Snapshot) GoModForFile(uri protocol.DocumentURI) protocol.DocumentURI { +- return moduleForURI(s.view.workspaceModFiles, uri) +-} +- +-func moduleForURI(modFiles map[protocol.DocumentURI]struct{}, uri protocol.DocumentURI) protocol.DocumentURI { +- var match protocol.DocumentURI +- for modURI := range modFiles { +- if !modURI.Dir().Encloses(uri) { +- continue +- } +- if len(modURI) > len(match) { +- match = modURI +- } +- } +- return match +-} +- +-// Metadata returns the metadata for the specified package, +-// or nil if it was not found. 
+-func (s *Snapshot) Metadata(id PackageID) *metadata.Package { +- s.mu.Lock() +- defer s.mu.Unlock() +- return s.meta.Packages[id] +-} +- +-// clearShouldLoad clears package IDs that no longer need to be reloaded after +-// scopes has been loaded. +-func (s *Snapshot) clearShouldLoad(scopes ...loadScope) { +- s.mu.Lock() +- defer s.mu.Unlock() +- +- for _, scope := range scopes { +- switch scope := scope.(type) { +- case packageLoadScope: +- scopePath := PackagePath(scope) +- var toDelete []PackageID +- for id, pkgPaths := range s.shouldLoad.All() { +- if slices.Contains(pkgPaths, scopePath) { +- toDelete = append(toDelete, id) +- } +- } +- for _, id := range toDelete { +- s.shouldLoad.Delete(id) +- } +- case fileLoadScope: +- uri := protocol.DocumentURI(scope) +- for _, pkg := range s.meta.ForFile[uri] { +- s.shouldLoad.Delete(pkg.ID) +- } +- } +- } +-} +- +-// FindFile returns the FileHandle for the given URI, if it is already +-// in the given snapshot. +-// TODO(adonovan): delete this operation; use ReadFile instead. +-func (s *Snapshot) FindFile(uri protocol.DocumentURI) file.Handle { +- s.mu.Lock() +- defer s.mu.Unlock() +- +- result, _ := s.files.get(uri) +- return result +-} +- +-// ReadFile returns a File for the given URI. If the file is unknown it is added +-// to the managed set. +-// +-// ReadFile succeeds even if the file does not exist. A non-nil error return +-// indicates some type of internal error, for example if ctx is cancelled. +-func (s *Snapshot) ReadFile(ctx context.Context, uri protocol.DocumentURI) (file.Handle, error) { +- s.mu.Lock() +- defer s.mu.Unlock() +- +- return lockedSnapshot{s}.ReadFile(ctx, uri) +-} +- +-// lockedSnapshot implements the file.Source interface, while holding s.mu. +-// +-// TODO(rfindley): This unfortunate type had been eliminated, but it had to be +-// restored to fix golang/go#65801. We should endeavor to remove it again. +-type lockedSnapshot struct { +- s *Snapshot +-} +- +-func (s lockedSnapshot) ReadFile(ctx context.Context, uri protocol.DocumentURI) (file.Handle, error) { +- fh, ok := s.s.files.get(uri) +- if !ok { +- var err error +- fh, err = s.s.view.fs.ReadFile(ctx, uri) +- if err != nil { +- return nil, err +- } +- s.s.files.set(uri, fh) +- } +- return fh, nil +-} +- +-// preloadFiles delegates to the view FileSource to read the requested uris in +-// parallel, without holding the snapshot lock. +-func (s *Snapshot) preloadFiles(ctx context.Context, uris []protocol.DocumentURI) { +- files := make([]file.Handle, len(uris)) +- var wg sync.WaitGroup +- iolimit := make(chan struct{}, 20) // I/O concurrency limiting semaphore +- for i, uri := range uris { +- wg.Add(1) +- iolimit <- struct{}{} +- go func(i int, uri protocol.DocumentURI) { +- defer wg.Done() +- fh, err := s.view.fs.ReadFile(ctx, uri) +- <-iolimit +- if err != nil && ctx.Err() == nil { +- event.Error(ctx, fmt.Sprintf("reading %s", uri), err) +- return +- } +- files[i] = fh +- }(i, uri) +- } +- wg.Wait() +- +- s.mu.Lock() +- defer s.mu.Unlock() +- +- for i, fh := range files { +- if fh == nil { +- continue // error logged above +- } +- uri := uris[i] +- if _, ok := s.files.get(uri); !ok { +- s.files.set(uri, fh) +- } +- } +-} +- +-// IsOpen returns whether the editor currently has a file open. +-func (s *Snapshot) IsOpen(uri protocol.DocumentURI) bool { +- s.mu.Lock() +- defer s.mu.Unlock() +- +- fh, _ := s.files.get(uri) +- _, open := fh.(*overlay) +- return open +-} +- +-// MetadataGraph returns the current metadata graph for the Snapshot. 
+-func (s *Snapshot) MetadataGraph() *metadata.Graph { +- s.mu.Lock() +- defer s.mu.Unlock() +- return s.meta +-} +- +-// LoadMetadataGraph is like [Snapshot.MetadataGraph], but awaits snapshot reloading. +-func (s *Snapshot) LoadMetadataGraph(ctx context.Context) (*metadata.Graph, error) { +- if err := s.awaitLoaded(ctx); err != nil { +- return nil, err +- } +- return s.MetadataGraph(), nil +-} +- +-// InitializationError returns the last error from initialization. +-func (s *Snapshot) InitializationError() *InitializationError { +- s.mu.Lock() +- defer s.mu.Unlock() +- return s.initialErr +-} +- +-// awaitLoaded awaits initialization and package reloading, and returns +-// ctx.Err(). +-func (s *Snapshot) awaitLoaded(ctx context.Context) error { +- // Do not return results until the snapshot's view has been initialized. +- s.AwaitInitialized(ctx) +- s.reloadWorkspace(ctx) +- return ctx.Err() +-} +- +-// AwaitInitialized waits until the snapshot's view is initialized. +-func (s *Snapshot) AwaitInitialized(ctx context.Context) { +- select { +- case <-ctx.Done(): +- return +- case <-s.view.initialWorkspaceLoad: +- } +- // We typically prefer to run something as intensive as the IWL without +- // blocking. I'm not sure if there is a way to do that here. +- s.initialize(ctx, false) +-} +- +-// reloadWorkspace reloads the metadata for all invalidated workspace packages. +-func (s *Snapshot) reloadWorkspace(ctx context.Context) { +- if ctx.Err() != nil { +- return +- } +- +- var scopes []loadScope +- var seen map[PackagePath]bool +- s.mu.Lock() +- for _, pkgPaths := range s.shouldLoad.All() { +- for _, pkgPath := range pkgPaths { +- if seen == nil { +- seen = make(map[PackagePath]bool) +- } +- if seen[pkgPath] { +- continue +- } +- seen[pkgPath] = true +- scopes = append(scopes, packageLoadScope(pkgPath)) +- } +- } +- s.mu.Unlock() +- +- if len(scopes) == 0 { +- return +- } +- +- // For an ad-hoc view, we cannot reload by package path. Just reload the view. +- if s.view.typ == AdHocView { +- scopes = []loadScope{viewLoadScope{}} +- } +- +- err := s.load(ctx, NoNetwork, scopes...) +- +- // Unless the context was canceled, set "shouldLoad" to false for all +- // of the metadata we attempted to load. +- if !errors.Is(err, context.Canceled) { +- s.clearShouldLoad(scopes...) +- if err != nil { +- event.Error(ctx, "reloading workspace", err, s.Labels()...) +- } +- } +-} +- +-func (s *Snapshot) orphanedFileDiagnostics(ctx context.Context, overlays []*overlay) ([]*Diagnostic, error) { +- if err := s.awaitLoaded(ctx); err != nil { +- return nil, err +- } +- +- var diagnostics []*Diagnostic +- var orphaned []*overlay +-searchOverlays: +- for _, o := range overlays { +- uri := o.URI() +- if s.IsBuiltin(uri) || s.FileKind(o) != file.Go { +- continue +- } +- mps, err := s.MetadataForFile(ctx, uri, true) +- if err != nil { +- return nil, err +- } +- for _, mp := range mps { +- if !metadata.IsCommandLineArguments(mp.ID) || mp.Standalone { +- continue searchOverlays +- } +- } +- +- // With zero-config gopls (golang/go#57979), orphaned file diagnostics +- // include diagnostics for orphaned files -- not just diagnostics relating +- // to the reason the files are opened. +- // +- // This is because orphaned files are never considered part of a workspace +- // package: if they are loaded by a view, that view is arbitrary, and they +- // may be loaded by multiple views. If they were to be diagnosed by +- // multiple views, their diagnostics may become inconsistent. 
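AwaitInitialized above blocks on the view's initial workspace load while still honoring caller cancellation. The same select pattern in a self-contained sketch, with initialLoad standing in for view.initialWorkspaceLoad:

package main

import (
	"context"
	"fmt"
	"time"
)

// awaitInitialized blocks until the initial-load signal fires, but gives up
// early if the caller's context is cancelled.
func awaitInitialized(ctx context.Context, initialLoad <-chan struct{}) bool {
	select {
	case <-ctx.Done():
		return false
	case <-initialLoad:
		return true
	}
}

func main() {
	initialLoad := make(chan struct{})
	go func() {
		time.Sleep(10 * time.Millisecond) // pretend the initial load finishes
		close(initialLoad)
	}()

	ctx, cancel := context.WithTimeout(context.Background(), time.Second)
	defer cancel()
	fmt.Println(awaitInitialized(ctx, initialLoad)) // true
}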
+- if len(mps) > 0 { +- diags, err := s.PackageDiagnostics(ctx, mps[0].ID) +- if err != nil { +- return nil, err +- } +- diagnostics = append(diagnostics, diags[uri]...) +- } +- orphaned = append(orphaned, o) +- } +- +- if len(orphaned) == 0 { +- return nil, nil +- } +- +- loadedModFiles := make(map[protocol.DocumentURI]struct{}) // all mod files, including dependencies +- ignoredFiles := make(map[protocol.DocumentURI]bool) // files reported in packages.Package.IgnoredFiles +- +- g := s.MetadataGraph() +- for _, meta := range g.Packages { +- if meta.Module != nil && meta.Module.GoMod != "" { +- gomod := protocol.URIFromPath(meta.Module.GoMod) +- loadedModFiles[gomod] = struct{}{} +- } +- for _, ignored := range meta.IgnoredFiles { +- ignoredFiles[ignored] = true +- } +- } +- +- initialErr := s.InitializationError() +- +- for _, fh := range orphaned { +- pgf, rng, ok := orphanedFileDiagnosticRange(ctx, s.view.parseCache, fh) +- if !ok { +- continue // e.g. cancellation or parse error +- } +- +- var ( +- msg string // if non-empty, report a diagnostic with this message +- suggestedFixes []SuggestedFix // associated fixes, if any +- ) +- if initialErr != nil { +- msg = fmt.Sprintf("initialization failed: %v", initialErr.MainError) +- } else if goMod, err := findRootPattern(ctx, fh.URI().Dir(), "go.mod", file.Source(s)); err == nil && goMod != "" { +- // Check if the file's module should be loadable by considering both +- // loaded modules and workspace modules. The former covers cases where +- // the file is outside of a workspace folder. The latter covers cases +- // where the file is inside a workspace module, but perhaps no packages +- // were loaded for that module. +- _, loadedMod := loadedModFiles[goMod] +- _, workspaceMod := s.view.workspaceModFiles[goMod] +- // If we have a relevant go.mod file, check whether the file is orphaned +- // due to its go.mod file being inactive. We could also offer a +- // prescriptive diagnostic in the case that there is no go.mod file, but +- // it is harder to be precise in that case, and less important. +- if !(loadedMod || workspaceMod) { +- modDir := goMod.DirPath() +- viewDir := s.view.folder.Dir.Path() +- +- // When the module is underneath the view dir, we offer +- // "use all modules" quick-fixes. +- inDir := pathutil.InDir(viewDir, modDir) +- +- if rel, err := filepath.Rel(viewDir, modDir); err == nil { +- modDir = rel +- } +- +- var fix string +- if s.view.folder.Env.GoVersion >= 18 { +- if s.view.gowork != "" { +- fix = fmt.Sprintf("To fix this problem, you can add this module to your go.work file (%s)", s.view.gowork) +- +- cmd := command.NewRunGoWorkCommandCommand("Run `go work use`", command.RunGoWorkArgs{ +- ViewID: s.view.ID(), +- Args: []string{"use", modDir}, +- }) +- suggestedFixes = append(suggestedFixes, SuggestedFix{ +- Title: "Use this module in your go.work file", +- Command: cmd, +- ActionKind: protocol.QuickFix, +- }) +- +- if inDir { +- cmd := command.NewRunGoWorkCommandCommand("Run `go work use -r`", command.RunGoWorkArgs{ +- ViewID: s.view.ID(), +- Args: []string{"use", "-r", "."}, +- }) +- suggestedFixes = append(suggestedFixes, SuggestedFix{ +- Title: "Use all modules in your workspace", +- Command: cmd, +- ActionKind: protocol.QuickFix, +- }) +- } +- } else { +- fix = "To fix this problem, you can add a go.work file that uses this directory." 
+- +- cmd := command.NewRunGoWorkCommandCommand("Run `go work init && go work use`", command.RunGoWorkArgs{ +- ViewID: s.view.ID(), +- InitFirst: true, +- Args: []string{"use", modDir}, +- }) +- suggestedFixes = []SuggestedFix{ +- { +- Title: "Add a go.work file using this module", +- Command: cmd, +- ActionKind: protocol.QuickFix, +- }, +- } +- +- if inDir { +- cmd := command.NewRunGoWorkCommandCommand("Run `go work init && go work use -r`", command.RunGoWorkArgs{ +- ViewID: s.view.ID(), +- InitFirst: true, +- Args: []string{"use", "-r", "."}, +- }) +- suggestedFixes = append(suggestedFixes, SuggestedFix{ +- Title: "Add a go.work file using all modules in your workspace", +- Command: cmd, +- ActionKind: protocol.QuickFix, +- }) +- } +- } +- } else { +- fix = `To work with multiple modules simultaneously, please upgrade to Go 1.18 or +-later, reinstall gopls, and use a go.work file.` +- } +- +- msg = fmt.Sprintf(`This file is within module %q, which is not included in your workspace. +-%s +-See the documentation for more information on setting up your workspace: +-https://github.com/golang/tools/blob/master/gopls/doc/workspace.md.`, modDir, fix) +- } +- } +- +- if msg == "" { +- if ignoredFiles[fh.URI()] { +- // TODO(rfindley): use the constraint package to check if the file +- // _actually_ satisfies the current build context. +- hasConstraint := false +- walkConstraints(pgf.File, func(constraint.Expr) bool { +- hasConstraint = true +- return false +- }) +- var fix string +- if hasConstraint { +- fix = `This file may be excluded due to its build tags; try adding "-tags=" to your gopls "buildFlags" configuration +-See the documentation for more information on working with build tags: +-https://github.com/golang/tools/blob/master/gopls/doc/settings.md#buildflags.` +- } else if strings.Contains(fh.URI().Base(), "_") { +- fix = `This file may be excluded due to its GOOS/GOARCH, or other build constraints.` +- } else { +- fix = `This file is ignored by your gopls build.` // we don't know why +- } +- msg = fmt.Sprintf("No packages found for open file %s.\n%s", fh.URI().Path(), fix) +- } else { +- // Fall back: we're not sure why the file is orphaned. +- // TODO(rfindley): we could do better here, diagnosing the lack of a +- // go.mod file and malformed file names (see the perc%ent marker test). +- msg = fmt.Sprintf("No packages found for open file %s.", fh.URI().Path()) +- } +- } +- +- if msg != "" { +- d := &Diagnostic{ +- URI: fh.URI(), +- Range: rng, +- Severity: protocol.SeverityWarning, +- Source: ListError, +- Message: msg, +- SuggestedFixes: suggestedFixes, +- } +- if ok := bundleLazyFixes(d); !ok { +- bug.Reportf("failed to bundle quick fixes for %v", d) +- } +- // Only report diagnostics if we detect an actual exclusion. +- diagnostics = append(diagnostics, d) +- } +- } +- return diagnostics, nil +-} +- +-// orphanedFileDiagnosticRange returns the position to use for orphaned file diagnostics. +-// We only warn about an orphaned file if it is well-formed enough to actually +-// be part of a package. Otherwise, we need more information. 
+-func orphanedFileDiagnosticRange(ctx context.Context, cache *parseCache, fh file.Handle) (*parsego.File, protocol.Range, bool) { +- pgfs, err := cache.parseFiles(ctx, token.NewFileSet(), parsego.Header, false, fh) +- if err != nil { +- return nil, protocol.Range{}, false +- } +- pgf := pgfs[0] +- name := pgf.File.Name +- if !name.Pos().IsValid() { +- return nil, protocol.Range{}, false +- } +- rng, err := pgf.PosRange(name.Pos(), name.End()) +- if err != nil { +- return nil, protocol.Range{}, false +- } +- return pgf, rng, true +-} +- +-// TODO(golang/go#53756): this function needs to consider more than just the +-// absolute URI, for example: +-// - the position of /vendor/ with respect to the relevant module root +-// - whether or not go.work is in use (as vendoring isn't supported in workspace mode) +-// +-// Most likely, each call site of inVendor needs to be reconsidered to +-// understand and correctly implement the desired behavior. +-func inVendor(uri protocol.DocumentURI) bool { +- _, after, found := strings.Cut(string(uri), "/vendor/") +- // Only subdirectories of /vendor/ are considered vendored +- // (/vendor/a/foo.go is vendored, /vendor/foo.go is not). +- return found && strings.Contains(after, "/") +-} +- +-// clone copies state from the receiver into a new Snapshot, applying the given +-// state changes. +-// +-// The caller of clone must call Snapshot.decref on the returned +-// snapshot when they are finished using it. +-// +-// The resulting bool reports whether the change invalidates any derived +-// diagnostics for the snapshot, for example because it invalidates Packages or +-// parsed go.mod files. This is used to mark a view as needing diagnosis in the +-// server. +-// +-// TODO(rfindley): long term, it may be better to move responsibility for +-// diagnostics into the Snapshot (e.g. a Snapshot.Diagnostics method), at which +-// point the Snapshot could be responsible for tracking and forwarding a +-// 'viewsToDiagnose' field. As is, this field is instead externalized in the +-// server.viewsToDiagnose map. Moving it to the snapshot would entirely +-// eliminate any 'relevance' heuristics from Session.DidModifyFiles, but would +-// also require more strictness about diagnostic dependencies. For example, +-// template.Diagnostics currently re-parses every time: there is no Snapshot +-// data responsible for providing these diagnostics. +-func (s *Snapshot) clone(ctx, bgCtx context.Context, changed StateChange, done func()) (*Snapshot, bool) { +- changedFiles := changed.Files +- ctx, stop := event.Start(ctx, "cache.snapshot.clone") +- defer stop() +- +- s.mu.Lock() +- defer s.mu.Unlock() +- +- // TODO(rfindley): reorganize this function to make the derivation of +- // needsDiagnosis clearer. +- needsDiagnosis := len(changed.CompilerOptDetails) > 0 || len(changed.ModuleUpgrades) > 0 || len(changed.Vulns) > 0 +- +- bgCtx, cancel := context.WithCancel(bgCtx) +- result := &Snapshot{ +- sequenceID: s.sequenceID + 1, +- store: s.store, +- refcount: 1, // Snapshots are born referenced. 
+- done: done, +- view: s.view, +- backgroundCtx: bgCtx, +- cancel: cancel, +- builtin: s.builtin, +- initialized: s.initialized, +- initialErr: s.initialErr, +- packages: s.packages.Clone(), +- fullAnalysisKeys: s.fullAnalysisKeys.Clone(), +- factyAnalysisKeys: s.factyAnalysisKeys.Clone(), +- files: s.files.clone(changedFiles), +- workspacePackages: s.workspacePackages, +- shouldLoad: s.shouldLoad.Clone(), // not cloneWithout: shouldLoad is cleared on loads +- unloadableFiles: s.unloadableFiles.Clone(), // not cloneWithout: typing in a file doesn't necessarily make it loadable +- parseModHandles: cloneWithout(s.parseModHandles, changedFiles, &needsDiagnosis), +- parseWorkHandles: cloneWithout(s.parseWorkHandles, changedFiles, &needsDiagnosis), +- modTidyHandles: cloneWithout(s.modTidyHandles, changedFiles, &needsDiagnosis), +- modWhyHandles: cloneWithout(s.modWhyHandles, changedFiles, &needsDiagnosis), +- modVulnHandles: cloneWithout(s.modVulnHandles, changedFiles, &needsDiagnosis), +- moduleUpgrades: cloneWith(s.moduleUpgrades, changed.ModuleUpgrades), +- vulns: cloneWith(s.vulns, changed.Vulns), +- } +- +- // Compute the new set of packages for which we want compiler +- // optimization details, after applying changed.CompilerOptDetails. +- if len(s.compilerOptDetails) > 0 || len(changed.CompilerOptDetails) > 0 { +- newCompilerOptDetails := make(map[protocol.DocumentURI]unit) +- for dir := range s.compilerOptDetails { +- if _, ok := changed.CompilerOptDetails[dir]; !ok { +- newCompilerOptDetails[dir] = unit{} // no change +- } +- } +- for dir, want := range changed.CompilerOptDetails { +- if want { +- newCompilerOptDetails[dir] = unit{} +- } +- } +- if len(newCompilerOptDetails) > 0 { +- result.compilerOptDetails = newCompilerOptDetails +- } +- } +- +- reinit := false +- for _, mod := range changed.Modifications { +- // Changes to vendor tree may require reinitialization, +- // either because of an initialization error +- // (e.g. "inconsistent vendoring detected"), or because +- // one or more modules may have moved into or out of the +- // vendor tree after 'go mod vendor' or 'rm -fr vendor/'. +- // +- // In this case, we consider the actual modification to see if was a creation +- // or deletion. +- // +- // TODO(rfindley): revisit the location of this check. +- if inVendor(mod.URI) && (mod.Action == file.Create || mod.Action == file.Delete) || +- strings.HasSuffix(string(mod.URI), "/vendor/modules.txt") { +- +- reinit = true +- break +- } +- +- // Changes to workspace files, as a rule of thumb, should require reinitialization. Since their behavior +- // is generally user-defined, we want to do something sensible by re-triggering a query to the active GOPACKAGESDRIVER, +- // and reloading the state of the workspace. +- if isWorkspaceFile(mod.URI, s.view.folder.Options.WorkspaceFiles) && (mod.Action == file.Save || mod.OnDisk) { +- reinit = true +- break +- } +- } +- +- // Collect observed file handles for changed URIs from the old snapshot, if +- // they exist. Importantly, we don't call ReadFile here: consider the case +- // where a file is added on disk; we don't want to read the newly added file +- // into the old snapshot, as that will break our change detection below. +- // +- // TODO(rfindley): it may be more accurate to rely on the modification type +- // here, similarly to what we do for vendored files above. If we happened not +- // to have read a file in the previous snapshot, that's not the same as it +- // actually being created. 
+- oldFiles := make(map[protocol.DocumentURI]file.Handle) +- for uri := range changedFiles { +- if fh, ok := s.files.get(uri); ok { +- oldFiles[uri] = fh +- } +- } +- // changedOnDisk determines if the new file handle may have changed on disk. +- // It over-approximates, returning true if the new file is saved and either +- // the old file wasn't saved, or the on-disk contents changed. +- // +- // oldFH may be nil. +- changedOnDisk := func(oldFH, newFH file.Handle) bool { +- if !newFH.SameContentsOnDisk() { +- return false +- } +- if oe, ne := (oldFH != nil && fileExists(oldFH)), fileExists(newFH); !oe || !ne { +- return oe != ne +- } +- return !oldFH.SameContentsOnDisk() || oldFH.Identity() != newFH.Identity() +- } +- +- // Reinitialize if any workspace mod file has changed on disk. +- for uri, newFH := range changedFiles { +- if _, ok := result.view.workspaceModFiles[uri]; ok && changedOnDisk(oldFiles[uri], newFH) { +- reinit = true +- } +- } +- +- // Finally, process sumfile changes that may affect loading. +- for uri, newFH := range changedFiles { +- if !changedOnDisk(oldFiles[uri], newFH) { +- continue // like with go.mod files, we only reinit when things change on disk +- } +- dir, base := filepath.Split(uri.Path()) +- if base == "go.work.sum" && s.view.typ == GoWorkView && dir == s.view.gowork.DirPath() { +- reinit = true +- } +- if base == "go.sum" { +- modURI := protocol.URIFromPath(filepath.Join(dir, "go.mod")) +- if _, active := result.view.workspaceModFiles[modURI]; active { +- reinit = true +- } +- } +- } +- +- // The snapshot should be initialized if either s was uninitialized, or we've +- // detected a change that triggers reinitialization. +- if reinit { +- result.initialized = false +- needsDiagnosis = true +- } +- +- // directIDs keeps track of package IDs that have directly changed. +- // Note: this is not a set, it's a map from id to invalidateMetadata. +- directIDs := map[PackageID]bool{} +- +- // Invalidate all package metadata if the workspace module has changed. +- if reinit { +- for k := range s.meta.Packages { +- // TODO(rfindley): this seems brittle; can we just start over? +- directIDs[k] = true +- } +- } +- +- // Compute invalidations based on file changes. +- anyImportDeleted := false // import deletions can resolve cycles +- anyFileOpenedOrClosed := false // opened files affect workspace packages +- anyPkgFileChanged := false // adding a file to a package can resolve missing dependencies +- +- for uri, newFH := range changedFiles { +- // The original FileHandle for this URI is cached on the snapshot. +- oldFH := oldFiles[uri] // may be nil +- _, oldOpen := oldFH.(*overlay) +- _, newOpen := newFH.(*overlay) +- +- // TODO(rfindley): consolidate with 'metadataChanges' logic below, which +- // also considers existential changes. +- anyFileOpenedOrClosed = anyFileOpenedOrClosed || (oldOpen != newOpen) +- anyPkgFileChanged = anyPkgFileChanged || (oldFH == nil || !fileExists(oldFH)) && fileExists(newFH) +- +- // If uri is a Go file, check if it has changed in a way that would +- // invalidate metadata. Note that we can't use s.view.FileKind here, +- // because the file type that matters is not what the *client* tells us, +- // but what the Go command sees. +- var invalidateMetadata, pkgFileChanged, importDeleted bool +- if strings.HasSuffix(uri.Path(), ".go") { +- invalidateMetadata, pkgFileChanged, importDeleted = metadataChanges(ctx, s, oldFH, newFH) +- } +- if invalidateMetadata { +- // If this is a metadata-affecting change, perhaps a reload will succeed. 
+- result.unloadableFiles.Remove(uri) +- needsDiagnosis = true +- } +- +- invalidateMetadata = invalidateMetadata || reinit +- anyImportDeleted = anyImportDeleted || importDeleted +- anyPkgFileChanged = anyPkgFileChanged || pkgFileChanged +- +- // Mark all of the package IDs containing the given file. +- filePackageIDs := invalidatedPackageIDs(uri, s.meta.ForFile, pkgFileChanged) +- for id := range filePackageIDs { +- directIDs[id] = directIDs[id] || invalidateMetadata // may insert 'false' +- } +- +- // Invalidate the previous modTidyHandle if any of the files have been +- // saved or if any of the metadata has been invalidated. +- // +- // TODO(rfindley): this seems like too-aggressive invalidation of mod +- // results. We should instead thread through overlays to the Go command +- // invocation and only run this if invalidateMetadata (and perhaps then +- // still do it less frequently). +- if invalidateMetadata || fileWasSaved(oldFH, newFH) { +- // Only invalidate mod tidy results for the most relevant modfile in the +- // workspace. This is a potentially lossy optimization for workspaces +- // with many modules (such as google-cloud-go, which has 145 modules as +- // of writing). +- // +- // While it is theoretically possible that a change in workspace module A +- // could affect the mod-tidiness of workspace module B (if B transitively +- // requires A), such changes are probably unlikely and not worth the +- // penalty of re-running go mod tidy for everything. Note that mod tidy +- // ignores GOWORK, so the two modules would have to be related by a chain +- // of replace directives. +- // +- // We could improve accuracy by inspecting replace directives, using +- // overlays in go mod tidy, and/or checking for metadata changes from the +- // on-disk content. +- // +- // Note that we iterate the modTidyHandles map here, rather than e.g. +- // using nearestModFile, because we don't have access to an accurate +- // FileSource at this point in the snapshot clone. +- const onlyInvalidateMostRelevant = true +- if onlyInvalidateMostRelevant { +- deleteMostRelevantModFile(result.modTidyHandles, uri) +- } else { +- result.modTidyHandles.Clear() +- } +- +- // TODO(rfindley): should we apply the above heuristic to mod vuln or mod +- // why handles as well? +- // +- // TODO(rfindley): no tests fail if I delete the line below. +- result.modWhyHandles.Clear() +- result.modVulnHandles.Clear() +- } +- } +- +- // Deleting an import can cause list errors due to import cycles to be +- // resolved. The best we can do without parsing the list error message is to +- // hope that list errors may have been resolved by a deleted import. +- // +- // We could do better by parsing the list error message. We already do this +- // to assign a better range to the list error, but for such critical +- // functionality as metadata, it's better to be conservative until it proves +- // impractical. +- // +- // We could also do better by looking at which imports were deleted and +- // trying to find cycles they are involved in. This fails when the file goes +- // from an unparsable state to a parseable state, as we don't have a +- // starting point to compare with. +- if anyImportDeleted { +- for id, mp := range s.meta.Packages { +- if len(mp.Errors) > 0 { +- directIDs[id] = true +- } +- } +- } +- +- // Adding a file can resolve missing dependencies from existing packages. 
+- // +- // We could be smart here and try to guess which packages may have been +- // fixed, but until that proves necessary, just invalidate metadata for any +- // package with missing dependencies. +- if anyPkgFileChanged { +- for id, mp := range s.meta.Packages { +- for _, impID := range mp.DepsByImpPath { +- if impID == "" { // missing import +- directIDs[id] = true +- break +- } +- } +- } +- } +- +- // Invalidate reverse dependencies too. +- // idsToInvalidate keeps track of transitive reverse dependencies. +- // If an ID is present in the map, invalidate its types. +- // If an ID's value is true, invalidate its metadata too. +- idsToInvalidate := map[PackageID]bool{} +- var addRevDeps func(PackageID, bool) +- addRevDeps = func(id PackageID, invalidateMetadata bool) { +- current, seen := idsToInvalidate[id] +- newInvalidateMetadata := current || invalidateMetadata +- +- // If we've already seen this ID, and the value of invalidate +- // metadata has not changed, we can return early. +- if seen && current == newInvalidateMetadata { +- return +- } +- idsToInvalidate[id] = newInvalidateMetadata +- for _, rdep := range s.meta.ImportedBy[id] { +- addRevDeps(rdep.ID, invalidateMetadata) +- } +- } +- for id, invalidateMetadata := range directIDs { +- addRevDeps(id, invalidateMetadata) +- } +- +- // Invalidated package information. +- for id, invalidateMetadata := range idsToInvalidate { +- // See the [packageHandle] documentation for more details about this +- // invalidation. +- if ph, ok := result.packages.Get(id); ok { +- needsDiagnosis = true +- +- // Always invalidate analysis keys, as we do not implement fine-grained +- // invalidation for analysis. +- result.fullAnalysisKeys.Delete(id) +- result.factyAnalysisKeys.Delete(id) +- +- if invalidateMetadata { +- result.packages.Delete(id) +- } else { +- // If the package was just invalidated by a dependency, its local +- // inputs are still valid. +- ph = ph.clone() +- if _, ok := directIDs[id]; ok { +- ph.state = validMetadata // local inputs changed +- } else { +- ph.state = min(ph.state, validLocalData) // a dependency changed +- } +- result.packages.Set(id, ph, nil) +- } +- } +- } +- +- // Compute which metadata updates are required. We only need to invalidate +- // packages directly containing the affected file, and only if it changed in +- // a relevant way. +- metadataUpdates := make(map[PackageID]*metadata.Package) +- for id, mp := range s.meta.Packages { +- invalidateMetadata := idsToInvalidate[id] +- +- // For metadata that has been newly invalidated, capture package paths +- // requiring reloading in the shouldLoad map. +- if invalidateMetadata && !metadata.IsCommandLineArguments(mp.ID) { +- needsReload := []PackagePath{mp.PkgPath} +- if mp.ForTest != "" && mp.ForTest != mp.PkgPath { +- // When reloading test variants, always reload their ForTest package as +- // well. Otherwise, we may miss test variants in the resulting load. +- // +- // TODO(rfindley): is this actually sufficient? Is it possible that +- // other test variants may be invalidated? Either way, we should +- // determine exactly what needs to be reloaded here. +- needsReload = append(needsReload, mp.ForTest) +- } +- result.shouldLoad.Set(id, needsReload, nil) +- } +- +- // Check whether the metadata should be deleted. +- if invalidateMetadata { +- needsDiagnosis = true +- metadataUpdates[id] = nil +- continue +- } +- } +- +- // Update metadata, if necessary. 
+- result.meta = s.meta.Update(metadataUpdates) +- +- // Update workspace and active packages, if necessary. +- if result.meta != s.meta || anyFileOpenedOrClosed { +- needsDiagnosis = true +- result.workspacePackages = computeWorkspacePackagesLocked(ctx, result, result.meta) +- } else { +- result.workspacePackages = s.workspacePackages +- } +- +- return result, needsDiagnosis +-} +- +-// cloneWithout clones m then deletes from it the keys of changes. +-// +-// The optional didDelete variable is set to true if there were deletions. +-func cloneWithout[K constraints.Ordered, V1, V2 any](m *persistent.Map[K, V1], changes map[K]V2, didDelete *bool) *persistent.Map[K, V1] { +- m2 := m.Clone() +- for k := range changes { +- if m2.Delete(k) && didDelete != nil { +- *didDelete = true +- } +- } +- return m2 +-} +- +-// cloneWith clones m then inserts the changes into it. +-func cloneWith[K constraints.Ordered, V any](m *persistent.Map[K, V], changes map[K]V) *persistent.Map[K, V] { +- m2 := m.Clone() +- for k, v := range changes { +- m2.Set(k, v, nil) +- } +- return m2 +-} +- +-// deleteMostRelevantModFile deletes the mod file most likely to be the mod +-// file for the changed URI, if it exists. +-// +-// Specifically, this is the longest mod file path in a directory containing +-// changed. This might not be accurate if there is another mod file closer to +-// changed that happens not to be present in the map, but that's OK: the goal +-// of this function is to guarantee that IF the nearest mod file is present in +-// the map, it is invalidated. +-func deleteMostRelevantModFile(m *persistent.Map[protocol.DocumentURI, *memoize.Promise], changed protocol.DocumentURI) { +- var mostRelevant protocol.DocumentURI +- changedFile := changed.Path() +- +- for modURI := range m.All() { +- if len(modURI) > len(mostRelevant) { +- if pathutil.InDir(modURI.DirPath(), changedFile) { +- mostRelevant = modURI +- } +- } +- } +- if mostRelevant != "" { +- m.Delete(mostRelevant) +- } +-} +- +-// invalidatedPackageIDs returns all packages invalidated by a change to uri. +-// If we haven't seen this URI before, we guess based on files in the same +-// directory. This is of course incorrect in build systems where packages are +-// not organized by directory. +-// +-// If packageFileChanged is set, the file is either a new file, or has a new +-// package name. In this case, all known packages in the directory will be +-// invalidated. +-func invalidatedPackageIDs(uri protocol.DocumentURI, known map[protocol.DocumentURI][]*metadata.Package, packageFileChanged bool) map[PackageID]struct{} { +- invalidated := make(map[PackageID]struct{}) +- +- // At a minimum, we invalidate packages known to contain uri. +- for _, pkg := range known[uri] { +- invalidated[pkg.ID] = struct{}{} +- } +- +- // If the file didn't move to a new package, we should only invalidate the +- // packages it is currently contained inside. +- if !packageFileChanged && len(invalidated) > 0 { +- return invalidated +- } +- +- // This is a file we don't yet know about, or which has moved packages. Guess +- // relevant packages by considering files in the same directory. +- +- // Cache of FileInfo to avoid unnecessary stats for multiple files in the +- // same directory. 
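The cloneWithout and cloneWith helpers above operate on persistent (copy-on-write) maps from the util/persistent package; their behavior is easier to see with ordinary Go maps. Below is a rough equivalent under that simplifying assumption, keeping the same semantics: clone, then delete or insert the changed keys, optionally reporting whether anything was actually removed.

package main

import (
	"fmt"
	"maps"
)

// cloneWithout copies m and drops the keys of changes. The optional didDelete
// flag is set when at least one key was actually removed.
func cloneWithout[K comparable, V1, V2 any](m map[K]V1, changes map[K]V2, didDelete *bool) map[K]V1 {
	out := maps.Clone(m)
	for k := range changes {
		if _, ok := out[k]; ok {
			delete(out, k)
			if didDelete != nil {
				*didDelete = true
			}
		}
	}
	return out
}

// cloneWith copies m and then inserts every entry of changes.
func cloneWith[K comparable, V any](m, changes map[K]V) map[K]V {
	out := maps.Clone(m)
	maps.Copy(out, changes)
	return out
}

func main() {
	handles := map[string]int{"a/go.mod": 1, "b/go.mod": 2}
	changed := map[string]bool{"a/go.mod": true}

	var didDelete bool
	fmt.Println(cloneWithout(handles, changed, &didDelete), didDelete) // map[b/go.mod:2] true
	fmt.Println(cloneWith(handles, map[string]int{"c/go.mod": 3}))     // adds c/go.mod
}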
+- stats := make(map[string]struct { +- os.FileInfo +- error +- }) +- getInfo := func(dir string) (os.FileInfo, error) { +- if res, ok := stats[dir]; ok { +- return res.FileInfo, res.error +- } +- fi, err := os.Stat(dir) +- stats[dir] = struct { +- os.FileInfo +- error +- }{fi, err} +- return fi, err +- } +- dir := uri.DirPath() +- fi, err := getInfo(dir) +- if err == nil { +- // Aggregate all possibly relevant package IDs. +- for knownURI, pkgs := range known { +- knownDir := knownURI.DirPath() +- knownFI, err := getInfo(knownDir) +- if err != nil { +- continue +- } +- if os.SameFile(fi, knownFI) { +- for _, pkg := range pkgs { +- invalidated[pkg.ID] = struct{}{} +- } +- } +- } +- } +- return invalidated +-} +- +-// fileWasSaved reports whether the FileHandle passed in has been saved. It +-// accomplishes this by checking to see if the original and current FileHandles +-// are both overlays, and if the current FileHandle is saved while the original +-// FileHandle was not saved. +-func fileWasSaved(originalFH, currentFH file.Handle) bool { +- c, ok := currentFH.(*overlay) +- if !ok || c == nil { +- return true +- } +- o, ok := originalFH.(*overlay) +- if !ok || o == nil { +- return c.saved +- } +- return !o.saved && c.saved +-} +- +-// metadataChanges detects features of the change from oldFH->newFH that may +-// affect package metadata. +-// +-// It uses lockedSnapshot to access cached parse information. lockedSnapshot +-// must be locked. +-// +-// The result parameters have the following meaning: +-// - invalidate means that package metadata for packages containing the file +-// should be invalidated. +-// - pkgFileChanged means that the file->package associates for the file have +-// changed (possibly because the file is new, or because its package name has +-// changed). +-// - importDeleted means that an import has been deleted, or we can't +-// determine if an import was deleted due to errors. +-func metadataChanges(ctx context.Context, lockedSnapshot *Snapshot, oldFH, newFH file.Handle) (invalidate, pkgFileChanged, importDeleted bool) { +- if oe, ne := oldFH != nil && fileExists(oldFH), fileExists(newFH); !oe || !ne { // existential changes +- changed := oe != ne +- return changed, changed, !ne // we don't know if an import was deleted +- } +- +- // If the file hasn't changed, there's no need to reload. +- if oldFH.Identity() == newFH.Identity() { +- return false, false, false +- } +- +- fset := token.NewFileSet() +- // Parse headers to compare package names and imports. +- oldHeads, oldErr := lockedSnapshot.view.parseCache.parseFiles(ctx, fset, parsego.Header, false, oldFH) +- newHeads, newErr := lockedSnapshot.view.parseCache.parseFiles(ctx, fset, parsego.Header, false, newFH) +- +- if oldErr != nil || newErr != nil { +- errChanged := (oldErr == nil) != (newErr == nil) +- return errChanged, errChanged, (newErr != nil) // we don't know if an import was deleted +- } +- +- oldHead := oldHeads[0] +- newHead := newHeads[0] +- +- // `go list` fails completely if the file header cannot be parsed. If we go +- // from a non-parsing state to a parsing state, we should reload. +- if oldHead.ParseErr != nil && newHead.ParseErr == nil { +- return true, true, true // We don't know what changed, so fall back on full invalidation. +- } +- +- // If a package name has changed, the set of package imports may have changed +- // in ways we can't detect here. Assume an import has been deleted. 
+- if oldHead.File.Name.Name != newHead.File.Name.Name { +- return true, true, true +- } +- +- // Check whether package imports have changed. Only consider potentially +- // valid imports paths. +- oldImports := validImportPaths(oldHead.File.Imports) +- newImports := validImportPaths(newHead.File.Imports) +- +- for path := range newImports { +- if _, ok := oldImports[path]; ok { +- delete(oldImports, path) +- } else { +- invalidate = true // a new, potentially valid import was added +- } +- } +- +- if len(oldImports) > 0 { +- invalidate = true +- importDeleted = true +- } +- +- // If the change does not otherwise invalidate metadata, get the full ASTs in +- // order to check magic comments. +- // +- // Note: if this affects performance we can probably avoid parsing in the +- // common case by first scanning the source for potential comments. +- if !invalidate { +- origFulls, oldErr := lockedSnapshot.view.parseCache.parseFiles(ctx, fset, parsego.Full, false, oldFH) +- newFulls, newErr := lockedSnapshot.view.parseCache.parseFiles(ctx, fset, parsego.Full, false, newFH) +- if oldErr == nil && newErr == nil { +- invalidate = magicCommentsChanged(origFulls[0].File, newFulls[0].File) +- } else { +- // At this point, we shouldn't ever fail to produce a parsego.File, as +- // we're already past header parsing. +- bug.Reportf("metadataChanges: unparsable file %v (old error: %v, new error: %v)", oldFH.URI(), oldErr, newErr) +- } +- } +- +- return invalidate, pkgFileChanged, importDeleted +-} +- +-func magicCommentsChanged(original *ast.File, current *ast.File) bool { +- oldComments := extractMagicComments(original) +- newComments := extractMagicComments(current) +- if len(oldComments) != len(newComments) { +- return true +- } +- for i := range oldComments { +- if oldComments[i] != newComments[i] { +- return true +- } +- } +- return false +-} +- +-// validImportPaths extracts the set of valid import paths from imports. +-func validImportPaths(imports []*ast.ImportSpec) map[string]struct{} { +- m := make(map[string]struct{}) +- for _, spec := range imports { +- if path := spec.Path.Value; validImportPath(path) { +- m[path] = struct{}{} +- } +- } +- return m +-} +- +-func validImportPath(path string) bool { +- path, err := strconv.Unquote(path) +- if err != nil { +- return false +- } +- if path == "" { +- return false +- } +- if path[len(path)-1] == '/' { +- return false +- } +- return true +-} +- +-var buildConstraintOrEmbedRe = regexp.MustCompile(`^//(go:embed|go:build|\s*\+build).*`) +- +-// extractMagicComments finds magic comments that affect metadata in f. +-func extractMagicComments(f *ast.File) []string { +- var results []string +- for _, cg := range f.Comments { +- for _, c := range cg.List { +- if buildConstraintOrEmbedRe.MatchString(c.Text) { +- results = append(results, c.Text) +- } +- } +- } +- return results +-} +- +-// BuiltinFile returns the pseudo-source file builtins.go, +-// parsed with legacy ast.Object resolution. +-func (s *Snapshot) BuiltinFile(ctx context.Context) (*parsego.File, error) { +- s.AwaitInitialized(ctx) +- +- s.mu.Lock() +- builtin := s.builtin +- s.mu.Unlock() +- +- if builtin == "" { +- return nil, fmt.Errorf("no builtin package for view %s", s.view.folder.Name) +- } +- +- fh, err := s.ReadFile(ctx, builtin) +- if err != nil { +- return nil, err +- } +- // For the builtin file only, we need syntactic object resolution +- // (since we can't type check). 
+- mode := parsego.Full &^ parser.SkipObjectResolution +- pgfs, err := s.view.parseCache.parseFiles(ctx, token.NewFileSet(), mode, false, fh) +- if err != nil { +- return nil, err +- } +- return pgfs[0], nil +-} +- +-// IsBuiltin reports whether uri is part of the builtin package. +-func (s *Snapshot) IsBuiltin(uri protocol.DocumentURI) bool { +- s.mu.Lock() +- defer s.mu.Unlock() +- // We should always get the builtin URI in a canonical form, so use simple +- // string comparison here. span.CompareURI is too expensive. +- return uri == s.builtin +-} +- +-func (s *Snapshot) setBuiltin(path string) { +- s.mu.Lock() +- defer s.mu.Unlock() +- +- s.builtin = protocol.URIFromPath(path) +-} +- +-// WantCompilerOptDetails reports whether to compute compiler +-// optimization details for packages and tests in the given directory. +-func (s *Snapshot) WantCompilerOptDetails(dir protocol.DocumentURI) bool { +- _, ok := s.compilerOptDetails[dir] +- return ok +-} +- +-// A CodeLensSourceFunc is a function that reports CodeLenses (range-associated +-// commands) for a given file. +-type CodeLensSourceFunc func(context.Context, *Snapshot, file.Handle) ([]protocol.CodeLens, error) +diff -urN a/gopls/internal/cache/source.go b/gopls/internal/cache/source.go +--- a/gopls/internal/cache/source.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/cache/source.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,399 +0,0 @@ +-// Copyright 2025 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package cache +- +-import ( +- "context" +- "log" +- "maps" +- "slices" +- "strings" +- +- "golang.org/x/tools/gopls/internal/cache/metadata" +- "golang.org/x/tools/gopls/internal/cache/symbols" +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/gopls/internal/util/moremaps" +- "golang.org/x/tools/internal/imports" +-) +- +-// goplsSource is an imports.Source that provides import information using +-// gopls and the module cache index. +-type goplsSource struct { +- snapshot *Snapshot +- envSource *imports.ProcessEnvSource +- +- // set by each invocation of ResolveReferences +- ctx context.Context +-} +- +-func (s *Snapshot) NewGoplsSource(is *imports.ProcessEnvSource) *goplsSource { +- return &goplsSource{ +- snapshot: s, +- envSource: is, +- } +-} +- +-func (s *goplsSource) LoadPackageNames(ctx context.Context, srcDir string, paths []imports.ImportPath) (map[imports.ImportPath]imports.PackageName, error) { +- // TODO: use metadata graph. Aside from debugging, this is the only used of envSource +- return s.envSource.LoadPackageNames(ctx, srcDir, paths) +-} +- +-type result struct { +- res *imports.Result +- deprecated bool +-} +- +-// ResolveReferences tries to find resolving imports in the workspace, and failing +-// that, in the module cache. It uses heuristics to decide among alternatives. +-// The heuristics will usually prefer a v2 version, if there is one. +-// TODO: It does not take advantage of hints provided by the user: +-// 1. syntactic context: pkg.Name().Foo +-// 3. already imported files in the same module +-func (s *goplsSource) ResolveReferences(ctx context.Context, filename string, missing imports.References) ([]*imports.Result, error) { +- s.ctx = ctx +- // get results from the workspace. 
There will at most one for each package name +- fromWS, err := s.resolveWorkspaceReferences(filename, missing) +- if err != nil { +- return nil, err +- } +- // collect the ones that are still +- needed := maps.Clone(missing) +- for _, a := range fromWS { +- delete(needed, a.Package.Name) +- } +- // when debug (below) is gone, change this to: if len(needed) == 0 {return fromWS, nil} +- var fromCache []*result +- if len(needed) != 0 { +- var err error +- fromCache, err = s.resolveCacheReferences(needed) +- if err != nil { +- return nil, err +- } +- // trim cans to one per missing package. +- byPkgNm := make(map[string][]*result) +- for _, c := range fromCache { +- byPkgNm[c.res.Package.Name] = append(byPkgNm[c.res.Package.Name], c) +- } +- for k, v := range byPkgNm { +- fromWS = append(fromWS, s.bestCache(k, v)) +- } +- } +- const debug = false +- if debug { // debugging. +- // what does the old one find? +- old, err := s.envSource.ResolveReferences(ctx, filename, missing) +- if err != nil { +- log.Fatal(err) +- } +- log.Printf("fromCache:%d %s", len(fromCache), filename) +- for i, c := range fromCache { +- log.Printf("cans%d %#v %#v %v", i, c.res.Import, c.res.Package, c.deprecated) +- } +- for k, v := range missing { +- for x := range v { +- log.Printf("missing %s.%s", k, x) +- } +- } +- for k, v := range needed { +- for x := range v { +- log.Printf("needed %s.%s", k, x) +- } +- } +- +- dbgpr := func(hdr string, v []*imports.Result) { +- for i := range v { +- log.Printf("%s%d %+v %+v", hdr, i, v[i].Import, v[i].Package) +- } +- } +- +- dbgpr("fromWS", fromWS) +- dbgpr("old", old) +- for k, v := range s.snapshot.workspacePackages.All() { +- log.Printf("workspacePackages[%s]=%s", k, v) +- } +- // anything in ans with >1 matches? +- seen := make(map[string]int) +- for _, a := range fromWS { +- seen[a.Package.Name]++ +- } +- for k, v := range seen { +- if v > 1 { +- log.Printf("saw %d %s", v, k) +- for i, x := range fromWS { +- if x.Package.Name == k { +- log.Printf("%d: %+v %+v", i, x.Package, x.Import) +- } +- } +- } +- } +- } +- return fromWS, nil +- +-} +- +-func (s *goplsSource) resolveCacheReferences(missing imports.References) ([]*result, error) { +- ix, err := s.snapshot.view.ModcacheIndex() +- if err != nil { +- return nil, err +- } +- +- found := make(map[string]*result) +- for pkgName, nameSet := range missing { +- names := moremaps.KeySlice(nameSet) +- for importPath, cands := range ix.LookupAll(pkgName, names...) 
{ +- res := found[importPath] +- if res == nil { +- res = &result{ +- res: &imports.Result{ +- Import: &imports.ImportInfo{ +- ImportPath: importPath, +- }, +- Package: &imports.PackageInfo{ +- Name: pkgName, +- Exports: make(map[string]bool)}, +- }, +- deprecated: false, +- } +- found[importPath] = res +- } +- for _, c := range cands { +- res.res.Package.Exports[c.Name] = true +- // The import path is deprecated if a symbol that would be used is deprecated +- res.deprecated = res.deprecated || c.Deprecated +- } +- } +- +- } +- return moremaps.ValueSlice(found), nil +-} +- +-type found struct { +- sym *symbols.Package +- res *imports.Result +-} +- +-func (s *goplsSource) resolveWorkspaceReferences(filename string, missing imports.References) ([]*imports.Result, error) { +- uri := protocol.URIFromPath(filename) +- mypkgs, err := s.snapshot.MetadataForFile(s.ctx, uri, false) +- if err != nil { +- return nil, err +- } +- if len(mypkgs) == 0 { +- return nil, nil +- } +- mypkg := mypkgs[0] // narrowest package +- // search the metadata graph for package ids correstponding to missing +- g := s.snapshot.MetadataGraph() +- var ids []metadata.PackageID +- var pkgs []*metadata.Package +- for pid, pkg := range g.Packages { +- // no test packages, except perhaps for ourselves +- if pkg.ForTest != "" && pkg != mypkg { +- continue +- } +- if missingWants(missing, pkg.Name) { +- ids = append(ids, pid) +- pkgs = append(pkgs, pkg) +- } +- } +- // find the symbols in those packages +- // the syms occur in the same order as the ids and the pkgs +- syms, err := s.snapshot.Symbols(s.ctx, ids...) +- if err != nil { +- return nil, err +- } +- // keep track of used syms and found results by package name +- // TODO: avoid import cycles (is current package in forward closure) +- founds := make(map[string][]found) +- for i := range len(ids) { +- nm := string(pkgs[i].Name) +- if satisfies(syms[i], missing[nm]) { +- got := &imports.Result{ +- Import: &imports.ImportInfo{ +- Name: "", +- ImportPath: string(pkgs[i].PkgPath), +- }, +- Package: &imports.PackageInfo{ +- Name: string(pkgs[i].Name), +- Exports: missing[imports.PackageName(pkgs[i].Name)], +- }, +- } +- founds[nm] = append(founds[nm], found{syms[i], got}) +- } +- } +- var ans []*imports.Result +- for _, v := range founds { +- // make sure the elements of v are unique +- // (Import.ImportPath or Package.Name must differ) +- cmp := func(l, r found) int { +- switch strings.Compare(l.res.Import.ImportPath, r.res.Import.ImportPath) { +- case -1: +- return -1 +- case 1: +- return 1 +- } +- return strings.Compare(l.res.Package.Name, r.res.Package.Name) +- } +- slices.SortFunc(v, cmp) +- newv := make([]found, 0, len(v)) +- newv = append(newv, v[0]) +- for i := 1; i < len(v); i++ { +- if cmp(v[i], v[i-1]) != 0 { +- newv = append(newv, v[i]) +- } +- } +- ans = append(ans, bestImport(filename, newv)) +- } +- return ans, nil +-} +- +-// for each package name, choose one using heuristics +-func bestImport(filename string, got []found) *imports.Result { +- if len(got) == 1 { +- return got[0].res +- } +- isTestFile := strings.HasSuffix(filename, "_test.go") +- var leftovers []found +- for _, g := range got { +- // don't use _test packages unless isTestFile +- testPkg := strings.HasSuffix(string(g.res.Package.Name), "_test") || strings.HasSuffix(string(g.res.Import.Name), "_test") +- if testPkg && !isTestFile { +- continue // no test covers this +- } +- if imports.CanUse(filename, g.sym.Files[0].DirPath()) { +- leftovers = append(leftovers, g) +- } +- } +- switch 
len(leftovers) { +- case 0: +- break // use got, they are all bad +- case 1: +- return leftovers[0].res // only one left +- default: +- got = leftovers // filtered some out +- } +- +- // TODO: if there are versions (like /v2) prefer them +- +- // use distance to common ancestor with filename +- // (TestDirectoryFilters_MultiRootImportScanning) +- // filename is .../a/main.go, choices are +- // .../a/hi/hi.go and .../b/hi/hi.go +- longest := -1 +- ix := -1 +- for i := 0; i < len(got); i++ { +- d := commonpref(filename, got[i].sym.Files[0].Path()) +- if d > longest { +- longest = d +- ix = i +- } +- } +- // it is possible that there were several tied, but we return the first +- return got[ix].res +-} +- +-// choose the best result for the package named nm from the module cache +-func (s *goplsSource) bestCache(nm string, got []*result) *imports.Result { +- if len(got) == 1 { +- return got[0].res +- } +- // does the go.mod file choose one? +- if ans := s.fromGoMod(got); ans != nil { +- return ans +- } +- got = preferUndeprecated(got) +- // want the best Import.ImportPath +- // these are all for the package named nm, +- // nm (probably) occurs in all the paths; +- // choose the longest (after nm), so as to get /v2 +- maxlen, which := -1, -1 +- for i := 0; i < len(got); i++ { +- ix := strings.Index(got[i].res.Import.ImportPath, nm) +- if ix == -1 { +- continue // now what? +- } +- cnt := len(got[i].res.Import.ImportPath) - ix +- if cnt > maxlen { +- maxlen = cnt +- which = i +- } +- // what about ties? (e.g., /v2 and /v3) +- } +- if which >= 0 { +- return got[which].res +- } +- return got[0].res // arbitrary guess +-} +- +-// if go.mod requires one of the packages, return that +-func (s *goplsSource) fromGoMod(got []*result) *imports.Result { +- // should we use s.S.view.worsspaceModFiles, and the union of their requires? +- // (note that there are no tests where it contains more than one) +- modURI := s.snapshot.view.gomod +- modfh, ok := s.snapshot.files.get(modURI) +- if !ok { +- return nil +- } +- parsed, err := s.snapshot.ParseMod(s.ctx, modfh) +- if err != nil { +- return nil +- } +- reqs := parsed.File.Require +- for _, g := range got { +- for _, req := range reqs { +- if strings.HasPrefix(g.res.Import.ImportPath, req.Syntax.Token[1]) { +- return g.res +- } +- } +- } +- return nil +-} +- +-func commonpref(filename string, path string) int { +- k := 0 +- for ; k < len(filename) && k < len(path) && filename[k] == path[k]; k++ { +- } +- return k +-} +- +-func satisfies(pkg *symbols.Package, missing map[string]bool) bool { +- syms := make(map[string]bool) +- for _, x := range pkg.Symbols { +- for _, s := range x { +- syms[s.Name] = true +- } +- } +- for k := range missing { +- if !syms[k] { +- return false +- } +- } +- return true +-} +- +-// does pkgPath potentially satisfy a missing reference? 
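The tie-break in bestImport above prefers the candidate package whose source file shares the longest path prefix with the file being edited (commonpref). That heuristic in isolation, using hypothetical paths modeled on the TestDirectoryFilters_MultiRootImportScanning scenario mentioned in the comment:

package main

import "fmt"

// commonPrefixLen returns the length of the shared byte prefix of two paths,
// the same measure computed by commonpref above.
func commonPrefixLen(a, b string) int {
	k := 0
	for k < len(a) && k < len(b) && a[k] == b[k] {
		k++
	}
	return k
}

// closestCandidate picks the candidate path sharing the longest prefix with
// filename; ties keep the first candidate seen, as in bestImport.
func closestCandidate(filename string, candidates []string) string {
	best, longest := "", -1
	for _, c := range candidates {
		if d := commonPrefixLen(filename, c); d > longest {
			longest, best = d, c
		}
	}
	return best
}

func main() {
	filename := "/work/a/main.go"
	candidates := []string{"/work/a/hi/hi.go", "/work/b/hi/hi.go"}
	fmt.Println(closestCandidate(filename, candidates)) // /work/a/hi/hi.go
}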
+-func missingWants(missing imports.References, pkgPath metadata.PackageName) bool { +- for k := range missing { +- if string(k) == string(pkgPath) { +- return true +- } +- } +- return false +-} +- +-// If there are both deprecated and undprecated ones +-// then return only the undeprecated one +-func preferUndeprecated(got []*result) []*result { +- var ok []*result +- for _, g := range got { +- if !g.deprecated { +- ok = append(ok, g) +- } +- } +- if len(ok) > 0 { +- return ok +- } +- return got +-} +diff -urN a/gopls/internal/cache/symbols/symbols.go b/gopls/internal/cache/symbols/symbols.go +--- a/gopls/internal/cache/symbols/symbols.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/cache/symbols/symbols.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,186 +0,0 @@ +-// Copyright 2021 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-// Package symbols defines the serializable index of package symbols extracted +-// from parsed package files. +-package symbols +- +-import ( +- "go/ast" +- "go/token" +- "go/types" +- "strings" +- +- "golang.org/x/tools/gopls/internal/cache/parsego" +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/gopls/internal/util/frob" +- "golang.org/x/tools/internal/astutil" +-) +- +-// Symbol holds a precomputed symbol value. This is a subset of the information +-// in the full protocol.SymbolInformation struct to reduce the size of each +-// symbol. +-type Symbol struct { +- Name string +- Kind protocol.SymbolKind +- Range protocol.Range +-} +- +-// A Package holds information about symbols declared by each file of a +-// package. +-// +-// The symbols included are: package-level declarations, and fields and methods +-// of type declarations. +-type Package struct { +- Files []protocol.DocumentURI // package files +- Symbols [][]Symbol // symbols in each file +-} +- +-var codec = frob.CodecFor[Package]() +- +-// Decode decodes data from [Package.Encode]. +-func Decode(data []byte) *Package { +- var pkg Package +- codec.Decode(data, &pkg) +- return &pkg +-} +- +-// Encode encodes the package. +-func (pkg *Package) Encode() []byte { +- return codec.Encode(*pkg) +-} +- +-// New returns a new [Package] summarizing symbols in the given files. +-func New(files []*parsego.File) *Package { +- var ( +- uris []protocol.DocumentURI +- symbols [][]Symbol +- ) +- for _, pgf := range files { +- uris = append(uris, pgf.URI) +- syms := symbolizeFile(pgf) +- symbols = append(symbols, syms) +- } +- return &Package{ +- Files: uris, +- Symbols: symbols, +- } +-} +- +-// symbolizeFile reads and parses a file and extracts symbols from it. 
+-func symbolizeFile(pgf *parsego.File) []Symbol { +- w := &symbolWalker{ +- nodeRange: pgf.NodeRange, +- } +- +- for _, decl := range pgf.File.Decls { +- switch decl := decl.(type) { +- case *ast.FuncDecl: +- kind := protocol.Function +- var recv *ast.Ident +- if decl.Recv.NumFields() > 0 { +- kind = protocol.Method +- _, recv, _ = astutil.UnpackRecv(decl.Recv.List[0].Type) +- } +- w.declare(decl.Name.Name, kind, decl.Name, recv) +- +- case *ast.GenDecl: +- for _, spec := range decl.Specs { +- switch spec := spec.(type) { +- case *ast.TypeSpec: +- kind := protocol.Class +- switch spec.Type.(type) { +- case *ast.InterfaceType: +- kind = protocol.Interface +- case *ast.StructType: +- kind = protocol.Struct +- case *ast.FuncType: +- kind = protocol.Function +- } +- w.declare(spec.Name.Name, kind, spec.Name) +- w.walkType(spec.Type, spec.Name) +- case *ast.ValueSpec: +- for _, name := range spec.Names { +- kind := protocol.Variable +- if decl.Tok == token.CONST { +- kind = protocol.Constant +- } +- w.declare(name.Name, kind, name) +- } +- } +- } +- } +- } +- +- return w.symbols +-} +- +-type symbolWalker struct { +- nodeRange func(node ast.Node) (protocol.Range, error) // for computing positions +- +- symbols []Symbol +-} +- +-// declare declares a symbol of the specified name, kind, node location, and enclosing dotted path of identifiers. +-func (w *symbolWalker) declare(name string, kind protocol.SymbolKind, node ast.Node, path ...*ast.Ident) { +- var b strings.Builder +- for _, ident := range path { +- if ident != nil { +- b.WriteString(ident.Name) +- b.WriteString(".") +- } +- } +- b.WriteString(name) +- +- rng, err := w.nodeRange(node) +- if err != nil { +- // TODO(rfindley): establish an invariant that node positions cannot exceed +- // the file. This is not currently the case--for example see +- // golang/go#48300 (this can also happen due to phantom selectors). +- // +- // For now, we have nothing to do with this error. +- return +- } +- sym := Symbol{ +- Name: b.String(), +- Kind: kind, +- Range: rng, +- } +- w.symbols = append(w.symbols, sym) +-} +- +-// walkType processes symbols related to a type expression. path is path of +-// nested type identifiers to the type expression. +-func (w *symbolWalker) walkType(typ ast.Expr, path ...*ast.Ident) { +- switch st := typ.(type) { +- case *ast.StructType: +- for _, field := range st.Fields.List { +- w.walkField(field, protocol.Field, protocol.Field, path...) +- } +- case *ast.InterfaceType: +- for _, field := range st.Methods.List { +- w.walkField(field, protocol.Interface, protocol.Method, path...) +- } +- } +-} +- +-// walkField processes symbols related to the struct field or interface method. +-// +-// unnamedKind and namedKind are the symbol kinds if the field is resp. unnamed +-// or named. path is the path of nested identifiers containing the field. +-func (w *symbolWalker) walkField(field *ast.Field, unnamedKind, namedKind protocol.SymbolKind, path ...*ast.Ident) { +- if len(field.Names) == 0 { +- switch typ := field.Type.(type) { +- case *ast.SelectorExpr: +- // embedded qualified type +- w.declare(typ.Sel.Name, unnamedKind, field, path...) +- default: +- w.declare(types.ExprString(field.Type), unnamedKind, field, path...) +- } +- } +- for _, name := range field.Names { +- w.declare(name.Name, namedKind, name, path...) +- w.walkType(field.Type, append(path, name)...) 
+- } +-} +diff -urN a/gopls/internal/cache/symbols.go b/gopls/internal/cache/symbols.go +--- a/gopls/internal/cache/symbols.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/cache/symbols.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,103 +0,0 @@ +-// Copyright 2021 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package cache +- +-import ( +- "context" +- "crypto/sha256" +- "fmt" +- "go/parser" +- "go/token" +- "runtime" +- +- "golang.org/x/sync/errgroup" +- "golang.org/x/tools/gopls/internal/cache/metadata" +- "golang.org/x/tools/gopls/internal/cache/parsego" +- "golang.org/x/tools/gopls/internal/cache/symbols" +- "golang.org/x/tools/gopls/internal/file" +- "golang.org/x/tools/gopls/internal/filecache" +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/gopls/internal/util/bug" +- "golang.org/x/tools/internal/event" +-) +- +-// Symbols extracts and returns symbol information for every file contained in +-// a loaded package. It awaits snapshot loading. +-// +-// If workspaceOnly is set, this only includes symbols from files in a +-// workspace package. Otherwise, it returns symbols from all loaded packages. +-func (s *Snapshot) Symbols(ctx context.Context, ids ...PackageID) ([]*symbols.Package, error) { +- meta := s.MetadataGraph() +- +- res := make([]*symbols.Package, len(ids)) +- var g errgroup.Group +- g.SetLimit(runtime.GOMAXPROCS(-1)) // symbolizing is cpu bound +- for i, id := range ids { +- g.Go(func() error { +- mp := meta.Packages[id] +- if mp == nil { +- return bug.Errorf("missing metadata for %q", id) +- } +- +- key, fhs, err := symbolKey(ctx, mp, s) +- if err != nil { +- return err +- } +- +- if data, err := filecache.Get(symbolsKind, key); err == nil { +- res[i] = symbols.Decode(data) +- return nil +- } else if err != filecache.ErrNotFound { +- bug.Reportf("internal error reading symbol data: %v", err) +- } +- +- pgfs, err := s.view.parseCache.parseFiles(ctx, token.NewFileSet(), parsego.Full&^parser.ParseComments, false, fhs...) +- if err != nil { +- return err +- } +- pkg := symbols.New(pgfs) +- +- // Store the resulting data in the cache. 
+- go func() { +- data := pkg.Encode() +- if err := filecache.Set(symbolsKind, key, data); err != nil { +- event.Error(ctx, fmt.Sprintf("storing symbol data for %s", id), err) +- } +- }() +- +- res[i] = pkg +- return nil +- }) +- } +- +- return res, g.Wait() +-} +- +-func symbolKey(ctx context.Context, mp *metadata.Package, fs file.Source) (file.Hash, []file.Handle, error) { +- seen := make(map[protocol.DocumentURI]bool) +- var fhs []file.Handle +- for _, list := range [][]protocol.DocumentURI{mp.GoFiles, mp.CompiledGoFiles} { +- for _, uri := range list { +- if !seen[uri] { +- seen[uri] = true +- fh, err := fs.ReadFile(ctx, uri) +- if err != nil { +- return file.Hash{}, nil, err // context cancelled +- } +- fhs = append(fhs, fh) +- } +- } +- } +- +- hasher := sha256.New() +- fmt.Fprintf(hasher, "symbols: %s\n", mp.PkgPath) +- fmt.Fprintf(hasher, "files: %d\n", len(fhs)) +- for _, fh := range fhs { +- fmt.Fprintln(hasher, fh.Identity()) +- } +- var hash file.Hash +- hasher.Sum(hash[:0]) +- return hash, fhs, nil +-} +diff -urN a/gopls/internal/cache/testfuncs/match.go b/gopls/internal/cache/testfuncs/match.go +--- a/gopls/internal/cache/testfuncs/match.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/cache/testfuncs/match.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,116 +0,0 @@ +-// Copyright 2024 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package testfuncs +- +-import ( +- "fmt" +- "strconv" +- "strings" +-) +- +-// The functions in this file are copies of those from the testing package. +-// +-// https://cs.opensource.google/go/go/+/refs/tags/go1.22.5:src/testing/match.go +- +-// uniqueName creates a unique name for the given parent and subname by affixing +-// it with one or more counts, if necessary. +-func (b *indexBuilder) uniqueName(parent, subname string) string { +- base := parent + "/" + subname +- +- for { +- n := b.subNames[base] +- if n < 0 { +- panic("subtest count overflow") +- } +- b.subNames[base] = n + 1 +- +- if n == 0 && subname != "" { +- prefix, nn := parseSubtestNumber(base) +- if len(prefix) < len(base) && nn < b.subNames[prefix] { +- // This test is explicitly named like "parent/subname#NN", +- // and #NN was already used for the NNth occurrence of "parent/subname". +- // Loop to add a disambiguating suffix. +- continue +- } +- return base +- } +- +- name := fmt.Sprintf("%s#%02d", base, n) +- if b.subNames[name] != 0 { +- // This is the nth occurrence of base, but the name "parent/subname#NN" +- // collides with the first occurrence of a subtest *explicitly* named +- // "parent/subname#NN". Try the next number. +- continue +- } +- +- return name +- } +-} +- +-// parseSubtestNumber splits a subtest name into a "#%02d"-formatted int +-// suffix (if present), and a prefix preceding that suffix (always). +-func parseSubtestNumber(s string) (prefix string, nn int) { +- i := strings.LastIndex(s, "#") +- if i < 0 { +- return s, 0 +- } +- +- prefix, suffix := s[:i], s[i+1:] +- if len(suffix) < 2 || (len(suffix) > 2 && suffix[0] == '0') { +- // Even if suffix is numeric, it is not a possible output of a "%02" format +- // string: it has either too few digits or too many leading zeroes. +- return s, 0 +- } +- if suffix == "00" { +- if !strings.HasSuffix(prefix, "/") { +- // We only use "#00" as a suffix for subtests named with the empty +- // string — it isn't a valid suffix if the subtest name is non-empty. 
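symbolKey above builds a content-addressed cache key by hashing the package path together with the identity of every Go file in the package, so cached symbol data is reused only while none of its inputs change. A stripped-down version of that keying scheme, using plain strings as stand-ins for file identities:

package main

import (
	"crypto/sha256"
	"fmt"
)

// cacheKey hashes a package path plus per-file identities (for example
// content hashes or modification stamps) into a fixed-size key, in the same
// spirit as symbolKey above.
func cacheKey(pkgPath string, fileIdentities []string) [sha256.Size]byte {
	h := sha256.New()
	fmt.Fprintf(h, "symbols: %s\n", pkgPath)
	fmt.Fprintf(h, "files: %d\n", len(fileIdentities))
	for _, id := range fileIdentities {
		fmt.Fprintln(h, id)
	}
	var key [sha256.Size]byte
	h.Sum(key[:0])
	return key
}

func main() {
	k1 := cacheKey("example.com/pkg", []string{"a.go:v1", "b.go:v1"})
	k2 := cacheKey("example.com/pkg", []string{"a.go:v2", "b.go:v1"})
	fmt.Printf("%x\n%x\n", k1, k2) // different keys, because a.go changed
}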
+- return s, 0 +- } +- } +- +- n, err := strconv.ParseInt(suffix, 10, 32) +- if err != nil || n < 0 { +- return s, 0 +- } +- return prefix, int(n) +-} +- +-// rewrite rewrites a subname to having only printable characters and no white +-// space. +-func rewrite(s string) string { +- b := []byte{} +- for _, r := range s { +- switch { +- case isSpace(r): +- b = append(b, '_') +- case !strconv.IsPrint(r): +- s := strconv.QuoteRune(r) +- b = append(b, s[1:len(s)-1]...) +- default: +- b = append(b, string(r)...) +- } +- } +- return string(b) +-} +- +-func isSpace(r rune) bool { +- if r < 0x2000 { +- switch r { +- // Note: not the same as Unicode Z class. +- case '\t', '\n', '\v', '\f', '\r', ' ', 0x85, 0xA0, 0x1680: +- return true +- } +- } else { +- if r <= 0x200a { +- return true +- } +- switch r { +- case 0x2028, 0x2029, 0x202f, 0x205f, 0x3000: +- return true +- } +- } +- return false +-} +diff -urN a/gopls/internal/cache/testfuncs/tests.go b/gopls/internal/cache/testfuncs/tests.go +--- a/gopls/internal/cache/testfuncs/tests.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/cache/testfuncs/tests.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,359 +0,0 @@ +-// Copyright 2024 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package testfuncs +- +-import ( +- "go/ast" +- "go/constant" +- "go/types" +- "strings" +- "unicode" +- "unicode/utf8" +- +- "golang.org/x/tools/gopls/internal/cache/parsego" +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/gopls/internal/util/frob" +- "golang.org/x/tools/gopls/internal/util/safetoken" +-) +- +-// An Index records the test set of a package. +-type Index struct { +- pkg gobPackage +-} +- +-// Decode decodes the given gob-encoded data as an Index. +-func Decode(data []byte) *Index { +- var pkg gobPackage +- packageCodec.Decode(data, &pkg) +- return &Index{pkg} +-} +- +-// Encode encodes the receiver as gob-encoded data. +-func (index *Index) Encode() []byte { +- return packageCodec.Encode(index.pkg) +-} +- +-func (index *Index) All() []Result { +- var results []Result +- for _, file := range index.pkg.Files { +- for _, test := range file.Tests { +- results = append(results, test.result()) +- } +- } +- return results +-} +- +-// A Result reports a test function +-type Result struct { +- Location protocol.Location // location of the test +- Name string // name of the test +-} +- +-// NewIndex returns a new index of method-set information for all +-// package-level types in the specified package. +-func NewIndex(files []*parsego.File, info *types.Info) *Index { +- b := &indexBuilder{ +- fileIndex: make(map[protocol.DocumentURI]int), +- subNames: make(map[string]int), +- visited: make(map[*types.Func]bool), +- } +- return b.build(files, info) +-} +- +-// build adds to the index all tests of the specified package. +-func (b *indexBuilder) build(files []*parsego.File, info *types.Info) *Index { +- for _, file := range files { +- if !strings.HasSuffix(file.Tok.Name(), "_test.go") { +- continue +- } +- +- for _, decl := range file.File.Decls { +- decl, ok := decl.(*ast.FuncDecl) +- if !ok { +- continue +- } +- obj, ok := info.ObjectOf(decl.Name).(*types.Func) +- if !ok || !obj.Exported() { +- continue +- } +- +- // error.Error has empty Position, PkgPath, and ObjectPath. 
+- if obj.Pkg() == nil { +- continue +- } +- +- isTest, isExample := isTestOrExample(obj) +- if !isTest && !isExample { +- continue +- } +- +- var t gobTest +- t.Name = decl.Name.Name +- t.Location.URI = file.URI +- t.Location.Range, _ = file.NodeRange(decl) +- +- i, ok := b.fileIndex[t.Location.URI] +- if !ok { +- i = len(b.Files) +- b.Files = append(b.Files, gobFile{}) +- b.fileIndex[t.Location.URI] = i +- } +- +- b.Files[i].Tests = append(b.Files[i].Tests, t) +- b.visited[obj] = true +- +- // Check for subtests +- if isTest { +- b.Files[i].Tests = append(b.Files[i].Tests, b.findSubtests(t, decl.Type, decl.Body, file, files, info)...) +- } +- } +- } +- +- return &Index{pkg: b.gobPackage} +-} +- +-func (b *indexBuilder) findSubtests(parent gobTest, typ *ast.FuncType, body *ast.BlockStmt, file *parsego.File, files []*parsego.File, info *types.Info) []gobTest { +- if body == nil { +- return nil +- } +- +- // If the [testing.T] parameter is unnamed, the func cannot call +- // [testing.T.Run] and thus cannot create any subtests +- if len(typ.Params.List[0].Names) == 0 { +- return nil +- } +- +- // This "can't fail" because testKind should guarantee that the function has +- // one parameter and the check above guarantees that parameter is named +- param := info.ObjectOf(typ.Params.List[0].Names[0]) +- +- // Find statements of form t.Run(name, func(...) {...}) where t is the +- // parameter of the enclosing test function. +- var tests []gobTest +- for _, stmt := range body.List { +- expr, ok := stmt.(*ast.ExprStmt) +- if !ok { +- continue +- } +- +- call, ok := expr.X.(*ast.CallExpr) +- if !ok || len(call.Args) != 2 { +- continue +- } +- fun, ok := call.Fun.(*ast.SelectorExpr) +- if !ok || fun.Sel.Name != "Run" { +- continue +- } +- recv, ok := fun.X.(*ast.Ident) +- if !ok || info.ObjectOf(recv) != param { +- continue +- } +- +- sig, ok := info.TypeOf(call.Args[1]).(*types.Signature) +- if !ok { +- continue +- } +- if _, ok := testKind(sig); !ok { +- continue // subtest has wrong signature +- } +- +- val := info.Types[call.Args[0]].Value // may be zero +- if val == nil || val.Kind() != constant.String { +- continue +- } +- +- var t gobTest +- t.Name = b.uniqueName(parent.Name, rewrite(constant.StringVal(val))) +- t.Location.URI = file.URI +- t.Location.Range, _ = file.NodeRange(call) +- tests = append(tests, t) +- +- fn, typ, body := findFunc(files, info, body, call.Args[1]) +- if typ == nil { +- continue +- } +- +- // Function literals don't have an associated object +- if fn == nil { +- tests = append(tests, b.findSubtests(t, typ, body, file, files, info)...) +- continue +- } +- +- // Never recurse if the second argument is a top-level test function +- if isTest, _ := isTestOrExample(fn); isTest { +- continue +- } +- +- // Don't recurse into functions that have already been visited +- if b.visited[fn] { +- continue +- } +- +- b.visited[fn] = true +- tests = append(tests, b.findSubtests(t, typ, body, file, files, info)...) +- } +- return tests +-} +- +-// findFunc finds the type and body of the given expr, which may be a function +-// literal or reference to a declared function. If the expression is a declared +-// function, findFunc returns its [types.Func]. If the expression is a function +-// literal, findFunc returns nil for the first return value. If no function is +-// found, findFunc returns (nil, nil, nil). 
+-func findFunc(files []*parsego.File, info *types.Info, body *ast.BlockStmt, expr ast.Expr) (*types.Func, *ast.FuncType, *ast.BlockStmt) { +- var obj types.Object +- switch arg := expr.(type) { +- case *ast.FuncLit: +- return nil, arg.Type, arg.Body +- +- case *ast.Ident: +- obj = info.ObjectOf(arg) +- if obj == nil { +- return nil, nil, nil +- } +- +- case *ast.SelectorExpr: +- // Look for methods within the current package. We will not handle +- // imported functions and methods for now, as that would require access +- // to the source of other packages and would be substantially more +- // complex. However, those cases should be rare. +- sel, ok := info.Selections[arg] +- if !ok { +- return nil, nil, nil +- } +- obj = sel.Obj() +- +- default: +- return nil, nil, nil +- } +- +- if v, ok := obj.(*types.Var); ok { +- // TODO: Handle vars. This could handled by walking over the body (and +- // the file), but that doesn't account for assignment. If the variable +- // is assigned multiple times, we could easily get the wrong one. +- _, _ = v, body +- return nil, nil, nil +- } +- +- for _, file := range files { +- // Skip files that don't contain the object (there should only be a +- // single file that _does_ contain it) +- if _, err := safetoken.Offset(file.Tok, obj.Pos()); err != nil { +- continue +- } +- +- for _, decl := range file.File.Decls { +- decl, ok := decl.(*ast.FuncDecl) +- if !ok { +- continue +- } +- +- if info.ObjectOf(decl.Name) == obj { +- return obj.(*types.Func), decl.Type, decl.Body +- } +- } +- } +- return nil, nil, nil +-} +- +-// isTestOrExample reports whether the given func is a testing func or an +-// example func (or neither). isTestOrExample returns (true, false) for testing +-// funcs, (false, true) for example funcs, and (false, false) otherwise. +-func isTestOrExample(fn *types.Func) (isTest, isExample bool) { +- sig := fn.Type().(*types.Signature) +- if sig.Params().Len() == 0 && +- sig.Results().Len() == 0 { +- return false, isTestName(fn.Name(), "Example") +- } +- +- kind, ok := testKind(sig) +- if !ok { +- return false, false +- } +- switch kind.Name() { +- case "T": +- return isTestName(fn.Name(), "Test"), false +- case "B": +- return isTestName(fn.Name(), "Benchmark"), false +- case "F": +- return isTestName(fn.Name(), "Fuzz"), false +- default: +- return false, false // "can't happen" (see testKind) +- } +-} +- +-// isTestName reports whether name is a valid test name for the test kind +-// indicated by the given prefix ("Test", "Benchmark", etc.). +-// +-// Adapted from go/analysis/passes/tests. +-func isTestName(name, prefix string) bool { +- suffix, ok := strings.CutPrefix(name, prefix) +- if !ok { +- return false +- } +- if len(suffix) == 0 { +- // "Test" is ok. +- return true +- } +- r, _ := utf8.DecodeRuneInString(suffix) +- return !unicode.IsLower(r) +-} +- +-// testKind returns the parameter type TypeName of a test, benchmark, or fuzz +-// function (one of testing.[TBF]). +-func testKind(sig *types.Signature) (*types.TypeName, bool) { +- if sig.Params().Len() != 1 || +- sig.Results().Len() != 0 { +- return nil, false +- } +- +- ptr, ok := sig.Params().At(0).Type().(*types.Pointer) +- if !ok { +- return nil, false +- } +- +- named, ok := ptr.Elem().(*types.Named) +- if !ok || named.Obj().Pkg() == nil || named.Obj().Pkg().Path() != "testing" { +- return nil, false +- } +- +- switch named.Obj().Name() { +- case "T", "B", "F": +- return named.Obj(), true +- } +- return nil, false +-} +- +-// An indexBuilder builds an index for a single package. 
+-type indexBuilder struct { +- gobPackage +- fileIndex map[protocol.DocumentURI]int +- subNames map[string]int +- visited map[*types.Func]bool +-} +- +-// -- serial format of index -- +- +-// (The name says gob but in fact we use frob.) +-var packageCodec = frob.CodecFor[gobPackage]() +- +-// A gobPackage records the test set of each package-level type for a single package. +-type gobPackage struct { +- Files []gobFile +-} +- +-type gobFile struct { +- Tests []gobTest +-} +- +-// A gobTest records the name, type, and position of a single test. +-type gobTest struct { +- Location protocol.Location // location of the test +- Name string // name of the test +-} +- +-func (t *gobTest) result() Result { +- return Result(*t) +-} +diff -urN a/gopls/internal/cache/typerefs/doc.go b/gopls/internal/cache/typerefs/doc.go +--- a/gopls/internal/cache/typerefs/doc.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/cache/typerefs/doc.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,151 +0,0 @@ +-// Copyright 2023 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-// Package typerefs extracts symbol-level reachability information +-// from the syntax of a Go package. +-// +-// # Background +-// +-// The goal of this analysis is to determine, for each package P, a nearly +-// minimal set of packages that could affect the type checking of P. This set +-// may contain false positives, but the smaller this set the better we can +-// invalidate and prune packages in gopls. +-// +-// More precisely, for each package P we define the set of "reachable" packages +-// from P as the set of packages that may affect the (deep) export data of the +-// direct dependencies of P. By this definition, the complement of this set +-// cannot affect any information derived from type checking P, such as +-// diagnostics, cross references, or method sets. Therefore we need not +-// invalidate any results for P when a package in the complement of this set +-// changes. +-// +-// # Computing references +-// +-// For a given declaration D, references are computed based on identifiers or +-// dotted identifiers referenced in the declaration of D, that may affect +-// the type of D. However, these references reflect only local knowledge of the +-// package and its dependency metadata, and do not depend on any analysis of +-// the dependencies themselves. This allows the reference information for +-// a package to be cached independent of all others. +-// +-// Specifically, if a referring identifier I appears in the declaration, we +-// record an edge from D to each object possibly referenced by I. We search for +-// references within type syntax, but do not actually type-check, so we can't +-// reliably determine whether an expression is a type or a term, or whether a +-// function is a builtin or generic. For example, the type of x in var x = +-// p.F(W) only depends on W if p.F is a builtin or generic function, which we +-// cannot know without type-checking package p. So we may over-approximate in +-// this way. +-// +-// - If I is declared in the current package, record a reference to its +-// declaration. +-// - Otherwise, if there are any dot imports in the current +-// file and I is exported, record a (possibly dangling) edge to +-// the corresponding declaration in each dot-imported package. 
+-//
+-// If a dotted identifier q.I appears in the declaration, we
+-// perform a similar operation:
+-//
+-//   - If q is declared in the current package, we record a reference to that
+-//     object. It may be a var or const that has a field or method I.
+-//   - Otherwise, if q is a valid import name based on imports in the current file
+-//     and the provided metadata for dependency package names, record a
+-//     reference to the object I in that package.
+-//   - Additionally, handle the case where Q is exported, and Q.I may refer to
+-//     a field or method in a dot-imported package.
+-//
+-// That is essentially the entire algorithm, though there is some subtlety to
+-// visiting the set of identifiers or dotted identifiers that may affect the
+-// declaration type. See the visitDeclOrSpec function for the details of this
+-// analysis. Notably, we also skip identifiers that refer to type parameters in
+-// generic declarations.
+-//
+-// # Graph optimizations
+-//
+-// The references extracted from the syntax are used to construct
+-// edges between nodes representing declarations. Edges are of two
+-// kinds: internal references, from one package-level declaration to
+-// another; and external references, from a symbol in this package to
+-// a symbol imported from a direct dependency.
+-//
+-// Once the symbol reference graph is constructed, we find its
+-// strongly connected components (SCCs) using Tarjan's algorithm.
+-// As we coalesce the nodes of each SCC we compute the union of
+-// external references reached by each package-level declaration.
+-// The final result is the mapping from each exported package-level
+-// declaration to the set of external (imported) declarations that it
+-// reaches.
+-//
+-// Because it is common for many package members to have the same
+-// reachability, the result takes the form of a set of equivalence
+-// classes, each mapping a set of package-level declarations to a set
+-// of external symbols. We use a hash table to canonicalize sets so that
+-// repeated occurrences of the same set (which are common) are only
+-// represented once in memory or in the file system.
+-// For example, all declarations that ultimately reference only
+-// {fmt.Println,strings.Join} would be classed as equivalent.
+-//
+-// This approach was inspired by the Hash-Value Numbering (HVN)
+-// optimization described by Hardekopf and Lin. See
+-// golang.org/x/tools/go/pointer/hvn.go for an implementation. (Like
+-// pointer analysis, this problem is fundamentally one of graph
+-// reachability.) The HVN algorithm takes the compression a step
+-// further by preserving the topology of the SCC DAG, in which edges
+-// represent "is a superset of" constraints. Redundant edges that
+-// don't increase the solution can be deleted. We could apply the same
+-// technique here to further reduce the worst-case size of the result,
+-// but the current implementation seems adequate.
+-//
+-// # API
+-//
+-// The main entry point for this analysis is the [Encode] function,
+-// which implements the analysis described above for one package, and
+-// encodes the result as a binary message.
+-//
+-// The [Decode] function decodes the message into a usable form: a set
+-// of equivalence classes. The decoder uses a shared [PackageIndex] to
+-// enable more compact representations of sets of packages
+-// ([PackageSet]) during the global reachability computation.
+-// +-// The [BuildPackageGraph] constructor implements a whole-graph analysis similar +-// to that which will be implemented by gopls, but for various reasons the +-// logic for this analysis will eventually live in the +-// [golang.org/x/tools/gopls/internal/cache] package. Nevertheless, +-// BuildPackageGraph and its test serve to verify the syntactic analysis, and +-// may serve as a proving ground for new optimizations of the whole-graph analysis. +-// +-// # Export data is insufficient +-// +-// At first it may seem that the simplest way to implement this analysis would +-// be to consider the types.Packages of the dependencies of P, for example +-// during export. After all, it makes sense that the type checked packages +-// themselves could describe their dependencies. However, this does not work as +-// type information does not describe certain syntactic relationships. +-// +-// For example, the following scenarios cause type information to miss +-// syntactic relationships: +-// +-// Named type forwarding: +-// +-// package a; type A b.B +-// package b; type B int +-// +-// Aliases: +-// +-// package a; func A(f b.B) +-// package b; type B = func() +-// +-// Initializers: +-// +-// package a; var A = b.B() +-// package b; func B() string { return "hi" } +-// +-// Use of the unsafe package: +-// +-// package a; type A [unsafe.Sizeof(B{})]int +-// package b; type B struct { f1, f2, f3 int } +-// +-// In all of these examples, types do not contain information about the edge +-// between the a.A and b.B declarations. +-package typerefs +diff -urN a/gopls/internal/cache/typerefs/packageset.go b/gopls/internal/cache/typerefs/packageset.go +--- a/gopls/internal/cache/typerefs/packageset.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/cache/typerefs/packageset.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,142 +0,0 @@ +-// Copyright 2023 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package typerefs +- +-import ( +- "fmt" +- "math/bits" +- "strings" +- "sync" +- +- "golang.org/x/tools/gopls/internal/cache/metadata" +- "golang.org/x/tools/gopls/internal/util/moremaps" +-) +- +-// PackageIndex stores common data to enable efficient representation of +-// references and package sets. +-type PackageIndex struct { +- // For now, PackageIndex just indexes package ids, to save space and allow for +- // faster unions via sparse int vectors. +- mu sync.Mutex +- ids []metadata.PackageID +- m map[metadata.PackageID]IndexID +-} +- +-// NewPackageIndex creates a new PackageIndex instance for use in building +-// reference and package sets. +-func NewPackageIndex() *PackageIndex { +- return &PackageIndex{ +- m: make(map[metadata.PackageID]IndexID), +- } +-} +- +-// IndexID returns the packageIdx referencing id, creating one if id is not yet +-// tracked by the receiver. +-func (index *PackageIndex) IndexID(id metadata.PackageID) IndexID { +- index.mu.Lock() +- defer index.mu.Unlock() +- if i, ok := index.m[id]; ok { +- return i +- } +- i := IndexID(len(index.ids)) +- index.m[id] = i +- index.ids = append(index.ids, id) +- return i +-} +- +-// PackageID returns the PackageID for idx. +-// +-// idx must have been created by this PackageIndex instance. 
+-func (index *PackageIndex) PackageID(idx IndexID) metadata.PackageID { +- index.mu.Lock() +- defer index.mu.Unlock() +- return index.ids[idx] +-} +- +-// A PackageSet is a set of metadata.PackageIDs, optimized for inuse memory +-// footprint and efficient union operations. +-type PackageSet struct { +- // PackageSet is a sparse int vector of package indexes from parent. +- parent *PackageIndex +- sparse map[int]blockType // high bits in key, set of low bits in value +-} +- +-type blockType = uint // type of each sparse vector element +-const blockSize = bits.UintSize +- +-// NewSet creates a new PackageSet bound to this PackageIndex instance. +-// +-// PackageSets may only be combined with other PackageSets from the same +-// instance. +-func (index *PackageIndex) NewSet() *PackageSet { +- return &PackageSet{ +- parent: index, +- sparse: make(map[int]blockType), +- } +-} +- +-// DeclaringPackage returns the ID of the symbol's declaring package. +-// The package index must be the one used during decoding. +-func (index *PackageIndex) DeclaringPackage(sym Symbol) metadata.PackageID { +- return index.PackageID(sym.Package) +-} +- +-// Add records a new element in the package set, for the provided package ID. +-func (s *PackageSet) AddPackage(id metadata.PackageID) { +- s.Add(s.parent.IndexID(id)) +-} +- +-// Add records a new element in the package set. +-// It is the caller's responsibility to ensure that idx was created with the +-// same PackageIndex as the PackageSet. +-func (s *PackageSet) Add(idx IndexID) { +- i := int(idx) +- s.sparse[i/blockSize] |= 1 << (i % blockSize) +-} +- +-// Union records all elements from other into the receiver, mutating the +-// receiver set but not the argument set. The receiver must not be nil, but the +-// argument set may be nil. +-// +-// Precondition: both package sets were created with the same PackageIndex. +-func (s *PackageSet) Union(other *PackageSet) { +- if other == nil { +- return // e.g. unsafe +- } +- if other.parent != s.parent { +- panic("other set is from a different PackageIndex instance") +- } +- for k, v := range other.sparse { +- if v0 := s.sparse[k]; v0 != v { +- s.sparse[k] = v0 | v +- } +- } +-} +- +-// Contains reports whether id is contained in the receiver set. +-func (s *PackageSet) Contains(id metadata.PackageID) bool { +- i := int(s.parent.IndexID(id)) +- return s.sparse[i/blockSize]&(1<<(i%blockSize)) != 0 +-} +- +-// Elems calls f for each element of the set in ascending order. +-func (s *PackageSet) Elems(f func(IndexID)) { +- for i, v := range moremaps.Sorted(s.sparse) { +- for b := range blockSize { +- if (v & (1 << b)) != 0 { +- f(IndexID(i*blockSize + b)) +- } +- } +- } +-} +- +-// String returns a human-readable representation of the set: {A, B, ...}. +-func (s *PackageSet) String() string { +- var ids []string +- s.Elems(func(id IndexID) { +- ids = append(ids, string(s.parent.PackageID(id))) +- }) +- return fmt.Sprintf("{%s}", strings.Join(ids, ", ")) +-} +diff -urN a/gopls/internal/cache/typerefs/pkggraph_test.go b/gopls/internal/cache/typerefs/pkggraph_test.go +--- a/gopls/internal/cache/typerefs/pkggraph_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/cache/typerefs/pkggraph_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,243 +0,0 @@ +-// Copyright 2023 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. 
+- +-package typerefs_test +- +-// This file is logically part of the test in pkgrefs_test.go: that +-// file defines the test assertion logic; this file provides a +-// reference implementation of a client of the typerefs package. +- +-import ( +- "bytes" +- "context" +- "fmt" +- "os" +- "runtime" +- "sync" +- +- "golang.org/x/sync/errgroup" +- "golang.org/x/tools/gopls/internal/cache/metadata" +- "golang.org/x/tools/gopls/internal/cache/parsego" +- "golang.org/x/tools/gopls/internal/cache/typerefs" +- "golang.org/x/tools/gopls/internal/protocol" +-) +- +-const ( +- // trace enables additional trace output to stdout, for debugging. +- // +- // Warning: produces a lot of output! Best to run with small package queries. +- trace = false +-) +- +-// A Package holds reference information for a single package. +-type Package struct { +- // metapkg holds metapkg about this package and its dependencies. +- metapkg *metadata.Package +- +- // transitiveRefs records, for each exported declaration in the package, the +- // transitive set of packages within the containing graph that are +- // transitively reachable through references, starting with the given decl. +- transitiveRefs map[string]*typerefs.PackageSet +- +- // ReachesByDeps records the set of packages in the containing graph whose +- // syntax may affect the current package's types. See the package +- // documentation for more details of what this means. +- ReachesByDeps *typerefs.PackageSet +-} +- +-// A PackageGraph represents a fully analyzed graph of packages and their +-// dependencies. +-type PackageGraph struct { +- pkgIndex *typerefs.PackageIndex +- meta metadata.Source +- parse func(context.Context, protocol.DocumentURI) (*parsego.File, error) +- +- mu sync.Mutex +- packages map[metadata.PackageID]*futurePackage +-} +- +-// BuildPackageGraph analyzes the package graph for the requested ids, whose +-// metadata is described by meta. +-// +-// The provided parse function is used to parse the CompiledGoFiles of each package. +-// +-// The resulting PackageGraph is fully evaluated, and may be investigated using +-// the Package method. +-// +-// See the package documentation for more information on the package reference +-// algorithm. +-func BuildPackageGraph(ctx context.Context, meta metadata.Source, ids []metadata.PackageID, parse func(context.Context, protocol.DocumentURI) (*parsego.File, error)) (*PackageGraph, error) { +- g := &PackageGraph{ +- pkgIndex: typerefs.NewPackageIndex(), +- meta: meta, +- parse: parse, +- packages: make(map[metadata.PackageID]*futurePackage), +- } +- metadata.SortPostOrder(meta, ids) +- +- workers := runtime.GOMAXPROCS(0) +- if trace { +- workers = 1 +- } +- +- var eg errgroup.Group +- eg.SetLimit(workers) +- for _, id := range ids { +- eg.Go(func() error { +- _, err := g.Package(ctx, id) +- return err +- }) +- } +- return g, eg.Wait() +-} +- +-// futurePackage is a future result of analyzing a package, for use from Package only. +-type futurePackage struct { +- done chan struct{} +- pkg *Package +- err error +-} +- +-// Package gets the result of analyzing references for a single package. 
+-func (g *PackageGraph) Package(ctx context.Context, id metadata.PackageID) (*Package, error) { +- g.mu.Lock() +- fut, ok := g.packages[id] +- if ok { +- g.mu.Unlock() +- select { +- case <-fut.done: +- case <-ctx.Done(): +- return nil, ctx.Err() +- } +- } else { +- fut = &futurePackage{done: make(chan struct{})} +- g.packages[id] = fut +- g.mu.Unlock() +- fut.pkg, fut.err = g.buildPackage(ctx, id) +- close(fut.done) +- } +- return fut.pkg, fut.err +-} +- +-// buildPackage parses a package and extracts its reference graph. It should +-// only be called from Package. +-func (g *PackageGraph) buildPackage(ctx context.Context, id metadata.PackageID) (*Package, error) { +- p := &Package{ +- metapkg: g.meta.Metadata(id), +- transitiveRefs: make(map[string]*typerefs.PackageSet), +- } +- var files []*parsego.File +- for _, filename := range p.metapkg.CompiledGoFiles { +- f, err := g.parse(ctx, filename) +- if err != nil { +- return nil, err +- } +- files = append(files, f) +- } +- imports := make(map[metadata.ImportPath]*metadata.Package) +- for impPath, depID := range p.metapkg.DepsByImpPath { +- if depID != "" { +- imports[impPath] = g.meta.Metadata(depID) +- } +- } +- +- // Compute the symbol-level dependencies through this package. +- data := typerefs.Encode(files, imports) +- +- // data can be persisted in a filecache, keyed +- // by hash(id, CompiledGoFiles, imports). +- +- // This point separates the local preprocessing +- // -- of a single package (above) from the global -- +- // transitive reachability query (below). +- +- // classes records syntactic edges between declarations in this +- // package and declarations in this package or another +- // package. See the package documentation for a detailed +- // description of what these edges do (and do not) represent. +- classes := typerefs.Decode(g.pkgIndex, data) +- +- // Debug +- if trace && len(classes) > 0 { +- var buf bytes.Buffer +- fmt.Fprintf(&buf, "%s\n", id) +- for _, class := range classes { +- for i, name := range class.Decls { +- if i == 0 { +- fmt.Fprintf(&buf, "\t") +- } +- fmt.Fprintf(&buf, " .%s", name) +- } +- // Group symbols by package. +- var prevID PackageID +- for _, sym := range class.Refs { +- id := g.pkgIndex.DeclaringPackage(sym) +- if id != prevID { +- prevID = id +- fmt.Fprintf(&buf, "\n\t\t-> %s:", id) +- } +- fmt.Fprintf(&buf, " .%s", sym.Name) +- } +- fmt.Fprintln(&buf) +- } +- os.Stderr.Write(buf.Bytes()) +- } +- +- // Now compute the transitive closure of packages reachable +- // from any exported symbol of this package. +- for _, class := range classes { +- set := g.pkgIndex.NewSet() +- +- // The Refs slice is sorted by (PackageID, name), +- // so we can economize by calling g.Package only +- // when the package id changes. +- depP := p +- for _, sym := range class.Refs { +- symPkgID := g.pkgIndex.DeclaringPackage(sym) +- if symPkgID == id { +- panic("intra-package edge") +- } +- if depP.metapkg.ID != symPkgID { +- // package changed +- var err error +- depP, err = g.Package(ctx, symPkgID) +- if err != nil { +- return nil, err +- } +- } +- set.Add(sym.Package) +- set.Union(depP.transitiveRefs[sym.Name]) +- } +- for _, name := range class.Decls { +- p.transitiveRefs[name] = set +- } +- } +- +- // Finally compute the union of transitiveRefs +- // across the direct deps of this package. 
+- byDeps, err := g.reachesByDeps(ctx, p.metapkg) +- if err != nil { +- return nil, err +- } +- p.ReachesByDeps = byDeps +- +- return p, nil +-} +- +-// reachesByDeps computes the set of packages that are reachable through +-// dependencies of the package m. +-func (g *PackageGraph) reachesByDeps(ctx context.Context, mp *metadata.Package) (*typerefs.PackageSet, error) { +- transitive := g.pkgIndex.NewSet() +- for _, depID := range mp.DepsByPkgPath { +- dep, err := g.Package(ctx, depID) +- if err != nil { +- return nil, err +- } +- transitive.AddPackage(dep.metapkg.ID) +- for _, set := range dep.transitiveRefs { +- transitive.Union(set) +- } +- } +- return transitive, nil +-} +diff -urN a/gopls/internal/cache/typerefs/pkgrefs_test.go b/gopls/internal/cache/typerefs/pkgrefs_test.go +--- a/gopls/internal/cache/typerefs/pkgrefs_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/cache/typerefs/pkgrefs_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,403 +0,0 @@ +-// Copyright 2023 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package typerefs_test +- +-import ( +- "bytes" +- "context" +- "flag" +- "fmt" +- "go/token" +- "go/types" +- "os" +- "slices" +- "strings" +- "sync" +- "testing" +- "time" +- +- "golang.org/x/tools/go/gcexportdata" +- "golang.org/x/tools/go/packages" +- "golang.org/x/tools/gopls/internal/cache/metadata" +- "golang.org/x/tools/gopls/internal/cache/parsego" +- "golang.org/x/tools/gopls/internal/cache/typerefs" +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/internal/astutil" +- "golang.org/x/tools/internal/packagesinternal" +- "golang.org/x/tools/internal/testenv" +-) +- +-var ( +- dir = flag.String("dir", "", "dir to run go/packages from") +- query = flag.String("query", "std", "go/packages load query to use for walkdecl tests") +- verify = flag.Bool("verify", true, "whether to verify reachable packages using export data (may be slow on large graphs)") +-) +- +-type ( +- packageName = metadata.PackageName +- PackageID = metadata.PackageID +- ImportPath = metadata.ImportPath +- PackagePath = metadata.PackagePath +- Metadata = metadata.Package +- MetadataSource = metadata.Source +-) +- +-// TestBuildPackageGraph tests the BuildPackageGraph constructor, which uses +-// the reference analysis of the Refs function to build a graph of +-// relationships between packages. +-// +-// It simulates the operation of gopls at startup: packages are loaded via +-// go/packages, and their syntax+metadata analyzed to determine which packages +-// are reachable from others. +-// +-// The test then verifies that the 'load' graph (the graph of relationships in +-// export data) is a subgraph of the 'reach' graph constructed by +-// BuildPackageGraph. While doing so, it constructs some statistics about the +-// relative sizes of these graphs, along with the 'transitive imports' graph, +-// to report the effectiveness of the reachability analysis. +-// +-// The following flags affect this test: +-// - dir sets the dir from which to run go/packages +-// - query sets the go/packages query to load +-// - verify toggles the verification w.r.t. the load graph (which may be +-// prohibitively expensive with large queries). 
+-func TestBuildPackageGraph(t *testing.T) { +- if testing.Short() { +- t.Skip("skipping with -short: loading the packages can take a long time with a cold cache") +- } +- testenv.NeedsGoBuild(t) // for go/packages +- +- t0 := time.Now() +- exports, meta, err := loadPackages(*query, *verify) +- if err != nil { +- t.Fatalf("loading failed: %v", err) +- } +- t.Logf("loaded %d packages in %v", len(exports), time.Since(t0)) +- +- ctx := context.Background() +- var ids []PackageID +- for id := range exports { +- ids = append(ids, id) +- } +- slices.Sort(ids) +- +- t0 = time.Now() +- g, err := BuildPackageGraph(ctx, meta, ids, newParser().parse) +- if err != nil { +- t.Fatal(err) +- } +- t.Logf("building package graph took %v", time.Since(t0)) +- +- // Collect information about the edges between packages for later analysis. +- // +- // We compare the following package graphs: +- // - the imports graph: edges are transitive imports +- // - the reaches graph: edges are reachability relationships through syntax +- // of imports (as defined in the package doc) +- // - the loads graph: edges are packages loaded through the export data of +- // imports +- // +- // By definition, loads < reaches < imports. +- type edgeSet map[PackageID]map[PackageID]bool +- var ( +- imports = make(edgeSet) // A imports B transitively +- importedBy = make(edgeSet) // A is imported by B transitively +- reaches = make(edgeSet) // A reaches B through top-level declaration syntax +- reachedBy = make(edgeSet) // A is reached by B through top-level declaration syntax +- loads = make(edgeSet) // A loads B through export data of its direct dependencies +- loadedBy = make(edgeSet) // A is loaded by B through export data of B's direct dependencies +- ) +- recordEdge := func(from, to PackageID, fwd, rev edgeSet) { +- if fwd[from] == nil { +- fwd[from] = make(map[PackageID]bool) +- } +- fwd[from][to] = true +- if rev[to] == nil { +- rev[to] = make(map[PackageID]bool) +- } +- rev[to][from] = true +- } +- +- exportedPackages := make(map[PackageID]*types.Package) +- importPackage := func(id PackageID) *types.Package { +- exportFile := exports[id] +- if exportFile == "" { +- return nil // no exported symbols +- } +- mp := meta.Metadata(id) +- tpkg, ok := exportedPackages[id] +- if !ok { +- pkgPath := string(mp.PkgPath) +- tpkg, err = importFromExportData(pkgPath, exportFile) +- if err != nil { +- t.Fatalf("importFromExportData(%s, %s) failed: %v", pkgPath, exportFile, err) +- } +- exportedPackages[id] = tpkg +- } +- return tpkg +- } +- +- for _, id := range ids { +- pkg, err := g.Package(ctx, id) +- if err != nil { +- t.Fatal(err) +- } +- pkg.ReachesByDeps.Elems(func(id2 typerefs.IndexID) { +- recordEdge(id, g.pkgIndex.PackageID(id2), reaches, reachedBy) +- }) +- +- importMap := importMap(id, meta) +- for _, id2 := range importMap { +- recordEdge(id, id2, imports, importedBy) +- } +- +- if *verify { +- for _, depID := range meta.Metadata(id).DepsByPkgPath { +- tpkg := importPackage(depID) +- if tpkg == nil { +- continue +- } +- for _, imp := range tpkg.Imports() { +- depID, ok := importMap[PackagePath(imp.Path())] +- if !ok { +- t.Errorf("import map (len: %d) for %s missing imported types.Package %s", len(importMap), id, imp.Path()) +- continue +- } +- recordEdge(id, depID, loads, loadedBy) +- } +- } +- +- for depID := range loads[id] { +- if !pkg.ReachesByDeps.Contains(depID) { +- t.Errorf("package %s was imported by %s, but not detected as reachable", depID, id) +- } +- } +- } +- } +- +- if testing.Verbose() { +- 
fmt.Printf("%-52s%8s%8s%8s%8s%8s%8s\n", "package ID", "imp", "impBy", "reach", "reachBy", "load", "loadBy") +- for _, id := range ids { +- fmt.Printf("%-52s%8d%8d%8d%8d%8d%8d\n", id, len(imports[id]), len(importedBy[id]), len(reaches[id]), len(reachedBy[id]), len(loads[id]), len(loadedBy[id])) +- } +- fmt.Println(strings.Repeat("-", 100)) +- fmt.Printf("%-52s%8s%8s%8s%8s%8s%8s\n", "package ID", "imp", "impBy", "reach", "reachBy", "load", "loadBy") +- +- avg := func(m edgeSet) float64 { +- var avg float64 +- for _, id := range ids { +- s := m[id] +- avg += float64(len(s)) / float64(len(ids)) +- } +- return avg +- } +- fmt.Printf("%52s%8.1f%8.1f%8.1f%8.1f%8.1f%8.1f\n", "averages:", avg(imports), avg(importedBy), avg(reaches), avg(reachedBy), avg(loads), avg(loadedBy)) +- } +-} +- +-func importMap(id PackageID, meta MetadataSource) map[PackagePath]PackageID { +- imports := make(map[PackagePath]PackageID) +- var recordIDs func(PackageID) +- recordIDs = func(id PackageID) { +- mp := meta.Metadata(id) +- if _, ok := imports[mp.PkgPath]; ok { +- return +- } +- imports[mp.PkgPath] = id +- for _, id := range mp.DepsByPkgPath { +- recordIDs(id) +- } +- } +- for _, id := range meta.Metadata(id).DepsByPkgPath { +- recordIDs(id) +- } +- return imports +-} +- +-func importFromExportData(pkgPath, exportFile string) (*types.Package, error) { +- file, err := os.Open(exportFile) +- if err != nil { +- return nil, err +- } +- r, err := gcexportdata.NewReader(file) +- if err != nil { +- file.Close() // ignore error +- return nil, err +- } +- fset := token.NewFileSet() +- tpkg, err := gcexportdata.Read(r, fset, make(map[string]*types.Package), pkgPath) +- file.Close() // ignore error +- if err != nil { +- return nil, err +- } +- // The export file reported by go/packages is produced by the compiler, which +- // has additional package dependencies due to inlining. +- // +- // Export and re-import so that we only observe dependencies from the +- // exported API. +- var out bytes.Buffer +- err = gcexportdata.Write(&out, fset, tpkg) +- if err != nil { +- return nil, err +- } +- return gcexportdata.Read(&out, token.NewFileSet(), make(map[string]*types.Package), pkgPath) +-} +- +-func BenchmarkBuildPackageGraph(b *testing.B) { +- t0 := time.Now() +- exports, meta, err := loadPackages(*query, *verify) +- if err != nil { +- b.Fatalf("loading failed: %v", err) +- } +- b.Logf("loaded %d packages in %v", len(exports), time.Since(t0)) +- ctx := context.Background() +- var ids []PackageID +- for id := range exports { +- ids = append(ids, id) +- } +- +- for b.Loop() { +- _, err := BuildPackageGraph(ctx, meta, ids, newParser().parse) +- if err != nil { +- b.Fatal(err) +- } +- } +-} +- +-type memoizedParser struct { +- mu sync.Mutex +- files map[protocol.DocumentURI]*futureParse +-} +- +-type futureParse struct { +- done chan struct{} +- pgf *parsego.File +- err error +-} +- +-func newParser() *memoizedParser { +- return &memoizedParser{ +- files: make(map[protocol.DocumentURI]*futureParse), +- } +-} +- +-func (p *memoizedParser) parse(ctx context.Context, uri protocol.DocumentURI) (*parsego.File, error) { +- doParse := func(ctx context.Context, uri protocol.DocumentURI) (*parsego.File, error) { +- // TODO(adonovan): hoist this operation outside the benchmark critsec. 
+- content, err := os.ReadFile(uri.Path()) +- if err != nil { +- return nil, err +- } +- content = astutil.PurgeFuncBodies(content) +- pgf, _ := parsego.Parse(ctx, token.NewFileSet(), uri, content, parsego.Full, false) +- return pgf, nil +- } +- +- p.mu.Lock() +- fut, ok := p.files[uri] +- if ok { +- p.mu.Unlock() +- select { +- case <-fut.done: +- case <-ctx.Done(): +- return nil, ctx.Err() +- } +- } else { +- fut = &futureParse{done: make(chan struct{})} +- p.files[uri] = fut +- p.mu.Unlock() +- fut.pgf, fut.err = doParse(ctx, uri) +- close(fut.done) +- } +- return fut.pgf, fut.err +-} +- +-type mapMetadataSource struct { +- m map[PackageID]*Metadata +-} +- +-func (s mapMetadataSource) Metadata(id PackageID) *Metadata { +- return s.m[id] +-} +- +-// This function is a compressed version of snapshot.load from the +-// internal/cache package, for use in testing. +-// +-// TODO(rfindley): it may be valuable to extract this logic from the snapshot, +-// since it is otherwise standalone. +-func loadPackages(query string, needExport bool) (map[PackageID]string, MetadataSource, error) { +- cfg := &packages.Config{ +- Dir: *dir, +- Mode: packages.NeedName | +- packages.NeedFiles | +- packages.NeedCompiledGoFiles | +- packages.NeedImports | +- packages.NeedDeps | +- packages.NeedTypesSizes | +- packages.NeedModule | +- packages.NeedEmbedFiles | +- packages.LoadMode(packagesinternal.DepsErrors) | +- packages.NeedForTest, +- Tests: true, +- } +- if needExport { +- cfg.Mode |= packages.NeedExportFile // ExportFile is not requested by gopls: this is used to verify reachability +- } +- pkgs, err := packages.Load(cfg, query) +- if err != nil { +- return nil, nil, err +- } +- +- meta := make(map[PackageID]*Metadata) +- var buildMetadata func(pkg *packages.Package) +- buildMetadata = func(pkg *packages.Package) { +- id := PackageID(pkg.ID) +- if meta[id] != nil { +- return +- } +- mp := &Metadata{ +- ID: id, +- PkgPath: PackagePath(pkg.PkgPath), +- Name: packageName(pkg.Name), +- ForTest: PackagePath(pkg.ForTest), +- TypesSizes: pkg.TypesSizes, +- LoadDir: cfg.Dir, +- Module: pkg.Module, +- Errors: pkg.Errors, +- DepsErrors: packagesinternal.GetDepsErrors(pkg), +- } +- meta[id] = mp +- +- for _, filename := range pkg.CompiledGoFiles { +- mp.CompiledGoFiles = append(mp.CompiledGoFiles, protocol.URIFromPath(filename)) +- } +- for _, filename := range pkg.GoFiles { +- mp.GoFiles = append(mp.GoFiles, protocol.URIFromPath(filename)) +- } +- +- mp.DepsByImpPath = make(map[ImportPath]PackageID) +- mp.DepsByPkgPath = make(map[PackagePath]PackageID) +- for importPath, imported := range pkg.Imports { +- importPath := ImportPath(importPath) +- +- // see note in gopls/internal/cache/load.go for an explanation of this check. 
+- if importPath != "unsafe" && len(imported.CompiledGoFiles) == 0 { +- mp.DepsByImpPath[importPath] = "" // missing +- continue +- } +- +- mp.DepsByImpPath[importPath] = PackageID(imported.ID) +- mp.DepsByPkgPath[PackagePath(imported.PkgPath)] = PackageID(imported.ID) +- buildMetadata(imported) +- } +- } +- +- exportFiles := make(map[PackageID]string) +- for _, pkg := range pkgs { +- exportFiles[PackageID(pkg.ID)] = pkg.ExportFile +- buildMetadata(pkg) +- } +- return exportFiles, &mapMetadataSource{meta}, nil +-} +diff -urN a/gopls/internal/cache/typerefs/refs.go b/gopls/internal/cache/typerefs/refs.go +--- a/gopls/internal/cache/typerefs/refs.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/cache/typerefs/refs.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,832 +0,0 @@ +-// Copyright 2023 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package typerefs +- +-import ( +- "fmt" +- "go/ast" +- "go/token" +- "sort" +- "strings" +- +- "golang.org/x/tools/gopls/internal/cache/metadata" +- "golang.org/x/tools/gopls/internal/cache/parsego" +- "golang.org/x/tools/gopls/internal/util/frob" +- "golang.org/x/tools/internal/astutil" +-) +- +-// Encode analyzes the Go syntax trees of a package, constructs a +-// reference graph, and uses it to compute, for each exported +-// declaration, the set of exported symbols of directly imported +-// packages that it references, perhaps indirectly. +-// +-// It returns a serializable index of this information. +-// Use Decode to expand the result. +-func Encode(files []*parsego.File, imports map[metadata.ImportPath]*metadata.Package) []byte { +- return index(files, imports) +-} +- +-// Decode decodes a serializable index of symbol +-// reachability produced by Encode. +-// +-// Because many declarations reference the exact same set of symbols, +-// the results are grouped into equivalence classes. +-// Classes are sorted by Decls[0], ascending. +-// The class with empty reachability is omitted. +-// +-// See the package documentation for more details as to what a +-// reference does (and does not) represent. +-func Decode(pkgIndex *PackageIndex, data []byte) []Class { +- return decode(pkgIndex, data) +-} +- +-// A Class is a reachability equivalence class. +-// +-// It attests that each exported package-level declaration in Decls +-// references (perhaps indirectly) one of the external (imported) +-// symbols in Refs. +-// +-// Because many Decls reach the same Refs, +-// it is more efficient to group them into classes. +-type Class struct { +- Decls []string // sorted set of names of exported decls with same reachability +- Refs []Symbol // set of external symbols, in ascending (PackageID, Name) order +-} +- +-// A Symbol represents an external (imported) symbol +-// referenced by the analyzed package. +-type Symbol struct { +- Package IndexID // w.r.t. PackageIndex passed to decoder +- Name string +-} +- +-// An IndexID is a small integer that uniquely identifies a package within a +-// given PackageIndex. +-type IndexID int +- +-// -- internals -- +- +-// A symbolSet is a set of symbols used internally during index construction. +-// +-// TODO(adonovan): opt: evaluate unifying Symbol and symbol. +-// (Encode would have to create a private PackageIndex.) +-type symbolSet map[symbol]bool +- +-// A symbol is the internal representation of an external +-// (imported) symbol referenced by the analyzed package. 
+-type symbol struct { +- pkg metadata.PackageID +- name string +-} +- +-// declNode holds information about a package-level declaration +-// (or more than one with the same name, in ill-typed code). +-// +-// It is a node in the symbol reference graph, whose outgoing edges +-// are of two kinds: intRefs and extRefs. +-type declNode struct { +- name string +- rep *declNode // canonical representative of this SCC (initially self) +- +- // outgoing graph edges +- intRefs map[*declNode]bool // to symbols in this package +- extRefs symbolSet // to imported symbols +- extRefsClass int // extRefs equivalence class number (-1 until set at end) +- +- // Tarjan's SCC algorithm +- index, lowlink int32 // Tarjan numbering +- scc int32 // -ve => on stack; 0 => unvisited; +ve => node is root of a found SCC +-} +- +-// state holds the working state of the Refs algorithm for a single package. +-// +-// The number of distinct symbols referenced by a single package +-// (measured across all of kubernetes), was found to be: +-// - max = 1750. +-// - Several packages reference > 100 symbols. +-// - p95 = 32, p90 = 22, p50 = 8. +-type state struct { +- // numbering of unique symbol sets +- class []symbolSet // unique symbol sets +- classIndex map[string]int // index of above (using SymbolSet.hash as key) +- +- // Tarjan's SCC algorithm +- index int32 +- stack []*declNode +-} +- +-// getClassIndex returns the small integer (an index into +-// state.class) that identifies the given set. +-func (st *state) getClassIndex(set symbolSet) int { +- key := classKey(set) +- i, ok := st.classIndex[key] +- if !ok { +- i = len(st.class) +- st.classIndex[key] = i +- st.class = append(st.class, set) +- } +- return i +-} +- +-// appendSorted appends the symbols to syms, sorts by ascending +-// (PackageID, name), and returns the result. +-// The argument must be an empty slice, ideally with capacity len(set). +-func (set symbolSet) appendSorted(syms []symbol) []symbol { +- for sym := range set { +- syms = append(syms, sym) +- } +- sort.Slice(syms, func(i, j int) bool { +- x, y := syms[i], syms[j] +- if x.pkg != y.pkg { +- return x.pkg < y.pkg +- } +- return x.name < y.name +- }) +- return syms +-} +- +-// classKey returns a key such that equal keys imply equal sets. +-// (e.g. a sorted string representation, or a cryptographic hash of same). +-func classKey(set symbolSet) string { +- // Sort symbols into a stable order. +- // TODO(adonovan): opt: a cheap crypto hash (e.g. BLAKE2b) might +- // make a cheaper map key than a large string. +- // Try using a hasher instead of a builder. +- var s strings.Builder +- for _, sym := range set.appendSorted(make([]symbol, 0, len(set))) { +- fmt.Fprintf(&s, "%s:%s;", sym.pkg, sym.name) +- } +- return s.String() +-} +- +-// index builds the reference graph and encodes the index. +-func index(pgfs []*parsego.File, imports map[metadata.ImportPath]*metadata.Package) []byte { +- // First pass: gather package-level names and create a declNode for each. +- // +- // In ill-typed code, there may be multiple declarations of the +- // same name; a single declInfo node will represent them all. 
+- decls := make(map[string]*declNode) +- addDecl := func(id *ast.Ident) { +- if name := id.Name; name != "_" && decls[name] == nil { +- node := &declNode{name: name, extRefsClass: -1} +- node.rep = node +- decls[name] = node +- } +- } +- for _, pgf := range pgfs { +- for _, d := range pgf.File.Decls { +- switch d := d.(type) { +- case *ast.GenDecl: +- switch d.Tok { +- case token.TYPE: +- for _, spec := range d.Specs { +- addDecl(spec.(*ast.TypeSpec).Name) +- } +- +- case token.VAR, token.CONST: +- for _, spec := range d.Specs { +- for _, ident := range spec.(*ast.ValueSpec).Names { +- addDecl(ident) +- } +- } +- } +- +- case *ast.FuncDecl: +- // non-method functions +- if d.Recv.NumFields() == 0 { +- addDecl(d.Name) +- } +- } +- } +- } +- +- // Second pass: process files to collect referring identifiers. +- st := &state{classIndex: make(map[string]int)} +- for _, pgf := range pgfs { +- visitFile(pgf.File, imports, decls) +- } +- +- // Find the strong components of the declNode graph +- // using Tarjan's algorithm, and coalesce each component. +- st.index = 1 +- for _, decl := range decls { +- if decl.index == 0 { // unvisited +- st.visit(decl) +- } +- } +- +- // TODO(adonovan): opt: consider compressing the serialized +- // representation by recording not the classes but the DAG of +- // non-trivial union operations (the "pointer equivalence" +- // optimization of Hardekopf & Lin). Unlike that algorithm, +- // which piggybacks on SCC coalescing, in our case it would +- // be better to make a forward traversal from the exported +- // decls, since it avoids visiting unreachable nodes, and +- // results in a dense (not sparse) numbering of the sets. +- +- // Tabulate the unique reachability sets of +- // each exported package member. +- classNames := make(map[int][]string) // set of decls (names) for a given reachability set +- for name, decl := range decls { +- if !ast.IsExported(name) { +- continue +- } +- +- decl = decl.find() +- +- // Skip decls with empty reachability. +- if len(decl.extRefs) == 0 { +- continue +- } +- +- // Canonicalize the set (and memoize). +- class := decl.extRefsClass +- if class < 0 { +- class = st.getClassIndex(decl.extRefs) +- decl.extRefsClass = class +- } +- classNames[class] = append(classNames[class], name) +- } +- +- return encode(classNames, st.class) +-} +- +-// visitFile inspects the file syntax for referring identifiers, and +-// populates the internal and external references of decls. +-func visitFile(file *ast.File, imports map[metadata.ImportPath]*metadata.Package, decls map[string]*declNode) { +- // Import information for this file. Multiple packages +- // may be referenced by a given name in the presence +- // of type errors (or multiple dot imports, which are +- // keyed by "."). +- fileImports := make(map[string][]metadata.PackageID) +- +- // importEdge records a reference from decl to an imported symbol +- // (pkgname.name). The package name may be ".". +- importEdge := func(decl *declNode, pkgname, name string) { +- if token.IsExported(name) { +- for _, depID := range fileImports[pkgname] { +- if decl.extRefs == nil { +- decl.extRefs = make(symbolSet) +- } +- decl.extRefs[symbol{depID, name}] = true +- } +- } +- } +- +- // visit finds refs within node and builds edges from fromId's decl. +- // References to the type parameters are ignored. 
+- visit := func(fromId *ast.Ident, node ast.Node, tparams map[string]bool) { +- if fromId.Name == "_" { +- return +- } +- from := decls[fromId.Name] +- // When visiting a method, there may not be a valid type declaration for +- // the receiver. In this case there is no way to refer to the method, so +- // we need not record edges. +- if from == nil { +- return +- } +- +- // Visit each reference to name or name.sel. +- visitDeclOrSpec(node, func(name, sel string) { +- // Ignore references to type parameters. +- if tparams[name] { +- return +- } +- +- // If name is declared in the package scope, +- // record an edge whether or not sel is empty. +- // A field or method selector may affect the +- // type of the current decl via initializers: +- // +- // package p +- // var x = y.F +- // var y = struct{ F int }{} +- if to, ok := decls[name]; ok { +- if from.intRefs == nil { +- from.intRefs = make(map[*declNode]bool) +- } +- from.intRefs[to] = true +- +- } else { +- // Only record an edge to dot-imported packages +- // if there was no edge to a local name. +- // This assumes that there are no duplicate declarations. +- // We conservatively, assume that this name comes from +- // every dot-imported package. +- importEdge(from, ".", name) +- } +- +- // Record an edge to an import if it matches the name, even if that +- // name collides with a package level name. Unlike the case of dotted +- // imports, we know the package is invalid here, and choose to fail +- // conservatively. +- if sel != "" { +- importEdge(from, name, sel) +- } +- }) +- } +- +- // Visit the declarations and gather reference edges. +- // Import declarations appear before all others. +- for _, d := range file.Decls { +- switch d := d.(type) { +- case *ast.GenDecl: +- switch d.Tok { +- case token.IMPORT: +- // Record local import names for this file. +- for _, spec := range d.Specs { +- spec := spec.(*ast.ImportSpec) +- path := metadata.UnquoteImportPath(spec) +- if path == "" { +- continue +- } +- dep := imports[path] +- if dep == nil { +- // Note here that we don't try to "guess" +- // the name of an import based on e.g. +- // its importPath. Doing so would only +- // result in edges that don't go anywhere. +- continue +- } +- name := string(dep.Name) +- if spec.Name != nil { +- if spec.Name.Name == "_" { +- continue +- } +- name = spec.Name.Name // possibly "." +- } +- fileImports[name] = append(fileImports[name], dep.ID) +- } +- +- case token.TYPE: +- for _, spec := range d.Specs { +- spec := spec.(*ast.TypeSpec) +- tparams := tparamsMap(spec.TypeParams) +- visit(spec.Name, spec, tparams) +- } +- +- case token.VAR, token.CONST: +- for _, spec := range d.Specs { +- spec := spec.(*ast.ValueSpec) +- for _, name := range spec.Names { +- visit(name, spec, nil) +- } +- } +- } +- +- case *ast.FuncDecl: +- // This check for NumFields() > 0 is consistent with go/types, +- // which reports an error but treats the declaration like a +- // normal function when Recv is non-nil but empty +- // (as in func () f()). +- if d.Recv.NumFields() > 0 { +- // Method. Associate it with the receiver. +- _, id, typeParams := astutil.UnpackRecv(d.Recv.List[0].Type) +- if id != nil { +- var tparams map[string]bool +- if len(typeParams) > 0 { +- tparams = make(map[string]bool) +- for _, tparam := range typeParams { +- if tparam.Name != "_" { +- tparams[tparam.Name] = true +- } +- } +- } +- visit(id, d, tparams) +- } +- } else { +- // Non-method. 
+- tparams := tparamsMap(d.Type.TypeParams) +- visit(d.Name, d, tparams) +- } +- } +- } +-} +- +-// tparamsMap returns a set recording each name declared by the provided field +-// list. It so happens that we only care about names declared by type parameter +-// lists. +-func tparamsMap(tparams *ast.FieldList) map[string]bool { +- if tparams == nil || len(tparams.List) == 0 { +- return nil +- } +- m := make(map[string]bool) +- for _, f := range tparams.List { +- for _, name := range f.Names { +- if name.Name != "_" { +- m[name.Name] = true +- } +- } +- } +- return m +-} +- +-// A refVisitor visits referring identifiers and dotted identifiers. +-// +-// For a referring identifier I, name="I" and sel="". For a dotted identifier +-// q.I, name="q" and sel="I". +-type refVisitor = func(name, sel string) +- +-// visitDeclOrSpec visits referring idents or dotted idents that may affect +-// the type of the declaration at the given node, which must be an ast.Decl or +-// ast.Spec. +-func visitDeclOrSpec(node ast.Node, f refVisitor) { +- // Declarations +- switch n := node.(type) { +- // ImportSpecs should not appear here, and will panic in the default case. +- +- case *ast.ValueSpec: +- // Skip Doc, Names, Comments, which do not affect the decl type. +- // Initializers only affect the type of a value spec if the type is unset. +- if n.Type != nil { +- visitExpr(n.Type, f) +- } else { // only need to walk expr list if type is nil +- visitExprList(n.Values, f) +- } +- +- case *ast.TypeSpec: +- // Skip Doc, Name, and Comment, which do not affect the decl type. +- if tparams := n.TypeParams; tparams != nil { +- visitFieldList(tparams, f) +- } +- visitExpr(n.Type, f) +- +- case *ast.BadDecl: +- // nothing to do +- +- // We should not reach here with a GenDecl, so panic below in the default case. +- +- case *ast.FuncDecl: +- // Skip Doc, Name, and Body, which do not affect the type. +- // Recv is handled by Refs: methods are associated with their type. +- visitExpr(n.Type, f) +- +- default: +- panic(fmt.Sprintf("unexpected node type %T", node)) +- } +-} +- +-// visitExpr visits referring idents and dotted idents that may affect the +-// type of expr. +-// +-// visitExpr can't reliably distinguish a dotted ident pkg.X from a +-// selection expr.f or T.method. +-func visitExpr(expr ast.Expr, f refVisitor) { +- switch n := expr.(type) { +- // These four cases account for about two thirds of all nodes, +- // so we place them first to shorten the common control paths. +- // (See go.dev/cl/480915.) +- case *ast.Ident: +- f(n.Name, "") +- +- case *ast.BasicLit: +- // nothing to do +- +- case *ast.SelectorExpr: +- if ident, ok := n.X.(*ast.Ident); ok { +- f(ident.Name, n.Sel.Name) +- } else { +- visitExpr(n.X, f) +- // Skip n.Sel as we don't care about which field or method is selected, +- // as we'll have recorded an edge to all declarations relevant to the +- // receiver type via visiting n.X above. +- } +- +- case *ast.CallExpr: +- visitExpr(n.Fun, f) +- visitExprList(n.Args, f) // args affect types for unsafe.Sizeof or builtins or generics +- +- // Expressions +- case *ast.Ellipsis: +- if n.Elt != nil { +- visitExpr(n.Elt, f) +- } +- +- case *ast.FuncLit: +- visitExpr(n.Type, f) +- // Skip Body, which does not affect the type. +- +- case *ast.CompositeLit: +- if n.Type != nil { +- visitExpr(n.Type, f) +- } +- // Skip Elts, which do not affect the type. 
+- +- case *ast.ParenExpr: +- visitExpr(n.X, f) +- +- case *ast.IndexExpr: +- visitExpr(n.X, f) +- visitExpr(n.Index, f) // may affect type for instantiations +- +- case *ast.IndexListExpr: +- visitExpr(n.X, f) +- for _, index := range n.Indices { +- visitExpr(index, f) // may affect the type for instantiations +- } +- +- case *ast.SliceExpr: +- visitExpr(n.X, f) +- // skip Low, High, and Max, which do not affect type. +- +- case *ast.TypeAssertExpr: +- // Skip X, as it doesn't actually affect the resulting type of the type +- // assertion. +- if n.Type != nil { +- visitExpr(n.Type, f) +- } +- +- case *ast.StarExpr: +- visitExpr(n.X, f) +- +- case *ast.UnaryExpr: +- visitExpr(n.X, f) +- +- case *ast.BinaryExpr: +- visitExpr(n.X, f) +- visitExpr(n.Y, f) +- +- case *ast.KeyValueExpr: +- panic("unreachable") // unreachable, as we don't descend into elts of composite lits. +- +- case *ast.ArrayType: +- if n.Len != nil { +- visitExpr(n.Len, f) +- } +- visitExpr(n.Elt, f) +- +- case *ast.StructType: +- visitFieldList(n.Fields, f) +- +- case *ast.FuncType: +- if tparams := n.TypeParams; tparams != nil { +- visitFieldList(tparams, f) +- } +- if n.Params != nil { +- visitFieldList(n.Params, f) +- } +- if n.Results != nil { +- visitFieldList(n.Results, f) +- } +- +- case *ast.InterfaceType: +- visitFieldList(n.Methods, f) +- +- case *ast.MapType: +- visitExpr(n.Key, f) +- visitExpr(n.Value, f) +- +- case *ast.ChanType: +- visitExpr(n.Value, f) +- +- case *ast.BadExpr: +- // nothing to do +- +- default: +- panic(fmt.Sprintf("ast.Walk: unexpected node type %T", n)) +- } +-} +- +-func visitExprList(list []ast.Expr, f refVisitor) { +- for _, x := range list { +- visitExpr(x, f) +- } +-} +- +-func visitFieldList(n *ast.FieldList, f refVisitor) { +- for _, field := range n.List { +- visitExpr(field.Type, f) +- } +-} +- +-// -- strong component graph construction (plundered from go/pointer) -- +- +-// visit implements the depth-first search of Tarjan's SCC algorithm +-// (see https://doi.org/10.1137/0201010). +-// Precondition: x is canonical. +-func (st *state) visit(x *declNode) { +- checkCanonical(x) +- x.index = st.index +- x.lowlink = st.index +- st.index++ +- +- st.stack = append(st.stack, x) // push +- assert(x.scc == 0, "node revisited") +- x.scc = -1 +- +- for y := range x.intRefs { +- // Loop invariant: x is canonical. +- +- y := y.find() +- +- if x == y { +- continue // nodes already coalesced +- } +- +- switch { +- case y.scc > 0: +- // y is already a collapsed SCC +- +- case y.scc < 0: +- // y is on the stack, and thus in the current SCC. +- if y.index < x.lowlink { +- x.lowlink = y.index +- } +- +- default: +- // y is unvisited; visit it now. +- st.visit(y) +- // Note: x and y are now non-canonical. +- +- x = x.find() +- +- if y.lowlink < x.lowlink { +- x.lowlink = y.lowlink +- } +- } +- } +- checkCanonical(x) +- +- // Is x the root of an SCC? +- if x.lowlink == x.index { +- // Coalesce all nodes in the SCC. +- for { +- // Pop y from stack. +- i := len(st.stack) - 1 +- y := st.stack[i] +- st.stack = st.stack[:i] +- +- checkCanonical(x) +- checkCanonical(y) +- +- if x == y { +- break // SCC is complete. +- } +- coalesce(x, y) +- } +- +- // Accumulate union of extRefs over +- // internal edges (to other SCCs). 
+- for y := range x.intRefs { +- y := y.find() +- if y == x { +- continue // already coalesced +- } +- assert(y.scc == 1, "edge to non-scc node") +- for z := range y.extRefs { +- if x.extRefs == nil { +- x.extRefs = make(symbolSet) +- } +- x.extRefs[z] = true // extRefs: x U= y +- } +- } +- +- x.scc = 1 +- } +-} +- +-// coalesce combines two nodes in the strong component graph. +-// Precondition: x and y are canonical. +-func coalesce(x, y *declNode) { +- // x becomes y's canonical representative. +- y.rep = x +- +- // x accumulates y's internal references. +- for z := range y.intRefs { +- x.intRefs[z] = true +- } +- y.intRefs = nil +- +- // x accumulates y's external references. +- for z := range y.extRefs { +- if x.extRefs == nil { +- x.extRefs = make(symbolSet) +- } +- x.extRefs[z] = true +- } +- y.extRefs = nil +-} +- +-// find returns the canonical node decl. +-// (The nodes form a disjoint set forest.) +-func (decl *declNode) find() *declNode { +- rep := decl.rep +- if rep != decl { +- rep = rep.find() +- decl.rep = rep // simple path compression (no union-by-rank) +- } +- return rep +-} +- +-const debugSCC = false // enable assertions in strong-component algorithm +- +-func checkCanonical(x *declNode) { +- if debugSCC { +- assert(x == x.find(), "not canonical") +- } +-} +- +-func assert(cond bool, msg string) { +- if debugSCC && !cond { +- panic(msg) +- } +-} +- +-// -- serialization -- +- +-// (The name says gob but in fact we use frob.) +-var classesCodec = frob.CodecFor[gobClasses]() +- +-type gobClasses struct { +- Strings []string // table of strings (PackageIDs and names) +- Classes []gobClass +-} +- +-type gobClass struct { +- Decls []int32 // indices into gobClasses.Strings +- Refs []int32 // list of (package, name) pairs, each an index into gobClasses.Strings +-} +- +-// encode encodes the equivalence classes, +-// (classNames[i], classes[i]), for i in range classes. +-// +-// With the current encoding, across kubernetes, +-// the encoded size distribution has +-// p50 = 511B, p95 = 4.4KB, max = 108K. 
+-func encode(classNames map[int][]string, classes []symbolSet) []byte { +- payload := gobClasses{ +- Classes: make([]gobClass, 0, len(classNames)), +- } +- +- // index of unique strings +- strings := make(map[string]int32) +- stringIndex := func(s string) int32 { +- i, ok := strings[s] +- if !ok { +- i = int32(len(payload.Strings)) +- strings[s] = i +- payload.Strings = append(payload.Strings, s) +- } +- return i +- } +- +- var refs []symbol // recycled temporary +- for class, names := range classNames { +- set := classes[class] +- +- // names, sorted +- sort.Strings(names) +- gobDecls := make([]int32, len(names)) +- for i, name := range names { +- gobDecls[i] = stringIndex(name) +- } +- +- // refs, sorted by ascending (PackageID, name) +- gobRefs := make([]int32, 0, 2*len(set)) +- for _, sym := range set.appendSorted(refs[:0]) { +- gobRefs = append(gobRefs, +- stringIndex(string(sym.pkg)), +- stringIndex(sym.name)) +- } +- payload.Classes = append(payload.Classes, gobClass{ +- Decls: gobDecls, +- Refs: gobRefs, +- }) +- } +- +- return classesCodec.Encode(payload) +-} +- +-func decode(pkgIndex *PackageIndex, data []byte) []Class { +- var payload gobClasses +- classesCodec.Decode(data, &payload) +- +- classes := make([]Class, len(payload.Classes)) +- for i, gobClass := range payload.Classes { +- decls := make([]string, len(gobClass.Decls)) +- for i, decl := range gobClass.Decls { +- decls[i] = payload.Strings[decl] +- } +- refs := make([]Symbol, len(gobClass.Refs)/2) +- for i := range refs { +- pkgID := pkgIndex.IndexID(metadata.PackageID(payload.Strings[gobClass.Refs[2*i]])) +- name := payload.Strings[gobClass.Refs[2*i+1]] +- refs[i] = Symbol{Package: pkgID, Name: name} +- } +- classes[i] = Class{ +- Decls: decls, +- Refs: refs, +- } +- } +- +- // Sort by ascending Decls[0]. +- // TODO(adonovan): move sort to encoder. Determinism is good. +- sort.Slice(classes, func(i, j int) bool { +- return classes[i].Decls[0] < classes[j].Decls[0] +- }) +- +- return classes +-} +diff -urN a/gopls/internal/cache/typerefs/refs_test.go b/gopls/internal/cache/typerefs/refs_test.go +--- a/gopls/internal/cache/typerefs/refs_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/cache/typerefs/refs_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,549 +0,0 @@ +-// Copyright 2023 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package typerefs_test +- +-import ( +- "context" +- "fmt" +- "go/token" +- "sort" +- "testing" +- +- "github.com/google/go-cmp/cmp" +- "golang.org/x/tools/gopls/internal/cache/metadata" +- "golang.org/x/tools/gopls/internal/cache/parsego" +- "golang.org/x/tools/gopls/internal/cache/typerefs" +- "golang.org/x/tools/gopls/internal/protocol" +-) +- +-// TestRefs checks that the analysis reports, for each exported member +-// of the test package ("p"), its correct dependencies on exported +-// members of its direct imports (e.g. "ext"). +-func TestRefs(t *testing.T) { +- ctx := context.Background() +- +- tests := []struct { +- label string +- srcs []string // source for the local package; package name must be p +- imports map[string]string // for simplicity: importPath -> pkgID/pkgName (we set pkgName == pkgID); 'ext' is always available. +- want map[string][]string // decl name -> id. 
+- allowErrs bool // whether we expect parsing errors +- }{ +- { +- label: "empty package", +- want: map[string][]string{}, +- }, +- { +- label: "fields", +- srcs: []string{` +-package p +- +-import "ext" +- +-type A struct{ b B } +-type B func(c C) (d D) +-type C ext.C +-type D ext.D +- +-// Should not be referenced by field names. +-type b ext.B_ +-type c int.C_ +-type d ext.D_ +-`}, +- want: map[string][]string{ +- "A": {"ext.C", "ext.D"}, +- "B": {"ext.C", "ext.D"}, +- "C": {"ext.C"}, +- "D": {"ext.D"}, +- }, +- }, +- { +- label: "embedding", +- srcs: []string{` +-package p +- +-import "ext" +- +-type A struct{ +- B +- _ struct { +- C +- } +- D +-} +-type B ext.B +-type C ext.C +-type D interface{ +- B +-} +-`}, +- want: map[string][]string{ +- "A": {"ext.B", "ext.C"}, +- "B": {"ext.B"}, +- "C": {"ext.C"}, +- "D": {"ext.B"}, +- }, +- }, +- { +- label: "constraint embedding", +- srcs: []string{` +-package p +- +-import "ext" +- +-type A interface{ +- int | B | ~C +- struct{D} +-} +- +-type B ext.B +-type C ext.C +-type D ext.D +-`}, +- want: map[string][]string{ +- "A": {"ext.B", "ext.C", "ext.D"}, +- "B": {"ext.B"}, +- "C": {"ext.C"}, +- "D": {"ext.D"}, +- }, +- }, +- { +- label: "funcs", +- srcs: []string{` +-package p +- +-import "ext" +- +-type A ext.A +-type B ext.B +-const C B = 2 +-func F(A) B { +- return C +-} +-var V = F(W) +-var W A +-`}, +- want: map[string][]string{ +- "A": {"ext.A"}, +- "B": {"ext.B"}, +- "C": {"ext.B"}, +- "F": {"ext.A", "ext.B"}, +- "V": { +- "ext.A", // via F +- "ext.B", // via W: can't be eliminated: F could be builtin or generic +- }, +- "W": {"ext.A"}, +- }, +- }, +- { +- label: "methods", +- srcs: []string{`package p +- +-import "ext" +- +-type A ext.A +-type B ext.B +-`, `package p +- +-func (A) M(B) +-func (*B) M(A) +-`}, +- want: map[string][]string{ +- "A": {"ext.A", "ext.B"}, +- "B": {"ext.A", "ext.B"}, +- }, +- }, +- { +- label: "initializers", +- srcs: []string{` +-package p +- +-import "ext" +- +-var A b = C // type does not depend on C +-type b ext.B +-var C = d // type does depend on D +-var d b +- +-var e = d + a +- +-var F = func() B { return E } +- +-var G = struct{ +- A b +- _ [unsafe.Sizeof(ext.V)]int // array size + Sizeof creates edge to a var +- _ [unsafe.Sizeof(G)]int // creates a self edge; doesn't affect output though +-}{} +- +-var H = (D + A + C*C) +- +-var I = (A+C).F +-`}, +- want: map[string][]string{ +- "A": {"ext.B"}, +- "C": {"ext.B"}, // via d +- "G": {"ext.B", "ext.V"}, // via b,C +- "H": {"ext.B"}, // via d,A,C +- "I": {"ext.B"}, +- }, +- }, +- { +- label: "builtins", +- srcs: []string{`package p +- +-import "ext" +- +-var A = new(b) +-type b struct{ ext.B } +- +-type C chan d +-type d ext.D +- +-type S []ext.S +-type t ext.T +-var U = append(([]*S)(nil), new(t)) +- +-type X map[k]v +-type k ext.K +-type v ext.V +- +-var Z = make(map[k]A) +- +-// close, delete, and panic cannot occur outside of statements +-`}, +- want: map[string][]string{ +- "A": {"ext.B"}, +- "C": {"ext.D"}, +- "S": {"ext.S"}, +- "U": {"ext.S", "ext.T"}, // ext.T edge could be eliminated +- "X": {"ext.K", "ext.V"}, +- "Z": {"ext.B", "ext.K"}, +- }, +- }, +- { +- label: "builtin shadowing", +- srcs: []string{`package p +- +-import "ext" +- +-var A = new(ext.B) +-func new() c +-type c ext.C +-`}, +- want: map[string][]string{ +- "A": {"ext.B", "ext.C"}, +- }, +- }, +- { +- label: "named forwarding", +- srcs: []string{`package p +- +-import "ext" +- +-type A B +-type B c +-type c ext.C +-`}, +- want: map[string][]string{ +- "A": {"ext.C"}, +- "B": 
{"ext.C"}, +- }, +- }, +- { +- label: "aliases", +- srcs: []string{`package p +- +-import "ext" +- +-type A = B +-type B = C +-type C = ext.C +-`}, +- want: map[string][]string{ +- "A": {"ext.C"}, +- "B": {"ext.C"}, +- "C": {"ext.C"}, +- }, +- }, +- { +- label: "array length", +- srcs: []string{`package p +- +-import "ext" +-import "unsafe" +- +-type A [unsafe.Sizeof(ext.B{ext.C})]int +-type A2 [unsafe.Sizeof(ext.B{f:ext.C})]int // use a KeyValueExpr +- +-type D [unsafe.Sizeof(struct{ f E })]int +-type E ext.E +- +-type F [3]G +-type G [ext.C]int +-`}, +- want: map[string][]string{ +- "A": {"ext.B"}, // no ext.C: doesn't enter CompLit +- "A2": {"ext.B"}, // ditto +- "D": {"ext.E"}, +- "E": {"ext.E"}, +- "F": {"ext.C"}, +- "G": {"ext.C"}, +- }, +- }, +- { +- label: "imports", +- srcs: []string{`package p +- +-import "ext" +- +-import ( +- "q" +- r2 "r" +- "s" // note: package name is t +- "z" +-) +- +-type A struct { +- q.Q +- r2.R +- s.S // invalid ref +- z.Z // references both external z.Z as well as package-level type z +-} +- +-type B struct { +- r.R // invalid ref +- t.T +-} +- +-var X int = q.V // X={}: no descent into RHS of 'var v T = rhs' +-var Y = q.V.W +- +-type z ext.Z +-`}, +- imports: map[string]string{"q": "q", "r": "r", "s": "t", "z": "z"}, +- want: map[string][]string{ +- "A": {"ext.Z", "q.Q", "r.R", "z.Z"}, +- "B": {"t.T"}, +- "Y": {"q.V"}, +- }, +- }, +- { +- label: "import blank", +- srcs: []string{`package p +- +-import _ "q" +- +-type A q.Q +-`}, +- imports: map[string]string{"q": "q"}, +- want: map[string][]string{}, +- }, +- { +- label: "import dot", +- srcs: []string{`package p +- +-import . "q" +- +-type A q.Q // not actually an edge, since q is imported . +-type B struct { +- C // assumed to be an edge to q +- D // resolved to package decl +-} +- +- +-type E error // unexported, therefore must be universe.error +-type F Field +-var G = Field.X +-`, `package p +- +-import "ext" +-import "q" +- +-type D ext.D +-`}, +- imports: map[string]string{"q": "q"}, +- want: map[string][]string{ +- "B": {"ext.D", "q.C"}, +- "D": {"ext.D"}, +- "F": {"q.Field"}, +- "G": {"q.Field"}, +- }, +- }, +- { +- label: "typeparams", +- srcs: []string{`package p +- +-import "ext" +- +-type A[T any] struct { +- t T +- b B +-} +- +-type B ext.B +- +-func F1[T any](T, B) +-func F2[T C]()(T, B) +- +-type T ext.T +- +-type C ext.C +- +-func F3[T1 ~[]T2, T2 ~[]T3](t1 T1, t2 T2) +-type T3 ext.T3 +-`, `package p +- +-func (A[B]) M(C) {} +-`}, +- want: map[string][]string{ +- "A": {"ext.B", "ext.C"}, +- "B": {"ext.B"}, +- "C": {"ext.C"}, +- "F1": {"ext.B"}, +- "F2": {"ext.B", "ext.C"}, +- "F3": {"ext.T3"}, +- "T": {"ext.T"}, +- "T3": {"ext.T3"}, +- }, +- }, +- { +- label: "instances", +- srcs: []string{`package p +- +-import "ext" +- +-type A[T any] ext.A +-type B[T1, T2 any] ext.B +- +-type C A[int] +-type D B[int, A[E]] +-type E ext.E +-`}, +- want: map[string][]string{ +- "A": {"ext.A"}, +- "B": {"ext.B"}, +- "C": {"ext.A"}, +- "D": {"ext.A", "ext.B", "ext.E"}, +- "E": {"ext.E"}, +- }, +- }, +- { +- label: "duplicate decls", +- srcs: []string{`package p +- +-import "a" +-import "ext" +- +-type a a.A +-type A a +-type b ext.B +-type C a.A +-func (C) Foo(x) {} // invalid parameter, but that does not matter +-type C b +-func (C) Bar(y) {} // invalid parameter, but that does not matter +- +-var x ext.X +-var y ext.Y +-`}, +- imports: map[string]string{"a": "a", "b": "b"}, // "b" import should not matter, since it isn't in this file +- want: map[string][]string{ +- "A": {"a.A"}, +- "C": {"a.A", 
"ext.B", "ext.X", "ext.Y"}, +- }, +- }, +- { +- label: "invalid decls", +- srcs: []string{`package p +- +-import "ext" +- +-type A B +- +-func () Foo(B){} +- +-var B struct{ ext.B +-`}, +- want: map[string][]string{ +- "A": {"ext.B"}, +- "B": {"ext.B"}, +- "Foo": {"ext.B"}, +- }, +- allowErrs: true, +- }, +- { +- label: "unmapped receiver", +- srcs: []string{`package p +- +-type P struct{} +- +-func (a) x(P) +-`}, +- want: map[string][]string{}, +- allowErrs: true, +- }, +- { +- label: "SCC special case", +- srcs: []string{`package p +- +-import "ext" +- +-type X Y +-type Y struct { Z; *X } +-type Z map[ext.A]ext.B +-`}, +- want: map[string][]string{ +- "X": {"ext.A", "ext.B"}, +- "Y": {"ext.A", "ext.B"}, +- "Z": {"ext.A", "ext.B"}, +- }, +- allowErrs: true, +- }, +- } +- +- for _, test := range tests { +- t.Run(test.label, func(t *testing.T) { +- var pgfs []*parsego.File +- for i, src := range test.srcs { +- uri := protocol.DocumentURI(fmt.Sprintf("file:///%d.go", i)) +- pgf, _ := parsego.Parse(ctx, token.NewFileSet(), uri, []byte(src), parsego.Full, false) +- if !test.allowErrs && pgf.ParseErr != nil { +- t.Fatalf("ParseGoSrc(...) returned parse errors: %v", pgf.ParseErr) +- } +- pgfs = append(pgfs, pgf) +- } +- +- imports := map[metadata.ImportPath]*metadata.Package{ +- "ext": {ID: "ext", Name: "ext"}, // this one comes for free +- } +- for path, mp := range test.imports { +- imports[metadata.ImportPath(path)] = &metadata.Package{ +- ID: metadata.PackageID(mp), +- Name: metadata.PackageName(mp), +- } +- } +- +- data := typerefs.Encode(pgfs, imports) +- +- got := make(map[string][]string) +- index := typerefs.NewPackageIndex() +- for _, class := range typerefs.Decode(index, data) { +- // We redundantly expand out the name x refs cross product +- // here since that's what the existing tests expect. +- for _, name := range class.Decls { +- var syms []string +- for _, sym := range class.Refs { +- syms = append(syms, fmt.Sprintf("%s.%s", index.DeclaringPackage(sym), sym.Name)) +- } +- sort.Strings(syms) +- got[name] = syms +- } +- } +- +- if diff := cmp.Diff(test.want, got); diff != "" { +- t.Errorf("Refs(...) returned unexpected refs (-want +got):\n%s", diff) +- } +- }) +- } +-} +diff -urN a/gopls/internal/cache/view.go b/gopls/internal/cache/view.go +--- a/gopls/internal/cache/view.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/cache/view.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,1272 +0,0 @@ +-// Copyright 2018 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-// Package cache is the core of gopls: it is concerned with state +-// management, dependency analysis, and invalidation; and it holds the +-// machinery of type checking and modular static analysis. Its +-// principal types are [Session], [Folder], [View], [Snapshot], +-// [Cache], and [Package]. 
+-package cache +- +-import ( +- "context" +- "encoding/json" +- "errors" +- "fmt" +- "log" +- "maps" +- "os" +- "os/exec" +- "path" +- "path/filepath" +- "slices" +- "sort" +- "strings" +- "sync" +- "time" +- +- "golang.org/x/tools/gopls/internal/cache/typerefs" +- "golang.org/x/tools/gopls/internal/file" +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/gopls/internal/settings" +- "golang.org/x/tools/gopls/internal/util/moremaps" +- "golang.org/x/tools/gopls/internal/util/pathutil" +- "golang.org/x/tools/gopls/internal/vulncheck" +- "golang.org/x/tools/internal/event" +- "golang.org/x/tools/internal/gocommand" +- "golang.org/x/tools/internal/imports" +- "golang.org/x/tools/internal/modindex" +- "golang.org/x/tools/internal/xcontext" +-) +- +-// A Folder represents an LSP workspace folder, together with its per-folder +-// options and environment variables that affect build configuration. +-// +-// Folders (Name and Dir) are specified by the 'initialize' and subsequent +-// 'didChangeWorkspaceFolders' requests; their options come from +-// didChangeConfiguration. +-// +-// Folders must not be mutated, as they may be shared across multiple views. +-type Folder struct { +- Dir protocol.DocumentURI +- Name string // decorative name for UI; not necessarily unique +- Options *settings.Options +- Env GoEnv +-} +- +-// GoEnv holds the environment variables and data from the Go command that is +-// required for operating on a workspace folder. +-type GoEnv struct { +- // Go environment variables. These correspond directly with the Go env var of +- // the same name. +- GOOS string +- GOARCH string +- GOCACHE string +- GOMODCACHE string +- GOPATH string +- GOPRIVATE string +- GOFLAGS string +- GO111MODULE string +- GOTOOLCHAIN string +- GOROOT string +- +- // Go version output. +- GoVersion int // The X in Go 1.X +- GoVersionOutput string // complete go version output +- +- // OS environment variables (notably not go env). +- +- // ExplicitGOWORK is the GOWORK value set explicitly in the environment. This +- // may differ from `go env GOWORK` when the GOWORK value is implicit from the +- // working directory. +- ExplicitGOWORK string +- +- // EffectiveGOPACKAGESDRIVER is the effective go/packages driver binary that +- // will be used. This may be set via GOPACKAGESDRIVER, or may be discovered +- // via os.LookPath("gopackagesdriver"). The latter functionality is +- // undocumented and may be removed in the future. +- // +- // If GOPACKAGESDRIVER is set to "off", EffectiveGOPACKAGESDRIVER is "". +- EffectiveGOPACKAGESDRIVER string +-} +- +-// View represents a single build for a workspace. +-// +-// A View is a logical build (the viewDefinition) along with a state of that +-// build (the Snapshot). +-type View struct { +- id string // a unique string to identify this View in (e.g.) serialized Commands +- +- *viewDefinition // build configuration +- +- gocmdRunner *gocommand.Runner // limits go command concurrency +- +- // baseCtx is the context handed to NewView. This is the parent of all +- // background contexts created for this view. +- baseCtx context.Context +- +- // importsState is for the old imports code +- importsState *importsState +- +- // modcacheState is the replacement for importsState, to be used for +- // goimports operations when the imports source is "gopls". +- // +- // It may be nil, if the imports source is not "gopls". +- modcacheState *modcacheState +- +- // pkgIndex is an index of package IDs, for efficient storage of typerefs. 
+- pkgIndex *typerefs.PackageIndex +- +- // parseCache holds an LRU cache of recently parsed files. +- parseCache *parseCache +- +- // fs is the file source used to populate this view. +- fs *overlayFS +- +- // ignoreFilter is used for fast checking of ignored files. +- ignoreFilter *ignoreFilter +- +- // cancelInitialWorkspaceLoad can be used to terminate the view's first +- // attempt at initialization. +- cancelInitialWorkspaceLoad context.CancelFunc +- +- snapshotMu sync.Mutex +- snapshot *Snapshot // latest snapshot; nil after shutdown has been called +- +- // initialWorkspaceLoad is closed when the first workspace initialization has +- // completed. If we failed to load, we only retry if the go.mod file changes, +- // to avoid too many go/packages calls. +- initialWorkspaceLoad chan struct{} +- +- // initializationSema is used limit concurrent initialization of snapshots in +- // the view. We use a channel instead of a mutex to avoid blocking when a +- // context is canceled. +- // +- // This field (along with snapshot.initialized) guards against duplicate +- // initialization of snapshots. Do not change it without adjusting snapshot +- // accordingly. +- initializationSema chan struct{} +- +- // Document filters are constructed once, in View.filterFunc. +- filterFuncOnce sync.Once +- _filterFunc func(protocol.DocumentURI) bool // only accessed by View.filterFunc +-} +- +-// definition implements the viewDefiner interface. +-func (v *View) definition() *viewDefinition { return v.viewDefinition } +- +-// A viewDefinition is a logical build, i.e. configuration (Folder) along with +-// a build directory and possibly an environment overlay (e.g. GOWORK=off or +-// GOOS, GOARCH=...) to affect the build. +-// +-// This type is immutable, and compared to see if the View needs to be +-// reconstructed. +-// +-// Note: whenever modifying this type, also modify the equivalence relation +-// implemented by viewDefinitionsEqual. +-// +-// TODO(golang/go#57979): viewDefinition should be sufficient for running +-// go/packages. Enforce this in the API. +-type viewDefinition struct { +- folder *Folder // pointer comparison is OK, as any new Folder creates a new def +- +- typ ViewType +- root protocol.DocumentURI // root directory; where to run the Go command +- gomod protocol.DocumentURI // the nearest go.mod file, or "" +- gowork protocol.DocumentURI // the nearest go.work file, or "" +- +- // workspaceModFiles holds the set of mod files active in this snapshot. +- // +- // For a go.work workspace, this is the set of workspace modfiles. For a +- // go.mod workspace, this contains the go.mod file defining the workspace +- // root, as well as any locally replaced modules (if +- // "includeReplaceInWorkspace" is set). +- // +- // TODO(rfindley): should we just run `go list -m` to compute this set? +- workspaceModFiles map[protocol.DocumentURI]struct{} +- workspaceModFilesErr error // error encountered computing workspaceModFiles +- +- // envOverlay holds additional environment to apply to this viewDefinition. +- envOverlay map[string]string +-} +- +-// definition implements the viewDefiner interface. +-func (d *viewDefinition) definition() *viewDefinition { return d } +- +-// Type returns the ViewType type, which determines how go/packages are loaded +-// for this View. +-func (d *viewDefinition) Type() ViewType { return d.typ } +- +-// Root returns the view root, which determines where packages are loaded from. 
+-func (d *viewDefinition) Root() protocol.DocumentURI { return d.root } +- +-// GoMod returns the nearest go.mod file for this view's root, or "". +-func (d *viewDefinition) GoMod() protocol.DocumentURI { return d.gomod } +- +-// GoWork returns the nearest go.work file for this view's root, or "". +-func (d *viewDefinition) GoWork() protocol.DocumentURI { return d.gowork } +- +-// EnvOverlay returns a new sorted slice of environment variables (in the form +-// "k=v") for this view definition's env overlay. +-func (d *viewDefinition) EnvOverlay() []string { +- var env []string +- for k, v := range d.envOverlay { +- env = append(env, fmt.Sprintf("%s=%s", k, v)) +- } +- sort.Strings(env) +- return env +-} +- +-// GOOS returns the effective GOOS value for this view definition, accounting +-// for its env overlay. +-func (d *viewDefinition) GOOS() string { +- if goos, ok := d.envOverlay["GOOS"]; ok { +- return goos +- } +- return d.folder.Env.GOOS +-} +- +-// GOARCH returns the effective GOARCH value for this view definition, accounting +-// for its env overlay. +-func (d *viewDefinition) GOARCH() string { +- if goarch, ok := d.envOverlay["GOARCH"]; ok { +- return goarch +- } +- return d.folder.Env.GOARCH +-} +- +-// adjustedGO111MODULE is the value of GO111MODULE to use for loading packages. +-// It is adjusted to default to "auto" rather than "on", since if we are in +-// GOPATH and have no module, we may as well allow a GOPATH view to work. +-func (d viewDefinition) adjustedGO111MODULE() string { +- if d.folder.Env.GO111MODULE != "" { +- return d.folder.Env.GO111MODULE +- } +- return "auto" +-} +- +-// ModFiles returns a new, sorted slice of go.mod files for main modules +-// enclosed in the snapshot's view and known to the snapshot. +-func (d viewDefinition) ModFiles() []protocol.DocumentURI { +- return slices.Sorted(maps.Keys(d.workspaceModFiles)) +-} +- +-// viewDefinitionsEqual reports whether x and y are equivalent. +-func viewDefinitionsEqual(x, y *viewDefinition) bool { +- if (x.workspaceModFilesErr == nil) != (y.workspaceModFilesErr == nil) { +- return false +- } +- if x.workspaceModFilesErr != nil { +- if x.workspaceModFilesErr.Error() != y.workspaceModFilesErr.Error() { +- return false +- } +- } else if !moremaps.SameKeys(x.workspaceModFiles, y.workspaceModFiles) { +- return false +- } +- if len(x.envOverlay) != len(y.envOverlay) { +- return false +- } +- for i, xv := range x.envOverlay { +- if xv != y.envOverlay[i] { +- return false +- } +- } +- return x.folder == y.folder && +- x.typ == y.typ && +- x.root == y.root && +- x.gomod == y.gomod && +- x.gowork == y.gowork +-} +- +-// A ViewType describes how we load package information for a view. +-// +-// This is used for constructing the go/packages.Load query, and for +-// interpreting missing packages, imports, or errors. +-// +-// See the documentation for individual ViewType values for details. +-type ViewType int +- +-const ( +- // GoPackagesDriverView is a view with a non-empty GOPACKAGESDRIVER +- // environment variable. +- // +- // Load: ./... from the workspace folder. +- GoPackagesDriverView ViewType = iota +- +- // GOPATHView is a view in GOPATH mode. +- // +- // I.e. in GOPATH, with GO111MODULE=off, or GO111MODULE=auto with no +- // go.mod file. +- // +- // Load: ./... from the workspace folder. +- GOPATHView +- +- // GoModView is a view in module mode with a single Go module. +- // +- // Load: /... from the module root. +- GoModView +- +- // GoWorkView is a view in module mode with a go.work file. 
+- // +- // Load: /... from the workspace folder, for each module. +- GoWorkView +- +- // An AdHocView is a collection of files in a given directory, not in GOPATH +- // or a module. +- // +- // Load: . from the workspace folder. +- AdHocView +-) +- +-func (t ViewType) String() string { +- switch t { +- case GoPackagesDriverView: +- return "GoPackagesDriver" +- case GOPATHView: +- return "GOPATH" +- case GoModView: +- return "GoMod" +- case GoWorkView: +- return "GoWork" +- case AdHocView: +- return "AdHoc" +- default: +- return "Unknown" +- } +-} +- +-// usesModules reports whether the view uses Go modules. +-func (typ ViewType) usesModules() bool { +- switch typ { +- case GoModView, GoWorkView: +- return true +- default: +- return false +- } +-} +- +-// ID returns the unique ID of this View. +-func (v *View) ID() string { return v.id } +- +-// GoCommandRunner returns the shared gocommand.Runner for this view. +-func (v *View) GoCommandRunner() *gocommand.Runner { +- return v.gocmdRunner +-} +- +-// Folder returns the folder at the base of this view. +-func (v *View) Folder() *Folder { +- return v.folder +-} +- +-// Env returns the environment to use for running go commands in this view. +-func (v *View) Env() []string { +- return slices.Concat( +- os.Environ(), +- v.folder.Options.EnvSlice(), +- []string{"GO111MODULE=" + v.adjustedGO111MODULE()}, +- v.EnvOverlay(), +- ) +-} +- +-// ModcacheIndex returns the module cache index +-func (v *View) ModcacheIndex() (*modindex.Index, error) { +- return v.modcacheState.getIndex() +-} +- +-// UpdateFolders updates the set of views for the new folders. +-// +-// Calling this causes each view to be reinitialized. +-func (s *Session) UpdateFolders(ctx context.Context, newFolders []*Folder) error { +- s.viewMu.Lock() +- defer s.viewMu.Unlock() +- +- overlays := s.Overlays() +- var openFiles []protocol.DocumentURI +- for _, o := range overlays { +- openFiles = append(openFiles, o.URI()) +- } +- +- defs, err := selectViewDefs(ctx, s, newFolders, openFiles) +- if err != nil { +- return err +- } +- var newViews []*View +- for _, def := range defs { +- v, _, release := s.createView(ctx, def) +- release() +- newViews = append(newViews, v) +- } +- for _, v := range s.views { +- v.shutdown() +- } +- s.views = newViews +- return nil +-} +- +-// RunProcessEnvFunc runs fn with the process env for this snapshot's view. +-// Note: the process env contains cached module and filesystem state. +-func (s *Snapshot) RunProcessEnvFunc(ctx context.Context, fn func(context.Context, *imports.Options) error) error { +- return s.view.importsState.runProcessEnvFunc(ctx, s, fn) +-} +- +-// separated out from its sole use in locateTemplateFiles for testability +-func fileHasExtension(path string, suffixes []string) bool { +- ext := filepath.Ext(path) +- if ext != "" && ext[0] == '.' { +- ext = ext[1:] +- } +- for _, s := range suffixes { +- if s != "" && ext == s { +- return true +- } +- } +- return false +-} +- +-// locateTemplateFiles ensures that the snapshot has mapped template files +-// within the workspace folder. 
+-func (s *Snapshot) locateTemplateFiles(ctx context.Context) { +- suffixes := s.Options().TemplateExtensions +- if len(suffixes) == 0 { +- return +- } +- +- searched := 0 +- filterFunc := s.view.filterFunc() +- err := filepath.WalkDir(s.view.folder.Dir.Path(), func(path string, entry os.DirEntry, err error) error { +- if err != nil { +- return err +- } +- if entry.IsDir() { +- return nil +- } +- if fileLimit > 0 && searched > fileLimit { +- return errExhausted +- } +- searched++ +- if !fileHasExtension(path, suffixes) { +- return nil +- } +- uri := protocol.URIFromPath(path) +- if filterFunc(uri) { +- return nil +- } +- // Get the file in order to include it in the snapshot. +- // TODO(golang/go#57558): it is fundamentally broken to track files in this +- // way; we may lose them if configuration or layout changes cause a view to +- // be recreated. +- // +- // Furthermore, this operation must ignore errors, including context +- // cancellation, or risk leaving the snapshot in an undefined state. +- s.ReadFile(ctx, uri) +- return nil +- }) +- if err != nil { +- event.Error(ctx, "searching for template files failed", err) +- } +-} +- +-// filterFunc returns a func that reports whether uri is filtered by the currently configured +-// directoryFilters. +-func (v *View) filterFunc() func(protocol.DocumentURI) bool { +- v.filterFuncOnce.Do(func() { +- folderDir := v.folder.Dir.Path() +- gomodcache := v.folder.Env.GOMODCACHE +- var filters []string +- filters = append(filters, v.folder.Options.DirectoryFilters...) +- if pref, ok := strings.CutPrefix(gomodcache, folderDir); ok { +- modcacheFilter := "-" + strings.TrimPrefix(filepath.ToSlash(pref), "/") +- filters = append(filters, modcacheFilter) +- } +- pathIncluded := PathIncludeFunc(filters) +- v._filterFunc = func(uri protocol.DocumentURI) bool { +- // Only filter relative to the configured root directory. +- if pathutil.InDir(folderDir, uri.Path()) { +- return relPathExcludedByFilter(strings.TrimPrefix(uri.Path(), folderDir), pathIncluded) +- } +- return false +- } +- }) +- return v._filterFunc +-} +- +-// shutdown releases resources associated with the view. +-func (v *View) shutdown() { +- // Cancel the initial workspace load if it is still running. +- v.cancelInitialWorkspaceLoad() +- v.importsState.stopTimer() +- if v.modcacheState != nil { +- v.modcacheState.stopTimer() +- } +- +- v.snapshotMu.Lock() +- if v.snapshot != nil { +- v.snapshot.cancel() +- v.snapshot.decref() +- v.snapshot = nil +- } +- v.snapshotMu.Unlock() +-} +- +-// ScanImports scans the module cache synchronously. +-// For use in tests. +-func (v *View) ScanImports() { +- gomodcache := v.folder.Env.GOMODCACHE +- dirCache := v.importsState.modCache.dirCache(gomodcache) +- imports.ScanModuleCache(gomodcache, dirCache, log.Printf) +-} +- +-// IgnoredFile reports if a file would be ignored by a `go list` of the whole +-// workspace. +-// +-// While go list ./... skips directories starting with '.', '_', or 'testdata', +-// gopls may still load them via file queries. Explicitly filter them out. +-func (s *Snapshot) IgnoredFile(uri protocol.DocumentURI) bool { +- // Fast path: if uri doesn't contain '.', '_', or 'testdata', it is not +- // possible that it is ignored. 
+- { +- uriStr := string(uri) +- if !strings.Contains(uriStr, ".") && !strings.Contains(uriStr, "_") && !strings.Contains(uriStr, "testdata") { +- return false +- } +- } +- +- return s.view.ignoreFilter.ignored(uri.Path()) +-} +- +-// An ignoreFilter implements go list's exclusion rules via its 'ignored' method. +-type ignoreFilter struct { +- prefixes []string // root dirs, ending in filepath.Separator +-} +- +-// newIgnoreFilter returns a new ignoreFilter implementing exclusion rules +-// relative to the provided directories. +-func newIgnoreFilter(dirs []string) *ignoreFilter { +- f := new(ignoreFilter) +- for _, d := range dirs { +- f.prefixes = append(f.prefixes, filepath.Clean(d)+string(filepath.Separator)) +- } +- return f +-} +- +-func (f *ignoreFilter) ignored(filename string) bool { +- for _, prefix := range f.prefixes { +- if suffix, ok := strings.CutPrefix(filename, prefix); ok { +- if checkIgnored(suffix) { +- return true +- } +- } +- } +- return false +-} +- +-// checkIgnored implements go list's exclusion rules. +-// Quoting “go help list”: +-// +-// Directory and file names that begin with "." or "_" are ignored +-// by the go tool, as are directories named "testdata". +-func checkIgnored(suffix string) bool { +- // Note: this could be further optimized by writing a HasSegment helper, a +- // segment-boundary respecting variant of strings.Contains. +- for component := range strings.SplitSeq(suffix, string(filepath.Separator)) { +- if len(component) == 0 { +- continue +- } +- if component[0] == '.' || component[0] == '_' || component == "testdata" { +- return true +- } +- } +- return false +-} +- +-// Snapshot returns the current snapshot for the view, and a +-// release function that must be called when the Snapshot is +-// no longer needed. +-// +-// The resulting error is non-nil if and only if the view is shut down, in +-// which case the resulting release function will also be nil. +-func (v *View) Snapshot() (*Snapshot, func(), error) { +- v.snapshotMu.Lock() +- defer v.snapshotMu.Unlock() +- if v.snapshot == nil { +- return nil, nil, errors.New("view is shutdown") +- } +- return v.snapshot, v.snapshot.Acquire(), nil +-} +- +-// initialize loads the metadata (and currently, file contents, due to +-// golang/go#57558) for the main package query of the View, which depends on +-// the view type (see ViewType). If s.initialized is already true, initialize +-// is a no op. +-// +-// The first attempt--which populates the first snapshot for a new view--must +-// be allowed to run to completion without being cancelled. +-// +-// Subsequent attempts are triggered by conditions where gopls can't enumerate +-// specific packages that require reloading, such as a change to a go.mod file. +-// These attempts may be cancelled, and then retried by a later call. +-// +-// Postcondition: if ctx was not cancelled, s.initialized is true, s.initialErr +-// holds the error resulting from initialization, if any, and s.metadata holds +-// the resulting metadata graph. +-func (s *Snapshot) initialize(ctx context.Context, firstAttempt bool) { +- // Acquire initializationSema, which is +- // (in effect) a mutex with a timeout. 
+- select { +- case <-ctx.Done(): +- return +- case s.view.initializationSema <- struct{}{}: +- } +- +- defer func() { +- <-s.view.initializationSema +- }() +- +- s.mu.Lock() +- initialized := s.initialized +- s.mu.Unlock() +- +- if initialized { +- return +- } +- +- defer func() { +- if firstAttempt { +- close(s.view.initialWorkspaceLoad) +- } +- }() +- +- // TODO(rFindley): we should only locate template files on the first attempt, +- // or guard it via a different mechanism. +- s.locateTemplateFiles(ctx) +- +- // Collect module paths to load by parsing go.mod files. If a module fails to +- // parse, capture the parsing failure as a critical diagnostic. +- var scopes []loadScope // scopes to load +- var modDiagnostics []*Diagnostic // diagnostics for broken go.mod files +- addError := func(uri protocol.DocumentURI, err error) { +- modDiagnostics = append(modDiagnostics, &Diagnostic{ +- URI: uri, +- Severity: protocol.SeverityError, +- Source: ListError, +- Message: err.Error(), +- }) +- } +- +- if len(s.view.workspaceModFiles) > 0 { +- for modURI := range s.view.workspaceModFiles { +- // Verify that the modfile is valid before trying to load it. +- // +- // TODO(rfindley): now that we no longer need to parse the modfile in +- // order to load scope, we could move these diagnostics to a more general +- // location where we diagnose problems with modfiles or the workspace. +- // +- // Be careful not to add context cancellation errors as critical module +- // errors. +- fh, err := s.ReadFile(ctx, modURI) +- if err != nil { +- if ctx.Err() != nil { +- return +- } +- addError(modURI, err) +- continue +- } +- parsed, err := s.ParseMod(ctx, fh) +- if err != nil { +- if ctx.Err() != nil { +- return +- } +- addError(modURI, err) +- continue +- } +- if parsed.File == nil || parsed.File.Module == nil { +- addError(modURI, fmt.Errorf("no module path for %s", modURI)) +- continue +- } +- // Previously, we loaded /... for each module path, but that +- // is actually incorrect when the pattern may match packages in more than +- // one module. See golang/go#59458 for more details. +- scopes = append(scopes, moduleLoadScope{dir: modURI.DirPath(), modulePath: parsed.File.Module.Mod.Path}) +- } +- } else { +- scopes = append(scopes, viewLoadScope{}) +- } +- +- // If we're loading anything, ensure we also load builtin, +- // since it provides fake definitions (and documentation) +- // for types like int that are used everywhere. +- if len(scopes) > 0 { +- scopes = append(scopes, packageLoadScope("builtin")) +- } +- loadErr := s.load(ctx, NetworkOK, scopes...) +- +- // A failure is retryable if it may have been due to context cancellation, +- // and this is not the initial workspace load (firstAttempt==true). +- // +- // The IWL runs on a detached context with a long (~10m) timeout, so +- // if the context was canceled we consider loading to have failed +- // permanently. 
+- if loadErr != nil && ctx.Err() != nil && !firstAttempt { +- return +- } +- +- var initialErr *InitializationError +- switch { +- case loadErr != nil && ctx.Err() != nil: +- event.Error(ctx, fmt.Sprintf("initial workspace load: %v", loadErr), loadErr) +- initialErr = &InitializationError{ +- MainError: loadErr, +- } +- case loadErr != nil: +- event.Error(ctx, "initial workspace load failed", loadErr) +- extractedDiags := s.extractGoCommandErrors(ctx, loadErr) +- initialErr = &InitializationError{ +- MainError: loadErr, +- Diagnostics: moremaps.Group(extractedDiags, byURI), +- } +- case s.view.workspaceModFilesErr != nil: +- initialErr = &InitializationError{ +- MainError: s.view.workspaceModFilesErr, +- } +- case len(modDiagnostics) > 0: +- initialErr = &InitializationError{ +- MainError: errors.New(modDiagnostics[0].Message), +- } +- } +- +- s.mu.Lock() +- defer s.mu.Unlock() +- +- s.initialized = true +- s.initialErr = initialErr +-} +- +-// A StateChange describes external state changes that may affect a snapshot. +-// +-// By far the most common of these is a change to file state, but a query of +-// module upgrade information or vulnerabilities also affects gopls' behavior. +-type StateChange struct { +- Modifications []file.Modification // if set, the raw modifications originating this change +- Files map[protocol.DocumentURI]file.Handle +- ModuleUpgrades map[protocol.DocumentURI]map[string]string +- Vulns map[protocol.DocumentURI]*vulncheck.Result +- CompilerOptDetails map[protocol.DocumentURI]bool // package directory -> whether or not we want details +-} +- +-// InvalidateView processes the provided state change, invalidating any derived +-// results that depend on the changed state. +-// +-// The resulting snapshot is non-nil, representing the outcome of the state +-// change. The second result is a function that must be called to release the +-// snapshot when the snapshot is no longer needed. +-// +-// An error is returned if the given view is no longer active in the session. +-func (s *Session) InvalidateView(ctx context.Context, view *View, changed StateChange) (*Snapshot, func(), error) { +- s.viewMu.Lock() +- defer s.viewMu.Unlock() +- +- if !slices.Contains(s.views, view) { +- return nil, nil, fmt.Errorf("view is no longer active") +- } +- snapshot, release, _ := s.invalidateViewLocked(ctx, view, changed) +- return snapshot, release, nil +-} +- +-// invalidateViewLocked invalidates the content of the given view. +-// (See [Session.InvalidateView]). +-// +-// The resulting bool reports whether the View needs to be re-diagnosed. +-// (See [Snapshot.clone]). +-// +-// s.viewMu must be held while calling this method. +-func (s *Session) invalidateViewLocked(ctx context.Context, v *View, changed StateChange) (*Snapshot, func(), bool) { +- // Detach the context so that content invalidation cannot be canceled. +- ctx = xcontext.Detach(ctx) +- +- // This should be the only time we hold the view's snapshot lock for any period of time. +- v.snapshotMu.Lock() +- defer v.snapshotMu.Unlock() +- +- prevSnapshot := v.snapshot +- +- if prevSnapshot == nil { +- panic("invalidateContent called after shutdown") +- } +- +- // Cancel all still-running previous requests, since they would be +- // operating on stale data. +- prevSnapshot.cancel() +- +- // Do not clone a snapshot until its view has finished initializing. +- // +- // TODO(rfindley): shouldn't we do this before canceling? 
+- prevSnapshot.AwaitInitialized(ctx) +- +- var needsDiagnosis bool +- s.snapshotWG.Add(1) +- v.snapshot, needsDiagnosis = prevSnapshot.clone(ctx, v.baseCtx, changed, s.snapshotWG.Done) +- +- // Remove the initial reference created when prevSnapshot was created. +- prevSnapshot.decref() +- +- // Return a second lease to the caller. +- return v.snapshot, v.snapshot.Acquire(), needsDiagnosis +-} +- +-// defineView computes the view definition for the provided workspace folder +-// and URI. +-// +-// If forURI is non-empty, this view should be the best view including forURI. +-// Otherwise, it is the default view for the folder. +-// +-// defineView may return an error if the context is cancelled, or the +-// workspace folder path is invalid. +-// +-// Note: keep this function in sync with [RelevantViews]. +-// +-// TODO(rfindley): we should be able to remove the error return, as +-// findModules is going away, and all other I/O is memoized. +-// +-// TODO(rfindley): pass in a narrower interface for the file.Source +-// (e.g. fileExists func(DocumentURI) bool) to make clear that this +-// process depends only on directory information, not file contents. +-func defineView(ctx context.Context, fs file.Source, folder *Folder, forFile file.Handle) (*viewDefinition, error) { +- if err := checkPathValid(folder.Dir.Path()); err != nil { +- return nil, fmt.Errorf("invalid workspace folder path: %w; check that the spelling of the configured workspace folder path agrees with the spelling reported by the operating system", err) +- } +- dir := folder.Dir.Path() +- if forFile != nil { +- dir = forFile.URI().DirPath() +- } +- +- def := new(viewDefinition) +- def.folder = folder +- +- if forFile != nil && fileKind(forFile) == file.Go { +- // If the file has GOOS/GOARCH build constraints that +- // don't match the folder's environment (which comes from +- // 'go env' in the folder, plus user options), +- // add those constraints to the viewDefinition's environment. +- +- // Content trimming is nontrivial, so do this outside of the loop below. +- // Keep this in sync with [RelevantViews]. +- path := forFile.URI().Path() +- if content, err := forFile.Content(); err == nil { +- // Note the err == nil condition above: by convention a non-existent file +- // does not have any constraints. See the related note in [RelevantViews]: this +- // choice of behavior shouldn't actually matter. In this case, we should +- // only call defineView with Overlays, which always have content. +- content = trimContentForPortMatch(content) +- viewPort := port{def.folder.Env.GOOS, def.folder.Env.GOARCH} +- if !viewPort.matches(path, content) { +- for _, p := range preferredPorts { +- if p.matches(path, content) { +- if def.envOverlay == nil { +- def.envOverlay = make(map[string]string) +- } +- def.envOverlay["GOOS"] = p.GOOS +- def.envOverlay["GOARCH"] = p.GOARCH +- break +- } +- } +- } +- } +- } +- +- var err error +- dirURI := protocol.URIFromPath(dir) +- goworkFromEnv := false +- if folder.Env.ExplicitGOWORK != "off" && folder.Env.ExplicitGOWORK != "" { +- goworkFromEnv = true +- def.gowork = protocol.URIFromPath(folder.Env.ExplicitGOWORK) +- } else { +- def.gowork, err = findRootPattern(ctx, dirURI, "go.work", fs) +- if err != nil { +- return nil, err +- } +- } +- +- // When deriving the best view for a given file, we only want to search +- // up the directory hierarchy for modfiles. 
+- def.gomod, err = findRootPattern(ctx, dirURI, "go.mod", fs) +- if err != nil { +- return nil, err +- } +- +- // Determine how we load and where to load package information for this view +- // +- // Specifically, set +- // - def.typ +- // - def.root +- // - def.workspaceModFiles, and +- // - def.envOverlay. +- +- // If GOPACKAGESDRIVER is set it takes precedence. +- if def.folder.Env.EffectiveGOPACKAGESDRIVER != "" { +- def.typ = GoPackagesDriverView +- def.root = dirURI +- return def, nil +- } +- +- // From go.dev/ref/mod, module mode is active if GO111MODULE=on, or +- // GO111MODULE=auto or "" and we are inside a module or have a GOWORK value. +- // But gopls is less strict, allowing GOPATH mode if GO111MODULE="", and +- // AdHoc views if no module is found. +- +- // gomodWorkspace is a helper to compute the correct set of workspace +- // modfiles for a go.mod file, based on folder options. +- gomodWorkspace := func() map[protocol.DocumentURI]unit { +- modFiles := map[protocol.DocumentURI]struct{}{def.gomod: {}} +- if folder.Options.IncludeReplaceInWorkspace { +- includingReplace, err := goModModules(ctx, def.gomod, fs) +- if err == nil { +- modFiles = includingReplace +- } else { +- // If the go.mod file fails to parse, we don't know anything about +- // replace directives, so fall back to a view of just the root module. +- } +- } +- return modFiles +- } +- +- // Prefer a go.work file if it is available and contains the module relevant +- // to forURI. +- if def.adjustedGO111MODULE() != "off" && folder.Env.ExplicitGOWORK != "off" && def.gowork != "" { +- def.typ = GoWorkView +- if goworkFromEnv { +- // The go.work file could be anywhere, which can lead to confusing error +- // messages. +- def.root = dirURI +- } else { +- // The go.work file could be anywhere, which can lead to confusing error +- def.root = def.gowork.Dir() +- } +- def.workspaceModFiles, def.workspaceModFilesErr = goWorkModules(ctx, def.gowork, fs) +- +- // If forURI is in a module but that module is not +- // included in the go.work file, use a go.mod view with GOWORK=off. +- if forFile != nil && def.workspaceModFilesErr == nil && def.gomod != "" { +- if _, ok := def.workspaceModFiles[def.gomod]; !ok { +- def.typ = GoModView +- def.root = def.gomod.Dir() +- def.workspaceModFiles = gomodWorkspace() +- if def.envOverlay == nil { +- def.envOverlay = make(map[string]string) +- } +- def.envOverlay["GOWORK"] = "off" +- } +- } +- return def, nil +- } +- +- // Otherwise, use the active module, if in module mode. +- // +- // Note, we could override GO111MODULE here via envOverlay if we wanted to +- // support the case where someone opens a module with GO111MODULE=off. But +- // that is probably not worth worrying about (at this point, folks probably +- // shouldn't be setting GO111MODULE). +- if def.adjustedGO111MODULE() != "off" && def.gomod != "" { +- def.typ = GoModView +- def.root = def.gomod.Dir() +- def.workspaceModFiles = gomodWorkspace() +- return def, nil +- } +- +- // Check if the workspace is within any GOPATH directory. +- inGOPATH := false +- for _, gp := range filepath.SplitList(folder.Env.GOPATH) { +- if pathutil.InDir(filepath.Join(gp, "src"), dir) { +- inGOPATH = true +- break +- } +- } +- if def.adjustedGO111MODULE() != "on" && inGOPATH { +- def.typ = GOPATHView +- def.root = dirURI +- return def, nil +- } +- +- // We're not in a workspace, module, or GOPATH, so have no better choice than +- // an ad-hoc view. 
+- def.typ = AdHocView +- def.root = dirURI +- return def, nil +-} +- +-// FetchGoEnv queries the environment and Go command to collect environment +-// variables necessary for the workspace folder. +-func FetchGoEnv(ctx context.Context, folder protocol.DocumentURI, opts *settings.Options) (*GoEnv, error) { +- dir := folder.Path() +- // All of the go commands invoked here should be fast. No need to share a +- // runner with other operations. +- runner := new(gocommand.Runner) +- inv := gocommand.Invocation{ +- WorkingDir: dir, +- Env: opts.EnvSlice(), +- } +- +- var ( +- env = new(GoEnv) +- err error +- ) +- envvars := map[string]*string{ +- "GOOS": &env.GOOS, +- "GOARCH": &env.GOARCH, +- "GOCACHE": &env.GOCACHE, +- "GOPATH": &env.GOPATH, +- "GOPRIVATE": &env.GOPRIVATE, +- "GOMODCACHE": &env.GOMODCACHE, +- "GOFLAGS": &env.GOFLAGS, +- "GO111MODULE": &env.GO111MODULE, +- "GOTOOLCHAIN": &env.GOTOOLCHAIN, +- "GOROOT": &env.GOROOT, +- } +- if err := loadGoEnv(ctx, dir, opts.EnvSlice(), runner, envvars); err != nil { +- return nil, err +- } +- +- env.GoVersion, err = gocommand.GoVersion(ctx, inv, runner) +- if err != nil { +- return nil, err +- } +- env.GoVersionOutput, err = gocommand.GoVersionOutput(ctx, inv, runner) +- if err != nil { +- return nil, err +- } +- +- // The value of GOPACKAGESDRIVER is not returned through the go command. +- if driver, ok := opts.Env["GOPACKAGESDRIVER"]; ok { +- if driver != "off" { +- env.EffectiveGOPACKAGESDRIVER = driver +- } +- } else if driver := os.Getenv("GOPACKAGESDRIVER"); driver != "off" { +- env.EffectiveGOPACKAGESDRIVER = driver +- // A user may also have a gopackagesdriver binary on their machine, which +- // works the same way as setting GOPACKAGESDRIVER. +- // +- // TODO(rfindley): remove this call to LookPath. We should not support this +- // undocumented method of setting GOPACKAGESDRIVER. +- if env.EffectiveGOPACKAGESDRIVER == "" { +- tool, err := exec.LookPath("gopackagesdriver") +- if err == nil && tool != "" { +- env.EffectiveGOPACKAGESDRIVER = tool +- } +- } +- } +- +- // While GOWORK is available through the Go command, we want to differentiate +- // between an explicit GOWORK value and one which is implicit from the file +- // system. The former doesn't change unless the environment changes. +- if gowork, ok := opts.Env["GOWORK"]; ok { +- env.ExplicitGOWORK = gowork +- } else { +- env.ExplicitGOWORK = os.Getenv("GOWORK") +- } +- return env, nil +-} +- +-// loadGoEnv loads `go env` values into the provided map, keyed by Go variable +-// name. +-func loadGoEnv(ctx context.Context, dir string, configEnv []string, runner *gocommand.Runner, vars map[string]*string) error { +- // We can save ~200 ms by requesting only the variables we care about. +- args := []string{"-json"} +- for k := range vars { +- args = append(args, k) +- } +- +- inv := gocommand.Invocation{ +- Verb: "env", +- Args: args, +- Env: configEnv, +- WorkingDir: dir, +- } +- stdout, err := runner.Run(ctx, inv) +- if err != nil { +- return err +- } +- envMap := make(map[string]string) +- if err := json.Unmarshal(stdout.Bytes(), &envMap); err != nil { +- return fmt.Errorf("internal error unmarshaling JSON from 'go env': %w", err) +- } +- for key, ptr := range vars { +- *ptr = envMap[key] +- } +- +- return nil +-} +- +-// findRootPattern looks for files with the given basename in dir or any parent +-// directory of dir, using the provided FileSource. It returns the first match, +-// starting from dir and search parents. 
+-// +-// The resulting string is either the file path of a matching file with the +-// given basename, or "" if none was found. +-// +-// findRootPattern only returns an error in the case of context cancellation. +-func findRootPattern(ctx context.Context, dirURI protocol.DocumentURI, basename string, fs file.Source) (protocol.DocumentURI, error) { +- dir := dirURI.Path() +- for dir != "" { +- target := filepath.Join(dir, basename) +- uri := protocol.URIFromPath(target) +- fh, err := fs.ReadFile(ctx, uri) +- if err != nil { +- return "", err // context cancelled +- } +- if fileExists(fh) { +- return uri, nil +- } +- // Trailing separators must be trimmed, otherwise filepath.Split is a noop. +- next, _ := filepath.Split(strings.TrimRight(dir, string(filepath.Separator))) +- if next == dir { +- break +- } +- dir = next +- } +- return "", nil +-} +- +-// checkPathValid performs an OS-specific path validity check. The +-// implementation varies for filesystems that are case-insensitive +-// (e.g. macOS, Windows), and for those that disallow certain file +-// names (e.g. path segments ending with a period on Windows, or +-// reserved names such as "com"; see +-// https://learn.microsoft.com/en-us/windows/win32/fileio/naming-a-file). +-var checkPathValid = defaultCheckPathValid +- +-// CheckPathValid checks whether a directory is suitable as a workspace folder. +-func CheckPathValid(dir string) error { return checkPathValid(dir) } +- +-func defaultCheckPathValid(path string) error { +- return nil +-} +- +-// IsGoPrivatePath reports whether target is a private import path, as identified +-// by the GOPRIVATE environment variable. +-func (s *Snapshot) IsGoPrivatePath(target string) bool { +- return globsMatchPath(s.view.folder.Env.GOPRIVATE, target) +-} +- +-// ModuleUpgrades returns known module upgrades for the dependencies of +-// modfile. +-func (s *Snapshot) ModuleUpgrades(modfile protocol.DocumentURI) map[string]string { +- s.mu.Lock() +- defer s.mu.Unlock() +- upgrades := map[string]string{} +- orig, _ := s.moduleUpgrades.Get(modfile) +- maps.Copy(upgrades, orig) +- return upgrades +-} +- +-// MaxGovulncheckResultsAge defines the maximum vulnerability age considered +-// valid by gopls. +-// +-// Mutable for testing. +-var MaxGovulncheckResultAge = 1 * time.Hour +- +-// Vulnerabilities returns known vulnerabilities for the given modfile. +-// +-// Results more than an hour old are excluded. +-// +-// TODO(suzmue): replace command.Vuln with a different type, maybe +-// https://pkg.go.dev/golang.org/x/vuln/cmd/govulncheck/govulnchecklib#Summary? +-// +-// TODO(rfindley): move to snapshot.go +-func (s *Snapshot) Vulnerabilities(modfiles ...protocol.DocumentURI) map[protocol.DocumentURI]*vulncheck.Result { +- m := make(map[protocol.DocumentURI]*vulncheck.Result) +- now := time.Now() +- +- s.mu.Lock() +- defer s.mu.Unlock() +- +- if len(modfiles) == 0 { // empty means all modfiles +- modfiles = slices.Collect(s.vulns.Keys()) +- } +- for _, modfile := range modfiles { +- vuln, _ := s.vulns.Get(modfile) +- if vuln != nil && now.Sub(vuln.AsOf) > MaxGovulncheckResultAge { +- vuln = nil +- } +- m[modfile] = vuln +- } +- return m +-} +- +-// GoVersion returns the effective release Go version (the X in go1.X) for this +-// view. +-func (v *View) GoVersion() int { +- return v.folder.Env.GoVersion +-} +- +-// GoVersionString returns the effective Go version string for this view. +-// +-// Unlike [GoVersion], this encodes the minor version and commit hash information. 
+-func (v *View) GoVersionString() string { +- return gocommand.ParseGoVersionOutput(v.folder.Env.GoVersionOutput) +-} +- +-// GoVersionString is temporarily available from the snapshot. +-// +-// TODO(rfindley): refactor so that this method is not necessary. +-func (s *Snapshot) GoVersionString() string { +- return s.view.GoVersionString() +-} +- +-// Copied from +-// https://cs.opensource.google/go/go/+/master:src/cmd/go/internal/str/path.go;l=58;drc=2910c5b4a01a573ebc97744890a07c1a3122c67a +-func globsMatchPath(globs, target string) bool { +- for globs != "" { +- // Extract next non-empty glob in comma-separated list. +- var glob string +- if i := strings.Index(globs, ","); i >= 0 { +- glob, globs = globs[:i], globs[i+1:] +- } else { +- glob, globs = globs, "" +- } +- if glob == "" { +- continue +- } +- +- // A glob with N+1 path elements (N slashes) needs to be matched +- // against the first N+1 path elements of target, +- // which end just before the N+1'th slash. +- n := strings.Count(glob, "/") +- prefix := target +- // Walk target, counting slashes, truncating at the N+1'th slash. +- for i := range len(target) { +- if target[i] == '/' { +- if n == 0 { +- prefix = target[:i] +- break +- } +- n-- +- } +- } +- if n > 0 { +- // Not enough prefix elements. +- continue +- } +- matched, _ := path.Match(glob, prefix) +- if matched { +- return true +- } +- } +- return false +-} +- +-// TODO(rfindley): clean up the redundancy of allFilesExcluded, +-// pathExcludedByFilterFunc, pathExcludedByFilter, view.filterFunc... +-func allFilesExcluded(files []string, filterFunc func(protocol.DocumentURI) bool) bool { +- for _, f := range files { +- uri := protocol.URIFromPath(f) +- if !filterFunc(uri) { +- return false +- } +- } +- return true +-} +- +-func relPathExcludedByFilter(path string, pathIncluded func(string) bool) bool { +- path = strings.TrimPrefix(filepath.ToSlash(path), "/") +- return !pathIncluded(path) +-} +diff -urN a/gopls/internal/cache/view_test.go b/gopls/internal/cache/view_test.go +--- a/gopls/internal/cache/view_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/cache/view_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,175 +0,0 @@ +-// Copyright 2020 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. 
+-package cache +- +-import ( +- "os" +- "path/filepath" +- "testing" +- +- "golang.org/x/tools/gopls/internal/protocol" +-) +- +-func TestCaseInsensitiveFilesystem(t *testing.T) { +- base := t.TempDir() +- +- inner := filepath.Join(base, "a/B/c/DEFgh") +- if err := os.MkdirAll(inner, 0777); err != nil { +- t.Fatal(err) +- } +- file := filepath.Join(inner, "f.go") +- if err := os.WriteFile(file, []byte("hi"), 0777); err != nil { +- t.Fatal(err) +- } +- if _, err := os.Stat(filepath.Join(inner, "F.go")); err != nil { +- t.Skip("filesystem is case-sensitive") +- } +- +- tests := []struct { +- path string +- err bool +- }{ +- {file, false}, +- {filepath.Join(inner, "F.go"), true}, +- {filepath.Join(base, "a/b/c/defgh/f.go"), true}, +- } +- for _, tt := range tests { +- err := checkPathValid(tt.path) +- if err != nil != tt.err { +- t.Errorf("checkPathValid(%q) = %v, wanted error: %v", tt.path, err, tt.err) +- } +- } +-} +- +-func TestInVendor(t *testing.T) { +- for _, tt := range []struct { +- path string +- inVendor bool +- }{ +- {"foo/vendor/x.go", false}, +- {"foo/vendor/x/x.go", true}, +- {"foo/x.go", false}, +- {"foo/vendor/foo.txt", false}, +- {"foo/vendor/modules.txt", false}, +- } { +- if got := inVendor(protocol.URIFromPath(tt.path)); got != tt.inVendor { +- t.Errorf("expected %s inVendor %v, got %v", tt.path, tt.inVendor, got) +- } +- } +-} +- +-func TestFilters(t *testing.T) { +- tests := []struct { +- filters []string +- included []string +- excluded []string +- }{ +- { +- included: []string{"x"}, +- }, +- { +- filters: []string{"-"}, +- excluded: []string{"x", "x/a"}, +- }, +- { +- filters: []string{"-x", "+y"}, +- included: []string{"y", "y/a", "z"}, +- excluded: []string{"x", "x/a"}, +- }, +- { +- filters: []string{"-x", "+x/y", "-x/y/z"}, +- included: []string{"x/y", "x/y/a", "a"}, +- excluded: []string{"x", "x/a", "x/y/z/a"}, +- }, +- { +- filters: []string{"+foobar", "-foo"}, +- included: []string{"foobar", "foobar/a"}, +- excluded: []string{"foo", "foo/a"}, +- }, +- } +- +- for _, tt := range tests { +- pathIncluded := PathIncludeFunc(tt.filters) +- for _, inc := range tt.included { +- if relPathExcludedByFilter(inc, pathIncluded) { +- t.Errorf("filters %q excluded %v, wanted included", tt.filters, inc) +- } +- } +- for _, exc := range tt.excluded { +- if !relPathExcludedByFilter(exc, pathIncluded) { +- t.Errorf("filters %q included %v, wanted excluded", tt.filters, exc) +- } +- } +- } +-} +- +-func TestSuffixes(t *testing.T) { +- type file struct { +- path string +- want bool +- } +- type cases struct { +- option []string +- files []file +- } +- tests := []cases{ +- {[]string{"tmpl", "gotmpl"}, []file{ // default +- {"foo", false}, +- {"foo.tmpl", true}, +- {"foo.gotmpl", true}, +- {"tmpl", false}, +- {"tmpl.go", false}}, +- }, +- {[]string{"tmpl", "gotmpl", "html", "gohtml"}, []file{ +- {"foo.gotmpl", true}, +- {"foo.html", true}, +- {"foo.gohtml", true}, +- {"html", false}}, +- }, +- {[]string{"tmpl", "gotmpl", ""}, []file{ // possible user mistake +- {"foo.gotmpl", true}, +- {"foo.go", false}, +- {"foo", false}}, +- }, +- } +- for _, a := range tests { +- suffixes := a.option +- for _, b := range a.files { +- got := fileHasExtension(b.path, suffixes) +- if got != b.want { +- t.Errorf("got %v, want %v, option %q, file %q (%+v)", +- got, b.want, a.option, b.path, b) +- } +- } +- } +-} +- +-func TestIgnoreFilter(t *testing.T) { +- tests := []struct { +- dirs []string +- path string +- want bool +- }{ +- {[]string{"a"}, "a/testdata/foo", true}, +- {[]string{"a"}, 
"a/_ignore/foo", true}, +- {[]string{"a"}, "a/.ignore/foo", true}, +- {[]string{"a"}, "b/testdata/foo", false}, +- {[]string{"a"}, "testdata/foo", false}, +- {[]string{"a", "b"}, "b/testdata/foo", true}, +- {[]string{"a"}, "atestdata/foo", false}, +- } +- +- for _, test := range tests { +- // convert to filepaths, for convenience +- for i, dir := range test.dirs { +- test.dirs[i] = filepath.FromSlash(dir) +- } +- test.path = filepath.FromSlash(test.path) +- +- f := newIgnoreFilter(test.dirs) +- if got := f.ignored(test.path); got != test.want { +- t.Errorf("newIgnoreFilter(%q).ignore(%q) = %t, want %t", test.dirs, test.path, got, test.want) +- } +- } +-} +diff -urN a/gopls/internal/cache/workspace.go b/gopls/internal/cache/workspace.go +--- a/gopls/internal/cache/workspace.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/cache/workspace.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,128 +0,0 @@ +-// Copyright 2020 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package cache +- +-import ( +- "context" +- "errors" +- "fmt" +- "path/filepath" +- +- "golang.org/x/mod/modfile" +- "golang.org/x/tools/gopls/internal/file" +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/gopls/internal/test/integration/fake/glob" +-) +- +-// isGoWork reports if uri is a go.work file. +-func isGoWork(uri protocol.DocumentURI) bool { +- return uri.Base() == "go.work" +-} +- +-// goWorkModules returns the URIs of go.mod files named by the go.work file. +-func goWorkModules(ctx context.Context, gowork protocol.DocumentURI, fs file.Source) (map[protocol.DocumentURI]unit, error) { +- fh, err := fs.ReadFile(ctx, gowork) +- if err != nil { +- return nil, err // canceled +- } +- content, err := fh.Content() +- if err != nil { +- return nil, err +- } +- filename := gowork.Path() +- dir := filepath.Dir(filename) +- workFile, err := modfile.ParseWork(filename, content, nil) +- if err != nil { +- return nil, fmt.Errorf("parsing go.work: %w", err) +- } +- var usedDirs []string +- for _, use := range workFile.Use { +- usedDirs = append(usedDirs, use.Path) +- } +- return localModFiles(dir, usedDirs), nil +-} +- +-// localModFiles builds a set of local go.mod files referenced by +-// goWorkOrModPaths, which is a slice of paths as contained in a go.work 'use' +-// directive or go.mod 'replace' directive (and which therefore may use either +-// '/' or '\' as a path separator). +-func localModFiles(relativeTo string, goWorkOrModPaths []string) map[protocol.DocumentURI]unit { +- modFiles := make(map[protocol.DocumentURI]unit) +- for _, path := range goWorkOrModPaths { +- modDir := filepath.FromSlash(path) +- if !filepath.IsAbs(modDir) { +- modDir = filepath.Join(relativeTo, modDir) +- } +- modURI := protocol.URIFromPath(filepath.Join(modDir, "go.mod")) +- modFiles[modURI] = unit{} +- } +- return modFiles +-} +- +-// isGoMod reports if uri is a go.mod file. +-func isGoMod(uri protocol.DocumentURI) bool { +- return uri.Base() == "go.mod" +-} +- +-// isWorkspaceFile reports if uri matches a set of globs defined in workspaceFiles +-func isWorkspaceFile(uri protocol.DocumentURI, workspaceFiles []string) bool { +- for _, workspaceFile := range workspaceFiles { +- g, err := glob.Parse(workspaceFile) +- if err != nil { +- continue +- } +- +- if g.Match(uri.Path()) { +- return true +- } +- } +- return false +-} +- +-// goModModules returns the URIs of "workspace" go.mod files defined by a +-// go.mod file. 
This set is defined to be the given go.mod file itself, as well +-// as the modfiles of any locally replaced modules in the go.mod file. +-func goModModules(ctx context.Context, gomod protocol.DocumentURI, fs file.Source) (map[protocol.DocumentURI]unit, error) { +- fh, err := fs.ReadFile(ctx, gomod) +- if err != nil { +- return nil, err // canceled +- } +- content, err := fh.Content() +- if err != nil { +- return nil, err +- } +- filename := gomod.Path() +- dir := filepath.Dir(filename) +- modFile, err := modfile.Parse(filename, content, nil) +- if err != nil { +- return nil, err +- } +- var localReplaces []string +- for _, replace := range modFile.Replace { +- if modfile.IsDirectoryPath(replace.New.Path) { +- localReplaces = append(localReplaces, replace.New.Path) +- } +- } +- modFiles := localModFiles(dir, localReplaces) +- modFiles[gomod] = unit{} +- return modFiles, nil +-} +- +-// fileExists reports whether the file has a Content (which may be empty). +-// An overlay exists even if it is not reflected in the file system. +-func fileExists(fh file.Handle) bool { +- _, err := fh.Content() +- return err == nil +-} +- +-// errExhausted is returned by findModules if the file scan limit is reached. +-var errExhausted = errors.New("exhausted") +- +-// Limit go.mod search to 1 million files. As a point of reference, +-// Kubernetes has 22K files (as of 2020-11-24). +-// +-// Note: per golang/go#56496, the previous limit of 1M files was too slow, at +-// which point this limit was decreased to 100K. +-const fileLimit = 100_000 +diff -urN a/gopls/internal/cache/xrefs/xrefs.go b/gopls/internal/cache/xrefs/xrefs.go +--- a/gopls/internal/cache/xrefs/xrefs.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/cache/xrefs/xrefs.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,194 +0,0 @@ +-// Copyright 2022 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-// Package xrefs defines the serializable index of cross-package +-// references that is computed during type checking. +-// +-// See ../references.go for the 'references' query. +-package xrefs +- +-import ( +- "go/ast" +- "go/types" +- "sort" +- +- "golang.org/x/tools/go/types/objectpath" +- "golang.org/x/tools/gopls/internal/cache/metadata" +- "golang.org/x/tools/gopls/internal/cache/parsego" +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/gopls/internal/util/bug" +- "golang.org/x/tools/gopls/internal/util/frob" +-) +- +-// Index constructs a serializable index of outbound cross-references +-// for the specified type-checked package. +-func Index(files []*parsego.File, pkg *types.Package, info *types.Info) []byte { +- // pkgObjects maps each referenced package Q to a mapping: +- // from each referenced symbol in Q to the ordered list +- // of references to that symbol from this package. +- // A nil types.Object indicates a reference +- // to the package as a whole: an import. +- pkgObjects := make(map[*types.Package]map[types.Object]*gobObject) +- +- // getObjects returns the object-to-references mapping for a package. 
+- getObjects := func(pkg *types.Package) map[types.Object]*gobObject { +- objects, ok := pkgObjects[pkg] +- if !ok { +- objects = make(map[types.Object]*gobObject) +- pkgObjects[pkg] = objects +- } +- return objects +- } +- +- objectpathFor := new(objectpath.Encoder).For +- +- for fileIndex, pgf := range files { +- for cur := range pgf.Cursor.Preorder((*ast.Ident)(nil), (*ast.ImportSpec)(nil)) { +- switch n := cur.Node().(type) { +- case *ast.Ident: +- // Report a reference for each identifier that +- // uses a symbol exported from another package. +- // (The built-in error.Error method has no package.) +- if n.IsExported() { +- if obj, ok := info.Uses[n]; ok && +- obj.Pkg() != nil && +- obj.Pkg() != pkg { +- +- // For instantiations of generic methods, +- // use the generic object (see issue #60622). +- if fn, ok := obj.(*types.Func); ok { +- obj = fn.Origin() +- } +- +- objects := getObjects(obj.Pkg()) +- gobObj, ok := objects[obj] +- if !ok { +- path, err := objectpathFor(obj) +- if err != nil { +- // Capitalized but not exported +- // (e.g. local const/var/type). +- continue +- } +- gobObj = &gobObject{Path: path} +- objects[obj] = gobObj +- } +- +- // golang/go#66683: nodes can under/overflow the file. +- // For example, "var _ = x." creates a SelectorExpr(Sel=Ident("_")) +- // that is beyond EOF. (Arguably Ident.Name should be "".) +- if rng, err := pgf.NodeRange(n); err == nil { +- gobObj.Refs = append(gobObj.Refs, gobRef{ +- FileIndex: fileIndex, +- Range: rng, +- }) +- } +- } +- } +- +- case *ast.ImportSpec: +- // Report a reference from each import path +- // string to the imported package. +- pkgname := info.PkgNameOf(n) +- if pkgname == nil { +- continue // missing import +- } +- objects := getObjects(pkgname.Imported()) +- gobObj, ok := objects[nil] +- if !ok { +- gobObj = &gobObject{Path: ""} +- objects[nil] = gobObj +- } +- // golang/go#66683: nodes can under/overflow the file. +- if rng, err := pgf.NodeRange(n.Path); err == nil { +- gobObj.Refs = append(gobObj.Refs, gobRef{ +- FileIndex: fileIndex, +- Range: rng, +- }) +- } else { +- bug.Reportf("out of bounds import spec %+v", n.Path) +- } +- } +- } +- } +- +- // Flatten the maps into slices, and sort for determinism. +- var packages []*gobPackage +- for p := range pkgObjects { +- objects := pkgObjects[p] +- gp := &gobPackage{ +- PkgPath: metadata.PackagePath(p.Path()), +- Objects: make([]*gobObject, 0, len(objects)), +- } +- for _, gobObj := range objects { +- gp.Objects = append(gp.Objects, gobObj) +- } +- sort.Slice(gp.Objects, func(i, j int) bool { +- return gp.Objects[i].Path < gp.Objects[j].Path +- }) +- packages = append(packages, gp) +- } +- sort.Slice(packages, func(i, j int) bool { +- return packages[i].PkgPath < packages[j].PkgPath +- }) +- +- return packageCodec.Encode(packages) +-} +- +-// Lookup searches a serialized index produced by an indexPackage +-// operation on m, and returns the locations of all references from m +-// to any object in the target set. Each object is denoted by a pair +-// of (package path, object path). 
+-func Lookup(mp *metadata.Package, data []byte, targets map[metadata.PackagePath]map[objectpath.Path]struct{}) (locs []protocol.Location) { +- var packages []*gobPackage +- packageCodec.Decode(data, &packages) +- for _, gp := range packages { +- if objectSet, ok := targets[gp.PkgPath]; ok { +- for _, gobObj := range gp.Objects { +- if _, ok := objectSet[gobObj.Path]; ok { +- for _, ref := range gobObj.Refs { +- uri := mp.CompiledGoFiles[ref.FileIndex] +- locs = append(locs, protocol.Location{ +- URI: uri, +- Range: ref.Range, +- }) +- } +- } +- } +- } +- } +- +- return locs +-} +- +-// -- serialized representation -- +- +-// The cross-reference index records the location of all references +-// from one package to symbols defined in other packages +-// (dependencies). It does not record within-package references. +-// The index for package P consists of a list of gopPackage records, +-// each enumerating references to symbols defined a single dependency, Q. +- +-// TODO(adonovan): opt: choose a more compact encoding. +-// The gobRef.Range field is the obvious place to begin. +- +-// (The name says gob but in fact we use frob.) +-var packageCodec = frob.CodecFor[[]*gobPackage]() +- +-// A gobPackage records the set of outgoing references from the index +-// package to symbols defined in a dependency package. +-type gobPackage struct { +- PkgPath metadata.PackagePath // defining package (Q) +- Objects []*gobObject // set of Q objects referenced by P +-} +- +-// A gobObject records all references to a particular symbol. +-type gobObject struct { +- Path objectpath.Path // symbol name within package; "" => import of package itself +- Refs []gobRef // locations of references within P, in lexical order +-} +- +-type gobRef struct { +- FileIndex int // index of enclosing file within P's CompiledGoFiles +- Range protocol.Range // source range of reference +-} +diff -urN a/gopls/internal/clonetest/clonetest.go b/gopls/internal/clonetest/clonetest.go +--- a/gopls/internal/clonetest/clonetest.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/clonetest/clonetest.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,151 +0,0 @@ +-// Copyright 2024 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-// Package clonetest provides utility functions for testing Clone operations. +-// +-// The [NonZero] helper may be used to construct a type in which fields are +-// recursively set to a non-zero value. This value can then be cloned, and the +-// [ZeroOut] helper can set values stored in the clone to zero, recursively. +-// Doing so should not mutate the original. +-package clonetest +- +-import ( +- "fmt" +- "reflect" +- "slices" +-) +- +-// NonZero returns a T set to some appropriate nonzero value: +-// - Values of basic type are set to an arbitrary non-zero value. +-// - Struct fields are set to a non-zero value. +-// - Array indices are set to a non-zero value. +-// - Pointers point to a non-zero value. +-// - Maps and slices are given a non-zero element. +-// - Chan, Func, Interface, UnsafePointer are all unsupported. +-// +-// NonZero breaks cycles by returning a zero value for recursive types. +-func NonZero[T any]() T { +- var x T +- t := reflect.TypeOf(x) +- if t == nil { +- panic("untyped nil") +- } +- v := nonZeroValue(t, nil) +- return v.Interface().(T) +-} +- +-// nonZeroValue returns a non-zero, addressable value of the given type. 
+-func nonZeroValue(t reflect.Type, seen []reflect.Type) reflect.Value { +- if slices.Contains(seen, t) { +- // Cycle: return the zero value. +- return reflect.Zero(t) +- } +- seen = append(seen, t) +- v := reflect.New(t).Elem() +- switch t.Kind() { +- case reflect.Bool: +- v.SetBool(true) +- +- case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: +- v.SetInt(1) +- +- case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr: +- v.SetUint(1) +- +- case reflect.Float32, reflect.Float64: +- v.SetFloat(1) +- +- case reflect.Complex64, reflect.Complex128: +- v.SetComplex(1) +- +- case reflect.Array: +- for i := 0; i < v.Len(); i++ { +- v.Index(i).Set(nonZeroValue(t.Elem(), seen)) +- } +- +- case reflect.Map: +- v2 := reflect.MakeMap(t) +- v2.SetMapIndex(nonZeroValue(t.Key(), seen), nonZeroValue(t.Elem(), seen)) +- v.Set(v2) +- +- case reflect.Pointer: +- v2 := nonZeroValue(t.Elem(), seen) +- v.Set(v2.Addr()) +- +- case reflect.Slice: +- v2 := reflect.Append(v, nonZeroValue(t.Elem(), seen)) +- v.Set(v2) +- +- case reflect.String: +- v.SetString(".") +- +- case reflect.Struct: +- for i := 0; i < v.NumField(); i++ { +- v.Field(i).Set(nonZeroValue(t.Field(i).Type, seen)) +- } +- +- default: // Chan, Func, Interface, UnsafePointer +- panic(fmt.Sprintf("reflect kind %v not supported", t.Kind())) +- } +- return v +-} +- +-// ZeroOut recursively sets values contained in t to zero. +-// Values of king Chan, Func, Interface, UnsafePointer are all unsupported. +-// +-// No attempt is made to handle cyclic values. +-func ZeroOut[T any](t *T) { +- v := reflect.ValueOf(t).Elem() +- zeroOutValue(v) +-} +- +-func zeroOutValue(v reflect.Value) { +- if v.IsZero() { +- return // nothing to do; this also handles untyped nil values +- } +- +- switch v.Kind() { +- case reflect.Bool, +- reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64, +- reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr, +- reflect.Float32, reflect.Float64, +- reflect.Complex64, reflect.Complex128, +- reflect.String: +- +- v.Set(reflect.Zero(v.Type())) +- +- case reflect.Array: +- for i := 0; i < v.Len(); i++ { +- zeroOutValue(v.Index(i)) +- } +- +- case reflect.Map: +- iter := v.MapRange() +- for iter.Next() { +- mv := iter.Value() +- if mv.CanAddr() { +- zeroOutValue(mv) +- } else { +- mv = reflect.New(mv.Type()).Elem() +- } +- v.SetMapIndex(iter.Key(), mv) +- } +- +- case reflect.Pointer: +- zeroOutValue(v.Elem()) +- +- case reflect.Slice: +- for i := 0; i < v.Len(); i++ { +- zeroOutValue(v.Index(i)) +- } +- +- case reflect.Struct: +- for i := 0; i < v.NumField(); i++ { +- zeroOutValue(v.Field(i)) +- } +- +- default: +- panic(fmt.Sprintf("reflect kind %v not supported", v.Kind())) +- } +-} +diff -urN a/gopls/internal/clonetest/clonetest_test.go b/gopls/internal/clonetest/clonetest_test.go +--- a/gopls/internal/clonetest/clonetest_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/clonetest/clonetest_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,74 +0,0 @@ +-// Copyright 2024 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. 
+- +-package clonetest_test +- +-import ( +- "testing" +- +- "github.com/google/go-cmp/cmp" +- "golang.org/x/tools/gopls/internal/clonetest" +-) +- +-func Test(t *testing.T) { +- doTest(t, true, false) +- type B bool +- doTest(t, B(true), false) +- doTest(t, 1, 0) +- doTest(t, int(1), 0) +- doTest(t, int8(1), 0) +- doTest(t, int16(1), 0) +- doTest(t, int32(1), 0) +- doTest(t, int64(1), 0) +- doTest(t, uint(1), 0) +- doTest(t, uint8(1), 0) +- doTest(t, uint16(1), 0) +- doTest(t, uint32(1), 0) +- doTest(t, uint64(1), 0) +- doTest(t, uintptr(1), 0) +- doTest(t, float32(1), 0) +- doTest(t, float64(1), 0) +- doTest(t, complex64(1), 0) +- doTest(t, complex128(1), 0) +- doTest(t, [3]int{1, 1, 1}, [3]int{0, 0, 0}) +- doTest(t, ".", "") +- m1, m2 := map[string]int{".": 1}, map[string]int{".": 0} +- doTest(t, m1, m2) +- doTest(t, &m1, &m2) +- doTest(t, []int{1}, []int{0}) +- i, j := 1, 0 +- doTest(t, &i, &j) +- k, l := &i, &j +- doTest(t, &k, &l) +- +- s1, s2 := []int{1}, []int{0} +- doTest(t, &s1, &s2) +- +- type S struct { +- Field int +- } +- doTest(t, S{1}, S{0}) +- +- doTest(t, []*S{{1}}, []*S{{0}}) +- +- // An arbitrary recursive type. +- type LinkedList[T any] struct { +- V T +- Next *LinkedList[T] +- } +- doTest(t, &LinkedList[int]{V: 1}, &LinkedList[int]{V: 0}) +-} +- +-// doTest checks that the result of NonZero matches the nonzero argument, and +-// that zeroing out that result matches the zero argument. +-func doTest[T any](t *testing.T, nonzero, zero T) { +- got := clonetest.NonZero[T]() +- if diff := cmp.Diff(nonzero, got); diff != "" { +- t.Fatalf("NonZero() returned unexpected diff (-want +got):\n%s", diff) +- } +- clonetest.ZeroOut(&got) +- if diff := cmp.Diff(zero, got); diff != "" { +- t.Errorf("ZeroOut() returned unexpected diff (-want +got):\n%s", diff) +- } +-} +diff -urN a/gopls/internal/cmd/call_hierarchy.go b/gopls/internal/cmd/call_hierarchy.go +--- a/gopls/internal/cmd/call_hierarchy.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/cmd/call_hierarchy.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,143 +0,0 @@ +-// Copyright 2020 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package cmd +- +-import ( +- "context" +- "flag" +- "fmt" +- "strings" +- +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/internal/tool" +-) +- +-// callHierarchy implements the callHierarchy verb for gopls. 
+-type callHierarchy struct { +- app *Application +-} +- +-func (c *callHierarchy) Name() string { return "call_hierarchy" } +-func (c *callHierarchy) Parent() string { return c.app.Name() } +-func (c *callHierarchy) Usage() string { return "" } +-func (c *callHierarchy) ShortHelp() string { return "display selected identifier's call hierarchy" } +-func (c *callHierarchy) DetailedHelp(f *flag.FlagSet) { +- fmt.Fprint(f.Output(), ` +-Example: +- +- $ # 1-indexed location (:line:column or :#offset) of the target identifier +- $ gopls call_hierarchy helper/helper.go:8:6 +- $ gopls call_hierarchy helper/helper.go:#53 +-`) +- printFlagDefaults(f) +-} +- +-func (c *callHierarchy) Run(ctx context.Context, args ...string) error { +- if len(args) != 1 { +- return tool.CommandLineErrorf("call_hierarchy expects 1 argument (position)") +- } +- +- cli, _, err := c.app.connect(ctx) +- if err != nil { +- return err +- } +- defer cli.terminate(ctx) +- +- from := parseSpan(args[0]) +- file, err := cli.openFile(ctx, from.URI()) +- if err != nil { +- return err +- } +- +- loc, err := file.spanLocation(from) +- if err != nil { +- return err +- } +- +- p := protocol.CallHierarchyPrepareParams{ +- TextDocumentPositionParams: protocol.LocationTextDocumentPositionParams(loc), +- } +- +- callItems, err := cli.server.PrepareCallHierarchy(ctx, &p) +- if err != nil { +- return err +- } +- if len(callItems) == 0 { +- return fmt.Errorf("function declaration identifier not found at %v", args[0]) +- } +- +- for _, item := range callItems { +- incomingCalls, err := cli.server.IncomingCalls(ctx, &protocol.CallHierarchyIncomingCallsParams{Item: item}) +- if err != nil { +- return err +- } +- for i, call := range incomingCalls { +- // From the spec: CallHierarchyIncomingCall.FromRanges is relative to +- // the caller denoted by CallHierarchyIncomingCall.from. +- printString, err := callItemPrintString(ctx, cli, call.From, call.From.URI, call.FromRanges) +- if err != nil { +- return err +- } +- fmt.Printf("caller[%d]: %s\n", i, printString) +- } +- +- printString, err := callItemPrintString(ctx, cli, item, "", nil) +- if err != nil { +- return err +- } +- fmt.Printf("identifier: %s\n", printString) +- +- outgoingCalls, err := cli.server.OutgoingCalls(ctx, &protocol.CallHierarchyOutgoingCallsParams{Item: item}) +- if err != nil { +- return err +- } +- for i, call := range outgoingCalls { +- // From the spec: CallHierarchyOutgoingCall.FromRanges is the range +- // relative to the caller, e.g the item passed to +- printString, err := callItemPrintString(ctx, cli, call.To, item.URI, call.FromRanges) +- if err != nil { +- return err +- } +- fmt.Printf("callee[%d]: %s\n", i, printString) +- } +- } +- +- return nil +-} +- +-// callItemPrintString returns a protocol.CallHierarchyItem object represented as a string. +-// item and call ranges (protocol.Range) are converted to user friendly spans (1-indexed). 
+-func callItemPrintString(ctx context.Context, cli *client, item protocol.CallHierarchyItem, callsURI protocol.DocumentURI, calls []protocol.Range) (string, error) { +- itemFile, err := cli.openFile(ctx, item.URI) +- if err != nil { +- return "", err +- } +- itemSpan, err := itemFile.rangeSpan(item.Range) +- if err != nil { +- return "", err +- } +- +- var callRanges []string +- if callsURI != "" { +- callsFile, err := cli.openFile(ctx, callsURI) +- if err != nil { +- return "", err +- } +- for _, rng := range calls { +- call, err := callsFile.rangeSpan(rng) +- if err != nil { +- return "", err +- } +- callRange := fmt.Sprintf("%d:%d-%d", call.Start().Line(), call.Start().Column(), call.End().Column()) +- callRanges = append(callRanges, callRange) +- } +- } +- +- printString := fmt.Sprintf("function %s in %v", item.Name, itemSpan) +- if len(calls) > 0 { +- printString = fmt.Sprintf("ranges %s in %s from/to %s", strings.Join(callRanges, ", "), callsURI.Path(), printString) +- } +- return printString, nil +-} +diff -urN a/gopls/internal/cmd/capabilities_test.go b/gopls/internal/cmd/capabilities_test.go +--- a/gopls/internal/cmd/capabilities_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/cmd/capabilities_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,171 +0,0 @@ +-// Copyright 2019 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package cmd +- +-import ( +- "context" +- "fmt" +- "os" +- "path/filepath" +- "testing" +- +- "golang.org/x/tools/gopls/internal/cache" +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/gopls/internal/server" +- "golang.org/x/tools/gopls/internal/settings" +- "golang.org/x/tools/internal/testenv" +-) +- +-// TestCapabilities does some minimal validation of the server's adherence to the LSP. +-// The checks in the test are added as changes are made and errors noticed. +-func TestCapabilities(t *testing.T) { +- // server.DidOpen fails to obtain metadata without go command (e.g. on wasm). +- testenv.NeedsTool(t, "go") +- +- tmpDir, err := os.MkdirTemp("", "fake") +- if err != nil { +- t.Fatal(err) +- } +- tmpFile := filepath.Join(tmpDir, "fake.go") +- if err := os.WriteFile(tmpFile, []byte(""), 0775); err != nil { +- t.Fatal(err) +- } +- if err := os.WriteFile(filepath.Join(tmpDir, "go.mod"), []byte("module fake\n\ngo 1.12\n"), 0775); err != nil { +- t.Fatal(err) +- } +- defer os.RemoveAll(tmpDir) +- +- app := New() +- ctx := context.Background() +- +- // Initialize the client. +- // (Unlike app.connect, we use minimal Initialize params.) +- client := newClient(app) +- options := settings.DefaultOptions(app.options) +- server := server.New(cache.NewSession(ctx, cache.New(nil)), client, options) +- params := &protocol.ParamInitialize{} +- params.RootURI = protocol.URIFromPath(tmpDir) +- params.Capabilities.Workspace.Configuration = true +- if err := client.initialize(ctx, server, params); err != nil { +- t.Fatal(err) +- } +- defer client.terminate(ctx) +- +- if err := validateCapabilities(client.initializeResult); err != nil { +- t.Error(err) +- } +- +- // Open the file on the server side. 
+- uri := protocol.URIFromPath(tmpFile) +- if err := server.DidOpen(ctx, &protocol.DidOpenTextDocumentParams{ +- TextDocument: protocol.TextDocumentItem{ +- URI: uri, +- LanguageID: "go", +- Version: 1, +- Text: `package main; func main() {};`, +- }, +- }); err != nil { +- t.Fatal(err) +- } +- +- // If we are sending a full text change, the change.Range must be nil. +- // It is not enough for the Change to be empty, as that is ambiguous. +- if err := server.DidChange(ctx, &protocol.DidChangeTextDocumentParams{ +- TextDocument: protocol.VersionedTextDocumentIdentifier{ +- TextDocumentIdentifier: protocol.TextDocumentIdentifier{ +- URI: uri, +- }, +- Version: 2, +- }, +- ContentChanges: []protocol.TextDocumentContentChangeEvent{ +- { +- Range: nil, +- Text: `package main; func main() { fmt.Println("") }`, +- }, +- }, +- }); err != nil { +- t.Fatal(err) +- } +- +- // Send a code action request to validate expected types. +- actions, err := server.CodeAction(ctx, &protocol.CodeActionParams{ +- TextDocument: protocol.TextDocumentIdentifier{ +- URI: uri, +- }, +- Context: protocol.CodeActionContext{ +- Only: []protocol.CodeActionKind{protocol.SourceOrganizeImports}, +- }, +- }) +- if err != nil { +- t.Fatal(err) +- } +- for _, action := range actions { +- // Validate that an empty command is sent along with import organization responses. +- if action.Kind == protocol.SourceOrganizeImports && action.Command != nil { +- t.Errorf("unexpected command for import organization") +- } +- } +- +- if err := server.DidSave(ctx, &protocol.DidSaveTextDocumentParams{ +- TextDocument: protocol.TextDocumentIdentifier{ +- URI: uri, +- }, +- // LSP specifies that a file can be saved with optional text, so this field must be nil. +- Text: nil, +- }); err != nil { +- t.Fatal(err) +- } +- +- // Send a completion request to validate expected types. +- list, err := server.Completion(ctx, &protocol.CompletionParams{ +- TextDocumentPositionParams: protocol.TextDocumentPositionParams{ +- TextDocument: protocol.TextDocumentIdentifier{ +- URI: uri, +- }, +- Position: protocol.Position{ +- Line: 0, +- Character: 28, +- }, +- }, +- }) +- if err != nil { +- t.Fatal(err) +- } +- for _, item := range list.Items { +- // All other completion items should have nil commands. +- // An empty command will be treated as a command with the name '' by VS Code. +- // This causes VS Code to report errors to users about invalid commands. +- if item.Command != nil { +- t.Errorf("unexpected command for completion item") +- } +- // The item's TextEdit must be a pointer, as VS Code considers TextEdits +- // that don't contain the cursor position to be invalid. +- var textEdit = item.TextEdit.Value +- switch textEdit.(type) { +- case protocol.TextEdit, protocol.InsertReplaceEdit: +- default: +- t.Errorf("textEdit is not TextEdit nor InsertReplaceEdit, instead it is %T", textEdit) +- } +- } +- if err := server.Shutdown(ctx); err != nil { +- t.Fatal(err) +- } +- if err := server.Exit(ctx); err != nil { +- t.Fatal(err) +- } +-} +- +-func validateCapabilities(result *protocol.InitializeResult) error { +- // If the client sends "false" for RenameProvider.PrepareSupport, +- // the server must respond with a boolean. +- if v, ok := result.Capabilities.RenameProvider.(bool); !ok { +- return fmt.Errorf("RenameProvider must be a boolean if PrepareSupport is false (got %T)", v) +- } +- // The same goes for CodeActionKind.ValueSet. 
+- if v, ok := result.Capabilities.CodeActionProvider.(bool); !ok { +- return fmt.Errorf("CodeActionSupport must be a boolean if CodeActionKind.ValueSet has length 0 (got %T)", v) +- } +- return nil +-} +diff -urN a/gopls/internal/cmd/check.go b/gopls/internal/cmd/check.go +--- a/gopls/internal/cmd/check.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/cmd/check.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,128 +0,0 @@ +-// Copyright 2019 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package cmd +- +-import ( +- "context" +- "flag" +- "fmt" +- "slices" +- +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/gopls/internal/settings" +-) +- +-// check implements the check verb for gopls. +-type check struct { +- app *Application +- Severity string `flag:"severity" help:"minimum diagnostic severity (hint, info, warning, or error)"` +-} +- +-func (c *check) Name() string { return "check" } +-func (c *check) Parent() string { return c.app.Name() } +-func (c *check) Usage() string { return "" } +-func (c *check) ShortHelp() string { return "show diagnostic results for the specified file" } +-func (c *check) DetailedHelp(f *flag.FlagSet) { +- fmt.Fprint(f.Output(), ` +-Example: show the diagnostic results of this file: +- +- $ gopls check internal/cmd/check.go +-`) +- printFlagDefaults(f) +-} +- +-// Run performs the check on the files specified by args and prints the +-// results to stdout. +-func (c *check) Run(ctx context.Context, args ...string) error { +- severityCutoff := protocol.SeverityWarning +- switch c.Severity { +- case "hint": +- severityCutoff = protocol.SeverityHint +- case "info": +- severityCutoff = protocol.SeverityInformation +- case "warning": +- // default +- case "error": +- severityCutoff = protocol.SeverityError +- default: +- return fmt.Errorf("unrecognized -severity value %q", c.Severity) +- } +- +- if len(args) == 0 { +- return nil +- } +- +- // TODO(adonovan): formally, we are required to set this +- // option if we want RelatedInformation, but it appears to +- // have no effect on the server, even though the default is +- // false. Investigate. +- origOptions := c.app.options +- c.app.options = func(opts *settings.Options) { +- if origOptions != nil { +- origOptions(opts) +- } +- opts.RelatedInformationSupported = true +- } +- +- cli, _, err := c.app.connect(ctx) +- if err != nil { +- return err +- } +- defer cli.terminate(ctx) +- +- // Open and diagnose the requested files. +- var ( +- uris []protocol.DocumentURI +- checking = make(map[protocol.DocumentURI]*cmdFile) +- ) +- for _, arg := range args { +- uri := protocol.URIFromPath(arg) +- uris = append(uris, uri) +- file, err := cli.openFile(ctx, uri) +- if err != nil { +- return err +- } +- checking[uri] = file +- } +- if err := diagnoseFiles(ctx, cli.server, uris); err != nil { +- return err +- } +- +- // print prints a single element of a diagnostic. 
+- print := func(uri protocol.DocumentURI, rng protocol.Range, message string) error { +- file, err := cli.openFile(ctx, uri) +- if err != nil { +- return err +- } +- spn, err := file.rangeSpan(rng) +- if err != nil { +- return fmt.Errorf("could not convert position %v for %q", rng, message) +- } +- fmt.Printf("%v: %v\n", spn, message) +- return nil +- } +- +- for _, file := range checking { +- file.diagnosticsMu.Lock() +- diags := slices.Clone(file.diagnostics) +- file.diagnosticsMu.Unlock() +- +- for _, diag := range diags { +- if diag.Severity > severityCutoff { // lower severity value => greater severity, counterintuitively +- continue +- } +- if err := print(file.uri, diag.Range, diag.Message); err != nil { +- return err +- } +- for _, rel := range diag.RelatedInformation { +- if err := print(rel.Location.URI, rel.Location.Range, "- "+rel.Message); err != nil { +- return err +- } +- } +- +- } +- } +- return nil +-} +diff -urN a/gopls/internal/cmd/cmd.go b/gopls/internal/cmd/cmd.go +--- a/gopls/internal/cmd/cmd.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/cmd/cmd.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,942 +0,0 @@ +-// Copyright 2018 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-// Package cmd handles the gopls command line. +-// It contains a handler for each of the modes, along with all the flag handling +-// and the command line output format. +-package cmd +- +-import ( +- "context" +- "flag" +- "fmt" +- "log" +- "os" +- "path/filepath" +- "reflect" +- "sort" +- "strings" +- "sync" +- "text/tabwriter" +- "time" +- +- "golang.org/x/tools/gopls/internal/cache" +- "golang.org/x/tools/gopls/internal/debug" +- "golang.org/x/tools/gopls/internal/filecache" +- "golang.org/x/tools/gopls/internal/lsprpc" +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/gopls/internal/protocol/command" +- "golang.org/x/tools/gopls/internal/protocol/semtok" +- "golang.org/x/tools/gopls/internal/server" +- "golang.org/x/tools/gopls/internal/settings" +- "golang.org/x/tools/gopls/internal/util/browser" +- bugpkg "golang.org/x/tools/gopls/internal/util/bug" +- "golang.org/x/tools/gopls/internal/util/moreslices" +- "golang.org/x/tools/internal/diff" +- "golang.org/x/tools/internal/jsonrpc2" +- "golang.org/x/tools/internal/tool" +-) +- +-// Application is the main application as passed to tool.Main +-// It handles the main command line parsing and dispatch to the sub commands. +-type Application struct { +- // Core application flags +- +- // Embed the basic profiling flags supported by the tool package +- tool.Profile +- +- // We include the server configuration directly for now, so the flags work +- // even without the verb. +- // TODO: Remove this when we stop allowing the serve verb by default. +- Serve Serve +- +- // the options configuring function to invoke when building a server +- options func(*settings.Options) +- +- // Support for remote LSP server. +- Remote string `flag:"remote" help:"forward all commands to a remote lsp specified by this flag. With no special prefix, this is assumed to be a TCP address. If prefixed by 'unix;', the subsequent address is assumed to be a unix domain socket. If 'auto', or prefixed by 'auto;', the remote address is automatically resolved based on the executing environment."` +- +- // Verbose enables verbose logging. 
+- Verbose bool `flag:"v,verbose" help:"verbose output"` +- +- // VeryVerbose enables a higher level of verbosity in logging output. +- VeryVerbose bool `flag:"vv,veryverbose" help:"very verbose output"` +- +- // PrepareOptions is called to update the options when a new view is built. +- // It is primarily to allow the behavior of gopls to be modified by hooks. +- PrepareOptions func(*settings.Options) +- +- // editFlags holds flags that control how file edit operations +- // are applied, in particular when the server makes an ApplyEdits +- // downcall to the client. Present only for commands that apply edits. +- editFlags *EditFlags +-} +- +-// EditFlags defines flags common to {code{action,lens},format,imports,rename} +-// that control how edits are applied to the client's files. +-// +-// The type is exported for flag reflection. +-// +-// The -write, -diff, and -list flags are orthogonal but any +-// of them suppresses the default behavior, which is to print +-// the edited file contents. +-type EditFlags struct { +- Write bool `flag:"w,write" help:"write edited content to source files"` +- Preserve bool `flag:"preserve" help:"with -write, make copies of original files"` +- Diff bool `flag:"d,diff" help:"display diffs instead of edited file content"` +- List bool `flag:"l,list" help:"display names of edited files"` +-} +- +-func (app *Application) verbose() bool { +- return app.Verbose || app.VeryVerbose +-} +- +-// New returns a new Application ready to run. +-func New() *Application { +- app := &Application{ +- Serve: Serve{ +- RemoteListenTimeout: 1 * time.Minute, +- }, +- } +- app.Serve.app = app +- return app +-} +- +-// Name implements tool.Application returning the binary name. +-func (app *Application) Name() string { return "gopls" } +- +-// Usage implements tool.Application returning empty extra argument usage. +-func (app *Application) Usage() string { return "" } +- +-// ShortHelp implements tool.Application returning the main binary help. +-func (app *Application) ShortHelp() string { +- return "" +-} +- +-// DetailedHelp implements tool.Application returning the main binary help. +-// This includes the short help for all the sub commands. +-func (app *Application) DetailedHelp(f *flag.FlagSet) { +- w := tabwriter.NewWriter(f.Output(), 0, 0, 2, ' ', 0) +- defer w.Flush() +- +- fmt.Fprint(w, ` +-gopls is a Go language server. +- +-It is typically used with an editor to provide language features. When no +-command is specified, gopls will default to the 'serve' command. The language +-features can also be accessed via the gopls command-line interface. 
+- +-For documentation of all its features, see: +- +- https://github.com/golang/tools/blob/master/gopls/doc/features +- +-Usage: +- gopls help [] +- +-Command: +-`) +- fmt.Fprint(w, "\nMain\t\n") +- for _, c := range app.mainCommands() { +- fmt.Fprintf(w, " %s\t%s\n", c.Name(), c.ShortHelp()) +- } +- fmt.Fprint(w, "\t\nFeatures\t\n") +- for _, c := range app.featureCommands() { +- fmt.Fprintf(w, " %s\t%s\n", c.Name(), c.ShortHelp()) +- } +- if app.verbose() { +- fmt.Fprint(w, "\t\nInternal Use Only\t\n") +- for _, c := range app.internalCommands() { +- fmt.Fprintf(w, " %s\t%s\n", c.Name(), c.ShortHelp()) +- } +- } +- fmt.Fprint(w, "\nflags:\n") +- printFlagDefaults(f) +-} +- +-// this is a slightly modified version of flag.PrintDefaults to give us control +-func printFlagDefaults(s *flag.FlagSet) { +- var flags [][]*flag.Flag +- seen := map[flag.Value]int{} +- s.VisitAll(func(f *flag.Flag) { +- if i, ok := seen[f.Value]; !ok { +- seen[f.Value] = len(flags) +- flags = append(flags, []*flag.Flag{f}) +- } else { +- flags[i] = append(flags[i], f) +- } +- }) +- for _, entry := range flags { +- sort.SliceStable(entry, func(i, j int) bool { +- return len(entry[i].Name) < len(entry[j].Name) +- }) +- var b strings.Builder +- for i, f := range entry { +- switch i { +- case 0: +- b.WriteString(" -") +- default: +- b.WriteString(",-") +- } +- b.WriteString(f.Name) +- } +- +- f := entry[0] +- name, usage := flag.UnquoteUsage(f) +- if len(name) > 0 { +- b.WriteString("=") +- b.WriteString(name) +- } +- // Boolean flags of one ASCII letter are so common we +- // treat them specially, putting their usage on the same line. +- if b.Len() <= 4 { // space, space, '-', 'x'. +- b.WriteString("\t") +- } else { +- // Four spaces before the tab triggers good alignment +- // for both 4- and 8-space tab stops. +- b.WriteString("\n \t") +- } +- b.WriteString(strings.ReplaceAll(usage, "\n", "\n \t")) +- if !isZeroValue(f, f.DefValue) { +- if reflect.TypeOf(f.Value).Elem().Name() == "stringValue" { +- fmt.Fprintf(&b, " (default %q)", f.DefValue) +- } else { +- fmt.Fprintf(&b, " (default %v)", f.DefValue) +- } +- } +- fmt.Fprint(s.Output(), b.String(), "\n") +- } +-} +- +-// isZeroValue is copied from the flags package +-func isZeroValue(f *flag.Flag, value string) bool { +- // Build a zero value of the flag's Value type, and see if the +- // result of calling its String method equals the value passed in. +- // This works unless the Value type is itself an interface type. +- typ := reflect.TypeOf(f.Value) +- var z reflect.Value +- if typ.Kind() == reflect.Pointer { +- z = reflect.New(typ.Elem()) +- } else { +- z = reflect.Zero(typ) +- } +- return value == z.Interface().(flag.Value).String() +-} +- +-// Run takes the args after top level flag processing, and invokes the correct +-// sub command as specified by the first argument. +-// If no arguments are passed it will invoke the server sub command, as a +-// temporary measure for compatibility. +-func (app *Application) Run(ctx context.Context, args ...string) error { +- // In the category of "things we can do while waiting for the Go command": +- // Pre-initialize the filecache, which takes ~50ms to hash the gopls +- // executable, and immediately runs a gc. 
+- filecache.Start() +- +- ctx = debug.WithInstance(ctx) +- if len(args) == 0 { +- s := flag.NewFlagSet(app.Name(), flag.ExitOnError) +- return tool.Run(ctx, s, &app.Serve, args) +- } +- command, args := args[0], args[1:] +- for _, c := range app.Commands() { +- if c.Name() == command { +- s := flag.NewFlagSet(app.Name(), flag.ExitOnError) +- return tool.Run(ctx, s, c, args) +- } +- } +- return tool.CommandLineErrorf("Unknown command %v", command) +-} +- +-// Commands returns the set of commands supported by the gopls tool on the +-// command line. +-// The command is specified by the first non flag argument. +-func (app *Application) Commands() []tool.Application { +- var commands []tool.Application +- commands = append(commands, app.mainCommands()...) +- commands = append(commands, app.featureCommands()...) +- commands = append(commands, app.internalCommands()...) +- return commands +-} +- +-func (app *Application) mainCommands() []tool.Application { +- return []tool.Application{ +- &app.Serve, +- &version{app: app}, +- &bug{app: app}, +- &help{app: app}, +- &apiJSON{app: app}, +- &licenses{app: app}, +- } +-} +- +-func (app *Application) internalCommands() []tool.Application { +- return []tool.Application{ +- &vulncheck{app: app}, +- } +-} +- +-func (app *Application) featureCommands() []tool.Application { +- return []tool.Application{ +- &callHierarchy{app: app}, +- &check{app: app, Severity: "warning"}, +- &codeaction{app: app}, +- &codelens{app: app}, +- &definition{app: app}, +- &execute{app: app}, +- &fix{app: app}, // (non-functional) +- &foldingRanges{app: app}, +- &format{app: app}, +- &headlessMCP{app: app}, +- &highlight{app: app}, +- &implementation{app: app}, +- &imports{app: app}, +- newRemote(app, ""), +- newRemote(app, "inspect"), +- &links{app: app}, +- &prepareRename{app: app}, +- &references{app: app}, +- &rename{app: app}, +- &semanticToken{app: app}, +- &signature{app: app}, +- &stats{app: app}, +- &symbols{app: app}, +- +- &workspaceSymbol{app: app}, +- } +-} +- +-// connect creates and initializes a new in-process gopls LSP session. +-func (app *Application) connect(ctx context.Context) (*client, *cache.Session, error) { +- root, err := os.Getwd() +- if err != nil { +- return nil, nil, fmt.Errorf("finding workdir: %v", err) +- } +- options := settings.DefaultOptions(app.options) +- client := newClient(app) +- var ( +- svr protocol.Server +- sess *cache.Session +- ) +- if app.Remote == "" { +- // local +- sess = cache.NewSession(ctx, cache.New(nil)) +- svr = server.New(sess, client, options) +- ctx = protocol.WithClient(ctx, client) +- } else { +- // remote +- netConn, err := lsprpc.ConnectToRemote(ctx, app.Remote) +- if err != nil { +- return nil, nil, err +- } +- stream := jsonrpc2.NewHeaderStream(netConn) +- jsonConn := jsonrpc2.NewConn(stream) +- svr = protocol.ServerDispatcher(jsonConn) +- ctx = protocol.WithClient(ctx, client) +- jsonConn.Go(ctx, +- protocol.Handlers( +- protocol.ClientHandler(client, jsonrpc2.MethodNotFound))) +- } +- if err := client.initialize(ctx, svr, initParams(root, options)); err != nil { +- return nil, nil, err +- } +- return client, sess, nil +-} +- +-func initParams(rootDir string, opts *settings.Options) *protocol.ParamInitialize { +- params := &protocol.ParamInitialize{} +- params.RootURI = protocol.URIFromPath(rootDir) +- params.Capabilities.Workspace.Configuration = true +- +- // If you add an additional option here, +- // you must update the map key of settings.DefaultOptions called in (*Application).connect. 
+- params.Capabilities.TextDocument.Hover = &protocol.HoverClientCapabilities{ +- ContentFormat: []protocol.MarkupKind{opts.PreferredContentFormat}, +- } +- params.Capabilities.TextDocument.DocumentSymbol.HierarchicalDocumentSymbolSupport = opts.HierarchicalDocumentSymbolSupport +- params.Capabilities.TextDocument.SemanticTokens = protocol.SemanticTokensClientCapabilities{} +- params.Capabilities.TextDocument.SemanticTokens.Formats = []protocol.TokenFormat{"relative"} +- params.Capabilities.TextDocument.SemanticTokens.Requests.Range = &protocol.Or_ClientSemanticTokensRequestOptions_range{Value: true} +- // params.Capabilities.TextDocument.SemanticTokens.Requests.Range.Value = true +- params.Capabilities.TextDocument.SemanticTokens.Requests.Full = &protocol.Or_ClientSemanticTokensRequestOptions_full{Value: true} +- params.Capabilities.TextDocument.SemanticTokens.TokenTypes = moreslices.ConvertStrings[string](semtok.TokenTypes) +- params.Capabilities.TextDocument.SemanticTokens.TokenModifiers = moreslices.ConvertStrings[string](semtok.TokenModifiers) +- params.Capabilities.TextDocument.CodeAction = protocol.CodeActionClientCapabilities{ +- CodeActionLiteralSupport: protocol.ClientCodeActionLiteralOptions{ +- CodeActionKind: protocol.ClientCodeActionKindOptions{ +- ValueSet: []protocol.CodeActionKind{protocol.Empty}, // => all +- }, +- }, +- } +- params.Capabilities.Window.WorkDoneProgress = true +- params.Capabilities.Workspace.FileOperations = &protocol.FileOperationClientCapabilities{ +- DidCreate: true, +- } +- params.InitializationOptions = map[string]any{ +- "symbolMatcher": string(opts.SymbolMatcher), +- } +- return params +-} +- +-// initialize performs LSP's two-call client/server handshake. +-func (cli *client) initialize(ctx context.Context, server protocol.Server, params *protocol.ParamInitialize) error { +- result, err := server.Initialize(ctx, params) +- if err != nil { +- return err +- } +- if err := server.Initialized(ctx, &protocol.InitializedParams{}); err != nil { +- return err +- } +- cli.server = server +- cli.initializeResult = result +- return nil +-} +- +-// client implements [protocol.Client] and defines the LSP client +-// operations of the gopls command. +-// +-// It holds the client-side state of a single client/server +-// connection; it conceptually corresponds to a single call to +-// connect(2). +-type client struct { +- app *Application +- +- server protocol.Server +- initializeResult *protocol.InitializeResult // includes server capabilities +- +- progressMu sync.Mutex +- iwlToken protocol.ProgressToken +- iwlDone chan struct{} +- +- filesMu sync.Mutex // guards files map +- files map[protocol.DocumentURI]*cmdFile +-} +- +-// cmdFile represents an open file in the gopls command LSP client. 
+-type cmdFile struct { +- uri protocol.DocumentURI +- mapper *protocol.Mapper +- err error +- diagnosticsMu sync.Mutex +- diagnostics []protocol.Diagnostic +-} +- +-func newClient(app *Application) *client { +- return &client{ +- app: app, +- files: make(map[protocol.DocumentURI]*cmdFile), +- iwlDone: make(chan struct{}), +- } +-} +- +-func (cli *client) TextDocumentContentRefresh(context.Context, *protocol.TextDocumentContentRefreshParams) error { +- return nil +-} +- +-func (cli *client) CodeLensRefresh(context.Context) error { return nil } +- +-func (cli *client) FoldingRangeRefresh(context.Context) error { return nil } +- +-func (cli *client) LogTrace(context.Context, *protocol.LogTraceParams) error { return nil } +- +-func (cli *client) ShowMessage(ctx context.Context, p *protocol.ShowMessageParams) error { +- fmt.Fprintf(os.Stderr, "%s: %s\n", p.Type, p.Message) +- return nil +-} +- +-func (cli *client) ShowMessageRequest(ctx context.Context, p *protocol.ShowMessageRequestParams) (*protocol.MessageActionItem, error) { +- return nil, nil +-} +- +-func (cli *client) LogMessage(ctx context.Context, p *protocol.LogMessageParams) error { +- // This logic causes server logging to be double-prefixed with a timestamp. +- // 2023/11/08 10:50:21 Error:2023/11/08 10:50:21 +- // TODO(adonovan): print just p.Message, plus a newline if needed? +- switch p.Type { +- case protocol.Error: +- log.Print("Error:", p.Message) +- case protocol.Warning: +- log.Print("Warning:", p.Message) +- case protocol.Info: +- if cli.app.verbose() { +- log.Print("Info:", p.Message) +- } +- case protocol.Log: +- if cli.app.verbose() { +- log.Print("Log:", p.Message) +- } +- default: +- if cli.app.verbose() { +- log.Print(p.Message) +- } +- } +- return nil +-} +- +-func (cli *client) Event(ctx context.Context, t *any) error { return nil } +- +-func (cli *client) RegisterCapability(ctx context.Context, p *protocol.RegistrationParams) error { +- return nil +-} +- +-func (cli *client) UnregisterCapability(ctx context.Context, p *protocol.UnregistrationParams) error { +- return nil +-} +- +-func (cli *client) WorkspaceFolders(ctx context.Context) ([]protocol.WorkspaceFolder, error) { +- return nil, nil +-} +- +-func (cli *client) Configuration(ctx context.Context, p *protocol.ParamConfiguration) ([]any, error) { +- results := make([]any, len(p.Items)) +- for i, item := range p.Items { +- if item.Section != "gopls" { +- continue +- } +- m := map[string]any{ +- "analyses": map[string]any{ +- "fillreturns": true, +- "nonewvars": true, +- "noresultvalues": true, +- "undeclaredname": true, +- }, +- } +- if cli.app.VeryVerbose { +- m["verboseOutput"] = true +- } +- results[i] = m +- } +- return results, nil +-} +- +-func (cli *client) ApplyEdit(ctx context.Context, p *protocol.ApplyWorkspaceEditParams) (*protocol.ApplyWorkspaceEditResult, error) { +- if err := cli.applyWorkspaceEdit(&p.Edit); err != nil { +- return &protocol.ApplyWorkspaceEditResult{FailureReason: err.Error()}, nil +- } +- return &protocol.ApplyWorkspaceEditResult{Applied: true}, nil +-} +- +-// applyWorkspaceEdit applies a complete WorkspaceEdit to the client's +-// files, honoring the preferred edit mode specified by cli.app.editMode. +-// (Used by rename and by ApplyEdit downcalls.) 
+-// +-// See also: +-// - changedFiles in ../test/marker/marker_test.go for the golden-file capturing variant +-// - applyWorkspaceEdit in ../test/integration/fake/editor.go for the Editor variant +-func (cli *client) applyWorkspaceEdit(wsedit *protocol.WorkspaceEdit) error { +- +- create := func(uri protocol.DocumentURI, content []byte) error { +- edits := []diff.Edit{{Start: 0, End: 0, New: string(content)}} +- return updateFile(uri.Path(), nil, content, edits, cli.app.editFlags) +- } +- +- delete := func(uri protocol.DocumentURI, content []byte) error { +- edits := []diff.Edit{{Start: 0, End: len(content), New: ""}} +- return updateFile(uri.Path(), content, nil, edits, cli.app.editFlags) +- } +- +- for _, c := range wsedit.DocumentChanges { +- switch { +- case c.TextDocumentEdit != nil: +- f := cli.getFile(c.TextDocumentEdit.TextDocument.URI) +- if f.err != nil { +- return f.err +- } +- // TODO(adonovan): sanity-check c.TextDocumentEdit.TextDocument.Version +- edits := protocol.AsTextEdits(c.TextDocumentEdit.Edits) +- if err := applyTextEdits(f.mapper, edits, cli.app.editFlags); err != nil { +- return err +- } +- +- case c.CreateFile != nil: +- if err := create(c.CreateFile.URI, []byte{}); err != nil { +- return err +- } +- +- case c.RenameFile != nil: +- // Analyze as creation + deletion. (NB: loses file mode.) +- f := cli.getFile(c.RenameFile.OldURI) +- if f.err != nil { +- return f.err +- } +- if err := create(c.RenameFile.NewURI, f.mapper.Content); err != nil { +- return err +- } +- if err := delete(f.mapper.URI, f.mapper.Content); err != nil { +- return err +- } +- +- case c.DeleteFile != nil: +- f := cli.getFile(c.DeleteFile.URI) +- if f.err != nil { +- return f.err +- } +- if err := delete(f.mapper.URI, f.mapper.Content); err != nil { +- return err +- } +- +- default: +- return fmt.Errorf("unknown DocumentChange: %#v", c) +- } +- } +- return nil +-} +- +-// applyTextEdits applies a list of edits to the mapper file content, +-// using the preferred edit mode. It is a no-op if there are no edits. +-func applyTextEdits(mapper *protocol.Mapper, edits []protocol.TextEdit, flags *EditFlags) error { +- if len(edits) == 0 { +- return nil +- } +- newContent, diffEdits, err := protocol.ApplyEdits(mapper, edits) +- if err != nil { +- return err +- } +- return updateFile(mapper.URI.Path(), mapper.Content, newContent, diffEdits, flags) +-} +- +-// updateFile performs a content update operation on the specified file. +-// If the old content is nil, the operation creates the file. +-// If the new content is nil, the operation deletes the file. +-// The flags control whether the operation is written, or merely listed, diffed, or printed. +-func updateFile(filename string, old, new []byte, edits []diff.Edit, flags *EditFlags) error { +- if flags.List { +- fmt.Println(filename) +- } +- +- if flags.Write { +- if flags.Preserve && old != nil { // edit or delete +- if err := os.WriteFile(filename+".orig", old, 0666); err != nil { +- return err +- } +- } +- +- if new != nil { +- // create or edit +- if err := os.WriteFile(filename, new, 0666); err != nil { +- return err +- } +- } else { +- // delete +- if err := os.Remove(filename); err != nil { +- return err +- } +- } +- } +- +- if flags.Diff { +- // For diffing, creations and deletions are equivalent +- // updating an empty file and making an existing file empty. 
+- unified, err := diff.ToUnified(filename+".orig", filename, string(old), edits, diff.DefaultContextLines) +- if err != nil { +- return err +- } +- fmt.Print(unified) +- } +- +- // No flags: just print edited file content. +- // +- // This makes no sense for multiple files. +- // (We should probably change the default to -diff.) +- if !(flags.List || flags.Write || flags.Diff) { +- os.Stdout.Write(new) +- } +- +- return nil +-} +- +-func (cli *client) PublishDiagnostics(ctx context.Context, p *protocol.PublishDiagnosticsParams) error { +- // Don't worry about diagnostics without versions. +- // +- // (Note: the representation of PublishDiagnosticsParams +- // cannot distinguish a missing Version from v0, but the +- // server never sends back an explicit zero.) +- if p.Version == 0 { +- return nil +- } +- +- file := cli.getFile(p.URI) +- +- file.diagnosticsMu.Lock() +- defer file.diagnosticsMu.Unlock() +- +- file.diagnostics = append(file.diagnostics, p.Diagnostics...) +- +- // Perform a crude in-place deduplication. +- // TODO(golang/go#60122): replace the gopls.diagnose_files +- // command with support for textDocument/diagnostic, +- // so that we don't need to do this de-duplication. +- type key [6]any +- seen := make(map[key]bool) +- out := file.diagnostics[:0] +- for _, d := range file.diagnostics { +- var codeHref string +- if desc := d.CodeDescription; desc != nil { +- codeHref = desc.Href +- } +- k := key{d.Range, d.Severity, d.Code, codeHref, d.Source, d.Message} +- if !seen[k] { +- seen[k] = true +- out = append(out, d) +- } +- } +- file.diagnostics = out +- +- return nil +-} +- +-func (cli *client) Progress(_ context.Context, params *protocol.ProgressParams) error { +- if _, ok := params.Token.(string); !ok { +- return fmt.Errorf("unexpected progress token: %[1]T %[1]v", params.Token) +- } +- +- switch v := params.Value.(type) { +- case *protocol.WorkDoneProgressBegin: +- if v.Title == server.DiagnosticWorkTitle(server.FromInitialWorkspaceLoad) { +- cli.progressMu.Lock() +- cli.iwlToken = params.Token +- cli.progressMu.Unlock() +- } +- +- case *protocol.WorkDoneProgressReport: +- if cli.app.Verbose { +- fmt.Fprintln(os.Stderr, v.Message) +- } +- +- case *protocol.WorkDoneProgressEnd: +- cli.progressMu.Lock() +- iwlToken := cli.iwlToken +- cli.progressMu.Unlock() +- +- if params.Token == iwlToken { +- close(cli.iwlDone) +- } +- } +- return nil +-} +- +-func (cli *client) ShowDocument(ctx context.Context, params *protocol.ShowDocumentParams) (*protocol.ShowDocumentResult, error) { +- var success bool +- if params.External { +- // Open URI in external browser. +- success = browser.Open(params.URI) +- } else { +- // Open file in editor, optionally taking focus and selecting a range. +- // (client has no editor. Should it fork+exec $EDITOR?) 
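// Illustrative sketch (not from the gopls sources): the helpers above funnel
// every change through the same path; protocol.TextEdits are applied to the
// Mapper's content by protocol.ApplyEdits, and the resulting diff.Edits are
// what updateFile renders with diff.ToUnified when -diff is set. The snippet
// assumes the protocol and diff packages already imported by this file; the
// URI and the edit are made-up values.
func exampleApplyAndDiff() error {
	uri := protocol.DocumentURI("file:///tmp/hello.go")
	old := []byte("package main\n")
	mapper := protocol.NewMapper(uri, old)

	// A single insertion at the top of the file, expressed as an LSP edit.
	edits := []protocol.TextEdit{{
		Range: protocol.Range{
			Start: protocol.Position{Line: 0, Character: 0},
			End:   protocol.Position{Line: 0, Character: 0},
		},
		NewText: "// Hello, diff.\n",
	}}

	// Same conversion used by applyTextEdits above.
	newContent, diffEdits, err := protocol.ApplyEdits(mapper, edits)
	if err != nil {
		return err
	}

	// Same rendering used by updateFile's -diff branch above.
	unified, err := diff.ToUnified(uri.Path()+".orig", uri.Path(), string(old), diffEdits, diff.DefaultContextLines)
	if err != nil {
		return err
	}
	fmt.Print(unified)
	_ = newContent // with -write, this is what would be written back to disk
	return nil
}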
+- log.Printf("Server requested that client editor open %q (takeFocus=%t, selection=%+v)", +- params.URI, params.TakeFocus, params.Selection) +- success = true +- } +- return &protocol.ShowDocumentResult{Success: success}, nil +-} +- +-func (cli *client) WorkDoneProgressCreate(context.Context, *protocol.WorkDoneProgressCreateParams) error { +- return nil +-} +- +-func (cli *client) DiagnosticRefresh(context.Context) error { +- return nil +-} +- +-func (cli *client) InlayHintRefresh(context.Context) error { +- return nil +-} +- +-func (cli *client) SemanticTokensRefresh(context.Context) error { +- return nil +-} +- +-func (cli *client) InlineValueRefresh(context.Context) error { +- return nil +-} +- +-// getFile returns the specified file, adding it to the client state if needed. +-func (cli *client) getFile(uri protocol.DocumentURI) *cmdFile { +- cli.filesMu.Lock() +- defer cli.filesMu.Unlock() +- +- file, found := cli.files[uri] +- if !found || file.err != nil { +- file = &cmdFile{ +- uri: uri, +- } +- cli.files[uri] = file +- } +- if file.mapper == nil { +- content, err := os.ReadFile(uri.Path()) +- if err != nil { +- file.err = fmt.Errorf("getFile: %v: %v", uri, err) +- return file +- } +- file.mapper = protocol.NewMapper(uri, content) +- } +- return file +-} +- +-// openFile returns the specified file, adding it to the client state +-// if needed, and notifying the server that it was opened. +-func (cli *client) openFile(ctx context.Context, uri protocol.DocumentURI) (*cmdFile, error) { +- file := cli.getFile(uri) +- if file.err != nil { +- return nil, file.err +- } +- +- // Choose language ID from file extension. +- var langID protocol.LanguageKind // "" eventually maps to file.UnknownKind +- switch filepath.Ext(uri.Path()) { +- case ".go": +- langID = "go" +- case ".mod": +- langID = "go.mod" +- case ".sum": +- langID = "go.sum" +- case ".work": +- langID = "go.work" +- case ".s": +- langID = "go.s" +- } +- +- p := &protocol.DidOpenTextDocumentParams{ +- TextDocument: protocol.TextDocumentItem{ +- URI: uri, +- LanguageID: langID, +- Version: 1, +- Text: string(file.mapper.Content), +- }, +- } +- if err := cli.server.DidOpen(ctx, p); err != nil { +- // TODO(adonovan): is this assignment concurrency safe? +- file.err = fmt.Errorf("%v: %v", uri, err) +- return nil, file.err +- } +- return file, nil +-} +- +-func diagnoseFiles(ctx context.Context, server protocol.Server, files []protocol.DocumentURI) error { +- cmd := command.NewDiagnoseFilesCommand("Diagnose files", command.DiagnoseFilesArgs{ +- Files: files, +- }) +- _, err := executeCommand(ctx, server, cmd) +- return err +-} +- +-func (cli *client) terminate(ctx context.Context) { +- if err := cli.server.Shutdown(ctx); err != nil { +- log.Printf("server shutdown failed: %v", err) +- } +- +- // Don't call Exit as it terminates the server process, +- // which is the same as this client process. +- // c.server.Exit(ctx) +-} +- +-// Implement io.Closer. +-func (cli *client) Close() error { +- return nil +-} +- +-// -- conversions to span (UTF-8) domain -- +- +-// locationSpan converts a protocol (UTF-16) Location to a (UTF-8) span. +-// Precondition: the URIs of Location and Mapper match. +-func (f *cmdFile) locationSpan(loc protocol.Location) (span, error) { +- // TODO(adonovan): check that l.URI matches m.URI. +- return f.rangeSpan(loc.Range) +-} +- +-// rangeSpan converts a protocol (UTF-16) range to a (UTF-8) span. +-// The resulting span has valid Positions and Offsets. 
+-func (f *cmdFile) rangeSpan(r protocol.Range) (span, error) { +- start, end, err := f.mapper.RangeOffsets(r) +- if err != nil { +- return span{}, err +- } +- return f.offsetSpan(start, end) +-} +- +-// offsetSpan converts a byte-offset interval to a (UTF-8) span. +-// The resulting span contains line, column, and offset information. +-func (f *cmdFile) offsetSpan(start, end int) (span, error) { +- if start > end { +- return span{}, fmt.Errorf("start offset (%d) > end (%d)", start, end) +- } +- startPoint, err := offsetPoint(f.mapper, start) +- if err != nil { +- return span{}, fmt.Errorf("start: %v", err) +- } +- endPoint, err := offsetPoint(f.mapper, end) +- if err != nil { +- return span{}, fmt.Errorf("end: %v", err) +- } +- return newSpan(f.mapper.URI, startPoint, endPoint), nil +-} +- +-// offsetPoint converts a byte offset to a span (UTF-8) point. +-// The resulting point contains line, column, and offset information. +-func offsetPoint(m *protocol.Mapper, offset int) (point, error) { +- if !(0 <= offset && offset <= len(m.Content)) { +- return point{}, fmt.Errorf("invalid offset %d (want 0-%d)", offset, len(m.Content)) +- } +- line, col8 := m.OffsetLineCol8(offset) +- return newPoint(line, col8, offset), nil +-} +- +-// -- conversions from span (UTF-8) domain -- +- +-// spanLocation converts a (UTF-8) span to a protocol (UTF-16) range. +-// Precondition: the URIs of spanLocation and Mapper match. +-func (f *cmdFile) spanLocation(s span) (protocol.Location, error) { +- rng, err := f.spanRange(s) +- if err != nil { +- return protocol.Location{}, err +- } +- return f.mapper.URI.Location(rng), nil +-} +- +-// spanRange converts a (UTF-8) span to a protocol (UTF-16) range. +-// Precondition: the URIs of span and Mapper match. +-func (f *cmdFile) spanRange(s span) (protocol.Range, error) { +- // Assert that we aren't using the wrong mapper. +- // We check only the base name, and case insensitively, +- // because we can't assume clean paths, no symbolic links, +- // case-sensitive directories. The authoritative answer +- // requires querying the file system, and we don't want +- // to do that. +- if !strings.EqualFold(f.mapper.URI.Base(), s.URI().Base()) { +- return protocol.Range{}, bugpkg.Errorf("mapper is for file %q instead of %q", f.mapper.URI, s.URI()) +- } +- start, err := pointPosition(f.mapper, s.Start()) +- if err != nil { +- return protocol.Range{}, fmt.Errorf("start: %w", err) +- } +- end, err := pointPosition(f.mapper, s.End()) +- if err != nil { +- return protocol.Range{}, fmt.Errorf("end: %w", err) +- } +- return protocol.Range{Start: start, End: end}, nil +-} +- +-// pointPosition converts a valid span (UTF-8) point to a protocol (UTF-16) position. +-func pointPosition(m *protocol.Mapper, p point) (protocol.Position, error) { +- if p.HasPosition() { +- return m.LineCol8Position(p.Line(), p.Column()) +- } +- if p.HasOffset() { +- return m.OffsetPosition(p.Offset()) +- } +- return protocol.Position{}, fmt.Errorf("point has neither offset nor line/column") +-} +- +-// TODO(adonovan): delete in 2025. 
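// Illustrative sketch (not from the gopls sources): the span/point helpers
// above exist because the command line works in UTF-8 line:column and byte
// offsets, while LSP positions count UTF-16 code units. protocol.Mapper does
// the conversion in both directions; a small round trip, assuming the
// protocol package imported by this file (the file content is made up):
func exampleOffsetRoundTrip() error {
	m := protocol.NewMapper("file:///tmp/a.go", []byte("π := 3.14\n"))

	// Byte offset of ":" below. "π" is 2 bytes in UTF-8 but a single UTF-16
	// code unit, so the UTF-8 column and the protocol column disagree here.
	offset := 3

	pos, err := m.OffsetPosition(offset) // protocol (UTF-16) position, for LSP requests
	if err != nil {
		return err
	}
	line, col8 := m.OffsetLineCol8(offset) // UTF-8 line/column, as used by spans

	fmt.Printf("offset %d = %d:%d (UTF-8) = %d:%d (LSP, UTF-16)\n",
		offset, line, col8, pos.Line, pos.Character)
	return nil
}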
+-type fix struct{ app *Application } +- +-func (*fix) Name() string { return "fix" } +-func (cmd *fix) Parent() string { return cmd.app.Name() } +-func (*fix) Usage() string { return "" } +-func (*fix) ShortHelp() string { return "apply suggested fixes (obsolete)" } +-func (*fix) DetailedHelp(flags *flag.FlagSet) { +- fmt.Fprintf(flags.Output(), `No longer supported; use "gopls codeaction" instead.`) +-} +-func (*fix) Run(ctx context.Context, args ...string) error { +- return tool.CommandLineErrorf(`no longer supported; use "gopls codeaction" instead`) +-} +diff -urN a/gopls/internal/cmd/codeaction.go b/gopls/internal/cmd/codeaction.go +--- a/gopls/internal/cmd/codeaction.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/cmd/codeaction.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,226 +0,0 @@ +-// Copyright 2024 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package cmd +- +-import ( +- "context" +- "flag" +- "fmt" +- "regexp" +- "slices" +- "strings" +- +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/internal/tool" +-) +- +-// codeaction implements the codeaction verb for gopls. +-type codeaction struct { +- EditFlags +- Kind string `flag:"kind" help:"comma-separated list of code action kinds to filter"` +- Title string `flag:"title" help:"regular expression to match title"` +- Exec bool `flag:"exec" help:"execute the first matching code action"` +- +- app *Application +-} +- +-func (cmd *codeaction) Name() string { return "codeaction" } +-func (cmd *codeaction) Parent() string { return cmd.app.Name() } +-func (cmd *codeaction) Usage() string { return "[codeaction-flags] filename[:line[:col]]" } +-func (cmd *codeaction) ShortHelp() string { return "list or execute code actions" } +-func (cmd *codeaction) DetailedHelp(f *flag.FlagSet) { +- fmt.Fprintf(f.Output(), ` +- +-The codeaction command lists or executes code actions for the +-specified file or range of a file. Each code action contains +-either an edit to be directly applied to the file, or a command +-to be executed by the server, which may have an effect such as: +-- requesting that the client apply an edit; +-- changing the state of the server; or +-- requesting that the client open a document. +- +-The -kind and and -title flags filter the list of actions. +- +-The -kind flag specifies a comma-separated list of LSP CodeAction kinds. +-Only actions of these kinds will be requested from the server. +-Valid kinds include: +- +- gopls.doc.features +- quickfix +- refactor +- refactor.extract +- refactor.extract.constant +- refactor.extract.function +- refactor.extract.method +- refactor.extract.toNewFile +- refactor.extract.variable +- refactor.inline +- refactor.inline.call +- refactor.rewrite +- refactor.rewrite.changeQuote +- refactor.rewrite.fillStruct +- refactor.rewrite.fillSwitch +- refactor.rewrite.invertIf +- refactor.rewrite.joinLines +- refactor.rewrite.removeUnusedParam +- refactor.rewrite.splitLines +- source +- source.assembly +- source.doc +- source.fixAll +- source.freesymbols +- source.organizeImports +- source.test +- +-Kinds are hierarchical, so "refactor" includes "refactor.inline". +-(Note: actions of kind "source.test" are not returned unless explicitly +-requested.) +- +-The -title flag specifies a regular expression that must match the +-action's title. 
(Ideally kinds would be specific enough that this +-isn't necessary; we really need to subdivide refactor.rewrite; see +-gopls/internal/settings/codeactionkind.go.) +- +-The -exec flag causes the first matching code action to be executed. +-Without the flag, the matching actions are merely listed. +- +-It is not currently possible to execute more than one action, +-as that requires a way to detect and resolve conflicts. +-TODO(adonovan): support it when golang/go#67049 is resolved. +- +-If executing an action causes the server to send a patch to the +-client, the usual -write, -preserve, -diff, and -list flags govern how +-the client deals with the patch. +- +-Example: execute the first "quick fix" in the specified file and show the diff: +- +- $ gopls codeaction -kind=quickfix -exec -diff ./gopls/main.go +- +-codeaction-flags: +-`) +- printFlagDefaults(f) +-} +- +-func (cmd *codeaction) Run(ctx context.Context, args ...string) error { +- if len(args) < 1 { +- return tool.CommandLineErrorf("codeaction expects at least 1 argument") +- } +- cmd.app.editFlags = &cmd.EditFlags +- cli, _, err := cmd.app.connect(ctx) +- if err != nil { +- return err +- } +- defer cli.terminate(ctx) +- +- from := parseSpan(args[0]) +- uri := from.URI() +- file, err := cli.openFile(ctx, uri) +- if err != nil { +- return err +- } +- rng, err := file.spanRange(from) +- if err != nil { +- return err +- } +- +- titleRE, err := regexp.Compile(cmd.Title) +- if err != nil { +- return err +- } +- +- // Get diagnostics, as they may encode various lazy code actions. +- if err := diagnoseFiles(ctx, cli.server, []protocol.DocumentURI{uri}); err != nil { +- return err +- } +- file.diagnosticsMu.Lock() +- diagnostics := slices.Clone(file.diagnostics) +- file.diagnosticsMu.Unlock() +- +- // Request code actions of the desired kinds. +- var kinds []protocol.CodeActionKind +- if cmd.Kind != "" { +- for kind := range strings.SplitSeq(cmd.Kind, ",") { +- kinds = append(kinds, protocol.CodeActionKind(kind)) +- } +- } else { +- kinds = append(kinds, protocol.Empty) // => all +- } +- actions, err := cli.server.CodeAction(ctx, &protocol.CodeActionParams{ +- TextDocument: protocol.TextDocumentIdentifier{URI: uri}, +- Range: rng, +- Context: protocol.CodeActionContext{ +- Only: kinds, +- Diagnostics: diagnostics, +- }, +- }) +- if err != nil { +- return fmt.Errorf("%v: %v", from, err) +- } +- +- // Gather edits from matching code actions. +- var edits []protocol.TextEdit +- for _, act := range actions { +- if act.Disabled != nil { +- continue +- } +- if !titleRE.MatchString(act.Title) { +- continue +- } +- +- // If the provided span has a position (not just offsets), +- // and the action has diagnostics, the action must have a +- // diagnostic with the same range as it. +- if from.HasPosition() && len(act.Diagnostics) > 0 && +- !slices.ContainsFunc(act.Diagnostics, func(diag protocol.Diagnostic) bool { +- return diag.Range.Start == rng.Start +- }) { +- continue +- } +- +- if cmd.Exec { +- // -exec: run the first matching code action. +- if act.Command != nil { +- // This may cause the server to make +- // an ApplyEdit downcall to the client. +- if _, err := executeCommand(ctx, cli.server, act.Command); err != nil { +- return err +- } +- // The specification says that commands should +- // be executed _after_ edits are applied, not +- // instead of them, but we don't want to +- // duplicate edits. +- } else { +- // Partially apply CodeAction.Edit, a WorkspaceEdit. +- // (See also cli.applyWorkspaceEdit(a.Edit)). 
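// Illustrative sketch (not from the gopls sources): stripped of flag handling
// and -exec, the core of the codeaction verb above is a single CodeAction
// request whose Context.Only field restricts the kinds returned. The helper
// below assumes an already-connected protocol.Server and the protocol package
// imported by this file.
func listQuickFixTitles(ctx context.Context, server protocol.Server, uri protocol.DocumentURI, rng protocol.Range) error {
	actions, err := server.CodeAction(ctx, &protocol.CodeActionParams{
		TextDocument: protocol.TextDocumentIdentifier{URI: uri},
		Range:        rng,
		Context: protocol.CodeActionContext{
			Only: []protocol.CodeActionKind{"quickfix"}, // kinds are hierarchical
		},
	})
	if err != nil {
		return err
	}
	for _, act := range actions {
		if act.Disabled == nil {
			fmt.Printf("%q [%s]\n", act.Title, act.Kind)
		}
	}
	return nil
}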
+- for _, c := range act.Edit.DocumentChanges { +- tde := c.TextDocumentEdit +- if tde != nil && tde.TextDocument.URI == uri { +- // TODO(adonovan): this logic will butcher an edit that spans files. +- // It will also ignore create/delete/rename operations. +- // Fix or document. Need a three-way merge. +- edits = append(edits, protocol.AsTextEdits(tde.Edits)...) +- } +- } +- return applyTextEdits(file.mapper, edits, cmd.app.editFlags) +- } +- return nil +- } else { +- // No -exec: list matching code actions. +- action := "edit" +- if act.Command != nil { +- action = "command" +- } +- fmt.Printf("%s\t%q [%s]\n", +- action, +- act.Title, +- act.Kind) +- } +- } +- +- if cmd.Exec { +- return fmt.Errorf("no matching code action at %s", from) +- } +- return nil +-} +diff -urN a/gopls/internal/cmd/codelens.go b/gopls/internal/cmd/codelens.go +--- a/gopls/internal/cmd/codelens.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/cmd/codelens.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,137 +0,0 @@ +-// Copyright 2019 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package cmd +- +-import ( +- "context" +- "flag" +- "fmt" +- +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/gopls/internal/settings" +- "golang.org/x/tools/internal/tool" +-) +- +-// codelens implements the codelens verb for gopls. +-type codelens struct { +- EditFlags +- app *Application +- +- Exec bool `flag:"exec" help:"execute the first matching code lens"` +-} +- +-func (r *codelens) Name() string { return "codelens" } +-func (r *codelens) Parent() string { return r.app.Name() } +-func (r *codelens) Usage() string { return "[codelens-flags] file[:line[:col]] [title]" } +-func (r *codelens) ShortHelp() string { return "List or execute code lenses for a file" } +-func (r *codelens) DetailedHelp(f *flag.FlagSet) { +- fmt.Fprint(f.Output(), ` +-The codelens command lists or executes code lenses for the specified +-file, or line within a file. A code lens is a command associated with +-a position in the code. +- +-With an optional title argument, only code lenses matching that +-title are considered. +- +-By default, the codelens command lists the available lenses for the +-specified file or line within a file, including the title and +-title of the command. With the -exec flag, the first matching command +-is executed, and its output is printed to stdout. +- +-Example: +- +- $ gopls codelens a_test.go # list code lenses in a file +- $ gopls codelens a_test.go:10 # list code lenses on line 10 +- $ gopls codelens a_test.go "run test" # list gopls.run_tests commands +- $ gopls codelens -exec a_test.go:10 "run test" # run a specific test +- +-codelens-flags: +-`) +- printFlagDefaults(f) +-} +- +-func (r *codelens) Run(ctx context.Context, args ...string) error { +- var filename, title string +- switch len(args) { +- case 0: +- return tool.CommandLineErrorf("codelens requires a file name") +- case 2: +- title = args[1] +- fallthrough +- case 1: +- filename = args[0] +- default: +- return tool.CommandLineErrorf("codelens expects at most two arguments") +- } +- +- r.app.editFlags = &r.EditFlags // in case a codelens perform an edit +- +- // Override the default setting for codelenses["test"], which is +- // off by default because VS Code has a superior client-side +- // implementation. But this client is not VS Code. +- // See golang.LensFuncs(). 
+- origOptions := r.app.options +- r.app.options = func(opts *settings.Options) { +- if origOptions != nil { +- origOptions(opts) +- } +- if opts.Codelenses == nil { +- opts.Codelenses = make(map[settings.CodeLensSource]bool) +- } +- opts.Codelenses[settings.CodeLensTest] = true +- } +- +- cli, _, err := r.app.connect(ctx) +- if err != nil { +- return err +- } +- defer cli.terminate(ctx) +- +- filespan := parseSpan(filename) +- file, err := cli.openFile(ctx, filespan.URI()) +- if err != nil { +- return err +- } +- loc, err := file.spanLocation(filespan) +- if err != nil { +- return err +- } +- +- p := protocol.CodeLensParams{ +- TextDocument: protocol.TextDocumentIdentifier{URI: loc.URI}, +- } +- lenses, err := cli.server.CodeLens(ctx, &p) +- if err != nil { +- return err +- } +- +- for _, lens := range lenses { +- sp, err := file.rangeSpan(lens.Range) +- if err != nil { +- return nil +- } +- +- if title != "" && lens.Command.Title != title { +- continue // title was specified but does not match +- } +- if filespan.HasPosition() && !protocol.Intersect(loc.Range, lens.Range) { +- continue // position was specified but does not match +- } +- +- // -exec: run the first matching code lens. +- if r.Exec { +- _, err := executeCommand(ctx, cli.server, lens.Command) +- return err +- } +- +- // No -exec: list matching code lenses. +- fmt.Printf("%v: %q [%s]\n", sp, lens.Command.Title, lens.Command.Command) +- } +- +- if r.Exec { +- return fmt.Errorf("no code lens at %s with title %q", filespan, title) +- } +- return nil +-} +diff -urN a/gopls/internal/cmd/counters.go b/gopls/internal/cmd/counters.go +--- a/gopls/internal/cmd/counters.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/cmd/counters.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,15 +0,0 @@ +-// Copyright 2025 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package cmd +- +-import "golang.org/x/telemetry/counter" +- +-// Proposed counters for evaluating usage of the Go MCP Server. These counters +-// increment when the user starts up the server in attached or headless mode. +-var ( +- countHeadlessMCPStdIO = counter.New("gopls/mcp-headless:stdio") +- countHeadlessMCPSSE = counter.New("gopls/mcp-headless:sse") +- countAttachedMCP = counter.New("gopls/mcp") +-) +diff -urN a/gopls/internal/cmd/definition.go b/gopls/internal/cmd/definition.go +--- a/gopls/internal/cmd/definition.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/cmd/definition.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,137 +0,0 @@ +-// Copyright 2019 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package cmd +- +-import ( +- "context" +- "encoding/json" +- "flag" +- "fmt" +- "os" +- "strings" +- +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/gopls/internal/settings" +- "golang.org/x/tools/internal/tool" +-) +- +-// A Definition is the result of a 'definition' query. +-type Definition struct { +- Span span `json:"span"` // span of the definition +- Description string `json:"description"` // description of the denoted object +-} +- +-// These constant is printed in the help, and then used in a test to verify the +-// help is still valid. +-// They refer to "Set" in "flag.FlagSet" from the DetailedHelp method below. 
+-const ( +- exampleLine = 44 +- exampleColumn = 47 +- exampleOffset = 1270 +-) +- +-// definition implements the definition verb for gopls. +-type definition struct { +- app *Application +- +- JSON bool `flag:"json" help:"emit output in JSON format"` +- MarkdownSupported bool `flag:"markdown" help:"support markdown in responses"` +-} +- +-func (d *definition) Name() string { return "definition" } +-func (d *definition) Parent() string { return d.app.Name() } +-func (d *definition) Usage() string { return "[definition-flags] " } +-func (d *definition) ShortHelp() string { return "show declaration of selected identifier" } +-func (d *definition) DetailedHelp(f *flag.FlagSet) { +- fmt.Fprintf(f.Output(), ` +-Example: show the definition of the identifier at syntax at offset %[1]v in this file (flag.FlagSet): +- +- $ gopls definition internal/cmd/definition.go:%[1]v:%[2]v +- $ gopls definition internal/cmd/definition.go:#%[3]v +- +-definition-flags: +-`, exampleLine, exampleColumn, exampleOffset) +- printFlagDefaults(f) +-} +- +-// Run performs the definition query as specified by args and prints the +-// results to stdout. +-func (d *definition) Run(ctx context.Context, args ...string) error { +- if len(args) != 1 { +- return tool.CommandLineErrorf("definition expects 1 argument") +- } +- // Plaintext makes more sense for the command line. +- opts := d.app.options +- d.app.options = func(o *settings.Options) { +- if opts != nil { +- opts(o) +- } +- o.PreferredContentFormat = protocol.PlainText +- if d.MarkdownSupported { +- o.PreferredContentFormat = protocol.Markdown +- } +- } +- cli, _, err := d.app.connect(ctx) +- if err != nil { +- return err +- } +- defer cli.terminate(ctx) +- from := parseSpan(args[0]) +- file, err := cli.openFile(ctx, from.URI()) +- if err != nil { +- return err +- } +- loc, err := file.spanLocation(from) +- if err != nil { +- return err +- } +- p := protocol.DefinitionParams{ +- TextDocumentPositionParams: protocol.LocationTextDocumentPositionParams(loc), +- } +- locs, err := cli.server.Definition(ctx, &p) +- if err != nil { +- return fmt.Errorf("%v: %v", from, err) +- } +- +- if len(locs) == 0 { +- return fmt.Errorf("%v: no definition location (not an identifier?)", from) +- } +- file, err = cli.openFile(ctx, locs[0].URI) +- if err != nil { +- return fmt.Errorf("%v: %v", from, err) +- } +- definition, err := file.locationSpan(locs[0]) +- if err != nil { +- return fmt.Errorf("%v: %v", from, err) +- } +- +- q := protocol.HoverParams{ +- TextDocumentPositionParams: protocol.LocationTextDocumentPositionParams(loc), +- } +- hover, err := cli.server.Hover(ctx, &q) +- if err != nil { +- return fmt.Errorf("%v: %v", from, err) +- } +- var description string +- if hover != nil { +- description = strings.TrimSpace(hover.Contents.Value) +- } +- +- result := &Definition{ +- Span: definition, +- Description: description, +- } +- if d.JSON { +- enc := json.NewEncoder(os.Stdout) +- enc.SetIndent("", "\t") +- return enc.Encode(result) +- } +- fmt.Printf("%v", result.Span) +- if len(result.Description) > 0 { +- fmt.Printf(": defined here as %s", result.Description) +- } +- fmt.Printf("\n") +- return nil +-} +diff -urN a/gopls/internal/cmd/execute.go b/gopls/internal/cmd/execute.go +--- a/gopls/internal/cmd/execute.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/cmd/execute.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,104 +0,0 @@ +-// Copyright 2023 The Go Authors. All rights reserved. 
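// Illustrative sketch (not from the gopls sources): the definition verb above
// is two LSP requests back to back, Definition to locate the declaration and
// Hover at the same position for its one-line description. Reduced to that
// core, assuming a connected protocol.Server and a protocol.Location already
// resolved from the command-line argument:
func describeDefinition(ctx context.Context, server protocol.Server, loc protocol.Location) (protocol.Location, string, error) {
	locs, err := server.Definition(ctx, &protocol.DefinitionParams{
		TextDocumentPositionParams: protocol.LocationTextDocumentPositionParams(loc),
	})
	if err != nil {
		return protocol.Location{}, "", err
	}
	if len(locs) == 0 {
		return protocol.Location{}, "", fmt.Errorf("no definition location (not an identifier?)")
	}
	hover, err := server.Hover(ctx, &protocol.HoverParams{
		TextDocumentPositionParams: protocol.LocationTextDocumentPositionParams(loc),
	})
	if err != nil {
		return protocol.Location{}, "", err
	}
	var description string
	if hover != nil {
		description = strings.TrimSpace(hover.Contents.Value)
	}
	return locs[0], description, nil
}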
+-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package cmd +- +-import ( +- "context" +- "encoding/json" +- "flag" +- "fmt" +- "log" +- "slices" +- +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/gopls/internal/protocol/command" +- "golang.org/x/tools/internal/tool" +-) +- +-// execute implements the LSP ExecuteCommand verb for gopls. +-type execute struct { +- EditFlags +- app *Application +-} +- +-func (e *execute) Name() string { return "execute" } +-func (e *execute) Parent() string { return e.app.Name() } +-func (e *execute) Usage() string { return "[flags] command argument..." } +-func (e *execute) ShortHelp() string { return "Execute a gopls custom LSP command" } +-func (e *execute) DetailedHelp(f *flag.FlagSet) { +- fmt.Fprint(f.Output(), ` +-The execute command sends an LSP ExecuteCommand request to gopls, +-with a set of optional JSON argument values. +-Some commands return a result, also JSON. +- +-Gopls' command set is defined by the command.Interface type; see +-https://pkg.go.dev/golang.org/x/tools/gopls/internal/protocol/command#Interface. +-It is not a stable interface: commands may change or disappear without notice. +- +-Examples: +- +- $ gopls execute gopls.add_import '{"ImportPath": "fmt", "URI": "file:///hello.go"}' +- $ gopls execute gopls.run_tests '{"URI": "file:///a_test.go", "Tests": ["Test"]}' +- $ gopls execute gopls.list_known_packages '{"URI": "file:///hello.go"}' +- +-execute-flags: +-`) +- printFlagDefaults(f) +-} +- +-func (e *execute) Run(ctx context.Context, args ...string) error { +- if len(args) == 0 { +- return tool.CommandLineErrorf("execute requires a command name") +- } +- cmd := args[0] +- if !slices.Contains(command.Commands, command.Command(cmd)) { +- return tool.CommandLineErrorf("unrecognized command: %s", cmd) +- } +- +- // A command may have multiple arguments, though the only one +- // that currently does so is the "legacy" gopls.test, +- // so we don't show an example of it. +- var jsonArgs []json.RawMessage +- for i, arg := range args[1:] { +- var dummy any +- if err := json.Unmarshal([]byte(arg), &dummy); err != nil { +- return fmt.Errorf("argument %d is not valid JSON: %v", i+1, err) +- } +- jsonArgs = append(jsonArgs, json.RawMessage(arg)) +- } +- +- e.app.editFlags = &e.EditFlags // in case command performs an edit +- +- cli, _, err := e.app.connect(ctx) +- if err != nil { +- return err +- } +- defer cli.terminate(ctx) +- +- res, err := executeCommand(ctx, cli.server, &protocol.Command{ +- Command: cmd, +- Arguments: jsonArgs, +- }) +- if err != nil { +- return err +- } +- if res != nil { +- data, err := json.MarshalIndent(res, "", "\t") +- if err != nil { +- log.Fatal(err) +- } +- fmt.Printf("%s\n", data) +- } +- return nil +-} +- +-// executeCommand executes a protocol.Command, displaying progress +-// messages and awaiting completion of asynchronous commands. +-func executeCommand(ctx context.Context, server protocol.Server, cmd *protocol.Command) (any, error) { +- return server.ExecuteCommand(ctx, &protocol.ExecuteCommandParams{ +- Command: cmd.Command, +- Arguments: cmd.Arguments, +- }) +-} +diff -urN a/gopls/internal/cmd/folding_range.go b/gopls/internal/cmd/folding_range.go +--- a/gopls/internal/cmd/folding_range.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/cmd/folding_range.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,72 +0,0 @@ +-// Copyright 2019 The Go Authors. All rights reserved. 
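// Illustrative sketch (not from the gopls sources): executeCommand above is a
// thin wrapper over ExecuteCommand, and the notable part is that command
// arguments travel as raw JSON. A sketch of invoking gopls.run_tests (one of
// the commands from the help text above), assuming a connected
// protocol.Server; the URI and test name are made-up values.
func runTestsExample(ctx context.Context, server protocol.Server) error {
	arg := `{"URI": "file:///tmp/a_test.go", "Tests": ["TestHello"]}`

	// Validate the argument the same way execute.Run does before sending it.
	var dummy any
	if err := json.Unmarshal([]byte(arg), &dummy); err != nil {
		return fmt.Errorf("argument is not valid JSON: %v", err)
	}

	_, err := executeCommand(ctx, server, &protocol.Command{
		Command:   "gopls.run_tests",
		Arguments: []json.RawMessage{json.RawMessage(arg)},
	})
	return err
}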
+-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package cmd +- +-import ( +- "context" +- "flag" +- "fmt" +- +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/internal/tool" +-) +- +-// foldingRanges implements the folding_ranges verb for gopls +-type foldingRanges struct { +- app *Application +-} +- +-func (r *foldingRanges) Name() string { return "folding_ranges" } +-func (r *foldingRanges) Parent() string { return r.app.Name() } +-func (r *foldingRanges) Usage() string { return "" } +-func (r *foldingRanges) ShortHelp() string { return "display selected file's folding ranges" } +-func (r *foldingRanges) DetailedHelp(f *flag.FlagSet) { +- fmt.Fprint(f.Output(), ` +-Example: +- +- $ gopls folding_ranges helper/helper.go +-`) +- printFlagDefaults(f) +-} +- +-func (r *foldingRanges) Run(ctx context.Context, args ...string) error { +- if len(args) != 1 { +- return tool.CommandLineErrorf("folding_ranges expects 1 argument (file)") +- } +- +- cli, _, err := r.app.connect(ctx) +- if err != nil { +- return err +- } +- defer cli.terminate(ctx) +- +- from := parseSpan(args[0]) +- if _, err := cli.openFile(ctx, from.URI()); err != nil { +- return err +- } +- +- p := protocol.FoldingRangeParams{ +- TextDocument: protocol.TextDocumentIdentifier{ +- URI: from.URI(), +- }, +- } +- +- ranges, err := cli.server.FoldingRange(ctx, &p) +- if err != nil { +- return err +- } +- +- for _, r := range ranges { +- // We assume our server always supplies these fields. +- fmt.Printf("%v:%v-%v:%v\n", +- *r.StartLine+1, +- *r.StartCharacter+1, +- *r.EndLine+1, +- *r.EndCharacter+1, +- ) +- } +- +- return nil +-} +diff -urN a/gopls/internal/cmd/format.go b/gopls/internal/cmd/format.go +--- a/gopls/internal/cmd/format.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/cmd/format.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,75 +0,0 @@ +-// Copyright 2019 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package cmd +- +-import ( +- "context" +- "flag" +- "fmt" +- +- "golang.org/x/tools/gopls/internal/protocol" +-) +- +-// format implements the format verb for gopls. +-type format struct { +- EditFlags +- app *Application +-} +- +-func (c *format) Name() string { return "format" } +-func (c *format) Parent() string { return c.app.Name() } +-func (c *format) Usage() string { return "[format-flags] " } +-func (c *format) ShortHelp() string { return "format the code according to the go standard" } +-func (c *format) DetailedHelp(f *flag.FlagSet) { +- fmt.Fprint(f.Output(), ` +-The arguments supplied may be simple file names, or ranges within files. +- +-Example: reformat this file: +- +- $ gopls format -w internal/cmd/check.go +- +-format-flags: +-`) +- printFlagDefaults(f) +-} +- +-// Run performs the check on the files specified by args and prints the +-// results to stdout. 
+-func (c *format) Run(ctx context.Context, args ...string) error { +- if len(args) == 0 { +- return nil +- } +- c.app.editFlags = &c.EditFlags +- cli, _, err := c.app.connect(ctx) +- if err != nil { +- return err +- } +- defer cli.terminate(ctx) +- for _, arg := range args { +- spn := parseSpan(arg) +- file, err := cli.openFile(ctx, spn.URI()) +- if err != nil { +- return err +- } +- loc, err := file.spanLocation(spn) +- if err != nil { +- return err +- } +- if loc.Range.Start != loc.Range.End { +- return fmt.Errorf("only full file formatting supported") +- } +- p := protocol.DocumentFormattingParams{ +- TextDocument: protocol.TextDocumentIdentifier{URI: loc.URI}, +- } +- edits, err := cli.server.Formatting(ctx, &p) +- if err != nil { +- return fmt.Errorf("%v: %v", spn, err) +- } +- if err := applyTextEdits(file.mapper, edits, c.app.editFlags); err != nil { +- return err +- } +- } +- return nil +-} +diff -urN a/gopls/internal/cmd/help_test.go b/gopls/internal/cmd/help_test.go +--- a/gopls/internal/cmd/help_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/cmd/help_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,90 +0,0 @@ +-// Copyright 2019 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package cmd_test +- +-// This file defines tests to ensure the cmd/usage/*.hlp files match +-// the output of the tool. The .hlp files are not actually needed by +-// the executable (they are not //go:embed-ded, say), but they make it +-// easier to review changes to the gopls command's help logic since +-// any effects are manifest as changes to these files. +- +-//go:generate go test -run Help -update-help-files +- +-import ( +- "bytes" +- "context" +- "flag" +- "os" +- "path/filepath" +- "testing" +- +- "github.com/google/go-cmp/cmp" +- "golang.org/x/tools/gopls/internal/cmd" +- "golang.org/x/tools/internal/testenv" +- "golang.org/x/tools/internal/tool" +-) +- +-var updateHelpFiles = flag.Bool("update-help-files", false, "Write out the help files instead of checking them") +- +-const appName = "gopls" +- +-func TestHelpFiles(t *testing.T) { +- testenv.NeedsGoBuild(t) // This is a lie. We actually need the source code. +- app := cmd.New() +- ctx := context.Background() +- for _, page := range append(app.Commands(), app) { +- t.Run(page.Name(), func(t *testing.T) { +- var buf bytes.Buffer +- s := flag.NewFlagSet(page.Name(), flag.ContinueOnError) +- s.SetOutput(&buf) +- tool.Run(ctx, s, page, []string{"-h"}) // ignore error +- name := page.Name() +- if name == appName { +- name = "usage" +- } +- helpFile := filepath.Join("usage", name+".hlp") +- got := buf.Bytes() +- if *updateHelpFiles { +- if err := os.WriteFile(helpFile, got, 0666); err != nil { +- t.Errorf("Failed writing %v: %v", helpFile, err) +- } +- return +- } +- want, err := os.ReadFile(helpFile) +- if err != nil { +- t.Fatalf("Missing help file %q", helpFile) +- } +- if diff := cmp.Diff(string(want), string(got)); diff != "" { +- t.Errorf("Help file %q did not match, run with -update-help-files to fix (-want +got)\n%s", helpFile, diff) +- } +- }) +- } +-} +- +-func TestVerboseHelp(t *testing.T) { +- testenv.NeedsGoBuild(t) // This is a lie. We actually need the source code. 
+- app := cmd.New() +- ctx := context.Background() +- var buf bytes.Buffer +- s := flag.NewFlagSet(appName, flag.ContinueOnError) +- s.SetOutput(&buf) +- tool.Run(ctx, s, app, []string{"-v", "-h"}) // ignore error +- got := buf.Bytes() +- +- helpFile := filepath.Join("usage", "usage-v.hlp") +- if *updateHelpFiles { +- if err := os.WriteFile(helpFile, got, 0666); err != nil { +- t.Errorf("Failed writing %v: %v", helpFile, err) +- } +- return +- } +- want, err := os.ReadFile(helpFile) +- if err != nil { +- t.Fatalf("Missing help file %q", helpFile) +- } +- if diff := cmp.Diff(string(want), string(got)); diff != "" { +- t.Errorf("Help file %q did not match, run with -update-help-files to fix (-want +got)\n%s", helpFile, diff) +- } +-} +diff -urN a/gopls/internal/cmd/highlight.go b/gopls/internal/cmd/highlight.go +--- a/gopls/internal/cmd/highlight.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/cmd/highlight.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,81 +0,0 @@ +-// Copyright 2019 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package cmd +- +-import ( +- "context" +- "flag" +- "fmt" +- +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/internal/tool" +-) +- +-// highlight implements the highlight verb for gopls. +-type highlight struct { +- app *Application +-} +- +-func (r *highlight) Name() string { return "highlight" } +-func (r *highlight) Parent() string { return r.app.Name() } +-func (r *highlight) Usage() string { return "" } +-func (r *highlight) ShortHelp() string { return "display selected identifier's highlights" } +-func (r *highlight) DetailedHelp(f *flag.FlagSet) { +- fmt.Fprint(f.Output(), ` +-Example: +- +- $ # 1-indexed location (:line:column or :#offset) of the target identifier +- $ gopls highlight helper/helper.go:8:6 +- $ gopls highlight helper/helper.go:#53 +-`) +- printFlagDefaults(f) +-} +- +-func (r *highlight) Run(ctx context.Context, args ...string) error { +- if len(args) != 1 { +- return tool.CommandLineErrorf("highlight expects 1 argument (position)") +- } +- +- cli, _, err := r.app.connect(ctx) +- if err != nil { +- return err +- } +- defer cli.terminate(ctx) +- +- from := parseSpan(args[0]) +- file, err := cli.openFile(ctx, from.URI()) +- if err != nil { +- return err +- } +- +- loc, err := file.spanLocation(from) +- if err != nil { +- return err +- } +- +- p := protocol.DocumentHighlightParams{ +- TextDocumentPositionParams: protocol.LocationTextDocumentPositionParams(loc), +- } +- highlights, err := cli.server.DocumentHighlight(ctx, &p) +- if err != nil { +- return err +- } +- +- var results []span +- for _, h := range highlights { +- s, err := file.rangeSpan(h.Range) +- if err != nil { +- return err +- } +- results = append(results, s) +- } +- // Sort results to make tests deterministic since DocumentHighlight uses a map. +- sortSpans(results) +- +- for _, s := range results { +- fmt.Println(s) +- } +- return nil +-} +diff -urN a/gopls/internal/cmd/implementation.go b/gopls/internal/cmd/implementation.go +--- a/gopls/internal/cmd/implementation.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/cmd/implementation.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,86 +0,0 @@ +-// Copyright 2019 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. 
+- +-package cmd +- +-import ( +- "context" +- "flag" +- "fmt" +- "sort" +- +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/internal/tool" +-) +- +-// implementation implements the implementation verb for gopls +-type implementation struct { +- app *Application +-} +- +-func (i *implementation) Name() string { return "implementation" } +-func (i *implementation) Parent() string { return i.app.Name() } +-func (i *implementation) Usage() string { return "" } +-func (i *implementation) ShortHelp() string { return "display selected identifier's implementation" } +-func (i *implementation) DetailedHelp(f *flag.FlagSet) { +- fmt.Fprint(f.Output(), ` +-Example: +- +- $ # 1-indexed location (:line:column or :#offset) of the target identifier +- $ gopls implementation helper/helper.go:8:6 +- $ gopls implementation helper/helper.go:#53 +-`) +- printFlagDefaults(f) +-} +- +-func (i *implementation) Run(ctx context.Context, args ...string) error { +- if len(args) != 1 { +- return tool.CommandLineErrorf("implementation expects 1 argument (position)") +- } +- +- cli, _, err := i.app.connect(ctx) +- if err != nil { +- return err +- } +- defer cli.terminate(ctx) +- +- from := parseSpan(args[0]) +- file, err := cli.openFile(ctx, from.URI()) +- if err != nil { +- return err +- } +- +- loc, err := file.spanLocation(from) +- if err != nil { +- return err +- } +- +- p := protocol.ImplementationParams{ +- TextDocumentPositionParams: protocol.LocationTextDocumentPositionParams(loc), +- } +- implementations, err := cli.server.Implementation(ctx, &p) +- if err != nil { +- return err +- } +- +- var spans []string +- for _, impl := range implementations { +- f, err := cli.openFile(ctx, impl.URI) +- if err != nil { +- return err +- } +- span, err := f.locationSpan(impl) +- if err != nil { +- return err +- } +- spans = append(spans, fmt.Sprint(span)) +- } +- sort.Strings(spans) +- +- for _, s := range spans { +- fmt.Println(s) +- } +- +- return nil +-} +diff -urN a/gopls/internal/cmd/imports.go b/gopls/internal/cmd/imports.go +--- a/gopls/internal/cmd/imports.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/cmd/imports.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,80 +0,0 @@ +-// Copyright 2019 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package cmd +- +-import ( +- "context" +- "flag" +- "fmt" +- +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/internal/tool" +-) +- +-// imports implements the import verb for gopls. +-type imports struct { +- EditFlags +- app *Application +-} +- +-func (t *imports) Name() string { return "imports" } +-func (t *imports) Parent() string { return t.app.Name() } +-func (t *imports) Usage() string { return "[imports-flags] " } +-func (t *imports) ShortHelp() string { return "updates import statements" } +-func (t *imports) DetailedHelp(f *flag.FlagSet) { +- fmt.Fprintf(f.Output(), ` +-Example: update imports statements in a file: +- +- $ gopls imports -w internal/cmd/check.go +- +-imports-flags: +-`) +- printFlagDefaults(f) +-} +- +-// Run performs diagnostic checks on the file specified and either; +-// - if -w is specified, updates the file in place; +-// - if -d is specified, prints out unified diffs of the changes; or +-// - otherwise, prints the new versions to stdout. 
+-func (t *imports) Run(ctx context.Context, args ...string) error { +- if len(args) != 1 { +- return tool.CommandLineErrorf("imports expects 1 argument") +- } +- t.app.editFlags = &t.EditFlags +- cli, _, err := t.app.connect(ctx) +- if err != nil { +- return err +- } +- defer cli.terminate(ctx) +- +- from := parseSpan(args[0]) +- uri := from.URI() +- file, err := cli.openFile(ctx, uri) +- if err != nil { +- return err +- } +- actions, err := cli.server.CodeAction(ctx, &protocol.CodeActionParams{ +- TextDocument: protocol.TextDocumentIdentifier{ +- URI: uri, +- }, +- Context: protocol.CodeActionContext{ +- Only: []protocol.CodeActionKind{protocol.SourceOrganizeImports}, +- }, +- }) +- if err != nil { +- return fmt.Errorf("%v: %v", from, err) +- } +- var edits []protocol.TextEdit +- for _, a := range actions { +- for _, c := range a.Edit.DocumentChanges { +- // This code action should affect only the specified file; +- // it is safe to ignore others. +- if c.TextDocumentEdit != nil && c.TextDocumentEdit.TextDocument.URI == uri { +- edits = append(edits, protocol.AsTextEdits(c.TextDocumentEdit.Edits)...) +- } +- } +- } +- return applyTextEdits(file.mapper, edits, t.app.editFlags) +-} +diff -urN a/gopls/internal/cmd/info.go b/gopls/internal/cmd/info.go +--- a/gopls/internal/cmd/info.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/cmd/info.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,316 +0,0 @@ +-// Copyright 2019 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package cmd +- +-// This file defines the help, bug, version, api-json, licenses commands. +- +-import ( +- "bytes" +- "context" +- "flag" +- "fmt" +- "io" +- "net/url" +- "os" +- "sort" +- "strings" +- +- "golang.org/x/tools/gopls/internal/debug" +- "golang.org/x/tools/gopls/internal/doc" +- "golang.org/x/tools/gopls/internal/filecache" +- licensespkg "golang.org/x/tools/gopls/internal/licenses" +- "golang.org/x/tools/gopls/internal/util/browser" +- goplsbug "golang.org/x/tools/gopls/internal/util/bug" +- "golang.org/x/tools/internal/tool" +-) +- +-// help implements the help command. +-type help struct { +- app *Application +-} +- +-func (h *help) Name() string { return "help" } +-func (h *help) Parent() string { return h.app.Name() } +-func (h *help) Usage() string { return "" } +-func (h *help) ShortHelp() string { return "print usage information for subcommands" } +-func (h *help) DetailedHelp(f *flag.FlagSet) { +- fmt.Fprint(f.Output(), ` +- +-Examples: +-$ gopls help # main gopls help message +-$ gopls help remote # help on 'remote' command +-$ gopls help remote sessions # help on 'remote sessions' subcommand +-`) +- printFlagDefaults(f) +-} +- +-// Run prints help information about a subcommand. +-func (h *help) Run(ctx context.Context, args ...string) error { +- find := func(cmds []tool.Application, name string) tool.Application { +- for _, cmd := range cmds { +- if cmd.Name() == name { +- return cmd +- } +- } +- return nil +- } +- +- // Find the subcommand denoted by args (empty => h.app). +- var cmd tool.Application = h.app +- for i, arg := range args { +- cmd = find(getSubcommands(cmd), arg) +- if cmd == nil { +- return tool.CommandLineErrorf( +- "no such subcommand: %s", strings.Join(args[:i+1], " ")) +- } +- } +- +- // 'gopls help cmd subcmd' is equivalent to 'gopls cmd subcmd -h'. +- // The flag package prints the usage information (defined by tool.Run) +- // when it sees the -h flag. 
+- fs := flag.NewFlagSet(cmd.Name(), flag.ExitOnError) +- return tool.Run(ctx, fs, h.app, append(args[:len(args):len(args)], "-h")) +-} +- +-// version implements the version command. +-type version struct { +- JSON bool `flag:"json" help:"outputs in json format."` +- +- app *Application +-} +- +-func (v *version) Name() string { return "version" } +-func (v *version) Parent() string { return v.app.Name() } +-func (v *version) Usage() string { return "" } +-func (v *version) ShortHelp() string { return "print the gopls version information" } +-func (v *version) DetailedHelp(f *flag.FlagSet) { +- fmt.Fprint(f.Output(), ``) +- printFlagDefaults(f) +-} +- +-// Run prints version information to stdout. +-func (v *version) Run(ctx context.Context, args ...string) error { +- var mode = debug.PlainText +- if v.JSON { +- mode = debug.JSON +- } +- var buf bytes.Buffer +- debug.WriteVersionInfo(&buf, v.app.verbose(), mode) +- _, err := io.Copy(os.Stdout, &buf) +- return err +-} +- +-// bug implements the bug command. +-type bug struct { +- app *Application +-} +- +-func (b *bug) Name() string { return "bug" } +-func (b *bug) Parent() string { return b.app.Name() } +-func (b *bug) Usage() string { return "" } +-func (b *bug) ShortHelp() string { return "report a bug in gopls" } +-func (b *bug) DetailedHelp(f *flag.FlagSet) { +- fmt.Fprint(f.Output(), ``) +- printFlagDefaults(f) +-} +- +-const goplsBugPrefix = "x/tools/gopls: " +-const goplsBugHeader = `ATTENTION: Please answer these questions BEFORE submitting your issue. Thanks! +- +-#### What did you do? +-If possible, provide a recipe for reproducing the error. +-A complete runnable program is good. +-A link on play.golang.org is better. +-A failing unit test is the best. +- +-#### What did you expect to see? +- +- +-#### What did you see instead? +- +- +-` +- +-// Run collects some basic information and then prepares an issue ready to +-// be reported. +-func (b *bug) Run(ctx context.Context, args ...string) error { +- // This undocumented environment variable allows +- // the cmd integration test (and maintainers) to +- // trigger a call to bug.Report. +- if msg := os.Getenv("TEST_GOPLS_BUG"); msg != "" { +- filecache.Start() // register bug handler +- goplsbug.Report(msg) +- return nil +- } +- +- // Enumerate bug reports, grouped and sorted. +- _, reports := filecache.BugReports() +- sort.Slice(reports, func(i, j int) bool { +- x, y := reports[i], reports[i] +- if x.Key != y.Key { +- return x.Key < y.Key // ascending key order +- } +- return y.AtTime.Before(x.AtTime) // most recent first +- }) +- keyDenom := make(map[string]int) // key is "file:line" +- for _, report := range reports { +- keyDenom[report.Key]++ +- } +- +- // Privacy: the content of 'public' will be posted to GitHub +- // to populate an issue textarea. Even though the user must +- // submit the form to share the information with the world, +- // merely populating the form causes us to share the +- // information with GitHub itself. +- // +- // For that reason, we cannot write private information to +- // public, such as bug reports, which may quote source code. 
+- public := &bytes.Buffer{} +- fmt.Fprint(public, goplsBugHeader) +- if len(reports) > 0 { +- fmt.Fprintf(public, "#### Internal errors\n\n") +- fmt.Fprintf(public, "Gopls detected %d internal errors, %d distinct:\n", +- len(reports), len(keyDenom)) +- for key, denom := range keyDenom { +- fmt.Fprintf(public, "- %s (%d)\n", key, denom) +- } +- fmt.Fprintf(public, "\nPlease copy the full information printed by `gopls bug` here, if you are comfortable sharing it.\n\n") +- } +- debug.WriteVersionInfo(public, true, debug.Markdown) +- body := public.String() +- title := strings.Join(args, " ") +- if !strings.HasPrefix(title, goplsBugPrefix) { +- title = goplsBugPrefix + title +- } +- if !browser.Open("https://github.com/golang/go/issues/new?title=" + url.QueryEscape(title) + "&body=" + url.QueryEscape(body)) { +- fmt.Print("Please file a new issue at golang.org/issue/new using this template:\n\n") +- fmt.Print(body) +- } +- +- // Print bug reports to stdout (not GitHub). +- keyNum := make(map[string]int) +- for _, report := range reports { +- fmt.Printf("-- %v -- \n", report.AtTime) +- +- // Append seq number (e.g. " (1/2)") for repeated keys. +- var seq string +- if denom := keyDenom[report.Key]; denom > 1 { +- keyNum[report.Key]++ +- seq = fmt.Sprintf(" (%d/%d)", keyNum[report.Key], denom) +- } +- +- // Privacy: +- // - File and Stack may contain the name of the user that built gopls. +- // - Description may contain names of the user's packages/files/symbols. +- fmt.Printf("%s:%d: %s%s\n\n", report.File, report.Line, report.Description, seq) +- fmt.Printf("%s\n\n", report.Stack) +- } +- if len(reports) > 0 { +- fmt.Printf("Please copy the above information into the GitHub issue, if you are comfortable sharing it.\n") +- } +- +- return nil +-} +- +-type apiJSON struct { +- app *Application +-} +- +-func (j *apiJSON) Name() string { return "api-json" } +-func (j *apiJSON) Parent() string { return j.app.Name() } +-func (j *apiJSON) Usage() string { return "" } +-func (j *apiJSON) ShortHelp() string { return "print JSON describing gopls API" } +-func (j *apiJSON) DetailedHelp(f *flag.FlagSet) { +- fmt.Fprint(f.Output(), ` +-The api-json command prints a JSON value that describes +-and documents all gopls' public interfaces. +-Its schema is defined by golang.org/x/tools/gopls/internal/doc.API. +-`) +- printFlagDefaults(f) +-} +- +-func (j *apiJSON) Run(ctx context.Context, args ...string) error { +- os.Stdout.WriteString(doc.JSON) +- fmt.Println() +- return nil +-} +- +-type licenses struct { +- app *Application +-} +- +-func (l *licenses) Name() string { return "licenses" } +-func (l *licenses) Parent() string { return l.app.Name() } +-func (l *licenses) Usage() string { return "" } +-func (l *licenses) ShortHelp() string { return "print licenses of included software" } +-func (l *licenses) DetailedHelp(f *flag.FlagSet) { +- fmt.Fprint(f.Output(), ``) +- printFlagDefaults(f) +-} +- +-const licensePreamble = ` +-gopls is made available under the following BSD-style license: +- +-Copyright (c) 2009 The Go Authors. All rights reserved. +- +-Redistribution and use in source and binary forms, with or without +-modification, are permitted provided that the following conditions are +-met: +- +- * Redistributions of source code must retain the above copyright +-notice, this list of conditions and the following disclaimer. 
+- * Redistributions in binary form must reproduce the above +-copyright notice, this list of conditions and the following disclaimer +-in the documentation and/or other materials provided with the +-distribution. +- * Neither the name of Google Inc. nor the names of its +-contributors may be used to endorse or promote products derived from +-this software without specific prior written permission. +- +-THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +-"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +-LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +-A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +-OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +-SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +-LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +-DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +-THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +-(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +-OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +- +-gopls implements the LSP specification, which is made available under the following license: +- +-Copyright (c) Microsoft Corporation +- +-All rights reserved. +- +-MIT License +- +-Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation +-files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, +-modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software +-is furnished to do so, subject to the following conditions: +- +-The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. +- +-THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +-OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +-BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT +-OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +- +-gopls also includes software made available under these licenses: +-` +- +-func (l *licenses) Run(ctx context.Context, args ...string) error { +- txt := licensePreamble +- if licensespkg.Text == "" { +- txt += "(development gopls, license information not available)" +- } else { +- txt += licensespkg.Text +- } +- fmt.Fprint(os.Stdout, txt) +- return nil +-} +diff -urN a/gopls/internal/cmd/integration_test.go b/gopls/internal/cmd/integration_test.go +--- a/gopls/internal/cmd/integration_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/cmd/integration_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,1257 +0,0 @@ +-// Copyright 2023 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-// Package cmdtest contains the test suite for the command line behavior of gopls. +-package cmd_test +- +-// This file defines integration tests of each gopls subcommand that +-// fork+exec the command in a separate process. 
+-// +-// (Rather than execute 'go build gopls' during the test, we reproduce +-// the main entrypoint in the test executable.) +-// +-// The purpose of this test is to exercise client-side logic such as +-// argument parsing and formatting of LSP RPC responses, not server +-// behavior; see lsp_test for that. +-// +-// All tests run in parallel. +-// +-// TODO(adonovan): +-// - Use markers to represent positions in the input and in assertions. +-// - Coverage of cross-cutting things like cwd, environ, span parsing, etc. +-// - Subcommands that accept -write and -diff flags implement them +-// consistently; factor their tests. +-// - Add missing test for 'vulncheck' subcommand. +-// - Add tests for client-only commands: serve, bug, help, api-json, licenses. +- +-import ( +- "bytes" +- "context" +- "encoding/json" +- "fmt" +- "math/rand" +- "os" +- "os/exec" +- "path/filepath" +- "regexp" +- "strings" +- "testing" +- +- "golang.org/x/tools/gopls/internal/cmd" +- "golang.org/x/tools/gopls/internal/debug" +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/gopls/internal/util/bug" +- "golang.org/x/tools/gopls/internal/version" +- "golang.org/x/tools/internal/testenv" +- "golang.org/x/tools/internal/tool" +- "golang.org/x/tools/txtar" +-) +- +-// TestVersion tests the 'version' subcommand (info.go). +-func TestVersion(t *testing.T) { +- t.Parallel() +- +- tree := writeTree(t, "") +- +- // There's not much we can robustly assert about the actual version. +- want := version.Version() // e.g. "master" +- +- // basic +- { +- res := gopls(t, tree, "version") +- res.checkExit(true) +- res.checkStdout(want) +- } +- +- // basic, with version override +- { +- res := goplsWithEnv(t, tree, []string{"TEST_GOPLS_VERSION=v1.2.3"}, "version") +- res.checkExit(true) +- res.checkStdout(`v1\.2\.3`) +- } +- +- // -json flag +- { +- res := gopls(t, tree, "version", "-json") +- res.checkExit(true) +- var v debug.ServerVersion +- if res.toJSON(&v) { +- if v.Version != want { +- t.Errorf("expected Version %q, got %q (%v)", want, v.Version, res) +- } +- } +- } +-} +- +-// TestCheck tests the 'check' subcommand (check.go). +-func TestCheck(t *testing.T) { +- t.Parallel() +- +- tree := writeTree(t, ` +--- go.mod -- +-module example.com +-go 1.18 +- +--- a.go -- +-package a +-import "fmt" +-var _ = fmt.Sprintf("%s", 123) +- +--- b.go -- +-package a +-import "fmt" +-var _ = fmt.Sprintf("%d", "123") +--- c/c.go -- +-package c +-var C int +--- c/c2.go -- +-package c +-var C int +--- d/d.go -- +-package d +- +-import "io/ioutil" +- +-var _ = ioutil.ReadFile +-`) +- +- // no files +- { +- res := gopls(t, tree, "check") +- res.checkExit(true) +- if res.stdout != "" { +- t.Errorf("unexpected output: %v", res) +- } +- } +- +- // one file +- { +- res := gopls(t, tree, "check", "./a.go") +- res.checkExit(true) +- res.checkStdout("fmt.Sprintf format %s has arg 123 of wrong type int") +- } +- +- // two files +- { +- res := gopls(t, tree, "check", "./a.go", "./b.go") +- res.checkExit(true) +- res.checkStdout(`a.go:.* fmt.Sprintf format %s has arg 123 of wrong type int`) +- res.checkStdout(`b.go:.* fmt.Sprintf format %d has arg "123" of wrong type string`) +- } +- +- // diagnostic with related information spanning files +- { +- res := gopls(t, tree, "check", "./c/c2.go") +- res.checkExit(true) +- res.checkStdout(`c2.go:2:5-6: C redeclared in this block`) +- res.checkStdout(`c.go:2:5-6: - other declaration of C`) +- } +- +- // No deprecated (hint) diagnostic without -severity. 
+- { +- res := gopls(t, tree, "check", "./d/d.go") +- res.checkExit(true) +- if len(res.stdout) > 0 { +- t.Errorf("check ./d/d.go returned unexpected output:\n%s", res.stdout) +- } +- } +- +- // Deprecated (hint) diagnostics with -severity=hint +- { +- res := gopls(t, tree, "check", "-severity=hint", "./d/d.go") +- res.checkExit(true) +- res.checkStdout(`ioutil.ReadFile is deprecated`) +- } +-} +- +-// TestCallHierarchy tests the 'call_hierarchy' subcommand (call_hierarchy.go). +-func TestCallHierarchy(t *testing.T) { +- t.Parallel() +- +- tree := writeTree(t, ` +--- go.mod -- +-module example.com +-go 1.18 +- +--- a.go -- +-package a +-func f() {} +-func g() { +- f() +-} +-func h() { +- f() +- f() +-} +-`) +- // missing position +- { +- res := gopls(t, tree, "call_hierarchy") +- res.checkExit(false) +- res.checkStderr("expects 1 argument") +- } +- // wrong place +- { +- res := gopls(t, tree, "call_hierarchy", "a.go:1") +- res.checkExit(false) +- res.checkStderr("identifier not found") +- } +- // f is called once from g and twice from h. +- { +- res := gopls(t, tree, "call_hierarchy", "a.go:2:6") +- res.checkExit(true) +- // We use regexp '.' as an OS-agnostic path separator. +- res.checkStdout("ranges 7:2-3, 8:2-3 in ..a.go from/to function h in ..a.go:6:6-7") +- res.checkStdout("ranges 4:2-3 in ..a.go from/to function g in ..a.go:3:6-7") +- res.checkStdout("identifier: function f in ..a.go:2:6-7") +- } +-} +- +-// TestCodeLens tests the 'codelens' subcommand (codelens.go). +-func TestCodeLens(t *testing.T) { +- t.Parallel() +- +- tree := writeTree(t, ` +--- go.mod -- +-module example.com +-go 1.18 +- +--- a/a.go -- +-package a +--- a/a_test.go -- +-package a_test +-import "testing" +-func TestPass(t *testing.T) {} +-func TestFail(t *testing.T) { t.Fatal("fail") } +-`) +- // missing position +- { +- res := gopls(t, tree, "codelens") +- res.checkExit(false) +- res.checkStderr("requires a file name") +- } +- // list code lenses +- { +- res := gopls(t, tree, "codelens", "./a/a_test.go") +- res.checkExit(true) +- res.checkStdout(`a_test.go:3: "run test" \[gopls.run_tests\]`) +- res.checkStdout(`a_test.go:4: "run test" \[gopls.run_tests\]`) +- } +- // no codelens with title/position +- { +- res := gopls(t, tree, "codelens", "-exec", "./a/a_test.go:1", "nope") +- res.checkExit(false) +- res.checkStderr(`no code lens at .* with title "nope"`) +- } +- // run the passing test +- { +- res := gopls(t, tree, "-v", "codelens", "-exec", "./a/a_test.go:3", "run test") +- res.checkExit(true) +- res.checkStderr(`PASS: TestPass`) // from go test +- res.checkStderr("Info: all tests passed") // from gopls.test +- } +- // run the failing test +- { +- res := gopls(t, tree, "codelens", "-exec", "./a/a_test.go:4", "run test") +- res.checkExit(false) +- res.checkStderr(`FAIL example.com/a`) +- res.checkStderr("Info: 1 / 1 tests failed") +- } +-} +- +-// TestDefinition tests the 'definition' subcommand (definition.go). 
+-func TestDefinition(t *testing.T) { +- t.Parallel() +- +- tree := writeTree(t, ` +--- go.mod -- +-module example.com +-go 1.18 +- +--- a.go -- +-package a +-import "fmt" +-func f() { +- fmt.Println() +-} +-func g() { +- f() +-} +-`) +- // missing position +- { +- res := gopls(t, tree, "definition") +- res.checkExit(false) +- res.checkStderr("expects 1 argument") +- } +- // intra-package +- { +- res := gopls(t, tree, "definition", "a.go:7:2") // "f()" +- res.checkExit(true) +- res.checkStdout("a.go:3:6-7: defined here as func f") +- } +- // cross-package +- { +- res := gopls(t, tree, "definition", "a.go:4:7") // "Println" +- res.checkExit(true) +- res.checkStdout("print.go.* defined here as func fmt.Println") +- res.checkStdout("Println formats using the default formats for its operands") +- } +- // -json and -markdown +- { +- res := gopls(t, tree, "definition", "-json", "-markdown", "a.go:4:7") +- res.checkExit(true) +- var defn cmd.Definition +- if res.toJSON(&defn) { +- if !strings.HasPrefix(defn.Description, "```go\nfunc fmt.Println") { +- t.Errorf("Description does not start with markdown code block. Got: %s", defn.Description) +- } +- } +- } +-} +- +-// TestExecute tests the 'execute' subcommand (execute.go). +-func TestExecute(t *testing.T) { +- t.Parallel() +- +- tree := writeTree(t, ` +--- go.mod -- +-module example.com +-go 1.18 +- +--- hello.go -- +-package a +-func main() {} +- +--- hello_test.go -- +-package a +-import "testing" +-func TestHello(t *testing.T) { +- t.Fatal("oops") +-} +-`) +- // missing command name +- { +- res := gopls(t, tree, "execute") +- res.checkExit(false) +- res.checkStderr("requires a command") +- } +- // bad command +- { +- res := gopls(t, tree, "execute", "gopls.foo") +- res.checkExit(false) +- res.checkStderr("unrecognized command: gopls.foo") +- } +- // too few arguments +- { +- res := gopls(t, tree, "execute", "gopls.run_tests") +- res.checkExit(false) +- res.checkStderr("expected 1 input arguments, got 0") +- } +- // too many arguments +- { +- res := gopls(t, tree, "execute", "gopls.run_tests", "null", "null") +- res.checkExit(false) +- res.checkStderr("expected 1 input arguments, got 2") +- } +- // argument is not JSON +- { +- res := gopls(t, tree, "execute", "gopls.run_tests", "hello") +- res.checkExit(false) +- res.checkStderr("argument 1 is not valid JSON: invalid character 'h'") +- } +- // add import, show diff +- hello := "file://" + filepath.ToSlash(tree) + "/hello.go" +- { +- res := gopls(t, tree, "execute", "-d", "gopls.add_import", `{"ImportPath": "fmt", "URI": "`+hello+`"}`) +- res.checkExit(true) +- res.checkStdout(`[+]import "fmt"`) +- } +- // list known packages (has a result) +- { +- res := gopls(t, tree, "execute", "gopls.list_known_packages", `{"URI": "`+hello+`"}`) +- res.checkExit(true) +- res.checkStdout(`"fmt"`) +- res.checkStdout(`"encoding/json"`) +- } +- // run tests +- { +- helloTest := "file://" + filepath.ToSlash(tree) + "/hello_test.go" +- res := gopls(t, tree, "execute", "gopls.run_tests", `{"URI": "`+helloTest+`", "Tests": ["TestHello"]}`) +- res.checkExit(false) +- res.checkStderr(`hello_test.go:4: oops`) +- res.checkStderr(`1 / 1 tests failed`) +- } +-} +- +-// TestFoldingRanges tests the 'folding_ranges' subcommand (folding_range.go). 
+-func TestFoldingRanges(t *testing.T) { +- t.Parallel() +- +- tree := writeTree(t, ` +--- go.mod -- +-module example.com +-go 1.18 +- +--- a.go -- +-package a +-func f(x int) { +- // hello +-} +-`) +- // missing filename +- { +- res := gopls(t, tree, "folding_ranges") +- res.checkExit(false) +- res.checkStderr("expects 1 argument") +- } +- // success +- { +- res := gopls(t, tree, "folding_ranges", "a.go") +- res.checkExit(true) +- res.checkStdout("2:8-2:13") // params (x int) +- res.checkStdout("2:16-4:1") // body { ... } +- } +-} +- +-// TestFormat tests the 'format' subcommand (format.go). +-func TestFormat(t *testing.T) { +- t.Parallel() +- +- tree := writeTree(t, ` +--- a.go -- +-package a ; func f ( ) { } +-`) +- const want = `package a +- +-func f() {} +-` +- +- // no files => nop +- { +- res := gopls(t, tree, "format") +- res.checkExit(true) +- } +- // default => print formatted result +- { +- res := gopls(t, tree, "format", "a.go") +- res.checkExit(true) +- if res.stdout != want { +- t.Errorf("format: got <<%s>>, want <<%s>>", res.stdout, want) +- } +- } +- // start/end position not supported (unless equal to start/end of file) +- { +- res := gopls(t, tree, "format", "a.go:1-2") +- res.checkExit(false) +- res.checkStderr("only full file formatting supported") +- } +- // -list: show only file names +- { +- res := gopls(t, tree, "format", "-list", "a.go") +- res.checkExit(true) +- res.checkStdout("a.go") +- } +- // -diff prints a unified diff +- { +- res := gopls(t, tree, "format", "-diff", "a.go") +- res.checkExit(true) +- // We omit the filenames as they vary by OS. +- want := ` +--package a ; func f ( ) { } +-+package a +-+ +-+func f() {} +-` +- res.checkStdout(regexp.QuoteMeta(want)) +- } +- // -write updates the file +- { +- res := gopls(t, tree, "format", "-write", "a.go") +- res.checkExit(true) +- res.checkStdout("^$") // empty +- checkContent(t, filepath.Join(tree, "a.go"), want) +- } +-} +- +-// TestHighlight tests the 'highlight' subcommand (highlight.go). +-func TestHighlight(t *testing.T) { +- t.Parallel() +- +- tree := writeTree(t, ` +--- a.go -- +-package a +-import "fmt" +-func f() { +- fmt.Println() +- fmt.Println() +-} +-`) +- +- // no arguments +- { +- res := gopls(t, tree, "highlight") +- res.checkExit(false) +- res.checkStderr("expects 1 argument") +- } +- // all occurrences of Println +- { +- res := gopls(t, tree, "highlight", "a.go:4:7") +- res.checkExit(true) +- res.checkStdout("a.go:4:6-13") +- res.checkStdout("a.go:5:6-13") +- } +-} +- +-// TestImplementations tests the 'implementation' subcommand (implementation.go). +-func TestImplementations(t *testing.T) { +- t.Parallel() +- +- tree := writeTree(t, ` +--- a.go -- +-package a +-import "fmt" +-type T int +-func (T) String() string { return "" } +-`) +- +- // no arguments +- { +- res := gopls(t, tree, "implementation") +- res.checkExit(false) +- res.checkStderr("expects 1 argument") +- } +- // T.String +- { +- res := gopls(t, tree, "implementation", "a.go:4:10") +- res.checkExit(true) +- // TODO(adonovan): extract and check the content of the reported ranges? +- // We use regexp '.' as an OS-agnostic path separator. +- res.checkStdout("fmt.print.go:") // fmt.Stringer.String +- res.checkStdout("runtime.error.go:") // runtime.stringer.String +- } +-} +- +-// TestImports tests the 'imports' subcommand (imports.go). 
+-func TestImports(t *testing.T) { +- t.Parallel() +- +- tree := writeTree(t, ` +--- a.go -- +-package a +-func _() { +- fmt.Println() +-} +-`) +- +- want := ` +-package a +- +-import "fmt" +-func _() { +- fmt.Println() +-} +-`[1:] +- +- // no arguments +- { +- res := gopls(t, tree, "imports") +- res.checkExit(false) +- res.checkStderr("expects 1 argument") +- } +- // default: print with imports +- { +- res := gopls(t, tree, "imports", "a.go") +- res.checkExit(true) +- if res.stdout != want { +- t.Errorf("imports: got <<%s>>, want <<%s>>", res.stdout, want) +- } +- } +- // -diff: show a unified diff +- { +- res := gopls(t, tree, "imports", "-diff", "a.go") +- res.checkExit(true) +- res.checkStdout(regexp.QuoteMeta(`+import "fmt"`)) +- } +- // -write: update file +- { +- res := gopls(t, tree, "imports", "-write", "a.go") +- res.checkExit(true) +- checkContent(t, filepath.Join(tree, "a.go"), want) +- } +-} +- +-// TestLinks tests the 'links' subcommand (links.go). +-func TestLinks(t *testing.T) { +- t.Parallel() +- +- tree := writeTree(t, ` +--- a.go -- +-// Link in package doc: https://pkg.go.dev/ +-package a +- +-// Link in internal comment: https://go.dev/cl +- +-// Doc comment link: https://blog.go.dev/ +-func f() {} +-`) +- // no arguments +- { +- res := gopls(t, tree, "links") +- res.checkExit(false) +- res.checkStderr("expects 1 argument") +- } +- // success +- { +- res := gopls(t, tree, "links", "a.go") +- res.checkExit(true) +- res.checkStdout("https://go.dev/cl") +- res.checkStdout("https://pkg.go.dev") +- res.checkStdout("https://blog.go.dev/") +- } +- // -json +- { +- res := gopls(t, tree, "links", "-json", "a.go") +- res.checkExit(true) +- res.checkStdout("https://pkg.go.dev") +- res.checkStdout("https://go.dev/cl") +- res.checkStdout("https://blog.go.dev/") // at 5:21-5:41 +- var links []protocol.DocumentLink +- if res.toJSON(&links) { +- // Check just one of the three locations. +- if got, want := fmt.Sprint(links[2].Range), "5:21-5:41"; got != want { +- t.Errorf("wrong link location: got %v, want %v", got, want) +- } +- } +- } +-} +- +-// TestReferences tests the 'references' subcommand (references.go). +-func TestReferences(t *testing.T) { +- t.Parallel() +- +- tree := writeTree(t, ` +--- go.mod -- +-module example.com +-go 1.18 +- +--- a.go -- +-package a +-import "fmt" +-func f() { +- fmt.Println() +-} +- +--- b.go -- +-package a +-import "fmt" +-func g() { +- fmt.Println() +-} +-`) +- // no arguments +- { +- res := gopls(t, tree, "references") +- res.checkExit(false) +- res.checkStderr("expects 1 argument") +- } +- // fmt.Println +- { +- res := gopls(t, tree, "references", "a.go:4:10") +- res.checkExit(true) +- res.checkStdout("a.go:4:6-13") +- res.checkStdout("b.go:4:6-13") +- } +-} +- +-// TestSignature tests the 'signature' subcommand (signature.go). +-func TestSignature(t *testing.T) { +- t.Parallel() +- +- tree := writeTree(t, ` +--- go.mod -- +-module example.com +-go 1.18 +- +--- a.go -- +-package a +-import "fmt" +-func f() { +- fmt.Println(123) +-} +-`) +- // no arguments +- { +- res := gopls(t, tree, "signature") +- res.checkExit(false) +- res.checkStderr("expects 1 argument") +- } +- // at 123 inside fmt.Println() call +- { +- res := gopls(t, tree, "signature", "a.go:4:15") +- res.checkExit(true) +- res.checkStdout("Println\\(a ...") +- res.checkStdout("Println formats using the default formats...") +- } +-} +- +-// TestPrepareRename tests the 'prepare_rename' subcommand (prepare_rename.go). 
+-func TestPrepareRename(t *testing.T) { +- t.Parallel() +- +- tree := writeTree(t, ` +--- go.mod -- +-module example.com +-go 1.18 +- +--- a.go -- +-package a +-func oldname() {} +-`) +- // no arguments +- { +- res := gopls(t, tree, "prepare_rename") +- res.checkExit(false) +- res.checkStderr("expects 1 argument") +- } +- // in 'package' keyword +- { +- res := gopls(t, tree, "prepare_rename", "a.go:1:3") +- res.checkExit(false) +- res.checkStderr("request is not valid at the given position") +- } +- // in 'package' identifier (not supported by client) +- { +- res := gopls(t, tree, "prepare_rename", "a.go:1:9") +- res.checkExit(false) +- res.checkStderr("can't rename package") +- } +- // in func oldname +- { +- res := gopls(t, tree, "prepare_rename", "a.go:2:9") +- res.checkExit(true) +- res.checkStdout("a.go:2:6-13") // all of "oldname" +- } +-} +- +-// TestRename tests the 'rename' subcommand (rename.go). +-func TestRename(t *testing.T) { +- t.Parallel() +- +- tree := writeTree(t, ` +--- go.mod -- +-module example.com +-go 1.18 +- +--- a.go -- +-package a +-func oldname() {} +-`) +- // no arguments +- { +- res := gopls(t, tree, "rename") +- res.checkExit(false) +- res.checkStderr("expects 2 arguments") +- } +- // missing newname +- { +- res := gopls(t, tree, "rename", "a.go:1:3") +- res.checkExit(false) +- res.checkStderr("expects 2 arguments") +- } +- // in 'package' keyword +- { +- res := gopls(t, tree, "rename", "a.go:1:3", "newname") +- res.checkExit(false) +- res.checkStderr("no identifier found") +- } +- // in 'package' identifier +- { +- res := gopls(t, tree, "rename", "a.go:1:9", "newname") +- res.checkExit(false) +- res.checkStderr(`cannot rename package: module path .* same as the package path, so .* no effect`) +- } +- // success, func oldname (and -diff) +- { +- res := gopls(t, tree, "rename", "-diff", "a.go:2:9", "newname") +- res.checkExit(true) +- res.checkStdout(regexp.QuoteMeta("-func oldname() {}")) +- res.checkStdout(regexp.QuoteMeta("+func newname() {}")) +- } +-} +- +-// TestSymbols tests the 'symbols' subcommand (symbols.go). +-func TestSymbols(t *testing.T) { +- t.Parallel() +- +- tree := writeTree(t, ` +--- go.mod -- +-module example.com +-go 1.18 +- +--- a.go -- +-package a +-func f() +-var v int +-const c = 0 +-`) +- // no files +- { +- res := gopls(t, tree, "symbols") +- res.checkExit(false) +- res.checkStderr("expects 1 argument") +- } +- // success +- { +- res := gopls(t, tree, "symbols", "a.go:123:456") // (line/col ignored) +- res.checkExit(true) +- res.checkStdout("f Function 2:6-2:7") +- res.checkStdout("v Variable 3:5-3:6") +- res.checkStdout("c Constant 4:7-4:8") +- } +-} +- +-// TestSemtok tests the 'semtok' subcommand (semantictokens.go). 
+-func TestSemtok(t *testing.T) { +- t.Parallel() +- +- tree := writeTree(t, ` +--- go.mod -- +-module example.com +-go 1.18 +- +--- a.go -- +-package a +-func f() +-var v int +-const c = 0 +-`) +- // no files +- { +- res := gopls(t, tree, "semtok") +- res.checkExit(false) +- res.checkStderr("expected one file name") +- } +- // success +- { +- res := gopls(t, tree, "semtok", "a.go") +- res.checkExit(true) +- got := res.stdout +- want := ` +-/*⇒7,keyword,[]*/package /*⇒1,namespace,[]*/a +-/*⇒4,keyword,[]*/func /*⇒1,function,[definition signature]*/f() +-/*⇒3,keyword,[]*/var /*⇒1,variable,[definition number]*/v /*⇒3,type,[defaultLibrary number]*/int +-/*⇒5,keyword,[]*/const /*⇒1,variable,[definition readonly number]*/c = /*⇒1,number,[]*/0 +-`[1:] +- if got != want { +- t.Errorf("semtok: got <<%s>>, want <<%s>>", got, want) +- } +- } +-} +- +-func TestStats(t *testing.T) { +- t.Parallel() +- +- tree := writeTree(t, ` +--- go.mod -- +-module example.com +-go 1.18 +- +--- a.go -- +-package a +--- b/b.go -- +-package b +--- testdata/foo.go -- +-package foo +-`) +- +- // Trigger a bug report with a distinctive string +- // and check that it was durably recorded. +- oops := fmt.Sprintf("oops-%d", rand.Int()) +- { +- env := []string{"TEST_GOPLS_BUG=" + oops} +- res := goplsWithEnv(t, tree, env, "bug") +- res.checkExit(true) +- } +- +- res := gopls(t, tree, "stats") +- res.checkExit(true) +- +- var stats cmd.GoplsStats +- if err := json.Unmarshal([]byte(res.stdout), &stats); err != nil { +- t.Fatalf("failed to unmarshal JSON output of stats command: %v", err) +- } +- +- // a few sanity checks +- checks := []struct { +- field string +- got int +- want int +- }{ +- { +- "WorkspaceStats.Views[0].WorkspaceModules", +- stats.WorkspaceStats.Views[0].WorkspacePackages.Modules, +- 1, +- }, +- { +- "WorkspaceStats.Views[0].WorkspacePackages", +- stats.WorkspaceStats.Views[0].WorkspacePackages.Packages, +- 2, +- }, +- {"DirStats.Files", stats.DirStats.Files, 4}, +- {"DirStats.GoFiles", stats.DirStats.GoFiles, 2}, +- {"DirStats.ModFiles", stats.DirStats.ModFiles, 1}, +- {"DirStats.TestdataFiles", stats.DirStats.TestdataFiles, 1}, +- } +- for _, check := range checks { +- if check.got != check.want { +- t.Errorf("stats.%s = %d, want %d", check.field, check.got, check.want) +- } +- } +- +- // Check that we got a BugReport with the expected message. +- { +- got := fmt.Sprint(stats.BugReports) +- wants := []string{ +- "cmd/info.go", // File containing call to bug.Report +- oops, // Description +- } +- for _, want := range wants { +- if !strings.Contains(got, want) { +- t.Errorf("BugReports does not contain %q. Got:<<%s>>", want, got) +- break +- } +- } +- } +- +- // Check that -anon suppresses fields containing user information. +- { +- res2 := gopls(t, tree, "stats", "-anon") +- res2.checkExit(true) +- +- var stats2 cmd.GoplsStats +- if err := json.Unmarshal([]byte(res2.stdout), &stats2); err != nil { +- t.Fatalf("failed to unmarshal JSON output of stats command: %v", err) +- } +- if got := len(stats2.BugReports); got > 0 { +- t.Errorf("Got %d bug reports with -anon, want 0. Reports:%+v", got, stats2.BugReports) +- } +- var stats2AsMap map[string]any +- if err := json.Unmarshal([]byte(res2.stdout), &stats2AsMap); err != nil { +- t.Fatalf("failed to unmarshal JSON output of stats command: %v", err) +- } +- // GOPACKAGESDRIVER is user information, but is ok to print zero value. 
+- if v, ok := stats2AsMap["GOPACKAGESDRIVER"]; ok && v != "" { +- t.Errorf(`Got GOPACKAGESDRIVER=(%v, %v); want ("", true(found))`, v, ok) +- } +- } +- +- // Check that -anon suppresses fields containing non-zero user information. +- { +- res3 := goplsWithEnv(t, tree, []string{"GOPACKAGESDRIVER=off"}, "stats", "-anon") +- res3.checkExit(true) +- +- var statsAsMap3 map[string]any +- if err := json.Unmarshal([]byte(res3.stdout), &statsAsMap3); err != nil { +- t.Fatalf("failed to unmarshal JSON output of stats command: %v", err) +- } +- // GOPACKAGESDRIVER is user information, want non-empty value to be omitted. +- if v, ok := statsAsMap3["GOPACKAGESDRIVER"]; ok { +- t.Errorf(`Got GOPACKAGESDRIVER=(%q, %v); want ("", false(not found))`, v, ok) +- } +- } +-} +- +-// TestCodeAction tests the 'codeaction' subcommand (codeaction.go). +-func TestCodeAction(t *testing.T) { +- t.Parallel() +- +- tree := writeTree(t, ` +--- go.mod -- +-module example.com +-go 1.18 +- +--- a/a.go -- +-package a +-type T int +-func f() (int, string) { return } +- +--- a/b.go -- +-package a +-import "io" +-var _ io.Reader = C{} +-type C struct{} +-`) +- +- // no arguments +- { +- res := gopls(t, tree, "codeaction") +- res.checkExit(false) +- res.checkStderr("expects at least 1 argument") +- } +- // list code actions in file +- { +- res := gopls(t, tree, "codeaction", "a/a.go") +- res.checkExit(true) +- res.checkStdout(`edit "Fill in return values" \[quickfix\]`) +- res.checkStdout(`command "Browse documentation for package a" \[source.doc\]`) +- } +- // list code actions in file, filtering by title +- { +- res := gopls(t, tree, "codeaction", "-title=Browse.*doc", "a/a.go") +- res.checkExit(true) +- got := res.stdout +- want := `command "Browse documentation for package a" [source.doc]` + +- "\n" + +- `command "Browse gopls feature documentation" [gopls.doc.features]` + +- "\n" +- if got != want { +- t.Errorf("codeaction: got <<%s>>, want <<%s>>\nstderr:\n%s", got, want, res.stderr) +- } +- } +- // list code actions in file, filtering (hierarchically) by kind +- { +- res := gopls(t, tree, "codeaction", "-kind=source", "a/a.go") +- res.checkExit(true) +- got := res.stdout +- want := `command "Browse documentation for package a" [source.doc]` + +- "\n" + +- `command "Split package \"a\"" [source.splitPackage]` + +- "\n" + +- `command "Show compiler optimization details for \"a\"" [source.toggleCompilerOptDetails]` + +- "\n" +- if got != want { +- t.Errorf("codeaction: got <<%s>>, want <<%s>>\nstderr:\n%s", got, want, res.stderr) +- } +- } +- // list code actions at position (of io.Reader) +- { +- res := gopls(t, tree, "codeaction", "a/b.go:#31") +- res.checkExit(true) +- res.checkStdout(`command "Browse documentation for type io.Reader" \[source.doc]`) +- } +- // list quick fixes at position (of type T) +- { +- res := gopls(t, tree, "codeaction", "-kind=quickfix", "a/a.go:#15") +- res.checkExit(true) +- got := res.stdout +- want := `edit "Fill in return values" [quickfix]` + "\n" +- if got != want { +- t.Errorf("codeaction: got <<%s>>, want <<%s>>\nstderr:\n%s", got, want, res.stderr) +- } +- } +- // success, with explicit CodeAction kind and diagnostics span. +- { +- res := gopls(t, tree, "codeaction", "-kind=quickfix", "-exec", "a/b.go:#40") +- res.checkExit(true) +- got := res.stdout +- want := ` +-package a +- +-import "io" +- +-var _ io.Reader = C{} +- +-type C struct{} +- +-// Read implements [io.Reader]. 
+-func (c C) Read(p []byte) (n int, err error) { +- panic("unimplemented") +-} +-`[1:] +- if got != want { +- t.Errorf("codeaction: got <<%s>>, want <<%s>>\nstderr:\n%s", got, want, res.stderr) +- } +- } +-} +- +-// TestWorkspaceSymbol tests the 'workspace_symbol' subcommand (workspace_symbol.go). +-func TestWorkspaceSymbol(t *testing.T) { +- t.Parallel() +- +- tree := writeTree(t, ` +--- go.mod -- +-module example.com +-go 1.18 +- +--- a.go -- +-package a +-func someFunctionName() +-`) +- // no files +- { +- res := gopls(t, tree, "workspace_symbol") +- res.checkExit(false) +- res.checkStderr("expects 1 argument") +- } +- // success +- { +- res := gopls(t, tree, "workspace_symbol", "meFun") +- res.checkExit(true) +- res.checkStdout("a.go:2:6-22 someFunctionName Function") +- } +-} +- +-// -- test framework -- +- +-func TestMain(m *testing.M) { +- switch os.Getenv("ENTRYPOINT") { +- case "goplsMain": +- goplsMain() +- default: +- os.Exit(m.Run()) +- } +-} +- +-// This function is a stand-in for gopls.main in ../../../../main.go. +-func goplsMain() { +- // Panic on bugs (unlike the production gopls command), +- // except in tests that inject calls to bug.Report. +- if os.Getenv("TEST_GOPLS_BUG") == "" { +- bug.PanicOnBugs = true +- } +- +- if v := os.Getenv("TEST_GOPLS_VERSION"); v != "" { +- version.VersionOverride = v +- } +- +- tool.Main(context.Background(), cmd.New(), os.Args[1:]) +-} +- +-// writeTree extracts a txtar archive into a new directory and returns its path. +-func writeTree(t *testing.T, archive string) string { +- root := t.TempDir() +- +- // This unfortunate step is required because gopls output +- // expands symbolic links in its input file names (arguably it +- // should not), and on macOS the temp dir is in /var -> private/var. +- root, err := filepath.EvalSymlinks(root) +- if err != nil { +- t.Fatal(err) +- } +- +- for _, f := range txtar.Parse([]byte(archive)).Files { +- filename := filepath.Join(root, f.Name) +- if err := os.MkdirAll(filepath.Dir(filename), 0777); err != nil { +- t.Fatal(err) +- } +- if err := os.WriteFile(filename, f.Data, 0666); err != nil { +- t.Fatal(err) +- } +- } +- return root +-} +- +-// gopls executes gopls in a child process. +-func gopls(t *testing.T, dir string, args ...string) *result { +- return goplsWithEnv(t, dir, nil, args...) +-} +- +-func goplsWithEnv(t *testing.T, dir string, env []string, args ...string) *result { +- testenv.NeedsTool(t, "go") +- +- // Catch inadvertent use of dir=".", which would make +- // the ReplaceAll below unpredictable. +- if !filepath.IsAbs(dir) { +- t.Fatalf("dir is not absolute: %s", dir) +- } +- +- goplsCmd := exec.Command(os.Args[0], args...) +- goplsCmd.Env = append(os.Environ(), "ENTRYPOINT=goplsMain") +- goplsCmd.Env = append(goplsCmd.Env, "GOPACKAGESDRIVER=off") +- goplsCmd.Env = append(goplsCmd.Env, env...) 
+- goplsCmd.Dir = dir +- goplsCmd.Stdout = new(bytes.Buffer) +- goplsCmd.Stderr = new(bytes.Buffer) +- +- cmdErr := goplsCmd.Run() +- +- stdout := strings.ReplaceAll(fmt.Sprint(goplsCmd.Stdout), dir, ".") +- stderr := strings.ReplaceAll(fmt.Sprint(goplsCmd.Stderr), dir, ".") +- exitcode := 0 +- if cmdErr != nil { +- if exitErr, ok := cmdErr.(*exec.ExitError); ok { +- exitcode = exitErr.ExitCode() +- } else { +- stderr = cmdErr.Error() // (execve failure) +- exitcode = -1 +- } +- } +- res := &result{ +- t: t, +- command: "gopls " + strings.Join(args, " "), +- exitcode: exitcode, +- stdout: stdout, +- stderr: stderr, +- } +- if false { +- t.Log(res) +- } +- return res +-} +- +-// A result holds the result of a gopls invocation, and provides assertion helpers. +-type result struct { +- t *testing.T +- command string +- exitcode int +- stdout, stderr string +-} +- +-func (res *result) String() string { +- return fmt.Sprintf("%s: exit=%d stdout=<<%s>> stderr=<<%s>>", +- res.command, res.exitcode, res.stdout, res.stderr) +-} +- +-// checkExit asserts that gopls returned the expected exit code. +-func (res *result) checkExit(success bool) { +- res.t.Helper() +- if (res.exitcode == 0) != success { +- res.t.Errorf("%s: exited with code %d, want success: %t (%s)", +- res.command, res.exitcode, success, res) +- } +-} +- +-// checkStdout asserts that the gopls standard output matches the pattern. +-func (res *result) checkStdout(pattern string) { +- res.t.Helper() +- res.checkOutput(pattern, "stdout", res.stdout) +-} +- +-// checkStderr asserts that the gopls standard error matches the pattern. +-func (res *result) checkStderr(pattern string) { +- res.t.Helper() +- res.checkOutput(pattern, "stderr", res.stderr) +-} +- +-func (res *result) checkOutput(pattern, name, content string) { +- res.t.Helper() +- if match, err := regexp.MatchString(pattern, content); err != nil { +- res.t.Errorf("invalid regexp: %v", err) +- } else if !match { +- res.t.Errorf("%s: %s does not match [%s]; got <<%s>>", +- res.command, name, pattern, content) +- } +-} +- +-// toJSON decodes res.stdout as JSON into to *ptr and reports its success. +-func (res *result) toJSON(ptr any) bool { +- if err := json.Unmarshal([]byte(res.stdout), ptr); err != nil { +- res.t.Errorf("invalid JSON %v", err) +- return false +- } +- return true +-} +- +-// checkContent checks that the contents of the file are as expected. +-func checkContent(t *testing.T, filename, want string) { +- data, err := os.ReadFile(filename) +- if err != nil { +- t.Error(err) +- return +- } +- if got := string(data); got != want { +- t.Errorf("content of %s is <<%s>>, want <<%s>>", filename, got, want) +- } +-} +diff -urN a/gopls/internal/cmd/links.go b/gopls/internal/cmd/links.go +--- a/gopls/internal/cmd/links.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/cmd/links.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,76 +0,0 @@ +-// Copyright 2019 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package cmd +- +-import ( +- "context" +- "encoding/json" +- "flag" +- "fmt" +- "os" +- +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/internal/tool" +-) +- +-// links implements the links verb for gopls. 
+-type links struct { +- JSON bool `flag:"json" help:"emit document links in JSON format"` +- +- app *Application +-} +- +-func (l *links) Name() string { return "links" } +-func (l *links) Parent() string { return l.app.Name() } +-func (l *links) Usage() string { return "[links-flags] " } +-func (l *links) ShortHelp() string { return "list links in a file" } +-func (l *links) DetailedHelp(f *flag.FlagSet) { +- fmt.Fprintf(f.Output(), ` +-Example: list links contained within a file: +- +- $ gopls links internal/cmd/check.go +- +-links-flags: +-`) +- printFlagDefaults(f) +-} +- +-// Run finds all the links within a document +-// - if -json is specified, outputs location range and uri +-// - otherwise, prints the a list of unique links +-func (l *links) Run(ctx context.Context, args ...string) error { +- if len(args) != 1 { +- return tool.CommandLineErrorf("links expects 1 argument") +- } +- cli, _, err := l.app.connect(ctx) +- if err != nil { +- return err +- } +- defer cli.terminate(ctx) +- +- from := parseSpan(args[0]) +- uri := from.URI() +- +- if _, err := cli.openFile(ctx, uri); err != nil { +- return err +- } +- results, err := cli.server.DocumentLink(ctx, &protocol.DocumentLinkParams{ +- TextDocument: protocol.TextDocumentIdentifier{ +- URI: uri, +- }, +- }) +- if err != nil { +- return fmt.Errorf("%v: %v", from, err) +- } +- if l.JSON { +- enc := json.NewEncoder(os.Stdout) +- enc.SetIndent("", "\t") +- return enc.Encode(results) +- } +- for _, v := range results { +- fmt.Println(*v.Target) +- } +- return nil +-} +diff -urN a/gopls/internal/cmd/mcp.go b/gopls/internal/cmd/mcp.go +--- a/gopls/internal/cmd/mcp.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/cmd/mcp.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,177 +0,0 @@ +-// Copyright 2025 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package cmd +- +-import ( +- "context" +- "flag" +- "fmt" +- "io" +- "log" +- "os" +- "sync" +- "time" +- +- "golang.org/x/tools/gopls/internal/cache" +- "golang.org/x/tools/gopls/internal/filewatcher" +- "golang.org/x/tools/gopls/internal/mcp" +- "golang.org/x/tools/gopls/internal/protocol" +-) +- +-type headlessMCP struct { +- app *Application +- +- Address string `flag:"listen" help:"the address on which to run the mcp server"` +- Logfile string `flag:"logfile" help:"filename to log to; if unset, logs to stderr"` +- RPCTrace bool `flag:"rpc.trace" help:"print MCP rpc traces; cannot be used with -listen"` +- Instructions bool `flag:"instructions" help:"if set, print gopls' MCP instructions and exit"` +-} +- +-func (m *headlessMCP) Name() string { return "mcp" } +-func (m *headlessMCP) Parent() string { return m.app.Name() } +-func (m *headlessMCP) Usage() string { return "[mcp-flags]" } +-func (m *headlessMCP) ShortHelp() string { return "start the gopls MCP server in headless mode" } +- +-func (m *headlessMCP) DetailedHelp(f *flag.FlagSet) { +- fmt.Fprint(f.Output(), ` +-Starts the gopls MCP server in headless mode, without needing an LSP client. +-Starts the server over stdio or sse with http, depending on whether the listen flag is provided. 
+- +-Examples: +- $ gopls mcp -listen=localhost:3000 +- $ gopls mcp //start over stdio +-`) +- printFlagDefaults(f) +-} +- +-func (m *headlessMCP) Run(ctx context.Context, args ...string) error { +- if m.Instructions { +- fmt.Println(mcp.Instructions) +- return nil +- } +- if m.Address != "" && m.RPCTrace { +- // There's currently no way to plumb logging instrumentation into the SSE +- // transport that is created on connections to the HTTP handler, so we must +- // disallow the -rpc.trace flag when using -listen. +- return fmt.Errorf("-listen is incompatible with -rpc.trace") +- } +- if m.Logfile != "" { +- f, err := os.Create(m.Logfile) +- if err != nil { +- return fmt.Errorf("opening logfile: %v", err) +- } +- log.SetOutput(f) +- defer f.Close() +- } +- +- // Start a new in-process gopls session and create a fake client +- // to connect to it. +- cli, sess, err := m.app.connect(ctx) +- if err != nil { +- return err +- } +- defer cli.terminate(ctx) +- +- var ( +- queueMu sync.Mutex +- queue []protocol.FileEvent +- nonempty = make(chan struct{}) // receivable when len(queue) > 0 +- stop = make(chan struct{}) // closed when Run returns +- ) +- defer close(stop) +- +- // This goroutine forwards file change events to the LSP server. +- go func() { +- for { +- select { +- case <-stop: +- return +- case <-nonempty: +- queueMu.Lock() +- q := queue +- queue = nil +- queueMu.Unlock() +- +- if len(q) > 0 { +- if err := cli.server.DidChangeWatchedFiles(ctx, &protocol.DidChangeWatchedFilesParams{ +- Changes: q, +- }); err != nil { +- log.Printf("failed to notify changed files: %v", err) +- } +- } +- +- } +- } +- }() +- +- w, err := filewatcher.New(500*time.Millisecond, nil, func(events []protocol.FileEvent, err error) { +- if err != nil { +- log.Printf("watch error: %v", err) +- return +- } +- +- if len(events) == 0 { +- return +- } +- +- // Since there is no promise [protocol.Server.DidChangeWatchedFiles] +- // will return immediately, we should buffer the captured events and +- // sent them whenever available in a separate go routine. +- queueMu.Lock() +- queue = append(queue, events...) +- queueMu.Unlock() +- +- select { +- case nonempty <- struct{}{}: +- default: +- } +- }) +- if err != nil { +- return err +- } +- defer w.Close() +- +- // TODO(hxjiang): replace this with LSP initial param workspace root. +- dir, err := os.Getwd() +- if err != nil { +- return err +- } +- if err := w.WatchDir(dir); err != nil { +- return err +- } +- +- if m.Address != "" { +- countHeadlessMCPSSE.Inc() +- return mcp.Serve(ctx, m.Address, &staticSessions{sess, cli.server}, false) +- } else { +- countHeadlessMCPStdIO.Inc() +- var rpcLog io.Writer +- if m.RPCTrace { +- rpcLog = log.Writer() // possibly redirected by -logfile above +- } +- log.Printf("Listening for MCP messages on stdin...") +- return mcp.StartStdIO(ctx, sess, cli.server, rpcLog) +- } +-} +- +-// staticSessions implements the [mcp.Sessions] interface for a single gopls +-// session. 
+-type staticSessions struct { +- session *cache.Session +- server protocol.Server +-} +- +-func (s *staticSessions) SetSessionExitFunc(func(string)) {} +- +-func (s *staticSessions) FirstSession() (*cache.Session, protocol.Server) { +- return s.session, s.server +-} +- +-func (s *staticSessions) Session(id string) (*cache.Session, protocol.Server) { +- if s.session.ID() == id { +- return s.session, s.server +- } +- return nil, nil +-} +diff -urN a/gopls/internal/cmd/mcp_test.go b/gopls/internal/cmd/mcp_test.go +--- a/gopls/internal/cmd/mcp_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/cmd/mcp_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,417 +0,0 @@ +-// Copyright 2025 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package cmd_test +- +-import ( +- "bufio" +- "bytes" +- "context" +- "encoding/json" +- "fmt" +- "net" +- "os" +- "os/exec" +- "path/filepath" +- "runtime" +- "slices" +- "strconv" +- "strings" +- "testing" +- "time" +- +- "github.com/modelcontextprotocol/go-sdk/mcp" +- internal_mcp "golang.org/x/tools/gopls/internal/mcp" +- "golang.org/x/tools/gopls/internal/test/integration/fake" +- "golang.org/x/tools/gopls/internal/vulncheck/vulntest" +- "golang.org/x/tools/internal/testenv" +- "golang.org/x/tools/txtar" +-) +- +-func TestMCPCommandStdio(t *testing.T) { +- // Test that the headless MCP subcommand works, and recognizes file changes. +- if !supportsFsnotify(runtime.GOOS) { +- // See golang/go#74580 +- t.Skipf("skipping on %s; fsnotify is not supported", runtime.GOOS) +- } +- testenv.NeedsExec(t) // stdio transport uses execve(2) +- tree := writeTree(t, ` +--- go.mod -- +-module example.com +-go 1.18 +- +--- a.go -- +-package p +- +-const A = 1 +- +--- b.go -- +-package p +- +-const B = 2 +-`) +- +- goplsCmd := exec.Command(os.Args[0], "mcp") +- goplsCmd.Env = append(os.Environ(), "ENTRYPOINT=goplsMain") +- goplsCmd.Dir = tree +- +- ctx := t.Context() +- client := mcp.NewClient(&mcp.Implementation{Name: "client", Version: "v0.0.1"}, nil) +- mcpSession, err := client.Connect(ctx, &mcp.CommandTransport{Command: goplsCmd}, nil) +- if err != nil { +- t.Fatal(err) +- } +- defer func() { +- if err := mcpSession.Close(); err != nil { +- t.Errorf("closing MCP connection: %v", err) +- } +- }() +- var ( +- tool = "go_diagnostics" +- args = map[string]any{"files": []string{filepath.Join(tree, "a.go")}} +- ) +- // On the first diagnostics call, there should be no diagnostics. +- { +- // Match on a substring of the expected output from the context tool. +- res, err := mcpSession.CallTool(ctx, &mcp.CallToolParams{Name: tool, Arguments: args}) +- if err != nil { +- t.Fatal(err) +- } +- got := resultText(t, res) +- want := "No diagnostics" +- if !strings.Contains(got, want) { +- t.Errorf("CallTool(%s, %v) = %v, want containing %q", tool, args, got, want) +- } +- } +- // Now, create a duplicate diagnostic in "b.go", and expect that the headless +- // MCP server detects the file change. In order to guarantee that the change +- // is detected, sleep long to ensure a different mtime. 
+- time.Sleep(100 * time.Millisecond) +- newContent := "package p\n\nconst A = 2\n" +- if err := os.WriteFile(filepath.Join(tree, "b.go"), []byte(newContent), 0666); err != nil { +- t.Fatal(err) +- } +- { +- res, err := mcpSession.CallTool(ctx, &mcp.CallToolParams{Name: tool, Arguments: args}) +- if err != nil { +- t.Fatal(err) +- } +- got := resultText(t, res) +- want := "redeclared" +- if !strings.Contains(got, want) { +- t.Errorf("CallTool(%s, %v) = %v, want containing %q", tool, args, got, want) +- } +- } +-} +- +-func TestMCPCommandLogging(t *testing.T) { +- // Test that logging flags for headless MCP subcommand work as intended. +- if !supportsFsnotify(runtime.GOOS) { +- // See golang/go#74580 +- t.Skipf("skipping on %s; fsnotify is not supported", runtime.GOOS) +- } +- testenv.NeedsExec(t) // stdio transport uses execve(2) +- +- tests := []struct { +- logFile string // also the subtest name +- trace bool +- want string +- dontWant string +- }{ +- {"notrace.log", false, "stdin", "initialized"}, +- {"trace.log", true, "initialized", ""}, +- } +- +- dir := t.TempDir() +- for _, test := range tests { +- t.Run(test.logFile, func(t *testing.T) { +- tree := writeTree(t, ` +--- go.mod -- +-module example.com +-go 1.18 +- +--- a.go -- +-package p +-`) +- +- logFile := filepath.Join(dir, test.logFile) +- args := []string{"mcp", "-logfile", logFile} +- if test.trace { +- args = append(args, "-rpc.trace") +- } +- goplsCmd := exec.Command(os.Args[0], args...) +- goplsCmd.Env = append(os.Environ(), "ENTRYPOINT=goplsMain") +- goplsCmd.Dir = tree +- +- ctx := t.Context() +- client := mcp.NewClient(&mcp.Implementation{Name: "client", Version: "v0.0.1"}, nil) +- mcpSession, err := client.Connect(ctx, &mcp.CommandTransport{Command: goplsCmd}, nil) +- if err != nil { +- t.Fatal(err) +- } +- if err := mcpSession.Close(); err != nil { +- t.Errorf("closing MCP connection: %v", err) +- } +- logs, err := os.ReadFile(logFile) +- if err != nil { +- t.Fatal(err) +- } +- if test.want != "" && !bytes.Contains(logs, []byte(test.want)) { +- t.Errorf("logs do not contain expected %q", test.want) +- } +- if test.dontWant != "" && bytes.Contains(logs, []byte(test.dontWant)) { +- t.Errorf("logs contain unexpected %q", test.dontWant) +- } +- if t.Failed() { +- t.Logf("Logs:\n%s", string(logs)) +- } +- }) +- } +-} +- +-func TestMCPCommandHTTP(t *testing.T) { +- if !supportsFsnotify(runtime.GOOS) { +- // See golang/go#74580 +- t.Skipf("skipping on %s; fsnotify is not supported", runtime.GOOS) +- } +- testenv.NeedsExec(t) +- tree := writeTree(t, ` +--- go.mod -- +-module example.com +-go 1.18 +--- a.go -- +-package a +- +-import "example.com/b" +- +--- b/b.go -- +-package b +- +-func MyFun() {} +-`) +- port := strconv.Itoa(getRandomPort()) +- addr := "localhost:" + port +- goplsCmd := exec.Command(os.Args[0], "-v", "mcp", "-listen="+addr) +- goplsCmd.Env = append(os.Environ(), "ENTRYPOINT=goplsMain") +- goplsCmd.Dir = tree +- goplsCmd.Stdout = os.Stderr +- +- // Pipe stderr to a scanner, so that we can wait for the log message that +- // tells us the server has started. +- stderr, err := goplsCmd.StderrPipe() +- if err != nil { +- t.Fatal(err) +- } +- // forward stdout to test output +- if err := goplsCmd.Start(); err != nil { +- t.Fatalf("starting gopls: %v", err) +- } +- defer func() { +- if err := goplsCmd.Process.Kill(); err != nil { +- t.Fatalf("killing gopls: %v", err) +- } +- // Wait for the gopls process to exit before we return and the test framework +- // attempts to clean up the temporary directory. 
+- // We expect an error because we killed the process. +- goplsCmd.Wait() +- }() +- +- // Wait for the MCP server to start listening. The referenced log occurs +- // after the connection is opened via net.Listen and the HTTP handlers are +- // set up. +- ready := make(chan bool) +- go func() { +- // Copy from the pipe to stderr, keeping an eye out for the "mcp http +- // server listening" string. +- scan := bufio.NewScanner(stderr) +- for scan.Scan() { +- line := scan.Text() +- if strings.Contains(line, "mcp http server listening") { +- ready <- true +- } +- fmt.Fprintln(os.Stderr, line) +- } +- if err := scan.Err(); err != nil { +- t.Logf("reading from pipe: %v", err) +- } +- }() +- +- <-ready +- client := mcp.NewClient(&mcp.Implementation{Name: "client", Version: "v0.0.1"}, nil) +- ctx := t.Context() +- mcpSession, err := client.Connect(ctx, &mcp.SSEClientTransport{Endpoint: "http://" + addr}, nil) +- if err != nil { +- t.Fatalf("connecting to server: %v", err) +- } +- defer func() { +- if err := mcpSession.Close(); err != nil { +- t.Errorf("closing MCP connection: %v", err) +- } +- }() +- +- var ( +- tool = "go_file_context" +- args = map[string]any{"file": filepath.Join(tree, "a.go")} +- ) +- res, err := mcpSession.CallTool(ctx, &mcp.CallToolParams{Name: tool, Arguments: args}) +- if err != nil { +- t.Fatal(err) +- } +- got := resultText(t, res) +- want := "example.com" +- if !strings.Contains(got, want) { +- t.Errorf("CallTool(%s, %v) = %+v, want containing %q", tool, args, got, want) +- } +-} +- +-func TestMCPVulncheckCommand(t *testing.T) { +- if !supportsFsnotify(runtime.GOOS) { +- // See golang/go#74580 +- t.Skipf("skipping on %s; fsnotify is not supported", runtime.GOOS) +- } +- testenv.NeedsTool(t, "go") +- const proxyData = ` +--- example.com/vulnmod@v1.0.0/go.mod -- +-module example.com/vulnmod +-go 1.18 +--- example.com/vulnmod@v1.0.0/vuln.go -- +-package vulnmod +- +-// VulnFunc is a vulnerable function. +-func VulnFunc() {} +-` +- const vulnData = ` +--- GO-TEST-0001.yaml -- +-modules: +- - module: example.com/vulnmod +- versions: +- - introduced: "1.0.0" +- packages: +- - package: example.com/vulnmod +- symbols: +- - VulnFunc +-` +- proxyArchive := txtar.Parse([]byte(proxyData)) +- proxyFiles := make(map[string][]byte) +- for _, f := range proxyArchive.Files { +- proxyFiles[f.Name] = f.Data +- } +- goproxy, err := fake.WriteProxy(t.TempDir(), proxyFiles) +- if err != nil { +- t.Fatal(err) +- } +- +- db, err := vulntest.NewDatabase(context.Background(), []byte(vulnData)) +- if err != nil { +- t.Fatal(err) +- } +- defer db.Clean() +- +- tree := writeTree(t, ` +--- go.mod -- +-module example.com/user +-go 1.18 +-require example.com/vulnmod v1.0.0 +--- main.go -- +-package main +-import "example.com/vulnmod" +-func main() { +- vulnmod.VulnFunc() +-} +-`) +- +- // Update go.sum before running gopls, to avoid load failures. 
+- tidyCmd := exec.CommandContext(t.Context(), "go", "mod", "tidy") +- tidyCmd.Dir = tree +- tidyCmd.Env = append(os.Environ(), "GOPROXY="+goproxy, "GOSUMDB=off") +- if output, err := tidyCmd.CombinedOutput(); err != nil { +- t.Fatalf("go mod tidy failed: %v\n%s", err, output) +- } +- +- goplsCmd := exec.Command(os.Args[0], "mcp") +- goplsCmd.Env = append(os.Environ(), +- "ENTRYPOINT=goplsMain", +- "GOPROXY="+goproxy, +- "GOSUMDB=off", +- "GOVULNDB="+db.URI(), +- ) +- goplsCmd.Dir = tree +- +- ctx := t.Context() +- client := mcp.NewClient(&mcp.Implementation{Name: "client", Version: "v0.0.1"}, nil) +- mcpSession, err := client.Connect(ctx, &mcp.CommandTransport{Command: goplsCmd}, nil) +- if err != nil { +- t.Fatal(err) +- } +- defer func() { +- if err := mcpSession.Close(); err != nil { +- t.Errorf("closing MCP connection: %v", err) +- } +- }() +- +- res, err := mcpSession.CallTool(ctx, &mcp.CallToolParams{Name: "go_vulncheck", Arguments: map[string]any{}}) +- if err != nil { +- t.Fatal(err) +- } +- jsonBytes, err := json.Marshal(res.StructuredContent) +- if err != nil { +- t.Fatal(err) +- } +- +- var result internal_mcp.VulncheckResultOutput +- if err := json.Unmarshal(jsonBytes, &result); err != nil { +- t.Fatal(err) +- } +- if len(result.Findings) != 1 { +- t.Errorf("expected 1 finding, got %d", len(result.Findings)) +- } else { +- finding := result.Findings[0] +- if finding.ID != "GO-TEST-0001" { +- t.Errorf("expected ID 'GO-TEST-0001', got %q", finding.ID) +- } +- expectedPackages := []string{"Go standard library", "example.com/vulnmod"} +- if !slices.Equal(finding.AffectedPackages, expectedPackages) { +- t.Errorf("expected affected packages %v, got %v", expectedPackages, finding.AffectedPackages) +- } +- } +- +- if result.Logs == "" { +- t.Errorf("expected logs to be non-empty") +- } else { +- t.Logf("Logs:\n%s", result.Logs) +- } +-} +- +-// resultText concatenates the textual content of the given result, reporting +-// an error if any content values are non-textual. +-func resultText(t *testing.T, res *mcp.CallToolResult) string { +- t.Helper() +- +- var buf bytes.Buffer +- for _, content := range res.Content { +- if c, ok := content.(*mcp.TextContent); ok { +- fmt.Fprintf(&buf, "%s\n", c.Text) +- } else { +- t.Errorf("Not text content: %T", content) +- } +- } +- return buf.String() +-} +- +-// getRandomPort returns the number of a random available port. Inherently racy: +-// nothing stops another process from listening on it - but this should be fine +-// for testing purposes. +-func getRandomPort() int { +- listener, err := net.Listen("tcp", "localhost:0") +- if err != nil { +- panic(err) +- } +- defer listener.Close() +- return listener.Addr().(*net.TCPAddr).Port +-} +- +-// supportsFsnotify returns true if fsnotify supports the os. +-func supportsFsnotify(os string) bool { +- return os == "darwin" || os == "linux" || os == "windows" +-} +diff -urN a/gopls/internal/cmd/parsespan.go b/gopls/internal/cmd/parsespan.go +--- a/gopls/internal/cmd/parsespan.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/cmd/parsespan.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,106 +0,0 @@ +-// Copyright 2019 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package cmd +- +-import ( +- "strconv" +- "strings" +- "unicode/utf8" +- +- "golang.org/x/tools/gopls/internal/protocol" +-) +- +-// parseSpan returns the location represented by the input. +-// Only file paths are accepted, not URIs. 
+-// The returned span will be normalized, and thus if printed may produce a +-// different string. +-func parseSpan(input string) span { +- uri := protocol.URIFromPath +- +- // :0:0#0-0:0#0 +- valid := input +- var hold, offset int +- hadCol := false +- suf := rstripSuffix(input) +- if suf.sep == "#" { +- offset = suf.num +- suf = rstripSuffix(suf.remains) +- } +- if suf.sep == ":" { +- valid = suf.remains +- hold = suf.num +- hadCol = true +- suf = rstripSuffix(suf.remains) +- } +- switch { +- case suf.sep == ":": +- return newSpan(uri(suf.remains), newPoint(suf.num, hold, offset), point{}) +- case suf.sep == "-": +- // we have a span, fall out of the case to continue +- default: +- // separator not valid, rewind to either the : or the start +- return newSpan(uri(valid), newPoint(hold, 0, offset), point{}) +- } +- // only the span form can get here +- // at this point we still don't know what the numbers we have mean +- // if have not yet seen a : then we might have either a line or a column depending +- // on whether start has a column or not +- // we build an end point and will fix it later if needed +- end := newPoint(suf.num, hold, offset) +- hold, offset = 0, 0 +- suf = rstripSuffix(suf.remains) +- if suf.sep == "#" { +- offset = suf.num +- suf = rstripSuffix(suf.remains) +- } +- if suf.sep != ":" { +- // turns out we don't have a span after all, rewind +- return newSpan(uri(valid), end, point{}) +- } +- valid = suf.remains +- hold = suf.num +- suf = rstripSuffix(suf.remains) +- if suf.sep != ":" { +- // line#offset only +- return newSpan(uri(valid), newPoint(hold, 0, offset), end) +- } +- // we have a column, so if end only had one number, it is also the column +- if !hadCol { +- end = newPoint(suf.num, end.v.Line, end.v.Offset) +- } +- return newSpan(uri(suf.remains), newPoint(suf.num, hold, offset), end) +-} +- +-type suffix struct { +- remains string +- sep string +- num int +-} +- +-func rstripSuffix(input string) suffix { +- if len(input) == 0 { +- return suffix{"", "", -1} +- } +- remains := input +- +- // Remove optional trailing decimal number. +- num := -1 +- last := strings.LastIndexFunc(remains, func(r rune) bool { return r < '0' || r > '9' }) +- if last >= 0 && last < len(remains)-1 { +- number, err := strconv.ParseInt(remains[last+1:], 10, 64) +- if err == nil { +- num = int(number) +- remains = remains[:last+1] +- } +- } +- // now see if we have a trailing separator +- r, w := utf8.DecodeLastRuneInString(remains) +- // TODO(adonovan): this condition is clearly wrong. Should the third byte be '-'? +- if r != ':' && r != '#' && r == '#' { +- return suffix{input, "", -1} +- } +- remains = remains[:len(remains)-w] +- return suffix{remains, string(r), num} +-} +diff -urN a/gopls/internal/cmd/prepare_rename.go b/gopls/internal/cmd/prepare_rename.go +--- a/gopls/internal/cmd/prepare_rename.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/cmd/prepare_rename.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,79 +0,0 @@ +-// Copyright 2019 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package cmd +- +-import ( +- "context" +- "errors" +- "flag" +- "fmt" +- +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/internal/tool" +-) +- +-// prepareRename implements the prepare_rename verb for gopls. 
+-type prepareRename struct { +- app *Application +-} +- +-func (r *prepareRename) Name() string { return "prepare_rename" } +-func (r *prepareRename) Parent() string { return r.app.Name() } +-func (r *prepareRename) Usage() string { return "" } +-func (r *prepareRename) ShortHelp() string { return "test validity of a rename operation at location" } +-func (r *prepareRename) DetailedHelp(f *flag.FlagSet) { +- fmt.Fprint(f.Output(), ` +-Example: +- +- $ # 1-indexed location (:line:column or :#offset) of the target identifier +- $ gopls prepare_rename helper/helper.go:8:6 +- $ gopls prepare_rename helper/helper.go:#53 +-`) +- printFlagDefaults(f) +-} +- +-// ErrInvalidRenamePosition is returned when prepareRename is run at a position that +-// is not a candidate for renaming. +-var ErrInvalidRenamePosition = errors.New("request is not valid at the given position") +- +-func (r *prepareRename) Run(ctx context.Context, args ...string) error { +- if len(args) != 1 { +- return tool.CommandLineErrorf("prepare_rename expects 1 argument (file)") +- } +- +- cli, _, err := r.app.connect(ctx) +- if err != nil { +- return err +- } +- defer cli.terminate(ctx) +- +- from := parseSpan(args[0]) +- file, err := cli.openFile(ctx, from.URI()) +- if err != nil { +- return err +- } +- loc, err := file.spanLocation(from) +- if err != nil { +- return err +- } +- p := protocol.PrepareRenameParams{ +- TextDocumentPositionParams: protocol.LocationTextDocumentPositionParams(loc), +- } +- result, err := cli.server.PrepareRename(ctx, &p) +- if err != nil { +- return fmt.Errorf("prepare_rename failed: %w", err) +- } +- if result == nil { +- return ErrInvalidRenamePosition +- } +- +- s, err := file.rangeSpan(result.Range) +- if err != nil { +- return err +- } +- +- fmt.Println(s) +- return nil +-} +diff -urN a/gopls/internal/cmd/references.go b/gopls/internal/cmd/references.go +--- a/gopls/internal/cmd/references.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/cmd/references.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,91 +0,0 @@ +-// Copyright 2019 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. 
+- +-package cmd +- +-import ( +- "context" +- "flag" +- "fmt" +- "sort" +- +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/internal/tool" +-) +- +-// references implements the references verb for gopls +-type references struct { +- IncludeDeclaration bool `flag:"d,declaration" help:"include the declaration of the specified identifier in the results"` +- +- app *Application +-} +- +-func (r *references) Name() string { return "references" } +-func (r *references) Parent() string { return r.app.Name() } +-func (r *references) Usage() string { return "[references-flags] " } +-func (r *references) ShortHelp() string { return "display selected identifier's references" } +-func (r *references) DetailedHelp(f *flag.FlagSet) { +- fmt.Fprint(f.Output(), ` +-Example: +- +- $ # 1-indexed location (:line:column or :#offset) of the target identifier +- $ gopls references helper/helper.go:8:6 +- $ gopls references helper/helper.go:#53 +- +-references-flags: +-`) +- printFlagDefaults(f) +-} +- +-func (r *references) Run(ctx context.Context, args ...string) error { +- if len(args) != 1 { +- return tool.CommandLineErrorf("references expects 1 argument (position)") +- } +- +- cli, _, err := r.app.connect(ctx) +- if err != nil { +- return err +- } +- defer cli.terminate(ctx) +- +- from := parseSpan(args[0]) +- file, err := cli.openFile(ctx, from.URI()) +- if err != nil { +- return err +- } +- loc, err := file.spanLocation(from) +- if err != nil { +- return err +- } +- p := protocol.ReferenceParams{ +- Context: protocol.ReferenceContext{ +- IncludeDeclaration: r.IncludeDeclaration, +- }, +- TextDocumentPositionParams: protocol.LocationTextDocumentPositionParams(loc), +- } +- locations, err := cli.server.References(ctx, &p) +- if err != nil { +- return err +- } +- var spans []string +- for _, l := range locations { +- f, err := cli.openFile(ctx, l.URI) +- if err != nil { +- return err +- } +- // convert location to span for user-friendly 1-indexed line +- // and column numbers +- span, err := f.locationSpan(l) +- if err != nil { +- return err +- } +- spans = append(spans, fmt.Sprint(span)) +- } +- +- sort.Strings(spans) +- for _, s := range spans { +- fmt.Println(s) +- } +- return nil +-} +diff -urN a/gopls/internal/cmd/remote.go b/gopls/internal/cmd/remote.go +--- a/gopls/internal/cmd/remote.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/cmd/remote.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,164 +0,0 @@ +-// Copyright 2020 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package cmd +- +-import ( +- "context" +- "encoding/json" +- "errors" +- "flag" +- "fmt" +- "log" +- "os" +- +- "golang.org/x/tools/gopls/internal/lsprpc" +- "golang.org/x/tools/gopls/internal/protocol/command" +-) +- +-type remote struct { +- app *Application +- subcommands +- +- // For backward compatibility, allow aliasing this command (it was previously +- // called 'inspect'). +- // +- // TODO(rFindley): delete this after allowing some transition time in case +- // there were any users of 'inspect' (I suspect not). 
+- alias string +-} +- +-func newRemote(app *Application, alias string) *remote { +- return &remote{ +- app: app, +- subcommands: subcommands{ +- &listSessions{app: app}, +- &startDebugging{app: app}, +- }, +- alias: alias, +- } +-} +- +-func (r *remote) Name() string { +- if r.alias != "" { +- return r.alias +- } +- return "remote" +-} +- +-func (r *remote) Parent() string { return r.app.Name() } +- +-func (r *remote) ShortHelp() string { +- short := "interact with the gopls daemon" +- if r.alias != "" { +- short += " (deprecated: use 'remote')" +- } +- return short +-} +- +-// listSessions is an inspect subcommand to list current sessions. +-type listSessions struct { +- app *Application +-} +- +-func (c *listSessions) Name() string { return "sessions" } +-func (c *listSessions) Parent() string { return c.app.Name() } +-func (c *listSessions) Usage() string { return "" } +-func (c *listSessions) ShortHelp() string { +- return "print information about current gopls sessions" +-} +- +-const listSessionsExamples = ` +-Examples: +- +-1) list sessions for the default daemon: +- +-$ gopls -remote=auto remote sessions +-or just +-$ gopls remote sessions +- +-2) list sessions for a specific daemon: +- +-$ gopls -remote=localhost:8082 remote sessions +-` +- +-func (c *listSessions) DetailedHelp(f *flag.FlagSet) { +- fmt.Fprint(f.Output(), listSessionsExamples) +- printFlagDefaults(f) +-} +- +-func (c *listSessions) Run(ctx context.Context, args ...string) error { +- remote := c.app.Remote +- if remote == "" { +- remote = "auto" +- } +- state, err := lsprpc.QueryServerState(ctx, remote) +- if err != nil { +- return err +- } +- v, err := json.MarshalIndent(state, "", "\t") +- if err != nil { +- log.Fatal(err) +- } +- os.Stdout.Write(v) +- return nil +-} +- +-type startDebugging struct { +- app *Application +-} +- +-func (c *startDebugging) Name() string { return "debug" } +-func (c *startDebugging) Usage() string { return "[host:port]" } +-func (c *startDebugging) ShortHelp() string { +- return "start the debug server" +-} +- +-const startDebuggingExamples = ` +-Examples: +- +-1) start a debug server for the default daemon, on an arbitrary port: +- +-$ gopls -remote=auto remote debug +-or just +-$ gopls remote debug +- +-2) start for a specific daemon, on a specific port: +- +-$ gopls -remote=localhost:8082 remote debug localhost:8083 +-` +- +-func (c *startDebugging) DetailedHelp(f *flag.FlagSet) { +- fmt.Fprint(f.Output(), startDebuggingExamples) +- printFlagDefaults(f) +-} +- +-func (c *startDebugging) Run(ctx context.Context, args ...string) error { +- if len(args) > 1 { +- fmt.Fprintln(os.Stderr, c.Usage()) +- return errors.New("invalid usage") +- } +- remote := c.app.Remote +- if remote == "" { +- remote = "auto" +- } +- debugAddr := "" +- if len(args) > 0 { +- debugAddr = args[0] +- } +- debugArgs := command.DebuggingArgs{ +- Addr: debugAddr, +- } +- var result command.DebuggingResult +- if err := lsprpc.ExecuteCommand(ctx, remote, command.StartDebugging.String(), debugArgs, &result); err != nil { +- return err +- } +- if len(result.URLs) == 0 { +- return errors.New("no debugging URLs") +- } +- for _, url := range result.URLs { +- fmt.Printf("debugging on %s\n", url) +- } +- return nil +-} +diff -urN a/gopls/internal/cmd/rename.go b/gopls/internal/cmd/rename.go +--- a/gopls/internal/cmd/rename.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/cmd/rename.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,73 +0,0 @@ +-// Copyright 2019 The Go Authors. All rights reserved. 
+-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package cmd +- +-import ( +- "context" +- "flag" +- "fmt" +- +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/internal/tool" +-) +- +-// rename implements the rename verb for gopls. +-type rename struct { +- EditFlags +- app *Application +-} +- +-func (r *rename) Name() string { return "rename" } +-func (r *rename) Parent() string { return r.app.Name() } +-func (r *rename) Usage() string { return "[rename-flags] " } +-func (r *rename) ShortHelp() string { return "rename selected identifier" } +-func (r *rename) DetailedHelp(f *flag.FlagSet) { +- fmt.Fprint(f.Output(), ` +-Example: +- +- $ # 1-based location (:line:column or :#position) of the thing to change +- $ gopls rename helper/helper.go:8:6 Foo +- $ gopls rename helper/helper.go:#53 Foo +- +-rename-flags: +-`) +- printFlagDefaults(f) +-} +- +-// Run renames the specified identifier and either; +-// - if -w is specified, updates the file(s) in place; +-// - if -d is specified, prints out unified diffs of the changes; or +-// - otherwise, prints the new versions to stdout. +-func (r *rename) Run(ctx context.Context, args ...string) error { +- if len(args) != 2 { +- return tool.CommandLineErrorf("rename expects 2 arguments (position, new name)") +- } +- r.app.editFlags = &r.EditFlags +- cli, _, err := r.app.connect(ctx) +- if err != nil { +- return err +- } +- defer cli.terminate(ctx) +- +- from := parseSpan(args[0]) +- file, err := cli.openFile(ctx, from.URI()) +- if err != nil { +- return err +- } +- loc, err := file.spanLocation(from) +- if err != nil { +- return err +- } +- p := protocol.RenameParams{ +- TextDocument: protocol.TextDocumentIdentifier{URI: loc.URI}, +- Position: loc.Range.Start, +- NewName: args[1], +- } +- edit, err := cli.server.Rename(ctx, &p) +- if err != nil { +- return err +- } +- return cli.applyWorkspaceEdit(edit) +-} +diff -urN a/gopls/internal/cmd/semantictokens.go b/gopls/internal/cmd/semantictokens.go +--- a/gopls/internal/cmd/semantictokens.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/cmd/semantictokens.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,213 +0,0 @@ +-// Copyright 2020 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package cmd +- +-import ( +- "bytes" +- "context" +- "flag" +- "fmt" +- "log" +- "os" +- "unicode/utf8" +- +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/gopls/internal/protocol/semtok" +- "golang.org/x/tools/gopls/internal/settings" +-) +- +-// generate semantic tokens and interpolate them in the file +- +-// The output is the input file decorated with comments showing the +-// syntactic tokens. The comments are stylized: +-// /*,,[ is the length of the token in runes, is one +-// of the supported semantic token types, and " } +-func (c *semanticToken) ShortHelp() string { return "show semantic tokens for the specified file" } +-func (c *semanticToken) DetailedHelp(f *flag.FlagSet) { +- fmt.Fprint(f.Output(), ` +-Example: show the semantic tokens for this file: +- +- $ gopls semtok internal/cmd/semtok.go +-`) +- printFlagDefaults(f) +-} +- +-// Run performs the semtok on the files specified by args and prints the +-// results to stdout in the format described above. 
+-func (c *semanticToken) Run(ctx context.Context, args ...string) error { +- if len(args) != 1 { +- return fmt.Errorf("expected one file name, got %d", len(args)) +- } +- // perhaps simpler if app had just had a FlagSet member +- origOptions := c.app.options +- c.app.options = func(opts *settings.Options) { +- if origOptions != nil { +- origOptions(opts) +- } +- opts.SemanticTokens = true +- } +- cli, _, err := c.app.connect(ctx) +- if err != nil { +- return err +- } +- defer cli.terminate(ctx) +- uri := protocol.URIFromPath(args[0]) +- file, err := cli.openFile(ctx, uri) +- if err != nil { +- return err +- } +- +- lines := bytes.Split(file.mapper.Content, []byte{'\n'}) +- params := &protocol.SemanticTokensRangeParams{ +- TextDocument: protocol.TextDocumentIdentifier{ +- URI: uri, +- }, +- Range: protocol.Range{Start: protocol.Position{Line: 0, Character: 0}, +- End: protocol.Position{ +- Line: uint32(len(lines) - 1), +- Character: uint32(len(lines[len(lines)-1]))}, +- }, +- } +- resp, err := cli.server.SemanticTokensRange(ctx, params) // use Range to avoid limits on Full +- if err != nil { +- return err +- } +- legend := cli.initializeResult.Capabilities.SemanticTokensProvider.(protocol.SemanticTokensOptions).Legend +- return decorate(legend, file, resp.Data) +-} +- +-// mark provides a human-readable representation of protocol.SemanticTokens. +-// It translates token types and modifiers to strings instead of uint32 values. +-type mark struct { +- line, offset int // 1-based, from RangeSpan +- len int // bytes, not runes +- typ semtok.Type +- mods []semtok.Modifier +-} +- +-// prefixes for semantic token comments +-const ( +- SemanticLeft = "/*⇐" +- SemanticRight = "/*⇒" +-) +- +-func markLine(m mark, lines [][]byte) { +- l := lines[m.line-1] // mx is 1-based +- length := utf8.RuneCount(l[m.offset-1 : m.offset-1+m.len]) +- splitAt := m.offset - 1 +- insert := "" +- if m.typ == "namespace" && m.offset-1+m.len < len(l) && l[m.offset-1+m.len] == '"' { +- // it is the last component of an import spec +- // cannot put a comment inside a string +- insert = fmt.Sprintf("%s%d,namespace,[]*/", SemanticLeft, length) +- splitAt = m.offset + m.len +- } else { +- // be careful not to generate //* +- spacer := "" +- if splitAt-1 >= 0 && l[splitAt-1] == '/' { +- spacer = " " +- } +- insert = fmt.Sprintf("%s%s%d,%s,%v*/", spacer, SemanticRight, length, m.typ, m.mods) +- } +- x := append([]byte(insert), l[splitAt:]...) +- l = append(l[:splitAt], x...) +- lines[m.line-1] = l +-} +- +-// decorate translates semantic token data (protocol.SemanticTokens) from its +-// raw []uint32 format into a human-readable representation and prints it to stdout. 
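The raw []uint32 payload mentioned above is the standard LSP semantic-token encoding: five integers per token (delta line, delta start character, length, type index, modifier bitset), which the newMarks code below converts back to absolute positions. As a minimal, self-contained sketch of that decoding (the sample data and the helper name decode are illustrative, not part of the deleted code):

package main

import "fmt"

// decode turns LSP semantic-token deltas (five uint32s per token:
// deltaLine, deltaStartChar, length, type, modifiers) into absolute
// line/character pairs, mirroring the first loop of newMarks below.
func decode(data []uint32) (lines, chars []uint32) {
	var line, char uint32
	for i := 0; 5*i < len(data); i++ {
		line += data[5*i+0]
		if data[5*i+0] > 0 {
			char = 0 // a new line resets the running character offset
		}
		char += data[5*i+1]
		lines = append(lines, line)
		chars = append(chars, char)
	}
	return lines, chars
}

func main() {
	// Two tokens: one at line 2, char 4, and one three chars later on the same line.
	fmt.Println(decode([]uint32{2, 4, 5, 0, 0, 0, 3, 2, 1, 0})) // [2 2] [4 7]
}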
+-func decorate(legend protocol.SemanticTokensLegend, file *cmdFile, data []uint32) error { +- marks := newMarks(legend, file, data) +- if len(marks) == 0 { +- return nil +- } +- lines := bytes.Split(file.mapper.Content, []byte{'\n'}) +- for i := len(marks) - 1; i >= 0; i-- { +- mx := marks[i] +- markLine(mx, lines) +- } +- os.Stdout.Write(bytes.Join(lines, []byte{'\n'})) +- return nil +-} +- +-func newMarks(legend protocol.SemanticTokensLegend, file *cmdFile, data []uint32) []mark { +- ans := []mark{} +- // the following two loops could be merged, at the cost +- // of making the logic slightly more complicated to understand +- // first, convert from deltas to absolute, in LSP coordinates +- lspLine := make([]uint32, len(data)/5) +- lspChar := make([]uint32, len(data)/5) +- var line, char uint32 +- for i := 0; 5*i < len(data); i++ { +- lspLine[i] = line + data[5*i+0] +- if data[5*i+0] > 0 { +- char = 0 +- } +- lspChar[i] = char + data[5*i+1] +- char = lspChar[i] +- line = lspLine[i] +- } +- // second, convert to gopls coordinates +- for i := 0; 5*i < len(data); i++ { +- pr := protocol.Range{ +- Start: protocol.Position{ +- Line: lspLine[i], +- Character: lspChar[i], +- }, +- End: protocol.Position{ +- Line: lspLine[i], +- Character: lspChar[i] + data[5*i+2], +- }, +- } +- spn, err := file.rangeSpan(pr) +- if err != nil { +- log.Fatal(err) +- } +- +- var mods []semtok.Modifier +- { +- n := int(data[5*i+4]) +- for i, mod := range legend.TokenModifiers { +- if (n & (1 << i)) != 0 { +- mods = append(mods, semtok.Modifier(mod)) +- } +- } +- } +- +- m := mark{ +- line: spn.Start().Line(), +- offset: spn.Start().Column(), +- len: spn.End().Column() - spn.Start().Column(), +- typ: semtok.Type(legend.TokenTypes[data[5*i+3]]), +- mods: mods, +- } +- ans = append(ans, m) +- } +- return ans +-} +diff -urN a/gopls/internal/cmd/serve.go b/gopls/internal/cmd/serve.go +--- a/gopls/internal/cmd/serve.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/cmd/serve.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,197 +0,0 @@ +-// Copyright 2018 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package cmd +- +-import ( +- "context" +- "errors" +- "flag" +- "fmt" +- "io" +- "log" +- "os" +- "time" +- +- "golang.org/x/sync/errgroup" +- "golang.org/x/tools/gopls/internal/cache" +- "golang.org/x/tools/gopls/internal/debug" +- "golang.org/x/tools/gopls/internal/lsprpc" +- "golang.org/x/tools/gopls/internal/mcp" +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/gopls/internal/util/fakenet" +- "golang.org/x/tools/internal/jsonrpc2" +- "golang.org/x/tools/internal/tool" +-) +- +-// Serve is a struct that exposes the configurable parts of the LSP and MCP +-// server as flags, in the right form for tool.Main to consume. +-type Serve struct { +- Logfile string `flag:"logfile" help:"filename to log to. if value is \"auto\", then logging to a default output file is enabled"` +- Mode string `flag:"mode" help:"no effect"` +- Port int `flag:"port" help:"port on which to run gopls for debugging purposes"` +- Address string `flag:"listen" help:"address on which to listen for remote connections. If prefixed by 'unix;', the subsequent address is assumed to be a unix domain socket. 
Otherwise, TCP is used."` +- IdleTimeout time.Duration `flag:"listen.timeout" help:"when used with -listen, shut down the server when there are no connected clients for this duration"` +- Trace bool `flag:"rpc.trace" help:"print the full rpc trace in lsp inspector format"` +- Debug string `flag:"debug" help:"serve debug information on the supplied address"` +- +- RemoteListenTimeout time.Duration `flag:"remote.listen.timeout" help:"when used with -remote=auto, the -listen.timeout value used to start the daemon"` +- RemoteDebug string `flag:"remote.debug" help:"when used with -remote=auto, the -debug value used to start the daemon"` +- RemoteLogfile string `flag:"remote.logfile" help:"when used with -remote=auto, the -logfile value used to start the daemon"` +- +- // MCP Server related configurations. +- MCPAddress string `flag:"mcp.listen" help:"experimental: address on which to listen for model context protocol connections. If port is localhost:0, pick a random port in localhost instead."` +- +- app *Application +-} +- +-func (s *Serve) Name() string { return "serve" } +-func (s *Serve) Parent() string { return s.app.Name() } +-func (s *Serve) Usage() string { return "[server-flags]" } +-func (s *Serve) ShortHelp() string { +- return "run a server for Go code using the Language Server Protocol" +-} +-func (s *Serve) DetailedHelp(f *flag.FlagSet) { +- fmt.Fprint(f.Output(), ` gopls [flags] [server-flags] +- +-The server communicates using JSONRPC2 on stdin and stdout, and is intended to be run directly as +-a child of an editor process. +- +-server-flags: +-`) +- printFlagDefaults(f) +-} +- +-func (s *Serve) remoteArgs(network, address string) []string { +- args := []string{"serve", +- "-listen", fmt.Sprintf(`%s;%s`, network, address), +- } +- if s.RemoteDebug != "" { +- args = append(args, "-debug", s.RemoteDebug) +- } +- if s.RemoteListenTimeout != 0 { +- args = append(args, "-listen.timeout", s.RemoteListenTimeout.String()) +- } +- if s.RemoteLogfile != "" { +- args = append(args, "-logfile", s.RemoteLogfile) +- } +- return args +-} +- +-// Run configures a server based on the flags, and then runs it. +-// It blocks until the server shuts down. +-func (s *Serve) Run(ctx context.Context, args ...string) error { +- if len(args) > 0 { +- return tool.CommandLineErrorf("server does not take arguments, got %v", args) +- } +- +- di := debug.GetInstance(ctx) +- isDaemon := s.Address != "" || s.Port != 0 +- if di != nil { +- closeLog, err := di.SetLogFile(s.Logfile, isDaemon) +- if err != nil { +- return err +- } +- defer closeLog() +- di.ServerAddress = s.Address +- di.Serve(ctx, s.Debug) +- } +- +- var ( +- ss jsonrpc2.StreamServer +- sessions mcp.Sessions // if non-nil, handle MCP sessions +- ) +- if s.app.Remote != "" { +- var err error +- ss, err = lsprpc.NewForwarder(s.app.Remote, s.remoteArgs) +- if err != nil { +- return fmt.Errorf("creating forwarder: %w", err) +- } +- } else { +- lsprpcServer := lsprpc.NewStreamServer(cache.New(nil), isDaemon, s.app.options) +- ss = lsprpcServer +- if s.MCPAddress != "" { +- sessions = lsprpcServer +- } +- } +- +- group, ctx := errgroup.WithContext(ctx) +- // Indicate success by a special error so that successful termination +- // of one server causes cancellation of the other. +- success := errors.New("success") +- +- // Start MCP server. 
+- if sessions != nil { +- countAttachedMCP.Inc() +- group.Go(func() (err error) { +- defer func() { +- if err == nil { +- err = success +- } +- }() +- +- return mcp.Serve(ctx, s.MCPAddress, sessions, isDaemon) +- }) +- } +- +- // Start LSP server. +- group.Go(func() (err error) { +- defer func() { +- if err == nil { +- err = success +- } +- }() +- +- var network, addr string +- if s.Address != "" { +- network, addr = lsprpc.ParseAddr(s.Address) +- } +- if s.Port != 0 { +- network = "tcp" +- // TODO(adonovan): should gopls ever be listening on network +- // sockets, or only local ones? +- // +- // Ian says this was added in anticipation of +- // something related to "VS Code remote" that turned +- // out to be unnecessary. So I propose we limit it to +- // localhost, if only so that we avoid the macOS +- // firewall prompt. +- // +- // Hana says: "s.Address is for the remote access (LSP) +- // and s.Port is for debugging purpose (according to +- // the Server type documentation). I am not sure why the +- // existing code here is mixing up and overwriting addr. +- // For debugging endpoint, I think localhost makes perfect sense." +- // +- // TODO(adonovan): disentangle Address and Port, +- // and use only localhost for the latter. +- addr = fmt.Sprintf(":%v", s.Port) +- } +- +- if addr != "" { +- log.Printf("Gopls LSP daemon: listening on %s network, address %s...", network, addr) +- defer log.Printf("Gopls LSP daemon: exiting") +- return jsonrpc2.ListenAndServe(ctx, network, addr, ss, s.IdleTimeout) +- } else { +- stream := jsonrpc2.NewHeaderStream(fakenet.NewConn("stdio", os.Stdin, os.Stdout)) +- if s.Trace && di != nil { +- stream = protocol.LoggingStream(stream, di.LogWriter) +- } +- conn := jsonrpc2.NewConn(stream) +- if err := ss.ServeStream(ctx, conn); errors.Is(err, io.EOF) { +- return nil +- } else { +- return err +- } +- } +- }) +- +- // Wait for all servers to terminate, returning only the first error +- // encountered. Subsequent errors are typically due to context cancellation +- // and are disregarded. +- if err := group.Wait(); err != nil && !errors.Is(err, success) { +- return err +- } +- return nil +-} +diff -urN a/gopls/internal/cmd/signature.go b/gopls/internal/cmd/signature.go +--- a/gopls/internal/cmd/signature.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/cmd/signature.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,87 +0,0 @@ +-// Copyright 2019 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. 
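The serve.go code above coordinates the LSP and MCP servers with an errgroup and a sentinel "success" error: a clean exit returns a non-nil value so the shared context is cancelled and the sibling server stops, while Wait treats that sentinel as success. A small stand-alone sketch of that pattern (the goroutine bodies are placeholders, not the deleted server code):

package main

import (
	"context"
	"errors"
	"fmt"

	"golang.org/x/sync/errgroup"
)

func main() {
	group, ctx := errgroup.WithContext(context.Background())

	// Returning this non-nil sentinel cancels the group's context,
	// stopping the sibling goroutine, without being reported as a failure.
	success := errors.New("success")

	group.Go(func() error {
		// Pretend this server finished cleanly.
		return success
	})
	group.Go(func() error {
		<-ctx.Done() // unblocks once the sibling returns
		return success
	})

	if err := group.Wait(); err != nil && !errors.Is(err, success) {
		fmt.Println("failed:", err)
		return
	}
	fmt.Println("both servers terminated cleanly")
}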
+- +-package cmd +- +-import ( +- "context" +- "flag" +- "fmt" +- +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/internal/tool" +-) +- +-// signature implements the signature verb for gopls +-type signature struct { +- app *Application +-} +- +-func (r *signature) Name() string { return "signature" } +-func (r *signature) Parent() string { return r.app.Name() } +-func (r *signature) Usage() string { return "" } +-func (r *signature) ShortHelp() string { return "display selected identifier's signature" } +-func (r *signature) DetailedHelp(f *flag.FlagSet) { +- fmt.Fprint(f.Output(), ` +-Example: +- +- $ # 1-indexed location (:line:column or :#offset) of the target identifier +- $ gopls signature helper/helper.go:8:6 +- $ gopls signature helper/helper.go:#53 +-`) +- printFlagDefaults(f) +-} +- +-func (r *signature) Run(ctx context.Context, args ...string) error { +- if len(args) != 1 { +- return tool.CommandLineErrorf("signature expects 1 argument (position)") +- } +- +- cli, _, err := r.app.connect(ctx) +- if err != nil { +- return err +- } +- defer cli.terminate(ctx) +- +- from := parseSpan(args[0]) +- file, err := cli.openFile(ctx, from.URI()) +- if err != nil { +- return err +- } +- +- loc, err := file.spanLocation(from) +- if err != nil { +- return err +- } +- +- p := protocol.SignatureHelpParams{ +- TextDocumentPositionParams: protocol.LocationTextDocumentPositionParams(loc), +- } +- +- s, err := cli.server.SignatureHelp(ctx, &p) +- if err != nil { +- return err +- } +- +- if s == nil || len(s.Signatures) == 0 { +- return tool.CommandLineErrorf("%v: not a function", from) +- } +- +- // there is only ever one possible signature, +- // see toProtocolSignatureHelp in lsp/signature_help.go +- signature := s.Signatures[0] +- fmt.Printf("%s\n", signature.Label) +- switch x := signature.Documentation.Value.(type) { +- case string: +- if x != "" { +- fmt.Printf("\n%s\n", x) +- } +- case protocol.MarkupContent: +- if x.Value != "" { +- fmt.Printf("\n%s\n", x.Value) +- } +- } +- +- return nil +-} +diff -urN a/gopls/internal/cmd/spanformat_test.go b/gopls/internal/cmd/spanformat_test.go +--- a/gopls/internal/cmd/spanformat_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/cmd/spanformat_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,55 +0,0 @@ +-// Copyright 2019 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package cmd +- +-import ( +- "fmt" +- "path/filepath" +- "strings" +- "testing" +-) +- +-func TestSpanFormat(t *testing.T) { +- formats := []string{"%v", "%#v", "%+v"} +- +- // Element 0 is the input, and the elements 0-2 are the expected +- // output in [%v %#v %+v] formats. Thus the first must be in +- // canonical form (invariant under parseSpan + fmt.Sprint). +- // The '#' form displays offsets; the '+' form outputs a URI. +- // If len=4, element 0 is a noncanonical input and 1-3 are expected outputs. 
+- for _, test := range [][]string{ +- {"C:/file_a", "C:/file_a", "file:///C:/file_a:#0"}, +- {"C:/file_b:1:2", "C:/file_b:1:2", "file:///C:/file_b:1:2"}, +- {"C:/file_c:1000", "C:/file_c:1000", "file:///C:/file_c:1000:1"}, +- {"C:/file_d:14:9", "C:/file_d:14:9", "file:///C:/file_d:14:9"}, +- {"C:/file_e:1:2-7", "C:/file_e:1:2-7", "file:///C:/file_e:1:2-1:7"}, +- {"C:/file_f:500-502", "C:/file_f:500-502", "file:///C:/file_f:500:1-502:1"}, +- {"C:/file_g:3:7-8", "C:/file_g:3:7-8", "file:///C:/file_g:3:7-3:8"}, +- {"C:/file_h:3:7-4:8", "C:/file_h:3:7-4:8", "file:///C:/file_h:3:7-4:8"}, +- {"C:/file_i:#100", "C:/file_i:#100", "file:///C:/file_i:#100"}, +- {"C:/file_j:#26-#28", "C:/file_j:#26-#28", "file:///C:/file_j:#26-0#28"}, // 0#28? +- {"C:/file_h:3:7#26-4:8#37", // not canonical +- "C:/file_h:3:7-4:8", "C:/file_h:#26-#37", "file:///C:/file_h:3:7#26-4:8#37"}} { +- input := test[0] +- spn := parseSpan(input) +- wants := test[0:3] +- if len(test) == 4 { +- wants = test[1:4] +- } +- for i, format := range formats { +- want := toPath(wants[i]) +- if got := fmt.Sprintf(format, spn); got != want { +- t.Errorf("Sprintf(%q, %q) = %q, want %q", format, input, got, want) +- } +- } +- } +-} +- +-func toPath(value string) string { +- if strings.HasPrefix(value, "file://") { +- return value +- } +- return filepath.FromSlash(value) +-} +diff -urN a/gopls/internal/cmd/span.go b/gopls/internal/cmd/span.go +--- a/gopls/internal/cmd/span.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/cmd/span.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,237 +0,0 @@ +-// Copyright 2019 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package cmd +- +-// span and point represent positions and ranges in text files. +- +-import ( +- "encoding/json" +- "fmt" +- "path" +- "sort" +- "strings" +- +- "golang.org/x/tools/gopls/internal/protocol" +-) +- +-// A span represents a range of text within a source file. The start +-// and end points of a valid span may be hold either its byte offset, +-// or its (line, column) pair, or both. Columns are measured in bytes. +-// +-// Spans are appropriate in user interfaces (e.g. command-line tools) +-// and tests where a position is notated without access to the content +-// of the file. +-// +-// Use protocol.Mapper to convert between span and other +-// representations, such as go/token (also UTF-8) or the LSP protocol +-// (UTF-16). The latter requires access to file contents. +-// +-// See overview comments at ../protocol/mapper.go. +-type span struct { +- v _span +-} +- +-// point represents a single point within a file. +-// In general this should only be used as part of a span, as on its own it +-// does not carry enough information. +-type point struct { +- v _point +-} +- +-// The span_/point_ types have public fields to support JSON encoding, +-// but the span/point types hide these fields by defining methods that +-// shadow them. (This is used by a few of the command-line tool +-// subcommands, which emit spans and have a -json flag.) +-// +-// TODO(adonovan): simplify now that it's all internal to cmd. 
+- +-type _span struct { +- URI protocol.DocumentURI `json:"uri"` +- Start _point `json:"start"` +- End _point `json:"end"` +-} +- +-type _point struct { +- Line int `json:"line"` // 1-based line number +- Column int `json:"column"` // 1-based, UTF-8 codes (bytes) +- Offset int `json:"offset"` // 0-based byte offset +-} +- +-func newSpan(uri protocol.DocumentURI, start, end point) span { +- s := span{v: _span{URI: uri, Start: start.v, End: end.v}} +- s.v.clean() +- return s +-} +- +-func newPoint(line, col, offset int) point { +- p := point{v: _point{Line: line, Column: col, Offset: offset}} +- p.v.clean() +- return p +-} +- +-// sortSpans sorts spans into a stable but unspecified order. +-func sortSpans(spans []span) { +- sort.SliceStable(spans, func(i, j int) bool { +- return compare(spans[i], spans[j]) < 0 +- }) +-} +- +-// compare implements a three-valued ordered comparison of Spans. +-func compare(a, b span) int { +- // This is a textual comparison. It does not perform path +- // cleaning, case folding, resolution of symbolic links, +- // testing for existence, or any I/O. +- if cmp := strings.Compare(string(a.URI()), string(b.URI())); cmp != 0 { +- return cmp +- } +- if cmp := comparePoint(a.v.Start, b.v.Start); cmp != 0 { +- return cmp +- } +- return comparePoint(a.v.End, b.v.End) +-} +- +-func comparePoint(a, b _point) int { +- if !a.hasPosition() { +- if a.Offset < b.Offset { +- return -1 +- } +- if a.Offset > b.Offset { +- return 1 +- } +- return 0 +- } +- if a.Line < b.Line { +- return -1 +- } +- if a.Line > b.Line { +- return 1 +- } +- if a.Column < b.Column { +- return -1 +- } +- if a.Column > b.Column { +- return 1 +- } +- return 0 +-} +- +-func (s span) HasPosition() bool { return s.v.Start.hasPosition() } +-func (s span) HasOffset() bool { return s.v.Start.hasOffset() } +-func (s span) IsValid() bool { return s.v.Start.isValid() } +-func (s span) IsPoint() bool { return s.v.Start == s.v.End } +-func (s span) URI() protocol.DocumentURI { return s.v.URI } +-func (s span) Start() point { return point{s.v.Start} } +-func (s span) End() point { return point{s.v.End} } +-func (s *span) MarshalJSON() ([]byte, error) { return json.Marshal(&s.v) } +-func (s *span) UnmarshalJSON(b []byte) error { return json.Unmarshal(b, &s.v) } +- +-func (p point) HasPosition() bool { return p.v.hasPosition() } +-func (p point) HasOffset() bool { return p.v.hasOffset() } +-func (p point) IsValid() bool { return p.v.isValid() } +-func (p *point) MarshalJSON() ([]byte, error) { return json.Marshal(&p.v) } +-func (p *point) UnmarshalJSON(b []byte) error { return json.Unmarshal(b, &p.v) } +-func (p point) Line() int { +- if !p.v.hasPosition() { +- panic(fmt.Errorf("position not set in %v", p.v)) +- } +- return p.v.Line +-} +-func (p point) Column() int { +- if !p.v.hasPosition() { +- panic(fmt.Errorf("position not set in %v", p.v)) +- } +- return p.v.Column +-} +-func (p point) Offset() int { +- if !p.v.hasOffset() { +- panic(fmt.Errorf("offset not set in %v", p.v)) +- } +- return p.v.Offset +-} +- +-func (p _point) hasPosition() bool { return p.Line > 0 } +-func (p _point) hasOffset() bool { return p.Offset >= 0 } +-func (p _point) isValid() bool { return p.hasPosition() || p.hasOffset() } +-func (p _point) isZero() bool { +- return (p.Line == 1 && p.Column == 1) || (!p.hasPosition() && p.Offset == 0) +-} +- +-func (s *_span) clean() { +- //this presumes the points are already clean +- if !s.End.isValid() || (s.End == _point{}) { +- s.End = s.Start +- } +-} +- +-func (p *_point) clean() { +- if p.Line 
< 0 { +- p.Line = 0 +- } +- if p.Column <= 0 { +- if p.Line > 0 { +- p.Column = 1 +- } else { +- p.Column = 0 +- } +- } +- if p.Offset == 0 && (p.Line > 1 || p.Column > 1) { +- p.Offset = -1 +- } +-} +- +-// Format implements fmt.Formatter to print the Location in a standard form. +-// The format produced is one that can be read back in using parseSpan. +-// +-// TODO(adonovan): this is esoteric, and the formatting options are +-// never used outside of TestFormat. +-func (s span) Format(f fmt.State, c rune) { +- fullForm := f.Flag('+') +- preferOffset := f.Flag('#') +- // we should always have a uri, simplify if it is file format +- //TODO: make sure the end of the uri is unambiguous +- uri := string(s.v.URI) +- if c == 'f' { +- uri = path.Base(uri) +- } else if !fullForm { +- uri = s.v.URI.Path() +- } +- fmt.Fprint(f, uri) +- if !s.IsValid() || (!fullForm && s.v.Start.isZero() && s.v.End.isZero()) { +- return +- } +- // see which bits of start to write +- printOffset := s.HasOffset() && (fullForm || preferOffset || !s.HasPosition()) +- printLine := s.HasPosition() && (fullForm || !printOffset) +- printColumn := printLine && (fullForm || (s.v.Start.Column > 1 || s.v.End.Column > 1)) +- fmt.Fprint(f, ":") +- if printLine { +- fmt.Fprintf(f, "%d", s.v.Start.Line) +- } +- if printColumn { +- fmt.Fprintf(f, ":%d", s.v.Start.Column) +- } +- if printOffset { +- fmt.Fprintf(f, "#%d", s.v.Start.Offset) +- } +- // start is written, do we need end? +- if s.IsPoint() { +- return +- } +- // we don't print the line if it did not change +- printLine = fullForm || (printLine && s.v.End.Line > s.v.Start.Line) +- fmt.Fprint(f, "-") +- if printLine { +- fmt.Fprintf(f, "%d", s.v.End.Line) +- } +- if printColumn { +- if printLine { +- fmt.Fprint(f, ":") +- } +- fmt.Fprintf(f, "%d", s.v.End.Column) +- } +- if printOffset { +- fmt.Fprintf(f, "#%d", s.v.End.Offset) +- } +-} +diff -urN a/gopls/internal/cmd/stats.go b/gopls/internal/cmd/stats.go +--- a/gopls/internal/cmd/stats.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/cmd/stats.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,248 +0,0 @@ +-// Copyright 2023 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package cmd +- +-import ( +- "context" +- "encoding/json" +- "flag" +- "fmt" +- "go/token" +- "io/fs" +- "os" +- "path/filepath" +- "reflect" +- "runtime" +- "strings" +- "time" +- +- "golang.org/x/tools/gopls/internal/filecache" +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/gopls/internal/protocol/command" +- "golang.org/x/tools/gopls/internal/settings" +- bugpkg "golang.org/x/tools/gopls/internal/util/bug" +- versionpkg "golang.org/x/tools/gopls/internal/version" +- "golang.org/x/tools/internal/event" +-) +- +-type stats struct { +- app *Application +- +- Anon bool `flag:"anon" help:"hide any fields that may contain user names, file names, or source code"` +-} +- +-func (s *stats) Name() string { return "stats" } +-func (r *stats) Parent() string { return r.app.Name() } +-func (s *stats) Usage() string { return "" } +-func (s *stats) ShortHelp() string { return "print workspace statistics" } +- +-func (s *stats) DetailedHelp(f *flag.FlagSet) { +- fmt.Fprint(f.Output(), ` +-Load the workspace for the current directory, and output a JSON summary of +-workspace information relevant to performance. As a side effect, this command +-populates the gopls file cache for the current workspace. 
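The span.go comments above fix the coordinate convention used throughout these commands: lines and columns are 1-based with columns counted in bytes, while offsets are 0-based. As an illustration only (lineCol is a hypothetical helper, not part of the deleted code), converting an offset to that convention looks like:

package main

import (
	"fmt"
	"strings"
)

// lineCol maps a 0-based byte offset to the 1-based (line, column)
// convention used by span/point; columns are measured in bytes.
func lineCol(content string, offset int) (line, col int) {
	before := content[:offset]
	line = 1 + strings.Count(before, "\n")
	col = offset - strings.LastIndex(before, "\n") // LastIndex is -1 when there is no newline
	return line, col
}

func main() {
	src := "package cmd\n\nfunc main() {}\n"
	fmt.Println(lineCol(src, 0))  // 1 1
	fmt.Println(lineCol(src, 13)) // 3 1
}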
+- +-By default, this command may include output that refers to the location or +-content of user code. When the -anon flag is set, fields that may refer to user +-code are hidden. +- +-Example: +- $ gopls stats -anon +-`) +- printFlagDefaults(f) +-} +- +-func (s *stats) Run(ctx context.Context, args ...string) error { +- if s.app.Remote != "" { +- // stats does not work with -remote. +- // Other sessions on the daemon may interfere with results. +- // Additionally, the type assertions in below only work if progress +- // notifications bypass jsonrpc2 serialization. +- return fmt.Errorf("the stats subcommand does not work with -remote") +- } +- +- if !s.app.Verbose { +- event.SetExporter(nil) // don't log errors to stderr +- } +- +- stats := GoplsStats{ +- GOOS: runtime.GOOS, +- GOARCH: runtime.GOARCH, +- GOPLSCACHE: os.Getenv("GOPLSCACHE"), +- GoVersion: runtime.Version(), +- GoplsVersion: versionpkg.Version(), +- GOPACKAGESDRIVER: os.Getenv("GOPACKAGESDRIVER"), +- } +- +- opts := s.app.options +- s.app.options = func(o *settings.Options) { +- if opts != nil { +- opts(o) +- } +- o.VerboseWorkDoneProgress = true +- } +- +- // do executes a timed section of the stats command. +- do := func(name string, f func() error) (time.Duration, error) { +- start := time.Now() +- fmt.Fprintf(os.Stderr, "%-30s", name+"...") +- if err := f(); err != nil { +- return time.Since(start), err +- } +- d := time.Since(start) +- fmt.Fprintf(os.Stderr, "done (%v)\n", d) +- return d, nil +- } +- +- var cli *client +- iwlDuration, err := do("Initializing workspace", func() (err error) { +- cli, _, err = s.app.connect(ctx) +- if err != nil { +- return err +- } +- select { +- case <-cli.iwlDone: +- case <-ctx.Done(): +- return ctx.Err() +- } +- return nil +- }) +- if err != nil { +- return err +- } +- defer cli.terminate(ctx) +- +- stats.InitialWorkspaceLoadDuration = fmt.Sprint(iwlDuration) +- +- // Gather bug reports produced by any process using +- // this executable and persisted in the cache. +- do("Gathering bug reports", func() error { +- stats.CacheDir, stats.BugReports = filecache.BugReports() +- if stats.BugReports == nil { +- stats.BugReports = []bugpkg.Bug{} // non-nil for JSON +- } +- return nil +- }) +- +- if _, err := do("Querying memstats", func() error { +- memStats, err := executeCommand(ctx, cli.server, &protocol.Command{ +- Command: command.MemStats.String(), +- }) +- if err != nil { +- return err +- } +- stats.MemStats = memStats.(command.MemStatsResult) +- return nil +- }); err != nil { +- return err +- } +- +- if _, err := do("Querying workspace stats", func() error { +- wsStats, err := executeCommand(ctx, cli.server, &protocol.Command{ +- Command: command.WorkspaceStats.String(), +- }) +- if err != nil { +- return err +- } +- stats.WorkspaceStats = wsStats.(command.WorkspaceStatsResult) +- return nil +- }); err != nil { +- return err +- } +- +- if _, err := do("Collecting directory info", func() error { +- var err error +- stats.DirStats, err = findDirStats() +- if err != nil { +- return err +- } +- return nil +- }); err != nil { +- return err +- } +- +- // Filter JSON output to fields that are consistent with s.Anon. +- okFields := make(map[string]any) +- { +- v := reflect.ValueOf(stats) +- t := v.Type() +- for i := 0; i < t.NumField(); i++ { +- f := t.Field(i) +- if !token.IsExported(f.Name) { +- continue +- } +- vf := v.FieldByName(f.Name) +- if s.Anon && f.Tag.Get("anon") != "ok" && !vf.IsZero() { +- // Fields that can be served with -anon must be explicitly marked as OK. 
+- // But, if it's zero value, it's ok to print. +- continue +- } +- okFields[f.Name] = vf.Interface() +- } +- } +- data, err := json.MarshalIndent(okFields, "", " ") +- if err != nil { +- return err +- } +- +- os.Stdout.Write(data) +- fmt.Println() +- return nil +-} +- +-// GoplsStats holds information extracted from a gopls session in the current +-// workspace. +-// +-// Fields that should be printed with the -anon flag should be explicitly +-// marked as `anon:"ok"`. Only fields that cannot refer to user files or code +-// should be marked as such. +-type GoplsStats struct { +- GOOS, GOARCH string `anon:"ok"` +- GOPLSCACHE string +- GoVersion string `anon:"ok"` +- GoplsVersion string `anon:"ok"` +- GOPACKAGESDRIVER string +- InitialWorkspaceLoadDuration string `anon:"ok"` // in time.Duration string form +- CacheDir string +- BugReports []bugpkg.Bug +- MemStats command.MemStatsResult `anon:"ok"` +- WorkspaceStats command.WorkspaceStatsResult `anon:"ok"` +- DirStats dirStats `anon:"ok"` +-} +- +-type dirStats struct { +- Files int +- TestdataFiles int +- GoFiles int +- ModFiles int +- Dirs int +-} +- +-// findDirStats collects information about the current directory and its +-// subdirectories. +-func findDirStats() (dirStats, error) { +- var ds dirStats +- err := filepath.WalkDir(".", func(path string, d fs.DirEntry, err error) error { +- if err != nil { +- return err +- } +- if d.IsDir() { +- ds.Dirs++ +- } else { +- ds.Files++ +- slashed := filepath.ToSlash(path) +- switch { +- case strings.Contains(slashed, "/testdata/") || strings.HasPrefix(slashed, "testdata/"): +- ds.TestdataFiles++ +- case strings.HasSuffix(path, ".go"): +- ds.GoFiles++ +- case strings.HasSuffix(path, ".mod"): +- ds.ModFiles++ +- } +- } +- return nil +- }) +- return ds, err +-} +diff -urN a/gopls/internal/cmd/subcommands.go b/gopls/internal/cmd/subcommands.go +--- a/gopls/internal/cmd/subcommands.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/cmd/subcommands.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,59 +0,0 @@ +-// Copyright 2021 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package cmd +- +-import ( +- "context" +- "flag" +- "fmt" +- "text/tabwriter" +- +- "golang.org/x/tools/internal/tool" +-) +- +-// subcommands is a helper that may be embedded for commands that delegate to +-// subcommands. +-type subcommands []tool.Application +- +-func (s subcommands) DetailedHelp(f *flag.FlagSet) { +- w := tabwriter.NewWriter(f.Output(), 0, 0, 2, ' ', 0) +- defer w.Flush() +- fmt.Fprint(w, "\nSubcommand:\n") +- for _, c := range s { +- fmt.Fprintf(w, " %s\t%s\n", c.Name(), c.ShortHelp()) +- } +- printFlagDefaults(f) +-} +- +-func (s subcommands) Usage() string { return " [arg]..." } +- +-func (s subcommands) Run(ctx context.Context, args ...string) error { +- if len(args) == 0 { +- return tool.CommandLineErrorf("must provide subcommand") +- } +- command, args := args[0], args[1:] +- for _, c := range s { +- if c.Name() == command { +- s := flag.NewFlagSet(c.Name(), flag.ExitOnError) +- return tool.Run(ctx, s, c, args) +- } +- } +- return tool.CommandLineErrorf("unknown subcommand %v", command) +-} +- +-func (s subcommands) Commands() []tool.Application { return s } +- +-// getSubcommands returns the subcommands of a given Application. 
+-func getSubcommands(a tool.Application) []tool.Application { +- // This interface is satisfied both by tool.Applications +- // that embed subcommands, and by *cmd.Application. +- type hasCommands interface { +- Commands() []tool.Application +- } +- if sub, ok := a.(hasCommands); ok { +- return sub.Commands() +- } +- return nil +-} +diff -urN a/gopls/internal/cmd/symbols.go b/gopls/internal/cmd/symbols.go +--- a/gopls/internal/cmd/symbols.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/cmd/symbols.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,115 +0,0 @@ +-// Copyright 2019 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package cmd +- +-import ( +- "context" +- "encoding/json" +- "flag" +- "fmt" +- "sort" +- +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/internal/tool" +-) +- +-// symbols implements the symbols verb for gopls +-type symbols struct { +- app *Application +-} +- +-func (r *symbols) Name() string { return "symbols" } +-func (r *symbols) Parent() string { return r.app.Name() } +-func (r *symbols) Usage() string { return "" } +-func (r *symbols) ShortHelp() string { return "display selected file's symbols" } +-func (r *symbols) DetailedHelp(f *flag.FlagSet) { +- fmt.Fprint(f.Output(), ` +-Example: +- $ gopls symbols helper/helper.go +-`) +- printFlagDefaults(f) +-} +-func (r *symbols) Run(ctx context.Context, args ...string) error { +- if len(args) != 1 { +- return tool.CommandLineErrorf("symbols expects 1 argument (position)") +- } +- +- cli, _, err := r.app.connect(ctx) +- if err != nil { +- return err +- } +- defer cli.terminate(ctx) +- +- from := parseSpan(args[0]) +- p := protocol.DocumentSymbolParams{ +- TextDocument: protocol.TextDocumentIdentifier{ +- URI: from.URI(), +- }, +- } +- symbols, err := cli.server.DocumentSymbol(ctx, &p) +- if err != nil { +- return err +- } +- for _, s := range symbols { +- if m, ok := s.(map[string]any); ok { +- s, err = mapToSymbol(m) +- if err != nil { +- return err +- } +- } +- switch t := s.(type) { +- case protocol.DocumentSymbol: +- printDocumentSymbol(t) +- case protocol.SymbolInformation: +- printSymbolInformation(t) +- } +- } +- return nil +-} +- +-func mapToSymbol(m map[string]any) (any, error) { +- b, err := json.Marshal(m) +- if err != nil { +- return nil, err +- } +- +- if _, ok := m["selectionRange"]; ok { +- var s protocol.DocumentSymbol +- if err := json.Unmarshal(b, &s); err != nil { +- return nil, err +- } +- return s, nil +- } +- +- var s protocol.SymbolInformation +- if err := json.Unmarshal(b, &s); err != nil { +- return nil, err +- } +- return s, nil +-} +- +-func printDocumentSymbol(s protocol.DocumentSymbol) { +- fmt.Printf("%s %s %s\n", s.Name, s.Kind, positionToString(s.SelectionRange)) +- // Sort children for consistency +- sort.Slice(s.Children, func(i, j int) bool { +- return s.Children[i].Name < s.Children[j].Name +- }) +- for _, c := range s.Children { +- fmt.Printf("\t%s %s %s\n", c.Name, c.Kind, positionToString(c.SelectionRange)) +- } +-} +- +-func printSymbolInformation(s protocol.SymbolInformation) { +- fmt.Printf("%s %s %s\n", s.Name, s.Kind, positionToString(s.Location.Range)) +-} +- +-func positionToString(r protocol.Range) string { +- return fmt.Sprintf("%v:%v-%v:%v", +- r.Start.Line+1, +- r.Start.Character+1, +- r.End.Line+1, +- r.End.Character+1, +- ) +-} +diff -urN a/gopls/internal/cmd/usage/api-json.hlp b/gopls/internal/cmd/usage/api-json.hlp +--- 
a/gopls/internal/cmd/usage/api-json.hlp 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/cmd/usage/api-json.hlp 1969-12-31 18:00:00.000000000 -0600 +@@ -1,8 +0,0 @@ +-print JSON describing gopls API +- +-Usage: +- gopls [flags] api-json +- +-The api-json command prints a JSON value that describes +-and documents all gopls' public interfaces. +-Its schema is defined by golang.org/x/tools/gopls/internal/doc.API. +diff -urN a/gopls/internal/cmd/usage/bug.hlp b/gopls/internal/cmd/usage/bug.hlp +--- a/gopls/internal/cmd/usage/bug.hlp 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/cmd/usage/bug.hlp 1969-12-31 18:00:00.000000000 -0600 +@@ -1,4 +0,0 @@ +-report a bug in gopls +- +-Usage: +- gopls [flags] bug +diff -urN a/gopls/internal/cmd/usage/call_hierarchy.hlp b/gopls/internal/cmd/usage/call_hierarchy.hlp +--- a/gopls/internal/cmd/usage/call_hierarchy.hlp 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/cmd/usage/call_hierarchy.hlp 1969-12-31 18:00:00.000000000 -0600 +@@ -1,10 +0,0 @@ +-display selected identifier's call hierarchy +- +-Usage: +- gopls [flags] call_hierarchy +- +-Example: +- +- $ # 1-indexed location (:line:column or :#offset) of the target identifier +- $ gopls call_hierarchy helper/helper.go:8:6 +- $ gopls call_hierarchy helper/helper.go:#53 +diff -urN a/gopls/internal/cmd/usage/check.hlp b/gopls/internal/cmd/usage/check.hlp +--- a/gopls/internal/cmd/usage/check.hlp 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/cmd/usage/check.hlp 1969-12-31 18:00:00.000000000 -0600 +@@ -1,10 +0,0 @@ +-show diagnostic results for the specified file +- +-Usage: +- gopls [flags] check +- +-Example: show the diagnostic results of this file: +- +- $ gopls check internal/cmd/check.go +- -severity=string +- minimum diagnostic severity (hint, info, warning, or error) (default "warning") +diff -urN a/gopls/internal/cmd/usage/codeaction.hlp b/gopls/internal/cmd/usage/codeaction.hlp +--- a/gopls/internal/cmd/usage/codeaction.hlp 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/cmd/usage/codeaction.hlp 1969-12-31 18:00:00.000000000 -0600 +@@ -1,86 +0,0 @@ +-list or execute code actions +- +-Usage: +- gopls [flags] codeaction [codeaction-flags] filename[:line[:col]] +- +- +-The codeaction command lists or executes code actions for the +-specified file or range of a file. Each code action contains +-either an edit to be directly applied to the file, or a command +-to be executed by the server, which may have an effect such as: +-- requesting that the client apply an edit; +-- changing the state of the server; or +-- requesting that the client open a document. +- +-The -kind and and -title flags filter the list of actions. +- +-The -kind flag specifies a comma-separated list of LSP CodeAction kinds. +-Only actions of these kinds will be requested from the server. 
+-Valid kinds include: +- +- gopls.doc.features +- quickfix +- refactor +- refactor.extract +- refactor.extract.constant +- refactor.extract.function +- refactor.extract.method +- refactor.extract.toNewFile +- refactor.extract.variable +- refactor.inline +- refactor.inline.call +- refactor.rewrite +- refactor.rewrite.changeQuote +- refactor.rewrite.fillStruct +- refactor.rewrite.fillSwitch +- refactor.rewrite.invertIf +- refactor.rewrite.joinLines +- refactor.rewrite.removeUnusedParam +- refactor.rewrite.splitLines +- source +- source.assembly +- source.doc +- source.fixAll +- source.freesymbols +- source.organizeImports +- source.test +- +-Kinds are hierarchical, so "refactor" includes "refactor.inline". +-(Note: actions of kind "source.test" are not returned unless explicitly +-requested.) +- +-The -title flag specifies a regular expression that must match the +-action's title. (Ideally kinds would be specific enough that this +-isn't necessary; we really need to subdivide refactor.rewrite; see +-gopls/internal/settings/codeactionkind.go.) +- +-The -exec flag causes the first matching code action to be executed. +-Without the flag, the matching actions are merely listed. +- +-It is not currently possible to execute more than one action, +-as that requires a way to detect and resolve conflicts. +-TODO(adonovan): support it when golang/go#67049 is resolved. +- +-If executing an action causes the server to send a patch to the +-client, the usual -write, -preserve, -diff, and -list flags govern how +-the client deals with the patch. +- +-Example: execute the first "quick fix" in the specified file and show the diff: +- +- $ gopls codeaction -kind=quickfix -exec -diff ./gopls/main.go +- +-codeaction-flags: +- -d,-diff +- display diffs instead of edited file content +- -exec +- execute the first matching code action +- -kind=string +- comma-separated list of code action kinds to filter +- -l,-list +- display names of edited files +- -preserve +- with -write, make copies of original files +- -title=string +- regular expression to match title +- -w,-write +- write edited content to source files +diff -urN a/gopls/internal/cmd/usage/codelens.hlp b/gopls/internal/cmd/usage/codelens.hlp +--- a/gopls/internal/cmd/usage/codelens.hlp 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/cmd/usage/codelens.hlp 1969-12-31 18:00:00.000000000 -0600 +@@ -1,35 +0,0 @@ +-List or execute code lenses for a file +- +-Usage: +- gopls [flags] codelens [codelens-flags] file[:line[:col]] [title] +- +-The codelens command lists or executes code lenses for the specified +-file, or line within a file. A code lens is a command associated with +-a position in the code. +- +-With an optional title argument, only code lenses matching that +-title are considered. +- +-By default, the codelens command lists the available lenses for the +-specified file or line within a file, including the title and +-title of the command. With the -exec flag, the first matching command +-is executed, and its output is printed to stdout. 
+- +-Example: +- +- $ gopls codelens a_test.go # list code lenses in a file +- $ gopls codelens a_test.go:10 # list code lenses on line 10 +- $ gopls codelens a_test.go "run test" # list gopls.run_tests commands +- $ gopls codelens -exec a_test.go:10 "run test" # run a specific test +- +-codelens-flags: +- -d,-diff +- display diffs instead of edited file content +- -exec +- execute the first matching code lens +- -l,-list +- display names of edited files +- -preserve +- with -write, make copies of original files +- -w,-write +- write edited content to source files +diff -urN a/gopls/internal/cmd/usage/definition.hlp b/gopls/internal/cmd/usage/definition.hlp +--- a/gopls/internal/cmd/usage/definition.hlp 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/cmd/usage/definition.hlp 1969-12-31 18:00:00.000000000 -0600 +@@ -1,15 +0,0 @@ +-show declaration of selected identifier +- +-Usage: +- gopls [flags] definition [definition-flags] +- +-Example: show the definition of the identifier at syntax at offset 44 in this file (flag.FlagSet): +- +- $ gopls definition internal/cmd/definition.go:44:47 +- $ gopls definition internal/cmd/definition.go:#1270 +- +-definition-flags: +- -json +- emit output in JSON format +- -markdown +- support markdown in responses +diff -urN a/gopls/internal/cmd/usage/execute.hlp b/gopls/internal/cmd/usage/execute.hlp +--- a/gopls/internal/cmd/usage/execute.hlp 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/cmd/usage/execute.hlp 1969-12-31 18:00:00.000000000 -0600 +@@ -1,28 +0,0 @@ +-Execute a gopls custom LSP command +- +-Usage: +- gopls [flags] execute [flags] command argument... +- +-The execute command sends an LSP ExecuteCommand request to gopls, +-with a set of optional JSON argument values. +-Some commands return a result, also JSON. +- +-Gopls' command set is defined by the command.Interface type; see +-https://pkg.go.dev/golang.org/x/tools/gopls/internal/protocol/command#Interface. +-It is not a stable interface: commands may change or disappear without notice. +- +-Examples: +- +- $ gopls execute gopls.add_import '{"ImportPath": "fmt", "URI": "file:///hello.go"}' +- $ gopls execute gopls.run_tests '{"URI": "file:///a_test.go", "Tests": ["Test"]}' +- $ gopls execute gopls.list_known_packages '{"URI": "file:///hello.go"}' +- +-execute-flags: +- -d,-diff +- display diffs instead of edited file content +- -l,-list +- display names of edited files +- -preserve +- with -write, make copies of original files +- -w,-write +- write edited content to source files +diff -urN a/gopls/internal/cmd/usage/fix.hlp b/gopls/internal/cmd/usage/fix.hlp +--- a/gopls/internal/cmd/usage/fix.hlp 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/cmd/usage/fix.hlp 1969-12-31 18:00:00.000000000 -0600 +@@ -1,5 +0,0 @@ +-apply suggested fixes (obsolete) +- +-Usage: +- gopls [flags] fix +-No longer supported; use "gopls codeaction" instead. 
+\ No newline at end of file +diff -urN a/gopls/internal/cmd/usage/folding_ranges.hlp b/gopls/internal/cmd/usage/folding_ranges.hlp +--- a/gopls/internal/cmd/usage/folding_ranges.hlp 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/cmd/usage/folding_ranges.hlp 1969-12-31 18:00:00.000000000 -0600 +@@ -1,8 +0,0 @@ +-display selected file's folding ranges +- +-Usage: +- gopls [flags] folding_ranges +- +-Example: +- +- $ gopls folding_ranges helper/helper.go +diff -urN a/gopls/internal/cmd/usage/format.hlp b/gopls/internal/cmd/usage/format.hlp +--- a/gopls/internal/cmd/usage/format.hlp 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/cmd/usage/format.hlp 1969-12-31 18:00:00.000000000 -0600 +@@ -1,20 +0,0 @@ +-format the code according to the go standard +- +-Usage: +- gopls [flags] format [format-flags] +- +-The arguments supplied may be simple file names, or ranges within files. +- +-Example: reformat this file: +- +- $ gopls format -w internal/cmd/check.go +- +-format-flags: +- -d,-diff +- display diffs instead of edited file content +- -l,-list +- display names of edited files +- -preserve +- with -write, make copies of original files +- -w,-write +- write edited content to source files +diff -urN a/gopls/internal/cmd/usage/help.hlp b/gopls/internal/cmd/usage/help.hlp +--- a/gopls/internal/cmd/usage/help.hlp 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/cmd/usage/help.hlp 1969-12-31 18:00:00.000000000 -0600 +@@ -1,10 +0,0 @@ +-print usage information for subcommands +- +-Usage: +- gopls [flags] help +- +- +-Examples: +-$ gopls help # main gopls help message +-$ gopls help remote # help on 'remote' command +-$ gopls help remote sessions # help on 'remote sessions' subcommand +diff -urN a/gopls/internal/cmd/usage/highlight.hlp b/gopls/internal/cmd/usage/highlight.hlp +--- a/gopls/internal/cmd/usage/highlight.hlp 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/cmd/usage/highlight.hlp 1969-12-31 18:00:00.000000000 -0600 +@@ -1,10 +0,0 @@ +-display selected identifier's highlights +- +-Usage: +- gopls [flags] highlight +- +-Example: +- +- $ # 1-indexed location (:line:column or :#offset) of the target identifier +- $ gopls highlight helper/helper.go:8:6 +- $ gopls highlight helper/helper.go:#53 +diff -urN a/gopls/internal/cmd/usage/implementation.hlp b/gopls/internal/cmd/usage/implementation.hlp +--- a/gopls/internal/cmd/usage/implementation.hlp 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/cmd/usage/implementation.hlp 1969-12-31 18:00:00.000000000 -0600 +@@ -1,10 +0,0 @@ +-display selected identifier's implementation +- +-Usage: +- gopls [flags] implementation +- +-Example: +- +- $ # 1-indexed location (:line:column or :#offset) of the target identifier +- $ gopls implementation helper/helper.go:8:6 +- $ gopls implementation helper/helper.go:#53 +diff -urN a/gopls/internal/cmd/usage/imports.hlp b/gopls/internal/cmd/usage/imports.hlp +--- a/gopls/internal/cmd/usage/imports.hlp 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/cmd/usage/imports.hlp 1969-12-31 18:00:00.000000000 -0600 +@@ -1,18 +0,0 @@ +-updates import statements +- +-Usage: +- gopls [flags] imports [imports-flags] +- +-Example: update imports statements in a file: +- +- $ gopls imports -w internal/cmd/check.go +- +-imports-flags: +- -d,-diff +- display diffs instead of edited file content +- -l,-list +- display names of edited files +- -preserve +- with -write, make copies of original files +- -w,-write +- write edited content to source files +diff -urN 
a/gopls/internal/cmd/usage/inspect.hlp b/gopls/internal/cmd/usage/inspect.hlp +--- a/gopls/internal/cmd/usage/inspect.hlp 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/cmd/usage/inspect.hlp 1969-12-31 18:00:00.000000000 -0600 +@@ -1,8 +0,0 @@ +-interact with the gopls daemon (deprecated: use 'remote') +- +-Usage: +- gopls [flags] inspect [arg]... +- +-Subcommand: +- sessions print information about current gopls sessions +- debug start the debug server +diff -urN a/gopls/internal/cmd/usage/licenses.hlp b/gopls/internal/cmd/usage/licenses.hlp +--- a/gopls/internal/cmd/usage/licenses.hlp 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/cmd/usage/licenses.hlp 1969-12-31 18:00:00.000000000 -0600 +@@ -1,4 +0,0 @@ +-print licenses of included software +- +-Usage: +- gopls [flags] licenses +diff -urN a/gopls/internal/cmd/usage/links.hlp b/gopls/internal/cmd/usage/links.hlp +--- a/gopls/internal/cmd/usage/links.hlp 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/cmd/usage/links.hlp 1969-12-31 18:00:00.000000000 -0600 +@@ -1,12 +0,0 @@ +-list links in a file +- +-Usage: +- gopls [flags] links [links-flags] +- +-Example: list links contained within a file: +- +- $ gopls links internal/cmd/check.go +- +-links-flags: +- -json +- emit document links in JSON format +diff -urN a/gopls/internal/cmd/usage/mcp.hlp b/gopls/internal/cmd/usage/mcp.hlp +--- a/gopls/internal/cmd/usage/mcp.hlp 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/cmd/usage/mcp.hlp 1969-12-31 18:00:00.000000000 -0600 +@@ -1,19 +0,0 @@ +-start the gopls MCP server in headless mode +- +-Usage: +- gopls [flags] mcp [mcp-flags] +- +-Starts the gopls MCP server in headless mode, without needing an LSP client. +-Starts the server over stdio or sse with http, depending on whether the listen flag is provided. 
+- +-Examples: +- $ gopls mcp -listen=localhost:3000 +- $ gopls mcp //start over stdio +- -instructions +- if set, print gopls' MCP instructions and exit +- -listen=string +- the address on which to run the mcp server +- -logfile=string +- filename to log to; if unset, logs to stderr +- -rpc.trace +- print MCP rpc traces; cannot be used with -listen +diff -urN a/gopls/internal/cmd/usage/prepare_rename.hlp b/gopls/internal/cmd/usage/prepare_rename.hlp +--- a/gopls/internal/cmd/usage/prepare_rename.hlp 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/cmd/usage/prepare_rename.hlp 1969-12-31 18:00:00.000000000 -0600 +@@ -1,10 +0,0 @@ +-test validity of a rename operation at location +- +-Usage: +- gopls [flags] prepare_rename +- +-Example: +- +- $ # 1-indexed location (:line:column or :#offset) of the target identifier +- $ gopls prepare_rename helper/helper.go:8:6 +- $ gopls prepare_rename helper/helper.go:#53 +diff -urN a/gopls/internal/cmd/usage/references.hlp b/gopls/internal/cmd/usage/references.hlp +--- a/gopls/internal/cmd/usage/references.hlp 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/cmd/usage/references.hlp 1969-12-31 18:00:00.000000000 -0600 +@@ -1,14 +0,0 @@ +-display selected identifier's references +- +-Usage: +- gopls [flags] references [references-flags] +- +-Example: +- +- $ # 1-indexed location (:line:column or :#offset) of the target identifier +- $ gopls references helper/helper.go:8:6 +- $ gopls references helper/helper.go:#53 +- +-references-flags: +- -d,-declaration +- include the declaration of the specified identifier in the results +diff -urN a/gopls/internal/cmd/usage/remote.hlp b/gopls/internal/cmd/usage/remote.hlp +--- a/gopls/internal/cmd/usage/remote.hlp 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/cmd/usage/remote.hlp 1969-12-31 18:00:00.000000000 -0600 +@@ -1,8 +0,0 @@ +-interact with the gopls daemon +- +-Usage: +- gopls [flags] remote [arg]... 
+- +-Subcommand: +- sessions print information about current gopls sessions +- debug start the debug server +diff -urN a/gopls/internal/cmd/usage/rename.hlp b/gopls/internal/cmd/usage/rename.hlp +--- a/gopls/internal/cmd/usage/rename.hlp 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/cmd/usage/rename.hlp 1969-12-31 18:00:00.000000000 -0600 +@@ -1,20 +0,0 @@ +-rename selected identifier +- +-Usage: +- gopls [flags] rename [rename-flags] +- +-Example: +- +- $ # 1-based location (:line:column or :#position) of the thing to change +- $ gopls rename helper/helper.go:8:6 Foo +- $ gopls rename helper/helper.go:#53 Foo +- +-rename-flags: +- -d,-diff +- display diffs instead of edited file content +- -l,-list +- display names of edited files +- -preserve +- with -write, make copies of original files +- -w,-write +- write edited content to source files +diff -urN a/gopls/internal/cmd/usage/semtok.hlp b/gopls/internal/cmd/usage/semtok.hlp +--- a/gopls/internal/cmd/usage/semtok.hlp 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/cmd/usage/semtok.hlp 1969-12-31 18:00:00.000000000 -0600 +@@ -1,8 +0,0 @@ +-show semantic tokens for the specified file +- +-Usage: +- gopls [flags] semtok +- +-Example: show the semantic tokens for this file: +- +- $ gopls semtok internal/cmd/semtok.go +diff -urN a/gopls/internal/cmd/usage/serve.hlp b/gopls/internal/cmd/usage/serve.hlp +--- a/gopls/internal/cmd/usage/serve.hlp 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/cmd/usage/serve.hlp 1969-12-31 18:00:00.000000000 -0600 +@@ -1,32 +0,0 @@ +-run a server for Go code using the Language Server Protocol +- +-Usage: +- gopls [flags] serve [server-flags] +- gopls [flags] [server-flags] +- +-The server communicates using JSONRPC2 on stdin and stdout, and is intended to be run directly as +-a child of an editor process. +- +-server-flags: +- -debug=string +- serve debug information on the supplied address +- -listen=string +- address on which to listen for remote connections. If prefixed by 'unix;', the subsequent address is assumed to be a unix domain socket. Otherwise, TCP is used. +- -listen.timeout=duration +- when used with -listen, shut down the server when there are no connected clients for this duration +- -logfile=string +- filename to log to. if value is "auto", then logging to a default output file is enabled +- -mcp.listen=string +- experimental: address on which to listen for model context protocol connections. If port is localhost:0, pick a random port in localhost instead. 
+- -mode=string +- no effect +- -port=int +- port on which to run gopls for debugging purposes +- -remote.debug=string +- when used with -remote=auto, the -debug value used to start the daemon +- -remote.listen.timeout=duration +- when used with -remote=auto, the -listen.timeout value used to start the daemon (default 1m0s) +- -remote.logfile=string +- when used with -remote=auto, the -logfile value used to start the daemon +- -rpc.trace +- print the full rpc trace in lsp inspector format +diff -urN a/gopls/internal/cmd/usage/signature.hlp b/gopls/internal/cmd/usage/signature.hlp +--- a/gopls/internal/cmd/usage/signature.hlp 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/cmd/usage/signature.hlp 1969-12-31 18:00:00.000000000 -0600 +@@ -1,10 +0,0 @@ +-display selected identifier's signature +- +-Usage: +- gopls [flags] signature +- +-Example: +- +- $ # 1-indexed location (:line:column or :#offset) of the target identifier +- $ gopls signature helper/helper.go:8:6 +- $ gopls signature helper/helper.go:#53 +diff -urN a/gopls/internal/cmd/usage/stats.hlp b/gopls/internal/cmd/usage/stats.hlp +--- a/gopls/internal/cmd/usage/stats.hlp 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/cmd/usage/stats.hlp 1969-12-31 18:00:00.000000000 -0600 +@@ -1,17 +0,0 @@ +-print workspace statistics +- +-Usage: +- gopls [flags] stats +- +-Load the workspace for the current directory, and output a JSON summary of +-workspace information relevant to performance. As a side effect, this command +-populates the gopls file cache for the current workspace. +- +-By default, this command may include output that refers to the location or +-content of user code. When the -anon flag is set, fields that may refer to user +-code are hidden. +- +-Example: +- $ gopls stats -anon +- -anon +- hide any fields that may contain user names, file names, or source code +diff -urN a/gopls/internal/cmd/usage/symbols.hlp b/gopls/internal/cmd/usage/symbols.hlp +--- a/gopls/internal/cmd/usage/symbols.hlp 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/cmd/usage/symbols.hlp 1969-12-31 18:00:00.000000000 -0600 +@@ -1,7 +0,0 @@ +-display selected file's symbols +- +-Usage: +- gopls [flags] symbols +- +-Example: +- $ gopls symbols helper/helper.go +diff -urN a/gopls/internal/cmd/usage/usage.hlp b/gopls/internal/cmd/usage/usage.hlp +--- a/gopls/internal/cmd/usage/usage.hlp 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/cmd/usage/usage.hlp 1969-12-31 18:00:00.000000000 -0600 +@@ -1,89 +0,0 @@ +- +-gopls is a Go language server. +- +-It is typically used with an editor to provide language features. When no +-command is specified, gopls will default to the 'serve' command. The language +-features can also be accessed via the gopls command-line interface. 
+- +-For documentation of all its features, see: +- +- https://github.com/golang/tools/blob/master/gopls/doc/features +- +-Usage: +- gopls help [] +- +-Command: +- +-Main +- serve run a server for Go code using the Language Server Protocol +- version print the gopls version information +- bug report a bug in gopls +- help print usage information for subcommands +- api-json print JSON describing gopls API +- licenses print licenses of included software +- +-Features +- call_hierarchy display selected identifier's call hierarchy +- check show diagnostic results for the specified file +- codeaction list or execute code actions +- codelens List or execute code lenses for a file +- definition show declaration of selected identifier +- execute Execute a gopls custom LSP command +- fix apply suggested fixes (obsolete) +- folding_ranges display selected file's folding ranges +- format format the code according to the go standard +- mcp start the gopls MCP server in headless mode +- highlight display selected identifier's highlights +- implementation display selected identifier's implementation +- imports updates import statements +- remote interact with the gopls daemon +- inspect interact with the gopls daemon (deprecated: use 'remote') +- links list links in a file +- prepare_rename test validity of a rename operation at location +- references display selected identifier's references +- rename rename selected identifier +- semtok show semantic tokens for the specified file +- signature display selected identifier's signature +- stats print workspace statistics +- symbols display selected file's symbols +- workspace_symbol search symbols in workspace +- +-flags: +- -debug=string +- serve debug information on the supplied address +- -listen=string +- address on which to listen for remote connections. If prefixed by 'unix;', the subsequent address is assumed to be a unix domain socket. Otherwise, TCP is used. +- -listen.timeout=duration +- when used with -listen, shut down the server when there are no connected clients for this duration +- -logfile=string +- filename to log to. if value is "auto", then logging to a default output file is enabled +- -mcp.listen=string +- experimental: address on which to listen for model context protocol connections. If port is localhost:0, pick a random port in localhost instead. +- -mode=string +- no effect +- -port=int +- port on which to run gopls for debugging purposes +- -profile.alloc=string +- write alloc profile to this file +- -profile.block=string +- write block profile to this file +- -profile.cpu=string +- write CPU profile to this file +- -profile.mem=string +- write memory profile to this file +- -profile.trace=string +- write trace log to this file +- -remote=string +- forward all commands to a remote lsp specified by this flag. With no special prefix, this is assumed to be a TCP address. If prefixed by 'unix;', the subsequent address is assumed to be a unix domain socket. If 'auto', or prefixed by 'auto;', the remote address is automatically resolved based on the executing environment. 
+- -remote.debug=string +- when used with -remote=auto, the -debug value used to start the daemon +- -remote.listen.timeout=duration +- when used with -remote=auto, the -listen.timeout value used to start the daemon (default 1m0s) +- -remote.logfile=string +- when used with -remote=auto, the -logfile value used to start the daemon +- -rpc.trace +- print the full rpc trace in lsp inspector format +- -v,-verbose +- verbose output +- -vv,-veryverbose +- very verbose output +diff -urN a/gopls/internal/cmd/usage/usage-v.hlp b/gopls/internal/cmd/usage/usage-v.hlp +--- a/gopls/internal/cmd/usage/usage-v.hlp 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/cmd/usage/usage-v.hlp 1969-12-31 18:00:00.000000000 -0600 +@@ -1,92 +0,0 @@ +- +-gopls is a Go language server. +- +-It is typically used with an editor to provide language features. When no +-command is specified, gopls will default to the 'serve' command. The language +-features can also be accessed via the gopls command-line interface. +- +-For documentation of all its features, see: +- +- https://github.com/golang/tools/blob/master/gopls/doc/features +- +-Usage: +- gopls help [] +- +-Command: +- +-Main +- serve run a server for Go code using the Language Server Protocol +- version print the gopls version information +- bug report a bug in gopls +- help print usage information for subcommands +- api-json print JSON describing gopls API +- licenses print licenses of included software +- +-Features +- call_hierarchy display selected identifier's call hierarchy +- check show diagnostic results for the specified file +- codeaction list or execute code actions +- codelens List or execute code lenses for a file +- definition show declaration of selected identifier +- execute Execute a gopls custom LSP command +- fix apply suggested fixes (obsolete) +- folding_ranges display selected file's folding ranges +- format format the code according to the go standard +- mcp start the gopls MCP server in headless mode +- highlight display selected identifier's highlights +- implementation display selected identifier's implementation +- imports updates import statements +- remote interact with the gopls daemon +- inspect interact with the gopls daemon (deprecated: use 'remote') +- links list links in a file +- prepare_rename test validity of a rename operation at location +- references display selected identifier's references +- rename rename selected identifier +- semtok show semantic tokens for the specified file +- signature display selected identifier's signature +- stats print workspace statistics +- symbols display selected file's symbols +- workspace_symbol search symbols in workspace +- +-Internal Use Only +- vulncheck run vulncheck analysis (internal-use only) +- +-flags: +- -debug=string +- serve debug information on the supplied address +- -listen=string +- address on which to listen for remote connections. If prefixed by 'unix;', the subsequent address is assumed to be a unix domain socket. Otherwise, TCP is used. +- -listen.timeout=duration +- when used with -listen, shut down the server when there are no connected clients for this duration +- -logfile=string +- filename to log to. if value is "auto", then logging to a default output file is enabled +- -mcp.listen=string +- experimental: address on which to listen for model context protocol connections. If port is localhost:0, pick a random port in localhost instead. 
+- -mode=string +- no effect +- -port=int +- port on which to run gopls for debugging purposes +- -profile.alloc=string +- write alloc profile to this file +- -profile.block=string +- write block profile to this file +- -profile.cpu=string +- write CPU profile to this file +- -profile.mem=string +- write memory profile to this file +- -profile.trace=string +- write trace log to this file +- -remote=string +- forward all commands to a remote lsp specified by this flag. With no special prefix, this is assumed to be a TCP address. If prefixed by 'unix;', the subsequent address is assumed to be a unix domain socket. If 'auto', or prefixed by 'auto;', the remote address is automatically resolved based on the executing environment. +- -remote.debug=string +- when used with -remote=auto, the -debug value used to start the daemon +- -remote.listen.timeout=duration +- when used with -remote=auto, the -listen.timeout value used to start the daemon (default 1m0s) +- -remote.logfile=string +- when used with -remote=auto, the -logfile value used to start the daemon +- -rpc.trace +- print the full rpc trace in lsp inspector format +- -v,-verbose +- verbose output +- -vv,-veryverbose +- very verbose output +diff -urN a/gopls/internal/cmd/usage/version.hlp b/gopls/internal/cmd/usage/version.hlp +--- a/gopls/internal/cmd/usage/version.hlp 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/cmd/usage/version.hlp 1969-12-31 18:00:00.000000000 -0600 +@@ -1,6 +0,0 @@ +-print the gopls version information +- +-Usage: +- gopls [flags] version +- -json +- outputs in json format. +diff -urN a/gopls/internal/cmd/usage/vulncheck.hlp b/gopls/internal/cmd/usage/vulncheck.hlp +--- a/gopls/internal/cmd/usage/vulncheck.hlp 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/cmd/usage/vulncheck.hlp 1969-12-31 18:00:00.000000000 -0600 +@@ -1,13 +0,0 @@ +-run vulncheck analysis (internal-use only) +- +-Usage: +- gopls [flags] vulncheck +- +- WARNING: this command is for internal-use only. +- +- By default, the command outputs a JSON-encoded +- golang.org/x/tools/gopls/internal/protocol/command.VulncheckResult +- message. +- Example: +- $ gopls vulncheck +- +diff -urN a/gopls/internal/cmd/usage/workspace_symbol.hlp b/gopls/internal/cmd/usage/workspace_symbol.hlp +--- a/gopls/internal/cmd/usage/workspace_symbol.hlp 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/cmd/usage/workspace_symbol.hlp 1969-12-31 18:00:00.000000000 -0600 +@@ -1,13 +0,0 @@ +-search symbols in workspace +- +-Usage: +- gopls [flags] workspace_symbol [workspace_symbol-flags] +- +-Example: +- +- $ gopls workspace_symbol -matcher fuzzy 'wsymbols' +- +-workspace_symbol-flags: +- -matcher=string +- specifies the type of matcher: fuzzy, fastfuzzy, casesensitive, or caseinsensitive. +- The default is caseinsensitive. +diff -urN a/gopls/internal/cmd/vulncheck.go b/gopls/internal/cmd/vulncheck.go +--- a/gopls/internal/cmd/vulncheck.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/cmd/vulncheck.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,47 +0,0 @@ +-// Copyright 2022 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package cmd +- +-import ( +- "context" +- "flag" +- "fmt" +- "os" +- +- "golang.org/x/tools/gopls/internal/vulncheck/scan" +-) +- +-// vulncheck implements the vulncheck command. +-// TODO(hakim): hide from the public. 
+-type vulncheck struct { +- app *Application +-} +- +-func (v *vulncheck) Name() string { return "vulncheck" } +-func (v *vulncheck) Parent() string { return v.app.Name() } +-func (v *vulncheck) Usage() string { return "" } +-func (v *vulncheck) ShortHelp() string { +- return "run vulncheck analysis (internal-use only)" +-} +-func (v *vulncheck) DetailedHelp(f *flag.FlagSet) { +- fmt.Fprint(f.Output(), ` +- WARNING: this command is for internal-use only. +- +- By default, the command outputs a JSON-encoded +- golang.org/x/tools/gopls/internal/protocol/command.VulncheckResult +- message. +- Example: +- $ gopls vulncheck +- +-`) +-} +- +-func (v *vulncheck) Run(ctx context.Context, args ...string) error { +- if err := scan.Main(ctx, args...); err != nil { +- fmt.Fprintln(os.Stderr, err) +- os.Exit(1) +- } +- return nil +-} +diff -urN a/gopls/internal/cmd/workspace_symbol.go b/gopls/internal/cmd/workspace_symbol.go +--- a/gopls/internal/cmd/workspace_symbol.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/cmd/workspace_symbol.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,89 +0,0 @@ +-// Copyright 2020 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package cmd +- +-import ( +- "context" +- "flag" +- "fmt" +- "strings" +- +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/gopls/internal/settings" +- "golang.org/x/tools/internal/tool" +-) +- +-// workspaceSymbol implements the workspace_symbol verb for gopls. +-type workspaceSymbol struct { +- Matcher string `flag:"matcher" help:"specifies the type of matcher: fuzzy, fastfuzzy, casesensitive, or caseinsensitive.\nThe default is caseinsensitive."` +- +- app *Application +-} +- +-func (r *workspaceSymbol) Name() string { return "workspace_symbol" } +-func (r *workspaceSymbol) Parent() string { return r.app.Name() } +-func (r *workspaceSymbol) Usage() string { return "[workspace_symbol-flags] " } +-func (r *workspaceSymbol) ShortHelp() string { return "search symbols in workspace" } +-func (r *workspaceSymbol) DetailedHelp(f *flag.FlagSet) { +- fmt.Fprint(f.Output(), ` +-Example: +- +- $ gopls workspace_symbol -matcher fuzzy 'wsymbols' +- +-workspace_symbol-flags: +-`) +- printFlagDefaults(f) +-} +- +-func (r *workspaceSymbol) Run(ctx context.Context, args ...string) error { +- if len(args) != 1 { +- return tool.CommandLineErrorf("workspace_symbol expects 1 argument") +- } +- +- opts := r.app.options +- r.app.options = func(o *settings.Options) { +- if opts != nil { +- opts(o) +- } +- switch strings.ToLower(r.Matcher) { +- case "fuzzy": +- o.SymbolMatcher = settings.SymbolFuzzy +- case "casesensitive": +- o.SymbolMatcher = settings.SymbolCaseSensitive +- case "fastfuzzy": +- o.SymbolMatcher = settings.SymbolFastFuzzy +- default: +- o.SymbolMatcher = settings.SymbolCaseInsensitive +- } +- } +- +- cli, _, err := r.app.connect(ctx) +- if err != nil { +- return err +- } +- defer cli.terminate(ctx) +- +- p := protocol.WorkspaceSymbolParams{ +- Query: args[0], +- } +- +- symbols, err := cli.server.Symbol(ctx, &p) +- if err != nil { +- return err +- } +- for _, s := range symbols { +- f, err := cli.openFile(ctx, s.Location.URI) +- if err != nil { +- return err +- } +- span, err := f.locationSpan(s.Location) +- if err != nil { +- return err +- } +- fmt.Printf("%s %s %s\n", span, s.Name, s.Kind) +- } +- +- return nil +-} +diff -urN a/gopls/internal/debug/flight.go b/gopls/internal/debug/flight.go +--- 
a/gopls/internal/debug/flight.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/debug/flight.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,162 +0,0 @@ +-// Copyright 2024 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-//go:build go1.25 +- +-package debug +- +-import ( +- "bufio" +- "fmt" +- "log" +- "net/http" +- "os" +- "os/exec" +- "path/filepath" +- "runtime/trace" +- "strings" +- "sync" +- "syscall" +- "time" +-) +- +-var ( +- traceviewersMu sync.Mutex +- traceviewers []*os.Process +- +- kill = (*os.Process).Kill // windows, plan9; UNIX impl kills whole process group +- sysProcAttr syscall.SysProcAttr // UNIX configuration to create process group +-) +- +-// KillTraceViewers kills all "go tool trace" processes started by +-// /flightrecorder requests, for use in tests (see #74668). +-func KillTraceViewers() { +- traceviewersMu.Lock() +- for _, p := range traceviewers { +- kill(p) // ignore error +- } +- traceviewers = nil +- traceviewersMu.Unlock() +-} +- +-// The FlightRecorder is a global resource, so create at most one per process. +-var getRecorder = sync.OnceValues(func() (*trace.FlightRecorder, error) { +- fr := trace.NewFlightRecorder(trace.FlightRecorderConfig{ +- // half a minute is usually enough to know "what just happened?" +- MinAge: 30 * time.Second, +- }) +- if err := fr.Start(); err != nil { +- return nil, err +- } +- return fr, nil +-}) +- +-func startFlightRecorder() (http.HandlerFunc, error) { +- fr, err := getRecorder() +- if err != nil { +- return nil, err +- } +- +- // Return a handler that writes the most recent flight record, +- // starts a trace viewer server, and redirects to it. +- return func(w http.ResponseWriter, r *http.Request) { +- errorf := func(format string, args ...any) { +- msg := fmt.Sprintf(format, args...) +- http.Error(w, msg, http.StatusInternalServerError) +- } +- +- // Write the most recent flight record into a temp file. +- f, err := os.CreateTemp("", "flightrecord") +- if err != nil { +- errorf("can't create temp file for flight record: %v", err) +- return +- } +- if _, err := fr.WriteTo(f); err != nil { +- f.Close() // ignore error +- errorf("failed to write flight record: %s", err) +- return +- } +- if err := f.Close(); err != nil { +- errorf("failed to close flight record: %s", err) +- return +- } +- tracefile, err := filepath.Abs(f.Name()) +- if err != nil { +- errorf("can't absolutize name of trace file: %v", err) +- return +- } +- +- // Run 'go tool trace' to start a new trace-viewer +- // web server process. It will run until gopls terminates. +- // (It would be nicer if we could just link it in; see #66843.) +- cmd := exec.Command("go", "tool", "trace", tracefile) +- cmd.SysProcAttr = &sysProcAttr +- +- // Don't connect trace's std{out,err} to our os.Stderr directly, +- // otherwise the child may outlive the parent in tests, +- // and 'go test' will complain about unclosed pipes. +- // Instead, interpose a pipe that will close when gopls exits. +- // See CL 677262 for a better solution (a cmd/trace flag). +- // (#66843 is of course better still.) +- // Also, this notifies us of the server's readiness and URL. +- urlC := make(chan string) +- { +- r, w, err := os.Pipe() +- if err != nil { +- errorf("can't create pipe: %v", err) +- return +- } +- go func() { +- // Copy from the pipe to stderr, +- // keeping an eye out for the "listening on URL" string. 
+- scan := bufio.NewScanner(r) +- for scan.Scan() { +- line := scan.Text() +- if _, url, ok := strings.Cut(line, "Trace viewer is listening on "); ok { +- urlC <- url +- } +- fmt.Fprintln(os.Stderr, line) +- } +- if err := scan.Err(); err != nil { +- log.Printf("reading from pipe to cmd/trace: %v", err) +- } +- }() +- cmd.Stderr = w +- cmd.Stdout = w +- } +- +- // Suppress the usual cmd/trace behavior of opening a new +- // browser tab by setting BROWSER to /usr/bin/true (a no-op). +- cmd.Env = append(os.Environ(), "BROWSER=true") +- if err := cmd.Start(); err != nil { +- errorf("failed to start trace server: %s", err) +- return +- } +- +- // Save the process so we can kill it when tests finish. +- traceviewersMu.Lock() +- traceviewers = append(traceviewers, cmd.Process) +- traceviewersMu.Unlock() +- +- // Some of the CI builders can be quite heavily loaded. +- // Give them an extra grace period. +- timeout := 10 * time.Second +- if os.Getenv("GO_BUILDER_NAME") != "" { +- timeout = 1 * time.Minute +- } +- +- select { +- case addr := <-urlC: +- // Success! Send a redirect to the new location. +- // (This URL bypasses the help screen at /.) +- http.Redirect(w, r, addr+"/trace?view=proc", 302) +- +- case <-r.Context().Done(): +- errorf("canceled") +- +- case <-time.After(timeout): +- errorf("trace viewer failed to start within %v", timeout) +- } +- }, nil +-} +diff -urN a/gopls/internal/debug/flight_go124.go b/gopls/internal/debug/flight_go124.go +--- a/gopls/internal/debug/flight_go124.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/debug/flight_go124.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,18 +0,0 @@ +-// Copyright 2025 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-//go:build !go1.25 +- +-package debug +- +-import ( +- "errors" +- "net/http" +-) +- +-func startFlightRecorder() (http.HandlerFunc, error) { +- return nil, errors.ErrUnsupported +-} +- +-func KillTraceViewers() {} +diff -urN a/gopls/internal/debug/flight_unix.go b/gopls/internal/debug/flight_unix.go +--- a/gopls/internal/debug/flight_unix.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/debug/flight_unix.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,23 +0,0 @@ +-// Copyright 2025 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-//go:build go1.25 && unix +- +-package debug +- +-import ( +- "os" +- "syscall" +-) +- +-func init() { +- // UNIX: kill the whole process group, since +- // "go tool trace" starts a cmd/trace child. +- kill = killGroup +- sysProcAttr.Setpgid = true +-} +- +-func killGroup(p *os.Process) error { +- return syscall.Kill(-p.Pid, syscall.SIGKILL) +-} +diff -urN a/gopls/internal/debug/info.go b/gopls/internal/debug/info.go +--- a/gopls/internal/debug/info.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/debug/info.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,138 +0,0 @@ +-// Copyright 2019 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-// Package debug exports debug information for gopls. 
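The flight-recorder wiring deleted above reduces to three runtime/trace calls: construct a FlightRecorder with a MinAge window, Start it once per process, and WriteTo a file on demand. A minimal, self-contained sketch of that pattern (Go 1.25+ only; the file name and log messages here are illustrative, not gopls code):

//go:build go1.25

package main

import (
	"log"
	"os"
	"runtime/trace"
	"time"
)

func main() {
	// Keep roughly the last 30 seconds of trace data in memory.
	fr := trace.NewFlightRecorder(trace.FlightRecorderConfig{MinAge: 30 * time.Second})
	if err := fr.Start(); err != nil {
		log.Fatalf("starting flight recorder: %v", err)
	}

	// ... the program does its normal work here ...

	// When something looks wrong, snapshot the buffered trace to a file
	// that can be inspected later with `go tool trace`.
	f, err := os.CreateTemp("", "flightrecord")
	if err != nil {
		log.Fatalf("creating snapshot file: %v", err)
	}
	if _, err := fr.WriteTo(f); err != nil {
		log.Fatalf("writing flight record: %v", err)
	}
	if err := f.Close(); err != nil {
		log.Fatalf("closing snapshot file: %v", err)
	}
	log.Printf("trace snapshot written to %s", f.Name())
}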
+-package debug +- +-import ( +- "bytes" +- "encoding/json" +- "fmt" +- "io" +- "os" +- "runtime" +- "runtime/debug" +- "strings" +- +- "golang.org/x/tools/gopls/internal/version" +-) +- +-type PrintMode int +- +-const ( +- PlainText = PrintMode(iota) +- Markdown +- HTML +- JSON +-) +- +-// ServerVersion is the format used by gopls to report its version to the +-// client. This format is structured so that the client can parse it easily. +-type ServerVersion struct { +- *debug.BuildInfo +- Version string +-} +- +-// VersionInfo returns the build info for the gopls process. If it was not +-// built in module mode, we return a GOPATH-specific message with the +-// hardcoded version. +-func VersionInfo() *ServerVersion { +- if info, ok := debug.ReadBuildInfo(); ok { +- return &ServerVersion{ +- Version: version.Version(), +- BuildInfo: info, +- } +- } +- return &ServerVersion{ +- Version: version.Version(), +- BuildInfo: &debug.BuildInfo{ +- Path: "gopls, built in GOPATH mode", +- GoVersion: runtime.Version(), +- }, +- } +-} +- +-// writeServerInfo writes HTML debug info to w for the instance. +-func (i *Instance) writeServerInfo(out *bytes.Buffer) { +- workDir, _ := os.Getwd() +- section(out, HTML, "server instance", func() { +- fmt.Fprintf(out, "Start time: %v\n", i.StartTime) +- fmt.Fprintf(out, "LogFile: %s\n", i.Logfile) +- fmt.Fprintf(out, "pid: %d\n", os.Getpid()) +- fmt.Fprintf(out, "Working directory: %s\n", workDir) +- fmt.Fprintf(out, "Address: %s\n", i.ServerAddress) +- fmt.Fprintf(out, "Debug address: %s\n", i.DebugAddress()) +- }) +- WriteVersionInfo(out, true, HTML) +- section(out, HTML, "Command Line", func() { +- fmt.Fprintf(out, "cmdline") +- }) +-} +- +-// WriteVersionInfo writes version information to w, using the output format +-// specified by mode. verbose controls whether additional information is +-// written, including section headers. +-func WriteVersionInfo(out *bytes.Buffer, verbose bool, mode PrintMode) { +- info := VersionInfo() +- if mode == JSON { +- writeVersionInfoJSON(out, info) +- return +- } +- +- if !verbose { +- writeBuildInfo(out, info, false, mode) +- return +- } +- section(out, mode, "Build info", func() { +- writeBuildInfo(out, info, true, mode) +- }) +-} +- +-func writeVersionInfoJSON(out *bytes.Buffer, info *ServerVersion) { +- data, err := json.MarshalIndent(info, "", "\t") +- if err != nil { +- panic(err) // can't happen +- } +- out.Write(data) +-} +- +-func section(w io.Writer, mode PrintMode, title string, body func()) { +- switch mode { +- case PlainText: +- fmt.Fprintln(w, title) +- fmt.Fprintln(w, strings.Repeat("-", len(title))) +- body() +- case Markdown: +- fmt.Fprintf(w, "#### %s\n\n```\n", title) +- body() +- fmt.Fprintf(w, "```\n") +- case HTML: +- fmt.Fprintf(w, "

<h2>%s</h2>\n<pre>\n", title)
+-		body()
+-		fmt.Fprint(w, "</pre>
\n") +- } +-} +- +-func writeBuildInfo(w io.Writer, info *ServerVersion, verbose bool, mode PrintMode) { +- fmt.Fprintf(w, "%v %v\n", info.Path, version.Version()) +- if !verbose { +- return +- } +- printModuleInfo(w, info.Main, mode) +- for _, dep := range info.Deps { +- printModuleInfo(w, *dep, mode) +- } +- fmt.Fprintf(w, "go: %v\n", info.GoVersion) +-} +- +-func printModuleInfo(w io.Writer, m debug.Module, _ PrintMode) { +- fmt.Fprintf(w, " %s@%s", m.Path, m.Version) +- if m.Sum != "" { +- fmt.Fprintf(w, " %s", m.Sum) +- } +- if m.Replace != nil { +- fmt.Fprintf(w, " => %v", m.Replace.Path) +- } +- fmt.Fprintf(w, "\n") +-} +diff -urN a/gopls/internal/debug/info_test.go b/gopls/internal/debug/info_test.go +--- a/gopls/internal/debug/info_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/debug/info_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,45 +0,0 @@ +-// Copyright 2022 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-// Package debug exports debug information for gopls. +-package debug +- +-import ( +- "bytes" +- "encoding/json" +- "runtime" +- "testing" +- +- "golang.org/x/tools/gopls/internal/version" +-) +- +-func TestPrintVersionInfoJSON(t *testing.T) { +- buf := new(bytes.Buffer) +- WriteVersionInfo(buf, true, JSON) +- res := buf.Bytes() +- +- var got ServerVersion +- if err := json.Unmarshal(res, &got); err != nil { +- t.Fatalf("unexpected output: %v\n%s", err, res) +- } +- if g, w := got.GoVersion, runtime.Version(); g != w { +- t.Errorf("go version = %v, want %v", g, w) +- } +- if g, w := got.Version, version.Version(); g != w { +- t.Errorf("gopls version = %v, want %v", g, w) +- } +- // Other fields of BuildInfo may not be available during test. +-} +- +-func TestPrintVersionInfoPlainText(t *testing.T) { +- buf := new(bytes.Buffer) +- WriteVersionInfo(buf, true, PlainText) +- res := buf.Bytes() +- +- // Other fields of BuildInfo may not be available during test. +- wantGoplsVersion, wantGoVersion := version.Version(), runtime.Version() +- if !bytes.Contains(res, []byte(wantGoplsVersion)) || !bytes.Contains(res, []byte(wantGoVersion)) { +- t.Errorf("plaintext output = %q,\nwant (version: %v, go: %v)", res, wantGoplsVersion, wantGoVersion) +- } +-} +diff -urN a/gopls/internal/debug/log/log.go b/gopls/internal/debug/log/log.go +--- a/gopls/internal/debug/log/log.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/debug/log/log.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,43 +0,0 @@ +-// Copyright 2020 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-// Package log provides helper methods for exporting log events to the +-// internal/event package. +-package log +- +-import ( +- "context" +- "fmt" +- +- label1 "golang.org/x/tools/gopls/internal/label" +- "golang.org/x/tools/internal/event" +- "golang.org/x/tools/internal/event/label" +-) +- +-// Level parameterizes log severity. +-type Level int +- +-const ( +- _ Level = iota +- Error +- Warning +- Info +- Debug +- Trace +-) +- +-// Log exports a log event labeled with level l. +-func (l Level) Log(ctx context.Context, msg string) { +- event.Log(ctx, msg, label1.Level.Of(int(l))) +-} +- +-// Logf formats and exports a log event labeled with level l. 
+-func (l Level) Logf(ctx context.Context, format string, args ...any) { +- l.Log(ctx, fmt.Sprintf(format, args...)) +-} +- +-// LabeledLevel extracts the labeled log l +-func LabeledLevel(lm label.Map) Level { +- return Level(label1.Level.Get(lm)) +-} +diff -urN a/gopls/internal/debug/metrics.go b/gopls/internal/debug/metrics.go +--- a/gopls/internal/debug/metrics.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/debug/metrics.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,58 +0,0 @@ +-// Copyright 2019 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package debug +- +-import ( +- "golang.org/x/tools/internal/event/export/metric" +- "golang.org/x/tools/internal/event/label" +- "golang.org/x/tools/internal/jsonrpc2" +-) +- +-var ( +- // the distributions we use for histograms +- bytesDistribution = []int64{1 << 10, 1 << 11, 1 << 12, 1 << 14, 1 << 16, 1 << 20} +- millisecondsDistribution = []float64{0.1, 0.5, 1, 2, 5, 10, 50, 100, 500, 1000, 5000, 10000, 50000, 100000} +- +- receivedBytes = metric.HistogramInt64{ +- Name: "received_bytes", +- Description: "Distribution of received bytes, by method.", +- Keys: []label.Key{jsonrpc2.RPCDirection, jsonrpc2.Method}, +- Buckets: bytesDistribution, +- } +- +- sentBytes = metric.HistogramInt64{ +- Name: "sent_bytes", +- Description: "Distribution of sent bytes, by method.", +- Keys: []label.Key{jsonrpc2.RPCDirection, jsonrpc2.Method}, +- Buckets: bytesDistribution, +- } +- +- latency = metric.HistogramFloat64{ +- Name: "latency", +- Description: "Distribution of latency in milliseconds, by method.", +- Keys: []label.Key{jsonrpc2.RPCDirection, jsonrpc2.Method}, +- Buckets: millisecondsDistribution, +- } +- +- started = metric.Scalar{ +- Name: "started", +- Description: "Count of RPCs started by method.", +- Keys: []label.Key{jsonrpc2.RPCDirection, jsonrpc2.Method}, +- } +- +- completed = metric.Scalar{ +- Name: "completed", +- Description: "Count of RPCs completed by method and status.", +- Keys: []label.Key{jsonrpc2.RPCDirection, jsonrpc2.Method, jsonrpc2.StatusCode}, +- } +-) +- +-func registerMetrics(m *metric.Config) { +- receivedBytes.Record(m, jsonrpc2.ReceivedBytes) +- sentBytes.Record(m, jsonrpc2.SentBytes) +- latency.Record(m, jsonrpc2.Latency) +- started.Count(m, jsonrpc2.Started) +- completed.Count(m, jsonrpc2.Latency) +-} +diff -urN a/gopls/internal/debug/rpc.go b/gopls/internal/debug/rpc.go +--- a/gopls/internal/debug/rpc.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/debug/rpc.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,239 +0,0 @@ +-// Copyright 2019 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package debug +- +-import ( +- "context" +- "fmt" +- "html/template" +- "net/http" +- "sort" +- "sync" +- "time" +- +- "golang.org/x/tools/internal/event" +- "golang.org/x/tools/internal/event/core" +- "golang.org/x/tools/internal/event/export" +- "golang.org/x/tools/internal/event/label" +- "golang.org/x/tools/internal/jsonrpc2" +-) +- +-var RPCTmpl = template.Must(template.Must(BaseTemplate.Clone()).Parse(` +-{{define "title"}}RPC Information{{end}} +-{{define "body"}} +-

Inbound

+- {{template "rpcSection" .Inbound}} +-

Outbound

+- {{template "rpcSection" .Outbound}} +-{{end}} +-{{define "rpcSection"}} +- {{range .}}

+- {{.Method}} {{.Started}} traces ({{.InProgress}} in progress) +-
+- Latency {{with .Latency}}{{.Mean}} ({{.Min}}<{{.Max}}){{end}} +- By bucket 0s {{range .Latency.Values}}{{if gt .Count 0}}{{.Count}} {{.Limit}} {{end}}{{end}} +-
+- Received {{.Received}} (avg. {{.ReceivedMean}}) +- Sent {{.Sent}} (avg. {{.SentMean}}) +-
+- Result codes {{range .Codes}}{{.Key}}={{.Count}} {{end}} +-

+- {{end}} +-{{end}} +-`)) +- +-type Rpcs struct { // exported for testing +- mu sync.Mutex +- Inbound []*rpcStats // stats for incoming lsp rpcs sorted by method name +- Outbound []*rpcStats // stats for outgoing lsp rpcs sorted by method name +-} +- +-type rpcStats struct { +- Method string +- Started int64 +- Completed int64 +- +- Latency rpcTimeHistogram +- Received byteUnits +- Sent byteUnits +- Codes []*rpcCodeBucket +-} +- +-type rpcTimeHistogram struct { +- Sum timeUnits +- Count int64 +- Min timeUnits +- Max timeUnits +- Values []rpcTimeBucket +-} +- +-type rpcTimeBucket struct { +- Limit timeUnits +- Count int64 +-} +- +-type rpcCodeBucket struct { +- Key string +- Count int64 +-} +- +-func (r *Rpcs) ProcessEvent(ctx context.Context, ev core.Event, lm label.Map) context.Context { +- r.mu.Lock() +- defer r.mu.Unlock() +- switch { +- case event.IsStart(ev): +- if _, stats := r.getRPCSpan(ctx); stats != nil { +- stats.Started++ +- } +- case event.IsEnd(ev): +- span, stats := r.getRPCSpan(ctx) +- if stats != nil { +- endRPC(span, stats) +- } +- case event.IsMetric(ev): +- sent := byteUnits(jsonrpc2.SentBytes.Get(lm)) +- rec := byteUnits(jsonrpc2.ReceivedBytes.Get(lm)) +- if sent != 0 || rec != 0 { +- if _, stats := r.getRPCSpan(ctx); stats != nil { +- stats.Sent += sent +- stats.Received += rec +- } +- } +- } +- return ctx +-} +- +-func endRPC(span *export.Span, stats *rpcStats) { +- // update the basic counts +- stats.Completed++ +- +- // get and record the status code +- if status := getStatusCode(span); status != "" { +- var b *rpcCodeBucket +- for c, entry := range stats.Codes { +- if entry.Key == status { +- b = stats.Codes[c] +- break +- } +- } +- if b == nil { +- b = &rpcCodeBucket{Key: status} +- stats.Codes = append(stats.Codes, b) +- sort.Slice(stats.Codes, func(i int, j int) bool { +- return stats.Codes[i].Key < stats.Codes[j].Key +- }) +- } +- b.Count++ +- } +- +- // calculate latency if this was an rpc span +- elapsedTime := span.Finish().At().Sub(span.Start().At()) +- latencyMillis := timeUnits(elapsedTime) / timeUnits(time.Millisecond) +- if stats.Latency.Count == 0 { +- stats.Latency.Min = latencyMillis +- stats.Latency.Max = latencyMillis +- } else { +- if stats.Latency.Min > latencyMillis { +- stats.Latency.Min = latencyMillis +- } +- if stats.Latency.Max < latencyMillis { +- stats.Latency.Max = latencyMillis +- } +- } +- stats.Latency.Count++ +- stats.Latency.Sum += latencyMillis +- for i := range stats.Latency.Values { +- if stats.Latency.Values[i].Limit > latencyMillis { +- stats.Latency.Values[i].Count++ +- break +- } +- } +-} +- +-func (r *Rpcs) getRPCSpan(ctx context.Context) (*export.Span, *rpcStats) { +- // get the span +- span := export.GetSpan(ctx) +- if span == nil { +- return nil, nil +- } +- // use the span start event look up the correct stats block +- // we do this because it prevents us matching a sub span +- return span, r.getRPCStats(span.Start()) +-} +- +-func (r *Rpcs) getRPCStats(lm label.Map) *rpcStats { +- method := jsonrpc2.Method.Get(lm) +- if method == "" { +- return nil +- } +- set := &r.Inbound +- if jsonrpc2.RPCDirection.Get(lm) != jsonrpc2.Inbound { +- set = &r.Outbound +- } +- // get the record for this method +- index := sort.Search(len(*set), func(i int) bool { +- return (*set)[i].Method >= method +- }) +- +- if index < len(*set) && (*set)[index].Method == method { +- return (*set)[index] +- } +- +- old := *set +- *set = make([]*rpcStats, len(old)+1) +- copy(*set, old[:index]) +- copy((*set)[index+1:], old[index:]) +- stats := 
&rpcStats{Method: method} +- stats.Latency.Values = make([]rpcTimeBucket, len(millisecondsDistribution)) +- for i, m := range millisecondsDistribution { +- stats.Latency.Values[i].Limit = timeUnits(m) +- } +- (*set)[index] = stats +- return stats +-} +- +-func (s *rpcStats) InProgress() int64 { return s.Started - s.Completed } +-func (s *rpcStats) SentMean() byteUnits { return s.Sent / byteUnits(s.Started) } +-func (s *rpcStats) ReceivedMean() byteUnits { return s.Received / byteUnits(s.Started) } +- +-func (h *rpcTimeHistogram) Mean() timeUnits { return h.Sum / timeUnits(h.Count) } +- +-func getStatusCode(span *export.Span) string { +- for _, ev := range span.Events() { +- if status := jsonrpc2.StatusCode.Get(ev); status != "" { +- return status +- } +- } +- return "" +-} +- +-func (r *Rpcs) getData(req *http.Request) any { +- return r +-} +- +-func units(v float64, suffixes []string) string { +- s := "" +- for _, s = range suffixes { +- n := v / 1000 +- if n < 1 { +- break +- } +- v = n +- } +- return fmt.Sprintf("%.2f%s", v, s) +-} +- +-type timeUnits float64 +- +-func (v timeUnits) String() string { +- v = v * 1000 * 1000 +- return units(float64(v), []string{"ns", "μs", "ms", "s"}) +-} +- +-type byteUnits float64 +- +-func (v byteUnits) String() string { +- return units(float64(v), []string{"B", "KB", "MB", "GB", "TB"}) +-} +diff -urN a/gopls/internal/debug/serve.go b/gopls/internal/debug/serve.go +--- a/gopls/internal/debug/serve.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/debug/serve.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,928 +0,0 @@ +-// Copyright 2019 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package debug +- +-import ( +- "bytes" +- "context" +- "errors" +- "fmt" +- "html/template" +- "io" +- stdlog "log" +- "net" +- "net/http" +- "net/http/pprof" +- "os" +- "path" +- "path/filepath" +- "runtime" +- "strconv" +- "strings" +- "sync" +- "time" +- +- "golang.org/x/tools/gopls/internal/cache" +- "golang.org/x/tools/gopls/internal/debug/log" +- "golang.org/x/tools/gopls/internal/file" +- label1 "golang.org/x/tools/gopls/internal/label" +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/gopls/internal/util/bug" +- "golang.org/x/tools/internal/event" +- "golang.org/x/tools/internal/event/core" +- "golang.org/x/tools/internal/event/export" +- "golang.org/x/tools/internal/event/export/metric" +- "golang.org/x/tools/internal/event/export/prometheus" +- "golang.org/x/tools/internal/event/keys" +- "golang.org/x/tools/internal/event/label" +-) +- +-type contextKeyType int +- +-const ( +- instanceKey contextKeyType = iota +- traceKey +-) +- +-// An Instance holds all debug information associated with a gopls instance. +-type Instance struct { +- Logfile string +- StartTime time.Time +- ServerAddress string +- +- LogWriter io.Writer +- +- exporter event.Exporter +- +- prometheus *prometheus.Exporter +- rpcs *Rpcs +- traces *traces +- State *State +- +- serveMu sync.Mutex +- debugAddress string +- listenedDebugAddress string +-} +- +-// State holds debugging information related to the server state. +-type State struct { +- mu sync.Mutex +- clients []*Client +- servers []*Server +-} +- +-func (st *State) Bugs() []bug.Bug { +- return bug.List() +-} +- +-// Caches returns the set of Cache objects currently being served. 
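getRPCStats above keeps the per-method stats slice sorted by running sort.Search and then splicing a new entry in at the returned index. The same search-then-splice idiom in isolation (a sketch using a plain string slice rather than the rpcStats type):

package main

import (
	"fmt"
	"sort"
)

// insertSorted inserts name into names, keeping the slice sorted, and
// returns the updated slice. It mirrors the sort.Search-then-splice
// pattern used by getRPCStats.
func insertSorted(names []string, name string) []string {
	i := sort.Search(len(names), func(j int) bool { return names[j] >= name })
	if i < len(names) && names[i] == name {
		return names // already present
	}
	names = append(names, "")
	copy(names[i+1:], names[i:])
	names[i] = name
	return names
}

func main() {
	s := []string{"initialize", "shutdown"}
	s = insertSorted(s, "didOpen")
	fmt.Println(s) // [didOpen initialize shutdown]
}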
+-func (st *State) Caches() []*cache.Cache { +- var caches []*cache.Cache +- seen := make(map[string]struct{}) +- for _, client := range st.Clients() { +- cache := client.Session.Cache() +- if _, found := seen[cache.ID()]; found { +- continue +- } +- seen[cache.ID()] = struct{}{} +- caches = append(caches, cache) +- } +- return caches +-} +- +-// Cache returns the Cache that matches the supplied id. +-func (st *State) Cache(id string) *cache.Cache { +- for _, c := range st.Caches() { +- if c.ID() == id { +- return c +- } +- } +- return nil +-} +- +-// Analysis returns the global Analysis template value. +-func (st *State) Analysis() (_ analysisTmpl) { return } +- +-type analysisTmpl struct{} +- +-func (analysisTmpl) AnalyzerRunTimes() []cache.LabelDuration { return cache.AnalyzerRunTimes() } +- +-// Sessions returns the set of Session objects currently being served. +-func (st *State) Sessions() []*cache.Session { +- var sessions []*cache.Session +- for _, client := range st.Clients() { +- sessions = append(sessions, client.Session) +- } +- return sessions +-} +- +-// Session returns the Session that matches the supplied id. +-func (st *State) Session(id string) *cache.Session { +- for _, s := range st.Sessions() { +- if s.ID() == id { +- return s +- } +- } +- return nil +-} +- +-// Views returns the set of View objects currently being served. +-func (st *State) Views() []*cache.View { +- var views []*cache.View +- for _, s := range st.Sessions() { +- views = append(views, s.Views()...) +- } +- return views +-} +- +-// View returns the View that matches the supplied id. +-func (st *State) View(id string) *cache.View { +- for _, s := range st.Sessions() { +- if v, err := s.View(id); err == nil { +- return v +- } +- } +- return nil // not found +-} +- +-// Clients returns the set of Clients currently being served. +-func (st *State) Clients() []*Client { +- st.mu.Lock() +- defer st.mu.Unlock() +- clients := make([]*Client, len(st.clients)) +- copy(clients, st.clients) +- return clients +-} +- +-// Client returns the Client matching the supplied id. +-func (st *State) Client(id string) *Client { +- for _, c := range st.Clients() { +- if c.Session.ID() == id { +- return c +- } +- } +- return nil +-} +- +-// Servers returns the set of Servers the instance is currently connected to. +-func (st *State) Servers() []*Server { +- st.mu.Lock() +- defer st.mu.Unlock() +- servers := make([]*Server, len(st.servers)) +- copy(servers, st.servers) +- return servers +-} +- +-// A Client is an incoming connection from a remote client. +-type Client struct { +- Session *cache.Session +- DebugAddress string +- Logfile string +- GoplsPath string +- ServerID string +- Service protocol.Server +-} +- +-// A Server is an outgoing connection to a remote LSP server. +-type Server struct { +- ID string +- DebugAddress string +- Logfile string +- GoplsPath string +- ClientID string +-} +- +-// addClient adds a client to the set being served. +-func (st *State) addClient(session *cache.Session) { +- st.mu.Lock() +- defer st.mu.Unlock() +- st.clients = append(st.clients, &Client{Session: session}) +-} +- +-// dropClient removes a client from the set being served. 
+-func (st *State) dropClient(session *cache.Session) { +- st.mu.Lock() +- defer st.mu.Unlock() +- for i, c := range st.clients { +- if c.Session == session { +- copy(st.clients[i:], st.clients[i+1:]) +- st.clients[len(st.clients)-1] = nil +- st.clients = st.clients[:len(st.clients)-1] +- return +- } +- } +-} +- +-// updateServer updates a server to the set being queried. In practice, there should +-// be at most one remote server. +-func (st *State) updateServer(server *Server) { +- st.mu.Lock() +- defer st.mu.Unlock() +- for i, existing := range st.servers { +- if existing.ID == server.ID { +- // Replace, rather than mutate, to avoid a race. +- newServers := make([]*Server, len(st.servers)) +- copy(newServers, st.servers[:i]) +- newServers[i] = server +- copy(newServers[i+1:], st.servers[i+1:]) +- st.servers = newServers +- return +- } +- } +- st.servers = append(st.servers, server) +-} +- +-// dropServer drops a server from the set being queried. +-func (st *State) dropServer(id string) { +- st.mu.Lock() +- defer st.mu.Unlock() +- for i, s := range st.servers { +- if s.ID == id { +- copy(st.servers[i:], st.servers[i+1:]) +- st.servers[len(st.servers)-1] = nil +- st.servers = st.servers[:len(st.servers)-1] +- return +- } +- } +-} +- +-// an http.ResponseWriter that filters writes +-type filterResponse struct { +- w http.ResponseWriter +- edit func([]byte) []byte +-} +- +-func (c filterResponse) Header() http.Header { +- return c.w.Header() +-} +- +-func (c filterResponse) Write(buf []byte) (int, error) { +- ans := c.edit(buf) +- return c.w.Write(ans) +-} +- +-func (c filterResponse) WriteHeader(n int) { +- c.w.WriteHeader(n) +-} +- +-// replace annoying nuls by spaces +-func cmdline(w http.ResponseWriter, r *http.Request) { +- fake := filterResponse{ +- w: w, +- edit: func(buf []byte) []byte { +- return bytes.ReplaceAll(buf, []byte{0}, []byte{' '}) +- }, +- } +- pprof.Cmdline(fake, r) +-} +- +-func (i *Instance) getCache(r *http.Request) any { +- return i.State.Cache(path.Base(r.URL.Path)) +-} +- +-func (i *Instance) getAnalysis(r *http.Request) any { +- return i.State.Analysis() +-} +- +-func (i *Instance) getSession(r *http.Request) any { +- return i.State.Session(path.Base(r.URL.Path)) +-} +- +-func (i *Instance) getClient(r *http.Request) any { +- return i.State.Client(path.Base(r.URL.Path)) +-} +- +-func (i *Instance) getServer(r *http.Request) any { +- i.State.mu.Lock() +- defer i.State.mu.Unlock() +- id := path.Base(r.URL.Path) +- for _, s := range i.State.servers { +- if s.ID == id { +- return s +- } +- } +- return nil +-} +- +-type FileWithKind interface { +- file.Handle +- Kind() file.Kind // (overlay files only) +-} +- +-// /file/{session}/{identifier}. Returns a [FileWithKind]. +-func (i *Instance) getFile(r *http.Request) any { +- s := i.State.Session(r.PathValue("session")) +- if s == nil { +- return nil +- } +- identifier := r.PathValue("identifier") +- for _, o := range s.Overlays() { +- // TODO(adonovan): understand and document this comparison. +- if o.Identity().Hash.String() == identifier { +- return o +- } +- } +- return nil +-} +- +-// /metadata/{session}/{view}. Returns a [*metadata.Graph]. 
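The filterResponse wrapper above is a small reusable idea: interpose on http.ResponseWriter so every chunk is edited before it reaches the client (here, replacing NUL bytes with spaces for /debug/pprof/cmdline). A standalone sketch of the same wrapper, exercised with httptest (the names are illustrative, not part of this patch):

package main

import (
	"bytes"
	"fmt"
	"net/http"
	"net/http/httptest"
)

// editWriter wraps an http.ResponseWriter and rewrites every chunk
// before forwarding it, like the filterResponse type above.
type editWriter struct {
	http.ResponseWriter
	edit func([]byte) []byte
}

func (w editWriter) Write(p []byte) (int, error) {
	return w.ResponseWriter.Write(w.edit(p))
}

func main() {
	h := func(w http.ResponseWriter, r *http.Request) {
		fw := editWriter{w, func(b []byte) []byte {
			return bytes.ReplaceAll(b, []byte{0}, []byte{' '})
		}}
		fw.Write([]byte("a\x00b\x00c"))
	}
	rec := httptest.NewRecorder()
	h(rec, httptest.NewRequest("GET", "/", nil))
	fmt.Printf("%q\n", rec.Body.String()) // "a b c"
}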
+-func (i *Instance) getMetadata(r *http.Request) any { +- session := i.State.Session(r.PathValue("session")) +- if session == nil { +- return nil +- } +- +- v, err := session.View(r.PathValue("view")) +- if err != nil { +- stdlog.Printf("/metadata: %v", err) +- return nil // no found +- } +- +- snapshot, release, err := v.Snapshot() +- if err != nil { +- stdlog.Printf("/metadata: failed to get latest snapshot: %v", err) +- return nil +- } +- defer release() +- return snapshot.MetadataGraph() +-} +- +-func (i *Instance) getInfo(r *http.Request) any { +- buf := &bytes.Buffer{} +- i.writeServerInfo(buf) +- return template.HTML(buf.String()) +-} +- +-func (i *Instance) AddService(s protocol.Server, session *cache.Session) { +- for _, c := range i.State.clients { +- if c.Session == session { +- c.Service = s +- return +- } +- } +- stdlog.Printf("unable to find a Client to add the protocol.Server to") +-} +- +-func getMemory(_ *http.Request) any { +- var m runtime.MemStats +- runtime.ReadMemStats(&m) +- return m +-} +- +-func init() { +- event.SetExporter(makeGlobalExporter(os.Stderr)) +-} +- +-func GetInstance(ctx context.Context) *Instance { +- if ctx == nil { +- return nil +- } +- v := ctx.Value(instanceKey) +- if v == nil { +- return nil +- } +- return v.(*Instance) +-} +- +-// WithInstance creates debug instance ready for use using the supplied +-// configuration and stores it in the returned context. +-func WithInstance(ctx context.Context) context.Context { +- i := &Instance{ +- StartTime: time.Now(), +- } +- i.LogWriter = os.Stderr +- i.prometheus = prometheus.New() +- i.rpcs = &Rpcs{} +- i.traces = &traces{} +- i.State = &State{} +- i.exporter = makeInstanceExporter(i) +- return context.WithValue(ctx, instanceKey, i) +-} +- +-// SetLogFile sets the logfile for use with this instance. +-func (i *Instance) SetLogFile(logfile string, isDaemon bool) (func(), error) { +- // TODO: probably a better solution for deferring closure to the caller would +- // be for the debug instance to itself be closed, but this fixes the +- // immediate bug of logs not being captured. +- closeLog := func() {} +- if logfile != "" { +- if logfile == "auto" { +- if isDaemon { +- logfile = filepath.Join(os.TempDir(), fmt.Sprintf("gopls-daemon-%d.log", os.Getpid())) +- } else { +- logfile = filepath.Join(os.TempDir(), fmt.Sprintf("gopls-%d.log", os.Getpid())) +- } +- } +- f, err := os.Create(logfile) +- if err != nil { +- return nil, fmt.Errorf("unable to create log file: %w", err) +- } +- closeLog = func() { +- defer f.Close() +- } +- stdlog.SetOutput(io.MultiWriter(os.Stderr, f)) +- i.LogWriter = f +- } +- i.Logfile = logfile +- return closeLog, nil +-} +- +-// Serve starts and runs a debug server in the background on the given addr. +-// It also logs the port the server starts on, to allow for :0 auto assigned +-// ports. +-func (i *Instance) Serve(ctx context.Context, addr string) (string, error) { +- stdlog.SetFlags(stdlog.Lshortfile) +- if addr == "" { +- return "", nil +- } +- i.serveMu.Lock() +- defer i.serveMu.Unlock() +- +- if i.listenedDebugAddress != "" { +- // Already serving. Return the bound address. 
+- return i.listenedDebugAddress, nil +- } +- +- i.debugAddress = addr +- listener, err := net.Listen("tcp", i.debugAddress) +- if err != nil { +- return "", err +- } +- i.listenedDebugAddress = listener.Addr().String() +- +- port := listener.Addr().(*net.TCPAddr).Port +- if strings.HasSuffix(i.debugAddress, ":0") { +- stdlog.Printf("debug server listening at http://localhost:%d", port) +- } +- event.Log(ctx, "Debug serving", label1.Port.Of(port)) +- go func() { +- mux := http.NewServeMux() +- mux.HandleFunc("/", render(MainTmpl, func(*http.Request) any { return i })) +- mux.HandleFunc("/debug/", render(DebugTmpl, nil)) +- mux.HandleFunc("/debug/pprof/", pprof.Index) +- mux.HandleFunc("/debug/pprof/cmdline", cmdline) +- mux.HandleFunc("/debug/pprof/profile", pprof.Profile) +- mux.HandleFunc("/debug/pprof/symbol", pprof.Symbol) +- mux.HandleFunc("/debug/pprof/trace", pprof.Trace) +- +- if h, err := startFlightRecorder(); err != nil { +- stdlog.Printf("failed to start flight recorder: %v", err) // e.g. go1.24 +- } else { +- mux.HandleFunc("/flightrecorder", h) +- } +- +- if i.prometheus != nil { +- mux.HandleFunc("/metrics/", i.prometheus.Serve) +- } +- if i.rpcs != nil { +- mux.HandleFunc("/rpc/", render(RPCTmpl, i.rpcs.getData)) +- } +- if i.traces != nil { +- mux.HandleFunc("/trace/", render(TraceTmpl, i.traces.getData)) +- } +- mux.HandleFunc("/analysis/", render(AnalysisTmpl, i.getAnalysis)) +- mux.HandleFunc("/cache/", render(CacheTmpl, i.getCache)) +- mux.HandleFunc("/session/", render(SessionTmpl, i.getSession)) +- mux.HandleFunc("/client/", render(ClientTmpl, i.getClient)) +- mux.HandleFunc("/server/", render(ServerTmpl, i.getServer)) +- mux.HandleFunc("/file/{session}/{identifier}", render(FileTmpl, i.getFile)) +- mux.HandleFunc("/metadata/{session}/{view}/", render(MetadataTmpl, i.getMetadata)) +- mux.HandleFunc("/info", render(InfoTmpl, i.getInfo)) +- mux.HandleFunc("/memory", render(MemoryTmpl, getMemory)) +- +- // Internal debugging helpers. +- mux.HandleFunc("/gc", func(w http.ResponseWriter, r *http.Request) { +- runtime.GC() +- runtime.GC() +- runtime.GC() +- http.Redirect(w, r, "/memory", http.StatusTemporaryRedirect) +- }) +- mux.HandleFunc("/_makeabug", func(w http.ResponseWriter, r *http.Request) { +- bug.Report("bug here") +- http.Error(w, "made a bug", http.StatusOK) +- }) +- +- err := http.Serve(listener, mux) // always non-nil +- event.Error(ctx, "Debug server failed", err) +- }() +- return i.listenedDebugAddress, nil +-} +- +-func (i *Instance) DebugAddress() string { +- i.serveMu.Lock() +- defer i.serveMu.Unlock() +- return i.debugAddress +-} +- +-func (i *Instance) ListenedDebugAddress() string { +- i.serveMu.Lock() +- defer i.serveMu.Unlock() +- return i.listenedDebugAddress +-} +- +-func makeGlobalExporter(stderr io.Writer) event.Exporter { +- p := export.Printer{} +- var pMu sync.Mutex +- return func(ctx context.Context, ev core.Event, lm label.Map) context.Context { +- i := GetInstance(ctx) +- +- if event.IsLog(ev) { +- // Don't log context cancellation errors. +- if err := keys.Err.Get(ev); errors.Is(err, context.Canceled) { +- return ctx +- } +- // Make sure any log messages without an instance go to stderr. +- if i == nil { +- pMu.Lock() +- p.WriteEvent(stderr, ev, lm) +- pMu.Unlock() +- } +- level := log.LabeledLevel(lm) +- // Exclude trace logs from LSP logs. 
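Serve above accepts an address ending in ":0" and then reports the port the kernel actually assigned; the trick is simply to read the port back from the listener. The same step in isolation (a sketch, not gopls code):

package main

import (
	"fmt"
	"log"
	"net"
)

func main() {
	// Ask the OS for any free port, as the debug server does when the
	// configured address ends in ":0".
	ln, err := net.Listen("tcp", "localhost:0")
	if err != nil {
		log.Fatal(err)
	}
	defer ln.Close()
	port := ln.Addr().(*net.TCPAddr).Port
	fmt.Printf("debug server would be reachable at http://localhost:%d\n", port)
}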
+- if level < log.Trace { +- ctx = protocol.LogEvent(ctx, ev, lm, messageType(level)) +- } +- } +- if i == nil { +- return ctx +- } +- return i.exporter(ctx, ev, lm) +- } +-} +- +-func messageType(l log.Level) protocol.MessageType { +- switch l { +- case log.Error: +- return protocol.Error +- case log.Warning: +- return protocol.Warning +- case log.Debug: +- return protocol.Log +- } +- return protocol.Info +-} +- +-func makeInstanceExporter(i *Instance) event.Exporter { +- exporter := func(ctx context.Context, ev core.Event, lm label.Map) context.Context { +- if i.prometheus != nil { +- ctx = i.prometheus.ProcessEvent(ctx, ev, lm) +- } +- if i.rpcs != nil { +- ctx = i.rpcs.ProcessEvent(ctx, ev, lm) +- } +- if i.traces != nil { +- ctx = i.traces.ProcessEvent(ctx, ev, lm) +- } +- if event.IsLog(ev) { +- if s := cache.KeyCreateSession.Get(ev); s != nil { +- i.State.addClient(s) +- } +- if sid := label1.NewServer.Get(ev); sid != "" { +- i.State.updateServer(&Server{ +- ID: sid, +- Logfile: label1.Logfile.Get(ev), +- DebugAddress: label1.DebugAddress.Get(ev), +- GoplsPath: label1.GoplsPath.Get(ev), +- ClientID: label1.ClientID.Get(ev), +- }) +- } +- if s := cache.KeyShutdownSession.Get(ev); s != nil { +- i.State.dropClient(s) +- } +- if sid := label1.EndServer.Get(ev); sid != "" { +- i.State.dropServer(sid) +- } +- if s := cache.KeyUpdateSession.Get(ev); s != nil { +- if c := i.State.Client(s.ID()); c != nil { +- c.DebugAddress = label1.DebugAddress.Get(ev) +- c.Logfile = label1.Logfile.Get(ev) +- c.ServerID = label1.ServerID.Get(ev) +- c.GoplsPath = label1.GoplsPath.Get(ev) +- } +- } +- } +- return ctx +- } +- // StdTrace must be above export.Spans below (by convention, export +- // middleware applies its wrapped exporter last). +- exporter = StdTrace(exporter) +- metrics := metric.Config{} +- registerMetrics(&metrics) +- exporter = metrics.Exporter(exporter) +- exporter = export.Spans(exporter) +- exporter = export.Labels(exporter) +- return exporter +-} +- +-type dataFunc func(*http.Request) any +- +-func render(tmpl *template.Template, fun dataFunc) func(http.ResponseWriter, *http.Request) { +- return func(w http.ResponseWriter, r *http.Request) { +- var data any +- if fun != nil { +- data = fun(r) +- } +- if err := tmpl.Execute(w, data); err != nil { +- event.Error(context.Background(), "", err) +- http.Error(w, err.Error(), http.StatusInternalServerError) +- } +- } +-} +- +-func commas(s string) string { +- for i := len(s); i > 3; { +- i -= 3 +- s = s[:i] + "," + s[i:] +- } +- return s +-} +- +-func fuint64(v uint64) string { +- return commas(strconv.FormatUint(v, 10)) +-} +- +-func fuint32(v uint32) string { +- return commas(strconv.FormatUint(uint64(v), 10)) +-} +- +-func fcontent(v []byte) string { +- return string(v) +-} +- +-var BaseTemplate = template.Must(template.New("").Parse(` +- +- +-{{template "title" .}} +- +-{{block "head" .}}{{end}} +- +- +-Main +-Info +-Memory +-Profiling +-Metrics +-RPC +-Trace +-Flight recorder +-Analysis +-
<hr>
+-<h1>{{template "title" .}}</h1>
+-{{block "body" .}} +-Unknown page +-{{end}} +- +- +- +-{{define "cachelink"}}Cache {{.}}{{end}} +-{{define "clientlink"}}Client {{.}}{{end}} +-{{define "serverlink"}}Server {{.}}{{end}} +-{{define "sessionlink"}}Session {{.}}{{end}} +-`)).Funcs(template.FuncMap{ +- "fuint64": fuint64, +- "fuint32": fuint32, +- "fcontent": fcontent, +- "localAddress": func(s string) string { +- // Try to translate loopback addresses to localhost, both for cosmetics and +- // because unspecified ipv6 addresses can break links on Windows. +- // +- // TODO(rfindley): In the future, it would be better not to assume the +- // server is running on localhost, and instead construct this address using +- // the remote host. +- host, port, err := net.SplitHostPort(s) +- if err != nil { +- return s +- } +- ip := net.ParseIP(host) +- if ip == nil { +- return s +- } +- if ip.IsLoopback() || ip.IsUnspecified() { +- return "localhost:" + port +- } +- return s +- }, +- // TODO(rfindley): re-enable option inspection. +- // "options": func(s *cache.Session) []sessionOption { +- // return showOptions(s.Options()) +- // }, +-}) +- +-var MainTmpl = template.Must(template.Must(BaseTemplate.Clone()).Parse(` +-{{define "title"}}Gopls server information{{end}} +-{{define "body"}} +-

<h2>Caches</h2>
+-<ul>{{range .State.Caches}}<li>{{template "cachelink" .ID}}</li>{{end}}</ul>
+-<h2>Sessions</h2>
+-<ul>{{range .State.Sessions}}<li>{{template "sessionlink" .ID}} from {{template "cachelink" .Cache.ID}}</li>{{end}}</ul>
+-<h2>Clients</h2>
+-<ul>{{range .State.Clients}}<li>{{template "clientlink" .Session.ID}}</li>{{end}}</ul>
+-<h2>Servers</h2>
+-<ul>{{range .State.Servers}}<li>{{template "serverlink" .ID}}</li>{{end}}</ul>
+-<h2>Bug reports</h2>
+-<dl>{{range .State.Bugs}}<dt>{{.Key}}</dt><dd>{{.Description}}</dd>{{end}}</dl>
+-{{end}} +-`)) +- +-var InfoTmpl = template.Must(template.Must(BaseTemplate.Clone()).Parse(` +-{{define "title"}}Gopls version information{{end}} +-{{define "body"}} +-{{.}} +-{{end}} +-`)) +- +-var MemoryTmpl = template.Must(template.Must(BaseTemplate.Clone()).Parse(` +-{{define "title"}}Gopls memory usage{{end}} +-{{define "head"}}{{end}} +-{{define "body"}} +-
+-

Stats

+- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +-
Allocated bytes{{fuint64 .HeapAlloc}}
Total allocated bytes{{fuint64 .TotalAlloc}}
System bytes{{fuint64 .Sys}}
Heap system bytes{{fuint64 .HeapSys}}
Malloc calls{{fuint64 .Mallocs}}
Frees{{fuint64 .Frees}}
Idle heap bytes{{fuint64 .HeapIdle}}
In use bytes{{fuint64 .HeapInuse}}
Released to system bytes{{fuint64 .HeapReleased}}
Heap object count{{fuint64 .HeapObjects}}
Stack in use bytes{{fuint64 .StackInuse}}
Stack from system bytes{{fuint64 .StackSys}}
Bucket hash bytes{{fuint64 .BuckHashSys}}
GC metadata bytes{{fuint64 .GCSys}}
Off heap bytes{{fuint64 .OtherSys}}
+-

By size

+- +- +-{{range .BySize}}{{end}} +-
SizeMallocsFrees
{{fuint32 .Size}}{{fuint64 .Mallocs}}{{fuint64 .Frees}}
+-{{end}} +-`)) +- +-var DebugTmpl = template.Must(template.Must(BaseTemplate.Clone()).Parse(` +-{{define "title"}}GoPls Debug pages{{end}} +-{{define "body"}} +-Profiling +-{{end}} +-`)) +- +-var CacheTmpl = template.Must(template.Must(BaseTemplate.Clone()).Parse(` +-{{define "title"}}Cache {{.ID}}{{end}} +-{{define "body"}} +-

<h2>memoize.Store entries</h2>
+-<ul>{{range $k,$v := .MemStats}}<li>{{$k}} - {{$v}}</li>{{end}}</ul>
+-<h2>File stats</h2>
+-<p>
+-{{- $stats := .FileStats -}}
+-Total: {{$stats.Total}}<br>
+-Largest: {{$stats.Largest}}<br>
+-Errors: {{$stats.Errs}}<br>
+-</p>
+-{{end}} +-`)) +- +-var AnalysisTmpl = template.Must(template.Must(BaseTemplate.Clone()).Parse(` +-{{define "title"}}Analysis{{end}} +-{{define "body"}} +-

<h2>Analyzer.Run times</h2>
+-<ul>{{range .AnalyzerRunTimes}}<li>{{.Duration}} {{.Label}}</li>{{end}}</ul>
+-{{end}} +-`)) +- +-var ClientTmpl = template.Must(template.Must(BaseTemplate.Clone()).Parse(` +-{{define "title"}}Client {{.Session.ID}}{{end}} +-{{define "body"}} +-Using session: {{template "sessionlink" .Session.ID}}
+-{{if .DebugAddress}}Debug this client at: {{localAddress .DebugAddress}}
{{end}} +-Logfile: {{.Logfile}}
+-Gopls Path: {{.GoplsPath}}
+-{{end}} +-`)) +- +-var ServerTmpl = template.Must(template.Must(BaseTemplate.Clone()).Parse(` +-{{define "title"}}Server {{.ID}}{{end}} +-{{define "body"}} +-{{if .DebugAddress}}Debug this server at: {{localAddress .DebugAddress}}
{{end}} +-Logfile: {{.Logfile}}
+-Gopls Path: {{.GoplsPath}}
+-{{end}} +-`)) +- +-var SessionTmpl = template.Must(template.Must(BaseTemplate.Clone()).Parse(` +-{{define "title"}}Session {{.ID}}{{end}} +-{{define "body"}} +-From: {{template "cachelink" .Cache.ID}}
+-{{- $session := . -}} +- +-

Views

+-
    {{range .Views}} +-{{- $envOverlay := .EnvOverlay -}} +-
  • ID: {{.ID}}
    +-Type: {{.Type}}
    +-Root: {{.Root}}
    +-{{- if $envOverlay}} +-Env overlay: {{$envOverlay}})
    +-{{end -}} +-Folder.Name: {{.Folder.Name}}
    +-Folder.Dir: {{.Folder.Dir}}
    +-Latest metadata
    +- +-Settings:
    +-
      +-{{range .Folder.Options.Debug}}
    • {{.}}
    • +-{{end}} +-
    +-{{end}}
+- +-

Overlays

+-{{$session := .}} +- +-{{end}} +-`)) +- +-// For /file endpoint; operand is [FileWithKind]. +-var FileTmpl = template.Must(template.Must(BaseTemplate.Clone()).Parse(` +-{{define "title"}}Overlay {{.Identity.Hash}}{{end}} +-{{define "body"}} +-{{with .}} +- URI: {{.URI}}
+- Identifier: {{.Identity.Hash}}
+- Version: {{.Version}}
+- Kind: {{.Kind}}
+-{{end}} +-

<h3>Contents</h3>
+-<pre>{{fcontent .Content}}</pre>
+-{{end}} +-`)) +- +-// For /metadata endpoint; operand is [*metadata.Graph]. +-var MetadataTmpl = template.Must(template.Must(BaseTemplate.Clone()).Parse(` +-{{define "title"}}Metadata graph{{end}} +-{{define "body"}} +- +-

↓ Index by file

+- +-

Packages ({{len .Packages}})

+-
    +-{{range $id, $pkg := .Packages}} +-
  • {{$id}} +-{{with $pkg}} +-
      +-
    • Name: {{.Name}}
    • +-
    • PkgPath: {{printf "%q" .PkgPath}}
    • +- {{if .Module}}
    • Module: {{printf "%#v" .Module}}
    • {{end}} +- {{if .ForTest}}
    • ForTest: {{.ForTest}}
    • {{end}} +- {{if .Standalone}}
    • Standalone
    • {{end}} +- {{if .Errors}}
    • Errors: {{.Errors}}
    • {{end}} +- {{if .DepsErrors}}
    • DepsErrors: {{.DepsErrors}}
    • {{end}} +-
    • LoadDir: {{.LoadDir}}
    • +-
    • DepsByImpPath +-
        +- {{range $path, $id := .DepsByImpPath}} +-
      • {{if $id}}{{printf "%q" $path}}{{else}}⚠️ {{printf "%q" $path}} missing{{end}}
      • +- {{end}} +-
      +-
    • +- {{if .GoFiles}}
    • GoFiles:
        {{range .GoFiles}}
      • {{.}}
      • {{end}}
    • {{end}} +- {{if .CompiledGoFiles}}
    • CompiledGoFiles:
        {{range .CompiledGoFiles}}
      • {{.}}
      • {{end}}
    • {{end}} +- {{if .IgnoredFiles}}
    • IgnoredFiles:
        {{range .IgnoredFiles}}
      • {{.}}
      • {{end}}
    • {{end}} +- {{if .OtherFiles}}
    • OtherFiles:
        {{range .OtherFiles}}
      • {{.}}
      • {{end}}
    • {{end}} +- +-
    +-{{end}} +-
  • +-{{end}} +-
+- +-

Files

+-
    +-{{range $uri, $pkgs := .ForFile}}
  • {{$uri}} →{{range $pkgs}} {{.ID}}{{end}}
  • {{end}} +-
+- +-{{end}} +-`)) +diff -urN a/gopls/internal/debug/template_test.go b/gopls/internal/debug/template_test.go +--- a/gopls/internal/debug/template_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/debug/template_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,148 +0,0 @@ +-// Copyright 2020 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package debug_test +- +-// Provide 'static type checking' of the templates. This guards against changes in various +-// gopls datastructures causing template execution to fail. The checking is done by +-// the github.com/jba/templatecheck package. Before that is run, the test checks that +-// its list of templates and their arguments corresponds to the arguments in +-// calls to render(). The test assumes that all uses of templates are done through render(). +- +-import ( +- "go/ast" +- "html/template" +- "os" +- "runtime" +- "strings" +- "testing" +- +- "github.com/jba/templatecheck" +- "golang.org/x/tools/go/packages" +- "golang.org/x/tools/gopls/internal/cache" +- "golang.org/x/tools/gopls/internal/cache/metadata" +- "golang.org/x/tools/gopls/internal/debug" +- "golang.org/x/tools/gopls/internal/util/moremaps" +- "golang.org/x/tools/internal/testenv" +-) +- +-var templates = map[string]struct { +- tmpl *template.Template +- data any // a value of the needed type +-}{ +- "MainTmpl": {debug.MainTmpl, &debug.Instance{}}, +- "DebugTmpl": {debug.DebugTmpl, nil}, +- "RPCTmpl": {debug.RPCTmpl, &debug.Rpcs{}}, +- "TraceTmpl": {debug.TraceTmpl, debug.TraceResults{}}, +- "CacheTmpl": {debug.CacheTmpl, &cache.Cache{}}, +- "SessionTmpl": {debug.SessionTmpl, &cache.Session{}}, +- "ClientTmpl": {debug.ClientTmpl, &debug.Client{}}, +- "ServerTmpl": {debug.ServerTmpl, &debug.Server{}}, +- "FileTmpl": {debug.FileTmpl, *new(debug.FileWithKind)}, +- "MetadataTmpl": {debug.MetadataTmpl, &metadata.Graph{}}, +- "InfoTmpl": {debug.InfoTmpl, "something"}, +- "MemoryTmpl": {debug.MemoryTmpl, runtime.MemStats{}}, +- "AnalysisTmpl": {debug.AnalysisTmpl, new(debug.State).Analysis()}, +-} +- +-func TestTemplates(t *testing.T) { +- testenv.NeedsGoPackages(t) +- testenv.NeedsLocalXTools(t) +- +- cfg := &packages.Config{ +- Mode: packages.NeedTypes | packages.NeedSyntax | packages.NeedTypesInfo, +- } +- cfg.Env = os.Environ() +- cfg.Env = append(cfg.Env, +- "GOPACKAGESDRIVER=off", +- "GOWORK=off", // necessary for -mod=mod below +- "GOFLAGS=-mod=mod", +- ) +- +- pkgs, err := packages.Load(cfg, "golang.org/x/tools/gopls/internal/debug") +- if err != nil { +- t.Fatal(err) +- } +- if len(pkgs) != 1 { +- t.Fatalf("expected a single package, but got %d", len(pkgs)) +- } +- p := pkgs[0] +- if len(p.Errors) != 0 { +- t.Fatalf("compiler error, e.g. 
%v", p.Errors[0]) +- } +- // find the calls to render in serve.go +- tree := treeOf(p, "serve.go") +- if tree == nil { +- t.Fatalf("found no syntax tree for %s", "serve.go") +- } +- renders := callsOf(tree, "render") +- if len(renders) == 0 { +- t.Fatalf("found no calls to render") +- } +- var found = make(map[string]bool) +- for _, r := range renders { +- if len(r.Args) != 2 { +- // template, func +- t.Fatalf("got %d args, expected 2", len(r.Args)) +- } +- t0, ok := p.TypesInfo.Types[r.Args[0]] +- if !ok || !t0.IsValue() || t0.Type.String() != "*html/template.Template" { +- t.Fatalf("no type info for template") +- } +- if id, ok := r.Args[0].(*ast.Ident); !ok { +- t.Errorf("expected *ast.Ident, got %T", r.Args[0]) +- } else { +- found[id.Name] = true +- } +- } +- // make sure found and templates have the same templates +- for k := range found { +- if _, ok := templates[k]; !ok { +- t.Errorf("code has template %s, but test does not", k) +- } +- } +- for k := range templates { +- if _, ok := found[k]; !ok { +- t.Errorf("test has template %s, code does not", k) +- } +- } +- // now check all the known templates, in alphabetic order, for determinacy +- for k, v := range moremaps.Sorted(templates) { +- // the FuncMap is an annoyance; should not be necessary +- if err := templatecheck.CheckHTML(v.tmpl, v.data); err != nil { +- t.Errorf("%s: %v", k, err) +- continue +- } +- t.Logf("%s ok", k) +- } +-} +- +-func callsOf(tree *ast.File, name string) []*ast.CallExpr { +- var ans []*ast.CallExpr +- f := func(n ast.Node) bool { +- x, ok := n.(*ast.CallExpr) +- if !ok { +- return true +- } +- if y, ok := x.Fun.(*ast.Ident); ok { +- if y.Name == name { +- ans = append(ans, x) +- } +- } +- return true +- } +- ast.Inspect(tree, f) +- return ans +-} +- +-func treeOf(p *packages.Package, fname string) *ast.File { +- for _, tree := range p.Syntax { +- loc := tree.Package +- pos := p.Fset.PositionFor(loc, false) +- if strings.HasSuffix(pos.Filename, fname) { +- return tree +- } +- } +- return nil +-} +diff -urN a/gopls/internal/debug/trace.go b/gopls/internal/debug/trace.go +--- a/gopls/internal/debug/trace.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/debug/trace.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,321 +0,0 @@ +-// Copyright 2019 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package debug +- +-import ( +- "bytes" +- "context" +- "fmt" +- "html/template" +- "net/http" +- "runtime/trace" +- "slices" +- "sort" +- "strings" +- "sync" +- "time" +- +- "golang.org/x/tools/internal/event" +- "golang.org/x/tools/internal/event/core" +- "golang.org/x/tools/internal/event/export" +- "golang.org/x/tools/internal/event/label" +-) +- +-// TraceTmpl extends BaseTemplate and renders a TraceResults, e.g. from getData(). +-var TraceTmpl = template.Must(template.Must(BaseTemplate.Clone()).Parse(` +-{{define "title"}}Trace Information{{end}} +-{{define "body"}} +- {{range .Traces}}{{.Name}} last: {{.Last.Duration}}, longest: {{.Longest.Duration}}
{{end}} +- {{if .Selected}} +-

{{.Selected.Name}}

+- {{if .Selected.Last}}

Last

    {{template "completeSpan" .Selected.Last}}
{{end}} +- {{if .Selected.Longest}}

Longest

    {{template "completeSpan" .Selected.Longest}}
{{end}} +- {{end}} +- +-

Recent spans (oldest first)

+-

+- A finite number of recent span start/end times are shown below. +- The nesting represents the children of a parent span (and the log events within a span). +- A span may appear twice: chronologically at toplevel, and nested within its parent. +-

+-
    {{range .Recent}}{{template "spanStartEnd" .}}{{end}}
+-{{end}} +-{{define "spanStartEnd"}} +- {{if .Start}} +-
  • {{.Span.Header .Start}}
  • +- {{else}} +- {{template "completeSpan" .Span}} +- {{end}} +-{{end}} +-{{define "completeSpan"}} +-
  • {{.Header false}}
  • +- {{if .Events}}
      {{range .Events}}
    • {{.Header}}
    • {{end}}
    {{end}} +- {{if .ChildStartEnd}}
      {{range .ChildStartEnd}}{{template "spanStartEnd" .}}{{end}}
    {{end}} +-{{end}} +-`)) +- +-type traces struct { +- mu sync.Mutex +- sets map[string]*traceSet +- unfinished map[export.SpanContext]*traceSpan +- recent []spanStartEnd +- recentEvictions int +-} +- +-// A spanStartEnd records the start or end of a span. +-// If Start, the span may be unfinished, so some fields (e.g. Finish) +-// may be unset and others (e.g. Events) may be being actively populated. +-type spanStartEnd struct { +- Start bool +- Span *traceSpan +-} +- +-func (ev spanStartEnd) Time() time.Time { +- if ev.Start { +- return ev.Span.Start +- } else { +- return ev.Span.Finish +- } +-} +- +-// A TraceResults is the subject for the /trace HTML template. +-type TraceResults struct { // exported for testing +- Traces []*traceSet +- Selected *traceSet +- Recent []spanStartEnd +-} +- +-// A traceSet holds two representative spans of a given span name. +-type traceSet struct { +- Name string +- Last *traceSpan +- Longest *traceSpan +-} +- +-// A traceSpan holds information about a single span. +-type traceSpan struct { +- TraceID export.TraceID +- SpanID export.SpanID +- ParentID export.SpanID +- Name string +- Start time.Time +- Finish time.Time // set at end +- Duration time.Duration // set at end +- Tags string +- Events []traceEvent // set at end +- ChildStartEnd []spanStartEnd // populated while active +- +- parent *traceSpan +-} +- +-const timeFormat = "15:04:05.000" +- +-// Header renders the time, name, tags, and (if !start), +-// duration of a span start or end event. +-func (span *traceSpan) Header(start bool) string { +- if start { +- return fmt.Sprintf("%s start %s %s", +- span.Start.Format(timeFormat), span.Name, span.Tags) +- } else { +- return fmt.Sprintf("%s end %s (+%s) %s", +- span.Finish.Format(timeFormat), span.Name, span.Duration, span.Tags) +- } +-} +- +-type traceEvent struct { +- Time time.Time +- Offset time.Duration // relative to start of span +- Tags string +-} +- +-func (ev traceEvent) Header() string { +- return fmt.Sprintf("%s event (+%s) %s", ev.Time.Format(timeFormat), ev.Offset, ev.Tags) +-} +- +-func StdTrace(exporter event.Exporter) event.Exporter { +- return func(ctx context.Context, ev core.Event, lm label.Map) context.Context { +- span := export.GetSpan(ctx) +- if span == nil { +- return exporter(ctx, ev, lm) +- } +- switch { +- case event.IsStart(ev): +- if span.ParentID.IsValid() { +- region := trace.StartRegion(ctx, span.Name) +- ctx = context.WithValue(ctx, traceKey, region) +- } else { +- var task *trace.Task +- ctx, task = trace.NewTask(ctx, span.Name) +- ctx = context.WithValue(ctx, traceKey, task) +- } +- // Log the start event as it may contain useful labels. +- msg := formatEvent(ev, lm) +- trace.Log(ctx, "start", msg) +- case event.IsLog(ev): +- category := "" +- if event.IsError(ev) { +- category = "error" +- } +- msg := formatEvent(ev, lm) +- trace.Log(ctx, category, msg) +- case event.IsEnd(ev): +- if v := ctx.Value(traceKey); v != nil { +- v.(interface{ End() }).End() +- } +- } +- return exporter(ctx, ev, lm) +- } +-} +- +-func formatEvent(ev core.Event, lm label.Map) string { +- buf := &bytes.Buffer{} +- p := export.Printer{} +- p.WriteEvent(buf, ev, lm) +- return buf.String() +-} +- +-func (t *traces) ProcessEvent(ctx context.Context, ev core.Event, lm label.Map) context.Context { +- span := export.GetSpan(ctx) +- if span == nil { +- return ctx +- } +- +- switch { +- case event.IsStart(ev): +- // Just starting: add it to the unfinished map. +- // Allocate before the critical section. 
+- td := &traceSpan{ +- TraceID: span.ID.TraceID, +- SpanID: span.ID.SpanID, +- ParentID: span.ParentID, +- Name: span.Name, +- Start: span.Start().At(), +- Tags: renderLabels(span.Start()), +- } +- +- t.mu.Lock() +- defer t.mu.Unlock() +- +- t.addRecentLocked(td, true) // add start event +- +- if t.sets == nil { +- t.sets = make(map[string]*traceSet) +- t.unfinished = make(map[export.SpanContext]*traceSpan) +- } +- t.unfinished[span.ID] = td +- +- // Wire up parents if we have them. +- if span.ParentID.IsValid() { +- parentID := export.SpanContext{TraceID: span.ID.TraceID, SpanID: span.ParentID} +- if parent, ok := t.unfinished[parentID]; ok { +- td.parent = parent +- parent.ChildStartEnd = append(parent.ChildStartEnd, spanStartEnd{true, td}) +- } +- } +- +- case event.IsEnd(ev): +- // Finishing: must be already in the map. +- // Allocate events before the critical section. +- events := span.Events() +- tdEvents := make([]traceEvent, len(events)) +- for i, event := range events { +- tdEvents[i] = traceEvent{ +- Time: event.At(), +- Tags: renderLabels(event), +- } +- } +- +- t.mu.Lock() +- defer t.mu.Unlock() +- td, found := t.unfinished[span.ID] +- if !found { +- return ctx // if this happens we are in a bad place +- } +- delete(t.unfinished, span.ID) +- td.Finish = span.Finish().At() +- td.Duration = span.Finish().At().Sub(span.Start().At()) +- td.Events = tdEvents +- t.addRecentLocked(td, false) // add end event +- +- set, ok := t.sets[span.Name] +- if !ok { +- set = &traceSet{Name: span.Name} +- t.sets[span.Name] = set +- } +- set.Last = td +- if set.Longest == nil || set.Last.Duration > set.Longest.Duration { +- set.Longest = set.Last +- } +- if td.parent != nil { +- td.parent.ChildStartEnd = append(td.parent.ChildStartEnd, spanStartEnd{false, td}) +- } else { +- fillOffsets(td, td.Start) +- } +- } +- return ctx +-} +- +-// addRecentLocked appends a start or end event to the "recent" log, +-// evicting an old entry if necessary. +-func (t *traces) addRecentLocked(span *traceSpan, start bool) { +- t.recent = append(t.recent, spanStartEnd{Start: start, Span: span}) +- +- const maxRecent = 100 // number of log entries before eviction +- for len(t.recent) > maxRecent { +- t.recent[0] = spanStartEnd{} // aid GC +- t.recent = t.recent[1:] +- t.recentEvictions++ +- +- // Using a slice as a FIFO queue leads to unbounded growth +- // as Go's GC cannot collect the ever-growing unused prefix. +- // So, compact it periodically. +- if t.recentEvictions%maxRecent == 0 { +- t.recent = slices.Clone(t.recent) +- } +- } +-} +- +-// getData returns the TraceResults rendered by TraceTmpl for the /trace[/name] endpoint. +-func (t *traces) getData(req *http.Request) any { +- // TODO(adonovan): the HTTP request doesn't acquire the mutex +- // for t or for each span! Audit and fix. +- +- // Sort last/longest sets by name. 
+- traces := make([]*traceSet, 0, len(t.sets)) +- for _, set := range t.sets { +- traces = append(traces, set) +- } +- sort.Slice(traces, func(i, j int) bool { +- return traces[i].Name < traces[j].Name +- }) +- +- return TraceResults{ +- Traces: traces, +- Selected: t.sets[strings.TrimPrefix(req.URL.Path, "/trace/")], // may be nil +- Recent: t.recent, +- } +-} +- +-func fillOffsets(td *traceSpan, start time.Time) { +- for i := range td.Events { +- td.Events[i].Offset = td.Events[i].Time.Sub(start) +- } +- for _, child := range td.ChildStartEnd { +- if !child.Start { +- fillOffsets(child.Span, start) +- } +- } +-} +- +-func renderLabels(labels label.List) string { +- buf := &bytes.Buffer{} +- for index := 0; labels.Valid(index); index++ { +- // The 'start' label duplicates the span name, so discard it. +- if l := labels.Label(index); l.Valid() && l.Key().Name() != "start" { +- fmt.Fprintf(buf, "%v ", l) +- } +- } +- return buf.String() +-} +diff -urN a/gopls/internal/doc/api.go b/gopls/internal/doc/api.go +--- a/gopls/internal/doc/api.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/doc/api.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,80 +0,0 @@ +-// Copyright 2024 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-//go:generate go run ./generate +- +-// The doc package provides JSON metadata that documents gopls' public +-// interfaces. +-package doc +- +-import _ "embed" +- +-// JSON is a JSON encoding of value of type API. +-// The 'gopls api-json' command prints it. +-// +-//go:embed api.json +-var JSON string +- +-// API is a JSON-encodable representation of gopls' public interfaces. +-// +-// TODO(adonovan): document these data types. +-type API struct { +- Options map[string][]*Option +- Lenses []*Lens +- Analyzers []*Analyzer +- Hints []*Hint +-} +- +-type Option struct { +- Name string +- Type string // T = bool | string | int | enum | any | []T | map[T]T | time.Duration +- Doc string +- EnumKeys EnumKeys +- EnumValues []EnumValue +- Default string +- Status string +- Hierarchy string +- DeprecationMessage string +-} +- +-type EnumKeys struct { +- ValueType string +- Keys []EnumKey +-} +- +-type EnumKey struct { +- Name string // in JSON syntax (quoted) +- Doc string +- Default string +- Status string // = "" | "advanced" | "experimental" | "deprecated" +-} +- +-type EnumValue struct { +- Value string // in JSON syntax (quoted) +- Doc string // doc comment; always starts with `Value` +- Status string // = "" | "advanced" | "experimental" | "deprecated" +-} +- +-type Lens struct { +- FileType string // e.g. 
"Go", "go.mod" +- Lens string +- Title string +- Doc string +- Default bool +- Status string // = "" | "advanced" | "experimental" | "deprecated" +-} +- +-type Analyzer struct { +- Name string +- Doc string // from analysis.Analyzer.Doc ("title: summary\ndescription"; go/doc/comment, not Markdown) +- URL string +- Default bool +-} +- +-type Hint struct { +- Name string +- Doc string +- Default bool +- Status string // = "" | "advanced" | "experimental" | "deprecated" +-} +diff -urN a/gopls/internal/doc/api.json b/gopls/internal/doc/api.json +--- a/gopls/internal/doc/api.json 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/doc/api.json 1969-12-31 18:00:00.000000000 -0600 +@@ -1,3748 +0,0 @@ +-{ +- "Options": { +- "User": [ +- { +- "Name": "buildFlags", +- "Type": "[]string", +- "Doc": "buildFlags is the set of flags passed on to the build system when invoked.\nIt is applied to queries like `go list`, which is used when discovering files.\nThe most common use is to set `-tags`.\n", +- "EnumKeys": { +- "ValueType": "", +- "Keys": null +- }, +- "EnumValues": null, +- "Default": "[]", +- "Status": "", +- "Hierarchy": "build", +- "DeprecationMessage": "" +- }, +- { +- "Name": "env", +- "Type": "map[string]string", +- "Doc": "env adds environment variables to external commands run by `gopls`, most notably `go list`.\n", +- "EnumKeys": { +- "ValueType": "", +- "Keys": null +- }, +- "EnumValues": null, +- "Default": "{}", +- "Status": "", +- "Hierarchy": "build", +- "DeprecationMessage": "" +- }, +- { +- "Name": "directoryFilters", +- "Type": "[]string", +- "Doc": "directoryFilters can be used to exclude unwanted directories from the\nworkspace. By default, all directories are included. Filters are an\noperator, `+` to include and `-` to exclude, followed by a path prefix\nrelative to the workspace folder. They are evaluated in order, and\nthe last filter that applies to a path controls whether it is included.\nThe path prefix can be empty, so an initial `-` excludes everything.\n\nDirectoryFilters also supports the `**` operator to match 0 or more directories.\n\nExamples:\n\nExclude node_modules at current depth: `-node_modules`\n\nExclude node_modules at any depth: `-**/node_modules`\n\nInclude only project_a: `-` (exclude everything), `+project_a`\n\nInclude only project_a, but not node_modules inside it: `-`, `+project_a`, `-project_a/node_modules`\n", +- "EnumKeys": { +- "ValueType": "", +- "Keys": null +- }, +- "EnumValues": null, +- "Default": "[\"-**/node_modules\"]", +- "Status": "", +- "Hierarchy": "build", +- "DeprecationMessage": "" +- }, +- { +- "Name": "templateExtensions", +- "Type": "[]string", +- "Doc": "templateExtensions gives the extensions of file names that are treated\nas template files. 
(The extension\nis the part of the file name after the final dot.)\n", +- "EnumKeys": { +- "ValueType": "", +- "Keys": null +- }, +- "EnumValues": null, +- "Default": "[]", +- "Status": "", +- "Hierarchy": "build", +- "DeprecationMessage": "" +- }, +- { +- "Name": "memoryMode", +- "Type": "string", +- "Doc": "obsolete, no effect\n", +- "EnumKeys": { +- "ValueType": "", +- "Keys": null +- }, +- "EnumValues": null, +- "Default": "\"\"", +- "Status": "experimental", +- "Hierarchy": "build", +- "DeprecationMessage": "" +- }, +- { +- "Name": "expandWorkspaceToModule", +- "Type": "bool", +- "Doc": "expandWorkspaceToModule determines which packages are considered\n\"workspace packages\" when the workspace is using modules.\n\nWorkspace packages affect the scope of workspace-wide operations. Notably,\ngopls diagnoses all packages considered to be part of the workspace after\nevery keystroke, so by setting \"ExpandWorkspaceToModule\" to false, and\nopening a nested workspace directory, you can reduce the amount of work\ngopls has to do to keep your workspace up to date.\n", +- "EnumKeys": { +- "ValueType": "", +- "Keys": null +- }, +- "EnumValues": null, +- "Default": "true", +- "Status": "experimental", +- "Hierarchy": "build", +- "DeprecationMessage": "" +- }, +- { +- "Name": "standaloneTags", +- "Type": "[]string", +- "Doc": "standaloneTags specifies a set of build constraints that identify\nindividual Go source files that make up the entire main package of an\nexecutable.\n\nA common example of standalone main files is the convention of using the\ndirective `//go:build ignore` to denote files that are not intended to be\nincluded in any package, for example because they are invoked directly by\nthe developer using `go run`.\n\nGopls considers a file to be a standalone main file if and only if it has\npackage name \"main\" and has a build directive of the exact form\n\"//go:build tag\" or \"// +build tag\", where tag is among the list of tags\nconfigured by this setting. Notably, if the build constraint is more\ncomplicated than a simple tag (such as the composite constraint\n`//go:build tag \u0026\u0026 go1.18`), the file is not considered to be a standalone\nmain file.\n\nThis setting is only supported when gopls is built with Go 1.16 or later.\n", +- "EnumKeys": { +- "ValueType": "", +- "Keys": null +- }, +- "EnumValues": null, +- "Default": "[\"ignore\"]", +- "Status": "", +- "Hierarchy": "build", +- "DeprecationMessage": "" +- }, +- { +- "Name": "workspaceFiles", +- "Type": "[]string", +- "Doc": "workspaceFiles configures the set of globs that match files defining the\nlogical build of the current workspace. 
Any on-disk changes to any files\nmatching a glob specified here will trigger a reload of the workspace.\n\nThis setting need only be customized in environments with a custom\nGOPACKAGESDRIVER.\n", +- "EnumKeys": { +- "ValueType": "", +- "Keys": null +- }, +- "EnumValues": null, +- "Default": "[]", +- "Status": "", +- "Hierarchy": "build", +- "DeprecationMessage": "" +- }, +- { +- "Name": "hoverKind", +- "Type": "enum", +- "Doc": "hoverKind controls the information that appears in the hover text.\nSingleLine is intended for use only by authors of editor plugins.\n", +- "EnumKeys": { +- "ValueType": "", +- "Keys": null +- }, +- "EnumValues": [ +- { +- "Value": "\"FullDocumentation\"", +- "Doc": "", +- "Status": "" +- }, +- { +- "Value": "\"NoDocumentation\"", +- "Doc": "", +- "Status": "" +- }, +- { +- "Value": "\"SingleLine\"", +- "Doc": "", +- "Status": "" +- }, +- { +- "Value": "\"Structured\"", +- "Doc": "`\"Structured\"` is a misguided experimental setting that returns a JSON\nhover format. This setting should not be used, as it will be removed in a\nfuture release of gopls.\n", +- "Status": "" +- }, +- { +- "Value": "\"SynopsisDocumentation\"", +- "Doc": "", +- "Status": "" +- } +- ], +- "Default": "\"FullDocumentation\"", +- "Status": "", +- "Hierarchy": "ui.documentation", +- "DeprecationMessage": "" +- }, +- { +- "Name": "linkTarget", +- "Type": "string", +- "Doc": "linkTarget is the base URL for links to Go package\ndocumentation returned by LSP operations such as Hover and\nDocumentLinks and in the CodeDescription field of each\nDiagnostic.\n\nIt might be one of:\n\n* `\"godoc.org\"`\n* `\"pkg.go.dev\"`\n\nIf company chooses to use its own `godoc.org`, its address can be used as well.\n\nModules matching the GOPRIVATE environment variable will not have\ndocumentation links in hover.\n", +- "EnumKeys": { +- "ValueType": "", +- "Keys": null +- }, +- "EnumValues": null, +- "Default": "\"pkg.go.dev\"", +- "Status": "", +- "Hierarchy": "ui.documentation", +- "DeprecationMessage": "" +- }, +- { +- "Name": "linksInHover", +- "Type": "enum", +- "Doc": "linksInHover controls the presence of documentation links in hover markdown.\n", +- "EnumKeys": { +- "ValueType": "", +- "Keys": null +- }, +- "EnumValues": [ +- { +- "Value": "false", +- "Doc": "false: do not show links", +- "Status": "" +- }, +- { +- "Value": "true", +- "Doc": "true: show links to the `linkTarget` domain", +- "Status": "" +- }, +- { +- "Value": "\"gopls\"", +- "Doc": "`\"gopls\"`: show links to gopls' internal documentation viewer", +- "Status": "" +- } +- ], +- "Default": "true", +- "Status": "", +- "Hierarchy": "ui.documentation", +- "DeprecationMessage": "" +- }, +- { +- "Name": "usePlaceholders", +- "Type": "bool", +- "Doc": "placeholders enables placeholders for function parameters or struct\nfields in completion responses.\n", +- "EnumKeys": { +- "ValueType": "", +- "Keys": null +- }, +- "EnumValues": null, +- "Default": "false", +- "Status": "", +- "Hierarchy": "ui.completion", +- "DeprecationMessage": "" +- }, +- { +- "Name": "completionBudget", +- "Type": "time.Duration", +- "Doc": "completionBudget is the soft latency goal for completion requests. Most\nrequests finish in a couple milliseconds, but in some cases deep\ncompletions can take much longer. As we use up our budget we\ndynamically reduce the search scope to ensure we return timely\nresults. 
Zero means unlimited.\n", +- "EnumKeys": { +- "ValueType": "", +- "Keys": null +- }, +- "EnumValues": null, +- "Default": "\"100ms\"", +- "Status": "debug", +- "Hierarchy": "ui.completion", +- "DeprecationMessage": "" +- }, +- { +- "Name": "matcher", +- "Type": "enum", +- "Doc": "matcher sets the algorithm that is used when calculating completion\ncandidates.\n", +- "EnumKeys": { +- "ValueType": "", +- "Keys": null +- }, +- "EnumValues": [ +- { +- "Value": "\"CaseInsensitive\"", +- "Doc": "", +- "Status": "" +- }, +- { +- "Value": "\"CaseSensitive\"", +- "Doc": "", +- "Status": "" +- }, +- { +- "Value": "\"Fuzzy\"", +- "Doc": "", +- "Status": "" +- } +- ], +- "Default": "\"Fuzzy\"", +- "Status": "advanced", +- "Hierarchy": "ui.completion", +- "DeprecationMessage": "" +- }, +- { +- "Name": "experimentalPostfixCompletions", +- "Type": "bool", +- "Doc": "experimentalPostfixCompletions enables artificial method snippets\nsuch as \"someSlice.sort!\".\n", +- "EnumKeys": { +- "ValueType": "", +- "Keys": null +- }, +- "EnumValues": null, +- "Default": "true", +- "Status": "experimental", +- "Hierarchy": "ui.completion", +- "DeprecationMessage": "" +- }, +- { +- "Name": "completeFunctionCalls", +- "Type": "bool", +- "Doc": "completeFunctionCalls enables function call completion.\n\nWhen completing a statement, or when a function return type matches the\nexpected of the expression being completed, completion may suggest call\nexpressions (i.e. may include parentheses).\n", +- "EnumKeys": { +- "ValueType": "", +- "Keys": null +- }, +- "EnumValues": null, +- "Default": "true", +- "Status": "", +- "Hierarchy": "ui.completion", +- "DeprecationMessage": "" +- }, +- { +- "Name": "importShortcut", +- "Type": "enum", +- "Doc": "importShortcut specifies whether import statements should link to\ndocumentation or go to definitions.\n", +- "EnumKeys": { +- "ValueType": "", +- "Keys": null +- }, +- "EnumValues": [ +- { +- "Value": "\"Both\"", +- "Doc": "", +- "Status": "" +- }, +- { +- "Value": "\"Definition\"", +- "Doc": "", +- "Status": "" +- }, +- { +- "Value": "\"Link\"", +- "Doc": "", +- "Status": "" +- } +- ], +- "Default": "\"Both\"", +- "Status": "", +- "Hierarchy": "ui.navigation", +- "DeprecationMessage": "" +- }, +- { +- "Name": "symbolMatcher", +- "Type": "enum", +- "Doc": "symbolMatcher sets the algorithm that is used when finding workspace symbols.\n", +- "EnumKeys": { +- "ValueType": "", +- "Keys": null +- }, +- "EnumValues": [ +- { +- "Value": "\"CaseInsensitive\"", +- "Doc": "", +- "Status": "" +- }, +- { +- "Value": "\"CaseSensitive\"", +- "Doc": "", +- "Status": "" +- }, +- { +- "Value": "\"FastFuzzy\"", +- "Doc": "", +- "Status": "" +- }, +- { +- "Value": "\"Fuzzy\"", +- "Doc": "", +- "Status": "" +- } +- ], +- "Default": "\"FastFuzzy\"", +- "Status": "advanced", +- "Hierarchy": "ui.navigation", +- "DeprecationMessage": "" +- }, +- { +- "Name": "symbolStyle", +- "Type": "enum", +- "Doc": "symbolStyle controls how symbols are qualified in symbol responses.\n\nExample Usage:\n\n```json5\n\"gopls\": {\n...\n \"symbolStyle\": \"Dynamic\",\n...\n}\n```\n", +- "EnumKeys": { +- "ValueType": "", +- "Keys": null +- }, +- "EnumValues": [ +- { +- "Value": "\"Dynamic\"", +- "Doc": "`\"Dynamic\"` uses whichever qualifier results in the highest scoring\nmatch for the given symbol query. Here a \"qualifier\" is any \"/\" or \".\"\ndelimited suffix of the fully qualified symbol. i.e. 
\"to/pkg.Foo.Field\" or\njust \"Foo.Field\".\n", +- "Status": "" +- }, +- { +- "Value": "\"Full\"", +- "Doc": "`\"Full\"` is fully qualified symbols, i.e.\n\"path/to/pkg.Foo.Field\".\n", +- "Status": "" +- }, +- { +- "Value": "\"Package\"", +- "Doc": "`\"Package\"` is package qualified symbols i.e.\n\"pkg.Foo.Field\".\n", +- "Status": "" +- } +- ], +- "Default": "\"Dynamic\"", +- "Status": "advanced", +- "Hierarchy": "ui.navigation", +- "DeprecationMessage": "" +- }, +- { +- "Name": "symbolScope", +- "Type": "enum", +- "Doc": "symbolScope controls which packages are searched for workspace/symbol\nrequests. When the scope is \"workspace\", gopls searches only workspace\npackages. When the scope is \"all\", gopls searches all loaded packages,\nincluding dependencies and the standard library.\n", +- "EnumKeys": { +- "ValueType": "", +- "Keys": null +- }, +- "EnumValues": [ +- { +- "Value": "\"all\"", +- "Doc": "`\"all\"` matches symbols in any loaded package, including\ndependencies.\n", +- "Status": "" +- }, +- { +- "Value": "\"workspace\"", +- "Doc": "`\"workspace\"` matches symbols in workspace packages only.\n", +- "Status": "" +- } +- ], +- "Default": "\"all\"", +- "Status": "", +- "Hierarchy": "ui.navigation", +- "DeprecationMessage": "" +- }, +- { +- "Name": "analyses", +- "Type": "map[string]bool", +- "Doc": "analyses specify analyses that the user would like to enable or disable.\nA map of the names of analysis passes that should be enabled/disabled.\nA full list of analyzers that gopls uses can be found in\n[analyzers.md](https://github.com/golang/tools/blob/master/gopls/doc/analyzers.md).\n\nExample Usage:\n\n```json5\n...\n\"analyses\": {\n \"unreachable\": false, // Disable the unreachable analyzer.\n \"unusedvariable\": true // Enable the unusedvariable analyzer.\n}\n...\n```\n", +- "EnumKeys": { +- "ValueType": "bool", +- "Keys": [ +- { +- "Name": "\"QF1001\"", +- "Doc": "Apply De Morgan's law\n\nAvailable since\n 2021.1\n", +- "Default": "false", +- "Status": "" +- }, +- { +- "Name": "\"QF1002\"", +- "Doc": "Convert untagged switch to tagged switch\n\nAn untagged switch that compares a single variable against a series of\nvalues can be replaced with a tagged switch.\n\nBefore:\n\n switch {\n case x == 1 || x == 2, x == 3:\n ...\n case x == 4:\n ...\n default:\n ...\n }\n\nAfter:\n\n switch x {\n case 1, 2, 3:\n ...\n case 4:\n ...\n default:\n ...\n }\n\nAvailable since\n 2021.1\n", +- "Default": "true", +- "Status": "" +- }, +- { +- "Name": "\"QF1003\"", +- "Doc": "Convert if/else-if chain to tagged switch\n\nA series of if/else-if checks comparing the same variable against\nvalues can be replaced with a tagged switch.\n\nBefore:\n\n if x == 1 || x == 2 {\n ...\n } else if x == 3 {\n ...\n } else {\n ...\n }\n\nAfter:\n\n switch x {\n case 1, 2:\n ...\n case 3:\n ...\n default:\n ...\n }\n\nAvailable since\n 2021.1\n", +- "Default": "true", +- "Status": "" +- }, +- { +- "Name": "\"QF1004\"", +- "Doc": "Use strings.ReplaceAll instead of strings.Replace with n == -1\n\nAvailable since\n 2021.1\n", +- "Default": "true", +- "Status": "" +- }, +- { +- "Name": "\"QF1005\"", +- "Doc": "Expand call to math.Pow\n\nSome uses of math.Pow can be simplified to basic multiplication.\n\nBefore:\n\n math.Pow(x, 2)\n\nAfter:\n\n x * x\n\nAvailable since\n 2021.1\n", +- "Default": "false", +- "Status": "" +- }, +- { +- "Name": "\"QF1006\"", +- "Doc": "Lift if+break into loop condition\n\nBefore:\n\n for {\n if done {\n break\n }\n ...\n }\n\nAfter:\n\n for !done {\n ...\n }\n\nAvailable 
since\n 2021.1\n", +- "Default": "false", +- "Status": "" +- }, +- { +- "Name": "\"QF1007\"", +- "Doc": "Merge conditional assignment into variable declaration\n\nBefore:\n\n x := false\n if someCondition {\n x = true\n }\n\nAfter:\n\n x := someCondition\n\nAvailable since\n 2021.1\n", +- "Default": "false", +- "Status": "" +- }, +- { +- "Name": "\"QF1008\"", +- "Doc": "Omit embedded fields from selector expression\n\nAvailable since\n 2021.1\n", +- "Default": "false", +- "Status": "" +- }, +- { +- "Name": "\"QF1009\"", +- "Doc": "Use time.Time.Equal instead of == operator\n\nAvailable since\n 2021.1\n", +- "Default": "true", +- "Status": "" +- }, +- { +- "Name": "\"QF1010\"", +- "Doc": "Convert slice of bytes to string when printing it\n\nAvailable since\n 2021.1\n", +- "Default": "true", +- "Status": "" +- }, +- { +- "Name": "\"QF1011\"", +- "Doc": "Omit redundant type from variable declaration\n\nAvailable since\n 2021.1\n", +- "Default": "false", +- "Status": "" +- }, +- { +- "Name": "\"QF1012\"", +- "Doc": "Use fmt.Fprintf(x, ...) instead of x.Write(fmt.Sprintf(...))\n\nAvailable since\n 2022.1\n", +- "Default": "true", +- "Status": "" +- }, +- { +- "Name": "\"S1000\"", +- "Doc": "Use plain channel send or receive instead of single-case select\n\nSelect statements with a single case can be replaced with a simple\nsend or receive.\n\nBefore:\n\n select {\n case x := \u003c-ch:\n fmt.Println(x)\n }\n\nAfter:\n\n x := \u003c-ch\n fmt.Println(x)\n\nAvailable since\n 2017.1\n", +- "Default": "true", +- "Status": "" +- }, +- { +- "Name": "\"S1001\"", +- "Doc": "Replace for loop with call to copy\n\nUse copy() for copying elements from one slice to another. For\narrays of identical size, you can use simple assignment.\n\nBefore:\n\n for i, x := range src {\n dst[i] = x\n }\n\nAfter:\n\n copy(dst, src)\n\nAvailable since\n 2017.1\n", +- "Default": "true", +- "Status": "" +- }, +- { +- "Name": "\"S1002\"", +- "Doc": "Omit comparison with boolean constant\n\nBefore:\n\n if x == true {}\n\nAfter:\n\n if x {}\n\nAvailable since\n 2017.1\n", +- "Default": "false", +- "Status": "" +- }, +- { +- "Name": "\"S1003\"", +- "Doc": "Replace call to strings.Index with strings.Contains\n\nBefore:\n\n if strings.Index(x, y) != -1 {}\n\nAfter:\n\n if strings.Contains(x, y) {}\n\nAvailable since\n 2017.1\n", +- "Default": "true", +- "Status": "" +- }, +- { +- "Name": "\"S1004\"", +- "Doc": "Replace call to bytes.Compare with bytes.Equal\n\nBefore:\n\n if bytes.Compare(x, y) == 0 {}\n\nAfter:\n\n if bytes.Equal(x, y) {}\n\nAvailable since\n 2017.1\n", +- "Default": "true", +- "Status": "" +- }, +- { +- "Name": "\"S1005\"", +- "Doc": "Drop unnecessary use of the blank identifier\n\nIn many cases, assigning to the blank identifier is unnecessary.\n\nBefore:\n\n for _ = range s {}\n x, _ = someMap[key]\n _ = \u003c-ch\n\nAfter:\n\n for range s{}\n x = someMap[key]\n \u003c-ch\n\nAvailable since\n 2017.1\n", +- "Default": "false", +- "Status": "" +- }, +- { +- "Name": "\"S1006\"", +- "Doc": "Use 'for { ... }' for infinite loops\n\nFor infinite loops, using for { ... } is the most idiomatic choice.\n\nAvailable since\n 2017.1\n", +- "Default": "false", +- "Status": "" +- }, +- { +- "Name": "\"S1007\"", +- "Doc": "Simplify regular expression by using raw string literal\n\nRaw string literals use backticks instead of quotation marks and do not support\nany escape sequences. 
This means that the backslash can be used\nfreely, without the need of escaping.\n\nSince regular expressions have their own escape sequences, raw strings\ncan improve their readability.\n\nBefore:\n\n regexp.Compile(\"\\\\A(\\\\w+) profile: total \\\\d+\\\\n\\\\z\")\n\nAfter:\n\n regexp.Compile(`\\A(\\w+) profile: total \\d+\\n\\z`)\n\nAvailable since\n 2017.1\n", +- "Default": "true", +- "Status": "" +- }, +- { +- "Name": "\"S1008\"", +- "Doc": "Simplify returning boolean expression\n\nBefore:\n\n if \u003cexpr\u003e {\n return true\n }\n return false\n\nAfter:\n\n return \u003cexpr\u003e\n\nAvailable since\n 2017.1\n", +- "Default": "false", +- "Status": "" +- }, +- { +- "Name": "\"S1009\"", +- "Doc": "Omit redundant nil check on slices, maps, and channels\n\nThe len function is defined for all slices, maps, and\nchannels, even nil ones, which have a length of zero. It is not necessary to\ncheck for nil before checking that their length is not zero.\n\nBefore:\n\n if x != nil \u0026\u0026 len(x) != 0 {}\n\nAfter:\n\n if len(x) != 0 {}\n\nAvailable since\n 2017.1\n", +- "Default": "true", +- "Status": "" +- }, +- { +- "Name": "\"S1010\"", +- "Doc": "Omit default slice index\n\nWhen slicing, the second index defaults to the length of the value,\nmaking s[n:len(s)] and s[n:] equivalent.\n\nAvailable since\n 2017.1\n", +- "Default": "true", +- "Status": "" +- }, +- { +- "Name": "\"S1011\"", +- "Doc": "Use a single append to concatenate two slices\n\nBefore:\n\n for _, e := range y {\n x = append(x, e)\n }\n \n for i := range y {\n x = append(x, y[i])\n }\n \n for i := range y {\n v := y[i]\n x = append(x, v)\n }\n\nAfter:\n\n x = append(x, y...)\n x = append(x, y...)\n x = append(x, y...)\n\nAvailable since\n 2017.1\n", +- "Default": "false", +- "Status": "" +- }, +- { +- "Name": "\"S1012\"", +- "Doc": "Replace time.Now().Sub(x) with time.Since(x)\n\nThe time.Since helper has the same effect as using time.Now().Sub(x)\nbut is easier to read.\n\nBefore:\n\n time.Now().Sub(x)\n\nAfter:\n\n time.Since(x)\n\nAvailable since\n 2017.1\n", +- "Default": "true", +- "Status": "" +- }, +- { +- "Name": "\"S1016\"", +- "Doc": "Use a type conversion instead of manually copying struct fields\n\nTwo struct types with identical fields can be converted between each\nother. In older versions of Go, the fields had to have identical\nstruct tags. Since Go 1.8, however, struct tags are ignored during\nconversions. It is thus not necessary to manually copy every field\nindividually.\n\nBefore:\n\n var x T1\n y := T2{\n Field1: x.Field1,\n Field2: x.Field2,\n }\n\nAfter:\n\n var x T1\n y := T2(x)\n\nAvailable since\n 2017.1\n", +- "Default": "false", +- "Status": "" +- }, +- { +- "Name": "\"S1017\"", +- "Doc": "Replace manual trimming with strings.TrimPrefix\n\nInstead of using strings.HasPrefix and manual slicing, use the\nstrings.TrimPrefix function. If the string doesn't start with the\nprefix, the original string will be returned. Using strings.TrimPrefix\nreduces complexity, and avoids common bugs, such as off-by-one\nmistakes.\n\nBefore:\n\n if strings.HasPrefix(str, prefix) {\n str = str[len(prefix):]\n }\n\nAfter:\n\n str = strings.TrimPrefix(str, prefix)\n\nAvailable since\n 2017.1\n", +- "Default": "true", +- "Status": "" +- }, +- { +- "Name": "\"S1018\"", +- "Doc": "Use 'copy' for sliding elements\n\ncopy() permits using the same source and destination slice, even with\noverlapping ranges. 
This makes it ideal for sliding elements in a\nslice.\n\nBefore:\n\n for i := 0; i \u003c n; i++ {\n bs[i] = bs[offset+i]\n }\n\nAfter:\n\n copy(bs[:n], bs[offset:])\n\nAvailable since\n 2017.1\n", +- "Default": "true", +- "Status": "" +- }, +- { +- "Name": "\"S1019\"", +- "Doc": "Simplify 'make' call by omitting redundant arguments\n\nThe 'make' function has default values for the length and capacity\narguments. For channels, the length defaults to zero, and for slices,\nthe capacity defaults to the length.\n\nAvailable since\n 2017.1\n", +- "Default": "true", +- "Status": "" +- }, +- { +- "Name": "\"S1020\"", +- "Doc": "Omit redundant nil check in type assertion\n\nBefore:\n\n if _, ok := i.(T); ok \u0026\u0026 i != nil {}\n\nAfter:\n\n if _, ok := i.(T); ok {}\n\nAvailable since\n 2017.1\n", +- "Default": "true", +- "Status": "" +- }, +- { +- "Name": "\"S1021\"", +- "Doc": "Merge variable declaration and assignment\n\nBefore:\n\n var x uint\n x = 1\n\nAfter:\n\n var x uint = 1\n\nAvailable since\n 2017.1\n", +- "Default": "false", +- "Status": "" +- }, +- { +- "Name": "\"S1023\"", +- "Doc": "Omit redundant control flow\n\nFunctions that have no return value do not need a return statement as\nthe final statement of the function.\n\nSwitches in Go do not have automatic fallthrough, unlike languages\nlike C. It is not necessary to have a break statement as the final\nstatement in a case block.\n\nAvailable since\n 2017.1\n", +- "Default": "true", +- "Status": "" +- }, +- { +- "Name": "\"S1024\"", +- "Doc": "Replace x.Sub(time.Now()) with time.Until(x)\n\nThe time.Until helper has the same effect as using x.Sub(time.Now())\nbut is easier to read.\n\nBefore:\n\n x.Sub(time.Now())\n\nAfter:\n\n time.Until(x)\n\nAvailable since\n 2017.1\n", +- "Default": "true", +- "Status": "" +- }, +- { +- "Name": "\"S1025\"", +- "Doc": "Don't use fmt.Sprintf(\"%s\", x) unnecessarily\n\nIn many instances, there are easier and more efficient ways of getting\na value's string representation. Whenever a value's underlying type is\na string already, or the type has a String method, they should be used\ndirectly.\n\nGiven the following shared definitions\n\n type T1 string\n type T2 int\n\n func (T2) String() string { return \"Hello, world\" }\n\n var x string\n var y T1\n var z T2\n\nwe can simplify\n\n fmt.Sprintf(\"%s\", x)\n fmt.Sprintf(\"%s\", y)\n fmt.Sprintf(\"%s\", z)\n\nto\n\n x\n string(y)\n z.String()\n\nAvailable since\n 2017.1\n", +- "Default": "false", +- "Status": "" +- }, +- { +- "Name": "\"S1028\"", +- "Doc": "Simplify error construction with fmt.Errorf\n\nBefore:\n\n errors.New(fmt.Sprintf(...))\n\nAfter:\n\n fmt.Errorf(...)\n\nAvailable since\n 2017.1\n", +- "Default": "true", +- "Status": "" +- }, +- { +- "Name": "\"S1029\"", +- "Doc": "Range over the string directly\n\nRanging over a string will yield byte offsets and runes. If the offset\nisn't used, this is functionally equivalent to converting the string\nto a slice of runes and ranging over that. Ranging directly over the\nstring will be more performant, however, as it avoids allocating a new\nslice, the size of which depends on the length of the string.\n\nBefore:\n\n for _, r := range []rune(s) {}\n\nAfter:\n\n for _, r := range s {}\n\nAvailable since\n 2017.1\n", +- "Default": "false", +- "Status": "" +- }, +- { +- "Name": "\"S1030\"", +- "Doc": "Use bytes.Buffer.String or bytes.Buffer.Bytes\n\nbytes.Buffer has both a String and a Bytes method. 
It is almost never\nnecessary to use string(buf.Bytes()) or []byte(buf.String()) – simply\nuse the other method.\n\nThe only exception to this are map lookups. Due to a compiler optimization,\nm[string(buf.Bytes())] is more efficient than m[buf.String()].\n\nAvailable since\n 2017.1\n", +- "Default": "true", +- "Status": "" +- }, +- { +- "Name": "\"S1031\"", +- "Doc": "Omit redundant nil check around loop\n\nYou can use range on nil slices and maps, the loop will simply never\nexecute. This makes an additional nil check around the loop\nunnecessary.\n\nBefore:\n\n if s != nil {\n for _, x := range s {\n ...\n }\n }\n\nAfter:\n\n for _, x := range s {\n ...\n }\n\nAvailable since\n 2017.1\n", +- "Default": "true", +- "Status": "" +- }, +- { +- "Name": "\"S1032\"", +- "Doc": "Use sort.Ints(x), sort.Float64s(x), and sort.Strings(x)\n\nThe sort.Ints, sort.Float64s and sort.Strings functions are easier to\nread than sort.Sort(sort.IntSlice(x)), sort.Sort(sort.Float64Slice(x))\nand sort.Sort(sort.StringSlice(x)).\n\nBefore:\n\n sort.Sort(sort.StringSlice(x))\n\nAfter:\n\n sort.Strings(x)\n\nAvailable since\n 2019.1\n", +- "Default": "true", +- "Status": "" +- }, +- { +- "Name": "\"S1033\"", +- "Doc": "Unnecessary guard around call to 'delete'\n\nCalling delete on a nil map is a no-op.\n\nAvailable since\n 2019.2\n", +- "Default": "true", +- "Status": "" +- }, +- { +- "Name": "\"S1034\"", +- "Doc": "Use result of type assertion to simplify cases\n\nAvailable since\n 2019.2\n", +- "Default": "true", +- "Status": "" +- }, +- { +- "Name": "\"S1035\"", +- "Doc": "Redundant call to net/http.CanonicalHeaderKey in method call on net/http.Header\n\nThe methods on net/http.Header, namely Add, Del, Get\nand Set, already canonicalize the given header name.\n\nAvailable since\n 2020.1\n", +- "Default": "true", +- "Status": "" +- }, +- { +- "Name": "\"S1036\"", +- "Doc": "Unnecessary guard around map access\n\nWhen accessing a map key that doesn't exist yet, one receives a zero\nvalue. Often, the zero value is a suitable value, for example when\nusing append or doing integer math.\n\nThe following\n\n if _, ok := m[\"foo\"]; ok {\n m[\"foo\"] = append(m[\"foo\"], \"bar\")\n } else {\n m[\"foo\"] = []string{\"bar\"}\n }\n\ncan be simplified to\n\n m[\"foo\"] = append(m[\"foo\"], \"bar\")\n\nand\n\n if _, ok := m2[\"k\"]; ok {\n m2[\"k\"] += 4\n } else {\n m2[\"k\"] = 4\n }\n\ncan be simplified to\n\n m[\"k\"] += 4\n\nAvailable since\n 2020.1\n", +- "Default": "true", +- "Status": "" +- }, +- { +- "Name": "\"S1037\"", +- "Doc": "Elaborate way of sleeping\n\nUsing a select statement with a single case receiving\nfrom the result of time.After is a very elaborate way of sleeping that\ncan much simpler be expressed with a simple call to time.Sleep.\n\nAvailable since\n 2020.1\n", +- "Default": "true", +- "Status": "" +- }, +- { +- "Name": "\"S1038\"", +- "Doc": "Unnecessarily complex way of printing formatted string\n\nInstead of using fmt.Print(fmt.Sprintf(...)), one can use fmt.Printf(...).\n\nAvailable since\n 2020.1\n", +- "Default": "true", +- "Status": "" +- }, +- { +- "Name": "\"S1039\"", +- "Doc": "Unnecessary use of fmt.Sprint\n\nCalling fmt.Sprint with a single string argument is unnecessary\nand identical to using the string directly.\n\nAvailable since\n 2020.1\n", +- "Default": "true", +- "Status": "" +- }, +- { +- "Name": "\"S1040\"", +- "Doc": "Type assertion to current type\n\nThe type assertion x.(SomeInterface), when x already has type\nSomeInterface, can only fail if x is nil. 
Usually, this is\nleft-over code from when x had a different type and you can safely\ndelete the type assertion. If you want to check that x is not nil,\nconsider being explicit and using an actual if x == nil comparison\ninstead of relying on the type assertion panicking.\n\nAvailable since\n 2021.1\n", +- "Default": "true", +- "Status": "" +- }, +- { +- "Name": "\"SA1000\"", +- "Doc": "Invalid regular expression\n\nAvailable since\n 2017.1\n", +- "Default": "false", +- "Status": "" +- }, +- { +- "Name": "\"SA1001\"", +- "Doc": "Invalid template\n\nAvailable since\n 2017.1\n", +- "Default": "true", +- "Status": "" +- }, +- { +- "Name": "\"SA1002\"", +- "Doc": "Invalid format in time.Parse\n\nAvailable since\n 2017.1\n", +- "Default": "false", +- "Status": "" +- }, +- { +- "Name": "\"SA1003\"", +- "Doc": "Unsupported argument to functions in encoding/binary\n\nThe encoding/binary package can only serialize types with known sizes.\nThis precludes the use of the int and uint types, as their sizes\ndiffer on different architectures. Furthermore, it doesn't support\nserializing maps, channels, strings, or functions.\n\nBefore Go 1.8, bool wasn't supported, either.\n\nAvailable since\n 2017.1\n", +- "Default": "false", +- "Status": "" +- }, +- { +- "Name": "\"SA1004\"", +- "Doc": "Suspiciously small untyped constant in time.Sleep\n\nThe time.Sleep function takes a time.Duration as its only argument.\nDurations are expressed in nanoseconds. Thus, calling time.Sleep(1)\nwill sleep for 1 nanosecond. This is a common source of bugs, as sleep\nfunctions in other languages often accept seconds or milliseconds.\n\nThe time package provides constants such as time.Second to express\nlarge durations. These can be combined with arithmetic to express\narbitrary durations, for example 5 * time.Second for 5 seconds.\n\nIf you truly meant to sleep for a tiny amount of time, use\nn * time.Nanosecond to signal to Staticcheck that you did mean to sleep\nfor some amount of nanoseconds.\n\nAvailable since\n 2017.1\n", +- "Default": "true", +- "Status": "" +- }, +- { +- "Name": "\"SA1005\"", +- "Doc": "Invalid first argument to exec.Command\n\nos/exec runs programs directly (using variants of the fork and exec\nsystem calls on Unix systems). This shouldn't be confused with running\na command in a shell. The shell will allow for features such as input\nredirection, pipes, and general scripting. The shell is also\nresponsible for splitting the user's input into a program name and its\narguments. For example, the equivalent to\n\n ls / /tmp\n\nwould be\n\n exec.Command(\"ls\", \"/\", \"/tmp\")\n\nIf you want to run a command in a shell, consider using something like\nthe following – but be aware that not all systems, particularly\nWindows, will have a /bin/sh program:\n\n exec.Command(\"/bin/sh\", \"-c\", \"ls | grep Awesome\")\n\nAvailable since\n 2017.1\n", +- "Default": "true", +- "Status": "" +- }, +- { +- "Name": "\"SA1007\"", +- "Doc": "Invalid URL in net/url.Parse\n\nAvailable since\n 2017.1\n", +- "Default": "false", +- "Status": "" +- }, +- { +- "Name": "\"SA1008\"", +- "Doc": "Non-canonical key in http.Header map\n\nKeys in http.Header maps are canonical, meaning they follow a specific\ncombination of uppercase and lowercase letters. 
Methods such as\nhttp.Header.Add and http.Header.Del convert inputs into this canonical\nform before manipulating the map.\n\nWhen manipulating http.Header maps directly, as opposed to using the\nprovided methods, care should be taken to stick to canonical form in\norder to avoid inconsistencies. The following piece of code\ndemonstrates one such inconsistency:\n\n h := http.Header{}\n h[\"etag\"] = []string{\"1234\"}\n h.Add(\"etag\", \"5678\")\n fmt.Println(h)\n\n // Output:\n // map[Etag:[5678] etag:[1234]]\n\nThe easiest way of obtaining the canonical form of a key is to use\nhttp.CanonicalHeaderKey.\n\nAvailable since\n 2017.1\n", +- "Default": "true", +- "Status": "" +- }, +- { +- "Name": "\"SA1010\"", +- "Doc": "(*regexp.Regexp).FindAll called with n == 0, which will always return zero results\n\nIf n \u003e= 0, the function returns at most n matches/submatches. To\nreturn all results, specify a negative number.\n\nAvailable since\n 2017.1\n", +- "Default": "false", +- "Status": "" +- }, +- { +- "Name": "\"SA1011\"", +- "Doc": "Various methods in the 'strings' package expect valid UTF-8, but invalid input is provided\n\nAvailable since\n 2017.1\n", +- "Default": "false", +- "Status": "" +- }, +- { +- "Name": "\"SA1012\"", +- "Doc": "A nil context.Context is being passed to a function, consider using context.TODO instead\n\nAvailable since\n 2017.1\n", +- "Default": "true", +- "Status": "" +- }, +- { +- "Name": "\"SA1013\"", +- "Doc": "io.Seeker.Seek is being called with the whence constant as the first argument, but it should be the second\n\nAvailable since\n 2017.1\n", +- "Default": "true", +- "Status": "" +- }, +- { +- "Name": "\"SA1014\"", +- "Doc": "Non-pointer value passed to Unmarshal or Decode\n\nAvailable since\n 2017.1\n", +- "Default": "false", +- "Status": "" +- }, +- { +- "Name": "\"SA1015\"", +- "Doc": "Using time.Tick in a way that will leak. Consider using time.NewTicker, and only use time.Tick in tests, commands and endless functions\n\nBefore Go 1.23, time.Tickers had to be closed to be able to be garbage\ncollected. Since time.Tick doesn't make it possible to close the underlying\nticker, using it repeatedly would leak memory.\n\nGo 1.23 fixes this by allowing tickers to be collected even if they weren't closed.\n\nAvailable since\n 2017.1\n", +- "Default": "false", +- "Status": "" +- }, +- { +- "Name": "\"SA1016\"", +- "Doc": "Trapping a signal that cannot be trapped\n\nNot all signals can be intercepted by a process. Specifically, on\nUNIX-like systems, the syscall.SIGKILL and syscall.SIGSTOP signals are\nnever passed to the process, but instead handled directly by the\nkernel. It is therefore pointless to try and handle these signals.\n\nAvailable since\n 2017.1\n", +- "Default": "true", +- "Status": "" +- }, +- { +- "Name": "\"SA1017\"", +- "Doc": "Channels used with os/signal.Notify should be buffered\n\nThe os/signal package uses non-blocking channel sends when delivering\nsignals. If the receiving end of the channel isn't ready and the\nchannel is either unbuffered or full, the signal will be dropped. To\navoid missing signals, the channel should be buffered and of the\nappropriate size. For a channel used for notification of just one\nsignal value, a buffer of size 1 is sufficient.\n\nAvailable since\n 2017.1\n", +- "Default": "false", +- "Status": "" +- }, +- { +- "Name": "\"SA1018\"", +- "Doc": "strings.Replace called with n == 0, which does nothing\n\nWith n == 0, zero instances will be replaced. 
To replace all\ninstances, use a negative number, or use strings.ReplaceAll.\n\nAvailable since\n 2017.1\n", +- "Default": "false", +- "Status": "" +- }, +- { +- "Name": "\"SA1020\"", +- "Doc": "Using an invalid host:port pair with a net.Listen-related function\n\nAvailable since\n 2017.1\n", +- "Default": "false", +- "Status": "" +- }, +- { +- "Name": "\"SA1021\"", +- "Doc": "Using bytes.Equal to compare two net.IP\n\nA net.IP stores an IPv4 or IPv6 address as a slice of bytes. The\nlength of the slice for an IPv4 address, however, can be either 4 or\n16 bytes long, using different ways of representing IPv4 addresses. In\norder to correctly compare two net.IPs, the net.IP.Equal method should\nbe used, as it takes both representations into account.\n\nAvailable since\n 2017.1\n", +- "Default": "false", +- "Status": "" +- }, +- { +- "Name": "\"SA1023\"", +- "Doc": "Modifying the buffer in an io.Writer implementation\n\nWrite must not modify the slice data, even temporarily.\n\nAvailable since\n 2017.1\n", +- "Default": "false", +- "Status": "" +- }, +- { +- "Name": "\"SA1024\"", +- "Doc": "A string cutset contains duplicate characters\n\nThe strings.TrimLeft and strings.TrimRight functions take cutsets, not\nprefixes. A cutset is treated as a set of characters to remove from a\nstring. For example,\n\n strings.TrimLeft(\"42133word\", \"1234\")\n\nwill result in the string \"word\" – any characters that are 1, 2, 3 or\n4 are cut from the left of the string.\n\nIn order to remove one string from another, use strings.TrimPrefix instead.\n\nAvailable since\n 2017.1\n", +- "Default": "false", +- "Status": "" +- }, +- { +- "Name": "\"SA1025\"", +- "Doc": "It is not possible to use (*time.Timer).Reset's return value correctly\n\nAvailable since\n 2019.1\n", +- "Default": "false", +- "Status": "" +- }, +- { +- "Name": "\"SA1026\"", +- "Doc": "Cannot marshal channels or functions\n\nAvailable since\n 2019.2\n", +- "Default": "false", +- "Status": "" +- }, +- { +- "Name": "\"SA1027\"", +- "Doc": "Atomic access to 64-bit variable must be 64-bit aligned\n\nOn ARM, x86-32, and 32-bit MIPS, it is the caller's responsibility to\narrange for 64-bit alignment of 64-bit words accessed atomically. The\nfirst word in a variable or in an allocated struct, array, or slice\ncan be relied upon to be 64-bit aligned.\n\nYou can use the structlayout tool to inspect the alignment of fields\nin a struct.\n\nAvailable since\n 2019.2\n", +- "Default": "false", +- "Status": "" +- }, +- { +- "Name": "\"SA1028\"", +- "Doc": "sort.Slice can only be used on slices\n\nThe first argument of sort.Slice must be a slice.\n\nAvailable since\n 2020.1\n", +- "Default": "false", +- "Status": "" +- }, +- { +- "Name": "\"SA1029\"", +- "Doc": "Inappropriate key in call to context.WithValue\n\nThe provided key must be comparable and should not be\nof type string or any other built-in type to avoid collisions between\npackages using context. Users of WithValue should define their own\ntypes for keys.\n\nTo avoid allocating when assigning to an interface{},\ncontext keys often have concrete type struct{}. 
Alternatively,\nexported context key variables' static type should be a pointer or\ninterface.\n\nAvailable since\n 2020.1\n", +- "Default": "false", +- "Status": "" +- }, +- { +- "Name": "\"SA1030\"", +- "Doc": "Invalid argument in call to a strconv function\n\nThis check validates the format, number base and bit size arguments of\nthe various parsing and formatting functions in strconv.\n\nAvailable since\n 2021.1\n", +- "Default": "false", +- "Status": "" +- }, +- { +- "Name": "\"SA1031\"", +- "Doc": "Overlapping byte slices passed to an encoder\n\nIn an encoding function of the form Encode(dst, src), dst and\nsrc were found to reference the same memory. This can result in\nsrc bytes being overwritten before they are read, when the encoder\nwrites more than one byte per src byte.\n\nAvailable since\n 2024.1\n", +- "Default": "false", +- "Status": "" +- }, +- { +- "Name": "\"SA1032\"", +- "Doc": "Wrong order of arguments to errors.Is\n\nThe first argument of the function errors.Is is the error\nthat we have and the second argument is the error we're trying to match against.\nFor example:\n\n\tif errors.Is(err, io.EOF) { ... }\n\nThis check detects some cases where the two arguments have been swapped. It\nflags any calls where the first argument is referring to a package-level error\nvariable, such as\n\n\tif errors.Is(io.EOF, err) { /* this is wrong */ }\n\nAvailable since\n 2024.1\n", +- "Default": "false", +- "Status": "" +- }, +- { +- "Name": "\"SA2001\"", +- "Doc": "Empty critical section, did you mean to defer the unlock?\n\nEmpty critical sections of the kind\n\n mu.Lock()\n mu.Unlock()\n\nare very often a typo, and the following was intended instead:\n\n mu.Lock()\n defer mu.Unlock()\n\nDo note that sometimes empty critical sections can be useful, as a\nform of signaling to wait on another goroutine. Many times, there are\nsimpler ways of achieving the same effect. When that isn't the case,\nthe code should be amply commented to avoid confusion. Combining such\ncomments with a //lint:ignore directive can be used to suppress this\nrare false positive.\n\nAvailable since\n 2017.1\n", +- "Default": "true", +- "Status": "" +- }, +- { +- "Name": "\"SA2002\"", +- "Doc": "Called testing.T.FailNow or SkipNow in a goroutine, which isn't allowed\n\nAvailable since\n 2017.1\n", +- "Default": "false", +- "Status": "" +- }, +- { +- "Name": "\"SA2003\"", +- "Doc": "Deferred Lock right after locking, likely meant to defer Unlock instead\n\nAvailable since\n 2017.1\n", +- "Default": "false", +- "Status": "" +- }, +- { +- "Name": "\"SA3000\"", +- "Doc": "TestMain doesn't call os.Exit, hiding test failures\n\nTest executables (and in turn 'go test') exit with a non-zero status\ncode if any tests failed. When specifying your own TestMain function,\nit is your responsibility to arrange for this, by calling os.Exit with\nthe correct code. The correct code is returned by (*testing.M).Run, so\nthe usual way of implementing TestMain is to end it with\nos.Exit(m.Run()).\n\nAvailable since\n 2017.1\n", +- "Default": "true", +- "Status": "" +- }, +- { +- "Name": "\"SA3001\"", +- "Doc": "Assigning to b.N in benchmarks distorts the results\n\nThe testing package dynamically sets b.N to improve the reliability of\nbenchmarks and uses it in computations to determine the duration of a\nsingle operation. 
Benchmark code must not alter b.N as this would\nfalsify results.\n\nAvailable since\n 2017.1\n", +- "Default": "true", +- "Status": "" +- }, +- { +- "Name": "\"SA4000\"", +- "Doc": "Binary operator has identical expressions on both sides\n\nAvailable since\n 2017.1\n", +- "Default": "true", +- "Status": "" +- }, +- { +- "Name": "\"SA4001\"", +- "Doc": "\u0026*x gets simplified to x, it does not copy x\n\nAvailable since\n 2017.1\n", +- "Default": "true", +- "Status": "" +- }, +- { +- "Name": "\"SA4003\"", +- "Doc": "Comparing unsigned values against negative values is pointless\n\nAvailable since\n 2017.1\n", +- "Default": "true", +- "Status": "" +- }, +- { +- "Name": "\"SA4004\"", +- "Doc": "The loop exits unconditionally after one iteration\n\nAvailable since\n 2017.1\n", +- "Default": "true", +- "Status": "" +- }, +- { +- "Name": "\"SA4005\"", +- "Doc": "Field assignment that will never be observed. Did you mean to use a pointer receiver?\n\nAvailable since\n 2021.1\n", +- "Default": "false", +- "Status": "" +- }, +- { +- "Name": "\"SA4006\"", +- "Doc": "A value assigned to a variable is never read before being overwritten. Forgotten error check or dead code?\n\nAvailable since\n 2017.1\n", +- "Default": "false", +- "Status": "" +- }, +- { +- "Name": "\"SA4008\"", +- "Doc": "The variable in the loop condition never changes, are you incrementing the wrong variable?\n\nFor example:\n\n\tfor i := 0; i \u003c 10; j++ { ... }\n\nThis may also occur when a loop can only execute once because of unconditional\ncontrol flow that terminates the loop. For example, when a loop body contains an\nunconditional break, return, or panic:\n\n\tfunc f() {\n\t\tpanic(\"oops\")\n\t}\n\tfunc g() {\n\t\tfor i := 0; i \u003c 10; i++ {\n\t\t\t// f unconditionally calls panic, which means \"i\" is\n\t\t\t// never incremented.\n\t\t\tf()\n\t\t}\n\t}\n\nAvailable since\n 2017.1\n", +- "Default": "false", +- "Status": "" +- }, +- { +- "Name": "\"SA4009\"", +- "Doc": "A function argument is overwritten before its first use\n\nAvailable since\n 2017.1\n", +- "Default": "false", +- "Status": "" +- }, +- { +- "Name": "\"SA4010\"", +- "Doc": "The result of append will never be observed anywhere\n\nAvailable since\n 2017.1\n", +- "Default": "false", +- "Status": "" +- }, +- { +- "Name": "\"SA4011\"", +- "Doc": "Break statement with no effect. Did you mean to break out of an outer loop?\n\nAvailable since\n 2017.1\n", +- "Default": "true", +- "Status": "" +- }, +- { +- "Name": "\"SA4012\"", +- "Doc": "Comparing a value against NaN even though no value is equal to NaN\n\nAvailable since\n 2017.1\n", +- "Default": "false", +- "Status": "" +- }, +- { +- "Name": "\"SA4013\"", +- "Doc": "Negating a boolean twice (!!b) is the same as writing b. 
This is either redundant, or a typo.\n\nAvailable since\n 2017.1\n", +- "Default": "true", +- "Status": "" +- }, +- { +- "Name": "\"SA4014\"", +- "Doc": "An if/else if chain has repeated conditions and no side-effects; if the condition didn't match the first time, it won't match the second time, either\n\nAvailable since\n 2017.1\n", +- "Default": "true", +- "Status": "" +- }, +- { +- "Name": "\"SA4015\"", +- "Doc": "Calling functions like math.Ceil on floats converted from integers doesn't do anything useful\n\nAvailable since\n 2017.1\n", +- "Default": "false", +- "Status": "" +- }, +- { +- "Name": "\"SA4016\"", +- "Doc": "Certain bitwise operations, such as x ^ 0, do not do anything useful\n\nAvailable since\n 2017.1\n", +- "Default": "true", +- "Status": "" +- }, +- { +- "Name": "\"SA4017\"", +- "Doc": "Discarding the return values of a function without side effects, making the call pointless\n\nAvailable since\n 2017.1\n", +- "Default": "false", +- "Status": "" +- }, +- { +- "Name": "\"SA4018\"", +- "Doc": "Self-assignment of variables\n\nAvailable since\n 2017.1\n", +- "Default": "false", +- "Status": "" +- }, +- { +- "Name": "\"SA4019\"", +- "Doc": "Multiple, identical build constraints in the same file\n\nAvailable since\n 2017.1\n", +- "Default": "true", +- "Status": "" +- }, +- { +- "Name": "\"SA4020\"", +- "Doc": "Unreachable case clause in a type switch\n\nIn a type switch like the following\n\n type T struct{}\n func (T) Read(b []byte) (int, error) { return 0, nil }\n\n var v any = T{}\n\n switch v.(type) {\n case io.Reader:\n // ...\n case T:\n // unreachable\n }\n\nthe second case clause can never be reached because T implements\nio.Reader and case clauses are evaluated in source order.\n\nAnother example:\n\n type T struct{}\n func (T) Read(b []byte) (int, error) { return 0, nil }\n func (T) Close() error { return nil }\n\n var v any = T{}\n\n switch v.(type) {\n case io.Reader:\n // ...\n case io.ReadCloser:\n // unreachable\n }\n\nEven though T has a Close method and thus implements io.ReadCloser,\nio.Reader will always match first. The method set of io.Reader is a\nsubset of io.ReadCloser. Thus it is impossible to match the second\ncase without matching the first case.\n\n\nStructurally equivalent interfaces\n\nA special case of the previous example are structurally identical\ninterfaces. Given these declarations\n\n type T error\n type V error\n\n func doSomething() error {\n err, ok := doAnotherThing()\n if ok {\n return T(err)\n }\n\n return U(err)\n }\n\nthe following type switch will have an unreachable case clause:\n\n switch doSomething().(type) {\n case T:\n // ...\n case V:\n // unreachable\n }\n\nT will always match before V because they are structurally equivalent\nand therefore doSomething()'s return value implements both.\n\nAvailable since\n 2019.2\n", +- "Default": "true", +- "Status": "" +- }, +- { +- "Name": "\"SA4022\"", +- "Doc": "Comparing the address of a variable against nil\n\nCode such as 'if \u0026x == nil' is meaningless, because taking the address of a variable always yields a non-nil pointer.\n\nAvailable since\n 2020.1\n", +- "Default": "true", +- "Status": "" +- }, +- { +- "Name": "\"SA4023\"", +- "Doc": "Impossible comparison of interface value with untyped nil\n\nUnder the covers, interfaces are implemented as two elements, a\ntype T and a value V. V is a concrete value such as an int,\nstruct or pointer, never an interface itself, and has type T. 
For\ninstance, if we store the int value 3 in an interface, the\nresulting interface value has, schematically, (T=int, V=3). The\nvalue V is also known as the interface's dynamic value, since a\ngiven interface variable might hold different values V (and\ncorresponding types T) during the execution of the program.\n\nAn interface value is nil only if the V and T are both\nunset, (T=nil, V is not set), In particular, a nil interface will\nalways hold a nil type. If we store a nil pointer of type *int\ninside an interface value, the inner type will be *int regardless\nof the value of the pointer: (T=*int, V=nil). Such an interface\nvalue will therefore be non-nil even when the pointer value V\ninside is nil.\n\nThis situation can be confusing, and arises when a nil value is\nstored inside an interface value such as an error return:\n\n func returnsError() error {\n var p *MyError = nil\n if bad() {\n p = ErrBad\n }\n return p // Will always return a non-nil error.\n }\n\nIf all goes well, the function returns a nil p, so the return\nvalue is an error interface value holding (T=*MyError, V=nil).\nThis means that if the caller compares the returned error to nil,\nit will always look as if there was an error even if nothing bad\nhappened. To return a proper nil error to the caller, the\nfunction must return an explicit nil:\n\n func returnsError() error {\n if bad() {\n return ErrBad\n }\n return nil\n }\n\nIt's a good idea for functions that return errors always to use\nthe error type in their signature (as we did above) rather than a\nconcrete type such as *MyError, to help guarantee the error is\ncreated correctly. As an example, os.Open returns an error even\nthough, if not nil, it's always of concrete type *os.PathError.\n\nSimilar situations to those described here can arise whenever\ninterfaces are used. Just keep in mind that if any concrete value\nhas been stored in the interface, the interface will not be nil.\nFor more information, see The Laws of\nReflection at https://golang.org/doc/articles/laws_of_reflection.html.\n\nThis text has been copied from\nhttps://golang.org/doc/faq#nil_error, licensed under the Creative\nCommons Attribution 3.0 License.\n\nAvailable since\n 2020.2\n", +- "Default": "false", +- "Status": "" +- }, +- { +- "Name": "\"SA4024\"", +- "Doc": "Checking for impossible return value from a builtin function\n\nReturn values of the len and cap builtins cannot be negative.\n\nSee https://golang.org/pkg/builtin/#len and https://golang.org/pkg/builtin/#cap.\n\nExample:\n\n if len(slice) \u003c 0 {\n fmt.Println(\"unreachable code\")\n }\n\nAvailable since\n 2021.1\n", +- "Default": "true", +- "Status": "" +- }, +- { +- "Name": "\"SA4025\"", +- "Doc": "Integer division of literals that results in zero\n\nWhen dividing two integer constants, the result will\nalso be an integer. Thus, a division such as 2 / 3 results in 0.\nThis is true for all of the following examples:\n\n\t_ = 2 / 3\n\tconst _ = 2 / 3\n\tconst _ float64 = 2 / 3\n\t_ = float64(2 / 3)\n\nStaticcheck will flag such divisions if both sides of the division are\ninteger literals, as it is highly unlikely that the division was\nintended to truncate to zero. Staticcheck will not flag integer\ndivision involving named constants, to avoid noisy positives.\n\nAvailable since\n 2021.1\n", +- "Default": "true", +- "Status": "" +- }, +- { +- "Name": "\"SA4026\"", +- "Doc": "Go constants cannot express negative zero\n\nIn IEEE 754 floating point math, zero has a sign and can be positive\nor negative. 
This can be useful in certain numerical code.\n\nGo constants, however, cannot express negative zero. This means that\nthe literals -0.0 and 0.0 have the same ideal value (zero) and\nwill both represent positive zero at runtime.\n\nTo explicitly and reliably create a negative zero, you can use the\nmath.Copysign function: math.Copysign(0, -1).\n\nAvailable since\n 2021.1\n", +- "Default": "true", +- "Status": "" +- }, +- { +- "Name": "\"SA4027\"", +- "Doc": "(*net/url.URL).Query returns a copy, modifying it doesn't change the URL\n\n(*net/url.URL).Query parses the current value of net/url.URL.RawQuery\nand returns it as a map of type net/url.Values. Subsequent changes to\nthis map will not affect the URL unless the map gets encoded and\nassigned to the URL's RawQuery.\n\nAs a consequence, the following code pattern is an expensive no-op:\nu.Query().Add(key, value).\n\nAvailable since\n 2021.1\n", +- "Default": "true", +- "Status": "" +- }, +- { +- "Name": "\"SA4028\"", +- "Doc": "x % 1 is always zero\n\nAvailable since\n 2022.1\n", +- "Default": "true", +- "Status": "" +- }, +- { +- "Name": "\"SA4029\"", +- "Doc": "Ineffective attempt at sorting slice\n\nsort.Float64Slice, sort.IntSlice, and sort.StringSlice are\ntypes, not functions. Doing x = sort.StringSlice(x) does nothing,\nespecially not sort any values. The correct usage is\nsort.Sort(sort.StringSlice(x)) or sort.StringSlice(x).Sort(),\nbut there are more convenient helpers, namely sort.Float64s,\nsort.Ints, and sort.Strings.\n\nAvailable since\n 2022.1\n", +- "Default": "true", +- "Status": "" +- }, +- { +- "Name": "\"SA4030\"", +- "Doc": "Ineffective attempt at generating random number\n\nFunctions in the math/rand package that accept upper limits, such\nas Intn, generate random numbers in the half-open interval [0,n). In\nother words, the generated numbers will be \u003e= 0 and \u003c n – they\ndon't include n. rand.Intn(1) therefore doesn't generate 0\nor 1, it always generates 0.\n\nAvailable since\n 2022.1\n", +- "Default": "true", +- "Status": "" +- }, +- { +- "Name": "\"SA4031\"", +- "Doc": "Checking never-nil value against nil\n\nAvailable since\n 2022.1\n", +- "Default": "false", +- "Status": "" +- }, +- { +- "Name": "\"SA4032\"", +- "Doc": "Comparing runtime.GOOS or runtime.GOARCH against impossible value\n\nAvailable since\n 2024.1\n", +- "Default": "true", +- "Status": "" +- }, +- { +- "Name": "\"SA5000\"", +- "Doc": "Assignment to nil map\n\nAvailable since\n 2017.1\n", +- "Default": "false", +- "Status": "" +- }, +- { +- "Name": "\"SA5001\"", +- "Doc": "Deferring Close before checking for a possible error\n\nAvailable since\n 2017.1\n", +- "Default": "true", +- "Status": "" +- }, +- { +- "Name": "\"SA5002\"", +- "Doc": "The empty for loop ('for {}') spins and can block the scheduler\n\nAvailable since\n 2017.1\n", +- "Default": "false", +- "Status": "" +- }, +- { +- "Name": "\"SA5003\"", +- "Doc": "Defers in infinite loops will never execute\n\nDefers are scoped to the surrounding function, not the surrounding\nblock. In a function that never returns, i.e. one containing an\ninfinite loop, defers will never execute.\n\nAvailable since\n 2017.1\n", +- "Default": "true", +- "Status": "" +- }, +- { +- "Name": "\"SA5004\"", +- "Doc": "'for { select { ...' 
with an empty default branch spins\n\nAvailable since\n 2017.1\n", +- "Default": "true", +- "Status": "" +- }, +- { +- "Name": "\"SA5005\"", +- "Doc": "The finalizer references the finalized object, preventing garbage collection\n\nA finalizer is a function associated with an object that runs when the\ngarbage collector is ready to collect said object, that is when the\nobject is no longer referenced by anything.\n\nIf the finalizer references the object, however, it will always remain\nas the final reference to that object, preventing the garbage\ncollector from collecting the object. The finalizer will never run,\nand the object will never be collected, leading to a memory leak. That\nis why the finalizer should instead use its first argument to operate\non the object. That way, the number of references can temporarily go\nto zero before the object is being passed to the finalizer.\n\nAvailable since\n 2017.1\n", +- "Default": "false", +- "Status": "" +- }, +- { +- "Name": "\"SA5007\"", +- "Doc": "Infinite recursive call\n\nA function that calls itself recursively needs to have an exit\ncondition. Otherwise it will recurse forever, until the system runs\nout of memory.\n\nThis issue can be caused by simple bugs such as forgetting to add an\nexit condition. It can also happen \"on purpose\". Some languages have\ntail call optimization which makes certain infinite recursive calls\nsafe to use. Go, however, does not implement TCO, and as such a loop\nshould be used instead.\n\nAvailable since\n 2017.1\n", +- "Default": "false", +- "Status": "" +- }, +- { +- "Name": "\"SA5008\"", +- "Doc": "Invalid struct tag\n\nAvailable since\n 2019.2\n", +- "Default": "true", +- "Status": "" +- }, +- { +- "Name": "\"SA5010\"", +- "Doc": "Impossible type assertion\n\nSome type assertions can be statically proven to be\nimpossible. This is the case when the method sets of both\narguments of the type assertion conflict with each other, for\nexample by containing the same method with different\nsignatures.\n\nThe Go compiler already applies this check when asserting from an\ninterface value to a concrete type. If the concrete type misses\nmethods from the interface, or if function signatures don't match,\nthen the type assertion can never succeed.\n\nThis check applies the same logic when asserting from one interface to\nanother. If both interface types contain the same method but with\ndifferent signatures, then the type assertion can never succeed,\neither.\n\nAvailable since\n 2020.1\n", +- "Default": "false", +- "Status": "" +- }, +- { +- "Name": "\"SA5011\"", +- "Doc": "Possible nil pointer dereference\n\nA pointer is being dereferenced unconditionally, while\nalso being checked against nil in another place. This suggests that\nthe pointer may be nil and dereferencing it may panic. This is\ncommonly a result of improperly ordered code or missing return\nstatements. Consider the following examples:\n\n func fn(x *int) {\n fmt.Println(*x)\n\n // This nil check is equally important for the previous dereference\n if x != nil {\n foo(*x)\n }\n }\n\n func TestFoo(t *testing.T) {\n x := compute()\n if x == nil {\n t.Errorf(\"nil pointer received\")\n }\n\n // t.Errorf does not abort the test, so if x is nil, the next line will panic.\n foo(*x)\n }\n\nStaticcheck tries to deduce which functions abort control flow.\nFor example, it is aware that a function will not continue\nexecution after a call to panic or log.Fatal. However, sometimes\nthis detection fails, in particular in the presence of\nconditionals. 
Consider the following example:\n\n func Log(msg string, level int) {\n fmt.Println(msg)\n if level == levelFatal {\n os.Exit(1)\n }\n }\n\n func Fatal(msg string) {\n Log(msg, levelFatal)\n }\n\n func fn(x *int) {\n if x == nil {\n Fatal(\"unexpected nil pointer\")\n }\n fmt.Println(*x)\n }\n\nStaticcheck will flag the dereference of x, even though it is perfectly\nsafe. Staticcheck is not able to deduce that a call to\nFatal will exit the program. For the time being, the easiest\nworkaround is to modify the definition of Fatal like so:\n\n func Fatal(msg string) {\n Log(msg, levelFatal)\n panic(\"unreachable\")\n }\n\nWe also hard-code functions from common logging packages such as\nlogrus. Please file an issue if we're missing support for a\npopular package.\n\nAvailable since\n 2020.1\n", +- "Default": "false", +- "Status": "" +- }, +- { +- "Name": "\"SA5012\"", +- "Doc": "Passing odd-sized slice to function expecting even size\n\nSome functions that take slices as parameters expect the slices to have an even number of elements. \nOften, these functions treat elements in a slice as pairs. \nFor example, strings.NewReplacer takes pairs of old and new strings, \nand calling it with an odd number of elements would be an error.\n\nAvailable since\n 2020.2\n", +- "Default": "false", +- "Status": "" +- }, +- { +- "Name": "\"SA6000\"", +- "Doc": "Using regexp.Match or related in a loop, should use regexp.Compile\n\nAvailable since\n 2017.1\n", +- "Default": "false", +- "Status": "" +- }, +- { +- "Name": "\"SA6001\"", +- "Doc": "Missing an optimization opportunity when indexing maps by byte slices\n\nMap keys must be comparable, which precludes the use of byte slices.\nThis usually leads to using string keys and converting byte slices to\nstrings.\n\nNormally, a conversion of a byte slice to a string needs to copy the data and\ncauses allocations. The compiler, however, recognizes m[string(b)] and\nuses the data of b directly, without copying it, because it knows that\nthe data can't change during the map lookup. This leads to the\ncounter-intuitive situation that\n\n k := string(b)\n println(m[k])\n println(m[k])\n\nwill be less efficient than\n\n println(m[string(b)])\n println(m[string(b)])\n\nbecause the first version needs to copy and allocate, while the second\none does not.\n\nFor some history on this optimization, check out commit\nf5f5a8b6209f84961687d993b93ea0d397f5d5bf in the Go repository.\n\nAvailable since\n 2017.1\n", +- "Default": "false", +- "Status": "" +- }, +- { +- "Name": "\"SA6002\"", +- "Doc": "Storing non-pointer values in sync.Pool allocates memory\n\nA sync.Pool is used to avoid unnecessary allocations and reduce the\namount of work the garbage collector has to do.\n\nWhen passing a value that is not a pointer to a function that accepts\nan interface, the value needs to be placed on the heap, which means an\nadditional allocation. Slices are a common thing to put in sync.Pools,\nand they're structs with 3 fields (length, capacity, and a pointer to\nan array). In order to avoid the extra allocation, one should store a\npointer to the slice instead.\n\nSee the comments on https://go-review.googlesource.com/c/go/+/24371\nthat discuss this problem.\n\nAvailable since\n 2017.1\n", +- "Default": "false", +- "Status": "" +- }, +- { +- "Name": "\"SA6003\"", +- "Doc": "Converting a string to a slice of runes before ranging over it\n\nYou may want to loop over the runes in a string. 
Instead of converting\nthe string to a slice of runes and looping over that, you can loop\nover the string itself. That is,\n\n for _, r := range s {}\n\nand\n\n for _, r := range []rune(s) {}\n\nwill yield the same values. The first version, however, will be faster\nand avoid unnecessary memory allocations.\n\nDo note that if you are interested in the indices, ranging over a\nstring and over a slice of runes will yield different indices. The\nfirst one yields byte offsets, while the second one yields indices in\nthe slice of runes.\n\nAvailable since\n 2017.1\n", +- "Default": "false", +- "Status": "" +- }, +- { +- "Name": "\"SA6005\"", +- "Doc": "Inefficient string comparison with strings.ToLower or strings.ToUpper\n\nConverting two strings to the same case and comparing them like so\n\n if strings.ToLower(s1) == strings.ToLower(s2) {\n ...\n }\n\nis significantly more expensive than comparing them with\nstrings.EqualFold(s1, s2). This is due to memory usage as well as\ncomputational complexity.\n\nstrings.ToLower will have to allocate memory for the new strings, as\nwell as convert both strings fully, even if they differ on the very\nfirst byte. strings.EqualFold, on the other hand, compares the strings\none character at a time. It doesn't need to create two intermediate\nstrings and can return as soon as the first non-matching character has\nbeen found.\n\nFor a more in-depth explanation of this issue, see\nhttps://blog.digitalocean.com/how-to-efficiently-compare-strings-in-go/\n\nAvailable since\n 2019.2\n", +- "Default": "true", +- "Status": "" +- }, +- { +- "Name": "\"SA6006\"", +- "Doc": "Using io.WriteString to write []byte\n\nUsing io.WriteString to write a slice of bytes, as in\n\n io.WriteString(w, string(b))\n\nis both unnecessary and inefficient. Converting from []byte to string\nhas to allocate and copy the data, and we could simply use w.Write(b)\ninstead.\n\nAvailable since\n 2024.1\n", +- "Default": "true", +- "Status": "" +- }, +- { +- "Name": "\"SA9001\"", +- "Doc": "Defers in range loops may not run when you expect them to\n\nAvailable since\n 2017.1\n", +- "Default": "false", +- "Status": "" +- }, +- { +- "Name": "\"SA9002\"", +- "Doc": "Using a non-octal os.FileMode that looks like it was meant to be in octal.\n\nAvailable since\n 2017.1\n", +- "Default": "true", +- "Status": "" +- }, +- { +- "Name": "\"SA9003\"", +- "Doc": "Empty body in an if or else branch\n\nAvailable since\n 2017.1, non-default\n", +- "Default": "false", +- "Status": "" +- }, +- { +- "Name": "\"SA9004\"", +- "Doc": "Only the first constant has an explicit type\n\nIn a constant declaration such as the following:\n\n const (\n First byte = 1\n Second = 2\n )\n\nthe constant Second does not have the same type as the constant First.\nThis construct shouldn't be confused with\n\n const (\n First byte = iota\n Second\n )\n\nwhere First and Second do indeed have the same type. 
The type is only\npassed on when no explicit value is assigned to the constant.\n\nWhen declaring enumerations with explicit values it is therefore\nimportant not to write\n\n const (\n EnumFirst EnumType = 1\n EnumSecond = 2\n EnumThird = 3\n )\n\nThis discrepancy in types can cause various confusing behaviors and\nbugs.\n\n\nWrong type in variable declarations\n\nThe most obvious issue with such incorrect enumerations expresses\nitself as a compile error:\n\n package pkg\n\n const (\n EnumFirst uint8 = 1\n EnumSecond = 2\n )\n\n func fn(useFirst bool) {\n x := EnumSecond\n if useFirst {\n x = EnumFirst\n }\n }\n\nfails to compile with\n\n ./const.go:11:5: cannot use EnumFirst (type uint8) as type int in assignment\n\n\nLosing method sets\n\nA more subtle issue occurs with types that have methods and optional\ninterfaces. Consider the following:\n\n package main\n\n import \"fmt\"\n\n type Enum int\n\n func (e Enum) String() string {\n return \"an enum\"\n }\n\n const (\n EnumFirst Enum = 1\n EnumSecond = 2\n )\n\n func main() {\n fmt.Println(EnumFirst)\n fmt.Println(EnumSecond)\n }\n\nThis code will output\n\n an enum\n 2\n\nas EnumSecond has no explicit type, and thus defaults to int.\n\nAvailable since\n 2019.1\n", +- "Default": "true", +- "Status": "" +- }, +- { +- "Name": "\"SA9005\"", +- "Doc": "Trying to marshal a struct with no public fields nor custom marshaling\n\nThe encoding/json and encoding/xml packages only operate on exported\nfields in structs, not unexported ones. It is usually an error to try\nto (un)marshal structs that only consist of unexported fields.\n\nThis check will not flag calls involving types that define custom\nmarshaling behavior, e.g. via MarshalJSON methods. It will also not\nflag empty structs.\n\nAvailable since\n 2019.2\n", +- "Default": "false", +- "Status": "" +- }, +- { +- "Name": "\"SA9006\"", +- "Doc": "Dubious bit shifting of a fixed size integer value\n\nBit shifting a value past its size will always clear the value.\n\nFor instance:\n\n v := int8(42)\n v \u003e\u003e= 8\n\nwill always result in 0.\n\nThis check flags bit shifting operations on fixed size integer values only.\nThat is, int, uint and uintptr are never flagged to avoid potential false\npositives in somewhat exotic but valid bit twiddling tricks:\n\n // Clear any value above 32 bits if integers are more than 32 bits.\n func f(i int) int {\n v := i \u003e\u003e 32\n v = v \u003c\u003c 32\n return i-v\n }\n\nAvailable since\n 2020.2\n", +- "Default": "true", +- "Status": "" +- }, +- { +- "Name": "\"SA9007\"", +- "Doc": "Deleting a directory that shouldn't be deleted\n\nIt is virtually never correct to delete system directories such as\n/tmp or the user's home directory. However, it can be fairly easy to\ndo by mistake, for example by mistakenly using os.TempDir instead\nof ioutil.TempDir, or by forgetting to add a suffix to the result\nof os.UserHomeDir.\n\nWriting\n\n d := os.TempDir()\n defer os.RemoveAll(d)\n\nin your unit tests will have a devastating effect on the stability of your system.\n\nThis check flags attempts at deleting the following directories:\n\n- os.TempDir\n- os.UserCacheDir\n- os.UserConfigDir\n- os.UserHomeDir\n\nAvailable since\n 2022.1\n", +- "Default": "false", +- "Status": "" +- }, +- { +- "Name": "\"SA9008\"", +- "Doc": "else branch of a type assertion is probably not reading the right value\n\nWhen declaring variables as part of an if statement (like in 'if\nfoo := ...; foo {'), the same variables will also be in the scope of\nthe else branch. 
This means that in the following example\n\n if x, ok := x.(int); ok {\n // ...\n } else {\n fmt.Printf(\"unexpected type %T\", x)\n }\n\nx in the else branch will refer to the x from x, ok\n:=; it will not refer to the x that is being type-asserted. The\nresult of a failed type assertion is the zero value of the type that\nis being asserted to, so x in the else branch will always have the\nvalue 0 and the type int.\n\nAvailable since\n 2022.1\n", +- "Default": "false", +- "Status": "" +- }, +- { +- "Name": "\"SA9009\"", +- "Doc": "Ineffectual Go compiler directive\n\nA potential Go compiler directive was found, but is ineffectual as it begins\nwith whitespace.\n\nAvailable since\n 2024.1\n", +- "Default": "true", +- "Status": "" +- }, +- { +- "Name": "\"ST1000\"", +- "Doc": "Incorrect or missing package comment\n\nPackages must have a package comment that is formatted according to\nthe guidelines laid out in\nhttps://go.dev/wiki/CodeReviewComments#package-comments.\n\nAvailable since\n 2019.1, non-default\n", +- "Default": "false", +- "Status": "" +- }, +- { +- "Name": "\"ST1001\"", +- "Doc": "Dot imports are discouraged\n\nDot imports that aren't in external test packages are discouraged.\n\nThe dot_import_whitelist option can be used to whitelist certain\nimports.\n\nQuoting Go Code Review Comments:\n\n\u003e The import . form can be useful in tests that, due to circular\n\u003e dependencies, cannot be made part of the package being tested:\n\u003e \n\u003e package foo_test\n\u003e \n\u003e import (\n\u003e \"bar/testutil\" // also imports \"foo\"\n\u003e . \"foo\"\n\u003e )\n\u003e \n\u003e In this case, the test file cannot be in package foo because it\n\u003e uses bar/testutil, which imports foo. So we use the import .\n\u003e form to let the file pretend to be part of package foo even though\n\u003e it is not. Except for this one case, do not use import . in your\n\u003e programs. It makes the programs much harder to read because it is\n\u003e unclear whether a name like Quux is a top-level identifier in the\n\u003e current package or in an imported package.\n\nAvailable since\n 2019.1\n\nOptions\n dot_import_whitelist\n", +- "Default": "false", +- "Status": "" +- }, +- { +- "Name": "\"ST1003\"", +- "Doc": "Poorly chosen identifier\n\nIdentifiers, such as variable and package names, follow certain rules.\n\nSee the following links for details:\n\n- https://go.dev/doc/effective_go#package-names\n- https://go.dev/doc/effective_go#mixed-caps\n- https://go.dev/wiki/CodeReviewComments#initialisms\n- https://go.dev/wiki/CodeReviewComments#variable-names\n\nAvailable since\n 2019.1, non-default\n\nOptions\n initialisms\n", +- "Default": "false", +- "Status": "" +- }, +- { +- "Name": "\"ST1005\"", +- "Doc": "Incorrectly formatted error string\n\nError strings follow a set of guidelines to ensure uniformity and good\ncomposability.\n\nQuoting Go Code Review Comments:\n\n\u003e Error strings should not be capitalized (unless beginning with\n\u003e proper nouns or acronyms) or end with punctuation, since they are\n\u003e usually printed following other context. 
That is, use\n\u003e fmt.Errorf(\"something bad\") not fmt.Errorf(\"Something bad\"), so\n\u003e that log.Printf(\"Reading %s: %v\", filename, err) formats without a\n\u003e spurious capital letter mid-message.\n\nAvailable since\n 2019.1\n", +- "Default": "false", +- "Status": "" +- }, +- { +- "Name": "\"ST1006\"", +- "Doc": "Poorly chosen receiver name\n\nQuoting Go Code Review Comments:\n\n\u003e The name of a method's receiver should be a reflection of its\n\u003e identity; often a one or two letter abbreviation of its type\n\u003e suffices (such as \"c\" or \"cl\" for \"Client\"). Don't use generic\n\u003e names such as \"me\", \"this\" or \"self\", identifiers typical of\n\u003e object-oriented languages that place more emphasis on methods as\n\u003e opposed to functions. The name need not be as descriptive as that\n\u003e of a method argument, as its role is obvious and serves no\n\u003e documentary purpose. It can be very short as it will appear on\n\u003e almost every line of every method of the type; familiarity admits\n\u003e brevity. Be consistent, too: if you call the receiver \"c\" in one\n\u003e method, don't call it \"cl\" in another.\n\nAvailable since\n 2019.1\n", +- "Default": "false", +- "Status": "" +- }, +- { +- "Name": "\"ST1008\"", +- "Doc": "A function's error value should be its last return value\n\nA function's error value should be its last return value.\n\nAvailable since\n 2019.1\n", +- "Default": "false", +- "Status": "" +- }, +- { +- "Name": "\"ST1011\"", +- "Doc": "Poorly chosen name for variable of type time.Duration\n\ntime.Duration values represent an amount of time, which is represented\nas a count of nanoseconds. An expression like 5 * time.Microsecond\nyields the value 5000. It is therefore not appropriate to suffix a\nvariable of type time.Duration with any time unit, such as Msec or\nMilli.\n\nAvailable since\n 2019.1\n", +- "Default": "false", +- "Status": "" +- }, +- { +- "Name": "\"ST1012\"", +- "Doc": "Poorly chosen name for error variable\n\nError variables that are part of an API should be called errFoo or\nErrFoo.\n\nAvailable since\n 2019.1\n", +- "Default": "false", +- "Status": "" +- }, +- { +- "Name": "\"ST1013\"", +- "Doc": "Should use constants for HTTP error codes, not magic numbers\n\nHTTP has a tremendous number of status codes. While some of those are\nwell known (200, 400, 404, 500), most of them are not. The net/http\npackage provides constants for all status codes that are part of the\nvarious specifications. It is recommended to use these constants\ninstead of hard-coding magic numbers, to vastly improve the\nreadability of your code.\n\nAvailable since\n 2019.1\n\nOptions\n http_status_code_whitelist\n", +- "Default": "false", +- "Status": "" +- }, +- { +- "Name": "\"ST1015\"", +- "Doc": "A switch's default case should be the first or last case\n\nAvailable since\n 2019.1\n", +- "Default": "false", +- "Status": "" +- }, +- { +- "Name": "\"ST1016\"", +- "Doc": "Use consistent method receiver names\n\nAvailable since\n 2019.1, non-default\n", +- "Default": "false", +- "Status": "" +- }, +- { +- "Name": "\"ST1017\"", +- "Doc": "Don't use Yoda conditions\n\nYoda conditions are conditions of the kind 'if 42 == x', where the\nliteral is on the left side of the comparison. These are a common\nidiom in languages in which assignment is an expression, to avoid bugs\nof the kind 'if (x = 42)'. 
In Go, which doesn't allow for this kind of\nbug, we prefer the more idiomatic 'if x == 42'.\n\nAvailable since\n 2019.2\n", +- "Default": "false", +- "Status": "" +- }, +- { +- "Name": "\"ST1018\"", +- "Doc": "Avoid zero-width and control characters in string literals\n\nAvailable since\n 2019.2\n", +- "Default": "false", +- "Status": "" +- }, +- { +- "Name": "\"ST1019\"", +- "Doc": "Importing the same package multiple times\n\nGo allows importing the same package multiple times, as long as\ndifferent import aliases are being used. That is, the following\nbit of code is valid:\n\n import (\n \"fmt\"\n fumpt \"fmt\"\n format \"fmt\"\n _ \"fmt\"\n )\n\nHowever, this is very rarely done on purpose. Usually, it is a\nsign of code that got refactored, accidentally adding duplicate\nimport statements. It is also a rarely known feature, which may\ncontribute to confusion.\n\nDo note that sometimes, this feature may be used\nintentionally (see for example\nhttps://github.com/golang/go/commit/3409ce39bfd7584523b7a8c150a310cea92d879d)\n– if you want to allow this pattern in your code base, you're\nadvised to disable this check.\n\nAvailable since\n 2020.1\n", +- "Default": "false", +- "Status": "" +- }, +- { +- "Name": "\"ST1020\"", +- "Doc": "The documentation of an exported function should start with the function's name\n\nDoc comments work best as complete sentences, which\nallow a wide variety of automated presentations. The first sentence\nshould be a one-sentence summary that starts with the name being\ndeclared.\n\nIf every doc comment begins with the name of the item it describes,\nyou can use the doc subcommand of the go tool and run the output\nthrough grep.\n\nSee https://go.dev/doc/effective_go#commentary for more\ninformation on how to write good documentation.\n\nAvailable since\n 2020.1, non-default\n", +- "Default": "false", +- "Status": "" +- }, +- { +- "Name": "\"ST1021\"", +- "Doc": "The documentation of an exported type should start with type's name\n\nDoc comments work best as complete sentences, which\nallow a wide variety of automated presentations. The first sentence\nshould be a one-sentence summary that starts with the name being\ndeclared.\n\nIf every doc comment begins with the name of the item it describes,\nyou can use the doc subcommand of the go tool and run the output\nthrough grep.\n\nSee https://go.dev/doc/effective_go#commentary for more\ninformation on how to write good documentation.\n\nAvailable since\n 2020.1, non-default\n", +- "Default": "false", +- "Status": "" +- }, +- { +- "Name": "\"ST1022\"", +- "Doc": "The documentation of an exported variable or constant should start with variable's name\n\nDoc comments work best as complete sentences, which\nallow a wide variety of automated presentations. 
The first sentence\nshould be a one-sentence summary that starts with the name being\ndeclared.\n\nIf every doc comment begins with the name of the item it describes,\nyou can use the doc subcommand of the go tool and run the output\nthrough grep.\n\nSee https://go.dev/doc/effective_go#commentary for more\ninformation on how to write good documentation.\n\nAvailable since\n 2020.1, non-default\n", +- "Default": "false", +- "Status": "" +- }, +- { +- "Name": "\"ST1023\"", +- "Doc": "Redundant type in variable declaration\n\nAvailable since\n 2021.1, non-default\n", +- "Default": "false", +- "Status": "" +- }, +- { +- "Name": "\"any\"", +- "Doc": "replace interface{} with any\n\nThe any analyzer suggests replacing uses of the empty interface type,\n`interface{}`, with the `any` alias, which was introduced in Go 1.18.\nThis is a purely stylistic change that makes code more readable.", +- "Default": "true", +- "Status": "" +- }, +- { +- "Name": "\"appendclipped\"", +- "Doc": "simplify append chains using slices.Concat\n\nThe appendclipped analyzer suggests replacing chains of append calls with a\nsingle call to slices.Concat, which was added in Go 1.21. For example,\nappend(append(s, s1...), s2...) would be simplified to slices.Concat(s, s1, s2).\n\nIn the simple case of appending to a newly allocated slice, such as\nappend([]T(nil), s...), the analyzer suggests the more concise slices.Clone(s).\nFor byte slices, it will prefer bytes.Clone if the \"bytes\" package is\nalready imported.\n\nThis fix is only applied when the base of the append tower is a\n\"clipped\" slice, meaning its length and capacity are equal (e.g.\nx[:0:0] or []T{}). This is to avoid changing program behavior by\neliminating intended side effects on the base slice's underlying\narray.\n\nThis analyzer is currently disabled by default as the\ntransformation does not preserve the nilness of the base slice in\nall cases; see https://go.dev/issue/73557.", +- "Default": "false", +- "Status": "" +- }, +- { +- "Name": "\"appends\"", +- "Doc": "check for missing values after append\n\nThis checker reports calls to append that pass\nno values to be appended to the slice.\n\n\ts := []string{\"a\", \"b\", \"c\"}\n\t_ = append(s)\n\nSuch calls are always no-ops and often indicate an\nunderlying mistake.", +- "Default": "true", +- "Status": "" +- }, +- { +- "Name": "\"asmdecl\"", +- "Doc": "report mismatches between assembly files and Go declarations", +- "Default": "true", +- "Status": "" +- }, +- { +- "Name": "\"assign\"", +- "Doc": "check for useless assignments\n\nThis checker reports assignments of the form x = x or a[i] = a[i].\nThese are almost always useless, and even when they aren't they are\nusually a mistake.", +- "Default": "true", +- "Status": "" +- }, +- { +- "Name": "\"atomic\"", +- "Doc": "check for common mistakes using the sync/atomic package\n\nThe atomic checker looks for assignment statements of the form:\n\n\tx = atomic.AddUint64(\u0026x, 1)\n\nwhich are not atomic.", +- "Default": "true", +- "Status": "" +- }, +- { +- "Name": "\"atomicalign\"", +- "Doc": "check for non-64-bits-aligned arguments to sync/atomic functions", +- "Default": "true", +- "Status": "" +- }, +- { +- "Name": "\"bloop\"", +- "Doc": "replace for-range over b.N with b.Loop\n\nThe bloop analyzer suggests replacing benchmark loops of the form\n`for i := 0; i \u003c b.N; i++` or `for range b.N` with the more modern\n`for b.Loop()`, which was added in Go 1.24.\n\nThis change makes benchmark code more readable and also removes the need for\nmanual 
timer control, so any preceding calls to b.StartTimer, b.StopTimer,\nor b.ResetTimer within the same function will also be removed.\n\nCaveats: The b.Loop() method is designed to prevent the compiler from\noptimizing away the benchmark loop, which can occasionally result in\nslower execution due to increased allocations in some specific cases.", +- "Default": "true", +- "Status": "" +- }, +- { +- "Name": "\"bools\"", +- "Doc": "check for common mistakes involving boolean operators", +- "Default": "true", +- "Status": "" +- }, +- { +- "Name": "\"buildtag\"", +- "Doc": "check //go:build and // +build directives", +- "Default": "true", +- "Status": "" +- }, +- { +- "Name": "\"cgocall\"", +- "Doc": "detect some violations of the cgo pointer passing rules\n\nCheck for invalid cgo pointer passing.\nThis looks for code that uses cgo to call C code passing values\nwhose types are almost always invalid according to the cgo pointer\nsharing rules.\nSpecifically, it warns about attempts to pass a Go chan, map, func,\nor slice to C, either directly, or via a pointer, array, or struct.", +- "Default": "true", +- "Status": "" +- }, +- { +- "Name": "\"composites\"", +- "Doc": "check for unkeyed composite literals\n\nThis analyzer reports a diagnostic for composite literals of struct\ntypes imported from another package that do not use the field-keyed\nsyntax. Such literals are fragile because the addition of a new field\n(even if unexported) to the struct will cause compilation to fail.\n\nAs an example,\n\n\terr = \u0026net.DNSConfigError{err}\n\nshould be replaced by:\n\n\terr = \u0026net.DNSConfigError{Err: err}\n", +- "Default": "true", +- "Status": "" +- }, +- { +- "Name": "\"copylocks\"", +- "Doc": "check for locks erroneously passed by value\n\nInadvertently copying a value containing a lock, such as sync.Mutex or\nsync.WaitGroup, may cause both copies to malfunction. Generally such\nvalues should be referred to through a pointer.", +- "Default": "true", +- "Status": "" +- }, +- { +- "Name": "\"deepequalerrors\"", +- "Doc": "check for calls of reflect.DeepEqual on error values\n\nThe deepequalerrors checker looks for calls of the form:\n\n reflect.DeepEqual(err1, err2)\n\nwhere err1 and err2 are errors. 
Using reflect.DeepEqual to compare\nerrors is discouraged.", +- "Default": "true", +- "Status": "" +- }, +- { +- "Name": "\"defers\"", +- "Doc": "report common mistakes in defer statements\n\nThe defers analyzer reports a diagnostic when a defer statement would\nresult in a non-deferred call to time.Since, as experience has shown\nthat this is nearly always a mistake.\n\nFor example:\n\n\tstart := time.Now()\n\t...\n\tdefer recordLatency(time.Since(start)) // error: call to time.Since is not deferred\n\nThe correct code is:\n\n\tdefer func() { recordLatency(time.Since(start)) }()", +- "Default": "true", +- "Status": "" +- }, +- { +- "Name": "\"deprecated\"", +- "Doc": "check for use of deprecated identifiers\n\nThe deprecated analyzer looks for deprecated symbols and package\nimports.\n\nSee https://go.dev/wiki/Deprecated to learn about Go's convention\nfor documenting and signaling deprecated identifiers.", +- "Default": "true", +- "Status": "" +- }, +- { +- "Name": "\"directive\"", +- "Doc": "check Go toolchain directives such as //go:debug\n\nThis analyzer checks for problems with known Go toolchain directives\nin all Go source files in a package directory, even those excluded by\n//go:build constraints, and all non-Go source files too.\n\nFor //go:debug (see https://go.dev/doc/godebug), the analyzer checks\nthat the directives are placed only in Go source files, only above the\npackage comment, and only in package main or *_test.go files.\n\nSupport for other known directives may be added in the future.\n\nThis analyzer does not check //go:build, which is handled by the\nbuildtag analyzer.\n", +- "Default": "true", +- "Status": "" +- }, +- { +- "Name": "\"embed\"", +- "Doc": "check //go:embed directive usage\n\nThis analyzer checks that the embed package is imported if //go:embed\ndirectives are present, providing a suggested fix to add the import if\nit is missing.\n\nThis analyzer also checks that //go:embed directives precede the\ndeclaration of a single variable.", +- "Default": "true", +- "Status": "" +- }, +- { +- "Name": "\"errorsas\"", +- "Doc": "report passing non-pointer or non-error values to errors.As\n\nThe errorsas analyzer reports calls to errors.As where the type\nof the second argument is not a pointer to a type implementing error.", +- "Default": "true", +- "Status": "" +- }, +- { +- "Name": "\"errorsastype\"", +- "Doc": "replace errors.As with errors.AsType[T]\n\nThis analyzer suggests fixes to simplify uses of [errors.As] of\nthis form:\n\n\tvar myerr *MyErr\n\tif errors.As(err, \u0026myerr) {\n\t\thandle(myerr)\n\t}\n\nby using the less error-prone generic [errors.AsType] function,\nintroduced in Go 1.26:\n\n\tif myerr, ok := errors.AsType[*MyErr](err); ok {\n\t\thandle(myerr)\n\t}\n\nThe fix is only offered if the var declaration has the form shown and\nthere are no uses of myerr outside the if statement.", +- "Default": "true", +- "Status": "" +- }, +- { +- "Name": "\"fillreturns\"", +- "Doc": "suggest fixes for errors due to an incorrect number of return values\n\nThis checker provides suggested fixes for type errors of the\ntype \"wrong number of return values (want %d, got %d)\". 
For example:\n\n\tfunc m() (int, string, *bool, error) {\n\t\treturn\n\t}\n\nwill turn into\n\n\tfunc m() (int, string, *bool, error) {\n\t\treturn 0, \"\", nil, nil\n\t}\n\nThis functionality is similar to https://github.com/sqs/goreturns.", +- "Default": "true", +- "Status": "" +- }, +- { +- "Name": "\"fmtappendf\"", +- "Doc": "replace []byte(fmt.Sprintf) with fmt.Appendf\n\nThe fmtappendf analyzer suggests replacing `[]byte(fmt.Sprintf(...))` with\n`fmt.Appendf(nil, ...)`. This avoids the intermediate allocation of a string\nby Sprintf, making the code more efficient. The suggestion also applies to\nfmt.Sprint and fmt.Sprintln.", +- "Default": "true", +- "Status": "" +- }, +- { +- "Name": "\"forvar\"", +- "Doc": "remove redundant re-declaration of loop variables\n\nThe forvar analyzer removes unnecessary shadowing of loop variables.\nBefore Go 1.22, it was common to write `for _, x := range s { x := x ... }`\nto create a fresh variable for each iteration. Go 1.22 changed the semantics\nof `for` loops, making this pattern redundant. This analyzer removes the\nunnecessary `x := x` statement.\n\nThis fix only applies to `range` loops.", +- "Default": "true", +- "Status": "" +- }, +- { +- "Name": "\"framepointer\"", +- "Doc": "report assembly that clobbers the frame pointer before saving it", +- "Default": "true", +- "Status": "" +- }, +- { +- "Name": "\"hostport\"", +- "Doc": "check format of addresses passed to net.Dial\n\nThis analyzer flags code that produce network address strings using\nfmt.Sprintf, as in this example:\n\n addr := fmt.Sprintf(\"%s:%d\", host, 12345) // \"will not work with IPv6\"\n ...\n conn, err := net.Dial(\"tcp\", addr) // \"when passed to dial here\"\n\nThe analyzer suggests a fix to use the correct approach, a call to\nnet.JoinHostPort:\n\n addr := net.JoinHostPort(host, \"12345\")\n ...\n conn, err := net.Dial(\"tcp\", addr)\n\nA similar diagnostic and fix are produced for a format string of \"%s:%s\".\n", +- "Default": "true", +- "Status": "" +- }, +- { +- "Name": "\"httpresponse\"", +- "Doc": "check for mistakes using HTTP responses\n\nA common mistake when using the net/http package is to defer a function\ncall to close the http.Response Body before checking the error that\ndetermines whether the response is valid:\n\n\tresp, err := http.Head(url)\n\tdefer resp.Body.Close()\n\tif err != nil {\n\t\tlog.Fatal(err)\n\t}\n\t// (defer statement belongs here)\n\nThis checker helps uncover latent nil dereference bugs by reporting a\ndiagnostic for such mistakes.", +- "Default": "true", +- "Status": "" +- }, +- { +- "Name": "\"ifaceassert\"", +- "Doc": "detect impossible interface-to-interface type assertions\n\nThis checker flags type assertions v.(T) and corresponding type-switch cases\nin which the static type V of v is an interface that cannot possibly implement\nthe target interface T. This occurs when V and T contain methods with the same\nname but different signatures. 
Example:\n\n\tvar v interface {\n\t\tRead()\n\t}\n\t_ = v.(io.Reader)\n\nThe Read method in v has a different signature than the Read method in\nio.Reader, so this assertion cannot succeed.", +- "Default": "true", +- "Status": "" +- }, +- { +- "Name": "\"infertypeargs\"", +- "Doc": "check for unnecessary type arguments in call expressions\n\nExplicit type arguments may be omitted from call expressions if they can be\ninferred from function arguments, or from other type arguments:\n\n\tfunc f[T any](T) {}\n\t\n\tfunc _() {\n\t\tf[string](\"foo\") // string could be inferred\n\t}\n", +- "Default": "true", +- "Status": "" +- }, +- { +- "Name": "\"inline\"", +- "Doc": "apply fixes based on 'go:fix inline' comment directives\n\nThe inline analyzer inlines functions and constants that are marked for inlining.\n\n## Functions\n\nGiven a function that is marked for inlining, like this one:\n\n\t//go:fix inline\n\tfunc Square(x int) int { return Pow(x, 2) }\n\nthis analyzer will recommend that calls to the function elsewhere, in the same\nor other packages, should be inlined.\n\nInlining can be used to move off of a deprecated function:\n\n\t// Deprecated: prefer Pow(x, 2).\n\t//go:fix inline\n\tfunc Square(x int) int { return Pow(x, 2) }\n\nIt can also be used to move off of an obsolete package,\nas when the import path has changed or a higher major version is available:\n\n\tpackage pkg\n\n\timport pkg2 \"pkg/v2\"\n\n\t//go:fix inline\n\tfunc F() { pkg2.F(nil) }\n\nReplacing a call pkg.F() by pkg2.F(nil) can have no effect on the program,\nso this mechanism provides a low-risk way to update large numbers of calls.\nWe recommend, where possible, expressing the old API in terms of the new one\nto enable automatic migration.\n\nThe inliner takes care to avoid behavior changes, even subtle ones,\nsuch as changes to the order in which argument expressions are\nevaluated. When it cannot safely eliminate all parameter variables,\nit may introduce a \"binding declaration\" of the form\n\n\tvar params = args\n\nto evaluate argument expressions in the correct order and bind them to\nparameter variables. Since the resulting code transformation may be\nstylistically suboptimal, such inlinings may be disabled by specifying\nthe -inline.allow_binding_decl=false flag to the analyzer driver.\n\n(In cases where it is not safe to \"reduce\" a call—that is, to replace\na call f(x) by the body of function f, suitably substituted—the\ninliner machinery is capable of replacing f by a function literal,\nfunc(){...}(). 
However, the inline analyzer discards all such\n\"literalizations\" unconditionally, again on grounds of style.)\n\n## Constants\n\nGiven a constant that is marked for inlining, like this one:\n\n\t//go:fix inline\n\tconst Ptr = Pointer\n\nthis analyzer will recommend that uses of Ptr should be replaced with Pointer.\n\nAs with functions, inlining can be used to replace deprecated constants and\nconstants in obsolete packages.\n\nA constant definition can be marked for inlining only if it refers to another\nnamed constant.\n\nThe \"//go:fix inline\" comment must appear before a single const declaration on its own,\nas above; before a const declaration that is part of a group, as in this case:\n\n\tconst (\n\t C = 1\n\t //go:fix inline\n\t Ptr = Pointer\n\t)\n\nor before a group, applying to every constant in the group:\n\n\t//go:fix inline\n\tconst (\n\t\tPtr = Pointer\n\t Val = Value\n\t)\n\nThe proposal https://go.dev/issue/32816 introduces the \"//go:fix inline\" directives.\n\nYou can use this command to apply inline fixes en masse:\n\n\t$ go run golang.org/x/tools/go/analysis/passes/inline/cmd/inline@latest -fix ./...", +- "Default": "true", +- "Status": "" +- }, +- { +- "Name": "\"loopclosure\"", +- "Doc": "check references to loop variables from within nested functions\n\nThis analyzer reports places where a function literal references the\niteration variable of an enclosing loop, and the loop calls the function\nin such a way (e.g. with go or defer) that it may outlive the loop\niteration and possibly observe the wrong value of the variable.\n\nNote: An iteration variable can only outlive a loop iteration in Go versions \u003c=1.21.\nIn Go 1.22 and later, the loop variable lifetimes changed to create a new\niteration variable per loop iteration. (See go.dev/issue/60078.)\n\nIn this example, all the deferred functions run after the loop has\ncompleted, so all observe the final value of v [\u003cgo1.22].\n\n\tfor _, v := range list {\n\t defer func() {\n\t use(v) // incorrect\n\t }()\n\t}\n\nOne fix is to create a new variable for each iteration of the loop:\n\n\tfor _, v := range list {\n\t v := v // new var per iteration\n\t defer func() {\n\t use(v) // ok\n\t }()\n\t}\n\nAfter Go version 1.22, the previous two for loops are equivalent\nand both are correct.\n\nThe next example uses a go statement and has a similar problem [\u003cgo1.22].\nIn addition, it has a data race because the loop updates v\nconcurrent with the goroutines accessing it.\n\n\tfor _, v := range elem {\n\t go func() {\n\t use(v) // incorrect, and a data race\n\t }()\n\t}\n\nA fix is the same as before. 
The checker also reports problems\nin goroutines started by golang.org/x/sync/errgroup.Group.\nA hard-to-spot variant of this form is common in parallel tests:\n\n\tfunc Test(t *testing.T) {\n\t for _, test := range tests {\n\t t.Run(test.name, func(t *testing.T) {\n\t t.Parallel()\n\t use(test) // incorrect, and a data race\n\t })\n\t }\n\t}\n\nThe t.Parallel() call causes the rest of the function to execute\nconcurrent with the loop [\u003cgo1.22].\n\nThe analyzer reports references only in the last statement,\nas it is not deep enough to understand the effects of subsequent\nstatements that might render the reference benign.\n(\"Last statement\" is defined recursively in compound\nstatements such as if, switch, and select.)\n\nSee: https://golang.org/doc/go_faq.html#closures_and_goroutines", +- "Default": "true", +- "Status": "" +- }, +- { +- "Name": "\"lostcancel\"", +- "Doc": "check cancel func returned by context.WithCancel is called\n\nThe cancellation function returned by context.WithCancel, WithTimeout,\nWithDeadline and variants such as WithCancelCause must be called,\nor the new context will remain live until its parent context is cancelled.\n(The background context is never cancelled.)", +- "Default": "true", +- "Status": "" +- }, +- { +- "Name": "\"maprange\"", +- "Doc": "checks for unnecessary calls to maps.Keys and maps.Values in range statements\n\nConsider a loop written like this:\n\n\tfor val := range maps.Values(m) {\n\t\tfmt.Println(val)\n\t}\n\nThis should instead be written without the call to maps.Values:\n\n\tfor _, val := range m {\n\t\tfmt.Println(val)\n\t}\n\ngolang.org/x/exp/maps returns slices for Keys/Values instead of iterators,\nbut unnecessary calls should similarly be removed:\n\n\tfor _, key := range maps.Keys(m) {\n\t\tfmt.Println(key)\n\t}\n\nshould be rewritten as:\n\n\tfor key := range m {\n\t\tfmt.Println(key)\n\t}", +- "Default": "true", +- "Status": "" +- }, +- { +- "Name": "\"mapsloop\"", +- "Doc": "replace explicit loops over maps with calls to maps package\n\nThe mapsloop analyzer replaces loops of the form\n\n\tfor k, v := range x { m[k] = v }\n\nwith a single call to a function from the `maps` package, added in Go 1.23.\nDepending on the context, this could be `maps.Copy`, `maps.Insert`,\n`maps.Clone`, or `maps.Collect`.\n\nThe transformation to `maps.Clone` is applied conservatively, as it\npreserves the nilness of the source map, which may be a subtle change in\nbehavior if the original code did not handle a nil map in the same way.", +- "Default": "true", +- "Status": "" +- }, +- { +- "Name": "\"minmax\"", +- "Doc": "replace if/else statements with calls to min or max\n\nThe minmax analyzer simplifies conditional assignments by suggesting the use\nof the built-in `min` and `max` functions, introduced in Go 1.21. 
For example,\n\n\tif a \u003c b { x = a } else { x = b }\n\nis replaced by\n\n\tx = min(a, b).\n\nThis analyzer avoids making suggestions for floating-point types,\nas the behavior of `min` and `max` with NaN values can differ from\nthe original if/else statement.", +- "Default": "true", +- "Status": "" +- }, +- { +- "Name": "\"newexpr\"", +- "Doc": "simplify code by using go1.26's new(expr)\n\nThis analyzer finds declarations of functions of this form:\n\n\tfunc varOf(x int) *int { return \u0026x }\n\nand suggests a fix to turn them into inlinable wrappers around\ngo1.26's built-in new(expr) function:\n\n\tfunc varOf(x int) *int { return new(x) }\n\nIn addition, this analyzer suggests a fix for each call\nto one of the functions before it is transformed, so that\n\n\tuse(varOf(123))\n\nis replaced by:\n\n\tuse(new(123))\n\n(Wrapper functions such as varOf are common when working with Go\nserialization packages such as for JSON or protobuf, where pointers\nare often used to express optionality.)", +- "Default": "true", +- "Status": "" +- }, +- { +- "Name": "\"nilfunc\"", +- "Doc": "check for useless comparisons between functions and nil\n\nA useless comparison is one like f == nil as opposed to f() == nil.", +- "Default": "true", +- "Status": "" +- }, +- { +- "Name": "\"nilness\"", +- "Doc": "check for redundant or impossible nil comparisons\n\nThe nilness checker inspects the control-flow graph of each function in\na package and reports nil pointer dereferences, degenerate nil\npointers, and panics with nil values. A degenerate comparison is of the form\nx==nil or x!=nil where x is statically known to be nil or non-nil. These are\noften a mistake, especially in control flow related to errors. Panics with nil\nvalues are checked because they are not detectable by\n\n\tif r := recover(); r != nil {\n\nThis check reports conditions such as:\n\n\tif f == nil { // impossible condition (f is a function)\n\t}\n\nand:\n\n\tp := \u0026v\n\t...\n\tif p != nil { // tautological condition\n\t}\n\nand:\n\n\tif p == nil {\n\t\tprint(*p) // nil dereference\n\t}\n\nand:\n\n\tif p == nil {\n\t\tpanic(p)\n\t}\n\nSometimes the control flow may be quite complex, making bugs hard\nto spot. In the example below, the err.Error expression is\nguaranteed to panic because, after the first return, err must be\nnil. The intervening loop is just a distraction.\n\n\t...\n\terr := g.Wait()\n\tif err != nil {\n\t\treturn err\n\t}\n\tpartialSuccess := false\n\tfor _, err := range errs {\n\t\tif err == nil {\n\t\t\tpartialSuccess = true\n\t\t\tbreak\n\t\t}\n\t}\n\tif partialSuccess {\n\t\treportStatus(StatusMessage{\n\t\t\tCode: code.ERROR,\n\t\t\tDetail: err.Error(), // \"nil dereference in dynamic method call\"\n\t\t})\n\t\treturn nil\n\t}\n\n...", +- "Default": "true", +- "Status": "" +- }, +- { +- "Name": "\"nonewvars\"", +- "Doc": "suggested fixes for \"no new vars on left side of :=\"\n\nThis checker provides suggested fixes for type errors of the\ntype \"no new vars on left side of :=\". 
For example:\n\n\tz := 1\n\tz := 2\n\nwill turn into\n\n\tz := 1\n\tz = 2", +- "Default": "true", +- "Status": "" +- }, +- { +- "Name": "\"noresultvalues\"", +- "Doc": "suggested fixes for unexpected return values\n\nThis checker provides suggested fixes for type errors of the\ntype \"no result values expected\" or \"too many return values\".\nFor example:\n\n\tfunc z() { return nil }\n\nwill turn into\n\n\tfunc z() { return }", +- "Default": "true", +- "Status": "" +- }, +- { +- "Name": "\"omitzero\"", +- "Doc": "suggest replacing omitempty with omitzero for struct fields\n\nThe omitzero analyzer identifies uses of the `omitempty` JSON struct tag on\nfields that are themselves structs. The `omitempty` tag has no effect on\nstruct-typed fields. The analyzer offers two suggestions: either remove the\ntag, or replace it with `omitzero` (added in Go 1.24), which correctly\nomits the field if the struct value is zero.\n\nReplacing `omitempty` with `omitzero` is a change in behavior. The\noriginal code would always encode the struct field, whereas the\nmodified code will omit it if it is a zero-value.", +- "Default": "true", +- "Status": "" +- }, +- { +- "Name": "\"printf\"", +- "Doc": "check consistency of Printf format strings and arguments\n\nThe check applies to calls of the formatting functions such as\n[fmt.Printf] and [fmt.Sprintf], as well as any detected wrappers of\nthose functions such as [log.Printf]. It reports a variety of\nmistakes such as syntax errors in the format string and mismatches\n(of number and type) between the verbs and their arguments.\n\nSee the documentation of the fmt package for the complete set of\nformat operators and their operand types.", +- "Default": "true", +- "Status": "" +- }, +- { +- "Name": "\"rangeint\"", +- "Doc": "replace 3-clause for loops with for-range over integers\n\nThe rangeint analyzer suggests replacing traditional for loops such\nas\n\n\tfor i := 0; i \u003c n; i++ { ... }\n\nwith the more idiomatic Go 1.22 style:\n\n\tfor i := range n { ... }\n\nThis transformation is applied only if (a) the loop variable is not\nmodified within the loop body and (b) the loop's limit expression\nis not modified within the loop, as `for range` evaluates its\noperand only once.", +- "Default": "true", +- "Status": "" +- }, +- { +- "Name": "\"recursiveiter\"", +- "Doc": "check for inefficient recursive iterators\n\nThis analyzer reports when a function that returns an iterator\n(iter.Seq or iter.Seq2) calls itself as the operand of a range\nstatement, as this is inefficient.\n\nWhen implementing an iterator (e.g. iter.Seq[T]) for a recursive\ndata type such as a tree or linked list, it is tempting to\nrecursively range over the iterator for each child element.\n\nHere's an example of a naive iterator over a binary tree:\n\n\ttype tree struct {\n\t\tvalue int\n\t\tleft, right *tree\n\t}\n\n\tfunc (t *tree) All() iter.Seq[int] {\n\t\treturn func(yield func(int) bool) {\n\t\t\tif t != nil {\n\t\t\t\tfor elem := range t.left.All() { // \"inefficient recursive iterator\"\n\t\t\t\t\tif !yield(elem) {\n\t\t\t\t\t\treturn\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t\tif !yield(t.value) {\n\t\t\t\t\treturn\n\t\t\t\t}\n\t\t\t\tfor elem := range t.right.All() { // \"inefficient recursive iterator\"\n\t\t\t\t\tif !yield(elem) {\n\t\t\t\t\t\treturn\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n\nThough it correctly enumerates the elements of the tree, it hides a\nsignificant performance problem--two, in fact. Consider a balanced\ntree of N nodes. 
Iterating the root node will cause All to be\ncalled once on every node of the tree. This results in a chain of\nnested active range-over-func statements when yield(t.value) is\ncalled on a leaf node.\n\nThe first performance problem is that each range-over-func\nstatement must typically heap-allocate a variable, so iteration of\nthe tree allocates as many variables as there are elements in the\ntree, for a total of O(N) allocations, all unnecessary.\n\nThe second problem is that each call to yield for a leaf of the\ntree causes each of the enclosing range loops to receive a value,\nwhich they then immediately pass on to their respective yield\nfunction. This results in a chain of log(N) dynamic yield calls per\nelement, a total of O(N*log N) dynamic calls overall, when only\nO(N) are necessary.\n\nA better implementation strategy for recursive iterators is to\nfirst define the \"every\" operator for your recursive data type,\nwhere every(f) reports whether an arbitrary predicate f(x) is true\nfor every element x in the data type. For our tree, the every\nfunction would be:\n\n\tfunc (t *tree) every(f func(int) bool) bool {\n\t\treturn t == nil ||\n\t\t\tt.left.every(f) \u0026\u0026 f(t.value) \u0026\u0026 t.right.every(f)\n\t}\n\nFor example, this use of the every operator prints whether every\nelement in the tree is an even number:\n\n\teven := func(x int) bool { return x\u00261 == 0 }\n\tprintln(t.every(even))\n\nThen the iterator can be simply expressed as a trivial wrapper\naround the every operator:\n\n\tfunc (t *tree) All() iter.Seq[int] {\n\t\treturn func(yield func(int) bool) {\n\t\t\t_ = t.every(yield)\n\t\t}\n\t}\n\nIn effect, tree.All computes whether yield returns true for each\nelement, short-circuiting if it ever returns false, then discards\nthe final boolean result.\n\nThis has much better performance characteristics: it makes one\ndynamic call per element of the tree, and it doesn't heap-allocate\nanything. 
It is also clearer.", +- "Default": "true", +- "Status": "" +- }, +- { +- "Name": "\"reflecttypefor\"", +- "Doc": "replace reflect.TypeOf(x) with TypeFor[T]()\n\nThis analyzer suggests fixes to replace uses of reflect.TypeOf(x) with\nreflect.TypeFor, introduced in go1.22, when the desired runtime type\nis known at compile time, for example:\n\n\treflect.TypeOf(uint32(0)) -\u003e reflect.TypeFor[uint32]()\n\treflect.TypeOf((*ast.File)(nil)) -\u003e reflect.TypeFor[*ast.File]()\n\nIt also offers a fix to simplify the construction below, which uses\nreflect.TypeOf to return the runtime type for an interface type,\n\n\treflect.TypeOf((*io.Reader)(nil)).Elem()\n\nto:\n\n\treflect.TypeFor[io.Reader]()\n\nNo fix is offered in cases when the runtime type is dynamic, such as:\n\n\tvar r io.Reader = ...\n\treflect.TypeOf(r)\n\nor when the operand has potential side effects.", +- "Default": "true", +- "Status": "" +- }, +- { +- "Name": "\"shadow\"", +- "Doc": "check for possible unintended shadowing of variables\n\nThis analyzer check for shadowed variables.\nA shadowed variable is a variable declared in an inner scope\nwith the same name and type as a variable in an outer scope,\nand where the outer variable is mentioned after the inner one\nis declared.\n\n(This definition can be refined; the module generates too many\nfalse positives and is not yet enabled by default.)\n\nFor example:\n\n\tfunc BadRead(f *os.File, buf []byte) error {\n\t\tvar err error\n\t\tfor {\n\t\t\tn, err := f.Read(buf) // shadows the function variable 'err'\n\t\t\tif err != nil {\n\t\t\t\tbreak // causes return of wrong value\n\t\t\t}\n\t\t\tfoo(buf)\n\t\t}\n\t\treturn err\n\t}", +- "Default": "false", +- "Status": "" +- }, +- { +- "Name": "\"shift\"", +- "Doc": "check for shifts that equal or exceed the width of the integer", +- "Default": "true", +- "Status": "" +- }, +- { +- "Name": "\"sigchanyzer\"", +- "Doc": "check for unbuffered channel of os.Signal\n\nThis checker reports call expression of the form\n\n\tsignal.Notify(c \u003c-chan os.Signal, sig ...os.Signal),\n\nwhere c is an unbuffered channel, which can be at risk of missing the signal.", +- "Default": "true", +- "Status": "" +- }, +- { +- "Name": "\"simplifycompositelit\"", +- "Doc": "check for composite literal simplifications\n\nAn array, slice, or map composite literal of the form:\n\n\t[]T{T{}, T{}}\n\nwill be simplified to:\n\n\t[]T{{}, {}}\n\nThis is one of the simplifications that \"gofmt -s\" applies.\n\nThis analyzer ignores generated code.", +- "Default": "true", +- "Status": "" +- }, +- { +- "Name": "\"simplifyrange\"", +- "Doc": "check for range statement simplifications\n\nA range of the form:\n\n\tfor x, _ = range v {...}\n\nwill be simplified to:\n\n\tfor x = range v {...}\n\nA range of the form:\n\n\tfor _ = range v {...}\n\nwill be simplified to:\n\n\tfor range v {...}\n\nThis is one of the simplifications that \"gofmt -s\" applies.\n\nThis analyzer ignores generated code.", +- "Default": "true", +- "Status": "" +- }, +- { +- "Name": "\"simplifyslice\"", +- "Doc": "check for slice simplifications\n\nA slice expression of the form:\n\n\ts[a:len(s)]\n\nwill be simplified to:\n\n\ts[a:]\n\nThis is one of the simplifications that \"gofmt -s\" applies.\n\nThis analyzer ignores generated code.", +- "Default": "true", +- "Status": "" +- }, +- { +- "Name": "\"slicescontains\"", +- "Doc": "replace loops with slices.Contains or slices.ContainsFunc\n\nThe slicescontains analyzer simplifies loops that check for the existence of\nan element in a slice. 
It replaces them with calls to `slices.Contains` or\n`slices.ContainsFunc`, which were added in Go 1.21.\n\nIf the expression for the target element has side effects, this\ntransformation will cause those effects to occur only once, not\nonce per tested slice element.", +- "Default": "true", +- "Status": "" +- }, +- { +- "Name": "\"slicesdelete\"", +- "Doc": "replace append-based slice deletion with slices.Delete\n\nThe slicesdelete analyzer suggests replacing the idiom\n\n\ts = append(s[:i], s[j:]...)\n\nwith the more explicit\n\n\ts = slices.Delete(s, i, j)\n\nintroduced in Go 1.21.\n\nThis analyzer is disabled by default. The `slices.Delete` function\nzeros the elements between the new length and the old length of the\nslice to prevent memory leaks, which is a subtle difference in\nbehavior compared to the append-based idiom; see https://go.dev/issue/73686.", +- "Default": "false", +- "Status": "" +- }, +- { +- "Name": "\"slicessort\"", +- "Doc": "replace sort.Slice with slices.Sort for basic types\n\nThe slicessort analyzer simplifies sorting slices of basic ordered\ntypes. It replaces\n\n\tsort.Slice(s, func(i, j int) bool { return s[i] \u003c s[j] })\n\nwith the simpler `slices.Sort(s)`, which was added in Go 1.21.", +- "Default": "true", +- "Status": "" +- }, +- { +- "Name": "\"slog\"", +- "Doc": "check for invalid structured logging calls\n\nThe slog checker looks for calls to functions from the log/slog\npackage that take alternating key-value pairs. It reports calls\nwhere an argument in a key position is neither a string nor a\nslog.Attr, and where a final key is missing its value.\nFor example,it would report\n\n\tslog.Warn(\"message\", 11, \"k\") // slog.Warn arg \"11\" should be a string or a slog.Attr\n\nand\n\n\tslog.Info(\"message\", \"k1\", v1, \"k2\") // call to slog.Info missing a final value", +- "Default": "true", +- "Status": "" +- }, +- { +- "Name": "\"sortslice\"", +- "Doc": "check the argument type of sort.Slice\n\nsort.Slice requires an argument of a slice type. Check that\nthe interface{} value passed to sort.Slice is actually a slice.", +- "Default": "true", +- "Status": "" +- }, +- { +- "Name": "\"stditerators\"", +- "Doc": "use iterators instead of Len/At-style APIs\n\nThis analyzer suggests a fix to replace each loop of the form:\n\n\tfor i := 0; i \u003c x.Len(); i++ {\n\t\tuse(x.At(i))\n\t}\n\nor its \"for elem := range x.Len()\" equivalent by a range loop over an\niterator offered by the same data type:\n\n\tfor elem := range x.All() {\n\t\tuse(x.At(i)\n\t}\n\nwhere x is one of various well-known types in the standard library.", +- "Default": "true", +- "Status": "" +- }, +- { +- "Name": "\"stdmethods\"", +- "Doc": "check signature of methods of well-known interfaces\n\nSometimes a type may be intended to satisfy an interface but may fail to\ndo so because of a mistake in its method signature.\nFor example, the result of this WriteTo method should be (int64, error),\nnot error, to satisfy io.WriterTo:\n\n\ttype myWriterTo struct{...}\n\tfunc (myWriterTo) WriteTo(w io.Writer) error { ... 
}\n\nThis check ensures that each method whose name matches one of several\nwell-known interface methods from the standard library has the correct\nsignature for that interface.\n\nChecked method names include:\n\n\tFormat GobEncode GobDecode MarshalJSON MarshalXML\n\tPeek ReadByte ReadFrom ReadRune Scan Seek\n\tUnmarshalJSON UnreadByte UnreadRune WriteByte\n\tWriteTo", +- "Default": "true", +- "Status": "" +- }, +- { +- "Name": "\"stdversion\"", +- "Doc": "report uses of too-new standard library symbols\n\nThe stdversion analyzer reports references to symbols in the standard\nlibrary that were introduced by a Go release higher than the one in\nforce in the referring file. (Recall that the file's Go version is\ndefined by the 'go' directive its module's go.mod file, or by a\n\"//go:build go1.X\" build tag at the top of the file.)\n\nThe analyzer does not report a diagnostic for a reference to a \"too\nnew\" field or method of a type that is itself \"too new\", as this may\nhave false positives, for example if fields or methods are accessed\nthrough a type alias that is guarded by a Go version constraint.\n", +- "Default": "true", +- "Status": "" +- }, +- { +- "Name": "\"stringintconv\"", +- "Doc": "check for string(int) conversions\n\nThis checker flags conversions of the form string(x) where x is an integer\n(but not byte or rune) type. Such conversions are discouraged because they\nreturn the UTF-8 representation of the Unicode code point x, and not a decimal\nstring representation of x as one might expect. Furthermore, if x denotes an\ninvalid code point, the conversion cannot be statically rejected.\n\nFor conversions that intend on using the code point, consider replacing them\nwith string(rune(x)). Otherwise, strconv.Itoa and its equivalents return the\nstring representation of the value in the desired base.", +- "Default": "true", +- "Status": "" +- }, +- { +- "Name": "\"stringsbuilder\"", +- "Doc": "replace += with strings.Builder\n\nThis analyzer replaces repeated string += string concatenation\noperations with calls to Go 1.10's strings.Builder.\n\nFor example:\n\n\tvar s = \"[\"\n\tfor x := range seq {\n\t\ts += x\n\t\ts += \".\"\n\t}\n\ts += \"]\"\n\tuse(s)\n\nis replaced by:\n\n\tvar s strings.Builder\n\ts.WriteString(\"[\")\n\tfor x := range seq {\n\t\ts.WriteString(x)\n\t\ts.WriteString(\".\")\n\t}\n\ts.WriteString(\"]\")\n\tuse(s.String())\n\nThis avoids quadratic memory allocation and improves performance.\n\nThe analyzer requires that all references to s except the final one\nare += operations. To avoid warning about trivial cases, at least one\nmust appear within a loop. The variable s must be a local\nvariable, not a global or parameter.\n\nThe sole use of the finished string must be the last reference to the\nvariable s. (It may appear within an intervening loop or function literal,\nsince even s.String() is called repeatedly, it does not allocate memory.)", +- "Default": "true", +- "Status": "" +- }, +- { +- "Name": "\"stringscutprefix\"", +- "Doc": "replace HasPrefix/TrimPrefix with CutPrefix\n\nThe stringscutprefix analyzer simplifies a common pattern where code first\nchecks for a prefix with `strings.HasPrefix` and then removes it with\n`strings.TrimPrefix`. It replaces this two-step process with a single call\nto `strings.CutPrefix`, introduced in Go 1.20. 
The analyzer also handles\nthe equivalent functions in the `bytes` package.\n\nFor example, this input:\n\n\tif strings.HasPrefix(s, prefix) {\n\t use(strings.TrimPrefix(s, prefix))\n\t}\n\nis fixed to:\n\n\tif after, ok := strings.CutPrefix(s, prefix); ok {\n\t use(after)\n\t}\n\nThe analyzer also offers fixes to use CutSuffix in a similar way.\nThis input:\n\n\tif strings.HasSuffix(s, suffix) {\n\t use(strings.TrimSuffix(s, suffix))\n\t}\n\nis fixed to:\n\n\tif before, ok := strings.CutSuffix(s, suffix); ok {\n\t use(before)\n\t}", +- "Default": "true", +- "Status": "" +- }, +- { +- "Name": "\"stringsseq\"", +- "Doc": "replace ranging over Split/Fields with SplitSeq/FieldsSeq\n\nThe stringsseq analyzer improves the efficiency of iterating over substrings.\nIt replaces\n\n\tfor range strings.Split(...)\n\nwith the more efficient\n\n\tfor range strings.SplitSeq(...)\n\nwhich was added in Go 1.24 and avoids allocating a slice for the\nsubstrings. The analyzer also handles strings.Fields and the\nequivalent functions in the bytes package.", +- "Default": "true", +- "Status": "" +- }, +- { +- "Name": "\"structtag\"", +- "Doc": "check that struct field tags conform to reflect.StructTag.Get\n\nAlso report certain struct tags (json, xml) used with unexported fields.", +- "Default": "true", +- "Status": "" +- }, +- { +- "Name": "\"testingcontext\"", +- "Doc": "replace context.WithCancel with t.Context in tests\n\nThe testingcontext analyzer simplifies context management in tests. It\nreplaces the manual creation of a cancellable context,\n\n\tctx, cancel := context.WithCancel(context.Background())\n\tdefer cancel()\n\nwith a single call to t.Context(), which was added in Go 1.24.\n\nThis change is only suggested if the `cancel` function is not used\nfor any other purpose.", +- "Default": "true", +- "Status": "" +- }, +- { +- "Name": "\"testinggoroutine\"", +- "Doc": "report calls to (*testing.T).Fatal from goroutines started by a test\n\nFunctions that abruptly terminate a test, such as the Fatal, Fatalf, FailNow, and\nSkip{,f,Now} methods of *testing.T, must be called from the test goroutine itself.\nThis checker detects calls to these functions that occur within a goroutine\nstarted by the test. For example:\n\n\tfunc TestFoo(t *testing.T) {\n\t go func() {\n\t t.Fatal(\"oops\") // error: (*T).Fatal called from non-test goroutine\n\t }()\n\t}", +- "Default": "true", +- "Status": "" +- }, +- { +- "Name": "\"tests\"", +- "Doc": "check for common mistaken usages of tests and examples\n\nThe tests checker walks Test, Benchmark, Fuzzing and Example functions checking\nmalformed names, wrong signatures and examples documenting non-existent\nidentifiers.\n\nPlease see the documentation for package testing in golang.org/pkg/testing\nfor the conventions that are enforced for Tests, Benchmarks, and Examples.", +- "Default": "true", +- "Status": "" +- }, +- { +- "Name": "\"timeformat\"", +- "Doc": "check for calls of (time.Time).Format or time.Parse with 2006-02-01\n\nThe timeformat checker looks for time formats with the 2006-02-01 (yyyy-dd-mm)\nformat. 
Internationally, \"yyyy-dd-mm\" does not occur in common calendar date\nstandards, and so it is more likely that 2006-01-02 (yyyy-mm-dd) was intended.", +- "Default": "true", +- "Status": "" +- }, +- { +- "Name": "\"unmarshal\"", +- "Doc": "report passing non-pointer or non-interface values to unmarshal\n\nThe unmarshal analysis reports calls to functions such as json.Unmarshal\nin which the argument type is not a pointer or an interface.", +- "Default": "true", +- "Status": "" +- }, +- { +- "Name": "\"unreachable\"", +- "Doc": "check for unreachable code\n\nThe unreachable analyzer finds statements that execution can never reach\nbecause they are preceded by a return statement, a call to panic, an\ninfinite loop, or similar constructs.", +- "Default": "true", +- "Status": "" +- }, +- { +- "Name": "\"unsafeptr\"", +- "Doc": "check for invalid conversions of uintptr to unsafe.Pointer\n\nThe unsafeptr analyzer reports likely incorrect uses of unsafe.Pointer\nto convert integers to pointers. A conversion from uintptr to\nunsafe.Pointer is invalid if it implies that there is a uintptr-typed\nword in memory that holds a pointer value, because that word will be\ninvisible to stack copying and to the garbage collector.", +- "Default": "true", +- "Status": "" +- }, +- { +- "Name": "\"unusedfunc\"", +- "Doc": "check for unused functions, methods, etc\n\nThe unusedfunc analyzer reports functions and methods that are\nnever referenced outside of their own declaration.\n\nA function is considered unused if it is unexported and not\nreferenced (except within its own declaration).\n\nA method is considered unused if it is unexported, not referenced\n(except within its own declaration), and its name does not match\nthat of any method of an interface type declared within the same\npackage.\n\nThe tool may report false positives in some situations, for\nexample:\n\n - for a declaration of an unexported function that is referenced\n from another package using the go:linkname mechanism, if the\n declaration's doc comment does not also have a go:linkname\n comment.\n\n (Such code is in any case strongly discouraged: linkname\n annotations, if they must be used at all, should be used on both\n the declaration and the alias.)\n\n - for compiler intrinsics in the \"runtime\" package that, though\n never referenced, are known to the compiler and are called\n indirectly by compiled object code.\n\n - for functions called only from assembly.\n\n - for functions called only from files whose build tags are not\n selected in the current build configuration.\n\nSince these situations are relatively common in the low-level parts\nof the runtime, this analyzer ignores the standard library.\nSee https://go.dev/issue/71686 and https://go.dev/issue/74130 for\nfurther discussion of these limitations.\n\nThe unusedfunc algorithm is not as precise as the\ngolang.org/x/tools/cmd/deadcode tool, but it has the advantage that\nit runs within the modular analysis framework, enabling near\nreal-time feedback within gopls.\n\nThe unusedfunc analyzer also reports unused types, vars, and\nconstants. 
Enums--constants defined with iota--are ignored since\neven the unused values must remain present to preserve the logical\nordering.", +- "Default": "true", +- "Status": "" +- }, +- { +- "Name": "\"unusedparams\"", +- "Doc": "check for unused parameters of functions\n\nThe unusedparams analyzer checks functions to see if there are\nany parameters that are not being used.\n\nTo ensure soundness, it ignores:\n - \"address-taken\" functions, that is, functions that are used as\n a value rather than being called directly; their signatures may\n be required to conform to a func type.\n - exported functions or methods, since they may be address-taken\n in another package.\n - unexported methods whose name matches an interface method\n declared in the same package, since the method's signature\n may be required to conform to the interface type.\n - functions with empty bodies, or containing just a call to panic.\n - parameters that are unnamed, or named \"_\", the blank identifier.\n\nThe analyzer suggests a fix of replacing the parameter name by \"_\",\nbut in such cases a deeper fix can be obtained by invoking the\n\"Refactor: remove unused parameter\" code action, which will\neliminate the parameter entirely, along with all corresponding\narguments at call sites, while taking care to preserve any side\neffects in the argument expressions; see\nhttps://github.com/golang/tools/releases/tag/gopls%2Fv0.14.\n\nThis analyzer ignores generated code.", +- "Default": "true", +- "Status": "" +- }, +- { +- "Name": "\"unusedresult\"", +- "Doc": "check for unused results of calls to some functions\n\nSome functions like fmt.Errorf return a result and have no side\neffects, so it is always a mistake to discard the result. Other\nfunctions may return an error that must not be ignored, or a cleanup\noperation that must be called. This analyzer reports calls to\nfunctions like these when the result of the call is ignored.\n\nThe set of functions may be controlled using flags.", +- "Default": "true", +- "Status": "" +- }, +- { +- "Name": "\"unusedvariable\"", +- "Doc": "check for unused variables and suggest fixes", +- "Default": "true", +- "Status": "" +- }, +- { +- "Name": "\"unusedwrite\"", +- "Doc": "checks for unused writes\n\nThe analyzer reports instances of writes to struct fields and\narrays that are never read. 
Specifically, when a struct object\nor an array is copied, its elements are copied implicitly by\nthe compiler, and any element write to this copy does nothing\nwith the original object.\n\nFor example:\n\n\ttype T struct { x int }\n\n\tfunc f(input []T) {\n\t\tfor i, v := range input { // v is a copy\n\t\t\tv.x = i // unused write to field x\n\t\t}\n\t}\n\nAnother example is about non-pointer receiver:\n\n\ttype T struct { x int }\n\n\tfunc (t T) f() { // t is a copy\n\t\tt.x = i // unused write to field x\n\t}", +- "Default": "true", +- "Status": "" +- }, +- { +- "Name": "\"waitgroup\"", +- "Doc": "check for misuses of sync.WaitGroup\n\nThis analyzer detects mistaken calls to the (*sync.WaitGroup).Add\nmethod from inside a new goroutine, causing Add to race with Wait:\n\n\t// WRONG\n\tvar wg sync.WaitGroup\n\tgo func() {\n\t wg.Add(1) // \"WaitGroup.Add called from inside new goroutine\"\n\t defer wg.Done()\n\t ...\n\t}()\n\twg.Wait() // (may return prematurely before new goroutine starts)\n\nThe correct code calls Add before starting the goroutine:\n\n\t// RIGHT\n\tvar wg sync.WaitGroup\n\twg.Add(1)\n\tgo func() {\n\t\tdefer wg.Done()\n\t\t...\n\t}()\n\twg.Wait()", +- "Default": "true", +- "Status": "" +- }, +- { +- "Name": "\"waitgroup\"", +- "Doc": "replace wg.Add(1)/go/wg.Done() with wg.Go\n\nThe waitgroup analyzer simplifies goroutine management with `sync.WaitGroup`.\nIt replaces the common pattern\n\n\twg.Add(1)\n\tgo func() {\n\t\tdefer wg.Done()\n\t\t...\n\t}()\n\nwith a single call to\n\n\twg.Go(func(){ ... })\n\nwhich was added in Go 1.25.", +- "Default": "true", +- "Status": "" +- }, +- { +- "Name": "\"yield\"", +- "Doc": "report calls to yield where the result is ignored\n\nAfter a yield function returns false, the caller should not call\nthe yield function again; generally the iterator should return\npromptly.\n\nThis example fails to check the result of the call to yield,\ncausing this analyzer to report a diagnostic:\n\n\tyield(1) // yield may be called again (on L2) after returning false\n\tyield(2)\n\nThe corrected code is either this:\n\n\tif yield(1) { yield(2) }\n\nor simply:\n\n\t_ = yield(1) \u0026\u0026 yield(2)\n\nIt is not always a mistake to ignore the result of yield.\nFor example, this is a valid single-element iterator:\n\n\tyield(1) // ok to ignore result\n\treturn\n\nIt is only a mistake when the yield call that returned false may be\nfollowed by another call.", +- "Default": "true", +- "Status": "" +- } +- ] +- }, +- "EnumValues": null, +- "Default": "{}", +- "Status": "", +- "Hierarchy": "ui.diagnostic", +- "DeprecationMessage": "" +- }, +- { +- "Name": "staticcheck", +- "Type": "bool", +- "Doc": "staticcheck configures the default set of analyses staticcheck.io.\nThese analyses are documented on\n[Staticcheck's website](https://staticcheck.io/docs/checks/).\n\nThe \"staticcheck\" option has three values:\n- false: disable all staticcheck analyzers\n- true: enable all staticcheck analyzers\n- unset: enable a subset of staticcheck analyzers\n selected by gopls maintainers for runtime efficiency\n and analytic precision.\n\nRegardless of this setting, individual analyzers can be\nselectively enabled or disabled using the `analyses` setting.\n", +- "EnumKeys": { +- "ValueType": "", +- "Keys": null +- }, +- "EnumValues": null, +- "Default": "false", +- "Status": "experimental", +- "Hierarchy": "ui.diagnostic", +- "DeprecationMessage": "" +- }, +- { +- "Name": "staticcheckProvided", +- "Type": "bool", +- "Doc": "", +- "EnumKeys": { +- "ValueType": "", +- 
"Keys": null +- }, +- "EnumValues": null, +- "Default": "false", +- "Status": "experimental", +- "Hierarchy": "ui.diagnostic", +- "DeprecationMessage": "" +- }, +- { +- "Name": "annotations", +- "Type": "map[enum]bool", +- "Doc": "annotations specifies the various kinds of compiler\noptimization details that should be reported as diagnostics\nwhen enabled for a package by the \"Toggle compiler\noptimization details\" (`gopls.gc_details`) command.\n\n(Some users care only about one kind of annotation in their\nprofiling efforts. More importantly, in large packages, the\nnumber of annotations can sometimes overwhelm the user\ninterface and exceed the per-file diagnostic limit.)\n\nTODO(adonovan): rename this field to CompilerOptDetail.\n", +- "EnumKeys": { +- "ValueType": "bool", +- "Keys": [ +- { +- "Name": "\"bounds\"", +- "Doc": "`\"bounds\"` controls bounds checking diagnostics.\n", +- "Default": "true", +- "Status": "" +- }, +- { +- "Name": "\"escape\"", +- "Doc": "`\"escape\"` controls diagnostics about escape choices.\n", +- "Default": "true", +- "Status": "" +- }, +- { +- "Name": "\"inline\"", +- "Doc": "`\"inline\"` controls diagnostics about inlining choices.\n", +- "Default": "true", +- "Status": "" +- }, +- { +- "Name": "\"nil\"", +- "Doc": "`\"nil\"` controls nil checks.\n", +- "Default": "true", +- "Status": "" +- } +- ] +- }, +- "EnumValues": null, +- "Default": "{\"bounds\":true,\"escape\":true,\"inline\":true,\"nil\":true}", +- "Status": "", +- "Hierarchy": "ui.diagnostic", +- "DeprecationMessage": "" +- }, +- { +- "Name": "vulncheck", +- "Type": "enum", +- "Doc": "vulncheck enables vulnerability scanning.\n", +- "EnumKeys": { +- "ValueType": "", +- "Keys": null +- }, +- "EnumValues": [ +- { +- "Value": "\"Imports\"", +- "Doc": "`\"Imports\"`: In Imports mode, `gopls` will report vulnerabilities that affect packages\ndirectly and indirectly used by the analyzed main module.\n", +- "Status": "" +- }, +- { +- "Value": "\"Off\"", +- "Doc": "`\"Off\"`: Disable vulnerability analysis.\n", +- "Status": "" +- } +- ], +- "Default": "\"Off\"", +- "Status": "experimental", +- "Hierarchy": "ui.diagnostic", +- "DeprecationMessage": "" +- }, +- { +- "Name": "diagnosticsDelay", +- "Type": "time.Duration", +- "Doc": "diagnosticsDelay controls the amount of time that gopls waits\nafter the most recent file modification before computing deep diagnostics.\nSimple diagnostics (parsing and type-checking) are always run immediately\non recently modified packages.\n\nThis option must be set to a valid duration string, for example `\"250ms\"`.\n", +- "EnumKeys": { +- "ValueType": "", +- "Keys": null +- }, +- "EnumValues": null, +- "Default": "\"1s\"", +- "Status": "advanced", +- "Hierarchy": "ui.diagnostic", +- "DeprecationMessage": "" +- }, +- { +- "Name": "diagnosticsTrigger", +- "Type": "enum", +- "Doc": "diagnosticsTrigger controls when to run diagnostics.\n", +- "EnumKeys": { +- "ValueType": "", +- "Keys": null +- }, +- "EnumValues": [ +- { +- "Value": "\"Edit\"", +- "Doc": "`\"Edit\"`: Trigger diagnostics on file edit and save. (default)\n", +- "Status": "" +- }, +- { +- "Value": "\"Save\"", +- "Doc": "`\"Save\"`: Trigger diagnostics only on file save. 
Events like initial workspace load\nor configuration change will still trigger diagnostics.\n", +- "Status": "" +- } +- ], +- "Default": "\"Edit\"", +- "Status": "experimental", +- "Hierarchy": "ui.diagnostic", +- "DeprecationMessage": "" +- }, +- { +- "Name": "analysisProgressReporting", +- "Type": "bool", +- "Doc": "analysisProgressReporting controls whether gopls sends progress\nnotifications when construction of its index of analysis facts is taking a\nlong time. Cancelling these notifications will cancel the indexing task,\nthough it will restart after the next change in the workspace.\n\nWhen a package is opened for the first time and heavyweight analyses such as\nstaticcheck are enabled, it can take a while to construct the index of\nanalysis facts for all its dependencies. The index is cached in the\nfilesystem, so subsequent analysis should be faster.\n", +- "EnumKeys": { +- "ValueType": "", +- "Keys": null +- }, +- "EnumValues": null, +- "Default": "true", +- "Status": "", +- "Hierarchy": "ui.diagnostic", +- "DeprecationMessage": "" +- }, +- { +- "Name": "hints", +- "Type": "map[enum]bool", +- "Doc": "hints specify inlay hints that users want to see. A full list of hints\nthat gopls uses can be found in\n[inlayHints.md](https://github.com/golang/tools/blob/master/gopls/doc/inlayHints.md).\n", +- "EnumKeys": { +- "ValueType": "bool", +- "Keys": [ +- { +- "Name": "\"assignVariableTypes\"", +- "Doc": "`\"assignVariableTypes\"` controls inlay hints for variable types in assign statements:\n```go\n\ti/* int*/, j/* int*/ := 0, len(r)-1\n```\n", +- "Default": "false", +- "Status": "" +- }, +- { +- "Name": "\"compositeLiteralFields\"", +- "Doc": "`\"compositeLiteralFields\"` inlay hints for composite literal field names:\n```go\n\t{/*in: */\"Hello, world\", /*want: */\"dlrow ,olleH\"}\n```\n", +- "Default": "false", +- "Status": "" +- }, +- { +- "Name": "\"compositeLiteralTypes\"", +- "Doc": "`\"compositeLiteralTypes\"` controls inlay hints for composite literal types:\n```go\n\tfor _, c := range []struct {\n\t\tin, want string\n\t}{\n\t\t/*struct{ in string; want string }*/{\"Hello, world\", \"dlrow ,olleH\"},\n\t}\n```\n", +- "Default": "false", +- "Status": "" +- }, +- { +- "Name": "\"constantValues\"", +- "Doc": "`\"constantValues\"` controls inlay hints for constant values:\n```go\n\tconst (\n\t\tKindNone Kind = iota/* = 0*/\n\t\tKindPrint/* = 1*/\n\t\tKindPrintf/* = 2*/\n\t\tKindErrorf/* = 3*/\n\t)\n```\n", +- "Default": "false", +- "Status": "" +- }, +- { +- "Name": "\"functionTypeParameters\"", +- "Doc": "`\"functionTypeParameters\"` inlay hints for implicit type parameters on generic functions:\n```go\n\tmyFoo/*[int, string]*/(1, \"hello\")\n```\n", +- "Default": "false", +- "Status": "" +- }, +- { +- "Name": "\"ignoredError\"", +- "Doc": "`\"ignoredError\"` inlay hints for implicitly discarded errors:\n```go\n\tf.Close() // ignore error\n```\nThis check inserts an `// ignore error` hint following any\nstatement that is a function call whose error result is\nimplicitly ignored.\n\nTo suppress the hint, write an actual comment containing\n\"ignore error\" following the call statement, or explictly\nassign the result to a blank variable. 
A handful of common\nfunctions such as `fmt.Println` are excluded from the\ncheck.\n", +- "Default": "false", +- "Status": "" +- }, +- { +- "Name": "\"parameterNames\"", +- "Doc": "`\"parameterNames\"` controls inlay hints for parameter names:\n```go\n\tparseInt(/* str: */ \"123\", /* radix: */ 8)\n```\n", +- "Default": "false", +- "Status": "" +- }, +- { +- "Name": "\"rangeVariableTypes\"", +- "Doc": "`\"rangeVariableTypes\"` controls inlay hints for variable types in range statements:\n```go\n\tfor k/* int*/, v/* string*/ := range []string{} {\n\t\tfmt.Println(k, v)\n\t}\n```\n", +- "Default": "false", +- "Status": "" +- } +- ] +- }, +- "EnumValues": null, +- "Default": "{}", +- "Status": "experimental", +- "Hierarchy": "ui.inlayhint", +- "DeprecationMessage": "" +- }, +- { +- "Name": "codelenses", +- "Type": "map[enum]bool", +- "Doc": "codelenses overrides the enabled/disabled state of each of gopls'\nsources of [Code Lenses](codelenses.md).\n\nExample Usage:\n\n```json5\n\"gopls\": {\n...\n \"codelenses\": {\n \"generate\": false, // Don't show the `go generate` lens.\n }\n...\n}\n```\n", +- "EnumKeys": { +- "ValueType": "bool", +- "Keys": [ +- { +- "Name": "\"generate\"", +- "Doc": "`\"generate\"`: Run `go generate`\n\nThis codelens source annotates any `//go:generate` comments\nwith commands to run `go generate` in this directory, on\nall directories recursively beneath this one.\n\nSee [Generating code](https://go.dev/blog/generate) for\nmore details.\n", +- "Default": "true", +- "Status": "" +- }, +- { +- "Name": "\"regenerate_cgo\"", +- "Doc": "`\"regenerate_cgo\"`: Re-generate cgo declarations\n\nThis codelens source annotates an `import \"C\"` declaration\nwith a command to re-run the [cgo\ncommand](https://pkg.go.dev/cmd/cgo) to regenerate the\ncorresponding Go declarations.\n\nUse this after editing the C code in comments attached to\nthe import, or in C header files included by it.\n", +- "Default": "true", +- "Status": "" +- }, +- { +- "Name": "\"run_govulncheck\"", +- "Doc": "`\"run_govulncheck\"`: Run govulncheck (legacy)\n\nThis codelens source annotates the `module` directive in a go.mod file\nwith a command to run Govulncheck asynchronously.\n\n[Govulncheck](https://go.dev/blog/vuln) is a static analysis tool that\ncomputes the set of functions reachable within your application, including\ndependencies; queries a database of known security vulnerabilities; and\nreports any potential problems it finds.\n", +- "Default": "true", +- "Status": "" +- }, +- { +- "Name": "\"test\"", +- "Doc": "`\"test\"`: Run tests and benchmarks\n\nThis codelens source annotates each `Test` and `Benchmark`\nfunction in a `*_test.go` file with a command to run it.\n\nThis source is off by default because VS Code has\na client-side custom UI for testing, and because progress\nnotifications are not a great UX for streamed test output.\nSee:\n- golang/go#67400 for a discussion of this feature.\n- https://github.com/joaotavora/eglot/discussions/1402\n for an alternative approach.\n", +- "Default": "false", +- "Status": "" +- }, +- { +- "Name": "\"tidy\"", +- "Doc": "`\"tidy\"`: Tidy go.mod file\n\nThis codelens source annotates the `module` directive in a\ngo.mod file with a command to run [`go mod\ntidy`](https://go.dev/ref/mod#go-mod-tidy), which ensures\nthat the go.mod file matches the source code in the module.\n", +- "Default": "true", +- "Status": "" +- }, +- { +- "Name": "\"upgrade_dependency\"", +- "Doc": "`\"upgrade_dependency\"`: Update dependencies\n\nThis codelens source annotates the 
`module` directive in a\ngo.mod file with commands to:\n\n- check for available upgrades,\n- upgrade direct dependencies, and\n- upgrade all dependencies transitively.\n", +- "Default": "true", +- "Status": "" +- }, +- { +- "Name": "\"vendor\"", +- "Doc": "`\"vendor\"`: Update vendor directory\n\nThis codelens source annotates the `module` directive in a\ngo.mod file with a command to run [`go mod\nvendor`](https://go.dev/ref/mod#go-mod-vendor), which\ncreates or updates the directory named `vendor` in the\nmodule root so that it contains an up-to-date copy of all\nnecessary package dependencies.\n", +- "Default": "true", +- "Status": "" +- }, +- { +- "Name": "\"vulncheck\"", +- "Doc": "`\"vulncheck\"`: Run govulncheck\n\nThis codelens source annotates the `module` directive in a go.mod file\nwith a command to run govulncheck synchronously.\n\n[Govulncheck](https://go.dev/blog/vuln) is a static analysis tool that\ncomputes the set of functions reachable within your application, including\ndependencies; queries a database of known security vulnerabilities; and\nreports any potential problems it finds.\n", +- "Default": "false", +- "Status": "experimental" +- } +- ] +- }, +- "EnumValues": null, +- "Default": "{\"generate\":true,\"regenerate_cgo\":true,\"run_govulncheck\":true,\"tidy\":true,\"upgrade_dependency\":true,\"vendor\":true}", +- "Status": "", +- "Hierarchy": "ui", +- "DeprecationMessage": "" +- }, +- { +- "Name": "semanticTokens", +- "Type": "bool", +- "Doc": "semanticTokens controls whether the LSP server will send\nsemantic tokens to the client.\n", +- "EnumKeys": { +- "ValueType": "", +- "Keys": null +- }, +- "EnumValues": null, +- "Default": "false", +- "Status": "experimental", +- "Hierarchy": "ui", +- "DeprecationMessage": "" +- }, +- { +- "Name": "noSemanticString", +- "Type": "bool", +- "Doc": "noSemanticString turns off the sending of the semantic token 'string'\n\nDeprecated: Use SemanticTokenTypes[\"string\"] = false instead. See\ngolang/vscode-go#3632\n", +- "EnumKeys": { +- "ValueType": "", +- "Keys": null +- }, +- "EnumValues": null, +- "Default": "false", +- "Status": "experimental", +- "Hierarchy": "ui", +- "DeprecationMessage": "use SemanticTokenTypes[\"string\"] = false instead. See\ngolang/vscode-go#3632\n" +- }, +- { +- "Name": "noSemanticNumber", +- "Type": "bool", +- "Doc": "noSemanticNumber turns off the sending of the semantic token 'number'\n\nDeprecated: Use SemanticTokenTypes[\"number\"] = false instead. See\ngolang/vscode-go#3632.\n", +- "EnumKeys": { +- "ValueType": "", +- "Keys": null +- }, +- "EnumValues": null, +- "Default": "false", +- "Status": "experimental", +- "Hierarchy": "ui", +- "DeprecationMessage": "use SemanticTokenTypes[\"number\"] = false instead. See\ngolang/vscode-go#3632.\n" +- }, +- { +- "Name": "semanticTokenTypes", +- "Type": "map[string]bool", +- "Doc": "semanticTokenTypes configures the semantic token types. It allows\ndisabling types by setting each value to false.\nBy default, all types are enabled.\n", +- "EnumKeys": { +- "ValueType": "", +- "Keys": null +- }, +- "EnumValues": null, +- "Default": "{}", +- "Status": "experimental", +- "Hierarchy": "ui", +- "DeprecationMessage": "" +- }, +- { +- "Name": "semanticTokenModifiers", +- "Type": "map[string]bool", +- "Doc": "semanticTokenModifiers configures the semantic token modifiers. 
It allows\ndisabling modifiers by setting each value to false.\nBy default, all modifiers are enabled.\n", +- "EnumKeys": { +- "ValueType": "", +- "Keys": null +- }, +- "EnumValues": null, +- "Default": "{}", +- "Status": "experimental", +- "Hierarchy": "ui", +- "DeprecationMessage": "" +- }, +- { +- "Name": "newGoFileHeader", +- "Type": "bool", +- "Doc": "newGoFileHeader enables automatic insertion of the copyright comment\nand package declaration in a newly created Go file.\n", +- "EnumKeys": { +- "ValueType": "", +- "Keys": null +- }, +- "EnumValues": null, +- "Default": "true", +- "Status": "", +- "Hierarchy": "ui", +- "DeprecationMessage": "" +- }, +- { +- "Name": "packageMove", +- "Type": "bool", +- "Doc": "packageMove enables PrepareRename to send the full package path\nand allows users to move a package via renaming.\n", +- "EnumKeys": { +- "ValueType": "", +- "Keys": null +- }, +- "EnumValues": null, +- "Default": "false", +- "Status": "experimental", +- "Hierarchy": "ui", +- "DeprecationMessage": "" +- }, +- { +- "Name": "local", +- "Type": "string", +- "Doc": "local is the equivalent of the `goimports -local` flag, which puts\nimports beginning with this string after third-party packages. It should\nbe the prefix of the import path whose imports should be grouped\nseparately.\n\nIt is used when tidying imports (during an LSP Organize\nImports request) or when inserting new ones (for example,\nduring completion); an LSP Formatting request merely sorts the\nexisting imports.\n", +- "EnumKeys": { +- "ValueType": "", +- "Keys": null +- }, +- "EnumValues": null, +- "Default": "\"\"", +- "Status": "", +- "Hierarchy": "formatting", +- "DeprecationMessage": "" +- }, +- { +- "Name": "gofumpt", +- "Type": "bool", +- "Doc": "gofumpt indicates if we should run gofumpt formatting.\n", +- "EnumKeys": { +- "ValueType": "", +- "Keys": null +- }, +- "EnumValues": null, +- "Default": "false", +- "Status": "", +- "Hierarchy": "formatting", +- "DeprecationMessage": "" +- }, +- { +- "Name": "verboseOutput", +- "Type": "bool", +- "Doc": "verboseOutput enables additional debug logging.\n", +- "EnumKeys": { +- "ValueType": "", +- "Keys": null +- }, +- "EnumValues": null, +- "Default": "false", +- "Status": "debug", +- "Hierarchy": "", +- "DeprecationMessage": "" +- } +- ] +- }, +- "Lenses": [ +- { +- "FileType": "Go", +- "Lens": "generate", +- "Title": "Run `go generate`", +- "Doc": "\nThis codelens source annotates any `//go:generate` comments\nwith commands to run `go generate` in this directory, on\nall directories recursively beneath this one.\n\nSee [Generating code](https://go.dev/blog/generate) for\nmore details.\n", +- "Default": true, +- "Status": "" +- }, +- { +- "FileType": "Go", +- "Lens": "regenerate_cgo", +- "Title": "Re-generate cgo declarations", +- "Doc": "\nThis codelens source annotates an `import \"C\"` declaration\nwith a command to re-run the [cgo\ncommand](https://pkg.go.dev/cmd/cgo) to regenerate the\ncorresponding Go declarations.\n\nUse this after editing the C code in comments attached to\nthe import, or in C header files included by it.\n", +- "Default": true, +- "Status": "" +- }, +- { +- "FileType": "Go", +- "Lens": "test", +- "Title": "Run tests and benchmarks", +- "Doc": "\nThis codelens source annotates each `Test` and `Benchmark`\nfunction in a `*_test.go` file with a command to run it.\n\nThis source is off by default because VS Code has\na client-side custom UI for testing, and because progress\nnotifications are not a great UX for streamed test output.\nSee:\n- 
golang/go#67400 for a discussion of this feature.\n- https://github.com/joaotavora/eglot/discussions/1402\n for an alternative approach.\n", +- "Default": false, +- "Status": "" +- }, +- { +- "FileType": "go.mod", +- "Lens": "run_govulncheck", +- "Title": "Run govulncheck (legacy)", +- "Doc": "\nThis codelens source annotates the `module` directive in a go.mod file\nwith a command to run Govulncheck asynchronously.\n\n[Govulncheck](https://go.dev/blog/vuln) is a static analysis tool that\ncomputes the set of functions reachable within your application, including\ndependencies; queries a database of known security vulnerabilities; and\nreports any potential problems it finds.\n", +- "Default": true, +- "Status": "" +- }, +- { +- "FileType": "go.mod", +- "Lens": "tidy", +- "Title": "Tidy go.mod file", +- "Doc": "\nThis codelens source annotates the `module` directive in a\ngo.mod file with a command to run [`go mod\ntidy`](https://go.dev/ref/mod#go-mod-tidy), which ensures\nthat the go.mod file matches the source code in the module.\n", +- "Default": true, +- "Status": "" +- }, +- { +- "FileType": "go.mod", +- "Lens": "upgrade_dependency", +- "Title": "Update dependencies", +- "Doc": "\nThis codelens source annotates the `module` directive in a\ngo.mod file with commands to:\n\n- check for available upgrades,\n- upgrade direct dependencies, and\n- upgrade all dependencies transitively.\n", +- "Default": true, +- "Status": "" +- }, +- { +- "FileType": "go.mod", +- "Lens": "vendor", +- "Title": "Update vendor directory", +- "Doc": "\nThis codelens source annotates the `module` directive in a\ngo.mod file with a command to run [`go mod\nvendor`](https://go.dev/ref/mod#go-mod-vendor), which\ncreates or updates the directory named `vendor` in the\nmodule root so that it contains an up-to-date copy of all\nnecessary package dependencies.\n", +- "Default": true, +- "Status": "" +- }, +- { +- "FileType": "go.mod", +- "Lens": "vulncheck", +- "Title": "Run govulncheck", +- "Doc": "\nThis codelens source annotates the `module` directive in a go.mod file\nwith a command to run govulncheck synchronously.\n\n[Govulncheck](https://go.dev/blog/vuln) is a static analysis tool that\ncomputes the set of functions reachable within your application, including\ndependencies; queries a database of known security vulnerabilities; and\nreports any potential problems it finds.\n", +- "Default": false, +- "Status": "experimental" +- } +- ], +- "Analyzers": [ +- { +- "Name": "QF1001", +- "Doc": "Apply De Morgan's law\n\nAvailable since\n 2021.1\n", +- "URL": "https://staticcheck.dev/docs/checks/#QF1001", +- "Default": false +- }, +- { +- "Name": "QF1002", +- "Doc": "Convert untagged switch to tagged switch\n\nAn untagged switch that compares a single variable against a series of\nvalues can be replaced with a tagged switch.\n\nBefore:\n\n switch {\n case x == 1 || x == 2, x == 3:\n ...\n case x == 4:\n ...\n default:\n ...\n }\n\nAfter:\n\n switch x {\n case 1, 2, 3:\n ...\n case 4:\n ...\n default:\n ...\n }\n\nAvailable since\n 2021.1\n", +- "URL": "https://staticcheck.dev/docs/checks/#QF1002", +- "Default": true +- }, +- { +- "Name": "QF1003", +- "Doc": "Convert if/else-if chain to tagged switch\n\nA series of if/else-if checks comparing the same variable against\nvalues can be replaced with a tagged switch.\n\nBefore:\n\n if x == 1 || x == 2 {\n ...\n } else if x == 3 {\n ...\n } else {\n ...\n }\n\nAfter:\n\n switch x {\n case 1, 2:\n ...\n case 3:\n ...\n default:\n ...\n }\n\nAvailable since\n 2021.1\n", +- 
"URL": "https://staticcheck.dev/docs/checks/#QF1003", +- "Default": true +- }, +- { +- "Name": "QF1004", +- "Doc": "Use strings.ReplaceAll instead of strings.Replace with n == -1\n\nAvailable since\n 2021.1\n", +- "URL": "https://staticcheck.dev/docs/checks/#QF1004", +- "Default": true +- }, +- { +- "Name": "QF1005", +- "Doc": "Expand call to math.Pow\n\nSome uses of math.Pow can be simplified to basic multiplication.\n\nBefore:\n\n math.Pow(x, 2)\n\nAfter:\n\n x * x\n\nAvailable since\n 2021.1\n", +- "URL": "https://staticcheck.dev/docs/checks/#QF1005", +- "Default": false +- }, +- { +- "Name": "QF1006", +- "Doc": "Lift if+break into loop condition\n\nBefore:\n\n for {\n if done {\n break\n }\n ...\n }\n\nAfter:\n\n for !done {\n ...\n }\n\nAvailable since\n 2021.1\n", +- "URL": "https://staticcheck.dev/docs/checks/#QF1006", +- "Default": false +- }, +- { +- "Name": "QF1007", +- "Doc": "Merge conditional assignment into variable declaration\n\nBefore:\n\n x := false\n if someCondition {\n x = true\n }\n\nAfter:\n\n x := someCondition\n\nAvailable since\n 2021.1\n", +- "URL": "https://staticcheck.dev/docs/checks/#QF1007", +- "Default": false +- }, +- { +- "Name": "QF1008", +- "Doc": "Omit embedded fields from selector expression\n\nAvailable since\n 2021.1\n", +- "URL": "https://staticcheck.dev/docs/checks/#QF1008", +- "Default": false +- }, +- { +- "Name": "QF1009", +- "Doc": "Use time.Time.Equal instead of == operator\n\nAvailable since\n 2021.1\n", +- "URL": "https://staticcheck.dev/docs/checks/#QF1009", +- "Default": true +- }, +- { +- "Name": "QF1010", +- "Doc": "Convert slice of bytes to string when printing it\n\nAvailable since\n 2021.1\n", +- "URL": "https://staticcheck.dev/docs/checks/#QF1010", +- "Default": true +- }, +- { +- "Name": "QF1011", +- "Doc": "Omit redundant type from variable declaration\n\nAvailable since\n 2021.1\n", +- "URL": "https://staticcheck.dev/docs/checks/#", +- "Default": false +- }, +- { +- "Name": "QF1012", +- "Doc": "Use fmt.Fprintf(x, ...) instead of x.Write(fmt.Sprintf(...))\n\nAvailable since\n 2022.1\n", +- "URL": "https://staticcheck.dev/docs/checks/#QF1012", +- "Default": true +- }, +- { +- "Name": "S1000", +- "Doc": "Use plain channel send or receive instead of single-case select\n\nSelect statements with a single case can be replaced with a simple\nsend or receive.\n\nBefore:\n\n select {\n case x := \u003c-ch:\n fmt.Println(x)\n }\n\nAfter:\n\n x := \u003c-ch\n fmt.Println(x)\n\nAvailable since\n 2017.1\n", +- "URL": "https://staticcheck.dev/docs/checks/#S1000", +- "Default": true +- }, +- { +- "Name": "S1001", +- "Doc": "Replace for loop with call to copy\n\nUse copy() for copying elements from one slice to another. 
For\narrays of identical size, you can use simple assignment.\n\nBefore:\n\n for i, x := range src {\n dst[i] = x\n }\n\nAfter:\n\n copy(dst, src)\n\nAvailable since\n 2017.1\n", +- "URL": "https://staticcheck.dev/docs/checks/#S1001", +- "Default": true +- }, +- { +- "Name": "S1002", +- "Doc": "Omit comparison with boolean constant\n\nBefore:\n\n if x == true {}\n\nAfter:\n\n if x {}\n\nAvailable since\n 2017.1\n", +- "URL": "https://staticcheck.dev/docs/checks/#S1002", +- "Default": false +- }, +- { +- "Name": "S1003", +- "Doc": "Replace call to strings.Index with strings.Contains\n\nBefore:\n\n if strings.Index(x, y) != -1 {}\n\nAfter:\n\n if strings.Contains(x, y) {}\n\nAvailable since\n 2017.1\n", +- "URL": "https://staticcheck.dev/docs/checks/#S1003", +- "Default": true +- }, +- { +- "Name": "S1004", +- "Doc": "Replace call to bytes.Compare with bytes.Equal\n\nBefore:\n\n if bytes.Compare(x, y) == 0 {}\n\nAfter:\n\n if bytes.Equal(x, y) {}\n\nAvailable since\n 2017.1\n", +- "URL": "https://staticcheck.dev/docs/checks/#S1004", +- "Default": true +- }, +- { +- "Name": "S1005", +- "Doc": "Drop unnecessary use of the blank identifier\n\nIn many cases, assigning to the blank identifier is unnecessary.\n\nBefore:\n\n for _ = range s {}\n x, _ = someMap[key]\n _ = \u003c-ch\n\nAfter:\n\n for range s{}\n x = someMap[key]\n \u003c-ch\n\nAvailable since\n 2017.1\n", +- "URL": "https://staticcheck.dev/docs/checks/#S1005", +- "Default": false +- }, +- { +- "Name": "S1006", +- "Doc": "Use 'for { ... }' for infinite loops\n\nFor infinite loops, using for { ... } is the most idiomatic choice.\n\nAvailable since\n 2017.1\n", +- "URL": "https://staticcheck.dev/docs/checks/#S1006", +- "Default": false +- }, +- { +- "Name": "S1007", +- "Doc": "Simplify regular expression by using raw string literal\n\nRaw string literals use backticks instead of quotation marks and do not support\nany escape sequences. This means that the backslash can be used\nfreely, without the need of escaping.\n\nSince regular expressions have their own escape sequences, raw strings\ncan improve their readability.\n\nBefore:\n\n regexp.Compile(\"\\\\A(\\\\w+) profile: total \\\\d+\\\\n\\\\z\")\n\nAfter:\n\n regexp.Compile(`\\A(\\w+) profile: total \\d+\\n\\z`)\n\nAvailable since\n 2017.1\n", +- "URL": "https://staticcheck.dev/docs/checks/#S1007", +- "Default": true +- }, +- { +- "Name": "S1008", +- "Doc": "Simplify returning boolean expression\n\nBefore:\n\n if \u003cexpr\u003e {\n return true\n }\n return false\n\nAfter:\n\n return \u003cexpr\u003e\n\nAvailable since\n 2017.1\n", +- "URL": "https://staticcheck.dev/docs/checks/#S1008", +- "Default": false +- }, +- { +- "Name": "S1009", +- "Doc": "Omit redundant nil check on slices, maps, and channels\n\nThe len function is defined for all slices, maps, and\nchannels, even nil ones, which have a length of zero. 
It is not necessary to\ncheck for nil before checking that their length is not zero.\n\nBefore:\n\n if x != nil \u0026\u0026 len(x) != 0 {}\n\nAfter:\n\n if len(x) != 0 {}\n\nAvailable since\n 2017.1\n", +- "URL": "https://staticcheck.dev/docs/checks/#S1009", +- "Default": true +- }, +- { +- "Name": "S1010", +- "Doc": "Omit default slice index\n\nWhen slicing, the second index defaults to the length of the value,\nmaking s[n:len(s)] and s[n:] equivalent.\n\nAvailable since\n 2017.1\n", +- "URL": "https://staticcheck.dev/docs/checks/#S1010", +- "Default": true +- }, +- { +- "Name": "S1011", +- "Doc": "Use a single append to concatenate two slices\n\nBefore:\n\n for _, e := range y {\n x = append(x, e)\n }\n \n for i := range y {\n x = append(x, y[i])\n }\n \n for i := range y {\n v := y[i]\n x = append(x, v)\n }\n\nAfter:\n\n x = append(x, y...)\n x = append(x, y...)\n x = append(x, y...)\n\nAvailable since\n 2017.1\n", +- "URL": "https://staticcheck.dev/docs/checks/#S1011", +- "Default": false +- }, +- { +- "Name": "S1012", +- "Doc": "Replace time.Now().Sub(x) with time.Since(x)\n\nThe time.Since helper has the same effect as using time.Now().Sub(x)\nbut is easier to read.\n\nBefore:\n\n time.Now().Sub(x)\n\nAfter:\n\n time.Since(x)\n\nAvailable since\n 2017.1\n", +- "URL": "https://staticcheck.dev/docs/checks/#S1012", +- "Default": true +- }, +- { +- "Name": "S1016", +- "Doc": "Use a type conversion instead of manually copying struct fields\n\nTwo struct types with identical fields can be converted between each\nother. In older versions of Go, the fields had to have identical\nstruct tags. Since Go 1.8, however, struct tags are ignored during\nconversions. It is thus not necessary to manually copy every field\nindividually.\n\nBefore:\n\n var x T1\n y := T2{\n Field1: x.Field1,\n Field2: x.Field2,\n }\n\nAfter:\n\n var x T1\n y := T2(x)\n\nAvailable since\n 2017.1\n", +- "URL": "https://staticcheck.dev/docs/checks/#S1016", +- "Default": false +- }, +- { +- "Name": "S1017", +- "Doc": "Replace manual trimming with strings.TrimPrefix\n\nInstead of using strings.HasPrefix and manual slicing, use the\nstrings.TrimPrefix function. If the string doesn't start with the\nprefix, the original string will be returned. Using strings.TrimPrefix\nreduces complexity, and avoids common bugs, such as off-by-one\nmistakes.\n\nBefore:\n\n if strings.HasPrefix(str, prefix) {\n str = str[len(prefix):]\n }\n\nAfter:\n\n str = strings.TrimPrefix(str, prefix)\n\nAvailable since\n 2017.1\n", +- "URL": "https://staticcheck.dev/docs/checks/#S1017", +- "Default": true +- }, +- { +- "Name": "S1018", +- "Doc": "Use 'copy' for sliding elements\n\ncopy() permits using the same source and destination slice, even with\noverlapping ranges. This makes it ideal for sliding elements in a\nslice.\n\nBefore:\n\n for i := 0; i \u003c n; i++ {\n bs[i] = bs[offset+i]\n }\n\nAfter:\n\n copy(bs[:n], bs[offset:])\n\nAvailable since\n 2017.1\n", +- "URL": "https://staticcheck.dev/docs/checks/#S1018", +- "Default": true +- }, +- { +- "Name": "S1019", +- "Doc": "Simplify 'make' call by omitting redundant arguments\n\nThe 'make' function has default values for the length and capacity\narguments. 
For channels, the length defaults to zero, and for slices,\nthe capacity defaults to the length.\n\nAvailable since\n 2017.1\n", +- "URL": "https://staticcheck.dev/docs/checks/#S1019", +- "Default": true +- }, +- { +- "Name": "S1020", +- "Doc": "Omit redundant nil check in type assertion\n\nBefore:\n\n if _, ok := i.(T); ok \u0026\u0026 i != nil {}\n\nAfter:\n\n if _, ok := i.(T); ok {}\n\nAvailable since\n 2017.1\n", +- "URL": "https://staticcheck.dev/docs/checks/#S1020", +- "Default": true +- }, +- { +- "Name": "S1021", +- "Doc": "Merge variable declaration and assignment\n\nBefore:\n\n var x uint\n x = 1\n\nAfter:\n\n var x uint = 1\n\nAvailable since\n 2017.1\n", +- "URL": "https://staticcheck.dev/docs/checks/#S1021", +- "Default": false +- }, +- { +- "Name": "S1023", +- "Doc": "Omit redundant control flow\n\nFunctions that have no return value do not need a return statement as\nthe final statement of the function.\n\nSwitches in Go do not have automatic fallthrough, unlike languages\nlike C. It is not necessary to have a break statement as the final\nstatement in a case block.\n\nAvailable since\n 2017.1\n", +- "URL": "https://staticcheck.dev/docs/checks/#S1023", +- "Default": true +- }, +- { +- "Name": "S1024", +- "Doc": "Replace x.Sub(time.Now()) with time.Until(x)\n\nThe time.Until helper has the same effect as using x.Sub(time.Now())\nbut is easier to read.\n\nBefore:\n\n x.Sub(time.Now())\n\nAfter:\n\n time.Until(x)\n\nAvailable since\n 2017.1\n", +- "URL": "https://staticcheck.dev/docs/checks/#S1024", +- "Default": true +- }, +- { +- "Name": "S1025", +- "Doc": "Don't use fmt.Sprintf(\"%s\", x) unnecessarily\n\nIn many instances, there are easier and more efficient ways of getting\na value's string representation. Whenever a value's underlying type is\na string already, or the type has a String method, they should be used\ndirectly.\n\nGiven the following shared definitions\n\n type T1 string\n type T2 int\n\n func (T2) String() string { return \"Hello, world\" }\n\n var x string\n var y T1\n var z T2\n\nwe can simplify\n\n fmt.Sprintf(\"%s\", x)\n fmt.Sprintf(\"%s\", y)\n fmt.Sprintf(\"%s\", z)\n\nto\n\n x\n string(y)\n z.String()\n\nAvailable since\n 2017.1\n", +- "URL": "https://staticcheck.dev/docs/checks/#S1025", +- "Default": false +- }, +- { +- "Name": "S1028", +- "Doc": "Simplify error construction with fmt.Errorf\n\nBefore:\n\n errors.New(fmt.Sprintf(...))\n\nAfter:\n\n fmt.Errorf(...)\n\nAvailable since\n 2017.1\n", +- "URL": "https://staticcheck.dev/docs/checks/#S1028", +- "Default": true +- }, +- { +- "Name": "S1029", +- "Doc": "Range over the string directly\n\nRanging over a string will yield byte offsets and runes. If the offset\nisn't used, this is functionally equivalent to converting the string\nto a slice of runes and ranging over that. Ranging directly over the\nstring will be more performant, however, as it avoids allocating a new\nslice, the size of which depends on the length of the string.\n\nBefore:\n\n for _, r := range []rune(s) {}\n\nAfter:\n\n for _, r := range s {}\n\nAvailable since\n 2017.1\n", +- "URL": "https://staticcheck.dev/docs/checks/#S1029", +- "Default": false +- }, +- { +- "Name": "S1030", +- "Doc": "Use bytes.Buffer.String or bytes.Buffer.Bytes\n\nbytes.Buffer has both a String and a Bytes method. It is almost never\nnecessary to use string(buf.Bytes()) or []byte(buf.String()) – simply\nuse the other method.\n\nThe only exception to this are map lookups. 
Due to a compiler optimization,\nm[string(buf.Bytes())] is more efficient than m[buf.String()].\n\nAvailable since\n 2017.1\n", +- "URL": "https://staticcheck.dev/docs/checks/#S1030", +- "Default": true +- }, +- { +- "Name": "S1031", +- "Doc": "Omit redundant nil check around loop\n\nYou can use range on nil slices and maps, the loop will simply never\nexecute. This makes an additional nil check around the loop\nunnecessary.\n\nBefore:\n\n if s != nil {\n for _, x := range s {\n ...\n }\n }\n\nAfter:\n\n for _, x := range s {\n ...\n }\n\nAvailable since\n 2017.1\n", +- "URL": "https://staticcheck.dev/docs/checks/#S1031", +- "Default": true +- }, +- { +- "Name": "S1032", +- "Doc": "Use sort.Ints(x), sort.Float64s(x), and sort.Strings(x)\n\nThe sort.Ints, sort.Float64s and sort.Strings functions are easier to\nread than sort.Sort(sort.IntSlice(x)), sort.Sort(sort.Float64Slice(x))\nand sort.Sort(sort.StringSlice(x)).\n\nBefore:\n\n sort.Sort(sort.StringSlice(x))\n\nAfter:\n\n sort.Strings(x)\n\nAvailable since\n 2019.1\n", +- "URL": "https://staticcheck.dev/docs/checks/#S1032", +- "Default": true +- }, +- { +- "Name": "S1033", +- "Doc": "Unnecessary guard around call to 'delete'\n\nCalling delete on a nil map is a no-op.\n\nAvailable since\n 2019.2\n", +- "URL": "https://staticcheck.dev/docs/checks/#S1033", +- "Default": true +- }, +- { +- "Name": "S1034", +- "Doc": "Use result of type assertion to simplify cases\n\nAvailable since\n 2019.2\n", +- "URL": "https://staticcheck.dev/docs/checks/#S1034", +- "Default": true +- }, +- { +- "Name": "S1035", +- "Doc": "Redundant call to net/http.CanonicalHeaderKey in method call on net/http.Header\n\nThe methods on net/http.Header, namely Add, Del, Get\nand Set, already canonicalize the given header name.\n\nAvailable since\n 2020.1\n", +- "URL": "https://staticcheck.dev/docs/checks/#S1035", +- "Default": true +- }, +- { +- "Name": "S1036", +- "Doc": "Unnecessary guard around map access\n\nWhen accessing a map key that doesn't exist yet, one receives a zero\nvalue. 
Often, the zero value is a suitable value, for example when\nusing append or doing integer math.\n\nThe following\n\n if _, ok := m[\"foo\"]; ok {\n m[\"foo\"] = append(m[\"foo\"], \"bar\")\n } else {\n m[\"foo\"] = []string{\"bar\"}\n }\n\ncan be simplified to\n\n m[\"foo\"] = append(m[\"foo\"], \"bar\")\n\nand\n\n if _, ok := m2[\"k\"]; ok {\n m2[\"k\"] += 4\n } else {\n m2[\"k\"] = 4\n }\n\ncan be simplified to\n\n m[\"k\"] += 4\n\nAvailable since\n 2020.1\n", +- "URL": "https://staticcheck.dev/docs/checks/#S1036", +- "Default": true +- }, +- { +- "Name": "S1037", +- "Doc": "Elaborate way of sleeping\n\nUsing a select statement with a single case receiving\nfrom the result of time.After is a very elaborate way of sleeping that\ncan much simpler be expressed with a simple call to time.Sleep.\n\nAvailable since\n 2020.1\n", +- "URL": "https://staticcheck.dev/docs/checks/#S1037", +- "Default": true +- }, +- { +- "Name": "S1038", +- "Doc": "Unnecessarily complex way of printing formatted string\n\nInstead of using fmt.Print(fmt.Sprintf(...)), one can use fmt.Printf(...).\n\nAvailable since\n 2020.1\n", +- "URL": "https://staticcheck.dev/docs/checks/#S1038", +- "Default": true +- }, +- { +- "Name": "S1039", +- "Doc": "Unnecessary use of fmt.Sprint\n\nCalling fmt.Sprint with a single string argument is unnecessary\nand identical to using the string directly.\n\nAvailable since\n 2020.1\n", +- "URL": "https://staticcheck.dev/docs/checks/#S1039", +- "Default": true +- }, +- { +- "Name": "S1040", +- "Doc": "Type assertion to current type\n\nThe type assertion x.(SomeInterface), when x already has type\nSomeInterface, can only fail if x is nil. Usually, this is\nleft-over code from when x had a different type and you can safely\ndelete the type assertion. If you want to check that x is not nil,\nconsider being explicit and using an actual if x == nil comparison\ninstead of relying on the type assertion panicking.\n\nAvailable since\n 2021.1\n", +- "URL": "https://staticcheck.dev/docs/checks/#S1040", +- "Default": true +- }, +- { +- "Name": "SA1000", +- "Doc": "Invalid regular expression\n\nAvailable since\n 2017.1\n", +- "URL": "https://staticcheck.dev/docs/checks/#SA1000", +- "Default": false +- }, +- { +- "Name": "SA1001", +- "Doc": "Invalid template\n\nAvailable since\n 2017.1\n", +- "URL": "https://staticcheck.dev/docs/checks/#SA1001", +- "Default": true +- }, +- { +- "Name": "SA1002", +- "Doc": "Invalid format in time.Parse\n\nAvailable since\n 2017.1\n", +- "URL": "https://staticcheck.dev/docs/checks/#SA1002", +- "Default": false +- }, +- { +- "Name": "SA1003", +- "Doc": "Unsupported argument to functions in encoding/binary\n\nThe encoding/binary package can only serialize types with known sizes.\nThis precludes the use of the int and uint types, as their sizes\ndiffer on different architectures. Furthermore, it doesn't support\nserializing maps, channels, strings, or functions.\n\nBefore Go 1.8, bool wasn't supported, either.\n\nAvailable since\n 2017.1\n", +- "URL": "https://staticcheck.dev/docs/checks/#SA1003", +- "Default": false +- }, +- { +- "Name": "SA1004", +- "Doc": "Suspiciously small untyped constant in time.Sleep\n\nThe time.Sleep function takes a time.Duration as its only argument.\nDurations are expressed in nanoseconds. Thus, calling time.Sleep(1)\nwill sleep for 1 nanosecond. This is a common source of bugs, as sleep\nfunctions in other languages often accept seconds or milliseconds.\n\nThe time package provides constants such as time.Second to express\nlarge durations. 
These can be combined with arithmetic to express\narbitrary durations, for example 5 * time.Second for 5 seconds.\n\nIf you truly meant to sleep for a tiny amount of time, use\nn * time.Nanosecond to signal to Staticcheck that you did mean to sleep\nfor some amount of nanoseconds.\n\nAvailable since\n 2017.1\n", +- "URL": "https://staticcheck.dev/docs/checks/#SA1004", +- "Default": true +- }, +- { +- "Name": "SA1005", +- "Doc": "Invalid first argument to exec.Command\n\nos/exec runs programs directly (using variants of the fork and exec\nsystem calls on Unix systems). This shouldn't be confused with running\na command in a shell. The shell will allow for features such as input\nredirection, pipes, and general scripting. The shell is also\nresponsible for splitting the user's input into a program name and its\narguments. For example, the equivalent to\n\n ls / /tmp\n\nwould be\n\n exec.Command(\"ls\", \"/\", \"/tmp\")\n\nIf you want to run a command in a shell, consider using something like\nthe following – but be aware that not all systems, particularly\nWindows, will have a /bin/sh program:\n\n exec.Command(\"/bin/sh\", \"-c\", \"ls | grep Awesome\")\n\nAvailable since\n 2017.1\n", +- "URL": "https://staticcheck.dev/docs/checks/#SA1005", +- "Default": true +- }, +- { +- "Name": "SA1007", +- "Doc": "Invalid URL in net/url.Parse\n\nAvailable since\n 2017.1\n", +- "URL": "https://staticcheck.dev/docs/checks/#SA1007", +- "Default": false +- }, +- { +- "Name": "SA1008", +- "Doc": "Non-canonical key in http.Header map\n\nKeys in http.Header maps are canonical, meaning they follow a specific\ncombination of uppercase and lowercase letters. Methods such as\nhttp.Header.Add and http.Header.Del convert inputs into this canonical\nform before manipulating the map.\n\nWhen manipulating http.Header maps directly, as opposed to using the\nprovided methods, care should be taken to stick to canonical form in\norder to avoid inconsistencies. The following piece of code\ndemonstrates one such inconsistency:\n\n h := http.Header{}\n h[\"etag\"] = []string{\"1234\"}\n h.Add(\"etag\", \"5678\")\n fmt.Println(h)\n\n // Output:\n // map[Etag:[5678] etag:[1234]]\n\nThe easiest way of obtaining the canonical form of a key is to use\nhttp.CanonicalHeaderKey.\n\nAvailable since\n 2017.1\n", +- "URL": "https://staticcheck.dev/docs/checks/#SA1008", +- "Default": true +- }, +- { +- "Name": "SA1010", +- "Doc": "(*regexp.Regexp).FindAll called with n == 0, which will always return zero results\n\nIf n \u003e= 0, the function returns at most n matches/submatches. 
To\nreturn all results, specify a negative number.\n\nAvailable since\n 2017.1\n", +- "URL": "https://staticcheck.dev/docs/checks/#SA1010", +- "Default": false +- }, +- { +- "Name": "SA1011", +- "Doc": "Various methods in the 'strings' package expect valid UTF-8, but invalid input is provided\n\nAvailable since\n 2017.1\n", +- "URL": "https://staticcheck.dev/docs/checks/#SA1011", +- "Default": false +- }, +- { +- "Name": "SA1012", +- "Doc": "A nil context.Context is being passed to a function, consider using context.TODO instead\n\nAvailable since\n 2017.1\n", +- "URL": "https://staticcheck.dev/docs/checks/#SA1012", +- "Default": true +- }, +- { +- "Name": "SA1013", +- "Doc": "io.Seeker.Seek is being called with the whence constant as the first argument, but it should be the second\n\nAvailable since\n 2017.1\n", +- "URL": "https://staticcheck.dev/docs/checks/#SA1013", +- "Default": true +- }, +- { +- "Name": "SA1014", +- "Doc": "Non-pointer value passed to Unmarshal or Decode\n\nAvailable since\n 2017.1\n", +- "URL": "https://staticcheck.dev/docs/checks/#SA1014", +- "Default": false +- }, +- { +- "Name": "SA1015", +- "Doc": "Using time.Tick in a way that will leak. Consider using time.NewTicker, and only use time.Tick in tests, commands and endless functions\n\nBefore Go 1.23, time.Tickers had to be closed to be able to be garbage\ncollected. Since time.Tick doesn't make it possible to close the underlying\nticker, using it repeatedly would leak memory.\n\nGo 1.23 fixes this by allowing tickers to be collected even if they weren't closed.\n\nAvailable since\n 2017.1\n", +- "URL": "https://staticcheck.dev/docs/checks/#SA1015", +- "Default": false +- }, +- { +- "Name": "SA1016", +- "Doc": "Trapping a signal that cannot be trapped\n\nNot all signals can be intercepted by a process. Specifically, on\nUNIX-like systems, the syscall.SIGKILL and syscall.SIGSTOP signals are\nnever passed to the process, but instead handled directly by the\nkernel. It is therefore pointless to try and handle these signals.\n\nAvailable since\n 2017.1\n", +- "URL": "https://staticcheck.dev/docs/checks/#SA1016", +- "Default": true +- }, +- { +- "Name": "SA1017", +- "Doc": "Channels used with os/signal.Notify should be buffered\n\nThe os/signal package uses non-blocking channel sends when delivering\nsignals. If the receiving end of the channel isn't ready and the\nchannel is either unbuffered or full, the signal will be dropped. To\navoid missing signals, the channel should be buffered and of the\nappropriate size. For a channel used for notification of just one\nsignal value, a buffer of size 1 is sufficient.\n\nAvailable since\n 2017.1\n", +- "URL": "https://staticcheck.dev/docs/checks/#SA1017", +- "Default": false +- }, +- { +- "Name": "SA1018", +- "Doc": "strings.Replace called with n == 0, which does nothing\n\nWith n == 0, zero instances will be replaced. To replace all\ninstances, use a negative number, or use strings.ReplaceAll.\n\nAvailable since\n 2017.1\n", +- "URL": "https://staticcheck.dev/docs/checks/#SA1018", +- "Default": false +- }, +- { +- "Name": "SA1020", +- "Doc": "Using an invalid host:port pair with a net.Listen-related function\n\nAvailable since\n 2017.1\n", +- "URL": "https://staticcheck.dev/docs/checks/#SA1020", +- "Default": false +- }, +- { +- "Name": "SA1021", +- "Doc": "Using bytes.Equal to compare two net.IP\n\nA net.IP stores an IPv4 or IPv6 address as a slice of bytes. 
The\nlength of the slice for an IPv4 address, however, can be either 4 or\n16 bytes long, using different ways of representing IPv4 addresses. In\norder to correctly compare two net.IPs, the net.IP.Equal method should\nbe used, as it takes both representations into account.\n\nAvailable since\n 2017.1\n", +- "URL": "https://staticcheck.dev/docs/checks/#SA1021", +- "Default": false +- }, +- { +- "Name": "SA1023", +- "Doc": "Modifying the buffer in an io.Writer implementation\n\nWrite must not modify the slice data, even temporarily.\n\nAvailable since\n 2017.1\n", +- "URL": "https://staticcheck.dev/docs/checks/#SA1023", +- "Default": false +- }, +- { +- "Name": "SA1024", +- "Doc": "A string cutset contains duplicate characters\n\nThe strings.TrimLeft and strings.TrimRight functions take cutsets, not\nprefixes. A cutset is treated as a set of characters to remove from a\nstring. For example,\n\n strings.TrimLeft(\"42133word\", \"1234\")\n\nwill result in the string \"word\" – any characters that are 1, 2, 3 or\n4 are cut from the left of the string.\n\nIn order to remove one string from another, use strings.TrimPrefix instead.\n\nAvailable since\n 2017.1\n", +- "URL": "https://staticcheck.dev/docs/checks/#SA1024", +- "Default": false +- }, +- { +- "Name": "SA1025", +- "Doc": "It is not possible to use (*time.Timer).Reset's return value correctly\n\nAvailable since\n 2019.1\n", +- "URL": "https://staticcheck.dev/docs/checks/#SA1025", +- "Default": false +- }, +- { +- "Name": "SA1026", +- "Doc": "Cannot marshal channels or functions\n\nAvailable since\n 2019.2\n", +- "URL": "https://staticcheck.dev/docs/checks/#SA1026", +- "Default": false +- }, +- { +- "Name": "SA1027", +- "Doc": "Atomic access to 64-bit variable must be 64-bit aligned\n\nOn ARM, x86-32, and 32-bit MIPS, it is the caller's responsibility to\narrange for 64-bit alignment of 64-bit words accessed atomically. The\nfirst word in a variable or in an allocated struct, array, or slice\ncan be relied upon to be 64-bit aligned.\n\nYou can use the structlayout tool to inspect the alignment of fields\nin a struct.\n\nAvailable since\n 2019.2\n", +- "URL": "https://staticcheck.dev/docs/checks/#SA1027", +- "Default": false +- }, +- { +- "Name": "SA1028", +- "Doc": "sort.Slice can only be used on slices\n\nThe first argument of sort.Slice must be a slice.\n\nAvailable since\n 2020.1\n", +- "URL": "https://staticcheck.dev/docs/checks/#SA1028", +- "Default": false +- }, +- { +- "Name": "SA1029", +- "Doc": "Inappropriate key in call to context.WithValue\n\nThe provided key must be comparable and should not be\nof type string or any other built-in type to avoid collisions between\npackages using context. Users of WithValue should define their own\ntypes for keys.\n\nTo avoid allocating when assigning to an interface{},\ncontext keys often have concrete type struct{}. 
Alternatively,\nexported context key variables' static type should be a pointer or\ninterface.\n\nAvailable since\n 2020.1\n", +- "URL": "https://staticcheck.dev/docs/checks/#SA1029", +- "Default": false +- }, +- { +- "Name": "SA1030", +- "Doc": "Invalid argument in call to a strconv function\n\nThis check validates the format, number base and bit size arguments of\nthe various parsing and formatting functions in strconv.\n\nAvailable since\n 2021.1\n", +- "URL": "https://staticcheck.dev/docs/checks/#SA1030", +- "Default": false +- }, +- { +- "Name": "SA1031", +- "Doc": "Overlapping byte slices passed to an encoder\n\nIn an encoding function of the form Encode(dst, src), dst and\nsrc were found to reference the same memory. This can result in\nsrc bytes being overwritten before they are read, when the encoder\nwrites more than one byte per src byte.\n\nAvailable since\n 2024.1\n", +- "URL": "https://staticcheck.dev/docs/checks/#SA1031", +- "Default": false +- }, +- { +- "Name": "SA1032", +- "Doc": "Wrong order of arguments to errors.Is\n\nThe first argument of the function errors.Is is the error\nthat we have and the second argument is the error we're trying to match against.\nFor example:\n\n\tif errors.Is(err, io.EOF) { ... }\n\nThis check detects some cases where the two arguments have been swapped. It\nflags any calls where the first argument is referring to a package-level error\nvariable, such as\n\n\tif errors.Is(io.EOF, err) { /* this is wrong */ }\n\nAvailable since\n 2024.1\n", +- "URL": "https://staticcheck.dev/docs/checks/#SA1032", +- "Default": false +- }, +- { +- "Name": "SA2001", +- "Doc": "Empty critical section, did you mean to defer the unlock?\n\nEmpty critical sections of the kind\n\n mu.Lock()\n mu.Unlock()\n\nare very often a typo, and the following was intended instead:\n\n mu.Lock()\n defer mu.Unlock()\n\nDo note that sometimes empty critical sections can be useful, as a\nform of signaling to wait on another goroutine. Many times, there are\nsimpler ways of achieving the same effect. When that isn't the case,\nthe code should be amply commented to avoid confusion. Combining such\ncomments with a //lint:ignore directive can be used to suppress this\nrare false positive.\n\nAvailable since\n 2017.1\n", +- "URL": "https://staticcheck.dev/docs/checks/#SA2001", +- "Default": true +- }, +- { +- "Name": "SA2002", +- "Doc": "Called testing.T.FailNow or SkipNow in a goroutine, which isn't allowed\n\nAvailable since\n 2017.1\n", +- "URL": "https://staticcheck.dev/docs/checks/#SA2002", +- "Default": false +- }, +- { +- "Name": "SA2003", +- "Doc": "Deferred Lock right after locking, likely meant to defer Unlock instead\n\nAvailable since\n 2017.1\n", +- "URL": "https://staticcheck.dev/docs/checks/#SA2003", +- "Default": false +- }, +- { +- "Name": "SA3000", +- "Doc": "TestMain doesn't call os.Exit, hiding test failures\n\nTest executables (and in turn 'go test') exit with a non-zero status\ncode if any tests failed. When specifying your own TestMain function,\nit is your responsibility to arrange for this, by calling os.Exit with\nthe correct code. 
The correct code is returned by (*testing.M).Run, so\nthe usual way of implementing TestMain is to end it with\nos.Exit(m.Run()).\n\nAvailable since\n 2017.1\n", +- "URL": "https://staticcheck.dev/docs/checks/#SA3000", +- "Default": true +- }, +- { +- "Name": "SA3001", +- "Doc": "Assigning to b.N in benchmarks distorts the results\n\nThe testing package dynamically sets b.N to improve the reliability of\nbenchmarks and uses it in computations to determine the duration of a\nsingle operation. Benchmark code must not alter b.N as this would\nfalsify results.\n\nAvailable since\n 2017.1\n", +- "URL": "https://staticcheck.dev/docs/checks/#SA3001", +- "Default": true +- }, +- { +- "Name": "SA4000", +- "Doc": "Binary operator has identical expressions on both sides\n\nAvailable since\n 2017.1\n", +- "URL": "https://staticcheck.dev/docs/checks/#SA4000", +- "Default": true +- }, +- { +- "Name": "SA4001", +- "Doc": "\u0026*x gets simplified to x, it does not copy x\n\nAvailable since\n 2017.1\n", +- "URL": "https://staticcheck.dev/docs/checks/#SA4001", +- "Default": true +- }, +- { +- "Name": "SA4003", +- "Doc": "Comparing unsigned values against negative values is pointless\n\nAvailable since\n 2017.1\n", +- "URL": "https://staticcheck.dev/docs/checks/#SA4003", +- "Default": true +- }, +- { +- "Name": "SA4004", +- "Doc": "The loop exits unconditionally after one iteration\n\nAvailable since\n 2017.1\n", +- "URL": "https://staticcheck.dev/docs/checks/#SA4004", +- "Default": true +- }, +- { +- "Name": "SA4005", +- "Doc": "Field assignment that will never be observed. Did you mean to use a pointer receiver?\n\nAvailable since\n 2021.1\n", +- "URL": "https://staticcheck.dev/docs/checks/#SA4005", +- "Default": false +- }, +- { +- "Name": "SA4006", +- "Doc": "A value assigned to a variable is never read before being overwritten. Forgotten error check or dead code?\n\nAvailable since\n 2017.1\n", +- "URL": "https://staticcheck.dev/docs/checks/#SA4006", +- "Default": false +- }, +- { +- "Name": "SA4008", +- "Doc": "The variable in the loop condition never changes, are you incrementing the wrong variable?\n\nFor example:\n\n\tfor i := 0; i \u003c 10; j++ { ... }\n\nThis may also occur when a loop can only execute once because of unconditional\ncontrol flow that terminates the loop. For example, when a loop body contains an\nunconditional break, return, or panic:\n\n\tfunc f() {\n\t\tpanic(\"oops\")\n\t}\n\tfunc g() {\n\t\tfor i := 0; i \u003c 10; i++ {\n\t\t\t// f unconditionally calls panic, which means \"i\" is\n\t\t\t// never incremented.\n\t\t\tf()\n\t\t}\n\t}\n\nAvailable since\n 2017.1\n", +- "URL": "https://staticcheck.dev/docs/checks/#SA4008", +- "Default": false +- }, +- { +- "Name": "SA4009", +- "Doc": "A function argument is overwritten before its first use\n\nAvailable since\n 2017.1\n", +- "URL": "https://staticcheck.dev/docs/checks/#SA4009", +- "Default": false +- }, +- { +- "Name": "SA4010", +- "Doc": "The result of append will never be observed anywhere\n\nAvailable since\n 2017.1\n", +- "URL": "https://staticcheck.dev/docs/checks/#SA4010", +- "Default": false +- }, +- { +- "Name": "SA4011", +- "Doc": "Break statement with no effect. 
Did you mean to break out of an outer loop?\n\nAvailable since\n 2017.1\n", +- "URL": "https://staticcheck.dev/docs/checks/#SA4011", +- "Default": true +- }, +- { +- "Name": "SA4012", +- "Doc": "Comparing a value against NaN even though no value is equal to NaN\n\nAvailable since\n 2017.1\n", +- "URL": "https://staticcheck.dev/docs/checks/#SA4012", +- "Default": false +- }, +- { +- "Name": "SA4013", +- "Doc": "Negating a boolean twice (!!b) is the same as writing b. This is either redundant, or a typo.\n\nAvailable since\n 2017.1\n", +- "URL": "https://staticcheck.dev/docs/checks/#SA4013", +- "Default": true +- }, +- { +- "Name": "SA4014", +- "Doc": "An if/else if chain has repeated conditions and no side-effects; if the condition didn't match the first time, it won't match the second time, either\n\nAvailable since\n 2017.1\n", +- "URL": "https://staticcheck.dev/docs/checks/#SA4014", +- "Default": true +- }, +- { +- "Name": "SA4015", +- "Doc": "Calling functions like math.Ceil on floats converted from integers doesn't do anything useful\n\nAvailable since\n 2017.1\n", +- "URL": "https://staticcheck.dev/docs/checks/#SA4015", +- "Default": false +- }, +- { +- "Name": "SA4016", +- "Doc": "Certain bitwise operations, such as x ^ 0, do not do anything useful\n\nAvailable since\n 2017.1\n", +- "URL": "https://staticcheck.dev/docs/checks/#SA4016", +- "Default": true +- }, +- { +- "Name": "SA4017", +- "Doc": "Discarding the return values of a function without side effects, making the call pointless\n\nAvailable since\n 2017.1\n", +- "URL": "https://staticcheck.dev/docs/checks/#SA4017", +- "Default": false +- }, +- { +- "Name": "SA4018", +- "Doc": "Self-assignment of variables\n\nAvailable since\n 2017.1\n", +- "URL": "https://staticcheck.dev/docs/checks/#SA4018", +- "Default": false +- }, +- { +- "Name": "SA4019", +- "Doc": "Multiple, identical build constraints in the same file\n\nAvailable since\n 2017.1\n", +- "URL": "https://staticcheck.dev/docs/checks/#SA4019", +- "Default": true +- }, +- { +- "Name": "SA4020", +- "Doc": "Unreachable case clause in a type switch\n\nIn a type switch like the following\n\n type T struct{}\n func (T) Read(b []byte) (int, error) { return 0, nil }\n\n var v any = T{}\n\n switch v.(type) {\n case io.Reader:\n // ...\n case T:\n // unreachable\n }\n\nthe second case clause can never be reached because T implements\nio.Reader and case clauses are evaluated in source order.\n\nAnother example:\n\n type T struct{}\n func (T) Read(b []byte) (int, error) { return 0, nil }\n func (T) Close() error { return nil }\n\n var v any = T{}\n\n switch v.(type) {\n case io.Reader:\n // ...\n case io.ReadCloser:\n // unreachable\n }\n\nEven though T has a Close method and thus implements io.ReadCloser,\nio.Reader will always match first. The method set of io.Reader is a\nsubset of io.ReadCloser. Thus it is impossible to match the second\ncase without matching the first case.\n\n\nStructurally equivalent interfaces\n\nA special case of the previous example are structurally identical\ninterfaces. 
Given these declarations\n\n type T error\n type V error\n\n func doSomething() error {\n err, ok := doAnotherThing()\n if ok {\n return T(err)\n }\n\n return U(err)\n }\n\nthe following type switch will have an unreachable case clause:\n\n switch doSomething().(type) {\n case T:\n // ...\n case V:\n // unreachable\n }\n\nT will always match before V because they are structurally equivalent\nand therefore doSomething()'s return value implements both.\n\nAvailable since\n 2019.2\n", +- "URL": "https://staticcheck.dev/docs/checks/#SA4020", +- "Default": true +- }, +- { +- "Name": "SA4022", +- "Doc": "Comparing the address of a variable against nil\n\nCode such as 'if \u0026x == nil' is meaningless, because taking the address of a variable always yields a non-nil pointer.\n\nAvailable since\n 2020.1\n", +- "URL": "https://staticcheck.dev/docs/checks/#SA4022", +- "Default": true +- }, +- { +- "Name": "SA4023", +- "Doc": "Impossible comparison of interface value with untyped nil\n\nUnder the covers, interfaces are implemented as two elements, a\ntype T and a value V. V is a concrete value such as an int,\nstruct or pointer, never an interface itself, and has type T. For\ninstance, if we store the int value 3 in an interface, the\nresulting interface value has, schematically, (T=int, V=3). The\nvalue V is also known as the interface's dynamic value, since a\ngiven interface variable might hold different values V (and\ncorresponding types T) during the execution of the program.\n\nAn interface value is nil only if the V and T are both\nunset, (T=nil, V is not set), In particular, a nil interface will\nalways hold a nil type. If we store a nil pointer of type *int\ninside an interface value, the inner type will be *int regardless\nof the value of the pointer: (T=*int, V=nil). Such an interface\nvalue will therefore be non-nil even when the pointer value V\ninside is nil.\n\nThis situation can be confusing, and arises when a nil value is\nstored inside an interface value such as an error return:\n\n func returnsError() error {\n var p *MyError = nil\n if bad() {\n p = ErrBad\n }\n return p // Will always return a non-nil error.\n }\n\nIf all goes well, the function returns a nil p, so the return\nvalue is an error interface value holding (T=*MyError, V=nil).\nThis means that if the caller compares the returned error to nil,\nit will always look as if there was an error even if nothing bad\nhappened. To return a proper nil error to the caller, the\nfunction must return an explicit nil:\n\n func returnsError() error {\n if bad() {\n return ErrBad\n }\n return nil\n }\n\nIt's a good idea for functions that return errors always to use\nthe error type in their signature (as we did above) rather than a\nconcrete type such as *MyError, to help guarantee the error is\ncreated correctly. As an example, os.Open returns an error even\nthough, if not nil, it's always of concrete type *os.PathError.\n\nSimilar situations to those described here can arise whenever\ninterfaces are used. 
Just keep in mind that if any concrete value\nhas been stored in the interface, the interface will not be nil.\nFor more information, see The Laws of\nReflection at https://golang.org/doc/articles/laws_of_reflection.html.\n\nThis text has been copied from\nhttps://golang.org/doc/faq#nil_error, licensed under the Creative\nCommons Attribution 3.0 License.\n\nAvailable since\n 2020.2\n", +- "URL": "https://staticcheck.dev/docs/checks/#SA4023", +- "Default": false +- }, +- { +- "Name": "SA4024", +- "Doc": "Checking for impossible return value from a builtin function\n\nReturn values of the len and cap builtins cannot be negative.\n\nSee https://golang.org/pkg/builtin/#len and https://golang.org/pkg/builtin/#cap.\n\nExample:\n\n if len(slice) \u003c 0 {\n fmt.Println(\"unreachable code\")\n }\n\nAvailable since\n 2021.1\n", +- "URL": "https://staticcheck.dev/docs/checks/#SA4024", +- "Default": true +- }, +- { +- "Name": "SA4025", +- "Doc": "Integer division of literals that results in zero\n\nWhen dividing two integer constants, the result will\nalso be an integer. Thus, a division such as 2 / 3 results in 0.\nThis is true for all of the following examples:\n\n\t_ = 2 / 3\n\tconst _ = 2 / 3\n\tconst _ float64 = 2 / 3\n\t_ = float64(2 / 3)\n\nStaticcheck will flag such divisions if both sides of the division are\ninteger literals, as it is highly unlikely that the division was\nintended to truncate to zero. Staticcheck will not flag integer\ndivision involving named constants, to avoid noisy positives.\n\nAvailable since\n 2021.1\n", +- "URL": "https://staticcheck.dev/docs/checks/#SA4025", +- "Default": true +- }, +- { +- "Name": "SA4026", +- "Doc": "Go constants cannot express negative zero\n\nIn IEEE 754 floating point math, zero has a sign and can be positive\nor negative. This can be useful in certain numerical code.\n\nGo constants, however, cannot express negative zero. This means that\nthe literals -0.0 and 0.0 have the same ideal value (zero) and\nwill both represent positive zero at runtime.\n\nTo explicitly and reliably create a negative zero, you can use the\nmath.Copysign function: math.Copysign(0, -1).\n\nAvailable since\n 2021.1\n", +- "URL": "https://staticcheck.dev/docs/checks/#SA4026", +- "Default": true +- }, +- { +- "Name": "SA4027", +- "Doc": "(*net/url.URL).Query returns a copy, modifying it doesn't change the URL\n\n(*net/url.URL).Query parses the current value of net/url.URL.RawQuery\nand returns it as a map of type net/url.Values. Subsequent changes to\nthis map will not affect the URL unless the map gets encoded and\nassigned to the URL's RawQuery.\n\nAs a consequence, the following code pattern is an expensive no-op:\nu.Query().Add(key, value).\n\nAvailable since\n 2021.1\n", +- "URL": "https://staticcheck.dev/docs/checks/#SA4027", +- "Default": true +- }, +- { +- "Name": "SA4028", +- "Doc": "x % 1 is always zero\n\nAvailable since\n 2022.1\n", +- "URL": "https://staticcheck.dev/docs/checks/#SA4028", +- "Default": true +- }, +- { +- "Name": "SA4029", +- "Doc": "Ineffective attempt at sorting slice\n\nsort.Float64Slice, sort.IntSlice, and sort.StringSlice are\ntypes, not functions. Doing x = sort.StringSlice(x) does nothing,\nespecially not sort any values. 
The correct usage is\nsort.Sort(sort.StringSlice(x)) or sort.StringSlice(x).Sort(),\nbut there are more convenient helpers, namely sort.Float64s,\nsort.Ints, and sort.Strings.\n\nAvailable since\n 2022.1\n", +- "URL": "https://staticcheck.dev/docs/checks/#SA4029", +- "Default": true +- }, +- { +- "Name": "SA4030", +- "Doc": "Ineffective attempt at generating random number\n\nFunctions in the math/rand package that accept upper limits, such\nas Intn, generate random numbers in the half-open interval [0,n). In\nother words, the generated numbers will be \u003e= 0 and \u003c n – they\ndon't include n. rand.Intn(1) therefore doesn't generate 0\nor 1, it always generates 0.\n\nAvailable since\n 2022.1\n", +- "URL": "https://staticcheck.dev/docs/checks/#SA4030", +- "Default": true +- }, +- { +- "Name": "SA4031", +- "Doc": "Checking never-nil value against nil\n\nAvailable since\n 2022.1\n", +- "URL": "https://staticcheck.dev/docs/checks/#SA4031", +- "Default": false +- }, +- { +- "Name": "SA4032", +- "Doc": "Comparing runtime.GOOS or runtime.GOARCH against impossible value\n\nAvailable since\n 2024.1\n", +- "URL": "https://staticcheck.dev/docs/checks/#SA4032", +- "Default": true +- }, +- { +- "Name": "SA5000", +- "Doc": "Assignment to nil map\n\nAvailable since\n 2017.1\n", +- "URL": "https://staticcheck.dev/docs/checks/#SA5000", +- "Default": false +- }, +- { +- "Name": "SA5001", +- "Doc": "Deferring Close before checking for a possible error\n\nAvailable since\n 2017.1\n", +- "URL": "https://staticcheck.dev/docs/checks/#SA5001", +- "Default": true +- }, +- { +- "Name": "SA5002", +- "Doc": "The empty for loop ('for {}') spins and can block the scheduler\n\nAvailable since\n 2017.1\n", +- "URL": "https://staticcheck.dev/docs/checks/#SA5002", +- "Default": false +- }, +- { +- "Name": "SA5003", +- "Doc": "Defers in infinite loops will never execute\n\nDefers are scoped to the surrounding function, not the surrounding\nblock. In a function that never returns, i.e. one containing an\ninfinite loop, defers will never execute.\n\nAvailable since\n 2017.1\n", +- "URL": "https://staticcheck.dev/docs/checks/#SA5003", +- "Default": true +- }, +- { +- "Name": "SA5004", +- "Doc": "'for { select { ...' with an empty default branch spins\n\nAvailable since\n 2017.1\n", +- "URL": "https://staticcheck.dev/docs/checks/#SA5004", +- "Default": true +- }, +- { +- "Name": "SA5005", +- "Doc": "The finalizer references the finalized object, preventing garbage collection\n\nA finalizer is a function associated with an object that runs when the\ngarbage collector is ready to collect said object, that is when the\nobject is no longer referenced by anything.\n\nIf the finalizer references the object, however, it will always remain\nas the final reference to that object, preventing the garbage\ncollector from collecting the object. The finalizer will never run,\nand the object will never be collected, leading to a memory leak. That\nis why the finalizer should instead use its first argument to operate\non the object. That way, the number of references can temporarily go\nto zero before the object is being passed to the finalizer.\n\nAvailable since\n 2017.1\n", +- "URL": "https://staticcheck.dev/docs/checks/#SA5005", +- "Default": false +- }, +- { +- "Name": "SA5007", +- "Doc": "Infinite recursive call\n\nA function that calls itself recursively needs to have an exit\ncondition. 
Otherwise it will recurse forever, until the system runs\nout of memory.\n\nThis issue can be caused by simple bugs such as forgetting to add an\nexit condition. It can also happen \"on purpose\". Some languages have\ntail call optimization which makes certain infinite recursive calls\nsafe to use. Go, however, does not implement TCO, and as such a loop\nshould be used instead.\n\nAvailable since\n 2017.1\n", +- "URL": "https://staticcheck.dev/docs/checks/#SA5007", +- "Default": false +- }, +- { +- "Name": "SA5008", +- "Doc": "Invalid struct tag\n\nAvailable since\n 2019.2\n", +- "URL": "https://staticcheck.dev/docs/checks/#SA5008", +- "Default": true +- }, +- { +- "Name": "SA5010", +- "Doc": "Impossible type assertion\n\nSome type assertions can be statically proven to be\nimpossible. This is the case when the method sets of both\narguments of the type assertion conflict with each other, for\nexample by containing the same method with different\nsignatures.\n\nThe Go compiler already applies this check when asserting from an\ninterface value to a concrete type. If the concrete type misses\nmethods from the interface, or if function signatures don't match,\nthen the type assertion can never succeed.\n\nThis check applies the same logic when asserting from one interface to\nanother. If both interface types contain the same method but with\ndifferent signatures, then the type assertion can never succeed,\neither.\n\nAvailable since\n 2020.1\n", +- "URL": "https://staticcheck.dev/docs/checks/#SA5010", +- "Default": false +- }, +- { +- "Name": "SA5011", +- "Doc": "Possible nil pointer dereference\n\nA pointer is being dereferenced unconditionally, while\nalso being checked against nil in another place. This suggests that\nthe pointer may be nil and dereferencing it may panic. This is\ncommonly a result of improperly ordered code or missing return\nstatements. Consider the following examples:\n\n func fn(x *int) {\n fmt.Println(*x)\n\n // This nil check is equally important for the previous dereference\n if x != nil {\n foo(*x)\n }\n }\n\n func TestFoo(t *testing.T) {\n x := compute()\n if x == nil {\n t.Errorf(\"nil pointer received\")\n }\n\n // t.Errorf does not abort the test, so if x is nil, the next line will panic.\n foo(*x)\n }\n\nStaticcheck tries to deduce which functions abort control flow.\nFor example, it is aware that a function will not continue\nexecution after a call to panic or log.Fatal. However, sometimes\nthis detection fails, in particular in the presence of\nconditionals. Consider the following example:\n\n func Log(msg string, level int) {\n fmt.Println(msg)\n if level == levelFatal {\n os.Exit(1)\n }\n }\n\n func Fatal(msg string) {\n Log(msg, levelFatal)\n }\n\n func fn(x *int) {\n if x == nil {\n Fatal(\"unexpected nil pointer\")\n }\n fmt.Println(*x)\n }\n\nStaticcheck will flag the dereference of x, even though it is perfectly\nsafe. Staticcheck is not able to deduce that a call to\nFatal will exit the program. For the time being, the easiest\nworkaround is to modify the definition of Fatal like so:\n\n func Fatal(msg string) {\n Log(msg, levelFatal)\n panic(\"unreachable\")\n }\n\nWe also hard-code functions from common logging packages such as\nlogrus. 
Please file an issue if we're missing support for a\npopular package.\n\nAvailable since\n 2020.1\n", +- "URL": "https://staticcheck.dev/docs/checks/#SA5011", +- "Default": false +- }, +- { +- "Name": "SA5012", +- "Doc": "Passing odd-sized slice to function expecting even size\n\nSome functions that take slices as parameters expect the slices to have an even number of elements. \nOften, these functions treat elements in a slice as pairs. \nFor example, strings.NewReplacer takes pairs of old and new strings, \nand calling it with an odd number of elements would be an error.\n\nAvailable since\n 2020.2\n", +- "URL": "https://staticcheck.dev/docs/checks/#SA5012", +- "Default": false +- }, +- { +- "Name": "SA6000", +- "Doc": "Using regexp.Match or related in a loop, should use regexp.Compile\n\nAvailable since\n 2017.1\n", +- "URL": "https://staticcheck.dev/docs/checks/#SA6000", +- "Default": false +- }, +- { +- "Name": "SA6001", +- "Doc": "Missing an optimization opportunity when indexing maps by byte slices\n\nMap keys must be comparable, which precludes the use of byte slices.\nThis usually leads to using string keys and converting byte slices to\nstrings.\n\nNormally, a conversion of a byte slice to a string needs to copy the data and\ncauses allocations. The compiler, however, recognizes m[string(b)] and\nuses the data of b directly, without copying it, because it knows that\nthe data can't change during the map lookup. This leads to the\ncounter-intuitive situation that\n\n k := string(b)\n println(m[k])\n println(m[k])\n\nwill be less efficient than\n\n println(m[string(b)])\n println(m[string(b)])\n\nbecause the first version needs to copy and allocate, while the second\none does not.\n\nFor some history on this optimization, check out commit\nf5f5a8b6209f84961687d993b93ea0d397f5d5bf in the Go repository.\n\nAvailable since\n 2017.1\n", +- "URL": "https://staticcheck.dev/docs/checks/#SA6001", +- "Default": false +- }, +- { +- "Name": "SA6002", +- "Doc": "Storing non-pointer values in sync.Pool allocates memory\n\nA sync.Pool is used to avoid unnecessary allocations and reduce the\namount of work the garbage collector has to do.\n\nWhen passing a value that is not a pointer to a function that accepts\nan interface, the value needs to be placed on the heap, which means an\nadditional allocation. Slices are a common thing to put in sync.Pools,\nand they're structs with 3 fields (length, capacity, and a pointer to\nan array). In order to avoid the extra allocation, one should store a\npointer to the slice instead.\n\nSee the comments on https://go-review.googlesource.com/c/go/+/24371\nthat discuss this problem.\n\nAvailable since\n 2017.1\n", +- "URL": "https://staticcheck.dev/docs/checks/#SA6002", +- "Default": false +- }, +- { +- "Name": "SA6003", +- "Doc": "Converting a string to a slice of runes before ranging over it\n\nYou may want to loop over the runes in a string. Instead of converting\nthe string to a slice of runes and looping over that, you can loop\nover the string itself. That is,\n\n for _, r := range s {}\n\nand\n\n for _, r := range []rune(s) {}\n\nwill yield the same values. The first version, however, will be faster\nand avoid unnecessary memory allocations.\n\nDo note that if you are interested in the indices, ranging over a\nstring and over a slice of runes will yield different indices. 
The\nfirst one yields byte offsets, while the second one yields indices in\nthe slice of runes.\n\nAvailable since\n 2017.1\n", +- "URL": "https://staticcheck.dev/docs/checks/#SA6003", +- "Default": false +- }, +- { +- "Name": "SA6005", +- "Doc": "Inefficient string comparison with strings.ToLower or strings.ToUpper\n\nConverting two strings to the same case and comparing them like so\n\n if strings.ToLower(s1) == strings.ToLower(s2) {\n ...\n }\n\nis significantly more expensive than comparing them with\nstrings.EqualFold(s1, s2). This is due to memory usage as well as\ncomputational complexity.\n\nstrings.ToLower will have to allocate memory for the new strings, as\nwell as convert both strings fully, even if they differ on the very\nfirst byte. strings.EqualFold, on the other hand, compares the strings\none character at a time. It doesn't need to create two intermediate\nstrings and can return as soon as the first non-matching character has\nbeen found.\n\nFor a more in-depth explanation of this issue, see\nhttps://blog.digitalocean.com/how-to-efficiently-compare-strings-in-go/\n\nAvailable since\n 2019.2\n", +- "URL": "https://staticcheck.dev/docs/checks/#SA6005", +- "Default": true +- }, +- { +- "Name": "SA6006", +- "Doc": "Using io.WriteString to write []byte\n\nUsing io.WriteString to write a slice of bytes, as in\n\n io.WriteString(w, string(b))\n\nis both unnecessary and inefficient. Converting from []byte to string\nhas to allocate and copy the data, and we could simply use w.Write(b)\ninstead.\n\nAvailable since\n 2024.1\n", +- "URL": "https://staticcheck.dev/docs/checks/#SA6006", +- "Default": true +- }, +- { +- "Name": "SA9001", +- "Doc": "Defers in range loops may not run when you expect them to\n\nAvailable since\n 2017.1\n", +- "URL": "https://staticcheck.dev/docs/checks/#SA9001", +- "Default": false +- }, +- { +- "Name": "SA9002", +- "Doc": "Using a non-octal os.FileMode that looks like it was meant to be in octal.\n\nAvailable since\n 2017.1\n", +- "URL": "https://staticcheck.dev/docs/checks/#SA9002", +- "Default": true +- }, +- { +- "Name": "SA9003", +- "Doc": "Empty body in an if or else branch\n\nAvailable since\n 2017.1, non-default\n", +- "URL": "https://staticcheck.dev/docs/checks/#SA9003", +- "Default": false +- }, +- { +- "Name": "SA9004", +- "Doc": "Only the first constant has an explicit type\n\nIn a constant declaration such as the following:\n\n const (\n First byte = 1\n Second = 2\n )\n\nthe constant Second does not have the same type as the constant First.\nThis construct shouldn't be confused with\n\n const (\n First byte = iota\n Second\n )\n\nwhere First and Second do indeed have the same type. The type is only\npassed on when no explicit value is assigned to the constant.\n\nWhen declaring enumerations with explicit values it is therefore\nimportant not to write\n\n const (\n EnumFirst EnumType = 1\n EnumSecond = 2\n EnumThird = 3\n )\n\nThis discrepancy in types can cause various confusing behaviors and\nbugs.\n\n\nWrong type in variable declarations\n\nThe most obvious issue with such incorrect enumerations expresses\nitself as a compile error:\n\n package pkg\n\n const (\n EnumFirst uint8 = 1\n EnumSecond = 2\n )\n\n func fn(useFirst bool) {\n x := EnumSecond\n if useFirst {\n x = EnumFirst\n }\n }\n\nfails to compile with\n\n ./const.go:11:5: cannot use EnumFirst (type uint8) as type int in assignment\n\n\nLosing method sets\n\nA more subtle issue occurs with types that have methods and optional\ninterfaces. 
Consider the following:\n\n package main\n\n import \"fmt\"\n\n type Enum int\n\n func (e Enum) String() string {\n return \"an enum\"\n }\n\n const (\n EnumFirst Enum = 1\n EnumSecond = 2\n )\n\n func main() {\n fmt.Println(EnumFirst)\n fmt.Println(EnumSecond)\n }\n\nThis code will output\n\n an enum\n 2\n\nas EnumSecond has no explicit type, and thus defaults to int.\n\nAvailable since\n 2019.1\n", +- "URL": "https://staticcheck.dev/docs/checks/#SA9004", +- "Default": true +- }, +- { +- "Name": "SA9005", +- "Doc": "Trying to marshal a struct with no public fields nor custom marshaling\n\nThe encoding/json and encoding/xml packages only operate on exported\nfields in structs, not unexported ones. It is usually an error to try\nto (un)marshal structs that only consist of unexported fields.\n\nThis check will not flag calls involving types that define custom\nmarshaling behavior, e.g. via MarshalJSON methods. It will also not\nflag empty structs.\n\nAvailable since\n 2019.2\n", +- "URL": "https://staticcheck.dev/docs/checks/#SA9005", +- "Default": false +- }, +- { +- "Name": "SA9006", +- "Doc": "Dubious bit shifting of a fixed size integer value\n\nBit shifting a value past its size will always clear the value.\n\nFor instance:\n\n v := int8(42)\n v \u003e\u003e= 8\n\nwill always result in 0.\n\nThis check flags bit shifting operations on fixed size integer values only.\nThat is, int, uint and uintptr are never flagged to avoid potential false\npositives in somewhat exotic but valid bit twiddling tricks:\n\n // Clear any value above 32 bits if integers are more than 32 bits.\n func f(i int) int {\n v := i \u003e\u003e 32\n v = v \u003c\u003c 32\n return i-v\n }\n\nAvailable since\n 2020.2\n", +- "URL": "https://staticcheck.dev/docs/checks/#SA9006", +- "Default": true +- }, +- { +- "Name": "SA9007", +- "Doc": "Deleting a directory that shouldn't be deleted\n\nIt is virtually never correct to delete system directories such as\n/tmp or the user's home directory. However, it can be fairly easy to\ndo by mistake, for example by mistakenly using os.TempDir instead\nof ioutil.TempDir, or by forgetting to add a suffix to the result\nof os.UserHomeDir.\n\nWriting\n\n d := os.TempDir()\n defer os.RemoveAll(d)\n\nin your unit tests will have a devastating effect on the stability of your system.\n\nThis check flags attempts at deleting the following directories:\n\n- os.TempDir\n- os.UserCacheDir\n- os.UserConfigDir\n- os.UserHomeDir\n\nAvailable since\n 2022.1\n", +- "URL": "https://staticcheck.dev/docs/checks/#SA9007", +- "Default": false +- }, +- { +- "Name": "SA9008", +- "Doc": "else branch of a type assertion is probably not reading the right value\n\nWhen declaring variables as part of an if statement (like in 'if\nfoo := ...; foo {'), the same variables will also be in the scope of\nthe else branch. This means that in the following example\n\n if x, ok := x.(int); ok {\n // ...\n } else {\n fmt.Printf(\"unexpected type %T\", x)\n }\n\nx in the else branch will refer to the x from x, ok\n:=; it will not refer to the x that is being type-asserted. 
The\nresult of a failed type assertion is the zero value of the type that\nis being asserted to, so x in the else branch will always have the\nvalue 0 and the type int.\n\nAvailable since\n 2022.1\n", +- "URL": "https://staticcheck.dev/docs/checks/#SA9008", +- "Default": false +- }, +- { +- "Name": "SA9009", +- "Doc": "Ineffectual Go compiler directive\n\nA potential Go compiler directive was found, but is ineffectual as it begins\nwith whitespace.\n\nAvailable since\n 2024.1\n", +- "URL": "https://staticcheck.dev/docs/checks/#SA9009", +- "Default": true +- }, +- { +- "Name": "ST1000", +- "Doc": "Incorrect or missing package comment\n\nPackages must have a package comment that is formatted according to\nthe guidelines laid out in\nhttps://go.dev/wiki/CodeReviewComments#package-comments.\n\nAvailable since\n 2019.1, non-default\n", +- "URL": "https://staticcheck.dev/docs/checks/#ST1000", +- "Default": false +- }, +- { +- "Name": "ST1001", +- "Doc": "Dot imports are discouraged\n\nDot imports that aren't in external test packages are discouraged.\n\nThe dot_import_whitelist option can be used to whitelist certain\nimports.\n\nQuoting Go Code Review Comments:\n\n\u003e The import . form can be useful in tests that, due to circular\n\u003e dependencies, cannot be made part of the package being tested:\n\u003e \n\u003e package foo_test\n\u003e \n\u003e import (\n\u003e \"bar/testutil\" // also imports \"foo\"\n\u003e . \"foo\"\n\u003e )\n\u003e \n\u003e In this case, the test file cannot be in package foo because it\n\u003e uses bar/testutil, which imports foo. So we use the import .\n\u003e form to let the file pretend to be part of package foo even though\n\u003e it is not. Except for this one case, do not use import . in your\n\u003e programs. It makes the programs much harder to read because it is\n\u003e unclear whether a name like Quux is a top-level identifier in the\n\u003e current package or in an imported package.\n\nAvailable since\n 2019.1\n\nOptions\n dot_import_whitelist\n", +- "URL": "https://staticcheck.dev/docs/checks/#ST1001", +- "Default": false +- }, +- { +- "Name": "ST1003", +- "Doc": "Poorly chosen identifier\n\nIdentifiers, such as variable and package names, follow certain rules.\n\nSee the following links for details:\n\n- https://go.dev/doc/effective_go#package-names\n- https://go.dev/doc/effective_go#mixed-caps\n- https://go.dev/wiki/CodeReviewComments#initialisms\n- https://go.dev/wiki/CodeReviewComments#variable-names\n\nAvailable since\n 2019.1, non-default\n\nOptions\n initialisms\n", +- "URL": "https://staticcheck.dev/docs/checks/#ST1003", +- "Default": false +- }, +- { +- "Name": "ST1005", +- "Doc": "Incorrectly formatted error string\n\nError strings follow a set of guidelines to ensure uniformity and good\ncomposability.\n\nQuoting Go Code Review Comments:\n\n\u003e Error strings should not be capitalized (unless beginning with\n\u003e proper nouns or acronyms) or end with punctuation, since they are\n\u003e usually printed following other context. 
That is, use\n\u003e fmt.Errorf(\"something bad\") not fmt.Errorf(\"Something bad\"), so\n\u003e that log.Printf(\"Reading %s: %v\", filename, err) formats without a\n\u003e spurious capital letter mid-message.\n\nAvailable since\n 2019.1\n", +- "URL": "https://staticcheck.dev/docs/checks/#ST1005", +- "Default": false +- }, +- { +- "Name": "ST1006", +- "Doc": "Poorly chosen receiver name\n\nQuoting Go Code Review Comments:\n\n\u003e The name of a method's receiver should be a reflection of its\n\u003e identity; often a one or two letter abbreviation of its type\n\u003e suffices (such as \"c\" or \"cl\" for \"Client\"). Don't use generic\n\u003e names such as \"me\", \"this\" or \"self\", identifiers typical of\n\u003e object-oriented languages that place more emphasis on methods as\n\u003e opposed to functions. The name need not be as descriptive as that\n\u003e of a method argument, as its role is obvious and serves no\n\u003e documentary purpose. It can be very short as it will appear on\n\u003e almost every line of every method of the type; familiarity admits\n\u003e brevity. Be consistent, too: if you call the receiver \"c\" in one\n\u003e method, don't call it \"cl\" in another.\n\nAvailable since\n 2019.1\n", +- "URL": "https://staticcheck.dev/docs/checks/#ST1006", +- "Default": false +- }, +- { +- "Name": "ST1008", +- "Doc": "A function's error value should be its last return value\n\nA function's error value should be its last return value.\n\nAvailable since\n 2019.1\n", +- "URL": "https://staticcheck.dev/docs/checks/#ST1008", +- "Default": false +- }, +- { +- "Name": "ST1011", +- "Doc": "Poorly chosen name for variable of type time.Duration\n\ntime.Duration values represent an amount of time, which is represented\nas a count of nanoseconds. An expression like 5 * time.Microsecond\nyields the value 5000. It is therefore not appropriate to suffix a\nvariable of type time.Duration with any time unit, such as Msec or\nMilli.\n\nAvailable since\n 2019.1\n", +- "URL": "https://staticcheck.dev/docs/checks/#ST1011", +- "Default": false +- }, +- { +- "Name": "ST1012", +- "Doc": "Poorly chosen name for error variable\n\nError variables that are part of an API should be called errFoo or\nErrFoo.\n\nAvailable since\n 2019.1\n", +- "URL": "https://staticcheck.dev/docs/checks/#ST1012", +- "Default": false +- }, +- { +- "Name": "ST1013", +- "Doc": "Should use constants for HTTP error codes, not magic numbers\n\nHTTP has a tremendous number of status codes. While some of those are\nwell known (200, 400, 404, 500), most of them are not. The net/http\npackage provides constants for all status codes that are part of the\nvarious specifications. 
It is recommended to use these constants\ninstead of hard-coding magic numbers, to vastly improve the\nreadability of your code.\n\nAvailable since\n 2019.1\n\nOptions\n http_status_code_whitelist\n", +- "URL": "https://staticcheck.dev/docs/checks/#ST1013", +- "Default": false +- }, +- { +- "Name": "ST1015", +- "Doc": "A switch's default case should be the first or last case\n\nAvailable since\n 2019.1\n", +- "URL": "https://staticcheck.dev/docs/checks/#ST1015", +- "Default": false +- }, +- { +- "Name": "ST1016", +- "Doc": "Use consistent method receiver names\n\nAvailable since\n 2019.1, non-default\n", +- "URL": "https://staticcheck.dev/docs/checks/#ST1016", +- "Default": false +- }, +- { +- "Name": "ST1017", +- "Doc": "Don't use Yoda conditions\n\nYoda conditions are conditions of the kind 'if 42 == x', where the\nliteral is on the left side of the comparison. These are a common\nidiom in languages in which assignment is an expression, to avoid bugs\nof the kind 'if (x = 42)'. In Go, which doesn't allow for this kind of\nbug, we prefer the more idiomatic 'if x == 42'.\n\nAvailable since\n 2019.2\n", +- "URL": "https://staticcheck.dev/docs/checks/#ST1017", +- "Default": false +- }, +- { +- "Name": "ST1018", +- "Doc": "Avoid zero-width and control characters in string literals\n\nAvailable since\n 2019.2\n", +- "URL": "https://staticcheck.dev/docs/checks/#ST1018", +- "Default": false +- }, +- { +- "Name": "ST1019", +- "Doc": "Importing the same package multiple times\n\nGo allows importing the same package multiple times, as long as\ndifferent import aliases are being used. That is, the following\nbit of code is valid:\n\n import (\n \"fmt\"\n fumpt \"fmt\"\n format \"fmt\"\n _ \"fmt\"\n )\n\nHowever, this is very rarely done on purpose. Usually, it is a\nsign of code that got refactored, accidentally adding duplicate\nimport statements. It is also a rarely known feature, which may\ncontribute to confusion.\n\nDo note that sometimes, this feature may be used\nintentionally (see for example\nhttps://github.com/golang/go/commit/3409ce39bfd7584523b7a8c150a310cea92d879d)\n– if you want to allow this pattern in your code base, you're\nadvised to disable this check.\n\nAvailable since\n 2020.1\n", +- "URL": "https://staticcheck.dev/docs/checks/#ST1019", +- "Default": false +- }, +- { +- "Name": "ST1020", +- "Doc": "The documentation of an exported function should start with the function's name\n\nDoc comments work best as complete sentences, which\nallow a wide variety of automated presentations. The first sentence\nshould be a one-sentence summary that starts with the name being\ndeclared.\n\nIf every doc comment begins with the name of the item it describes,\nyou can use the doc subcommand of the go tool and run the output\nthrough grep.\n\nSee https://go.dev/doc/effective_go#commentary for more\ninformation on how to write good documentation.\n\nAvailable since\n 2020.1, non-default\n", +- "URL": "https://staticcheck.dev/docs/checks/#ST1020", +- "Default": false +- }, +- { +- "Name": "ST1021", +- "Doc": "The documentation of an exported type should start with type's name\n\nDoc comments work best as complete sentences, which\nallow a wide variety of automated presentations. 
The first sentence\nshould be a one-sentence summary that starts with the name being\ndeclared.\n\nIf every doc comment begins with the name of the item it describes,\nyou can use the doc subcommand of the go tool and run the output\nthrough grep.\n\nSee https://go.dev/doc/effective_go#commentary for more\ninformation on how to write good documentation.\n\nAvailable since\n 2020.1, non-default\n", +- "URL": "https://staticcheck.dev/docs/checks/#ST1021", +- "Default": false +- }, +- { +- "Name": "ST1022", +- "Doc": "The documentation of an exported variable or constant should start with variable's name\n\nDoc comments work best as complete sentences, which\nallow a wide variety of automated presentations. The first sentence\nshould be a one-sentence summary that starts with the name being\ndeclared.\n\nIf every doc comment begins with the name of the item it describes,\nyou can use the doc subcommand of the go tool and run the output\nthrough grep.\n\nSee https://go.dev/doc/effective_go#commentary for more\ninformation on how to write good documentation.\n\nAvailable since\n 2020.1, non-default\n", +- "URL": "https://staticcheck.dev/docs/checks/#ST1022", +- "Default": false +- }, +- { +- "Name": "ST1023", +- "Doc": "Redundant type in variable declaration\n\nAvailable since\n 2021.1, non-default\n", +- "URL": "https://staticcheck.dev/docs/checks/#", +- "Default": false +- }, +- { +- "Name": "any", +- "Doc": "replace interface{} with any\n\nThe any analyzer suggests replacing uses of the empty interface type,\n`interface{}`, with the `any` alias, which was introduced in Go 1.18.\nThis is a purely stylistic change that makes code more readable.", +- "URL": "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/modernize#any", +- "Default": true +- }, +- { +- "Name": "appendclipped", +- "Doc": "simplify append chains using slices.Concat\n\nThe appendclipped analyzer suggests replacing chains of append calls with a\nsingle call to slices.Concat, which was added in Go 1.21. For example,\nappend(append(s, s1...), s2...) would be simplified to slices.Concat(s, s1, s2).\n\nIn the simple case of appending to a newly allocated slice, such as\nappend([]T(nil), s...), the analyzer suggests the more concise slices.Clone(s).\nFor byte slices, it will prefer bytes.Clone if the \"bytes\" package is\nalready imported.\n\nThis fix is only applied when the base of the append tower is a\n\"clipped\" slice, meaning its length and capacity are equal (e.g.\nx[:0:0] or []T{}). 
This is to avoid changing program behavior by\neliminating intended side effects on the base slice's underlying\narray.\n\nThis analyzer is currently disabled by default as the\ntransformation does not preserve the nilness of the base slice in\nall cases; see https://go.dev/issue/73557.", +- "URL": "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/modernize#appendclipped", +- "Default": false +- }, +- { +- "Name": "appends", +- "Doc": "check for missing values after append\n\nThis checker reports calls to append that pass\nno values to be appended to the slice.\n\n\ts := []string{\"a\", \"b\", \"c\"}\n\t_ = append(s)\n\nSuch calls are always no-ops and often indicate an\nunderlying mistake.", +- "URL": "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/appends", +- "Default": true +- }, +- { +- "Name": "asmdecl", +- "Doc": "report mismatches between assembly files and Go declarations", +- "URL": "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/asmdecl", +- "Default": true +- }, +- { +- "Name": "assign", +- "Doc": "check for useless assignments\n\nThis checker reports assignments of the form x = x or a[i] = a[i].\nThese are almost always useless, and even when they aren't they are\nusually a mistake.", +- "URL": "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/assign", +- "Default": true +- }, +- { +- "Name": "atomic", +- "Doc": "check for common mistakes using the sync/atomic package\n\nThe atomic checker looks for assignment statements of the form:\n\n\tx = atomic.AddUint64(\u0026x, 1)\n\nwhich are not atomic.", +- "URL": "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/atomic", +- "Default": true +- }, +- { +- "Name": "atomicalign", +- "Doc": "check for non-64-bits-aligned arguments to sync/atomic functions", +- "URL": "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/atomicalign", +- "Default": true +- }, +- { +- "Name": "bloop", +- "Doc": "replace for-range over b.N with b.Loop\n\nThe bloop analyzer suggests replacing benchmark loops of the form\n`for i := 0; i \u003c b.N; i++` or `for range b.N` with the more modern\n`for b.Loop()`, which was added in Go 1.24.\n\nThis change makes benchmark code more readable and also removes the need for\nmanual timer control, so any preceding calls to b.StartTimer, b.StopTimer,\nor b.ResetTimer within the same function will also be removed.\n\nCaveats: The b.Loop() method is designed to prevent the compiler from\noptimizing away the benchmark loop, which can occasionally result in\nslower execution due to increased allocations in some specific cases.", +- "URL": "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/modernize#bloop", +- "Default": true +- }, +- { +- "Name": "bools", +- "Doc": "check for common mistakes involving boolean operators", +- "URL": "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/bools", +- "Default": true +- }, +- { +- "Name": "buildtag", +- "Doc": "check //go:build and // +build directives", +- "URL": "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/buildtag", +- "Default": true +- }, +- { +- "Name": "cgocall", +- "Doc": "detect some violations of the cgo pointer passing rules\n\nCheck for invalid cgo pointer passing.\nThis looks for code that uses cgo to call C code passing values\nwhose types are almost always invalid according to the cgo pointer\nsharing rules.\nSpecifically, it warns about attempts to pass a Go chan, map, func,\nor slice to C, either directly, or via a pointer, array, or struct.", +- "URL": 
"https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/cgocall", +- "Default": true +- }, +- { +- "Name": "composites", +- "Doc": "check for unkeyed composite literals\n\nThis analyzer reports a diagnostic for composite literals of struct\ntypes imported from another package that do not use the field-keyed\nsyntax. Such literals are fragile because the addition of a new field\n(even if unexported) to the struct will cause compilation to fail.\n\nAs an example,\n\n\terr = \u0026net.DNSConfigError{err}\n\nshould be replaced by:\n\n\terr = \u0026net.DNSConfigError{Err: err}\n", +- "URL": "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/composite", +- "Default": true +- }, +- { +- "Name": "copylocks", +- "Doc": "check for locks erroneously passed by value\n\nInadvertently copying a value containing a lock, such as sync.Mutex or\nsync.WaitGroup, may cause both copies to malfunction. Generally such\nvalues should be referred to through a pointer.", +- "URL": "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/copylock", +- "Default": true +- }, +- { +- "Name": "deepequalerrors", +- "Doc": "check for calls of reflect.DeepEqual on error values\n\nThe deepequalerrors checker looks for calls of the form:\n\n reflect.DeepEqual(err1, err2)\n\nwhere err1 and err2 are errors. Using reflect.DeepEqual to compare\nerrors is discouraged.", +- "URL": "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/deepequalerrors", +- "Default": true +- }, +- { +- "Name": "defers", +- "Doc": "report common mistakes in defer statements\n\nThe defers analyzer reports a diagnostic when a defer statement would\nresult in a non-deferred call to time.Since, as experience has shown\nthat this is nearly always a mistake.\n\nFor example:\n\n\tstart := time.Now()\n\t...\n\tdefer recordLatency(time.Since(start)) // error: call to time.Since is not deferred\n\nThe correct code is:\n\n\tdefer func() { recordLatency(time.Since(start)) }()", +- "URL": "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/defers", +- "Default": true +- }, +- { +- "Name": "deprecated", +- "Doc": "check for use of deprecated identifiers\n\nThe deprecated analyzer looks for deprecated symbols and package\nimports.\n\nSee https://go.dev/wiki/Deprecated to learn about Go's convention\nfor documenting and signaling deprecated identifiers.", +- "URL": "https://pkg.go.dev/golang.org/x/tools/gopls/internal/analysis/deprecated", +- "Default": true +- }, +- { +- "Name": "directive", +- "Doc": "check Go toolchain directives such as //go:debug\n\nThis analyzer checks for problems with known Go toolchain directives\nin all Go source files in a package directory, even those excluded by\n//go:build constraints, and all non-Go source files too.\n\nFor //go:debug (see https://go.dev/doc/godebug), the analyzer checks\nthat the directives are placed only in Go source files, only above the\npackage comment, and only in package main or *_test.go files.\n\nSupport for other known directives may be added in the future.\n\nThis analyzer does not check //go:build, which is handled by the\nbuildtag analyzer.\n", +- "URL": "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/directive", +- "Default": true +- }, +- { +- "Name": "embed", +- "Doc": "check //go:embed directive usage\n\nThis analyzer checks that the embed package is imported if //go:embed\ndirectives are present, providing a suggested fix to add the import if\nit is missing.\n\nThis analyzer also checks that //go:embed directives precede the\ndeclaration of a single variable.", +- 
"URL": "https://pkg.go.dev/golang.org/x/tools/gopls/internal/analysis/embeddirective", +- "Default": true +- }, +- { +- "Name": "errorsas", +- "Doc": "report passing non-pointer or non-error values to errors.As\n\nThe errorsas analyzer reports calls to errors.As where the type\nof the second argument is not a pointer to a type implementing error.", +- "URL": "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/errorsas", +- "Default": true +- }, +- { +- "Name": "errorsastype", +- "Doc": "replace errors.As with errors.AsType[T]\n\nThis analyzer suggests fixes to simplify uses of [errors.As] of\nthis form:\n\n\tvar myerr *MyErr\n\tif errors.As(err, \u0026myerr) {\n\t\thandle(myerr)\n\t}\n\nby using the less error-prone generic [errors.AsType] function,\nintroduced in Go 1.26:\n\n\tif myerr, ok := errors.AsType[*MyErr](err); ok {\n\t\thandle(myerr)\n\t}\n\nThe fix is only offered if the var declaration has the form shown and\nthere are no uses of myerr outside the if statement.", +- "URL": "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/modernize#errorsastype", +- "Default": true +- }, +- { +- "Name": "fillreturns", +- "Doc": "suggest fixes for errors due to an incorrect number of return values\n\nThis checker provides suggested fixes for type errors of the\ntype \"wrong number of return values (want %d, got %d)\". For example:\n\n\tfunc m() (int, string, *bool, error) {\n\t\treturn\n\t}\n\nwill turn into\n\n\tfunc m() (int, string, *bool, error) {\n\t\treturn 0, \"\", nil, nil\n\t}\n\nThis functionality is similar to https://github.com/sqs/goreturns.", +- "URL": "https://pkg.go.dev/golang.org/x/tools/gopls/internal/analysis/fillreturns", +- "Default": true +- }, +- { +- "Name": "fmtappendf", +- "Doc": "replace []byte(fmt.Sprintf) with fmt.Appendf\n\nThe fmtappendf analyzer suggests replacing `[]byte(fmt.Sprintf(...))` with\n`fmt.Appendf(nil, ...)`. This avoids the intermediate allocation of a string\nby Sprintf, making the code more efficient. The suggestion also applies to\nfmt.Sprint and fmt.Sprintln.", +- "URL": "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/modernize#fmtappendf", +- "Default": true +- }, +- { +- "Name": "forvar", +- "Doc": "remove redundant re-declaration of loop variables\n\nThe forvar analyzer removes unnecessary shadowing of loop variables.\nBefore Go 1.22, it was common to write `for _, x := range s { x := x ... }`\nto create a fresh variable for each iteration. Go 1.22 changed the semantics\nof `for` loops, making this pattern redundant. 
This analyzer removes the\nunnecessary `x := x` statement.\n\nThis fix only applies to `range` loops.", +- "URL": "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/modernize#forvar", +- "Default": true +- }, +- { +- "Name": "framepointer", +- "Doc": "report assembly that clobbers the frame pointer before saving it", +- "URL": "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/framepointer", +- "Default": true +- }, +- { +- "Name": "hostport", +- "Doc": "check format of addresses passed to net.Dial\n\nThis analyzer flags code that produce network address strings using\nfmt.Sprintf, as in this example:\n\n addr := fmt.Sprintf(\"%s:%d\", host, 12345) // \"will not work with IPv6\"\n ...\n conn, err := net.Dial(\"tcp\", addr) // \"when passed to dial here\"\n\nThe analyzer suggests a fix to use the correct approach, a call to\nnet.JoinHostPort:\n\n addr := net.JoinHostPort(host, \"12345\")\n ...\n conn, err := net.Dial(\"tcp\", addr)\n\nA similar diagnostic and fix are produced for a format string of \"%s:%s\".\n", +- "URL": "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/hostport", +- "Default": true +- }, +- { +- "Name": "httpresponse", +- "Doc": "check for mistakes using HTTP responses\n\nA common mistake when using the net/http package is to defer a function\ncall to close the http.Response Body before checking the error that\ndetermines whether the response is valid:\n\n\tresp, err := http.Head(url)\n\tdefer resp.Body.Close()\n\tif err != nil {\n\t\tlog.Fatal(err)\n\t}\n\t// (defer statement belongs here)\n\nThis checker helps uncover latent nil dereference bugs by reporting a\ndiagnostic for such mistakes.", +- "URL": "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/httpresponse", +- "Default": true +- }, +- { +- "Name": "ifaceassert", +- "Doc": "detect impossible interface-to-interface type assertions\n\nThis checker flags type assertions v.(T) and corresponding type-switch cases\nin which the static type V of v is an interface that cannot possibly implement\nthe target interface T. This occurs when V and T contain methods with the same\nname but different signatures. 
Example:\n\n\tvar v interface {\n\t\tRead()\n\t}\n\t_ = v.(io.Reader)\n\nThe Read method in v has a different signature than the Read method in\nio.Reader, so this assertion cannot succeed.", +- "URL": "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/ifaceassert", +- "Default": true +- }, +- { +- "Name": "infertypeargs", +- "Doc": "check for unnecessary type arguments in call expressions\n\nExplicit type arguments may be omitted from call expressions if they can be\ninferred from function arguments, or from other type arguments:\n\n\tfunc f[T any](T) {}\n\t\n\tfunc _() {\n\t\tf[string](\"foo\") // string could be inferred\n\t}\n", +- "URL": "https://pkg.go.dev/golang.org/x/tools/gopls/internal/analysis/infertypeargs", +- "Default": true +- }, +- { +- "Name": "inline", +- "Doc": "apply fixes based on 'go:fix inline' comment directives\n\nThe inline analyzer inlines functions and constants that are marked for inlining.\n\n## Functions\n\nGiven a function that is marked for inlining, like this one:\n\n\t//go:fix inline\n\tfunc Square(x int) int { return Pow(x, 2) }\n\nthis analyzer will recommend that calls to the function elsewhere, in the same\nor other packages, should be inlined.\n\nInlining can be used to move off of a deprecated function:\n\n\t// Deprecated: prefer Pow(x, 2).\n\t//go:fix inline\n\tfunc Square(x int) int { return Pow(x, 2) }\n\nIt can also be used to move off of an obsolete package,\nas when the import path has changed or a higher major version is available:\n\n\tpackage pkg\n\n\timport pkg2 \"pkg/v2\"\n\n\t//go:fix inline\n\tfunc F() { pkg2.F(nil) }\n\nReplacing a call pkg.F() by pkg2.F(nil) can have no effect on the program,\nso this mechanism provides a low-risk way to update large numbers of calls.\nWe recommend, where possible, expressing the old API in terms of the new one\nto enable automatic migration.\n\nThe inliner takes care to avoid behavior changes, even subtle ones,\nsuch as changes to the order in which argument expressions are\nevaluated. When it cannot safely eliminate all parameter variables,\nit may introduce a \"binding declaration\" of the form\n\n\tvar params = args\n\nto evaluate argument expressions in the correct order and bind them to\nparameter variables. Since the resulting code transformation may be\nstylistically suboptimal, such inlinings may be disabled by specifying\nthe -inline.allow_binding_decl=false flag to the analyzer driver.\n\n(In cases where it is not safe to \"reduce\" a call—that is, to replace\na call f(x) by the body of function f, suitably substituted—the\ninliner machinery is capable of replacing f by a function literal,\nfunc(){...}(). 
However, the inline analyzer discards all such\n\"literalizations\" unconditionally, again on grounds of style.)\n\n## Constants\n\nGiven a constant that is marked for inlining, like this one:\n\n\t//go:fix inline\n\tconst Ptr = Pointer\n\nthis analyzer will recommend that uses of Ptr should be replaced with Pointer.\n\nAs with functions, inlining can be used to replace deprecated constants and\nconstants in obsolete packages.\n\nA constant definition can be marked for inlining only if it refers to another\nnamed constant.\n\nThe \"//go:fix inline\" comment must appear before a single const declaration on its own,\nas above; before a const declaration that is part of a group, as in this case:\n\n\tconst (\n\t C = 1\n\t //go:fix inline\n\t Ptr = Pointer\n\t)\n\nor before a group, applying to every constant in the group:\n\n\t//go:fix inline\n\tconst (\n\t\tPtr = Pointer\n\t Val = Value\n\t)\n\nThe proposal https://go.dev/issue/32816 introduces the \"//go:fix inline\" directives.\n\nYou can use this command to apply inline fixes en masse:\n\n\t$ go run golang.org/x/tools/go/analysis/passes/inline/cmd/inline@latest -fix ./...", +- "URL": "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/inline", +- "Default": true +- }, +- { +- "Name": "loopclosure", +- "Doc": "check references to loop variables from within nested functions\n\nThis analyzer reports places where a function literal references the\niteration variable of an enclosing loop, and the loop calls the function\nin such a way (e.g. with go or defer) that it may outlive the loop\niteration and possibly observe the wrong value of the variable.\n\nNote: An iteration variable can only outlive a loop iteration in Go versions \u003c=1.21.\nIn Go 1.22 and later, the loop variable lifetimes changed to create a new\niteration variable per loop iteration. (See go.dev/issue/60078.)\n\nIn this example, all the deferred functions run after the loop has\ncompleted, so all observe the final value of v [\u003cgo1.22].\n\n\tfor _, v := range list {\n\t defer func() {\n\t use(v) // incorrect\n\t }()\n\t}\n\nOne fix is to create a new variable for each iteration of the loop:\n\n\tfor _, v := range list {\n\t v := v // new var per iteration\n\t defer func() {\n\t use(v) // ok\n\t }()\n\t}\n\nAfter Go version 1.22, the previous two for loops are equivalent\nand both are correct.\n\nThe next example uses a go statement and has a similar problem [\u003cgo1.22].\nIn addition, it has a data race because the loop updates v\nconcurrent with the goroutines accessing it.\n\n\tfor _, v := range elem {\n\t go func() {\n\t use(v) // incorrect, and a data race\n\t }()\n\t}\n\nA fix is the same as before. 
The checker also reports problems\nin goroutines started by golang.org/x/sync/errgroup.Group.\nA hard-to-spot variant of this form is common in parallel tests:\n\n\tfunc Test(t *testing.T) {\n\t for _, test := range tests {\n\t t.Run(test.name, func(t *testing.T) {\n\t t.Parallel()\n\t use(test) // incorrect, and a data race\n\t })\n\t }\n\t}\n\nThe t.Parallel() call causes the rest of the function to execute\nconcurrent with the loop [\u003cgo1.22].\n\nThe analyzer reports references only in the last statement,\nas it is not deep enough to understand the effects of subsequent\nstatements that might render the reference benign.\n(\"Last statement\" is defined recursively in compound\nstatements such as if, switch, and select.)\n\nSee: https://golang.org/doc/go_faq.html#closures_and_goroutines", +- "URL": "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/loopclosure", +- "Default": true +- }, +- { +- "Name": "lostcancel", +- "Doc": "check cancel func returned by context.WithCancel is called\n\nThe cancellation function returned by context.WithCancel, WithTimeout,\nWithDeadline and variants such as WithCancelCause must be called,\nor the new context will remain live until its parent context is cancelled.\n(The background context is never cancelled.)", +- "URL": "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/lostcancel", +- "Default": true +- }, +- { +- "Name": "maprange", +- "Doc": "checks for unnecessary calls to maps.Keys and maps.Values in range statements\n\nConsider a loop written like this:\n\n\tfor val := range maps.Values(m) {\n\t\tfmt.Println(val)\n\t}\n\nThis should instead be written without the call to maps.Values:\n\n\tfor _, val := range m {\n\t\tfmt.Println(val)\n\t}\n\ngolang.org/x/exp/maps returns slices for Keys/Values instead of iterators,\nbut unnecessary calls should similarly be removed:\n\n\tfor _, key := range maps.Keys(m) {\n\t\tfmt.Println(key)\n\t}\n\nshould be rewritten as:\n\n\tfor key := range m {\n\t\tfmt.Println(key)\n\t}", +- "URL": "https://pkg.go.dev/golang.org/x/tools/gopls/internal/analysis/maprange", +- "Default": true +- }, +- { +- "Name": "mapsloop", +- "Doc": "replace explicit loops over maps with calls to maps package\n\nThe mapsloop analyzer replaces loops of the form\n\n\tfor k, v := range x { m[k] = v }\n\nwith a single call to a function from the `maps` package, added in Go 1.23.\nDepending on the context, this could be `maps.Copy`, `maps.Insert`,\n`maps.Clone`, or `maps.Collect`.\n\nThe transformation to `maps.Clone` is applied conservatively, as it\npreserves the nilness of the source map, which may be a subtle change in\nbehavior if the original code did not handle a nil map in the same way.", +- "URL": "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/modernize#mapsloop", +- "Default": true +- }, +- { +- "Name": "minmax", +- "Doc": "replace if/else statements with calls to min or max\n\nThe minmax analyzer simplifies conditional assignments by suggesting the use\nof the built-in `min` and `max` functions, introduced in Go 1.21. 
For example,\n\n\tif a \u003c b { x = a } else { x = b }\n\nis replaced by\n\n\tx = min(a, b).\n\nThis analyzer avoids making suggestions for floating-point types,\nas the behavior of `min` and `max` with NaN values can differ from\nthe original if/else statement.", +- "URL": "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/modernize#minmax", +- "Default": true +- }, +- { +- "Name": "newexpr", +- "Doc": "simplify code by using go1.26's new(expr)\n\nThis analyzer finds declarations of functions of this form:\n\n\tfunc varOf(x int) *int { return \u0026x }\n\nand suggests a fix to turn them into inlinable wrappers around\ngo1.26's built-in new(expr) function:\n\n\tfunc varOf(x int) *int { return new(x) }\n\nIn addition, this analyzer suggests a fix for each call\nto one of the functions before it is transformed, so that\n\n\tuse(varOf(123))\n\nis replaced by:\n\n\tuse(new(123))\n\n(Wrapper functions such as varOf are common when working with Go\nserialization packages such as for JSON or protobuf, where pointers\nare often used to express optionality.)", +- "URL": "https://pkg.go.dev/golang.org/x/tools/gopls/internal/analysis/modernize#newexpr", +- "Default": true +- }, +- { +- "Name": "nilfunc", +- "Doc": "check for useless comparisons between functions and nil\n\nA useless comparison is one like f == nil as opposed to f() == nil.", +- "URL": "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/nilfunc", +- "Default": true +- }, +- { +- "Name": "nilness", +- "Doc": "check for redundant or impossible nil comparisons\n\nThe nilness checker inspects the control-flow graph of each function in\na package and reports nil pointer dereferences, degenerate nil\npointers, and panics with nil values. A degenerate comparison is of the form\nx==nil or x!=nil where x is statically known to be nil or non-nil. These are\noften a mistake, especially in control flow related to errors. Panics with nil\nvalues are checked because they are not detectable by\n\n\tif r := recover(); r != nil {\n\nThis check reports conditions such as:\n\n\tif f == nil { // impossible condition (f is a function)\n\t}\n\nand:\n\n\tp := \u0026v\n\t...\n\tif p != nil { // tautological condition\n\t}\n\nand:\n\n\tif p == nil {\n\t\tprint(*p) // nil dereference\n\t}\n\nand:\n\n\tif p == nil {\n\t\tpanic(p)\n\t}\n\nSometimes the control flow may be quite complex, making bugs hard\nto spot. In the example below, the err.Error expression is\nguaranteed to panic because, after the first return, err must be\nnil. The intervening loop is just a distraction.\n\n\t...\n\terr := g.Wait()\n\tif err != nil {\n\t\treturn err\n\t}\n\tpartialSuccess := false\n\tfor _, err := range errs {\n\t\tif err == nil {\n\t\t\tpartialSuccess = true\n\t\t\tbreak\n\t\t}\n\t}\n\tif partialSuccess {\n\t\treportStatus(StatusMessage{\n\t\t\tCode: code.ERROR,\n\t\t\tDetail: err.Error(), // \"nil dereference in dynamic method call\"\n\t\t})\n\t\treturn nil\n\t}\n\n...", +- "URL": "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/nilness", +- "Default": true +- }, +- { +- "Name": "nonewvars", +- "Doc": "suggested fixes for \"no new vars on left side of :=\"\n\nThis checker provides suggested fixes for type errors of the\ntype \"no new vars on left side of :=\". 
For example:\n\n\tz := 1\n\tz := 2\n\nwill turn into\n\n\tz := 1\n\tz = 2", +- "URL": "https://pkg.go.dev/golang.org/x/tools/gopls/internal/analysis/nonewvars", +- "Default": true +- }, +- { +- "Name": "noresultvalues", +- "Doc": "suggested fixes for unexpected return values\n\nThis checker provides suggested fixes for type errors of the\ntype \"no result values expected\" or \"too many return values\".\nFor example:\n\n\tfunc z() { return nil }\n\nwill turn into\n\n\tfunc z() { return }", +- "URL": "https://pkg.go.dev/golang.org/x/tools/gopls/internal/analysis/noresultvalues", +- "Default": true +- }, +- { +- "Name": "omitzero", +- "Doc": "suggest replacing omitempty with omitzero for struct fields\n\nThe omitzero analyzer identifies uses of the `omitempty` JSON struct tag on\nfields that are themselves structs. The `omitempty` tag has no effect on\nstruct-typed fields. The analyzer offers two suggestions: either remove the\ntag, or replace it with `omitzero` (added in Go 1.24), which correctly\nomits the field if the struct value is zero.\n\nReplacing `omitempty` with `omitzero` is a change in behavior. The\noriginal code would always encode the struct field, whereas the\nmodified code will omit it if it is a zero-value.", +- "URL": "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/modernize#omitzero", +- "Default": true +- }, +- { +- "Name": "printf", +- "Doc": "check consistency of Printf format strings and arguments\n\nThe check applies to calls of the formatting functions such as\n[fmt.Printf] and [fmt.Sprintf], as well as any detected wrappers of\nthose functions such as [log.Printf]. It reports a variety of\nmistakes such as syntax errors in the format string and mismatches\n(of number and type) between the verbs and their arguments.\n\nSee the documentation of the fmt package for the complete set of\nformat operators and their operand types.", +- "URL": "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/printf", +- "Default": true +- }, +- { +- "Name": "rangeint", +- "Doc": "replace 3-clause for loops with for-range over integers\n\nThe rangeint analyzer suggests replacing traditional for loops such\nas\n\n\tfor i := 0; i \u003c n; i++ { ... }\n\nwith the more idiomatic Go 1.22 style:\n\n\tfor i := range n { ... }\n\nThis transformation is applied only if (a) the loop variable is not\nmodified within the loop body and (b) the loop's limit expression\nis not modified within the loop, as `for range` evaluates its\noperand only once.", +- "URL": "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/modernize#rangeint", +- "Default": true +- }, +- { +- "Name": "recursiveiter", +- "Doc": "check for inefficient recursive iterators\n\nThis analyzer reports when a function that returns an iterator\n(iter.Seq or iter.Seq2) calls itself as the operand of a range\nstatement, as this is inefficient.\n\nWhen implementing an iterator (e.g. 
iter.Seq[T]) for a recursive\ndata type such as a tree or linked list, it is tempting to\nrecursively range over the iterator for each child element.\n\nHere's an example of a naive iterator over a binary tree:\n\n\ttype tree struct {\n\t\tvalue int\n\t\tleft, right *tree\n\t}\n\n\tfunc (t *tree) All() iter.Seq[int] {\n\t\treturn func(yield func(int) bool) {\n\t\t\tif t != nil {\n\t\t\t\tfor elem := range t.left.All() { // \"inefficient recursive iterator\"\n\t\t\t\t\tif !yield(elem) {\n\t\t\t\t\t\treturn\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t\tif !yield(t.value) {\n\t\t\t\t\treturn\n\t\t\t\t}\n\t\t\t\tfor elem := range t.right.All() { // \"inefficient recursive iterator\"\n\t\t\t\t\tif !yield(elem) {\n\t\t\t\t\t\treturn\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n\nThough it correctly enumerates the elements of the tree, it hides a\nsignificant performance problem--two, in fact. Consider a balanced\ntree of N nodes. Iterating the root node will cause All to be\ncalled once on every node of the tree. This results in a chain of\nnested active range-over-func statements when yield(t.value) is\ncalled on a leaf node.\n\nThe first performance problem is that each range-over-func\nstatement must typically heap-allocate a variable, so iteration of\nthe tree allocates as many variables as there are elements in the\ntree, for a total of O(N) allocations, all unnecessary.\n\nThe second problem is that each call to yield for a leaf of the\ntree causes each of the enclosing range loops to receive a value,\nwhich they then immediately pass on to their respective yield\nfunction. This results in a chain of log(N) dynamic yield calls per\nelement, a total of O(N*log N) dynamic calls overall, when only\nO(N) are necessary.\n\nA better implementation strategy for recursive iterators is to\nfirst define the \"every\" operator for your recursive data type,\nwhere every(f) reports whether an arbitrary predicate f(x) is true\nfor every element x in the data type. For our tree, the every\nfunction would be:\n\n\tfunc (t *tree) every(f func(int) bool) bool {\n\t\treturn t == nil ||\n\t\t\tt.left.every(f) \u0026\u0026 f(t.value) \u0026\u0026 t.right.every(f)\n\t}\n\nFor example, this use of the every operator prints whether every\nelement in the tree is an even number:\n\n\teven := func(x int) bool { return x\u00261 == 0 }\n\tprintln(t.every(even))\n\nThen the iterator can be simply expressed as a trivial wrapper\naround the every operator:\n\n\tfunc (t *tree) All() iter.Seq[int] {\n\t\treturn func(yield func(int) bool) {\n\t\t\t_ = t.every(yield)\n\t\t}\n\t}\n\nIn effect, tree.All computes whether yield returns true for each\nelement, short-circuiting if it ever returns false, then discards\nthe final boolean result.\n\nThis has much better performance characteristics: it makes one\ndynamic call per element of the tree, and it doesn't heap-allocate\nanything. 
It is also clearer.", +- "URL": "https://pkg.go.dev/golang.org/x/tools/gopls/internal/analysis/recursiveiter", +- "Default": true +- }, +- { +- "Name": "reflecttypefor", +- "Doc": "replace reflect.TypeOf(x) with TypeFor[T]()\n\nThis analyzer suggests fixes to replace uses of reflect.TypeOf(x) with\nreflect.TypeFor, introduced in go1.22, when the desired runtime type\nis known at compile time, for example:\n\n\treflect.TypeOf(uint32(0)) -\u003e reflect.TypeFor[uint32]()\n\treflect.TypeOf((*ast.File)(nil)) -\u003e reflect.TypeFor[*ast.File]()\n\nIt also offers a fix to simplify the construction below, which uses\nreflect.TypeOf to return the runtime type for an interface type,\n\n\treflect.TypeOf((*io.Reader)(nil)).Elem()\n\nto:\n\n\treflect.TypeFor[io.Reader]()\n\nNo fix is offered in cases when the runtime type is dynamic, such as:\n\n\tvar r io.Reader = ...\n\treflect.TypeOf(r)\n\nor when the operand has potential side effects.", +- "URL": "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/modernize#reflecttypefor", +- "Default": true +- }, +- { +- "Name": "shadow", +- "Doc": "check for possible unintended shadowing of variables\n\nThis analyzer check for shadowed variables.\nA shadowed variable is a variable declared in an inner scope\nwith the same name and type as a variable in an outer scope,\nand where the outer variable is mentioned after the inner one\nis declared.\n\n(This definition can be refined; the module generates too many\nfalse positives and is not yet enabled by default.)\n\nFor example:\n\n\tfunc BadRead(f *os.File, buf []byte) error {\n\t\tvar err error\n\t\tfor {\n\t\t\tn, err := f.Read(buf) // shadows the function variable 'err'\n\t\t\tif err != nil {\n\t\t\t\tbreak // causes return of wrong value\n\t\t\t}\n\t\t\tfoo(buf)\n\t\t}\n\t\treturn err\n\t}", +- "URL": "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/shadow", +- "Default": false +- }, +- { +- "Name": "shift", +- "Doc": "check for shifts that equal or exceed the width of the integer", +- "URL": "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/shift", +- "Default": true +- }, +- { +- "Name": "sigchanyzer", +- "Doc": "check for unbuffered channel of os.Signal\n\nThis checker reports call expression of the form\n\n\tsignal.Notify(c \u003c-chan os.Signal, sig ...os.Signal),\n\nwhere c is an unbuffered channel, which can be at risk of missing the signal.", +- "URL": "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/sigchanyzer", +- "Default": true +- }, +- { +- "Name": "simplifycompositelit", +- "Doc": "check for composite literal simplifications\n\nAn array, slice, or map composite literal of the form:\n\n\t[]T{T{}, T{}}\n\nwill be simplified to:\n\n\t[]T{{}, {}}\n\nThis is one of the simplifications that \"gofmt -s\" applies.\n\nThis analyzer ignores generated code.", +- "URL": "https://pkg.go.dev/golang.org/x/tools/gopls/internal/analysis/simplifycompositelit", +- "Default": true +- }, +- { +- "Name": "simplifyrange", +- "Doc": "check for range statement simplifications\n\nA range of the form:\n\n\tfor x, _ = range v {...}\n\nwill be simplified to:\n\n\tfor x = range v {...}\n\nA range of the form:\n\n\tfor _ = range v {...}\n\nwill be simplified to:\n\n\tfor range v {...}\n\nThis is one of the simplifications that \"gofmt -s\" applies.\n\nThis analyzer ignores generated code.", +- "URL": "https://pkg.go.dev/golang.org/x/tools/gopls/internal/analysis/simplifyrange", +- "Default": true +- }, +- { +- "Name": "simplifyslice", +- "Doc": "check for slice simplifications\n\nA slice 
expression of the form:\n\n\ts[a:len(s)]\n\nwill be simplified to:\n\n\ts[a:]\n\nThis is one of the simplifications that \"gofmt -s\" applies.\n\nThis analyzer ignores generated code.", +- "URL": "https://pkg.go.dev/golang.org/x/tools/gopls/internal/analysis/simplifyslice", +- "Default": true +- }, +- { +- "Name": "slicescontains", +- "Doc": "replace loops with slices.Contains or slices.ContainsFunc\n\nThe slicescontains analyzer simplifies loops that check for the existence of\nan element in a slice. It replaces them with calls to `slices.Contains` or\n`slices.ContainsFunc`, which were added in Go 1.21.\n\nIf the expression for the target element has side effects, this\ntransformation will cause those effects to occur only once, not\nonce per tested slice element.", +- "URL": "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/modernize#slicescontains", +- "Default": true +- }, +- { +- "Name": "slicesdelete", +- "Doc": "replace append-based slice deletion with slices.Delete\n\nThe slicesdelete analyzer suggests replacing the idiom\n\n\ts = append(s[:i], s[j:]...)\n\nwith the more explicit\n\n\ts = slices.Delete(s, i, j)\n\nintroduced in Go 1.21.\n\nThis analyzer is disabled by default. The `slices.Delete` function\nzeros the elements between the new length and the old length of the\nslice to prevent memory leaks, which is a subtle difference in\nbehavior compared to the append-based idiom; see https://go.dev/issue/73686.", +- "URL": "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/modernize#slicesdelete", +- "Default": false +- }, +- { +- "Name": "slicessort", +- "Doc": "replace sort.Slice with slices.Sort for basic types\n\nThe slicessort analyzer simplifies sorting slices of basic ordered\ntypes. It replaces\n\n\tsort.Slice(s, func(i, j int) bool { return s[i] \u003c s[j] })\n\nwith the simpler `slices.Sort(s)`, which was added in Go 1.21.", +- "URL": "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/modernize#slicessort", +- "Default": true +- }, +- { +- "Name": "slog", +- "Doc": "check for invalid structured logging calls\n\nThe slog checker looks for calls to functions from the log/slog\npackage that take alternating key-value pairs. It reports calls\nwhere an argument in a key position is neither a string nor a\nslog.Attr, and where a final key is missing its value.\nFor example,it would report\n\n\tslog.Warn(\"message\", 11, \"k\") // slog.Warn arg \"11\" should be a string or a slog.Attr\n\nand\n\n\tslog.Info(\"message\", \"k1\", v1, \"k2\") // call to slog.Info missing a final value", +- "URL": "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/slog", +- "Default": true +- }, +- { +- "Name": "sortslice", +- "Doc": "check the argument type of sort.Slice\n\nsort.Slice requires an argument of a slice type. 
Check that\nthe interface{} value passed to sort.Slice is actually a slice.", +- "URL": "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/sortslice", +- "Default": true +- }, +- { +- "Name": "stditerators", +- "Doc": "use iterators instead of Len/At-style APIs\n\nThis analyzer suggests a fix to replace each loop of the form:\n\n\tfor i := 0; i \u003c x.Len(); i++ {\n\t\tuse(x.At(i))\n\t}\n\nor its \"for elem := range x.Len()\" equivalent by a range loop over an\niterator offered by the same data type:\n\n\tfor elem := range x.All() {\n\t\tuse(x.At(i)\n\t}\n\nwhere x is one of various well-known types in the standard library.", +- "URL": "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/modernize#stditerators", +- "Default": true +- }, +- { +- "Name": "stdmethods", +- "Doc": "check signature of methods of well-known interfaces\n\nSometimes a type may be intended to satisfy an interface but may fail to\ndo so because of a mistake in its method signature.\nFor example, the result of this WriteTo method should be (int64, error),\nnot error, to satisfy io.WriterTo:\n\n\ttype myWriterTo struct{...}\n\tfunc (myWriterTo) WriteTo(w io.Writer) error { ... }\n\nThis check ensures that each method whose name matches one of several\nwell-known interface methods from the standard library has the correct\nsignature for that interface.\n\nChecked method names include:\n\n\tFormat GobEncode GobDecode MarshalJSON MarshalXML\n\tPeek ReadByte ReadFrom ReadRune Scan Seek\n\tUnmarshalJSON UnreadByte UnreadRune WriteByte\n\tWriteTo", +- "URL": "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/stdmethods", +- "Default": true +- }, +- { +- "Name": "stdversion", +- "Doc": "report uses of too-new standard library symbols\n\nThe stdversion analyzer reports references to symbols in the standard\nlibrary that were introduced by a Go release higher than the one in\nforce in the referring file. (Recall that the file's Go version is\ndefined by the 'go' directive its module's go.mod file, or by a\n\"//go:build go1.X\" build tag at the top of the file.)\n\nThe analyzer does not report a diagnostic for a reference to a \"too\nnew\" field or method of a type that is itself \"too new\", as this may\nhave false positives, for example if fields or methods are accessed\nthrough a type alias that is guarded by a Go version constraint.\n", +- "URL": "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/stdversion", +- "Default": true +- }, +- { +- "Name": "stringintconv", +- "Doc": "check for string(int) conversions\n\nThis checker flags conversions of the form string(x) where x is an integer\n(but not byte or rune) type. Such conversions are discouraged because they\nreturn the UTF-8 representation of the Unicode code point x, and not a decimal\nstring representation of x as one might expect. Furthermore, if x denotes an\ninvalid code point, the conversion cannot be statically rejected.\n\nFor conversions that intend on using the code point, consider replacing them\nwith string(rune(x)). 
Otherwise, strconv.Itoa and its equivalents return the\nstring representation of the value in the desired base.", +- "URL": "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/stringintconv", +- "Default": true +- }, +- { +- "Name": "stringsbuilder", +- "Doc": "replace += with strings.Builder\n\nThis analyzer replaces repeated string += string concatenation\noperations with calls to Go 1.10's strings.Builder.\n\nFor example:\n\n\tvar s = \"[\"\n\tfor x := range seq {\n\t\ts += x\n\t\ts += \".\"\n\t}\n\ts += \"]\"\n\tuse(s)\n\nis replaced by:\n\n\tvar s strings.Builder\n\ts.WriteString(\"[\")\n\tfor x := range seq {\n\t\ts.WriteString(x)\n\t\ts.WriteString(\".\")\n\t}\n\ts.WriteString(\"]\")\n\tuse(s.String())\n\nThis avoids quadratic memory allocation and improves performance.\n\nThe analyzer requires that all references to s except the final one\nare += operations. To avoid warning about trivial cases, at least one\nmust appear within a loop. The variable s must be a local\nvariable, not a global or parameter.\n\nThe sole use of the finished string must be the last reference to the\nvariable s. (It may appear within an intervening loop or function literal,\nsince even s.String() is called repeatedly, it does not allocate memory.)", +- "URL": "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/modernize#stringbuilder", +- "Default": true +- }, +- { +- "Name": "stringscutprefix", +- "Doc": "replace HasPrefix/TrimPrefix with CutPrefix\n\nThe stringscutprefix analyzer simplifies a common pattern where code first\nchecks for a prefix with `strings.HasPrefix` and then removes it with\n`strings.TrimPrefix`. It replaces this two-step process with a single call\nto `strings.CutPrefix`, introduced in Go 1.20. The analyzer also handles\nthe equivalent functions in the `bytes` package.\n\nFor example, this input:\n\n\tif strings.HasPrefix(s, prefix) {\n\t use(strings.TrimPrefix(s, prefix))\n\t}\n\nis fixed to:\n\n\tif after, ok := strings.CutPrefix(s, prefix); ok {\n\t use(after)\n\t}\n\nThe analyzer also offers fixes to use CutSuffix in a similar way.\nThis input:\n\n\tif strings.HasSuffix(s, suffix) {\n\t use(strings.TrimSuffix(s, suffix))\n\t}\n\nis fixed to:\n\n\tif before, ok := strings.CutSuffix(s, suffix); ok {\n\t use(before)\n\t}", +- "URL": "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/modernize#stringscutprefix", +- "Default": true +- }, +- { +- "Name": "stringsseq", +- "Doc": "replace ranging over Split/Fields with SplitSeq/FieldsSeq\n\nThe stringsseq analyzer improves the efficiency of iterating over substrings.\nIt replaces\n\n\tfor range strings.Split(...)\n\nwith the more efficient\n\n\tfor range strings.SplitSeq(...)\n\nwhich was added in Go 1.24 and avoids allocating a slice for the\nsubstrings. The analyzer also handles strings.Fields and the\nequivalent functions in the bytes package.", +- "URL": "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/modernize#stringsseq", +- "Default": true +- }, +- { +- "Name": "structtag", +- "Doc": "check that struct field tags conform to reflect.StructTag.Get\n\nAlso report certain struct tags (json, xml) used with unexported fields.", +- "URL": "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/structtag", +- "Default": true +- }, +- { +- "Name": "testingcontext", +- "Doc": "replace context.WithCancel with t.Context in tests\n\nThe testingcontext analyzer simplifies context management in tests. 
It\nreplaces the manual creation of a cancellable context,\n\n\tctx, cancel := context.WithCancel(context.Background())\n\tdefer cancel()\n\nwith a single call to t.Context(), which was added in Go 1.24.\n\nThis change is only suggested if the `cancel` function is not used\nfor any other purpose.", +- "URL": "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/modernize#testingcontext", +- "Default": true +- }, +- { +- "Name": "testinggoroutine", +- "Doc": "report calls to (*testing.T).Fatal from goroutines started by a test\n\nFunctions that abruptly terminate a test, such as the Fatal, Fatalf, FailNow, and\nSkip{,f,Now} methods of *testing.T, must be called from the test goroutine itself.\nThis checker detects calls to these functions that occur within a goroutine\nstarted by the test. For example:\n\n\tfunc TestFoo(t *testing.T) {\n\t go func() {\n\t t.Fatal(\"oops\") // error: (*T).Fatal called from non-test goroutine\n\t }()\n\t}", +- "URL": "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/testinggoroutine", +- "Default": true +- }, +- { +- "Name": "tests", +- "Doc": "check for common mistaken usages of tests and examples\n\nThe tests checker walks Test, Benchmark, Fuzzing and Example functions checking\nmalformed names, wrong signatures and examples documenting non-existent\nidentifiers.\n\nPlease see the documentation for package testing in golang.org/pkg/testing\nfor the conventions that are enforced for Tests, Benchmarks, and Examples.", +- "URL": "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/tests", +- "Default": true +- }, +- { +- "Name": "timeformat", +- "Doc": "check for calls of (time.Time).Format or time.Parse with 2006-02-01\n\nThe timeformat checker looks for time formats with the 2006-02-01 (yyyy-dd-mm)\nformat. Internationally, \"yyyy-dd-mm\" does not occur in common calendar date\nstandards, and so it is more likely that 2006-01-02 (yyyy-mm-dd) was intended.", +- "URL": "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/timeformat", +- "Default": true +- }, +- { +- "Name": "unmarshal", +- "Doc": "report passing non-pointer or non-interface values to unmarshal\n\nThe unmarshal analysis reports calls to functions such as json.Unmarshal\nin which the argument type is not a pointer or an interface.", +- "URL": "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/unmarshal", +- "Default": true +- }, +- { +- "Name": "unreachable", +- "Doc": "check for unreachable code\n\nThe unreachable analyzer finds statements that execution can never reach\nbecause they are preceded by a return statement, a call to panic, an\ninfinite loop, or similar constructs.", +- "URL": "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/unreachable", +- "Default": true +- }, +- { +- "Name": "unsafeptr", +- "Doc": "check for invalid conversions of uintptr to unsafe.Pointer\n\nThe unsafeptr analyzer reports likely incorrect uses of unsafe.Pointer\nto convert integers to pointers. 
A conversion from uintptr to\nunsafe.Pointer is invalid if it implies that there is a uintptr-typed\nword in memory that holds a pointer value, because that word will be\ninvisible to stack copying and to the garbage collector.", +- "URL": "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/unsafeptr", +- "Default": true +- }, +- { +- "Name": "unusedfunc", +- "Doc": "check for unused functions, methods, etc\n\nThe unusedfunc analyzer reports functions and methods that are\nnever referenced outside of their own declaration.\n\nA function is considered unused if it is unexported and not\nreferenced (except within its own declaration).\n\nA method is considered unused if it is unexported, not referenced\n(except within its own declaration), and its name does not match\nthat of any method of an interface type declared within the same\npackage.\n\nThe tool may report false positives in some situations, for\nexample:\n\n - for a declaration of an unexported function that is referenced\n from another package using the go:linkname mechanism, if the\n declaration's doc comment does not also have a go:linkname\n comment.\n\n (Such code is in any case strongly discouraged: linkname\n annotations, if they must be used at all, should be used on both\n the declaration and the alias.)\n\n - for compiler intrinsics in the \"runtime\" package that, though\n never referenced, are known to the compiler and are called\n indirectly by compiled object code.\n\n - for functions called only from assembly.\n\n - for functions called only from files whose build tags are not\n selected in the current build configuration.\n\nSince these situations are relatively common in the low-level parts\nof the runtime, this analyzer ignores the standard library.\nSee https://go.dev/issue/71686 and https://go.dev/issue/74130 for\nfurther discussion of these limitations.\n\nThe unusedfunc algorithm is not as precise as the\ngolang.org/x/tools/cmd/deadcode tool, but it has the advantage that\nit runs within the modular analysis framework, enabling near\nreal-time feedback within gopls.\n\nThe unusedfunc analyzer also reports unused types, vars, and\nconstants. 
Enums--constants defined with iota--are ignored since\neven the unused values must remain present to preserve the logical\nordering.", +- "URL": "https://pkg.go.dev/golang.org/x/tools/gopls/internal/analysis/unusedfunc", +- "Default": true +- }, +- { +- "Name": "unusedparams", +- "Doc": "check for unused parameters of functions\n\nThe unusedparams analyzer checks functions to see if there are\nany parameters that are not being used.\n\nTo ensure soundness, it ignores:\n - \"address-taken\" functions, that is, functions that are used as\n a value rather than being called directly; their signatures may\n be required to conform to a func type.\n - exported functions or methods, since they may be address-taken\n in another package.\n - unexported methods whose name matches an interface method\n declared in the same package, since the method's signature\n may be required to conform to the interface type.\n - functions with empty bodies, or containing just a call to panic.\n - parameters that are unnamed, or named \"_\", the blank identifier.\n\nThe analyzer suggests a fix of replacing the parameter name by \"_\",\nbut in such cases a deeper fix can be obtained by invoking the\n\"Refactor: remove unused parameter\" code action, which will\neliminate the parameter entirely, along with all corresponding\narguments at call sites, while taking care to preserve any side\neffects in the argument expressions; see\nhttps://github.com/golang/tools/releases/tag/gopls%2Fv0.14.\n\nThis analyzer ignores generated code.", +- "URL": "https://pkg.go.dev/golang.org/x/tools/gopls/internal/analysis/unusedparams", +- "Default": true +- }, +- { +- "Name": "unusedresult", +- "Doc": "check for unused results of calls to some functions\n\nSome functions like fmt.Errorf return a result and have no side\neffects, so it is always a mistake to discard the result. Other\nfunctions may return an error that must not be ignored, or a cleanup\noperation that must be called. This analyzer reports calls to\nfunctions like these when the result of the call is ignored.\n\nThe set of functions may be controlled using flags.", +- "URL": "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/unusedresult", +- "Default": true +- }, +- { +- "Name": "unusedvariable", +- "Doc": "check for unused variables and suggest fixes", +- "URL": "https://pkg.go.dev/golang.org/x/tools/gopls/internal/analysis/unusedvariable", +- "Default": true +- }, +- { +- "Name": "unusedwrite", +- "Doc": "checks for unused writes\n\nThe analyzer reports instances of writes to struct fields and\narrays that are never read. 
Specifically, when a struct object\nor an array is copied, its elements are copied implicitly by\nthe compiler, and any element write to this copy does nothing\nwith the original object.\n\nFor example:\n\n\ttype T struct { x int }\n\n\tfunc f(input []T) {\n\t\tfor i, v := range input { // v is a copy\n\t\t\tv.x = i // unused write to field x\n\t\t}\n\t}\n\nAnother example is about non-pointer receiver:\n\n\ttype T struct { x int }\n\n\tfunc (t T) f() { // t is a copy\n\t\tt.x = i // unused write to field x\n\t}", +- "URL": "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/unusedwrite", +- "Default": true +- }, +- { +- "Name": "waitgroup", +- "Doc": "check for misuses of sync.WaitGroup\n\nThis analyzer detects mistaken calls to the (*sync.WaitGroup).Add\nmethod from inside a new goroutine, causing Add to race with Wait:\n\n\t// WRONG\n\tvar wg sync.WaitGroup\n\tgo func() {\n\t wg.Add(1) // \"WaitGroup.Add called from inside new goroutine\"\n\t defer wg.Done()\n\t ...\n\t}()\n\twg.Wait() // (may return prematurely before new goroutine starts)\n\nThe correct code calls Add before starting the goroutine:\n\n\t// RIGHT\n\tvar wg sync.WaitGroup\n\twg.Add(1)\n\tgo func() {\n\t\tdefer wg.Done()\n\t\t...\n\t}()\n\twg.Wait()", +- "URL": "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/waitgroup", +- "Default": true +- }, +- { +- "Name": "waitgroup", +- "Doc": "replace wg.Add(1)/go/wg.Done() with wg.Go\n\nThe waitgroup analyzer simplifies goroutine management with `sync.WaitGroup`.\nIt replaces the common pattern\n\n\twg.Add(1)\n\tgo func() {\n\t\tdefer wg.Done()\n\t\t...\n\t}()\n\nwith a single call to\n\n\twg.Go(func(){ ... })\n\nwhich was added in Go 1.25.", +- "URL": "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/modernize#waitgroup", +- "Default": true +- }, +- { +- "Name": "yield", +- "Doc": "report calls to yield where the result is ignored\n\nAfter a yield function returns false, the caller should not call\nthe yield function again; generally the iterator should return\npromptly.\n\nThis example fails to check the result of the call to yield,\ncausing this analyzer to report a diagnostic:\n\n\tyield(1) // yield may be called again (on L2) after returning false\n\tyield(2)\n\nThe corrected code is either this:\n\n\tif yield(1) { yield(2) }\n\nor simply:\n\n\t_ = yield(1) \u0026\u0026 yield(2)\n\nIt is not always a mistake to ignore the result of yield.\nFor example, this is a valid single-element iterator:\n\n\tyield(1) // ok to ignore result\n\treturn\n\nIt is only a mistake when the yield call that returned false may be\nfollowed by another call.", +- "URL": "https://pkg.go.dev/golang.org/x/tools/gopls/internal/analysis/yield", +- "Default": true +- } +- ], +- "Hints": [ +- { +- "Name": "assignVariableTypes", +- "Doc": "`\"assignVariableTypes\"` controls inlay hints for variable types in assign statements:\n```go\n\ti/* int*/, j/* int*/ := 0, len(r)-1\n```\n", +- "Default": false, +- "Status": "" +- }, +- { +- "Name": "compositeLiteralFields", +- "Doc": "`\"compositeLiteralFields\"` inlay hints for composite literal field names:\n```go\n\t{/*in: */\"Hello, world\", /*want: */\"dlrow ,olleH\"}\n```\n", +- "Default": false, +- "Status": "" +- }, +- { +- "Name": "compositeLiteralTypes", +- "Doc": "`\"compositeLiteralTypes\"` controls inlay hints for composite literal types:\n```go\n\tfor _, c := range []struct {\n\t\tin, want string\n\t}{\n\t\t/*struct{ in string; want string }*/{\"Hello, world\", \"dlrow ,olleH\"},\n\t}\n```\n", +- "Default": false, +- "Status": "" 
+- }, +- { +- "Name": "constantValues", +- "Doc": "`\"constantValues\"` controls inlay hints for constant values:\n```go\n\tconst (\n\t\tKindNone Kind = iota/* = 0*/\n\t\tKindPrint/* = 1*/\n\t\tKindPrintf/* = 2*/\n\t\tKindErrorf/* = 3*/\n\t)\n```\n", +- "Default": false, +- "Status": "" +- }, +- { +- "Name": "functionTypeParameters", +- "Doc": "`\"functionTypeParameters\"` inlay hints for implicit type parameters on generic functions:\n```go\n\tmyFoo/*[int, string]*/(1, \"hello\")\n```\n", +- "Default": false, +- "Status": "" +- }, +- { +- "Name": "ignoredError", +- "Doc": "`\"ignoredError\"` inlay hints for implicitly discarded errors:\n```go\n\tf.Close() // ignore error\n```\nThis check inserts an `// ignore error` hint following any\nstatement that is a function call whose error result is\nimplicitly ignored.\n\nTo suppress the hint, write an actual comment containing\n\"ignore error\" following the call statement, or explictly\nassign the result to a blank variable. A handful of common\nfunctions such as `fmt.Println` are excluded from the\ncheck.\n", +- "Default": false, +- "Status": "" +- }, +- { +- "Name": "parameterNames", +- "Doc": "`\"parameterNames\"` controls inlay hints for parameter names:\n```go\n\tparseInt(/* str: */ \"123\", /* radix: */ 8)\n```\n", +- "Default": false, +- "Status": "" +- }, +- { +- "Name": "rangeVariableTypes", +- "Doc": "`\"rangeVariableTypes\"` controls inlay hints for variable types in range statements:\n```go\n\tfor k/* int*/, v/* string*/ := range []string{} {\n\t\tfmt.Println(k, v)\n\t}\n```\n", +- "Default": false, +- "Status": "" +- } +- ] +-} +\ No newline at end of file +diff -urN a/gopls/internal/doc/generate/generate.go b/gopls/internal/doc/generate/generate.go +--- a/gopls/internal/doc/generate/generate.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/doc/generate/generate.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,838 +0,0 @@ +-// Copyright 2024 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-// The generate command updates the following files of documentation: +-// +-// gopls/doc/settings.md -- from linking gopls/internal/settings.DefaultOptions +-// gopls/doc/analyzers.md -- from linking gopls/internal/settings.DefaultAnalyzers +-// gopls/doc/inlayHints.md -- from loading gopls/internal/settings.InlayHint +-// gopls/internal/doc/api.json -- all of the above in a single value, for 'gopls api-json' +-// +-// Run it with this command: +-// +-// $ cd gopls/internal/doc/generate && go generate +-package main +- +-import ( +- "bytes" +- "encoding/json" +- "errors" +- "fmt" +- "go/ast" +- "go/doc/comment" +- "go/token" +- "go/types" +- "maps" +- "os" +- "os/exec" +- "path/filepath" +- "reflect" +- "regexp" +- "slices" +- "sort" +- "strconv" +- "strings" +- "time" +- "unicode" +- +- "golang.org/x/tools/go/ast/astutil" +- "golang.org/x/tools/go/packages" +- "golang.org/x/tools/gopls/internal/cache" +- "golang.org/x/tools/gopls/internal/doc" +- "golang.org/x/tools/gopls/internal/golang" +- "golang.org/x/tools/gopls/internal/mod" +- "golang.org/x/tools/gopls/internal/settings" +- "golang.org/x/tools/gopls/internal/util/safetoken" +- internalastutil "golang.org/x/tools/internal/astutil" +-) +- +-func main() { +- if _, err := doMain(true); err != nil { +- fmt.Fprintf(os.Stderr, "Generation failed: %v\n", err) +- os.Exit(1) +- } +-} +- +-// doMain regenerates the output files. 
On success: +-// - if write, it updates them; +-// - if !write, it reports whether they would change. +-func doMain(write bool) (bool, error) { +- api, err := loadAPI() +- if err != nil { +- return false, err +- } +- +- goplsDir, err := pkgDir("golang.org/x/tools/gopls") +- if err != nil { +- return false, err +- } +- +- // TODO(adonovan): consider using HTML, not Markdown, for the +- // generated reference documents. It's not more difficult, the +- // layout is easier to read, and we can use go/doc-comment +- // rendering logic. +- +- for _, f := range []struct { +- name string // relative to gopls +- rewrite rewriter +- }{ +- {"internal/doc/api.json", rewriteAPI}, +- {"doc/settings.md", rewriteSettings}, +- {"doc/codelenses.md", rewriteCodeLenses}, +- {"doc/analyzers.md", rewriteAnalyzers}, +- {"doc/inlayHints.md", rewriteInlayHints}, +- } { +- file := filepath.Join(goplsDir, f.name) +- old, err := os.ReadFile(file) +- if err != nil { +- return false, err +- } +- +- new, err := f.rewrite(old, api) +- if err != nil { +- return false, fmt.Errorf("rewriting %q: %v", file, err) +- } +- +- if write { +- if err := os.WriteFile(file, new, 0); err != nil { +- return false, err +- } +- } else if !bytes.Equal(old, new) { +- return false, nil // files would change +- } +- } +- return true, nil +-} +- +-// A rewriter is a function that transforms the content of a file. +-type rewriter = func([]byte, *doc.API) ([]byte, error) +- +-// pkgDir returns the directory corresponding to the import path pkgPath. +-func pkgDir(pkgPath string) (string, error) { +- cmd := exec.Command("go", "list", "-f", "{{.Dir}}", pkgPath) +- out, err := cmd.Output() +- if err != nil { +- if ee, _ := err.(*exec.ExitError); ee != nil && len(ee.Stderr) > 0 { +- return "", fmt.Errorf("%v: %w\n%s", cmd, err, ee.Stderr) +- } +- return "", fmt.Errorf("%v: %w", cmd, err) +- } +- return strings.TrimSpace(string(out)), nil +-} +- +-// loadAPI computes the JSON-encodable value that describes gopls' +-// interfaces, by a combination of static and dynamic analysis. +-func loadAPI() (*doc.API, error) { +- pkgs, err := packages.Load( +- &packages.Config{ +- Mode: packages.NeedTypes | packages.NeedTypesInfo | packages.NeedSyntax | packages.NeedDeps, +- }, +- "golang.org/x/tools/gopls/internal/settings", +- ) +- if err != nil { +- return nil, err +- } +- settingsPkg := pkgs[0] +- +- defaults := settings.DefaultOptions() +- api := &doc.API{ +- Options: map[string][]*doc.Option{}, +- Analyzers: loadAnalyzers(settings.AllAnalyzers, defaults), +- } +- +- api.Lenses, err = loadLenses(settingsPkg, defaults.Codelenses) +- if err != nil { +- return nil, err +- } +- api.Hints, err = loadHints(settingsPkg) +- if err != nil { +- return nil, err +- } +- +- for _, category := range []reflect.Value{ +- reflect.ValueOf(defaults.UserOptions), +- } { +- // Find the type information and ast.File corresponding to the category. +- optsType := settingsPkg.Types.Scope().Lookup(category.Type().Name()) +- if optsType == nil { +- return nil, fmt.Errorf("could not find %v in scope %v", category.Type().Name(), settingsPkg.Types.Scope()) +- } +- opts, err := loadOptions(category, optsType, settingsPkg, "") +- if err != nil { +- return nil, err +- } +- +- // Edge case for "analyses": populate its enum keys from +- // the analyzer list, since its map keys are strings, not enums. +- // Also, set its EnumKeys.ValueType for historical reasons. 
+- for _, opt := range opts { +- if opt.Name == "analyses" { +- opt.EnumKeys.ValueType = "bool" +- for _, a := range api.Analyzers { +- opt.EnumKeys.Keys = append(opt.EnumKeys.Keys, doc.EnumKey{ +- Name: fmt.Sprintf("%q", a.Name), +- Doc: a.Doc, +- Default: strconv.FormatBool(a.Default), +- }) +- } +- } +- } +- +- catName := strings.TrimSuffix(category.Type().Name(), "Options") +- api.Options[catName] = opts +- } +- return api, nil +-} +- +-// loadOptions computes a single category of settings by a combination +-// of static analysis and reflection over gopls internal types. +-func loadOptions(category reflect.Value, optsType types.Object, pkg *packages.Package, hierarchy string) ([]*doc.Option, error) { +- file, err := fileForPos(pkg, optsType.Pos()) +- if err != nil { +- return nil, err +- } +- +- enums, err := loadEnums(pkg) // TODO(adonovan): do this only once at toplevel. +- if err != nil { +- return nil, err +- } +- +- var opts []*doc.Option +- optsStruct := optsType.Type().Underlying().(*types.Struct) +- for i := 0; i < optsStruct.NumFields(); i++ { +- // The types field gives us the type. +- typesField := optsStruct.Field(i) +- +- // If the field name ends with "Options", assume it is a struct with +- // additional options and process it recursively. +- if h := strings.TrimSuffix(typesField.Name(), "Options"); h != typesField.Name() { +- // Keep track of the parent structs. +- if hierarchy != "" { +- h = hierarchy + "." + h +- } +- options, err := loadOptions(category, typesField, pkg, strings.ToLower(h)) +- if err != nil { +- return nil, err +- } +- opts = append(opts, options...) +- continue +- } +- path, _ := astutil.PathEnclosingInterval(file, typesField.Pos(), typesField.Pos()) +- if len(path) < 2 { +- return nil, fmt.Errorf("could not find AST node for field %v", typesField) +- } +- +- // The AST field gives us the doc. +- astField, ok := path[1].(*ast.Field) +- if !ok { +- return nil, fmt.Errorf("unexpected AST path %v", path) +- } +- description, deprecation := astField.Doc.Text(), internalastutil.Deprecation(astField.Doc) +- +- // The reflect field gives us the default value. +- reflectField := category.FieldByName(typesField.Name()) +- if !reflectField.IsValid() { +- return nil, fmt.Errorf("could not find reflect field for %v", typesField.Name()) +- } +- +- def, err := formatDefault(reflectField) +- if err != nil { +- return nil, err +- } +- +- // Derive the doc-and-api.json type from the Go field type. +- // +- // In principle, we should use JSON nomenclature here +- // (number, array, object, etc; see #68057), but in +- // practice we use the Go type string ([]T, map[K]V, +- // etc) with only one tweak: enumeration types are +- // replaced by "enum", including when they appear as +- // map keys. +- // +- // Notable edge cases: +- // - any (e.g. in linksInHover) is really a sum of false | true | "internal". +- // - time.Duration is really a string with a particular syntax. +- typ := typesField.Type().String() +- if _, ok := enums[typesField.Type()]; ok { +- typ = "enum" +- } +- name := lowerFirst(typesField.Name()) +- +- // enum-keyed maps +- var enumKeys doc.EnumKeys +- if m, ok := typesField.Type().Underlying().(*types.Map); ok { +- if values, ok := enums[m.Key()]; ok { +- // Update type name: "map[CodeLensSource]T" -> "map[enum]T" +- // hack: assumes key substring is unique! +- typ = strings.Replace(typ, m.Key().String(), "enum", 1) +- +- enumKeys.ValueType = m.Elem().String() // e.g. 
bool +- +- // For map[enum]T fields, gather the set of valid +- // EnumKeys (from type information). If T=bool, also +- // record the default value (from reflection). +- keys, err := collectEnumKeys(m, reflectField, values) +- if err != nil { +- return nil, err +- } +- enumKeys.Keys = keys +- } +- } +- +- // Get the status of the field by checking its struct tags. +- reflectStructField, ok := category.Type().FieldByName(typesField.Name()) +- if !ok { +- return nil, fmt.Errorf("no struct field for %s", typesField.Name()) +- } +- status := reflectStructField.Tag.Get("status") +- +- opts = append(opts, &doc.Option{ +- Name: name, +- Type: typ, +- Doc: lowerFirst(description), +- Default: def, +- EnumKeys: enumKeys, +- EnumValues: enums[typesField.Type()], +- Status: status, +- Hierarchy: hierarchy, +- DeprecationMessage: lowerFirst(strings.TrimPrefix(deprecation, "Deprecated: ")), +- }) +- } +- return opts, nil +-} +- +-// loadEnums returns a description of gopls' settings enum types based on static analysis. +-func loadEnums(pkg *packages.Package) (map[types.Type][]doc.EnumValue, error) { +- enums := make(map[types.Type][]doc.EnumValue) +- for _, name := range pkg.Types.Scope().Names() { +- obj := pkg.Types.Scope().Lookup(name) +- cnst, ok := obj.(*types.Const) +- if !ok { +- continue +- } +- f, err := fileForPos(pkg, cnst.Pos()) +- if err != nil { +- return nil, fmt.Errorf("finding file for %q: %v", cnst.Name(), err) +- } +- path, _ := astutil.PathEnclosingInterval(f, cnst.Pos(), cnst.Pos()) +- spec := path[1].(*ast.ValueSpec) +- value := cnst.Val().ExactString() +- docstring := valueDoc(cnst.Name(), value, spec.Doc.Text()) +- var status string +- for _, d := range internalastutil.Directives(spec.Doc) { +- if d.Tool == "gopls" && d.Name == "status" { +- status = d.Args +- break +- } +- } +- v := doc.EnumValue{ +- Value: value, +- Doc: docstring, +- Status: status, +- } +- enums[obj.Type()] = append(enums[obj.Type()], v) +- } +- +- // linksInHover is a one-off edge case (true | false | "gopls") +- // that doesn't warrant a general solution (e.g. struct tag). +- enums[pkg.Types.Scope().Lookup("LinksInHoverEnum").Type()] = []doc.EnumValue{ +- {Value: "false", Doc: "false: do not show links"}, +- {Value: "true", Doc: "true: show links to the `linkTarget` domain"}, +- {Value: `"gopls"`, Doc: "`\"gopls\"`: show links to gopls' internal documentation viewer"}, +- } +- +- return enums, nil +-} +- +-func collectEnumKeys(m *types.Map, reflectField reflect.Value, enumValues []doc.EnumValue) ([]doc.EnumKey, error) { +- // We can get default values for enum -> bool maps. 
+- var isEnumBoolMap bool +- if basic, ok := m.Elem().Underlying().(*types.Basic); ok && basic.Kind() == types.Bool { +- isEnumBoolMap = true +- } +- var keys []doc.EnumKey +- for _, v := range enumValues { +- var def string +- if isEnumBoolMap { +- var err error +- def, err = formatDefaultFromEnumBoolMap(reflectField, v.Value) +- if err != nil { +- return nil, err +- } +- } +- keys = append(keys, doc.EnumKey{ +- Name: v.Value, +- Doc: v.Doc, +- Status: v.Status, +- Default: def, +- }) +- } +- return keys, nil +-} +- +-func formatDefaultFromEnumBoolMap(reflectMap reflect.Value, enumKey string) (string, error) { +- if reflectMap.Kind() != reflect.Map { +- return "", nil +- } +- name := enumKey +- if unquoted, err := strconv.Unquote(name); err == nil { +- name = unquoted +- } +- for _, e := range reflectMap.MapKeys() { +- if e.String() == name { +- value := reflectMap.MapIndex(e) +- if value.Type().Kind() == reflect.Bool { +- return formatDefault(value) +- } +- } +- } +- // Assume that if the value isn't mentioned in the map, it defaults to +- // the default value, false. +- return formatDefault(reflect.ValueOf(false)) +-} +- +-// formatDefault formats the default value into a JSON-like string. +-// VS Code exposes settings as JSON, so showing them as JSON is reasonable. +-// TODO(rstambler): Reconsider this approach, as the VS Code Go generator now +-// marshals to JSON. +-func formatDefault(reflectField reflect.Value) (string, error) { +- def := reflectField.Interface() +- +- // Durations marshal as nanoseconds, but we want the stringy versions, +- // e.g. "100ms". +- if t, ok := def.(time.Duration); ok { +- def = t.String() +- } +- defBytes, err := json.Marshal(def) +- if err != nil { +- return "", err +- } +- +- // Nil values format as "null" so print them as hardcoded empty values. +- switch reflectField.Type().Kind() { +- case reflect.Map: +- if reflectField.IsNil() { +- defBytes = []byte("{}") +- } +- case reflect.Slice: +- if reflectField.IsNil() { +- defBytes = []byte("[]") +- } +- } +- return string(defBytes), err +-} +- +-// valueDoc transforms a docstring documenting a constant identifier to a +-// docstring documenting its value. +-// +-// If doc is of the form "Foo is a bar", it returns '`"fooValue"` is a bar'. If +-// doc is non-standard ("this value is a bar"), it returns '`"fooValue"`: this +-// value is a bar'. +-func valueDoc(name, value, doc string) string { +- if doc == "" { +- return "" +- } +- if strings.HasPrefix(doc, name) { +- // docstring in standard form. Replace the subject with value. +- return fmt.Sprintf("`%s`%s", value, doc[len(name):]) +- } +- return fmt.Sprintf("`%s`: %s", value, doc) +-} +- +-// loadLenses combines the syntactic comments from the settings +-// package with the default values from settings.DefaultOptions(), and +-// returns a list of Code Lens descriptors. +-func loadLenses(settingsPkg *packages.Package, defaults map[settings.CodeLensSource]bool) ([]*doc.Lens, error) { +- // Find the CodeLensSource enums among the files of the protocol package. +- // Map each enum value to its doc comment. 
+- enumDoc := make(map[string]string) +- enumStatus := make(map[string]string) +- for _, f := range settingsPkg.Syntax { +- for _, decl := range f.Decls { +- if decl, ok := decl.(*ast.GenDecl); ok && decl.Tok == token.CONST { +- for _, spec := range decl.Specs { +- spec := spec.(*ast.ValueSpec) +- posn := safetoken.StartPosition(settingsPkg.Fset, spec.Pos()) +- if id, ok := spec.Type.(*ast.Ident); ok && id.Name == "CodeLensSource" { +- if len(spec.Names) != 1 || len(spec.Values) != 1 { +- return nil, fmt.Errorf("%s: declare one CodeLensSource per line", posn) +- } +- lit, ok := spec.Values[0].(*ast.BasicLit) +- if !ok || lit.Kind != token.STRING { +- return nil, fmt.Errorf("%s: CodeLensSource value is not a string literal", posn) +- } +- value, _ := strconv.Unquote(lit.Value) // ignore error: AST is well-formed +- if spec.Doc == nil { +- return nil, fmt.Errorf("%s: %s lacks doc comment", posn, spec.Names[0].Name) +- } +- enumDoc[value] = spec.Doc.Text() +- for _, d := range internalastutil.Directives(spec.Doc) { +- if d.Tool == "gopls" && d.Name == "status" { +- enumStatus[value] = d.Args +- break +- } +- } +- } +- } +- } +- } +- } +- if len(enumDoc) == 0 { +- return nil, fmt.Errorf("failed to extract any CodeLensSource declarations") +- } +- +- // Build list of Lens descriptors. +- var lenses []*doc.Lens +- addAll := func(sources map[settings.CodeLensSource]cache.CodeLensSourceFunc, fileType string) error { +- for _, source := range slices.Sorted(maps.Keys(sources)) { +- docText, ok := enumDoc[string(source)] +- if !ok { +- return fmt.Errorf("missing CodeLensSource declaration for %s", source) +- } +- title, docText, _ := strings.Cut(docText, "\n") // first line is title +- lenses = append(lenses, &doc.Lens{ +- FileType: fileType, +- Lens: string(source), +- Title: title, +- Doc: docText, +- Default: defaults[source], +- Status: enumStatus[string(source)], +- }) +- } +- return nil +- } +- err := errors.Join( +- addAll(golang.CodeLensSources(), "Go"), +- addAll(mod.CodeLensSources(), "go.mod")) +- return lenses, err +-} +- +-func loadAnalyzers(analyzers []*settings.Analyzer, defaults *settings.Options) []*doc.Analyzer { +- slices.SortFunc(analyzers, func(x, y *settings.Analyzer) int { +- return strings.Compare(x.Analyzer().Name, y.Analyzer().Name) +- }) +- var json []*doc.Analyzer +- for _, a := range analyzers { +- json = append(json, &doc.Analyzer{ +- Name: a.Analyzer().Name, +- Doc: a.Analyzer().Doc, +- URL: a.Analyzer().URL, +- Default: a.Enabled(defaults), +- }) +- } +- return json +-} +- +-// loadHints derives and returns the inlay hints metadata from the settings.InlayHint type. 
+-func loadHints(settingsPkg *packages.Package) ([]*doc.Hint, error) { +- enums, err := loadEnums(settingsPkg) // TODO(adonovan): call loadEnums exactly once +- if err != nil { +- return nil, err +- } +- inlayHint := settingsPkg.Types.Scope().Lookup("InlayHint").Type() +- var hints []*doc.Hint +- for _, enumVal := range enums[inlayHint] { +- name, _ := strconv.Unquote(enumVal.Value) +- hints = append(hints, &doc.Hint{ +- Name: name, +- Doc: enumVal.Doc, +- Status: enumVal.Status, +- }) +- } +- return hints, nil +-} +- +-func lowerFirst(x string) string { +- if x == "" { +- return x +- } +- return strings.ToLower(x[:1]) + x[1:] +-} +- +-func fileForPos(pkg *packages.Package, pos token.Pos) (*ast.File, error) { +- fset := pkg.Fset +- for _, f := range pkg.Syntax { +- if safetoken.StartPosition(fset, f.FileStart).Filename == safetoken.StartPosition(fset, pos).Filename { +- return f, nil +- } +- } +- return nil, fmt.Errorf("no file for pos %v", pos) +-} +- +-func rewriteAPI(_ []byte, api *doc.API) ([]byte, error) { +- return json.MarshalIndent(api, "", "\t") +-} +- +-type optionsGroup struct { +- title string // dotted path (e.g. "ui.documentation") +- final string // final segment of title (e.g. "documentation") +- level int +- options []*doc.Option +-} +- +-func rewriteSettings(prevContent []byte, api *doc.API) ([]byte, error) { +- content := prevContent +- for category, opts := range api.Options { +- groups := collectGroups(opts) +- +- var buf bytes.Buffer +- +- // First, print a table of contents (ToC). +- fmt.Fprintln(&buf) +- for _, h := range groups { +- title := h.final +- if title != "" { +- fmt.Fprintf(&buf, "%s* [%s](#%s)\n", +- strings.Repeat(" ", h.level), +- capitalize(title), +- strings.ToLower(title)) +- } +- } +- +- // Section titles are h2, options are h3. +- // This is independent of the option hierarchy. +- // (Nested options should not be smaller!) +- fmt.Fprintln(&buf) +- for _, h := range groups { +- title := h.final +- if title != "" { +- // Emit HTML anchor as GitHub markdown doesn't support +- // "# Heading {#anchor}" syntax. +- fmt.Fprintf(&buf, "\n", strings.ToLower(title)) +- +- fmt.Fprintf(&buf, "## %s\n\n", capitalize(title)) +- } +- for _, opt := range h.options { +- // Emit HTML anchor as GitHub markdown doesn't support +- // "# Heading {#anchor}" syntax. +- // +- // (Each option name is the camelCased name of a field of +- // settings.UserOptions or one of its FooOptions subfields.) +- fmt.Fprintf(&buf, "\n", opt.Name) +- +- // heading +- // +- // We do not display the undocumented dotted-path alias +- // (h.title + "." + opt.Name) used by VS Code only. 
+- fmt.Fprintf(&buf, "### `%s %s`\n\n", opt.Name, opt.Type) +- +- // status +- writeStatus(&buf, opt.Status) +- +- // doc comment +- buf.WriteString(opt.Doc) +- +- // enums +- write := func(name, doc string) { +- if doc != "" { +- unbroken := parBreakRE.ReplaceAllString(doc, "\\\n") +- fmt.Fprintf(&buf, "* %s\n", strings.TrimSpace(unbroken)) +- } else { +- fmt.Fprintf(&buf, "* `%s`\n", name) +- } +- } +- if len(opt.EnumValues) > 0 && opt.Type == "enum" { +- // enum as top-level type constructor +- buf.WriteString("\nMust be one of:\n\n") +- for _, val := range opt.EnumValues { +- write(val.Value, val.Doc) +- } +- } else if len(opt.EnumKeys.Keys) > 0 && shouldShowEnumKeysInSettings(opt.Name) { +- // enum as map key (currently just "annotations") +- buf.WriteString("\nEach enum must be one of:\n\n") +- for _, val := range opt.EnumKeys.Keys { +- write(val.Name, val.Doc) +- } +- } +- +- // default value +- fmt.Fprintf(&buf, "\nDefault: `%v`.\n\n", opt.Default) +- } +- } +- newContent, err := replaceSection(content, category, buf.Bytes()) +- if err != nil { +- return nil, err +- } +- content = newContent +- } +- return content, nil +-} +- +-// writeStatus emits a Markdown paragraph to buf about the status of a feature, +-// if nonempty. +-func writeStatus(buf *bytes.Buffer, status string) { +- switch status { +- case "": +- case "advanced": +- fmt.Fprint(buf, "**This is an advanced setting and should not be configured by most `gopls` users.**\n\n") +- case "debug": +- fmt.Fprint(buf, "**This setting is for debugging purposes only.**\n\n") +- case "experimental": +- fmt.Fprint(buf, "**This setting is experimental and may be deleted.**\n\n") +- default: +- fmt.Fprintf(buf, "**Status: %s.**\n\n", status) +- } +-} +- +-var parBreakRE = regexp.MustCompile("\n{2,}") +- +-func shouldShowEnumKeysInSettings(name string) bool { +- // These fields have too many possible options, +- // or too voluminous documentation, to render as enums. +- // Instead they each get their own page in the manual. +- return !(name == "analyses" || name == "codelenses" || name == "hints") +-} +- +-func collectGroups(opts []*doc.Option) []optionsGroup { +- optsByHierarchy := map[string][]*doc.Option{} +- for _, opt := range opts { +- optsByHierarchy[opt.Hierarchy] = append(optsByHierarchy[opt.Hierarchy], opt) +- } +- +- // As a hack, assume that uncategorized items are less important to +- // users and force the empty string to the end of the list. +- var containsEmpty bool +- var sorted []string +- for h := range optsByHierarchy { +- if h == "" { +- containsEmpty = true +- continue +- } +- sorted = append(sorted, h) +- } +- sort.Strings(sorted) +- if containsEmpty { +- sorted = append(sorted, "") +- } +- var groups []optionsGroup +- baseLevel := 0 +- for _, h := range sorted { +- split := strings.SplitAfter(h, ".") +- last := split[len(split)-1] +- // Hack to capitalize all of UI. +- if last == "ui" { +- last = "UI" +- } +- // A hierarchy may look like "ui.formatting". If "ui" has no +- // options of its own, it may not be added to the map, but it +- // still needs a heading. 
+- components := strings.Split(h, ".") +- for i := 1; i < len(components); i++ { +- parent := strings.Join(components[0:i], ".") +- if _, ok := optsByHierarchy[parent]; !ok { +- groups = append(groups, optionsGroup{ +- title: parent, +- final: last, +- level: baseLevel + i, +- }) +- } +- } +- groups = append(groups, optionsGroup{ +- title: h, +- final: last, +- level: baseLevel + strings.Count(h, "."), +- options: optsByHierarchy[h], +- }) +- } +- return groups +-} +- +-func capitalize(s string) string { +- return string(unicode.ToUpper(rune(s[0]))) + s[1:] +-} +- +-func rewriteCodeLenses(prevContent []byte, api *doc.API) ([]byte, error) { +- var buf bytes.Buffer +- for _, lens := range api.Lenses { +- fmt.Fprintf(&buf, "## `%s`: %s\n\n", lens.Lens, lens.Title) +- writeStatus(&buf, lens.Status) +- fmt.Fprintf(&buf, "%s\n\n", lens.Doc) +- fmt.Fprintf(&buf, "Default: %v\n\n", onOff(lens.Default)) +- fmt.Fprintf(&buf, "File type: %s\n\n", lens.FileType) +- } +- return replaceSection(prevContent, "Lenses", buf.Bytes()) +-} +- +-func rewriteAnalyzers(prevContent []byte, api *doc.API) ([]byte, error) { +- var buf bytes.Buffer +- for _, analyzer := range api.Analyzers { +- fmt.Fprintf(&buf, "\n", analyzer.Name) +- title, doc, _ := strings.Cut(analyzer.Doc, "\n") +- title = strings.TrimPrefix(title, analyzer.Name+": ") +- fmt.Fprintf(&buf, "## `%s`: %s\n\n", analyzer.Name, title) +- +- // Convert Analyzer.Doc from go/doc/comment form to Markdown. +- // Headings in doc comments are converted to ### (HeadingLevel=3). +- // +- // Some Analyzers (e.g. go/analysis/passes/inline) use ## to indicate subheadings +- // Although this is valid Markdown, it is not valid go/doc/comment, +- // nor is it rendered as a subheading by pkg.go.dev or gopls's doc viewer. +- // Perhaps it will be supported in future; see +- // https://github.com/golang/go/issues/51082#issuecomment-1033116430 et seq. +- // +- // In the meantime, the go/doc/comment processing will escape them so +- // that the ## appears literally in analyzers.md just as it does in +- // the two viewers mentioned above. The meaning is clear enough. +- doctree := new(comment.Parser).Parse(doc) +- buf.Write((&comment.Printer{HeadingLevel: 3}).Markdown(doctree)) +- buf.WriteString("\n\n") +- +- fmt.Fprintf(&buf, "Default: %s.", onOff(analyzer.Default)) +- if !analyzer.Default { +- fmt.Fprintf(&buf, " Enable by setting `\"analyses\": {\"%s\": true}`.", analyzer.Name) +- } +- fmt.Fprintf(&buf, "\n\n") +- if analyzer.URL != "" { +- // TODO(adonovan): currently the URL provides the same information +- // as 'doc' above, though that may change due to +- // https://github.com/golang/go/issues/61315#issuecomment-1841350181. +- // In that case, update this to something like "Complete documentation". +- fmt.Fprintf(&buf, "Package documentation: [%s](%s)\n\n", +- analyzer.Name, analyzer.URL) +- } +- +- } +- return replaceSection(prevContent, "Analyzers", buf.Bytes()) +-} +- +-func rewriteInlayHints(prevContent []byte, api *doc.API) ([]byte, error) { +- var buf bytes.Buffer +- for _, hint := range api.Hints { +- fmt.Fprintf(&buf, "## **%v**\n\n", hint.Name) +- fmt.Fprintf(&buf, "%s\n\n", hint.Doc) +- switch hint.Default { +- case true: +- fmt.Fprintf(&buf, "**Enabled by default.**\n\n") +- case false: +- fmt.Fprintf(&buf, "**Disabled by default. 
Enable it by setting `\"hints\": {\"%s\": true}`.**\n\n", hint.Name) +- } +- } +- return replaceSection(prevContent, "Hints", buf.Bytes()) +-} +- +-// replaceSection replaces the portion of a file delimited by comments of the form: +-// +-// +-// +-func replaceSection(content []byte, sectionName string, replacement []byte) ([]byte, error) { +- re := regexp.MustCompile(fmt.Sprintf(`(?s)\n(.*?)`, sectionName, sectionName)) +- idx := re.FindSubmatchIndex(content) +- if idx == nil { +- return nil, fmt.Errorf("could not find section %q", sectionName) +- } +- result := slices.Clone(content[:idx[2]]) +- result = append(result, replacement...) +- result = append(result, content[idx[3]:]...) +- return result, nil +-} +- +-type onOff bool +- +-func (o onOff) String() string { +- if o { +- return "on" +- } else { +- return "off" +- } +-} +diff -urN a/gopls/internal/doc/generate/generate_test.go b/gopls/internal/doc/generate/generate_test.go +--- a/gopls/internal/doc/generate/generate_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/doc/generate/generate_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,24 +0,0 @@ +-// Copyright 2020 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package main +- +-import ( +- "testing" +- +- "golang.org/x/tools/internal/testenv" +-) +- +-func TestGenerated(t *testing.T) { +- testenv.NeedsGoPackages(t) +- testenv.NeedsLocalXTools(t) +- +- ok, err := doMain(false) +- if err != nil { +- t.Fatal(err) +- } +- if !ok { +- t.Error("documentation needs updating. Run: cd gopls && go generate ./...") +- } +-} +diff -urN a/gopls/internal/file/file.go b/gopls/internal/file/file.go +--- a/gopls/internal/file/file.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/file/file.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,68 +0,0 @@ +-// Copyright 2023 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-// The file package defines types used for working with LSP files. +-package file +- +-import ( +- "context" +- "fmt" +- "time" +- +- "golang.org/x/tools/gopls/internal/protocol" +-) +- +-// An Identity identifies the name and contents of a file. +-// +-// TODO(rfindley): Identity may not carry its weight. Consider instead just +-// exposing Handle.Hash, and using an ad-hoc key type where necessary. +-// Or perhaps if mod/work parsing is moved outside of the memoize cache, +-// a notion of Identity simply isn't needed. +-type Identity struct { +- URI protocol.DocumentURI +- Hash Hash // digest of file contents +-} +- +-func (id Identity) String() string { +- return fmt.Sprintf("%s%s", id.URI, id.Hash) +-} +- +-// A FileHandle represents the URI, content, hash, and optional +-// version of a file tracked by the LSP session. +-// +-// File content may be provided by the file system (for Saved files) +-// or from an overlay, for open files with unsaved edits. +-// A FileHandle may record an attempt to read a non-existent file, +-// in which case Content returns an error. +-type Handle interface { +- // URI is the URI for this file handle. +- URI() protocol.DocumentURI +- // Identity returns an Identity for the file, even if there was an error +- // reading it. +- Identity() Identity +- // SameContentsOnDisk reports whether the file has the same content on disk: +- // it is false for files open on an editor with unsaved edits. 
+- SameContentsOnDisk() bool +- // Version returns the file version, as defined by the LSP client. +- // For on-disk file handles, Version returns 0. +- Version() int32 +- // Content returns the contents of a file. +- // If the file is not available, returns a nil slice and an error. +- Content() ([]byte, error) +- // ModTime reports the modification time of a file. +- // If the file is not available, returns the zero time and an error. +- ModTime() (time.Time, error) +- // String returns the file's path. +- String() string +-} +- +-// A Source maps URIs to Handles. +-type Source interface { +- // ReadFile returns the Handle for a given URI, either by reading the content +- // of the file or by obtaining it from a cache. +- // +- // Invariant: ReadFile must only return an error in the case of context +- // cancellation. If ctx.Err() is nil, the resulting error must also be nil. +- ReadFile(ctx context.Context, uri protocol.DocumentURI) (Handle, error) +-} +diff -urN a/gopls/internal/file/hash.go b/gopls/internal/file/hash.go +--- a/gopls/internal/file/hash.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/file/hash.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,33 +0,0 @@ +-// Copyright 2023 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package file +- +-import ( +- "crypto/sha256" +- "fmt" +-) +- +-// A Hash is a cryptographic digest of the contents of a file. +-// (Although at 32B it is larger than a 16B string header, it is smaller +-// and has better locality than the string header + 64B of hex digits.) +-type Hash [sha256.Size]byte +- +-// HashOf returns the hash of some data. +-func HashOf(data []byte) Hash { +- return Hash(sha256.Sum256(data)) +-} +- +-// String returns the digest as a string of hex digits. +-func (h Hash) String() string { +- return fmt.Sprintf("%64x", [sha256.Size]byte(h)) +-} +- +-// XORWith updates *h to *h XOR h2. +-func (h *Hash) XORWith(h2 Hash) { +- // Small enough that we don't need crypto/subtle.XORBytes. +- for i := range h { +- h[i] ^= h2[i] +- } +-} +diff -urN a/gopls/internal/file/kind.go b/gopls/internal/file/kind.go +--- a/gopls/internal/file/kind.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/file/kind.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,74 +0,0 @@ +-// Copyright 2023 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package file +- +-import ( +- "fmt" +- +- "golang.org/x/tools/gopls/internal/protocol" +-) +- +-// Kind describes the kind of the file in question. +-// It can be one of Go,mod, Sum, or Tmpl. +-type Kind int +- +-const ( +- // UnknownKind is a file type we don't know about. +- UnknownKind = Kind(iota) +- +- // Go is a Go source file. +- Go +- // Mod is a go.mod file. +- Mod +- // Sum is a go.sum file. +- Sum +- // Tmpl is a template file. +- Tmpl +- // Work is a go.work file. +- Work +- // Asm is a Go assembly (.s) file. 
+- Asm +-) +- +-func (k Kind) String() string { +- switch k { +- case Go: +- return "go" +- case Mod: +- return "go.mod" +- case Sum: +- return "go.sum" +- case Tmpl: +- return "tmpl" +- case Work: +- return "go.work" +- case Asm: +- return "Go assembly" +- default: +- return fmt.Sprintf("internal error: unknown file kind %d", k) +- } +-} +- +-// KindForLang returns the gopls file [Kind] associated with the given LSP +-// LanguageKind string from the LanguageID field of [protocol.TextDocumentItem], +-// or UnknownKind if the language is not one recognized by gopls. +-func KindForLang(langID protocol.LanguageKind) Kind { +- switch langID { +- case "go": +- return Go +- case "go.mod": +- return Mod +- case "go.sum": +- return Sum +- case "tmpl", "gotmpl": +- return Tmpl +- case "go.work": +- return Work +- case "go.s": +- return Asm +- default: +- return UnknownKind +- } +-} +diff -urN a/gopls/internal/file/modification.go b/gopls/internal/file/modification.go +--- a/gopls/internal/file/modification.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/file/modification.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,57 +0,0 @@ +-// Copyright 2023 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package file +- +-import "golang.org/x/tools/gopls/internal/protocol" +- +-// Modification represents a modification to a file. +-type Modification struct { +- URI protocol.DocumentURI +- Action Action +- +- // OnDisk is true if a watched file is changed on disk. +- // If true, Version will be -1 and Text will be nil. +- OnDisk bool +- +- // Version will be -1 and Text will be nil when they are not supplied, +- // specifically on textDocument/didClose and for on-disk changes. +- Version int32 +- Text []byte +- +- // LanguageID is only sent from the language client on textDocument/didOpen. +- LanguageID protocol.LanguageKind +-} +- +-// An Action is a type of file state change. +-type Action int +- +-const ( +- UnknownAction = Action(iota) +- Open +- Change +- Close +- Save +- Create +- Delete +-) +- +-func (a Action) String() string { +- switch a { +- case Open: +- return "Open" +- case Change: +- return "Change" +- case Close: +- return "Close" +- case Save: +- return "Save" +- case Create: +- return "Create" +- case Delete: +- return "Delete" +- default: +- return "Unknown" +- } +-} +diff -urN a/gopls/internal/filecache/filecache.go b/gopls/internal/filecache/filecache.go +--- a/gopls/internal/filecache/filecache.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/filecache/filecache.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,620 +0,0 @@ +-// Copyright 2022 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-// The filecache package provides a file-based shared durable blob cache. +-// +-// The cache is a machine-global mapping from (kind string, key +-// [32]byte) to []byte, where kind is an identifier describing the +-// namespace or purpose (e.g. "analysis"), and key is a SHA-256 digest +-// of the recipe of the value. (It need not be the digest of the value +-// itself, so you can query the cache without knowing what value the +-// recipe would produce.) +-// +-// The space budget of the cache can be controlled by [SetBudget]. +-// Cache entries may be evicted at any time or in any order. 
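A minimal sketch of the (kind, key) -> value mapping described above, as a caller might use it; the "analysis-example" kind, the recipe string, and compute() are illustrative only, and crypto/sha256 plus this filecache package are assumed to be imported:

	// Hedged sketch: key is a digest of the recipe (the inputs), not of the value itself.
	key := sha256.Sum256([]byte("recipe: inputs that determine the value"))
	value, err := filecache.Get("analysis-example", key)
	if err == filecache.ErrNotFound {
		value = compute()                                 // hypothetical recomputation
		_ = filecache.Set("analysis-example", key, value) // best effort: the entry may be evicted at any time
	}
	// ... use value ...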
+-// Note that "du -sh $GOPLSCACHE" may report a disk usage +-// figure that is rather larger (e.g. 50%) than the budget because +-// it rounds up partial disk blocks. +-// +-// The Get and Set operations are concurrency-safe. +-package filecache +- +-import ( +- "bytes" +- "crypto/sha256" +- "encoding/hex" +- "encoding/json" +- "errors" +- "fmt" +- "io" +- "io/fs" +- "log" +- "os" +- "path/filepath" +- "sort" +- "strings" +- "sync" +- "sync/atomic" +- "time" +- +- "golang.org/x/tools/gopls/internal/util/bug" +- "golang.org/x/tools/gopls/internal/util/lru" +-) +- +-// Start causes the filecache to initialize and start garbage gollection. +-// +-// Start is automatically called by the first call to Get, but may be called +-// explicitly to pre-initialize the cache. +-func Start() { +- go getCacheDir() +-} +- +-// As an optimization, use a 100MB in-memory LRU cache in front of filecache +-// operations. This reduces I/O for operations such as diagnostics or +-// implementations that repeatedly access the same cache entries. +-var memCache = lru.New[memKey, []byte](100 * 1e6) +- +-type memKey struct { +- kind string +- key [32]byte +-} +- +-// Get retrieves from the cache and returns the value most recently +-// supplied to Set(kind, key), possibly by another process. +-// +-// Get returns ErrNotFound if the value was not found. The first call +-// to Get may fail due to ENOSPC or deletion of the process's +-// executable. Other causes of failure include deletion or corruption +-// of the cache (by external meddling) while gopls is running, or +-// faulty hardware; see issue #67433. +-// +-// Callers should not modify the returned array. +-func Get(kind string, key [32]byte) ([]byte, error) { +- // First consult the read-through memory cache. +- // Note that memory cache hits do not update the times +- // used for LRU eviction of the file-based cache. +- if value, ok := memCache.Get(memKey{kind, key}); ok { +- return value, nil +- } +- +- iolimit <- struct{}{} // acquire a token +- defer func() { <-iolimit }() // release a token +- +- // Read the index file, which provides the name of the CAS file. +- indexName, err := filename(kind, key) +- if err != nil { +- // e.g. ENOSPC, deletion of executable (first time only); +- // deletion of cache (at any time). +- return nil, err +- } +- indexData, err := os.ReadFile(indexName) +- if err != nil { +- if errors.Is(err, os.ErrNotExist) { +- return nil, ErrNotFound +- } +- return nil, err +- } +- var valueHash [32]byte +- if copy(valueHash[:], indexData) != len(valueHash) { +- return nil, ErrNotFound // index entry has wrong length +- } +- +- // Read the CAS file and check its contents match. +- // +- // This ensures integrity in all cases (corrupt or truncated +- // file, short read, I/O error, wrong length, etc) except an +- // engineered hash collision, which is infeasible. +- casName, err := filename(casKind, valueHash) +- if err != nil { +- return nil, err // see above for possible causes +- } +- value, _ := os.ReadFile(casName) // ignore error +- if sha256.Sum256(value) != valueHash { +- return nil, ErrNotFound // CAS file is missing or has wrong contents +- } +- +- // Update file times used by LRU eviction. +- // +- // Because this turns a read into a write operation, +- // we follow the approach used in the go command's +- // cache and update the access time only if the +- // existing timestamp is older than one hour. 
+- // +- // (Traditionally the access time would be updated +- // automatically, but for efficiency most POSIX systems have +- // for many years set the noatime mount option to avoid every +- // open or read operation entailing a metadata write.) +- now := time.Now() +- touch := func(filename string) { +- st, err := os.Stat(filename) +- if err == nil && now.Sub(st.ModTime()) > time.Hour { +- os.Chtimes(filename, now, now) // ignore error +- } +- } +- touch(indexName) +- touch(casName) +- +- memCache.Set(memKey{kind, key}, value, len(value)) +- +- return value, nil +-} +- +-// ErrNotFound is the distinguished error +-// returned by Get when the key is not found. +-var ErrNotFound = fmt.Errorf("not found") +- +-// Set updates the value in the cache. +-// +-// Set may fail due to: +-// - failure to access/create the cache (first call only); +-// - out of space (ENOSPC); +-// - deletion of the cache concurrent with a call to Set; +-// - faulty hardware. +-// See issue #67433. +-func Set(kind string, key [32]byte, value []byte) error { +- memCache.Set(memKey{kind, key}, value, len(value)) +- +- // Set the active event to wake up the GC. +- select { +- case active <- struct{}{}: +- default: +- } +- +- iolimit <- struct{}{} // acquire a token +- defer func() { <-iolimit }() // release a token +- +- // First, add the value to the content- +- // addressable store (CAS), if not present. +- hash := sha256.Sum256(value) +- casName, err := filename(casKind, hash) +- if err != nil { +- return err +- } +- // Does CAS file exist and have correct (complete) content? +- // TODO(adonovan): opt: use mmap for this check. +- if prev, _ := os.ReadFile(casName); !bytes.Equal(prev, value) { +- if err := os.MkdirAll(filepath.Dir(casName), 0700); err != nil { +- return err +- } +- // Avoiding O_TRUNC here is merely an optimization to avoid +- // cache misses when two threads race to write the same file. +- if err := writeFileNoTrunc(casName, value, 0600); err != nil { +- os.Remove(casName) // ignore error +- return err // e.g. disk full +- } +- } +- +- // Now write an index entry that refers to the CAS file. +- indexName, err := filename(kind, key) +- if err != nil { +- return err +- } +- if err := os.MkdirAll(filepath.Dir(indexName), 0700); err != nil { +- return err +- } +- if err := writeFileNoTrunc(indexName, hash[:], 0600); err != nil { +- os.Remove(indexName) // ignore error +- return err // e.g. disk full +- } +- +- return nil +-} +- +-// The active 1-channel is a selectable resettable event +-// indicating recent cache activity. +-var active = make(chan struct{}, 1) +- +-// writeFileNoTrunc is like os.WriteFile but doesn't truncate until +-// after the write, so that racing writes of the same data are idempotent. +-func writeFileNoTrunc(filename string, data []byte, perm os.FileMode) error { +- f, err := os.OpenFile(filename, os.O_WRONLY|os.O_CREATE, perm) +- if err != nil { +- return err +- } +- _, err = f.Write(data) +- if err == nil { +- err = f.Truncate(int64(len(data))) +- } +- if closeErr := f.Close(); err == nil { +- err = closeErr +- } +- return err +-} +- +-// reserved kind strings +-const ( +- casKind = "cas" // content-addressable store files +- bugKind = "bug" // gopls bug reports +-) +- +-var iolimit = make(chan struct{}, 128) // counting semaphore to limit I/O concurrency in Set. +- +-var budget int64 = 1e9 // 1GB +- +-// SetBudget sets a soft limit on disk usage of regular files in the +-// cache (in bytes) and returns the previous value. 
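A brief sketch of how a caller might use this, assuming the query/set semantics described here; the 2 GB figure is purely illustrative:

	current := filecache.SetBudget(-1) // negative value: query the current budget without changing it
	if current < 2e9 {
		filecache.SetBudget(2e9) // raise the soft limit; garbage collection then evicts less aggressively
	}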
Supplying a +-// negative value queries the current value without changing it. +-// +-// If two gopls processes have different budgets, the one with the +-// lower budget will collect garbage more actively, but both will +-// observe the effect. +-// +-// Even in the steady state, the storage usage reported by the 'du' +-// command may exceed the budget by as much as a factor of 3 due to +-// the overheads of directories and the effects of block quantization, +-// which are especially pronounced for the small index files. +-func SetBudget(new int64) (old int64) { +- if new < 0 { +- return atomic.LoadInt64(&budget) +- } +- return atomic.SwapInt64(&budget, new) +-} +- +-// --- implementation ---- +- +-// filename returns the name of the cache file of the specified kind and key. +-// +-// A typical cache file has a name such as: +-// +-// $HOME/Library/Caches / gopls / VVVVVVVV / KK / KKKK...KKKK - kind +-// +-// The portions separated by spaces are as follows: +-// - The user's preferred cache directory; the default value varies by OS. +-// - The constant "gopls". +-// - The "version", 32 bits of the digest of the gopls executable. +-// - The first 8 bits of the key, to avoid huge directories. +-// - The full 256 bits of the key. +-// - The kind or purpose of this cache file (e.g. "analysis"). +-// +-// The kind establishes a namespace for the keys. It is represented as +-// a suffix, not a segment, as this significantly reduces the number +-// of directories created, and thus the storage overhead. +-// +-// Previous iterations of the design aimed for the invariant that once +-// a file is written, its contents are never modified, though it may +-// be atomically replaced or removed. However, not all platforms have +-// an atomic rename operation (our first approach), and file locking +-// (our second) is a notoriously fickle mechanism. +-// +-// The current design instead exploits a trick from the cache +-// implementation used by the go command: writes of small files are in +-// practice atomic (all or nothing) on all platforms. +-// (See GOROOT/src/cmd/go/internal/cache/cache.go.) +-// +-// Russ Cox notes: "all file systems use an rwlock around every file +-// system block, including data blocks, so any writes or reads within +-// the same block are going to be handled atomically by the FS +-// implementation without any need to request file locking explicitly. +-// And since the files are so small, there's only one block. (A block +-// is at minimum 512 bytes, usually much more.)" And: "all modern file +-// systems protect against [partial writes due to power loss] with +-// journals." +-// +-// We use a two-level scheme consisting of an index and a +-// content-addressable store (CAS). A single cache entry consists of +-// two files. The value of a cache entry is written into the file at +-// filename("cas", sha256(value)). Since the value may be arbitrarily +-// large, this write is not atomic. That means we must check the +-// integrity of the contents read back from the CAS to make sure they +-// hash to the expected key. If the CAS file is incomplete or +-// inconsistent, we proceed as if it were missing. +-// +-// Once the CAS file has been written, we write a small fixed-size +-// index file at filename(kind, key), using the values supplied by the +-// caller. The index file contains the hash that identifies the value +-// file in the CAS. 
(We could add extra metadata to this file, up to +-// 512B, the minimum size of a disk block, if later desired, so long +-// as the total size remains fixed.) Because the index file is small, +-// concurrent writes to it are atomic in practice, even though this is +-// not guaranteed by any OS. The fixed size ensures that readers can't +-// see a palimpsest when a short new file overwrites a longer old one. +-// +-// New versions of gopls are free to reorganize the contents of the +-// version directory as needs evolve. But all versions of gopls must +-// in perpetuity treat the "gopls" directory in a common fashion. +-// +-// In particular, each gopls process attempts to garbage collect +-// the entire gopls directory so that newer binaries can clean up +-// after older ones: in the development cycle especially, new +-// versions may be created frequently. +-func filename(kind string, key [32]byte) (string, error) { +- base := fmt.Sprintf("%x-%s", key, kind) +- dir, err := getCacheDir() +- if err != nil { +- return "", err +- } +- // Keep the BugReports function consistent with this one. +- return filepath.Join(dir, base[:2], base), nil +-} +- +-// getCacheDir returns the persistent cache directory of all processes +-// running this version of the gopls executable. +-// +-// It must incorporate the hash of the executable so that we needn't +-// worry about incompatible changes to the file format or changes to +-// the algorithm that produced the index. +-func getCacheDir() (string, error) { +- cacheDirOnce.Do(func() { +- // Use user's preferred cache directory. +- userDir := os.Getenv("GOPLSCACHE") +- if userDir == "" { +- var err error +- userDir, err = os.UserCacheDir() +- if err != nil { +- userDir = os.TempDir() +- } +- } +- goplsDir := filepath.Join(userDir, "gopls") +- +- // UserCacheDir may return a nonexistent directory +- // (in which case we must create it, which may fail), +- // or it may return a non-writable directory, in +- // which case we should ideally respect the user's express +- // wishes (e.g. XDG_CACHE_HOME) and not write somewhere else. +- // Sadly UserCacheDir doesn't currently let us distinguish +- // such intent from accidental misconfiguraton such as HOME=/ +- // in a CI builder. So, we check whether the gopls subdirectory +- // can be created (or already exists) and not fall back to /tmp. +- // See also https://github.com/golang/go/issues/57638. +- if os.MkdirAll(goplsDir, 0700) != nil { +- goplsDir = filepath.Join(os.TempDir(), "gopls") +- } +- +- // Start the garbage collector. +- go gc(goplsDir) +- +- // Compute the hash of this executable (~20ms) and create a subdirectory. +- hash, err := hashExecutable() +- if err != nil { +- cacheDirErr = fmt.Errorf("can't hash gopls executable: %w", err) +- } +- // Use only 32 bits of the digest to avoid unwieldy filenames. +- // It's not an adversarial situation. 
+- cacheDir = filepath.Join(goplsDir, fmt.Sprintf("%x", hash[:4])) +- if err := os.MkdirAll(cacheDir, 0700); err != nil { +- cacheDirErr = fmt.Errorf("can't create cache: %w", err) +- } +- }) +- return cacheDir, cacheDirErr +-} +- +-var ( +- cacheDirOnce sync.Once +- cacheDir string +- cacheDirErr error +-) +- +-func hashExecutable() (hash [32]byte, err error) { +- exe, err := os.Executable() +- if err != nil { +- return hash, err +- } +- f, err := os.Open(exe) +- if err != nil { +- return hash, err +- } +- defer f.Close() +- h := sha256.New() +- if _, err := io.Copy(h, f); err != nil { +- return hash, fmt.Errorf("can't read executable: %w", err) +- } +- h.Sum(hash[:0]) +- return hash, nil +-} +- +-// gc runs forever, periodically deleting files from the gopls +-// directory until the space budget is no longer exceeded, and also +-// deleting files older than the maximum age, regardless of budget. +-// +-// One gopls process may delete garbage created by a different gopls +-// process, possibly running a different version of gopls, possibly +-// running concurrently. +-func gc(goplsDir string) { +- // period between collections +- // +- // Originally the period was always 1 minute, but this +- // consumed 15% of a CPU core when idle (#61049). +- // +- // The reason for running collections even when idle is so +- // that long lived gopls sessions eventually clean up the +- // caches created by defunct executables. +- const ( +- minPeriod = 5 * time.Minute // when active +- maxPeriod = 6 * time.Hour // when idle +- ) +- +- // Sleep statDelay*batchSize between stats to smooth out I/O. +- // +- // The constants below were chosen using the following heuristics: +- // - 1GB of filecache is on the order of ~100-200k files, in which case +- // 100μs delay per file introduces 10-20s of additional walk time, +- // less than the minPeriod. +- // - Processing batches of stats at once is much more efficient than +- // sleeping after every stat (due to OS optimizations). +- const statDelay = 100 * time.Microsecond // average delay between stats, to smooth out I/O +- const batchSize = 1000 // # of stats to process before sleeping +- const maxAge = 5 * 24 * time.Hour // max time since last access before file is deleted +- +- // The macOS filesystem is strikingly slow, at least on some machines. +- // /usr/bin/find achieves only about 25,000 stats per second +- // at full speed (no pause between items), meaning a large +- // cache may take several minutes to scan. +- // +- // (gopls' caches should never actually get this big in +- // practice: the example mentioned above resulted from a bug +- // that caused filecache to fail to delete any files.) +- +- const debug = false +- +- // Names of all directories found in first pass; nil thereafter. +- dirs := make(map[string]bool) +- +- for { +- // Wait unconditionally for the minimum period. +- // We do this even on the first run so that tests +- // don't (all) run the GC. +- time.Sleep(minPeriod) +- +- // Enumerate all files in the cache. +- type item struct { +- path string +- mtime time.Time +- size int64 +- } +- var files []item +- start := time.Now() +- var total int64 // bytes +- _ = filepath.Walk(goplsDir, func(path string, stat os.FileInfo, err error) error { +- if err != nil { +- return nil // ignore errors +- } +- if stat.IsDir() { +- // Collect (potentially empty) directories. +- if dirs != nil { +- dirs[path] = true +- } +- } else { +- // Unconditionally delete files we haven't used in ages. 
+- age := time.Since(stat.ModTime()) +- if age > maxAge { +- if debug { +- log.Printf("age: deleting stale file %s (%dB, age %v)", +- path, stat.Size(), age) +- } +- os.Remove(path) // ignore error +- } else { +- files = append(files, item{path, stat.ModTime(), stat.Size()}) +- total += stat.Size() +- if debug && len(files)%1000 == 0 { +- log.Printf("filecache: checked %d files in %v", len(files), time.Since(start)) +- } +- if len(files)%batchSize == 0 { +- time.Sleep(batchSize * statDelay) +- } +- } +- } +- return nil +- }) +- +- // Sort oldest files first. +- sort.Slice(files, func(i, j int) bool { +- return files[i].mtime.Before(files[j].mtime) +- }) +- +- // Delete oldest files until we're under budget. +- budget := atomic.LoadInt64(&budget) +- for _, file := range files { +- if total < budget { +- break +- } +- if debug { +- age := time.Since(file.mtime) +- log.Printf("budget: deleting stale file %s (%dB, age %v)", +- file.path, file.size, age) +- } +- os.Remove(file.path) // ignore error +- total -= file.size +- } +- files = nil // release memory before sleep +- +- // Once only, delete all directories. +- // This will succeed only for the empty ones, +- // and ensures that stale directories (whose +- // files have been deleted) are removed eventually. +- // They don't take up much space but they do slow +- // down the traversal. +- // +- // We do this after the sleep to minimize the +- // race against Set, which may create a directory +- // that is momentarily empty. +- // +- // (Test processes don't live that long, so +- // this may not be reached on the CI builders.) +- if dirs != nil { +- dirnames := make([]string, 0, len(dirs)) +- for dir := range dirs { +- dirnames = append(dirnames, dir) +- } +- dirs = nil +- +- // Descending length order => children before parents. +- sort.Slice(dirnames, func(i, j int) bool { +- return len(dirnames[i]) > len(dirnames[j]) +- }) +- var deleted int +- for _, dir := range dirnames { +- if os.Remove(dir) == nil { // ignore error +- deleted++ +- } +- } +- if debug { +- log.Printf("deleted %d empty directories", deleted) +- } +- } +- +- // Wait up to the max period, +- // or for Set activity in this process. +- select { +- case <-active: +- case <-time.After(maxPeriod): +- } +- } +-} +- +-func init() { +- // Register a handler to durably record this process's first +- // assertion failure in the cache so that we can ask users to +- // share this information via the stats command. +- bug.Handle(func(bug bug.Bug) { +- // Wait for cache init (bugs in tests happen early). +- _, _ = getCacheDir() +- +- data, err := json.Marshal(bug) +- if err != nil { +- panic(fmt.Sprintf("error marshalling bug %+v: %v", bug, err)) +- } +- +- key := sha256.Sum256(data) +- _ = Set(bugKind, key, data) +- }) +-} +- +-// BugReports returns a new unordered array of the contents +-// of all cached bug reports produced by this executable. +-// It also returns the location of the cache directory +-// used by this process (or "" on initialization error). +-func BugReports() (string, []bug.Bug) { +- // To test this logic, run: +- // $ TEST_GOPLS_BUG=oops gopls bug # trigger a bug +- // $ gopls stats # list the bugs +- +- dir, err := getCacheDir() +- if err != nil { +- return "", nil // ignore initialization errors +- } +- var result []bug.Bug +- _ = filepath.Walk(dir, func(path string, info fs.FileInfo, err error) error { +- if err != nil { +- return nil // ignore readdir/stat errors +- } +- // Parse the key from each "XXXX-bug" cache file name. 
+- if !info.IsDir() && strings.HasSuffix(path, bugKind) { +- var key [32]byte +- n, err := hex.Decode(key[:], []byte(filepath.Base(path)[:len(key)*2])) +- if err != nil || n != len(key) { +- return nil // ignore malformed file names +- } +- content, err := Get(bugKind, key) +- if err == nil { // ignore read errors +- var b bug.Bug +- if err := json.Unmarshal(content, &b); err != nil { +- log.Printf("error marshalling bug %q: %v", string(content), err) +- } +- result = append(result, b) +- } +- } +- return nil +- }) +- return dir, result +-} +diff -urN a/gopls/internal/filecache/filecache_test.go b/gopls/internal/filecache/filecache_test.go +--- a/gopls/internal/filecache/filecache_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/filecache/filecache_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,264 +0,0 @@ +-// Copyright 2022 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package filecache_test +- +-// This file defines tests of the API of the filecache package. +-// +-// Some properties (e.g. garbage collection) cannot be exercised +-// through the API, so this test does not attempt to do so. +- +-import ( +- "bytes" +- cryptorand "crypto/rand" +- "fmt" +- "log" +- mathrand "math/rand" +- "os" +- "os/exec" +- "strconv" +- "strings" +- "testing" +- +- "golang.org/x/sync/errgroup" +- "golang.org/x/tools/gopls/internal/filecache" +- "golang.org/x/tools/internal/testenv" +-) +- +-func TestBasics(t *testing.T) { +- const kind = "TestBasics" +- key := uniqueKey() // never used before +- value := []byte("hello") +- +- // Get of a never-seen key returns not found. +- if _, err := filecache.Get(kind, key); err != filecache.ErrNotFound { +- if strings.Contains(err.Error(), "operation not supported") || +- strings.Contains(err.Error(), "not implemented") { +- t.Skipf("skipping: %v", err) +- } +- t.Errorf("Get of random key returned err=%q, want not found", err) +- } +- +- // Set of a never-seen key and a small value succeeds. +- if err := filecache.Set(kind, key, value); err != nil { +- t.Errorf("Set failed: %v", err) +- } +- +- // Get of the key returns a copy of the value. +- if got, err := filecache.Get(kind, key); err != nil { +- t.Errorf("Get after Set failed: %v", err) +- } else if string(got) != string(value) { +- t.Errorf("Get after Set returned different value: got %q, want %q", got, value) +- } +- +- // The kind is effectively part of the key. +- if _, err := filecache.Get("different-kind", key); err != filecache.ErrNotFound { +- t.Errorf("Get with wrong kind returned err=%q, want not found", err) +- } +-} +- +-// TestConcurrency exercises concurrent access to the same entry. +-func TestConcurrency(t *testing.T) { +- if os.Getenv("GO_BUILDER_NAME") == "plan9-arm" { +- t.Skip(`skipping on plan9-arm builder due to golang/go#58748: failing with 'mount rpc error'`) +- } +- const kind = "TestConcurrency" +- key := uniqueKey() +- const N = 100 // concurrency level +- +- // Construct N distinct values, each larger +- // than a typical 4KB OS file buffer page. +- var values [N][8192]byte +- for i := range values { +- if _, err := mathrand.Read(values[i][:]); err != nil { +- t.Fatalf("rand: %v", err) +- } +- } +- +- // get calls Get and verifies that the cache entry +- // matches one of the values passed to Set. 
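The contract exercised by TestBasics above reads naturally as a usage sketch. Everything here other than the Get/Set/ErrNotFound API, the string kind, and the [32]byte key type is illustrative (filecache is an internal package, so this only describes the contract, not an externally importable API):

package example

import (
	"crypto/sha256"
	"fmt"

	"golang.org/x/tools/gopls/internal/filecache"
)

func cacheRoundTrip() error {
	const kind = "example" // the kind is effectively part of the key
	key := sha256.Sum256([]byte("whatever identifies this entry"))

	// A never-stored (kind, key) pair reports ErrNotFound.
	if _, err := filecache.Get(kind, key); err != nil && err != filecache.ErrNotFound {
		return err
	}

	// Set durably associates the pair with a value ...
	if err := filecache.Set(kind, key, []byte("hello")); err != nil {
		return err
	}

	// ... and Get returns a copy of it, possibly in another process.
	got, err := filecache.Get(kind, key)
	if err != nil {
		return err
	}
	fmt.Printf("%s\n", got)
	return nil
}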
+- get := func(mustBeFound bool) error { +- got, err := filecache.Get(kind, key) +- if err != nil { +- if err == filecache.ErrNotFound && !mustBeFound { +- return nil // not found +- } +- return err +- } +- for _, want := range values { +- if bytes.Equal(want[:], got) { +- return nil // a match +- } +- } +- return fmt.Errorf("Get returned a value that was never Set") +- } +- +- // Perform N concurrent calls to Set and Get. +- // All sets must succeed. +- // All gets must return nothing, or one of the Set values; +- // there is no third possibility. +- var group errgroup.Group +- for i := range values { +- group.Go(func() error { return filecache.Set(kind, key, values[i][:]) }) +- group.Go(func() error { return get(false) }) +- } +- if err := group.Wait(); err != nil { +- if strings.Contains(err.Error(), "operation not supported") || +- strings.Contains(err.Error(), "not implemented") { +- t.Skipf("skipping: %v", err) +- } +- t.Fatal(err) +- } +- +- // A final Get must report one of the values that was Set. +- if err := get(true); err != nil { +- t.Fatalf("final Get failed: %v", err) +- } +-} +- +-const ( +- testIPCKind = "TestIPC" +- testIPCValueA = "hello" +- testIPCValueB = "world" +-) +- +-// TestIPC exercises interprocess communication through the cache. +-// It calls Set(A) in the parent, { Get(A); Set(B) } in the child +-// process, then Get(B) in the parent. +-func TestIPC(t *testing.T) { +- testenv.NeedsExec(t) +- +- keyA := uniqueKey() +- keyB := uniqueKey() +- value := []byte(testIPCValueA) +- +- // Set keyA. +- if err := filecache.Set(testIPCKind, keyA, value); err != nil { +- if strings.Contains(err.Error(), "operation not supported") { +- t.Skipf("skipping: %v", err) +- } +- t.Fatalf("Set: %v", err) +- } +- +- // Call ipcChild in a child process, +- // passing it the keys in the environment +- // (quoted, to avoid NUL termination of C strings). +- // It will Get(A) then Set(B). +- cmd := exec.Command(os.Args[0], os.Args[1:]...) +- cmd.Env = append(os.Environ(), +- "ENTRYPOINT=ipcChild", +- fmt.Sprintf("KEYA=%q", keyA), +- fmt.Sprintf("KEYB=%q", keyB)) +- cmd.Stdout = os.Stderr +- cmd.Stderr = os.Stderr +- if err := cmd.Run(); err != nil { +- t.Fatal(err) +- } +- +- // Verify keyB. +- got, err := filecache.Get(testIPCKind, keyB) +- if err != nil { +- t.Fatal(err) +- } +- if string(got) != "world" { +- t.Fatalf("Get(keyB) = %q, want %q", got, "world") +- } +-} +- +-// We define our own main function so that portions of +-// some tests can run in a separate (child) process. +-func TestMain(m *testing.M) { +- switch os.Getenv("ENTRYPOINT") { +- case "ipcChild": +- ipcChild() +- default: +- os.Exit(m.Run()) +- } +-} +- +-// ipcChild is the portion of TestIPC that runs in a child process. +-func ipcChild() { +- getenv := func(name string) (key [32]byte) { +- s, _ := strconv.Unquote(os.Getenv(name)) +- copy(key[:], []byte(s)) +- return +- } +- +- // Verify key A. +- got, err := filecache.Get(testIPCKind, getenv("KEYA")) +- if err != nil || string(got) != testIPCValueA { +- log.Fatalf("child: Get(key) = %q, %v; want %q", got, err, testIPCValueA) +- } +- +- // Set key B. +- if err := filecache.Set(testIPCKind, getenv("KEYB"), []byte(testIPCValueB)); err != nil { +- log.Fatalf("child: Set(keyB) failed: %v", err) +- } +-} +- +-// uniqueKey returns a key that has never been used before. 
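TestIPC and TestMain above use a general pattern for multi-process tests: TestMain dispatches on an environment variable so that part of a test can re-exec the test binary as a child process. A stripped-down sketch with hypothetical names:

package example_test

import (
	"fmt"
	"os"
	"os/exec"
	"testing"
)

func TestMain(m *testing.M) {
	if os.Getenv("ENTRYPOINT") == "child" {
		childMain() // run the child role instead of the test suite
		return
	}
	os.Exit(m.Run())
}

func childMain() {
	fmt.Println("hello from the child process")
}

func TestReexec(t *testing.T) {
	cmd := exec.Command(os.Args[0], os.Args[1:]...) // re-exec this test binary
	cmd.Env = append(os.Environ(), "ENTRYPOINT=child")
	out, err := cmd.CombinedOutput()
	if err != nil {
		t.Fatalf("child failed: %v\n%s", err, out)
	}
	t.Logf("child said: %s", out)
}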
+-func uniqueKey() (key [32]byte) { +- if _, err := cryptorand.Read(key[:]); err != nil { +- log.Fatalf("rand: %v", err) +- } +- return +-} +- +-func BenchmarkUncontendedGet(b *testing.B) { +- const kind = "BenchmarkUncontendedGet" +- key := uniqueKey() +- +- var value [8192]byte +- if _, err := mathrand.Read(value[:]); err != nil { +- b.Fatalf("rand: %v", err) +- } +- if err := filecache.Set(kind, key, value[:]); err != nil { +- b.Fatal(err) +- } +- +- b.SetBytes(int64(len(value))) +- +- var group errgroup.Group +- group.SetLimit(50) +- for b.Loop() { +- group.Go(func() error { +- _, err := filecache.Get(kind, key) +- return err +- }) +- } +- if err := group.Wait(); err != nil { +- b.Fatal(err) +- } +-} +- +-// These two benchmarks are asymmetric: the one for Get imposes a +-// modest bound on concurrency (50) whereas the one for Set imposes a +-// much higher concurrency (1000) to test the implementation's +-// self-imposed bound. +- +-func BenchmarkUncontendedSet(b *testing.B) { +- const kind = "BenchmarkUncontendedSet" +- key := uniqueKey() +- var value [8192]byte +- +- const P = 1000 // parallelism +- b.SetBytes(P * int64(len(value))) +- +- for b.Loop() { +- // Perform P concurrent calls to Set. All must succeed. +- var group errgroup.Group +- for range [P]bool{} { +- group.Go(func() error { +- return filecache.Set(kind, key, value[:]) +- }) +- } +- if err := group.Wait(); err != nil { +- if strings.Contains(err.Error(), "operation not supported") || +- strings.Contains(err.Error(), "not implemented") { +- b.Skipf("skipping: %v", err) +- } +- b.Fatal(err) +- } +- } +-} +diff -urN a/gopls/internal/filewatcher/export_test.go b/gopls/internal/filewatcher/export_test.go +--- a/gopls/internal/filewatcher/export_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/filewatcher/export_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,13 +0,0 @@ +-// Copyright 2025 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package filewatcher +- +-// This file defines things (and opens backdoors) needed only by tests. +- +-// SetAfterAddHook sets a hook to be called after a path is added to the watcher. +-// This is used in tests to inspect the error returned by the underlying watcher. +-func SetAfterAddHook(f func(string, error)) { +- afterAddHook = f +-} +diff -urN a/gopls/internal/filewatcher/filewatcher.go b/gopls/internal/filewatcher/filewatcher.go +--- a/gopls/internal/filewatcher/filewatcher.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/filewatcher/filewatcher.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,420 +0,0 @@ +-// Copyright 2025 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package filewatcher +- +-import ( +- "errors" +- "io/fs" +- "log/slog" +- "os" +- "path/filepath" +- "strings" +- "sync" +- "time" +- +- "github.com/fsnotify/fsnotify" +- "golang.org/x/tools/gopls/internal/protocol" +-) +- +-// ErrClosed is used when trying to operate on a closed Watcher. +-var ErrClosed = errors.New("file watcher: watcher already closed") +- +-// Watcher collects events from a [fsnotify.Watcher] and converts them into +-// batched LSP [protocol.FileEvent]s. 
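The Get benchmark above bounds concurrency with errgroup's SetLimit (50 goroutines in flight), while the Set benchmark deliberately launches 1000 goroutines per iteration to exercise the implementation's own self-imposed bound. The SetLimit idiom in isolation, with illustrative work (per-iteration loop variables assume Go 1.22+):

package example

import (
	"fmt"

	"golang.org/x/sync/errgroup"
)

func fanOut(items []string) error {
	var g errgroup.Group
	g.SetLimit(50) // Go blocks once 50 goroutines are already running
	for _, item := range items {
		g.Go(func() error {
			fmt.Println("processing", item)
			return nil
		})
	}
	return g.Wait() // first non-nil error, if any
}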
+-type Watcher struct { +- logger *slog.Logger +- +- stop chan struct{} // closed by Close to terminate run loop +- +- // errs is an internal channel for surfacing errors from the file watcher, +- // distinct from the fsnotify watcher's error channel. +- errs chan error +- +- runners sync.WaitGroup // counts the number of active run goroutines (max 1) +- +- watcher *fsnotify.Watcher +- +- mu sync.Mutex // guards all fields below +- +- // watchers counts the number of active watch registration goroutines, +- // including their error handling. +- // After [Watcher.Close] called, watchers's counter will no longer increase. +- watchers sync.WaitGroup +- +- // dirCancel maps a directory path to its cancellation channel. +- // A nil map indicates the watcher is closing and prevents new directory +- // watch registrations. +- dirCancel map[string]chan struct{} +- +- // events is the current batch of unsent file events, which will be sent +- // when the timer expires. +- events []protocol.FileEvent +-} +- +-// New creates a new file watcher and starts its event-handling loop. The +-// [Watcher.Close] method must be called to clean up resources. +-// +-// The provided handler is called sequentially with either a batch of file +-// events or an error. Events and errors may be interleaved. The watcher blocks +-// until the handler returns, so the handler should be fast and non-blocking. +-func New(delay time.Duration, logger *slog.Logger, handler func([]protocol.FileEvent, error)) (*Watcher, error) { +- watcher, err := fsnotify.NewWatcher() +- if err != nil { +- return nil, err +- } +- w := &Watcher{ +- logger: logger, +- watcher: watcher, +- dirCancel: make(map[string]chan struct{}), +- errs: make(chan error), +- stop: make(chan struct{}), +- } +- +- w.runners.Add(1) +- go w.run(delay, handler) +- +- return w, nil +-} +- +-// run is the main event-handling loop for the watcher. It should be run in a +-// separate goroutine. +-func (w *Watcher) run(delay time.Duration, handler func([]protocol.FileEvent, error)) { +- defer w.runners.Done() +- +- // timer is used to debounce events. +- timer := time.NewTimer(delay) +- defer timer.Stop() +- +- for { +- select { +- case <-w.stop: +- return +- +- case <-timer.C: +- if events := w.drainEvents(); len(events) > 0 { +- handler(events, nil) +- } +- timer.Reset(delay) +- +- case err, ok := <-w.watcher.Errors: +- // When the watcher is closed, its Errors channel is closed, which +- // unblocks this case. We continue to the next loop iteration, +- // allowing the <-w.stop case to handle the shutdown. +- if !ok { +- continue +- } +- if err != nil { +- handler(nil, err) +- } +- +- case err, ok := <-w.errs: +- if !ok { +- continue +- } +- if err != nil { +- handler(nil, err) +- } +- +- case event, ok := <-w.watcher.Events: +- if !ok { +- continue +- } +- +- // fsnotify does not guarantee clean filepaths. +- event.Name = filepath.Clean(event.Name) +- +- // fsnotify.Event should not be handled concurrently, to preserve their +- // original order. For example, if a file is deleted and recreated, +- // concurrent handling could process the events in reverse order. +- // +- // Only reset the timer if a relevant event happened. 
+- // https://github.com/fsnotify/fsnotify?tab=readme-ov-file#why-do-i-get-many-chmod-events +- e, isDir := w.convertEvent(event) +- if e == (protocol.FileEvent{}) { +- continue +- } +- +- if isDir { +- switch e.Type { +- case protocol.Created: +- // Newly created directories are watched asynchronously to prevent +- // a potential deadlock on Windows(see fsnotify/fsnotify#502). +- // Errors are reported internally. +- if done, release := w.addWatchHandle(event.Name); done != nil { +- go func() { +- w.errs <- w.watchDir(event.Name, done) +- +- // Only release after the error is sent. +- release() +- }() +- } +- case protocol.Deleted: +- // Upon removal, we only need to remove the entries from +- // the map. The [fsnotify.Watcher] removes the watch for +- // us. fsnotify/fsnotify#268 +- w.removeWatchHandle(event.Name) +- default: +- // convertEvent enforces that dirs are only Created or Deleted. +- panic("impossible") +- } +- } +- +- w.addEvent(e) +- timer.Reset(delay) +- } +- } +-} +- +-// skipDir reports whether the input dir should be skipped. +-// Directories that are unlikely to contain Go source files relevant for +-// analysis, such as .git directories or testdata, should be skipped to +-// avoid unnecessary file system notifications. This reduces noise and +-// improves efficiency. Conversely, any directory that might contain Go +-// source code should be watched to ensure that gopls can respond to +-// file changes. +-func skipDir(dirName string) bool { +- // TODO(hxjiang): the file watcher should honor gopls directory +- // filter or the new go.mod ignore directive, or actively listening +- // to gopls register capability request with method +- // "workspace/didChangeWatchedFiles" like a real LSP client. +- return strings.HasPrefix(dirName, ".") || strings.HasPrefix(dirName, "_") || dirName == "testdata" +-} +- +-// WatchDir walks through the directory and all its subdirectories, adding +-// them to the watcher. +-func (w *Watcher) WatchDir(path string) error { +- return filepath.WalkDir(filepath.Clean(path), func(path string, d fs.DirEntry, err error) error { +- if d.IsDir() { +- if skipDir(d.Name()) { +- return filepath.SkipDir +- } +- +- done, release := w.addWatchHandle(path) +- if done == nil { // file watcher closing +- return filepath.SkipAll +- } +- defer release() +- +- return w.watchDir(path, done) +- } +- return nil +- }) +-} +- +-// convertEvent translates an [fsnotify.Event] into a [protocol.FileEvent]. +-// It returns the translated event and a boolean indicating if the path was a +-// directory. For directories, the event Type is either Created or Deleted. +-// It returns empty event for events that should be ignored. +-func (w *Watcher) convertEvent(event fsnotify.Event) (_ protocol.FileEvent, isDir bool) { +- // Determine if the event is for a directory. +- if info, err := os.Stat(event.Name); err == nil { +- isDir = info.IsDir() +- } else if os.IsNotExist(err) { +- // Upon deletion, the file/dir has been removed. fsnotify does not +- // provide information regarding the deleted item. +- // Use watchHandles to determine if the deleted item was a directory. +- isDir = w.isWatchedDir(event.Name) +- } else { +- // If statting failed, something is wrong with the file system. +- // Log and move on. +- if w.logger != nil { +- w.logger.Error("failed to stat path, skipping event as its type (file/dir) is unknown", "path", event.Name, "err", err) +- } +- return protocol.FileEvent{}, false +- } +- +- // Filter out events for directories and files that are not of interest. 
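New, WatchDir, and Close above make up the watcher's whole lifecycle, and the tests later in this patch construct it exactly this way. A compact usage sketch; the handler body, delay, and root path are illustrative:

package example

import (
	"log"
	"time"

	"golang.org/x/tools/gopls/internal/filewatcher"
	"golang.org/x/tools/gopls/internal/protocol"
)

func watch(root string) error {
	// The handler receives either a debounced batch of events or an error.
	// The watcher blocks until it returns, so it must stay fast.
	handler := func(events []protocol.FileEvent, err error) {
		if err != nil {
			log.Printf("watch error: %v", err)
			return
		}
		for _, e := range events {
			log.Printf("change %v: %v", e.Type, e.URI)
		}
	}

	w, err := filewatcher.New(50*time.Millisecond, nil, handler)
	if err != nil {
		return err
	}
	if err := w.WatchDir(root); err != nil { // walk root, registering a watch per directory
		w.Close()
		return err
	}

	// ... the handler now fires as files under root change ...

	return w.Close() // cancel pending registrations, stop the run loop
}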
+- if isDir { +- if skipDir(filepath.Base(event.Name)) { +- return protocol.FileEvent{}, true +- } +- } else { +- switch strings.TrimPrefix(filepath.Ext(event.Name), ".") { +- case "go", "mod", "sum", "work", "s": +- default: +- return protocol.FileEvent{}, false +- } +- } +- +- var t protocol.FileChangeType +- switch { +- case event.Op.Has(fsnotify.Rename): +- // A rename is treated as a deletion of the old path because the +- // fsnotify RENAME event doesn't include the new path. A separate +- // CREATE event will be sent for the new path if the destination +- // directory is watched. +- fallthrough +- case event.Op.Has(fsnotify.Remove): +- // TODO(hxjiang): Directory removal events from some LSP clients may +- // not include corresponding removal events for child files and +- // subdirectories. Should we do some filtering when adding the dir +- // deletion event to the events slice. +- t = protocol.Deleted +- case event.Op.Has(fsnotify.Create): +- t = protocol.Created +- case event.Op.Has(fsnotify.Write): +- if isDir { +- return protocol.FileEvent{}, isDir // ignore dir write events +- } +- t = protocol.Changed +- default: +- return protocol.FileEvent{}, isDir // ignore the rest of the events +- } +- +- return protocol.FileEvent{ +- URI: protocol.URIFromPath(event.Name), +- Type: t, +- }, isDir +-} +- +-// watchDir registers a watch for a directory, retrying with backoff if it fails. +-// It can be canceled by calling removeWatchHandle. +-// Returns nil on success or cancellation; otherwise, the last error after all +-// retries. +-func (w *Watcher) watchDir(path string, done chan struct{}) error { +- // On darwin, watching a directory will fail if it contains broken symbolic +- // links. This state can occur temporarily during operations like a git +- // branch switch. To handle this, we retry multiple times with exponential +- // backoff, allowing time for the symbolic link's target to be created. +- +- // TODO(hxjiang): Address a race condition where file or directory creations +- // under current directory might be missed between the current directory +- // creation and the establishment of the file watch. +- // +- // To fix this, we should: +- // 1. Retrospectively check for and trigger creation events for any new +- // files/directories. +- // 2. Recursively add watches for any newly created subdirectories. +- var ( +- delay = 500 * time.Millisecond +- err error +- ) +- +- for i := range 5 { +- if i > 0 { +- select { +- case <-time.After(delay): +- delay *= 2 +- case <-done: +- return nil // cancelled +- } +- } +- // This function may block due to fsnotify/fsnotify#502. +- err = w.watcher.Add(path) +- if afterAddHook != nil { +- afterAddHook(path, err) +- } +- if err == nil { +- break +- } +- } +- +- return err +-} +- +-var afterAddHook func(path string, err error) +- +-// addWatchHandle registers a new directory watch. +-// The returned 'done' channel should be used to signal cancellation of a +-// pending watch, the release function should be called once watch registration +-// is done. +-// It returns nil if the watcher is already closing. +-func (w *Watcher) addWatchHandle(path string) (done chan struct{}, release func()) { +- w.mu.Lock() +- defer w.mu.Unlock() +- +- if w.dirCancel == nil { // file watcher is closing. 
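watchDir above is an instance of a broader shape: retry a flaky operation with exponential backoff while staying cancelable through a done channel. Reduced to its essentials, with the attempt count, initial delay, and operation as parameters rather than the fixed values used above:

package example

import "time"

// retryWithBackoff calls op up to attempts times, doubling the wait between
// tries, and gives up quietly (returning nil) if done is closed while waiting.
func retryWithBackoff(op func() error, attempts int, initial time.Duration, done <-chan struct{}) error {
	delay := initial
	var err error
	for i := 0; i < attempts; i++ {
		if i > 0 {
			select {
			case <-time.After(delay):
				delay *= 2
			case <-done:
				return nil // cancelled
			}
		}
		if err = op(); err == nil {
			return nil
		}
	}
	return err // last error after all retries
}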
+- return nil, nil +- } +- +- done = make(chan struct{}) +- w.dirCancel[path] = done +- +- w.watchers.Add(1) +- +- return done, w.watchers.Done +-} +- +-// removeWatchHandle removes the handle for a directory watch and cancels any +-// pending watch attempt for that path. +-func (w *Watcher) removeWatchHandle(path string) { +- w.mu.Lock() +- defer w.mu.Unlock() +- +- if done, ok := w.dirCancel[path]; ok { +- delete(w.dirCancel, path) +- close(done) +- } +-} +- +-// isWatchedDir reports whether the given path has a watch handle, meaning it is +-// a directory the watcher is managing. +-func (w *Watcher) isWatchedDir(path string) bool { +- w.mu.Lock() +- defer w.mu.Unlock() +- +- _, isDir := w.dirCancel[path] +- return isDir +-} +- +-func (w *Watcher) addEvent(event protocol.FileEvent) { +- w.mu.Lock() +- defer w.mu.Unlock() +- +- // Some systems emit duplicate change events in close +- // succession upon file modification. While the current +- // deduplication is naive and only handles immediate duplicates, +- // a more robust solution is needed. +- // +- // TODO(hxjiang): Enhance deduplication. The current batching of +- // events means all duplicates, regardless of proximity, should +- // be removed. Consider checking the entire buffered slice or +- // using a map for this. +- if len(w.events) == 0 || w.events[len(w.events)-1] != event { +- w.events = append(w.events, event) +- } +-} +- +-func (w *Watcher) drainEvents() []protocol.FileEvent { +- w.mu.Lock() +- events := w.events +- w.events = nil +- w.mu.Unlock() +- +- return events +-} +- +-// Close shuts down the watcher, waits for the internal goroutine to terminate, +-// and returns any final error. +-func (w *Watcher) Close() error { +- // Set dirCancel to nil which prevent any future watch attempts. +- w.mu.Lock() +- dirCancel := w.dirCancel +- w.dirCancel = nil +- w.mu.Unlock() +- +- // Cancel any ongoing watch registration. +- for _, ch := range dirCancel { +- close(ch) +- } +- +- // Wait for all watch registration goroutines to finish, including their +- // error handling. This ensures that: +- // - All [Watcher.watchDir] goroutines have exited and it's error is sent +- // to the internal error channel. So it is safe to close the internal +- // error channel. +- // - There are no ongoing [fsnotify.Watcher.Add] calls, so it is safe to +- // close the fsnotify watcher (see fsnotify/fsnotify#704). +- w.watchers.Wait() +- close(w.errs) +- +- err := w.watcher.Close() +- +- // Wait for the main run loop to terminate. +- close(w.stop) +- w.runners.Wait() +- +- return err +-} +diff -urN a/gopls/internal/filewatcher/filewatcher_test.go b/gopls/internal/filewatcher/filewatcher_test.go +--- a/gopls/internal/filewatcher/filewatcher_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/filewatcher/filewatcher_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,479 +0,0 @@ +-// Copyright 2025 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. 
+- +-package filewatcher_test +- +-import ( +- "cmp" +- "fmt" +- "os" +- "path/filepath" +- "runtime" +- "slices" +- "testing" +- "time" +- +- "golang.org/x/sync/errgroup" +- "golang.org/x/tools/gopls/internal/filewatcher" +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/gopls/internal/util/moremaps" +- "golang.org/x/tools/txtar" +-) +- +-func TestFileWatcher(t *testing.T) { +- switch runtime.GOOS { +- case "darwin", "linux", "windows": +- default: +- t.Skip("unsupported OS") +- } +- +- testCases := []struct { +- name string +- goos []string // if not empty, only run in these OS. +- // If set, sends watch errors for this path to an error channel +- // passed to the 'changes' func. +- watchErrorPath string +- initWorkspace string +- changes func(root string, errs chan error) error +- expectedEvents []protocol.FileEvent +- }{ +- { +- name: "create file in darwin", +- goos: []string{"darwin"}, +- initWorkspace: ` +--- foo.go -- +-package foo +-`, +- changes: func(root string, errs chan error) error { +- return os.WriteFile(filepath.Join(root, "bar.go"), []byte("package main"), 0644) +- }, +- expectedEvents: []protocol.FileEvent{ +- {URI: "bar.go", Type: protocol.Created}, +- }, +- }, +- { +- name: "create file in linux & windows", +- goos: []string{"linux", "windows"}, +- initWorkspace: ` +--- foo.go -- +-package foo +-`, +- changes: func(root string, errs chan error) error { +- return os.WriteFile(filepath.Join(root, "bar.go"), []byte("package main"), 0644) +- }, +- expectedEvents: []protocol.FileEvent{ +- {URI: "bar.go", Type: protocol.Created}, +- {URI: "bar.go", Type: protocol.Changed}, +- }, +- }, +- { +- name: "modify file", +- initWorkspace: ` +--- foo.go -- +-package foo +-`, +- changes: func(root string, errs chan error) error { +- return os.WriteFile(filepath.Join(root, "foo.go"), []byte("package main // modified"), 0644) +- }, +- expectedEvents: []protocol.FileEvent{ +- {URI: "foo.go", Type: protocol.Changed}, +- }, +- }, +- { +- name: "delete file", +- initWorkspace: ` +--- foo.go -- +-package foo +--- bar.go -- +-package bar +-`, +- changes: func(root string, errs chan error) error { +- return os.Remove(filepath.Join(root, "foo.go")) +- }, +- expectedEvents: []protocol.FileEvent{ +- {URI: "foo.go", Type: protocol.Deleted}, +- }, +- }, +- { +- name: "rename file in linux & windows", +- goos: []string{"linux", "windows"}, +- initWorkspace: ` +--- foo.go -- +-package foo +-`, +- changes: func(root string, errs chan error) error { +- return os.Rename(filepath.Join(root, "foo.go"), filepath.Join(root, "bar.go")) +- }, +- expectedEvents: []protocol.FileEvent{ +- {URI: "foo.go", Type: protocol.Deleted}, +- {URI: "bar.go", Type: protocol.Created}, +- }, +- }, +- { +- name: "rename file in darwin", +- goos: []string{"darwin"}, +- initWorkspace: ` +--- foo.go -- +-package foo +-`, +- changes: func(root string, errs chan error) error { +- return os.Rename(filepath.Join(root, "foo.go"), filepath.Join(root, "bar.go")) +- }, +- expectedEvents: []protocol.FileEvent{ +- {URI: "bar.go", Type: protocol.Created}, +- {URI: "foo.go", Type: protocol.Deleted}, +- }, +- }, +- { +- name: "create directory", +- initWorkspace: ` +--- foo.go -- +-package foo +-`, +- changes: func(root string, errs chan error) error { +- return os.Mkdir(filepath.Join(root, "bar"), 0755) +- }, +- expectedEvents: []protocol.FileEvent{ +- {URI: "bar", Type: protocol.Created}, +- }, +- }, +- { +- name: "delete directory", +- initWorkspace: ` +--- foo/bar.go -- +-package foo +-`, +- changes: func(root 
string, errs chan error) error { +- return os.RemoveAll(filepath.Join(root, "foo")) +- }, +- expectedEvents: []protocol.FileEvent{ +- // We only assert that the directory deletion event exists, +- // because file system event behavior is inconsistent across +- // platforms when deleting a non-empty directory. +- // e.g. windows-amd64 may only emit a single dir removal event, +- // freebsd-amd64 report dir removal before file removal, +- // linux-amd64 report the reverse order. +- // Therefore, the most reliable and cross-platform compatible +- // signal is the deletion event for the directory itself. +- {URI: "foo", Type: protocol.Deleted}, +- }, +- }, +- { +- name: "rename directory in linux & windows", +- goos: []string{"linux", "windows"}, +- initWorkspace: ` +--- foo/bar.go -- +-package foo +-`, +- changes: func(root string, errs chan error) error { +- return os.Rename(filepath.Join(root, "foo"), filepath.Join(root, "baz")) +- }, +- expectedEvents: []protocol.FileEvent{ +- {URI: "foo", Type: protocol.Deleted}, +- {URI: "baz", Type: protocol.Created}, +- }, +- }, +- { +- name: "rename directory in darwin", +- goos: []string{"darwin"}, +- initWorkspace: ` +--- foo/bar.go -- +-package foo +-`, +- changes: func(root string, errs chan error) error { +- return os.Rename(filepath.Join(root, "foo"), filepath.Join(root, "baz")) +- }, +- expectedEvents: []protocol.FileEvent{ +- {URI: "baz", Type: protocol.Created}, +- {URI: "foo", Type: protocol.Deleted}, +- }, +- }, +- { +- name: "broken symlink in darwin", +- goos: []string{"darwin"}, +- watchErrorPath: "foo", +- changes: func(root string, errs chan error) error { +- // Prepare a dir with with broken symbolic link. +- // foo <- 1st +- // └── from.go -> root/to.go <- 1st +- tmp := filepath.Join(t.TempDir(), "foo") +- if err := os.Mkdir(tmp, 0755); err != nil { +- return err +- } +- from := filepath.Join(tmp, "from.go") +- +- to := filepath.Join(root, "to.go") +- // Create the symbolic link to a non-existing file. This would +- // cause the watch registration to fail. +- if err := os.Symlink(to, from); err != nil { +- return err +- } +- +- // Move the directory containing the broken symlink into place +- // to avoids a flaky test where the directory could be watched +- // before the symlink is created. See golang/go#74782. +- if err := os.Rename(tmp, filepath.Join(root, "foo")); err != nil { +- return err +- } +- +- // root +- // ├── foo <- 2nd (Move) +- // │ ├── from.go -> ../to.go <- 2nd (Move) +- // │ └── foo.go <- 4th (Create) +- // └── to.go <- 3rd (Create) +- +- // Should be able to capture an error from [fsnotify.Watcher.Add]. +- err := <-errs +- if err == nil { +- return fmt.Errorf("did not capture watch registration failure") +- } +- +- // The file watcher should retry watch registration and +- // eventually succeed after the file got created. +- if err := os.WriteFile(to, []byte("package main"), 0644); err != nil { +- return err +- } +- +- timer := time.NewTimer(30 * time.Second) +- for { +- var ( +- err error +- ok bool +- ) +- select { +- case err, ok = <-errs: +- if !ok { +- return fmt.Errorf("can not register watch for foo") +- } +- case <-timer.C: +- return fmt.Errorf("can not register watch for foo after 30 seconds") +- } +- +- if err == nil { +- break // watch registration success +- } +- } +- +- // Once the watch registration is done, file events under the +- // dir should be captured. 
+- return os.WriteFile(filepath.Join(root, "foo", "foo.go"), []byte("package main"), 0644) +- }, +- expectedEvents: []protocol.FileEvent{ +- {URI: "foo", Type: protocol.Created}, +- // TODO(hxjiang): enable this after implementing retrospectively +- // generate create events. +- // {URI: "foo/from.go", Type: protocol.Created}, +- {URI: "to.go", Type: protocol.Created}, +- {URI: "foo/foo.go", Type: protocol.Created}, +- }, +- }, +- } +- +- for _, tt := range testCases { +- t.Run(tt.name, func(t *testing.T) { +- if len(tt.goos) > 0 && !slices.Contains(tt.goos, runtime.GOOS) { +- t.Skipf("skipping on %s", runtime.GOOS) +- } +- +- root := t.TempDir() +- +- var errs chan error +- if tt.watchErrorPath != "" { +- errs = make(chan error, 10) +- filewatcher.SetAfterAddHook(func(path string, err error) { +- if path == filepath.Join(root, tt.watchErrorPath) { +- errs <- err +- if err == nil { +- close(errs) +- } +- } +- }) +- defer filewatcher.SetAfterAddHook(nil) +- } +- +- archive := txtar.Parse([]byte(tt.initWorkspace)) +- for _, f := range archive.Files { +- path := filepath.Join(root, f.Name) +- if err := os.MkdirAll(filepath.Dir(path), 0755); err != nil { +- t.Fatal(err) +- } +- if err := os.WriteFile(path, f.Data, 0644); err != nil { +- t.Fatal(err) +- } +- } +- +- matched := 0 +- foundAll := make(chan struct{}) +- var gots []protocol.FileEvent +- handler := func(events []protocol.FileEvent, err error) { +- if err != nil { +- t.Errorf("error from watcher: %v", err) +- } +- gots = append(gots, events...) +- // This verifies that the list of wanted events is a subsequence of +- // the received events. It confirms not only that all wanted events +- // are present, but also that their relative order is preserved. +- for _, got := range events { +- if matched == len(tt.expectedEvents) { +- break +- } +- want := protocol.FileEvent{ +- URI: protocol.URIFromPath(filepath.Join(root, string(tt.expectedEvents[matched].URI))), +- Type: tt.expectedEvents[matched].Type, +- } +- if want == got { +- matched++ +- } +- } +- if matched == len(tt.expectedEvents) { +- close(foundAll) +- } +- } +- w, err := filewatcher.New(50*time.Millisecond, nil, handler) +- if err != nil { +- t.Fatal(err) +- } +- +- if err := w.WatchDir(root); err != nil { +- t.Fatal(err) +- } +- +- if tt.changes != nil { +- if err := tt.changes(root, errs); err != nil { +- t.Fatal(err) +- } +- } +- +- select { +- case <-foundAll: +- case <-time.After(30 * time.Second): +- if matched < len(tt.expectedEvents) { +- t.Errorf("found %v matching events\nall want: %#v\nall got: %#v", matched, tt.expectedEvents, gots) +- } +- } +- +- if err := w.Close(); err != nil { +- t.Errorf("failed to close the file watcher: %v", err) +- } +- }) +- } +-} +- +-func TestStress(t *testing.T) { +- switch runtime.GOOS { +- case "darwin", "linux", "windows": +- default: +- t.Skip("unsupported OS") +- } +- +- const ( +- delay = 50 * time.Millisecond +- parallelism = 100 // number of parallel instances of each kind of operation +- ) +- +- root := t.TempDir() +- +- mkdir := func(base string) func() error { +- return func() error { +- return os.Mkdir(filepath.Join(root, base), 0755) +- } +- } +- write := func(base string) func() error { +- return func() error { +- return os.WriteFile(filepath.Join(root, base), []byte("package main"), 0644) +- } +- } +- remove := func(base string) func() error { +- return func() error { +- return os.Remove(filepath.Join(root, base)) +- } +- } +- rename := func(old, new string) func() error { +- return func() error { +- return 
os.Rename(filepath.Join(root, old), filepath.Join(root, new)) +- } +- } +- +- wants := make(map[protocol.FileEvent]bool) +- want := func(base string, t protocol.FileChangeType) { +- wants[protocol.FileEvent{URI: protocol.URIFromPath(filepath.Join(root, base)), Type: t}] = true +- } +- +- for i := range parallelism { +- // Create files and dirs that will be deleted or renamed later. +- if err := cmp.Or( +- mkdir(fmt.Sprintf("delete-dir-%d", i))(), +- mkdir(fmt.Sprintf("old-dir-%d", i))(), +- write(fmt.Sprintf("delete-file-%d.go", i))(), +- write(fmt.Sprintf("old-file-%d.go", i))(), +- ); err != nil { +- t.Fatal(err) +- } +- +- // Add expected notification events to the "wants" set. +- want(fmt.Sprintf("file-%d.go", i), protocol.Created) +- want(fmt.Sprintf("delete-file-%d.go", i), protocol.Deleted) +- want(fmt.Sprintf("old-file-%d.go", i), protocol.Deleted) +- want(fmt.Sprintf("new-file-%d.go", i), protocol.Created) +- want(fmt.Sprintf("dir-%d", i), protocol.Created) +- want(fmt.Sprintf("delete-dir-%d", i), protocol.Deleted) +- want(fmt.Sprintf("old-dir-%d", i), protocol.Deleted) +- want(fmt.Sprintf("new-dir-%d", i), protocol.Created) +- } +- +- foundAll := make(chan struct{}) +- w, err := filewatcher.New(delay, nil, func(events []protocol.FileEvent, err error) { +- if err != nil { +- t.Errorf("error from watcher: %v", err) +- return +- } +- for _, e := range events { +- delete(wants, e) +- } +- if len(wants) == 0 { +- close(foundAll) +- } +- }) +- if err != nil { +- t.Fatal(err) +- } +- +- if err := w.WatchDir(root); err != nil { +- t.Fatal(err) +- } +- +- // Spin up multiple goroutines, to perform 6 file system operations i.e. +- // create, delete, rename of file or directory. For deletion and rename, +- // the goroutine deletes / renames files or directories created before the +- // watcher starts. +- var g errgroup.Group +- for id := range parallelism { +- ops := []func() error{ +- write(fmt.Sprintf("file-%d.go", id)), +- remove(fmt.Sprintf("delete-file-%d.go", id)), +- rename(fmt.Sprintf("old-file-%d.go", id), fmt.Sprintf("new-file-%d.go", id)), +- mkdir(fmt.Sprintf("dir-%d", id)), +- remove(fmt.Sprintf("delete-dir-%d", id)), +- rename(fmt.Sprintf("old-dir-%d", id), fmt.Sprintf("new-dir-%d", id)), +- } +- for _, f := range ops { +- g.Go(f) +- } +- } +- if err := g.Wait(); err != nil { +- t.Fatal(err) +- } +- +- select { +- case <-foundAll: +- case <-time.After(30 * time.Second): +- if len(wants) > 0 { +- t.Errorf("missing expected events: %#v", moremaps.KeySlice(wants)) +- } +- } +- +- if err := w.Close(); err != nil { +- t.Errorf("failed to close the file watcher: %v", err) +- } +-} +diff -urN a/gopls/internal/fuzzy/input.go b/gopls/internal/fuzzy/input.go +--- a/gopls/internal/fuzzy/input.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/fuzzy/input.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,183 +0,0 @@ +-// Copyright 2019 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package fuzzy +- +-import ( +- "unicode" +-) +- +-// RuneRole specifies the role of a rune in the context of an input. +-type RuneRole byte +- +-const ( +- // RNone specifies a rune without any role in the input (i.e., whitespace/non-ASCII). +- RNone RuneRole = iota +- // RSep specifies a rune with the role of segment separator. +- RSep +- // RTail specifies a rune which is a lower-case tail in a word in the input. 
+- RTail +- // RUCTail specifies a rune which is an upper-case tail in a word in the input. +- RUCTail +- // RHead specifies a rune which is the first character in a word in the input. +- RHead +-) +- +-// RuneRoles detects the roles of each byte rune in an input string and stores it in the output +-// slice. The rune role depends on the input type. Stops when it parsed all the runes in the string +-// or when it filled the output. If output is nil, then it gets created. +-func RuneRoles(candidate []byte, reuse []RuneRole) []RuneRole { +- var output []RuneRole +- if cap(reuse) < len(candidate) { +- output = make([]RuneRole, 0, len(candidate)) +- } else { +- output = reuse[:0] +- } +- +- prev, prev2 := rtNone, rtNone +- for i := range candidate { +- r := rune(candidate[i]) +- +- role := RNone +- +- curr := rtLower +- if candidate[i] <= unicode.MaxASCII { +- curr = runeType(rt[candidate[i]] - '0') +- } +- +- if curr == rtLower { +- if prev == rtNone || prev == rtPunct { +- role = RHead +- } else { +- role = RTail +- } +- } else if curr == rtUpper { +- role = RHead +- +- if prev == rtUpper { +- // This and previous characters are both upper case. +- +- if i+1 == len(candidate) { +- // This is last character, previous was also uppercase -> this is UCTail +- // i.e., (current char is C): aBC / BC / ABC +- role = RUCTail +- } +- } +- } else if curr == rtPunct { +- switch r { +- case '.', ':': +- role = RSep +- } +- } +- if curr != rtLower { +- if i > 1 && output[i-1] == RHead && prev2 == rtUpper && (output[i-2] == RHead || output[i-2] == RUCTail) { +- // The previous two characters were uppercase. The current one is not a lower case, so the +- // previous one can't be a HEAD. Make it a UCTail. +- // i.e., (last char is current char - B must be a UCTail): ABC / ZABC / AB. +- output[i-1] = RUCTail +- } +- } +- +- output = append(output, role) +- prev2 = prev +- prev = curr +- } +- return output +-} +- +-type runeType byte +- +-const ( +- rtNone runeType = iota +- rtPunct +- rtLower +- rtUpper +-) +- +-const rt = "00000000000000000000000000000000000000000000001122222222221000000333333333333333333333333330000002222222222222222222222222200000" +- +-// LastSegment returns the substring representing the last segment from the input, where each +-// byte has an associated RuneRole in the roles slice. This makes sense only for inputs of Symbol +-// or Filename type. +-func LastSegment(input string, roles []RuneRole) string { +- // Exclude ending separators. +- end := len(input) - 1 +- for end >= 0 && roles[end] == RSep { +- end-- +- } +- if end < 0 { +- return "" +- } +- +- start := end - 1 +- for start >= 0 && roles[start] != RSep { +- start-- +- } +- +- return input[start+1 : end+1] +-} +- +-// fromChunks copies string chunks into the given buffer. +-func fromChunks(chunks []string, buffer []byte) []byte { +- ii := 0 +- for _, chunk := range chunks { +- for i := range len(chunk) { +- if ii >= cap(buffer) { +- break +- } +- buffer[ii] = chunk[i] +- ii++ +- } +- } +- return buffer[:ii] +-} +- +-// toLower transforms the input string to lower case, which is stored in the output byte slice. +-// The lower casing considers only ASCII values - non ASCII values are left unmodified. +-// Stops when parsed all input or when it filled the output slice. If output is nil, then it gets +-// created. 
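RuneRoles, Words, and LastSegment together split an identifier into the words the matcher scores against. A small illustration; the expected behavior follows the package's own test tables ('.' and ':' act as segment separators, upper-case letters start new words):

package example

import (
	"fmt"

	"golang.org/x/tools/gopls/internal/fuzzy"
)

func segmentDemo() {
	input := "foo.TextEdit"
	roles := fuzzy.RuneRoles([]byte(input), nil)

	// Prints the words "foo", "Text", "Edit"; the '.' separator is dropped.
	fuzzy.Words(roles, func(start, end int) {
		fmt.Printf("word: %q\n", input[start:end])
	})

	// Prints "TextEdit", the last '.'-separated segment.
	fmt.Println(fuzzy.LastSegment(input, roles))
}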
+-func toLower(input []byte, reuse []byte) []byte { +- output := reuse +- if cap(reuse) < len(input) { +- output = make([]byte, len(input)) +- } +- +- for i := range input { +- r := rune(input[i]) +- if input[i] <= unicode.MaxASCII { +- if 'A' <= r && r <= 'Z' { +- r += 'a' - 'A' +- } +- } +- output[i] = byte(r) +- } +- return output[:len(input)] +-} +- +-// WordConsumer defines a consumer for a word delimited by the [start,end) byte offsets in an input +-// (start is inclusive, end is exclusive). +-type WordConsumer func(start, end int) +- +-// Words find word delimiters in an input based on its bytes' mappings to rune roles. The offset +-// delimiters for each word are fed to the provided consumer function. +-func Words(roles []RuneRole, consume WordConsumer) { +- var wordStart int +- for i, r := range roles { +- switch r { +- case RUCTail, RTail: +- case RHead, RNone, RSep: +- if i != wordStart { +- consume(wordStart, i) +- } +- wordStart = i +- if r != RHead { +- // Skip this character. +- wordStart = i + 1 +- } +- } +- } +- if wordStart != len(roles) { +- consume(wordStart, len(roles)) +- } +-} +diff -urN a/gopls/internal/fuzzy/input_test.go b/gopls/internal/fuzzy/input_test.go +--- a/gopls/internal/fuzzy/input_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/fuzzy/input_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,134 +0,0 @@ +-// Copyright 2019 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package fuzzy_test +- +-import ( +- "bytes" +- "slices" +- "sort" +- "testing" +- +- "golang.org/x/tools/gopls/internal/fuzzy" +-) +- +-var rolesTests = []struct { +- str string +- want string +-}{ +- {str: "abc::def::goo", want: "Ccc//Ccc//Ccc"}, +- {str: "proto::Message", want: "Ccccc//Ccccccc"}, +- {str: "AbstractSWTFactory", want: "CcccccccCuuCcccccc"}, +- {str: "Abs012", want: "Cccccc"}, +- {str: "/", want: " "}, +- {str: "fOO", want: "CCu"}, +- {str: "fo_oo.o_oo", want: "Cc Cc/C Cc"}, +-} +- +-func rolesString(roles []fuzzy.RuneRole) string { +- var buf bytes.Buffer +- for _, r := range roles { +- buf.WriteByte(" /cuC"[int(r)]) +- } +- return buf.String() +-} +- +-func TestRoles(t *testing.T) { +- for _, tc := range rolesTests { +- gotRoles := make([]fuzzy.RuneRole, len(tc.str)) +- fuzzy.RuneRoles([]byte(tc.str), gotRoles) +- got := rolesString(gotRoles) +- if got != tc.want { +- t.Errorf("roles(%s) = %v; want %v", tc.str, got, tc.want) +- } +- } +-} +- +-var wordSplitTests = []struct { +- input string +- want []string +-}{ +- { +- input: "foo bar baz", +- want: []string{"foo", "bar", "baz"}, +- }, +- { +- input: "fooBarBaz", +- want: []string{"foo", "Bar", "Baz"}, +- }, +- { +- input: "FOOBarBAZ", +- want: []string{"FOO", "Bar", "BAZ"}, +- }, +- { +- input: "foo123_bar2Baz3", +- want: []string{"foo123", "bar2", "Baz3"}, +- }, +-} +- +-func TestWordSplit(t *testing.T) { +- for _, tc := range wordSplitTests { +- roles := fuzzy.RuneRoles([]byte(tc.input), nil) +- +- var got []string +- consumer := func(i, j int) { +- got = append(got, tc.input[i:j]) +- } +- fuzzy.Words(roles, consumer) +- +- if eq := diffStringLists(tc.want, got); !eq { +- t.Errorf("input %v: (want %v -> got %v)", tc.input, tc.want, got) +- } +- } +-} +- +-func diffStringLists(a, b []string) bool { +- sort.Strings(a) +- sort.Strings(b) +- return slices.Equal(a, b) +-} +- +-var lastSegmentSplitTests = []struct { +- str string +- want string +-}{ +- { +- str: "identifier", +- want: "identifier", +- }, +- 
{ +- str: "two_words", +- want: "two_words", +- }, +- { +- str: "first::second", +- want: "second", +- }, +- { +- str: "foo.bar.FOOBar_buz123_test", +- want: "FOOBar_buz123_test", +- }, +-} +- +-func TestLastSegment(t *testing.T) { +- for _, tc := range lastSegmentSplitTests { +- roles := fuzzy.RuneRoles([]byte(tc.str), nil) +- +- got := fuzzy.LastSegment(tc.str, roles) +- +- if got != tc.want { +- t.Errorf("str %v: want %v; got %v", tc.str, tc.want, got) +- } +- } +-} +- +-func BenchmarkRoles(b *testing.B) { +- str := "AbstractSWTFactory" +- out := make([]fuzzy.RuneRole, len(str)) +- +- for b.Loop() { +- fuzzy.RuneRoles([]byte(str), out) +- } +- b.SetBytes(int64(len(str))) +-} +diff -urN a/gopls/internal/fuzzy/matcher.go b/gopls/internal/fuzzy/matcher.go +--- a/gopls/internal/fuzzy/matcher.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/fuzzy/matcher.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,438 +0,0 @@ +-// Copyright 2019 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-// Package fuzzy implements a fuzzy matching algorithm. +-package fuzzy +- +-import ( +- "bytes" +- "fmt" +-) +- +-const ( +- // MaxInputSize is the maximum size of the input scored against the fuzzy matcher. Longer inputs +- // will be truncated to this size. +- MaxInputSize = 127 +- // MaxPatternSize is the maximum size of the pattern used to construct the fuzzy matcher. Longer +- // inputs are truncated to this size. +- MaxPatternSize = 63 +-) +- +-type scoreVal int +- +-func (s scoreVal) val() int { +- return int(s) >> 1 +-} +- +-func (s scoreVal) prevK() int { +- return int(s) & 1 +-} +- +-func score(val int, prevK int /*0 or 1*/) scoreVal { +- return scoreVal(val<<1 + prevK) +-} +- +-// Matcher implements a fuzzy matching algorithm for scoring candidates against a pattern. +-// The matcher does not support parallel usage. +-type Matcher struct { +- pattern string +- patternLower []byte // lower-case version of the pattern +- patternShort []byte // first characters of the pattern +- caseSensitive bool // set if the pattern is mix-cased +- +- patternRoles []RuneRole // the role of each character in the pattern +- roles []RuneRole // the role of each character in the tested string +- +- scores [MaxInputSize + 1][MaxPatternSize + 1][2]scoreVal +- +- scoreScale float32 +- +- lastCandidateLen int // in bytes +- lastCandidateMatched bool +- +- // Reusable buffers to avoid allocating for every candidate. +- // - inputBuf stores the concatenated input chunks +- // - lowerBuf stores the last candidate in lower-case +- // - rolesBuf stores the calculated roles for each rune in the last +- // candidate. +- inputBuf [MaxInputSize]byte +- lowerBuf [MaxInputSize]byte +- rolesBuf [MaxInputSize]RuneRole +-} +- +-func (m *Matcher) String() string { return m.pattern } +- +-func (m *Matcher) bestK(i, j int) int { +- if m.scores[i][j][0].val() < m.scores[i][j][1].val() { +- return 1 +- } +- return 0 +-} +- +-// NewMatcher returns a new fuzzy matcher for scoring candidates against the provided pattern. 
+-func NewMatcher(pattern string) *Matcher { +- if len(pattern) > MaxPatternSize { +- pattern = pattern[:MaxPatternSize] +- } +- +- m := &Matcher{ +- pattern: pattern, +- patternLower: toLower([]byte(pattern), nil), +- } +- +- for i, c := range m.patternLower { +- if pattern[i] != c { +- m.caseSensitive = true +- break +- } +- } +- +- if len(pattern) > 3 { +- m.patternShort = m.patternLower[:3] +- } else { +- m.patternShort = m.patternLower +- } +- +- m.patternRoles = RuneRoles([]byte(pattern), nil) +- +- if len(pattern) > 0 { +- maxCharScore := 4 +- m.scoreScale = 1 / float32(maxCharScore*len(pattern)) +- } +- +- return m +-} +- +-// Score returns the score returned by matching the candidate to the pattern. +-// This is not designed for parallel use. Multiple candidates must be scored sequentially. +-// Returns a score between 0 and 1 (0 - no match, 1 - perfect match). +-func (m *Matcher) Score(candidate string) float32 { +- return m.ScoreChunks([]string{candidate}) +-} +- +-func (m *Matcher) ScoreChunks(chunks []string) float32 { +- candidate := fromChunks(chunks, m.inputBuf[:]) +- if len(candidate) > MaxInputSize { +- candidate = candidate[:MaxInputSize] +- } +- lower := toLower(candidate, m.lowerBuf[:]) +- m.lastCandidateLen = len(candidate) +- +- if len(m.pattern) == 0 { +- // Empty patterns perfectly match candidates. +- return 1 +- } +- +- if m.match(candidate, lower) { +- sc := m.computeScore(candidate, lower) +- if sc > minScore/2 && !m.poorMatch() { +- m.lastCandidateMatched = true +- if len(m.pattern) == len(candidate) { +- // Perfect match. +- return 1 +- } +- +- if sc < 0 { +- sc = 0 +- } +- normalizedScore := min(float32(sc)*m.scoreScale, 1) +- +- return normalizedScore +- } +- } +- +- m.lastCandidateMatched = false +- return 0 +-} +- +-const minScore = -10000 +- +-// MatchedRanges returns matches ranges for the last scored string as a flattened array of +-// [begin, end) byte offset pairs. +-func (m *Matcher) MatchedRanges() []int { +- if len(m.pattern) == 0 || !m.lastCandidateMatched { +- return nil +- } +- i, j := m.lastCandidateLen, len(m.pattern) +- if m.scores[i][j][0].val() < minScore/2 && m.scores[i][j][1].val() < minScore/2 { +- return nil +- } +- +- var ret []int +- k := m.bestK(i, j) +- for i > 0 { +- take := (k == 1) +- k = m.scores[i][j][k].prevK() +- if take { +- if len(ret) == 0 || ret[len(ret)-1] != i { +- ret = append(ret, i) +- ret = append(ret, i-1) +- } else { +- ret[len(ret)-1] = i - 1 +- } +- j-- +- } +- i-- +- } +- // Reverse slice. +- for i := range len(ret) / 2 { +- ret[i], ret[len(ret)-1-i] = ret[len(ret)-1-i], ret[i] +- } +- return ret +-} +- +-func (m *Matcher) match(candidate []byte, candidateLower []byte) bool { +- i, j := 0, 0 +- for ; i < len(candidateLower) && j < len(m.patternLower); i++ { +- if candidateLower[i] == m.patternLower[j] { +- j++ +- } +- } +- if j != len(m.patternLower) { +- return false +- } +- +- // The input passes the simple test against pattern, so it is time to classify its characters. +- // Character roles are used below to find the last segment. +- m.roles = RuneRoles(candidate, m.rolesBuf[:]) +- +- return true +-} +- +-func (m *Matcher) computeScore(candidate []byte, candidateLower []byte) int { +- pattLen, candLen := len(m.pattern), len(candidate) +- +- for j := 0; j <= len(m.pattern); j++ { +- m.scores[0][j][0] = minScore << 1 +- m.scores[0][j][1] = minScore << 1 +- } +- m.scores[0][0][0] = score(0, 0) // Start with 0. 
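	// How to read this table, as far as it can be inferred from bestK,
	// MatchedRanges, and ScoreTable elsewhere in this file: scores[i][j][k]
	// holds the best score after consuming the first i bytes of the candidate
	// and matching the first j bytes of the pattern, where k == 1 means
	// candidate byte i-1 was matched to pattern byte j-1 and k == 0 means
	// candidate byte i-1 was skipped. The low bit (prevK) records which of
	// the two states the predecessor cell used, which is what lets
	// MatchedRanges walk the table backwards to recover the matched ranges.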
+- +- segmentsLeft, lastSegStart := 1, 0 +- for i := range candLen { +- if m.roles[i] == RSep { +- segmentsLeft++ +- lastSegStart = i + 1 +- } +- } +- +- // A per-character bonus for a consecutive match. +- consecutiveBonus := 2 +- wordIdx := 0 // Word count within segment. +- for i := 1; i <= candLen; i++ { +- +- role := m.roles[i-1] +- isHead := role == RHead +- +- if isHead { +- wordIdx++ +- } else if role == RSep && segmentsLeft > 1 { +- wordIdx = 0 +- segmentsLeft-- +- } +- +- var skipPenalty int +- if i == 1 || (i-1) == lastSegStart { +- // Skipping the start of first or last segment. +- skipPenalty++ +- } +- +- for j := 0; j <= pattLen; j++ { +- // By default, we don't have a match. Fill in the skip data. +- m.scores[i][j][1] = minScore << 1 +- +- // Compute the skip score. +- k := 0 +- if m.scores[i-1][j][0].val() < m.scores[i-1][j][1].val() { +- k = 1 +- } +- +- skipScore := m.scores[i-1][j][k].val() +- // Do not penalize missing characters after the last matched segment. +- if j != pattLen { +- skipScore -= skipPenalty +- } +- m.scores[i][j][0] = score(skipScore, k) +- +- if j == 0 || candidateLower[i-1] != m.patternLower[j-1] { +- // Not a match. +- continue +- } +- pRole := m.patternRoles[j-1] +- +- if role == RTail && pRole == RHead { +- if j > 1 { +- // Not a match: a head in the pattern matches a tail character in the candidate. +- continue +- } +- // Special treatment for the first character of the pattern. We allow +- // matches in the middle of a word if they are long enough, at least +- // min(3, pattern.length) characters. +- if !bytes.HasPrefix(candidateLower[i-1:], m.patternShort) { +- continue +- } +- } +- +- // Compute the char score. +- var charScore int +- // Bonus: the char is in the candidate's last segment. +- if segmentsLeft <= 1 { +- charScore++ +- } +- +- // Bonus: exact case match between pattern and candidate. +- if candidate[i-1] == m.pattern[j-1] || +- // Bonus: candidate char is a head and pattern is all +- // lowercase. There is no segmentation in an all lowercase +- // pattern, so assume any char in pattern can be a head. Note +- // that we are intentionally _not_ giving a bonus to a case +- // insensitive match when the pattern is case sensitive. +- role == RHead && !m.caseSensitive { +- charScore++ +- } +- +- // Penalty: pattern char is Head, candidate char is Tail. +- if role == RTail && pRole == RHead { +- charScore-- +- } +- // Penalty: first pattern character matched in the middle of a word. +- if j == 1 && role == RTail { +- charScore -= 4 +- } +- +- // Third dimension encodes whether there is a gap between the previous match and the current +- // one. +- for k := range 2 { +- sc := m.scores[i-1][j-1][k].val() + charScore +- +- isConsecutive := k == 1 || i-1 == 0 || i-1 == lastSegStart +- if isConsecutive { +- // Bonus: a consecutive match. First character match also gets a bonus to +- // ensure prefix final match score normalizes to 1.0. +- // Logically, this is a part of charScore, but we have to compute it here because it +- // only applies for consecutive matches (k == 1). +- sc += consecutiveBonus +- } +- if k == 0 { +- // Penalty: Matching inside a segment (and previous char wasn't matched). Penalize for the lack +- // of alignment. 
+- if role == RTail || role == RUCTail { +- sc -= 3 +- } +- } +- +- if sc > m.scores[i][j][1].val() { +- m.scores[i][j][1] = score(sc, k) +- } +- } +- } +- } +- +- result := m.scores[len(candidate)][len(m.pattern)][m.bestK(len(candidate), len(m.pattern))].val() +- +- return result +-} +- +-// ScoreTable returns the score table computed for the provided candidate. Used only for debugging. +-func (m *Matcher) ScoreTable(candidate string) string { +- var buf bytes.Buffer +- +- var line1, line2, separator bytes.Buffer +- line1.WriteString("\t") +- line2.WriteString("\t") +- for j := range len(m.pattern) { +- line1.WriteString(fmt.Sprintf("%c\t\t", m.pattern[j])) +- separator.WriteString("----------------") +- } +- +- buf.WriteString(line1.String()) +- buf.WriteString("\n") +- buf.WriteString(separator.String()) +- buf.WriteString("\n") +- +- for i := 1; i <= len(candidate); i++ { +- line1.Reset() +- line2.Reset() +- +- line1.WriteString(fmt.Sprintf("%c\t", candidate[i-1])) +- line2.WriteString("\t") +- +- for j := 1; j <= len(m.pattern); j++ { +- line1.WriteString(fmt.Sprintf("M%6d(%c)\t", m.scores[i][j][0].val(), dir(m.scores[i][j][0].prevK()))) +- line2.WriteString(fmt.Sprintf("H%6d(%c)\t", m.scores[i][j][1].val(), dir(m.scores[i][j][1].prevK()))) +- } +- buf.WriteString(line1.String()) +- buf.WriteString("\n") +- buf.WriteString(line2.String()) +- buf.WriteString("\n") +- buf.WriteString(separator.String()) +- buf.WriteString("\n") +- } +- +- return buf.String() +-} +- +-func dir(prevK int) rune { +- if prevK == 0 { +- return 'M' +- } +- return 'H' +-} +- +-func (m *Matcher) poorMatch() bool { +- if len(m.pattern) < 2 { +- return false +- } +- +- i, j := m.lastCandidateLen, len(m.pattern) +- k := m.bestK(i, j) +- +- var counter, len int +- for i > 0 { +- take := (k == 1) +- k = m.scores[i][j][k].prevK() +- if take { +- len++ +- if k == 0 && len < 3 && m.roles[i-1] == RTail { +- // Short match in the middle of a word +- counter++ +- if counter > 1 { +- return true +- } +- } +- j-- +- } else { +- len = 0 +- } +- i-- +- } +- return false +-} +- +-// BestMatch returns the name most similar to the +-// pattern, using fuzzy matching, or the empty string. +-func BestMatch(pattern string, names []string) string { +- fuzz := NewMatcher(pattern) +- best := "" +- highScore := float32(0) // minimum score is 0 (no match) +- for _, name := range names { +- // TODO: Improve scoring algorithm. +- score := fuzz.Score(name) +- if score > highScore { +- highScore = score +- best = name +- } else if score == 0 { +- // Order matters in the fuzzy matching algorithm. If we find no match +- // when matching the target to the identifier, try matching the identifier +- // to the target. +- revFuzz := NewMatcher(name) +- revScore := revFuzz.Score(pattern) +- if revScore > highScore { +- highScore = revScore +- best = name +- } +- } +- } +- return best +-} +diff -urN a/gopls/internal/fuzzy/matcher_test.go b/gopls/internal/fuzzy/matcher_test.go +--- a/gopls/internal/fuzzy/matcher_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/fuzzy/matcher_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,306 +0,0 @@ +-// Copyright 2019 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. 
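Before the test tables, a compact view of the API they exercise; the pattern/candidate pair mirrors the fuzzyMatcherTestCases entry below in which "tedit" against "foo.TextEdit" highlights as "foo.[T]ext[Edit]", and everything else is illustrative:

package example

import (
	"fmt"

	"golang.org/x/tools/gopls/internal/fuzzy"
)

func matcherDemo() {
	m := fuzzy.NewMatcher("tedit")

	// Score is in [0, 1]: 0 means no match, 1 a perfect match.
	fmt.Printf("score: %.2f\n", m.Score("foo.TextEdit"))

	// MatchedRanges reports the matched bytes of the last scored candidate
	// as flattened [begin, end) offset pairs: here the "T" and "Edit" runs.
	fmt.Println(m.MatchedRanges())

	// BestMatch picks the highest-scoring name for a pattern, or "".
	fmt.Println(fuzzy.BestMatch("tedit", []string{"TextEdit", "Position", "Range"}))
}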
+- +-// Benchmark results: +-// +-// BenchmarkMatcher-12 1000000 1615 ns/op 30.95 MB/s 0 B/op 0 allocs/op +-package fuzzy_test +- +-import ( +- "bytes" +- "fmt" +- "math" +- "testing" +- +- "golang.org/x/tools/gopls/internal/fuzzy" +-) +- +-type comparator struct { +- f func(val, ref float32) bool +- descr string +-} +- +-var ( +- eq = comparator{ +- f: func(val, ref float32) bool { +- return val == ref +- }, +- descr: "==", +- } +- ge = comparator{ +- f: func(val, ref float32) bool { +- return val >= ref +- }, +- descr: ">=", +- } +- gt = comparator{ +- f: func(val, ref float32) bool { +- return val > ref +- }, +- descr: ">", +- } +-) +- +-func (c comparator) eval(val, ref float32) bool { +- return c.f(val, ref) +-} +- +-func (c comparator) String() string { +- return c.descr +-} +- +-type scoreTest struct { +- candidate string +- comparator +- ref float32 +-} +- +-var matcherTests = []struct { +- pattern string +- tests []scoreTest +-}{ +- { +- pattern: "", +- tests: []scoreTest{ +- {"def", eq, 1}, +- {"Ab stuff c", eq, 1}, +- }, +- }, +- { +- pattern: "abc", +- tests: []scoreTest{ +- {"def", eq, 0}, +- {"abd", eq, 0}, +- {"abc", ge, 0}, +- {"Abc", ge, 0}, +- {"Ab stuff c", ge, 0}, +- }, +- }, +- { +- pattern: "Abc", +- tests: []scoreTest{ +- {"def", eq, 0}, +- {"abd", eq, 0}, +- {"abc", ge, 0}, +- {"Abc", ge, 0}, +- {"Ab stuff c", ge, 0}, +- }, +- }, +- { +- pattern: "U", +- tests: []scoreTest{ +- {"ErrUnexpectedEOF", gt, 0}, +- {"ErrUnexpectedEOF.Error", eq, 0}, +- }, +- }, +-} +- +-func TestScore(t *testing.T) { +- for _, tc := range matcherTests { +- m := fuzzy.NewMatcher(tc.pattern) +- for _, sct := range tc.tests { +- score := m.Score(sct.candidate) +- if !sct.comparator.eval(score, sct.ref) { +- t.Errorf("m.Score(%q) = %.2g, want %s %v", sct.candidate, score, sct.comparator, sct.ref) +- } +- } +- } +-} +- +-var compareCandidatesTestCases = []struct { +- pattern string +- // In `[][]string{{"foo", "bar"}, {"baz"}}`, +- // "foo" and "bar" must have same score, "baz" must be strictly higher scoring. 
+- orderedCandidates [][]string +-}{ +- { +- pattern: "Foo", +- orderedCandidates: [][]string{ +- {"Barfoo"}, +- {"Faoo"}, +- {"F_o_o"}, +- {"FaoFooa", "BarFoo"}, +- {"F__oo", "F_oo"}, +- {"FooA", "FooBar", "Foo"}, +- }, +- }, +- { +- pattern: "U", +- orderedCandidates: [][]string{ +- {"ErrUnexpectedEOF.Error"}, +- {"ErrUnexpectedEOF"}, +- }, +- }, +- { +- pattern: "N", +- orderedCandidates: [][]string{ +- {"name"}, +- {"Name"}, +- }, +- }, +-} +- +-func TestCompareCandidateScores(t *testing.T) { +- for _, tc := range compareCandidatesTestCases { +- m := fuzzy.NewMatcher(tc.pattern) +- +- var prevScore float32 +- var prevCandGroup []string +- for i, candGroup := range tc.orderedCandidates { +- var groupScore float32 +- for j, cand := range candGroup { +- score := m.Score(cand) +- if j > 0 && score != groupScore { +- t.Fatalf("score %f of %q different than group", score, cand) +- } +- groupScore = score +- } +- +- if i > 0 && prevScore >= groupScore { +- t.Errorf("%s[=%v] is not scored higher than %s[=%v]", candGroup, groupScore, prevCandGroup, prevScore) +- } +- if groupScore < 0 || groupScore > 1 { +- t.Errorf("%s score is %v; want value between [0, 1]", candGroup, groupScore) +- } +- prevScore = groupScore +- prevCandGroup = candGroup +- } +- } +-} +- +-var fuzzyMatcherTestCases = []struct { +- p string +- str string +- want string +-}{ +- {p: "foo", str: "abc::foo", want: "abc::[foo]"}, +- {p: "foo", str: "foo.foo", want: "foo.[foo]"}, +- {p: "foo", str: "fo_oo.o_oo", want: "[fo]_oo.[o]_oo"}, +- {p: "foo", str: "fo_oo.fo_oo", want: "fo_oo.[fo]_[o]o"}, +- {p: "fo_o", str: "fo_oo.o_oo", want: "[f]o_oo.[o_o]o"}, +- {p: "fOO", str: "fo_oo.o_oo", want: "[f]o_oo.[o]_[o]o"}, +- {p: "tedit", str: "foo.TextEdit", want: "foo.[T]ext[Edit]"}, +- {p: "TEdit", str: "foo.TextEdit", want: "foo.[T]ext[Edit]"}, +- {p: "Tedit", str: "foo.TextEdit", want: "foo.[T]ext[Edit]"}, +- {p: "Tedit", str: "foo.Textedit", want: "foo.[Te]xte[dit]"}, +- {p: "TEdit", str: "foo.Textedit", want: ""}, +- {p: "te", str: "foo.Textedit", want: "foo.[Te]xtedit"}, +- {p: "ee", str: "foo.Textedit", want: ""}, // short middle of the word match +- {p: "ex", str: "foo.Textedit", want: "foo.T[ex]tedit"}, +- {p: "exdi", str: "foo.Textedit", want: ""}, // short middle of the word match +- {p: "exdit", str: "foo.Textedit", want: ""}, // short middle of the word match +- {p: "extdit", str: "foo.Textedit", want: "foo.T[ext]e[dit]"}, +- {p: "e", str: "foo.Textedit", want: "foo.T[e]xtedit"}, +- {p: "E", str: "foo.Textedit", want: "foo.T[e]xtedit"}, +- {p: "ed", str: "foo.Textedit", want: "foo.Text[ed]it"}, +- {p: "edt", str: "foo.Textedit", want: ""}, // short middle of the word match +- {p: "edit", str: "foo.Textedit", want: "foo.Text[edit]"}, +- {p: "edin", str: "foo.TexteditNum", want: "foo.Text[edi]t[N]um"}, +- {p: "n", str: "node.GoNodeMax", want: "[n]ode.GoNodeMax"}, +- {p: "N", str: "node.GoNodeMax", want: "[n]ode.GoNodeMax"}, +- {p: "completio", str: "completion", want: "[completio]n"}, +- {p: "completio", str: "completion.None", want: "[completio]n.None"}, +-} +- +-func TestFuzzyMatcherRanges(t *testing.T) { +- for _, tc := range fuzzyMatcherTestCases { +- matcher := fuzzy.NewMatcher(tc.p) +- score := matcher.Score(tc.str) +- if tc.want == "" { +- if score > 0 { +- t.Errorf("Score(%s, %s) = %v; want: <= 0", tc.p, tc.str, score) +- } +- continue +- } +- if score < 0 { +- t.Errorf("Score(%s, %s) = %v, want: > 0", tc.p, tc.str, score) +- continue +- } +- got := highlightMatches(tc.str, matcher) +- if tc.want != got { +- 
t.Errorf("highlightMatches(%s, %s) = %v, want: %v", tc.p, tc.str, got, tc.want) +- } +- } +-} +- +-var scoreTestCases = []struct { +- p string +- str string +- want float64 +-}{ +- // Score precision up to five digits. Modify if changing the score, but make sure the new values +- // are reasonable. +- {p: "abc", str: "abc", want: 1}, +- {p: "abc", str: "Abc", want: 1}, +- {p: "abc", str: "Abcdef", want: 1}, +- {p: "strc", str: "StrCat", want: 1}, +- {p: "abc_def", str: "abc_def_xyz", want: 1}, +- {p: "abcdef", str: "abc_def_xyz", want: 0.91667}, +- {p: "abcxyz", str: "abc_def_xyz", want: 0.91667}, +- {p: "sc", str: "StrCat", want: 0.75}, +- {p: "abc", str: "AbstrBasicCtor", want: 0.83333}, +- {p: "foo", str: "abc::foo", want: 0.91667}, +- {p: "afoo", str: "abc::foo", want: 0.9375}, +- {p: "abr", str: "abc::bar", want: 0.5}, +- {p: "br", str: "abc::bar", want: 0.25}, +- {p: "aar", str: "abc::bar", want: 0.41667}, +- {p: "edin", str: "foo.TexteditNum", want: 0.125}, +- {p: "ediu", str: "foo.TexteditNum", want: 0}, +- // We want the next two items to have roughly similar scores. +- {p: "up", str: "unique_ptr", want: 0.75}, +- {p: "up", str: "upper_bound", want: 1}, +-} +- +-func TestScores(t *testing.T) { +- for _, tc := range scoreTestCases { +- matcher := fuzzy.NewMatcher(tc.p) +- got := math.Round(float64(matcher.Score(tc.str))*1e5) / 1e5 +- if got != tc.want { +- t.Errorf("Score(%s, %s) = %v, want: %v", tc.p, tc.str, got, tc.want) +- } +- } +-} +- +-func highlightMatches(str string, matcher *fuzzy.Matcher) string { +- matches := matcher.MatchedRanges() +- +- var buf bytes.Buffer +- index := 0 +- for i := 0; i < len(matches)-1; i += 2 { +- s, e := matches[i], matches[i+1] +- fmt.Fprintf(&buf, "%s[%s]", str[index:s], str[s:e]) +- index = e +- } +- buf.WriteString(str[index:]) +- return buf.String() +-} +- +-func BenchmarkMatcher(b *testing.B) { +- pattern := "Foo" +- candidates := []string{ +- "F_o_o", +- "Barfoo", +- "Faoo", +- "F__oo", +- "F_oo", +- "FaoFooa", +- "BarFoo", +- "FooA", +- "FooBar", +- "Foo", +- } +- +- matcher := fuzzy.NewMatcher(pattern) +- +- for b.Loop() { +- for _, c := range candidates { +- matcher.Score(c) +- } +- } +- var numBytes int +- for _, c := range candidates { +- numBytes += len(c) +- } +- b.SetBytes(int64(numBytes)) +-} +diff -urN a/gopls/internal/fuzzy/self_test.go b/gopls/internal/fuzzy/self_test.go +--- a/gopls/internal/fuzzy/self_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/fuzzy/self_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,39 +0,0 @@ +-// Copyright 2023 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package fuzzy_test +- +-import ( +- "testing" +- +- . 
"golang.org/x/tools/gopls/internal/fuzzy" +-) +- +-func BenchmarkSelf_Matcher(b *testing.B) { +- idents := collectIdentifiers(b) +- patterns := generatePatterns() +- +- for b.Loop() { +- for _, pattern := range patterns { +- sm := NewMatcher(pattern) +- for _, ident := range idents { +- _ = sm.Score(ident) +- } +- } +- } +-} +- +-func BenchmarkSelf_SymbolMatcher(b *testing.B) { +- idents := collectIdentifiers(b) +- patterns := generatePatterns() +- +- for b.Loop() { +- for _, pattern := range patterns { +- sm := NewSymbolMatcher(pattern) +- for _, ident := range idents { +- _, _ = sm.Match([]string{ident}) +- } +- } +- } +-} +diff -urN a/gopls/internal/fuzzy/symbol.go b/gopls/internal/fuzzy/symbol.go +--- a/gopls/internal/fuzzy/symbol.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/fuzzy/symbol.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,309 +0,0 @@ +-// Copyright 2021 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package fuzzy +- +-import ( +- "bytes" +- "fmt" +- "log" +- "unicode" +-) +- +-// SymbolMatcher implements a fuzzy matching algorithm optimized for Go symbols +-// of the form: +-// +-// example.com/path/to/package.object.field +-// +-// Knowing that we are matching symbols like this allows us to make the +-// following optimizations: +-// - We can incorporate right-to-left relevance directly into the score +-// calculation. +-// - We can match from right to left, discarding leading bytes if the input is +-// too long. +-// - We just take the right-most match without losing too much precision. This +-// allows us to use an O(n) algorithm. +-// - We can operate directly on chunked strings; in many cases we will +-// be storing the package path and/or package name separately from the +-// symbol or identifiers, so doing this avoids allocating strings. +-// - We can return the index of the right-most match, allowing us to trim +-// irrelevant qualification. +-type SymbolMatcher struct { +- // Using buffers of length 256 is both a reasonable size for most qualified +- // symbols, and makes it easy to avoid bounds checks by using uint8 indexes. +- pattern [256]rune +- patternLen uint8 +- inputBuffer [256]rune // avoid allocating when considering chunks +- roles [256]uint32 // which roles does a rune play (word start, etc.) +- segments [256]uint8 // how many segments from the right is each rune +-} +- +-// Rune roles. +-const ( +- segmentStart uint32 = 1 << iota // input rune starts a segment (i.e. follows '/' or '.') +- wordStart // input rune starts a word, per camel-case naming rules +- separator // input rune is a separator ('/' or '.') +- upper // input rune is an upper case letter +-) +- +-// NewSymbolMatcher creates a SymbolMatcher that may be used to match the given +-// search pattern. +-// +-// Currently this matcher only accepts case-insensitive fuzzy patterns. +-// +-// An empty pattern matches no input. +-func NewSymbolMatcher(pattern string) *SymbolMatcher { +- m := &SymbolMatcher{} +- for _, p := range pattern { +- m.pattern[m.patternLen] = unicode.ToLower(p) +- m.patternLen++ +- if m.patternLen == 255 || int(m.patternLen) == len(pattern) { +- // break at 255 so that we can represent patternLen with a uint8. +- break +- } +- } +- return m +-} +- +-// Match searches for the right-most match of the search pattern within the +-// symbol represented by concatenating the given chunks. 
+-// +-// If a match is found, the first result holds the absolute byte offset within +-// all chunks for the start of the symbol. In other words, the index of the +-// match within strings.Join(chunks, ""). +-// +-// The second return value will be the score of the match, which is always +-// between 0 and 1, inclusive. A score of 0 indicates no match. +-// +-// If no match is found, Match returns (-1, 0). +-func (m *SymbolMatcher) Match(chunks []string) (int, float64) { +- // Explicit behavior for an empty pattern. +- // +- // As a minor optimization, this also avoids nilness checks later on, since +- // the compiler can prove that m != nil. +- if m.patternLen == 0 { +- return -1, 0 +- } +- +- // Matching implements a heavily optimized linear scoring algorithm on the +- // input. This is not guaranteed to produce the highest score, but works well +- // enough, particularly due to the right-to-left significance of qualified +- // symbols. +- // +- // Matching proceeds in three passes through the input: +- // - The first pass populates the input buffer and collects rune roles. +- // - The second pass proceeds right-to-left to find the right-most match. +- // - The third pass proceeds left-to-right from the start of the right-most +- // match, to find the most *compact* match, and computes the score of this +- // match. +- // +- // See below for more details of each pass, as well as the scoring algorithm. +- +- // First pass: populate the input buffer out of the provided chunks +- // (lower-casing in the process), and collect rune roles. +- // +- // We could also check for a forward match here, but since we'd have to write +- // the entire input anyway this has negligible impact on performance. +- var ( +- inputLen = uint8(0) +- modifiers = wordStart | segmentStart +- ) +- +-input: +- for _, chunk := range chunks { +- for _, r := range chunk { +- if r == '.' || r == '/' { +- modifiers |= separator +- } +- // optimization: avoid calls to unicode.ToLower, which can't be inlined. +- l := r +- if r <= unicode.MaxASCII { +- if 'A' <= r && r <= 'Z' { +- l = r + 'a' - 'A' +- } +- } else { +- l = unicode.ToLower(r) +- } +- if l != r { +- modifiers |= upper +- +- // If the current rune is capitalized *and the preceding rune was not*, +- // mark this as a word start. This avoids spuriously high ranking of +- // non-camelcase naming schemas, such as the +- // yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_END_STATE example of +- // golang/go#60201. +- if inputLen == 0 || m.roles[inputLen-1]&upper == 0 { +- modifiers |= wordStart +- } +- } +- m.inputBuffer[inputLen] = l +- m.roles[inputLen] = modifiers +- inputLen++ +- if m.roles[inputLen-1]&separator != 0 { +- modifiers = wordStart | segmentStart +- } else { +- modifiers = 0 +- } +- // TODO: we should prefer the right-most input if it overflows, rather +- // than the left-most as we're doing here. +- if inputLen == 255 { +- break input +- } +- } +- } +- +- // Second pass: find the right-most match, and count segments from the +- // right. +- var ( +- pi = uint8(m.patternLen - 1) // pattern index +- p = m.pattern[pi] // pattern rune +- start = -1 // start offset of match +- rseg = uint8(0) // effective "depth" from the right of the current rune in consideration +- ) +- const maxSeg = 3 // maximum number of segments from the right to count, for scoring purposes. 
+- +- for ii := inputLen - 1; ; ii-- { +- r := m.inputBuffer[ii] +- if rseg < maxSeg && m.roles[ii]&separator != 0 { +- rseg++ +- } +- m.segments[ii] = rseg +- if p == r { +- if pi == 0 { +- // TODO(rfindley): BUG: the docstring for Match says that it returns an +- // absolute byte offset, but clearly it is returning a rune offset here. +- start = int(ii) +- break +- } +- pi-- +- p = m.pattern[pi] +- } +- // Don't check ii >= 0 in the loop condition: ii is a uint8. +- if ii == 0 { +- break +- } +- } +- +- if start < 0 { +- // no match: skip scoring +- return -1, 0 +- } +- +- // Third pass: find the shortest match and compute the score. +- +- // Score is the average score for each rune. +- // +- // A rune score is the multiple of: +- // 1. The base score, which is 1.0 if the rune starts a segment, 0.9 if the +- // rune starts a mid-segment word, else 0.6. +- // +- // Runes preceded by a matching rune are treated the same as the start +- // of a mid-segment word (with a 0.9 score), so that sequential or exact +- // matches are preferred. We call this a sequential bonus. +- // +- // For the final rune match, this sequential bonus is reduced to 0.8 if +- // the next rune in the input is a mid-segment word, or 0.7 if the next +- // rune in the input is not a word or segment start. This ensures that +- // we favor whole-word or whole-segment matches over prefix matches. +- // +- // 2. 1.0 if the rune is part of the last segment, otherwise +- // 1.0-0.1*, with a max segment count of 3. +- // Notably 1.0-0.1*3 = 0.7 > 0.6, so that foo/_/_/_/_ (a match very +- // early in a qualified symbol name) still scores higher than _f_o_o_ (a +- // completely split match). +- // +- // This is a naive algorithm, but it is fast. There's lots of prior art here +- // that could be leveraged. For example, we could explicitly consider +- // rune distance, and exact matches of words or segments. +- // +- // Also note that this might not actually find the highest scoring match, as +- // doing so could require a non-linear algorithm, depending on how the score +- // is calculated. +- +- // debugging support +- const debug = false // enable to log debugging information +- var ( +- runeScores []float64 +- runeIdxs []int +- ) +- +- pi = 0 +- p = m.pattern[pi] +- +- const ( +- segStartScore = 1.0 // base score of runes starting a segment +- wordScore = 0.9 // base score of runes starting or continuing a word +- noStreak = 0.6 +- perSegment = 0.1 // we count at most 3 segments above +- ) +- +- totScore := 0.0 +- lastMatch := uint8(255) +- for ii := uint8(start); ii < inputLen; ii++ { +- r := m.inputBuffer[ii] +- if r == p { +- pi++ +- finalRune := pi >= m.patternLen +- p = m.pattern[pi] +- +- baseScore := noStreak +- +- // Calculate the sequence bonus based on preceding matches. +- // +- // We do this first as it is overridden by role scoring below. +- if lastMatch == ii-1 { +- baseScore = wordScore +- // Reduce the sequence bonus for the final rune of the pattern based on +- // whether it borders a new segment or word. +- if finalRune { +- switch { +- case ii == inputLen-1 || m.roles[ii+1]&separator != 0: +- // Full segment: no reduction +- case m.roles[ii+1]&wordStart != 0: +- baseScore = wordScore - 0.1 +- default: +- baseScore = wordScore - 0.2 +- } +- } +- } +- lastMatch = ii +- +- // Calculate the rune's role score. If the rune starts a segment or word, +- // this overrides the sequence score, as the rune starts a new sequence. 
+- switch { +- case m.roles[ii]&segmentStart != 0: +- baseScore = segStartScore +- case m.roles[ii]&wordStart != 0: +- baseScore = wordScore +- } +- +- // Apply the segment-depth penalty (segments from the right). +- runeScore := baseScore * (1.0 - float64(m.segments[ii])*perSegment) +- if debug { +- runeScores = append(runeScores, runeScore) +- runeIdxs = append(runeIdxs, int(ii)) +- } +- totScore += runeScore +- if finalRune { +- break +- } +- } +- } +- +- if debug { +- // Format rune roles and scores in line: +- // fo[o:.52].[b:1]a[r:.6] +- var summary bytes.Buffer +- last := 0 +- for i, idx := range runeIdxs { +- summary.WriteString(string(m.inputBuffer[last:idx])) // encode runes +- fmt.Fprintf(&summary, "[%s:%.2g]", string(m.inputBuffer[idx]), runeScores[i]) +- last = idx + 1 +- } +- summary.WriteString(string(m.inputBuffer[last:inputLen])) // encode runes +- log.Println(summary.String()) +- } +- +- return start, totScore / float64(m.patternLen) +-} +diff -urN a/gopls/internal/fuzzy/symbol_test.go b/gopls/internal/fuzzy/symbol_test.go +--- a/gopls/internal/fuzzy/symbol_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/fuzzy/symbol_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,253 +0,0 @@ +-// Copyright 2021 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package fuzzy_test +- +-import ( +- "go/ast" +- "go/token" +- "sort" +- "testing" +- +- "golang.org/x/tools/go/packages" +- . "golang.org/x/tools/gopls/internal/fuzzy" +-) +- +-func TestSymbolMatchIndex(t *testing.T) { +- tests := []struct { +- pattern, input string +- want int +- }{ +- {"test", "foo.TestFoo", 4}, +- {"test", "test", 0}, +- {"test", "Test", 0}, +- {"test", "est", -1}, +- {"t", "shortest", 7}, +- {"", "foo", -1}, +- {"", string([]rune{0}), -1}, // verify that we don't default to an empty pattern. +- {"anything", "", -1}, +- } +- +- for _, test := range tests { +- matcher := NewSymbolMatcher(test.pattern) +- if got, _ := matcher.Match([]string{test.input}); got != test.want { +- t.Errorf("NewSymbolMatcher(%q).Match(%q) = %v, _, want %v, _", test.pattern, test.input, got, test.want) +- } +- } +-} +- +-func TestSymbolRanking(t *testing.T) { +- +- // query -> symbols to match, in ascending order of score +- queryRanks := map[string][]string{ +- "test": { +- "this.is.better.than.most", +- "test.foo.bar", +- "thebest", +- "atest", +- "test.foo", +- "testage", +- "tTest", +- "foo.test", +- }, +- "parseside": { // golang/go#60201 +- "yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_END_STATE", +- "parseContext.parse_sidebyside", +- }, +- "cvb": { +- "filecache_test.testIPCValueB", +- "cover.Boundary", +- }, +- "dho": { +- "gocommand.DebugHangingGoCommands", +- "protocol.DocumentHighlightOptions", +- }, +- "flg": { +- "completion.FALLTHROUGH", +- "main.flagGoCmd", +- }, +- "fvi": { +- "godoc.fileIndexVersion", +- "macho.FlagSubsectionsViaSymbols", +- }, +- } +- +- for query, symbols := range queryRanks { +- t.Run(query, func(t *testing.T) { +- matcher := NewSymbolMatcher(query) +- prev := 0.0 +- for _, sym := range symbols { +- _, score := matcher.Match([]string{sym}) +- t.Logf("Match(%q) = %v", sym, score) +- if score <= prev { +- t.Errorf("Match(%q) = _, %v, want > %v", sym, score, prev) +- } +- prev = score +- } +- }) +- } +-} +- +-func TestMatcherSimilarities(t *testing.T) { +- // This test compares the fuzzy matcher with the symbol matcher on a corpus +- // of qualified identifiers extracted from x/tools. 
+- // +- // These two matchers are not expected to agree, but inspecting differences +- // can be useful for finding interesting ranking edge cases. +- t.Skip("unskip this test to compare matchers") +- +- idents := collectIdentifiers(t) +- t.Logf("collected %d unique identifiers", len(idents)) +- +- // We can't use slices.MaxFunc because we want a custom +- // scoring (not equivalence) function. +- topMatch := func(score func(string) float64) string { +- top := "" +- topScore := 0.0 +- for _, cand := range idents { +- if s := score(cand); s > topScore { +- top = cand +- topScore = s +- } +- } +- return top +- } +- +- agreed := 0 +- total := 0 +- bad := 0 +- patterns := generatePatterns() +- for _, pattern := range patterns { +- total++ +- +- fm := NewMatcher(pattern) +- topFuzzy := topMatch(func(input string) float64 { +- return float64(fm.Score(input)) +- }) +- sm := NewSymbolMatcher(pattern) +- topSymbol := topMatch(func(input string) float64 { +- _, score := sm.Match([]string{input}) +- return score +- }) +- switch { +- case topFuzzy == "" && topSymbol != "": +- if false { +- // The fuzzy matcher has a bug where it misses some matches; for this +- // test we only care about the symbol matcher. +- t.Logf("%q matched %q but no fuzzy match", pattern, topSymbol) +- } +- total-- +- bad++ +- case topFuzzy != "" && topSymbol == "": +- t.Fatalf("%q matched %q but no symbol match", pattern, topFuzzy) +- case topFuzzy == topSymbol: +- agreed++ +- default: +- // Enable this log to see mismatches. +- if false { +- t.Logf("mismatch for %q: fuzzy: %q, symbol: %q", pattern, topFuzzy, topSymbol) +- } +- } +- } +- t.Logf("fuzzy matchers agreed on %d out of %d queries (%d bad)", agreed, total, bad) +-} +- +-func collectIdentifiers(tb testing.TB) []string { +- cfg := &packages.Config{ +- Mode: packages.NeedName | packages.NeedSyntax | packages.NeedFiles, +- Tests: true, +- } +- pkgs, err := packages.Load(cfg, "golang.org/x/tools/...") +- if err != nil { +- tb.Fatal(err) +- } +- uniqueIdents := make(map[string]bool) +- decls := 0 +- for _, pkg := range pkgs { +- for _, f := range pkg.Syntax { +- for _, decl := range f.Decls { +- decls++ +- switch decl := decl.(type) { +- case *ast.GenDecl: +- for _, spec := range decl.Specs { +- switch decl.Tok { +- case token.IMPORT: +- case token.TYPE: +- name := spec.(*ast.TypeSpec).Name.Name +- qualified := pkg.Name + "." + name +- uniqueIdents[qualified] = true +- case token.CONST, token.VAR: +- for _, n := range spec.(*ast.ValueSpec).Names { +- qualified := pkg.Name + "." + n.Name +- uniqueIdents[qualified] = true +- } +- } +- } +- } +- } +- } +- } +- var idents []string +- for k := range uniqueIdents { +- idents = append(idents, k) +- } +- sort.Strings(idents) +- return idents +-} +- +-func generatePatterns() []string { +- var patterns []string +- for x := 'a'; x <= 'z'; x++ { +- for y := 'a'; y <= 'z'; y++ { +- for z := 'a'; z <= 'z'; z++ { +- patterns = append(patterns, string(x)+string(y)+string(z)) +- } +- } +- } +- return patterns +-} +- +-// Test that we strongly prefer exact matches. +-// +-// In golang/go#60027, we preferred "Runner" for the query "rune" over several +-// results containing the word "rune" exactly. Following this observation, +-// scoring was tweaked to more strongly emphasize sequential characters and +-// exact matches. +-func TestSymbolRanking_Issue60027(t *testing.T) { +- matcher := NewSymbolMatcher("rune") +- +- // symbols to match, in ascending order of ranking. 
+- symbols := []string{ +- "Runner", +- "singleRuneParam", +- "Config.ifsRune", +- "Parser.rune", +- } +- prev := 0.0 +- for _, sym := range symbols { +- _, score := matcher.Match([]string{sym}) +- t.Logf("Match(%q) = %v", sym, score) +- if score < prev { +- t.Errorf("Match(%q) = _, %v, want > %v", sym, score, prev) +- } +- prev = score +- } +-} +- +-func TestChunkedMatch(t *testing.T) { +- matcher := NewSymbolMatcher("test") +- _, want := matcher.Match([]string{"test"}) +- chunked := [][]string{ +- {"", "test"}, +- {"test", ""}, +- {"te", "st"}, +- } +- +- for _, chunks := range chunked { +- offset, score := matcher.Match(chunks) +- if offset != 0 || score != want { +- t.Errorf("Match(%v) = %v, %v, want 0, 1.0", chunks, offset, score) +- } +- } +-} +diff -urN a/gopls/internal/goasm/definition.go b/gopls/internal/goasm/definition.go +--- a/gopls/internal/goasm/definition.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/goasm/definition.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,127 +0,0 @@ +-// Copyright 2025 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-// Package goasm provides language-server features for files in Go +-// assembly language (https://go.dev/doc/asm). +-package goasm +- +-import ( +- "context" +- "fmt" +- "go/token" +- +- "golang.org/x/tools/gopls/internal/cache" +- "golang.org/x/tools/gopls/internal/cache/metadata" +- "golang.org/x/tools/gopls/internal/file" +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/gopls/internal/util/asm" +- "golang.org/x/tools/gopls/internal/util/morestrings" +- "golang.org/x/tools/internal/event" +-) +- +-// Definition handles the textDocument/definition request for Go assembly files. +-func Definition(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle, position protocol.Position) ([]protocol.Location, error) { +- ctx, done := event.Start(ctx, "goasm.Definition") +- defer done() +- +- mp, err := snapshot.NarrowestMetadataForFile(ctx, fh.URI()) +- if err != nil { +- return nil, err +- } +- +- // Read the file. +- content, err := fh.Content() +- if err != nil { +- return nil, err +- } +- mapper := protocol.NewMapper(fh.URI(), content) +- offset, err := mapper.PositionOffset(position) +- if err != nil { +- return nil, err +- } +- +- // Parse the assembly. +- // +- // TODO(adonovan): make this just another +- // attribute of the type-checked cache.Package. +- file := asm.Parse(content) +- +- // Figure out the selected symbol. +- // For now, just find the identifier around the cursor. +- var found *asm.Ident +- for _, id := range file.Idents { +- if id.Offset <= offset && offset <= id.End() { +- found = &id +- break +- } +- } +- if found == nil { +- return nil, fmt.Errorf("not an identifier") +- } +- +- // Resolve a symbol with a "." prefix to the current package. +- sym := found.Name +- if sym != "" && sym[0] == '.' { +- sym = string(mp.PkgPath) + sym +- } +- +- // package-qualified symbol? +- if pkgpath, name, ok := morestrings.CutLast(sym, "."); ok { +- // Find declaring package among dependencies. +- // +- // TODO(adonovan): assembly may legally reference +- // non-dependencies. For example, sync/atomic calls +- // internal/runtime/atomic. Perhaps we should search +- // the entire metadata graph, but that's path-dependent. 
+- var declaring *metadata.Package +- for pkg := range snapshot.MetadataGraph().ForwardReflexiveTransitiveClosure(mp.ID) { +- if pkg.PkgPath == metadata.PackagePath(pkgpath) { +- declaring = pkg +- break +- } +- } +- if declaring == nil { +- return nil, fmt.Errorf("package %q is not a dependency", pkgpath) +- } +- +- // Find declared symbol in syntax package. +- pkgs, err := snapshot.TypeCheck(ctx, declaring.ID) +- if err != nil { +- return nil, err +- } +- pkg := pkgs[0] +- def := pkg.Types().Scope().Lookup(name) +- if def == nil { +- return nil, fmt.Errorf("no symbol %q in package %q", name, pkgpath) +- } +- +- // Map position. +- pos := def.Pos() +- pgf, err := pkg.FileEnclosing(pos) +- if err != nil { +- return nil, err +- } +- loc, err := pgf.PosLocation(pos, pos+token.Pos(len(name))) +- if err != nil { +- return nil, err +- } +- return []protocol.Location{loc}, nil +- +- } else { +- // local symbols (funcs, vars, labels) +- for _, id := range file.Idents { +- if id.Name == found.Name && +- (id.Kind == asm.Text || id.Kind == asm.Global || id.Kind == asm.Label) { +- +- loc, err := mapper.OffsetLocation(id.Offset, id.End()) +- if err != nil { +- return nil, err +- } +- return []protocol.Location{loc}, nil +- } +- } +- } +- +- return nil, nil +-} +diff -urN a/gopls/internal/golang/add_import.go b/gopls/internal/golang/add_import.go +--- a/gopls/internal/golang/add_import.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/golang/add_import.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,29 +0,0 @@ +-// Copyright 2020 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package golang +- +-import ( +- "context" +- +- "golang.org/x/tools/gopls/internal/cache" +- "golang.org/x/tools/gopls/internal/cache/parsego" +- "golang.org/x/tools/gopls/internal/file" +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/internal/imports" +-) +- +-// AddImport adds a single import statement to the given file +-func AddImport(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle, importPath string) ([]protocol.TextEdit, error) { +- pgf, err := snapshot.ParseGo(ctx, fh, parsego.Full) +- if err != nil { +- return nil, err +- } +- return ComputeImportFixEdits(snapshot.Options().Local, pgf.Src, &imports.ImportFix{ +- StmtInfo: imports.ImportInfo{ +- ImportPath: importPath, +- }, +- FixType: imports.AddImport, +- }) +-} +diff -urN a/gopls/internal/golang/addtest.go b/gopls/internal/golang/addtest.go +--- a/gopls/internal/golang/addtest.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/golang/addtest.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,814 +0,0 @@ +-// Copyright 2019 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package golang +- +-// This file defines the behavior of the "Add test for FUNC" command. 
+- +-import ( +- "bytes" +- "context" +- "errors" +- "fmt" +- "go/ast" +- "go/format" +- "go/types" +- "os" +- "path/filepath" +- "strconv" +- "strings" +- "text/template" +- "unicode" +- +- "golang.org/x/tools/go/ast/astutil" +- "golang.org/x/tools/gopls/internal/cache" +- "golang.org/x/tools/gopls/internal/cache/metadata" +- "golang.org/x/tools/gopls/internal/cache/parsego" +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/gopls/internal/util/moremaps" +- internalastutil "golang.org/x/tools/internal/astutil" +- "golang.org/x/tools/internal/imports" +- "golang.org/x/tools/internal/typesinternal" +-) +- +-const testTmplString = ` +-func {{.TestFuncName}}(t *{{.TestingPackageName}}.T) { +- {{- /* Test cases struct declaration and empty initialization. */}} +- tests := []struct { +- name string // description of this test case +- +- {{- $commentPrinted := false }} +- {{- if and .Receiver .Receiver.Constructor}} +- {{- range .Receiver.Constructor.Args}} +- {{- if .Name}} +- {{- if not $commentPrinted}} +- // Named input parameters for receiver constructor. +- {{- $commentPrinted = true }} +- {{- end}} +- {{.Name}} {{.Type}} +- {{- end}} +- {{- end}} +- {{- end}} +- +- {{- $commentPrinted := false }} +- {{- range .Func.Args}} +- {{- if .Name}} +- {{- if not $commentPrinted}} +- // Named input parameters for target function. +- {{- $commentPrinted = true }} +- {{- end}} +- {{.Name}} {{.Type}} +- {{- end}} +- {{- end}} +- +- {{- range $index, $res := .Func.Results}} +- {{- if eq $res.Name "gotErr"}} +- wantErr bool +- {{- else if eq $index 0}} +- want {{$res.Type}} +- {{- else}} +- want{{add $index 1}} {{$res.Type}} +- {{- end}} +- {{- end}} +- }{ +- // TODO: Add test cases. +- } +- +- {{- /* Loop over all the test cases. */}} +- for _, tt := range tests { +- t.Run(tt.name, func(t *{{.TestingPackageName}}.T) { +- {{- /* Constructor or empty initialization. */}} +- {{- if .Receiver}} +- {{- if .Receiver.Constructor}} +- {{- /* Receiver variable by calling constructor. */}} +- {{fieldNames .Receiver.Constructor.Results ""}} := {{if .PackageName}}{{.PackageName}}.{{end}} +- {{- .Receiver.Constructor.Name}} +- +- {{- /* Constructor input parameters. */ -}} +- ( +- {{- range $index, $arg := .Receiver.Constructor.Args}} +- {{- if ne $index 0}}, {{end}} +- {{- if .Name}}tt.{{.Name}}{{else}}{{.Value}}{{end}} +- {{- end -}} +- ) +- +- {{- /* Handles the error return from constructor. */}} +- {{- $last := last .Receiver.Constructor.Results}} +- {{- if eq $last.Type "error"}} +- if err != nil { +- t.Fatalf("could not construct receiver type: %v", err) +- } +- {{- end}} +- {{- else}} +- {{- /* Receiver variable declaration. */}} +- // TODO: construct the receiver type. +- var {{.Receiver.Var.Name}} {{.Receiver.Var.Type}} +- {{- end}} +- {{- end}} +- +- {{- /* Got variables. */}} +- {{if .Func.Results}}{{fieldNames .Func.Results ""}} := {{end}} +- +- {{- /* Call expression. */}} +- {{- if .Receiver}}{{/* Call method by VAR.METHOD. */}} +- {{- .Receiver.Var.Name}}. +- {{- else if .PackageName}}{{/* Call function by PACKAGE.FUNC. */}} +- {{- .PackageName}}. +- {{- end}}{{.Func.Name}} +- +- {{- /* Input parameters. */ -}} +- ( +- {{- range $index, $arg := .Func.Args}} +- {{- if ne $index 0}}, {{end}} +- {{- if .Name}}tt.{{.Name}}{{else}}{{.Value}}{{end}} +- {{- end -}} +- ) +- +- {{- /* Handles the returned error before the rest of return value. 
*/}} +- {{- $last := last .Func.Results}} +- {{- if eq $last.Type "error"}} +- if gotErr != nil { +- if !tt.wantErr { +- t.Errorf("{{$.Func.Name}}() failed: %v", gotErr) +- } +- return +- } +- if tt.wantErr { +- t.Fatal("{{$.Func.Name}}() succeeded unexpectedly") +- } +- {{- end}} +- +- {{- /* Compare the returned values except for the last returned error. */}} +- {{- if or (and .Func.Results (ne $last.Type "error")) (and (gt (len .Func.Results) 1) (eq $last.Type "error"))}} +- // TODO: update the condition below to compare got with tt.want. +- {{- range $index, $res := .Func.Results}} +- {{- if ne $res.Name "gotErr"}} +- if true { +- t.Errorf("{{$.Func.Name}}() = %v, want %v", {{.Name}}, tt.{{if eq $index 0}}want{{else}}want{{add $index 1}}{{end}}) +- } +- {{- end}} +- {{- end}} +- {{- end}} +- }) +- } +-} +-` +- +-// Name is the name of the field this input parameter should reference. +-// Value is the expression this input parameter should accept. +-// +-// Exactly one of Name or Value must be set. +-type field struct { +- Name, Type, Value string +-} +- +-type function struct { +- Name string +- Args []field +- Results []field +-} +- +-type receiver struct { +- // Var is the name and type of the receiver variable. +- Var field +- // Constructor holds information about the constructor for the receiver type. +- // If no qualified constructor is found, this field will be nil. +- Constructor *function +-} +- +-type testInfo struct { +- // TestingPackageName is the package name should be used when referencing +- // package "testing" +- TestingPackageName string +- // PackageName is the package name the target function/method is declared from. +- PackageName string +- TestFuncName string +- // Func holds information about the function or method being tested. +- Func function +- // Receiver holds information about the receiver of the function or method +- // being tested. +- // This field is nil for functions and non-nil for methods. +- Receiver *receiver +-} +- +-var testTmpl = template.Must(template.New("test").Funcs(template.FuncMap{ +- "add": func(a, b int) int { return a + b }, +- "last": func(slice []field) field { +- if len(slice) == 0 { +- return field{} +- } +- return slice[len(slice)-1] +- }, +- "fieldNames": func(fields []field, qualifier string) (res string) { +- var names []string +- for _, f := range fields { +- names = append(names, qualifier+f.Name) +- } +- return strings.Join(names, ", ") +- }, +-}).Parse(testTmplString)) +- +-// AddTestForFunc adds a test for the function enclosing the given input range. +-// It creates a _test.go file if one does not already exist. +-// It returns the required text edits and the predicted location of the new test +-// function, which is only valid after the edits have been successfully applied. 
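To make the template above concrete: for a hypothetical target func Parse(s string) (int, error), it expands to roughly the skeleton below. This is a hand-written approximation, not actual generator output; the Parse function, the package name, and the strconv-based stub are all made up for illustration:

package parse

import (
	"strconv"
	"testing"
)

// Parse stands in for the function under test assumed by this sketch.
func Parse(s string) (int, error) { return strconv.Atoi(s) }

func TestParse(t *testing.T) {
	tests := []struct {
		name string // description of this test case
		// Named input parameters for target function.
		s       string
		want    int
		wantErr bool
	}{
		// TODO: Add test cases.
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			got, gotErr := Parse(tt.s)
			if gotErr != nil {
				if !tt.wantErr {
					t.Errorf("Parse() failed: %v", gotErr)
				}
				return
			}
			if tt.wantErr {
				t.Fatal("Parse() succeeded unexpectedly")
			}
			// TODO: update the condition below to compare got with tt.want.
			if true {
				t.Errorf("Parse() = %v, want %v", got, tt.want)
			}
		})
	}
}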
+-func AddTestForFunc(ctx context.Context, snapshot *cache.Snapshot, loc protocol.Location) (changes []protocol.DocumentChange, show *protocol.Location, _ error) { +- pkg, pgf, err := NarrowestPackageForFile(ctx, snapshot, loc.URI) +- if err != nil { +- return nil, nil, err +- } +- +- if metadata.IsCommandLineArguments(pkg.Metadata().ID) { +- return nil, nil, fmt.Errorf("current file in command-line-arguments package") +- } +- +- if errors := pkg.ParseErrors(); len(errors) > 0 { +- return nil, nil, fmt.Errorf("package has parse errors: %v", errors[0]) +- } +- if errors := pkg.TypeErrors(); len(errors) > 0 { +- return nil, nil, fmt.Errorf("package has type errors: %v", errors[0]) +- } +- +- // All three maps map the path of an imported package to +- // the local name if explicit or "" otherwise. +- var ( +- fileImports map[string]string // imports in foo.go file +- testImports map[string]string // imports in foo_test.go file +- extraImports = make(map[string]string) // imports to add to test file +- ) +- +- var collectImports = func(file *ast.File) (map[string]string, error) { +- imps := make(map[string]string) +- for _, spec := range file.Imports { +- // TODO(hxjiang): support dot imports. +- if spec.Name != nil && spec.Name.Name == "." { +- return nil, fmt.Errorf("\"add test for func\" does not support files containing dot imports") +- } +- path, err := strconv.Unquote(spec.Path.Value) +- if err != nil { +- return nil, err +- } +- if spec.Name != nil { +- if spec.Name.Name == "_" { +- continue +- } +- imps[path] = spec.Name.Name +- } else { +- imps[path] = "" +- } +- } +- return imps, nil +- } +- +- // Collect all the imports from the x.go, keep track of the local package name. +- if fileImports, err = collectImports(pgf.File); err != nil { +- return nil, nil, err +- } +- +- testBase := strings.TrimSuffix(loc.URI.Base(), ".go") + "_test.go" +- goTestFileURI := protocol.URIFromPath(filepath.Join(loc.URI.DirPath(), testBase)) +- +- testFH, err := snapshot.ReadFile(ctx, goTestFileURI) +- if err != nil { +- return nil, nil, err +- } +- +- // TODO(hxjiang): use a fresh name if the same test function name already +- // exist. +- +- var ( +- eofRange protocol.Range // empty selection at end of new file +- // edits contains all the text edits to be applied to the test file. +- edits []protocol.TextEdit +- // xtest indicates whether the test file use package x or x_test. +- // TODO(hxjiang): We can discuss the option to interpret the user's +- // intention by which function they are selecting. Have one file for +- // x_test package testing, one file for x package testing. +- xtest = true +- ) +- +- start, end, err := pgf.RangePos(loc.Range) +- if err != nil { +- return nil, nil, err +- } +- +- path, _ := astutil.PathEnclosingInterval(pgf.File, start, end) +- if len(path) < 2 { +- return nil, nil, fmt.Errorf("no enclosing function") +- } +- +- decl, ok := path[len(path)-2].(*ast.FuncDecl) +- if !ok { +- return nil, nil, fmt.Errorf("no enclosing function") +- } +- +- fn := pkg.TypesInfo().Defs[decl.Name].(*types.Func) +- sig := fn.Signature() +- +- testPGF, err := snapshot.ParseGo(ctx, testFH, parsego.Header) +- if err != nil { +- if !errors.Is(err, os.ErrNotExist) { +- return nil, nil, err +- } +- changes = append(changes, protocol.DocumentChangeCreate(goTestFileURI)) +- +- // header is the buffer containing the text to add to the beginning of the file. 
+- var header bytes.Buffer +- +- // If this test file was created by the gopls, add a copyright header and +- // package decl based on the originating file. +- // Search for something that looks like a copyright header, to replicate +- // in the new file. +- if c := CopyrightComment(pgf.File); c != nil { +- text, err := pgf.NodeText(c) +- if err != nil { +- return nil, nil, err +- } +- header.Write(text) +- // One empty line between copyright header and following. +- header.WriteString("\n\n") +- } +- +- // If this test file was created by gopls, add build constraints +- // matching the non-test file. +- if c := buildConstraintComment(pgf.File); c != nil { +- text, err := pgf.NodeText(c) +- if err != nil { +- return nil, nil, err +- } +- header.Write(text) +- // One empty line between build constraint and following. +- header.WriteString("\n\n") +- } +- +- // Determine if a new test file should use in-package test (package x) +- // or external test (package x_test). If any of the function parameters +- // reference an unexported object, we cannot write out test cases from +- // an x_test package. +- externalTestOK := func() bool { +- if !fn.Exported() { +- return false +- } +- if fn.Signature().Recv() != nil { +- if _, ident, _ := internalastutil.UnpackRecv(decl.Recv.List[0].Type); ident == nil || !ident.IsExported() { +- return false +- } +- } +- refsUnexported := false +- ast.Inspect(decl, func(n ast.Node) bool { +- // The original function refs to an unexported object from the +- // same package, so further inspection is unnecessary. +- if refsUnexported { +- return false +- } +- switch t := n.(type) { +- case *ast.BlockStmt: +- // Avoid inspect the function body. +- return false +- case *ast.Ident: +- // Use test variant (package foo) if the function signature +- // references any unexported objects (like types or +- // constants) from the same package. +- // Note: types.PkgName is excluded from this check as it's +- // always defined in the same package. +- if obj, ok := pkg.TypesInfo().Uses[t]; ok && !obj.Exported() && obj.Pkg() == pkg.Types() && !is[*types.PkgName](obj) { +- refsUnexported = true +- } +- return false +- default: +- return true +- } +- }) +- return !refsUnexported +- } +- +- xtest = externalTestOK() +- if xtest { +- fmt.Fprintf(&header, "package %s_test\n", pkg.Types().Name()) +- } else { +- fmt.Fprintf(&header, "package %s\n", pkg.Types().Name()) +- } +- +- // Write the copyright and package decl to the beginning of the file. +- edits = append(edits, protocol.TextEdit{ +- Range: protocol.Range{}, +- NewText: header.String(), +- }) +- } else { // existing _test.go file. +- file := testPGF.File +- if !file.Name.NamePos.IsValid() { +- return nil, nil, fmt.Errorf("missing package declaration") +- } +- switch file.Name.Name { +- case pgf.File.Name.Name: +- xtest = false +- case pgf.File.Name.Name + "_test": +- xtest = true +- default: +- return nil, nil, fmt.Errorf("invalid package declaration %q in test file %q", file.Name, testPGF) +- } +- +- eofRange, err = testPGF.PosRange(file.FileEnd, file.FileEnd) +- if err != nil { +- return nil, nil, err +- } +- +- // Collect all the imports from the foo_test.go. +- if testImports, err = collectImports(file); err != nil { +- return nil, nil, err +- } +- } +- +- // qual qualifier determines the correct package name to use for a type in +- // foo_test.go. It does this by: +- // - Consult imports map from test file foo_test.go. +- // - If not found, consult imports map from original file foo.go. 
+- // If the package is not imported in test file foo_test.go, it is added to +- // extraImports map. +- qual := func(p *types.Package) string { +- // References from an in-package test should not be qualified. +- if !xtest && p == pkg.Types() { +- return "" +- } +- // Prefer using the package name if already defined in foo_test.go +- if local, ok := testImports[p.Path()]; ok { +- if local != "" { +- return local +- } else { +- return p.Name() +- } +- } +- // TODO(hxjiang): we should consult the scope of the test package to +- // ensure these new imports do not shadow any package-level names. +- // Prefer the local import name (if any) used in the package under test. +- if local, ok := fileImports[p.Path()]; ok && local != "" { +- extraImports[p.Path()] = local +- return local +- } +- // Fall back to the package name since there is no renaming. +- extraImports[p.Path()] = "" +- return p.Name() +- } +- +- if xtest { +- // Reject if function/method is unexported. +- if !fn.Exported() { +- return nil, nil, fmt.Errorf("cannot add test of unexported function %s to external test package %s_test", decl.Name, pgf.File.Name) +- } +- +- // Reject if receiver is unexported. +- if sig.Recv() != nil { +- if _, ident, _ := internalastutil.UnpackRecv(decl.Recv.List[0].Type); ident == nil || !ident.IsExported() { +- return nil, nil, fmt.Errorf("cannot add external test for method %s.%s as receiver type is not exported", ident.Name, decl.Name) +- } +- } +- // TODO(hxjiang): reject if the any input parameter type is unexported. +- // TODO(hxjiang): reject if any return value type is unexported. Explore +- // the option to drop the return value if the type is unexported. +- } +- +- testName, err := testName(fn) +- if err != nil { +- return nil, nil, err +- } +- +- data := testInfo{ +- TestingPackageName: qual(types.NewPackage("testing", "testing")), +- PackageName: qual(pkg.Types()), +- TestFuncName: testName, +- Func: function{ +- Name: fn.Name(), +- }, +- } +- +- isContextType := func(t types.Type) bool { +- return typesinternal.IsTypeNamed(t, "context", "Context") +- } +- +- for i := range sig.Params().Len() { +- param := sig.Params().At(i) +- name, typ := param.Name(), param.Type() +- f := field{Type: types.TypeString(typ, qual)} +- if i == 0 && isContextType(typ) { +- f.Value = qual(types.NewPackage("context", "context")) + ".Background()" +- } else if name == "" || name == "_" { +- f.Value, _ = typesinternal.ZeroString(typ, qual) +- } else { +- f.Name = name +- } +- data.Func.Args = append(data.Func.Args, f) +- } +- +- for i := range sig.Results().Len() { +- typ := sig.Results().At(i).Type() +- var name string +- if i == sig.Results().Len()-1 && types.Identical(typ, errorType) { +- name = "gotErr" +- } else if i == 0 { +- name = "got" +- } else { +- name = fmt.Sprintf("got%d", i+1) +- } +- data.Func.Results = append(data.Func.Results, field{ +- Name: name, +- Type: types.TypeString(typ, qual), +- }) +- } +- +- if sig.Recv() != nil { +- // Find the preferred type for the receiver. We don't use +- // typesinternal.ReceiverNamed here as we want to preserve aliases. +- recvType := sig.Recv().Type() +- if ptr, ok := recvType.(*types.Pointer); ok { +- recvType = ptr.Elem() +- } +- +- t, ok := recvType.(typesinternal.NamedOrAlias) +- if !ok { +- return nil, nil, fmt.Errorf("the receiver type is neither named type nor alias type") +- } +- +- var varName string +- { +- var possibleNames []string // list of candidates, preferring earlier entries. 
+- if len(sig.Recv().Name()) > 0 { +- possibleNames = append(possibleNames, +- sig.Recv().Name(), // receiver name. +- string(sig.Recv().Name()[0]), // first character of receiver name. +- ) +- } +- possibleNames = append(possibleNames, +- string(t.Obj().Name()[0]), // first character of receiver type name. +- ) +- if len(t.Obj().Name()) >= 2 { +- possibleNames = append(possibleNames, +- string(t.Obj().Name()[:2]), // first two character of receiver type name. +- ) +- } +- var camelCase []rune +- for i, s := range t.Obj().Name() { +- if i == 0 || unicode.IsUpper(s) { +- camelCase = append(camelCase, s) +- } +- } +- possibleNames = append(possibleNames, +- string(camelCase), // captalized initials. +- ) +- for _, name := range possibleNames { +- name = strings.ToLower(name) +- if name == "" || name == "t" || name == "tt" { +- continue +- } +- varName = name +- break +- } +- if varName == "" { +- varName = "r" // default as "r" for "receiver". +- } +- } +- +- data.Receiver = &receiver{ +- Var: field{ +- Name: varName, +- Type: types.TypeString(recvType, qual), +- }, +- } +- +- // constructor is the selected constructor for type T. +- var constructor *types.Func +- +- // When finding the qualified constructor, the function should return the +- // any type whose named type is the same type as T's named type. +- _, wantType := typesinternal.ReceiverNamed(sig.Recv()) +- for _, name := range pkg.Types().Scope().Names() { +- f, ok := pkg.Types().Scope().Lookup(name).(*types.Func) +- if !ok { +- continue +- } +- if f.Signature().Recv() != nil { +- continue +- } +- // Unexported constructor is not visible in x_test package. +- if xtest && !f.Exported() { +- continue +- } +- // Only allow constructors returning T, T, (T, error), or (T, error). +- if f.Signature().Results().Len() > 2 || f.Signature().Results().Len() == 0 { +- continue +- } +- +- _, gotType := typesinternal.ReceiverNamed(f.Signature().Results().At(0)) +- if gotType == nil || !types.Identical(gotType, wantType) { +- continue +- } +- +- if f.Signature().Results().Len() == 2 && !types.Identical(f.Signature().Results().At(1).Type(), errorType) { +- continue +- } +- +- if constructor == nil { +- constructor = f +- } +- +- // Functions named NewType are prioritized as constructors over other +- // functions that match only the signature criteria. +- if strings.EqualFold(strings.ToLower(f.Name()), strings.ToLower("new"+t.Obj().Name())) { +- constructor = f +- } +- } +- +- if constructor != nil { +- data.Receiver.Constructor = &function{Name: constructor.Name()} +- for i := range constructor.Signature().Params().Len() { +- param := constructor.Signature().Params().At(i) +- name, typ := param.Name(), param.Type() +- f := field{Type: types.TypeString(typ, qual)} +- if i == 0 && isContextType(typ) { +- f.Value = qual(types.NewPackage("context", "context")) + ".Background()" +- } else if name == "" || name == "_" { +- f.Value, _ = typesinternal.ZeroString(typ, qual) +- } else { +- f.Name = name +- } +- data.Receiver.Constructor.Args = append(data.Receiver.Constructor.Args, f) +- } +- for i := range constructor.Signature().Results().Len() { +- typ := constructor.Signature().Results().At(i).Type() +- var name string +- if i == 0 { +- // The first return value must be of type T, *T, or a type whose named +- // type is the same as named type of T. +- name = varName +- } else if i == constructor.Signature().Results().Len()-1 && types.Identical(typ, errorType) { +- name = "err" +- } else { +- // Drop any return values beyond the first and the last. 
+- // e.g., "f, _, _, err := NewFoo()". +- name = "_" +- } +- data.Receiver.Constructor.Results = append(data.Receiver.Constructor.Results, field{ +- Name: name, +- Type: types.TypeString(typ, qual), +- }) +- } +- } +- } +- +- // Resolves duplicate parameter names between the function and its +- // receiver's constructor. It adds prefix to the constructor's parameters +- // until no conflicts remain. +- if data.Receiver != nil && data.Receiver.Constructor != nil { +- seen := map[string]bool{} +- for _, f := range data.Func.Args { +- if f.Name == "" { +- continue +- } +- seen[f.Name] = true +- } +- +- // "" for no change, "c" for constructor, "i" for input. +- for _, prefix := range []string{"", "c", "c_", "i", "i_"} { +- conflict := false +- for _, f := range data.Receiver.Constructor.Args { +- if f.Name == "" { +- continue +- } +- if seen[prefix+f.Name] { +- conflict = true +- break +- } +- } +- if !conflict { +- for i, f := range data.Receiver.Constructor.Args { +- if f.Name == "" { +- continue +- } +- data.Receiver.Constructor.Args[i].Name = prefix + data.Receiver.Constructor.Args[i].Name +- } +- break +- } +- } +- } +- +- // Compute edits to update imports. +- // +- // If we're adding to an existing test file, we need to adjust existing +- // imports. Otherwise, we can simply write out the imports to the new file. +- if testPGF != nil { +- var importFixes []*imports.ImportFix +- for path, name := range extraImports { +- importFixes = append(importFixes, &imports.ImportFix{ +- StmtInfo: imports.ImportInfo{ +- ImportPath: path, +- Name: name, +- }, +- FixType: imports.AddImport, +- }) +- } +- importEdits, err := ComputeImportFixEdits(snapshot.Options().Local, testPGF.Src, importFixes...) +- if err != nil { +- return nil, nil, fmt.Errorf("could not compute the import fix edits: %w", err) +- } +- edits = append(edits, importEdits...) +- } else { +- var importsBuffer bytes.Buffer +- if len(extraImports) == 1 { +- importsBuffer.WriteString("\nimport ") +- for path, name := range extraImports { +- if name != "" { +- importsBuffer.WriteString(name + " ") +- } +- importsBuffer.WriteString(fmt.Sprintf("\"%s\"\n", path)) +- } +- } else { +- importsBuffer.WriteString("\nimport(") +- // Sort for determinism. +- for path, name := range moremaps.Sorted(extraImports) { +- importsBuffer.WriteString("\n\t") +- if name != "" { +- importsBuffer.WriteString(name + " ") +- } +- importsBuffer.WriteString(fmt.Sprintf("\"%s\"", path)) +- } +- importsBuffer.WriteString("\n)\n") +- } +- edits = append(edits, protocol.TextEdit{ +- Range: protocol.Range{}, +- NewText: importsBuffer.String(), +- }) +- } +- +- var test bytes.Buffer +- if err := testTmpl.Execute(&test, data); err != nil { +- return nil, nil, err +- } +- +- formatted, err := format.Source(test.Bytes()) +- if err != nil { +- return nil, nil, err +- } +- +- edits = append(edits, +- protocol.TextEdit{ +- Range: eofRange, +- NewText: string(formatted), +- }, +- ) +- +- // Show the line of generated test function. +- { +- line := eofRange.Start.Line +- for i := range len(edits) - 1 { // last edits is the func decl +- e := edits[i] +- oldLines := e.Range.End.Line - e.Range.Start.Line +- newLines := uint32(strings.Count(e.NewText, "\n")) +- line += (newLines - oldLines) +- } +- show = &protocol.Location{ +- URI: testFH.URI(), +- Range: protocol.Range{ +- // Test function template have a new line at beginning. 
+- Start: protocol.Position{Line: line + 1}, +- End: protocol.Position{Line: line + 1}, +- }, +- } +- } +- +- return append(changes, protocol.DocumentChangeEdit(testFH, edits)), show, nil +-} +- +-// testName returns the name of the function to use for the new function that +-// tests fn. +-// Returns empty string if the fn is ill typed or nil. +-func testName(fn *types.Func) (string, error) { +- if fn == nil { +- return "", fmt.Errorf("input nil function") +- } +- testName := "Test" +- if recv := fn.Signature().Recv(); recv != nil { // method declaration. +- // Retrieve the unpointered receiver type to ensure the test name is based +- // on the topmost alias or named type, not the alias' RHS type (potentially +- // unexported) type. +- // For example: +- // type Foo = foo // Foo is an exported alias for the unexported type foo +- recvType := recv.Type() +- if ptr, ok := recv.Type().(*types.Pointer); ok { +- recvType = ptr.Elem() +- } +- +- t, ok := recvType.(typesinternal.NamedOrAlias) +- if !ok { +- return "", fmt.Errorf("receiver type is not named type or alias type") +- } +- +- if !t.Obj().Exported() { +- testName += "_" +- } +- +- testName += t.Obj().Name() + "_" +- } else if !fn.Exported() { // unexported function declaration. +- testName += "_" +- } +- return testName + fn.Name(), nil +-} +diff -urN a/gopls/internal/golang/assembly.go b/gopls/internal/golang/assembly.go +--- a/gopls/internal/golang/assembly.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/golang/assembly.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,157 +0,0 @@ +-// Copyright 2024 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package golang +- +-// This file produces the "Browse GOARCH assembly of f" HTML report. +-// +-// See also: +-// - ./codeaction.go - computes the symbol and offers the CodeAction command. +-// - ../server/command.go - handles the command by opening a web page. +-// - ../server/server.go - handles the HTTP request and calls this function. +-// +-// For language-server behavior in Go assembly language files, +-// see [golang.org/x/tools/gopls/internal/goasm]. +- +-import ( +- "bytes" +- "context" +- "fmt" +- "html" +- "io" +- "net/http" +- "os" +- "regexp" +- "strconv" +- "strings" +- +- "golang.org/x/tools/gopls/internal/cache" +- "golang.org/x/tools/gopls/internal/util/morestrings" +-) +- +-// AssemblyHTML returns an HTML document containing an assembly listing of the selected function. +-// +-// TODO(adonovan): cross-link jumps and block labels, like github.com/aclements/objbrowse. +-// +-// See gopls/internal/test/integration/misc/webserver_test.go for tests. +-func AssemblyHTML(ctx context.Context, snapshot *cache.Snapshot, w http.ResponseWriter, pkg *cache.Package, symbol string, web Web) { +- // Prepare to compile the package with -S, and capture its stderr stream. +- // We use "go test -c" not "go build" as it covers all three packages +- // (p, "p [p.test]", "p_test [p.test]") in the directory, if they exist. +- // (See also compileropt.go.) +- inv, cleanupInvocation, err := snapshot.GoCommandInvocation(cache.NoNetwork, pkg.Metadata().CompiledGoFiles[0].DirPath(), +- "test", []string{ +- "-c", +- "-o", os.DevNull, +- "-gcflags=-S", +- ".", +- }) +- if err != nil { +- // e.g. 
failed to write overlays (rare)
+-		http.Error(w, err.Error(), http.StatusInternalServerError)
+-		return
+-	}
+-	defer cleanupInvocation()
+-
+-	escape := html.EscapeString
+-
+-	// Emit the start of the report.
+-	titleHTML := fmt.Sprintf("%s assembly for %s",
+-		escape(snapshot.View().GOARCH()),
+-		escape(symbol))
+-	io.WriteString(w, `[HTML page header elided: the page title and heading interpolate titleHTML; the body links "A Quick Guide to Go's Assembler", notes "Experimental. Contributions welcome!", explains "Click on a source line marker L1234 to navigate your editor there. (VS Code users: please upvote #208093)", and ends with a "Compiling..." placeholder.]
+-`)
    +-	if flusher, ok := w.(http.Flusher); ok {
    +-		flusher.Flush()
    +-	}
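The early Flush above is the standard net/http streaming pattern: push the provisional page to the client before starting the slow work. A minimal standalone sketch of the same idea (hypothetical handler and address, not taken from this patch):

	package main

	import (
		"fmt"
		"log"
		"net/http"
		"time"
	)

	func handler(w http.ResponseWriter, r *http.Request) {
		// Send a provisional "working..." line immediately.
		fmt.Fprintln(w, "Compiling...")
		if flusher, ok := w.(http.Flusher); ok {
			flusher.Flush() // push buffered bytes to the client now
		}
		// Then do the slow work and append the result.
		time.Sleep(2 * time.Second) // stand-in for the slow compile step
		fmt.Fprintln(w, "done")
	}

	func main() {
		http.HandleFunc("/", handler)
		log.Fatal(http.ListenAndServe("localhost:8080", nil))
	}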
    +-
    +-	// At this point errors must be reported by writing HTML.
+-	// To do this, set "status" and return early.
    +-
    +-	var buf bytes.Buffer
    +-	status := "Reload the page to recompile."
    +-	defer func() {
    +-		// Update the "Compiling..." message.
    +-		fmt.Fprintf(&buf, `
    +-
    +- +-`, status) +- w.Write(buf.Bytes()) +- }() +- +- // Compile the package. +- _, stderr, err, _ := snapshot.View().GoCommandRunner().RunRaw(ctx, *inv) +- if err != nil { +- status = fmt.Sprintf("compilation failed: %v", err) +- return +- } +- +- // Write the rest of the report. +- content := stderr.String() +- +- // insnRx matches an assembly instruction line. +- // Submatch groups are: (offset-hex-dec, file-line-column, instruction). +- insnRx := regexp.MustCompile(`^(\s+0x[0-9a-f ]+)\(([^)]*)\)\s+(.*)$`) +- +- // Parse the functions of interest out of the listing. +- // Each function is of the form: +- // +- // symbol STEXT k=v... +- // 0x0000 00000 (/file.go:123) NOP... +- // ... +- // +- // Allow matches of symbol, symbol.func1, symbol.deferwrap, etc. +- on := false +- for line := range strings.SplitSeq(content, "\n") { +- // start of function symbol? +- if strings.Contains(line, " STEXT ") { +- on = strings.HasPrefix(line, symbol) && +- (line[len(symbol)] == ' ' || line[len(symbol)] == '.') +- } +- if !on { +- continue // within uninteresting symbol +- } +- +- // In lines of the form +- // "\t0x0000 00000 (/file.go:123) NOP..." +- // replace the "(/file.go:123)" portion with an "L0123" source link. +- // Skip filenames of the form "". +- if parts := insnRx.FindStringSubmatch(line); parts != nil { +- link := " " // if unknown +- if file, linenum, ok := morestrings.CutLast(parts[2], ":"); ok && !strings.HasPrefix(file, "<") { +- if linenum, err := strconv.Atoi(linenum); err == nil { +- text := fmt.Sprintf("L%04d", linenum) +- link = sourceLink(text, web.SrcURL(file, linenum, 1)) +- } +- } +- fmt.Fprintf(&buf, "%s\t%s\t%s", escape(parts[1]), link, escape(parts[3])) +- } else { +- buf.WriteString(escape(line)) +- } +- buf.WriteByte('\n') +- } +-} +diff -urN a/gopls/internal/golang/call_hierarchy.go b/gopls/internal/golang/call_hierarchy.go +--- a/gopls/internal/golang/call_hierarchy.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/golang/call_hierarchy.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,320 +0,0 @@ +-// Copyright 2020 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package golang +- +-import ( +- "context" +- "errors" +- "fmt" +- "go/ast" +- "go/token" +- "go/types" +- +- "golang.org/x/tools/go/ast/astutil" +- "golang.org/x/tools/go/types/typeutil" +- "golang.org/x/tools/gopls/internal/cache" +- "golang.org/x/tools/gopls/internal/cache/parsego" +- "golang.org/x/tools/gopls/internal/file" +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/gopls/internal/util/bug" +- "golang.org/x/tools/gopls/internal/util/moremaps" +- "golang.org/x/tools/gopls/internal/util/safetoken" +- "golang.org/x/tools/internal/event" +- "golang.org/x/tools/internal/typesinternal" +-) +- +-// PrepareCallHierarchy returns an array of CallHierarchyItem for a file and the position within the file. 
+-func PrepareCallHierarchy(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle, pp protocol.Position) ([]protocol.CallHierarchyItem, error) { +- ctx, done := event.Start(ctx, "golang.PrepareCallHierarchy") +- defer done() +- +- pkg, pgf, err := NarrowestPackageForFile(ctx, snapshot, fh.URI()) +- if err != nil { +- return nil, err +- } +- pos, err := pgf.PositionPos(pp) +- if err != nil { +- return nil, err +- } +- +- _, obj, _ := referencedObject(pkg, pgf, pos) +- if obj == nil { +- return nil, nil +- } +- +- if _, ok := obj.Type().Underlying().(*types.Signature); !ok { +- return nil, nil +- } +- +- declLoc, err := ObjectLocation(ctx, pkg.FileSet(), snapshot, obj) +- if err != nil { +- return nil, err +- } +- rng := declLoc.Range +- +- callHierarchyItem := protocol.CallHierarchyItem{ +- Name: obj.Name(), +- Kind: protocol.Function, +- Tags: []protocol.SymbolTag{}, +- Detail: callHierarchyItemDetail(obj, declLoc), +- URI: declLoc.URI, +- Range: rng, +- SelectionRange: rng, +- } +- return []protocol.CallHierarchyItem{callHierarchyItem}, nil +-} +- +-// IncomingCalls returns an array of CallHierarchyIncomingCall for a file and the position within the file. +-func IncomingCalls(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle, pos protocol.Position) ([]protocol.CallHierarchyIncomingCall, error) { +- ctx, done := event.Start(ctx, "golang.IncomingCalls") +- defer done() +- +- refs, err := references(ctx, snapshot, fh, pos, false) +- if err != nil { +- if errors.Is(err, ErrNoIdentFound) || errors.Is(err, errNoObjectFound) { +- return nil, nil +- } +- return nil, err +- } +- +- // Group references by their enclosing function declaration. +- incomingCalls := make(map[protocol.Location]*protocol.CallHierarchyIncomingCall) +- for _, ref := range refs { +- callItem, err := enclosingNodeCallItem(ctx, snapshot, ref.pkgPath, ref.location) +- if err != nil { +- event.Error(ctx, fmt.Sprintf("error getting enclosing node for %q", ref.pkgPath), err) +- continue +- } +- loc := callItem.URI.Location(callItem.Range) +- call, ok := incomingCalls[loc] +- if !ok { +- call = &protocol.CallHierarchyIncomingCall{From: callItem} +- incomingCalls[loc] = call +- } +- call.FromRanges = append(call.FromRanges, ref.location.Range) +- } +- +- // Flatten the map of pointers into a slice of values. +- incomingCallItems := make([]protocol.CallHierarchyIncomingCall, 0, len(incomingCalls)) +- for _, callItem := range moremaps.SortedFunc(incomingCalls, protocol.CompareLocation) { +- incomingCallItems = append(incomingCallItems, *callItem) +- } +- return incomingCallItems, nil +-} +- +-// enclosingNodeCallItem creates a CallHierarchyItem representing the function call at loc. +-func enclosingNodeCallItem(ctx context.Context, snapshot *cache.Snapshot, pkgPath PackagePath, loc protocol.Location) (protocol.CallHierarchyItem, error) { +- // Parse the file containing the reference. +- fh, err := snapshot.ReadFile(ctx, loc.URI) +- if err != nil { +- return protocol.CallHierarchyItem{}, err +- } +- // TODO(adonovan): opt: before parsing, trim the bodies of functions +- // that don't contain the reference, using either a scanner-based +- // implementation such as https://go.dev/play/p/KUrObH1YkX8 +- // (~31% speedup), or a byte-oriented implementation (2x speedup). 
+- pgf, err := snapshot.ParseGo(ctx, fh, parsego.Full) +- if err != nil { +- return protocol.CallHierarchyItem{}, err +- } +- start, end, err := pgf.RangePos(loc.Range) +- if err != nil { +- return protocol.CallHierarchyItem{}, err +- } +- +- // Find the enclosing named function, if any. +- // +- // It is tempting to treat anonymous functions as nodes in the +- // call hierarchy, and historically we used to do that, +- // poorly; see #64451. However, it is impossible to track +- // references to anonymous functions without much deeper +- // analysis. Local analysis is tractable, but ultimately it +- // can only detect calls from the outer function to the inner +- // function. +- // +- // It is simpler and clearer to treat the top-level named +- // function and all its nested functions as one entity, and it +- // allows users to recursively expand the tree where, before, +- // the chain would be broken by each lambda. +- // +- // If the selection is in a global var initializer, +- // default to the file's package declaration. +- path, _ := astutil.PathEnclosingInterval(pgf.File, start, end) +- var ( +- name = pgf.File.Name.Name +- kind = protocol.Package +- ) +- start, end = pgf.File.Name.Pos(), pgf.File.Name.End() +- for _, node := range path { +- switch node := node.(type) { +- case *ast.FuncDecl: +- name = node.Name.Name +- start, end = node.Name.Pos(), node.Name.End() +- kind = protocol.Function +- +- case *ast.FuncLit: +- // If the call comes from a FuncLit with +- // no enclosing FuncDecl, then use the +- // FuncLit's extent. +- name = "func" +- start, end = node.Pos(), node.Type.End() // signature, sans body +- kind = protocol.Function +- +- case *ast.ValueSpec: +- // If the call comes from a var (or, +- // theoretically, const) initializer outside +- // any function, then use the ValueSpec.Names span. +- name = "init" +- start, end = node.Names[0].Pos(), node.Names[len(node.Names)-1].End() +- kind = protocol.Variable +- } +- } +- +- rng, err := pgf.PosRange(start, end) +- if err != nil { +- return protocol.CallHierarchyItem{}, err +- } +- +- return protocol.CallHierarchyItem{ +- Name: name, +- Kind: kind, +- Tags: []protocol.SymbolTag{}, +- Detail: fmt.Sprintf("%s • %s", pkgPath, fh.URI().Base()), +- URI: loc.URI, +- Range: rng, +- SelectionRange: rng, +- }, nil +-} +- +-// OutgoingCalls returns an array of CallHierarchyOutgoingCall for a file and the position within the file. 
+-func OutgoingCalls(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle, pp protocol.Position) ([]protocol.CallHierarchyOutgoingCall, error) { +- ctx, done := event.Start(ctx, "golang.OutgoingCalls") +- defer done() +- +- pkg, pgf, err := NarrowestPackageForFile(ctx, snapshot, fh.URI()) +- if err != nil { +- return nil, err +- } +- pos, err := pgf.PositionPos(pp) +- if err != nil { +- return nil, err +- } +- +- _, obj, _ := referencedObject(pkg, pgf, pos) +- if obj == nil { +- return nil, nil +- } +- +- if _, ok := obj.Type().Underlying().(*types.Signature); !ok { +- return nil, nil +- } +- +- if isBuiltin(obj) { +- return nil, nil // built-ins have no position +- } +- +- declFile := pkg.FileSet().File(obj.Pos()) +- if declFile == nil { +- return nil, bug.Errorf("file not found for %d", obj.Pos()) +- } +- +- uri := protocol.URIFromPath(declFile.Name()) +- offset, err := safetoken.Offset(declFile, obj.Pos()) +- if err != nil { +- return nil, err +- } +- +- declPkg, declPGF, err := NarrowestPackageForFile(ctx, snapshot, uri) +- if err != nil { +- return nil, err +- } +- +- declPos, err := safetoken.Pos(declPGF.Tok, offset) +- if err != nil { +- return nil, err +- } +- +- declNode, _, _ := findDeclInfo([]*ast.File{declPGF.File}, declPos) +- if declNode == nil { +- // TODO(rfindley): why don't we return an error here, or even bug.Errorf? +- return nil, nil +- // return nil, bug.Errorf("failed to find declaration for %v", obj) +- } +- +- type callRange struct { +- start, end token.Pos +- } +- +- // Find calls to known functions/methods, +- // including interface methods, and built-ins. +- var callRanges []callRange +- for n := range ast.Preorder(declNode) { +- if call, ok := n.(*ast.CallExpr); ok { +- callee := typeutil.Callee(pkg.TypesInfo(), call) +- switch callee.(type) { +- case *types.Func, *types.Builtin: +- // Skip trivial builtins (e.g. len) +- // but allow unsafe.Slice, etc. 
+- if callee.Pkg() == nil { +- continue +- } +- id := typesinternal.UsedIdent(pkg.TypesInfo(), call.Fun) +- callRanges = append(callRanges, callRange{ +- start: id.NamePos, +- end: call.Lparen, +- }) +- } +- } +- } +- +- outgoingCalls := make(map[protocol.Location]*protocol.CallHierarchyOutgoingCall) +- for _, callRange := range callRanges { +- _, obj, _ := referencedObject(declPkg, declPGF, callRange.start) +- if obj == nil { +- continue +- } +- +- loc, err := ObjectLocation(ctx, declPkg.FileSet(), snapshot, obj) +- if err != nil { +- return nil, err +- } +- +- outgoingCall, ok := outgoingCalls[loc] +- if !ok { +- outgoingCall = &protocol.CallHierarchyOutgoingCall{ +- To: protocol.CallHierarchyItem{ +- Name: obj.Name(), +- Kind: protocol.Function, +- Tags: []protocol.SymbolTag{}, +- Detail: callHierarchyItemDetail(obj, loc), +- URI: loc.URI, +- Range: loc.Range, +- SelectionRange: loc.Range, +- }, +- } +- outgoingCalls[loc] = outgoingCall +- } +- +- rng, err := declPGF.PosRange(callRange.start, callRange.end) +- if err != nil { +- return nil, err +- } +- outgoingCall.FromRanges = append(outgoingCall.FromRanges, rng) +- } +- +- outgoingCallItems := make([]protocol.CallHierarchyOutgoingCall, 0, len(outgoingCalls)) +- for _, callItem := range moremaps.SortedFunc(outgoingCalls, protocol.CompareLocation) { +- outgoingCallItems = append(outgoingCallItems, *callItem) +- } +- return outgoingCallItems, nil +-} +- +-func callHierarchyItemDetail(obj types.Object, loc protocol.Location) string { +- detail := loc.URI.Base() +- if obj.Pkg() != nil { +- detail = fmt.Sprintf("%s • %s", obj.Pkg().Path(), detail) +- } +- return detail +-} +diff -urN a/gopls/internal/golang/change_quote.go b/gopls/internal/golang/change_quote.go +--- a/gopls/internal/golang/change_quote.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/golang/change_quote.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,72 +0,0 @@ +-// Copyright 2023 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package golang +- +-import ( +- "go/ast" +- "go/token" +- "strconv" +- "strings" +- +- "golang.org/x/tools/go/ast/astutil" +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/gopls/internal/util/bug" +- "golang.org/x/tools/gopls/internal/util/safetoken" +- "golang.org/x/tools/internal/diff" +-) +- +-// convertStringLiteral reports whether we can convert between raw and interpreted +-// string literals in the [start, end) range, along with a CodeAction containing the edits. +-// +-// Only the following conditions are true, the action in result is valid +-// - [start, end) is enclosed by a string literal +-// - if the string is interpreted string, need check whether the convert is allowed +-func convertStringLiteral(req *codeActionsRequest) { +- path, _ := astutil.PathEnclosingInterval(req.pgf.File, req.start, req.end) +- lit, ok := path[0].(*ast.BasicLit) +- if !ok || lit.Kind != token.STRING { +- return +- } +- +- str, err := strconv.Unquote(lit.Value) +- if err != nil { +- return +- } +- +- interpreted := lit.Value[0] == '"' +- // Not all "..." strings can be represented as `...` strings. 
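For reference, the constraint mentioned in the comment above can be checked directly with the strconv package; a tiny standalone illustration (not part of the gopls sources being deleted here):

	package main

	import (
		"fmt"
		"strconv"
	)

	func main() {
		for _, s := range []string{"plain text", "has a ` backquote"} {
			if strconv.CanBackquote(s) {
				// Safe to rewrite the interpreted literal as a raw literal.
				fmt.Printf("%s -> `%s`\n", strconv.Quote(s), s)
			} else {
				fmt.Printf("%s cannot be written as a raw string literal\n", strconv.Quote(s))
			}
		}
	}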
+- if interpreted && !strconv.CanBackquote(strings.ReplaceAll(str, "\n", "")) { +- return +- } +- +- var ( +- title string +- newText string +- ) +- if interpreted { +- title = "Convert to raw string literal" +- newText = "`" + str + "`" +- } else { +- title = "Convert to interpreted string literal" +- newText = strconv.Quote(str) +- } +- +- start, end, err := safetoken.Offsets(req.pgf.Tok, lit.Pos(), lit.End()) +- if err != nil { +- bug.Reportf("failed to get string literal offset by token.Pos:%v", err) +- return +- } +- edits := []diff.Edit{{ +- Start: start, +- End: end, +- New: newText, +- }} +- textedits, err := protocol.EditsFromDiffEdits(req.pgf.Mapper, edits) +- if err != nil { +- bug.Reportf("failed to convert diff.Edit to protocol.TextEdit:%v", err) +- return +- } +- req.addEditAction(title, nil, protocol.DocumentChangeEdit(req.fh, textedits)) +-} +diff -urN a/gopls/internal/golang/change_signature.go b/gopls/internal/golang/change_signature.go +--- a/gopls/internal/golang/change_signature.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/golang/change_signature.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,805 +0,0 @@ +-// Copyright 2023 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package golang +- +-import ( +- "bytes" +- "context" +- "fmt" +- "go/ast" +- "go/format" +- "go/parser" +- "go/token" +- "go/types" +- "regexp" +- +- "golang.org/x/tools/go/ast/astutil" +- "golang.org/x/tools/gopls/internal/cache" +- "golang.org/x/tools/gopls/internal/cache/parsego" +- "golang.org/x/tools/gopls/internal/file" +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/gopls/internal/util/bug" +- "golang.org/x/tools/gopls/internal/util/safetoken" +- "golang.org/x/tools/gopls/internal/util/tokeninternal" +- "golang.org/x/tools/imports" +- internalastutil "golang.org/x/tools/internal/astutil" +- "golang.org/x/tools/internal/diff" +- "golang.org/x/tools/internal/refactor/inline" +- "golang.org/x/tools/internal/typesinternal" +-) +- +-// Changing a signature works as follows, supposing we have the following +-// original function declaration: +-// +-// func Foo(a, b, c int) +-// +-// Step 1: Write the declaration according to the given signature change. For +-// example, given the parameter transformation [2, 0, 1], we construct a new +-// ast.FuncDecl for the signature: +-// +-// func Foo0(c, a, b int) +-// +-// Step 2: Build a wrapper function that delegates to the new function. +-// With this example, the wrapper would look like this: +-// +-// func Foo1(a, b, c int) { +-// Foo0(c, a, b int) +-// } +-// +-// Step 3: Swap in the wrapper for the original, and inline all calls. The +-// trick here is to rename Foo1 to Foo, inline all calls (replacing them with +-// a call to Foo0), and then rename Foo0 back to Foo, using a simple string +-// replacement. +-// +-// For example, given a call +-// +-// func _() { +-// Foo(1, 2, 3) +-// } +-// +-// The inlining results in +-// +-// func _() { +-// Foo0(3, 1, 2) +-// } +-// +-// And then renaming results in +-// +-// func _() { +-// Foo(3, 1, 2) +-// } +-// +-// And the desired signature rewriting has occurred! Note: in practice, we +-// don't use the names Foo0 and Foo1, as they are too likely to conflict with +-// an existing declaration name. 
(Instead, we use the prefix G_o_ + p_l_s) +-// +-// The advantage of going through the inliner is that we get all of the +-// semantic considerations for free: the inliner will check for side effects +-// of arguments, check if the last use of a variable is being removed, check +-// for unnecessary imports, etc. +-// +-// Furthermore, by running the change signature rewriting through the inliner, +-// we ensure that the inliner gets better to the point that it can handle a +-// change signature rewrite just as well as if we had implemented change +-// signature as its own operation. For example, suppose we support reordering +-// the results of a function. In that case, the wrapper would be: +-// +-// func Foo1() (int, int) { +-// y, x := Foo0() +-// return x, y +-// } +-// +-// And a call would be rewritten from +-// +-// x, y := Foo() +-// +-// To +-// +-// r1, r2 := Foo() +-// x, y := r2, r1 +-// +-// In order to make this idiomatic, we'd have to teach the inliner to rewrite +-// this as y, x := Foo(). The simplest and most general way to achieve this is +-// to teach the inliner to recognize when a variable is redundant (r1 and r2, +-// in this case), lifting declarations. That's probably a very useful skill for +-// the inliner to have. +- +-// removeParam computes a refactoring to remove the parameter indicated by the +-// given range. +-func removeParam(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle, rng protocol.Range) ([]protocol.DocumentChange, error) { +- pkg, pgf, err := NarrowestPackageForFile(ctx, snapshot, fh.URI()) +- if err != nil { +- return nil, err +- } +- // Find the unused parameter to remove. +- info := findParam(pgf, rng) +- if info == nil || info.paramIndex == -1 { +- return nil, fmt.Errorf("no param found") +- } +- // Write a transformation to remove the param. +- var newParams []int +- for i := 0; i < info.decl.Type.Params.NumFields(); i++ { +- if i != info.paramIndex { +- newParams = append(newParams, i) +- } +- } +- return ChangeSignature(ctx, snapshot, pkg, pgf, rng, newParams) +-} +- +-// ChangeSignature computes a refactoring to update the signature according to +-// the provided parameter transformation, for the signature definition +-// surrounding rng. +-// +-// newParams expresses the new parameters for the signature in terms of the old +-// parameters. Each entry in newParams is the index of the new parameter in the +-// original parameter list. For example, given func Foo(a, b, c int) and newParams +-// [2, 0, 1], the resulting changed signature is Foo(c, a, b int). If newParams +-// omits an index of the original signature, that parameter is removed. +-// +-// This operation is a work in progress. Remaining TODO: +-// - Handle adding parameters. +-// - Handle adding/removing/reordering results. +-// - Improve the extra newlines in output. +-// - Stream type checking via ForEachPackage. +-// - Avoid unnecessary additional type checking. +-func ChangeSignature(ctx context.Context, snapshot *cache.Snapshot, pkg *cache.Package, pgf *parsego.File, rng protocol.Range, newParams []int) ([]protocol.DocumentChange, error) { +- // Changes to our heuristics for whether we can remove a parameter must also +- // be reflected in the canRemoveParameter helper. 
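A minimal runnable sketch of the wrapper-and-delegate shape for the [2, 0, 1] example above, with throwaway names (Foo0, FooWrapper) instead of the internal G_o_p_l_s_ tag; the real refactoring routes the rewrite through the inliner rather than emitting such code:

	package main

	import "fmt"

	// Original signature.
	func Foo(a, b, c int) string { return fmt.Sprint(a, b, c) }

	// Step 1: the new declaration with the parameters reordered per [2, 0, 1].
	func Foo0(c, a, b int) string { return fmt.Sprint(a, b, c) }

	// Step 2: a wrapper with the old signature that only delegates. Inlining
	// every call to the wrapper, then renaming Foo0 back to Foo, turns
	// Foo(1, 2, 3) into Foo(3, 1, 2).
	func FooWrapper(a, b, c int) string { return Foo0(c, a, b) }

	func main() {
		fmt.Println(Foo(1, 2, 3) == FooWrapper(1, 2, 3)) // true: behavior is preserved
	}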
+- if perrors, terrors := pkg.ParseErrors(), pkg.TypeErrors(); len(perrors) > 0 || len(terrors) > 0 { +- var sample string +- if len(perrors) > 0 { +- sample = perrors[0].Error() +- } else { +- sample = terrors[0].Error() +- } +- return nil, fmt.Errorf("can't change signatures for packages with parse or type errors: (e.g. %s)", sample) +- } +- +- info := findParam(pgf, rng) +- if info == nil || info.decl == nil { +- return nil, fmt.Errorf("failed to find declaration") +- } +- +- // Step 1: create the new declaration, which is a copy of the original decl +- // with the rewritten signature. +- +- // Flatten, transform and regroup fields, using the flatField intermediate +- // representation. A flatField is the result of flattening an *ast.FieldList +- // along with type information. +- type flatField struct { +- name string // empty if the field is unnamed +- typeExpr ast.Expr +- typ types.Type +- } +- +- var newParamFields []flatField +- for id, field := range internalastutil.FlatFields(info.decl.Type.Params) { +- typ := pkg.TypesInfo().TypeOf(field.Type) +- if typ == nil { +- return nil, fmt.Errorf("missing field type for field #%d", len(newParamFields)) +- } +- field := flatField{ +- typeExpr: field.Type, +- typ: typ, +- } +- if id != nil { +- field.name = id.Name +- } +- newParamFields = append(newParamFields, field) +- } +- +- // Select the new parameter fields. +- newParamFields, ok := selectElements(newParamFields, newParams) +- if !ok { +- return nil, fmt.Errorf("failed to apply parameter transformation %v", newParams) +- } +- +- // writeFields performs the regrouping of named fields. +- writeFields := func(flatFields []flatField) *ast.FieldList { +- list := new(ast.FieldList) +- for i, f := range flatFields { +- var field *ast.Field +- if i > 0 && f.name != "" && flatFields[i-1].name != "" && types.Identical(f.typ, flatFields[i-1].typ) { +- // Group named fields if they have the same type. +- field = list.List[len(list.List)-1] +- } else { +- // Otherwise, create a new field. +- field = &ast.Field{ +- Type: internalastutil.CloneNode(f.typeExpr), +- } +- list.List = append(list.List, field) +- } +- if f.name != "" { +- field.Names = append(field.Names, ast.NewIdent(f.name)) +- } +- } +- return list +- } +- +- newDecl := internalastutil.CloneNode(info.decl) +- newDecl.Type.Params = writeFields(newParamFields) +- +- // Step 2: build a wrapper function calling the new declaration. +- +- var ( +- params = internalastutil.CloneNode(info.decl.Type.Params) // parameters of wrapper func: "_" names must be modified +- args = make([]ast.Expr, len(newParams)) // arguments to the delegated call +- variadic = false // whether the signature is variadic +- ) +- { +- // Record names used by non-blank parameters, just in case the user had a +- // parameter named 'blank0', which would conflict with the synthetic names +- // we construct below. +- // TODO(rfindley): add an integration test for this behavior. +- nonBlankNames := make(map[string]bool) // for detecting conflicts with renamed blanks +- for _, fld := range params.List { +- for _, n := range fld.Names { +- if n.Name != "_" { +- nonBlankNames[n.Name] = true +- } +- } +- if len(fld.Names) == 0 { +- // All parameters must have a non-blank name. For convenience, give +- // this field a blank name. +- fld.Names = append(fld.Names, ast.NewIdent("_")) // will be named below +- } +- } +- // oldParams maps parameters to their argument in the delegated call. 
+- // In other words, it is the inverse of newParams, but it is represented as +- // a map rather than a slice, as not every old param need exist in +- // newParams. +- oldParams := make(map[int]int) +- for new, old := range newParams { +- oldParams[old] = new +- } +- blanks := 0 +- paramIndex := 0 // global param index. +- for id, field := range internalastutil.FlatFields(params) { +- argIndex, ok := oldParams[paramIndex] +- paramIndex++ +- if !ok { +- continue // parameter is removed +- } +- if id.Name == "_" { // from above: every field has names +- // Create names for blank (_) parameters so the delegating wrapper +- // can refer to them. +- for { +- // These names will not be seen by the user, so give them an +- // arbitrary name. +- newName := fmt.Sprintf("blank%d", blanks) +- blanks++ +- if !nonBlankNames[newName] { +- id.Name = newName +- break +- } +- } +- } +- args[argIndex] = ast.NewIdent(id.Name) +- // Record whether the call has an ellipsis. +- // (Only the last loop iteration matters.) +- _, variadic = field.Type.(*ast.Ellipsis) +- } +- } +- +- // Step 3: Rewrite all referring calls, by swapping in the wrapper and +- // inlining all. +- +- newContent, err := rewriteCalls(ctx, signatureRewrite{ +- snapshot: snapshot, +- pkg: pkg, +- pgf: pgf, +- origDecl: info.decl, +- newDecl: newDecl, +- params: params, +- callArgs: args, +- variadic: variadic, +- }) +- if err != nil { +- return nil, err +- } +- +- // Finally, rewrite the original declaration. We do this after inlining all +- // calls, as there may be calls in the same file as the declaration. But none +- // of the inlining should have changed the location of the original +- // declaration. +- { +- idx := findDecl(pgf.File, info.decl) +- if idx < 0 { +- return nil, bug.Errorf("didn't find original decl") +- } +- +- src, ok := newContent[pgf.URI] +- if !ok { +- src = pgf.Src +- } +- fset := tokeninternal.FileSetFor(pgf.Tok) +- src, err := rewriteSignature(fset, idx, src, newDecl) +- if err != nil { +- return nil, err +- } +- newContent[pgf.URI] = src +- } +- +- // Translate the resulting state into document changes. +- var changes []protocol.DocumentChange +- for uri, after := range newContent { +- fh, err := snapshot.ReadFile(ctx, uri) +- if err != nil { +- return nil, err +- } +- before, err := fh.Content() +- if err != nil { +- return nil, err +- } +- edits := diff.Bytes(before, after) +- mapper := protocol.NewMapper(uri, before) +- textedits, err := protocol.EditsFromDiffEdits(mapper, edits) +- if err != nil { +- return nil, fmt.Errorf("computing edits for %s: %v", uri, err) +- } +- change := protocol.DocumentChangeEdit(fh, textedits) +- changes = append(changes, change) +- } +- return changes, nil +-} +- +-// rewriteSignature rewrites the signature of the declIdx'th declaration in src +-// to use the signature of newDecl (described by fset). +-// +-// TODO(rfindley): I think this operation could be generalized, for example by +-// using a concept of a 'nodepath' to correlate nodes between two related +-// files. +-// +-// Note that with its current application, rewriteSignature is expected to +-// succeed. Separate bug.Errorf calls are used below (rather than one call at +-// the callsite) in order to have greater precision. +-func rewriteSignature(fset *token.FileSet, declIdx int, src0 []byte, newDecl *ast.FuncDecl) ([]byte, error) { +- // Parse the new file0 content, to locate the original params. 
+- file0, err := parser.ParseFile(fset, "", src0, parser.ParseComments|parser.SkipObjectResolution) +- if err != nil { +- return nil, bug.Errorf("re-parsing declaring file failed: %v", err) +- } +- decl0, _ := file0.Decls[declIdx].(*ast.FuncDecl) +- // Inlining shouldn't have changed the location of any declarations, but do +- // a sanity check. +- if decl0 == nil || decl0.Name.Name != newDecl.Name.Name { +- return nil, bug.Errorf("inlining affected declaration order: found %v, not func %s", decl0, newDecl.Name.Name) +- } +- opening0, closing0, err := safetoken.Offsets(fset.File(decl0.Pos()), decl0.Type.Params.Opening, decl0.Type.Params.Closing) +- if err != nil { +- return nil, bug.Errorf("can't find params: %v", err) +- } +- +- // Format the modified signature and apply a textual replacement. This +- // minimizes comment disruption. +- formattedType := FormatNode(fset, newDecl.Type) +- expr, err := parser.ParseExprFrom(fset, "", []byte(formattedType), 0) +- if err != nil { +- return nil, bug.Errorf("parsing modified signature: %v", err) +- } +- newType := expr.(*ast.FuncType) +- opening1, closing1, err := safetoken.Offsets(fset.File(newType.Pos()), newType.Params.Opening, newType.Params.Closing) +- if err != nil { +- return nil, bug.Errorf("param offsets: %v", err) +- } +- newParams := formattedType[opening1 : closing1+1] +- +- // Splice. +- var buf bytes.Buffer +- buf.Write(src0[:opening0]) +- buf.WriteString(newParams) +- buf.Write(src0[closing0+1:]) +- newSrc := buf.Bytes() +- if len(file0.Imports) > 0 { +- formatted, err := imports.Process("output", newSrc, nil) +- if err != nil { +- return nil, bug.Errorf("imports.Process failed: %v", err) +- } +- newSrc = formatted +- } +- return newSrc, nil +-} +- +-// paramInfo records information about a param identified by a position. +-type paramInfo struct { +- decl *ast.FuncDecl // enclosing func decl (non-nil) +- paramIndex int // index of param among all params, or -1 +- field *ast.Field // enclosing field of Decl, or nil if range not among parameters +- name *ast.Ident // indicated name (either enclosing, or Field.Names[0] if len(Field.Names) == 1) +-} +- +-// findParam finds the parameter information spanned by the given range. +-func findParam(pgf *parsego.File, rng protocol.Range) *paramInfo { +- info := paramInfo{paramIndex: -1} +- start, end, err := pgf.RangePos(rng) +- if err != nil { +- return nil +- } +- +- path, _ := astutil.PathEnclosingInterval(pgf.File, start, end) +- var ( +- id *ast.Ident +- field *ast.Field +- ) +- // Find the outermost enclosing node of each kind, whether or not they match +- // the semantics described in the docstring. +- for _, n := range path { +- switch n := n.(type) { +- case *ast.Ident: +- id = n +- case *ast.Field: +- field = n +- case *ast.FuncDecl: +- info.decl = n +- } +- } +- if info.decl == nil { +- return nil +- } +- if field == nil { +- return &info +- } +- pi := 0 +- // Search for field and id among parameters of decl. +- // This search may fail, even if one or both of id and field are non nil: +- // field could be from a result or local declaration, and id could be part of +- // the field type rather than names. 
+- for _, f := range info.decl.Type.Params.List { +- if f == field { +- info.paramIndex = pi // may be modified later +- info.field = f +- for _, n := range f.Names { +- if n == id { +- info.paramIndex = pi +- info.name = n +- break +- } +- pi++ +- } +- if info.name == nil && len(info.field.Names) == 1 { +- info.name = info.field.Names[0] +- } +- break +- } else { +- m := len(f.Names) +- if m == 0 { +- m = 1 +- } +- pi += m +- } +- } +- return &info +-} +- +-// signatureRewrite defines a rewritten function signature. +-// +-// See rewriteCalls for more details. +-type signatureRewrite struct { +- snapshot *cache.Snapshot +- pkg *cache.Package +- pgf *parsego.File +- origDecl, newDecl *ast.FuncDecl +- params *ast.FieldList +- callArgs []ast.Expr +- variadic bool +-} +- +-// rewriteCalls returns the document changes required to rewrite the +-// signature of origDecl to that of newDecl. +-// +-// This is a rather complicated factoring of the rewrite operation, but is able +-// to describe arbitrary rewrites. Specifically, rewriteCalls creates a +-// synthetic copy of pkg, where the original function declaration is changed to +-// be a trivial wrapper around the new declaration. params and callArgs are +-// used to perform this delegation: params must have the same type as origDecl, +-// but may have renamed parameters (such as is required for delegating blank +-// parameters). callArgs are the arguments of the delegated call (i.e. using +-// params). +-// +-// For example, consider removing the unused 'b' parameter below, rewriting +-// +-// func Foo(a, b, c, _ int) int { +-// return a+c +-// } +-// +-// To +-// +-// func Foo(a, c, _ int) int { +-// return a+c +-// } +-// +-// In this case, rewriteCalls is parameterized as follows: +-// - origDecl is the original declaration +-// - newDecl is the new declaration, which is a copy of origDecl less the 'b' +-// parameter. +-// - params is a new parameter list (a, b, c, blank0 int) to be used for the +-// new wrapper. +-// - callArgs is the argument list (a, c, blank0), to be used to call the new +-// delegate. +-// +-// rewriting is expressed this way so that rewriteCalls can own the details +-// of *how* this rewriting is performed. For example, as of writing it names +-// the synthetic delegate G_o_p_l_s_foo, but the caller need not know this. +-// +-// By passing an entirely new declaration, rewriteCalls may be used for +-// signature refactorings that may affect the function body, such as removing +-// or adding return values. +-func rewriteCalls(ctx context.Context, rw signatureRewrite) (map[protocol.DocumentURI][]byte, error) { +- // tag is a unique prefix that is added to the delegated declaration. +- // +- // It must have a ~0% probability of causing collisions with existing names. +- const tag = "G_o_p_l_s_" +- +- var ( +- modifiedSrc []byte +- modifiedFile *ast.File +- modifiedDecl *ast.FuncDecl +- ) +- { +- delegate := internalastutil.CloneNode(rw.newDecl) // clone before modifying +- delegate.Name.Name = tag + delegate.Name.Name +- if obj := rw.pkg.Types().Scope().Lookup(delegate.Name.Name); obj != nil { +- return nil, fmt.Errorf("synthetic name %q conflicts with an existing declaration", delegate.Name.Name) +- } +- +- wrapper := internalastutil.CloneNode(rw.origDecl) +- wrapper.Type.Params = rw.params +- +- // Get the receiver name, creating it if necessary. 
+- var recv string // nonempty => call is a method call with receiver recv +- if wrapper.Recv.NumFields() > 0 { +- if len(wrapper.Recv.List[0].Names) > 0 { +- recv = wrapper.Recv.List[0].Names[0].Name +- } else { +- // Create unique name for the temporary receiver, which will be inlined away. +- // +- // We use the lexical scope of the original function to avoid conflicts +- // with (e.g.) named result variables. However, since the parameter syntax +- // may have been modified/renamed from the original function, we must +- // reject those names too. +- usedParams := make(map[string]bool) +- for _, fld := range wrapper.Type.Params.List { +- for _, name := range fld.Names { +- usedParams[name.Name] = true +- } +- } +- scope := rw.pkg.TypesInfo().Scopes[rw.origDecl.Type] +- if scope == nil { +- return nil, bug.Errorf("missing function scope for %v", rw.origDecl.Name.Name) +- } +- for i := 0; ; i++ { +- recv = fmt.Sprintf("r%d", i) +- _, obj := scope.LookupParent(recv, token.NoPos) +- if obj == nil && !usedParams[recv] { +- break +- } +- } +- wrapper.Recv.List[0].Names = []*ast.Ident{{Name: recv}} +- } +- } +- +- name := &ast.Ident{Name: delegate.Name.Name} +- var fun ast.Expr = name +- if recv != "" { +- fun = &ast.SelectorExpr{ +- X: &ast.Ident{Name: recv}, +- Sel: name, +- } +- } +- call := &ast.CallExpr{ +- Fun: fun, +- Args: rw.callArgs, +- } +- if rw.variadic { +- call.Ellipsis = 1 // must not be token.NoPos +- } +- +- var stmt ast.Stmt +- if delegate.Type.Results.NumFields() > 0 { +- stmt = &ast.ReturnStmt{ +- Results: []ast.Expr{call}, +- } +- } else { +- stmt = &ast.ExprStmt{ +- X: call, +- } +- } +- wrapper.Body = &ast.BlockStmt{ +- List: []ast.Stmt{stmt}, +- } +- +- fset := tokeninternal.FileSetFor(rw.pgf.Tok) +- var err error +- modifiedSrc, err = replaceFileDecl(rw.pgf, rw.origDecl, delegate) +- if err != nil { +- return nil, err +- } +- // TODO(rfindley): we can probably get away with one fewer parse operations +- // by returning the modified AST from replaceDecl. Investigate if that is +- // accurate. +- modifiedSrc = append(modifiedSrc, []byte("\n\n"+FormatNode(fset, wrapper))...) +- modifiedFile, err = parser.ParseFile(rw.pkg.FileSet(), rw.pgf.URI.Path(), modifiedSrc, parser.ParseComments|parser.SkipObjectResolution) +- if err != nil { +- return nil, err +- } +- modifiedDecl = modifiedFile.Decls[len(modifiedFile.Decls)-1].(*ast.FuncDecl) +- } +- +- // Type check pkg again with the modified file, to compute the synthetic +- // callee. +- logf := logger(ctx, "change signature", rw.snapshot.Options().VerboseOutput) +- pkg2, info, err := reTypeCheck(logf, rw.pkg, map[protocol.DocumentURI]*ast.File{rw.pgf.URI: modifiedFile}, false) +- if err != nil { +- return nil, err +- } +- calleeInfo, err := inline.AnalyzeCallee(logf, rw.pkg.FileSet(), pkg2, info, modifiedDecl, modifiedSrc) +- if err != nil { +- return nil, fmt.Errorf("analyzing callee: %v", err) +- } +- +- post := func(got []byte) []byte { return bytes.ReplaceAll(got, []byte(tag), nil) } +- opts := &inline.Options{ +- Logf: logf, +- IgnoreEffects: true, +- } +- return inlineAllCalls(ctx, rw.snapshot, rw.pkg, rw.pgf, rw.origDecl, calleeInfo, post, opts) +-} +- +-// reTypeCheck re-type checks orig with new file contents defined by fileMask. +-// +-// It expects that any newly added imports are already present in the +-// transitive imports of orig. +-// +-// If expectErrors is true, reTypeCheck allows errors in the new package. +-// TODO(rfindley): perhaps this should be a filter to specify which errors are +-// acceptable. 
+-func reTypeCheck(logf func(string, ...any), orig *cache.Package, fileMask map[protocol.DocumentURI]*ast.File, expectErrors bool) (*types.Package, *types.Info, error) { +- pkg := types.NewPackage(string(orig.Metadata().PkgPath), string(orig.Metadata().Name)) +- info := &types.Info{ +- Types: make(map[ast.Expr]types.TypeAndValue), +- Defs: make(map[*ast.Ident]types.Object), +- Uses: make(map[*ast.Ident]types.Object), +- Implicits: make(map[ast.Node]types.Object), +- Selections: make(map[*ast.SelectorExpr]*types.Selection), +- Scopes: make(map[ast.Node]*types.Scope), +- Instances: make(map[*ast.Ident]types.Instance), +- FileVersions: make(map[*ast.File]string), +- } +- { +- var files []*ast.File +- for _, pgf := range orig.CompiledGoFiles() { +- if mask, ok := fileMask[pgf.URI]; ok { +- files = append(files, mask) +- } else { +- files = append(files, pgf.File) +- } +- } +- +- // Implement a BFS for imports in the transitive package graph. +- // +- // Note that this only works if any newly added imports are expected to be +- // present among transitive imports. In general we cannot assume this to +- // be the case, but in the special case of removing a parameter it works +- // because any parameter types must be present in export data. +- var importer func(importPath string) (*types.Package, error) +- { +- var ( +- importsByPath = make(map[string]*types.Package) // cached imports +- toSearch = []*types.Package{orig.Types()} // packages to search +- searched = make(map[string]bool) // path -> (false, if present in toSearch; true, if already searched) +- ) +- importer = func(path string) (*types.Package, error) { +- if p, ok := importsByPath[path]; ok { +- return p, nil +- } +- for len(toSearch) > 0 { +- pkg := toSearch[0] +- toSearch = toSearch[1:] +- searched[pkg.Path()] = true +- for _, p := range pkg.Imports() { +- // TODO(rfindley): this is incorrect: p.Path() is a package path, +- // whereas path is an import path. We can fix this by reporting any +- // newly added imports from inlining, or by using the ImporterFrom +- // interface and package metadata. +- // +- // TODO(rfindley): can't the inliner also be wrong here? It's +- // possible that an import path means different things depending on +- // the location. +- importsByPath[p.Path()] = p +- if _, ok := searched[p.Path()]; !ok { +- searched[p.Path()] = false +- toSearch = append(toSearch, p) +- } +- } +- if p, ok := importsByPath[path]; ok { +- return p, nil +- } +- } +- return nil, fmt.Errorf("missing import") +- } +- } +- cfg := &types.Config{ +- Sizes: orig.Metadata().TypesSizes, +- Importer: ImporterFunc(importer), +- } +- +- // Copied from cache/check.go. +- // TODO(rfindley): factor this out and fix goVersionRx. +- // Set Go dialect. +- if module := orig.Metadata().Module; module != nil && module.GoVersion != "" { +- goVersion := "go" + module.GoVersion +- // types.NewChecker panics if GoVersion is invalid. +- // An unparsable mod file should probably stop us +- // before we get here, but double check just in case. 
+- if goVersionRx.MatchString(goVersion) { +- cfg.GoVersion = goVersion +- } +- } +- if expectErrors { +- cfg.Error = func(err error) { +- logf("re-type checking: expected error: %v", err) +- } +- } +- typesinternal.SetUsesCgo(cfg) +- checker := types.NewChecker(cfg, orig.FileSet(), pkg, info) +- if err := checker.Files(files); err != nil && !expectErrors { +- return nil, nil, fmt.Errorf("type checking rewritten package: %v", err) +- } +- } +- return pkg, info, nil +-} +- +-// TODO(golang/go#63472): this looks wrong with the new Go version syntax. +-var goVersionRx = regexp.MustCompile(`^go([1-9][0-9]*)\.(0|[1-9][0-9]*)$`) +- +-// selectElements returns a new array of elements of s indicated by the +-// provided list of indices. It returns false if any index was out of bounds. +-// +-// For example, given the slice []string{"a", "b", "c", "d"}, the +-// indices []int{3, 0, 1} results in the slice []string{"d", "a", "b"}. +-func selectElements[T any](s []T, indices []int) ([]T, bool) { +- res := make([]T, len(indices)) +- for i, index := range indices { +- if index < 0 || index >= len(s) { +- return nil, false +- } +- res[i] = s[index] +- } +- return res, true +-} +- +-// replaceFileDecl replaces old with new in the file described by pgf. +-// +-// TODO(rfindley): generalize, and combine with rewriteSignature. +-func replaceFileDecl(pgf *parsego.File, old, new ast.Decl) ([]byte, error) { +- i := findDecl(pgf.File, old) +- if i == -1 { +- return nil, bug.Errorf("didn't find old declaration") +- } +- start, end, err := safetoken.Offsets(pgf.Tok, old.Pos(), old.End()) +- if err != nil { +- return nil, err +- } +- var out bytes.Buffer +- out.Write(pgf.Src[:start]) +- fset := tokeninternal.FileSetFor(pgf.Tok) +- if err := format.Node(&out, fset, new); err != nil { +- return nil, bug.Errorf("formatting new node: %v", err) +- } +- out.Write(pgf.Src[end:]) +- return out.Bytes(), nil +-} +- +-// findDecl finds the index of decl in file.Decls. +-// +-// TODO: use slices.Index when it is available. +-func findDecl(file *ast.File, decl ast.Decl) int { +- for i, d := range file.Decls { +- if d == decl { +- return i +- } +- } +- return -1 +-} +diff -urN a/gopls/internal/golang/codeaction.go b/gopls/internal/golang/codeaction.go +--- a/gopls/internal/golang/codeaction.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/golang/codeaction.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,1132 +0,0 @@ +-// Copyright 2024 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. 
+- +-package golang +- +-import ( +- "context" +- "encoding/json" +- "fmt" +- "go/ast" +- "go/token" +- "go/types" +- "reflect" +- "slices" +- "strings" +- +- goastutil "golang.org/x/tools/go/ast/astutil" +- "golang.org/x/tools/go/ast/edge" +- "golang.org/x/tools/go/ast/inspector" +- "golang.org/x/tools/gopls/internal/analysis/fillstruct" +- "golang.org/x/tools/gopls/internal/analysis/fillswitch" +- "golang.org/x/tools/gopls/internal/cache" +- "golang.org/x/tools/gopls/internal/cache/parsego" +- "golang.org/x/tools/gopls/internal/file" +- "golang.org/x/tools/gopls/internal/golang/stubmethods" +- "golang.org/x/tools/gopls/internal/label" +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/gopls/internal/protocol/command" +- "golang.org/x/tools/gopls/internal/settings" +- "golang.org/x/tools/internal/astutil" +- "golang.org/x/tools/internal/event" +- "golang.org/x/tools/internal/imports" +- "golang.org/x/tools/internal/typesinternal" +-) +- +-// CodeActions returns all enabled code actions (edits and other +-// commands) available for the selected range. +-// +-// Depending on how the request was triggered, fewer actions may be +-// offered, e.g. to avoid UI distractions after mere cursor motion. +-// +-// See ../protocol/codeactionkind.go for some code action theory. +-func CodeActions(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle, rng protocol.Range, diagnostics []protocol.Diagnostic, enabled func(protocol.CodeActionKind) bool, trigger protocol.CodeActionTriggerKind) (actions []protocol.CodeAction, _ error) { +- loc := fh.URI().Location(rng) +- +- pgf, err := snapshot.ParseGo(ctx, fh, parsego.Full) +- if err != nil { +- return nil, err +- } +- start, end, err := pgf.RangePos(rng) +- if err != nil { +- return nil, err +- } +- +- // Scan to see if any enabled producer needs type information. +- var enabledMemo [len(codeActionProducers)]bool +- needTypes := false +- for i, p := range codeActionProducers { +- if enabled(p.kind) { +- enabledMemo[i] = true +- if p.needPkg { +- needTypes = true +- } +- } +- } +- +- // Compute type information if needed. +- // Also update pgf, start, end to be consistent with pkg. +- // They may differ in case of parse cache miss. +- var pkg *cache.Package +- if needTypes { +- var err error +- pkg, pgf, err = NarrowestPackageForFile(ctx, snapshot, loc.URI) +- if err != nil { +- return nil, err +- } +- start, end, err = pgf.RangePos(loc.Range) +- if err != nil { +- return nil, err +- } +- } +- +- // Execute each enabled producer function. +- req := &codeActionsRequest{ +- actions: &actions, +- lazy: make(map[reflect.Type]any), +- snapshot: snapshot, +- fh: fh, +- pgf: pgf, +- loc: loc, +- start: start, +- end: end, +- diagnostics: diagnostics, +- trigger: trigger, +- pkg: pkg, +- } +- for i, p := range codeActionProducers { +- if !enabledMemo[i] { +- continue +- } +- req.kind = p.kind +- if p.needPkg { +- req.pkg = pkg +- } else { +- req.pkg = nil +- } +- if err := p.fn(ctx, req); err != nil { +- // An error in one code action producer +- // should not affect the others. +- if ctx.Err() != nil { +- return nil, err +- } +- event.Error(ctx, fmt.Sprintf("CodeAction producer %s failed", p.kind), err) +- continue +- } +- } +- +- // Return code actions in the order their providers are listed. +- return actions, nil +-} +- +-// A codeActionsRequest is passed to each function +-// that produces code actions. +-type codeActionsRequest struct { +- // internal fields for use only by [CodeActions]. 
+- actions *[]protocol.CodeAction // pointer to output slice; call addAction to populate +- lazy map[reflect.Type]any // lazy construction +- +- // inputs to the producer function: +- kind protocol.CodeActionKind +- snapshot *cache.Snapshot +- fh file.Handle +- pgf *parsego.File +- loc protocol.Location +- start, end token.Pos +- diagnostics []protocol.Diagnostic +- trigger protocol.CodeActionTriggerKind +- pkg *cache.Package // set only if producer.needPkg +-} +- +-// addApplyFixAction adds an ApplyFix command-based CodeAction to the result. +-func (req *codeActionsRequest) addApplyFixAction(title, fix string, loc protocol.Location) { +- cmd := command.NewApplyFixCommand(title, command.ApplyFixArgs{ +- Fix: fix, +- Location: loc, +- ResolveEdits: req.resolveEdits(), +- }) +- req.addCommandAction(cmd, true) +-} +- +-// addCommandAction adds a CodeAction to the result based on the provided command. +-// +-// If allowResolveEdits (and the client supports codeAction/resolve) +-// then the command is embedded into the code action data field so +-// that the client can later ask the server to "resolve" a command +-// into an edit that they can preview and apply selectively. +-// IMPORTANT: set allowResolveEdits only for actions that are 'edit aware', +-// meaning they can detect when they are being executed in the context of a +-// codeAction/resolve request, and return edits rather than applying them using +-// workspace/applyEdit. In golang/go#71405, edits were being apply during the +-// codeAction/resolve request handler. +-// TODO(rfindley): refactor the command and code lens registration APIs so that +-// resolve edit support is inferred from the command signature, not dependent +-// on coordination between codeAction and command logic. +-// +-// Otherwise, the command is set as the code action operation. +-func (req *codeActionsRequest) addCommandAction(cmd *protocol.Command, allowResolveEdits bool) { +- act := protocol.CodeAction{ +- Title: cmd.Title, +- Kind: req.kind, +- } +- if allowResolveEdits && req.resolveEdits() { +- data, err := json.Marshal(cmd) +- if err != nil { +- panic("unable to marshal") +- } +- msg := json.RawMessage(data) +- act.Data = &msg +- } else { +- act.Command = cmd +- } +- req.addAction(act) +-} +- +-// addEditAction adds an edit-based CodeAction to the result. +-func (req *codeActionsRequest) addEditAction(title string, fixedDiagnostics []protocol.Diagnostic, changes ...protocol.DocumentChange) { +- req.addAction(protocol.CodeAction{ +- Title: title, +- Kind: req.kind, +- Diagnostics: fixedDiagnostics, +- Edit: protocol.NewWorkspaceEdit(changes...), +- }) +-} +- +-// addAction adds a code action to the response. +-func (req *codeActionsRequest) addAction(act protocol.CodeAction) { +- *req.actions = append(*req.actions, act) +-} +- +-// resolveEdits reports whether the client can resolve edits lazily. +-func (req *codeActionsRequest) resolveEdits() bool { +- opts := req.snapshot.Options() +- return opts.CodeActionResolveOptions != nil && +- slices.Contains(opts.CodeActionResolveOptions, "edit") +-} +- +-// lazyInit[*T](ctx, req) returns a pointer to an instance of T, +-// calling new(T).init(ctx.req) on the first request. +-// +-// It is conceptually a (generic) method of req. 
+-func lazyInit[P interface { +- init(ctx context.Context, req *codeActionsRequest) +- *T +-}, T any](ctx context.Context, req *codeActionsRequest) P { +- t := reflect.TypeFor[T]() +- v, ok := req.lazy[t].(P) +- if !ok { +- v = new(T) +- v.init(ctx, req) +- req.lazy[t] = v +- } +- return v +-} +- +-// -- producers -- +- +-// A codeActionProducer describes a function that produces CodeActions +-// of a particular kind. +-// The function is only called if that kind is enabled. +-type codeActionProducer struct { +- kind protocol.CodeActionKind +- fn func(ctx context.Context, req *codeActionsRequest) error +- needPkg bool // fn needs type information (req.pkg) +-} +- +-// Code Actions are returned in the order their producers are listed below. +-// Depending on the client, this may influence the order they appear in the UI. +-var codeActionProducers = [...]codeActionProducer{ +- {kind: protocol.QuickFix, fn: quickFix, needPkg: true}, +- {kind: protocol.SourceOrganizeImports, fn: sourceOrganizeImports}, +- {kind: settings.AddTest, fn: addTest, needPkg: true}, +- {kind: settings.GoAssembly, fn: goAssembly, needPkg: true}, +- {kind: settings.GoDoc, fn: goDoc, needPkg: true}, +- {kind: settings.GoFreeSymbols, fn: goFreeSymbols}, +- {kind: settings.GoSplitPackage, fn: goSplitPackage, needPkg: true}, +- {kind: settings.GoTest, fn: goTest, needPkg: true}, +- {kind: settings.GoToggleCompilerOptDetails, fn: toggleCompilerOptDetails}, +- {kind: settings.RefactorExtractFunction, fn: refactorExtractFunction}, +- {kind: settings.RefactorExtractMethod, fn: refactorExtractMethod}, +- {kind: settings.RefactorExtractToNewFile, fn: refactorExtractToNewFile}, +- {kind: settings.RefactorExtractConstant, fn: refactorExtractVariable, needPkg: true}, +- {kind: settings.RefactorExtractVariable, fn: refactorExtractVariable, needPkg: true}, +- {kind: settings.RefactorExtractConstantAll, fn: refactorExtractVariableAll, needPkg: true}, +- {kind: settings.RefactorExtractVariableAll, fn: refactorExtractVariableAll, needPkg: true}, +- {kind: settings.RefactorInlineCall, fn: refactorInlineCall, needPkg: true}, +- {kind: settings.RefactorInlineVariable, fn: refactorInlineVariable, needPkg: true}, +- {kind: settings.RefactorRewriteChangeQuote, fn: refactorRewriteChangeQuote}, +- {kind: settings.RefactorRewriteFillStruct, fn: refactorRewriteFillStruct, needPkg: true}, +- {kind: settings.RefactorRewriteFillSwitch, fn: refactorRewriteFillSwitch, needPkg: true}, +- {kind: settings.RefactorRewriteInvertIf, fn: refactorRewriteInvertIf}, +- {kind: settings.RefactorRewriteJoinLines, fn: refactorRewriteJoinLines, needPkg: true}, +- {kind: settings.RefactorRewriteRemoveUnusedParam, fn: refactorRewriteRemoveUnusedParam, needPkg: true}, +- {kind: settings.RefactorRewriteMoveParamLeft, fn: refactorRewriteMoveParamLeft, needPkg: true}, +- {kind: settings.RefactorRewriteMoveParamRight, fn: refactorRewriteMoveParamRight, needPkg: true}, +- {kind: settings.RefactorRewriteSplitLines, fn: refactorRewriteSplitLines, needPkg: true}, +- {kind: settings.RefactorRewriteEliminateDotImport, fn: refactorRewriteEliminateDotImport, needPkg: true}, +- {kind: settings.RefactorRewriteAddTags, fn: refactorRewriteAddStructTags, needPkg: true}, +- {kind: settings.RefactorRewriteRemoveTags, fn: refactorRewriteRemoveStructTags, needPkg: true}, +- {kind: settings.GoplsDocFeatures, fn: goplsDocFeatures}, // offer this one last (#72742) +- +- // Note: don't forget to update the allow-list in Server.CodeAction +- // when adding new query operations like GoTest and 
GoDoc that +- // are permitted even in generated source files. +-} +- +-// sourceOrganizeImports produces "Organize Imports" code actions. +-func sourceOrganizeImports(ctx context.Context, req *codeActionsRequest) error { +- res := lazyInit[*allImportsFixesResult](ctx, req) +- +- // Send all of the import edits as one code action +- // if the file is being organized. +- if len(res.allFixEdits) > 0 { +- req.addEditAction("Organize Imports", nil, protocol.DocumentChangeEdit(req.fh, res.allFixEdits)) +- } +- +- return nil +-} +- +-// quickFix produces code actions that fix errors, +-// for example by adding/deleting/renaming imports, +-// or declaring the missing methods of a type. +-func quickFix(ctx context.Context, req *codeActionsRequest) error { +- // Only compute quick fixes if there are any diagnostics to fix. +- if len(req.diagnostics) == 0 { +- return nil +- } +- +- // Process any missing imports and pair them with the diagnostics they fix. +- res := lazyInit[*allImportsFixesResult](ctx, req) +- if res.err != nil { +- return nil +- } +- +- // Separate this into a set of codeActions per diagnostic, where +- // each action is the addition, removal, or renaming of one import. +- for _, importFix := range res.editsPerFix { +- fixedDiags := fixedByImportFix(importFix.fix, req.diagnostics) +- if len(fixedDiags) == 0 { +- continue +- } +- req.addEditAction(importFixTitle(importFix.fix), fixedDiags, protocol.DocumentChangeEdit(req.fh, importFix.edits)) +- } +- +- // Quick fixes for type errors. +- info := req.pkg.TypesInfo() +- for _, typeError := range req.pkg.TypeErrors() { +- // Does type error overlap with CodeAction range? +- start, end := typeError.Pos, typeError.Pos +- if _, _, endPos, ok := typesinternal.ErrorCodeStartEnd(typeError); ok { +- end = endPos +- } +- typeErrorRange, err := req.pgf.PosRange(start, end) +- if err != nil || !protocol.Intersect(typeErrorRange, req.loc.Range) { +- continue +- } +- +- msg := typeError.Msg +- switch { +- // "Missing method" error? (stubmethods) +- // Offer a "Declare missing methods of INTERFACE" code action. +- // See [stubMissingInterfaceMethodsFixer] for command implementation. +- case strings.Contains(msg, "missing method"), +- strings.HasPrefix(msg, "cannot convert"), +- strings.Contains(msg, "not implement"): +- si := stubmethods.GetIfaceStubInfo(req.pkg.FileSet(), info, req.pgf, start, end) +- if si != nil { +- qual := typesinternal.FileQualifier(req.pgf.File, si.Concrete.Obj().Pkg()) +- iface := types.TypeString(si.Interface.Type(), qual) +- msg := fmt.Sprintf("Declare missing methods of %s", iface) +- req.addApplyFixAction(msg, fixMissingInterfaceMethods, req.loc) +- } +- +- // "type X has no field or method Y" compiler error. +- // Offer a "Declare missing method T.f" code action. +- // See [stubMissingCalledFunctionFixer] for command implementation. +- case strings.Contains(msg, "has no field or method"): +- si := stubmethods.GetCallStubInfo(req.pkg.FileSet(), info, req.pgf, start, end) +- if si != nil { +- msg := fmt.Sprintf("Declare missing method %s.%s", si.Receiver.Obj().Name(), si.MethodName) +- req.addApplyFixAction(msg, fixMissingCalledFunction, req.loc) +- } +- +- // "undeclared name: X" or "undefined: X" compiler error. +- // Offer a "Create variable/function X" code action. +- // See [createUndeclared] for command implementation. 
+- case strings.HasPrefix(msg, "undeclared name: "), +- strings.HasPrefix(msg, "undefined: "): +- cur, _ := req.pgf.Cursor.FindByPos(start, end) +- title := undeclaredFixTitle(cur, msg) +- if title != "" { +- req.addApplyFixAction(title, fixCreateUndeclared, req.loc) +- } +- } +- } +- +- return nil +-} +- +-// allImportsFixesResult is the result of a lazy call to allImportsFixes. +-// It implements the codeActionsRequest lazyInit interface. +-type allImportsFixesResult struct { +- allFixEdits []protocol.TextEdit +- editsPerFix []*importFix +- err error +-} +- +-func (res *allImportsFixesResult) init(ctx context.Context, req *codeActionsRequest) { +- res.allFixEdits, res.editsPerFix, res.err = allImportsFixes(ctx, req.snapshot, req.pgf) +- if res.err != nil { +- event.Error(ctx, "imports fixes", res.err, label.File.Of(req.loc.URI.Path())) +- } +-} +- +-func importFixTitle(fix *imports.ImportFix) string { +- var str string +- switch fix.FixType { +- case imports.AddImport: +- str = fmt.Sprintf("Add import: %s %q", fix.StmtInfo.Name, fix.StmtInfo.ImportPath) +- case imports.DeleteImport: +- str = fmt.Sprintf("Delete import: %s %q", fix.StmtInfo.Name, fix.StmtInfo.ImportPath) +- case imports.SetImportName: +- str = fmt.Sprintf("Rename import: %s %q", fix.StmtInfo.Name, fix.StmtInfo.ImportPath) +- } +- return str +-} +- +-// fixedByImportFix filters the provided slice of diagnostics to those that +-// would be fixed by the provided imports fix. +-func fixedByImportFix(fix *imports.ImportFix, diagnostics []protocol.Diagnostic) []protocol.Diagnostic { +- var results []protocol.Diagnostic +- for _, diagnostic := range diagnostics { +- switch { +- // "undeclared name: X" may be an unresolved import. +- case strings.HasPrefix(diagnostic.Message, "undeclared name: "): +- ident := strings.TrimPrefix(diagnostic.Message, "undeclared name: ") +- if ident == fix.IdentName { +- results = append(results, diagnostic) +- } +- // "undefined: X" may be an unresolved import at Go 1.20+. +- case strings.HasPrefix(diagnostic.Message, "undefined: "): +- ident := strings.TrimPrefix(diagnostic.Message, "undefined: ") +- if ident == fix.IdentName { +- results = append(results, diagnostic) +- } +- // "could not import: X" may be an invalid import. +- case strings.HasPrefix(diagnostic.Message, "could not import: "): +- ident := strings.TrimPrefix(diagnostic.Message, "could not import: ") +- if ident == fix.IdentName { +- results = append(results, diagnostic) +- } +- // "X imported but not used" is an unused import. +- // "X imported but not used as Y" is an unused import. +- case strings.Contains(diagnostic.Message, " imported but not used"): +- idx := strings.Index(diagnostic.Message, " imported but not used") +- importPath := diagnostic.Message[:idx] +- if importPath == fmt.Sprintf("%q", fix.StmtInfo.ImportPath) { +- results = append(results, diagnostic) +- } +- } +- } +- return results +-} +- +-// goFreeSymbols produces "Browse free symbols" code actions. +-// See [server.commandHandler.FreeSymbols] for command implementation. +-func goFreeSymbols(ctx context.Context, req *codeActionsRequest) error { +- if !req.loc.Empty() { +- cmd := command.NewFreeSymbolsCommand("Browse free symbols", req.snapshot.View().ID(), req.loc) +- req.addCommandAction(cmd, false) +- } +- return nil +-} +- +-// goSplitPackage produces "Split package p" code actions. +-// See [server.commandHandler.SplitPackage] for command implementation. 
+-func goSplitPackage(ctx context.Context, req *codeActionsRequest) error { +- // TODO(adonovan): ideally we would key by the package path, +- // or the ID of the widest package for the current file, +- // so that we don't see different results when toggling +- // between p.go and p_test.go. +- // +- // TODO(adonovan): opt: req should always provide metadata so +- // that we don't have to request type checking (needPkg=true). +- meta := req.pkg.Metadata() +- title := fmt.Sprintf("Split package %q", meta.Name) +- cmd := command.NewSplitPackageCommand(title, req.snapshot.View().ID(), string(meta.ID)) +- req.addCommandAction(cmd, false) +- return nil +-} +- +-// goplsDocFeatures produces "Browse gopls feature documentation" code actions. +-// See [server.commandHandler.ClientOpenURL] for command implementation. +-func goplsDocFeatures(ctx context.Context, req *codeActionsRequest) error { +- cmd := command.NewClientOpenURLCommand( +- "Browse gopls feature documentation", +- "https://go.dev/gopls/features") +- req.addCommandAction(cmd, false) +- return nil +-} +- +-// goDoc produces "Browse documentation for X" code actions. +-// See [server.commandHandler.Doc] for command implementation. +-func goDoc(ctx context.Context, req *codeActionsRequest) error { +- _, _, title := DocFragment(req.pkg, req.pgf, req.start, req.end) +- if title != "" { +- cmd := command.NewDocCommand(title, command.DocArgs{Location: req.loc, ShowDocument: true}) +- req.addCommandAction(cmd, false) +- } +- return nil +-} +- +-// refactorExtractFunction produces "Extract function" code actions. +-// See [extractFunction] for command implementation. +-func refactorExtractFunction(ctx context.Context, req *codeActionsRequest) error { +- if _, ok, _, _ := canExtractFunction(req.pgf.Tok, req.start, req.end, req.pgf.Src, req.pgf.Cursor); ok { +- req.addApplyFixAction("Extract function", fixExtractFunction, req.loc) +- } +- return nil +-} +- +-// refactorExtractMethod produces "Extract method" code actions. +-// See [extractMethod] for command implementation. +-func refactorExtractMethod(ctx context.Context, req *codeActionsRequest) error { +- if _, ok, methodOK, _ := canExtractFunction(req.pgf.Tok, req.start, req.end, req.pgf.Src, req.pgf.Cursor); ok && methodOK { +- req.addApplyFixAction("Extract method", fixExtractMethod, req.loc) +- } +- return nil +-} +- +-// refactorExtractVariable produces "Extract variable|constant" code actions. +-// See [extractVariable] for command implementation. +-func refactorExtractVariable(ctx context.Context, req *codeActionsRequest) error { +- info := req.pkg.TypesInfo() +- if exprs, err := canExtractVariable(info, req.pgf.Cursor, req.start, req.end, false); err == nil { +- // Offer one of refactor.extract.{constant,variable} +- // based on the constness of the expression; this is a +- // limitation of the codeActionProducers mechanism. +- // Beware that future evolutions of the refactorings +- // may make them diverge to become non-complementary, +- // for example because "if const x = ...; y {" is illegal. +- // Same as [refactorExtractVariableAll]. +- constant := info.Types[exprs[0]].Value != nil +- if (req.kind == settings.RefactorExtractConstant) == constant { +- title := "Extract variable" +- if constant { +- title = "Extract constant" +- } +- req.addApplyFixAction(title, fixExtractVariable, req.loc) +- } +- } +- return nil +-} +- +-// refactorExtractVariableAll produces "Extract N occurrences of EXPR" code action. +-// See [extractVariable] for implementation. 
+-func refactorExtractVariableAll(ctx context.Context, req *codeActionsRequest) error { +- info := req.pkg.TypesInfo() +- // Don't suggest if only one expr is found, +- // otherwise it will duplicate with [refactorExtractVariable] +- if exprs, err := canExtractVariable(info, req.pgf.Cursor, req.start, req.end, true); err == nil && len(exprs) > 1 { +- text, err := req.pgf.NodeText(exprs[0]) +- if err != nil { +- return err +- } +- desc := string(text) +- if len(desc) >= 40 || strings.Contains(desc, "\n") { +- desc = goastutil.NodeDescription(exprs[0]) +- } +- constant := info.Types[exprs[0]].Value != nil +- if (req.kind == settings.RefactorExtractConstantAll) == constant { +- var title string +- if constant { +- title = fmt.Sprintf("Extract %d occurrences of const expression: %s", len(exprs), desc) +- } else { +- title = fmt.Sprintf("Extract %d occurrences of %s", len(exprs), desc) +- } +- req.addApplyFixAction(title, fixExtractVariableAll, req.loc) +- } +- } +- return nil +-} +- +-// refactorExtractToNewFile produces "Extract declarations to new file" code actions. +-// See [server.commandHandler.ExtractToNewFile] for command implementation. +-func refactorExtractToNewFile(ctx context.Context, req *codeActionsRequest) error { +- if canExtractToNewFile(req.pgf, req.start, req.end) { +- cmd := command.NewExtractToNewFileCommand("Extract declarations to new file", req.loc) +- req.addCommandAction(cmd, false) +- } +- return nil +-} +- +-// addTest produces "Add test for FUNC" code actions. +-// See [server.commandHandler.AddTest] for command implementation. +-func addTest(ctx context.Context, req *codeActionsRequest) error { +- // Reject test package. +- if req.pkg.Metadata().ForTest != "" { +- return nil +- } +- +- path, _ := goastutil.PathEnclosingInterval(req.pgf.File, req.start, req.end) +- if len(path) < 2 { +- return nil +- } +- +- decl, ok := path[len(path)-2].(*ast.FuncDecl) +- if !ok { +- return nil +- } +- +- // Don't offer to create tests of "init" or "_". +- if decl.Name.Name == "_" || decl.Name.Name == "init" { +- return nil +- } +- +- // TODO(hxjiang): support functions with type parameter. +- if decl.Type.TypeParams != nil { +- return nil +- } +- +- cmd := command.NewAddTestCommand("Add test for "+decl.Name.String(), req.loc) +- req.addCommandAction(cmd, false) +- +- // TODO(hxjiang): add code action for generate test for package/file. +- return nil +-} +- +-// identityTransform returns a change signature transformation that leaves the +-// given fieldlist unmodified. +-func identityTransform(fields *ast.FieldList) []command.ChangeSignatureParam { +- var id []command.ChangeSignatureParam +- for i := 0; i < fields.NumFields(); i++ { +- id = append(id, command.ChangeSignatureParam{OldIndex: i}) +- } +- return id +-} +- +-// refactorRewriteRemoveUnusedParam produces "Remove unused parameter" code actions. +-// See [server.commandHandler.ChangeSignature] for command implementation. 
+-func refactorRewriteRemoveUnusedParam(ctx context.Context, req *codeActionsRequest) error { +- if info := removableParameter(req.pkg, req.pgf, req.loc.Range); info != nil { +- var transform []command.ChangeSignatureParam +- for i := 0; i < info.decl.Type.Params.NumFields(); i++ { +- if i != info.paramIndex { +- transform = append(transform, command.ChangeSignatureParam{OldIndex: i}) +- } +- } +- cmd := command.NewChangeSignatureCommand("Remove unused parameter", command.ChangeSignatureArgs{ +- Location: req.loc, +- NewParams: transform, +- NewResults: identityTransform(info.decl.Type.Results), +- ResolveEdits: req.resolveEdits(), +- }) +- req.addCommandAction(cmd, true) +- } +- return nil +-} +- +-func refactorRewriteMoveParamLeft(ctx context.Context, req *codeActionsRequest) error { +- if info := findParam(req.pgf, req.loc.Range); info != nil && +- info.paramIndex > 0 && +- !is[*ast.Ellipsis](info.field.Type) { +- +- // ^^ we can't currently handle moving a variadic param. +- // TODO(rfindley): implement. +- +- transform := identityTransform(info.decl.Type.Params) +- transform[info.paramIndex] = command.ChangeSignatureParam{OldIndex: info.paramIndex - 1} +- transform[info.paramIndex-1] = command.ChangeSignatureParam{OldIndex: info.paramIndex} +- cmd := command.NewChangeSignatureCommand("Move parameter left", command.ChangeSignatureArgs{ +- Location: req.loc, +- NewParams: transform, +- NewResults: identityTransform(info.decl.Type.Results), +- ResolveEdits: req.resolveEdits(), +- }) +- +- req.addCommandAction(cmd, true) +- } +- return nil +-} +- +-func refactorRewriteMoveParamRight(ctx context.Context, req *codeActionsRequest) error { +- if info := findParam(req.pgf, req.loc.Range); info != nil && info.paramIndex >= 0 { +- params := info.decl.Type.Params +- nparams := params.NumFields() +- if info.paramIndex < nparams-1 { // not the last param +- if info.paramIndex == nparams-2 && is[*ast.Ellipsis](params.List[len(params.List)-1].Type) { +- // We can't currently handle moving a variadic param. +- // TODO(rfindley): implement. +- return nil +- } +- +- transform := identityTransform(info.decl.Type.Params) +- transform[info.paramIndex] = command.ChangeSignatureParam{OldIndex: info.paramIndex + 1} +- transform[info.paramIndex+1] = command.ChangeSignatureParam{OldIndex: info.paramIndex} +- cmd := command.NewChangeSignatureCommand("Move parameter right", command.ChangeSignatureArgs{ +- Location: req.loc, +- NewParams: transform, +- NewResults: identityTransform(info.decl.Type.Results), +- ResolveEdits: req.resolveEdits(), +- }) +- req.addCommandAction(cmd, true) +- } +- } +- return nil +-} +- +-// refactorRewriteChangeQuote produces "Convert to {raw,interpreted} string literal" code actions. +-func refactorRewriteChangeQuote(ctx context.Context, req *codeActionsRequest) error { +- convertStringLiteral(req) +- return nil +-} +- +-// refactorRewriteInvertIf produces "Invert 'if' condition" code actions. +-// See [invertIfCondition] for command implementation. +-func refactorRewriteInvertIf(ctx context.Context, req *codeActionsRequest) error { +- if _, ok, _ := canInvertIfCondition(req.pgf.Cursor, req.start, req.end); ok { +- req.addApplyFixAction("Invert 'if' condition", fixInvertIfCondition, req.loc) +- } +- return nil +-} +- +-// refactorRewriteSplitLines produces "Split ITEMS into separate lines" code actions. +-// See [splitLines] for command implementation. 
+-func refactorRewriteSplitLines(ctx context.Context, req *codeActionsRequest) error { +- // TODO(adonovan): opt: don't set needPkg just for FileSet. +- if msg, ok, _ := canSplitLines(req.pgf.Cursor, req.pkg.FileSet(), req.start, req.end); ok { +- req.addApplyFixAction(msg, fixSplitLines, req.loc) +- } +- return nil +-} +- +-func refactorRewriteEliminateDotImport(ctx context.Context, req *codeActionsRequest) error { +- // Figure out if the request is placed over a dot import. +- var importSpec *ast.ImportSpec +- for _, imp := range req.pgf.File.Imports { +- if posRangeContains(imp.Pos(), imp.End(), req.start, req.end) { +- importSpec = imp +- break +- } +- } +- if importSpec == nil { +- return nil +- } +- if importSpec.Name == nil || importSpec.Name.Name != "." { +- return nil +- } +- +- // dotImported package path and its imported name after removing the dot. +- imported := req.pkg.TypesInfo().PkgNameOf(importSpec).Imported() +- newName := imported.Name() +- +- rng, err := req.pgf.PosRange(importSpec.Name.Pos(), importSpec.Path.Pos()) +- if err != nil { +- return err +- } +- // Delete the '.' part of the import. +- edits := []protocol.TextEdit{{ +- Range: rng, +- }} +- +- fileScope, ok := req.pkg.TypesInfo().Scopes[req.pgf.File] +- if !ok { +- return nil +- } +- +- // Go through each use of the dot imported package, checking its scope for +- // shadowing and calculating an edit to qualify the identifier. +- for curId := range req.pgf.Cursor.Preorder((*ast.Ident)(nil)) { +- ident := curId.Node().(*ast.Ident) +- +- // Only keep identifiers that use a symbol from the +- // dot imported package. +- use := req.pkg.TypesInfo().Uses[ident] +- if use == nil || use.Pkg() == nil { +- continue +- } +- if use.Pkg() != imported { +- continue +- } +- +- // Only qualify unqualified identifiers (due to dot imports) +- // that reference package-level symbols. +- // All other references to a symbol imported from another package +- // are nested within a select expression (pkg.Foo, v.Method, v.Field). +- if astutil.IsChildOf(curId, edge.SelectorExpr_Sel) { +- continue // qualified identifier (pkg.X) or selector (T.X or e.X) +- } +- if !typesinternal.IsPackageLevel(use) { +- continue // unqualified field reference T{X: ...} +- } +- +- // Make sure that the package name will not be shadowed by something else in scope. +- // If it is then we cannot offer this particular code action. +- // +- // TODO: If the object found in scope is the package imported without a +- // dot, or some builtin not used in the file, the code action could be +- // allowed to go through. +- sc := fileScope.Innermost(ident.Pos()) +- if sc == nil { +- continue +- } +- _, obj := sc.LookupParent(newName, ident.Pos()) +- if obj != nil { +- continue +- } +- +- rng, err := req.pgf.PosRange(ident.Pos(), ident.Pos()) // sic, zero-width range before ident +- if err != nil { +- continue +- } +- edits = append(edits, protocol.TextEdit{ +- Range: rng, +- NewText: newName + ".", +- }) +- } +- +- req.addEditAction("Eliminate dot import", nil, protocol.DocumentChangeEdit( +- req.fh, +- edits, +- )) +- return nil +-} +- +-// refactorRewriteJoinLines produces "Join ITEMS into one line" code actions. +-// See [joinLines] for command implementation. +-func refactorRewriteJoinLines(ctx context.Context, req *codeActionsRequest) error { +- // TODO(adonovan): opt: don't set needPkg just for FileSet. 
+- if msg, ok, _ := canJoinLines(req.pgf.Cursor, req.pkg.FileSet(), req.start, req.end); ok { +- req.addApplyFixAction(msg, fixJoinLines, req.loc) +- } +- return nil +-} +- +-// refactorRewriteFillStruct produces "Fill STRUCT" code actions. +-// See [fillstruct.SuggestedFix] for command implementation. +-func refactorRewriteFillStruct(ctx context.Context, req *codeActionsRequest) error { +- // fillstruct.Diagnose is a lazy analyzer: all it gives us is +- // the (start, end, message) of each SuggestedFix; the actual +- // edit is computed only later by ApplyFix, which calls fillstruct.SuggestedFix. +- for _, diag := range fillstruct.Diagnose(req.pgf.File, req.start, req.end, req.pkg.Types(), req.pkg.TypesInfo()) { +- loc, err := req.pgf.Mapper.PosLocation(req.pgf.Tok, diag.Pos, diag.End) +- if err != nil { +- return err +- } +- for _, fix := range diag.SuggestedFixes { +- req.addApplyFixAction(fix.Message, diag.Category, loc) +- } +- } +- return nil +-} +- +-// refactorRewriteFillSwitch produces "Add cases for TYPE/ENUM" code actions. +-func refactorRewriteFillSwitch(ctx context.Context, req *codeActionsRequest) error { +- for _, diag := range fillswitch.Diagnose(req.pgf.File, req.start, req.end, req.pkg.Types(), req.pkg.TypesInfo()) { +- changes, err := suggestedFixToDocumentChange(ctx, req.snapshot, req.pkg.FileSet(), &diag.SuggestedFixes[0]) +- if err != nil { +- return err +- } +- req.addEditAction(diag.Message, nil, changes...) +- } +- +- return nil +-} +- +-// selectionContainsStructField returns true if the given struct contains a +-// field between start and end pos. If needsTag is true, it only returns true if +-// the struct field found contains a struct tag. +-func selectionContainsStructField(node *ast.StructType, start, end token.Pos, needsTag bool) bool { +- for _, field := range node.Fields.List { +- if start <= field.End() && end >= field.Pos() { +- if !needsTag || field.Tag != nil { +- return true +- } +- } +- } +- return false +-} +- +-// selectionContainsStruct returns true if there exists a struct containing +-// fields within start and end positions. If removeTags is true, it means the +-// current command is for remove tags rather than add tags, so we only return +-// true if the struct field found contains a struct tag to remove. +-func selectionContainsStruct(cursor inspector.Cursor, start, end token.Pos, removeTags bool) bool { +- cur, ok := cursor.FindByPos(start, end) +- if !ok { +- return false +- } +- if _, ok := cur.Node().(*ast.StructType); ok { +- return true +- } +- +- // Handles case where selection is within struct. +- for c := range cur.Enclosing((*ast.StructType)(nil)) { +- if selectionContainsStructField(c.Node().(*ast.StructType), start, end, removeTags) { +- return true +- } +- } +- +- // Handles case where selection contains struct but may contain other nodes, including other structs. +- for c := range cur.Preorder((*ast.StructType)(nil)) { +- node := c.Node().(*ast.StructType) +- // Check that at least one field is located within the selection. If we are removing tags, that field +- // must also have a struct tag, otherwise we do not provide the code action. +- if selectionContainsStructField(node, start, end, removeTags) { +- return true +- } +- } +- return false +-} +- +-// refactorRewriteAddStructTags produces "Add struct tags" code actions. +-// See [server.commandHandler.ModifyTags] for command implementation. 
+-func refactorRewriteAddStructTags(ctx context.Context, req *codeActionsRequest) error { +- if selectionContainsStruct(req.pgf.Cursor, req.start, req.end, false) { +- // TODO(mkalil): Prompt user for modification args once we have dialogue capabilities. +- cmdAdd := command.NewModifyTagsCommand("Add struct tags", command.ModifyTagsArgs{ +- URI: req.loc.URI, +- Range: req.loc.Range, +- Add: "json", +- }) +- req.addCommandAction(cmdAdd, false) +- } +- return nil +-} +- +-// refactorRewriteRemoveStructTags produces "Remove struct tags" code actions. +-// See [server.commandHandler.ModifyTags] for command implementation. +-func refactorRewriteRemoveStructTags(ctx context.Context, req *codeActionsRequest) error { +- // TODO(mkalil): Prompt user for modification args once we have dialogue capabilities. +- if selectionContainsStruct(req.pgf.Cursor, req.start, req.end, true) { +- cmdRemove := command.NewModifyTagsCommand("Remove struct tags", command.ModifyTagsArgs{ +- URI: req.loc.URI, +- Range: req.loc.Range, +- Clear: true, +- }) +- req.addCommandAction(cmdRemove, false) +- } +- return nil +-} +- +-// removableParameter returns paramInfo about a removable parameter indicated +-// by the given [start, end) range, or nil if no such removal is available. +-// +-// Removing a parameter is possible if +-// - there are no parse or type errors, and +-// - [start, end) is contained within an unused field or parameter name +-// - ... of a non-method function declaration. +-// +-// (Note that the unusedparam analyzer also computes this property, but +-// much more precisely, allowing it to report its findings as diagnostics.) +-// +-// TODO(adonovan): inline into refactorRewriteRemoveUnusedParam. +-func removableParameter(pkg *cache.Package, pgf *parsego.File, rng protocol.Range) *paramInfo { +- if perrors, terrors := pkg.ParseErrors(), pkg.TypeErrors(); len(perrors) > 0 || len(terrors) > 0 { +- return nil // can't remove parameters from packages with errors +- } +- info := findParam(pgf, rng) +- if info == nil || info.field == nil { +- return nil // range does not span a parameter +- } +- if info.decl.Body == nil { +- return nil // external function +- } +- if len(info.field.Names) == 0 { +- return info // no names => field is unused +- } +- if info.name == nil { +- return nil // no name is indicated +- } +- if info.name.Name == "_" { +- return info // trivially unused +- } +- +- obj := pkg.TypesInfo().Defs[info.name] +- if obj == nil { +- return nil // something went wrong +- } +- +- used := false +- ast.Inspect(info.decl.Body, func(node ast.Node) bool { +- if n, ok := node.(*ast.Ident); ok && pkg.TypesInfo().Uses[n] == obj { +- used = true +- } +- return !used // keep going until we find a use +- }) +- if used { +- return nil +- } +- return info +-} +- +-// refactorInlineCall produces "Inline call to FUNC" code actions. +-// See [inlineCall] for command implementation. +-func refactorInlineCall(ctx context.Context, req *codeActionsRequest) error { +- // To avoid distraction (e.g. VS Code lightbulb), offer "inline" +- // only after a selection or explicit menu operation. +- // TODO(adonovan): remove this (and req.trigger); see comment at TestVSCodeIssue65167. +- if req.trigger == protocol.CodeActionAutomatic && req.loc.Empty() { +- return nil +- } +- +- // If range is within call expression, offer to inline the call. 
+- if _, fn, err := enclosingStaticCall(req.pkg, req.pgf, req.start, req.end); err == nil { +- req.addApplyFixAction("Inline call to "+fn.Name(), fixInlineCall, req.loc) +- } +- return nil +-} +- +-// refactorInlineVariable produces the "Inline variable 'v'" code action. +-// See [inlineVariableOne] for command implementation. +-func refactorInlineVariable(ctx context.Context, req *codeActionsRequest) error { +- // TODO(adonovan): offer "inline all" variant that eliminates the var (see #70085). +- if curUse, _, ok := canInlineVariable(req.pkg.TypesInfo(), req.pgf.Cursor, req.start, req.end); ok { +- title := fmt.Sprintf("Inline variable %q", curUse.Node().(*ast.Ident).Name) +- req.addApplyFixAction(title, fixInlineVariable, req.loc) +- } +- return nil +-} +- +-// goTest produces "Run tests and benchmarks" code actions. +-// See [server.commandHandler.runTests] for command implementation. +-func goTest(ctx context.Context, req *codeActionsRequest) error { +- testFuncs, benchFuncs, err := testsAndBenchmarks(req.pkg.TypesInfo(), req.pgf) +- if err != nil { +- return err +- } +- +- var tests, benchmarks []string +- for _, fn := range testFuncs { +- if protocol.Intersect(fn.rng, req.loc.Range) { +- tests = append(tests, fn.name) +- } +- } +- for _, fn := range benchFuncs { +- if protocol.Intersect(fn.rng, req.loc.Range) { +- benchmarks = append(benchmarks, fn.name) +- } +- } +- +- if len(tests) == 0 && len(benchmarks) == 0 { +- return nil +- } +- +- cmd := command.NewRunTestsCommand("Run tests and benchmarks", command.RunTestsArgs{ +- URI: req.loc.URI, +- Tests: tests, +- Benchmarks: benchmarks, +- }) +- req.addCommandAction(cmd, false) +- return nil +-} +- +-// goAssembly produces "Browse ARCH assembly for FUNC" code actions. +-// See [server.commandHandler.Assembly] for command implementation. +-func goAssembly(ctx context.Context, req *codeActionsRequest) error { +- view := req.snapshot.View() +- +- // Find the enclosing toplevel function or method, +- // and compute its symbol name (e.g. "pkgpath.(T).method"). +- // The report will show this method and all its nested +- // functions (FuncLit, defers, etc). +- // +- // TODO(adonovan): this is no good for generics, since they +- // will always be uninstantiated when they enclose the cursor. +- // Instead, we need to query the func symbol under the cursor, +- // rather than the enclosing function. It may be an explicitly +- // or implicitly instantiated generic, and it may be defined +- // in another package, though we would still need to compile +- // the current package to see its assembly. The challenge, +- // however, is that computing the linker name for a generic +- // symbol is quite tricky. Talk with the compiler team for +- // ideas. +- // +- // TODO(adonovan): think about a smoother UX for jumping +- // directly to (say) a lambda of interest. +- // Perhaps we could scroll to STEXT for the innermost +- // enclosing nested function? +- +- // Compute the linker symbol of the enclosing function or var initializer. 
+- var sym strings.Builder +- if pkg := req.pkg.Types(); pkg.Name() == "main" { +- sym.WriteString("main") +- } else { +- sym.WriteString(pkg.Path()) +- } +- sym.WriteString(".") +- +- curSel, _ := req.pgf.Cursor.FindByPos(req.start, req.end) +- for cur := range curSel.Enclosing((*ast.FuncDecl)(nil), (*ast.ValueSpec)(nil)) { +- var name string // in command title +- switch node := cur.Node().(type) { +- case *ast.FuncDecl: +- // package-level func or method +- if fn, ok := req.pkg.TypesInfo().Defs[node.Name].(*types.Func); ok && +- fn.Name() != "_" { // blank functions are not compiled +- +- // Source-level init functions are compiled (along with +- // package-level var initializers) in into a single pkg.init +- // function, so this falls out of the logic below. +- +- if sig := fn.Signature(); sig.TypeParams() == nil && sig.RecvTypeParams() == nil { // generic => no assembly +- if sig.Recv() != nil { +- if isPtr, named := typesinternal.ReceiverNamed(sig.Recv()); named != nil { +- if isPtr { +- fmt.Fprintf(&sym, "(*%s)", named.Obj().Name()) +- } else { +- sym.WriteString(named.Obj().Name()) +- } +- sym.WriteByte('.') +- } +- } +- sym.WriteString(fn.Name()) +- +- name = node.Name.Name // success +- } +- } +- +- case *ast.ValueSpec: +- // package-level var initializer? +- if len(node.Names) > 0 && len(node.Values) > 0 { +- v := req.pkg.TypesInfo().Defs[node.Names[0]] +- if v != nil && typesinternal.IsPackageLevel(v) { +- sym.WriteString("init") +- name = "package initializer" // success +- } +- } +- } +- +- if name != "" { +- cmd := command.NewAssemblyCommand( +- fmt.Sprintf("Browse %s assembly for %s", view.GOARCH(), name), +- view.ID(), +- string(req.pkg.Metadata().ID), +- sym.String()) +- req.addCommandAction(cmd, false) +- break +- } +- } +- return nil +-} +- +-// toggleCompilerOptDetails produces "{Show,Hide} compiler optimization details" code action. +-// See [server.commandHandler.GCDetails] for command implementation. +-func toggleCompilerOptDetails(ctx context.Context, req *codeActionsRequest) error { +- // TODO(adonovan): errors from code action providers should probably be +- // logged, even if they aren't visible to the client; see https://go.dev/issue/71275. +- if meta, err := req.snapshot.NarrowestMetadataForFile(ctx, req.fh.URI()); err == nil { +- if len(meta.CompiledGoFiles) == 0 { +- return fmt.Errorf("package %q does not compile file %q", meta.ID, req.fh.URI()) +- } +- dir := meta.CompiledGoFiles[0].Dir() +- +- title := fmt.Sprintf("%s compiler optimization details for %q", +- cond(req.snapshot.WantCompilerOptDetails(dir), "Hide", "Show"), +- dir.Base()) +- cmd := command.NewGCDetailsCommand(title, req.fh.URI()) +- req.addCommandAction(cmd, false) +- } +- return nil +-} +diff -urN a/gopls/internal/golang/code_lens.go b/gopls/internal/golang/code_lens.go +--- a/gopls/internal/golang/code_lens.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/golang/code_lens.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,206 +0,0 @@ +-// Copyright 2020 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. 
+- +-package golang +- +-import ( +- "context" +- "go/ast" +- "go/token" +- "go/types" +- "regexp" +- "strings" +- +- "golang.org/x/tools/gopls/internal/cache" +- "golang.org/x/tools/gopls/internal/cache/parsego" +- "golang.org/x/tools/gopls/internal/file" +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/gopls/internal/protocol/command" +- "golang.org/x/tools/gopls/internal/settings" +-) +- +-// CodeLensSources returns the supported sources of code lenses for Go files. +-func CodeLensSources() map[settings.CodeLensSource]cache.CodeLensSourceFunc { +- return map[settings.CodeLensSource]cache.CodeLensSourceFunc{ +- settings.CodeLensGenerate: goGenerateCodeLens, // commands: Generate +- settings.CodeLensTest: runTestCodeLens, // commands: Test +- settings.CodeLensRegenerateCgo: regenerateCgoLens, // commands: RegenerateCgo +- } +-} +- +-var ( +- testRe = regexp.MustCompile(`^Test([^a-z]|$)`) // TestFoo or Test but not Testable +- benchmarkRe = regexp.MustCompile(`^Benchmark([^a-z]|$)`) +-) +- +-func runTestCodeLens(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle) ([]protocol.CodeLens, error) { +- var codeLens []protocol.CodeLens +- +- pkg, pgf, err := NarrowestPackageForFile(ctx, snapshot, fh.URI()) +- if err != nil { +- return nil, err +- } +- testFuncs, benchFuncs, err := testsAndBenchmarks(pkg.TypesInfo(), pgf) +- if err != nil { +- return nil, err +- } +- puri := fh.URI() +- for _, fn := range testFuncs { +- cmd := command.NewRunTestsCommand("run test", command.RunTestsArgs{ +- URI: puri, +- Tests: []string{fn.name}, +- }) +- rng := protocol.Range{Start: fn.rng.Start, End: fn.rng.Start} +- codeLens = append(codeLens, protocol.CodeLens{Range: rng, Command: cmd}) +- } +- +- for _, fn := range benchFuncs { +- cmd := command.NewRunTestsCommand("run benchmark", command.RunTestsArgs{ +- URI: puri, +- Benchmarks: []string{fn.name}, +- }) +- rng := protocol.Range{Start: fn.rng.Start, End: fn.rng.Start} +- codeLens = append(codeLens, protocol.CodeLens{Range: rng, Command: cmd}) +- } +- +- if len(benchFuncs) > 0 { +- pgf, err := snapshot.ParseGo(ctx, fh, parsego.Full) +- if err != nil { +- return nil, err +- } +- // add a code lens to the top of the file which runs all benchmarks in the file +- rng, err := pgf.PosRange(pgf.File.Package, pgf.File.Package) +- if err != nil { +- return nil, err +- } +- var benches []string +- for _, fn := range benchFuncs { +- benches = append(benches, fn.name) +- } +- cmd := command.NewRunTestsCommand("run file benchmarks", command.RunTestsArgs{ +- URI: puri, +- Benchmarks: benches, +- }) +- codeLens = append(codeLens, protocol.CodeLens{Range: rng, Command: cmd}) +- } +- return codeLens, nil +-} +- +-type testFunc struct { +- name string +- rng protocol.Range // of *ast.FuncDecl +-} +- +-// testsAndBenchmarks returns all Test and Benchmark functions in the +-// specified file. 
+-func testsAndBenchmarks(info *types.Info, pgf *parsego.File) (tests, benchmarks []testFunc, _ error) { +- if !strings.HasSuffix(pgf.URI.Path(), "_test.go") { +- return nil, nil, nil // empty +- } +- +- for _, d := range pgf.File.Decls { +- fn, ok := d.(*ast.FuncDecl) +- if !ok { +- continue +- } +- +- rng, err := pgf.NodeRange(fn) +- if err != nil { +- return nil, nil, err +- } +- +- if matchTestFunc(fn, info, testRe, "T") { +- tests = append(tests, testFunc{fn.Name.Name, rng}) +- } else if matchTestFunc(fn, info, benchmarkRe, "B") { +- benchmarks = append(benchmarks, testFunc{fn.Name.Name, rng}) +- } +- } +- return +-} +- +-func matchTestFunc(fn *ast.FuncDecl, info *types.Info, nameRe *regexp.Regexp, paramID string) bool { +- // Make sure that the function name matches a test function. +- if !nameRe.MatchString(fn.Name.Name) { +- return false +- } +- obj, ok := info.ObjectOf(fn.Name).(*types.Func) +- if !ok { +- return false +- } +- sig := obj.Signature() +- // Test functions should have only one parameter. +- if sig.Params().Len() != 1 { +- return false +- } +- +- // Check the type of the only parameter +- // (We don't Unalias or use typesinternal.ReceiverNamed +- // in the two checks below because "go test" can't see +- // through aliases when enumerating Test* functions; +- // it's syntactic.) +- paramTyp, ok := sig.Params().At(0).Type().(*types.Pointer) +- if !ok { +- return false +- } +- named, ok := paramTyp.Elem().(*types.Named) +- if !ok { +- return false +- } +- namedObj := named.Obj() +- if namedObj.Pkg().Path() != "testing" { +- return false +- } +- return namedObj.Id() == paramID +-} +- +-func goGenerateCodeLens(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle) ([]protocol.CodeLens, error) { +- pgf, err := snapshot.ParseGo(ctx, fh, parsego.Full) +- if err != nil { +- return nil, err +- } +- const ggDirective = "//go:generate" +- for _, c := range pgf.File.Comments { +- for _, l := range c.List { +- if !strings.HasPrefix(l.Text, ggDirective) { +- continue +- } +- rng, err := pgf.PosRange(l.Pos(), l.Pos()+token.Pos(len(ggDirective))) +- if err != nil { +- return nil, err +- } +- dir := fh.URI().Dir() +- nonRecursiveCmd := command.NewGenerateCommand("run go generate", command.GenerateArgs{Dir: dir, Recursive: false}) +- recursiveCmd := command.NewGenerateCommand("run go generate ./...", command.GenerateArgs{Dir: dir, Recursive: true}) +- return []protocol.CodeLens{ +- {Range: rng, Command: recursiveCmd}, +- {Range: rng, Command: nonRecursiveCmd}, +- }, nil +- +- } +- } +- return nil, nil +-} +- +-func regenerateCgoLens(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle) ([]protocol.CodeLens, error) { +- pgf, err := snapshot.ParseGo(ctx, fh, parsego.Full) +- if err != nil { +- return nil, err +- } +- var c *ast.ImportSpec +- for _, imp := range pgf.File.Imports { +- if imp.Path.Value == `"C"` { +- c = imp +- } +- } +- if c == nil { +- return nil, nil +- } +- rng, err := pgf.NodeRange(c) +- if err != nil { +- return nil, err +- } +- puri := fh.URI() +- cmd := command.NewRegenerateCgoCommand("regenerate cgo definitions", command.URIArg{URI: puri}) +- return []protocol.CodeLens{{Range: rng, Command: cmd}}, nil +-} +diff -urN a/gopls/internal/golang/comment.go b/gopls/internal/golang/comment.go +--- a/gopls/internal/golang/comment.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/golang/comment.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,285 +0,0 @@ +-// Copyright 2022 The Go Authors. All rights reserved. 
+-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package golang +- +-import ( +- "context" +- "errors" +- "fmt" +- "go/ast" +- "go/doc/comment" +- "go/token" +- "go/types" +- pathpkg "path" +- "strings" +- +- "golang.org/x/tools/gopls/internal/cache" +- "golang.org/x/tools/gopls/internal/cache/parsego" +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/gopls/internal/settings" +- "golang.org/x/tools/gopls/internal/util/safetoken" +- "golang.org/x/tools/internal/astutil" +-) +- +-var errNoCommentReference = errors.New("no comment reference found") +- +-// DocCommentToMarkdown converts the text of a [doc comment] to Markdown. +-// +-// TODO(adonovan): provide a package (or file imports) as context for +-// proper rendering of doc links; see [newDocCommentParser] and golang/go#61677. +-// +-// [doc comment]: https://go.dev/doc/comment +-func DocCommentToMarkdown(text string, options *settings.Options) string { +- var parser comment.Parser +- doc := parser.Parse(text) +- +- var printer comment.Printer +- // The default produces {#Hdr-...} tags for headings. +- // vscode displays thems, which is undesirable. +- // The godoc for comment.Printer says the tags +- // avoid a security problem. +- printer.HeadingID = func(*comment.Heading) string { return "" } +- printer.DocLinkURL = func(link *comment.DocLink) string { +- msg := fmt.Sprintf("https://%s/%s", options.LinkTarget, link.ImportPath) +- if link.Name != "" { +- msg += "#" +- if link.Recv != "" { +- msg += link.Recv + "." +- } +- msg += link.Name +- } +- return msg +- } +- +- return string(printer.Markdown(doc)) +-} +- +-// docLinkDefinition finds the definition of the doc link in comments at pos. +-// If there is no reference at pos, returns errNoCommentReference. +-func docLinkDefinition(ctx context.Context, snapshot *cache.Snapshot, pkg *cache.Package, pgf *parsego.File, pos token.Pos) ([]protocol.Location, error) { +- obj, _, err := resolveDocLink(pkg, pgf, pos) +- if err != nil { +- return nil, err +- } +- loc, err := ObjectLocation(ctx, pkg.FileSet(), snapshot, obj) +- if err != nil { +- return nil, err +- } +- return []protocol.Location{loc}, nil +-} +- +-// resolveDocLink parses a doc link in a comment such as [fmt.Println] +-// and returns the symbol at pos, along with the link's range. +-func resolveDocLink(pkg *cache.Package, pgf *parsego.File, pos token.Pos) (types.Object, protocol.Range, error) { +- var comment *ast.Comment +-outer: +- for _, cg := range pgf.File.Comments { +- for _, c := range cg.List { +- if c.Pos() <= pos && pos <= c.End() { +- comment = c +- break outer +- } +- } +- } +- if comment == nil { +- return nil, protocol.Range{}, errNoCommentReference +- } +- +- // The canonical parsing algorithm is defined by go/doc/comment, but +- // unfortunately its API provides no way to reliably reconstruct the +- // position of each doc link from the parsed result. 
+- line := safetoken.Line(pgf.Tok, pos) +- var start, end token.Pos +- start = max(pgf.Tok.LineStart(line), comment.Pos()) +- if line < pgf.Tok.LineCount() && pgf.Tok.LineStart(line+1) < comment.End() { +- end = pgf.Tok.LineStart(line + 1) +- } else { +- end = comment.End() +- } +- +- textBytes, err := pgf.PosText(start, end) +- if err != nil { +- return nil, protocol.Range{}, err +- } +- +- text := string(textBytes) +- lineOffset := int(pos - start) +- +- for _, idx := range docLinkRegex.FindAllStringSubmatchIndex(text, -1) { +- mstart, mend := idx[2], idx[3] +- // [mstart, mend) identifies the first submatch, +- // which is the reference name in the doc link (sans '*'). +- // e.g. The "[fmt.Println]" reference name is "fmt.Println". +- if mstart <= lineOffset && lineOffset < mend { +- p := lineOffset - mstart +- name := text[mstart:mend] +- i := strings.LastIndexByte(name, '.') +- for i != -1 { +- if p > i { +- break +- } +- name = name[:i] +- i = strings.LastIndexByte(name, '.') +- } +- obj := lookupDocLinkSymbol(pkg, pgf, name) +- if obj == nil { +- return nil, protocol.Range{}, errNoCommentReference +- } +- namePos := start + token.Pos(mstart+i+1) +- rng, err := pgf.PosRange(namePos, namePos+token.Pos(len(obj.Name()))) +- if err != nil { +- return nil, protocol.Range{}, err +- } +- return obj, rng, nil // success +- } +- } +- +- return nil, protocol.Range{}, errNoCommentReference +-} +- +-// lookupDocLinkSymbol returns the symbol denoted by a doc link such +-// as "fmt.Println" or "bytes.Buffer.Write" in the specified file. +-func lookupDocLinkSymbol(pkg *cache.Package, pgf *parsego.File, name string) types.Object { +- scope := pkg.Types().Scope() +- +- prefix, suffix, _ := strings.Cut(name, ".") +- +- // Try treating the prefix as a package name, +- // allowing for non-renaming and renaming imports. +- fileScope := pkg.TypesInfo().Scopes[pgf.File] +- if fileScope == nil { +- // As we learned in golang/go#69616, any file may not be Scopes! +- // - A non-compiled Go file (such as unsafe.go) won't be in Scopes. +- // - A (technically) compiled go file with the wrong package name won't be +- // in Scopes, as it will be skipped by go/types. +- return nil +- } +- pkgname, ok := fileScope.Lookup(prefix).(*types.PkgName) // ok => prefix is imported name +- if !ok { +- // Handle renaming import, e.g. +- // [path.Join] after import pathpkg "path". +- // (Should we look at all files of the package?) +- for _, imp := range pgf.File.Imports { +- pkgname2 := pkg.TypesInfo().PkgNameOf(imp) +- if pkgname2 != nil && pkgname2.Imported().Name() == prefix { +- pkgname = pkgname2 +- break +- } +- } +- } +- if pkgname != nil { +- scope = pkgname.Imported().Scope() +- if suffix == "" { +- return pkgname // not really a valid doc link +- } +- name = suffix +- } +- +- // TODO(adonovan): try searching the forward closure for packages +- // that define the symbol but are not directly imported; +- // see https://github.com/golang/go/issues/61677 +- +- // Field or sel? 
+- recv, sel, ok := strings.Cut(name, ".") +- if ok { +- obj := scope.Lookup(recv) // package scope +- if obj == nil { +- obj = types.Universe.Lookup(recv) +- } +- obj, ok := obj.(*types.TypeName) +- if !ok { +- return nil +- } +- m, _, _ := types.LookupFieldOrMethod(obj.Type(), true, obj.Pkg(), sel) +- return m +- } +- +- if obj := scope.Lookup(name); obj != nil { +- return obj // package-level symbol +- } +- return types.Universe.Lookup(name) // built-in symbol +-} +- +-// newDocCommentParser returns a function that parses [doc comments], +-// with context for Doc Links supplied by the specified package. +-// +-// Imported symbols are rendered using the import mapping for the file +-// that encloses fileNode. +-// +-// The resulting function is not concurrency safe. +-// +-// See issue #61677 for how this might be generalized to support +-// correct contextual parsing of doc comments in Hover too. +-// +-// [doc comment]: https://go.dev/doc/comment +-func newDocCommentParser(pkg *cache.Package) func(fileNode ast.Node, text string) *comment.Doc { +- var currentFilePos token.Pos // pos whose enclosing file's import mapping should be used +- parser := &comment.Parser{ +- LookupPackage: func(name string) (importPath string, ok bool) { +- for _, f := range pkg.Syntax() { +- // Different files in the same package have +- // different import mappings. Use the provided +- // syntax node to find the correct file. +- if astutil.NodeContains(f, currentFilePos) { +- // First try each actual imported package name. +- for _, imp := range f.Imports { +- pkgName := pkg.TypesInfo().PkgNameOf(imp) +- if pkgName != nil && pkgName.Name() == name { +- return pkgName.Imported().Path(), true +- } +- } +- +- // Then try each imported package's declared name, +- // as some packages are typically imported under a +- // non-default name (e.g. pathpkg "path") but +- // may be referred to in doc links using their +- // canonical name. +- for _, imp := range f.Imports { +- pkgName := pkg.TypesInfo().PkgNameOf(imp) +- if pkgName != nil && pkgName.Imported().Name() == name { +- return pkgName.Imported().Path(), true +- } +- } +- +- // Finally try matching the last segment of each import +- // path imported by any file in the package, as the +- // doc comment may appear in a different file from the +- // import. +- // +- // Ideally we would look up the DepsByPkgPath value +- // (a PackageID) in the metadata graph and use the +- // package's declared name instead of this heuristic, +- // but we don't have access to the graph here. +- for path := range pkg.Metadata().DepsByPkgPath { +- if pathpkg.Base(trimVersionSuffix(string(path))) == name { +- return string(path), true +- } +- } +- +- break +- } +- } +- return "", false +- }, +- LookupSym: func(recv, name string) (ok bool) { +- // package-level decl? +- if recv == "" { +- return pkg.Types().Scope().Lookup(name) != nil +- } +- +- // method? 
+- tname, ok := pkg.Types().Scope().Lookup(recv).(*types.TypeName) +- if !ok { +- return false +- } +- m, _, _ := types.LookupFieldOrMethod(tname.Type(), true, pkg.Types(), name) +- return is[*types.Func](m) +- }, +- } +- return func(fileNode ast.Node, text string) *comment.Doc { +- currentFilePos = fileNode.Pos() +- return parser.Parse(text) +- } +-} +diff -urN a/gopls/internal/golang/compileropt.go b/gopls/internal/golang/compileropt.go +--- a/gopls/internal/golang/compileropt.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/golang/compileropt.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,229 +0,0 @@ +-// Copyright 2020 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package golang +- +-import ( +- "bytes" +- "context" +- "encoding/json" +- "fmt" +- "os" +- "path/filepath" +- "strings" +- +- "golang.org/x/tools/gopls/internal/cache" +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/gopls/internal/settings" +- "golang.org/x/tools/internal/event" +-) +- +-// CompilerOptDetails invokes the Go compiler with the "-json=0,dir" +-// flag on the packages and tests in the specified directory, parses +-// its log of optimization decisions, and returns them as a set of +-// diagnostics. +-func CompilerOptDetails(ctx context.Context, snapshot *cache.Snapshot, pkgDir protocol.DocumentURI) (map[protocol.DocumentURI][]*cache.Diagnostic, error) { +- outDir, err := os.MkdirTemp("", fmt.Sprintf("gopls-%d.details", os.Getpid())) +- if err != nil { +- return nil, err +- } +- defer func() { +- if err := os.RemoveAll(outDir); err != nil { +- event.Error(ctx, "cleaning details dir", err) +- } +- }() +- +- outDirURI := protocol.URIFromPath(outDir) +- // details doesn't handle Windows URIs in the form of "file:///C:/...", +- // so rewrite them to "file://C:/...". See golang/go#41614. +- if !strings.HasPrefix(outDir, "/") { +- outDirURI = protocol.DocumentURI(strings.Replace(string(outDirURI), "file:///", "file://", 1)) +- } +- +- // We use "go test -c" not "go build" as it covers all three packages +- // (p, "p [p.test]", "p_test [p.test]") in the directory, if they exist. +- // (See also assembly.go.) +- inv, cleanupInvocation, err := snapshot.GoCommandInvocation(cache.NoNetwork, pkgDir.Path(), "test", []string{ +- "-c", +- "-vet=off", // weirdly -c doesn't disable vet +- fmt.Sprintf("-gcflags=-json=0,%s", outDirURI), // JSON schema version 0 +- fmt.Sprintf("-o=%s", os.DevNull), +- ".", +- }) +- if err != nil { +- return nil, err +- } +- defer cleanupInvocation() +- _, err = snapshot.View().GoCommandRunner().Run(ctx, *inv) +- if err != nil { +- return nil, err +- } +- files, err := findJSONFiles(outDir) +- if err != nil { +- return nil, err +- } +- reports := make(map[protocol.DocumentURI][]*cache.Diagnostic) +- var parseError error +- for _, fn := range files { +- uri, diagnostics, err := parseDetailsFile(fn, snapshot.Options()) +- if err != nil { +- // expect errors for all the files, save 1 +- parseError = err +- } +- fh := snapshot.FindFile(uri) +- if fh == nil { +- continue +- } +- if pkgDir != fh.URI().Dir() { +- // Filter compiler diagnostics to the requested directory. +- // https://github.com/golang/go/issues/42198 +- // sometimes the detail diagnostics generated for files +- // outside the package can never be taken back. 
+- continue +- } +- reports[fh.URI()] = diagnostics +- } +- return reports, parseError +-} +- +-// parseDetailsFile parses the file written by the Go compiler which contains a JSON-encoded protocol.Diagnostic. +-func parseDetailsFile(filename string, options *settings.Options) (protocol.DocumentURI, []*cache.Diagnostic, error) { +- buf, err := os.ReadFile(filename) +- if err != nil { +- return "", nil, err +- } +- var ( +- uri protocol.DocumentURI +- i int +- diagnostics []*cache.Diagnostic +- ) +- type metadata struct { +- File string `json:"file,omitempty"` +- } +- for dec := json.NewDecoder(bytes.NewReader(buf)); dec.More(); { +- // The first element always contains metadata. +- if i == 0 { +- i++ +- m := new(metadata) +- if err := dec.Decode(m); err != nil { +- return "", nil, err +- } +- if !strings.HasSuffix(m.File, ".go") { +- continue // +- } +- uri = protocol.URIFromPath(m.File) +- continue +- } +- d := new(protocol.Diagnostic) +- if err := dec.Decode(d); err != nil { +- return "", nil, err +- } +- d.Tags = []protocol.DiagnosticTag{} // must be an actual slice +- msg := d.Code.(string) +- if msg != "" { +- msg = fmt.Sprintf("%s(%s)", msg, d.Message) +- } +- if !showDiagnostic(msg, d.Source, options) { +- continue +- } +- +- // zeroIndexedRange subtracts 1 from the line and +- // range, because the compiler output neglects to +- // convert from 1-based UTF-8 coordinates to 0-based UTF-16. +- // (See GOROOT/src/cmd/compile/internal/logopt/log_opts.go.) +- // TODO(rfindley): also translate UTF-8 to UTF-16. +- zeroIndexedRange := func(rng protocol.Range) protocol.Range { +- return protocol.Range{ +- Start: protocol.Position{ +- Line: rng.Start.Line - 1, +- Character: rng.Start.Character - 1, +- }, +- End: protocol.Position{ +- Line: rng.End.Line - 1, +- Character: rng.End.Character - 1, +- }, +- } +- } +- +- var related []protocol.DiagnosticRelatedInformation +- for _, ri := range d.RelatedInformation { +- related = append(related, protocol.DiagnosticRelatedInformation{ +- Location: ri.Location.URI.Location(zeroIndexedRange(ri.Location.Range)), +- Message: ri.Message, +- }) +- } +- diagnostic := &cache.Diagnostic{ +- URI: uri, +- Range: zeroIndexedRange(d.Range), +- Message: msg, +- Severity: d.Severity, +- Source: cache.CompilerOptDetailsInfo, // d.Source is always "go compiler" as of 1.16, use our own +- Tags: d.Tags, +- Related: related, +- } +- diagnostics = append(diagnostics, diagnostic) +- i++ +- } +- return uri, diagnostics, nil +-} +- +-// showDiagnostic reports whether a given diagnostic should be shown to the end +-// user, given the current options. +-func showDiagnostic(msg, source string, o *settings.Options) bool { +- if source != "go compiler" { +- return false +- } +- if o.Annotations == nil { +- return true +- } +- +- // The strings below were gathered by grepping the source of +- // cmd/compile for literal arguments in calls to logopt.LogOpt. +- // (It is not a well defined set.) 
+- // +- // - canInlineFunction +- // - cannotInlineCall +- // - cannotInlineFunction +- // - escape +- // - escapes +- // - isInBounds +- // - isSliceInBounds +- // - leak +- // - nilcheck +- // +- // Additional ones not handled by logic below: +- // - copy +- // - iteration-variable-to-{heap,stack} +- // - loop-modified-{range,for} +- +- switch { +- case strings.HasPrefix(msg, "canInline") || +- strings.HasPrefix(msg, "cannotInline") || +- strings.HasPrefix(msg, "inlineCall"): +- return o.Annotations[settings.Inline] +- case strings.HasPrefix(msg, "escape") || msg == "leak": +- return o.Annotations[settings.Escape] +- case strings.HasPrefix(msg, "nilcheck"): +- return o.Annotations[settings.Nil] +- case strings.HasPrefix(msg, "isInBounds") || +- strings.HasPrefix(msg, "isSliceInBounds"): +- return o.Annotations[settings.Bounds] +- } +- return false +-} +- +-func findJSONFiles(dir string) ([]string, error) { +- ans := []string{} +- f := func(path string, fi os.FileInfo, _ error) error { +- if fi.IsDir() { +- return nil +- } +- if strings.HasSuffix(path, ".json") { +- ans = append(ans, path) +- } +- return nil +- } +- err := filepath.Walk(dir, f) +- return ans, err +-} +diff -urN a/gopls/internal/golang/completion/builtin.go b/gopls/internal/golang/completion/builtin.go +--- a/gopls/internal/golang/completion/builtin.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/golang/completion/builtin.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,147 +0,0 @@ +-// Copyright 2020 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package completion +- +-import ( +- "context" +- "go/ast" +- "go/types" +-) +- +-// builtinArgKind determines the expected object kind for a builtin +-// argument. It attempts to use the AST hints from builtin.go where +-// possible. +-func (c *completer) builtinArgKind(ctx context.Context, obj types.Object, call *ast.CallExpr) objKind { +- builtin, err := c.snapshot.BuiltinFile(ctx) +- if err != nil { +- return 0 +- } +- exprIdx := exprAtPos(c.pos, call.Args) +- +- builtinObj := builtin.File.Scope.Lookup(obj.Name()) +- if builtinObj == nil { +- return 0 +- } +- decl, ok := builtinObj.Decl.(*ast.FuncDecl) +- if !ok || exprIdx >= len(decl.Type.Params.List) { +- return 0 +- } +- +- switch ptyp := decl.Type.Params.List[exprIdx].Type.(type) { +- case *ast.ChanType: +- return kindChan +- case *ast.ArrayType: +- return kindSlice +- case *ast.MapType: +- return kindMap +- case *ast.Ident: +- switch ptyp.Name { +- case "Type": +- switch obj.Name() { +- case "make": +- return kindChan | kindSlice | kindMap +- case "len": +- return kindSlice | kindMap | kindArray | kindString | kindChan +- case "cap": +- return kindSlice | kindArray | kindChan +- } +- } +- } +- +- return 0 +-} +- +-// builtinArgType infers the type of an argument to a builtin +-// function. parentInf is the inferred type info for the builtin +-// call's parent node. +-func (c *completer) builtinArgType(obj types.Object, call *ast.CallExpr, parentInf candidateInference) candidateInference { +- var ( +- exprIdx = exprAtPos(c.pos, call.Args) +- +- // Propagate certain properties from our parent's inference. +- inf = candidateInference{ +- typeName: parentInf.typeName, +- modifiers: parentInf.modifiers, +- } +- ) +- +- switch obj.Name() { +- case "append": +- if exprIdx <= 0 { +- // Infer first append() arg type as apparent return type of +- // append(). 
+- inf.objType = parentInf.objType +- if parentInf.variadic { +- inf.objType = types.NewSlice(inf.objType) +- } +- break +- } +- +- // For non-initial append() args, infer slice type from the first +- // append() arg, or from parent context. +- if len(call.Args) > 0 { +- inf.objType = c.pkg.TypesInfo().TypeOf(call.Args[0]) +- } +- if inf.objType == nil { +- inf.objType = parentInf.objType +- } +- if inf.objType == nil { +- break +- } +- +- inf.objType = deslice(inf.objType) +- +- // Check if we are completing the variadic append() param. +- inf.variadic = exprIdx == 1 && len(call.Args) <= 2 +- +- // Penalize the first append() argument as a candidate. You +- // don't normally append a slice to itself. +- if sliceChain := objChain(c.pkg.TypesInfo(), call.Args[0]); len(sliceChain) > 0 { +- inf.penalized = append(inf.penalized, penalizedObj{objChain: sliceChain, penalty: 0.9}) +- } +- case "delete": +- if exprIdx > 0 && len(call.Args) > 0 { +- // Try to fill in expected type of map key. +- firstArgType := c.pkg.TypesInfo().TypeOf(call.Args[0]) +- if firstArgType != nil { +- if mt, ok := firstArgType.Underlying().(*types.Map); ok { +- inf.objType = mt.Key() +- } +- } +- } +- case "copy": +- var t1, t2 types.Type +- if len(call.Args) > 0 { +- t1 = c.pkg.TypesInfo().TypeOf(call.Args[0]) +- if len(call.Args) > 1 { +- t2 = c.pkg.TypesInfo().TypeOf(call.Args[1]) +- } +- } +- +- // Fill in expected type of either arg if the other is already present. +- if exprIdx == 1 && t1 != nil { +- inf.objType = t1 +- } else if exprIdx == 0 && t2 != nil { +- inf.objType = t2 +- } +- case "new": +- inf.typeName.wantTypeName = true +- if parentInf.objType != nil { +- // Expected type for "new" is the de-pointered parent type. +- if ptr, ok := parentInf.objType.Underlying().(*types.Pointer); ok { +- inf.objType = ptr.Elem() +- } +- } +- case "make": +- if exprIdx == 0 { +- inf.typeName.wantTypeName = true +- inf.objType = parentInf.objType +- } else { +- inf.objType = types.Typ[types.UntypedInt] +- } +- } +- +- return inf +-} +diff -urN a/gopls/internal/golang/completion/completion.go b/gopls/internal/golang/completion/completion.go +--- a/gopls/internal/golang/completion/completion.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/golang/completion/completion.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,3771 +0,0 @@ +-// Copyright 2018 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-// Package completion provides core functionality for code completion in Go +-// editors and tools. 
+-package completion +- +-import ( +- "context" +- "fmt" +- "go/ast" +- "go/constant" +- "go/parser" +- "go/printer" +- "go/scanner" +- "go/token" +- "go/types" +- "math" +- "slices" +- "sort" +- "strconv" +- "strings" +- "sync" +- "sync/atomic" +- "time" +- "unicode" +- +- "golang.org/x/sync/errgroup" +- "golang.org/x/tools/go/ast/astutil" +- "golang.org/x/tools/gopls/internal/cache" +- "golang.org/x/tools/gopls/internal/cache/metadata" +- "golang.org/x/tools/gopls/internal/cache/parsego" +- "golang.org/x/tools/gopls/internal/file" +- "golang.org/x/tools/gopls/internal/fuzzy" +- "golang.org/x/tools/gopls/internal/golang" +- "golang.org/x/tools/gopls/internal/golang/completion/snippet" +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/gopls/internal/settings" +- "golang.org/x/tools/gopls/internal/util/bug" +- "golang.org/x/tools/gopls/internal/util/safetoken" +- internalastutil "golang.org/x/tools/internal/astutil" +- "golang.org/x/tools/internal/event" +- "golang.org/x/tools/internal/imports" +- "golang.org/x/tools/internal/stdlib" +- "golang.org/x/tools/internal/typeparams" +- "golang.org/x/tools/internal/typesinternal" +- "golang.org/x/tools/internal/versions" +-) +- +-// A CompletionItem represents a possible completion suggested by the algorithm. +-type CompletionItem struct { +- +- // Invariant: CompletionItem does not refer to syntax or types. +- +- // Label is the primary text the user sees for this completion item. +- Label string +- +- // Detail is supplemental information to present to the user. +- // This often contains the type or return type of the completion item. +- Detail string +- +- // InsertText is the text to insert if this item is selected. +- // Any of the prefix that has already been typed is not trimmed. +- // The insert text does not contain snippets. +- InsertText string +- +- Kind protocol.CompletionItemKind +- Tags []protocol.CompletionItemTag +- Deprecated bool // Deprecated, prefer Tags if available +- +- // An optional array of additional TextEdits that are applied when +- // selecting this completion. +- // +- // Additional text edits should be used to change text unrelated to the current cursor position +- // (for example adding an import statement at the top of the file if the completion item will +- // insert an unqualified type). +- AdditionalTextEdits []protocol.TextEdit +- +- // Depth is how many levels were searched to find this completion. +- // For example when completing "foo<>", "fooBar" is depth 0, and +- // "fooBar.Baz" is depth 1. +- Depth int +- +- // Score is the internal relevance score. +- // A higher score indicates that this completion item is more relevant. +- Score float64 +- +- // snippet is the LSP snippet for the completion item. The LSP +- // specification contains details about LSP snippets. For example, a +- // snippet for a function with the following signature: +- // +- // func foo(a, b, c int) +- // +- // would be: +- // +- // foo(${1:a int}, ${2: b int}, ${3: c int}) +- // +- // If Placeholders is false in the CompletionOptions, the above +- // snippet would instead be: +- // +- // foo(${1:}) +- snippet *snippet.Builder +- +- // Documentation is the documentation for the completion item. +- Documentation string +- +- // isSlice reports whether the underlying type of the object +- // from which this candidate was derived is a slice. +- // (Used to complete append() calls.) +- isSlice bool +-} +- +-// completionOptions holds completion specific configuration. 
+-type completionOptions struct { +- unimported bool +- documentation bool +- fullDocumentation bool +- placeholders bool +- snippets bool +- postfix bool +- matcher settings.Matcher +- budget time.Duration +- completeFunctionCalls bool +-} +- +-// Snippet is a convenience returns the snippet if available, otherwise +-// the InsertText. +-// used for an item, depending on if the callee wants placeholders or not. +-func (i *CompletionItem) Snippet() string { +- if i.snippet != nil { +- return i.snippet.String() +- } +- return i.InsertText +-} +- +-// addConversion wraps the existing completionItem in a conversion expression. +-// Only affects the receiver's InsertText and snippet fields, not the Label. +-// An empty conv argument has no effect. +-func (i *CompletionItem) addConversion(c *completer, conv conversionEdits) { +- if conv.prefix != "" { +- // If we are in a selector, add an edit to place prefix before selector. +- if sel := enclosingSelector(c.path, c.pos); sel != nil { +- edits, err := c.editText(sel.Pos(), sel.Pos(), conv.prefix) +- if err != nil { +- // safetoken failed: invalid token.Pos information in AST. +- return +- } +- i.AdditionalTextEdits = append(i.AdditionalTextEdits, edits...) +- } else { +- // If there is no selector, just stick the prefix at the start. +- i.InsertText = conv.prefix + i.InsertText +- i.snippet.PrependText(conv.prefix) +- } +- } +- +- if conv.suffix != "" { +- i.InsertText += conv.suffix +- i.snippet.WriteText(conv.suffix) +- } +-} +- +-// Scoring constants are used for weighting the relevance of different candidates. +-const ( +- // lowScore indicates an irrelevant or not useful completion item. +- lowScore float64 = 0.01 +- +- // stdScore is the base score for all completion items. +- stdScore float64 = 1.0 +- +- // highScore indicates a very relevant completion item. +- highScore float64 = 10.0 +-) +- +-// matcher matches a candidate's label against the user input. The +-// returned score reflects the quality of the match. A score of zero +-// indicates no match, and a score of one means a perfect match. +-type matcher interface { +- Score(candidateLabel string) (score float32) +-} +- +-// prefixMatcher implements case sensitive prefix matching. +-type prefixMatcher string +- +-func (pm prefixMatcher) Score(candidateLabel string) float32 { +- if strings.HasPrefix(candidateLabel, string(pm)) { +- return 1 +- } +- return -1 +-} +- +-// insensitivePrefixMatcher implements case insensitive prefix matching. +-type insensitivePrefixMatcher string +- +-func (ipm insensitivePrefixMatcher) Score(candidateLabel string) float32 { +- if strings.HasPrefix(strings.ToLower(candidateLabel), string(ipm)) { +- return 1 +- } +- return -1 +-} +- +-// completer contains the necessary information for a single completion request. +-type completer struct { +- snapshot *cache.Snapshot +- pkg *cache.Package +- qual types.Qualifier // for qualifying typed expressions +- mq golang.MetadataQualifier // for syntactic qualifying +- opts *completionOptions +- +- // completionContext contains information about the trigger for this +- // completion request. +- completionContext completionContext +- +- // fh is a handle to the file associated with this completion request. +- fh file.Handle +- +- // filename is the name of the file associated with this completion request. +- filename string +- +- // pgf is the AST of the file associated with this completion request. 
+- pgf *parsego.File // debugging +- +- // goversion is the version of Go in force in the file, as +- // defined by x/tools/internal/versions. Empty if unknown. +- // Since go1.22 it should always be known. +- goversion string +- +- // pos is the position at which the request was triggered. +- pos token.Pos +- +- // path is the path of AST nodes enclosing the position. +- path []ast.Node +- +- // seen is the map that ensures we do not return duplicate results. +- seen map[types.Object]bool +- +- // items is the list of completion items returned. +- items []CompletionItem +- +- // completionCallbacks is a list of callbacks to collect completions that +- // require expensive operations. This includes operations where we search +- // through the entire module cache. +- completionCallbacks []func(context.Context, *imports.Options) error +- +- // surrounding describes the identifier surrounding the position. +- surrounding *Selection +- +- // inference contains information we've inferred about ideal +- // candidates such as the candidate's type. +- inference candidateInference +- +- // enclosingFunc contains information about the function enclosing +- // the position. +- enclosingFunc *funcInfo +- +- // enclosingCompositeLiteral contains information about the composite literal +- // enclosing the position. +- enclosingCompositeLiteral *compLitInfo +- +- // deepState contains the current state of our deep completion search. +- deepState deepCompletionState +- +- // matcher matches the candidates against the surrounding prefix. +- matcher matcher +- +- // methodSetCache caches the [types.NewMethodSet] call, which is relatively +- // expensive and can be called many times for the same type while searching +- // for deep completions. +- // TODO(adonovan): use [typeutil.MethodSetCache], which exists for this purpose. +- methodSetCache map[methodSetKey]*types.MethodSet +- +- // tooNewSymbolsCache is a cache of +- // [typesinternal.TooNewStdSymbols], recording for each std +- // package which of its exported symbols are too new for +- // the version of Go in force in the completion file. +- // (The value is the minimum version in the form "go1.%d".) +- tooNewSymbolsCache map[*types.Package]map[types.Object]string +- +- // mapper converts the positions in the file from which the completion originated. +- mapper *protocol.Mapper +- +- // startTime is when we started processing this completion request. It does +- // not include any time the request spent in the queue. +- // +- // Note: in CL 503016, startTime move to *after* type checking, but it was +- // subsequently determined that it was better to keep setting it *before* +- // type checking, so that the completion budget best approximates the user +- // experience. See golang/go#62665 for more details. +- startTime time.Time +- +- // scopes contains all scopes defined by nodes in our path, +- // including nil values for nodes that don't defined a scope. It +- // also includes our package scope and the universal scope at the +- // end. +- // +- // (It is tempting to replace this with fileScope.Innermost(pos) +- // and simply follow the Scope.Parent chain, but we need to +- // preserve the pairwise association of scopes[i] and path[i] +- // because there is no way to get from the Scope to the Node.) +- scopes []*types.Scope +-} +- +-// tooNew reports whether obj is a standard library symbol that is too +-// new for the specified Go version. 
+-func (c *completer) tooNew(obj types.Object) bool { +- pkg := obj.Pkg() +- if pkg == nil { +- return false // unsafe.Pointer or error.Error +- } +- disallowed, ok := c.tooNewSymbolsCache[pkg] +- if !ok { +- disallowed = typesinternal.TooNewStdSymbols(pkg, c.goversion) +- c.tooNewSymbolsCache[pkg] = disallowed +- } +- return disallowed[obj] != "" +-} +- +-// funcInfo holds info about a function object. +-type funcInfo struct { +- // sig is the function declaration enclosing the position. +- sig *types.Signature +- +- // body is the function's body. +- body *ast.BlockStmt +-} +- +-type compLitInfo struct { +- // cl is the *ast.CompositeLit enclosing the position. +- cl *ast.CompositeLit +- +- // clType is the type of cl. +- clType types.Type +- +- // kv is the *ast.KeyValueExpr enclosing the position, if any. +- kv *ast.KeyValueExpr +- +- // inKey is true if we are certain the position is in the key side +- // of a key-value pair. +- inKey bool +- +- // maybeInFieldName is true if inKey is false and it is possible +- // we are completing a struct field name. For example, +- // "SomeStruct{<>}" will be inKey=false, but maybeInFieldName=true +- // because we _could_ be completing a field name. +- maybeInFieldName bool +-} +- +-type importInfo struct { +- importPath string +- name string +-} +- +-type methodSetKey struct { +- typ types.Type +- addressable bool +-} +- +-type completionContext struct { +- // triggerCharacter is the character used to trigger completion at current +- // position, if any. +- triggerCharacter string +- +- // triggerKind is information about how a completion was triggered. +- triggerKind protocol.CompletionTriggerKind +- +- // commentCompletion is true if we are completing a comment. +- commentCompletion bool +- +- // packageCompletion is true if we are completing a package name. +- packageCompletion bool +-} +- +-// A Selection represents the cursor position and surrounding identifier. +-type Selection struct { +- content string +- tokFile *token.File +- start, end, cursor token.Pos // relative to rng.TokFile +- mapper *protocol.Mapper +-} +- +-// Range returns the surrounding identifier's protocol.Range. +-func (p Selection) Range() (protocol.Range, error) { +- return p.mapper.PosRange(p.tokFile, p.start, p.end) +-} +- +-// PrefixRange returns the protocol.Range of the prefix of the selection. +-func (p Selection) PrefixRange() (protocol.Range, error) { +- return p.mapper.PosRange(p.tokFile, p.start, p.cursor) +-} +- +-func (p Selection) Prefix() string { +- return p.content[:p.cursor-p.start] +-} +- +-func (p Selection) Suffix() string { +- return p.content[p.cursor-p.start:] +-} +- +-func (c *completer) setSurrounding(ident *ast.Ident) { +- if c.surrounding != nil { +- return +- } +- if !(ident.Pos() <= c.pos && c.pos <= ident.End()) { +- return +- } +- +- c.surrounding = &Selection{ +- content: ident.Name, +- cursor: c.pos, +- // Overwrite the prefix only. 
+- tokFile: c.pgf.Tok, +- start: ident.Pos(), +- end: ident.End(), +- mapper: c.mapper, +- } +- +- c.setMatcherFromPrefix(c.surrounding.Prefix()) +-} +- +-func (c *completer) setMatcherFromPrefix(prefix string) { +- switch c.opts.matcher { +- case settings.Fuzzy: +- c.matcher = fuzzy.NewMatcher(prefix) +- case settings.CaseSensitive: +- c.matcher = prefixMatcher(prefix) +- default: +- c.matcher = insensitivePrefixMatcher(strings.ToLower(prefix)) +- } +-} +- +-func (c *completer) getSurrounding() *Selection { +- if c.surrounding == nil { +- c.surrounding = &Selection{ +- content: "", +- cursor: c.pos, +- tokFile: c.pgf.Tok, +- start: c.pos, +- end: c.pos, +- mapper: c.mapper, +- } +- } +- return c.surrounding +-} +- +-// candidate represents a completion candidate. +-type candidate struct { +- // obj is the types.Object to complete to. +- // TODO(adonovan): eliminate dependence on go/types throughout this struct. +- // See comment in (*completer).selector for explanation. +- obj types.Object +- +- // score is used to rank candidates. +- score float64 +- +- // name is the deep object name path, e.g. "foo.bar" +- name string +- +- // detail is additional information about this item. If not specified, +- // defaults to type string for the object. +- detail string +- +- // path holds the path from the search root (excluding the candidate +- // itself) for a deep candidate. +- path []types.Object +- +- // pathInvokeMask is a bit mask tracking whether each entry in path +- // should be formatted with "()" (i.e. whether it is a function +- // invocation). +- pathInvokeMask uint16 +- +- // mods contains modifications that should be applied to the +- // candidate when inserted. For example, "foo" may be inserted as +- // "*foo" or "foo()". +- mods []typeModKind +- +- // addressable is true if a pointer can be taken to the candidate. +- addressable bool +- +- // convertTo is a type that this candidate should be cast to. For +- // example, if convertTo is float64, "foo" should be formatted as +- // "float64(foo)". +- convertTo types.Type +- +- // imp is the import that needs to be added to this package in order +- // for this candidate to be valid. nil if no import needed. +- imp *importInfo +-} +- +-func (c candidate) hasMod(mod typeModKind) bool { +- return slices.Contains(c.mods, mod) +-} +- +-// Completion returns a list of possible candidates for completion, given a +-// a file and a position. +-// +-// The selection is computed based on the preceding identifier and can be used by +-// the client to score the quality of the completion. For instance, some clients +-// may tolerate imperfect matches as valid completion results, since users may make typos. +-func Completion(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle, protoPos protocol.Position, protoContext protocol.CompletionContext) ([]CompletionItem, *Selection, error) { +- ctx, done := event.Start(ctx, "completion.Completion") +- defer done() +- +- startTime := time.Now() +- +- pkg, pgf, err := golang.NarrowestPackageForFile(ctx, snapshot, fh.URI()) +- if err != nil || !pgf.File.Package.IsValid() { +- // Invalid package declaration +- // +- // If we can't parse this file or find position for the package +- // keyword, it may be missing a package declaration. Try offering +- // suggestions for the package declaration. +- // Note that this would be the case even if the keyword 'package' is +- // present but no package name exists. 
+- items, surrounding, innerErr := packageClauseCompletions(ctx, snapshot, fh, protoPos) +- if innerErr != nil { +- // return the error for GetParsedFile since it's more relevant in this situation. +- return nil, nil, fmt.Errorf("getting file %s for Completion: %v (package completions: %v)", fh.URI(), err, innerErr) +- } +- return items, surrounding, nil +- } +- +- pos, err := pgf.PositionPos(protoPos) +- if err != nil { +- return nil, nil, err +- } +- // Completion is based on what precedes the cursor. +- // Find the path to the position before pos. +- path, _ := astutil.PathEnclosingInterval(pgf.File, pos-1, pos-1) +- if path == nil { +- return nil, nil, fmt.Errorf("cannot find node enclosing position") +- } +- +- info := pkg.TypesInfo() +- +- // Check if completion at this position is valid. If not, return early. +- switch n := path[0].(type) { +- case *ast.BasicLit: +- // Skip completion inside literals except for ImportSpec +- if len(path) > 1 { +- if _, ok := path[1].(*ast.ImportSpec); ok { +- break +- } +- } +- return nil, nil, nil +- case *ast.CallExpr: +- if n.Ellipsis.IsValid() && pos > n.Ellipsis && pos <= n.Ellipsis+token.Pos(len("...")) { +- // Don't offer completions inside or directly after "...". For +- // example, don't offer completions at "<>" in "foo(bar...<>"). +- return nil, nil, nil +- } +- case *ast.Ident: +- // Don't offer completions for (most) defining identifiers. +- if obj, ok := info.Defs[n]; ok { +- if v, ok := obj.(*types.Var); ok && v.IsField() && v.Embedded() { +- // Allow completion of anonymous fields, since they may reference type +- // names. +- } else if pgf.File.Name == n { +- // Allow package name completion. +- } else { +- // Check if we have special completion for this definition, such as +- // test function name completion. +- ans, sel := definition(path, obj, pgf) +- if ans != nil { +- sort.Slice(ans, func(i, j int) bool { +- return ans[i].Score > ans[j].Score +- }) +- return ans, sel, nil +- } +- +- return nil, nil, nil // No completions. +- } +- } +- } +- +- // Collect all surrounding scopes, innermost first, inserting +- // nils as needed to preserve the correspondence with path[i]. 
+- var scopes []*types.Scope +- for _, n := range path { +- switch node := n.(type) { +- case *ast.FuncDecl: +- n = node.Type +- case *ast.FuncLit: +- n = node.Type +- } +- scopes = append(scopes, info.Scopes[n]) +- } +- scopes = append(scopes, pkg.Types().Scope(), types.Universe) +- +- opts := snapshot.Options() +- c := &completer{ +- pkg: pkg, +- snapshot: snapshot, +- qual: typesinternal.FileQualifier(pgf.File, pkg.Types()), +- mq: golang.MetadataQualifierForFile(snapshot, pgf.File, pkg.Metadata()), +- completionContext: completionContext{ +- triggerCharacter: protoContext.TriggerCharacter, +- triggerKind: protoContext.TriggerKind, +- }, +- fh: fh, +- filename: fh.URI().Path(), +- pgf: pgf, +- goversion: versions.FileVersion(info, pgf.File), // may be "" => no version check +- path: path, +- pos: pos, +- seen: make(map[types.Object]bool), +- enclosingFunc: enclosingFunction(path, info), +- enclosingCompositeLiteral: enclosingCompositeLiteral(path, pos, info), +- deepState: deepCompletionState{ +- enabled: opts.DeepCompletion, +- }, +- opts: &completionOptions{ +- matcher: opts.Matcher, +- unimported: opts.CompleteUnimported, +- documentation: opts.CompletionDocumentation && opts.HoverKind != settings.NoDocumentation, +- fullDocumentation: opts.HoverKind == settings.FullDocumentation, +- placeholders: opts.UsePlaceholders, +- budget: opts.CompletionBudget, +- snippets: opts.InsertTextFormat == protocol.SnippetTextFormat, +- postfix: opts.ExperimentalPostfixCompletions, +- completeFunctionCalls: opts.CompleteFunctionCalls, +- }, +- // default to a matcher that always matches +- matcher: prefixMatcher(""), +- methodSetCache: make(map[methodSetKey]*types.MethodSet), +- tooNewSymbolsCache: make(map[*types.Package]map[types.Object]string), +- mapper: pgf.Mapper, +- startTime: startTime, +- scopes: scopes, +- } +- +- ctx, cancel := context.WithCancel(ctx) +- defer cancel() +- +- // Compute the deadline for this operation. Deadline is relative to the +- // search operation, not the entire completion RPC, as the work up until this +- // point depends significantly on how long it took to type-check, which in +- // turn depends on the timing of the request relative to other operations on +- // the snapshot. Including that work in the budget leads to inconsistent +- // results (and realistically, if type-checking took 200ms already, the user +- // is unlikely to be significantly more bothered by e.g. another 100ms of +- // search). +- // +- // Don't overload the context with this deadline, as we don't want to +- // conflate user cancellation (=fail the operation) with our time limit +- // (=stop searching and succeed with partial results). +- var deadline *time.Time +- if c.opts.budget > 0 { +- d := startTime.Add(c.opts.budget) +- deadline = &d +- } +- +- if surrounding := c.containingIdent(pgf.Src); surrounding != nil { +- c.setSurrounding(surrounding) +- } +- +- c.inference = expectedCandidate(ctx, c) +- +- err = c.collectCompletions(ctx) +- if err != nil { +- return nil, nil, fmt.Errorf("failed to collect completions: %v", err) +- } +- +- // Deep search collected candidates and their members for more candidates. +- c.deepSearch(ctx, 1, deadline) +- +- // At this point we have a sufficiently complete set of results, and want to +- // return as close to the completion budget as possible. Previously, we +- // avoided cancelling the context because it could result in partial results +- // for e.g. struct fields. 
At this point, we have a minimal valid set of +- // candidates, and so truncating due to context cancellation is acceptable. +- if c.opts.budget > 0 { +- timeoutDuration := time.Until(c.startTime.Add(c.opts.budget)) +- ctx, cancel = context.WithTimeout(ctx, timeoutDuration) +- defer cancel() +- } +- +- for _, callback := range c.completionCallbacks { +- if deadline == nil || time.Now().Before(*deadline) { +- if err := c.snapshot.RunProcessEnvFunc(ctx, callback); err != nil { +- return nil, nil, fmt.Errorf("failed to run goimports callback: %v", err) +- } +- } +- } +- +- // Search candidates populated by expensive operations like +- // unimportedMembers etc. for more completion items. +- c.deepSearch(ctx, 0, deadline) +- +- // Statement candidates offer an entire statement in certain contexts, as +- // opposed to a single object. Add statement candidates last because they +- // depend on other candidates having already been collected. +- c.addStatementCandidates() +- +- sortItems(c.items) +- return c.items, c.getSurrounding(), nil +-} +- +-// collectCompletions adds possible completion candidates to either the deep +-// search queue or completion items directly for different completion contexts. +-func (c *completer) collectCompletions(ctx context.Context) error { +- // Inside import blocks, return completions for unimported packages. +- for _, importSpec := range c.pgf.File.Imports { +- if !(importSpec.Path.Pos() <= c.pos && c.pos <= importSpec.Path.End()) { +- continue +- } +- return c.populateImportCompletions(importSpec) +- } +- +- // Inside comments, offer completions for the name of the relevant symbol. +- for _, comment := range c.pgf.File.Comments { +- if comment.Pos() < c.pos && c.pos <= comment.End() { +- c.populateCommentCompletions(comment) +- return nil +- } +- } +- +- // Struct literals are handled entirely separately. +- if wantStructFieldCompletions(c.enclosingCompositeLiteral) { +- // If we are definitely completing a struct field name, deep completions +- // don't make sense. +- if c.enclosingCompositeLiteral.inKey { +- c.deepState.enabled = false +- } +- return c.structLiteralFieldName(ctx) +- } +- +- if lt := c.wantLabelCompletion(); lt != labelNone { +- c.labels(lt) +- return nil +- } +- +- if c.emptySwitchStmt() { +- // Empty switch statements only admit "default" and "case" keywords. +- c.addKeywordItems(map[string]bool{}, highScore, CASE, DEFAULT) +- return nil +- } +- +- switch n := c.path[0].(type) { +- case *ast.Ident: +- if c.pgf.File.Name == n { +- return c.packageNameCompletions(ctx, c.fh.URI(), n) +- } else if sel, ok := c.path[1].(*ast.SelectorExpr); ok && sel.Sel == n { +- // We are in the Sel part of a selector (e.g. x.‸sel or x.sel‸). +- return c.selector(ctx, sel) +- } +- return c.lexical(ctx) +- +- case *ast.TypeAssertExpr: +- // The function name hasn't been typed yet, but the parens are there: +- // recv.‸(arg) +- // Create a fake selector expression. +- +- // The name "_" is the convention used by go/parser to represent phantom +- // selectors. +- sel := &ast.Ident{NamePos: n.X.End() + token.Pos(len(".")), Name: "_"} +- return c.selector(ctx, &ast.SelectorExpr{X: n.X, Sel: sel}) +- +- case *ast.SelectorExpr: +- // We are in the X part of a selector (x‸.sel), +- // or after the dot with a fixed/phantom Sel (x.‸_). +- return c.selector(ctx, n) +- +- case *ast.BadDecl, *ast.File: +- // At the file scope, only keywords are allowed. 
+- c.addKeywordCompletions() +- +- default: +- // fallback to lexical completions +- return c.lexical(ctx) +- } +- +- return nil +-} +- +-// containingIdent returns the *ast.Ident containing pos, if any. It +-// synthesizes an *ast.Ident to allow completion in the face of +-// certain syntax errors. +-func (c *completer) containingIdent(src []byte) *ast.Ident { +- // In the normal case, our leaf AST node is the identifier being completed. +- if ident, ok := c.path[0].(*ast.Ident); ok { +- return ident +- } +- +- pos, tkn, lit := c.scanToken(src) +- if !pos.IsValid() { +- return nil +- } +- +- fakeIdent := &ast.Ident{Name: lit, NamePos: pos} +- if _, isBadDecl := c.path[0].(*ast.BadDecl); isBadDecl { +- // You don't get *ast.Idents at the file level, so look for bad +- // decls and use the manually extracted token. +- return fakeIdent +- } else if c.emptySwitchStmt() { +- // Only keywords are allowed in empty switch statements. +- // *ast.Idents are not parsed, so we must use the manually +- // extracted token. +- return fakeIdent +- } else if tkn.IsKeyword() { +- // Otherwise, manually extract the prefix if our containing token +- // is a keyword. This improves completion after an "accidental +- // keyword", e.g. completing to "variance" in "someFunc(var<>)". +- return fakeIdent +- } else if block, ok := c.path[0].(*ast.BlockStmt); ok && len(block.List) != 0 { +- last := block.List[len(block.List)-1] +- // Handle incomplete AssignStmt with multiple left-hand vars: +- // var left, right int +- // left, ri‸ -> "right" +- if expr, ok := last.(*ast.ExprStmt); ok && +- (is[*ast.Ident](expr.X) || +- is[*ast.SelectorExpr](expr.X) || +- is[*ast.IndexExpr](expr.X) || +- is[*ast.StarExpr](expr.X)) { +- return fakeIdent +- } +- } +- +- return nil +-} +- +-// scanToken scans pgh's contents for the token containing pos. +-func (c *completer) scanToken(contents []byte) (token.Pos, token.Token, string) { +- tok := c.pkg.FileSet().File(c.pos) +- +- var s scanner.Scanner +- // TODO(adonovan): fix! this mutates the token.File borrowed from c.pkg, +- // calling AddLine and AddLineColumnInfo. Not sound! +- s.Init(tok, contents, nil, 0) +- for { +- tknPos, tkn, lit := s.Scan() +- if tkn == token.EOF || tknPos >= c.pos { +- return token.NoPos, token.ILLEGAL, "" +- } +- +- if len(lit) > 0 && tknPos <= c.pos && c.pos <= tknPos+token.Pos(len(lit)) { +- return tknPos, tkn, lit +- } +- } +-} +- +-func sortItems(items []CompletionItem) { +- sort.SliceStable(items, func(i, j int) bool { +- // Sort by score first. +- if items[i].Score != items[j].Score { +- return items[i].Score > items[j].Score +- } +- +- // Then sort by label so order stays consistent. This also has the +- // effect of preferring shorter candidates. +- return items[i].Label < items[j].Label +- }) +-} +- +-// emptySwitchStmt reports whether pos is in an empty switch or select +-// statement. +-func (c *completer) emptySwitchStmt() bool { +- block, ok := c.path[0].(*ast.BlockStmt) +- if !ok || len(block.List) > 0 || len(c.path) == 1 { +- return false +- } +- +- switch c.path[1].(type) { +- case *ast.SwitchStmt, *ast.TypeSwitchStmt, *ast.SelectStmt: +- return true +- default: +- return false +- } +-} +- +-// populateImportCompletions yields completions for an import path around the cursor. +-// +-// Completions are suggested at the directory depth of the given import path so +-// that we don't overwhelm the user with a large list of possibilities. As an +-// example, a completion for the prefix "golang" results in "golang.org/". 
+-// Completions for "golang.org/" yield its subdirectories +-// (i.e. "golang.org/x/"). The user is meant to accept completion suggestions +-// until they reach a complete import path. +-func (c *completer) populateImportCompletions(searchImport *ast.ImportSpec) error { +- if !strings.HasPrefix(searchImport.Path.Value, `"`) { +- return nil +- } +- +- // deepSearch is not valuable for import completions. +- c.deepState.enabled = false +- +- importPath := searchImport.Path.Value +- +- // Extract the text between the quotes (if any) in an import spec. +- // prefix is the part of import path before the cursor. +- prefixEnd := c.pos - searchImport.Path.Pos() +- prefix := strings.Trim(importPath[:prefixEnd], `"`) +- +- // The number of directories in the import path gives us the depth at +- // which to search. +- depth := len(strings.Split(prefix, "/")) - 1 +- +- content := importPath +- start, end := searchImport.Path.Pos(), searchImport.Path.End() +- namePrefix, nameSuffix := `"`, `"` +- // If a starting quote is present, adjust surrounding to either after the +- // cursor or after the first slash (/), except if cursor is at the starting +- // quote. Otherwise we provide a completion including the starting quote. +- if strings.HasPrefix(importPath, `"`) && c.pos > searchImport.Path.Pos() { +- content = content[1:] +- start++ +- if depth > 0 { +- // Adjust textEdit start to replacement range. For ex: if current +- // path was "golang.or/x/to<>ols/internal/", where <> is the cursor +- // position, start of the replacement range would be after +- // "golang.org/x/". +- path := strings.SplitAfter(prefix, "/") +- numChars := len(strings.Join(path[:len(path)-1], "")) +- content = content[numChars:] +- start += token.Pos(numChars) +- } +- namePrefix = "" +- } +- +- // We won't provide an ending quote if one is already present, except if +- // cursor is after the ending quote but still in import spec. This is +- // because cursor has to be in our textEdit range. +- if strings.HasSuffix(importPath, `"`) && c.pos < searchImport.Path.End() { +- end-- +- content = content[:len(content)-1] +- nameSuffix = "" +- } +- +- c.surrounding = &Selection{ +- content: content, +- cursor: c.pos, +- tokFile: c.pgf.Tok, +- start: start, +- end: end, +- mapper: c.mapper, +- } +- +- seenImports := make(map[string]struct{}) +- for _, importSpec := range c.pgf.File.Imports { +- if importSpec.Path.Value == importPath { +- continue +- } +- seenImportPath, err := strconv.Unquote(importSpec.Path.Value) +- if err != nil { +- return err +- } +- seenImports[seenImportPath] = struct{}{} +- } +- +- var mu sync.Mutex // guard c.items locally, since searchImports is called in parallel +- seen := make(map[string]struct{}) +- searchImports := func(pkg imports.ImportFix) { +- path := pkg.StmtInfo.ImportPath +- if _, ok := seenImports[path]; ok { +- return +- } +- +- // Any package path containing fewer directories than the search +- // prefix is not a match. +- pkgDirList := strings.Split(path, "/") +- if len(pkgDirList) < depth+1 { +- return +- } +- pkgToConsider := strings.Join(pkgDirList[:depth+1], "/") +- +- name := pkgDirList[depth] +- // if we're adding an opening quote to completion too, set name to full +- // package path since we'll need to overwrite that range. 
+- if namePrefix == `"` { +- name = pkgToConsider +- } +- +- score := pkg.Relevance +- if len(pkgDirList)-1 == depth { +- score *= highScore +- } else { +- // For incomplete package paths, add a terminal slash to indicate that the +- // user should keep triggering completions. +- name += "/" +- pkgToConsider += "/" +- } +- +- if _, ok := seen[pkgToConsider]; ok { +- return +- } +- seen[pkgToConsider] = struct{}{} +- +- mu.Lock() +- defer mu.Unlock() +- +- name = namePrefix + name + nameSuffix +- obj := types.NewPkgName(0, nil, name, types.NewPackage(pkgToConsider, name)) +- c.deepState.enqueue(candidate{ +- obj: obj, +- detail: strconv.Quote(pkgToConsider), +- score: score, +- }) +- } +- +- c.completionCallbacks = append(c.completionCallbacks, func(ctx context.Context, opts *imports.Options) error { +- if err := imports.GetImportPaths(ctx, searchImports, prefix, c.filename, c.pkg.Types().Name(), opts.Env); err != nil { +- return fmt.Errorf("getting import paths: %v", err) +- } +- return nil +- }) +- return nil +-} +- +-// populateCommentCompletions yields completions for comments preceding or in declarations. +-func (c *completer) populateCommentCompletions(comment *ast.CommentGroup) { +- // If the completion was triggered by a period, ignore it. These types of +- // completions will not be useful in comments. +- if c.completionContext.triggerCharacter == "." { +- return +- } +- +- // Using the comment position find the line after +- file := c.pkg.FileSet().File(comment.End()) +- if file == nil { +- return +- } +- +- // Deep completion doesn't work properly in comments since we don't +- // have a type object to complete further. +- c.deepState.enabled = false +- c.completionContext.commentCompletion = true +- +- // Documentation isn't useful in comments, since it might end up being the +- // comment itself. +- c.opts.documentation = false +- +- commentLine := safetoken.Line(file, comment.End()) +- +- // comment is valid, set surrounding as word boundaries around cursor +- c.setSurroundingForComment(comment) +- +- // Using the next line pos, grab and parse the exported symbol on that line +- for _, n := range c.pgf.File.Decls { +- declLine := safetoken.Line(file, n.Pos()) +- // if the comment is not in, directly above or on the same line as a declaration +- if declLine != commentLine && declLine != commentLine+1 && +- !(n.Pos() <= comment.Pos() && comment.End() <= n.End()) { +- continue +- } +- switch node := n.(type) { +- // handle const, vars, and types +- case *ast.GenDecl: +- for _, spec := range node.Specs { +- switch spec := spec.(type) { +- case *ast.ValueSpec: +- for _, name := range spec.Names { +- if name.String() == "_" { +- continue +- } +- obj := c.pkg.TypesInfo().ObjectOf(name) +- c.deepState.enqueue(candidate{obj: obj, score: stdScore}) +- } +- case *ast.TypeSpec: +- // add TypeSpec fields to completion +- switch typeNode := spec.Type.(type) { +- case *ast.StructType: +- c.addFieldItems(typeNode.Fields) +- case *ast.FuncType: +- c.addFieldItems(typeNode.Params) +- c.addFieldItems(typeNode.Results) +- case *ast.InterfaceType: +- c.addFieldItems(typeNode.Methods) +- } +- +- if spec.Name.String() == "_" { +- continue +- } +- +- obj := c.pkg.TypesInfo().ObjectOf(spec.Name) +- // Type name should get a higher score than fields but not highScore by default +- // since field near a comment cursor gets a highScore +- score := stdScore * 1.1 +- // If type declaration is on the line after comment, give it a highScore. 
+- if declLine == commentLine+1 { +- score = highScore +- } +- +- c.deepState.enqueue(candidate{obj: obj, score: score}) +- } +- } +- // handle functions +- case *ast.FuncDecl: +- c.addFieldItems(node.Recv) +- c.addFieldItems(node.Type.Params) +- c.addFieldItems(node.Type.Results) +- +- // collect receiver struct fields +- if node.Recv != nil { +- obj := c.pkg.TypesInfo().Defs[node.Name] +- switch obj.(type) { +- case nil: +- report := func() { +- bug.Reportf("missing def for func %s", node.Name) +- } +- // Debugging golang/go#71273. +- if !slices.Contains(c.pkg.CompiledGoFiles(), c.pgf) { +- if c.snapshot.View().Type() == cache.GoPackagesDriverView { +- report() +- } else { +- report() +- } +- } else { +- report() +- } +- continue +- case *types.Func: +- default: +- bug.Reportf("unexpected func obj type %T for %s", obj, node.Name) +- } +- sig := obj.(*types.Func).Signature() +- recv := sig.Recv() +- if recv == nil { +- continue // may be nil if ill-typed +- } +- _, named := typesinternal.ReceiverNamed(recv) +- if named != nil { +- if recvStruct, ok := named.Underlying().(*types.Struct); ok { +- for i := range recvStruct.NumFields() { +- field := recvStruct.Field(i) +- c.deepState.enqueue(candidate{obj: field, score: lowScore}) +- } +- } +- } +- } +- +- if node.Name.String() == "_" { +- continue +- } +- +- obj := c.pkg.TypesInfo().ObjectOf(node.Name) +- if obj == nil || obj.Pkg() != nil && obj.Pkg() != c.pkg.Types() { +- continue +- } +- +- c.deepState.enqueue(candidate{obj: obj, score: highScore}) +- } +- } +-} +- +-// sets word boundaries surrounding a cursor for a comment +-func (c *completer) setSurroundingForComment(comments *ast.CommentGroup) { +- var cursorComment *ast.Comment +- for _, comment := range comments.List { +- if c.pos >= comment.Pos() && c.pos <= comment.End() { +- cursorComment = comment +- break +- } +- } +- // if cursor isn't in the comment +- if cursorComment == nil { +- return +- } +- +- // index of cursor in comment text +- cursorOffset := int(c.pos - cursorComment.Pos()) +- start, end := cursorOffset, cursorOffset +- for start > 0 && isValidIdentifierChar(cursorComment.Text[start-1]) { +- start-- +- } +- for end < len(cursorComment.Text) && isValidIdentifierChar(cursorComment.Text[end]) { +- end++ +- } +- +- c.surrounding = &Selection{ +- content: cursorComment.Text[start:end], +- cursor: c.pos, +- tokFile: c.pgf.Tok, +- start: token.Pos(int(cursorComment.Slash) + start), +- end: token.Pos(int(cursorComment.Slash) + end), +- mapper: c.mapper, +- } +- c.setMatcherFromPrefix(c.surrounding.Prefix()) +-} +- +-// isValidIdentifierChar returns true if a byte is a valid go identifier +-// character, i.e. unicode letter or digit or underscore. +-func isValidIdentifierChar(char byte) bool { +- charRune := rune(char) +- return unicode.In(charRune, unicode.Letter, unicode.Digit) || char == '_' +-} +- +-// adds struct fields, interface methods, function declaration fields to completion +-func (c *completer) addFieldItems(fields *ast.FieldList) { +- // TODO: in golang/go#72828, we get here with a nil surrounding. +- // This indicates a logic bug elsewhere: we should only be interrogating the +- // surrounding if it is set. 
+- if fields == nil || c.surrounding == nil { +- return +- } +- +- cursor := c.surrounding.cursor +- for _, field := range fields.List { +- for _, name := range field.Names { +- if name.String() == "_" { +- continue +- } +- obj := c.pkg.TypesInfo().ObjectOf(name) +- if obj == nil { +- continue +- } +- +- // if we're in a field comment/doc, score that field as more relevant +- score := stdScore +- if field.Comment != nil && field.Comment.Pos() <= cursor && cursor <= field.Comment.End() { +- score = highScore +- } else if field.Doc != nil && field.Doc.Pos() <= cursor && cursor <= field.Doc.End() { +- score = highScore +- } +- +- c.deepState.enqueue(candidate{obj: obj, score: score}) +- } +- } +-} +- +-func wantStructFieldCompletions(enclosingCl *compLitInfo) bool { +- if enclosingCl == nil { +- return false +- } +- return is[*types.Struct](enclosingCl.clType) && (enclosingCl.inKey || enclosingCl.maybeInFieldName) +-} +- +-func (c *completer) wantTypeName() bool { +- return !c.completionContext.commentCompletion && c.inference.typeName.wantTypeName +-} +- +-// See https://golang.org/issue/36001. Unimported completions are expensive. +-const ( +- maxUnimportedPackageNames = 5 +- unimportedMemberTarget = 100 +-) +- +-// selector finds completions for the specified selector expression. +-// +-// The caller should ensure that sel.X has type information, +-// even if sel is synthetic. +-func (c *completer) selector(ctx context.Context, sel *ast.SelectorExpr) error { +- c.inference.objChain = objChain(c.pkg.TypesInfo(), sel.X) +- +- // True selector? +- if tv, ok := c.pkg.TypesInfo().Types[sel.X]; ok { +- c.methodsAndFields(tv.Type, tv.Addressable(), nil, c.deepState.enqueue) +- c.addPostfixSnippetCandidates(ctx, sel) +- return nil +- } +- +- id, ok := sel.X.(*ast.Ident) +- if !ok { +- return nil +- } +- +- // Treat sel as a qualified identifier. +- var filter func(*metadata.Package) bool +- needImport := false +- if pkgName, ok := c.pkg.TypesInfo().Uses[id].(*types.PkgName); ok { +- // Qualified identifier with import declaration. +- imp := pkgName.Imported() +- +- // Known direct dependency? Expand using type information. +- if _, ok := c.pkg.Metadata().DepsByPkgPath[golang.PackagePath(imp.Path())]; ok { +- c.packageMembers(imp, stdScore, nil, c.deepState.enqueue) +- return nil +- } +- +- // Imported declaration with missing type information. +- // Fall through to shallow completion of unimported package members. +- // Match candidate packages by path. +- filter = func(mp *metadata.Package) bool { +- return strings.TrimPrefix(string(mp.PkgPath), "vendor/") == imp.Path() +- } +- } else { +- // Qualified identifier without import declaration. +- // Match candidate packages by name. +- filter = func(mp *metadata.Package) bool { +- return string(mp.Name) == id.Name +- } +- needImport = true +- } +- +- // Search unimported packages. 
+- if !c.opts.unimported { +- return nil // feature disabled +- } +- +- // -- completion of symbols in unimported packages -- +- +- // use new code for unimported completions, if flag allows it +- if c.snapshot.Options().ImportsSource == settings.ImportsSourceGopls { +- // The user might have typed strings.TLower, so id.Name==strings, sel.Sel.Name == TLower, +- // but the cursor might be inside TLower, so adjust the prefix +- prefix := sel.Sel.Name +- if c.surrounding != nil { +- if c.surrounding.content != sel.Sel.Name { +- bug.Reportf("unexpected surrounding: %q != %q", c.surrounding.content, sel.Sel.Name) +- } else { +- prefix = sel.Sel.Name[:c.surrounding.cursor-c.surrounding.start] +- } +- } +- c.unimported(ctx, metadata.PackageName(id.Name), prefix) +- return nil +- +- } +- +- // The deep completion algorithm is exceedingly complex and +- // deeply coupled to the now obsolete notions that all +- // token.Pos values can be interpreted by as a single FileSet +- // belonging to the Snapshot and that all types.Object values +- // are canonicalized by a single types.Importer mapping. +- // These invariants are no longer true now that gopls uses +- // an incremental approach, parsing and type-checking each +- // package separately. +- // +- // Consequently, completion of symbols defined in packages that +- // are not currently imported by the query file cannot use the +- // deep completion machinery which is based on type information. +- // Instead it must use only syntax information from a quick +- // parse of top-level declarations (but not function bodies). +- // +- // TODO(adonovan): rewrite the deep completion machinery to +- // not assume global Pos/Object realms and then use export +- // data instead of the quick parse approach taken here. +- +- // First, we search among packages in the forward transitive +- // closure of the workspace. +- // We'll use a fast parse to extract package members +- // from those that match the name/path criterion. +- all, err := c.snapshot.AllMetadata(ctx) +- if err != nil { +- return err +- } +- known := make(map[golang.PackagePath]*metadata.Package) +- for _, mp := range all { +- if mp.Name == "main" { +- continue // not importable +- } +- if mp.IsIntermediateTestVariant() { +- continue +- } +- // The only test variant we admit is "p [p.test]" +- // when we are completing within "p_test [p.test]", +- // as in that case we would like to offer completions +- // of the test variants' additional symbols. +- if mp.ForTest != "" && c.pkg.Metadata().PkgPath != mp.ForTest+"_test" { +- continue +- } +- if !filter(mp) { +- continue +- } +- // Prefer previous entry unless this one is its test variant. +- if mp.ForTest != "" || known[mp.PkgPath] == nil { +- known[mp.PkgPath] = mp +- } +- } +- +- paths := make([]string, 0, len(known)) +- for path := range known { +- paths = append(paths, string(path)) +- } +- +- // Rank import paths as goimports would. +- var relevances map[string]float64 +- if len(paths) > 0 { +- if err := c.snapshot.RunProcessEnvFunc(ctx, func(ctx context.Context, opts *imports.Options) error { +- var err error +- relevances, err = imports.ScoreImportPaths(ctx, opts.Env, paths) +- return err +- }); err != nil { +- return err +- } +- sort.Slice(paths, func(i, j int) bool { +- return relevances[paths[i]] > relevances[paths[j]] +- }) +- } +- +- // quickParse does a quick parse of a single file of package m, +- // extracts exported package members and adds candidates to c.items. 
+- // TODO(rfindley): synchronizing access to c here does not feel right. +- // Consider adding a concurrency-safe API for completer. +- var cMu sync.Mutex // guards c.items and c.matcher +- var enough int32 // atomic bool +- quickParse := func(uri protocol.DocumentURI, mp *metadata.Package, tooNew map[string]bool) error { +- if atomic.LoadInt32(&enough) != 0 { +- return nil +- } +- +- fh, err := c.snapshot.ReadFile(ctx, uri) +- if err != nil { +- return err +- } +- content, err := fh.Content() +- if err != nil { +- return err +- } +- path := string(mp.PkgPath) +- forEachPackageMember(content, func(tok token.Token, id *ast.Ident, fn *ast.FuncDecl) { +- if atomic.LoadInt32(&enough) != 0 { +- return +- } +- +- if !id.IsExported() { +- return +- } +- +- if tooNew[id.Name] { +- return // symbol too new for requesting file's Go's version +- } +- +- cMu.Lock() +- score := c.matcher.Score(id.Name) +- cMu.Unlock() +- +- if sel.Sel.Name != "_" && score == 0 { +- return // not a match; avoid constructing the completion item below +- } +- +- // The only detail is the kind and package: `var (from "example.com/foo")` +- // TODO(adonovan): pretty-print FuncDecl.FuncType or TypeSpec.Type? +- // TODO(adonovan): should this score consider the actual c.matcher.Score +- // of the item? How does this compare with the deepState.enqueue path? +- item := CompletionItem{ +- Label: id.Name, +- Detail: fmt.Sprintf("%s (from %q)", strings.ToLower(tok.String()), mp.PkgPath), +- InsertText: id.Name, +- Score: float64(score) * unimportedScore(relevances[path]), +- } +- switch tok { +- case token.FUNC: +- item.Kind = protocol.FunctionCompletion +- case token.VAR: +- item.Kind = protocol.VariableCompletion +- case token.CONST: +- item.Kind = protocol.ConstantCompletion +- case token.TYPE: +- // Without types, we can't distinguish Class from Interface. +- item.Kind = protocol.ClassCompletion +- } +- +- if needImport { +- imp := &importInfo{importPath: path} +- if imports.ImportPathToAssumedName(path) != string(mp.Name) { +- imp.name = string(mp.Name) +- } +- item.AdditionalTextEdits, _ = c.importEdits(imp) +- } +- +- // For functions, add a parameter snippet. +- if fn != nil { +- paramList := func(list *ast.FieldList) []string { +- var params []string +- if list != nil { +- var cfg printer.Config // slight overkill +- param := func(name string, typ ast.Expr) { +- var buf strings.Builder +- buf.WriteString(name) +- buf.WriteByte(' ') +- cfg.Fprint(&buf, token.NewFileSet(), typ) // ignore error +- params = append(params, buf.String()) +- } +- +- for _, field := range list.List { +- if field.Names != nil { +- for _, name := range field.Names { +- param(name.Name, field.Type) +- } +- } else { +- param("_", field.Type) +- } +- } +- } +- return params +- } +- +- // Ideally we would eliminate the suffix of type +- // parameters that are redundant with inference +- // from the argument types (#51783), but it's +- // quite fiddly to do using syntax alone. +- // (See inferableTypeParams in format.go.) +- tparams := paramList(fn.Type.TypeParams) +- params := paramList(fn.Type.Params) +- var sn snippet.Builder +- c.functionCallSnippet(id.Name, tparams, params, &sn) +- item.snippet = &sn +- } +- +- cMu.Lock() +- c.items = append(c.items, item) +- if len(c.items) >= unimportedMemberTarget { +- atomic.StoreInt32(&enough, 1) +- } +- cMu.Unlock() +- }) +- return nil +- } +- +- goversion := c.pkg.TypesInfo().FileVersions[c.pgf.File] +- +- // Extract the package-level candidates using a quick parse. 
+- var g errgroup.Group +- for _, path := range paths { +- mp := known[golang.PackagePath(path)] +- +- // For standard packages, build a filter of symbols that +- // are too new for the requesting file's Go version. +- var tooNew map[string]bool +- if syms, ok := stdlib.PackageSymbols[path]; ok && goversion != "" { +- tooNew = make(map[string]bool) +- for _, sym := range syms { +- if versions.Before(goversion, sym.Version.String()) { +- tooNew[sym.Name] = true +- } +- } +- } +- +- for _, uri := range mp.CompiledGoFiles { +- g.Go(func() error { +- return quickParse(uri, mp, tooNew) +- }) +- } +- } +- if err := g.Wait(); err != nil { +- return err +- } +- +- // In addition, we search in the module cache using goimports. +- ctx, cancel := context.WithCancel(ctx) +- var mu sync.Mutex +- add := func(pkgExport imports.PackageExport) { +- if ignoreUnimportedCompletion(pkgExport.Fix) { +- return +- } +- +- mu.Lock() +- defer mu.Unlock() +- // TODO(adonovan): what if the actual package has a vendor/ prefix? +- if _, ok := known[golang.PackagePath(pkgExport.Fix.StmtInfo.ImportPath)]; ok { +- return // We got this one above. +- } +- +- // Continue with untyped proposals. +- pkg := types.NewPackage(pkgExport.Fix.StmtInfo.ImportPath, pkgExport.Fix.IdentName) +- for _, symbol := range pkgExport.Exports { +- if goversion != "" && versions.Before(goversion, symbol.Version.String()) { +- continue // symbol too new for this file +- } +- score := unimportedScore(pkgExport.Fix.Relevance) +- c.deepState.enqueue(candidate{ +- obj: types.NewVar(0, pkg, symbol.Name, nil), +- score: score, +- imp: &importInfo{ +- importPath: pkgExport.Fix.StmtInfo.ImportPath, +- name: pkgExport.Fix.StmtInfo.Name, +- }, +- }) +- } +- if len(c.items) >= unimportedMemberTarget { +- cancel() +- } +- } +- +- c.completionCallbacks = append(c.completionCallbacks, func(ctx context.Context, opts *imports.Options) error { +- defer cancel() +- if err := imports.GetPackageExports(ctx, add, id.Name, c.filename, c.pkg.Types().Name(), opts.Env); err != nil { +- return fmt.Errorf("getting package exports: %v", err) +- } +- return nil +- }) +- return nil +-} +- +-// unimportedScore returns a score for an unimported package that is generally +-// lower than other candidates. +-func unimportedScore(relevance float64) float64 { +- return (stdScore + .1*relevance) / 2 +-} +- +-func (c *completer) packageMembers(pkg *types.Package, score float64, imp *importInfo, cb func(candidate)) { +- scope := pkg.Scope() +- for _, name := range scope.Names() { +- obj := scope.Lookup(name) +- if c.tooNew(obj) { +- continue // std symbol too new for file's Go version +- } +- cb(candidate{ +- obj: obj, +- score: score, +- imp: imp, +- addressable: isVar(obj), +- }) +- } +-} +- +-// ignoreUnimportedCompletion reports whether an unimported completion +-// resulting in the given import should be ignored. +-func ignoreUnimportedCompletion(fix *imports.ImportFix) bool { +- // golang/go#60062: don't add unimported completion to golang.org/toolchain. +- return fix != nil && strings.HasPrefix(fix.StmtInfo.ImportPath, "golang.org/toolchain") +-} +- +-func (c *completer) methodsAndFields(typ types.Type, addressable bool, imp *importInfo, cb func(candidate)) { +- if isStarTestingDotF(typ) { +- // is that a sufficient test? (or is more care needed?) 
+- if c.fuzz(typ, imp, cb) { +- return +- } +- } +- +- mset := c.methodSetCache[methodSetKey{typ, addressable}] +- if mset == nil { +- if addressable && !types.IsInterface(typ) && !isPointer(typ) { +- // Add methods of *T, which includes methods with receiver T. +- mset = types.NewMethodSet(types.NewPointer(typ)) +- } else { +- // Add methods of T. +- mset = types.NewMethodSet(typ) +- } +- c.methodSetCache[methodSetKey{typ, addressable}] = mset +- } +- +- for i := range mset.Len() { +- obj := mset.At(i).Obj() +- // to the other side of the cb() queue? +- if c.tooNew(obj) { +- continue // std method too new for file's Go version +- } +- cb(candidate{ +- obj: mset.At(i).Obj(), +- score: stdScore, +- imp: imp, +- addressable: addressable || isPointer(typ), +- }) +- } +- +- // Add fields of T. +- eachField(typ, func(v *types.Var) { +- if c.tooNew(v) { +- return // std field too new for file's Go version +- } +- cb(candidate{ +- obj: v, +- score: stdScore - 0.01, +- imp: imp, +- addressable: addressable || isPointer(typ), +- }) +- }) +-} +- +-// isStarTestingDotF reports whether typ is *testing.F. +-func isStarTestingDotF(typ types.Type) bool { +- // No Unalias, since go test doesn't consider +- // types when enumeratinf test funcs, only syntax. +- ptr, _ := typ.(*types.Pointer) +- if ptr == nil { +- return false +- } +- named, _ := ptr.Elem().(*types.Named) +- if named == nil { +- return false +- } +- obj := named.Obj() +- // obj.Pkg is nil for the error type. +- return obj != nil && obj.Pkg() != nil && obj.Pkg().Path() == "testing" && obj.Name() == "F" +-} +- +-// lexical finds completions in the lexical environment. +-func (c *completer) lexical(ctx context.Context) error { +- var ( +- builtinIota = types.Universe.Lookup("iota") +- builtinNil = types.Universe.Lookup("nil") +- +- // TODO(rfindley): only allow "comparable" where it is valid (in constraint +- // position or embedded in interface declarations). +- // builtinComparable = types.Universe.Lookup("comparable") +- ) +- +- // Track seen variables to avoid showing completions for shadowed variables. +- // This works since we look at scopes from innermost to outermost. +- seen := make(map[string]struct{}) +- +- // Process scopes innermost first. +- for i, scope := range c.scopes { +- if scope == nil { +- continue +- } +- +- Names: +- for _, name := range scope.Names() { +- declScope, obj := scope.LookupParent(name, c.pos) +- if declScope != scope { +- continue // scope of name starts after c.pos +- } +- +- // If obj's type is invalid, find the AST node that defines the lexical block +- // containing the declaration of obj. Don't resolve types for packages. +- if !isPkgName(obj) && !typeIsValid(obj.Type()) { +- // Match the scope to its ast.Node. If the scope is the package scope, +- // use the *ast.File as the starting node. +- var node ast.Node +- if i < len(c.path) { +- node = c.path[i] +- } else if i == len(c.path) { // use the *ast.File for package scope +- node = c.path[i-1] +- } +- if node != nil { +- if resolved := resolveInvalid(c.pkg.FileSet(), obj, node, c.pkg.TypesInfo()); resolved != nil { +- obj = resolved +- } +- } +- } +- +- // Don't use LHS of decl in RHS. +- for _, ident := range enclosingDeclLHS(c.path) { +- if obj.Pos() == ident.Pos() { +- continue Names +- } +- } +- +- // Don't suggest "iota" outside of const decls. +- if obj == builtinIota && !c.inConstDecl() { +- continue +- } +- +- // Rank outer scopes lower than inner. 
+- score := stdScore * math.Pow(.99, float64(i)) +- +- // Dowrank "nil" a bit so it is ranked below more interesting candidates. +- if obj == builtinNil { +- score /= 2 +- } +- +- // If we haven't already added a candidate for an object with this name. +- if _, ok := seen[obj.Name()]; !ok { +- seen[obj.Name()] = struct{}{} +- c.deepState.enqueue(candidate{ +- obj: obj, +- score: score, +- addressable: isVar(obj), +- }) +- } +- } +- } +- +- if c.inference.objType != nil { +- if named, ok := types.Unalias(typesinternal.Unpointer(c.inference.objType)).(*types.Named); ok { +- // If we expected a named type, check the type's package for +- // completion items. This is useful when the current file hasn't +- // imported the type's package yet. +- +- if named.Obj() != nil && named.Obj().Pkg() != nil { +- pkg := named.Obj().Pkg() +- +- // Make sure the package name isn't already in use by another +- // object, and that this file doesn't import the package yet. +- // TODO(adonovan): what if pkg.Path has vendor/ prefix? +- if _, ok := seen[pkg.Name()]; !ok && pkg != c.pkg.Types() && !alreadyImports(c.pgf.File, golang.ImportPath(pkg.Path())) { +- seen[pkg.Name()] = struct{}{} +- obj := types.NewPkgName(0, nil, pkg.Name(), pkg) +- imp := &importInfo{ +- importPath: pkg.Path(), +- } +- if imports.ImportPathToAssumedName(pkg.Path()) != pkg.Name() { +- imp.name = pkg.Name() +- } +- c.deepState.enqueue(candidate{ +- obj: obj, +- score: stdScore, +- imp: imp, +- }) +- } +- } +- } +- } +- +- if c.opts.unimported { +- if err := c.unimportedPackages(ctx, seen); err != nil { +- return err +- } +- } +- +- if c.inference.typeName.isTypeParam { +- // If we are completing a type param, offer each structural type. +- // This ensures we suggest "[]int" and "[]float64" for a constraint +- // with type union "[]int | []float64". +- if t, ok := c.inference.objType.(*types.Interface); ok { +- if terms, err := typeparams.InterfaceTermSet(t); err == nil { +- for _, term := range terms { +- c.injectType(ctx, term.Type()) +- } +- } +- } +- } else { +- c.injectType(ctx, c.inference.objType) +- } +- +- // Add keyword completion items appropriate in the current context. +- c.addKeywordCompletions() +- +- return nil +-} +- +-// injectType manufactures candidates based on the given type. This is +-// intended for types not discoverable via lexical search, such as +-// composite and/or generic types. For example, if the type is "[]int", +-// this method makes sure you get candidates "[]int{}" and "[]int" +-// (the latter applies when completing a type name). +-func (c *completer) injectType(ctx context.Context, t types.Type) { +- if t == nil { +- return +- } +- +- t = typesinternal.Unpointer(t) +- +- // If we have an expected type and it is _not_ a named type, handle +- // it specially. Non-named types like "[]int" will never be +- // considered via a lexical search, so we need to directly inject +- // them. Also allow generic types since lexical search does not +- // infer instantiated versions of them. +- if pnt, ok := t.(typesinternal.NamedOrAlias); !ok || pnt.TypeParams().Len() > 0 { +- // If our expected type is "[]int", this will add a literal +- // candidate of "[]int{}". +- c.literal(ctx, t, nil) +- +- if _, isBasic := t.(*types.Basic); !isBasic { +- // If we expect a non-basic type name (e.g. "[]int"), hack up +- // a named type whose name is literally "[]int". This allows +- // us to reuse our object based completion machinery. 
+- fakeNamedType := candidate{ +- obj: types.NewTypeName(token.NoPos, nil, types.TypeString(t, c.qual), t), +- score: stdScore, +- } +- // Make sure the type name matches before considering +- // candidate. This cuts down on useless candidates. +- if c.matchingTypeName(&fakeNamedType) { +- c.deepState.enqueue(fakeNamedType) +- } +- } +- } +-} +- +-func (c *completer) unimportedPackages(ctx context.Context, seen map[string]struct{}) error { +- var prefix string +- if c.surrounding != nil { +- prefix = c.surrounding.Prefix() +- } +- +- // Don't suggest unimported packages if we have absolutely nothing +- // to go on. +- if prefix == "" { +- return nil +- } +- +- count := 0 +- +- // Search the forward transitive closure of the workspace. +- all, err := c.snapshot.AllMetadata(ctx) +- if err != nil { +- return err +- } +- pkgNameByPath := make(map[golang.PackagePath]string) +- var paths []string // actually PackagePaths +- for _, mp := range all { +- if mp.ForTest != "" { +- continue // skip all test variants +- } +- if mp.Name == "main" { +- continue // main is non-importable +- } +- if !strings.HasPrefix(string(mp.Name), prefix) { +- continue // not a match +- } +- paths = append(paths, string(mp.PkgPath)) +- pkgNameByPath[mp.PkgPath] = string(mp.Name) +- } +- +- // Rank candidates using goimports' algorithm. +- var relevances map[string]float64 +- if len(paths) != 0 { +- if err := c.snapshot.RunProcessEnvFunc(ctx, func(ctx context.Context, opts *imports.Options) error { +- var err error +- relevances, err = imports.ScoreImportPaths(ctx, opts.Env, paths) +- return err +- }); err != nil { +- return err +- } +- } +- sort.Slice(paths, func(i, j int) bool { +- if relevances[paths[i]] != relevances[paths[j]] { +- return relevances[paths[i]] > relevances[paths[j]] +- } +- +- // Fall back to lexical sort to keep truncated set of candidates +- // in a consistent order. +- return paths[i] < paths[j] +- }) +- +- for _, path := range paths { +- name := pkgNameByPath[golang.PackagePath(path)] +- if _, ok := seen[name]; ok { +- continue +- } +- imp := &importInfo{ +- importPath: path, +- } +- if imports.ImportPathToAssumedName(path) != name { +- imp.name = name +- } +- if count >= maxUnimportedPackageNames { +- return nil +- } +- c.deepState.enqueue(candidate{ +- // Pass an empty *types.Package to disable deep completions. +- obj: types.NewPkgName(0, nil, name, types.NewPackage(path, name)), +- score: unimportedScore(relevances[path]), +- imp: imp, +- }) +- count++ +- } +- +- var mu sync.Mutex +- add := func(pkg imports.ImportFix) { +- if ignoreUnimportedCompletion(&pkg) { +- return +- } +- mu.Lock() +- defer mu.Unlock() +- if _, ok := seen[pkg.IdentName]; ok { +- return +- } +- if _, ok := relevances[pkg.StmtInfo.ImportPath]; ok { +- return +- } +- +- if count >= maxUnimportedPackageNames { +- return +- } +- +- // Do not add the unimported packages to seen, since we can have +- // multiple packages of the same name as completion suggestions, since +- // only one will be chosen. 
+- obj := types.NewPkgName(0, nil, pkg.IdentName, types.NewPackage(pkg.StmtInfo.ImportPath, pkg.IdentName)) +- c.deepState.enqueue(candidate{ +- obj: obj, +- score: unimportedScore(pkg.Relevance), +- imp: &importInfo{ +- importPath: pkg.StmtInfo.ImportPath, +- name: pkg.StmtInfo.Name, +- }, +- }) +- count++ +- } +- +- c.completionCallbacks = append(c.completionCallbacks, func(ctx context.Context, opts *imports.Options) error { +- if err := imports.GetAllCandidates(ctx, add, prefix, c.filename, c.pkg.Types().Name(), opts.Env); err != nil { +- return fmt.Errorf("getting completion candidates: %v", err) +- } +- return nil +- }) +- +- return nil +-} +- +-// alreadyImports reports whether f has an import with the specified path. +-func alreadyImports(f *ast.File, path golang.ImportPath) bool { +- for _, s := range f.Imports { +- if metadata.UnquoteImportPath(s) == path { +- return true +- } +- } +- return false +-} +- +-func (c *completer) inConstDecl() bool { +- for _, n := range c.path { +- if decl, ok := n.(*ast.GenDecl); ok && decl.Tok == token.CONST { +- return true +- } +- } +- return false +-} +- +-// structLiteralFieldName finds completions for struct field names inside a struct literal. +-func (c *completer) structLiteralFieldName(ctx context.Context) error { +- clInfo := c.enclosingCompositeLiteral +- +- // Mark fields of the composite literal that have already been set, +- // except for the current field. +- addedFields := make(map[*types.Var]bool) +- for _, el := range clInfo.cl.Elts { +- if kvExpr, ok := el.(*ast.KeyValueExpr); ok { +- if clInfo.kv == kvExpr { +- continue +- } +- +- if key, ok := kvExpr.Key.(*ast.Ident); ok { +- if used, ok := c.pkg.TypesInfo().Uses[key]; ok { +- if usedVar, ok := used.(*types.Var); ok { +- addedFields[usedVar] = true +- } +- } +- } +- } +- } +- +- // Add struct fields. +- if t, ok := types.Unalias(clInfo.clType).(*types.Struct); ok { +- const deltaScore = 0.0001 +- for i := range t.NumFields() { +- field := t.Field(i) +- if !addedFields[field] { +- c.deepState.enqueue(candidate{ +- obj: field, +- score: highScore - float64(i)*deltaScore, +- }) +- } +- } +- +- // Fall through and add lexical completions if we aren't +- // certain we are in the key part of a key-value pair. +- if !clInfo.maybeInFieldName { +- return nil +- } +- } +- +- return c.lexical(ctx) +-} +- +-// enclosingCompositeLiteral returns information about the composite literal enclosing the +-// position. +-// It returns nil on failure; for example, if there is no type information for a +-// node on path. +-func enclosingCompositeLiteral(path []ast.Node, pos token.Pos, info *types.Info) *compLitInfo { +- for _, n := range path { +- switch n := n.(type) { +- case *ast.CompositeLit: +- // The enclosing node will be a composite literal if the user has just +- // opened the curly brace (e.g. &x{<>) or the completion request is triggered +- // from an already completed composite literal expression (e.g. &x{foo: 1, <>}) +- // +- // The position is not part of the composite literal unless it falls within the +- // curly braces (e.g. "foo.Foo<>Struct{}"). +- if !(n.Lbrace < pos && pos <= n.Rbrace) { +- // Keep searching since we may yet be inside a composite literal. +- // For example "Foo{B: Ba<>{}}". 
+- break +- } +- +- tv, ok := info.Types[n] +- if !ok { +- return nil +- } +- +- clInfo := compLitInfo{ +- cl: n, +- clType: typesinternal.Unpointer(tv.Type).Underlying(), +- } +- +- var ( +- expr ast.Expr +- hasKeys bool +- ) +- for _, el := range n.Elts { +- // Remember the expression that the position falls in, if any. +- if el.Pos() <= pos && pos <= el.End() { +- expr = el +- } +- +- if kv, ok := el.(*ast.KeyValueExpr); ok { +- hasKeys = true +- // If expr == el then we know the position falls in this expression, +- // so also record kv as the enclosing *ast.KeyValueExpr. +- if expr == el { +- clInfo.kv = kv +- break +- } +- } +- } +- +- if clInfo.kv != nil { +- // If in a *ast.KeyValueExpr, we know we are in the key if the position +- // is to the left of the colon (e.g. "Foo{F<>: V}". +- clInfo.inKey = pos <= clInfo.kv.Colon +- } else if hasKeys { +- // If we aren't in a *ast.KeyValueExpr but the composite literal has +- // other *ast.KeyValueExprs, we must be on the key side of a new +- // *ast.KeyValueExpr (e.g. "Foo{F: V, <>}"). +- clInfo.inKey = true +- } else { +- switch clInfo.clType.(type) { +- case *types.Struct: +- if len(n.Elts) == 0 { +- // If the struct literal is empty, next could be a struct field +- // name or an expression (e.g. "Foo{<>}" could become "Foo{F:}" +- // or "Foo{someVar}"). +- clInfo.maybeInFieldName = true +- } else if len(n.Elts) == 1 { +- // If there is one expression and the position is in that expression +- // and the expression is an identifier, we may be writing a field +- // name or an expression (e.g. "Foo{F<>}"). +- _, clInfo.maybeInFieldName = expr.(*ast.Ident) +- } +- case *types.Map: +- // If we aren't in a *ast.KeyValueExpr we must be adding a new key +- // to the map. +- clInfo.inKey = true +- } +- } +- +- return &clInfo +- default: +- if breaksExpectedTypeInference(n, pos) { +- return nil +- } +- } +- } +- +- return nil +-} +- +-// enclosingFunction returns the signature and body of the function +-// enclosing the given position. +-func enclosingFunction(path []ast.Node, info *types.Info) *funcInfo { +- for _, node := range path { +- switch t := node.(type) { +- case *ast.FuncDecl: +- if obj, ok := info.Defs[t.Name]; ok { +- return &funcInfo{ +- sig: obj.Type().(*types.Signature), +- body: t.Body, +- } +- } +- case *ast.FuncLit: +- if typ, ok := info.Types[t]; ok { +- if sig, _ := typ.Type.(*types.Signature); sig == nil { +- // golang/go#49397: it should not be possible, but we somehow arrived +- // here with a non-signature type, most likely due to AST mangling +- // such that node.Type is not a FuncType. +- return nil +- } +- return &funcInfo{ +- sig: typ.Type.(*types.Signature), +- body: t.Body, +- } +- } +- } +- } +- return nil +-} +- +-func expectedCompositeLiteralType(clInfo *compLitInfo, pos token.Pos) types.Type { +- switch t := clInfo.clType.(type) { +- case *types.Slice: +- if clInfo.inKey { +- return types.Typ[types.UntypedInt] +- } +- return t.Elem() +- case *types.Array: +- if clInfo.inKey { +- return types.Typ[types.UntypedInt] +- } +- return t.Elem() +- case *types.Map: +- if clInfo.inKey { +- return t.Key() +- } +- return t.Elem() +- case *types.Struct: +- // If we are completing a key (i.e. field name), there is no expected type. +- if clInfo.inKey { +- return nil +- } +- +- // If we are in a key-value pair, but not in the key, then we must be on the +- // value side. The expected type of the value will be determined from the key. 
+- if clInfo.kv != nil { +- if key, ok := clInfo.kv.Key.(*ast.Ident); ok { +- for i := range t.NumFields() { +- if field := t.Field(i); field.Name() == key.Name { +- return field.Type() +- } +- } +- } +- } else { +- // If we aren't in a key-value pair and aren't in the key, we must be using +- // implicit field names. +- +- // The order of the literal fields must match the order in the struct definition. +- // Find the element that the position belongs to and suggest that field's type. +- if i := exprAtPos(pos, clInfo.cl.Elts); i < t.NumFields() { +- return t.Field(i).Type() +- } +- } +- } +- return nil +-} +- +-// typeMod represents an operator that changes the expected type. +-type typeMod struct { +- mod typeModKind +- arrayLen int64 +-} +- +-type typeModKind int +- +-const ( +- dereference typeModKind = iota // pointer indirection: "*" +- reference // adds level of pointer: "&" for values, "*" for type names +- chanRead // channel read operator: "<-" +- sliceType // make a slice type: "[]" in "[]int" +- arrayType // make an array type: "[2]" in "[2]int" +- invoke // make a function call: "()" in "foo()" +- takeSlice // take slice of array: "[:]" in "foo[:]" +- takeDotDotDot // turn slice into variadic args: "..." in "foo..." +- index // index into slice/array: "[0]" in "foo[0]" +-) +- +-type objKind int +- +-const ( +- kindAny objKind = 0 +- kindArray objKind = 1 << iota +- kindSlice +- kindChan +- kindMap +- kindStruct +- kindString +- kindInt +- kindBool +- kindBytes +- kindPtr +- kindInterface +- kindFloat +- kindComplex +- kindError +- kindStringer +- kindFunc +- kindRange0Func +- kindRange1Func +- kindRange2Func +-) +- +-// penalizedObj represents an object that should be disfavored as a +-// completion candidate. +-type penalizedObj struct { +- // objChain is the full "chain", e.g. "foo.bar().baz" becomes +- // []types.Object{foo, bar, baz}. +- objChain []types.Object +- // penalty is score penalty in the range (0, 1). +- penalty float64 +-} +- +-// candidateInference holds information we have inferred about a type that can be +-// used at the current position. +-type candidateInference struct { +- // objType is the desired type of an object used at the query position. +- objType types.Type +- +- // objKind is a mask of expected kinds of types such as "map", "slice", etc. +- objKind objKind +- +- // variadic is true if we are completing the initial variadic +- // parameter. For example: +- // append([]T{}, <>) // objType=T variadic=true +- // append([]T{}, T{}, <>) // objType=T variadic=false +- variadic bool +- +- // modifiers are prefixes such as "*", "&" or "<-" that influence how +- // a candidate type relates to the expected type. +- modifiers []typeMod +- +- // convertibleTo is a type our candidate type must be convertible to. +- convertibleTo types.Type +- +- // needsExactType is true if the candidate type must be exactly the type of +- // the objType, e.g. an interface rather than it's implementors. +- // +- // This is necessary when objType is derived using reverse type inference: +- // any different (but assignable) type may lead to different type inference, +- // which may no longer be valid. 
+- // +- // For example, consider the following scenario: +- // +- // func f[T any](x T) []T { return []T{x} } +- // +- // var s []any = f(_) +- // +- // Reverse type inference would infer that the type at _ must be 'any', but +- // that does not mean that any object in the lexical scope is valid: the type of +- // the object must be *exactly* any, otherwise type inference will cause the +- // slice assignment to fail. +- needsExactType bool +- +- // typeName holds information about the expected type name at +- // position, if any. +- typeName typeNameInference +- +- // assignees are the types that would receive a function call's +- // results at the position. For example: +- // +- // foo := 123 +- // foo, bar := <> +- // +- // at "<>", the assignees are [int, ]. +- assignees []types.Type +- +- // variadicAssignees is true if we could be completing an inner +- // function call that fills out an outer function call's variadic +- // params. For example: +- // +- // func foo(int, ...string) {} +- // +- // foo(<>) // variadicAssignees=true +- // foo(bar<>) // variadicAssignees=true +- // foo(bar, baz<>) // variadicAssignees=false +- variadicAssignees bool +- +- // penalized holds expressions that should be disfavored as +- // candidates. For example, it tracks expressions already used in a +- // switch statement's other cases. Each expression is tracked using +- // its entire object "chain" allowing differentiation between +- // "a.foo" and "b.foo" when "a" and "b" are the same type. +- penalized []penalizedObj +- +- // objChain contains the chain of objects representing the +- // surrounding *ast.SelectorExpr. For example, if we are completing +- // "foo.bar.ba<>", objChain will contain []types.Object{foo, bar}. +- objChain []types.Object +-} +- +-// typeNameInference holds information about the expected type name at +-// position. +-type typeNameInference struct { +- // wantTypeName is true if we expect the name of a type. +- wantTypeName bool +- +- // modifiers are prefixes such as "*", "&" or "<-" that influence how +- // a candidate type relates to the expected type. +- modifiers []typeMod +- +- // assertableFrom is a type that must be assertable to our candidate type. +- assertableFrom types.Type +- +- // wantComparable is true if we want a comparable type. +- wantComparable bool +- +- // seenTypeSwitchCases tracks types that have already been used by +- // the containing type switch. +- seenTypeSwitchCases []types.Type +- +- // compLitType is true if we are completing a composite literal type +- // name, e.g "foo<>{}". +- compLitType bool +- +- // isTypeParam is true if we are completing a type instantiation parameter +- isTypeParam bool +-} +- +-// expectedCandidate returns information about the expected candidate +-// for an expression at the query position. +-func expectedCandidate(ctx context.Context, c *completer) (inf candidateInference) { +- inf.typeName = expectTypeName(c) +- +- if c.enclosingCompositeLiteral != nil { +- inf.objType = expectedCompositeLiteralType(c.enclosingCompositeLiteral, c.pos) +- } +- +-Nodes: +- for i, node := range c.path { +- switch node := node.(type) { +- case *ast.BinaryExpr: +- // Determine if query position comes from left or right of op. +- e := node.X +- if c.pos < node.OpPos { +- e = node.Y +- } +- if tv, ok := c.pkg.TypesInfo().Types[e]; ok { +- switch node.Op { +- case token.LAND, token.LOR: +- // Don't infer "bool" type for "&&" or "||". Often you want +- // to compose a boolean expression from non-boolean +- // candidates. 
+- default: +- inf.objType = tv.Type +- } +- break Nodes +- } +- case *ast.AssignStmt: +- objType, assignees := expectedAssignStmtTypes(c.pkg, node, c.pos) +- inf.objType = objType +- inf.assignees = assignees +- return inf +- case *ast.ValueSpec: +- inf.objType = expectedValueSpecType(c.pkg, node, c.pos) +- return +- case *ast.ReturnStmt: +- if c.enclosingFunc != nil { +- inf.objType = expectedReturnStmtType(c.enclosingFunc.sig, node, c.pos) +- } +- return inf +- case *ast.SendStmt: +- if typ := expectedSendStmtType(c.pkg, node, c.pos); typ != nil { +- inf.objType = typ +- } +- return inf +- case *ast.CallExpr: +- // Only consider CallExpr args if position falls between parens. +- if node.Lparen < c.pos && c.pos <= node.Rparen { +- // For type conversions like "int64(foo)" we can only infer our +- // desired type is convertible to int64. +- if typ := typeConversion(node, c.pkg.TypesInfo()); typ != nil { +- inf.convertibleTo = typ +- break Nodes +- } +- +- if sig, ok := c.pkg.TypesInfo().Types[node.Fun].Type.(*types.Signature); ok { +- // Out of bounds arguments get no inference completion. +- if !sig.Variadic() && exprAtPos(c.pos, node.Args) >= sig.Params().Len() { +- return inf +- } +- +- // Inference is necessary only when function results are generic. +- var free typeparams.Free +- if free.Has(sig.Results()) { +- targs := c.getTypeArgs(node) +- res := inferExpectedResultTypes(c, i) +- substs := reverseInferTypeArgs(sig, targs, res) +- inst := instantiate(sig, substs) +- if inst != nil { +- // TODO(jacobz): If partial signature instantiation becomes possible, +- // make needsExactType only true if necessary. +- // Currently, ambiguous cases always resolve to a conversion expression +- // wrapping the completion, which is occasionally superfluous. +- inf.needsExactType = true +- sig = inst +- } +- } +- +- inf = c.expectedCallParamType(inf, node, sig) +- } +- +- if funIdent, ok := node.Fun.(*ast.Ident); ok { +- obj := c.pkg.TypesInfo().ObjectOf(funIdent) +- +- if obj != nil && obj.Parent() == types.Universe { +- // Defer call to builtinArgType so we can provide it the +- // inferred type from its parent node. +- defer func() { +- inf = c.builtinArgType(obj, node, inf) +- inf.objKind = c.builtinArgKind(ctx, obj, node) +- }() +- +- // The expected type of builtin arguments like append() is +- // the expected type of the builtin call itself. For +- // example: +- // +- // var foo []int = append(<>) +- // +- // To find the expected type at <> we "skip" the append() +- // node and get the expected type one level up, which is +- // []int. +- continue Nodes +- } +- } +- +- return inf +- } +- case *ast.CaseClause: +- if swtch, ok := findSwitchStmt(c.path[i+1:], c.pos, node).(*ast.SwitchStmt); ok { +- if tv, ok := c.pkg.TypesInfo().Types[swtch.Tag]; ok { +- inf.objType = tv.Type +- +- // Record which objects have already been used in the case +- // statements so we don't suggest them again. +- for _, cc := range swtch.Body.List { +- for _, caseExpr := range cc.(*ast.CaseClause).List { +- // Don't record the expression we are currently completing. +- if caseExpr.Pos() < c.pos && c.pos <= caseExpr.End() { +- continue +- } +- +- if objs := objChain(c.pkg.TypesInfo(), caseExpr); len(objs) > 0 { +- inf.penalized = append(inf.penalized, penalizedObj{objChain: objs, penalty: 0.1}) +- } +- } +- } +- } +- } +- return inf +- case *ast.SliceExpr: +- // Make sure position falls within the brackets (e.g. "foo[a:<>]"). 
+- if node.Lbrack < c.pos && c.pos <= node.Rbrack { +- inf.objType = types.Typ[types.UntypedInt] +- } +- return inf +- case *ast.IndexExpr: +- // Make sure position falls within the brackets (e.g. "foo[<>]"). +- if node.Lbrack < c.pos && c.pos <= node.Rbrack { +- if tv, ok := c.pkg.TypesInfo().Types[node.X]; ok { +- switch t := tv.Type.Underlying().(type) { +- case *types.Map: +- inf.objType = t.Key() +- case *types.Slice, *types.Array: +- inf.objType = types.Typ[types.UntypedInt] +- } +- +- if ct := expectedConstraint(tv.Type, 0); ct != nil { +- inf.objType = ct +- inf.typeName.wantTypeName = true +- inf.typeName.isTypeParam = true +- if typ := c.inferExpectedTypeArg(i+1, 0); typ != nil { +- inf.objType = typ +- } +- } +- } +- } +- return inf +- case *ast.IndexListExpr: +- if node.Lbrack < c.pos && c.pos <= node.Rbrack { +- if tv, ok := c.pkg.TypesInfo().Types[node.X]; ok { +- typeParamIdx := exprAtPos(c.pos, node.Indices) +- if ct := expectedConstraint(tv.Type, typeParamIdx); ct != nil { +- inf.objType = ct +- inf.typeName.wantTypeName = true +- inf.typeName.isTypeParam = true +- if typ := c.inferExpectedTypeArg(i+1, typeParamIdx); typ != nil { +- inf.objType = typ +- } +- } +- } +- } +- return inf +- case *ast.RangeStmt: +- if internalastutil.NodeContains(node.X, c.pos) { +- inf.objKind |= kindSlice | kindArray | kindMap | kindString +- if node.Key == nil && node.Value == nil { +- inf.objKind |= kindRange0Func | kindRange1Func | kindRange2Func +- } else if node.Value == nil { +- inf.objKind |= kindChan | kindRange1Func | kindRange2Func +- } else { +- inf.objKind |= kindRange2Func +- } +- } +- return inf +- case *ast.StarExpr: +- inf.modifiers = append(inf.modifiers, typeMod{mod: dereference}) +- case *ast.UnaryExpr: +- switch node.Op { +- case token.AND: +- inf.modifiers = append(inf.modifiers, typeMod{mod: reference}) +- case token.ARROW: +- inf.modifiers = append(inf.modifiers, typeMod{mod: chanRead}) +- } +- case *ast.DeferStmt, *ast.GoStmt: +- inf.objKind |= kindFunc +- return inf +- default: +- if breaksExpectedTypeInference(node, c.pos) { +- return inf +- } +- } +- } +- +- return inf +-} +- +-// inferExpectedResultTypes takes the index of a call expression within the completion +-// path and uses its surroundings to infer the expected result tuple of the call's signature. +-// Returns the signature result tuple as a slice, or nil if reverse type inference fails. +-// +-// # For example +-// +-// func generic[T any, U any](a T, b U) (T, U) { ... } +-// +-// var x TypeA +-// var y TypeB +-// x, y := generic(, ) +-// +-// inferExpectedResultTypes can determine that the expected result type of the function is (TypeA, TypeB) +-func inferExpectedResultTypes(c *completer, callNodeIdx int) []types.Type { +- callNode, ok := c.path[callNodeIdx].(*ast.CallExpr) +- if !ok { +- bug.Reportf("inferExpectedResultTypes given callNodeIndex: %v which is not a ast.CallExpr\n", callNodeIdx) +- return nil +- } +- +- if len(c.path) <= callNodeIdx+1 { +- return nil +- } +- +- var expectedResults []types.Type +- +- // Check the parents of the call node to extract the expected result types of the call signature. +- // Currently reverse inferences are only supported with the following parent expressions, +- // however this list isn't exhaustive. 
+- switch node := c.path[callNodeIdx+1].(type) { +- case *ast.KeyValueExpr: +- enclosingCompositeLiteral := enclosingCompositeLiteral(c.path[callNodeIdx:], callNode.Pos(), c.pkg.TypesInfo()) +- if enclosingCompositeLiteral != nil && !wantStructFieldCompletions(enclosingCompositeLiteral) { +- expectedResults = append(expectedResults, expectedCompositeLiteralType(enclosingCompositeLiteral, callNode.Pos())) +- } +- case *ast.AssignStmt: +- objType, assignees := expectedAssignStmtTypes(c.pkg, node, c.pos) +- if len(assignees) > 0 { +- return assignees +- } else if objType != nil { +- expectedResults = append(expectedResults, objType) +- } +- case *ast.ValueSpec: +- if resultType := expectedValueSpecType(c.pkg, node, c.pos); resultType != nil { +- expectedResults = append(expectedResults, resultType) +- } +- case *ast.SendStmt: +- if resultType := expectedSendStmtType(c.pkg, node, c.pos); resultType != nil { +- expectedResults = append(expectedResults, resultType) +- } +- case *ast.ReturnStmt: +- if c.enclosingFunc == nil { +- return nil +- } +- +- // As a special case for reverse call inference in +- // +- // return foo() +- // +- // Pull the result type from the enclosing function +- if exprAtPos(c.pos, node.Results) == 0 { +- if callSig := c.pkg.TypesInfo().Types[callNode.Fun].Type.(*types.Signature); callSig != nil { +- enclosingResults := c.enclosingFunc.sig.Results() +- if callSig.Results().Len() == enclosingResults.Len() { +- expectedResults = make([]types.Type, enclosingResults.Len()) +- for i := range enclosingResults.Len() { +- expectedResults[i] = enclosingResults.At(i).Type() +- } +- return expectedResults +- } +- } +- } +- +- if resultType := expectedReturnStmtType(c.enclosingFunc.sig, node, c.pos); resultType != nil { +- expectedResults = append(expectedResults, resultType) +- } +- case *ast.CallExpr: +- // TODO(jacobz): This is a difficult case because the normal CallExpr candidateInference +- // leans on control flow which is inaccessible in this helper function. +- // It would probably take a significant refactor to a recursive solution to make this case +- // work cleanly. For now it's unimplemented. +- } +- return expectedResults +-} +- +-// expectedSendStmtType return the expected type at the position. +-// Returns nil if unknown. +-func expectedSendStmtType(pkg *cache.Package, node *ast.SendStmt, pos token.Pos) types.Type { +- // Make sure we are on right side of arrow (e.g. "foo <- <>"). +- if pos > node.Arrow+1 { +- if tv, ok := pkg.TypesInfo().Types[node.Chan]; ok { +- if ch, ok := tv.Type.Underlying().(*types.Chan); ok { +- return ch.Elem() +- } +- } +- } +- return nil +-} +- +-// expectedValueSpecType returns the expected type of a ValueSpec at the query +-// position. +-func expectedValueSpecType(pkg *cache.Package, node *ast.ValueSpec, pos token.Pos) types.Type { +- if node.Type != nil && pos > node.Type.End() { +- return pkg.TypesInfo().TypeOf(node.Type) +- } +- return nil +-} +- +-// expectedAssignStmtTypes analyzes the provided assignStmt, and checks +-// to see if the provided pos is within a RHS expression. If so, it report +-// the expected type of that expression, and the LHS type(s) to which it +-// is being assigned. +-func expectedAssignStmtTypes(pkg *cache.Package, node *ast.AssignStmt, pos token.Pos) (objType types.Type, assignees []types.Type) { +- // Only rank completions if you are on the right side of the token. 
+- if pos > node.TokPos { +- i := exprAtPos(pos, node.Rhs) +- if i >= len(node.Lhs) { +- i = len(node.Lhs) - 1 +- } +- if tv, ok := pkg.TypesInfo().Types[node.Lhs[i]]; ok { +- objType = tv.Type +- } +- +- // If we have a single expression on the RHS, record the LHS +- // assignees so we can favor multi-return function calls with +- // matching result values. +- if len(node.Rhs) <= 1 { +- for _, lhs := range node.Lhs { +- assignees = append(assignees, pkg.TypesInfo().TypeOf(lhs)) +- } +- } else { +- // Otherwise, record our single assignee, even if its type is +- // not available. We use this info to downrank functions +- // with the wrong number of result values. +- assignees = append(assignees, pkg.TypesInfo().TypeOf(node.Lhs[i])) +- } +- } +- return objType, assignees +-} +- +-// expectedReturnStmtType returns the expected type of a return statement. +-// Returns nil if enclosingSig is nil. +-func expectedReturnStmtType(enclosingSig *types.Signature, node *ast.ReturnStmt, pos token.Pos) types.Type { +- if enclosingSig != nil { +- if resultIdx := exprAtPos(pos, node.Results); resultIdx < enclosingSig.Results().Len() { +- return enclosingSig.Results().At(resultIdx).Type() +- } +- } +- return nil +-} +- +-// Returns the number of type arguments in a callExpr +-func (c *completer) getTypeArgs(callExpr *ast.CallExpr) []types.Type { +- var targs []types.Type +- switch fun := callExpr.Fun.(type) { +- case *ast.IndexListExpr: +- for i := range fun.Indices { +- if typ, ok := c.pkg.TypesInfo().Types[fun.Indices[i]]; ok && typeIsValid(typ.Type) { +- targs = append(targs, typ.Type) +- } +- } +- case *ast.IndexExpr: +- if typ, ok := c.pkg.TypesInfo().Types[fun.Index]; ok && typeIsValid(typ.Type) { +- targs = []types.Type{typ.Type} +- } +- } +- return targs +-} +- +-// reverseInferTypeArgs takes a generic signature, a list of passed type arguments, and the expected concrete return types +-// inferred from the signature's call site. If possible, it returns a list of types that could be used as the type arguments +-// to the signature. If not possible, it returns nil. +-// +-// Does not panic if any of the arguments are nil. +-func reverseInferTypeArgs(sig *types.Signature, typeArgs []types.Type, expectedResults []types.Type) []types.Type { +- if len(expectedResults) == 0 || sig == nil || sig.TypeParams().Len() == 0 || sig.Results().Len() != len(expectedResults) { +- return nil +- } +- +- tparams := make([]*types.TypeParam, sig.TypeParams().Len()) +- for i := range sig.TypeParams().Len() { +- tparams[i] = sig.TypeParams().At(i) +- } +- +- for i := len(typeArgs); i < sig.TypeParams().Len(); i++ { +- typeArgs = append(typeArgs, nil) +- } +- +- u := newUnifier(tparams, typeArgs) +- for i, assignee := range expectedResults { +- // Unify does not check the constraints of the type parameters. +- // Checks must be applied after. +- if !u.unify(sig.Results().At(i).Type(), assignee, unifyModeExact) { +- return nil +- } +- } +- +- substs := make([]types.Type, sig.TypeParams().Len()) +- for i := range sig.TypeParams().Len() { +- if sub := u.handles[sig.TypeParams().At(i)]; sub != nil && *sub != nil { +- // Ensure the inferred subst is assignable to the type parameter's constraint. +- if !assignableTo(*sub, sig.TypeParams().At(i).Constraint()) { +- return nil +- } +- substs[i] = *sub +- } +- } +- return substs +-} +- +-// inferExpectedTypeArg gives a type param candidateInference based on the surroundings of its call site. +-// If successful, the inf parameter is returned with only it's objType field updated. 
+-// +-// callNodeIdx is the index within the completion path of the type parameter's parent call expression. +-// typeParamIdx is the index of the type parameter at the completion pos. +-func (c *completer) inferExpectedTypeArg(callNodeIdx int, typeParamIdx int) types.Type { +- if len(c.path) <= callNodeIdx { +- return nil +- } +- +- callNode, ok := c.path[callNodeIdx].(*ast.CallExpr) +- if !ok { +- return nil +- } +- sig, ok := c.pkg.TypesInfo().Types[callNode.Fun].Type.(*types.Signature) +- if !ok { +- return nil +- } +- +- // Infer the type parameters in a function call based on context +- expectedResults := inferExpectedResultTypes(c, callNodeIdx) +- if typeParamIdx < 0 || typeParamIdx >= sig.TypeParams().Len() { +- return nil +- } +- substs := reverseInferTypeArgs(sig, nil, expectedResults) +- if substs == nil || substs[typeParamIdx] == nil { +- return nil +- } +- +- return substs[typeParamIdx] +-} +- +-// Instantiates a signature with a set of type parameters. +-// Wrapper around types.Instantiate but bad arguments won't cause a panic. +-func instantiate(sig *types.Signature, substs []types.Type) *types.Signature { +- if substs == nil || sig == nil || len(substs) != sig.TypeParams().Len() { +- return nil +- } +- +- for i := range substs { +- if substs[i] == nil { +- substs[i] = sig.TypeParams().At(i) +- } +- } +- +- if inst, err := types.Instantiate(nil, sig, substs, true); err == nil { +- if inst, ok := inst.(*types.Signature); ok { +- return inst +- } +- } +- +- return nil +-} +- +-func (c *completer) expectedCallParamType(inf candidateInference, node *ast.CallExpr, sig *types.Signature) candidateInference { +- numParams := sig.Params().Len() +- if numParams == 0 { +- return inf +- } +- +- exprIdx := exprAtPos(c.pos, node.Args) +- +- // If we have one or zero arg expressions, we may be +- // completing to a function call that returns multiple +- // values, in turn getting passed in to the surrounding +- // call. Record the assignees so we can favor function +- // calls that return matching values. +- if len(node.Args) <= 1 && exprIdx == 0 { +- for i := range sig.Params().Len() { +- inf.assignees = append(inf.assignees, sig.Params().At(i).Type()) +- } +- +- // Record that we may be completing into variadic parameters. +- inf.variadicAssignees = sig.Variadic() +- } +- +- // Make sure not to run past the end of expected parameters. +- if exprIdx >= numParams { +- inf.objType = sig.Params().At(numParams - 1).Type() +- } else { +- inf.objType = sig.Params().At(exprIdx).Type() +- } +- +- if sig.Variadic() && exprIdx >= (numParams-1) { +- // If we are completing a variadic param, deslice the variadic type. +- inf.objType = deslice(inf.objType) +- // Record whether we are completing the initial variadic param. +- inf.variadic = exprIdx == numParams-1 && len(node.Args) <= numParams +- +- // Check if we can infer object kind from printf verb. +- inf.objKind |= printfArgKind(c.pkg.TypesInfo(), node, exprIdx) +- } +- +- // If our expected type is an uninstantiated generic type param, +- // swap to the constraint which will do a decent job filtering +- // candidates. 
+- if tp, _ := inf.objType.(*types.TypeParam); tp != nil { +- inf.objType = tp.Constraint() +- } +- +- return inf +-} +- +-func expectedConstraint(t types.Type, idx int) types.Type { +- var tp *types.TypeParamList +- if pnt, ok := t.(typesinternal.NamedOrAlias); ok { +- tp = pnt.TypeParams() +- } else if sig, _ := t.Underlying().(*types.Signature); sig != nil { +- tp = sig.TypeParams() +- } +- if tp == nil || idx >= tp.Len() { +- return nil +- } +- return tp.At(idx).Constraint() +-} +- +-// objChain decomposes e into a chain of objects if possible. For +-// example, "foo.bar().baz" will yield []types.Object{foo, bar, baz}. +-// If any part can't be turned into an object, return nil. +-func objChain(info *types.Info, e ast.Expr) []types.Object { +- var objs []types.Object +- +- for e != nil { +- switch n := e.(type) { +- case *ast.Ident: +- obj := info.ObjectOf(n) +- if obj == nil { +- return nil +- } +- objs = append(objs, obj) +- e = nil +- case *ast.SelectorExpr: +- obj := info.ObjectOf(n.Sel) +- if obj == nil { +- return nil +- } +- objs = append(objs, obj) +- e = n.X +- case *ast.CallExpr: +- if len(n.Args) > 0 { +- return nil +- } +- e = n.Fun +- default: +- return nil +- } +- } +- +- // Reverse order so the layout matches the syntactic order. +- slices.Reverse(objs) +- +- return objs +-} +- +-// applyTypeModifiers applies the list of type modifiers to a type. +-// It returns nil if the modifiers could not be applied. +-func (ci candidateInference) applyTypeModifiers(typ types.Type, addressable bool) types.Type { +- for _, mod := range ci.modifiers { +- switch mod.mod { +- case dereference: +- // For every "*" indirection operator, remove a pointer layer +- // from candidate type. +- if ptr, ok := typ.Underlying().(*types.Pointer); ok { +- typ = ptr.Elem() +- } else { +- return nil +- } +- case reference: +- // For every "&" address operator, add another pointer layer to +- // candidate type, if the candidate is addressable. +- if addressable { +- typ = types.NewPointer(typ) +- } else { +- return nil +- } +- case chanRead: +- // For every "<-" operator, remove a layer of channelness. +- if ch, ok := typ.(*types.Chan); ok { +- typ = ch.Elem() +- } else { +- return nil +- } +- } +- } +- +- return typ +-} +- +-// applyTypeNameModifiers applies the list of type modifiers to a type name. +-func (ci candidateInference) applyTypeNameModifiers(typ types.Type) types.Type { +- for _, mod := range ci.typeName.modifiers { +- switch mod.mod { +- case reference: +- typ = types.NewPointer(typ) +- case arrayType: +- typ = types.NewArray(typ, mod.arrayLen) +- case sliceType: +- typ = types.NewSlice(typ) +- } +- } +- return typ +-} +- +-// matchesVariadic returns true if we are completing a variadic +-// parameter and candType is a compatible slice type. +-func (ci candidateInference) matchesVariadic(candType types.Type) bool { +- return ci.variadic && ci.objType != nil && assignableTo(candType, types.NewSlice(ci.objType)) +-} +- +-// findSwitchStmt returns an *ast.CaseClause's corresponding *ast.SwitchStmt or +-// *ast.TypeSwitchStmt. path should start from the case clause's first ancestor. +-func findSwitchStmt(path []ast.Node, pos token.Pos, c *ast.CaseClause) ast.Stmt { +- // Make sure position falls within a "case <>:" clause. +- if exprAtPos(pos, c.List) >= len(c.List) { +- return nil +- } +- // A case clause is always nested within a block statement in a switch statement. 
+- if len(path) < 2 { +- return nil +- } +- if _, ok := path[0].(*ast.BlockStmt); !ok { +- return nil +- } +- switch s := path[1].(type) { +- case *ast.SwitchStmt: +- return s +- case *ast.TypeSwitchStmt: +- return s +- default: +- return nil +- } +-} +- +-// breaksExpectedTypeInference reports if an expression node's type is unrelated +-// to its child expression node types. For example, "Foo{Bar: x.Baz(<>)}" should +-// expect a function argument, not a composite literal value. +-func breaksExpectedTypeInference(n ast.Node, pos token.Pos) bool { +- switch n := n.(type) { +- case *ast.CompositeLit: +- // Doesn't break inference if pos is in type name. +- // For example: "Foo<>{Bar: 123}" +- return n.Type == nil || !internalastutil.NodeContains(n.Type, pos) +- case *ast.CallExpr: +- // Doesn't break inference if pos is in func name. +- // For example: "Foo<>(123)" +- return !internalastutil.NodeContains(n.Fun, pos) +- case *ast.FuncLit, *ast.IndexExpr, *ast.SliceExpr: +- return true +- default: +- return false +- } +-} +- +-// expectTypeName returns information about the expected type name at position. +-func expectTypeName(c *completer) typeNameInference { +- var inf typeNameInference +- +-Nodes: +- for i, p := range c.path { +- switch n := p.(type) { +- case *ast.FieldList: +- // Expect a type name if pos is in a FieldList. This applies to +- // FuncType params/results, FuncDecl receiver, StructType, and +- // InterfaceType. We don't need to worry about the field name +- // because completion bails out early if pos is in an *ast.Ident +- // that defines an object. +- inf.wantTypeName = true +- break Nodes +- case *ast.CaseClause: +- // Expect type names in type switch case clauses. +- if swtch, ok := findSwitchStmt(c.path[i+1:], c.pos, n).(*ast.TypeSwitchStmt); ok { +- // The case clause types must be assertable from the type switch parameter. +- ast.Inspect(swtch.Assign, func(n ast.Node) bool { +- if ta, ok := n.(*ast.TypeAssertExpr); ok { +- inf.assertableFrom = c.pkg.TypesInfo().TypeOf(ta.X) +- return false +- } +- return true +- }) +- inf.wantTypeName = true +- +- // Track the types that have already been used in this +- // switch's case statements so we don't recommend them. +- for _, e := range swtch.Body.List { +- for _, typeExpr := range e.(*ast.CaseClause).List { +- // Skip if type expression contains pos. We don't want to +- // count it as already used if the user is completing it. +- if typeExpr.Pos() < c.pos && c.pos <= typeExpr.End() { +- continue +- } +- +- if t := c.pkg.TypesInfo().TypeOf(typeExpr); t != nil { +- inf.seenTypeSwitchCases = append(inf.seenTypeSwitchCases, t) +- } +- } +- } +- +- break Nodes +- } +- return typeNameInference{} +- case *ast.TypeAssertExpr: +- // Expect type names in type assert expressions. +- if n.Lparen < c.pos && c.pos <= n.Rparen { +- // The type in parens must be assertable from the expression type. +- inf.assertableFrom = c.pkg.TypesInfo().TypeOf(n.X) +- inf.wantTypeName = true +- break Nodes +- } +- return typeNameInference{} +- case *ast.StarExpr: +- inf.modifiers = append(inf.modifiers, typeMod{mod: reference}) +- case *ast.CompositeLit: +- // We want a type name if position is in the "Type" part of a +- // composite literal (e.g. "Foo<>{}"). +- if n.Type != nil && n.Type.Pos() <= c.pos && c.pos <= n.Type.End() { +- inf.wantTypeName = true +- inf.compLitType = true +- +- if i < len(c.path)-1 { +- // Track preceding "&" operator. 
Technically it applies to +- // the composite literal and not the type name, but if +- // affects our type completion nonetheless. +- if u, ok := c.path[i+1].(*ast.UnaryExpr); ok && u.Op == token.AND { +- inf.modifiers = append(inf.modifiers, typeMod{mod: reference}) +- } +- } +- } +- break Nodes +- case *ast.ArrayType: +- // If we are inside the "Elt" part of an array type, we want a type name. +- if n.Elt.Pos() <= c.pos && c.pos <= n.Elt.End() { +- inf.wantTypeName = true +- if n.Len == nil { +- // No "Len" expression means a slice type. +- inf.modifiers = append(inf.modifiers, typeMod{mod: sliceType}) +- } else { +- // Try to get the array type using the constant value of "Len". +- tv, ok := c.pkg.TypesInfo().Types[n.Len] +- if ok && tv.Value != nil && tv.Value.Kind() == constant.Int { +- if arrayLen, ok := constant.Int64Val(tv.Value); ok { +- inf.modifiers = append(inf.modifiers, typeMod{mod: arrayType, arrayLen: arrayLen}) +- } +- } +- } +- +- // ArrayTypes can be nested, so keep going if our parent is an +- // ArrayType. +- if i < len(c.path)-1 { +- if _, ok := c.path[i+1].(*ast.ArrayType); ok { +- continue Nodes +- } +- } +- +- break Nodes +- } +- case *ast.MapType: +- inf.wantTypeName = true +- if n.Key != nil { +- inf.wantComparable = internalastutil.NodeContains(n.Key, c.pos) +- } else { +- // If the key is empty, assume we are completing the key if +- // pos is directly after the "map[". +- inf.wantComparable = c.pos == n.Pos()+token.Pos(len("map[")) +- } +- break Nodes +- case *ast.ValueSpec: +- inf.wantTypeName = n.Type != nil && internalastutil.NodeContains(n.Type, c.pos) +- break Nodes +- case *ast.TypeSpec: +- inf.wantTypeName = internalastutil.NodeContains(n.Type, c.pos) +- default: +- if breaksExpectedTypeInference(p, c.pos) { +- return typeNameInference{} +- } +- } +- } +- +- return inf +-} +- +-func (c *completer) fakeObj(T types.Type) *types.Var { +- return types.NewVar(token.NoPos, c.pkg.Types(), "", T) +-} +- +-// derivableTypes iterates types you can derive from t. For example, +-// from "foo" we might derive "&foo", and "foo()". +-func derivableTypes(t types.Type, addressable bool, f func(t types.Type, addressable bool, mod typeModKind) bool) bool { +- switch t := t.Underlying().(type) { +- case *types.Signature: +- // If t is a func type with a single result, offer the result type. +- if t.Results().Len() == 1 && f(t.Results().At(0).Type(), false, invoke) { +- return true +- } +- case *types.Array: +- if f(t.Elem(), true, index) { +- return true +- } +- // Try converting array to slice. +- if f(types.NewSlice(t.Elem()), false, takeSlice) { +- return true +- } +- case *types.Pointer: +- if f(t.Elem(), false, dereference) { +- return true +- } +- case *types.Slice: +- if f(t.Elem(), true, index) { +- return true +- } +- case *types.Map: +- if f(t.Elem(), false, index) { +- return true +- } +- case *types.Chan: +- if f(t.Elem(), false, chanRead) { +- return true +- } +- } +- +- // Check if c is addressable and a pointer to c matches our type inference. +- if addressable && f(types.NewPointer(t), false, reference) { +- return true +- } +- +- return false +-} +- +-// anyCandType reports whether f returns true for any candidate type +-// derivable from c. It searches up to three levels of type +-// modification. For example, given "foo" we could discover "***foo" +-// or "*foo()". 
+-func (c *candidate) anyCandType(f func(t types.Type, addressable bool) bool) bool { +- if c.obj == nil || c.obj.Type() == nil { +- return false +- } +- +- const maxDepth = 3 +- +- var searchTypes func(t types.Type, addressable bool, mods []typeModKind) bool +- searchTypes = func(t types.Type, addressable bool, mods []typeModKind) bool { +- if f(t, addressable) { +- if len(mods) > 0 { +- newMods := make([]typeModKind, len(mods)+len(c.mods)) +- copy(newMods, mods) +- copy(newMods[len(mods):], c.mods) +- c.mods = newMods +- } +- return true +- } +- +- if len(mods) == maxDepth { +- return false +- } +- +- return derivableTypes(t, addressable, func(t types.Type, addressable bool, mod typeModKind) bool { +- return searchTypes(t, addressable, append(mods, mod)) +- }) +- } +- +- return searchTypes(c.obj.Type(), c.addressable, make([]typeModKind, 0, maxDepth)) +-} +- +-// matchingCandidate reports whether cand matches our type inferences. +-// It mutates cand's score in certain cases. +-func (c *completer) matchingCandidate(cand *candidate) bool { +- if c.completionContext.commentCompletion { +- return false +- } +- +- // Bail out early if we are completing a field name in a composite literal. +- if v, ok := cand.obj.(*types.Var); ok && v.IsField() && wantStructFieldCompletions(c.enclosingCompositeLiteral) { +- return true +- } +- +- if isTypeName(cand.obj) { +- return c.matchingTypeName(cand) +- } else if c.wantTypeName() { +- // If we want a type, a non-type object never matches. +- return false +- } +- +- if c.inference.candTypeMatches(cand) { +- return true +- } +- +- candType := cand.obj.Type() +- if candType == nil { +- return false +- } +- +- if sig, ok := candType.Underlying().(*types.Signature); ok { +- if c.inference.assigneesMatch(cand, sig) { +- // Invoke the candidate if its results are multi-assignable. +- cand.mods = append(cand.mods, invoke) +- return true +- } +- } +- +- // Default to invoking *types.Func candidates. This is so function +- // completions in an empty statement (or other cases with no expected type) +- // are invoked by default. +- if isFunc(cand.obj) { +- cand.mods = append(cand.mods, invoke) +- } +- +- return false +-} +- +-// candTypeMatches reports whether cand makes a good completion +-// candidate given the candidate inference. cand's score may be +-// mutated to downrank the candidate in certain situations. +-func (ci *candidateInference) candTypeMatches(cand *candidate) bool { +- var ( +- expTypes = make([]types.Type, 0, 2) +- variadicType types.Type +- ) +- if ci.objType != nil { +- expTypes = append(expTypes, ci.objType) +- +- if ci.variadic { +- variadicType = types.NewSlice(ci.objType) +- expTypes = append(expTypes, variadicType) +- } +- } +- +- return cand.anyCandType(func(candType types.Type, addressable bool) bool { +- // Take into account any type modifiers on the expected type. +- candType = ci.applyTypeModifiers(candType, addressable) +- if candType == nil { +- return false +- } +- +- if ci.convertibleTo != nil && convertibleTo(candType, ci.convertibleTo) { +- return true +- } +- +- for _, expType := range expTypes { +- if isEmptyInterface(expType) { +- // If any type matches the expected type, fall back to other +- // considerations below. +- // +- // TODO(rfindley): can this be expressed via scoring, rather than a boolean? +- // Why is it the case that we break ties for the empty interface, but +- // not for other expected types that may be satisfied by a lot of +- // types, such as fmt.Stringer? 
+- continue +- } +- +- matches := ci.typeMatches(expType, candType) +- if !matches { +- // If candType doesn't otherwise match, consider if we can +- // convert candType directly to expType. +- if considerTypeConversion(candType, expType, cand.path) { +- cand.convertTo = expType +- // Give a major score penalty so we always prefer directly +- // assignable candidates, all else equal. +- cand.score *= 0.5 +- return true +- } +- +- continue +- } +- +- if expType == variadicType { +- cand.mods = append(cand.mods, takeDotDotDot) +- } +- +- // Candidate matches, but isn't exactly identical to the expected type. +- // Apply a conversion to allow it to match. +- if ci.needsExactType && !types.Identical(candType, expType) { +- cand.convertTo = expType +- // Ranks barely lower if it needs a conversion, even though it's perfectly valid. +- cand.score *= 0.95 +- } +- +- // Lower candidate score for untyped conversions. This avoids +- // ranking untyped constants above candidates with an exact type +- // match. Don't lower score of builtin constants, e.g. "true". +- if isUntyped(candType) && !types.Identical(candType, expType) && cand.obj.Parent() != types.Universe { +- // Bigger penalty for deep completions into other packages to +- // avoid random constants from other packages popping up all +- // the time. +- if len(cand.path) > 0 && isPkgName(cand.path[0]) { +- cand.score *= 0.5 +- } else { +- cand.score *= 0.75 +- } +- } +- +- return true +- } +- +- // If we don't have a specific expected type, fall back to coarser +- // object kind checks. +- if ci.objType == nil || isEmptyInterface(ci.objType) { +- // If we were able to apply type modifiers to our candidate type, +- // count that as a match. For example: +- // +- // var foo chan int +- // <-fo<> +- // +- // We were able to apply the "<-" type modifier to "foo", so "foo" +- // matches. +- if len(ci.modifiers) > 0 { +- return true +- } +- +- // If we didn't have an exact type match, check if our object kind +- // matches. +- if ci.kindMatches(candType) { +- if ci.objKind == kindFunc { +- cand.mods = append(cand.mods, invoke) +- } +- return true +- } +- } +- +- return false +- }) +-} +- +-// considerTypeConversion returns true if we should offer a completion +-// automatically converting "from" to "to". +-func considerTypeConversion(from, to types.Type, path []types.Object) bool { +- // Don't offer to convert deep completions from other packages. +- // Otherwise there are many random package level consts/vars that +- // pop up as candidates all the time. +- if len(path) > 0 && isPkgName(path[0]) { +- return false +- } +- +- if _, ok := from.(*types.TypeParam); ok { +- return false +- } +- +- if !convertibleTo(from, to) { +- return false +- } +- +- // Don't offer to convert ints to strings since that probably +- // doesn't do what the user wants. +- if isBasicKind(from, types.IsInteger) && isBasicKind(to, types.IsString) { +- return false +- } +- +- return true +-} +- +-// typeMatches reports whether an object of candType makes a good +-// completion candidate given the expected type expType. +-func (ci *candidateInference) typeMatches(expType, candType types.Type) bool { +- // Handle untyped values specially since AssignableTo gives false negatives +- // for them (see https://golang.org/issue/32146). +- if candBasic, ok := candType.Underlying().(*types.Basic); ok { +- if expBasic, ok := expType.Underlying().(*types.Basic); ok { +- // Note that the candidate and/or the expected can be untyped. 
+- // In "fo<> == 100" the expected type is untyped, and the +- // candidate could also be an untyped constant. +- +- // Sort by is_untyped and then by is_int to simplify below logic. +- a, b := candBasic.Info(), expBasic.Info() +- if a&types.IsUntyped == 0 || (b&types.IsInteger > 0 && b&types.IsUntyped > 0) { +- a, b = b, a +- } +- +- // If at least one is untyped... +- if a&types.IsUntyped > 0 { +- switch { +- // Untyped integers are compatible with floats. +- case a&types.IsInteger > 0 && b&types.IsFloat > 0: +- return true +- +- // Check if their constant kind (bool|int|float|complex|string) matches. +- // This doesn't take into account the constant value, so there will be some +- // false positives due to integer sign and overflow. +- case a&types.IsConstType == b&types.IsConstType: +- return true +- } +- } +- } +- } +- +- // AssignableTo covers the case where the types are equal, but also handles +- // cases like assigning a concrete type to an interface type. +- return assignableTo(candType, expType) +-} +- +-// kindMatches reports whether candType's kind matches our expected +-// kind (e.g. slice, map, etc.). +-func (ci *candidateInference) kindMatches(candType types.Type) bool { +- return ci.objKind > 0 && ci.objKind&candKind(candType) > 0 +-} +- +-// assigneesMatch reports whether an invocation of sig matches the +-// number and type of any assignees. +-func (ci *candidateInference) assigneesMatch(cand *candidate, sig *types.Signature) bool { +- if len(ci.assignees) == 0 { +- return false +- } +- +- // Uniresult functions are always usable and are handled by the +- // normal, non-assignees type matching logic. +- if sig.Results().Len() == 1 { +- return false +- } +- +- // Don't prefer completing into func(...interface{}) calls since all +- // functions would match. +- if ci.variadicAssignees && len(ci.assignees) == 1 && isEmptyInterface(deslice(ci.assignees[0])) { +- return false +- } +- +- var numberOfResultsCouldMatch bool +- if ci.variadicAssignees { +- numberOfResultsCouldMatch = sig.Results().Len() >= len(ci.assignees)-1 +- } else { +- numberOfResultsCouldMatch = sig.Results().Len() == len(ci.assignees) +- } +- +- // If our signature doesn't return the right number of values, it's +- // not a match, so downrank it. For example: +- // +- // var foo func() (int, int) +- // a, b, c := <> // downrank "foo()" since it only returns two values +- if !numberOfResultsCouldMatch { +- cand.score /= 2 +- return false +- } +- +- // If at least one assignee has a valid type, and all valid +- // assignees match the corresponding sig result value, the signature +- // is a match. +- allMatch := false +- for i := range sig.Results().Len() { +- var assignee types.Type +- +- // If we are completing into variadic parameters, deslice the +- // expected variadic type. 
+- if ci.variadicAssignees && i >= len(ci.assignees)-1 { +- assignee = ci.assignees[len(ci.assignees)-1] +- if elem := deslice(assignee); elem != nil { +- assignee = elem +- } +- } else { +- assignee = ci.assignees[i] +- } +- +- if assignee == nil || assignee == types.Typ[types.Invalid] { +- continue +- } +- +- allMatch = ci.typeMatches(assignee, sig.Results().At(i).Type()) +- if !allMatch { +- break +- } +- } +- return allMatch +-} +- +-func (c *completer) matchingTypeName(cand *candidate) bool { +- if !c.wantTypeName() { +- return false +- } +- +- wantExactTypeParam := c.inference.typeName.isTypeParam && +- c.inference.typeName.wantTypeName && c.inference.needsExactType +- +- typeMatches := func(candType types.Type) bool { +- // Take into account any type name modifier prefixes. +- candType = c.inference.applyTypeNameModifiers(candType) +- +- if from := c.inference.typeName.assertableFrom; from != nil { +- // Don't suggest the starting type in type assertions. For example, +- // if "foo" is an io.Writer, don't suggest "foo.(io.Writer)". +- if types.Identical(from, candType) { +- return false +- } +- +- if intf, ok := from.Underlying().(*types.Interface); ok { +- if !types.AssertableTo(intf, candType) { +- return false +- } +- } +- } +- +- // Suggest the exact type when performing reverse type inference. +- // x = Foo[<>]() +- // Where x is an interface kind, only suggest the interface type rather than its implementors +- if wantExactTypeParam && types.Identical(candType, c.inference.objType) { +- return true +- } +- +- if c.inference.typeName.wantComparable && !types.Comparable(candType) { +- return false +- } +- +- // Skip this type if it has already been used in another type +- // switch case. +- for _, seen := range c.inference.typeName.seenTypeSwitchCases { +- if types.Identical(candType, seen) { +- return false +- } +- } +- +- // We can expect a type name and have an expected type in cases like: +- // +- // var foo []int +- // foo = []i<> +- // +- // Where our expected type is "[]int", and we expect a type name. +- if c.inference.objType != nil { +- return assignableTo(candType, c.inference.objType) +- } +- +- // Default to saying any type name is a match. +- return true +- } +- +- t := cand.obj.Type() +- +- if typeMatches(t) { +- return true +- } +- +- if !types.IsInterface(t) && typeMatches(types.NewPointer(t)) { +- if c.inference.typeName.compLitType { +- // If we are completing a composite literal type as in +- // "foo<>{}", to make a pointer we must prepend "&". +- cand.mods = append(cand.mods, reference) +- } else { +- // If we are completing a normal type name such as "foo<>", to +- // make a pointer we must prepend "*". +- cand.mods = append(cand.mods, dereference) +- } +- return true +- } +- +- return false +-} +- +-var ( +- // "interface { Error() string }" (i.e. error) +- errorIntf = types.Universe.Lookup("error").Type().Underlying().(*types.Interface) +- +- // "interface { String() string }" (i.e. fmt.Stringer) +- stringerIntf = types.NewInterfaceType([]*types.Func{ +- types.NewFunc(token.NoPos, nil, "String", types.NewSignatureType( +- nil, nil, +- nil, nil, +- types.NewTuple(types.NewParam(token.NoPos, nil, "", types.Typ[types.String])), +- false, +- )), +- }, nil).Complete() +- +- byteType = types.Universe.Lookup("byte").Type() +- +- boolType = types.Universe.Lookup("bool").Type() +-) +- +-// candKind returns the objKind of candType, if any. 
+-func candKind(candType types.Type) objKind { +- var kind objKind +- +- switch t := candType.Underlying().(type) { +- case *types.Array: +- kind |= kindArray +- if t.Elem() == byteType { +- kind |= kindBytes +- } +- case *types.Slice: +- kind |= kindSlice +- if t.Elem() == byteType { +- kind |= kindBytes +- } +- case *types.Chan: +- kind |= kindChan +- case *types.Map: +- kind |= kindMap +- case *types.Pointer: +- kind |= kindPtr +- +- // Some builtins handle array pointers as arrays, so just report a pointer +- // to an array as an array. +- if _, isArray := t.Elem().Underlying().(*types.Array); isArray { +- kind |= kindArray +- } +- case *types.Interface: +- kind |= kindInterface +- case *types.Basic: +- switch info := t.Info(); { +- case info&types.IsString > 0: +- kind |= kindString +- case info&types.IsInteger > 0: +- kind |= kindInt +- case info&types.IsFloat > 0: +- kind |= kindFloat +- case info&types.IsComplex > 0: +- kind |= kindComplex +- case info&types.IsBoolean > 0: +- kind |= kindBool +- } +- case *types.Signature: +- kind |= kindFunc +- +- switch rangeFuncParamCount(t) { +- case 0: +- kind |= kindRange0Func +- case 1: +- kind |= kindRange1Func +- case 2: +- kind |= kindRange2Func +- } +- } +- +- if types.Implements(candType, errorIntf) { +- kind |= kindError +- } +- +- if types.Implements(candType, stringerIntf) { +- kind |= kindStringer +- } +- +- return kind +-} +- +-// If sig looks like a range func, return param count, else return -1. +-func rangeFuncParamCount(sig *types.Signature) int { +- if sig.Results().Len() != 0 || sig.Params().Len() != 1 { +- return -1 +- } +- +- yieldSig, _ := sig.Params().At(0).Type().Underlying().(*types.Signature) +- if yieldSig == nil { +- return -1 +- } +- +- if yieldSig.Results().Len() != 1 || yieldSig.Results().At(0).Type() != boolType { +- return -1 +- } +- +- return yieldSig.Params().Len() +-} +- +-// innermostScope returns the innermost scope for c.pos. +-func (c *completer) innermostScope() *types.Scope { +- for _, s := range c.scopes { +- if s != nil { +- return s +- } +- } +- return nil +-} +- +-// isSlice reports whether the object's underlying type is a slice. +-func isSlice(obj types.Object) bool { +- if obj != nil && obj.Type() != nil { +- if _, ok := obj.Type().Underlying().(*types.Slice); ok { +- return true +- } +- } +- return false +-} +- +-// forEachPackageMember calls f(tok, id, fn) for each package-level +-// TYPE/VAR/CONST/FUNC declaration in the Go source file, based on a +-// quick partial parse. fn is non-nil only for function declarations. +-// The AST position information is garbage. 
+-func forEachPackageMember(content []byte, f func(tok token.Token, id *ast.Ident, fn *ast.FuncDecl)) { +- purged := internalastutil.PurgeFuncBodies(content) +- file, _ := parser.ParseFile(token.NewFileSet(), "", purged, parser.SkipObjectResolution) +- for _, decl := range file.Decls { +- switch decl := decl.(type) { +- case *ast.GenDecl: +- for _, spec := range decl.Specs { +- switch spec := spec.(type) { +- case *ast.ValueSpec: // var/const +- for _, id := range spec.Names { +- f(decl.Tok, id, nil) +- } +- case *ast.TypeSpec: +- f(decl.Tok, spec.Name, nil) +- } +- } +- case *ast.FuncDecl: +- if decl.Recv == nil { +- f(token.FUNC, decl.Name, decl) +- } +- } +- } +-} +- +-func is[T any](x any) bool { +- _, ok := x.(T) +- return ok +-} +diff -urN a/gopls/internal/golang/completion/deep_completion.go b/gopls/internal/golang/completion/deep_completion.go +--- a/gopls/internal/golang/completion/deep_completion.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/golang/completion/deep_completion.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,381 +0,0 @@ +-// Copyright 2019 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package completion +- +-import ( +- "context" +- "go/types" +- "strings" +- "time" +- +- "golang.org/x/tools/gopls/internal/util/typesutil" +- "golang.org/x/tools/internal/analysisinternal" +-) +- +-// MaxDeepCompletions limits deep completion results because in most cases +-// there are too many to be useful. +-const MaxDeepCompletions = 3 +- +-// deepCompletionState stores our state as we search for deep completions. +-// "deep completion" refers to searching into objects' fields and methods to +-// find more completion candidates. +-type deepCompletionState struct { +- // enabled indicates whether deep completion is permitted. +- enabled bool +- +- // queueClosed is used to disable adding new sub-fields to search queue +- // once we're running out of our time budget. +- queueClosed bool +- +- // thisQueue holds the current breadth first search queue. +- thisQueue []candidate +- +- // nextQueue holds the next breadth first search iteration's queue. +- nextQueue []candidate +- +- // highScores tracks the highest deep candidate scores we have found +- // so far. This is used to avoid work for low scoring deep candidates. +- highScores [MaxDeepCompletions]float64 +- +- // candidateCount is the count of unique deep candidates encountered +- // so far. +- candidateCount int +-} +- +-// enqueue adds a candidate to the search queue. +-func (s *deepCompletionState) enqueue(cand candidate) { +- s.nextQueue = append(s.nextQueue, cand) +-} +- +-// scorePenalty computes a deep candidate score penalty. A candidate is +-// penalized based on depth to favor shallower candidates. We also give a +-// slight bonus to unexported objects and a slight additional penalty to +-// function objects. +-func (s *deepCompletionState) scorePenalty(cand *candidate) float64 { +- var deepPenalty float64 +- for _, dc := range cand.path { +- deepPenalty++ +- +- if !dc.Exported() { +- deepPenalty -= 0.1 +- } +- +- if _, isSig := dc.Type().Underlying().(*types.Signature); isSig { +- deepPenalty += 0.1 +- } +- } +- +- // Normalize penalty to a max depth of 10. +- return deepPenalty / 10 +-} +- +-// isHighScore returns whether score is among the top MaxDeepCompletions deep +-// candidate scores encountered so far. If so, it adds score to highScores, +-// possibly displacing an existing high score. 
+-func (s *deepCompletionState) isHighScore(score float64) bool { +- // Invariant: s.highScores is sorted with highest score first. Unclaimed +- // positions are trailing zeros. +- +- // If we beat an existing score then take its spot. +- for i, deepScore := range s.highScores { +- if score <= deepScore { +- continue +- } +- +- if deepScore != 0 && i != len(s.highScores)-1 { +- // If this wasn't an empty slot then we need to scooch everyone +- // down one spot. +- copy(s.highScores[i+1:], s.highScores[i:]) +- } +- s.highScores[i] = score +- return true +- } +- +- return false +-} +- +-// newPath returns path from search root for an object following a given +-// candidate. +-func (s *deepCompletionState) newPath(cand candidate, obj types.Object) []types.Object { +- path := make([]types.Object, len(cand.path)+1) +- copy(path, cand.path) +- path[len(path)-1] = obj +- +- return path +-} +- +-// deepSearch searches a candidate and its subordinate objects for completion +-// items if deep completion is enabled and adds the valid candidates to +-// completion items. +-func (c *completer) deepSearch(ctx context.Context, minDepth int, deadline *time.Time) { +- defer func() { +- // We can return early before completing the search, so be sure to +- // clear out our queues to not impact any further invocations. +- c.deepState.thisQueue = c.deepState.thisQueue[:0] +- c.deepState.nextQueue = c.deepState.nextQueue[:0] +- }() +- +- depth := 0 // current depth being processed +- // Stop reports whether we should stop the search immediately. +- stop := func() bool { +- // Context cancellation indicates that the actual completion operation was +- // cancelled, so ignore minDepth and deadline. +- select { +- case <-ctx.Done(): +- return true +- default: +- } +- // Otherwise, only stop if we've searched at least minDepth and reached the deadline. +- return depth > minDepth && deadline != nil && time.Now().After(*deadline) +- } +- +- for len(c.deepState.nextQueue) > 0 { +- depth++ +- if stop() { +- return +- } +- c.deepState.thisQueue, c.deepState.nextQueue = c.deepState.nextQueue, c.deepState.thisQueue[:0] +- +- outer: +- for _, cand := range c.deepState.thisQueue { +- obj := cand.obj +- +- if obj == nil { +- continue +- } +- +- // At the top level, dedupe by object. +- if len(cand.path) == 0 { +- if c.seen[obj] { +- continue +- } +- c.seen[obj] = true +- } +- +- // If obj is not accessible because it lives in another package and is +- // not exported, don't treat it as a completion candidate unless it's +- // a package completion candidate. +- if !c.completionContext.packageCompletion && +- obj.Pkg() != nil && obj.Pkg() != c.pkg.Types() && !obj.Exported() { +- continue +- } +- +- if cand.imp != nil && !analysisinternal.CanImport(string(c.pkg.Metadata().PkgPath), cand.imp.importPath) { +- continue // inaccessible internal package +- } +- +- // If we want a type name, don't offer non-type name candidates. +- // However, do offer package names since they can contain type names, +- // and do offer any candidate without a type since we aren't sure if it +- // is a type name or not (i.e. unimported candidate). +- if c.wantTypeName() && obj.Type() != nil && !isTypeName(obj) && !isPkgName(obj) { +- continue +- } +- +- // When searching deep, make sure we don't have a cycle in our chain. +- // We don't dedupe by object because we want to allow both "foo.Baz" +- // and "bar.Baz" even though "Baz" is represented the same types.Object +- // in both. 
+- for _, seenObj := range cand.path { +- if seenObj == obj { +- continue outer +- } +- } +- +- c.addCandidate(ctx, &cand) +- +- c.deepState.candidateCount++ +- if c.opts.budget > 0 && c.deepState.candidateCount%100 == 0 { +- if stop() { +- return +- } +- spent := float64(time.Since(c.startTime)) / float64(c.opts.budget) +- // If we are almost out of budgeted time, no further elements +- // should be added to the queue. This ensures remaining time is +- // used for processing current queue. +- if !c.deepState.queueClosed && spent >= 0.85 { +- c.deepState.queueClosed = true +- } +- } +- +- // if deep search is disabled, don't add any more candidates. +- if !c.deepState.enabled || c.deepState.queueClosed { +- continue +- } +- +- // Searching members for a type name doesn't make sense. +- if isTypeName(obj) { +- continue +- } +- if obj.Type() == nil { +- continue +- } +- +- // Don't search embedded fields because they were already included in their +- // parent's fields. +- if v, ok := obj.(*types.Var); ok && v.Embedded() { +- continue +- } +- +- if sig, ok := obj.Type().Underlying().(*types.Signature); ok { +- // If obj is a function that takes no arguments and returns one +- // value, keep searching across the function call. +- if sig.Params().Len() == 0 && sig.Results().Len() == 1 { +- path := c.deepState.newPath(cand, obj) +- // The result of a function call is not addressable. +- c.methodsAndFields(sig.Results().At(0).Type(), false, cand.imp, func(newCand candidate) { +- newCand.pathInvokeMask = cand.pathInvokeMask | (1 << uint64(len(cand.path))) +- newCand.path = path +- c.deepState.enqueue(newCand) +- }) +- } +- } +- +- path := c.deepState.newPath(cand, obj) +- switch obj := obj.(type) { +- case *types.PkgName: +- c.packageMembers(obj.Imported(), stdScore, cand.imp, func(newCand candidate) { +- newCand.pathInvokeMask = cand.pathInvokeMask +- newCand.path = path +- c.deepState.enqueue(newCand) +- }) +- default: +- c.methodsAndFields(obj.Type(), cand.addressable, cand.imp, func(newCand candidate) { +- newCand.pathInvokeMask = cand.pathInvokeMask +- newCand.path = path +- c.deepState.enqueue(newCand) +- }) +- } +- } +- } +-} +- +-// addCandidate adds a completion candidate to suggestions, without searching +-// its members for more candidates. +-func (c *completer) addCandidate(ctx context.Context, cand *candidate) { +- obj := cand.obj +- if c.matchingCandidate(cand) { +- cand.score *= highScore +- +- if p := c.penalty(cand); p > 0 { +- cand.score *= (1 - p) +- } +- } else if isTypeName(obj) { +- // If obj is a *types.TypeName that didn't otherwise match, check +- // if a literal object of this type makes a good candidate. +- +- // We only care about named types (i.e. don't want builtin types). +- if _, isNamed := obj.Type().(*types.Named); isNamed { +- c.literal(ctx, obj.Type(), cand.imp) +- } +- } +- +- // Lower score of method calls so we prefer fields and vars over calls. +- if cand.hasMod(invoke) { +- if sig, ok := obj.Type().Underlying().(*types.Signature); ok && sig.Recv() != nil { +- cand.score *= 0.9 +- } +- } +- +- // Prefer private objects over public ones. +- if !obj.Exported() && obj.Parent() != types.Universe { +- cand.score *= 1.1 +- } +- +- // Slight penalty for index modifier (e.g. changing "foo" to +- // "foo[]") to curb false positives. +- if cand.hasMod(index) { +- cand.score *= 0.9 +- } +- +- // Favor shallow matches by lowering score according to depth. 
+- cand.score -= cand.score * c.deepState.scorePenalty(cand)
+-
+- if cand.score < 0 {
+- cand.score = 0
+- }
+-
+- cand.name = deepCandName(cand)
+- if item, err := c.item(ctx, *cand); err == nil {
+- c.items = append(c.items, item)
+- }
+-}
+-
+-// deepCandName produces the full candidate name including any
+-// ancestor objects. For example, "foo.bar().baz" for candidate "baz".
+-func deepCandName(cand *candidate) string {
+- totalLen := len(cand.obj.Name())
+- for i, obj := range cand.path {
+- totalLen += len(obj.Name()) + 1
+- if cand.pathInvokeMask&(1<<uint16(i)) > 0 {
+- totalLen += 2
+- }
+- }
+-
+- var buf strings.Builder
+- buf.Grow(totalLen)
+-
+- for i, obj := range cand.path {
+- buf.WriteString(obj.Name())
+- if fn, ok := obj.(*types.Func); ok {
+- buf.WriteString(typesutil.FormatTypeParams(fn.Signature().TypeParams()))
+- }
+- if cand.pathInvokeMask&(1<<uint16(i)) > 0 {
+- buf.WriteByte('(')
+- buf.WriteByte(')')
+- }
+- buf.WriteByte('.')
+- }
+-
+- buf.WriteString(cand.obj.Name())
+-
+- return buf.String()
+-}
+-
+-// penalty reports a score penalty for cand in the range (0, 1).
+-// For example, a candidate is penalized if it has already been used
+-// in another switch case statement.
+-func (c *completer) penalty(cand *candidate) float64 {
+- for _, p := range c.inference.penalized {
+- if c.objChainMatches(cand, p.objChain) {
+- return p.penalty
+- }
+- }
+-
+- return 0
+-}
+-
+-// objChainMatches reports whether cand combined with the surrounding
+-// object prefix matches chain.
+-func (c *completer) objChainMatches(cand *candidate, chain []types.Object) bool {
+- // For example, when completing:
+- //
+- // foo.ba<>
+- //
+- // If we are considering the deep candidate "bar.baz", cand is baz,
+- // objChain is [foo] and deepChain is [bar]. We would match the
+- // chain [foo, bar, baz].
+- if len(chain) != len(c.inference.objChain)+len(cand.path)+1 {
+- return false
+- }
+-
+- if chain[len(chain)-1] != cand.obj {
+- return false
+- }
+-
+- for i, o := range c.inference.objChain {
+- if chain[i] != o {
+- return false
+- }
+- }
+-
+- for i, o := range cand.path {
+- if chain[i+len(c.inference.objChain)] != o {
+- return false
+- }
+- }
+-
+- return true
+-}
+diff -urN a/gopls/internal/golang/completion/deep_completion_test.go b/gopls/internal/golang/completion/deep_completion_test.go
+--- a/gopls/internal/golang/completion/deep_completion_test.go 2000-01-01 00:00:00.000000000 -0000
++++ b/gopls/internal/golang/completion/deep_completion_test.go 1969-12-31 18:00:00.000000000 -0600
+@@ -1,33 +0,0 @@
+-// Copyright 2020 The Go Authors. All rights reserved.
+-// Use of this source code is governed by a BSD-style
+-// license that can be found in the LICENSE file.
+-
+-package completion
+-
+-import (
+- "testing"
+-)
+-
+-func TestDeepCompletionIsHighScore(t *testing.T) {
+- // Test that deepCompletionState.isHighScore properly tracks the top
+- // N=MaxDeepCompletions scores.
+-
+- var s deepCompletionState
+-
+- if !s.isHighScore(1) {
+- // No other scores yet, anything is a winner.
+- t.Error("1 should be high score")
+- }
+-
+- // Fill up with higher scores.
+- for range MaxDeepCompletions {
+- if !s.isHighScore(10) {
+- t.Error("10 should be high score")
+- }
+- }
+-
+- // High scores should be filled with 10s so 2 is not a high score.
+- if s.isHighScore(2) { +- t.Error("2 shouldn't be high score") +- } +-} +diff -urN a/gopls/internal/golang/completion/definition.go b/gopls/internal/golang/completion/definition.go +--- a/gopls/internal/golang/completion/definition.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/golang/completion/definition.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,160 +0,0 @@ +-// Copyright 2022 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package completion +- +-import ( +- "go/ast" +- "go/types" +- "strings" +- "unicode" +- "unicode/utf8" +- +- "golang.org/x/tools/gopls/internal/cache/parsego" +- "golang.org/x/tools/gopls/internal/golang/completion/snippet" +- "golang.org/x/tools/gopls/internal/protocol" +-) +- +-// some function definitions in test files can be completed +-// So far, TestFoo(t *testing.T), TestMain(m *testing.M) +-// BenchmarkFoo(b *testing.B), FuzzFoo(f *testing.F) +- +-// path[0] is known to be *ast.Ident +-func definition(path []ast.Node, obj types.Object, pgf *parsego.File) ([]CompletionItem, *Selection) { +- if _, ok := obj.(*types.Func); !ok { +- return nil, nil // not a function at all +- } +- if !strings.HasSuffix(pgf.URI.Path(), "_test.go") { +- return nil, nil // not a test file +- } +- +- name := path[0].(*ast.Ident).Name +- if len(name) == 0 { +- // can't happen +- return nil, nil +- } +- start := path[0].Pos() +- end := path[0].End() +- sel := &Selection{ +- content: "", +- cursor: start, +- tokFile: pgf.Tok, +- start: start, +- end: end, +- mapper: pgf.Mapper, +- } +- var ans []CompletionItem +- var hasParens bool +- n, ok := path[1].(*ast.FuncDecl) +- if !ok { +- return nil, nil // can't happen +- } +- if n.Recv != nil { +- return nil, nil // a method, not a function +- } +- t := n.Type.Params +- if t.Closing != t.Opening { +- hasParens = true +- } +- +- // Always suggest TestMain, if possible +- if strings.HasPrefix("TestMain", name) { +- if hasParens { +- ans = append(ans, defItem("TestMain", obj)) +- } else { +- ans = append(ans, defItem("TestMain(m *testing.M)", obj)) +- } +- } +- +- // If a snippet is possible, suggest it +- if strings.HasPrefix("Test", name) { +- if hasParens { +- ans = append(ans, defItem("Test", obj)) +- } else { +- ans = append(ans, defSnippet("Test", "(t *testing.T)", obj)) +- } +- return ans, sel +- } else if strings.HasPrefix("Benchmark", name) { +- if hasParens { +- ans = append(ans, defItem("Benchmark", obj)) +- } else { +- ans = append(ans, defSnippet("Benchmark", "(b *testing.B)", obj)) +- } +- return ans, sel +- } else if strings.HasPrefix("Fuzz", name) { +- if hasParens { +- ans = append(ans, defItem("Fuzz", obj)) +- } else { +- ans = append(ans, defSnippet("Fuzz", "(f *testing.F)", obj)) +- } +- return ans, sel +- } +- +- // Fill in the argument for what the user has already typed +- if got := defMatches(name, "Test", path, "(t *testing.T)"); got != "" { +- ans = append(ans, defItem(got, obj)) +- } else if got := defMatches(name, "Benchmark", path, "(b *testing.B)"); got != "" { +- ans = append(ans, defItem(got, obj)) +- } else if got := defMatches(name, "Fuzz", path, "(f *testing.F)"); got != "" { +- ans = append(ans, defItem(got, obj)) +- } +- return ans, sel +-} +- +-// defMatches returns text for defItem, never for defSnippet +-func defMatches(name, pat string, path []ast.Node, arg string) string { +- if !strings.HasPrefix(name, pat) { +- return "" +- } +- c, _ := utf8.DecodeRuneInString(name[len(pat):]) +- if 
unicode.IsLower(c) { +- return "" +- } +- fd, ok := path[1].(*ast.FuncDecl) +- if !ok { +- // we don't know what's going on +- return "" +- } +- fp := fd.Type.Params +- if len(fp.List) > 0 { +- // signature already there, nothing to suggest +- return "" +- } +- if fp.Opening != fp.Closing { +- // nothing: completion works on words, not easy to insert arg +- return "" +- } +- // suggesting signature too +- return name + arg +-} +- +-func defSnippet(prefix, suffix string, obj types.Object) CompletionItem { +- var sn snippet.Builder +- sn.WriteText(prefix) +- sn.WritePlaceholder(func(b *snippet.Builder) { b.WriteText("Xxx") }) +- sn.WriteText(suffix + " {\n\t") +- sn.WriteFinalTabstop() +- sn.WriteText("\n}") +- return CompletionItem{ +- Label: prefix + "Xxx" + suffix, +- Detail: "tab, type the rest of the name, then tab", +- Kind: protocol.FunctionCompletion, +- Depth: 0, +- Score: 10, +- snippet: &sn, +- Documentation: prefix + " test function", +- isSlice: isSlice(obj), +- } +-} +-func defItem(val string, obj types.Object) CompletionItem { +- return CompletionItem{ +- Label: val, +- InsertText: val, +- Kind: protocol.FunctionCompletion, +- Depth: 0, +- Score: 9, // prefer the snippets when available +- Documentation: "complete the function name", +- isSlice: isSlice(obj), +- } +-} +diff -urN a/gopls/internal/golang/completion/format.go b/gopls/internal/golang/completion/format.go +--- a/gopls/internal/golang/completion/format.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/golang/completion/format.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,460 +0,0 @@ +-// Copyright 2019 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package completion +- +-import ( +- "context" +- "errors" +- "fmt" +- "go/ast" +- "go/doc" +- "go/types" +- "strings" +- +- "golang.org/x/tools/gopls/internal/golang" +- "golang.org/x/tools/gopls/internal/golang/completion/snippet" +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/gopls/internal/util/safetoken" +- "golang.org/x/tools/gopls/internal/util/typesutil" +- internalastutil "golang.org/x/tools/internal/astutil" +- "golang.org/x/tools/internal/event" +- "golang.org/x/tools/internal/imports" +-) +- +-var ( +- errNoMatch = errors.New("not a surrounding match") +- errLowScore = errors.New("not a high scoring candidate") +-) +- +-// item formats a candidate to a CompletionItem. +-func (c *completer) item(ctx context.Context, cand candidate) (CompletionItem, error) { +- obj := cand.obj +- +- // if the object isn't a valid match against the surrounding, return early. +- matchScore := c.matcher.Score(cand.name) +- if matchScore <= 0 { +- return CompletionItem{}, errNoMatch +- } +- cand.score *= float64(matchScore) +- +- // Ignore deep candidates that won't be in the MaxDeepCompletions anyway. +- if len(cand.path) != 0 && !c.deepState.isHighScore(cand.score) { +- return CompletionItem{}, errLowScore +- } +- +- // Handle builtin types separately. 
+- if obj.Parent() == types.Universe { +- return c.formatBuiltin(ctx, cand) +- } +- +- var ( +- label = cand.name +- detail = types.TypeString(obj.Type(), c.qual) +- insert = label +- kind = protocol.TextCompletion +- snip snippet.Builder +- protocolEdits []protocol.TextEdit +- ) +- if obj.Type() == nil { +- detail = "" +- } +- +- type hasTypeParams interface{ TypeParams() *types.TypeParamList } +- if genericType, _ := obj.Type().(hasTypeParams); genericType != nil && isTypeName(obj) && c.wantTypeParams() { +- // golang/go#71044: note that type names can be basic types, even in +- // receiver position, for invalid code. +- tparams := genericType.TypeParams() +- label += typesutil.FormatTypeParams(tparams) +- insert = label // maintain invariant above (label == insert) +- } +- +- snip.WriteText(insert) +- +- switch obj := obj.(type) { +- case *types.TypeName: +- detail, kind = golang.FormatType(obj.Type(), c.qual) +- case *types.Const: +- kind = protocol.ConstantCompletion +- case *types.Var: +- if _, ok := obj.Type().(*types.Struct); ok { +- detail = "struct{...}" // for anonymous unaliased struct types +- } else if obj.IsField() { +- var err error +- detail, err = golang.FormatVarType(ctx, c.snapshot, c.pkg, obj, c.qual, c.mq) +- if err != nil { +- return CompletionItem{}, err +- } +- } +- if obj.IsField() { +- kind = protocol.FieldCompletion +- c.structFieldSnippet(cand, detail, &snip) +- } else { +- kind = protocol.VariableCompletion +- } +- if obj.Type() == nil { +- break +- } +- case *types.Func: +- if obj.Signature().Recv() == nil { +- kind = protocol.FunctionCompletion +- } else { +- kind = protocol.MethodCompletion +- } +- case *types.PkgName: +- kind = protocol.ModuleCompletion +- detail = fmt.Sprintf("%q", obj.Imported().Path()) +- case *types.Label: +- kind = protocol.ConstantCompletion +- detail = "label" +- } +- +- var prefix string +- for _, mod := range cand.mods { +- switch mod { +- case reference: +- prefix = "&" + prefix +- case dereference: +- prefix = "*" + prefix +- case chanRead: +- prefix = "<-" + prefix +- } +- } +- +- var ( +- suffix string +- funcType = obj.Type() +- ) +-Suffixes: +- for _, mod := range cand.mods { +- switch mod { +- case invoke: +- if sig, ok := funcType.Underlying().(*types.Signature); ok { +- s, err := golang.NewSignature(ctx, c.snapshot, c.pkg, sig, nil, c.qual, c.mq) +- if err != nil { +- return CompletionItem{}, err +- } +- +- tparams := s.TypeParams() +- if len(tparams) > 0 { +- // Eliminate the suffix of type parameters that are +- // likely redundant because they can probably be +- // inferred from the argument types (#51783). +- // +- // We don't bother doing the reverse inference from +- // result types as result-only type parameters are +- // quite unusual. +- free := inferableTypeParams(sig) +- for i := sig.TypeParams().Len() - 1; i >= 0; i-- { +- tparam := sig.TypeParams().At(i) +- if !free[tparam] { +- break +- } +- tparams = tparams[:i] // eliminate +- } +- } +- +- c.functionCallSnippet("", tparams, s.Params(), &snip) +- if sig.Results().Len() == 1 { +- funcType = sig.Results().At(0).Type() +- } +- detail = "func" + s.Format() +- } +- +- if !c.opts.snippets { +- // Without snippets the candidate will not include "()". Don't +- // add further suffixes since they will be invalid. For +- // example, with snippets "foo()..." would become "foo..." +- // without snippets if we added the dotDotDot. +- break Suffixes +- } +- case takeSlice: +- suffix += "[:]" +- case takeDotDotDot: +- suffix += "..." 
+- case index: +- snip.WriteText("[") +- snip.WritePlaceholder(nil) +- snip.WriteText("]") +- } +- } +- +- // If this candidate needs an additional import statement, +- // add the additional text edits needed. +- if cand.imp != nil { +- addlEdits, err := c.importEdits(cand.imp) +- +- if err != nil { +- return CompletionItem{}, err +- } +- +- protocolEdits = append(protocolEdits, addlEdits...) +- if kind != protocol.ModuleCompletion { +- if detail != "" { +- detail += " " +- } +- detail += fmt.Sprintf("(from %q)", cand.imp.importPath) +- } +- } +- +- if cand.convertTo != nil { +- conv := c.formatConversion(cand.convertTo) +- prefix = conv.prefix + prefix +- suffix = conv.suffix +- } +- +- if prefix != "" { +- // If we are in a selector, add an edit to place prefix before selector. +- if sel := enclosingSelector(c.path, c.pos); sel != nil { +- edits, err := c.editText(sel.Pos(), sel.Pos(), prefix) +- if err != nil { +- return CompletionItem{}, err +- } +- protocolEdits = append(protocolEdits, edits...) +- } else { +- // If there is no selector, just stick the prefix at the start. +- insert = prefix + insert +- snip.PrependText(prefix) +- } +- } +- +- if suffix != "" { +- insert += suffix +- snip.WriteText(suffix) +- } +- +- detail = strings.TrimPrefix(detail, "untyped ") +- // override computed detail with provided detail, if something is provided. +- if cand.detail != "" { +- detail = cand.detail +- } +- item := CompletionItem{ +- Label: label, +- InsertText: insert, +- AdditionalTextEdits: protocolEdits, +- Detail: detail, +- Kind: kind, +- Score: cand.score, +- Depth: len(cand.path), +- snippet: &snip, +- isSlice: isSlice(obj), +- } +- // If the user doesn't want documentation for completion items. +- if !c.opts.documentation { +- return item, nil +- } +- pos := safetoken.StartPosition(c.pkg.FileSet(), obj.Pos()) +- +- // We ignore errors here, because some types, like "unsafe" or "error", +- // may not have valid positions that we can use to get documentation. +- if !pos.IsValid() { +- return item, nil +- } +- +- comment, err := golang.HoverDocForObject(ctx, c.snapshot, c.pkg.FileSet(), obj) +- if err != nil { +- event.Error(ctx, fmt.Sprintf("failed to find Hover for %q", obj.Name()), err) +- return item, nil +- } +- if c.opts.fullDocumentation { +- item.Documentation = comment.Text() +- } else { +- item.Documentation = doc.Synopsis(comment.Text()) +- } +- if internalastutil.Deprecation(comment) != "" { +- if c.snapshot.Options().CompletionTags { +- item.Tags = []protocol.CompletionItemTag{protocol.ComplDeprecated} +- } else if c.snapshot.Options().CompletionDeprecated { +- item.Deprecated = true +- } +- } +- +- return item, nil +-} +- +-// conversionEdits represents the string edits needed to make a type conversion +-// of an expression. +-type conversionEdits struct { +- prefix, suffix string +-} +- +-// formatConversion returns the edits needed to make a type conversion +-// expression, including parentheses if necessary. +-// +-// Returns empty conversionEdits if convertTo is nil. +-func (c *completer) formatConversion(convertTo types.Type) conversionEdits { +- if convertTo == nil { +- return conversionEdits{} +- } +- +- typeName := types.TypeString(convertTo, c.qual) +- switch t := convertTo.(type) { +- // We need extra parens when casting to these types. For example, +- // we need "(*int)(foo)", not "*int(foo)". 
+- case *types.Pointer, *types.Signature: +- typeName = "(" + typeName + ")" +- case *types.Basic: +- // If the types are incompatible (as determined by typeMatches), then we +- // must need a conversion here. However, if the target type is untyped, +- // don't suggest converting to e.g. "untyped float" (golang/go#62141). +- if t.Info()&types.IsUntyped != 0 { +- typeName = types.TypeString(types.Default(convertTo), c.qual) +- } +- } +- return conversionEdits{prefix: typeName + "(", suffix: ")"} +-} +- +-// importEdits produces the text edits necessary to add the given import to the current file. +-func (c *completer) importEdits(imp *importInfo) ([]protocol.TextEdit, error) { +- if imp == nil { +- return nil, nil +- } +- +- pgf, err := c.pkg.File(protocol.URIFromPath(c.filename)) +- if err != nil { +- return nil, err +- } +- +- return golang.ComputeImportFixEdits(c.snapshot.Options().Local, pgf.Src, &imports.ImportFix{ +- StmtInfo: imports.ImportInfo{ +- ImportPath: imp.importPath, +- Name: imp.name, +- }, +- // IdentName is unused on this path and is difficult to get. +- FixType: imports.AddImport, +- }) +-} +- +-func (c *completer) formatBuiltin(ctx context.Context, cand candidate) (CompletionItem, error) { +- obj := cand.obj +- item := CompletionItem{ +- Label: obj.Name(), +- InsertText: obj.Name(), +- Score: cand.score, +- } +- switch obj.(type) { +- case *types.Const: +- item.Kind = protocol.ConstantCompletion +- case *types.Builtin: +- item.Kind = protocol.FunctionCompletion +- sig, err := golang.NewBuiltinSignature(ctx, c.snapshot, obj.Name()) +- if err != nil { +- return CompletionItem{}, err +- } +- item.Detail = "func" + sig.Format() +- item.snippet = &snippet.Builder{} +- // The signature inferred for a built-in is instantiated, so TypeParams=∅. +- c.functionCallSnippet(obj.Name(), sig.TypeParams(), sig.Params(), item.snippet) +- case *types.TypeName: +- if types.IsInterface(obj.Type()) { +- item.Kind = protocol.InterfaceCompletion +- } else { +- item.Kind = protocol.ClassCompletion +- } +- case *types.Nil: +- item.Kind = protocol.VariableCompletion +- } +- return item, nil +-} +- +-// decide if the type params (if any) should be part of the completion +-// which only possible for types.Named and types.Signature +-// (so far, only in receivers, e.g.; func (s *GENERIC[K, V])..., which is a types.Named) +-func (c *completer) wantTypeParams() bool { +- // Need to be lexically in a receiver, and a child of an IndexListExpr +- // (but IndexListExpr only exists with go1.18) +- start := c.path[0].Pos() +- for i, nd := range c.path { +- if fd, ok := nd.(*ast.FuncDecl); ok { +- if i > 0 && fd.Recv != nil && start < fd.Recv.End() { +- return true +- } else { +- return false +- } +- } +- } +- return false +-} +- +-// inferableTypeParams returns the set of type parameters +-// of sig that are constrained by (inferred from) the argument types. +-func inferableTypeParams(sig *types.Signature) map[*types.TypeParam]bool { +- free := make(map[*types.TypeParam]bool) +- +- // visit adds to free all the free type parameters of t. 
+- var visit func(t types.Type) +- visit = func(t types.Type) { +- switch t := t.(type) { +- case *types.Array: +- visit(t.Elem()) +- case *types.Chan: +- visit(t.Elem()) +- case *types.Map: +- visit(t.Key()) +- visit(t.Elem()) +- case *types.Pointer: +- visit(t.Elem()) +- case *types.Slice: +- visit(t.Elem()) +- case *types.Interface: +- for i := range t.NumExplicitMethods() { +- visit(t.ExplicitMethod(i).Type()) +- } +- for i := range t.NumEmbeddeds() { +- visit(t.EmbeddedType(i)) +- } +- case *types.Union: +- for i := range t.Len() { +- visit(t.Term(i).Type()) +- } +- case *types.Signature: +- if tp := t.TypeParams(); tp != nil { +- // Generic signatures only appear as the type of generic +- // function declarations, so this isn't really reachable. +- for i := range tp.Len() { +- visit(tp.At(i).Constraint()) +- } +- } +- visit(t.Params()) +- visit(t.Results()) +- case *types.Tuple: +- for i := range t.Len() { +- visit(t.At(i).Type()) +- } +- case *types.Struct: +- for i := range t.NumFields() { +- visit(t.Field(i).Type()) +- } +- case *types.TypeParam: +- free[t] = true +- case *types.Alias: +- visit(types.Unalias(t)) +- case *types.Named: +- targs := t.TypeArgs() +- for i := range targs.Len() { +- visit(targs.At(i)) +- } +- case *types.Basic: +- // nop +- default: +- panic(t) +- } +- } +- +- visit(sig.Params()) +- +- // Perform induction through constraints. +-restart: +- for i := range sig.TypeParams().Len() { +- tp := sig.TypeParams().At(i) +- if free[tp] { +- n := len(free) +- visit(tp.Constraint()) +- if len(free) > n { +- goto restart // iterate until fixed point +- } +- } +- } +- return free +-} +diff -urN a/gopls/internal/golang/completion/fuzz.go b/gopls/internal/golang/completion/fuzz.go +--- a/gopls/internal/golang/completion/fuzz.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/golang/completion/fuzz.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,143 +0,0 @@ +-// Copyright 2022 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package completion +- +-import ( +- "fmt" +- "go/ast" +- "go/types" +- "strings" +- +- "golang.org/x/tools/gopls/internal/protocol" +-) +- +-// golang/go#51089 +-// *testing.F deserves special treatment as member use is constrained: +-// The arguments to f.Fuzz are determined by the arguments to a previous f.Add +-// Inside f.Fuzz only f.Failed and f.Name are allowed. +-// PJW: are there other packages where we can deduce usage constraints? +- +-// if we find fuzz completions, then return true, as those are the only completions to offer +-func (c *completer) fuzz(testingF types.Type, imp *importInfo, cb func(candidate)) bool { +- // 1. inside f.Fuzz? (only f.Failed and f.Name) +- // 2. possible completing f.Fuzz? +- // [Ident,SelectorExpr,Callexpr,ExprStmt,BlockiStmt,FuncDecl(Fuzz...)] +- // 3. before f.Fuzz, same (for 2., offer choice when looking at an F) +- +- mset := types.NewMethodSet(testingF) +- +- // does the path contain FuncLit as arg to f.Fuzz CallExpr? 
+- inside := false +-Loop: +- for i, n := range c.path { +- switch v := n.(type) { +- case *ast.CallExpr: +- if len(v.Args) != 1 { +- continue Loop +- } +- if _, ok := v.Args[0].(*ast.FuncLit); !ok { +- continue +- } +- if s, ok := v.Fun.(*ast.SelectorExpr); !ok || s.Sel.Name != "Fuzz" { +- continue +- } +- if i > 2 { // avoid t.Fuzz itself in tests +- inside = true +- break Loop +- } +- } +- } +- if inside { +- for i := range mset.Len() { +- o := mset.At(i).Obj() +- if o.Name() == "Failed" || o.Name() == "Name" { +- cb(candidate{ +- obj: o, +- score: stdScore, +- imp: imp, +- addressable: true, +- }) +- } +- } +- return true +- } +- // if it could be t.Fuzz, look for the preceding t.Add +- id, ok := c.path[0].(*ast.Ident) +- if ok && strings.HasPrefix("Fuzz", id.Name) { +- var add *ast.CallExpr +- f := func(n ast.Node) bool { +- if n == nil { +- return true +- } +- call, ok := n.(*ast.CallExpr) +- if !ok { +- return true +- } +- s, ok := call.Fun.(*ast.SelectorExpr) +- if !ok { +- return true +- } +- if s.Sel.Name != "Add" { +- return true +- } +- // Sel.X should be of type *testing.F +- got := c.pkg.TypesInfo().Types[s.X] +- if got.Type.String() == "*testing.F" { +- add = call +- } +- return false // because we're done... +- } +- // look at the enclosing FuzzFoo functions +- if len(c.path) < 2 { +- return false +- } +- n := c.path[len(c.path)-2] +- if _, ok := n.(*ast.FuncDecl); !ok { +- // the path should start with ast.File, ast.FuncDecl, ... +- // but it didn't, so give up +- return false +- } +- ast.Inspect(n, f) +- if add == nil { +- // looks like f.Fuzz without a preceding f.Add. +- // let the regular completion handle it. +- return false +- } +- +- lbl := "Fuzz(func(t *testing.T" +- for i, a := range add.Args { +- info := c.pkg.TypesInfo().TypeOf(a) +- if info == nil { +- return false // How could this happen, but better safe than panic. +- } +- lbl += fmt.Sprintf(", %c %s", 'a'+i, info) +- } +- lbl += ")" +- xx := CompletionItem{ +- Label: lbl, +- InsertText: lbl, +- Kind: protocol.FunctionCompletion, +- Depth: 0, +- Score: 10, // pretty confident the user should see this +- Documentation: "argument types from f.Add", +- isSlice: false, +- } +- c.items = append(c.items, xx) +- for i := range mset.Len() { +- o := mset.At(i).Obj() +- if o.Name() != "Fuzz" { +- cb(candidate{ +- obj: o, +- score: stdScore, +- imp: imp, +- addressable: true, +- }) +- } +- } +- return true // done +- } +- // let the standard processing take care of it instead +- return false +-} +diff -urN a/gopls/internal/golang/completion/keywords.go b/gopls/internal/golang/completion/keywords.go +--- a/gopls/internal/golang/completion/keywords.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/golang/completion/keywords.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,205 +0,0 @@ +-// Copyright 2020 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. 
+- +-package completion +- +-import ( +- "go/ast" +- +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/internal/astutil" +-) +- +-const ( +- BREAK = "break" +- CASE = "case" +- CHAN = "chan" +- CONST = "const" +- CONTINUE = "continue" +- DEFAULT = "default" +- DEFER = "defer" +- ELSE = "else" +- FALLTHROUGH = "fallthrough" +- FOR = "for" +- FUNC = "func" +- GO = "go" +- GOTO = "goto" +- IF = "if" +- IMPORT = "import" +- INTERFACE = "interface" +- MAP = "map" +- PACKAGE = "package" +- RANGE = "range" +- RETURN = "return" +- SELECT = "select" +- STRUCT = "struct" +- SWITCH = "switch" +- TYPE = "type" +- VAR = "var" +-) +- +-// addKeywordCompletions offers keyword candidates appropriate at the position. +-func (c *completer) addKeywordCompletions() { +- seen := make(map[string]bool) +- +- if c.wantTypeName() && c.inference.objType == nil { +- // If we want a type name but don't have an expected obj type, +- // include "interface", "struct", "func", "chan", and "map". +- +- // "interface" and "struct" are more common declaring named types. +- // Give them a higher score if we are in a type declaration. +- structIntf, funcChanMap := stdScore, highScore +- if len(c.path) > 1 { +- if _, namedDecl := c.path[1].(*ast.TypeSpec); namedDecl { +- structIntf, funcChanMap = highScore, stdScore +- } +- } +- +- c.addKeywordItems(seen, structIntf, STRUCT, INTERFACE) +- c.addKeywordItems(seen, funcChanMap, FUNC, CHAN, MAP) +- } +- +- // If we are at the file scope, only offer decl keywords. We don't +- // get *ast.Idents at the file scope because non-keyword identifiers +- // turn into *ast.BadDecl, not *ast.Ident. +- if len(c.path) == 1 || is[*ast.File](c.path[1]) { +- c.addKeywordItems(seen, stdScore, TYPE, CONST, VAR, FUNC, IMPORT) +- return +- } else if _, ok := c.path[0].(*ast.Ident); !ok { +- // Otherwise only offer keywords if the client is completing an identifier. +- return +- } +- +- if len(c.path) > 2 { +- // Offer "range" if we are in ast.ForStmt.Init. This is what the +- // AST looks like before "range" is typed, e.g. "for i := r<>". +- if loop, ok := c.path[2].(*ast.ForStmt); ok && loop.Init != nil && astutil.NodeContains(loop.Init, c.pos) { +- c.addKeywordItems(seen, stdScore, RANGE) +- } +- } +- +- // Only suggest keywords if we are beginning a statement. +- switch n := c.path[1].(type) { +- case *ast.BlockStmt, *ast.ExprStmt: +- // OK - our ident must be at beginning of statement. +- case *ast.CommClause: +- // Make sure we aren't in the Comm statement. +- if !n.Colon.IsValid() || c.pos <= n.Colon { +- return +- } +- case *ast.CaseClause: +- // Make sure we aren't in the case List. +- if !n.Colon.IsValid() || c.pos <= n.Colon { +- return +- } +- default: +- return +- } +- +- // Filter out keywords depending on scope +- // Skip the first one because we want to look at the enclosing scopes +- path := c.path[1:] +- for i, n := range path { +- switch node := n.(type) { +- case *ast.CaseClause: +- // only recommend "fallthrough" and "break" within the bodies of a case clause +- if c.pos > node.Colon { +- c.addKeywordItems(seen, stdScore, BREAK) +- // "fallthrough" is only valid in switch statements. +- // A case clause is always nested within a block statement in a switch statement, +- // that block statement is nested within either a TypeSwitchStmt or a SwitchStmt. 
+- if i+2 >= len(path) { +- continue +- } +- if _, ok := path[i+2].(*ast.SwitchStmt); ok { +- c.addKeywordItems(seen, stdScore, FALLTHROUGH) +- } +- } +- case *ast.CommClause: +- if c.pos > node.Colon { +- c.addKeywordItems(seen, stdScore, BREAK) +- } +- case *ast.TypeSwitchStmt, *ast.SelectStmt, *ast.SwitchStmt: +- // if there is no default case yet, it's highly likely to add a default in switch. +- // we don't offer 'default' anymore if user has used it already in current switch. +- if !hasDefaultClause(node) { +- c.addKeywordItems(seen, highScore, CASE, DEFAULT) +- } +- case *ast.ForStmt, *ast.RangeStmt: +- c.addKeywordItems(seen, stdScore, BREAK, CONTINUE) +- // This is a bit weak, functions allow for many keywords +- case *ast.FuncDecl: +- if node.Body != nil && c.pos > node.Body.Lbrace { +- // requireReturnObj checks whether user must provide some objects after return. +- requireReturnObj := func(sig *ast.FuncType) bool { +- results := sig.Results +- if results == nil || results.List == nil { +- return false // nothing to return +- } +- // If any result is named, allow a bare return. +- for _, r := range results.List { +- for _, name := range r.Names { +- if name.Name != "_" { +- return false +- } +- } +- } +- return true +- } +- ret := RETURN +- if requireReturnObj(node.Type) { +- // as user must return something, we offer a space after return. +- // function literal inside a function will be affected by outer function, +- // but 'go fmt' will help to remove the ending space. +- // the benefit is greater than introducing an unnecessary space. +- ret += " " +- } +- +- c.addKeywordItems(seen, stdScore, DEFER, ret, FOR, GO, SWITCH, SELECT, IF, ELSE, VAR, CONST, GOTO, TYPE) +- } +- } +- } +-} +- +-// hasDefaultClause reports whether the given node contains a direct default case. +-// It does not traverse child nodes to look for nested default clauses, +-// and returns false if the node is not a switch statement. +-func hasDefaultClause(node ast.Node) bool { +- var cases []ast.Stmt +- switch node := node.(type) { +- case *ast.TypeSwitchStmt: +- cases = node.Body.List +- case *ast.SelectStmt: +- cases = node.Body.List +- case *ast.SwitchStmt: +- cases = node.Body.List +- } +- for _, c := range cases { +- if clause, ok := c.(*ast.CaseClause); ok && +- clause.List == nil { // default case +- return true +- } +- } +- return false +-} +- +-// addKeywordItems dedupes and adds completion items for the specified +-// keywords with the specified score. +-func (c *completer) addKeywordItems(seen map[string]bool, score float64, kws ...string) { +- for _, kw := range kws { +- if seen[kw] { +- continue +- } +- seen[kw] = true +- +- if matchScore := c.matcher.Score(kw); matchScore > 0 { +- c.items = append(c.items, CompletionItem{ +- Label: kw, +- Kind: protocol.KeywordCompletion, +- InsertText: kw, +- Score: score * float64(matchScore), +- }) +- } +- } +-} +diff -urN a/gopls/internal/golang/completion/labels.go b/gopls/internal/golang/completion/labels.go +--- a/gopls/internal/golang/completion/labels.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/golang/completion/labels.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,108 +0,0 @@ +-// Copyright 2019 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. 
+- +-package completion +- +-import ( +- "go/ast" +- "go/token" +- "math" +- "slices" +-) +- +-type labelType int +- +-const ( +- labelNone labelType = iota +- labelBreak +- labelContinue +- labelGoto +-) +- +-// wantLabelCompletion returns true if we want (only) label +-// completions at the position. +-func (c *completer) wantLabelCompletion() labelType { +- if _, ok := c.path[0].(*ast.Ident); ok && len(c.path) > 1 { +- // We want a label if we are an *ast.Ident child of a statement +- // that accepts a label, e.g. "break Lo<>". +- return takesLabel(c.path[1]) +- } +- +- return labelNone +-} +- +-// takesLabel returns the corresponding labelType if n is a statement +-// that accepts a label, otherwise labelNone. +-func takesLabel(n ast.Node) labelType { +- if bs, ok := n.(*ast.BranchStmt); ok { +- switch bs.Tok { +- case token.BREAK: +- return labelBreak +- case token.CONTINUE: +- return labelContinue +- case token.GOTO: +- return labelGoto +- } +- } +- return labelNone +-} +- +-// labels adds completion items for labels defined in the enclosing +-// function. +-func (c *completer) labels(lt labelType) { +- if c.enclosingFunc == nil { +- return +- } +- +- addLabel := func(score float64, l *ast.LabeledStmt) { +- labelObj := c.pkg.TypesInfo().ObjectOf(l.Label) +- if labelObj != nil { +- c.deepState.enqueue(candidate{obj: labelObj, score: score}) +- } +- } +- +- switch lt { +- case labelBreak, labelContinue: +- // "break" and "continue" only accept labels from enclosing statements. +- +- for i, p := range c.path { +- switch p := p.(type) { +- case *ast.FuncLit: +- // Labels are function scoped, so don't continue out of functions. +- return +- case *ast.LabeledStmt: +- switch p.Stmt.(type) { +- case *ast.ForStmt, *ast.RangeStmt: +- // Loop labels can be used for "break" or "continue". +- addLabel(highScore*math.Pow(.99, float64(i)), p) +- case *ast.SwitchStmt, *ast.SelectStmt, *ast.TypeSwitchStmt: +- // Switch and select labels can be used only for "break". +- if lt == labelBreak { +- addLabel(highScore*math.Pow(.99, float64(i)), p) +- } +- } +- } +- } +- case labelGoto: +- // Goto accepts any label in the same function not in a nested +- // block. It also doesn't take labels that would jump across +- // variable definitions, but ignore that case for now. +- ast.Inspect(c.enclosingFunc.body, func(n ast.Node) bool { +- if n == nil { +- return false +- } +- +- switch n := n.(type) { +- // Only search into block-like nodes enclosing our "goto". +- // This prevents us from finding labels in nested blocks. +- case *ast.BlockStmt, *ast.CommClause, *ast.CaseClause: +- return slices.Contains(c.path, n) +- case *ast.LabeledStmt: +- addLabel(highScore, n) +- } +- +- return true +- }) +- } +-} +diff -urN a/gopls/internal/golang/completion/literal.go b/gopls/internal/golang/completion/literal.go +--- a/gopls/internal/golang/completion/literal.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/golang/completion/literal.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,617 +0,0 @@ +-// Copyright 2019 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. 
+- +-package completion +- +-import ( +- "context" +- "fmt" +- "go/types" +- "strings" +- "unicode" +- +- "golang.org/x/tools/gopls/internal/golang" +- "golang.org/x/tools/gopls/internal/golang/completion/snippet" +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/internal/event" +- "golang.org/x/tools/internal/typesinternal" +-) +- +-// literal generates composite literal, function literal, and make() +-// completion items. +-func (c *completer) literal(ctx context.Context, literalType types.Type, imp *importInfo) { +- if !c.opts.snippets { +- return +- } +- +- expType := c.inference.objType +- +- if c.inference.matchesVariadic(literalType) { +- // Don't offer literal slice candidates for variadic arguments. +- // For example, don't offer "[]interface{}{}" in "fmt.Print(<>)". +- return +- } +- +- // Avoid literal candidates if the expected type is an empty +- // interface. It isn't very useful to suggest a literal candidate of +- // every possible type. +- if expType != nil && isEmptyInterface(expType) { +- return +- } +- +- // We handle unnamed literal completions explicitly before searching +- // for candidates. Avoid named-type literal completions for +- // unnamed-type expected type since that results in duplicate +- // candidates. For example, in +- // +- // type mySlice []int +- // var []int = <> +- // +- // don't offer "mySlice{}" since we have already added a candidate +- // of "[]int{}". +- +- // TODO(adonovan): think about aliases: +- // they should probably be treated more like Named. +- // Should this use Deref not Unpointer? +- if is[*types.Named](types.Unalias(literalType)) && +- expType != nil && +- !is[*types.Named](types.Unalias(typesinternal.Unpointer(expType))) { +- +- return +- } +- +- // Check if an object of type literalType would match our expected type. +- cand := candidate{ +- obj: c.fakeObj(literalType), +- } +- +- switch literalType.Underlying().(type) { +- // These literal types are addressable (e.g. "&[]int{}"), others are +- // not (e.g. can't do "&(func(){})"). +- case *types.Struct, *types.Array, *types.Slice, *types.Map: +- cand.addressable = true +- } +- +- // Only suggest a literal conversion if the exact type is known. +- if !c.matchingCandidate(&cand) || (cand.convertTo != nil && !c.inference.needsExactType) { +- return +- } +- +- var ( +- qual = c.qual +- sel = enclosingSelector(c.path, c.pos) +- conversion conversionEdits +- ) +- +- if cand.convertTo != nil { +- conversion = c.formatConversion(cand.convertTo) +- } +- +- // Don't qualify the type name if we are in a selector expression +- // since the package name is already present. +- if sel != nil { +- qual = func(_ *types.Package) string { return "" } +- } +- +- snip, typeName := c.typeNameSnippet(literalType, qual) +- +- // A type name of "[]int" doesn't work very will with the matcher +- // since "[" isn't a valid identifier prefix. Here we strip off the +- // slice (and array) prefix yielding just "int". +- matchName := typeName +- switch t := literalType.(type) { +- case *types.Slice: +- matchName = types.TypeString(t.Elem(), qual) +- case *types.Array: +- matchName = types.TypeString(t.Elem(), qual) +- } +- +- addlEdits, err := c.importEdits(imp) +- if err != nil { +- event.Error(ctx, "error adding import for literal candidate", err) +- return +- } +- +- // If prefix matches the type name, client may want a composite literal. 
+- if score := c.matcher.Score(matchName); score > 0 { +- if cand.hasMod(reference) { +- if sel != nil { +- // If we are in a selector we must place the "&" before the selector. +- // For example, "foo.B<>" must complete to "&foo.Bar{}", not +- // "foo.&Bar{}". +- edits, err := c.editText(sel.Pos(), sel.Pos(), "&") +- if err != nil { +- event.Error(ctx, "error making edit for literal pointer completion", err) +- return +- } +- addlEdits = append(addlEdits, edits...) +- } else { +- // Otherwise we can stick the "&" directly before the type name. +- typeName = "&" + typeName +- snip.PrependText("&") +- } +- } +- +- switch t := literalType.Underlying().(type) { +- case *types.Struct, *types.Array, *types.Slice, *types.Map: +- item := c.compositeLiteral(t, snip.Clone(), typeName, float64(score), addlEdits) +- item.addConversion(c, conversion) +- c.items = append(c.items, item) +- case *types.Signature: +- // Add a literal completion for a signature type that implements +- // an interface. For example, offer "http.HandlerFunc()" when +- // expected type is "http.Handler". +- if expType != nil && types.IsInterface(expType) { +- if item, ok := c.basicLiteral(t, snip.Clone(), typeName, float64(score), addlEdits); ok { +- item.addConversion(c, conversion) +- c.items = append(c.items, item) +- } +- } +- case *types.Basic: +- // Add a literal completion for basic types that implement our +- // expected interface (e.g. named string type http.Dir +- // implements http.FileSystem), or are identical to our expected +- // type (i.e. yielding a type conversion such as "float64()"). +- if expType != nil && (types.IsInterface(expType) || types.Identical(expType, literalType)) { +- if item, ok := c.basicLiteral(t, snip.Clone(), typeName, float64(score), addlEdits); ok { +- item.addConversion(c, conversion) +- c.items = append(c.items, item) +- } +- } +- } +- } +- +- // If prefix matches "make", client may want a "make()" +- // invocation. We also include the type name to allow for more +- // flexible fuzzy matching. +- if score := c.matcher.Score("make." + matchName); !cand.hasMod(reference) && score > 0 { +- switch literalType.Underlying().(type) { +- case *types.Slice: +- // The second argument to "make()" for slices is required, so default to "0". +- item := c.makeCall(snip.Clone(), typeName, "0", float64(score), addlEdits) +- item.addConversion(c, conversion) +- c.items = append(c.items, item) +- case *types.Map, *types.Chan: +- // Maps and channels don't require the second argument, so omit +- // to keep things simple for now. +- item := c.makeCall(snip.Clone(), typeName, "", float64(score), addlEdits) +- item.addConversion(c, conversion) +- c.items = append(c.items, item) +- } +- } +- +- // If prefix matches "func", client may want a function literal. +- if score := c.matcher.Score("func"); !cand.hasMod(reference) && score > 0 && (expType == nil || !types.IsInterface(expType)) { +- switch t := literalType.Underlying().(type) { +- case *types.Signature: +- if item, ok := c.functionLiteral(ctx, t, float64(score)); ok { +- item.addConversion(c, conversion) +- c.items = append(c.items, item) +- } +- } +- } +-} +- +-// literalCandidateScore is the base score for literal candidates. +-// Literal candidates match the expected type so they should be high +-// scoring, but we want them ranked below lexical objects of the +-// correct type, so scale down highScore. 
+-const literalCandidateScore = highScore / 2 +- +-// functionLiteral returns a function literal completion item for the +-// given signature, if applicable. +-func (c *completer) functionLiteral(ctx context.Context, sig *types.Signature, matchScore float64) (CompletionItem, bool) { +- snip := &snippet.Builder{} +- snip.WriteText("func(") +- +- // First we generate names for each param and keep a seen count so +- // we know if we need to uniquify param names. For example, +- // "func(int)" will become "func(i int)", but "func(int, int64)" +- // will become "func(i1 int, i2 int64)". +- var ( +- paramNames = make([]string, sig.Params().Len()) +- paramNameCount = make(map[string]int) +- hasTypeParams bool +- ) +- for i := range sig.Params().Len() { +- var ( +- p = sig.Params().At(i) +- name = p.Name() +- ) +- +- if tp, _ := types.Unalias(p.Type()).(*types.TypeParam); tp != nil && !c.typeParamInScope(tp) { +- hasTypeParams = true +- } +- +- if name == "" { +- // If the param has no name in the signature, guess a name based +- // on the type. Use an empty qualifier to ignore the package. +- // For example, we want to name "http.Request" "r", not "hr". +- typeName, err := golang.FormatVarType(ctx, c.snapshot, c.pkg, p, +- func(p *types.Package) string { return "" }, +- func(golang.PackageName, golang.ImportPath, golang.PackagePath) string { return "" }) +- if err != nil { +- // In general, the only error we should encounter while formatting is +- // context cancellation. +- if ctx.Err() == nil { +- event.Error(ctx, "formatting var type", err) +- } +- return CompletionItem{}, false +- } +- name = abbreviateTypeName(typeName) +- } +- paramNames[i] = name +- if name != "_" { +- paramNameCount[name]++ +- } +- } +- +- for n, c := range paramNameCount { +- // Any names we saw more than once will need a unique suffix added +- // on. Reset the count to 1 to act as the suffix for the first +- // name. +- if c >= 2 { +- paramNameCount[n] = 1 +- } else { +- delete(paramNameCount, n) +- } +- } +- +- for i := range sig.Params().Len() { +- if hasTypeParams && !c.opts.placeholders { +- // If there are type params in the args then the user must +- // choose the concrete types. If placeholders are disabled just +- // drop them between the parens and let them fill things in. +- snip.WritePlaceholder(nil) +- break +- } +- +- if i > 0 { +- snip.WriteText(", ") +- } +- +- var ( +- p = sig.Params().At(i) +- name = paramNames[i] +- ) +- +- // Uniquify names by adding on an incrementing numeric suffix. +- if idx, found := paramNameCount[name]; found { +- paramNameCount[name]++ +- name = fmt.Sprintf("%s%d", name, idx) +- } +- +- if name != p.Name() && c.opts.placeholders { +- // If we didn't use the signature's param name verbatim then we +- // may have chosen a poor name. Give the user a placeholder so +- // they can easily fix the name. +- snip.WritePlaceholder(func(b *snippet.Builder) { +- b.WriteText(name) +- }) +- } else { +- snip.WriteText(name) +- } +- +- // If the following param's type is identical to this one, omit +- // this param's type string. For example, emit "i, j int" instead +- // of "i int, j int". +- if i == sig.Params().Len()-1 || !types.Identical(p.Type(), sig.Params().At(i+1).Type()) { +- snip.WriteText(" ") +- typeStr, err := golang.FormatVarType(ctx, c.snapshot, c.pkg, p, c.qual, c.mq) +- if err != nil { +- // In general, the only error we should encounter while formatting is +- // context cancellation. 
+- if ctx.Err() == nil { +- event.Error(ctx, "formatting var type", err) +- } +- return CompletionItem{}, false +- } +- if sig.Variadic() && i == sig.Params().Len()-1 { +- typeStr = strings.Replace(typeStr, "[]", "...", 1) +- } +- +- if tp, ok := types.Unalias(p.Type()).(*types.TypeParam); ok && !c.typeParamInScope(tp) { +- snip.WritePlaceholder(func(snip *snippet.Builder) { +- snip.WriteText(typeStr) +- }) +- } else { +- snip.WriteText(typeStr) +- } +- } +- } +- snip.WriteText(")") +- +- results := sig.Results() +- if results.Len() > 0 { +- snip.WriteText(" ") +- } +- +- resultsNeedParens := results.Len() > 1 || +- results.Len() == 1 && results.At(0).Name() != "" +- +- var resultHasTypeParams bool +- for i := range results.Len() { +- if tp, ok := types.Unalias(results.At(i).Type()).(*types.TypeParam); ok && !c.typeParamInScope(tp) { +- resultHasTypeParams = true +- } +- } +- +- if resultsNeedParens { +- snip.WriteText("(") +- } +- for i := range results.Len() { +- if resultHasTypeParams && !c.opts.placeholders { +- // Leave an empty tabstop if placeholders are disabled and there +- // are type args that need specifying. +- snip.WritePlaceholder(nil) +- break +- } +- +- if i > 0 { +- snip.WriteText(", ") +- } +- r := results.At(i) +- if name := r.Name(); name != "" { +- snip.WriteText(name + " ") +- } +- +- text, err := golang.FormatVarType(ctx, c.snapshot, c.pkg, r, c.qual, c.mq) +- if err != nil { +- // In general, the only error we should encounter while formatting is +- // context cancellation. +- if ctx.Err() == nil { +- event.Error(ctx, "formatting var type", err) +- } +- return CompletionItem{}, false +- } +- if tp, ok := types.Unalias(r.Type()).(*types.TypeParam); ok && !c.typeParamInScope(tp) { +- snip.WritePlaceholder(func(snip *snippet.Builder) { +- snip.WriteText(text) +- }) +- } else { +- snip.WriteText(text) +- } +- } +- if resultsNeedParens { +- snip.WriteText(")") +- } +- +- snip.WriteText(" {") +- snip.WriteFinalTabstop() +- snip.WriteText("}") +- +- return CompletionItem{ +- Label: "func(...) {}", +- Score: matchScore * literalCandidateScore, +- Kind: protocol.VariableCompletion, +- snippet: snip, +- }, true +-} +- +-// conventionalAcronyms contains conventional acronyms for type names +-// in lower case. For example, "ctx" for "context" and "err" for "error". +-// +-// Keep this up to date with golang.conventionalVarNames. +-var conventionalAcronyms = map[string]string{ +- "context": "ctx", +- "error": "err", +- "tx": "tx", +- "responsewriter": "w", +-} +- +-// abbreviateTypeName abbreviates type names into acronyms. For +-// example, "fooBar" is abbreviated "fb". Care is taken to ignore +-// non-identifier runes. For example, "[]int" becomes "i", and +-// "struct { i int }" becomes "s". +-func abbreviateTypeName(s string) string { +- // Trim off leading non-letters. We trim everything between "[" and +- // "]" to handle array types like "[someConst]int". +- var inBracket bool +- s = strings.TrimFunc(s, func(r rune) bool { +- if inBracket { +- inBracket = r != ']' +- return true +- } +- +- if r == '[' { +- inBracket = true +- } +- +- return !unicode.IsLetter(r) +- }) +- +- if acr, ok := conventionalAcronyms[strings.ToLower(s)]; ok { +- return acr +- } +- +- return golang.AbbreviateVarName(s) +-} +- +-// compositeLiteral returns a composite literal completion item for the given typeName. +-// T is an (unnamed, unaliased) struct, array, slice, or map type. 
+-func (c *completer) compositeLiteral(T types.Type, snip *snippet.Builder, typeName string, matchScore float64, edits []protocol.TextEdit) CompletionItem { +- snip.WriteText("{") +- // Don't put the tab stop inside the composite literal curlies "{}" +- // for structs that have no accessible fields. +- if strct, ok := T.(*types.Struct); !ok || fieldsAccessible(strct, c.pkg.Types()) { +- snip.WriteFinalTabstop() +- } +- snip.WriteText("}") +- +- nonSnippet := typeName + "{}" +- +- return CompletionItem{ +- Label: nonSnippet, +- InsertText: nonSnippet, +- Score: matchScore * literalCandidateScore, +- Kind: protocol.VariableCompletion, +- AdditionalTextEdits: edits, +- snippet: snip, +- } +-} +- +-// basicLiteral returns a literal completion item for the given basic +-// type name typeName. +-// +-// If T is untyped, this function returns false. +-func (c *completer) basicLiteral(T types.Type, snip *snippet.Builder, typeName string, matchScore float64, edits []protocol.TextEdit) (CompletionItem, bool) { +- // Never give type conversions like "untyped int()". +- if isUntyped(T) { +- return CompletionItem{}, false +- } +- +- snip.WriteText("(") +- snip.WriteFinalTabstop() +- snip.WriteText(")") +- +- nonSnippet := typeName + "()" +- +- return CompletionItem{ +- Label: nonSnippet, +- InsertText: nonSnippet, +- Detail: T.String(), +- Score: matchScore * literalCandidateScore, +- Kind: protocol.VariableCompletion, +- AdditionalTextEdits: edits, +- snippet: snip, +- }, true +-} +- +-// makeCall returns a completion item for a "make()" call given a specific type. +-func (c *completer) makeCall(snip *snippet.Builder, typeName string, secondArg string, matchScore float64, edits []protocol.TextEdit) CompletionItem { +- // Keep it simple and don't add any placeholders for optional "make()" arguments. +- +- snip.PrependText("make(") +- if secondArg != "" { +- snip.WriteText(", ") +- snip.WritePlaceholder(func(b *snippet.Builder) { +- if c.opts.placeholders { +- b.WriteText(secondArg) +- } +- }) +- } +- snip.WriteText(")") +- +- var nonSnippet strings.Builder +- nonSnippet.WriteString("make(" + typeName) +- if secondArg != "" { +- nonSnippet.WriteString(", ") +- nonSnippet.WriteString(secondArg) +- } +- nonSnippet.WriteByte(')') +- +- return CompletionItem{ +- Label: nonSnippet.String(), +- InsertText: nonSnippet.String(), +- // make() should be just below other literal completions +- Score: matchScore * literalCandidateScore * 0.99, +- Kind: protocol.FunctionCompletion, +- AdditionalTextEdits: edits, +- snippet: snip, +- } +-} +- +-// Create a snippet for a type name where type params become placeholders. +-func (c *completer) typeNameSnippet(literalType types.Type, qual types.Qualifier) (*snippet.Builder, string) { +- var ( +- snip snippet.Builder +- typeName string +- tparams *types.TypeParamList +- ) +- +- t, ok := literalType.(typesinternal.NamedOrAlias) // = *Named | *Alias +- if ok { +- tparams = t.TypeParams() +- } +- if tparams.Len() > 0 && !c.fullyInstantiated(t) { +- // tparams.Len() > 0 implies t != nil. +- // Inv: t is not "error" or "unsafe.Pointer", so t.Obj() != nil and has a Pkg(). +- +- // We are not "fully instantiated" meaning we have type params that must be specified. +- if pkg := qual(t.Obj().Pkg()); pkg != "" { +- typeName = pkg + "." +- } +- +- // We do this to get "someType" instead of "someType[T]". 
+- typeName += t.Obj().Name() +- snip.WriteText(typeName + "[") +- +- if c.opts.placeholders { +- for i := range tparams.Len() { +- if i > 0 { +- snip.WriteText(", ") +- } +- snip.WritePlaceholder(func(snip *snippet.Builder) { +- snip.WriteText(types.TypeString(tparams.At(i), qual)) +- }) +- } +- } else { +- snip.WritePlaceholder(nil) +- } +- snip.WriteText("]") +- typeName += "[...]" +- } else { +- // We don't have unspecified type params so use default type formatting. +- typeName = types.TypeString(literalType, qual) +- snip.WriteText(typeName) +- } +- +- return &snip, typeName +-} +- +-// fullyInstantiated reports whether all of t's type params have +-// specified type args. +-func (c *completer) fullyInstantiated(t typesinternal.NamedOrAlias) bool { +- targs := t.TypeArgs() +- tparams := t.TypeParams() +- +- if tparams.Len() != targs.Len() { +- return false +- } +- +- for i := range targs.Len() { +- targ := targs.At(i) +- +- // The expansion of an alias can have free type parameters, +- // whether or not the alias itself has type parameters: +- // +- // func _[K comparable]() { +- // type Set = map[K]bool // free(Set) = {K} +- // type MapTo[V] = map[K]V // free(Map[foo]) = {V} +- // } +- // +- // So, we must Unalias. +- switch targ := types.Unalias(targ).(type) { +- case *types.TypeParam: +- // A *TypeParam only counts as specified if it is currently in +- // scope (i.e. we are in a generic definition). +- if !c.typeParamInScope(targ) { +- return false +- } +- case *types.Named: +- if !c.fullyInstantiated(targ) { +- return false +- } +- } +- } +- return true +-} +- +-// typeParamInScope returns whether tp's object is in scope at c.pos. +-// This tells you whether you are in a generic definition and can +-// assume tp has been specified. +-func (c *completer) typeParamInScope(tp *types.TypeParam) bool { +- obj := tp.Obj() +- if obj == nil { +- return false +- } +- +- scope := c.innermostScope() +- if scope == nil { +- return false +- } +- +- _, foundObj := scope.LookupParent(obj.Name(), c.pos) +- return obj == foundObj +-} +diff -urN a/gopls/internal/golang/completion/newfile.go b/gopls/internal/golang/completion/newfile.go +--- a/gopls/internal/golang/completion/newfile.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/golang/completion/newfile.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,72 +0,0 @@ +-// Copyright 2025 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package completion +- +-import ( +- "bytes" +- "context" +- "fmt" +- +- "golang.org/x/tools/gopls/internal/cache" +- "golang.org/x/tools/gopls/internal/cache/parsego" +- "golang.org/x/tools/gopls/internal/file" +- "golang.org/x/tools/gopls/internal/golang" +- "golang.org/x/tools/gopls/internal/protocol" +-) +- +-// NewFile returns a document change to complete an empty Go source file. Document change may be nil. +-func NewFile(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle) (*protocol.DocumentChange, error) { +- if !snapshot.Options().NewGoFileHeader { +- return nil, nil +- } +- content, err := fh.Content() +- if err != nil { +- return nil, err +- } +- if len(content) != 0 { +- return nil, fmt.Errorf("file is not empty") +- } +- meta, err := snapshot.NarrowestMetadataForFile(ctx, fh.URI()) +- if err != nil { +- return nil, err +- } +- var buf bytes.Buffer +- // Copy the copyright header from the first existing file that has one. 
+- for _, fileURI := range meta.GoFiles { +- if fileURI == fh.URI() { +- continue +- } +- fh, err := snapshot.ReadFile(ctx, fileURI) +- if err != nil { +- continue +- } +- pgf, err := snapshot.ParseGo(ctx, fh, parsego.Header) +- if err != nil { +- continue +- } +- if group := golang.CopyrightComment(pgf.File); group != nil { +- text, err := pgf.NodeText(group) +- if err != nil { +- continue +- } +- buf.Write(text) +- buf.WriteString("\n\n") +- break +- } +- } +- +- pkgName, err := bestPackage(ctx, snapshot, fh.URI()) +- if err != nil { +- return nil, err +- } +- +- fmt.Fprintf(&buf, "package %s\n", pkgName) +- change := protocol.DocumentChangeEdit(fh, []protocol.TextEdit{{ +- Range: protocol.Range{}, // insert at start of file +- NewText: buf.String(), +- }}) +- +- return &change, nil +-} +diff -urN a/gopls/internal/golang/completion/package.go b/gopls/internal/golang/completion/package.go +--- a/gopls/internal/golang/completion/package.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/golang/completion/package.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,368 +0,0 @@ +-// Copyright 2020 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package completion +- +-import ( +- "bytes" +- "context" +- "errors" +- "fmt" +- "go/ast" +- "go/parser" +- "go/scanner" +- "go/token" +- "go/types" +- "path/filepath" +- "sort" +- "strings" +- "unicode" +- +- "golang.org/x/tools/gopls/internal/cache" +- "golang.org/x/tools/gopls/internal/cache/parsego" +- "golang.org/x/tools/gopls/internal/file" +- "golang.org/x/tools/gopls/internal/fuzzy" +- "golang.org/x/tools/gopls/internal/golang" +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/gopls/internal/util/safetoken" +-) +- +-// bestPackage offers the best package name for a package declaration when +-// one is not present in the given file. +-func bestPackage(ctx context.Context, snapshot *cache.Snapshot, uri protocol.DocumentURI) (string, error) { +- suggestions, err := packageSuggestions(ctx, snapshot, uri, "") +- if err != nil { +- return "", err +- } +- // sort with the same way of sortItems. +- sort.SliceStable(suggestions, func(i, j int) bool { +- if suggestions[i].score != suggestions[j].score { +- return suggestions[i].score > suggestions[j].score +- } +- return suggestions[i].name < suggestions[j].name +- }) +- +- return suggestions[0].name, nil +-} +- +-// packageClauseCompletions offers completions for a package declaration when +-// one is not present in the given file. +-func packageClauseCompletions(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle, position protocol.Position) ([]CompletionItem, *Selection, error) { +- // We know that the AST for this file will be empty due to the missing +- // package declaration, but parse it anyway to get a mapper. +- // TODO(adonovan): opt: there's no need to parse just to get a mapper. 
+- pgf, err := snapshot.ParseGo(ctx, fh, parsego.Full) +- if err != nil { +- return nil, nil, err +- } +- +- offset, err := pgf.Mapper.PositionOffset(position) +- if err != nil { +- return nil, nil, err +- } +- surrounding, err := packageCompletionSurrounding(pgf, offset) +- if err != nil { +- return nil, nil, fmt.Errorf("invalid position for package completion: %w", err) +- } +- +- packageSuggestions, err := packageSuggestions(ctx, snapshot, fh.URI(), "") +- if err != nil { +- return nil, nil, err +- } +- +- var items []CompletionItem +- for _, pkg := range packageSuggestions { +- insertText := fmt.Sprintf("package %s", pkg.name) +- items = append(items, CompletionItem{ +- Label: insertText, +- Kind: protocol.ModuleCompletion, +- InsertText: insertText, +- Score: pkg.score, +- }) +- } +- sortItems(items) +- return items, surrounding, nil +-} +- +-// packageCompletionSurrounding returns surrounding for package completion if a +-// package completions can be suggested at a given cursor offset. A valid location +-// for package completion is above any declarations or import statements. +-func packageCompletionSurrounding(pgf *parsego.File, offset int) (*Selection, error) { +- m := pgf.Mapper +- // If the file lacks a package declaration, the parser will return an empty +- // AST. As a work-around, try to parse an expression from the file contents. +- fset := token.NewFileSet() +- expr, _ := parser.ParseExprFrom(fset, m.URI.Path(), pgf.Src, parser.Mode(0)) +- if expr == nil { +- return nil, fmt.Errorf("unparsable file (%s)", m.URI) +- } +- tok := fset.File(expr.Pos()) +- cursor := tok.Pos(offset) +- +- // If we were able to parse out an identifier as the first expression from +- // the file, it may be the beginning of a package declaration ("pack "). +- // We can offer package completions if the cursor is in the identifier. +- if name, ok := expr.(*ast.Ident); ok { +- if cursor >= name.Pos() && cursor <= name.End() { +- if !strings.HasPrefix(PACKAGE, name.Name) { +- return nil, fmt.Errorf("cursor in non-matching ident") +- } +- return &Selection{ +- content: name.Name, +- cursor: cursor, +- tokFile: tok, +- start: name.Pos(), +- end: name.End(), +- mapper: m, +- }, nil +- } +- } +- +- // The file is invalid, but it contains an expression that we were able to +- // parse. We will use this expression to construct the cursor's +- // "surrounding". +- +- // First, consider the possibility that we have a valid "package" keyword +- // with an empty package name ("package "). "package" is parsed as an +- // *ast.BadDecl since it is a keyword. +- start, err := safetoken.Offset(tok, expr.Pos()) +- if err != nil { +- return nil, err +- } +- if offset > start && string(bytes.TrimRight(pgf.Src[start:offset], " ")) == PACKAGE { +- return &Selection{ +- content: string(pgf.Src[start:offset]), +- cursor: cursor, +- tokFile: tok, +- start: expr.Pos(), +- end: cursor, +- mapper: m, +- }, nil +- } +- +- // If the cursor is after the start of the expression, no package +- // declaration will be valid. +- if cursor > expr.Pos() { +- return nil, fmt.Errorf("cursor after expression") +- } +- +- // If the cursor is in a comment, don't offer any completions. +- if cursorInComment(tok, cursor, m.Content) { +- return nil, fmt.Errorf("cursor in comment") +- } +- +- // The surrounding range in this case is the cursor. 
+- return &Selection{ +- content: "", +- tokFile: tok, +- start: cursor, +- end: cursor, +- cursor: cursor, +- mapper: m, +- }, nil +-} +- +-func cursorInComment(file *token.File, cursor token.Pos, src []byte) bool { +- var s scanner.Scanner +- s.Init(file, src, func(_ token.Position, _ string) {}, scanner.ScanComments) +- for { +- pos, tok, lit := s.Scan() +- if pos <= cursor && cursor <= token.Pos(int(pos)+len(lit)) { +- return tok == token.COMMENT +- } +- if tok == token.EOF { +- break +- } +- } +- return false +-} +- +-// packageNameCompletions returns name completions for a package clause using +-// the current name as prefix. +-func (c *completer) packageNameCompletions(ctx context.Context, fileURI protocol.DocumentURI, name *ast.Ident) error { +- cursor := int(c.pos - name.NamePos) +- if cursor < 0 || cursor > len(name.Name) { +- return errors.New("cursor is not in package name identifier") +- } +- +- c.completionContext.packageCompletion = true +- +- prefix := name.Name[:cursor] +- packageSuggestions, err := packageSuggestions(ctx, c.snapshot, fileURI, prefix) +- if err != nil { +- return err +- } +- +- for _, pkg := range packageSuggestions { +- c.deepState.enqueue(pkg) +- } +- return nil +-} +- +-// packageSuggestions returns a list of packages from workspace packages that +-// have the given prefix and are used in the same directory as the given +-// file. This also includes test packages for these packages (_test) and +-// the directory name itself. +-func packageSuggestions(ctx context.Context, snapshot *cache.Snapshot, fileURI protocol.DocumentURI, prefix string) (packages []candidate, err error) { +- active, err := snapshot.WorkspaceMetadata(ctx) +- if err != nil { +- return nil, err +- } +- +- toCandidate := func(name string, score float64) candidate { +- obj := types.NewPkgName(0, nil, name, types.NewPackage("", name)) +- return candidate{obj: obj, name: name, detail: name, score: score} +- } +- +- var currentPackageName string +- // TODO: consider propagating error. +- if md, err := snapshot.NarrowestMetadataForFile(ctx, fileURI); err == nil { +- currentPackageName = string(md.Name) +- } +- +- matcher := fuzzy.NewMatcher(prefix) +- +- // Always try to suggest a main package +- defer func() { +- mainScore := lowScore +- if currentPackageName == "main" { +- mainScore = highScore +- } +- if score := float64(matcher.Score("main")); score > 0 { +- packages = append(packages, toCandidate("main", score*mainScore)) +- } +- }() +- +- dirPath := fileURI.DirPath() +- dirName := filepath.Base(dirPath) +- if !isValidDirName(dirName) { +- return packages, nil +- } +- pkgName := convertDirNameToPkgName(dirName) +- +- seenPkgs := make(map[golang.PackageName]struct{}) +- +- // The `go` command by default only allows one package per directory but we +- // support multiple package suggestions since gopls is build system agnostic. +- for _, mp := range active { +- if mp.Name == "main" || mp.Name == "" { +- continue +- } +- if _, ok := seenPkgs[mp.Name]; ok { +- continue +- } +- +- // Only add packages that are previously used in the current directory. +- var relevantPkg bool +- for _, uri := range mp.CompiledGoFiles { +- if uri.DirPath() == dirPath { +- relevantPkg = true +- break +- } +- } +- if !relevantPkg { +- continue +- } +- +- // Add a found package used in current directory as a high relevance +- // suggestion and the test package for it as a medium relevance +- // suggestion. 
+- if score := float64(matcher.Score(string(mp.Name))); score > 0 { +- packages = append(packages, toCandidate(string(mp.Name), score*highScore)) +- } +- seenPkgs[mp.Name] = struct{}{} +- +- testPkgName := mp.Name + "_test" +- if _, ok := seenPkgs[testPkgName]; ok || strings.HasSuffix(string(mp.Name), "_test") { +- continue +- } +- if score := float64(matcher.Score(string(testPkgName))); score > 0 { +- packages = append(packages, toCandidate(string(testPkgName), score*stdScore)) +- } +- seenPkgs[testPkgName] = struct{}{} +- } +- +- if _, ok := seenPkgs[pkgName]; !ok { +- // Add current directory name as a low relevance suggestion. +- dirNameScore := lowScore +- // if current package name is empty, the dir name is the best choice. +- if currentPackageName == "" { +- dirNameScore = highScore +- } +- if score := float64(matcher.Score(string(pkgName))); score > 0 { +- packages = append(packages, toCandidate(string(pkgName), score*dirNameScore)) +- } +- +- testPkgName := pkgName + "_test" +- if score := float64(matcher.Score(string(testPkgName))); score > 0 { +- packages = append(packages, toCandidate(string(testPkgName), score*dirNameScore)) +- } +- } +- +- return packages, nil +-} +- +-// isValidDirName checks whether the passed directory name can be used in +-// a package path. Requirements for a package path can be found here: +-// https://golang.org/ref/mod#go-mod-file-ident. +-func isValidDirName(dirName string) bool { +- if dirName == "" { +- return false +- } +- +- for i, ch := range dirName { +- if isLetter(ch) || isDigit(ch) { +- continue +- } +- if i == 0 { +- // Directory name can start only with '_'. '.' is not allowed in module paths. +- // '-' and '~' are not allowed because elements of package paths must be +- // safe command-line arguments. +- if ch == '_' { +- continue +- } +- } else { +- // Modules path elements can't end with '.' +- if isAllowedPunctuation(ch) && (i != len(dirName)-1 || ch != '.') { +- continue +- } +- } +- +- return false +- } +- return true +-} +- +-// convertDirNameToPkgName converts a valid directory name to a valid package name. +-// It leaves only letters and digits. All letters are mapped to lower case. +-func convertDirNameToPkgName(dirName string) golang.PackageName { +- var buf bytes.Buffer +- for _, ch := range dirName { +- switch { +- case isLetter(ch): +- buf.WriteRune(unicode.ToLower(ch)) +- +- case buf.Len() != 0 && isDigit(ch): +- buf.WriteRune(ch) +- } +- } +- return golang.PackageName(buf.String()) +-} +- +-// isLetter and isDigit allow only ASCII characters because +-// "Each path element is a non-empty string made of up ASCII letters, +-// ASCII digits, and limited ASCII punctuation" +-// (see https://golang.org/ref/mod#go-mod-file-ident). +- +-func isLetter(ch rune) bool { +- return 'a' <= ch && ch <= 'z' || 'A' <= ch && ch <= 'Z' +-} +- +-func isDigit(ch rune) bool { +- return '0' <= ch && ch <= '9' +-} +- +-func isAllowedPunctuation(ch rune) bool { +- return ch == '_' || ch == '-' || ch == '~' || ch == '.' +-} +diff -urN a/gopls/internal/golang/completion/package_test.go b/gopls/internal/golang/completion/package_test.go +--- a/gopls/internal/golang/completion/package_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/golang/completion/package_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,81 +0,0 @@ +-// Copyright 2021 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. 
+- +-package completion +- +-import ( +- "testing" +- +- "golang.org/x/tools/gopls/internal/golang" +-) +- +-func TestIsValidDirName(t *testing.T) { +- tests := []struct { +- dirName string +- valid bool +- }{ +- {dirName: "", valid: false}, +- // +- {dirName: "a", valid: true}, +- {dirName: "abcdef", valid: true}, +- {dirName: "AbCdEf", valid: true}, +- // +- {dirName: "1a35", valid: true}, +- {dirName: "a16", valid: true}, +- // +- {dirName: "_a", valid: true}, +- {dirName: "a_", valid: true}, +- // +- {dirName: "~a", valid: false}, +- {dirName: "a~", valid: true}, +- // +- {dirName: "-a", valid: false}, +- {dirName: "a-", valid: true}, +- // +- {dirName: ".a", valid: false}, +- {dirName: "a.", valid: false}, +- // +- {dirName: "a~_b--c.-e", valid: true}, +- {dirName: "~a~_b--c.-e", valid: false}, +- {dirName: "a~_b--c.-e--~", valid: true}, +- {dirName: "a~_b--2134dc42.-e6--~", valid: true}, +- {dirName: "abc`def", valid: false}, +- {dirName: "тест", valid: false}, +- {dirName: "你好", valid: false}, +- } +- for _, tt := range tests { +- valid := isValidDirName(tt.dirName) +- if tt.valid != valid { +- t.Errorf("%s: expected %v, got %v", tt.dirName, tt.valid, valid) +- } +- } +-} +- +-func TestConvertDirNameToPkgName(t *testing.T) { +- tests := []struct { +- dirName string +- pkgName golang.PackageName +- }{ +- {dirName: "a", pkgName: "a"}, +- {dirName: "abcdef", pkgName: "abcdef"}, +- {dirName: "AbCdEf", pkgName: "abcdef"}, +- {dirName: "1a35", pkgName: "a35"}, +- {dirName: "14a35", pkgName: "a35"}, +- {dirName: "a16", pkgName: "a16"}, +- {dirName: "_a", pkgName: "a"}, +- {dirName: "a_", pkgName: "a"}, +- {dirName: "a~", pkgName: "a"}, +- {dirName: "a-", pkgName: "a"}, +- {dirName: "a~_b--c.-e", pkgName: "abce"}, +- {dirName: "a~_b--c.-e--~", pkgName: "abce"}, +- {dirName: "a~_b--2134dc42.-e6--~", pkgName: "ab2134dc42e6"}, +- } +- for _, tt := range tests { +- pkgName := convertDirNameToPkgName(tt.dirName) +- if tt.pkgName != pkgName { +- t.Errorf("%s: expected %v, got %v", tt.dirName, tt.pkgName, pkgName) +- continue +- } +- } +-} +diff -urN a/gopls/internal/golang/completion/postfix_snippets.go b/gopls/internal/golang/completion/postfix_snippets.go +--- a/gopls/internal/golang/completion/postfix_snippets.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/golang/completion/postfix_snippets.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,704 +0,0 @@ +-// Copyright 2020 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package completion +- +-import ( +- "context" +- "fmt" +- "go/ast" +- "go/token" +- "go/types" +- "log" +- "reflect" +- "strings" +- "sync" +- "text/template" +- +- "golang.org/x/tools/gopls/internal/cache/metadata" +- "golang.org/x/tools/gopls/internal/golang" +- "golang.org/x/tools/gopls/internal/golang/completion/snippet" +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/gopls/internal/util/safetoken" +- "golang.org/x/tools/internal/event" +- "golang.org/x/tools/internal/imports" +- "golang.org/x/tools/internal/typesinternal" +-) +- +-// Postfix snippets are artificial methods that allow the user to +-// compose common operations in an "argument oriented" fashion. For +-// example, instead of "sort.Slice(someSlice, ...)" a user can expand +-// "someSlice.sort!". +- +-// postfixTmpl represents a postfix snippet completion candidate. +-type postfixTmpl struct { +- // label is the completion candidate's label presented to the user. 
+- label string +- +- // details is passed along to the client as the candidate's details. +- details string +- +- // body is the template text. See postfixTmplArgs for details on the +- // facilities available to the template. +- body string +- +- tmpl *template.Template +-} +- +-// postfixTmplArgs are the template execution arguments available to +-// the postfix snippet templates. +-type postfixTmplArgs struct { +- // StmtOK is true if it is valid to replace the selector with a +- // statement. For example: +- // +- // func foo() { +- // bar.sort! // statement okay +- // +- // someMethod(bar.sort!) // statement not okay +- // } +- StmtOK bool +- +- // X is the textual SelectorExpr.X. For example, when completing +- // "foo.bar.print!", "X" is "foo.bar". +- X string +- +- // Obj is the types.Object of SelectorExpr.X, if any. +- Obj types.Object +- +- // Type is the type of "foo.bar" in "foo.bar.print!". +- Type types.Type +- +- // FuncResults are results of the enclosed function +- FuncResults []*types.Var +- +- sel *ast.SelectorExpr +- scope *types.Scope +- snip snippet.Builder +- importIfNeeded func(pkgPath string, scope *types.Scope) (name string, edits []protocol.TextEdit, err error) +- edits []protocol.TextEdit +- qual types.Qualifier +- varNames map[string]bool +- placeholders bool +- currentTabStop int +-} +- +-var postfixTmpls = []postfixTmpl{{ +- label: "sort", +- details: "sort.Slice()", +- body: `{{if and (eq .Kind "slice") .StmtOK -}} +-{{.Import "sort"}}.Slice({{.X}}, func({{.VarName nil "i"}}, {{.VarName nil "j"}} int) bool { +- {{.Cursor}} +-}) +-{{- end}}`, +-}, { +- label: "last", +- details: "s[len(s)-1]", +- body: `{{if and (eq .Kind "slice") .Obj -}} +-{{.X}}[len({{.X}})-1] +-{{- end}}`, +-}, { +- label: "reverse", +- details: "reverse slice", +- body: `{{if and (eq .Kind "slice") .StmtOK -}} +-{{.Import "slices"}}.Reverse({{.X}}) +-{{- end}}`, +-}, { +- label: "range", +- details: "range over slice", +- body: `{{if and (eq .Kind "slice") .StmtOK -}} +-for {{.VarName nil "i" | .Placeholder }}, {{.VarName .ElemType "v" | .Placeholder}} := range {{.X}} { +- {{.Cursor}} +-} +-{{- end}}`, +-}, { +- label: "for", +- details: "range over slice by index", +- body: `{{if and (eq .Kind "slice") .StmtOK -}} +-for {{ .VarName nil "i" | .Placeholder }} := range {{.X}} { +- {{.Cursor}} +-} +-{{- end}}`, +-}, { +- label: "forr", +- details: "range over slice by index and value", +- body: `{{if and (eq .Kind "slice") .StmtOK -}} +-for {{.VarName nil "i" | .Placeholder }}, {{.VarName .ElemType "v" | .Placeholder }} := range {{.X}} { +- {{.Cursor}} +-} +-{{- end}}`, +-}, { +- label: "append", +- details: "append and re-assign slice", +- body: `{{if and (eq .Kind "slice") .StmtOK .Obj -}} +-{{.X}} = append({{.X}}, {{.Cursor}}) +-{{- end}}`, +-}, { +- label: "append", +- details: "append to slice", +- body: `{{if and (eq .Kind "slice") (not .StmtOK) -}} +-append({{.X}}, {{.Cursor}}) +-{{- end}}`, +-}, { +- label: "copy", +- details: "duplicate slice", +- body: `{{if and (eq .Kind "slice") .StmtOK .Obj -}} +-{{$v := (.VarName nil (printf "%sCopy" .X))}}{{$v}} := make([]{{.TypeName .ElemType}}, len({{.X}})) +-copy({{$v}}, {{.X}}) +-{{end}}`, +-}, { +- label: "range", +- details: "range over map", +- body: `{{if and (eq .Kind "map") .StmtOK -}} +-for {{.VarName .KeyType "k" | .Placeholder}}, {{.VarName .ElemType "v" | .Placeholder}} := range {{.X}} { +- {{.Cursor}} +-} +-{{- end}}`, +-}, { +- label: "for", +- details: "range over map by key", +- body: `{{if and (eq .Kind "map") .StmtOK -}} 
+-for {{.VarName .KeyType "k" | .Placeholder}} := range {{.X}} { +- {{.Cursor}} +-} +-{{- end}}`, +-}, { +- label: "forr", +- details: "range over map by key and value", +- body: `{{if and (eq .Kind "map") .StmtOK -}} +-for {{.VarName .KeyType "k" | .Placeholder}}, {{.VarName .ElemType "v" | .Placeholder}} := range {{.X}} { +- {{.Cursor}} +-} +-{{- end}}`, +-}, { +- label: "clear", +- details: "clear map contents", +- body: `{{if and (eq .Kind "map") .StmtOK -}} +-{{$k := (.VarName .KeyType "k")}}for {{$k}} := range {{.X}} { +- delete({{.X}}, {{$k}}) +-} +-{{end}}`, +-}, { +- label: "keys", +- details: "create slice of keys", +- body: `{{if and (eq .Kind "map") .StmtOK -}} +-{{$keysVar := (.VarName nil "keys")}}{{$keysVar}} := make([]{{.TypeName .KeyType}}, 0, len({{.X}})) +-{{$k := (.VarName .KeyType "k")}}for {{$k}} := range {{.X}} { +- {{$keysVar}} = append({{$keysVar}}, {{$k}}) +-} +-{{end}}`, +-}, { +- label: "range", +- details: "range over channel", +- body: `{{if and (eq .Kind "chan") .StmtOK -}} +-for {{.VarName .ElemType "e" | .Placeholder}} := range {{.X}} { +- {{.Cursor}} +-} +-{{- end}}`, +-}, { +- label: "for", +- details: "range over channel", +- body: `{{if and (eq .Kind "chan") .StmtOK -}} +-for {{.VarName .ElemType "e" | .Placeholder}} := range {{.X}} { +- {{.Cursor}} +-} +-{{- end}}`, +-}, { +- label: "var", +- details: "assign to variables", +- body: `{{if and (eq .Kind "tuple") .StmtOK -}} +-{{$a := .}}{{range $i, $v := .Tuple}}{{if $i}}, {{end}}{{$a.VarName $v.Type $v.Name | $a.Placeholder }}{{end}} := {{.X}} +-{{- end}}`, +-}, { +- label: "var", +- details: "assign to variable", +- body: `{{if and (ne .Kind "tuple") .StmtOK -}} +-{{.VarName .Type "" | .Placeholder }} := {{.X}} +-{{- end}}`, +-}, { +- label: "print", +- details: "print to stdout", +- body: `{{if and (ne .Kind "tuple") .StmtOK -}} +-{{.Import "fmt"}}.Printf("{{.EscapeQuotes .X}}: %v\n", {{.X}}) +-{{- end}}`, +-}, { +- label: "print", +- details: "print to stdout", +- body: `{{if and (eq .Kind "tuple") .StmtOK -}} +-{{.Import "fmt"}}.Println({{.X}}) +-{{- end}}`, +-}, { +- label: "split", +- details: "split string", +- body: `{{if (eq (.TypeName .Type) "string") -}} +-{{.Import "strings"}}.Split({{.X}}, "{{.Cursor}}") +-{{- end}}`, +-}, { +- label: "join", +- details: "join string slice", +- body: `{{if and (eq .Kind "slice") (eq (.TypeName .ElemType) "string") -}} +-{{.Import "strings"}}.Join({{.X}}, "{{.Cursor}}") +-{{- end}}`, +-}, { +- label: "ifnotnil", +- details: "if expr != nil", +- body: `{{if and (or (eq .Kind "pointer") (eq .Kind "chan") (eq .Kind "signature") (eq .Kind "interface") (eq .Kind "map") (eq .Kind "slice")) .StmtOK -}} +-if {{.X}} != nil { +- {{.Cursor}} +-} +-{{- end}}`, +-}, { +- label: "len", +- details: "len(s)", +- body: `{{if (eq .Kind "slice" "map" "array" "chan") -}} +-len({{.X}}) +-{{- end}}`, +-}, { +- label: "iferr", +- details: "check error and return", +- body: `{{if and .StmtOK (eq (.TypeName .Type) "error") -}} +-{{- $errName := (or (and .IsIdent .X) "err") -}} +-if {{if not .IsIdent}}err := {{.X}}; {{end}}{{$errName}} != nil { +- return {{$a := .}}{{range $i, $v := .FuncResults}} +- {{- if $i}}, {{end -}} +- {{- if eq ($a.TypeName $v.Type) "error" -}} +- {{$a.Placeholder $errName}} +- {{- else -}} +- {{$a.Zero $v.Type}} +- {{- end -}} +- {{end}} +-} +-{{end}}`, +-}, { +- label: "iferr", +- details: "check error and return", +- body: `{{if and .StmtOK (eq .Kind "tuple") (len .Tuple) (eq (.TypeName .TupleLast.Type) "error") -}} +-{{- $a := . 
-}} +-if {{range $i, $v := .Tuple}}{{if $i}}, {{end}}{{if and (eq ($a.TypeName $v.Type) "error") (eq (inc $i) (len $a.Tuple))}}err{{else}}_{{end}}{{end}} := {{.X -}} +-; err != nil { +- return {{range $i, $v := .FuncResults}} +- {{- if $i}}, {{end -}} +- {{- if eq ($a.TypeName $v.Type) "error" -}} +- {{$a.Placeholder "err"}} +- {{- else -}} +- {{$a.Zero $v.Type}} +- {{- end -}} +- {{end}} +-} +-{{end}}`, +-}, { +- // variferr snippets use nested placeholders, as described in +- // https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#snippet_syntax, +- // so that users can wrap the returned error without modifying the error +- // variable name. +- label: "variferr", +- details: "assign variables and check error", +- body: `{{if and .StmtOK (eq .Kind "tuple") (len .Tuple) (eq (.TypeName .TupleLast.Type) "error") -}} +-{{- $a := . -}} +-{{- $errName := "err" -}} +-{{- range $i, $v := .Tuple -}} +- {{- if $i}}, {{end -}} +- {{- if and (eq ($a.TypeName $v.Type) "error") (eq (inc $i) (len $a.Tuple)) -}} +- {{$errName | $a.SpecifiedPlaceholder (len $a.Tuple)}} +- {{- else -}} +- {{$a.VarName $v.Type $v.Name | $a.Placeholder}} +- {{- end -}} +-{{- end}} := {{.X}} +-if {{$errName | $a.SpecifiedPlaceholder (len $a.Tuple)}} != nil { +- return {{range $i, $v := .FuncResults}} +- {{- if $i}}, {{end -}} +- {{- if eq ($a.TypeName $v.Type) "error" -}} +- {{$errName | $a.SpecifiedPlaceholder (len $a.Tuple) | +- $a.SpecifiedPlaceholder (inc (len $a.Tuple))}} +- {{- else -}} +- {{$a.Zero $v.Type}} +- {{- end -}} +- {{end}} +-} +-{{end}}`, +-}, { +- label: "variferr", +- details: "assign variables and check error", +- body: `{{if and .StmtOK (eq (.TypeName .Type) "error") -}} +-{{- $a := . -}} +-{{- $errName := .VarName nil "err" -}} +-{{$errName | $a.SpecifiedPlaceholder 1}} := {{.X}} +-if {{$errName | $a.SpecifiedPlaceholder 1}} != nil { +- return {{range $i, $v := .FuncResults}} +- {{- if $i}}, {{end -}} +- {{- if eq ($a.TypeName $v.Type) "error" -}} +- {{$errName | $a.SpecifiedPlaceholder 1 | $a.SpecifiedPlaceholder 2}} +- {{- else -}} +- {{$a.Zero $v.Type}} +- {{- end -}} +- {{end}} +-} +-{{end}}`, +-}, +- { +- label: "tostring", +- details: "[]byte to string", +- body: `{{if (eq (.TypeName .Type) "[]byte") -}} +- string({{.X}}) +- {{- end}}`, +- }, +- { +- label: "tostring", +- details: "int to string", +- body: `{{if (eq (.TypeName .Type) "int") -}} +- {{.Import "strconv"}}.Itoa({{.X}}) +- {{- end}}`, +- }, +- { +- label: "tobytes", +- details: "string to []byte", +- body: `{{if (eq (.TypeName .Type) "string") -}} +- []byte({{.X}}) +- {{- end}}`, +- }, +-} +- +-// Cursor indicates where the client's cursor should end up after the +-// snippet is done. +-func (a *postfixTmplArgs) Cursor() string { +- return "$0" +-} +- +-// Placeholder indicate a tab stop with the placeholder string, the order +-// of tab stops is the same as the order of invocation +-func (a *postfixTmplArgs) Placeholder(placeholder string) string { +- if !a.placeholders { +- placeholder = "" +- } +- return fmt.Sprintf("${%d:%s}", a.nextTabStop(), placeholder) +-} +- +-// nextTabStop returns the next tab stop index for a new placeholder. +-func (a *postfixTmplArgs) nextTabStop() int { +- // Tab stops start from 1, so increment before returning. +- a.currentTabStop++ +- return a.currentTabStop +-} +- +-// SpecifiedPlaceholder indicate a specified tab stop with the placeholder string. +-// Sometimes the same tab stop appears in multiple places and their numbers +-// need to be specified. e.g. 
variferr +-func (a *postfixTmplArgs) SpecifiedPlaceholder(tabStop int, placeholder string) string { +- if !a.placeholders { +- placeholder = "" +- } +- return fmt.Sprintf("${%d:%s}", tabStop, placeholder) +-} +- +-// Import makes sure the package corresponding to path is imported, +-// returning the identifier to use to refer to the package. +-func (a *postfixTmplArgs) Import(path string) (string, error) { +- name, edits, err := a.importIfNeeded(path, a.scope) +- if err != nil { +- return "", fmt.Errorf("couldn't import %q: %w", path, err) +- } +- a.edits = append(a.edits, edits...) +- +- return name, nil +-} +- +-func (a *postfixTmplArgs) EscapeQuotes(v string) string { +- return strings.ReplaceAll(v, `"`, `\\"`) +-} +- +-// ElemType returns the Elem() type of xType, if applicable. +-func (a *postfixTmplArgs) ElemType() types.Type { +- type hasElem interface{ Elem() types.Type } // Array, Chan, Map, Pointer, Slice +- if e, ok := a.Type.Underlying().(hasElem); ok { +- return e.Elem() +- } +- return nil +-} +- +-// Kind returns the underlying kind of type, e.g. "slice", "struct", +-// etc. +-func (a *postfixTmplArgs) Kind() string { +- t := reflect.TypeOf(a.Type.Underlying()) +- return strings.ToLower(strings.TrimPrefix(t.String(), "*types.")) +-} +- +-// KeyType returns the type of X's key. KeyType panics if X is not a +-// map. +-func (a *postfixTmplArgs) KeyType() types.Type { +- return a.Type.Underlying().(*types.Map).Key() +-} +- +-// Tuple returns the tuple result vars if the type of X is tuple. +-func (a *postfixTmplArgs) Tuple() []*types.Var { +- tuple, _ := a.Type.(*types.Tuple) +- if tuple == nil { +- return nil +- } +- +- typs := make([]*types.Var, 0, tuple.Len()) +- for i := range tuple.Len() { +- typs = append(typs, tuple.At(i)) +- } +- return typs +-} +- +-// TupleLast returns the last tuple result vars if the type of X is tuple. +-func (a *postfixTmplArgs) TupleLast() *types.Var { +- tuple, _ := a.Type.(*types.Tuple) +- if tuple == nil { +- return nil +- } +- if tuple.Len() == 0 { +- return nil +- } +- return tuple.At(tuple.Len() - 1) +-} +- +-// TypeName returns the textual representation of type t. +-func (a *postfixTmplArgs) TypeName(t types.Type) (string, error) { +- if t == nil || t == types.Typ[types.Invalid] { +- return "", fmt.Errorf("invalid type: %v", t) +- } +- return types.TypeString(t, a.qual), nil +-} +- +-// Zero return the zero value representation of type t +-func (a *postfixTmplArgs) Zero(t types.Type) string { +- zero, _ := typesinternal.ZeroString(t, a.qual) +- return zero +-} +- +-func (a *postfixTmplArgs) IsIdent() bool { +- _, ok := a.sel.X.(*ast.Ident) +- return ok +-} +- +-// VarName returns a suitable variable name for the type t. If t +-// implements the error interface, "err" is used. If t is not a named +-// type then nonNamedDefault is used. Otherwise a name is made by +-// abbreviating the type name. If the resultant name is already in +-// scope, an integer is appended to make a unique name. +-func (a *postfixTmplArgs) VarName(t types.Type, nonNamedDefault string) string { +- if t == nil { +- t = types.Typ[types.Invalid] +- } +- +- var name string +- // go/types predicates are undefined on types.Typ[types.Invalid]. 
+- if !types.Identical(t, types.Typ[types.Invalid]) && types.Implements(t, errorIntf) { +- name = "err" +- } else if !is[*types.Named](types.Unalias(typesinternal.Unpointer(t))) { +- name = nonNamedDefault +- } +- +- if name == "" { +- name = types.TypeString(t, func(p *types.Package) string { +- return "" +- }) +- name = abbreviateTypeName(name) +- } +- +- if dot := strings.LastIndex(name, "."); dot > -1 { +- name = name[dot+1:] +- } +- +- uniqueName := name +- for i := 2; ; i++ { +- if s, _ := a.scope.LookupParent(uniqueName, token.NoPos); s == nil && !a.varNames[uniqueName] { +- break +- } +- uniqueName = fmt.Sprintf("%s%d", name, i) +- } +- +- a.varNames[uniqueName] = true +- +- return uniqueName +-} +- +-func (c *completer) addPostfixSnippetCandidates(ctx context.Context, sel *ast.SelectorExpr) { +- if !c.opts.postfix { +- return +- } +- +- initPostfixRules() +- +- if sel == nil || sel.Sel == nil { +- return +- } +- +- selType := c.pkg.TypesInfo().TypeOf(sel.X) +- if selType == nil { +- return +- } +- +- // Skip empty tuples since there is no value to operate on. +- if tuple, ok := selType.(*types.Tuple); ok && tuple == nil { +- return +- } +- +- tokFile := c.pkg.FileSet().File(c.pos) +- +- // Only replace sel with a statement if sel is already a statement. +- var stmtOK bool +- for i, n := range c.path { +- if n == sel && i < len(c.path)-1 { +- switch p := c.path[i+1].(type) { +- case *ast.ExprStmt: +- stmtOK = true +- case *ast.AssignStmt: +- // In cases like: +- // +- // foo.<> +- // bar = 123 +- // +- // detect that "foo." makes up the entire statement since the +- // apparent selector spans lines. +- stmtOK = safetoken.Line(tokFile, c.pos) < safetoken.Line(tokFile, p.TokPos) +- } +- break +- } +- } +- +- var funcResults []*types.Var +- if c.enclosingFunc != nil { +- results := c.enclosingFunc.sig.Results() +- if results != nil { +- funcResults = make([]*types.Var, results.Len()) +- for i := range results.Len() { +- funcResults[i] = results.At(i) +- } +- } +- } +- +- scope := c.pkg.Types().Scope().Innermost(c.pos) +- if scope == nil { +- return +- } +- +- // afterDot is the position after selector dot, e.g. "|" in +- // "foo.|print". +- afterDot := sel.Sel.Pos() +- +- // We must detect dangling selectors such as: +- // +- // foo.<> +- // bar +- // +- // and adjust afterDot so that we don't mistakenly delete the +- // newline thinking "bar" is part of our selector. +- if startLine := safetoken.Line(tokFile, sel.Pos()); startLine != safetoken.Line(tokFile, afterDot) { +- if safetoken.Line(tokFile, c.pos) != startLine { +- return +- } +- afterDot = c.pos +- } +- +- for _, rule := range postfixTmpls { +- // When completing foo.print<>, "print" is naturally overwritten, +- // but we need to also remove "foo." so the snippet has a clean +- // slate. +- edits, err := c.editText(sel.Pos(), afterDot, "") +- if err != nil { +- event.Error(ctx, "error calculating postfix edits", err) +- return +- } +- +- tmplArgs := postfixTmplArgs{ +- X: golang.FormatNode(c.pkg.FileSet(), sel.X), +- StmtOK: stmtOK, +- Obj: exprObj(c.pkg.TypesInfo(), sel.X), +- Type: selType, +- FuncResults: funcResults, +- sel: sel, +- qual: c.qual, +- importIfNeeded: c.importIfNeeded, +- scope: scope, +- varNames: make(map[string]bool), +- placeholders: c.opts.placeholders, +- } +- +- // Feed the template straight into the snippet builder. This +- // allows templates to build snippets as they are executed. 
+- err = rule.tmpl.Execute(&tmplArgs.snip, &tmplArgs) +- if err != nil { +- event.Error(ctx, "error executing postfix template", err) +- continue +- } +- +- if strings.TrimSpace(tmplArgs.snip.String()) == "" { +- continue +- } +- +- score := c.matcher.Score(rule.label) +- if score <= 0 { +- continue +- } +- +- c.items = append(c.items, CompletionItem{ +- Label: rule.label + "!", +- Detail: rule.details, +- Score: float64(score) * 0.01, +- Kind: protocol.SnippetCompletion, +- snippet: &tmplArgs.snip, +- AdditionalTextEdits: append(edits, tmplArgs.edits...), +- }) +- } +-} +- +-var postfixRulesOnce sync.Once +- +-func initPostfixRules() { +- postfixRulesOnce.Do(func() { +- var idx int +- for _, rule := range postfixTmpls { +- var err error +- rule.tmpl, err = template.New("postfix_snippet").Funcs(template.FuncMap{ +- "inc": inc, +- }).Parse(rule.body) +- if err != nil { +- log.Panicf("error parsing postfix snippet template: %v", err) +- } +- postfixTmpls[idx] = rule +- idx++ +- } +- postfixTmpls = postfixTmpls[:idx] +- }) +-} +- +-func inc(i int) int { +- return i + 1 +-} +- +-// importIfNeeded returns the package identifier and any necessary +-// edits to import package pkgPath. +-func (c *completer) importIfNeeded(pkgPath string, scope *types.Scope) (string, []protocol.TextEdit, error) { +- defaultName := imports.ImportPathToAssumedName(pkgPath) +- +- // Check if file already imports pkgPath. +- for _, s := range c.pgf.File.Imports { +- // TODO(adonovan): what if pkgPath has a vendor/ suffix? +- // This may be the cause of go.dev/issue/56291. +- if string(metadata.UnquoteImportPath(s)) == pkgPath { +- if s.Name == nil { +- return defaultName, nil, nil +- } +- if s.Name.Name != "_" { +- return s.Name.Name, nil, nil +- } +- } +- } +- +- // Give up if the package's name is already in use by another object. +- if _, obj := scope.LookupParent(defaultName, token.NoPos); obj != nil { +- return "", nil, fmt.Errorf("import name %q of %q already in use", defaultName, pkgPath) +- } +- +- edits, err := c.importEdits(&importInfo{ +- importPath: pkgPath, +- }) +- if err != nil { +- return "", nil, err +- } +- +- return defaultName, edits, nil +-} +diff -urN a/gopls/internal/golang/completion/printf.go b/gopls/internal/golang/completion/printf.go +--- a/gopls/internal/golang/completion/printf.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/golang/completion/printf.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,174 +0,0 @@ +-// Copyright 2020 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package completion +- +-import ( +- "go/ast" +- "go/constant" +- "go/types" +- "strconv" +- "strings" +- "unicode/utf8" +-) +- +-// printfArgKind returns the expected objKind when completing a +-// printf-like operand. call is the printf-like function call, and +-// argIdx is the index of call.Args being completed. +-func printfArgKind(info *types.Info, call *ast.CallExpr, argIdx int) objKind { +- // Printf-like function name must end in "f". +- fn := exprObj(info, call.Fun) +- if fn == nil || !strings.HasSuffix(fn.Name(), "f") { +- return kindAny +- } +- +- sig, _ := fn.Type().Underlying().(*types.Signature) +- if sig == nil { +- return kindAny +- } +- +- // Must be variadic and take at least two params. +- numParams := sig.Params().Len() +- if !sig.Variadic() || numParams < 2 || argIdx < numParams-1 { +- return kindAny +- } +- +- // Param preceding variadic args must be a (format) string. 
+- if !types.Identical(sig.Params().At(numParams-2).Type(), types.Typ[types.String]) { +- return kindAny +- } +- +- // Format string must be a constant. +- strArg := info.Types[call.Args[numParams-2]].Value // may be zero +- if strArg == nil || strArg.Kind() != constant.String { +- return kindAny +- } +- +- return formatOperandKind(constant.StringVal(strArg), argIdx-(numParams-1)+1) +-} +- +-// formatOperandKind returns the objKind corresponding to format's +-// operandIdx'th operand. +-func formatOperandKind(format string, operandIdx int) objKind { +- var ( +- prevOperandIdx int +- kind = kindAny +- ) +- for { +- i := strings.Index(format, "%") +- if i == -1 { +- break +- } +- +- var operands []formatOperand +- format, operands = parsePrintfVerb(format[i+1:], prevOperandIdx) +- +- // Check if any this verb's operands correspond to our target +- // operandIdx. +- for _, v := range operands { +- if v.idx == operandIdx { +- if kind == kindAny { +- kind = v.kind +- } else if v.kind != kindAny { +- // If multiple verbs refer to the same operand, take the +- // intersection of their kinds. +- kind &= v.kind +- } +- } +- +- prevOperandIdx = v.idx +- } +- } +- return kind +-} +- +-type formatOperand struct { +- // idx is the one-based printf operand index. +- idx int +- // kind is a mask of expected kinds of objects for this operand. +- kind objKind +-} +- +-// parsePrintfVerb parses the leading printf verb in f. The opening +-// "%" must already be trimmed from f. prevIdx is the previous +-// operand's index, or zero if this is the first verb. The format +-// string is returned with the leading verb removed. Multiple operands +-// can be returned in the case of dynamic widths such as "%*.*f". +-func parsePrintfVerb(f string, prevIdx int) (string, []formatOperand) { +- var verbs []formatOperand +- +- addVerb := func(k objKind) { +- verbs = append(verbs, formatOperand{ +- idx: prevIdx + 1, +- kind: k, +- }) +- prevIdx++ +- } +- +- for len(f) > 0 { +- // Trim first rune off of f so we are guaranteed to make progress. +- r, l := utf8.DecodeRuneInString(f) +- f = f[l:] +- +- // We care about three things: +- // 1. The verb, which maps directly to object kind. +- // 2. Explicit operand indices like "%[2]s". +- // 3. Dynamic widths using "*". +- switch r { +- case '%': +- return f, nil +- case '*': +- addVerb(kindInt) +- continue +- case '[': +- // Parse operand index as in "%[2]s". +- i := strings.Index(f, "]") +- if i == -1 { +- return f, nil +- } +- +- idx, err := strconv.Atoi(f[:i]) +- f = f[i+1:] +- if err != nil { +- return f, nil +- } +- +- prevIdx = idx - 1 +- continue +- case 'v', 'T': +- addVerb(kindAny) +- case 't': +- addVerb(kindBool) +- case 'c', 'd', 'o', 'O', 'U': +- addVerb(kindInt) +- case 'e', 'E', 'f', 'F', 'g', 'G': +- addVerb(kindFloat | kindComplex) +- case 'b': +- addVerb(kindInt | kindFloat | kindComplex | kindBytes) +- case 'q', 's': +- addVerb(kindString | kindBytes | kindStringer | kindError) +- case 'x', 'X': +- // Omit kindStringer and kindError though technically allowed. +- addVerb(kindString | kindBytes | kindInt | kindFloat | kindComplex) +- case 'p': +- // Accept kindInterface even though it doesn't necessarily contain a pointer. +- // This avoids us offering "&foo" when "foo" is an interface type. +- addVerb(kindPtr | kindSlice | kindMap | kindFunc | kindInterface) +- case 'w': +- addVerb(kindError) +- case '+', '-', '#', ' ', '.', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9': +- // Flag or numeric width/precision value. 
+- continue +- default: +- // Assume unrecognized rune is a custom fmt.Formatter verb. +- addVerb(kindAny) +- } +- +- if len(verbs) > 0 { +- break +- } +- } +- +- return f, verbs +-} +diff -urN a/gopls/internal/golang/completion/printf_test.go b/gopls/internal/golang/completion/printf_test.go +--- a/gopls/internal/golang/completion/printf_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/golang/completion/printf_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,72 +0,0 @@ +-// Copyright 2020 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package completion +- +-import ( +- "fmt" +- "testing" +-) +- +-func TestFormatOperandKind(t *testing.T) { +- cases := []struct { +- f string +- idx int +- kind objKind +- }{ +- {"", 1, kindAny}, +- {"%", 1, kindAny}, +- {"%%%", 1, kindAny}, +- {"%[1", 1, kindAny}, +- {"%[?%s", 2, kindAny}, +- {"%[abc]v", 1, kindAny}, +- +- {"%v", 1, kindAny}, +- {"%T", 1, kindAny}, +- {"%t", 1, kindBool}, +- {"%d", 1, kindInt}, +- {"%c", 1, kindInt}, +- {"%o", 1, kindInt}, +- {"%O", 1, kindInt}, +- {"%U", 1, kindInt}, +- {"%e", 1, kindFloat | kindComplex}, +- {"%E", 1, kindFloat | kindComplex}, +- {"%f", 1, kindFloat | kindComplex}, +- {"%F", 1, kindFloat | kindComplex}, +- {"%g", 1, kindFloat | kindComplex}, +- {"%G", 1, kindFloat | kindComplex}, +- {"%b", 1, kindInt | kindFloat | kindComplex | kindBytes}, +- {"%q", 1, kindString | kindBytes | kindStringer | kindError}, +- {"%s", 1, kindString | kindBytes | kindStringer | kindError}, +- {"%x", 1, kindString | kindBytes | kindInt | kindFloat | kindComplex}, +- {"%X", 1, kindString | kindBytes | kindInt | kindFloat | kindComplex}, +- {"%p", 1, kindPtr | kindSlice | kindMap | kindFunc | kindInterface}, +- {"%w", 1, kindError}, +- +- {"%1.2f", 1, kindFloat | kindComplex}, +- {"%*f", 1, kindInt}, +- {"%*f", 2, kindFloat | kindComplex}, +- {"%*.*f", 1, kindInt}, +- {"%*.*f", 2, kindInt}, +- {"%*.*f", 3, kindFloat | kindComplex}, +- {"%[3]*.[2]*[1]f", 1, kindFloat | kindComplex}, +- {"%[3]*.[2]*[1]f", 2, kindInt}, +- {"%[3]*.[2]*[1]f", 3, kindInt}, +- +- {"foo %% %d", 1, kindInt}, +- {"%#-12.34f", 1, kindFloat | kindComplex}, +- {"% d", 1, kindInt}, +- +- {"%s %[1]X %d", 1, kindString | kindBytes}, +- {"%s %[1]X %d", 2, kindInt}, +- } +- +- for _, c := range cases { +- t.Run(fmt.Sprintf("%q#%d", c.f, c.idx), func(t *testing.T) { +- if got := formatOperandKind(c.f, c.idx); got != c.kind { +- t.Errorf("expected %d (%[1]b), got %d (%[2]b)", c.kind, got) +- } +- }) +- } +-} +diff -urN a/gopls/internal/golang/completion/snippet/snippet_builder.go b/gopls/internal/golang/completion/snippet/snippet_builder.go +--- a/gopls/internal/golang/completion/snippet/snippet_builder.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/golang/completion/snippet/snippet_builder.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,111 +0,0 @@ +-// Copyright 2019 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-// Package snippet implements the specification for the LSP snippet format. +-// +-// Snippets are "tab stop" templates returned as an optional attribute of LSP +-// completion candidates. As the user presses tab, they cycle through a series of +-// tab stops defined in the snippet. Each tab stop can optionally have placeholder +-// text, which can be pre-selected by editors. 
For a full description of syntax +-// and features, see "Snippet Syntax" at +-// https://microsoft.github.io/language-server-protocol/specifications/specification-3-14/#textDocument_completion. +-// +-// A typical snippet looks like "foo(${1:i int}, ${2:s string})". +-package snippet +- +-import ( +- "fmt" +- "strings" +-) +- +-// A Builder is used to build an LSP snippet piecemeal. +-// The zero value is ready to use. Do not copy a non-zero Builder. +-type Builder struct { +- // currentTabStop is the index of the previous tab stop. The +- // next tab stop will be currentTabStop+1. +- currentTabStop int +- sb strings.Builder +-} +- +-// Escape characters defined in https://microsoft.github.io/language-server-protocol/specifications/specification-3-14/#textDocument_completion under "Grammar". +-var replacer = strings.NewReplacer( +- `\`, `\\`, +- `}`, `\}`, +- `$`, `\$`, +-) +- +-func (b *Builder) WriteText(s string) { +- replacer.WriteString(&b.sb, s) +-} +- +-func (b *Builder) PrependText(s string) { +- rawSnip := b.String() +- b.sb.Reset() +- b.WriteText(s) +- b.sb.WriteString(rawSnip) +-} +- +-func (b *Builder) Write(data []byte) (int, error) { +- return b.sb.Write(data) +-} +- +-// WritePlaceholder writes a tab stop and placeholder value to the Builder. +-// The callback style allows for creating nested placeholders. To write an +-// empty tab stop, provide a nil callback. +-func (b *Builder) WritePlaceholder(fn func(*Builder)) { +- fmt.Fprintf(&b.sb, "${%d:", b.nextTabStop()) +- if fn != nil { +- fn(b) +- } +- b.sb.WriteByte('}') +-} +- +-// WriteFinalTabstop marks where cursor ends up after the user has +-// cycled through all the normal tab stops. It defaults to the +-// character after the snippet. +-func (b *Builder) WriteFinalTabstop() { +- fmt.Fprint(&b.sb, "$0") +-} +- +-// In addition to '\', '}', and '$', snippet choices also use '|' and ',' as +-// meta characters, so they must be escaped within the choices. +-var choiceReplacer = strings.NewReplacer( +- `\`, `\\`, +- `}`, `\}`, +- `$`, `\$`, +- `|`, `\|`, +- `,`, `\,`, +-) +- +-// WriteChoice writes a tab stop and list of text choices to the Builder. +-// The user's editor will prompt the user to choose one of the choices. +-func (b *Builder) WriteChoice(choices []string) { +- fmt.Fprintf(&b.sb, "${%d|", b.nextTabStop()) +- for i, c := range choices { +- if i != 0 { +- b.sb.WriteByte(',') +- } +- choiceReplacer.WriteString(&b.sb, c) +- } +- b.sb.WriteString("|}") +-} +- +-// String returns the built snippet string. +-func (b *Builder) String() string { +- return b.sb.String() +-} +- +-// Clone returns a copy of b. +-func (b *Builder) Clone() *Builder { +- var clone Builder +- clone.sb.WriteString(b.String()) +- return &clone +-} +- +-// nextTabStop returns the next tab stop index for a new placeholder. +-func (b *Builder) nextTabStop() int { +- // Tab stops start from 1, so increment before returning. +- b.currentTabStop++ +- return b.currentTabStop +-} +diff -urN a/gopls/internal/golang/completion/snippet/snippet_builder_test.go b/gopls/internal/golang/completion/snippet/snippet_builder_test.go +--- a/gopls/internal/golang/completion/snippet/snippet_builder_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/golang/completion/snippet/snippet_builder_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,62 +0,0 @@ +-// Copyright 2019 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. 
+- +-package snippet +- +-import ( +- "testing" +-) +- +-func TestSnippetBuilder(t *testing.T) { +- expect := func(expected string, fn func(*Builder)) { +- t.Helper() +- +- var b Builder +- fn(&b) +- if got := b.String(); got != expected { +- t.Errorf("got %q, expected %q", got, expected) +- } +- } +- +- expect("", func(b *Builder) {}) +- +- expect(`hi { \} \$ | " , / \\`, func(b *Builder) { +- b.WriteText(`hi { } $ | " , / \`) +- }) +- +- expect("${1:}", func(b *Builder) { +- b.WritePlaceholder(nil) +- }) +- +- expect("hi ${1:there}", func(b *Builder) { +- b.WriteText("hi ") +- b.WritePlaceholder(func(b *Builder) { +- b.WriteText("there") +- }) +- }) +- +- expect(`${1:id=${2:{your id\}}}`, func(b *Builder) { +- b.WritePlaceholder(func(b *Builder) { +- b.WriteText("id=") +- b.WritePlaceholder(func(b *Builder) { +- b.WriteText("{your id}") +- }) +- }) +- }) +- +- expect(`${1|one,{ \} \$ \| " \, / \\,three|}`, func(b *Builder) { +- b.WriteChoice([]string{"one", `{ } $ | " , / \`, "three"}) +- }) +- +- expect("$0 hello", func(b *Builder) { +- b.WriteFinalTabstop() +- b.WriteText(" hello") +- }) +- +- expect(`prepended \$5 ${1:} hello`, func(b *Builder) { +- b.WritePlaceholder(nil) +- b.WriteText(" hello") +- b.PrependText("prepended $5 ") +- }) +-} +diff -urN a/gopls/internal/golang/completion/snippet.go b/gopls/internal/golang/completion/snippet.go +--- a/gopls/internal/golang/completion/snippet.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/golang/completion/snippet.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,126 +0,0 @@ +-// Copyright 2019 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package completion +- +-import ( +- "go/ast" +- +- "golang.org/x/tools/gopls/internal/golang/completion/snippet" +- "golang.org/x/tools/gopls/internal/util/safetoken" +-) +- +-// structFieldSnippet calculates the snippet for struct literal field names. +-func (c *completer) structFieldSnippet(cand candidate, detail string, snip *snippet.Builder) { +- if !wantStructFieldCompletions(c.enclosingCompositeLiteral) { +- return +- } +- +- // If we are in a deep completion then we can't be completing a field +- // name (e.g. "Foo{f<>}" completing to "Foo{f.Bar}" should not generate +- // a snippet). +- if len(cand.path) > 0 { +- return +- } +- +- clInfo := c.enclosingCompositeLiteral +- +- // If we are already in a key-value expression, we don't want a snippet. +- if clInfo.kv != nil { +- return +- } +- +- // A plain snippet turns "Foo{Ba<>" into "Foo{Bar: <>". +- snip.WriteText(": ") +- snip.WritePlaceholder(func(b *snippet.Builder) { +- // A placeholder snippet turns "Foo{Ba<>" into "Foo{Bar: <*int*>". +- if c.opts.placeholders { +- b.WriteText(detail) +- } +- }) +- +- fset := c.pkg.FileSet() +- +- // If the cursor position is on a different line from the literal's opening brace, +- // we are in a multiline literal. Ignore line directives. +- if safetoken.StartPosition(fset, c.pos).Line != safetoken.StartPosition(fset, clInfo.cl.Lbrace).Line { +- snip.WriteText(",") +- } +-} +- +-// functionCallSnippet calculates the snippet for function calls. +-// +-// Callers should omit the suffix of type parameters that are +-// constrained by the argument types, to avoid offering completions +-// that contain instantiations that are redundant because of type +-// inference, such as f[int](1) for func f[T any](x T). 
+-func (c *completer) functionCallSnippet(name string, tparams, params []string, snip *snippet.Builder) { +- if !c.opts.completeFunctionCalls { +- snip.WriteText(name) +- return +- } +- +- // If there is no suffix then we need to reuse existing call parens +- // "()" if present. If there is an identifier suffix then we always +- // need to include "()" since we don't overwrite the suffix. +- if c.surrounding != nil && c.surrounding.Suffix() == "" && len(c.path) > 1 { +- // If we are the left side (i.e. "Fun") part of a call expression, +- // we don't want a snippet since there are already parens present. +- switch n := c.path[1].(type) { +- case *ast.CallExpr: +- // The Lparen != Rparen check detects fudged CallExprs we +- // inserted when fixing the AST. In this case, we do still need +- // to insert the calling "()" parens. +- if n.Fun == c.path[0] && n.Lparen != n.Rparen { +- return +- } +- case *ast.SelectorExpr: +- if len(c.path) > 2 { +- if call, ok := c.path[2].(*ast.CallExpr); ok && call.Fun == c.path[1] && call.Lparen != call.Rparen { +- return +- } +- } +- } +- } +- +- snip.WriteText(name) +- +- if len(tparams) > 0 { +- snip.WriteText("[") +- if c.opts.placeholders { +- for i, tp := range tparams { +- if i > 0 { +- snip.WriteText(", ") +- } +- snip.WritePlaceholder(func(b *snippet.Builder) { +- b.WriteText(tp) +- }) +- } +- } else { +- snip.WritePlaceholder(nil) +- } +- snip.WriteText("]") +- } +- +- snip.WriteText("(") +- +- if c.opts.placeholders { +- // A placeholder snippet turns "someFun<>" into "someFunc(<*i int*>, *s string*)". +- for i, p := range params { +- if i > 0 { +- snip.WriteText(", ") +- } +- snip.WritePlaceholder(func(b *snippet.Builder) { +- b.WriteText(p) +- }) +- } +- } else { +- // A plain snippet turns "someFun<>" into "someFunc(<>)". +- if len(params) > 0 { +- snip.WritePlaceholder(nil) +- } +- } +- +- snip.WriteText(")") +-} +diff -urN a/gopls/internal/golang/completion/statements.go b/gopls/internal/golang/completion/statements.go +--- a/gopls/internal/golang/completion/statements.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/golang/completion/statements.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,426 +0,0 @@ +-// Copyright 2020 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package completion +- +-import ( +- "fmt" +- "go/ast" +- "go/token" +- "go/types" +- "strings" +- +- "golang.org/x/tools/gopls/internal/cache" +- "golang.org/x/tools/gopls/internal/golang" +- "golang.org/x/tools/gopls/internal/golang/completion/snippet" +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/internal/typesinternal" +-) +- +-// addStatementCandidates adds full statement completion candidates +-// appropriate for the current context. +-func (c *completer) addStatementCandidates() { +- c.addErrCheck() +- c.addAssignAppend() +- c.addReturnZeroValues() +-} +- +-// addAssignAppend offers a completion candidate of the form: +-// +-// someSlice = append(someSlice, ) +-// +-// It will offer the "append" completion in either of two situations: +-// +-// 1. Position is in RHS of assign, prefix matches "append", and +-// corresponding LHS object is a slice. For example, +-// "foo = ap<>" completes to "foo = append(foo, )". +-// +-// 2. Prefix is an ident or selector in an *ast.ExprStmt (i.e. +-// beginning of statement), and our best matching candidate is a +-// slice. For example: "foo.ba" completes to "foo.bar = append(foo.bar, )". 
+-func (c *completer) addAssignAppend() { +- if len(c.path) < 3 { +- return +- } +- +- ident, _ := c.path[0].(*ast.Ident) +- if ident == nil { +- return +- } +- +- var ( +- // sliceText is the full name of our slice object, e.g. "s.abc" in +- // "s.abc = app<>". +- sliceText string +- // needsLHS is true if we need to prepend the LHS slice name and +- // "=" to our candidate. +- needsLHS = false +- fset = c.pkg.FileSet() +- ) +- +- switch n := c.path[1].(type) { +- case *ast.AssignStmt: +- // We are already in an assignment. Make sure our prefix matches "append". +- if c.matcher.Score("append") <= 0 { +- return +- } +- +- exprIdx := exprAtPos(c.pos, n.Rhs) +- if exprIdx == len(n.Rhs) || exprIdx > len(n.Lhs)-1 { +- return +- } +- +- lhsType := c.pkg.TypesInfo().TypeOf(n.Lhs[exprIdx]) +- if lhsType == nil { +- return +- } +- +- // Make sure our corresponding LHS object is a slice. +- if _, isSlice := lhsType.Underlying().(*types.Slice); !isSlice { +- return +- } +- +- // The name or our slice is whatever's in the LHS expression. +- sliceText = golang.FormatNode(fset, n.Lhs[exprIdx]) +- case *ast.SelectorExpr: +- // Make sure we are a selector at the beginning of a statement. +- if _, parentIsExprtStmt := c.path[2].(*ast.ExprStmt); !parentIsExprtStmt { +- return +- } +- +- // So far we only know the first part of our slice name. For +- // example in "s.a<>" we only know our slice begins with "s." +- // since the user could still be typing. +- sliceText = golang.FormatNode(fset, n.X) + "." +- needsLHS = true +- case *ast.ExprStmt: +- needsLHS = true +- default: +- return +- } +- +- var ( +- label string +- snip snippet.Builder +- score = highScore +- ) +- +- if needsLHS { +- // Offer the long form assign + append candidate if our best +- // candidate is a slice. +- bestItem := c.topCandidate() +- if bestItem == nil || !bestItem.isSlice { +- return +- } +- +- // Don't rank the full form assign + append candidate above the +- // slice itself. +- score = bestItem.Score - 0.01 +- +- // Fill in rest of sliceText now that we have the object name. +- sliceText += bestItem.Label +- +- // Fill in the candidate's LHS bits. +- label = fmt.Sprintf("%s = ", bestItem.Label) +- snip.WriteText(label) +- } +- +- snip.WriteText(fmt.Sprintf("append(%s, ", sliceText)) +- snip.WritePlaceholder(nil) +- snip.WriteText(")") +- +- c.items = append(c.items, CompletionItem{ +- Label: label + fmt.Sprintf("append(%s, )", sliceText), +- Kind: protocol.FunctionCompletion, +- Score: score, +- snippet: &snip, +- }) +-} +- +-// topCandidate returns the strictly highest scoring candidate +-// collected so far. If the top two candidates have the same score, +-// nil is returned. +-func (c *completer) topCandidate() *CompletionItem { +- var bestItem, secondBestItem *CompletionItem +- for i := range c.items { +- if bestItem == nil || c.items[i].Score > bestItem.Score { +- bestItem = &c.items[i] +- } else if secondBestItem == nil || c.items[i].Score > secondBestItem.Score { +- secondBestItem = &c.items[i] +- } +- } +- +- // If secondBestItem has the same score, bestItem isn't +- // the strict best. 
+- if secondBestItem != nil && secondBestItem.Score == bestItem.Score { +- return nil +- } +- +- return bestItem +-} +- +-// addErrCheck offers a completion candidate of the form: +-// +-// if err != nil { +-// return nil, err +-// } +-// +-// In the case of test functions, it offers a completion candidate of the form: +-// +-// if err != nil { +-// t.Fatal(err) +-// } +-// +-// The position must be in a function that returns an error, and the +-// statement preceding the position must be an assignment where the +-// final LHS object is an error. addErrCheck will synthesize +-// zero values as necessary to make the return statement valid. +-func (c *completer) addErrCheck() { +- if len(c.path) < 2 || c.enclosingFunc == nil || !c.opts.placeholders { +- return +- } +- +- var ( +- errorType = types.Universe.Lookup("error").Type() +- result = c.enclosingFunc.sig.Results() +- testVar = getTestVar(c.enclosingFunc, c.pkg) +- isTest = testVar != "" +- doesNotReturnErr = result.Len() == 0 || !types.Identical(result.At(result.Len()-1).Type(), errorType) +- ) +- // Make sure our enclosing function is a Test func or returns an error. +- if !isTest && doesNotReturnErr { +- return +- } +- +- prevLine := prevStmt(c.pos, c.path) +- if prevLine == nil { +- return +- } +- +- // Make sure our preceding statement was as assignment. +- assign, _ := prevLine.(*ast.AssignStmt) +- if assign == nil || len(assign.Lhs) == 0 { +- return +- } +- +- lastAssignee := assign.Lhs[len(assign.Lhs)-1] +- +- // Make sure the final assignee is an error. +- if !types.Identical(c.pkg.TypesInfo().TypeOf(lastAssignee), errorType) { +- return +- } +- +- var ( +- // errVar is e.g. "err" in "foo, err := bar()". +- errVar = golang.FormatNode(c.pkg.FileSet(), lastAssignee) +- +- // Whether we need to include the "if" keyword in our candidate. +- needsIf = true +- ) +- +- // If the returned error from the previous statement is "_", it is not a real object. +- // If we don't have an error, and the function signature takes a testing.TB that is either ignored +- // or an "_", then we also can't call t.Fatal(err). +- if errVar == "_" { +- return +- } +- +- // Below we try to detect if the user has already started typing "if +- // err" so we can replace what they've typed with our complete +- // statement. +- switch n := c.path[0].(type) { +- case *ast.Ident: +- switch c.path[1].(type) { +- case *ast.ExprStmt: +- // This handles: +- // +- // f, err := os.Open("foo") +- // i<> +- +- // Make sure they are typing "if". +- if c.matcher.Score("if") <= 0 { +- return +- } +- case *ast.IfStmt: +- // This handles: +- // +- // f, err := os.Open("foo") +- // if er<> +- +- // Make sure they are typing the error's name. +- if c.matcher.Score(errVar) <= 0 { +- return +- } +- +- needsIf = false +- default: +- return +- } +- case *ast.IfStmt: +- // This handles: +- // +- // f, err := os.Open("foo") +- // if <> +- +- // Avoid false positives by ensuring the if's cond is a bad +- // expression. For example, don't offer the completion in cases +- // like "if <> somethingElse". +- if _, bad := n.Cond.(*ast.BadExpr); !bad { +- return +- } +- +- // If "if" is our direct prefix, we need to include it in our +- // candidate since the existing "if" will be overwritten. +- needsIf = c.pos == n.Pos()+token.Pos(len("if")) +- } +- +- // Build up a snippet that looks like: +- // +- // if err != nil { +- // return , ..., ${1:err} +- // } +- // +- // We make the error a placeholder so it is easy to alter the error. 
+- var snip snippet.Builder +- if needsIf { +- snip.WriteText("if ") +- } +- snip.WriteText(fmt.Sprintf("%s != nil {\n\t", errVar)) +- +- var label string +- if isTest { +- snip.WriteText(fmt.Sprintf("%s.Fatal(%s)", testVar, errVar)) +- label = fmt.Sprintf("%[1]s != nil { %[2]s.Fatal(%[1]s) }", errVar, testVar) +- } else { +- snip.WriteText("return ") +- for i := range result.Len() - 1 { +- if zero, isValid := typesinternal.ZeroString(result.At(i).Type(), c.qual); isValid { +- snip.WriteText(zero) +- } +- snip.WriteText(", ") +- } +- snip.WritePlaceholder(func(b *snippet.Builder) { +- b.WriteText(errVar) +- }) +- label = fmt.Sprintf("%[1]s != nil { return %[1]s }", errVar) +- } +- +- snip.WriteText("\n}") +- +- if needsIf { +- label = "if " + label +- } +- +- c.items = append(c.items, CompletionItem{ +- Label: label, +- Kind: protocol.SnippetCompletion, +- Score: highScore, +- snippet: &snip, +- }) +-} +- +-// getTestVar checks the function signature's input parameters and returns +-// the name of the first parameter that implements "testing.TB". For example, +-// func someFunc(t *testing.T) returns the string "t", func someFunc(b *testing.B) +-// returns "b" etc. An empty string indicates that the function signature +-// does not take a testing.TB parameter or does so but is ignored such +-// as func someFunc(*testing.T). +-func getTestVar(enclosingFunc *funcInfo, pkg *cache.Package) string { +- if enclosingFunc == nil || enclosingFunc.sig == nil { +- return "" +- } +- +- var testingPkg *types.Package +- for _, p := range pkg.Types().Imports() { +- if p.Path() == "testing" { +- testingPkg = p +- break +- } +- } +- if testingPkg == nil { +- return "" +- } +- tbObj := testingPkg.Scope().Lookup("TB") +- if tbObj == nil { +- return "" +- } +- iface, ok := tbObj.Type().Underlying().(*types.Interface) +- if !ok { +- return "" +- } +- +- sig := enclosingFunc.sig +- for i := range sig.Params().Len() { +- param := sig.Params().At(i) +- if param.Name() == "_" { +- continue +- } +- if !types.Implements(param.Type(), iface) { +- continue +- } +- return param.Name() +- } +- +- return "" +-} +- +-// addReturnZeroValues offers a snippet candidate on the form: +-// +-// return 0, "", nil +-// +-// Requires a partially or fully written return keyword at position. +-// Requires current position to be in a function with more than +-// zero return parameters. +-func (c *completer) addReturnZeroValues() { +- if len(c.path) < 2 || c.enclosingFunc == nil || !c.opts.placeholders { +- return +- } +- result := c.enclosingFunc.sig.Results() +- if result.Len() == 0 { +- return +- } +- +- // Offer just less than we expect from return as a keyword. +- var score = stdScore - 0.01 +- switch c.path[0].(type) { +- case *ast.ReturnStmt, *ast.Ident: +- f := c.matcher.Score("return") +- if f <= 0 { +- return +- } +- score *= float64(f) +- default: +- return +- } +- +- // The snippet will have a placeholder over each return value. +- // The label will not. 
+- var snip snippet.Builder +- var label strings.Builder +- snip.WriteText("return ") +- fmt.Fprintf(&label, "return ") +- +- for i := range result.Len() { +- if i > 0 { +- snip.WriteText(", ") +- fmt.Fprintf(&label, ", ") +- } +- +- zero, isValid := typesinternal.ZeroString(result.At(i).Type(), c.qual) +- if !isValid { +- zero = "" +- } +- snip.WritePlaceholder(func(b *snippet.Builder) { +- b.WriteText(zero) +- }) +- fmt.Fprint(&label, zero) +- } +- +- c.items = append(c.items, CompletionItem{ +- Label: label.String(), +- Kind: protocol.SnippetCompletion, +- Score: score, +- snippet: &snip, +- }) +-} +diff -urN a/gopls/internal/golang/completion/unify.go b/gopls/internal/golang/completion/unify.go +--- a/gopls/internal/golang/completion/unify.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/golang/completion/unify.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,661 +0,0 @@ +-// Below was copied from go/types/unify.go on September 24, 2024, +-// and combined with snippets from other files as well. +-// It is copied to implement unification for code completion inferences, +-// in lieu of an official type unification API. +-// +-// TODO: When such an API is available, the code below should deleted. +-// +-// Due to complexity of extracting private types from the go/types package, +-// the unifier does not fully implement interface unification. +-// +-// The code has been modified to compile without introducing any key functionality changes. +-// +- +-// Copyright 2020 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-// This file implements type unification. +-// +-// Type unification attempts to make two types x and y structurally +-// equivalent by determining the types for a given list of (bound) +-// type parameters which may occur within x and y. If x and y are +-// structurally different (say []T vs chan T), or conflicting +-// types are determined for type parameters, unification fails. +-// If unification succeeds, as a side-effect, the types of the +-// bound type parameters may be determined. +-// +-// Unification typically requires multiple calls u.unify(x, y) to +-// a given unifier u, with various combinations of types x and y. +-// In each call, additional type parameter types may be determined +-// as a side effect and recorded in u. +-// If a call fails (returns false), unification fails. +-// +-// In the unification context, structural equivalence of two types +-// ignores the difference between a defined type and its underlying +-// type if one type is a defined type and the other one is not. +-// It also ignores the difference between an (external, unbound) +-// type parameter and its core type. +-// If two types are not structurally equivalent, they cannot be Go +-// identical types. On the other hand, if they are structurally +-// equivalent, they may be Go identical or at least assignable, or +-// they may be in the type set of a constraint. +-// Whether they indeed are identical or assignable is determined +-// upon instantiation and function argument passing. +- +-package completion +- +-import ( +- "fmt" +- "go/types" +- "strings" +-) +- +-const ( +- // Upper limit for recursion depth. Used to catch infinite recursions +- // due to implementation issues (e.g., see issues go.dev/issue/48619, go.dev/issue/48656). +- unificationDepthLimit = 50 +- +- // Whether to panic when unificationDepthLimit is reached. 
+- // If disabled, a recursion depth overflow results in a (quiet) +- // unification failure. +- panicAtUnificationDepthLimit = true +- +- // If enableCoreTypeUnification is set, unification will consider +- // the core types, if any, of non-local (unbound) type parameters. +- enableCoreTypeUnification = true +-) +- +-// A unifier maintains a list of type parameters and +-// corresponding types inferred for each type parameter. +-// A unifier is created by calling newUnifier. +-type unifier struct { +- // handles maps each type parameter to its inferred type through +- // an indirection *Type called (inferred type) "handle". +- // Initially, each type parameter has its own, separate handle, +- // with a nil (i.e., not yet inferred) type. +- // After a type parameter P is unified with a type parameter Q, +- // P and Q share the same handle (and thus type). This ensures +- // that inferring the type for a given type parameter P will +- // automatically infer the same type for all other parameters +- // unified (joined) with P. +- handles map[*types.TypeParam]*types.Type +- depth int // recursion depth during unification +-} +- +-// newUnifier returns a new unifier initialized with the given type parameter +-// and corresponding type argument lists. The type argument list may be shorter +-// than the type parameter list, and it may contain nil types. Matching type +-// parameters and arguments must have the same index. +-func newUnifier(tparams []*types.TypeParam, targs []types.Type) *unifier { +- handles := make(map[*types.TypeParam]*types.Type, len(tparams)) +- // Allocate all handles up-front: in a correct program, all type parameters +- // must be resolved and thus eventually will get a handle. +- // Also, sharing of handles caused by unified type parameters is rare and +- // so it's ok to not optimize for that case (and delay handle allocation). +- for i, x := range tparams { +- var t types.Type +- if i < len(targs) { +- t = targs[i] +- } +- handles[x] = &t +- } +- return &unifier{handles, 0} +-} +- +-// unifyMode controls the behavior of the unifier. +-type unifyMode uint +- +-const ( +- // If unifyModeAssign is set, we are unifying types involved in an assignment: +- // they may match inexactly at the top, but element types must match +- // exactly. +- unifyModeAssign unifyMode = 1 << iota +- +- // If unifyModeExact is set, types unify if they are identical (or can be +- // made identical with suitable arguments for type parameters). +- // Otherwise, a named type and a type literal unify if their +- // underlying types unify, channel directions are ignored, and +- // if there is an interface, the other type must implement the +- // interface. +- unifyModeExact +-) +- +-// This function was copied from go/types/unify.go +-// +-// unify attempts to unify x and y and reports whether it succeeded. +-// As a side-effect, types may be inferred for type parameters. +-// The mode parameter controls how types are compared. +-func (u *unifier) unify(x, y types.Type, mode unifyMode) bool { +- return u.nify(x, y, mode) +-} +- +-// join unifies the given type parameters x and y. +-// If both type parameters already have a type associated with them +-// and they are not joined, join fails and returns false. +-func (u *unifier) join(x, y *types.TypeParam) bool { +- switch hx, hy := u.handles[x], u.handles[y]; { +- case hx == hy: +- // Both type parameters already share the same handle. Nothing to do. 
+- case *hx != nil && *hy != nil: +- // Both type parameters have (possibly different) inferred types. Cannot join. +- return false +- case *hx != nil: +- // Only type parameter x has an inferred type. Use handle of x. +- u.setHandle(y, hx) +- // This case is treated like the default case. +- // case *hy != nil: +- // // Only type parameter y has an inferred type. Use handle of y. +- // u.setHandle(x, hy) +- default: +- // Neither type parameter has an inferred type. Use handle of y. +- u.setHandle(x, hy) +- } +- return true +-} +- +-// asBoundTypeParam returns x.(*types.TypeParam) if x is a type parameter recorded with u. +-// Otherwise, the result is nil. +-func (u *unifier) asBoundTypeParam(x types.Type) *types.TypeParam { +- if x, _ := types.Unalias(x).(*types.TypeParam); x != nil { +- if _, found := u.handles[x]; found { +- return x +- } +- } +- return nil +-} +- +-// setHandle sets the handle for type parameter x +-// (and all its joined type parameters) to h. +-func (u *unifier) setHandle(x *types.TypeParam, h *types.Type) { +- hx := u.handles[x] +- for y, hy := range u.handles { +- if hy == hx { +- u.handles[y] = h +- } +- } +-} +- +-// at returns the (possibly nil) type for type parameter x. +-func (u *unifier) at(x *types.TypeParam) types.Type { +- return *u.handles[x] +-} +- +-// set sets the type t for type parameter x; +-// t must not be nil. +-func (u *unifier) set(x *types.TypeParam, t types.Type) { +- *u.handles[x] = t +-} +- +-// asInterface returns the underlying type of x as an interface if +-// it is a non-type parameter interface. Otherwise it returns nil. +-func asInterface(x types.Type) (i *types.Interface) { +- if _, ok := types.Unalias(x).(*types.TypeParam); !ok { +- i, _ = x.Underlying().(*types.Interface) +- } +- return i +-} +- +-func isTypeParam(t types.Type) bool { +- _, ok := types.Unalias(t).(*types.TypeParam) +- return ok +-} +- +-func asNamed(t types.Type) *types.Named { +- n, _ := types.Unalias(t).(*types.Named) +- return n +-} +- +-func isTypeLit(t types.Type) bool { +- switch types.Unalias(t).(type) { +- case *types.Named, *types.TypeParam: +- return false +- } +- return true +-} +- +-// identicalOrigin reports whether x and y originated in the same declaration. +-func identicalOrigin(x, y *types.Named) bool { +- // TODO(gri) is this correct? +- return x.Origin().Obj() == y.Origin().Obj() +-} +- +-func coreType(t types.Type) types.Type { +- t = types.Unalias(t) +- tpar, _ := t.(*types.TypeParam) +- if tpar == nil { +- return t.Underlying() +- } +- +- return nil +-} +- +-func sameId(obj *types.Var, pkg *types.Package, name string, foldCase bool) bool { +- // If we don't care about capitalization, we also ignore packages. +- if foldCase && strings.EqualFold(obj.Name(), name) { +- return true +- } +- // spec: +- // "Two identifiers are different if they are spelled differently, +- // or if they appear in different packages and are not exported. +- // Otherwise, they are the same." +- if obj.Name() != name { +- return false +- } +- // obj.Name == name +- if obj.Exported() { +- return true +- } +- // not exported, so packages must be the same +- if obj.Pkg() != nil && pkg != nil { +- return obj.Pkg() == pkg +- } +- return obj.Pkg().Path() == pkg.Path() +-} +- +-// nify implements the core unification algorithm which is an +-// adapted version of Checker.identical. For changes to that +-// code the corresponding changes should be made here. +-// Must not be called directly from outside the unifier. 
+-func (u *unifier) nify(x, y types.Type, mode unifyMode) (result bool) { +- u.depth++ +- defer func() { +- u.depth-- +- }() +- +- // nothing to do if x == y +- if x == y || types.Unalias(x) == types.Unalias(y) { +- return true +- } +- +- // Stop gap for cases where unification fails. +- if u.depth > unificationDepthLimit { +- if panicAtUnificationDepthLimit { +- panic("unification reached recursion depth limit") +- } +- return false +- } +- +- // Unification is symmetric, so we can swap the operands. +- // Ensure that if we have at least one +- // - defined type, make sure one is in y +- // - type parameter recorded with u, make sure one is in x +- if asNamed(x) != nil || u.asBoundTypeParam(y) != nil { +- x, y = y, x +- } +- +- // Unification will fail if we match a defined type against a type literal. +- // If we are matching types in an assignment, at the top-level, types with +- // the same type structure are permitted as long as at least one of them +- // is not a defined type. To accommodate for that possibility, we continue +- // unification with the underlying type of a defined type if the other type +- // is a type literal. This is controlled by the exact unification mode. +- // We also continue if the other type is a basic type because basic types +- // are valid underlying types and may appear as core types of type constraints. +- // If we exclude them, inferred defined types for type parameters may not +- // match against the core types of their constraints (even though they might +- // correctly match against some of the types in the constraint's type set). +- // Finally, if unification (incorrectly) succeeds by matching the underlying +- // type of a defined type against a basic type (because we include basic types +- // as type literals here), and if that leads to an incorrectly inferred type, +- // we will fail at function instantiation or argument assignment time. +- // +- // If we have at least one defined type, there is one in y. +- if ny := asNamed(y); mode&unifyModeExact == 0 && ny != nil && isTypeLit(x) { +- y = ny.Underlying() +- // Per the spec, a defined type cannot have an underlying type +- // that is a type parameter. +- // x and y may be identical now +- if x == y || types.Unalias(x) == types.Unalias(y) { +- return true +- } +- } +- +- // Cases where at least one of x or y is a type parameter recorded with u. +- // If we have at least one type parameter, there is one in x. +- // If we have exactly one type parameter, because it is in x, +- // isTypeLit(x) is false and y was not changed above. In other +- // words, if y was a defined type, it is still a defined type +- // (relevant for the logic below). +- switch px, py := u.asBoundTypeParam(x), u.asBoundTypeParam(y); { +- case px != nil && py != nil: +- // both x and y are type parameters +- if u.join(px, py) { +- return true +- } +- // both x and y have an inferred type - they must match +- return u.nify(u.at(px), u.at(py), mode) +- +- case px != nil: +- // x is a type parameter, y is not +- if x := u.at(px); x != nil { +- // x has an inferred type which must match y +- if u.nify(x, y, mode) { +- // We have a match, possibly through underlying types. +- xi := asInterface(x) +- yi := asInterface(y) +- xn := asNamed(x) != nil +- yn := asNamed(y) != nil +- // If we have two interfaces, what to do depends on +- // whether they are named and their method sets. +- if xi != nil && yi != nil { +- // Both types are interfaces. 
+- // If both types are defined types, they must be identical +- // because unification doesn't know which type has the "right" name. +- if xn && yn { +- return types.Identical(x, y) +- } +- return false +- // Below is the original code for reference +- +- // In all other cases, the method sets must match. +- // The types unified so we know that corresponding methods +- // match and we can simply compare the number of methods. +- // TODO(gri) We may be able to relax this rule and select +- // the more general interface. But if one of them is a defined +- // type, it's not clear how to choose and whether we introduce +- // an order dependency or not. Requiring the same method set +- // is conservative. +- // if len(xi.typeSet().methods) != len(yi.typeSet().methods) { +- // return false +- // } +- } else if xi != nil || yi != nil { +- // One but not both of them are interfaces. +- // In this case, either x or y could be viable matches for the corresponding +- // type parameter, which means choosing either introduces an order dependence. +- // Therefore, we must fail unification (go.dev/issue/60933). +- return false +- } +- // If we have inexact unification and one of x or y is a defined type, select the +- // defined type. This ensures that in a series of types, all matching against the +- // same type parameter, we infer a defined type if there is one, independent of +- // order. Type inference or assignment may fail, which is ok. +- // Selecting a defined type, if any, ensures that we don't lose the type name; +- // and since we have inexact unification, a value of equally named or matching +- // undefined type remains assignable (go.dev/issue/43056). +- // +- // Similarly, if we have inexact unification and there are no defined types but +- // channel types, select a directed channel, if any. This ensures that in a series +- // of unnamed types, all matching against the same type parameter, we infer the +- // directed channel if there is one, independent of order. +- // Selecting a directional channel, if any, ensures that a value of another +- // inexactly unifying channel type remains assignable (go.dev/issue/62157). +- // +- // If we have multiple defined channel types, they are either identical or we +- // have assignment conflicts, so we can ignore directionality in this case. +- // +- // If we have defined and literal channel types, a defined type wins to avoid +- // order dependencies. +- if mode&unifyModeExact == 0 { +- switch { +- case xn: +- // x is a defined type: nothing to do. +- case yn: +- // x is not a defined type and y is a defined type: select y. +- u.set(px, y) +- default: +- // Neither x nor y are defined types. +- if yc, _ := y.Underlying().(*types.Chan); yc != nil && yc.Dir() != types.SendRecv { +- // y is a directed channel type: select y. +- u.set(px, y) +- } +- } +- } +- return true +- } +- return false +- } +- // otherwise, infer type from y +- u.set(px, y) +- return true +- } +- +- // If u.EnableInterfaceInference is set and we don't require exact unification, +- // if both types are interfaces, one interface must have a subset of the +- // methods of the other and corresponding method signatures must unify. +- // If only one type is an interface, all its methods must be present in the +- // other type and corresponding method signatures must unify. +- +- // Unless we have exact unification, neither x nor y are interfaces now. +- // Except for unbound type parameters (see below), x and y must be structurally +- // equivalent to unify. 
+- +- // If we get here and x or y is a type parameter, they are unbound +- // (not recorded with the unifier). +- // Ensure that if we have at least one type parameter, it is in x +- // (the earlier swap checks for _recorded_ type parameters only). +- // This ensures that the switch switches on the type parameter. +- // +- // TODO(gri) Factor out type parameter handling from the switch. +- if isTypeParam(y) { +- x, y = y, x +- } +- +- // Type elements (array, slice, etc. elements) use emode for unification. +- // Element types must match exactly if the types are used in an assignment. +- emode := mode +- if mode&unifyModeAssign != 0 { +- emode |= unifyModeExact +- } +- +- // Continue with unaliased types but don't lose original alias names, if any (go.dev/issue/67628). +- xorig, x := x, types.Unalias(x) +- yorig, y := y, types.Unalias(y) +- +- switch x := x.(type) { +- case *types.Basic: +- // Basic types are singletons except for the rune and byte +- // aliases, thus we cannot solely rely on the x == y check +- // above. See also comment in TypeName.IsAlias. +- if y, ok := y.(*types.Basic); ok { +- return x.Kind() == y.Kind() +- } +- +- case *types.Array: +- // Two array types unify if they have the same array length +- // and their element types unify. +- if y, ok := y.(*types.Array); ok { +- // If one or both array lengths are unknown (< 0) due to some error, +- // assume they are the same to avoid spurious follow-on errors. +- return (x.Len() < 0 || y.Len() < 0 || x.Len() == y.Len()) && u.nify(x.Elem(), y.Elem(), emode) +- } +- +- case *types.Slice: +- // Two slice types unify if their element types unify. +- if y, ok := y.(*types.Slice); ok { +- return u.nify(x.Elem(), y.Elem(), emode) +- } +- +- case *types.Struct: +- // Two struct types unify if they have the same sequence of fields, +- // and if corresponding fields have the same names, their (field) types unify, +- // and they have identical tags. Two embedded fields are considered to have the same +- // name. Lower-case field names from different packages are always different. +- if y, ok := y.(*types.Struct); ok { +- if x.NumFields() == y.NumFields() { +- for i := range x.NumFields() { +- f := x.Field(i) +- g := y.Field(i) +- if f.Embedded() != g.Embedded() || +- x.Tag(i) != y.Tag(i) || +- !sameId(f, g.Pkg(), g.Name(), false) || +- !u.nify(f.Type(), g.Type(), emode) { +- return false +- } +- } +- return true +- } +- } +- +- case *types.Pointer: +- // Two pointer types unify if their base types unify. +- if y, ok := y.(*types.Pointer); ok { +- return u.nify(x.Elem(), y.Elem(), emode) +- } +- +- case *types.Tuple: +- // Two tuples types unify if they have the same number of elements +- // and the types of corresponding elements unify. +- if y, ok := y.(*types.Tuple); ok { +- if x.Len() == y.Len() { +- if x != nil { +- for i := range x.Len() { +- v := x.At(i) +- w := y.At(i) +- if !u.nify(v.Type(), w.Type(), mode) { +- return false +- } +- } +- } +- return true +- } +- } +- +- case *types.Signature: +- // Two function types unify if they have the same number of parameters +- // and result values, corresponding parameter and result types unify, +- // and either both functions are variadic or neither is. +- // Parameter and result names are not required to match. +- // TODO(gri) handle type parameters or document why we can ignore them. 
+- if y, ok := y.(*types.Signature); ok { +- return x.Variadic() == y.Variadic() && +- u.nify(x.Params(), y.Params(), emode) && +- u.nify(x.Results(), y.Results(), emode) +- } +- +- case *types.Interface: +- return false +- // Below is the original code +- +- // Two interface types unify if they have the same set of methods with +- // the same names, and corresponding function types unify. +- // Lower-case method names from different packages are always different. +- // The order of the methods is irrelevant. +- // xset := x.typeSet() +- // yset := y.typeSet() +- // if xset.comparable != yset.comparable { +- // return false +- // } +- // if !xset.terms.equal(yset.terms) { +- // return false +- // } +- // a := xset.methods +- // b := yset.methods +- // if len(a) == len(b) { +- // // Interface types are the only types where cycles can occur +- // // that are not "terminated" via named types; and such cycles +- // // can only be created via method parameter types that are +- // // anonymous interfaces (directly or indirectly) embedding +- // // the current interface. Example: +- // // +- // // type T interface { +- // // m() interface{T} +- // // } +- // // +- // // If two such (differently named) interfaces are compared, +- // // endless recursion occurs if the cycle is not detected. +- // // +- // // If x and y were compared before, they must be equal +- // // (if they were not, the recursion would have stopped); +- // // search the ifacePair stack for the same pair. +- // // +- // // This is a quadratic algorithm, but in practice these stacks +- // // are extremely short (bounded by the nesting depth of interface +- // // type declarations that recur via parameter types, an extremely +- // // rare occurrence). An alternative implementation might use a +- // // "visited" map, but that is probably less efficient overall. +- // q := &ifacePair{x, y, p} +- // for p != nil { +- // if p.identical(q) { +- // return true // same pair was compared before +- // } +- // p = p.prev +- // } +- // if debug { +- // assertSortedMethods(a) +- // assertSortedMethods(b) +- // } +- // for i, f := range a { +- // g := b[i] +- // if f.Id() != g.Id() || !u.nify(f.typ, g.typ, exact, q) { +- // return false +- // } +- // } +- // return true +- // } +- +- case *types.Map: +- // Two map types unify if their key and value types unify. +- if y, ok := y.(*types.Map); ok { +- return u.nify(x.Key(), y.Key(), emode) && u.nify(x.Elem(), y.Elem(), emode) +- } +- +- case *types.Chan: +- // Two channel types unify if their value types unify +- // and if they have the same direction. +- // The channel direction is ignored for inexact unification. +- if y, ok := y.(*types.Chan); ok { +- return (mode&unifyModeExact == 0 || x.Dir() == y.Dir()) && u.nify(x.Elem(), y.Elem(), emode) +- } +- +- case *types.Named: +- // Two named types unify if their type names originate in the same type declaration. +- // If they are instantiated, their type argument lists must unify. +- if y := asNamed(y); y != nil { +- // Check type arguments before origins so they unify +- // even if the origins don't match; for better error +- // messages (see go.dev/issue/53692). 
+- xargs := x.TypeArgs() +- yargs := y.TypeArgs() +- if xargs.Len() != yargs.Len() { +- return false +- } +- for i := range xargs.Len() { +- xarg := xargs.At(i) +- yarg := yargs.At(i) +- if !u.nify(xarg, yarg, mode) { +- return false +- } +- } +- return identicalOrigin(x, y) +- } +- +- case *types.TypeParam: +- // By definition, a valid type argument must be in the type set of +- // the respective type constraint. Therefore, the type argument's +- // underlying type must be in the set of underlying types of that +- // constraint. If there is a single such underlying type, it's the +- // constraint's core type. It must match the type argument's under- +- // lying type, irrespective of whether the actual type argument, +- // which may be a defined type, is actually in the type set (that +- // will be determined at instantiation time). +- // Thus, if we have the core type of an unbound type parameter, +- // we know the structure of the possible types satisfying such +- // parameters. Use that core type for further unification +- // (see go.dev/issue/50755 for a test case). +- if enableCoreTypeUnification { +- // Because the core type is always an underlying type, +- // unification will take care of matching against a +- // defined or literal type automatically. +- // If y is also an unbound type parameter, we will end +- // up here again with x and y swapped, so we don't +- // need to take care of that case separately. +- if cx := coreType(x); cx != nil { +- // If y is a defined type, it may not match against cx which +- // is an underlying type (incl. int, string, etc.). Use assign +- // mode here so that the unifier automatically takes under(y) +- // if necessary. +- return u.nify(cx, yorig, unifyModeAssign) +- } +- } +- // x != y and there's nothing to do +- +- case nil: +- // avoid a crash in case of nil type +- +- default: +- panic(fmt.Sprintf("u.nify(%s, %s, %d)", xorig, yorig, mode)) +- } +- +- return false +-} +diff -urN a/gopls/internal/golang/completion/unimported.go b/gopls/internal/golang/completion/unimported.go +--- a/gopls/internal/golang/completion/unimported.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/golang/completion/unimported.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,475 +0,0 @@ +-// Copyright 2025 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package completion +- +-// unimported completion is invoked when the user types something like 'foo.xx', +-// foo is known to be a package name not yet imported in the current file, and +-// xx (or whatever the user has typed) is interpreted as a hint (pattern) for the +-// member of foo that the user is looking for. +-// +-// This code looks for a suitable completion in a number of places. A 'suitable +-// completion' is an exported symbol (so a type, const, var, or func) from package +-// foo, which, after converting everything to lower case, has the pattern as a +-// subsequence. +-// +-// The code looks for a suitable completion in +-// 1. the imports of some other file of the current package, +-// 2. the standard library, +-// 3. the imports of some other file in the current workspace, +-// 4. imports in the current module with 'foo' as the explicit package name, +-// 5. the module cache, +-// It stops at the first success. 
+- +-import ( +- "context" +- "fmt" +- "go/ast" +- "go/printer" +- "go/token" +- "path" +- "slices" +- "strings" +- +- "golang.org/x/tools/gopls/internal/cache/metadata" +- "golang.org/x/tools/gopls/internal/golang/completion/snippet" +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/gopls/internal/util/bug" +- "golang.org/x/tools/internal/imports" +- "golang.org/x/tools/internal/modindex" +- "golang.org/x/tools/internal/stdlib" +- "golang.org/x/tools/internal/versions" +-) +- +-func (c *completer) unimported(ctx context.Context, pkgname metadata.PackageName, prefix string) { +- wsIDs, ourIDs := c.findPackageIDs(pkgname) +- stdpkgs := c.stdlibPkgs(pkgname) +- if len(ourIDs) > 0 { +- // use the one in the current package, if possible +- items := c.pkgIDmatches(ctx, ourIDs, pkgname, prefix) +- if c.scoreList(items) { +- return +- } +- } +- // do the stdlib next. +- items := c.stdlibMatches(stdpkgs, pkgname, prefix) +- if c.scoreList(items) { +- return +- } +- +- // look in the rest of the workspace +- items = c.pkgIDmatches(ctx, wsIDs, pkgname, prefix) +- if c.scoreList(items) { +- return +- } +- +- // before looking in the module cache, maybe it is an explicit +- // package name used in this module +- if c.explicitPkgName(ctx, pkgname, prefix) { +- return +- } +- +- // look in the module cache +- items, err := c.modcacheMatches(pkgname, prefix) +- items = c.filterGoMod(ctx, items) +- if err == nil && c.scoreList(items) { +- return +- } +- +- // out of things to do +-} +- +-// prefer completion items that are referenced in the go.mod file +-func (c *completer) filterGoMod(ctx context.Context, items []CompletionItem) []CompletionItem { +- gomod := c.pkg.Metadata().Module.GoMod +- uri := protocol.URIFromPath(gomod) +- fh, err := c.snapshot.ReadFile(ctx, uri) +- if err != nil { +- return items +- } +- pm, err := c.snapshot.ParseMod(ctx, fh) +- if err != nil || pm == nil { +- return items +- } +- // if any of the items match any of the req, just return those +- reqnames := []string{} +- for _, req := range pm.File.Require { +- reqnames = append(reqnames, req.Mod.Path) +- } +- better := []CompletionItem{} +- for _, compl := range items { +- if len(compl.AdditionalTextEdits) == 0 { +- continue +- } +- // import "foof/pkg" +- flds := strings.FieldsFunc(compl.AdditionalTextEdits[0].NewText, func(r rune) bool { +- return r == '"' || r == '/' +- }) +- if len(flds) < 3 { +- continue +- } +- if slices.Contains(reqnames, flds[1]) { +- better = append(better, compl) +- } +- } +- if len(better) > 0 { +- return better +- } +- return items +-} +- +-// see if some file in the current package satisfied a foo. 
import +-// because foo is an explicit package name (import foo "a.b.c") +-func (c *completer) explicitPkgName(ctx context.Context, pkgname metadata.PackageName, prefix string) bool { +- for _, pgf := range c.pkg.CompiledGoFiles() { +- imports := pgf.File.Imports +- for _, imp := range imports { +- if imp.Name != nil && imp.Name.Name == string(pkgname) { +- path := strings.Trim(imp.Path.Value, `"`) +- if c.tryPath(ctx, metadata.PackagePath(path), string(pkgname), prefix) { +- return true // one is enough +- } +- } +- } +- } +- return false +-} +- +-// see if this path contains a usable import with explict package name +-func (c *completer) tryPath(ctx context.Context, path metadata.PackagePath, pkgname, prefix string) bool { +- packages := c.snapshot.MetadataGraph().ForPackagePath +- ids := []metadata.PackageID{} +- for _, pkg := range packages[path] { // could there ever be more than one? +- ids = append(ids, pkg.ID) // pkg.ID. ID: "math/rand" but Name: "rand" +- } +- items := c.pkgIDmatches(ctx, ids, metadata.PackageName(pkgname), prefix) +- return c.scoreList(items) +-} +- +-// find all the packageIDs for packages in the workspace that have the desired name +-// thisPkgIDs contains the ones known to the current package, wsIDs contains the others +-func (c *completer) findPackageIDs(pkgname metadata.PackageName) (wsIDs, thisPkgIDs []metadata.PackageID) { +- g := c.snapshot.MetadataGraph() +- for pid, pkg := range c.snapshot.MetadataGraph().Packages { +- if pkg.Name != pkgname { +- continue +- } +- imports := g.ImportedBy[pid] +- // Metadata is not canonical: it may be held onto by a package. Therefore, +- // we must compare by ID. +- thisPkg := func(mp *metadata.Package) bool { return mp.ID == c.pkg.Metadata().ID } +- if slices.ContainsFunc(imports, thisPkg) { +- thisPkgIDs = append(thisPkgIDs, pid) +- } else { +- wsIDs = append(wsIDs, pid) +- } +- } +- return +-} +- +-// find all the stdlib packages that have the desired name +-func (c *completer) stdlibPkgs(pkgname metadata.PackageName) []metadata.PackagePath { +- var pkgs []metadata.PackagePath // stlib packages that match pkg +- for pkgpath := range stdlib.PackageSymbols { +- v := metadata.PackageName(path.Base(pkgpath)) +- if v == pkgname { +- pkgs = append(pkgs, metadata.PackagePath(pkgpath)) +- } else if imports.WithoutVersion(string(pkgpath)) == string(pkgname) { +- pkgs = append(pkgs, metadata.PackagePath(pkgpath)) +- } +- } +- return pkgs +-} +- +-// return CompletionItems for all matching symbols in the packages in ids. +-func (c *completer) pkgIDmatches(ctx context.Context, ids []metadata.PackageID, pkgname metadata.PackageName, prefix string) []CompletionItem { +- pattern := strings.ToLower(prefix) +- allpkgsyms, err := c.snapshot.Symbols(ctx, ids...) +- if err != nil { +- return nil // would if be worth retrying the ids one by one? +- } +- if len(allpkgsyms) != len(ids) { +- bug.Reportf("Symbols returned %d values for %d pkgIDs", len(allpkgsyms), len(ids)) +- return nil +- } +- var got []CompletionItem +- for i, pkgID := range ids { +- pkg := c.snapshot.MetadataGraph().Packages[pkgID] +- if pkg == nil { +- bug.Reportf("no metadata for %s", pkgID) +- continue // something changed underfoot, otherwise can't happen +- } +- pkgsyms := allpkgsyms[i] +- pkgfname := pkgsyms.Files[0].Path() +- if !imports.CanUse(c.filename, pkgfname) { +- // avoid unusable internal, etc +- continue +- } +- // are any of these any good? 
+- for np, asym := range pkgsyms.Symbols { +- for _, sym := range asym { +- if !token.IsExported(sym.Name) { +- continue +- } +- if !usefulCompletion(sym.Name, pattern) { +- // for json.U, the existing code finds InvalidUTF8Error +- continue +- } +- var params []string +- var kind protocol.CompletionItemKind +- var detail string +- switch sym.Kind { +- case protocol.Function: +- foundURI := pkgsyms.Files[np] +- fh := c.snapshot.FindFile(foundURI) +- pgf, err := c.snapshot.ParseGo(ctx, fh, 0) +- if err == nil { +- params = funcParams(pgf.File, sym.Name) +- } +- kind = protocol.FunctionCompletion +- detail = fmt.Sprintf("func (from %q)", pkg.PkgPath) +- case protocol.Variable, protocol.Struct: +- kind = protocol.VariableCompletion +- detail = fmt.Sprintf("var (from %q)", pkg.PkgPath) +- case protocol.Constant: +- kind = protocol.ConstantCompletion +- detail = fmt.Sprintf("const (from %q)", pkg.PkgPath) +- default: +- continue +- } +- got = c.appendNewItem(got, sym.Name, +- detail, +- pkg.PkgPath, +- kind, +- pkgname, params) +- } +- } +- } +- return got +-} +- +-// return CompletionItems for all the matches in packages in pkgs. +-func (c *completer) stdlibMatches(pkgs []metadata.PackagePath, pkg metadata.PackageName, prefix string) []CompletionItem { +- // check for deprecated symbols someday +- got := make([]CompletionItem, 0) +- pattern := strings.ToLower(prefix) +- // avoid non-determinacy, especially for marker tests +- slices.Sort(pkgs) +- for _, candpkg := range pkgs { +- if std, ok := stdlib.PackageSymbols[string(candpkg)]; ok { +- for _, sym := range std { +- if !usefulCompletion(sym.Name, pattern) { +- continue +- } +- if !versions.AtLeast(c.goversion, sym.Version.String()) { +- continue +- } +- var kind protocol.CompletionItemKind +- var detail string +- var params []string +- switch sym.Kind { +- case stdlib.Func: +- params = parseSignature(sym.Signature) +- kind = protocol.FunctionCompletion +- detail = fmt.Sprintf("func (from %q)", candpkg) +- case stdlib.Const: +- kind = protocol.ConstantCompletion +- detail = fmt.Sprintf("const (from %q)", candpkg) +- case stdlib.Var: +- kind = protocol.VariableCompletion +- detail = fmt.Sprintf("var (from %q)", candpkg) +- case stdlib.Type: +- kind = protocol.VariableCompletion +- detail = fmt.Sprintf("type (from %q)", candpkg) +- default: +- continue +- } +- got = c.appendNewItem(got, sym.Name, +- detail, +- candpkg, +- kind, +- pkg, params) +- } +- } +- } +- return got +-} +- +-func (c *completer) modcacheMatches(pkg metadata.PackageName, prefix string) ([]CompletionItem, error) { +- ix, err := c.snapshot.View().ModcacheIndex() +- if err != nil { +- return nil, err +- } +- // retrieve everything and let usefulCompletion() and the matcher sort them out +- cands := ix.Lookup(string(pkg), "", true) +- lx := len(cands) +- got := make([]CompletionItem, 0, lx) +- pattern := strings.ToLower(prefix) +- for _, cand := range cands { +- if !usefulCompletion(cand.Name, pattern) { +- continue +- } +- var params []string +- var kind protocol.CompletionItemKind +- var detail string +- switch cand.Type { +- case modindex.Func: +- for _, f := range cand.Sig { +- params = append(params, fmt.Sprintf("%s %s", f.Arg, f.Type)) +- } +- kind = protocol.FunctionCompletion +- detail = fmt.Sprintf("func (from %s)", cand.ImportPath) +- case modindex.Var: +- kind = protocol.VariableCompletion +- detail = fmt.Sprintf("var (from %s)", cand.ImportPath) +- case modindex.Const: +- kind = protocol.ConstantCompletion +- detail = fmt.Sprintf("const (from %s)", cand.ImportPath) 
+- case modindex.Type: // might be a type alias +- kind = protocol.VariableCompletion +- detail = fmt.Sprintf("type (from %s)", cand.ImportPath) +- default: +- continue +- } +- got = c.appendNewItem(got, cand.Name, +- detail, +- metadata.PackagePath(cand.ImportPath), +- kind, +- pkg, params) +- } +- return got, nil +-} +- +-func (c *completer) appendNewItem(got []CompletionItem, name, detail string, path metadata.PackagePath, kind protocol.CompletionItemKind, pkg metadata.PackageName, params []string) []CompletionItem { +- item := CompletionItem{ +- Label: name, +- Detail: detail, +- InsertText: name, +- Kind: kind, +- } +- imp := importInfo{ +- importPath: string(path), +- name: string(pkg), +- } +- if imports.ImportPathToAssumedName(string(path)) == string(pkg) { +- imp.name = "" +- } +- item.AdditionalTextEdits, _ = c.importEdits(&imp) +- if params != nil { +- var sn snippet.Builder +- c.functionCallSnippet(name, nil, params, &sn) +- item.snippet = &sn +- } +- got = append(got, item) +- return got +-} +- +-// score the list. Return true if any item is added to c.items +-func (c *completer) scoreList(items []CompletionItem) bool { +- ret := false +- for _, item := range items { +- item.Score = float64(c.matcher.Score(item.Label)) +- if item.Score > 0 { +- c.items = append(c.items, item) +- ret = true +- } +- } +- return ret +-} +- +-// pattern is always the result of strings.ToLower +-func usefulCompletion(name, pattern string) bool { +- // this travesty comes from foo.(type) somehow. see issue59096.txt +- if pattern == "_" { +- return true +- } +- // convert both to lower case, and then the runes in the pattern have to occur, in order, +- // in the name +- cand := strings.ToLower(name) +- for _, r := range pattern { +- ix := strings.IndexRune(cand, r) +- if ix < 0 { +- return false +- } +- cand = cand[ix+1:] +- } +- return true +-} +- +-// return a printed version of the function arguments for snippets +-func funcParams(f *ast.File, fname string) []string { +- var params []string +- setParams := func(list *ast.FieldList) { +- if list == nil { +- return +- } +- var cfg printer.Config // slight overkill +- param := func(name string, typ ast.Expr) { +- var buf strings.Builder +- buf.WriteString(name) +- buf.WriteByte(' ') +- cfg.Fprint(&buf, token.NewFileSet(), typ) // ignore error +- params = append(params, buf.String()) +- } +- +- for _, field := range list.List { +- if field.Names != nil { +- for _, name := range field.Names { +- param(name.Name, field.Type) +- } +- } else { +- param("_", field.Type) +- } +- } +- } +- for _, n := range f.Decls { +- switch x := n.(type) { +- case *ast.FuncDecl: +- if x.Recv == nil && x.Name.Name == fname { +- setParams(x.Type.Params) +- } +- } +- } +- return params +-} +- +-// extract the formal parameters from the signature. 
+-// func[M1 ~map[K]V, M2 ~map[K]V, K comparable, V any](dst M1, src M2) -> []{"dst M1", "src M2"} +-// func[K comparable, V any](seq iter.Seq2[K, V]) map[K]V -> []{"seq iter.Seq2[K, V]"} +-// func(args ...any) *Logger -> []{"args ...any"} +-// func[M ~map[K]V, K comparable, V any](m M, del func(K, V) bool) -> []{"m M", "del func(K, V) bool"} +-func parseSignature(sig string) []string { +- var level int // nesting level of delimiters +- var processing bool // are we doing the params +- var last int // start of current parameter +- var params []string +- for i := range len(sig) { +- switch sig[i] { +- case '[', '{': +- level++ +- case ']', '}': +- level-- +- case '(': +- level++ +- if level == 1 { +- processing = true +- last = i + 1 +- } +- case ')': +- level-- +- if level == 0 && processing { // done +- if i > last { +- params = append(params, strings.TrimSpace(sig[last:i])) +- } +- return params +- } +- case ',': +- if level == 1 && processing { +- params = append(params, strings.TrimSpace(sig[last:i])) +- last = i + 1 +- } +- } +- } +- return nil +-} +diff -urN a/gopls/internal/golang/completion/util.go b/gopls/internal/golang/completion/util.go +--- a/gopls/internal/golang/completion/util.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/golang/completion/util.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,320 +0,0 @@ +-// Copyright 2020 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package completion +- +-import ( +- "go/ast" +- "go/token" +- "go/types" +- +- "golang.org/x/tools/go/types/typeutil" +- "golang.org/x/tools/gopls/internal/golang" +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/gopls/internal/util/safetoken" +- "golang.org/x/tools/internal/diff" +- "golang.org/x/tools/internal/typeparams" +-) +- +-// exprAtPos returns the index of the expression containing pos. +-func exprAtPos(pos token.Pos, args []ast.Expr) int { +- for i, expr := range args { +- if expr.Pos() <= pos && pos <= expr.End() { +- return i +- } +- } +- return len(args) +-} +- +-// eachField invokes fn for each field that can be selected from a +-// value of type T. +-func eachField(T types.Type, fn func(*types.Var)) { +- // TODO(adonovan): this algorithm doesn't exclude ambiguous +- // selections that match more than one field/method. +- // types.NewSelectionSet should do that for us. +- +- // for termination on recursive types +- var seen typeutil.Map +- +- var visit func(T types.Type) +- visit = func(T types.Type) { +- // T may be a Struct, optionally Named, with an optional +- // Pointer (with optional Aliases at every step!): +- // Consider: type T *struct{ f int }; _ = T(nil).f +- if T, ok := typeparams.Deref(T).Underlying().(*types.Struct); ok { +- if seen.At(T) != nil { +- return +- } +- +- for i := range T.NumFields() { +- f := T.Field(i) +- fn(f) +- if f.Anonymous() { +- seen.Set(T, true) +- visit(f.Type()) +- } +- } +- } +- } +- visit(T) +-} +- +-// typeIsValid reports whether typ doesn't contain any Invalid types. +-func typeIsValid(typ types.Type) bool { +- // Check named types separately, because we don't want +- // to call Underlying() on them to avoid problems with recursive types. 
+- if _, ok := types.Unalias(typ).(*types.Named); ok { +- return true +- } +- +- switch typ := typ.Underlying().(type) { +- case *types.Basic: +- return typ.Kind() != types.Invalid +- case *types.Array: +- return typeIsValid(typ.Elem()) +- case *types.Slice: +- return typeIsValid(typ.Elem()) +- case *types.Pointer: +- return typeIsValid(typ.Elem()) +- case *types.Map: +- return typeIsValid(typ.Key()) && typeIsValid(typ.Elem()) +- case *types.Chan: +- return typeIsValid(typ.Elem()) +- case *types.Signature: +- return typeIsValid(typ.Params()) && typeIsValid(typ.Results()) +- case *types.Tuple: +- for i := range typ.Len() { +- if !typeIsValid(typ.At(i).Type()) { +- return false +- } +- } +- return true +- case *types.Struct, *types.Interface: +- // Don't bother checking structs, interfaces for validity. +- return true +- default: +- return false +- } +-} +- +-// resolveInvalid traverses the node of the AST that defines the scope +-// containing the declaration of obj, and attempts to find a user-friendly +-// name for its invalid type. The resulting Object and its Type are fake. +-func resolveInvalid(fset *token.FileSet, obj types.Object, node ast.Node, info *types.Info) types.Object { +- var resultExpr ast.Expr +- ast.Inspect(node, func(node ast.Node) bool { +- switch n := node.(type) { +- case *ast.ValueSpec: +- for _, name := range n.Names { +- if info.Defs[name] == obj { +- resultExpr = n.Type +- } +- } +- return false +- case *ast.Field: // This case handles parameters and results of a FuncDecl or FuncLit. +- for _, name := range n.Names { +- if info.Defs[name] == obj { +- resultExpr = n.Type +- } +- } +- return false +- default: +- return true +- } +- }) +- // Construct a fake type for the object and return a fake object with this type. +- typename := golang.FormatNode(fset, resultExpr) +- typ := types.NewNamed(types.NewTypeName(token.NoPos, obj.Pkg(), typename, nil), types.Typ[types.Invalid], nil) +- v := types.NewVar(obj.Pos(), obj.Pkg(), obj.Name(), typ) +- v.SetKind(types.PackageVar) +- return v +-} +- +-// TODO(adonovan): inline these. +-func isVar(obj types.Object) bool { return is[*types.Var](obj) } +-func isTypeName(obj types.Object) bool { return is[*types.TypeName](obj) } +-func isFunc(obj types.Object) bool { return is[*types.Func](obj) } +-func isPkgName(obj types.Object) bool { return is[*types.PkgName](obj) } +- +-// isPointer reports whether T is a Pointer, or an alias of one. +-// It returns false for a Named type whose Underlying is a Pointer. +-// +-// TODO(adonovan): shouldn't this use CoreType(T)? +-func isPointer(T types.Type) bool { return is[*types.Pointer](types.Unalias(T)) } +- +-// isEmptyInterface whether T is a (possibly Named or Alias) empty interface +-// type, such that every type is assignable to T. +-// +-// isEmptyInterface returns false for type parameters, since they have +-// different assignability rules. +-func isEmptyInterface(T types.Type) bool { +- if _, ok := T.(*types.TypeParam); ok { +- return false +- } +- intf, _ := T.Underlying().(*types.Interface) +- return intf != nil && intf.Empty() +-} +- +-func isUntyped(T types.Type) bool { +- if basic, ok := types.Unalias(T).(*types.Basic); ok { +- return basic.Info()&types.IsUntyped > 0 +- } +- return false +-} +- +-func deslice(T types.Type) types.Type { +- if slice, ok := T.Underlying().(*types.Slice); ok { +- return slice.Elem() +- } +- return nil +-} +- +-// enclosingSelector returns the enclosing *ast.SelectorExpr when pos is in the +-// selector. 
+-func enclosingSelector(path []ast.Node, pos token.Pos) *ast.SelectorExpr { +- if len(path) == 0 { +- return nil +- } +- +- if sel, ok := path[0].(*ast.SelectorExpr); ok { +- return sel +- } +- +- // TODO(adonovan): consider ast.ParenExpr (e.g. (x).name) +- if _, ok := path[0].(*ast.Ident); ok && len(path) > 1 { +- if sel, ok := path[1].(*ast.SelectorExpr); ok && pos >= sel.Sel.Pos() { +- return sel +- } +- } +- +- return nil +-} +- +-// enclosingDeclLHS returns LHS idents from containing value spec or +-// assign statement. +-func enclosingDeclLHS(path []ast.Node) []*ast.Ident { +- for _, n := range path { +- switch n := n.(type) { +- case *ast.ValueSpec: +- return n.Names +- case *ast.AssignStmt: +- ids := make([]*ast.Ident, 0, len(n.Lhs)) +- for _, e := range n.Lhs { +- if id, ok := e.(*ast.Ident); ok { +- ids = append(ids, id) +- } +- } +- return ids +- } +- } +- +- return nil +-} +- +-// exprObj returns the types.Object associated with the *ast.Ident or +-// *ast.SelectorExpr e. +-func exprObj(info *types.Info, e ast.Expr) types.Object { +- var ident *ast.Ident +- switch expr := e.(type) { +- case *ast.Ident: +- ident = expr +- case *ast.SelectorExpr: +- ident = expr.Sel +- default: +- return nil +- } +- +- return info.ObjectOf(ident) +-} +- +-// typeConversion returns the type being converted to if call is a type +-// conversion expression. +-func typeConversion(call *ast.CallExpr, info *types.Info) types.Type { +- // Type conversion (e.g. "float64(foo)"). +- if fun, _ := exprObj(info, call.Fun).(*types.TypeName); fun != nil { +- return fun.Type() +- } +- +- return nil +-} +- +-// fieldsAccessible returns whether s has at least one field accessible by p. +-func fieldsAccessible(s *types.Struct, p *types.Package) bool { +- for i := range s.NumFields() { +- f := s.Field(i) +- if f.Exported() || f.Pkg() == p { +- return true +- } +- } +- return false +-} +- +-// prevStmt returns the statement that precedes the statement containing pos. +-// For example: +-// +-// foo := 1 +-// bar(1 + 2<>) +-// +-// If "<>" is pos, prevStmt returns "foo := 1" +-func prevStmt(pos token.Pos, path []ast.Node) ast.Stmt { +- var blockLines []ast.Stmt +- for i := 0; i < len(path) && blockLines == nil; i++ { +- switch n := path[i].(type) { +- case *ast.BlockStmt: +- blockLines = n.List +- case *ast.CommClause: +- blockLines = n.Body +- case *ast.CaseClause: +- blockLines = n.Body +- } +- } +- +- for i := len(blockLines) - 1; i >= 0; i-- { +- if blockLines[i].End() < pos { +- return blockLines[i] +- } +- } +- +- return nil +-} +- +-// isBasicKind returns whether t is a basic type of kind k. +-func isBasicKind(t types.Type, k types.BasicInfo) bool { +- b, _ := t.Underlying().(*types.Basic) +- return b != nil && b.Info()&k > 0 +-} +- +-func (c *completer) editText(from, to token.Pos, newText string) ([]protocol.TextEdit, error) { +- start, end, err := safetoken.Offsets(c.pgf.Tok, from, to) +- if err != nil { +- return nil, err // can't happen: from/to came from c +- } +- return protocol.EditsFromDiffEdits(c.mapper, []diff.Edit{{ +- Start: start, +- End: end, +- New: newText, +- }}) +-} +- +-// assignableTo is like types.AssignableTo, but returns false if +-// either type is invalid. +-func assignableTo(x, to types.Type) bool { +- if types.Unalias(x) == types.Typ[types.Invalid] || +- types.Unalias(to) == types.Typ[types.Invalid] { +- return false +- } +- +- return types.AssignableTo(x, to) +-} +- +-// convertibleTo is like types.ConvertibleTo, but returns false if +-// either type is invalid. 
+-func convertibleTo(x, to types.Type) bool { +- if types.Unalias(x) == types.Typ[types.Invalid] || +- types.Unalias(to) == types.Typ[types.Invalid] { +- return false +- } +- +- return types.ConvertibleTo(x, to) +-} +diff -urN a/gopls/internal/golang/counters.go b/gopls/internal/golang/counters.go +--- a/gopls/internal/golang/counters.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/golang/counters.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,22 +0,0 @@ +-// Copyright 2025 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package golang +- +-import "golang.org/x/telemetry/counter" +- +-// Proposed counters for evaluating gopls extract, inline, and package rename. These counters +-// increment when the user attempts to perform one of these operations, +-// regardless of whether it succeeds. +-var ( +- countExtractFunction = counter.New("gopls/extract:func") +- countExtractMethod = counter.New("gopls/extract:method") +- countExtractVariable = counter.New("gopls/extract:variable") +- countExtractVariableAll = counter.New("gopls/extract:variable-all") +- +- countInlineCall = counter.New("gopls/inline:call") +- countInlineVariable = counter.New("gopls/inline:variable") +- +- countRenamePackage = counter.New("gopls/renamekind:package") +-) +diff -urN a/gopls/internal/golang/definition.go b/gopls/internal/golang/definition.go +--- a/gopls/internal/golang/definition.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/golang/definition.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,506 +0,0 @@ +-// Copyright 2023 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package golang +- +-import ( +- "context" +- "errors" +- "fmt" +- "go/ast" +- "go/parser" +- "go/token" +- "go/types" +- "regexp" +- "strings" +- +- "golang.org/x/tools/go/ast/astutil" +- "golang.org/x/tools/gopls/internal/cache" +- "golang.org/x/tools/gopls/internal/cache/metadata" +- "golang.org/x/tools/gopls/internal/cache/parsego" +- "golang.org/x/tools/gopls/internal/file" +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/gopls/internal/util/bug" +- internalastutil "golang.org/x/tools/internal/astutil" +- "golang.org/x/tools/internal/event" +-) +- +-// Definition handles the textDocument/definition request for Go files. +-func Definition(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle, position protocol.Position) ([]protocol.Location, error) { +- ctx, done := event.Start(ctx, "golang.Definition") +- defer done() +- +- pkg, pgf, err := NarrowestPackageForFile(ctx, snapshot, fh.URI()) +- if err != nil { +- return nil, err +- } +- pos, err := pgf.PositionPos(position) +- if err != nil { +- return nil, err +- } +- +- // Handle the case where the cursor is in an import. +- importLocations, err := importDefinition(ctx, snapshot, pkg, pgf, pos) +- if err != nil { +- return nil, err +- } +- if len(importLocations) > 0 { +- return importLocations, nil +- } +- +- // Handle the case where the cursor is in the package name. +- // We use "<= End" to accept a query immediately after the package name. +- if pgf.File != nil && pgf.File.Name.Pos() <= pos && pos <= pgf.File.Name.End() { +- // If there's no package documentation, just use current file. 
+- declFile := pgf +- for _, pgf := range pkg.CompiledGoFiles() { +- if pgf.File.Name != nil && pgf.File.Doc != nil { +- declFile = pgf +- break +- } +- } +- loc, err := declFile.NodeLocation(declFile.File.Name) +- if err != nil { +- return nil, err +- } +- return []protocol.Location{loc}, nil +- } +- +- // Handle the case where the cursor is in a linkname directive. +- locations, err := linknameDefinition(ctx, snapshot, pgf.Mapper, position) +- if !errors.Is(err, ErrNoLinkname) { +- return locations, err // may be success or failure +- } +- +- // Handle the case where the cursor is in an embed directive. +- locations, err = embedDefinition(pgf.Mapper, position) +- if !errors.Is(err, ErrNoEmbed) { +- return locations, err // may be success or failure +- } +- +- // Handle the case where the cursor is in a doc link. +- locations, err = docLinkDefinition(ctx, snapshot, pkg, pgf, pos) +- if !errors.Is(err, errNoCommentReference) { +- return locations, err // may be success or failure +- } +- +- // Handle definition requests for various special kinds of syntax node. +- path, _ := astutil.PathEnclosingInterval(pgf.File, pos, pos) +- ancestors := path[1:] +- switch node := path[0].(type) { +- // Handle the case where the cursor is on a return statement by jumping to the result variables. +- case *ast.ReturnStmt: +- var funcType *ast.FuncType +- for _, n := range ancestors { +- switch n := n.(type) { +- case *ast.FuncLit: +- funcType = n.Type +- case *ast.FuncDecl: +- funcType = n.Type +- } +- if funcType != nil { +- break +- } +- } +- // Inv: funcType != nil, as a return stmt cannot appear outside a function. +- if funcType.Results == nil { +- return nil, nil // no result variables +- } +- loc, err := pgf.NodeLocation(funcType.Results) +- if err != nil { +- return nil, err +- } +- return []protocol.Location{loc}, nil +- +- case *ast.BranchStmt: +- // Handle the case where the cursor is on a goto, break or continue statement by returning the +- // location of the label, the closing brace of the relevant block statement, or the +- // start of the relevant loop, respectively. +- label, isLabeled := pkg.TypesInfo().Uses[node.Label].(*types.Label) +- switch node.Tok { +- case token.GOTO: +- loc, err := pgf.PosLocation(label.Pos(), label.Pos()+token.Pos(len(label.Name()))) +- if err != nil { +- return nil, err +- } +- return []protocol.Location{loc}, nil +- +- case token.BREAK, token.CONTINUE: +- // Find innermost relevant ancestor for break/continue. +- for i, n := range ancestors { +- if isLabeled && i+1 < len(ancestors) { +- l, ok := ancestors[i+1].(*ast.LabeledStmt) +- if !(ok && l.Label.Name == label.Name()) { +- continue +- } +- } +- switch n.(type) { +- case *ast.ForStmt, *ast.RangeStmt: +- var start, end token.Pos +- if node.Tok == token.BREAK { +- start, end = n.End()-token.Pos(len("}")), n.End() +- } else { // CONTINUE +- start, end = n.Pos(), n.Pos()+token.Pos(len("for")) +- } +- loc, err := pgf.PosLocation(start, end) +- if err != nil { +- return nil, err +- } +- return []protocol.Location{loc}, nil +- case *ast.SwitchStmt, *ast.TypeSwitchStmt, *ast.SelectStmt: +- if node.Tok == token.BREAK { +- loc, err := pgf.PosLocation(n.End()-1, n.End()) +- if err != nil { +- return nil, err +- } +- return []protocol.Location{loc}, nil +- } +- case *ast.FuncDecl, *ast.FuncLit: +- // bad syntax; avoid jumping outside the current function +- return nil, nil +- } +- } +- } +- } +- +- // The general case: the cursor is on an identifier. 
+- _, obj, _ := referencedObject(pkg, pgf, pos) +- if obj == nil { +- return nil, nil +- } +- +- // Non-go (e.g. assembly) symbols +- // +- // When already at the definition of a Go function without +- // a body, we jump to its non-Go (C or assembly) definition. +- for _, decl := range pgf.File.Decls { +- if decl, ok := decl.(*ast.FuncDecl); ok && +- decl.Body == nil && +- internalastutil.NodeContains(decl.Name, pos) { +- return nonGoDefinition(ctx, snapshot, pkg, decl.Name.Name) +- } +- } +- +- // Finally, map the object position. +- loc, err := ObjectLocation(ctx, pkg.FileSet(), snapshot, obj) +- if err != nil { +- return nil, err +- } +- return []protocol.Location{loc}, nil +-} +- +-// builtinDecl returns the parsed Go file and node corresponding to a builtin +-// object, which may be a universe object or part of types.Unsafe, as well as +-// its declaring identifier. +-func builtinDecl(ctx context.Context, snapshot *cache.Snapshot, obj types.Object) (*parsego.File, *ast.Ident, error) { +- // declaringIdent returns the file-level declaration node (as reported by +- // ast.Object) and declaring identifier of name using legacy (go/ast) object +- // resolution. +- declaringIdent := func(file *ast.File, name string) (ast.Node, *ast.Ident, error) { +- astObj := file.Scope.Lookup(name) +- if astObj == nil { +- // Every built-in should have documentation syntax. +- // However, it is possible to reach this statement by +- // commenting out declarations in {builtin,unsafe}.go. +- return nil, nil, fmt.Errorf("internal error: no object for %s", name) +- } +- decl, ok := astObj.Decl.(ast.Node) +- if !ok { +- return nil, nil, bug.Errorf("internal error: no declaration for %s", obj.Name()) +- } +- var ident *ast.Ident +- switch node := decl.(type) { +- case *ast.Field: +- for _, id := range node.Names { +- if id.Name == name { +- ident = id +- } +- } +- case *ast.ValueSpec: +- for _, id := range node.Names { +- if id.Name == name { +- ident = id +- } +- } +- case *ast.TypeSpec: +- ident = node.Name +- case *ast.Ident: +- ident = node +- case *ast.FuncDecl: +- ident = node.Name +- case *ast.ImportSpec, *ast.LabeledStmt, *ast.AssignStmt: +- // Not reachable for imported objects. +- default: +- return nil, nil, bug.Errorf("internal error: unexpected decl type %T", decl) +- } +- if ident == nil { +- return nil, nil, bug.Errorf("internal error: no declaring identifier for %s", obj.Name()) +- } +- return decl, ident, nil +- } +- +- var ( +- pgf *parsego.File +- ident *ast.Ident +- err error +- ) +- if obj.Pkg() == types.Unsafe { +- // package "unsafe": +- // parse $GOROOT/src/unsafe/unsafe.go +- // +- // (Strictly, we shouldn't assume that the ID of a std +- // package is its PkgPath, but no Bazel+gopackagesdriver +- // users have complained about this yet.) +- unsafe := snapshot.Metadata("unsafe") +- if unsafe == nil { +- // If the type checker somehow resolved 'unsafe', we must have metadata +- // for it. +- return nil, nil, bug.Errorf("no metadata for package 'unsafe'") +- } +- uri := unsafe.GoFiles[0] +- fh, err := snapshot.ReadFile(ctx, uri) +- if err != nil { +- return nil, nil, err +- } +- // TODO(rfindley): treat unsafe symmetrically with the builtin file. Either +- // pre-parse them both, or look up metadata for both. 
+- pgf, err = snapshot.ParseGo(ctx, fh, parsego.Full&^parser.SkipObjectResolution) +- if err != nil { +- return nil, nil, err +- } +- _, ident, err = declaringIdent(pgf.File, obj.Name()) +- if err != nil { +- return nil, nil, err +- } +- } else { +- // pseudo-package "builtin": +- // use parsed $GOROOT/src/builtin/builtin.go +- pgf, err = snapshot.BuiltinFile(ctx) +- if err != nil { +- return nil, nil, err +- } +- +- if obj.Parent() == types.Universe { +- // built-in function or type +- _, ident, err = declaringIdent(pgf.File, obj.Name()) +- if err != nil { +- return nil, nil, err +- } +- } else if obj.Name() == "Error" { +- // error.Error method +- decl, _, err := declaringIdent(pgf.File, "error") +- if err != nil { +- return nil, nil, err +- } +- field := decl.(*ast.TypeSpec).Type.(*ast.InterfaceType).Methods.List[0] +- ident = field.Names[0] +- } else { +- return nil, nil, bug.Errorf("unknown built-in %v", obj) +- } +- } +- _ = ident.Name // ident != nil +- +- return pgf, ident, nil +-} +- +-// referencedObject returns the identifier and object referenced at the +-// specified position, which must be within the file pgf, for the purposes of +-// definition/hover/call hierarchy operations. It returns a nil object if no +-// object was found at the given position. +-// +-// If the returned identifier is a type-switch implicit (i.e. the x in x := +-// e.(type)), the third result will be the type of the expression being +-// switched on (the type of e in the example). This facilitates workarounds for +-// limitations of the go/types API, which does not report an object for the +-// identifier x. +-// +-// For embedded fields, referencedObject returns the type name object rather +-// than the var (field) object. +-// +-// TODO(rfindley): this function exists to preserve the pre-existing behavior +-// of golang.Identifier. Eliminate this helper in favor of sharing +-// functionality with objectsAt, after choosing suitable primitives. +-func referencedObject(pkg *cache.Package, pgf *parsego.File, pos token.Pos) (*ast.Ident, types.Object, types.Type) { +- path := pathEnclosingObjNode(pgf.File, pos) +- if len(path) == 0 { +- return nil, nil, nil +- } +- var obj types.Object +- info := pkg.TypesInfo() +- switch n := path[0].(type) { +- case *ast.Ident: +- obj = info.ObjectOf(n) +- // If n is the var's declaring ident in a type switch +- // [i.e. the x in x := foo.(type)], it will not have an object. In this +- // case, set obj to the first implicit object (if any), and return the type +- // of the expression being switched on. +- // +- // The type switch may have no case clauses and thus no +- // implicit objects; this is a type error ("unused x"), +- if obj == nil { +- if implicits, typ := typeSwitchImplicits(info, path); len(implicits) > 0 { +- return n, implicits[0], typ +- } +- } +- +- // If the original position was an embedded field, we want to jump +- // to the field's type definition, not the field's definition. +- if v, ok := obj.(*types.Var); ok && v.Embedded() { +- // types.Info.Uses contains the embedded field's *types.TypeName. +- if typeName := info.Uses[n]; typeName != nil { +- obj = typeName +- } +- } +- return n, obj, nil +- } +- return nil, nil, nil +-} +- +-// importDefinition returns locations defining a package referenced by the +-// import spec containing pos. +-// +-// If pos is not inside an import spec, it returns nil, nil. 
+-func importDefinition(ctx context.Context, s *cache.Snapshot, pkg *cache.Package, pgf *parsego.File, pos token.Pos) ([]protocol.Location, error) { +- var imp *ast.ImportSpec +- for _, spec := range pgf.File.Imports { +- // We use "<= End" to accept a query immediately after an ImportSpec. +- if spec.Path.Pos() <= pos && pos <= spec.Path.End() { +- imp = spec +- } +- } +- if imp == nil { +- return nil, nil +- } +- +- importPath := metadata.UnquoteImportPath(imp) +- impID := pkg.Metadata().DepsByImpPath[importPath] +- if impID == "" { +- return nil, fmt.Errorf("failed to resolve import %q", importPath) +- } +- impMetadata := s.Metadata(impID) +- if impMetadata == nil { +- return nil, fmt.Errorf("missing information for package %q", impID) +- } +- +- var locs []protocol.Location +- for _, f := range impMetadata.CompiledGoFiles { +- fh, err := s.ReadFile(ctx, f) +- if err != nil { +- if ctx.Err() != nil { +- return nil, ctx.Err() +- } +- continue +- } +- pgf, err := s.ParseGo(ctx, fh, parsego.Header) +- if err != nil { +- if ctx.Err() != nil { +- return nil, ctx.Err() +- } +- continue +- } +- loc, err := pgf.NodeLocation(pgf.File) +- if err != nil { +- return nil, err +- } +- locs = append(locs, loc) +- } +- +- if len(locs) == 0 { +- return nil, fmt.Errorf("package %q has no readable files", impID) // incl. unsafe +- } +- +- return locs, nil +-} +- +-// ObjectLocation returns the location of the declaring identifier of obj. +-// If valid, obj.Pos() must be mapped by fset. +-// It may need to read the declaring file content, hence (ctx, s). +-// It supports the builtin and unsafe pseudo-packages. +-func ObjectLocation(ctx context.Context, fset *token.FileSet, snapshot *cache.Snapshot, obj types.Object) (protocol.Location, error) { +- if isBuiltin(obj) { +- // Returns fake source declaration in {builtin,unsafe}.go. +- pgf, ident, err := builtinDecl(ctx, snapshot, obj) +- if err != nil { +- return protocol.Location{}, err +- } +- return pgf.NodeLocation(ident) +- } +- +- // An imported Go package has a package-local, unqualified name. +- // When the name matches the imported package name, there is usually +- // no identifier in the import spec with the local package name. +- // +- // For example: +- // import "go/‸ast" // name "ast" matches package name +- // import ‸a "go/ast" // name "a" does not match package name +- // +- // When the identifier does not appear in the source, have the range +- // of the object be the import path, including quotes. +- // +- // But this is just a heuristic, and it's wrong in this case: +- // import ‸ast "go/ast" // name matches (spurious result is `ast "go/`) +- nameLen := len(obj.Name()) +- if pkgName, ok := obj.(*types.PkgName); ok && pkgName.Imported().Name() == pkgName.Name() { +- nameLen = len(pkgName.Imported().Path()) + len(`""`) +- } +- +- var ( +- start = obj.Pos() +- end = start + token.Pos(nameLen) +- ) +- file := fset.File(start) +- if file == nil { +- return protocol.Location{}, bug.Errorf("FileSet does not map Pos %d", start) +- } +- uri := protocol.URIFromPath(file.Name()) +- fh, err := snapshot.ReadFile(ctx, uri) +- if err != nil { +- return protocol.Location{}, err +- } +- content, err := fh.Content() +- if err != nil { +- return protocol.Location{}, err +- } +- // TODO(rfindley): avoid the duplicate column mapping here, by associating a +- // column mapper with each file handle. 
+- m := protocol.NewMapper(fh.URI(), content) +- return m.PosLocation(file, start, end) +-} +- +-// nonGoDefinition returns the location of the definition of a non-Go symbol. +-// Only assembly is supported for now. +-func nonGoDefinition(ctx context.Context, snapshot *cache.Snapshot, pkg *cache.Package, symbol string) ([]protocol.Location, error) { +- // Examples: +- // TEXT runtime·foo(SB) +- // TEXT ·foo(SB) +- // TODO(adonovan): why does ^TEXT cause it not to match? +- pattern := regexp.MustCompile("TEXT\\b.*·(" + regexp.QuoteMeta(symbol) + ")[\\(<]") +- +- for _, uri := range pkg.Metadata().OtherFiles { +- if strings.HasSuffix(uri.Path(), ".s") { +- fh, err := snapshot.ReadFile(ctx, uri) +- if err != nil { +- return nil, err // context cancelled +- } +- content, err := fh.Content() +- if err != nil { +- continue // can't read file +- } +- if match := pattern.FindSubmatchIndex(content); match != nil { +- mapper := protocol.NewMapper(uri, content) +- loc, err := mapper.OffsetLocation(match[2], match[3]) +- if err != nil { +- return nil, err +- } +- return []protocol.Location{loc}, nil +- } +- } +- } +- +- // TODO(adonovan): try C files +- +- // This may be reached for functions that aren't implemented +- // in assembly (e.g. compiler intrinsics like getg). +- return nil, fmt.Errorf("can't find non-Go definition of %s", symbol) +-} +diff -urN a/gopls/internal/golang/diagnostics.go b/gopls/internal/golang/diagnostics.go +--- a/gopls/internal/golang/diagnostics.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/golang/diagnostics.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,109 +0,0 @@ +-// Copyright 2018 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package golang +- +-import ( +- "context" +- +- "golang.org/x/tools/gopls/internal/cache" +- "golang.org/x/tools/gopls/internal/cache/metadata" +- "golang.org/x/tools/gopls/internal/progress" +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/gopls/internal/util/moremaps" +-) +- +-// DiagnoseFile returns pull-based diagnostics for the given file. +-func DiagnoseFile(ctx context.Context, snapshot *cache.Snapshot, uri protocol.DocumentURI) ([]*cache.Diagnostic, error) { +- mp, err := snapshot.NarrowestMetadataForFile(ctx, uri) +- if err != nil { +- return nil, err +- } +- +- // TODO(rfindley): consider analysing the package concurrently to package +- // diagnostics. +- +- // Get package (list/parse/type check) diagnostics. +- pkgDiags, err := snapshot.PackageDiagnostics(ctx, mp.ID) +- if err != nil { +- return nil, err +- } +- diags := pkgDiags[uri] +- +- // Get analysis diagnostics. +- pkgAnalysisDiags, err := snapshot.Analyze(ctx, map[PackageID]*metadata.Package{mp.ID: mp}, nil) +- if err != nil { +- return nil, err +- } +- analysisDiags := moremaps.Group(pkgAnalysisDiags, byURI)[uri] +- +- // Return the merged set of file diagnostics, combining type error analyses +- // with type error diagnostics. +- return CombineDiagnostics(diags, analysisDiags), nil +-} +- +-// Analyze reports go/analysis-framework diagnostics in the specified package. +-// +-// If the provided tracker is non-nil, it may be used to provide notifications +-// of the ongoing analysis pass. +-// +-// TODO(rfindley): merge this with snapshot.Analyze. 
+-func Analyze(ctx context.Context, snapshot *cache.Snapshot, pkgIDs map[PackageID]*metadata.Package, tracker *progress.Tracker) (map[protocol.DocumentURI][]*cache.Diagnostic, error) { +- // Exit early if the context has been canceled. This also protects us +- // from a race on Options, see golang/go#36699. +- if ctx.Err() != nil { +- return nil, ctx.Err() +- } +- +- analysisDiagnostics, err := snapshot.Analyze(ctx, pkgIDs, tracker) +- if err != nil { +- return nil, err +- } +- return moremaps.Group(analysisDiagnostics, byURI), nil +-} +- +-// byURI is used for grouping diagnostics. +-func byURI(d *cache.Diagnostic) protocol.DocumentURI { return d.URI } +- +-// CombineDiagnostics combines and filters list/parse/type diagnostics from +-// tdiags with the analysis adiags, returning the resulting combined set. +-// +-// Type-error analyzers produce diagnostics that are redundant with type +-// checker diagnostics, but more detailed (e.g. fixes). Rather than report two +-// diagnostics for the same problem, we combine them by augmenting the +-// type-checker diagnostic and discarding the analyzer diagnostic. +-// +-// If an analysis diagnostic has the same range and message as a +-// list/parse/type diagnostic, the suggested fix information (et al) of the +-// latter is merged into a copy of the former. This handles the case where a +-// type-error analyzer suggests a fix to a type error, and avoids duplication. +-// +-// The arguments are not modified. +-func CombineDiagnostics(tdiags []*cache.Diagnostic, adiags []*cache.Diagnostic) []*cache.Diagnostic { +- // Build index of (list+parse+)type errors. +- type key struct { +- Range protocol.Range +- message string +- } +- combined := make([]*cache.Diagnostic, len(tdiags)) +- index := make(map[key]int) // maps (Range,Message) to index in tdiags slice +- for i, diag := range tdiags { +- index[key{diag.Range, diag.Message}] = i +- combined[i] = diag +- } +- +- // Filter out analysis diagnostics that match type errors, +- // retaining their suggested fix (etc) fields. +- for _, diag := range adiags { +- if i, ok := index[key{diag.Range, diag.Message}]; ok { +- copy := *tdiags[i] +- copy.SuggestedFixes = diag.SuggestedFixes +- copy.Tags = diag.Tags +- combined[i] = &copy +- continue +- } +- combined = append(combined, diag) +- } +- return combined +-} +diff -urN a/gopls/internal/golang/embeddirective.go b/gopls/internal/golang/embeddirective.go +--- a/gopls/internal/golang/embeddirective.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/golang/embeddirective.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,195 +0,0 @@ +-// Copyright 2023 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package golang +- +-import ( +- "errors" +- "fmt" +- "io/fs" +- "path/filepath" +- "strconv" +- "strings" +- "unicode" +- "unicode/utf8" +- +- "golang.org/x/tools/gopls/internal/protocol" +-) +- +-// ErrNoEmbed is returned by EmbedDefinition when no embed +-// directive is found at a particular position. +-// As such it indicates that other definitions could be worth checking. +-var ErrNoEmbed = errors.New("no embed directive found") +- +-var errStopWalk = errors.New("stop walk") +- +-// embedDefinition finds a file matching the embed directive at pos in the mapped file. +-// If there is no embed directive at pos, returns ErrNoEmbed. +-// If multiple files match the embed pattern, one is picked at random. 
+-func embedDefinition(m *protocol.Mapper, pos protocol.Position) ([]protocol.Location, error) { +- pattern, _ := parseEmbedDirective(m, pos) +- if pattern == "" { +- return nil, ErrNoEmbed +- } +- +- // Find the first matching file. +- var match string +- dir := m.URI.DirPath() +- err := filepath.WalkDir(dir, func(abs string, d fs.DirEntry, e error) error { +- if e != nil { +- return e +- } +- rel, err := filepath.Rel(dir, abs) +- if err != nil { +- return err +- } +- ok, err := filepath.Match(pattern, rel) +- if err != nil { +- return err +- } +- if ok && !d.IsDir() { +- match = abs +- return errStopWalk +- } +- return nil +- }) +- if err != nil && !errors.Is(err, errStopWalk) { +- return nil, err +- } +- if match == "" { +- return nil, fmt.Errorf("%q does not match any files in %q", pattern, dir) +- } +- +- loc := protocol.Location{ +- URI: protocol.URIFromPath(match), +- Range: protocol.Range{ +- Start: protocol.Position{Line: 0, Character: 0}, +- }, +- } +- return []protocol.Location{loc}, nil +-} +- +-// parseEmbedDirective attempts to parse a go:embed directive argument at pos. +-// If successful it return the directive argument and its range, else zero values are returned. +-func parseEmbedDirective(m *protocol.Mapper, pos protocol.Position) (string, protocol.Range) { +- lineStart, err := m.PositionOffset(protocol.Position{Line: pos.Line, Character: 0}) +- if err != nil { +- return "", protocol.Range{} +- } +- lineEnd, err := m.PositionOffset(protocol.Position{Line: pos.Line + 1, Character: 0}) +- if err != nil { +- return "", protocol.Range{} +- } +- +- text := string(m.Content[lineStart:lineEnd]) +- if !strings.HasPrefix(text, "//go:embed") { +- return "", protocol.Range{} +- } +- text = text[len("//go:embed"):] +- offset := lineStart + len("//go:embed") +- +- // Find the first pattern in text that covers the offset of the pos we are looking for. +- findOffset, err := m.PositionOffset(pos) +- if err != nil { +- return "", protocol.Range{} +- } +- patterns, err := parseGoEmbed(text, offset) +- if err != nil { +- return "", protocol.Range{} +- } +- for _, p := range patterns { +- if p.startOffset <= findOffset && findOffset <= p.endOffset { +- // Found our match. +- rng, err := m.OffsetRange(p.startOffset, p.endOffset) +- if err != nil { +- return "", protocol.Range{} +- } +- return p.pattern, rng +- } +- } +- +- return "", protocol.Range{} +-} +- +-type fileEmbed struct { +- pattern string +- startOffset int +- endOffset int +-} +- +-// parseGoEmbed patterns that come after the directive. +-// +-// Copied and adapted from go/build/read.go. +-// Replaced token.Position with start/end offset (including quotes if present). 
+-func parseGoEmbed(args string, offset int) ([]fileEmbed, error) { +- trimBytes := func(n int) { +- offset += n +- args = args[n:] +- } +- trimSpace := func() { +- trim := strings.TrimLeftFunc(args, unicode.IsSpace) +- trimBytes(len(args) - len(trim)) +- } +- +- var list []fileEmbed +- for trimSpace(); args != ""; trimSpace() { +- var path string +- pathOffset := offset +- Switch: +- switch args[0] { +- default: +- i := len(args) +- for j, c := range args { +- if unicode.IsSpace(c) { +- i = j +- break +- } +- } +- path = args[:i] +- trimBytes(i) +- +- case '`': +- var ok bool +- path, _, ok = strings.Cut(args[1:], "`") +- if !ok { +- return nil, fmt.Errorf("invalid quoted string in //go:embed: %s", args) +- } +- trimBytes(1 + len(path) + 1) +- +- case '"': +- i := 1 +- for ; i < len(args); i++ { +- if args[i] == '\\' { +- i++ +- continue +- } +- if args[i] == '"' { +- q, err := strconv.Unquote(args[:i+1]) +- if err != nil { +- return nil, fmt.Errorf("invalid quoted string in //go:embed: %s", args[:i+1]) +- } +- path = q +- trimBytes(i + 1) +- break Switch +- } +- } +- if i >= len(args) { +- return nil, fmt.Errorf("invalid quoted string in //go:embed: %s", args) +- } +- } +- +- if args != "" { +- r, _ := utf8.DecodeRuneInString(args) +- if !unicode.IsSpace(r) { +- return nil, fmt.Errorf("invalid quoted string in //go:embed: %s", args) +- } +- } +- list = append(list, fileEmbed{ +- pattern: path, +- startOffset: pathOffset, +- endOffset: offset, +- }) +- } +- return list, nil +-} +diff -urN a/gopls/internal/golang/extract.go b/gopls/internal/golang/extract.go +--- a/gopls/internal/golang/extract.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/golang/extract.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,2161 +0,0 @@ +-// Copyright 2020 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package golang +- +-import ( +- "bytes" +- "fmt" +- "go/ast" +- "go/format" +- "go/parser" +- "go/printer" +- "go/token" +- "go/types" +- "slices" +- "sort" +- "strconv" +- "strings" +- "text/scanner" +- +- "golang.org/x/tools/go/analysis" +- "golang.org/x/tools/go/ast/astutil" +- "golang.org/x/tools/go/ast/inspector" +- "golang.org/x/tools/gopls/internal/cache" +- "golang.org/x/tools/gopls/internal/cache/parsego" +- "golang.org/x/tools/gopls/internal/util/bug" +- "golang.org/x/tools/gopls/internal/util/safetoken" +- internalastutil "golang.org/x/tools/internal/astutil" +- "golang.org/x/tools/internal/typesinternal" +-) +- +-// extractVariableOne implements the refactor.extract.{variable,constant} CodeAction command. +-func extractVariableOne(pkg *cache.Package, pgf *parsego.File, start, end token.Pos) (*token.FileSet, *analysis.SuggestedFix, error) { +- countExtractVariable.Inc() +- return extractVariable(pkg, pgf, start, end, false) +-} +- +-// extractVariableAll implements the refactor.extract.{variable,constant}-all CodeAction command. +-func extractVariableAll(pkg *cache.Package, pgf *parsego.File, start, end token.Pos) (*token.FileSet, *analysis.SuggestedFix, error) { +- countExtractVariableAll.Inc() +- return extractVariable(pkg, pgf, start, end, true) +-} +- +-// extractVariable replaces one or all occurrences of a specified +-// expression within the same function with newVar. If 'all' is true, +-// it replaces all occurrences of the same expression; otherwise, it +-// only replaces the selected expression. 
+-// +-// The new variable/constant is declared as close as possible to the first found expression +-// within the deepest common scope accessible to all candidate occurrences. +-func extractVariable(pkg *cache.Package, pgf *parsego.File, start, end token.Pos, all bool) (*token.FileSet, *analysis.SuggestedFix, error) { +- var ( +- fset = pkg.FileSet() +- info = pkg.TypesInfo() +- file = pgf.File +- ) +- // TODO(adonovan): simplify, using Cursor. +- exprs, err := canExtractVariable(info, pgf.Cursor, start, end, all) +- if err != nil { +- return nil, nil, fmt.Errorf("cannot extract: %v", err) +- } +- +- // innermost scope enclosing ith expression +- exprScopes := make([]*types.Scope, len(exprs)) +- for i, e := range exprs { +- exprScopes[i] = info.Scopes[file].Innermost(e.Pos()) +- } +- +- hasCollision := func(name string) bool { +- for _, scope := range exprScopes { +- if s, _ := scope.LookupParent(name, token.NoPos); s != nil { +- return true +- } +- } +- return false +- } +- constant := info.Types[exprs[0]].Value != nil +- +- // Generate name(s) for new declaration. +- baseName := cond(constant, "newConst", "newVar") +- var lhsNames []string +- switch expr := exprs[0].(type) { +- case *ast.CallExpr: +- tup, ok := info.TypeOf(expr).(*types.Tuple) +- if !ok { +- // conversion or single-valued call: +- // treat it the same as our standard extract variable case. +- name, _ := generateName(0, baseName, hasCollision) +- lhsNames = append(lhsNames, name) +- +- } else { +- // call with multiple results +- idx := 0 +- for range tup.Len() { +- // Generate a unique variable for each result. +- var name string +- name, idx = generateName(idx, baseName, hasCollision) +- lhsNames = append(lhsNames, name) +- } +- } +- +- default: +- // TODO: stricter rules for selectorExpr. +- name, _ := generateName(0, baseName, hasCollision) +- lhsNames = append(lhsNames, name) +- } +- +- // Where all the extractable positions can see variable being declared. +- var commonScope *types.Scope +- counter := make(map[*types.Scope]int) +-Outer: +- for _, scope := range exprScopes { +- for s := scope; s != nil; s = s.Parent() { +- counter[s]++ +- if counter[s] == len(exprScopes) { +- // A scope whose count is len(scopes) is common to all ancestor paths. +- // Stop at the first (innermost) one. +- commonScope = s +- break Outer +- } +- } +- } +- +- var visiblePath []ast.Node +- if commonScope != exprScopes[0] { +- // This means the first expr within function body is not the largest scope, +- // we need to find the scope immediately follow the common +- // scope where we will insert the statement before. +- child := exprScopes[0] +- for p := child; p != nil; p = p.Parent() { +- if p == commonScope { +- break +- } +- child = p +- } +- visiblePath, _ = astutil.PathEnclosingInterval(file, child.Pos(), child.End()) +- } else { +- // Insert newVar inside commonScope before the first occurrence of the expression. +- visiblePath, _ = astutil.PathEnclosingInterval(file, exprs[0].Pos(), exprs[0].End()) +- } +- variables, err := collectFreeVars(info, file, exprs[0].Pos(), exprs[0].End(), exprs[0]) +- if err != nil { +- return nil, nil, err +- } +- +- // TODO: There is a bug here: for a variable declared in a labeled +- // switch/for statement it returns the for/switch statement itself +- // which produces the below code which is a compiler error. e.g. +- // label: +- // switch r1 := r() { ... break label ... } +- // On extracting "r()" to a variable +- // label: +- // x := r() +- // switch r1 := x { ... break label ... 
} // compiler error +- // +- var ( +- insertPos token.Pos +- indentation string +- stmtOK bool // ok to use ":=" instead of var/const decl? +- ) +- if funcDecl, ok := visiblePath[len(visiblePath)-2].(*ast.FuncDecl); ok && internalastutil.NodeContains(funcDecl.Body, start) { +- before, err := stmtToInsertVarBefore(visiblePath, variables) +- if err != nil { +- return nil, nil, fmt.Errorf("cannot find location to insert extraction: %v", err) +- } +- // Within function: compute appropriate statement indentation. +- indent, err := pgf.Indentation(before.Pos()) +- if err != nil { +- return nil, nil, err +- } +- insertPos = before.Pos() +- indentation = "\n" + indent +- +- // Currently, we always extract a constant expression +- // to a const declaration (and logic in CodeAction +- // assumes that we do so); this is conservative because +- // it preserves its constant-ness. +- // +- // In future, constant expressions used only in +- // contexts where constant-ness isn't important could +- // be profitably extracted to a var declaration or := +- // statement, especially if the latter is the Init of +- // an {If,For,Switch}Stmt. +- stmtOK = !constant +- } else { +- // Outside any statement: insert before the current +- // declaration, without indentation. +- currentDecl := visiblePath[len(visiblePath)-2] +- insertPos = currentDecl.Pos() +- indentation = "\n" +- } +- +- // Create statement to declare extracted var/const. +- // +- // TODO(adonovan): beware the const decls are not valid short +- // statements, so if fixing #70563 causes +- // StmtToInsertVarBefore to evolve to permit declarations in +- // the "pre" part of an IfStmt, like so: +- // Before: +- // if cond { +- // } else if «1 + 2» > 0 { +- // } +- // After: +- // if x := 1 + 2; cond { +- // } else if x > 0 { +- // } +- // then it will need to become aware that this is invalid +- // for constants. +- // +- // Conversely, a short var decl stmt is not valid at top level, +- // so when we fix #70665, we'll need to use a var decl. +- var newNode ast.Node +- if !stmtOK { +- // var/const x1, ..., xn = expr +- var names []*ast.Ident +- for _, name := range lhsNames { +- names = append(names, ast.NewIdent(name)) +- } +- newNode = &ast.GenDecl{ +- Tok: cond(constant, token.CONST, token.VAR), +- Specs: []ast.Spec{ +- &ast.ValueSpec{ +- Names: names, +- Values: []ast.Expr{exprs[0]}, +- }, +- }, +- } +- +- } else { +- // var: x1, ... xn := expr +- var lhs []ast.Expr +- for _, name := range lhsNames { +- lhs = append(lhs, ast.NewIdent(name)) +- } +- newNode = &ast.AssignStmt{ +- Tok: token.DEFINE, +- Lhs: lhs, +- Rhs: []ast.Expr{exprs[0]}, +- } +- } +- +- // Format and indent the declaration. +- var buf bytes.Buffer +- if err := format.Node(&buf, fset, newNode); err != nil { +- return nil, nil, err +- } +- // TODO(adonovan): not sound for `...` string literals containing newlines. +- assignment := strings.ReplaceAll(buf.String(), "\n", indentation) + indentation +- textEdits := []analysis.TextEdit{{ +- Pos: insertPos, +- End: insertPos, +- NewText: []byte(assignment), +- }} +- for _, e := range exprs { +- textEdits = append(textEdits, analysis.TextEdit{ +- Pos: e.Pos(), +- End: e.End(), +- NewText: []byte(strings.Join(lhsNames, ", ")), +- }) +- } +- return fset, &analysis.SuggestedFix{ +- TextEdits: textEdits, +- }, nil +-} +- +-// stmtToInsertVarBefore returns the ast.Stmt before which we can safely insert a new variable, +-// and ensures that the new declaration is inserted at a point where all free variables are declared before. 
+-// Some examples: +-// +-// Basic Example: +-// +-// z := 1 +-// y := z + x +-// +-// If x is undeclared, then this function would return `y := z + x`, so that we +-// can insert `x := ` on the line before `y := z + x`. +-// +-// valid IfStmt example: +-// +-// if z == 1 { +-// } else if z == y {} +-// +-// If y is undeclared, then this function would return `if z == 1 {`, because we cannot +-// insert a statement between an if and an else if statement. As a result, we need to find +-// the top of the if chain to insert `y := ` before. +-// +-// invalid IfStmt example: +-// +-// if x := 1; true { +-// } else if y := x + 1; true { //apply refactor.extract.variable to x +-// } +-// +-// `x` is a free variable defined in the IfStmt, we should not insert +-// the extracted expression outside the IfStmt scope, instead, return an error. +-// +-// TODO(dmo): make this function take a Cursor and simplify +-func stmtToInsertVarBefore(path []ast.Node, variables []*variable) (ast.Stmt, error) { +- enclosingIndex := -1 // index in path of enclosing stmt +- for i, p := range path { +- if _, ok := p.(ast.Stmt); ok { +- enclosingIndex = i +- break +- } +- } +- if enclosingIndex == -1 { +- return nil, fmt.Errorf("no enclosing statement") +- } +- enclosingStmt := path[enclosingIndex].(ast.Stmt) +- +- // hasFreeVar reports if any free variables is defined inside stmt (which may be nil). +- // If true, indicates that the insertion point will sit before the variable declaration. +- hasFreeVar := func(stmt ast.Stmt) bool { +- if stmt == nil { +- return false +- } +- for _, v := range variables { +- if internalastutil.NodeContains(stmt, v.obj.Pos()) { +- return true +- } +- } +- return false +- } +- +- // baseIfStmt walks up the if/else-if chain until we get to +- // the top of the current if chain. +- baseIfStmt := func(index int) (ast.Stmt, error) { +- stmt := path[index] +- for _, node := range path[index+1:] { +- ifStmt, ok := node.(*ast.IfStmt) +- if !ok || ifStmt.Else != stmt { +- break +- } +- if hasFreeVar(ifStmt.Init) { +- return nil, fmt.Errorf("Else's init statement has free variable declaration") +- } +- stmt = ifStmt +- } +- return stmt.(ast.Stmt), nil +- } +- +- switch enclosingStmt := enclosingStmt.(type) { +- case *ast.IfStmt: +- if hasFreeVar(enclosingStmt.Init) { +- return nil, fmt.Errorf("IfStmt's init statement has free variable declaration") +- } +- // The enclosingStmt is inside of the if declaration, +- // We need to check if we are in an else-if stmt and +- // get the base if statement. +- return baseIfStmt(enclosingIndex) +- case *ast.CaseClause: +- // Get the enclosing switch stmt if the enclosingStmt is +- // inside of the case statement. +- for _, node := range path[enclosingIndex+1:] { +- switch stmt := node.(type) { +- case *ast.SwitchStmt: +- if hasFreeVar(stmt.Init) { +- return nil, fmt.Errorf("SwitchStmt's init statement has free variable declaration") +- } +- return stmt, nil +- case *ast.TypeSwitchStmt: +- if hasFreeVar(stmt.Init) { +- return nil, fmt.Errorf("TypeSwitchStmt's init statement has free variable declaration") +- } +- return stmt, nil +- } +- } +- } +- // Check if the enclosing statement is inside another node. 
+- switch parent := path[enclosingIndex+1].(type) { +- case *ast.IfStmt: +- if hasFreeVar(parent.Init) { +- return nil, fmt.Errorf("IfStmt's init statement has free variable declaration") +- } +- return baseIfStmt(enclosingIndex + 1) +- case *ast.ForStmt: +- if parent.Init == enclosingStmt || parent.Post == enclosingStmt { +- return parent, nil +- } +- case *ast.SwitchStmt: +- if hasFreeVar(parent.Init) { +- return nil, fmt.Errorf("SwitchStmt's init statement has free variable declaration") +- } +- return parent, nil +- case *ast.TypeSwitchStmt: +- if hasFreeVar(parent.Init) { +- return nil, fmt.Errorf("TypeSwitchStmt's init statement has free variable declaration") +- } +- return parent, nil +- } +- return enclosingStmt, nil +-} +- +-// canExtractVariable reports whether the code in the given range can be +-// extracted to a variable (or constant). It returns the selected expression or, if 'all', +-// all structurally equivalent expressions within the same function body, in lexical order. +-func canExtractVariable(info *types.Info, curFile inspector.Cursor, start, end token.Pos, all bool) ([]ast.Expr, error) { +- if start == end { +- return nil, fmt.Errorf("empty selection") +- } +- file := curFile.Node().(*ast.File) +- // TODO(adonovan): simplify, using Cursor. +- path, exact := astutil.PathEnclosingInterval(file, start, end) +- if !exact { +- return nil, fmt.Errorf("selection is not an expression") +- } +- if len(path) == 0 { +- return nil, bug.Errorf("no path enclosing interval") +- } +- for _, n := range path { +- if _, ok := n.(*ast.ImportSpec); ok { +- return nil, fmt.Errorf("cannot extract variable or constant in an import block") +- } +- } +- expr, ok := path[0].(ast.Expr) +- if !ok { +- return nil, fmt.Errorf("selection is not an expression") // e.g. statement +- } +- if tv, ok := info.Types[expr]; !ok || !tv.IsValue() || tv.Type == nil || tv.HasOk() { +- // e.g. type, builtin, x.(type), 2-valued m[k], or ill-typed +- return nil, fmt.Errorf("selection is not a single-valued expression") +- } +- +- var exprs []ast.Expr +- if !all { +- exprs = append(exprs, expr) +- } else if funcDecl, ok := path[len(path)-2].(*ast.FuncDecl); ok { +- // Find all expressions in the same function body that +- // are equal to the selected expression. +- ast.Inspect(funcDecl.Body, func(n ast.Node) bool { +- if e, ok := n.(ast.Expr); ok { +- if internalastutil.Equal(e, expr, func(x, y *ast.Ident) bool { +- xobj, yobj := info.ObjectOf(x), info.ObjectOf(y) +- // The two identifiers must resolve to the same object, +- // or to a declaration within the candidate expression. +- // (This allows two copies of "func (x int) { print(x) }" +- // to match.) +- if xobj != nil && internalastutil.NodeContains(e, xobj.Pos()) && +- yobj != nil && internalastutil.NodeContains(expr, yobj.Pos()) { +- return x.Name == y.Name +- } +- // Use info.Uses to avoid including declaration, for example, +- // when extractnig x: +- // +- // x := 1 // should not include x +- // y := x // include x +- // z := x // include x +- xuse := info.Uses[x] +- return xuse != nil && xuse == info.Uses[y] +- }) { +- exprs = append(exprs, e) +- } +- } +- return true +- }) +- } else { +- return nil, fmt.Errorf("node %T is not inside a function", expr) +- } +- +- // Disallow any expr that sits in lhs of an AssignStmt or ValueSpec for now. +- // +- // TODO(golang/go#70784): In such cases, exprs are operated in "variable" mode (L-value mode in C). +- // In contrast, exprs in the RHS operate in "value" mode (R-value mode in C). 
+- // L-value mode refers to exprs that represent storage locations, +- // while R-value mode refers to exprs that represent values. +- // There are a number of expressions that may have L-value mode, given by: +- // +- // lvalue = ident -- Ident such that info.Uses[id] is a *Var +- // | '(' lvalue ') ' -- ParenExpr +- // | lvalue '[' expr ']' -- IndexExpr +- // | lvalue '.' ident -- SelectorExpr. +- // +- // For example: +- // +- // type foo struct { +- // bar int +- // } +- // f := foo{bar: 1} +- // x := f.bar + 1 // f.bar operates in "value" mode. +- // f.bar = 2 // f.bar operates in "variable" mode. +- // +- // When extracting exprs in variable mode, we must be cautious. Any such extraction +- // may require capturing the address of the expression and replacing its uses with dereferenced access. +- // The type checker records this information in info.Types[id].{IsValue,Addressable}(). +- // The correct result should be: +- // +- // newVar := &f.bar +- // x := *newVar + 1 +- // *newVar = 2 +- for _, e := range exprs { +- path, _ := astutil.PathEnclosingInterval(file, e.Pos(), e.End()) +- for _, n := range path { +- if assignment, ok := n.(*ast.AssignStmt); ok { +- if slices.Contains(assignment.Lhs, e) { +- return nil, fmt.Errorf("node %T is in LHS of an AssignStmt", expr) +- } +- break +- } +- if value, ok := n.(*ast.ValueSpec); ok { +- for _, name := range value.Names { +- if name == e { +- return nil, fmt.Errorf("node %T is in LHS of a ValueSpec", expr) +- } +- } +- break +- } +- } +- } +- return exprs, nil +-} +- +-// freshName returns an identifier based on prefix (perhaps with a +-// numeric suffix) that is not in scope at the specified position +-// within the file. It returns the next numeric suffix to use. +-func freshName(info *types.Info, file *ast.File, pos token.Pos, prefix string, idx int) (string, int) { +- scope := info.Scopes[file].Innermost(pos) +- return generateName(idx, prefix, func(name string) bool { +- obj, _ := scope.LookupParent(name, pos) +- return obj != nil +- }) +-} +- +-// freshNameOutsideRange is like [freshName], but ignores names +-// declared between start and end for the purposes of detecting conflicts. +-// +-// This is used for function extraction, where [start, end) will be extracted +-// to a new scope. +-func freshNameOutsideRange(info *types.Info, file *ast.File, pos, start, end token.Pos, prefix string, idx int) (string, int) { +- scope := info.Scopes[file].Innermost(pos) +- return generateName(idx, prefix, func(name string) bool { +- // Only report a collision if the object declaration +- // was outside the extracted range. +- for scope != nil { +- obj, declScope := scope.LookupParent(name, pos) +- if obj == nil { +- return false // undeclared +- } +- if !(start <= obj.Pos() && obj.Pos() < end) { +- return true // declared outside ignored range +- } +- scope = declScope.Parent() +- } +- return false +- }) +-} +- +-func generateName(idx int, prefix string, hasCollision func(string) bool) (string, int) { +- name := prefix +- if idx != 0 { +- name += fmt.Sprintf("%d", idx) +- } +- for hasCollision(name) { +- idx++ +- name = fmt.Sprintf("%v%d", prefix, idx) +- } +- return name, idx + 1 +-} +- +-// returnVariable keeps track of the information we need to properly introduce a new variable +-// that we will return in the extracted function. +-type returnVariable struct { +- // name is the identifier that is used on the left-hand side of the call to +- // the extracted function. +- name *ast.Ident +- // decl is the declaration of the variable. 
It is used in the type signature of the +- // extracted function and for variable declarations. +- decl *ast.Field +- // zeroVal is the "zero value" of the type of the variable. It is used in a return +- // statement in the extracted function. +- zeroVal ast.Expr +-} +- +-// extractMethod refactors the selected block of code into a new method. +-func extractMethod(pkg *cache.Package, pgf *parsego.File, start, end token.Pos) (*token.FileSet, *analysis.SuggestedFix, error) { +- countExtractMethod.Inc() +- return extractFunctionMethod(pkg, pgf, start, end, true) +-} +- +-// extractFunction refactors the selected block of code into a new function. +-func extractFunction(pkg *cache.Package, pgf *parsego.File, start, end token.Pos) (*token.FileSet, *analysis.SuggestedFix, error) { +- countExtractFunction.Inc() +- return extractFunctionMethod(pkg, pgf, start, end, false) +-} +- +-// extractFunctionMethod refactors the selected block of code into a new function/method. +-// It also replaces the selected block of code with a call to the extracted +-// function. First, we manually adjust the selection range. We remove trailing +-// and leading whitespace characters to ensure the range is precisely bounded +-// by AST nodes. Next, we determine the variables that will be the parameters +-// and return values of the extracted function/method. Lastly, we construct the call +-// of the function/method and insert this call as well as the extracted function/method into +-// their proper locations. +-func extractFunctionMethod(cpkg *cache.Package, pgf *parsego.File, start, end token.Pos, isMethod bool) (*token.FileSet, *analysis.SuggestedFix, error) { +- var ( +- fset = cpkg.FileSet() +- pkg = cpkg.Types() +- info = cpkg.TypesInfo() +- src = pgf.Src +- ) +- +- errorPrefix := "extractFunction" +- if isMethod { +- errorPrefix = "extractMethod" +- } +- +- file := pgf.Cursor.Node().(*ast.File) +- // TODO(adonovan): simplify, using Cursor. +- tok := fset.File(file.FileStart) +- if tok == nil { +- return nil, nil, bug.Errorf("no file for position") +- } +- p, ok, methodOk, err := canExtractFunction(tok, start, end, src, pgf.Cursor) +- if (!ok && !isMethod) || (!methodOk && isMethod) { +- return nil, nil, fmt.Errorf("%s: cannot extract %s: %v", errorPrefix, +- safetoken.StartPosition(fset, start), err) +- } +- tok, path, start, end, outer, node := p.tok, p.path, p.start, p.end, p.outer, p.node +- +- // A return statement is non-nested if its parent node is equal to the parent node +- // of the first node in the selection. These cases must be handled separately because +- // non-nested return statements are guaranteed to execute. +- var hasNonNestedReturn bool +- curStart, ok := pgf.Cursor.FindNode(node) +- if !ok { +- return nil, nil, bug.Errorf("cannot find Cursor for start Node") +- } +- curOuter, ok := pgf.Cursor.FindNode(outer) +- if !ok { +- return nil, nil, bug.Errorf("cannot find Cursor for start Node") +- } +- // Determine whether all return statements in the selection are +- // error-handling return statements. They must be of the form: +- // if err != nil { +- // return ..., err +- // } +- // If all return statements in the extracted block have a non-nil error, we +- // can replace the "shouldReturn" check with an error check to produce a +- // more concise output. 
+- allReturnsFinalErr := true // all ReturnStmts have final 'err' expression +- hasReturn := false // selection contains a ReturnStmt +- filter := []ast.Node{(*ast.ReturnStmt)(nil), (*ast.FuncLit)(nil)} +- curOuter.Inspect(filter, func(cur inspector.Cursor) (descend bool) { +- if funcLit, ok := cur.Node().(*ast.FuncLit); ok { +- // Exclude return statements in function literals because they don't affect the refactor. +- // Keep descending into func lits whose declaration is not included in the extracted block. +- return !(start < funcLit.Pos() && funcLit.End() < end) +- } +- ret := cur.Node().(*ast.ReturnStmt) +- if ret.Pos() < start || ret.End() > end { +- return false // not part of the extracted block +- } +- hasReturn = true +- +- if cur.Parent() == curStart.Parent() { +- hasNonNestedReturn = true +- } +- +- if !allReturnsFinalErr { +- // Stop the traversal if we have already found a non error-handling return statement. +- return false +- } +- // Check if the return statement returns a non-nil error as the last value. +- if len(ret.Results) > 0 { +- typ := info.TypeOf(ret.Results[len(ret.Results)-1]) +- if typ != nil && types.Identical(typ, errorType) { +- // Have: return ..., err +- // Check for enclosing "if err != nil { return ..., err }". +- // In that case, we can lift the error return to the caller. +- if ifstmt, ok := cur.Parent().Parent().Node().(*ast.IfStmt); ok { +- // Only handle the case where the if statement body contains a single statement. +- if body, ok := cur.Parent().Node().(*ast.BlockStmt); ok && len(body.List) <= 1 { +- if cond, ok := ifstmt.Cond.(*ast.BinaryExpr); ok { +- tx := info.TypeOf(cond.X) +- ty := info.TypeOf(cond.Y) +- isErr := tx != nil && types.Identical(tx, errorType) +- isNil := ty != nil && types.Identical(ty, types.Typ[types.UntypedNil]) +- if cond.Op == token.NEQ && isErr && isNil { +- // allReturnsErrHandling remains true +- return false +- } +- } +- } +- } +- } +- } +- allReturnsFinalErr = false +- return false +- }) +- +- allReturnsFinalErr = hasReturn && allReturnsFinalErr +- +- // Now that we have determined the correct range for the selection block, +- // we must determine the signature of the extracted function. We will then replace +- // the block with an assignment statement that calls the extracted function with +- // the appropriate parameters and return values. +- variables, err := collectFreeVars(info, file, start, end, path[0]) +- if err != nil { +- return nil, nil, err +- } +- +- var ( +- receiverUsed bool +- receiver *ast.Field +- receiverName string +- receiverObj types.Object +- ) +- if isMethod { +- if outer == nil || outer.Recv == nil || len(outer.Recv.List) == 0 { +- return nil, nil, fmt.Errorf("%s: cannot extract need method receiver", errorPrefix) +- } +- receiver = outer.Recv.List[0] +- if len(receiver.Names) == 0 || receiver.Names[0] == nil { +- return nil, nil, fmt.Errorf("%s: cannot extract need method receiver name", errorPrefix) +- } +- recvName := receiver.Names[0] +- receiverName = recvName.Name +- receiverObj = info.ObjectOf(recvName) +- } +- +- var ( +- params, returns []ast.Expr // used when calling the extracted function +- paramTypes, returnTypes []*ast.Field // used in the signature of the extracted function +- uninitialized []types.Object // vars we will need to initialize before the call +- ) +- +- // Avoid duplicates while traversing vars and uninitialized. 
+- seenVars := make(map[types.Object]ast.Expr) +- seenUninitialized := make(map[types.Object]struct{}) +- +- // Some variables on the left-hand side of our assignment statement may be free. If our +- // selection begins in the same scope in which the free variable is defined, we can +- // redefine it in our assignment statement. See the following example, where 'b' and +- // 'err' (both free variables) can be redefined in the second funcCall() while maintaining +- // correctness. +- // +- // +- // Not Redefined: +- // +- // a, err := funcCall() +- // var b int +- // b, err = funcCall() +- // +- // Redefined: +- // +- // a, err := funcCall() +- // b, err := funcCall() +- // +- // We track the number of free variables that can be redefined to maintain our preference +- // of using "x, y, z := fn()" style assignment statements. +- var canRedefineCount int +- +- qual := typesinternal.FileQualifier(file, pkg) +- +- // Each identifier in the selected block must become (1) a parameter to the +- // extracted function, (2) a return value of the extracted function, or (3) a local +- // variable in the extracted function. Determine the outcome(s) for each variable +- // based on whether it is free, altered within the selected block, and used outside +- // of the selected block. +- for _, v := range variables { +- if _, ok := seenVars[v.obj]; ok { +- continue +- } +- if v.obj.Name() == "_" { +- // The blank identifier is always a local variable +- continue +- } +- typ := typesinternal.TypeExpr(v.obj.Type(), qual) +- seenVars[v.obj] = typ +- identifier := ast.NewIdent(v.obj.Name()) +- // An identifier must meet three conditions to become a return value of the +- // extracted function. (1) its value must be defined or reassigned within +- // the selection (isAssigned), (2) it must be used at least once after the +- // selection (isUsed), and (3) its first use after the selection +- // cannot be its own reassignment or redefinition (objOverriden). +- vscope := v.obj.Parent() +- if vscope == nil { +- // v.obj could be a field on an anonymous struct. We'll examine the +- // struct in a different iteration so don't return an error here. +- continue +- } +- isUsed, firstUseAfter := objUsed(info, end, vscope.End(), v.obj) +- if v.assigned && isUsed && !varOverridden(info, firstUseAfter, v.obj, v.free, outer) { +- returnTypes = append(returnTypes, &ast.Field{Type: typ}) +- returns = append(returns, identifier) +- if !v.free { +- uninitialized = append(uninitialized, v.obj) +- +- } else { +- // In go1.22, Scope.Pos for function scopes changed (#60752): +- // it used to start at the body ('{'), now it starts at "func". +- // +- // The second condition below handles the case when +- // v's block is the FuncDecl.Body itself. +- startParent := curStart.Parent().Node() +- if vscope.Pos() == startParent.Pos() || +- startParent == outer.Body && vscope == info.Scopes[outer.Type] { +- canRedefineCount++ +- } +- } +- } +- // An identifier must meet two conditions to become a parameter of the +- // extracted function. (1) it must be free (isFree), and (2) its first +- // use within the selection cannot be its own definition (isDefined). +- if v.free && !v.defined { +- // Skip the selector for a method. 
+- if isMethod && v.obj == receiverObj { +- receiverUsed = true +- continue +- } +- params = append(params, identifier) +- paramTypes = append(paramTypes, &ast.Field{ +- Names: []*ast.Ident{identifier}, +- Type: typ, +- }) +- } +- } +- +- reorderParams(params, paramTypes) +- +- // Find the function literal that encloses the selection. The enclosing function literal +- // may not be the enclosing function declaration (i.e. 'outer'). For example, in the +- // following block: +- // +- // func main() { +- // ast.Inspect(node, func(n ast.Node) bool { +- // v := 1 // this line extracted +- // return true +- // }) +- // } +- // +- // 'outer' is main(). However, the extracted selection most directly belongs to +- // the anonymous function literal, the second argument of ast.Inspect(). We use the +- // enclosing function literal to determine the proper return types for return statements +- // within the selection. We still need the enclosing function declaration because this is +- // the top-level declaration. We inspect the top-level declaration to look for variables +- // as well as for code replacement. +- enclosing := outer.Type +- for _, p := range path { +- if p == enclosing { +- break +- } +- if fl, ok := p.(*ast.FuncLit); ok { +- enclosing = fl.Type +- break +- } +- } +- +- // We put the selection in a constructed file. We can then traverse and edit +- // the extracted selection without modifying the original AST. +- startOffset, endOffset, err := safetoken.Offsets(tok, start, end) +- if err != nil { +- return nil, nil, err +- } +- selection := src[startOffset:endOffset] +- +- extractedBlock, extractedComments, err := parseStmts(fset, selection) +- if err != nil { +- return nil, nil, err +- } +- +- // We need to account for return statements in the selected block, as they will complicate +- // the logical flow of the extracted function. See the following example, where ** denotes +- // the range to be extracted. +- // +- // Before: +- // +- // func _() int { +- // a := 1 +- // b := 2 +- // **if a == b { +- // return a +- // }** +- // ... +- // } +- // +- // After: +- // +- // func _() int { +- // a := 1 +- // b := 2 +- // cond0, ret0 := x0(a, b) +- // if cond0 { +- // return ret0 +- // } +- // ... +- // } +- // +- // func x0(a int, b int) (bool, int) { +- // if a == b { +- // return true, a +- // } +- // return false, 0 +- // } +- // +- // We handle returns by adding an additional boolean return value to the extracted function. +- // This bool reports whether the original function would have returned. Because the +- // extracted selection contains a return statement, we must also add the types in the +- // return signature of the enclosing function to the return signature of the +- // extracted function. We then add an extra if statement checking this boolean value +- // in the original function. If the condition is met, the original function should +- // return a value, mimicking the functionality of the original return statement(s) +- // in the selection. +- // +- // If there is a return that is guaranteed to execute (hasNonNestedReturns=true), then +- // we don't need to include this additional condition check and can simply return. 
+- // +- // Before: +- // +- // func _() int { +- // a := 1 +- // b := 2 +- // **if a == b { +- // return a +- // } +- // return b** +- // } +- // +- // After: +- // +- // func _() int { +- // a := 1 +- // b := 2 +- // return x0(a, b) +- // } +- // +- // func x0(a int, b int) int { +- // if a == b { +- // return a +- // } +- // return b +- // } +- +- var retVars []*returnVariable +- var ifReturn *ast.IfStmt +- +- // Determine if the extracted block contains any free branch statements, for +- // example: "continue label" where "label" is declared outside of the +- // extracted block, or continue inside a "for" statement where the for +- // statement is declared outside of the extracted block. These will be +- // handled below, after adjusting return statements and generating return +- // info. +- curSel, _ := pgf.Cursor.FindByPos(start, end) // since canExtractFunction succeeded, this will always return a valid cursor +- freeBranches := freeBranches(info, curSel, start, end) +- +- // All return statements in the extracted block are error handling returns, and there are no free control statements. +- isErrHandlingReturnsCase := allReturnsFinalErr && len(freeBranches) == 0 +- +- if hasReturn { +- if !hasNonNestedReturn { +- // The selected block contained return statements, so we have to modify the +- // signature of the extracted function as described above. Adjust all of +- // the return statements in the extracted function to reflect this change in +- // signature. +- if err := adjustReturnStatements(returnTypes, seenVars, extractedBlock, qual, isErrHandlingReturnsCase); err != nil { +- return nil, nil, err +- } +- } +- // Collect the additional return values and types needed to accommodate return +- // statements in the selection. Update the type signature of the extracted +- // function and construct the if statement that will be inserted in the enclosing +- // function. +- retVars, ifReturn, err = generateReturnInfo(enclosing, pkg, path, file, info, start, end, hasNonNestedReturn, isErrHandlingReturnsCase) +- if err != nil { +- return nil, nil, err +- } +- } +- +- // If the extracted block contains free branch statements, we add another +- // return value "ctrl" to the extracted function that will be used to +- // determine the control flow. See the following example, where === denotes +- // the range to be extracted. +- // +- // Before: +- // func f(cond bool) { +- // for range "abc" { +- // ============== +- // if cond { +- // continue +- // } +- // ============== +- // println(0) +- // } +- // } +- +- // After: +- // func f(cond bool) { +- // for range "abc" { +- // ctrl := newFunction(cond) +- // switch ctrl { +- // case 1: +- // continue +- // } +- // println(0) +- // } +- // } +- // +- // func newFunction(cond bool) int { +- // if cond { +- // return 1 +- // } +- // return 0 +- // } +- // +- +- // Generate an unused identifier for the control value. +- ctrlVar, _ := freshName(info, file, start, "ctrl", 0) +- if len(freeBranches) > 0 { +- +- zeroValExpr := &ast.BasicLit{ +- Kind: token.INT, +- Value: "0", +- } +- var branchStmts []*ast.BranchStmt +- var stack []ast.Node +- // Add the zero "ctrl" value to each return statement in the extracted block. 
+- ast.Inspect(extractedBlock, func(n ast.Node) bool { +- if n != nil { +- stack = append(stack, n) +- } else { +- stack = stack[:len(stack)-1] +- } +- switch n := n.(type) { +- case *ast.ReturnStmt: +- n.Results = append(n.Results, zeroValExpr) +- case *ast.BranchStmt: +- // Collect a list of branch statements in the extracted block to examine later. +- if isFreeBranchStmt(stack) { +- branchStmts = append(branchStmts, n) +- } +- case *ast.FuncLit: +- // Don't descend into nested functions. When we return false +- // here, ast.Inspect does not give us a "pop" event when leaving +- // the subtree, so we need to pop here. (golang/go#73319) +- stack = stack[:len(stack)-1] +- return false +- } +- return true +- }) +- +- // Construct a return statement to replace each free branch statement in the extracted block. It should have +- // zero values for all return parameters except one, "ctrl", which dictates which continuation to follow. +- var freeCtrlStmtReturns []ast.Expr +- // Create "zero values" for each type. +- for _, returnType := range returnTypes { +- var val ast.Expr +- var isValid bool +- for obj, typ := range seenVars { +- if typ == returnType.Type { +- val, isValid = typesinternal.ZeroExpr(obj.Type(), qual) +- break +- } +- } +- if !isValid { +- return nil, nil, fmt.Errorf("could not find matching AST expression for %T", returnType.Type) +- } +- freeCtrlStmtReturns = append(freeCtrlStmtReturns, val) +- } +- freeCtrlStmtReturns = append(freeCtrlStmtReturns, getZeroVals(retVars)...) +- +- for i, branchStmt := range branchStmts { +- replaceBranchStmtWithReturnStmt(extractedBlock, branchStmt, &ast.ReturnStmt{ +- Return: branchStmt.Pos(), +- Results: append(slices.Clip(freeCtrlStmtReturns), &ast.BasicLit{ +- Kind: token.INT, +- Value: strconv.Itoa(i + 1), // start with 1 because 0 is reserved for base case +- }), +- }) +- +- } +- retVars = append(retVars, &returnVariable{ +- name: ast.NewIdent(ctrlVar), +- decl: &ast.Field{Type: ast.NewIdent("int")}, +- zeroVal: zeroValExpr, +- }) +- } +- +- // Add a return statement to the end of the new function. This return statement must include +- // the values for the types of the original extracted function signature and (if a return +- // statement is present in the selection) enclosing function signature. +- // This only needs to be done if the selections does not have a non-nested return, otherwise +- // it already terminates with a return statement. +- hasReturnValues := len(returns)+len(retVars) > 0 +- if hasReturnValues && !hasNonNestedReturn { +- extractedBlock.List = append(extractedBlock.List, &ast.ReturnStmt{ +- Results: append(returns, getZeroVals(retVars)...), +- }) +- } +- +- // Construct the appropriate call to the extracted function. +- // We must meet two conditions to use ":=" instead of '='. (1) there must be at least +- // one variable on the lhs that is uninitialized (non-free) prior to the assignment. +- // (2) all of the initialized (free) variables on the lhs must be able to be redefined. +- sym := token.ASSIGN +- canDefineCount := len(uninitialized) + canRedefineCount +- canDefine := len(uninitialized)+len(retVars) > 0 && canDefineCount == len(returns) +- if canDefine { +- sym = token.DEFINE +- } +- var funName string +- if isMethod { +- // TODO(suzmue): generate a name that does not conflict for "newMethod". 
+- funName = "newMethod" +- } else { +- funName, _ = freshName(info, file, start, "newFunction", 0) +- } +- extractedFunCall := generateFuncCall(hasNonNestedReturn, hasReturnValues, params, +- append(returns, getNames(retVars)...), funName, sym, receiverName) +- +- // Create variable declarations for any identifiers that need to be initialized prior to +- // calling the extracted function. We do not manually initialize variables if every return +- // value is uninitialized. We can use := to initialize the variables in this situation. +- var declarations []ast.Stmt +- if canDefineCount != len(returns) { +- declarations = initializeVars(uninitialized, retVars, seenUninitialized, seenVars) +- } +- +- var declBuf, replaceBuf, newFuncBuf, ifBuf, commentBuf bytes.Buffer +- if err := format.Node(&declBuf, fset, declarations); err != nil { +- return nil, nil, err +- } +- if err := format.Node(&replaceBuf, fset, extractedFunCall); err != nil { +- return nil, nil, err +- } +- if ifReturn != nil { +- if isErrHandlingReturnsCase { +- errName := retVars[len(retVars)-1] +- fmt.Fprintf(&ifBuf, "if %s != nil ", errName.name.String()) +- if err := format.Node(&ifBuf, fset, ifReturn.Body); err != nil { +- return nil, nil, err +- } +- } else { +- if err := format.Node(&ifBuf, fset, ifReturn); err != nil { +- return nil, nil, err +- } +- } +- } +- +- // Build the extracted function. We format the function declaration and body +- // separately, so that comments are printed relative to the extracted +- // BlockStmt. +- // +- // In other words, extractedBlock and extractedComments were parsed from a +- // synthetic function declaration of the form func _() { ... }. If we now +- // print the real function declaration, the length of the signature will have +- // grown, causing some comment positions to be computed as inside the +- // signature itself. +- newFunc := &ast.FuncDecl{ +- Name: ast.NewIdent(funName), +- Type: &ast.FuncType{ +- Params: &ast.FieldList{List: paramTypes}, +- Results: &ast.FieldList{List: append(returnTypes, getDecls(retVars)...)}, +- }, +- // Body handled separately -- see above. +- } +- if isMethod { +- var names []*ast.Ident +- if receiverUsed { +- names = append(names, ast.NewIdent(receiverName)) +- } +- newFunc.Recv = &ast.FieldList{ +- List: []*ast.Field{{ +- Names: names, +- Type: receiver.Type, +- }}, +- } +- } +- if err := format.Node(&newFuncBuf, fset, newFunc); err != nil { +- return nil, nil, err +- } +- // Write a space between the end of the function signature and opening '{'. +- if err := newFuncBuf.WriteByte(' '); err != nil { +- return nil, nil, err +- } +- commentedNode := &printer.CommentedNode{ +- Node: extractedBlock, +- Comments: extractedComments, +- } +- if err := format.Node(&newFuncBuf, fset, commentedNode); err != nil { +- return nil, nil, err +- } +- +- // We're going to replace the whole enclosing function, +- // so preserve the text before and after the selected block. 
+- outerStart, outerEnd, err := safetoken.Offsets(tok, outer.Pos(), outer.End()) +- if err != nil { +- return nil, nil, err +- } +- before := src[outerStart:startOffset] +- after := src[endOffset:outerEnd] +- indent, err := pgf.Indentation(node.Pos()) +- if err != nil { +- return nil, nil, err +- } +- newLineIndent := "\n" + indent +- +- var fullReplacement strings.Builder +- fullReplacement.Write(before) +- if commentBuf.Len() > 0 { +- comments := strings.ReplaceAll(commentBuf.String(), "\n", newLineIndent) +- fullReplacement.WriteString(comments) +- } +- if declBuf.Len() > 0 { // add any initializations, if needed +- initializations := strings.ReplaceAll(declBuf.String(), "\n", newLineIndent) + +- newLineIndent +- fullReplacement.WriteString(initializations) +- } +- fullReplacement.Write(replaceBuf.Bytes()) // call the extracted function +- if ifBuf.Len() > 0 { // add the if statement below the function call, if needed +- ifstatement := newLineIndent + +- strings.ReplaceAll(ifBuf.String(), "\n", newLineIndent) +- fullReplacement.WriteString(ifstatement) +- } +- +- // Add the switch statement for free branch statements after the new function call. +- if len(freeBranches) > 0 { +- fmt.Fprintf(&fullReplacement, "%[1]sswitch %[2]s {%[1]s", newLineIndent, ctrlVar) +- for i, br := range freeBranches { +- // Preserve spacing at the beginning of the line containing the branch statement. +- startPos := tok.LineStart(safetoken.Line(tok, br.Pos())) +- text, err := pgf.PosText(startPos, br.End()) +- if err != nil { +- return nil, nil, err +- } +- fmt.Fprintf(&fullReplacement, "case %d:\n%s%s", i+1, text, newLineIndent) +- } +- fullReplacement.WriteString("}") +- } +- +- fullReplacement.Write(after) +- fullReplacement.WriteString("\n\n") // add newlines after the enclosing function +- fullReplacement.Write(newFuncBuf.Bytes()) // insert the extracted function +- +- return fset, &analysis.SuggestedFix{ +- TextEdits: []analysis.TextEdit{{ +- Pos: outer.Pos(), +- End: outer.End(), +- NewText: []byte(fullReplacement.String()), +- }}, +- }, nil +-} +- +-// isSelector reports if e is the selector expr , . It works for pointer and non-pointer selector expressions. +-func isSelector(e ast.Expr, x, sel string) bool { +- unary, ok := e.(*ast.UnaryExpr) +- if ok && unary.Op == token.MUL { +- e = unary.X +- } +- selectorExpr, ok := e.(*ast.SelectorExpr) +- if !ok { +- return false +- } +- ident, ok := selectorExpr.X.(*ast.Ident) +- if !ok { +- return false +- } +- return ident.Name == x && selectorExpr.Sel.Name == sel +-} +- +-// reorderParams reorders the given parameters in-place to follow common Go conventions. +-func reorderParams(params []ast.Expr, paramTypes []*ast.Field) { +- moveParamToFrontIfFound(params, paramTypes, "testing", "T") +- moveParamToFrontIfFound(params, paramTypes, "testing", "B") +- moveParamToFrontIfFound(params, paramTypes, "context", "Context") +-} +- +-func moveParamToFrontIfFound(params []ast.Expr, paramTypes []*ast.Field, x, sel string) { +- // Move Context parameter (if any) to front. +- for i, t := range paramTypes { +- if isSelector(t.Type, x, sel) { +- p, t := params[i], paramTypes[i] +- copy(params[1:], params[:i]) +- copy(paramTypes[1:], paramTypes[:i]) +- params[0], paramTypes[0] = p, t +- break +- } +- } +-} +- +-// adjustRangeForCommentsAndWhiteSpace adjusts the given range to exclude unnecessary leading or +-// trailing whitespace characters from selection as well as leading or trailing comments. +-// In the following example, each line of the if statement is indented once. 
There are also two +-// extra spaces after the sclosing bracket before the line break and a comment. +-// +-// \tif (true) { +-// \t _ = 1 +-// \t} // hello \n +-// +-// By default, a valid range begins at 'if' and ends at the first whitespace character +-// after the '}'. But, users are likely to highlight full lines rather than adjusting +-// their cursors for whitespace. To support this use case, we must manually adjust the +-// ranges to match the correct AST node. In this particular example, we would adjust +-// rng.Start forward to the start of 'if' and rng.End backward to after '}'. +-func adjustRangeForCommentsAndWhiteSpace(tok *token.File, start, end token.Pos, content []byte, curFile inspector.Cursor) (token.Pos, token.Pos, error) { +- file := curFile.Node().(*ast.File) +- // TODO(adonovan): simplify, using Cursor. +- +- // Adjust the end of the range to after leading whitespace and comments. +- prevStart := token.NoPos +- startComment := sort.Search(len(file.Comments), func(i int) bool { +- // Find the index for the first comment that ends after range start. +- return file.Comments[i].End() > start +- }) +- for prevStart != start { +- prevStart = start +- // If start is within a comment, move start to the end +- // of the comment group. +- if startComment < len(file.Comments) && file.Comments[startComment].Pos() <= start && start < file.Comments[startComment].End() { +- start = file.Comments[startComment].End() +- startComment++ +- } +- // Move forwards to find a non-whitespace character. +- offset, err := safetoken.Offset(tok, start) +- if err != nil { +- return 0, 0, err +- } +- for offset < len(content) && isGoWhiteSpace(content[offset]) { +- offset++ +- } +- start = tok.Pos(offset) +- } +- +- // Adjust the end of the range to before trailing whitespace and comments. +- prevEnd := token.NoPos +- endComment := sort.Search(len(file.Comments), func(i int) bool { +- // Find the index for the first comment that ends after the range end. +- return file.Comments[i].End() >= end +- }) +- // Search will return n if not found, so we need to adjust if there are no +- // comments that would match. +- if endComment == len(file.Comments) { +- endComment = -1 +- } +- for prevEnd != end { +- prevEnd = end +- // If end is within a comment, move end to the start +- // of the comment group. +- if endComment >= 0 && file.Comments[endComment].Pos() < end && end <= file.Comments[endComment].End() { +- end = file.Comments[endComment].Pos() +- endComment-- +- } +- // Move backwards to find a non-whitespace character. +- offset, err := safetoken.Offset(tok, end) +- if err != nil { +- return 0, 0, err +- } +- for offset > 0 && isGoWhiteSpace(content[offset-1]) { +- offset-- +- } +- end = tok.Pos(offset) +- } +- +- return start, end, nil +-} +- +-// isGoWhiteSpace returns true if b is a considered white space in +-// Go as defined by scanner.GoWhitespace. +-func isGoWhiteSpace(b byte) bool { +- return uint64(scanner.GoWhitespace)&(1< not free +- } +- return obj, true +- } +- // sel returns non-nil if n denotes a selection o.x.y that is referenced by the +- // span and defined either within the span or in the lexical environment. The bool +- // return value acts as an indicator for where it was defined. 
+- var sel func(n *ast.SelectorExpr) (types.Object, bool) +- sel = func(n *ast.SelectorExpr) (types.Object, bool) { +- switch x := ast.Unparen(n.X).(type) { +- case *ast.SelectorExpr: +- return sel(x) +- case *ast.Ident: +- return id(x) +- } +- return nil, false +- } +- seen := make(map[types.Object]*variable) +- firstUseIn := make(map[types.Object]token.Pos) +- var vars []types.Object +- ast.Inspect(node, func(n ast.Node) bool { +- if n == nil { +- return false +- } +- if start <= n.Pos() && n.End() <= end { +- var obj types.Object +- var isFree, prune bool +- switch n := n.(type) { +- case *ast.BranchStmt: +- // Avoid including labels attached to branch statements. +- return false +- case *ast.Ident: +- obj, isFree = id(n) +- case *ast.SelectorExpr: +- obj, isFree = sel(n) +- prune = true +- } +- if obj != nil { +- seen[obj] = &variable{ +- obj: obj, +- free: isFree, +- } +- vars = append(vars, obj) +- // Find the first time that the object is used in the selection. +- first, ok := firstUseIn[obj] +- if !ok || n.Pos() < first { +- firstUseIn[obj] = n.Pos() +- } +- if prune { +- return false +- } +- } +- } +- return n.Pos() <= end +- }) +- +- // Find identifiers that are initialized or whose values are altered at some +- // point in the selected block. For example, in a selected block from lines 2-4, +- // variables x, y, and z are included in assigned. However, in a selected block +- // from lines 3-4, only variables y and z are included in assigned. +- // +- // 1: var a int +- // 2: var x int +- // 3: y := 3 +- // 4: z := x + a +- // +- ast.Inspect(node, func(n ast.Node) bool { +- if n == nil { +- return false +- } +- if n.Pos() < start || n.End() > end { +- return n.Pos() <= end +- } +- switch n := n.(type) { +- case *ast.AssignStmt: +- for _, assignment := range n.Lhs { +- lhs, ok := assignment.(*ast.Ident) +- if !ok { +- continue +- } +- obj, _ := id(lhs) +- if obj == nil { +- continue +- } +- if _, ok := seen[obj]; !ok { +- continue +- } +- seen[obj].assigned = true +- if n.Tok != token.DEFINE { +- continue +- } +- // Find identifiers that are defined prior to being used +- // elsewhere in the selection. +- // TODO: Include identifiers that are assigned prior to being +- // used elsewhere in the selection. Then, change the assignment +- // to a definition in the extracted function. +- if firstUseIn[obj] != lhs.Pos() { +- continue +- } +- // Ensure that the object is not used in its own re-definition. 
+- // For example: +- // var f float64 +- // f, e := math.Frexp(f) +- for _, expr := range n.Rhs { +- if referencesObj(info, expr, obj) { +- continue +- } +- if _, ok := seen[obj]; !ok { +- continue +- } +- seen[obj].defined = true +- break +- } +- } +- return false +- case *ast.DeclStmt: +- gen, ok := n.Decl.(*ast.GenDecl) +- if !ok { +- return false +- } +- for _, spec := range gen.Specs { +- vSpecs, ok := spec.(*ast.ValueSpec) +- if !ok { +- continue +- } +- for _, vSpec := range vSpecs.Names { +- obj, _ := id(vSpec) +- if obj == nil { +- continue +- } +- if _, ok := seen[obj]; !ok { +- continue +- } +- seen[obj].assigned = true +- } +- } +- return false +- case *ast.IncDecStmt: +- if ident, ok := n.X.(*ast.Ident); !ok { +- return false +- } else if obj, _ := id(ident); obj == nil { +- return false +- } else { +- if _, ok := seen[obj]; !ok { +- return false +- } +- seen[obj].assigned = true +- } +- } +- return true +- }) +- var variables []*variable +- for _, obj := range vars { +- v, ok := seen[obj] +- if !ok { +- return nil, fmt.Errorf("no seen types.Object for %v", obj) +- } +- if named, ok := v.obj.Type().(typesinternal.NamedOrAlias); ok { +- namedPos := named.Obj().Pos() +- if isLocal(named.Obj()) && !(start <= namedPos && namedPos <= end) { +- return nil, fmt.Errorf("Cannot extract selection: the code refers to a local type whose definition lies outside the extracted block") +- } +- } +- variables = append(variables, v) +- } +- return variables, nil +-} +- +-// referencesObj checks whether the given object appears in the given expression. +-func referencesObj(info *types.Info, expr ast.Expr, obj types.Object) bool { +- var hasObj bool +- ast.Inspect(expr, func(n ast.Node) bool { +- if n == nil { +- return false +- } +- ident, ok := n.(*ast.Ident) +- if !ok { +- return true +- } +- objUse := info.Uses[ident] +- if obj == objUse { +- hasObj = true +- return false +- } +- return false +- }) +- return hasObj +-} +- +-type fnExtractParams struct { +- tok *token.File +- start, end token.Pos +- path []ast.Node +- outer *ast.FuncDecl +- node ast.Node +-} +- +-// canExtractFunction reports whether the code in the given range can be +-// extracted to a function. +-func canExtractFunction(tok *token.File, start, end token.Pos, src []byte, curFile inspector.Cursor) (*fnExtractParams, bool, bool, error) { +- if start == end { +- return nil, false, false, fmt.Errorf("start and end are equal") +- } +- var err error +- file := curFile.Node().(*ast.File) +- // TODO(adonovan): simplify, using Cursor. +- start, end, err = adjustRangeForCommentsAndWhiteSpace(tok, start, end, src, curFile) +- if err != nil { +- return nil, false, false, err +- } +- path, _ := astutil.PathEnclosingInterval(file, start, end) +- if len(path) == 0 { +- return nil, false, false, fmt.Errorf("no path enclosing interval") +- } +- // Node that encloses the selection must be a statement. +- // TODO: Support function extraction for an expression. +- _, ok := path[0].(ast.Stmt) +- if !ok { +- return nil, false, false, fmt.Errorf("node is not a statement") +- } +- +- // Find the function declaration that encloses the selection. +- var outer *ast.FuncDecl +- for _, p := range path { +- if p, ok := p.(*ast.FuncDecl); ok { +- outer = p +- break +- } +- } +- if outer == nil { +- return nil, false, false, fmt.Errorf("no enclosing function") +- } +- +- // Find the nodes at the start and end of the selection. 
+- var startNode, endNode ast.Node +- ast.Inspect(outer, func(n ast.Node) bool { +- if n == nil { +- return false +- } +- // Do not override 'start' with a node that begins at the same location +- // but is nested further from 'outer'. +- if startNode == nil && n.Pos() == start && n.End() <= end { +- startNode = n +- } +- if endNode == nil && n.End() == end && n.Pos() >= start { +- endNode = n +- } +- return n.Pos() <= end +- }) +- if startNode == nil || endNode == nil { +- return nil, false, false, fmt.Errorf("range does not map to AST nodes") +- } +- // If the region is a blockStmt, use the first and last nodes in the block +- // statement. +- // { ... } => { ... } +- if blockStmt, ok := startNode.(*ast.BlockStmt); ok { +- if len(blockStmt.List) == 0 { +- return nil, false, false, fmt.Errorf("range maps to empty block statement") +- } +- startNode, endNode = blockStmt.List[0], blockStmt.List[len(blockStmt.List)-1] +- start, end = startNode.Pos(), endNode.End() +- } +- return &fnExtractParams{ +- tok: tok, +- start: start, +- end: end, +- path: path, +- outer: outer, +- node: startNode, +- }, true, outer.Recv != nil, nil +-} +- +-// objUsed checks if the object is used within the range. It returns the first +-// occurrence of the object in the range, if it exists. +-func objUsed(info *types.Info, start, end token.Pos, obj types.Object) (bool, *ast.Ident) { +- var firstUse *ast.Ident +- for id, objUse := range info.Uses { +- if obj != objUse { +- continue +- } +- if id.Pos() < start || id.End() > end { +- continue +- } +- if firstUse == nil || id.Pos() < firstUse.Pos() { +- firstUse = id +- } +- } +- return firstUse != nil, firstUse +-} +- +-// varOverridden traverses the given AST node until we find the given identifier. Then, we +-// examine the occurrence of the given identifier and check for (1) whether the identifier +-// is being redefined. If the identifier is free, we also check for (2) whether the identifier +-// is being reassigned. We will not include an identifier in the return statement of the +-// extracted function if it meets one of the above conditions. +-func varOverridden(info *types.Info, firstUse *ast.Ident, obj types.Object, isFree bool, node ast.Node) bool { +- var isOverriden bool +- ast.Inspect(node, func(n ast.Node) bool { +- if n == nil { +- return false +- } +- assignment, ok := n.(*ast.AssignStmt) +- if !ok { +- return true +- } +- // A free variable is initialized prior to the selection. We can always reassign +- // this variable after the selection because it has already been defined. +- // Conversely, a non-free variable is initialized within the selection. Thus, we +- // cannot reassign this variable after the selection unless it is initialized and +- // returned by the extracted function. +- if !isFree && assignment.Tok == token.ASSIGN { +- return false +- } +- for _, assigned := range assignment.Lhs { +- ident, ok := assigned.(*ast.Ident) +- // Check if we found the first use of the identifier. +- if !ok || ident != firstUse { +- continue +- } +- objUse := info.Uses[ident] +- if objUse == nil || objUse != obj { +- continue +- } +- // Ensure that the object is not used in its own definition. 
+- // For example: +- // var f float64 +- // f, e := math.Frexp(f) +- for _, expr := range assignment.Rhs { +- if referencesObj(info, expr, obj) { +- return false +- } +- } +- isOverriden = true +- return false +- } +- return false +- }) +- return isOverriden +-} +- +-// parseStmts parses the specified source (a list of statements) and +-// returns them as a BlockStmt along with any associated comments. +-func parseStmts(fset *token.FileSet, src []byte) (*ast.BlockStmt, []*ast.CommentGroup, error) { +- text := "package main\nfunc _() { " + string(src) + " }" +- file, err := parser.ParseFile(fset, "", text, parser.ParseComments|parser.SkipObjectResolution) +- if err != nil { +- return nil, nil, err +- } +- if len(file.Decls) != 1 { +- return nil, nil, fmt.Errorf("got %d declarations, want 1", len(file.Decls)) +- } +- decl, ok := file.Decls[0].(*ast.FuncDecl) +- if !ok { +- return nil, nil, bug.Errorf("parsed file does not contain expected function declaration") +- } +- if decl.Body == nil { +- return nil, nil, bug.Errorf("extracted function has no body") +- } +- return decl.Body, file.Comments, nil +-} +- +-// generateReturnInfo generates the information we need to adjust the return statements and +-// signature of the extracted function. We prepare names, signatures, and "zero values" that +-// represent the new variables. We also use this information to construct the if statement that +-// is inserted below the call to the extracted function. +-func generateReturnInfo(enclosing *ast.FuncType, pkg *types.Package, path []ast.Node, file *ast.File, info *types.Info, start, end token.Pos, hasNonNestedReturns bool, isErrHandlingReturnsCase bool) ([]*returnVariable, *ast.IfStmt, error) { +- var retVars []*returnVariable +- var cond *ast.Ident +- // Generate information for the values in the return signature of the enclosing function. +- if enclosing.Results != nil { +- nameIdx := make(map[string]int) // last integral suffixes of generated names +- qual := typesinternal.FileQualifier(file, pkg) +- for _, field := range enclosing.Results.List { +- typ := info.TypeOf(field.Type) +- if typ == nil { +- return nil, nil, fmt.Errorf( +- "failed type conversion, AST expression: %T", field.Type) +- } +- names := []string{""} +- if len(field.Names) > 0 { +- names = nil +- for _, n := range field.Names { +- names = append(names, n.Name) +- } +- } +- for _, name := range names { +- bestName := "result" +- if name != "" && name != "_" { +- bestName = name +- } else if n, ok := varNameForType(typ); ok { +- bestName = n +- } +- retName, idx := freshNameOutsideRange(info, file, path[0].Pos(), start, end, bestName, nameIdx[bestName]) +- nameIdx[bestName] = idx +- z, isValid := typesinternal.ZeroExpr(typ, qual) +- if !isValid { +- return nil, nil, fmt.Errorf("can't generate zero value for %T", typ) +- } +- retVars = append(retVars, &returnVariable{ +- name: ast.NewIdent(retName), +- decl: &ast.Field{Type: typesinternal.TypeExpr(typ, qual)}, +- zeroVal: z, +- }) +- } +- } +- } +- var ifReturn *ast.IfStmt +- if !hasNonNestedReturns { +- results := getNames(retVars) +- if !isErrHandlingReturnsCase { +- // Generate information for the added bool value. 
+- name, _ := freshNameOutsideRange(info, file, path[0].Pos(), start, end, "shouldReturn", 0) +- cond = &ast.Ident{Name: name} +- retVars = append(retVars, &returnVariable{ +- name: cond, +- decl: &ast.Field{Type: ast.NewIdent("bool")}, +- zeroVal: ast.NewIdent("false"), +- }) +- } +- ifReturn = &ast.IfStmt{ +- Cond: cond, +- Body: &ast.BlockStmt{ +- List: []ast.Stmt{&ast.ReturnStmt{Results: results}}, +- }, +- } +- } +- return retVars, ifReturn, nil +-} +- +-type objKey struct{ pkg, name string } +- +-// conventionalVarNames specifies conventional names for variables with various +-// standard library types. +-// +-// Keep this up to date with completion.conventionalAcronyms. +-// +-// TODO(rfindley): consider factoring out a "conventions" library. +-var conventionalVarNames = map[objKey]string{ +- {"", "error"}: "err", +- {"context", "Context"}: "ctx", +- {"sql", "Tx"}: "tx", +- {"http", "ResponseWriter"}: "rw", // Note: same as [AbbreviateVarName]. +-} +- +-// varNameForType chooses a "good" name for a variable with the given type, +-// if possible. Otherwise, it returns "", false. +-// +-// For special types, it uses known conventional names. +-func varNameForType(t types.Type) (string, bool) { +- tname := typesinternal.TypeNameFor(t) +- if tname == nil { +- return "", false +- } +- +- // Have Alias, Basic, Named, or TypeParam. +- k := objKey{name: tname.Name()} +- if tname.Pkg() != nil { +- k.pkg = tname.Pkg().Name() +- } +- if name, ok := conventionalVarNames[k]; ok { +- return name, true +- } +- +- return AbbreviateVarName(tname.Name()), true +-} +- +-// adjustReturnStatements adds "zero values" of the given types to each return +-// statement in the given AST node. +-func adjustReturnStatements(returnTypes []*ast.Field, seenVars map[types.Object]ast.Expr, extractedBlock *ast.BlockStmt, qual types.Qualifier, isErrHandlingReturnsCase bool) error { +- var zeroVals []ast.Expr +- // Create "zero values" for each type. +- for _, returnType := range returnTypes { +- var val ast.Expr +- var isValid bool +- for obj, typ := range seenVars { +- if typ == returnType.Type { +- val, isValid = typesinternal.ZeroExpr(obj.Type(), qual) +- break +- } +- } +- if !isValid { +- return fmt.Errorf("could not find matching AST expression for %T", returnType.Type) +- } +- zeroVals = append(zeroVals, val) +- } +- // Add "zero values" to each return statement. +- // The bool reports whether the enclosing function should return after calling the +- // extracted function. We set the bool to 'true' because, if these return statements +- // execute, the extracted function terminates early, and the enclosing function must +- // return as well. +- var shouldReturnCond []ast.Expr +- if !isErrHandlingReturnsCase { +- shouldReturnCond = append(shouldReturnCond, ast.NewIdent("true")) +- } +- +- ast.Inspect(extractedBlock, func(n ast.Node) bool { +- if n == nil { +- return false +- } +- // Don't modify return statements inside anonymous functions. +- if _, ok := n.(*ast.FuncLit); ok { +- return false +- } +- if n, ok := n.(*ast.ReturnStmt); ok { +- n.Results = slices.Concat(zeroVals, n.Results, shouldReturnCond) +- return false +- } +- return true +- }) +- return nil +-} +- +-// generateFuncCall constructs a call expression for the extracted function, described by the +-// given parameters and return variables. 
+-func generateFuncCall(hasNonNestedReturn, hasReturnVals bool, params, returns []ast.Expr, name string, token token.Token, selector string) ast.Node { +- var replace ast.Node +- callExpr := &ast.CallExpr{ +- Fun: ast.NewIdent(name), +- Args: params, +- } +- if selector != "" { +- callExpr = &ast.CallExpr{ +- Fun: &ast.SelectorExpr{ +- X: ast.NewIdent(selector), +- Sel: ast.NewIdent(name), +- }, +- Args: params, +- } +- } +- if hasReturnVals { +- if hasNonNestedReturn { +- // Create a return statement that returns the result of the function call. +- replace = &ast.ReturnStmt{ +- Return: 0, +- Results: []ast.Expr{callExpr}, +- } +- } else { +- // Assign the result of the function call. +- replace = &ast.AssignStmt{ +- Lhs: returns, +- Tok: token, +- Rhs: []ast.Expr{callExpr}, +- } +- } +- } else { +- replace = callExpr +- } +- return replace +-} +- +-// initializeVars creates variable declarations, if needed. +-// Our preference is to replace the selected block with an "x, y, z := fn()" style +-// assignment statement. We can use this style when all of the variables in the +-// extracted function's return statement are either not defined prior to the extracted block +-// or can be safely redefined. However, for example, if z is already defined +-// in a different scope, we replace the selected block with: +-// +-// var x int +-// var y string +-// x, y, z = fn() +-func initializeVars(uninitialized []types.Object, retVars []*returnVariable, seenUninitialized map[types.Object]struct{}, seenVars map[types.Object]ast.Expr) []ast.Stmt { +- var declarations []ast.Stmt +- for _, obj := range uninitialized { +- if _, ok := seenUninitialized[obj]; ok { +- continue +- } +- seenUninitialized[obj] = struct{}{} +- valSpec := &ast.ValueSpec{ +- Names: []*ast.Ident{ast.NewIdent(obj.Name())}, +- Type: seenVars[obj], +- } +- genDecl := &ast.GenDecl{ +- Tok: token.VAR, +- Specs: []ast.Spec{valSpec}, +- } +- declarations = append(declarations, &ast.DeclStmt{Decl: genDecl}) +- } +- // Each variable added from a return statement in the selection +- // must be initialized. +- for i, retVar := range retVars { +- valSpec := &ast.ValueSpec{ +- Names: []*ast.Ident{retVar.name}, +- Type: retVars[i].decl.Type, +- } +- genDecl := &ast.GenDecl{ +- Tok: token.VAR, +- Specs: []ast.Spec{valSpec}, +- } +- declarations = append(declarations, &ast.DeclStmt{Decl: genDecl}) +- } +- return declarations +-} +- +-// getNames returns the names from the given list of returnVariable. +-func getNames(retVars []*returnVariable) []ast.Expr { +- var names []ast.Expr +- for _, retVar := range retVars { +- names = append(names, retVar.name) +- } +- return names +-} +- +-// getZeroVals returns the "zero values" from the given list of returnVariable. +-func getZeroVals(retVars []*returnVariable) []ast.Expr { +- var zvs []ast.Expr +- for _, retVar := range retVars { +- zvs = append(zvs, retVar.zeroVal) +- } +- return zvs +-} +- +-// getDecls returns the declarations from the given list of returnVariable. +-func getDecls(retVars []*returnVariable) []*ast.Field { +- var decls []*ast.Field +- for _, retVar := range retVars { +- decls = append(decls, retVar.decl) +- } +- return decls +-} +- +-func cond[T any](cond bool, t, f T) T { +- if cond { +- return t +- } else { +- return f +- } +-} +- +-// replaceBranchStmtWithReturnStmt modifies the ast node to replace the given +-// branch statement with the given return statement. 
+-func replaceBranchStmtWithReturnStmt(block ast.Node, br *ast.BranchStmt, ret *ast.ReturnStmt) { +- ast.Inspect(block, func(n ast.Node) bool { +- // Look for the branch statement within a BlockStmt or CaseClause. +- switch n := n.(type) { +- case *ast.BlockStmt: +- for i, stmt := range n.List { +- if stmt == br { +- n.List[i] = ret +- return false +- } +- } +- case *ast.CaseClause: +- for i, stmt := range n.Body { +- if stmt.Pos() == br.Pos() { +- n.Body[i] = ret +- return false +- } +- } +- } +- return true +- }) +-} +- +-// freeBranches returns all branch statements beneath cur whose continuation +-// lies outside the (start, end) range. +-func freeBranches(info *types.Info, cur inspector.Cursor, start, end token.Pos) (free []*ast.BranchStmt) { +-nextBranch: +- for curBr := range cur.Preorder((*ast.BranchStmt)(nil)) { +- br := curBr.Node().(*ast.BranchStmt) +- if br.End() < start || br.Pos() > end { +- continue +- } +- label, _ := info.Uses[br.Label].(*types.Label) +- if label != nil && !(start <= label.Pos() && label.Pos() <= end) { +- free = append(free, br) +- continue +- } +- if br.Tok == token.BREAK || br.Tok == token.CONTINUE { +- filter := []ast.Node{ +- (*ast.ForStmt)(nil), +- (*ast.RangeStmt)(nil), +- (*ast.SwitchStmt)(nil), +- (*ast.TypeSwitchStmt)(nil), +- (*ast.SelectStmt)(nil), +- } +- // Find innermost relevant ancestor for break/continue. +- for curAncestor := range curBr.Parent().Enclosing(filter...) { +- if l, ok := curAncestor.Parent().Node().(*ast.LabeledStmt); ok && +- label != nil && +- l.Label.Name == label.Name() { +- continue +- } +- switch n := curAncestor.Node().(type) { +- case *ast.ForStmt, *ast.RangeStmt: +- if n.Pos() < start { +- free = append(free, br) +- } +- continue nextBranch +- case *ast.SwitchStmt, *ast.TypeSwitchStmt, *ast.SelectStmt: +- if br.Tok == token.BREAK { +- if n.Pos() < start { +- free = append(free, br) +- } +- continue nextBranch +- } +- } +- } +- } +- } +- return +-} +- +-// isFreeBranchStmt returns true if the relevant ancestor for the branch +-// statement at stack[len(stack)-1] cannot be found in the stack. This is used +-// when we are examining the extracted block, since type information isn't +-// available. We need to find the location of the label without using +-// types.Info. +-func isFreeBranchStmt(stack []ast.Node) bool { +- switch node := stack[len(stack)-1].(type) { +- case *ast.BranchStmt: +- isLabeled := node.Label != nil +- switch node.Tok { +- case token.GOTO: +- if isLabeled { +- return !enclosingLabel(stack, node.Label.Name) +- } +- case token.BREAK, token.CONTINUE: +- // Find innermost relevant ancestor for break/continue. +- for i := len(stack) - 2; i >= 0; i-- { +- n := stack[i] +- if isLabeled { +- l, ok := n.(*ast.LabeledStmt) +- if !(ok && l.Label.Name == node.Label.Name) { +- continue +- } +- } +- switch n.(type) { +- case *ast.ForStmt, *ast.RangeStmt, *ast.SwitchStmt, *ast.TypeSwitchStmt, *ast.SelectStmt: +- return false +- } +- } +- } +- } +- // We didn't find the relevant ancestor on the path, so this must be a free branch statement. +- return true +-} +- +-// enclosingLabel returns true if the given label is found on the stack. 
+-func enclosingLabel(stack []ast.Node, label string) bool { +- for _, n := range stack { +- if labelStmt, ok := n.(*ast.LabeledStmt); ok && labelStmt.Label.Name == label { +- return true +- } +- } +- return false +-} +diff -urN a/gopls/internal/golang/extracttofile.go b/gopls/internal/golang/extracttofile.go +--- a/gopls/internal/golang/extracttofile.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/golang/extracttofile.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,343 +0,0 @@ +-// Copyright 2024 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package golang +- +-// This file defines the code action "Extract declarations to new file". +- +-import ( +- "bytes" +- "context" +- "errors" +- "fmt" +- "go/ast" +- "go/format" +- "go/token" +- "go/types" +- "os" +- "path/filepath" +- "strings" +- +- "golang.org/x/tools/gopls/internal/cache" +- "golang.org/x/tools/gopls/internal/cache/parsego" +- "golang.org/x/tools/gopls/internal/file" +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/gopls/internal/util/bug" +- "golang.org/x/tools/gopls/internal/util/safetoken" +-) +- +-// canExtractToNewFile reports whether the code in the given range can be extracted to a new file. +-func canExtractToNewFile(pgf *parsego.File, start, end token.Pos) bool { +- _, _, _, ok := selectedToplevelDecls(pgf, start, end) +- return ok +-} +- +-// findImportEdits finds imports specs that needs to be added to the new file +-// or deleted from the old file if the range is extracted to a new file. +-// +-// TODO: handle dot imports. +-func findImportEdits(file *ast.File, info *types.Info, start, end token.Pos) (adds, deletes []*ast.ImportSpec, _ error) { +- // make a map from a pkgName to its references +- pkgNameReferences := make(map[*types.PkgName][]*ast.Ident) +- for ident, use := range info.Uses { +- if pkgName, ok := use.(*types.PkgName); ok { +- pkgNameReferences[pkgName] = append(pkgNameReferences[pkgName], ident) +- } +- } +- +- // PkgName referenced in the extracted selection must be +- // imported in the new file. +- // PkgName only referenced in the extracted selection must be +- // deleted from the original file. +- for _, spec := range file.Imports { +- if spec.Name != nil && spec.Name.Name == "." { +- // TODO: support dot imports. +- return nil, nil, errors.New("\"extract to new file\" does not support files containing dot imports") +- } +- pkgName := info.PkgNameOf(spec) +- if pkgName == nil { +- continue +- } +- usedInSelection := false +- usedInNonSelection := false +- for _, ident := range pkgNameReferences[pkgName] { +- if posRangeContains(start, end, ident.Pos(), ident.End()) { +- usedInSelection = true +- } else { +- usedInNonSelection = true +- } +- } +- if usedInSelection { +- adds = append(adds, spec) +- } +- if usedInSelection && !usedInNonSelection { +- deletes = append(deletes, spec) +- } +- } +- +- return adds, deletes, nil +-} +- +-// ExtractToNewFile moves selected declarations into a new file. 
+-func ExtractToNewFile(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle, rng protocol.Range) ([]protocol.DocumentChange, error) { +- errorPrefix := "ExtractToNewFile" +- +- pkg, pgf, err := NarrowestPackageForFile(ctx, snapshot, fh.URI()) +- if err != nil { +- return nil, err +- } +- +- start, end, err := pgf.RangePos(rng) +- if err != nil { +- return nil, fmt.Errorf("%s: %w", errorPrefix, err) +- } +- +- // Expand the selection, and compute the portion to extract. +- start, end, firstSymbol, ok := selectedToplevelDecls(pgf, start, end) +- if !ok { +- return nil, fmt.Errorf("invalid selection") +- } +- pgf.CheckPos(start) // #70553 +- // Inv: start is valid wrt pgf.Tok. +- +- // select trailing empty lines +- offset, err := safetoken.Offset(pgf.Tok, end) +- if err != nil { +- return nil, err +- } +- rest := pgf.Src[offset:] +- spaces := len(rest) - len(bytes.TrimLeft(rest, " \t\n")) +- end += token.Pos(spaces) +- pgf.CheckPos(end) // #70553 +- if !(start <= end) { +- bug.Reportf("start: not before end") +- } +- // Inv: end is valid wrt pgf.Tok; env >= start. +- fileStart := pgf.File.FileStart +- pgf.CheckPos(fileStart) // #70553 +- if !(0 <= start-fileStart) { +- bug.Reportf("start: out of bounds") +- } +- if !(int(end-fileStart) <= len(pgf.Src)) { +- bug.Reportf("end: out of bounds") +- } +- // Inv: 0 <= start-fileStart <= end-fileStart <= len(Src). +- src := pgf.Src[start-fileStart : end-fileStart] +- +- replaceRange, err := pgf.PosRange(start, end) +- if err != nil { +- return nil, bug.Errorf("invalid range: %v", err) +- } +- +- adds, deletes, err := findImportEdits(pgf.File, pkg.TypesInfo(), start, end) +- if err != nil { +- return nil, err +- } +- +- var importDeletes []protocol.TextEdit +- // For unparenthesised declarations like `import "fmt"` we remove +- // the whole declaration because simply removing importSpec leaves +- // `import \n`, which does not compile. +- // For parenthesised declarations like `import ("fmt"\n "log")` +- // we only remove the ImportSpec, because removing the whole declaration +- // might remove other ImportsSpecs we don't want to touch. +- unparenthesizedImports := unparenthesizedImports(pgf) +- for _, importSpec := range deletes { +- if decl := unparenthesizedImports[importSpec]; decl != nil { +- importDeletes = append(importDeletes, removeNode(pgf, decl)) +- } else { +- importDeletes = append(importDeletes, removeNode(pgf, importSpec)) +- } +- } +- +- var buf bytes.Buffer +- if c := CopyrightComment(pgf.File); c != nil { +- text, err := pgf.NodeText(c) +- if err != nil { +- return nil, err +- } +- buf.Write(text) +- // One empty line between copyright header and following. +- buf.WriteString("\n\n") +- } +- +- if c := buildConstraintComment(pgf.File); c != nil { +- text, err := pgf.NodeText(c) +- if err != nil { +- return nil, err +- } +- buf.Write(text) +- // One empty line between build constraint and following. 
+- buf.WriteString("\n\n") +- } +- +- fmt.Fprintf(&buf, "package %s\n", pgf.File.Name.Name) +- if len(adds) > 0 { +- buf.WriteString("import (") +- for _, importSpec := range adds { +- if importSpec.Name != nil { +- fmt.Fprintf(&buf, "%s %s\n", importSpec.Name.Name, importSpec.Path.Value) +- } else { +- fmt.Fprintf(&buf, "%s\n", importSpec.Path.Value) +- } +- } +- buf.WriteString(")\n") +- } +- +- newFile, err := chooseNewFile(ctx, snapshot, pgf.URI.DirPath(), firstSymbol) +- if err != nil { +- return nil, fmt.Errorf("%s: %w", errorPrefix, err) +- } +- +- buf.Write(src) +- +- newFileContent, err := format.Source(buf.Bytes()) +- if err != nil { +- return nil, err +- } +- +- return []protocol.DocumentChange{ +- // edit the original file +- protocol.DocumentChangeEdit(fh, append(importDeletes, protocol.TextEdit{Range: replaceRange, NewText: ""})), +- // create a new file +- protocol.DocumentChangeCreate(newFile.URI()), +- // edit the created file +- protocol.DocumentChangeEdit(newFile, []protocol.TextEdit{ +- {Range: protocol.Range{}, NewText: string(newFileContent)}, +- })}, nil +-} +- +-// chooseNewFile chooses a new filename in dir, based on the name of the +-// first extracted symbol, and if necessary to disambiguate, a numeric suffix. +-func chooseNewFile(ctx context.Context, snapshot *cache.Snapshot, dir string, firstSymbol string) (file.Handle, error) { +- basename := strings.ToLower(firstSymbol) +- newPath := protocol.URIFromPath(filepath.Join(dir, basename+".go")) +- for count := 1; count < 5; count++ { +- fh, err := snapshot.ReadFile(ctx, newPath) +- if err != nil { +- return nil, err // canceled +- } +- if _, err := fh.Content(); errors.Is(err, os.ErrNotExist) { +- return fh, nil +- } +- filename := fmt.Sprintf("%s.%d.go", basename, count) +- newPath = protocol.URIFromPath(filepath.Join(dir, filename)) +- } +- return nil, fmt.Errorf("chooseNewFileURI: exceeded retry limit") +-} +- +-// selectedToplevelDecls returns the lexical extent of the top-level +-// declarations enclosed by [start, end), along with the name of the +-// first declaration. The returned boolean reports whether the selection +-// should be offered a code action to extract the declarations. +-func selectedToplevelDecls(pgf *parsego.File, start, end token.Pos) (token.Pos, token.Pos, string, bool) { +- // selection cannot intersect a package declaration +- if posRangeIntersects(start, end, pgf.File.Package, pgf.File.Name.End()) { +- return 0, 0, "", false +- } +- firstName := "" +- for _, decl := range pgf.File.Decls { +- if posRangeIntersects(start, end, decl.Pos(), decl.End()) { +- var ( +- comment *ast.CommentGroup // (include comment preceding decl) +- id *ast.Ident +- ) +- switch decl := decl.(type) { +- case *ast.BadDecl: +- return 0, 0, "", false +- +- case *ast.FuncDecl: +- // if only selecting keyword "func" or function name, extend selection to the +- // whole function +- if posRangeContains(decl.Pos(), decl.Name.End(), start, end) { +- pgf.CheckNode(decl) // #70553 +- start, end = decl.Pos(), decl.End() +- // Inv: start, end are valid wrt pgf.Tok. 
+- } +- comment = decl.Doc +- id = decl.Name +- +- case *ast.GenDecl: +- // selection cannot intersect an import declaration +- if decl.Tok == token.IMPORT { +- return 0, 0, "", false +- } +- // if only selecting keyword "type", "const", or "var", extend selection to the +- // whole declaration +- if decl.Tok == token.TYPE && posRangeContains(decl.Pos(), decl.Pos()+token.Pos(len("type")), start, end) || +- decl.Tok == token.CONST && posRangeContains(decl.Pos(), decl.Pos()+token.Pos(len("const")), start, end) || +- decl.Tok == token.VAR && posRangeContains(decl.Pos(), decl.Pos()+token.Pos(len("var")), start, end) { +- pgf.CheckNode(decl) // #70553 +- start, end = decl.Pos(), decl.End() +- // Inv: start, end are valid wrt pgf.Tok. +- } +- comment = decl.Doc +- if len(decl.Specs) > 0 { +- switch spec := decl.Specs[0].(type) { +- case *ast.TypeSpec: +- id = spec.Name +- case *ast.ValueSpec: +- id = spec.Names[0] +- } +- } +- } +- // selection cannot partially intersect a node +- if !posRangeContains(start, end, decl.Pos(), decl.End()) { +- return 0, 0, "", false +- } +- if id != nil && firstName == "" { +- // may be "_" +- firstName = id.Name +- } +- if comment != nil && comment.Pos() < start { +- pgf.CheckNode(comment) // #70553 +- start = comment.Pos() +- // Inv: start is valid wrt pgf.Tok. +- } +- } +- } +- for _, comment := range pgf.File.Comments { +- if posRangeIntersects(start, end, comment.Pos(), comment.End()) { +- if !posRangeContains(start, end, comment.Pos(), comment.End()) { +- // selection cannot partially intersect a comment +- return 0, 0, "", false +- } +- } +- } +- if firstName == "" { +- return 0, 0, "", false +- } +- return start, end, firstName, true +-} +- +-// unparenthesizedImports returns a map from each unparenthesized ImportSpec +-// to its enclosing declaration (which may need to be deleted too). +-func unparenthesizedImports(pgf *parsego.File) map[*ast.ImportSpec]*ast.GenDecl { +- decls := make(map[*ast.ImportSpec]*ast.GenDecl) +- for _, decl := range pgf.File.Decls { +- if decl, ok := decl.(*ast.GenDecl); ok && decl.Tok == token.IMPORT && !decl.Lparen.IsValid() { +- decls[decl.Specs[0].(*ast.ImportSpec)] = decl +- } +- } +- return decls +-} +- +-// removeNode returns a TextEdit that removes the node. +-func removeNode(pgf *parsego.File, node ast.Node) protocol.TextEdit { +- rng, err := pgf.NodeRange(node) +- if err != nil { +- bug.Reportf("removeNode: %v", err) +- } +- return protocol.TextEdit{Range: rng, NewText: ""} +-} +- +-// posRangeIntersects checks if [a, b) and [c, d) intersects, assuming a <= b and c <= d. +-func posRangeIntersects(a, b, c, d token.Pos) bool { +- return !(b <= c || d <= a) +-} +- +-// posRangeContains checks if [a, b) contains [c, d), assuming a <= b and c <= d. +-func posRangeContains(a, b, c, d token.Pos) bool { +- return a <= c && d <= b +-} +diff -urN a/gopls/internal/golang/fix.go b/gopls/internal/golang/fix.go +--- a/gopls/internal/golang/fix.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/golang/fix.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,182 +0,0 @@ +-// Copyright 2020 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. 
+- +-package golang +- +-import ( +- "context" +- "fmt" +- "go/token" +- +- "golang.org/x/tools/go/analysis" +- "golang.org/x/tools/gopls/internal/analysis/fillstruct" +- "golang.org/x/tools/gopls/internal/analysis/unusedparams" +- "golang.org/x/tools/gopls/internal/cache" +- "golang.org/x/tools/gopls/internal/cache/parsego" +- "golang.org/x/tools/gopls/internal/file" +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/gopls/internal/util/bug" +-) +- +-// A fixer is a function that suggests a fix for a diagnostic produced +-// by the analysis framework. This is done outside of the analyzer Run +-// function so that the construction of expensive fixes can be +-// deferred until they are requested by the user. +-// +-// The actual diagnostic is not provided; only its position, as the +-// triple (pgf, start, end); the resulting SuggestedFix implicitly +-// relates to that file. +-// +-// The supplied token positions (start, end) must belong to +-// pkg.FileSet(), and the returned positions +-// (SuggestedFix.TextEdits[*].{Pos,End}) must belong to the returned +-// FileSet, which is not necessarily the same. +-// (See [insertDeclsAfter] for explanation.) +-// +-// A fixer may return (nil, nil) if no fix is available. +-type fixer func(ctx context.Context, s *cache.Snapshot, pkg *cache.Package, pgf *parsego.File, start, end token.Pos) (*token.FileSet, *analysis.SuggestedFix, error) +- +-// A singleFileFixer is a [fixer] that inspects only a single file. +-type singleFileFixer func(pkg *cache.Package, pgf *parsego.File, start, end token.Pos) (*token.FileSet, *analysis.SuggestedFix, error) +- +-// singleFile adapts a [singleFileFixer] to a [fixer] +-// by discarding the snapshot and the context it needs. +-func singleFile(fixer1 singleFileFixer) fixer { +- return func(_ context.Context, _ *cache.Snapshot, pkg *cache.Package, pgf *parsego.File, start, end token.Pos) (*token.FileSet, *analysis.SuggestedFix, error) { +- return fixer1(pkg, pgf, start, end) +- } +-} +- +-// Names of ApplyFix.Fix created directly by the CodeAction handler. +-const ( +- fixExtractVariable = "extract_variable" // (or constant) +- fixExtractVariableAll = "extract_variable_all" +- fixExtractFunction = "extract_function" +- fixExtractMethod = "extract_method" +- fixInlineCall = "inline_call" +- fixInlineVariable = "inline_variable" +- fixInvertIfCondition = "invert_if_condition" +- fixSplitLines = "split_lines" +- fixJoinLines = "join_lines" +- fixCreateUndeclared = "create_undeclared" +- fixMissingInterfaceMethods = "stub_missing_interface_method" +- fixMissingCalledFunction = "stub_missing_called_function" +-) +- +-// ApplyFix applies the specified kind of suggested fix to the given +-// file and range, returning the resulting changes. +-// +-// A fix kind is either the Category of an analysis.Diagnostic that +-// had a SuggestedFix with no edits; or the name of a fix agreed upon +-// by [CodeActions] and this function. +-// Fix kinds identify fixes in the command protocol. +-// +-// TODO(adonovan): come up with a better mechanism for registering the +-// connection between analyzers, code actions, and fixers. A flaw of +-// the current approach is that the same Category could in theory +-// apply to a Diagnostic with several lazy fixes, making them +-// impossible to distinguish. It would more precise if there was a +-// SuggestedFix.Category field, or some other way to squirrel metadata +-// in the fix. 
+-func ApplyFix(ctx context.Context, fix string, snapshot *cache.Snapshot, fh file.Handle, rng protocol.Range) ([]protocol.DocumentChange, error) { +- // This can't be expressed as an entry in the fixer table below +- // because it operates in the protocol (not go/{token,ast}) domain. +- // (Sigh; perhaps it was a mistake to factor out the +- // NarrowestPackageForFile/RangePos/suggestedFixToEdits +- // steps.) +- if fix == unusedparams.FixCategory { +- return removeParam(ctx, snapshot, fh, rng) +- } +- +- fixers := map[string]fixer{ +- // Fixes for analyzer-provided diagnostics. +- // These match the Diagnostic.Category. +- fillstruct.FixCategory: singleFile(fillstruct.SuggestedFix), +- +- // Ad-hoc fixers: these are used when the command is +- // constructed directly by logic in server/code_action. +- fixExtractFunction: singleFile(extractFunction), +- fixExtractMethod: singleFile(extractMethod), +- fixExtractVariable: singleFile(extractVariableOne), +- fixExtractVariableAll: singleFile(extractVariableAll), +- fixInlineCall: inlineCall, +- fixInlineVariable: singleFile(inlineVariableOne), +- fixInvertIfCondition: singleFile(invertIfCondition), +- fixSplitLines: singleFile(splitLines), +- fixJoinLines: singleFile(joinLines), +- fixCreateUndeclared: singleFile(createUndeclared), +- fixMissingInterfaceMethods: stubMissingInterfaceMethodsFixer, +- fixMissingCalledFunction: stubMissingCalledFunctionFixer, +- } +- fixer, ok := fixers[fix] +- if !ok { +- return nil, fmt.Errorf("no suggested fix function for %s", fix) +- } +- pkg, pgf, err := NarrowestPackageForFile(ctx, snapshot, fh.URI()) +- if err != nil { +- return nil, err +- } +- start, end, err := pgf.RangePos(rng) +- if err != nil { +- return nil, err +- } +- fixFset, suggestion, err := fixer(ctx, snapshot, pkg, pgf, start, end) +- if err != nil { +- return nil, err +- } +- if suggestion == nil { +- return nil, nil +- } +- return suggestedFixToDocumentChange(ctx, snapshot, fixFset, suggestion) +-} +- +-// suggestedFixToDocumentChange converts the suggestion's edits from analysis form into protocol form. +-func suggestedFixToDocumentChange(ctx context.Context, snapshot *cache.Snapshot, fset *token.FileSet, suggestion *analysis.SuggestedFix) ([]protocol.DocumentChange, error) { +- type fileInfo struct { +- fh file.Handle +- mapper *protocol.Mapper +- edits []protocol.TextEdit +- } +- files := make(map[protocol.DocumentURI]*fileInfo) +- for _, edit := range suggestion.TextEdits { +- tokFile := fset.File(edit.Pos) +- if tokFile == nil { +- return nil, bug.Errorf("no file for edit position") +- } +- end := edit.End +- if !end.IsValid() { +- end = edit.Pos +- } +- uri := protocol.URIFromPath(tokFile.Name()) +- info, ok := files[uri] +- if !ok { +- // First edit: create a mapper. 
+- fh, err := snapshot.ReadFile(ctx, uri) +- if err != nil { +- return nil, err +- } +- content, err := fh.Content() +- if err != nil { +- return nil, err +- } +- mapper := protocol.NewMapper(uri, content) +- info = &fileInfo{fh, mapper, nil} +- files[uri] = info +- } +- rng, err := info.mapper.PosRange(tokFile, edit.Pos, end) +- if err != nil { +- return nil, err +- } +- info.edits = append(info.edits, protocol.TextEdit{ +- Range: rng, +- NewText: string(edit.NewText), +- }) +- } +- var changes []protocol.DocumentChange +- for _, info := range files { +- change := protocol.DocumentChangeEdit(info.fh, info.edits) +- changes = append(changes, change) +- } +- return changes, nil +-} +diff -urN a/gopls/internal/golang/folding_range.go b/gopls/internal/golang/folding_range.go +--- a/gopls/internal/golang/folding_range.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/golang/folding_range.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,228 +0,0 @@ +-// Copyright 2019 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package golang +- +-import ( +- "cmp" +- "context" +- "go/ast" +- "go/token" +- "slices" +- "strings" +- +- "golang.org/x/tools/gopls/internal/cache" +- "golang.org/x/tools/gopls/internal/cache/parsego" +- "golang.org/x/tools/gopls/internal/file" +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/gopls/internal/util/bug" +- "golang.org/x/tools/gopls/internal/util/safetoken" +-) +- +-// FoldingRange gets all of the folding range for f. +-func FoldingRange(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle, lineFoldingOnly bool) ([]protocol.FoldingRange, error) { +- // TODO(suzmue): consider limiting the number of folding ranges returned, and +- // implement a way to prioritize folding ranges in that case. +- pgf, err := snapshot.ParseGo(ctx, fh, parsego.Full) +- if err != nil { +- return nil, err +- } +- +- // With parse errors, we wouldn't be able to produce accurate folding info. +- // LSP protocol (3.16) currently does not have a way to handle this case +- // (https://github.com/microsoft/language-server-protocol/issues/1200). +- // We cannot return an error either because we are afraid some editors +- // may not handle errors nicely. As a workaround, we now return an empty +- // result and let the client handle this case by double check the file +- // contents (i.e. if the file is not empty and the folding range result +- // is empty, raise an internal error). +- if pgf.ParseErr != nil { +- return nil, nil +- } +- +- // Get folding ranges for comments separately as they are not walked by ast.Inspect. +- ranges := commentsFoldingRange(pgf) +- +- // Walk the ast and collect folding ranges. +- filter := []ast.Node{ +- (*ast.BasicLit)(nil), +- (*ast.BlockStmt)(nil), +- (*ast.CallExpr)(nil), +- (*ast.CaseClause)(nil), +- (*ast.CommClause)(nil), +- (*ast.CompositeLit)(nil), +- (*ast.FieldList)(nil), +- (*ast.GenDecl)(nil), +- } +- for cur := range pgf.Cursor.Preorder(filter...) { +- var kind protocol.FoldingRangeKind +- // start and end define the range of content to fold away. +- var start, end token.Pos +- switch n := cur.Node().(type) { +- case *ast.BlockStmt: +- // Fold between positions of or lines between "{" and "}". +- start, end = bracketedFoldingRange(pgf, n.Lbrace, n.Rbrace, lineFoldingOnly) +- +- case *ast.CaseClause: +- // Fold from position of ":" to end. 
+- start, end = n.Colon+1, n.End() +- +- case *ast.CommClause: +- // Fold from position of ":" to end. +- start, end = n.Colon+1, n.End() +- +- case *ast.CallExpr: +- // Fold between positions of or lines between "(" and ")". +- start, end = bracketedFoldingRange(pgf, n.Lparen, n.Rparen, lineFoldingOnly) +- +- case *ast.FieldList: +- // Fold between positions of or lines between opening parenthesis/brace and closing parenthesis/brace. +- start, end = bracketedFoldingRange(pgf, n.Opening, n.Closing, lineFoldingOnly) +- +- case *ast.GenDecl: +- // If this is an import declaration, set the kind to be protocol.Imports. +- if n.Tok == token.IMPORT { +- kind = protocol.Imports +- } +- // Fold between positions of or lines between "(" and ")". +- start, end = bracketedFoldingRange(pgf, n.Lparen, n.Rparen, lineFoldingOnly) +- +- case *ast.BasicLit: +- // Fold raw string literals from position of "`" to position of "`". +- if n.Kind == token.STRING && len(n.Value) >= 2 && n.Value[0] == '`' && n.Value[len(n.Value)-1] == '`' { +- start, end = n.Pos(), n.End() +- } +- +- case *ast.CompositeLit: +- // Fold between positions of or lines between "{" and "}". +- start, end = bracketedFoldingRange(pgf, n.Lbrace, n.Rbrace, lineFoldingOnly) +- +- default: +- panic(n) +- } +- +- // Check that folding positions are valid. +- if !start.IsValid() || !end.IsValid() { +- continue +- } +- if start == end { +- // Nothing to fold. +- continue +- } +- // in line folding mode, do not fold if the start and end lines are the same. +- if lineFoldingOnly && safetoken.Line(pgf.Tok, start) == safetoken.Line(pgf.Tok, end) { +- continue +- } +- rng, err := pgf.PosRange(start, end) +- if err != nil { +- bug.Reportf("failed to create range: %s", err) // can't happen +- continue +- } +- ranges = append(ranges, foldingRange(kind, rng)) +- } +- +- // Sort by start position. +- slices.SortFunc(ranges, func(x, y protocol.FoldingRange) int { +- if d := cmp.Compare(*x.StartLine, *y.StartLine); d != 0 { +- return d +- } +- return cmp.Compare(*x.StartCharacter, *y.StartCharacter) +- }) +- +- return ranges, nil +-} +- +-// bracketedFoldingRange returns the folding range for nodes with parentheses/braces/brackets +-// that potentially can take up multiple lines. +-func bracketedFoldingRange(pgf *parsego.File, open, close token.Pos, lineFoldingOnly bool) (token.Pos, token.Pos) { +- if !open.IsValid() || !close.IsValid() { +- return token.NoPos, token.NoPos +- } +- if open+1 == close { +- // Nothing to fold: (), {} or []. +- return token.NoPos, token.NoPos +- } +- +- if !lineFoldingOnly { +- return open + 1, close +- } +- +- // Clients with "LineFoldingOnly" set to true can fold only full lines. +- // This is also checked in the caller. +- // +- // Clients that support folding ranges can display them in various ways +- // (e.g., how are folding ranges marked? is the final line displayed?). +- // The most common client +- // is vscode, which displays the first line followed by ..., and then does not +- // display any other lines in the range, but other clients might also display +- // final line of the range. For example, the following code +- // +- // var x = []string{"a", +- // "b", +- // "c" } +- // +- // can be folded (in vscode) to +- // +- // var x = []string{"a", ... +- // +- // or in some other client +- // +- // var x = []string{"a", ... 
+- // "c" } +- // +- // This code displays the final line containing ),},], but not the closing quote +- // of a multi-line string +- +- prevLineEnd := pgf.Tok.LineStart(safetoken.Line(pgf.Tok, close)) - 1 // there was a previous line +- if prevLineEnd <= open { // all the same line +- return token.NoPos, token.NoPos +- } +- return open + 1, prevLineEnd +-} +- +-// commentsFoldingRange returns the folding ranges for all comment blocks in file. +-// The folding range starts at the end of the first line of the comment block, and ends at the end of the +-// comment block and has kind protocol.Comment. +-func commentsFoldingRange(pgf *parsego.File) (comments []protocol.FoldingRange) { +- tokFile := pgf.Tok +- for _, commentGrp := range pgf.File.Comments { +- startGrpLine, endGrpLine := safetoken.Line(tokFile, commentGrp.Pos()), safetoken.Line(tokFile, commentGrp.End()) +- if startGrpLine == endGrpLine { +- // Don't fold single line comments. +- continue +- } +- +- firstComment := commentGrp.List[0] +- startPos, endLinePos := firstComment.Pos(), firstComment.End() +- startCmmntLine, endCmmntLine := safetoken.Line(tokFile, startPos), safetoken.Line(tokFile, endLinePos) +- if startCmmntLine != endCmmntLine { +- // If the first comment spans multiple lines, then we want to have the +- // folding range start at the end of the first line. +- endLinePos = token.Pos(int(startPos) + len(strings.Split(firstComment.Text, "\n")[0])) +- } +- rng, err := pgf.PosRange(endLinePos, commentGrp.End()) +- if err != nil { +- bug.Reportf("failed to create mapped range: %s", err) // can't happen +- continue +- } +- // Fold from the end of the first line comment to the end of the comment block. +- comments = append(comments, foldingRange(protocol.Comment, rng)) +- } +- return comments +-} +- +-func foldingRange(kind protocol.FoldingRangeKind, rng protocol.Range) protocol.FoldingRange { +- return protocol.FoldingRange{ +- // (I guess LSP doesn't use a protocol.Range here +- // because missing means something different from zero.) +- StartLine: varOf(rng.Start.Line), +- StartCharacter: varOf(rng.Start.Character), +- EndLine: varOf(rng.End.Line), +- EndCharacter: varOf(rng.End.Character), +- Kind: string(kind), +- } +-} +- +-// varOf returns a new variable whose value is x. +-func varOf[T any](x T) *T { return &x } +diff -urN a/gopls/internal/golang/format.go b/gopls/internal/golang/format.go +--- a/gopls/internal/golang/format.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/golang/format.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,357 +0,0 @@ +-// Copyright 2018 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-// Package golang defines the LSP features for navigation, analysis, +-// and refactoring of Go source code. 
+-package golang +- +-import ( +- "bytes" +- "context" +- "fmt" +- "go/ast" +- "go/format" +- "go/parser" +- "go/token" +- "strings" +- "text/scanner" +- +- "golang.org/x/tools/gopls/internal/cache" +- "golang.org/x/tools/gopls/internal/cache/parsego" +- "golang.org/x/tools/gopls/internal/file" +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/gopls/internal/settings" +- "golang.org/x/tools/gopls/internal/util/safetoken" +- "golang.org/x/tools/gopls/internal/util/tokeninternal" +- "golang.org/x/tools/internal/diff" +- "golang.org/x/tools/internal/event" +- "golang.org/x/tools/internal/imports" +- gofumptFormat "mvdan.cc/gofumpt/format" +-) +- +-// Format formats a file with a given range. +-func Format(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle) ([]protocol.TextEdit, error) { +- ctx, done := event.Start(ctx, "golang.Format") +- defer done() +- +- pgf, err := snapshot.ParseGo(ctx, fh, parsego.Full) +- if err != nil { +- return nil, err +- } +- +- // Even if this file has parse errors, it might still be possible to format it. +- // Using format.Node on an AST with errors may result in code being modified. +- // Attempt to format the source of this file instead. +- if pgf.ParseErr != nil { +- formatted, err := formatSource(ctx, fh) +- if err != nil { +- return nil, err +- } +- return computeTextEdits(ctx, pgf, string(formatted)) +- } +- +- // format.Node changes slightly from one release to another, so the version +- // of Go used to build the LSP server will determine how it formats code. +- // This should be acceptable for all users, who likely be prompted to rebuild +- // the LSP server on each Go release. +- buf := &bytes.Buffer{} +- fset := tokeninternal.FileSetFor(pgf.Tok) +- if err := format.Node(buf, fset, pgf.File); err != nil { +- return nil, err +- } +- formatted := buf.String() +- +- // Apply additional formatting, if any is supported. Currently, the only +- // supported additional formatter is gofumpt. +- if snapshot.Options().Gofumpt { +- // gofumpt can customize formatting based on language version and module +- // path, if available. +- // +- // Try to derive this information, but fall-back on the default behavior. +- // +- // TODO: under which circumstances can we fail to find module information? +- // Can this, for example, result in inconsistent formatting across saves, +- // due to pending calls to packages.Load? +- var opts gofumptFormat.Options +- meta, err := snapshot.NarrowestMetadataForFile(ctx, fh.URI()) +- if err == nil { +- if mi := meta.Module; mi != nil { +- if v := mi.GoVersion; v != "" { +- opts.LangVersion = "go" + v +- } +- opts.ModulePath = mi.Path +- } +- } +- b, err := gofumptFormat.Source(buf.Bytes(), opts) +- if err != nil { +- return nil, err +- } +- formatted = string(b) +- } +- return computeTextEdits(ctx, pgf, formatted) +-} +- +-func formatSource(ctx context.Context, fh file.Handle) ([]byte, error) { +- _, done := event.Start(ctx, "golang.formatSource") +- defer done() +- +- data, err := fh.Content() +- if err != nil { +- return nil, err +- } +- return format.Source(data) +-} +- +-type importFix struct { +- fix *imports.ImportFix +- edits []protocol.TextEdit +-} +- +-// allImportsFixes formats f for each possible fix to the imports. +-// In addition to returning the result of applying all edits, +-// it returns a list of fixes that could be applied to the file, with the +-// corresponding TextEdits that would be needed to apply that fix. 
+-func allImportsFixes(ctx context.Context, snapshot *cache.Snapshot, pgf *parsego.File) (allFixEdits []protocol.TextEdit, editsPerFix []*importFix, err error) { +- ctx, done := event.Start(ctx, "golang.allImportsFixes") +- defer done() +- +- if err := snapshot.RunProcessEnvFunc(ctx, func(ctx context.Context, opts *imports.Options) error { +- allFixEdits, editsPerFix, err = computeImportEdits(ctx, pgf, snapshot, opts) +- return err +- }); err != nil { +- return nil, nil, fmt.Errorf("allImportsFixes: %v", err) +- } +- return allFixEdits, editsPerFix, nil +-} +- +-// computeImportEdits computes a set of edits that perform one or all of the +-// necessary import fixes. +-func computeImportEdits(ctx context.Context, pgf *parsego.File, snapshot *cache.Snapshot, options *imports.Options) (allFixEdits []protocol.TextEdit, editsPerFix []*importFix, err error) { +- goroot := snapshot.View().Folder().Env.GOROOT +- filename := pgf.URI.Path() +- +- // Build up basic information about the original file. +- isource, err := imports.NewProcessEnvSource(options.Env, filename, pgf.File.Name.Name) +- if err != nil { +- return nil, nil, err +- } +- var source imports.Source +- +- // Keep this in sync with [cache.Session.createView] (see the TODO there: we +- // should factor out the handling of the ImportsSource setting). +- switch snapshot.Options().ImportsSource { +- case settings.ImportsSourceGopls: +- source = snapshot.NewGoplsSource(isource) +- case settings.ImportsSourceOff: // for cider, which has no file system +- source = nil +- case settings.ImportsSourceGoimports: +- source = isource +- } +- // imports require a current metadata graph +- // TODO(rfindley): improve the API +- snapshot.WorkspaceMetadata(ctx) // ignore error +- allFixes, err := imports.FixImports(ctx, filename, pgf.Src, goroot, options.Env.Logf, source) +- if err != nil { +- return nil, nil, err +- } +- +- allFixEdits, err = computeFixEdits(pgf.Src, options, allFixes) +- if err != nil { +- return nil, nil, err +- } +- +- // Apply all of the import fixes to the file. +- // Add the edits for each fix to the result. +- for _, fix := range allFixes { +- edits, err := computeFixEdits(pgf.Src, options, []*imports.ImportFix{fix}) +- if err != nil { +- return nil, nil, err +- } +- editsPerFix = append(editsPerFix, &importFix{ +- fix: fix, +- edits: edits, +- }) +- } +- return allFixEdits, editsPerFix, nil +-} +- +-// ComputeImportFixEdits returns text edits for a single import fix. +-func ComputeImportFixEdits(localPrefix string, src []byte, fixes ...*imports.ImportFix) ([]protocol.TextEdit, error) { +- options := &imports.Options{ +- LocalPrefix: localPrefix, +- // Defaults. +- AllErrors: true, +- Comments: true, +- Fragment: true, +- FormatOnly: false, +- TabIndent: true, +- TabWidth: 8, +- } +- return computeFixEdits(src, options, fixes) +-} +- +-func computeFixEdits(src []byte, options *imports.Options, fixes []*imports.ImportFix) ([]protocol.TextEdit, error) { +- // trim the original data to match fixedData +- left, err := importPrefix(src) +- if err != nil { +- return nil, err +- } +- extra := !strings.Contains(left, "\n") // one line may have more than imports +- if extra { +- left = string(src) +- } +- if len(left) > 0 && left[len(left)-1] != '\n' { +- left += "\n" +- } +- // Apply the fixes and re-parse the file so that we can locate the +- // new imports. 
+- flags := parser.ImportsOnly +- if extra { +- // used all of origData above, use all of it here too +- flags = 0 +- } +- fixedData, err := imports.ApplyFixes(fixes, "", src, options, flags) +- if err != nil { +- return nil, err +- } +- if fixedData == nil || fixedData[len(fixedData)-1] != '\n' { +- fixedData = append(fixedData, '\n') // ApplyFixes may miss the newline, go figure. +- } +- edits := diff.Strings(left, string(fixedData)) +- return protocolEditsFromSource([]byte(left), edits) +-} +- +-// importPrefix returns the prefix of the given file content through the final +-// import statement. If there are no imports, the prefix is the package +-// statement and any comment groups below it. +-func importPrefix(src []byte) (string, error) { +- fset := token.NewFileSet() +- // do as little parsing as possible +- f, err := parser.ParseFile(fset, "", src, parser.ImportsOnly|parser.ParseComments) +- if err != nil { // This can happen if 'package' is misspelled +- return "", fmt.Errorf("importPrefix: failed to parse: %s", err) +- } +- tok := fset.File(f.FileStart) +- var importEnd int +- for _, d := range f.Decls { +- if x, ok := d.(*ast.GenDecl); ok && x.Tok == token.IMPORT { +- if e, err := safetoken.Offset(tok, d.End()); err != nil { +- return "", fmt.Errorf("importPrefix: %s", err) +- } else if e > importEnd { +- importEnd = e +- } +- } +- } +- +- maybeAdjustToLineEnd := func(pos token.Pos, isCommentNode bool) int { +- offset, err := safetoken.Offset(tok, pos) +- if err != nil { +- return -1 +- } +- +- // Don't go past the end of the file. +- if offset > len(src) { +- offset = len(src) +- } +- // The go/ast package does not account for different line endings, and +- // specifically, in the text of a comment, it will strip out \r\n line +- // endings in favor of \n. To account for these differences, we try to +- // return a position on the next line whenever possible. +- switch line := safetoken.Line(tok, tok.Pos(offset)); { +- case line < tok.LineCount(): +- nextLineOffset, err := safetoken.Offset(tok, tok.LineStart(line+1)) +- if err != nil { +- return -1 +- } +- // If we found a position that is at the end of a line, move the +- // offset to the start of the next line. +- if offset+1 == nextLineOffset { +- offset = nextLineOffset +- } +- case isCommentNode, offset+1 == tok.Size(): +- // If the last line of the file is a comment, or we are at the end +- // of the file, the prefix is the entire file. +- offset = len(src) +- } +- return offset +- } +- if importEnd == 0 { +- pkgEnd := f.Name.End() +- importEnd = maybeAdjustToLineEnd(pkgEnd, false) +- } +- for _, cgroup := range f.Comments { +- for _, c := range cgroup.List { +- if end, err := safetoken.Offset(tok, c.End()); err != nil { +- return "", err +- } else if end > importEnd { +- startLine := safetoken.Position(tok, c.Pos()).Line +- endLine := safetoken.Position(tok, c.End()).Line +- +- // Work around golang/go#41197 by checking if the comment might +- // contain "\r", and if so, find the actual end position of the +- // comment by scanning the content of the file. 
+- startOffset, err := safetoken.Offset(tok, c.Pos()) +- if err != nil { +- return "", err +- } +- if startLine != endLine && bytes.Contains(src[startOffset:], []byte("\r")) { +- if commentEnd := scanForCommentEnd(src[startOffset:]); commentEnd > 0 { +- end = startOffset + commentEnd +- } +- } +- importEnd = maybeAdjustToLineEnd(tok.Pos(end), true) +- } +- } +- } +- if importEnd > len(src) { +- importEnd = len(src) +- } +- return string(src[:importEnd]), nil +-} +- +-// scanForCommentEnd returns the offset of the end of the multi-line comment +-// at the start of the given byte slice. +-func scanForCommentEnd(src []byte) int { +- var s scanner.Scanner +- s.Init(bytes.NewReader(src)) +- s.Mode ^= scanner.SkipComments +- +- t := s.Scan() +- if t == scanner.Comment { +- return s.Pos().Offset +- } +- return 0 +-} +- +-func computeTextEdits(ctx context.Context, pgf *parsego.File, formatted string) ([]protocol.TextEdit, error) { +- _, done := event.Start(ctx, "golang.computeTextEdits") +- defer done() +- +- edits := diff.Strings(string(pgf.Src), formatted) +- return protocol.EditsFromDiffEdits(pgf.Mapper, edits) +-} +- +-// protocolEditsFromSource converts text edits to LSP edits using the original +-// source. +-func protocolEditsFromSource(src []byte, edits []diff.Edit) ([]protocol.TextEdit, error) { +- m := protocol.NewMapper("", src) +- var result []protocol.TextEdit +- for _, edit := range edits { +- rng, err := m.OffsetRange(edit.Start, edit.End) +- if err != nil { +- return nil, err +- } +- +- if rng.Start == rng.End && edit.New == "" { +- // Degenerate case, which may result from a diff tool wanting to delete +- // '\r' in line endings. Filter it out. +- continue +- } +- result = append(result, protocol.TextEdit{ +- Range: rng, +- NewText: edit.New, +- }) +- } +- return result, nil +-} +diff -urN a/gopls/internal/golang/format_test.go b/gopls/internal/golang/format_test.go +--- a/gopls/internal/golang/format_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/golang/format_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,75 +0,0 @@ +-// Copyright 2020 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. 
+- +-package golang +- +-import ( +- "strings" +- "testing" +- +- "golang.org/x/tools/gopls/internal/test/compare" +-) +- +-func TestImportPrefix(t *testing.T) { +- for i, tt := range []struct { +- input, want string +- }{ +- {"package foo", "package foo"}, +- {"package foo\n", "package foo\n"}, +- {"package foo\n\nfunc f(){}\n", "package foo\n"}, +- {"package foo\n\nimport \"fmt\"\n", "package foo\n\nimport \"fmt\""}, +- {"package foo\nimport (\n\"fmt\"\n)\n", "package foo\nimport (\n\"fmt\"\n)"}, +- {"\n\n\npackage foo\n", "\n\n\npackage foo\n"}, +- {"// hi \n\npackage foo //xx\nfunc _(){}\n", "// hi \n\npackage foo //xx\n"}, +- {"package foo //hi\n", "package foo //hi\n"}, +- {"//hi\npackage foo\n//a\n\n//b\n", "//hi\npackage foo\n//a\n\n//b\n"}, +- { +- "package a\n\nimport (\n \"fmt\"\n)\n//hi\n", +- "package a\n\nimport (\n \"fmt\"\n)\n//hi\n", +- }, +- {`package a /*hi*/`, `package a /*hi*/`}, +- {"package main\r\n\r\nimport \"go/types\"\r\n\r\n/*\r\n\r\n */\r\n", "package main\r\n\r\nimport \"go/types\"\r\n\r\n/*\r\n\r\n */\r\n"}, +- {"package x; import \"os\"; func f() {}\n\n", "package x; import \"os\""}, +- {"package x; func f() {fmt.Println()}\n\n", "package x"}, +- } { +- got, err := importPrefix([]byte(tt.input)) +- if err != nil { +- t.Fatal(err) +- } +- if d := compare.Text(tt.want, got); d != "" { +- t.Errorf("%d: failed for %q:\n%s", i, tt.input, d) +- } +- } +-} +- +-func TestCRLFFile(t *testing.T) { +- for i, tt := range []struct { +- input, want string +- }{ +- { +- input: `package main +- +-/* +-Hi description +-*/ +-func Hi() { +-} +-`, +- want: `package main +- +-/* +-Hi description +-*/`, +- }, +- } { +- got, err := importPrefix([]byte(strings.ReplaceAll(tt.input, "\n", "\r\n"))) +- if err != nil { +- t.Fatal(err) +- } +- want := strings.ReplaceAll(tt.want, "\n", "\r\n") +- if d := compare.Text(want, got); d != "" { +- t.Errorf("%d: failed for %q:\n%s", i, tt.input, d) +- } +- } +-} +diff -urN a/gopls/internal/golang/freesymbols.go b/gopls/internal/golang/freesymbols.go +--- a/gopls/internal/golang/freesymbols.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/golang/freesymbols.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,420 +0,0 @@ +-// Copyright 2024 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package golang +- +-// This file implements the "Browse free symbols" code action. +- +-import ( +- "bytes" +- "fmt" +- "go/ast" +- "go/token" +- "go/types" +- "html" +- "slices" +- "sort" +- "strings" +- +- "golang.org/x/tools/go/ast/astutil" +- "golang.org/x/tools/gopls/internal/cache" +- "golang.org/x/tools/gopls/internal/cache/metadata" +- "golang.org/x/tools/gopls/internal/cache/parsego" +- "golang.org/x/tools/gopls/internal/util/moremaps" +- "golang.org/x/tools/gopls/internal/util/safetoken" +- "golang.org/x/tools/internal/typesinternal" +-) +- +-// FreeSymbolsHTML returns an HTML document containing the report of +-// free symbols referenced by the selection. +-func FreeSymbolsHTML(viewID string, pkg *cache.Package, pgf *parsego.File, start, end token.Pos, web Web) []byte { +- +- // Compute free references. 
+- refs := freeRefs(pkg.Types(), pkg.TypesInfo(), pgf.File, start, end) +- +- // -- model -- +- +- type Import struct { +- Path metadata.PackagePath +- Symbols []string +- } +- type Symbol struct { +- Kind string +- Type string +- Refs []types.Object +- } +- var model struct { +- Imported []Import +- PkgLevel []Symbol +- Local []Symbol +- } +- +- qualifier := typesinternal.NameRelativeTo(pkg.Types()) +- +- // Populate model. +- { +- // List the refs in order of dotted paths. +- sort.Slice(refs, func(i, j int) bool { +- return refs[i].dotted < refs[j].dotted +- }) +- +- // Inspect the references. +- imported := make(map[string][]*freeRef) // refs to imported symbols, by package path +- seen := make(map[string]bool) // to de-dup dotted paths +- for _, ref := range refs { +- if seen[ref.dotted] { +- continue // de-dup +- } +- seen[ref.dotted] = true +- +- var symbols *[]Symbol +- switch ref.scope { +- case "file": +- // imported symbol: group by package +- if pkgname, ok := ref.objects[0].(*types.PkgName); ok { +- path := pkgname.Imported().Path() +- imported[path] = append(imported[path], ref) +- } +- continue +- case "pkg": +- symbols = &model.PkgLevel +- case "local": +- symbols = &model.Local +- default: +- panic(ref.scope) +- } +- +- // Package and local symbols are presented the same way. +- // We treat each dotted path x.y.z as a separate entity. +- +- // Compute kind and type of last object (y in obj.x.y). +- typestr := " " + types.TypeString(ref.typ, qualifier) +- var kind string +- switch obj := ref.objects[len(ref.objects)-1].(type) { +- case *types.Var: +- kind = "var" +- case *types.Func: +- kind = "func" +- case *types.TypeName: +- if is[*types.TypeParam](obj.Type()) { +- kind = "type parameter" +- } else { +- kind = "type" +- } +- typestr = "" // avoid "type T T" +- case *types.Const: +- kind = "const" +- case *types.Label: +- kind = "label" +- typestr = "" // avoid "label L L" +- } +- +- *symbols = append(*symbols, Symbol{ +- Kind: kind, +- Type: typestr, +- Refs: ref.objects, +- }) +- } +- +- // Imported symbols. +- // Produce one record per package, with a list of symbols. +- for pkgPath, refs := range moremaps.Sorted(imported) { +- var syms []string +- for _, ref := range refs { +- // strip package name (bytes.Buffer.Len -> Buffer.Len) +- syms = append(syms, ref.dotted[len(ref.objects[0].Name())+len("."):]) +- } +- sort.Strings(syms) +- const max = 4 +- if len(syms) > max { +- syms[max-1] = fmt.Sprintf("... (%d)", len(syms)) +- syms = syms[:max] +- } +- +- model.Imported = append(model.Imported, Import{ +- Path: PackagePath(pkgPath), +- Symbols: syms, +- }) +- } +- } +- +- // -- presentation -- +- +- var buf bytes.Buffer +- buf.WriteString(` +- +- +- +- +- +- +- +-

+-<h1>Free symbols</h1>
+-<p>
+-  The selected code contains references to these free* symbols:
+-</p>
+-`)
+-
+-	// Present the refs in three sections: imported, same package, local.
+-
+-	// -- imported symbols --
+-
+-	// Show one item per package, with a list of symbols.
+-	fmt.Fprintf(&buf, "<h2>Imported symbols</h2>\n")
+-	fmt.Fprintf(&buf, "<ul>\n")
+-	for _, imp := range model.Imported {
+-		fmt.Fprintf(&buf, "<li>import \"<a href='%s'>%s</a>\" // for %s</li>\n",
+-			web.PkgURL(viewID, imp.Path, ""),
+-			html.EscapeString(string(imp.Path)),
+-			strings.Join(imp.Symbols, ", "))
+-	}
+-	if len(model.Imported) == 0 {
+-		fmt.Fprintf(&buf, "<li>(none)</li>\n")
+-	}
+-	buf.WriteString("</ul>\n")
+-
+-	// -- package and local symbols --
+-
+-	showSymbols := func(scope, title string, symbols []Symbol) {
+-		fmt.Fprintf(&buf, "<h2 id='%s'>%s</h2>\n", scope, title)
+-		fmt.Fprintf(&buf, "<ul>\n")
+-		pre := buf.Len()
+-		for _, sym := range symbols {
+-			fmt.Fprintf(&buf, "<li>%s ", sym.Kind) // of rightmost symbol in dotted path
+-			for i, obj := range sym.Refs {
+-				if i > 0 {
+-					buf.WriteByte('.')
+-				}
+-				buf.WriteString(objHTML(pkg.FileSet(), web, obj))
+-			}
+-			fmt.Fprintf(&buf, " %s</li>\n", html.EscapeString(sym.Type))
+-		}
+-		if buf.Len() == pre {
+-			fmt.Fprintf(&buf, "<li>(none)</li>\n")
+-		}
+-		buf.WriteString("</ul>\n")
+-	}
+-	showSymbols("pkg", "Package-level symbols", model.PkgLevel)
+-	showSymbols("local", "Local symbols", model.Local)
+-
+-	// -- code selection --
+-
+-	// Print the selection, highlighting references to free symbols.
+-	buf.WriteString("<hr/>\n")
+-	sort.Slice(refs, func(i, j int) bool {
+-		return refs[i].expr.Pos() < refs[j].expr.Pos()
+-	})
+-	pos := start
+-	emitTo := func(end token.Pos) {
+-		if pos < end {
+-			fileStart := pgf.File.FileStart
+-			text := pgf.Mapper.Content[pos-fileStart : end-fileStart]
+-			buf.WriteString(html.EscapeString(string(text)))
+-			pos = end
+-		}
+-	}
+-	buf.WriteString(`<pre>`)
+-	for _, ref := range refs {
+-		emitTo(ref.expr.Pos())
+-		fmt.Fprintf(&buf, `<span class='%s'>`, ref.scope)
+-		emitTo(ref.expr.End())
+-		buf.WriteString(`</span>`)
+-	}
+-	emitTo(end)
+-	buf.WriteString(`</pre>
+-<hr>
+-<p>
+-  *A symbol is "free" if it is referenced within the selection but declared
+-  outside of it.
+-
+-  The free variables are approximately the set of parameters that
+-  would be needed if the block were extracted into its own function in
+-  the same package.
+-
+-  Free identifiers may include local types and control labels as well.
+-
+-  Even when you don't intend to extract a block into a new function,
+-  this information can help you to tell at a glance what names a block
+-  of code depends on.
+-</p>
+-<p>
+-  Each dotted path of identifiers (such as file.Name.Pos) is reported
+-  as a separate item, so that you can see which parts of a complex
+-  type are actually needed.
+-
+-  The free symbols referenced by the body of a function may
+-  reveal that only a small part (a single field of a struct, say) of
+-  one of the function's parameters is used, allowing you to simplify
+-  and generalize the function by choosing a different type for that
+-  parameter.
+-</p>

    +-`) +- return buf.Bytes() +-} +- +-// A freeRef records a reference to a dotted path obj.x.y, +-// where obj (=objects[0]) is a free symbol. +-type freeRef struct { +- objects []types.Object // [obj x y] +- dotted string // "obj.x.y" (used as sort key) +- scope string // scope of obj: pkg|file|local +- expr ast.Expr // =*Ident|*SelectorExpr +- typ types.Type // type of obj.x.y +-} +- +-// freeRefs returns the list of references to free symbols (from +-// within the selection to a symbol declared outside of it). +-// It uses only info.{Scopes,Types,Uses}. +-func freeRefs(pkg *types.Package, info *types.Info, file *ast.File, start, end token.Pos) []*freeRef { +- // Keep us honest about which fields we access. +- info = &types.Info{ +- Scopes: info.Scopes, +- Types: info.Types, +- Uses: info.Uses, +- } +- +- fileScope := info.Scopes[file] +- pkgScope := fileScope.Parent() +- +- // id is called for the leftmost id x in each dotted chain such as (x.y).z. +- // suffix is the reversed suffix of selections (e.g. [z y]). +- id := func(n *ast.Ident, suffix []types.Object) *freeRef { +- obj := info.Uses[n] +- if obj == nil { +- return nil // not a reference +- } +- if start <= obj.Pos() && obj.Pos() < end { +- return nil // defined within selection => not free +- } +- parent := obj.Parent() +- +- // Compute dotted path. +- objects := append(suffix, obj) +- if obj.Pkg() != nil && obj.Pkg() != pkg && typesinternal.IsPackageLevel(obj) { // dot import +- // Synthesize the implicit PkgName. +- pkgName := types.NewPkgName(token.NoPos, pkg, obj.Pkg().Name(), obj.Pkg()) +- parent = fileScope +- objects = append(objects, pkgName) +- } +- slices.Reverse(objects) +- var dotted strings.Builder +- for i, obj := range objects { +- if obj == nil { +- return nil // type error +- } +- if i > 0 { +- dotted.WriteByte('.') +- } +- dotted.WriteString(obj.Name()) +- } +- +- // Compute scope of base object. +- var scope string +- switch parent { +- case nil: +- return nil // interface method or struct field +- case types.Universe: +- return nil // built-in (not interesting) +- case fileScope: +- scope = "file" // defined at file scope (imported package) +- case pkgScope: +- scope = "pkg" // defined at package level +- default: +- scope = "local" // defined within current function +- } +- +- return &freeRef{ +- objects: objects, +- dotted: dotted.String(), +- scope: scope, +- } +- } +- +- // sel(x.y.z, []) calls sel(x.y, [z]) calls id(x, [z, y]). +- sel := func(sel *ast.SelectorExpr, suffix []types.Object) *freeRef { +- for { +- suffix = append(suffix, info.Uses[sel.Sel]) +- +- switch x := ast.Unparen(sel.X).(type) { +- case *ast.Ident: +- return id(x, suffix) +- default: +- return nil +- case *ast.SelectorExpr: +- sel = x +- } +- } +- } +- +- // Visit all the identifiers in the selected ASTs. +- var free []*freeRef +- path, _ := astutil.PathEnclosingInterval(file, start, end) +- var visit func(n ast.Node) bool +- visit = func(n ast.Node) bool { +- // Is this node contained within the selection? +- // (freesymbols permits inexact selections, +- // like two stmts in a block.) +- if n != nil && start <= n.Pos() && n.End() <= end { +- var ref *freeRef +- switch n := n.(type) { +- case *ast.Ident: +- ref = id(n, nil) +- case *ast.SelectorExpr: +- ref = sel(n, nil) +- } +- +- if ref != nil { +- ref.expr = n.(ast.Expr) +- if tv, ok := info.Types[ref.expr]; ok { +- ref.typ = tv.Type +- } else { +- ref.typ = types.Typ[types.Invalid] +- } +- free = append(free, ref) +- } +- +- // After visiting x.sel, don't descend into sel. 
+- // Descend into x only if we didn't get a ref for x.sel. +- if sel, ok := n.(*ast.SelectorExpr); ok { +- if ref == nil { +- ast.Inspect(sel.X, visit) +- } +- return false +- } +- } +- +- return true // descend +- } +- ast.Inspect(path[0], visit) +- return free +-} +- +-// objHTML returns HTML for obj.Name(), possibly marked up as a link +-// to the web server that, when visited, opens the declaration in the +-// client editor. +-func objHTML(fset *token.FileSet, web Web, obj types.Object) string { +- text := obj.Name() +- if posn := safetoken.StartPosition(fset, obj.Pos()); posn.IsValid() { +- url := web.SrcURL(posn.Filename, posn.Line, posn.Column) +- return sourceLink(text, url) +- } +- return text +-} +- +-// sourceLink returns HTML for a link to open a file in the client editor. +-func sourceLink(text, url string) string { +- // The /src URL returns nothing but has the side effect +- // of causing the LSP client to open the requested file. +- // So we use onclick to prevent the browser from navigating. +- // We keep the href attribute as it causes the to render +- // as a link: blue, underlined, with URL hover information. +- return fmt.Sprintf(`%[2]s`, +- html.EscapeString(url), text) +-} +diff -urN a/gopls/internal/golang/freesymbols_test.go b/gopls/internal/golang/freesymbols_test.go +--- a/gopls/internal/golang/freesymbols_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/golang/freesymbols_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,132 +0,0 @@ +-// Copyright 2024 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package golang +- +-import ( +- "fmt" +- "go/ast" +- "go/importer" +- "go/parser" +- "go/token" +- "go/types" +- "reflect" +- "runtime" +- "strings" +- "testing" +- +- "github.com/google/go-cmp/cmp" +-) +- +-// TestFreeRefs is a unit test of the free-references algorithm. +-func TestFreeRefs(t *testing.T) { +- if runtime.GOOS == "js" || runtime.GOARCH == "wasm" { +- t.Skip("some test imports are unsupported on js or wasm") +- } +- +- for i, test := range []struct { +- src string +- want []string // expected list of "scope kind dotted-path" triples +- }{ +- { +- // basic example (has a "cannot infer" type error) +- `package p; func f[T ~int](x any) { var y T; « f(x.(T) + y) » }`, +- []string{"pkg func f", "local var x", "local typename T", "local var y"}, +- }, +- { +- // selection need not be tree-aligned +- `package p; type T int; type U « T; func _(x U) »`, +- []string{"pkg typename T", "pkg typename U"}, +- }, +- { +- // imported symbols +- `package p; import "fmt"; func f() { « var x fmt.Stringer » }`, +- []string{"file pkgname fmt.Stringer"}, +- }, +- { +- // unsafe and error, our old nemeses +- `package p; import "unsafe"; var ( « _ unsafe.Pointer; _ = error(nil).Error »; )`, +- []string{"file pkgname unsafe.Pointer"}, +- }, +- { +- // two attributes of a var, but not the var itself +- `package p; import "bytes"; func _(buf bytes.Buffer) { « buf.WriteByte(0); buf.WriteString(""); » }`, +- []string{"local var buf.WriteByte", "local var buf.WriteString"}, +- }, +- { +- // dot imports (an edge case) +- `package p; import . 
"errors"; var _ = « New»`, +- []string{"file pkgname errors.New"}, +- }, +- { +- // struct field (regression test for overzealous dot import logic) +- `package p; import "net/url"; var _ = «url.URL{Host: ""}»`, +- []string{"file pkgname url.URL"}, +- }, +- { +- // dot imports (another regression test of same) +- `package p; import . "net/url"; var _ = «URL{Host: ""}»`, +- []string{"file pkgname url.URL"}, +- }, +- { +- // dot import of unsafe (a corner case) +- `package p; import . "unsafe"; var _ « Pointer»`, +- []string{"file pkgname unsafe.Pointer"}, +- }, +- { +- // dotted path +- `package p; import "go/build"; var _ = « build.Default.GOOS »`, +- []string{"file pkgname build.Default.GOOS"}, +- }, +- { +- // type error +- `package p; import "nope"; var _ = « nope.nope.nope »`, +- []string{"file pkgname nope"}, +- }, +- } { +- name := fmt.Sprintf("file%d.go", i) +- t.Run(name, func(t *testing.T) { +- fset := token.NewFileSet() +- startOffset := strings.Index(test.src, "«") +- endOffset := strings.Index(test.src, "»") +- if startOffset < 0 || endOffset < startOffset { +- t.Fatalf("invalid «...» selection (%d:%d)", startOffset, endOffset) +- } +- src := test.src[:startOffset] + +- " " + +- test.src[startOffset+len("«"):endOffset] + +- " " + +- test.src[endOffset+len("»"):] +- f, err := parser.ParseFile(fset, name, src, parser.SkipObjectResolution) +- if err != nil { +- t.Fatal(err) +- } +- conf := &types.Config{ +- Importer: importer.Default(), +- Error: func(err error) { t.Log(err) }, // not fatal +- } +- info := &types.Info{ +- Uses: make(map[*ast.Ident]types.Object), +- Scopes: make(map[ast.Node]*types.Scope), +- Types: make(map[ast.Expr]types.TypeAndValue), +- } +- pkg, _ := conf.Check(f.Name.Name, fset, []*ast.File{f}, info) // ignore errors +- tf := fset.File(f.Package) +- refs := freeRefs(pkg, info, f, tf.Pos(startOffset), tf.Pos(endOffset)) +- +- kind := func(obj types.Object) string { // e.g. "var", "const" +- return strings.ToLower(reflect.TypeOf(obj).Elem().Name()) +- } +- +- var got []string +- for _, ref := range refs { +- msg := ref.scope + " " + kind(ref.objects[0]) + " " + ref.dotted +- got = append(got, msg) +- } +- if diff := cmp.Diff(test.want, got); diff != "" { +- t.Errorf("(-want +got)\n%s", diff) +- } +- }) +- } +-} +diff -urN a/gopls/internal/golang/highlight.go b/gopls/internal/golang/highlight.go +--- a/gopls/internal/golang/highlight.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/golang/highlight.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,744 +0,0 @@ +-// Copyright 2019 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package golang +- +-import ( +- "context" +- "fmt" +- "go/ast" +- "go/token" +- "go/types" +- "strconv" +- "strings" +- +- astutil "golang.org/x/tools/go/ast/astutil" +- "golang.org/x/tools/gopls/internal/cache" +- "golang.org/x/tools/gopls/internal/file" +- "golang.org/x/tools/gopls/internal/protocol" +- internalastutil "golang.org/x/tools/internal/astutil" +- "golang.org/x/tools/internal/event" +- "golang.org/x/tools/internal/fmtstr" +-) +- +-func Highlight(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle, position protocol.Position) ([]protocol.DocumentHighlight, error) { +- ctx, done := event.Start(ctx, "golang.Highlight") +- defer done() +- +- // We always want fully parsed files for highlight, regardless +- // of whether the file belongs to a workspace package. 
+- pkg, pgf, err := NarrowestPackageForFile(ctx, snapshot, fh.URI()) +- if err != nil { +- return nil, fmt.Errorf("getting package for Highlight: %w", err) +- } +- +- pos, err := pgf.PositionPos(position) +- if err != nil { +- return nil, err +- } +- path, _ := astutil.PathEnclosingInterval(pgf.File, pos, pos) +- if len(path) == 0 { +- return nil, fmt.Errorf("no enclosing position found for %v:%v", position.Line, position.Character) +- } +- // If start == end for astutil.PathEnclosingInterval, the 1-char interval +- // following start is used instead. As a result, we might not get an exact +- // match so we should check the 1-char interval to the left of the passed +- // in position to see if that is an exact match. +- if _, ok := path[0].(*ast.Ident); !ok { +- if p, _ := astutil.PathEnclosingInterval(pgf.File, pos-1, pos-1); p != nil { +- switch p[0].(type) { +- case *ast.Ident, *ast.SelectorExpr: +- path = p // use preceding ident/selector +- } +- } +- } +- result, err := highlightPath(pkg.TypesInfo(), path, pos) +- if err != nil { +- return nil, err +- } +- var ranges []protocol.DocumentHighlight +- for rng, kind := range result { +- rng, err := pgf.PosRange(rng.start, rng.end) +- if err != nil { +- return nil, err +- } +- ranges = append(ranges, protocol.DocumentHighlight{ +- Range: rng, +- Kind: kind, +- }) +- } +- return ranges, nil +-} +- +-// highlightPath returns ranges to highlight for the given enclosing path, +-// which should be the result of astutil.PathEnclosingInterval. +-func highlightPath(info *types.Info, path []ast.Node, pos token.Pos) (map[posRange]protocol.DocumentHighlightKind, error) { +- result := make(map[posRange]protocol.DocumentHighlightKind) +- +- // Inside a call to a printf-like function (as identified +- // by a simple heuristic). +- // Treat each corresponding ("%v", arg) pair as a highlight class. +- for _, node := range path { +- if call, ok := node.(*ast.CallExpr); ok { +- lit, idx := formatStringAndIndex(info, call) +- if idx != -1 { +- highlightPrintf(call, idx, pos, lit, result) +- } +- } +- } +- +- file := path[len(path)-1].(*ast.File) +- switch node := path[0].(type) { +- case *ast.BasicLit: +- // Import path string literal? +- if len(path) > 1 { +- if imp, ok := path[1].(*ast.ImportSpec); ok { +- highlight := func(n ast.Node) { +- highlightNode(result, n, protocol.Text) +- } +- +- // Highlight the import itself... +- highlight(imp) +- +- // ...and all references to it in the file. +- if pkgname := info.PkgNameOf(imp); pkgname != nil { +- ast.Inspect(file, func(n ast.Node) bool { +- if id, ok := n.(*ast.Ident); ok && +- info.Uses[id] == pkgname { +- highlight(id) +- } +- return true +- }) +- } +- return result, nil +- } +- } +- highlightFuncControlFlow(path, result) +- case *ast.ReturnStmt, *ast.FuncDecl, *ast.FuncType: +- highlightFuncControlFlow(path, result) +- case *ast.Ident: +- // Check if ident is inside return or func decl. +- highlightFuncControlFlow(path, result) +- highlightIdentifier(node, file, info, result) +- case *ast.ForStmt, *ast.RangeStmt: +- highlightLoopControlFlow(path, info, result) +- case *ast.SwitchStmt, *ast.TypeSwitchStmt: +- highlightSwitchFlow(path, info, result) +- case *ast.BranchStmt: +- // BREAK can exit a loop, switch or select, while CONTINUE exit a loop so +- // these need to be handled separately. They can also be embedded in any +- // other loop/switch/select if they have a label. TODO: add support for +- // GOTO and FALLTHROUGH as well. 
+- switch node.Tok { +- case token.BREAK: +- if node.Label != nil { +- highlightLabeledFlow(path, info, node, result) +- } else { +- highlightUnlabeledBreakFlow(path, info, result) +- } +- case token.CONTINUE: +- if node.Label != nil { +- highlightLabeledFlow(path, info, node, result) +- } else { +- highlightLoopControlFlow(path, info, result) +- } +- } +- } +- +- return result, nil +-} +- +-// formatStringAndIndex returns the BasicLit and index of the BasicLit (the last +-// non-variadic parameter) within the given printf-like call +-// expression, returns -1 as index if unknown. +-func formatStringAndIndex(info *types.Info, call *ast.CallExpr) (*ast.BasicLit, int) { +- typ := info.Types[call.Fun].Type +- if typ == nil { +- return nil, -1 // missing type +- } +- sig, ok := typ.(*types.Signature) +- if !ok { +- return nil, -1 // ill-typed +- } +- if !sig.Variadic() { +- // Skip checking non-variadic functions. +- return nil, -1 +- } +- idx := sig.Params().Len() - 2 +- if !(0 <= idx && idx < len(call.Args)) { +- // Skip checking functions without a format string parameter, or +- // missing the corresponding format argument. +- return nil, -1 +- } +- // We only care about literal format strings, so fmt.Sprint("a"+"b%s", "bar") won't be highlighted. +- if lit, ok := call.Args[idx].(*ast.BasicLit); ok && lit.Kind == token.STRING { +- return lit, idx +- } +- return nil, -1 +-} +- +-// highlightPrintf highlights operations in a format string and their corresponding +-// variadic arguments in a (possible) printf-style function call. +-// For example: +-// +-// fmt.Printf("Hello %s, you scored %d", name, score) +-// +-// If the cursor is on %s or name, it will highlight %s as a write operation, +-// and name as a read operation. +-func highlightPrintf(call *ast.CallExpr, idx int, cursorPos token.Pos, lit *ast.BasicLit, result map[posRange]protocol.DocumentHighlightKind) { +- format, err := strconv.Unquote(lit.Value) +- if err != nil { +- return +- } +- if !strings.Contains(format, "%") { +- return +- } +- operations, err := fmtstr.Parse(format, idx) +- if err != nil { +- return +- } +- +- // fmt.Printf("%[1]d %[1].2d", 3) +- // +- // When cursor is in `%[1]d`, we record `3` being successfully highlighted. +- // And because we will also record `%[1].2d`'s corresponding arguments index is `3` +- // in `visited`, even though it will not highlight any item in the first pass, +- // in the second pass we can correctly highlight it. So the three are the same class. +- succeededArg := 0 +- visited := make(map[posRange]int, 0) +- +- // highlightPair highlights the operation and its potential argument pair if the cursor is within either range. +- highlightPair := func(rang fmtstr.Range, argIndex int) { +- rangeStart, rangeEnd, err := internalastutil.RangeInStringLiteral(lit, rang.Start, rang.End) +- if err != nil { +- return +- } +- visited[posRange{rangeStart, rangeEnd}] = argIndex +- +- var arg ast.Expr +- if argIndex < len(call.Args) { +- arg = call.Args[argIndex] +- } +- +- // cursorPos can't equal to end position, otherwise the two +- // neighborhood such as (%[2]*d) are both highlighted if cursor in "d" (ending of [2]*). 
+- if rangeStart <= cursorPos && cursorPos < rangeEnd || +- arg != nil && internalastutil.NodeContains(arg, cursorPos) { +- highlightRange(result, rangeStart, rangeEnd, protocol.Write) +- if arg != nil { +- succeededArg = argIndex +- highlightRange(result, arg.Pos(), arg.End(), protocol.Read) +- } +- } +- } +- +- for _, op := range operations { +- // If width or prec has any *, we can not highlight the full range from % to verb, +- // because it will overlap with the sub-range of *, for example: +- // +- // fmt.Printf("%*[3]d", 4, 5, 6) +- // ^ ^ we can only highlight this range when cursor in 6. '*' as a one-rune range will +- // highlight for 4. +- hasAsterisk := false +- +- // Try highlight Width if there is a *. +- if op.Width.Dynamic != -1 { +- hasAsterisk = true +- highlightPair(op.Width.Range, op.Width.Dynamic) +- } +- +- // Try highlight Precision if there is a *. +- if op.Prec.Dynamic != -1 { +- hasAsterisk = true +- highlightPair(op.Prec.Range, op.Prec.Dynamic) +- } +- +- // Try highlight Verb. +- if op.Verb.Verb != '%' { +- // If any * is found inside operation, narrow the highlight range. +- if hasAsterisk { +- highlightPair(op.Verb.Range, op.Verb.ArgIndex) +- } else { +- highlightPair(op.Range, op.Verb.ArgIndex) +- } +- } +- } +- +- // Second pass, try to highlight those missed operations. +- for rang, argIndex := range visited { +- if succeededArg == argIndex { +- highlightRange(result, rang.start, rang.end, protocol.Write) +- } +- } +-} +- +-type posRange struct { +- start, end token.Pos +-} +- +-// highlightFuncControlFlow adds highlight ranges to the result map to +-// associate results and result parameters. +-// +-// Specifically, if the cursor is in a result or result parameter, all +-// results and result parameters with the same index are highlighted. If the +-// cursor is in a 'func' or 'return' keyword, the func keyword as well as all +-// returns from that func are highlighted. +-// +-// As a special case, if the cursor is within a complicated expression, control +-// flow highlighting is disabled, as it would highlight too much. +-func highlightFuncControlFlow(path []ast.Node, result map[posRange]protocol.DocumentHighlightKind) { +- +- var ( +- funcType *ast.FuncType // type of enclosing func, or nil +- funcBody *ast.BlockStmt // body of enclosing func, or nil +- returnStmt *ast.ReturnStmt // enclosing ReturnStmt within the func, or nil +- ) +- +-findEnclosingFunc: +- for i, n := range path { +- switch n := n.(type) { +- // TODO(rfindley, low priority): these pre-existing cases for KeyValueExpr +- // and CallExpr appear to avoid highlighting when the cursor is in a +- // complicated expression. However, the basis for this heuristic is +- // unclear. Can we formalize a rationale? +- case *ast.KeyValueExpr: +- // If cursor is in a key: value expr, we don't want control flow highlighting. +- return +- +- case *ast.CallExpr: +- // If cursor is an arg in a callExpr, we don't want control flow highlighting. +- if i > 0 { +- for _, arg := range n.Args { +- if arg == path[i-1] { +- return +- } +- } +- } +- +- case *ast.FuncLit: +- funcType = n.Type +- funcBody = n.Body +- break findEnclosingFunc +- +- case *ast.FuncDecl: +- funcType = n.Type +- funcBody = n.Body +- break findEnclosingFunc +- +- case *ast.ReturnStmt: +- returnStmt = n +- } +- } +- +- if funcType == nil { +- return // cursor is not in a function +- } +- +- // Helper functions for inspecting the current location. 
+- var ( +- pos = path[0].Pos() +- inSpan = func(start, end token.Pos) bool { return start <= pos && pos < end } +- inNode = func(n ast.Node) bool { return inSpan(n.Pos(), n.End()) } +- ) +- +- inResults := funcType.Results != nil && inNode(funcType.Results) +- +- // If the cursor is on a "return" or "func" keyword, but not highlighting any +- // specific field or expression, we should highlight all of the exit points +- // of the function, including the "return" and "func" keywords. +- funcEnd := funcType.Func + token.Pos(len("func")) +- highlightAll := path[0] == returnStmt || inSpan(funcType.Func, funcEnd) +- var highlightIndexes map[int]bool +- +- if highlightAll { +- // Add the "func" part of the func declaration. +- highlightRange(result, funcType.Func, funcEnd, protocol.Text) +- } else if returnStmt == nil && !inResults { +- return // nothing to highlight +- } else { +- // If we're not highlighting the entire return statement, we need to collect +- // specific result indexes to highlight. This may be more than one index if +- // the cursor is on a multi-name result field, but not in any specific name. +- if !highlightAll { +- highlightIndexes = make(map[int]bool) +- if returnStmt != nil { +- for i, n := range returnStmt.Results { +- if inNode(n) { +- highlightIndexes[i] = true +- break +- } +- } +- } +- +- if funcType.Results != nil { +- // Scan fields, either adding highlights according to the highlightIndexes +- // computed above, or accounting for the cursor position within the result +- // list. +- // (We do both at once to avoid repeating the cumbersome field traversal.) +- i := 0 +- findField: +- for _, field := range funcType.Results.List { +- for j, name := range field.Names { +- if inNode(name) || highlightIndexes[i+j] { +- highlightNode(result, name, protocol.Text) +- highlightIndexes[i+j] = true +- break findField // found/highlighted the specific name +- } +- } +- // If the cursor is in a field but not in a name (e.g. in the space, or +- // the type), highlight the whole field. +- // +- // Note that this may not be ideal if we're at e.g. +- // +- // (x,‸y int, z int8) +- // +- // ...where it would make more sense to highlight only y. But we don't +- // reach this function if not in a func, return, ident, or basiclit. +- if inNode(field) || highlightIndexes[i] { +- highlightNode(result, field, protocol.Text) +- highlightIndexes[i] = true +- if inNode(field) { +- for j := range field.Names { +- highlightIndexes[i+j] = true +- } +- } +- break findField // found/highlighted the field +- } +- +- n := len(field.Names) +- if n == 0 { +- n = 1 +- } +- i += n +- } +- } +- } +- } +- +- if funcBody != nil { +- ast.Inspect(funcBody, func(n ast.Node) bool { +- switch n := n.(type) { +- case *ast.FuncDecl, *ast.FuncLit: +- // Don't traverse into any functions other than enclosingFunc. +- return false +- case *ast.ReturnStmt: +- if highlightAll { +- // Add the entire return statement. +- highlightNode(result, n, protocol.Text) +- } else { +- // Add the highlighted indexes. +- for i, expr := range n.Results { +- if highlightIndexes[i] { +- highlightNode(result, expr, protocol.Text) +- } +- } +- } +- return false +- +- } +- return true +- }) +- } +-} +- +-// highlightUnlabeledBreakFlow highlights the innermost enclosing for/range/switch or swlect +-func highlightUnlabeledBreakFlow(path []ast.Node, info *types.Info, result map[posRange]protocol.DocumentHighlightKind) { +- // Reverse walk the path until we find closest loop, select, or switch. 
+- for _, n := range path { +- switch n.(type) { +- case *ast.ForStmt, *ast.RangeStmt: +- highlightLoopControlFlow(path, info, result) +- return // only highlight the innermost statement +- case *ast.SwitchStmt, *ast.TypeSwitchStmt: +- highlightSwitchFlow(path, info, result) +- return +- case *ast.SelectStmt: +- // TODO: add highlight when breaking a select. +- return +- } +- } +-} +- +-// highlightLabeledFlow highlights the enclosing labeled for, range, +-// or switch statement denoted by a labeled break or continue stmt. +-func highlightLabeledFlow(path []ast.Node, info *types.Info, stmt *ast.BranchStmt, result map[posRange]protocol.DocumentHighlightKind) { +- use := info.Uses[stmt.Label] +- if use == nil { +- return +- } +- for _, n := range path { +- if label, ok := n.(*ast.LabeledStmt); ok && info.Defs[label.Label] == use { +- switch label.Stmt.(type) { +- case *ast.ForStmt, *ast.RangeStmt: +- highlightLoopControlFlow([]ast.Node{label.Stmt, label}, info, result) +- case *ast.SwitchStmt, *ast.TypeSwitchStmt: +- highlightSwitchFlow([]ast.Node{label.Stmt, label}, info, result) +- } +- return +- } +- } +-} +- +-func labelFor(path []ast.Node) *ast.Ident { +- if len(path) > 1 { +- if n, ok := path[1].(*ast.LabeledStmt); ok { +- return n.Label +- } +- } +- return nil +-} +- +-func highlightLoopControlFlow(path []ast.Node, info *types.Info, result map[posRange]protocol.DocumentHighlightKind) { +- var loop ast.Node +- var loopLabel *ast.Ident +- stmtLabel := labelFor(path) +-Outer: +- // Reverse walk the path till we get to the for loop. +- for i := range path { +- switch n := path[i].(type) { +- case *ast.ForStmt, *ast.RangeStmt: +- loopLabel = labelFor(path[i:]) +- +- if stmtLabel == nil || loopLabel == stmtLabel { +- loop = n +- break Outer +- } +- } +- } +- if loop == nil { +- return +- } +- +- // Add the for statement. +- rngStart := loop.Pos() +- rngEnd := loop.Pos() + token.Pos(len("for")) +- highlightRange(result, rngStart, rngEnd, protocol.Text) +- +- // Traverse AST to find branch statements within the same for-loop. +- ast.Inspect(loop, func(n ast.Node) bool { +- switch n.(type) { +- case *ast.ForStmt, *ast.RangeStmt: +- return loop == n +- case *ast.SwitchStmt, *ast.TypeSwitchStmt, *ast.SelectStmt: +- return false +- } +- b, ok := n.(*ast.BranchStmt) +- if !ok { +- return true +- } +- if b.Label == nil || info.Uses[b.Label] == info.Defs[loopLabel] { +- highlightNode(result, b, protocol.Text) +- } +- return true +- }) +- +- // Find continue statements in the same loop or switches/selects. +- ast.Inspect(loop, func(n ast.Node) bool { +- switch n.(type) { +- case *ast.ForStmt, *ast.RangeStmt: +- return loop == n +- } +- +- if n, ok := n.(*ast.BranchStmt); ok && n.Tok == token.CONTINUE { +- highlightNode(result, n, protocol.Text) +- } +- return true +- }) +- +- // We don't need to check other for loops if we aren't looking for labeled statements. +- if loopLabel == nil { +- return +- } +- +- // Find labeled branch statements in any loop. 
+- ast.Inspect(loop, func(n ast.Node) bool { +- b, ok := n.(*ast.BranchStmt) +- if !ok { +- return true +- } +- // statement with labels that matches the loop +- if b.Label != nil && info.Uses[b.Label] == info.Defs[loopLabel] { +- highlightNode(result, b, protocol.Text) +- } +- return true +- }) +-} +- +-func highlightSwitchFlow(path []ast.Node, info *types.Info, result map[posRange]protocol.DocumentHighlightKind) { +- var switchNode ast.Node +- var switchNodeLabel *ast.Ident +- stmtLabel := labelFor(path) +-Outer: +- // Reverse walk the path till we get to the switch statement. +- for i := range path { +- switch n := path[i].(type) { +- case *ast.SwitchStmt, *ast.TypeSwitchStmt: +- switchNodeLabel = labelFor(path[i:]) +- if stmtLabel == nil || switchNodeLabel == stmtLabel { +- switchNode = n +- break Outer +- } +- } +- } +- // Cursor is not in a switch statement +- if switchNode == nil { +- return +- } +- +- // Add the switch statement. +- rngStart := switchNode.Pos() +- rngEnd := switchNode.Pos() + token.Pos(len("switch")) +- highlightRange(result, rngStart, rngEnd, protocol.Text) +- +- // Traverse AST to find break statements within the same switch. +- ast.Inspect(switchNode, func(n ast.Node) bool { +- switch n.(type) { +- case *ast.SwitchStmt, *ast.TypeSwitchStmt: +- return switchNode == n +- case *ast.ForStmt, *ast.RangeStmt, *ast.SelectStmt: +- return false +- } +- +- b, ok := n.(*ast.BranchStmt) +- if !ok || b.Tok != token.BREAK { +- return true +- } +- +- if b.Label == nil || info.Uses[b.Label] == info.Defs[switchNodeLabel] { +- highlightNode(result, b, protocol.Text) +- } +- return true +- }) +- +- // We don't need to check other switches if we aren't looking for labeled statements. +- if switchNodeLabel == nil { +- return +- } +- +- // Find labeled break statements in any switch +- ast.Inspect(switchNode, func(n ast.Node) bool { +- b, ok := n.(*ast.BranchStmt) +- if !ok || b.Tok != token.BREAK { +- return true +- } +- +- if b.Label != nil && info.Uses[b.Label] == info.Defs[switchNodeLabel] { +- highlightNode(result, b, protocol.Text) +- } +- +- return true +- }) +-} +- +-func highlightNode(result map[posRange]protocol.DocumentHighlightKind, n ast.Node, kind protocol.DocumentHighlightKind) { +- highlightRange(result, n.Pos(), n.End(), kind) +-} +- +-func highlightRange(result map[posRange]protocol.DocumentHighlightKind, pos, end token.Pos, kind protocol.DocumentHighlightKind) { +- rng := posRange{pos, end} +- // Order of traversal is important: some nodes (e.g. identifiers) are +- // visited more than once, but the kind set during the first visitation "wins". +- if _, exists := result[rng]; !exists { +- result[rng] = kind +- } +-} +- +-func highlightIdentifier(id *ast.Ident, file *ast.File, info *types.Info, result map[posRange]protocol.DocumentHighlightKind) { +- +- // obj may be nil if the Ident is undefined. +- // In this case, the behavior expected by tests is +- // to match other undefined Idents of the same name. +- obj := info.ObjectOf(id) +- +- highlightIdent := func(n *ast.Ident, kind protocol.DocumentHighlightKind) { +- if n.Name == id.Name && info.ObjectOf(n) == obj { +- highlightNode(result, n, kind) +- } +- } +- // highlightWriteInExpr is called for expressions that are +- // logically on the left side of an assignment. 
+- // We follow the behavior of VSCode+Rust and GoLand, which differs +- // slightly from types.TypeAndValue.Assignable: +- // *ptr = 1 // ptr write +- // *ptr.field = 1 // ptr read, field write +- // s.field = 1 // s read, field write +- // array[i] = 1 // array read +- var highlightWriteInExpr func(expr ast.Expr) +- highlightWriteInExpr = func(expr ast.Expr) { +- switch expr := expr.(type) { +- case *ast.Ident: +- highlightIdent(expr, protocol.Write) +- case *ast.SelectorExpr: +- highlightIdent(expr.Sel, protocol.Write) +- case *ast.StarExpr: +- highlightWriteInExpr(expr.X) +- case *ast.ParenExpr: +- highlightWriteInExpr(expr.X) +- } +- } +- +- ast.Inspect(file, func(n ast.Node) bool { +- switch n := n.(type) { +- case *ast.AssignStmt: +- for _, s := range n.Lhs { +- highlightWriteInExpr(s) +- } +- case *ast.GenDecl: +- if n.Tok == token.CONST || n.Tok == token.VAR { +- for _, spec := range n.Specs { +- if spec, ok := spec.(*ast.ValueSpec); ok { +- for _, ele := range spec.Names { +- highlightWriteInExpr(ele) +- } +- } +- } +- } +- case *ast.IncDecStmt: +- highlightWriteInExpr(n.X) +- case *ast.SendStmt: +- highlightWriteInExpr(n.Chan) +- case *ast.CompositeLit: +- t := info.TypeOf(n) +- if t == nil { +- t = types.Typ[types.Invalid] +- } +- if ptr, ok := t.Underlying().(*types.Pointer); ok { +- t = ptr.Elem() +- } +- if _, ok := t.Underlying().(*types.Struct); ok { +- for _, expr := range n.Elts { +- if expr, ok := (expr).(*ast.KeyValueExpr); ok { +- highlightWriteInExpr(expr.Key) +- } +- } +- } +- case *ast.RangeStmt: +- highlightWriteInExpr(n.Key) +- highlightWriteInExpr(n.Value) +- case *ast.Field: +- for _, name := range n.Names { +- highlightIdent(name, protocol.Text) +- } +- case *ast.Ident: +- // This case is reached for all Idents, +- // including those also visited by highlightWriteInExpr. +- if is[*types.Var](info.ObjectOf(n)) { +- highlightIdent(n, protocol.Read) +- } else { +- // kind of idents in PkgName, etc. is Text +- highlightIdent(n, protocol.Text) +- } +- case *ast.ImportSpec: +- pkgname := info.PkgNameOf(n) +- if pkgname == obj { +- if n.Name != nil { +- highlightNode(result, n.Name, protocol.Text) +- } else { +- highlightNode(result, n, protocol.Text) +- } +- } +- } +- return true +- }) +-} +diff -urN a/gopls/internal/golang/hover.go b/gopls/internal/golang/hover.go +--- a/gopls/internal/golang/hover.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/golang/hover.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,1814 +0,0 @@ +-// Copyright 2019 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. 
+- +-package golang +- +-import ( +- "bytes" +- "context" +- "encoding/json" +- "fmt" +- "go/ast" +- "go/constant" +- "go/doc" +- "go/format" +- "go/printer" +- "go/token" +- "go/types" +- "go/version" +- "io/fs" +- "path/filepath" +- "sort" +- "strconv" +- "strings" +- "text/tabwriter" +- "time" +- "unicode/utf8" +- +- "golang.org/x/text/unicode/runenames" +- "golang.org/x/tools/go/ast/astutil" +- "golang.org/x/tools/go/types/typeutil" +- "golang.org/x/tools/gopls/internal/cache" +- "golang.org/x/tools/gopls/internal/cache/metadata" +- "golang.org/x/tools/gopls/internal/cache/parsego" +- "golang.org/x/tools/gopls/internal/file" +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/gopls/internal/settings" +- "golang.org/x/tools/gopls/internal/util/bug" +- "golang.org/x/tools/gopls/internal/util/safetoken" +- "golang.org/x/tools/gopls/internal/util/tokeninternal" +- gastutil "golang.org/x/tools/internal/astutil" +- "golang.org/x/tools/internal/event" +- "golang.org/x/tools/internal/stdlib" +- "golang.org/x/tools/internal/typeparams" +- "golang.org/x/tools/internal/typesinternal" +-) +- +-// hoverResult contains the (internal) result of a hover query. +-// It is formatted in one of several formats as determined by the +-// HoverKind setting. +-type hoverResult struct { +- // The fields below are exported to define the JSON hover format. +- // TODO(golang/go#70233): (re)remove support for JSON hover. +- +- // Synopsis is a single sentence Synopsis of the symbol's documentation. +- // +- // TODO(adonovan): in what syntax? It (usually) comes from doc.Synopsis, +- // which produces "Text" form, but it may be fed to +- // DocCommentToMarkdown, which expects doc comment syntax. +- Synopsis string `json:"synopsis"` +- +- // FullDocumentation is the symbol's full documentation. +- FullDocumentation string `json:"fullDocumentation"` +- +- // Signature is the symbol's Signature. +- Signature string `json:"signature"` +- +- // SingleLine is a single line describing the symbol. +- // This is recommended only for use in clients that show a single line for hover. +- SingleLine string `json:"singleLine"` +- +- // SymbolName is the human-readable name to use for the symbol in links. +- SymbolName string `json:"symbolName"` +- +- // LinkPath is the path of the package enclosing the given symbol, +- // with the module portion (if any) replaced by "module@version". +- // +- // For example: "github.com/google/go-github/v48@v48.1.0/github". +- // +- // Use LinkTarget + "/" + LinkPath + "#" + LinkAnchor to form a pkgsite URL. +- LinkPath string `json:"linkPath"` +- +- // LinkAnchor is the pkg.go.dev link anchor for the given symbol. +- // For example, the "Node" part of "pkg.go.dev/go/ast#Node". +- LinkAnchor string `json:"linkAnchor"` +- +- // New fields go below, and are unexported. The existing +- // exported fields are underspecified and have already +- // constrained our movements too much. A detailed JSON +- // interface might be nice, but it needs a design and a +- // precise specification. +- // TODO(golang/go#70233): (re)deprecate the JSON hover output. +- +- // typeDecl is the declaration syntax for a type, +- // or "" for a non-type. +- typeDecl string +- +- // methods is the list of descriptions of methods of a type, +- // omitting any that are obvious from typeDecl. +- // It is "" for a non-type. +- methods string +- +- // promotedFields is the list of descriptions of accessible +- // fields of a (struct) type that were promoted through an +- // embedded field. 
+- promotedFields string +- +- // footer is additional content to insert at the bottom of the hover +- // documentation, before the pkgdoc link. +- footer string +-} +- +-// Hover implements the "textDocument/hover" RPC for Go files. +-// It may return nil even on success. +-// +-// If pkgURL is non-nil, it should be used to generate doc links. +-func Hover(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle, position protocol.Position, pkgURL func(path PackagePath, fragment string) protocol.URI) (*protocol.Hover, error) { +- ctx, done := event.Start(ctx, "golang.Hover") +- defer done() +- +- rng, h, err := hover(ctx, snapshot, fh, position) +- if err != nil { +- return nil, err +- } +- if h == nil { +- return nil, nil +- } +- hover, err := formatHover(h, snapshot.Options(), pkgURL) +- if err != nil { +- return nil, err +- } +- return &protocol.Hover{ +- Contents: protocol.MarkupContent{ +- Kind: snapshot.Options().PreferredContentFormat, +- Value: hover, +- }, +- Range: rng, +- }, nil +-} +- +-// findRhsTypeDecl finds an alias's rhs type and returns its declaration. +-// The rhs of an alias might be an alias as well, but we feel this is a rare case. +-// It returns an empty string if the given obj is not an alias. +-func findRhsTypeDecl(ctx context.Context, snapshot *cache.Snapshot, pkg *cache.Package, obj types.Object) (string, error) { +- if alias, ok := obj.Type().(*types.Alias); ok { +- // we choose Rhs instead of types.Unalias to make the connection between original alias +- // and the corresponding aliased type clearer. +- // types.Unalias brings confusion because it breaks the connection from A to C given +- // the alias chain like 'type ( A = B; B = C; )' except we show all transitive alias +- // from start to the end. As it's rare, we don't do so. +- if named, ok := alias.Rhs().(*types.Named); ok { +- obj = named.Obj() +- declPGF1, declPos1, err := parseFull(ctx, snapshot, pkg.FileSet(), obj) +- if err != nil { +- return "", err +- } +- realTypeDecl, _, err := typeDeclContent(declPGF1, declPos1, obj.Name()) +- return realTypeDecl, err +- } +- } +- +- return "", nil +-} +- +-// hover computes hover information at the given position. If we do not support +-// hovering at the position, it returns _, nil, nil: an error is only returned +-// if the position is valid but we fail to compute hover information. +-// +-// TODO(adonovan): strength-reduce file.Handle to protocol.DocumentURI. +-func hover(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle, pp protocol.Position) (protocol.Range, *hoverResult, error) { +- // Check for hover inside the builtin file before attempting type checking +- // below. NarrowestPackageForFile may or may not succeed, depending on +- // whether this is a GOROOT view, but even if it does succeed the resulting +- // package will be command-line-arguments package. The user should get a +- // hover for the builtin object, not the object type checked from the +- // builtin.go. 
+- if snapshot.IsBuiltin(fh.URI()) { +- pgf, err := snapshot.BuiltinFile(ctx) +- if err != nil { +- return protocol.Range{}, nil, err +- } +- pos, err := pgf.PositionPos(pp) +- if err != nil { +- return protocol.Range{}, nil, err +- } +- path, _ := astutil.PathEnclosingInterval(pgf.File, pos, pos) +- if id, ok := path[0].(*ast.Ident); ok { +- rng, err := pgf.NodeRange(id) +- if err != nil { +- return protocol.Range{}, nil, err +- } +- var obj types.Object +- if id.Name == "Error" { +- obj = types.Universe.Lookup("error").Type().Underlying().(*types.Interface).Method(0) +- } else { +- obj = types.Universe.Lookup(id.Name) +- } +- if obj != nil { +- h, err := hoverBuiltin(ctx, snapshot, obj) +- return rng, h, err +- } +- } +- return protocol.Range{}, nil, nil // no object to hover +- } +- +- pkg, pgf, err := NarrowestPackageForFile(ctx, snapshot, fh.URI()) +- if err != nil { +- return protocol.Range{}, nil, err +- } +- pos, err := pgf.PositionPos(pp) +- if err != nil { +- return protocol.Range{}, nil, err +- } +- +- // Handle hovering over the package name, which does not have an associated +- // object. +- // As with import paths, we allow hovering just after the package name. +- if pgf.File.Name != nil && gastutil.NodeContains(pgf.File.Name, pos) { +- return hoverPackageName(pkg, pgf) +- } +- +- // Handle hovering over embed directive argument. +- pattern, embedRng := parseEmbedDirective(pgf.Mapper, pp) +- if pattern != "" { +- return hoverEmbed(fh, embedRng, pattern) +- } +- +- // hoverRange is the range reported to the client (e.g. for highlighting). +- // It may be an expansion around the selected identifier, +- // for instance when hovering over a linkname directive or doc link. +- var hoverRange *protocol.Range +- // Handle linkname directive by overriding what to look for. +- if pkgPath, name, offset := parseLinkname(pgf.Mapper, pp); pkgPath != "" && name != "" { +- // rng covering 2nd linkname argument: pkgPath.name. +- rng, err := pgf.PosRange(pgf.Tok.Pos(offset), pgf.Tok.Pos(offset+len(pkgPath)+len(".")+len(name))) +- if err != nil { +- return protocol.Range{}, nil, fmt.Errorf("range over linkname arg: %w", err) +- } +- hoverRange = &rng +- +- pkg, pgf, pos, err = findLinkname(ctx, snapshot, PackagePath(pkgPath), name) +- if err != nil { +- return protocol.Range{}, nil, fmt.Errorf("find linkname: %w", err) +- } +- } +- +- // Handle hovering over a doc link +- if obj, rng, _ := resolveDocLink(pkg, pgf, pos); obj != nil { +- // Built-ins have no position. +- if isBuiltin(obj) { +- h, err := hoverBuiltin(ctx, snapshot, obj) +- return rng, h, err +- } +- +- // Find position in declaring file. +- hoverRange = &rng +- objURI := safetoken.StartPosition(pkg.FileSet(), obj.Pos()) +- pkg, pgf, err = NarrowestPackageForFile(ctx, snapshot, protocol.URIFromPath(objURI.Filename)) +- if err != nil { +- return protocol.Range{}, nil, err +- } +- pos = pgf.Tok.Pos(objURI.Offset) +- } +- +- // Handle hovering over import paths, which do not have an associated +- // identifier. 
+- for _, spec := range pgf.File.Imports { +- if gastutil.NodeContains(spec, pos) { +- path := metadata.UnquoteImportPath(spec) +- hoverRes, err := hoverPackageRef(ctx, snapshot, pkg, path) +- if err != nil { +- return protocol.Range{}, nil, err +- } +- rng, err := pgf.NodeRange(spec.Path) +- if err != nil { +- return protocol.Range{}, nil, err +- } +- if hoverRange == nil { +- hoverRange = &rng +- } +- return *hoverRange, hoverRes, nil // (hoverRes may be nil) +- } +- } +- +- // Handle hovering over various special kinds of syntax node. +- if path, _ := astutil.PathEnclosingInterval(pgf.File, pos, pos); len(path) > 0 { +- switch node := path[0].(type) { +- // Handle hovering over (non-import-path) literals. +- case *ast.BasicLit: +- return hoverLit(pgf, node, pos) +- case *ast.ReturnStmt: +- return hoverReturnStatement(pgf, path, node) +- } +- } +- +- // By convention, we qualify hover information relative to the package +- // from which the request originated. +- qual := typesinternal.FileQualifier(pgf.File, pkg.Types()) +- +- // Handle hover over identifier. +- +- // The general case: compute hover information for the object referenced by +- // the identifier at pos. +- ident, obj, selectedType := referencedObject(pkg, pgf, pos) +- if obj == nil || ident == nil { +- return protocol.Range{}, nil, nil // no object to hover +- } +- +- // Unless otherwise specified, rng covers the ident being hovered. +- if hoverRange == nil { +- rng, err := pgf.NodeRange(ident) +- if err != nil { +- return protocol.Range{}, nil, err +- } +- hoverRange = &rng +- } +- +- // Handle type switch identifiers as a special case, since they don't have an +- // object. +- // +- // There's not much useful information to provide. +- if selectedType != nil { +- v := types.NewVar(obj.Pos(), obj.Pkg(), obj.Name(), selectedType) +- typesinternal.SetVarKind(v, typesinternal.LocalVar) +- signature := types.ObjectString(v, qual) +- return *hoverRange, &hoverResult{ +- Signature: signature, +- SingleLine: signature, +- SymbolName: v.Name(), +- }, nil +- } +- +- if isBuiltin(obj) { +- // Built-ins have no position. +- h, err := hoverBuiltin(ctx, snapshot, obj) +- return *hoverRange, h, err +- } +- +- // For all other objects, consider the full syntax of their declaration in +- // order to correctly compute their documentation, signature, and link. +- // +- // Beware: decl{PGF,Pos} are not necessarily associated with pkg.FileSet(). +- declPGF, declPos, err := parseFull(ctx, snapshot, pkg.FileSet(), obj) +- if err != nil { +- return protocol.Range{}, nil, fmt.Errorf("re-parsing declaration of %s: %v", obj.Name(), err) +- } +- decl, spec, field := findDeclInfo([]*ast.File{declPGF.File}, declPos) // may be nil^3 +- comment := chooseDocComment(decl, spec, field) +- docText := comment.Text() +- +- // By default, types.ObjectString provides a reasonable signature. +- signature := objectString(obj, qual, declPos, declPGF.Tok, spec) +- +- // When hovering over a reference to a promoted struct field, +- // show the implicitly selected intervening fields. 
+- cur, ok := pgf.Cursor.FindByPos(pos, pos) +- if !ok { +- return protocol.Range{}, nil, fmt.Errorf("Invalid hover position, failed to get cursor") +- } +- if obj, ok := obj.(*types.Var); ok && obj.IsField() { +- if selExpr, ok := cur.Parent().Node().(*ast.SelectorExpr); ok { +- sel, ok := pkg.TypesInfo().Selections[selExpr] +- if ok && len(sel.Index()) > 1 { +- var buf bytes.Buffer +- buf.WriteString(" // through ") +- t := typesinternal.Unpointer(sel.Recv()) +- for i, index := range sel.Index()[:len(sel.Index())-1] { +- if i > 0 { +- buf.WriteString(", ") +- } +- field := typesinternal.Unpointer(t.Underlying()).(*types.Struct).Field(index) +- t = field.Type() +- // Inv: fieldType is N or *N for some NamedOrAlias type N. +- if ptr, ok := t.(*types.Pointer); ok { +- buf.WriteString("*") +- t = ptr.Elem() +- } +- // Be defensive in case of ill-typed code: +- if named, ok := t.(typesinternal.NamedOrAlias); ok { +- buf.WriteString(named.Obj().Name()) +- } +- } +- // Update signature to include embedded struct info. +- signature += buf.String() +- } +- } +- } +- +- singleLineSignature := signature +- +- // Display struct tag for struct fields at the end of the signature. +- if field != nil && field.Tag != nil { +- signature += " " + field.Tag.Value +- } +- +- // TODO(rfindley): we could do much better for inferred signatures. +- // TODO(adonovan): fuse the two calls below. +- if inferred := inferredSignature(pkg.TypesInfo(), ident); inferred != nil { +- if s := inferredSignatureString(obj, qual, inferred); s != "" { +- signature = s +- } +- } +- +- // Compute size information for types, +- // including allocator size class, +- // and (size, offset) for struct fields. +- // +- // Also, if a struct type's field ordering is significantly +- // wasteful of space, report its optimal size. +- // +- // This information is useful when debugging crashes or +- // optimizing layout. To reduce distraction, we show it only +- // when hovering over the declaring identifier, +- // but not referring identifiers. +- // +- // Size and alignment vary across OS/ARCH. +- // Gopls will select the appropriate build configuration when +- // viewing a type declaration in a build-tagged file, but will +- // use the default build config for all other types, even +- // if they embed platform-variant types. +- // +- var sizeOffset string +- +- // As painfully learned in golang/go#69362, Defs can contain nil entries. +- if def, _ := pkg.TypesInfo().Defs[ident]; def != nil && ident.Pos() == def.Pos() { +- // This is the declaring identifier. +- // (We can't simply use ident.Pos() == obj.Pos() because +- // referencedObject prefers the TypeName for an embedded field). +- +- // format returns the decimal and hex representation of x. +- format := func(x int64) string { +- if x < 10 { +- return fmt.Sprintf("%d", x) +- } +- return fmt.Sprintf("%[1]d (%#[1]x)", x) +- } +- +- path := pathEnclosingObjNode(pgf.File, pos) +- +- // Build string of form "size=... (X% wasted), class=..., offset=...". +- size, wasted, offset := computeSizeOffsetInfo(pkg, path, obj) +- var buf strings.Builder +- if size >= 0 { +- fmt.Fprintf(&buf, "size=%s", format(size)) +- if wasted >= 20 { // >=20% wasted +- fmt.Fprintf(&buf, " (%d%% wasted)", wasted) +- } +- +- // Include allocator size class, if larger. 
+- if class := sizeClass(size); class > size { +- fmt.Fprintf(&buf, ", class=%s", format(class)) +- } +- } +- if offset >= 0 { +- if buf.Len() > 0 { +- buf.WriteString(", ") +- } +- fmt.Fprintf(&buf, "offset=%s", format(offset)) +- } +- sizeOffset = buf.String() +- } +- +- var typeDecl, methods, fields string +- +- // For "objects defined by a type spec", the signature produced by +- // objectString is insufficient: +- // (1) large structs are formatted poorly, with no newlines +- // (2) we lose inline comments +- // Furthermore, we include a summary of their method set. +- _, isTypeName := obj.(*types.TypeName) +- _, isTypeParam := types.Unalias(obj.Type()).(*types.TypeParam) +- if isTypeName && !isTypeParam { +- var spec1 *ast.TypeSpec +- typeDecl, spec1, err = typeDeclContent(declPGF, declPos, obj.Name()) +- if err != nil { +- return protocol.Range{}, nil, err +- } +- +- // Splice in size/offset at end of first line. +- // "type T struct { // size=..." +- if sizeOffset != "" { +- nl := strings.IndexByte(typeDecl, '\n') +- if nl < 0 { +- nl = len(typeDecl) +- } +- typeDecl = typeDecl[:nl] + " // " + sizeOffset + typeDecl[nl:] +- } +- +- // Promoted fields +- // +- // Show a table of accessible fields of the (struct) +- // type that may not be visible in the syntax (above) +- // due to promotion through embedded fields. +- // +- // Example: +- // +- // // Embedded fields: +- // foo int // through x.y +- // z string // through x.y +- if prom := promotedFields(obj.Type(), pkg.Types()); len(prom) > 0 { +- var b strings.Builder +- b.WriteString("// Embedded fields:\n") +- w := tabwriter.NewWriter(&b, 0, 8, 1, ' ', 0) +- for _, f := range prom { +- fmt.Fprintf(w, "%s\t%s\t// through %s\t\n", +- f.field.Name(), +- types.TypeString(f.field.Type(), qual), +- f.path) +- } +- w.Flush() // ignore error +- b.WriteByte('\n') +- fields = b.String() +- } +- +- // -- methods -- +- +- // For an interface type, explicit methods will have +- // already been displayed when the node was formatted +- // above. Don't list these again. +- var skip map[string]bool +- if iface, ok := spec1.Type.(*ast.InterfaceType); ok { +- if iface.Methods.List != nil { +- for _, m := range iface.Methods.List { +- if len(m.Names) == 1 { +- if skip == nil { +- skip = make(map[string]bool) +- } +- skip[m.Names[0].Name] = true +- } +- } +- } +- } +- +- // Display all the type's accessible methods, +- // including those that require a pointer receiver, +- // and those promoted from embedded struct fields or +- // embedded interfaces. +- var b strings.Builder +- for _, m := range typeutil.IntuitiveMethodSet(obj.Type(), nil) { +- if !accessibleTo(m.Obj(), pkg.Types()) { +- continue // inaccessible +- } +- if skip[m.Obj().Name()] { +- continue // redundant with format.Node above +- } +- if b.Len() > 0 { +- b.WriteByte('\n') +- } +- +- // Use objectString for its prettier rendering of method receivers. +- b.WriteString(objectString(m.Obj(), qual, token.NoPos, nil, nil)) +- } +- methods = b.String() +- +- signature = typeDecl + "\n" + methods +- } else { +- // Non-types +- if sizeOffset != "" { +- signature += " // " + sizeOffset +- } +- } +- +- if isTypeName { +- // get the real type decl only if current object is a type, +- // for non-types, we'd better hide the real type decl to avoid possible confusion. +- // +- // realTypeDecl is defined to store the underlying definition of an alias. 
+- realTypeDecl, _ := findRhsTypeDecl(ctx, snapshot, pkg, obj) // tolerate the error +- if realTypeDecl != "" { +- typeDecl += fmt.Sprintf("\n\n%s", realTypeDecl) +- } +- } +- +- // Compute link data (on pkg.go.dev or other documentation host). +- // +- // If linkPath is empty, the symbol is not linkable. +- var ( +- linkName string // => link title, always non-empty +- linkPath string // => link path +- anchor string // link anchor +- linkMeta *metadata.Package // metadata for the linked package +- ) +- { +- linkMeta = findFileInDeps(snapshot, pkg.Metadata(), declPGF.URI) +- if linkMeta == nil { +- return protocol.Range{}, nil, bug.Errorf("no package data for %s", declPGF.URI) +- } +- +- // For package names, we simply link to their imported package. +- if pkgName, ok := obj.(*types.PkgName); ok { +- linkName = pkgName.Name() +- linkPath = pkgName.Imported().Path() +- impID := linkMeta.DepsByPkgPath[PackagePath(pkgName.Imported().Path())] +- linkMeta = snapshot.Metadata(impID) +- if linkMeta == nil { +- // Broken imports have fake package paths, so it is not a bug if we +- // don't have metadata. As of writing, there is no way to distinguish +- // broken imports from a true bug where expected metadata is missing. +- return protocol.Range{}, nil, fmt.Errorf("no package data for %s", declPGF.URI) +- } +- } else { +- // For all others, check whether the object is in the package scope, or +- // an exported field or method of an object in the package scope. +- // +- // We try to match pkgsite's heuristics for what is linkable, and what is +- // not. +- var recv types.Object +- switch obj := obj.(type) { +- case *types.Func: +- sig := obj.Signature() +- if sig.Recv() != nil { +- tname := typeToObject(sig.Recv().Type()) +- if tname != nil { // beware typed nil +- recv = tname +- } +- } +- case *types.Var: +- if obj.IsField() { +- if spec, ok := spec.(*ast.TypeSpec); ok { +- typeName := spec.Name +- scopeObj, _ := obj.Pkg().Scope().Lookup(typeName.Name).(*types.TypeName) +- if scopeObj != nil { +- if st, _ := scopeObj.Type().Underlying().(*types.Struct); st != nil { +- for i := 0; i < st.NumFields(); i++ { +- if obj == st.Field(i) { +- recv = scopeObj +- } +- } +- } +- } +- } +- } +- } +- +- // Even if the object is not available in package documentation, it may +- // be embedded in a documented receiver. Detect this by searching +- // enclosing selector expressions. +- // +- // TODO(rfindley): pkgsite doesn't document fields from embedding, just +- // methods. +- if recv == nil || !recv.Exported() { +- path := pathEnclosingObjNode(pgf.File, pos) +- if enclosing := searchForEnclosing(pkg.TypesInfo(), path); enclosing != nil { +- recv = enclosing +- } else { +- recv = nil // note: just recv = ... could result in a typed nil. 
+- } +- } +- +- pkg := obj.Pkg() +- if recv != nil { +- linkName = fmt.Sprintf("(%s.%s).%s", pkg.Name(), recv.Name(), obj.Name()) +- if obj.Exported() && recv.Exported() && typesinternal.IsPackageLevel(recv) { +- linkPath = pkg.Path() +- anchor = fmt.Sprintf("%s.%s", recv.Name(), obj.Name()) +- } +- } else { +- linkName = fmt.Sprintf("%s.%s", pkg.Name(), obj.Name()) +- if obj.Exported() && typesinternal.IsPackageLevel(obj) { +- linkPath = pkg.Path() +- anchor = obj.Name() +- } +- } +- } +- } +- +- if snapshot.IsGoPrivatePath(linkPath) || linkMeta.ForTest != "" { +- linkPath = "" +- } else if linkMeta.Module != nil && linkMeta.Module.Version != "" { +- mod := linkMeta.Module +- linkPath = strings.Replace(linkPath, mod.Path, cache.ResolvedString(mod), 1) +- } +- +- // Handle hover over an imported package name identifier. +- if pkgName, ok := obj.(*types.PkgName); ok { +- hoverRes, err := hoverPackageRef(ctx, snapshot, pkg, metadata.ImportPath(pkgName.Imported().Path())) +- if err != nil { +- return protocol.Range{}, nil, err +- } +- hoverRange, err := pgf.NodeRange(ident) +- if err != nil { +- return protocol.Range{}, nil, err +- } +- hoverRes.LinkAnchor = anchor +- hoverRes.LinkPath = linkPath +- hoverRes.SymbolName = linkName +- return hoverRange, hoverRes, nil // (hoverRes may be nil) +- } +- +- var footer string +- if sym := StdSymbolOf(obj); sym != nil && sym.Version > 0 { +- footer = fmt.Sprintf("Added in %v", sym.Version) +- } +- +- return *hoverRange, &hoverResult{ +- Synopsis: doc.Synopsis(docText), +- FullDocumentation: docText, +- SingleLine: singleLineSignature, +- SymbolName: linkName, +- Signature: signature, +- LinkPath: linkPath, +- LinkAnchor: anchor, +- typeDecl: typeDecl, +- methods: methods, +- promotedFields: fields, +- footer: footer, +- }, nil +-} +- +-// typeDeclContent returns a well formatted type definition. +-func typeDeclContent(declPGF *parsego.File, declPos token.Pos, name string) (string, *ast.TypeSpec, error) { +- _, spec, _ := findDeclInfo([]*ast.File{declPGF.File}, declPos) // may be nil^3 +- // Don't duplicate comments. +- spec1, ok := spec.(*ast.TypeSpec) +- if !ok { +- // We cannot find a TypeSpec for this type or alias declaration +- // (that is not a type parameter or a built-in). +- // This should be impossible even for ill-formed trees; +- // we suspect that AST repair may be creating inconsistent +- // positions. Don't report a bug in that case. (#64241) +- errorf := fmt.Errorf +- if !declPGF.Fixed() { +- errorf = bug.Errorf +- } +- return "", nil, errorf("type name %q without type spec", name) +- } +- spec2 := *spec1 +- spec2.Doc = nil +- spec2.Comment = nil +- +- var b strings.Builder +- b.WriteString("type ") +- fset := tokeninternal.FileSetFor(declPGF.Tok) +- // TODO(adonovan): use a smarter formatter that omits +- // inaccessible fields (non-exported ones from other packages). +- if err := format.Node(&b, fset, &spec2); err != nil { +- return "", nil, err +- } +- typeDecl := b.String() +- return typeDecl, spec1, nil +-} +- +-// hoverBuiltin computes hover information when hovering over a builtin +-// identifier. +-func hoverBuiltin(ctx context.Context, snapshot *cache.Snapshot, obj types.Object) (*hoverResult, error) { +- // Special handling for error.Error, which is the only builtin method. +- // +- // TODO(rfindley): can this be unified with the handling below? 
+- if obj.Name() == "Error" { +- signature := obj.String() +- return &hoverResult{ +- Signature: signature, +- SingleLine: signature, +- // TODO(rfindley): these are better than the current behavior. +- // SymbolName: "(error).Error", +- // LinkPath: "builtin", +- // LinkAnchor: "error.Error", +- }, nil +- } +- +- pgf, ident, err := builtinDecl(ctx, snapshot, obj) +- if err != nil { +- return nil, err +- } +- +- var ( +- comment *ast.CommentGroup +- decl ast.Decl +- ) +- path, _ := astutil.PathEnclosingInterval(pgf.File, ident.Pos(), ident.Pos()) +- for _, n := range path { +- switch n := n.(type) { +- case *ast.GenDecl: +- // Separate documentation and signature. +- comment = n.Doc +- node2 := *n +- node2.Doc = nil +- decl = &node2 +- case *ast.FuncDecl: +- // Ditto. +- comment = n.Doc +- node2 := *n +- node2.Doc = nil +- decl = &node2 +- } +- } +- +- signature := formatNodeFile(pgf.Tok, decl) +- // Replace fake types with their common equivalent. +- // TODO(rfindley): we should instead use obj.Type(), which would have the +- // *actual* types of the builtin call. +- signature = replacer.Replace(signature) +- +- docText := comment.Text() +- return &hoverResult{ +- Synopsis: doc.Synopsis(docText), +- FullDocumentation: docText, +- Signature: signature, +- SingleLine: obj.String(), +- SymbolName: obj.Name(), +- LinkPath: "builtin", +- LinkAnchor: obj.Name(), +- }, nil +-} +- +-// hoverPackageRef computes hover information for the package of the specified +-// path imported by pkg. If we do not have metadata for the hovered import, it +-// returns _ +-func hoverPackageRef(ctx context.Context, snapshot *cache.Snapshot, pkg *cache.Package, importPath metadata.ImportPath) (*hoverResult, error) { +- impID := pkg.Metadata().DepsByImpPath[importPath] +- if impID == "" { +- return nil, fmt.Errorf("no package data for import %q", importPath) +- } +- impMetadata := snapshot.Metadata(impID) +- if impMetadata == nil { +- return nil, bug.Errorf("failed to resolve import ID %q", impID) +- } +- +- // Find the first file with a package doc comment. +- var comment *ast.CommentGroup +- for _, f := range impMetadata.CompiledGoFiles { +- fh, err := snapshot.ReadFile(ctx, f) +- if err != nil { +- if ctx.Err() != nil { +- return nil, ctx.Err() +- } +- continue +- } +- pgf, err := snapshot.ParseGo(ctx, fh, parsego.Header) +- if err != nil { +- if ctx.Err() != nil { +- return nil, ctx.Err() +- } +- continue +- } +- if pgf.File.Doc != nil { +- comment = pgf.File.Doc +- break +- } +- } +- +- docText := comment.Text() +- return &hoverResult{ +- Signature: "package " + string(impMetadata.Name), +- Synopsis: doc.Synopsis(docText), +- FullDocumentation: docText, +- }, nil +-} +- +-// hoverPackageName computes hover information for the package name of the file +-// pgf in pkg. +-func hoverPackageName(pkg *cache.Package, pgf *parsego.File) (protocol.Range, *hoverResult, error) { +- var comment *ast.CommentGroup +- for _, pgf := range pkg.CompiledGoFiles() { +- if pgf.File.Doc != nil { +- comment = pgf.File.Doc +- break +- } +- } +- rng, err := pgf.NodeRange(pgf.File.Name) +- if err != nil { +- return protocol.Range{}, nil, err +- } +- docText := comment.Text() +- +- // List some package attributes at the bottom of the documentation, if +- // applicable. 
+- type attr struct{ title, value string } +- var attrs []attr +- +- if !metadata.IsCommandLineArguments(pkg.Metadata().ID) { +- attrs = append(attrs, attr{"Package path", string(pkg.Metadata().PkgPath)}) +- } +- +- if pkg.Metadata().Module != nil { +- attrs = append(attrs, attr{"Module", pkg.Metadata().Module.Path}) +- } +- +- // Show the effective language version for this package. +- if v := pkg.TypesInfo().FileVersions[pgf.File]; v != "" { +- attr := attr{value: version.Lang(v)} +- if v == pkg.Types().GoVersion() { +- attr.title = "Language version" +- } else { +- attr.title = "Language version (current file)" +- } +- attrs = append(attrs, attr) +- } +- +- // TODO(rfindley): consider exec'ing go here to compute DefaultGODEBUG, or +- // propose adding GODEBUG info to go/packages. +- +- var footer strings.Builder +- for i, attr := range attrs { +- if i > 0 { +- footer.WriteString("\n") +- } +- fmt.Fprintf(&footer, " - %s: %s", attr.title, attr.value) +- } +- +- return rng, &hoverResult{ +- Signature: "package " + string(pkg.Metadata().Name), +- Synopsis: doc.Synopsis(docText), +- FullDocumentation: docText, +- footer: footer.String(), +- }, nil +-} +- +-// hoverLit computes hover information when hovering over the basic literal lit +-// in the file pgf. The provided pos must be the exact position of the cursor, +-// as it is used to extract the hovered rune in strings. +-// +-// For example, hovering over "\u2211" in "foo \u2211 bar" yields: +-// +-// '∑', U+2211, N-ARY SUMMATION +-func hoverLit(pgf *parsego.File, lit *ast.BasicLit, pos token.Pos) (protocol.Range, *hoverResult, error) { +- var ( +- value string // if non-empty, a constant value to format in hover +- r rune // if non-zero, format a description of this rune in hover +- start, end token.Pos // hover span +- ) +- // Extract a rune from the current position. +- // 'Ω', "...Ω...", or 0x03A9 => 'Ω', U+03A9, GREEK CAPITAL LETTER OMEGA +- switch lit.Kind { +- case token.CHAR: +- s, err := strconv.Unquote(lit.Value) +- if err != nil { +- // If the conversion fails, it's because of an invalid syntax, therefore +- // there is no rune to be found. +- return protocol.Range{}, nil, nil +- } +- r, _ = utf8.DecodeRuneInString(s) +- if r == utf8.RuneError { +- return protocol.Range{}, nil, fmt.Errorf("rune error") +- } +- start, end = lit.Pos(), lit.End() +- +- case token.INT: +- // Short literals (e.g. 99 decimal, 07 octal) are uninteresting. +- if len(lit.Value) < 3 { +- return protocol.Range{}, nil, nil +- } +- +- v := constant.MakeFromLiteral(lit.Value, lit.Kind, 0) +- if v.Kind() != constant.Int { +- return protocol.Range{}, nil, nil +- } +- +- switch lit.Value[:2] { +- case "0x", "0X": +- // As a special case, try to recognize hexadecimal literals as runes if +- // they are within the range of valid unicode values. +- if v, ok := constant.Int64Val(v); ok && v > 0 && v <= utf8.MaxRune && utf8.ValidRune(rune(v)) { +- r = rune(v) +- } +- fallthrough +- case "0o", "0O", "0b", "0B": +- // Format the decimal value of non-decimal literals. +- value = v.ExactString() +- start, end = lit.Pos(), lit.End() +- default: +- return protocol.Range{}, nil, nil +- } +- +- case token.STRING: +- // It's a string, scan only if it contains a unicode escape sequence under or before the +- // current cursor position. 
+- litOffset, err := safetoken.Offset(pgf.Tok, lit.Pos()) +- if err != nil { +- return protocol.Range{}, nil, err +- } +- offset, err := safetoken.Offset(pgf.Tok, pos) +- if err != nil { +- return protocol.Range{}, nil, err +- } +- for i := offset - litOffset; i > 0; i-- { +- // Start at the cursor position and search backward for the beginning of a rune escape sequence. +- rr, _ := utf8.DecodeRuneInString(lit.Value[i:]) +- if rr == utf8.RuneError { +- return protocol.Range{}, nil, fmt.Errorf("rune error") +- } +- if rr == '\\' { +- // Got the beginning, decode it. +- var tail string +- r, _, tail, err = strconv.UnquoteChar(lit.Value[i:], '"') +- if err != nil { +- // If the conversion fails, it's because of an invalid syntax, +- // therefore is no rune to be found. +- return protocol.Range{}, nil, nil +- } +- // Only the rune escape sequence part of the string has to be highlighted, recompute the range. +- runeLen := len(lit.Value) - (i + len(tail)) +- start = token.Pos(int(lit.Pos()) + i) +- end = token.Pos(int(start) + runeLen) +- break +- } +- } +- } +- +- if value == "" && r == 0 { // nothing to format +- return protocol.Range{}, nil, nil +- } +- +- rng, err := pgf.PosRange(start, end) +- if err != nil { +- return protocol.Range{}, nil, err +- } +- +- var b strings.Builder +- if value != "" { +- b.WriteString(value) +- } +- if r != 0 { +- runeName := runenames.Name(r) +- if len(runeName) > 0 && runeName[0] == '<' { +- // Check if the rune looks like an HTML tag. If so, trim the surrounding <> +- // characters to work around https://github.com/microsoft/vscode/issues/124042. +- runeName = strings.TrimRight(runeName[1:], ">") +- } +- if b.Len() > 0 { +- b.WriteString(", ") +- } +- if strconv.IsPrint(r) { +- fmt.Fprintf(&b, "'%c', ", r) +- } +- fmt.Fprintf(&b, "U+%04X, %s", r, runeName) +- } +- hover := b.String() +- return rng, &hoverResult{ +- Synopsis: hover, +- FullDocumentation: hover, +- }, nil +-} +- +-func hoverReturnStatement(pgf *parsego.File, path []ast.Node, ret *ast.ReturnStmt) (protocol.Range, *hoverResult, error) { +- var funcType *ast.FuncType +- // Find innermost enclosing function. +- for _, n := range path { +- switch n := n.(type) { +- case *ast.FuncLit: +- funcType = n.Type +- case *ast.FuncDecl: +- funcType = n.Type +- } +- if funcType != nil { +- break +- } +- } +- // Inv: funcType != nil because a ReturnStmt is always enclosed by a function. +- if funcType.Results == nil { +- return protocol.Range{}, nil, nil // no result variables +- } +- rng, err := pgf.PosRange(ret.Pos(), ret.End()) +- if err != nil { +- return protocol.Range{}, nil, err +- } +- // Format the function's result type. +- var buf strings.Builder +- var cfg printer.Config +- fset := token.NewFileSet() +- buf.WriteString("returns (") +- for i, field := range funcType.Results.List { +- if i > 0 { +- buf.WriteString(", ") +- } +- cfg.Fprint(&buf, fset, field.Type) // ignore error +- } +- buf.WriteByte(')') +- return rng, &hoverResult{ +- Signature: buf.String(), +- }, nil +-} +- +-// hoverEmbed computes hover information for a filepath.Match pattern. +-// Assumes that the pattern is relative to the location of fh. 
+-func hoverEmbed(fh file.Handle, rng protocol.Range, pattern string) (protocol.Range, *hoverResult, error) { +- s := &strings.Builder{} +- +- dir := fh.URI().DirPath() +- var matches []string +- err := filepath.WalkDir(dir, func(abs string, d fs.DirEntry, e error) error { +- if e != nil { +- return e +- } +- rel, err := filepath.Rel(dir, abs) +- if err != nil { +- return err +- } +- ok, err := filepath.Match(pattern, rel) +- if err != nil { +- return err +- } +- if ok && !d.IsDir() { +- matches = append(matches, rel) +- } +- return nil +- }) +- if err != nil { +- return protocol.Range{}, nil, err +- } +- +- for _, m := range matches { +- // TODO: Renders each file as separate markdown paragraphs. +- // If forcing (a single) newline is possible it might be more clear. +- fmt.Fprintf(s, "%s\n\n", m) +- } +- +- res := &hoverResult{ +- Signature: fmt.Sprintf("Embedding %q", pattern), +- Synopsis: s.String(), +- FullDocumentation: s.String(), +- } +- return rng, res, nil +-} +- +-// inferredSignatureString is a wrapper around the types.ObjectString function +-// that adds more information to inferred signatures. It will return an empty string +-// if the passed types.Object is not a signature. +-func inferredSignatureString(obj types.Object, qual types.Qualifier, inferred *types.Signature) string { +- // If the signature type was inferred, prefer the inferred signature with a +- // comment showing the generic signature. +- if sig, _ := obj.Type().Underlying().(*types.Signature); sig != nil && sig.TypeParams().Len() > 0 && inferred != nil { +- obj2 := types.NewFunc(obj.Pos(), obj.Pkg(), obj.Name(), inferred) +- str := types.ObjectString(obj2, qual) +- // Try to avoid overly long lines. +- if len(str) > 60 { +- str += "\n" +- } else { +- str += " " +- } +- str += "// " + types.TypeString(sig, qual) +- return str +- } +- return "" +-} +- +-// objectString is a wrapper around the types.ObjectString function. +-// It handles adding more information to the object string. +-// If spec is non-nil, it may be used to format additional declaration +-// syntax, and file must be the token.File describing its positions. +-// +-// Precondition: obj is not a built-in function or method. +-func objectString(obj types.Object, qual types.Qualifier, declPos token.Pos, file *token.File, spec ast.Spec) string { +- str := types.ObjectString(obj, qual) +- +- switch obj := obj.(type) { +- case *types.Func: +- // We fork ObjectString to improve its rendering of methods: +- // specifically, we show the receiver name, +- // and replace the period in (T).f by a space (#62190). +- +- sig := obj.Signature() +- +- var buf bytes.Buffer +- buf.WriteString("func ") +- if recv := sig.Recv(); recv != nil { +- buf.WriteByte('(') +- if _, ok := recv.Type().(*types.Interface); ok { +- // gcimporter creates abstract methods of +- // named interfaces using the interface type +- // (not the named type) as the receiver. +- // Don't print it in full. +- buf.WriteString("interface") +- } else { +- // Show receiver name (go/types does not). +- name := recv.Name() +- if name != "" && name != "_" { +- buf.WriteString(name) +- buf.WriteString(" ") +- } +- types.WriteType(&buf, recv.Type(), qual) +- } +- buf.WriteByte(')') +- buf.WriteByte(' ') // space (go/types uses a period) +- } else if s := qual(obj.Pkg()); s != "" { +- buf.WriteString(s) +- buf.WriteString(".") +- } +- buf.WriteString(obj.Name()) +- types.WriteSignature(&buf, sig, qual) +- str = buf.String() +- +- case *types.Const: +- // Show value of a constant. 
+- var ( +- declaration = obj.Val().String() // default formatted declaration +- comment = "" // if non-empty, a clarifying comment +- ) +- +- // Try to use the original declaration. +- switch obj.Val().Kind() { +- case constant.String: +- // Usually the original declaration of a string doesn't carry much information. +- // Also strings can be very long. So, just use the constant's value. +- +- default: +- if spec, _ := spec.(*ast.ValueSpec); spec != nil { +- for i, name := range spec.Names { +- if declPos == name.Pos() { +- if i < len(spec.Values) { +- originalDeclaration := formatNodeFile(file, spec.Values[i]) +- if originalDeclaration != declaration { +- comment = declaration +- declaration = originalDeclaration +- } +- } +- break +- } +- } +- } +- } +- +- // Special formatting cases. +- switch typ := types.Unalias(obj.Type()).(type) { +- case *types.Named: +- // Try to add a formatted duration as an inline comment. +- pkg := typ.Obj().Pkg() +- if pkg.Path() == "time" && typ.Obj().Name() == "Duration" && obj.Val().Kind() == constant.Int { +- if d, ok := constant.Int64Val(obj.Val()); ok { +- comment = time.Duration(d).String() +- } +- } +- } +- if comment == declaration { +- comment = "" +- } +- +- str += " = " + declaration +- if comment != "" { +- str += " // " + comment +- } +- } +- return str +-} +- +-// HoverDocForObject returns the best doc comment for obj (for which +-// fset provides file/line information). +-// +-// TODO(rfindley): there appears to be zero(!) tests for this functionality. +-func HoverDocForObject(ctx context.Context, snapshot *cache.Snapshot, fset *token.FileSet, obj types.Object) (*ast.CommentGroup, error) { +- if is[*types.TypeName](obj) && is[*types.TypeParam](obj.Type()) { +- return nil, nil +- } +- +- pgf, pos, err := parseFull(ctx, snapshot, fset, obj) +- if err != nil { +- return nil, fmt.Errorf("re-parsing: %v", err) +- } +- +- decl, spec, field := findDeclInfo([]*ast.File{pgf.File}, pos) +- return chooseDocComment(decl, spec, field), nil +-} +- +-func chooseDocComment(decl ast.Decl, spec ast.Spec, field *ast.Field) *ast.CommentGroup { +- if field != nil { +- if field.Doc != nil { +- return field.Doc +- } +- if field.Comment != nil { +- return field.Comment +- } +- return nil +- } +- switch decl := decl.(type) { +- case *ast.FuncDecl: +- return decl.Doc +- case *ast.GenDecl: +- switch spec := spec.(type) { +- case *ast.ValueSpec: +- if spec.Doc != nil { +- return spec.Doc +- } +- if decl.Doc != nil { +- return decl.Doc +- } +- return spec.Comment +- case *ast.TypeSpec: +- if spec.Doc != nil { +- return spec.Doc +- } +- if decl.Doc != nil { +- return decl.Doc +- } +- return spec.Comment +- } +- } +- return nil +-} +- +-// parseFull fully parses the file containing the declaration of obj +-// (for which fset provides file/line information). It returns the +-// parsego.File and the position of the declaration within it. +-// +-// BEWARE: the provided FileSet is used only to interpret the provided +-// pos; the resulting File and Pos may belong to the same or a +-// different FileSet, such as one synthesized by the parser cache, if +-// parse-caching is enabled. 
+-func parseFull(ctx context.Context, snapshot *cache.Snapshot, fset *token.FileSet, obj types.Object) (*parsego.File, token.Pos, error) { +- if isBuiltin(obj) { +- pgf, id, err := builtinDecl(ctx, snapshot, obj) +- if err != nil { +- return nil, 0, err +- } +- return pgf, id.Pos(), err +- } +- pos := obj.Pos() +- f := fset.File(pos) +- if f == nil { +- return nil, 0, bug.Errorf("internal error: no file for position %d", pos) +- } +- +- uri := protocol.URIFromPath(f.Name()) +- fh, err := snapshot.ReadFile(ctx, uri) +- if err != nil { +- return nil, 0, err +- } +- +- pgf, err := snapshot.ParseGo(ctx, fh, parsego.Full) +- if err != nil { +- return nil, 0, err +- } +- +- // Translate pos from original file to new file. +- offset, err := safetoken.Offset(f, pos) +- if err != nil { +- return nil, 0, bug.Errorf("offset out of bounds in %q", uri) +- } +- fullPos, err := safetoken.Pos(pgf.Tok, offset) +- if err != nil { +- return nil, 0, err +- } +- +- return pgf, fullPos, nil +-} +- +-// If pkgURL is non-nil, it should be used to generate doc links. +-func formatHover(h *hoverResult, options *settings.Options, pkgURL func(path PackagePath, fragment string) protocol.URI) (string, error) { +- markdown := options.PreferredContentFormat == protocol.Markdown +- maybeFenced := func(s string) string { +- if s != "" && markdown { +- s = fmt.Sprintf("```go\n%s\n```", strings.Trim(s, "\n")) +- } +- return s +- } +- +- switch options.HoverKind { +- case settings.SingleLine: +- return h.SingleLine, nil +- +- case settings.NoDocumentation: +- return maybeFenced(h.Signature), nil +- +- case settings.Structured: +- b, err := json.Marshal(h) +- if err != nil { +- return "", err +- } +- return string(b), nil +- +- case settings.SynopsisDocumentation, settings.FullDocumentation: +- var sections [][]string // assembled below +- +- // Signature section. +- // +- // For types, we display TypeDecl and Methods, +- // but not Signature, which is redundant (= TypeDecl + "\n" + Methods). +- // For all other symbols, we display Signature; +- // TypeDecl and Methods are empty. +- // TODO(golang/go#70233): When JSON is no more, we could rationalize this. +- if h.typeDecl != "" { +- sections = append(sections, []string{maybeFenced(h.typeDecl)}) +- } else { +- sections = append(sections, []string{maybeFenced(h.Signature)}) +- } +- +- // Doc section. +- var doc string +- switch options.HoverKind { +- case settings.SynopsisDocumentation: +- doc = h.Synopsis +- case settings.FullDocumentation: +- doc = h.FullDocumentation +- } +- if options.PreferredContentFormat == protocol.Markdown { +- doc = DocCommentToMarkdown(doc, options) +- } +- sections = append(sections, []string{ +- doc, +- maybeFenced(h.promotedFields), +- maybeFenced(h.methods), +- }) +- +- // Footer section. +- sections = append(sections, []string{ +- h.footer, +- formatLink(h, options, pkgURL), +- }) +- +- var b strings.Builder +- newline := func() { +- if options.PreferredContentFormat == protocol.Markdown { +- b.WriteString("\n\n") +- } else { +- b.WriteByte('\n') +- } +- } +- for _, section := range sections { +- start := b.Len() +- for _, part := range section { +- if part == "" { +- continue +- } +- // When markdown is a available, insert an hline before the start of +- // the section, if there is content above. 
+- if markdown && b.Len() == start && start > 0 { +- newline() +- b.WriteString("---") +- } +- if b.Len() > 0 { +- newline() +- } +- b.WriteString(part) +- } +- } +- return b.String(), nil +- +- default: +- return "", fmt.Errorf("invalid HoverKind: %v", options.HoverKind) +- } +-} +- +-// StdSymbolOf returns the std lib symbol information of the given obj. +-// It returns nil if the input obj is not an exported standard library symbol. +-func StdSymbolOf(obj types.Object) *stdlib.Symbol { +- if !obj.Exported() || obj.Pkg() == nil { +- return nil +- } +- +- // Symbols that not defined in standard library should return early. +- // TODO(hxjiang): The returned slices is binary searchable. +- symbols := stdlib.PackageSymbols[obj.Pkg().Path()] +- if symbols == nil { +- return nil +- } +- +- // Handle Function, Type, Const & Var. +- if obj != nil && typesinternal.IsPackageLevel(obj) { +- for _, s := range symbols { +- if s.Kind == stdlib.Method || s.Kind == stdlib.Field { +- continue +- } +- if s.Name == obj.Name() { +- return &s +- } +- } +- return nil +- } +- +- // Handle Method. +- if fn, _ := obj.(*types.Func); fn != nil { +- isPtr, named := typesinternal.ReceiverNamed(fn.Signature().Recv()) +- if named != nil && typesinternal.IsPackageLevel(named.Obj()) { +- for _, s := range symbols { +- if s.Kind != stdlib.Method { +- continue +- } +- ptr, recv, name := s.SplitMethod() +- if ptr == isPtr && recv == named.Obj().Name() && name == fn.Name() { +- return &s +- } +- } +- return nil +- } +- } +- +- // Handle Field. +- if v, _ := obj.(*types.Var); v != nil && v.IsField() { +- for _, s := range symbols { +- if s.Kind != stdlib.Field { +- continue +- } +- +- typeName, fieldName := s.SplitField() +- if fieldName != v.Name() { +- continue +- } +- +- typeObj := obj.Pkg().Scope().Lookup(typeName) +- if typeObj == nil { +- continue +- } +- +- if fieldObj, _, _ := types.LookupFieldOrMethod(typeObj.Type(), true, obj.Pkg(), fieldName); obj == fieldObj { +- return &s +- } +- } +- return nil +- } +- +- return nil +-} +- +-// If pkgURL is non-nil, it should be used to generate doc links. +-func formatLink(h *hoverResult, options *settings.Options, pkgURL func(path PackagePath, fragment string) protocol.URI) string { +- if options.LinksInHover == settings.LinksInHover_None || h.LinkPath == "" { +- return "" +- } +- var url protocol.URI +- var caption string +- if pkgURL != nil { // LinksInHover == "gopls" +- // Discard optional module version portion. +- // (Ideally the hoverResult would retain the structure...) +- path := h.LinkPath +- if module, versionDir, ok := strings.Cut(h.LinkPath, "@"); ok { +- // "module@version/dir" +- path = module +- if _, dir, ok := strings.Cut(versionDir, "/"); ok { +- path += "/" + dir +- } +- } +- url = pkgURL(PackagePath(path), h.LinkAnchor) +- caption = "in gopls doc viewer" +- } else { +- if options.LinkTarget == "" { +- return "" +- } +- url = cache.BuildLink(options.LinkTarget, h.LinkPath, h.LinkAnchor) +- caption = "on " + options.LinkTarget +- } +- switch options.PreferredContentFormat { +- case protocol.Markdown: +- return fmt.Sprintf("[`%s` %s](%s)", h.SymbolName, caption, url) +- case protocol.PlainText: +- return "" +- default: +- return url +- } +-} +- +-// findDeclInfo returns the syntax nodes involved in the declaration of the +-// types.Object with position pos, searching the given list of file syntax +-// trees. 
+-// +-// Pos may be the position of the name-defining identifier in a FuncDecl, +-// ValueSpec, TypeSpec, Field, or as a special case the position of +-// Ellipsis.Elt in an ellipsis field. +-// +-// If found, the resulting decl, spec, and field will be the inner-most +-// instance of each node type surrounding pos. +-// +-// If field is non-nil, pos is the position of a field Var. If field is nil and +-// spec is non-nil, pos is the position of a Var, Const, or TypeName object. If +-// both field and spec are nil and decl is non-nil, pos is the position of a +-// Func object. +-// +-// It returns a nil decl if no object-defining node is found at pos. +-// +-// TODO(rfindley): this function has tricky semantics, and may be worth unit +-// testing and/or refactoring. +-func findDeclInfo(files []*ast.File, pos token.Pos) (decl ast.Decl, spec ast.Spec, field *ast.Field) { +- found := false +- +- // Visit the files in search of the node at pos. +- stack := make([]ast.Node, 0, 20) +- +- // Allocate the closure once, outside the loop. +- f := func(n ast.Node, stack []ast.Node) bool { +- if found { +- return false +- } +- +- // Skip subtrees (incl. files) that don't contain the search point. +- if !(n.Pos() <= pos && pos < n.End()) { +- return false +- } +- +- switch n := n.(type) { +- case *ast.Field: +- findEnclosingDeclAndSpec := func() { +- for i := len(stack) - 1; i >= 0; i-- { +- switch n := stack[i].(type) { +- case ast.Spec: +- spec = n +- case ast.Decl: +- decl = n +- return +- } +- } +- } +- +- // Check each field name since you can have +- // multiple names for the same type expression. +- for _, id := range n.Names { +- if id.Pos() == pos { +- field = n +- findEnclosingDeclAndSpec() +- found = true +- return false +- } +- } +- +- // Check *ast.Field itself. This handles embedded +- // fields which have no associated *ast.Ident name. +- if n.Pos() == pos { +- field = n +- findEnclosingDeclAndSpec() +- found = true +- return false +- } +- +- // Also check "X" in "...X". This makes it easy to format variadic +- // signature params properly. +- // +- // TODO(rfindley): I don't understand this comment. How does finding the +- // field in this case make it easier to format variadic signature params? +- if ell, ok := n.Type.(*ast.Ellipsis); ok && ell.Elt != nil && ell.Elt.Pos() == pos { +- field = n +- findEnclosingDeclAndSpec() +- found = true +- return false +- } +- +- case *ast.FuncDecl: +- if n.Name.Pos() == pos { +- decl = n +- found = true +- return false +- } +- +- case *ast.GenDecl: +- for _, s := range n.Specs { +- switch s := s.(type) { +- case *ast.TypeSpec: +- if s.Name.Pos() == pos { +- decl = n +- spec = s +- found = true +- return false +- } +- case *ast.ValueSpec: +- for _, id := range s.Names { +- if id.Pos() == pos { +- decl = n +- spec = s +- found = true +- return false +- } +- } +- } +- } +- } +- return true +- } +- for _, file := range files { +- ast.PreorderStack(file, stack, f) +- if found { +- return decl, spec, field +- } +- } +- +- return nil, nil, nil +-} +- +-type promotedField struct { +- path string // path (e.g. "x.y" through embedded fields) +- field *types.Var +-} +- +-// promotedFields returns the list of accessible promoted fields of a struct type t. +-// (Logic plundered from x/tools/cmd/guru/describe.go.) +-func promotedFields(t types.Type, from *types.Package) []promotedField { +- wantField := func(f *types.Var) bool { +- if !accessibleTo(f, from) { +- return false +- } +- // Check that the field is not shadowed. 
+- obj, _, _ := types.LookupFieldOrMethod(t, true, f.Pkg(), f.Name()) +- return obj == f +- } +- +- var fields []promotedField +- var visit func(t types.Type, stack []*types.Named) +- visit = func(t types.Type, stack []*types.Named) { +- tStruct, ok := typesinternal.Unpointer(t).Underlying().(*types.Struct) +- if !ok { +- return +- } +- fieldloop: +- for i := 0; i < tStruct.NumFields(); i++ { +- f := tStruct.Field(i) +- +- // Handle recursion through anonymous fields. +- if f.Anonymous() { +- if _, named := typesinternal.ReceiverNamed(f); named != nil { +- // If we've already visited this named type +- // on this path, break the cycle. +- for _, x := range stack { +- if x.Origin() == named.Origin() { +- continue fieldloop +- } +- } +- visit(f.Type(), append(stack, named)) +- } +- } +- +- // Save accessible promoted fields. +- if len(stack) > 0 && wantField(f) { +- var path strings.Builder +- for i, t := range stack { +- if i > 0 { +- path.WriteByte('.') +- } +- path.WriteString(t.Obj().Name()) +- } +- fields = append(fields, promotedField{ +- path: path.String(), +- field: f, +- }) +- } +- } +- } +- visit(t, nil) +- +- return fields +-} +- +-func accessibleTo(obj types.Object, pkg *types.Package) bool { +- return obj.Exported() || obj.Pkg() == pkg +-} +- +-// computeSizeOffsetInfo reports the size of obj (if a type or struct +-// field), its wasted space percentage (if a struct type), and its +-// offset (if a struct field). It returns -1 for undefined components. +-func computeSizeOffsetInfo(pkg *cache.Package, path []ast.Node, obj types.Object) (size, wasted, offset int64) { +- size, wasted, offset = -1, -1, -1 +- +- var free typeparams.Free +- sizes := pkg.TypesSizes() +- +- // size (types and fields) +- if v, ok := obj.(*types.Var); ok && v.IsField() || is[*types.TypeName](obj) { +- // If the field's type has free type parameters, +- // its size cannot be computed. +- if !free.Has(obj.Type()) { +- size = sizes.Sizeof(obj.Type()) +- } +- +- // wasted space (struct types) +- if tStruct, ok := obj.Type().Underlying().(*types.Struct); ok && is[*types.TypeName](obj) && size > 0 { +- var fields []*types.Var +- for i := 0; i < tStruct.NumFields(); i++ { +- fields = append(fields, tStruct.Field(i)) +- } +- if len(fields) > 0 { +- // Sort into descending (most compact) order +- // and recompute size of entire struct. +- sort.Slice(fields, func(i, j int) bool { +- return sizes.Sizeof(fields[i].Type()) > +- sizes.Sizeof(fields[j].Type()) +- }) +- offsets := sizes.Offsetsof(fields) +- compactSize := offsets[len(offsets)-1] + sizes.Sizeof(fields[len(fields)-1].Type()) +- wasted = 100 * (size - compactSize) / size +- } +- } +- } +- +- // offset (fields) +- if v, ok := obj.(*types.Var); ok && v.IsField() { +- // Find enclosing struct type. +- var tStruct *types.Struct +- for _, n := range path { +- if n, ok := n.(*ast.StructType); ok { +- t, ok := pkg.TypesInfo().TypeOf(n).(*types.Struct) +- if ok { +- // golang/go#69150: TypeOf(n) was observed not to be a Struct (likely +- // nil) in some cases. +- tStruct = t +- } +- break +- } +- } +- if tStruct != nil { +- var fields []*types.Var +- for i := 0; i < tStruct.NumFields(); i++ { +- f := tStruct.Field(i) +- // If any preceding field's type has free type parameters, +- // its offset cannot be computed. 
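For illustration, a minimal sketch of the go/types size and offset primitives that this computation builds on, assuming the standard gc/amd64 layout rules; the struct and field names are hypothetical:

package main

import (
	"fmt"
	"go/token"
	"go/types"
)

func main() {
	// A struct { a int8; b int64; c int8 }: padding makes its size larger
	// than the sum of the field sizes, which is what the wasted-space figure measures.
	fields := []*types.Var{
		types.NewField(token.NoPos, nil, "a", types.Typ[types.Int8], false),
		types.NewField(token.NoPos, nil, "b", types.Typ[types.Int64], false),
		types.NewField(token.NoPos, nil, "c", types.Typ[types.Int8], false),
	}
	s := types.NewStruct(fields, nil)

	sizes := types.SizesFor("gc", "amd64")
	fmt.Println("size:", sizes.Sizeof(s))            // 24 (includes padding)
	fmt.Println("offsets:", sizes.Offsetsof(fields)) // [0 8 16]
}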
+- if free.Has(f.Type()) { +- break +- } +- fields = append(fields, f) +- if f == v { +- offsets := sizes.Offsetsof(fields) +- offset = offsets[len(offsets)-1] +- break +- } +- } +- } +- } +- +- return +-} +- +-// sizeClass reports the size class for a struct of the specified size, or -1 if unknown.f +-// See GOROOT/src/runtime/msize.go for details. +-func sizeClass(size int64) int64 { +- if size > 1<<16 { +- return -1 // avoid allocation +- } +- // We assume that bytes.Clone doesn't trim, +- // and reports the underlying size class; see TestSizeClass. +- return int64(cap(bytes.Clone(make([]byte, size)))) +-} +diff -urN a/gopls/internal/golang/hover_test.go b/gopls/internal/golang/hover_test.go +--- a/gopls/internal/golang/hover_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/golang/hover_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,22 +0,0 @@ +-// Copyright 2024 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package golang +- +-import "testing" +- +-func TestSizeClass(t *testing.T) { +- // See GOROOT/src/runtime/msize.go for details. +- for _, test := range [...]struct{ size, class int64 }{ +- {8, 8}, +- {9, 16}, +- {16, 16}, +- {17, 24}, +- } { +- got := sizeClass(test.size) +- if got != test.class { +- t.Errorf("sizeClass(%d) = %d, want %d", test.size, got, test.class) +- } +- } +-} +diff -urN a/gopls/internal/golang/identifier.go b/gopls/internal/golang/identifier.go +--- a/gopls/internal/golang/identifier.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/golang/identifier.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,182 +0,0 @@ +-// Copyright 2018 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package golang +- +-import ( +- "errors" +- "go/ast" +- "go/types" +- +- "golang.org/x/tools/internal/typesinternal" +-) +- +-// ErrNoIdentFound is error returned when no identifier is found at a particular position +-var ErrNoIdentFound = errors.New("no identifier found") +- +-// inferredSignature determines the resolved non-generic signature for an +-// identifier in an instantiation expression. +-// +-// If no such signature exists, it returns nil. +-func inferredSignature(info *types.Info, id *ast.Ident) *types.Signature { +- inst := info.Instances[id] +- sig, _ := types.Unalias(inst.Type).(*types.Signature) +- return sig +-} +- +-// searchForEnclosing returns, given the AST path to a SelectorExpr, +-// the exported named type of the innermost implicit field selection. +-// +-// For example, given "new(A).d" where this is (due to embedding) a +-// shorthand for "new(A).b.c.d", it returns the named type of c, +-// if it is exported, otherwise the type of b, or A. +-func searchForEnclosing(info *types.Info, path []ast.Node) *types.TypeName { +- for _, n := range path { +- switch n := n.(type) { +- case *ast.SelectorExpr: +- if sel, ok := info.Selections[n]; ok { +- recv := typesinternal.Unpointer(sel.Recv()) +- +- // Keep track of the last exported type seen. +- var exported *types.TypeName +- if named, ok := types.Unalias(recv).(*types.Named); ok && named.Obj().Exported() { +- exported = named.Obj() +- } +- // We don't want the last element, as that's the field or +- // method itself. 
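For illustration, the bytes.Clone probe used by sizeClass can be exercised on its own, assuming Go 1.20+ for bytes.Clone and that the allocator does not trim capacity, as the code itself notes:

package main

import (
	"bytes"
	"fmt"
)

// sizeClassOf reports the runtime size class of an allocation request by
// checking the capacity the allocator actually hands back.
func sizeClassOf(size int) int {
	return cap(bytes.Clone(make([]byte, size)))
}

func main() {
	for _, n := range []int{8, 9, 16, 17} {
		fmt.Printf("sizeClass(%d) = %d\n", n, sizeClassOf(n)) // 8, 16, 16, 24
	}
}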
+- for _, index := range sel.Index()[:len(sel.Index())-1] { +- if r, ok := recv.Underlying().(*types.Struct); ok { +- recv = typesinternal.Unpointer(r.Field(index).Type()) +- if named, ok := types.Unalias(recv).(*types.Named); ok && named.Obj().Exported() { +- exported = named.Obj() +- } +- } +- } +- return exported +- } +- } +- } +- return nil +-} +- +-// typeToObject returns the relevant type name for the given type, after +-// unwrapping pointers, arrays, slices, channels, and function signatures with +-// a single non-error result, and ignoring built-in named types. +-func typeToObject(typ types.Type) *types.TypeName { +- switch typ := typ.(type) { +- case *types.Alias: +- return typ.Obj() +- case *types.Named: +- // TODO(rfindley): this should use typeparams.NamedTypeOrigin. +- return typ.Obj() +- case *types.Pointer: +- return typeToObject(typ.Elem()) +- case *types.Array: +- return typeToObject(typ.Elem()) +- case *types.Slice: +- return typeToObject(typ.Elem()) +- case *types.Chan: +- return typeToObject(typ.Elem()) +- case *types.Signature: +- // Try to find a return value of a named type. If there's only one +- // such value, jump to its type definition. +- var res *types.TypeName +- +- results := typ.Results() +- for i := 0; i < results.Len(); i++ { +- obj := typeToObject(results.At(i).Type()) +- if obj == nil || hasErrorType(obj) { +- // Skip builtins. TODO(rfindley): should comparable be handled here as well? +- continue +- } +- if res != nil { +- // The function/method must have only one return value of a named type. +- return nil +- } +- +- res = obj +- } +- return res +- default: +- return nil +- } +-} +- +-func hasErrorType(obj types.Object) bool { +- return types.IsInterface(obj.Type()) && obj.Pkg() == nil && obj.Name() == "error" +-} +- +-// typeSwitchImplicits returns all the implicit type switch objects that +-// correspond to the leaf *ast.Ident. It also returns the original type +-// associated with the identifier (outside of a case clause). +-func typeSwitchImplicits(info *types.Info, path []ast.Node) ([]types.Object, types.Type) { +- ident, _ := path[0].(*ast.Ident) +- if ident == nil { +- return nil, nil +- } +- +- var ( +- ts *ast.TypeSwitchStmt +- assign *ast.AssignStmt +- cc *ast.CaseClause +- obj = info.ObjectOf(ident) +- ) +- +- // Walk our ancestors to determine if our leaf ident refers to a +- // type switch variable, e.g. the "a" from "switch a := b.(type)". +-Outer: +- for i := 1; i < len(path); i++ { +- switch n := path[i].(type) { +- case *ast.AssignStmt: +- // Check if ident is the "a" in "a := foo.(type)". The "a" in +- // this case has no types.Object, so check for ident equality. +- if len(n.Lhs) == 1 && n.Lhs[0] == ident { +- assign = n +- } +- case *ast.CaseClause: +- // Check if ident is a use of "a" within a case clause. Each +- // case clause implicitly maps "a" to a different types.Object, +- // so check if ident's object is the case clause's implicit +- // object. +- if obj != nil && info.Implicits[n] == obj { +- cc = n +- } +- case *ast.TypeSwitchStmt: +- // Look for the type switch that owns our previously found +- // *ast.AssignStmt or *ast.CaseClause. +- if n.Assign == assign { +- ts = n +- break Outer +- } +- +- for _, stmt := range n.Body.List { +- if stmt == cc { +- ts = n +- break Outer +- } +- } +- } +- } +- if ts == nil { +- return nil, nil +- } +- // Our leaf ident refers to a type switch variable. Fan out to the +- // type switch's implicit case clause objects. 
+- var objs []types.Object +- for _, cc := range ts.Body.List { +- if ccObj := info.Implicits[cc]; ccObj != nil { +- objs = append(objs, ccObj) +- } +- } +- // The right-hand side of a type switch should only have one +- // element, and we need to track its type in order to generate +- // hover information for implicit type switch variables. +- var typ types.Type +- if assign, ok := ts.Assign.(*ast.AssignStmt); ok && len(assign.Rhs) == 1 { +- if rhs := assign.Rhs[0].(*ast.TypeAssertExpr); ok { +- typ = info.TypeOf(rhs.X) // may be nil +- } +- } +- return objs, typ +-} +diff -urN a/gopls/internal/golang/identifier_test.go b/gopls/internal/golang/identifier_test.go +--- a/gopls/internal/golang/identifier_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/golang/identifier_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,104 +0,0 @@ +-// Copyright 2020 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package golang +- +-import ( +- "bytes" +- "go/ast" +- "go/parser" +- "go/token" +- "go/types" +- "testing" +-) +- +-func TestSearchForEnclosing(t *testing.T) { +- tests := []struct { +- desc string +- // For convenience, consider the first occurrence of the identifier "X" in +- // src. +- src string +- // By convention, "" means no type found. +- wantTypeName string +- }{ +- { +- // TODO(rFindley): is this correct, or do we want to resolve I2 here? +- desc: "embedded interface in interface", +- src: `package a; var y = i1.X; type i1 interface {I2}; type I2 interface{X()}`, +- wantTypeName: "", +- }, +- { +- desc: "embedded interface in struct", +- src: `package a; var y = t.X; type t struct {I}; type I interface{X()}`, +- wantTypeName: "I", +- }, +- { +- desc: "double embedding", +- src: `package a; var y = t1.X; type t1 struct {t2}; type t2 struct {I}; type I interface{X()}`, +- wantTypeName: "I", +- }, +- } +- +- for _, test := range tests { +- t.Run(test.desc, func(t *testing.T) { +- fset := token.NewFileSet() +- file, err := parser.ParseFile(fset, "a.go", test.src, parser.AllErrors|parser.SkipObjectResolution) +- if err != nil { +- t.Fatal(err) +- } +- column := 1 + bytes.IndexRune([]byte(test.src), 'X') +- pos := posAt(1, column, fset, "a.go") +- path := pathEnclosingObjNode(file, pos) +- if path == nil { +- t.Fatalf("no ident found at (1, %d)", column) +- } +- info := newInfo() +- if _, err = (*types.Config)(nil).Check("p", fset, []*ast.File{file}, info); err != nil { +- t.Fatal(err) +- } +- obj := searchForEnclosing(info, path) +- if obj == nil { +- if test.wantTypeName != "" { +- t.Errorf("searchForEnclosing(...) = , want %q", test.wantTypeName) +- } +- return +- } +- if got := obj.Name(); got != test.wantTypeName { +- t.Errorf("searchForEnclosing(...) = %q, want %q", got, test.wantTypeName) +- } +- }) +- } +-} +- +-// posAt returns the token.Pos corresponding to the 1-based (line, column) +-// coordinates in the file fname of fset. +-func posAt(line, column int, fset *token.FileSet, fname string) token.Pos { +- var tok *token.File +- fset.Iterate(func(tf *token.File) bool { +- if tf.Name() == fname { +- tok = tf +- return false +- } +- return true +- }) +- if tok == nil { +- return token.NoPos +- } +- start := tok.LineStart(line) +- return start + token.Pos(column-1) +-} +- +-// newInfo returns a types.Info with all maps populated. 
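For illustration, a small sketch of the Implicits map that typeSwitchImplicits fans out over: each case clause of a type switch introduces its own implicit object for the switch variable. This uses only standard go/types, no gopls API:

package main

import (
	"fmt"
	"go/ast"
	"go/parser"
	"go/token"
	"go/types"
)

const src = `package p
func f(x any) {
	switch v := x.(type) {
	case int:
		_ = v
	case string:
		_ = v
	}
}`

func main() {
	fset := token.NewFileSet()
	file, err := parser.ParseFile(fset, "p.go", src, 0)
	if err != nil {
		panic(err)
	}
	info := &types.Info{Implicits: make(map[ast.Node]types.Object)}
	if _, err := new(types.Config).Check("p", fset, []*ast.File{file}, info); err != nil {
		panic(err)
	}
	// One implicit object per case clause, each carrying the type of its case.
	for n, obj := range info.Implicits {
		if _, ok := n.(*ast.CaseClause); ok {
			fmt.Printf("case at line %d: v is %s\n", fset.Position(n.Pos()).Line, obj.Type())
		}
	}
}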
+-func newInfo() *types.Info { +- info := &types.Info{ +- Types: make(map[ast.Expr]types.TypeAndValue), +- Defs: make(map[*ast.Ident]types.Object), +- Uses: make(map[*ast.Ident]types.Object), +- Implicits: make(map[ast.Node]types.Object), +- Selections: make(map[*ast.SelectorExpr]*types.Selection), +- Scopes: make(map[ast.Node]*types.Scope), +- FileVersions: make(map[*ast.File]string), +- } +- return info +-} +diff -urN a/gopls/internal/golang/implementation.go b/gopls/internal/golang/implementation.go +--- a/gopls/internal/golang/implementation.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/golang/implementation.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,1133 +0,0 @@ +-// Copyright 2019 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package golang +- +-import ( +- "context" +- "errors" +- "fmt" +- "go/ast" +- "go/token" +- "go/types" +- "iter" +- "reflect" +- "slices" +- "strings" +- "sync" +- +- "golang.org/x/sync/errgroup" +- "golang.org/x/tools/go/ast/edge" +- "golang.org/x/tools/go/ast/inspector" +- "golang.org/x/tools/go/types/typeutil" +- "golang.org/x/tools/gopls/internal/cache" +- "golang.org/x/tools/gopls/internal/cache/metadata" +- "golang.org/x/tools/gopls/internal/cache/methodsets" +- "golang.org/x/tools/gopls/internal/cache/parsego" +- "golang.org/x/tools/gopls/internal/file" +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/gopls/internal/util/bug" +- "golang.org/x/tools/gopls/internal/util/safetoken" +- "golang.org/x/tools/internal/event" +- "golang.org/x/tools/internal/moreiters" +- "golang.org/x/tools/internal/typesinternal" +-) +- +-// This file defines the new implementation of the 'implementation' +-// operator that does not require type-checker data structures for an +-// unbounded number of packages. +-// +-// TODO(adonovan): +-// - Audit to ensure robustness in face of type errors. +-// - Eliminate false positives due to 'tricky' cases of the global algorithm. +-// - Ensure we have test coverage of: +-// type aliases +-// nil, PkgName, Builtin (all errors) +-// any (empty result) +-// method of unnamed interface type (e.g. var x interface { f() }) +-// (the global algorithm may find implementations of this type +-// but will not include it in the index.) +- +-// Implementation returns a new sorted array of locations of +-// declarations of types that implement (or are implemented by) the +-// type referred to at the given position. +-// +-// If the position denotes a method, the computation is applied to its +-// receiver type and then its corresponding methods are returned. +-func Implementation(ctx context.Context, snapshot *cache.Snapshot, f file.Handle, pp protocol.Position) ([]protocol.Location, error) { +- ctx, done := event.Start(ctx, "golang.Implementation") +- defer done() +- +- locs, err := implementations(ctx, snapshot, f, pp) +- if err != nil { +- return nil, err +- } +- slices.SortFunc(locs, protocol.CompareLocation) +- locs = slices.Compact(locs) // de-duplicate +- return locs, nil +-} +- +-func implementations(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle, pp protocol.Position) ([]protocol.Location, error) { +- // Type check the current package. +- pkg, pgf, err := NarrowestPackageForFile(ctx, snapshot, fh.URI()) +- if err != nil { +- return nil, err +- } +- pos, err := pgf.PositionPos(pp) +- if err != nil { +- return nil, err +- } +- +- // Find implementations based on func signatures. 
+- if locs, err := implFuncs(pkg, pgf, pos); err != errNotHandled { +- return locs, err +- } +- +- // Find implementations based on method sets. +- var ( +- locsMu sync.Mutex +- locs []protocol.Location +- ) +- // relation=0 here means infer direction of the relation +- // (Supertypes/Subtypes) from concreteness of query type/method. +- // (Ideally the implementations request would provide directionality +- // so that one could ask for, say, the superinterfaces of io.ReadCloser; +- // see https://github.com/golang/go/issues/68641#issuecomment-2269293762.) +- const relation = methodsets.TypeRelation(0) +- err = implementationsMsets(ctx, snapshot, pkg, pgf, pos, relation, func(_ metadata.PackagePath, _ string, _ bool, loc protocol.Location) { +- locsMu.Lock() +- locs = append(locs, loc) +- locsMu.Unlock() +- }) +- return locs, err +-} +- +-// An implYieldFunc is a callback called for each match produced by the implementation machinery. +-// - name describes the type or method. +-// - abstract indicates that the result is an interface type or interface method. +-// +-// implYieldFunc implementations must be concurrency-safe. +-type implYieldFunc func(pkgpath metadata.PackagePath, name string, abstract bool, loc protocol.Location) +- +-// implementationsMsets computes implementations of the type at the +-// specified position, by method sets. +-// +-// rel specifies the desired direction of the relation: Subtype, +-// Supertype, or both. As a special case, zero means infer the +-// direction from the concreteness of the query object: Supertype for +-// a concrete type, Subtype for an interface. +-// +-// It is shared by Implementations and TypeHierarchy. +-func implementationsMsets(ctx context.Context, snapshot *cache.Snapshot, pkg *cache.Package, pgf *parsego.File, pos token.Pos, rel methodsets.TypeRelation, yield implYieldFunc) error { +- // First, find the object referenced at the cursor. +- // The object may be declared in a different package. +- obj, err := implementsObj(pkg.TypesInfo(), pgf.File, pos) +- if err != nil { +- return err +- } +- +- // If the resulting object has a position, we can expand the search to types +- // in the declaring package(s). In this case, we must re-type check these +- // packages in the same realm. +- var ( +- declOffset int +- declURI protocol.DocumentURI +- localPkgs []*cache.Package +- ) +- if obj.Pos().IsValid() { // no local package for error or error.Error +- declPosn := safetoken.StartPosition(pkg.FileSet(), obj.Pos()) +- declOffset = declPosn.Offset +- // Type-check the declaring package (incl. variants) for use +- // by the "local" search, which uses type information to +- // enumerate all types within the package that satisfy the +- // query type, even those defined local to a function. +- declURI = protocol.URIFromPath(declPosn.Filename) +- declMPs, err := snapshot.MetadataForFile(ctx, declURI, true) +- if err != nil { +- return err +- } +- if len(declMPs) == 0 { +- return fmt.Errorf("no packages for file %s", declURI) +- } +- ids := make([]PackageID, len(declMPs)) +- for i, mp := range declMPs { +- ids[i] = mp.ID +- } +- localPkgs, err = snapshot.TypeCheck(ctx, ids...) +- if err != nil { +- return err +- } +- } +- +- pkg = nil // no longer used +- +- // Is the selected identifier a type name or method? +- // (For methods, report the corresponding method names.) 
+- queryType, queryMethod := typeOrMethod(obj) +- if queryType == nil { +- return bug.Errorf("%s is not a type or method", obj.Name()) // should have been handled by implementsObj +- } +- +- // Compute the method-set fingerprint used as a key to the global search. +- key, hasMethods := methodsets.KeyOf(queryType) +- if !hasMethods { +- // A type with no methods yields an empty result. +- // (No point reporting that every type satisfies 'any'.) +- return nil +- } +- +- // If the client specified no relation, infer it +- // from the concreteness of the query type. +- if rel == 0 { +- rel = cond(types.IsInterface(queryType), +- methodsets.Subtype, +- methodsets.Supertype) +- } +- +- // The global search needs to look at every package in the +- // forward transitive closure of the workspace; see package +- // ./methodsets. +- // +- // For now we do all the type checking before beginning the search. +- // TODO(adonovan): opt: search in parallel topological order +- // so that we can overlap index lookup with typechecking. +- // I suspect a number of algorithms on the result of TypeCheck could +- // be optimized by being applied as soon as each package is available. +- globalMetas, err := snapshot.AllMetadata(ctx) +- if err != nil { +- return err +- } +- metadata.RemoveIntermediateTestVariants(&globalMetas) +- globalIDs := make([]PackageID, 0, len(globalMetas)) +- +- var pkgPath PackagePath +- if obj.Pkg() != nil { // nil for error +- pkgPath = PackagePath(obj.Pkg().Path()) +- } +- for _, mp := range globalMetas { +- if mp.PkgPath == pkgPath { +- continue // declaring package is handled by local implementation +- } +- globalIDs = append(globalIDs, mp.ID) +- } +- indexes, err := snapshot.MethodSets(ctx, globalIDs...) +- if err != nil { +- return fmt.Errorf("querying method sets: %v", err) +- } +- +- // Search local and global packages in parallel. +- var group errgroup.Group +- +- // local search +- for _, pkg := range localPkgs { +- // The localImplementations algorithm assumes needle and haystack +- // belong to a single package (="realm" of types symbol identities), +- // so we need to recompute obj for each local package. +- // (By contrast the global algorithm is name-based.) +- group.Go(func() error { +- pkgID := pkg.Metadata().ID +- +- // Find declaring identifier based on (URI, offset) +- // so that localImplementations can locate the +- // corresponding obj/queryType/queryMethod in pkg. +- declFile, err := pkg.File(declURI) +- if err != nil { +- return err // "can't happen" +- } +- pos, err := safetoken.Pos(declFile.Tok, declOffset) +- if err != nil { +- return err // also "can't happen" +- } +- path := pathEnclosingObjNode(declFile.File, pos) +- if path == nil { +- return ErrNoIdentFound // checked earlier +- } +- id, ok := path[0].(*ast.Ident) +- if !ok { +- return ErrNoIdentFound // checked earlier +- } +- if err := localImplementations(ctx, snapshot, pkg, id, rel, yield); err != nil { +- return fmt.Errorf("querying local implementations %q: %v", pkgID, err) +- } +- return nil +- }) +- } +- // global search +- for _, index := range indexes { +- group.Go(func() error { +- for _, res := range index.Search(key, rel, queryMethod) { +- loc := res.Location +- // Map offsets to protocol.Locations in parallel (may involve I/O). 
+- group.Go(func() error { +- ploc, err := offsetToLocation(ctx, snapshot, loc.Filename, loc.Start, loc.End) +- if err != nil { +- return err +- } +- yield(index.PkgPath, res.TypeName, res.IsInterface, ploc) +- return nil +- }) +- } +- return nil +- }) +- } +- return group.Wait() +-} +- +-// typeOrMethod returns the type and optional method to use in an +-// Implementations operation on the specified symbol. +-// It returns a nil type to indicate that the query should not proceed. +-// +-// (It is factored out to allow it to be used both in the query package +-// then (in [localImplementations]) again in the declaring package.) +-func typeOrMethod(obj types.Object) (types.Type, *types.Func) { +- switch obj := obj.(type) { +- case *types.TypeName: +- return obj.Type(), nil +- case *types.Func: +- // For methods, use the receiver type, which may be anonymous. +- if recv := obj.Signature().Recv(); recv != nil { +- return recv.Type(), obj +- } +- } +- return nil, nil +-} +- +-// offsetToLocation converts an offset-based position to a protocol.Location, +-// which requires reading the file. +-func offsetToLocation(ctx context.Context, snapshot *cache.Snapshot, filename string, start, end int) (protocol.Location, error) { +- uri := protocol.URIFromPath(filename) +- fh, err := snapshot.ReadFile(ctx, uri) +- if err != nil { +- return protocol.Location{}, err // cancelled, perhaps +- } +- content, err := fh.Content() +- if err != nil { +- return protocol.Location{}, err // nonexistent or deleted ("can't happen") +- } +- m := protocol.NewMapper(uri, content) +- return m.OffsetLocation(start, end) +-} +- +-// implementsObj returns the object to query for implementations, +-// which is a type name or method. +-func implementsObj(info *types.Info, file *ast.File, pos token.Pos) (types.Object, error) { +- // This function inherits the limitation of its predecessor in +- // requiring the selection to be an identifier (of a type or +- // method). But there's no fundamental reason why one could +- // not pose this query about any selected piece of syntax that +- // has a type and thus a method set. +- // (If LSP was more thorough about passing text selections as +- // intervals to queries, you could ask about the method set of a +- // subexpression such as x.f().) +- +- // TODO(adonovan): simplify: use objectsAt? +- path := pathEnclosingObjNode(file, pos) +- if path == nil { +- return nil, ErrNoIdentFound +- } +- id, ok := path[0].(*ast.Ident) +- if !ok { +- return nil, ErrNoIdentFound +- } +- +- // Is the object a type or method? Reject other kinds. +- obj := info.Uses[id] +- if obj == nil { +- // Check uses first (unlike ObjectOf) so that T in +- // struct{T} is treated as a reference to a type, +- // not a declaration of a field. +- obj = info.Defs[id] +- } +- switch obj := obj.(type) { +- case *types.TypeName: +- // ok +- case *types.Func: +- if obj.Signature().Recv() == nil { +- return nil, fmt.Errorf("%s is a function, not a method (query at 'func' token to find matching signatures)", id.Name) +- } +- case nil: +- return nil, fmt.Errorf("%s denotes unknown object", id.Name) +- default: +- // e.g. *types.Var -> "var". +- kind := strings.ToLower(strings.TrimPrefix(reflect.TypeOf(obj).String(), "*types.")) +- // TODO(adonovan): improve upon "nil is a nil, not a type". 
+- return nil, fmt.Errorf("%s is a %s, not a type", id.Name, kind) +- } +- +- return obj, nil +-} +- +-// localImplementations searches within pkg for declarations of all +-// supertypes (if rel contains Supertype) or subtypes (if rel contains +-// Subtype) of the type or method declared by id within the same +-// package, and returns a new unordered array of their locations. +-// +-// If method is non-nil, the function instead returns the location +-// of each type's method (if any) of that ID. +-// +-// ("Local" refers to the search within the same package, but this +-// function's results may include type declarations that are local to +-// a function body. The global search index excludes such types +-// because reliably naming such types is hard.) +-// +-// Results are reported via the yield function. +-func localImplementations(ctx context.Context, snapshot *cache.Snapshot, pkg *cache.Package, id *ast.Ident, rel methodsets.TypeRelation, yield implYieldFunc) error { +- queryType, queryMethod := typeOrMethod(pkg.TypesInfo().Defs[id]) +- if queryType == nil { +- return bug.Errorf("can't find corresponding symbol for %q in package %q", id.Name, pkg) +- } +- queryType = methodsets.EnsurePointer(queryType) +- +- var msets typeutil.MethodSetCache +- +- matches := func(candidateType types.Type) bool { +- // Test the direction of the relation. +- // The client may request either direction or both +- // (e.g. when the client is References), +- // and the Result reports each test independently; +- // both tests succeed when comparing identical +- // interface types. +- var got methodsets.TypeRelation +- if rel&methodsets.Supertype != 0 && implements(&msets, queryType, candidateType) { +- got |= methodsets.Supertype +- } +- if rel&methodsets.Subtype != 0 && implements(&msets, candidateType, queryType) { +- got |= methodsets.Subtype +- } +- return got != 0 +- } +- +- // Scan through all type declarations in the syntax. +- for _, pgf := range pkg.CompiledGoFiles() { +- for cur := range pgf.Cursor.Preorder((*ast.TypeSpec)(nil)) { +- spec := cur.Node().(*ast.TypeSpec) +- if spec.Name == id { +- continue // avoid self-comparison of query type +- } +- def := pkg.TypesInfo().Defs[spec.Name] +- if def == nil { +- continue // "can't happen" for types +- } +- if def.(*types.TypeName).IsAlias() { +- continue // skip type aliases to avoid duplicate reporting +- } +- candidateType := methodsets.EnsurePointer(def.Type()) +- if !matches(candidateType) { +- continue +- } +- +- // Ignore types with empty method sets. +- // (No point reporting that every type satisfies 'any'.) +- mset := msets.MethodSet(candidateType) +- if mset.Len() == 0 { +- continue +- } +- +- isInterface := types.IsInterface(def.Type()) +- +- if queryMethod == nil { +- // Found matching type. +- loc := mustLocation(pgf, spec.Name) +- yield(pkg.Metadata().PkgPath, spec.Name.Name, isInterface, loc) +- continue +- } +- +- // Find corresponding method. +- // +- // We can't use LookupFieldOrMethod because it requires +- // the methodID's types.Package, which we don't know. +- // We could recursively search pkg.Imports for it, +- // but it's easier to walk the method set. 
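For illustration, walking a method set with the standard go/types API, the same approach the loop here takes; EnsurePointer matters because the method set of *T includes both value and pointer receivers:

package main

import (
	"fmt"
	"go/ast"
	"go/parser"
	"go/token"
	"go/types"
)

const src = `package p
type T struct{}
func (T) Foo()  {}
func (*T) Bar() {}`

func main() {
	fset := token.NewFileSet()
	file, err := parser.ParseFile(fset, "p.go", src, 0)
	if err != nil {
		panic(err)
	}
	pkg, err := new(types.Config).Check("p", fset, []*ast.File{file}, nil)
	if err != nil {
		panic(err)
	}
	// The method set of *T has Foo and Bar; the method set of T alone has only Foo.
	ptr := types.NewPointer(pkg.Scope().Lookup("T").Type())
	mset := types.NewMethodSet(ptr)
	for i := 0; i < mset.Len(); i++ {
		fmt.Println(mset.At(i).Obj().Name())
	}
}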
+- for i := 0; i < mset.Len(); i++ { +- m := mset.At(i).Obj() +- if m.Pos() == id.Pos() { +- continue // avoid self-comparison of query method +- } +- if m.Id() == queryMethod.Id() { +- posn := safetoken.StartPosition(pkg.FileSet(), m.Pos()) +- loc, err := offsetToLocation(ctx, snapshot, posn.Filename, posn.Offset, posn.Offset+len(m.Name())) +- if err != nil { +- return err +- } +- yield(pkg.Metadata().PkgPath, m.Name(), isInterface, loc) +- break +- } +- } +- } +- } +- +- // Special case: for types that satisfy error, +- // report error in builtin.go (see #59527). +- // +- // (An inconsistency: we always report the type error +- // even when the query was for the method error.Error.) +- if matches(errorType) { +- loc, err := errorLocation(ctx, snapshot) +- if err != nil { +- return err +- } +- yield("", "error", true, loc) +- } +- +- return nil +-} +- +-var errorType = types.Universe.Lookup("error").Type() +- +-// errorLocation returns the location of the 'error' type in builtin.go. +-func errorLocation(ctx context.Context, snapshot *cache.Snapshot) (protocol.Location, error) { +- pgf, err := snapshot.BuiltinFile(ctx) +- if err != nil { +- return protocol.Location{}, err +- } +- for _, decl := range pgf.File.Decls { +- if decl, ok := decl.(*ast.GenDecl); ok { +- for _, spec := range decl.Specs { +- if spec, ok := spec.(*ast.TypeSpec); ok && spec.Name.Name == "error" { +- return pgf.NodeLocation(spec.Name) +- } +- } +- } +- } +- return protocol.Location{}, fmt.Errorf("built-in error type not found") +-} +- +-// implements reports whether x implements y. +-// If one or both types are generic, the result indicates whether the +-// interface may be implemented under some instantiation. +-func implements(msets *typeutil.MethodSetCache, x, y types.Type) bool { +- if !types.IsInterface(y) { +- return false +- } +- +- // For each interface method of y, check that x has it too. +- // It is not necessary to compute x's complete method set. +- // +- // If y is a constraint interface (!y.IsMethodSet()), we +- // ignore non-interface terms, leading to occasional spurious +- // matches. We could in future filter based on them, but it +- // would lead to divergence with the global (fingerprint-based) +- // algorithm, which operates only on methodsets. +- ymset := msets.MethodSet(y) +- for i := range ymset.Len() { +- ym := ymset.At(i).Obj().(*types.Func) +- +- xobj, _, _ := types.LookupFieldOrMethod(x, false, ym.Pkg(), ym.Name()) +- xm, ok := xobj.(*types.Func) +- if !ok { +- return false // x lacks a method of y +- } +- if !unify(xm.Signature(), ym.Signature(), nil) { +- return false // signatures do not match +- } +- } +- return true // all methods found +-} +- +-// unify reports whether the types of x and y match. +-// +-// If unifier is nil, unify reports only whether it succeeded. +-// If unifier is non-nil, it is populated with the values +-// of type parameters determined during a successful unification. +-// If unification succeeds without binding a type parameter, that parameter +-// will not be present in the map. +-// +-// On entry, the unifier's contents are treated as the values of already-bound type +-// parameters, constraining the unification. +-// +-// For example, if unifier is an empty (not nil) map on entry, then the types +-// +-// func[T any](T, int) +-// +-// and +-// +-// func[U any](bool, U) +-// +-// will unify, with T=bool and U=int. 
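For illustration, the same pair of signatures checked directly with go/types: instantiating one with T=bool and the other with U=int yields identical non-generic signatures, which is the result unify encodes in its bindings. A minimal sketch using only standard library calls:

package main

import (
	"fmt"
	"go/ast"
	"go/parser"
	"go/token"
	"go/types"
)

const src = `package p
func F[T any](T, int) {}
func G[U any](bool, U) {}`

func main() {
	fset := token.NewFileSet()
	file, err := parser.ParseFile(fset, "p.go", src, 0)
	if err != nil {
		panic(err)
	}
	pkg, err := new(types.Config).Check("p", fset, []*ast.File{file}, nil)
	if err != nil {
		panic(err)
	}
	f := pkg.Scope().Lookup("F").(*types.Func)
	g := pkg.Scope().Lookup("G").(*types.Func)
	fBool, err := types.Instantiate(nil, f.Type(), []types.Type{types.Typ[types.Bool]}, true)
	if err != nil {
		panic(err)
	}
	gInt, err := types.Instantiate(nil, g.Type(), []types.Type{types.Typ[types.Int]}, true)
	if err != nil {
		panic(err)
	}
	fmt.Println(types.Identical(fBool, gInt)) // true: both are func(bool, int)
}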
+-// That is, the contents of unifier after unify returns will be +-// +-// {T: bool, U: int} +-// +-// where "T" is the type parameter T and "bool" is the basic type for bool. +-// +-// But if unifier is {T: int} is int on entry, then unification will fail, because T +-// does not unify with bool. +-// +-// Unify does not preserve aliases. For example, given the following: +-// +-// type String = string +-// type A[T] = T +-// +-// unification succeeds with T bound to string, not String. +-// +-// See also: unify in cache/methodsets/fingerprint, which implements +-// unification for type fingerprints, for the global index. +-// +-// BUG: literal interfaces are not handled properly. But this function is currently +-// used only for signatures, where such types are very rare. +-func unify(x, y types.Type, unifier map[*types.TypeParam]types.Type) bool { +- // bindings[tp] is the binding for type parameter tp. +- // Although type parameters are nominally bound to types, each bindings[tp] +- // is a pointer to a type, so unbound variables that unify can share a binding. +- bindings := map[*types.TypeParam]*types.Type{} +- +- // Bindings is initialized with pointers to the provided types. +- for tp, t := range unifier { +- bindings[tp] = &t +- } +- +- // bindingFor returns the *types.Type in bindings for tp if tp is not nil, +- // creating one if needed. +- bindingFor := func(tp *types.TypeParam) *types.Type { +- if tp == nil { +- return nil +- } +- b := bindings[tp] +- if b == nil { +- b = new(types.Type) +- bindings[tp] = b +- } +- return b +- } +- +- // bind sets b to t if b does not occur in t. +- bind := func(b *types.Type, t types.Type) bool { +- for tp := range typeParams(t) { +- if b == bindings[tp] { +- return false // failed "occurs" check +- } +- } +- *b = t +- return true +- } +- +- // uni performs the actual unification. +- depth := 0 +- var uni func(x, y types.Type) bool +- uni = func(x, y types.Type) bool { +- // Panic if recursion gets too deep, to detect bugs before +- // overflowing the stack. +- depth++ +- defer func() { depth-- }() +- if depth > 100 { +- panic("unify: max depth exceeded") +- } +- +- x = types.Unalias(x) +- y = types.Unalias(y) +- +- tpx, _ := x.(*types.TypeParam) +- tpy, _ := y.(*types.TypeParam) +- if tpx != nil || tpy != nil { +- // Identical type params unify. +- if tpx == tpy { +- return true +- } +- bx := bindingFor(tpx) +- by := bindingFor(tpy) +- +- // If both args are type params and neither is bound, have them share a binding. +- if bx != nil && by != nil && *bx == nil && *by == nil { +- // Arbitrarily give y's binding to x. +- bindings[tpx] = by +- return true +- } +- // Treat param bindings like original args in what follows. +- if bx != nil && *bx != nil { +- x = *bx +- } +- if by != nil && *by != nil { +- y = *by +- } +- // If the x param is unbound, bind it to y. +- if bx != nil && *bx == nil { +- return bind(bx, y) +- } +- // If the y param is unbound, bind it to x. +- if by != nil && *by == nil { +- return bind(by, x) +- } +- // Unify the binding of a bound parameter. +- return uni(x, y) +- } +- +- // Neither arg is a type param. 
+- +- if reflect.TypeOf(x) != reflect.TypeOf(y) { +- return false // mismatched types +- } +- +- switch x := x.(type) { +- case *types.Array: +- y := y.(*types.Array) +- return x.Len() == y.Len() && +- uni(x.Elem(), y.Elem()) +- +- case *types.Basic: +- y := y.(*types.Basic) +- return x.Kind() == y.Kind() +- +- case *types.Chan: +- y := y.(*types.Chan) +- return x.Dir() == y.Dir() && +- uni(x.Elem(), y.Elem()) +- +- case *types.Interface: +- y := y.(*types.Interface) +- // TODO(adonovan,jba): fix: for correctness, we must check +- // that both interfaces have the same set of methods +- // modulo type parameters, while avoiding the risk of +- // unbounded interface recursion. +- // +- // Since non-empty interface literals are vanishingly +- // rare in methods signatures, we ignore this for now. +- // If more precision is needed we could compare method +- // names and arities, still without full recursion. +- return x.NumMethods() == y.NumMethods() +- +- case *types.Map: +- y := y.(*types.Map) +- return uni(x.Key(), y.Key()) && +- uni(x.Elem(), y.Elem()) +- +- case *types.Named: +- y := y.(*types.Named) +- if x.Origin() != y.Origin() { +- return false // different named types +- } +- xtargs := x.TypeArgs() +- ytargs := y.TypeArgs() +- if xtargs.Len() != ytargs.Len() { +- return false // arity error (ill-typed) +- } +- for i := range xtargs.Len() { +- if !uni(xtargs.At(i), ytargs.At(i)) { +- return false // mismatched type args +- } +- } +- return true +- +- case *types.Pointer: +- y := y.(*types.Pointer) +- return uni(x.Elem(), y.Elem()) +- +- case *types.Signature: +- y := y.(*types.Signature) +- return x.Variadic() == y.Variadic() && +- uni(x.Params(), y.Params()) && +- uni(x.Results(), y.Results()) +- +- case *types.Slice: +- y := y.(*types.Slice) +- return uni(x.Elem(), y.Elem()) +- +- case *types.Struct: +- y := y.(*types.Struct) +- if x.NumFields() != y.NumFields() { +- return false +- } +- for i := range x.NumFields() { +- xf := x.Field(i) +- yf := y.Field(i) +- if xf.Embedded() != yf.Embedded() || +- xf.Name() != yf.Name() || +- x.Tag(i) != y.Tag(i) || +- !xf.Exported() && xf.Pkg() != yf.Pkg() || +- !uni(xf.Type(), yf.Type()) { +- return false +- } +- } +- return true +- +- case *types.Tuple: +- y := y.(*types.Tuple) +- if x.Len() != y.Len() { +- return false +- } +- for i := range x.Len() { +- if !uni(x.At(i).Type(), y.At(i).Type()) { +- return false +- } +- } +- return true +- +- default: // incl. *Union, *TypeParam +- panic(fmt.Sprintf("unexpected Type %#v", x)) +- } +- } +- +- if !uni(x, y) { +- clear(unifier) +- return false +- } +- +- // Populate the input map with the resulting types. +- if unifier != nil { +- for tparam, tptr := range bindings { +- unifier[tparam] = *tptr +- } +- } +- return true +-} +- +-// typeParams yields all the free type parameters within t that are relevant for +-// unification. +-// +-// Note: this function is tailored for the specific needs of the unification algorithm. +-// Don't try to use it for other purposes, see [typeparams.Free] instead. +-func typeParams(t types.Type) iter.Seq[*types.TypeParam] { +- +- return func(yield func(*types.TypeParam) bool) { +- seen := map[*types.TypeParam]bool{} // yield each type param only once +- +- // tps(t) yields each TypeParam in t and returns false to stop. 
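For illustration, the push-iterator shape that typeParams uses (iter.Seq, Go 1.23+): the returned closure calls yield once per element and stops as soon as yield returns false. A minimal sketch with hypothetical names:

package main

import (
	"fmt"
	"iter"
)

// firstN yields the integers 0..n-1 in the same push style as typeParams.
func firstN(n int) iter.Seq[int] {
	return func(yield func(int) bool) {
		for i := 0; i < n; i++ {
			if !yield(i) {
				return // consumer stopped early, e.g. break in the range loop
			}
		}
	}
}

func main() {
	for i := range firstN(5) {
		if i == 3 {
			break // exercises the early-stop path
		}
		fmt.Println(i)
	}
}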
+- var tps func(types.Type) bool +- tps = func(t types.Type) bool { +- t = types.Unalias(t) +- +- switch t := t.(type) { +- case *types.TypeParam: +- if seen[t] { +- return true +- } +- seen[t] = true +- return yield(t) +- +- case *types.Basic: +- return true +- +- case *types.Array: +- return tps(t.Elem()) +- +- case *types.Chan: +- return tps(t.Elem()) +- +- case *types.Interface: +- // TODO(jba): implement. +- return true +- +- case *types.Map: +- return tps(t.Key()) && tps(t.Elem()) +- +- case *types.Named: +- if t.Origin() == t { +- // generic type: look at type params +- return moreiters.Every(t.TypeParams().TypeParams(), +- func(tp *types.TypeParam) bool { return tps(tp) }) +- } +- // instantiated type: look at type args +- return moreiters.Every(t.TypeArgs().Types(), tps) +- +- case *types.Pointer: +- return tps(t.Elem()) +- +- case *types.Signature: +- return tps(t.Params()) && tps(t.Results()) +- +- case *types.Slice: +- return tps(t.Elem()) +- +- case *types.Struct: +- return moreiters.Every(t.Fields(), +- func(v *types.Var) bool { return tps(v.Type()) }) +- +- case *types.Tuple: +- return moreiters.Every(t.Variables(), +- func(v *types.Var) bool { return tps(v.Type()) }) +- +- default: // incl. *Union +- panic(fmt.Sprintf("unexpected Type %#v", t)) +- } +- } +- +- tps(t) +- } +-} +- +-var ( +- // TODO(adonovan): why do various RPC handlers related to +- // IncomingCalls return (nil, nil) on the protocol in response +- // to this error? That seems like a violation of the protocol. +- // Is it perhaps a workaround for VSCode behavior? +- errNoObjectFound = errors.New("no object found") +-) +- +-// pathEnclosingObjNode returns the AST path to the object-defining +-// node associated with pos. "Object-defining" means either an +-// *ast.Ident mapped directly to a types.Object or an ast.Node mapped +-// implicitly to a types.Object. +-func pathEnclosingObjNode(f *ast.File, pos token.Pos) []ast.Node { +- var ( +- path []ast.Node +- found bool +- ) +- +- ast.Inspect(f, func(n ast.Node) bool { +- if found { +- return false +- } +- +- if n == nil { +- path = path[:len(path)-1] +- return false +- } +- +- path = append(path, n) +- +- switch n := n.(type) { +- case *ast.Ident: +- // Include the position directly after identifier. This handles +- // the common case where the cursor is right after the +- // identifier the user is currently typing. Previously we +- // handled this by calling astutil.PathEnclosingInterval twice, +- // once for "pos" and once for "pos-1". +- found = n.Pos() <= pos && pos <= n.End() +- +- case *ast.ImportSpec: +- if n.Path.Pos() <= pos && pos < n.Path.End() { +- found = true +- // If import spec has a name, add name to path even though +- // position isn't in the name. +- if n.Name != nil { +- path = append(path, n.Name) +- } +- } +- +- case *ast.StarExpr: +- // Follow star expressions to the inner identifier. +- if pos == n.Star { +- pos = n.X.Pos() +- } +- } +- +- return !found +- }) +- +- if len(path) == 0 { +- return nil +- } +- +- // Reverse path so leaf is first element. +- slices.Reverse(path) +- return path +-} +- +-// --- Implementations based on signature types -- +- +-// implFuncs finds Implementations based on func types. +-// +-// Just as an interface type abstracts a set of concrete methods, a +-// function type abstracts a set of concrete functions. Gopls provides +-// analogous operations for navigating from abstract to concrete and +-// back in the domain of function types. 
+-// +-// A single type (for example http.HandlerFunc) can have both an +-// underlying type of function (types.Signature) and have methods that +-// cause it to implement an interface. To avoid a confusing user +-// interface we want to separate the two operations so that the user +-// can unambiguously specify the query they want. +-// +-// So, whereas Implementations queries on interface types are usually +-// keyed by an identifier of a named type, Implementations queries on +-// function types are keyed by the "func" keyword, or by the "(" of a +-// call expression. The query relates two sets of locations: +-// +-// 1. the "func" token of each function declaration (FuncDecl or +-// FuncLit). These are analogous to declarations of concrete +-// methods. +-// +-// 2. uses of abstract functions: +-// +-// (a) the "func" token of each FuncType that is not part of +-// Func{Decl,Lit}. These are analogous to interface{...} types. +-// +-// (b) the "(" paren of each dynamic call on a value of an +-// abstract function type. These are analogous to references to +-// interface method names, but no names are involved, which has +-// historically made them hard to search for. +-// +-// An Implementations query on a location in set 1 returns set 2, +-// and vice versa. +-// +-// implFuncs returns errNotHandled to indicate that we should try the +-// regular method-sets algorithm. +-func implFuncs(pkg *cache.Package, pgf *parsego.File, pos token.Pos) ([]protocol.Location, error) { +- curSel, ok := pgf.Cursor.FindByPos(pos, pos) +- if !ok { +- return nil, fmt.Errorf("no code selected") +- } +- +- info := pkg.TypesInfo() +- if info.Types == nil || info.Defs == nil || info.Uses == nil { +- panic("one of info.Types, .Defs or .Uses is nil") +- } +- +- // Find innermost enclosing FuncType or CallExpr. +- // +- // We are looking for specific tokens (FuncType.Func and +- // CallExpr.Lparen), but FindPos prefers an adjoining +- // subexpression: given f(x) without additional spaces between +- // tokens, FindPos always returns either f or x, never the +- // CallExpr itself. Thus we must ascend the tree. +- // +- // Another subtlety: due to an edge case in go/ast, FindPos at +- // FuncDecl.Type.Func does not return FuncDecl.Type, only the +- // FuncDecl, because the orders of tree positions and tokens +- // are inconsistent. Consequently, the ancestors for a "func" +- // token of Func{Lit,Decl} do not include FuncType, hence the +- // explicit cases below. +- for cur := range curSel.Enclosing( +- (*ast.FuncDecl)(nil), +- (*ast.FuncLit)(nil), +- (*ast.FuncType)(nil), +- (*ast.CallExpr)(nil), +- ) { +- switch n := cur.Node().(type) { +- case *ast.FuncDecl, *ast.FuncLit: +- if inToken(n.Pos(), "func", pos) { +- // Case 1: concrete function declaration. +- // Report uses of corresponding function types. +- switch n := n.(type) { +- case *ast.FuncDecl: +- return funcUses(pkg, info.Defs[n.Name].Type()) +- case *ast.FuncLit: +- return funcUses(pkg, info.TypeOf(n.Type)) +- } +- } +- +- case *ast.FuncType: +- if n.Func.IsValid() && inToken(n.Func, "func", pos) && !beneathFuncDef(cur) { +- // Case 2a: function type. +- // Report declarations of corresponding concrete functions. +- return funcDefs(pkg, info.TypeOf(n)) +- } +- +- case *ast.CallExpr: +- if inToken(n.Lparen, "(", pos) { +- t := dynamicFuncCallType(info, n) +- if t == nil { +- return nil, fmt.Errorf("not a dynamic function call") +- } +- // Case 2b: dynamic call of function value. +- // Report declarations of corresponding concrete functions. 
+- return funcDefs(pkg, t) +- } +- } +- } +- +- // It's probably a query of a named type or method. +- // Fall back to the method-sets computation. +- return nil, errNotHandled +-} +- +-var errNotHandled = errors.New("not handled") +- +-// funcUses returns all locations in the workspace that are dynamic +-// uses of the specified function type. +-func funcUses(pkg *cache.Package, t types.Type) ([]protocol.Location, error) { +- var locs []protocol.Location +- +- // local search +- for _, pgf := range pkg.CompiledGoFiles() { +- for cur := range pgf.Cursor.Preorder((*ast.CallExpr)(nil), (*ast.FuncType)(nil)) { +- var pos, end token.Pos +- var ftyp types.Type +- switch n := cur.Node().(type) { +- case *ast.CallExpr: +- ftyp = dynamicFuncCallType(pkg.TypesInfo(), n) +- pos, end = n.Lparen, n.Lparen+token.Pos(len("(")) +- +- case *ast.FuncType: +- if !beneathFuncDef(cur) { +- // func type (not def) +- ftyp = pkg.TypesInfo().TypeOf(n) +- pos, end = n.Func, n.Func+token.Pos(len("func")) +- } +- } +- if ftyp == nil { +- continue // missing type information +- } +- if unify(t, ftyp, nil) { +- loc, err := pgf.PosLocation(pos, end) +- if err != nil { +- return nil, err +- } +- locs = append(locs, loc) +- } +- } +- } +- +- // TODO(adonovan): implement global search +- +- return locs, nil +-} +- +-// funcDefs returns all locations in the workspace that define +-// functions of the specified type. +-func funcDefs(pkg *cache.Package, t types.Type) ([]protocol.Location, error) { +- var locs []protocol.Location +- +- // local search +- for _, pgf := range pkg.CompiledGoFiles() { +- for curFn := range pgf.Cursor.Preorder((*ast.FuncDecl)(nil), (*ast.FuncLit)(nil)) { +- fn := curFn.Node() +- var ftyp types.Type +- switch fn := fn.(type) { +- case *ast.FuncDecl: +- ftyp = pkg.TypesInfo().Defs[fn.Name].Type() +- case *ast.FuncLit: +- ftyp = pkg.TypesInfo().TypeOf(fn) +- } +- if ftyp == nil { +- continue // missing type information +- } +- if unify(t, ftyp, nil) { +- pos := fn.Pos() +- loc, err := pgf.PosLocation(pos, pos+token.Pos(len("func"))) +- if err != nil { +- return nil, err +- } +- locs = append(locs, loc) +- } +- } +- } +- +- // TODO(adonovan): implement global search, by analogy with +- // methodsets algorithm. +- // +- // One optimization: if any signature type has free package +- // names, look for matches only in packages among the rdeps of +- // those packages. +- +- return locs, nil +-} +- +-// beneathFuncDef reports whether the specified FuncType cursor is a +-// child of Func{Decl,Lit}. +-func beneathFuncDef(cur inspector.Cursor) bool { +- switch ek, _ := cur.ParentEdge(); ek { +- case edge.FuncDecl_Type, edge.FuncLit_Type: +- return true +- } +- return false +-} +- +-// dynamicFuncCallType reports whether call is a dynamic (non-method) function call. +-// If so, it returns the function type, otherwise nil. +-// +-// Tested via ../test/marker/testdata/implementation/signature.txt. +-func dynamicFuncCallType(info *types.Info, call *ast.CallExpr) types.Type { +- if typesinternal.ClassifyCall(info, call) == typesinternal.CallDynamic { +- if tv, ok := info.Types[call.Fun]; ok { +- return tv.Type.Underlying() +- } +- } +- return nil +-} +- +-// inToken reports whether pos is within the token of +-// the specified position and string. 
+-func inToken(tokPos token.Pos, tokStr string, pos token.Pos) bool { +- return tokPos <= pos && pos <= tokPos+token.Pos(len(tokStr)) +-} +diff -urN a/gopls/internal/golang/implementation_test.go b/gopls/internal/golang/implementation_test.go +--- a/gopls/internal/golang/implementation_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/golang/implementation_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,303 +0,0 @@ +-// Copyright 2025 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package golang +- +-import ( +- "go/types" +- "maps" +- "testing" +- +- "golang.org/x/tools/internal/testfiles" +- "golang.org/x/tools/txtar" +-) +- +-func TestUnify(t *testing.T) { +- // Most cases from TestMatches in gopls/internal/util/fingerprint/fingerprint_test.go. +- const src = ` +--- go.mod -- +-module example.com +-go 1.24 +- +--- a/a.go -- +-package a +- +-type Int = int +-type String = string +- +-// Eq.Equal matches casefold.Equal. +-type Eq[T any] interface { Equal(T, T) bool } +-type casefold struct{} +-func (casefold) Equal(x, y string) bool +- +-// A matches AString. +-type A[T any] = struct { x T } +-type AString = struct { x string } +- +-// B matches anything! +-type B[T any] = T +- +-func C1[T any](int, T, ...string) T { panic(0) } +-func C2[U any](int, int, ...U) bool { panic(0) } +-func C3(int, bool, ...string) rune +-func C4(int, bool, ...string) +-func C5(int, float64, bool, string) bool +-func C6(int, bool, ...string) bool +- +-func DAny[T any](Named[T]) { panic(0) } +-func DString(Named[string]) +-func DInt(Named[int]) +- +-type Named[T any] struct { x T } +- +-func E1(byte) rune +-func E2(uint8) int32 +-func E3(int8) uint32 +- +-// generic vs. generic +-func F1[T any](T) { panic(0) } +-func F2[T any](*T) { panic(0) } +-func F3[T any](T, T) { panic(0) } +-func F4[U any](U, *U) {panic(0) } +-func F4a[U any](U, Named[U]) {panic(0) } +-func F5[T, U any](T, U, U) { panic(0) } +-func F6[T any](T, int, T) { panic(0) } +-func F7[T any](bool, T, T) { panic(0) } +-func F8[V any](*V, int, int) { panic(0) } +-func F9[V any](V, *V, V) { panic(0) } +-` +- type tmap = map[*types.TypeParam]types.Type +- +- var ( +- boolType = types.Typ[types.Bool] +- intType = types.Typ[types.Int] +- stringType = types.Typ[types.String] +- ) +- +- pkg := testfiles.LoadPackages(t, txtar.Parse([]byte(src)), "./a")[0] +- scope := pkg.Types.Scope() +- +- tparam := func(name string, index int) *types.TypeParam { +- obj := scope.Lookup(name) +- var tps *types.TypeParamList +- switch obj := obj.(type) { +- case *types.Func: +- tps = obj.Signature().TypeParams() +- case *types.TypeName: +- if n, ok := obj.Type().(*types.Named); ok { +- tps = n.TypeParams() +- } else { +- tps = obj.Type().(*types.Alias).TypeParams() +- } +- default: +- t.Fatalf("unsupported object of type %T", obj) +- } +- return tps.At(index) +- } +- +- for _, test := range []struct { +- x, y string // the symbols in the above source code whose types to unify +- method string // optional field or method +- params tmap // initial values of type params +- want bool // success or failure +- wantParams tmap // expected output +- }{ +- { +- // In Eq[T], T is bound to string. +- x: "Eq", +- y: "casefold", +- method: "Equal", +- want: true, +- wantParams: tmap{tparam("Eq", 0): stringType}, +- }, +- { +- // If we unify A[T] and A[string], T should be bound to string. 
+- x: "A", +- y: "AString", +- want: true, +- wantParams: tmap{tparam("A", 0): stringType}, +- }, +- {x: "A", y: "Eq", want: false}, // completely unrelated +- { +- x: "B", +- y: "String", +- want: true, +- wantParams: tmap{tparam("B", 0): stringType}, +- }, +- { +- x: "B", +- y: "Int", +- want: true, +- wantParams: tmap{tparam("B", 0): intType}, +- }, +- { +- x: "B", +- y: "A", +- want: true, +- // B's T is bound to A's struct { x T } +- wantParams: tmap{tparam("B", 0): scope.Lookup("A").Type().Underlying()}, +- }, +- { +- // C1's U unifies with C6's bool. +- x: "C1", +- y: "C6", +- wantParams: tmap{tparam("C1", 0): boolType}, +- want: true, +- }, +- // C1 fails to unify with C2 because C1's T must be bound to both int and bool. +- {x: "C1", y: "C2", want: false}, +- // The remaining "C" cases fail for less interesting reasons, usually different numbers +- // or types of parameters or results. +- {x: "C1", y: "C3", want: false}, +- {x: "C1", y: "C4", want: false}, +- {x: "C1", y: "C5", want: false}, +- {x: "C2", y: "C3", want: false}, +- {x: "C2", y: "C4", want: false}, +- {x: "C3", y: "C4", want: false}, +- { +- x: "DAny", +- y: "DString", +- want: true, +- wantParams: tmap{tparam("DAny", 0): stringType}, +- }, +- {x: "DString", y: "DInt", want: false}, // different instantiations of Named +- {x: "E1", y: "E2", want: true}, // byte and rune are just aliases +- {x: "E2", y: "E3", want: false}, +- +- // The following tests cover all of the type param cases of unify. +- { +- // F1[*int] = F2[int], for example +- // F1's T is bound to a pointer to F2's T. +- x: "F1", +- // F2's T is unbound: any instantiation works. +- y: "F2", +- want: true, +- wantParams: tmap{tparam("F1", 0): types.NewPointer(tparam("F2", 0))}, +- }, +- {x: "F3", y: "F4", want: false}, // would require U identical to *U, prevented by occur check +- {x: "F3", y: "F4a", want: false}, // occur check through Named[T] +- { +- x: "F5", +- y: "F6", +- want: true, +- wantParams: tmap{ +- tparam("F5", 0): intType, +- tparam("F5", 1): intType, +- tparam("F6", 0): intType, +- }, +- }, +- {x: "F6", y: "F7", want: false}, // both are bound +- { +- x: "F5", +- y: "F6", +- params: tmap{tparam("F6", 0): intType}, // consistent with the result +- want: true, +- wantParams: tmap{ +- tparam("F5", 0): intType, +- tparam("F5", 1): intType, +- tparam("F6", 0): intType, +- }, +- }, +- { +- x: "F5", +- y: "F6", +- params: tmap{tparam("F6", 0): boolType}, // not consistent +- want: false, +- }, +- {x: "F6", y: "F7", want: false}, // both are bound +- { +- // T=*V, U=int, V=int +- x: "F5", +- y: "F8", +- want: true, +- wantParams: tmap{ +- tparam("F5", 0): types.NewPointer(tparam("F8", 0)), +- tparam("F5", 1): intType, +- }, +- }, +- { +- // T=*V, U=int, V=int +- // Partial initial information is fine, as long as it's consistent. +- x: "F5", +- y: "F8", +- want: true, +- params: tmap{tparam("F5", 1): intType}, +- wantParams: tmap{ +- tparam("F5", 0): types.NewPointer(tparam("F8", 0)), +- tparam("F5", 1): intType, +- }, +- }, +- { +- // T=*V, U=int, V=int +- // Partial initial information is fine, as long as it's consistent. 
+- x: "F5", +- y: "F8", +- want: true, +- params: tmap{tparam("F5", 0): types.NewPointer(tparam("F8", 0))}, +- wantParams: tmap{ +- tparam("F5", 0): types.NewPointer(tparam("F8", 0)), +- tparam("F5", 1): intType, +- }, +- }, +- {x: "F5", y: "F9", want: false}, // T is unbound, V is bound, and T occurs in V +- { +- // T bound to Named[T'] +- x: "F1", +- y: "DAny", +- want: true, +- wantParams: tmap{ +- tparam("F1", 0): scope.Lookup("DAny").(*types.Func).Signature().Params().At(0).Type()}, +- }, +- } { +- +- lookup := func(name string) types.Type { +- obj := scope.Lookup(name) +- if obj == nil { +- t.Fatalf("Lookup %s failed", name) +- } +- if test.method != "" { +- obj, _, _ = types.LookupFieldOrMethod(obj.Type(), true, pkg.Types, test.method) +- if obj == nil { +- t.Fatalf("Lookup %s.%s failed", name, test.method) +- } +- } +- return obj.Type() +- } +- +- check := func(a, b string, want, compareParams bool) { +- t.Helper() +- +- ta := lookup(a) +- tb := lookup(b) +- +- var gotParams tmap +- if test.params == nil { +- // Get the unifier even if there are no input params. +- gotParams = tmap{} +- } else { +- gotParams = maps.Clone(test.params) +- } +- got := unify(ta, tb, gotParams) +- if got != want { +- t.Errorf("a=%s b=%s method=%s: unify returned %t for these inputs:\n- %s\n- %s", +- a, b, test.method, got, ta, tb) +- return +- } +- if !compareParams { +- return +- } +- if !maps.EqualFunc(gotParams, test.wantParams, types.Identical) { +- t.Errorf("x=%s y=%s method=%s: params: got %v, want %v", +- a, b, test.method, gotParams, test.wantParams) +- } +- } +- +- check(test.x, test.y, test.want, true) +- // unify is symmetric +- check(test.y, test.x, test.want, true) +- // unify is reflexive +- check(test.x, test.x, true, false) +- check(test.y, test.y, true, false) +- } +-} +diff -urN a/gopls/internal/golang/inlay_hint.go b/gopls/internal/golang/inlay_hint.go +--- a/gopls/internal/golang/inlay_hint.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/golang/inlay_hint.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,419 +0,0 @@ +-// Copyright 2022 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package golang +- +-import ( +- "cmp" +- "context" +- "fmt" +- "go/ast" +- "go/constant" +- "go/token" +- "go/types" +- "slices" +- "strings" +- +- "golang.org/x/tools/go/ast/inspector" +- "golang.org/x/tools/go/types/typeutil" +- "golang.org/x/tools/gopls/internal/cache" +- "golang.org/x/tools/gopls/internal/cache/parsego" +- "golang.org/x/tools/gopls/internal/file" +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/gopls/internal/settings" +- "golang.org/x/tools/gopls/internal/util/safetoken" +- "golang.org/x/tools/internal/event" +- "golang.org/x/tools/internal/typeparams" +- "golang.org/x/tools/internal/typesinternal" +-) +- +-func InlayHint(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle, pRng protocol.Range) ([]protocol.InlayHint, error) { +- ctx, done := event.Start(ctx, "golang.InlayHint") +- defer done() +- +- pkg, pgf, err := NarrowestPackageForFile(ctx, snapshot, fh.URI()) +- if err != nil { +- return nil, fmt.Errorf("getting file for InlayHint: %w", err) +- } +- +- // Collect a list of the inlay hints that are enabled. 
+- inlayHintOptions := snapshot.Options().InlayHintOptions +- var enabledHints []inlayHintFunc +- for hint, enabled := range inlayHintOptions.Hints { +- if !enabled { +- continue +- } +- if fn, ok := allInlayHints[hint]; ok { +- enabledHints = append(enabledHints, fn) +- } +- } +- if len(enabledHints) == 0 { +- return nil, nil +- } +- +- info := pkg.TypesInfo() +- qual := typesinternal.FileQualifier(pgf.File, pkg.Types()) +- +- // Set the range to the full file if the range is not valid. +- start, end := pgf.File.FileStart, pgf.File.FileEnd +- +- // TODO(adonovan): this condition looks completely wrong! +- if pRng.Start.Line < pRng.End.Line || pRng.Start.Character < pRng.End.Character { +- // Adjust start and end for the specified range. +- var err error +- start, end, err = pgf.RangePos(pRng) +- if err != nil { +- return nil, err +- } +- } +- +- var hints []protocol.InlayHint +- if curSubrange, ok := pgf.Cursor.FindByPos(start, end); ok { +- add := func(hint protocol.InlayHint) { hints = append(hints, hint) } +- for _, fn := range enabledHints { +- fn(info, pgf, qual, curSubrange, add) +- } +- } +- return hints, nil +-} +- +-type inlayHintFunc func(info *types.Info, pgf *parsego.File, qual types.Qualifier, cur inspector.Cursor, add func(protocol.InlayHint)) +- +-var allInlayHints = map[settings.InlayHint]inlayHintFunc{ +- settings.AssignVariableTypes: assignVariableTypes, +- settings.ConstantValues: constantValues, +- settings.ParameterNames: parameterNames, +- settings.RangeVariableTypes: rangeVariableTypes, +- settings.CompositeLiteralTypes: compositeLiteralTypes, +- settings.CompositeLiteralFieldNames: compositeLiteralFields, +- settings.FunctionTypeParameters: funcTypeParams, +- settings.IgnoredError: ignoredError, +-} +- +-func parameterNames(info *types.Info, pgf *parsego.File, qual types.Qualifier, cur inspector.Cursor, add func(protocol.InlayHint)) { +- for curCall := range cur.Preorder((*ast.CallExpr)(nil)) { +- callExpr := curCall.Node().(*ast.CallExpr) +- t := info.TypeOf(callExpr.Fun) +- if t == nil { +- continue +- } +- signature, ok := typeparams.CoreType(t).(*types.Signature) +- if !ok { +- continue +- } +- +- for i, v := range callExpr.Args { +- start, err := pgf.PosPosition(v.Pos()) +- if err != nil { +- continue +- } +- params := signature.Params() +- // When a function has variadic params, we skip args after +- // params.Len(). +- if i > params.Len()-1 { +- break +- } +- param := params.At(i) +- // param.Name is empty for built-ins like append +- if param.Name() == "" { +- continue +- } +- // Skip the parameter name hint if the arg matches +- // the parameter name. +- if i, ok := v.(*ast.Ident); ok && i.Name == param.Name() { +- continue +- } +- +- label := param.Name() +- if signature.Variadic() && i == params.Len()-1 { +- label = label + "..." +- } +- add(protocol.InlayHint{ +- Position: start, +- Label: labelPart(label + ":"), +- Kind: protocol.Parameter, +- PaddingRight: true, +- }) +- } +- } +-} +- +-func ignoredError(info *types.Info, pgf *parsego.File, qual types.Qualifier, cur inspector.Cursor, add func(protocol.InlayHint)) { +-outer: +- for curCall := range cur.Preorder((*ast.ExprStmt)(nil)) { +- stmt := curCall.Node().(*ast.ExprStmt) +- call, ok := stmt.X.(*ast.CallExpr) +- if !ok { +- continue // not a call stmt +- } +- +- // Check that type of result (or last component) is error. 
+- tv, ok := info.Types[call] +- if !ok { +- continue // no type info +- } +- t := tv.Type +- if res, ok := t.(*types.Tuple); ok && res.Len() > 1 { +- t = res.At(res.Len() - 1).Type() +- } +- if !types.Identical(t, errorType) { +- continue +- } +- +- // Suppress some common false positives. +- obj := typeutil.Callee(info, call) +- if typesinternal.IsFunctionNamed(obj, "fmt", "Print", "Printf", "Println", "Fprint", "Fprintf", "Fprintln") || +- typesinternal.IsMethodNamed(obj, "bytes", "Buffer", "Write", "WriteByte", "WriteRune", "WriteString") || +- typesinternal.IsMethodNamed(obj, "strings", "Builder", "Write", "WriteByte", "WriteRune", "WriteString") || +- typesinternal.IsFunctionNamed(obj, "io", "WriteString") { +- continue +- } +- +- // Suppress if comment on same line contains "// ignore error". +- line := func(pos token.Pos) int { return safetoken.Line(pgf.Tok, pos) } +- comments := pgf.File.Comments +- compare := func(cg *ast.CommentGroup, pos token.Pos) int { +- return cmp.Compare(cg.Pos(), pos) +- } +- i, _ := slices.BinarySearchFunc(comments, stmt.End(), compare) +- if i >= 0 && i < len(comments) { +- cg := comments[i] +- if line(cg.Pos()) == line(stmt.End()) && strings.Contains(cg.Text(), "ignore error") { +- continue outer // suppress +- } +- } +- +- // Provide a hint. +- pos, err := pgf.PosPosition(stmt.End()) +- if err != nil { +- continue +- } +- add(protocol.InlayHint{ +- Position: pos, +- Label: labelPart(" // ignore error"), +- }) +- } +-} +- +-func funcTypeParams(info *types.Info, pgf *parsego.File, qual types.Qualifier, cur inspector.Cursor, add func(protocol.InlayHint)) { +- for curCall := range cur.Preorder((*ast.CallExpr)(nil)) { +- call := curCall.Node().(*ast.CallExpr) +- id, ok := call.Fun.(*ast.Ident) +- if !ok { +- continue +- } +- inst := info.Instances[id] +- if inst.TypeArgs == nil { +- continue +- } +- start, err := pgf.PosPosition(id.End()) +- if err != nil { +- continue +- } +- var args []string +- for i := 0; i < inst.TypeArgs.Len(); i++ { +- args = append(args, inst.TypeArgs.At(i).String()) +- } +- if len(args) == 0 { +- continue +- } +- add(protocol.InlayHint{ +- Position: start, +- Label: labelPart("[" + strings.Join(args, ", ") + "]"), +- Kind: protocol.Type, +- }) +- } +-} +- +-func assignVariableTypes(info *types.Info, pgf *parsego.File, qual types.Qualifier, cur inspector.Cursor, add func(protocol.InlayHint)) { +- for node := range cur.Preorder((*ast.AssignStmt)(nil), (*ast.ValueSpec)(nil)) { +- switch n := node.Node().(type) { +- case *ast.AssignStmt: +- if n.Tok != token.DEFINE { +- continue +- } +- for _, v := range n.Lhs { +- variableType(info, pgf, qual, v, add) +- } +- case *ast.GenDecl: +- if n.Tok != token.VAR { +- continue +- } +- for _, v := range n.Specs { +- spec := v.(*ast.ValueSpec) +- // The type of the variable is written, skip showing type of this var. 
+- // ```go +- // var foo string +- // ``` +- if spec.Type != nil { +- continue +- } +- +- for _, v := range spec.Names { +- variableType(info, pgf, qual, v, add) +- } +- } +- } +- } +-} +- +-func rangeVariableTypes(info *types.Info, pgf *parsego.File, qual types.Qualifier, cur inspector.Cursor, add func(protocol.InlayHint)) { +- for curRange := range cur.Preorder((*ast.RangeStmt)(nil)) { +- rStmt := curRange.Node().(*ast.RangeStmt) +- variableType(info, pgf, qual, rStmt.Key, add) +- variableType(info, pgf, qual, rStmt.Value, add) +- } +-} +- +-func variableType(info *types.Info, pgf *parsego.File, qual types.Qualifier, e ast.Expr, add func(protocol.InlayHint)) { +- typ := info.TypeOf(e) +- if typ == nil { +- return +- } +- end, err := pgf.PosPosition(e.End()) +- if err != nil { +- return +- } +- add(protocol.InlayHint{ +- Position: end, +- Label: labelPart(types.TypeString(typ, qual)), +- Kind: protocol.Type, +- PaddingLeft: true, +- }) +-} +- +-func constantValues(info *types.Info, pgf *parsego.File, qual types.Qualifier, cur inspector.Cursor, add func(protocol.InlayHint)) { +- for curDecl := range cur.Preorder((*ast.GenDecl)(nil)) { +- genDecl := curDecl.Node().(*ast.GenDecl) +- if genDecl.Tok != token.CONST { +- continue +- } +- +- for _, v := range genDecl.Specs { +- spec, ok := v.(*ast.ValueSpec) +- if !ok { +- continue +- } +- end, err := pgf.PosPosition(v.End()) +- if err != nil { +- continue +- } +- // Show hints when values are missing or at least one value is not +- // a basic literal. +- showHints := len(spec.Values) == 0 +- checkValues := len(spec.Names) == len(spec.Values) +- var values []string +- for i, w := range spec.Names { +- obj, ok := info.ObjectOf(w).(*types.Const) +- if !ok || obj.Val().Kind() == constant.Unknown { +- continue +- } +- if checkValues { +- switch spec.Values[i].(type) { +- case *ast.BadExpr: +- continue +- case *ast.BasicLit: +- default: +- if obj.Val().Kind() != constant.Bool { +- showHints = true +- } +- } +- } +- values = append(values, fmt.Sprintf("%v", obj.Val())) +- } +- if !showHints || len(values) == 0 { +- continue +- } +- add(protocol.InlayHint{ +- Position: end, +- Label: labelPart("= " + strings.Join(values, ", ")), +- PaddingLeft: true, +- }) +- } +- } +-} +- +-func compositeLiteralFields(info *types.Info, pgf *parsego.File, qual types.Qualifier, cur inspector.Cursor, add func(protocol.InlayHint)) { +- for curCompLit := range cur.Preorder((*ast.CompositeLit)(nil)) { +- compLit, ok := curCompLit.Node().(*ast.CompositeLit) +- if !ok { +- continue +- } +- typ := info.TypeOf(compLit) +- if typ == nil { +- continue +- } +- typ = typesinternal.Unpointer(typ) +- strct, ok := typeparams.CoreType(typ).(*types.Struct) +- if !ok { +- continue +- } +- +- var hints []protocol.InlayHint +- var allEdits []protocol.TextEdit +- for i, v := range compLit.Elts { +- if _, ok := v.(*ast.KeyValueExpr); !ok { +- start, err := pgf.PosPosition(v.Pos()) +- if err != nil { +- continue +- } +- if i > strct.NumFields()-1 { +- break +- } +- hints = append(hints, protocol.InlayHint{ +- Position: start, +- Label: labelPart(strct.Field(i).Name() + ":"), +- Kind: protocol.Parameter, +- PaddingRight: true, +- }) +- allEdits = append(allEdits, protocol.TextEdit{ +- Range: protocol.Range{Start: start, End: start}, +- NewText: strct.Field(i).Name() + ": ", +- }) +- } +- } +- // It is not allowed to have a mix of keyed and unkeyed fields, so +- // have the text edits add keys to all fields. 
+- for i := range hints { +- hints[i].TextEdits = allEdits +- add(hints[i]) +- } +- } +-} +- +-func compositeLiteralTypes(info *types.Info, pgf *parsego.File, qual types.Qualifier, cur inspector.Cursor, add func(protocol.InlayHint)) { +- for curCompLit := range cur.Preorder((*ast.CompositeLit)(nil)) { +- compLit := curCompLit.Node().(*ast.CompositeLit) +- typ := info.TypeOf(compLit) +- if typ == nil { +- continue +- } +- if compLit.Type != nil { +- continue +- } +- prefix := "" +- if t, ok := typeparams.CoreType(typ).(*types.Pointer); ok { +- typ = t.Elem() +- prefix = "&" +- } +- // The type for this composite literal is implicit, add an inlay hint. +- start, err := pgf.PosPosition(compLit.Lbrace) +- if err != nil { +- continue +- } +- add(protocol.InlayHint{ +- Position: start, +- Label: labelPart(fmt.Sprintf("%s%s", prefix, types.TypeString(typ, qual))), +- Kind: protocol.Type, +- }) +- } +-} +- +-func labelPart(s string) []protocol.InlayHintLabelPart { +- const maxLabelLength = 28 +- if len(s) > maxLabelLength+len("...") { +- s = s[:maxLabelLength] + "..." +- } +- return []protocol.InlayHintLabelPart{{Value: s}} +-} +diff -urN a/gopls/internal/golang/inline_all.go b/gopls/internal/golang/inline_all.go +--- a/gopls/internal/golang/inline_all.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/golang/inline_all.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,318 +0,0 @@ +-// Copyright 2023 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package golang +- +-import ( +- "context" +- "fmt" +- "go/ast" +- "go/parser" +- "go/types" +- +- "golang.org/x/tools/go/ast/astutil" +- "golang.org/x/tools/go/types/typeutil" +- "golang.org/x/tools/gopls/internal/cache" +- "golang.org/x/tools/gopls/internal/cache/parsego" +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/gopls/internal/util/bug" +- "golang.org/x/tools/internal/refactor/inline" +-) +- +-// inlineAllCalls inlines all calls to the original function declaration +-// described by callee, returning the resulting modified file content. +-// +-// inlining everything is currently an expensive operation: it involves re-type +-// checking every package that contains a potential call, as reported by +-// References. In cases where there are multiple calls per file, inlineAllCalls +-// must type check repeatedly for each additional call. +-// +-// The provided post processing function is applied to the resulting source +-// after each transformation. This is necessary because we are using this +-// function to inline synthetic wrappers for the purpose of signature +-// rewriting. The delegated function has a fake name that doesn't exist in the +-// snapshot, and so we can't re-type check until we replace this fake name. +-// +-// TODO(rfindley): this only works because removing a parameter is a very +-// narrow operation. A better solution would be to allow for ad-hoc snapshots +-// that expose the full machinery of real snapshots: minimal invalidation, +-// batched type checking, etc. Then we could actually rewrite the declaring +-// package in this snapshot (and so 'post' would not be necessary), and could +-// robustly re-type check for the purpose of iterative inlining, even if the +-// inlined code pulls in new imports that weren't present in export data. 
+-// +-// The code below notes where are assumptions are made that only hold true in +-// the case of parameter removal (annotated with 'Assumption:') +-func inlineAllCalls(ctx context.Context, snapshot *cache.Snapshot, pkg *cache.Package, pgf *parsego.File, origDecl *ast.FuncDecl, callee *inline.Callee, post func([]byte) []byte, opts *inline.Options) (_ map[protocol.DocumentURI][]byte, inlineErr error) { +- // Collect references. +- var refs []protocol.Location +- { +- funcPos, err := pgf.Mapper.PosPosition(pgf.Tok, origDecl.Name.NamePos) +- if err != nil { +- return nil, err +- } +- fh, err := snapshot.ReadFile(ctx, pgf.URI) +- if err != nil { +- return nil, err +- } +- refs, err = References(ctx, snapshot, fh, funcPos, false) +- if err != nil { +- return nil, fmt.Errorf("finding references to rewrite: %v", err) +- } +- } +- +- // Type-check the narrowest package containing each reference. +- // TODO(rfindley): we should expose forEachPackage in order to operate in +- // parallel and to reduce peak memory for this operation. +- var ( +- pkgForRef = make(map[protocol.Location]PackageID) +- pkgs = make(map[PackageID]*cache.Package) +- // The inliner assumes that input is well-typed, but that is frequently not +- // the case within gopls. +- // Until we're able to harden the inliner, report panics as errors to avoid +- // crashing the server. +- badPkg = false +- ) +- { +- needPkgs := make(map[PackageID]struct{}) +- for _, ref := range refs { +- md, err := snapshot.NarrowestMetadataForFile(ctx, ref.URI) +- if err != nil { +- return nil, fmt.Errorf("finding ref metadata: %v", err) +- } +- pkgForRef[ref] = md.ID +- needPkgs[md.ID] = struct{}{} +- } +- var pkgIDs []PackageID +- for id := range needPkgs { // TODO: use maps.Keys once it is available to us +- pkgIDs = append(pkgIDs, id) +- } +- +- refPkgs, err := snapshot.TypeCheck(ctx, pkgIDs...) +- if err != nil { +- return nil, fmt.Errorf("type checking reference packages: %v", err) +- } +- +- for _, p := range refPkgs { +- pkgs[p.Metadata().ID] = p +- if len(p.ParseErrors())+len(p.TypeErrors()) > 0 { +- badPkg = true +- } +- } +- } +- +- if badPkg { +- defer func() { +- if x := recover(); x != nil { +- inlineErr = fmt.Errorf("inlining failed (%q), likely because inputs were ill-typed", x) +- } +- }() +- } +- +- // Organize calls by top file declaration. Calls within a single file may +- // affect each other, as the inlining edit may affect the surrounding scope +- // or imports Therefore, when inlining subsequent calls in the same +- // declaration, we must re-type check. +- +- type fileCalls struct { +- pkg *cache.Package +- pgf *parsego.File +- calls []*ast.CallExpr +- } +- +- refsByFile := make(map[protocol.DocumentURI]*fileCalls) +- for _, ref := range refs { +- refpkg := pkgs[pkgForRef[ref]] +- pgf, err := refpkg.File(ref.URI) +- if err != nil { +- return nil, bug.Errorf("finding %s in %s: %v", ref.URI, refpkg.Metadata().ID, err) +- } +- +- start, end, err := pgf.RangePos(ref.Range) +- if err != nil { +- return nil, err // e.g. invalid range +- } +- +- // Look for the surrounding call expression. +- var ( +- name *ast.Ident +- call *ast.CallExpr +- ) +- path, _ := astutil.PathEnclosingInterval(pgf.File, start, end) +- name, _ = path[0].(*ast.Ident) +- +- // TODO(rfindley): handle method expressions correctly. 
+- if _, ok := path[1].(*ast.SelectorExpr); ok { +- call, _ = path[2].(*ast.CallExpr) +- } else { +- call, _ = path[1].(*ast.CallExpr) +- } +- if name == nil || call == nil { +- // TODO(rfindley): handle this case with eta-abstraction: +- // a reference to the target function f in a non-call position +- // use(f) +- // is replaced by +- // use(func(...) { f(...) }) +- return nil, fmt.Errorf("cannot inline: found non-call function reference %v", ref) +- } +- +- // Heuristic: ignore references that overlap with type checker errors, as they may +- // lead to invalid results (see golang/go#70268). +- hasTypeErrors := false +- for _, typeErr := range refpkg.TypeErrors() { +- if call.Lparen <= typeErr.Pos && typeErr.Pos <= call.Rparen { +- hasTypeErrors = true +- } +- } +- +- if hasTypeErrors { +- continue +- } +- +- if typeutil.StaticCallee(refpkg.TypesInfo(), call) == nil { +- continue // dynamic call +- } +- +- // Sanity check. +- if obj := refpkg.TypesInfo().ObjectOf(name); obj == nil || +- obj.Name() != origDecl.Name.Name || +- obj.Pkg() == nil || +- obj.Pkg().Path() != string(pkg.Metadata().PkgPath) { +- +- return nil, bug.Errorf("cannot inline: corrupted reference %v", ref) +- } +- +- callInfo, ok := refsByFile[ref.URI] +- if !ok { +- callInfo = &fileCalls{ +- pkg: refpkg, +- pgf: pgf, +- } +- refsByFile[ref.URI] = callInfo +- } +- callInfo.calls = append(callInfo.calls, call) +- } +- +- // Inline each call within the same decl in sequence, re-typechecking after +- // each one. If there is only a single call within the decl, we can avoid +- // additional type checking. +- // +- // Assumption: inlining does not affect the package scope, so we can operate +- // on separate files independently. +- result := make(map[protocol.DocumentURI][]byte) +- for uri, callInfo := range refsByFile { +- var ( +- calls = callInfo.calls +- fset = callInfo.pkg.FileSet() +- tpkg = callInfo.pkg.Types() +- tinfo = callInfo.pkg.TypesInfo() +- file = callInfo.pgf.File +- content = callInfo.pgf.Src +- ) +- +- // Check for overlapping calls (such as Foo(Foo())). We can't handle these +- // because inlining may change the source order of the inner call with +- // respect to the inlined outer call, and so the heuristic we use to find +- // the next call (counting from top-to-bottom) does not work. +- for i := range calls { +- if i > 0 && calls[i-1].End() > calls[i].Pos() { +- return nil, fmt.Errorf("%s: can't inline overlapping call %s", uri, types.ExprString(calls[i-1])) +- } +- } +- +- currentCall := 0 +- for currentCall < len(calls) { +- caller := &inline.Caller{ +- Fset: fset, +- Types: tpkg, +- Info: tinfo, +- File: file, +- Call: calls[currentCall], +- Content: content, +- } +- res, err := inline.Inline(caller, callee, opts) +- if err != nil { +- return nil, fmt.Errorf("inlining failed: %v", err) +- } +- content = res.Content +- if post != nil { +- content = post(content) +- } +- if len(calls) <= 1 { +- // No need to re-type check, as we've inlined all calls. +- break +- } +- +- // TODO(rfindley): develop a theory of "trivial" inlining, which are +- // inlinings that don't require re-type checking. +- // +- // In principle, if the inlining only involves replacing one call with +- // another, the scope of the caller is unchanged and there is no need to +- // type check again before inlining subsequent calls (edits should not +- // overlap, and should not affect each other semantically). However, it +- // feels sufficiently complicated that, to be safe, this optimization is +- // deferred until later. 
+- +- file, err = parser.ParseFile(fset, uri.Path(), content, parser.ParseComments|parser.SkipObjectResolution) +- if err != nil { +- return nil, bug.Errorf("inlined file failed to parse: %v", err) +- } +- +- // After inlining one call with a removed parameter, the package will +- // fail to type check due to "not enough arguments". Therefore, we must +- // allow type errors here. +- // +- // Assumption: the resulting type errors do not affect the correctness of +- // subsequent inlining, because invalid arguments to a call do not affect +- // anything in the surrounding scope. +- // +- // TODO(rfindley): improve this. +- logf := func(string, ...any) {} +- if opts != nil { +- logf = opts.Logf +- } +- tpkg, tinfo, err = reTypeCheck(logf, callInfo.pkg, map[protocol.DocumentURI]*ast.File{uri: file}, true) +- if err != nil { +- return nil, bug.Errorf("type checking after inlining failed: %v", err) +- } +- +- // Collect calls to the target function in the modified declaration. +- var calls2 []*ast.CallExpr +- ast.Inspect(file, func(n ast.Node) bool { +- if call, ok := n.(*ast.CallExpr); ok { +- fn := typeutil.StaticCallee(tinfo, call) +- if fn != nil && fn.Pkg().Path() == string(pkg.Metadata().PkgPath) && fn.Name() == origDecl.Name.Name { +- calls2 = append(calls2, call) +- } +- } +- return true +- }) +- +- // If the number of calls has increased, this process will never cease. +- // If the number of calls has decreased, assume that inlining removed a +- // call. +- // If the number of calls didn't change, assume that inlining replaced +- // a call, and move on to the next. +- // +- // Assumption: we're inlining a call that has at most one recursive +- // reference (which holds for signature rewrites). +- // +- // TODO(rfindley): this isn't good enough. We should be able to support +- // inlining all existing calls even if they increase calls. How do we +- // correlate the before and after syntax? +- switch { +- case len(calls2) > len(calls): +- return nil, fmt.Errorf("inlining increased calls %d->%d, possible recursive call? content:\n%s", len(calls), len(calls2), content) +- case len(calls2) < len(calls): +- calls = calls2 +- case len(calls2) == len(calls): +- calls = calls2 +- currentCall++ +- } +- } +- +- result[callInfo.pgf.URI] = content +- } +- return result, nil +-} +diff -urN a/gopls/internal/golang/inline.go b/gopls/internal/golang/inline.go +--- a/gopls/internal/golang/inline.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/golang/inline.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,264 +0,0 @@ +-// Copyright 2023 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package golang +- +-// This file defines the refactor.inline code action. 
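A minimal before/after sketch of the behaviour this file implements, using a hypothetical callee add and caller sum (the rewritten form is shown as a comment rather than asserted as the exact output):

package example

func add(x, y int) int { return x + y }

// Applying refactor.inline to the add(1, 2) call below replaces the call
// with the callee's body; the result would read roughly:
//
//	func sum() int { return 1 + 2 }
func sum() int { return add(1, 2) }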
+- +-import ( +- "context" +- "fmt" +- "go/ast" +- "go/token" +- "go/types" +- +- "golang.org/x/tools/go/analysis" +- goastutil "golang.org/x/tools/go/ast/astutil" +- "golang.org/x/tools/go/ast/edge" +- "golang.org/x/tools/go/ast/inspector" +- "golang.org/x/tools/go/types/typeutil" +- "golang.org/x/tools/gopls/internal/cache" +- "golang.org/x/tools/gopls/internal/cache/parsego" +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/gopls/internal/util/safetoken" +- "golang.org/x/tools/internal/astutil" +- internalastutil "golang.org/x/tools/internal/astutil" +- "golang.org/x/tools/internal/diff" +- "golang.org/x/tools/internal/event" +- "golang.org/x/tools/internal/refactor/inline" +-) +- +-// enclosingStaticCall returns the innermost function call enclosing +-// the selected range, along with the callee. +-func enclosingStaticCall(pkg *cache.Package, pgf *parsego.File, start, end token.Pos) (*ast.CallExpr, *types.Func, error) { +- // TODO(adonovan): simplify using pgf.Cursor +- path, _ := goastutil.PathEnclosingInterval(pgf.File, start, end) +- +- var call *ast.CallExpr +-loop: +- for _, n := range path { +- switch n := n.(type) { +- case *ast.FuncLit: +- break loop +- case *ast.CallExpr: +- call = n +- break loop +- } +- } +- if call == nil { +- return nil, nil, fmt.Errorf("no enclosing call") +- } +- if safetoken.Line(pgf.Tok, call.Lparen) != safetoken.Line(pgf.Tok, start) { +- return nil, nil, fmt.Errorf("enclosing call is not on this line") +- } +- fn := typeutil.StaticCallee(pkg.TypesInfo(), call) +- if fn == nil { +- return nil, nil, fmt.Errorf("not a static call to a Go function") +- } +- return call, fn, nil +-} +- +-func inlineCall(ctx context.Context, snapshot *cache.Snapshot, callerPkg *cache.Package, callerPGF *parsego.File, start, end token.Pos) (_ *token.FileSet, _ *analysis.SuggestedFix, err error) { +- countInlineCall.Inc() +- // Find enclosing static call. +- call, fn, err := enclosingStaticCall(callerPkg, callerPGF, start, end) +- if err != nil { +- return nil, nil, err +- } +- +- // Locate callee by file/line and analyze it. +- calleePosn := safetoken.StartPosition(callerPkg.FileSet(), fn.Pos()) +- calleePkg, calleePGF, err := NarrowestPackageForFile(ctx, snapshot, protocol.URIFromPath(calleePosn.Filename)) +- if err != nil { +- return nil, nil, err +- } +- var calleeDecl *ast.FuncDecl +- for _, decl := range calleePGF.File.Decls { +- if decl, ok := decl.(*ast.FuncDecl); ok { +- posn := safetoken.StartPosition(calleePkg.FileSet(), decl.Name.Pos()) +- if posn.Line == calleePosn.Line && posn.Column == calleePosn.Column { +- calleeDecl = decl +- break +- } +- } +- } +- if calleeDecl == nil { +- return nil, nil, fmt.Errorf("can't find callee") +- } +- +- // The inliner assumes that input is well-typed, +- // but that is frequently not the case within gopls. +- // Until we are able to harden the inliner, +- // report panics as errors to avoid crashing the server. +- bad := func(p *cache.Package) bool { return len(p.ParseErrors())+len(p.TypeErrors()) > 0 } +- if bad(calleePkg) || bad(callerPkg) { +- defer func() { +- if x := recover(); x != nil { +- err = fmt.Errorf("inlining failed (%q), likely because inputs were ill-typed", x) +- } +- }() +- } +- +- // Users can consult the gopls event log to see +- // why a particular inlining strategy was chosen. 
+- logf := logger(ctx, "inliner", snapshot.Options().VerboseOutput) +- +- callee, err := inline.AnalyzeCallee(logf, calleePkg.FileSet(), calleePkg.Types(), calleePkg.TypesInfo(), calleeDecl, calleePGF.Src) +- if err != nil { +- return nil, nil, err +- } +- +- // Inline the call. +- caller := &inline.Caller{ +- Fset: callerPkg.FileSet(), +- Types: callerPkg.Types(), +- Info: callerPkg.TypesInfo(), +- File: callerPGF.File, +- Call: call, +- Content: callerPGF.Src, +- } +- +- res, err := inline.Inline(caller, callee, &inline.Options{Logf: logf}) +- if err != nil { +- return nil, nil, err +- } +- +- return callerPkg.FileSet(), &analysis.SuggestedFix{ +- Message: fmt.Sprintf("inline call of %v", callee), +- TextEdits: diffToTextEdits(callerPGF.Tok, diff.Bytes(callerPGF.Src, res.Content)), +- }, nil +-} +- +-// TODO(adonovan): change the inliner to instead accept an io.Writer. +-func logger(ctx context.Context, name string, verbose bool) func(format string, args ...any) { +- if verbose { +- return func(format string, args ...any) { +- event.Log(ctx, name+": "+fmt.Sprintf(format, args...)) +- } +- } else { +- return func(string, ...any) {} +- } +-} +- +-// canInlineVariable reports whether the selection is within an +-// identifier that is a use of a variable that has an initializer +-// expression. If so, it returns cursors for the identifier and the +-// initializer expression. +-func canInlineVariable(info *types.Info, curFile inspector.Cursor, start, end token.Pos) (_, _ inspector.Cursor, ok bool) { +- if curUse, ok := curFile.FindByPos(start, end); ok { +- if id, ok := curUse.Node().(*ast.Ident); ok && !isLvalueUse(curUse, info) { +- if v, ok := info.Uses[id].(*types.Var); ok && v.Kind() == types.LocalVar { +- if curIdent, ok := curFile.FindByPos(v.Pos(), v.Pos()); ok { +- curParent := curIdent.Parent() +- kind, index := curIdent.ParentEdge() +- switch kind { +- case edge.ValueSpec_Names: +- // var v = expr +- spec := curParent.Node().(*ast.ValueSpec) +- if len(spec.Names) == len(spec.Values) { +- return curUse, curParent.ChildAt(edge.ValueSpec_Values, index), true +- } +- case edge.AssignStmt_Lhs: +- // v := expr +- stmt := curParent.Node().(*ast.AssignStmt) +- if len(stmt.Lhs) == len(stmt.Rhs) { +- return curUse, curParent.ChildAt(edge.AssignStmt_Rhs, index), true +- } +- } +- } +- } +- } +- } +- return +-} +- +-// isLvalueUse reports whether the "use" identifier represented by cur +-// appears in an l-value context such as: +-// +-// - v=... +-// - v++ +-// - &v +-// - v.f(), when this implicitly takes the address of v. +-func isLvalueUse(cur inspector.Cursor, info *types.Info) bool { +- cur = unparenEnclosing(cur) +- +- kind, _ := cur.ParentEdge() +- switch kind { +- case edge.AssignStmt_Lhs, edge.IncDecStmt_X: +- return true // v=..., v++ +- +- case edge.UnaryExpr_X: +- return cur.Parent().Node().(*ast.UnaryExpr).Op == token.AND // &v +- +- case edge.SelectorExpr_X: +- sel := cur.Parent().Node().(*ast.SelectorExpr) +- isPointer := func(t types.Type) bool { +- return is[*types.Pointer](t) +- } +- if seln, ok := info.Selections[sel]; ok && seln.Kind() == types.MethodVal { +- // Have: recv.f() method call +- methodRecv := seln.Obj().(*types.Func).Signature().Recv().Type() +- return !seln.Indirect() && isPointer(methodRecv) && !isPointer(info.TypeOf(sel.X)) +- } +- } +- +- return false +-} +- +-// unparenEnclosing removes enclosing parens from cur in +-// preparation for a call to [Cursor.ParentEdge]. 
+-func unparenEnclosing(cur inspector.Cursor) inspector.Cursor { +- for astutil.IsChildOf(cur, edge.ParenExpr_X) { +- cur = cur.Parent() +- } +- return cur +-} +- +-// inlineVariableOne computes a fix to replace the selected variable by +-// its initialization expression. +-func inlineVariableOne(pkg *cache.Package, pgf *parsego.File, start, end token.Pos) (*token.FileSet, *analysis.SuggestedFix, error) { +- countInlineVariable.Inc() +- info := pkg.TypesInfo() +- curUse, curRHS, ok := canInlineVariable(info, pgf.Cursor, start, end) +- if !ok { +- return nil, nil, fmt.Errorf("cannot inline variable here") +- } +- use := curUse.Node().(*ast.Ident) +- +- // Check that free symbols of rhs are unshadowed at curUse. +- var ( +- pos = use.Pos() +- scope = info.Scopes[pgf.File].Innermost(pos) +- ) +- for curIdent := range curRHS.Preorder((*ast.Ident)(nil)) { +- if astutil.IsChildOf(curIdent, edge.SelectorExpr_Sel) { +- continue // ignore f in x.f +- } +- id := curIdent.Node().(*ast.Ident) +- obj1 := info.Uses[id] +- if obj1 == nil { +- continue // undefined; or a def, not a use +- } +- if internalastutil.NodeContains(curRHS.Node(), obj1.Pos()) { +- continue // not free (id is defined within RHS) +- } +- _, obj2 := scope.LookupParent(id.Name, pos) +- if obj1 != obj2 { +- return nil, nil, fmt.Errorf("cannot inline variable: its initializer expression refers to %q, which is shadowed by the declaration at line %d", id.Name, safetoken.Position(pgf.Tok, obj2.Pos()).Line) +- } +- } +- +- // TODO(adonovan): also reject variables that are updated by assignments? +- +- return pkg.FileSet(), &analysis.SuggestedFix{ +- Message: fmt.Sprintf("Replace variable %q by its initializer expression", use.Name), +- TextEdits: []analysis.TextEdit{ +- { +- Pos: use.Pos(), +- End: use.End(), +- NewText: []byte(FormatNode(pkg.FileSet(), curRHS.Node())), +- }, +- }, +- }, nil +-} +diff -urN a/gopls/internal/golang/invertifcondition.go b/gopls/internal/golang/invertifcondition.go +--- a/gopls/internal/golang/invertifcondition.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/golang/invertifcondition.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,266 +0,0 @@ +-// Copyright 2023 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. 
+- +-package golang +- +-import ( +- "fmt" +- "go/ast" +- "go/token" +- "strings" +- +- "golang.org/x/tools/go/analysis" +- "golang.org/x/tools/go/ast/inspector" +- "golang.org/x/tools/gopls/internal/cache" +- "golang.org/x/tools/gopls/internal/cache/parsego" +- "golang.org/x/tools/gopls/internal/util/cursorutil" +- "golang.org/x/tools/gopls/internal/util/safetoken" +-) +- +-// invertIfCondition is a singleFileFixFunc that inverts an if/else statement +-func invertIfCondition(pkg *cache.Package, pgf *parsego.File, start, end token.Pos) (*token.FileSet, *analysis.SuggestedFix, error) { +- var ( +- fset = pkg.FileSet() +- src = pgf.Src +- ) +- +- ifStatement, _, err := canInvertIfCondition(pgf.Cursor, start, end) +- if err != nil { +- return nil, nil, err +- } +- +- var replaceElse analysis.TextEdit +- +- endsWithReturn, err := endsWithReturn(ifStatement.Else) +- if err != nil { +- return nil, nil, err +- } +- +- if endsWithReturn { +- // Replace the whole else part with an empty line and an unindented +- // version of the original if body +- sourcePos := safetoken.StartPosition(fset, ifStatement.Pos()) +- +- indent := max(sourcePos.Column-1, 0) +- +- standaloneBodyText := ifBodyToStandaloneCode(fset, ifStatement.Body, src) +- replaceElse = analysis.TextEdit{ +- Pos: ifStatement.Body.Rbrace + 1, // 1 == len("}") +- End: ifStatement.End(), +- NewText: []byte("\n\n" + strings.Repeat("\t", indent) + standaloneBodyText), +- } +- } else { +- // Replace the else body text with the if body text +- bodyStart := safetoken.StartPosition(fset, ifStatement.Body.Lbrace) +- bodyEnd := safetoken.EndPosition(fset, ifStatement.Body.Rbrace+1) // 1 == len("}") +- bodyText := src[bodyStart.Offset:bodyEnd.Offset] +- replaceElse = analysis.TextEdit{ +- Pos: ifStatement.Else.Pos(), +- End: ifStatement.Else.End(), +- NewText: bodyText, +- } +- } +- +- // Replace the if text with the else text +- elsePosInSource := safetoken.StartPosition(fset, ifStatement.Else.Pos()) +- elseEndInSource := safetoken.EndPosition(fset, ifStatement.Else.End()) +- elseText := src[elsePosInSource.Offset:elseEndInSource.Offset] +- replaceBodyWithElse := analysis.TextEdit{ +- Pos: ifStatement.Body.Pos(), +- End: ifStatement.Body.End(), +- NewText: elseText, +- } +- +- // Replace the if condition with its inverse +- inverseCondition, err := invertCondition(fset, ifStatement.Cond, src) +- if err != nil { +- return nil, nil, err +- } +- replaceConditionWithInverse := analysis.TextEdit{ +- Pos: ifStatement.Cond.Pos(), +- End: ifStatement.Cond.End(), +- NewText: inverseCondition, +- } +- +- // Return a SuggestedFix with just that TextEdit in there +- return fset, &analysis.SuggestedFix{ +- TextEdits: []analysis.TextEdit{ +- replaceConditionWithInverse, +- replaceBodyWithElse, +- replaceElse, +- }, +- }, nil +-} +- +-func endsWithReturn(elseBranch ast.Stmt) (bool, error) { +- elseBlock, isBlockStatement := elseBranch.(*ast.BlockStmt) +- if !isBlockStatement { +- return false, fmt.Errorf("unable to figure out whether this ends with return: %T", elseBranch) +- } +- +- if len(elseBlock.List) == 0 { +- // Empty blocks don't end in returns +- return false, nil +- } +- +- lastStatement := elseBlock.List[len(elseBlock.List)-1] +- +- _, lastStatementIsReturn := lastStatement.(*ast.ReturnStmt) +- return lastStatementIsReturn, nil +-} +- +-// Turn { fmt.Println("Hello") } into just fmt.Println("Hello"), with one less +-// level of indentation. +-// +-// The first line of the result will not be indented, but all of the following +-// lines will. 
+-func ifBodyToStandaloneCode(fset *token.FileSet, ifBody *ast.BlockStmt, src []byte) string { +- // Get the whole body (without the surrounding braces) as a string +- bodyStart := safetoken.StartPosition(fset, ifBody.Lbrace+1) // 1 == len("}") +- bodyEnd := safetoken.EndPosition(fset, ifBody.Rbrace) +- bodyWithoutBraces := string(src[bodyStart.Offset:bodyEnd.Offset]) +- bodyWithoutBraces = strings.TrimSpace(bodyWithoutBraces) +- +- // Unindent +- bodyWithoutBraces = strings.ReplaceAll(bodyWithoutBraces, "\n\t", "\n") +- +- return bodyWithoutBraces +-} +- +-func invertCondition(fset *token.FileSet, cond ast.Expr, src []byte) ([]byte, error) { +- condStart := safetoken.StartPosition(fset, cond.Pos()) +- condEnd := safetoken.EndPosition(fset, cond.End()) +- oldText := string(src[condStart.Offset:condEnd.Offset]) +- +- switch expr := cond.(type) { +- case *ast.Ident, *ast.ParenExpr, *ast.CallExpr, *ast.StarExpr, *ast.IndexExpr, *ast.IndexListExpr, *ast.SelectorExpr: +- newText := "!" + oldText +- if oldText == "true" { +- newText = "false" +- } else if oldText == "false" { +- newText = "true" +- } +- +- return []byte(newText), nil +- +- case *ast.UnaryExpr: +- if expr.Op != token.NOT { +- // This should never happen +- return dumbInvert(fset, cond, src), nil +- } +- +- inverse := expr.X +- if p, isParen := inverse.(*ast.ParenExpr); isParen { +- // We got !(x), remove the parentheses with the ! so we get just "x" +- inverse = p.X +- +- start := safetoken.StartPosition(fset, inverse.Pos()) +- end := safetoken.EndPosition(fset, inverse.End()) +- if start.Line != end.Line { +- // The expression is multi-line, so we can't remove the parentheses +- inverse = expr.X +- } +- } +- +- start := safetoken.StartPosition(fset, inverse.Pos()) +- end := safetoken.EndPosition(fset, inverse.End()) +- textWithoutNot := src[start.Offset:end.Offset] +- +- return textWithoutNot, nil +- +- case *ast.BinaryExpr: +- // These inversions are unsound for floating point NaN, but that's ok. +- negations := map[token.Token]string{ +- token.EQL: "!=", +- token.LSS: ">=", +- token.GTR: "<=", +- token.NEQ: "==", +- token.LEQ: ">", +- token.GEQ: "<", +- } +- +- negation, negationFound := negations[expr.Op] +- if !negationFound { +- return invertAndOr(fset, expr, src) +- } +- +- xPosInSource := safetoken.StartPosition(fset, expr.X.Pos()) +- opPosInSource := safetoken.StartPosition(fset, expr.OpPos) +- yPosInSource := safetoken.StartPosition(fset, expr.Y.Pos()) +- +- textBeforeOp := string(src[xPosInSource.Offset:opPosInSource.Offset]) +- +- oldOpWithTrailingWhitespace := string(src[opPosInSource.Offset:yPosInSource.Offset]) +- newOpWithTrailingWhitespace := negation + oldOpWithTrailingWhitespace[len(expr.Op.String()):] +- +- textAfterOp := string(src[yPosInSource.Offset:condEnd.Offset]) +- +- return []byte(textBeforeOp + newOpWithTrailingWhitespace + textAfterOp), nil +- } +- +- return dumbInvert(fset, cond, src), nil +-} +- +-// dumbInvert is a fallback, inverting cond into !(cond). 
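To make the rewrite concrete, a small sketch of an if/else that invert-if-condition could act on; classify and its branches are made up purely for illustration:

package example

func classify(n int) string {
	// Inverting the condition negates the operator (">" becomes "<=")
	// and swaps the two branches, giving roughly:
	//
	//	if n <= 0 { return "negative" } else { return "positive" }
	if n > 0 {
		return "positive"
	} else {
		return "negative"
	}
}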
+-func dumbInvert(fset *token.FileSet, expr ast.Expr, src []byte) []byte { +- start := safetoken.StartPosition(fset, expr.Pos()) +- end := safetoken.EndPosition(fset, expr.End()) +- text := string(src[start.Offset:end.Offset]) +- return []byte("!(" + text + ")") +-} +- +-func invertAndOr(fset *token.FileSet, expr *ast.BinaryExpr, src []byte) ([]byte, error) { +- if expr.Op != token.LAND && expr.Op != token.LOR { +- // Neither AND nor OR, don't know how to invert this +- return dumbInvert(fset, expr, src), nil +- } +- +- oppositeOp := "&&" +- if expr.Op == token.LAND { +- oppositeOp = "||" +- } +- +- xEndInSource := safetoken.EndPosition(fset, expr.X.End()) +- opPosInSource := safetoken.StartPosition(fset, expr.OpPos) +- whitespaceAfterBefore := src[xEndInSource.Offset:opPosInSource.Offset] +- +- invertedBefore, err := invertCondition(fset, expr.X, src) +- if err != nil { +- return nil, err +- } +- +- invertedAfter, err := invertCondition(fset, expr.Y, src) +- if err != nil { +- return nil, err +- } +- +- yPosInSource := safetoken.StartPosition(fset, expr.Y.Pos()) +- +- oldOpWithTrailingWhitespace := string(src[opPosInSource.Offset:yPosInSource.Offset]) +- newOpWithTrailingWhitespace := oppositeOp + oldOpWithTrailingWhitespace[len(expr.Op.String()):] +- +- return []byte(string(invertedBefore) + string(whitespaceAfterBefore) + newOpWithTrailingWhitespace + string(invertedAfter)), nil +-} +- +-// canInvertIfCondition reports whether we can do invert-if-condition on the +-// code in the given range. +-func canInvertIfCondition(curFile inspector.Cursor, start, end token.Pos) (*ast.IfStmt, bool, error) { +- curIf, _ := curFile.FindByPos(start, end) +- stmt, _ := cursorutil.FirstEnclosing[*ast.IfStmt](curIf) +- if stmt == nil { +- return nil, false, fmt.Errorf("not an if statement") +- } +- if stmt.Else == nil { +- // Can't invert conditions without else clauses +- return nil, false, fmt.Errorf("else clause required") +- } +- +- if _, hasElseIf := stmt.Else.(*ast.IfStmt); hasElseIf { +- // Can't invert conditions with else-if clauses, unclear what that +- // would look like +- return nil, false, fmt.Errorf("else-if not supported") +- } +- +- return stmt, true, nil +-} +diff -urN a/gopls/internal/golang/known_packages.go b/gopls/internal/golang/known_packages.go +--- a/gopls/internal/golang/known_packages.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/golang/known_packages.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,137 +0,0 @@ +-// Copyright 2020 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package golang +- +-import ( +- "context" +- "go/parser" +- "go/token" +- "sort" +- "strings" +- "sync" +- "time" +- +- "golang.org/x/tools/gopls/internal/cache" +- "golang.org/x/tools/gopls/internal/cache/metadata" +- "golang.org/x/tools/gopls/internal/file" +- "golang.org/x/tools/internal/event" +- "golang.org/x/tools/internal/imports" +-) +- +-// KnownPackagePaths returns a new list of package paths of all known +-// packages in the package graph that could potentially be imported by +-// the given file. The list is ordered lexicographically, except that +-// all dot-free paths (standard packages) appear before dotful ones. +-// +-// It is part of the gopls.list_known_packages command. 
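A self-contained sketch of the ordering described in the comment above, applied to a made-up candidate list; the comparator mirrors the one in the function that follows:

package main

import (
	"fmt"
	"sort"
	"strings"
)

func main() {
	paths := []string{"golang.org/x/tools/txtar", "bytes", "example.com/m", "fmt"}
	sort.Slice(paths, func(i, j int) bool {
		iDot := strings.Contains(paths[i], ".")
		jDot := strings.Contains(paths[j], ".")
		if iDot != jDot {
			return jDot // dot-free (standard library) paths sort first
		}
		return paths[i] < paths[j]
	})
	fmt.Println(paths) // [bytes fmt example.com/m golang.org/x/tools/txtar]
}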
+-func KnownPackagePaths(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle) ([]PackagePath, error) { +- // This algorithm is expressed in terms of Metadata, not Packages, +- // so it doesn't cause or wait for type checking. +- +- current, err := snapshot.NarrowestMetadataForFile(ctx, fh.URI()) +- if err != nil { +- return nil, err // e.g. context cancelled +- } +- +- // Parse the file's imports so we can compute which +- // PackagePaths are imported by this specific file. +- src, err := fh.Content() +- if err != nil { +- return nil, err +- } +- file, err := parser.ParseFile(token.NewFileSet(), fh.URI().Path(), src, parser.ImportsOnly) +- if err != nil { +- return nil, err +- } +- imported := make(map[PackagePath]bool) +- for _, imp := range file.Imports { +- if id := current.DepsByImpPath[metadata.UnquoteImportPath(imp)]; id != "" { +- if mp := snapshot.Metadata(id); mp != nil { +- imported[mp.PkgPath] = true +- } +- } +- } +- +- // Now find candidates among all known packages. +- knownPkgs, err := snapshot.AllMetadata(ctx) +- if err != nil { +- return nil, err +- } +- seen := make(map[PackagePath]bool) +- for _, knownPkg := range knownPkgs { +- // package main cannot be imported +- if knownPkg.Name == "main" { +- continue +- } +- // test packages cannot be imported +- if knownPkg.ForTest != "" { +- continue +- } +- // No need to import what the file already imports. +- // This check is based on PackagePath, not PackageID, +- // so that all test variants are filtered out too. +- if imported[knownPkg.PkgPath] { +- continue +- } +- // make sure internal packages are importable by the file +- if !metadata.IsValidImport(current.PkgPath, knownPkg.PkgPath, snapshot.View().Type() != cache.GoPackagesDriverView) { +- continue +- } +- // naive check on cyclical imports +- if isDirectlyCyclical(current, knownPkg) { +- continue +- } +- // AllMetadata may have multiple variants of a pkg. +- seen[knownPkg.PkgPath] = true +- } +- +- // Augment the set by invoking the goimports algorithm. +- if err := snapshot.RunProcessEnvFunc(ctx, func(ctx context.Context, o *imports.Options) error { +- ctx, cancel := context.WithTimeout(ctx, time.Millisecond*80) +- defer cancel() +- var seenMu sync.Mutex +- wrapped := func(ifix imports.ImportFix) { +- seenMu.Lock() +- defer seenMu.Unlock() +- // TODO(adonovan): what if the actual package path has a vendor/ prefix? +- seen[PackagePath(ifix.StmtInfo.ImportPath)] = true +- } +- return imports.GetAllCandidates(ctx, wrapped, "", fh.URI().Path(), string(current.Name), o.Env) +- }); err != nil { +- // If goimports failed, proceed with just the candidates from the metadata. +- event.Error(ctx, "imports.GetAllCandidates", err) +- } +- +- // Sort lexicographically, but with std before non-std packages. +- paths := make([]PackagePath, 0, len(seen)) +- for path := range seen { +- paths = append(paths, path) +- } +- sort.Slice(paths, func(i, j int) bool { +- importI, importJ := paths[i], paths[j] +- iHasDot := strings.Contains(string(importI), ".") +- jHasDot := strings.Contains(string(importJ), ".") +- if iHasDot != jHasDot { +- return jHasDot // dot-free paths (standard packages) compare less +- } +- return importI < importJ +- }) +- +- return paths, nil +-} +- +-// isDirectlyCyclical checks if imported directly imports pkg. +-// It does not (yet) offer a full cyclical check because showing a user +-// a list of importable packages already generates a very large list +-// and having a few false positives in there could be worth the +-// performance snappiness. 
+-// +-// TODO(adonovan): ensure that metadata graph is always cyclic! +-// Many algorithms will get confused or even stuck in the +-// presence of cycles. Then replace this function by 'false'. +-func isDirectlyCyclical(pkg, imported *metadata.Package) bool { +- _, ok := imported.DepsByPkgPath[pkg.PkgPath] +- return ok +-} +diff -urN a/gopls/internal/golang/lines.go b/gopls/internal/golang/lines.go +--- a/gopls/internal/golang/lines.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/golang/lines.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,269 +0,0 @@ +-// Copyright 2024 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package golang +- +-// This file defines refactorings for splitting lists of elements +-// (arguments, literals, etc) across multiple lines, and joining +-// them into a single line. +- +-import ( +- "bytes" +- "go/ast" +- "go/token" +- "slices" +- "sort" +- "strings" +- +- "golang.org/x/tools/go/analysis" +- "golang.org/x/tools/go/ast/edge" +- "golang.org/x/tools/go/ast/inspector" +- "golang.org/x/tools/gopls/internal/cache" +- "golang.org/x/tools/gopls/internal/cache/parsego" +- "golang.org/x/tools/gopls/internal/util/safetoken" +-) +- +-// canSplitLines checks whether we can split lists of elements inside +-// an enclosing curly bracket/parens into separate lines. +-func canSplitLines(curFile inspector.Cursor, fset *token.FileSet, start, end token.Pos) (string, bool, error) { +- itemType, items, comments, _, _, _ := findSplitJoinTarget(fset, curFile, nil, start, end) +- if itemType == "" { +- return "", false, nil +- } +- +- if !canSplitJoinLines(items, comments) { +- return "", false, nil +- } +- +- for i := 1; i < len(items); i++ { +- prevLine := safetoken.EndPosition(fset, items[i-1].End()).Line +- curLine := safetoken.StartPosition(fset, items[i].Pos()).Line +- if prevLine == curLine { +- return "Split " + itemType + " into separate lines", true, nil +- } +- } +- +- return "", false, nil +-} +- +-// canJoinLines checks whether we can join lists of elements inside an +-// enclosing curly bracket/parens into a single line. +-func canJoinLines(curFile inspector.Cursor, fset *token.FileSet, start, end token.Pos) (string, bool, error) { +- itemType, items, comments, _, _, _ := findSplitJoinTarget(fset, curFile, nil, start, end) +- if itemType == "" { +- return "", false, nil +- } +- +- if !canSplitJoinLines(items, comments) { +- return "", false, nil +- } +- +- for i := 1; i < len(items); i++ { +- prevLine := safetoken.EndPosition(fset, items[i-1].End()).Line +- curLine := safetoken.StartPosition(fset, items[i].Pos()).Line +- if prevLine != curLine { +- return "Join " + itemType + " into one line", true, nil +- } +- } +- +- return "", false, nil +-} +- +-// canSplitJoinLines determines whether we should split/join the lines or not. +-func canSplitJoinLines(items []ast.Node, comments []*ast.CommentGroup) bool { +- if len(items) <= 1 { +- return false +- } +- +- for _, cg := range comments { +- if !strings.HasPrefix(cg.List[0].Text, "/*") { +- return false // can't split/join lists containing "//" comments +- } +- } +- +- return true +-} +- +-// splitLines is a singleFile fixer. 
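For orientation, a hypothetical argument list in its joined and split forms; the two only differ in the separating whitespace and the trailing comma, which is exactly what the fixers below rewrite (record, joined, and split are made-up names):

package example

func record(a, b, c int) {}

// Joined form: all elements on one line, a candidate for the split fixer.
func joined() { record(1, 2, 3) }

// Split form: one element per line with a trailing comma, a candidate for the join fixer.
func split() {
	record(
		1,
		2,
		3,
	)
}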
+-func splitLines(pkg *cache.Package, pgf *parsego.File, start, end token.Pos) (*token.FileSet, *analysis.SuggestedFix, error) { +- fset := pkg.FileSet() +- itemType, items, comments, indent, braceOpen, braceClose := findSplitJoinTarget(fset, pgf.Cursor, pgf.Src, start, end) +- if itemType == "" { +- return nil, nil, nil // no fix available +- } +- +- return fset, processLines(fset, items, comments, pgf.Src, braceOpen, braceClose, ",\n", "\n", ",\n"+indent, indent+"\t"), nil +-} +- +-// joinLines is a singleFile fixer. +-func joinLines(pkg *cache.Package, pgf *parsego.File, start, end token.Pos) (*token.FileSet, *analysis.SuggestedFix, error) { +- fset := pkg.FileSet() +- itemType, items, comments, _, braceOpen, braceClose := findSplitJoinTarget(fset, pgf.Cursor, pgf.Src, start, end) +- if itemType == "" { +- return nil, nil, nil // no fix available +- } +- +- return fset, processLines(fset, items, comments, pgf.Src, braceOpen, braceClose, ", ", "", "", ""), nil +-} +- +-// processLines is the common operation for both split and join lines because this split/join operation is +-// essentially a transformation of the separating whitespace. +-func processLines(fset *token.FileSet, items []ast.Node, comments []*ast.CommentGroup, src []byte, braceOpen, braceClose token.Pos, sep, prefix, suffix, indent string) *analysis.SuggestedFix { +- nodes := slices.Clone(items) +- +- // box *ast.CommentGroup to ast.Node for easier processing later. +- for _, cg := range comments { +- nodes = append(nodes, cg) +- } +- +- // Sort to interleave comments and nodes. +- sort.Slice(nodes, func(i, j int) bool { +- return nodes[i].Pos() < nodes[j].Pos() +- }) +- +- edits := []analysis.TextEdit{ +- { +- Pos: token.Pos(int(braceOpen) + len("{")), +- End: nodes[0].Pos(), +- NewText: []byte(prefix + indent), +- }, +- { +- Pos: nodes[len(nodes)-1].End(), +- End: braceClose, +- NewText: []byte(suffix), +- }, +- } +- +- for i := 1; i < len(nodes); i++ { +- pos, end := nodes[i-1].End(), nodes[i].Pos() +- if pos > end { +- // this will happen if we have a /*-style comment inside of a Field +- // e.g. `a /*comment here */ int` +- // +- // we will ignore as we only care about finding the field delimiter. +- continue +- } +- +- // at this point, the `,` token in between 2 nodes here must be the field delimiter. +- posOffset := safetoken.EndPosition(fset, pos).Offset +- endOffset := safetoken.StartPosition(fset, end).Offset +- if bytes.IndexByte(src[posOffset:endOffset], ',') == -1 { +- // nodes[i] or nodes[i-1] is a comment hence no delimiter in between +- // in such case, do nothing. +- continue +- } +- +- edits = append(edits, analysis.TextEdit{Pos: pos, End: end, NewText: []byte(sep + indent)}) +- +- // Print the Ellipsis if we synthesized one earlier. +- if is[*ast.Ellipsis](nodes[i]) { +- edits = append(edits, analysis.TextEdit{ +- Pos: nodes[i].End(), +- End: nodes[i].End(), +- NewText: []byte("..."), +- }) +- } +- } +- +- return &analysis.SuggestedFix{TextEdits: edits} +-} +- +-// findSplitJoinTarget returns the first curly bracket/parens that encloses the current cursor. +-func findSplitJoinTarget(fset *token.FileSet, curFile inspector.Cursor, src []byte, start, end token.Pos) (itemType string, items []ast.Node, comments []*ast.CommentGroup, indent string, open, close token.Pos) { +- +- findTarget := func() (targetType string, target ast.Node, open, close token.Pos) { +- cur, _ := curFile.FindByPos(start, end) +- for cur := range cur.Enclosing() { +- // TODO: do cur = enclosingUnparen(cur) first, once CL 701035 lands. 
+- ek, _ := cur.ParentEdge() +- switch ek { +- // params or results of func signature +- // Note: +- // - each ast.Field (e.g. "x, y, z int") is considered a single item. +- // - splitting Params and Results lists is not usually good style. +- case edge.FuncType_Params: +- p := cur.Node().(*ast.FieldList) +- return "parameters", p, p.Opening, p.Closing +- case edge.FuncType_Results: +- r := cur.Node().(*ast.FieldList) +- if !r.Opening.IsValid() { +- continue +- } +- return "results", r, r.Opening, r.Closing +- case edge.CallExpr_Args: // f(a, b, c) +- node := cur.Parent().Node().(*ast.CallExpr) +- return "arguments", node, node.Lparen, node.Rparen +- case edge.CompositeLit_Elts: // T{a, b, c} +- node := cur.Parent().Node().(*ast.CompositeLit) +- return "elements", node, node.Lbrace, node.Rbrace +- } +- } +- return "", nil, 0, 0 +- } +- +- targetType, targetNode, open, close := findTarget() +- if targetType == "" { +- return "", nil, nil, "", 0, 0 +- } +- +- switch node := targetNode.(type) { +- case *ast.FieldList: +- for _, field := range node.List { +- items = append(items, field) +- } +- case *ast.CallExpr: +- for _, arg := range node.Args { +- items = append(items, arg) +- } +- +- // Preserve "..." by wrapping the last +- // argument in an Ellipsis node +- // with the same Pos/End as the argument. +- // See corresponding logic in processLines. +- if node.Ellipsis.IsValid() { +- last := &items[len(items)-1] +- *last = &ast.Ellipsis{ +- Ellipsis: (*last).Pos(), // determines Ellipsis.Pos() +- Elt: (*last).(ast.Expr), // determines Ellipsis.End() +- } +- } +- case *ast.CompositeLit: +- for _, arg := range node.Elts { +- items = append(items, arg) +- } +- } +- +- // preserve comments separately as it's not part of the targetNode AST. +- file := curFile.Node().(*ast.File) +- for _, cg := range file.Comments { +- if open <= cg.Pos() && cg.Pos() < close { +- comments = append(comments, cg) +- } +- } +- +- // indent is the leading whitespace before the opening curly bracket/paren. +- // +- // in case where we don't have access to src yet i.e. src == nil +- // it's fine to return incorrect indent because we don't need it yet. +- indent = "" +- if len(src) > 0 { +- var pos token.Pos +- switch node := targetNode.(type) { +- case *ast.FieldList: +- pos = node.Opening +- case *ast.CallExpr: +- pos = node.Lparen +- case *ast.CompositeLit: +- pos = node.Lbrace +- } +- +- split := bytes.Split(src, []byte("\n")) +- targetLineNumber := safetoken.StartPosition(fset, pos).Line +- firstLine := string(split[targetLineNumber-1]) +- trimmed := strings.TrimSpace(string(firstLine)) +- indent = firstLine[:strings.Index(firstLine, trimmed)] +- } +- +- return targetType, items, comments, indent, open, close +-} +diff -urN a/gopls/internal/golang/linkname.go b/gopls/internal/golang/linkname.go +--- a/gopls/internal/golang/linkname.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/golang/linkname.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,140 +0,0 @@ +-// Copyright 2023 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. 
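As a side note on the indent computation at the end of findSplitJoinTarget above, a minimal standalone sketch; the example source line is invented:

    package main

    import (
        "fmt"
        "strings"
    )

    func main() {
        // The indent is the leading whitespace of the line that holds the
        // opening bracket: everything before the first non-blank character.
        firstLine := "\t\tfoo := bar(a, b, c)"
        trimmed := strings.TrimSpace(firstLine)
        indent := firstLine[:strings.Index(firstLine, trimmed)]
        fmt.Printf("indent=%q\n", indent) // indent="\t\t"
    }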
+- +-package golang +- +-import ( +- "context" +- "errors" +- "fmt" +- "go/token" +- "strings" +- +- "golang.org/x/tools/gopls/internal/cache" +- "golang.org/x/tools/gopls/internal/cache/metadata" +- "golang.org/x/tools/gopls/internal/cache/parsego" +- "golang.org/x/tools/gopls/internal/protocol" +-) +- +-// ErrNoLinkname is returned by LinknameDefinition when no linkname +-// directive is found at a particular position. +-// As such it indicates that other definitions could be worth checking. +-var ErrNoLinkname = errors.New("no linkname directive found") +- +-// linknameDefinition finds the definition of the linkname directive in m at pos. +-// If there is no linkname directive at pos, returns ErrNoLinkname. +-func linknameDefinition(ctx context.Context, snapshot *cache.Snapshot, m *protocol.Mapper, from protocol.Position) ([]protocol.Location, error) { +- pkgPath, name, _ := parseLinkname(m, from) +- if pkgPath == "" { +- return nil, ErrNoLinkname +- } +- +- _, pgf, pos, err := findLinkname(ctx, snapshot, PackagePath(pkgPath), name) +- if err != nil { +- return nil, fmt.Errorf("find linkname: %w", err) +- } +- loc, err := pgf.PosLocation(pos, pos+token.Pos(len(name))) +- if err != nil { +- return nil, fmt.Errorf("location of linkname: %w", err) +- } +- return []protocol.Location{loc}, nil +-} +- +-// parseLinkname attempts to parse a go:linkname declaration at the given pos. +-// If successful, it returns +-// - package path referenced +-// - object name referenced +-// - byte offset in mapped file of the start of the link target +-// of the linkname directives 2nd argument. +-// +-// If the position is not in the second argument of a go:linkname directive, +-// or parsing fails, it returns "", "", 0. +-func parseLinkname(m *protocol.Mapper, pos protocol.Position) (pkgPath, name string, targetOffset int) { +- lineStart, err := m.PositionOffset(protocol.Position{Line: pos.Line, Character: 0}) +- if err != nil { +- return "", "", 0 +- } +- lineEnd, err := m.PositionOffset(protocol.Position{Line: pos.Line + 1, Character: 0}) +- if err != nil { +- return "", "", 0 +- } +- +- directive := string(m.Content[lineStart:lineEnd]) +- // (Assumes no leading spaces.) +- if !strings.HasPrefix(directive, "//go:linkname") { +- return "", "", 0 +- } +- // Sometimes source code (typically tests) has another +- // comment after the directive, trim that away. +- if i := strings.LastIndex(directive, "//"); i != 0 { +- directive = strings.TrimSpace(directive[:i]) +- } +- +- // Looking for pkgpath in '//go:linkname f pkgpath.g'. +- // (We ignore 1-arg linkname directives.) +- parts := strings.Fields(directive) +- if len(parts) != 3 { +- return "", "", 0 +- } +- +- // Inside 2nd arg [start, end]? +- // (Assumes no trailing spaces.) +- offset, err := m.PositionOffset(pos) +- if err != nil { +- return "", "", 0 +- } +- end := lineStart + len(directive) +- start := end - len(parts[2]) +- if !(start <= offset && offset <= end) { +- return "", "", 0 +- } +- linkname := parts[2] +- +- // Split the pkg path from the name. +- dot := strings.LastIndexByte(linkname, '.') +- if dot < 0 { +- return "", "", 0 +- } +- +- return linkname[:dot], linkname[dot+1:], start +-} +- +-// findLinkname searches dependencies of packages containing fh for an object +-// with linker name matching the given package path and name. 
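A minimal standalone sketch of the directive parsing that parseLinkname above performs: the second argument of a two-argument //go:linkname directive is split at its last dot into a package path and an object name. The directive text in the example is invented:

    package main

    import (
        "fmt"
        "strings"
    )

    func main() {
        directive := "//go:linkname localName runtime.someFunc"
        parts := strings.Fields(directive)
        if len(parts) != 3 {
            fmt.Println("not a two-argument linkname directive")
            return
        }
        target := parts[2]
        dot := strings.LastIndexByte(target, '.')
        if dot < 0 {
            fmt.Println("malformed link target")
            return
        }
        // pkgpath="runtime" name="someFunc"
        fmt.Printf("pkgpath=%q name=%q\n", target[:dot], target[dot+1:])
    }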
+-func findLinkname(ctx context.Context, snapshot *cache.Snapshot, pkgPath PackagePath, name string) (*cache.Package, *parsego.File, token.Pos, error) { +- // Typically the linkname refers to a forward dependency +- // or a reverse dependency, but in general it may refer +- // to any package that is linked with this one. +- var pkgMeta *metadata.Package +- metas, err := snapshot.AllMetadata(ctx) +- if err != nil { +- return nil, nil, token.NoPos, err +- } +- metadata.RemoveIntermediateTestVariants(&metas) +- for _, meta := range metas { +- if meta.PkgPath == pkgPath { +- pkgMeta = meta +- break +- } +- } +- if pkgMeta == nil { +- return nil, nil, token.NoPos, fmt.Errorf("cannot find package %q", pkgPath) +- } +- +- // When found, type check the desired package (snapshot.TypeCheck in TypecheckFull mode), +- pkgs, err := snapshot.TypeCheck(ctx, pkgMeta.ID) +- if err != nil { +- return nil, nil, token.NoPos, err +- } +- pkg := pkgs[0] +- +- obj := pkg.Types().Scope().Lookup(name) +- if obj == nil { +- return nil, nil, token.NoPos, fmt.Errorf("package %q does not define %s", pkgPath, name) +- } +- +- pos := obj.Pos() +- pgf, err := pkg.FileEnclosing(pos) +- return pkg, pgf, pos, err +-} +diff -urN a/gopls/internal/golang/modify_tags.go b/gopls/internal/golang/modify_tags.go +--- a/gopls/internal/golang/modify_tags.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/golang/modify_tags.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,74 +0,0 @@ +-// Copyright 2025 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package golang +- +-import ( +- "bytes" +- "context" +- "fmt" +- "go/ast" +- "go/format" +- +- "github.com/fatih/gomodifytags/modifytags" +- "golang.org/x/tools/gopls/internal/cache" +- "golang.org/x/tools/gopls/internal/cache/parsego" +- "golang.org/x/tools/gopls/internal/file" +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/gopls/internal/protocol/command" +- "golang.org/x/tools/gopls/internal/util/cursorutil" +- "golang.org/x/tools/gopls/internal/util/tokeninternal" +- internalastutil "golang.org/x/tools/internal/astutil" +- "golang.org/x/tools/internal/diff" +-) +- +-// ModifyTags applies the given struct tag modifications to the specified struct. +-func ModifyTags(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle, args command.ModifyTagsArgs, m *modifytags.Modification) ([]protocol.DocumentChange, error) { +- pgf, err := snapshot.ParseGo(ctx, fh, parsego.Full) +- if err != nil { +- return nil, fmt.Errorf("error fetching package file: %v", err) +- } +- start, end, err := pgf.RangePos(args.Range) +- if err != nil { +- return nil, fmt.Errorf("error getting position information: %v", err) +- } +- // If the cursor is at a point and not a selection, we should use the entire enclosing struct. +- if start == end { +- cur, ok := pgf.Cursor.FindByPos(start, end) +- if !ok { +- return nil, fmt.Errorf("error finding start and end positions: %v", err) +- } +- structnode, _ := cursorutil.FirstEnclosing[*ast.StructType](cur) +- if structnode == nil { +- return nil, fmt.Errorf("no enclosing struct type") +- } +- start, end = structnode.Pos(), structnode.End() +- } +- +- // Create a copy of the file node in order to avoid race conditions when we modify the node in Apply. 
+- cloned := internalastutil.CloneNode(pgf.File) +- fset := tokeninternal.FileSetFor(pgf.Tok) +- +- if err = m.Apply(fset, cloned, start, end); err != nil { +- return nil, fmt.Errorf("could not modify tags: %v", err) +- } +- +- // Construct a list of DocumentChanges based on the diff between the formatted node and the +- // original file content. +- var after bytes.Buffer +- if err := format.Node(&after, fset, cloned); err != nil { +- return nil, err +- } +- edits := diff.Bytes(pgf.Src, after.Bytes()) +- if len(edits) == 0 { +- return nil, nil +- } +- textedits, err := protocol.EditsFromDiffEdits(pgf.Mapper, edits) +- if err != nil { +- return nil, fmt.Errorf("error computing edits for %s: %v", args.URI, err) +- } +- return []protocol.DocumentChange{ +- protocol.DocumentChangeEdit(fh, textedits), +- }, nil +-} +diff -urN a/gopls/internal/golang/origin.go b/gopls/internal/golang/origin.go +--- a/gopls/internal/golang/origin.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/golang/origin.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,30 +0,0 @@ +-// Copyright 2023 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package golang +- +-import "go/types" +- +-// containsOrigin reports whether the provided object set contains an object +-// with the same origin as the provided obj (which may be a synthetic object +-// created during instantiation). +-func containsOrigin(objSet map[types.Object]bool, obj types.Object) bool { +- objOrigin := origin(obj) +- for target := range objSet { +- if origin(target) == objOrigin { +- return true +- } +- } +- return false +-} +- +-func origin(obj types.Object) types.Object { +- switch obj := obj.(type) { +- case *types.Var: +- return obj.Origin() +- case *types.Func: +- return obj.Origin() +- } +- return obj +-} +diff -urN a/gopls/internal/golang/pkgdoc.go b/gopls/internal/golang/pkgdoc.go +--- a/gopls/internal/golang/pkgdoc.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/golang/pkgdoc.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,858 +0,0 @@ +-// Copyright 2024 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package golang +- +-// This file defines a simple HTML rendering of package documentation +-// in imitation of the style of pkg.go.dev. +-// +-// The current implementation is just a starting point and a +-// placeholder for a more sophisticated one. +-// +-// TODO(adonovan): +-// - rewrite using html/template. +-// Or factor with golang.org/x/pkgsite/internal/godoc/dochtml. +-// - emit breadcrumbs for parent + sibling packages. +-// - list promoted methods---we have type information! (golang/go#67158) +-// - gather Example tests, following go/doc and pkgsite. +-// - add option for doc.AllDecls: show non-exported symbols too. +-// - style the
  • bullets in the index as invisible. +-// - add push notifications such as didChange -> reload. +-// - there appears to be a maximum file size beyond which the +-// "source.doc" code action is not offered. Remove that. +-// - modify JS httpGET function to give a transient visual indication +-// when clicking a source link that the editor is being navigated +-// (in case it doesn't raise itself, like VS Code). +-// - move this into a new package, golang/web, and then +-// split out the various helpers without fear of polluting +-// the golang package namespace? +-// - show "Deprecated" chip when appropriate. +- +-import ( +- "bytes" +- "fmt" +- "go/ast" +- "go/doc" +- "go/doc/comment" +- "go/format" +- "go/token" +- "go/types" +- "html" +- "path/filepath" +- "slices" +- "strings" +- +- "golang.org/x/tools/go/ast/astutil" +- "golang.org/x/tools/gopls/internal/cache" +- "golang.org/x/tools/gopls/internal/cache/parsego" +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/gopls/internal/util/bug" +- internalastutil "golang.org/x/tools/internal/astutil" +- "golang.org/x/tools/internal/stdlib" +- "golang.org/x/tools/internal/typesinternal" +-) +- +-// DocFragment finds the package and (optionally) symbol identified by +-// the current selection, and returns the package path and the +-// optional symbol URL fragment (e.g. "#Buffer.Len") for a symbol, +-// along with a title for the code action. +-// +-// It is called once to offer the code action, and again when the +-// command is executed. This is slightly inefficient but ensures that +-// the title and package/symbol logic are consistent in all cases. +-// +-// It returns zeroes if there is nothing to see here (e.g. reference to a builtin). +-func DocFragment(pkg *cache.Package, pgf *parsego.File, start, end token.Pos) (pkgpath PackagePath, fragment, title string) { +- thing := thingAtPoint(pkg, pgf, start, end) +- +- makeTitle := func(kind string, imp *types.Package, name string) string { +- title := "Browse documentation for " + kind + " " +- if imp != nil && imp != pkg.Types() { +- title += imp.Name() + "." +- } +- return title + name +- } +- +- wholePackage := func(pkg *types.Package) (PackagePath, string, string) { +- // External test packages don't have /pkg doc pages, +- // so instead show the doc for the package under test. +- // (This named-based heuristic is imperfect.) +- if forTest := strings.TrimSuffix(pkg.Path(), "_test"); forTest != pkg.Path() { +- return PackagePath(forTest), "", makeTitle("package", nil, filepath.Base(forTest)) +- } +- +- return PackagePath(pkg.Path()), "", makeTitle("package", nil, pkg.Name()) +- } +- +- // Conceptually, we check cases in the order: +- // 1. symbol +- // 2. package +- // 3. enclosing +- // but the logic of cases 1 and 3 are identical, hence the odd factoring. +- +- // Imported package? +- if thing.pkg != nil && thing.symbol == nil { +- return wholePackage(thing.pkg) +- } +- +- // Symbol? +- var sym types.Object +- if thing.symbol != nil { +- sym = thing.symbol // reference to a symbol +- } else if thing.enclosing != nil { +- sym = thing.enclosing // selection is within a declaration of a symbol +- } +- if sym == nil { +- return wholePackage(pkg.Types()) // no symbol +- } +- +- // Built-in (error.Error, append or unsafe). +- // TODO(adonovan): handle builtins in /pkg viewer. +- if sym.Pkg() == nil { +- return "", "", "" // nothing to see here +- } +- pkgpath = PackagePath(sym.Pkg().Path()) +- +- // Unexported? Show enclosing type or package. 
+- if !sym.Exported() { +- // Unexported method of exported type? +- if fn, ok := sym.(*types.Func); ok { +- if recv := fn.Signature().Recv(); recv != nil { +- _, named := typesinternal.ReceiverNamed(recv) +- if named != nil && named.Obj().Exported() { +- sym = named.Obj() +- goto below +- } +- } +- } +- +- return wholePackage(sym.Pkg()) +- below: +- } +- +- // Reference to symbol in external test package? +- // Short-circuit: see comment in wholePackage. +- if strings.HasSuffix(string(pkgpath), "_test") { +- return wholePackage(pkg.Types()) +- } +- +- // package-level symbol? +- if typesinternal.IsPackageLevel(sym) { +- return pkgpath, sym.Name(), makeTitle(objectKind(sym), sym.Pkg(), sym.Name()) +- } +- +- // Inv: sym is field or method, or local. +- switch sym := sym.(type) { +- case *types.Func: // => method +- sig := sym.Signature() +- isPtr, named := typesinternal.ReceiverNamed(sig.Recv()) +- if named != nil { +- if !named.Obj().Exported() { +- return wholePackage(sym.Pkg()) // exported method of unexported type +- } +- name := fmt.Sprintf("(%s%s).%s", +- strings.Repeat("*", btoi(isPtr)), // for *T +- named.Obj().Name(), +- sym.Name()) +- fragment := named.Obj().Name() + "." + sym.Name() +- return pkgpath, fragment, makeTitle("method", sym.Pkg(), name) +- } +- +- case *types.Var: +- if sym.IsField() { +- // TODO(adonovan): support fields. +- // The Var symbol doesn't include the struct +- // type, so we need to use the logic from +- // Hover. (This isn't important for +- // DocFragment as fields don't have fragments, +- // but it matters to the grand unification of +- // Hover/Definition/DocFragment. +- } +- } +- +- // Field, non-exported method, or local declaration: +- // just show current package. +- return wholePackage(pkg.Types()) +-} +- +-// thing describes the package or symbol denoted by a selection. +-// +-// TODO(adonovan): Hover, Definition, and References all start by +-// identifying the selected object. Let's achieve a better factoring +-// of the common parts using this structure, including uniform +-// treatment of doc links, linkname, and suchlike. +-type thing struct { +- // At most one of these fields is set. +- // (The 'enclosing' field is a fallback for when neither +- // of the first two is set.) +- symbol types.Object // referenced symbol +- pkg *types.Package // referenced package +- enclosing types.Object // package-level symbol or method decl enclosing selection +-} +- +-func thingAtPoint(pkg *cache.Package, pgf *parsego.File, start, end token.Pos) thing { +- path, _ := astutil.PathEnclosingInterval(pgf.File, start, end) +- +- // In an import spec? +- if len(path) >= 3 { // [...ImportSpec GenDecl File] +- if spec, ok := path[len(path)-3].(*ast.ImportSpec); ok { +- if pkgname := pkg.TypesInfo().PkgNameOf(spec); pkgname != nil { +- return thing{pkg: pkgname.Imported()} +- } +- } +- } +- +- // Definition or reference to symbol? +- var obj types.Object +- if id, ok := path[0].(*ast.Ident); ok { +- obj = pkg.TypesInfo().ObjectOf(id) +- +- // Treat use to PkgName like ImportSpec. +- if pkgname, ok := obj.(*types.PkgName); ok { +- return thing{pkg: pkgname.Imported()} +- } +- +- } else if sel, ok := path[0].(*ast.SelectorExpr); ok { +- // e.g. selection is "fmt.Println" or just a portion ("mt.Prin") +- obj = pkg.TypesInfo().Uses[sel.Sel] +- } +- if obj != nil { +- return thing{symbol: obj} +- } +- +- // Find enclosing declaration. +- if n := len(path); n > 1 { +- switch decl := path[n-2].(type) { +- case *ast.FuncDecl: +- // method? 
+- if fn := pkg.TypesInfo().Defs[decl.Name]; fn != nil { +- return thing{enclosing: fn} +- } +- +- case *ast.GenDecl: +- // path=[... Spec? GenDecl File] +- for _, spec := range decl.Specs { +- if n > 2 && spec == path[n-3] { +- var name *ast.Ident +- switch spec := spec.(type) { +- case *ast.ValueSpec: +- // var, const: use first name +- name = spec.Names[0] +- case *ast.TypeSpec: +- name = spec.Name +- } +- if name != nil { +- return thing{enclosing: pkg.TypesInfo().Defs[name]} +- } +- break +- } +- } +- } +- } +- +- return thing{} // nothing to see here +-} +- +-// Web is an abstraction of gopls' web server. +-type Web interface { +- // PkgURL forms URLs of package or symbol documentation. +- PkgURL(viewID string, path PackagePath, fragment string) protocol.URI +- +- // SrcURL forms URLs that cause the editor to open a file at a specific position. +- SrcURL(filename string, line, col8 int) protocol.URI +-} +- +-// PackageDocHTML formats the package documentation page. +-// +-// The posURL function returns a URL that when visited, has the side +-// effect of causing gopls to direct the client editor to navigate to +-// the specified file/line/column position, in UTF-8 coordinates. +-// +-// TODO(adonovan): this function could use some unit tests; we +-// shouldn't have to use integration tests to cover microdetails of +-// HTML rendering. (It is tempting to abstract this function so that +-// it depends only on FileSet/File/Types/TypeInfo/etc, but we should +-// bend the tests to the production interfaces, not the other way +-// around.) +-func PackageDocHTML(viewID string, pkg *cache.Package, web Web) ([]byte, error) { +- // We can't use doc.NewFromFiles (even with doc.PreserveAST +- // mode) as it calls ast.NewPackage which assumes that each +- // ast.File has an ast.Scope and resolves identifiers to +- // (deprecated) ast.Objects. (This is golang/go#66290.) +- // But doc.New only requires pkg.{Name,Files}, +- // so we just boil it down. +- // +- // The only loss is doc.classifyExamples. +- // TODO(adonovan): simulate that too. +- fileMap := make(map[string]*ast.File) +- for _, f := range pkg.Syntax() { +- fileMap[pkg.FileSet().File(f.FileStart).Name()] = f +- } +- astpkg := &ast.Package{ +- Name: pkg.Types().Name(), +- Files: fileMap, +- } +- // PreserveAST mode only half works (golang/go#66449): it still +- // mutates ASTs when filtering out non-exported symbols. +- // As a workaround, enable AllDecls to suppress filtering, +- // and do it ourselves. +- mode := doc.PreserveAST | doc.AllDecls +- docpkg := doc.New(astpkg, pkg.Types().Path(), mode) +- +- // Discard non-exported symbols. +- // TODO(adonovan): do this conditionally, and expose option in UI. 
+- const showUnexported = false +- if !showUnexported { +- var ( +- unexported = func(name string) bool { return !token.IsExported(name) } +- filterValues = func(slice *[]*doc.Value) { +- delValue := func(v *doc.Value) bool { +- v.Names = slices.DeleteFunc(v.Names, unexported) +- return len(v.Names) == 0 +- } +- *slice = slices.DeleteFunc(*slice, delValue) +- } +- filterFuncs = func(funcs *[]*doc.Func) { +- *funcs = slices.DeleteFunc(*funcs, func(v *doc.Func) bool { +- return unexported(v.Name) +- }) +- } +- ) +- filterValues(&docpkg.Consts) +- filterValues(&docpkg.Vars) +- filterFuncs(&docpkg.Funcs) +- docpkg.Types = slices.DeleteFunc(docpkg.Types, func(t *doc.Type) bool { +- filterValues(&t.Consts) +- filterValues(&t.Vars) +- filterFuncs(&t.Funcs) +- filterFuncs(&t.Methods) +- if unexported(t.Name) { +- // If an unexported type has an exported constructor function, +- // treat the constructor as an ordinary standalone function. +- // We will sort Funcs again below. +- docpkg.Funcs = append(docpkg.Funcs, t.Funcs...) +- return true // delete this type +- } +- return false // keep this type +- }) +- slices.SortFunc(docpkg.Funcs, func(x, y *doc.Func) int { +- return strings.Compare(x.Name, y.Name) +- }) +- } +- +- // docHTML renders the doc comment as Markdown. +- // The fileNode is used to deduce the enclosing file +- // for the correct import mapping. +- // +- // It is not concurrency-safe. +- var docHTML func(fileNode ast.Node, comment string) []byte +- { +- // Adapt doc comment parser and printer +- // to our representation of Go packages +- // so that doc links (e.g. "[fmt.Println]") +- // become valid links. +- printer := &comment.Printer{ +- DocLinkURL: func(link *comment.DocLink) string { +- path := pkg.Metadata().PkgPath +- if link.ImportPath != "" { +- path = PackagePath(link.ImportPath) +- } +- fragment := link.Name +- if link.Recv != "" { +- fragment = link.Recv + "." + link.Name +- } +- return web.PkgURL(viewID, path, fragment) +- }, +- } +- parse := newDocCommentParser(pkg) +- docHTML = func(fileNode ast.Node, comment string) []byte { +- doc := parse(fileNode, comment) +- return printer.HTML(doc) +- } +- } +- +- scope := pkg.Types().Scope() +- escape := html.EscapeString +- +- title := fmt.Sprintf("%s package - %s - Gopls packages", +- pkg.Types().Name(), escape(pkg.Types().Path())) +- +- var buf bytes.Buffer +- buf.WriteString(` +- +- +- +- ` + title + ` +- +- +- +- +- +- +-
    +-\n") +- fmt.Fprintf(&buf, "
    \n") +- +- // -- main element -- +- +- // nodeHTML returns HTML markup for a syntax tree. +- // It replaces referring identifiers with links, +- // and adds style spans for strings and comments. +- nodeHTML := func(n ast.Node) string { +- +- // linkify returns the appropriate URL (if any) for an identifier. +- linkify := func(id *ast.Ident) protocol.URI { +- if obj, ok := pkg.TypesInfo().Uses[id]; ok && obj.Pkg() != nil { +- // imported package name? +- if pkgname, ok := obj.(*types.PkgName); ok { +- // TODO(adonovan): do this for Defs of PkgName too. +- return web.PkgURL(viewID, PackagePath(pkgname.Imported().Path()), "") +- } +- +- // package-level symbol? +- if obj.Parent() == obj.Pkg().Scope() { +- if obj.Pkg() == pkg.Types() { +- return "#" + obj.Name() // intra-package ref +- } else { +- return web.PkgURL(viewID, PackagePath(obj.Pkg().Path()), obj.Name()) +- } +- } +- +- // method of package-level named type? +- if fn, ok := obj.(*types.Func); ok { +- sig := fn.Signature() +- if sig.Recv() != nil { +- _, named := typesinternal.ReceiverNamed(sig.Recv()) +- if named != nil { +- fragment := named.Obj().Name() + "." + fn.Name() +- return web.PkgURL(viewID, PackagePath(fn.Pkg().Path()), fragment) +- } +- } +- return "" +- } +- +- // TODO(adonovan): field of package-level named struct type. +- // (Requires an index, since there's no way to +- // get from Var to Named.) +- } +- return "" +- } +- +- // Splice spans into HTML-escaped segments of the +- // original source buffer (which is usually but not +- // necessarily formatted). +- // +- // (For expedience we don't use the more sophisticated +- // approach taken by cmd/godoc and pkgsite's render +- // package, which emit the text, spans, and comments +- // in one traversal of the syntax tree.) +- // +- // TODO(adonovan): splice styled spans around comments too. +- // +- // TODO(adonovan): pkgsite prints specs from grouped +- // type decls like "type ( T1; T2 )" to make them +- // appear as separate decls. We should too. +- var buf bytes.Buffer +- for _, file := range pkg.CompiledGoFiles() { +- if internalastutil.NodeContains(file.File, n.Pos()) { +- pos := n.Pos() +- +- // emit emits source in the interval [pos:to] and updates pos. +- emit := func(to token.Pos) { +- // Ident and BasicLit always have a valid pos. +- // (Failure means the AST has been corrupted.) +- if !to.IsValid() { +- bug.Reportf("invalid Pos") +- } +- text, err := file.PosText(pos, to) +- if err != nil { +- bug.Reportf("invalid pos range: %v", err) +- return +- } +- buf.WriteString(escape(string(text))) +- pos = to +- } +- ast.Inspect(n, func(n ast.Node) bool { +- switch n := n.(type) { +- case *ast.Ident: +- emit(n.Pos()) +- pos = n.End() +- if url := linkify(n); url != "" { +- fmt.Fprintf(&buf, "%s", url, escape(n.Name)) +- } else { +- buf.WriteString(escape(n.Name)) // plain +- } +- +- case *ast.BasicLit: +- emit(n.Pos()) +- pos = n.End() +- fmt.Fprintf(&buf, "%s", escape(n.Value)) +- } +- return true +- }) +- emit(n.End()) +- return buf.String() +- } +- } +- +- // Original source not found. +- // Format the node without adornments. +- if err := format.Node(&buf, pkg.FileSet(), n); err != nil { +- // e.g. BadDecl? 
+- buf.Reset() +- fmt.Fprintf(&buf, "formatting error: %v", err) +- } +- return escape(buf.String()) +- } +- +- // fnString is like fn.String() except that it: +- // - shows the receiver name; +- // - uses space "(T) M()" not dot "(T).M()" after receiver; +- // - doesn't bother with the special case for interface receivers +- // since it is unreachable for the methods in go/doc. +- // - elides parameters after the first three: f(a, b, c, ...). +- fnString := func(fn *types.Func) string { +- pkgRelative := typesinternal.NameRelativeTo(pkg.Types()) +- +- sig := fn.Signature() +- +- // Emit "func (recv T) F". +- var buf bytes.Buffer +- buf.WriteString("func ") +- if recv := sig.Recv(); recv != nil { +- buf.WriteByte('(') +- if recv.Name() != "" { +- buf.WriteString(recv.Name()) +- buf.WriteByte(' ') +- } +- types.WriteType(&buf, recv.Type(), pkgRelative) +- buf.WriteByte(')') +- buf.WriteByte(' ') // (ObjectString uses a '.' here) +- } else if pkg := fn.Pkg(); pkg != nil { +- if s := pkgRelative(pkg); s != "" { +- buf.WriteString(s) +- buf.WriteByte('.') +- } +- } +- buf.WriteString(fn.Name()) +- +- // Emit signature. +- // +- // Elide parameters after the third one. +- // WriteSignature is too complex to fork, so we replace +- // parameters 4+ with "invalid type", format, +- // then post-process the string. +- if sig.Params().Len() > 3 { +- +- // Clone each TypeParam as NewSignatureType modifies them (#67294). +- cloneTparams := func(seq *types.TypeParamList) []*types.TypeParam { +- slice := make([]*types.TypeParam, seq.Len()) +- for i := range slice { +- tparam := seq.At(i) +- slice[i] = types.NewTypeParam(tparam.Obj(), tparam.Constraint()) +- } +- return slice +- } +- +- sig = types.NewSignatureType( +- sig.Recv(), +- cloneTparams(sig.RecvTypeParams()), +- cloneTparams(sig.TypeParams()), +- types.NewTuple(append( +- slices.Collect(sig.Params().Variables())[:3], +- types.NewParam(0, nil, "", types.Typ[types.Invalid]))...), +- sig.Results(), +- false) // any final ...T parameter is truncated +- } +- types.WriteSignature(&buf, sig, pkgRelative) +- return strings.ReplaceAll(buf.String(), ", invalid type)", ", ...)") +- } +- +- fmt.Fprintf(&buf, "
    \n") +- +- // package name +- fmt.Fprintf(&buf, "

    Package %s

    \n", pkg.Types().Name()) +- +- // import path +- fmt.Fprintf(&buf, "
    import %q
    \n", pkg.Types().Path()) +- +- // link to same package in pkg.go.dev +- fmt.Fprintf(&buf, "
    \n", +- "https://pkg.go.dev/"+string(pkg.Types().Path())) +- +- // package doc +- for _, f := range pkg.Syntax() { +- if f.Doc != nil { +- fmt.Fprintf(&buf, "
    %s
    \n", docHTML(f.Doc, docpkg.Doc)) +- break +- } +- } +- +- // symbol index +- fmt.Fprintf(&buf, "

    Index

    \n") +- fmt.Fprintf(&buf, "
      \n") +- if len(docpkg.Consts) > 0 { +- fmt.Fprintf(&buf, "
    • Constants
    • \n") +- } +- if len(docpkg.Vars) > 0 { +- fmt.Fprintf(&buf, "
    • Variables
    • \n") +- } +- for _, fn := range docpkg.Funcs { +- obj := scope.Lookup(fn.Name).(*types.Func) +- fmt.Fprintf(&buf, "
    • %s
    • \n", +- obj.Name(), escape(fnString(obj))) +- } +- for _, doctype := range docpkg.Types { +- tname := scope.Lookup(doctype.Name).(*types.TypeName) +- fmt.Fprintf(&buf, "
    • type %[1]s
    • \n", +- tname.Name()) +- +- if len(doctype.Funcs)+len(doctype.Methods) > 0 { +- fmt.Fprintf(&buf, "
        \n") +- +- // constructors +- for _, docfn := range doctype.Funcs { +- obj := scope.Lookup(docfn.Name).(*types.Func) +- fmt.Fprintf(&buf, "
      • %s
      • \n", +- docfn.Name, escape(fnString(obj))) +- } +- // methods +- for _, docmethod := range doctype.Methods { +- method, _, _ := types.LookupFieldOrMethod(tname.Type(), true, tname.Pkg(), docmethod.Name) +- fmt.Fprintf(&buf, "
      • %s
      • \n", +- doctype.Name, +- docmethod.Name, +- escape(fnString(method.(*types.Func)))) +- } +- fmt.Fprintf(&buf, "
      \n") +- } +- } +- // TODO(adonovan): add index of Examples here. +- fmt.Fprintf(&buf, "
    \n") +- +- // constants and variables +- values := func(vals []*doc.Value) { +- for _, v := range vals { +- // anchors +- for _, name := range v.Names { +- fmt.Fprintf(&buf, "\n", escape(name)) +- } +- +- // declaration +- decl2 := *v.Decl // shallow copy +- decl2.Doc = nil +- fmt.Fprintf(&buf, "
    %s
    \n", nodeHTML(&decl2)) +- +- // comment (if any) +- fmt.Fprintf(&buf, "
    %s
    \n", docHTML(v.Decl, v.Doc)) +- } +- } +- fmt.Fprintf(&buf, "

    Constants

    \n") +- if len(docpkg.Consts) == 0 { +- fmt.Fprintf(&buf, "
    (no constants)
    \n") +- } else { +- values(docpkg.Consts) +- } +- fmt.Fprintf(&buf, "

    Variables

    \n") +- if len(docpkg.Vars) == 0 { +- fmt.Fprintf(&buf, "
    (no variables)
    \n") +- } else { +- values(docpkg.Vars) +- } +- +- // addedInHTML returns an HTML division containing the Go release version at +- // which this obj became available. +- addedInHTML := func(obj types.Object) string { +- if sym := StdSymbolOf(obj); sym != nil && sym.Version != stdlib.Version(0) { +- return fmt.Sprintf("added in %v", sym.Version) +- } +- return "" +- } +- +- // package-level functions +- fmt.Fprintf(&buf, "

    Functions

    \n") +- // funcs emits a list of package-level functions, +- // possibly organized beneath the type they construct. +- funcs := func(funcs []*doc.Func) { +- for _, docfn := range funcs { +- obj := scope.Lookup(docfn.Name).(*types.Func) +- +- fmt.Fprintf(&buf, "

    func %s %s

    \n", +- docfn.Name, objHTML(pkg.FileSet(), web, obj), addedInHTML(obj)) +- +- // decl: func F(params) results +- fmt.Fprintf(&buf, "
    %s
    \n", +- nodeHTML(docfn.Decl.Type)) +- +- // comment (if any) +- fmt.Fprintf(&buf, "
    %s
    \n", docHTML(docfn.Decl, docfn.Doc)) +- } +- } +- funcs(docpkg.Funcs) +- +- // types and their subelements +- fmt.Fprintf(&buf, "

    Types

    \n") +- for _, doctype := range docpkg.Types { +- tname := scope.Lookup(doctype.Name).(*types.TypeName) +- +- // title and source link +- fmt.Fprintf(&buf, "

    type %s %s

    \n", +- doctype.Name, objHTML(pkg.FileSet(), web, tname), addedInHTML(tname)) +- +- // declaration +- // TODO(adonovan): excise non-exported struct fields somehow. +- decl2 := *doctype.Decl // shallow copy +- decl2.Doc = nil +- fmt.Fprintf(&buf, "
    %s
    \n", nodeHTML(&decl2)) +- +- // comment (if any) +- fmt.Fprintf(&buf, "
    %s
    \n", docHTML(doctype.Decl, doctype.Doc)) +- +- // subelements +- values(doctype.Consts) // constants of type T +- values(doctype.Vars) // vars of type T +- funcs(doctype.Funcs) // constructors of T +- +- // methods on T +- for _, docmethod := range doctype.Methods { +- method, _, _ := types.LookupFieldOrMethod(tname.Type(), true, tname.Pkg(), docmethod.Name) +- fmt.Fprintf(&buf, "

    func (%s) %s %s

    \n", +- doctype.Name, docmethod.Name, +- docmethod.Orig, // T or *T +- objHTML(pkg.FileSet(), web, method), addedInHTML(method)) +- +- // decl: func (x T) M(params) results +- fmt.Fprintf(&buf, "
    %s
    \n", +- nodeHTML(docmethod.Decl.Type)) +- +- // comment (if any) +- fmt.Fprintf(&buf, "
    %s
    \n", +- docHTML(docmethod.Decl, docmethod.Doc)) +- } +- } +- +- // source files +- fmt.Fprintf(&buf, "

    Source files

    \n") +- for _, filename := range docpkg.Filenames { +- fmt.Fprintf(&buf, "
    %s
    \n", +- sourceLink(filepath.Base(filename), web.SrcURL(filename, 1, 1))) +- } +- +- fmt.Fprintf(&buf, "
    \n") +- fmt.Fprintf(&buf, "\n") +- fmt.Fprintf(&buf, "\n") +- +- return buf.Bytes(), nil +-} +diff -urN a/gopls/internal/golang/references.go b/gopls/internal/golang/references.go +--- a/gopls/internal/golang/references.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/golang/references.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,688 +0,0 @@ +-// Copyright 2019 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package golang +- +-// This file defines the 'references' query based on a serializable +-// index constructed during type checking, thus avoiding the need to +-// type-check packages at search time. +-// +-// See the ./xrefs/ subpackage for the index construction and lookup. +-// +-// This implementation does not intermingle objects from distinct +-// calls to TypeCheck. +- +-import ( +- "context" +- "fmt" +- "go/ast" +- "go/token" +- "go/types" +- "sort" +- "strings" +- "sync" +- +- "golang.org/x/sync/errgroup" +- "golang.org/x/tools/go/types/objectpath" +- "golang.org/x/tools/go/types/typeutil" +- "golang.org/x/tools/gopls/internal/cache" +- "golang.org/x/tools/gopls/internal/cache/metadata" +- "golang.org/x/tools/gopls/internal/cache/methodsets" +- "golang.org/x/tools/gopls/internal/cache/parsego" +- "golang.org/x/tools/gopls/internal/file" +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/gopls/internal/util/safetoken" +- "golang.org/x/tools/internal/event" +-) +- +-// References returns a list of all references (sorted with +-// definitions before uses) to the object denoted by the identifier at +-// the given file/position, searching the entire workspace. +-func References(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle, pp protocol.Position, includeDeclaration bool) ([]protocol.Location, error) { +- references, err := references(ctx, snapshot, fh, pp, includeDeclaration) +- if err != nil { +- return nil, err +- } +- locations := make([]protocol.Location, len(references)) +- for i, ref := range references { +- locations[i] = ref.location +- } +- return locations, nil +-} +- +-// A reference describes an identifier that refers to the same +-// object as the subject of a References query. +-type reference struct { +- isDeclaration bool +- location protocol.Location +- pkgPath PackagePath // of declaring package (same for all elements of the slice) +-} +- +-// references returns a list of all references (sorted with +-// definitions before uses) to the object denoted by the identifier at +-// the given file/position, searching the entire workspace. +-func references(ctx context.Context, snapshot *cache.Snapshot, f file.Handle, pp protocol.Position, includeDeclaration bool) ([]reference, error) { +- ctx, done := event.Start(ctx, "golang.references") +- defer done() +- +- // Is the cursor within the package name declaration? 
+- _, inPackageName, err := parsePackageNameDecl(ctx, snapshot, f, pp) +- if err != nil { +- return nil, err +- } +- +- var refs []reference +- if inPackageName { +- refs, err = packageReferences(ctx, snapshot, f.URI()) +- } else { +- refs, err = ordinaryReferences(ctx, snapshot, f.URI(), pp) +- } +- if err != nil { +- return nil, err +- } +- +- sort.Slice(refs, func(i, j int) bool { +- x, y := refs[i], refs[j] +- if x.isDeclaration != y.isDeclaration { +- return x.isDeclaration // decls < refs +- } +- return protocol.CompareLocation(x.location, y.location) < 0 +- }) +- +- // De-duplicate by location, and optionally remove declarations. +- out := refs[:0] +- for _, ref := range refs { +- if !includeDeclaration && ref.isDeclaration { +- continue +- } +- if len(out) == 0 || out[len(out)-1].location != ref.location { +- out = append(out, ref) +- } +- } +- refs = out +- +- return refs, nil +-} +- +-// packageReferences returns a list of references to the package +-// declaration of the specified name and uri by searching among the +-// import declarations of all packages that directly import the target +-// package. +-func packageReferences(ctx context.Context, snapshot *cache.Snapshot, uri protocol.DocumentURI) ([]reference, error) { +- metas, err := snapshot.MetadataForFile(ctx, uri, false) +- if err != nil { +- return nil, err +- } +- if len(metas) == 0 { +- return nil, fmt.Errorf("found no package containing %s", uri) +- } +- +- var refs []reference +- +- // Find external references to the package declaration +- // from each direct import of the package. +- // +- // The narrowest package is the most broadly imported, +- // so we choose it for the external references. +- // +- // But if the file ends with _test.go then we need to +- // find the package it is testing; there's no direct way +- // to do that, so pick a file from the same package that +- // doesn't end in _test.go and start over. +- narrowest := metas[0] +- if narrowest.ForTest != "" && strings.HasSuffix(string(uri), "_test.go") { +- for _, f := range narrowest.CompiledGoFiles { +- if !strings.HasSuffix(string(f), "_test.go") { +- return packageReferences(ctx, snapshot, f) +- } +- } +- // This package has no non-test files. +- // Skip the search for external references. +- // (Conceivably one could blank-import an empty package, but why?) +- } else { +- rdeps, err := snapshot.ReverseDependencies(ctx, narrowest.ID, false) // direct +- if err != nil { +- return nil, err +- } +- +- // Restrict search to workspace packages. +- workspace, err := snapshot.WorkspaceMetadata(ctx) +- if err != nil { +- return nil, err +- } +- workspaceMap := make(map[PackageID]*metadata.Package, len(workspace)) +- for _, mp := range workspace { +- workspaceMap[mp.ID] = mp +- } +- +- for _, rdep := range rdeps { +- if _, ok := workspaceMap[rdep.ID]; !ok { +- continue +- } +- for _, uri := range rdep.CompiledGoFiles { +- fh, err := snapshot.ReadFile(ctx, uri) +- if err != nil { +- return nil, err +- } +- f, err := snapshot.ParseGo(ctx, fh, parsego.Header) +- if err != nil { +- return nil, err +- } +- for _, imp := range f.File.Imports { +- if rdep.DepsByImpPath[metadata.UnquoteImportPath(imp)] == narrowest.ID { +- refs = append(refs, reference{ +- isDeclaration: false, +- location: mustLocation(f, imp), +- pkgPath: narrowest.PkgPath, +- }) +- } +- } +- } +- } +- } +- +- // Find internal "references" to the package from +- // of each package declaration in the target package itself. 
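A minimal standalone sketch of the sort-then-compact de-duplication used by references above (declarations first, then duplicates dropped by location while reusing the slice's backing array); the reference type and data here are invented:

    package main

    import (
        "fmt"
        "sort"
    )

    type ref struct {
        isDecl bool
        loc    string
    }

    func main() {
        refs := []ref{
            {false, "b.go:10"},
            {true, "a.go:1"},
            {false, "b.go:10"},
            {false, "c.go:3"},
        }
        sort.Slice(refs, func(i, j int) bool {
            x, y := refs[i], refs[j]
            if x.isDecl != y.isDecl {
                return x.isDecl // declarations sort before uses
            }
            return x.loc < y.loc
        })
        out := refs[:0] // compact in place
        for _, r := range refs {
            if len(out) == 0 || out[len(out)-1].loc != r.loc {
                out = append(out, r)
            }
        }
        fmt.Println(out) // [{true a.go:1} {false b.go:10} {false c.go:3}]
    }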
+- // +- // The widest package (possibly a test variant) has the +- // greatest number of files and thus we choose it for the +- // "internal" references. +- widest := metas[len(metas)-1] // may include _test.go files +- for _, uri := range widest.CompiledGoFiles { +- fh, err := snapshot.ReadFile(ctx, uri) +- if err != nil { +- return nil, err +- } +- f, err := snapshot.ParseGo(ctx, fh, parsego.Header) +- if err != nil { +- return nil, err +- } +- // golang/go#66250: don't crash if the package file lacks a name. +- if f.File.Name.Pos().IsValid() { +- refs = append(refs, reference{ +- isDeclaration: true, // (one of many) +- location: mustLocation(f, f.File.Name), +- pkgPath: widest.PkgPath, +- }) +- } +- } +- +- return refs, nil +-} +- +-// ordinaryReferences computes references for all ordinary objects (not package declarations). +-func ordinaryReferences(ctx context.Context, snapshot *cache.Snapshot, uri protocol.DocumentURI, pp protocol.Position) ([]reference, error) { +- // Strategy: use the reference information computed by the +- // type checker to find the declaration. First type-check this +- // package to find the declaration, then type check the +- // declaring package (which may be different), plus variants, +- // to find local (in-package) references. +- // Global references are satisfied by the index. +- +- // Strictly speaking, a wider package could provide a different +- // declaration (e.g. because the _test.go files can change the +- // meaning of a field or method selection), but the narrower +- // package reports the more broadly referenced object. +- pkg, pgf, err := NarrowestPackageForFile(ctx, snapshot, uri) +- if err != nil { +- return nil, err +- } +- +- // Find the selected object (declaration or reference). +- // For struct{T}, we choose the field (Def) over the type (Use). +- pos, err := pgf.PositionPos(pp) +- if err != nil { +- return nil, err +- } +- candidates, _, err := objectsAt(pkg.TypesInfo(), pgf.File, pos) +- if err != nil { +- return nil, err +- } +- +- // Pick first object arbitrarily. +- // The case variables of a type switch have different +- // types but that difference is immaterial here. +- var obj types.Object +- for obj = range candidates { +- break +- } +- if obj == nil { +- return nil, ErrNoIdentFound // can't happen +- } +- +- // nil, error, error.Error, iota, or other built-in? +- if isBuiltin(obj) { +- return nil, fmt.Errorf("references to builtin %q are not supported", obj.Name()) +- } +- +- // Find metadata of all packages containing the object's defining file. +- // This may include the query pkg, and possibly other variants. +- declPosn := safetoken.StartPosition(pkg.FileSet(), obj.Pos()) +- declURI := protocol.URIFromPath(declPosn.Filename) +- variants, err := snapshot.MetadataForFile(ctx, declURI, false) +- if err != nil { +- return nil, err +- } +- if len(variants) == 0 { +- return nil, fmt.Errorf("no packages for file %q", declURI) // can't happen +- } +- // (variants must include ITVs for reverse dependency computation below.) +- +- // Is object exported? +- // If so, compute scope and targets of the global search. +- var ( +- globalScope = make(map[PackageID]*metadata.Package) // (excludes ITVs) +- globalTargets map[PackagePath]map[objectpath.Path]unit +- expansions = make(map[PackageID]unit) // packages that caused search expansion +- ) +- // TODO(adonovan): what about generic functions? Need to consider both +- // uninstantiated and instantiated. The latter have no objectpath. Use Origin? 
+- if path, err := objectpath.For(obj); err == nil && obj.Exported() { +- pkgPath := variants[0].PkgPath // (all variants have same package path) +- globalTargets = map[PackagePath]map[objectpath.Path]unit{ +- pkgPath: {path: {}}, // primary target +- } +- +- // Compute set of (non-ITV) workspace packages. +- // We restrict references to this subset. +- workspace, err := snapshot.WorkspaceMetadata(ctx) +- if err != nil { +- return nil, err +- } +- workspaceMap := make(map[PackageID]*metadata.Package, len(workspace)) +- workspaceIDs := make([]PackageID, 0, len(workspace)) +- for _, mp := range workspace { +- workspaceMap[mp.ID] = mp +- workspaceIDs = append(workspaceIDs, mp.ID) +- } +- +- // addRdeps expands the global scope to include the +- // reverse dependencies of the specified package. +- addRdeps := func(id PackageID, transitive bool) error { +- rdeps, err := snapshot.ReverseDependencies(ctx, id, transitive) +- if err != nil { +- return err +- } +- for rdepID, rdep := range rdeps { +- // Skip non-workspace packages. +- // +- // This means we also skip any expansion of the +- // search that might be caused by a non-workspace +- // package, possibly causing us to miss references +- // to the expanded target set from workspace packages. +- // +- // TODO(adonovan): don't skip those expansions. +- // The challenge is how to so without type-checking +- // a lot of non-workspace packages not covered by +- // the initial workspace load. +- if _, ok := workspaceMap[rdepID]; !ok { +- continue +- } +- +- globalScope[rdepID] = rdep +- } +- return nil +- } +- +- // How far need we search? +- // For package-level objects, we need only search the direct importers. +- // For fields and methods, we must search transitively. +- transitive := obj.Pkg().Scope().Lookup(obj.Name()) != obj +- +- // The scope is the union of rdeps of each variant. +- // (Each set is disjoint so there's no benefit to +- // combining the metadata graph traversals.) +- for _, mp := range variants { +- if err := addRdeps(mp.ID, transitive); err != nil { +- return nil, err +- } +- } +- +- // Is object a method? +- // +- // If so, expand the search so that the targets include +- // all methods that correspond to it through interface +- // satisfaction, and the scope includes the rdeps of +- // the package that declares each corresponding type. +- // +- // 'expansions' records the packages that declared +- // such types. +- if recv := effectiveReceiver(obj); recv != nil { +- if err := expandMethodSearch(ctx, snapshot, workspaceIDs, obj.(*types.Func), recv, addRdeps, globalTargets, expansions); err != nil { +- return nil, err +- } +- } +- } +- +- // The search functions will call report(loc) for each hit. +- var ( +- refsMu sync.Mutex +- refs []reference +- ) +- report := func(loc protocol.Location, isDecl bool) { +- ref := reference{ +- isDeclaration: isDecl, +- location: loc, +- pkgPath: pkg.Metadata().PkgPath, +- } +- refsMu.Lock() +- refs = append(refs, ref) +- refsMu.Unlock() +- } +- +- // Loop over the variants of the declaring package, +- // and perform both the local (in-package) and global +- // (cross-package) searches, in parallel. +- // +- // TODO(adonovan): opt: support LSP reference streaming. See: +- // - https://github.com/microsoft/vscode-languageserver-node/pull/164 +- // - https://github.com/microsoft/language-server-protocol/pull/182 +- // +- // Careful: this goroutine must not return before group.Wait. +- var group errgroup.Group +- +- // Compute local references for each variant. 
+- // The target objects are identified by (URI, offset). +- for _, mp := range variants { +- // We want the ordinary importable package, +- // plus any test-augmented variants, since +- // declarations in _test.go files may change +- // the reference of a selection, or even a +- // field into a method or vice versa. +- // +- // But we don't need intermediate test variants, +- // as their local references will be covered +- // already by other variants. +- if mp.IsIntermediateTestVariant() { +- continue +- } +- mp := mp +- group.Go(func() error { +- // TODO(adonovan): opt: batch these TypeChecks. +- pkgs, err := snapshot.TypeCheck(ctx, mp.ID) +- if err != nil { +- return err +- } +- pkg := pkgs[0] +- +- // Find the declaration of the corresponding +- // object in this package based on (URI, offset). +- pgf, err := pkg.File(declURI) +- if err != nil { +- return err +- } +- pos, err := safetoken.Pos(pgf.Tok, declPosn.Offset) +- if err != nil { +- return err +- } +- objects, _, err := objectsAt(pkg.TypesInfo(), pgf.File, pos) +- if err != nil { +- return err // unreachable? (probably caught earlier) +- } +- +- // Report the locations of the declaration(s). +- // TODO(adonovan): what about for corresponding methods? Add tests. +- for _, node := range objects { +- report(mustLocation(pgf, node), true) +- } +- +- // Convert targets map to set. +- targets := make(map[types.Object]bool) +- for obj := range objects { +- targets[obj] = true +- } +- +- return localReferences(pkg, targets, true, report) +- }) +- } +- +- // Also compute local references within packages that declare +- // corresponding methods (see above), which expand the global search. +- // The target objects are identified by (PkgPath, objectpath). +- for id := range expansions { +- group.Go(func() error { +- // TODO(adonovan): opt: batch these TypeChecks. +- pkgs, err := snapshot.TypeCheck(ctx, id) +- if err != nil { +- return err +- } +- pkg := pkgs[0] +- +- targets := make(map[types.Object]bool) +- for objpath := range globalTargets[pkg.Metadata().PkgPath] { +- obj, err := objectpath.Object(pkg.Types(), objpath) +- if err != nil { +- // No such object, because it was +- // declared only in the test variant. +- continue +- } +- targets[obj] = true +- } +- +- // Don't include corresponding types or methods +- // since expansions did that already, and we don't +- // want (e.g.) concrete -> interface -> concrete. +- const correspond = false +- return localReferences(pkg, targets, correspond, report) +- }) +- } +- +- // Compute global references for selected reverse dependencies. +- group.Go(func() error { +- var globalIDs []PackageID +- for id := range globalScope { +- globalIDs = append(globalIDs, id) +- } +- indexes, err := snapshot.References(ctx, globalIDs...) +- if err != nil { +- return err +- } +- for _, index := range indexes { +- for _, loc := range index.Lookup(globalTargets) { +- report(loc, false) +- } +- } +- return nil +- }) +- +- if err := group.Wait(); err != nil { +- return nil, err +- } +- return refs, nil +-} +- +-// expandMethodSearch expands the scope and targets of a global search +-// for an exported method to include all methods in the workspace +-// that correspond to it through interface satisfaction. +-// +-// Each package that declares a corresponding type is added to +-// expansions so that we can also find local references to the type +-// within the package, which of course requires type checking. +-// +-// The scope is expanded by a sequence of calls (not concurrent) to addRdeps. 
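A minimal standalone sketch of the fan-out pattern that ordinaryReferences above uses for the per-variant searches: goroutines started through an errgroup funnel their hits into a shared slice via a mutex-protected report callback. The work items here are invented:

    package main

    import (
        "fmt"
        "sync"

        "golang.org/x/sync/errgroup"
    )

    func main() {
        var (
            mu   sync.Mutex
            hits []string
        )
        report := func(loc string) {
            mu.Lock()
            defer mu.Unlock()
            hits = append(hits, loc)
        }

        var group errgroup.Group
        for _, variant := range []string{"p", "p_test", "p.test"} {
            variant := variant // capture per iteration (pre-Go 1.22 loop semantics)
            group.Go(func() error {
                report(variant + "/a.go:1") // stand-in for a real per-variant search
                return nil
            })
        }
        if err := group.Wait(); err != nil {
            fmt.Println("search failed:", err)
            return
        }
        fmt.Println(len(hits), "references found")
    }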
+-// +-// recv is the method's effective receiver type, for method-set computations. +-func expandMethodSearch(ctx context.Context, snapshot *cache.Snapshot, workspaceIDs []PackageID, method *types.Func, recv types.Type, addRdeps func(id PackageID, transitive bool) error, targets map[PackagePath]map[objectpath.Path]unit, expansions map[PackageID]unit) error { +- // Compute the method-set fingerprint used as a key to the global search. +- key, hasMethods := methodsets.KeyOf(recv) +- if !hasMethods { +- // The query object was method T.m, but methodset(T)={}: +- // this indicates that ill-typed T has conflicting fields and methods. +- // Rather than bug-report (#67978), treat the empty method set at face value. +- return nil +- } +- // Search the methodset index of each package in the workspace. +- indexes, err := snapshot.MethodSets(ctx, workspaceIDs...) +- if err != nil { +- return err +- } +- var mu sync.Mutex // guards addRdeps, targets, expansions +- var group errgroup.Group +- for i, index := range indexes { +- group.Go(func() error { +- // Consult index for matching (super/sub) methods. +- const want = methodsets.Supertype | methodsets.Subtype +- results := index.Search(key, want, method) +- if len(results) == 0 { +- return nil +- } +- +- // We have discovered one or more corresponding types. +- id := workspaceIDs[i] +- +- mu.Lock() +- defer mu.Unlock() +- +- // Expand global search scope to include rdeps of this pkg. +- if err := addRdeps(id, true); err != nil { +- return err +- } +- +- // Mark this package so that we search within it for +- // local references to the additional types/methods. +- expansions[id] = unit{} +- +- // Add each corresponding method the to set of global search targets. +- for _, res := range results { +- methodPkg := PackagePath(res.PkgPath) +- opaths, ok := targets[methodPkg] +- if !ok { +- opaths = make(map[objectpath.Path]unit) +- targets[methodPkg] = opaths +- } +- opaths[res.ObjectPath] = unit{} +- } +- return nil +- }) +- } +- return group.Wait() +-} +- +-// localReferences traverses syntax and reports each reference to one +-// of the target objects, or (if correspond is set) an object that +-// corresponds to one of them via interface satisfaction. +-func localReferences(pkg *cache.Package, targets map[types.Object]bool, correspond bool, report func(loc protocol.Location, isDecl bool)) error { +- // If we're searching for references to a method optionally +- // broaden the search to include references to corresponding +- // methods of mutually assignable receiver types. +- // (We use a slice, but objectsAt never returns >1 methods.) +- var methodRecvs []types.Type +- var methodName string // name of an arbitrary target, iff a method +- if correspond { +- for obj := range targets { +- if t := effectiveReceiver(obj); t != nil { +- methodRecvs = append(methodRecvs, t) +- methodName = obj.Name() +- } +- } +- } +- +- var msets typeutil.MethodSetCache +- +- // matches reports whether obj either is or corresponds to a target. +- // (Correspondence is defined as usual for interface methods: super/subtype.) +- matches := func(obj types.Object) bool { +- if containsOrigin(targets, obj) { +- return true +- } +- if methodRecvs != nil && obj.Name() == methodName { +- if orecv := effectiveReceiver(obj); orecv != nil { +- for _, mrecv := range methodRecvs { +- if implements(&msets, orecv, mrecv) || +- implements(&msets, mrecv, orecv) { +- return true +- } +- } +- } +- } +- return false +- } +- +- // Scan through syntax looking for uses of one of the target objects. 
+- for _, pgf := range pkg.CompiledGoFiles() { +- for curId := range pgf.Cursor.Preorder((*ast.Ident)(nil)) { +- id := curId.Node().(*ast.Ident) +- if obj, ok := pkg.TypesInfo().Uses[id]; ok && matches(obj) { +- report(mustLocation(pgf, id), false) +- } +- } +- } +- return nil +-} +- +-// effectiveReceiver returns the effective receiver type for method-set +-// comparisons for obj, if it is a method, or nil otherwise. +-func effectiveReceiver(obj types.Object) types.Type { +- if fn, ok := obj.(*types.Func); ok { +- if recv := fn.Signature().Recv(); recv != nil { +- return methodsets.EnsurePointer(recv.Type()) +- } +- } +- return nil +-} +- +-// objectsAt returns the non-empty set of objects denoted (def or use) +-// by the specified position within a file syntax tree, or an error if +-// none were found. +-// +-// The result may contain more than one element because all case +-// variables of a type switch appear to be declared at the same +-// position. +-// +-// Each object is mapped to the syntax node that was treated as an +-// identifier, which is not always an ast.Ident. The second component +-// of the result is the innermost node enclosing pos. +-// +-// TODO(adonovan): factor in common with referencedObject. +-func objectsAt(info *types.Info, file *ast.File, pos token.Pos) (map[types.Object]ast.Node, ast.Node, error) { +- path := pathEnclosingObjNode(file, pos) +- if path == nil { +- return nil, nil, ErrNoIdentFound +- } +- +- targets := make(map[types.Object]ast.Node) +- +- switch leaf := path[0].(type) { +- case *ast.Ident: +- // If leaf represents an implicit type switch object or the type +- // switch "assign" variable, expand to all of the type switch's +- // implicit objects. +- if implicits, _ := typeSwitchImplicits(info, path); len(implicits) > 0 { +- for _, obj := range implicits { +- targets[obj] = leaf +- } +- } else { +- // For struct{T}, we prefer the defined field Var over the used TypeName. +- obj := info.ObjectOf(leaf) +- if obj == nil { +- return nil, nil, fmt.Errorf("%w for %q", errNoObjectFound, leaf.Name) +- } +- targets[obj] = leaf +- } +- case *ast.ImportSpec: +- // Look up the implicit *types.PkgName. +- obj := info.Implicits[leaf] +- if obj == nil { +- return nil, nil, fmt.Errorf("%w for import %s", errNoObjectFound, metadata.UnquoteImportPath(leaf)) +- } +- targets[obj] = leaf +- } +- +- if len(targets) == 0 { +- return nil, nil, fmt.Errorf("objectAt: internal error: no targets") // can't happen +- } +- return targets, path[0], nil +-} +- +-// mustLocation reports the location interval a syntax node, +-// which must belong to m.File. +-// +-// Safe for use only by references and implementations. +-func mustLocation(pgf *parsego.File, n ast.Node) protocol.Location { +- loc, err := pgf.NodeLocation(n) +- if err != nil { +- panic(err) // can't happen in references or implementations +- } +- return loc +-} +diff -urN a/gopls/internal/golang/rename_check.go b/gopls/internal/golang/rename_check.go +--- a/gopls/internal/golang/rename_check.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/golang/rename_check.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,946 +0,0 @@ +-// Copyright 2019 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +-// +-// Taken from golang.org/x/tools/refactor/rename. +- +-package golang +- +-// This file defines the conflict-checking portion of the rename operation. 
+-// +-// The renamer works on a single package of type-checked syntax, and +-// is called in parallel for all necessary packages in the workspace, +-// possibly up to the transitive reverse dependencies of the +-// declaration. Finally the union of all edits and errors is computed. +-// +-// Renaming one object may entail renaming of others. For example: +-// +-// - An embedded field couples a Var (field) and a TypeName. +-// So, renaming either one requires renaming the other. +-// If the initial object is an embedded field, we must add its +-// TypeName (and its enclosing package) to the renaming set; +-// this is easily discovered at the outset. +-// +-// Conversely, if the initial object is a TypeName, we must observe +-// whether any of its references (from directly importing packages) +-// is coincident with an embedded field Var and, if so, initiate a +-// renaming of it. +-// +-// - A method of an interface type is coupled to all corresponding +-// methods of types that are assigned to the interface (as +-// discovered by the 'satisfy' pass). As a matter of usability, we +-// require that such renamings be initiated from the interface +-// method, not the concrete method. +- +-import ( +- "fmt" +- "go/ast" +- "go/token" +- "go/types" +- "path/filepath" +- "reflect" +- "strings" +- "unicode" +- +- "golang.org/x/tools/go/ast/astutil" +- "golang.org/x/tools/go/ast/edge" +- "golang.org/x/tools/go/ast/inspector" +- "golang.org/x/tools/gopls/internal/cache" +- "golang.org/x/tools/gopls/internal/util/safetoken" +- "golang.org/x/tools/internal/typeparams" +- "golang.org/x/tools/internal/typesinternal" +- "golang.org/x/tools/refactor/satisfy" +-) +- +-// errorf reports an error (e.g. conflict) and prevents file modification. +-func (r *renamer) errorf(pos token.Pos, format string, args ...any) { +- // Conflict error messages in the old gorename tool (whence this +- // logic originated) contain rich information associated with +- // multiple source lines, such as: +- // +- // p/a.go:1:2: renaming "x" to "y" here +- // p/b.go:3:4: \t would cause this reference to "y" +- // p/c.go:5:5: \t to become shadowed by this intervening declaration. +- // +- // Unfortunately LSP provides no means to transmit the +- // structure of this error, so we format the positions briefly +- // using dir/file.go where dir is the base name of the parent +- // directory. +- +- var conflict strings.Builder +- +- // Add prefix of (truncated) position. +- if pos != token.NoPos { +- // TODO(adonovan): skip position of first error if it is +- // on the same line as the renaming itself. +- posn := safetoken.StartPosition(r.pkg.FileSet(), pos).String() +- segments := strings.Split(filepath.ToSlash(posn), "/") +- if n := len(segments); n > 2 { +- segments = segments[n-2:] +- } +- posn = strings.Join(segments, "/") +- fmt.Fprintf(&conflict, "%s:", posn) +- +- if !strings.HasPrefix(format, "\t") { +- conflict.WriteByte(' ') +- } +- } +- +- fmt.Fprintf(&conflict, format, args...) +- r.conflicts = append(r.conflicts, conflict.String()) +-} +- +-// check performs safety checks of the renaming of the 'from' object to r.to. +-func (r *renamer) check(from types.Object) { +- if r.objsToUpdate[from] { +- return +- } +- r.objsToUpdate[from] = true +- +- // NB: order of conditions is important. 
+- if from_, ok := from.(*types.PkgName); ok { +- r.checkInFileBlock(from_) +- } else if from_, ok := from.(*types.Label); ok { +- r.checkLabel(from_) +- } else if typesinternal.IsPackageLevel(from) { +- r.checkInPackageBlock(from) +- } else if v, ok := from.(*types.Var); ok && v.IsField() { +- r.checkStructField(v) +- } else if f, ok := from.(*types.Func); ok && recv(f) != nil { +- r.checkMethod(f) +- } else if isLocal(from) { +- r.checkInLexicalScope(from) +- } else { +- r.errorf(from.Pos(), "unexpected %s object %q (please report a bug)\n", +- objectKind(from), from) +- } +-} +- +-// checkInFileBlock performs safety checks for renames of objects in the file block, +-// i.e. imported package names. +-func (r *renamer) checkInFileBlock(from *types.PkgName) { +- // Check import name is not "init". +- if r.to == "init" { +- r.errorf(from.Pos(), "%q is not a valid imported package name", r.to) +- } +- +- // Check for conflicts between file and package block. +- if prev := from.Pkg().Scope().Lookup(r.to); prev != nil { +- r.errorf(from.Pos(), "renaming this %s %q to %q would conflict", +- objectKind(from), from.Name(), r.to) +- r.errorf(prev.Pos(), "\twith this package member %s", +- objectKind(prev)) +- return // since checkInPackageBlock would report redundant errors +- } +- +- // Check for conflicts in lexical scope. +- r.checkInLexicalScope(from) +-} +- +-// checkInPackageBlock performs safety checks for renames of +-// func/var/const/type objects in the package block. +-func (r *renamer) checkInPackageBlock(from types.Object) { +- // Check that there are no references to the name from another +- // package if the renaming would make it unexported. +- if typ := r.pkg.Types(); typ != from.Pkg() && ast.IsExported(r.from) && !ast.IsExported(r.to) { +- if id := someUse(r.pkg.TypesInfo(), from); id != nil { +- r.checkExport(id, typ, from) +- } +- } +- +- // Check that in the package block, "init" is a function, and never referenced. +- if r.to == "init" { +- kind := objectKind(from) +- if kind == "func" { +- // Reject if intra-package references to it exist. +- for id, obj := range r.pkg.TypesInfo().Uses { +- if obj == from { +- r.errorf(from.Pos(), +- "renaming this func %q to %q would make it a package initializer", +- from.Name(), r.to) +- r.errorf(id.Pos(), "\tbut references to it exist") +- break +- } +- } +- } else { +- r.errorf(from.Pos(), "you cannot have a %s at package level named %q", +- kind, r.to) +- } +- } +- +- // In the declaring package, check for conflicts between the +- // package block and all file blocks. +- if from.Pkg() == r.pkg.Types() { +- for _, f := range r.pkg.Syntax() { +- fileScope := r.pkg.TypesInfo().Scopes[f] +- if fileScope == nil { +- continue // type error? (golang/go#40835) +- } +- b, prev := fileScope.LookupParent(r.to, token.NoPos) +- if b == fileScope { +- r.errorf(from.Pos(), "renaming this %s %q to %q would conflict", objectKind(from), from.Name(), r.to) +- r.errorf(prev.Pos(), "\twith this %s", objectKind(prev)) +- return // since checkInPackageBlock would report redundant errors +- } +- } +- } +- +- // Check for conflicts in lexical scope. +- r.checkInLexicalScope(from) +-} +- +-// checkInLexicalScope performs safety checks that a renaming does not +-// change the lexical reference structure of the specified package. +-// +-// For objects in lexical scope, there are three kinds of conflicts: +-// same-, sub-, and super-block conflicts. 
We will illustrate all three +-// using this example: +-// +-// var x int +-// var z int +-// +-// func f(y int) { +-// print(x) +-// print(y) +-// } +-// +-// Renaming x to z encounters a "same-block conflict", because an object +-// with the new name already exists, defined in the same lexical block +-// as the old object. +-// +-// Renaming x to y encounters a "sub-block conflict", because there exists +-// a reference to x from within (what would become) a hole in its scope. +-// The definition of y in an (inner) sub-block would cast a shadow in +-// the scope of the renamed variable. +-// +-// Renaming y to x encounters a "super-block conflict". This is the +-// converse situation: there is an existing definition of the new name +-// (x) in an (enclosing) super-block, and the renaming would create a +-// hole in its scope, within which there exist references to it. The +-// new name shadows the existing definition of x in the super-block. +-// +-// Removing the old name (and all references to it) is always safe, and +-// requires no checks. +-func (r *renamer) checkInLexicalScope(from types.Object) { +- b := from.Parent() // the block defining the 'from' object +- if b != nil { +- toBlock, to := b.LookupParent(r.to, from.Parent().End()) +- if toBlock == b { +- // same-block conflict +- r.errorf(from.Pos(), "renaming this %s %q to %q", +- objectKind(from), from.Name(), r.to) +- r.errorf(to.Pos(), "\tconflicts with %s in same block", +- objectKind(to)) +- return +- } else if toBlock != nil { +- // Check for super-block conflict. +- // The name r.to is defined in a superblock. +- // Is that name referenced from within this block? +- forEachLexicalRef(r.pkg, to, func(id *ast.Ident, block *types.Scope) bool { +- _, obj := block.LookupParent(from.Name(), id.Pos()) +- if obj == from { +- // super-block conflict +- r.errorf(from.Pos(), "renaming this %s %q to %q", +- objectKind(from), from.Name(), r.to) +- r.errorf(id.Pos(), "\twould shadow this reference") +- r.errorf(to.Pos(), "\tto the %s declared here", +- objectKind(to)) +- return false // stop +- } +- return true +- }) +- } +- } +- // Check for sub-block conflict. +- // Is there an intervening definition of r.to between +- // the block defining 'from' and some reference to it? +- forEachLexicalRef(r.pkg, from, func(id *ast.Ident, block *types.Scope) bool { +- // Find the block that defines the found reference. +- // It may be an ancestor. +- fromBlock, _ := block.LookupParent(from.Name(), id.Pos()) +- // See what r.to would resolve to in the same scope. +- toBlock, to := block.LookupParent(r.to, id.Pos()) +- if to != nil { +- // sub-block conflict +- if deeper(toBlock, fromBlock) { +- r.errorf(from.Pos(), "renaming this %s %q to %q", +- objectKind(from), from.Name(), r.to) +- r.errorf(id.Pos(), "\twould cause this reference to become shadowed") +- r.errorf(to.Pos(), "\tby this intervening %s definition", +- objectKind(to)) +- return false // stop +- } +- } +- return true +- }) +- +- // Renaming a type that is used as an embedded field +- // requires renaming the field too. e.g. +- // type T int // if we rename this to U.. +- // var s struct {T} +- // print(s.T) // ...this must change too +- if _, ok := from.(*types.TypeName); ok { +- for id, obj := range r.pkg.TypesInfo().Uses { +- if obj == from { +- if field := r.pkg.TypesInfo().Defs[id]; field != nil { +- r.check(field) +- } +- } +- } +- } +-} +- +-// deeper reports whether block x is lexically deeper than y. 
+-func deeper(x, y *types.Scope) bool { +- if x == y || x == nil { +- return false +- } else if y == nil { +- return true +- } else { +- return deeper(x.Parent(), y.Parent()) +- } +-} +- +-// Scope and Position +-// +-// Consider a function f declared as: +-// +-// func f[T *U, U *T](p, q T) (r, s U) { var ( v T; w = v ); type (t *t; u t) } +-// ^ ^ ^ ^ ^ ^ +-/// {T,U} {p,q,r,s} v w t u +-// +-// All objects {T, U, p, q, r, s, local} belong to the same lexical +-// block, the function scope, which is found in types.Info.Scopes +-// for f's FuncType. (A function body's BlockStmt does not have +-// an associated scope; only nested BlockStmts do.) +-// +-// The effective scope of each object is different: +-// +-// - The type parameters T and U, whose constraints may refer to each +-// other, all have a scope that starts at the beginning of the +-// FuncDecl.Type.Func token. +-// +-// - The parameter and result variables {p,q,r,s} can reference the +-// type parameters but not each other, so their scopes all start at +-// the end of the FuncType. +-// (Prior to go1.22 it was--incorrectly--unset; see #64295). +-// Beware also that Scope.Innermost does not currently work correctly for +-// type parameters: it returns the scope of the package, not the function. +-// +-// - Each const or var {v,w} declared within the function body has a +-// scope that begins at the end of its ValueSpec, or after the +-// AssignStmt for a var declared by ":=". +-// +-// - Each type {t,u} in the body has a scope that begins at +-// the start of the TypeSpec, so they can be self-recursive +-// but--unlike package-level types--not mutually recursive. +- +-// forEachLexicalRef calls fn(id, block) for each identifier id in package +-// pkg that is a reference to obj in lexical scope. block is the +-// lexical block enclosing the reference. If fn returns false the +-// iteration is terminated and findLexicalRefs returns false. +-func forEachLexicalRef(pkg *cache.Package, obj types.Object, fn func(id *ast.Ident, block *types.Scope) bool) bool { +- filter := []ast.Node{ +- (*ast.Ident)(nil), +- (*ast.SelectorExpr)(nil), +- (*ast.CompositeLit)(nil), +- } +- ok := true +- var visit func(cur inspector.Cursor) (descend bool) +- visit = func(cur inspector.Cursor) (descend bool) { +- if !ok { +- return false // bail out +- } +- switch n := cur.Node().(type) { +- case *ast.Ident: +- if pkg.TypesInfo().Uses[n] == obj { +- block := typesinternal.EnclosingScope(pkg.TypesInfo(), cur) +- if !fn(n, block) { +- ok = false +- } +- } +- +- case *ast.SelectorExpr: +- // don't visit n.Sel +- cur.ChildAt(edge.SelectorExpr_X, -1).Inspect(filter, visit) +- return false // don't descend +- +- case *ast.CompositeLit: +- // Handle recursion ourselves for struct literals +- // so we don't visit field identifiers. 
+- tv, ok := pkg.TypesInfo().Types[n] +- if !ok { +- return false // don't descend +- } +- if is[*types.Struct](typeparams.CoreType(typeparams.Deref(tv.Type))) { +- if n.Type != nil { +- cur.ChildAt(edge.CompositeLit_Type, -1).Inspect(filter, visit) +- } +- for i, elt := range n.Elts { +- curElt := cur.ChildAt(edge.CompositeLit_Elts, i) +- if _, ok := elt.(*ast.KeyValueExpr); ok { +- // skip kv.Key +- curElt = curElt.ChildAt(edge.KeyValueExpr_Value, -1) +- } +- curElt.Inspect(filter, visit) +- } +- return false // don't descend +- } +- } +- return true +- } +- +- for _, pgf := range pkg.CompiledGoFiles() { +- pgf.Cursor.Inspect(filter, visit) +- if !ok { +- break +- } +- } +- return ok +-} +- +-func (r *renamer) checkLabel(label *types.Label) { +- // Check there are no identical labels in the function's label block. +- // (Label blocks don't nest, so this is easy.) +- if prev := label.Parent().Lookup(r.to); prev != nil { +- r.errorf(label.Pos(), "renaming this label %q to %q", label.Name(), prev.Name()) +- r.errorf(prev.Pos(), "\twould conflict with this one") +- } +-} +- +-// checkStructField checks that the field renaming will not cause +-// conflicts at its declaration, or ambiguity or changes to any selection. +-func (r *renamer) checkStructField(from *types.Var) { +- // If this is the declaring package, check that the struct +- // declaration is free of field conflicts, and field/method +- // conflicts. +- // +- // go/types offers no easy way to get from a field (or interface +- // method) to its declaring struct (or interface), so we must +- // ascend the AST. +- if pgf, err := r.pkg.FileEnclosing(from.Pos()); err == nil { +- path, _ := astutil.PathEnclosingInterval(pgf.File, from.Pos(), from.Pos()) +- // path matches this pattern: +- // [Ident SelectorExpr? StarExpr? Field FieldList StructType ParenExpr* ... File] +- +- // Ascend to FieldList. +- var i int +- for { +- if _, ok := path[i].(*ast.FieldList); ok { +- break +- } +- i++ +- } +- i++ +- tStruct := path[i].(*ast.StructType) +- i++ +- // Ascend past parens (unlikely). +- for { +- _, ok := path[i].(*ast.ParenExpr) +- if !ok { +- break +- } +- i++ +- } +- if spec, ok := path[i].(*ast.TypeSpec); ok { +- // This struct is also a named type. +- // We must check for direct (non-promoted) field/field +- // and method/field conflicts. +- if tname := r.pkg.TypesInfo().Defs[spec.Name]; tname != nil { +- prev, indices, _ := types.LookupFieldOrMethod(tname.Type(), true, r.pkg.Types(), r.to) +- if len(indices) == 1 { +- r.errorf(from.Pos(), "renaming this field %q to %q", +- from.Name(), r.to) +- r.errorf(prev.Pos(), "\twould conflict with this %s", +- objectKind(prev)) +- return // skip checkSelections to avoid redundant errors +- } +- } +- } else { +- // This struct is not a named type. +- // We need only check for direct (non-promoted) field/field conflicts. +- T := r.pkg.TypesInfo().Types[tStruct].Type.Underlying().(*types.Struct) +- for i := 0; i < T.NumFields(); i++ { +- if prev := T.Field(i); prev.Name() == r.to { +- r.errorf(from.Pos(), "renaming this field %q to %q", +- from.Name(), r.to) +- r.errorf(prev.Pos(), "\twould conflict with this field") +- return // skip checkSelections to avoid redundant errors +- } +- } +- } +- } +- +- // Renaming an anonymous field requires renaming the type too. e.g. +- // print(s.T) // if we rename T to U, +- // type T int // this and +- // var s struct {T} // this must change too. 
+- if from.Anonymous() { +- if named, ok := from.Type().(*types.Named); ok { +- r.check(named.Obj()) +- } else if named, ok := types.Unalias(typesinternal.Unpointer(from.Type())).(*types.Named); ok { +- r.check(named.Obj()) +- } +- } +- +- // Check integrity of existing (field and method) selections. +- r.checkSelections(from) +-} +- +-// checkSelections checks that all uses and selections that resolve to +-// the specified object would continue to do so after the renaming. +-func (r *renamer) checkSelections(from types.Object) { +- pkg := r.pkg +- typ := pkg.Types() +- { +- if id := someUse(pkg.TypesInfo(), from); id != nil { +- if !r.checkExport(id, typ, from) { +- return +- } +- } +- +- for syntax, sel := range pkg.TypesInfo().Selections { +- // There may be extant selections of only the old +- // name or only the new name, so we must check both. +- // (If neither, the renaming is sound.) +- // +- // In both cases, we wish to compare the lengths +- // of the implicit field path (Selection.Index) +- // to see if the renaming would change it. +- // +- // If a selection that resolves to 'from', when renamed, +- // would yield a path of the same or shorter length, +- // this indicates ambiguity or a changed referent, +- // analogous to same- or sub-block lexical conflict. +- // +- // If a selection using the name 'to' would +- // yield a path of the same or shorter length, +- // this indicates ambiguity or shadowing, +- // analogous to same- or super-block lexical conflict. +- +- // TODO(adonovan): fix: derive from Types[syntax.X].Mode +- // TODO(adonovan): test with pointer, value, addressable value. +- isAddressable := true +- +- if sel.Obj() == from { +- if obj, indices, _ := types.LookupFieldOrMethod(sel.Recv(), isAddressable, from.Pkg(), r.to); obj != nil { +- // Renaming this existing selection of +- // 'from' may block access to an existing +- // type member named 'to'. +- delta := len(indices) - len(sel.Index()) +- if delta > 0 { +- continue // no ambiguity +- } +- r.selectionConflict(from, delta, syntax, obj) +- return +- } +- } else if sel.Obj().Name() == r.to { +- if obj, indices, _ := types.LookupFieldOrMethod(sel.Recv(), isAddressable, from.Pkg(), from.Name()); obj == from { +- // Renaming 'from' may cause this existing +- // selection of the name 'to' to change +- // its meaning. +- delta := len(indices) - len(sel.Index()) +- if delta > 0 { +- continue // no ambiguity +- } +- r.selectionConflict(from, -delta, syntax, sel.Obj()) +- return +- } +- } +- } +- } +-} +- +-func (r *renamer) selectionConflict(from types.Object, delta int, syntax *ast.SelectorExpr, obj types.Object) { +- r.errorf(from.Pos(), "renaming this %s %q to %q", +- objectKind(from), from.Name(), r.to) +- +- switch { +- case delta < 0: +- // analogous to sub-block conflict +- r.errorf(syntax.Sel.Pos(), +- "\twould change the referent of this selection") +- r.errorf(obj.Pos(), "\tof this %s", objectKind(obj)) +- case delta == 0: +- // analogous to same-block conflict +- r.errorf(syntax.Sel.Pos(), +- "\twould make this reference ambiguous") +- r.errorf(obj.Pos(), "\twith this %s", objectKind(obj)) +- case delta > 0: +- // analogous to super-block conflict +- r.errorf(syntax.Sel.Pos(), +- "\twould shadow this selection") +- r.errorf(obj.Pos(), "\tof the %s declared here", +- objectKind(obj)) +- } +-} +- +-// checkMethod performs safety checks for renaming a method. 
+-// There are three hazards: +-// - declaration conflicts +-// - selection ambiguity/changes +-// - entailed renamings of assignable concrete/interface types. +-// +-// We reject renamings initiated at concrete methods if it would +-// change the assignability relation. For renamings of abstract +-// methods, we rename all methods transitively coupled to it via +-// assignability. +-func (r *renamer) checkMethod(from *types.Func) { +- // e.g. error.Error +- if from.Pkg() == nil { +- r.errorf(from.Pos(), "you cannot rename built-in method %s", from) +- return +- } +- +- // ASSIGNABILITY: We reject renamings of concrete methods that +- // would break a 'satisfy' constraint; but renamings of abstract +- // methods are allowed to proceed, and we rename affected +- // concrete and abstract methods as necessary. It is the +- // initial method that determines the policy. +- +- // Check for conflict at point of declaration. +- // Check to ensure preservation of assignability requirements. +- R := recv(from).Type() +- if types.IsInterface(R) { +- // Abstract method +- +- // declaration +- prev, _, _ := types.LookupFieldOrMethod(R, false, from.Pkg(), r.to) +- if prev != nil { +- r.errorf(from.Pos(), "renaming this interface method %q to %q", +- from.Name(), r.to) +- r.errorf(prev.Pos(), "\twould conflict with this method") +- return +- } +- +- // Check all interfaces that embed this one for +- // declaration conflicts too. +- { +- // Start with named interface types (better errors) +- for _, obj := range r.pkg.TypesInfo().Defs { +- if obj, ok := obj.(*types.TypeName); ok && types.IsInterface(obj.Type()) { +- f, _, _ := types.LookupFieldOrMethod( +- obj.Type(), false, from.Pkg(), from.Name()) +- if f == nil { +- continue +- } +- t, _, _ := types.LookupFieldOrMethod( +- obj.Type(), false, from.Pkg(), r.to) +- if t == nil { +- continue +- } +- r.errorf(from.Pos(), "renaming this interface method %q to %q", +- from.Name(), r.to) +- r.errorf(t.Pos(), "\twould conflict with this method") +- r.errorf(obj.Pos(), "\tin named interface type %q", obj.Name()) +- } +- } +- +- // Now look at all literal interface types (includes named ones again). +- for e, tv := range r.pkg.TypesInfo().Types { +- if e, ok := e.(*ast.InterfaceType); ok { +- _ = e +- _ = tv.Type.(*types.Interface) +- // TODO(adonovan): implement same check as above. +- } +- } +- } +- +- // assignability +- // +- // Find the set of concrete or abstract methods directly +- // coupled to abstract method 'from' by some +- // satisfy.Constraint, and rename them too. +- for key := range r.satisfy() { +- // key = (lhs, rhs) where lhs is always an interface. +- +- lsel := r.msets.MethodSet(key.LHS).Lookup(from.Pkg(), from.Name()) +- if lsel == nil { +- continue +- } +- rmethods := r.msets.MethodSet(key.RHS) +- rsel := rmethods.Lookup(from.Pkg(), from.Name()) +- if rsel == nil { +- continue +- } +- +- // If both sides have a method of this name, +- // and one of them is m, the other must be coupled. +- var coupled *types.Func +- switch from { +- case lsel.Obj(): +- coupled = rsel.Obj().(*types.Func) +- case rsel.Obj(): +- coupled = lsel.Obj().(*types.Func) +- default: +- continue +- } +- +- // We must treat concrete-to-interface +- // constraints like an implicit selection C.f of +- // each interface method I.f, and check that the +- // renaming leaves the selection unchanged and +- // unambiguous. 
+- // +- // Fun fact: the implicit selection of C.f +- // type I interface{f()} +- // type C struct{I} +- // func (C) g() +- // var _ I = C{} // here +- // yields abstract method I.f. This can make error +- // messages less than obvious. +- // +- if !types.IsInterface(key.RHS) { +- // The logic below was derived from checkSelections. +- +- rtosel := rmethods.Lookup(from.Pkg(), r.to) +- if rtosel != nil { +- rto := rtosel.Obj().(*types.Func) +- delta := len(rsel.Index()) - len(rtosel.Index()) +- if delta < 0 { +- continue // no ambiguity +- } +- +- // TODO(adonovan): record the constraint's position. +- keyPos := token.NoPos +- +- r.errorf(from.Pos(), "renaming this method %q to %q", +- from.Name(), r.to) +- if delta == 0 { +- // analogous to same-block conflict +- r.errorf(keyPos, "\twould make the %s method of %s invoked via interface %s ambiguous", +- r.to, key.RHS, key.LHS) +- r.errorf(rto.Pos(), "\twith (%s).%s", +- recv(rto).Type(), r.to) +- } else { +- // analogous to super-block conflict +- r.errorf(keyPos, "\twould change the %s method of %s invoked via interface %s", +- r.to, key.RHS, key.LHS) +- r.errorf(coupled.Pos(), "\tfrom (%s).%s", +- recv(coupled).Type(), r.to) +- r.errorf(rto.Pos(), "\tto (%s).%s", +- recv(rto).Type(), r.to) +- } +- return // one error is enough +- } +- } +- +- if !r.changeMethods { +- // This should be unreachable. +- r.errorf(from.Pos(), "internal error: during renaming of abstract method %s", from) +- r.errorf(coupled.Pos(), "\tchangedMethods=false, coupled method=%s", coupled) +- r.errorf(from.Pos(), "\tPlease file a bug report") +- return +- } +- +- // Rename the coupled method to preserve assignability. +- r.check(coupled) +- } +- } else { +- // Concrete method +- +- // declaration +- prev, indices, _ := types.LookupFieldOrMethod(R, true, from.Pkg(), r.to) +- if prev != nil && len(indices) == 1 { +- r.errorf(from.Pos(), "renaming this method %q to %q", +- from.Name(), r.to) +- r.errorf(prev.Pos(), "\twould conflict with this %s", +- objectKind(prev)) +- return +- } +- +- // assignability +- // +- // Find the set of abstract methods coupled to concrete +- // method 'from' by some satisfy.Constraint, and rename +- // them too. +- // +- // Coupling may be indirect, e.g. I.f <-> C.f via type D. +- // +- // type I interface {f()} +- // type C int +- // type (C) f() +- // type D struct{C} +- // var _ I = D{} +- // +- for key := range r.satisfy() { +- // key = (lhs, rhs) where lhs is always an interface. +- if types.IsInterface(key.RHS) { +- continue +- } +- rsel := r.msets.MethodSet(key.RHS).Lookup(from.Pkg(), from.Name()) +- if rsel == nil || rsel.Obj() != from { +- continue // rhs does not have the method +- } +- lsel := r.msets.MethodSet(key.LHS).Lookup(from.Pkg(), from.Name()) +- if lsel == nil { +- continue +- } +- imeth := lsel.Obj().(*types.Func) +- +- // imeth is the abstract method (e.g. I.f) +- // and key.RHS is the concrete coupling type (e.g. D). 
+- if !r.changeMethods { +- r.errorf(from.Pos(), "renaming this method %q to %q", +- from.Name(), r.to) +- var pos token.Pos +- var iface string +- +- I := recv(imeth).Type() +- if named, ok := types.Unalias(I).(*types.Named); ok { +- pos = named.Obj().Pos() +- iface = "interface " + named.Obj().Name() +- } else { +- pos = from.Pos() +- iface = I.String() +- } +- r.errorf(pos, "\twould make %s no longer assignable to %s", +- key.RHS, iface) +- r.errorf(imeth.Pos(), "\t(rename %s.%s if you intend to change both types)", +- I, from.Name()) +- return // one error is enough +- } +- +- // Rename the coupled interface method to preserve assignability. +- r.check(imeth) +- } +- } +- +- // Check integrity of existing (field and method) selections. +- // We skip this if there were errors above, to avoid redundant errors. +- r.checkSelections(from) +-} +- +-func (r *renamer) checkExport(id *ast.Ident, pkg *types.Package, from types.Object) bool { +- // Reject cross-package references if r.to is unexported. +- // (Such references may be qualified identifiers or field/method +- // selections.) +- if !ast.IsExported(r.to) && pkg != from.Pkg() { +- r.errorf(from.Pos(), +- "renaming %q to %q would make it unexported", +- from.Name(), r.to) +- r.errorf(id.Pos(), "\tbreaking references from packages such as %q", +- pkg.Path()) +- return false +- } +- return true +-} +- +-// satisfy returns the set of interface satisfaction constraints. +-func (r *renamer) satisfy() map[satisfy.Constraint]bool { +- if r.satisfyConstraints == nil { +- // Compute on demand: it's expensive. +- var f satisfy.Finder +- pkg := r.pkg +- { +- // From satisfy.Finder documentation: +- // +- // The package must be free of type errors, and +- // info.{Defs,Uses,Selections,Types} must have been populated by the +- // type-checker. +- // +- // Only proceed if all packages have no errors. +- if len(pkg.ParseErrors()) > 0 || len(pkg.TypeErrors()) > 0 { +- var filename string +- if len(pkg.ParseErrors()) > 0 { +- err := pkg.ParseErrors()[0][0] +- filename = filepath.Base(err.Pos.Filename) +- } else if len(pkg.TypeErrors()) > 0 { +- err := pkg.TypeErrors()[0] +- filename = filepath.Base(err.Fset.File(err.Pos).Name()) +- } +- r.errorf(token.NoPos, // we don't have a position for this error. +- "renaming %q to %q not possible because %q in %q has errors", +- r.from, r.to, filename, pkg.Metadata().PkgPath) +- return nil +- } +- f.Find(pkg.TypesInfo(), pkg.Syntax()) +- } +- r.satisfyConstraints = f.Result +- } +- return r.satisfyConstraints +-} +- +-// -- helpers ---------------------------------------------------------- +- +-// recv returns the method's receiver. +-func recv(meth *types.Func) *types.Var { +- return meth.Signature().Recv() +-} +- +-// someUse returns an arbitrary use of obj within info. +-func someUse(info *types.Info, obj types.Object) *ast.Ident { +- for id, o := range info.Uses { +- if o == obj { +- return id +- } +- } +- return nil +-} +- +-func objectKind(obj types.Object) string { +- if obj == nil { +- return "nil object" +- } +- switch obj := obj.(type) { +- case *types.PkgName: +- return "imported package name" +- case *types.TypeName: +- return "type" +- case *types.Var: +- if obj.IsField() { +- return "field" +- } +- case *types.Func: +- if recv(obj) != nil { +- return "method" +- } +- } +- // label, func, var, const +- return strings.ToLower(strings.TrimPrefix(reflect.TypeOf(obj).String(), "*types.")) +-} +- +-// NB: for renamings, blank is not considered valid. 
+-func isValidIdentifier(id string) bool { +- if id == "" || id == "_" { +- return false +- } +- for i, r := range id { +- if !isLetter(r) && (i == 0 || !isDigit(r)) { +- return false +- } +- } +- return token.Lookup(id) == token.IDENT +-} +- +-// isValidPackagePath reports whether newPath is a valid new path for the +-// package currently at oldPath. For now, we only support renames that +-// do not result in a package move. +-// TODO(mkalil): support package renames with arbitrary package paths, including +-// relative paths. +-func isValidPackagePath(oldPath, newPath string) bool { +- // We prompt with the full package path, but some users may delete this and +- // just enter a package identifier, which we should still support. +- return isValidIdentifier(newPath) || filepath.Dir(oldPath) == filepath.Dir(newPath) +-} +- +-// isLocal reports whether obj is local to some function. +-// Precondition: not a struct field or interface method. +-func isLocal(obj types.Object) bool { +- // [... 5=stmt 4=func 3=file 2=pkg 1=universe] +- var depth int +- for scope := obj.Parent(); scope != nil; scope = scope.Parent() { +- depth++ +- } +- return depth >= 4 +-} +- +-// -- Plundered from go/scanner: --------------------------------------- +- +-func isLetter(ch rune) bool { +- return 'a' <= ch && ch <= 'z' || 'A' <= ch && ch <= 'Z' || ch == '_' || ch >= 0x80 && unicode.IsLetter(ch) +-} +- +-func isDigit(ch rune) bool { +- return '0' <= ch && ch <= '9' || ch >= 0x80 && unicode.IsDigit(ch) +-} +diff -urN a/gopls/internal/golang/rename.go b/gopls/internal/golang/rename.go +--- a/gopls/internal/golang/rename.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/golang/rename.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,1787 +0,0 @@ +-// Copyright 2019 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package golang +- +-// TODO(adonovan): +-// +-// - method of generic concrete type -> arbitrary instances of same +-// +-// - make satisfy work across packages. +-// +-// - tests, tests, tests: +-// - play with renamings in the k8s tree. +-// - generics +-// - error cases (e.g. conflicts) +-// - renaming a symbol declared in the module cache +-// (currently proceeds with half of the renaming!) +-// - make sure all tests have both a local and a cross-package analogue. +-// - look at coverage +-// - special cases: embedded fields, interfaces, test variants, +-// function-local things with uppercase names; +-// packages with type errors (currently 'satisfy' rejects them), +-// package with missing imports; +-// +-// - measure performance in k8s. +-// +-// - The original gorename tool assumed well-typedness, but the gopls feature +-// does no such check (which actually makes it much more useful). +-// Audit to ensure it is safe on ill-typed code. +-// +-// - Generics support was no doubt buggy before but incrementalization +-// may have exacerbated it. If the problem were just about objects, +-// defs and uses it would be fairly simple, but type assignability +-// comes into play in the 'satisfy' check for method renamings. +-// De-instantiating Vector[int] to Vector[T] changes its type. +-// We need to come up with a theory for the satisfy check that +-// works with generics, and across packages. We currently have no +-// simple way to pass types between packages (think: objectpath for +-// types), though presumably exportdata could be pressed into service. 
+-// +-// - FileID-based de-duplication of edits to different URIs for the same file. +- +-import ( +- "bytes" +- "context" +- "errors" +- "fmt" +- "go/ast" +- "go/parser" +- "go/printer" +- "go/token" +- "go/types" +- "maps" +- "path" +- "path/filepath" +- "regexp" +- "slices" +- "sort" +- "strconv" +- "strings" +- +- "golang.org/x/mod/modfile" +- "golang.org/x/tools/go/ast/astutil" +- "golang.org/x/tools/go/ast/inspector" +- "golang.org/x/tools/go/types/objectpath" +- "golang.org/x/tools/go/types/typeutil" +- "golang.org/x/tools/gopls/internal/cache" +- "golang.org/x/tools/gopls/internal/cache/metadata" +- "golang.org/x/tools/gopls/internal/cache/parsego" +- "golang.org/x/tools/gopls/internal/file" +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/gopls/internal/util/bug" +- "golang.org/x/tools/gopls/internal/util/cursorutil" +- "golang.org/x/tools/gopls/internal/util/safetoken" +- internalastutil "golang.org/x/tools/internal/astutil" +- "golang.org/x/tools/internal/diff" +- "golang.org/x/tools/internal/event" +- "golang.org/x/tools/internal/typesinternal" +- "golang.org/x/tools/refactor/satisfy" +-) +- +-// A renamer holds state of a single call to renameObj, which renames +-// an object (or several coupled objects) within a single type-checked +-// syntax package. +-type renamer struct { +- pkg *cache.Package // the syntax package in which the renaming is applied +- objsToUpdate map[types.Object]bool // records progress of calls to check +- conflicts []string +- from, to string +- satisfyConstraints map[satisfy.Constraint]bool +- msets typeutil.MethodSetCache +- changeMethods bool +-} +- +-// A PrepareItem holds the result of a "prepare rename" operation: +-// the source range and value of a selected identifier. +-type PrepareItem struct { +- Range protocol.Range +- Text string +-} +- +-// PrepareRename searches for a valid renaming at position pp. +-// +-// The returned usererr is intended to be displayed to the user to explain why +-// the prepare fails. Probably we could eliminate the redundancy in returning +-// two errors, but for now this is done defensively. +-func PrepareRename(ctx context.Context, snapshot *cache.Snapshot, f file.Handle, pp protocol.Position) (_ *PrepareItem, usererr, err error) { +- ctx, done := event.Start(ctx, "golang.PrepareRename") +- defer done() +- +- // Is the cursor within the package name declaration? +- if pgf, inPackageName, err := parsePackageNameDecl(ctx, snapshot, f, pp); err != nil { +- return nil, err, err +- } else if inPackageName { +- item, err := prepareRenamePackageName(ctx, snapshot, pgf) +- return item, err, err +- } +- +- // Ordinary (non-package) renaming. +- // +- // Type-check the current package, locate the reference at the position, +- // validate the object, and report its name and range. +- // +- // TODO(adonovan): in all cases below, we return usererr=nil, +- // which means we return (nil, nil) at the protocol +- // layer. This seems like a bug, or at best an exploitation of +- // knowledge of VSCode-specific behavior. Can we avoid that? +- pkg, pgf, err := NarrowestPackageForFile(ctx, snapshot, f.URI()) +- if err != nil { +- return nil, nil, err +- } +- pos, err := pgf.PositionPos(pp) +- if err != nil { +- return nil, nil, err +- } +- +- cur, ok := pgf.Cursor.FindByPos(pos, pos) +- if !ok { +- return nil, nil, fmt.Errorf("can't find cursor for selection") +- } +- +- // Check if we're in a 'func' keyword. If so, we hijack the renaming to +- // change the function signature. 
+- if item, err := prepareRenameFuncSignature(pgf, pos, cur); err != nil { +- return nil, nil, err +- } else if item != nil { +- return item, nil, nil +- } +- +- targets, node, err := objectsAt(pkg.TypesInfo(), pgf.File, pos) +- if err != nil { +- // Check if we are renaming an ident inside its doc comment. The call to +- // objectsAt will have returned an error in this case. +- id := docCommentPosToIdent(pgf, pos, cur) +- if id == nil { +- return nil, nil, err +- } +- obj := pkg.TypesInfo().Defs[id] +- if obj == nil { +- return nil, nil, fmt.Errorf("error fetching Object for ident %q", id.Name) +- } +- // Change rename target to the ident. +- targets = map[types.Object]ast.Node{obj: id} +- node = id +- } +- var obj types.Object +- for obj = range targets { +- break // pick one arbitrarily +- } +- if err := checkRenamable(obj, node); err != nil { +- return nil, nil, err +- } +- rng, err := pgf.NodeRange(node) +- if err != nil { +- return nil, nil, err +- } +- if _, isImport := node.(*ast.ImportSpec); isImport { +- // We're not really renaming the import path. +- rng.End = rng.Start +- } +- return &PrepareItem{ +- Range: rng, +- Text: obj.Name(), +- }, nil, nil +-} +- +-func prepareRenamePackageName(ctx context.Context, snapshot *cache.Snapshot, pgf *parsego.File) (*PrepareItem, error) { +- // Does the client support file renaming? +- if !slices.Contains(snapshot.Options().SupportedResourceOperations, protocol.Rename) { +- return nil, errors.New("can't rename package: LSP client does not support file renaming") +- } +- +- // Check validity of the metadata for the file's containing package. +- meta, err := snapshot.NarrowestMetadataForFile(ctx, pgf.URI) +- if err != nil { +- return nil, err +- } +- if meta.Name == "main" { +- return nil, fmt.Errorf("can't rename package \"main\"") +- } +- if strings.HasSuffix(string(meta.Name), "_test") { +- return nil, fmt.Errorf("can't rename x_test packages") +- } +- if meta.Module == nil { +- return nil, fmt.Errorf("can't rename package: missing module information for package %q", meta.PkgPath) +- } +- if meta.Module.Path == string(meta.PkgPath) { +- return nil, fmt.Errorf("can't rename package: package path %q is the same as module path %q", meta.PkgPath, meta.Module.Path) +- } +- +- // Return the location of the package declaration. +- rng, err := pgf.NodeRange(pgf.File.Name) +- if err != nil { +- return nil, err +- } +- +- pkgName := string(meta.Name) +- fullPath := string(meta.PkgPath) +- text := pkgName +- // Before displaying the full package path, verify that the PackageMove +- // setting is enabled and that the package name matches its directory +- // basename. Checking the value of meta.Module above ensures that the +- // current view is either a GoMod or a GoWork view, which are the only views +- // for which we should enable package move. +- if snapshot.Options().PackageMove && path.Base(fullPath) == pkgName { +- text = fullPath +- } +- return &PrepareItem{ +- Range: rng, +- Text: text, +- }, nil +-} +- +-// prepareRenameFuncSignature prepares a change signature refactoring initiated +-// through invoking a rename request at the 'func' keyword of a function +-// declaration. +-// +-// The resulting text is the signature of the function, which may be edited to +-// the new signature. 
+-func prepareRenameFuncSignature(pgf *parsego.File, pos token.Pos, cursor inspector.Cursor) (*PrepareItem, error) { +- fdecl := funcKeywordDecl(pos, cursor) +- if fdecl == nil { +- return nil, nil +- } +- ftyp := nameBlankParams(fdecl.Type) +- var buf bytes.Buffer +- if err := printer.Fprint(&buf, token.NewFileSet(), ftyp); err != nil { // use a new fileset so that the signature is formatted on a single line +- return nil, err +- } +- rng, err := pgf.PosRange(ftyp.Func, ftyp.Func+token.Pos(len("func"))) +- if err != nil { +- return nil, err +- } +- text := buf.String() +- return &PrepareItem{ +- Range: rng, +- Text: text, +- }, nil +-} +- +-// nameBlankParams returns a copy of ftype with blank or unnamed params +-// assigned a unique name. +-func nameBlankParams(ftype *ast.FuncType) *ast.FuncType { +- ftype = internalastutil.CloneNode(ftype) +- +- // First, collect existing names. +- scope := make(map[string]bool) +- for name := range internalastutil.FlatFields(ftype.Params) { +- if name != nil { +- scope[name.Name] = true +- } +- } +- blanks := 0 +- for name, field := range internalastutil.FlatFields(ftype.Params) { +- if name == nil { +- name = ast.NewIdent("_") +- field.Names = append(field.Names, name) // ok to append +- } +- if name.Name == "" || name.Name == "_" { +- for { +- newName := fmt.Sprintf("_%d", blanks) +- blanks++ +- if !scope[newName] { +- name.Name = newName +- break +- } +- } +- } +- } +- return ftype +-} +- +-// renameFuncSignature computes and applies the effective change signature +-// operation resulting from a 'renamed' (=rewritten) signature. +-func renameFuncSignature(ctx context.Context, pkg *cache.Package, pgf *parsego.File, pos token.Pos, snapshot *cache.Snapshot, cursor inspector.Cursor, f file.Handle, pp protocol.Position, newName string) (map[protocol.DocumentURI][]protocol.TextEdit, error) { +- fdecl := funcKeywordDecl(pos, cursor) +- if fdecl == nil { +- return nil, nil +- } +- ftyp := nameBlankParams(fdecl.Type) +- +- // Parse the user's requested new signature. +- parsed, err := parser.ParseExpr(newName) +- if err != nil { +- return nil, err +- } +- newType, _ := parsed.(*ast.FuncType) +- if newType == nil { +- return nil, fmt.Errorf("parsed signature is %T, not a function type", parsed) +- } +- +- // Check results, before we get into handling permutations of parameters. 
+- if got, want := newType.Results.NumFields(), ftyp.Results.NumFields(); got != want { +- return nil, fmt.Errorf("changing results not yet supported (got %d results, want %d)", got, want) +- } +- var resultTypes []string +- for _, field := range internalastutil.FlatFields(ftyp.Results) { +- resultTypes = append(resultTypes, FormatNode(token.NewFileSet(), field.Type)) +- } +- resultIndex := 0 +- for _, field := range internalastutil.FlatFields(newType.Results) { +- if FormatNode(token.NewFileSet(), field.Type) != resultTypes[resultIndex] { +- return nil, fmt.Errorf("changing results not yet supported") +- } +- resultIndex++ +- } +- +- type paramInfo struct { +- idx int +- typ string +- } +- oldParams := make(map[string]paramInfo) +- for name, field := range internalastutil.FlatFields(ftyp.Params) { +- oldParams[name.Name] = paramInfo{ +- idx: len(oldParams), +- typ: types.ExprString(field.Type), +- } +- } +- +- var newParams []int +- for name, field := range internalastutil.FlatFields(newType.Params) { +- if name == nil { +- return nil, fmt.Errorf("need named fields") +- } +- info, ok := oldParams[name.Name] +- if !ok { +- return nil, fmt.Errorf("couldn't find name %s: adding parameters not yet supported", name) +- } +- if newType := types.ExprString(field.Type); newType != info.typ { +- return nil, fmt.Errorf("changing types (%s to %s) not yet supported", info.typ, newType) +- } +- newParams = append(newParams, info.idx) +- } +- +- rng, err := pgf.PosRange(ftyp.Func, ftyp.Func) +- if err != nil { +- return nil, err +- } +- changes, err := ChangeSignature(ctx, snapshot, pkg, pgf, rng, newParams) +- if err != nil { +- return nil, err +- } +- transposed := make(map[protocol.DocumentURI][]protocol.TextEdit) +- for _, change := range changes { +- transposed[change.TextDocumentEdit.TextDocument.URI] = protocol.AsTextEdits(change.TextDocumentEdit.Edits) +- } +- return transposed, nil +-} +- +-// funcKeywordDecl returns the FuncDecl for which pos is in the 'func' keyword, +-// if any. +-func funcKeywordDecl(pos token.Pos, cursor inspector.Cursor) *ast.FuncDecl { +- fdecl, _ := cursorutil.FirstEnclosing[*ast.FuncDecl](cursor) +- if fdecl == nil { +- return nil +- } +- ftyp := fdecl.Type +- if pos < ftyp.Func || pos > ftyp.Func+token.Pos(len("func")) { // tolerate renaming immediately after 'func' +- return nil +- } +- return fdecl +-} +- +-// checkRenamable returns an error if the object cannot be renamed. +-// node is the name-like syntax node from which the renaming originated. +-func checkRenamable(obj types.Object, node ast.Node) error { +- switch obj := obj.(type) { +- case *types.Var: +- // Allow renaming an embedded field only at its declaration. +- if obj.Embedded() && node.Pos() != obj.Pos() { +- return errors.New("an embedded field must be renamed at its declaration (since it renames the type too)") +- +- } +- case *types.Builtin, *types.Nil: +- return fmt.Errorf("%s is built in and cannot be renamed", obj.Name()) +- } +- if obj.Pkg() == nil || obj.Pkg().Path() == "unsafe" { +- // e.g. error.Error, unsafe.Pointer +- return fmt.Errorf("%s is built in and cannot be renamed", obj.Name()) +- } +- if obj.Name() == "_" { +- return errors.New("can't rename \"_\"") +- } +- return nil +-} +- +-// editsToDocChanges converts a map of uris to arrays of text edits to a list of document changes. 
+-func editsToDocChanges(ctx context.Context, snapshot *cache.Snapshot, edits map[protocol.DocumentURI][]protocol.TextEdit) ([]protocol.DocumentChange, error) { +- var changes []protocol.DocumentChange +- for uri, e := range edits { +- fh, err := snapshot.ReadFile(ctx, uri) +- if err != nil { +- return nil, err +- } +- changes = append(changes, protocol.DocumentChangeEdit(fh, e)) +- } +- return changes, nil +-} +- +-// Rename returns a map of TextEdits for each file modified when renaming a +-// given identifier within a package and a boolean value of true for renaming +-// package and false otherwise. +-func Rename(ctx context.Context, snapshot *cache.Snapshot, f file.Handle, pp protocol.Position, newName string) ([]protocol.DocumentChange, error) { +- ctx, done := event.Start(ctx, "golang.Rename") +- defer done() +- +- pkg, pgf, err := NarrowestPackageForFile(ctx, snapshot, f.URI()) +- if err != nil { +- return nil, err +- } +- pos, err := pgf.PositionPos(pp) +- if err != nil { +- return nil, err +- } +- +- cur, ok := pgf.Cursor.FindByPos(pos, pos) +- if !ok { +- return nil, fmt.Errorf("can't find cursor for selection") +- } +- +- if edits, err := renameFuncSignature(ctx, pkg, pgf, pos, snapshot, cur, f, pp, newName); err != nil { +- return nil, err +- } else if edits != nil { +- return editsToDocChanges(ctx, snapshot, edits) +- } +- +- // Cursor within package name declaration? +- _, inPackageName, err := parsePackageNameDecl(ctx, snapshot, f, pp) +- if err != nil { +- return nil, err +- } +- +- var editMap map[protocol.DocumentURI][]diff.Edit +- if inPackageName { +- countRenamePackage.Inc() +- if !isValidPackagePath(pkg.String(), newName) { +- return nil, fmt.Errorf("invalid package path: %q (package moves are not yet supported, see go.dev/issue/57171)", newName) +- } +- // Only the last element of the path is required as input for [renamePackageName]. +- newName = path.Base(newName) +- editMap, err = renamePackageName(ctx, snapshot, f, PackageName(newName)) +- } else { +- if !isValidIdentifier(newName) { +- return nil, fmt.Errorf("invalid identifier to rename: %q", newName) +- } +- editMap, err = renameOrdinary(ctx, snapshot, f.URI(), pp, newName) +- } +- if err != nil { +- return nil, err +- } +- +- // Convert edits to protocol form. +- result := make(map[protocol.DocumentURI][]protocol.TextEdit) +- for uri, edits := range editMap { +- // Sort and de-duplicate edits. +- // +- // Overlapping edits may arise in local renamings (due +- // to type switch implicits) and globals ones (due to +- // processing multiple package variants). +- // +- // We assume renaming produces diffs that are all +- // replacements (no adjacent insertions that might +- // become reordered) and that are either identical or +- // non-overlapping. +- diff.SortEdits(edits) +- edits = slices.Compact(edits) +- +- // TODO(adonovan): the logic above handles repeat edits to the +- // same file URI (e.g. as a member of package p and p_test) but +- // is not sufficient to handle file-system level aliasing arising +- // from symbolic or hard links. For that, we should use a +- // robustio-FileID-keyed map. +- // See https://go.dev/cl/457615 for example. +- // This really occurs in practice, e.g. kubernetes has +- // vendor/k8s.io/kubectl -> ../../staging/src/k8s.io/kubectl. 
+- fh, err := snapshot.ReadFile(ctx, uri) +- if err != nil { +- return nil, err +- } +- data, err := fh.Content() +- if err != nil { +- return nil, err +- } +- m := protocol.NewMapper(uri, data) +- textedits, err := protocol.EditsFromDiffEdits(m, edits) +- if err != nil { +- return nil, err +- } +- result[uri] = textedits +- } +- +- changes, err := editsToDocChanges(ctx, snapshot, result) +- if err != nil { +- return nil, err +- } +- // Update the last component of the file's enclosing directory. +- if inPackageName { +- oldDir := f.URI().DirPath() +- newDir := filepath.Join(filepath.Dir(oldDir), path.Base(newName)) +- changes = append(changes, protocol.DocumentChangeRename( +- protocol.URIFromPath(oldDir), +- protocol.URIFromPath(newDir))) +- } +- return changes, nil +-} +- +-// renameOrdinary renames an ordinary (non-package) name throughout the workspace. +-func renameOrdinary(ctx context.Context, snapshot *cache.Snapshot, uri protocol.DocumentURI, pp protocol.Position, newName string) (map[protocol.DocumentURI][]diff.Edit, error) { +- // Type-check the referring package and locate the object(s). +- // +- // Unlike NarrowestPackageForFile, this operation prefers the +- // widest variant as, for non-exported identifiers, it is the +- // only package we need. (In case you're wondering why +- // 'references' doesn't also want the widest variant: it +- // computes the union across all variants.) +- mps, err := snapshot.MetadataForFile(ctx, uri, true) +- if err != nil { +- return nil, err +- } +- if len(mps) == 0 { +- return nil, fmt.Errorf("no package metadata for file %s", uri) +- } +- widest := mps[len(mps)-1] // widest variant may include _test.go files +- pkgs, err := snapshot.TypeCheck(ctx, widest.ID) +- if err != nil { +- return nil, err +- } +- pkg := pkgs[0] +- pgf, err := pkg.File(uri) +- if err != nil { +- return nil, err // "can't happen" +- } +- pos, err := pgf.PositionPos(pp) +- if err != nil { +- return nil, err +- } +- var ok bool +- cur, ok := pgf.Cursor.FindByPos(pos, pos) // of selected Ident or ImportSpec +- if !ok { +- return nil, fmt.Errorf("can't find cursor for selection") +- } +- targets, node, err := objectsAt(pkg.TypesInfo(), pgf.File, pos) +- if err != nil { +- // Check if we are renaming an ident inside its doc comment. The call to +- // objectsAt will have returned an error in this case. +- id := docCommentPosToIdent(pgf, pos, cur) +- if id == nil { +- return nil, err +- } +- obj := pkg.TypesInfo().Defs[id] +- if obj == nil { +- return nil, fmt.Errorf("error fetching types.Object for ident %q", id.Name) +- } +- // Change rename target to the ident. +- targets = map[types.Object]ast.Node{obj: id} +- } +- +- // Pick a representative object arbitrarily. +- // (All share the same name, pos, and kind.) +- var obj types.Object +- for obj = range targets { +- break +- } +- if obj.Name() == newName { +- return nil, fmt.Errorf("old and new names are the same: %s", newName) +- } +- if err := checkRenamable(obj, node); err != nil { +- return nil, err +- } +- +- // This covers the case where we are renaming an embedded field at its +- // declaration (see golang/go#45199). Perform the rename on the field's type declaration. +- if is[*types.Var](obj) && obj.(*types.Var).Embedded() { +- if id, ok := node.(*ast.Ident); ok { +- // TypesInfo.Uses contains the embedded field's *types.TypeName. 
+- if typeName := pkg.TypesInfo().Uses[id]; typeName != nil { +- loc, err := ObjectLocation(ctx, pkg.FileSet(), snapshot, typeName) +- if err != nil { +- return nil, err +- } +- return renameOrdinary(ctx, snapshot, loc.URI, loc.Range.Start, newName) +- } +- } +- } +- +- // Find objectpath, if object is exported ("" otherwise). +- var declObjPath objectpath.Path +- if obj.Exported() { +- // objectpath.For requires the origin of a generic function or type, not an +- // instantiation (a bug?). +- // +- // Note that unlike Funcs, TypeNames are always canonical (they are "left" +- // of the type parameters, unlike methods). +- switch obj0 := obj.(type) { // avoid "obj :=" since cases reassign the var +- case *types.TypeName: +- if _, ok := types.Unalias(obj.Type()).(*types.TypeParam); ok { +- // As with capitalized function parameters below, type parameters are +- // local. +- goto skipObjectPath +- } +- case *types.Func: +- obj = obj0.Origin() +- case *types.Var: +- // TODO(adonovan): do vars need the origin treatment too? (issue #58462) +- +- // Function parameter and result vars that are (unusually) +- // capitalized are technically exported, even though they +- // cannot be referenced, because they may affect downstream +- // error messages. But we can safely treat them as local. +- // +- // This is not merely an optimization: the renameExported +- // operation gets confused by such vars. It finds them from +- // objectpath, the classifies them as local vars, but as +- // they came from export data they lack syntax and the +- // correct scope tree (issue #61294). +- if !obj0.IsField() && !typesinternal.IsPackageLevel(obj) { +- goto skipObjectPath +- } +- } +- if path, err := objectpath.For(obj); err == nil { +- declObjPath = path +- } +- skipObjectPath: +- } +- +- // Nonexported? Search locally. +- if declObjPath == "" { +- var objects []types.Object +- for obj := range targets { +- objects = append(objects, obj) +- } +- +- editMap, _, err := renameObjects(newName, pkg, objects...) +- if err != nil { +- return nil, err +- } +- +- // If the selected identifier is a receiver declaration, +- // also rename receivers of other methods of the same type +- // that don't already have the desired name. +- // Quietly discard edits from any that can't be renamed. +- // +- // We interpret renaming the receiver declaration as +- // intent for the broader renaming; renaming a use of +- // the receiver effects only the local renaming. +- if id, ok := cur.Node().(*ast.Ident); ok && id.Pos() == obj.Pos() { +- // enclosing func +- if decl, _ := cursorutil.FirstEnclosing[*ast.FuncDecl](cur); decl != nil { +- if decl.Recv != nil && +- len(decl.Recv.List) > 0 && +- len(decl.Recv.List[0].Names) > 0 { +- recv := pkg.TypesInfo().Defs[decl.Recv.List[0].Names[0]] +- if recv == obj { +- // TODO(adonovan): simplify the above 7 lines to +- // to "if obj.(*Var).Kind==Recv" in go1.25. +- renameReceivers(pkg, recv.(*types.Var), newName, editMap) +- } +- } +- } +- } +- return editMap, nil +- } +- +- // Exported: search globally. +- // +- // For exported package-level var/const/func/type objects, the +- // search scope is just the direct importers. +- // +- // For exported fields and methods, the scope is the +- // transitive rdeps. (The exportedness of the field's struct +- // or method's receiver is irrelevant.) 
+- transitive := false +- switch obj := obj.(type) { +- case *types.TypeName: +- // Renaming an exported package-level type +- // requires us to inspect all transitive rdeps +- // in the event that the type is embedded. +- // +- // TODO(adonovan): opt: this is conservative +- // but inefficient. Instead, expand the scope +- // of the search only if we actually encounter +- // an embedding of the type, and only then to +- // the rdeps of the embedding package. +- if obj.Parent() == obj.Pkg().Scope() { +- transitive = true +- } +- +- case *types.Var: +- if obj.IsField() { +- transitive = true // field +- } +- +- // TODO(adonovan): opt: process only packages that +- // contain a reference (xrefs) to the target field. +- +- case *types.Func: +- if obj.Signature().Recv() != nil { +- transitive = true // method +- } +- +- // It's tempting to optimize by skipping +- // packages that don't contain a reference to +- // the method in the xrefs index, but we still +- // need to apply the satisfy check to those +- // packages to find assignment statements that +- // might expands the scope of the renaming. +- } +- +- // Type-check all the packages to inspect. +- declURI := protocol.URIFromPath(pkg.FileSet().File(obj.Pos()).Name()) +- pkgs, err = typeCheckReverseDependencies(ctx, snapshot, declURI, transitive) +- if err != nil { +- return nil, err +- } +- +- // Apply the renaming to the (initial) object. +- declPkgPath := PackagePath(obj.Pkg().Path()) +- return renameExported(pkgs, declPkgPath, declObjPath, newName) +-} +- +-// renameReceivers renames all receivers of methods of the same named +-// type as recv. The edits of each successful renaming are added to +-// editMap; the failed ones are quietly discarded. +-func renameReceivers(pkg *cache.Package, recv *types.Var, newName string, editMap map[protocol.DocumentURI][]diff.Edit) { +- _, named := typesinternal.ReceiverNamed(recv) +- if named == nil { +- return +- } +- +- // Find receivers of other methods of the same named type. +- for m := range named.Origin().Methods() { +- recv2 := m.Signature().Recv() +- if recv2 == recv { +- continue // don't re-rename original receiver +- } +- if recv2.Name() == newName { +- continue // no renaming needed +- } +- editMap2, _, err := renameObjects(newName, pkg, recv2) +- if err != nil { +- continue // ignore secondary failures +- } +- +- // Since all methods (and their comments) +- // are disjoint, and don't affect imports, +- // we can safely assume that all edits are +- // nonconflicting and disjoint. +- for uri, edits := range editMap2 { +- editMap[uri] = append(editMap[uri], edits...) +- } +- } +-} +- +-// typeCheckReverseDependencies returns the type-checked packages for +-// the reverse dependencies of all packages variants containing +-// file declURI. The packages are in some topological order. +-// +-// It includes all variants (even intermediate test variants) for the +-// purposes of computing reverse dependencies, but discards ITVs for +-// the actual renaming work. +-// +-// (This neglects obscure edge cases where a _test.go file changes the +-// selectors used only in an ITV, but life is short. Also sin must be +-// punished.) 
+-func typeCheckReverseDependencies(ctx context.Context, snapshot *cache.Snapshot, declURI protocol.DocumentURI, transitive bool) ([]*cache.Package, error) { +- variants, err := snapshot.MetadataForFile(ctx, declURI, false) +- if err != nil { +- return nil, err +- } +- // variants must include ITVs for the reverse dependency +- // computation, but they are filtered out before we typecheck. +- allRdeps := make(map[PackageID]*metadata.Package) +- for _, variant := range variants { +- rdeps, err := snapshot.ReverseDependencies(ctx, variant.ID, transitive) +- if err != nil { +- return nil, err +- } +- allRdeps[variant.ID] = variant // include self +- maps.Copy(allRdeps, rdeps) +- } +- var ids []PackageID +- for id, meta := range allRdeps { +- if meta.IsIntermediateTestVariant() { +- continue +- } +- ids = append(ids, id) +- } +- +- // Sort the packages into some topological order of the +- // (unfiltered) metadata graph. +- metadata.SortPostOrder(snapshot, ids) +- +- // Dependencies must be visited first since they can expand +- // the search set. Ideally we would process the (filtered) set +- // of packages in the parallel postorder of the snapshot's +- // (unfiltered) metadata graph, but this is quite tricky +- // without a good graph abstraction. +- // +- // For now, we visit packages sequentially in order of +- // ascending height, like an inverted breadth-first search. +- // +- // Type checking is by far the dominant cost, so +- // overlapping it with renaming may not be worthwhile. +- return snapshot.TypeCheck(ctx, ids...) +-} +- +-// renameExported renames the object denoted by (pkgPath, objPath) +-// within the specified packages, along with any other objects that +-// must be renamed as a consequence. The slice of packages must be +-// topologically ordered. +-func renameExported(pkgs []*cache.Package, declPkgPath PackagePath, declObjPath objectpath.Path, newName string) (map[protocol.DocumentURI][]diff.Edit, error) { +- +- // A target is a name for an object that is stable across types.Packages. +- type target struct { +- pkg PackagePath +- obj objectpath.Path +- } +- +- // Populate the initial set of target objects. +- // This set may grow as we discover the consequences of each renaming. +- // +- // TODO(adonovan): strictly, each cone of reverse dependencies +- // of a single variant should have its own target map that +- // monotonically expands as we go up the import graph, because +- // declarations in test files can alter the set of +- // package-level names and change the meaning of field and +- // method selectors. So if we parallelize the graph +- // visitation (see above), we should also compute the targets +- // as a union of dependencies. +- // +- // Or we could decide that the logic below is fast enough not +- // to need parallelism. In small measurements so far the +- // type-checking step is about 95% and the renaming only 5%. +- targets := map[target]bool{{declPkgPath, declObjPath}: true} +- +- // Apply the renaming operation to each package. +- allEdits := make(map[protocol.DocumentURI][]diff.Edit) +- for _, pkg := range pkgs { +- +- // Resolved target objects within package pkg. +- var objects []types.Object +- for t := range targets { +- p := pkg.DependencyTypes(t.pkg) +- if p == nil { +- continue // indirect dependency of no consequence +- } +- obj, err := objectpath.Object(p, t.obj) +- if err != nil { +- // Possibly a method or an unexported type +- // that is not reachable through export data? +- // See https://github.com/golang/go/issues/60789. 
+- // +- // TODO(adonovan): it seems unsatisfactory that Object +- // should return an error for a "valid" path. Perhaps +- // we should define such paths as invalid and make +- // objectpath.For compute reachability? +- // Would that be a compatible change? +- continue +- } +- objects = append(objects, obj) +- } +- if len(objects) == 0 { +- continue // no targets of consequence to this package +- } +- +- // Apply the renaming. +- editMap, moreObjects, err := renameObjects(newName, pkg, objects...) +- if err != nil { +- return nil, err +- } +- +- // It is safe to concatenate the edits as they are non-overlapping +- // (or identical, in which case they will be de-duped by Rename). +- for uri, edits := range editMap { +- allEdits[uri] = append(allEdits[uri], edits...) +- } +- +- // Expand the search set? +- for obj := range moreObjects { +- objpath, err := objectpath.For(obj) +- if err != nil { +- continue // not exported +- } +- target := target{PackagePath(obj.Pkg().Path()), objpath} +- targets[target] = true +- +- // TODO(adonovan): methods requires dynamic +- // programming of the product targets x +- // packages as any package might add a new +- // target (from a forward dep) as a +- // consequence, and any target might imply a +- // new set of rdeps. See golang/go#58461. +- } +- } +- +- return allEdits, nil +-} +- +-// renamePackageName renames package declarations, imports, and go.mod files. +-func renamePackageName(ctx context.Context, s *cache.Snapshot, f file.Handle, newName PackageName) (map[protocol.DocumentURI][]diff.Edit, error) { +- // Rename the package decl and all imports. +- renamingEdits, err := renamePackage(ctx, s, f, newName) +- if err != nil { +- return nil, err +- } +- +- oldBase := f.URI().DirPath() +- newPkgDir := filepath.Join(filepath.Dir(oldBase), string(newName)) +- +- // Update any affected replace directives in go.mod files. +- // TODO(adonovan): extract into its own function. +- // +- // Get all workspace modules. +- // TODO(adonovan): should this operate on all go.mod files, +- // irrespective of whether they are included in the workspace? +- modFiles := s.View().ModFiles() +- for _, m := range modFiles { +- fh, err := s.ReadFile(ctx, m) +- if err != nil { +- return nil, err +- } +- pm, err := s.ParseMod(ctx, fh) +- if err != nil { +- return nil, err +- } +- +- modFileDir := pm.URI.DirPath() +- affectedReplaces := []*modfile.Replace{} +- +- // Check if any replace directives need to be fixed +- for _, r := range pm.File.Replace { +- if !strings.HasPrefix(r.New.Path, "/") && !strings.HasPrefix(r.New.Path, "./") && !strings.HasPrefix(r.New.Path, "../") { +- continue +- } +- +- replacedPath := r.New.Path +- if strings.HasPrefix(r.New.Path, "./") || strings.HasPrefix(r.New.Path, "../") { +- replacedPath = filepath.Join(modFileDir, r.New.Path) +- } +- +- // TODO: Is there a risk of converting a '\' delimited replacement to a '/' delimited replacement? 
+- if !strings.HasPrefix(filepath.ToSlash(replacedPath)+"/", filepath.ToSlash(oldBase)+"/") { +- continue // not affected by the package renaming +- } +- +- affectedReplaces = append(affectedReplaces, r) +- } +- +- if len(affectedReplaces) == 0 { +- continue +- } +- copied, err := modfile.Parse("", pm.Mapper.Content, nil) +- if err != nil { +- return nil, err +- } +- +- for _, r := range affectedReplaces { +- replacedPath := r.New.Path +- if strings.HasPrefix(r.New.Path, "./") || strings.HasPrefix(r.New.Path, "../") { +- replacedPath = filepath.Join(modFileDir, r.New.Path) +- } +- +- suffix := strings.TrimPrefix(replacedPath, oldBase) +- +- newReplacedPath, err := filepath.Rel(modFileDir, newPkgDir+suffix) +- if err != nil { +- return nil, err +- } +- +- newReplacedPath = filepath.ToSlash(newReplacedPath) +- +- if !strings.HasPrefix(newReplacedPath, "/") && !strings.HasPrefix(newReplacedPath, "../") { +- newReplacedPath = "./" + newReplacedPath +- } +- +- if err := copied.AddReplace(r.Old.Path, "", newReplacedPath, ""); err != nil { +- return nil, err +- } +- } +- +- copied.Cleanup() +- newContent, err := copied.Format() +- if err != nil { +- return nil, err +- } +- +- // Calculate the edits to be made due to the change. +- edits := diff.Bytes(pm.Mapper.Content, newContent) +- renamingEdits[pm.URI] = append(renamingEdits[pm.URI], edits...) +- } +- +- return renamingEdits, nil +-} +- +-// renamePackage computes all workspace edits required to rename the package +-// described by the given metadata, to newName, by renaming its package +-// directory. +-// +-// It updates package clauses and import paths for the renamed package as well +-// as any other packages affected by the directory renaming among all packages +-// known to the snapshot. +-func renamePackage(ctx context.Context, s *cache.Snapshot, f file.Handle, newName PackageName) (map[protocol.DocumentURI][]diff.Edit, error) { +- if strings.HasSuffix(string(newName), "_test") { +- return nil, fmt.Errorf("cannot rename to _test package") +- } +- +- // We need metadata for the relevant package and module paths. +- // These should be the same for all packages containing the file. +- meta, err := s.NarrowestMetadataForFile(ctx, f.URI()) +- if err != nil { +- return nil, err +- } +- +- oldPkgPath := meta.PkgPath +- if meta.Module == nil { +- return nil, fmt.Errorf("cannot rename package: missing module information for package %q", meta.PkgPath) +- } +- modulePath := PackagePath(meta.Module.Path) +- if modulePath == oldPkgPath { +- return nil, fmt.Errorf("cannot rename package: module path %q is the same as the package path, so renaming the package directory would have no effect", modulePath) +- } +- +- newPathPrefix := path.Join(path.Dir(string(oldPkgPath)), string(newName)) +- +- // We must inspect all packages, not just direct importers, +- // because we also rename subpackages, which may be unrelated. +- // (If the renamed package imports a subpackage it may require +- // edits to both its package and import decls.) +- allMetadata, err := s.AllMetadata(ctx) +- if err != nil { +- return nil, err +- } +- +- // Rename package and import declarations in all relevant packages. +- edits := make(map[protocol.DocumentURI][]diff.Edit) +- for _, mp := range allMetadata { +- // Special case: x_test packages for the renamed package will not have the +- // package path as a dir prefix, but still need their package clauses +- // renamed. 
+- if mp.PkgPath == oldPkgPath+"_test" { +- if err := renamePackageClause(ctx, mp, s, newName+"_test", edits); err != nil { +- return nil, err +- } +- continue +- } +- +- // Subtle: check this condition before checking for valid module info +- // below, because we should not fail this operation if unrelated packages +- // lack module info. +- if !strings.HasPrefix(string(mp.PkgPath)+"/", string(oldPkgPath)+"/") { +- continue // not affected by the package renaming +- } +- +- if mp.Module == nil { +- // This check will always fail under Bazel. +- return nil, fmt.Errorf("cannot rename package: missing module information for package %q", mp.PkgPath) +- } +- +- if modulePath != PackagePath(mp.Module.Path) { +- continue // don't edit imports if nested package and renaming package have different module paths +- } +- +- // Renaming a package consists of changing its import path and package name. +- suffix := strings.TrimPrefix(string(mp.PkgPath), string(oldPkgPath)) +- newPath := newPathPrefix + suffix +- +- pkgName := mp.Name +- if mp.PkgPath == oldPkgPath { +- pkgName = newName +- +- if err := renamePackageClause(ctx, mp, s, newName, edits); err != nil { +- return nil, err +- } +- } +- +- imp := ImportPath(newPath) // TODO(adonovan): what if newPath has vendor/ prefix? +- if err := renameImports(ctx, s, mp, imp, pkgName, edits); err != nil { +- return nil, err +- } +- } +- +- return edits, nil +-} +- +-// renamePackageClause computes edits renaming the package clause of files in +-// the package described by the given metadata, to newName. +-// +-// Edits are written into the edits map. +-func renamePackageClause(ctx context.Context, mp *metadata.Package, snapshot *cache.Snapshot, newName PackageName, edits map[protocol.DocumentURI][]diff.Edit) error { +- // Rename internal references to the package in the renaming package. +- for _, uri := range mp.CompiledGoFiles { +- fh, err := snapshot.ReadFile(ctx, uri) +- if err != nil { +- return err +- } +- f, err := snapshot.ParseGo(ctx, fh, parsego.Header) +- if err != nil { +- return err +- } +- if f.File.Name == nil { +- continue // no package declaration +- } +- +- edit, err := posEdit(f.Tok, f.File.Name.Pos(), f.File.Name.End(), string(newName)) +- if err != nil { +- return err +- } +- edits[f.URI] = append(edits[f.URI], edit) +- } +- +- return nil +-} +- +-// renameImports computes the set of edits to imports resulting from renaming +-// the package described by the given metadata, to a package with import path +-// newPath and name newName. +-// +-// Edits are written into the edits map. +-func renameImports(ctx context.Context, snapshot *cache.Snapshot, mp *metadata.Package, newPath ImportPath, newName PackageName, allEdits map[protocol.DocumentURI][]diff.Edit) error { +- rdeps, err := snapshot.ReverseDependencies(ctx, mp.ID, false) // find direct importers +- if err != nil { +- return err +- } +- +- // Pass 1: rename import paths in import declarations. 
+- needsTypeCheck := make(map[PackageID][]protocol.DocumentURI) +- for _, rdep := range rdeps { +- if rdep.IsIntermediateTestVariant() { +- continue // for renaming, these variants are redundant +- } +- +- for _, uri := range rdep.CompiledGoFiles { +- fh, err := snapshot.ReadFile(ctx, uri) +- if err != nil { +- return err +- } +- f, err := snapshot.ParseGo(ctx, fh, parsego.Header) +- if err != nil { +- return err +- } +- if f.File.Name == nil { +- continue // no package declaration +- } +- for _, imp := range f.File.Imports { +- if rdep.DepsByImpPath[metadata.UnquoteImportPath(imp)] != mp.ID { +- continue // not the import we're looking for +- } +- +- // If the import does not explicitly specify +- // a local name, then we need to invoke the +- // type checker to locate references to update. +- // +- // TODO(adonovan): is this actually true? +- // Renaming an import with a local name can still +- // cause conflicts: shadowing of built-ins, or of +- // package-level decls in the same or another file. +- if imp.Name == nil { +- needsTypeCheck[rdep.ID] = append(needsTypeCheck[rdep.ID], uri) +- } +- +- // Create text edit for the import path (string literal). +- edit, err := posEdit(f.Tok, imp.Path.Pos(), imp.Path.End(), strconv.Quote(string(newPath))) +- if err != nil { +- return err +- } +- allEdits[uri] = append(allEdits[uri], edit) +- } +- } +- } +- +- // If the imported package's name hasn't changed, +- // we don't need to rename references within each file. +- if newName == mp.Name { +- return nil +- } +- +- // Pass 2: rename local name (types.PkgName) of imported +- // package throughout one or more files of the package. +- ids := make([]PackageID, 0, len(needsTypeCheck)) +- for id := range needsTypeCheck { +- ids = append(ids, id) +- } +- pkgs, err := snapshot.TypeCheck(ctx, ids...) +- if err != nil { +- return err +- } +- for i, id := range ids { +- pkg := pkgs[i] +- for _, uri := range needsTypeCheck[id] { +- f, err := pkg.File(uri) +- if err != nil { +- return err +- } +- for _, imp := range f.File.Imports { +- if imp.Name != nil { +- continue // has explicit local name +- } +- if rdeps[id].DepsByImpPath[metadata.UnquoteImportPath(imp)] != mp.ID { +- continue // not the import we're looking for +- } +- +- pkgname, ok := pkg.TypesInfo().Implicits[imp].(*types.PkgName) +- if !ok { +- // "can't happen", but be defensive (#71656) +- return fmt.Errorf("internal error: missing type information for %s import at %s", +- imp.Path.Value, safetoken.StartPosition(pkg.FileSet(), imp.Pos())) +- } +- +- pkgScope := pkg.Types().Scope() +- fileScope := pkg.TypesInfo().Scopes[f.File] +- +- localName := string(newName) +- try := 0 +- +- // Keep trying with fresh names until one succeeds. +- // +- // TODO(adonovan): fix: this loop is not sufficient to choose a name +- // that is guaranteed to be conflict-free; renameObj may still fail. +- // So the retry loop should be around renameObj, and we shouldn't +- // bother with scopes here. 
+- for fileScope.Lookup(localName) != nil || pkgScope.Lookup(localName) != nil { +- try++ +- localName = fmt.Sprintf("%s%d", newName, try) +- } +- +- // renameObj detects various conflicts, including: +- // - new name conflicts with a package-level decl in this file; +- // - new name hides a package-level decl in another file that +- // is actually referenced in this file; +- // - new name hides a built-in that is actually referenced +- // in this file; +- // - a reference in this file to the old package name would +- // become shadowed by an intervening declaration that +- // uses the new name. +- // It returns the edits if no conflict was detected. +- editMap, _, err := renameObjects(localName, pkg, pkgname) +- if err != nil { +- return err +- } +- +- // If the chosen local package name matches the package's +- // new name, delete the change that would have inserted +- // an explicit local name, which is always the lexically +- // first change. +- if localName == string(newName) { +- edits, ok := editMap[uri] +- if !ok { +- return fmt.Errorf("internal error: no changes for %s", uri) +- } +- diff.SortEdits(edits) +- editMap[uri] = edits[1:] +- } +- for uri, edits := range editMap { +- allEdits[uri] = append(allEdits[uri], edits...) +- } +- } +- } +- } +- return nil +-} +- +-// renameObjects computes the edits to the type-checked syntax package pkg +-// required to rename a set of target objects to newName. +-// +-// It also returns the set of objects that were found (due to +-// corresponding methods and embedded fields) to require renaming as a +-// consequence of the requested renamings. +-// +-// It returns an error if the renaming would cause a conflict. +-func renameObjects(newName string, pkg *cache.Package, targets ...types.Object) (map[protocol.DocumentURI][]diff.Edit, map[types.Object]bool, error) { +- r := renamer{ +- pkg: pkg, +- objsToUpdate: make(map[types.Object]bool), +- from: targets[0].Name(), +- to: newName, +- } +- +- // A renaming initiated at an interface method indicates the +- // intention to rename abstract and concrete methods as needed +- // to preserve assignability. +- // TODO(adonovan): pull this into the caller. +- for _, obj := range targets { +- if obj, ok := obj.(*types.Func); ok { +- recv := obj.Signature().Recv() +- if recv != nil && types.IsInterface(recv.Type().Underlying()) { +- r.changeMethods = true +- break +- } +- } +- } +- +- // Check that the renaming of the identifier is ok. +- for _, obj := range targets { +- r.check(obj) +- if len(r.conflicts) > 0 { +- // Stop at first error. +- return nil, nil, fmt.Errorf("%s", strings.Join(r.conflicts, "\n")) +- } +- } +- +- editMap, err := r.update() +- if err != nil { +- return nil, nil, err +- } +- +- // Remove initial targets so that only 'consequences' remain. +- for _, obj := range targets { +- delete(r.objsToUpdate, obj) +- } +- return editMap, r.objsToUpdate, nil +-} +- +-// Rename all references to the target objects. +-func (r *renamer) update() (map[protocol.DocumentURI][]diff.Edit, error) { +- result := make(map[protocol.DocumentURI][]diff.Edit) +- +- // shouldUpdate reports whether obj is one of (or an +- // instantiation of one of) the target objects. +- shouldUpdate := func(obj types.Object) bool { +- return containsOrigin(r.objsToUpdate, obj) +- } +- +- // Find all identifiers in the package that define or use a +- // renamed object. We iterate over info as it is more efficient +- // than calling ast.Inspect for each of r.pkg.CompiledGoFiles(). 
+- type item struct { +- node ast.Node // Ident, ImportSpec (obj=PkgName), or CaseClause (obj=Var) +- obj types.Object +- isDef bool +- } +- var items []item +- info := r.pkg.TypesInfo() +- for id, obj := range info.Uses { +- if shouldUpdate(obj) { +- items = append(items, item{id, obj, false}) +- } +- } +- for id, obj := range info.Defs { +- if shouldUpdate(obj) { +- items = append(items, item{id, obj, true}) +- } +- } +- for node, obj := range info.Implicits { +- if shouldUpdate(obj) { +- switch node.(type) { +- case *ast.ImportSpec, *ast.CaseClause: +- items = append(items, item{node, obj, true}) +- } +- } +- } +- sort.Slice(items, func(i, j int) bool { +- return items[i].node.Pos() < items[j].node.Pos() +- }) +- +- // Update each identifier, and its doc comment if it is a declaration. +- for _, item := range items { +- pgf, err := r.pkg.FileEnclosing(item.node.Pos()) +- if err != nil { +- bug.Reportf("edit does not belong to syntax of package %q: %v", r.pkg, err) +- continue +- } +- +- // Renaming a types.PkgName may result in the addition or removal of an identifier, +- // so we deal with this separately. +- if pkgName, ok := item.obj.(*types.PkgName); ok && item.isDef { +- edit, err := r.updatePkgName(pgf, pkgName) +- if err != nil { +- return nil, err +- } +- result[pgf.URI] = append(result[pgf.URI], edit) +- continue +- } +- +- // Workaround the unfortunate lack of a Var object +- // for x in "switch x := expr.(type) {}" by adjusting +- // the case clause to the switch ident. +- // This may result in duplicate edits, but we de-dup later. +- if _, ok := item.node.(*ast.CaseClause); ok { +- path, _ := astutil.PathEnclosingInterval(pgf.File, item.obj.Pos(), item.obj.Pos()) +- item.node = path[0].(*ast.Ident) +- } +- +- // Replace the identifier with r.to. +- edit, err := posEdit(pgf.Tok, item.node.Pos(), item.node.End(), r.to) +- if err != nil { +- return nil, err +- } +- +- result[pgf.URI] = append(result[pgf.URI], edit) +- +- if !item.isDef { // uses do not have doc comments to update. +- continue +- } +- +- doc := docComment(pgf, item.node.(*ast.Ident)) +- if doc == nil { +- continue +- } +- +- // Perform the rename in doc comments declared in the original package. +- // go/parser strips out \r\n returns from the comment text, so go +- // line-by-line through the comment text to get the correct positions. +- docRegexp := regexp.MustCompile(`\b` + r.from + `\b`) // valid identifier => valid regexp +- for _, comment := range doc.List { +- if isDirective(comment.Text) { +- continue +- } +- // TODO(adonovan): why are we looping over lines? +- // Just run the loop body once over the entire multiline comment. +- lines := strings.Split(comment.Text, "\n") +- tokFile := pgf.Tok +- commentLine := safetoken.Line(tokFile, comment.Pos()) +- uri := protocol.URIFromPath(tokFile.Name()) +- for i, line := range lines { +- lineStart := comment.Pos() +- if i > 0 { +- lineStart = tokFile.LineStart(commentLine + i) +- } +- for _, locs := range docRegexp.FindAllIndex([]byte(line), -1) { +- edit, err := posEdit(tokFile, lineStart+token.Pos(locs[0]), lineStart+token.Pos(locs[1]), r.to) +- if err != nil { +- return nil, err // can't happen +- } +- result[uri] = append(result[uri], edit) +- } +- } +- } +- } +- +- docLinkEdits, err := r.updateCommentDocLinks() +- if err != nil { +- return nil, err +- } +- for uri, edits := range docLinkEdits { +- result[uri] = append(result[uri], edits...) 
+- } +- +- return result, nil +-} +- +-// updateCommentDocLinks updates each doc comment in the package +-// that refers to one of the renamed objects using a doc link +-// (https://golang.org/doc/comment#doclinks) such as "[pkg.Type.Method]". +-func (r *renamer) updateCommentDocLinks() (map[protocol.DocumentURI][]diff.Edit, error) { +- result := make(map[protocol.DocumentURI][]diff.Edit) +- var docRenamers []*docLinkRenamer +- for obj := range r.objsToUpdate { +- if _, ok := obj.(*types.PkgName); ok { +- // The dot package name will not be referenced +- if obj.Name() == "." { +- continue +- } +- +- docRenamers = append(docRenamers, &docLinkRenamer{ +- isDep: false, +- isPkgOrType: true, +- file: r.pkg.FileSet().File(obj.Pos()), +- regexp: docLinkPattern("", "", obj.Name(), true), +- to: r.to, +- }) +- continue +- } +- if !obj.Exported() { +- continue +- } +- recvName := "" +- // Doc links can reference only exported package-level objects +- // and methods of exported package-level named types. +- if !typesinternal.IsPackageLevel(obj) { +- obj, isFunc := obj.(*types.Func) +- if !isFunc { +- continue +- } +- recv := obj.Signature().Recv() +- if recv == nil { +- continue +- } +- _, named := typesinternal.ReceiverNamed(recv) +- if named == nil { +- continue +- } +- // Doc links can't reference interface methods. +- if types.IsInterface(named.Underlying()) { +- continue +- } +- name := named.Origin().Obj() +- if !name.Exported() || !typesinternal.IsPackageLevel(name) { +- continue +- } +- recvName = name.Name() +- } +- +- // Qualify objects from other packages. +- pkgName := "" +- if r.pkg.Types() != obj.Pkg() { +- pkgName = obj.Pkg().Name() +- } +- _, isTypeName := obj.(*types.TypeName) +- docRenamers = append(docRenamers, &docLinkRenamer{ +- isDep: r.pkg.Types() != obj.Pkg(), +- isPkgOrType: isTypeName, +- packagePath: obj.Pkg().Path(), +- packageName: pkgName, +- recvName: recvName, +- objName: obj.Name(), +- regexp: docLinkPattern(pkgName, recvName, obj.Name(), isTypeName), +- to: r.to, +- }) +- } +- for _, pgf := range r.pkg.CompiledGoFiles() { +- for _, d := range docRenamers { +- edits, err := d.update(pgf) +- if err != nil { +- return nil, err +- } +- if len(edits) > 0 { +- result[pgf.URI] = append(result[pgf.URI], edits...) +- } +- } +- } +- return result, nil +-} +- +-// docLinkPattern returns a regular expression that matches doclinks in comments. +-// It has one submatch that indicates the symbol to be updated. +-func docLinkPattern(pkgName, recvName, objName string, isPkgOrType bool) *regexp.Regexp { +- // The doc link may contain a leading star, e.g. [*bytes.Buffer]. +- pattern := `\[\*?` +- if pkgName != "" { +- pattern += pkgName + `\.` +- } +- if recvName != "" { +- pattern += recvName + `\.` +- } +- // The first submatch is object name. +- pattern += `(` + objName + `)` +- // If the object is a *types.TypeName or *types.PkgName, also need +- // match the objects referenced by them, so add `(\.\w+)*`. +- if isPkgOrType { +- pattern += `(?:\.\w+)*` +- } +- // There are two type of link in comments: +- // 1. url link. e.g. [text]: url +- // 2. doc link. e.g. [pkg.Name] +- // in order to only match the doc link, add `([^:]|$)` in the end. 
+- pattern += `\](?:[^:]|$)` +- +- return regexp.MustCompile(pattern) +-} +- +-// A docLinkRenamer renames doc links of forms such as these: +-// +-// [Func] +-// [pkg.Func] +-// [RecvType.Method] +-// [*Type] +-// [*pkg.Type] +-// [*pkg.RecvType.Method] +-type docLinkRenamer struct { +- isDep bool // object is from a dependency package +- isPkgOrType bool // object is *types.PkgName or *types.TypeName +- packagePath string +- packageName string // e.g. "pkg" +- recvName string // e.g. "RecvType" +- objName string // e.g. "Func", "Type", "Method" +- to string // new name +- regexp *regexp.Regexp +- +- file *token.File // enclosing file, if renaming *types.PkgName +-} +- +-// update updates doc links in the package level comments. +-func (r *docLinkRenamer) update(pgf *parsego.File) (result []diff.Edit, err error) { +- if r.file != nil && r.file != pgf.Tok { +- return nil, nil +- } +- pattern := r.regexp +- // If the object is in dependency package, +- // the imported name in the file may be different from the original package name +- if r.isDep { +- for _, spec := range pgf.File.Imports { +- importPath, _ := strconv.Unquote(spec.Path.Value) +- if importPath == r.packagePath { +- // Ignore blank imports +- if spec.Name == nil || spec.Name.Name == "_" || spec.Name.Name == "." { +- continue +- } +- if spec.Name.Name != r.packageName { +- pattern = docLinkPattern(spec.Name.Name, r.recvName, r.objName, r.isPkgOrType) +- } +- break +- } +- } +- } +- +- var edits []diff.Edit +- updateDocLinks := func(doc *ast.CommentGroup) error { +- if doc != nil { +- for _, c := range doc.List { +- for _, locs := range pattern.FindAllStringSubmatchIndex(c.Text, -1) { +- // The first submatch is the object name, so the locs[2:4] is the index of object name. +- edit, err := posEdit(pgf.Tok, c.Pos()+token.Pos(locs[2]), c.Pos()+token.Pos(locs[3]), r.to) +- if err != nil { +- return err +- } +- edits = append(edits, edit) +- } +- } +- } +- return nil +- } +- +- // Update package doc comments. +- err = updateDocLinks(pgf.File.Doc) +- if err != nil { +- return nil, err +- } +- for _, decl := range pgf.File.Decls { +- var doc *ast.CommentGroup +- switch decl := decl.(type) { +- case *ast.GenDecl: +- doc = decl.Doc +- case *ast.FuncDecl: +- doc = decl.Doc +- } +- err = updateDocLinks(doc) +- if err != nil { +- return nil, err +- } +- } +- return edits, nil +-} +- +-// docComment returns the doc for an identifier within the specified file. +-func docComment(pgf *parsego.File, id *ast.Ident) *ast.CommentGroup { +- nodes, _ := astutil.PathEnclosingInterval(pgf.File, id.Pos(), id.End()) +- for _, node := range nodes { +- switch decl := node.(type) { +- case *ast.FuncDecl: +- return decl.Doc +- case *ast.Field: +- return decl.Doc +- case *ast.GenDecl: +- return decl.Doc +- // For {Type,Value}Spec, if the doc on the spec is absent, +- // search for the enclosing GenDecl +- case *ast.TypeSpec: +- if decl.Doc != nil { +- return decl.Doc +- } +- case *ast.ValueSpec: +- if decl.Doc != nil { +- return decl.Doc +- } +- case *ast.Ident: +- case *ast.AssignStmt: +- // *ast.AssignStmt doesn't have an associated comment group. +- // So, we try to find a comment just before the identifier. +- +- // Try to find a comment group only for short variable declarations (:=). +- if decl.Tok != token.DEFINE { +- return nil +- } +- +- identLine := safetoken.Line(pgf.Tok, id.Pos()) +- for _, comment := range nodes[len(nodes)-1].(*ast.File).Comments { +- if comment.Pos() > id.Pos() { +- // Comment is after the identifier. 
+- continue +- } +- +- lastCommentLine := safetoken.Line(pgf.Tok, comment.End()) +- if lastCommentLine+1 == identLine { +- return comment +- } +- } +- default: +- return nil +- } +- } +- return nil +-} +- +-// docCommentPosToIdent returns the node whose doc comment contains pos, if any. +-// The pos must be within an occurrence of the identifier's name, otherwise it returns nil. +-func docCommentPosToIdent(pgf *parsego.File, pos token.Pos, cur inspector.Cursor) *ast.Ident { +- for curId := range cur.Preorder((*ast.Ident)(nil)) { +- id := curId.Node().(*ast.Ident) +- if pos > id.Pos() { +- continue // Doc comments are not located after an ident. +- } +- doc := docComment(pgf, id) +- if doc == nil || !(doc.Pos() <= pos && pos < doc.End()) { +- continue +- } +- +- docRegexp := regexp.MustCompile(`\b` + id.Name + `\b`) +- for _, comment := range doc.List { +- if isDirective(comment.Text) || !(comment.Pos() <= pos && pos < comment.End()) { +- continue +- } +- start := comment.Pos() +- text, err := pgf.NodeText(comment) +- if err != nil { +- return nil +- } +- for _, locs := range docRegexp.FindAllIndex(text, -1) { +- matchStart := start + token.Pos(locs[0]) +- matchEnd := start + token.Pos(locs[1]) +- if matchStart <= pos && pos <= matchEnd { +- return id +- } +- } +- } +- } +- return nil +-} +- +-// updatePkgName returns the updates to rename a pkgName in the import spec by +-// only modifying the package name portion of the import declaration. +-func (r *renamer) updatePkgName(pgf *parsego.File, pkgName *types.PkgName) (diff.Edit, error) { +- // Modify ImportSpec syntax to add or remove the Name as needed. +- path, _ := astutil.PathEnclosingInterval(pgf.File, pkgName.Pos(), pkgName.Pos()) +- if len(path) < 2 { +- return diff.Edit{}, fmt.Errorf("no path enclosing interval for %s", pkgName.Name()) +- } +- spec, ok := path[1].(*ast.ImportSpec) +- if !ok { +- return diff.Edit{}, fmt.Errorf("failed to update PkgName for %s", pkgName.Name()) +- } +- +- newText := "" +- if pkgName.Imported().Name() != r.to { +- newText = r.to + " " +- } +- +- // Replace the portion (possibly empty) of the spec before the path: +- // local "path" or "path" +- // -> <- -><- +- return posEdit(pgf.Tok, spec.Pos(), spec.Path.Pos(), newText) +-} +- +-// parsePackageNameDecl is a convenience function that parses and +-// returns the package name declaration of file fh, and reports +-// whether the position ppos lies within it. +-// +-// Note: also used by references. +-func parsePackageNameDecl(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle, ppos protocol.Position) (*parsego.File, bool, error) { +- pgf, err := snapshot.ParseGo(ctx, fh, parsego.Header) +- if err != nil { +- return nil, false, err +- } +- // Careful: because we used parsego.Header, +- // pgf.Pos(ppos) may be beyond EOF => (0, err). +- pos, _ := pgf.PositionPos(ppos) +- return pgf, internalastutil.NodeContains(pgf.File.Name, pos), nil +-} +- +-// posEdit returns an edit to replace the (start, end) range of tf with 'new'. 
+-func posEdit(tf *token.File, start, end token.Pos, new string) (diff.Edit, error) { +- startOffset, endOffset, err := safetoken.Offsets(tf, start, end) +- if err != nil { +- return diff.Edit{}, err +- } +- return diff.Edit{Start: startOffset, End: endOffset, New: new}, nil +-} +diff -urN a/gopls/internal/golang/semtok.go b/gopls/internal/golang/semtok.go +--- a/gopls/internal/golang/semtok.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/golang/semtok.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,937 +0,0 @@ +-// Copyright 2024 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package golang +- +-// This file defines the Semantic Tokens operation for Go source. +- +-import ( +- "bytes" +- "context" +- "errors" +- "fmt" +- "go/ast" +- "go/token" +- "go/types" +- "log" +- "path/filepath" +- "regexp" +- "slices" +- "strconv" +- "strings" +- "time" +- +- "golang.org/x/tools/gopls/internal/cache" +- "golang.org/x/tools/gopls/internal/cache/metadata" +- "golang.org/x/tools/gopls/internal/cache/parsego" +- "golang.org/x/tools/gopls/internal/file" +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/gopls/internal/protocol/semtok" +- "golang.org/x/tools/gopls/internal/util/bug" +- "golang.org/x/tools/gopls/internal/util/safetoken" +- "golang.org/x/tools/internal/astutil" +- "golang.org/x/tools/internal/event" +- "golang.org/x/tools/internal/fmtstr" +-) +- +-// semDebug enables comprehensive logging of decisions +-// (gopls semtok foo.go > /dev/null shows log output). +-// It should never be true in checked-in code. +-const semDebug = false +- +-func SemanticTokens(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle, rng *protocol.Range) (*protocol.SemanticTokens, error) { +- pkg, pgf, err := NarrowestPackageForFile(ctx, snapshot, fh.URI()) +- if err != nil { +- return nil, err +- } +- +- // Select range. +- var start, end token.Pos +- if rng != nil { +- var err error +- start, end, err = pgf.RangePos(*rng) +- if err != nil { +- return nil, err // e.g. invalid range +- } +- } else { +- tok := pgf.Tok +- start, end = tok.Pos(0), tok.Pos(tok.Size()) // entire file +- } +- +- // Reject full semantic token requests for large files. +- // +- // The LSP says that errors for the semantic token requests +- // should only be returned for exceptions (a word not +- // otherwise defined). This code treats a too-large file as an +- // exception. On parse errors, the code does what it can. 
+- const maxFullFileSize = 100000 +- if int(end-start) > maxFullFileSize { +- return nil, fmt.Errorf("semantic tokens: range %s too large (%d > %d)", +- fh.URI().Path(), end-start, maxFullFileSize) +- } +- +- tv := tokenVisitor{ +- ctx: ctx, +- metadataSource: snapshot, +- metadata: pkg.Metadata(), +- info: pkg.TypesInfo(), +- fset: pkg.FileSet(), +- pkg: pkg, +- pgf: pgf, +- start: start, +- end: end, +- } +- tv.visit() +- return &protocol.SemanticTokens{ +- Data: semtok.Encode( +- tv.tokens, +- snapshot.Options().EnabledSemanticTokenTypes(), +- snapshot.Options().EnabledSemanticTokenModifiers()), +- ResultID: time.Now().String(), // for delta requests, but we've never seen any +- }, nil +-} +- +-type tokenVisitor struct { +- // inputs +- ctx context.Context // for event logging +- metadataSource metadata.Source // used to resolve imports +- metadata *metadata.Package +- info *types.Info +- fset *token.FileSet +- pkg *cache.Package +- pgf *parsego.File +- start, end token.Pos // range of interest +- +- // working state +- stack []ast.Node // path from root of the syntax tree +- tokens []semtok.Token // computed sequence of semantic tokens +-} +- +-func (tv *tokenVisitor) visit() { +- f := tv.pgf.File +- // may not be in range, but harmless +- tv.token(f.Package, len("package"), semtok.TokKeyword) +- if f.Name != nil { +- tv.token(f.Name.NamePos, len(f.Name.Name), semtok.TokNamespace) +- } +- for _, decl := range f.Decls { +- // Only look at the decls that overlap the range. +- if decl.End() <= tv.start || decl.Pos() >= tv.end { +- continue +- } +- ast.Inspect(decl, tv.inspect) +- } +- +- // Scan all files for imported pkgs, ignore the ambiguous pkg. +- // This is to be consistent with the behavior in [go/doc]: https://pkg.go.dev/pkg/go/doc. +- importByName := make(map[string]*types.PkgName) +- for _, pgf := range tv.pkg.CompiledGoFiles() { +- for _, imp := range pgf.File.Imports { +- if obj := tv.pkg.TypesInfo().PkgNameOf(imp); obj != nil { +- if old, ok := importByName[obj.Name()]; ok { +- if old != nil && old.Imported() != obj.Imported() { +- importByName[obj.Name()] = nil // nil => ambiguous across files +- } +- continue +- } +- importByName[obj.Name()] = obj +- } +- } +- } +- +- for _, cg := range f.Comments { +- for _, c := range cg.List { +- // Only look at the comment that overlap the range. +- if c.End() <= tv.start || c.Pos() >= tv.end { +- continue +- } +- tv.comment(c, importByName) +- } +- } +-} +- +-// Matches (for example) "[F]", "[*p.T]", "[p.T.M]" +-// unless followed by a colon (exclude url link, e.g. "[go]: https://go.dev"). +-// The first group is reference name. e.g. The first group of "[*p.T.M]" is "p.T.M". +-var docLinkRegex = regexp.MustCompile(`\[\*?([\pL_][\pL_0-9]*(\.[\pL_][\pL_0-9]*){0,2})](?:[^:]|$)`) +- +-// comment emits semantic tokens for a comment. +-// If the comment contains doc links or "go:" directives, +-// it emits a separate token for each link or directive and +-// each comment portion between them. +-func (tv *tokenVisitor) comment(c *ast.Comment, importByName map[string]*types.PkgName) { +- if strings.HasPrefix(c.Text, "//go:") { +- tv.godirective(c) +- return +- } +- +- pkgScope := tv.pkg.Types().Scope() +- // lookupObjects interprets the name in various forms +- // (X, p.T, p.T.M, etc) and return the list of symbols +- // denoted by each identifier in the dotted list. 
+- lookupObjects := func(name string) (objs []types.Object) { +- scope := pkgScope +- if pkg, suffix, ok := strings.Cut(name, "."); ok { +- if obj, _ := importByName[pkg]; obj != nil { +- objs = append(objs, obj) +- scope = obj.Imported().Scope() +- name = suffix +- } +- } +- +- if recv, method, ok := strings.Cut(name, "."); ok { +- obj, ok := scope.Lookup(recv).(*types.TypeName) +- if !ok { +- return nil +- } +- objs = append(objs, obj) +- t, ok := obj.Type().(*types.Named) +- if !ok { +- return nil +- } +- m, _, _ := types.LookupFieldOrMethod(t, true, tv.pkg.Types(), method) +- if m == nil { +- return nil +- } +- objs = append(objs, m) +- return objs +- } else { +- obj := scope.Lookup(name) +- if obj == nil { +- return nil +- } +- if _, ok := obj.(*types.PkgName); !ok && !obj.Exported() { +- return nil +- } +- objs = append(objs, obj) +- return objs +- +- } +- } +- +- pos := c.Pos() +- for line := range strings.SplitSeq(c.Text, "\n") { +- last := 0 +- +- for _, idx := range docLinkRegex.FindAllStringSubmatchIndex(line, -1) { +- // The first group is the reference name. e.g. "X", "p.T", "p.T.M". +- name := line[idx[2]:idx[3]] +- if objs := lookupObjects(name); len(objs) > 0 { +- if last < idx[2] { +- tv.token(pos+token.Pos(last), idx[2]-last, semtok.TokComment) +- } +- offset := pos + token.Pos(idx[2]) +- for i, obj := range objs { +- if i > 0 { +- tv.token(offset, len("."), semtok.TokComment) +- offset += token.Pos(len(".")) +- } +- id, rest, _ := strings.Cut(name, ".") +- name = rest +- tok, mods := tv.appendObjectModifiers(nil, obj) +- tv.token(offset, len(id), tok, mods...) +- offset += token.Pos(len(id)) +- } +- last = idx[3] +- } +- } +- if last != len(c.Text) { +- tv.token(pos+token.Pos(last), len(line)-last, semtok.TokComment) +- } +- pos += token.Pos(len(line) + 1) +- } +-} +- +-// token emits a token of the specified extent and semantics. +-func (tv *tokenVisitor) token(start token.Pos, length int, typ semtok.Type, modifiers ...semtok.Modifier) { +- if !start.IsValid() { +- return +- } +- if length <= 0 { +- return // vscode doesn't like 0-length Tokens +- } +- end := start + token.Pos(length) +- if start >= tv.end || end <= tv.start { +- return +- } +- // want a line and column from start (in LSP coordinates). Ignore line directives. +- rng, err := tv.pgf.PosRange(start, end) +- if err != nil { +- event.Error(tv.ctx, "failed to convert to range", err) +- return +- } +- if rng.End.Line != rng.Start.Line { +- // this happens if users are typing at the end of the file, but report nothing +- return +- } +- tv.tokens = append(tv.tokens, semtok.Token{ +- Line: rng.Start.Line, +- Start: rng.Start.Character, +- Len: rng.End.Character - rng.Start.Character, // (on same line) +- Type: typ, +- Modifiers: modifiers, +- }) +-} +- +-// strStack converts the stack to a string, for debugging and error messages. 
+-func (tv *tokenVisitor) strStack() string { +- msg := []string{"["} +- for i := len(tv.stack) - 1; i >= 0; i-- { +- n := tv.stack[i] +- msg = append(msg, strings.TrimPrefix(fmt.Sprintf("%T", n), "*ast.")) +- } +- if len(tv.stack) > 0 { +- pos := tv.stack[len(tv.stack)-1].Pos() +- if _, err := safetoken.Offset(tv.pgf.Tok, pos); err != nil { +- msg = append(msg, fmt.Sprintf("invalid position %v for %s", pos, tv.pgf.URI)) +- } else { +- posn := safetoken.Position(tv.pgf.Tok, pos) +- msg = append(msg, fmt.Sprintf("(%s:%d,col:%d)", +- filepath.Base(posn.Filename), posn.Line, posn.Column)) +- } +- } +- msg = append(msg, "]") +- return strings.Join(msg, " ") +-} +- +-// srcLine returns the source text for n (truncated at first newline). +-func (tv *tokenVisitor) srcLine(n ast.Node) string { +- file := tv.pgf.Tok +- line := safetoken.Line(file, n.Pos()) +- start, err := safetoken.Offset(file, file.LineStart(line)) +- if err != nil { +- return "" +- } +- end := start +- for ; end < len(tv.pgf.Src) && tv.pgf.Src[end] != '\n'; end++ { +- +- } +- return string(tv.pgf.Src[start:end]) +-} +- +-func (tv *tokenVisitor) inspect(n ast.Node) (descend bool) { +- if n == nil { +- tv.stack = tv.stack[:len(tv.stack)-1] // pop +- return true +- } +- tv.stack = append(tv.stack, n) // push +- defer func() { +- if !descend { +- tv.stack = tv.stack[:len(tv.stack)-1] // pop +- } +- }() +- +- switch n := n.(type) { +- case *ast.ArrayType: +- case *ast.AssignStmt: +- tv.token(n.TokPos, len(n.Tok.String()), semtok.TokOperator) +- case *ast.BasicLit: +- if n.Kind == token.STRING { +- if strings.Contains(n.Value, "\n") { +- // has to be a string. +- tv.multiline(n.Pos(), n.End(), semtok.TokString) +- } else if !tv.formatString(n) { +- // not a format string, color the whole as a TokString. 
+- tv.token(n.Pos(), len(n.Value), semtok.TokString) +- } +- } else { +- tv.token(n.Pos(), len(n.Value), semtok.TokNumber) +- } +- case *ast.BinaryExpr: +- tv.token(n.OpPos, len(n.Op.String()), semtok.TokOperator) +- case *ast.BlockStmt: +- case *ast.BranchStmt: +- tv.token(n.TokPos, len(n.Tok.String()), semtok.TokKeyword) +- case *ast.CallExpr: +- if n.Ellipsis.IsValid() { +- tv.token(n.Ellipsis, len("..."), semtok.TokOperator) +- } +- case *ast.CaseClause: +- iam := "case" +- if n.List == nil { +- iam = "default" +- } +- tv.token(n.Case, len(iam), semtok.TokKeyword) +- case *ast.ChanType: +- // chan | chan <- | <- chan +- switch { +- case n.Arrow == token.NoPos: +- tv.token(n.Begin, len("chan"), semtok.TokKeyword) +- case n.Arrow == n.Begin: +- tv.token(n.Arrow, 2, semtok.TokOperator) +- pos := tv.findKeyword("chan", n.Begin+2, n.Value.Pos()) +- tv.token(pos, len("chan"), semtok.TokKeyword) +- case n.Arrow != n.Begin: +- tv.token(n.Begin, len("chan"), semtok.TokKeyword) +- tv.token(n.Arrow, 2, semtok.TokOperator) +- } +- case *ast.CommClause: +- length := len("case") +- if n.Comm == nil { +- length = len("default") +- } +- tv.token(n.Case, length, semtok.TokKeyword) +- case *ast.CompositeLit: +- case *ast.DeclStmt: +- case *ast.DeferStmt: +- tv.token(n.Defer, len("defer"), semtok.TokKeyword) +- case *ast.Ellipsis: +- tv.token(n.Ellipsis, len("..."), semtok.TokOperator) +- case *ast.EmptyStmt: +- case *ast.ExprStmt: +- case *ast.Field: +- case *ast.FieldList: +- case *ast.ForStmt: +- tv.token(n.For, len("for"), semtok.TokKeyword) +- case *ast.FuncDecl: +- case *ast.FuncLit: +- case *ast.FuncType: +- if n.Func != token.NoPos { +- tv.token(n.Func, len("func"), semtok.TokKeyword) +- } +- case *ast.GenDecl: +- tv.token(n.TokPos, len(n.Tok.String()), semtok.TokKeyword) +- case *ast.GoStmt: +- tv.token(n.Go, len("go"), semtok.TokKeyword) +- case *ast.Ident: +- tv.ident(n) +- case *ast.IfStmt: +- tv.token(n.If, len("if"), semtok.TokKeyword) +- if n.Else != nil { +- // x.Body.End() or x.Body.End()+1, not that it matters +- pos := tv.findKeyword("else", n.Body.End(), n.Else.Pos()) +- tv.token(pos, len("else"), semtok.TokKeyword) +- } +- case *ast.ImportSpec: +- tv.importSpec(n) +- return false +- case *ast.IncDecStmt: +- tv.token(n.TokPos, len(n.Tok.String()), semtok.TokOperator) +- case *ast.IndexExpr: +- case *ast.IndexListExpr: +- case *ast.InterfaceType: +- tv.token(n.Interface, len("interface"), semtok.TokKeyword) +- case *ast.KeyValueExpr: +- case *ast.LabeledStmt: +- case *ast.MapType: +- tv.token(n.Map, len("map"), semtok.TokKeyword) +- case *ast.ParenExpr: +- case *ast.RangeStmt: +- tv.token(n.For, len("for"), semtok.TokKeyword) +- // x.TokPos == token.NoPos is legal (for range foo {}) +- offset := n.TokPos +- if offset == token.NoPos { +- offset = n.For +- } +- pos := tv.findKeyword("range", offset, n.X.Pos()) +- tv.token(pos, len("range"), semtok.TokKeyword) +- case *ast.ReturnStmt: +- tv.token(n.Return, len("return"), semtok.TokKeyword) +- case *ast.SelectStmt: +- tv.token(n.Select, len("select"), semtok.TokKeyword) +- case *ast.SelectorExpr: +- case *ast.SendStmt: +- tv.token(n.Arrow, len("<-"), semtok.TokOperator) +- case *ast.SliceExpr: +- case *ast.StarExpr: +- tv.token(n.Star, len("*"), semtok.TokOperator) +- case *ast.StructType: +- tv.token(n.Struct, len("struct"), semtok.TokKeyword) +- case *ast.SwitchStmt: +- tv.token(n.Switch, len("switch"), semtok.TokKeyword) +- case *ast.TypeAssertExpr: +- if n.Type == nil { +- pos := tv.findKeyword("type", n.Lparen, n.Rparen) +- 
tv.token(pos, len("type"), semtok.TokKeyword) +- } +- case *ast.TypeSpec: +- case *ast.TypeSwitchStmt: +- tv.token(n.Switch, len("switch"), semtok.TokKeyword) +- case *ast.UnaryExpr: +- tv.token(n.OpPos, len(n.Op.String()), semtok.TokOperator) +- case *ast.ValueSpec: +- // things only seen with parsing or type errors, so ignore them +- case *ast.BadDecl, *ast.BadExpr, *ast.BadStmt: +- return false +- // not going to see these +- case *ast.File, *ast.Package: +- tv.errorf("implement %T %s", n, safetoken.Position(tv.pgf.Tok, n.Pos())) +- // other things we knowingly ignore +- case *ast.Comment, *ast.CommentGroup: +- return false +- default: +- tv.errorf("failed to implement %T", n) +- } +- return true +-} +- +-// formatString tries to report directives and string literals +-// inside a (possible) printf-like call, it returns false and does nothing +-// if the string is not a format string. +-func (tv *tokenVisitor) formatString(lit *ast.BasicLit) bool { +- if len(tv.stack) <= 1 { +- return false +- } +- call, ok := tv.stack[len(tv.stack)-2].(*ast.CallExpr) +- if !ok { +- return false +- } +- lastNonVariadic, idx := formatStringAndIndex(tv.info, call) +- if idx == -1 || lit != lastNonVariadic { +- return false +- } +- format, err := strconv.Unquote(lit.Value) +- if err != nil { +- return false +- } +- if !strings.Contains(format, "%") { +- return false +- } +- operations, err := fmtstr.Parse(format, idx) +- if err != nil { +- return false +- } +- +- // It's a format string, compute interleaved sub range of directives and literals. +- // pos tracks literal substring position within the overall BasicLit. +- pos := lit.ValuePos +- for _, op := range operations { +- // Skip "%%". +- if op.Verb.Verb == '%' { +- continue +- } +- rangeStart, rangeEnd, err := astutil.RangeInStringLiteral(lit, op.Range.Start, op.Range.End) +- if err != nil { +- return false +- } +- // Report literal substring. +- tv.token(pos, int(rangeStart-pos), semtok.TokString) +- // Report formatting directive. +- tv.token(rangeStart, int(rangeEnd-rangeStart), semtok.TokString, semtok.ModFormat) +- pos = rangeEnd +- } +- // Report remaining literal substring. +- tv.token(pos, int(lit.End()-pos), semtok.TokString) +- return true +-} +- +-func (tv *tokenVisitor) appendObjectModifiers(mods []semtok.Modifier, obj types.Object) (semtok.Type, []semtok.Modifier) { +- if obj.Pkg() == nil { +- mods = append(mods, semtok.ModDefaultLibrary) +- } +- +- // Note: PkgName, Builtin, Label have type Invalid, which adds no modifiers. 
+- mods = appendTypeModifiers(mods, obj.Type()) +- +- switch obj := obj.(type) { +- case *types.PkgName: +- return semtok.TokNamespace, mods +- +- case *types.Builtin: +- return semtok.TokFunction, mods +- +- case *types.Func: +- if obj.Signature().Recv() != nil { +- return semtok.TokMethod, mods +- } else { +- return semtok.TokFunction, mods +- } +- +- case *types.TypeName: +- if is[*types.TypeParam](types.Unalias(obj.Type())) { +- return semtok.TokTypeParam, mods +- } +- return semtok.TokType, mods +- +- case *types.Const: +- mods = append(mods, semtok.ModReadonly) +- return semtok.TokVariable, mods +- +- case *types.Var: +- if tv.isParam(obj.Pos()) { +- return semtok.TokParameter, mods +- } else { +- return semtok.TokVariable, mods +- } +- +- case *types.Label: +- return semtok.TokLabel, mods +- +- case *types.Nil: +- mods = append(mods, semtok.ModReadonly) +- return semtok.TokVariable, mods +- } +- +- panic(obj) +-} +- +-// appendTypeModifiers appends optional modifiers that describe the top-level +-// type constructor of t: "pointer", "map", etc. +-func appendTypeModifiers(mods []semtok.Modifier, t types.Type) []semtok.Modifier { +- // For a type parameter, don't report "interface". +- if is[*types.TypeParam](types.Unalias(t)) { +- return mods +- } +- +- switch t := t.Underlying().(type) { +- case *types.Interface: +- mods = append(mods, semtok.ModInterface) +- case *types.Struct: +- mods = append(mods, semtok.ModStruct) +- case *types.Signature: +- mods = append(mods, semtok.ModSignature) +- case *types.Pointer: +- mods = append(mods, semtok.ModPointer) +- case *types.Array: +- mods = append(mods, semtok.ModArray) +- case *types.Map: +- mods = append(mods, semtok.ModMap) +- case *types.Slice: +- mods = append(mods, semtok.ModSlice) +- case *types.Chan: +- mods = append(mods, semtok.ModChan) +- case *types.Basic: +- switch t.Kind() { +- case types.Invalid: +- // ignore (e.g. Builtin, PkgName, Label) +- case types.String: +- mods = append(mods, semtok.ModString) +- case types.Bool: +- mods = append(mods, semtok.ModBool) +- case types.UnsafePointer: +- mods = append(mods, semtok.ModPointer) +- default: +- if t.Info()&types.IsNumeric != 0 { +- mods = append(mods, semtok.ModNumber) +- } +- } +- } +- return mods +-} +- +-func (tv *tokenVisitor) ident(id *ast.Ident) { +- var ( +- tok semtok.Type +- mods []semtok.Modifier +- obj types.Object +- ok bool +- ) +- if obj, _ = tv.info.Defs[id]; obj != nil { +- // definition +- mods = append(mods, semtok.ModDefinition) +- tok, mods = tv.appendObjectModifiers(mods, obj) +- +- } else if obj, ok = tv.info.Uses[id]; ok { +- // use +- tok, mods = tv.appendObjectModifiers(mods, obj) +- +- } else if tok, mods = tv.unkIdent(id); tok != "" { +- // ok +- +- } else { +- return +- } +- +- // Emit a token for the identifier's extent. +- tv.token(id.Pos(), len(id.Name), tok, mods...) +- +- if semDebug { +- q := "nil" +- if obj != nil { +- q = fmt.Sprintf("%T", obj.Type()) // e.g. "*types.Map" +- } +- log.Printf(" use %s/%T/%s got %s %v (%s)", +- id.Name, obj, q, tok, mods, tv.strStack()) +- } +-} +- +-// isParam reports whether the position is that of a parameter name of +-// an enclosing function. 
+-func (tv *tokenVisitor) isParam(pos token.Pos) bool { +- for i := len(tv.stack) - 1; i >= 0; i-- { +- switch n := tv.stack[i].(type) { +- case *ast.FuncDecl: +- for _, f := range n.Type.Params.List { +- for _, id := range f.Names { +- if id.Pos() == pos { +- return true +- } +- } +- } +- case *ast.FuncLit: +- for _, f := range n.Type.Params.List { +- for _, id := range f.Names { +- if id.Pos() == pos { +- return true +- } +- } +- } +- } +- } +- return false +-} +- +-// unkIdent handles identifiers with no types.Object (neither use nor +-// def), use the parse stack. +-// A lot of these only happen when the package doesn't compile, +-// but in that case it is all best-effort from the parse tree. +-func (tv *tokenVisitor) unkIdent(id *ast.Ident) (semtok.Type, []semtok.Modifier) { +- def := []semtok.Modifier{semtok.ModDefinition} +- n := len(tv.stack) - 2 // parent of Ident; stack is [File ... Ident] +- if n < 0 { +- tv.errorf("no stack") // can't happen +- return "", nil +- } +- switch parent := tv.stack[n].(type) { +- case *ast.BinaryExpr, *ast.UnaryExpr, *ast.ParenExpr, *ast.StarExpr, +- *ast.IncDecStmt, *ast.SliceExpr, *ast.ExprStmt, *ast.IndexExpr, +- *ast.ReturnStmt, *ast.ChanType, *ast.SendStmt, +- *ast.ForStmt, // possibly incomplete +- *ast.IfStmt, /* condition */ +- *ast.KeyValueExpr, // either key or value +- *ast.IndexListExpr: +- return semtok.TokVariable, nil +- case *ast.Ellipsis: +- return semtok.TokType, nil +- case *ast.CaseClause: +- if n-2 >= 0 && is[ast.TypeSwitchStmt](tv.stack[n-2]) { +- return semtok.TokType, nil +- } +- return semtok.TokVariable, nil +- case *ast.ArrayType: +- if id == parent.Len { +- // or maybe a Type Param, but we can't just from the parse tree +- return semtok.TokVariable, nil +- } else { +- return semtok.TokType, nil +- } +- case *ast.MapType: +- return semtok.TokType, nil +- case *ast.CallExpr: +- if id == parent.Fun { +- return semtok.TokFunction, nil +- } +- return semtok.TokVariable, nil +- case *ast.SwitchStmt: +- return semtok.TokVariable, nil +- case *ast.TypeAssertExpr: +- if id == parent.X { +- return semtok.TokVariable, nil +- } else if id == parent.Type { +- return semtok.TokType, nil +- } +- case *ast.ValueSpec: +- if slices.Contains(parent.Names, id) { +- return semtok.TokVariable, def +- } +- for _, p := range parent.Values { +- if p == id { +- return semtok.TokVariable, nil +- } +- } +- return semtok.TokType, nil +- case *ast.SelectorExpr: // e.ti.Selections[nd] is nil, so no help +- if n-1 >= 0 { +- if ce, ok := tv.stack[n-1].(*ast.CallExpr); ok { +- // ... CallExpr SelectorExpr Ident (_.x()) +- if ce.Fun == parent && parent.Sel == id { +- return semtok.TokFunction, nil +- } +- } +- } +- return semtok.TokVariable, nil +- case *ast.AssignStmt: +- for _, p := range parent.Lhs { +- // x := ..., or x = ... +- if p == id { +- if parent.Tok != token.DEFINE { +- def = nil +- } +- return semtok.TokVariable, def // '_' in _ = ... 
+- } +- } +- // RHS, = x +- return semtok.TokVariable, nil +- case *ast.TypeSpec: // it's a type if it is either the Name or the Type +- if id == parent.Type { +- def = nil +- } +- return semtok.TokType, def +- case *ast.Field: +- // ident could be type in a field, or a method in an interface type, or a variable +- if id == parent.Type { +- return semtok.TokType, nil +- } +- if n > 2 && +- is[*ast.InterfaceType](tv.stack[n-2]) && +- is[*ast.FieldList](tv.stack[n-1]) { +- +- return semtok.TokMethod, def +- } +- return semtok.TokVariable, nil +- case *ast.LabeledStmt: +- if id == parent.Label { +- return semtok.TokLabel, def +- } +- case *ast.BranchStmt: +- if id == parent.Label { +- return semtok.TokLabel, nil +- } +- case *ast.CompositeLit: +- if parent.Type == id { +- return semtok.TokType, nil +- } +- return semtok.TokVariable, nil +- case *ast.RangeStmt: +- if parent.Tok != token.DEFINE { +- def = nil +- } +- return semtok.TokVariable, def +- case *ast.FuncDecl: +- return semtok.TokFunction, def +- default: +- tv.errorf("%T unexpected: %s %s%q", parent, id.Name, tv.strStack(), tv.srcLine(id)) +- } +- return "", nil +-} +- +-// multiline emits a multiline token (`string` or /*comment*/). +-func (tv *tokenVisitor) multiline(start, end token.Pos, tok semtok.Type) { +- // TODO(adonovan): test with non-ASCII. +- +- f := tv.fset.File(start) +- // the hard part is finding the lengths of lines. include the \n +- length := func(line int) int { +- n := f.LineStart(line) +- if line >= f.LineCount() { +- return f.Size() - int(n) +- } +- return int(f.LineStart(line+1) - n) +- } +- spos := safetoken.StartPosition(tv.fset, start) +- epos := safetoken.EndPosition(tv.fset, end) +- sline := spos.Line +- eline := epos.Line +- // first line is from spos.Column to end +- tv.token(start, length(sline)-spos.Column, tok) // leng(sline)-1 - (spos.Column-1) +- for i := sline + 1; i < eline; i++ { +- // intermediate lines are from 1 to end +- tv.token(f.LineStart(i), length(i)-1, tok) // avoid the newline +- } +- // last line is from 1 to epos.Column +- tv.token(f.LineStart(eline), epos.Column-1, tok) // columns are 1-based +-} +- +-// findKeyword returns the position of a keyword by searching within +-// the specified range, for when it cannot be exactly known from the AST. +-// It returns NoPos if the keyword was not present in the source due to parse error. +-func (tv *tokenVisitor) findKeyword(keyword string, start, end token.Pos) token.Pos { +- // TODO(adonovan): use safetoken.Offset. +- offset := int(start) - tv.pgf.Tok.Base() +- last := int(end) - tv.pgf.Tok.Base() +- buf := tv.pgf.Src +- idx := bytes.Index(buf[offset:last], []byte(keyword)) +- if idx < 0 { +- // Ill-formed code may form syntax trees without their usual tokens. +- // For example, "type _ <-<-chan int" parses as <-chan (chan int), +- // with two nested ChanTypes but only one chan keyword. +- return token.NoPos +- } +- return start + token.Pos(idx) +-} +- +-func (tv *tokenVisitor) importSpec(spec *ast.ImportSpec) { +- // a local package name or the last component of the Path +- if spec.Name != nil { +- name := spec.Name.String() +- if name != "_" && name != "." { +- tv.token(spec.Name.Pos(), len(name), semtok.TokNamespace) +- } +- return // don't mark anything for . or _ +- } +- importPath := metadata.UnquoteImportPath(spec) +- if importPath == "" { +- return +- } +- // Import strings are implementation defined. Try to match with parse information. 
+- depID := tv.metadata.DepsByImpPath[importPath] +- if depID == "" { +- return +- } +- depMD := tv.metadataSource.Metadata(depID) +- if depMD == nil { +- // unexpected, but impact is that maybe some import is not colored +- return +- } +- // Check whether the original literal contains the package's declared name. +- j := strings.LastIndex(spec.Path.Value, string(depMD.Name)) +- if j < 0 { +- // Package name does not match import path, so there is nothing to report. +- return +- } +- // Report virtual declaration at the position of the substring. +- start := spec.Path.Pos() + token.Pos(j) +- tv.token(start, len(depMD.Name), semtok.TokNamespace) +-} +- +-// errorf logs an error and reports a bug. +-func (tv *tokenVisitor) errorf(format string, args ...any) { +- msg := fmt.Sprintf(format, args...) +- bug.Report(msg) +- event.Error(tv.ctx, tv.strStack(), errors.New(msg)) +-} +- +-var godirectives = map[string]struct{}{ +- // https://pkg.go.dev/cmd/compile +- "noescape": {}, +- "uintptrescapes": {}, +- "noinline": {}, +- "norace": {}, +- "nosplit": {}, +- "linkname": {}, +- +- // https://pkg.go.dev/go/build +- "build": {}, +- "binary-only-package": {}, +- "embed": {}, +-} +- +-// Tokenize godirective at the start of the comment c, if any, and the surrounding comment. +-// If there is any failure, emits the entire comment as a TokComment token. +-// Directives are highlighted as-is, even if used incorrectly. Typically there are +-// dedicated analyzers that will warn about misuse. +-func (tv *tokenVisitor) godirective(c *ast.Comment) { +- // First check if '//go:directive args...' is a valid directive. +- directive, args, _ := strings.Cut(c.Text, " ") +- kind, _ := stringsCutPrefix(directive, "//go:") +- if _, ok := godirectives[kind]; !ok { +- // Unknown 'go:' directive. +- tv.token(c.Pos(), len(c.Text), semtok.TokComment) +- return +- } +- +- // Make the 'go:directive' part stand out, the rest is comments. +- tv.token(c.Pos(), len("//"), semtok.TokComment) +- +- directiveStart := c.Pos() + token.Pos(len("//")) +- tv.token(directiveStart, len(directive[len("//"):]), semtok.TokNamespace) +- +- if len(args) > 0 { +- tailStart := c.Pos() + token.Pos(len(directive)+len(" ")) +- tv.token(tailStart, len(args), semtok.TokComment) +- } +-} +- +-// Go 1.20 strings.CutPrefix. +-func stringsCutPrefix(s, prefix string) (after string, found bool) { +- if !strings.HasPrefix(s, prefix) { +- return s, false +- } +- return s[len(prefix):], true +-} +- +-func is[T any](x any) bool { +- _, ok := x.(T) +- return ok +-} +diff -urN a/gopls/internal/golang/signature_help.go b/gopls/internal/golang/signature_help.go +--- a/gopls/internal/golang/signature_help.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/golang/signature_help.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,216 +0,0 @@ +-// Copyright 2018 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. 
+- +-package golang +- +-import ( +- "context" +- "fmt" +- "go/ast" +- "go/token" +- "go/types" +- "strings" +- +- "golang.org/x/tools/go/ast/astutil" +- "golang.org/x/tools/gopls/internal/cache" +- "golang.org/x/tools/gopls/internal/file" +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/gopls/internal/settings" +- "golang.org/x/tools/internal/event" +- "golang.org/x/tools/internal/typesinternal" +-) +- +-// SignatureHelp returns information about the signature of the innermost +-// function call enclosing the position, or nil if there is none. +-func SignatureHelp(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle, params *protocol.SignatureHelpParams) (*protocol.SignatureInformation, error) { +- ctx, done := event.Start(ctx, "golang.SignatureHelp") +- defer done() +- +- // We need full type-checking here, as we must type-check function bodies in +- // order to provide signature help at the requested position. +- pkg, pgf, err := NarrowestPackageForFile(ctx, snapshot, fh.URI()) +- if err != nil { +- return nil, fmt.Errorf("getting file for SignatureHelp: %w", err) +- } +- pos, err := pgf.PositionPos(params.Position) +- if err != nil { +- return nil, err +- } +- // Find a call expression surrounding the query position. +- var callExpr *ast.CallExpr +- path, _ := astutil.PathEnclosingInterval(pgf.File, pos, pos) +- if path == nil { +- return nil, fmt.Errorf("cannot find node enclosing position") +- } +- info := pkg.TypesInfo() +- var fnval ast.Expr +-loop: +- for i, node := range path { +- switch node := node.(type) { +- case *ast.Ident: +- // If the selected text is a function/method Ident or SelectorExpr, +- // even one not in function call position, +- // show help for its signature. Example: +- // once.Do(initialize⁁) +- // should show help for initialize, not once.Do. +- if t := info.TypeOf(node); t != nil && +- info.Defs[node] == nil && +- is[*types.Signature](t.Underlying()) { +- if sel, ok := path[i+1].(*ast.SelectorExpr); ok && sel.Sel == node { +- fnval = sel // e.g. fmt.Println⁁ +- } else { +- fnval = node +- } +- break loop +- } +- case *ast.CallExpr: +- // Beware: the ')' may be missing. +- if node.Lparen <= pos && pos <= node.Rparen { +- callExpr = node +- fnval = callExpr.Fun +- break loop +- } +- case *ast.FuncLit, *ast.FuncType, *ast.CompositeLit: +- // The user is within an anonymous function or +- // a composite literal, which may be the argument +- // to the *ast.CallExpr. +- // Don't show signature help in this case. +- return nil, nil +- case *ast.BasicLit: +- // golang/go#43397: don't offer signature help when the user is typing +- // in a string literal unless it was manually invoked or help is already active. +- if node.Kind == token.STRING && +- (params.Context == nil || (params.Context.TriggerKind != protocol.SigInvoked && !params.Context.IsRetrigger)) { +- return nil, nil +- } +- } +- } +- +- if fnval == nil { +- return nil, nil +- } +- +- // Get the type information for the function being called. +- var sig *types.Signature +- if tv, ok := info.Types[fnval]; !ok { +- return nil, fmt.Errorf("cannot get type for Fun %[1]T (%[1]v)", fnval) +- } else if tv.IsType() { +- return nil, nil // a conversion, not a call +- } else if sig, ok = tv.Type.Underlying().(*types.Signature); !ok { +- return nil, fmt.Errorf("call operand is not a func or type: %[1]T (%[1]v)", fnval) +- } +- // Inv: sig != nil +- +- // Get the object representing the function, if available. 
+- // There is no object in certain cases such as calling a function returned by +- // a function (e.g. "foo()()"). +- var obj types.Object +- switch t := fnval.(type) { +- case *ast.Ident: +- obj = info.ObjectOf(t) +- case *ast.SelectorExpr: +- obj = info.ObjectOf(t.Sel) +- } +- +- if obj != nil && isBuiltin(obj) { +- // Special handling for error.Error, which is the only builtin method. +- if obj.Name() == "Error" { +- return &protocol.SignatureInformation{ +- Label: "Error() string", +- // TODO(skewb1k): move the docstring for error.Error to builtin.go and reuse it across all relevant LSP methods. +- Documentation: stringToSigInfoDocumentation("Error returns the error message.", snapshot.Options()), +- Parameters: nil, +- ActiveParameter: nil, +- }, nil +- } +- s, err := NewBuiltinSignature(ctx, snapshot, obj.Name()) +- if err != nil { +- return nil, err +- } +- return signatureInformation(s, snapshot.Options(), pos, callExpr) +- } +- +- mq := MetadataQualifierForFile(snapshot, pgf.File, pkg.Metadata()) +- qual := typesinternal.FileQualifier(pgf.File, pkg.Types()) +- var ( +- comment *ast.CommentGroup +- name string +- ) +- +- if obj != nil { +- comment, err = HoverDocForObject(ctx, snapshot, pkg.FileSet(), obj) +- if err != nil { +- return nil, err +- } +- name = obj.Name() +- } else { +- name = "func" +- } +- +- s, err := NewSignature(ctx, snapshot, pkg, sig, comment, qual, mq) +- if err != nil { +- return nil, err +- } +- s.name = name +- return signatureInformation(s, snapshot.Options(), pos, callExpr) +-} +- +-func signatureInformation(sig *signature, options *settings.Options, pos token.Pos, call *ast.CallExpr) (*protocol.SignatureInformation, error) { +- paramInfo := make([]protocol.ParameterInformation, 0, len(sig.params)) +- for _, p := range sig.params { +- paramInfo = append(paramInfo, protocol.ParameterInformation{Label: p}) +- } +- return &protocol.SignatureInformation{ +- Label: sig.name + sig.Format(), +- Documentation: stringToSigInfoDocumentation(sig.doc, options), +- Parameters: paramInfo, +- ActiveParameter: activeParameter(sig, pos, call), +- }, nil +-} +- +-// activeParameter returns a pointer to a variable containing +-// the index of the active parameter (if known), or nil otherwise. +-func activeParameter(sig *signature, pos token.Pos, call *ast.CallExpr) *uint32 { +- if call == nil { +- return nil +- } +- numParams := uint32(len(sig.params)) +- if numParams == 0 { +- return nil +- } +- // Check if the position is even in the range of the arguments. +- if !(call.Lparen < pos && pos <= call.Rparen) { +- return nil +- } +- +- var activeParam uint32 +- for _, arg := range call.Args { +- if pos <= arg.End() { +- break +- } +- // Don't advance the active parameter for the last parameter of a variadic function. +- if !sig.variadic || activeParam < numParams-1 { +- activeParam++ +- } +- } +- return &activeParam +-} +- +-func stringToSigInfoDocumentation(s string, options *settings.Options) *protocol.Or_SignatureInformation_documentation { +- v := s +- k := protocol.PlainText +- if options.PreferredContentFormat == protocol.Markdown { +- v = DocCommentToMarkdown(s, options) +- // whether or not content is newline terminated may not matter for LSP clients, +- // but our tests expect trailing newlines to be stripped. 
+- v = strings.TrimSuffix(v, "\n") // TODO(pjw): change the golden files +- k = protocol.Markdown +- } +- return &protocol.Or_SignatureInformation_documentation{ +- Value: protocol.MarkupContent{ +- Kind: k, +- Value: v, +- }, +- } +-} +diff -urN a/gopls/internal/golang/snapshot.go b/gopls/internal/golang/snapshot.go +--- a/gopls/internal/golang/snapshot.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/golang/snapshot.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,82 +0,0 @@ +-// Copyright 2018 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package golang +- +-import ( +- "context" +- "fmt" +- +- "golang.org/x/tools/gopls/internal/cache" +- "golang.org/x/tools/gopls/internal/cache/metadata" +- "golang.org/x/tools/gopls/internal/cache/parsego" +- "golang.org/x/tools/gopls/internal/protocol" +-) +- +-// NarrowestPackageForFile is a convenience function that selects the narrowest +-// non-ITV package to which this file belongs, type-checks it in the requested +-// mode (full or workspace), and returns it, along with the parse tree of that +-// file. +-// +-// The "narrowest" package is the one with the fewest number of files that +-// includes the given file. This solves the problem of test variants, as the +-// test will have more files than the non-test package. +-// +-// An intermediate test variant (ITV) package has identical source to a regular +-// package but resolves imports differently. gopls should never need to +-// type-check them. +-// +-// Type-checking is expensive. Call snapshot.ParseGo if all you need is a parse +-// tree, or snapshot.MetadataForFile if you only need metadata. +-func NarrowestPackageForFile(ctx context.Context, snapshot *cache.Snapshot, uri protocol.DocumentURI) (*cache.Package, *parsego.File, error) { +- return selectPackageForFile(ctx, snapshot, uri, func(metas []*metadata.Package) *metadata.Package { return metas[0] }) +-} +- +-// WidestPackageForFile is a convenience function that selects the widest +-// non-ITV package to which this file belongs, type-checks it in the requested +-// mode (full or workspace), and returns it, along with the parse tree of that +-// file. +-// +-// The "widest" package is the one with the most number of files that includes +-// the given file. Which is the test variant if one exists. +-// +-// An intermediate test variant (ITV) package has identical source to a regular +-// package but resolves imports differently. gopls should never need to +-// type-check them. +-// +-// Type-checking is expensive. Call snapshot.ParseGo if all you need is a parse +-// tree, or snapshot.MetadataForFile if you only need metadata. 
+-func WidestPackageForFile(ctx context.Context, snapshot *cache.Snapshot, uri protocol.DocumentURI) (*cache.Package, *parsego.File, error) { +- return selectPackageForFile(ctx, snapshot, uri, func(metas []*metadata.Package) *metadata.Package { return metas[len(metas)-1] }) +-} +- +-func selectPackageForFile(ctx context.Context, snapshot *cache.Snapshot, uri protocol.DocumentURI, selector func([]*metadata.Package) *metadata.Package) (*cache.Package, *parsego.File, error) { +- mps, err := snapshot.MetadataForFile(ctx, uri, true) +- if err != nil { +- return nil, nil, err +- } +- if len(mps) == 0 { +- return nil, nil, fmt.Errorf("no package metadata for file %s", uri) +- } +- mp := selector(mps) +- pkgs, err := snapshot.TypeCheck(ctx, mp.ID) +- if err != nil { +- return nil, nil, err +- } +- pkg := pkgs[0] +- pgf, err := pkg.File(uri) +- if err != nil { +- return nil, nil, err // "can't happen" +- } +- return pkg, pgf, err +-} +- +-type ( +- PackageID = metadata.PackageID +- PackagePath = metadata.PackagePath +- PackageName = metadata.PackageName +- ImportPath = metadata.ImportPath +-) +- +-type unit = struct{} +diff -urN a/gopls/internal/golang/splitpkg/graph.go b/gopls/internal/golang/splitpkg/graph.go +--- a/gopls/internal/golang/splitpkg/graph.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/golang/splitpkg/graph.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,101 +0,0 @@ +-// Copyright 2025 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package splitpkg +- +-// SCC algorithm stolen from cmd/digraph. +- +-type ( +- graph = map[int]map[int]bool +- nodeList = []int +- nodeSet = map[int]bool +-) +- +-// addNode ensures a node exists in the graph with an initialized edge set. +-func addNode(g graph, node int) map[int]bool { +- edges := g[node] +- if edges == nil { +- edges = make(map[int]bool) +- g[node] = edges +- } +- return edges +-} +- +-// addEdges adds one or more edges from a 'from' node. +-func addEdges(g graph, from int, to ...int) { +- edges := addNode(g, from) +- for _, toNode := range to { +- addNode(g, toNode) +- edges[toNode] = true +- } +-} +- +-// transpose creates the transpose (reverse) of the graph. +-func transpose(g graph) graph { +- rev := make(graph) +- for node, edges := range g { +- addNode(rev, node) // Ensure all nodes exist in the transposed graph +- for succ := range edges { +- addEdges(rev, succ, node) +- } +- } +- return rev +-} +- +-// sccs returns the non-trivial strongly connected components of the graph. +-func sccs(g graph) []nodeSet { +- // Kosaraju's algorithm---Tarjan is overkill here. +- // +- // TODO(adonovan): factor with Tarjan's algorithms from +- // go/ssa/dom.go, +- // go/callgraph/vta/propagation.go, +- // ../../cache/typerefs/refs.go, +- // ../../cache/metadata/graph.go. +- +- // Forward pass. +- S := make(nodeList, 0, len(g)) // postorder stack +- seen := make(nodeSet) +- var visit func(node int) +- visit = func(node int) { +- if !seen[node] { +- seen[node] = true +- for e := range g[node] { +- visit(e) +- } +- S = append(S, node) +- } +- } +- for node := range g { +- visit(node) +- } +- +- // Reverse pass. 
+- rev := transpose(g) +- var scc nodeSet +- seen = make(nodeSet) +- var rvisit func(node int) +- rvisit = func(node int) { +- if !seen[node] { +- seen[node] = true +- scc[node] = true +- for e := range rev[node] { +- rvisit(e) +- } +- } +- } +- var sccs []nodeSet +- for len(S) > 0 { +- top := S[len(S)-1] +- S = S[:len(S)-1] // pop +- if !seen[top] { +- scc = make(nodeSet) +- rvisit(top) +- if len(scc) == 1 && !g[top][top] { +- continue +- } +- sccs = append(sccs, scc) +- } +- } +- return sccs +-} +diff -urN a/gopls/internal/golang/splitpkg/splitpkg.go b/gopls/internal/golang/splitpkg/splitpkg.go +--- a/gopls/internal/golang/splitpkg/splitpkg.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/golang/splitpkg/splitpkg.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,512 +0,0 @@ +-// Copyright 2025 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package splitpkg +- +-// This file produces the "Split package" HTML report. +-// +-// The server persistently holds, for each PackageID, the current set +-// of components and the mapping from declared names to components. On +-// each page reload or JS reload() call, the server type-checks the +-// package, computes its symbol reference graph, projects it onto +-// components, then returns the component reference graph, and if it +-// is cyclic, which edges form cycles. Thus changes to the package +-// source are reflected in the client UI at the next page reload or JS +-// reload() event. +-// +-// See also: +-// - ../codeaction.go - offers the CodeAction command +-// - ../../server/command.go - handles the command by opening a web page +-// - ../../server/server.go - handles the HTTP request and calls this function +-// - ../../server/assets/splitpkg.js - client-side logic +-// - ../../test/integration/web/splitpkg_test.go - integration test of server +-// +-// TODO(adonovan): future work +-// +-// Refine symbol reference graph: +-// - deal with enums (values must stay together; implicit dependency on iota expression) +-// - deal with coupled vars "var x, y = f()" +-// - deal with declared methods (coupled to receiver named type) +-// - deal with fields/interface methods (loosely coupled to struct/interface type) +-// In both cases the field/method name must be either exported or in the same component. +-// +-// UI: +-// - make shift click extend selection of a range of checkboxes. +-// - display two-level grouping of decls and specs: var ( x int; y int ) +-// - indicate when package has type errors (data may be incomplete). +-// +-// Code transformation: +-// - add "Split" button that is green when acyclic. It should: +-// 1) move each component into a new package, or separate file of +-// the same package. (The UI will need to hold this user +-// intent in the list of components.) +-// 2) ensure that each declaration referenced from another package +-// is public, renaming as needed. +-// 3) update package decls, imports, package docs, file docs, +-// doc comments, etc. +-// Should we call this feature "Reorganize package" or "Decompose package" +-// until the "Split" button actually exists? 
+- +-import ( +- "bytes" +- "crypto/sha256" +- _ "embed" +- "encoding/json" +- "fmt" +- "go/ast" +- "go/token" +- "go/types" +- "html/template" +- "log" +- "strconv" +- "strings" +- +- "golang.org/x/tools/gopls/internal/cache" +- "golang.org/x/tools/gopls/internal/cache/metadata" +- "golang.org/x/tools/gopls/internal/filecache" +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/gopls/internal/util/moremaps" +- "golang.org/x/tools/gopls/internal/util/safetoken" +- "golang.org/x/tools/internal/typesinternal" +-) +- +-//go:embed splitpkg.html.tmpl +-var htmlTmpl string +- +-// HTML returns the HTML for the main "Split package" page, for the +-// /splitpkg endpoint. The real magic happens in JavaScript; see +-// ../../server/assets/splitpkg.js. +-func HTML(pkgpath metadata.PackagePath) []byte { +- t, err := template.New("splitpkg.html").Parse(htmlTmpl) +- if err != nil { +- log.Fatal(err) +- } +- data := struct { +- Title string +- }{ +- Title: fmt.Sprintf("Split package %s", pkgpath), +- } +- var buf bytes.Buffer +- if err := t.Execute(&buf, data); err != nil { +- log.Fatal(err) +- } +- return buf.Bytes() +-} +- +-const cacheKind = "splitpkg" // filecache kind +- +-func cacheKey(pkgID metadata.PackageID) [32]byte { +- return sha256.Sum256([]byte(pkgID)) +-} +- +-// UpdateComponentsJSON parses the JSON description of components and +-// their assigned declarations and updates the component state for the +-// specified package. +-func UpdateComponentsJSON(pkgID metadata.PackageID, data []byte) error { +- return filecache.Set(cacheKind, cacheKey(pkgID), data) +-} +- +-// Web is an abstraction of gopls' web server. +-type Web interface { +- // SrcURL forms URLs that cause the editor to open a file at a specific position. +- SrcURL(filename string, line, col8 int) protocol.URI +-} +- +-// JSON returns the JSON encoding of the data needed by +-// the /splitpkg-json endpoint for the specified package. It includes: +-// - the set of names declared by the package, grouped by file; +-// - the set of components and their assigned declarations from +-// the most recent call to [UpdateComponentsJSON]; and +-// - the component graph derived from them, along with the +-// sets of reference that give rise to each edge. +-func JSON(pkg *cache.Package, web Web) ([]byte, error) { +- // Retrieve package's most recent state from the file cache. +- var comp ComponentsJSON +- data, err := filecache.Get(cacheKind, cacheKey(pkg.Metadata().ID)) +- if err != nil { +- if err != filecache.ErrNotFound { +- return nil, err +- } +- // cache miss: use zero value +- } else if err := json.Unmarshal(data, &comp); err != nil { +- return nil, err +- } +- +- // Prepare to construct symbol reference graph. +- var ( +- info = pkg.TypesInfo() +- symbols = make(map[types.Object]*symbol) +- ) +- +- // setName records the UI name for an object. +- // (The UI name disambiguates "init", "_", etc.) +- setName := func(obj types.Object, name string) { +- symbols[obj] = &symbol{ +- name: name, +- component: comp.Assignments[name], // missing => "default" +- } +- } +- +- // Pass 1: name everything, since naming is order-dependent. +- var initCounter, blankCounter int +- for _, pgf := range pkg.CompiledGoFiles() { +- for _, decl := range pgf.File.Decls { +- switch decl := decl.(type) { +- case *ast.FuncDecl: +- if fn, ok := info.Defs[decl.Name].(*types.Func); ok { +- // For now we treat methods as first class decls, +- // but since they are coupled to the named type +- // they should be omitted in the UI for brevity. 
+- name := fn.Name() +- if recv := fn.Signature().Recv(); recv != nil { +- fn = fn.Origin() +- _, named := typesinternal.ReceiverNamed(recv) +- name = named.Obj().Name() + "." + name +- } else if name == "init" { +- // Disambiguate top-level init functions. +- name += suffix(&initCounter) +- } +- if name == "_" { // (function or method) +- name += suffix(&blankCounter) +- } +- setName(fn, name) +- } +- +- case *ast.GenDecl: +- switch decl.Tok { +- case token.CONST, token.VAR: +- for _, spec := range decl.Specs { +- spec := spec.(*ast.ValueSpec) +- for _, id := range spec.Names { +- if obj := info.Defs[id]; obj != nil { +- name := obj.Name() +- if name == "_" { +- name += suffix(&blankCounter) +- } +- setName(obj, name) +- } +- } +- } +- +- case token.TYPE: +- for _, spec := range decl.Specs { +- spec := spec.(*ast.TypeSpec) +- if obj := info.Defs[spec.Name]; obj != nil { +- name := obj.Name() +- if name == "_" { +- name += suffix(&blankCounter) +- } +- setName(obj, name) +- } +- } +- } +- } +- } +- } +- +- // Pass 2: compute symbol reference graph, project onto +- // component dependency graph, and build JSON response. +- var ( +- files []*fileJSON +- refs []*refJSON +- ) +- for _, pgf := range pkg.CompiledGoFiles() { +- identURL := func(id *ast.Ident) string { +- posn := safetoken.Position(pgf.Tok, id.Pos()) +- return web.SrcURL(posn.Filename, posn.Line, posn.Column) +- } +- newCollector := func(from *symbol) *refCollector { +- return &refCollector{ +- from: from, +- identURL: identURL, +- pkg: pkg.Types(), +- info: info, +- symbols: symbols, +- } +- } +- var decls []*declJSON +- for _, decl := range pgf.File.Decls { +- var ( +- kind string +- specs []*specJSON +- ) +- switch decl := decl.(type) { +- case *ast.FuncDecl: +- kind = "func" +- if fn, ok := info.Defs[decl.Name].(*types.Func); ok { +- symbol := symbols[fn] +- rc := newCollector(symbol).collect(decl) +- refs = append(refs, rc.refs...) +- specs = append(specs, &specJSON{ +- Name: symbol.name, +- URL: identURL(decl.Name), +- }) +- } +- +- case *ast.GenDecl: +- kind = decl.Tok.String() +- +- switch decl.Tok { +- case token.CONST, token.VAR: +- for _, spec := range decl.Specs { +- spec := spec.(*ast.ValueSpec) +- for i, id := range spec.Names { +- if obj := info.Defs[id]; obj != nil { +- symbol := symbols[obj] +- rc := newCollector(symbol) +- // If there's a type, +- // all RHSs depend on it. +- if spec.Type != nil { +- rc.collect(spec.Type) +- } +- switch len(spec.Values) { +- case len(spec.Names): +- // var x, y = a, b +- rc.collect(spec.Values[i]) +- case 1: +- // var x, y = f() +- rc.collect(spec.Values[0]) +- case 0: +- // var x T +- } +- refs = append(refs, rc.refs...) +- specs = append(specs, &specJSON{ +- Name: symbol.name, +- URL: identURL(id), +- }) +- } +- } +- } +- +- case token.TYPE: +- for _, spec := range decl.Specs { +- spec := spec.(*ast.TypeSpec) +- if obj := info.Defs[spec.Name]; obj != nil { +- symbol := symbols[obj] +- rc := newCollector(symbol).collect(spec.Type) +- refs = append(refs, rc.refs...) +- specs = append(specs, &specJSON{ +- Name: symbol.name, +- URL: identURL(spec.Name), +- }) +- } +- } +- } +- } +- if len(specs) > 0 { +- decls = append(decls, &declJSON{Kind: kind, Specs: specs}) +- } +- } +- files = append(files, &fileJSON{ +- Base: pgf.URI.Base(), +- URL: web.SrcURL(pgf.URI.Path(), 1, 1), +- Decls: decls, +- }) +- } +- +- // Compute the graph of dependencies between components, by +- // projecting the symbol dependency graph through component +- // assignments. 
+- var ( +- g = make(graph) +- edgeRefs = make(map[[2]int][]*refJSON) // refs that induce each intercomponent edge +- ) +- for _, ref := range refs { +- from, to := ref.from, ref.to +- if from.component != to.component { +- // inter-component reference +- m, ok := g[from.component] +- if !ok { +- m = make(map[int]bool) +- g[from.component] = m +- } +- m[to.component] = true +- +- key := [2]int{from.component, to.component} +- edgeRefs[key] = append(edgeRefs[key], ref) +- } +- } +- +- // Detect cycles in the component graph +- // and record cyclic (⚠) components. +- cycles := [][]int{} // non-nil for JSON +- scmap := make(map[int]int) // maps component index to 1 + SCC index (0 => acyclic) +- for i, scc := range sccs(g) { +- for c := range scc { +- scmap[c] = i + 1 +- } +- cycles = append(cycles, moremaps.KeySlice(scc)) +- } +- +- // Record intercomponent edges and their references. +- edges := []*edgeJSON{} // non-nil for JSON +- for edge, refs := range edgeRefs { +- from, to := edge[0], edge[1] +- edges = append(edges, &edgeJSON{ +- From: from, +- To: to, +- Refs: refs, +- Cyclic: scmap[from] > 0 && scmap[from] == scmap[to], +- }) +- } +- +- return json.Marshal(ResultJSON{ +- Files: files, +- Components: comp, +- Edges: edges, +- Cycles: cycles, +- }) +-} +- +-// A refCollector gathers intra-package references to top-level +-// symbols from within one syntax tree, in lexical order. +-type refCollector struct { +- from *symbol +- identURL func(*ast.Ident) string +- pkg *types.Package +- info *types.Info +- index map[types.Object]*refJSON +- symbols map[types.Object]*symbol +- +- refs []*refJSON // output +-} +- +-// A symbol describes a declared name and its assigned component. +-type symbol struct { +- name string // unique name in the UI and JSON/HTTP protocol +- component int // index of assigned component +-} +- +-// collect adds the free references of n to the collection. +-func (rc *refCollector) collect(n ast.Node) *refCollector { +- var f func(n ast.Node) bool +- f = func(n ast.Node) bool { +- switch n := n.(type) { +- case *ast.SelectorExpr: +- if sel, ok := rc.info.Selections[n]; ok { +- rc.addRef(n.Sel, sel.Obj()) +- ast.Inspect(n.X, f) +- return false // don't visit n.Sel +- } +- +- case *ast.Ident: +- if obj := rc.info.Uses[n]; obj != nil { +- rc.addRef(n, obj) +- } +- } +- return true +- } +- ast.Inspect(n, f) +- +- return rc +-} +- +-// addRef records a reference from id to obj. +-func (rc *refCollector) addRef(id *ast.Ident, obj types.Object) { +- if obj.Pkg() != rc.pkg { +- return // cross-package reference +- } +- +- // Un-instantiate methods. +- if fn, ok := obj.(*types.Func); ok && fn.Signature().Recv() != nil { +- obj = fn.Origin() // G[int].method -> G[T].method +- } +- +- // We only care about refs to package-level symbols. +- // And methods, for now. +- decl := rc.symbols[obj] +- if decl == nil { +- return // not a package-level symbol or top-level method +- } +- +- if ref, ok := rc.index[obj]; !ok { +- ref = &refJSON{ +- From: rc.from.name, +- To: decl.name, +- URL: rc.identURL(id), +- from: rc.from, +- to: decl, +- } +- if rc.index == nil { +- rc.index = make(map[types.Object]*refJSON) +- } +- rc.index[obj] = ref +- rc.refs = append(rc.refs, ref) +- } +-} +- +-// suffix returns a subscripted decimal suffix, +-// preincrementing the specified counter. 
+-func suffix(counter *int) string { +- *counter++ +- n := *counter +- return subscripter.Replace(strconv.Itoa(n)) +-} +- +-var subscripter = strings.NewReplacer( +- "0", "₀", +- "1", "₁", +- "2", "₂", +- "3", "₃", +- "4", "₄", +- "5", "₅", +- "6", "₆", +- "7", "₇", +- "8", "₈", +- "9", "₉", +-) +- +-// -- JSON types -- +- +-// ResultJSON describes the result of a /splitpkg-json query. +-// It is public for testing. +-type ResultJSON struct { +- Components ComponentsJSON // component names and their assigned declarations +- Files []*fileJSON // files of the packages and their declarations and references +- Edges []*edgeJSON // inter-component edges and their references +- Cycles [][]int // sets of strongly-connected components +-} +- +-// request body of a /splitpkg-components update; +-// also part of /splitpkg-json response. +-type ComponentsJSON struct { +- Names []string `json:",omitempty"` // if empty, implied Names[0]=="default". +- Assignments map[string]int `json:",omitempty"` // maps specJSON.Name to component index; missing => 0 +-} +- +-// edgeJSON describes an inter-component dependency. +-type edgeJSON struct { +- From, To int // component IDs +- Refs []*refJSON // references that give rise to this edge +- Cyclic bool // edge is part of nontrivial strongly connected component +-} +- +-// fileJSON records groups decl/spec information about a single file. +-type fileJSON struct { +- Base string // file base name +- URL string // showDocument link for file +- Decls []*declJSON `json:",omitempty"` +-} +- +-// declJSON groups specs (e.g. "var ( x int; y int )"). +-type declJSON struct { +- Kind string // const, var, type, func +- Specs []*specJSON `json:",omitempty"` +-} +- +-// specJSON describes a single declared name. +-// (A coupled declaration "var x, y = f()" results in two specJSONs.) +-type specJSON struct { +- Name string // x or T.x +- URL string // showDocument link for declaring identifier +-} +- +-// refJSON records the first reference from a given declaration to a symbol. +-// (Repeat uses of the same identifier are omitted.) +-type refJSON struct { +- From, To string // x or T.x of referenced spec +- URL string // showDocument link for referring identifier +- +- from, to *symbol // transient +-} +diff -urN a/gopls/internal/golang/splitpkg/splitpkg.html.tmpl b/gopls/internal/golang/splitpkg/splitpkg.html.tmpl +--- a/gopls/internal/golang/splitpkg/splitpkg.html.tmpl 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/golang/splitpkg/splitpkg.html.tmpl 1969-12-31 18:00:00.000000000 -0600 +@@ -1,69 +0,0 @@ +- +- +- +- +- {{.Title}} +- +- +- +- +- +- +-

+-[splitpkg.html.tmpl: the HTML markup did not survive extraction; only the template's visible text is kept below.]
+-{{.Title}}
+-
+-ⓘ Use this tool to decompose a package
+-whose dependencies are acyclic.
+-
+-First, name a set of components.
+-
+-Second, assign each declaration to an
+-appropriate component: check their checkboxes, choose a component, and
+-click Apply. Use the checkbox for a file to select all declarations in
+-that file.
+-
+-Third, examine the set of dependencies between
+-components. Each inter-component dependency lists the symbol
+-references that cross the boundary. Click on one to navigate your
+-editor there.
+-
+-If two or more components form a dependency cycle (⚠),
+-you will need to either change your code,
+-or change the component assignments.
+-
+-Iterate this process.
+-Reload the page to refresh after each code change.
+-
+-Once you are happy with the result, you can split the package,
+-renaming declarations as needed to export them.
+-In a future release, the code transformation will be automated.
+-
+-Components
+-
+-[▼ see dependencies]
+-
+-Declarations
+-
+-Component dependencies
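For reference (not part of the deleted files): the page above posts its component assignments back to the server, which stores them via UpdateComponentsJSON. Below is a minimal sketch of such a payload, assuming only the ComponentsJSON shape defined earlier in this patch; the "parser"/"printer" component names and the assigned declaration names are invented for illustration.

package main

import (
	"encoding/json"
	"fmt"
)

// ComponentsJSON mirrors the struct of the same name in splitpkg.go above.
type ComponentsJSON struct {
	Names       []string       `json:",omitempty"` // component names; index 0 is the implicit "default"
	Assignments map[string]int `json:",omitempty"` // declared name -> component index; missing => 0
}

func main() {
	// Hypothetical split of a package into "parser" and "printer" components.
	comp := ComponentsJSON{
		Names: []string{"default", "parser", "printer"},
		Assignments: map[string]int{
			"Parse": 1, // assigned to "parser"
			"Print": 2, // assigned to "printer"
			// names not listed here remain in component 0 ("default")
		},
	}
	data, err := json.Marshal(comp)
	if err != nil {
		panic(err)
	}
	fmt.Println(string(data))
	// Prints:
	// {"Names":["default","parser","printer"],"Assignments":{"Parse":1,"Print":2}}
}

Declarations absent from Assignments fall back to component 0, the implicit "default" component, matching the comments on ComponentsJSON.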
    +-` +diff -urN a/gopls/internal/golang/stub.go b/gopls/internal/golang/stub.go +--- a/gopls/internal/golang/stub.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/golang/stub.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,238 +0,0 @@ +-// Copyright 2022 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package golang +- +-import ( +- "bytes" +- "context" +- "fmt" +- "go/format" +- "go/parser" +- "go/token" +- "go/types" +- pathpkg "path" +- "strings" +- +- "golang.org/x/tools/go/analysis" +- "golang.org/x/tools/go/ast/astutil" +- "golang.org/x/tools/gopls/internal/cache" +- "golang.org/x/tools/gopls/internal/cache/metadata" +- "golang.org/x/tools/gopls/internal/cache/parsego" +- "golang.org/x/tools/gopls/internal/golang/stubmethods" +- "golang.org/x/tools/gopls/internal/util/bug" +- "golang.org/x/tools/gopls/internal/util/safetoken" +- "golang.org/x/tools/gopls/internal/util/tokeninternal" +- "golang.org/x/tools/internal/diff" +-) +- +-// stubMissingInterfaceMethodsFixer returns a suggested fix to declare the missing +-// methods of the concrete type that is assigned to an interface type +-// at the cursor position. +-func stubMissingInterfaceMethodsFixer(ctx context.Context, snapshot *cache.Snapshot, pkg *cache.Package, pgf *parsego.File, start, end token.Pos) (*token.FileSet, *analysis.SuggestedFix, error) { +- si := stubmethods.GetIfaceStubInfo(pkg.FileSet(), pkg.TypesInfo(), pgf, start, end) +- if si == nil { +- return nil, nil, fmt.Errorf("nil interface request") +- } +- return insertDeclsAfter(ctx, snapshot, pkg.Metadata(), si.Fset, si.Concrete.Obj(), si.Emit) +-} +- +-// stubMissingCalledFunctionFixer returns a suggested fix to declare the missing +-// method that the user may want to generate based on CallExpr +-// at the cursor position. +-func stubMissingCalledFunctionFixer(ctx context.Context, snapshot *cache.Snapshot, pkg *cache.Package, pgf *parsego.File, start, end token.Pos) (*token.FileSet, *analysis.SuggestedFix, error) { +- si := stubmethods.GetCallStubInfo(pkg.FileSet(), pkg.TypesInfo(), pgf, start, end) +- if si == nil { +- return nil, nil, fmt.Errorf("invalid type request") +- } +- return insertDeclsAfter(ctx, snapshot, pkg.Metadata(), si.Fset, si.After, si.Emit) +-} +- +-// An emitter writes new top-level declarations into an existing +-// file. References to symbols should be qualified using qual, which +-// respects the local import environment. +-type emitter = func(out *bytes.Buffer, qual types.Qualifier) error +- +-// insertDeclsAfter locates the file that declares symbol sym, +-// (which must be among the dependencies of mp), +-// calls the emit function to generate new declarations, +-// respecting the local import environment, +-// and splices those declarations into the file after the declaration of sym, +-// updating imports as needed. +-// +-// fset must provide the position of sym. +-func insertDeclsAfter(ctx context.Context, snapshot *cache.Snapshot, mp *metadata.Package, fset *token.FileSet, sym types.Object, emit emitter) (*token.FileSet, *analysis.SuggestedFix, error) { +- // Parse the file declaring the sym. +- // +- // Beware: declPGF is not necessarily covered by pkg.FileSet() or si.Fset. 
+- declPGF, _, err := parseFull(ctx, snapshot, fset, sym) +- if err != nil { +- return nil, nil, fmt.Errorf("failed to parse file %q declaring implementation symbol: %w", declPGF.URI, err) +- } +- if declPGF.Fixed() { +- return nil, nil, fmt.Errorf("file contains parse errors: %s", declPGF.URI) +- } +- +- // Find metadata for the symbol's declaring package +- // as we'll need its import mapping. +- declMeta := findFileInDeps(snapshot, mp, declPGF.URI) +- if declMeta == nil { +- return nil, nil, bug.Errorf("can't find metadata for file %s among dependencies of %s", declPGF.URI, mp) +- } +- +- // Build import environment for the declaring file. +- // (typesinternal.FileQualifier works only for complete +- // import mappings, and requires types.) +- importEnv := make(map[ImportPath]string) // value is local name +- for _, imp := range declPGF.File.Imports { +- importPath := metadata.UnquoteImportPath(imp) +- var name string +- if imp.Name != nil { +- name = imp.Name.Name +- if name == "_" { +- continue +- } else if name == "." { +- name = "" // see types.Qualifier +- } +- } else { +- // Use the correct name from the metadata of the imported +- // package---not a guess based on the import path. +- mp := snapshot.Metadata(declMeta.DepsByImpPath[importPath]) +- if mp == nil { +- continue // can't happen? +- } +- name = string(mp.Name) +- } +- importEnv[importPath] = name // latest alias wins +- } +- +- // Create a package name qualifier that uses the +- // locally appropriate imported package name. +- // It records any needed new imports. +- // TODO(adonovan): factor with golang.FormatVarType? +- // +- // Prior to CL 469155 this logic preserved any renaming +- // imports from the file that declares the interface +- // method--ostensibly the preferred name for imports of +- // frequently renamed packages such as protobufs. +- // Now we use the package's declared name. If this turns out +- // to be a mistake, then use parseHeader(si.iface.Pos()). +- // +- type newImport struct{ name, importPath string } +- var newImports []newImport // for AddNamedImport +- qual := func(pkg *types.Package) string { +- // TODO(adonovan): don't ignore vendor prefix. +- // +- // Ignore the current package import. +- if pkg.Path() == sym.Pkg().Path() { +- return "" +- } +- +- importPath := ImportPath(pkg.Path()) +- name, ok := importEnv[importPath] +- if !ok { +- // Insert new import using package's declared name. +- // +- // TODO(adonovan): resolve conflict between declared +- // name and existing file-level (declPGF.File.Imports) +- // or package-level (sym.Pkg.Scope) decls by +- // generating a fresh name. +- name = pkg.Name() +- importEnv[importPath] = name +- new := newImport{importPath: string(importPath)} +- // For clarity, use a renaming import whenever the +- // local name does not match the path's last segment. +- if name != pathpkg.Base(trimVersionSuffix(new.importPath)) { +- new.name = name +- } +- newImports = append(newImports, new) +- } +- return name +- } +- +- // Compute insertion point for new declarations: +- // after the top-level declaration enclosing the (package-level) type. 
+- insertOffset, err := safetoken.Offset(declPGF.Tok, declPGF.File.End()) +- if err != nil { +- return nil, nil, bug.Errorf("internal error: end position outside file bounds: %v", err) +- } +- symOffset, err := safetoken.Offset(fset.File(sym.Pos()), sym.Pos()) +- if err != nil { +- return nil, nil, bug.Errorf("internal error: finding type decl offset: %v", err) +- } +- for _, decl := range declPGF.File.Decls { +- declEndOffset, err := safetoken.Offset(declPGF.Tok, decl.End()) +- if err != nil { +- return nil, nil, bug.Errorf("internal error: finding decl offset: %v", err) +- } +- if declEndOffset > symOffset { +- insertOffset = declEndOffset +- break +- } +- } +- +- // Splice the new declarations into the file content. +- var buf bytes.Buffer +- input := declPGF.Mapper.Content // unfixed content of file +- buf.Write(input[:insertOffset]) +- buf.WriteByte('\n') +- err = emit(&buf, qual) +- if err != nil { +- return nil, nil, err +- } +- buf.Write(input[insertOffset:]) +- +- // Re-parse the file. +- fset = token.NewFileSet() +- newF, err := parser.ParseFile(fset, declPGF.URI.Path(), buf.Bytes(), parser.ParseComments|parser.SkipObjectResolution) +- if err != nil { +- return nil, nil, fmt.Errorf("could not reparse file: %w", err) +- } +- +- // Splice the new imports into the syntax tree. +- for _, imp := range newImports { +- astutil.AddNamedImport(fset, newF, imp.name, imp.importPath) +- } +- +- // Pretty-print. +- var output bytes.Buffer +- if err := format.Node(&output, fset, newF); err != nil { +- return nil, nil, fmt.Errorf("format.Node: %w", err) +- } +- +- // Report the diff. +- diffs := diff.Bytes(input, output.Bytes()) +- return tokeninternal.FileSetFor(declPGF.Tok), // edits use declPGF.Tok +- &analysis.SuggestedFix{TextEdits: diffToTextEdits(declPGF.Tok, diffs)}, +- nil +-} +- +-// diffToTextEdits converts diff (offset-based) edits to analysis (token.Pos) form. +-func diffToTextEdits(tok *token.File, diffs []diff.Edit) []analysis.TextEdit { +- edits := make([]analysis.TextEdit, 0, len(diffs)) +- for _, edit := range diffs { +- edits = append(edits, analysis.TextEdit{ +- Pos: tok.Pos(edit.Start), +- End: tok.Pos(edit.End), +- NewText: []byte(edit.New), +- }) +- } +- return edits +-} +- +-// trimVersionSuffix removes a trailing "/v2" (etc) suffix from a module path. +-// +-// This is only a heuristic as to the package's declared name, and +-// should only be used for stylistic decisions, such as whether it +-// would be clearer to use an explicit local name in the import +-// because the declared name differs from the result of this function. +-// When the name matters for correctness, look up the imported +-// package's Metadata.Name. +-func trimVersionSuffix(path string) string { +- dir, base := pathpkg.Split(path) +- if len(base) > 1 && base[0] == 'v' && strings.Trim(base[1:], "0123456789") == "" { +- return dir // sans "/v2" +- } +- return path +-} +diff -urN a/gopls/internal/golang/stubmethods/stubcalledfunc.go b/gopls/internal/golang/stubmethods/stubcalledfunc.go +--- a/gopls/internal/golang/stubmethods/stubcalledfunc.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/golang/stubmethods/stubcalledfunc.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,261 +0,0 @@ +-// Copyright 2024 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. 
+- +-package stubmethods +- +-import ( +- "bytes" +- "fmt" +- "go/ast" +- "go/token" +- "go/types" +- "strings" +- "unicode" +- +- "golang.org/x/tools/go/ast/inspector" +- "golang.org/x/tools/gopls/internal/cache/parsego" +- "golang.org/x/tools/gopls/internal/util/cursorutil" +- "golang.org/x/tools/gopls/internal/util/typesutil" +- "golang.org/x/tools/internal/typesinternal" +-) +- +-var anyType = types.Universe.Lookup("any").Type() +- +-// CallStubInfo represents a missing method +-// that a receiver type is about to generate +-// which has "type X has no field or method Y" error +-type CallStubInfo struct { +- Fset *token.FileSet // the FileSet used to type-check the types below +- Receiver typesinternal.NamedOrAlias // the method's receiver type +- MethodName string +- After types.Object // decl after which to insert the new decl +- pointer bool +- info *types.Info +- curCall inspector.Cursor // cursor to the CallExpr +-} +- +-// GetCallStubInfo extracts necessary information to generate a method definition from +-// a CallExpr. +-func GetCallStubInfo(fset *token.FileSet, info *types.Info, pgf *parsego.File, start, end token.Pos) *CallStubInfo { +- callCur, _ := pgf.Cursor.FindByPos(start, end) +- call, callCur := cursorutil.FirstEnclosing[*ast.CallExpr](callCur) +- if call == nil { +- return nil +- } +- s, ok := call.Fun.(*ast.SelectorExpr) +- // TODO: support generating stub functions in the same way. +- if !ok { +- return nil +- } +- +- // If recvExpr is a package name, compiler error would be +- // e.g., "undefined: http.bar", thus will not hit this code path. +- recvExpr := s.X +- recvType, pointer := concreteType(recvExpr, info) +- +- if recvType == nil || recvType.Obj().Pkg() == nil { +- return nil +- } +- +- // A method of a function-local type cannot be stubbed +- // since there's nowhere to put the methods. +- recv := recvType.Obj() +- if recv.Parent() != recv.Pkg().Scope() { +- return nil +- } +- +- after := types.Object(recv) +- // If the enclosing function declaration is a method declaration, +- // and matches the receiver type of the diagnostic, +- // insert after the enclosing method. +- decl, _ := cursorutil.FirstEnclosing[*ast.FuncDecl](callCur) +- if decl != nil && decl.Recv != nil { +- if len(decl.Recv.List) != 1 { +- return nil +- } +- mrt := info.TypeOf(decl.Recv.List[0].Type) +- if mrt != nil && types.Identical(types.Unalias(typesinternal.Unpointer(mrt)), recv.Type()) { +- after = info.ObjectOf(decl.Name) +- } +- } +- return &CallStubInfo{ +- Fset: fset, +- Receiver: recvType, +- MethodName: s.Sel.Name, +- After: after, +- pointer: pointer, +- curCall: callCur, +- info: info, +- } +-} +- +-// Emit writes to out the missing method based on type info of si.Receiver and CallExpr. +-func (si *CallStubInfo) Emit(out *bytes.Buffer, qual types.Qualifier) error { +- params := si.collectParams() +- rets := typesutil.TypesFromContext(si.info, si.curCall) +- recv := si.Receiver.Obj() +- // Pointer receiver? +- var star string +- if si.pointer { +- star = "*" +- } +- +- // Choose receiver name. +- // If any method has a named receiver, choose the first one. +- // Otherwise, use lowercase for the first letter of the object. +- recvName := strings.ToLower(fmt.Sprintf("%.1s", recv.Name())) +- if named, ok := types.Unalias(si.Receiver).(*types.Named); ok { +- for i := 0; i < named.NumMethods(); i++ { +- if recv := named.Method(i).Type().(*types.Signature).Recv(); recv.Name() != "" { +- recvName = recv.Name() +- break +- } +- } +- } +- +- // Emit method declaration. 
+- fmt.Fprintf(out, "\nfunc (%s %s%s%s) %s", +- recvName, +- star, +- recv.Name(), +- typesutil.FormatTypeParams(si.Receiver.TypeParams()), +- si.MethodName) +- +- // Emit parameters, avoiding name conflicts. +- seen := map[string]bool{recvName: true} +- out.WriteString("(") +- for i, param := range params { +- name := param.name +- if seen[name] { +- name = fmt.Sprintf("param%d", i+1) +- } +- seen[name] = true +- +- if i > 0 { +- out.WriteString(", ") +- } +- fmt.Fprintf(out, "%s %s", name, types.TypeString(param.typ, qual)) +- } +- out.WriteString(") ") +- +- // Emit result types. +- if len(rets) > 1 { +- out.WriteString("(") +- } +- for i, r := range rets { +- if i > 0 { +- out.WriteString(", ") +- } +- out.WriteString(types.TypeString(r, qual)) +- } +- if len(rets) > 1 { +- out.WriteString(")") +- } +- +- // Emit body. +- out.WriteString(` { +- panic("unimplemented") +-}`) +- return nil +-} +- +-type param struct { +- name string +- typ types.Type // the type of param, inferred from CallExpr +-} +- +-// collectParams gathers the parameter information needed to generate a method stub. +-// The param's type default to any if there is a type error in the argument. +-func (si *CallStubInfo) collectParams() []param { +- var params []param +- appendParam := func(e ast.Expr, t types.Type) { +- p := param{"param", anyType} +- if t != nil && !containsInvalid(t) { +- t = types.Default(t) +- p = param{paramName(e, t), t} +- } +- params = append(params, p) +- } +- +- args := si.curCall.Node().(*ast.CallExpr).Args +- for _, arg := range args { +- t := si.info.TypeOf(arg) +- switch t := t.(type) { +- // This is the case where another function call returning multiple +- // results is used as an argument. +- case *types.Tuple: +- for ti := 0; ti < t.Len(); ti++ { +- appendParam(arg, t.At(ti).Type()) +- } +- default: +- appendParam(arg, t) +- } +- } +- return params +-} +- +-// containsInvalid checks if the type name contains "invalid type", +-// which is not a valid syntax to generate. +-func containsInvalid(t types.Type) bool { +- typeString := types.TypeString(t, nil) +- return strings.Contains(typeString, types.Typ[types.Invalid].String()) +-} +- +-// paramName heuristically chooses a parameter name from +-// its argument expression and type. Caller should ensure +-// typ is non-nil. +-func paramName(e ast.Expr, typ types.Type) string { +- if typ == types.Universe.Lookup("error").Type() { +- return "err" +- } +- switch t := e.(type) { +- // Use the identifier's name as the argument name. +- case *ast.Ident: +- return t.Name +- // Use the Sel.Name's last section as the argument name. +- case *ast.SelectorExpr: +- return lastSection(t.Sel.Name) +- } +- +- typ = typesinternal.Unpointer(typ) +- switch t := typ.(type) { +- // Use the first character of the type name as the argument name for builtin types +- case *types.Basic: +- return t.Name()[:1] +- case *types.Slice: +- return paramName(e, t.Elem()) +- case *types.Array: +- return paramName(e, t.Elem()) +- case *types.Signature: +- return "f" +- case *types.Map: +- return "m" +- case *types.Chan: +- return "ch" +- case *types.Named: +- return lastSection(t.Obj().Name()) +- default: +- return lastSection(t.String()) +- } +-} +- +-// lastSection find the position of the last uppercase letter, +-// extract the substring from that point onward, +-// and convert it to lowercase. 
+-// +-// Example: lastSection("registryManagerFactory") = "factory" +-func lastSection(identName string) string { +- lastUpperIndex := -1 +- for i, r := range identName { +- if unicode.IsUpper(r) { +- lastUpperIndex = i +- } +- } +- if lastUpperIndex != -1 { +- last := identName[lastUpperIndex:] +- return strings.ToLower(last) +- } else { +- return identName +- } +-} +diff -urN a/gopls/internal/golang/stubmethods/stubmethods.go b/gopls/internal/golang/stubmethods/stubmethods.go +--- a/gopls/internal/golang/stubmethods/stubmethods.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/golang/stubmethods/stubmethods.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,416 +0,0 @@ +-// Copyright 2022 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-// Package stubmethods provides the analysis logic for the quick fix +-// to "Declare missing methods of TYPE" errors. (The fix logic lives +-// in golang.stubMethodsFixer.) +-package stubmethods +- +-import ( +- "bytes" +- "fmt" +- "go/ast" +- "go/token" +- "go/types" +- "strings" +- +- "golang.org/x/tools/go/ast/edge" +- "golang.org/x/tools/go/ast/inspector" +- "golang.org/x/tools/internal/typesinternal" +- +- "golang.org/x/tools/gopls/internal/cache/parsego" +- "golang.org/x/tools/gopls/internal/util/bug" +- "golang.org/x/tools/gopls/internal/util/typesutil" +-) +- +-// TODO(adonovan): eliminate the confusing Fset parameter; only the +-// file name and byte offset of Concrete are needed. +- +-// IfaceStubInfo represents a concrete type +-// that wants to stub out an interface type +-type IfaceStubInfo struct { +- // Interface is the interface that the client wants to implement. +- // When the interface is defined, the underlying object will be a TypeName. +- // Note that we keep track of types.Object instead of types.Type in order +- // to keep a reference to the declaring object's package and the ast file +- // in the case where the concrete type file requires a new import that happens to be renamed +- // in the interface file. +- // TODO(marwan-at-work): implement interface literals. +- Fset *token.FileSet // the FileSet used to type-check the types below +- Interface *types.TypeName +- Concrete typesinternal.NamedOrAlias +- pointer bool +-} +- +-// GetIfaceStubInfo determines whether the "missing method error" +-// can be used to deduced what the concrete and interface types are. +-// +-// TODO(adonovan): this function (and its following 5 helpers) tries +-// to deduce a pair of (concrete, interface) types that are related by +-// an assignment, either explicitly or through a return statement or +-// function call. This is essentially what the refactor/satisfy does, +-// more generally. Refactor to share logic, after auditing 'satisfy' +-// for safety on ill-typed code. +-func GetIfaceStubInfo(fset *token.FileSet, info *types.Info, pgf *parsego.File, pos, end token.Pos) *IfaceStubInfo { +- cur, _ := pgf.Cursor.FindByPos(pos, end) +- for cur := range cur.Enclosing() { +- // TODO: do cur = unparenEnclosing(cur) first, once CL 701035 lands. +- ek, _ := cur.ParentEdge() +- switch ek { +- case edge.ValueSpec_Values: +- return fromValueSpec(fset, info, cur) +- case edge.ReturnStmt_Results: +- // An error here may not indicate a real error the user should know about, but it may. +- // Therefore, it would be best to log it out for debugging/reporting purposes instead of ignoring +- // it. 
However, event.Log takes a context which is not passed via the analysis package. +- // TODO(marwan-at-work): properly log this error. +- si, _ := fromReturnStmt(fset, info, cur) +- return si +- case edge.AssignStmt_Rhs: +- return fromAssignStmt(fset, info, cur) +- case edge.CallExpr_Args: +- // Note that some call expressions don't carry the interface type +- // because they don't point to a function or method declaration elsewhere. +- // For eaxmple, "var Interface = (*Concrete)(nil)". In that case, continue +- // this loop to encounter other possibilities such as *ast.ValueSpec or others. +- si := fromCallExpr(fset, info, cur) +- if si != nil { +- return si +- } +- } +- } +- return nil +-} +- +-// Emit writes to out the missing methods of si.Concrete required for it to implement si.Interface +-func (si *IfaceStubInfo) Emit(out *bytes.Buffer, qual types.Qualifier) error { +- conc := si.Concrete.Obj() +- // Record all direct methods of the current object +- concreteFuncs := make(map[string]struct{}) +- if named, ok := types.Unalias(si.Concrete).(*types.Named); ok { +- for i := 0; i < named.NumMethods(); i++ { +- concreteFuncs[named.Method(i).Name()] = struct{}{} +- } +- } +- +- // Find subset of interface methods that the concrete type lacks. +- ifaceType := si.Interface.Type().Underlying().(*types.Interface) +- +- type missingFn struct { +- fn *types.Func +- needSubtle string +- } +- +- var ( +- missing []missingFn +- concreteStruct, isStruct = typesinternal.Origin(si.Concrete).Underlying().(*types.Struct) +- ) +- +- for i := 0; i < ifaceType.NumMethods(); i++ { +- imethod := ifaceType.Method(i) +- cmethod, index, _ := types.LookupFieldOrMethod(si.Concrete, si.pointer, imethod.Pkg(), imethod.Name()) +- if cmethod == nil { +- missing = append(missing, missingFn{fn: imethod}) +- continue +- } +- +- if _, ok := cmethod.(*types.Var); ok { +- // len(LookupFieldOrMethod.index) = 1 => conflict, >1 => shadow. +- return fmt.Errorf("adding method %s.%s would conflict with (or shadow) existing field", +- conc.Name(), imethod.Name()) +- } +- +- if _, exist := concreteFuncs[imethod.Name()]; exist { +- if !types.Identical(cmethod.Type(), imethod.Type()) { +- return fmt.Errorf("method %s.%s already exists but has the wrong type: got %s, want %s", +- conc.Name(), imethod.Name(), cmethod.Type(), imethod.Type()) +- } +- continue +- } +- +- mf := missingFn{fn: imethod} +- if isStruct && len(index) > 0 { +- field := concreteStruct.Field(index[0]) +- +- fn := field.Name() +- if _, ok := field.Type().(*types.Pointer); ok { +- fn = "*" + fn +- } +- +- mf.needSubtle = fmt.Sprintf("// Subtle: this method shadows the method (%s).%s of %s.%s.\n", fn, imethod.Name(), si.Concrete.Obj().Name(), field.Name()) +- } +- +- missing = append(missing, mf) +- } +- if len(missing) == 0 { +- return fmt.Errorf("no missing methods found") +- } +- +- // Format interface name (used only in a comment). +- iface := si.Interface.Name() +- if ipkg := si.Interface.Pkg(); ipkg != nil && ipkg != conc.Pkg() { +- iface = ipkg.Name() + "." + iface +- } +- +- // Pointer receiver? +- var star string +- if si.pointer { +- star = "*" +- } +- +- // If there are any that have named receiver, choose the first one. +- // Otherwise, use lowercase for the first letter of the object. 
+- rn := strings.ToLower(si.Concrete.Obj().Name()[0:1]) +- if named, ok := types.Unalias(si.Concrete).(*types.Named); ok { +- for i := 0; i < named.NumMethods(); i++ { +- if recv := named.Method(i).Type().(*types.Signature).Recv(); recv.Name() != "" { +- rn = recv.Name() +- break +- } +- } +- } +- +- // Check for receiver name conflicts +- checkRecvName := func(tuple *types.Tuple) bool { +- for i := 0; i < tuple.Len(); i++ { +- if rn == tuple.At(i).Name() { +- return true +- } +- } +- return false +- } +- +- for index := range missing { +- mrn := rn + " " +- sig := missing[index].fn.Signature() +- if checkRecvName(sig.Params()) || checkRecvName(sig.Results()) { +- mrn = "" +- } +- +- fmt.Fprintf(out, `// %s implements [%s]. +-%sfunc (%s%s%s%s) %s%s { +- panic("unimplemented") +-} +-`, +- missing[index].fn.Name(), +- iface, +- missing[index].needSubtle, +- mrn, +- star, +- si.Concrete.Obj().Name(), +- typesutil.FormatTypeParams(si.Concrete.TypeParams()), +- missing[index].fn.Name(), +- strings.TrimPrefix(types.TypeString(missing[index].fn.Type(), qual), "func")) +- } +- return nil +-} +- +-// fromCallExpr tries to find an *ast.CallExpr's function declaration and +-// analyzes a function call's signature against the passed in call argument to deduce +-// the concrete and interface types. +-func fromCallExpr(fset *token.FileSet, info *types.Info, curCallArg inspector.Cursor) *IfaceStubInfo { +- +- call := curCallArg.Parent().Node().(*ast.CallExpr) +- arg := curCallArg.Node().(ast.Expr) +- +- concType, pointer := concreteType(arg, info) +- if concType == nil || concType.Obj().Pkg() == nil { +- return nil +- } +- tv, ok := info.Types[call.Fun] +- if !ok { +- return nil +- } +- sig, ok := types.Unalias(tv.Type).(*types.Signature) +- if !ok { +- return nil +- } +- +- _, argIdx := curCallArg.ParentEdge() +- var paramType types.Type +- if sig.Variadic() && argIdx >= sig.Params().Len()-1 { +- v := sig.Params().At(sig.Params().Len() - 1) +- if s, _ := v.Type().(*types.Slice); s != nil { +- paramType = s.Elem() +- } +- } else if argIdx < sig.Params().Len() { +- paramType = sig.Params().At(argIdx).Type() +- } +- if paramType == nil { +- return nil // A type error prevents us from determining the param type. +- } +- iface := ifaceObjFromType(paramType) +- if iface == nil { +- return nil +- } +- return &IfaceStubInfo{ +- Fset: fset, +- Concrete: concType, +- pointer: pointer, +- Interface: iface, +- } +-} +- +-// fromReturnStmt analyzes a "return" statement to extract +-// a concrete type that is trying to be returned as an interface type. +-// +-// For example, func() io.Writer { return myType{} } +-// would return StubIfaceInfo with the interface being io.Writer and the concrete type being myType{}. +-func fromReturnStmt(fset *token.FileSet, info *types.Info, curResult inspector.Cursor) (*IfaceStubInfo, error) { +- concType, pointer := concreteType(curResult.Node().(ast.Expr), info) +- if concType == nil || concType.Obj().Pkg() == nil { +- return nil, nil // result is not a named or *named or alias thereof +- } +- // Inv: the return is not a spread return, +- // such as "return f()" where f() has tuple type. +- conc := concType.Obj() +- if conc.Parent() != conc.Pkg().Scope() { +- return nil, fmt.Errorf("local type %q cannot be stubbed", conc.Name()) +- } +- +- sig := typesutil.EnclosingSignature(curResult, info) +- if sig == nil { +- // golang/go#70666: this bug may be reached in practice. 
+- return nil, bug.Errorf("could not find the enclosing function of the return statement") +- } +- rets := sig.Results() +- // The return operands and function results must match. +- // (Spread returns were rejected earlier.) +- ret := curResult.Parent().Node().(*ast.ReturnStmt) +- if rets.Len() != len(ret.Results) { +- return nil, fmt.Errorf("%d-operand return statement in %d-result function", +- len(ret.Results), +- rets.Len()) +- } +- _, resultIdx := curResult.ParentEdge() +- iface := ifaceObjFromType(rets.At(resultIdx).Type()) +- if iface == nil { +- return nil, nil +- } +- return &IfaceStubInfo{ +- Fset: fset, +- Concrete: concType, +- pointer: pointer, +- Interface: iface, +- }, nil +-} +- +-// fromValueSpec returns *StubIfaceInfo from a variable declaration such as +-// var x io.Writer = &T{} +-func fromValueSpec(fset *token.FileSet, info *types.Info, curValue inspector.Cursor) *IfaceStubInfo { +- +- rhs := curValue.Node().(ast.Expr) +- spec := curValue.Parent().Node().(*ast.ValueSpec) +- +- // Possible implicit/explicit conversion to interface type? +- ifaceNode := spec.Type // var _ myInterface = ... +- if call, ok := rhs.(*ast.CallExpr); ok && ifaceNode == nil && len(call.Args) == 1 { +- // var _ = myInterface(v) +- ifaceNode = call.Fun +- rhs = call.Args[0] +- } +- concType, pointer := concreteType(rhs, info) +- if concType == nil || concType.Obj().Pkg() == nil { +- return nil +- } +- conc := concType.Obj() +- if conc.Parent() != conc.Pkg().Scope() { +- return nil +- } +- +- ifaceObj := ifaceType(ifaceNode, info) +- if ifaceObj == nil { +- return nil +- } +- return &IfaceStubInfo{ +- Fset: fset, +- Concrete: concType, +- Interface: ifaceObj, +- pointer: pointer, +- } +-} +- +-// fromAssignStmt returns *StubIfaceInfo from a variable assignment such as +-// var x io.Writer +-// x = &T{} +-func fromAssignStmt(fset *token.FileSet, info *types.Info, curRhs inspector.Cursor) *IfaceStubInfo { +- // The interface conversion error in an assignment is against the RHS: +- // +- // var x io.Writer +- // x = &T{} // error: missing method +- // ^^^^ +- +- assign := curRhs.Parent().Node().(*ast.AssignStmt) +- _, idx := curRhs.ParentEdge() +- lhs, rhs := assign.Lhs[idx], curRhs.Node().(ast.Expr) +- +- ifaceObj := ifaceType(lhs, info) +- if ifaceObj == nil { +- return nil +- } +- concType, pointer := concreteType(rhs, info) +- if concType == nil || concType.Obj().Pkg() == nil { +- return nil +- } +- conc := concType.Obj() +- if conc.Parent() != conc.Pkg().Scope() { +- return nil +- } +- return &IfaceStubInfo{ +- Fset: fset, +- Concrete: concType, +- Interface: ifaceObj, +- pointer: pointer, +- } +-} +- +-// ifaceType returns the named interface type to which e refers, if any. +-func ifaceType(e ast.Expr, info *types.Info) *types.TypeName { +- tv, ok := info.Types[e] +- if !ok { +- return nil +- } +- return ifaceObjFromType(tv.Type) +-} +- +-func ifaceObjFromType(t types.Type) *types.TypeName { +- named, ok := types.Unalias(t).(*types.Named) +- if !ok { +- return nil +- } +- if !types.IsInterface(named) { +- return nil +- } +- // Interfaces defined in the "builtin" package return nil a Pkg(). +- // But they are still real interfaces that we need to make a special case for. +- // Therefore, protect gopls from panicking if a new interface type was added in the future. 
+- if named.Obj().Pkg() == nil && named.Obj().Name() != "error" { +- return nil +- } +- return named.Obj() +-} +- +-// concreteType tries to extract the *types.Named that defines +-// the concrete type given the ast.Expr where the "missing method" +-// or "conversion" errors happened. If the concrete type is something +-// that cannot have methods defined on it (such as basic types), this +-// method will return a nil *types.Named. The second return parameter +-// is a boolean that indicates whether the concreteType was defined as a +-// pointer or value. +-func concreteType(e ast.Expr, info *types.Info) (*types.Named, bool) { +- tv, ok := info.Types[e] +- if !ok { +- return nil, false +- } +- typ := tv.Type +- ptr, isPtr := types.Unalias(typ).(*types.Pointer) +- if isPtr { +- typ = ptr.Elem() +- } +- named, ok := types.Unalias(typ).(*types.Named) +- if !ok { +- return nil, false +- } +- return named, isPtr +-} +diff -urN a/gopls/internal/golang/symbols.go b/gopls/internal/golang/symbols.go +--- a/gopls/internal/golang/symbols.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/golang/symbols.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,346 +0,0 @@ +-// Copyright 2019 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package golang +- +-import ( +- "context" +- "fmt" +- "go/ast" +- "go/token" +- "go/types" +- +- "golang.org/x/tools/gopls/internal/cache" +- "golang.org/x/tools/gopls/internal/cache/parsego" +- "golang.org/x/tools/gopls/internal/file" +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/gopls/internal/protocol/command" +- "golang.org/x/tools/internal/astutil" +- "golang.org/x/tools/internal/event" +-) +- +-func DocumentSymbols(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle) ([]protocol.DocumentSymbol, error) { +- ctx, done := event.Start(ctx, "golang.DocumentSymbols") +- defer done() +- +- pgf, err := snapshot.ParseGo(ctx, fh, parsego.Full) +- if err != nil { +- return nil, fmt.Errorf("getting file for DocumentSymbols: %w", err) +- } +- +- // Build symbols for file declarations. When encountering a declaration with +- // errors (typically because positions are invalid), we skip the declaration +- // entirely. VS Code fails to show any symbols if one of the top-level +- // symbols is missing position information. +- var symbols []protocol.DocumentSymbol +- for _, decl := range pgf.File.Decls { +- switch decl := decl.(type) { +- case *ast.FuncDecl: +- if decl.Name.Name == "_" { +- continue +- } +- fs, err := funcSymbol(pgf.Mapper, pgf.Tok, decl) +- if err == nil { +- // If function is a method, prepend the type of the method. 
+- if decl.Recv != nil && len(decl.Recv.List) > 0 { +- fs.Name = fmt.Sprintf("(%s).%s", types.ExprString(decl.Recv.List[0].Type), fs.Name) +- } +- symbols = append(symbols, fs) +- } +- case *ast.GenDecl: +- for _, spec := range decl.Specs { +- switch spec := spec.(type) { +- case *ast.TypeSpec: +- if spec.Name.Name == "_" { +- continue +- } +- ts, err := typeSymbol(pgf.Mapper, pgf.Tok, spec) +- if err == nil { +- symbols = append(symbols, ts) +- } +- case *ast.ValueSpec: +- for _, name := range spec.Names { +- if name.Name == "_" { +- continue +- } +- vs, err := varSymbol(pgf.Mapper, pgf.Tok, spec, name, decl.Tok == token.CONST) +- if err == nil { +- symbols = append(symbols, vs) +- } +- } +- } +- } +- } +- } +- return symbols, nil +-} +- +-// PackageSymbols returns a list of symbols in the narrowest package for the given file (specified +-// by its URI). +-// Methods with receivers are stored as children under the symbol for their receiver type. +-// The PackageSymbol data type contains the same fields as protocol.DocumentSymbol, with +-// an additional int field "File" that stores the index of that symbol's file in the +-// PackageSymbolsResult.Files. +-// Symbols are gathered using syntax rather than type information because type checking is +-// significantly slower. Syntax information provides enough value to the user without +-// causing a lag when loading symbol information across different files. +-func PackageSymbols(ctx context.Context, snapshot *cache.Snapshot, uri protocol.DocumentURI) (command.PackageSymbolsResult, error) { +- ctx, done := event.Start(ctx, "source.PackageSymbols") +- defer done() +- +- pkgFiles := []protocol.DocumentURI{uri} +- +- // golang/vscode-go#3681: do our best if the file is not in a package. +- // TODO(rfindley): revisit this in the future once there is more graceful +- // handling in VS Code. +- if mp, err := snapshot.NarrowestMetadataForFile(ctx, uri); err == nil { +- pkgFiles = mp.CompiledGoFiles +- } +- +- var ( +- pkgName string +- symbols []command.PackageSymbol +- receiverToMethods = make(map[string][]command.PackageSymbol) // receiver name -> methods +- typeSymbolToIdx = make(map[string]int) // type name -> index in symbols +- ) +- for fidx, f := range pkgFiles { +- fh, err := snapshot.ReadFile(ctx, f) +- if err != nil { +- return command.PackageSymbolsResult{}, err +- } +- pgf, err := snapshot.ParseGo(ctx, fh, parsego.Full) +- if err != nil { +- return command.PackageSymbolsResult{}, err +- } +- if pkgName == "" && pgf.File != nil && pgf.File.Name != nil { +- pkgName = pgf.File.Name.Name +- } +- for _, decl := range pgf.File.Decls { +- switch decl := decl.(type) { +- case *ast.FuncDecl: +- if decl.Name.Name == "_" { +- continue +- } +- if fs, err := funcSymbol(pgf.Mapper, pgf.Tok, decl); err == nil { +- // If function is a method, prepend the type of the method. 
+- // Don't add the method as its own symbol; store it so we can +- // add it as a child of the receiver type later +- if decl.Recv != nil && len(decl.Recv.List) > 0 { +- _, rname, _ := astutil.UnpackRecv(decl.Recv.List[0].Type) +- receiverToMethods[rname.String()] = append(receiverToMethods[rname.String()], toPackageSymbol(fidx, fs)) +- } else { +- symbols = append(symbols, toPackageSymbol(fidx, fs)) +- } +- } +- case *ast.GenDecl: +- for _, spec := range decl.Specs { +- switch spec := spec.(type) { +- case *ast.TypeSpec: +- if spec.Name.Name == "_" { +- continue +- } +- if ts, err := typeSymbol(pgf.Mapper, pgf.Tok, spec); err == nil { +- typeSymbolToIdx[ts.Name] = len(symbols) +- symbols = append(symbols, toPackageSymbol(fidx, ts)) +- } +- case *ast.ValueSpec: +- for _, name := range spec.Names { +- if name.Name == "_" { +- continue +- } +- if vs, err := varSymbol(pgf.Mapper, pgf.Tok, spec, name, decl.Tok == token.CONST); err == nil { +- symbols = append(symbols, toPackageSymbol(fidx, vs)) +- } +- } +- } +- } +- } +- } +- } +- // Add methods as the child of their receiver type symbol +- for recv, methods := range receiverToMethods { +- if i, ok := typeSymbolToIdx[recv]; ok { +- symbols[i].Children = append(symbols[i].Children, methods...) +- } +- } +- return command.PackageSymbolsResult{ +- PackageName: pkgName, +- Files: pkgFiles, +- Symbols: symbols, +- }, nil +-} +- +-func toPackageSymbol(fileIndex int, s protocol.DocumentSymbol) command.PackageSymbol { +- var res command.PackageSymbol +- res.Name = s.Name +- res.Detail = s.Detail +- res.Kind = s.Kind +- res.Tags = s.Tags +- res.Range = s.Range +- res.SelectionRange = s.SelectionRange +- +- children := make([]command.PackageSymbol, len(s.Children)) +- for i, c := range s.Children { +- children[i] = toPackageSymbol(fileIndex, c) +- } +- res.Children = children +- +- res.File = fileIndex +- return res +-} +- +-func funcSymbol(m *protocol.Mapper, tf *token.File, decl *ast.FuncDecl) (protocol.DocumentSymbol, error) { +- s := protocol.DocumentSymbol{ +- Name: decl.Name.Name, +- Kind: protocol.Function, +- } +- if decl.Recv != nil { +- s.Kind = protocol.Method +- } +- var err error +- s.Range, err = m.NodeRange(tf, decl) +- if err != nil { +- return protocol.DocumentSymbol{}, err +- } +- s.SelectionRange, err = m.NodeRange(tf, decl.Name) +- if err != nil { +- return protocol.DocumentSymbol{}, err +- } +- s.Detail = types.ExprString(decl.Type) +- return s, nil +-} +- +-func typeSymbol(m *protocol.Mapper, tf *token.File, spec *ast.TypeSpec) (protocol.DocumentSymbol, error) { +- s := protocol.DocumentSymbol{ +- Name: spec.Name.Name, +- } +- var err error +- s.Range, err = m.NodeRange(tf, spec) +- if err != nil { +- return protocol.DocumentSymbol{}, err +- } +- s.SelectionRange, err = m.NodeRange(tf, spec.Name) +- if err != nil { +- return protocol.DocumentSymbol{}, err +- } +- s.Kind, s.Detail, s.Children = typeDetails(m, tf, spec.Type) +- return s, nil +-} +- +-func typeDetails(m *protocol.Mapper, tf *token.File, typExpr ast.Expr) (kind protocol.SymbolKind, detail string, children []protocol.DocumentSymbol) { +- switch typExpr := typExpr.(type) { +- case *ast.StructType: +- kind = protocol.Struct +- children = fieldListSymbols(m, tf, typExpr.Fields, protocol.Field) +- if len(children) > 0 { +- detail = "struct{...}" +- } else { +- detail = "struct{}" +- } +- +- // Find interface methods and embedded types. 
+- case *ast.InterfaceType: +- kind = protocol.Interface +- children = fieldListSymbols(m, tf, typExpr.Methods, protocol.Method) +- if len(children) > 0 { +- detail = "interface{...}" +- } else { +- detail = "interface{}" +- } +- +- case *ast.FuncType: +- kind = protocol.Function +- detail = types.ExprString(typExpr) +- +- default: +- kind = protocol.Class // catch-all, for cases where we don't know the kind syntactically +- detail = types.ExprString(typExpr) +- } +- return +-} +- +-func fieldListSymbols(m *protocol.Mapper, tf *token.File, fields *ast.FieldList, fieldKind protocol.SymbolKind) []protocol.DocumentSymbol { +- if fields == nil { +- return nil +- } +- +- var symbols []protocol.DocumentSymbol +- for _, field := range fields.List { +- detail, children := "", []protocol.DocumentSymbol(nil) +- if field.Type != nil { +- _, detail, children = typeDetails(m, tf, field.Type) +- } +- if len(field.Names) == 0 { // embedded interface or struct field +- // By default, use the formatted type details as the name of this field. +- // This handles potentially invalid syntax, as well as type embeddings in +- // interfaces. +- child := protocol.DocumentSymbol{ +- Name: detail, +- Kind: protocol.Field, // consider all embeddings to be fields +- Children: children, +- } +- +- // If the field is a valid embedding, promote the type name to field +- // name. +- selection := field.Type +- if id := embeddedIdent(field.Type); id != nil { +- child.Name = id.Name +- child.Detail = detail +- selection = id +- } +- +- if rng, err := m.NodeRange(tf, field.Type); err == nil { +- child.Range = rng +- } +- if rng, err := m.NodeRange(tf, selection); err == nil { +- child.SelectionRange = rng +- } +- +- symbols = append(symbols, child) +- } else { +- for _, name := range field.Names { +- child := protocol.DocumentSymbol{ +- Name: name.Name, +- Kind: fieldKind, +- Detail: detail, +- Children: children, +- } +- +- if rng, err := m.NodeRange(tf, field); err == nil { +- child.Range = rng +- } +- if rng, err := m.NodeRange(tf, name); err == nil { +- child.SelectionRange = rng +- } +- +- symbols = append(symbols, child) +- } +- } +- +- } +- return symbols +-} +- +-func varSymbol(m *protocol.Mapper, tf *token.File, spec *ast.ValueSpec, name *ast.Ident, isConst bool) (protocol.DocumentSymbol, error) { +- s := protocol.DocumentSymbol{ +- Name: name.Name, +- Kind: protocol.Variable, +- } +- if isConst { +- s.Kind = protocol.Constant +- } +- var err error +- s.Range, err = m.NodeRange(tf, spec) +- if err != nil { +- return protocol.DocumentSymbol{}, err +- } +- s.SelectionRange, err = m.NodeRange(tf, name) +- if err != nil { +- return protocol.DocumentSymbol{}, err +- } +- if spec.Type != nil { // type may be missing from the syntax +- _, s.Detail, s.Children = typeDetails(m, tf, spec.Type) +- } +- return s, nil +-} +diff -urN a/gopls/internal/golang/type_definition.go b/gopls/internal/golang/type_definition.go +--- a/gopls/internal/golang/type_definition.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/golang/type_definition.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,48 +0,0 @@ +-// Copyright 2023 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. 
+- +-package golang +- +-import ( +- "context" +- "fmt" +- +- "golang.org/x/tools/gopls/internal/cache" +- "golang.org/x/tools/gopls/internal/file" +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/internal/event" +-) +- +-// TypeDefinition handles the textDocument/typeDefinition request for Go files. +-func TypeDefinition(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle, position protocol.Position) ([]protocol.Location, error) { +- ctx, done := event.Start(ctx, "golang.TypeDefinition") +- defer done() +- +- pkg, pgf, err := NarrowestPackageForFile(ctx, snapshot, fh.URI()) +- if err != nil { +- return nil, err +- } +- pos, err := pgf.PositionPos(position) +- if err != nil { +- return nil, err +- } +- +- // TODO(rfindley): handle type switch implicits correctly here: if the user +- // jumps to the type definition of x in x := y.(type), it makes sense to jump +- // to the type of y. +- _, obj, _ := referencedObject(pkg, pgf, pos) +- if obj == nil { +- return nil, nil +- } +- +- tname := typeToObject(obj.Type()) +- if tname == nil { +- return nil, fmt.Errorf("no type definition for %s", obj.Name()) +- } +- loc, err := ObjectLocation(ctx, pkg.FileSet(), snapshot, tname) +- if err != nil { +- return nil, err +- } +- return []protocol.Location{loc}, nil +-} +diff -urN a/gopls/internal/golang/type_hierarchy.go b/gopls/internal/golang/type_hierarchy.go +--- a/gopls/internal/golang/type_hierarchy.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/golang/type_hierarchy.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,144 +0,0 @@ +-// Copyright 2025 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package golang +- +-import ( +- "context" +- "fmt" +- "go/types" +- "slices" +- "strings" +- "sync" +- +- "golang.org/x/tools/gopls/internal/cache" +- "golang.org/x/tools/gopls/internal/cache/metadata" +- "golang.org/x/tools/gopls/internal/cache/methodsets" +- "golang.org/x/tools/gopls/internal/file" +- "golang.org/x/tools/gopls/internal/protocol" +-) +- +-// Type hierarchy support (using method sets) +-// +-// TODO(adonovan): +-// - Support type hierarchy by signatures (using Kind=Function). +-// As with Implementations by signature matching, needs more UX thought. +-// +-// - Allow methods too (using Kind=Method)? It's not exactly in the +-// spirit of TypeHierarchy but it would be useful and it's easy +-// enough to support. +-// +-// FIXME: fix pkg=command-line-arguments problem with query initiated at "error" in builtins.go +- +-// PrepareTypeHierarchy returns the TypeHierarchyItems for the types at the selected position. +-func PrepareTypeHierarchy(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle, pp protocol.Position) ([]protocol.TypeHierarchyItem, error) { +- pkg, pgf, err := NarrowestPackageForFile(ctx, snapshot, fh.URI()) +- if err != nil { +- return nil, err +- } +- pos, err := pgf.PositionPos(pp) +- if err != nil { +- return nil, err +- } +- +- // For now, we require that the selection be a type name. +- _, obj, _ := referencedObject(pkg, pgf, pos) +- if obj == nil { +- return nil, fmt.Errorf("not a symbol") +- } +- tname, ok := obj.(*types.TypeName) +- if !ok { +- return nil, fmt.Errorf("not a type name") +- } +- +- // Find declaration. 
+- declLoc, err := ObjectLocation(ctx, pkg.FileSet(), snapshot, tname) +- if err != nil { +- return nil, err +- } +- +- pkgpath := "builtin" +- if tname.Pkg() != nil { +- pkgpath = tname.Pkg().Path() +- } +- +- return []protocol.TypeHierarchyItem{{ +- Name: tname.Name(), +- Kind: cond(types.IsInterface(tname.Type()), protocol.Interface, protocol.Class), +- Detail: pkgpath, +- URI: declLoc.URI, +- Range: declLoc.Range, // (in theory this should be the entire declaration) +- SelectionRange: declLoc.Range, +- }}, nil +-} +- +-// Subtypes reports information about subtypes of the selected type. +-func Subtypes(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle, item protocol.TypeHierarchyItem) ([]protocol.TypeHierarchyItem, error) { +- return relatedTypes(ctx, snapshot, fh, item, methodsets.Subtype) +-} +- +-// Supertypes reports information about supertypes of the selected type. +-func Supertypes(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle, item protocol.TypeHierarchyItem) ([]protocol.TypeHierarchyItem, error) { +- return relatedTypes(ctx, snapshot, fh, item, methodsets.Supertype) +-} +- +-// relatedTypes is the common implementation of {Super,Sub}types. +-func relatedTypes(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle, item protocol.TypeHierarchyItem, rel methodsets.TypeRelation) ([]protocol.TypeHierarchyItem, error) { +- pkg, pgf, err := NarrowestPackageForFile(ctx, snapshot, fh.URI()) +- if err != nil { +- return nil, err +- } +- pos, err := pgf.PositionPos(item.Range.Start) +- if err != nil { +- return nil, err +- } +- +- var ( +- itemsMu sync.Mutex +- items []protocol.TypeHierarchyItem +- ) +- err = implementationsMsets(ctx, snapshot, pkg, pgf, pos, rel, func(pkgpath metadata.PackagePath, name string, abstract bool, loc protocol.Location) { +- if pkgpath == "" { +- pkgpath = "builtin" +- } +- +- itemsMu.Lock() +- defer itemsMu.Unlock() +- items = append(items, protocol.TypeHierarchyItem{ +- Name: name, +- Kind: cond(abstract, protocol.Interface, protocol.Class), +- Detail: string(pkgpath), +- URI: loc.URI, +- Range: loc.Range, // (in theory this should be the entire declaration) +- SelectionRange: loc.Range, +- }) +- }) +- if err != nil { +- return nil, err +- } +- +- // Sort by (package, name, URI, range) then +- // de-duplicate based on the same 4-tuple +- cmp := func(x, y protocol.TypeHierarchyItem) int { +- if d := strings.Compare(x.Detail, y.Detail); d != 0 { +- // Rank the original item's package first. +- if d := boolCompare(x.Detail == item.Detail, y.Detail == item.Detail); d != 0 { +- return -d +- } +- return d +- } +- if d := strings.Compare(x.Name, y.Name); d != 0 { +- return d +- } +- if d := strings.Compare(string(x.URI), string(y.URI)); d != 0 { +- return d +- } +- return protocol.CompareRange(x.SelectionRange, y.Range) +- } +- slices.SortFunc(items, cmp) +- eq := func(x, y protocol.TypeHierarchyItem) bool { return cmp(x, y) == 0 } +- items = slices.CompactFunc(items, eq) +- +- return items, nil +-} +diff -urN a/gopls/internal/golang/types_format.go b/gopls/internal/golang/types_format.go +--- a/gopls/internal/golang/types_format.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/golang/types_format.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,527 +0,0 @@ +-// Copyright 2020 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. 
+- +-package golang +- +-import ( +- "bytes" +- "context" +- "fmt" +- "go/ast" +- "go/doc" +- "go/printer" +- "go/token" +- "go/types" +- "strings" +- +- "golang.org/x/tools/gopls/internal/cache" +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/gopls/internal/settings" +- "golang.org/x/tools/gopls/internal/util/bug" +- "golang.org/x/tools/gopls/internal/util/tokeninternal" +- "golang.org/x/tools/internal/event" +- "golang.org/x/tools/internal/typeparams" +-) +- +-// FormatType returns the detail and kind for a types.Type. +-func FormatType(typ types.Type, qual types.Qualifier) (detail string, kind protocol.CompletionItemKind) { +- typ = typ.Underlying() +- if types.IsInterface(typ) { +- detail = "interface{...}" +- kind = protocol.InterfaceCompletion +- } else if _, ok := typ.(*types.Struct); ok { +- detail = "struct{...}" +- kind = protocol.StructCompletion +- } else { +- detail = types.TypeString(typ, qual) +- kind = protocol.ClassCompletion +- } +- return detail, kind +-} +- +-type signature struct { +- name, doc string +- typeParams, params, results []string +- variadic bool +- needResultParens bool +-} +- +-func (s *signature) Format() string { +- var b strings.Builder +- b.WriteByte('(') +- for i, p := range s.params { +- if i > 0 { +- b.WriteString(", ") +- } +- b.WriteString(p) +- } +- b.WriteByte(')') +- +- // Add space between parameters and results. +- if len(s.results) > 0 { +- b.WriteByte(' ') +- } +- if s.needResultParens { +- b.WriteByte('(') +- } +- for i, r := range s.results { +- if i > 0 { +- b.WriteString(", ") +- } +- b.WriteString(r) +- } +- if s.needResultParens { +- b.WriteByte(')') +- } +- return b.String() +-} +- +-func (s *signature) TypeParams() []string { +- return s.typeParams +-} +- +-func (s *signature) Params() []string { +- return s.params +-} +- +-// NewBuiltinSignature returns signature for the builtin object with a given +-// name, if a builtin object with the name exists. +-func NewBuiltinSignature(ctx context.Context, s *cache.Snapshot, name string) (*signature, error) { +- builtin, err := s.BuiltinFile(ctx) +- if err != nil { +- return nil, err +- } +- obj := builtin.File.Scope.Lookup(name) +- if obj == nil { +- return nil, fmt.Errorf("no builtin object for %s", name) +- } +- decl, ok := obj.Decl.(*ast.FuncDecl) +- if !ok { +- return nil, fmt.Errorf("no function declaration for builtin: %s", name) +- } +- if decl.Type == nil { +- return nil, fmt.Errorf("no type for builtin decl %s", decl.Name) +- } +- var variadic bool +- if decl.Type.Params.List != nil { +- numParams := len(decl.Type.Params.List) +- lastParam := decl.Type.Params.List[numParams-1] +- if _, ok := lastParam.Type.(*ast.Ellipsis); ok { +- variadic = true +- } +- } +- fset := tokeninternal.FileSetFor(builtin.Tok) +- params, _ := formatFieldList(ctx, fset, decl.Type.Params, variadic) +- results, needResultParens := formatFieldList(ctx, fset, decl.Type.Results, false) +- d := decl.Doc.Text() +- switch s.Options().HoverKind { +- case settings.SynopsisDocumentation: +- d = doc.Synopsis(d) +- case settings.NoDocumentation: +- d = "" +- } +- return &signature{ +- doc: d, +- name: name, +- needResultParens: needResultParens, +- params: params, +- results: results, +- variadic: variadic, +- }, nil +-} +- +-// replacer replaces some synthetic "type classes" used in the builtin file +-// with their most common constituent type. 
+-var replacer = strings.NewReplacer( +- `ComplexType`, `complex128`, +- `FloatType`, `float64`, +- `IntegerType`, `int`, +-) +- +-func formatFieldList(ctx context.Context, fset *token.FileSet, list *ast.FieldList, variadic bool) ([]string, bool) { +- if list == nil { +- return nil, false +- } +- var writeResultParens bool +- var result []string +- for i := 0; i < len(list.List); i++ { +- if i >= 1 { +- writeResultParens = true +- } +- p := list.List[i] +- cfg := printer.Config{Mode: printer.UseSpaces | printer.TabIndent, Tabwidth: 4} +- b := &bytes.Buffer{} +- if err := cfg.Fprint(b, fset, p.Type); err != nil { +- event.Error(ctx, fmt.Sprintf("error printing type %s", types.ExprString(p.Type)), err) +- continue +- } +- typ := replacer.Replace(b.String()) +- if len(p.Names) == 0 { +- result = append(result, typ) +- } +- for _, name := range p.Names { +- if name.Name != "" { +- if i == 0 { +- writeResultParens = true +- } +- result = append(result, fmt.Sprintf("%s %s", name.Name, typ)) +- } else { +- result = append(result, typ) +- } +- } +- } +- if variadic { +- result[len(result)-1] = strings.Replace(result[len(result)-1], "[]", "...", 1) +- } +- return result, writeResultParens +-} +- +-// NewSignature returns formatted signature for a types.Signature struct. +-func NewSignature(ctx context.Context, s *cache.Snapshot, pkg *cache.Package, sig *types.Signature, comment *ast.CommentGroup, qual types.Qualifier, mq MetadataQualifier) (*signature, error) { +- var tparams []string +- tpList := sig.TypeParams() +- for i := 0; i < tpList.Len(); i++ { +- tparam := tpList.At(i) +- // TODO: is it possible to reuse the logic from FormatVarType here? +- s := tparam.Obj().Name() + " " + tparam.Constraint().String() +- tparams = append(tparams, s) +- } +- +- params := make([]string, 0, sig.Params().Len()) +- for i := 0; i < sig.Params().Len(); i++ { +- el := sig.Params().At(i) +- typ, err := FormatVarType(ctx, s, pkg, el, qual, mq) +- if err != nil { +- return nil, err +- } +- if sig.Variadic() && i == sig.Params().Len()-1 { +- typ = strings.Replace(typ, "[]", "...", 1) +- } +- p := typ +- if el.Name() != "" { +- p = el.Name() + " " + typ +- } +- params = append(params, p) +- } +- +- var needResultParens bool +- results := make([]string, 0, sig.Results().Len()) +- for i := 0; i < sig.Results().Len(); i++ { +- if i >= 1 { +- needResultParens = true +- } +- el := sig.Results().At(i) +- typ, err := FormatVarType(ctx, s, pkg, el, qual, mq) +- if err != nil { +- return nil, err +- } +- if el.Name() == "" { +- results = append(results, typ) +- } else { +- if i == 0 { +- needResultParens = true +- } +- results = append(results, el.Name()+" "+typ) +- } +- } +- var d string +- if comment != nil { +- d = comment.Text() +- } +- switch s.Options().HoverKind { +- case settings.SynopsisDocumentation: +- d = doc.Synopsis(d) +- case settings.NoDocumentation: +- d = "" +- } +- return &signature{ +- doc: d, +- typeParams: tparams, +- params: params, +- results: results, +- variadic: sig.Variadic(), +- needResultParens: needResultParens, +- }, nil +-} +- +-// We look for 'invalidTypeString' to determine if we can use the fast path for +-// FormatVarType. +-var invalidTypeString = types.Typ[types.Invalid].String() +- +-// FormatVarType formats a *types.Var, accounting for type aliases. +-// To do this, it looks in the AST of the file in which the object is declared. +-// On any errors, it always falls back to types.TypeString. 
+-// +-// TODO(rfindley): this function could return the actual name used in syntax, +-// for better parameter names. +-func FormatVarType(ctx context.Context, snapshot *cache.Snapshot, srcpkg *cache.Package, obj *types.Var, qual types.Qualifier, mq MetadataQualifier) (string, error) { +- typeString := types.TypeString(obj.Type(), qual) +- // Fast path: if the type string does not contain 'invalid type', we no +- // longer need to do any special handling, thanks to materialized aliases in +- // Go 1.23+. +- // +- // Unfortunately, due to the handling of invalid types, we can't quite delete +- // the rather complicated preexisting logic of FormatVarType--it isn't an +- // acceptable regression to start printing "invalid type" in completion or +- // signature help. strings.Contains is conservative: the type string of a +- // valid type may actually contain "invalid type" (due to struct tags or +- // field formatting), but such cases should be exceedingly rare. +- if !strings.Contains(typeString, invalidTypeString) { +- return typeString, nil +- } +- +- // TODO(rfindley): This looks wrong. The previous comment said: +- // "If the given expr refers to a type parameter, then use the +- // object's Type instead of the type parameter declaration. This helps +- // format the instantiated type as opposed to the original undeclared +- // generic type". +- // +- // But of course, if obj is a type param, we are formatting a generic type +- // and not an instantiated type. Handling for instantiated types must be done +- // at a higher level. +- // +- // Left this during refactoring in order to preserve pre-existing logic. +- if typeparams.IsTypeParam(obj.Type()) { +- return typeString, nil +- } +- +- if isBuiltin(obj) { +- // This is defensive, though it is extremely unlikely we'll ever have a +- // builtin var. +- return typeString, nil +- } +- +- // TODO(rfindley): parsing to produce candidates can be costly; consider +- // using faster methods. +- targetpgf, pos, err := parseFull(ctx, snapshot, srcpkg.FileSet(), obj) +- if err != nil { +- return "", err // e.g. ctx cancelled +- } +- +- targetMeta := findFileInDeps(snapshot, srcpkg.Metadata(), targetpgf.URI) +- if targetMeta == nil { +- // If we have an object from type-checking, it should exist in a file in +- // the forward transitive closure. +- return "", bug.Errorf("failed to find file %q in deps of %q", targetpgf.URI, srcpkg.Metadata().ID) +- } +- +- decl, spec, field := findDeclInfo([]*ast.File{targetpgf.File}, pos) +- +- // We can't handle type parameters correctly, so we fall back on TypeString +- // for parameterized decls. +- if decl, _ := decl.(*ast.FuncDecl); decl != nil { +- if decl.Type.TypeParams.NumFields() > 0 { +- return typeString, nil // in generic function +- } +- if decl.Recv != nil && len(decl.Recv.List) > 0 { +- rtype := decl.Recv.List[0].Type +- if e, ok := rtype.(*ast.StarExpr); ok { +- rtype = e.X +- } +- if x, _, _, _ := typeparams.UnpackIndexExpr(rtype); x != nil { +- return typeString, nil // in method of generic type +- } +- } +- } +- if spec, _ := spec.(*ast.TypeSpec); spec != nil && spec.TypeParams.NumFields() > 0 { +- return typeString, nil // in generic type decl +- } +- +- if field == nil { +- // TODO(rfindley): we should never reach here from an ordinary var, so +- // should probably return an error here. +- return typeString, nil +- } +- expr := field.Type +- +- rq := requalifier(snapshot, targetpgf.File, targetMeta, mq) +- +- // The type names in the AST may not be correctly qualified. 
+- // Determine the package name to use based on the package that originated +- // the query and the package in which the type is declared. +- // We then qualify the value by cloning the AST node and editing it. +- expr = qualifyTypeExpr(expr, rq) +- +- // If the request came from a different package than the one in which the +- // types are defined, we may need to modify the qualifiers. +- return formatNodeFile(targetpgf.Tok, expr), nil +-} +- +-// qualifyTypeExpr clones the type expression expr after re-qualifying type +-// names using the given function, which accepts the current syntactic +-// qualifier (possibly "" for unqualified idents), and returns a new qualifier +-// (again, possibly "" if the identifier should be unqualified). +-// +-// The resulting expression may be inaccurate: without type-checking we don't +-// properly account for "." imported identifiers or builtins. +-// +-// TODO(rfindley): add many more tests for this function. +-func qualifyTypeExpr(expr ast.Expr, qf func(string) string) ast.Expr { +- switch expr := expr.(type) { +- case *ast.ArrayType: +- return &ast.ArrayType{ +- Lbrack: expr.Lbrack, +- Elt: qualifyTypeExpr(expr.Elt, qf), +- Len: expr.Len, +- } +- +- case *ast.BinaryExpr: +- if expr.Op != token.OR { +- return expr +- } +- return &ast.BinaryExpr{ +- X: qualifyTypeExpr(expr.X, qf), +- OpPos: expr.OpPos, +- Op: expr.Op, +- Y: qualifyTypeExpr(expr.Y, qf), +- } +- +- case *ast.ChanType: +- return &ast.ChanType{ +- Arrow: expr.Arrow, +- Begin: expr.Begin, +- Dir: expr.Dir, +- Value: qualifyTypeExpr(expr.Value, qf), +- } +- +- case *ast.Ellipsis: +- return &ast.Ellipsis{ +- Ellipsis: expr.Ellipsis, +- Elt: qualifyTypeExpr(expr.Elt, qf), +- } +- +- case *ast.FuncType: +- return &ast.FuncType{ +- Func: expr.Func, +- Params: qualifyFieldList(expr.Params, qf), +- Results: qualifyFieldList(expr.Results, qf), +- } +- +- case *ast.Ident: +- // Unqualified type (builtin, package local, or dot-imported). +- +- // Don't qualify names that look like builtins. +- // +- // Without type-checking this may be inaccurate. It could be made accurate +- // by doing syntactic object resolution for the entire package, but that +- // does not seem worthwhile and we generally want to avoid using +- // ast.Object, which may be inaccurate. 
+- if obj := types.Universe.Lookup(expr.Name); obj != nil { +- return expr +- } +- +- newName := qf("") +- if newName != "" { +- return &ast.SelectorExpr{ +- X: &ast.Ident{ +- NamePos: expr.Pos(), +- Name: newName, +- }, +- Sel: expr, +- } +- } +- return expr +- +- case *ast.IndexExpr: +- return &ast.IndexExpr{ +- X: qualifyTypeExpr(expr.X, qf), +- Lbrack: expr.Lbrack, +- Index: qualifyTypeExpr(expr.Index, qf), +- Rbrack: expr.Rbrack, +- } +- +- case *ast.IndexListExpr: +- indices := make([]ast.Expr, len(expr.Indices)) +- for i, idx := range expr.Indices { +- indices[i] = qualifyTypeExpr(idx, qf) +- } +- return &ast.IndexListExpr{ +- X: qualifyTypeExpr(expr.X, qf), +- Lbrack: expr.Lbrack, +- Indices: indices, +- Rbrack: expr.Rbrack, +- } +- +- case *ast.InterfaceType: +- return &ast.InterfaceType{ +- Interface: expr.Interface, +- Methods: qualifyFieldList(expr.Methods, qf), +- Incomplete: expr.Incomplete, +- } +- +- case *ast.MapType: +- return &ast.MapType{ +- Map: expr.Map, +- Key: qualifyTypeExpr(expr.Key, qf), +- Value: qualifyTypeExpr(expr.Value, qf), +- } +- +- case *ast.ParenExpr: +- return &ast.ParenExpr{ +- Lparen: expr.Lparen, +- Rparen: expr.Rparen, +- X: qualifyTypeExpr(expr.X, qf), +- } +- +- case *ast.SelectorExpr: +- if id, ok := expr.X.(*ast.Ident); ok { +- // qualified type +- newName := qf(id.Name) +- if newName == "" { +- return expr.Sel +- } +- return &ast.SelectorExpr{ +- X: &ast.Ident{ +- NamePos: id.NamePos, +- Name: newName, +- }, +- Sel: expr.Sel, +- } +- } +- return expr +- +- case *ast.StarExpr: +- return &ast.StarExpr{ +- Star: expr.Star, +- X: qualifyTypeExpr(expr.X, qf), +- } +- +- case *ast.StructType: +- return &ast.StructType{ +- Struct: expr.Struct, +- Fields: qualifyFieldList(expr.Fields, qf), +- Incomplete: expr.Incomplete, +- } +- +- default: +- return expr +- } +-} +- +-func qualifyFieldList(fl *ast.FieldList, qf func(string) string) *ast.FieldList { +- if fl == nil { +- return nil +- } +- if fl.List == nil { +- return &ast.FieldList{ +- Closing: fl.Closing, +- Opening: fl.Opening, +- } +- } +- list := make([]*ast.Field, 0, len(fl.List)) +- for _, f := range fl.List { +- list = append(list, &ast.Field{ +- Comment: f.Comment, +- Doc: f.Doc, +- Names: f.Names, +- Tag: f.Tag, +- Type: qualifyTypeExpr(f.Type, qf), +- }) +- } +- return &ast.FieldList{ +- Closing: fl.Closing, +- Opening: fl.Opening, +- List: list, +- } +-} +diff -urN a/gopls/internal/golang/undeclared.go b/gopls/internal/golang/undeclared.go +--- a/gopls/internal/golang/undeclared.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/golang/undeclared.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,350 +0,0 @@ +-// Copyright 2020 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package golang +- +-import ( +- "bytes" +- "fmt" +- "go/ast" +- "go/format" +- "go/token" +- "go/types" +- "strings" +- "unicode" +- +- "golang.org/x/tools/go/analysis" +- goastutil "golang.org/x/tools/go/ast/astutil" +- "golang.org/x/tools/go/ast/edge" +- "golang.org/x/tools/go/ast/inspector" +- "golang.org/x/tools/gopls/internal/cache" +- "golang.org/x/tools/gopls/internal/cache/parsego" +- "golang.org/x/tools/gopls/internal/util/cursorutil" +- "golang.org/x/tools/gopls/internal/util/typesutil" +- "golang.org/x/tools/internal/astutil" +- "golang.org/x/tools/internal/moreiters" +- "golang.org/x/tools/internal/typesinternal" +-) +- +-// The prefix for this error message changed in Go 1.20. 
+-var undeclaredNamePrefixes = []string{"undeclared name: ", "undefined: "} +- +-// undeclaredFixTitle generates a code action title for "undeclared name" errors, +-// suggesting the creation of the missing variable or function if applicable. +-func undeclaredFixTitle(curId inspector.Cursor, errMsg string) string { +- // Extract symbol name from error. +- var name string +- for _, prefix := range undeclaredNamePrefixes { +- if !strings.HasPrefix(errMsg, prefix) { +- continue +- } +- name = strings.TrimPrefix(errMsg, prefix) +- } +- ident, ok := curId.Node().(*ast.Ident) +- if !ok || ident.Name != name { +- return "" +- } +- // TODO: support create undeclared field +- if _, ok := curId.Parent().Node().(*ast.SelectorExpr); ok { +- return "" +- } +- +- // Undeclared quick fixes only work in function bodies. +- _, inFunc := moreiters.First(curId.Enclosing((*ast.BlockStmt)(nil))) +- if !inFunc { +- return "" +- } +- +- // Offer a fix. +- noun := cond(astutil.IsChildOf(curId, edge.CallExpr_Fun), "function", "variable") +- return fmt.Sprintf("Create %s %s", noun, name) +-} +- +-// createUndeclared generates a suggested declaration for an undeclared variable or function. +-func createUndeclared(pkg *cache.Package, pgf *parsego.File, start, end token.Pos) (*token.FileSet, *analysis.SuggestedFix, error) { +- var ( +- fset = pkg.FileSet() +- info = pkg.TypesInfo() +- file = pgf.File +- pos = start // don't use end +- ) +- curId, _ := pgf.Cursor.FindByPos(pos, pos) +- ident, ok := curId.Node().(*ast.Ident) +- if !ok { +- return nil, nil, fmt.Errorf("no identifier found") +- } +- +- // Check for a possible call expression, in which case we should add a +- // new function declaration. +- if astutil.IsChildOf(curId, edge.CallExpr_Fun) { +- return newFunctionDeclaration(curId, file, pkg.Types(), info, fset) +- } +- var ( +- firstRef *ast.Ident // We should insert the new declaration before the first occurrence of the undefined ident. +- assignTokPos token.Pos +- ) +- curFuncDecl, _ := moreiters.First(curId.Enclosing((*ast.FuncDecl)(nil))) +- // Search from enclosing FuncDecl to first use, since we can not use := syntax outside function. +- // Adds the missing colon under the following conditions: +- // 1) parent node must be an *ast.AssignStmt with Tok set to token.ASSIGN. +- // 2) ident must not be self assignment. +- // +- // For example, we should not add a colon when +- // a = a + 1 +- // ^ ^ cursor here +- for curRef := range curFuncDecl.Preorder((*ast.Ident)(nil)) { +- n := curRef.Node().(*ast.Ident) +- if n.Name == ident.Name && info.ObjectOf(n) == nil { +- firstRef = n +- if astutil.IsChildOf(curRef, edge.AssignStmt_Lhs) { +- assign := curRef.Parent().Node().(*ast.AssignStmt) +- if assign.Tok == token.ASSIGN && !referencesIdent(info, assign, ident) { +- assignTokPos = assign.TokPos +- } +- } +- break +- } +- } +- if assignTokPos.IsValid() { +- return fset, &analysis.SuggestedFix{ +- TextEdits: []analysis.TextEdit{{ +- Pos: assignTokPos, +- End: assignTokPos, +- NewText: []byte(":"), +- }}, +- }, nil +- } +- +- // firstRef should never be nil, at least one ident at cursor position should be found, +- // but be defensive. +- if firstRef == nil { +- return nil, nil, fmt.Errorf("no identifier found") +- } +- // TODO(adonovan): replace this with cursor. 
+- p, _ := goastutil.PathEnclosingInterval(file, firstRef.Pos(), firstRef.Pos()) +- insertBeforeStmt, err := stmtToInsertVarBefore(p, nil) +- if err != nil { +- return nil, nil, fmt.Errorf("could not locate insertion point: %v", err) +- } +- indent, err := pgf.Indentation(insertBeforeStmt.Pos()) +- if err != nil { +- return nil, nil, err +- } +- typs := typesutil.TypesFromContext(info, curId) +- if typs == nil { +- // Default to 0. +- typs = []types.Type{types.Typ[types.Int]} +- } +- expr, _ := typesinternal.ZeroExpr(typs[0], typesinternal.FileQualifier(file, pkg.Types())) +- assignStmt := &ast.AssignStmt{ +- Lhs: []ast.Expr{ast.NewIdent(ident.Name)}, +- Tok: token.DEFINE, +- Rhs: []ast.Expr{expr}, +- } +- var buf bytes.Buffer +- if err := format.Node(&buf, fset, assignStmt); err != nil { +- return nil, nil, err +- } +- newLineIndent := "\n" + indent +- assignment := strings.ReplaceAll(buf.String(), "\n", newLineIndent) + newLineIndent +- +- return fset, &analysis.SuggestedFix{ +- TextEdits: []analysis.TextEdit{ +- { +- Pos: insertBeforeStmt.Pos(), +- End: insertBeforeStmt.Pos(), +- NewText: []byte(assignment), +- }, +- }, +- }, nil +-} +- +-// referencesIdent checks whether the given undefined ident appears in the right-hand side +-// of an assign statement +-func referencesIdent(info *types.Info, assign *ast.AssignStmt, ident *ast.Ident) bool { +- for _, rhs := range assign.Rhs { +- for n := range ast.Preorder(rhs) { +- if id, ok := n.(*ast.Ident); ok && +- id.Name == ident.Name && info.Uses[id] == nil { +- return true +- } +- } +- } +- return false +-} +- +-// newFunctionDeclaration returns a suggested declaration for the ident identified by curId +-// curId always points at an ast.Ident at the CallExpr_Fun edge. +-func newFunctionDeclaration(curId inspector.Cursor, file *ast.File, pkg *types.Package, info *types.Info, fset *token.FileSet) (*token.FileSet, *analysis.SuggestedFix, error) { +- +- id := curId.Node().(*ast.Ident) +- call := curId.Parent().Node().(*ast.CallExpr) +- +- // Find the enclosing function, so that we can add the new declaration +- // below. +- funcdecl, _ := cursorutil.FirstEnclosing[*ast.FuncDecl](curId) +- if funcdecl == nil { +- // TODO(rstambler): Support the situation when there is no enclosing +- // function. +- return nil, nil, fmt.Errorf("no enclosing function found: %v", curId) +- } +- +- pos := funcdecl.End() +- +- var paramNames []string +- var paramTypes []types.Type +- // keep track of all param names to later ensure uniqueness +- nameCounts := map[string]int{} +- for _, arg := range call.Args { +- typ := info.TypeOf(arg) +- if typ == nil { +- return nil, nil, fmt.Errorf("unable to determine type for %s", arg) +- } +- +- switch t := typ.(type) { +- // this is the case where another function call returning multiple +- // results is used as an argument +- case *types.Tuple: +- n := t.Len() +- for i := range n { +- name := typeToArgName(t.At(i).Type()) +- nameCounts[name]++ +- +- paramNames = append(paramNames, name) +- paramTypes = append(paramTypes, types.Default(t.At(i).Type())) +- } +- +- default: +- // does the argument have a name we can reuse? 
+- // only happens in case of a *ast.Ident +- var name string +- if ident, ok := arg.(*ast.Ident); ok { +- name = ident.Name +- } +- +- if name == "" { +- name = typeToArgName(typ) +- } +- +- nameCounts[name]++ +- +- paramNames = append(paramNames, name) +- paramTypes = append(paramTypes, types.Default(typ)) +- } +- } +- +- for n, c := range nameCounts { +- // Any names we saw more than once will need a unique suffix added +- // on. Reset the count to 1 to act as the suffix for the first +- // occurrence of that name. +- if c >= 2 { +- nameCounts[n] = 1 +- } else { +- delete(nameCounts, n) +- } +- } +- +- params := &ast.FieldList{} +- qual := typesinternal.FileQualifier(file, pkg) +- for i, name := range paramNames { +- if suffix, repeats := nameCounts[name]; repeats { +- nameCounts[name]++ +- name = fmt.Sprintf("%s%d", name, suffix) +- } +- +- // only worth checking after previous param in the list +- if i > 0 { +- // if type of parameter at hand is the same as the previous one, +- // add it to the previous param list of identifiers so to have: +- // (s1, s2 string) +- // and not +- // (s1 string, s2 string) +- if paramTypes[i] == paramTypes[i-1] { +- params.List[len(params.List)-1].Names = append(params.List[len(params.List)-1].Names, ast.NewIdent(name)) +- continue +- } +- } +- +- params.List = append(params.List, &ast.Field{ +- Names: []*ast.Ident{ +- ast.NewIdent(name), +- }, +- Type: typesinternal.TypeExpr(paramTypes[i], qual), +- }) +- } +- +- rets := &ast.FieldList{} +- retTypes := typesutil.TypesFromContext(info, curId.Parent()) +- for _, rt := range retTypes { +- rets.List = append(rets.List, &ast.Field{ +- Type: typesinternal.TypeExpr(rt, qual), +- }) +- } +- +- decl := &ast.FuncDecl{ +- Name: ast.NewIdent(id.Name), +- Type: &ast.FuncType{ +- Params: params, +- Results: rets, +- }, +- Body: &ast.BlockStmt{ +- List: []ast.Stmt{ +- &ast.ExprStmt{ +- X: &ast.CallExpr{ +- Fun: ast.NewIdent("panic"), +- Args: []ast.Expr{ +- &ast.BasicLit{ +- Value: `"unimplemented"`, +- }, +- }, +- }, +- }, +- }, +- }, +- } +- +- b := bytes.NewBufferString("\n\n") +- if err := format.Node(b, fset, decl); err != nil { +- return nil, nil, err +- } +- return fset, &analysis.SuggestedFix{ +- TextEdits: []analysis.TextEdit{{ +- Pos: pos, +- End: pos, +- NewText: b.Bytes(), +- }}, +- }, nil +-} +- +-func typeToArgName(ty types.Type) string { +- s := types.Default(ty).String() +- +- switch t := types.Unalias(ty).(type) { +- case *types.Basic: +- // use first letter in type name for basic types +- return s[0:1] +- case *types.Slice: +- // use element type to decide var name for slices +- return typeToArgName(t.Elem()) +- case *types.Array: +- // use element type to decide var name for arrays +- return typeToArgName(t.Elem()) +- case *types.Chan: +- return "ch" +- } +- +- s = strings.TrimFunc(s, func(r rune) bool { +- return !unicode.IsLetter(r) +- }) +- +- if s == "error" { +- return "err" +- } +- +- // remove package (if present) +- // and make first letter lowercase +- a := []rune(s[strings.LastIndexByte(s, '.')+1:]) +- a[0] = unicode.ToLower(a[0]) +- return string(a) +-} +diff -urN a/gopls/internal/golang/util.go b/gopls/internal/golang/util.go +--- a/gopls/internal/golang/util.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/golang/util.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,378 +0,0 @@ +-// Copyright 2019 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. 
+- +-package golang +- +-import ( +- "context" +- "go/ast" +- "go/printer" +- "go/token" +- "go/types" +- "regexp" +- "slices" +- "strings" +- "unicode" +- +- "golang.org/x/tools/gopls/internal/cache" +- "golang.org/x/tools/gopls/internal/cache/metadata" +- "golang.org/x/tools/gopls/internal/cache/parsego" +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/gopls/internal/util/bug" +- "golang.org/x/tools/gopls/internal/util/tokeninternal" +-) +- +-// IsGenerated reads and parses the header of the file denoted by uri +-// and reports whether it [ast.IsGenerated]. +-func IsGenerated(ctx context.Context, snapshot *cache.Snapshot, uri protocol.DocumentURI) bool { +- fh, err := snapshot.ReadFile(ctx, uri) +- if err != nil { +- return false +- } +- pgf, err := snapshot.ParseGo(ctx, fh, parsego.Header) +- if err != nil { +- return false +- } +- return ast.IsGenerated(pgf.File) +-} +- +-// FormatNode returns the "pretty-print" output for an ast node. +-func FormatNode(fset *token.FileSet, n ast.Node) string { +- var buf strings.Builder +- if err := printer.Fprint(&buf, fset, n); err != nil { +- // TODO(rfindley): we should use bug.Reportf here. +- // We encounter this during completion.resolveInvalid. +- return "" +- } +- return buf.String() +-} +- +-// formatNodeFile is like FormatNode, but requires only the token.File for the +-// syntax containing the given ast node. +-func formatNodeFile(file *token.File, n ast.Node) string { +- fset := tokeninternal.FileSetFor(file) +- return FormatNode(fset, n) +-} +- +-// findFileInDeps finds package metadata containing URI in the transitive +-// dependencies of m. When using the Go command, the answer is unique. +-func findFileInDeps(s metadata.Source, mp *metadata.Package, uri protocol.DocumentURI) *metadata.Package { +- seen := make(map[PackageID]bool) +- var search func(*metadata.Package) *metadata.Package +- search = func(mp *metadata.Package) *metadata.Package { +- if seen[mp.ID] { +- return nil +- } +- seen[mp.ID] = true +- if slices.Contains(mp.CompiledGoFiles, uri) { +- return mp +- } +- for _, dep := range mp.DepsByPkgPath { +- mp := s.Metadata(dep) +- if mp == nil { +- bug.Reportf("nil metadata for %q", dep) +- continue +- } +- if found := search(mp); found != nil { +- return found +- } +- } +- return nil +- } +- return search(mp) +-} +- +-// requalifier returns a function that re-qualifies identifiers and qualified +-// identifiers contained in targetFile using the given metadata qualifier. +-func requalifier(s metadata.Source, targetFile *ast.File, targetMeta *metadata.Package, mq MetadataQualifier) func(string) string { +- qm := map[string]string{ +- "": mq(targetMeta.Name, "", targetMeta.PkgPath), +- } +- +- // Construct mapping of import paths to their defined or implicit names. +- for _, imp := range targetFile.Imports { +- name, pkgName, impPath, pkgPath := importInfo(s, imp, targetMeta) +- +- // Re-map the target name for the source file. +- qm[name] = mq(pkgName, impPath, pkgPath) +- } +- +- return func(name string) string { +- if newName, ok := qm[name]; ok { +- return newName +- } +- return name +- } +-} +- +-// A MetadataQualifier is a function that qualifies an identifier declared in a +-// package with the given package name, import path, and package path. +-// +-// In scenarios where metadata is missing the provided PackageName and +-// PackagePath may be empty, but ImportPath must always be non-empty. 
+-type MetadataQualifier func(PackageName, ImportPath, PackagePath) string +- +-// MetadataQualifierForFile returns a metadata qualifier that chooses the best +-// qualification of an imported package relative to the file f in package with +-// metadata m. +-func MetadataQualifierForFile(s metadata.Source, f *ast.File, mp *metadata.Package) MetadataQualifier { +- // Record local names for import paths. +- localNames := make(map[ImportPath]string) // local names for imports in f +- for _, imp := range f.Imports { +- name, _, impPath, _ := importInfo(s, imp, mp) +- localNames[impPath] = name +- } +- +- // Record a package path -> import path mapping. +- inverseDeps := make(map[PackageID]PackagePath) +- for path, id := range mp.DepsByPkgPath { +- inverseDeps[id] = path +- } +- importsByPkgPath := make(map[PackagePath]ImportPath) // best import paths by pkgPath +- for impPath, id := range mp.DepsByImpPath { +- if id == "" { +- continue +- } +- pkgPath := inverseDeps[id] +- _, hasPath := importsByPkgPath[pkgPath] +- _, hasImp := localNames[impPath] +- // In rare cases, there may be multiple import paths with the same package +- // path. In such scenarios, prefer an import path that already exists in +- // the file. +- if !hasPath || hasImp { +- importsByPkgPath[pkgPath] = impPath +- } +- } +- +- return func(pkgName PackageName, impPath ImportPath, pkgPath PackagePath) string { +- // If supplied, translate the package path to an import path in the source +- // package. +- if pkgPath != "" { +- if srcImp := importsByPkgPath[pkgPath]; srcImp != "" { +- impPath = srcImp +- } +- if pkgPath == mp.PkgPath { +- return "" +- } +- } +- if localName, ok := localNames[impPath]; ok && impPath != "" { +- return localName +- } +- if pkgName != "" { +- return string(pkgName) +- } +- idx := strings.LastIndexByte(string(impPath), '/') +- return string(impPath[idx+1:]) +- } +-} +- +-// importInfo collects information about the import specified by imp, +-// extracting its file-local name, package name, import path, and package path. +-// +-// If metadata is missing for the import, the resulting package name and +-// package path may be empty, and the file local name may be guessed based on +-// the import path. +-// +-// Note: previous versions of this helper used a PackageID->PackagePath map +-// extracted from m, for extracting package path even in the case where +-// metadata for a dep was missing. This should not be necessary, as we should +-// always have metadata for IDs contained in DepsByPkgPath. +-func importInfo(s metadata.Source, imp *ast.ImportSpec, mp *metadata.Package) (string, PackageName, ImportPath, PackagePath) { +- var ( +- name string // local name +- pkgName PackageName +- impPath = metadata.UnquoteImportPath(imp) +- pkgPath PackagePath +- ) +- +- // If the import has a local name, use it. +- if imp.Name != nil { +- name = imp.Name.Name +- } +- +- // Try to find metadata for the import. If successful and there is no local +- // name, the package name is the local name. +- if depID := mp.DepsByImpPath[impPath]; depID != "" { +- if depMP := s.Metadata(depID); depMP != nil { +- if name == "" { +- name = string(depMP.Name) +- } +- pkgName = depMP.Name +- pkgPath = depMP.PkgPath +- } +- } +- +- // If the local name is still unknown, guess it based on the import path. +- if name == "" { +- idx := strings.LastIndexByte(string(impPath), '/') +- name = string(impPath[idx+1:]) +- } +- return name, pkgName, impPath, pkgPath +-} +- +-// isDirective reports whether c is a comment directive. 
+-// +-// Copied and adapted from go/src/go/ast/ast.go. +-func isDirective(c string) bool { +- if len(c) < 3 { +- return false +- } +- if c[1] != '/' { +- return false +- } +- //-style comment (no newline at the end) +- c = c[2:] +- if len(c) == 0 { +- // empty line +- return false +- } +- // "//line " is a line directive. +- // (The // has been removed.) +- if strings.HasPrefix(c, "line ") { +- return true +- } +- +- // "//[a-z0-9]+:[a-z0-9]" +- // (The // has been removed.) +- colon := strings.Index(c, ":") +- if colon <= 0 || colon+1 >= len(c) { +- return false +- } +- for i := 0; i <= colon+1; i++ { +- if i == colon { +- continue +- } +- b := c[i] +- if !('a' <= b && b <= 'z' || '0' <= b && b <= '9') { +- return false +- } +- } +- return true +-} +- +-// embeddedIdent returns the type name identifier for an embedding x, if x in a +-// valid embedding. Otherwise, it returns nil. +-// +-// Spec: An embedded field must be specified as a type name T or as a pointer +-// to a non-interface type name *T +-func embeddedIdent(x ast.Expr) *ast.Ident { +- if star, ok := x.(*ast.StarExpr); ok { +- x = star.X +- } +- switch ix := x.(type) { // check for instantiated receivers +- case *ast.IndexExpr: +- x = ix.X +- case *ast.IndexListExpr: +- x = ix.X +- } +- switch x := x.(type) { +- case *ast.Ident: +- return x +- case *ast.SelectorExpr: +- if _, ok := x.X.(*ast.Ident); ok { +- return x.Sel +- } +- } +- return nil +-} +- +-// An importFunc is an implementation of the single-method +-// types.Importer interface based on a function value. +-type ImporterFunc func(path string) (*types.Package, error) +- +-func (f ImporterFunc) Import(path string) (*types.Package, error) { return f(path) } +- +-// isBuiltin reports whether obj is a built-in symbol (e.g. append, iota, error.Error, unsafe.Slice). +-// All other symbols have a valid position and a valid package. +-func isBuiltin(obj types.Object) bool { return !obj.Pos().IsValid() } +- +-// btoi returns int(b) as proposed in #64825. +-func btoi(b bool) int { +- if b { +- return 1 +- } else { +- return 0 +- } +-} +- +-// boolCompare is a comparison function for booleans, returning -1 if x < y, 0 +-// if x == y, and 1 if x > y, where false < true. +-func boolCompare(x, y bool) int { +- return btoi(x) - btoi(y) +-} +- +-// AbbreviateVarName returns an abbreviated var name based on the given full +-// name (which may be a type name, for example). +-// +-// See the simple heuristics documented in line. +-func AbbreviateVarName(s string) string { +- var ( +- b strings.Builder +- useNextUpper bool +- ) +- for i, r := range s { +- // Stop if we encounter a non-identifier rune. +- if !unicode.IsLetter(r) && !unicode.IsNumber(r) { +- break +- } +- +- // Otherwise, take the first letter from word boundaries, assuming +- // camelCase. +- if i == 0 { +- b.WriteRune(unicode.ToLower(r)) +- } +- +- if unicode.IsUpper(r) { +- if useNextUpper { +- b.WriteRune(unicode.ToLower(r)) +- useNextUpper = false +- } +- } else { +- useNextUpper = true +- } +- } +- return b.String() +-} +- +-// CopyrightComment returns the copyright comment group from the input file, or +-// nil if not found. +-func CopyrightComment(file *ast.File) *ast.CommentGroup { +- if len(file.Comments) == 0 { +- return nil +- } +- +- // Copyright should appear before package decl and must be the first +- // comment group. 
+- if c := file.Comments[0]; c.Pos() < file.Package && c != file.Doc && +- !isDirective(c.List[0].Text) && +- strings.Contains(strings.ToLower(c.List[0].Text), "copyright") { +- return c +- } +- +- return nil +-} +- +-var buildConstraintRe = regexp.MustCompile(`^//(go:build|\s*\+build).*`) +- +-// buildConstraintComment returns the build constraint comment from the input +-// file. +-// Returns nil if not found. +-func buildConstraintComment(file *ast.File) *ast.Comment { +- for _, cg := range file.Comments { +- // In Go files a build constraint must appear before the package clause. +- // See https://pkg.go.dev/cmd/go#hdr-Build_constraints +- if cg.Pos() > file.Package { +- return nil +- } +- +- for _, c := range cg.List { +- // TODO: use ast.ParseDirective when available (#68021). +- if buildConstraintRe.MatchString(c.Text) { +- return c +- } +- } +- } +- +- return nil +-} +diff -urN a/gopls/internal/golang/workspace_symbol.go b/gopls/internal/golang/workspace_symbol.go +--- a/gopls/internal/golang/workspace_symbol.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/golang/workspace_symbol.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,562 +0,0 @@ +-// Copyright 2020 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package golang +- +-import ( +- "cmp" +- "context" +- "fmt" +- "path/filepath" +- "runtime" +- "slices" +- "sort" +- "strings" +- "unicode" +- +- "golang.org/x/tools/gopls/internal/cache" +- "golang.org/x/tools/gopls/internal/cache/metadata" +- "golang.org/x/tools/gopls/internal/cache/symbols" +- "golang.org/x/tools/gopls/internal/fuzzy" +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/gopls/internal/settings" +- "golang.org/x/tools/internal/event" +-) +- +-// maxSymbols defines the maximum number of symbol results that should ever be +-// sent in response to a client. +-const maxSymbols = 100 +- +-// WorkspaceSymbols matches symbols across all views using the given query, +-// according to the match semantics parameterized by matcherType and style. +-// +-// The workspace symbol method is defined in the spec as follows: +-// +-// The workspace symbol request is sent from the client to the server to +-// list project-wide symbols matching the query string. +-// +-// It is unclear what "project-wide" means here, but given the parameters of +-// workspace/symbol do not include any workspace identifier, then it has to be +-// assumed that "project-wide" means "across all workspaces". Hence why +-// WorkspaceSymbols receives the views []View. +-// +-// However, it then becomes unclear what it would mean to call WorkspaceSymbols +-// with a different configured SymbolMatcher per View. Therefore we assume that +-// Session level configuration will define the SymbolMatcher to be used for the +-// WorkspaceSymbols method. 
+-func WorkspaceSymbols(ctx context.Context, matcher settings.SymbolMatcher, style settings.SymbolStyle, snapshots []*cache.Snapshot, query string) ([]protocol.SymbolInformation, error) { +- ctx, done := event.Start(ctx, "golang.WorkspaceSymbols") +- defer done() +- if query == "" { +- return nil, nil +- } +- +- var s symbolizer +- switch style { +- case settings.DynamicSymbols: +- s = dynamicSymbolMatch +- case settings.FullyQualifiedSymbols: +- s = fullyQualifiedSymbolMatch +- case settings.PackageQualifiedSymbols: +- s = packageSymbolMatch +- default: +- panic(fmt.Errorf("unknown symbol style: %v", style)) +- } +- +- return collectSymbols(ctx, snapshots, matcher, s, query) +-} +- +-// A matcherFunc returns the index and score of a symbol match. +-// +-// See the comment for symbolCollector for more information. +-type matcherFunc func(chunks []string) (int, float64) +- +-// A symbolizer returns the best symbol match for a name with pkg, according to +-// some heuristic. The symbol name is passed as the slice nameParts of logical +-// name pieces. For example, for myType.field the caller can pass either +-// []string{"myType.field"} or []string{"myType.", "field"}. +-// +-// See the comment for symbolCollector for more information. +-// +-// The space argument is an empty slice with spare capacity that may be used +-// to allocate the result. +-type symbolizer func(space []string, name string, pkg *metadata.Package, m matcherFunc) ([]string, float64) +- +-func fullyQualifiedSymbolMatch(space []string, name string, pkg *metadata.Package, matcher matcherFunc) ([]string, float64) { +- if _, score := dynamicSymbolMatch(space, name, pkg, matcher); score > 0 { +- return append(space, string(pkg.PkgPath), ".", name), score +- } +- return nil, 0 +-} +- +-func dynamicSymbolMatch(space []string, name string, pkg *metadata.Package, matcher matcherFunc) ([]string, float64) { +- if metadata.IsCommandLineArguments(pkg.ID) { +- // command-line-arguments packages have a non-sensical package path, so +- // just use their package name. +- return packageSymbolMatch(space, name, pkg, matcher) +- } +- +- var score float64 +- +- endsInPkgName := strings.HasSuffix(string(pkg.PkgPath), string(pkg.Name)) +- +- // If the package path does not end in the package name, we need to check the +- // package-qualified symbol as an extra pass first. +- if !endsInPkgName { +- pkgQualified := append(space, string(pkg.Name), ".", name) +- idx, score := matcher(pkgQualified) +- nameStart := len(pkg.Name) + 1 +- if score > 0 { +- // If our match is contained entirely within the unqualified portion, +- // just return that. +- if idx >= nameStart { +- return append(space, name), score +- } +- // Lower the score for matches that include the package name. +- return pkgQualified, score * 0.8 +- } +- } +- +- // Now try matching the fully qualified symbol. +- fullyQualified := append(space, string(pkg.PkgPath), ".", name) +- idx, score := matcher(fullyQualified) +- +- // As above, check if we matched just the unqualified symbol name. +- nameStart := len(pkg.PkgPath) + 1 +- if idx >= nameStart { +- return append(space, name), score +- } +- +- // If our package path ends in the package name, we'll have skipped the +- // initial pass above, so check if we matched just the package-qualified +- // name. 
+- if endsInPkgName && idx >= 0 { +- pkgStart := len(pkg.PkgPath) - len(pkg.Name) +- if idx >= pkgStart { +- return append(space, string(pkg.Name), ".", name), score +- } +- } +- +- // Our match was not contained within the unqualified or package qualified +- // symbol. Return the fully qualified symbol but discount the score. +- return fullyQualified, score * 0.6 +-} +- +-func packageSymbolMatch(space []string, name string, pkg *metadata.Package, matcher matcherFunc) ([]string, float64) { +- qualified := append(space, string(pkg.Name), ".", name) +- if _, s := matcher(qualified); s > 0 { +- return qualified, s +- } +- return nil, 0 +-} +- +-func buildMatcher(matcher settings.SymbolMatcher, query string) matcherFunc { +- switch matcher { +- case settings.SymbolFuzzy: +- return parseQuery(query, newFuzzyMatcher) +- case settings.SymbolFastFuzzy: +- return parseQuery(query, func(query string) matcherFunc { +- return fuzzy.NewSymbolMatcher(query).Match +- }) +- case settings.SymbolCaseSensitive: +- return matchExact(query) +- case settings.SymbolCaseInsensitive: +- q := strings.ToLower(query) +- exact := matchExact(q) +- wrapper := []string{""} +- return func(chunks []string) (int, float64) { +- s := strings.Join(chunks, "") +- wrapper[0] = strings.ToLower(s) +- return exact(wrapper) +- } +- } +- panic(fmt.Errorf("unknown symbol matcher: %v", matcher)) +-} +- +-func newFuzzyMatcher(query string) matcherFunc { +- fm := fuzzy.NewMatcher(query) +- return func(chunks []string) (int, float64) { +- score := float64(fm.ScoreChunks(chunks)) +- ranges := fm.MatchedRanges() +- if len(ranges) > 0 { +- return ranges[0], score +- } +- return -1, score +- } +-} +- +-// parseQuery parses a field-separated symbol query, extracting the special +-// characters listed below, and returns a matcherFunc corresponding to the AND +-// of all field queries. +-// +-// Special characters: +-// +-// ^ match exact prefix +-// $ match exact suffix +-// ' match exact +-// +-// In all three of these special queries, matches are 'smart-cased', meaning +-// they are case sensitive if the symbol query contains any upper-case +-// characters, and case insensitive otherwise. 
+-func parseQuery(q string, newMatcher func(string) matcherFunc) matcherFunc { +- fields := strings.Fields(q) +- if len(fields) == 0 { +- return func([]string) (int, float64) { return -1, 0 } +- } +- var funcs []matcherFunc +- for _, field := range fields { +- var f matcherFunc +- switch { +- case strings.HasPrefix(field, "^"): +- prefix := field[1:] +- f = smartCase(prefix, func(chunks []string) (int, float64) { +- s := strings.Join(chunks, "") +- if strings.HasPrefix(s, prefix) { +- return 0, 1 +- } +- return -1, 0 +- }) +- case strings.HasPrefix(field, "'"): +- exact := field[1:] +- f = smartCase(exact, matchExact(exact)) +- case strings.HasSuffix(field, "$"): +- suffix := field[0 : len(field)-1] +- f = smartCase(suffix, func(chunks []string) (int, float64) { +- s := strings.Join(chunks, "") +- if strings.HasSuffix(s, suffix) { +- return len(s) - len(suffix), 1 +- } +- return -1, 0 +- }) +- default: +- f = newMatcher(field) +- } +- funcs = append(funcs, f) +- } +- if len(funcs) == 1 { +- return funcs[0] +- } +- return comboMatcher(funcs).match +-} +- +-func matchExact(exact string) matcherFunc { +- return func(chunks []string) (int, float64) { +- s := strings.Join(chunks, "") +- if idx := strings.LastIndex(s, exact); idx >= 0 { +- return idx, 1 +- } +- return -1, 0 +- } +-} +- +-// smartCase returns a matcherFunc that is case-sensitive if q contains any +-// upper-case characters, and case-insensitive otherwise. +-func smartCase(q string, m matcherFunc) matcherFunc { +- insensitive := strings.ToLower(q) == q +- wrapper := []string{""} +- return func(chunks []string) (int, float64) { +- s := strings.Join(chunks, "") +- if insensitive { +- s = strings.ToLower(s) +- } +- wrapper[0] = s +- return m(wrapper) +- } +-} +- +-type comboMatcher []matcherFunc +- +-func (c comboMatcher) match(chunks []string) (int, float64) { +- score := 1.0 +- first := 0 +- for _, f := range c { +- idx, s := f(chunks) +- if idx < first { +- first = idx +- } +- score *= s +- } +- return first, score +-} +- +-// collectSymbols calls snapshot.Symbols to walk the syntax trees of +-// all files in the views' current snapshots, and returns a sorted, +-// scored list of symbols that best match the parameters. +-// +-// How it matches symbols is parameterized by two interfaces: +-// - A matcherFunc determines how well a string symbol matches a query. It +-// returns a non-negative score indicating the quality of the match. A score +-// of zero indicates no match. +-// - A symbolizer determines how we extract the symbol for an object. This +-// enables the 'symbolStyle' configuration option. +-func collectSymbols(ctx context.Context, snapshots []*cache.Snapshot, matcherType settings.SymbolMatcher, symbolizer symbolizer, query string) ([]protocol.SymbolInformation, error) { +- // Extract symbols from all files. +- var work []symbolFile +- seen := make(map[protocol.DocumentURI]*metadata.Package) // only scan each file once +- +- for _, snapshot := range snapshots { +- // Use the root view URIs for determining (lexically) +- // whether a URI is in any open workspace. 
+- folderURI := snapshot.Folder() +- +- pathIncluded := cache.PathIncludeFunc(snapshot.Options().DirectoryFilters) +- folder := filepath.ToSlash(folderURI.Path()) +- +- var ( +- mps []*metadata.Package +- err error +- ) +- if snapshot.Options().SymbolScope == settings.AllSymbolScope { +- mps, err = snapshot.AllMetadata(ctx) +- } else { +- mps, err = snapshot.WorkspaceMetadata(ctx) +- } +- if err != nil { +- return nil, err +- } +- metadata.RemoveIntermediateTestVariants(&mps) +- +- // We'll process packages in order to consider candidate symbols. +- // +- // The order here doesn't matter for correctness, but can affect +- // performance: +- // - As workspace packages score higher than non-workspace packages, +- // sort them first to increase the likelihood that non-workspace +- // symbols are skipped. +- // - As files can be contained in multiple packages, sort by wider +- // packages first, to cover all files with fewer packages. +- workspacePackages := snapshot.WorkspacePackages() +- slices.SortFunc(mps, func(a, b *metadata.Package) int { +- _, aworkspace := workspacePackages.Value(a.ID) +- _, bworkspace := workspacePackages.Value(b.ID) +- if cmp := boolCompare(aworkspace, bworkspace); cmp != 0 { +- return -cmp // workspace packages first +- } +- return -cmp.Compare(len(a.CompiledGoFiles), len(b.CompiledGoFiles)) // widest first +- }) +- +- // Filter out unneeded mps in place, and collect file<->package +- // associations. +- var ids []metadata.PackageID +- for _, mp := range mps { +- used := false +- for _, list := range [][]protocol.DocumentURI{mp.GoFiles, mp.CompiledGoFiles} { +- for _, uri := range list { +- if _, ok := seen[uri]; !ok { +- seen[uri] = mp +- used = true +- } +- } +- } +- if used { +- mps[len(ids)] = mp +- ids = append(ids, mp.ID) +- } +- } +- mps = mps[:len(ids)] +- +- symbolPkgs, err := snapshot.Symbols(ctx, ids...) +- if err != nil { +- return nil, err +- } +- +- for i, sp := range symbolPkgs { +- if sp == nil { +- continue +- } +- mp := mps[i] +- for i, syms := range sp.Symbols { +- uri := sp.Files[i] +- norm := filepath.ToSlash(uri.Path()) +- nm := strings.TrimPrefix(norm, folder) +- if !pathIncluded(nm) { +- continue +- } +- // Only scan each file once. +- if seen[uri] != mp { +- continue +- } +- // seen[uri] = true +- _, workspace := workspacePackages.Value(mp.ID) +- work = append(work, symbolFile{mp, uri, syms, workspace}) +- } +- } +- } +- +- // Match symbols in parallel. +- // Each worker has its own symbolStore, +- // which we merge at the end. +- nmatchers := runtime.GOMAXPROCS(-1) // matching is CPU bound +- results := make(chan *symbolStore) +- for i := range nmatchers { +- go func(i int) { +- matcher := buildMatcher(matcherType, query) +- store := new(symbolStore) +- // Assign files to workers in round-robin fashion. +- for j := i; j < len(work); j += nmatchers { +- matchFile(store, symbolizer, matcher, work[j]) +- } +- results <- store +- }(i) +- } +- +- // Gather and merge results as they arrive. +- var unified symbolStore +- for range nmatchers { +- store := <-results +- for _, syms := range store.res { +- if syms != nil { +- unified.store(syms) +- } +- } +- } +- return unified.results(), nil +-} +- +-// symbolFile holds symbol information for a single file. +-type symbolFile struct { +- mp *metadata.Package +- uri protocol.DocumentURI +- syms []symbols.Symbol +- workspace bool +-} +- +-// matchFile scans a symbol file and adds matching symbols to the store. 
+-func matchFile(store *symbolStore, symbolizer symbolizer, matcher matcherFunc, f symbolFile) { +- space := make([]string, 0, 3) +- for _, sym := range f.syms { +- symbolParts, score := symbolizer(space, sym.Name, f.mp, matcher) +- +- // Check if the score is too low before applying any downranking. +- if store.tooLow(score) { +- continue +- } +- +- // Factors to apply to the match score for the purpose of downranking +- // results. +- // +- // These numbers were crudely calibrated based on trial-and-error using a +- // small number of sample queries. Adjust as necessary. +- // +- // All factors are multiplicative, meaning if more than one applies they are +- // multiplied together. +- const ( +- // nonWorkspaceFactor is applied to symbols outside the workspace. +- // Developers are less likely to want to jump to code that they +- // are not actively working on. +- nonWorkspaceFactor = 0.5 +- // nonWorkspaceUnexportedFactor is applied to unexported symbols outside +- // the workspace. Since one wouldn't usually jump to unexported +- // symbols to understand a package API, they are particularly irrelevant. +- nonWorkspaceUnexportedFactor = 0.5 +- // every field or method nesting level to access the field decreases +- // the score by a factor of 1.0 - depth*depthFactor, up to a depth of +- // 3. +- // +- // Use a small constant here, as this exists mostly to break ties +- // (e.g. given a type Foo and a field x.Foo, prefer Foo). +- depthFactor = 0.01 +- ) +- +- // TODO(rfindley): compute this downranking *before* calling the symbolizer +- // (which is expensive), so that we can pre-filter candidates whose score +- // will always be too low, even with a perfect match. +- +- startWord := true +- exported := true +- depth := 0.0 +- for _, r := range sym.Name { +- if startWord && !unicode.IsUpper(r) { +- exported = false +- } +- if r == '.' { +- startWord = true +- depth++ +- } else { +- startWord = false +- } +- } +- +- // Apply downranking based on workspace position. +- if !f.workspace { +- score *= nonWorkspaceFactor +- if !exported { +- score *= nonWorkspaceUnexportedFactor +- } +- } +- +- // Apply downranking based on symbol depth. +- if depth > 3 { +- depth = 3 +- } +- score *= 1.0 - depth*depthFactor +- +- if store.tooLow(score) { +- continue +- } +- +- si := &scoredSymbol{ +- score: score, +- info: protocol.SymbolInformation{ +- Name: strings.Join(symbolParts, ""), +- Kind: sym.Kind, +- Location: f.uri.Location(sym.Range), +- ContainerName: string(f.mp.PkgPath), +- }, +- } +- store.store(si) +- } +-} +- +-type symbolStore struct { +- res [maxSymbols]*scoredSymbol +-} +- +-// store inserts si into the sorted results, if si has a high enough score. +-func (sc *symbolStore) store(ss *scoredSymbol) { +- if sc.tooLow(ss.score) { +- return +- } +- insertAt := sort.Search(len(sc.res), func(i int) bool { +- if sc.res[i] == nil { +- return true +- } +- // Sort by score, then symbol length, and finally lexically. 
+- if ss.score != sc.res[i].score { +- return ss.score > sc.res[i].score +- } +- if cmp := cmp.Compare(len(ss.info.Name), len(sc.res[i].info.Name)); cmp != 0 { +- return cmp < 0 // shortest first +- } +- return ss.info.Name < sc.res[i].info.Name +- }) +- if insertAt < len(sc.res)-1 { +- copy(sc.res[insertAt+1:], sc.res[insertAt:len(sc.res)-1]) +- } +- sc.res[insertAt] = ss +-} +- +-func (sc *symbolStore) tooLow(score float64) bool { +- last := sc.res[len(sc.res)-1] +- if last == nil { +- return false +- } +- return score <= last.score +-} +- +-func (sc *symbolStore) results() []protocol.SymbolInformation { +- var res []protocol.SymbolInformation +- for _, si := range sc.res { +- if si == nil || si.score <= 0 { +- return res +- } +- res = append(res, si.info) +- } +- return res +-} +- +-type scoredSymbol struct { +- score float64 +- info protocol.SymbolInformation +-} +diff -urN a/gopls/internal/golang/workspace_symbol_test.go b/gopls/internal/golang/workspace_symbol_test.go +--- a/gopls/internal/golang/workspace_symbol_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/golang/workspace_symbol_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,144 +0,0 @@ +-// Copyright 2020 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package golang +- +-import ( +- "testing" +- +- "golang.org/x/tools/gopls/internal/cache" +-) +- +-func TestParseQuery(t *testing.T) { +- tests := []struct { +- query, s string +- wantMatch bool +- }{ +- {"", "anything", false}, +- {"any", "anything", true}, +- {"any$", "anything", false}, +- {"ing$", "anything", true}, +- {"ing$", "anythinG", true}, +- {"inG$", "anything", false}, +- {"^any", "anything", true}, +- {"^any", "Anything", true}, +- {"^Any", "anything", false}, +- {"at", "anything", true}, +- // TODO: this appears to be a bug in the fuzzy matching algorithm. 'At' +- // should cause a case-sensitive match. 
+- // {"At", "anything", false}, +- {"At", "Anything", true}, +- {"'yth", "Anything", true}, +- {"'yti", "Anything", false}, +- {"'any 'thing", "Anything", true}, +- {"anythn nythg", "Anything", true}, +- {"ntx", "Anything", false}, +- {"anythn", "anything", true}, +- {"ing", "anything", true}, +- {"anythn nythgx", "anything", false}, +- } +- +- for _, test := range tests { +- matcher := parseQuery(test.query, newFuzzyMatcher) +- if _, score := matcher([]string{test.s}); score > 0 != test.wantMatch { +- t.Errorf("parseQuery(%q) match for %q: %.2g, want match: %t", test.query, test.s, score, test.wantMatch) +- } +- } +-} +- +-func TestPathIncludeFunc(t *testing.T) { +- tests := []struct { +- filters []string +- included []string +- excluded []string +- }{ +- { +- []string{"+**/c.go"}, +- []string{"a/c.go", "a/b/c.go"}, +- []string{}, +- }, +- { +- []string{"+a/**/c.go"}, +- []string{"a/b/c.go", "a/b/d/c.go", "a/c.go"}, +- []string{}, +- }, +- { +- []string{"-a/c.go", "+a/**"}, +- []string{"a/c.go"}, +- []string{}, +- }, +- { +- []string{"+a/**/c.go", "-**/c.go"}, +- []string{}, +- []string{"a/b/c.go"}, +- }, +- { +- []string{"+a/**/c.go", "-a/**"}, +- []string{}, +- []string{"a/b/c.go"}, +- }, +- { +- []string{"+**/c.go", "-a/**/c.go"}, +- []string{}, +- []string{"a/b/c.go"}, +- }, +- { +- []string{"+foobar", "-foo"}, +- []string{"foobar", "foobar/a"}, +- []string{"foo", "foo/a"}, +- }, +- { +- []string{"+", "-"}, +- []string{}, +- []string{"foobar", "foobar/a", "foo", "foo/a"}, +- }, +- { +- []string{"-", "+"}, +- []string{"foobar", "foobar/a", "foo", "foo/a"}, +- []string{}, +- }, +- { +- []string{"-a/**/b/**/c.go"}, +- []string{}, +- []string{"a/x/y/z/b/f/g/h/c.go"}, +- }, +- // tests for unsupported glob operators +- { +- []string{"+**/c.go", "-a/*/c.go"}, +- []string{"a/b/c.go"}, +- []string{}, +- }, +- { +- []string{"+**/c.go", "-a/?/c.go"}, +- []string{"a/b/c.go"}, +- []string{}, +- }, +- { +- []string{"-b"}, // should only filter paths prefixed with the "b" directory +- []string{"a/b/c.go", "bb"}, +- []string{"b/c/d.go", "b"}, +- }, +- // golang/vscode-go#3692 +- { +- []string{"-**/foo", "+**/bar"}, +- []string{"bar/a.go", "a/bar/b.go"}, +- []string{"foo/a.go", "a/foo/b.go"}, +- }, +- } +- +- for _, test := range tests { +- pathIncluded := cache.PathIncludeFunc(test.filters) +- for _, inc := range test.included { +- if !pathIncluded(inc) { +- t.Errorf("Filters %v excluded %v, wanted included", test.filters, inc) +- } +- } +- +- for _, exc := range test.excluded { +- if pathIncluded(exc) { +- t.Errorf("Filters %v included %v, wanted excluded", test.filters, exc) +- } +- } +- } +-} +diff -urN a/gopls/internal/label/keys.go b/gopls/internal/label/keys.go +--- a/gopls/internal/label/keys.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/label/keys.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,37 +0,0 @@ +-// Copyright 2019 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-// Package label provides common labels used to annotate gopls log messages +-// and events. 
+-package label +- +-import "golang.org/x/tools/internal/event/keys" +- +-var ( +- File = keys.NewString("file", "") +- Directory = keys.New("directory", "") +- URI = keys.New("URI", "") +- Package = keys.NewString("package", "") // sorted comma-separated list of Package IDs +- Query = keys.New("query", "") +- ViewID = keys.NewString("view_id", "") +- Snapshot = keys.NewUInt64("snapshot", "") +- Operation = keys.NewString("operation", "") +- Duration = keys.New("duration", "Elapsed time") +- +- Position = keys.New("position", "") +- PackageCount = keys.NewInt("packages", "") +- Files = keys.New("files", "") +- Port = keys.NewInt("port", "") +- +- NewServer = keys.NewString("new_server", "A new server was added") +- EndServer = keys.NewString("end_server", "A server was shut down") +- +- ServerID = keys.NewString("server", "The server ID an event is related to") +- Logfile = keys.NewString("logfile", "") +- DebugAddress = keys.NewString("debug_address", "") +- GoplsPath = keys.NewString("gopls_path", "") +- ClientID = keys.NewString("client_id", "") +- +- Level = keys.NewInt("level", "The logging level") +-) +diff -urN a/gopls/internal/licenses/gen-licenses.sh b/gopls/internal/licenses/gen-licenses.sh +--- a/gopls/internal/licenses/gen-licenses.sh 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/licenses/gen-licenses.sh 1969-12-31 18:00:00.000000000 -0600 +@@ -1,38 +0,0 @@ +-#!/bin/bash -eu +- +-# Copyright 2020 The Go Authors. All rights reserved. +-# Use of this source code is governed by a BSD-style +-# license that can be found in the LICENSE file. +- +-set -o pipefail +- +-output=$1 +-tempfile=$(mktemp) +-cd $(dirname $0) +- +-cat > $tempfile <> $tempfile +- echo >> $tempfile +- sed 's/^-- / &/' $dir/$license >> $tempfile +- echo >> $tempfile +-done +- +-echo "\`" >> $tempfile +-mv $tempfile $output +diff -urN a/gopls/internal/licenses/licenses.go b/gopls/internal/licenses/licenses.go +--- a/gopls/internal/licenses/licenses.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/licenses/licenses.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,366 +0,0 @@ +-// Copyright 2020 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-//go:generate ./gen-licenses.sh licenses.go +-package licenses +- +-const Text = ` +--- github.com/BurntSushi/toml COPYING -- +- +-The MIT License (MIT) +- +-Copyright (c) 2013 TOML authors +- +-Permission is hereby granted, free of charge, to any person obtaining a copy +-of this software and associated documentation files (the "Software"), to deal +-in the Software without restriction, including without limitation the rights +-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +-copies of the Software, and to permit persons to whom the Software is +-furnished to do so, subject to the following conditions: +- +-The above copyright notice and this permission notice shall be included in +-all copies or substantial portions of the Software. +- +-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +-THE SOFTWARE. 
+- +--- github.com/fatih/camelcase LICENSE.md -- +- +-The MIT License (MIT) +- +-Copyright (c) 2015 Fatih Arslan +- +-Permission is hereby granted, free of charge, to any person obtaining a copy of +-this software and associated documentation files (the "Software"), to deal in +-the Software without restriction, including without limitation the rights to +-use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of +-the Software, and to permit persons to whom the Software is furnished to do so, +-subject to the following conditions: +- +-The above copyright notice and this permission notice shall be included in all +-copies or substantial portions of the Software. +- +-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS +-FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR +-COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER +-IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +-CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +- +--- github.com/fatih/gomodifytags LICENSE -- +- +-Copyright (c) 2017, Fatih Arslan +-All rights reserved. +- +-Redistribution and use in source and binary forms, with or without +-modification, are permitted provided that the following conditions are met: +- +-* Redistributions of source code must retain the above copyright notice, this +- list of conditions and the following disclaimer. +- +-* Redistributions in binary form must reproduce the above copyright notice, +- this list of conditions and the following disclaimer in the documentation +- and/or other materials provided with the distribution. +- +-* Neither the name of gomodifytags nor the names of its +- contributors may be used to endorse or promote products derived from +- this software without specific prior written permission. +- +-THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +-AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +-IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +-DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +-FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +-DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +-SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +-CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +-OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +-OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +- +--- github.com/fatih/structtag LICENSE -- +- +-Copyright (c) 2017, Fatih Arslan +-All rights reserved. +- +-Redistribution and use in source and binary forms, with or without +-modification, are permitted provided that the following conditions are met: +- +-* Redistributions of source code must retain the above copyright notice, this +- list of conditions and the following disclaimer. +- +-* Redistributions in binary form must reproduce the above copyright notice, +- this list of conditions and the following disclaimer in the documentation +- and/or other materials provided with the distribution. +- +-* Neither the name of structtag nor the names of its +- contributors may be used to endorse or promote products derived from +- this software without specific prior written permission. 
+- +-THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +-AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +-IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +-DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +-FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +-DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +-SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +-CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +-OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +-OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +- +-This software includes some portions from Go. Go is used under the terms of the +-BSD like license. +- +-Copyright (c) 2012 The Go Authors. All rights reserved. +- +-Redistribution and use in source and binary forms, with or without +-modification, are permitted provided that the following conditions are +-met: +- +- * Redistributions of source code must retain the above copyright +-notice, this list of conditions and the following disclaimer. +- * Redistributions in binary form must reproduce the above +-copyright notice, this list of conditions and the following disclaimer +-in the documentation and/or other materials provided with the +-distribution. +- * Neither the name of Google Inc. nor the names of its +-contributors may be used to endorse or promote products derived from +-this software without specific prior written permission. +- +-THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +-"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +-LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +-A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +-OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +-SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +-LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +-DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +-THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +-(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +-OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +- +-The Go gopher was designed by Renee French. http://reneefrench.blogspot.com/ The design is licensed under the Creative Commons 3.0 Attributions license. Read this article for more details: https://blog.golang.org/gopher +- +--- github.com/fsnotify/fsnotify LICENSE -- +- +-Copyright © 2012 The Go Authors. All rights reserved. +-Copyright © fsnotify Authors. All rights reserved. +- +-Redistribution and use in source and binary forms, with or without modification, +-are permitted provided that the following conditions are met: +- +-* Redistributions of source code must retain the above copyright notice, this +- list of conditions and the following disclaimer. +-* Redistributions in binary form must reproduce the above copyright notice, this +- list of conditions and the following disclaimer in the documentation and/or +- other materials provided with the distribution. +-* Neither the name of Google Inc. nor the names of its contributors may be used +- to endorse or promote products derived from this software without specific +- prior written permission. 
+- +-THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +-ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +-WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +-DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR +-ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +-(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +-LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON +-ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +-(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +-SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +- +--- github.com/google/go-cmp LICENSE -- +- +-Copyright (c) 2017 The Go Authors. All rights reserved. +- +-Redistribution and use in source and binary forms, with or without +-modification, are permitted provided that the following conditions are +-met: +- +- * Redistributions of source code must retain the above copyright +-notice, this list of conditions and the following disclaimer. +- * Redistributions in binary form must reproduce the above +-copyright notice, this list of conditions and the following disclaimer +-in the documentation and/or other materials provided with the +-distribution. +- * Neither the name of Google Inc. nor the names of its +-contributors may be used to endorse or promote products derived from +-this software without specific prior written permission. +- +-THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +-"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +-LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +-A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +-OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +-SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +-LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +-DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +-THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +-(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +-OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +- +--- github.com/google/jsonschema-go LICENSE -- +- +-MIT License +- +-Copyright (c) 2025 JSON Schema Go Project Authors +- +-Permission is hereby granted, free of charge, to any person obtaining a copy +-of this software and associated documentation files (the "Software"), to deal +-in the Software without restriction, including without limitation the rights +-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +-copies of the Software, and to permit persons to whom the Software is +-furnished to do so, subject to the following conditions: +- +-The above copyright notice and this permission notice shall be included in all +-copies or substantial portions of the Software. +- +-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +-SOFTWARE. 
+- +--- github.com/modelcontextprotocol/go-sdk LICENSE -- +- +-MIT License +- +-Copyright (c) 2025 Go MCP SDK Authors +- +-Permission is hereby granted, free of charge, to any person obtaining a copy +-of this software and associated documentation files (the "Software"), to deal +-in the Software without restriction, including without limitation the rights +-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +-copies of the Software, and to permit persons to whom the Software is +-furnished to do so, subject to the following conditions: +- +-The above copyright notice and this permission notice shall be included in all +-copies or substantial portions of the Software. +- +-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +-SOFTWARE. +- +--- github.com/yosida95/uritemplate/v3 LICENSE -- +- +-Copyright (C) 2016, Kohei YOSHIDA . All rights reserved. +- +-Redistribution and use in source and binary forms, with or without +-modification, are permitted provided that the following conditions are met: +- +- * Redistributions of source code must retain the above copyright +- notice, this list of conditions and the following disclaimer. +- * Redistributions in binary form must reproduce the above copyright +- notice, this list of conditions and the following disclaimer in the +- documentation and/or other materials provided with the distribution. +- * Neither the name of the copyright holder nor the names of its +- contributors may be used to endorse or promote products derived from +- this software without specific prior written permission. +- +-THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +-"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +-LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +-A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +-HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +-SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +-LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +-DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +-THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +-(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +-OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +- +--- honnef.co/go/tools LICENSE -- +- +-Copyright (c) 2016 Dominik Honnef +- +-Permission is hereby granted, free of charge, to any person obtaining +-a copy of this software and associated documentation files (the +-"Software"), to deal in the Software without restriction, including +-without limitation the rights to use, copy, modify, merge, publish, +-distribute, sublicense, and/or sell copies of the Software, and to +-permit persons to whom the Software is furnished to do so, subject to +-the following conditions: +- +-The above copyright notice and this permission notice shall be +-included in all copies or substantial portions of the Software. 
+- +-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +-EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +-MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +-NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +-LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +-OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +-WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +- +--- mvdan.cc/gofumpt LICENSE -- +- +-Copyright (c) 2019, Daniel Martí. All rights reserved. +- +-Redistribution and use in source and binary forms, with or without +-modification, are permitted provided that the following conditions are +-met: +- +- * Redistributions of source code must retain the above copyright +-notice, this list of conditions and the following disclaimer. +- * Redistributions in binary form must reproduce the above +-copyright notice, this list of conditions and the following disclaimer +-in the documentation and/or other materials provided with the +-distribution. +- * Neither the name of the copyright holder nor the names of its +-contributors may be used to endorse or promote products derived from +-this software without specific prior written permission. +- +-THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +-"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +-LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +-A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +-OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +-SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +-LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +-DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +-THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +-(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +-OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +- +--- mvdan.cc/xurls/v2 LICENSE -- +- +-Copyright (c) 2015, Daniel Martí. All rights reserved. +- +-Redistribution and use in source and binary forms, with or without +-modification, are permitted provided that the following conditions are +-met: +- +- * Redistributions of source code must retain the above copyright +-notice, this list of conditions and the following disclaimer. +- * Redistributions in binary form must reproduce the above +-copyright notice, this list of conditions and the following disclaimer +-in the documentation and/or other materials provided with the +-distribution. +- * Neither the name of the copyright holder nor the names of its +-contributors may be used to endorse or promote products derived from +-this software without specific prior written permission. +- +-THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +-"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +-LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +-A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +-OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +-SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +-LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +-DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +-THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +-(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +-OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +- +-` +diff -urN a/gopls/internal/licenses/licenses_test.go b/gopls/internal/licenses/licenses_test.go +--- a/gopls/internal/licenses/licenses_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/licenses/licenses_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,40 +0,0 @@ +-// Copyright 2020 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package licenses_test +- +-import ( +- "bytes" +- "os" +- "os/exec" +- "runtime" +- "testing" +-) +- +-func TestLicenses(t *testing.T) { +- if runtime.GOOS != "linux" && runtime.GOOS != "darwin" { +- t.Skip("generating licenses only works on Unixes") +- } +- tmp, err := os.CreateTemp("", "") +- if err != nil { +- t.Fatal(err) +- } +- tmp.Close() // ignore error +- +- if out, err := exec.Command("./gen-licenses.sh", tmp.Name()).CombinedOutput(); err != nil { +- t.Fatalf("generating licenses failed: %q, %v", out, err) +- } +- +- got, err := os.ReadFile(tmp.Name()) +- if err != nil { +- t.Fatal(err) +- } +- want, err := os.ReadFile("licenses.go") +- if err != nil { +- t.Fatal(err) +- } +- if !bytes.Equal(got, want) { +- t.Error("combined license text needs updating. Run: `go generate ./internal/licenses` from the gopls module.") +- } +-} +diff -urN a/gopls/internal/lsprpc/autostart_default.go b/gopls/internal/lsprpc/autostart_default.go +--- a/gopls/internal/lsprpc/autostart_default.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/lsprpc/autostart_default.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,38 +0,0 @@ +-// Copyright 2020 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package lsprpc +- +-import ( +- "fmt" +- "os/exec" +-) +- +-var ( +- daemonize = func(*exec.Cmd) {} +- autoNetworkAddress = autoNetworkAddressDefault +- verifyRemoteOwnership = verifyRemoteOwnershipDefault +-) +- +-func runRemote(cmd *exec.Cmd) error { +- daemonize(cmd) +- if err := cmd.Start(); err != nil { +- return fmt.Errorf("starting remote gopls: %w", err) +- } +- return nil +-} +- +-// autoNetworkAddressDefault returns the default network and address for the +-// automatically-started gopls remote. See autostart_posix.go for more +-// information. +-func autoNetworkAddressDefault(goplsPath, id string) (network string, address string) { +- if id != "" { +- panic("identified remotes are not supported on windows") +- } +- return "tcp", "localhost:37374" +-} +- +-func verifyRemoteOwnershipDefault(network, address string) (bool, error) { +- return true, nil +-} +diff -urN a/gopls/internal/lsprpc/autostart_posix.go b/gopls/internal/lsprpc/autostart_posix.go +--- a/gopls/internal/lsprpc/autostart_posix.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/lsprpc/autostart_posix.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,96 +0,0 @@ +-// Copyright 2020 The Go Authors. All rights reserved. 
+-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-//go:build darwin || dragonfly || freebsd || linux || netbsd || openbsd || solaris +-// +build darwin dragonfly freebsd linux netbsd openbsd solaris +- +-package lsprpc +- +-import ( +- "crypto/sha256" +- "errors" +- "fmt" +- "log" +- "os" +- "os/exec" +- "os/user" +- "path/filepath" +- "strconv" +- "syscall" +-) +- +-func init() { +- daemonize = daemonizePosix +- autoNetworkAddress = autoNetworkAddressPosix +- verifyRemoteOwnership = verifyRemoteOwnershipPosix +-} +- +-func daemonizePosix(cmd *exec.Cmd) { +- cmd.SysProcAttr = &syscall.SysProcAttr{ +- Setsid: true, +- } +-} +- +-// autoNetworkAddressPosix resolves an id on the 'auto' pseduo-network to a +-// real network and address. On unix, this uses unix domain sockets. +-func autoNetworkAddressPosix(goplsPath, id string) (network string, address string) { +- // Especially when doing local development or testing, it's important that +- // the remote gopls instance we connect to is running the same binary as our +- // forwarder. So we encode a short hash of the binary path into the daemon +- // socket name. If possible, we also include the buildid in this hash, to +- // account for long-running processes where the binary has been subsequently +- // rebuilt. +- h := sha256.New() +- cmd := exec.Command("go", "tool", "buildid", goplsPath) +- cmd.Stdout = h +- var pathHash []byte +- if err := cmd.Run(); err == nil { +- pathHash = h.Sum(nil) +- } else { +- log.Printf("error getting current buildid: %v", err) +- sum := sha256.Sum256([]byte(goplsPath)) +- pathHash = sum[:] +- } +- shortHash := fmt.Sprintf("%x", pathHash)[:6] +- user := os.Getenv("USER") +- if user == "" { +- user = "shared" +- } +- basename := filepath.Base(goplsPath) +- idComponent := "" +- if id != "" { +- idComponent = "-" + id +- } +- runtimeDir := os.TempDir() +- if xdg := os.Getenv("XDG_RUNTIME_DIR"); xdg != "" { +- runtimeDir = xdg +- } +- return "unix", filepath.Join(runtimeDir, fmt.Sprintf("%s-%s-daemon.%s%s", basename, shortHash, user, idComponent)) +-} +- +-func verifyRemoteOwnershipPosix(network, address string) (bool, error) { +- if network != "unix" { +- return true, nil +- } +- fi, err := os.Stat(address) +- if err != nil { +- if os.IsNotExist(err) { +- return true, nil +- } +- return false, fmt.Errorf("checking socket owner: %w", err) +- } +- stat, ok := fi.Sys().(*syscall.Stat_t) +- if !ok { +- return false, errors.New("fi.Sys() is not a Stat_t") +- } +- user, err := user.Current() +- if err != nil { +- return false, fmt.Errorf("checking current user: %w", err) +- } +- uid, err := strconv.ParseUint(user.Uid, 10, 32) +- if err != nil { +- return false, fmt.Errorf("parsing current UID: %w", err) +- } +- return stat.Uid == uint32(uid), nil +-} +diff -urN a/gopls/internal/lsprpc/binder.go b/gopls/internal/lsprpc/binder.go +--- a/gopls/internal/lsprpc/binder.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/lsprpc/binder.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,5 +0,0 @@ +-// Copyright 2021 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. 
+- +-package lsprpc +diff -urN a/gopls/internal/lsprpc/binder_test.go b/gopls/internal/lsprpc/binder_test.go +--- a/gopls/internal/lsprpc/binder_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/lsprpc/binder_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,199 +0,0 @@ +-// Copyright 2021 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package lsprpc_test +- +-import ( +- "context" +- "regexp" +- "strings" +- "testing" +- "time" +- +- "golang.org/x/tools/gopls/internal/protocol" +- jsonrpc2_v2 "golang.org/x/tools/internal/jsonrpc2_v2" +- +- . "golang.org/x/tools/gopls/internal/lsprpc" +-) +- +-// ServerBinder binds incoming connections to a new server. +-type ServerBinder struct { +- newServer ServerFunc +-} +- +-func NewServerBinder(newServer ServerFunc) *ServerBinder { +- return &ServerBinder{newServer: newServer} +-} +- +-// streamServer used to have this method, but it was never used. +-// TODO(adonovan): figure out whether we need any of this machinery +-// and, if not, delete it. In the meantime, it's better that it sit +-// in the test package with all the other mothballed machinery +-// than in the production code where it would couple streamServer +-// and ServerBinder. +-/* +-func (s *streamServer) Binder() *ServerBinder { +- newServer := func(ctx context.Context, client protocol.ClientCloser) protocol.Server { +- session := cache.NewSession(ctx, s.cache) +- svr := s.serverForTest +- if svr == nil { +- options := settings.DefaultOptions(s.optionsOverrides) +- svr = server.New(session, client, options) +- if instance := debug.GetInstance(ctx); instance != nil { +- instance.AddService(svr, session) +- } +- } +- return svr +- } +- return NewServerBinder(newServer) +-} +-*/ +- +-func (b *ServerBinder) Bind(ctx context.Context, conn *jsonrpc2_v2.Connection) jsonrpc2_v2.ConnectionOptions { +- client := protocol.ClientDispatcherV2(conn) +- server := b.newServer(ctx, client) +- serverHandler := protocol.ServerHandlerV2(server) +- // Wrap the server handler to inject the client into each request context, so +- // that log events are reflected back to the client. 
+- wrapped := jsonrpc2_v2.HandlerFunc(func(ctx context.Context, req *jsonrpc2_v2.Request) (any, error) { +- ctx = protocol.WithClient(ctx, client) +- return serverHandler.Handle(ctx, req) +- }) +- preempter := &Canceler{ +- Conn: conn, +- } +- return jsonrpc2_v2.ConnectionOptions{ +- Handler: wrapped, +- Preempter: preempter, +- } +-} +- +-type TestEnv struct { +- Conns []*jsonrpc2_v2.Connection +- Servers []*jsonrpc2_v2.Server +-} +- +-func (e *TestEnv) Shutdown(t *testing.T) { +- for _, s := range e.Servers { +- s.Shutdown() +- } +- for _, c := range e.Conns { +- if err := c.Close(); err != nil { +- t.Error(err) +- } +- } +- for _, s := range e.Servers { +- if err := s.Wait(); err != nil { +- t.Error(err) +- } +- } +-} +- +-func (e *TestEnv) serve(ctx context.Context, t *testing.T, server jsonrpc2_v2.Binder) (jsonrpc2_v2.Listener, *jsonrpc2_v2.Server) { +- l, err := jsonrpc2_v2.NetPipeListener(ctx) +- if err != nil { +- t.Fatal(err) +- } +- s := jsonrpc2_v2.NewServer(ctx, l, server) +- e.Servers = append(e.Servers, s) +- return l, s +-} +- +-func (e *TestEnv) dial(ctx context.Context, t *testing.T, dialer jsonrpc2_v2.Dialer, client jsonrpc2_v2.Binder, forwarded bool) *jsonrpc2_v2.Connection { +- if forwarded { +- l, _ := e.serve(ctx, t, NewForwardBinder(dialer)) +- dialer = l.Dialer() +- } +- conn, err := jsonrpc2_v2.Dial(ctx, dialer, client, nil) +- if err != nil { +- t.Fatal(err) +- } +- e.Conns = append(e.Conns, conn) +- return conn +-} +- +-func staticClientBinder(client protocol.Client) jsonrpc2_v2.Binder { +- f := func(context.Context, protocol.Server) protocol.Client { return client } +- return NewClientBinder(f) +-} +- +-func staticServerBinder(server protocol.Server) jsonrpc2_v2.Binder { +- f := func(ctx context.Context, client protocol.ClientCloser) protocol.Server { +- return server +- } +- return NewServerBinder(f) +-} +- +-func TestClientLoggingV2(t *testing.T) { +- ctx := context.Background() +- +- for name, forwarded := range map[string]bool{ +- "forwarded": true, +- "standalone": false, +- } { +- t.Run(name, func(t *testing.T) { +- client := FakeClient{Logs: make(chan string, 10)} +- env := new(TestEnv) +- defer env.Shutdown(t) +- l, _ := env.serve(ctx, t, staticServerBinder(PingServer{})) +- conn := env.dial(ctx, t, l.Dialer(), staticClientBinder(client), forwarded) +- +- if err := protocol.ServerDispatcherV2(conn).DidOpen(ctx, &protocol.DidOpenTextDocumentParams{}); err != nil { +- t.Errorf("DidOpen: %v", err) +- } +- select { +- case got := <-client.Logs: +- want := "ping" +- matched, err := regexp.MatchString(want, got) +- if err != nil { +- t.Fatal(err) +- } +- if !matched { +- t.Errorf("got log %q, want a log containing %q", got, want) +- } +- case <-time.After(1 * time.Second): +- t.Error("timeout waiting for client log") +- } +- }) +- } +-} +- +-func TestRequestCancellationV2(t *testing.T) { +- ctx := context.Background() +- +- for name, forwarded := range map[string]bool{ +- "forwarded": true, +- "standalone": false, +- } { +- t.Run(name, func(t *testing.T) { +- server := WaitableServer{ +- Started: make(chan struct{}), +- Completed: make(chan error), +- } +- env := new(TestEnv) +- defer env.Shutdown(t) +- l, _ := env.serve(ctx, t, staticServerBinder(server)) +- client := FakeClient{Logs: make(chan string, 10)} +- conn := env.dial(ctx, t, l.Dialer(), staticClientBinder(client), forwarded) +- +- sd := protocol.ServerDispatcherV2(conn) +- ctx, cancel := context.WithCancel(ctx) +- +- result := make(chan error) +- go func() { +- _, err := sd.Hover(ctx, 
&protocol.HoverParams{}) +- result <- err +- }() +- // Wait for the Hover request to start. +- <-server.Started +- cancel() +- if err := <-result; err == nil { +- t.Error("nil error for cancelled Hover(), want non-nil") +- } +- if err := <-server.Completed; err == nil || !strings.Contains(err.Error(), "cancelled hover") { +- t.Errorf("Hover(): unexpected server-side error %v", err) +- } +- }) +- } +-} +diff -urN a/gopls/internal/lsprpc/commandinterceptor_test.go b/gopls/internal/lsprpc/commandinterceptor_test.go +--- a/gopls/internal/lsprpc/commandinterceptor_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/lsprpc/commandinterceptor_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,61 +0,0 @@ +-// Copyright 2021 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package lsprpc_test +- +-import ( +- "context" +- "encoding/json" +- "testing" +- +- "golang.org/x/tools/gopls/internal/protocol" +- jsonrpc2_v2 "golang.org/x/tools/internal/jsonrpc2_v2" +- +- . "golang.org/x/tools/gopls/internal/lsprpc" +-) +- +-func CommandInterceptor(command string, run func(*protocol.ExecuteCommandParams) (any, error)) Middleware { +- return BindHandler(func(delegate jsonrpc2_v2.Handler) jsonrpc2_v2.Handler { +- return jsonrpc2_v2.HandlerFunc(func(ctx context.Context, req *jsonrpc2_v2.Request) (any, error) { +- if req.Method == "workspace/executeCommand" { +- var params protocol.ExecuteCommandParams +- if err := json.Unmarshal(req.Params, ¶ms); err == nil { +- if params.Command == command { +- return run(¶ms) +- } +- } +- } +- +- return delegate.Handle(ctx, req) +- }) +- }) +-} +- +-func TestCommandInterceptor(t *testing.T) { +- const command = "foo" +- caught := false +- intercept := func(_ *protocol.ExecuteCommandParams) (any, error) { +- caught = true +- return map[string]any{}, nil +- } +- +- ctx := context.Background() +- env := new(TestEnv) +- defer env.Shutdown(t) +- mw := CommandInterceptor(command, intercept) +- l, _ := env.serve(ctx, t, mw(noopBinder)) +- conn := env.dial(ctx, t, l.Dialer(), noopBinder, false) +- +- params := &protocol.ExecuteCommandParams{ +- Command: command, +- } +- var res any +- err := conn.Call(ctx, "workspace/executeCommand", params).Await(ctx, &res) +- if err != nil { +- t.Fatal(err) +- } +- if !caught { +- t.Errorf("workspace/executeCommand was not intercepted") +- } +-} +diff -urN a/gopls/internal/lsprpc/dialer.go b/gopls/internal/lsprpc/dialer.go +--- a/gopls/internal/lsprpc/dialer.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/lsprpc/dialer.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,114 +0,0 @@ +-// Copyright 2021 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package lsprpc +- +-import ( +- "context" +- "fmt" +- "io" +- "net" +- "os" +- "os/exec" +- "time" +- +- "golang.org/x/tools/internal/event" +-) +- +-// autoNetwork is the pseudo network type used to signal that gopls should use +-// automatic discovery to resolve a remote address. +-const autoNetwork = "auto" +- +-// An autoDialer is a jsonrpc2 dialer that understands the 'auto' network. 
+-type autoDialer struct { +- network, addr string // the 'real' network and address +- isAuto bool // whether the server is on the 'auto' network +- +- executable string +- argFunc func(network, addr string) []string +-} +- +-func newAutoDialer(rawAddr string, argFunc func(network, addr string) []string) (*autoDialer, error) { +- d := autoDialer{ +- argFunc: argFunc, +- } +- d.network, d.addr = ParseAddr(rawAddr) +- if d.network == autoNetwork { +- d.isAuto = true +- bin, err := os.Executable() +- if err != nil { +- return nil, fmt.Errorf("getting executable: %w", err) +- } +- d.executable = bin +- d.network, d.addr = autoNetworkAddress(bin, d.addr) +- } +- return &d, nil +-} +- +-// Dial implements the jsonrpc2.Dialer interface. +-func (d *autoDialer) Dial(ctx context.Context) (io.ReadWriteCloser, error) { +- conn, err := d.dialNet(ctx) +- return conn, err +-} +- +-// TODO(rFindley): remove this once we no longer need to integrate with v1 of +-// the jsonrpc2 package. +-func (d *autoDialer) dialNet(ctx context.Context) (net.Conn, error) { +- // Attempt to verify that we own the remote. This is imperfect, but if we can +- // determine that the remote is owned by a different user, we should fail. +- ok, err := verifyRemoteOwnership(d.network, d.addr) +- if err != nil { +- // If the ownership check itself failed, we fail open but log an error to +- // the user. +- event.Error(ctx, "unable to check daemon socket owner, failing open", err) +- } else if !ok { +- // We successfully checked that the socket is not owned by us, we fail +- // closed. +- return nil, fmt.Errorf("socket %q is owned by a different user", d.addr) +- } +- const dialTimeout = 1 * time.Second +- // Try dialing our remote once, in case it is already running. +- netConn, err := net.DialTimeout(d.network, d.addr, dialTimeout) +- if err == nil { +- return netConn, nil +- } +- if d.isAuto && d.argFunc != nil { +- if d.network == "unix" { +- // Sometimes the socketfile isn't properly cleaned up when the server +- // shuts down. Since we have already tried and failed to dial this +- // address, it should *usually* be safe to remove the socket before +- // binding to the address. +- // TODO(rfindley): there is probably a race here if multiple server +- // instances are simultaneously starting up. +- if _, err := os.Stat(d.addr); err == nil { +- if err := os.Remove(d.addr); err != nil { +- return nil, fmt.Errorf("removing remote socket file: %w", err) +- } +- } +- } +- args := d.argFunc(d.network, d.addr) +- cmd := exec.Command(d.executable, args...) +- if err := runRemote(cmd); err != nil { +- return nil, err +- } +- } +- +- const retries = 5 +- // It can take some time for the newly started server to bind to our address, +- // so we retry for a bit. +- for retry := range retries { +- startDial := time.Now() +- netConn, err = net.DialTimeout(d.network, d.addr, dialTimeout) +- if err == nil { +- return netConn, nil +- } +- event.Log(ctx, fmt.Sprintf("failed attempt #%d to connect to remote: %v\n", retry+2, err)) +- // In case our failure was a fast-failure, ensure we wait at least +- // f.dialTimeout before trying again. 
+- if retry != retries-1 { +- time.Sleep(dialTimeout - time.Since(startDial)) +- } +- } +- return nil, fmt.Errorf("dialing remote: %w", err) +-} +diff -urN a/gopls/internal/lsprpc/export_test.go b/gopls/internal/lsprpc/export_test.go +--- a/gopls/internal/lsprpc/export_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/lsprpc/export_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,135 +0,0 @@ +-// Copyright 2024 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package lsprpc +- +-// This file defines things (and opens backdoors) needed only by tests. +- +-import ( +- "context" +- "encoding/json" +- "fmt" +- +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/internal/event" +- jsonrpc2_v2 "golang.org/x/tools/internal/jsonrpc2_v2" +- "golang.org/x/tools/internal/xcontext" +-) +- +-const HandshakeMethod = handshakeMethod +- +-// A ServerFunc is used to construct an LSP server for a given client. +-type ServerFunc func(context.Context, protocol.ClientCloser) protocol.Server +- +-type Canceler struct { +- Conn *jsonrpc2_v2.Connection +-} +- +-func (c *Canceler) Preempt(ctx context.Context, req *jsonrpc2_v2.Request) (any, error) { +- if req.Method != "$/cancelRequest" { +- return nil, jsonrpc2_v2.ErrNotHandled +- } +- var params protocol.CancelParams +- if err := json.Unmarshal(req.Params, ¶ms); err != nil { +- return nil, fmt.Errorf("%w: %v", jsonrpc2_v2.ErrParse, err) +- } +- id, err := jsonrpc2_v2.MakeID(params.ID) +- if err != nil { +- return nil, fmt.Errorf("%w: invalid ID type %T", jsonrpc2_v2.ErrParse, params.ID) +- } +- c.Conn.Cancel(id) +- return nil, nil +-} +- +-type ForwardBinder struct { +- dialer jsonrpc2_v2.Dialer +- onBind func(*jsonrpc2_v2.Connection) +-} +- +-func NewForwardBinder(dialer jsonrpc2_v2.Dialer) *ForwardBinder { +- return &ForwardBinder{ +- dialer: dialer, +- } +-} +- +-func (b *ForwardBinder) Bind(ctx context.Context, conn *jsonrpc2_v2.Connection) (opts jsonrpc2_v2.ConnectionOptions) { +- client := protocol.ClientDispatcherV2(conn) +- clientBinder := NewClientBinder(func(context.Context, protocol.Server) protocol.Client { return client }) +- +- serverConn, err := jsonrpc2_v2.Dial(context.Background(), b.dialer, clientBinder, nil) +- if err != nil { +- return jsonrpc2_v2.ConnectionOptions{ +- Handler: jsonrpc2_v2.HandlerFunc(func(context.Context, *jsonrpc2_v2.Request) (any, error) { +- return nil, fmt.Errorf("%w: %v", jsonrpc2_v2.ErrInternal, err) +- }), +- } +- } +- +- if b.onBind != nil { +- b.onBind(serverConn) +- } +- server := protocol.ServerDispatcherV2(serverConn) +- preempter := &Canceler{ +- Conn: conn, +- } +- detached := xcontext.Detach(ctx) +- go func() { +- conn.Wait() // ignore error +- if err := serverConn.Close(); err != nil { +- event.Log(detached, fmt.Sprintf("closing remote connection: %v", err)) +- } +- }() +- return jsonrpc2_v2.ConnectionOptions{ +- Handler: protocol.ServerHandlerV2(server), +- Preempter: preempter, +- } +-} +- +-func NewClientBinder(newClient ClientFunc) *clientBinder { +- return &clientBinder{newClient} +-} +- +-// A ClientFunc is used to construct an LSP client for a given server. +-type ClientFunc func(context.Context, protocol.Server) protocol.Client +- +-// clientBinder binds an LSP client to an incoming connection. 
+-type clientBinder struct { +- newClient ClientFunc +-} +- +-func (b *clientBinder) Bind(ctx context.Context, conn *jsonrpc2_v2.Connection) jsonrpc2_v2.ConnectionOptions { +- server := protocol.ServerDispatcherV2(conn) +- client := b.newClient(ctx, server) +- return jsonrpc2_v2.ConnectionOptions{ +- Handler: protocol.ClientHandlerV2(client), +- } +-} +- +-// HandlerMiddleware is a middleware that only modifies the jsonrpc2 handler. +-type HandlerMiddleware func(jsonrpc2_v2.Handler) jsonrpc2_v2.Handler +- +-// BindHandler transforms a HandlerMiddleware into a Middleware. +-func BindHandler(hmw HandlerMiddleware) Middleware { +- return Middleware(func(binder jsonrpc2_v2.Binder) jsonrpc2_v2.Binder { +- return BinderFunc(func(ctx context.Context, conn *jsonrpc2_v2.Connection) jsonrpc2_v2.ConnectionOptions { +- opts := binder.Bind(ctx, conn) +- opts.Handler = hmw(opts.Handler) +- return opts +- }) +- }) +-} +- +-// The BinderFunc type adapts a bind function to implement the jsonrpc2.Binder +-// interface. +-type BinderFunc func(ctx context.Context, conn *jsonrpc2_v2.Connection) jsonrpc2_v2.ConnectionOptions +- +-func (f BinderFunc) Bind(ctx context.Context, conn *jsonrpc2_v2.Connection) jsonrpc2_v2.ConnectionOptions { +- return f(ctx, conn) +-} +- +-// Middleware defines a transformation of jsonrpc2 Binders, that may be +-// composed to build jsonrpc2 servers. +-type Middleware func(jsonrpc2_v2.Binder) jsonrpc2_v2.Binder +- +-var GetGoEnv = getGoEnv +diff -urN a/gopls/internal/lsprpc/goenv.go b/gopls/internal/lsprpc/goenv.go +--- a/gopls/internal/lsprpc/goenv.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/lsprpc/goenv.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,34 +0,0 @@ +-// Copyright 2021 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package lsprpc +- +-import ( +- "context" +- "encoding/json" +- "fmt" +- +- "golang.org/x/tools/internal/gocommand" +-) +- +-func getGoEnv(ctx context.Context, env map[string]any) (map[string]string, error) { +- var runEnv []string +- for k, v := range env { +- runEnv = append(runEnv, fmt.Sprintf("%s=%s", k, v)) +- } +- runner := gocommand.Runner{} +- output, err := runner.Run(ctx, gocommand.Invocation{ +- Verb: "env", +- Args: []string{"-json"}, +- Env: runEnv, +- }) +- if err != nil { +- return nil, err +- } +- envmap := make(map[string]string) +- if err := json.Unmarshal(output.Bytes(), &envmap); err != nil { +- return nil, err +- } +- return envmap, nil +-} +diff -urN a/gopls/internal/lsprpc/goenv_test.go b/gopls/internal/lsprpc/goenv_test.go +--- a/gopls/internal/lsprpc/goenv_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/lsprpc/goenv_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,133 +0,0 @@ +-// Copyright 2021 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package lsprpc_test +- +-import ( +- "context" +- "encoding/json" +- "fmt" +- "os" +- "testing" +- +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/internal/event" +- jsonrpc2_v2 "golang.org/x/tools/internal/jsonrpc2_v2" +- "golang.org/x/tools/internal/testenv" +- +- . 
"golang.org/x/tools/gopls/internal/lsprpc" +-) +- +-func GoEnvMiddleware() (Middleware, error) { +- return BindHandler(func(delegate jsonrpc2_v2.Handler) jsonrpc2_v2.Handler { +- return jsonrpc2_v2.HandlerFunc(func(ctx context.Context, req *jsonrpc2_v2.Request) (any, error) { +- if req.Method == "initialize" { +- if err := addGoEnvToInitializeRequestV2(ctx, req); err != nil { +- event.Error(ctx, "adding go env to initialize", err) +- } +- } +- return delegate.Handle(ctx, req) +- }) +- }), nil +-} +- +-// This function is almost identical to addGoEnvToInitializeRequest in lsprpc.go. +-// Make changes in parallel. +-func addGoEnvToInitializeRequestV2(ctx context.Context, req *jsonrpc2_v2.Request) error { +- var params protocol.ParamInitialize +- if err := json.Unmarshal(req.Params, ¶ms); err != nil { +- return err +- } +- var opts map[string]any +- switch v := params.InitializationOptions.(type) { +- case nil: +- opts = make(map[string]any) +- case map[string]any: +- opts = v +- default: +- return fmt.Errorf("unexpected type for InitializationOptions: %T", v) +- } +- envOpt, ok := opts["env"] +- if !ok { +- envOpt = make(map[string]any) +- } +- env, ok := envOpt.(map[string]any) +- if !ok { +- return fmt.Errorf("env option is %T, expected a map", envOpt) +- } +- goenv, err := GetGoEnv(ctx, env) +- if err != nil { +- return err +- } +- // We don't want to propagate GOWORK unless explicitly set since that could mess with +- // path inference during cmd/go invocations, see golang/go#51825. +- _, goworkSet := os.LookupEnv("GOWORK") +- for govar, value := range goenv { +- if govar == "GOWORK" && !goworkSet { +- continue +- } +- env[govar] = value +- } +- opts["env"] = env +- params.InitializationOptions = opts +- raw, err := json.Marshal(params) +- if err != nil { +- return fmt.Errorf("marshaling updated options: %v", err) +- } +- req.Params = json.RawMessage(raw) +- return nil +-} +- +-type initServer struct { +- protocol.Server +- +- params *protocol.ParamInitialize +-} +- +-func (s *initServer) Initialize(ctx context.Context, params *protocol.ParamInitialize) (*protocol.InitializeResult, error) { +- s.params = params +- return &protocol.InitializeResult{}, nil +-} +- +-func TestGoEnvMiddleware(t *testing.T) { +- testenv.NeedsTool(t, "go") +- +- ctx := context.Background() +- +- server := &initServer{} +- env := new(TestEnv) +- defer env.Shutdown(t) +- l, _ := env.serve(ctx, t, staticServerBinder(server)) +- mw, err := GoEnvMiddleware() +- if err != nil { +- t.Fatal(err) +- } +- binder := mw(NewForwardBinder(l.Dialer())) +- l, _ = env.serve(ctx, t, binder) +- conn := env.dial(ctx, t, l.Dialer(), noopBinder, true) +- dispatch := protocol.ServerDispatcherV2(conn) +- initParams := &protocol.ParamInitialize{} +- initParams.InitializationOptions = map[string]any{ +- "env": map[string]any{ +- "GONOPROXY": "example.com", +- }, +- } +- if _, err := dispatch.Initialize(ctx, initParams); err != nil { +- t.Fatal(err) +- } +- +- if server.params == nil { +- t.Fatalf("initialize params are unset") +- } +- envOpts := server.params.InitializationOptions.(map[string]any)["env"].(map[string]any) +- +- // Check for an arbitrary Go variable. It should be set. +- if _, ok := envOpts["GOPRIVATE"]; !ok { +- t.Errorf("Go environment variable GOPRIVATE unset in initialization options") +- } +- // Check that the variable present in our user config was not overwritten. 
+- if got, want := envOpts["GONOPROXY"], "example.com"; got != want { +- t.Errorf("GONOPROXY=%q, want %q", got, want) +- } +-} +diff -urN a/gopls/internal/lsprpc/lsprpc.go b/gopls/internal/lsprpc/lsprpc.go +--- a/gopls/internal/lsprpc/lsprpc.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/lsprpc/lsprpc.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,599 +0,0 @@ +-// Copyright 2020 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-// Package lsprpc implements a jsonrpc2.StreamServer that may be used to +-// serve the LSP on a jsonrpc2 channel. +-package lsprpc +- +-import ( +- "context" +- "encoding/json" +- "fmt" +- "log" +- "maps" +- "net" +- "os" +- "slices" +- "strconv" +- "strings" +- "sync" +- "sync/atomic" +- "time" +- +- "golang.org/x/tools/gopls/internal/cache" +- "golang.org/x/tools/gopls/internal/debug" +- "golang.org/x/tools/gopls/internal/label" +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/gopls/internal/protocol/command" +- "golang.org/x/tools/gopls/internal/server" +- "golang.org/x/tools/gopls/internal/settings" +- "golang.org/x/tools/internal/event" +- "golang.org/x/tools/internal/jsonrpc2" +-) +- +-// Unique identifiers for client/server. +-var serverIndex int64 +- +-// The streamServer type is a jsonrpc2.streamServer that handles incoming +-// streams as a new LSP session, using a shared cache. +-type StreamServer struct { +- cache *cache.Cache +- // daemon controls whether or not to log new connections. +- daemon bool +- +- // optionsOverrides is passed to newly created sessions. +- optionsOverrides func(*settings.Options) +- +- // onSessionExit is called whenever a session exits, with the session ID. +- onSessionExit func(id string) +- +- // serverForTest may be set to a test fake for testing. +- serverForTest protocol.Server +- +- // Keep track of active sessions, for interrogation. +- sessionMu sync.Mutex +- sessions map[string]sessionServer +-} +- +-type sessionServer struct { +- session *cache.Session +- server protocol.Server +-} +- +-// NewStreamServer creates a StreamServer using the shared cache. If +-// withTelemetry is true, each session is instrumented with telemetry that +-// records RPC statistics. +-func NewStreamServer(cache *cache.Cache, daemon bool, optionsFunc func(*settings.Options)) *StreamServer { +- return &StreamServer{ +- cache: cache, +- daemon: daemon, +- optionsOverrides: optionsFunc, +- sessions: make(map[string]sessionServer), +- } +-} +- +-// SetSessionExitFunc sets the function to call when sessions exit. +-// It is not concurrency safe, and must only be called at most once, before the +-// receiver is passed to jsonrpc2.Serve. +-func (s *StreamServer) SetSessionExitFunc(f func(id string)) { +- if s.onSessionExit != nil { +- panic("duplicate call to SetSessionExitFunc") +- } +- s.onSessionExit = f +-} +- +-// ServeStream implements the jsonrpc2.StreamServer interface, by handling +-// incoming streams using a new lsp server. 
+-func (s *StreamServer) ServeStream(ctx context.Context, conn jsonrpc2.Conn) error { +- client := protocol.ClientDispatcher(conn) +- session := cache.NewSession(ctx, s.cache) +- svr := s.serverForTest +- if svr == nil { +- options := settings.DefaultOptions(s.optionsOverrides) +- svr = server.New(session, client, options) +- if instance := debug.GetInstance(ctx); instance != nil { +- instance.AddService(svr, session) +- } +- } +- s.sessionMu.Lock() +- s.sessions[session.ID()] = sessionServer{session, svr} +- s.sessionMu.Unlock() +- defer func() { +- s.sessionMu.Lock() +- delete(s.sessions, session.ID()) +- s.sessionMu.Unlock() +- if s.onSessionExit != nil { +- s.onSessionExit(session.ID()) +- } +- }() +- +- // Clients may or may not send a shutdown message. Make sure the server is +- // shut down. +- // TODO(rFindley): this shutdown should perhaps be on a disconnected context. +- defer func() { +- if err := svr.Shutdown(ctx); err != nil { +- event.Error(ctx, "error shutting down", err) +- } +- }() +- executable, err := os.Executable() +- if err != nil { +- log.Printf("error getting gopls path: %v", err) +- executable = "" +- } +- ctx = protocol.WithClient(ctx, client) +- conn.Go(ctx, +- protocol.Handlers( +- handshaker(session, executable, s.daemon, +- protocol.ServerHandler(svr, +- jsonrpc2.MethodNotFound)))) +- +- if s.daemon { +- log.Printf("Session %s: connected", session.ID()) +- defer log.Printf("Session %s: exited", session.ID()) +- } +- +- <-conn.Done() +- return conn.Err() +-} +- +-// Session returns the current active session for the given id, or (nil, nil) +-// if none exists. +-func (s *StreamServer) Session(id string) (*cache.Session, protocol.Server) { +- s.sessionMu.Lock() +- defer s.sessionMu.Unlock() +- ss := s.sessions[id] +- return ss.session, ss.server // possibly nil for zero value +-} +- +-// FirstSession returns the first session by lexically sorted session ID, or +-// (nil, nil). +-func (s *StreamServer) FirstSession() (*cache.Session, protocol.Server) { +- s.sessionMu.Lock() +- defer s.sessionMu.Unlock() +- keys := slices.Collect(maps.Keys(s.sessions)) +- if len(keys) == 0 { +- return nil, nil +- } +- id := slices.Min(keys) +- ss := s.sessions[id] +- return ss.session, ss.server +-} +- +-// A forwarder is a jsonrpc2.StreamServer that handles an LSP stream by +-// forwarding it to a remote. This is used when the gopls process started by +-// the editor is in the `-remote` mode, which means it finds and connects to a +-// separate gopls daemon. In these cases, we still want the forwarder gopls to +-// be instrumented with telemetry, and want to be able to in some cases hijack +-// the jsonrpc2 connection with the daemon. +-type forwarder struct { +- dialer *autoDialer +- +- mu sync.Mutex +- // Hold on to the server connection so that we can redo the handshake if any +- // information changes. +- serverConn jsonrpc2.Conn +- serverID string +-} +- +-// NewForwarder creates a new forwarder (a [jsonrpc2.StreamServer]), +-// ready to forward connections to the +-// remote server specified by rawAddr. If provided and rawAddr indicates an +-// 'automatic' address (starting with 'auto;'), argFunc may be used to start a +-// remote server for the auto-discovered address. 
+-func NewForwarder(rawAddr string, argFunc func(network, address string) []string) (jsonrpc2.StreamServer, error) { +- dialer, err := newAutoDialer(rawAddr, argFunc) +- if err != nil { +- return nil, err +- } +- fwd := &forwarder{ +- dialer: dialer, +- } +- return fwd, nil +-} +- +-// QueryServerState returns a JSON-encodable struct describing the state of the named server. +-func QueryServerState(ctx context.Context, addr string) (any, error) { +- serverConn, err := dialRemote(ctx, addr) +- if err != nil { +- return nil, err +- } +- var state serverState +- if err := protocol.Call(ctx, serverConn, sessionsMethod, nil, &state); err != nil { +- return nil, fmt.Errorf("querying server state: %w", err) +- } +- return &state, nil +-} +- +-// dialRemote is used for making calls into the gopls daemon. addr should be a +-// URL, possibly on the synthetic 'auto' network (e.g. tcp://..., unix://..., +-// or auto://...). +-func dialRemote(ctx context.Context, addr string) (jsonrpc2.Conn, error) { +- network, address := ParseAddr(addr) +- if network == autoNetwork { +- gp, err := os.Executable() +- if err != nil { +- return nil, fmt.Errorf("getting gopls path: %w", err) +- } +- network, address = autoNetworkAddress(gp, address) +- } +- netConn, err := net.DialTimeout(network, address, 5*time.Second) +- if err != nil { +- return nil, fmt.Errorf("dialing remote: %w", err) +- } +- serverConn := jsonrpc2.NewConn(jsonrpc2.NewHeaderStream(netConn)) +- serverConn.Go(ctx, jsonrpc2.MethodNotFound) +- return serverConn, nil +-} +- +-// ExecuteCommand connects to the named server, sends it a +-// workspace/executeCommand request (with command 'id' and arguments +-// JSON encoded in 'request'), and populates the result variable. +-func ExecuteCommand(ctx context.Context, addr string, id string, request, result any) error { +- serverConn, err := dialRemote(ctx, addr) +- if err != nil { +- return err +- } +- args, err := command.MarshalArgs(request) +- if err != nil { +- return err +- } +- params := protocol.ExecuteCommandParams{ +- Command: id, +- Arguments: args, +- } +- return protocol.Call(ctx, serverConn, "workspace/executeCommand", params, result) +-} +- +-// ServeStream dials the forwarder remote and binds the remote to serve the LSP +-// on the incoming stream. +-func (f *forwarder) ServeStream(ctx context.Context, clientConn jsonrpc2.Conn) error { +- client := protocol.ClientDispatcher(clientConn) +- +- netConn, err := f.dialer.dialNet(ctx) +- if err != nil { +- return fmt.Errorf("forwarder: connecting to remote: %w", err) +- } +- serverConn := jsonrpc2.NewConn(jsonrpc2.NewHeaderStream(netConn)) +- server := protocol.ServerDispatcher(serverConn) +- +- // Forward between connections. +- serverConn.Go(ctx, +- protocol.Handlers( +- protocol.ClientHandler(client, +- jsonrpc2.MethodNotFound))) +- +- // Don't run the clientConn yet, so that we can complete the handshake before +- // processing any client messages. +- +- // Do a handshake with the server instance to exchange debug information. 
+- index := atomic.AddInt64(&serverIndex, 1) +- f.mu.Lock() +- f.serverConn = serverConn +- f.serverID = strconv.FormatInt(index, 10) +- f.mu.Unlock() +- f.handshake(ctx) +- clientConn.Go(ctx, +- protocol.Handlers( +- f.handler( +- protocol.ServerHandler(server, +- jsonrpc2.MethodNotFound)))) +- +- select { +- case <-serverConn.Done(): +- clientConn.Close() // ignore error +- case <-clientConn.Done(): +- serverConn.Close() // ignore error +- } +- +- err = nil +- if serverConn.Err() != nil { +- err = fmt.Errorf("remote disconnected: %v", serverConn.Err()) +- } else if clientConn.Err() != nil { +- err = fmt.Errorf("client disconnected: %v", clientConn.Err()) +- } +- event.Log(ctx, fmt.Sprintf("forwarder: exited with error: %v", err)) +- return err +-} +- +-// TODO(rfindley): remove this handshaking in favor of middleware. +-func (f *forwarder) handshake(ctx context.Context) { +- // This call to os.Executable is redundant, and will be eliminated by the +- // transition to the V2 API. +- goplsPath, err := os.Executable() +- if err != nil { +- event.Error(ctx, "getting executable for handshake", err) +- goplsPath = "" +- } +- var ( +- hreq = handshakeRequest{ +- ServerID: f.serverID, +- GoplsPath: goplsPath, +- } +- hresp handshakeResponse +- ) +- if di := debug.GetInstance(ctx); di != nil { +- hreq.Logfile = di.Logfile +- hreq.DebugAddr = di.ListenedDebugAddress() +- } +- if err := protocol.Call(ctx, f.serverConn, handshakeMethod, hreq, &hresp); err != nil { +- // TODO(rfindley): at some point in the future we should return an error +- // here. Handshakes have become functional in nature. +- event.Error(ctx, "forwarder: gopls handshake failed", err) +- } +- if hresp.GoplsPath != goplsPath { +- event.Error(ctx, "", fmt.Errorf("forwarder: gopls path mismatch: forwarder is %q, remote is %q", goplsPath, hresp.GoplsPath)) +- } +- event.Log(ctx, "New server", +- label.NewServer.Of(f.serverID), +- label.Logfile.Of(hresp.Logfile), +- label.DebugAddress.Of(hresp.DebugAddr), +- label.GoplsPath.Of(hresp.GoplsPath), +- label.ClientID.Of(hresp.SessionID), +- ) +-} +- +-func ConnectToRemote(ctx context.Context, addr string) (net.Conn, error) { +- dialer, err := newAutoDialer(addr, nil) +- if err != nil { +- return nil, err +- } +- return dialer.dialNet(ctx) +-} +- +-// handler intercepts messages to the daemon to enrich them with local +-// information. +-func (f *forwarder) handler(handler jsonrpc2.Handler) jsonrpc2.Handler { +- return func(ctx context.Context, reply jsonrpc2.Replier, r jsonrpc2.Request) error { +- // Intercept certain messages to add special handling. +- switch r.Method() { +- case "initialize": +- if newr, err := addGoEnvToInitializeRequest(ctx, r); err == nil { +- r = newr +- } else { +- log.Printf("unable to add local env to initialize request: %v", err) +- } +- case "workspace/executeCommand": +- var params protocol.ExecuteCommandParams +- if err := json.Unmarshal(r.Params(), ¶ms); err == nil { +- if params.Command == command.StartDebugging.String() { +- var args command.DebuggingArgs +- if err := command.UnmarshalArgs(params.Arguments, &args); err == nil { +- reply = f.replyWithDebugAddress(ctx, reply, args) +- } else { +- event.Error(ctx, "unmarshaling debugging args", err) +- } +- } +- } else { +- event.Error(ctx, "intercepting executeCommand request", err) +- } +- } +- // The gopls workspace environment defaults to the process environment in +- // which gopls daemon was started. 
To avoid discrepancies in Go environment +- // between the editor and daemon, inject any unset variables in `go env` +- // into the options sent by initialize. +- // +- // See also golang.org/issue/37830. +- return handler(ctx, reply, r) +- } +-} +- +-// addGoEnvToInitializeRequest builds a new initialize request in which we set +-// any environment variables output by `go env` and not already present in the +-// request. +-// +-// It returns an error if r is not an initialize request, or is otherwise +-// malformed. +-func addGoEnvToInitializeRequest(ctx context.Context, r jsonrpc2.Request) (jsonrpc2.Request, error) { +- var params protocol.ParamInitialize +- if err := json.Unmarshal(r.Params(), ¶ms); err != nil { +- return nil, err +- } +- var opts map[string]any +- switch v := params.InitializationOptions.(type) { +- case nil: +- opts = make(map[string]any) +- case map[string]any: +- opts = v +- default: +- return nil, fmt.Errorf("unexpected type for InitializationOptions: %T", v) +- } +- envOpt, ok := opts["env"] +- if !ok { +- envOpt = make(map[string]any) +- } +- env, ok := envOpt.(map[string]any) +- if !ok { +- return nil, fmt.Errorf(`env option is %T, expected a map`, envOpt) +- } +- goenv, err := getGoEnv(ctx, env) +- if err != nil { +- return nil, err +- } +- // We don't want to propagate GOWORK unless explicitly set since that could mess with +- // path inference during cmd/go invocations, see golang/go#51825. +- _, goworkSet := os.LookupEnv("GOWORK") +- for govar, value := range goenv { +- if govar == "GOWORK" && !goworkSet { +- continue +- } +- env[govar] = value +- } +- opts["env"] = env +- params.InitializationOptions = opts +- call, ok := r.(*jsonrpc2.Call) +- if !ok { +- return nil, fmt.Errorf("%T is not a *jsonrpc2.Call", r) +- } +- return jsonrpc2.NewCall(call.ID(), "initialize", params) +-} +- +-func (f *forwarder) replyWithDebugAddress(outerCtx context.Context, r jsonrpc2.Replier, args command.DebuggingArgs) jsonrpc2.Replier { +- di := debug.GetInstance(outerCtx) +- if di == nil { +- event.Log(outerCtx, "no debug instance to start") +- return r +- } +- return func(ctx context.Context, result any, outerErr error) error { +- if outerErr != nil { +- return r(ctx, result, outerErr) +- } +- // Enrich the result with our own debugging information. Since we're an +- // intermediary, the jsonrpc2 package has deserialized the result into +- // maps, by default. Re-do the unmarshalling. +- raw, err := json.Marshal(result) +- if err != nil { +- event.Error(outerCtx, "marshaling intermediate command result", err) +- return r(ctx, result, err) +- } +- var modified command.DebuggingResult +- if err := json.Unmarshal(raw, &modified); err != nil { +- event.Error(outerCtx, "unmarshaling intermediate command result", err) +- return r(ctx, result, err) +- } +- addr := args.Addr +- if addr == "" { +- addr = "localhost:0" +- } +- addr, err = di.Serve(outerCtx, addr) +- if err != nil { +- event.Error(outerCtx, "starting debug server", err) +- return r(ctx, result, err) +- } +- urls := []string{"http://" + addr} +- modified.URLs = append(urls, modified.URLs...) +- go f.handshake(ctx) +- return r(ctx, modified, nil) +- } +-} +- +-// A handshakeRequest identifies a client to the LSP server. +-type handshakeRequest struct { +- // ServerID is the ID of the server on the client. This should usually be 0. +- ServerID string `json:"serverID"` +- // Logfile is the location of the clients log file. +- Logfile string `json:"logfile"` +- // DebugAddr is the client debug address. 
+- DebugAddr string `json:"debugAddr"` +- // GoplsPath is the path to the Gopls binary running the current client +- // process. +- GoplsPath string `json:"goplsPath"` +-} +- +-// A handshakeResponse is returned by the LSP server to tell the LSP client +-// information about its session. +-type handshakeResponse struct { +- // SessionID is the server session associated with the client. +- SessionID string `json:"sessionID"` +- // Logfile is the location of the server logs. +- Logfile string `json:"logfile"` +- // DebugAddr is the server debug address. +- DebugAddr string `json:"debugAddr"` +- // GoplsPath is the path to the Gopls binary running the current server +- // process. +- GoplsPath string `json:"goplsPath"` +-} +- +-// clientSession identifies a current client LSP session on the server. Note +-// that it looks similar to handshakeResposne, but in fact 'Logfile' and +-// 'DebugAddr' now refer to the client. +-type clientSession struct { +- SessionID string `json:"sessionID"` +- Logfile string `json:"logfile"` +- DebugAddr string `json:"debugAddr"` +-} +- +-// serverState holds information about the gopls daemon process, including its +-// debug information and debug information of all of its current connected +-// clients. +-type serverState struct { +- Logfile string `json:"logfile"` +- DebugAddr string `json:"debugAddr"` +- GoplsPath string `json:"goplsPath"` +- CurrentClientID string `json:"currentClientID"` +- Clients []clientSession `json:"clients"` +-} +- +-const ( +- handshakeMethod = "gopls/handshake" +- sessionsMethod = "gopls/sessions" +-) +- +-func handshaker(session *cache.Session, goplsPath string, logHandshakes bool, handler jsonrpc2.Handler) jsonrpc2.Handler { +- return func(ctx context.Context, reply jsonrpc2.Replier, r jsonrpc2.Request) error { +- switch r.Method() { +- case handshakeMethod: +- // We log.Printf in this handler, rather than event.Log when we want logs +- // to go to the daemon log rather than being reflected back to the +- // client. +- var req handshakeRequest +- if err := json.Unmarshal(r.Params(), &req); err != nil { +- if logHandshakes { +- log.Printf("Error processing handshake for session %s: %v", session.ID(), err) +- } +- sendError(ctx, reply, err) +- return nil +- } +- if logHandshakes { +- log.Printf("Session %s: got handshake. 
Logfile: %q, Debug addr: %q", session.ID(), req.Logfile, req.DebugAddr) +- } +- event.Log(ctx, "Handshake session update", +- cache.KeyUpdateSession.Of(session), +- label.DebugAddress.Of(req.DebugAddr), +- label.Logfile.Of(req.Logfile), +- label.ServerID.Of(req.ServerID), +- label.GoplsPath.Of(req.GoplsPath), +- ) +- resp := handshakeResponse{ +- SessionID: session.ID(), +- GoplsPath: goplsPath, +- } +- if di := debug.GetInstance(ctx); di != nil { +- resp.Logfile = di.Logfile +- resp.DebugAddr = di.ListenedDebugAddress() +- } +- return reply(ctx, resp, nil) +- +- case sessionsMethod: +- resp := serverState{ +- GoplsPath: goplsPath, +- CurrentClientID: session.ID(), +- } +- if di := debug.GetInstance(ctx); di != nil { +- resp.Logfile = di.Logfile +- resp.DebugAddr = di.ListenedDebugAddress() +- for _, c := range di.State.Clients() { +- resp.Clients = append(resp.Clients, clientSession{ +- SessionID: c.Session.ID(), +- Logfile: c.Logfile, +- DebugAddr: c.DebugAddress, +- }) +- } +- } +- return reply(ctx, resp, nil) +- } +- return handler(ctx, reply, r) +- } +-} +- +-func sendError(ctx context.Context, reply jsonrpc2.Replier, err error) { +- err = fmt.Errorf("%v: %w", err, jsonrpc2.ErrParse) +- if err := reply(ctx, nil, err); err != nil { +- event.Error(ctx, "", err) +- } +-} +- +-// ParseAddr parses the address of a gopls remote. +-// TODO(rFindley): further document this syntax, and allow URI-style remote +-// addresses such as "auto://...". +-func ParseAddr(listen string) (network string, address string) { +- // Allow passing just -remote=auto, as a shorthand for using automatic remote +- // resolution. +- if listen == autoNetwork { +- return autoNetwork, "" +- } +- if parts := strings.SplitN(listen, ";", 2); len(parts) == 2 { +- return parts[0], parts[1] +- } +- return "tcp", listen +-} +diff -urN a/gopls/internal/lsprpc/lsprpc_test.go b/gopls/internal/lsprpc/lsprpc_test.go +--- a/gopls/internal/lsprpc/lsprpc_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/lsprpc/lsprpc_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,372 +0,0 @@ +-// Copyright 2020 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package lsprpc +- +-import ( +- "context" +- "encoding/json" +- "errors" +- "regexp" +- "strings" +- "testing" +- "time" +- +- "golang.org/x/tools/gopls/internal/cache" +- "golang.org/x/tools/gopls/internal/debug" +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/gopls/internal/test/integration/fake" +- "golang.org/x/tools/internal/event" +- "golang.org/x/tools/internal/jsonrpc2" +- "golang.org/x/tools/internal/jsonrpc2/servertest" +- "golang.org/x/tools/internal/testenv" +-) +- +-type FakeClient struct { +- protocol.Client +- +- Logs chan string +-} +- +-func (c FakeClient) LogMessage(ctx context.Context, params *protocol.LogMessageParams) error { +- c.Logs <- params.Message +- return nil +-} +- +-// fakeServer is intended to be embedded in the test fakes below, to trivially +-// implement Shutdown. 
+-type fakeServer struct { +- protocol.Server +-} +- +-func (fakeServer) Shutdown(ctx context.Context) error { +- return nil +-} +- +-type PingServer struct{ fakeServer } +- +-func (s PingServer) DidOpen(ctx context.Context, params *protocol.DidOpenTextDocumentParams) error { +- event.Log(ctx, "ping") +- return nil +-} +- +-func TestClientLogging(t *testing.T) { +- ctx := t.Context() +- +- server := PingServer{} +- client := FakeClient{Logs: make(chan string, 10)} +- +- ctx = debug.WithInstance(ctx) +- ss := NewStreamServer(cache.New(nil), false, nil) +- ss.serverForTest = server +- ts := servertest.NewPipeServer(ss, nil) +- defer checkClose(t, ts.Close) +- cc := ts.Connect(ctx) +- cc.Go(ctx, protocol.ClientHandler(client, jsonrpc2.MethodNotFound)) +- +- if err := protocol.ServerDispatcher(cc).DidOpen(ctx, &protocol.DidOpenTextDocumentParams{}); err != nil { +- t.Errorf("DidOpen: %v", err) +- } +- +- select { +- case got := <-client.Logs: +- want := "ping" +- matched, err := regexp.MatchString(want, got) +- if err != nil { +- t.Fatal(err) +- } +- if !matched { +- t.Errorf("got log %q, want a log containing %q", got, want) +- } +- case <-time.After(1 * time.Second): +- t.Error("timeout waiting for client log") +- } +-} +- +-// WaitableServer instruments LSP request so that we can control their timing. +-// The requests chosen are arbitrary: we simply needed one that blocks, and +-// another that doesn't. +-type WaitableServer struct { +- fakeServer +- +- Started chan struct{} +- Completed chan error +-} +- +-func (s WaitableServer) Hover(ctx context.Context, _ *protocol.HoverParams) (_ *protocol.Hover, err error) { +- s.Started <- struct{}{} +- defer func() { +- s.Completed <- err +- }() +- select { +- case <-ctx.Done(): +- return nil, errors.New("cancelled hover") +- case <-time.After(10 * time.Second): +- } +- return &protocol.Hover{}, nil +-} +- +-func (s WaitableServer) ResolveCompletionItem(_ context.Context, item *protocol.CompletionItem) (*protocol.CompletionItem, error) { +- return item, nil +-} +- +-func checkClose(t *testing.T, closer func() error) { +- t.Helper() +- if err := closer(); err != nil { +- t.Errorf("closing: %v", err) +- } +-} +- +-func setupForwarding(ctx context.Context, t *testing.T, s protocol.Server) (direct, forwarded servertest.Connector, cleanup func()) { +- t.Helper() +- serveCtx := debug.WithInstance(ctx) +- ss := NewStreamServer(cache.New(nil), false, nil) +- ss.serverForTest = s +- tsDirect := servertest.NewTCPServer(serveCtx, ss, nil) +- +- forwarder, err := NewForwarder("tcp;"+tsDirect.Addr, nil) +- if err != nil { +- t.Fatal(err) +- } +- tsForwarded := servertest.NewPipeServer(forwarder, nil) +- return tsDirect, tsForwarded, func() { +- checkClose(t, tsDirect.Close) +- checkClose(t, tsForwarded.Close) +- } +-} +- +-func TestRequestCancellation(t *testing.T) { +- ctx := context.Background() +- server := WaitableServer{ +- Started: make(chan struct{}), +- Completed: make(chan error), +- } +- tsDirect, tsForwarded, cleanup := setupForwarding(ctx, t, server) +- defer cleanup() +- tests := []struct { +- serverType string +- ts servertest.Connector +- }{ +- {"direct", tsDirect}, +- {"forwarder", tsForwarded}, +- } +- +- for _, test := range tests { +- t.Run(test.serverType, func(t *testing.T) { +- cc := test.ts.Connect(ctx) +- sd := protocol.ServerDispatcher(cc) +- cc.Go(ctx, +- protocol.Handlers( +- jsonrpc2.MethodNotFound)) +- +- ctx := context.Background() +- ctx, cancel := context.WithCancel(ctx) +- +- result := make(chan error) +- go func() { +- _, err 
:= sd.Hover(ctx, &protocol.HoverParams{}) +- result <- err +- }() +- // Wait for the Hover request to start. +- <-server.Started +- cancel() +- if err := <-result; err == nil { +- t.Error("nil error for cancelled Hover(), want non-nil") +- } +- if err := <-server.Completed; err == nil || !strings.Contains(err.Error(), "cancelled hover") { +- t.Errorf("Hover(): unexpected server-side error %v", err) +- } +- }) +- } +-} +- +-const exampleProgram = ` +--- go.mod -- +-module mod +- +-go 1.12 +--- main.go -- +-package main +- +-import "fmt" +- +-func main() { +- fmt.Println("Hello World.") +-}` +- +-func TestDebugInfoLifecycle(t *testing.T) { +- sb, err := fake.NewSandbox(&fake.SandboxConfig{Files: fake.UnpackTxt(exampleProgram)}) +- if err != nil { +- t.Fatal(err) +- } +- defer func() { +- if err := sb.Close(); err != nil { +- // TODO(golang/go#38490): we can't currently make this an error because +- // it fails on Windows: the workspace directory is still locked by a +- // separate Go process. +- // Once we have a reliable way to wait for proper shutdown, make this an +- // error. +- t.Logf("closing workspace failed: %v", err) +- } +- }() +- +- baseCtx := t.Context() +- clientCtx := debug.WithInstance(baseCtx) +- serverCtx := debug.WithInstance(baseCtx) +- +- ss := NewStreamServer(cache.New(nil), false, nil) +- tsBackend := servertest.NewTCPServer(serverCtx, ss, nil) +- +- forwarder, err := NewForwarder("tcp;"+tsBackend.Addr, nil) +- if err != nil { +- t.Fatal(err) +- } +- tsForwarder := servertest.NewPipeServer(forwarder, nil) +- +- ed1, err := fake.NewEditor(sb, fake.EditorConfig{}).Connect(clientCtx, tsForwarder, fake.ClientHooks{}) +- if err != nil { +- t.Fatal(err) +- } +- defer ed1.Close(clientCtx) +- ed2, err := fake.NewEditor(sb, fake.EditorConfig{}).Connect(baseCtx, tsBackend, fake.ClientHooks{}) +- if err != nil { +- t.Fatal(err) +- } +- defer ed2.Close(baseCtx) +- +- serverDebug := debug.GetInstance(serverCtx) +- if got, want := len(serverDebug.State.Clients()), 2; got != want { +- t.Errorf("len(server:Clients) = %d, want %d", got, want) +- } +- if got, want := len(serverDebug.State.Sessions()), 2; got != want { +- t.Errorf("len(server:Sessions) = %d, want %d", got, want) +- } +- clientDebug := debug.GetInstance(clientCtx) +- if got, want := len(clientDebug.State.Servers()), 1; got != want { +- t.Errorf("len(client:Servers) = %d, want %d", got, want) +- } +- // Close one of the connections to verify that the client and session were +- // dropped. 
+- if err := ed1.Close(clientCtx); err != nil { +- t.Fatal(err) +- } +- /*TODO: at this point we have verified the editor is closed +- However there is no way currently to wait for all associated go routines to +- go away, and we need to wait for those to trigger the client drop +- for now we just give it a little bit of time, but we need to fix this +- in a principled way +- */ +- start := time.Now() +- delay := time.Millisecond +- const maxWait = time.Second +- for len(serverDebug.State.Clients()) > 1 { +- if time.Since(start) > maxWait { +- break +- } +- time.Sleep(delay) +- delay *= 2 +- } +- if got, want := len(serverDebug.State.Clients()), 1; got != want { +- t.Errorf("len(server:Clients) = %d, want %d", got, want) +- } +- if got, want := len(serverDebug.State.Sessions()), 1; got != want { +- t.Errorf("len(server:Sessions()) = %d, want %d", got, want) +- } +-} +- +-type initServer struct { +- fakeServer +- +- params *protocol.ParamInitialize +-} +- +-func (s *initServer) Initialize(ctx context.Context, params *protocol.ParamInitialize) (*protocol.InitializeResult, error) { +- s.params = params +- return &protocol.InitializeResult{}, nil +-} +- +-func TestEnvForwarding(t *testing.T) { +- testenv.NeedsTool(t, "go") +- +- ctx := context.Background() +- +- server := &initServer{} +- _, tsForwarded, cleanup := setupForwarding(ctx, t, server) +- defer cleanup() +- +- conn := tsForwarded.Connect(ctx) +- conn.Go(ctx, jsonrpc2.MethodNotFound) +- dispatch := protocol.ServerDispatcher(conn) +- initParams := &protocol.ParamInitialize{} +- initParams.InitializationOptions = map[string]any{ +- "env": map[string]any{ +- "GONOPROXY": "example.com", +- }, +- } +- _, err := dispatch.Initialize(ctx, initParams) +- if err != nil { +- t.Fatal(err) +- } +- if server.params == nil { +- t.Fatalf("initialize params are unset") +- } +- env := server.params.InitializationOptions.(map[string]any)["env"].(map[string]any) +- +- // Check for an arbitrary Go variable. It should be set. +- if _, ok := env["GOPRIVATE"]; !ok { +- t.Errorf("Go environment variable GOPRIVATE unset in initialization options") +- } +- // Check that the variable present in our user config was not overwritten. +- if v := env["GONOPROXY"]; v != "example.com" { +- t.Errorf("GONOPROXY environment variable was overwritten") +- } +-} +- +-func TestListenParsing(t *testing.T) { +- tests := []struct { +- input, wantNetwork, wantAddr string +- }{ +- {"127.0.0.1:0", "tcp", "127.0.0.1:0"}, +- {"unix;/tmp/sock", "unix", "/tmp/sock"}, +- {"auto", "auto", ""}, +- {"auto;foo", "auto", "foo"}, +- } +- +- for _, test := range tests { +- gotNetwork, gotAddr := ParseAddr(test.input) +- if gotNetwork != test.wantNetwork { +- t.Errorf("network = %q, want %q", gotNetwork, test.wantNetwork) +- } +- if gotAddr != test.wantAddr { +- t.Errorf("addr = %q, want %q", gotAddr, test.wantAddr) +- } +- } +-} +- +-// For #59479, verify that empty slices are serialized as []. +-func TestEmptySlices(t *testing.T) { +- // The LSP would prefer that empty slices be sent as [] rather than null. 
+- const bad = `{"a":null}` +- const good = `{"a":[]}` +- var x struct { +- A []string `json:"a"` +- } +- buf, _ := json.Marshal(x) +- if string(buf) != bad { +- // uninitialized is ezpected to give null +- t.Errorf("unexpectedly got %s, want %s", buf, bad) +- } +- x.A = make([]string, 0) +- buf, _ = json.Marshal(x) +- if string(buf) != good { +- // expect [] +- t.Errorf("unexpectedly got %s, want %s", buf, good) +- } +- x.A = []string{} +- buf, _ = json.Marshal(x) +- if string(buf) != good { +- // expect [] +- t.Errorf("unexpectedly got %s, want %s", buf, good) +- } +-} +diff -urN a/gopls/internal/lsprpc/middleware_test.go b/gopls/internal/lsprpc/middleware_test.go +--- a/gopls/internal/lsprpc/middleware_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/lsprpc/middleware_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,223 +0,0 @@ +-// Copyright 2021 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package lsprpc_test +- +-import ( +- "context" +- "encoding/json" +- "errors" +- "fmt" +- "sync" +- "testing" +- "time" +- +- . "golang.org/x/tools/gopls/internal/lsprpc" +- "golang.org/x/tools/internal/event" +- jsonrpc2_v2 "golang.org/x/tools/internal/jsonrpc2_v2" +-) +- +-var noopBinder = BinderFunc(func(context.Context, *jsonrpc2_v2.Connection) jsonrpc2_v2.ConnectionOptions { +- return jsonrpc2_v2.ConnectionOptions{} +-}) +- +-func TestHandshakeMiddleware(t *testing.T) { +- sh := &Handshaker{ +- metadata: metadata{ +- "answer": 42, +- }, +- } +- ctx := context.Background() +- env := new(TestEnv) +- defer env.Shutdown(t) +- l, _ := env.serve(ctx, t, sh.Middleware(noopBinder)) +- conn := env.dial(ctx, t, l.Dialer(), noopBinder, false) +- ch := &Handshaker{ +- metadata: metadata{ +- "question": 6 * 9, +- }, +- } +- +- check := func(connected bool) error { +- clients := sh.Peers() +- servers := ch.Peers() +- want := 0 +- if connected { +- want = 1 +- } +- if got := len(clients); got != want { +- return fmt.Errorf("got %d clients on the server, want %d", got, want) +- } +- if got := len(servers); got != want { +- return fmt.Errorf("got %d servers on the client, want %d", got, want) +- } +- if !connected { +- return nil +- } +- client := clients[0] +- server := servers[0] +- if _, ok := client.Metadata["question"]; !ok { +- return errors.New("no client metadata") +- } +- if _, ok := server.Metadata["answer"]; !ok { +- return errors.New("no server metadata") +- } +- if client.LocalID != server.RemoteID { +- return fmt.Errorf("client.LocalID == %d, server.PeerID == %d", client.LocalID, server.RemoteID) +- } +- if client.RemoteID != server.LocalID { +- return fmt.Errorf("client.PeerID == %d, server.LocalID == %d", client.RemoteID, server.LocalID) +- } +- return nil +- } +- +- if err := check(false); err != nil { +- t.Fatalf("before handshake: %v", err) +- } +- ch.ClientHandshake(ctx, conn) +- if err := check(true); err != nil { +- t.Fatalf("after handshake: %v", err) +- } +- conn.Close() // ignore error +- // Wait for up to ~2s for connections to get cleaned up. +- delay := 25 * time.Millisecond +- for retries := 3; retries >= 0; retries-- { +- time.Sleep(delay) +- err := check(false) +- if err == nil { +- return +- } +- if retries == 0 { +- t.Fatalf("after closing connection: %v", err) +- } +- delay *= 4 +- } +-} +- +-// Handshaker handles both server and client handshaking over jsonrpc2 v2. +-// To instrument server-side handshaking, use Handshaker.Middleware. 
+-// To instrument client-side handshaking, call +-// Handshaker.ClientHandshake for any new client-side connections. +-type Handshaker struct { +- // metadata will be shared with peers via handshaking. +- metadata metadata +- +- mu sync.Mutex +- prevID int64 +- peers map[int64]PeerInfo +-} +- +-// metadata holds arbitrary data transferred between jsonrpc2 peers. +-type metadata map[string]any +- +-// PeerInfo holds information about a peering between jsonrpc2 servers. +-type PeerInfo struct { +- // RemoteID is the identity of the current server on its peer. +- RemoteID int64 +- +- // LocalID is the identity of the peer on the server. +- LocalID int64 +- +- // IsClient reports whether the peer is a client. If false, the peer is a +- // server. +- IsClient bool +- +- // Metadata holds arbitrary information provided by the peer. +- Metadata metadata +-} +- +-// Peers returns the peer info this handshaker knows about by way of either the +-// server-side handshake middleware, or client-side handshakes. +-func (h *Handshaker) Peers() []PeerInfo { +- h.mu.Lock() +- defer h.mu.Unlock() +- +- var c []PeerInfo +- for _, v := range h.peers { +- c = append(c, v) +- } +- return c +-} +- +-// Middleware is a jsonrpc2 middleware function to augment connection binding +-// to handle the handshake method, and record disconnections. +-func (h *Handshaker) Middleware(inner jsonrpc2_v2.Binder) jsonrpc2_v2.Binder { +- return BinderFunc(func(ctx context.Context, conn *jsonrpc2_v2.Connection) jsonrpc2_v2.ConnectionOptions { +- opts := inner.Bind(ctx, conn) +- +- localID := h.nextID() +- info := &PeerInfo{ +- RemoteID: localID, +- Metadata: h.metadata, +- } +- +- // Wrap the delegated handler to accept the handshake. +- delegate := opts.Handler +- opts.Handler = jsonrpc2_v2.HandlerFunc(func(ctx context.Context, req *jsonrpc2_v2.Request) (any, error) { +- if req.Method == HandshakeMethod { +- var peerInfo PeerInfo +- if err := json.Unmarshal(req.Params, &peerInfo); err != nil { +- return nil, fmt.Errorf("%w: unmarshaling client info: %v", jsonrpc2_v2.ErrInvalidParams, err) +- } +- peerInfo.LocalID = localID +- peerInfo.IsClient = true +- h.recordPeer(peerInfo) +- return info, nil +- } +- return delegate.Handle(ctx, req) +- }) +- +- // Record the dropped client. +- go h.cleanupAtDisconnect(conn, localID) +- +- return opts +- }) +-} +- +-// ClientHandshake performs a client-side handshake with the server at the +-// other end of conn, recording the server's peer info and watching for conn's +-// disconnection. 
+-func (h *Handshaker) ClientHandshake(ctx context.Context, conn *jsonrpc2_v2.Connection) { +- localID := h.nextID() +- info := &PeerInfo{ +- RemoteID: localID, +- Metadata: h.metadata, +- } +- +- call := conn.Call(ctx, HandshakeMethod, info) +- var serverInfo PeerInfo +- if err := call.Await(ctx, &serverInfo); err != nil { +- event.Error(ctx, "performing handshake", err) +- return +- } +- serverInfo.LocalID = localID +- h.recordPeer(serverInfo) +- +- go h.cleanupAtDisconnect(conn, localID) +-} +- +-func (h *Handshaker) nextID() int64 { +- h.mu.Lock() +- defer h.mu.Unlock() +- +- h.prevID++ +- return h.prevID +-} +- +-func (h *Handshaker) cleanupAtDisconnect(conn *jsonrpc2_v2.Connection, peerID int64) { +- conn.Wait() // ignore error +- +- h.mu.Lock() +- defer h.mu.Unlock() +- delete(h.peers, peerID) +-} +- +-func (h *Handshaker) recordPeer(info PeerInfo) { +- h.mu.Lock() +- defer h.mu.Unlock() +- if h.peers == nil { +- h.peers = make(map[int64]PeerInfo) +- } +- h.peers[info.LocalID] = info +-} +diff -urN a/gopls/internal/mcp/context.go b/gopls/internal/mcp/context.go +--- a/gopls/internal/mcp/context.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/mcp/context.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,410 +0,0 @@ +-// Copyright 2025 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package mcp +- +-// This file defines the "context" operation, which returns a summary of the +-// specified package. +- +-import ( +- "bytes" +- "context" +- "fmt" +- "go/ast" +- "go/token" +- "slices" +- "strings" +- +- "github.com/modelcontextprotocol/go-sdk/mcp" +- "golang.org/x/tools/gopls/internal/cache" +- "golang.org/x/tools/gopls/internal/cache/metadata" +- "golang.org/x/tools/gopls/internal/cache/parsego" +- "golang.org/x/tools/gopls/internal/file" +- "golang.org/x/tools/gopls/internal/golang" +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/internal/analysisinternal" +- "golang.org/x/tools/internal/astutil" +-) +- +-type ContextParams struct { +- File string `json:"file" jsonschema:"the absolute path to the file"` +-} +- +-func (h *handler) contextHandler(ctx context.Context, req *mcp.CallToolRequest, params ContextParams) (*mcp.CallToolResult, any, error) { +- countGoContextMCP.Inc() +- fh, snapshot, release, err := h.fileOf(ctx, params.File) +- if err != nil { +- return nil, nil, err +- } +- defer release() +- +- // TODO(hxjiang): support context for GoMod. +- if snapshot.FileKind(fh) != file.Go { +- return nil, nil, fmt.Errorf("can't provide context for non-Go file") +- } +- +- pkg, pgf, err := golang.NarrowestPackageForFile(ctx, snapshot, fh.URI()) +- if err != nil { +- return nil, nil, err +- } +- +- var result strings.Builder +- +- fmt.Fprintf(&result, "Current package %q (package %s):\n\n", pkg.Metadata().PkgPath, pkg.Metadata().Name) +- // Write context of the current file. +- { +- fmt.Fprintf(&result, "%s (current file):\n", pgf.URI.Base()) +- result.WriteString("```go\n") +- if err := writeFileSummary(ctx, snapshot, pgf.URI, &result, false, nil); err != nil { +- return nil, nil, err +- } +- result.WriteString("```\n\n") +- } +- +- // Write context of the rest of the files in the current package. 
+- { +- for _, file := range pkg.CompiledGoFiles() { +- if file.URI == pgf.URI { +- continue +- } +- +- fmt.Fprintf(&result, "%s:\n", file.URI.Base()) +- result.WriteString("```go\n") +- if err := writeFileSummary(ctx, snapshot, file.URI, &result, false, nil); err != nil { +- return nil, nil, err +- } +- result.WriteString("```\n\n") +- } +- } +- +- // Write dependencies context of current file. +- if len(pgf.File.Imports) > 0 { +- // Write import decls of the current file. +- { +- fmt.Fprintf(&result, "Current file %q contains this import declaration:\n", pgf.URI.Base()) +- result.WriteString("```go\n") +- // Add all import decl to output including all floating comment by +- // using GenDecl's start and end position. +- for _, decl := range pgf.File.Decls { +- genDecl, ok := decl.(*ast.GenDecl) +- if !ok || genDecl.Tok != token.IMPORT { +- continue +- } +- +- text, err := pgf.NodeText(genDecl) +- if err != nil { +- return nil, nil, err +- } +- +- result.Write(text) +- result.WriteString("\n") +- } +- result.WriteString("```\n\n") +- } +- +- var toSummarize []*ast.ImportSpec +- for _, spec := range pgf.File.Imports { +- // Skip the standard library to reduce token usage, operating on +- // the assumption that the LLM is already familiar with its +- // symbols and documentation. +- if analysisinternal.IsStdPackage(spec.Path.Value) { +- continue +- } +- toSummarize = append(toSummarize, spec) +- } +- +- // Write summaries from imported packages. +- if len(toSummarize) > 0 { +- result.WriteString("The imported packages declare the following symbols:\n\n") +- for _, spec := range toSummarize { +- path := metadata.UnquoteImportPath(spec) +- id := pkg.Metadata().DepsByImpPath[path] +- if id == "" { +- continue // ignore error +- } +- md := snapshot.Metadata(id) +- if md == nil { +- continue // ignore error +- } +- if summary := summarizePackage(ctx, snapshot, md); summary != "" { +- result.WriteString(summary) +- } +- } +- } +- } +- +- return textResult(result.String()), nil, nil +-} +- +-func summarizePackage(ctx context.Context, snapshot *cache.Snapshot, md *metadata.Package) string { +- var buf strings.Builder +- fmt.Fprintf(&buf, "%q (package %s)\n", md.PkgPath, md.Name) +- for _, f := range md.CompiledGoFiles { +- fmt.Fprintf(&buf, "%s:\n", f.Base()) +- buf.WriteString("```go\n") +- if err := writeFileSummary(ctx, snapshot, f, &buf, true, nil); err != nil { +- return "" // ignore error +- } +- buf.WriteString("```\n\n") +- } +- return buf.String() +-} +- +-// writeFileSummary writes the file summary to the string builder based on +-// the input file URI. +-func writeFileSummary(ctx context.Context, snapshot *cache.Snapshot, f protocol.DocumentURI, out *strings.Builder, onlyExported bool, declsToSummarize map[string]bool) error { +- fh, err := snapshot.ReadFile(ctx, f) +- if err != nil { +- return err +- } +- pgf, err := snapshot.ParseGo(ctx, fh, parsego.Full) +- if err != nil { +- return err +- } +- +- // If we're summarizing specific declarations, we don't need to copy the header. +- if declsToSummarize == nil { +- // Copy everything before the first non-import declaration: +- // package decl, imports decl(s), and all comments (excluding copyright). 
+- { +- endPos := pgf.File.FileEnd +- +- outerloop: +- for _, decl := range pgf.File.Decls { +- switch decl := decl.(type) { +- case *ast.FuncDecl: +- if decl.Doc != nil { +- endPos = decl.Doc.Pos() +- } else { +- endPos = decl.Pos() +- } +- break outerloop +- case *ast.GenDecl: +- if decl.Tok == token.IMPORT { +- continue +- } +- if decl.Doc != nil { +- endPos = decl.Doc.Pos() +- } else { +- endPos = decl.Pos() +- } +- break outerloop +- } +- } +- +- startPos := pgf.File.FileStart +- if copyright := golang.CopyrightComment(pgf.File); copyright != nil { +- startPos = copyright.End() +- } +- +- text, err := pgf.PosText(startPos, endPos) +- if err != nil { +- return err +- } +- +- out.Write(bytes.TrimSpace(text)) +- out.WriteString("\n\n") +- } +- } +- +- // Write func decl and gen decl. +- for _, decl := range pgf.File.Decls { +- switch decl := decl.(type) { +- case *ast.FuncDecl: +- if declsToSummarize != nil { +- if _, ok := declsToSummarize[decl.Name.Name]; !ok { +- continue +- } +- } +- if onlyExported { +- if !decl.Name.IsExported() { +- continue +- } +- +- if decl.Recv != nil && len(decl.Recv.List) > 0 { +- _, rname, _ := astutil.UnpackRecv(decl.Recv.List[0].Type) +- if !rname.IsExported() { +- continue +- } +- } +- } +- +- // Write doc comment and func signature. +- startPos := decl.Pos() +- if decl.Doc != nil { +- startPos = decl.Doc.Pos() +- } +- +- text, err := pgf.PosText(startPos, decl.Type.End()) +- if err != nil { +- return err +- } +- +- out.Write(text) +- out.WriteString("\n\n") +- +- case *ast.GenDecl: +- if decl.Tok == token.IMPORT { +- continue +- } +- +- // If we are summarizing specific decls, check if any of them are in this GenDecl. +- if declsToSummarize != nil { +- found := false +- for _, spec := range decl.Specs { +- switch spec := spec.(type) { +- case *ast.TypeSpec: +- if _, ok := declsToSummarize[spec.Name.Name]; ok { +- found = true +- } +- case *ast.ValueSpec: +- for _, name := range spec.Names { +- if _, ok := declsToSummarize[name.Name]; ok { +- found = true +- } +- } +- } +- } +- if !found { +- continue +- } +- } +- +- // Dump the entire GenDecl (exported or unexported) +- // including doc comment without any filtering to the output. +- if !onlyExported { +- startPos := decl.Pos() +- if decl.Doc != nil { +- startPos = decl.Doc.Pos() +- } +- text, err := pgf.PosText(startPos, decl.End()) +- if err != nil { +- return err +- } +- +- out.Write(text) +- out.WriteString("\n") +- continue +- } +- +- // Write only the GenDecl with exported identifier to the output. +- var buf bytes.Buffer +- if decl.Doc != nil { +- text, err := pgf.NodeText(decl.Doc) +- if err != nil { +- return err +- } +- buf.Write(text) +- buf.WriteString("\n") +- } +- +- buf.WriteString(decl.Tok.String() + " ") +- if decl.Lparen.IsValid() { +- buf.WriteString("(\n") +- } +- +- var anyExported bool +- for _, spec := range decl.Specs { +- // Captures the full byte range of the spec, including +- // its associated doc comments and line comments. +- // This range also covers any floating comments as these +- // can be valuable for context. Like +- // ``` +- // type foo struct { // floating comment. +- // // floating comment. +- // +- // x int +- // } +- // ``` +- var startPos, endPos token.Pos +- +- switch spec := spec.(type) { +- case *ast.TypeSpec: +- if declsToSummarize != nil { +- if _, ok := declsToSummarize[spec.Name.Name]; !ok { +- continue +- } +- } +- // TODO(hxjiang): only keep the exported field of +- // struct spec and exported method of interface spec. 
+- if !spec.Name.IsExported() { +- continue +- } +- anyExported = true +- +- // Include preceding doc comment, if any. +- if spec.Doc == nil { +- startPos = spec.Pos() +- } else { +- startPos = spec.Doc.Pos() +- } +- +- // Include trailing line comment, if any. +- if spec.Comment == nil { +- endPos = spec.End() +- } else { +- endPos = spec.Comment.End() +- } +- +- case *ast.ValueSpec: +- if declsToSummarize != nil { +- found := false +- for _, name := range spec.Names { +- if _, ok := declsToSummarize[name.Name]; ok { +- found = true +- } +- } +- if !found { +- continue +- } +- } +- // TODO(hxjiang): only keep the exported identifier. +- if !slices.ContainsFunc(spec.Names, (*ast.Ident).IsExported) { +- continue +- } +- anyExported = true +- +- if spec.Doc == nil { +- startPos = spec.Pos() +- } else { +- startPos = spec.Doc.Pos() +- } +- +- if spec.Comment == nil { +- endPos = spec.End() +- } else { +- endPos = spec.Comment.End() +- } +- } +- +- indent, err := pgf.Indentation(startPos) +- if err != nil { +- return err +- } +- +- buf.WriteString(indent) +- +- text, err := pgf.PosText(startPos, endPos) +- if err != nil { +- return err +- } +- +- buf.Write(text) +- buf.WriteString("\n") +- } +- +- if decl.Lparen.IsValid() { +- buf.WriteString(")\n") +- } +- +- // Only write the summary of the genDecl if there is +- // any exported spec. +- if anyExported { +- out.Write(buf.Bytes()) +- out.WriteString("\n") +- } +- } +- } +- return nil +-} +diff -urN a/gopls/internal/mcp/counters.go b/gopls/internal/mcp/counters.go +--- a/gopls/internal/mcp/counters.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/mcp/counters.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,23 +0,0 @@ +-// Copyright 2025 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package mcp +- +-import "golang.org/x/telemetry/counter" +- +-// Proposed counters for evaluating usage of Go MCP Server tools. These counters +-// increment when a user utilizes a specific Go MCP tool. +-var ( +- countGoContextMCP = counter.New("gopls/mcp-tool:go_context") +- countGoDiagnosticsMCP = counter.New("gopls/mcp-tool:go_diagnostics") +- countGoFileContextMCP = counter.New("gopls/mcp-tool:go_file_context") +- countGoFileDiagnosticsMCP = counter.New("gopls/mcp-tool:go_file_diagnostics") +- countGoFileMetadataMCP = counter.New("gopls/mcp-tool:go_file_metadata") +- countGoPackageAPIMCP = counter.New("gopls/mcp-tool:go_package_api") +- countGoReferencesMCP = counter.New("gopls/mcp-tool:go_references") +- countGoSearchMCP = counter.New("gopls/mcp-tool:go_search") +- countGoSymbolReferencesMCP = counter.New("gopls/mcp-tool:go_symbol_references") +- countGoWorkspaceMCP = counter.New("gopls/mcp-tool:go_workspace") +- countGoVulncheckMCP = counter.New("gopls/mcp-tool:go_vulncheck") +-) +diff -urN a/gopls/internal/mcp/file_context.go b/gopls/internal/mcp/file_context.go +--- a/gopls/internal/mcp/file_context.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/mcp/file_context.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,94 +0,0 @@ +-// Copyright 2025 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. 
+- +-package mcp +- +-import ( +- "context" +- "fmt" +- "go/ast" +- "go/types" +- "strings" +- +- "github.com/modelcontextprotocol/go-sdk/mcp" +- "golang.org/x/tools/gopls/internal/golang" +- "golang.org/x/tools/gopls/internal/protocol" +-) +- +-type fileContextParams struct { +- File string `json:"file" jsonschema:"the absolute path to the file"` +-} +- +-func (h *handler) fileContextHandler(ctx context.Context, req *mcp.CallToolRequest, params fileContextParams) (*mcp.CallToolResult, any, error) { +- countGoFileContextMCP.Inc() +- fh, snapshot, release, err := h.fileOf(ctx, params.File) +- if err != nil { +- return nil, nil, err +- } +- defer release() +- +- pkg, pgf, err := golang.NarrowestPackageForFile(ctx, snapshot, fh.URI()) +- if err != nil { +- return nil, nil, err +- } +- +- info := pkg.TypesInfo() +- if info == nil { +- return nil, nil, fmt.Errorf("no types info for package %q", pkg.Metadata().PkgPath) +- } +- +- // Group objects defined in other files by file URI. +- otherFiles := make(map[protocol.DocumentURI]map[string]bool) +- addObj := func(obj types.Object) { +- if obj == nil { +- return +- } +- pos := obj.Pos() +- if !pos.IsValid() { +- return +- } +- objFile := pkg.FileSet().File(pos) +- if objFile == nil { +- return +- } +- uri := protocol.URIFromPath(objFile.Name()) +- if uri == fh.URI() { +- return +- } +- if _, ok := otherFiles[uri]; !ok { +- otherFiles[uri] = make(map[string]bool) +- } +- otherFiles[uri][obj.Name()] = true +- } +- +- for cur := range pgf.Cursor.Preorder((*ast.Ident)(nil)) { +- id := cur.Node().(*ast.Ident) +- addObj(info.Uses[id]) +- addObj(info.Defs[id]) +- } +- +- var result strings.Builder +- fmt.Fprintf(&result, "File `%s` is in package %q.\n", params.File, pkg.Metadata().PkgPath) +- fmt.Fprintf(&result, "Below is a summary of the APIs it uses from other files.\n") +- fmt.Fprintf(&result, "To read the full API of any package, use go_package_api.\n") +- for uri, decls := range otherFiles { +- pkgPath := "UNKNOWN" +- md, err := snapshot.NarrowestMetadataForFile(ctx, uri) +- if err != nil { +- if ctx.Err() != nil { +- return nil, nil, ctx.Err() +- } +- } else { +- pkgPath = string(md.PkgPath) +- } +- fmt.Fprintf(&result, "Referenced declarations from %s (package %q):\n", uri.Path(), pkgPath) +- result.WriteString("```go\n") +- if err := writeFileSummary(ctx, snapshot, uri, &result, false, decls); err != nil { +- return nil, nil, err +- } +- result.WriteString("```\n\n") +- } +- +- return textResult(result.String()), nil, nil +-} +diff -urN a/gopls/internal/mcp/file_diagnostics.go b/gopls/internal/mcp/file_diagnostics.go +--- a/gopls/internal/mcp/file_diagnostics.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/mcp/file_diagnostics.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,203 +0,0 @@ +-// Copyright 2025 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package mcp +- +-// This file defines the "diagnostics" operation, which is responsible for +-// returning diagnostics for the input file. 
+- +-import ( +- "bytes" +- "context" +- "fmt" +- "io" +- "path/filepath" +- "slices" +- "strings" +- +- "golang.org/x/tools/gopls/internal/cache" +- "golang.org/x/tools/gopls/internal/golang" +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/internal/diff" +- +- "github.com/modelcontextprotocol/go-sdk/mcp" +-) +- +-type diagnosticsParams struct { +- File string `json:"file" jsonschema:"the absolute path to the file to diagnose"` +-} +- +-func (h *handler) fileDiagnosticsHandler(ctx context.Context, req *mcp.CallToolRequest, params diagnosticsParams) (*mcp.CallToolResult, any, error) { +- countGoFileDiagnosticsMCP.Inc() +- fh, snapshot, release, err := h.fileOf(ctx, params.File) +- if err != nil { +- return nil, nil, err +- } +- defer release() +- +- diagnostics, fixes, err := h.diagnoseFile(ctx, snapshot, fh.URI()) +- if err != nil { +- return nil, nil, err +- } +- +- var builder strings.Builder +- if len(diagnostics) == 0 { +- return textResult("No diagnostics"), nil, nil +- } +- +- if err := summarizeDiagnostics(ctx, snapshot, &builder, diagnostics, fixes); err != nil { +- return nil, nil, err +- } +- +- return textResult(builder.String()), nil, nil +-} +- +-// diagnoseFile diagnoses a single file, including go/analysis and quick fixes. +-func (h *handler) diagnoseFile(ctx context.Context, snapshot *cache.Snapshot, uri protocol.DocumentURI) ([]*cache.Diagnostic, map[*cache.Diagnostic]*protocol.CodeAction, error) { +- diagnostics, err := golang.DiagnoseFile(ctx, snapshot, uri) +- if err != nil { +- return nil, nil, err +- } +- if len(diagnostics) == 0 { +- return nil, nil, nil +- } +- +- // LSP [protocol.Diagnostic]s do not carry code edits directly. +- // Instead, gopls provides associated [protocol.CodeAction]s with their +- // diagnostics field populated. +- // Ignore errors. It is still valuable to provide only the diagnostic +- // without any text edits. +- // TODO(hxjiang): support code actions that returns call back command. 
+- actions, _ := h.lspServer.CodeAction(ctx, &protocol.CodeActionParams{ +- TextDocument: protocol.TextDocumentIdentifier{ +- URI: uri, +- }, +- Context: protocol.CodeActionContext{ +- Only: []protocol.CodeActionKind{protocol.QuickFix}, +- Diagnostics: cache.ToProtocolDiagnostics(diagnostics...), +- }, +- }) +- +- type key struct { +- Message string +- Range protocol.Range +- } +- +- actionMap := make(map[key]*protocol.CodeAction) +- for _, action := range actions { +- for _, d := range action.Diagnostics { +- k := key{d.Message, d.Range} +- if alt, ok := actionMap[k]; !ok || !alt.IsPreferred && action.IsPreferred { +- actionMap[k] = &action +- } +- } +- } +- +- fixes := make(map[*cache.Diagnostic]*protocol.CodeAction) +- for _, d := range diagnostics { +- if fix, ok := actionMap[key{d.Message, d.Range}]; ok { +- fixes[d] = fix +- } +- } +- return diagnostics, fixes, nil +-} +- +-func summarizeDiagnostics(ctx context.Context, snapshot *cache.Snapshot, w io.Writer, diagnostics []*cache.Diagnostic, fixes map[*cache.Diagnostic]*protocol.CodeAction) error { +- for _, d := range diagnostics { +- fmt.Fprintf(w, "%d:%d-%d:%d: [%s] %s\n", d.Range.Start.Line, d.Range.Start.Character, d.Range.End.Line, d.Range.End.Character, d.Severity, d.Message) +- +- fix, ok := fixes[d] +- if ok { +- diff, err := toUnifiedDiff(ctx, snapshot, fix.Edit.DocumentChanges) +- if err != nil { +- return err +- } +- +- fmt.Fprintf(w, "Fix:\n%s\n", diff) +- } +- } +- return nil +-} +- +-// toUnifiedDiff converts each [protocol.DocumentChange] into a separate +-// unified diff. +-// All returned diffs use forward slash ('/') as the file path separator for +-// consistency, regardless of the original system's separator. +-// Multiple changes targeting the same file are not consolidated. +-// TODO(hxjiang): consolidate diffs to the same file. +-func toUnifiedDiff(ctx context.Context, snapshot *cache.Snapshot, changes []protocol.DocumentChange) (string, error) { +- var res strings.Builder +- for _, change := range changes { +- switch { +- case change.CreateFile != nil: +- res.WriteString(diff.Unified("/dev/null", filepath.ToSlash(change.CreateFile.URI.Path()), "", "")) +- case change.DeleteFile != nil: +- fh, err := snapshot.ReadFile(ctx, change.DeleteFile.URI) +- if err != nil { +- return "", err +- } +- content, err := fh.Content() +- if err != nil { +- return "", err +- } +- res.WriteString(diff.Unified(filepath.ToSlash(change.DeleteFile.URI.Path()), "/dev/null", string(content), "")) +- case change.RenameFile != nil: +- fh, err := snapshot.ReadFile(ctx, change.RenameFile.OldURI) +- if err != nil { +- return "", err +- } +- content, err := fh.Content() +- if err != nil { +- return "", err +- } +- res.WriteString(diff.Unified(filepath.ToSlash(change.RenameFile.OldURI.Path()), filepath.ToSlash(change.RenameFile.NewURI.Path()), string(content), string(content))) +- case change.TextDocumentEdit != nil: +- // Assumes gopls never return AnnotatedTextEdit. +- sorted := protocol.AsTextEdits(change.TextDocumentEdit.Edits) +- +- // As stated by the LSP, text edits ranges must never overlap. 
+- // https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#textEditArray +- slices.SortFunc(sorted, func(a, b protocol.TextEdit) int { +- if a.Range.Start.Line != b.Range.Start.Line { +- return int(a.Range.Start.Line) - int(b.Range.Start.Line) +- } +- return int(a.Range.Start.Character) - int(b.Range.Start.Character) +- }) +- +- fh, err := snapshot.ReadFile(ctx, change.TextDocumentEdit.TextDocument.URI) +- if err != nil { +- return "", err +- } +- content, err := fh.Content() +- if err != nil { +- return "", err +- } +- +- var newSrc bytes.Buffer +- { +- mapper := protocol.NewMapper(fh.URI(), content) +- +- start := 0 +- for _, edit := range sorted { +- l, r, err := mapper.RangeOffsets(edit.Range) +- if err != nil { +- return "", err +- } +- +- newSrc.Write(content[start:l]) +- newSrc.WriteString(edit.NewText) +- +- start = r +- } +- newSrc.Write(content[start:]) +- } +- +- res.WriteString(diff.Unified(filepath.ToSlash(fh.URI().Path()), filepath.ToSlash(fh.URI().Path()), string(content), newSrc.String())) +- default: +- continue // this shouldn't happen +- } +- res.WriteString("\n") +- } +- return res.String(), nil +-} +diff -urN a/gopls/internal/mcp/file_metadata.go b/gopls/internal/mcp/file_metadata.go +--- a/gopls/internal/mcp/file_metadata.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/mcp/file_metadata.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,41 +0,0 @@ +-// Copyright 2025 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package mcp +- +-import ( +- "context" +- "fmt" +- "strings" +- +- "github.com/modelcontextprotocol/go-sdk/mcp" +-) +- +-type fileMetadataParams struct { +- File string `json:"file" jsonschema:"the absolute path to the file to describe"` +-} +- +-func (h *handler) fileMetadataHandler(ctx context.Context, req *mcp.CallToolRequest, params fileMetadataParams) (*mcp.CallToolResult, any, error) { +- countGoFileMetadataMCP.Inc() +- fh, snapshot, release, err := h.fileOf(ctx, params.File) +- if err != nil { +- return nil, nil, err +- } +- defer release() +- +- md, err := snapshot.NarrowestMetadataForFile(ctx, fh.URI()) +- if err != nil { +- return nil, nil, err +- } +- +- var b strings.Builder +- addf := func(format string, args ...any) { +- fmt.Fprintf(&b, format, args...) +- } +- addf("File `%s` is in package %q, which has the following files:\n", params.File, md.PkgPath) +- for _, f := range md.CompiledGoFiles { +- addf("\t%s\n", f.Path()) +- } +- return textResult(b.String()), nil, nil +-} +diff -urN a/gopls/internal/mcp/instructions.md b/gopls/internal/mcp/instructions.md +--- a/gopls/internal/mcp/instructions.md 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/mcp/instructions.md 1969-12-31 18:00:00.000000000 -0600 +@@ -1,49 +0,0 @@ +-# The gopls MCP server +- +-These instructions describe how to efficiently work in the Go programming language using the gopls MCP server. You can load this file directly into a session where the gopls MCP server is connected. +- +-## Detecting a Go workspace +- +-At the start of every session, you MUST use the `go_workspace` tool to learn about the Go workspace. ONLY if you are in a Go workspace, you MUST run `go_vulncheck` immediately afterwards to identify any existing security risks. The rest of these instructions apply whenever that tool indicates that the user is in a Go workspace. 
+- +-## Go programming workflows +- +-These guidelines MUST be followed whenever working in a Go workspace. There are two workflows described below: the 'Read Workflow' must be followed when the user asks a question about a Go workspace. The 'Edit Workflow' must be followed when the user edits a Go workspace. +- +-You may re-do parts of each workflow as necessary to recover from errors. However, you must not skip any steps. +- +-### Read workflow +- +-The goal of the read workflow is to understand the codebase. +- +-1. **Understand the workspace layout**: Start by using `go_workspace` to understand the overall structure of the workspace, such as whether it's a module, a workspace, or a GOPATH project. +- +-2. **Find relevant symbols**: If you're looking for a specific type, function, or variable, use `go_search`. This is a fuzzy search that will help you locate symbols even if you don't know the exact name or location. +- EXAMPLE: search for the 'Server' type: `go_search({"query":"server"})` +- +-3. **Understand a file and its intra-package dependencies**: When you have a file path and want to understand its contents and how it connects to other files *in the same package*, use `go_file_context`. This tool will show you a summary of the declarations from other files in the same package that are used by the current file. `go_file_context` MUST be used immediately after reading any Go file for the first time, and MAY be re-used if dependencies have changed. +- EXAMPLE: to understand `server.go`'s dependencies on other files in its package: `go_file_context({"file":"/path/to/server.go"})` +- +-4. **Understand a package's public API**: When you need to understand what a package provides to external code (i.e., its public API), use `go_package_api`. This is especially useful for understanding third-party dependencies or other packages in the same monorepo. +- EXAMPLE: to see the API of the `storage` package: `go_package_api({"packagePaths":["example.com/internal/storage"]})` +- +-### Editing workflow +- +-The editing workflow is iterative. You should cycle through these steps until the task is complete. +- +-1. **Read first**: Before making any edits, follow the Read Workflow to understand the user's request and the relevant code. +- +-2. **Find references**: Before modifying the definition of any symbol, use the `go_symbol_references` tool to find all references to that identifier. This is critical for understanding the impact of your change. Read the files containing references to evaluate if any further edits are required. +- EXAMPLE: `go_symbol_references({"file":"/path/to/server.go","symbol":"Server.Run"})` +- +-3. **Make edits**: Make the required edits, including edits to references you identified in the previous step. Don't proceed to the next step until all planned edits are complete. +- +-4. **Check for errors**: After every code modification, you MUST call the `go_diagnostics` tool. Pass the paths of the files you have edited. This tool will report any build or analysis errors. +- EXAMPLE: `go_diagnostics({"files":["/path/to/server.go"]})` +- +-5. **Fix errors**: If `go_diagnostics` reports any errors, fix them. The tool may provide suggested quick fixes in the form of diffs. You should review these diffs and apply them if they are correct. Once you've applied a fix, re-run `go_diagnostics` to confirm that the issue is resolved. It is OK to ignore 'hint' or 'info' diagnostics if they are not relevant to the current task. 
Note that Go diagnostic messages may contain a summary of the source code, which may not match its exact text. +- +-6. **Check for vulnerabilities**: If your edits involved adding or updating dependencies in the go.mod file, you MUST run a vulnerability check on the entire workspace. This ensures that the new dependencies do not introduce any security risks. This step should be performed after all build errors are resolved. EXAMPLE: `go_vulncheck({"pattern":"./..."})` +- +-7. **Run tests**: Once `go_diagnostics` reports no errors (and ONLY once there are no errors), run the tests for the packages you have changed. You can do this with `go test [packagePath...]`. Don't run `go test ./...` unless the user explicitly requests it, as doing so may slow down the iteration loop. +- +diff -urN a/gopls/internal/mcp/mcp.go b/gopls/internal/mcp/mcp.go +--- a/gopls/internal/mcp/mcp.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/mcp/mcp.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,405 +0,0 @@ +-// Copyright 2025 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package mcp +- +-import ( +- "context" +- _ "embed" +- "fmt" +- "io" +- "log" +- "net" +- "net/http" +- "os" +- "sync" +- +- "github.com/modelcontextprotocol/go-sdk/mcp" +- "golang.org/x/tools/gopls/internal/cache" +- "golang.org/x/tools/gopls/internal/cache/metadata" +- "golang.org/x/tools/gopls/internal/file" +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/gopls/internal/settings" +- "golang.org/x/tools/gopls/internal/util/moremaps" +-) +- +-//go:embed instructions.md +-var Instructions string +- +-// A handler implements various MCP tools for an LSP session. +-type handler struct { +- session *cache.Session +- lspServer protocol.Server +-} +- +-// Sessions is the interface used to access gopls sessions. +-type Sessions interface { +- Session(id string) (*cache.Session, protocol.Server) +- FirstSession() (*cache.Session, protocol.Server) +- SetSessionExitFunc(func(string)) +-} +- +-// Serve starts an MCP server serving at the input address. +-// The server receives LSP session events on the specified channel, which the +-// caller is responsible for closing. The server runs until the context is +-// canceled. +-func Serve(ctx context.Context, address string, sessions Sessions, isDaemon bool) error { +- log.Printf("Gopls MCP server: starting up on http") +- listener, err := net.Listen("tcp", address) +- if err != nil { +- return err +- } +- defer listener.Close() +- +- // TODO(hxjiang): expose the MCP server address to the LSP client. +- if isDaemon { +- log.Printf("Gopls MCP daemon: listening on address %s...", listener.Addr()) +- } +- defer log.Printf("Gopls MCP server: exiting") +- +- svr := http.Server{ +- Handler: HTTPHandler(sessions, isDaemon), +- BaseContext: func(net.Listener) context.Context { +- return ctx +- }, +- } +- +- // Run the server until cancellation. +- go func() { +- <-ctx.Done() +- svr.Close() // ignore error +- }() +- log.Printf("mcp http server listening") +- return svr.Serve(listener) +-} +- +-// StartStdIO starts an MCP server over stdio. 
+-func StartStdIO(ctx context.Context, session *cache.Session, server protocol.Server, rpcLog io.Writer) error { +- s := newServer(session, server) +- if rpcLog != nil { +- return s.Run(ctx, &mcp.LoggingTransport{ +- Transport: &mcp.StdioTransport{}, +- Writer: rpcLog, +- }) +- } else { +- return s.Run(ctx, &mcp.StdioTransport{}) +- } +- +-} +- +-func HTTPHandler(sessions Sessions, isDaemon bool) http.Handler { +- var ( +- mu sync.Mutex // lock for mcpHandlers. +- mcpHandlers = make(map[string]*mcp.SSEHandler) // map from lsp session ids to MCP sse handlers. +- ) +- mux := http.NewServeMux() +- +- // In daemon mode, gopls serves mcp server at ADDRESS/sessions/$SESSIONID. +- // Otherwise, gopls serves mcp server at ADDRESS. +- if isDaemon { +- mux.HandleFunc("/sessions/{id}", func(w http.ResponseWriter, r *http.Request) { +- sessionID := r.PathValue("id") +- +- mu.Lock() +- handler, ok := mcpHandlers[sessionID] +- if !ok { +- if s, svr := sessions.Session(sessionID); s != nil { +- handler = mcp.NewSSEHandler(func(request *http.Request) *mcp.Server { +- return newServer(s, svr) +- }, nil) +- mcpHandlers[sessionID] = handler +- } +- } +- mu.Unlock() +- +- if handler == nil { +- http.Error(w, fmt.Sprintf("session %s not established", sessionID), http.StatusNotFound) +- return +- } +- +- handler.ServeHTTP(w, r) +- }) +- } else { +- // TODO(hxjiang): should gopls serve only at a specific path? +- mux.HandleFunc("/", func(w http.ResponseWriter, r *http.Request) { +- mu.Lock() +- // When not in daemon mode, gopls has at most one LSP session. +- _, handler, ok := moremaps.Arbitrary(mcpHandlers) +- if !ok { +- s, svr := sessions.FirstSession() +- handler = mcp.NewSSEHandler(func(request *http.Request) *mcp.Server { +- return newServer(s, svr) +- }, nil) +- mcpHandlers[s.ID()] = handler +- } +- mu.Unlock() +- +- if handler == nil { +- http.Error(w, "session not established", http.StatusNotFound) +- return +- } +- +- handler.ServeHTTP(w, r) +- }) +- } +- sessions.SetSessionExitFunc(func(sessionID string) { +- mu.Lock() +- defer mu.Unlock() +- // TODO(rfindley): add a way to close SSE handlers (and therefore +- // close their transports). Otherwise, we leak JSON-RPC goroutines. +- delete(mcpHandlers, sessionID) +- }) +- return mux +-} +- +-func newServer(session *cache.Session, lspServer protocol.Server) *mcp.Server { +- h := handler{ +- session: session, +- lspServer: lspServer, +- } +- opts := &mcp.ServerOptions{ +- Instructions: Instructions, +- } +- mcpServer := mcp.NewServer(&mcp.Implementation{Name: "gopls", Version: "v1.0.0"}, opts) +- +- defaultTools := []string{ +- "go_workspace", +- "go_package_api", +- "go_diagnostics", +- "go_symbol_references", +- "go_search", +- "go_file_context", +- "go_vulncheck"} +- disabledTools := append(defaultTools, +- // The fileMetadata tool is redundant with fileContext. +- []string{"go_file_metadata", +- // The context tool returns context for all imports, which can consume a +- // lot of tokens. Conservatively, rely on the model selecting the imports +- // to summarize using the outline tool. +- "go_context", +- // The fileDiagnosticsTool only returns diagnostics for the current file, +- // but often changes will cause breakages in other tools. The +- // workspaceDiagnosticsTool always returns breakages, and supports running +- // deeper diagnostics in selected files. +- "go_file_diagnostics", +- // The references tool requires a location, which models tend to get wrong. +- // The symbolic variant seems to be easier to get right, albeit less +- // powerful. 
+- "go_references", +- }...) +- var toolConfig map[string]bool // non-default settings +- // For testing, poke through to the gopls server to access its options, +- // and enable some of the disabled tools. +- if hasOpts, ok := lspServer.(interface{ Options() *settings.Options }); ok { +- toolConfig = hasOpts.Options().MCPTools +- } +- var tools []string +- for _, tool := range defaultTools { +- if enabled, ok := toolConfig[tool]; !ok || enabled { +- tools = append(tools, tool) +- } +- } +- // Disabled tools must be explicitly enabled. +- for _, tool := range disabledTools { +- if toolConfig[tool] { +- tools = append(tools, tool) +- } +- } +- for _, tool := range tools { +- addToolByName(mcpServer, h, tool) +- } +- return mcpServer +-} +- +-func addToolByName(mcpServer *mcp.Server, h handler, name string) { +- switch name { +- case "go_context": +- mcp.AddTool(mcpServer, &mcp.Tool{ +- Name: "go_context", +- Description: "Provide context for a region within a Go file", +- }, h.contextHandler) +- case "go_diagnostics": +- mcp.AddTool(mcpServer, &mcp.Tool{ +- Name: "go_diagnostics", +- Description: `Provides Go workspace diagnostics. +- +-Checks for parse and build errors across the entire Go workspace. If provided, +-"files" holds absolute paths for active files, on which additional linting is +-performed. +-`, +- }, h.workspaceDiagnosticsHandler) +- case "go_file_context": +- mcp.AddTool(mcpServer, &mcp.Tool{ +- Name: "go_file_context", +- Description: "Summarizes a file's cross-file dependencies", +- }, h.fileContextHandler) +- case "go_file_diagnostics": +- mcp.AddTool(mcpServer, &mcp.Tool{ +- Name: "go_file_diagnostics", +- Description: "Provides diagnostics for a Go file", +- }, h.fileDiagnosticsHandler) +- case "go_file_metadata": +- mcp.AddTool(mcpServer, &mcp.Tool{ +- Name: "go_file_metadata", +- Description: "Provides metadata about the Go package containing the file", +- }, h.fileMetadataHandler) +- case "go_package_api": +- mcp.AddTool(mcpServer, &mcp.Tool{ +- Name: "go_package_api", +- Description: "Provides a summary of a Go package API", +- }, h.outlineHandler) +- case "go_references": +- mcp.AddTool(mcpServer, &mcp.Tool{ +- Name: "go_references", +- Description: "Provide the locations of references to a given object", +- }, h.referencesHandler) +- case "go_search": +- mcp.AddTool(mcpServer, &mcp.Tool{ +- Name: "go_search", +- Description: `Search for symbols in the Go workspace. +- +-Search for symbols using case-insensitive fuzzy search, which may match all or +-part of the fully qualified symbol name. For example, the query 'foo' matches +-Go symbols 'Foo', 'fooBar', 'futils.Oboe', 'github.com/foo/bar.Baz'. +- +-Results are limited to 100 symbols. +-`, +- }, h.searchHandler) +- case "go_symbol_references": +- mcp.AddTool(mcpServer, &mcp.Tool{ +- Name: "go_symbol_references", +- Description: `Provides the locations of references to a (possibly qualified) +-package-level Go symbol referenced from the current file. +- +-For example, given arguments {"file": "/path/to/foo.go", "name": "Foo"}, +-go_symbol_references returns references to the symbol "Foo" declared +-in the current package. +- +-Similarly, given arguments {"file": "/path/to/foo.go", "name": "lib.Bar"}, +-go_symbol_references returns references to the symbol "Bar" in the imported lib +-package. +- +-Finally, symbol references supporting querying fields and methods: symbol +-"T.M" selects the "M" field or method of the "T" type (or value), and "lib.T.M" +-does the same for a symbol in the imported package "lib". 
+-`, +- }, h.symbolReferencesHandler) +- case "go_workspace": +- mcp.AddTool(mcpServer, &mcp.Tool{ +- Name: "go_workspace", +- Description: "Summarize the Go programming language workspace", +- }, h.workspaceHandler) +- case "go_vulncheck": +- mcp.AddTool(mcpServer, &mcp.Tool{ +- Name: "go_vulncheck", +- Description: `Runs a vulnerability check on the Go workspace. +- +- The check is performed on a given package pattern within a specified directory. +- If no directory is provided, it defaults to the workspace root. +- If no pattern is provided, it defaults to "./...".`, +- }, h.vulncheckHandler) +- } +-} +- +-// snapshot returns the best default snapshot to use for workspace queries. +-func (h *handler) snapshot() (*cache.Snapshot, func(), error) { +- views := h.session.Views() +- if len(views) == 0 { +- return nil, nil, fmt.Errorf("No active builds.") +- } +- return views[0].Snapshot() +-} +- +-// fileOf is like [cache.Session.FileOf], but does a sanity check for file +-// changes. Currently, it checks for modified files in the transitive closure +-// of the file's narrowest package. +-// +-// This helps avoid stale packages, but is not a substitute for real file +-// watching, as it misses things like files being added to a package. +-func (h *handler) fileOf(ctx context.Context, file string) (file.Handle, *cache.Snapshot, func(), error) { +- uri := protocol.URIFromPath(file) +- fh, snapshot, release, err := h.session.FileOf(ctx, uri) +- if err != nil { +- return nil, nil, nil, err +- } +- md, err := snapshot.NarrowestMetadataForFile(ctx, uri) +- if err != nil { +- release() +- return nil, nil, nil, err +- } +- fileEvents, err := checkForFileChanges(ctx, snapshot, md.ID) +- if err != nil { +- release() +- return nil, nil, nil, err +- } +- if len(fileEvents) == 0 { +- return fh, snapshot, release, nil +- } +- release() // snapshot is not latest +- +- // We detect changed files: process them before getting the snapshot. +- if err := h.lspServer.DidChangeWatchedFiles(ctx, &protocol.DidChangeWatchedFilesParams{ +- Changes: fileEvents, +- }); err != nil { +- return nil, nil, nil, err +- } +- return h.session.FileOf(ctx, uri) +-} +- +-// checkForFileChanges checks for file changes in the transitive closure of +-// the given package, by checking file modification time. Since it does not +-// actually read file contents, it may miss changes that occur within the mtime +-// resolution of the current file system (on some operating systems, this may +-// be as much as a second). +-// +-// It also doesn't catch package changes that occur due to added files or +-// changes to the go.mod file. 
+-func checkForFileChanges(ctx context.Context, snapshot *cache.Snapshot, id metadata.PackageID) ([]protocol.FileEvent, error) { +- var events []protocol.FileEvent +- +- seen := make(map[metadata.PackageID]struct{}) +- var checkPkg func(id metadata.PackageID) error +- checkPkg = func(id metadata.PackageID) error { +- if _, ok := seen[id]; ok { +- return nil +- } +- seen[id] = struct{}{} +- +- mp := snapshot.Metadata(id) +- for _, uri := range mp.CompiledGoFiles { +- fh, err := snapshot.ReadFile(ctx, uri) +- if err != nil { +- return err // context cancelled +- } +- +- mtime, mtimeErr := fh.ModTime() +- fi, err := os.Stat(uri.Path()) +- switch { +- case err != nil: +- if mtimeErr == nil { +- // file existed, and doesn't anymore, so the file was deleted +- events = append(events, protocol.FileEvent{URI: uri, Type: protocol.Deleted}) +- } +- case mtimeErr != nil: +- // err == nil (from above), so the file was created +- events = append(events, protocol.FileEvent{URI: uri, Type: protocol.Created}) +- case !mtime.IsZero() && fi.ModTime().After(mtime): +- events = append(events, protocol.FileEvent{URI: uri, Type: protocol.Changed}) +- } +- } +- for _, depID := range mp.DepsByPkgPath { +- if err := checkPkg(depID); err != nil { +- return err +- } +- } +- return nil +- } +- return events, checkPkg(id) +-} +- +-func textResult(text string) *mcp.CallToolResult { +- return &mcp.CallToolResult{ +- Content: []mcp.Content{&mcp.TextContent{Text: text}}, +- } +-} +diff -urN a/gopls/internal/mcp/mcp_test.go b/gopls/internal/mcp/mcp_test.go +--- a/gopls/internal/mcp/mcp_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/mcp/mcp_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,55 +0,0 @@ +-// Copyright 2025 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package mcp_test +- +-import ( +- "context" +- "errors" +- "net/http" +- "testing" +- "time" +- +- "golang.org/x/tools/gopls/internal/cache" +- "golang.org/x/tools/gopls/internal/mcp" +- "golang.org/x/tools/gopls/internal/protocol" +-) +- +-type emptySessions struct { +-} +- +-// FirstSession implements mcp.Sessions. +-func (e emptySessions) FirstSession() (*cache.Session, protocol.Server) { +- return nil, nil +-} +- +-// Session implements mcp.Sessions. +-func (e emptySessions) Session(string) (*cache.Session, protocol.Server) { +- return nil, nil +-} +- +-// SetSessionExitFunc implements mcp.Sessions. +-func (e emptySessions) SetSessionExitFunc(func(string)) { +-} +- +-func TestContextCancellation(t *testing.T) { +- ctx, cancel := context.WithCancel(context.Background()) +- +- res := make(chan error) +- go func() { +- res <- mcp.Serve(ctx, "localhost:0", emptySessions{}, true) +- }() +- +- time.Sleep(1 * time.Second) +- cancel() +- +- select { +- case err := <-res: +- if !errors.Is(err, http.ErrServerClosed) { +- t.Errorf("mcp server unexpected return got %v, want: %v", err, http.ErrServerClosed) +- } +- case <-time.After(5 * time.Second): +- t.Errorf("mcp server did not terminate after 5 seconds of context cancellation") +- } +-} +diff -urN a/gopls/internal/mcp/outline.go b/gopls/internal/mcp/outline.go +--- a/gopls/internal/mcp/outline.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/mcp/outline.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,60 +0,0 @@ +-// Copyright 2025 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. 
+- +-package mcp +- +-import ( +- "context" +- "fmt" +- "strconv" +- +- "github.com/modelcontextprotocol/go-sdk/mcp" +- "golang.org/x/tools/gopls/internal/cache/metadata" +-) +- +-type outlineParams struct { +- PackagePaths []string `json:"packagePaths" jsonschema:"the go package paths to describe"` +-} +- +-func (h *handler) outlineHandler(ctx context.Context, req *mcp.CallToolRequest, params outlineParams) (*mcp.CallToolResult, any, error) { +- countGoPackageAPIMCP.Inc() +- snapshot, release, err := h.snapshot() +- if err != nil { +- return nil, nil, err +- } +- defer release() +- +- // Await initialization to ensure we've at least got an initial package graph +- md, err := snapshot.LoadMetadataGraph(ctx) +- if err != nil { +- return nil, nil, err +- } +- var toSummarize []*metadata.Package +- for _, imp := range params.PackagePaths { +- pkgPath := metadata.PackagePath(imp) +- if len(imp) > 0 && imp[0] == '"' { +- unquoted, err := strconv.Unquote(imp) +- if err != nil { +- return nil, nil, fmt.Errorf("failed to unquote %s: %v", imp, err) +- } +- pkgPath = metadata.PackagePath(unquoted) +- } +- if mps := md.ForPackagePath[pkgPath]; len(mps) > 0 { +- toSummarize = append(toSummarize, mps[0]) // first is best +- } +- } +- +- var content []mcp.Content +- for _, mp := range toSummarize { +- if md == nil { +- continue // ignore error +- } +- if summary := summarizePackage(ctx, snapshot, mp); summary != "" { +- content = append(content, &mcp.TextContent{Text: summary}) +- } +- } +- return &mcp.CallToolResult{ +- Content: content, +- }, nil, nil +-} +diff -urN a/gopls/internal/mcp/references.go b/gopls/internal/mcp/references.go +--- a/gopls/internal/mcp/references.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/mcp/references.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,69 +0,0 @@ +-// Copyright 2025 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +-package mcp +- +-import ( +- "context" +- "fmt" +- "path/filepath" +- "strings" +- "unicode" +- +- "github.com/modelcontextprotocol/go-sdk/mcp" +- "golang.org/x/tools/gopls/internal/cache" +- "golang.org/x/tools/gopls/internal/golang" +- "golang.org/x/tools/gopls/internal/protocol" +-) +- +-type findReferencesParams struct { +- Location protocol.Location `json:"location"` +-} +- +-func (h *handler) referencesHandler(ctx context.Context, req *mcp.CallToolRequest, params findReferencesParams) (*mcp.CallToolResult, any, error) { +- countGoReferencesMCP.Inc() +- fh, snapshot, release, err := h.session.FileOf(ctx, params.Location.URI) +- if err != nil { +- return nil, nil, err +- } +- defer release() +- pos := params.Location.Range.Start +- refs, err := golang.References(ctx, snapshot, fh, pos, true) +- if err != nil { +- return nil, nil, err +- } +- formatted, err := formatReferences(ctx, snapshot, refs) +- return formatted, nil, err +-} +- +-func formatReferences(ctx context.Context, snapshot *cache.Snapshot, refs []protocol.Location) (*mcp.CallToolResult, error) { +- if len(refs) == 0 { +- return nil, fmt.Errorf("no references found") +- } +- var builder strings.Builder +- fmt.Fprintf(&builder, "The object has %v references. 
Their locations are listed below\n", len(refs)) +- for i, r := range refs { +- fmt.Fprintf(&builder, "Reference %d\n", i+1) +- fmt.Fprintf(&builder, "Located in the file: %s\n", filepath.ToSlash(r.URI.Path())) +- refFh, err := snapshot.ReadFile(ctx, r.URI) +- // If for some reason there is an error reading the file content, we should still +- // return the references URIs. +- if err != nil { +- continue +- } +- content, err := refFh.Content() +- if err != nil { +- continue +- } +- lines := strings.Split(string(content), "\n") +- var lineContent string +- if int(r.Range.Start.Line) < len(lines) { +- lineContent = strings.TrimLeftFunc(lines[r.Range.Start.Line], unicode.IsSpace) +- } else { +- continue +- } +- fmt.Fprintf(&builder, "The reference is located on line %v, which has content `%s`\n", r.Range.Start.Line, lineContent) +- builder.WriteString("\n") +- } +- return textResult(builder.String()), nil +-} +diff -urN a/gopls/internal/mcp/search.go b/gopls/internal/mcp/search.go +--- a/gopls/internal/mcp/search.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/mcp/search.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,50 +0,0 @@ +-// Copyright 2025 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package mcp +- +-import ( +- "context" +- "fmt" +- "strings" +- +- "github.com/modelcontextprotocol/go-sdk/mcp" +- "golang.org/x/tools/gopls/internal/protocol" +-) +- +-type searchParams struct { +- Query string `json:"query" jsonschema:"the fuzzy search query to use for matching symbols"` +-} +- +-func (h *handler) searchHandler(ctx context.Context, req *mcp.CallToolRequest, params searchParams) (*mcp.CallToolResult, any, error) { +- countGoSearchMCP.Inc() +- query := params.Query +- if len(query) == 0 { +- return nil, nil, fmt.Errorf("empty query") +- } +- syms, err := h.lspServer.Symbol(ctx, &protocol.WorkspaceSymbolParams{ +- Query: params.Query, +- }) +- if err != nil { +- return nil, nil, fmt.Errorf("failed to execute symbol query: %v", err) +- } +- if len(syms) == 0 { +- return textResult("No symbols found."), nil, nil +- } +- var b strings.Builder +- fmt.Fprintf(&b, "Top symbol matches:\n") +- for _, sym := range syms { +- fmt.Fprintf(&b, "\t%s (%s in `%s`)\n", sym.Name, kindName(sym.Kind), sym.Location.URI.Path()) +- } +- return textResult(b.String()), nil, nil +-} +- +-// kindName returns the adjusted name for the given symbol kind, +-// fixing LSP conventions that don't work for go, like 'Class'. +-func kindName(k protocol.SymbolKind) string { +- if k == protocol.Class { +- return "Type" +- } +- return fmt.Sprint(k) +-} +diff -urN a/gopls/internal/mcp/symbol_references.go b/gopls/internal/mcp/symbol_references.go +--- a/gopls/internal/mcp/symbol_references.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/mcp/symbol_references.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,156 +0,0 @@ +-// Copyright 2025 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. 
+-package mcp +- +-import ( +- "context" +- "fmt" +- "go/ast" +- "go/parser" +- "go/token" +- "go/types" +- +- "github.com/modelcontextprotocol/go-sdk/mcp" +- "golang.org/x/tools/gopls/internal/cache" +- "golang.org/x/tools/gopls/internal/cache/parsego" +- "golang.org/x/tools/gopls/internal/file" +- "golang.org/x/tools/gopls/internal/golang" +-) +- +-// symbolReferencesParams defines the parameters for the "go_symbol_references" +-// tool. +-type symbolReferencesParams struct { +- File string `json:"file" jsonschema:"the absolute path to the file containing the symbol"` +- Symbol string `json:"symbol" jsonschema:"the symbol or qualified symbol"` +-} +- +-// symbolReferencesHandler is the handler for the "go_symbol_references" tool. +-// It finds all references to the requested symbol and describes their +-// locations. +-func (h *handler) symbolReferencesHandler(ctx context.Context, req *mcp.CallToolRequest, params symbolReferencesParams) (*mcp.CallToolResult, any, error) { +- countGoSymbolReferencesMCP.Inc() +- fh, snapshot, release, err := h.fileOf(ctx, params.File) +- if err != nil { +- return nil, nil, err +- } +- defer release() +- +- if snapshot.FileKind(fh) != file.Go { +- return nil, nil, fmt.Errorf("can't provide references for non-Go files") +- } +- +- // Parse and extract names before type checking, to fail fast in the case of +- // invalid inputs. +- e, err := parser.ParseExpr(params.Symbol) +- if err != nil { +- return nil, nil, fmt.Errorf("\"symbol\" failed to parse: %v", err) +- } +- path, err := extractPath(e) +- if err != nil { +- return nil, nil, err +- } +- +- pkg, pgf, err := golang.NarrowestPackageForFile(ctx, snapshot, fh.URI()) +- if err != nil { +- return nil, nil, err +- } +- +- target, err := resolveSymbol(path, pkg, pgf) +- if err != nil { +- return nil, nil, err +- } +- +- loc, err := golang.ObjectLocation(ctx, pkg.FileSet(), snapshot, target) +- if err != nil { +- return nil, nil, fmt.Errorf("finding symbol location: %v", err) +- } +- declFH, err := snapshot.ReadFile(ctx, loc.URI) +- if err != nil { +- return nil, nil, err +- } +- refs, err := golang.References(ctx, snapshot, declFH, loc.Range.Start, true) +- if err != nil { +- return nil, nil, err +- } +- formatted, err := formatReferences(ctx, snapshot, refs) +- return formatted, nil, err +-} +- +-// extractPath extracts the 'path' of names from e, which must be of the form +-// a, a.b, or a.b.c. +-// +-// If a nil error is returned, the resulting path is either length 1, 2, or 3. +-func extractPath(e ast.Expr) ([]string, error) { +- switch e := e.(type) { +- case *ast.Ident: +- return []string{e.Name}, nil +- case *ast.SelectorExpr: +- switch x := e.X.(type) { +- case *ast.Ident: +- // Qualified identifier 'a.b', where a is a package or receiver. +- return []string{x.Name, e.Sel.Name}, nil +- case *ast.SelectorExpr: +- // Imported field or method a.b.c: a must be a package name. +- if x2, ok := x.X.(*ast.Ident); ok { +- return []string{x2.Name, x.Sel.Name, e.Sel.Name}, nil +- } +- } +- } +- return nil, fmt.Errorf("invalid qualified symbol: expected a.b or a.b.c") +-} +- +-// resolveSymbol resolves the types.Object for the given qualified path, which +-// must be of length 1, 2, or 3: +-// - For length 1 paths, the symbol is a name in the file scope. +-// - For length 2 paths, the symbol is either field, method, or imported symbol. +-// - For length 3 paths, the symbol is a field or method on an important object. 
+-func resolveSymbol(path []string, pkg *cache.Package, pgf *parsego.File) (types.Object, error) { +- fileScope, ok := pkg.TypesInfo().Scopes[pgf.File] +- if !ok { +- return nil, fmt.Errorf("internal error: no scope for file") +- } +- +- switch len(path) { +- case 1: +- _, target := fileScope.LookupParent(path[0], token.NoPos) +- if target == nil { +- return nil, fmt.Errorf("failed to resolve name %q", path[0]) +- } +- return target, nil +- case 2: +- switch _, obj := fileScope.LookupParent(path[0], token.NoPos); obj := obj.(type) { +- case *types.PkgName: +- target := obj.Imported().Scope().Lookup(path[1]) +- if target == nil { +- return nil, fmt.Errorf("failed to resolve member %q of %q", path[1], path[0]) +- } +- return target, nil +- case nil: +- return nil, fmt.Errorf("failed to resolve name %q", path[0]) +- default: +- target, _, _ := types.LookupFieldOrMethod(obj.Type(), true, pkg.Types(), path[1]) +- if target == nil { +- return nil, fmt.Errorf("failed to resolve member %q of %q", path[1], path[0]) +- } +- return target, nil +- } +- case 3: +- // Imported field or method a.b.c: a must be a package name. +- obj := fileScope.Lookup(path[0]) +- p, ok := obj.(*types.PkgName) +- if !ok { +- return nil, fmt.Errorf("invalid qualified symbol: %q must be a package (got %T)", path[0], obj) +- } +- recv := p.Imported().Scope().Lookup(path[1]) +- if recv == nil { +- return nil, fmt.Errorf("invalid qualified symbol: could not find %q in package %q", path[1], path[0]) +- } +- target, _, _ := types.LookupFieldOrMethod(recv.Type(), true, pkg.Types(), path[2]) +- if target == nil { +- return nil, fmt.Errorf("failed to resolve member %q of %q", path[2], path[1]) +- } +- return target, nil +- } +- panic("unreachable") +-} +diff -urN a/gopls/internal/mcp/vulncheck.go b/gopls/internal/mcp/vulncheck.go +--- a/gopls/internal/mcp/vulncheck.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/mcp/vulncheck.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,101 +0,0 @@ +-// Copyright 2025 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package mcp +- +-import ( +- "bytes" +- "context" +- "fmt" +- "maps" +- "slices" +- "sort" +- +- "github.com/modelcontextprotocol/go-sdk/mcp" +- "golang.org/x/tools/gopls/internal/vulncheck/scan" +-) +- +-type vulncheckParams struct { +- Dir string `json:"dir,omitempty" jsonschema:"directory to run the vulnerability check within"` +- Pattern string `json:"pattern,omitempty" jsonschema:"package pattern to check"` +-} +- +-type GroupedVulnFinding struct { +- ID string `json:"id"` +- Details string `json:"details"` +- AffectedPackages []string `json:"affectedPackages"` +-} +- +-type VulncheckResultOutput struct { +- Findings []GroupedVulnFinding `json:"findings,omitempty"` +- Logs string `json:"logs,omitempty"` +-} +- +-func (h *handler) vulncheckHandler(ctx context.Context, req *mcp.CallToolRequest, params *vulncheckParams) (*mcp.CallToolResult, *VulncheckResultOutput, error) { +- countGoVulncheckMCP.Inc() +- snapshot, release, err := h.snapshot() +- if err != nil { +- return nil, nil, err +- } +- defer release() +- +- dir := params.Dir +- if dir == "" && len(h.session.Views()) > 0 { +- dir = h.session.Views()[0].Root().Path() +- } +- +- pattern := params.Pattern +- if pattern == "" { +- pattern = "./..." 
+- } +- +- var logBuf bytes.Buffer +- result, err := scan.RunGovulncheck(ctx, pattern, snapshot, dir, &logBuf) +- if err != nil { +- return nil, nil, fmt.Errorf("running govulncheck failed: %v\nLogs:\n%s", err, logBuf.String()) +- } +- +- groupedPkgs := make(map[string]map[string]struct{}) +- for _, finding := range result.Findings { +- if osv := result.Entries[finding.OSV]; osv != nil { +- if _, ok := groupedPkgs[osv.ID]; !ok { +- groupedPkgs[osv.ID] = make(map[string]struct{}) +- } +- pkg := finding.Trace[0].Package +- if pkg == "" { +- pkg = "Go standard library" +- } +- groupedPkgs[osv.ID][pkg] = struct{}{} +- } +- } +- +- var output VulncheckResultOutput +- if len(groupedPkgs) > 0 { +- output.Findings = make([]GroupedVulnFinding, 0, len(groupedPkgs)) +- for id, pkgsSet := range groupedPkgs { +- pkgs := slices.Sorted(maps.Keys(pkgsSet)) +- +- output.Findings = append(output.Findings, GroupedVulnFinding{ +- ID: id, +- Details: result.Entries[id].Details, +- AffectedPackages: pkgs, +- }) +- } +- sort.Slice(output.Findings, func(i, j int) bool { +- return output.Findings[i].ID < output.Findings[j].ID +- }) +- } +- +- if logBuf.Len() > 0 { +- output.Logs = logBuf.String() +- } +- +- var summary bytes.Buffer +- fmt.Fprintf(&summary, "Vulnerability check for pattern %q complete. Found %d vulnerabilities.", pattern, len(output.Findings)) +- if output.Logs != "" { +- fmt.Fprintf(&summary, "\nLogs are available in the structured output.") +- } +- +- return nil, &output, nil +-} +diff -urN a/gopls/internal/mcp/workspace_diagnostics.go b/gopls/internal/mcp/workspace_diagnostics.go +--- a/gopls/internal/mcp/workspace_diagnostics.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/mcp/workspace_diagnostics.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,92 +0,0 @@ +-// Copyright 2025 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package mcp +- +-import ( +- "context" +- "fmt" +- "maps" +- "slices" +- "strings" +- +- "github.com/modelcontextprotocol/go-sdk/mcp" +- "golang.org/x/tools/gopls/internal/cache" +- "golang.org/x/tools/gopls/internal/cache/metadata" +- "golang.org/x/tools/gopls/internal/file" +- "golang.org/x/tools/gopls/internal/protocol" +-) +- +-type workspaceDiagnosticsParams struct { +- Files []string `json:"files,omitempty" jsonschema:"absolute paths to active files, if any"` +-} +- +-func (h *handler) workspaceDiagnosticsHandler(ctx context.Context, req *mcp.CallToolRequest, params workspaceDiagnosticsParams) (*mcp.CallToolResult, any, error) { +- countGoDiagnosticsMCP.Inc() +- var ( +- fh file.Handle +- snapshot *cache.Snapshot +- release func() +- err error +- ) +- if len(params.Files) > 0 { +- fh, snapshot, release, err = h.fileOf(ctx, params.Files[0]) +- if err != nil { +- return nil, nil, err +- } +- } else { +- views := h.session.Views() +- if len(views) == 0 { +- return nil, nil, fmt.Errorf("No active builds.") +- } +- snapshot, release, err = views[0].Snapshot() +- if err != nil { +- return nil, nil, err +- } +- } +- defer release() +- +- pkgMap := snapshot.WorkspacePackages() +- var ids []metadata.PackageID +- for id := range pkgMap.All() { +- ids = append(ids, id) +- } +- slices.Sort(ids) +- +- diagnostics, err := snapshot.PackageDiagnostics(ctx, ids...) 
+- if err != nil { +- return nil, nil, fmt.Errorf("diagnostics failed: %v", err) +- } +- +- fixes := make(map[*cache.Diagnostic]*protocol.CodeAction) +- for _, file := range params.Files { +- uri := protocol.URIFromPath(file) +- // Get more specific diagnostics for the file in question. +- fileDiagnostics, fileFixes, err := h.diagnoseFile(ctx, snapshot, uri) +- if err != nil { +- return nil, nil, fmt.Errorf("diagnostics failed: %v", err) +- } +- diagnostics[fh.URI()] = fileDiagnostics +- maps.Insert(fixes, maps.All(fileFixes)) +- } +- +- keys := slices.Sorted(maps.Keys(diagnostics)) +- var b strings.Builder +- for _, uri := range keys { +- diags := diagnostics[uri] +- if len(diags) > 0 { +- fmt.Fprintf(&b, "File `%s` has the following diagnostics:\n", uri.Path()) +- if err := summarizeDiagnostics(ctx, snapshot, &b, diags, fixes); err != nil { +- return nil, nil, err +- } +- fmt.Fprintln(&b) +- } +- } +- +- if b.Len() == 0 { +- return textResult("No diagnostics."), nil, nil +- } +- +- return textResult(b.String()), nil, nil +-} +diff -urN a/gopls/internal/mcp/workspace.go b/gopls/internal/mcp/workspace.go +--- a/gopls/internal/mcp/workspace.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/mcp/workspace.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,116 +0,0 @@ +-// Copyright 2025 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package mcp +- +-import ( +- "bytes" +- "context" +- "fmt" +- "io" +- "strings" +- +- "slices" +- +- "github.com/modelcontextprotocol/go-sdk/mcp" +- "golang.org/x/tools/gopls/internal/cache" +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/gopls/internal/util/immutable" +-) +- +-func (h *handler) workspaceHandler(ctx context.Context, req *mcp.CallToolRequest, _ any) (*mcp.CallToolResult, any, error) { +- countGoWorkspaceMCP.Inc() +- var summary bytes.Buffer +- views := h.session.Views() +- for _, v := range views { +- snapshot, release, err := v.Snapshot() +- if err != nil { +- continue // view is shut down +- } +- defer release() +- +- pkgs := snapshot.WorkspacePackages() +- +- // Special case: check if it's likely that this isn't actually a Go workspace. +- if len(views) == 1 && // only view +- (v.Type() == cache.AdHocView || v.Type() == cache.GoPackagesDriverView) && // not necessarily Go code +- pkgs.Len() == 0 { // no packages +- +- return &mcp.CallToolResult{ +- Content: []mcp.Content{&mcp.TextContent{Text: "This is not a Go workspace. 
To work on Go code, open a directory inside a Go module."}}, +- }, nil, nil +- } +- +- dir := v.Root().Path() +- switch v.Type() { +- case cache.GoPackagesDriverView: +- fmt.Fprintf(&summary, "The `%s` directory is loaded using a custom golang.org/x/tools/go/packages driver.\n", dir) +- fmt.Fprintf(&summary, "This indicates a non-standard build system.\n") +- +- case cache.GOPATHView: +- fmt.Fprintf(&summary, "The `%s` directory is loaded using a the legacy GOPATH build system.\n", dir) +- +- case cache.GoModView: +- fmt.Fprintf(&summary, "The `%s` directory uses Go modules, with the following main modules:\n", dir) +- summarizeModFiles(ctx, &summary, snapshot) +- +- case cache.GoWorkView: +- fmt.Fprintf(&summary, "The `%s` directory is in the go workspace defined by `%s`, with the following main modules:\n", dir, v.GoWork().Path()) +- summarizeModFiles(ctx, &summary, snapshot) +- +- case cache.AdHocView: +- fmt.Fprintf(&summary, "The `%s` directory is an ad-hoc Go package, not in a Go module.\n", dir) +- } +- fmt.Fprintln(&summary) +- const summarizePackages = false +- if summarizePackages { +- summaries := packageSummaries(snapshot, pkgs) +- fmt.Fprintf(&summary, "It contains the following Go packages:\n") +- fmt.Fprintf(&summary, "\t%s\n", strings.Join(summaries, "\n\t")) +- fmt.Fprintln(&summary) +- } +- } +- return textResult(summary.String()), nil, nil +-} +- +-func summarizeModFiles(ctx context.Context, w io.Writer, snapshot *cache.Snapshot) { +- v := snapshot.View() +- for _, m := range v.ModFiles() { +- if modPath, err := modulePath(ctx, snapshot, m); err != nil { +- // Fall back on just the go.mod file. +- fmt.Fprintf(w, "\t%s\n", m.Path()) +- } else { +- fmt.Fprintf(w, "\t%s (module %s)\n", m.Path(), modPath) +- } +- } +-} +- +-func modulePath(ctx context.Context, snapshot *cache.Snapshot, uri protocol.DocumentURI) (string, error) { +- fh, err := snapshot.ReadFile(ctx, uri) +- if err != nil { +- return "", fmt.Errorf("Reading %s: %v", uri, err) +- } +- pmf, err := snapshot.ParseMod(ctx, fh) +- if err != nil { +- return "", fmt.Errorf("parsing modfile: %v", err) +- } +- if pmf.File == nil || pmf.File.Module == nil { +- return "", fmt.Errorf("malformed modfile") +- } +- return pmf.File.Module.Mod.Path, nil +-} +- +-func packageSummaries(snapshot *cache.Snapshot, pkgs immutable.Map[cache.PackageID, cache.PackagePath]) []string { +- var summaries []string +- for id := range pkgs.All() { +- mp := snapshot.Metadata(id) +- if len(mp.CompiledGoFiles) == 0 { +- continue // For convenience, just skip uncompiled packages; we could do more if it matters. +- } +- dir := mp.CompiledGoFiles[0].DirPath() +- summaries = append(summaries, fmt.Sprintf("The `%s` directory contains the %q package with path %q", dir, mp.Name, mp.PkgPath)) +- } +- slices.Sort(summaries) // for stability +- return summaries +-} +diff -urN a/gopls/internal/mod/code_lens.go b/gopls/internal/mod/code_lens.go +--- a/gopls/internal/mod/code_lens.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/mod/code_lens.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,201 +0,0 @@ +-// Copyright 2020 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. 
+- +-package mod +- +-import ( +- "context" +- "fmt" +- "os" +- "path/filepath" +- +- "golang.org/x/mod/modfile" +- "golang.org/x/tools/gopls/internal/cache" +- "golang.org/x/tools/gopls/internal/file" +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/gopls/internal/protocol/command" +- "golang.org/x/tools/gopls/internal/settings" +-) +- +-// CodeLensSources returns the sources of code lenses for go.mod files. +-func CodeLensSources() map[settings.CodeLensSource]cache.CodeLensSourceFunc { +- return map[settings.CodeLensSource]cache.CodeLensSourceFunc{ +- settings.CodeLensUpgradeDependency: upgradeLenses, // commands: CheckUpgrades, UpgradeDependency +- settings.CodeLensTidy: tidyLens, // commands: Tidy +- settings.CodeLensVendor: vendorLens, // commands: Vendor +- settings.CodeLensVulncheck: vulncheckLenses, // commands: Vulncheck +- settings.CodeLensRunGovulncheck: runGovulncheckLenses, // commands: RunGovulncheck +- } +-} +- +-func upgradeLenses(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle) ([]protocol.CodeLens, error) { +- pm, err := snapshot.ParseMod(ctx, fh) +- if err != nil || pm.File == nil { +- return nil, err +- } +- uri := fh.URI() +- reset := command.NewResetGoModDiagnosticsCommand("Reset go.mod diagnostics", command.ResetGoModDiagnosticsArgs{URIArg: command.URIArg{URI: uri}}) +- // Put the `Reset go.mod diagnostics` codelens on the module statement. +- modrng, err := moduleStmtRange(fh, pm) +- if err != nil { +- return nil, err +- } +- lenses := []protocol.CodeLens{{Range: modrng, Command: reset}} +- if len(pm.File.Require) == 0 { +- // Nothing to upgrade. +- return lenses, nil +- } +- var requires []string +- for _, req := range pm.File.Require { +- requires = append(requires, req.Mod.Path) +- } +- checkUpgrade := command.NewCheckUpgradesCommand("Check for upgrades", command.CheckUpgradesArgs{ +- URI: uri, +- Modules: requires, +- }) +- upgradeTransitive := command.NewUpgradeDependencyCommand("Upgrade transitive dependencies", command.DependencyArgs{ +- URI: uri, +- AddRequire: false, +- GoCmdArgs: []string{"-d", "-u", "-t", "./..."}, +- }) +- upgradeDirect := command.NewUpgradeDependencyCommand("Upgrade direct dependencies", command.DependencyArgs{ +- URI: uri, +- AddRequire: false, +- GoCmdArgs: append([]string{"-d"}, requires...), +- }) +- +- // Put the upgrade code lenses above the first require block or statement. +- rng, err := firstRequireRange(fh, pm) +- if err != nil { +- return nil, err +- } +- +- return append(lenses, []protocol.CodeLens{ +- {Range: rng, Command: checkUpgrade}, +- {Range: rng, Command: upgradeTransitive}, +- {Range: rng, Command: upgradeDirect}, +- }...), nil +-} +- +-func tidyLens(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle) ([]protocol.CodeLens, error) { +- pm, err := snapshot.ParseMod(ctx, fh) +- if err != nil || pm.File == nil { +- return nil, err +- } +- uri := fh.URI() +- cmd := command.NewTidyCommand("Run go mod tidy", command.URIArgs{URIs: []protocol.DocumentURI{uri}}) +- rng, err := moduleStmtRange(fh, pm) +- if err != nil { +- return nil, err +- } +- return []protocol.CodeLens{{ +- Range: rng, +- Command: cmd, +- }}, nil +-} +- +-func vendorLens(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle) ([]protocol.CodeLens, error) { +- pm, err := snapshot.ParseMod(ctx, fh) +- if err != nil || pm.File == nil { +- return nil, err +- } +- if len(pm.File.Require) == 0 { +- // Nothing to vendor. 
+- return nil, nil +- } +- rng, err := moduleStmtRange(fh, pm) +- if err != nil { +- return nil, err +- } +- title := "Create vendor directory" +- uri := fh.URI() +- cmd := command.NewVendorCommand(title, command.URIArg{URI: uri}) +- // Change the message depending on whether or not the module already has a +- // vendor directory. +- vendorDir := filepath.Join(fh.URI().DirPath(), "vendor") +- if info, _ := os.Stat(vendorDir); info != nil && info.IsDir() { +- title = "Sync vendor directory" +- } +- return []protocol.CodeLens{{Range: rng, Command: cmd}}, nil +-} +- +-func moduleStmtRange(fh file.Handle, pm *cache.ParsedModule) (protocol.Range, error) { +- if pm.File == nil || pm.File.Module == nil || pm.File.Module.Syntax == nil { +- return protocol.Range{}, fmt.Errorf("no module statement in %s", fh.URI()) +- } +- syntax := pm.File.Module.Syntax +- return pm.Mapper.OffsetRange(syntax.Start.Byte, syntax.End.Byte) +-} +- +-// firstRequireRange returns the range for the first "require" in the given +-// go.mod file. This is either a require block or an individual require line. +-func firstRequireRange(fh file.Handle, pm *cache.ParsedModule) (protocol.Range, error) { +- if len(pm.File.Require) == 0 { +- return protocol.Range{}, fmt.Errorf("no requires in the file %s", fh.URI()) +- } +- var start, end modfile.Position +- for _, stmt := range pm.File.Syntax.Stmt { +- if b, ok := stmt.(*modfile.LineBlock); ok && len(b.Token) == 1 && b.Token[0] == "require" { +- start, end = b.Span() +- break +- } +- } +- +- firstRequire := pm.File.Require[0].Syntax +- if start.Byte == 0 || firstRequire.Start.Byte < start.Byte { +- start, end = firstRequire.Start, firstRequire.End +- } +- return pm.Mapper.OffsetRange(start.Byte, end.Byte) +-} +- +-func vulncheckLenses(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle) ([]protocol.CodeLens, error) { +- pm, err := snapshot.ParseMod(ctx, fh) +- if err != nil || pm.File == nil { +- return nil, err +- } +- // Place the codelenses near the module statement. +- // A module may not have the require block, +- // but vulnerabilities can exist in standard libraries. +- uri := fh.URI() +- rng, err := moduleStmtRange(fh, pm) +- if err != nil { +- return nil, err +- } +- +- vulncheck := command.NewVulncheckCommand("Run govulncheck", command.VulncheckArgs{ +- URI: uri, +- Pattern: "./...", +- }) +- return []protocol.CodeLens{ +- {Range: rng, Command: vulncheck}, +- }, nil +-} +- +-func runGovulncheckLenses(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle) ([]protocol.CodeLens, error) { +- // If CodeLensVulncheck is enabled, do not use the legacy CodeLensRunGovulncheck. +- if snapshot.Options().UserOptions.UIOptions.Codelenses[settings.CodeLensVulncheck] { +- return nil, nil +- } +- +- pm, err := snapshot.ParseMod(ctx, fh) +- if err != nil || pm.File == nil { +- return nil, err +- } +- // Place the codelenses near the module statement. +- // A module may not have the require block, +- // but vulnerabilities can exist in standard libraries. 
+- uri := fh.URI() +- rng, err := moduleStmtRange(fh, pm) +- if err != nil { +- return nil, err +- } +- +- vulncheck := command.NewRunGovulncheckCommand("Run govulncheck", command.VulncheckArgs{ +- URI: uri, +- Pattern: "./...", +- }) +- return []protocol.CodeLens{ +- {Range: rng, Command: vulncheck}, +- }, nil +-} +diff -urN a/gopls/internal/mod/diagnostics.go b/gopls/internal/mod/diagnostics.go +--- a/gopls/internal/mod/diagnostics.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/mod/diagnostics.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,544 +0,0 @@ +-// Copyright 2019 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-// Package mod provides core features related to go.mod file +-// handling for use by Go editors and tools. +-package mod +- +-import ( +- "context" +- "fmt" +- "runtime" +- "sort" +- "strings" +- "sync" +- +- "golang.org/x/mod/modfile" +- "golang.org/x/mod/semver" +- "golang.org/x/sync/errgroup" +- "golang.org/x/tools/gopls/internal/cache" +- "golang.org/x/tools/gopls/internal/file" +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/gopls/internal/protocol/command" +- "golang.org/x/tools/gopls/internal/settings" +- "golang.org/x/tools/gopls/internal/vulncheck/govulncheck" +- "golang.org/x/tools/internal/event" +-) +- +-// ParseDiagnostics returns diagnostics from parsing the go.mod files in the workspace. +-func ParseDiagnostics(ctx context.Context, snapshot *cache.Snapshot) (map[protocol.DocumentURI][]*cache.Diagnostic, error) { +- ctx, done := event.Start(ctx, "mod.Diagnostics", snapshot.Labels()...) +- defer done() +- +- return collectDiagnostics(ctx, snapshot, parseDiagnostics) +-} +- +-// TidyDiagnostics returns diagnostics from running go mod tidy. +-func TidyDiagnostics(ctx context.Context, snapshot *cache.Snapshot) (map[protocol.DocumentURI][]*cache.Diagnostic, error) { +- ctx, done := event.Start(ctx, "mod.Diagnostics", snapshot.Labels()...) +- defer done() +- +- return collectDiagnostics(ctx, snapshot, tidyDiagnostics) +-} +- +-// UpgradeDiagnostics returns upgrade diagnostics for the modules in the +-// workspace with known upgrades. +-func UpgradeDiagnostics(ctx context.Context, snapshot *cache.Snapshot) (map[protocol.DocumentURI][]*cache.Diagnostic, error) { +- ctx, done := event.Start(ctx, "mod.UpgradeDiagnostics", snapshot.Labels()...) +- defer done() +- +- return collectDiagnostics(ctx, snapshot, upgradeDiagnostics) +-} +- +-// VulnerabilityDiagnostics returns vulnerability diagnostics for the active modules in the +-// workspace with known vulnerabilities. +-func VulnerabilityDiagnostics(ctx context.Context, snapshot *cache.Snapshot) (map[protocol.DocumentURI][]*cache.Diagnostic, error) { +- ctx, done := event.Start(ctx, "mod.VulnerabilityDiagnostics", snapshot.Labels()...) 
+- defer done() +- +- return collectDiagnostics(ctx, snapshot, vulnerabilityDiagnostics) +-} +- +-func collectDiagnostics(ctx context.Context, snapshot *cache.Snapshot, diagFn func(context.Context, *cache.Snapshot, file.Handle) ([]*cache.Diagnostic, error)) (map[protocol.DocumentURI][]*cache.Diagnostic, error) { +- g, ctx := errgroup.WithContext(ctx) +- cpulimit := runtime.GOMAXPROCS(0) +- g.SetLimit(cpulimit) +- +- var mu sync.Mutex +- reports := make(map[protocol.DocumentURI][]*cache.Diagnostic) +- +- for _, uri := range snapshot.View().ModFiles() { +- g.Go(func() error { +- fh, err := snapshot.ReadFile(ctx, uri) +- if err != nil { +- return err +- } +- diagnostics, err := diagFn(ctx, snapshot, fh) +- if err != nil { +- return err +- } +- for _, d := range diagnostics { +- mu.Lock() +- reports[d.URI] = append(reports[fh.URI()], d) +- mu.Unlock() +- } +- return nil +- }) +- } +- +- if err := g.Wait(); err != nil { +- return nil, err +- } +- return reports, nil +-} +- +-// parseDiagnostics reports diagnostics from parsing the mod file. +-func parseDiagnostics(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle) (diagnostics []*cache.Diagnostic, err error) { +- pm, err := snapshot.ParseMod(ctx, fh) +- if err != nil { +- if pm == nil || len(pm.ParseErrors) == 0 { +- return nil, err +- } +- return pm.ParseErrors, nil +- } +- return nil, nil +-} +- +-// tidyDiagnostics reports diagnostics from running go mod tidy. +-func tidyDiagnostics(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle) ([]*cache.Diagnostic, error) { +- pm, err := snapshot.ParseMod(ctx, fh) // memoized +- if err != nil { +- return nil, nil // errors reported by ModDiagnostics above +- } +- +- tidied, err := snapshot.ModTidy(ctx, pm) +- if err != nil { +- if err != cache.ErrNoModOnDisk && !strings.Contains(err.Error(), "GOPROXY=off") { +- // TODO(rfindley): the check for ErrNoModOnDisk was historically determined +- // to be benign, but may date back to the time when the Go command did not +- // have overlay support. +- // +- // See if we can pass the overlay to the Go command, and eliminate this guard.. +- +- // TODO(golang/go#56395): remove the arbitrary suppression of the mod +- // tidy error when GOPROXY=off. The true fix for this noisy log message +- // is to fix the mod tidy diagnostics. +- event.Error(ctx, fmt.Sprintf("tidy: diagnosing %s", pm.URI), err) +- } +- return nil, nil +- } +- return tidied.Diagnostics, nil +-} +- +-// upgradeDiagnostics adds upgrade quick fixes for individual modules if the upgrades +-// are recorded in the view. +-func upgradeDiagnostics(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle) (upgradeDiagnostics []*cache.Diagnostic, err error) { +- pm, err := snapshot.ParseMod(ctx, fh) +- if err != nil { +- // Don't return an error if there are parse error diagnostics to be shown, but also do not +- // continue since we won't be able to show the upgrade diagnostics. +- if pm != nil && len(pm.ParseErrors) != 0 { +- return nil, nil +- } +- return nil, err +- } +- +- upgrades := snapshot.ModuleUpgrades(fh.URI()) +- for _, req := range pm.File.Require { +- ver, ok := upgrades[req.Mod.Path] +- if !ok || req.Mod.Version == ver { +- continue +- } +- rng, err := pm.Mapper.OffsetRange(req.Syntax.Start.Byte, req.Syntax.End.Byte) +- if err != nil { +- return nil, err +- } +- // Upgrade to the exact version we offer the user, not the most recent. 
+- title := fmt.Sprintf("%s%v", upgradeCodeActionPrefix, ver) +- cmd := command.NewUpgradeDependencyCommand(title, command.DependencyArgs{ +- URI: fh.URI(), +- AddRequire: false, +- GoCmdArgs: []string{req.Mod.Path + "@" + ver}, +- }) +- upgradeDiagnostics = append(upgradeDiagnostics, &cache.Diagnostic{ +- URI: fh.URI(), +- Range: rng, +- Severity: protocol.SeverityInformation, +- Source: cache.UpgradeNotification, +- Message: fmt.Sprintf("%v can be upgraded", req.Mod.Path), +- SuggestedFixes: []cache.SuggestedFix{cache.SuggestedFixFromCommand(cmd, protocol.QuickFix)}, +- }) +- } +- +- return upgradeDiagnostics, nil +-} +- +-const upgradeCodeActionPrefix = "Upgrade to " +- +-// vulnerabilityDiagnostics adds diagnostics for vulnerabilities in individual modules +-// if the vulnerability is recorded in the view. +-func vulnerabilityDiagnostics(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle) (vulnDiagnostics []*cache.Diagnostic, err error) { +- pm, err := snapshot.ParseMod(ctx, fh) +- if err != nil { +- // Don't return an error if there are parse error diagnostics to be shown, but also do not +- // continue since we won't be able to show the vulnerability diagnostics. +- if pm != nil && len(pm.ParseErrors) != 0 { +- return nil, nil +- } +- return nil, err +- } +- +- diagSource := cache.Govulncheck +- vs := snapshot.Vulnerabilities(fh.URI())[fh.URI()] +- if vs == nil && snapshot.Options().Vulncheck == settings.ModeVulncheckImports { +- vs, err = snapshot.ModVuln(ctx, fh.URI()) +- if err != nil { +- return nil, err +- } +- diagSource = cache.Vulncheck +- } +- if vs == nil || len(vs.Findings) == 0 { +- return nil, nil +- } +- +- suggestRunOrResetGovulncheck, err := suggestGovulncheckAction(diagSource == cache.Govulncheck, fh.URI()) +- if err != nil { +- // must not happen +- return nil, err // TODO: bug report +- } +- vulnsByModule := make(map[string][]*govulncheck.Finding) +- +- for _, finding := range vs.Findings { +- if vuln, typ := foundVuln(finding); typ == vulnCalled || typ == vulnImported { +- vulnsByModule[vuln.Module] = append(vulnsByModule[vuln.Module], finding) +- } +- } +- for _, req := range pm.File.Require { +- mod := req.Mod.Path +- findings := vulnsByModule[mod] +- if len(findings) == 0 { +- continue +- } +- // note: req.Syntax is the line corresponding to 'require', which means +- // req.Syntax.Start can point to the beginning of the "require" keyword +- // for a single line require (e.g. "require golang.org/x/mod v0.0.0"). +- start := req.Syntax.Start.Byte +- if len(req.Syntax.Token) == 3 { +- start += len("require ") +- } +- rng, err := pm.Mapper.OffsetRange(start, req.Syntax.End.Byte) +- if err != nil { +- return nil, err +- } +- // Map affecting vulns to 'warning' level diagnostics, +- // others to 'info' level diagnostics. +- // Fixes will include only the upgrades for warning level diagnostics. +- var warningFixes, infoFixes []cache.SuggestedFix +- var warningSet, infoSet = map[string]bool{}, map[string]bool{} +- for _, finding := range findings { +- // It is possible that the source code was changed since the last +- // govulncheck run and information in the `vulns` info is stale. +- // For example, imagine that a user is in the middle of updating +- // problematic modules detected by the govulncheck run by applying +- // quick fixes. Stale diagnostics can be confusing and prevent the +- // user from quickly locating the next module to fix. 
+- // Ideally we should rerun the analysis with the updated module +- // dependencies or any other code changes, but we are not yet +- // in the position of automatically triggering the analysis +- // (govulncheck can take a while). We also don't know exactly what +- // part of source code was changed since `vulns` was computed. +- // As a heuristic, we assume that a user upgrades the affecting +- // module to the version with the fix or the latest one, and if the +- // version in the require statement is equal to or higher than the +- // fixed version, skip generating a diagnostic about the vulnerability. +- // Eventually, the user has to rerun govulncheck. +- if finding.FixedVersion != "" && semver.IsValid(req.Mod.Version) && semver.Compare(finding.FixedVersion, req.Mod.Version) <= 0 { +- continue +- } +- switch _, typ := foundVuln(finding); typ { +- case vulnImported: +- infoSet[finding.OSV] = true +- case vulnCalled: +- warningSet[finding.OSV] = true +- } +- // Upgrade to the exact version we offer the user, not the most recent. +- if fixedVersion := finding.FixedVersion; semver.IsValid(fixedVersion) && semver.Compare(req.Mod.Version, fixedVersion) < 0 { +- cmd := getUpgradeCodeAction(fh, req, fixedVersion) +- sf := cache.SuggestedFixFromCommand(cmd, protocol.QuickFix) +- switch _, typ := foundVuln(finding); typ { +- case vulnImported: +- infoFixes = append(infoFixes, sf) +- case vulnCalled: +- warningFixes = append(warningFixes, sf) +- } +- } +- } +- +- if len(warningSet) == 0 && len(infoSet) == 0 { +- continue +- } +- // Remove affecting osvs from the non-affecting osv list if any. +- if len(warningSet) > 0 { +- for k := range infoSet { +- if warningSet[k] { +- delete(infoSet, k) +- } +- } +- } +- // Add an upgrade for module@latest. +- // TODO(suzmue): verify if latest is the same as fixedVersion. +- latest := getUpgradeCodeAction(fh, req, "latest") +- sf := cache.SuggestedFixFromCommand(latest, protocol.QuickFix) +- if len(warningFixes) > 0 { +- warningFixes = append(warningFixes, sf) +- } +- if len(infoFixes) > 0 { +- infoFixes = append(infoFixes, sf) +- } +- if len(warningSet) > 0 { +- warning := sortedKeys(warningSet) +- warningFixes = append(warningFixes, suggestRunOrResetGovulncheck) +- vulnDiagnostics = append(vulnDiagnostics, &cache.Diagnostic{ +- URI: fh.URI(), +- Range: rng, +- Severity: protocol.SeverityWarning, +- Source: diagSource, +- Message: getVulnMessage(req.Mod.Path, warning, true, diagSource == cache.Govulncheck), +- SuggestedFixes: warningFixes, +- }) +- } +- if len(infoSet) > 0 { +- info := sortedKeys(infoSet) +- infoFixes = append(infoFixes, suggestRunOrResetGovulncheck) +- vulnDiagnostics = append(vulnDiagnostics, &cache.Diagnostic{ +- URI: fh.URI(), +- Range: rng, +- Severity: protocol.SeverityInformation, +- Source: diagSource, +- Message: getVulnMessage(req.Mod.Path, info, false, diagSource == cache.Govulncheck), +- SuggestedFixes: infoFixes, +- }) +- } +- } +- +- // TODO(hyangah): place this diagnostic on the `go` directive or `toolchain` directive +- // after https://go.dev/issue/57001. +- const diagnoseStdLib = false +- +- // If diagnosing the stdlib, add standard library vulnerability diagnostics +- // on the module declaration. +- // +- // Only proceed if we have a valid module declaration on which to position +- // the diagnostics. +- if diagnoseStdLib && pm.File.Module != nil && pm.File.Module.Syntax != nil { +- // Add standard library vulnerabilities. 
+- stdlibVulns := vulnsByModule["stdlib"] +- if len(stdlibVulns) == 0 { +- return vulnDiagnostics, nil +- } +- +- // Put the standard library diagnostic on the module declaration. +- rng, err := pm.Mapper.OffsetRange(pm.File.Module.Syntax.Start.Byte, pm.File.Module.Syntax.End.Byte) +- if err != nil { +- return vulnDiagnostics, nil // TODO: bug report +- } +- +- var warningSet, infoSet = map[string]bool{}, map[string]bool{} +- for _, finding := range stdlibVulns { +- switch _, typ := foundVuln(finding); typ { +- case vulnImported: +- infoSet[finding.OSV] = true +- case vulnCalled: +- warningSet[finding.OSV] = true +- } +- } +- if len(warningSet) > 0 { +- warning := sortedKeys(warningSet) +- fixes := []cache.SuggestedFix{suggestRunOrResetGovulncheck} +- vulnDiagnostics = append(vulnDiagnostics, &cache.Diagnostic{ +- URI: fh.URI(), +- Range: rng, +- Severity: protocol.SeverityWarning, +- Source: diagSource, +- Message: getVulnMessage("go", warning, true, diagSource == cache.Govulncheck), +- SuggestedFixes: fixes, +- }) +- +- // remove affecting osvs from the non-affecting osv list if any. +- for k := range infoSet { +- if warningSet[k] { +- delete(infoSet, k) +- } +- } +- } +- if len(infoSet) > 0 { +- info := sortedKeys(infoSet) +- fixes := []cache.SuggestedFix{suggestRunOrResetGovulncheck} +- vulnDiagnostics = append(vulnDiagnostics, &cache.Diagnostic{ +- URI: fh.URI(), +- Range: rng, +- Severity: protocol.SeverityInformation, +- Source: diagSource, +- Message: getVulnMessage("go", info, false, diagSource == cache.Govulncheck), +- SuggestedFixes: fixes, +- }) +- } +- } +- +- return vulnDiagnostics, nil +-} +- +-type vulnFindingType int +- +-const ( +- vulnUnknown vulnFindingType = iota +- vulnCalled +- vulnImported +- vulnRequired +-) +- +-// foundVuln returns the frame info describing discovered vulnerable symbol/package/module +-// and how this vulnerability affects the analyzed package or module. +-func foundVuln(finding *govulncheck.Finding) (*govulncheck.Frame, vulnFindingType) { +- // finding.Trace is sorted from the imported vulnerable symbol to +- // the entry point in the callstack. +- // If Function is set, then Package must be set. Module will always be set. +- // If Function is set it was found in the call graph, otherwise if Package is set +- // it was found in the import graph, otherwise it was found in the require graph. +- // See the documentation of govulncheck.Finding. +- if len(finding.Trace) == 0 { // this shouldn't happen, but just in case... +- return nil, vulnUnknown +- } +- vuln := finding.Trace[0] +- if vuln.Package == "" { +- return vuln, vulnRequired +- } +- if vuln.Function == "" { +- return vuln, vulnImported +- } +- return vuln, vulnCalled +-} +- +-func sortedKeys(m map[string]bool) []string { +- ret := make([]string, 0, len(m)) +- for k := range m { +- ret = append(ret, k) +- } +- sort.Strings(ret) +- return ret +-} +- +-// suggestGovulncheckAction returns a code action that suggests either run govulncheck +-// for more accurate investigation (if the present vulncheck diagnostics are based on +-// analysis less accurate than govulncheck) or reset the existing govulncheck result +-// (if the present vulncheck diagnostics are already based on govulncheck run). 
+-func suggestGovulncheckAction(fromGovulncheck bool, uri protocol.DocumentURI) (cache.SuggestedFix, error) { +- if fromGovulncheck { +- resetVulncheck := command.NewResetGoModDiagnosticsCommand("Reset govulncheck result", command.ResetGoModDiagnosticsArgs{ +- URIArg: command.URIArg{URI: uri}, +- DiagnosticSource: string(cache.Govulncheck), +- }) +- return cache.SuggestedFixFromCommand(resetVulncheck, protocol.QuickFix), nil +- } +- vulncheck := command.NewRunGovulncheckCommand("Run govulncheck to verify", command.VulncheckArgs{ +- URI: uri, +- Pattern: "./...", +- }) +- return cache.SuggestedFixFromCommand(vulncheck, protocol.QuickFix), nil +-} +- +-func getVulnMessage(mod string, vulns []string, used, fromGovulncheck bool) string { +- var b strings.Builder +- if used { +- switch len(vulns) { +- case 1: +- fmt.Fprintf(&b, "%v has a vulnerability used in the code: %v.", mod, vulns[0]) +- default: +- fmt.Fprintf(&b, "%v has vulnerabilities used in the code: %v.", mod, strings.Join(vulns, ", ")) +- } +- } else { +- if fromGovulncheck { +- switch len(vulns) { +- case 1: +- fmt.Fprintf(&b, "%v has a vulnerability %v that is not used in the code.", mod, vulns[0]) +- default: +- fmt.Fprintf(&b, "%v has known vulnerabilities %v that are not used in the code.", mod, strings.Join(vulns, ", ")) +- } +- } else { +- switch len(vulns) { +- case 1: +- fmt.Fprintf(&b, "%v has a vulnerability %v.", mod, vulns[0]) +- default: +- fmt.Fprintf(&b, "%v has known vulnerabilities %v.", mod, strings.Join(vulns, ", ")) +- } +- } +- } +- return b.String() +-} +- +-// href returns the url for the vulnerability information. +-// Eventually we should retrieve the url embedded in the osv.Entry. +-// While vuln.go.dev is under development, this always returns +-// the page in pkg.go.dev. +-func href(vulnID string) string { +- return fmt.Sprintf("https://pkg.go.dev/vuln/%s", vulnID) +-} +- +-func getUpgradeCodeAction(fh file.Handle, req *modfile.Require, version string) *protocol.Command { +- return command.NewUpgradeDependencyCommand(upgradeTitle(version), command.DependencyArgs{ +- URI: fh.URI(), +- AddRequire: false, +- GoCmdArgs: []string{req.Mod.Path + "@" + version}, +- }) +-} +- +-func upgradeTitle(fixedVersion string) string { +- title := fmt.Sprintf("%s%v", upgradeCodeActionPrefix, fixedVersion) +- return title +-} +- +-// SelectUpgradeCodeActions takes a list of code actions for a required module +-// and returns a more selective list of upgrade code actions, +-// where the code actions have been deduped. Code actions unrelated to upgrade +-// are deduplicated by the name. 
+-func SelectUpgradeCodeActions(actions []protocol.CodeAction) []protocol.CodeAction { +- if len(actions) <= 1 { +- return actions // return early if no sorting necessary +- } +- var versionedUpgrade, latestUpgrade, resetAction protocol.CodeAction +- var chosenVersionedUpgrade string +- var selected []protocol.CodeAction +- +- seenTitles := make(map[string]bool) +- +- for _, action := range actions { +- if strings.HasPrefix(action.Title, upgradeCodeActionPrefix) { +- if v := getUpgradeVersion(action); v == "latest" && latestUpgrade.Title == "" { +- latestUpgrade = action +- } else if versionedUpgrade.Title == "" || semver.Compare(v, chosenVersionedUpgrade) > 0 { +- chosenVersionedUpgrade = v +- versionedUpgrade = action +- } +- } else if strings.HasPrefix(action.Title, "Reset govulncheck") { +- resetAction = action +- } else if !seenTitles[action.Command.Title] { +- seenTitles[action.Command.Title] = true +- selected = append(selected, action) +- } +- } +- if versionedUpgrade.Title != "" { +- selected = append(selected, versionedUpgrade) +- } +- if latestUpgrade.Title != "" { +- selected = append(selected, latestUpgrade) +- } +- if resetAction.Title != "" { +- selected = append(selected, resetAction) +- } +- return selected +-} +- +-func getUpgradeVersion(p protocol.CodeAction) string { +- return strings.TrimPrefix(p.Title, upgradeCodeActionPrefix) +-} +diff -urN a/gopls/internal/mod/format.go b/gopls/internal/mod/format.go +--- a/gopls/internal/mod/format.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/mod/format.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,32 +0,0 @@ +-// Copyright 2020 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package mod +- +-import ( +- "context" +- +- "golang.org/x/tools/gopls/internal/cache" +- "golang.org/x/tools/gopls/internal/file" +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/internal/diff" +- "golang.org/x/tools/internal/event" +-) +- +-func Format(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle) ([]protocol.TextEdit, error) { +- ctx, done := event.Start(ctx, "mod.Format") +- defer done() +- +- pm, err := snapshot.ParseMod(ctx, fh) +- if err != nil { +- return nil, err +- } +- formatted, err := pm.File.Format() +- if err != nil { +- return nil, err +- } +- // Calculate the edits to be made due to the change. +- diffs := diff.Bytes(pm.Mapper.Content, formatted) +- return protocol.EditsFromDiffEdits(pm.Mapper, diffs) +-} +diff -urN a/gopls/internal/mod/hover.go b/gopls/internal/mod/hover.go +--- a/gopls/internal/mod/hover.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/mod/hover.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,384 +0,0 @@ +-// Copyright 2020 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. 
+- +-package mod +- +-import ( +- "bytes" +- "context" +- "fmt" +- "slices" +- "sort" +- "strings" +- +- "golang.org/x/mod/modfile" +- "golang.org/x/mod/module" +- "golang.org/x/mod/semver" +- "golang.org/x/tools/gopls/internal/cache" +- "golang.org/x/tools/gopls/internal/file" +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/gopls/internal/settings" +- "golang.org/x/tools/gopls/internal/vulncheck" +- "golang.org/x/tools/gopls/internal/vulncheck/govulncheck" +- "golang.org/x/tools/gopls/internal/vulncheck/osv" +- "golang.org/x/tools/internal/event" +-) +- +-func Hover(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle, position protocol.Position) (*protocol.Hover, error) { +- // We only provide hover information for the view's go.mod files. +- if !slices.Contains(snapshot.View().ModFiles(), fh.URI()) { +- return nil, nil +- } +- +- ctx, done := event.Start(ctx, "mod.Hover") +- defer done() +- +- // Get the position of the cursor. +- pm, err := snapshot.ParseMod(ctx, fh) +- if err != nil { +- return nil, fmt.Errorf("getting modfile handle: %w", err) +- } +- offset, err := pm.Mapper.PositionOffset(position) +- if err != nil { +- return nil, fmt.Errorf("computing cursor position: %w", err) +- } +- +- // If the cursor position is on a module statement +- if hover, ok := hoverOnModuleStatement(ctx, pm, offset, snapshot, fh); ok { +- return hover, nil +- } +- return hoverOnRequireStatement(ctx, pm, offset, snapshot, fh) +-} +- +-func hoverOnRequireStatement(ctx context.Context, pm *cache.ParsedModule, offset int, snapshot *cache.Snapshot, fh file.Handle) (*protocol.Hover, error) { +- // Confirm that the cursor is at the position of a require statement. +- var req *modfile.Require +- var startOffset, endOffset int +- for _, r := range pm.File.Require { +- dep := []byte(r.Mod.Path) +- s, e := r.Syntax.Start.Byte, r.Syntax.End.Byte +- i := bytes.Index(pm.Mapper.Content[s:e], dep) +- if i == -1 { +- continue +- } +- // Shift the start position to the location of the +- // dependency within the require statement. +- startOffset, endOffset = s+i, e +- if startOffset <= offset && offset <= endOffset { +- req = r +- break +- } +- } +- // TODO(hyangah): find position for info about vulnerabilities in Go +- +- // The cursor position is not on a require statement. +- if req == nil { +- return nil, nil +- } +- +- // Get the vulnerability info. +- fromGovulncheck := true +- vs := snapshot.Vulnerabilities(fh.URI())[fh.URI()] +- if vs == nil && snapshot.Options().Vulncheck == settings.ModeVulncheckImports { +- var err error +- vs, err = snapshot.ModVuln(ctx, fh.URI()) +- if err != nil { +- return nil, err +- } +- fromGovulncheck = false +- } +- affecting, nonaffecting, osvs := lookupVulns(vs, req.Mod.Path, req.Mod.Version) +- +- // Get the `go mod why` results for the given file. +- why, err := snapshot.ModWhy(ctx, fh) +- if err != nil { +- return nil, err +- } +- explanation, ok := why[req.Mod.Path] +- if !ok { +- return nil, nil +- } +- +- // Get the range to highlight for the hover. +- // TODO(hyangah): adjust the hover range to include the version number +- // to match the diagnostics' range. 
+- rng, err := pm.Mapper.OffsetRange(startOffset, endOffset) +- if err != nil { +- return nil, err +- } +- options := snapshot.Options() +- isPrivate := snapshot.IsGoPrivatePath(req.Mod.Path) +- header := formatHeader(req.Mod.Path, options) +- explanation = formatExplanation(explanation, pm.ReplaceMap, req, options, isPrivate) +- vulns := formatVulnerabilities(affecting, nonaffecting, osvs, options, fromGovulncheck) +- +- return &protocol.Hover{ +- Contents: protocol.MarkupContent{ +- Kind: options.PreferredContentFormat, +- Value: header + vulns + explanation, +- }, +- Range: rng, +- }, nil +-} +- +-func hoverOnModuleStatement(ctx context.Context, pm *cache.ParsedModule, offset int, snapshot *cache.Snapshot, fh file.Handle) (*protocol.Hover, bool) { +- module := pm.File.Module +- if module == nil { +- return nil, false // no module stmt +- } +- if offset < module.Syntax.Start.Byte || offset > module.Syntax.End.Byte { +- return nil, false // cursor not in module stmt +- } +- +- rng, err := pm.Mapper.OffsetRange(module.Syntax.Start.Byte, module.Syntax.End.Byte) +- if err != nil { +- return nil, false +- } +- fromGovulncheck := true +- vs := snapshot.Vulnerabilities(fh.URI())[fh.URI()] +- +- if vs == nil && snapshot.Options().Vulncheck == settings.ModeVulncheckImports { +- vs, err = snapshot.ModVuln(ctx, fh.URI()) +- if err != nil { +- return nil, false +- } +- fromGovulncheck = false +- } +- modpath := "stdlib" +- goVersion := snapshot.View().GoVersionString() +- affecting, nonaffecting, osvs := lookupVulns(vs, modpath, goVersion) +- options := snapshot.Options() +- vulns := formatVulnerabilities(affecting, nonaffecting, osvs, options, fromGovulncheck) +- +- return &protocol.Hover{ +- Contents: protocol.MarkupContent{ +- Kind: options.PreferredContentFormat, +- Value: vulns, +- }, +- Range: rng, +- }, true +-} +- +-func formatHeader(modpath string, options *settings.Options) string { +- var b strings.Builder +- // Write the heading as an H3. +- b.WriteString("#### " + modpath) +- if options.PreferredContentFormat == protocol.Markdown { +- b.WriteString("\n\n") +- } else { +- b.WriteRune('\n') +- } +- return b.String() +-} +- +-func lookupVulns(vulns *vulncheck.Result, modpath, version string) (affecting, nonaffecting []*govulncheck.Finding, osvs map[string]*osv.Entry) { +- if vulns == nil || len(vulns.Entries) == 0 { +- return nil, nil, nil +- } +- for _, finding := range vulns.Findings { +- vuln, typ := foundVuln(finding) +- if vuln.Module != modpath { +- continue +- } +- // It is possible that the source code was changed since the last +- // govulncheck run and information in the `vulns` info is stale. +- // For example, imagine that a user is in the middle of updating +- // problematic modules detected by the govulncheck run by applying +- // quick fixes. Stale diagnostics can be confusing and prevent the +- // user from quickly locating the next module to fix. +- // Ideally we should rerun the analysis with the updated module +- // dependencies or any other code changes, but we are not yet +- // in the position of automatically triggering the analysis +- // (govulncheck can take a while). We also don't know exactly what +- // part of source code was changed since `vulns` was computed. +- // As a heuristic, we assume that a user upgrades the affecting +- // module to the version with the fix or the latest one, and if the +- // version in the require statement is equal to or higher than the +- // fixed version, skip the vulnerability information in the hover. 
+- // Eventually, the user has to rerun govulncheck. +- if finding.FixedVersion != "" && semver.IsValid(version) && semver.Compare(finding.FixedVersion, version) <= 0 { +- continue +- } +- switch typ { +- case vulnCalled: +- affecting = append(affecting, finding) +- case vulnImported: +- nonaffecting = append(nonaffecting, finding) +- } +- } +- +- // Remove affecting elements from nonaffecting. +- // An OSV entry can appear in both lists if an OSV entry covers +- // multiple packages imported but not all vulnerable symbols are used. +- // The current wording of hover message doesn't clearly +- // present this case well IMO, so let's skip reporting nonaffecting. +- if len(affecting) > 0 && len(nonaffecting) > 0 { +- affectingSet := map[string]bool{} +- for _, f := range affecting { +- affectingSet[f.OSV] = true +- } +- n := 0 +- for _, v := range nonaffecting { +- if !affectingSet[v.OSV] { +- nonaffecting[n] = v +- n++ +- } +- } +- nonaffecting = nonaffecting[:n] +- } +- sort.Slice(nonaffecting, func(i, j int) bool { return nonaffecting[i].OSV < nonaffecting[j].OSV }) +- sort.Slice(affecting, func(i, j int) bool { return affecting[i].OSV < affecting[j].OSV }) +- return affecting, nonaffecting, vulns.Entries +-} +- +-func fixedVersion(fixed string) string { +- if fixed == "" { +- return "No fix is available." +- } +- return "Fixed in " + fixed + "." +-} +- +-func formatVulnerabilities(affecting, nonaffecting []*govulncheck.Finding, osvs map[string]*osv.Entry, options *settings.Options, fromGovulncheck bool) string { +- if len(osvs) == 0 || (len(affecting) == 0 && len(nonaffecting) == 0) { +- return "" +- } +- byOSV := func(findings []*govulncheck.Finding) map[string][]*govulncheck.Finding { +- m := make(map[string][]*govulncheck.Finding) +- for _, f := range findings { +- m[f.OSV] = append(m[f.OSV], f) +- } +- return m +- } +- affectingByOSV := byOSV(affecting) +- nonaffectingByOSV := byOSV(nonaffecting) +- +- // TODO(hyangah): can we use go templates to generate hover messages? +- // Then, we can use a different template for markdown case. +- useMarkdown := options.PreferredContentFormat == protocol.Markdown +- +- var b strings.Builder +- +- if len(affectingByOSV) > 0 { +- // TODO(hyangah): make the message more eyecatching (icon/codicon/color) +- if len(affectingByOSV) == 1 { +- fmt.Fprintf(&b, "\n**WARNING:** Found %d reachable vulnerability.\n", len(affectingByOSV)) +- } else { +- fmt.Fprintf(&b, "\n**WARNING:** Found %d reachable vulnerabilities.\n", len(affectingByOSV)) +- } +- } +- for id, findings := range affectingByOSV { +- fix := fixedVersion(findings[0].FixedVersion) +- pkgs := vulnerablePkgsInfo(findings, useMarkdown) +- osvEntry := osvs[id] +- +- if useMarkdown { +- fmt.Fprintf(&b, "- [**%v**](%v) %v%v\n%v\n", id, href(id), osvEntry.Summary, pkgs, fix) +- } else { +- fmt.Fprintf(&b, " - [%v] %v (%v) %v%v\n", id, osvEntry.Summary, href(id), pkgs, fix) +- } +- } +- if len(nonaffecting) > 0 { +- if fromGovulncheck { +- fmt.Fprintf(&b, "\n**Note:** The project imports packages with known vulnerabilities, but does not call the vulnerable code.\n") +- } else { +- fmt.Fprintf(&b, "\n**Note:** The project imports packages with known vulnerabilities. 
Use `govulncheck` to check if the project uses vulnerable symbols.\n") +- } +- } +- for k, findings := range nonaffectingByOSV { +- fix := fixedVersion(findings[0].FixedVersion) +- pkgs := vulnerablePkgsInfo(findings, useMarkdown) +- osvEntry := osvs[k] +- +- if useMarkdown { +- fmt.Fprintf(&b, "- [%v](%v) %v%v\n%v\n", k, href(k), osvEntry.Summary, pkgs, fix) +- } else { +- fmt.Fprintf(&b, " - [%v] %v (%v) %v\n%v\n", k, osvEntry.Summary, href(k), pkgs, fix) +- } +- } +- b.WriteString("\n") +- return b.String() +-} +- +-func vulnerablePkgsInfo(findings []*govulncheck.Finding, useMarkdown bool) string { +- var b strings.Builder +- seen := map[string]bool{} +- for _, f := range findings { +- p := f.Trace[0].Package +- if !seen[p] { +- seen[p] = true +- if useMarkdown { +- b.WriteString("\n * `") +- } else { +- b.WriteString("\n ") +- } +- b.WriteString(p) +- if useMarkdown { +- b.WriteString("`") +- } +- } +- } +- return b.String() +-} +- +-func formatExplanation(text string, replaceMap map[module.Version]module.Version, req *modfile.Require, options *settings.Options, isPrivate bool) string { +- text = strings.TrimSuffix(text, "\n") +- splt := strings.Split(text, "\n") +- length := len(splt) +- +- var b strings.Builder +- +- // If the explanation is 2 lines, then it is of the form: +- // # golang.org/x/text/encoding +- // (main module does not need package golang.org/x/text/encoding) +- if length == 2 { +- b.WriteString(splt[1]) +- return b.String() +- } +- +- imp := splt[length-1] // import path +- reference := imp +- // See golang/go#36998: don't link to modules matching GOPRIVATE. +- if !isPrivate && options.PreferredContentFormat == protocol.Markdown { +- target := imp +- if strings.ToLower(options.LinkTarget) == "pkg.go.dev" { +- mod := req.Mod +- // respect the replacement when constructing a module link. +- if m, ok := replaceMap[req.Mod]; ok { +- // Have: 'replace A v1.2.3 => A vx.x.x' or 'replace A v1.2.3 => B vx.x.x'. +- mod = m +- } else if m, ok := replaceMap[module.Version{Path: req.Mod.Path}]; ok && +- !modfile.IsDirectoryPath(m.Path) { // exclude local replacement. +- // Have: 'replace A => A vx.x.x' or 'replace A => B vx.x.x'. +- mod = m +- } +- target = strings.Replace(target, req.Mod.Path, mod.String(), 1) +- } +- reference = fmt.Sprintf("[%s](%s)", imp, cache.BuildLink(options.LinkTarget, target, "")) +- } +- b.WriteString("This module is necessary because " + reference + " is imported in") +- +- // If the explanation is 3 lines, then it is of the form: +- // # golang.org/x/tools +- // modtest +- // golang.org/x/tools/go/packages +- if length == 3 { +- msg := fmt.Sprintf(" `%s`.", splt[1]) +- b.WriteString(msg) +- return b.String() +- } +- +- // If the explanation is more than 3 lines, then it is of the form: +- // # golang.org/x/text/language +- // rsc.io/quote +- // rsc.io/sampler +- // golang.org/x/text/language +- b.WriteString(":\n```text") +- var dash strings.Builder +- for _, imp := range splt[1 : length-1] { +- dash.WriteString("-") +- b.WriteString("\n" + dash.String() + " " + imp) +- } +- b.WriteString("\n```") +- return b.String() +-} +diff -urN a/gopls/internal/mod/inlay_hint.go b/gopls/internal/mod/inlay_hint.go +--- a/gopls/internal/mod/inlay_hint.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/mod/inlay_hint.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,104 +0,0 @@ +-// Copyright 2023 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. 
+-package mod +- +-import ( +- "context" +- "fmt" +- +- "golang.org/x/mod/modfile" +- "golang.org/x/tools/gopls/internal/cache" +- "golang.org/x/tools/gopls/internal/file" +- "golang.org/x/tools/gopls/internal/protocol" +-) +- +-func InlayHint(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle, _ protocol.Range) ([]protocol.InlayHint, error) { +- // Inlay hints are enabled if the client supports them. +- pm, err := snapshot.ParseMod(ctx, fh) +- if err != nil { +- return nil, err +- } +- +- // Compare the version of the module used in the snapshot's +- // metadata (i.e. the solution to the MVS constraints computed +- // by go list) with the version requested by the module, in +- // both cases, taking replaces into account. Produce an +- // InlayHint when the version of the module is not the one +- // used. +- +- replaces := make(map[string]*modfile.Replace) +- for _, x := range pm.File.Replace { +- replaces[x.Old.Path] = x +- } +- +- requires := make(map[string]*modfile.Require) +- for _, x := range pm.File.Require { +- requires[x.Mod.Path] = x +- } +- +- am, err := snapshot.AllMetadata(ctx) +- if err != nil { +- return nil, err +- } +- +- var ans []protocol.InlayHint +- seen := make(map[string]bool) +- for _, meta := range am { +- if meta.Module == nil || seen[meta.Module.Path] { +- continue +- } +- seen[meta.Module.Path] = true +- metaVersion := meta.Module.Version +- if meta.Module.Replace != nil { +- metaVersion = meta.Module.Replace.Version +- } +- // These versions can be blank, as in gopls/go.mod's local replace +- if oldrepl, ok := replaces[meta.Module.Path]; ok && oldrepl.New.Version != metaVersion { +- ih := genHint(oldrepl.Syntax, oldrepl.New.Version, metaVersion, pm.Mapper) +- if ih != nil { +- ans = append(ans, *ih) +- } +- } else if oldreq, ok := requires[meta.Module.Path]; ok && oldreq.Mod.Version != metaVersion { +- // maybe it was replaced: +- if _, ok := replaces[meta.Module.Path]; ok { +- continue +- } +- ih := genHint(oldreq.Syntax, oldreq.Mod.Version, metaVersion, pm.Mapper) +- if ih != nil { +- ans = append(ans, *ih) +- } +- } +- } +- return ans, nil +-} +- +-func genHint(mline *modfile.Line, oldVersion, newVersion string, m *protocol.Mapper) *protocol.InlayHint { +- x := mline.End.Byte // the parser has removed trailing whitespace and comments (see modfile_test.go) +- x -= len(mline.Token[len(mline.Token)-1]) +- line, err := m.OffsetPosition(x) +- if err != nil { +- return nil +- } +- part := protocol.InlayHintLabelPart{ +- Value: newVersion, +- Tooltip: &protocol.OrPTooltipPLabel{ +- Value: fmt.Sprintf("The build selects version %s rather than go.mod's version %s.", newVersion, oldVersion), +- }, +- } +- rng, err := m.OffsetRange(x, mline.End.Byte) +- if err != nil { +- return nil +- } +- te := protocol.TextEdit{ +- Range: rng, +- NewText: newVersion, +- } +- return &protocol.InlayHint{ +- Position: line, +- Label: []protocol.InlayHintLabelPart{part}, +- Kind: protocol.Parameter, +- PaddingRight: true, +- TextEdits: []protocol.TextEdit{te}, +- } +-} +diff -urN a/gopls/internal/progress/progress.go b/gopls/internal/progress/progress.go +--- a/gopls/internal/progress/progress.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/progress/progress.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,293 +0,0 @@ +-// Copyright 2020 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. 
+- +-// The progress package defines utilities for reporting the progress +-// of long-running operations using features of the LSP client +-// interface such as Progress and ShowMessage. +-package progress +- +-import ( +- "context" +- "fmt" +- "io" +- "math/rand" +- "strconv" +- "strings" +- "sync" +- +- "golang.org/x/tools/gopls/internal/label" +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/internal/event" +- "golang.org/x/tools/internal/xcontext" +-) +- +-// NewTracker returns a new Tracker that reports progress to the +-// specified client. +-func NewTracker(client protocol.Client) *Tracker { +- return &Tracker{ +- client: client, +- inProgress: make(map[protocol.ProgressToken]*WorkDone), +- } +-} +- +-// A Tracker reports the progress of a long-running operation to an LSP client. +-type Tracker struct { +- client protocol.Client +- supportsWorkDoneProgress bool +- +- mu sync.Mutex +- inProgress map[protocol.ProgressToken]*WorkDone +-} +- +-// SetSupportsWorkDoneProgress sets whether the client supports "work done" +-// progress reporting. It must be set before using the tracker. +-// +-// TODO(rfindley): fix this broken initialization pattern. +-// Also: do we actually need the fall-back progress behavior using ShowMessage? +-// Surely ShowMessage notifications are too noisy to be worthwhile. +-func (t *Tracker) SetSupportsWorkDoneProgress(b bool) { +- t.supportsWorkDoneProgress = b +-} +- +-// SupportsWorkDoneProgress reports whether the tracker supports work done +-// progress reporting. +-func (t *Tracker) SupportsWorkDoneProgress() bool { +- return t.supportsWorkDoneProgress +-} +- +-// Start notifies the client of work being done on the server. It uses either +-// ShowMessage RPCs or $/progress messages, depending on the capabilities of +-// the client. The returned WorkDone handle may be used to report incremental +-// progress, and to report work completion. In particular, it is an error to +-// call start and not call end(...) on the returned WorkDone handle. +-// +-// If token is empty, a token will be randomly generated. +-// +-// The progress item is considered cancellable if the given cancel func is +-// non-nil. In this case, cancel is called when the work done +-// +-// Example: +-// +-// func Generate(ctx) (err error) { +-// ctx, cancel := context.WithCancel(ctx) +-// defer cancel() +-// work := s.progress.start(ctx, "generate", "running go generate", cancel) +-// defer func() { +-// if err != nil { +-// work.end(ctx, fmt.Sprintf("generate failed: %v", err)) +-// } else { +-// work.end(ctx, "done") +-// } +-// }() +-// // Do the work... +-// } +-func (t *Tracker) Start(ctx context.Context, title, message string, token protocol.ProgressToken, cancel func()) *WorkDone { +- ctx = xcontext.Detach(ctx) // progress messages should not be cancelled +- wd := &WorkDone{ +- client: t.client, +- token: token, +- cancel: cancel, +- } +- if !t.supportsWorkDoneProgress { +- // Previous iterations of this fallback attempted to retain cancellation +- // support by using ShowMessageCommand with a 'Cancel' button, but this is +- // not ideal as the 'Cancel' dialog stays open even after the command +- // completes. +- // +- // Just show a simple message. Clients can implement workDone progress +- // reporting to get cancellation support. 
+- if err := wd.client.ShowMessage(ctx, &protocol.ShowMessageParams{ +- Type: protocol.Log, +- Message: message, +- }); err != nil { +- event.Error(ctx, "showing start message for "+title, err) +- } +- return wd +- } +- if wd.token == nil { +- token = strconv.FormatInt(rand.Int63(), 10) +- err := wd.client.WorkDoneProgressCreate(ctx, &protocol.WorkDoneProgressCreateParams{ +- Token: token, +- }) +- if err != nil { +- wd.err = err +- event.Error(ctx, "starting work for "+title, err) +- return wd +- } +- wd.token = token +- } +- // At this point we have a token that the client knows about. Store the token +- // before starting work. +- t.mu.Lock() +- t.inProgress[wd.token] = wd +- t.mu.Unlock() +- wd.cleanup = func() { +- t.mu.Lock() +- delete(t.inProgress, token) +- t.mu.Unlock() +- } +- err := wd.client.Progress(ctx, &protocol.ProgressParams{ +- Token: wd.token, +- Value: &protocol.WorkDoneProgressBegin{ +- Kind: "begin", +- Cancellable: wd.cancel != nil, +- Message: message, +- Title: title, +- }, +- }) +- if err != nil { +- event.Error(ctx, "progress begin", err) +- } +- return wd +-} +- +-func (t *Tracker) Cancel(token protocol.ProgressToken) error { +- t.mu.Lock() +- defer t.mu.Unlock() +- wd, ok := t.inProgress[token] +- if !ok { +- return fmt.Errorf("token %q not found in progress", token) +- } +- if wd.cancel == nil { +- return fmt.Errorf("work %q is not cancellable", token) +- } +- wd.doCancel() +- return nil +-} +- +-// WorkDone represents a unit of work that is reported to the client via the +-// progress API. +-type WorkDone struct { +- client protocol.Client +- // If token is nil, this workDone object uses the ShowMessage API, rather +- // than $/progress. +- token protocol.ProgressToken +- // err is set if progress reporting is broken for some reason (for example, +- // if there was an initial error creating a token). +- err error +- +- cancelMu sync.Mutex +- cancelled bool +- cancel func() +- +- cleanup func() +-} +- +-func (wd *WorkDone) Token() protocol.ProgressToken { +- return wd.token +-} +- +-func (wd *WorkDone) doCancel() { +- wd.cancelMu.Lock() +- defer wd.cancelMu.Unlock() +- if !wd.cancelled { +- wd.cancel() +- } +-} +- +-// Report reports an update on WorkDone report back to the client. +-func (wd *WorkDone) Report(ctx context.Context, message string, fraction float64) { +- ctx = xcontext.Detach(ctx) // progress messages should not be cancelled +- if wd == nil { +- return +- } +- wd.cancelMu.Lock() +- cancelled := wd.cancelled +- wd.cancelMu.Unlock() +- if cancelled { +- return +- } +- if wd.err != nil || wd.token == nil { +- // Not using the workDone API, so we do nothing. It would be far too spammy +- // to send incremental messages. +- return +- } +- message = strings.TrimSuffix(message, "\n") +- percentage := uint32(100 * fraction) +- err := wd.client.Progress(ctx, &protocol.ProgressParams{ +- Token: wd.token, +- Value: &protocol.WorkDoneProgressReport{ +- Kind: "report", +- // Note that in the LSP spec, the value of Cancellable may be changed to +- // control whether the cancel button in the UI is enabled. Since we don't +- // yet use this feature, the value is kept constant here. +- Cancellable: wd.cancel != nil, +- Message: message, +- Percentage: &percentage, +- }, +- }) +- if err != nil { +- event.Error(ctx, "reporting progress", err) +- } +-} +- +-// End reports a workdone completion back to the client. 
+-func (wd *WorkDone) End(ctx context.Context, message string) { +- ctx = xcontext.Detach(ctx) // progress messages should not be cancelled +- if wd == nil { +- return +- } +- var err error +- switch { +- case wd.err != nil: +- // There is a prior error. +- case wd.token == nil: +- // We're falling back to message-based reporting. +- err = wd.client.ShowMessage(ctx, &protocol.ShowMessageParams{ +- Type: protocol.Info, +- Message: message, +- }) +- default: +- err = wd.client.Progress(ctx, &protocol.ProgressParams{ +- Token: wd.token, +- Value: &protocol.WorkDoneProgressEnd{ +- Kind: "end", +- Message: message, +- }, +- }) +- } +- if err != nil { +- event.Error(ctx, "ending work", err) +- } +- if wd.cleanup != nil { +- wd.cleanup() +- } +-} +- +-// NewEventWriter returns an [io.Writer] that calls the context's +-// event printer for each data payload, wrapping it with the +-// operation=generate tag to distinguish its logs from others. +-func NewEventWriter(ctx context.Context, operation string) io.Writer { +- return &eventWriter{ctx: ctx, operation: operation} +-} +- +-type eventWriter struct { +- ctx context.Context +- operation string +-} +- +-func (ew *eventWriter) Write(p []byte) (n int, err error) { +- event.Log(ew.ctx, string(p), label.Operation.Of(ew.operation)) +- return len(p), nil +-} +- +-// NewWorkDoneWriter wraps a WorkDone handle to provide a Writer interface, +-// so that workDone reporting can more easily be hooked into commands. +-func NewWorkDoneWriter(ctx context.Context, wd *WorkDone) io.Writer { +- return &workDoneWriter{ctx: ctx, wd: wd} +-} +- +-// workDoneWriter wraps a workDone handle to provide a Writer interface, +-// so that workDone reporting can more easily be hooked into commands. +-type workDoneWriter struct { +- // In order to implement the io.Writer interface, we must close over ctx. +- ctx context.Context +- wd *WorkDone +-} +- +-func (wdw *workDoneWriter) Write(p []byte) (n int, err error) { +- wdw.wd.Report(wdw.ctx, string(p), 0) +- // Don't fail just because of a failure to report progress. +- return len(p), nil +-} +diff -urN a/gopls/internal/progress/progress_test.go b/gopls/internal/progress/progress_test.go +--- a/gopls/internal/progress/progress_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/progress/progress_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,159 +0,0 @@ +-// Copyright 2020 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. 
+- +-package progress +- +-import ( +- "context" +- "fmt" +- "sync" +- "testing" +- +- "golang.org/x/tools/gopls/internal/protocol" +-) +- +-type fakeClient struct { +- protocol.Client +- +- token protocol.ProgressToken +- +- mu sync.Mutex +- created, begun, reported, messages, ended int +-} +- +-func (c *fakeClient) checkToken(token protocol.ProgressToken) { +- if token == nil { +- panic("nil token in progress message") +- } +- if c.token != nil && c.token != token { +- panic(fmt.Errorf("invalid token in progress message: got %v, want %v", token, c.token)) +- } +-} +- +-func (c *fakeClient) WorkDoneProgressCreate(ctx context.Context, params *protocol.WorkDoneProgressCreateParams) error { +- c.mu.Lock() +- defer c.mu.Unlock() +- c.checkToken(params.Token) +- c.created++ +- return nil +-} +- +-func (c *fakeClient) Progress(ctx context.Context, params *protocol.ProgressParams) error { +- c.mu.Lock() +- defer c.mu.Unlock() +- c.checkToken(params.Token) +- switch params.Value.(type) { +- case *protocol.WorkDoneProgressBegin: +- c.begun++ +- case *protocol.WorkDoneProgressReport: +- c.reported++ +- case *protocol.WorkDoneProgressEnd: +- c.ended++ +- default: +- panic(fmt.Errorf("unknown progress value %T", params.Value)) +- } +- return nil +-} +- +-func (c *fakeClient) ShowMessage(context.Context, *protocol.ShowMessageParams) error { +- c.mu.Lock() +- defer c.mu.Unlock() +- c.messages++ +- return nil +-} +- +-func setup() (*Tracker, *fakeClient) { +- c := &fakeClient{} +- tracker := NewTracker(c) +- tracker.SetSupportsWorkDoneProgress(true) +- return tracker, c +-} +- +-func TestProgressTracker_Reporting(t *testing.T) { +- for _, test := range []struct { +- name string +- supported bool +- token protocol.ProgressToken +- wantReported, wantCreated, wantBegun, wantEnded int +- wantMessages int +- }{ +- { +- name: "unsupported", +- wantMessages: 2, +- }, +- { +- name: "random token", +- supported: true, +- wantCreated: 1, +- wantBegun: 1, +- wantReported: 1, +- wantEnded: 1, +- }, +- { +- name: "string token", +- supported: true, +- token: "token", +- wantBegun: 1, +- wantReported: 1, +- wantEnded: 1, +- }, +- { +- name: "numeric token", +- supported: true, +- token: 1, +- wantReported: 1, +- wantBegun: 1, +- wantEnded: 1, +- }, +- } { +- t.Run(test.name, func(t *testing.T) { +- tracker, client := setup() +- ctx := t.Context() +- tracker.supportsWorkDoneProgress = test.supported +- work := tracker.Start(ctx, "work", "message", test.token, nil) +- client.mu.Lock() +- gotCreated, gotBegun := client.created, client.begun +- client.mu.Unlock() +- if gotCreated != test.wantCreated { +- t.Errorf("got %d created tokens, want %d", gotCreated, test.wantCreated) +- } +- if gotBegun != test.wantBegun { +- t.Errorf("got %d work begun, want %d", gotBegun, test.wantBegun) +- } +- // Ignore errors: this is just testing the reporting behavior. 
+- work.Report(ctx, "report", 0.5) +- client.mu.Lock() +- gotReported := client.reported +- client.mu.Unlock() +- if gotReported != test.wantReported { +- t.Errorf("got %d progress reports, want %d", gotReported, test.wantCreated) +- } +- work.End(ctx, "done") +- client.mu.Lock() +- gotEnded, gotMessages := client.ended, client.messages +- client.mu.Unlock() +- if gotEnded != test.wantEnded { +- t.Errorf("got %d ended reports, want %d", gotEnded, test.wantEnded) +- } +- if gotMessages != test.wantMessages { +- t.Errorf("got %d messages, want %d", gotMessages, test.wantMessages) +- } +- }) +- } +-} +- +-func TestProgressTracker_Cancellation(t *testing.T) { +- for _, token := range []protocol.ProgressToken{nil, 1, "a"} { +- tracker, _ := setup() +- var canceled bool +- cancel := func() { canceled = true } +- work := tracker.Start(t.Context(), "work", "message", token, cancel) +- if err := tracker.Cancel(work.Token()); err != nil { +- t.Fatal(err) +- } +- if !canceled { +- t.Errorf("tracker.cancel(...): cancel not called") +- } +- } +-} +diff -urN a/gopls/internal/protocol/command/command_gen.go b/gopls/internal/protocol/command/command_gen.go +--- a/gopls/internal/protocol/command/command_gen.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/protocol/command/command_gen.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,740 +0,0 @@ +-// Copyright 2024 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-// Don't include this file during code generation, or it will break the build +-// if existing interface methods have been modified. +-//go:build !generate +-// +build !generate +- +-// Code generated by gen.go from gopls/internal/protocol/command. DO NOT EDIT. +- +-package command +- +-import ( +- "context" +- "fmt" +- +- "golang.org/x/tools/gopls/internal/protocol" +-) +- +-// Symbolic names for gopls commands, corresponding to methods of [Interface]. +-// +-// The string value is used in the Command field of protocol.Command. +-// These commands may be obtained from a CodeLens or CodeAction request +-// and executed by an ExecuteCommand request. 
+-const ( +- AddDependency Command = "gopls.add_dependency" +- AddImport Command = "gopls.add_import" +- AddTelemetryCounters Command = "gopls.add_telemetry_counters" +- AddTest Command = "gopls.add_test" +- ApplyFix Command = "gopls.apply_fix" +- Assembly Command = "gopls.assembly" +- ChangeSignature Command = "gopls.change_signature" +- CheckUpgrades Command = "gopls.check_upgrades" +- ClientOpenURL Command = "gopls.client_open_url" +- DiagnoseFiles Command = "gopls.diagnose_files" +- Doc Command = "gopls.doc" +- EditGoDirective Command = "gopls.edit_go_directive" +- ExtractToNewFile Command = "gopls.extract_to_new_file" +- FetchVulncheckResult Command = "gopls.fetch_vulncheck_result" +- FreeSymbols Command = "gopls.free_symbols" +- GCDetails Command = "gopls.gc_details" +- Generate Command = "gopls.generate" +- GoGetPackage Command = "gopls.go_get_package" +- LSP Command = "gopls.lsp" +- ListImports Command = "gopls.list_imports" +- ListKnownPackages Command = "gopls.list_known_packages" +- MaybePromptForTelemetry Command = "gopls.maybe_prompt_for_telemetry" +- MemStats Command = "gopls.mem_stats" +- ModifyTags Command = "gopls.modify_tags" +- Modules Command = "gopls.modules" +- PackageSymbols Command = "gopls.package_symbols" +- Packages Command = "gopls.packages" +- RegenerateCgo Command = "gopls.regenerate_cgo" +- RemoveDependency Command = "gopls.remove_dependency" +- ResetGoModDiagnostics Command = "gopls.reset_go_mod_diagnostics" +- RunGoWorkCommand Command = "gopls.run_go_work_command" +- RunGovulncheck Command = "gopls.run_govulncheck" +- RunTests Command = "gopls.run_tests" +- ScanImports Command = "gopls.scan_imports" +- SplitPackage Command = "gopls.split_package" +- StartDebugging Command = "gopls.start_debugging" +- StartProfile Command = "gopls.start_profile" +- StopProfile Command = "gopls.stop_profile" +- Tidy Command = "gopls.tidy" +- UpdateGoSum Command = "gopls.update_go_sum" +- UpgradeDependency Command = "gopls.upgrade_dependency" +- Vendor Command = "gopls.vendor" +- Views Command = "gopls.views" +- Vulncheck Command = "gopls.vulncheck" +- WorkspaceStats Command = "gopls.workspace_stats" +-) +- +-var Commands = []Command{ +- AddDependency, +- AddImport, +- AddTelemetryCounters, +- AddTest, +- ApplyFix, +- Assembly, +- ChangeSignature, +- CheckUpgrades, +- ClientOpenURL, +- DiagnoseFiles, +- Doc, +- EditGoDirective, +- ExtractToNewFile, +- FetchVulncheckResult, +- FreeSymbols, +- GCDetails, +- Generate, +- GoGetPackage, +- LSP, +- ListImports, +- ListKnownPackages, +- MaybePromptForTelemetry, +- MemStats, +- ModifyTags, +- Modules, +- PackageSymbols, +- Packages, +- RegenerateCgo, +- RemoveDependency, +- ResetGoModDiagnostics, +- RunGoWorkCommand, +- RunGovulncheck, +- RunTests, +- ScanImports, +- SplitPackage, +- StartDebugging, +- StartProfile, +- StopProfile, +- Tidy, +- UpdateGoSum, +- UpgradeDependency, +- Vendor, +- Views, +- Vulncheck, +- WorkspaceStats, +-} +- +-func Dispatch(ctx context.Context, params *protocol.ExecuteCommandParams, s Interface) (any, error) { +- switch Command(params.Command) { +- case AddDependency: +- var a0 DependencyArgs +- if err := UnmarshalArgs(params.Arguments, &a0); err != nil { +- return nil, err +- } +- return nil, s.AddDependency(ctx, a0) +- case AddImport: +- var a0 AddImportArgs +- if err := UnmarshalArgs(params.Arguments, &a0); err != nil { +- return nil, err +- } +- return nil, s.AddImport(ctx, a0) +- case AddTelemetryCounters: +- var a0 AddTelemetryCountersArgs +- if err := UnmarshalArgs(params.Arguments, &a0); err != 
nil { +- return nil, err +- } +- return nil, s.AddTelemetryCounters(ctx, a0) +- case AddTest: +- var a0 protocol.Location +- if err := UnmarshalArgs(params.Arguments, &a0); err != nil { +- return nil, err +- } +- return s.AddTest(ctx, a0) +- case ApplyFix: +- var a0 ApplyFixArgs +- if err := UnmarshalArgs(params.Arguments, &a0); err != nil { +- return nil, err +- } +- return s.ApplyFix(ctx, a0) +- case Assembly: +- var a0 string +- var a1 string +- var a2 string +- if err := UnmarshalArgs(params.Arguments, &a0, &a1, &a2); err != nil { +- return nil, err +- } +- return nil, s.Assembly(ctx, a0, a1, a2) +- case ChangeSignature: +- var a0 ChangeSignatureArgs +- if err := UnmarshalArgs(params.Arguments, &a0); err != nil { +- return nil, err +- } +- return s.ChangeSignature(ctx, a0) +- case CheckUpgrades: +- var a0 CheckUpgradesArgs +- if err := UnmarshalArgs(params.Arguments, &a0); err != nil { +- return nil, err +- } +- return nil, s.CheckUpgrades(ctx, a0) +- case ClientOpenURL: +- var a0 string +- if err := UnmarshalArgs(params.Arguments, &a0); err != nil { +- return nil, err +- } +- return nil, s.ClientOpenURL(ctx, a0) +- case DiagnoseFiles: +- var a0 DiagnoseFilesArgs +- if err := UnmarshalArgs(params.Arguments, &a0); err != nil { +- return nil, err +- } +- return nil, s.DiagnoseFiles(ctx, a0) +- case Doc: +- var a0 DocArgs +- if err := UnmarshalArgs(params.Arguments, &a0); err != nil { +- return nil, err +- } +- return s.Doc(ctx, a0) +- case EditGoDirective: +- var a0 EditGoDirectiveArgs +- if err := UnmarshalArgs(params.Arguments, &a0); err != nil { +- return nil, err +- } +- return nil, s.EditGoDirective(ctx, a0) +- case ExtractToNewFile: +- var a0 protocol.Location +- if err := UnmarshalArgs(params.Arguments, &a0); err != nil { +- return nil, err +- } +- return nil, s.ExtractToNewFile(ctx, a0) +- case FetchVulncheckResult: +- var a0 URIArg +- if err := UnmarshalArgs(params.Arguments, &a0); err != nil { +- return nil, err +- } +- return s.FetchVulncheckResult(ctx, a0) +- case FreeSymbols: +- var a0 string +- var a1 protocol.Location +- if err := UnmarshalArgs(params.Arguments, &a0, &a1); err != nil { +- return nil, err +- } +- return nil, s.FreeSymbols(ctx, a0, a1) +- case GCDetails: +- var a0 protocol.DocumentURI +- if err := UnmarshalArgs(params.Arguments, &a0); err != nil { +- return nil, err +- } +- return nil, s.GCDetails(ctx, a0) +- case Generate: +- var a0 GenerateArgs +- if err := UnmarshalArgs(params.Arguments, &a0); err != nil { +- return nil, err +- } +- return nil, s.Generate(ctx, a0) +- case GoGetPackage: +- var a0 GoGetPackageArgs +- if err := UnmarshalArgs(params.Arguments, &a0); err != nil { +- return nil, err +- } +- return nil, s.GoGetPackage(ctx, a0) +- case LSP: +- var a0 LSPArgs +- if err := UnmarshalArgs(params.Arguments, &a0); err != nil { +- return nil, err +- } +- return s.LSP(ctx, a0) +- case ListImports: +- var a0 URIArg +- if err := UnmarshalArgs(params.Arguments, &a0); err != nil { +- return nil, err +- } +- return s.ListImports(ctx, a0) +- case ListKnownPackages: +- var a0 URIArg +- if err := UnmarshalArgs(params.Arguments, &a0); err != nil { +- return nil, err +- } +- return s.ListKnownPackages(ctx, a0) +- case MaybePromptForTelemetry: +- return nil, s.MaybePromptForTelemetry(ctx) +- case MemStats: +- return s.MemStats(ctx) +- case ModifyTags: +- var a0 ModifyTagsArgs +- if err := UnmarshalArgs(params.Arguments, &a0); err != nil { +- return nil, err +- } +- return nil, s.ModifyTags(ctx, a0) +- case Modules: +- var a0 ModulesArgs +- if err := 
UnmarshalArgs(params.Arguments, &a0); err != nil { +- return nil, err +- } +- return s.Modules(ctx, a0) +- case PackageSymbols: +- var a0 PackageSymbolsArgs +- if err := UnmarshalArgs(params.Arguments, &a0); err != nil { +- return nil, err +- } +- return s.PackageSymbols(ctx, a0) +- case Packages: +- var a0 PackagesArgs +- if err := UnmarshalArgs(params.Arguments, &a0); err != nil { +- return nil, err +- } +- return s.Packages(ctx, a0) +- case RegenerateCgo: +- var a0 URIArg +- if err := UnmarshalArgs(params.Arguments, &a0); err != nil { +- return nil, err +- } +- return nil, s.RegenerateCgo(ctx, a0) +- case RemoveDependency: +- var a0 RemoveDependencyArgs +- if err := UnmarshalArgs(params.Arguments, &a0); err != nil { +- return nil, err +- } +- return nil, s.RemoveDependency(ctx, a0) +- case ResetGoModDiagnostics: +- var a0 ResetGoModDiagnosticsArgs +- if err := UnmarshalArgs(params.Arguments, &a0); err != nil { +- return nil, err +- } +- return nil, s.ResetGoModDiagnostics(ctx, a0) +- case RunGoWorkCommand: +- var a0 RunGoWorkArgs +- if err := UnmarshalArgs(params.Arguments, &a0); err != nil { +- return nil, err +- } +- return nil, s.RunGoWorkCommand(ctx, a0) +- case RunGovulncheck: +- var a0 VulncheckArgs +- if err := UnmarshalArgs(params.Arguments, &a0); err != nil { +- return nil, err +- } +- return s.RunGovulncheck(ctx, a0) +- case RunTests: +- var a0 RunTestsArgs +- if err := UnmarshalArgs(params.Arguments, &a0); err != nil { +- return nil, err +- } +- return nil, s.RunTests(ctx, a0) +- case ScanImports: +- return nil, s.ScanImports(ctx) +- case SplitPackage: +- var a0 string +- var a1 string +- if err := UnmarshalArgs(params.Arguments, &a0, &a1); err != nil { +- return nil, err +- } +- return nil, s.SplitPackage(ctx, a0, a1) +- case StartDebugging: +- var a0 DebuggingArgs +- if err := UnmarshalArgs(params.Arguments, &a0); err != nil { +- return nil, err +- } +- return s.StartDebugging(ctx, a0) +- case StartProfile: +- var a0 StartProfileArgs +- if err := UnmarshalArgs(params.Arguments, &a0); err != nil { +- return nil, err +- } +- return s.StartProfile(ctx, a0) +- case StopProfile: +- var a0 StopProfileArgs +- if err := UnmarshalArgs(params.Arguments, &a0); err != nil { +- return nil, err +- } +- return s.StopProfile(ctx, a0) +- case Tidy: +- var a0 URIArgs +- if err := UnmarshalArgs(params.Arguments, &a0); err != nil { +- return nil, err +- } +- return nil, s.Tidy(ctx, a0) +- case UpdateGoSum: +- var a0 URIArgs +- if err := UnmarshalArgs(params.Arguments, &a0); err != nil { +- return nil, err +- } +- return nil, s.UpdateGoSum(ctx, a0) +- case UpgradeDependency: +- var a0 DependencyArgs +- if err := UnmarshalArgs(params.Arguments, &a0); err != nil { +- return nil, err +- } +- return nil, s.UpgradeDependency(ctx, a0) +- case Vendor: +- var a0 URIArg +- if err := UnmarshalArgs(params.Arguments, &a0); err != nil { +- return nil, err +- } +- return nil, s.Vendor(ctx, a0) +- case Views: +- return s.Views(ctx) +- case Vulncheck: +- var a0 VulncheckArgs +- if err := UnmarshalArgs(params.Arguments, &a0); err != nil { +- return nil, err +- } +- return s.Vulncheck(ctx, a0) +- case WorkspaceStats: +- return s.WorkspaceStats(ctx) +- } +- return nil, fmt.Errorf("unsupported command %q", params.Command) +-} +- +-func NewAddDependencyCommand(title string, a0 DependencyArgs) *protocol.Command { +- return &protocol.Command{ +- Title: title, +- Command: AddDependency.String(), +- Arguments: MustMarshalArgs(a0), +- } +-} +- +-func NewAddImportCommand(title string, a0 AddImportArgs) *protocol.Command 
{ +- return &protocol.Command{ +- Title: title, +- Command: AddImport.String(), +- Arguments: MustMarshalArgs(a0), +- } +-} +- +-func NewAddTelemetryCountersCommand(title string, a0 AddTelemetryCountersArgs) *protocol.Command { +- return &protocol.Command{ +- Title: title, +- Command: AddTelemetryCounters.String(), +- Arguments: MustMarshalArgs(a0), +- } +-} +- +-func NewAddTestCommand(title string, a0 protocol.Location) *protocol.Command { +- return &protocol.Command{ +- Title: title, +- Command: AddTest.String(), +- Arguments: MustMarshalArgs(a0), +- } +-} +- +-func NewApplyFixCommand(title string, a0 ApplyFixArgs) *protocol.Command { +- return &protocol.Command{ +- Title: title, +- Command: ApplyFix.String(), +- Arguments: MustMarshalArgs(a0), +- } +-} +- +-func NewAssemblyCommand(title string, a0 string, a1 string, a2 string) *protocol.Command { +- return &protocol.Command{ +- Title: title, +- Command: Assembly.String(), +- Arguments: MustMarshalArgs(a0, a1, a2), +- } +-} +- +-func NewChangeSignatureCommand(title string, a0 ChangeSignatureArgs) *protocol.Command { +- return &protocol.Command{ +- Title: title, +- Command: ChangeSignature.String(), +- Arguments: MustMarshalArgs(a0), +- } +-} +- +-func NewCheckUpgradesCommand(title string, a0 CheckUpgradesArgs) *protocol.Command { +- return &protocol.Command{ +- Title: title, +- Command: CheckUpgrades.String(), +- Arguments: MustMarshalArgs(a0), +- } +-} +- +-func NewClientOpenURLCommand(title string, a0 string) *protocol.Command { +- return &protocol.Command{ +- Title: title, +- Command: ClientOpenURL.String(), +- Arguments: MustMarshalArgs(a0), +- } +-} +- +-func NewDiagnoseFilesCommand(title string, a0 DiagnoseFilesArgs) *protocol.Command { +- return &protocol.Command{ +- Title: title, +- Command: DiagnoseFiles.String(), +- Arguments: MustMarshalArgs(a0), +- } +-} +- +-func NewDocCommand(title string, a0 DocArgs) *protocol.Command { +- return &protocol.Command{ +- Title: title, +- Command: Doc.String(), +- Arguments: MustMarshalArgs(a0), +- } +-} +- +-func NewEditGoDirectiveCommand(title string, a0 EditGoDirectiveArgs) *protocol.Command { +- return &protocol.Command{ +- Title: title, +- Command: EditGoDirective.String(), +- Arguments: MustMarshalArgs(a0), +- } +-} +- +-func NewExtractToNewFileCommand(title string, a0 protocol.Location) *protocol.Command { +- return &protocol.Command{ +- Title: title, +- Command: ExtractToNewFile.String(), +- Arguments: MustMarshalArgs(a0), +- } +-} +- +-func NewFetchVulncheckResultCommand(title string, a0 URIArg) *protocol.Command { +- return &protocol.Command{ +- Title: title, +- Command: FetchVulncheckResult.String(), +- Arguments: MustMarshalArgs(a0), +- } +-} +- +-func NewFreeSymbolsCommand(title string, a0 string, a1 protocol.Location) *protocol.Command { +- return &protocol.Command{ +- Title: title, +- Command: FreeSymbols.String(), +- Arguments: MustMarshalArgs(a0, a1), +- } +-} +- +-func NewGCDetailsCommand(title string, a0 protocol.DocumentURI) *protocol.Command { +- return &protocol.Command{ +- Title: title, +- Command: GCDetails.String(), +- Arguments: MustMarshalArgs(a0), +- } +-} +- +-func NewGenerateCommand(title string, a0 GenerateArgs) *protocol.Command { +- return &protocol.Command{ +- Title: title, +- Command: Generate.String(), +- Arguments: MustMarshalArgs(a0), +- } +-} +- +-func NewGoGetPackageCommand(title string, a0 GoGetPackageArgs) *protocol.Command { +- return &protocol.Command{ +- Title: title, +- Command: GoGetPackage.String(), +- Arguments: MustMarshalArgs(a0), +- } +-} +- 
+-func NewLSPCommand(title string, a0 LSPArgs) *protocol.Command { +- return &protocol.Command{ +- Title: title, +- Command: LSP.String(), +- Arguments: MustMarshalArgs(a0), +- } +-} +- +-func NewListImportsCommand(title string, a0 URIArg) *protocol.Command { +- return &protocol.Command{ +- Title: title, +- Command: ListImports.String(), +- Arguments: MustMarshalArgs(a0), +- } +-} +- +-func NewListKnownPackagesCommand(title string, a0 URIArg) *protocol.Command { +- return &protocol.Command{ +- Title: title, +- Command: ListKnownPackages.String(), +- Arguments: MustMarshalArgs(a0), +- } +-} +- +-func NewMaybePromptForTelemetryCommand(title string) *protocol.Command { +- return &protocol.Command{ +- Title: title, +- Command: MaybePromptForTelemetry.String(), +- Arguments: MustMarshalArgs(), +- } +-} +- +-func NewMemStatsCommand(title string) *protocol.Command { +- return &protocol.Command{ +- Title: title, +- Command: MemStats.String(), +- Arguments: MustMarshalArgs(), +- } +-} +- +-func NewModifyTagsCommand(title string, a0 ModifyTagsArgs) *protocol.Command { +- return &protocol.Command{ +- Title: title, +- Command: ModifyTags.String(), +- Arguments: MustMarshalArgs(a0), +- } +-} +- +-func NewModulesCommand(title string, a0 ModulesArgs) *protocol.Command { +- return &protocol.Command{ +- Title: title, +- Command: Modules.String(), +- Arguments: MustMarshalArgs(a0), +- } +-} +- +-func NewPackageSymbolsCommand(title string, a0 PackageSymbolsArgs) *protocol.Command { +- return &protocol.Command{ +- Title: title, +- Command: PackageSymbols.String(), +- Arguments: MustMarshalArgs(a0), +- } +-} +- +-func NewPackagesCommand(title string, a0 PackagesArgs) *protocol.Command { +- return &protocol.Command{ +- Title: title, +- Command: Packages.String(), +- Arguments: MustMarshalArgs(a0), +- } +-} +- +-func NewRegenerateCgoCommand(title string, a0 URIArg) *protocol.Command { +- return &protocol.Command{ +- Title: title, +- Command: RegenerateCgo.String(), +- Arguments: MustMarshalArgs(a0), +- } +-} +- +-func NewRemoveDependencyCommand(title string, a0 RemoveDependencyArgs) *protocol.Command { +- return &protocol.Command{ +- Title: title, +- Command: RemoveDependency.String(), +- Arguments: MustMarshalArgs(a0), +- } +-} +- +-func NewResetGoModDiagnosticsCommand(title string, a0 ResetGoModDiagnosticsArgs) *protocol.Command { +- return &protocol.Command{ +- Title: title, +- Command: ResetGoModDiagnostics.String(), +- Arguments: MustMarshalArgs(a0), +- } +-} +- +-func NewRunGoWorkCommandCommand(title string, a0 RunGoWorkArgs) *protocol.Command { +- return &protocol.Command{ +- Title: title, +- Command: RunGoWorkCommand.String(), +- Arguments: MustMarshalArgs(a0), +- } +-} +- +-func NewRunGovulncheckCommand(title string, a0 VulncheckArgs) *protocol.Command { +- return &protocol.Command{ +- Title: title, +- Command: RunGovulncheck.String(), +- Arguments: MustMarshalArgs(a0), +- } +-} +- +-func NewRunTestsCommand(title string, a0 RunTestsArgs) *protocol.Command { +- return &protocol.Command{ +- Title: title, +- Command: RunTests.String(), +- Arguments: MustMarshalArgs(a0), +- } +-} +- +-func NewScanImportsCommand(title string) *protocol.Command { +- return &protocol.Command{ +- Title: title, +- Command: ScanImports.String(), +- Arguments: MustMarshalArgs(), +- } +-} +- +-func NewSplitPackageCommand(title string, a0 string, a1 string) *protocol.Command { +- return &protocol.Command{ +- Title: title, +- Command: SplitPackage.String(), +- Arguments: MustMarshalArgs(a0, a1), +- } +-} +- +-func 
NewStartDebuggingCommand(title string, a0 DebuggingArgs) *protocol.Command { +- return &protocol.Command{ +- Title: title, +- Command: StartDebugging.String(), +- Arguments: MustMarshalArgs(a0), +- } +-} +- +-func NewStartProfileCommand(title string, a0 StartProfileArgs) *protocol.Command { +- return &protocol.Command{ +- Title: title, +- Command: StartProfile.String(), +- Arguments: MustMarshalArgs(a0), +- } +-} +- +-func NewStopProfileCommand(title string, a0 StopProfileArgs) *protocol.Command { +- return &protocol.Command{ +- Title: title, +- Command: StopProfile.String(), +- Arguments: MustMarshalArgs(a0), +- } +-} +- +-func NewTidyCommand(title string, a0 URIArgs) *protocol.Command { +- return &protocol.Command{ +- Title: title, +- Command: Tidy.String(), +- Arguments: MustMarshalArgs(a0), +- } +-} +- +-func NewUpdateGoSumCommand(title string, a0 URIArgs) *protocol.Command { +- return &protocol.Command{ +- Title: title, +- Command: UpdateGoSum.String(), +- Arguments: MustMarshalArgs(a0), +- } +-} +- +-func NewUpgradeDependencyCommand(title string, a0 DependencyArgs) *protocol.Command { +- return &protocol.Command{ +- Title: title, +- Command: UpgradeDependency.String(), +- Arguments: MustMarshalArgs(a0), +- } +-} +- +-func NewVendorCommand(title string, a0 URIArg) *protocol.Command { +- return &protocol.Command{ +- Title: title, +- Command: Vendor.String(), +- Arguments: MustMarshalArgs(a0), +- } +-} +- +-func NewViewsCommand(title string) *protocol.Command { +- return &protocol.Command{ +- Title: title, +- Command: Views.String(), +- Arguments: MustMarshalArgs(), +- } +-} +- +-func NewVulncheckCommand(title string, a0 VulncheckArgs) *protocol.Command { +- return &protocol.Command{ +- Title: title, +- Command: Vulncheck.String(), +- Arguments: MustMarshalArgs(a0), +- } +-} +- +-func NewWorkspaceStatsCommand(title string) *protocol.Command { +- return &protocol.Command{ +- Title: title, +- Command: WorkspaceStats.String(), +- Arguments: MustMarshalArgs(), +- } +-} +diff -urN a/gopls/internal/protocol/command/commandmeta/meta.go b/gopls/internal/protocol/command/commandmeta/meta.go +--- a/gopls/internal/protocol/command/commandmeta/meta.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/protocol/command/commandmeta/meta.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,260 +0,0 @@ +-// Copyright 2021 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-// Package commandmeta provides metadata about LSP commands, by +-// statically analyzing the command.Interface type. +-// +-// It is used to generate JSONRPC dispatch and marshaling. +-// TODO(adonovan): combine with gopls/internal/protocol/command/gen. +-package commandmeta +- +-import ( +- "fmt" +- "go/ast" +- "go/token" +- "go/types" +- "reflect" +- "strings" +- "unicode" +- +- "golang.org/x/tools/go/ast/astutil" +- "golang.org/x/tools/go/packages" +- // (does not depend on gopls itself) +-) +- +-// A Command describes a workspace/executeCommand extension command. +-type Command struct { +- MethodName string // e.g. "RunTests" +- Name string // e.g. "gopls.run_tests" +- Title string +- Doc string +- Args []*Field +- Result *Field +-} +- +-type Field struct { +- Name string +- Doc string +- JSONTag string +- Type types.Type +- FieldMod string +- // In some circumstances, we may want to recursively load additional field +- // descriptors for fields of struct types, documenting their internals. 
+- Fields []*Field +-} +- +-// Load returns a description of the workspace/executeCommand commands +-// supported by gopls based on static analysis of the command.Interface type. +-func Load() ([]*Command, error) { +- pkgs, err := packages.Load( +- &packages.Config{ +- Mode: packages.NeedTypes | packages.NeedTypesInfo | packages.NeedSyntax | packages.NeedImports | packages.NeedDeps, +- BuildFlags: []string{"-tags=generate"}, +- }, +- "golang.org/x/tools/gopls/internal/protocol/command", +- ) +- if err != nil { +- return nil, fmt.Errorf("packages.Load: %v", err) +- } +- pkg := pkgs[0] +- if len(pkg.Errors) > 0 { +- return nil, pkg.Errors[0] +- } +- +- // command.Interface +- obj := pkg.Types.Scope().Lookup("Interface").Type().Underlying().(*types.Interface) +- +- // Load command metadata corresponding to each interface method. +- var commands []*Command +- loader := fieldLoader{make(map[types.Object]*Field)} +- for i := 0; i < obj.NumMethods(); i++ { +- m := obj.Method(i) +- c, err := loader.loadMethod(pkg, m) +- if err != nil { +- return nil, fmt.Errorf("loading %s: %v", m.Name(), err) +- } +- commands = append(commands, c) +- } +- return commands, nil +-} +- +-// fieldLoader loads field information, memoizing results to prevent infinite +-// recursion. +-type fieldLoader struct { +- loaded map[types.Object]*Field +-} +- +-var universeError = types.Universe.Lookup("error").Type() +- +-func (l *fieldLoader) loadMethod(pkg *packages.Package, m *types.Func) (*Command, error) { +- node, err := findField(pkg, m.Pos()) +- if err != nil { +- return nil, err +- } +- title, doc := splitDoc(node.Doc.Text()) +- c := &Command{ +- MethodName: m.Name(), +- Name: lspName(m.Name()), +- Doc: doc, +- Title: title, +- } +- sig := m.Type().Underlying().(*types.Signature) +- rlen := sig.Results().Len() +- if rlen > 2 || rlen == 0 { +- return nil, fmt.Errorf("must have 1 or 2 returns, got %d", rlen) +- } +- finalResult := sig.Results().At(rlen - 1) +- if !types.Identical(finalResult.Type(), universeError) { +- return nil, fmt.Errorf("final return must be error") +- } +- if rlen == 2 { +- obj := sig.Results().At(0) +- c.Result, err = l.loadField(pkg, obj, "", "") +- if err != nil { +- return nil, err +- } +- } +- for i := 0; i < sig.Params().Len(); i++ { +- obj := sig.Params().At(i) +- fld, err := l.loadField(pkg, obj, "", "") +- if err != nil { +- return nil, err +- } +- if i == 0 { +- // Lazy check that the first argument is a context. We could relax this, +- // but then the generated code gets more complicated. +- if named, ok := types.Unalias(fld.Type).(*types.Named); !ok || named.Obj().Name() != "Context" || named.Obj().Pkg().Path() != "context" { +- return nil, fmt.Errorf("first method parameter must be context.Context") +- } +- // Skip the context argument, as it is implied. +- continue +- } +- c.Args = append(c.Args, fld) +- } +- return c, nil +-} +- +-func (l *fieldLoader) loadField(pkg *packages.Package, obj *types.Var, doc, tag string) (*Field, error) { +- if existing, ok := l.loaded[obj]; ok { +- return existing, nil +- } +- fld := &Field{ +- Name: obj.Name(), +- Doc: strings.TrimSpace(doc), +- Type: obj.Type(), +- JSONTag: reflect.StructTag(tag).Get("json"), +- } +- +- // This must be done here to handle nested types, such as: +- // +- // type Test struct { Subtests []Test } +- l.loaded[obj] = fld +- +- under := fld.Type.Underlying() +- // Quick-and-dirty handling for various underlying types. 
+- switch p := under.(type) { +- case *types.Pointer: +- under = p.Elem().Underlying() +- case *types.Array: +- under = p.Elem().Underlying() +- fld.FieldMod = fmt.Sprintf("[%d]", p.Len()) +- case *types.Slice: +- under = p.Elem().Underlying() +- fld.FieldMod = "[]" +- } +- +- if s, ok := under.(*types.Struct); ok { +- for i := 0; i < s.NumFields(); i++ { +- obj2 := s.Field(i) +- pkg2 := pkg +- if obj2.Pkg() != pkg2.Types { +- pkg2, ok = pkg.Imports[obj2.Pkg().Path()] +- if !ok { +- return nil, fmt.Errorf("missing import for %q: %q", pkg.ID, obj2.Pkg().Path()) +- } +- } +- node, err := findField(pkg2, obj2.Pos()) +- if err != nil { +- return nil, err +- } +- tag := s.Tag(i) +- structField, err := l.loadField(pkg2, obj2, node.Doc.Text(), tag) +- if err != nil { +- return nil, err +- } +- fld.Fields = append(fld.Fields, structField) +- } +- } +- return fld, nil +-} +- +-// splitDoc parses a command doc string to separate the title from normal +-// documentation. +-// +-// The doc comment should be of the form: "MethodName: Title\nDocumentation" +-func splitDoc(text string) (title, doc string) { +- docParts := strings.SplitN(text, "\n", 2) +- titleParts := strings.SplitN(docParts[0], ":", 2) +- if len(titleParts) > 1 { +- title = strings.TrimSpace(titleParts[1]) +- } +- if len(docParts) > 1 { +- doc = strings.TrimSpace(docParts[1]) +- } +- return title, doc +-} +- +-// lspName returns the normalized command name to use in the LSP. +-func lspName(methodName string) string { +- words := splitCamel(methodName) +- for i := range words { +- words[i] = strings.ToLower(words[i]) +- } +- return "gopls." + strings.Join(words, "_") +-} +- +-// splitCamel splits s into words, according to camel-case word boundaries. +-// Initialisms are grouped as a single word. +-// +-// For example: +-// +-// "RunTests" -> []string{"Run", "Tests"} +-// "ClientOpenURL" -> []string{"Client", "Open", "URL"} +-func splitCamel(s string) []string { +- var words []string +- for len(s) > 0 { +- last := max(strings.LastIndexFunc(s, unicode.IsUpper), 0) +- if last == len(s)-1 { +- // Group initialisms as a single word. +- last = 1 + strings.LastIndexFunc(s[:last], func(r rune) bool { return !unicode.IsUpper(r) }) +- } +- words = append(words, s[last:]) +- s = s[:last] +- } +- for i := 0; i < len(words)/2; i++ { +- j := len(words) - i - 1 +- words[i], words[j] = words[j], words[i] +- } +- return words +-} +- +-// findField finds the struct field or interface method positioned at pos, +-// within the AST. +-func findField(pkg *packages.Package, pos token.Pos) (*ast.Field, error) { +- fset := pkg.Fset +- var file *ast.File +- for _, f := range pkg.Syntax { +- if fset.File(f.FileStart).Name() == fset.File(pos).Name() { +- file = f +- break +- } +- } +- if file == nil { +- return nil, fmt.Errorf("no file for pos %v", pos) +- } +- path, _ := astutil.PathEnclosingInterval(file, pos, pos) +- // This is fragile, but in the cases we care about, the field will be in +- // path[1]. +- return path[1].(*ast.Field), nil +-} +diff -urN a/gopls/internal/protocol/command/gen/gen.go b/gopls/internal/protocol/command/gen/gen.go +--- a/gopls/internal/protocol/command/gen/gen.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/protocol/command/gen/gen.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,202 +0,0 @@ +-// Copyright 2021 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. 
+- +-// Package gen is used to generate command bindings from the gopls command +-// interface. +-package gen +- +-import ( +- "bytes" +- "fmt" +- "go/types" +- "log" +- "text/template" +- +- "golang.org/x/tools/gopls/internal/protocol/command/commandmeta" +- "golang.org/x/tools/internal/imports" +- "golang.org/x/tools/internal/typesinternal" +-) +- +-const src = `// Copyright 2024 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-// Don't include this file during code generation, or it will break the build +-// if existing interface methods have been modified. +-//go:build !generate +-// +build !generate +- +-// Code generated by gen.go from gopls/internal/protocol/command. DO NOT EDIT. +- +-package command +- +-import ( +- {{range $k, $v := .Imports -}} +- "{{$k}}" +- {{end}} +-) +- +-// Symbolic names for gopls commands, corresponding to methods of [Interface]. +-// +-// The string value is used in the Command field of protocol.Command. +-// These commands may be obtained from a CodeLens or CodeAction request +-// and executed by an ExecuteCommand request. +-const ( +-{{- range .Commands}} +- {{.MethodName}} Command = "{{.Name}}" +-{{- end}} +-) +- +-var Commands = []Command { +-{{- range .Commands}} +- {{.MethodName}}, +-{{- end}} +-} +- +-func Dispatch(ctx context.Context, params *protocol.ExecuteCommandParams, s Interface) (any, error) { +- switch Command(params.Command) { +- {{- range .Commands}} +- case {{.MethodName}}: +- {{- if .Args -}} +- {{- range $i, $v := .Args}} +- var a{{$i}} {{typeString $v.Type}} +- {{- end}} +- if err := UnmarshalArgs(params.Arguments{{range $i, $v := .Args}}, &a{{$i}}{{end}}); err != nil { +- return nil, err +- } +- {{end -}} +- return {{if not .Result}}nil, {{end}}s.{{.MethodName}}(ctx{{range $i, $v := .Args}}, a{{$i}}{{end}}) +- {{- end}} +- } +- return nil, fmt.Errorf("unsupported command %q", params.Command) +-} +-{{- range .Commands}} +- +-{{if fallible .Args}} +-func New{{.MethodName}}Command(title string, {{range $i, $v := .Args}}{{if $i}}, {{end}}a{{$i}} {{typeString $v.Type}}{{end}}) (*protocol.Command, error) { +- args, err := MarshalArgs({{range $i, $v := .Args}}{{if $i}}, {{end}}a{{$i}}{{end}}) +- if err != nil { +- return nil, err +- } +- return &protocol.Command{ +- Title: title, +- Command: {{.MethodName}}.String(), +- Arguments: args, +- }, nil +-} +-{{else}} +-func New{{.MethodName}}Command(title string, {{range $i, $v := .Args}}{{if $i}}, {{end}}a{{$i}} {{typeString $v.Type}}{{end}}) *protocol.Command { +- return &protocol.Command{ +- Title: title, +- Command: {{.MethodName}}.String(), +- Arguments: MustMarshalArgs({{range $i, $v := .Args}}{{if $i}}, {{end}}a{{$i}}{{end}}), +- } +-} +-{{end}} +- +-{{end}} +-` +- +-type data struct { +- Imports map[string]bool +- Commands []*commandmeta.Command +-} +- +-// Generate computes the new contents of ../command_gen.go from a +-// static analysis of the command.Interface type. 
+-func Generate() ([]byte, error) { +- cmds, err := commandmeta.Load() +- if err != nil { +- return nil, fmt.Errorf("loading command data: %v", err) +- } +- const thispkg = "golang.org/x/tools/gopls/internal/protocol/command" +- qual := func(p *types.Package) string { +- if p.Path() == thispkg { +- return "" +- } +- return p.Name() +- } +- tmpl, err := template.New("").Funcs(template.FuncMap{ +- "typeString": func(t types.Type) string { +- return types.TypeString(t, qual) +- }, +- "fallible": func(args []*commandmeta.Field) bool { +- var fallible func(types.Type) bool +- fallible = func(t types.Type) bool { +- switch t := t.Underlying().(type) { +- case *types.Basic: +- return false +- case *types.Slice: +- return fallible(t.Elem()) +- case *types.Struct: +- for i := 0; i < t.NumFields(); i++ { +- if fallible(t.Field(i).Type()) { +- return true +- } +- } +- return false +- } +- // Assume all other types are fallible for now: +- log.Println("Command.Args has fallible type", t) +- return true +- } +- for _, arg := range args { +- if fallible(arg.Type) { +- return true +- } +- } +- return false +- }, +- }).Parse(src) +- if err != nil { +- return nil, err +- } +- d := data{ +- Commands: cmds, +- Imports: map[string]bool{ +- "context": true, +- "fmt": true, +- "golang.org/x/tools/gopls/internal/protocol": true, +- }, +- } +- for _, c := range d.Commands { +- for _, arg := range c.Args { +- pth := pkgPath(arg.Type) +- if pth != "" && pth != thispkg { +- d.Imports[pth] = true +- } +- } +- if c.Result != nil { +- pth := pkgPath(c.Result.Type) +- if pth != "" && pth != thispkg { +- d.Imports[pth] = true +- } +- } +- } +- +- var buf bytes.Buffer +- if err := tmpl.Execute(&buf, d); err != nil { +- return nil, fmt.Errorf("executing: %v", err) +- } +- +- opts := &imports.Options{ +- AllErrors: true, +- FormatOnly: true, +- Comments: true, +- } +- content, err := imports.Process("", buf.Bytes(), opts) +- if err != nil { +- return nil, fmt.Errorf("goimports: %v", err) +- } +- return content, nil +-} +- +-func pkgPath(t types.Type) string { +- if tname := typesinternal.TypeNameFor(t); tname != nil { +- if pkg := tname.Pkg(); pkg != nil { +- return pkg.Path() +- } +- } +- return "" +-} +diff -urN a/gopls/internal/protocol/command/generate.go b/gopls/internal/protocol/command/generate.go +--- a/gopls/internal/protocol/command/generate.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/protocol/command/generate.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,27 +0,0 @@ +-// Copyright 2024 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-//go:build ignore +-// +build ignore +- +-// The generate command generates command_gen.go from a combination of +-// static and dynamic analysis of the command package. +-package main +- +-import ( +- "log" +- "os" +- +- "golang.org/x/tools/gopls/internal/protocol/command/gen" +-) +- +-func main() { +- content, err := gen.Generate() +- if err != nil { +- log.Fatal(err) +- } +- if err := os.WriteFile("command_gen.go", content, 0644); err != nil { +- log.Fatal(err) +- } +-} +diff -urN a/gopls/internal/protocol/command/interface.go b/gopls/internal/protocol/command/interface.go +--- a/gopls/internal/protocol/command/interface.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/protocol/command/interface.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,878 +0,0 @@ +-// Copyright 2021 The Go Authors. All rights reserved. 
+-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-//go:generate go run -tags=generate generate.go +- +-// Package command defines the interface provided by gopls for the +-// workspace/executeCommand LSP request. +-// +-// This interface is fully specified by the Interface type, provided it +-// conforms to the restrictions outlined in its doc string. +-// +-// Bindings for server-side command dispatch and client-side serialization are +-// also provided by this package, via code generation. +-package command +- +-import ( +- "context" +- "encoding/json" +- "fmt" +- +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/gopls/internal/vulncheck" +-) +- +-// Interface defines the interface gopls exposes for the +-// workspace/executeCommand request. +-// +-// This interface is used to generate logic for marshaling, +-// unmarshaling, and dispatch, so it has some additional restrictions: +-// +-// 1. All method arguments must be JSON serializable. +-// +-// 2. Methods must return either error or (T, error), where T is a +-// JSON serializable type. +-// +-// 3. The first line of the doc string is special. +-// Everything after the colon is considered the command 'Title'. +-// For example: +-// +-// Command: Capitalized verb phrase with no period +-// +-// Longer description here... +-type Interface interface { +- // ApplyFix: Apply a fix +- // +- // Applies a fix to a region of source code. +- ApplyFix(context.Context, ApplyFixArgs) (*protocol.WorkspaceEdit, error) +- +- // RunTests: Run tests +- // +- // Runs `go test` for a specific set of test or benchmark functions. +- // +- // This command is asynchronous; clients must wait for the 'end' progress notification. +- RunTests(context.Context, RunTestsArgs) error +- +- // Generate: Run go generate +- // +- // Runs `go generate` for a given directory. +- Generate(context.Context, GenerateArgs) error +- +- // Doc: Browse package documentation. +- // +- // Opens the Go package documentation page for the current +- // package in a browser. +- Doc(context.Context, DocArgs) (protocol.URI, error) +- +- // SplitPackage: organize a package into two or more components +- // +- // Opens the "split package" tool in a web browser. +- SplitPackage(ctx context.Context, viewID, packageID string) error +- +- // RegenerateCgo: Regenerate cgo +- // +- // Regenerates cgo definitions. +- RegenerateCgo(context.Context, URIArg) error +- +- // Tidy: Run go mod tidy +- // +- // Runs `go mod tidy` for a module. +- Tidy(context.Context, URIArgs) error +- +- // Vendor: Run go mod vendor +- // +- // Runs `go mod vendor` for a module. +- Vendor(context.Context, URIArg) error +- +- // EditGoDirective: Run go mod edit -go=version +- // +- // Runs `go mod edit -go=version` for a module. +- EditGoDirective(context.Context, EditGoDirectiveArgs) error +- +- // UpdateGoSum: Update go.sum +- // +- // Updates the go.sum file for a module. +- UpdateGoSum(context.Context, URIArgs) error +- +- // CheckUpgrades: Check for upgrades +- // +- // Checks for module upgrades. +- CheckUpgrades(context.Context, CheckUpgradesArgs) error +- +- // AddDependency: Add a dependency +- // +- // Adds a dependency to the go.mod file for a module. +- AddDependency(context.Context, DependencyArgs) error +- +- // UpgradeDependency: Upgrade a dependency +- // +- // Upgrades a dependency in the go.mod file for a module. 
+- UpgradeDependency(context.Context, DependencyArgs) error +- +- // RemoveDependency: Remove a dependency +- // +- // Removes a dependency from the go.mod file of a module. +- RemoveDependency(context.Context, RemoveDependencyArgs) error +- +- // ResetGoModDiagnostics: Reset go.mod diagnostics +- // +- // Reset diagnostics in the go.mod file of a module. +- ResetGoModDiagnostics(context.Context, ResetGoModDiagnosticsArgs) error +- +- // GoGetPackage: 'go get' a package +- // +- // Runs `go get` to fetch a package. +- GoGetPackage(context.Context, GoGetPackageArgs) error +- +- // GCDetails: Toggle display of compiler optimization details +- // +- // Toggle the per-package flag that causes Go compiler +- // optimization decisions to be reported as diagnostics. +- // +- // (The name is a legacy of a time when the Go compiler was +- // known as "gc". Renaming the command would break custom +- // client-side logic in VS Code.) +- GCDetails(context.Context, protocol.DocumentURI) error +- +- // LSP is a command that functions as a generic dispatcher, allowing clients +- // to execute any LSP RPC through the "workspace/executeCommand" request. +- // +- // This serves two primary purposes: +- // +- // 1. It provides a unified endpoint for clients that are restricted from +- // making arbitrary LSP calls directly, giving them full access to the +- // server's capabilities. +- // +- // 2. It allows the client and server to extend the standard protocol. A +- // client can send custom parameters that are not part of the official +- // LSP, enabling richer functionality. +- // +- // The command takes the target LSP method name and its parameters as a +- // [json.RawMessage], routing the call to the appropriate internal handler. +- LSP(context.Context, LSPArgs) (any, error) +- +- // ListKnownPackages: List known packages +- // +- // Retrieve a list of packages that are importable from the given URI. +- ListKnownPackages(context.Context, URIArg) (ListKnownPackagesResult, error) +- +- // ListImports: List imports of a file and its package +- // +- // Retrieve a list of imports in the given Go file, and the package it +- // belongs to. +- ListImports(context.Context, URIArg) (ListImportsResult, error) +- +- // AddImport: Add an import +- // +- // Ask the server to add an import path to a given Go file. The method will +- // call applyEdit on the client so that clients don't have to apply the edit +- // themselves. +- AddImport(context.Context, AddImportArgs) error +- +- // ExtractToNewFile: Move selected declarations to a new file +- // +- // Used by the code action of the same name. +- ExtractToNewFile(context.Context, protocol.Location) error +- +- // StartDebugging: Start the gopls debug server +- // +- // Start the gopls debug server if it isn't running, and return the debug +- // address. +- StartDebugging(context.Context, DebuggingArgs) (DebuggingResult, error) +- +- // StartProfile: Start capturing a profile of gopls' execution +- // +- // Start a new pprof profile. Before using the resulting file, profiling must +- // be stopped with a corresponding call to StopProfile. +- // +- // This command is intended for internal use only, by the gopls benchmark +- // runner. +- StartProfile(context.Context, StartProfileArgs) (StartProfileResult, error) +- +- // StopProfile: Stop an ongoing profile +- // +- // This command is intended for internal use only, by the gopls benchmark +- // runner. 
+- StopProfile(context.Context, StopProfileArgs) (StopProfileResult, error) +- +- // GoVulncheck: run vulncheck synchronously. +- // +- // Run vulnerability check (`govulncheck`). +- // +- // This command is synchronous, and returns the govulncheck result. +- Vulncheck(context.Context, VulncheckArgs) (VulncheckResult, error) +- +- // RunGovulncheck: Run vulncheck asynchronously. +- // +- // Run vulnerability check (`govulncheck`). +- // +- // This command is asynchronous; clients must wait for the 'end' progress +- // notification and then retrieve results using gopls.fetch_vulncheck_result. +- // +- // Deprecated: clients should call gopls.vulncheck instead, which returns the +- // actual vulncheck result. +- RunGovulncheck(context.Context, VulncheckArgs) (RunVulncheckResult, error) +- +- // FetchVulncheckResult: Get known vulncheck result +- // +- // Fetch the result of latest vulnerability check (`govulncheck`). +- // +- // Deprecated: clients should call gopls.vulncheck instead, which returns the +- // actual vulncheck result. +- FetchVulncheckResult(context.Context, URIArg) (map[protocol.DocumentURI]*vulncheck.Result, error) +- +- // MemStats: Fetch memory statistics +- // +- // Call runtime.GC multiple times and return memory statistics as reported by +- // runtime.MemStats. +- // +- // This command is used for benchmarking, and may change in the future. +- MemStats(context.Context) (MemStatsResult, error) +- +- // WorkspaceStats: Fetch workspace statistics +- // +- // Query statistics about workspace builds, modules, packages, and files. +- // +- // This command is intended for internal use only, by the gopls stats +- // command. +- WorkspaceStats(context.Context) (WorkspaceStatsResult, error) +- +- // RunGoWorkCommand: Run `go work [args...]`, and apply the resulting go.work +- // edits to the current go.work file +- RunGoWorkCommand(context.Context, RunGoWorkArgs) error +- +- // AddTelemetryCounters: Update the given telemetry counters +- // +- // Gopls will prepend "fwd/" to all the counters updated using this command +- // to avoid conflicts with other counters gopls collects. +- AddTelemetryCounters(context.Context, AddTelemetryCountersArgs) error +- +- // AddTest: add test for the selected function +- AddTest(context.Context, protocol.Location) (*protocol.WorkspaceEdit, error) +- +- // MaybePromptForTelemetry: Prompt user to enable telemetry +- // +- // Checks for the right conditions, and then prompts the user +- // to ask if they want to enable Go telemetry uploading. If +- // the user responds 'Yes', the telemetry mode is set to "on". +- MaybePromptForTelemetry(context.Context) error +- +- // ChangeSignature: Perform a "change signature" refactoring +- // +- // This command is experimental, currently only supporting parameter removal. +- // Its signature will certainly change in the future (pun intended). +- ChangeSignature(context.Context, ChangeSignatureArgs) (*protocol.WorkspaceEdit, error) +- +- // DiagnoseFiles: Cause server to publish diagnostics for the specified files. +- // +- // This command is needed by the 'gopls {check,fix}' CLI subcommands. +- DiagnoseFiles(context.Context, DiagnoseFilesArgs) error +- +- // Views: List current Views on the server. +- // +- // This command is intended for use by gopls tests only. +- Views(context.Context) ([]View, error) +- +- // FreeSymbols: Browse free symbols referenced by the selection in a browser. +- // +- // This command is a query over a selected range of Go source +- // code. 
It reports the set of "free" symbols of the +- // selection: the set of symbols that are referenced within +- // the selection but are declared outside of it. This +- // information is useful for understanding at a glance what a +- // block of code depends on, perhaps as a precursor to +- // extracting it into a separate function. +- FreeSymbols(ctx context.Context, viewID string, loc protocol.Location) error +- +- // Assembly: Browse assembly listing of current function in a browser. +- // +- // This command opens a web-based disassembly listing of the +- // specified function symbol (plus any nested lambdas and defers). +- // The machine architecture is determined by the view. +- Assembly(_ context.Context, viewID, packageID, symbol string) error +- +- // ClientOpenURL: Request that the client open a URL in a browser. +- ClientOpenURL(_ context.Context, url string) error +- +- // ScanImports: force a synchronous scan of the imports cache. +- // +- // This command is intended for use by gopls tests only. +- ScanImports(context.Context) error +- +- // Packages: Return information about packages +- // +- // This command returns an empty result if the specified files +- // or directories are not associated with any Views on the +- // server yet. +- Packages(context.Context, PackagesArgs) (PackagesResult, error) +- +- // Modules: Return information about modules within a directory +- // +- // This command returns an empty result if there is no module, or if module +- // mode is disabled. Modules will not cause any new views to be loaded and +- // will only return modules associated with views that have already been +- // loaded, regardless of how it is called. Given current usage (by the +- // language server client), there should never be a case where Modules is +- // called on a path that has not already been loaded. +- Modules(context.Context, ModulesArgs) (ModulesResult, error) +- +- // PackageSymbols: Return information about symbols in the given file's package. +- PackageSymbols(context.Context, PackageSymbolsArgs) (PackageSymbolsResult, error) +- +- // ModifyTags: Add or remove struct tags on a given node. +- ModifyTags(context.Context, ModifyTagsArgs) error +-} +- +-type RunTestsArgs struct { +- // The test file containing the tests to run. +- URI protocol.DocumentURI +- +- // Specific test names to run, e.g. TestFoo. +- Tests []string +- +- // Specific benchmarks to run, e.g. BenchmarkFoo. +- Benchmarks []string +-} +- +-type GenerateArgs struct { +- // URI for the directory to generate. +- Dir protocol.DocumentURI +- +- // Whether to generate recursively (go generate ./...) +- Recursive bool +-} +- +-type DocArgs struct { +- Location protocol.Location +- ShowDocument bool // in addition to returning the URL, send showDocument +-} +- +-// TODO(rFindley): document the rest of these once the docgen is fleshed out. +- +-type ApplyFixArgs struct { +- // The name of the fix to apply. +- // +- // For fixes suggested by analyzers, this is a string constant +- // advertised by the analyzer that matches the Category of +- // the analysis.Diagnostic with a SuggestedFix containing no edits. +- // +- // For fixes suggested by code actions, this is a string agreed +- // upon by the code action and golang.ApplyFix. +- Fix string +- +- // The portion of the document to fix. +- Location protocol.Location +- +- // Whether to resolve and return the edits. +- ResolveEdits bool +-} +- +-type URIArg struct { +- // The file URI. 
+- URI protocol.DocumentURI +-} +- +-type URIArgs struct { +- // The file URIs. +- URIs []protocol.DocumentURI +-} +- +-type CheckUpgradesArgs struct { +- // The go.mod file URI. +- URI protocol.DocumentURI +- // The modules to check. +- Modules []string +-} +- +-type DependencyArgs struct { +- // The go.mod file URI. +- URI protocol.DocumentURI +- // Additional args to pass to the go command. +- GoCmdArgs []string +- // Whether to add a require directive. +- AddRequire bool +-} +- +-type RemoveDependencyArgs struct { +- // The go.mod file URI. +- URI protocol.DocumentURI +- // The module path to remove. +- ModulePath string +- // If the module is tidied apart from the one unused diagnostic, we can +- // run `go get module@none`, and then run `go mod tidy`. Otherwise, we +- // must make textual edits. +- OnlyDiagnostic bool +-} +- +-type EditGoDirectiveArgs struct { +- // Any document URI within the relevant module. +- URI protocol.DocumentURI +- // The version to pass to `go mod edit -go`. +- Version string +-} +- +-type GoGetPackageArgs struct { +- // Any document URI within the relevant module. +- URI protocol.DocumentURI +- // The package to go get. +- Pkg string +- AddRequire bool +-} +- +-type AddImportArgs struct { +- // ImportPath is the target import path that should +- // be added to the URI file +- ImportPath string +- // URI is the file that the ImportPath should be +- // added to +- URI protocol.DocumentURI +-} +- +-type ListKnownPackagesResult struct { +- // Packages is a list of packages relative +- // to the URIArg passed by the command request. +- // In other words, it omits paths that are already +- // imported or cannot be imported due to compiler +- // restrictions. +- Packages []string +-} +- +-type ListImportsResult struct { +- // Imports is a list of imports in the requested file. +- Imports []FileImport +- +- // PackageImports is a list of all imports in the requested file's package. +- PackageImports []PackageImport +-} +- +-type FileImport struct { +- // Path is the import path of the import. +- Path string +- // Name is the name of the import, e.g. `foo` in `import foo "strings"`. +- Name string +-} +- +-type PackageImport struct { +- // Path is the import path of the import. +- Path string +-} +- +-type DebuggingArgs struct { +- // Optional: the address (including port) for the debug server to listen on. +- // If not provided, the debug server will bind to "localhost:0", and the +- // full debug URL will be contained in the result. +- // +- // If there is more than one gopls instance along the serving path (i.e. you +- // are using a daemon), each gopls instance will attempt to start debugging. +- // If Addr specifies a port, only the daemon will be able to bind to that +- // port, and each intermediate gopls instance will fail to start debugging. +- // For this reason it is recommended not to specify a port (or equivalently, +- // to specify ":0"). +- // +- // If the server was already debugging this field has no effect, and the +- // result will contain the previously configured debug URL(s). +- Addr string +-} +- +-type DebuggingResult struct { +- // The URLs to use to access the debug servers, for all gopls instances in +- // the serving path. For the common case of a single gopls instance (i.e. no +- // daemon), this will be exactly one address. +- // +- // In the case of one or more gopls instances forwarding the LSP to a daemon, +- // URLs will contain debug addresses for each server in the serving path, in +- // serving order. 
The daemon debug address will be the last entry in the +- // slice. If any intermediate gopls instance fails to start debugging, no +- // error will be returned but the debug URL for that server in the URLs slice +- // will be empty. +- URLs []string +-} +- +-// StartProfileArgs holds the arguments to the StartProfile command. +-// +-// It is a placeholder for future compatibility. +-type StartProfileArgs struct { +-} +- +-// StartProfileResult holds the result of the StartProfile command. +-// +-// It is a placeholder for future compatibility. +-type StartProfileResult struct { +-} +- +-// StopProfileArgs holds the arguments to the StopProfile command. +-// +-// It is a placeholder for future compatibility. +-type StopProfileArgs struct { +-} +- +-// StopProfileResult holds the result to the StopProfile command. +-type StopProfileResult struct { +- // File is the profile file name. +- File string +-} +- +-type ResetGoModDiagnosticsArgs struct { +- URIArg +- +- // Optional: source of the diagnostics to reset. +- // If not set, all resettable go.mod diagnostics will be cleared. +- DiagnosticSource string +-} +- +-type VulncheckArgs struct { +- // Any document in the directory from which govulncheck will run. +- URI protocol.DocumentURI +- +- // Package pattern. E.g. "", ".", "./...". +- Pattern string +- +- // TODO: -tests +-} +- +-// RunVulncheckResult holds the result of asynchronously starting the vulncheck +-// command. +-type RunVulncheckResult struct { +- // Token holds the progress token for LSP workDone reporting of the vulncheck +- // invocation. +- Token protocol.ProgressToken +-} +- +-// VulncheckResult holds the result of synchronously running the vulncheck +-// command. +-type VulncheckResult struct { +- // Result holds the result of running vulncheck. +- Result *vulncheck.Result +- // Token holds the progress token used to report progress during back to the +- // LSP client during vulncheck execution. +- Token protocol.ProgressToken +-} +- +-// MemStatsResult holds selected fields from runtime.MemStats. +-type MemStatsResult struct { +- HeapAlloc uint64 +- HeapInUse uint64 +- TotalAlloc uint64 +-} +- +-// WorkspaceStatsResult returns information about the size and shape of the +-// workspace. +-type WorkspaceStatsResult struct { +- Files FileStats // file stats for the cache +- Views []ViewStats // stats for each view in the session +-} +- +-// FileStats holds information about a set of files. +-type FileStats struct { +- Total int // total number of files +- Largest int // number of bytes in the largest file +- Errs int // number of files that could not be read +-} +- +-// ViewStats holds information about a single View in the session. +-type ViewStats struct { +- GoCommandVersion string // version of the Go command resolved for this view +- AllPackages PackageStats // package info for all packages (incl. dependencies) +- WorkspacePackages PackageStats // package info for workspace packages +- Diagnostics int // total number of diagnostics in the workspace +-} +- +-// PackageStats holds information about a collection of packages. 
+-type PackageStats struct { +- Packages int // total number of packages +- LargestPackage int // number of files in the largest package +- CompiledGoFiles int // total number of compiled Go files across all packages +- Modules int // total number of unique modules +-} +- +-type RunGoWorkArgs struct { +- ViewID string // ID of the view to run the command from +- InitFirst bool // Whether to run `go work init` first +- Args []string // Args to pass to `go work` +-} +- +-// AddTelemetryCountersArgs holds the arguments to the AddCounters command +-// that updates the telemetry counters. +-type AddTelemetryCountersArgs struct { +- // Names and Values must have the same length. +- Names []string // Name of counters. +- Values []int64 // Values added to the corresponding counters. Must be non-negative. +-} +- +-// ChangeSignatureArgs specifies a "change signature" refactoring to perform. +-// +-// The new signature is expressed via the NewParams and NewResults fields. The +-// elements of these lists each describe a new field of the signature, by +-// either referencing a field in the old signature or by defining a new field: +-// - If the element is an integer, it references a positional parameter in the +-// old signature. +-// - If the element is a string, it is parsed as a new field to add. +-// +-// Suppose we have a function `F(a, b int) (string, error)`. Here are some +-// examples of refactoring this signature in practice, eliding the 'Location' +-// and 'ResolveEdits' fields. +-// - `{ "NewParams": [0], "NewResults": [0, 1] }` removes the second parameter +-// - `{ "NewParams": [1, 0], "NewResults": [0, 1] }` flips the parameter order +-// - `{ "NewParams": [0, 1, "a int"], "NewResults": [0, 1] }` adds a new field +-// - `{ "NewParams": [1, 2], "NewResults": [1] }` drops the `error` result +-type ChangeSignatureArgs struct { +- // Location is any range inside the function signature. By convention, this +- // is the same location provided in the codeAction request. +- Location protocol.Location // a range inside of the function signature, as passed to CodeAction +- +- // NewParams describes parameters of the new signature. +- // An int value references a parameter in the old signature by index. +- // A string value describes a new parameter field (e.g. "x int"). +- NewParams []ChangeSignatureParam +- +- // NewResults describes results of the new signature (see above). +- // An int value references a result in the old signature by index. +- // A string value describes a new result field (e.g. "err error"). +- NewResults []ChangeSignatureParam +- +- // Whether to resolve and return the edits. +- ResolveEdits bool +-} +- +-// ChangeSignatureParam implements the API described in the doc string of +-// [ChangeSignatureArgs]: a union of JSON int | string. +-type ChangeSignatureParam struct { +- OldIndex int +- NewField string +-} +- +-func (a *ChangeSignatureParam) UnmarshalJSON(b []byte) error { +- var s string +- if err := json.Unmarshal(b, &s); err == nil { +- a.NewField = s +- return nil +- } +- var i int +- if err := json.Unmarshal(b, &i); err == nil { +- a.OldIndex = i +- return nil +- } +- return fmt.Errorf("must be int or string") +-} +- +-func (a ChangeSignatureParam) MarshalJSON() ([]byte, error) { +- if a.NewField != "" { +- return json.Marshal(a.NewField) +- } +- return json.Marshal(a.OldIndex) +-} +- +-// DiagnoseFilesArgs specifies a set of files for which diagnostics are wanted. 
+-type DiagnoseFilesArgs struct { +- Files []protocol.DocumentURI +-} +- +-// A View holds summary information about a cache.View. +-type View struct { +- ID string // view ID (the index of this view among all views created) +- Type string // view type (via cache.ViewType.String) +- Root protocol.DocumentURI // root dir of the view (e.g. containing go.mod or go.work) +- Folder protocol.DocumentURI // workspace folder associated with the view +- EnvOverlay []string // environment variable overrides +-} +- +-// PackagesArgs holds arguments for the Packages command. +-type PackagesArgs struct { +- // Files is a list of files and directories whose associated +- // packages should be described by the result. +- // +- // In some cases, a file may belong to more than one package; +- // the result may describe any of them. +- Files []protocol.DocumentURI +- +- // Enumerate all packages under the directory loadable with +- // the ... pattern. +- // The search does not cross the module boundaries and +- // does not return packages that are not yet loaded. +- // (e.g. those excluded by the gopls directory filter setting, +- // or the go.work configuration) +- Recursive bool `json:"Recursive,omitempty"` +- +- // Mode controls the types of information returned for each package. +- Mode PackagesMode +-} +- +-// PackagesMode controls the details to include in PackagesResult. +-type PackagesMode uint64 +- +-const ( +- // Populate the [TestFile.Tests] field in [Package] returned by the +- // Packages command. +- NeedTests PackagesMode = 1 << iota +-) +- +-// PackagesResult is the result of the Packages command. +-type PackagesResult struct { +- // Packages is an unordered list of package metadata. +- Packages []Package +- +- // Module maps module path to module metadata for +- // all the modules of the returned Packages. +- Module map[string]Module +-} +- +-// Package describes a Go package (not an empty parent). +-type Package struct { +- // Package path. +- Path string +- // Module path. Empty if the package doesn't +- // belong to any module. +- ModulePath string +- // q in a "p [q.test]" package. +- ForTest string +- +- // Note: the result does not include the directory name +- // of the package because mapping between a package and +- // a folder is not possible in certain build systems. +- // If directory info is needed, one can guess it +- // from the TestFile's file name. +- +- // TestFiles contains the subset of the files of the package +- // whose name ends with "_test.go". +- // They are ordered deterministically as determined +- // by the underlying build system. +- TestFiles []TestFile +-} +- +-type Module struct { +- Path string // module path +- Version string // module version if any. +- GoMod protocol.DocumentURI // path to the go.mod file. +-} +- +-type TestFile struct { +- URI protocol.DocumentURI // a *_test.go file +- +- // Tests is the list of tests in File, including subtests. +- // +- // The set of subtests is not exhaustive as in general they may be +- // dynamically generated, so it is impossible for static heuristics +- // to enumerate them. +- // +- // Tests are lexically ordered. +- // Since subtest names are prefixed by their top-level test names +- // each top-level test precedes its subtests. +- Tests []TestCase +-} +- +-// TestCase represents a test case. +-// A test case can be a top-level Test/Fuzz/Benchmark/Example function, +-// as recognized by 'go list' or 'go test -list', or +-// a subtest within a top-level function. 
+-type TestCase struct { +- // Name is the complete name of the test (Test, Benchmark, Example, or Fuzz) +- // or the subtest as it appears in the output of go test -json. +- // The server may attempt to infer names of subtests by static +- // analysis; if so, it should aim to simulate the actual computed +- // name of the test, including any disambiguating suffix such as "#01". +- // To run only this test, clients need to compute the -run, -bench, -fuzz +- // flag values by first splitting the Name with "/" and +- // quoting each element with "^" + regexp.QuoteMeta(Name) + "$". +- // e.g. TestToplevel/Inner.Subtest → -run=^TestToplevel$/^Inner\.Subtest$ +- Name string +- +- // Loc is the filename and range enclosing this test function +- // or the subtest. This is used to place the gutter marker +- // and group tests based on location. +- // For subtests whose test names can be determined statically, +- // this can be either t.Run or the test data table +- // for table-driven setup. +- // Some testing frameworks allow to declare the actual test +- // logic in a different file. For example, one can define +- // a testify test suite in suite_test.go and use it from +- // main_test.go. +- /* +- -- main_test.go -- +- ... +- func TestFoo(t *testing.T) { +- suite.Run(t, new(MyTestSuite)) +- } +- -- suite_test.go -- +- type MyTestSuite struct { +- suite.Suite +- } +- func (suite *MyTestSuite) TestBar() { ... } +- */ +- // In this case, the testing framework creates "TestFoo/TestBar" +- // and the corresponding test case belongs to "main_test.go" +- // TestFile. However, the test case has "suite_test.go" as its +- // file location. +- Loc protocol.Location +-} +- +-type ModulesArgs struct { +- // Dir is the directory in which to search for go.mod files. +- Dir protocol.DocumentURI +- +- // MaxDepth is the directory walk limit. +- // A value of 0 means inspect only Dir. +- // 1 means inspect its child directories too, and so on. +- // A negative value removes the limit. +- MaxDepth int +-} +- +-type ModulesResult struct { +- Modules []Module +-} +- +-type PackageSymbolsArgs struct { +- URI protocol.DocumentURI +-} +- +-type PackageSymbolsResult struct { +- PackageName string +- // Files is a list of files in the given URI's package. +- Files []protocol.DocumentURI +- Symbols []PackageSymbol +-} +- +-// PackageSymbol has the same fields as DocumentSymbol, with an additional int field "File" +-// which stores the index of the symbol's file in the PackageSymbolsResult.Files array +-type PackageSymbol struct { +- Name string `json:"name"` +- +- Detail string `json:"detail,omitempty"` +- +- // protocol.SymbolKind maps an integer to an enum: +- // https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#symbolKind +- // i.e. File = 1 +- Kind protocol.SymbolKind `json:"kind"` +- +- Tags []protocol.SymbolTag `json:"tags,omitempty"` +- +- Range protocol.Range `json:"range"` +- +- SelectionRange protocol.Range `json:"selectionRange"` +- +- Children []PackageSymbol `json:"children,omitempty"` +- +- // Index of this symbol's file in PackageSymbolsResult.Files +- File int `json:"file,omitempty"` +-} +- +-// ModifyTagsArgs holds variables that determine how struct tags are modified. +-type ModifyTagsArgs struct { +- URI protocol.DocumentURI // uri of the file to be modified +- Range protocol.Range // range in the file for where to modify struct tags +- Add string // comma-separated list of tags to add; i.e. 
"json,xml" +- AddOptions string // comma-separated list of options to add, per tag; i.e. "json=omitempty" +- Remove string // comma-separated list of tags to remove +- RemoveOptions string // comma-separated list of options to remove +- Clear bool // if set, clear all tags. tags are cleared before any new tags are added +- ClearOptions bool // if set, clear all tag options; options are cleared before any new options are added +- Overwrite bool // if set, replace existing tags when adding +- SkipUnexportedFields bool // if set, do not modify tags on unexported struct fields +- Transform string // transform rule for adding tags; i.e. "snakecase" +- ValueFormat string // format for the tag's value, after transformation; for example "column:{field}" +-} +- +-type LSPArgs struct { +- Method string `json:"method"` +- Param json.RawMessage `json:"param"` +-} +diff -urN a/gopls/internal/protocol/command/interface_test.go b/gopls/internal/protocol/command/interface_test.go +--- a/gopls/internal/protocol/command/interface_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/protocol/command/interface_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,33 +0,0 @@ +-// Copyright 2021 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package command_test +- +-import ( +- "os" +- "testing" +- +- "github.com/google/go-cmp/cmp" +- "golang.org/x/tools/gopls/internal/protocol/command/gen" +- "golang.org/x/tools/internal/testenv" +-) +- +-// TestGenerated ensures that we haven't forgotten to update command_gen.go. +-func TestGenerated(t *testing.T) { +- testenv.NeedsGoPackages(t) +- testenv.NeedsLocalXTools(t) +- +- onDisk, err := os.ReadFile("command_gen.go") +- if err != nil { +- t.Fatal(err) +- } +- +- generated, err := gen.Generate() +- if err != nil { +- t.Fatal(err) +- } +- if diff := cmp.Diff(string(generated), string(onDisk)); diff != "" { +- t.Errorf("command_gen.go is stale -- regenerate (-generated +on disk)\n%s", diff) +- } +-} +diff -urN a/gopls/internal/protocol/command/util.go b/gopls/internal/protocol/command/util.go +--- a/gopls/internal/protocol/command/util.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/protocol/command/util.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,67 +0,0 @@ +-// Copyright 2021 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package command +- +-import ( +- "encoding/json" +- "fmt" +-) +- +-// A Command identifies one of gopls' ad-hoc extension commands +-// that may be invoked through LSP's executeCommand. +-type Command string +- +-func (c Command) String() string { return string(c) } +- +-// MarshalArgs encodes the given arguments to json.RawMessages. This function +-// is used to construct arguments to a protocol.Command. +-// +-// Example usage: +-// +-// jsonArgs, err := MarshalArgs(1, "hello", true, StructuredArg{42, 12.6}) +-func MarshalArgs(args ...any) ([]json.RawMessage, error) { +- var out []json.RawMessage +- for _, arg := range args { +- argJSON, err := json.Marshal(arg) +- if err != nil { +- return nil, err +- } +- out = append(out, argJSON) +- } +- return out, nil +-} +- +-// MustMarshalArgs is like MarshalArgs, but panics on error. +-func MustMarshalArgs(args ...any) []json.RawMessage { +- msg, err := MarshalArgs(args...) 
+- if err != nil { +- panic(err) +- } +- return msg +-} +- +-// UnmarshalArgs decodes the given json.RawMessages to the variables provided +-// by args. Each element of args should be a pointer. +-// +-// Example usage: +-// +-// var ( +-// num int +-// str string +-// bul bool +-// structured StructuredArg +-// ) +-// err := UnmarshalArgs(args, &num, &str, &bul, &structured) +-func UnmarshalArgs(jsonArgs []json.RawMessage, args ...any) error { +- if len(args) != len(jsonArgs) { +- return fmt.Errorf("DecodeArgs: expected %d input arguments, got %d JSON arguments", len(args), len(jsonArgs)) +- } +- for i, arg := range args { +- if err := json.Unmarshal(jsonArgs[i], arg); err != nil { +- return err +- } +- } +- return nil +-} +diff -urN a/gopls/internal/protocol/context.go b/gopls/internal/protocol/context.go +--- a/gopls/internal/protocol/context.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/protocol/context.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,65 +0,0 @@ +-// Copyright 2019 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package protocol +- +-import ( +- "bytes" +- "context" +- "sync" +- +- "golang.org/x/tools/internal/event" +- "golang.org/x/tools/internal/event/core" +- "golang.org/x/tools/internal/event/export" +- "golang.org/x/tools/internal/event/label" +- "golang.org/x/tools/internal/xcontext" +-) +- +-type contextKey int +- +-const ( +- clientKey = contextKey(iota) +-) +- +-func WithClient(ctx context.Context, client Client) context.Context { +- return context.WithValue(ctx, clientKey, client) +-} +- +-func LogEvent(ctx context.Context, ev core.Event, lm label.Map, mt MessageType) context.Context { +- client, ok := ctx.Value(clientKey).(Client) +- if !ok { +- return ctx +- } +- buf := &bytes.Buffer{} +- p := export.Printer{} +- p.WriteEvent(buf, ev, lm) +- msg := &LogMessageParams{Type: mt, Message: buf.String()} +- // Handle messages generated via event.Error, which won't have a level Label. +- if event.IsError(ev) { +- msg.Type = Error +- } +- +- // The background goroutine lives forever once started, +- // and ensures log messages are sent in order (#61216). +- startLogSenderOnce.Do(func() { +- go func() { +- for f := range logQueue { +- f() +- } +- }() +- }) +- +- // Add the log item to a queue, rather than sending a +- // window/logMessage request to the client synchronously, +- // which would slow down this thread. +- ctx2 := xcontext.Detach(ctx) +- logQueue <- func() { client.LogMessage(ctx2, msg) } +- +- return ctx +-} +- +-var ( +- startLogSenderOnce sync.Once +- logQueue = make(chan func(), 100) // big enough for a large transient burst +-) +diff -urN a/gopls/internal/protocol/doc.go b/gopls/internal/protocol/doc.go +--- a/gopls/internal/protocol/doc.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/protocol/doc.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,18 +0,0 @@ +-// Copyright 2018 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-//go:generate go run ./generate +- +-// Package protocol contains the structs that map directly to the +-// request and response messages of the Language Server Protocol. +-// +-// It is a literal transcription, with unmodified comments, and only the changes +-// required to make it go code. +-// Names are uppercased to export them. +-// All fields have JSON tags added to correct the names. 
+-// Fields marked with a ? are also marked as "omitempty" +-// Fields that are "|| null" are made pointers +-// Fields that are string or number are left as string +-// Fields that are type "number" are made float64 +-package protocol +diff -urN a/gopls/internal/protocol/edits.go b/gopls/internal/protocol/edits.go +--- a/gopls/internal/protocol/edits.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/protocol/edits.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,176 +0,0 @@ +-// Copyright 2023 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package protocol +- +-import ( +- "fmt" +- "slices" +- +- "golang.org/x/tools/internal/diff" +-) +- +-// EditsFromDiffEdits converts diff.Edits to a non-nil slice of LSP TextEdits. +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#textEditArray +-func EditsFromDiffEdits(m *Mapper, edits []diff.Edit) ([]TextEdit, error) { +- // LSP doesn't require TextEditArray to be sorted: +- // this is the receiver's concern. But govim, and perhaps +- // other clients have historically relied on the order. +- edits = slices.Clone(edits) +- diff.SortEdits(edits) +- +- result := make([]TextEdit, len(edits)) +- for i, edit := range edits { +- rng, err := m.OffsetRange(edit.Start, edit.End) +- if err != nil { +- return nil, err +- } +- result[i] = TextEdit{ +- Range: rng, +- NewText: edit.New, +- } +- } +- return result, nil +-} +- +-// EditsToDiffEdits converts LSP TextEdits to diff.Edits. +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#textEditArray +-func EditsToDiffEdits(m *Mapper, edits []TextEdit) ([]diff.Edit, error) { +- if edits == nil { +- return nil, nil +- } +- result := make([]diff.Edit, len(edits)) +- for i, edit := range edits { +- start, end, err := m.RangeOffsets(edit.Range) +- if err != nil { +- return nil, err +- } +- result[i] = diff.Edit{ +- Start: start, +- End: end, +- New: edit.NewText, +- } +- } +- return result, nil +-} +- +-// ApplyEdits applies the patch (edits) to m.Content and returns the result. +-// It also returns the edits converted to diff-package form. +-func ApplyEdits(m *Mapper, edits []TextEdit) ([]byte, []diff.Edit, error) { +- diffEdits, err := EditsToDiffEdits(m, edits) +- if err != nil { +- return nil, nil, err +- } +- out, err := diff.ApplyBytes(m.Content, diffEdits) +- return out, diffEdits, err +-} +- +-// AsTextEdits converts a slice possibly containing AnnotatedTextEdits +-// to a slice of TextEdits. +-func AsTextEdits(edits []Or_TextDocumentEdit_edits_Elem) []TextEdit { +- var result []TextEdit +- for _, e := range edits { +- var te TextEdit +- if x, ok := e.Value.(AnnotatedTextEdit); ok { +- te = x.TextEdit +- } else if x, ok := e.Value.(TextEdit); ok { +- te = x +- } else { +- panic(fmt.Sprintf("unexpected type %T, expected AnnotatedTextEdit or TextEdit", e.Value)) +- } +- result = append(result, te) +- } +- return result +-} +- +-// AsAnnotatedTextEdits converts a slice of TextEdits +-// to a slice of Or_TextDocumentEdit_edits_Elem. 
+-// (returning a typed nil is required in server: in code_action.go and command.go)) +-func AsAnnotatedTextEdits(edits []TextEdit) []Or_TextDocumentEdit_edits_Elem { +- if edits == nil { +- return []Or_TextDocumentEdit_edits_Elem{} +- } +- var result []Or_TextDocumentEdit_edits_Elem +- for _, e := range edits { +- result = append(result, Or_TextDocumentEdit_edits_Elem{ +- Value: TextEdit{ +- Range: e.Range, +- NewText: e.NewText, +- }, +- }) +- } +- return result +-} +- +-// fileHandle abstracts file.Handle to avoid a cycle. +-type fileHandle interface { +- URI() DocumentURI +- Version() int32 +-} +- +-// NewWorkspaceEdit constructs a WorkspaceEdit from a list of document changes. +-// +-// Any ChangeAnnotations must be added after. +-func NewWorkspaceEdit(changes ...DocumentChange) *WorkspaceEdit { +- return &WorkspaceEdit{DocumentChanges: changes} +-} +- +-// DocumentChangeEdit constructs a DocumentChange containing a +-// TextDocumentEdit from a file.Handle and a list of TextEdits. +-func DocumentChangeEdit(fh fileHandle, textedits []TextEdit) DocumentChange { +- return DocumentChange{ +- TextDocumentEdit: &TextDocumentEdit{ +- TextDocument: OptionalVersionedTextDocumentIdentifier{ +- Version: fh.Version(), +- TextDocumentIdentifier: TextDocumentIdentifier{URI: fh.URI()}, +- }, +- Edits: AsAnnotatedTextEdits(textedits), +- }, +- } +-} +- +-// DocumentChangeCreate constructs a DocumentChange that creates a file. +-func DocumentChangeCreate(uri DocumentURI) DocumentChange { +- return DocumentChange{ +- CreateFile: &CreateFile{ +- Kind: "create", +- URI: uri, +- }, +- } +-} +- +-// DocumentChangeRename constructs a DocumentChange that renames a file. +-func DocumentChangeRename(src, dst DocumentURI) DocumentChange { +- return DocumentChange{ +- RenameFile: &RenameFile{ +- Kind: "rename", +- OldURI: src, +- NewURI: dst, +- }, +- } +-} +- +-// SelectCompletionTextEdit returns insert or replace mode TextEdit +-// included in the completion item. +-func SelectCompletionTextEdit(item CompletionItem, useReplaceMode bool) (TextEdit, error) { +- var edit TextEdit +- switch typ := item.TextEdit.Value.(type) { +- case TextEdit: // old style completion item. +- return typ, nil +- case InsertReplaceEdit: +- if useReplaceMode { +- return TextEdit{ +- NewText: typ.NewText, +- Range: typ.Replace, +- }, nil +- } else { +- return TextEdit{ +- NewText: typ.NewText, +- Range: typ.Insert, +- }, nil +- } +- default: +- return edit, fmt.Errorf("unsupported edit type %T", typ) +- } +-} +diff -urN a/gopls/internal/protocol/enums.go b/gopls/internal/protocol/enums.go +--- a/gopls/internal/protocol/enums.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/protocol/enums.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,179 +0,0 @@ +-// Copyright 2018 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package protocol +- +-import ( +- "fmt" +-) +- +-// CodeActionUnknownTrigger indicates that the trigger for a +-// CodeAction request is unknown. A missing +-// CodeActionContext.TriggerKind should be treated as equivalent. 
+-const CodeActionUnknownTrigger CodeActionTriggerKind = 0 +- +-var ( +- namesTextDocumentSyncKind [int(Incremental) + 1]string +- namesMessageType [int(Log) + 1]string +- namesFileChangeType [int(Deleted) + 1]string +- namesWatchKind [int(WatchDelete) + 1]string +- namesCompletionTriggerKind [int(TriggerForIncompleteCompletions) + 1]string +- namesDiagnosticSeverity [int(SeverityHint) + 1]string +- namesDiagnosticTag [int(Unnecessary) + 1]string +- namesCompletionItemKind [int(TypeParameterCompletion) + 1]string +- namesInsertTextFormat [int(SnippetTextFormat) + 1]string +- namesDocumentHighlightKind [int(Write) + 1]string +- namesSymbolKind [int(TypeParameter) + 1]string +- namesTextDocumentSaveReason [int(FocusOut) + 1]string +-) +- +-func init() { +- namesTextDocumentSyncKind[int(None)] = "None" +- namesTextDocumentSyncKind[int(Full)] = "Full" +- namesTextDocumentSyncKind[int(Incremental)] = "Incremental" +- +- namesMessageType[int(Error)] = "Error" +- namesMessageType[int(Warning)] = "Warning" +- namesMessageType[int(Info)] = "Info" +- namesMessageType[int(Log)] = "Log" +- +- namesFileChangeType[int(Created)] = "Created" +- namesFileChangeType[int(Changed)] = "Changed" +- namesFileChangeType[int(Deleted)] = "Deleted" +- +- namesWatchKind[int(WatchCreate)] = "WatchCreate" +- namesWatchKind[int(WatchChange)] = "WatchChange" +- namesWatchKind[int(WatchDelete)] = "WatchDelete" +- +- namesCompletionTriggerKind[int(Invoked)] = "Invoked" +- namesCompletionTriggerKind[int(TriggerCharacter)] = "TriggerCharacter" +- namesCompletionTriggerKind[int(TriggerForIncompleteCompletions)] = "TriggerForIncompleteCompletions" +- +- namesDiagnosticSeverity[int(SeverityError)] = "Error" +- namesDiagnosticSeverity[int(SeverityWarning)] = "Warning" +- namesDiagnosticSeverity[int(SeverityInformation)] = "Information" +- namesDiagnosticSeverity[int(SeverityHint)] = "Hint" +- +- namesDiagnosticTag[int(Unnecessary)] = "Unnecessary" +- +- namesCompletionItemKind[int(TextCompletion)] = "text" +- namesCompletionItemKind[int(MethodCompletion)] = "method" +- namesCompletionItemKind[int(FunctionCompletion)] = "func" +- namesCompletionItemKind[int(ConstructorCompletion)] = "constructor" +- namesCompletionItemKind[int(FieldCompletion)] = "field" +- namesCompletionItemKind[int(VariableCompletion)] = "var" +- namesCompletionItemKind[int(ClassCompletion)] = "type" +- namesCompletionItemKind[int(InterfaceCompletion)] = "interface" +- namesCompletionItemKind[int(ModuleCompletion)] = "package" +- namesCompletionItemKind[int(PropertyCompletion)] = "property" +- namesCompletionItemKind[int(UnitCompletion)] = "unit" +- namesCompletionItemKind[int(ValueCompletion)] = "value" +- namesCompletionItemKind[int(EnumCompletion)] = "enum" +- namesCompletionItemKind[int(KeywordCompletion)] = "keyword" +- namesCompletionItemKind[int(SnippetCompletion)] = "snippet" +- namesCompletionItemKind[int(ColorCompletion)] = "color" +- namesCompletionItemKind[int(FileCompletion)] = "file" +- namesCompletionItemKind[int(ReferenceCompletion)] = "reference" +- namesCompletionItemKind[int(FolderCompletion)] = "folder" +- namesCompletionItemKind[int(EnumMemberCompletion)] = "enumMember" +- namesCompletionItemKind[int(ConstantCompletion)] = "const" +- namesCompletionItemKind[int(StructCompletion)] = "struct" +- namesCompletionItemKind[int(EventCompletion)] = "event" +- namesCompletionItemKind[int(OperatorCompletion)] = "operator" +- namesCompletionItemKind[int(TypeParameterCompletion)] = "typeParam" +- +- namesInsertTextFormat[int(PlainTextTextFormat)] = 
"PlainText" +- namesInsertTextFormat[int(SnippetTextFormat)] = "Snippet" +- +- namesDocumentHighlightKind[int(Text)] = "Text" +- namesDocumentHighlightKind[int(Read)] = "Read" +- namesDocumentHighlightKind[int(Write)] = "Write" +- +- namesSymbolKind[int(File)] = "File" +- namesSymbolKind[int(Module)] = "Module" +- namesSymbolKind[int(Namespace)] = "Namespace" +- namesSymbolKind[int(Package)] = "Package" +- namesSymbolKind[int(Class)] = "Class" +- namesSymbolKind[int(Method)] = "Method" +- namesSymbolKind[int(Property)] = "Property" +- namesSymbolKind[int(Field)] = "Field" +- namesSymbolKind[int(Constructor)] = "Constructor" +- namesSymbolKind[int(Enum)] = "Enum" +- namesSymbolKind[int(Interface)] = "Interface" +- namesSymbolKind[int(Function)] = "Function" +- namesSymbolKind[int(Variable)] = "Variable" +- namesSymbolKind[int(Constant)] = "Constant" +- namesSymbolKind[int(String)] = "String" +- namesSymbolKind[int(Number)] = "Number" +- namesSymbolKind[int(Boolean)] = "Boolean" +- namesSymbolKind[int(Array)] = "Array" +- namesSymbolKind[int(Object)] = "Object" +- namesSymbolKind[int(Key)] = "Key" +- namesSymbolKind[int(Null)] = "Null" +- namesSymbolKind[int(EnumMember)] = "EnumMember" +- namesSymbolKind[int(Struct)] = "Struct" +- namesSymbolKind[int(Event)] = "Event" +- namesSymbolKind[int(Operator)] = "Operator" +- namesSymbolKind[int(TypeParameter)] = "TypeParameter" +- +- namesTextDocumentSaveReason[int(Manual)] = "Manual" +- namesTextDocumentSaveReason[int(AfterDelay)] = "AfterDelay" +- namesTextDocumentSaveReason[int(FocusOut)] = "FocusOut" +-} +- +-func formatEnum(f fmt.State, i int, names []string, unknown string) { +- s := "" +- if i >= 0 && i < len(names) { +- s = names[i] +- } +- if s != "" { +- fmt.Fprint(f, s) +- } else { +- fmt.Fprintf(f, "%s(%d)", unknown, i) +- } +-} +- +-func (e TextDocumentSyncKind) Format(f fmt.State, c rune) { +- formatEnum(f, int(e), namesTextDocumentSyncKind[:], "TextDocumentSyncKind") +-} +- +-func (e MessageType) Format(f fmt.State, c rune) { +- formatEnum(f, int(e), namesMessageType[:], "MessageType") +-} +- +-func (e FileChangeType) Format(f fmt.State, c rune) { +- formatEnum(f, int(e), namesFileChangeType[:], "FileChangeType") +-} +- +-func (e CompletionTriggerKind) Format(f fmt.State, c rune) { +- formatEnum(f, int(e), namesCompletionTriggerKind[:], "CompletionTriggerKind") +-} +- +-func (e DiagnosticSeverity) Format(f fmt.State, c rune) { +- formatEnum(f, int(e), namesDiagnosticSeverity[:], "DiagnosticSeverity") +-} +- +-func (e DiagnosticTag) Format(f fmt.State, c rune) { +- formatEnum(f, int(e), namesDiagnosticTag[:], "DiagnosticTag") +-} +- +-func (e CompletionItemKind) Format(f fmt.State, c rune) { +- formatEnum(f, int(e), namesCompletionItemKind[:], "CompletionItemKind") +-} +- +-func (e InsertTextFormat) Format(f fmt.State, c rune) { +- formatEnum(f, int(e), namesInsertTextFormat[:], "InsertTextFormat") +-} +- +-func (e DocumentHighlightKind) Format(f fmt.State, c rune) { +- formatEnum(f, int(e), namesDocumentHighlightKind[:], "DocumentHighlightKind") +-} +- +-func (e SymbolKind) Format(f fmt.State, c rune) { +- formatEnum(f, int(e), namesSymbolKind[:], "SymbolKind") +-} +- +-func (e TextDocumentSaveReason) Format(f fmt.State, c rune) { +- formatEnum(f, int(e), namesTextDocumentSaveReason[:], "TextDocumentSaveReason") +-} +diff -urN a/gopls/internal/protocol/generate/generate.go b/gopls/internal/protocol/generate/generate.go +--- a/gopls/internal/protocol/generate/generate.go 2000-01-01 00:00:00.000000000 -0000 ++++ 
b/gopls/internal/protocol/generate/generate.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,121 +0,0 @@ +-// Copyright 2022 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package main +- +-import ( +- "bytes" +- "fmt" +- "log" +- "strings" +-) +- +-// a newType is a type that needs a name and a definition +-// These are the various types that the json specification doesn't name +-type newType struct { +- name string +- properties Properties // for struct/literal types +- items []*Type // for other types ("and", "tuple") +- line int +- kind string // Or, And, Tuple, Lit, Map +- typ *Type +-} +- +-func generateDoc(out *bytes.Buffer, doc string) { +- if doc == "" { +- return +- } +- +- if !strings.Contains(doc, "\n") { +- fmt.Fprintf(out, "// %s\n", doc) +- return +- } +- var list bool +- for line := range strings.SplitSeq(doc, "\n") { +- // Lists in metaModel.json start with a dash. +- // To make a go doc list they have to be preceded +- // by a blank line, and indented. +- // (see type TextDccumentFilter in protocol.go) +- if len(line) > 0 && line[0] == '-' { +- if !list { +- list = true +- fmt.Fprintf(out, "//\n") +- } +- fmt.Fprintf(out, "// %s\n", line) +- } else { +- if len(line) == 0 { +- list = false +- } +- fmt.Fprintf(out, "// %s\n", line) +- } +- } +-} +- +-// decide if a property is optional, and if it needs a * +-// return ",omitempty" if it is optional, and "*" if it needs a pointer +-func propStar(name string, t NameType, gotype string) (omitempty, indirect bool) { +- if t.Optional { +- switch gotype { +- case "uint32", "int32": +- // in FoldingRange.endLine, 0 and empty have different semantics +- // There seem to be no other cases. +- default: +- indirect = true +- omitempty = true +- } +- } +- if strings.HasPrefix(gotype, "[]") || strings.HasPrefix(gotype, "map[") { +- indirect = false // passed by reference, so no need for * +- } else { +- switch gotype { +- case "bool", "string", "interface{}", "any": +- indirect = false // gopls compatibility if t.Optional +- } +- } +- oind, oomit := indirect, omitempty +- if newStar, ok := goplsStar[prop{name, t.Name}]; ok { +- switch newStar { +- case nothing: +- indirect, omitempty = false, false +- case wantOpt: +- indirect, omitempty = false, true +- case wantOptStar: +- indirect, omitempty = true, true +- } +- if indirect == oind && omitempty == oomit { // no change +- log.Printf("goplsStar[ {%q, %q} ](%d) useless %v/%v %v/%v", name, t.Name, t.Line, oind, indirect, oomit, omitempty) +- } +- usedGoplsStar[prop{name, t.Name}] = true +- } +- +- return +-} +- +-func goName(s string) string { +- // Go naming conventions +- if strings.HasSuffix(s, "Id") { +- s = s[:len(s)-len("Id")] + "ID" +- } else if strings.HasSuffix(s, "Uri") { +- s = s[:len(s)-3] + "URI" +- } else if s == "uri" { +- s = "URI" +- } else if s == "id" { +- s = "ID" +- } +- +- // renames for temporary GOPLS compatibility +- if news := goplsType[s]; news != "" { +- usedGoplsType[s] = true +- s = news +- } +- // Names beginning _ are not exported +- if strings.HasPrefix(s, "_") { +- s = strings.Replace(s, "_", "X", 1) +- } +- if s != "string" { // base types are unchanged (textDocuemnt/diagnostic) +- // Title is deprecated, but a) s is only one word, b) replacement is too heavy-weight +- s = strings.Title(s) +- } +- return s +-} +diff -urN a/gopls/internal/protocol/generate/main.go b/gopls/internal/protocol/generate/main.go +--- a/gopls/internal/protocol/generate/main.go 
2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/protocol/generate/main.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,388 +0,0 @@ +-// Copyright 2022 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-// The generate command generates Go declarations from VSCode's +-// description of the Language Server Protocol. +-// +-// To run it, type 'go generate' in the parent (protocol) directory. +-package main +- +-// see https://github.com/golang/go/issues/61217 for discussion of an issue +- +-import ( +- "bytes" +- "encoding/json" +- "flag" +- "fmt" +- "go/format" +- "log" +- "os" +- "os/exec" +- "path/filepath" +- "strings" +-) +- +-const vscodeRepo = "https://github.com/microsoft/vscode-languageserver-node" +- +-// lspGitRef names a branch or tag in vscodeRepo. +-// It implicitly determines the protocol version of the LSP used by gopls. +-// For example, tag release/protocol/3.17.3 of the repo defines +-// protocol version 3.17.0 (as declared by the metaData.version field). +-// (Point releases are reflected in the git tag version even when they are cosmetic +-// and don't change the protocol.) +-var lspGitRef = "release/protocol/3.17.6-next.14" +- +-var ( +- repodir = flag.String("d", "", "directory containing clone of "+vscodeRepo) +- outputdir = flag.String("o", ".", "output directory") +- // PJW: not for real code +- lineNumbers = flag.Bool("l", false, "add line numbers to generated output") +-) +- +-func main() { +- log.SetFlags(log.Lshortfile) // log file name and line number, not time +- flag.Parse() +- +- processinline() +-} +- +-func processinline() { +- // A local repository may be specified during debugging. +- // The default behavior is to download the canonical version. +- if *repodir == "" { +- tmpdir, err := os.MkdirTemp("", "") +- if err != nil { +- log.Fatal(err) +- } +- defer os.RemoveAll(tmpdir) // ignore error +- +- // Clone the repository. 
+- cmd := exec.Command("git", "clone", "--quiet", "--depth=1", "-c", "advice.detachedHead=false", vscodeRepo, "--branch="+lspGitRef, "--single-branch", tmpdir) +- cmd.Stdout = os.Stderr +- cmd.Stderr = os.Stderr +- if err := cmd.Run(); err != nil { +- log.Fatal(err) +- } +- +- *repodir = tmpdir +- } else { +- lspGitRef = fmt.Sprintf("(not git, local dir %s)", *repodir) +- } +- +- model := parse(filepath.Join(*repodir, "protocol/metaModel.json")) +- +- findTypeNames(model) +- generateOutput(model) +- +- fileHdr = fileHeader(model) +- +- // write the files +- writeclient() +- writeserver() +- writeprotocol() +- writejsons() +- +- checkTables() +-} +- +-// common file header for output files +-var fileHdr string +- +-func writeclient() { +- out := new(bytes.Buffer) +- fmt.Fprintln(out, fileHdr) +- out.WriteString( +- `import ( +- "context" +- "encoding/json" +- "fmt" +- +- "golang.org/x/tools/internal/jsonrpc2" +-) +-`) +- out.WriteString("type Client interface {\n") +- for _, k := range cdecls.keys() { +- out.WriteString(cdecls[k]) +- } +- out.WriteString("}\n\n") +- out.WriteString(` +-func clientDispatch(ctx context.Context, client Client, reply jsonrpc2.Replier, r jsonrpc2.Request) (bool, error) { +- resp, valid, err := ClientDispatchCall(ctx, client, r.Method(), r.Params()) +- if !valid { +- return false, nil +- } +- +- if err != nil { +- return valid, reply(ctx, nil, err) +- } else { +- return valid, reply(ctx, resp, nil) +- } +-} +- +-func ClientDispatchCall(ctx context.Context, client Client, method string, raw json.RawMessage) (resp any, _ bool, err error) { +- switch method { +-`) +- for _, k := range ccases.keys() { +- out.WriteString(ccases[k]) +- } +- out.WriteString(("\tdefault:\n\t\treturn nil, false, nil\n\t}\n}\n\n")) +- for _, k := range cfuncs.keys() { +- out.WriteString(cfuncs[k]) +- } +- formatTo("tsclient.go", out.Bytes()) +-} +- +-func writeserver() { +- out := new(bytes.Buffer) +- fmt.Fprintln(out, fileHdr) +- out.WriteString( +- `import ( +- "context" +- "encoding/json" +- "fmt" +- +- "golang.org/x/tools/internal/jsonrpc2" +-) +-`) +- out.WriteString("type Server interface {\n") +- for _, k := range sdecls.keys() { +- out.WriteString(sdecls[k]) +- } +- out.WriteString(` +-} +-func serverDispatch(ctx context.Context, server Server, reply jsonrpc2.Replier, r jsonrpc2.Request) (bool, error) { +- resp, valid, err := ServerDispatchCall(ctx, server, r.Method(), r.Params()) +- if !valid { +- return false, nil +- } +- +- if err != nil { +- return valid, reply(ctx, nil, err) +- } else { +- return valid, reply(ctx, resp, nil) +- } +-} +- +-func ServerDispatchCall(ctx context.Context, server Server, method string, raw json.RawMessage) (resp any, _ bool, err error) { +- switch method { +-`) +- for _, k := range scases.keys() { +- out.WriteString(scases[k]) +- } +- out.WriteString(("\tdefault:\n\t\treturn nil, false, nil\n\t}\n}\n\n")) +- for _, k := range sfuncs.keys() { +- out.WriteString(sfuncs[k]) +- } +- formatTo("tsserver.go", out.Bytes()) +-} +- +-func writeprotocol() { +- out := new(bytes.Buffer) +- fmt.Fprintln(out, fileHdr) +- out.WriteString("import \"encoding/json\"\n\n") +- +- // The following are unneeded, but make the new code a superset of the old +- hack := func(newer, existing string) { +- if _, ok := types[existing]; !ok { +- log.Fatalf("types[%q] not found", existing) +- } +- types[newer] = strings.Replace(types[existing], existing, newer, 1) +- } +- hack("ConfigurationParams", "ParamConfiguration") +- hack("InitializeParams", "ParamInitialize") +- 
hack("PreviousResultId", "PreviousResultID") +- hack("WorkspaceFoldersServerCapabilities", "WorkspaceFolders5Gn") +- hack("_InitializeParams", "XInitializeParams") +- +- for _, k := range types.keys() { +- if k == "WatchKind" { +- types[k] = "type WatchKind = uint32" // strict gopls compatibility needs the '=' +- } +- out.WriteString(types[k]) +- } +- +- out.WriteString("\nconst (\n") +- for _, k := range consts.keys() { +- out.WriteString(consts[k]) +- } +- out.WriteString(")\n\n") +- formatTo("tsprotocol.go", out.Bytes()) +-} +- +-func writejsons() { +- out := new(bytes.Buffer) +- fmt.Fprintln(out, fileHdr) +- out.WriteString("import \"encoding/json\"\n\n") +- out.WriteString("import \"fmt\"\n") +- +- out.WriteString(` +-// UnmarshalError indicates that a JSON value did not conform to +-// one of the expected cases of an LSP union type. +-type UnmarshalError struct { +- msg string +-} +- +-func (e UnmarshalError) Error() string { +- return e.msg +-} +-`) +- +- for _, k := range jsons.keys() { +- out.WriteString(jsons[k]) +- } +- formatTo("tsjson.go", out.Bytes()) +-} +- +-// formatTo formats the Go source and writes it to *outputdir/basename. +-func formatTo(basename string, src []byte) { +- formatted, err := format.Source(src) +- if err != nil { +- failed := filepath.Join("/tmp", basename+".fail") +- if err := os.WriteFile(failed, src, 0644); err != nil { +- log.Fatal(err) +- } +- log.Fatalf("formatting %s: %v (see %s)", basename, err, failed) +- } +- if err := os.WriteFile(filepath.Join(*outputdir, basename), formatted, 0644); err != nil { +- log.Fatal(err) +- } +-} +- +-// create the common file header for the output files +-func fileHeader(model *Model) string { +- fname := filepath.Join(*repodir, ".git", "HEAD") +- buf, err := os.ReadFile(fname) +- if err != nil { +- log.Fatal(err) +- } +- buf = bytes.TrimSpace(buf) +- var githash string +- if len(buf) == 40 { +- githash = string(buf[:40]) +- } else if bytes.HasPrefix(buf, []byte("ref: ")) { +- fname = filepath.Join(*repodir, ".git", string(buf[5:])) +- buf, err = os.ReadFile(fname) +- if err != nil { +- log.Fatal(err) +- } +- githash = string(buf[:40]) +- } else { +- log.Fatalf("githash cannot be recovered from %s", fname) +- } +- +- format := `// Copyright 2023 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-// Code generated for LSP. DO NOT EDIT. +- +-package protocol +- +-// Code generated from %[1]s at ref %[2]s (hash %[3]s). +-// %[4]s/blob/%[2]s/%[1]s +-// LSP metaData.version = %[5]s. +- +-` +- return fmt.Sprintf(format, +- "protocol/metaModel.json", // 1 +- lspGitRef, // 2 +- githash, // 3 +- vscodeRepo, // 4 +- model.Version.Version) // 5 +-} +- +-func parse(fname string) *Model { +- buf, err := os.ReadFile(fname) +- if err != nil { +- log.Fatal(err) +- } +- buf = addLineNumbers(buf) +- model := new(Model) +- if err := json.Unmarshal(buf, model); err != nil { +- log.Fatal(err) +- } +- return model +-} +- +-// Type.Value has to be treated specially for literals and maps +-func (t *Type) UnmarshalJSON(data []byte) error { +- // First unmarshal only the unambiguous fields. 
+- var x struct { +- Kind string `json:"kind"` +- Items []*Type `json:"items"` +- Element *Type `json:"element"` +- Name string `json:"name"` +- Key *Type `json:"key"` +- Value any `json:"value"` +- Line int `json:"line"` +- } +- if err := json.Unmarshal(data, &x); err != nil { +- return err +- } +- *t = Type{ +- Kind: x.Kind, +- Items: x.Items, +- Element: x.Element, +- Name: x.Name, +- Value: x.Value, +- Line: x.Line, +- } +- +- // Then unmarshal the 'value' field based on the kind. +- // This depends on Unmarshal ignoring fields it doesn't know about. +- switch x.Kind { +- case "map": +- var x struct { +- Key *Type `json:"key"` +- Value *Type `json:"value"` +- } +- if err := json.Unmarshal(data, &x); err != nil { +- return fmt.Errorf("Type.kind=map: %v", err) +- } +- t.Key = x.Key +- t.Value = x.Value +- +- case "literal": +- var z struct { +- Value ParseLiteral `json:"value"` +- } +- +- if err := json.Unmarshal(data, &z); err != nil { +- return fmt.Errorf("Type.kind=literal: %v", err) +- } +- t.Value = z.Value +- +- case "base", "reference", "array", "and", "or", "tuple", +- "stringLiteral": +- // no-op. never seen integerLiteral or booleanLiteral. +- +- default: +- return fmt.Errorf("cannot decode Type.kind %q: %s", x.Kind, data) +- } +- return nil +-} +- +-// which table entries were not used +-func checkTables() { +- for k := range disambiguate { +- if !usedDisambiguate[k] { +- log.Printf("disambiguate[%v] unused", k) +- } +- } +- for k := range renameProp { +- if !usedRenameProp[k] { +- log.Printf("renameProp {%q, %q} unused", k[0], k[1]) +- } +- } +- for k := range goplsStar { +- if !usedGoplsStar[k] { +- log.Printf("goplsStar {%q, %q} unused", k[0], k[1]) +- } +- } +- for k := range goplsType { +- if !usedGoplsType[k] { +- log.Printf("unused goplsType[%q]->%s", k, goplsType[k]) +- } +- } +-} +diff -urN a/gopls/internal/protocol/generate/main_test.go b/gopls/internal/protocol/generate/main_test.go +--- a/gopls/internal/protocol/generate/main_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/protocol/generate/main_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,116 +0,0 @@ +-// Copyright 2022 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package main +- +-import ( +- "encoding/json" +- "fmt" +- "log" +- "os" +- "testing" +-) +- +-// These tests require the result of +-//"git clone https://github.com/microsoft/vscode-languageserver-node" in the HOME directory +- +-// this is not a test, but a way to get code coverage, +-// (in vscode, just run the test with "go.coverOnSingleTest": true) +-func TestAll(t *testing.T) { +- t.Skip("needs vscode-languageserver-node repository") +- *lineNumbers = true +- log.SetFlags(log.Lshortfile) +- main() +-} +- +-// check that the parsed file includes all the information +-// from the json file. This test will fail if the spec +-// introduces new fields. (one can test this test by +-// commenting out the version field in Model.) 
+-func TestParseContents(t *testing.T) { +- t.Skip("needs vscode-languageserver-node repository") +- log.SetFlags(log.Lshortfile) +- +- // compute our parse of the specification +- dir := os.Getenv("HOME") + "/vscode-languageserver-node" +- fname := dir + "/protocol/metaModel.json" +- v := parse(fname) +- out, err := json.Marshal(v) +- if err != nil { +- t.Fatal(err) +- } +- var our any +- if err := json.Unmarshal(out, &our); err != nil { +- t.Fatal(err) +- } +- +- // process the json file +- buf, err := os.ReadFile(fname) +- if err != nil { +- t.Fatalf("could not read metaModel.json: %v", err) +- } +- var raw any +- if err := json.Unmarshal(buf, &raw); err != nil { +- t.Fatal(err) +- } +- +- // convert to strings showing the fields +- them := flatten(raw) +- us := flatten(our) +- +- // everything in them should be in us +- lesser := make(sortedMap[bool]) +- for _, s := range them { +- lesser[s] = true +- } +- greater := make(sortedMap[bool]) // set of fields we have +- for _, s := range us { +- greater[s] = true +- } +- for _, k := range lesser.keys() { // set if fields they have +- if !greater[k] { +- t.Errorf("missing %s", k) +- } +- } +-} +- +-// flatten(nil) = "nil" +-// flatten(v string) = fmt.Sprintf("%q", v) +-// flatten(v float64)= fmt.Sprintf("%g", v) +-// flatten(v bool) = fmt.Sprintf("%v", v) +-// flatten(v []any) = []string{"[0]"flatten(v[0]), "[1]"flatten(v[1]), ...} +-// flatten(v map[string]any) = {"key1": flatten(v["key1"]), "key2": flatten(v["key2"]), ...} +-func flatten(x any) []string { +- switch v := x.(type) { +- case nil: +- return []string{"nil"} +- case string: +- return []string{fmt.Sprintf("%q", v)} +- case float64: +- return []string{fmt.Sprintf("%g", v)} +- case bool: +- return []string{fmt.Sprintf("%v", v)} +- case []any: +- var ans []string +- for i, x := range v { +- idx := fmt.Sprintf("[%.3d]", i) +- for _, s := range flatten(x) { +- ans = append(ans, idx+s) +- } +- } +- return ans +- case map[string]any: +- var ans []string +- for k, x := range v { +- idx := fmt.Sprintf("%q:", k) +- for _, s := range flatten(x) { +- ans = append(ans, idx+s) +- } +- } +- return ans +- default: +- log.Fatalf("unexpected type %T", x) +- return nil +- } +-} +diff -urN a/gopls/internal/protocol/generate/output.go b/gopls/internal/protocol/generate/output.go +--- a/gopls/internal/protocol/generate/output.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/protocol/generate/output.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,458 +0,0 @@ +-// Copyright 2022 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. 
+- +-package main +- +-import ( +- "bytes" +- "fmt" +- "log" +- "slices" +- "sort" +- "strings" +-) +- +-var ( +- // tsclient.go has 3 sections +- cdecls = make(sortedMap[string]) +- ccases = make(sortedMap[string]) +- cfuncs = make(sortedMap[string]) +- // tsserver.go has 3 sections +- sdecls = make(sortedMap[string]) +- scases = make(sortedMap[string]) +- sfuncs = make(sortedMap[string]) +- // tsprotocol.go has 2 sections +- types = make(sortedMap[string]) +- consts = make(sortedMap[string]) +- // tsjson has 1 section +- jsons = make(sortedMap[string]) +-) +- +-func generateOutput(model *Model) { +- for _, r := range model.Requests { +- genDecl(model, r.Method, r.Params, r.Result, r.Direction) +- genCase(model, r.Method, r.Params, r.Result, r.Direction) +- genFunc(model, r.Method, r.Params, r.Result, r.Direction, false) +- } +- for _, n := range model.Notifications { +- if n.Method == "$/cancelRequest" { +- continue // handled internally by jsonrpc2 +- } +- genDecl(model, n.Method, n.Params, nil, n.Direction) +- genCase(model, n.Method, n.Params, nil, n.Direction) +- genFunc(model, n.Method, n.Params, nil, n.Direction, true) +- } +- genStructs(model) +- genAliases(model) +- genGenTypes() // generate the unnamed types +- genConsts(model) +- genMarshal() +-} +- +-func genDecl(model *Model, method string, param, result *Type, dir string) { +- fname := methodName(method) +- p := "" +- if notNil(param) { +- p = ", *" + goplsName(param) +- } +- ret := "error" +- if notNil(result) { +- tp := goplsName(result) +- if !hasNilValue(tp) { +- tp = "*" + tp +- } +- ret = fmt.Sprintf("(%s, error)", tp) +- } +- // special gopls compatibility case (PJW: still needed?) +- switch method { +- case "workspace/configuration": +- // was And_Param_workspace_configuration, but the type substitution doesn't work, +- // as ParamConfiguration is embedded in And_Param_workspace_configuration +- p = ", *ParamConfiguration" +- ret = "([]LSPAny, error)" +- } +- fragment := strings.ReplaceAll(strings.TrimPrefix(method, "$/"), "/", "_") +- msg := fmt.Sprintf("\t%s\t%s(context.Context%s) %s\n", lspLink(model, fragment), fname, p, ret) +- switch dir { +- case "clientToServer": +- sdecls[method] = msg +- case "serverToClient": +- cdecls[method] = msg +- case "both": +- sdecls[method] = msg +- cdecls[method] = msg +- default: +- log.Fatalf("impossible direction %q", dir) +- } +-} +- +-func genCase(_ *Model, method string, param, result *Type, dir string) { +- out := new(bytes.Buffer) +- fmt.Fprintf(out, "\tcase %q:\n", method) +- var p string +- fname := methodName(method) +- if notNil(param) { +- nm := goplsName(param) +- if method == "workspace/configuration" { // gopls compatibility +- // was And_Param_workspace_configuration, which contains ParamConfiguration +- // so renaming the type leads to circular definitions +- nm = "ParamConfiguration" // gopls compatibility +- } +- fmt.Fprintf(out, "\t\tvar params %s\n", nm) +- out.WriteString("\t\tif err := UnmarshalJSON(raw, &params); err != nil {\n") +- out.WriteString("\t\t\treturn nil, true, fmt.Errorf(\"%%w: %%s\", jsonrpc2.ErrParse, err)\n\t\t}\n") +- p = ", &params" +- } +- if notNil(result) { +- fmt.Fprintf(out, "\t\tresp, err := %%s.%s(ctx%s)\n", fname, p) +- out.WriteString("\t\tif err != nil {\n") +- out.WriteString("\t\t\treturn nil, true, err\n") +- out.WriteString("\t\t}\n") +- out.WriteString("\t\treturn resp, true, nil\n") +- } else { +- fmt.Fprintf(out, "\t\terr := %%s.%s(ctx%s)\n", fname, p) +- out.WriteString("\t\treturn nil, true, err\n") +- } +- 
out.WriteString("\n") +- msg := out.String() +- switch dir { +- case "clientToServer": +- scases[method] = fmt.Sprintf(msg, "server") +- case "serverToClient": +- ccases[method] = fmt.Sprintf(msg, "client") +- case "both": +- scases[method] = fmt.Sprintf(msg, "server") +- ccases[method] = fmt.Sprintf(msg, "client") +- default: +- log.Fatalf("impossible direction %q", dir) +- } +-} +- +-func genFunc(_ *Model, method string, param, result *Type, dir string, isnotify bool) { +- out := new(bytes.Buffer) +- var p, r string +- var goResult string +- if notNil(param) { +- p = ", params *" + goplsName(param) +- } +- if notNil(result) { +- goResult = goplsName(result) +- if !hasNilValue(goResult) { +- goResult = "*" + goResult +- } +- r = fmt.Sprintf("(%s, error)", goResult) +- } else { +- r = "error" +- } +- // special gopls compatibility case +- switch method { +- case "workspace/configuration": +- // was And_Param_workspace_configuration, but the type substitution doesn't work, +- // as ParamConfiguration is embedded in And_Param_workspace_configuration +- p = ", params *ParamConfiguration" +- r = "([]LSPAny, error)" +- goResult = "[]LSPAny" +- } +- fname := methodName(method) +- fmt.Fprintf(out, "func (s *%%sDispatcher) %s(ctx context.Context%s) %s {\n", +- fname, p, r) +- +- if !notNil(result) { +- if isnotify { +- if notNil(param) { +- fmt.Fprintf(out, "\treturn s.sender.Notify(ctx, %q, params)\n", method) +- } else { +- fmt.Fprintf(out, "\treturn s.sender.Notify(ctx, %q, nil)\n", method) +- } +- } else { +- if notNil(param) { +- fmt.Fprintf(out, "\treturn s.sender.Call(ctx, %q, params, nil)\n", method) +- } else { +- fmt.Fprintf(out, "\treturn s.sender.Call(ctx, %q, nil, nil)\n", method) +- } +- } +- } else { +- fmt.Fprintf(out, "\tvar result %s\n", goResult) +- if isnotify { +- if notNil(param) { +- fmt.Fprintf(out, "\ts.sender.Notify(ctx, %q, params)\n", method) +- } else { +- fmt.Fprintf(out, "\t\tif err := s.sender.Notify(ctx, %q, nil); err != nil {\n", method) +- } +- } else { +- if notNil(param) { +- fmt.Fprintf(out, "\t\tif err := s.sender.Call(ctx, %q, params, &result); err != nil {\n", method) +- } else { +- fmt.Fprintf(out, "\t\tif err := s.sender.Call(ctx, %q, nil, &result); err != nil {\n", method) +- } +- } +- fmt.Fprintf(out, "\t\treturn nil, err\n\t}\n\treturn result, nil\n") +- } +- out.WriteString("}\n") +- msg := out.String() +- switch dir { +- case "clientToServer": +- sfuncs[method] = fmt.Sprintf(msg, "server") +- case "serverToClient": +- cfuncs[method] = fmt.Sprintf(msg, "client") +- case "both": +- sfuncs[method] = fmt.Sprintf(msg, "server") +- cfuncs[method] = fmt.Sprintf(msg, "client") +- default: +- log.Fatalf("impossible direction %q", dir) +- } +-} +- +-func genStructs(model *Model) { +- structures := make(map[string]*Structure) // for expanding Extends +- for _, s := range model.Structures { +- structures[s.Name] = s +- } +- for _, s := range model.Structures { +- out := new(bytes.Buffer) +- generateDoc(out, s.Documentation) +- nm := goName(s.Name) +- if nm == "string" { // an unacceptable strut name +- // a weird case, and needed only so the generated code contains the old gopls code +- nm = "DocumentDiagnosticParams" +- } +- fmt.Fprintf(out, "//\n") +- out.WriteString(lspLink(model, camelCase(s.Name))) +- fmt.Fprintf(out, "type %s struct {%s\n", nm, linex(s.Line)) +- // for gopls compatibility, embed most extensions, but expand the rest some day +- props := slices.Clone(s.Properties) +- if s.Name == "SymbolInformation" { // but expand this one +- for _, ex := 
range s.Extends { +- fmt.Fprintf(out, "\t// extends %s\n", ex.Name) +- props = append(props, structures[ex.Name].Properties...) +- } +- genProps(out, props, nm) +- } else { +- genProps(out, props, nm) +- for _, ex := range s.Extends { +- fmt.Fprintf(out, "\t%s\n", goName(ex.Name)) +- } +- } +- for _, ex := range s.Mixins { +- fmt.Fprintf(out, "\t%s\n", goName(ex.Name)) +- } +- // TODO(hxjiang): clean this up after microsoft/language-server-protocol#377 +- // is fixed and released. +- if nm == "TextDocumentPositionParams" { +- out.WriteString("\t// Range is an optional field representing the user's text selection in the document.\n") +- out.WriteString("\t// If provided, the Position must be contained within this range.\n") +- out.WriteString("\t//\n") +- out.WriteString("\t// Note: This is a non-standard protocol extension. See microsoft/language-server-protocol#377.\n") +- out.WriteString("\tRange Range `json:\"range\"`") +- } +- out.WriteString("}\n") +- types[nm] = out.String() +- } +- +- // base types +- // (For URI and DocumentURI, see ../uri.go.) +- types["LSPAny"] = "type LSPAny = any\n" +- // A special case, the only previously existing Or type +- types["DocumentDiagnosticReport"] = "type DocumentDiagnosticReport = Or_DocumentDiagnosticReport // (alias) \n" +- +-} +- +-// "FooBar" -> "fooBar" +-func camelCase(TitleCased string) string { +- return strings.ToLower(TitleCased[:1]) + TitleCased[1:] +-} +- +-func lspLink(model *Model, fragment string) string { +- // Derive URL version from metaData.version in JSON file. +- parts := strings.Split(model.Version.Version, ".") // e.g. "3.17.0" +- return fmt.Sprintf("// See https://microsoft.github.io/language-server-protocol/specifications/lsp/%s.%s/specification#%s\n", +- parts[0], parts[1], // major.minor +- fragment) +-} +- +-func genProps(out *bytes.Buffer, props []NameType, name string) { +- for _, p := range props { +- tp := goplsName(p.Type) +- if newNm, ok := renameProp[prop{name, p.Name}]; ok { +- usedRenameProp[prop{name, p.Name}] = true +- if tp == newNm { +- log.Printf("renameProp useless {%q, %q} for %s", name, p.Name, tp) +- } +- tp = newNm +- } +- // it's a pointer if it is optional, or for gopls compatibility +- omit, star := propStar(name, p, tp) +- json := fmt.Sprintf(" `json:\"%s\"`", p.Name) +- if omit { +- json = fmt.Sprintf(" `json:\"%s,omitempty\"`", p.Name) +- } +- generateDoc(out, p.Documentation) +- if star { +- fmt.Fprintf(out, "\t%s *%s %s\n", goName(p.Name), tp, json) +- } else { +- fmt.Fprintf(out, "\t%s %s %s\n", goName(p.Name), tp, json) +- } +- } +-} +- +-func genAliases(model *Model) { +- for _, ta := range model.TypeAliases { +- out := new(bytes.Buffer) +- generateDoc(out, ta.Documentation) +- nm := goName(ta.Name) +- if nm != ta.Name { +- continue // renamed the type, e.g., "DocumentDiagnosticReport", an or-type to "string" +- } +- tp := goplsName(ta.Type) +- fmt.Fprintf(out, "//\n") +- out.WriteString(lspLink(model, camelCase(ta.Name))) +- fmt.Fprintf(out, "type %s = %s // (alias)\n", nm, tp) +- types[nm] = out.String() +- } +-} +- +-func genGenTypes() { +- for _, nt := range genTypes { +- out := new(bytes.Buffer) +- nm := goplsName(nt.typ) +- switch nt.kind { +- case "literal": +- fmt.Fprintf(out, "// created for Literal (%s)\n", nt.name) +- fmt.Fprintf(out, "type %s struct {%s\n", nm, linex(nt.line+1)) +- genProps(out, nt.properties, nt.name) // systematic name, not gopls name; is this a good choice? 
+- case "or": +- if !strings.HasPrefix(nm, "Or") { +- // It was replaced by a narrower type defined elsewhere +- continue +- } +- names := []string{} +- for _, t := range nt.items { +- if notNil(t) { +- names = append(names, goplsName(t)) +- } +- } +- sort.Strings(names) +- fmt.Fprintf(out, "// created for Or %v\n", names) +- fmt.Fprintf(out, "type %s struct {%s\n", nm, linex(nt.line+1)) +- fmt.Fprintf(out, "\tValue any `json:\"value\"`\n") +- case "and": +- fmt.Fprintf(out, "// created for And\n") +- fmt.Fprintf(out, "type %s struct {%s\n", nm, linex(nt.line+1)) +- for _, x := range nt.items { +- nm := goplsName(x) +- fmt.Fprintf(out, "\t%s\n", nm) +- } +- case "tuple": // there's only this one +- nt.name = "UIntCommaUInt" +- fmt.Fprintf(out, "//created for Tuple\ntype %s struct {%s\n", nm, linex(nt.line+1)) +- fmt.Fprintf(out, "\tFld0 uint32 `json:\"fld0\"`\n") +- fmt.Fprintf(out, "\tFld1 uint32 `json:\"fld1\"`\n") +- default: +- log.Fatalf("%s not handled", nt.kind) +- } +- out.WriteString("}\n") +- types[nm] = out.String() +- } +-} +-func genConsts(model *Model) { +- for _, e := range model.Enumerations { +- out := new(bytes.Buffer) +- generateDoc(out, e.Documentation) +- tp := goplsName(e.Type) +- nm := goName(e.Name) +- fmt.Fprintf(out, "type %s %s%s\n", nm, tp, linex(e.Line)) +- types[nm] = out.String() +- vals := new(bytes.Buffer) +- generateDoc(vals, e.Documentation) +- for _, v := range e.Values { +- generateDoc(vals, v.Documentation) +- nm := goName(v.Name) +- more, ok := disambiguate[e.Name] +- if ok { +- usedDisambiguate[e.Name] = true +- nm = more.prefix + nm + more.suffix +- nm = goName(nm) // stringType +- } +- var val string +- switch v := v.Value.(type) { +- case string: +- val = fmt.Sprintf("%q", v) +- case float64: +- val = fmt.Sprintf("%d", int(v)) +- default: +- log.Fatalf("impossible type %T", v) +- } +- fmt.Fprintf(vals, "\t%s %s = %s%s\n", nm, e.Name, val, linex(v.Line)) +- } +- consts[nm] = vals.String() +- } +-} +-func genMarshal() { +- for _, nt := range genTypes { +- nm := goplsName(nt.typ) +- if !strings.HasPrefix(nm, "Or") { +- continue +- } +- names := []string{} +- for _, t := range nt.items { +- if notNil(t) { +- names = append(names, goplsName(t)) +- } +- } +- sort.Strings(names) +- var buf bytes.Buffer +- fmt.Fprintf(&buf, "func (t %s) MarshalJSON() ([]byte, error) {\n", nm) +- buf.WriteString("\tswitch x := t.Value.(type){\n") +- for _, nmx := range names { +- fmt.Fprintf(&buf, "\tcase %s:\n", nmx) +- fmt.Fprintf(&buf, "\t\treturn json.Marshal(x)\n") +- } +- buf.WriteString("\tcase nil:\n\t\treturn []byte(\"null\"), nil\n\t}\n") +- fmt.Fprintf(&buf, "\treturn nil, fmt.Errorf(\"type %%T not one of %v\", t)\n", names) +- buf.WriteString("}\n\n") +- +- fmt.Fprintf(&buf, "func (t *%s) UnmarshalJSON(x []byte) error {\n", nm) +- buf.WriteString("\tif string(x) == \"null\" {\n\t\tt.Value = nil\n\t\t\treturn nil\n\t}\n") +- for i, nmx := range names { +- fmt.Fprintf(&buf, "\tvar h%d %s\n", i, nmx) +- fmt.Fprintf(&buf, "\tif err := json.Unmarshal(x, &h%d); err == nil {\n\t\tt.Value = h%d\n\t\t\treturn nil\n\t\t}\n", i, i) +- } +- fmt.Fprintf(&buf, "return &UnmarshalError{\"unmarshal failed to match one of %v\"}", names) +- buf.WriteString("}\n\n") +- jsons[nm] = buf.String() +- } +-} +- +-func linex(n int) string { +- if *lineNumbers { +- return fmt.Sprintf(" // line %d", n) +- } +- return "" +-} +- +-func goplsName(t *Type) string { +- nm := typeNames[t] +- // translate systematic name to gopls name +- if newNm, ok := goplsType[nm]; ok { +- usedGoplsType[nm] = 
true
+- nm = newNm
+- }
+- return nm
+-}
+-
+-func notNil(t *Type) bool { // shutdown is the special case that needs this
+- return t != nil && (t.Kind != "base" || t.Name != "null")
+-}
+-
+-func hasNilValue(t string) bool {
+- // this may be unreliable, and need a supplementary table
+- if strings.HasPrefix(t, "[]") || strings.HasPrefix(t, "*") {
+- return true
+- }
+- if t == "interface{}" || t == "any" {
+- return true
+- }
+- // that's all the cases that occur currently
+- return false
+-}
+diff -urN a/gopls/internal/protocol/generate/README.md b/gopls/internal/protocol/generate/README.md
+--- a/gopls/internal/protocol/generate/README.md 2000-01-01 00:00:00.000000000 -0000
++++ b/gopls/internal/protocol/generate/README.md 1969-12-31 18:00:00.000000000 -0600
+@@ -1,144 +0,0 @@
+-# LSP Support for gopls
+-
+-## The protocol
+-
+-The LSP protocol exchanges json-encoded messages between the client and the server.
+-(gopls is the server.) The messages are either Requests, which require Responses, or
+-Notifications, which generate no response. Each Request or Notification has a method name
+-such as "textDocument/hover" that indicates its meaning and determines which function in the server will handle it.
+-The protocol is described in a
+-[web page](https://microsoft.github.io/language-server-protocol/specifications/lsp/3.18/specification/),
+-in words, and in a json file (metaModel.json) available either linked towards the bottom of the
+-web page, or in the vscode-languageserver-node repository. This code uses the latter so the
+-exact version can be tied to a githash. By default, the command will download the `github.com/microsoft/vscode-languageserver-node` repository to a temporary directory.
+-
+-The specification has five sections:
+-
+-1. Requests, which describe the Request and Response types for request methods (e.g., *textDocument/didChange*),
+-2. Notifications, which describe the Request types for notification methods,
+-3. Structures, which describe named struct-like types,
+-4. TypeAliases, which describe type aliases,
+-5. Enumerations, which describe named constants.
+-
+-Requests and Notifications are tagged with a Method (e.g., `"textDocument/hover"`).
+-The specification does not specify the names of the functions that handle the messages. These
+-names are specified by the `methodNames` map. Enumerations generate Go `const`s, but
+-in Typescript they are scoped to namespaces, while in Go they are scoped to a package, so the Go names
+-may need to be modified to avoid name collisions. (See the `disambiguate` map, and its use.)
+-
+-Finally, the specified types are Typescript types, which are quite different from Go types.
+-
+-### Optionality
+-
+-The specification can mark fields in structs as Optional. The client distinguishes between missing
+-fields and `null` fields in some cases. The Go translation for an optional type
+-should make sure the field's value
+-can be `nil`, and add the json tag `,omitempty`. The former condition would be satisfied by
+-adding `*` to the field's type if the type is not a reference type.
+-
+-### Types
+-
+-The specification uses a number of different types, only a few of which correspond directly to Go types.
+-The specification's types are "base", "reference", "map", "literal", "stringLiteral", "tuple", "and", "or".
+-The "base" types correspond directly to Go types, although some Go types need to be chosen for `URI` and `DocumentUri`. (The "base" types `RegExp`, `BooleanLiteral`, `NumericLiteral` never occur.)
+-
+-"reference" types are the struct-like types in the Structures section of the specification. The given
+-names are suitable for Go to use, except the code needs to change names like `_Initialize` to `XInitialize` so
+-they are exported for json marshaling and unmarshaling.
+-
+-"map" types are just like Go. (The key type in all of them is `DocumentUri`.)
+-
+-"stringLiteral" types are types whose type name and value are a single string. The chosen Go equivalent
+-is to make the type `string` and the value a constant. (The alternative would be to generate a new
+-named type, which seemed redundant.)
+-
+-"literal" types are like Go anonymous structs, so they have to be given a name. (All instances
+-of the remaining types have to be given names. One approach is to construct the name from the components
+-of the type, but this leads to misleading punning, and is unstable if components are added. The other approach
+-is to construct the name from the context of the definition, that is, from the types it is defined within.
+-For instance `Lit__InitializeParams_clientInfo` is the "literal" type at the
+-`clientInfo` field in the `_InitializeParams`
+-struct. Although this choice is sensitive to the ordering of the components, the code uses this approach,
+-presuming that reordering components is an unlikely protocol change.)
+-
+-"tuple" types are generated as Go structs. (There is only one, with two `uint32` fields.)
+-
+-"and" types are Go structs with embedded type names. (There is only one, `And_Param_workspace_configuration`.)
+-
+-"or" types are the most complicated. There are a lot of them and there is no simple Go equivalent.
+-They are defined as structs with a single `Value interface{}` field and custom json marshaling
+-and unmarshaling code. Users can assign anything to `Value` but the type will be checked, and
+-correctly marshaled, by the custom marshaling code. The unmarshaling code checks types, so `Value`
+-will have one of the permitted types. (`nil` is always allowed.) There are about 40 "or" types that
+-have a single non-null component, and these are converted to the component type.
+-
+-## Processing
+-
+-The code parses the json specification file, and scans all the types. It assigns names, as described
+-above, to the types that are unnamed in the specification, and constructs Go equivalents as required.
+-(Most of this code is in typenames.go.)
+-
+-There are four output files. tsclient.go and tsserver.go contain the definition and implementation
+-of the `protocol.Client` and `protocol.Server` types and the code that dispatches on the Method
+-of the Request or Notification. tsjson.go contains the custom marshaling and unmarshaling code.
+-And tsprotocol.go contains the type and const definitions.
+-
+-### Accommodating gopls
+-
+-As the code generates output, mostly in generateoutput.go and main.go,
+-it makes adjustments so that no changes are required to the existing Go code.
+-(Organizing the computation this way makes the code's structure simpler, but results in
+-a lot of unused types.)
+-There are three major classes of these adjustments, and leftover special cases.
+-
+-The first major
+-adjustment is to change generated type names to the ones gopls expects. Some of these don't change the
+-semantics of the type, just the name.
+-But for historical reasons a lot of them replace "or" types by a single
+-component of the type. (Until fairly recently Go only saw or used one of the components.)
+-The `goplsType` map in tables.go controls this process.
+-
+-The second major adjustment is to the types of fields of structs, which is done using the
+-`renameProp` map in tables.go.
+-
+-The third major adjustment handles optionality, controlling `*` and `,omitempty` placement when
+-the default rules don't match what gopls is expecting. (The map is `goplsStar`, also in tables.go.)
+-(If the intermediate components in expressions of the form `A.B.C.S` were optional, the code would need
+-a lot of useless checking for nils. Typescript has a language construct to avoid most checks.)
+-
+-Then there are some additional special cases. There are a few places with adjustments to avoid
+-recursive types. For instance `LSPArray` is `[]LSPAny`, but `LSPAny` is an "or" type including `LSPArray`.
+-The solution is to make `LSPAny` an `interface{}`. Another instance is `_InitializeParams.trace`
+-whose type is an "or" of 3 stringLiterals, which just becomes a `string`.
+-
+-### Checking
+-
+-`TestAll(t *testing.T)` checks that there are no unexpected fields in the json specification.
+-
+-While the code is executing, it checks that all the entries in the maps in tables.go are used.
+-It also checks that the entries in `renameProp` and `goplsStar` are not redundant.
+-
+-As a one-time check on the first release of this code, diff-ing the existing and generated tsclient.go
+-and tsserver.go code results in only whitespace and comment diffs. The existing and generated
+-tsprotocol.go differ in whitespace and comments, and in a substantial number of new type definitions
+-that the older, more heuristic, code did not generate. (And the unused type `_InitializeParams` differs
+-slightly between the new and the old, and is not worth fixing.)
+-
+-### Some history
+-
+-The original stub code was written by hand, but with the protocol under active development, that
+-couldn't last. The web page existed before the json specification, but it lagged the implementation
+-and was hard to process by machine. So the earlier version of the generating code was written in Typescript, and
+-used the Typescript compiler's API to parse the protocol code in the repository.
+-It then used a set of heuristics
+-to pick out the elements of the protocol, and another set of overlapping heuristics to create the Go code.
+-The output was functional, but idiosyncratic, and the code was fragile and barely maintainable.
+-
+-### The future
+-
+-Most of the adjustments using the maps in tables.go could be removed by making changes, mostly to names,
+-in the gopls code. Using more "or" types in gopls requires more elaborate, but stereotyped, changes.
+-But even without all the adjustments, making this its own module would face problems; a number of
+-dependencies would have to be factored out. And it is fragile. The custom unmarshaling code knows what
+-types it expects. A design that returns an 'any' on unexpected types would match the json
+-'ignore unexpected values' philosophy better, but the Go code would need extra checking.
+diff -urN a/gopls/internal/protocol/generate/tables.go b/gopls/internal/protocol/generate/tables.go
+--- a/gopls/internal/protocol/generate/tables.go 2000-01-01 00:00:00.000000000 -0000
++++ b/gopls/internal/protocol/generate/tables.go 1969-12-31 18:00:00.000000000 -0600
+@@ -1,286 +0,0 @@
+-// Copyright 2022 The Go Authors. All rights reserved.
+-// Use of this source code is governed by a BSD-style
+-// license that can be found in the LICENSE file.
+- +-package main +- +-import "log" +- +-// prop combines the name of a property (class.field) with the name of +-// the structure it is in, using LSP field capitalization. +-type prop [2]string +- +-const ( +- nothing = iota +- wantOpt // omitempty +- wantOptStar // omitempty, indirect +-) +- +-// goplsStar records the optionality of each field in the protocol. +-// The comments are vague hints as to why removing the line is not trivial. +-// A.B.C.D means that one of B or C would change to a pointer +-// so a test or initialization would be needed +-var goplsStar = map[prop]int{ +- {"AnnotatedTextEdit", "annotationId"}: wantOptStar, +- {"ClientCapabilities", "textDocument"}: wantOpt, // A.B.C.D at fake/editor.go:255 +- {"ClientCapabilities", "window"}: wantOpt, // test failures +- {"ClientCapabilities", "workspace"}: wantOpt, // test failures +- {"CodeAction", "kind"}: wantOpt, // A.B.C.D +- +- {"CodeActionClientCapabilities", "codeActionLiteralSupport"}: wantOpt, // test failures +- +- {"CompletionClientCapabilities", "completionItem"}: wantOpt, // A.B.C.D +- {"CompletionClientCapabilities", "insertTextMode"}: wantOpt, // A.B.C.D +- {"CompletionItem", "kind"}: wantOpt, // need temporary variables +- {"CompletionParams", "context"}: wantOpt, // needs nil checks +- +- {"Diagnostic", "severity"}: wantOpt, // needs nil checks or more careful thought +- {"DidSaveTextDocumentParams", "text"}: wantOptStar, // capabilities_test.go:112 logic +- {"DocumentHighlight", "kind"}: wantOpt, // need temporary variables +- +- {"FoldingRange", "startLine"}: wantOptStar, // unset != zero (#71489) +- {"FoldingRange", "startCharacter"}: wantOptStar, // unset != zero (#71489) +- {"FoldingRange", "endLine"}: wantOptStar, // unset != zero (#71489) +- {"FoldingRange", "endCharacter"}: wantOptStar, // unset != zero (#71489) +- +- {"Hover", "range"}: wantOpt, // complex expressions +- {"InlayHint", "kind"}: wantOpt, // temporary variables +- +- {"PublishDiagnosticsParams", "version"}: wantOpt, // zero => missing (#73501) +- {"SignatureHelp", "activeParameter"}: wantOptStar, // unset != zero +- {"SignatureInformation", "activeParameter"}: wantOptStar, // unset != zero +- {"TextDocumentClientCapabilities", "codeAction"}: wantOpt, // A.B.C.D +- {"TextDocumentClientCapabilities", "completion"}: wantOpt, // A.B.C.D +- {"TextDocumentClientCapabilities", "documentSymbol"}: wantOpt, // A.B.C.D +- {"TextDocumentClientCapabilities", "publishDiagnostics"}: wantOpt, // A.B.C.D +- {"TextDocumentClientCapabilities", "semanticTokens"}: wantOpt, // A.B.C.D +- {"TextDocumentContentChangePartial", "range"}: wantOptStar, // == nil test +- {"TextDocumentContentChangePartial", "rangeLength"}: wantOptStar, // unset != zero +- {"TextDocumentSyncOptions", "change"}: wantOpt, // &constant +- {"WorkDoneProgressBegin", "percentage"}: wantOptStar, // unset != zero +- {"WorkDoneProgressParams", "workDoneToken"}: wantOpt, // test failures +- {"WorkDoneProgressReport", "percentage"}: wantOptStar, // unset != zero +- {"WorkspaceClientCapabilities", "didChangeConfiguration"}: wantOpt, // A.B.C.D +- {"WorkspaceClientCapabilities", "didChangeWatchedFiles"}: wantOpt, // A.B.C.D +-} +- +-// keep track of which entries in goplsStar are used +-var usedGoplsStar = make(map[prop]bool) +- +-// For gopls compatibility, use a different, typically more restrictive, type for some fields. 
+-var renameProp = map[prop]string{ +- {"CancelParams", "id"}: "any", +- {"Command", "arguments"}: "[]json.RawMessage", +- {"CodeAction", "data"}: "json.RawMessage", // delay unmarshalling commands +- {"Diagnostic", "code"}: "any", +- {"Diagnostic", "data"}: "json.RawMessage", // delay unmarshalling quickfixes +- +- {"DocumentDiagnosticReportPartialResult", "relatedDocuments"}: "map[DocumentURI]any", +- +- {"ExecuteCommandParams", "arguments"}: "[]json.RawMessage", +- {"FoldingRange", "kind"}: "string", +- {"Hover", "contents"}: "MarkupContent", +- {"InlayHint", "label"}: "[]InlayHintLabelPart", +- +- {"RelatedFullDocumentDiagnosticReport", "relatedDocuments"}: "map[DocumentURI]any", +- {"RelatedUnchangedDocumentDiagnosticReport", "relatedDocuments"}: "map[DocumentURI]any", +- +- // PJW: this one is tricky. +- {"ServerCapabilities", "codeActionProvider"}: "any", +- +- {"ServerCapabilities", "inlayHintProvider"}: "any", +- // slightly tricky +- {"ServerCapabilities", "renameProvider"}: "any", +- // slightly tricky +- {"ServerCapabilities", "semanticTokensProvider"}: "any", +- // slightly tricky +- {"ServerCapabilities", "textDocumentSync"}: "any", +- {"TextDocumentSyncOptions", "save"}: "SaveOptions", +- {"WorkspaceEdit", "documentChanges"}: "[]DocumentChange", +-} +- +-// which entries of renameProp were used +-var usedRenameProp = make(map[prop]bool) +- +-type adjust struct { +- prefix, suffix string +-} +- +-// disambiguate specifies prefixes or suffixes to add to all values of +-// some enum types to avoid name conflicts +-var disambiguate = map[string]adjust{ +- "CodeActionTriggerKind": {"CodeAction", ""}, +- "CompletionItemKind": {"", "Completion"}, +- "CompletionItemTag": {"Compl", ""}, +- "DiagnosticSeverity": {"Severity", ""}, +- "DocumentDiagnosticReportKind": {"Diagnostic", ""}, +- "FileOperationPatternKind": {"", "Pattern"}, +- "InlineCompletionTriggerKind": {"Inline", ""}, +- "InsertTextFormat": {"", "TextFormat"}, +- "LanguageKind": {"Lang", ""}, +- "SemanticTokenModifiers": {"Mod", ""}, +- "SemanticTokenTypes": {"", "Type"}, +- "SignatureHelpTriggerKind": {"Sig", ""}, +- "SymbolTag": {"", "Symbol"}, +- "WatchKind": {"Watch", ""}, +-} +- +-// which entries of disambiguate got used +-var usedDisambiguate = make(map[string]bool) +- +-// for gopls compatibility, replace generated type names with existing ones +-var goplsType = map[string]string{ +- "And_RegOpt_textDocument_colorPresentation": "WorkDoneProgressOptionsAndTextDocumentRegistrationOptions", +- "ConfigurationParams": "ParamConfiguration", +- "DocumentUri": "DocumentURI", +- "InitializeParams": "ParamInitialize", +- "LSPAny": "any", +- +- "Lit_SemanticTokensOptions_range_Item1": "PRangeESemanticTokensOptions", +- +- "Or_Declaration": "[]Location", +- "Or_DidChangeConfigurationRegistrationOptions_section": "OrPSection_workspace_didChangeConfiguration", +- "Or_InlayHintLabelPart_tooltip": "OrPTooltipPLabel", +- "Or_InlayHint_tooltip": "OrPTooltip_textDocument_inlayHint", +- "Or_LSPAny": "any", +- +- "Or_ParameterInformation_documentation": "string", +- "Or_ParameterInformation_label": "string", +- "Or_PrepareRenameResult": "PrepareRenamePlaceholder", +- "Or_ProgressToken": "any", +- "Or_Result_textDocument_completion": "CompletionList", +- "Or_Result_textDocument_declaration": "Or_textDocument_declaration", +- "Or_Result_textDocument_definition": "[]Location", +- "Or_Result_textDocument_documentSymbol": "[]any", +- "Or_Result_textDocument_implementation": "[]Location", +- 
"Or_Result_textDocument_semanticTokens_full_delta": "any", +- "Or_Result_textDocument_typeDefinition": "[]Location", +- "Or_Result_workspace_symbol": "[]SymbolInformation", +- "Or_TextDocumentContentChangeEvent": "TextDocumentContentChangePartial", +- "Or_RelativePattern_baseUri": "DocumentURI", +- +- "Or_WorkspaceFoldersServerCapabilities_changeNotifications": "string", +- "Or_WorkspaceSymbol_location": "OrPLocation_workspace_symbol", +- +- "Tuple_ParameterInformation_label_Item1": "UIntCommaUInt", +- "WorkspaceFoldersServerCapabilities": "WorkspaceFolders5Gn", +- "[]LSPAny": "[]any", +- +- "[]Or_Result_textDocument_codeAction_Item0_Elem": "[]CodeAction", +- "[]PreviousResultId": "[]PreviousResultID", +- "[]uinteger": "[]uint32", +- "boolean": "bool", +- "decimal": "float64", +- "integer": "int32", +- "map[DocumentUri][]TextEdit": "map[DocumentURI][]TextEdit", +- "uinteger": "uint32", +-} +- +-var usedGoplsType = make(map[string]bool) +- +-// methodNames is a map from the method to the name of the function that handles it +-var methodNames = map[string]string{ +- "$/cancelRequest": "CancelRequest", +- "$/logTrace": "LogTrace", +- "$/progress": "Progress", +- "$/setTrace": "SetTrace", +- "callHierarchy/incomingCalls": "IncomingCalls", +- "callHierarchy/outgoingCalls": "OutgoingCalls", +- "client/registerCapability": "RegisterCapability", +- "client/unregisterCapability": "UnregisterCapability", +- "codeAction/resolve": "ResolveCodeAction", +- "codeLens/resolve": "ResolveCodeLens", +- "completionItem/resolve": "ResolveCompletionItem", +- "documentLink/resolve": "ResolveDocumentLink", +- "exit": "Exit", +- "initialize": "Initialize", +- "initialized": "Initialized", +- "inlayHint/resolve": "Resolve", +- "notebookDocument/didChange": "DidChangeNotebookDocument", +- "notebookDocument/didClose": "DidCloseNotebookDocument", +- "notebookDocument/didOpen": "DidOpenNotebookDocument", +- "notebookDocument/didSave": "DidSaveNotebookDocument", +- "shutdown": "Shutdown", +- "telemetry/event": "Event", +- "textDocument/codeAction": "CodeAction", +- "textDocument/codeLens": "CodeLens", +- "textDocument/colorPresentation": "ColorPresentation", +- "textDocument/completion": "Completion", +- "textDocument/declaration": "Declaration", +- "textDocument/definition": "Definition", +- "textDocument/diagnostic": "Diagnostic", +- "textDocument/didChange": "DidChange", +- "textDocument/didClose": "DidClose", +- "textDocument/didOpen": "DidOpen", +- "textDocument/didSave": "DidSave", +- "textDocument/documentColor": "DocumentColor", +- "textDocument/documentHighlight": "DocumentHighlight", +- "textDocument/documentLink": "DocumentLink", +- "textDocument/documentSymbol": "DocumentSymbol", +- "textDocument/foldingRange": "FoldingRange", +- "textDocument/formatting": "Formatting", +- "textDocument/hover": "Hover", +- "textDocument/implementation": "Implementation", +- "textDocument/inlayHint": "InlayHint", +- "textDocument/inlineCompletion": "InlineCompletion", +- "textDocument/inlineValue": "InlineValue", +- "textDocument/linkedEditingRange": "LinkedEditingRange", +- "textDocument/moniker": "Moniker", +- "textDocument/onTypeFormatting": "OnTypeFormatting", +- "textDocument/prepareCallHierarchy": "PrepareCallHierarchy", +- "textDocument/prepareRename": "PrepareRename", +- "textDocument/prepareTypeHierarchy": "PrepareTypeHierarchy", +- "textDocument/publishDiagnostics": "PublishDiagnostics", +- "textDocument/rangeFormatting": "RangeFormatting", +- "textDocument/rangesFormatting": "RangesFormatting", +- 
"textDocument/references": "References", +- "textDocument/rename": "Rename", +- "textDocument/selectionRange": "SelectionRange", +- "textDocument/semanticTokens/full": "SemanticTokensFull", +- "textDocument/semanticTokens/full/delta": "SemanticTokensFullDelta", +- "textDocument/semanticTokens/range": "SemanticTokensRange", +- "textDocument/signatureHelp": "SignatureHelp", +- "textDocument/typeDefinition": "TypeDefinition", +- "textDocument/willSave": "WillSave", +- "textDocument/willSaveWaitUntil": "WillSaveWaitUntil", +- "typeHierarchy/subtypes": "Subtypes", +- "typeHierarchy/supertypes": "Supertypes", +- "window/logMessage": "LogMessage", +- "window/showDocument": "ShowDocument", +- "window/showMessage": "ShowMessage", +- "window/showMessageRequest": "ShowMessageRequest", +- "window/workDoneProgress/cancel": "WorkDoneProgressCancel", +- "window/workDoneProgress/create": "WorkDoneProgressCreate", +- "workspace/applyEdit": "ApplyEdit", +- "workspace/codeLens/refresh": "CodeLensRefresh", +- "workspace/configuration": "Configuration", +- "workspace/diagnostic": "DiagnosticWorkspace", +- "workspace/diagnostic/refresh": "DiagnosticRefresh", +- "workspace/didChangeConfiguration": "DidChangeConfiguration", +- "workspace/didChangeWatchedFiles": "DidChangeWatchedFiles", +- "workspace/didChangeWorkspaceFolders": "DidChangeWorkspaceFolders", +- "workspace/didCreateFiles": "DidCreateFiles", +- "workspace/didDeleteFiles": "DidDeleteFiles", +- "workspace/didRenameFiles": "DidRenameFiles", +- "workspace/executeCommand": "ExecuteCommand", +- "workspace/foldingRange/refresh": "FoldingRangeRefresh", +- "workspace/inlayHint/refresh": "InlayHintRefresh", +- "workspace/inlineValue/refresh": "InlineValueRefresh", +- "workspace/semanticTokens/refresh": "SemanticTokensRefresh", +- "workspace/symbol": "Symbol", +- "workspace/textDocumentContent": "TextDocumentContent", +- "workspace/textDocumentContent/refresh": "TextDocumentContentRefresh", +- "workspace/willCreateFiles": "WillCreateFiles", +- "workspace/willDeleteFiles": "WillDeleteFiles", +- "workspace/willRenameFiles": "WillRenameFiles", +- "workspace/workspaceFolders": "WorkspaceFolders", +- "workspaceSymbol/resolve": "ResolveWorkspaceSymbol", +-} +- +-func methodName(method string) string { +- ans := methodNames[method] +- if ans == "" { +- log.Fatalf("unknown method %q", method) +- } +- return ans +-} +diff -urN a/gopls/internal/protocol/generate/typenames.go b/gopls/internal/protocol/generate/typenames.go +--- a/gopls/internal/protocol/generate/typenames.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/protocol/generate/typenames.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,181 +0,0 @@ +-// Copyright 2022 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. 
+- +-package main +- +-import ( +- "fmt" +- "log" +- "strings" +-) +- +-var typeNames = make(map[*Type]string) +-var genTypes []*newType +- +-func findTypeNames(model *Model) { +- for _, s := range model.Structures { +- for _, e := range s.Extends { +- nameType(e, nil) // all references +- } +- for _, m := range s.Mixins { +- nameType(m, nil) // all references +- } +- for _, p := range s.Properties { +- nameType(p.Type, []string{s.Name, p.Name}) +- } +- } +- for _, t := range model.Enumerations { +- nameType(t.Type, []string{t.Name}) +- } +- for _, t := range model.TypeAliases { +- nameType(t.Type, []string{t.Name}) +- } +- for _, r := range model.Requests { +- nameType(r.Params, []string{"Param", r.Method}) +- nameType(r.Result, []string{"Result", r.Method}) +- nameType(r.RegistrationOptions, []string{"RegOpt", r.Method}) +- } +- for _, n := range model.Notifications { +- nameType(n.Params, []string{"Param", n.Method}) +- nameType(n.RegistrationOptions, []string{"RegOpt", n.Method}) +- } +-} +- +-// nameType populates typeNames[t] with the computed name of the type. +-// path is the list of enclosing constructs in the JSON model. +-func nameType(t *Type, path []string) string { +- if t == nil || typeNames[t] != "" { +- return "" +- } +- switch t.Kind { +- case "base": +- typeNames[t] = t.Name +- return t.Name +- case "reference": +- typeNames[t] = t.Name +- return t.Name +- case "array": +- nm := "[]" + nameType(t.Element, append(path, "Elem")) +- typeNames[t] = nm +- return nm +- case "map": +- key := nameType(t.Key, nil) // never a generated type +- value := nameType(t.Value.(*Type), append(path, "Value")) +- nm := "map[" + key + "]" + value +- typeNames[t] = nm +- return nm +- // generated types +- case "and": +- nm := nameFromPath("And", path) +- typeNames[t] = nm +- for _, it := range t.Items { +- nameType(it, append(path, "Item")) +- } +- genTypes = append(genTypes, &newType{ +- name: nm, +- typ: t, +- kind: "and", +- items: t.Items, +- line: t.Line, +- }) +- return nm +- case "literal": +- nm := nameFromPath("Lit", path) +- typeNames[t] = nm +- for _, p := range t.Value.(ParseLiteral).Properties { +- nameType(p.Type, append(path, p.Name)) +- } +- genTypes = append(genTypes, &newType{ +- name: nm, +- typ: t, +- kind: "literal", +- properties: t.Value.(ParseLiteral).Properties, +- line: t.Line, +- }) +- return nm +- case "tuple": +- nm := nameFromPath("Tuple", path) +- typeNames[t] = nm +- for _, it := range t.Items { +- nameType(it, append(path, "Item")) +- } +- genTypes = append(genTypes, &newType{ +- name: nm, +- typ: t, +- kind: "tuple", +- items: t.Items, +- line: t.Line, +- }) +- return nm +- case "or": +- nm := nameFromPath("Or", path) +- typeNames[t] = nm +- for i, it := range t.Items { +- // these names depend on the ordering within the "or" type +- nameType(it, append(path, fmt.Sprintf("Item%d", i))) +- } +- // this code handles an "or" of stringLiterals (_InitializeParams.trace) +- names := make(map[string]int) +- var msg strings.Builder +- for _, it := range t.Items { +- if line, ok := names[typeNames[it]]; ok { +- // duplicate component names are bad +- fmt.Fprintf(&msg, "lines %d %d dup, %s for %s\n", line, it.Line, typeNames[it], nm) +- } +- names[typeNames[it]] = t.Line +- } +- // this code handles an "or" of stringLiterals (_InitializeParams.trace) +- if len(names) == 1 { +- var solekey string +- for k := range names { +- solekey = k // the sole name +- } +- if solekey == "string" { // _InitializeParams.trace +- typeNames[t] = "string" +- return "string" +- } +- // 
otherwise unexpected +- log.Printf("unexpected: single-case 'or' type has non-string key %s: %s", nm, solekey) +- log.Fatal(msg.String()) +- } else if len(names) == 2 { +- // if one of the names is null, just use the other, rather than generating an "or". +- // This removes about 40 types from the generated code. An entry in goplsStar +- // could be added to handle the null case, if necessary. +- newNm := "" +- sawNull := false +- for k := range names { +- if k == "null" { +- sawNull = true +- } else { +- newNm = k +- } +- } +- if sawNull { +- typeNames[t] = newNm +- return newNm +- } +- } +- genTypes = append(genTypes, &newType{ +- name: nm, +- typ: t, +- kind: "or", +- items: t.Items, +- line: t.Line, +- }) +- return nm +- case "stringLiteral": // a single type, like 'kind' or 'rename' +- typeNames[t] = "string" +- return "string" +- default: +- log.Fatalf("nameType: %T unexpected, line:%d path:%v", t, t.Line, path) +- panic("unreachable in nameType") +- } +-} +- +-func nameFromPath(prefix string, path []string) string { +- nm := prefix + "_" + strings.Join(path, "_") +- // methods have slashes +- nm = strings.ReplaceAll(nm, "/", "_") +- return nm +-} +diff -urN a/gopls/internal/protocol/generate/types.go b/gopls/internal/protocol/generate/types.go +--- a/gopls/internal/protocol/generate/types.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/protocol/generate/types.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,167 +0,0 @@ +-// Copyright 2022 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package main +- +-import ( +- "fmt" +- "sort" +-) +- +-// Model contains the parsed version of the spec +-type Model struct { +- Version Metadata `json:"metaData"` +- Requests []*Request `json:"requests"` +- Notifications []*Notification `json:"notifications"` +- Structures []*Structure `json:"structures"` +- Enumerations []*Enumeration `json:"enumerations"` +- TypeAliases []*TypeAlias `json:"typeAliases"` +- Line int `json:"line"` +-} +- +-// Metadata is information about the version of the spec +-type Metadata struct { +- Version string `json:"version"` +- Line int `json:"line"` +-} +- +-// A Request is the parsed version of an LSP request +-type Request struct { +- Documentation string `json:"documentation"` +- ErrorData *Type `json:"errorData"` +- Direction string `json:"messageDirection"` +- Method string `json:"method"` +- Params *Type `json:"params"` +- PartialResult *Type `json:"partialResult"` +- Proposed bool `json:"proposed"` +- RegistrationMethod string `json:"registrationMethod"` +- RegistrationOptions *Type `json:"registrationOptions"` +- Result *Type `json:"result"` +- Since string `json:"since"` +- Line int `json:"line"` +-} +- +-// A Notificatin is the parsed version of an LSP notification +-type Notification struct { +- Documentation string `json:"documentation"` +- Direction string `json:"messageDirection"` +- Method string `json:"method"` +- Params *Type `json:"params"` +- Proposed bool `json:"proposed"` +- RegistrationMethod string `json:"registrationMethod"` +- RegistrationOptions *Type `json:"registrationOptions"` +- Since string `json:"since"` +- Line int `json:"line"` +-} +- +-// A Structure is the parsed version of an LSP structure from the spec +-type Structure struct { +- Documentation string `json:"documentation"` +- Extends []*Type `json:"extends"` +- Mixins []*Type `json:"mixins"` +- Name string `json:"name"` +- Properties []NameType `json:"properties"` +- 
Proposed bool `json:"proposed"` +- Since string `json:"since"` +- Line int `json:"line"` +-} +- +-// An enumeration is the parsed version of an LSP enumeration from the spec +-type Enumeration struct { +- Documentation string `json:"documentation"` +- Name string `json:"name"` +- Proposed bool `json:"proposed"` +- Since string `json:"since"` +- SupportsCustomValues bool `json:"supportsCustomValues"` +- Type *Type `json:"type"` +- Values []NameValue `json:"values"` +- Line int `json:"line"` +-} +- +-// A TypeAlias is the parsed version of an LSP type alias from the spec +-type TypeAlias struct { +- Documentation string `json:"documentation"` +- Deprecated string `json:"deprecated"` +- Name string `json:"name"` +- Proposed bool `json:"proposed"` +- Since string `json:"since"` +- Type *Type `json:"type"` +- Line int `json:"line"` +-} +- +-// A NameValue describes an enumeration constant +-type NameValue struct { +- Documentation string `json:"documentation"` +- Name string `json:"name"` +- Proposed bool `json:"proposed"` +- Since string `json:"since"` +- Value any `json:"value"` // number or string +- Line int `json:"line"` +-} +- +-// A Type is the parsed version of an LSP type from the spec, +-// or a Type the code constructs +-type Type struct { +- Kind string `json:"kind"` // -- which kind goes with which field -- +- Items []*Type `json:"items"` // "and", "or", "tuple" +- Element *Type `json:"element"` // "array" +- Name string `json:"name"` // "base", "reference" +- Key *Type `json:"key"` // "map" +- Value any `json:"value"` // "map", "stringLiteral", "literal" +- Line int `json:"line"` // JSON source line +-} +- +-// ParseLiteral is Type.Value when Type.Kind is "literal" +-type ParseLiteral struct { +- Properties `json:"properties"` +-} +- +-// A NameType represents the name and type of a structure element +-type NameType struct { +- Name string `json:"name"` +- Type *Type `json:"type"` +- Optional bool `json:"optional"` +- Documentation string `json:"documentation"` +- Deprecated string `json:"deprecated"` +- Since string `json:"since"` +- Proposed bool `json:"proposed"` +- Line int `json:"line"` +-} +- +-// Properties are the collection of structure fields +-type Properties []NameType +- +-// addLineNumbers adds a "line" field to each object in the JSON. +-func addLineNumbers(buf []byte) []byte { +- var ans []byte +- // In the specification .json file, the delimiter '{' is +- // always followed by a newline. There are other {s embedded in strings. +- // json.Token does not return \n, or :, or , so using it would +- // require parsing the json to reconstruct the missing information. +- for linecnt, i := 1, 0; i < len(buf); i++ { +- ans = append(ans, buf[i]) +- switch buf[i] { +- case '{': +- if buf[i+1] == '\n' { +- ans = append(ans, fmt.Sprintf(`"line": %d, `, linecnt)...) +- // warning: this would fail if the spec file had +- // `"value": {\n}`, but it does not, as comma is a separator. +- } +- case '\n': +- linecnt++ +- } +- } +- return ans +-} +- +-type sortedMap[T any] map[string]T +- +-func (s sortedMap[T]) keys() []string { +- var keys []string +- for k := range s { +- keys = append(keys, k) +- } +- sort.Strings(keys) +- return keys +-} +diff -urN a/gopls/internal/protocol/json_test.go b/gopls/internal/protocol/json_test.go +--- a/gopls/internal/protocol/json_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/protocol/json_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,134 +0,0 @@ +-// Copyright 2021 The Go Authors. All rights reserved. 
+-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package protocol_test +- +-import ( +- "encoding/json" +- "fmt" +- "regexp" +- "strings" +- "testing" +- +- "github.com/google/go-cmp/cmp" +- "golang.org/x/tools/gopls/internal/protocol" +-) +- +-// verify that type errors in Initialize lsp messages don't cause +-// any other unmarshalling errors. The code looks at single values and the +-// first component of array values. Each occurrence is replaced by something +-// of a different type, the resulting string unmarshalled, and compared to +-// the unmarshalling of the unchanged strings. The test passes if there is no +-// more than a single difference reported. That is, if changing a single value +-// in the message changes no more than a single value in the unmarshalled struct, +-// it is safe to ignore *json.UnmarshalTypeError. +- +-// strings are changed to numbers or bools (true) +-// bools are changed to numbers or strings +-// numbers are changed to strings or bools +- +-// a recent Initialize message taken from a log (at some point +-// some field incompatibly changed from bool to int32) +-const input = `{"processId":46408,"clientInfo":{"name":"Visual Studio Code - Insiders","version":"1.76.0-insider"},"locale":"en-us","rootPath":"/Users/pjw/hakim","rootUri":"file:///Users/pjw/hakim","capabilities":{"workspace":{"applyEdit":true,"workspaceEdit":{"documentChanges":true,"resourceOperations":["create","rename","delete"],"failureHandling":"textOnlyTransactional","normalizesLineEndings":true,"changeAnnotationSupport":{"groupsOnLabel":true}},"configuration":true,"didChangeWatchedFiles":{"dynamicRegistration":true,"relativePatternSupport":true},"symbol":{"dynamicRegistration":true,"symbolKind":{"valueSet":[1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26]},"tagSupport":{"valueSet":[1]},"resolveSupport":{"properties":["location.range"]}},"codeLens":{"refreshSupport":true},"executeCommand":{"dynamicRegistration":true},"didChangeConfiguration":{"dynamicRegistration":true},"workspaceFolders":true,"semanticTokens":{"refreshSupport":true},"fileOperations":{"dynamicRegistration":true,"didCreate":true,"didRename":true,"didDelete":true,"willCreate":true,"willRename":true,"willDelete":true},"inlineValue":{"refreshSupport":true},"inlayHint":{"refreshSupport":true},"diagnostics":{"refreshSupport":true}},"textDocument":{"publishDiagnostics":{"relatedInformation":true,"versionSupport":false,"tagSupport":{"valueSet":[1,2]},"codeDescriptionSupport":true,"dataSupport":true},"synchronization":{"dynamicRegistration":true,"willSave":true,"willSaveWaitUntil":true,"didSave":true},"completion":{"dynamicRegistration":true,"contextSupport":true,"completionItem":{"snippetSupport":true,"commitCharactersSupport":true,"documentationFormat":["markdown","plaintext"],"deprecatedSupport":true,"preselectSupport":true,"tagSupport":{"valueSet":[1]},"insertReplaceSupport":true,"resolveSupport":{"properties":["documentation","detail","additionalTextEdits"]},"insertTextModeSupport":{"valueSet":[1,2]},"labelDetailsSupport":true},"insertTextMode":2,"completionItemKind":{"valueSet":[1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25]},"completionList":{"itemDefaults":["commitCharacters","editRange","insertTextFormat","insertTextMode"]}},"hover":{"dynamicRegistration":true,"contentFormat":["markdown","plaintext"]},"signatureHelp":{"dynamicRegistration":true,"signatureInformation":{"documentationFormat":["markdown","plaintext"],"paramete
rInformation":{"labelOffsetSupport":true},"activeParameterSupport":true},"contextSupport":true},"definition":{"dynamicRegistration":true,"linkSupport":true},"references":{"dynamicRegistration":true},"documentHighlight":{"dynamicRegistration":true},"documentSymbol":{"dynamicRegistration":true,"symbolKind":{"valueSet":[1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26]},"hierarchicalDocumentSymbolSupport":true,"tagSupport":{"valueSet":[1]},"labelSupport":true},"codeAction":{"dynamicRegistration":true,"isPreferredSupport":true,"disabledSupport":true,"dataSupport":true,"resolveSupport":{"properties":["edit"]},"codeActionLiteralSupport":{"codeActionKind":{"valueSet":["","quickfix","refactor","refactor.extract","refactor.inline","refactor.rewrite","source","source.organizeImports"]}},"honorsChangeAnnotations":false},"codeLens":{"dynamicRegistration":true},"formatting":{"dynamicRegistration":true},"rangeFormatting":{"dynamicRegistration":true},"onTypeFormatting":{"dynamicRegistration":true},"rename":{"dynamicRegistration":true,"prepareSupport":true,"prepareSupportDefaultBehavior":1,"honorsChangeAnnotations":true},"documentLink":{"dynamicRegistration":true,"tooltipSupport":true},"typeDefinition":{"dynamicRegistration":true,"linkSupport":true},"implementation":{"dynamicRegistration":true,"linkSupport":true},"colorProvider":{"dynamicRegistration":true},"foldingRange":{"dynamicRegistration":true,"rangeLimit":5000,"lineFoldingOnly":true,"foldingRangeKind":{"valueSet":["comment","imports","region"]},"foldingRange":{"collapsedText":false}},"declaration":{"dynamicRegistration":true,"linkSupport":true},"selectionRange":{"dynamicRegistration":true},"callHierarchy":{"dynamicRegistration":true},"semanticTokens":{"dynamicRegistration":true,"tokenTypes":["namespace","type","class","enum","interface","struct","typeParameter","parameter","variable","property","enumMember","event","function","method","macro","keyword","modifier","comment","string","number","regexp","operator","decorator"],"tokenModifiers":["declaration","definition","readonly","static","deprecated","abstract","async","modification","documentation","defaultLibrary"],"formats":["relative"],"requests":{"range":true,"full":{"delta":true}},"multilineTokenSupport":false,"overlappingTokenSupport":false,"serverCancelSupport":true,"augmentsSyntaxTokens":true},"linkedEditingRange":{"dynamicRegistration":true},"typeHierarchy":{"dynamicRegistration":true},"inlineValue":{"dynamicRegistration":true},"inlayHint":{"dynamicRegistration":true,"resolveSupport":{"properties":["tooltip","textEdits","label.tooltip","label.location","label.command"]}},"diagnostic":{"dynamicRegistration":true,"relatedDocumentSupport":false}},"window":{"showMessage":{"messageActionItem":{"additionalPropertiesSupport":true}},"showDocument":{"support":true},"workDoneProgress":true},"general":{"staleRequestSupport":{"cancel":true,"retryOnContentModified":["textDocument/semanticTokens/full","textDocument/semanticTokens/range","textDocument/semanticTokens/full/delta"]},"regularExpressions":{"engine":"ECMAScript","version":"ES2020"},"markdown":{"parser":"marked","version":"1.1.0"},"positionEncodings":["utf-16"]},"notebookDocument":{"synchronization":{"dynamicRegistration":true,"executionSummarySupport":true}}},"initializationOptions":{"usePlaceholders":true,"completionDocumentation":true,"verboseOutput":false,"build.directoryFilters":["-foof","-internal/protocol/typescript"],"codelenses":{"reference":true,"gc_details":true},"analyses":{"fillstruct":true,"staticcheck":true,"un
usedparams":false,"composites":false},"semanticTokens":true,"noSemanticString":true,"noSemanticNumber":true,"templateExtensions":["tmpl","gotmpl"],"ui.completion.matcher":"Fuzzy","ui.inlayhint.hints":{"assignVariableTypes":false,"compositeLiteralFields":false,"compositeLiteralTypes":false,"constantValues":false,"functionTypeParameters":false,"parameterNames":false,"rangeVariableTypes":false},"ui.vulncheck":"Off","allExperiments":true},"trace":"off","workspaceFolders":[{"uri":"file:///Users/pjw/hakim","name":"hakim"}]}` +- +-type DiffReporter struct { +- path cmp.Path +- diffs []string +-} +- +-func (r *DiffReporter) PushStep(ps cmp.PathStep) { +- r.path = append(r.path, ps) +-} +- +-func (r *DiffReporter) Report(rs cmp.Result) { +- if !rs.Equal() { +- vx, vy := r.path.Last().Values() +- r.diffs = append(r.diffs, fmt.Sprintf("%#v:\n\t-: %+v\n\t+: %+v\n", r.path, vx, vy)) +- } +-} +- +-func (r *DiffReporter) PopStep() { +- r.path = r.path[:len(r.path)-1] +-} +- +-func (r *DiffReporter) String() string { +- return strings.Join(r.diffs, "\n") +-} +- +-func TestStringChanges(t *testing.T) { +- // string as value +- stringLeaf := regexp.MustCompile(`:("[^"]*")`) +- leafs := stringLeaf.FindAllStringSubmatchIndex(input, -1) +- allDeltas(t, leafs, "23", "true") +- // string as first element of array +- stringArray := regexp.MustCompile(`[[]("[^"]*")`) +- arrays := stringArray.FindAllStringSubmatchIndex(input, -1) +- allDeltas(t, arrays, "23", "true") +-} +- +-func TestBoolChanges(t *testing.T) { +- boolLeaf := regexp.MustCompile(`:(true|false)(,|})`) +- leafs := boolLeaf.FindAllStringSubmatchIndex(input, -1) +- allDeltas(t, leafs, "23", `"xx"`) +- boolArray := regexp.MustCompile(`:[[](true|false)(,|])`) +- arrays := boolArray.FindAllStringSubmatchIndex(input, -1) +- allDeltas(t, arrays, "23", `"xx"`) +-} +- +-func TestNumberChanges(t *testing.T) { +- numLeaf := regexp.MustCompile(`:(\d+)(,|})`) +- leafs := numLeaf.FindAllStringSubmatchIndex(input, -1) +- allDeltas(t, leafs, "true", `"xx"`) +- numArray := regexp.MustCompile(`:[[](\d+)(,|])`) +- arrays := numArray.FindAllStringSubmatchIndex(input, -1) +- allDeltas(t, arrays, "true", `"xx"`) +-} +- +-// v is a set of matches. 
check that substituting any repl never +-// creates more than 1 unmarshaling error +-func allDeltas(t *testing.T, v [][]int, repls ...string) { +- t.Helper() +- for _, repl := range repls { +- for i, x := range v { +- err := tryChange(x[2], x[3], repl) +- if err != nil { +- t.Errorf("%d:%q %v", i, input[x[2]:x[3]], err) +- } +- } +- } +-} +- +-func tryChange(start, end int, repl string) error { +- var p, q protocol.ParamInitialize +- mod := input[:start] + repl + input[end:] +- excerpt := func() (string, string) { +- a := max(start-5, 0) +- // trusting repl to be no longer than what it replaces +- b := min(end+5, len(input)) +- ma := input[a:b] +- mb := mod[a:b] +- return ma, mb +- } +- +- if err := json.Unmarshal([]byte(input), &p); err != nil { +- return fmt.Errorf("%s %v", repl, err) +- } +- switch err := json.Unmarshal([]byte(mod), &q).(type) { +- case nil: //ok +- case *json.UnmarshalTypeError: +- break +- case *protocol.UnmarshalError: +- return nil // cmp.Diff produces several diffs for custom unmrshalers +- default: +- return fmt.Errorf("%T unexpected unmarshal error", err) +- } +- +- var r DiffReporter +- cmp.Diff(p, q, cmp.Reporter(&r)) +- if len(r.diffs) > 1 { // 0 is possible, e.g., for interface{} +- ma, mb := excerpt() +- return fmt.Errorf("got %d diffs for %q\n%s\n%s", len(r.diffs), repl, ma, mb) +- } +- return nil +-} +diff -urN a/gopls/internal/protocol/log.go b/gopls/internal/protocol/log.go +--- a/gopls/internal/protocol/log.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/protocol/log.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,136 +0,0 @@ +-// Copyright 2019 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package protocol +- +-import ( +- "context" +- "fmt" +- "io" +- "strings" +- "sync" +- "time" +- +- "golang.org/x/tools/internal/jsonrpc2" +-) +- +-type loggingStream struct { +- stream jsonrpc2.Stream +- logMu sync.Mutex +- log io.Writer +-} +- +-// LoggingStream returns a stream that does LSP protocol logging too +-func LoggingStream(str jsonrpc2.Stream, w io.Writer) jsonrpc2.Stream { +- return &loggingStream{stream: str, log: w} +-} +- +-func (s *loggingStream) Read(ctx context.Context) (jsonrpc2.Message, int64, error) { +- msg, count, err := s.stream.Read(ctx) +- if err == nil { +- s.logCommon(msg, true) +- } +- return msg, count, err +-} +- +-func (s *loggingStream) Write(ctx context.Context, msg jsonrpc2.Message) (int64, error) { +- s.logCommon(msg, false) +- count, err := s.stream.Write(ctx, msg) +- return count, err +-} +- +-func (s *loggingStream) Close() error { +- return s.stream.Close() +-} +- +-type req struct { +- method string +- start time.Time +-} +- +-type mapped struct { +- mu sync.Mutex +- clientCalls map[string]req +- serverCalls map[string]req +-} +- +-var maps = &mapped{ +- sync.Mutex{}, +- make(map[string]req), +- make(map[string]req), +-} +- +-// these 4 methods are each used exactly once, but it seemed +-// better to have the encapsulation rather than ad hoc mutex +-// code in 4 places +-func (m *mapped) client(id string) req { +- m.mu.Lock() +- defer m.mu.Unlock() +- v := m.clientCalls[id] +- delete(m.clientCalls, id) +- return v +-} +- +-func (m *mapped) server(id string) req { +- m.mu.Lock() +- defer m.mu.Unlock() +- v := m.serverCalls[id] +- delete(m.serverCalls, id) +- return v +-} +- +-func (m *mapped) setClient(id string, r req) { +- m.mu.Lock() +- defer m.mu.Unlock() +- m.clientCalls[id] = r +-} +- +-func (m *mapped) 
setServer(id string, r req) { +- m.mu.Lock() +- defer m.mu.Unlock() +- m.serverCalls[id] = r +-} +- +-const eor = "\r\n\r\n\r\n" +- +-func (s *loggingStream) logCommon(msg jsonrpc2.Message, isRead bool) { +- s.logMu.Lock() +- defer s.logMu.Unlock() +- direction, pastTense := "Received", "Received" +- get, set := maps.client, maps.setServer +- if isRead { +- direction, pastTense = "Sending", "Sent" +- get, set = maps.server, maps.setClient +- } +- if msg == nil || s.log == nil { +- return +- } +- tm := time.Now() +- tmfmt := tm.Format("15:04:05.000 PM") +- +- buf := strings.Builder{} +- fmt.Fprintf(&buf, "[Trace - %s] ", tmfmt) // common beginning +- switch msg := msg.(type) { +- case *jsonrpc2.Call: +- id := fmt.Sprint(msg.ID()) +- fmt.Fprintf(&buf, "%s request '%s - (%s)'.\n", direction, msg.Method(), id) +- fmt.Fprintf(&buf, "Params: %s%s", msg.Params(), eor) +- set(id, req{method: msg.Method(), start: tm}) +- case *jsonrpc2.Notification: +- fmt.Fprintf(&buf, "%s notification '%s'.\n", direction, msg.Method()) +- fmt.Fprintf(&buf, "Params: %s%s", msg.Params(), eor) +- case *jsonrpc2.Response: +- id := fmt.Sprint(msg.ID()) +- if err := msg.Err(); err != nil { +- fmt.Fprintf(s.log, "[Error - %s] %s #%s %s%s", pastTense, tmfmt, id, err, eor) +- return +- } +- cc := get(id) +- elapsed := tm.Sub(cc.start) +- fmt.Fprintf(&buf, "%s response '%s - (%s)' in %dms.\n", +- direction, cc.method, id, elapsed/time.Millisecond) +- fmt.Fprintf(&buf, "Result: %s%s", msg.Result(), eor) +- } +- s.log.Write([]byte(buf.String())) +-} +diff -urN a/gopls/internal/protocol/mapper.go b/gopls/internal/protocol/mapper.go +--- a/gopls/internal/protocol/mapper.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/protocol/mapper.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,367 +0,0 @@ +-// Copyright 2023 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package protocol +- +-// This file defines Mapper, which wraps a file content buffer +-// ([]byte) and provides efficient conversion between every kind of +-// position representation. +-// +-// gopls uses four main representations of position: +-// +-// 1. byte offsets, e.g. (start, end int), starting from zero. +-// +-// 2. go/token notation. Use these types when interacting directly +-// with the go/* syntax packages: +-// +-// token.Pos +-// token.FileSet +-// token.File +-// +-// Because File.Offset and File.Pos panic on invalid inputs, +-// we do not call them directly and instead use the safetoken package +-// for these conversions. This is enforced by a static check. +-// +-// Beware also that the methods of token.File have two bugs for which +-// safetoken contains workarounds: +-// - #57490, whereby the parser may create ast.Nodes during error +-// recovery whose computed positions are out of bounds (EOF+1). +-// - #41029, whereby the wrong line number is returned for the EOF position. +-// +-// 3. the cmd package. +-// +-// cmd.point = (line, col8, offset). +-// cmd.Span = (uri URI, start, end cmd.point) +-// +-// Line and column are 1-based. +-// Columns are measured in bytes (UTF-8 codes). +-// All fields are optional. +-// +-// These types are useful as intermediate conversions of validated +-// ranges. Since their fields are optional they are also useful for +-// parsing user-provided positions (e.g. in the CLI) before we have +-// access to file contents. +-// +-// 4. protocol, the LSP RPC message format. 
+-// +-// protocol.Position = (Line, Character uint32) +-// protocol.Range = (start, end Position) +-// protocol.Location = (URI, protocol.Range) +-// +-// Line and Character are 0-based. +-// Characters (columns) are measured in UTF-16 codes. +-// +-// protocol.Mapper holds the (URI, Content) of a file, enabling +-// efficient mapping between byte offsets, cmd ranges, and +-// protocol ranges. +- +-import ( +- "bytes" +- "fmt" +- "go/ast" +- "go/token" +- "sort" +- "sync" +- "unicode/utf8" +- +- "golang.org/x/tools/gopls/internal/util/bug" +- "golang.org/x/tools/gopls/internal/util/safetoken" +-) +- +-// A Mapper wraps the content of a file and provides mapping +-// between byte offsets and notations of position such as: +-// +-// - (line, col8) pairs, where col8 is a 1-based UTF-8 column number +-// (bytes), as used by the go/token and cmd packages. +-// +-// - (line, col16) pairs, where col16 is a 1-based UTF-16 column +-// number, as used by the LSP protocol. +-// +-// All conversion methods are named "FromTo", where From and To are the two types. +-// For example, the PointPosition method converts from a Point to a Position. +-// +-// Mapper does not intrinsically depend on go/token-based +-// representations. Use safetoken to map between token.Pos <=> byte +-// offsets, or the convenience methods such as PosPosition, +-// NodePosition, or NodeRange. +-// +-// See overview comments at top of this file. +-type Mapper struct { +- URI DocumentURI +- Content []byte +- +- // Line-number information is requested only for a tiny +- // fraction of Mappers, so we compute it lazily. +- // Call initLines() before accessing fields below. +- linesOnce sync.Once +- lineStart []int // byte offset of start of ith line (0-based); last=EOF iff \n-terminated +- nonASCII bool +- +- // TODO(adonovan): adding an extra lineStart entry for EOF +- // might simplify every method that accesses it. Try it out. +-} +- +-// NewMapper creates a new mapper for the given URI and content. +-func NewMapper(uri DocumentURI, content []byte) *Mapper { +- return &Mapper{URI: uri, Content: content} +-} +- +-// initLines populates the lineStart table. +-func (m *Mapper) initLines() { +- m.linesOnce.Do(func() { +- nlines := bytes.Count(m.Content, []byte("\n")) +- m.lineStart = make([]int, 1, nlines+1) // initially []int{0} +- for offset, b := range m.Content { +- if b == '\n' { +- m.lineStart = append(m.lineStart, offset+1) +- } +- if b >= utf8.RuneSelf { +- m.nonASCII = true +- } +- } +- }) +-} +- +-// LineCol8Position converts a valid line and UTF-8 column number, +-// both 1-based, to a protocol (UTF-16) position. +-func (m *Mapper) LineCol8Position(line, col8 int) (Position, error) { +- // Report a bug for inputs that are invalid for any file content. +- if line < 1 { +- return Position{}, bug.Errorf("invalid 1-based line number: %d", line) +- } +- if col8 < 1 { +- return Position{}, bug.Errorf("invalid 1-based column number: %d", col8) +- } +- +- m.initLines() +- line0 := line - 1 // 0-based +- if !(0 <= line0 && line0 < len(m.lineStart)) { +- return Position{}, fmt.Errorf("line number %d out of range (max %d)", line, len(m.lineStart)) +- } +- +- // content[start:end] is the preceding partial line. +- start := m.lineStart[line0] +- end := start + col8 - 1 +- +- // Validate column. 
+- if end > len(m.Content) { +- return Position{}, fmt.Errorf("column is beyond end of file") +- } else if line0+1 < len(m.lineStart) && end >= m.lineStart[line0+1] { +- return Position{}, fmt.Errorf("column is beyond end of line") +- } +- +- char := UTF16Len(m.Content[start:end]) +- return Position{Line: uint32(line0), Character: uint32(char)}, nil +-} +- +-// -- conversions from byte offsets -- +- +-// OffsetLocation converts a byte-offset interval to a protocol (UTF-16) location. +-func (m *Mapper) OffsetLocation(start, end int) (Location, error) { +- rng, err := m.OffsetRange(start, end) +- if err != nil { +- return Location{}, err +- } +- return m.URI.Location(rng), nil +-} +- +-// OffsetRange converts a byte-offset interval to a protocol (UTF-16) range. +-func (m *Mapper) OffsetRange(start, end int) (Range, error) { +- if start > end { +- return Range{}, fmt.Errorf("start offset (%d) > end (%d)", start, end) +- } +- startPosition, err := m.OffsetPosition(start) +- if err != nil { +- return Range{}, fmt.Errorf("start: %v", err) +- } +- endPosition, err := m.OffsetPosition(end) +- if err != nil { +- return Range{}, fmt.Errorf("end: %v", err) +- } +- return Range{Start: startPosition, End: endPosition}, nil +-} +- +-// OffsetPosition converts a byte offset to a protocol (UTF-16) position. +-func (m *Mapper) OffsetPosition(offset int) (Position, error) { +- if !(0 <= offset && offset <= len(m.Content)) { +- return Position{}, fmt.Errorf("invalid offset %d (want 0-%d)", offset, len(m.Content)) +- } +- // No error may be returned after this point, +- // even if the offset does not fall at a rune boundary. +- +- line, col16 := m.lineCol16(offset) +- return Position{Line: uint32(line), Character: uint32(col16)}, nil +-} +- +-// lineCol16 converts a valid byte offset to line and UTF-16 column numbers, both 0-based. +-func (m *Mapper) lineCol16(offset int) (int, int) { +- line, start, cr := m.line(offset) +- var col16 int +- if m.nonASCII { +- col16 = UTF16Len(m.Content[start:offset]) +- } else { +- col16 = offset - start +- } +- if cr { +- col16-- // retreat from \r at line end +- } +- return line, col16 +-} +- +-// OffsetLineCol8 converts a valid byte offset to line and UTF-8 column numbers, both 1-based. +-func (m *Mapper) OffsetLineCol8(offset int) (int, int) { +- line, start, cr := m.line(offset) +- col8 := offset - start +- if cr { +- col8-- // retreat from \r at line end +- } +- return line + 1, col8 + 1 +-} +- +-// line returns: +-// - the 0-based index of the line that encloses the (valid) byte offset; +-// - the start offset of that line; and +-// - whether the offset denotes a carriage return (\r) at line end. +-func (m *Mapper) line(offset int) (int, int, bool) { +- m.initLines() +- // In effect, binary search returns a 1-based result. +- line := sort.Search(len(m.lineStart), func(i int) bool { +- return offset < m.lineStart[i] +- }) +- +- // Adjustment for line-endings: \r|\n is the same as |\r\n. +- var eol int +- if line == len(m.lineStart) { +- eol = len(m.Content) // EOF +- } else { +- eol = m.lineStart[line] - 1 +- } +- cr := offset == eol && offset > 0 && m.Content[offset-1] == '\r' +- +- line-- // 0-based +- +- return line, m.lineStart[line], cr +-} +- +-// -- conversions from protocol (UTF-16) domain -- +- +-// RangeOffsets converts a protocol (UTF-16) range to start/end byte offsets. 
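// For example (an illustrative sketch using the 4-byte rune 𐐀, which occupies one
// UTF-16 surrogate pair; the resulting offsets match the mapper tests further below):
//
//	m := NewMapper("", []byte("a𐐀b"))
//	start, end, _ := m.RangeOffsets(Range{
//		Start: Position{Line: 0, Character: 1}, // just after 'a'
//		End:   Position{Line: 0, Character: 3}, // just after '𐐀' (two UTF-16 codes)
//	})
//	// start == 1, end == 5 (byte offsets into the UTF-8 content)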
+-func (m *Mapper) RangeOffsets(r Range) (int, int, error) { +- start, err := m.PositionOffset(r.Start) +- if err != nil { +- return 0, 0, err +- } +- end, err := m.PositionOffset(r.End) +- if err != nil { +- return 0, 0, err +- } +- if start > end { +- return 0, 0, fmt.Errorf("start (offset %d) > end (offset %d)", start, end) +- } +- return start, end, nil +-} +- +-// PositionOffset converts a protocol (UTF-16) position to a byte offset. +-func (m *Mapper) PositionOffset(p Position) (int, error) { +- m.initLines() +- +- // Validate line number. +- if p.Line > uint32(len(m.lineStart)) { +- return 0, fmt.Errorf("line number %d out of range 0-%d", p.Line, len(m.lineStart)) +- } else if p.Line == uint32(len(m.lineStart)) { +- if p.Character == 0 { +- return len(m.Content), nil // EOF +- } +- return 0, fmt.Errorf("column is beyond end of file") +- } +- +- offset := m.lineStart[p.Line] +- content := m.Content[offset:] // rest of file from start of enclosing line +- +- // Advance bytes up to the required number of UTF-16 codes. +- col8 := 0 +- for col16 := 0; col16 < int(p.Character); col16++ { +- r, sz := utf8.DecodeRune(content) +- if sz == 0 { +- return 0, fmt.Errorf("column is beyond end of file") +- } +- if r == '\n' { +- return 0, fmt.Errorf("column is beyond end of line") +- } +- if sz == 1 && r == utf8.RuneError { +- return 0, fmt.Errorf("buffer contains invalid UTF-8 text") +- } +- content = content[sz:] +- +- if r >= 0x10000 { +- col16++ // rune was encoded by a pair of surrogate UTF-16 codes +- +- if col16 == int(p.Character) { +- break // requested position is in the middle of a rune +- } +- } +- col8 += sz +- } +- return offset + col8, nil +-} +- +-// -- go/token domain convenience methods -- +- +-// PosPosition converts a token pos to a protocol (UTF-16) position. +-func (m *Mapper) PosPosition(tf *token.File, pos token.Pos) (Position, error) { +- offset, err := safetoken.Offset(tf, pos) +- if err != nil { +- return Position{}, err +- } +- return m.OffsetPosition(offset) +-} +- +-// PosLocation converts a token range to a protocol (UTF-16) location. +-func (m *Mapper) PosLocation(tf *token.File, start, end token.Pos) (Location, error) { +- startOffset, endOffset, err := safetoken.Offsets(tf, start, end) +- if err != nil { +- return Location{}, err +- } +- rng, err := m.OffsetRange(startOffset, endOffset) +- if err != nil { +- return Location{}, err +- } +- return m.URI.Location(rng), nil +-} +- +-// PosRange converts a token range to a protocol (UTF-16) range. +-func (m *Mapper) PosRange(tf *token.File, start, end token.Pos) (Range, error) { +- startOffset, endOffset, err := safetoken.Offsets(tf, start, end) +- if err != nil { +- return Range{}, err +- } +- return m.OffsetRange(startOffset, endOffset) +-} +- +-// PosText returns the source text for the token range. +-func (m *Mapper) PosText(tf *token.File, start, end token.Pos) ([]byte, error) { +- startOffset, endOffset, err := safetoken.Offsets(tf, start, end) +- if err != nil { +- return nil, err +- } +- return m.Content[startOffset:endOffset], nil +-} +- +-// NodeRange converts a syntax node range to a protocol (UTF-16) range. +-func (m *Mapper) NodeRange(tf *token.File, node ast.Node) (Range, error) { +- return m.PosRange(tf, node.Pos(), node.End()) +-} +- +-// NodeText returns the source text for syntax node range. +-func (m *Mapper) NodeText(tf *token.File, node ast.Node) ([]byte, error) { +- return m.PosText(tf, node.Pos(), node.End()) +-} +- +-// LocationTextDocumentPositionParams converts its argument to its result. 
+-func LocationTextDocumentPositionParams(loc Location) TextDocumentPositionParams { +- return TextDocumentPositionParams{ +- TextDocument: TextDocumentIdentifier{URI: loc.URI}, +- Position: loc.Range.Start, +- } +-} +diff -urN a/gopls/internal/protocol/mapper_test.go b/gopls/internal/protocol/mapper_test.go +--- a/gopls/internal/protocol/mapper_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/protocol/mapper_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,449 +0,0 @@ +-// Copyright 2023 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package protocol_test +- +-import ( +- "fmt" +- "strings" +- "testing" +- +- "golang.org/x/tools/gopls/internal/protocol" +-) +- +-// This file tests Mapper's logic for converting between offsets, +-// UTF-8 columns, and UTF-16 columns. (The strange form attests to +-// earlier abstractions.) +- +-// 𐐀 is U+10400 = [F0 90 90 80] in UTF-8, [D801 DC00] in UTF-16. +-var funnyString = []byte("𐐀23\n𐐀45") +- +-var toUTF16Tests = []struct { +- scenario string +- input []byte +- line int // 1-indexed count +- col int // 1-indexed byte position in line +- offset int // 0-indexed byte offset into input +- resUTF16col int // 1-indexed UTF-16 col number +- pre string // everything before the cursor on the line +- post string // everything from the cursor onwards +- err string // expected error string in call to ToUTF16Column +- issue *bool +-}{ +- { +- scenario: "cursor missing content", +- input: nil, +- offset: -1, +- err: "point has neither offset nor line/column", +- }, +- { +- scenario: "cursor missing position", +- input: funnyString, +- line: -1, +- col: -1, +- offset: -1, +- err: "point has neither offset nor line/column", +- }, +- { +- scenario: "zero length input; cursor at first col, first line", +- input: []byte(""), +- line: 1, +- col: 1, +- offset: 0, +- resUTF16col: 1, +- }, +- { +- scenario: "cursor before funny character; first line", +- input: funnyString, +- line: 1, +- col: 1, +- offset: 0, +- resUTF16col: 1, +- pre: "", +- post: "𐐀23", +- }, +- { +- scenario: "cursor after funny character; first line", +- input: funnyString, +- line: 1, +- col: 5, // 4 + 1 (1-indexed) +- offset: 4, // (unused since we have line+col) +- resUTF16col: 3, // 2 + 1 (1-indexed) +- pre: "𐐀", +- post: "23", +- }, +- { +- scenario: "cursor after last character on first line", +- input: funnyString, +- line: 1, +- col: 7, // 4 + 1 + 1 + 1 (1-indexed) +- offset: 6, // 4 + 1 + 1 (unused since we have line+col) +- resUTF16col: 5, // 2 + 1 + 1 + 1 (1-indexed) +- pre: "𐐀23", +- post: "", +- }, +- { +- scenario: "cursor before funny character; second line", +- input: funnyString, +- line: 2, +- col: 1, +- offset: 7, // length of first line (unused since we have line+col) +- resUTF16col: 1, +- pre: "", +- post: "𐐀45", +- }, +- { +- scenario: "cursor after funny character; second line", +- input: funnyString, +- line: 1, +- col: 5, // 4 + 1 (1-indexed) +- offset: 11, // 7 (length of first line) + 4 (unused since we have line+col) +- resUTF16col: 3, // 2 + 1 (1-indexed) +- pre: "𐐀", +- post: "45", +- }, +- { +- scenario: "cursor after last character on second line", +- input: funnyString, +- line: 2, +- col: 7, // 4 + 1 + 1 + 1 (1-indexed) +- offset: 13, // 7 (length of first line) + 4 + 1 + 1 (unused since we have line+col) +- resUTF16col: 5, // 2 + 1 + 1 + 1 (1-indexed) +- pre: "𐐀45", +- post: "", +- }, +- { +- scenario: "cursor beyond end of file", +- input: 
funnyString, +- line: 2, +- col: 8, // 4 + 1 + 1 + 1 + 1 (1-indexed) +- offset: 14, // 4 + 1 + 1 + 1 (unused since we have line+col) +- err: "column is beyond end of file", +- }, +-} +- +-var fromUTF16Tests = []struct { +- scenario string +- input []byte +- line int // 1-indexed line number (isn't actually used) +- utf16col int // 1-indexed UTF-16 col number +- resCol int // 1-indexed byte position in line +- resOffset int // 0-indexed byte offset into input +- pre string // everything before the cursor on the line +- post string // everything from the cursor onwards +- err string // expected error string in call to ToUTF16Column +-}{ +- { +- scenario: "zero length input; cursor at first col, first line", +- input: []byte(""), +- line: 1, +- utf16col: 1, +- resCol: 1, +- resOffset: 0, +- pre: "", +- post: "", +- }, +- { +- scenario: "cursor before funny character", +- input: funnyString, +- line: 1, +- utf16col: 1, +- resCol: 1, +- resOffset: 0, +- pre: "", +- post: "𐐀23", +- }, +- { +- scenario: "cursor after funny character", +- input: funnyString, +- line: 1, +- utf16col: 3, +- resCol: 5, +- resOffset: 4, +- pre: "𐐀", +- post: "23", +- }, +- { +- scenario: "cursor after last character on line", +- input: funnyString, +- line: 1, +- utf16col: 5, +- resCol: 7, +- resOffset: 6, +- pre: "𐐀23", +- post: "", +- }, +- { +- scenario: "cursor beyond last character on line", +- input: funnyString, +- line: 1, +- utf16col: 6, +- resCol: 7, +- resOffset: 6, +- pre: "𐐀23", +- post: "", +- err: "column is beyond end of line", +- }, +- { +- scenario: "cursor before funny character; second line", +- input: funnyString, +- line: 2, +- utf16col: 1, +- resCol: 1, +- resOffset: 7, +- pre: "", +- post: "𐐀45", +- }, +- { +- scenario: "cursor after funny character; second line", +- input: funnyString, +- line: 2, +- utf16col: 3, // 2 + 1 (1-indexed) +- resCol: 5, // 4 + 1 (1-indexed) +- resOffset: 11, // 7 (length of first line) + 4 +- pre: "𐐀", +- post: "45", +- }, +- { +- scenario: "cursor after last character on second line", +- input: funnyString, +- line: 2, +- utf16col: 5, // 2 + 1 + 1 + 1 (1-indexed) +- resCol: 7, // 4 + 1 + 1 + 1 (1-indexed) +- resOffset: 13, // 7 (length of first line) + 4 + 1 + 1 +- pre: "𐐀45", +- post: "", +- }, +- { +- scenario: "cursor beyond end of file", +- input: funnyString, +- line: 2, +- utf16col: 6, // 2 + 1 + 1 + 1 + 1(1-indexed) +- resCol: 8, // 4 + 1 + 1 + 1 + 1 (1-indexed) +- resOffset: 14, // 7 (length of first line) + 4 + 1 + 1 + 1 +- err: "column is beyond end of file", +- }, +-} +- +-func TestToUTF16(t *testing.T) { +- for _, e := range toUTF16Tests { +- t.Run(e.scenario, func(t *testing.T) { +- if e.issue != nil && !*e.issue { +- t.Skip("expected to fail") +- } +- m := protocol.NewMapper("", e.input) +- var pos protocol.Position +- var err error +- if e.line > 0 { +- pos, err = m.LineCol8Position(e.line, e.col) +- } else if e.offset >= 0 { +- pos, err = m.OffsetPosition(e.offset) +- } else { +- err = fmt.Errorf("point has neither offset nor line/column") +- } +- if err != nil { +- if err.Error() != e.err { +- t.Fatalf("expected error %v; got %v", e.err, err) +- } +- return +- } +- if e.err != "" { +- t.Fatalf("unexpected success; wanted %v", e.err) +- } +- got := int(pos.Character) + 1 +- if got != e.resUTF16col { +- t.Fatalf("expected result %v; got %v", e.resUTF16col, got) +- } +- pre, post := getPrePost(e.input, e.offset) +- if pre != e.pre { +- t.Fatalf("expected #%d pre %q; got %q", e.offset, e.pre, pre) +- } +- if post != e.post { +- t.Fatalf("expected #%d, 
post %q; got %q", e.offset, e.post, post) +- } +- }) +- } +-} +- +-func TestFromUTF16(t *testing.T) { +- for _, e := range fromUTF16Tests { +- t.Run(e.scenario, func(t *testing.T) { +- m := protocol.NewMapper("", e.input) +- offset, err := m.PositionOffset(protocol.Position{ +- Line: uint32(e.line - 1), +- Character: uint32(e.utf16col - 1), +- }) +- if err != nil { +- if err.Error() != e.err { +- t.Fatalf("expected error %v; got %v", e.err, err) +- } +- return +- } +- if e.err != "" { +- t.Fatalf("unexpected success; wanted %v", e.err) +- } +- if offset != e.resOffset { +- t.Fatalf("expected offset %v; got %v", e.resOffset, offset) +- } +- line, col8 := m.OffsetLineCol8(offset) +- if line != e.line { +- t.Fatalf("expected resulting line %v; got %v", e.line, line) +- } +- if col8 != e.resCol { +- t.Fatalf("expected resulting col %v; got %v", e.resCol, col8) +- } +- pre, post := getPrePost(e.input, offset) +- if pre != e.pre { +- t.Fatalf("expected #%d pre %q; got %q", offset, e.pre, pre) +- } +- if post != e.post { +- t.Fatalf("expected #%d post %q; got %q", offset, e.post, post) +- } +- }) +- } +-} +- +-func getPrePost(content []byte, offset int) (string, string) { +- pre, post := string(content)[:offset], string(content)[offset:] +- if i := strings.LastIndex(pre, "\n"); i >= 0 { +- pre = pre[i+1:] +- } +- if i := strings.IndexRune(post, '\n'); i >= 0 { +- post = post[:i] +- } +- return pre, post +-} +- +-// -- these are the historical lsppos tests -- +- +-type testCase struct { +- content string // input text +- substrOrOffset any // explicit integer offset, or a substring +- wantLine, wantChar int // expected LSP position information +-} +- +-// offset returns the test case byte offset +-func (c testCase) offset() int { +- switch x := c.substrOrOffset.(type) { +- case int: +- return x +- case string: +- i := strings.Index(c.content, x) +- if i < 0 { +- panic(fmt.Sprintf("%q does not contain substring %q", c.content, x)) +- } +- return i +- } +- panic("substrOrIndex must be an integer or string") +-} +- +-var tests = []testCase{ +- {"a𐐀b", "a", 0, 0}, +- {"a𐐀b", "𐐀", 0, 1}, +- {"a𐐀b", "b", 0, 3}, +- {"a𐐀b\n", "\n", 0, 4}, +- {"a𐐀b\r\n", "\n", 0, 4}, // \r|\n is not a valid position, so we move back to the end of the first line. 
+- {"a𐐀b\r\nx", "x", 1, 0}, +- {"a𐐀b\r\nx\ny", "y", 2, 0}, +- +- // Testing EOL and EOF positions +- {"", 0, 0, 0}, // 0th position of an empty buffer is (0, 0) +- {"abc", "c", 0, 2}, +- {"abc", 3, 0, 3}, +- {"abc\n", "\n", 0, 3}, +- {"abc\n", 4, 1, 0}, // position after a newline is on the next line +-} +- +-func TestLineChar(t *testing.T) { +- for _, test := range tests { +- m := protocol.NewMapper("", []byte(test.content)) +- offset := test.offset() +- posn, _ := m.OffsetPosition(offset) +- gotLine, gotChar := int(posn.Line), int(posn.Character) +- if gotLine != test.wantLine || gotChar != test.wantChar { +- t.Errorf("LineChar(%d) = (%d,%d), want (%d,%d)", offset, gotLine, gotChar, test.wantLine, test.wantChar) +- } +- } +-} +- +-func TestInvalidOffset(t *testing.T) { +- content := []byte("a𐐀b\r\nx\ny") +- m := protocol.NewMapper("", content) +- for _, offset := range []int{-1, 100} { +- posn, err := m.OffsetPosition(offset) +- if err == nil { +- t.Errorf("OffsetPosition(%d) = %s, want error", offset, posn) +- } +- } +-} +- +-func TestPosition(t *testing.T) { +- for _, test := range tests { +- m := protocol.NewMapper("", []byte(test.content)) +- offset := test.offset() +- got, err := m.OffsetPosition(offset) +- if err != nil { +- t.Errorf("OffsetPosition(%d) failed: %v", offset, err) +- continue +- } +- want := protocol.Position{Line: uint32(test.wantLine), Character: uint32(test.wantChar)} +- if got != want { +- t.Errorf("Position(%d) = %v, want %v", offset, got, want) +- } +- } +-} +- +-func TestRange(t *testing.T) { +- for _, test := range tests { +- m := protocol.NewMapper("", []byte(test.content)) +- offset := test.offset() +- got, err := m.OffsetRange(0, offset) +- if err != nil { +- t.Fatal(err) +- } +- want := protocol.Range{ +- End: protocol.Position{Line: uint32(test.wantLine), Character: uint32(test.wantChar)}, +- } +- if got != want { +- t.Errorf("Range(%d) = %v, want %v", offset, got, want) +- } +- } +-} +- +-func TestBytesOffset(t *testing.T) { +- tests := []struct { +- text string +- pos protocol.Position +- want int +- }{ +- // U+10400 encodes as [F0 90 90 80] in UTF-8 and [D801 DC00] in UTF-16. 
+- {text: `a𐐀b`, pos: protocol.Position{Line: 0, Character: 0}, want: 0}, +- {text: `a𐐀b`, pos: protocol.Position{Line: 0, Character: 1}, want: 1}, +- {text: `a𐐀b`, pos: protocol.Position{Line: 0, Character: 2}, want: 1}, +- {text: `a𐐀b`, pos: protocol.Position{Line: 0, Character: 3}, want: 5}, +- {text: `a𐐀b`, pos: protocol.Position{Line: 0, Character: 4}, want: 6}, +- {text: `a𐐀b`, pos: protocol.Position{Line: 0, Character: 5}, want: -1}, +- {text: "aaa\nbbb\n", pos: protocol.Position{Line: 0, Character: 3}, want: 3}, +- {text: "aaa\nbbb\n", pos: protocol.Position{Line: 0, Character: 4}, want: -1}, +- {text: "aaa\nbbb\n", pos: protocol.Position{Line: 1, Character: 0}, want: 4}, +- {text: "aaa\nbbb\n", pos: protocol.Position{Line: 1, Character: 3}, want: 7}, +- {text: "aaa\nbbb\n", pos: protocol.Position{Line: 1, Character: 4}, want: -1}, +- {text: "aaa\nbbb\n", pos: protocol.Position{Line: 2, Character: 0}, want: 8}, +- {text: "aaa\nbbb\n", pos: protocol.Position{Line: 2, Character: 1}, want: -1}, +- {text: "aaa\nbbb\n\n", pos: protocol.Position{Line: 2, Character: 0}, want: 8}, +- } +- +- for i, test := range tests { +- fname := fmt.Sprintf("test %d", i) +- uri := protocol.URIFromPath(fname) +- mapper := protocol.NewMapper(uri, []byte(test.text)) +- got, err := mapper.PositionOffset(test.pos) +- if err != nil && test.want != -1 { +- t.Errorf("%d: unexpected error: %v", i, err) +- } +- if err == nil && got != test.want { +- t.Errorf("want %d for %q(Line:%d,Character:%d), but got %d", test.want, test.text, int(test.pos.Line), int(test.pos.Character), got) +- } +- } +-} +diff -urN a/gopls/internal/protocol/protocol.go b/gopls/internal/protocol/protocol.go +--- a/gopls/internal/protocol/protocol.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/protocol/protocol.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,297 +0,0 @@ +-// Copyright 2018 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package protocol +- +-import ( +- "bytes" +- "context" +- "encoding/json" +- "fmt" +- "io" +- +- "golang.org/x/tools/internal/event" +- "golang.org/x/tools/internal/jsonrpc2" +- jsonrpc2_v2 "golang.org/x/tools/internal/jsonrpc2_v2" +- "golang.org/x/tools/internal/xcontext" +-) +- +-var ( +- // RequestCancelledError should be used when a request is cancelled early. +- RequestCancelledError = jsonrpc2.NewError(-32800, "JSON RPC cancelled") +- RequestCancelledErrorV2 = jsonrpc2_v2.NewError(-32800, "JSON RPC cancelled") +-) +- +-type ClientCloser interface { +- Client +- io.Closer +-} +- +-type connSender interface { +- io.Closer +- +- Notify(ctx context.Context, method string, params any) error +- Call(ctx context.Context, method string, params, result any) error +-} +- +-type clientDispatcher struct { +- sender connSender +-} +- +-func (c *clientDispatcher) Close() error { +- return c.sender.Close() +-} +- +-// ClientDispatcher returns a Client that dispatches LSP requests across the +-// given jsonrpc2 connection. 
+-func ClientDispatcher(conn jsonrpc2.Conn) ClientCloser { +- return &clientDispatcher{sender: clientConn{conn}} +-} +- +-type clientConn struct { +- conn jsonrpc2.Conn +-} +- +-func (c clientConn) Close() error { +- return c.conn.Close() +-} +- +-func (c clientConn) Notify(ctx context.Context, method string, params any) error { +- return c.conn.Notify(ctx, method, params) +-} +- +-func (c clientConn) Call(ctx context.Context, method string, params any, result any) error { +- id, err := c.conn.Call(ctx, method, params, result) +- if ctx.Err() != nil { +- cancelCall(ctx, c, id) +- } +- return err +-} +- +-func ClientDispatcherV2(conn *jsonrpc2_v2.Connection) ClientCloser { +- return &clientDispatcher{clientConnV2{conn}} +-} +- +-type clientConnV2 struct { +- conn *jsonrpc2_v2.Connection +-} +- +-func (c clientConnV2) Close() error { +- return c.conn.Close() +-} +- +-func (c clientConnV2) Notify(ctx context.Context, method string, params any) error { +- return c.conn.Notify(ctx, method, params) +-} +- +-func (c clientConnV2) Call(ctx context.Context, method string, params any, result any) error { +- call := c.conn.Call(ctx, method, params) +- err := call.Await(ctx, result) +- if ctx.Err() != nil { +- detached := xcontext.Detach(ctx) +- c.conn.Notify(detached, "$/cancelRequest", &CancelParams{ID: call.ID().Raw()}) +- } +- return err +-} +- +-// ServerDispatcher returns a Server that dispatches LSP requests across the +-// given jsonrpc2 connection. +-func ServerDispatcher(conn jsonrpc2.Conn) Server { +- return &serverDispatcher{sender: clientConn{conn}} +-} +- +-func ServerDispatcherV2(conn *jsonrpc2_v2.Connection) Server { +- return &serverDispatcher{sender: clientConnV2{conn}} +-} +- +-type serverDispatcher struct { +- sender connSender +-} +- +-func ClientHandler(client Client, handler jsonrpc2.Handler) jsonrpc2.Handler { +- return func(ctx context.Context, reply jsonrpc2.Replier, req jsonrpc2.Request) error { +- if ctx.Err() != nil { +- ctx := xcontext.Detach(ctx) +- return reply(ctx, nil, RequestCancelledError) +- } +- handled, err := clientDispatch(ctx, client, reply, req) +- if handled || err != nil { +- return err +- } +- return handler(ctx, reply, req) +- } +-} +- +-func ClientHandlerV2(client Client) jsonrpc2_v2.Handler { +- return jsonrpc2_v2.HandlerFunc(func(ctx context.Context, req *jsonrpc2_v2.Request) (any, error) { +- if ctx.Err() != nil { +- return nil, RequestCancelledErrorV2 +- } +- req1 := req2to1(req) +- var ( +- result any +- resErr error +- ) +- replier := func(_ context.Context, res any, err error) error { +- if err != nil { +- resErr = err +- return nil +- } +- result = res +- return nil +- } +- _, err := clientDispatch(ctx, client, replier, req1) +- if err != nil { +- return nil, err +- } +- return result, resErr +- }) +-} +- +-func ServerHandler(server Server, handler jsonrpc2.Handler) jsonrpc2.Handler { +- return func(ctx context.Context, reply jsonrpc2.Replier, req jsonrpc2.Request) error { +- if ctx.Err() != nil { +- ctx := xcontext.Detach(ctx) +- return reply(ctx, nil, RequestCancelledError) +- } +- handled, err := serverDispatch(ctx, server, reply, req) +- if handled || err != nil { +- return err +- } +- return handler(ctx, reply, req) +- } +-} +- +-func ServerHandlerV2(server Server) jsonrpc2_v2.Handler { +- return jsonrpc2_v2.HandlerFunc(func(ctx context.Context, req *jsonrpc2_v2.Request) (any, error) { +- if ctx.Err() != nil { +- return nil, RequestCancelledErrorV2 +- } +- req1 := req2to1(req) +- var ( +- result any +- resErr error +- ) +- replier := func(_ 
context.Context, res any, err error) error { +- if err != nil { +- resErr = err +- return nil +- } +- result = res +- return nil +- } +- _, err := serverDispatch(ctx, server, replier, req1) +- if err != nil { +- return nil, err +- } +- return result, resErr +- }) +-} +- +-func req2to1(req2 *jsonrpc2_v2.Request) jsonrpc2.Request { +- if req2.ID.IsValid() { +- raw := req2.ID.Raw() +- var idv1 jsonrpc2.ID +- switch v := raw.(type) { +- case int64: +- idv1 = jsonrpc2.NewIntID(v) +- case string: +- idv1 = jsonrpc2.NewStringID(v) +- default: +- panic(fmt.Sprintf("unsupported ID type %T", raw)) +- } +- req1, err := jsonrpc2.NewCall(idv1, req2.Method, req2.Params) +- if err != nil { +- panic(err) +- } +- return req1 +- } +- req1, err := jsonrpc2.NewNotification(req2.Method, req2.Params) +- if err != nil { +- panic(err) +- } +- return req1 +-} +- +-func Handlers(handler jsonrpc2.Handler) jsonrpc2.Handler { +- return CancelHandler( +- jsonrpc2.AsyncHandler( +- jsonrpc2.MustReplyHandler(handler))) +-} +- +-func CancelHandler(handler jsonrpc2.Handler) jsonrpc2.Handler { +- handler, canceller := jsonrpc2.CancelHandler(handler) +- return func(ctx context.Context, reply jsonrpc2.Replier, req jsonrpc2.Request) error { +- if req.Method() != "$/cancelRequest" { +- // TODO(iancottrell): See if we can generate a reply for the request to be cancelled +- // at the point of cancellation rather than waiting for gopls to naturally reply. +- // To do that, we need to keep track of whether a reply has been sent already and +- // be careful about racing between the two paths. +- // TODO(iancottrell): Add a test that watches the stream and verifies the response +- // for the cancelled request flows. +- replyWithDetachedContext := func(ctx context.Context, resp any, err error) error { +- // https://microsoft.github.io/language-server-protocol/specifications/specification-current/#cancelRequest +- if ctx.Err() != nil && err == nil { +- err = RequestCancelledError +- } +- ctx = xcontext.Detach(ctx) +- return reply(ctx, resp, err) +- } +- return handler(ctx, replyWithDetachedContext, req) +- } +- var params CancelParams +- if err := UnmarshalJSON(req.Params(), ¶ms); err != nil { +- return sendParseError(ctx, reply, err) +- } +- if n, ok := params.ID.(float64); ok { +- canceller(jsonrpc2.NewIntID(int64(n))) +- } else if s, ok := params.ID.(string); ok { +- canceller(jsonrpc2.NewStringID(s)) +- } else { +- return sendParseError(ctx, reply, fmt.Errorf("request ID %v malformed", params.ID)) +- } +- return reply(ctx, nil, nil) +- } +-} +- +-func Call(ctx context.Context, conn jsonrpc2.Conn, method string, params any, result any) error { +- id, err := conn.Call(ctx, method, params, result) +- if ctx.Err() != nil { +- cancelCall(ctx, clientConn{conn}, id) +- } +- return err +-} +- +-func cancelCall(ctx context.Context, sender connSender, id jsonrpc2.ID) { +- ctx = xcontext.Detach(ctx) +- ctx, done := event.Start(ctx, "protocol.canceller") +- defer done() +- // Note that only *jsonrpc2.ID implements json.Marshaler. +- sender.Notify(ctx, "$/cancelRequest", &CancelParams{ID: &id}) +-} +- +-// UnmarshalJSON unmarshals msg into the variable pointed to by +-// params. In JSONRPC, optional messages may be +-// "null", in which case it is a no-op. 
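// For example (illustrative): a "null" payload leaves the target untouched,
// while any other payload is handed to encoding/json as usual:
//
//	var v any
//	_ = UnmarshalJSON(json.RawMessage("null"), &v) // no-op: v stays nil
//	_ = UnmarshalJSON(json.RawMessage("123"), &v)  // delegates to json.Unmarshal: v == float64(123)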
+-func UnmarshalJSON(msg json.RawMessage, v any) error { +- if len(msg) == 0 || bytes.Equal(msg, []byte("null")) { +- return nil +- } +- return json.Unmarshal(msg, v) +-} +- +-func sendParseError(ctx context.Context, reply jsonrpc2.Replier, err error) error { +- return reply(ctx, nil, fmt.Errorf("%w: %s", jsonrpc2.ErrParse, err)) +-} +- +-// NonNilSlice returns x, or an empty slice if x was nil. +-// +-// (Many slice fields of protocol structs must be non-nil +-// to avoid being encoded as JSON "null".) +-func NonNilSlice[T comparable](x []T) []T { +- if x == nil { +- return []T{} +- } +- return x +-} +diff -urN a/gopls/internal/protocol/semtok/README.txt b/gopls/internal/protocol/semtok/README.txt +--- a/gopls/internal/protocol/semtok/README.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/protocol/semtok/README.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,121 +0,0 @@ +- +-The [LSP](https://microsoft.github.io/language-server-protocol/specifications/specification-3-17/#textDocument_semanticTokens) +-specifies semantic tokens as a way of telling clients about language-specific +-properties of pieces of code in a file being edited. +- +-The client asks for a set of semantic tokens and modifiers. This note describe which ones +-gopls will return, and under what circumstances. Gopls has no control over how the client +-converts semantic tokens into colors (or some other visible indication). In vscode it +-is possible to modify the color a theme uses by setting the `editor.semanticTokenColorCustomizations` +-object. We provide a little [guidance](#Colors) later. +- +-There are 22 semantic tokens, with 10 possible modifiers. The protocol allows each semantic +-token to be used with any of the 1024 subsets of possible modifiers, but most combinations +-don't make intuitive sense (although `async documentation` has a certain appeal). +- +-The 22 semantic tokens are `namespace`, `type`, `class`, `enum`, `interface`, +- `struct`, `typeParameter`, `parameter`, `variable`, `property`, `enumMember`, +- `event`, `function`, `method`, `macro`, `keyword`, `modifier`, `comment`, +- `string`, `number`, `regexp`, `operator`. +- +-The 10 modifiers are `declaration`, `definition`, `readonly`, `static`, +- `deprecated`, `abstract`, `async`, `modification`, `documentation`, `defaultLibrary`. +- +-The authoritative lists are in the [specification](https://microsoft.github.io/language-server-protocol/specifications/specification-3-17/#semanticTokenTypes) +- +-For the implementation to work correctly the client and server have to agree on the ordering +-of the tokens and of the modifiers. Gopls, therefore, will only send tokens and modifiers +-that the client has asked for. This document says what gopls would send if the client +-asked for everything. By default, vscode asks for everything. +- +-Gopls sends 11 token types for `.go` files and 1 for `.*tmpl` files. +-Nothing is sent for any other kind of file. +-This all could change. (When Go has generics, gopls will return `typeParameter`.) +- +-For `.*tmpl` files gopls sends `macro`, and no modifiers, for each `{{`...`}}` scope. +- +-## Semantic tokens for Go files +- +-There are two contrasting guiding principles that might be used to decide what to mark +-with semantic tokens. All clients already do some kind of syntax marking. E.g., vscode +-uses a TextMate grammar. The minimal principle would send semantic tokens only for those +-language features that cannot be reliably found without parsing Go and looking at types. 
+-The maximal principle would attempt to convey as much as possible about the Go code, +-using all available parsing and type information. +- +-There is much to be said for returning minimal information, but the minimal principle is +-not well-specified. Gopls has no way of knowing what the clients know about the Go program +-being edited. Even in vscode the TextMate grammars can be more or less elaborate +-and change over time. (Nonetheless, a minimal implementation would not return `keyword`, +-`number`, `comment`, or `string`.) +- +-The maximal position isn't particularly well-specified either. To chose one example, a +-format string might have formatting codes (`%-[4].6f`), escape sequences (`\U00010604`), and regular +-characters. Should these all be distinguished? One could even imagine distinguishing +-different runes by their Unicode language assignment, or some other Unicode property, such as +-being [confusable](http://www.unicode.org/Public/security/10.0.0/confusables.txt). While gopls does not fully adhere to such distinctions, +-it does recognizes formatting directives within strings, decorating them with "format" modifiers, +-providing more precise semantic highlighting in format strings. +- +-Semantic tokens are returned for identifiers, keywords, operators, comments, and literals. +-(Semantic tokens do not cover the file. They are not returned for +-white space or punctuation, and there is no semantic token for labels.) +-The following describes more precisely what gopls +-does, with a few notes on possible alternative choices. +-The references to *object* refer to the +-```types.Object``` returned by the type checker. The references to *nodes* refer to the +-```ast.Node``` from the parser. +- +-1. __`keyword`__ All Go [keywords](https://golang.org/ref/spec#Keywords) are marked `keyword`. +-1. __`namespace`__ All package names are marked `namespace`. In an import, if there is an +-alias, it would be marked. Otherwise the last component of the import path is marked. +-1. __`type`__ Objects of type ```types.TypeName``` are marked `type`. It also reports +-a modifier for the top-level constructor of the object's type, one of: +-`interface`, `struct`, `signature`, `pointer`, `array`, `map`, `slice`, `chan`, `string`, `number`, `bool`, `invalid`. +-1. __`parameter`__ The formal arguments in ```ast.FuncDecl``` and ```ast.FuncType``` nodes are marked `parameter`. +-1. __`variable`__ Identifiers in the +-scope of ```const``` are modified with `readonly`. ```nil``` is usually a `variable` modified with both +-`readonly` and `defaultLibrary`. (```nil``` is a predefined identifier; the user can redefine it, +-in which case it would just be a variable, or whatever.) Identifiers of type ```types.Variable``` are, +-not surprisingly, marked `variable`. Identifiers being defined (node ```ast.GenDecl```) are modified +-by `definition` and, if appropriate, `readonly`. Receivers (in method declarations) are +-`variable`. +-1. __`method`__ Methods are marked at their definition (```func (x foo) bar() {}```) or declaration +-in an ```interface```. Methods are not marked where they are used. +-In ```x.bar()```, ```x``` will be marked +-either as a `namespace` if it is a package name, or as a `variable` if it is an interface value, +-so distinguishing ```bar``` seemed superfluous. +-1. __`function`__ Bultins (```types.Builtin```) are modified with `defaultLibrary` +-(e.g., ```make```, ```len```, ```copy```). 
Identifiers whose +-object is ```types.Func``` or whose node is ```ast.FuncDecl``` are `function`. +-1. __`comment`__ Comments and struct tags. (Perhaps struct tags should be `property`?) +-1. __`string`__ Strings. Could add modifiers for e.g., escapes or format codes. +-1. __`number`__ Numbers. Should the ```i``` in ```23i``` be handled specially? +-1. __`operator`__ Assignment operators, binary operators, ellipses (```...```), increment/decrement +-operators, sends (```<-```), and unary operators. +- +-Gopls will send the modifier `deprecated` if it finds a comment +-```// deprecated``` in the godoc. +- +-The unused tokens for Go code are `class`, `enum`, `interface`, +- `struct`, `typeParameter`, `property`, `enumMember`, +- `event`, `macro`, `modifier`, +- `regexp` +- +-## Colors +- +-These comments are about vscode. +- +-The documentation has a [helpful](https://code.visualstudio.com/api/language-extensions/semantic-highlight-guide#custom-textmate-scope-mappings) +-description of which semantic tokens correspond to scopes in TextMate grammars. Themes seem +-to use the TextMate scopes to decide on colors. +- +-Some examples of color customizations are [here](https://medium.com/@danromans/how-to-customize-semantic-token-colorization-with-visual-studio-code-ac3eab96141b). +- +-## Note +- +-While a file is being edited it may temporarily contain either +-parsing errors or type errors. In this case gopls cannot determine some (or maybe any) +-of the semantic tokens. To avoid weird flickering it is the responsibility +-of clients to maintain the semantic token information +-in the unedited part of the file, and they do. +diff -urN a/gopls/internal/protocol/semtok/semtok.go b/gopls/internal/protocol/semtok/semtok.go +--- a/gopls/internal/protocol/semtok/semtok.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/protocol/semtok/semtok.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,203 +0,0 @@ +-// Copyright 2024 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-// The semtok package provides an encoder for LSP's semantic tokens. +-package semtok +- +-import "sort" +- +-// A Token provides the extent and semantics of a token. +-type Token struct { +- Line, Start uint32 // 0-based UTF-16 index +- Len uint32 // in UTF-16 codes +- Type Type +- Modifiers []Modifier +-} +- +-type Type string +- +-const ( +- // These are the tokens defined by LSP 3.18, but a client is +- // free to send its own set; any tokens that the server emits +- // that are not in this set are simply not encoded in the bitfield. +- TokComment Type = "comment" // for a comment +- TokFunction Type = "function" // for a function +- TokKeyword Type = "keyword" // for a keyword +- TokLabel Type = "label" // for a control label (LSP 3.18) +- TokMacro Type = "macro" // for text/template tokens +- TokMethod Type = "method" // for a method +- TokNamespace Type = "namespace" // for an imported package name +- TokNumber Type = "number" // for a numeric literal +- TokOperator Type = "operator" // for an operator +- TokParameter Type = "parameter" // for a parameter variable +- TokString Type = "string" // for a string literal +- TokType Type = "type" // for a type name (plus other uses) +- TokTypeParam Type = "typeParameter" // for a type parameter +- TokVariable Type = "variable" // for a var or const +- // The section below defines a subset of token types in standard token types +- // that gopls does not use. 
+- // +- // If you move types to above, document it in +- // gopls/doc/features/passive.md#semantic-tokens. +- // TokClass TokenType = "class" +- // TokDecorator TokenType = "decorator" +- // TokEnum TokenType = "enum" +- // TokEnumMember TokenType = "enumMember" +- // TokEvent TokenType = "event" +- // TokInterface TokenType = "interface" +- // TokModifier TokenType = "modifier" +- // TokProperty TokenType = "property" +- // TokRegexp TokenType = "regexp" +- // TokStruct TokenType = "struct" +-) +- +-// TokenTypes is a slice of types gopls will return as its server capabilities. +-var TokenTypes = []Type{ +- TokNamespace, +- TokType, +- TokTypeParam, +- TokParameter, +- TokVariable, +- TokFunction, +- TokMethod, +- TokMacro, +- TokKeyword, +- TokComment, +- TokString, +- TokNumber, +- TokOperator, +- TokLabel, +-} +- +-type Modifier string +- +-const ( +- // LSP 3.18 standard modifiers +- // As with TokenTypes, clients get only the modifiers they request. +- // +- // The section below defines a subset of modifiers in standard modifiers +- // that gopls understand. +- ModDefaultLibrary Modifier = "defaultLibrary" // for predeclared symbols +- ModDefinition Modifier = "definition" // for the declaring identifier of a symbol +- ModReadonly Modifier = "readonly" // for constants (TokVariable) +- // The section below defines the rest of the modifiers in standard modifiers +- // that gopls does not use. +- // +- // If you move modifiers to above, document it in +- // gopls/doc/features/passive.md#semantic-tokens. +- // ModAbstract Modifier = "abstract" +- // ModAsync Modifier = "async" +- // ModDeclaration Modifier = "declaration" +- // ModDeprecated Modifier = "deprecated" +- // ModDocumentation Modifier = "documentation" +- // ModModification Modifier = "modification" +- // ModStatic Modifier = "static" +- +- // non-standard modifiers +- // +- // Since the type of a symbol is orthogonal to its kind, +- // (e.g. a variable can have function type), +- // we use modifiers for the top-level type constructor. +- ModArray Modifier = "array" +- ModBool Modifier = "bool" +- ModChan Modifier = "chan" +- ModFormat Modifier = "format" // for format string directives such as "%s" +- ModInterface Modifier = "interface" +- ModMap Modifier = "map" +- ModNumber Modifier = "number" +- ModPointer Modifier = "pointer" +- ModSignature Modifier = "signature" // for function types +- ModSlice Modifier = "slice" +- ModString Modifier = "string" +- ModStruct Modifier = "struct" +-) +- +-// TokenModifiers is a slice of modifiers gopls will return as its server +-// capabilities. +-var TokenModifiers = []Modifier{ +- // LSP 3.18 standard modifiers. +- ModDefinition, +- ModReadonly, +- ModDefaultLibrary, +- // Additional custom modifiers. +- ModArray, +- ModBool, +- ModChan, +- ModFormat, +- ModInterface, +- ModMap, +- ModNumber, +- ModPointer, +- ModSignature, +- ModSlice, +- ModString, +- ModStruct, +-} +- +-// Encode returns the LSP encoding of a sequence of tokens. +-// encodeType and encodeModifier maps control which types and modifiers are +-// excluded in the response. If a type or modifier maps to false, it will be +-// omitted from the output. 
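// For example (an illustrative sketch; it assumes the client requested every entry
// of TokenTypes and TokenModifiers, so nothing is filtered out):
//
//	Encode([]Token{
//		{Line: 2, Start: 4, Len: 3, Type: TokKeyword},
//		{Line: 2, Start: 10, Len: 5, Type: TokVariable, Modifiers: []Modifier{ModReadonly}},
//	}, nil, nil)
//	// => []uint32{2, 4, 3, 8, 0,  0, 6, 5, 4, 2}
//	//
//	// First quintuple: absolute line 2, absolute start 4, length 3,
//	// type index 8 ("keyword" in TokenTypes), no modifiers.
//	// Second quintuple: same line (delta 0), so the start is relative (10-4 = 6),
//	// length 5, type index 4 ("variable"), and bit 1 set for ModReadonly (mask 2).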
+-func Encode( +- tokens []Token, +- encodeType map[Type]bool, +- encodeModifier map[Modifier]bool) []uint32 { +- +- // binary operators, at least, will be out of order +- sort.Slice(tokens, func(i, j int) bool { +- if tokens[i].Line != tokens[j].Line { +- return tokens[i].Line < tokens[j].Line +- } +- return tokens[i].Start < tokens[j].Start +- }) +- +- typeMap := make(map[Type]int) +- for i, t := range TokenTypes { +- if enable, ok := encodeType[t]; ok && !enable { +- continue +- } +- typeMap[Type(t)] = i +- } +- +- modMap := make(map[Modifier]int) +- for i, m := range TokenModifiers { +- if enable, ok := encodeModifier[m]; ok && !enable { +- continue +- } +- modMap[Modifier(m)] = 1 << i +- } +- +- // each semantic token needs five values but some tokens might be skipped. +- // (see Integer Encoding for Tokens in the LSP spec) +- x := make([]uint32, 5*len(tokens)) +- var j int +- var last Token +- for i := range tokens { +- item := tokens[i] +- typ, ok := typeMap[item.Type] +- if !ok { +- continue // client doesn't want semantic token info. +- } +- if j == 0 { +- x[0] = tokens[0].Line +- } else { +- x[j] = item.Line - last.Line +- } +- x[j+1] = item.Start +- if j > 0 && x[j] == 0 { +- x[j+1] = item.Start - last.Start +- } +- x[j+2] = item.Len +- x[j+3] = uint32(typ) +- mask := 0 +- for _, s := range item.Modifiers { +- // modMap[s] is 0 if the client doesn't want this modifier +- mask |= modMap[s] +- } +- x[j+4] = uint32(mask) +- j += 5 +- last = item +- } +- return x[:j] +-} +diff -urN a/gopls/internal/protocol/span.go b/gopls/internal/protocol/span.go +--- a/gopls/internal/protocol/span.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/protocol/span.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,131 +0,0 @@ +-// Copyright 2018 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package protocol +- +-import ( +- "fmt" +- "unicode/utf8" +-) +- +-// Empty reports whether the Range is an empty selection. +-func (rng Range) Empty() bool { return rng.Start == rng.End } +- +-// Empty reports whether the Location is an empty selection. +-func (loc Location) Empty() bool { return loc.Range.Empty() } +- +-// CompareLocation defines a three-valued comparison over locations, +-// lexicographically ordered by (URI, Range). +-func CompareLocation(x, y Location) int { +- if x.URI != y.URI { +- if x.URI < y.URI { +- return -1 +- } else { +- return +1 +- } +- } +- return CompareRange(x.Range, y.Range) +-} +- +-// CompareRange returns -1 if a is before b, 0 if a == b, and 1 if a is after b. +-// +-// A range a is defined to be 'before' b if a.Start is before b.Start, or +-// a.Start == b.Start and a.End is before b.End. +-func CompareRange(a, b Range) int { +- if r := ComparePosition(a.Start, b.Start); r != 0 { +- return r +- } +- return ComparePosition(a.End, b.End) +-} +- +-// ComparePosition returns -1 if a is before b, 0 if a == b, and 1 if a is after b. +-func ComparePosition(a, b Position) int { +- if a.Line != b.Line { +- if a.Line < b.Line { +- return -1 +- } else { +- return +1 +- } +- } +- if a.Character != b.Character { +- if a.Character < b.Character { +- return -1 +- } else { +- return +1 +- } +- } +- return 0 +-} +- +-// Intersect reports whether x and y intersect. +-// +-// Two non-empty half-open integer intervals intersect iff: +-// +-// y.start < x.end && x.start < y.end +-// +-// Mathematical conventional views an interval as a set of integers. 
+-// An empty interval is the empty set, so its intersection with any +-// other interval is empty, and thus an empty interval does not +-// intersect any other interval. +-// +-// However, this function uses a looser definition appropriate for +-// text selections: if either x or y is empty, it uses <= operators +-// instead, so an empty range within or abutting a non-empty range is +-// considered to overlap it, and an empty range overlaps itself. +-// +-// This handles the common case in which there is no selection, but +-// the cursor is at the start or end of an expression and the caller +-// wants to know whether the cursor intersects the range of the +-// expression. The answer in this case should be yes, even though the +-// selection is empty. Similarly the answer should also be yes if the +-// cursor is properly within the range of the expression. But a +-// non-empty selection abutting the expression should not be +-// considered to intersect it. +-func Intersect(x, y Range) bool { +- r1 := ComparePosition(x.Start, y.End) +- r2 := ComparePosition(y.Start, x.End) +- if r1 < 0 && r2 < 0 { +- return true // mathematical intersection +- } +- return (x.Empty() || y.Empty()) && r1 <= 0 && r2 <= 0 +-} +- +-// Format implements fmt.Formatter. +-// +-// Note: Formatter is implemented instead of Stringer (presumably) for +-// performance reasons, though it is not clear that it matters in practice. +-func (r Range) Format(f fmt.State, _ rune) { +- fmt.Fprintf(f, "%v-%v", r.Start, r.End) +-} +- +-// Format implements fmt.Formatter. +-// +-// See Range.Format for discussion of why the Formatter interface is +-// implemented rather than Stringer. +-func (p Position) Format(f fmt.State, _ rune) { +- fmt.Fprintf(f, "%v:%v", p.Line, p.Character) +-} +- +-// -- implementation helpers -- +- +-// UTF16Len returns the number of codes in the UTF-16 transcoding of s. +-func UTF16Len(s []byte) int { +- var n int +- for len(s) > 0 { +- n++ +- +- // Fast path for ASCII. +- if s[0] < 0x80 { +- s = s[1:] +- continue +- } +- +- r, size := utf8.DecodeRune(s) +- if r >= 0x10000 { +- n++ // surrogate pair +- } +- s = s[size:] +- } +- return n +-} +diff -urN a/gopls/internal/protocol/tsclient.go b/gopls/internal/protocol/tsclient.go +--- a/gopls/internal/protocol/tsclient.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/protocol/tsclient.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,323 +0,0 @@ +-// Copyright 2023 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-// Code generated for LSP. DO NOT EDIT. +- +-package protocol +- +-// Code generated from protocol/metaModel.json at ref release/protocol/3.17.6-next.14 (hash 66a087310eea0d60495ba3578d78f70409c403d9). +-// https://github.com/microsoft/vscode-languageserver-node/blob/release/protocol/3.17.6-next.14/protocol/metaModel.json +-// LSP metaData.version = 3.17.0. 
+- +-import ( +- "context" +- "encoding/json" +- "fmt" +- +- "golang.org/x/tools/internal/jsonrpc2" +-) +- +-type Client interface { +- // See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#logTrace +- LogTrace(context.Context, *LogTraceParams) error +- // See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#progress +- Progress(context.Context, *ProgressParams) error +- // See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#client_registerCapability +- RegisterCapability(context.Context, *RegistrationParams) error +- // See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#client_unregisterCapability +- UnregisterCapability(context.Context, *UnregistrationParams) error +- // See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#telemetry_event +- Event(context.Context, *any) error +- // See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#textDocument_publishDiagnostics +- PublishDiagnostics(context.Context, *PublishDiagnosticsParams) error +- // See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#window_logMessage +- LogMessage(context.Context, *LogMessageParams) error +- // See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#window_showDocument +- ShowDocument(context.Context, *ShowDocumentParams) (*ShowDocumentResult, error) +- // See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#window_showMessage +- ShowMessage(context.Context, *ShowMessageParams) error +- // See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#window_showMessageRequest +- ShowMessageRequest(context.Context, *ShowMessageRequestParams) (*MessageActionItem, error) +- // See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#window_workDoneProgress_create +- WorkDoneProgressCreate(context.Context, *WorkDoneProgressCreateParams) error +- // See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#workspace_applyEdit +- ApplyEdit(context.Context, *ApplyWorkspaceEditParams) (*ApplyWorkspaceEditResult, error) +- // See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#workspace_codeLens_refresh +- CodeLensRefresh(context.Context) error +- // See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#workspace_configuration +- Configuration(context.Context, *ParamConfiguration) ([]LSPAny, error) +- // See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#workspace_diagnostic_refresh +- DiagnosticRefresh(context.Context) error +- // See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#workspace_foldingRange_refresh +- FoldingRangeRefresh(context.Context) error +- // See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#workspace_inlayHint_refresh +- InlayHintRefresh(context.Context) error +- // See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#workspace_inlineValue_refresh +- InlineValueRefresh(context.Context) error +- // See 
https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#workspace_semanticTokens_refresh +- SemanticTokensRefresh(context.Context) error +- // See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#workspace_textDocumentContent_refresh +- TextDocumentContentRefresh(context.Context, *TextDocumentContentRefreshParams) error +- // See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#workspace_workspaceFolders +- WorkspaceFolders(context.Context) ([]WorkspaceFolder, error) +-} +- +-func clientDispatch(ctx context.Context, client Client, reply jsonrpc2.Replier, r jsonrpc2.Request) (bool, error) { +- resp, valid, err := ClientDispatchCall(ctx, client, r.Method(), r.Params()) +- if !valid { +- return false, nil +- } +- +- if err != nil { +- return valid, reply(ctx, nil, err) +- } else { +- return valid, reply(ctx, resp, nil) +- } +-} +- +-func ClientDispatchCall(ctx context.Context, client Client, method string, raw json.RawMessage) (resp any, _ bool, err error) { +- switch method { +- case "$/logTrace": +- var params LogTraceParams +- if err := UnmarshalJSON(raw, &params); err != nil { +- return nil, true, fmt.Errorf("%w: %s", jsonrpc2.ErrParse, err) +- } +- err := client.LogTrace(ctx, &params) +- return nil, true, err +- +- case "$/progress": +- var params ProgressParams +- if err := UnmarshalJSON(raw, &params); err != nil { +- return nil, true, fmt.Errorf("%w: %s", jsonrpc2.ErrParse, err) +- } +- err := client.Progress(ctx, &params) +- return nil, true, err +- +- case "client/registerCapability": +- var params RegistrationParams +- if err := UnmarshalJSON(raw, &params); err != nil { +- return nil, true, fmt.Errorf("%w: %s", jsonrpc2.ErrParse, err) +- } +- err := client.RegisterCapability(ctx, &params) +- return nil, true, err +- +- case "client/unregisterCapability": +- var params UnregistrationParams +- if err := UnmarshalJSON(raw, &params); err != nil { +- return nil, true, fmt.Errorf("%w: %s", jsonrpc2.ErrParse, err) +- } +- err := client.UnregisterCapability(ctx, &params) +- return nil, true, err +- +- case "telemetry/event": +- var params any +- if err := UnmarshalJSON(raw, &params); err != nil { +- return nil, true, fmt.Errorf("%w: %s", jsonrpc2.ErrParse, err) +- } +- err := client.Event(ctx, &params) +- return nil, true, err +- +- case "textDocument/publishDiagnostics": +- var params PublishDiagnosticsParams +- if err := UnmarshalJSON(raw, &params); err != nil { +- return nil, true, fmt.Errorf("%w: %s", jsonrpc2.ErrParse, err) +- } +- err := client.PublishDiagnostics(ctx, &params) +- return nil, true, err +- +- case "window/logMessage": +- var params LogMessageParams +- if err := UnmarshalJSON(raw, &params); err != nil { +- return nil, true, fmt.Errorf("%w: %s", jsonrpc2.ErrParse, err) +- } +- err := client.LogMessage(ctx, &params) +- return nil, true, err +- +- case "window/showDocument": +- var params ShowDocumentParams +- if err := UnmarshalJSON(raw, &params); err != nil { +- return nil, true, fmt.Errorf("%w: %s", jsonrpc2.ErrParse, err) +- } +- resp, err := client.ShowDocument(ctx, &params) +- if err != nil { +- return nil, true, err +- } +- return resp, true, nil +- +- case "window/showMessage": +- var params ShowMessageParams +- if err := UnmarshalJSON(raw, &params); err != nil { +- return nil, true, fmt.Errorf("%w: %s", jsonrpc2.ErrParse, err) +- } +- err := client.ShowMessage(ctx, &params) +- return nil, true, err +- +- case "window/showMessageRequest": +- var params ShowMessageRequestParams +- if err := UnmarshalJSON(raw, &params); err 
!= nil { +- return nil, true, fmt.Errorf("%w: %s", jsonrpc2.ErrParse, err) +- } +- resp, err := client.ShowMessageRequest(ctx, &params) +- if err != nil { +- return nil, true, err +- } +- return resp, true, nil +- +- case "window/workDoneProgress/create": +- var params WorkDoneProgressCreateParams +- if err := UnmarshalJSON(raw, &params); err != nil { +- return nil, true, fmt.Errorf("%w: %s", jsonrpc2.ErrParse, err) +- } +- err := client.WorkDoneProgressCreate(ctx, &params) +- return nil, true, err +- +- case "workspace/applyEdit": +- var params ApplyWorkspaceEditParams +- if err := UnmarshalJSON(raw, &params); err != nil { +- return nil, true, fmt.Errorf("%w: %s", jsonrpc2.ErrParse, err) +- } +- resp, err := client.ApplyEdit(ctx, &params) +- if err != nil { +- return nil, true, err +- } +- return resp, true, nil +- +- case "workspace/codeLens/refresh": +- err := client.CodeLensRefresh(ctx) +- return nil, true, err +- +- case "workspace/configuration": +- var params ParamConfiguration +- if err := UnmarshalJSON(raw, &params); err != nil { +- return nil, true, fmt.Errorf("%w: %s", jsonrpc2.ErrParse, err) +- } +- resp, err := client.Configuration(ctx, &params) +- if err != nil { +- return nil, true, err +- } +- return resp, true, nil +- +- case "workspace/diagnostic/refresh": +- err := client.DiagnosticRefresh(ctx) +- return nil, true, err +- +- case "workspace/foldingRange/refresh": +- err := client.FoldingRangeRefresh(ctx) +- return nil, true, err +- +- case "workspace/inlayHint/refresh": +- err := client.InlayHintRefresh(ctx) +- return nil, true, err +- +- case "workspace/inlineValue/refresh": +- err := client.InlineValueRefresh(ctx) +- return nil, true, err +- +- case "workspace/semanticTokens/refresh": +- err := client.SemanticTokensRefresh(ctx) +- return nil, true, err +- +- case "workspace/textDocumentContent/refresh": +- var params TextDocumentContentRefreshParams +- if err := UnmarshalJSON(raw, &params); err != nil { +- return nil, true, fmt.Errorf("%w: %s", jsonrpc2.ErrParse, err) +- } +- err := client.TextDocumentContentRefresh(ctx, &params) +- return nil, true, err +- +- case "workspace/workspaceFolders": +- resp, err := client.WorkspaceFolders(ctx) +- if err != nil { +- return nil, true, err +- } +- return resp, true, nil +- +- default: +- return nil, false, nil +- } +-} +- +-func (s *clientDispatcher) LogTrace(ctx context.Context, params *LogTraceParams) error { +- return s.sender.Notify(ctx, "$/logTrace", params) +-} +-func (s *clientDispatcher) Progress(ctx context.Context, params *ProgressParams) error { +- return s.sender.Notify(ctx, "$/progress", params) +-} +-func (s *clientDispatcher) RegisterCapability(ctx context.Context, params *RegistrationParams) error { +- return s.sender.Call(ctx, "client/registerCapability", params, nil) +-} +-func (s *clientDispatcher) UnregisterCapability(ctx context.Context, params *UnregistrationParams) error { +- return s.sender.Call(ctx, "client/unregisterCapability", params, nil) +-} +-func (s *clientDispatcher) Event(ctx context.Context, params *any) error { +- return s.sender.Notify(ctx, "telemetry/event", params) +-} +-func (s *clientDispatcher) PublishDiagnostics(ctx context.Context, params *PublishDiagnosticsParams) error { +- return s.sender.Notify(ctx, "textDocument/publishDiagnostics", params) +-} +-func (s *clientDispatcher) LogMessage(ctx context.Context, params *LogMessageParams) error { +- return s.sender.Notify(ctx, "window/logMessage", params) +-} +-func (s *clientDispatcher) ShowDocument(ctx context.Context, params *ShowDocumentParams) (*ShowDocumentResult, 
error) { +- var result *ShowDocumentResult +- if err := s.sender.Call(ctx, "window/showDocument", params, &result); err != nil { +- return nil, err +- } +- return result, nil +-} +-func (s *clientDispatcher) ShowMessage(ctx context.Context, params *ShowMessageParams) error { +- return s.sender.Notify(ctx, "window/showMessage", params) +-} +-func (s *clientDispatcher) ShowMessageRequest(ctx context.Context, params *ShowMessageRequestParams) (*MessageActionItem, error) { +- var result *MessageActionItem +- if err := s.sender.Call(ctx, "window/showMessageRequest", params, &result); err != nil { +- return nil, err +- } +- return result, nil +-} +-func (s *clientDispatcher) WorkDoneProgressCreate(ctx context.Context, params *WorkDoneProgressCreateParams) error { +- return s.sender.Call(ctx, "window/workDoneProgress/create", params, nil) +-} +-func (s *clientDispatcher) ApplyEdit(ctx context.Context, params *ApplyWorkspaceEditParams) (*ApplyWorkspaceEditResult, error) { +- var result *ApplyWorkspaceEditResult +- if err := s.sender.Call(ctx, "workspace/applyEdit", params, &result); err != nil { +- return nil, err +- } +- return result, nil +-} +-func (s *clientDispatcher) CodeLensRefresh(ctx context.Context) error { +- return s.sender.Call(ctx, "workspace/codeLens/refresh", nil, nil) +-} +-func (s *clientDispatcher) Configuration(ctx context.Context, params *ParamConfiguration) ([]LSPAny, error) { +- var result []LSPAny +- if err := s.sender.Call(ctx, "workspace/configuration", params, &result); err != nil { +- return nil, err +- } +- return result, nil +-} +-func (s *clientDispatcher) DiagnosticRefresh(ctx context.Context) error { +- return s.sender.Call(ctx, "workspace/diagnostic/refresh", nil, nil) +-} +-func (s *clientDispatcher) FoldingRangeRefresh(ctx context.Context) error { +- return s.sender.Call(ctx, "workspace/foldingRange/refresh", nil, nil) +-} +-func (s *clientDispatcher) InlayHintRefresh(ctx context.Context) error { +- return s.sender.Call(ctx, "workspace/inlayHint/refresh", nil, nil) +-} +-func (s *clientDispatcher) InlineValueRefresh(ctx context.Context) error { +- return s.sender.Call(ctx, "workspace/inlineValue/refresh", nil, nil) +-} +-func (s *clientDispatcher) SemanticTokensRefresh(ctx context.Context) error { +- return s.sender.Call(ctx, "workspace/semanticTokens/refresh", nil, nil) +-} +-func (s *clientDispatcher) TextDocumentContentRefresh(ctx context.Context, params *TextDocumentContentRefreshParams) error { +- return s.sender.Call(ctx, "workspace/textDocumentContent/refresh", params, nil) +-} +-func (s *clientDispatcher) WorkspaceFolders(ctx context.Context) ([]WorkspaceFolder, error) { +- var result []WorkspaceFolder +- if err := s.sender.Call(ctx, "workspace/workspaceFolders", nil, &result); err != nil { +- return nil, err +- } +- return result, nil +-} +diff -urN a/gopls/internal/protocol/tsdocument_changes.go b/gopls/internal/protocol/tsdocument_changes.go +--- a/gopls/internal/protocol/tsdocument_changes.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/protocol/tsdocument_changes.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,81 +0,0 @@ +-// Copyright 2022 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package protocol +- +-import ( +- "encoding/json" +- "fmt" +-) +- +-// DocumentChange is a union of various file edit operations. +-// +-// Exactly one field of this struct is non-nil; see [DocumentChange.Valid]. 
+-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#resourceChanges +-type DocumentChange struct { +- TextDocumentEdit *TextDocumentEdit +- CreateFile *CreateFile +- RenameFile *RenameFile +- DeleteFile *DeleteFile +-} +- +-// Valid reports whether the DocumentChange sum-type value is valid, +-// that is, exactly one of create, delete, edit, or rename. +-func (ch DocumentChange) Valid() bool { +- n := 0 +- if ch.TextDocumentEdit != nil { +- n++ +- } +- if ch.CreateFile != nil { +- n++ +- } +- if ch.RenameFile != nil { +- n++ +- } +- if ch.DeleteFile != nil { +- n++ +- } +- return n == 1 +-} +- +-func (d *DocumentChange) UnmarshalJSON(data []byte) error { +- var m map[string]any +- if err := json.Unmarshal(data, &m); err != nil { +- return err +- } +- +- if _, ok := m["textDocument"]; ok { +- d.TextDocumentEdit = new(TextDocumentEdit) +- return json.Unmarshal(data, d.TextDocumentEdit) +- } +- +- // The {Create,Rename,Delete}File types all share a 'kind' field. +- kind := m["kind"] +- switch kind { +- case "create": +- d.CreateFile = new(CreateFile) +- return json.Unmarshal(data, d.CreateFile) +- case "rename": +- d.RenameFile = new(RenameFile) +- return json.Unmarshal(data, d.RenameFile) +- case "delete": +- d.DeleteFile = new(DeleteFile) +- return json.Unmarshal(data, d.DeleteFile) +- } +- return fmt.Errorf("DocumentChanges: unexpected kind: %q", kind) +-} +- +-func (d *DocumentChange) MarshalJSON() ([]byte, error) { +- if d.TextDocumentEdit != nil { +- return json.Marshal(d.TextDocumentEdit) +- } else if d.CreateFile != nil { +- return json.Marshal(d.CreateFile) +- } else if d.RenameFile != nil { +- return json.Marshal(d.RenameFile) +- } else if d.DeleteFile != nil { +- return json.Marshal(d.DeleteFile) +- } +- return nil, fmt.Errorf("empty DocumentChanges union value") +-} +diff -urN a/gopls/internal/protocol/tsinsertreplaceedit.go b/gopls/internal/protocol/tsinsertreplaceedit.go +--- a/gopls/internal/protocol/tsinsertreplaceedit.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/protocol/tsinsertreplaceedit.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,40 +0,0 @@ +-// Copyright 2024 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package protocol +- +-import ( +- "encoding/json" +- "fmt" +-) +- +-// InsertReplaceEdit is used instead of TextEdit in CompletionItem +-// in editors that support it. These two types are alike in appearance +-// but can be differentiated by the presence or absence of +-// certain properties. UnmarshalJSON of the sum type tries to +-// unmarshal as TextEdit only if unmarshal as InsertReplaceEdit fails. +-// However, due to this similarity, unmarshal with the other type +-// never fails. This file has a custom JSON unmarshaller for +-// InsertReplaceEdit, that fails if the required fields are missing. +- +-// UnmarshalJSON unmarshals InsertReplaceEdit with extra +-// checks on the presence of "insert" and "replace" properties. 
+-func (e *InsertReplaceEdit) UnmarshalJSON(data []byte) error { +- var required struct { +- NewText string +- Insert *Range `json:"insert,omitempty"` +- Replace *Range `json:"replace,omitempty"` +- } +- +- if err := json.Unmarshal(data, &required); err != nil { +- return err +- } +- if required.Insert == nil && required.Replace == nil { +- return fmt.Errorf("not InsertReplaceEdit") +- } +- e.NewText = required.NewText +- e.Insert = *required.Insert +- e.Replace = *required.Replace +- return nil +-} +diff -urN a/gopls/internal/protocol/tsinsertreplaceedit_test.go b/gopls/internal/protocol/tsinsertreplaceedit_test.go +--- a/gopls/internal/protocol/tsinsertreplaceedit_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/protocol/tsinsertreplaceedit_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,44 +0,0 @@ +-// Copyright 2024 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package protocol +- +-import ( +- "encoding/json" +- "testing" +- +- "github.com/google/go-cmp/cmp" +-) +- +-func TestInsertReplaceEdit_UnmarshalJSON(t *testing.T) { +- tests := []struct { +- name string +- in any +- wantErr bool +- }{ +- { +- name: "TextEdit", +- in: TextEdit{NewText: "new text", Range: Range{Start: Position{Line: 1}}}, +- }, +- { +- name: "InsertReplaceEdit", +- in: InsertReplaceEdit{NewText: "new text", Insert: Range{Start: Position{Line: 100}}, Replace: Range{End: Position{Line: 200}}}, +- }, +- } +- for _, tt := range tests { +- t.Run(tt.name, func(t *testing.T) { +- data, err := json.MarshalIndent(Or_CompletionItem_textEdit{Value: tt.in}, "", " ") +- if err != nil { +- t.Fatalf("failed to marshal: %v", err) +- } +- var decoded Or_CompletionItem_textEdit +- if err := json.Unmarshal(data, &decoded); err != nil { +- t.Fatalf("failed to unmarshal: %v", err) +- } +- if diff := cmp.Diff(tt.in, decoded.Value); diff != "" { +- t.Errorf("unmarshal returns unexpected result: (-want +got):\n%s", diff) +- } +- }) +- } +-} +diff -urN a/gopls/internal/protocol/tsjson.go b/gopls/internal/protocol/tsjson.go +--- a/gopls/internal/protocol/tsjson.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/protocol/tsjson.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,2167 +0,0 @@ +-// Copyright 2023 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-// Code generated for LSP. DO NOT EDIT. +- +-package protocol +- +-// Code generated from protocol/metaModel.json at ref release/protocol/3.17.6-next.14 (hash 66a087310eea0d60495ba3578d78f70409c403d9). +-// https://github.com/microsoft/vscode-languageserver-node/blob/release/protocol/3.17.6-next.14/protocol/metaModel.json +-// LSP metaData.version = 3.17.0. +- +-import "encoding/json" +- +-import "fmt" +- +-// UnmarshalError indicates that a JSON value did not conform to +-// one of the expected cases of an LSP union type. 
+-type UnmarshalError struct { +- msg string +-} +- +-func (e UnmarshalError) Error() string { +- return e.msg +-} +-func (t OrPLocation_workspace_symbol) MarshalJSON() ([]byte, error) { +- switch x := t.Value.(type) { +- case Location: +- return json.Marshal(x) +- case LocationUriOnly: +- return json.Marshal(x) +- case nil: +- return []byte("null"), nil +- } +- return nil, fmt.Errorf("type %T not one of [Location LocationUriOnly]", t) +-} +- +-func (t *OrPLocation_workspace_symbol) UnmarshalJSON(x []byte) error { +- if string(x) == "null" { +- t.Value = nil +- return nil +- } +- var h0 Location +- if err := json.Unmarshal(x, &h0); err == nil { +- t.Value = h0 +- return nil +- } +- var h1 LocationUriOnly +- if err := json.Unmarshal(x, &h1); err == nil { +- t.Value = h1 +- return nil +- } +- return &UnmarshalError{"unmarshal failed to match one of [Location LocationUriOnly]"} +-} +- +-func (t OrPSection_workspace_didChangeConfiguration) MarshalJSON() ([]byte, error) { +- switch x := t.Value.(type) { +- case []string: +- return json.Marshal(x) +- case string: +- return json.Marshal(x) +- case nil: +- return []byte("null"), nil +- } +- return nil, fmt.Errorf("type %T not one of [[]string string]", t) +-} +- +-func (t *OrPSection_workspace_didChangeConfiguration) UnmarshalJSON(x []byte) error { +- if string(x) == "null" { +- t.Value = nil +- return nil +- } +- var h0 []string +- if err := json.Unmarshal(x, &h0); err == nil { +- t.Value = h0 +- return nil +- } +- var h1 string +- if err := json.Unmarshal(x, &h1); err == nil { +- t.Value = h1 +- return nil +- } +- return &UnmarshalError{"unmarshal failed to match one of [[]string string]"} +-} +- +-func (t OrPTooltipPLabel) MarshalJSON() ([]byte, error) { +- switch x := t.Value.(type) { +- case MarkupContent: +- return json.Marshal(x) +- case string: +- return json.Marshal(x) +- case nil: +- return []byte("null"), nil +- } +- return nil, fmt.Errorf("type %T not one of [MarkupContent string]", t) +-} +- +-func (t *OrPTooltipPLabel) UnmarshalJSON(x []byte) error { +- if string(x) == "null" { +- t.Value = nil +- return nil +- } +- var h0 MarkupContent +- if err := json.Unmarshal(x, &h0); err == nil { +- t.Value = h0 +- return nil +- } +- var h1 string +- if err := json.Unmarshal(x, &h1); err == nil { +- t.Value = h1 +- return nil +- } +- return &UnmarshalError{"unmarshal failed to match one of [MarkupContent string]"} +-} +- +-func (t OrPTooltip_textDocument_inlayHint) MarshalJSON() ([]byte, error) { +- switch x := t.Value.(type) { +- case MarkupContent: +- return json.Marshal(x) +- case string: +- return json.Marshal(x) +- case nil: +- return []byte("null"), nil +- } +- return nil, fmt.Errorf("type %T not one of [MarkupContent string]", t) +-} +- +-func (t *OrPTooltip_textDocument_inlayHint) UnmarshalJSON(x []byte) error { +- if string(x) == "null" { +- t.Value = nil +- return nil +- } +- var h0 MarkupContent +- if err := json.Unmarshal(x, &h0); err == nil { +- t.Value = h0 +- return nil +- } +- var h1 string +- if err := json.Unmarshal(x, &h1); err == nil { +- t.Value = h1 +- return nil +- } +- return &UnmarshalError{"unmarshal failed to match one of [MarkupContent string]"} +-} +- +-func (t Or_CancelParams_id) MarshalJSON() ([]byte, error) { +- switch x := t.Value.(type) { +- case int32: +- return json.Marshal(x) +- case string: +- return json.Marshal(x) +- case nil: +- return []byte("null"), nil +- } +- return nil, fmt.Errorf("type %T not one of [int32 string]", t) +-} +- +-func (t *Or_CancelParams_id) UnmarshalJSON(x []byte) error { +- if 
string(x) == "null" { +- t.Value = nil +- return nil +- } +- var h0 int32 +- if err := json.Unmarshal(x, &h0); err == nil { +- t.Value = h0 +- return nil +- } +- var h1 string +- if err := json.Unmarshal(x, &h1); err == nil { +- t.Value = h1 +- return nil +- } +- return &UnmarshalError{"unmarshal failed to match one of [int32 string]"} +-} +- +-func (t Or_ClientSemanticTokensRequestOptions_full) MarshalJSON() ([]byte, error) { +- switch x := t.Value.(type) { +- case ClientSemanticTokensRequestFullDelta: +- return json.Marshal(x) +- case bool: +- return json.Marshal(x) +- case nil: +- return []byte("null"), nil +- } +- return nil, fmt.Errorf("type %T not one of [ClientSemanticTokensRequestFullDelta bool]", t) +-} +- +-func (t *Or_ClientSemanticTokensRequestOptions_full) UnmarshalJSON(x []byte) error { +- if string(x) == "null" { +- t.Value = nil +- return nil +- } +- var h0 ClientSemanticTokensRequestFullDelta +- if err := json.Unmarshal(x, &h0); err == nil { +- t.Value = h0 +- return nil +- } +- var h1 bool +- if err := json.Unmarshal(x, &h1); err == nil { +- t.Value = h1 +- return nil +- } +- return &UnmarshalError{"unmarshal failed to match one of [ClientSemanticTokensRequestFullDelta bool]"} +-} +- +-func (t Or_ClientSemanticTokensRequestOptions_range) MarshalJSON() ([]byte, error) { +- switch x := t.Value.(type) { +- case Lit_ClientSemanticTokensRequestOptions_range_Item1: +- return json.Marshal(x) +- case bool: +- return json.Marshal(x) +- case nil: +- return []byte("null"), nil +- } +- return nil, fmt.Errorf("type %T not one of [Lit_ClientSemanticTokensRequestOptions_range_Item1 bool]", t) +-} +- +-func (t *Or_ClientSemanticTokensRequestOptions_range) UnmarshalJSON(x []byte) error { +- if string(x) == "null" { +- t.Value = nil +- return nil +- } +- var h0 Lit_ClientSemanticTokensRequestOptions_range_Item1 +- if err := json.Unmarshal(x, &h0); err == nil { +- t.Value = h0 +- return nil +- } +- var h1 bool +- if err := json.Unmarshal(x, &h1); err == nil { +- t.Value = h1 +- return nil +- } +- return &UnmarshalError{"unmarshal failed to match one of [Lit_ClientSemanticTokensRequestOptions_range_Item1 bool]"} +-} +- +-func (t Or_CompletionItemDefaults_editRange) MarshalJSON() ([]byte, error) { +- switch x := t.Value.(type) { +- case EditRangeWithInsertReplace: +- return json.Marshal(x) +- case Range: +- return json.Marshal(x) +- case nil: +- return []byte("null"), nil +- } +- return nil, fmt.Errorf("type %T not one of [EditRangeWithInsertReplace Range]", t) +-} +- +-func (t *Or_CompletionItemDefaults_editRange) UnmarshalJSON(x []byte) error { +- if string(x) == "null" { +- t.Value = nil +- return nil +- } +- var h0 EditRangeWithInsertReplace +- if err := json.Unmarshal(x, &h0); err == nil { +- t.Value = h0 +- return nil +- } +- var h1 Range +- if err := json.Unmarshal(x, &h1); err == nil { +- t.Value = h1 +- return nil +- } +- return &UnmarshalError{"unmarshal failed to match one of [EditRangeWithInsertReplace Range]"} +-} +- +-func (t Or_CompletionItem_documentation) MarshalJSON() ([]byte, error) { +- switch x := t.Value.(type) { +- case MarkupContent: +- return json.Marshal(x) +- case string: +- return json.Marshal(x) +- case nil: +- return []byte("null"), nil +- } +- return nil, fmt.Errorf("type %T not one of [MarkupContent string]", t) +-} +- +-func (t *Or_CompletionItem_documentation) UnmarshalJSON(x []byte) error { +- if string(x) == "null" { +- t.Value = nil +- return nil +- } +- var h0 MarkupContent +- if err := json.Unmarshal(x, &h0); err == nil { +- t.Value = h0 +- return nil +- 
} +- var h1 string +- if err := json.Unmarshal(x, &h1); err == nil { +- t.Value = h1 +- return nil +- } +- return &UnmarshalError{"unmarshal failed to match one of [MarkupContent string]"} +-} +- +-func (t Or_CompletionItem_textEdit) MarshalJSON() ([]byte, error) { +- switch x := t.Value.(type) { +- case InsertReplaceEdit: +- return json.Marshal(x) +- case TextEdit: +- return json.Marshal(x) +- case nil: +- return []byte("null"), nil +- } +- return nil, fmt.Errorf("type %T not one of [InsertReplaceEdit TextEdit]", t) +-} +- +-func (t *Or_CompletionItem_textEdit) UnmarshalJSON(x []byte) error { +- if string(x) == "null" { +- t.Value = nil +- return nil +- } +- var h0 InsertReplaceEdit +- if err := json.Unmarshal(x, &h0); err == nil { +- t.Value = h0 +- return nil +- } +- var h1 TextEdit +- if err := json.Unmarshal(x, &h1); err == nil { +- t.Value = h1 +- return nil +- } +- return &UnmarshalError{"unmarshal failed to match one of [InsertReplaceEdit TextEdit]"} +-} +- +-func (t Or_Definition) MarshalJSON() ([]byte, error) { +- switch x := t.Value.(type) { +- case Location: +- return json.Marshal(x) +- case []Location: +- return json.Marshal(x) +- case nil: +- return []byte("null"), nil +- } +- return nil, fmt.Errorf("type %T not one of [Location []Location]", t) +-} +- +-func (t *Or_Definition) UnmarshalJSON(x []byte) error { +- if string(x) == "null" { +- t.Value = nil +- return nil +- } +- var h0 Location +- if err := json.Unmarshal(x, &h0); err == nil { +- t.Value = h0 +- return nil +- } +- var h1 []Location +- if err := json.Unmarshal(x, &h1); err == nil { +- t.Value = h1 +- return nil +- } +- return &UnmarshalError{"unmarshal failed to match one of [Location []Location]"} +-} +- +-func (t Or_Diagnostic_code) MarshalJSON() ([]byte, error) { +- switch x := t.Value.(type) { +- case int32: +- return json.Marshal(x) +- case string: +- return json.Marshal(x) +- case nil: +- return []byte("null"), nil +- } +- return nil, fmt.Errorf("type %T not one of [int32 string]", t) +-} +- +-func (t *Or_Diagnostic_code) UnmarshalJSON(x []byte) error { +- if string(x) == "null" { +- t.Value = nil +- return nil +- } +- var h0 int32 +- if err := json.Unmarshal(x, &h0); err == nil { +- t.Value = h0 +- return nil +- } +- var h1 string +- if err := json.Unmarshal(x, &h1); err == nil { +- t.Value = h1 +- return nil +- } +- return &UnmarshalError{"unmarshal failed to match one of [int32 string]"} +-} +- +-func (t Or_DocumentDiagnosticReport) MarshalJSON() ([]byte, error) { +- switch x := t.Value.(type) { +- case RelatedFullDocumentDiagnosticReport: +- return json.Marshal(x) +- case RelatedUnchangedDocumentDiagnosticReport: +- return json.Marshal(x) +- case nil: +- return []byte("null"), nil +- } +- return nil, fmt.Errorf("type %T not one of [RelatedFullDocumentDiagnosticReport RelatedUnchangedDocumentDiagnosticReport]", t) +-} +- +-func (t *Or_DocumentDiagnosticReport) UnmarshalJSON(x []byte) error { +- if string(x) == "null" { +- t.Value = nil +- return nil +- } +- var h0 RelatedFullDocumentDiagnosticReport +- if err := json.Unmarshal(x, &h0); err == nil { +- t.Value = h0 +- return nil +- } +- var h1 RelatedUnchangedDocumentDiagnosticReport +- if err := json.Unmarshal(x, &h1); err == nil { +- t.Value = h1 +- return nil +- } +- return &UnmarshalError{"unmarshal failed to match one of [RelatedFullDocumentDiagnosticReport RelatedUnchangedDocumentDiagnosticReport]"} +-} +- +-func (t Or_DocumentDiagnosticReportPartialResult_relatedDocuments_Value) MarshalJSON() ([]byte, error) { +- switch x := t.Value.(type) { +- case 
FullDocumentDiagnosticReport: +- return json.Marshal(x) +- case UnchangedDocumentDiagnosticReport: +- return json.Marshal(x) +- case nil: +- return []byte("null"), nil +- } +- return nil, fmt.Errorf("type %T not one of [FullDocumentDiagnosticReport UnchangedDocumentDiagnosticReport]", t) +-} +- +-func (t *Or_DocumentDiagnosticReportPartialResult_relatedDocuments_Value) UnmarshalJSON(x []byte) error { +- if string(x) == "null" { +- t.Value = nil +- return nil +- } +- var h0 FullDocumentDiagnosticReport +- if err := json.Unmarshal(x, &h0); err == nil { +- t.Value = h0 +- return nil +- } +- var h1 UnchangedDocumentDiagnosticReport +- if err := json.Unmarshal(x, &h1); err == nil { +- t.Value = h1 +- return nil +- } +- return &UnmarshalError{"unmarshal failed to match one of [FullDocumentDiagnosticReport UnchangedDocumentDiagnosticReport]"} +-} +- +-func (t Or_DocumentFilter) MarshalJSON() ([]byte, error) { +- switch x := t.Value.(type) { +- case NotebookCellTextDocumentFilter: +- return json.Marshal(x) +- case TextDocumentFilter: +- return json.Marshal(x) +- case nil: +- return []byte("null"), nil +- } +- return nil, fmt.Errorf("type %T not one of [NotebookCellTextDocumentFilter TextDocumentFilter]", t) +-} +- +-func (t *Or_DocumentFilter) UnmarshalJSON(x []byte) error { +- if string(x) == "null" { +- t.Value = nil +- return nil +- } +- var h0 NotebookCellTextDocumentFilter +- if err := json.Unmarshal(x, &h0); err == nil { +- t.Value = h0 +- return nil +- } +- var h1 TextDocumentFilter +- if err := json.Unmarshal(x, &h1); err == nil { +- t.Value = h1 +- return nil +- } +- return &UnmarshalError{"unmarshal failed to match one of [NotebookCellTextDocumentFilter TextDocumentFilter]"} +-} +- +-func (t Or_GlobPattern) MarshalJSON() ([]byte, error) { +- switch x := t.Value.(type) { +- case Pattern: +- return json.Marshal(x) +- case RelativePattern: +- return json.Marshal(x) +- case nil: +- return []byte("null"), nil +- } +- return nil, fmt.Errorf("type %T not one of [Pattern RelativePattern]", t) +-} +- +-func (t *Or_GlobPattern) UnmarshalJSON(x []byte) error { +- if string(x) == "null" { +- t.Value = nil +- return nil +- } +- var h0 Pattern +- if err := json.Unmarshal(x, &h0); err == nil { +- t.Value = h0 +- return nil +- } +- var h1 RelativePattern +- if err := json.Unmarshal(x, &h1); err == nil { +- t.Value = h1 +- return nil +- } +- return &UnmarshalError{"unmarshal failed to match one of [Pattern RelativePattern]"} +-} +- +-func (t Or_Hover_contents) MarshalJSON() ([]byte, error) { +- switch x := t.Value.(type) { +- case MarkedString: +- return json.Marshal(x) +- case MarkupContent: +- return json.Marshal(x) +- case []MarkedString: +- return json.Marshal(x) +- case nil: +- return []byte("null"), nil +- } +- return nil, fmt.Errorf("type %T not one of [MarkedString MarkupContent []MarkedString]", t) +-} +- +-func (t *Or_Hover_contents) UnmarshalJSON(x []byte) error { +- if string(x) == "null" { +- t.Value = nil +- return nil +- } +- var h0 MarkedString +- if err := json.Unmarshal(x, &h0); err == nil { +- t.Value = h0 +- return nil +- } +- var h1 MarkupContent +- if err := json.Unmarshal(x, &h1); err == nil { +- t.Value = h1 +- return nil +- } +- var h2 []MarkedString +- if err := json.Unmarshal(x, &h2); err == nil { +- t.Value = h2 +- return nil +- } +- return &UnmarshalError{"unmarshal failed to match one of [MarkedString MarkupContent []MarkedString]"} +-} +- +-func (t Or_InlayHint_label) MarshalJSON() ([]byte, error) { +- switch x := t.Value.(type) { +- case []InlayHintLabelPart: +- return 
json.Marshal(x) +- case string: +- return json.Marshal(x) +- case nil: +- return []byte("null"), nil +- } +- return nil, fmt.Errorf("type %T not one of [[]InlayHintLabelPart string]", t) +-} +- +-func (t *Or_InlayHint_label) UnmarshalJSON(x []byte) error { +- if string(x) == "null" { +- t.Value = nil +- return nil +- } +- var h0 []InlayHintLabelPart +- if err := json.Unmarshal(x, &h0); err == nil { +- t.Value = h0 +- return nil +- } +- var h1 string +- if err := json.Unmarshal(x, &h1); err == nil { +- t.Value = h1 +- return nil +- } +- return &UnmarshalError{"unmarshal failed to match one of [[]InlayHintLabelPart string]"} +-} +- +-func (t Or_InlineCompletionItem_insertText) MarshalJSON() ([]byte, error) { +- switch x := t.Value.(type) { +- case StringValue: +- return json.Marshal(x) +- case string: +- return json.Marshal(x) +- case nil: +- return []byte("null"), nil +- } +- return nil, fmt.Errorf("type %T not one of [StringValue string]", t) +-} +- +-func (t *Or_InlineCompletionItem_insertText) UnmarshalJSON(x []byte) error { +- if string(x) == "null" { +- t.Value = nil +- return nil +- } +- var h0 StringValue +- if err := json.Unmarshal(x, &h0); err == nil { +- t.Value = h0 +- return nil +- } +- var h1 string +- if err := json.Unmarshal(x, &h1); err == nil { +- t.Value = h1 +- return nil +- } +- return &UnmarshalError{"unmarshal failed to match one of [StringValue string]"} +-} +- +-func (t Or_InlineValue) MarshalJSON() ([]byte, error) { +- switch x := t.Value.(type) { +- case InlineValueEvaluatableExpression: +- return json.Marshal(x) +- case InlineValueText: +- return json.Marshal(x) +- case InlineValueVariableLookup: +- return json.Marshal(x) +- case nil: +- return []byte("null"), nil +- } +- return nil, fmt.Errorf("type %T not one of [InlineValueEvaluatableExpression InlineValueText InlineValueVariableLookup]", t) +-} +- +-func (t *Or_InlineValue) UnmarshalJSON(x []byte) error { +- if string(x) == "null" { +- t.Value = nil +- return nil +- } +- var h0 InlineValueEvaluatableExpression +- if err := json.Unmarshal(x, &h0); err == nil { +- t.Value = h0 +- return nil +- } +- var h1 InlineValueText +- if err := json.Unmarshal(x, &h1); err == nil { +- t.Value = h1 +- return nil +- } +- var h2 InlineValueVariableLookup +- if err := json.Unmarshal(x, &h2); err == nil { +- t.Value = h2 +- return nil +- } +- return &UnmarshalError{"unmarshal failed to match one of [InlineValueEvaluatableExpression InlineValueText InlineValueVariableLookup]"} +-} +- +-func (t Or_MarkedString) MarshalJSON() ([]byte, error) { +- switch x := t.Value.(type) { +- case MarkedStringWithLanguage: +- return json.Marshal(x) +- case string: +- return json.Marshal(x) +- case nil: +- return []byte("null"), nil +- } +- return nil, fmt.Errorf("type %T not one of [MarkedStringWithLanguage string]", t) +-} +- +-func (t *Or_MarkedString) UnmarshalJSON(x []byte) error { +- if string(x) == "null" { +- t.Value = nil +- return nil +- } +- var h0 MarkedStringWithLanguage +- if err := json.Unmarshal(x, &h0); err == nil { +- t.Value = h0 +- return nil +- } +- var h1 string +- if err := json.Unmarshal(x, &h1); err == nil { +- t.Value = h1 +- return nil +- } +- return &UnmarshalError{"unmarshal failed to match one of [MarkedStringWithLanguage string]"} +-} +- +-func (t Or_NotebookCellTextDocumentFilter_notebook) MarshalJSON() ([]byte, error) { +- switch x := t.Value.(type) { +- case NotebookDocumentFilter: +- return json.Marshal(x) +- case string: +- return json.Marshal(x) +- case nil: +- return []byte("null"), nil +- } +- return nil, 
fmt.Errorf("type %T not one of [NotebookDocumentFilter string]", t) +-} +- +-func (t *Or_NotebookCellTextDocumentFilter_notebook) UnmarshalJSON(x []byte) error { +- if string(x) == "null" { +- t.Value = nil +- return nil +- } +- var h0 NotebookDocumentFilter +- if err := json.Unmarshal(x, &h0); err == nil { +- t.Value = h0 +- return nil +- } +- var h1 string +- if err := json.Unmarshal(x, &h1); err == nil { +- t.Value = h1 +- return nil +- } +- return &UnmarshalError{"unmarshal failed to match one of [NotebookDocumentFilter string]"} +-} +- +-func (t Or_NotebookDocumentFilter) MarshalJSON() ([]byte, error) { +- switch x := t.Value.(type) { +- case NotebookDocumentFilterNotebookType: +- return json.Marshal(x) +- case NotebookDocumentFilterPattern: +- return json.Marshal(x) +- case NotebookDocumentFilterScheme: +- return json.Marshal(x) +- case nil: +- return []byte("null"), nil +- } +- return nil, fmt.Errorf("type %T not one of [NotebookDocumentFilterNotebookType NotebookDocumentFilterPattern NotebookDocumentFilterScheme]", t) +-} +- +-func (t *Or_NotebookDocumentFilter) UnmarshalJSON(x []byte) error { +- if string(x) == "null" { +- t.Value = nil +- return nil +- } +- var h0 NotebookDocumentFilterNotebookType +- if err := json.Unmarshal(x, &h0); err == nil { +- t.Value = h0 +- return nil +- } +- var h1 NotebookDocumentFilterPattern +- if err := json.Unmarshal(x, &h1); err == nil { +- t.Value = h1 +- return nil +- } +- var h2 NotebookDocumentFilterScheme +- if err := json.Unmarshal(x, &h2); err == nil { +- t.Value = h2 +- return nil +- } +- return &UnmarshalError{"unmarshal failed to match one of [NotebookDocumentFilterNotebookType NotebookDocumentFilterPattern NotebookDocumentFilterScheme]"} +-} +- +-func (t Or_NotebookDocumentFilterWithCells_notebook) MarshalJSON() ([]byte, error) { +- switch x := t.Value.(type) { +- case NotebookDocumentFilter: +- return json.Marshal(x) +- case string: +- return json.Marshal(x) +- case nil: +- return []byte("null"), nil +- } +- return nil, fmt.Errorf("type %T not one of [NotebookDocumentFilter string]", t) +-} +- +-func (t *Or_NotebookDocumentFilterWithCells_notebook) UnmarshalJSON(x []byte) error { +- if string(x) == "null" { +- t.Value = nil +- return nil +- } +- var h0 NotebookDocumentFilter +- if err := json.Unmarshal(x, &h0); err == nil { +- t.Value = h0 +- return nil +- } +- var h1 string +- if err := json.Unmarshal(x, &h1); err == nil { +- t.Value = h1 +- return nil +- } +- return &UnmarshalError{"unmarshal failed to match one of [NotebookDocumentFilter string]"} +-} +- +-func (t Or_NotebookDocumentFilterWithNotebook_notebook) MarshalJSON() ([]byte, error) { +- switch x := t.Value.(type) { +- case NotebookDocumentFilter: +- return json.Marshal(x) +- case string: +- return json.Marshal(x) +- case nil: +- return []byte("null"), nil +- } +- return nil, fmt.Errorf("type %T not one of [NotebookDocumentFilter string]", t) +-} +- +-func (t *Or_NotebookDocumentFilterWithNotebook_notebook) UnmarshalJSON(x []byte) error { +- if string(x) == "null" { +- t.Value = nil +- return nil +- } +- var h0 NotebookDocumentFilter +- if err := json.Unmarshal(x, &h0); err == nil { +- t.Value = h0 +- return nil +- } +- var h1 string +- if err := json.Unmarshal(x, &h1); err == nil { +- t.Value = h1 +- return nil +- } +- return &UnmarshalError{"unmarshal failed to match one of [NotebookDocumentFilter string]"} +-} +- +-func (t Or_NotebookDocumentSyncOptions_notebookSelector_Elem) MarshalJSON() ([]byte, error) { +- switch x := t.Value.(type) { +- case 
NotebookDocumentFilterWithCells: +- return json.Marshal(x) +- case NotebookDocumentFilterWithNotebook: +- return json.Marshal(x) +- case nil: +- return []byte("null"), nil +- } +- return nil, fmt.Errorf("type %T not one of [NotebookDocumentFilterWithCells NotebookDocumentFilterWithNotebook]", t) +-} +- +-func (t *Or_NotebookDocumentSyncOptions_notebookSelector_Elem) UnmarshalJSON(x []byte) error { +- if string(x) == "null" { +- t.Value = nil +- return nil +- } +- var h0 NotebookDocumentFilterWithCells +- if err := json.Unmarshal(x, &h0); err == nil { +- t.Value = h0 +- return nil +- } +- var h1 NotebookDocumentFilterWithNotebook +- if err := json.Unmarshal(x, &h1); err == nil { +- t.Value = h1 +- return nil +- } +- return &UnmarshalError{"unmarshal failed to match one of [NotebookDocumentFilterWithCells NotebookDocumentFilterWithNotebook]"} +-} +- +-func (t Or_RelatedFullDocumentDiagnosticReport_relatedDocuments_Value) MarshalJSON() ([]byte, error) { +- switch x := t.Value.(type) { +- case FullDocumentDiagnosticReport: +- return json.Marshal(x) +- case UnchangedDocumentDiagnosticReport: +- return json.Marshal(x) +- case nil: +- return []byte("null"), nil +- } +- return nil, fmt.Errorf("type %T not one of [FullDocumentDiagnosticReport UnchangedDocumentDiagnosticReport]", t) +-} +- +-func (t *Or_RelatedFullDocumentDiagnosticReport_relatedDocuments_Value) UnmarshalJSON(x []byte) error { +- if string(x) == "null" { +- t.Value = nil +- return nil +- } +- var h0 FullDocumentDiagnosticReport +- if err := json.Unmarshal(x, &h0); err == nil { +- t.Value = h0 +- return nil +- } +- var h1 UnchangedDocumentDiagnosticReport +- if err := json.Unmarshal(x, &h1); err == nil { +- t.Value = h1 +- return nil +- } +- return &UnmarshalError{"unmarshal failed to match one of [FullDocumentDiagnosticReport UnchangedDocumentDiagnosticReport]"} +-} +- +-func (t Or_RelatedUnchangedDocumentDiagnosticReport_relatedDocuments_Value) MarshalJSON() ([]byte, error) { +- switch x := t.Value.(type) { +- case FullDocumentDiagnosticReport: +- return json.Marshal(x) +- case UnchangedDocumentDiagnosticReport: +- return json.Marshal(x) +- case nil: +- return []byte("null"), nil +- } +- return nil, fmt.Errorf("type %T not one of [FullDocumentDiagnosticReport UnchangedDocumentDiagnosticReport]", t) +-} +- +-func (t *Or_RelatedUnchangedDocumentDiagnosticReport_relatedDocuments_Value) UnmarshalJSON(x []byte) error { +- if string(x) == "null" { +- t.Value = nil +- return nil +- } +- var h0 FullDocumentDiagnosticReport +- if err := json.Unmarshal(x, &h0); err == nil { +- t.Value = h0 +- return nil +- } +- var h1 UnchangedDocumentDiagnosticReport +- if err := json.Unmarshal(x, &h1); err == nil { +- t.Value = h1 +- return nil +- } +- return &UnmarshalError{"unmarshal failed to match one of [FullDocumentDiagnosticReport UnchangedDocumentDiagnosticReport]"} +-} +- +-func (t Or_Result_textDocument_codeAction_Item0_Elem) MarshalJSON() ([]byte, error) { +- switch x := t.Value.(type) { +- case CodeAction: +- return json.Marshal(x) +- case Command: +- return json.Marshal(x) +- case nil: +- return []byte("null"), nil +- } +- return nil, fmt.Errorf("type %T not one of [CodeAction Command]", t) +-} +- +-func (t *Or_Result_textDocument_codeAction_Item0_Elem) UnmarshalJSON(x []byte) error { +- if string(x) == "null" { +- t.Value = nil +- return nil +- } +- var h0 CodeAction +- if err := json.Unmarshal(x, &h0); err == nil { +- t.Value = h0 +- return nil +- } +- var h1 Command +- if err := json.Unmarshal(x, &h1); err == nil { +- t.Value = h1 +- return 
nil +- } +- return &UnmarshalError{"unmarshal failed to match one of [CodeAction Command]"} +-} +- +-func (t Or_Result_textDocument_inlineCompletion) MarshalJSON() ([]byte, error) { +- switch x := t.Value.(type) { +- case InlineCompletionList: +- return json.Marshal(x) +- case []InlineCompletionItem: +- return json.Marshal(x) +- case nil: +- return []byte("null"), nil +- } +- return nil, fmt.Errorf("type %T not one of [InlineCompletionList []InlineCompletionItem]", t) +-} +- +-func (t *Or_Result_textDocument_inlineCompletion) UnmarshalJSON(x []byte) error { +- if string(x) == "null" { +- t.Value = nil +- return nil +- } +- var h0 InlineCompletionList +- if err := json.Unmarshal(x, &h0); err == nil { +- t.Value = h0 +- return nil +- } +- var h1 []InlineCompletionItem +- if err := json.Unmarshal(x, &h1); err == nil { +- t.Value = h1 +- return nil +- } +- return &UnmarshalError{"unmarshal failed to match one of [InlineCompletionList []InlineCompletionItem]"} +-} +- +-func (t Or_SemanticTokensOptions_full) MarshalJSON() ([]byte, error) { +- switch x := t.Value.(type) { +- case SemanticTokensFullDelta: +- return json.Marshal(x) +- case bool: +- return json.Marshal(x) +- case nil: +- return []byte("null"), nil +- } +- return nil, fmt.Errorf("type %T not one of [SemanticTokensFullDelta bool]", t) +-} +- +-func (t *Or_SemanticTokensOptions_full) UnmarshalJSON(x []byte) error { +- if string(x) == "null" { +- t.Value = nil +- return nil +- } +- var h0 SemanticTokensFullDelta +- if err := json.Unmarshal(x, &h0); err == nil { +- t.Value = h0 +- return nil +- } +- var h1 bool +- if err := json.Unmarshal(x, &h1); err == nil { +- t.Value = h1 +- return nil +- } +- return &UnmarshalError{"unmarshal failed to match one of [SemanticTokensFullDelta bool]"} +-} +- +-func (t Or_SemanticTokensOptions_range) MarshalJSON() ([]byte, error) { +- switch x := t.Value.(type) { +- case PRangeESemanticTokensOptions: +- return json.Marshal(x) +- case bool: +- return json.Marshal(x) +- case nil: +- return []byte("null"), nil +- } +- return nil, fmt.Errorf("type %T not one of [PRangeESemanticTokensOptions bool]", t) +-} +- +-func (t *Or_SemanticTokensOptions_range) UnmarshalJSON(x []byte) error { +- if string(x) == "null" { +- t.Value = nil +- return nil +- } +- var h0 PRangeESemanticTokensOptions +- if err := json.Unmarshal(x, &h0); err == nil { +- t.Value = h0 +- return nil +- } +- var h1 bool +- if err := json.Unmarshal(x, &h1); err == nil { +- t.Value = h1 +- return nil +- } +- return &UnmarshalError{"unmarshal failed to match one of [PRangeESemanticTokensOptions bool]"} +-} +- +-func (t Or_ServerCapabilities_callHierarchyProvider) MarshalJSON() ([]byte, error) { +- switch x := t.Value.(type) { +- case CallHierarchyOptions: +- return json.Marshal(x) +- case CallHierarchyRegistrationOptions: +- return json.Marshal(x) +- case bool: +- return json.Marshal(x) +- case nil: +- return []byte("null"), nil +- } +- return nil, fmt.Errorf("type %T not one of [CallHierarchyOptions CallHierarchyRegistrationOptions bool]", t) +-} +- +-func (t *Or_ServerCapabilities_callHierarchyProvider) UnmarshalJSON(x []byte) error { +- if string(x) == "null" { +- t.Value = nil +- return nil +- } +- var h0 CallHierarchyOptions +- if err := json.Unmarshal(x, &h0); err == nil { +- t.Value = h0 +- return nil +- } +- var h1 CallHierarchyRegistrationOptions +- if err := json.Unmarshal(x, &h1); err == nil { +- t.Value = h1 +- return nil +- } +- var h2 bool +- if err := json.Unmarshal(x, &h2); err == nil { +- t.Value = h2 +- return nil +- } +- return 
&UnmarshalError{"unmarshal failed to match one of [CallHierarchyOptions CallHierarchyRegistrationOptions bool]"} +-} +- +-func (t Or_ServerCapabilities_codeActionProvider) MarshalJSON() ([]byte, error) { +- switch x := t.Value.(type) { +- case CodeActionOptions: +- return json.Marshal(x) +- case bool: +- return json.Marshal(x) +- case nil: +- return []byte("null"), nil +- } +- return nil, fmt.Errorf("type %T not one of [CodeActionOptions bool]", t) +-} +- +-func (t *Or_ServerCapabilities_codeActionProvider) UnmarshalJSON(x []byte) error { +- if string(x) == "null" { +- t.Value = nil +- return nil +- } +- var h0 CodeActionOptions +- if err := json.Unmarshal(x, &h0); err == nil { +- t.Value = h0 +- return nil +- } +- var h1 bool +- if err := json.Unmarshal(x, &h1); err == nil { +- t.Value = h1 +- return nil +- } +- return &UnmarshalError{"unmarshal failed to match one of [CodeActionOptions bool]"} +-} +- +-func (t Or_ServerCapabilities_colorProvider) MarshalJSON() ([]byte, error) { +- switch x := t.Value.(type) { +- case DocumentColorOptions: +- return json.Marshal(x) +- case DocumentColorRegistrationOptions: +- return json.Marshal(x) +- case bool: +- return json.Marshal(x) +- case nil: +- return []byte("null"), nil +- } +- return nil, fmt.Errorf("type %T not one of [DocumentColorOptions DocumentColorRegistrationOptions bool]", t) +-} +- +-func (t *Or_ServerCapabilities_colorProvider) UnmarshalJSON(x []byte) error { +- if string(x) == "null" { +- t.Value = nil +- return nil +- } +- var h0 DocumentColorOptions +- if err := json.Unmarshal(x, &h0); err == nil { +- t.Value = h0 +- return nil +- } +- var h1 DocumentColorRegistrationOptions +- if err := json.Unmarshal(x, &h1); err == nil { +- t.Value = h1 +- return nil +- } +- var h2 bool +- if err := json.Unmarshal(x, &h2); err == nil { +- t.Value = h2 +- return nil +- } +- return &UnmarshalError{"unmarshal failed to match one of [DocumentColorOptions DocumentColorRegistrationOptions bool]"} +-} +- +-func (t Or_ServerCapabilities_declarationProvider) MarshalJSON() ([]byte, error) { +- switch x := t.Value.(type) { +- case DeclarationOptions: +- return json.Marshal(x) +- case DeclarationRegistrationOptions: +- return json.Marshal(x) +- case bool: +- return json.Marshal(x) +- case nil: +- return []byte("null"), nil +- } +- return nil, fmt.Errorf("type %T not one of [DeclarationOptions DeclarationRegistrationOptions bool]", t) +-} +- +-func (t *Or_ServerCapabilities_declarationProvider) UnmarshalJSON(x []byte) error { +- if string(x) == "null" { +- t.Value = nil +- return nil +- } +- var h0 DeclarationOptions +- if err := json.Unmarshal(x, &h0); err == nil { +- t.Value = h0 +- return nil +- } +- var h1 DeclarationRegistrationOptions +- if err := json.Unmarshal(x, &h1); err == nil { +- t.Value = h1 +- return nil +- } +- var h2 bool +- if err := json.Unmarshal(x, &h2); err == nil { +- t.Value = h2 +- return nil +- } +- return &UnmarshalError{"unmarshal failed to match one of [DeclarationOptions DeclarationRegistrationOptions bool]"} +-} +- +-func (t Or_ServerCapabilities_definitionProvider) MarshalJSON() ([]byte, error) { +- switch x := t.Value.(type) { +- case DefinitionOptions: +- return json.Marshal(x) +- case bool: +- return json.Marshal(x) +- case nil: +- return []byte("null"), nil +- } +- return nil, fmt.Errorf("type %T not one of [DefinitionOptions bool]", t) +-} +- +-func (t *Or_ServerCapabilities_definitionProvider) UnmarshalJSON(x []byte) error { +- if string(x) == "null" { +- t.Value = nil +- return nil +- } +- var h0 DefinitionOptions +- if 
err := json.Unmarshal(x, &h0); err == nil { +- t.Value = h0 +- return nil +- } +- var h1 bool +- if err := json.Unmarshal(x, &h1); err == nil { +- t.Value = h1 +- return nil +- } +- return &UnmarshalError{"unmarshal failed to match one of [DefinitionOptions bool]"} +-} +- +-func (t Or_ServerCapabilities_diagnosticProvider) MarshalJSON() ([]byte, error) { +- switch x := t.Value.(type) { +- case DiagnosticOptions: +- return json.Marshal(x) +- case DiagnosticRegistrationOptions: +- return json.Marshal(x) +- case nil: +- return []byte("null"), nil +- } +- return nil, fmt.Errorf("type %T not one of [DiagnosticOptions DiagnosticRegistrationOptions]", t) +-} +- +-func (t *Or_ServerCapabilities_diagnosticProvider) UnmarshalJSON(x []byte) error { +- if string(x) == "null" { +- t.Value = nil +- return nil +- } +- var h0 DiagnosticOptions +- if err := json.Unmarshal(x, &h0); err == nil { +- t.Value = h0 +- return nil +- } +- var h1 DiagnosticRegistrationOptions +- if err := json.Unmarshal(x, &h1); err == nil { +- t.Value = h1 +- return nil +- } +- return &UnmarshalError{"unmarshal failed to match one of [DiagnosticOptions DiagnosticRegistrationOptions]"} +-} +- +-func (t Or_ServerCapabilities_documentFormattingProvider) MarshalJSON() ([]byte, error) { +- switch x := t.Value.(type) { +- case DocumentFormattingOptions: +- return json.Marshal(x) +- case bool: +- return json.Marshal(x) +- case nil: +- return []byte("null"), nil +- } +- return nil, fmt.Errorf("type %T not one of [DocumentFormattingOptions bool]", t) +-} +- +-func (t *Or_ServerCapabilities_documentFormattingProvider) UnmarshalJSON(x []byte) error { +- if string(x) == "null" { +- t.Value = nil +- return nil +- } +- var h0 DocumentFormattingOptions +- if err := json.Unmarshal(x, &h0); err == nil { +- t.Value = h0 +- return nil +- } +- var h1 bool +- if err := json.Unmarshal(x, &h1); err == nil { +- t.Value = h1 +- return nil +- } +- return &UnmarshalError{"unmarshal failed to match one of [DocumentFormattingOptions bool]"} +-} +- +-func (t Or_ServerCapabilities_documentHighlightProvider) MarshalJSON() ([]byte, error) { +- switch x := t.Value.(type) { +- case DocumentHighlightOptions: +- return json.Marshal(x) +- case bool: +- return json.Marshal(x) +- case nil: +- return []byte("null"), nil +- } +- return nil, fmt.Errorf("type %T not one of [DocumentHighlightOptions bool]", t) +-} +- +-func (t *Or_ServerCapabilities_documentHighlightProvider) UnmarshalJSON(x []byte) error { +- if string(x) == "null" { +- t.Value = nil +- return nil +- } +- var h0 DocumentHighlightOptions +- if err := json.Unmarshal(x, &h0); err == nil { +- t.Value = h0 +- return nil +- } +- var h1 bool +- if err := json.Unmarshal(x, &h1); err == nil { +- t.Value = h1 +- return nil +- } +- return &UnmarshalError{"unmarshal failed to match one of [DocumentHighlightOptions bool]"} +-} +- +-func (t Or_ServerCapabilities_documentRangeFormattingProvider) MarshalJSON() ([]byte, error) { +- switch x := t.Value.(type) { +- case DocumentRangeFormattingOptions: +- return json.Marshal(x) +- case bool: +- return json.Marshal(x) +- case nil: +- return []byte("null"), nil +- } +- return nil, fmt.Errorf("type %T not one of [DocumentRangeFormattingOptions bool]", t) +-} +- +-func (t *Or_ServerCapabilities_documentRangeFormattingProvider) UnmarshalJSON(x []byte) error { +- if string(x) == "null" { +- t.Value = nil +- return nil +- } +- var h0 DocumentRangeFormattingOptions +- if err := json.Unmarshal(x, &h0); err == nil { +- t.Value = h0 +- return nil +- } +- var h1 bool +- if err := 
json.Unmarshal(x, &h1); err == nil { +- t.Value = h1 +- return nil +- } +- return &UnmarshalError{"unmarshal failed to match one of [DocumentRangeFormattingOptions bool]"} +-} +- +-func (t Or_ServerCapabilities_documentSymbolProvider) MarshalJSON() ([]byte, error) { +- switch x := t.Value.(type) { +- case DocumentSymbolOptions: +- return json.Marshal(x) +- case bool: +- return json.Marshal(x) +- case nil: +- return []byte("null"), nil +- } +- return nil, fmt.Errorf("type %T not one of [DocumentSymbolOptions bool]", t) +-} +- +-func (t *Or_ServerCapabilities_documentSymbolProvider) UnmarshalJSON(x []byte) error { +- if string(x) == "null" { +- t.Value = nil +- return nil +- } +- var h0 DocumentSymbolOptions +- if err := json.Unmarshal(x, &h0); err == nil { +- t.Value = h0 +- return nil +- } +- var h1 bool +- if err := json.Unmarshal(x, &h1); err == nil { +- t.Value = h1 +- return nil +- } +- return &UnmarshalError{"unmarshal failed to match one of [DocumentSymbolOptions bool]"} +-} +- +-func (t Or_ServerCapabilities_foldingRangeProvider) MarshalJSON() ([]byte, error) { +- switch x := t.Value.(type) { +- case FoldingRangeOptions: +- return json.Marshal(x) +- case FoldingRangeRegistrationOptions: +- return json.Marshal(x) +- case bool: +- return json.Marshal(x) +- case nil: +- return []byte("null"), nil +- } +- return nil, fmt.Errorf("type %T not one of [FoldingRangeOptions FoldingRangeRegistrationOptions bool]", t) +-} +- +-func (t *Or_ServerCapabilities_foldingRangeProvider) UnmarshalJSON(x []byte) error { +- if string(x) == "null" { +- t.Value = nil +- return nil +- } +- var h0 FoldingRangeOptions +- if err := json.Unmarshal(x, &h0); err == nil { +- t.Value = h0 +- return nil +- } +- var h1 FoldingRangeRegistrationOptions +- if err := json.Unmarshal(x, &h1); err == nil { +- t.Value = h1 +- return nil +- } +- var h2 bool +- if err := json.Unmarshal(x, &h2); err == nil { +- t.Value = h2 +- return nil +- } +- return &UnmarshalError{"unmarshal failed to match one of [FoldingRangeOptions FoldingRangeRegistrationOptions bool]"} +-} +- +-func (t Or_ServerCapabilities_hoverProvider) MarshalJSON() ([]byte, error) { +- switch x := t.Value.(type) { +- case HoverOptions: +- return json.Marshal(x) +- case bool: +- return json.Marshal(x) +- case nil: +- return []byte("null"), nil +- } +- return nil, fmt.Errorf("type %T not one of [HoverOptions bool]", t) +-} +- +-func (t *Or_ServerCapabilities_hoverProvider) UnmarshalJSON(x []byte) error { +- if string(x) == "null" { +- t.Value = nil +- return nil +- } +- var h0 HoverOptions +- if err := json.Unmarshal(x, &h0); err == nil { +- t.Value = h0 +- return nil +- } +- var h1 bool +- if err := json.Unmarshal(x, &h1); err == nil { +- t.Value = h1 +- return nil +- } +- return &UnmarshalError{"unmarshal failed to match one of [HoverOptions bool]"} +-} +- +-func (t Or_ServerCapabilities_implementationProvider) MarshalJSON() ([]byte, error) { +- switch x := t.Value.(type) { +- case ImplementationOptions: +- return json.Marshal(x) +- case ImplementationRegistrationOptions: +- return json.Marshal(x) +- case bool: +- return json.Marshal(x) +- case nil: +- return []byte("null"), nil +- } +- return nil, fmt.Errorf("type %T not one of [ImplementationOptions ImplementationRegistrationOptions bool]", t) +-} +- +-func (t *Or_ServerCapabilities_implementationProvider) UnmarshalJSON(x []byte) error { +- if string(x) == "null" { +- t.Value = nil +- return nil +- } +- var h0 ImplementationOptions +- if err := json.Unmarshal(x, &h0); err == nil { +- t.Value = h0 +- return nil +- 
} +- var h1 ImplementationRegistrationOptions +- if err := json.Unmarshal(x, &h1); err == nil { +- t.Value = h1 +- return nil +- } +- var h2 bool +- if err := json.Unmarshal(x, &h2); err == nil { +- t.Value = h2 +- return nil +- } +- return &UnmarshalError{"unmarshal failed to match one of [ImplementationOptions ImplementationRegistrationOptions bool]"} +-} +- +-func (t Or_ServerCapabilities_inlayHintProvider) MarshalJSON() ([]byte, error) { +- switch x := t.Value.(type) { +- case InlayHintOptions: +- return json.Marshal(x) +- case InlayHintRegistrationOptions: +- return json.Marshal(x) +- case bool: +- return json.Marshal(x) +- case nil: +- return []byte("null"), nil +- } +- return nil, fmt.Errorf("type %T not one of [InlayHintOptions InlayHintRegistrationOptions bool]", t) +-} +- +-func (t *Or_ServerCapabilities_inlayHintProvider) UnmarshalJSON(x []byte) error { +- if string(x) == "null" { +- t.Value = nil +- return nil +- } +- var h0 InlayHintOptions +- if err := json.Unmarshal(x, &h0); err == nil { +- t.Value = h0 +- return nil +- } +- var h1 InlayHintRegistrationOptions +- if err := json.Unmarshal(x, &h1); err == nil { +- t.Value = h1 +- return nil +- } +- var h2 bool +- if err := json.Unmarshal(x, &h2); err == nil { +- t.Value = h2 +- return nil +- } +- return &UnmarshalError{"unmarshal failed to match one of [InlayHintOptions InlayHintRegistrationOptions bool]"} +-} +- +-func (t Or_ServerCapabilities_inlineCompletionProvider) MarshalJSON() ([]byte, error) { +- switch x := t.Value.(type) { +- case InlineCompletionOptions: +- return json.Marshal(x) +- case bool: +- return json.Marshal(x) +- case nil: +- return []byte("null"), nil +- } +- return nil, fmt.Errorf("type %T not one of [InlineCompletionOptions bool]", t) +-} +- +-func (t *Or_ServerCapabilities_inlineCompletionProvider) UnmarshalJSON(x []byte) error { +- if string(x) == "null" { +- t.Value = nil +- return nil +- } +- var h0 InlineCompletionOptions +- if err := json.Unmarshal(x, &h0); err == nil { +- t.Value = h0 +- return nil +- } +- var h1 bool +- if err := json.Unmarshal(x, &h1); err == nil { +- t.Value = h1 +- return nil +- } +- return &UnmarshalError{"unmarshal failed to match one of [InlineCompletionOptions bool]"} +-} +- +-func (t Or_ServerCapabilities_inlineValueProvider) MarshalJSON() ([]byte, error) { +- switch x := t.Value.(type) { +- case InlineValueOptions: +- return json.Marshal(x) +- case InlineValueRegistrationOptions: +- return json.Marshal(x) +- case bool: +- return json.Marshal(x) +- case nil: +- return []byte("null"), nil +- } +- return nil, fmt.Errorf("type %T not one of [InlineValueOptions InlineValueRegistrationOptions bool]", t) +-} +- +-func (t *Or_ServerCapabilities_inlineValueProvider) UnmarshalJSON(x []byte) error { +- if string(x) == "null" { +- t.Value = nil +- return nil +- } +- var h0 InlineValueOptions +- if err := json.Unmarshal(x, &h0); err == nil { +- t.Value = h0 +- return nil +- } +- var h1 InlineValueRegistrationOptions +- if err := json.Unmarshal(x, &h1); err == nil { +- t.Value = h1 +- return nil +- } +- var h2 bool +- if err := json.Unmarshal(x, &h2); err == nil { +- t.Value = h2 +- return nil +- } +- return &UnmarshalError{"unmarshal failed to match one of [InlineValueOptions InlineValueRegistrationOptions bool]"} +-} +- +-func (t Or_ServerCapabilities_linkedEditingRangeProvider) MarshalJSON() ([]byte, error) { +- switch x := t.Value.(type) { +- case LinkedEditingRangeOptions: +- return json.Marshal(x) +- case LinkedEditingRangeRegistrationOptions: +- return json.Marshal(x) +- case 
bool: +- return json.Marshal(x) +- case nil: +- return []byte("null"), nil +- } +- return nil, fmt.Errorf("type %T not one of [LinkedEditingRangeOptions LinkedEditingRangeRegistrationOptions bool]", t) +-} +- +-func (t *Or_ServerCapabilities_linkedEditingRangeProvider) UnmarshalJSON(x []byte) error { +- if string(x) == "null" { +- t.Value = nil +- return nil +- } +- var h0 LinkedEditingRangeOptions +- if err := json.Unmarshal(x, &h0); err == nil { +- t.Value = h0 +- return nil +- } +- var h1 LinkedEditingRangeRegistrationOptions +- if err := json.Unmarshal(x, &h1); err == nil { +- t.Value = h1 +- return nil +- } +- var h2 bool +- if err := json.Unmarshal(x, &h2); err == nil { +- t.Value = h2 +- return nil +- } +- return &UnmarshalError{"unmarshal failed to match one of [LinkedEditingRangeOptions LinkedEditingRangeRegistrationOptions bool]"} +-} +- +-func (t Or_ServerCapabilities_monikerProvider) MarshalJSON() ([]byte, error) { +- switch x := t.Value.(type) { +- case MonikerOptions: +- return json.Marshal(x) +- case MonikerRegistrationOptions: +- return json.Marshal(x) +- case bool: +- return json.Marshal(x) +- case nil: +- return []byte("null"), nil +- } +- return nil, fmt.Errorf("type %T not one of [MonikerOptions MonikerRegistrationOptions bool]", t) +-} +- +-func (t *Or_ServerCapabilities_monikerProvider) UnmarshalJSON(x []byte) error { +- if string(x) == "null" { +- t.Value = nil +- return nil +- } +- var h0 MonikerOptions +- if err := json.Unmarshal(x, &h0); err == nil { +- t.Value = h0 +- return nil +- } +- var h1 MonikerRegistrationOptions +- if err := json.Unmarshal(x, &h1); err == nil { +- t.Value = h1 +- return nil +- } +- var h2 bool +- if err := json.Unmarshal(x, &h2); err == nil { +- t.Value = h2 +- return nil +- } +- return &UnmarshalError{"unmarshal failed to match one of [MonikerOptions MonikerRegistrationOptions bool]"} +-} +- +-func (t Or_ServerCapabilities_notebookDocumentSync) MarshalJSON() ([]byte, error) { +- switch x := t.Value.(type) { +- case NotebookDocumentSyncOptions: +- return json.Marshal(x) +- case NotebookDocumentSyncRegistrationOptions: +- return json.Marshal(x) +- case nil: +- return []byte("null"), nil +- } +- return nil, fmt.Errorf("type %T not one of [NotebookDocumentSyncOptions NotebookDocumentSyncRegistrationOptions]", t) +-} +- +-func (t *Or_ServerCapabilities_notebookDocumentSync) UnmarshalJSON(x []byte) error { +- if string(x) == "null" { +- t.Value = nil +- return nil +- } +- var h0 NotebookDocumentSyncOptions +- if err := json.Unmarshal(x, &h0); err == nil { +- t.Value = h0 +- return nil +- } +- var h1 NotebookDocumentSyncRegistrationOptions +- if err := json.Unmarshal(x, &h1); err == nil { +- t.Value = h1 +- return nil +- } +- return &UnmarshalError{"unmarshal failed to match one of [NotebookDocumentSyncOptions NotebookDocumentSyncRegistrationOptions]"} +-} +- +-func (t Or_ServerCapabilities_referencesProvider) MarshalJSON() ([]byte, error) { +- switch x := t.Value.(type) { +- case ReferenceOptions: +- return json.Marshal(x) +- case bool: +- return json.Marshal(x) +- case nil: +- return []byte("null"), nil +- } +- return nil, fmt.Errorf("type %T not one of [ReferenceOptions bool]", t) +-} +- +-func (t *Or_ServerCapabilities_referencesProvider) UnmarshalJSON(x []byte) error { +- if string(x) == "null" { +- t.Value = nil +- return nil +- } +- var h0 ReferenceOptions +- if err := json.Unmarshal(x, &h0); err == nil { +- t.Value = h0 +- return nil +- } +- var h1 bool +- if err := json.Unmarshal(x, &h1); err == nil { +- t.Value = h1 +- return nil +- } 
+- return &UnmarshalError{"unmarshal failed to match one of [ReferenceOptions bool]"} +-} +- +-func (t Or_ServerCapabilities_renameProvider) MarshalJSON() ([]byte, error) { +- switch x := t.Value.(type) { +- case RenameOptions: +- return json.Marshal(x) +- case bool: +- return json.Marshal(x) +- case nil: +- return []byte("null"), nil +- } +- return nil, fmt.Errorf("type %T not one of [RenameOptions bool]", t) +-} +- +-func (t *Or_ServerCapabilities_renameProvider) UnmarshalJSON(x []byte) error { +- if string(x) == "null" { +- t.Value = nil +- return nil +- } +- var h0 RenameOptions +- if err := json.Unmarshal(x, &h0); err == nil { +- t.Value = h0 +- return nil +- } +- var h1 bool +- if err := json.Unmarshal(x, &h1); err == nil { +- t.Value = h1 +- return nil +- } +- return &UnmarshalError{"unmarshal failed to match one of [RenameOptions bool]"} +-} +- +-func (t Or_ServerCapabilities_selectionRangeProvider) MarshalJSON() ([]byte, error) { +- switch x := t.Value.(type) { +- case SelectionRangeOptions: +- return json.Marshal(x) +- case SelectionRangeRegistrationOptions: +- return json.Marshal(x) +- case bool: +- return json.Marshal(x) +- case nil: +- return []byte("null"), nil +- } +- return nil, fmt.Errorf("type %T not one of [SelectionRangeOptions SelectionRangeRegistrationOptions bool]", t) +-} +- +-func (t *Or_ServerCapabilities_selectionRangeProvider) UnmarshalJSON(x []byte) error { +- if string(x) == "null" { +- t.Value = nil +- return nil +- } +- var h0 SelectionRangeOptions +- if err := json.Unmarshal(x, &h0); err == nil { +- t.Value = h0 +- return nil +- } +- var h1 SelectionRangeRegistrationOptions +- if err := json.Unmarshal(x, &h1); err == nil { +- t.Value = h1 +- return nil +- } +- var h2 bool +- if err := json.Unmarshal(x, &h2); err == nil { +- t.Value = h2 +- return nil +- } +- return &UnmarshalError{"unmarshal failed to match one of [SelectionRangeOptions SelectionRangeRegistrationOptions bool]"} +-} +- +-func (t Or_ServerCapabilities_semanticTokensProvider) MarshalJSON() ([]byte, error) { +- switch x := t.Value.(type) { +- case SemanticTokensOptions: +- return json.Marshal(x) +- case SemanticTokensRegistrationOptions: +- return json.Marshal(x) +- case nil: +- return []byte("null"), nil +- } +- return nil, fmt.Errorf("type %T not one of [SemanticTokensOptions SemanticTokensRegistrationOptions]", t) +-} +- +-func (t *Or_ServerCapabilities_semanticTokensProvider) UnmarshalJSON(x []byte) error { +- if string(x) == "null" { +- t.Value = nil +- return nil +- } +- var h0 SemanticTokensOptions +- if err := json.Unmarshal(x, &h0); err == nil { +- t.Value = h0 +- return nil +- } +- var h1 SemanticTokensRegistrationOptions +- if err := json.Unmarshal(x, &h1); err == nil { +- t.Value = h1 +- return nil +- } +- return &UnmarshalError{"unmarshal failed to match one of [SemanticTokensOptions SemanticTokensRegistrationOptions]"} +-} +- +-func (t Or_ServerCapabilities_textDocumentSync) MarshalJSON() ([]byte, error) { +- switch x := t.Value.(type) { +- case TextDocumentSyncKind: +- return json.Marshal(x) +- case TextDocumentSyncOptions: +- return json.Marshal(x) +- case nil: +- return []byte("null"), nil +- } +- return nil, fmt.Errorf("type %T not one of [TextDocumentSyncKind TextDocumentSyncOptions]", t) +-} +- +-func (t *Or_ServerCapabilities_textDocumentSync) UnmarshalJSON(x []byte) error { +- if string(x) == "null" { +- t.Value = nil +- return nil +- } +- var h0 TextDocumentSyncKind +- if err := json.Unmarshal(x, &h0); err == nil { +- t.Value = h0 +- return nil +- } +- var h1 
TextDocumentSyncOptions +- if err := json.Unmarshal(x, &h1); err == nil { +- t.Value = h1 +- return nil +- } +- return &UnmarshalError{"unmarshal failed to match one of [TextDocumentSyncKind TextDocumentSyncOptions]"} +-} +- +-func (t Or_ServerCapabilities_typeDefinitionProvider) MarshalJSON() ([]byte, error) { +- switch x := t.Value.(type) { +- case TypeDefinitionOptions: +- return json.Marshal(x) +- case TypeDefinitionRegistrationOptions: +- return json.Marshal(x) +- case bool: +- return json.Marshal(x) +- case nil: +- return []byte("null"), nil +- } +- return nil, fmt.Errorf("type %T not one of [TypeDefinitionOptions TypeDefinitionRegistrationOptions bool]", t) +-} +- +-func (t *Or_ServerCapabilities_typeDefinitionProvider) UnmarshalJSON(x []byte) error { +- if string(x) == "null" { +- t.Value = nil +- return nil +- } +- var h0 TypeDefinitionOptions +- if err := json.Unmarshal(x, &h0); err == nil { +- t.Value = h0 +- return nil +- } +- var h1 TypeDefinitionRegistrationOptions +- if err := json.Unmarshal(x, &h1); err == nil { +- t.Value = h1 +- return nil +- } +- var h2 bool +- if err := json.Unmarshal(x, &h2); err == nil { +- t.Value = h2 +- return nil +- } +- return &UnmarshalError{"unmarshal failed to match one of [TypeDefinitionOptions TypeDefinitionRegistrationOptions bool]"} +-} +- +-func (t Or_ServerCapabilities_typeHierarchyProvider) MarshalJSON() ([]byte, error) { +- switch x := t.Value.(type) { +- case TypeHierarchyOptions: +- return json.Marshal(x) +- case TypeHierarchyRegistrationOptions: +- return json.Marshal(x) +- case bool: +- return json.Marshal(x) +- case nil: +- return []byte("null"), nil +- } +- return nil, fmt.Errorf("type %T not one of [TypeHierarchyOptions TypeHierarchyRegistrationOptions bool]", t) +-} +- +-func (t *Or_ServerCapabilities_typeHierarchyProvider) UnmarshalJSON(x []byte) error { +- if string(x) == "null" { +- t.Value = nil +- return nil +- } +- var h0 TypeHierarchyOptions +- if err := json.Unmarshal(x, &h0); err == nil { +- t.Value = h0 +- return nil +- } +- var h1 TypeHierarchyRegistrationOptions +- if err := json.Unmarshal(x, &h1); err == nil { +- t.Value = h1 +- return nil +- } +- var h2 bool +- if err := json.Unmarshal(x, &h2); err == nil { +- t.Value = h2 +- return nil +- } +- return &UnmarshalError{"unmarshal failed to match one of [TypeHierarchyOptions TypeHierarchyRegistrationOptions bool]"} +-} +- +-func (t Or_ServerCapabilities_workspaceSymbolProvider) MarshalJSON() ([]byte, error) { +- switch x := t.Value.(type) { +- case WorkspaceSymbolOptions: +- return json.Marshal(x) +- case bool: +- return json.Marshal(x) +- case nil: +- return []byte("null"), nil +- } +- return nil, fmt.Errorf("type %T not one of [WorkspaceSymbolOptions bool]", t) +-} +- +-func (t *Or_ServerCapabilities_workspaceSymbolProvider) UnmarshalJSON(x []byte) error { +- if string(x) == "null" { +- t.Value = nil +- return nil +- } +- var h0 WorkspaceSymbolOptions +- if err := json.Unmarshal(x, &h0); err == nil { +- t.Value = h0 +- return nil +- } +- var h1 bool +- if err := json.Unmarshal(x, &h1); err == nil { +- t.Value = h1 +- return nil +- } +- return &UnmarshalError{"unmarshal failed to match one of [WorkspaceSymbolOptions bool]"} +-} +- +-func (t Or_SignatureInformation_documentation) MarshalJSON() ([]byte, error) { +- switch x := t.Value.(type) { +- case MarkupContent: +- return json.Marshal(x) +- case string: +- return json.Marshal(x) +- case nil: +- return []byte("null"), nil +- } +- return nil, fmt.Errorf("type %T not one of [MarkupContent string]", t) +-} +- +-func 
(t *Or_SignatureInformation_documentation) UnmarshalJSON(x []byte) error { +- if string(x) == "null" { +- t.Value = nil +- return nil +- } +- var h0 MarkupContent +- if err := json.Unmarshal(x, &h0); err == nil { +- t.Value = h0 +- return nil +- } +- var h1 string +- if err := json.Unmarshal(x, &h1); err == nil { +- t.Value = h1 +- return nil +- } +- return &UnmarshalError{"unmarshal failed to match one of [MarkupContent string]"} +-} +- +-func (t Or_TextDocumentEdit_edits_Elem) MarshalJSON() ([]byte, error) { +- switch x := t.Value.(type) { +- case AnnotatedTextEdit: +- return json.Marshal(x) +- case SnippetTextEdit: +- return json.Marshal(x) +- case TextEdit: +- return json.Marshal(x) +- case nil: +- return []byte("null"), nil +- } +- return nil, fmt.Errorf("type %T not one of [AnnotatedTextEdit SnippetTextEdit TextEdit]", t) +-} +- +-func (t *Or_TextDocumentEdit_edits_Elem) UnmarshalJSON(x []byte) error { +- if string(x) == "null" { +- t.Value = nil +- return nil +- } +- var h0 AnnotatedTextEdit +- if err := json.Unmarshal(x, &h0); err == nil { +- t.Value = h0 +- return nil +- } +- var h1 SnippetTextEdit +- if err := json.Unmarshal(x, &h1); err == nil { +- t.Value = h1 +- return nil +- } +- var h2 TextEdit +- if err := json.Unmarshal(x, &h2); err == nil { +- t.Value = h2 +- return nil +- } +- return &UnmarshalError{"unmarshal failed to match one of [AnnotatedTextEdit SnippetTextEdit TextEdit]"} +-} +- +-func (t Or_TextDocumentFilter) MarshalJSON() ([]byte, error) { +- switch x := t.Value.(type) { +- case TextDocumentFilterLanguage: +- return json.Marshal(x) +- case TextDocumentFilterPattern: +- return json.Marshal(x) +- case TextDocumentFilterScheme: +- return json.Marshal(x) +- case nil: +- return []byte("null"), nil +- } +- return nil, fmt.Errorf("type %T not one of [TextDocumentFilterLanguage TextDocumentFilterPattern TextDocumentFilterScheme]", t) +-} +- +-func (t *Or_TextDocumentFilter) UnmarshalJSON(x []byte) error { +- if string(x) == "null" { +- t.Value = nil +- return nil +- } +- var h0 TextDocumentFilterLanguage +- if err := json.Unmarshal(x, &h0); err == nil { +- t.Value = h0 +- return nil +- } +- var h1 TextDocumentFilterPattern +- if err := json.Unmarshal(x, &h1); err == nil { +- t.Value = h1 +- return nil +- } +- var h2 TextDocumentFilterScheme +- if err := json.Unmarshal(x, &h2); err == nil { +- t.Value = h2 +- return nil +- } +- return &UnmarshalError{"unmarshal failed to match one of [TextDocumentFilterLanguage TextDocumentFilterPattern TextDocumentFilterScheme]"} +-} +- +-func (t Or_TextDocumentSyncOptions_save) MarshalJSON() ([]byte, error) { +- switch x := t.Value.(type) { +- case SaveOptions: +- return json.Marshal(x) +- case bool: +- return json.Marshal(x) +- case nil: +- return []byte("null"), nil +- } +- return nil, fmt.Errorf("type %T not one of [SaveOptions bool]", t) +-} +- +-func (t *Or_TextDocumentSyncOptions_save) UnmarshalJSON(x []byte) error { +- if string(x) == "null" { +- t.Value = nil +- return nil +- } +- var h0 SaveOptions +- if err := json.Unmarshal(x, &h0); err == nil { +- t.Value = h0 +- return nil +- } +- var h1 bool +- if err := json.Unmarshal(x, &h1); err == nil { +- t.Value = h1 +- return nil +- } +- return &UnmarshalError{"unmarshal failed to match one of [SaveOptions bool]"} +-} +- +-func (t Or_WorkspaceDocumentDiagnosticReport) MarshalJSON() ([]byte, error) { +- switch x := t.Value.(type) { +- case WorkspaceFullDocumentDiagnosticReport: +- return json.Marshal(x) +- case WorkspaceUnchangedDocumentDiagnosticReport: +- return json.Marshal(x) 
+- case nil: +- return []byte("null"), nil +- } +- return nil, fmt.Errorf("type %T not one of [WorkspaceFullDocumentDiagnosticReport WorkspaceUnchangedDocumentDiagnosticReport]", t) +-} +- +-func (t *Or_WorkspaceDocumentDiagnosticReport) UnmarshalJSON(x []byte) error { +- if string(x) == "null" { +- t.Value = nil +- return nil +- } +- var h0 WorkspaceFullDocumentDiagnosticReport +- if err := json.Unmarshal(x, &h0); err == nil { +- t.Value = h0 +- return nil +- } +- var h1 WorkspaceUnchangedDocumentDiagnosticReport +- if err := json.Unmarshal(x, &h1); err == nil { +- t.Value = h1 +- return nil +- } +- return &UnmarshalError{"unmarshal failed to match one of [WorkspaceFullDocumentDiagnosticReport WorkspaceUnchangedDocumentDiagnosticReport]"} +-} +- +-func (t Or_WorkspaceEdit_documentChanges_Elem) MarshalJSON() ([]byte, error) { +- switch x := t.Value.(type) { +- case CreateFile: +- return json.Marshal(x) +- case DeleteFile: +- return json.Marshal(x) +- case RenameFile: +- return json.Marshal(x) +- case TextDocumentEdit: +- return json.Marshal(x) +- case nil: +- return []byte("null"), nil +- } +- return nil, fmt.Errorf("type %T not one of [CreateFile DeleteFile RenameFile TextDocumentEdit]", t) +-} +- +-func (t *Or_WorkspaceEdit_documentChanges_Elem) UnmarshalJSON(x []byte) error { +- if string(x) == "null" { +- t.Value = nil +- return nil +- } +- var h0 CreateFile +- if err := json.Unmarshal(x, &h0); err == nil { +- t.Value = h0 +- return nil +- } +- var h1 DeleteFile +- if err := json.Unmarshal(x, &h1); err == nil { +- t.Value = h1 +- return nil +- } +- var h2 RenameFile +- if err := json.Unmarshal(x, &h2); err == nil { +- t.Value = h2 +- return nil +- } +- var h3 TextDocumentEdit +- if err := json.Unmarshal(x, &h3); err == nil { +- t.Value = h3 +- return nil +- } +- return &UnmarshalError{"unmarshal failed to match one of [CreateFile DeleteFile RenameFile TextDocumentEdit]"} +-} +- +-func (t Or_WorkspaceOptions_textDocumentContent) MarshalJSON() ([]byte, error) { +- switch x := t.Value.(type) { +- case TextDocumentContentOptions: +- return json.Marshal(x) +- case TextDocumentContentRegistrationOptions: +- return json.Marshal(x) +- case nil: +- return []byte("null"), nil +- } +- return nil, fmt.Errorf("type %T not one of [TextDocumentContentOptions TextDocumentContentRegistrationOptions]", t) +-} +- +-func (t *Or_WorkspaceOptions_textDocumentContent) UnmarshalJSON(x []byte) error { +- if string(x) == "null" { +- t.Value = nil +- return nil +- } +- var h0 TextDocumentContentOptions +- if err := json.Unmarshal(x, &h0); err == nil { +- t.Value = h0 +- return nil +- } +- var h1 TextDocumentContentRegistrationOptions +- if err := json.Unmarshal(x, &h1); err == nil { +- t.Value = h1 +- return nil +- } +- return &UnmarshalError{"unmarshal failed to match one of [TextDocumentContentOptions TextDocumentContentRegistrationOptions]"} +-} +- +-func (t Or_textDocument_declaration) MarshalJSON() ([]byte, error) { +- switch x := t.Value.(type) { +- case Declaration: +- return json.Marshal(x) +- case []DeclarationLink: +- return json.Marshal(x) +- case nil: +- return []byte("null"), nil +- } +- return nil, fmt.Errorf("type %T not one of [Declaration []DeclarationLink]", t) +-} +- +-func (t *Or_textDocument_declaration) UnmarshalJSON(x []byte) error { +- if string(x) == "null" { +- t.Value = nil +- return nil +- } +- var h0 Declaration +- if err := json.Unmarshal(x, &h0); err == nil { +- t.Value = h0 +- return nil +- } +- var h1 []DeclarationLink +- if err := json.Unmarshal(x, &h1); err == nil { +- t.Value 
= h1 +- return nil +- } +- return &UnmarshalError{"unmarshal failed to match one of [Declaration []DeclarationLink]"} +-} +diff -urN a/gopls/internal/protocol/tsprotocol.go b/gopls/internal/protocol/tsprotocol.go +--- a/gopls/internal/protocol/tsprotocol.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/protocol/tsprotocol.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,7011 +0,0 @@ +-// Copyright 2023 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-// Code generated for LSP. DO NOT EDIT. +- +-package protocol +- +-// Code generated from protocol/metaModel.json at ref release/protocol/3.17.6-next.14 (hash 66a087310eea0d60495ba3578d78f70409c403d9). +-// https://github.com/microsoft/vscode-languageserver-node/blob/release/protocol/3.17.6-next.14/protocol/metaModel.json +-// LSP metaData.version = 3.17.0. +- +-import "encoding/json" +- +-// A special text edit with an additional change annotation. +-// +-// @since 3.16.0. +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#annotatedTextEdit +-type AnnotatedTextEdit struct { +- // The actual identifier of the change annotation +- AnnotationID *ChangeAnnotationIdentifier `json:"annotationId,omitempty"` +- TextEdit +-} +- +-// Defines how values from a set of defaults and an individual item will be +-// merged. +-// +-// @since 3.18.0 +-type ApplyKind uint32 +- +-// The parameters passed via an apply workspace edit request. +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#applyWorkspaceEditParams +-type ApplyWorkspaceEditParams struct { +- // An optional label of the workspace edit. This label is +- // presented in the user interface for example on an undo +- // stack to undo the workspace edit. +- Label string `json:"label,omitempty"` +- // The edits to apply. +- Edit WorkspaceEdit `json:"edit"` +- // Additional data about the edit. +- // +- // @since 3.18.0 +- // @proposed +- Metadata *WorkspaceEditMetadata `json:"metadata,omitempty"` +-} +- +-// The result returned from the apply workspace edit request. +-// +-// @since 3.17 renamed from ApplyWorkspaceEditResponse +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#applyWorkspaceEditResult +-type ApplyWorkspaceEditResult struct { +- // Indicates whether the edit was applied or not. +- Applied bool `json:"applied"` +- // An optional textual description for why the edit was not applied. +- // This may be used by the server for diagnostic logging or to provide +- // a suitable error for a request that triggered the edit. +- FailureReason string `json:"failureReason,omitempty"` +- // Depending on the client's failure handling strategy `failedChange` might +- // contain the index of the change that failed. This property is only available +- // if the client signals a `failureHandlingStrategy` in its client capabilities. +- FailedChange uint32 `json:"failedChange"` +-} +- +-// A base for all symbol information. +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#baseSymbolInformation +-type BaseSymbolInformation struct { +- // The name of this symbol. +- Name string `json:"name"` +- // The kind of this symbol. +- Kind SymbolKind `json:"kind"` +- // Tags for this symbol. 
+- // +- // @since 3.16.0 +- Tags []SymbolTag `json:"tags,omitempty"` +- // The name of the symbol containing this symbol. This information is for +- // user interface purposes (e.g. to render a qualifier in the user interface +- // if necessary). It can't be used to re-infer a hierarchy for the document +- // symbols. +- ContainerName string `json:"containerName,omitempty"` +-} +- +-// @since 3.16.0 +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#callHierarchyClientCapabilities +-type CallHierarchyClientCapabilities struct { +- // Whether implementation supports dynamic registration. If this is set to `true` +- // the client supports the new `(TextDocumentRegistrationOptions & StaticRegistrationOptions)` +- // return value for the corresponding server capability as well. +- DynamicRegistration bool `json:"dynamicRegistration,omitempty"` +-} +- +-// Represents an incoming call, e.g. a caller of a method or constructor. +-// +-// @since 3.16.0 +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#callHierarchyIncomingCall +-type CallHierarchyIncomingCall struct { +- // The item that makes the call. +- From CallHierarchyItem `json:"from"` +- // The ranges at which the calls appear. This is relative to the caller +- // denoted by {@link CallHierarchyIncomingCall.from `this.from`}. +- FromRanges []Range `json:"fromRanges"` +-} +- +-// The parameter of a `callHierarchy/incomingCalls` request. +-// +-// @since 3.16.0 +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#callHierarchyIncomingCallsParams +-type CallHierarchyIncomingCallsParams struct { +- Item CallHierarchyItem `json:"item"` +- WorkDoneProgressParams +- PartialResultParams +-} +- +-// Represents programming constructs like functions or constructors in the context +-// of call hierarchy. +-// +-// @since 3.16.0 +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#callHierarchyItem +-type CallHierarchyItem struct { +- // The name of this item. +- Name string `json:"name"` +- // The kind of this item. +- Kind SymbolKind `json:"kind"` +- // Tags for this item. +- Tags []SymbolTag `json:"tags,omitempty"` +- // More detail for this item, e.g. the signature of a function. +- Detail string `json:"detail,omitempty"` +- // The resource identifier of this item. +- URI DocumentURI `json:"uri"` +- // The range enclosing this symbol not including leading/trailing whitespace but everything else, e.g. comments and code. +- Range Range `json:"range"` +- // The range that should be selected and revealed when this symbol is being picked, e.g. the name of a function. +- // Must be contained by the {@link CallHierarchyItem.range `range`}. +- SelectionRange Range `json:"selectionRange"` +- // A data entry field that is preserved between a call hierarchy prepare and +- // incoming calls or outgoing calls requests. +- Data any `json:"data,omitempty"` +-} +- +-// Call hierarchy options used during static registration. +-// +-// @since 3.16.0 +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#callHierarchyOptions +-type CallHierarchyOptions struct { +- WorkDoneProgressOptions +-} +- +-// Represents an outgoing call, e.g. calling a getter from a method or a method from a constructor etc. 
+-// +-// @since 3.16.0 +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#callHierarchyOutgoingCall +-type CallHierarchyOutgoingCall struct { +- // The item that is called. +- To CallHierarchyItem `json:"to"` +- // The range at which this item is called. This is the range relative to the caller, e.g the item +- // passed to {@link CallHierarchyItemProvider.provideCallHierarchyOutgoingCalls `provideCallHierarchyOutgoingCalls`} +- // and not {@link CallHierarchyOutgoingCall.to `this.to`}. +- FromRanges []Range `json:"fromRanges"` +-} +- +-// The parameter of a `callHierarchy/outgoingCalls` request. +-// +-// @since 3.16.0 +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#callHierarchyOutgoingCallsParams +-type CallHierarchyOutgoingCallsParams struct { +- Item CallHierarchyItem `json:"item"` +- WorkDoneProgressParams +- PartialResultParams +-} +- +-// The parameter of a `textDocument/prepareCallHierarchy` request. +-// +-// @since 3.16.0 +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#callHierarchyPrepareParams +-type CallHierarchyPrepareParams struct { +- TextDocumentPositionParams +- WorkDoneProgressParams +-} +- +-// Call hierarchy options used during static or dynamic registration. +-// +-// @since 3.16.0 +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#callHierarchyRegistrationOptions +-type CallHierarchyRegistrationOptions struct { +- TextDocumentRegistrationOptions +- CallHierarchyOptions +- StaticRegistrationOptions +-} +- +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#cancelParams +-type CancelParams struct { +- // The request id to cancel. +- ID any `json:"id"` +-} +- +-// Additional information that describes document changes. +-// +-// @since 3.16.0 +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#changeAnnotation +-type ChangeAnnotation struct { +- // A human-readable string describing the actual change. The string +- // is rendered prominent in the user interface. +- Label string `json:"label"` +- // A flag which indicates that user confirmation is needed +- // before applying the change. +- NeedsConfirmation bool `json:"needsConfirmation,omitempty"` +- // A human-readable string which is rendered less prominent in +- // the user interface. +- Description string `json:"description,omitempty"` +-} +- +-// An identifier to refer to a change annotation stored with a workspace edit. +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#changeAnnotationIdentifier +-type ChangeAnnotationIdentifier = string // (alias) +-// @since 3.18.0 +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#changeAnnotationsSupportOptions +-type ChangeAnnotationsSupportOptions struct { +- // Whether the client groups edits with equal labels into tree nodes, +- // for instance all edits labelled with "Changes in Strings" would +- // be a tree node. +- GroupsOnLabel bool `json:"groupsOnLabel,omitempty"` +-} +- +-// Defines the capabilities provided by the client. +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#clientCapabilities +-type ClientCapabilities struct { +- // Workspace specific client capabilities. 
+- Workspace WorkspaceClientCapabilities `json:"workspace,omitempty"` +- // Text document specific client capabilities. +- TextDocument TextDocumentClientCapabilities `json:"textDocument,omitempty"` +- // Capabilities specific to the notebook document support. +- // +- // @since 3.17.0 +- NotebookDocument *NotebookDocumentClientCapabilities `json:"notebookDocument,omitempty"` +- // Window specific client capabilities. +- Window WindowClientCapabilities `json:"window,omitempty"` +- // General client capabilities. +- // +- // @since 3.16.0 +- General *GeneralClientCapabilities `json:"general,omitempty"` +- // Experimental client capabilities. +- Experimental any `json:"experimental,omitempty"` +-} +- +-// @since 3.18.0 +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#clientCodeActionKindOptions +-type ClientCodeActionKindOptions struct { +- // The code action kind values the client supports. When this +- // property exists the client also guarantees that it will +- // handle values outside its set gracefully and falls back +- // to a default value when unknown. +- ValueSet []CodeActionKind `json:"valueSet"` +-} +- +-// @since 3.18.0 +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#clientCodeActionLiteralOptions +-type ClientCodeActionLiteralOptions struct { +- // The code action kind is support with the following value +- // set. +- CodeActionKind ClientCodeActionKindOptions `json:"codeActionKind"` +-} +- +-// @since 3.18.0 +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#clientCodeActionResolveOptions +-type ClientCodeActionResolveOptions struct { +- // The properties that a client can resolve lazily. +- Properties []string `json:"properties"` +-} +- +-// @since 3.18.0 +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#clientCodeLensResolveOptions +-type ClientCodeLensResolveOptions struct { +- // The properties that a client can resolve lazily. +- Properties []string `json:"properties"` +-} +- +-// @since 3.18.0 +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#clientCompletionItemInsertTextModeOptions +-type ClientCompletionItemInsertTextModeOptions struct { +- ValueSet []InsertTextMode `json:"valueSet"` +-} +- +-// @since 3.18.0 +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#clientCompletionItemOptions +-type ClientCompletionItemOptions struct { +- // Client supports snippets as insert text. +- // +- // A snippet can define tab stops and placeholders with `$1`, `$2` +- // and `${3:foo}`. `$0` defines the final tab stop, it defaults to +- // the end of the snippet. Placeholders with equal identifiers are linked, +- // that is typing in one will update others too. +- SnippetSupport bool `json:"snippetSupport,omitempty"` +- // Client supports commit characters on a completion item. +- CommitCharactersSupport bool `json:"commitCharactersSupport,omitempty"` +- // Client supports the following content formats for the documentation +- // property. The order describes the preferred format of the client. +- DocumentationFormat []MarkupKind `json:"documentationFormat,omitempty"` +- // Client supports the deprecated property on a completion item. 
+- DeprecatedSupport bool `json:"deprecatedSupport,omitempty"` +- // Client supports the preselect property on a completion item. +- PreselectSupport bool `json:"preselectSupport,omitempty"` +- // Client supports the tag property on a completion item. Clients supporting +- // tags have to handle unknown tags gracefully. Clients especially need to +- // preserve unknown tags when sending a completion item back to the server in +- // a resolve call. +- // +- // @since 3.15.0 +- TagSupport *CompletionItemTagOptions `json:"tagSupport,omitempty"` +- // Client support insert replace edit to control different behavior if a +- // completion item is inserted in the text or should replace text. +- // +- // @since 3.16.0 +- InsertReplaceSupport bool `json:"insertReplaceSupport,omitempty"` +- // Indicates which properties a client can resolve lazily on a completion +- // item. Before version 3.16.0 only the predefined properties `documentation` +- // and `details` could be resolved lazily. +- // +- // @since 3.16.0 +- ResolveSupport *ClientCompletionItemResolveOptions `json:"resolveSupport,omitempty"` +- // The client supports the `insertTextMode` property on +- // a completion item to override the whitespace handling mode +- // as defined by the client (see `insertTextMode`). +- // +- // @since 3.16.0 +- InsertTextModeSupport *ClientCompletionItemInsertTextModeOptions `json:"insertTextModeSupport,omitempty"` +- // The client has support for completion item label +- // details (see also `CompletionItemLabelDetails`). +- // +- // @since 3.17.0 +- LabelDetailsSupport bool `json:"labelDetailsSupport,omitempty"` +-} +- +-// @since 3.18.0 +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#clientCompletionItemOptionsKind +-type ClientCompletionItemOptionsKind struct { +- // The completion item kind values the client supports. When this +- // property exists the client also guarantees that it will +- // handle values outside its set gracefully and falls back +- // to a default value when unknown. +- // +- // If this property is not present the client only supports +- // the completion items kinds from `Text` to `Reference` as defined in +- // the initial version of the protocol. +- ValueSet []CompletionItemKind `json:"valueSet,omitempty"` +-} +- +-// @since 3.18.0 +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#clientCompletionItemResolveOptions +-type ClientCompletionItemResolveOptions struct { +- // The properties that a client can resolve lazily. +- Properties []string `json:"properties"` +-} +- +-// @since 3.18.0 +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#clientDiagnosticsTagOptions +-type ClientDiagnosticsTagOptions struct { +- // The tags supported by the client. +- ValueSet []DiagnosticTag `json:"valueSet"` +-} +- +-// @since 3.18.0 +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#clientFoldingRangeKindOptions +-type ClientFoldingRangeKindOptions struct { +- // The folding range kind values the client supports. When this +- // property exists the client also guarantees that it will +- // handle values outside its set gracefully and falls back +- // to a default value when unknown. 
+- ValueSet []FoldingRangeKind `json:"valueSet,omitempty"` +-} +- +-// @since 3.18.0 +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#clientFoldingRangeOptions +-type ClientFoldingRangeOptions struct { +- // If set, the client signals that it supports setting collapsedText on +- // folding ranges to display custom labels instead of the default text. +- // +- // @since 3.17.0 +- CollapsedText bool `json:"collapsedText,omitempty"` +-} +- +-// Information about the client +-// +-// @since 3.15.0 +-// @since 3.18.0 ClientInfo type name added. +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#clientInfo +-type ClientInfo struct { +- // The name of the client as defined by the client. +- Name string `json:"name"` +- // The client's version as defined by the client. +- Version string `json:"version,omitempty"` +-} +- +-// @since 3.18.0 +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#clientInlayHintResolveOptions +-type ClientInlayHintResolveOptions struct { +- // The properties that a client can resolve lazily. +- Properties []string `json:"properties"` +-} +- +-// @since 3.18.0 +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#clientSemanticTokensRequestFullDelta +-type ClientSemanticTokensRequestFullDelta struct { +- // The client will send the `textDocument/semanticTokens/full/delta` request if +- // the server provides a corresponding handler. +- Delta bool `json:"delta,omitempty"` +-} +- +-// @since 3.18.0 +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#clientSemanticTokensRequestOptions +-type ClientSemanticTokensRequestOptions struct { +- // The client will send the `textDocument/semanticTokens/range` request if +- // the server provides a corresponding handler. +- Range *Or_ClientSemanticTokensRequestOptions_range `json:"range,omitempty"` +- // The client will send the `textDocument/semanticTokens/full` request if +- // the server provides a corresponding handler. +- Full *Or_ClientSemanticTokensRequestOptions_full `json:"full,omitempty"` +-} +- +-// @since 3.18.0 +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#clientShowMessageActionItemOptions +-type ClientShowMessageActionItemOptions struct { +- // Whether the client supports additional attributes which +- // are preserved and send back to the server in the +- // request's response. +- AdditionalPropertiesSupport bool `json:"additionalPropertiesSupport,omitempty"` +-} +- +-// @since 3.18.0 +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#clientSignatureInformationOptions +-type ClientSignatureInformationOptions struct { +- // Client supports the following content formats for the documentation +- // property. The order describes the preferred format of the client. +- DocumentationFormat []MarkupKind `json:"documentationFormat,omitempty"` +- // Client capabilities specific to parameter information. +- ParameterInformation *ClientSignatureParameterInformationOptions `json:"parameterInformation,omitempty"` +- // The client supports the `activeParameter` property on `SignatureInformation` +- // literal. 
+- // +- // @since 3.16.0 +- ActiveParameterSupport bool `json:"activeParameterSupport,omitempty"` +- // The client supports the `activeParameter` property on +- // `SignatureHelp`/`SignatureInformation` being set to `null` to +- // indicate that no parameter should be active. +- // +- // @since 3.18.0 +- // @proposed +- NoActiveParameterSupport bool `json:"noActiveParameterSupport,omitempty"` +-} +- +-// @since 3.18.0 +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#clientSignatureParameterInformationOptions +-type ClientSignatureParameterInformationOptions struct { +- // The client supports processing label offsets instead of a +- // simple label string. +- // +- // @since 3.14.0 +- LabelOffsetSupport bool `json:"labelOffsetSupport,omitempty"` +-} +- +-// @since 3.18.0 +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#clientSymbolKindOptions +-type ClientSymbolKindOptions struct { +- // The symbol kind values the client supports. When this +- // property exists the client also guarantees that it will +- // handle values outside its set gracefully and falls back +- // to a default value when unknown. +- // +- // If this property is not present the client only supports +- // the symbol kinds from `File` to `Array` as defined in +- // the initial version of the protocol. +- ValueSet []SymbolKind `json:"valueSet,omitempty"` +-} +- +-// @since 3.18.0 +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#clientSymbolResolveOptions +-type ClientSymbolResolveOptions struct { +- // The properties that a client can resolve lazily. Usually +- // `location.range` +- Properties []string `json:"properties"` +-} +- +-// @since 3.18.0 +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#clientSymbolTagOptions +-type ClientSymbolTagOptions struct { +- // The tags supported by the client. +- ValueSet []SymbolTag `json:"valueSet"` +-} +- +-// A code action represents a change that can be performed in code, e.g. to fix a problem or +-// to refactor code. +-// +-// A CodeAction must set either `edit` and/or a `command`. If both are supplied, the `edit` is applied first, then the `command` is executed. +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#codeAction +-type CodeAction struct { +- // A short, human-readable, title for this code action. +- Title string `json:"title"` +- // The kind of the code action. +- // +- // Used to filter code actions. +- Kind CodeActionKind `json:"kind,omitempty"` +- // The diagnostics that this code action resolves. +- Diagnostics []Diagnostic `json:"diagnostics,omitempty"` +- // Marks this as a preferred action. Preferred actions are used by the `auto fix` command and can be targeted +- // by keybindings. +- // +- // A quick fix should be marked preferred if it properly addresses the underlying error. +- // A refactoring should be marked preferred if it is the most reasonable choice of actions to take. +- // +- // @since 3.15.0 +- IsPreferred bool `json:"isPreferred,omitempty"` +- // Marks that the code action cannot currently be applied. +- // +- // Clients should follow the following guidelines regarding disabled code actions: +- // +- // - Disabled code actions are not shown in automatic [lightbulbs](https://code.visualstudio.com/docs/editor/editingevolved#_code-action) +- // code action menus. 
+- // +- // - Disabled actions are shown as faded out in the code action menu when the user requests a more specific type +- // of code action, such as refactorings. +- // +- // - If the user has a [keybinding](https://code.visualstudio.com/docs/editor/refactoring#_keybindings-for-code-actions) +- // that auto applies a code action and only disabled code actions are returned, the client should show the user an +- // error message with `reason` in the editor. +- // +- // @since 3.16.0 +- Disabled *CodeActionDisabled `json:"disabled,omitempty"` +- // The workspace edit this code action performs. +- Edit *WorkspaceEdit `json:"edit,omitempty"` +- // A command this code action executes. If a code action +- // provides an edit and a command, first the edit is +- // executed and then the command. +- Command *Command `json:"command,omitempty"` +- // A data entry field that is preserved on a code action between +- // a `textDocument/codeAction` and a `codeAction/resolve` request. +- // +- // @since 3.16.0 +- Data *json.RawMessage `json:"data,omitempty"` +- // Tags for this code action. +- // +- // @since 3.18.0 - proposed +- Tags []CodeActionTag `json:"tags,omitempty"` +-} +- +-// The Client Capabilities of a {@link CodeActionRequest}. +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#codeActionClientCapabilities +-type CodeActionClientCapabilities struct { +- // Whether code action supports dynamic registration. +- DynamicRegistration bool `json:"dynamicRegistration,omitempty"` +- // The client support code action literals of type `CodeAction` as a valid +- // response of the `textDocument/codeAction` request. If the property is not +- // set the request can only return `Command` literals. +- // +- // @since 3.8.0 +- CodeActionLiteralSupport ClientCodeActionLiteralOptions `json:"codeActionLiteralSupport,omitempty"` +- // Whether code action supports the `isPreferred` property. +- // +- // @since 3.15.0 +- IsPreferredSupport bool `json:"isPreferredSupport,omitempty"` +- // Whether code action supports the `disabled` property. +- // +- // @since 3.16.0 +- DisabledSupport bool `json:"disabledSupport,omitempty"` +- // Whether code action supports the `data` property which is +- // preserved between a `textDocument/codeAction` and a +- // `codeAction/resolve` request. +- // +- // @since 3.16.0 +- DataSupport bool `json:"dataSupport,omitempty"` +- // Whether the client supports resolving additional code action +- // properties via a separate `codeAction/resolve` request. +- // +- // @since 3.16.0 +- ResolveSupport *ClientCodeActionResolveOptions `json:"resolveSupport,omitempty"` +- // Whether the client honors the change annotations in +- // text edits and resource operations returned via the +- // `CodeAction#edit` property by for example presenting +- // the workspace edit in the user interface and asking +- // for confirmation. +- // +- // @since 3.16.0 +- HonorsChangeAnnotations bool `json:"honorsChangeAnnotations,omitempty"` +- // Whether the client supports documentation for a class of +- // code actions. +- // +- // @since 3.18.0 +- // @proposed +- DocumentationSupport bool `json:"documentationSupport,omitempty"` +- // Client supports the tag property on a code action. Clients +- // supporting tags have to handle unknown tags gracefully. 
+- // +- // @since 3.18.0 - proposed +- TagSupport *CodeActionTagOptions `json:"tagSupport,omitempty"` +-} +- +-// Contains additional diagnostic information about the context in which +-// a {@link CodeActionProvider.provideCodeActions code action} is run. +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#codeActionContext +-type CodeActionContext struct { +- // An array of diagnostics known on the client side overlapping the range provided to the +- // `textDocument/codeAction` request. They are provided so that the server knows which +- // errors are currently presented to the user for the given range. There is no guarantee +- // that these accurately reflect the error state of the resource. The primary parameter +- // to compute code actions is the provided range. +- Diagnostics []Diagnostic `json:"diagnostics"` +- // Requested kind of actions to return. +- // +- // Actions not of this kind are filtered out by the client before being shown. So servers +- // can omit computing them. +- Only []CodeActionKind `json:"only,omitempty"` +- // The reason why code actions were requested. +- // +- // @since 3.17.0 +- TriggerKind *CodeActionTriggerKind `json:"triggerKind,omitempty"` +-} +- +-// Captures why the code action is currently disabled. +-// +-// @since 3.18.0 +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#codeActionDisabled +-type CodeActionDisabled struct { +- // Human readable description of why the code action is currently disabled. +- // +- // This is displayed in the code actions UI. +- Reason string `json:"reason"` +-} +- +-// A set of predefined code action kinds +-type CodeActionKind string +- +-// Documentation for a class of code actions. +-// +-// @since 3.18.0 +-// @proposed +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#codeActionKindDocumentation +-type CodeActionKindDocumentation struct { +- // The kind of the code action being documented. +- // +- // If the kind is generic, such as `CodeActionKind.Refactor`, the documentation will be shown whenever any +- // refactorings are returned. If the kind if more specific, such as `CodeActionKind.RefactorExtract`, the +- // documentation will only be shown when extract refactoring code actions are returned. +- Kind CodeActionKind `json:"kind"` +- // Command that is ued to display the documentation to the user. +- // +- // The title of this documentation code action is taken from {@linkcode Command.title} +- Command Command `json:"command"` +-} +- +-// Provider options for a {@link CodeActionRequest}. +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#codeActionOptions +-type CodeActionOptions struct { +- // CodeActionKinds that this server may return. +- // +- // The list of kinds may be generic, such as `CodeActionKind.Refactor`, or the server +- // may list out every specific kind they provide. +- CodeActionKinds []CodeActionKind `json:"codeActionKinds,omitempty"` +- // Static documentation for a class of code actions. +- // +- // Documentation from the provider should be shown in the code actions menu if either: +- // +- // +- // - Code actions of `kind` are requested by the editor. In this case, the editor will show the documentation that +- // most closely matches the requested code action kind. 
For example, if a provider has documentation for +- // both `Refactor` and `RefactorExtract`, when the user requests code actions for `RefactorExtract`, +- // the editor will use the documentation for `RefactorExtract` instead of the documentation for `Refactor`. +- // +- // +- // - Any code actions of `kind` are returned by the provider. +- // +- // At most one documentation entry should be shown per provider. +- // +- // @since 3.18.0 +- // @proposed +- Documentation []CodeActionKindDocumentation `json:"documentation,omitempty"` +- // The server provides support to resolve additional +- // information for a code action. +- // +- // @since 3.16.0 +- ResolveProvider bool `json:"resolveProvider,omitempty"` +- WorkDoneProgressOptions +-} +- +-// The parameters of a {@link CodeActionRequest}. +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#codeActionParams +-type CodeActionParams struct { +- // The document in which the command was invoked. +- TextDocument TextDocumentIdentifier `json:"textDocument"` +- // The range for which the command was invoked. +- Range Range `json:"range"` +- // Context carrying additional information. +- Context CodeActionContext `json:"context"` +- WorkDoneProgressParams +- PartialResultParams +-} +- +-// Registration options for a {@link CodeActionRequest}. +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#codeActionRegistrationOptions +-type CodeActionRegistrationOptions struct { +- TextDocumentRegistrationOptions +- CodeActionOptions +-} +- +-// Code action tags are extra annotations that tweak the behavior of a code action. +-// +-// @since 3.18.0 - proposed +-type CodeActionTag uint32 +- +-// @since 3.18.0 - proposed +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#codeActionTagOptions +-type CodeActionTagOptions struct { +- // The tags supported by the client. +- ValueSet []CodeActionTag `json:"valueSet"` +-} +- +-// The reason why code actions were requested. +-// +-// @since 3.17.0 +-type CodeActionTriggerKind uint32 +- +-// Structure to capture a description for an error code. +-// +-// @since 3.16.0 +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#codeDescription +-type CodeDescription struct { +- // An URI to open with more information about the diagnostic error. +- Href URI `json:"href"` +-} +- +-// A code lens represents a {@link Command command} that should be shown along with +-// source text, like the number of references, a way to run tests, etc. +-// +-// A code lens is _unresolved_ when no command is associated to it. For performance +-// reasons the creation of a code lens and resolving should be done in two stages. +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#codeLens +-type CodeLens struct { +- // The range in which this code lens is valid. Should only span a single line. +- Range Range `json:"range"` +- // The command this code lens represents. +- Command *Command `json:"command,omitempty"` +- // A data entry field that is preserved on a code lens item between +- // a {@link CodeLensRequest} and a {@link CodeLensResolveRequest} +- Data any `json:"data,omitempty"` +-} +- +-// The client capabilities of a {@link CodeLensRequest}. 
+-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#codeLensClientCapabilities +-type CodeLensClientCapabilities struct { +- // Whether code lens supports dynamic registration. +- DynamicRegistration bool `json:"dynamicRegistration,omitempty"` +- // Whether the client supports resolving additional code lens +- // properties via a separate `codeLens/resolve` request. +- // +- // @since 3.18.0 +- ResolveSupport *ClientCodeLensResolveOptions `json:"resolveSupport,omitempty"` +-} +- +-// Code Lens provider options of a {@link CodeLensRequest}. +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#codeLensOptions +-type CodeLensOptions struct { +- // Code lens has a resolve provider as well. +- ResolveProvider bool `json:"resolveProvider,omitempty"` +- WorkDoneProgressOptions +-} +- +-// The parameters of a {@link CodeLensRequest}. +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#codeLensParams +-type CodeLensParams struct { +- // The document to request code lens for. +- TextDocument TextDocumentIdentifier `json:"textDocument"` +- WorkDoneProgressParams +- PartialResultParams +-} +- +-// Registration options for a {@link CodeLensRequest}. +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#codeLensRegistrationOptions +-type CodeLensRegistrationOptions struct { +- TextDocumentRegistrationOptions +- CodeLensOptions +-} +- +-// @since 3.16.0 +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#codeLensWorkspaceClientCapabilities +-type CodeLensWorkspaceClientCapabilities struct { +- // Whether the client implementation supports a refresh request sent from the +- // server to the client. +- // +- // Note that this event is global and will force the client to refresh all +- // code lenses currently shown. It should be used with absolute care and is +- // useful for situation where a server for example detect a project wide +- // change that requires such a calculation. +- RefreshSupport bool `json:"refreshSupport,omitempty"` +-} +- +-// Represents a color in RGBA space. +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#color +-type Color struct { +- // The red component of this color in the range [0-1]. +- Red float64 `json:"red"` +- // The green component of this color in the range [0-1]. +- Green float64 `json:"green"` +- // The blue component of this color in the range [0-1]. +- Blue float64 `json:"blue"` +- // The alpha component of this color in the range [0-1]. +- Alpha float64 `json:"alpha"` +-} +- +-// Represents a color range from a document. +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#colorInformation +-type ColorInformation struct { +- // The range in the document where this color appears. +- Range Range `json:"range"` +- // The actual color value for this color range. +- Color Color `json:"color"` +-} +- +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#colorPresentation +-type ColorPresentation struct { +- // The label of this color presentation. It will be shown on the color +- // picker header. By default this is also the text that is inserted when selecting +- // this color presentation. 
+- Label string `json:"label"` +- // An {@link TextEdit edit} which is applied to a document when selecting +- // this presentation for the color. When `falsy` the {@link ColorPresentation.label label} +- // is used. +- TextEdit *TextEdit `json:"textEdit,omitempty"` +- // An optional array of additional {@link TextEdit text edits} that are applied when +- // selecting this color presentation. Edits must not overlap with the main {@link ColorPresentation.textEdit edit} nor with themselves. +- AdditionalTextEdits []TextEdit `json:"additionalTextEdits,omitempty"` +-} +- +-// Parameters for a {@link ColorPresentationRequest}. +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#colorPresentationParams +-type ColorPresentationParams struct { +- // The text document. +- TextDocument TextDocumentIdentifier `json:"textDocument"` +- // The color to request presentations for. +- Color Color `json:"color"` +- // The range where the color would be inserted. Serves as a context. +- Range Range `json:"range"` +- WorkDoneProgressParams +- PartialResultParams +-} +- +-// Represents a reference to a command. Provides a title which +-// will be used to represent a command in the UI and, optionally, +-// an array of arguments which will be passed to the command handler +-// function when invoked. +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#command +-type Command struct { +- // Title of the command, like `save`. +- Title string `json:"title"` +- // An optional tooltip. +- // +- // @since 3.18.0 +- // @proposed +- Tooltip string `json:"tooltip,omitempty"` +- // The identifier of the actual command handler. +- Command string `json:"command"` +- // Arguments that the command handler should be +- // invoked with. +- Arguments []json.RawMessage `json:"arguments,omitempty"` +-} +- +-// Completion client capabilities +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#completionClientCapabilities +-type CompletionClientCapabilities struct { +- // Whether completion supports dynamic registration. +- DynamicRegistration bool `json:"dynamicRegistration,omitempty"` +- // The client supports the following `CompletionItem` specific +- // capabilities. +- CompletionItem ClientCompletionItemOptions `json:"completionItem,omitempty"` +- CompletionItemKind *ClientCompletionItemOptionsKind `json:"completionItemKind,omitempty"` +- // Defines how the client handles whitespace and indentation +- // when accepting a completion item that uses multi line +- // text in either `insertText` or `textEdit`. +- // +- // @since 3.17.0 +- InsertTextMode InsertTextMode `json:"insertTextMode,omitempty"` +- // The client supports to send additional context information for a +- // `textDocument/completion` request. +- ContextSupport bool `json:"contextSupport,omitempty"` +- // The client supports the following `CompletionList` specific +- // capabilities. +- // +- // @since 3.17.0 +- CompletionList *CompletionListCapabilities `json:"completionList,omitempty"` +-} +- +-// Contains additional information about the context in which a completion request is triggered. +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#completionContext +-type CompletionContext struct { +- // How the completion was triggered. 
+- TriggerKind CompletionTriggerKind `json:"triggerKind"` +- // The trigger character (a single character) that has trigger code complete. +- // Is undefined if `triggerKind !== CompletionTriggerKind.TriggerCharacter` +- TriggerCharacter string `json:"triggerCharacter,omitempty"` +-} +- +-// A completion item represents a text snippet that is +-// proposed to complete text that is being typed. +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#completionItem +-type CompletionItem struct { +- // The label of this completion item. +- // +- // The label property is also by default the text that +- // is inserted when selecting this completion. +- // +- // If label details are provided the label itself should +- // be an unqualified name of the completion item. +- Label string `json:"label"` +- // Additional details for the label +- // +- // @since 3.17.0 +- LabelDetails *CompletionItemLabelDetails `json:"labelDetails,omitempty"` +- // The kind of this completion item. Based of the kind +- // an icon is chosen by the editor. +- Kind CompletionItemKind `json:"kind,omitempty"` +- // Tags for this completion item. +- // +- // @since 3.15.0 +- Tags []CompletionItemTag `json:"tags,omitempty"` +- // A human-readable string with additional information +- // about this item, like type or symbol information. +- Detail string `json:"detail,omitempty"` +- // A human-readable string that represents a doc-comment. +- Documentation *Or_CompletionItem_documentation `json:"documentation,omitempty"` +- // Indicates if this item is deprecated. +- // @deprecated Use `tags` instead. +- Deprecated bool `json:"deprecated,omitempty"` +- // Select this item when showing. +- // +- // *Note* that only one completion item can be selected and that the +- // tool / client decides which item that is. The rule is that the *first* +- // item of those that match best is selected. +- Preselect bool `json:"preselect,omitempty"` +- // A string that should be used when comparing this item +- // with other items. When `falsy` the {@link CompletionItem.label label} +- // is used. +- SortText string `json:"sortText,omitempty"` +- // A string that should be used when filtering a set of +- // completion items. When `falsy` the {@link CompletionItem.label label} +- // is used. +- FilterText string `json:"filterText,omitempty"` +- // A string that should be inserted into a document when selecting +- // this completion. When `falsy` the {@link CompletionItem.label label} +- // is used. +- // +- // The `insertText` is subject to interpretation by the client side. +- // Some tools might not take the string literally. For example +- // VS Code when code complete is requested in this example +- // `con` and a completion item with an `insertText` of +- // `console` is provided it will only insert `sole`. Therefore it is +- // recommended to use `textEdit` instead since it avoids additional client +- // side interpretation. +- InsertText string `json:"insertText,omitempty"` +- // The format of the insert text. The format applies to both the +- // `insertText` property and the `newText` property of a provided +- // `textEdit`. If omitted defaults to `InsertTextFormat.PlainText`. +- // +- // Please note that the insertTextFormat doesn't apply to +- // `additionalTextEdits`. +- InsertTextFormat *InsertTextFormat `json:"insertTextFormat,omitempty"` +- // How whitespace and indentation is handled during completion +- // item insertion. 
If not provided the clients default value depends on +- // the `textDocument.completion.insertTextMode` client capability. +- // +- // @since 3.16.0 +- InsertTextMode *InsertTextMode `json:"insertTextMode,omitempty"` +- // An {@link TextEdit edit} which is applied to a document when selecting +- // this completion. When an edit is provided the value of +- // {@link CompletionItem.insertText insertText} is ignored. +- // +- // Most editors support two different operations when accepting a completion +- // item. One is to insert a completion text and the other is to replace an +- // existing text with a completion text. Since this can usually not be +- // predetermined by a server it can report both ranges. Clients need to +- // signal support for `InsertReplaceEdits` via the +- // `textDocument.completion.insertReplaceSupport` client capability +- // property. +- // +- // *Note 1:* The text edit's range as well as both ranges from an insert +- // replace edit must be a [single line] and they must contain the position +- // at which completion has been requested. +- // *Note 2:* If an `InsertReplaceEdit` is returned the edit's insert range +- // must be a prefix of the edit's replace range, that means it must be +- // contained and starting at the same position. +- // +- // @since 3.16.0 additional type `InsertReplaceEdit` +- TextEdit *Or_CompletionItem_textEdit `json:"textEdit,omitempty"` +- // The edit text used if the completion item is part of a CompletionList and +- // CompletionList defines an item default for the text edit range. +- // +- // Clients will only honor this property if they opt into completion list +- // item defaults using the capability `completionList.itemDefaults`. +- // +- // If not provided and a list's default range is provided the label +- // property is used as a text. +- // +- // @since 3.17.0 +- TextEditText string `json:"textEditText,omitempty"` +- // An optional array of additional {@link TextEdit text edits} that are applied when +- // selecting this completion. Edits must not overlap (including the same insert position) +- // with the main {@link CompletionItem.textEdit edit} nor with themselves. +- // +- // Additional text edits should be used to change text unrelated to the current cursor position +- // (for example adding an import statement at the top of the file if the completion item will +- // insert an unqualified type). +- AdditionalTextEdits []TextEdit `json:"additionalTextEdits,omitempty"` +- // An optional set of characters that when pressed while this completion is active will accept it first and +- // then type that character. *Note* that all commit characters should have `length=1` and that superfluous +- // characters will be ignored. +- CommitCharacters []string `json:"commitCharacters,omitempty"` +- // An optional {@link Command command} that is executed *after* inserting this completion. *Note* that +- // additional modifications to the current document should be described with the +- // {@link CompletionItem.additionalTextEdits additionalTextEdits}-property. +- Command *Command `json:"command,omitempty"` +- // A data entry field that is preserved on a completion item between a +- // {@link CompletionRequest} and a {@link CompletionResolveRequest}. +- Data any `json:"data,omitempty"` +-} +- +-// Specifies how fields from a completion item should be combined with those +-// from `completionList.itemDefaults`. +-// +-// If unspecified, all fields will be treated as ApplyKind.Replace. 
+-// +-// If a field's value is ApplyKind.Replace, the value from a completion item (if +-// provided and not `null`) will always be used instead of the value from +-// `completionItem.itemDefaults`. +-// +-// If a field's value is ApplyKind.Merge, the values will be merged using the rules +-// defined against each field below. +-// +-// Servers are only allowed to return `applyKind` if the client +-// signals support for this via the `completionList.applyKindSupport` +-// capability. +-// +-// @since 3.18.0 +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#completionItemApplyKinds +-type CompletionItemApplyKinds struct { +- // Specifies whether commitCharacters on a completion will replace or be +- // merged with those in `completionList.itemDefaults.commitCharacters`. +- // +- // If ApplyKind.Replace, the commit characters from the completion item will +- // always be used unless not provided, in which case those from +- // `completionList.itemDefaults.commitCharacters` will be used. An +- // empty list can be used if a completion item does not have any commit +- // characters and also should not use those from +- // `completionList.itemDefaults.commitCharacters`. +- // +- // If ApplyKind.Merge the commitCharacters for the completion will be the +- // union of all values in both `completionList.itemDefaults.commitCharacters` +- // and the completion's own `commitCharacters`. +- // +- // @since 3.18.0 +- CommitCharacters *ApplyKind `json:"commitCharacters,omitempty"` +- // Specifies whether the `data` field on a completion will replace or +- // be merged with data from `completionList.itemDefaults.data`. +- // +- // If ApplyKind.Replace, the data from the completion item will be used if +- // provided (and not `null`), otherwise +- // `completionList.itemDefaults.data` will be used. An empty object can +- // be used if a completion item does not have any data but also should +- // not use the value from `completionList.itemDefaults.data`. +- // +- // If ApplyKind.Merge, a shallow merge will be performed between +- // `completionList.itemDefaults.data` and the completion's own data +- // using the following rules: +- // +- // +- // - If a completion's `data` field is not provided (or `null`), the +- // entire `data` field from `completionList.itemDefaults.data` will be +- // used as-is. +- // - If a completion's `data` field is provided, each field will +- // overwrite the field of the same name in +- // `completionList.itemDefaults.data` but no merging of nested fields +- // within that value will occur. +- // +- // @since 3.18.0 +- Data *ApplyKind `json:"data,omitempty"` +-} +- +-// In many cases the items of an actual completion result share the same +-// value for properties like `commitCharacters` or the range of a text +-// edit. A completion list can therefore define item defaults which will +-// be used if a completion item itself doesn't specify the value. +-// +-// If a completion list specifies a default value and a completion item +-// also specifies a corresponding value, the rules for combining these are +-// defined by `applyKinds` (if the client supports it), defaulting to +-// ApplyKind.Replace. +-// +-// Servers are only allowed to return default values if the client +-// signals support for this via the `completionList.itemDefaults` +-// capability. 
+-// +-// @since 3.17.0 +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#completionItemDefaults +-type CompletionItemDefaults struct { +- // A default commit character set. +- // +- // @since 3.17.0 +- CommitCharacters []string `json:"commitCharacters,omitempty"` +- // A default edit range. +- // +- // @since 3.17.0 +- EditRange *Or_CompletionItemDefaults_editRange `json:"editRange,omitempty"` +- // A default insert text format. +- // +- // @since 3.17.0 +- InsertTextFormat *InsertTextFormat `json:"insertTextFormat,omitempty"` +- // A default insert text mode. +- // +- // @since 3.17.0 +- InsertTextMode *InsertTextMode `json:"insertTextMode,omitempty"` +- // A default data value. +- // +- // @since 3.17.0 +- Data any `json:"data,omitempty"` +-} +- +-// The kind of a completion entry. +-type CompletionItemKind uint32 +- +-// Additional details for a completion item label. +-// +-// @since 3.17.0 +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#completionItemLabelDetails +-type CompletionItemLabelDetails struct { +- // An optional string which is rendered less prominently directly after {@link CompletionItem.label label}, +- // without any spacing. Should be used for function signatures and type annotations. +- Detail string `json:"detail,omitempty"` +- // An optional string which is rendered less prominently after {@link CompletionItem.detail}. Should be used +- // for fully qualified names and file paths. +- Description string `json:"description,omitempty"` +-} +- +-// Completion item tags are extra annotations that tweak the rendering of a completion +-// item. +-// +-// @since 3.15.0 +-type CompletionItemTag uint32 +- +-// @since 3.18.0 +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#completionItemTagOptions +-type CompletionItemTagOptions struct { +- // The tags supported by the client. +- ValueSet []CompletionItemTag `json:"valueSet"` +-} +- +-// Represents a collection of {@link CompletionItem completion items} to be presented +-// in the editor. +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#completionList +-type CompletionList struct { +- // This list it not complete. Further typing results in recomputing this list. +- // +- // Recomputed lists have all their items replaced (not appended) in the +- // incomplete completion sessions. +- IsIncomplete bool `json:"isIncomplete"` +- // In many cases the items of an actual completion result share the same +- // value for properties like `commitCharacters` or the range of a text +- // edit. A completion list can therefore define item defaults which will +- // be used if a completion item itself doesn't specify the value. +- // +- // If a completion list specifies a default value and a completion item +- // also specifies a corresponding value, the rules for combining these are +- // defined by `applyKinds` (if the client supports it), defaulting to +- // ApplyKind.Replace. +- // +- // Servers are only allowed to return default values if the client +- // signals support for this via the `completionList.itemDefaults` +- // capability. +- // +- // @since 3.17.0 +- ItemDefaults *CompletionItemDefaults `json:"itemDefaults,omitempty"` +- // Specifies how fields from a completion item should be combined with those +- // from `completionList.itemDefaults`. 
+- // +- // If unspecified, all fields will be treated as ApplyKind.Replace. +- // +- // If a field's value is ApplyKind.Replace, the value from a completion item +- // (if provided and not `null`) will always be used instead of the value +- // from `completionItem.itemDefaults`. +- // +- // If a field's value is ApplyKind.Merge, the values will be merged using +- // the rules defined against each field below. +- // +- // Servers are only allowed to return `applyKind` if the client +- // signals support for this via the `completionList.applyKindSupport` +- // capability. +- // +- // @since 3.18.0 +- ApplyKind *CompletionItemApplyKinds `json:"applyKind,omitempty"` +- // The completion items. +- Items []CompletionItem `json:"items"` +-} +- +-// The client supports the following `CompletionList` specific +-// capabilities. +-// +-// @since 3.17.0 +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#completionListCapabilities +-type CompletionListCapabilities struct { +- // The client supports the following itemDefaults on +- // a completion list. +- // +- // The value lists the supported property names of the +- // `CompletionList.itemDefaults` object. If omitted +- // no properties are supported. +- // +- // @since 3.17.0 +- ItemDefaults []string `json:"itemDefaults,omitempty"` +- // Specifies whether the client supports `CompletionList.applyKind` to +- // indicate how supported values from `completionList.itemDefaults` +- // and `completion` will be combined. +- // +- // If a client supports `applyKind` it must support it for all fields +- // that it supports that are listed in `CompletionList.applyKind`. This +- // means when clients add support for new/future fields in completion +- // items the MUST also support merge for them if those fields are +- // defined in `CompletionList.applyKind`. +- // +- // @since 3.18.0 +- ApplyKindSupport bool `json:"applyKindSupport,omitempty"` +-} +- +-// Completion options. +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#completionOptions +-type CompletionOptions struct { +- // Most tools trigger completion request automatically without explicitly requesting +- // it using a keyboard shortcut (e.g. Ctrl+Space). Typically they do so when the user +- // starts to type an identifier. For example if the user types `c` in a JavaScript file +- // code complete will automatically pop up present `console` besides others as a +- // completion item. Characters that make up identifiers don't need to be listed here. +- // +- // If code complete should automatically be trigger on characters not being valid inside +- // an identifier (for example `.` in JavaScript) list them in `triggerCharacters`. +- TriggerCharacters []string `json:"triggerCharacters,omitempty"` +- // The list of all possible characters that commit a completion. This field can be used +- // if clients don't support individual commit characters per completion item. See +- // `ClientCapabilities.textDocument.completion.completionItem.commitCharactersSupport` +- // +- // If a server provides both `allCommitCharacters` and commit characters on an individual +- // completion item the ones on the completion item win. +- // +- // @since 3.2.0 +- AllCommitCharacters []string `json:"allCommitCharacters,omitempty"` +- // The server provides support to resolve additional +- // information for a completion item. 
+- ResolveProvider bool `json:"resolveProvider,omitempty"` +- // The server supports the following `CompletionItem` specific +- // capabilities. +- // +- // @since 3.17.0 +- CompletionItem *ServerCompletionItemOptions `json:"completionItem,omitempty"` +- WorkDoneProgressOptions +-} +- +-// Completion parameters +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#completionParams +-type CompletionParams struct { +- // The completion context. This is only available it the client specifies +- // to send this using the client capability `textDocument.completion.contextSupport === true` +- Context CompletionContext `json:"context,omitempty"` +- TextDocumentPositionParams +- WorkDoneProgressParams +- PartialResultParams +-} +- +-// Registration options for a {@link CompletionRequest}. +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#completionRegistrationOptions +-type CompletionRegistrationOptions struct { +- TextDocumentRegistrationOptions +- CompletionOptions +-} +- +-// How a completion was triggered +-type CompletionTriggerKind uint32 +- +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#configurationItem +-type ConfigurationItem struct { +- // The scope to get the configuration section for. +- ScopeURI *URI `json:"scopeUri,omitempty"` +- // The configuration section asked for. +- Section string `json:"section,omitempty"` +-} +- +-// The parameters of a configuration request. +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#configurationParams +-type ConfigurationParams struct { +- Items []ConfigurationItem `json:"items"` +-} +- +-// Create file operation. +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#createFile +-type CreateFile struct { +- // A create +- Kind string `json:"kind"` +- // The resource to create. +- URI DocumentURI `json:"uri"` +- // Additional options +- Options *CreateFileOptions `json:"options,omitempty"` +- ResourceOperation +-} +- +-// Options to create a file. +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#createFileOptions +-type CreateFileOptions struct { +- // Overwrite existing file. Overwrite wins over `ignoreIfExists` +- Overwrite bool `json:"overwrite,omitempty"` +- // Ignore if exists. +- IgnoreIfExists bool `json:"ignoreIfExists,omitempty"` +-} +- +-// The parameters sent in notifications/requests for user-initiated creation of +-// files. +-// +-// @since 3.16.0 +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#createFilesParams +-type CreateFilesParams struct { +- // An array of all files/folders created in this operation. +- Files []FileCreate `json:"files"` +-} +- +-// The declaration of a symbol representation as one or many {@link Location locations}. +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#declaration +-type Declaration = []Location // (alias) +-// @since 3.14.0 +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#declarationClientCapabilities +-type DeclarationClientCapabilities struct { +- // Whether declaration supports dynamic registration. 
If this is set to `true` +- // the client supports the new `DeclarationRegistrationOptions` return value +- // for the corresponding server capability as well. +- DynamicRegistration bool `json:"dynamicRegistration,omitempty"` +- // The client supports additional metadata in the form of declaration links. +- LinkSupport bool `json:"linkSupport,omitempty"` +-} +- +-// Information about where a symbol is declared. +-// +-// Provides additional metadata over normal {@link Location location} declarations, including the range of +-// the declaring symbol. +-// +-// Servers should prefer returning `DeclarationLink` over `Declaration` if supported +-// by the client. +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#declarationLink +-type DeclarationLink = LocationLink // (alias) +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#declarationOptions +-type DeclarationOptions struct { +- WorkDoneProgressOptions +-} +- +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#declarationParams +-type DeclarationParams struct { +- TextDocumentPositionParams +- WorkDoneProgressParams +- PartialResultParams +-} +- +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#declarationRegistrationOptions +-type DeclarationRegistrationOptions struct { +- DeclarationOptions +- TextDocumentRegistrationOptions +- StaticRegistrationOptions +-} +- +-// The definition of a symbol represented as one or many {@link Location locations}. +-// For most programming languages there is only one location at which a symbol is +-// defined. +-// +-// Servers should prefer returning `DefinitionLink` over `Definition` if supported +-// by the client. +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#definition +-type Definition = Or_Definition // (alias) +-// Client Capabilities for a {@link DefinitionRequest}. +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#definitionClientCapabilities +-type DefinitionClientCapabilities struct { +- // Whether definition supports dynamic registration. +- DynamicRegistration bool `json:"dynamicRegistration,omitempty"` +- // The client supports additional metadata in the form of definition links. +- // +- // @since 3.14.0 +- LinkSupport bool `json:"linkSupport,omitempty"` +-} +- +-// Information about where a symbol is defined. +-// +-// Provides additional metadata over normal {@link Location location} definitions, including the range of +-// the defining symbol +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#definitionLink +-type DefinitionLink = LocationLink // (alias) +-// Server Capabilities for a {@link DefinitionRequest}. +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#definitionOptions +-type DefinitionOptions struct { +- WorkDoneProgressOptions +-} +- +-// Parameters for a {@link DefinitionRequest}. +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#definitionParams +-type DefinitionParams struct { +- TextDocumentPositionParams +- WorkDoneProgressParams +- PartialResultParams +-} +- +-// Registration options for a {@link DefinitionRequest}. 
+-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#definitionRegistrationOptions +-type DefinitionRegistrationOptions struct { +- TextDocumentRegistrationOptions +- DefinitionOptions +-} +- +-// Delete file operation +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#deleteFile +-type DeleteFile struct { +- // A delete +- Kind string `json:"kind"` +- // The file to delete. +- URI DocumentURI `json:"uri"` +- // Delete options. +- Options *DeleteFileOptions `json:"options,omitempty"` +- ResourceOperation +-} +- +-// Delete file options +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#deleteFileOptions +-type DeleteFileOptions struct { +- // Delete the content recursively if a folder is denoted. +- Recursive bool `json:"recursive,omitempty"` +- // Ignore the operation if the file doesn't exist. +- IgnoreIfNotExists bool `json:"ignoreIfNotExists,omitempty"` +-} +- +-// The parameters sent in notifications/requests for user-initiated deletes of +-// files. +-// +-// @since 3.16.0 +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#deleteFilesParams +-type DeleteFilesParams struct { +- // An array of all files/folders deleted in this operation. +- Files []FileDelete `json:"files"` +-} +- +-// Represents a diagnostic, such as a compiler error or warning. Diagnostic objects +-// are only valid in the scope of a resource. +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#diagnostic +-type Diagnostic struct { +- // The range at which the message applies +- Range Range `json:"range"` +- // The diagnostic's severity. To avoid interpretation mismatches when a +- // server is used with different clients it is highly recommended that servers +- // always provide a severity value. +- Severity DiagnosticSeverity `json:"severity,omitempty"` +- // The diagnostic's code, which usually appear in the user interface. +- Code any `json:"code,omitempty"` +- // An optional property to describe the error code. +- // Requires the code field (above) to be present/not null. +- // +- // @since 3.16.0 +- CodeDescription *CodeDescription `json:"codeDescription,omitempty"` +- // A human-readable string describing the source of this +- // diagnostic, e.g. 'typescript' or 'super lint'. It usually +- // appears in the user interface. +- Source string `json:"source,omitempty"` +- // The diagnostic's message. It usually appears in the user interface +- Message string `json:"message"` +- // Additional metadata about the diagnostic. +- // +- // @since 3.15.0 +- Tags []DiagnosticTag `json:"tags,omitempty"` +- // An array of related diagnostic information, e.g. when symbol-names within +- // a scope collide all definitions can be marked via this property. +- RelatedInformation []DiagnosticRelatedInformation `json:"relatedInformation,omitempty"` +- // A data entry field that is preserved between a `textDocument/publishDiagnostics` +- // notification and `textDocument/codeAction` request. +- // +- // @since 3.16.0 +- Data *json.RawMessage `json:"data,omitempty"` +-} +- +-// Client capabilities specific to diagnostic pull requests. 
+-// +-// @since 3.17.0 +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#diagnosticClientCapabilities +-type DiagnosticClientCapabilities struct { +- // Whether implementation supports dynamic registration. If this is set to `true` +- // the client supports the new `(TextDocumentRegistrationOptions & StaticRegistrationOptions)` +- // return value for the corresponding server capability as well. +- DynamicRegistration bool `json:"dynamicRegistration,omitempty"` +- // Whether the clients supports related documents for document diagnostic pulls. +- RelatedDocumentSupport bool `json:"relatedDocumentSupport,omitempty"` +- DiagnosticsCapabilities +-} +- +-// Diagnostic options. +-// +-// @since 3.17.0 +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#diagnosticOptions +-type DiagnosticOptions struct { +- // An optional identifier under which the diagnostics are +- // managed by the client. +- Identifier string `json:"identifier,omitempty"` +- // Whether the language has inter file dependencies meaning that +- // editing code in one file can result in a different diagnostic +- // set in another file. Inter file dependencies are common for +- // most programming languages and typically uncommon for linters. +- InterFileDependencies bool `json:"interFileDependencies"` +- // The server provides support for workspace diagnostics as well. +- WorkspaceDiagnostics bool `json:"workspaceDiagnostics"` +- WorkDoneProgressOptions +-} +- +-// Diagnostic registration options. +-// +-// @since 3.17.0 +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#diagnosticRegistrationOptions +-type DiagnosticRegistrationOptions struct { +- TextDocumentRegistrationOptions +- DiagnosticOptions +- StaticRegistrationOptions +-} +- +-// Represents a related message and source code location for a diagnostic. This should be +-// used to point to code locations that cause or related to a diagnostics, e.g when duplicating +-// a symbol in a scope. +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#diagnosticRelatedInformation +-type DiagnosticRelatedInformation struct { +- // The location of this related diagnostic information. +- Location Location `json:"location"` +- // The message of this related diagnostic information. +- Message string `json:"message"` +-} +- +-// Cancellation data returned from a diagnostic request. +-// +-// @since 3.17.0 +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#diagnosticServerCancellationData +-type DiagnosticServerCancellationData struct { +- RetriggerRequest bool `json:"retriggerRequest"` +-} +- +-// The diagnostic's severity. +-type DiagnosticSeverity uint32 +- +-// The diagnostic tags. +-// +-// @since 3.15.0 +-type DiagnosticTag uint32 +- +-// Workspace client capabilities specific to diagnostic pull requests. +-// +-// @since 3.17.0 +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#diagnosticWorkspaceClientCapabilities +-type DiagnosticWorkspaceClientCapabilities struct { +- // Whether the client implementation supports a refresh request sent from +- // the server to the client. +- // +- // Note that this event is global and will force the client to refresh all +- // pulled diagnostics currently shown. 
It should be used with absolute care and +- // is useful for situation where a server for example detects a project wide +- // change that requires such a calculation. +- RefreshSupport bool `json:"refreshSupport,omitempty"` +-} +- +-// General diagnostics capabilities for pull and push model. +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#diagnosticsCapabilities +-type DiagnosticsCapabilities struct { +- // Whether the clients accepts diagnostics with related information. +- RelatedInformation bool `json:"relatedInformation,omitempty"` +- // Client supports the tag property to provide meta data about a diagnostic. +- // Clients supporting tags have to handle unknown tags gracefully. +- // +- // @since 3.15.0 +- TagSupport *ClientDiagnosticsTagOptions `json:"tagSupport,omitempty"` +- // Client supports a codeDescription property +- // +- // @since 3.16.0 +- CodeDescriptionSupport bool `json:"codeDescriptionSupport,omitempty"` +- // Whether code action supports the `data` property which is +- // preserved between a `textDocument/publishDiagnostics` and +- // `textDocument/codeAction` request. +- // +- // @since 3.16.0 +- DataSupport bool `json:"dataSupport,omitempty"` +-} +- +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#didChangeConfigurationClientCapabilities +-type DidChangeConfigurationClientCapabilities struct { +- // Did change configuration notification supports dynamic registration. +- DynamicRegistration bool `json:"dynamicRegistration,omitempty"` +-} +- +-// The parameters of a change configuration notification. +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#didChangeConfigurationParams +-type DidChangeConfigurationParams struct { +- // The actual changed settings +- Settings any `json:"settings"` +-} +- +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#didChangeConfigurationRegistrationOptions +-type DidChangeConfigurationRegistrationOptions struct { +- Section *OrPSection_workspace_didChangeConfiguration `json:"section,omitempty"` +-} +- +-// The params sent in a change notebook document notification. +-// +-// @since 3.17.0 +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#didChangeNotebookDocumentParams +-type DidChangeNotebookDocumentParams struct { +- // The notebook document that did change. The version number points +- // to the version after all provided changes have been applied. If +- // only the text document content of a cell changes the notebook version +- // doesn't necessarily have to change. +- NotebookDocument VersionedNotebookDocumentIdentifier `json:"notebookDocument"` +- // The actual changes to the notebook document. +- // +- // The changes describe single state changes to the notebook document. +- // So if there are two changes c1 (at array index 0) and c2 (at array +- // index 1) for a notebook in state S then c1 moves the notebook from +- // S to S' and c2 from S' to S''. So c1 is computed on the state S and +- // c2 is computed on the state S'. +- // +- // To mirror the content of a notebook using change events use the following approach: +- // +- // - start with the same initial content +- // - apply the 'notebookDocument/didChange' notifications in the order you receive them. +- // - apply the `NotebookChangeEvent`s in a single notification in the order +- // you receive them. 
+- Change NotebookDocumentChangeEvent `json:"change"` +-} +- +-// The change text document notification's parameters. +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#didChangeTextDocumentParams +-type DidChangeTextDocumentParams struct { +- // The document that did change. The version number points +- // to the version after all provided content changes have +- // been applied. +- TextDocument VersionedTextDocumentIdentifier `json:"textDocument"` +- // The actual content changes. The content changes describe single state changes +- // to the document. So if there are two content changes c1 (at array index 0) and +- // c2 (at array index 1) for a document in state S then c1 moves the document from +- // S to S' and c2 from S' to S''. So c1 is computed on the state S and c2 is computed +- // on the state S'. +- // +- // To mirror the content of a document using change events use the following approach: +- // +- // - start with the same initial content +- // - apply the 'textDocument/didChange' notifications in the order you receive them. +- // - apply the `TextDocumentContentChangeEvent`s in a single notification in the order +- // you receive them. +- ContentChanges []TextDocumentContentChangeEvent `json:"contentChanges"` +-} +- +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#didChangeWatchedFilesClientCapabilities +-type DidChangeWatchedFilesClientCapabilities struct { +- // Did change watched files notification supports dynamic registration. Please note +- // that the current protocol doesn't support static configuration for file changes +- // from the server side. +- DynamicRegistration bool `json:"dynamicRegistration,omitempty"` +- // Whether the client has support for {@link RelativePattern relative pattern} +- // or not. +- // +- // @since 3.17.0 +- RelativePatternSupport bool `json:"relativePatternSupport,omitempty"` +-} +- +-// The watched files change notification's parameters. +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#didChangeWatchedFilesParams +-type DidChangeWatchedFilesParams struct { +- // The actual file events. +- Changes []FileEvent `json:"changes"` +-} +- +-// Describe options to be used when registered for text document change events. +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#didChangeWatchedFilesRegistrationOptions +-type DidChangeWatchedFilesRegistrationOptions struct { +- // The watchers to register. +- Watchers []FileSystemWatcher `json:"watchers"` +-} +- +-// The parameters of a `workspace/didChangeWorkspaceFolders` notification. +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#didChangeWorkspaceFoldersParams +-type DidChangeWorkspaceFoldersParams struct { +- // The actual workspace folder change event. +- Event WorkspaceFoldersChangeEvent `json:"event"` +-} +- +-// The params sent in a close notebook document notification. +-// +-// @since 3.17.0 +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#didCloseNotebookDocumentParams +-type DidCloseNotebookDocumentParams struct { +- // The notebook document that got closed. +- NotebookDocument NotebookDocumentIdentifier `json:"notebookDocument"` +- // The text documents that represent the content +- // of a notebook cell that got closed. 
+- CellTextDocuments []TextDocumentIdentifier `json:"cellTextDocuments"` +-} +- +-// The parameters sent in a close text document notification +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#didCloseTextDocumentParams +-type DidCloseTextDocumentParams struct { +- // The document that was closed. +- TextDocument TextDocumentIdentifier `json:"textDocument"` +-} +- +-// The params sent in an open notebook document notification. +-// +-// @since 3.17.0 +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#didOpenNotebookDocumentParams +-type DidOpenNotebookDocumentParams struct { +- // The notebook document that got opened. +- NotebookDocument NotebookDocument `json:"notebookDocument"` +- // The text documents that represent the content +- // of a notebook cell. +- CellTextDocuments []TextDocumentItem `json:"cellTextDocuments"` +-} +- +-// The parameters sent in an open text document notification +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#didOpenTextDocumentParams +-type DidOpenTextDocumentParams struct { +- // The document that was opened. +- TextDocument TextDocumentItem `json:"textDocument"` +-} +- +-// The params sent in a save notebook document notification. +-// +-// @since 3.17.0 +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#didSaveNotebookDocumentParams +-type DidSaveNotebookDocumentParams struct { +- // The notebook document that got saved. +- NotebookDocument NotebookDocumentIdentifier `json:"notebookDocument"` +-} +- +-// The parameters sent in a save text document notification +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#didSaveTextDocumentParams +-type DidSaveTextDocumentParams struct { +- // The document that was saved. +- TextDocument TextDocumentIdentifier `json:"textDocument"` +- // Optional the content when saved. Depends on the includeText value +- // when the save notification was requested. +- Text *string `json:"text,omitempty"` +-} +- +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#documentColorClientCapabilities +-type DocumentColorClientCapabilities struct { +- // Whether implementation supports dynamic registration. If this is set to `true` +- // the client supports the new `DocumentColorRegistrationOptions` return value +- // for the corresponding server capability as well. +- DynamicRegistration bool `json:"dynamicRegistration,omitempty"` +-} +- +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#documentColorOptions +-type DocumentColorOptions struct { +- WorkDoneProgressOptions +-} +- +-// Parameters for a {@link DocumentColorRequest}. +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#documentColorParams +-type DocumentColorParams struct { +- // The text document. +- TextDocument TextDocumentIdentifier `json:"textDocument"` +- WorkDoneProgressParams +- PartialResultParams +-} +- +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#documentColorRegistrationOptions +-type DocumentColorRegistrationOptions struct { +- TextDocumentRegistrationOptions +- DocumentColorOptions +- StaticRegistrationOptions +-} +- +-// Parameters of the document diagnostic request. 
+-// +-// @since 3.17.0 +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#documentDiagnosticParams +-type DocumentDiagnosticParams struct { +- // The text document. +- TextDocument TextDocumentIdentifier `json:"textDocument"` +- // The additional identifier provided during registration. +- Identifier string `json:"identifier,omitempty"` +- // The result id of a previous response if provided. +- PreviousResultID string `json:"previousResultId,omitempty"` +- WorkDoneProgressParams +- PartialResultParams +-} +- +-// The result of a document diagnostic pull request. A report can +-// either be a full report containing all diagnostics for the +-// requested document or an unchanged report indicating that nothing +-// has changed in terms of diagnostics in comparison to the last +-// pull request. +-// +-// @since 3.17.0 +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#documentDiagnosticReport +-type DocumentDiagnosticReport = Or_DocumentDiagnosticReport // (alias) +-// The document diagnostic report kinds. +-// +-// @since 3.17.0 +-type DocumentDiagnosticReportKind string +- +-// A partial result for a document diagnostic report. +-// +-// @since 3.17.0 +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#documentDiagnosticReportPartialResult +-type DocumentDiagnosticReportPartialResult struct { +- RelatedDocuments map[DocumentURI]any `json:"relatedDocuments"` +-} +- +-// A document filter describes a top level text document or +-// a notebook cell document. +-// +-// @since 3.17.0 - support for NotebookCellTextDocumentFilter. +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#documentFilter +-type DocumentFilter = Or_DocumentFilter // (alias) +-// Client capabilities of a {@link DocumentFormattingRequest}. +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#documentFormattingClientCapabilities +-type DocumentFormattingClientCapabilities struct { +- // Whether formatting supports dynamic registration. +- DynamicRegistration bool `json:"dynamicRegistration,omitempty"` +-} +- +-// Provider options for a {@link DocumentFormattingRequest}. +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#documentFormattingOptions +-type DocumentFormattingOptions struct { +- WorkDoneProgressOptions +-} +- +-// The parameters of a {@link DocumentFormattingRequest}. +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#documentFormattingParams +-type DocumentFormattingParams struct { +- // The document to format. +- TextDocument TextDocumentIdentifier `json:"textDocument"` +- // The format options. +- Options FormattingOptions `json:"options"` +- WorkDoneProgressParams +-} +- +-// Registration options for a {@link DocumentFormattingRequest}. +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#documentFormattingRegistrationOptions +-type DocumentFormattingRegistrationOptions struct { +- TextDocumentRegistrationOptions +- DocumentFormattingOptions +-} +- +-// A document highlight is a range inside a text document which deserves +-// special attention. Usually a document highlight is visualized by changing +-// the background color of its range. 
+-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#documentHighlight +-type DocumentHighlight struct { +- // The range this highlight applies to. +- Range Range `json:"range"` +- // The highlight kind, default is {@link DocumentHighlightKind.Text text}. +- Kind DocumentHighlightKind `json:"kind,omitempty"` +-} +- +-// Client Capabilities for a {@link DocumentHighlightRequest}. +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#documentHighlightClientCapabilities +-type DocumentHighlightClientCapabilities struct { +- // Whether document highlight supports dynamic registration. +- DynamicRegistration bool `json:"dynamicRegistration,omitempty"` +-} +- +-// A document highlight kind. +-type DocumentHighlightKind uint32 +- +-// Provider options for a {@link DocumentHighlightRequest}. +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#documentHighlightOptions +-type DocumentHighlightOptions struct { +- WorkDoneProgressOptions +-} +- +-// Parameters for a {@link DocumentHighlightRequest}. +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#documentHighlightParams +-type DocumentHighlightParams struct { +- TextDocumentPositionParams +- WorkDoneProgressParams +- PartialResultParams +-} +- +-// Registration options for a {@link DocumentHighlightRequest}. +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#documentHighlightRegistrationOptions +-type DocumentHighlightRegistrationOptions struct { +- TextDocumentRegistrationOptions +- DocumentHighlightOptions +-} +- +-// A document link is a range in a text document that links to an internal or external resource, like another +-// text document or a web site. +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#documentLink +-type DocumentLink struct { +- // The range this link applies to. +- Range Range `json:"range"` +- // The uri this link points to. If missing a resolve request is sent later. +- Target *URI `json:"target,omitempty"` +- // The tooltip text when you hover over this link. +- // +- // If a tooltip is provided, is will be displayed in a string that includes instructions on how to +- // trigger the link, such as `{0} (ctrl + click)`. The specific instructions vary depending on OS, +- // user settings, and localization. +- // +- // @since 3.15.0 +- Tooltip string `json:"tooltip,omitempty"` +- // A data entry field that is preserved on a document link between a +- // DocumentLinkRequest and a DocumentLinkResolveRequest. +- Data any `json:"data,omitempty"` +-} +- +-// The client capabilities of a {@link DocumentLinkRequest}. +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#documentLinkClientCapabilities +-type DocumentLinkClientCapabilities struct { +- // Whether document link supports dynamic registration. +- DynamicRegistration bool `json:"dynamicRegistration,omitempty"` +- // Whether the client supports the `tooltip` property on `DocumentLink`. +- // +- // @since 3.15.0 +- TooltipSupport bool `json:"tooltipSupport,omitempty"` +-} +- +-// Provider options for a {@link DocumentLinkRequest}. 
+-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#documentLinkOptions +-type DocumentLinkOptions struct { +- // Document links have a resolve provider as well. +- ResolveProvider bool `json:"resolveProvider,omitempty"` +- WorkDoneProgressOptions +-} +- +-// The parameters of a {@link DocumentLinkRequest}. +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#documentLinkParams +-type DocumentLinkParams struct { +- // The document to provide document links for. +- TextDocument TextDocumentIdentifier `json:"textDocument"` +- WorkDoneProgressParams +- PartialResultParams +-} +- +-// Registration options for a {@link DocumentLinkRequest}. +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#documentLinkRegistrationOptions +-type DocumentLinkRegistrationOptions struct { +- TextDocumentRegistrationOptions +- DocumentLinkOptions +-} +- +-// Client capabilities of a {@link DocumentOnTypeFormattingRequest}. +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#documentOnTypeFormattingClientCapabilities +-type DocumentOnTypeFormattingClientCapabilities struct { +- // Whether on type formatting supports dynamic registration. +- DynamicRegistration bool `json:"dynamicRegistration,omitempty"` +-} +- +-// Provider options for a {@link DocumentOnTypeFormattingRequest}. +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#documentOnTypeFormattingOptions +-type DocumentOnTypeFormattingOptions struct { +- // A character on which formatting should be triggered, like `{`. +- FirstTriggerCharacter string `json:"firstTriggerCharacter"` +- // More trigger characters. +- MoreTriggerCharacter []string `json:"moreTriggerCharacter,omitempty"` +-} +- +-// The parameters of a {@link DocumentOnTypeFormattingRequest}. +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#documentOnTypeFormattingParams +-type DocumentOnTypeFormattingParams struct { +- // The document to format. +- TextDocument TextDocumentIdentifier `json:"textDocument"` +- // The position around which the on type formatting should happen. +- // This is not necessarily the exact position where the character denoted +- // by the property `ch` got typed. +- Position Position `json:"position"` +- // The character that has been typed that triggered the formatting +- // on type request. That is not necessarily the last character that +- // got inserted into the document since the client could auto insert +- // characters as well (e.g. like automatic brace completion). +- Ch string `json:"ch"` +- // The formatting options. +- Options FormattingOptions `json:"options"` +-} +- +-// Registration options for a {@link DocumentOnTypeFormattingRequest}. +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#documentOnTypeFormattingRegistrationOptions +-type DocumentOnTypeFormattingRegistrationOptions struct { +- TextDocumentRegistrationOptions +- DocumentOnTypeFormattingOptions +-} +- +-// Client capabilities of a {@link DocumentRangeFormattingRequest}. 
+-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#documentRangeFormattingClientCapabilities +-type DocumentRangeFormattingClientCapabilities struct { +- // Whether range formatting supports dynamic registration. +- DynamicRegistration bool `json:"dynamicRegistration,omitempty"` +- // Whether the client supports formatting multiple ranges at once. +- // +- // @since 3.18.0 +- // @proposed +- RangesSupport bool `json:"rangesSupport,omitempty"` +-} +- +-// Provider options for a {@link DocumentRangeFormattingRequest}. +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#documentRangeFormattingOptions +-type DocumentRangeFormattingOptions struct { +- // Whether the server supports formatting multiple ranges at once. +- // +- // @since 3.18.0 +- // @proposed +- RangesSupport bool `json:"rangesSupport,omitempty"` +- WorkDoneProgressOptions +-} +- +-// The parameters of a {@link DocumentRangeFormattingRequest}. +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#documentRangeFormattingParams +-type DocumentRangeFormattingParams struct { +- // The document to format. +- TextDocument TextDocumentIdentifier `json:"textDocument"` +- // The range to format +- Range Range `json:"range"` +- // The format options +- Options FormattingOptions `json:"options"` +- WorkDoneProgressParams +-} +- +-// Registration options for a {@link DocumentRangeFormattingRequest}. +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#documentRangeFormattingRegistrationOptions +-type DocumentRangeFormattingRegistrationOptions struct { +- TextDocumentRegistrationOptions +- DocumentRangeFormattingOptions +-} +- +-// The parameters of a {@link DocumentRangesFormattingRequest}. +-// +-// @since 3.18.0 +-// @proposed +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#documentRangesFormattingParams +-type DocumentRangesFormattingParams struct { +- // The document to format. +- TextDocument TextDocumentIdentifier `json:"textDocument"` +- // The ranges to format +- Ranges []Range `json:"ranges"` +- // The format options +- Options FormattingOptions `json:"options"` +- WorkDoneProgressParams +-} +- +-// A document selector is the combination of one or many document filters. +-// +-// @sample `let sel:DocumentSelector = [{ language: 'typescript' }, { language: 'json', pattern: '**∕tsconfig.json' }]`; +-// +-// The use of a string as a document filter is deprecated @since 3.16.0. +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#documentSelector +-type DocumentSelector = []DocumentFilter // (alias) +-// Represents programming constructs like variables, classes, interfaces etc. +-// that appear in a document. Document symbols can be hierarchical and they +-// have two ranges: one that encloses its definition and one that points to +-// its most interesting range, e.g. the range of an identifier. +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#documentSymbol +-type DocumentSymbol struct { +- // The name of this symbol. Will be displayed in the user interface and therefore must not be +- // an empty string or a string only consisting of white spaces. +- Name string `json:"name"` +- // More detail for this symbol, e.g the signature of a function. 
+- Detail string `json:"detail,omitempty"` +- // The kind of this symbol. +- Kind SymbolKind `json:"kind"` +- // Tags for this document symbol. +- // +- // @since 3.16.0 +- Tags []SymbolTag `json:"tags,omitempty"` +- // Indicates if this symbol is deprecated. +- // +- // @deprecated Use tags instead +- Deprecated bool `json:"deprecated,omitempty"` +- // The range enclosing this symbol not including leading/trailing whitespace but everything else +- // like comments. This information is typically used to determine if the clients cursor is +- // inside the symbol to reveal in the symbol in the UI. +- Range Range `json:"range"` +- // The range that should be selected and revealed when this symbol is being picked, e.g the name of a function. +- // Must be contained by the `range`. +- SelectionRange Range `json:"selectionRange"` +- // Children of this symbol, e.g. properties of a class. +- Children []DocumentSymbol `json:"children,omitempty"` +-} +- +-// Client Capabilities for a {@link DocumentSymbolRequest}. +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#documentSymbolClientCapabilities +-type DocumentSymbolClientCapabilities struct { +- // Whether document symbol supports dynamic registration. +- DynamicRegistration bool `json:"dynamicRegistration,omitempty"` +- // Specific capabilities for the `SymbolKind` in the +- // `textDocument/documentSymbol` request. +- SymbolKind *ClientSymbolKindOptions `json:"symbolKind,omitempty"` +- // The client supports hierarchical document symbols. +- HierarchicalDocumentSymbolSupport bool `json:"hierarchicalDocumentSymbolSupport,omitempty"` +- // The client supports tags on `SymbolInformation`. Tags are supported on +- // `DocumentSymbol` if `hierarchicalDocumentSymbolSupport` is set to true. +- // Clients supporting tags have to handle unknown tags gracefully. +- // +- // @since 3.16.0 +- TagSupport *ClientSymbolTagOptions `json:"tagSupport,omitempty"` +- // The client supports an additional label presented in the UI when +- // registering a document symbol provider. +- // +- // @since 3.16.0 +- LabelSupport bool `json:"labelSupport,omitempty"` +-} +- +-// Provider options for a {@link DocumentSymbolRequest}. +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#documentSymbolOptions +-type DocumentSymbolOptions struct { +- // A human-readable string that is shown when multiple outlines trees +- // are shown for the same document. +- // +- // @since 3.16.0 +- Label string `json:"label,omitempty"` +- WorkDoneProgressOptions +-} +- +-// Parameters for a {@link DocumentSymbolRequest}. +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#documentSymbolParams +-type DocumentSymbolParams struct { +- // The text document. +- TextDocument TextDocumentIdentifier `json:"textDocument"` +- WorkDoneProgressParams +- PartialResultParams +-} +- +-// Registration options for a {@link DocumentSymbolRequest}. +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#documentSymbolRegistrationOptions +-type DocumentSymbolRegistrationOptions struct { +- TextDocumentRegistrationOptions +- DocumentSymbolOptions +-} +- +-// Edit range variant that includes ranges for insert and replace operations. 
+-// +-// @since 3.18.0 +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#editRangeWithInsertReplace +-type EditRangeWithInsertReplace struct { +- Insert Range `json:"insert"` +- Replace Range `json:"replace"` +-} +- +-// Predefined error codes. +-type ErrorCodes int32 +- +-// The client capabilities of a {@link ExecuteCommandRequest}. +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#executeCommandClientCapabilities +-type ExecuteCommandClientCapabilities struct { +- // Execute command supports dynamic registration. +- DynamicRegistration bool `json:"dynamicRegistration,omitempty"` +-} +- +-// The server capabilities of a {@link ExecuteCommandRequest}. +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#executeCommandOptions +-type ExecuteCommandOptions struct { +- // The commands to be executed on the server +- Commands []string `json:"commands"` +- WorkDoneProgressOptions +-} +- +-// The parameters of a {@link ExecuteCommandRequest}. +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#executeCommandParams +-type ExecuteCommandParams struct { +- // The identifier of the actual command handler. +- Command string `json:"command"` +- // Arguments that the command should be invoked with. +- Arguments []json.RawMessage `json:"arguments,omitempty"` +- WorkDoneProgressParams +-} +- +-// Registration options for a {@link ExecuteCommandRequest}. +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#executeCommandRegistrationOptions +-type ExecuteCommandRegistrationOptions struct { +- ExecuteCommandOptions +-} +- +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#executionSummary +-type ExecutionSummary struct { +- // A strict monotonically increasing value +- // indicating the execution order of a cell +- // inside a notebook. +- ExecutionOrder uint32 `json:"executionOrder"` +- // Whether the execution was successful or +- // not if known by the client. +- Success bool `json:"success,omitempty"` +-} +-type FailureHandlingKind string +- +-// The file event type +-type FileChangeType uint32 +- +-// Represents information on a file/folder create. +-// +-// @since 3.16.0 +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#fileCreate +-type FileCreate struct { +- // A file:// URI for the location of the file/folder being created. +- URI string `json:"uri"` +-} +- +-// Represents information on a file/folder delete. +-// +-// @since 3.16.0 +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#fileDelete +-type FileDelete struct { +- // A file:// URI for the location of the file/folder being deleted. +- URI string `json:"uri"` +-} +- +-// An event describing a file change. +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#fileEvent +-type FileEvent struct { +- // The file's uri. +- URI DocumentURI `json:"uri"` +- // The change type. +- Type FileChangeType `json:"type"` +-} +- +-// Capabilities relating to events from file operations by the user in the client. +-// +-// These events do not come from the file system, they come from user operations +-// like renaming a file in the UI. 
+-// +-// @since 3.16.0 +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#fileOperationClientCapabilities +-type FileOperationClientCapabilities struct { +- // Whether the client supports dynamic registration for file requests/notifications. +- DynamicRegistration bool `json:"dynamicRegistration,omitempty"` +- // The client has support for sending didCreateFiles notifications. +- DidCreate bool `json:"didCreate,omitempty"` +- // The client has support for sending willCreateFiles requests. +- WillCreate bool `json:"willCreate,omitempty"` +- // The client has support for sending didRenameFiles notifications. +- DidRename bool `json:"didRename,omitempty"` +- // The client has support for sending willRenameFiles requests. +- WillRename bool `json:"willRename,omitempty"` +- // The client has support for sending didDeleteFiles notifications. +- DidDelete bool `json:"didDelete,omitempty"` +- // The client has support for sending willDeleteFiles requests. +- WillDelete bool `json:"willDelete,omitempty"` +-} +- +-// A filter to describe in which file operation requests or notifications +-// the server is interested in receiving. +-// +-// @since 3.16.0 +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#fileOperationFilter +-type FileOperationFilter struct { +- // A Uri scheme like `file` or `untitled`. +- Scheme string `json:"scheme,omitempty"` +- // The actual file operation pattern. +- Pattern FileOperationPattern `json:"pattern"` +-} +- +-// Options for notifications/requests for user operations on files. +-// +-// @since 3.16.0 +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#fileOperationOptions +-type FileOperationOptions struct { +- // The server is interested in receiving didCreateFiles notifications. +- DidCreate *FileOperationRegistrationOptions `json:"didCreate,omitempty"` +- // The server is interested in receiving willCreateFiles requests. +- WillCreate *FileOperationRegistrationOptions `json:"willCreate,omitempty"` +- // The server is interested in receiving didRenameFiles notifications. +- DidRename *FileOperationRegistrationOptions `json:"didRename,omitempty"` +- // The server is interested in receiving willRenameFiles requests. +- WillRename *FileOperationRegistrationOptions `json:"willRename,omitempty"` +- // The server is interested in receiving didDeleteFiles file notifications. +- DidDelete *FileOperationRegistrationOptions `json:"didDelete,omitempty"` +- // The server is interested in receiving willDeleteFiles file requests. +- WillDelete *FileOperationRegistrationOptions `json:"willDelete,omitempty"` +-} +- +-// A pattern to describe in which file operation requests or notifications +-// the server is interested in receiving. +-// +-// @since 3.16.0 +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#fileOperationPattern +-type FileOperationPattern struct { +- // The glob pattern to match. Glob patterns can have the following syntax: +- // +- // - `*` to match one or more characters in a path segment +- // - `?` to match on one character in a path segment +- // - `**` to match any number of path segments, including none +- // - `{}` to group sub patterns into an OR expression. (e.g. 
`**​/*.{ts,js}` matches all TypeScript and JavaScript files) +- // - `[]` to declare a range of characters to match in a path segment (e.g., `example.[0-9]` to match on `example.0`, `example.1`, …) +- // - `[!...]` to negate a range of characters to match in a path segment (e.g., `example.[!0-9]` to match on `example.a`, `example.b`, but not `example.0`) +- Glob string `json:"glob"` +- // Whether to match files or folders with this pattern. +- // +- // Matches both if undefined. +- Matches *FileOperationPatternKind `json:"matches,omitempty"` +- // Additional options used during matching. +- Options *FileOperationPatternOptions `json:"options,omitempty"` +-} +- +-// A pattern kind describing if a glob pattern matches a file a folder or +-// both. +-// +-// @since 3.16.0 +-type FileOperationPatternKind string +- +-// Matching options for the file operation pattern. +-// +-// @since 3.16.0 +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#fileOperationPatternOptions +-type FileOperationPatternOptions struct { +- // The pattern should be matched ignoring casing. +- IgnoreCase bool `json:"ignoreCase,omitempty"` +-} +- +-// The options to register for file operations. +-// +-// @since 3.16.0 +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#fileOperationRegistrationOptions +-type FileOperationRegistrationOptions struct { +- // The actual filters. +- Filters []FileOperationFilter `json:"filters"` +-} +- +-// Represents information on a file/folder rename. +-// +-// @since 3.16.0 +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#fileRename +-type FileRename struct { +- // A file:// URI for the original location of the file/folder being renamed. +- OldURI string `json:"oldUri"` +- // A file:// URI for the new location of the file/folder being renamed. +- NewURI string `json:"newUri"` +-} +- +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#fileSystemWatcher +-type FileSystemWatcher struct { +- // The glob pattern to watch. See {@link GlobPattern glob pattern} for more detail. +- // +- // @since 3.17.0 support for relative patterns. +- GlobPattern GlobPattern `json:"globPattern"` +- // The kind of events of interest. If omitted it defaults +- // to WatchKind.Create | WatchKind.Change | WatchKind.Delete +- // which is 7. +- Kind *WatchKind `json:"kind,omitempty"` +-} +- +-// Represents a folding range. To be valid, start and end line must be bigger than zero and smaller +-// than the number of lines in the document. Clients are free to ignore invalid ranges. +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#foldingRange +-type FoldingRange struct { +- // The zero-based start line of the range to fold. The folded area starts after the line's last character. +- // To be valid, the end must be zero or larger and smaller than the number of lines in the document. +- StartLine *uint32 `json:"startLine,omitempty"` +- // The zero-based character offset from where the folded range starts. If not defined, defaults to the length of the start line. +- StartCharacter *uint32 `json:"startCharacter,omitempty"` +- // The zero-based end line of the range to fold. The folded area ends with the line's last character. +- // To be valid, the end must be zero or larger and smaller than the number of lines in the document. 
+- EndLine *uint32 `json:"endLine,omitempty"` +- // The zero-based character offset before the folded range ends. If not defined, defaults to the length of the end line. +- EndCharacter *uint32 `json:"endCharacter,omitempty"` +- // Describes the kind of the folding range such as 'comment' or 'region'. The kind +- // is used to categorize folding ranges and used by commands like 'Fold all comments'. +- // See {@link FoldingRangeKind} for an enumeration of standardized kinds. +- Kind string `json:"kind,omitempty"` +- // The text that the client should show when the specified range is +- // collapsed. If not defined or not supported by the client, a default +- // will be chosen by the client. +- // +- // @since 3.17.0 +- CollapsedText string `json:"collapsedText,omitempty"` +-} +- +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#foldingRangeClientCapabilities +-type FoldingRangeClientCapabilities struct { +- // Whether implementation supports dynamic registration for folding range +- // providers. If this is set to `true` the client supports the new +- // `FoldingRangeRegistrationOptions` return value for the corresponding +- // server capability as well. +- DynamicRegistration bool `json:"dynamicRegistration,omitempty"` +- // The maximum number of folding ranges that the client prefers to receive +- // per document. The value serves as a hint, servers are free to follow the +- // limit. +- RangeLimit uint32 `json:"rangeLimit"` +- // If set, the client signals that it only supports folding complete lines. +- // If set, client will ignore specified `startCharacter` and `endCharacter` +- // properties in a FoldingRange. +- LineFoldingOnly bool `json:"lineFoldingOnly,omitempty"` +- // Specific options for the folding range kind. +- // +- // @since 3.17.0 +- FoldingRangeKind *ClientFoldingRangeKindOptions `json:"foldingRangeKind,omitempty"` +- // Specific options for the folding range. +- // +- // @since 3.17.0 +- FoldingRange *ClientFoldingRangeOptions `json:"foldingRange,omitempty"` +-} +- +-// A set of predefined range kinds. +-type FoldingRangeKind string +- +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#foldingRangeOptions +-type FoldingRangeOptions struct { +- WorkDoneProgressOptions +-} +- +-// Parameters for a {@link FoldingRangeRequest}. +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#foldingRangeParams +-type FoldingRangeParams struct { +- // The text document. +- TextDocument TextDocumentIdentifier `json:"textDocument"` +- WorkDoneProgressParams +- PartialResultParams +-} +- +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#foldingRangeRegistrationOptions +-type FoldingRangeRegistrationOptions struct { +- TextDocumentRegistrationOptions +- FoldingRangeOptions +- StaticRegistrationOptions +-} +- +-// Client workspace capabilities specific to folding ranges +-// +-// @since 3.18.0 +-// @proposed +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#foldingRangeWorkspaceClientCapabilities +-type FoldingRangeWorkspaceClientCapabilities struct { +- // Whether the client implementation supports a refresh request sent from the +- // server to the client. +- // +- // Note that this event is global and will force the client to refresh all +- // folding ranges currently shown. 
It should be used with absolute care and is +- // useful for situation where a server for example detects a project wide +- // change that requires such a calculation. +- // +- // @since 3.18.0 +- // @proposed +- RefreshSupport bool `json:"refreshSupport,omitempty"` +-} +- +-// Value-object describing what options formatting should use. +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#formattingOptions +-type FormattingOptions struct { +- // Size of a tab in spaces. +- TabSize uint32 `json:"tabSize"` +- // Prefer spaces over tabs. +- InsertSpaces bool `json:"insertSpaces"` +- // Trim trailing whitespace on a line. +- // +- // @since 3.15.0 +- TrimTrailingWhitespace bool `json:"trimTrailingWhitespace,omitempty"` +- // Insert a newline character at the end of the file if one does not exist. +- // +- // @since 3.15.0 +- InsertFinalNewline bool `json:"insertFinalNewline,omitempty"` +- // Trim all newlines after the final newline at the end of the file. +- // +- // @since 3.15.0 +- TrimFinalNewlines bool `json:"trimFinalNewlines,omitempty"` +-} +- +-// A diagnostic report with a full set of problems. +-// +-// @since 3.17.0 +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#fullDocumentDiagnosticReport +-type FullDocumentDiagnosticReport struct { +- // A full document diagnostic report. +- Kind string `json:"kind"` +- // An optional result id. If provided it will +- // be sent on the next diagnostic request for the +- // same document. +- ResultID string `json:"resultId,omitempty"` +- // The actual items. +- Items []Diagnostic `json:"items"` +-} +- +-// General client capabilities. +-// +-// @since 3.16.0 +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#generalClientCapabilities +-type GeneralClientCapabilities struct { +- // Client capability that signals how the client +- // handles stale requests (e.g. a request +- // for which the client will not process the response +- // anymore since the information is outdated). +- // +- // @since 3.17.0 +- StaleRequestSupport *StaleRequestSupportOptions `json:"staleRequestSupport,omitempty"` +- // Client capabilities specific to regular expressions. +- // +- // @since 3.16.0 +- RegularExpressions *RegularExpressionsClientCapabilities `json:"regularExpressions,omitempty"` +- // Client capabilities specific to the client's markdown parser. +- // +- // @since 3.16.0 +- Markdown *MarkdownClientCapabilities `json:"markdown,omitempty"` +- // The position encodings supported by the client. Client and server +- // have to agree on the same position encoding to ensure that offsets +- // (e.g. character position in a line) are interpreted the same on both +- // sides. +- // +- // To keep the protocol backwards compatible the following applies: if +- // the value 'utf-16' is missing from the array of position encodings +- // servers can assume that the client supports UTF-16. UTF-16 is +- // therefore a mandatory encoding. +- // +- // If omitted it defaults to ['utf-16']. +- // +- // Implementation considerations: since the conversion from one encoding +- // into another requires the content of the file / line the conversion +- // is best done where the file is read which is usually on the server +- // side. +- // +- // @since 3.17.0 +- PositionEncodings []PositionEncodingKind `json:"positionEncodings,omitempty"` +-} +- +-// The glob pattern. Either a string pattern or a relative pattern. 
+-// +-// @since 3.17.0 +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#globPattern +-type GlobPattern = Or_GlobPattern // (alias) +-// The result of a hover request. +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#hover +-type Hover struct { +- // The hover's content +- Contents MarkupContent `json:"contents"` +- // An optional range inside the text document that is used to +- // visualize the hover, e.g. by changing the background color. +- Range Range `json:"range,omitempty"` +-} +- +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#hoverClientCapabilities +-type HoverClientCapabilities struct { +- // Whether hover supports dynamic registration. +- DynamicRegistration bool `json:"dynamicRegistration,omitempty"` +- // Client supports the following content formats for the content +- // property. The order describes the preferred format of the client. +- ContentFormat []MarkupKind `json:"contentFormat,omitempty"` +-} +- +-// Hover options. +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#hoverOptions +-type HoverOptions struct { +- WorkDoneProgressOptions +-} +- +-// Parameters for a {@link HoverRequest}. +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#hoverParams +-type HoverParams struct { +- TextDocumentPositionParams +- WorkDoneProgressParams +-} +- +-// Registration options for a {@link HoverRequest}. +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#hoverRegistrationOptions +-type HoverRegistrationOptions struct { +- TextDocumentRegistrationOptions +- HoverOptions +-} +- +-// @since 3.6.0 +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#implementationClientCapabilities +-type ImplementationClientCapabilities struct { +- // Whether implementation supports dynamic registration. If this is set to `true` +- // the client supports the new `ImplementationRegistrationOptions` return value +- // for the corresponding server capability as well. +- DynamicRegistration bool `json:"dynamicRegistration,omitempty"` +- // The client supports additional metadata in the form of definition links. +- // +- // @since 3.14.0 +- LinkSupport bool `json:"linkSupport,omitempty"` +-} +- +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#implementationOptions +-type ImplementationOptions struct { +- WorkDoneProgressOptions +-} +- +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#implementationParams +-type ImplementationParams struct { +- TextDocumentPositionParams +- WorkDoneProgressParams +- PartialResultParams +-} +- +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#implementationRegistrationOptions +-type ImplementationRegistrationOptions struct { +- TextDocumentRegistrationOptions +- ImplementationOptions +- StaticRegistrationOptions +-} +- +-// The data type of the ResponseError if the +-// initialize request fails. 
+-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#initializeError +-type InitializeError struct { +- // Indicates whether the client execute the following retry logic: +- // (1) show the message provided by the ResponseError to the user +- // (2) user selects retry or cancel +- // (3) if user selected retry the initialize method is sent again. +- Retry bool `json:"retry"` +-} +- +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#initializeParams +-type InitializeParams struct { +- XInitializeParams +- WorkspaceFoldersInitializeParams +-} +- +-// The result returned from an initialize request. +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#initializeResult +-type InitializeResult struct { +- // The capabilities the language server provides. +- Capabilities ServerCapabilities `json:"capabilities"` +- // Information about the server. +- // +- // @since 3.15.0 +- ServerInfo *ServerInfo `json:"serverInfo,omitempty"` +-} +- +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#initializedParams +-type InitializedParams struct { +-} +- +-// Inlay hint information. +-// +-// @since 3.17.0 +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#inlayHint +-type InlayHint struct { +- // The position of this hint. +- // +- // If multiple hints have the same position, they will be shown in the order +- // they appear in the response. +- Position Position `json:"position"` +- // The label of this hint. A human readable string or an array of +- // InlayHintLabelPart label parts. +- // +- // *Note* that neither the string nor the label part can be empty. +- Label []InlayHintLabelPart `json:"label"` +- // The kind of this hint. Can be omitted in which case the client +- // should fall back to a reasonable default. +- Kind InlayHintKind `json:"kind,omitempty"` +- // Optional text edits that are performed when accepting this inlay hint. +- // +- // *Note* that edits are expected to change the document so that the inlay +- // hint (or its nearest variant) is now part of the document and the inlay +- // hint itself is now obsolete. +- TextEdits []TextEdit `json:"textEdits,omitempty"` +- // The tooltip text when you hover over this item. +- Tooltip *OrPTooltip_textDocument_inlayHint `json:"tooltip,omitempty"` +- // Render padding before the hint. +- // +- // Note: Padding should use the editor's background color, not the +- // background color of the hint itself. That means padding can be used +- // to visually align/separate an inlay hint. +- PaddingLeft bool `json:"paddingLeft,omitempty"` +- // Render padding after the hint. +- // +- // Note: Padding should use the editor's background color, not the +- // background color of the hint itself. That means padding can be used +- // to visually align/separate an inlay hint. +- PaddingRight bool `json:"paddingRight,omitempty"` +- // A data entry field that is preserved on an inlay hint between +- // a `textDocument/inlayHint` and a `inlayHint/resolve` request. +- Data any `json:"data,omitempty"` +-} +- +-// Inlay hint client capabilities. +-// +-// @since 3.17.0 +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#inlayHintClientCapabilities +-type InlayHintClientCapabilities struct { +- // Whether inlay hints support dynamic registration. 
+- DynamicRegistration bool `json:"dynamicRegistration,omitempty"` +- // Indicates which properties a client can resolve lazily on an inlay +- // hint. +- ResolveSupport *ClientInlayHintResolveOptions `json:"resolveSupport,omitempty"` +-} +- +-// Inlay hint kinds. +-// +-// @since 3.17.0 +-type InlayHintKind uint32 +- +-// An inlay hint label part allows for interactive and composite labels +-// of inlay hints. +-// +-// @since 3.17.0 +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#inlayHintLabelPart +-type InlayHintLabelPart struct { +- // The value of this label part. +- Value string `json:"value"` +- // The tooltip text when you hover over this label part. Depending on +- // the client capability `inlayHint.resolveSupport` clients might resolve +- // this property late using the resolve request. +- Tooltip *OrPTooltipPLabel `json:"tooltip,omitempty"` +- // An optional source code location that represents this +- // label part. +- // +- // The editor will use this location for the hover and for code navigation +- // features: This part will become a clickable link that resolves to the +- // definition of the symbol at the given location (not necessarily the +- // location itself), it shows the hover that shows at the given location, +- // and it shows a context menu with further code navigation commands. +- // +- // Depending on the client capability `inlayHint.resolveSupport` clients +- // might resolve this property late using the resolve request. +- Location *Location `json:"location,omitempty"` +- // An optional command for this label part. +- // +- // Depending on the client capability `inlayHint.resolveSupport` clients +- // might resolve this property late using the resolve request. +- Command *Command `json:"command,omitempty"` +-} +- +-// Inlay hint options used during static registration. +-// +-// @since 3.17.0 +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#inlayHintOptions +-type InlayHintOptions struct { +- // The server provides support to resolve additional +- // information for an inlay hint item. +- ResolveProvider bool `json:"resolveProvider,omitempty"` +- WorkDoneProgressOptions +-} +- +-// A parameter literal used in inlay hint requests. +-// +-// @since 3.17.0 +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#inlayHintParams +-type InlayHintParams struct { +- // The text document. +- TextDocument TextDocumentIdentifier `json:"textDocument"` +- // The document range for which inlay hints should be computed. +- Range Range `json:"range"` +- WorkDoneProgressParams +-} +- +-// Inlay hint options used during static or dynamic registration. +-// +-// @since 3.17.0 +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#inlayHintRegistrationOptions +-type InlayHintRegistrationOptions struct { +- InlayHintOptions +- TextDocumentRegistrationOptions +- StaticRegistrationOptions +-} +- +-// Client workspace capabilities specific to inlay hints. +-// +-// @since 3.17.0 +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#inlayHintWorkspaceClientCapabilities +-type InlayHintWorkspaceClientCapabilities struct { +- // Whether the client implementation supports a refresh request sent from +- // the server to the client. 
+- // +- // Note that this event is global and will force the client to refresh all +- // inlay hints currently shown. It should be used with absolute care and +- // is useful for situation where a server for example detects a project wide +- // change that requires such a calculation. +- RefreshSupport bool `json:"refreshSupport,omitempty"` +-} +- +-// Client capabilities specific to inline completions. +-// +-// @since 3.18.0 +-// @proposed +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#inlineCompletionClientCapabilities +-type InlineCompletionClientCapabilities struct { +- // Whether implementation supports dynamic registration for inline completion providers. +- DynamicRegistration bool `json:"dynamicRegistration,omitempty"` +-} +- +-// Provides information about the context in which an inline completion was requested. +-// +-// @since 3.18.0 +-// @proposed +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#inlineCompletionContext +-type InlineCompletionContext struct { +- // Describes how the inline completion was triggered. +- TriggerKind InlineCompletionTriggerKind `json:"triggerKind"` +- // Provides information about the currently selected item in the autocomplete widget if it is visible. +- SelectedCompletionInfo *SelectedCompletionInfo `json:"selectedCompletionInfo,omitempty"` +-} +- +-// An inline completion item represents a text snippet that is proposed inline to complete text that is being typed. +-// +-// @since 3.18.0 +-// @proposed +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#inlineCompletionItem +-type InlineCompletionItem struct { +- // The text to replace the range with. Must be set. +- InsertText Or_InlineCompletionItem_insertText `json:"insertText"` +- // A text that is used to decide if this inline completion should be shown. When `falsy` the {@link InlineCompletionItem.insertText} is used. +- FilterText string `json:"filterText,omitempty"` +- // The range to replace. Must begin and end on the same line. +- Range *Range `json:"range,omitempty"` +- // An optional {@link Command} that is executed *after* inserting this completion. +- Command *Command `json:"command,omitempty"` +-} +- +-// Represents a collection of {@link InlineCompletionItem inline completion items} to be presented in the editor. +-// +-// @since 3.18.0 +-// @proposed +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#inlineCompletionList +-type InlineCompletionList struct { +- // The inline completion items +- Items []InlineCompletionItem `json:"items"` +-} +- +-// Inline completion options used during static registration. +-// +-// @since 3.18.0 +-// @proposed +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#inlineCompletionOptions +-type InlineCompletionOptions struct { +- WorkDoneProgressOptions +-} +- +-// A parameter literal used in inline completion requests. +-// +-// @since 3.18.0 +-// @proposed +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#inlineCompletionParams +-type InlineCompletionParams struct { +- // Additional information about the context in which inline completions were +- // requested. 
+- Context InlineCompletionContext `json:"context"` +- TextDocumentPositionParams +- WorkDoneProgressParams +-} +- +-// Inline completion options used during static or dynamic registration. +-// +-// @since 3.18.0 +-// @proposed +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#inlineCompletionRegistrationOptions +-type InlineCompletionRegistrationOptions struct { +- InlineCompletionOptions +- TextDocumentRegistrationOptions +- StaticRegistrationOptions +-} +- +-// Describes how an {@link InlineCompletionItemProvider inline completion provider} was triggered. +-// +-// @since 3.18.0 +-// @proposed +-type InlineCompletionTriggerKind uint32 +- +-// Inline value information can be provided by different means: +-// +-// - directly as a text value (class InlineValueText). +-// - as a name to use for a variable lookup (class InlineValueVariableLookup) +-// - as an evaluatable expression (class InlineValueEvaluatableExpression) +-// +-// The InlineValue types combines all inline value types into one type. +-// +-// @since 3.17.0 +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#inlineValue +-type InlineValue = Or_InlineValue // (alias) +-// Client capabilities specific to inline values. +-// +-// @since 3.17.0 +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#inlineValueClientCapabilities +-type InlineValueClientCapabilities struct { +- // Whether implementation supports dynamic registration for inline value providers. +- DynamicRegistration bool `json:"dynamicRegistration,omitempty"` +-} +- +-// @since 3.17.0 +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#inlineValueContext +-type InlineValueContext struct { +- // The stack frame (as a DAP Id) where the execution has stopped. +- FrameID int32 `json:"frameId"` +- // The document range where execution has stopped. +- // Typically the end position of the range denotes the line where the inline values are shown. +- StoppedLocation Range `json:"stoppedLocation"` +-} +- +-// Provide an inline value through an expression evaluation. +-// If only a range is specified, the expression will be extracted from the underlying document. +-// An optional expression can be used to override the extracted expression. +-// +-// @since 3.17.0 +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#inlineValueEvaluatableExpression +-type InlineValueEvaluatableExpression struct { +- // The document range for which the inline value applies. +- // The range is used to extract the evaluatable expression from the underlying document. +- Range Range `json:"range"` +- // If specified the expression overrides the extracted expression. +- Expression string `json:"expression,omitempty"` +-} +- +-// Inline value options used during static registration. +-// +-// @since 3.17.0 +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#inlineValueOptions +-type InlineValueOptions struct { +- WorkDoneProgressOptions +-} +- +-// A parameter literal used in inline value requests. +-// +-// @since 3.17.0 +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#inlineValueParams +-type InlineValueParams struct { +- // The text document. 
+- TextDocument TextDocumentIdentifier `json:"textDocument"` +- // The document range for which inline values should be computed. +- Range Range `json:"range"` +- // Additional information about the context in which inline values were +- // requested. +- Context InlineValueContext `json:"context"` +- WorkDoneProgressParams +-} +- +-// Inline value options used during static or dynamic registration. +-// +-// @since 3.17.0 +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#inlineValueRegistrationOptions +-type InlineValueRegistrationOptions struct { +- InlineValueOptions +- TextDocumentRegistrationOptions +- StaticRegistrationOptions +-} +- +-// Provide inline value as text. +-// +-// @since 3.17.0 +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#inlineValueText +-type InlineValueText struct { +- // The document range for which the inline value applies. +- Range Range `json:"range"` +- // The text of the inline value. +- Text string `json:"text"` +-} +- +-// Provide inline value through a variable lookup. +-// If only a range is specified, the variable name will be extracted from the underlying document. +-// An optional variable name can be used to override the extracted name. +-// +-// @since 3.17.0 +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#inlineValueVariableLookup +-type InlineValueVariableLookup struct { +- // The document range for which the inline value applies. +- // The range is used to extract the variable name from the underlying document. +- Range Range `json:"range"` +- // If specified the name of the variable to look up. +- VariableName string `json:"variableName,omitempty"` +- // How to perform the lookup. +- CaseSensitiveLookup bool `json:"caseSensitiveLookup"` +-} +- +-// Client workspace capabilities specific to inline values. +-// +-// @since 3.17.0 +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#inlineValueWorkspaceClientCapabilities +-type InlineValueWorkspaceClientCapabilities struct { +- // Whether the client implementation supports a refresh request sent from the +- // server to the client. +- // +- // Note that this event is global and will force the client to refresh all +- // inline values currently shown. It should be used with absolute care and is +- // useful for situation where a server for example detects a project wide +- // change that requires such a calculation. +- RefreshSupport bool `json:"refreshSupport,omitempty"` +-} +- +-// A special text edit to provide an insert and a replace operation. +-// +-// @since 3.16.0 +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#insertReplaceEdit +-type InsertReplaceEdit struct { +- // The string to be inserted. +- NewText string `json:"newText"` +- // The range if the insert is requested +- Insert Range `json:"insert"` +- // The range if the replace is requested. +- Replace Range `json:"replace"` +-} +- +-// Defines whether the insert text in a completion item should be interpreted as +-// plain text or a snippet. +-type InsertTextFormat uint32 +- +-// How whitespace and indentation is handled during completion +-// item insertion. +-// +-// @since 3.16.0 +-type InsertTextMode uint32 +-type LSPAny = any +- +-// LSP arrays. 
+-// @since 3.17.0 +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#lSPArray +-type LSPArray = []any // (alias) +-type LSPErrorCodes int32 +- +-// LSP object definition. +-// @since 3.17.0 +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#lSPObject +-type LSPObject = map[string]LSPAny // (alias) +-// Predefined Language kinds +-// @since 3.18.0 +-type LanguageKind string +- +-// Client capabilities for the linked editing range request. +-// +-// @since 3.16.0 +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#linkedEditingRangeClientCapabilities +-type LinkedEditingRangeClientCapabilities struct { +- // Whether implementation supports dynamic registration. If this is set to `true` +- // the client supports the new `(TextDocumentRegistrationOptions & StaticRegistrationOptions)` +- // return value for the corresponding server capability as well. +- DynamicRegistration bool `json:"dynamicRegistration,omitempty"` +-} +- +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#linkedEditingRangeOptions +-type LinkedEditingRangeOptions struct { +- WorkDoneProgressOptions +-} +- +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#linkedEditingRangeParams +-type LinkedEditingRangeParams struct { +- TextDocumentPositionParams +- WorkDoneProgressParams +-} +- +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#linkedEditingRangeRegistrationOptions +-type LinkedEditingRangeRegistrationOptions struct { +- TextDocumentRegistrationOptions +- LinkedEditingRangeOptions +- StaticRegistrationOptions +-} +- +-// The result of a linked editing range request. +-// +-// @since 3.16.0 +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#linkedEditingRanges +-type LinkedEditingRanges struct { +- // A list of ranges that can be edited together. The ranges must have +- // identical length and contain identical text content. The ranges cannot overlap. +- Ranges []Range `json:"ranges"` +- // An optional word pattern (regular expression) that describes valid contents for +- // the given ranges. If no pattern is provided, the client configuration's word +- // pattern will be used. +- WordPattern string `json:"wordPattern,omitempty"` +-} +- +-// created for Literal (Lit_ClientSemanticTokensRequestOptions_range_Item1) +-type Lit_ClientSemanticTokensRequestOptions_range_Item1 struct { +-} +- +-// Represents a location inside a resource, such as a line +-// inside a text file. +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#location +-type Location struct { +- URI DocumentURI `json:"uri"` +- Range Range `json:"range"` +-} +- +-// Represents the connection of two locations. Provides additional metadata over normal {@link Location locations}, +-// including an origin range. +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#locationLink +-type LocationLink struct { +- // Span of the origin of this link. +- // +- // Used as the underlined span for mouse interaction. Defaults to the word range at +- // the definition position. +- OriginSelectionRange *Range `json:"originSelectionRange,omitempty"` +- // The target resource identifier of this link. 
+- TargetURI DocumentURI `json:"targetUri"` +- // The full target range of this link. If the target for example is a symbol then target range is the +- // range enclosing this symbol not including leading/trailing whitespace but everything else +- // like comments. This information is typically used to highlight the range in the editor. +- TargetRange Range `json:"targetRange"` +- // The range that should be selected and revealed when this link is being followed, e.g the name of a function. +- // Must be contained by the `targetRange`. See also `DocumentSymbol#range` +- TargetSelectionRange Range `json:"targetSelectionRange"` +-} +- +-// Location with only uri and does not include range. +-// +-// @since 3.18.0 +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#locationUriOnly +-type LocationUriOnly struct { +- URI DocumentURI `json:"uri"` +-} +- +-// The log message parameters. +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#logMessageParams +-type LogMessageParams struct { +- // The message type. See {@link MessageType} +- Type MessageType `json:"type"` +- // The actual message. +- Message string `json:"message"` +-} +- +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#logTraceParams +-type LogTraceParams struct { +- Message string `json:"message"` +- Verbose string `json:"verbose,omitempty"` +-} +- +-// Client capabilities specific to the used markdown parser. +-// +-// @since 3.16.0 +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#markdownClientCapabilities +-type MarkdownClientCapabilities struct { +- // The name of the parser. +- Parser string `json:"parser"` +- // The version of the parser. +- Version string `json:"version,omitempty"` +- // A list of HTML tags that the client allows / supports in +- // Markdown. +- // +- // @since 3.17.0 +- AllowedTags []string `json:"allowedTags,omitempty"` +-} +- +-// MarkedString can be used to render human readable text. It is either a markdown string +-// or a code-block that provides a language and a code snippet. The language identifier +-// is semantically equal to the optional language identifier in fenced code blocks in GitHub +-// issues. See https://help.github.com/articles/creating-and-highlighting-code-blocks/#syntax-highlighting +-// +-// The pair of a language and a value is an equivalent to markdown: +-// ```${language} +-// ${value} +-// ``` +-// +-// Note that markdown strings will be sanitized - that means html will be escaped. +-// @deprecated use MarkupContent instead. +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#markedString +-type MarkedString = Or_MarkedString // (alias) +-// @since 3.18.0 +-// @deprecated use MarkupContent instead. +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#markedStringWithLanguage +-type MarkedStringWithLanguage struct { +- Language string `json:"language"` +- Value string `json:"value"` +-} +- +-// A `MarkupContent` literal represents a string value which content is interpreted base on its +-// kind flag. Currently the protocol supports `plaintext` and `markdown` as markup kinds. +-// +-// If the kind is `markdown` then the value can contain fenced code blocks like in GitHub issues. 
+-// See https://help.github.com/articles/creating-and-highlighting-code-blocks/#syntax-highlighting +-// +-// Here is an example how such a string can be constructed using JavaScript / TypeScript: +-// ```ts +-// +-// let markdown: MarkdownContent = { +-// kind: MarkupKind.Markdown, +-// value: [ +-// '# Header', +-// 'Some text', +-// '```typescript', +-// 'someCode();', +-// '```' +-// ].join('\n') +-// }; +-// +-// ``` +-// +-// *Please Note* that clients might sanitize the return markdown. A client could decide to +-// remove HTML from the markdown to avoid script execution. +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#markupContent +-type MarkupContent struct { +- // The type of the Markup +- Kind MarkupKind `json:"kind"` +- // The content itself +- Value string `json:"value"` +-} +- +-// Describes the content type that a client supports in various +-// result literals like `Hover`, `ParameterInfo` or `CompletionItem`. +-// +-// Please note that `MarkupKinds` must not start with a `$`. This kinds +-// are reserved for internal usage. +-type MarkupKind string +- +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#messageActionItem +-type MessageActionItem struct { +- // A short title like 'Retry', 'Open Log' etc. +- Title string `json:"title"` +-} +- +-// The message type +-type MessageType uint32 +- +-// Moniker definition to match LSIF 0.5 moniker definition. +-// +-// @since 3.16.0 +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#moniker +-type Moniker struct { +- // The scheme of the moniker. For example tsc or .Net +- Scheme string `json:"scheme"` +- // The identifier of the moniker. The value is opaque in LSIF however +- // schema owners are allowed to define the structure if they want. +- Identifier string `json:"identifier"` +- // The scope in which the moniker is unique +- Unique UniquenessLevel `json:"unique"` +- // The moniker kind if known. +- Kind *MonikerKind `json:"kind,omitempty"` +-} +- +-// Client capabilities specific to the moniker request. +-// +-// @since 3.16.0 +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#monikerClientCapabilities +-type MonikerClientCapabilities struct { +- // Whether moniker supports dynamic registration. If this is set to `true` +- // the client supports the new `MonikerRegistrationOptions` return value +- // for the corresponding server capability as well. +- DynamicRegistration bool `json:"dynamicRegistration,omitempty"` +-} +- +-// The moniker kind. +-// +-// @since 3.16.0 +-type MonikerKind string +- +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#monikerOptions +-type MonikerOptions struct { +- WorkDoneProgressOptions +-} +- +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#monikerParams +-type MonikerParams struct { +- TextDocumentPositionParams +- WorkDoneProgressParams +- PartialResultParams +-} +- +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#monikerRegistrationOptions +-type MonikerRegistrationOptions struct { +- TextDocumentRegistrationOptions +- MonikerOptions +-} +- +-// A notebook cell. 
+-// +-// A cell's document URI must be unique across ALL notebook +-// cells and can therefore be used to uniquely identify a +-// notebook cell or the cell's text document. +-// +-// @since 3.17.0 +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#notebookCell +-type NotebookCell struct { +- // The cell's kind +- Kind NotebookCellKind `json:"kind"` +- // The URI of the cell's text document +- // content. +- Document DocumentURI `json:"document"` +- // Additional metadata stored with the cell. +- // +- // Note: should always be an object literal (e.g. LSPObject) +- Metadata *LSPObject `json:"metadata,omitempty"` +- // Additional execution summary information +- // if supported by the client. +- ExecutionSummary *ExecutionSummary `json:"executionSummary,omitempty"` +-} +- +-// A change describing how to move a `NotebookCell` +-// array from state S to S'. +-// +-// @since 3.17.0 +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#notebookCellArrayChange +-type NotebookCellArrayChange struct { +- // The start oftest of the cell that changed. +- Start uint32 `json:"start"` +- // The deleted cells +- DeleteCount uint32 `json:"deleteCount"` +- // The new cells, if any +- Cells []NotebookCell `json:"cells,omitempty"` +-} +- +-// A notebook cell kind. +-// +-// @since 3.17.0 +-type NotebookCellKind uint32 +- +-// @since 3.18.0 +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#notebookCellLanguage +-type NotebookCellLanguage struct { +- Language string `json:"language"` +-} +- +-// A notebook cell text document filter denotes a cell text +-// document by different properties. +-// +-// @since 3.17.0 +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#notebookCellTextDocumentFilter +-type NotebookCellTextDocumentFilter struct { +- // A filter that matches against the notebook +- // containing the notebook cell. If a string +- // value is provided it matches against the +- // notebook type. '*' matches every notebook. +- Notebook Or_NotebookCellTextDocumentFilter_notebook `json:"notebook"` +- // A language id like `python`. +- // +- // Will be matched against the language id of the +- // notebook cell document. '*' matches every language. +- Language string `json:"language,omitempty"` +-} +- +-// A notebook document. +-// +-// @since 3.17.0 +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#notebookDocument +-type NotebookDocument struct { +- // The notebook document's uri. +- URI URI `json:"uri"` +- // The type of the notebook. +- NotebookType string `json:"notebookType"` +- // The version number of this document (it will increase after each +- // change, including undo/redo). +- Version int32 `json:"version"` +- // Additional metadata stored with the notebook +- // document. +- // +- // Note: should always be an object literal (e.g. LSPObject) +- Metadata *LSPObject `json:"metadata,omitempty"` +- // The cells of a notebook. +- Cells []NotebookCell `json:"cells"` +-} +- +-// Structural changes to cells in a notebook document. +-// +-// @since 3.18.0 +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#notebookDocumentCellChangeStructure +-type NotebookDocumentCellChangeStructure struct { +- // The change to the cell array. 
+- Array NotebookCellArrayChange `json:"array"` +- // Additional opened cell text documents. +- DidOpen []TextDocumentItem `json:"didOpen,omitempty"` +- // Additional closed cell text documents. +- DidClose []TextDocumentIdentifier `json:"didClose,omitempty"` +-} +- +-// Cell changes to a notebook document. +-// +-// @since 3.18.0 +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#notebookDocumentCellChanges +-type NotebookDocumentCellChanges struct { +- // Changes to the cell structure to add or +- // remove cells. +- Structure *NotebookDocumentCellChangeStructure `json:"structure,omitempty"` +- // Changes to notebook cells properties like its +- // kind, execution summary or metadata. +- Data []NotebookCell `json:"data,omitempty"` +- // Changes to the text content of notebook cells. +- TextContent []NotebookDocumentCellContentChanges `json:"textContent,omitempty"` +-} +- +-// Content changes to a cell in a notebook document. +-// +-// @since 3.18.0 +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#notebookDocumentCellContentChanges +-type NotebookDocumentCellContentChanges struct { +- Document VersionedTextDocumentIdentifier `json:"document"` +- Changes []TextDocumentContentChangeEvent `json:"changes"` +-} +- +-// A change event for a notebook document. +-// +-// @since 3.17.0 +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#notebookDocumentChangeEvent +-type NotebookDocumentChangeEvent struct { +- // The changed meta data if any. +- // +- // Note: should always be an object literal (e.g. LSPObject) +- Metadata *LSPObject `json:"metadata,omitempty"` +- // Changes to cells +- Cells *NotebookDocumentCellChanges `json:"cells,omitempty"` +-} +- +-// Capabilities specific to the notebook document support. +-// +-// @since 3.17.0 +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#notebookDocumentClientCapabilities +-type NotebookDocumentClientCapabilities struct { +- // Capabilities specific to notebook document synchronization +- // +- // @since 3.17.0 +- Synchronization NotebookDocumentSyncClientCapabilities `json:"synchronization"` +-} +- +-// A notebook document filter denotes a notebook document by +-// different properties. The properties will be match +-// against the notebook's URI (same as with documents) +-// +-// @since 3.17.0 +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#notebookDocumentFilter +-type NotebookDocumentFilter = Or_NotebookDocumentFilter // (alias) +-// A notebook document filter where `notebookType` is required field. +-// +-// @since 3.18.0 +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#notebookDocumentFilterNotebookType +-type NotebookDocumentFilterNotebookType struct { +- // The type of the enclosing notebook. +- NotebookType string `json:"notebookType"` +- // A Uri {@link Uri.scheme scheme}, like `file` or `untitled`. +- Scheme string `json:"scheme,omitempty"` +- // A glob pattern. +- Pattern *GlobPattern `json:"pattern,omitempty"` +-} +- +-// A notebook document filter where `pattern` is required field. 
+-// +-// @since 3.18.0 +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#notebookDocumentFilterPattern +-type NotebookDocumentFilterPattern struct { +- // The type of the enclosing notebook. +- NotebookType string `json:"notebookType,omitempty"` +- // A Uri {@link Uri.scheme scheme}, like `file` or `untitled`. +- Scheme string `json:"scheme,omitempty"` +- // A glob pattern. +- Pattern GlobPattern `json:"pattern"` +-} +- +-// A notebook document filter where `scheme` is required field. +-// +-// @since 3.18.0 +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#notebookDocumentFilterScheme +-type NotebookDocumentFilterScheme struct { +- // The type of the enclosing notebook. +- NotebookType string `json:"notebookType,omitempty"` +- // A Uri {@link Uri.scheme scheme}, like `file` or `untitled`. +- Scheme string `json:"scheme"` +- // A glob pattern. +- Pattern *GlobPattern `json:"pattern,omitempty"` +-} +- +-// @since 3.18.0 +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#notebookDocumentFilterWithCells +-type NotebookDocumentFilterWithCells struct { +- // The notebook to be synced If a string +- // value is provided it matches against the +- // notebook type. '*' matches every notebook. +- Notebook *Or_NotebookDocumentFilterWithCells_notebook `json:"notebook,omitempty"` +- // The cells of the matching notebook to be synced. +- Cells []NotebookCellLanguage `json:"cells"` +-} +- +-// @since 3.18.0 +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#notebookDocumentFilterWithNotebook +-type NotebookDocumentFilterWithNotebook struct { +- // The notebook to be synced If a string +- // value is provided it matches against the +- // notebook type. '*' matches every notebook. +- Notebook Or_NotebookDocumentFilterWithNotebook_notebook `json:"notebook"` +- // The cells of the matching notebook to be synced. +- Cells []NotebookCellLanguage `json:"cells,omitempty"` +-} +- +-// A literal to identify a notebook document in the client. +-// +-// @since 3.17.0 +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#notebookDocumentIdentifier +-type NotebookDocumentIdentifier struct { +- // The notebook document's uri. +- URI URI `json:"uri"` +-} +- +-// Notebook specific client capabilities. +-// +-// @since 3.17.0 +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#notebookDocumentSyncClientCapabilities +-type NotebookDocumentSyncClientCapabilities struct { +- // Whether implementation supports dynamic registration. If this is +- // set to `true` the client supports the new +- // `(TextDocumentRegistrationOptions & StaticRegistrationOptions)` +- // return value for the corresponding server capability as well. +- DynamicRegistration bool `json:"dynamicRegistration,omitempty"` +- // The client supports sending execution summary data per cell. +- ExecutionSummarySupport bool `json:"executionSummarySupport,omitempty"` +-} +- +-// Options specific to a notebook plus its cells +-// to be synced to the server. +-// +-// If a selector provides a notebook document +-// filter but no cell selector all cells of a +-// matching notebook document will be synced. 
+-// +-// If a selector provides no notebook document +-// filter but only a cell selector all notebook +-// document that contain at least one matching +-// cell will be synced. +-// +-// @since 3.17.0 +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#notebookDocumentSyncOptions +-type NotebookDocumentSyncOptions struct { +- // The notebooks to be synced +- NotebookSelector []Or_NotebookDocumentSyncOptions_notebookSelector_Elem `json:"notebookSelector"` +- // Whether save notification should be forwarded to +- // the server. Will only be honored if mode === `notebook`. +- Save bool `json:"save,omitempty"` +-} +- +-// Registration options specific to a notebook. +-// +-// @since 3.17.0 +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#notebookDocumentSyncRegistrationOptions +-type NotebookDocumentSyncRegistrationOptions struct { +- NotebookDocumentSyncOptions +- StaticRegistrationOptions +-} +- +-// A text document identifier to optionally denote a specific version of a text document. +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#optionalVersionedTextDocumentIdentifier +-type OptionalVersionedTextDocumentIdentifier struct { +- // The version number of this document. If a versioned text document identifier +- // is sent from the server to the client and the file is not open in the editor +- // (the server has not received an open notification before) the server can send +- // `null` to indicate that the version is unknown and the content on disk is the +- // truth (as specified with document content ownership). +- Version int32 `json:"version"` +- TextDocumentIdentifier +-} +- +-// created for Or [Location LocationUriOnly] +-type OrPLocation_workspace_symbol struct { +- Value any `json:"value"` +-} +- +-// created for Or [[]string string] +-type OrPSection_workspace_didChangeConfiguration struct { +- Value any `json:"value"` +-} +- +-// created for Or [MarkupContent string] +-type OrPTooltipPLabel struct { +- Value any `json:"value"` +-} +- +-// created for Or [MarkupContent string] +-type OrPTooltip_textDocument_inlayHint struct { +- Value any `json:"value"` +-} +- +-// created for Or [int32 string] +-type Or_CancelParams_id struct { +- Value any `json:"value"` +-} +- +-// created for Or [ClientSemanticTokensRequestFullDelta bool] +-type Or_ClientSemanticTokensRequestOptions_full struct { +- Value any `json:"value"` +-} +- +-// created for Or [Lit_ClientSemanticTokensRequestOptions_range_Item1 bool] +-type Or_ClientSemanticTokensRequestOptions_range struct { +- Value any `json:"value"` +-} +- +-// created for Or [EditRangeWithInsertReplace Range] +-type Or_CompletionItemDefaults_editRange struct { +- Value any `json:"value"` +-} +- +-// created for Or [MarkupContent string] +-type Or_CompletionItem_documentation struct { +- Value any `json:"value"` +-} +- +-// created for Or [InsertReplaceEdit TextEdit] +-type Or_CompletionItem_textEdit struct { +- Value any `json:"value"` +-} +- +-// created for Or [Location []Location] +-type Or_Definition struct { +- Value any `json:"value"` +-} +- +-// created for Or [int32 string] +-type Or_Diagnostic_code struct { +- Value any `json:"value"` +-} +- +-// created for Or [RelatedFullDocumentDiagnosticReport RelatedUnchangedDocumentDiagnosticReport] +-type Or_DocumentDiagnosticReport struct { +- Value any `json:"value"` +-} +- +-// created for Or [FullDocumentDiagnosticReport 
UnchangedDocumentDiagnosticReport] +-type Or_DocumentDiagnosticReportPartialResult_relatedDocuments_Value struct { +- Value any `json:"value"` +-} +- +-// created for Or [NotebookCellTextDocumentFilter TextDocumentFilter] +-type Or_DocumentFilter struct { +- Value any `json:"value"` +-} +- +-// created for Or [Pattern RelativePattern] +-type Or_GlobPattern struct { +- Value any `json:"value"` +-} +- +-// created for Or [MarkedString MarkupContent []MarkedString] +-type Or_Hover_contents struct { +- Value any `json:"value"` +-} +- +-// created for Or [[]InlayHintLabelPart string] +-type Or_InlayHint_label struct { +- Value any `json:"value"` +-} +- +-// created for Or [StringValue string] +-type Or_InlineCompletionItem_insertText struct { +- Value any `json:"value"` +-} +- +-// created for Or [InlineValueEvaluatableExpression InlineValueText InlineValueVariableLookup] +-type Or_InlineValue struct { +- Value any `json:"value"` +-} +- +-// created for Or [MarkedStringWithLanguage string] +-type Or_MarkedString struct { +- Value any `json:"value"` +-} +- +-// created for Or [NotebookDocumentFilter string] +-type Or_NotebookCellTextDocumentFilter_notebook struct { +- Value any `json:"value"` +-} +- +-// created for Or [NotebookDocumentFilterNotebookType NotebookDocumentFilterPattern NotebookDocumentFilterScheme] +-type Or_NotebookDocumentFilter struct { +- Value any `json:"value"` +-} +- +-// created for Or [NotebookDocumentFilter string] +-type Or_NotebookDocumentFilterWithCells_notebook struct { +- Value any `json:"value"` +-} +- +-// created for Or [NotebookDocumentFilter string] +-type Or_NotebookDocumentFilterWithNotebook_notebook struct { +- Value any `json:"value"` +-} +- +-// created for Or [NotebookDocumentFilterWithCells NotebookDocumentFilterWithNotebook] +-type Or_NotebookDocumentSyncOptions_notebookSelector_Elem struct { +- Value any `json:"value"` +-} +- +-// created for Or [FullDocumentDiagnosticReport UnchangedDocumentDiagnosticReport] +-type Or_RelatedFullDocumentDiagnosticReport_relatedDocuments_Value struct { +- Value any `json:"value"` +-} +- +-// created for Or [FullDocumentDiagnosticReport UnchangedDocumentDiagnosticReport] +-type Or_RelatedUnchangedDocumentDiagnosticReport_relatedDocuments_Value struct { +- Value any `json:"value"` +-} +- +-// created for Or [CodeAction Command] +-type Or_Result_textDocument_codeAction_Item0_Elem struct { +- Value any `json:"value"` +-} +- +-// created for Or [InlineCompletionList []InlineCompletionItem] +-type Or_Result_textDocument_inlineCompletion struct { +- Value any `json:"value"` +-} +- +-// created for Or [SemanticTokensFullDelta bool] +-type Or_SemanticTokensOptions_full struct { +- Value any `json:"value"` +-} +- +-// created for Or [PRangeESemanticTokensOptions bool] +-type Or_SemanticTokensOptions_range struct { +- Value any `json:"value"` +-} +- +-// created for Or [CallHierarchyOptions CallHierarchyRegistrationOptions bool] +-type Or_ServerCapabilities_callHierarchyProvider struct { +- Value any `json:"value"` +-} +- +-// created for Or [CodeActionOptions bool] +-type Or_ServerCapabilities_codeActionProvider struct { +- Value any `json:"value"` +-} +- +-// created for Or [DocumentColorOptions DocumentColorRegistrationOptions bool] +-type Or_ServerCapabilities_colorProvider struct { +- Value any `json:"value"` +-} +- +-// created for Or [DeclarationOptions DeclarationRegistrationOptions bool] +-type Or_ServerCapabilities_declarationProvider struct { +- Value any `json:"value"` +-} +- +-// created for Or [DefinitionOptions 
bool] +-type Or_ServerCapabilities_definitionProvider struct { +- Value any `json:"value"` +-} +- +-// created for Or [DiagnosticOptions DiagnosticRegistrationOptions] +-type Or_ServerCapabilities_diagnosticProvider struct { +- Value any `json:"value"` +-} +- +-// created for Or [DocumentFormattingOptions bool] +-type Or_ServerCapabilities_documentFormattingProvider struct { +- Value any `json:"value"` +-} +- +-// created for Or [DocumentHighlightOptions bool] +-type Or_ServerCapabilities_documentHighlightProvider struct { +- Value any `json:"value"` +-} +- +-// created for Or [DocumentRangeFormattingOptions bool] +-type Or_ServerCapabilities_documentRangeFormattingProvider struct { +- Value any `json:"value"` +-} +- +-// created for Or [DocumentSymbolOptions bool] +-type Or_ServerCapabilities_documentSymbolProvider struct { +- Value any `json:"value"` +-} +- +-// created for Or [FoldingRangeOptions FoldingRangeRegistrationOptions bool] +-type Or_ServerCapabilities_foldingRangeProvider struct { +- Value any `json:"value"` +-} +- +-// created for Or [HoverOptions bool] +-type Or_ServerCapabilities_hoverProvider struct { +- Value any `json:"value"` +-} +- +-// created for Or [ImplementationOptions ImplementationRegistrationOptions bool] +-type Or_ServerCapabilities_implementationProvider struct { +- Value any `json:"value"` +-} +- +-// created for Or [InlayHintOptions InlayHintRegistrationOptions bool] +-type Or_ServerCapabilities_inlayHintProvider struct { +- Value any `json:"value"` +-} +- +-// created for Or [InlineCompletionOptions bool] +-type Or_ServerCapabilities_inlineCompletionProvider struct { +- Value any `json:"value"` +-} +- +-// created for Or [InlineValueOptions InlineValueRegistrationOptions bool] +-type Or_ServerCapabilities_inlineValueProvider struct { +- Value any `json:"value"` +-} +- +-// created for Or [LinkedEditingRangeOptions LinkedEditingRangeRegistrationOptions bool] +-type Or_ServerCapabilities_linkedEditingRangeProvider struct { +- Value any `json:"value"` +-} +- +-// created for Or [MonikerOptions MonikerRegistrationOptions bool] +-type Or_ServerCapabilities_monikerProvider struct { +- Value any `json:"value"` +-} +- +-// created for Or [NotebookDocumentSyncOptions NotebookDocumentSyncRegistrationOptions] +-type Or_ServerCapabilities_notebookDocumentSync struct { +- Value any `json:"value"` +-} +- +-// created for Or [ReferenceOptions bool] +-type Or_ServerCapabilities_referencesProvider struct { +- Value any `json:"value"` +-} +- +-// created for Or [RenameOptions bool] +-type Or_ServerCapabilities_renameProvider struct { +- Value any `json:"value"` +-} +- +-// created for Or [SelectionRangeOptions SelectionRangeRegistrationOptions bool] +-type Or_ServerCapabilities_selectionRangeProvider struct { +- Value any `json:"value"` +-} +- +-// created for Or [SemanticTokensOptions SemanticTokensRegistrationOptions] +-type Or_ServerCapabilities_semanticTokensProvider struct { +- Value any `json:"value"` +-} +- +-// created for Or [TextDocumentSyncKind TextDocumentSyncOptions] +-type Or_ServerCapabilities_textDocumentSync struct { +- Value any `json:"value"` +-} +- +-// created for Or [TypeDefinitionOptions TypeDefinitionRegistrationOptions bool] +-type Or_ServerCapabilities_typeDefinitionProvider struct { +- Value any `json:"value"` +-} +- +-// created for Or [TypeHierarchyOptions TypeHierarchyRegistrationOptions bool] +-type Or_ServerCapabilities_typeHierarchyProvider struct { +- Value any `json:"value"` +-} +- +-// created for Or [WorkspaceSymbolOptions bool] +-type 
Or_ServerCapabilities_workspaceSymbolProvider struct { +- Value any `json:"value"` +-} +- +-// created for Or [MarkupContent string] +-type Or_SignatureInformation_documentation struct { +- Value any `json:"value"` +-} +- +-// created for Or [AnnotatedTextEdit SnippetTextEdit TextEdit] +-type Or_TextDocumentEdit_edits_Elem struct { +- Value any `json:"value"` +-} +- +-// created for Or [TextDocumentFilterLanguage TextDocumentFilterPattern TextDocumentFilterScheme] +-type Or_TextDocumentFilter struct { +- Value any `json:"value"` +-} +- +-// created for Or [SaveOptions bool] +-type Or_TextDocumentSyncOptions_save struct { +- Value any `json:"value"` +-} +- +-// created for Or [WorkspaceFullDocumentDiagnosticReport WorkspaceUnchangedDocumentDiagnosticReport] +-type Or_WorkspaceDocumentDiagnosticReport struct { +- Value any `json:"value"` +-} +- +-// created for Or [CreateFile DeleteFile RenameFile TextDocumentEdit] +-type Or_WorkspaceEdit_documentChanges_Elem struct { +- Value any `json:"value"` +-} +- +-// created for Or [TextDocumentContentOptions TextDocumentContentRegistrationOptions] +-type Or_WorkspaceOptions_textDocumentContent struct { +- Value any `json:"value"` +-} +- +-// created for Or [Declaration []DeclarationLink] +-type Or_textDocument_declaration struct { +- Value any `json:"value"` +-} +- +-// created for Literal (Lit_SemanticTokensOptions_range_Item1) +-type PRangeESemanticTokensOptions struct { +-} +- +-// The parameters of a configuration request. +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#configurationParams +-type ParamConfiguration struct { +- Items []ConfigurationItem `json:"items"` +-} +- +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#initializeParams +-type ParamInitialize struct { +- XInitializeParams +- WorkspaceFoldersInitializeParams +-} +- +-// Represents a parameter of a callable-signature. A parameter can +-// have a label and a doc-comment. +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#parameterInformation +-type ParameterInformation struct { +- // The label of this parameter information. +- // +- // Either a string or an inclusive start and exclusive end offsets within its containing +- // signature label. (see SignatureInformation.label). The offsets are based on a UTF-16 +- // string representation as `Position` and `Range` does. +- // +- // To avoid ambiguities a server should use the [start, end] offset value instead of using +- // a substring. Whether a client support this is controlled via `labelOffsetSupport` client +- // capability. +- // +- // *Note*: a label of type string should be a substring of its containing signature label. +- // Its intended use case is to highlight the parameter label part in the `SignatureInformation.label`. +- Label string `json:"label"` +- // The human-readable doc-comment of this parameter. Will be shown +- // in the UI but can be omitted. +- Documentation string `json:"documentation,omitempty"` +-} +- +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#partialResultParams +-type PartialResultParams struct { +- // An optional token that a server can use to report partial results (e.g. streaming) to +- // the client. +- PartialResultToken *ProgressToken `json:"partialResultToken,omitempty"` +-} +- +-// The glob pattern to watch relative to the base path. 
Glob patterns can have the following syntax: +-// +-// - `*` to match one or more characters in a path segment +-// - `?` to match on one character in a path segment +-// - `**` to match any number of path segments, including none +-// - `{}` to group conditions (e.g. `**​/*.{ts,js}` matches all TypeScript and JavaScript files) +-// - `[]` to declare a range of characters to match in a path segment (e.g., `example.[0-9]` to match on `example.0`, `example.1`, …) +-// - `[!...]` to negate a range of characters to match in a path segment (e.g., `example.[!0-9]` to match on `example.a`, `example.b`, but not `example.0`) +-// +-// @since 3.17.0 +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#pattern +-type Pattern = string // (alias) +-// Position in a text document expressed as zero-based line and character +-// offset. Prior to 3.17 the offsets were always based on a UTF-16 string +-// representation. So a string of the form `a𐐀b` the character offset of the +-// character `a` is 0, the character offset of `𐐀` is 1 and the character +-// offset of b is 3 since `𐐀` is represented using two code units in UTF-16. +-// Since 3.17 clients and servers can agree on a different string encoding +-// representation (e.g. UTF-8). The client announces it's supported encoding +-// via the client capability [`general.positionEncodings`](https://microsoft.github.io/language-server-protocol/specifications/specification-current/#clientCapabilities). +-// The value is an array of position encodings the client supports, with +-// decreasing preference (e.g. the encoding at index `0` is the most preferred +-// one). To stay backwards compatible the only mandatory encoding is UTF-16 +-// represented via the string `utf-16`. The server can pick one of the +-// encodings offered by the client and signals that encoding back to the +-// client via the initialize result's property +-// [`capabilities.positionEncoding`](https://microsoft.github.io/language-server-protocol/specifications/specification-current/#serverCapabilities). If the string value +-// `utf-16` is missing from the client's capability `general.positionEncodings` +-// servers can safely assume that the client supports UTF-16. If the server +-// omits the position encoding in its initialize result the encoding defaults +-// to the string value `utf-16`. Implementation considerations: since the +-// conversion from one encoding into another requires the content of the +-// file / line the conversion is best done where the file is read which is +-// usually on the server side. +-// +-// Positions are line end character agnostic. So you can not specify a position +-// that denotes `\r|\n` or `\n|` where `|` represents the character offset. +-// +-// @since 3.17.0 - support for negotiated position encoding. +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#position +-type Position struct { +- // Line position in a document (zero-based). +- Line uint32 `json:"line"` +- // Character offset on a line in a document (zero-based). +- // +- // The meaning of this offset is determined by the negotiated +- // `PositionEncodingKind`. +- Character uint32 `json:"character"` +-} +- +-// A set of predefined position encoding kinds. 
+-// +-// @since 3.17.0 +-type PositionEncodingKind string +- +-// @since 3.18.0 +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#prepareRenameDefaultBehavior +-type PrepareRenameDefaultBehavior struct { +- DefaultBehavior bool `json:"defaultBehavior"` +-} +- +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#prepareRenameParams +-type PrepareRenameParams struct { +- TextDocumentPositionParams +- WorkDoneProgressParams +-} +- +-// @since 3.18.0 +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#prepareRenamePlaceholder +-type PrepareRenamePlaceholder struct { +- Range Range `json:"range"` +- Placeholder string `json:"placeholder"` +-} +- +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#prepareRenameResult +-type PrepareRenameResult = PrepareRenamePlaceholder // (alias) +-type PrepareSupportDefaultBehavior uint32 +- +-// A previous result id in a workspace pull request. +-// +-// @since 3.17.0 +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#previousResultId +-type PreviousResultID struct { +- // The URI for which the client knows a +- // result id. +- URI DocumentURI `json:"uri"` +- // The value of the previous result id. +- Value string `json:"value"` +-} +- +-// A previous result id in a workspace pull request. +-// +-// @since 3.17.0 +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#previousResultId +-type PreviousResultId struct { +- // The URI for which the client knows a +- // result id. +- URI DocumentURI `json:"uri"` +- // The value of the previous result id. +- Value string `json:"value"` +-} +- +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#progressParams +-type ProgressParams struct { +- // The progress token provided by the client or server. +- Token ProgressToken `json:"token"` +- // The progress data. +- Value any `json:"value"` +-} +- +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#progressToken +-type ProgressToken = any // (alias) +-// The publish diagnostic client capabilities. +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#publishDiagnosticsClientCapabilities +-type PublishDiagnosticsClientCapabilities struct { +- // Whether the client interprets the version property of the +- // `textDocument/publishDiagnostics` notification's parameter. +- // +- // @since 3.15.0 +- VersionSupport bool `json:"versionSupport,omitempty"` +- DiagnosticsCapabilities +-} +- +-// The publish diagnostic notification's parameters. +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#publishDiagnosticsParams +-type PublishDiagnosticsParams struct { +- // The URI for which diagnostic information is reported. +- URI DocumentURI `json:"uri"` +- // Optionally, the version number of the document the diagnostics are published for. +- // +- // @since 3.15.0 +- Version int32 `json:"version,omitempty"` +- // An array of diagnostic information items. +- Diagnostics []Diagnostic `json:"diagnostics"` +-} +- +-// A range in a text document expressed as (zero-based) start and end positions.
+-// +-// If you want to specify a range that contains a line including the line ending +-// character(s) then use an end position denoting the start of the next line. +-// For example: +-// ```ts +-// +-// { +-// start: { line: 5, character: 23 } +-// end : { line 6, character : 0 } +-// } +-// +-// ``` +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#range +-type Range struct { +- // The range's start position. +- Start Position `json:"start"` +- // The range's end position. +- End Position `json:"end"` +-} +- +-// Client Capabilities for a {@link ReferencesRequest}. +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#referenceClientCapabilities +-type ReferenceClientCapabilities struct { +- // Whether references supports dynamic registration. +- DynamicRegistration bool `json:"dynamicRegistration,omitempty"` +-} +- +-// Value-object that contains additional information when +-// requesting references. +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#referenceContext +-type ReferenceContext struct { +- // Include the declaration of the current symbol. +- IncludeDeclaration bool `json:"includeDeclaration"` +-} +- +-// Reference options. +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#referenceOptions +-type ReferenceOptions struct { +- WorkDoneProgressOptions +-} +- +-// Parameters for a {@link ReferencesRequest}. +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#referenceParams +-type ReferenceParams struct { +- Context ReferenceContext `json:"context"` +- TextDocumentPositionParams +- WorkDoneProgressParams +- PartialResultParams +-} +- +-// Registration options for a {@link ReferencesRequest}. +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#referenceRegistrationOptions +-type ReferenceRegistrationOptions struct { +- TextDocumentRegistrationOptions +- ReferenceOptions +-} +- +-// General parameters to register for a notification or to register a provider. +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#registration +-type Registration struct { +- // The id used to register the request. The id can be used to deregister +- // the request again. +- ID string `json:"id"` +- // The method / capability to register for. +- Method string `json:"method"` +- // Options necessary for the registration. +- RegisterOptions any `json:"registerOptions,omitempty"` +-} +- +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#registrationParams +-type RegistrationParams struct { +- Registrations []Registration `json:"registrations"` +-} +- +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#regularExpressionEngineKind +-type RegularExpressionEngineKind = string // (alias) +-// Client capabilities specific to regular expressions. +-// +-// @since 3.16.0 +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#regularExpressionsClientCapabilities +-type RegularExpressionsClientCapabilities struct { +- // The engine's name. +- Engine RegularExpressionEngineKind `json:"engine"` +- // The engine's version. 
+- Version string `json:"version,omitempty"` +-} +- +-// A full diagnostic report with a set of related documents. +-// +-// @since 3.17.0 +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#relatedFullDocumentDiagnosticReport +-type RelatedFullDocumentDiagnosticReport struct { +- // Diagnostics of related documents. This information is useful +- // in programming languages where code in a file A can generate +- // diagnostics in a file B which A depends on. An example of +- // such a language is C/C++ where macro definitions in a file +- // a.cpp result in errors in a header file b.hpp. +- // +- // @since 3.17.0 +- RelatedDocuments map[DocumentURI]any `json:"relatedDocuments,omitempty"` +- FullDocumentDiagnosticReport +-} +- +-// An unchanged diagnostic report with a set of related documents. +-// +-// @since 3.17.0 +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#relatedUnchangedDocumentDiagnosticReport +-type RelatedUnchangedDocumentDiagnosticReport struct { +- // Diagnostics of related documents. This information is useful +- // in programming languages where code in a file A can generate +- // diagnostics in a file B which A depends on. An example of +- // such a language is C/C++ where macro definitions in a file +- // a.cpp result in errors in a header file b.hpp. +- // +- // @since 3.17.0 +- RelatedDocuments map[DocumentURI]any `json:"relatedDocuments,omitempty"` +- UnchangedDocumentDiagnosticReport +-} +- +-// A relative pattern is a helper to construct glob patterns that are matched +-// relatively to a base URI. The common value for a `baseUri` is a workspace +-// folder root, but it can be another absolute URI as well. +-// +-// @since 3.17.0 +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#relativePattern +-type RelativePattern struct { +- // A workspace folder or a base URI to which this pattern will be matched +- // against relatively. +- BaseURI DocumentURI `json:"baseUri"` +- // The actual glob pattern. +- Pattern Pattern `json:"pattern"` +-} +- +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#renameClientCapabilities +-type RenameClientCapabilities struct { +- // Whether rename supports dynamic registration. +- DynamicRegistration bool `json:"dynamicRegistration,omitempty"` +- // Client supports testing for validity of rename operations +- // before execution. +- // +- // @since 3.12.0 +- PrepareSupport bool `json:"prepareSupport,omitempty"` +- // Client supports the default behavior result. +- // +- // The value indicates the default behavior used by the +- // client. +- // +- // @since 3.16.0 +- PrepareSupportDefaultBehavior *PrepareSupportDefaultBehavior `json:"prepareSupportDefaultBehavior,omitempty"` +- // Whether the client honors the change annotations in +- // text edits and resource operations returned via the +- // rename request's workspace edit by, for example, presenting +- // the workspace edit in the user interface and asking +- // for confirmation. +- // +- // @since 3.16.0 +- HonorsChangeAnnotations bool `json:"honorsChangeAnnotations,omitempty"` +-} +- +-// Rename file operation +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#renameFile +-type RenameFile struct { +- // A rename +- Kind string `json:"kind"` +- // The old (existing) location.
+- OldURI DocumentURI `json:"oldUri"` +- // The new location. +- NewURI DocumentURI `json:"newUri"` +- // Rename options. +- Options *RenameFileOptions `json:"options,omitempty"` +- ResourceOperation +-} +- +-// Rename file options +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#renameFileOptions +-type RenameFileOptions struct { +- // Overwrite target if existing. Overwrite wins over `ignoreIfExists` +- Overwrite bool `json:"overwrite,omitempty"` +- // Ignores if target exists. +- IgnoreIfExists bool `json:"ignoreIfExists,omitempty"` +-} +- +-// The parameters sent in notifications/requests for user-initiated renames of +-// files. +-// +-// @since 3.16.0 +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#renameFilesParams +-type RenameFilesParams struct { +- // An array of all files/folders renamed in this operation. When a folder is renamed, only +- // the folder will be included, and not its children. +- Files []FileRename `json:"files"` +-} +- +-// Provider options for a {@link RenameRequest}. +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#renameOptions +-type RenameOptions struct { +- // Renames should be checked and tested before being executed. +- // +- // @since version 3.12.0 +- PrepareProvider bool `json:"prepareProvider,omitempty"` +- WorkDoneProgressOptions +-} +- +-// The parameters of a {@link RenameRequest}. +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#renameParams +-type RenameParams struct { +- // The document to rename. +- TextDocument TextDocumentIdentifier `json:"textDocument"` +- // The position at which this request was sent. +- Position Position `json:"position"` +- // The new name of the symbol. If the given name is not valid the +- // request must return a {@link ResponseError} with an +- // appropriate message set. +- NewName string `json:"newName"` +- WorkDoneProgressParams +-} +- +-// Registration options for a {@link RenameRequest}. +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#renameRegistrationOptions +-type RenameRegistrationOptions struct { +- TextDocumentRegistrationOptions +- RenameOptions +-} +- +-// A generic resource operation. +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#resourceOperation +-type ResourceOperation struct { +- // The resource operation kind. +- Kind string `json:"kind"` +- // An optional annotation identifier describing the operation. +- // +- // @since 3.16.0 +- AnnotationID *ChangeAnnotationIdentifier `json:"annotationId,omitempty"` +-} +-type ResourceOperationKind string +- +-// Save options. +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#saveOptions +-type SaveOptions struct { +- // The client is supposed to include the content on save. +- IncludeText bool `json:"includeText,omitempty"` +-} +- +-// Describes the currently selected completion item. +-// +-// @since 3.18.0 +-// @proposed +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#selectedCompletionInfo +-type SelectedCompletionInfo struct { +- // The range that will be replaced if this completion item is accepted. +- Range Range `json:"range"` +- // The text the range will be replaced with if this completion is accepted. 
+- Text string `json:"text"` +-} +- +-// A selection range represents a part of a selection hierarchy. A selection range +-// may have a parent selection range that contains it. +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#selectionRange +-type SelectionRange struct { +- // The {@link Range range} of this selection range. +- Range Range `json:"range"` +- // The parent selection range containing this range. Therefore `parent.range` must contain `this.range`. +- Parent *SelectionRange `json:"parent,omitempty"` +-} +- +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#selectionRangeClientCapabilities +-type SelectionRangeClientCapabilities struct { +- // Whether implementation supports dynamic registration for selection range providers. If this is set to `true` +- // the client supports the new `SelectionRangeRegistrationOptions` return value for the corresponding server +- // capability as well. +- DynamicRegistration bool `json:"dynamicRegistration,omitempty"` +-} +- +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#selectionRangeOptions +-type SelectionRangeOptions struct { +- WorkDoneProgressOptions +-} +- +-// A parameter literal used in selection range requests. +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#selectionRangeParams +-type SelectionRangeParams struct { +- // The text document. +- TextDocument TextDocumentIdentifier `json:"textDocument"` +- // The positions inside the text document. +- Positions []Position `json:"positions"` +- WorkDoneProgressParams +- PartialResultParams +-} +- +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#selectionRangeRegistrationOptions +-type SelectionRangeRegistrationOptions struct { +- SelectionRangeOptions +- TextDocumentRegistrationOptions +- StaticRegistrationOptions +-} +- +-// A set of predefined token modifiers. This set is not fixed, +-// and clients can specify additional token types via the +-// corresponding client capabilities. +-// +-// @since 3.16.0 +-type SemanticTokenModifiers string +- +-// A set of predefined token types. This set is not fixed, +-// and clients can specify additional token types via the +-// corresponding client capabilities. +-// +-// @since 3.16.0 +-type SemanticTokenTypes string +- +-// @since 3.16.0 +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#semanticTokens +-type SemanticTokens struct { +- // An optional result id. If provided and clients support delta updating +- // the client will include the result id in the next semantic token request. +- // A server can then instead of computing all semantic tokens again simply +- // send a delta. +- ResultID string `json:"resultId,omitempty"` +- // The actual tokens. +- Data []uint32 `json:"data"` +-} +- +-// @since 3.16.0 +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#semanticTokensClientCapabilities +-type SemanticTokensClientCapabilities struct { +- // Whether implementation supports dynamic registration. If this is set to `true` +- // the client supports the new `(TextDocumentRegistrationOptions & StaticRegistrationOptions)` +- // return value for the corresponding server capability as well.
+- DynamicRegistration bool `json:"dynamicRegistration,omitempty"` +- // Which requests the client supports and might send to the server +- // depending on the server's capability. Please note that clients might not +- // show semantic tokens or degrade some of the user experience if a range +- // or full request is advertised by the client but not provided by the +- // server. If for example the client capability `requests.full` and +- // `request.range` are both set to true but the server only provides a +- // range provider the client might not render a minimap correctly or might +- // even decide to not show any semantic tokens at all. +- Requests ClientSemanticTokensRequestOptions `json:"requests"` +- // The token types that the client supports. +- TokenTypes []string `json:"tokenTypes"` +- // The token modifiers that the client supports. +- TokenModifiers []string `json:"tokenModifiers"` +- // The token formats the clients supports. +- Formats []TokenFormat `json:"formats"` +- // Whether the client supports tokens that can overlap each other. +- OverlappingTokenSupport bool `json:"overlappingTokenSupport,omitempty"` +- // Whether the client supports tokens that can span multiple lines. +- MultilineTokenSupport bool `json:"multilineTokenSupport,omitempty"` +- // Whether the client allows the server to actively cancel a +- // semantic token request, e.g. supports returning +- // LSPErrorCodes.ServerCancelled. If a server does the client +- // needs to retrigger the request. +- // +- // @since 3.17.0 +- ServerCancelSupport bool `json:"serverCancelSupport,omitempty"` +- // Whether the client uses semantic tokens to augment existing +- // syntax tokens. If set to `true` client side created syntax +- // tokens and semantic tokens are both used for colorization. If +- // set to `false` the client only uses the returned semantic tokens +- // for colorization. +- // +- // If the value is `undefined` then the client behavior is not +- // specified. +- // +- // @since 3.17.0 +- AugmentsSyntaxTokens bool `json:"augmentsSyntaxTokens,omitempty"` +-} +- +-// @since 3.16.0 +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#semanticTokensDelta +-type SemanticTokensDelta struct { +- ResultID string `json:"resultId,omitempty"` +- // The semantic token edits to transform a previous result into a new result. +- Edits []SemanticTokensEdit `json:"edits"` +-} +- +-// @since 3.16.0 +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#semanticTokensDeltaParams +-type SemanticTokensDeltaParams struct { +- // The text document. +- TextDocument TextDocumentIdentifier `json:"textDocument"` +- // The result id of a previous response. The result Id can either point to a full response +- // or a delta response depending on what was received last. +- PreviousResultID string `json:"previousResultId"` +- WorkDoneProgressParams +- PartialResultParams +-} +- +-// @since 3.16.0 +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#semanticTokensDeltaPartialResult +-type SemanticTokensDeltaPartialResult struct { +- Edits []SemanticTokensEdit `json:"edits"` +-} +- +-// @since 3.16.0 +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#semanticTokensEdit +-type SemanticTokensEdit struct { +- // The start offset of the edit. +- Start uint32 `json:"start"` +- // The count of elements to remove. 
+- DeleteCount uint32 `json:"deleteCount"` +- // The elements to insert. +- Data []uint32 `json:"data,omitempty"` +-} +- +-// Semantic tokens options to support deltas for full documents +-// +-// @since 3.18.0 +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#semanticTokensFullDelta +-type SemanticTokensFullDelta struct { +- // The server supports deltas for full documents. +- Delta bool `json:"delta,omitempty"` +-} +- +-// @since 3.16.0 +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#semanticTokensLegend +-type SemanticTokensLegend struct { +- // The token types a server uses. +- TokenTypes []string `json:"tokenTypes"` +- // The token modifiers a server uses. +- TokenModifiers []string `json:"tokenModifiers"` +-} +- +-// @since 3.16.0 +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#semanticTokensOptions +-type SemanticTokensOptions struct { +- // The legend used by the server +- Legend SemanticTokensLegend `json:"legend"` +- // Server supports providing semantic tokens for a specific range +- // of a document. +- Range *Or_SemanticTokensOptions_range `json:"range,omitempty"` +- // Server supports providing semantic tokens for a full document. +- Full *Or_SemanticTokensOptions_full `json:"full,omitempty"` +- WorkDoneProgressOptions +-} +- +-// @since 3.16.0 +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#semanticTokensParams +-type SemanticTokensParams struct { +- // The text document. +- TextDocument TextDocumentIdentifier `json:"textDocument"` +- WorkDoneProgressParams +- PartialResultParams +-} +- +-// @since 3.16.0 +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#semanticTokensPartialResult +-type SemanticTokensPartialResult struct { +- Data []uint32 `json:"data"` +-} +- +-// @since 3.16.0 +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#semanticTokensRangeParams +-type SemanticTokensRangeParams struct { +- // The text document. +- TextDocument TextDocumentIdentifier `json:"textDocument"` +- // The range the semantic tokens are requested for. +- Range Range `json:"range"` +- WorkDoneProgressParams +- PartialResultParams +-} +- +-// @since 3.16.0 +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#semanticTokensRegistrationOptions +-type SemanticTokensRegistrationOptions struct { +- TextDocumentRegistrationOptions +- SemanticTokensOptions +- StaticRegistrationOptions +-} +- +-// @since 3.16.0 +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#semanticTokensWorkspaceClientCapabilities +-type SemanticTokensWorkspaceClientCapabilities struct { +- // Whether the client implementation supports a refresh request sent from +- // the server to the client. +- // +- // Note that this event is global and will force the client to refresh all +- // semantic tokens currently shown. It should be used with absolute care +- // and is useful for situation where a server for example detects a project +- // wide change that requires such a calculation. +- RefreshSupport bool `json:"refreshSupport,omitempty"` +-} +- +-// Defines the capabilities provided by a language +-// server. 
+-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#serverCapabilities +-type ServerCapabilities struct { +- // The position encoding the server picked from the encodings offered +- // by the client via the client capability `general.positionEncodings`. +- // +- // If the client didn't provide any position encodings the only valid +- // value that a server can return is 'utf-16'. +- // +- // If omitted it defaults to 'utf-16'. +- // +- // @since 3.17.0 +- PositionEncoding *PositionEncodingKind `json:"positionEncoding,omitempty"` +- // Defines how text documents are synced. Is either a detailed structure +- // defining each notification or for backwards compatibility the +- // TextDocumentSyncKind number. +- TextDocumentSync any `json:"textDocumentSync,omitempty"` +- // Defines how notebook documents are synced. +- // +- // @since 3.17.0 +- NotebookDocumentSync *Or_ServerCapabilities_notebookDocumentSync `json:"notebookDocumentSync,omitempty"` +- // The server provides completion support. +- CompletionProvider *CompletionOptions `json:"completionProvider,omitempty"` +- // The server provides hover support. +- HoverProvider *Or_ServerCapabilities_hoverProvider `json:"hoverProvider,omitempty"` +- // The server provides signature help support. +- SignatureHelpProvider *SignatureHelpOptions `json:"signatureHelpProvider,omitempty"` +- // The server provides Goto Declaration support. +- DeclarationProvider *Or_ServerCapabilities_declarationProvider `json:"declarationProvider,omitempty"` +- // The server provides goto definition support. +- DefinitionProvider *Or_ServerCapabilities_definitionProvider `json:"definitionProvider,omitempty"` +- // The server provides Goto Type Definition support. +- TypeDefinitionProvider *Or_ServerCapabilities_typeDefinitionProvider `json:"typeDefinitionProvider,omitempty"` +- // The server provides Goto Implementation support. +- ImplementationProvider *Or_ServerCapabilities_implementationProvider `json:"implementationProvider,omitempty"` +- // The server provides find references support. +- ReferencesProvider *Or_ServerCapabilities_referencesProvider `json:"referencesProvider,omitempty"` +- // The server provides document highlight support. +- DocumentHighlightProvider *Or_ServerCapabilities_documentHighlightProvider `json:"documentHighlightProvider,omitempty"` +- // The server provides document symbol support. +- DocumentSymbolProvider *Or_ServerCapabilities_documentSymbolProvider `json:"documentSymbolProvider,omitempty"` +- // The server provides code actions. CodeActionOptions may only be +- // specified if the client states that it supports +- // `codeActionLiteralSupport` in its initial `initialize` request. +- CodeActionProvider any `json:"codeActionProvider,omitempty"` +- // The server provides code lens. +- CodeLensProvider *CodeLensOptions `json:"codeLensProvider,omitempty"` +- // The server provides document link support. +- DocumentLinkProvider *DocumentLinkOptions `json:"documentLinkProvider,omitempty"` +- // The server provides color provider support. +- ColorProvider *Or_ServerCapabilities_colorProvider `json:"colorProvider,omitempty"` +- // The server provides workspace symbol support. +- WorkspaceSymbolProvider *Or_ServerCapabilities_workspaceSymbolProvider `json:"workspaceSymbolProvider,omitempty"` +- // The server provides document formatting. 
+- DocumentFormattingProvider *Or_ServerCapabilities_documentFormattingProvider `json:"documentFormattingProvider,omitempty"` +- // The server provides document range formatting. +- DocumentRangeFormattingProvider *Or_ServerCapabilities_documentRangeFormattingProvider `json:"documentRangeFormattingProvider,omitempty"` +- // The server provides document formatting on typing. +- DocumentOnTypeFormattingProvider *DocumentOnTypeFormattingOptions `json:"documentOnTypeFormattingProvider,omitempty"` +- // The server provides rename support. RenameOptions may only be +- // specified if the client states that it supports +- // `prepareSupport` in its initial `initialize` request. +- RenameProvider any `json:"renameProvider,omitempty"` +- // The server provides folding provider support. +- FoldingRangeProvider *Or_ServerCapabilities_foldingRangeProvider `json:"foldingRangeProvider,omitempty"` +- // The server provides selection range support. +- SelectionRangeProvider *Or_ServerCapabilities_selectionRangeProvider `json:"selectionRangeProvider,omitempty"` +- // The server provides execute command support. +- ExecuteCommandProvider *ExecuteCommandOptions `json:"executeCommandProvider,omitempty"` +- // The server provides call hierarchy support. +- // +- // @since 3.16.0 +- CallHierarchyProvider *Or_ServerCapabilities_callHierarchyProvider `json:"callHierarchyProvider,omitempty"` +- // The server provides linked editing range support. +- // +- // @since 3.16.0 +- LinkedEditingRangeProvider *Or_ServerCapabilities_linkedEditingRangeProvider `json:"linkedEditingRangeProvider,omitempty"` +- // The server provides semantic tokens support. +- // +- // @since 3.16.0 +- SemanticTokensProvider any `json:"semanticTokensProvider,omitempty"` +- // The server provides moniker support. +- // +- // @since 3.16.0 +- MonikerProvider *Or_ServerCapabilities_monikerProvider `json:"monikerProvider,omitempty"` +- // The server provides type hierarchy support. +- // +- // @since 3.17.0 +- TypeHierarchyProvider *Or_ServerCapabilities_typeHierarchyProvider `json:"typeHierarchyProvider,omitempty"` +- // The server provides inline values. +- // +- // @since 3.17.0 +- InlineValueProvider *Or_ServerCapabilities_inlineValueProvider `json:"inlineValueProvider,omitempty"` +- // The server provides inlay hints. +- // +- // @since 3.17.0 +- InlayHintProvider any `json:"inlayHintProvider,omitempty"` +- // The server has support for pull model diagnostics. +- // +- // @since 3.17.0 +- DiagnosticProvider *Or_ServerCapabilities_diagnosticProvider `json:"diagnosticProvider,omitempty"` +- // Inline completion options used during static registration. +- // +- // @since 3.18.0 +- // @proposed +- InlineCompletionProvider *Or_ServerCapabilities_inlineCompletionProvider `json:"inlineCompletionProvider,omitempty"` +- // Workspace specific server capabilities. +- Workspace *WorkspaceOptions `json:"workspace,omitempty"` +- // Experimental server capabilities. +- Experimental any `json:"experimental,omitempty"` +-} +- +-// @since 3.18.0 +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#serverCompletionItemOptions +-type ServerCompletionItemOptions struct { +- // The server has support for completion item label +- // details (see also `CompletionItemLabelDetails`) when +- // receiving a completion item in a resolve call. 
+- // +- // @since 3.17.0 +- LabelDetailsSupport bool `json:"labelDetailsSupport,omitempty"` +-} +- +-// Information about the server +-// +-// @since 3.15.0 +-// @since 3.18.0 ServerInfo type name added. +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#serverInfo +-type ServerInfo struct { +- // The name of the server as defined by the server. +- Name string `json:"name"` +- // The server's version as defined by the server. +- Version string `json:"version,omitempty"` +-} +- +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#setTraceParams +-type SetTraceParams struct { +- Value TraceValue `json:"value"` +-} +- +-// Client capabilities for the showDocument request. +-// +-// @since 3.16.0 +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#showDocumentClientCapabilities +-type ShowDocumentClientCapabilities struct { +- // The client has support for the showDocument +- // request. +- Support bool `json:"support"` +-} +- +-// Params to show a resource in the UI. +-// +-// @since 3.16.0 +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#showDocumentParams +-type ShowDocumentParams struct { +- // The uri to show. +- URI URI `json:"uri"` +- // Indicates to show the resource in an external program. +- // To show, for example, `https://code.visualstudio.com/` +- // in the default WEB browser set `external` to `true`. +- External bool `json:"external,omitempty"` +- // An optional property to indicate whether the editor +- // showing the document should take focus or not. +- // Clients might ignore this property if an external +- // program is started. +- TakeFocus bool `json:"takeFocus,omitempty"` +- // An optional selection range if the document is a text +- // document. Clients might ignore the property if an +- // external program is started or the file is not a text +- // file. +- Selection *Range `json:"selection,omitempty"` +-} +- +-// The result of a showDocument request. +-// +-// @since 3.16.0 +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#showDocumentResult +-type ShowDocumentResult struct { +- // A boolean indicating if the show was successful. +- Success bool `json:"success"` +-} +- +-// The parameters of a notification message. +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#showMessageParams +-type ShowMessageParams struct { +- // The message type. See {@link MessageType} +- Type MessageType `json:"type"` +- // The actual message. +- Message string `json:"message"` +-} +- +-// Show message request client capabilities +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#showMessageRequestClientCapabilities +-type ShowMessageRequestClientCapabilities struct { +- // Capabilities specific to the `MessageActionItem` type. +- MessageActionItem *ClientShowMessageActionItemOptions `json:"messageActionItem,omitempty"` +-} +- +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#showMessageRequestParams +-type ShowMessageRequestParams struct { +- // The message type. See {@link MessageType} +- Type MessageType `json:"type"` +- // The actual message. +- Message string `json:"message"` +- // The message action items to present. 
+- Actions []MessageActionItem `json:"actions,omitempty"` +-} +- +-// Signature help represents the signature of something +-// callable. There can be multiple signature but only one +-// active and only one active parameter. +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#signatureHelp +-type SignatureHelp struct { +- // One or more signatures. +- Signatures []SignatureInformation `json:"signatures"` +- // The active signature. If omitted or the value lies outside the +- // range of `signatures` the value defaults to zero or is ignored if +- // the `SignatureHelp` has no signatures. +- // +- // Whenever possible implementors should make an active decision about +- // the active signature and shouldn't rely on a default value. +- // +- // In future version of the protocol this property might become +- // mandatory to better express this. +- ActiveSignature uint32 `json:"activeSignature"` +- // The active parameter of the active signature. +- // +- // If `null`, no parameter of the signature is active (for example a named +- // argument that does not match any declared parameters). This is only valid +- // if the client specifies the client capability +- // `textDocument.signatureHelp.noActiveParameterSupport === true` +- // +- // If omitted or the value lies outside the range of +- // `signatures[activeSignature].parameters` defaults to 0 if the active +- // signature has parameters. +- // +- // If the active signature has no parameters it is ignored. +- // +- // In future version of the protocol this property might become +- // mandatory (but still nullable) to better express the active parameter if +- // the active signature does have any. +- ActiveParameter *uint32 `json:"activeParameter,omitempty"` +-} +- +-// Client Capabilities for a {@link SignatureHelpRequest}. +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#signatureHelpClientCapabilities +-type SignatureHelpClientCapabilities struct { +- // Whether signature help supports dynamic registration. +- DynamicRegistration bool `json:"dynamicRegistration,omitempty"` +- // The client supports the following `SignatureInformation` +- // specific properties. +- SignatureInformation *ClientSignatureInformationOptions `json:"signatureInformation,omitempty"` +- // The client supports to send additional context information for a +- // `textDocument/signatureHelp` request. A client that opts into +- // contextSupport will also support the `retriggerCharacters` on +- // `SignatureHelpOptions`. +- // +- // @since 3.15.0 +- ContextSupport bool `json:"contextSupport,omitempty"` +-} +- +-// Additional information about the context in which a signature help request was triggered. +-// +-// @since 3.15.0 +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#signatureHelpContext +-type SignatureHelpContext struct { +- // Action that caused signature help to be triggered. +- TriggerKind SignatureHelpTriggerKind `json:"triggerKind"` +- // Character that caused signature help to be triggered. +- // +- // This is undefined when `triggerKind !== SignatureHelpTriggerKind.TriggerCharacter` +- TriggerCharacter string `json:"triggerCharacter,omitempty"` +- // `true` if signature help was already showing when it was triggered. 
+- // +- // Retriggers occurs when the signature help is already active and can be caused by actions such as +- // typing a trigger character, a cursor move, or document content changes. +- IsRetrigger bool `json:"isRetrigger"` +- // The currently active `SignatureHelp`. +- // +- // The `activeSignatureHelp` has its `SignatureHelp.activeSignature` field updated based on +- // the user navigating through available signatures. +- ActiveSignatureHelp *SignatureHelp `json:"activeSignatureHelp,omitempty"` +-} +- +-// Server Capabilities for a {@link SignatureHelpRequest}. +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#signatureHelpOptions +-type SignatureHelpOptions struct { +- // List of characters that trigger signature help automatically. +- TriggerCharacters []string `json:"triggerCharacters,omitempty"` +- // List of characters that re-trigger signature help. +- // +- // These trigger characters are only active when signature help is already showing. All trigger characters +- // are also counted as re-trigger characters. +- // +- // @since 3.15.0 +- RetriggerCharacters []string `json:"retriggerCharacters,omitempty"` +- WorkDoneProgressOptions +-} +- +-// Parameters for a {@link SignatureHelpRequest}. +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#signatureHelpParams +-type SignatureHelpParams struct { +- // The signature help context. This is only available if the client specifies +- // to send this using the client capability `textDocument.signatureHelp.contextSupport === true` +- // +- // @since 3.15.0 +- Context *SignatureHelpContext `json:"context,omitempty"` +- TextDocumentPositionParams +- WorkDoneProgressParams +-} +- +-// Registration options for a {@link SignatureHelpRequest}. +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#signatureHelpRegistrationOptions +-type SignatureHelpRegistrationOptions struct { +- TextDocumentRegistrationOptions +- SignatureHelpOptions +-} +- +-// How a signature help was triggered. +-// +-// @since 3.15.0 +-type SignatureHelpTriggerKind uint32 +- +-// Represents the signature of something callable. A signature +-// can have a label, like a function-name, a doc-comment, and +-// a set of parameters. +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#signatureInformation +-type SignatureInformation struct { +- // The label of this signature. Will be shown in +- // the UI. +- Label string `json:"label"` +- // The human-readable doc-comment of this signature. Will be shown +- // in the UI but can be omitted. +- Documentation *Or_SignatureInformation_documentation `json:"documentation,omitempty"` +- // The parameters of this signature. +- Parameters []ParameterInformation `json:"parameters,omitempty"` +- // The index of the active parameter. +- // +- // If `null`, no parameter of the signature is active (for example a named +- // argument that does not match any declared parameters). This is only valid +- // if the client specifies the client capability +- // `textDocument.signatureHelp.noActiveParameterSupport === true` +- // +- // If provided (or `null`), this is used in place of +- // `SignatureHelp.activeParameter`. +- // +- // @since 3.16.0 +- ActiveParameter *uint32 `json:"activeParameter,omitempty"` +-} +- +-// An interactive text edit. 
+-// +-// @since 3.18.0 +-// @proposed +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#snippetTextEdit +-type SnippetTextEdit struct { +- // The range of the text document to be manipulated. +- Range Range `json:"range"` +- // The snippet to be inserted. +- Snippet StringValue `json:"snippet"` +- // The actual identifier of the snippet edit. +- AnnotationID *ChangeAnnotationIdentifier `json:"annotationId,omitempty"` +-} +- +-// @since 3.18.0 +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#staleRequestSupportOptions +-type StaleRequestSupportOptions struct { +- // The client will actively cancel the request. +- Cancel bool `json:"cancel"` +- // The list of requests for which the client +- // will retry the request if it receives a +- // response with error code `ContentModified` +- RetryOnContentModified []string `json:"retryOnContentModified"` +-} +- +-// Static registration options to be returned in the initialize +-// request. +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#staticRegistrationOptions +-type StaticRegistrationOptions struct { +- // The id used to register the request. The id can be used to deregister +- // the request again. See also Registration#id. +- ID string `json:"id,omitempty"` +-} +- +-// A string value used as a snippet is a template which allows to insert text +-// and to control the editor cursor when insertion happens. +-// +-// A snippet can define tab stops and placeholders with `$1`, `$2` +-// and `${3:foo}`. `$0` defines the final tab stop, it defaults to +-// the end of the snippet. Variables are defined with `$name` and +-// `${name:default value}`. +-// +-// @since 3.18.0 +-// @proposed +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#stringValue +-type StringValue struct { +- // The kind of string value. +- Kind string `json:"kind"` +- // The snippet string. +- Value string `json:"value"` +-} +- +-// Represents information about programming constructs like variables, classes, +-// interfaces etc. +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#symbolInformation +-type SymbolInformation struct { +- // extends BaseSymbolInformation +- // Indicates if this symbol is deprecated. +- // +- // @deprecated Use tags instead +- Deprecated bool `json:"deprecated,omitempty"` +- // The location of this symbol. The location's range is used by a tool +- // to reveal the location in the editor. If the symbol is selected in the +- // tool the range's start information is used to position the cursor. So +- // the range usually spans more than the actual symbol's name and does +- // normally include things like visibility modifiers. +- // +- // The range doesn't have to denote a node range in the sense of an abstract +- // syntax tree. It can therefore not be used to re-construct a hierarchy of +- // the symbols. +- Location Location `json:"location"` +- // The name of this symbol. +- Name string `json:"name"` +- // The kind of this symbol. +- Kind SymbolKind `json:"kind"` +- // Tags for this symbol. +- // +- // @since 3.16.0 +- Tags []SymbolTag `json:"tags,omitempty"` +- // The name of the symbol containing this symbol. This information is for +- // user interface purposes (e.g. to render a qualifier in the user interface +- // if necessary). 
It can't be used to re-infer a hierarchy for the document +- // symbols. +- ContainerName string `json:"containerName,omitempty"` +-} +- +-// A symbol kind. +-type SymbolKind uint32 +- +-// Symbol tags are extra annotations that tweak the rendering of a symbol. +-// +-// @since 3.16 +-type SymbolTag uint32 +- +-// Describe options to be used when registered for text document change events. +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#textDocumentChangeRegistrationOptions +-type TextDocumentChangeRegistrationOptions struct { +- // How documents are synced to the server. +- SyncKind TextDocumentSyncKind `json:"syncKind"` +- TextDocumentRegistrationOptions +-} +- +-// Text document specific client capabilities. +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#textDocumentClientCapabilities +-type TextDocumentClientCapabilities struct { +- // Defines which synchronization capabilities the client supports. +- Synchronization *TextDocumentSyncClientCapabilities `json:"synchronization,omitempty"` +- // Defines which filters the client supports. +- // +- // @since 3.18.0 +- Filters *TextDocumentFilterClientCapabilities `json:"filters,omitempty"` +- // Capabilities specific to the `textDocument/completion` request. +- Completion CompletionClientCapabilities `json:"completion,omitempty"` +- // Capabilities specific to the `textDocument/hover` request. +- Hover *HoverClientCapabilities `json:"hover,omitempty"` +- // Capabilities specific to the `textDocument/signatureHelp` request. +- SignatureHelp *SignatureHelpClientCapabilities `json:"signatureHelp,omitempty"` +- // Capabilities specific to the `textDocument/declaration` request. +- // +- // @since 3.14.0 +- Declaration *DeclarationClientCapabilities `json:"declaration,omitempty"` +- // Capabilities specific to the `textDocument/definition` request. +- Definition *DefinitionClientCapabilities `json:"definition,omitempty"` +- // Capabilities specific to the `textDocument/typeDefinition` request. +- // +- // @since 3.6.0 +- TypeDefinition *TypeDefinitionClientCapabilities `json:"typeDefinition,omitempty"` +- // Capabilities specific to the `textDocument/implementation` request. +- // +- // @since 3.6.0 +- Implementation *ImplementationClientCapabilities `json:"implementation,omitempty"` +- // Capabilities specific to the `textDocument/references` request. +- References *ReferenceClientCapabilities `json:"references,omitempty"` +- // Capabilities specific to the `textDocument/documentHighlight` request. +- DocumentHighlight *DocumentHighlightClientCapabilities `json:"documentHighlight,omitempty"` +- // Capabilities specific to the `textDocument/documentSymbol` request. +- DocumentSymbol DocumentSymbolClientCapabilities `json:"documentSymbol,omitempty"` +- // Capabilities specific to the `textDocument/codeAction` request. +- CodeAction CodeActionClientCapabilities `json:"codeAction,omitempty"` +- // Capabilities specific to the `textDocument/codeLens` request. +- CodeLens *CodeLensClientCapabilities `json:"codeLens,omitempty"` +- // Capabilities specific to the `textDocument/documentLink` request. +- DocumentLink *DocumentLinkClientCapabilities `json:"documentLink,omitempty"` +- // Capabilities specific to the `textDocument/documentColor` and the +- // `textDocument/colorPresentation` request. 
+- // +- // @since 3.6.0 +- ColorProvider *DocumentColorClientCapabilities `json:"colorProvider,omitempty"` +- // Capabilities specific to the `textDocument/formatting` request. +- Formatting *DocumentFormattingClientCapabilities `json:"formatting,omitempty"` +- // Capabilities specific to the `textDocument/rangeFormatting` request. +- RangeFormatting *DocumentRangeFormattingClientCapabilities `json:"rangeFormatting,omitempty"` +- // Capabilities specific to the `textDocument/onTypeFormatting` request. +- OnTypeFormatting *DocumentOnTypeFormattingClientCapabilities `json:"onTypeFormatting,omitempty"` +- // Capabilities specific to the `textDocument/rename` request. +- Rename *RenameClientCapabilities `json:"rename,omitempty"` +- // Capabilities specific to the `textDocument/foldingRange` request. +- // +- // @since 3.10.0 +- FoldingRange *FoldingRangeClientCapabilities `json:"foldingRange,omitempty"` +- // Capabilities specific to the `textDocument/selectionRange` request. +- // +- // @since 3.15.0 +- SelectionRange *SelectionRangeClientCapabilities `json:"selectionRange,omitempty"` +- // Capabilities specific to the `textDocument/publishDiagnostics` notification. +- PublishDiagnostics PublishDiagnosticsClientCapabilities `json:"publishDiagnostics,omitempty"` +- // Capabilities specific to the various call hierarchy requests. +- // +- // @since 3.16.0 +- CallHierarchy *CallHierarchyClientCapabilities `json:"callHierarchy,omitempty"` +- // Capabilities specific to the various semantic token request. +- // +- // @since 3.16.0 +- SemanticTokens SemanticTokensClientCapabilities `json:"semanticTokens,omitempty"` +- // Capabilities specific to the `textDocument/linkedEditingRange` request. +- // +- // @since 3.16.0 +- LinkedEditingRange *LinkedEditingRangeClientCapabilities `json:"linkedEditingRange,omitempty"` +- // Client capabilities specific to the `textDocument/moniker` request. +- // +- // @since 3.16.0 +- Moniker *MonikerClientCapabilities `json:"moniker,omitempty"` +- // Capabilities specific to the various type hierarchy requests. +- // +- // @since 3.17.0 +- TypeHierarchy *TypeHierarchyClientCapabilities `json:"typeHierarchy,omitempty"` +- // Capabilities specific to the `textDocument/inlineValue` request. +- // +- // @since 3.17.0 +- InlineValue *InlineValueClientCapabilities `json:"inlineValue,omitempty"` +- // Capabilities specific to the `textDocument/inlayHint` request. +- // +- // @since 3.17.0 +- InlayHint *InlayHintClientCapabilities `json:"inlayHint,omitempty"` +- // Capabilities specific to the diagnostic pull model. +- // +- // @since 3.17.0 +- Diagnostic *DiagnosticClientCapabilities `json:"diagnostic,omitempty"` +- // Client capabilities specific to inline completions. +- // +- // @since 3.18.0 +- // @proposed +- InlineCompletion *InlineCompletionClientCapabilities `json:"inlineCompletion,omitempty"` +-} +- +-// An event describing a change to a text document. If only a text is provided +-// it is considered to be the full content of the document. +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#textDocumentContentChangeEvent +-type TextDocumentContentChangeEvent = TextDocumentContentChangePartial // (alias) +-// @since 3.18.0 +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#textDocumentContentChangePartial +-type TextDocumentContentChangePartial struct { +- // The range of the document that changed. 
+- Range *Range `json:"range,omitempty"` +- // The optional length of the range that got replaced. +- // +- // @deprecated use range instead. +- RangeLength *uint32 `json:"rangeLength,omitempty"` +- // The new text for the provided range. +- Text string `json:"text"` +-} +- +-// @since 3.18.0 +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#textDocumentContentChangeWholeDocument +-type TextDocumentContentChangeWholeDocument struct { +- // The new text of the whole document. +- Text string `json:"text"` +-} +- +-// Client capabilities for a text document content provider. +-// +-// @since 3.18.0 +-// @proposed +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#textDocumentContentClientCapabilities +-type TextDocumentContentClientCapabilities struct { +- // Text document content provider supports dynamic registration. +- DynamicRegistration bool `json:"dynamicRegistration,omitempty"` +-} +- +-// Text document content provider options. +-// +-// @since 3.18.0 +-// @proposed +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#textDocumentContentOptions +-type TextDocumentContentOptions struct { +- // The schemes for which the server provides content. +- Schemes []string `json:"schemes"` +-} +- +-// Parameters for the `workspace/textDocumentContent` request. +-// +-// @since 3.18.0 +-// @proposed +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#textDocumentContentParams +-type TextDocumentContentParams struct { +- // The uri of the text document. +- URI DocumentURI `json:"uri"` +-} +- +-// Parameters for the `workspace/textDocumentContent/refresh` request. +-// +-// @since 3.18.0 +-// @proposed +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#textDocumentContentRefreshParams +-type TextDocumentContentRefreshParams struct { +- // The uri of the text document to refresh. +- URI DocumentURI `json:"uri"` +-} +- +-// Text document content provider registration options. +-// +-// @since 3.18.0 +-// @proposed +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#textDocumentContentRegistrationOptions +-type TextDocumentContentRegistrationOptions struct { +- TextDocumentContentOptions +- StaticRegistrationOptions +-} +- +-// Result of the `workspace/textDocumentContent` request. +-// +-// @since 3.18.0 +-// @proposed +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#textDocumentContentResult +-type TextDocumentContentResult struct { +- // The text content of the text document. Please note, that the content of +- // any subsequent open notifications for the text document might differ +- // from the returned content due to whitespace and line ending +- // normalizations done on the client +- Text string `json:"text"` +-} +- +-// Describes textual changes on a text document. A TextDocumentEdit describes all changes +-// on a document version Si and after they are applied move the document to version Si+1. +-// So the creator of a TextDocumentEdit doesn't need to sort the array of edits or do any +-// kind of ordering. However the edits must be non overlapping. 
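To make the non-overlapping requirement concrete, here is a minimal sketch of two edits destined for a single TextDocumentEdit, assuming the Position and Range definitions (Start/End, Line/Character) that appear elsewhere in this file; it is illustrative only:

	// Two edits touching disjoint ranges; their order in the slice is irrelevant.
	replace := TextEdit{
		Range:   Range{Start: Position{Line: 3, Character: 0}, End: Position{Line: 3, Character: 5}},
		NewText: "hello",
	}
	insert := TextEdit{
		// A pure insertion uses an empty range (start == end).
		Range:   Range{Start: Position{Line: 10, Character: 0}, End: Position{Line: 10, Character: 0}},
		NewText: "// appended comment\n",
	}
	_ = []TextEdit{insert, replace}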
+-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#textDocumentEdit +-type TextDocumentEdit struct { +- // The text document to change. +- TextDocument OptionalVersionedTextDocumentIdentifier `json:"textDocument"` +- // The edits to be applied. +- // +- // @since 3.16.0 - support for AnnotatedTextEdit. This is guarded using a +- // client capability. +- // +- // @since 3.18.0 - support for SnippetTextEdit. This is guarded using a +- // client capability. +- Edits []Or_TextDocumentEdit_edits_Elem `json:"edits"` +-} +- +-// A document filter denotes a document by different properties like +-// the {@link TextDocument.languageId language}, the {@link Uri.scheme scheme} of +-// its resource, or a glob-pattern that is applied to the {@link TextDocument.fileName path}. +-// +-// Glob patterns can have the following syntax: +-// +-// - `*` to match one or more characters in a path segment +-// - `?` to match on one character in a path segment +-// - `**` to match any number of path segments, including none +-// - `{}` to group sub patterns into an OR expression. (e.g. `**​/*.{ts,js}` matches all TypeScript and JavaScript files) +-// - `[]` to declare a range of characters to match in a path segment (e.g., `example.[0-9]` to match on `example.0`, `example.1`, …) +-// - `[!...]` to negate a range of characters to match in a path segment (e.g., `example.[!0-9]` to match on `example.a`, `example.b`, but not `example.0`) +-// +-// @sample A language filter that applies to typescript files on disk: `{ language: 'typescript', scheme: 'file' }` +-// @sample A language filter that applies to all package.json paths: `{ language: 'json', pattern: '**package.json' }` +-// +-// @since 3.17.0 +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#textDocumentFilter +-type TextDocumentFilter = Or_TextDocumentFilter // (alias) +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#textDocumentFilterClientCapabilities +-type TextDocumentFilterClientCapabilities struct { +- // The client supports Relative Patterns. +- // +- // @since 3.18.0 +- RelativePatternSupport bool `json:"relativePatternSupport,omitempty"` +-} +- +-// A document filter where `language` is required field. +-// +-// @since 3.18.0 +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#textDocumentFilterLanguage +-type TextDocumentFilterLanguage struct { +- // A language id, like `typescript`. +- Language string `json:"language"` +- // A Uri {@link Uri.scheme scheme}, like `file` or `untitled`. +- Scheme string `json:"scheme,omitempty"` +- // A glob pattern, like **​/*.{ts,js}. See TextDocumentFilter for examples. +- // +- // @since 3.18.0 - support for relative patterns. Whether clients support +- // relative patterns depends on the client capability +- // `textDocuments.filters.relativePatternSupport`. +- Pattern *GlobPattern `json:"pattern,omitempty"` +-} +- +-// A document filter where `pattern` is required field. +-// +-// @since 3.18.0 +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#textDocumentFilterPattern +-type TextDocumentFilterPattern struct { +- // A language id, like `typescript`. +- Language string `json:"language,omitempty"` +- // A Uri {@link Uri.scheme scheme}, like `file` or `untitled`. +- Scheme string `json:"scheme,omitempty"` +- // A glob pattern, like **​/*.{ts,js}. 
See TextDocumentFilter for examples. +- // +- // @since 3.18.0 - support for relative patterns. Whether clients support +- // relative patterns depends on the client capability +- // `textDocuments.filters.relativePatternSupport`. +- Pattern GlobPattern `json:"pattern"` +-} +- +-// A document filter where `scheme` is required field. +-// +-// @since 3.18.0 +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#textDocumentFilterScheme +-type TextDocumentFilterScheme struct { +- // A language id, like `typescript`. +- Language string `json:"language,omitempty"` +- // A Uri {@link Uri.scheme scheme}, like `file` or `untitled`. +- Scheme string `json:"scheme"` +- // A glob pattern, like **​/*.{ts,js}. See TextDocumentFilter for examples. +- // +- // @since 3.18.0 - support for relative patterns. Whether clients support +- // relative patterns depends on the client capability +- // `textDocuments.filters.relativePatternSupport`. +- Pattern *GlobPattern `json:"pattern,omitempty"` +-} +- +-// A literal to identify a text document in the client. +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#textDocumentIdentifier +-type TextDocumentIdentifier struct { +- // The text document's uri. +- URI DocumentURI `json:"uri"` +-} +- +-// An item to transfer a text document from the client to the +-// server. +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#textDocumentItem +-type TextDocumentItem struct { +- // The text document's uri. +- URI DocumentURI `json:"uri"` +- // The text document's language identifier. +- LanguageID LanguageKind `json:"languageId"` +- // The version number of this document (it will increase after each +- // change, including undo/redo). +- Version int32 `json:"version"` +- // The content of the opened text document. +- Text string `json:"text"` +-} +- +-// A parameter literal used in requests to pass a text document and a position inside that +-// document. +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#textDocumentPositionParams +-type TextDocumentPositionParams struct { +- // The text document. +- TextDocument TextDocumentIdentifier `json:"textDocument"` +- // The position inside the text document. +- Position Position `json:"position"` +- // Range is an optional field representing the user's text selection in the document. +- // If provided, the Position must be contained within this range. +- // +- // Note: This is a non-standard protocol extension. See microsoft/language-server-protocol#377. +- Range Range `json:"range"` +-} +- +-// General text document registration options. +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#textDocumentRegistrationOptions +-type TextDocumentRegistrationOptions struct { +- // A document selector to identify the scope of the registration. If set to null +- // the document selector provided on the client side will be used. +- DocumentSelector DocumentSelector `json:"documentSelector"` +-} +- +-// Represents reasons why a text document is saved. +-type TextDocumentSaveReason uint32 +- +-// Save registration options. 
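For reference, a minimal sketch of the position-params pattern embedded by many of the request parameter types above, assuming DocumentURI is a string-backed type and Position uses Line/Character as defined elsewhere in this file; illustrative only:

	params := TextDocumentPositionParams{
		TextDocument: TextDocumentIdentifier{URI: DocumentURI("file:///tmp/example.go")},
		Position:     Position{Line: 12, Character: 6},
	}
	_ = params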
+-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#textDocumentSaveRegistrationOptions +-type TextDocumentSaveRegistrationOptions struct { +- TextDocumentRegistrationOptions +- SaveOptions +-} +- +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#textDocumentSyncClientCapabilities +-type TextDocumentSyncClientCapabilities struct { +- // Whether text document synchronization supports dynamic registration. +- DynamicRegistration bool `json:"dynamicRegistration,omitempty"` +- // The client supports sending will save notifications. +- WillSave bool `json:"willSave,omitempty"` +- // The client supports sending a will save request and +- // waits for a response providing text edits which will +- // be applied to the document before it is saved. +- WillSaveWaitUntil bool `json:"willSaveWaitUntil,omitempty"` +- // The client supports did save notifications. +- DidSave bool `json:"didSave,omitempty"` +-} +- +-// Defines how the host (editor) should sync +-// document changes to the language server. +-type TextDocumentSyncKind uint32 +- +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#textDocumentSyncOptions +-type TextDocumentSyncOptions struct { +- // Open and close notifications are sent to the server. If omitted open close notification should not +- // be sent. +- OpenClose bool `json:"openClose,omitempty"` +- // Change notifications are sent to the server. See TextDocumentSyncKind.None, TextDocumentSyncKind.Full +- // and TextDocumentSyncKind.Incremental. If omitted it defaults to TextDocumentSyncKind.None. +- Change TextDocumentSyncKind `json:"change,omitempty"` +- // If present will save notifications are sent to the server. If omitted the notification should not be +- // sent. +- WillSave bool `json:"willSave,omitempty"` +- // If present will save wait until requests are sent to the server. If omitted the request should not be +- // sent. +- WillSaveWaitUntil bool `json:"willSaveWaitUntil,omitempty"` +- // If present save notifications are sent to the server. If omitted the notification should not be +- // sent. +- Save *SaveOptions `json:"save,omitempty"` +-} +- +-// A text edit applicable to a text document. +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#textEdit +-type TextEdit struct { +- // The range of the text document to be manipulated. To insert +- // text into a document create a range where start === end. +- Range Range `json:"range"` +- // The string to be inserted. For delete operations use an +- // empty string. +- NewText string `json:"newText"` +-} +-type TokenFormat string +-type TraceValue string +- +-// Since 3.6.0 +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#typeDefinitionClientCapabilities +-type TypeDefinitionClientCapabilities struct { +- // Whether implementation supports dynamic registration. If this is set to `true` +- // the client supports the new `TypeDefinitionRegistrationOptions` return value +- // for the corresponding server capability as well. +- DynamicRegistration bool `json:"dynamicRegistration,omitempty"` +- // The client supports additional metadata in the form of definition links. 
+- // +- // Since 3.14.0 +- LinkSupport bool `json:"linkSupport,omitempty"` +-} +- +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#typeDefinitionOptions +-type TypeDefinitionOptions struct { +- WorkDoneProgressOptions +-} +- +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#typeDefinitionParams +-type TypeDefinitionParams struct { +- TextDocumentPositionParams +- WorkDoneProgressParams +- PartialResultParams +-} +- +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#typeDefinitionRegistrationOptions +-type TypeDefinitionRegistrationOptions struct { +- TextDocumentRegistrationOptions +- TypeDefinitionOptions +- StaticRegistrationOptions +-} +- +-// @since 3.17.0 +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#typeHierarchyClientCapabilities +-type TypeHierarchyClientCapabilities struct { +- // Whether implementation supports dynamic registration. If this is set to `true` +- // the client supports the new `(TextDocumentRegistrationOptions & StaticRegistrationOptions)` +- // return value for the corresponding server capability as well. +- DynamicRegistration bool `json:"dynamicRegistration,omitempty"` +-} +- +-// @since 3.17.0 +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#typeHierarchyItem +-type TypeHierarchyItem struct { +- // The name of this item. +- Name string `json:"name"` +- // The kind of this item. +- Kind SymbolKind `json:"kind"` +- // Tags for this item. +- Tags []SymbolTag `json:"tags,omitempty"` +- // More detail for this item, e.g. the signature of a function. +- Detail string `json:"detail,omitempty"` +- // The resource identifier of this item. +- URI DocumentURI `json:"uri"` +- // The range enclosing this symbol not including leading/trailing whitespace +- // but everything else, e.g. comments and code. +- Range Range `json:"range"` +- // The range that should be selected and revealed when this symbol is being +- // picked, e.g. the name of a function. Must be contained by the +- // {@link TypeHierarchyItem.range `range`}. +- SelectionRange Range `json:"selectionRange"` +- // A data entry field that is preserved between a type hierarchy prepare and +- // supertypes or subtypes requests. It could also be used to identify the +- // type hierarchy in the server, helping improve the performance on +- // resolving supertypes and subtypes. +- Data any `json:"data,omitempty"` +-} +- +-// Type hierarchy options used during static registration. +-// +-// @since 3.17.0 +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#typeHierarchyOptions +-type TypeHierarchyOptions struct { +- WorkDoneProgressOptions +-} +- +-// The parameter of a `textDocument/prepareTypeHierarchy` request. +-// +-// @since 3.17.0 +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#typeHierarchyPrepareParams +-type TypeHierarchyPrepareParams struct { +- TextDocumentPositionParams +- WorkDoneProgressParams +-} +- +-// Type hierarchy options used during static or dynamic registration. 
+-// +-// @since 3.17.0 +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#typeHierarchyRegistrationOptions +-type TypeHierarchyRegistrationOptions struct { +- TextDocumentRegistrationOptions +- TypeHierarchyOptions +- StaticRegistrationOptions +-} +- +-// The parameter of a `typeHierarchy/subtypes` request. +-// +-// @since 3.17.0 +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#typeHierarchySubtypesParams +-type TypeHierarchySubtypesParams struct { +- Item TypeHierarchyItem `json:"item"` +- WorkDoneProgressParams +- PartialResultParams +-} +- +-// The parameter of a `typeHierarchy/supertypes` request. +-// +-// @since 3.17.0 +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#typeHierarchySupertypesParams +-type TypeHierarchySupertypesParams struct { +- Item TypeHierarchyItem `json:"item"` +- WorkDoneProgressParams +- PartialResultParams +-} +- +-// created for Tuple +-type UIntCommaUInt struct { +- Fld0 uint32 `json:"fld0"` +- Fld1 uint32 `json:"fld1"` +-} +- +-// A diagnostic report indicating that the last returned +-// report is still accurate. +-// +-// @since 3.17.0 +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#unchangedDocumentDiagnosticReport +-type UnchangedDocumentDiagnosticReport struct { +- // A document diagnostic report indicating +- // no changes to the last result. A server can +- // only return `unchanged` if result ids are +- // provided. +- Kind string `json:"kind"` +- // A result id which will be sent on the next +- // diagnostic request for the same document. +- ResultID string `json:"resultId"` +-} +- +-// Moniker uniqueness level to define scope of the moniker. +-// +-// @since 3.16.0 +-type UniquenessLevel string +- +-// General parameters to unregister a request or notification. +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#unregistration +-type Unregistration struct { +- // The id used to unregister the request or notification. Usually an id +- // provided during the register request. +- ID string `json:"id"` +- // The method to unregister for. +- Method string `json:"method"` +-} +- +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#unregistrationParams +-type UnregistrationParams struct { +- Unregisterations []Unregistration `json:"unregisterations"` +-} +- +-// A versioned notebook document identifier. +-// +-// @since 3.17.0 +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#versionedNotebookDocumentIdentifier +-type VersionedNotebookDocumentIdentifier struct { +- // The version number of this notebook document. +- Version int32 `json:"version"` +- // The notebook document's uri. +- URI URI `json:"uri"` +-} +- +-// A text document identifier to denote a specific version of a text document. +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#versionedTextDocumentIdentifier +-type VersionedTextDocumentIdentifier struct { +- // The version number of this document. +- Version int32 `json:"version"` +- TextDocumentIdentifier +-} +-type WatchKind = uint32 // The parameters sent in a will save text document notification. 
+-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#willSaveTextDocumentParams +-type WillSaveTextDocumentParams struct { +- // The document that will be saved. +- TextDocument TextDocumentIdentifier `json:"textDocument"` +- // The 'TextDocumentSaveReason'. +- Reason TextDocumentSaveReason `json:"reason"` +-} +- +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#windowClientCapabilities +-type WindowClientCapabilities struct { +- // It indicates whether the client supports server initiated +- // progress using the `window/workDoneProgress/create` request. +- // +- // The capability also controls Whether client supports handling +- // of progress notifications. If set servers are allowed to report a +- // `workDoneProgress` property in the request specific server +- // capabilities. +- // +- // @since 3.15.0 +- WorkDoneProgress bool `json:"workDoneProgress,omitempty"` +- // Capabilities specific to the showMessage request. +- // +- // @since 3.16.0 +- ShowMessage *ShowMessageRequestClientCapabilities `json:"showMessage,omitempty"` +- // Capabilities specific to the showDocument request. +- // +- // @since 3.16.0 +- ShowDocument *ShowDocumentClientCapabilities `json:"showDocument,omitempty"` +-} +- +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#workDoneProgressBegin +-type WorkDoneProgressBegin struct { +- Kind string `json:"kind"` +- // Mandatory title of the progress operation. Used to briefly inform about +- // the kind of operation being performed. +- // +- // Examples: "Indexing" or "Linking dependencies". +- Title string `json:"title"` +- // Controls if a cancel button should show to allow the user to cancel the +- // long running operation. Clients that don't support cancellation are allowed +- // to ignore the setting. +- Cancellable bool `json:"cancellable,omitempty"` +- // Optional, more detailed associated progress message. Contains +- // complementary information to the `title`. +- // +- // Examples: "3/25 files", "project/src/module2", "node_modules/some_dep". +- // If unset, the previous progress message (if any) is still valid. +- Message string `json:"message,omitempty"` +- // Optional progress percentage to display (value 100 is considered 100%). +- // If not provided infinite progress is assumed and clients are allowed +- // to ignore the `percentage` value in subsequent in report notifications. +- // +- // The value should be steadily rising. Clients are free to ignore values +- // that are not following this rule. The value range is [0, 100]. +- Percentage *uint32 `json:"percentage,omitempty"` +-} +- +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#workDoneProgressCancelParams +-type WorkDoneProgressCancelParams struct { +- // The token to be used to report progress. +- Token ProgressToken `json:"token"` +-} +- +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#workDoneProgressCreateParams +-type WorkDoneProgressCreateParams struct { +- // The token to be used to report progress. +- Token ProgressToken `json:"token"` +-} +- +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#workDoneProgressEnd +-type WorkDoneProgressEnd struct { +- Kind string `json:"kind"` +- // Optional, a final message indicating to for example indicate the outcome +- // of the operation. 
+- Message string `json:"message,omitempty"` +-} +- +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#workDoneProgressOptions +-type WorkDoneProgressOptions struct { +- WorkDoneProgress bool `json:"workDoneProgress,omitempty"` +-} +- +-// created for And +-type WorkDoneProgressOptionsAndTextDocumentRegistrationOptions struct { +- WorkDoneProgressOptions +- TextDocumentRegistrationOptions +-} +- +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#workDoneProgressParams +-type WorkDoneProgressParams struct { +- // An optional token that a server can use to report work done progress. +- WorkDoneToken ProgressToken `json:"workDoneToken,omitempty"` +-} +- +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#workDoneProgressReport +-type WorkDoneProgressReport struct { +- Kind string `json:"kind"` +- // Controls enablement state of a cancel button. +- // +- // Clients that don't support cancellation or don't support controlling the button's +- // enablement state are allowed to ignore the property. +- Cancellable bool `json:"cancellable,omitempty"` +- // Optional, more detailed associated progress message. Contains +- // complementary information to the `title`. +- // +- // Examples: "3/25 files", "project/src/module2", "node_modules/some_dep". +- // If unset, the previous progress message (if any) is still valid. +- Message string `json:"message,omitempty"` +- // Optional progress percentage to display (value 100 is considered 100%). +- // If not provided infinite progress is assumed and clients are allowed +- // to ignore the `percentage` value in subsequent in report notifications. +- // +- // The value should be steadily rising. Clients are free to ignore values +- // that are not following this rule. The value range is [0, 100] +- Percentage *uint32 `json:"percentage,omitempty"` +-} +- +-// Workspace specific client capabilities. +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#workspaceClientCapabilities +-type WorkspaceClientCapabilities struct { +- // The client supports applying batch edits +- // to the workspace by supporting the request +- // 'workspace/applyEdit' +- ApplyEdit bool `json:"applyEdit,omitempty"` +- // Capabilities specific to `WorkspaceEdit`s. +- WorkspaceEdit *WorkspaceEditClientCapabilities `json:"workspaceEdit,omitempty"` +- // Capabilities specific to the `workspace/didChangeConfiguration` notification. +- DidChangeConfiguration DidChangeConfigurationClientCapabilities `json:"didChangeConfiguration,omitempty"` +- // Capabilities specific to the `workspace/didChangeWatchedFiles` notification. +- DidChangeWatchedFiles DidChangeWatchedFilesClientCapabilities `json:"didChangeWatchedFiles,omitempty"` +- // Capabilities specific to the `workspace/symbol` request. +- Symbol *WorkspaceSymbolClientCapabilities `json:"symbol,omitempty"` +- // Capabilities specific to the `workspace/executeCommand` request. +- ExecuteCommand *ExecuteCommandClientCapabilities `json:"executeCommand,omitempty"` +- // The client has support for workspace folders. +- // +- // @since 3.6.0 +- WorkspaceFolders bool `json:"workspaceFolders,omitempty"` +- // The client supports `workspace/configuration` requests. +- // +- // @since 3.6.0 +- Configuration bool `json:"configuration,omitempty"` +- // Capabilities specific to the semantic token requests scoped to the +- // workspace. 
+- // +- // @since 3.16.0. +- SemanticTokens *SemanticTokensWorkspaceClientCapabilities `json:"semanticTokens,omitempty"` +- // Capabilities specific to the code lens requests scoped to the +- // workspace. +- // +- // @since 3.16.0. +- CodeLens *CodeLensWorkspaceClientCapabilities `json:"codeLens,omitempty"` +- // The client has support for file notifications/requests for user operations on files. +- // +- // Since 3.16.0 +- FileOperations *FileOperationClientCapabilities `json:"fileOperations,omitempty"` +- // Capabilities specific to the inline values requests scoped to the +- // workspace. +- // +- // @since 3.17.0. +- InlineValue *InlineValueWorkspaceClientCapabilities `json:"inlineValue,omitempty"` +- // Capabilities specific to the inlay hint requests scoped to the +- // workspace. +- // +- // @since 3.17.0. +- InlayHint *InlayHintWorkspaceClientCapabilities `json:"inlayHint,omitempty"` +- // Capabilities specific to the diagnostic requests scoped to the +- // workspace. +- // +- // @since 3.17.0. +- Diagnostics *DiagnosticWorkspaceClientCapabilities `json:"diagnostics,omitempty"` +- // Capabilities specific to the folding range requests scoped to the workspace. +- // +- // @since 3.18.0 +- // @proposed +- FoldingRange *FoldingRangeWorkspaceClientCapabilities `json:"foldingRange,omitempty"` +- // Capabilities specific to the `workspace/textDocumentContent` request. +- // +- // @since 3.18.0 +- // @proposed +- TextDocumentContent *TextDocumentContentClientCapabilities `json:"textDocumentContent,omitempty"` +-} +- +-// Parameters of the workspace diagnostic request. +-// +-// @since 3.17.0 +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#workspaceDiagnosticParams +-type WorkspaceDiagnosticParams struct { +- // The additional identifier provided during registration. +- Identifier string `json:"identifier,omitempty"` +- // The currently known diagnostic reports with their +- // previous result ids. +- PreviousResultIds []PreviousResultID `json:"previousResultIds"` +- WorkDoneProgressParams +- PartialResultParams +-} +- +-// A workspace diagnostic report. +-// +-// @since 3.17.0 +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#workspaceDiagnosticReport +-type WorkspaceDiagnosticReport struct { +- Items []WorkspaceDocumentDiagnosticReport `json:"items"` +-} +- +-// A partial result for a workspace diagnostic report. +-// +-// @since 3.17.0 +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#workspaceDiagnosticReportPartialResult +-type WorkspaceDiagnosticReportPartialResult struct { +- Items []WorkspaceDocumentDiagnosticReport `json:"items"` +-} +- +-// A workspace diagnostic document report. +-// +-// @since 3.17.0 +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#workspaceDocumentDiagnosticReport +-type WorkspaceDocumentDiagnosticReport = Or_WorkspaceDocumentDiagnosticReport // (alias) +-// A workspace edit represents changes to many resources managed in the workspace. The edit +-// should either provide `changes` or `documentChanges`. If documentChanges are present +-// they are preferred over `changes` if the client can handle versioned document edits. +-// +-// Since version 3.13.0 a workspace edit can contain resource operations as well. If resource +-// operations are present clients need to execute the operations in the order in which they +-// are provided. 
So a workspace edit for example can consist of the following two changes: +-// (1) a create file a.txt and (2) a text document edit which insert text into file a.txt. +-// +-// An invalid sequence (e.g. (1) delete file a.txt and (2) insert text into file a.txt) will +-// cause failure of the operation. How the client recovers from the failure is described by +-// the client capability: `workspace.workspaceEdit.failureHandling` +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#workspaceEdit +-type WorkspaceEdit struct { +- // Holds changes to existing resources. +- Changes map[DocumentURI][]TextEdit `json:"changes,omitempty"` +- // Depending on the client capability `workspace.workspaceEdit.resourceOperations` document changes +- // are either an array of `TextDocumentEdit`s to express changes to n different text documents +- // where each text document edit addresses a specific version of a text document. Or it can contain +- // above `TextDocumentEdit`s mixed with create, rename and delete file / folder operations. +- // +- // Whether a client supports versioned document edits is expressed via +- // `workspace.workspaceEdit.documentChanges` client capability. +- // +- // If a client neither supports `documentChanges` nor `workspace.workspaceEdit.resourceOperations` then +- // only plain `TextEdit`s using the `changes` property are supported. +- DocumentChanges []DocumentChange `json:"documentChanges,omitempty"` +- // A map of change annotations that can be referenced in `AnnotatedTextEdit`s or create, rename and +- // delete file / folder operations. +- // +- // Whether clients honor this property depends on the client capability `workspace.changeAnnotationSupport`. +- // +- // @since 3.16.0 +- ChangeAnnotations map[ChangeAnnotationIdentifier]ChangeAnnotation `json:"changeAnnotations,omitempty"` +-} +- +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#workspaceEditClientCapabilities +-type WorkspaceEditClientCapabilities struct { +- // The client supports versioned document changes in `WorkspaceEdit`s +- DocumentChanges bool `json:"documentChanges,omitempty"` +- // The resource operations the client supports. Clients should at least +- // support 'create', 'rename' and 'delete' files and folders. +- // +- // @since 3.13.0 +- ResourceOperations []ResourceOperationKind `json:"resourceOperations,omitempty"` +- // The failure handling strategy of a client if applying the workspace edit +- // fails. +- // +- // @since 3.13.0 +- FailureHandling *FailureHandlingKind `json:"failureHandling,omitempty"` +- // Whether the client normalizes line endings to the client specific +- // setting. +- // If set to `true` the client will normalize line ending characters +- // in a workspace edit to the client-specified new line +- // character. +- // +- // @since 3.16.0 +- NormalizesLineEndings bool `json:"normalizesLineEndings,omitempty"` +- // Whether the client in general supports change annotations on text edits, +- // create file, rename file and delete file changes. +- // +- // @since 3.16.0 +- ChangeAnnotationSupport *ChangeAnnotationsSupportOptions `json:"changeAnnotationSupport,omitempty"` +- // Whether the client supports `WorkspaceEditMetadata` in `WorkspaceEdit`s. +- // +- // @since 3.18.0 +- // @proposed +- MetadataSupport bool `json:"metadataSupport,omitempty"` +- // Whether the client supports snippets as text edits. 
+- // +- // @since 3.18.0 +- // @proposed +- SnippetEditSupport bool `json:"snippetEditSupport,omitempty"` +-} +- +-// Additional data about a workspace edit. +-// +-// @since 3.18.0 +-// @proposed +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#workspaceEditMetadata +-type WorkspaceEditMetadata struct { +- // Signal to the editor that this edit is a refactoring. +- IsRefactoring bool `json:"isRefactoring,omitempty"` +-} +- +-// A workspace folder inside a client. +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#workspaceFolder +-type WorkspaceFolder struct { +- // The associated URI for this workspace folder. +- URI URI `json:"uri"` +- // The name of the workspace folder. Used to refer to this +- // workspace folder in the user interface. +- Name string `json:"name"` +-} +- +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#workspaceFoldersServerCapabilities +-type WorkspaceFolders5Gn struct { +- // The server has support for workspace folders +- Supported bool `json:"supported,omitempty"` +- // Whether the server wants to receive workspace folder +- // change notifications. +- // +- // If a string is provided the string is treated as an ID +- // under which the notification is registered on the client +- // side. The ID can be used to unregister for these events +- // using the `client/unregisterCapability` request. +- ChangeNotifications string `json:"changeNotifications,omitempty"` +-} +- +-// The workspace folder change event. +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#workspaceFoldersChangeEvent +-type WorkspaceFoldersChangeEvent struct { +- // The array of added workspace folders +- Added []WorkspaceFolder `json:"added"` +- // The array of the removed workspace folders +- Removed []WorkspaceFolder `json:"removed"` +-} +- +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#workspaceFoldersInitializeParams +-type WorkspaceFoldersInitializeParams struct { +- // The workspace folders configured in the client when the server starts. +- // +- // This property is only available if the client supports workspace folders. +- // It can be `null` if the client supports workspace folders but none are +- // configured. +- // +- // @since 3.6.0 +- WorkspaceFolders []WorkspaceFolder `json:"workspaceFolders,omitempty"` +-} +- +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#workspaceFoldersServerCapabilities +-type WorkspaceFoldersServerCapabilities struct { +- // The server has support for workspace folders +- Supported bool `json:"supported,omitempty"` +- // Whether the server wants to receive workspace folder +- // change notifications. +- // +- // If a string is provided the string is treated as an ID +- // under which the notification is registered on the client +- // side. The ID can be used to unregister for these events +- // using the `client/unregisterCapability` request. +- ChangeNotifications string `json:"changeNotifications,omitempty"` +-} +- +-// A full document diagnostic report for a workspace diagnostic result. 
+-// +-// @since 3.17.0 +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#workspaceFullDocumentDiagnosticReport +-type WorkspaceFullDocumentDiagnosticReport struct { +- // The URI for which diagnostic information is reported. +- URI DocumentURI `json:"uri"` +- // The version number for which the diagnostics are reported. +- // If the document is not marked as open `null` can be provided. +- Version int32 `json:"version"` +- FullDocumentDiagnosticReport +-} +- +-// Defines workspace specific capabilities of the server. +-// +-// @since 3.18.0 +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#workspaceOptions +-type WorkspaceOptions struct { +- // The server supports workspace folder. +- // +- // @since 3.6.0 +- WorkspaceFolders *WorkspaceFolders5Gn `json:"workspaceFolders,omitempty"` +- // The server is interested in notifications/requests for operations on files. +- // +- // @since 3.16.0 +- FileOperations *FileOperationOptions `json:"fileOperations,omitempty"` +- // The server supports the `workspace/textDocumentContent` request. +- // +- // @since 3.18.0 +- // @proposed +- TextDocumentContent *Or_WorkspaceOptions_textDocumentContent `json:"textDocumentContent,omitempty"` +-} +- +-// A special workspace symbol that supports locations without a range. +-// +-// See also SymbolInformation. +-// +-// @since 3.17.0 +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#workspaceSymbol +-type WorkspaceSymbol struct { +- // The location of the symbol. Whether a server is allowed to +- // return a location without a range depends on the client +- // capability `workspace.symbol.resolveSupport`. +- // +- // See SymbolInformation#location for more details. +- Location OrPLocation_workspace_symbol `json:"location"` +- // A data entry field that is preserved on a workspace symbol between a +- // workspace symbol request and a workspace symbol resolve request. +- Data any `json:"data,omitempty"` +- BaseSymbolInformation +-} +- +-// Client capabilities for a {@link WorkspaceSymbolRequest}. +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#workspaceSymbolClientCapabilities +-type WorkspaceSymbolClientCapabilities struct { +- // Symbol request supports dynamic registration. +- DynamicRegistration bool `json:"dynamicRegistration,omitempty"` +- // Specific capabilities for the `SymbolKind` in the `workspace/symbol` request. +- SymbolKind *ClientSymbolKindOptions `json:"symbolKind,omitempty"` +- // The client supports tags on `SymbolInformation`. +- // Clients supporting tags have to handle unknown tags gracefully. +- // +- // @since 3.16.0 +- TagSupport *ClientSymbolTagOptions `json:"tagSupport,omitempty"` +- // The client support partial workspace symbols. The client will send the +- // request `workspaceSymbol/resolve` to the server to resolve additional +- // properties. +- // +- // @since 3.17.0 +- ResolveSupport *ClientSymbolResolveOptions `json:"resolveSupport,omitempty"` +-} +- +-// Server capabilities for a {@link WorkspaceSymbolRequest}. +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#workspaceSymbolOptions +-type WorkspaceSymbolOptions struct { +- // The server provides support to resolve additional +- // information for a workspace symbol. 
+- // +- // @since 3.17.0 +- ResolveProvider bool `json:"resolveProvider,omitempty"` +- WorkDoneProgressOptions +-} +- +-// The parameters of a {@link WorkspaceSymbolRequest}. +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#workspaceSymbolParams +-type WorkspaceSymbolParams struct { +- // A query string to filter symbols by. Clients may send an empty +- // string here to request all symbols. +- // +- // The `query`-parameter should be interpreted in a *relaxed way* as editors +- // will apply their own highlighting and scoring on the results. A good rule +- // of thumb is to match case-insensitive and to simply check that the +- // characters of *query* appear in their order in a candidate symbol. +- // Servers shouldn't use prefix, substring, or similar strict matching. +- Query string `json:"query"` +- WorkDoneProgressParams +- PartialResultParams +-} +- +-// Registration options for a {@link WorkspaceSymbolRequest}. +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#workspaceSymbolRegistrationOptions +-type WorkspaceSymbolRegistrationOptions struct { +- WorkspaceSymbolOptions +-} +- +-// An unchanged document diagnostic report for a workspace diagnostic result. +-// +-// @since 3.17.0 +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#workspaceUnchangedDocumentDiagnosticReport +-type WorkspaceUnchangedDocumentDiagnosticReport struct { +- // The URI for which diagnostic information is reported. +- URI DocumentURI `json:"uri"` +- // The version number for which the diagnostics are reported. +- // If the document is not marked as open `null` can be provided. +- Version int32 `json:"version"` +- UnchangedDocumentDiagnosticReport +-} +- +-// The initialize parameters +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#_InitializeParams +-type XInitializeParams struct { +- // The process Id of the parent process that started +- // the server. +- // +- // Is `null` if the process has not been started by another process. +- // If the parent process is not alive then the server should exit. +- ProcessID int32 `json:"processId"` +- // Information about the client +- // +- // @since 3.15.0 +- ClientInfo *ClientInfo `json:"clientInfo,omitempty"` +- // The locale the client is currently showing the user interface +- // in. This must not necessarily be the locale of the operating +- // system. +- // +- // Uses IETF language tags as the value's syntax +- // (See https://en.wikipedia.org/wiki/IETF_language_tag) +- // +- // @since 3.16.0 +- Locale string `json:"locale,omitempty"` +- // The rootPath of the workspace. Is null +- // if no folder is open. +- // +- // @deprecated in favour of rootUri. +- RootPath string `json:"rootPath,omitempty"` +- // The rootUri of the workspace. Is null if no +- // folder is open. If both `rootPath` and `rootUri` are set +- // `rootUri` wins. +- // +- // @deprecated in favour of workspaceFolders. +- RootURI DocumentURI `json:"rootUri"` +- // The capabilities provided by the client (editor or tool) +- Capabilities ClientCapabilities `json:"capabilities"` +- // User provided initialization options. +- InitializationOptions any `json:"initializationOptions,omitempty"` +- // The initial trace setting. If omitted trace is disabled ('off'). 
+- Trace *TraceValue `json:"trace,omitempty"` +- WorkDoneProgressParams +-} +- +-// The initialize parameters +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#_InitializeParams +-type _InitializeParams struct { +- // The process Id of the parent process that started +- // the server. +- // +- // Is `null` if the process has not been started by another process. +- // If the parent process is not alive then the server should exit. +- ProcessID int32 `json:"processId"` +- // Information about the client +- // +- // @since 3.15.0 +- ClientInfo *ClientInfo `json:"clientInfo,omitempty"` +- // The locale the client is currently showing the user interface +- // in. This must not necessarily be the locale of the operating +- // system. +- // +- // Uses IETF language tags as the value's syntax +- // (See https://en.wikipedia.org/wiki/IETF_language_tag) +- // +- // @since 3.16.0 +- Locale string `json:"locale,omitempty"` +- // The rootPath of the workspace. Is null +- // if no folder is open. +- // +- // @deprecated in favour of rootUri. +- RootPath string `json:"rootPath,omitempty"` +- // The rootUri of the workspace. Is null if no +- // folder is open. If both `rootPath` and `rootUri` are set +- // `rootUri` wins. +- // +- // @deprecated in favour of workspaceFolders. +- RootURI DocumentURI `json:"rootUri"` +- // The capabilities provided by the client (editor or tool) +- Capabilities ClientCapabilities `json:"capabilities"` +- // User provided initialization options. +- InitializationOptions any `json:"initializationOptions,omitempty"` +- // The initial trace setting. If omitted trace is disabled ('off'). +- Trace *TraceValue `json:"trace,omitempty"` +- WorkDoneProgressParams +-} +- +-const ( +- // Defines how values from a set of defaults and an individual item will be +- // merged. +- // +- // @since 3.18.0 +- // The value from the individual item (if provided and not `null`) will be +- // used instead of the default. +- Replace ApplyKind = 1 +- // The value from the item will be merged with the default. +- // +- // The specific rules for mergeing values are defined against each field +- // that supports merging. +- Merge ApplyKind = 2 +- // A set of predefined code action kinds +- // Empty kind. +- Empty CodeActionKind = "" +- // Base kind for quickfix actions: 'quickfix' +- QuickFix CodeActionKind = "quickfix" +- // Base kind for refactoring actions: 'refactor' +- Refactor CodeActionKind = "refactor" +- // Base kind for refactoring extraction actions: 'refactor.extract' +- // +- // Example extract actions: +- // +- // +- // - Extract method +- // - Extract function +- // - Extract variable +- // - Extract interface from class +- // - ... +- RefactorExtract CodeActionKind = "refactor.extract" +- // Base kind for refactoring inline actions: 'refactor.inline' +- // +- // Example inline actions: +- // +- // +- // - Inline function +- // - Inline variable +- // - Inline constant +- // - ... +- RefactorInline CodeActionKind = "refactor.inline" +- // Base kind for refactoring move actions: `refactor.move` +- // +- // Example move actions: +- // +- // +- // - Move a function to a new file +- // - Move a property between classes +- // - Move method to base class +- // - ... 
+- // +- // @since 3.18.0 +- // @proposed +- RefactorMove CodeActionKind = "refactor.move" +- // Base kind for refactoring rewrite actions: 'refactor.rewrite' +- // +- // Example rewrite actions: +- // +- // +- // - Convert JavaScript function to class +- // - Add or remove parameter +- // - Encapsulate field +- // - Make method static +- // - Move method to base class +- // - ... +- RefactorRewrite CodeActionKind = "refactor.rewrite" +- // Base kind for source actions: `source` +- // +- // Source code actions apply to the entire file. +- Source CodeActionKind = "source" +- // Base kind for an organize imports source action: `source.organizeImports` +- SourceOrganizeImports CodeActionKind = "source.organizeImports" +- // Base kind for auto-fix source actions: `source.fixAll`. +- // +- // Fix all actions automatically fix errors that have a clear fix that do not require user input. +- // They should not suppress errors or perform unsafe fixes such as generating new types or classes. +- // +- // @since 3.15.0 +- SourceFixAll CodeActionKind = "source.fixAll" +- // Base kind for all code actions applying to the entire notebook's scope. CodeActionKinds using +- // this should always begin with `notebook.` +- // +- // @since 3.18.0 +- Notebook CodeActionKind = "notebook" +- // Code action tags are extra annotations that tweak the behavior of a code action. +- // +- // @since 3.18.0 - proposed +- // Marks the code action as LLM-generated. +- LLMGenerated CodeActionTag = 1 +- // The reason why code actions were requested. +- // +- // @since 3.17.0 +- // Code actions were explicitly requested by the user or by an extension. +- CodeActionInvoked CodeActionTriggerKind = 1 +- // Code actions were requested automatically. +- // +- // This typically happens when current selection in a file changes, but can +- // also be triggered when file content changes. +- CodeActionAutomatic CodeActionTriggerKind = 2 +- // The kind of a completion entry. +- TextCompletion CompletionItemKind = 1 +- MethodCompletion CompletionItemKind = 2 +- FunctionCompletion CompletionItemKind = 3 +- ConstructorCompletion CompletionItemKind = 4 +- FieldCompletion CompletionItemKind = 5 +- VariableCompletion CompletionItemKind = 6 +- ClassCompletion CompletionItemKind = 7 +- InterfaceCompletion CompletionItemKind = 8 +- ModuleCompletion CompletionItemKind = 9 +- PropertyCompletion CompletionItemKind = 10 +- UnitCompletion CompletionItemKind = 11 +- ValueCompletion CompletionItemKind = 12 +- EnumCompletion CompletionItemKind = 13 +- KeywordCompletion CompletionItemKind = 14 +- SnippetCompletion CompletionItemKind = 15 +- ColorCompletion CompletionItemKind = 16 +- FileCompletion CompletionItemKind = 17 +- ReferenceCompletion CompletionItemKind = 18 +- FolderCompletion CompletionItemKind = 19 +- EnumMemberCompletion CompletionItemKind = 20 +- ConstantCompletion CompletionItemKind = 21 +- StructCompletion CompletionItemKind = 22 +- EventCompletion CompletionItemKind = 23 +- OperatorCompletion CompletionItemKind = 24 +- TypeParameterCompletion CompletionItemKind = 25 +- // Completion item tags are extra annotations that tweak the rendering of a completion +- // item. +- // +- // @since 3.15.0 +- // Render a completion as obsolete, usually using a strike-out. +- ComplDeprecated CompletionItemTag = 1 +- // How a completion was triggered +- // Completion was triggered by typing an identifier (24x7 code +- // complete), manual invocation (e.g Ctrl+Space) or via API. 
+- Invoked CompletionTriggerKind = 1 +- // Completion was triggered by a trigger character specified by +- // the `triggerCharacters` properties of the `CompletionRegistrationOptions`. +- TriggerCharacter CompletionTriggerKind = 2 +- // Completion was re-triggered as current completion list is incomplete +- TriggerForIncompleteCompletions CompletionTriggerKind = 3 +- // The diagnostic's severity. +- // Reports an error. +- SeverityError DiagnosticSeverity = 1 +- // Reports a warning. +- SeverityWarning DiagnosticSeverity = 2 +- // Reports an information. +- SeverityInformation DiagnosticSeverity = 3 +- // Reports a hint. +- SeverityHint DiagnosticSeverity = 4 +- // The diagnostic tags. +- // +- // @since 3.15.0 +- // Unused or unnecessary code. +- // +- // Clients are allowed to render diagnostics with this tag faded out instead of having +- // an error squiggle. +- Unnecessary DiagnosticTag = 1 +- // Deprecated or obsolete code. +- // +- // Clients are allowed to rendered diagnostics with this tag strike through. +- Deprecated DiagnosticTag = 2 +- // The document diagnostic report kinds. +- // +- // @since 3.17.0 +- // A diagnostic report with a full +- // set of problems. +- DiagnosticFull DocumentDiagnosticReportKind = "full" +- // A report indicating that the last +- // returned report is still accurate. +- DiagnosticUnchanged DocumentDiagnosticReportKind = "unchanged" +- // A document highlight kind. +- // A textual occurrence. +- Text DocumentHighlightKind = 1 +- // Read-access of a symbol, like reading a variable. +- Read DocumentHighlightKind = 2 +- // Write-access of a symbol, like writing to a variable. +- Write DocumentHighlightKind = 3 +- // Predefined error codes. +- ParseError ErrorCodes = -32700 +- InvalidRequest ErrorCodes = -32600 +- MethodNotFound ErrorCodes = -32601 +- InvalidParams ErrorCodes = -32602 +- InternalError ErrorCodes = -32603 +- // Error code indicating that a server received a notification or +- // request before the server has received the `initialize` request. +- ServerNotInitialized ErrorCodes = -32002 +- UnknownErrorCode ErrorCodes = -32001 +- // Applying the workspace change is simply aborted if one of the changes provided +- // fails. All operations executed before the failing operation stay executed. +- Abort FailureHandlingKind = "abort" +- // All operations are executed transactional. That means they either all +- // succeed or no changes at all are applied to the workspace. +- Transactional FailureHandlingKind = "transactional" +- // If the workspace edit contains only textual file changes they are executed transactional. +- // If resource changes (create, rename or delete file) are part of the change the failure +- // handling strategy is abort. +- TextOnlyTransactional FailureHandlingKind = "textOnlyTransactional" +- // The client tries to undo the operations already executed. But there is no +- // guarantee that this is succeeding. +- Undo FailureHandlingKind = "undo" +- // The file event type +- // The file got created. +- Created FileChangeType = 1 +- // The file got changed. +- Changed FileChangeType = 2 +- // The file got deleted. +- Deleted FileChangeType = 3 +- // A pattern kind describing if a glob pattern matches a file a folder or +- // both. +- // +- // @since 3.16.0 +- // The pattern matches a file only. +- FilePattern FileOperationPatternKind = "file" +- // The pattern matches a folder only. +- FolderPattern FileOperationPatternKind = "folder" +- // A set of predefined range kinds. 
+- // Folding range for a comment +- Comment FoldingRangeKind = "comment" +- // Folding range for an import or include +- Imports FoldingRangeKind = "imports" +- // Folding range for a region (e.g. `#region`) +- Region FoldingRangeKind = "region" +- // Inlay hint kinds. +- // +- // @since 3.17.0 +- // An inlay hint that for a type annotation. +- Type InlayHintKind = 1 +- // An inlay hint that is for a parameter. +- Parameter InlayHintKind = 2 +- // Describes how an {@link InlineCompletionItemProvider inline completion provider} was triggered. +- // +- // @since 3.18.0 +- // @proposed +- // Completion was triggered explicitly by a user gesture. +- InlineInvoked InlineCompletionTriggerKind = 1 +- // Completion was triggered automatically while editing. +- InlineAutomatic InlineCompletionTriggerKind = 2 +- // Defines whether the insert text in a completion item should be interpreted as +- // plain text or a snippet. +- // The primary text to be inserted is treated as a plain string. +- PlainTextTextFormat InsertTextFormat = 1 +- // The primary text to be inserted is treated as a snippet. +- // +- // A snippet can define tab stops and placeholders with `$1`, `$2` +- // and `${3:foo}`. `$0` defines the final tab stop, it defaults to +- // the end of the snippet. Placeholders with equal identifiers are linked, +- // that is typing in one will update others too. +- // +- // See also: https://microsoft.github.io/language-server-protocol/specifications/specification-current/#snippet_syntax +- SnippetTextFormat InsertTextFormat = 2 +- // How whitespace and indentation is handled during completion +- // item insertion. +- // +- // @since 3.16.0 +- // The insertion or replace strings is taken as it is. If the +- // value is multi line the lines below the cursor will be +- // inserted using the indentation defined in the string value. +- // The client will not apply any kind of adjustments to the +- // string. +- AsIs InsertTextMode = 1 +- // The editor adjusts leading whitespace of new lines so that +- // they match the indentation up to the cursor of the line for +- // which the item is accepted. +- // +- // Consider a line like this: <2tabs><3tabs>foo. Accepting a +- // multi line completion item is indented using 2 tabs and all +- // following lines inserted will be indented using 2 tabs as well. +- AdjustIndentation InsertTextMode = 2 +- // A request failed but it was syntactically correct, e.g the +- // method name was known and the parameters were valid. The error +- // message should contain human readable information about why +- // the request failed. +- // +- // @since 3.17.0 +- RequestFailed LSPErrorCodes = -32803 +- // The server cancelled the request. This error code should +- // only be used for requests that explicitly support being +- // server cancellable. +- // +- // @since 3.17.0 +- ServerCancelled LSPErrorCodes = -32802 +- // The server detected that the content of a document got +- // modified outside normal conditions. A server should +- // NOT send this error code if it detects a content change +- // in it unprocessed messages. The result even computed +- // on an older state might still be useful for the client. +- // +- // If a client decides that a result is not of any use anymore +- // the client should cancel the request. +- ContentModified LSPErrorCodes = -32801 +- // The client has canceled a request and a server has detected +- // the cancel. 
+- RequestCancelled LSPErrorCodes = -32800 +- // Predefined Language kinds +- // @since 3.18.0 +- LangABAP LanguageKind = "abap" +- LangWindowsBat LanguageKind = "bat" +- LangBibTeX LanguageKind = "bibtex" +- LangClojure LanguageKind = "clojure" +- LangCoffeescript LanguageKind = "coffeescript" +- LangC LanguageKind = "c" +- LangCPP LanguageKind = "cpp" +- LangCSharp LanguageKind = "csharp" +- LangCSS LanguageKind = "css" +- // @since 3.18.0 +- // @proposed +- LangD LanguageKind = "d" +- // @since 3.18.0 +- // @proposed +- LangDelphi LanguageKind = "pascal" +- LangDiff LanguageKind = "diff" +- LangDart LanguageKind = "dart" +- LangDockerfile LanguageKind = "dockerfile" +- LangElixir LanguageKind = "elixir" +- LangErlang LanguageKind = "erlang" +- LangFSharp LanguageKind = "fsharp" +- LangGitCommit LanguageKind = "git-commit" +- LangGitRebase LanguageKind = "rebase" +- LangGo LanguageKind = "go" +- LangGroovy LanguageKind = "groovy" +- LangHandlebars LanguageKind = "handlebars" +- LangHaskell LanguageKind = "haskell" +- LangHTML LanguageKind = "html" +- LangIni LanguageKind = "ini" +- LangJava LanguageKind = "java" +- LangJavaScript LanguageKind = "javascript" +- LangJavaScriptReact LanguageKind = "javascriptreact" +- LangJSON LanguageKind = "json" +- LangLaTeX LanguageKind = "latex" +- LangLess LanguageKind = "less" +- LangLua LanguageKind = "lua" +- LangMakefile LanguageKind = "makefile" +- LangMarkdown LanguageKind = "markdown" +- LangObjectiveC LanguageKind = "objective-c" +- LangObjectiveCPP LanguageKind = "objective-cpp" +- // @since 3.18.0 +- // @proposed +- LangPascal LanguageKind = "pascal" +- LangPerl LanguageKind = "perl" +- LangPerl6 LanguageKind = "perl6" +- LangPHP LanguageKind = "php" +- LangPowershell LanguageKind = "powershell" +- LangPug LanguageKind = "jade" +- LangPython LanguageKind = "python" +- LangR LanguageKind = "r" +- LangRazor LanguageKind = "razor" +- LangRuby LanguageKind = "ruby" +- LangRust LanguageKind = "rust" +- LangSCSS LanguageKind = "scss" +- LangSASS LanguageKind = "sass" +- LangScala LanguageKind = "scala" +- LangShaderLab LanguageKind = "shaderlab" +- LangShellScript LanguageKind = "shellscript" +- LangSQL LanguageKind = "sql" +- LangSwift LanguageKind = "swift" +- LangTypeScript LanguageKind = "typescript" +- LangTypeScriptReact LanguageKind = "typescriptreact" +- LangTeX LanguageKind = "tex" +- LangVisualBasic LanguageKind = "vb" +- LangXML LanguageKind = "xml" +- LangXSL LanguageKind = "xsl" +- LangYAML LanguageKind = "yaml" +- // Describes the content type that a client supports in various +- // result literals like `Hover`, `ParameterInfo` or `CompletionItem`. +- // +- // Please note that `MarkupKinds` must not start with a `$`. This kinds +- // are reserved for internal usage. +- // Plain text is supported as a content format +- PlainText MarkupKind = "plaintext" +- // Markdown is supported as a content format +- Markdown MarkupKind = "markdown" +- // The message type +- // An error message. +- Error MessageType = 1 +- // A warning message. +- Warning MessageType = 2 +- // An information message. +- Info MessageType = 3 +- // A log message. +- Log MessageType = 4 +- // A debug message. +- // +- // @since 3.18.0 +- // @proposed +- Debug MessageType = 5 +- // The moniker kind. 
+- // +- // @since 3.16.0 +- // The moniker represent a symbol that is imported into a project +- Import MonikerKind = "import" +- // The moniker represents a symbol that is exported from a project +- Export MonikerKind = "export" +- // The moniker represents a symbol that is local to a project (e.g. a local +- // variable of a function, a class not visible outside the project, ...) +- Local MonikerKind = "local" +- // A notebook cell kind. +- // +- // @since 3.17.0 +- // A markup-cell is formatted source that is used for display. +- Markup NotebookCellKind = 1 +- // A code-cell is source code. +- Code NotebookCellKind = 2 +- // A set of predefined position encoding kinds. +- // +- // @since 3.17.0 +- // Character offsets count UTF-8 code units (e.g. bytes). +- UTF8 PositionEncodingKind = "utf-8" +- // Character offsets count UTF-16 code units. +- // +- // This is the default and must always be supported +- // by servers +- UTF16 PositionEncodingKind = "utf-16" +- // Character offsets count UTF-32 code units. +- // +- // Implementation note: these are the same as Unicode codepoints, +- // so this `PositionEncodingKind` may also be used for an +- // encoding-agnostic representation of character offsets. +- UTF32 PositionEncodingKind = "utf-32" +- // The client's default behavior is to select the identifier +- // according the to language's syntax rule. +- Identifier PrepareSupportDefaultBehavior = 1 +- // Supports creating new files and folders. +- Create ResourceOperationKind = "create" +- // Supports renaming existing files and folders. +- Rename ResourceOperationKind = "rename" +- // Supports deleting existing files and folders. +- Delete ResourceOperationKind = "delete" +- // A set of predefined token modifiers. This set is not fixed +- // an clients can specify additional token types via the +- // corresponding client capabilities. +- // +- // @since 3.16.0 +- ModDeclaration SemanticTokenModifiers = "declaration" +- ModDefinition SemanticTokenModifiers = "definition" +- ModReadonly SemanticTokenModifiers = "readonly" +- ModStatic SemanticTokenModifiers = "static" +- ModDeprecated SemanticTokenModifiers = "deprecated" +- ModAbstract SemanticTokenModifiers = "abstract" +- ModAsync SemanticTokenModifiers = "async" +- ModModification SemanticTokenModifiers = "modification" +- ModDocumentation SemanticTokenModifiers = "documentation" +- ModDefaultLibrary SemanticTokenModifiers = "defaultLibrary" +- // A set of predefined token types. This set is not fixed +- // an clients can specify additional token types via the +- // corresponding client capabilities. +- // +- // @since 3.16.0 +- NamespaceType SemanticTokenTypes = "namespace" +- // Represents a generic type. Acts as a fallback for types which can't be mapped to +- // a specific type like class or enum. 
+- TypeType SemanticTokenTypes = "type" +- ClassType SemanticTokenTypes = "class" +- EnumType SemanticTokenTypes = "enum" +- InterfaceType SemanticTokenTypes = "interface" +- StructType SemanticTokenTypes = "struct" +- TypeParameterType SemanticTokenTypes = "typeParameter" +- ParameterType SemanticTokenTypes = "parameter" +- VariableType SemanticTokenTypes = "variable" +- PropertyType SemanticTokenTypes = "property" +- EnumMemberType SemanticTokenTypes = "enumMember" +- EventType SemanticTokenTypes = "event" +- FunctionType SemanticTokenTypes = "function" +- MethodType SemanticTokenTypes = "method" +- MacroType SemanticTokenTypes = "macro" +- KeywordType SemanticTokenTypes = "keyword" +- ModifierType SemanticTokenTypes = "modifier" +- CommentType SemanticTokenTypes = "comment" +- StringType SemanticTokenTypes = "string" +- NumberType SemanticTokenTypes = "number" +- RegexpType SemanticTokenTypes = "regexp" +- OperatorType SemanticTokenTypes = "operator" +- // @since 3.17.0 +- DecoratorType SemanticTokenTypes = "decorator" +- // @since 3.18.0 +- LabelType SemanticTokenTypes = "label" +- // How a signature help was triggered. +- // +- // @since 3.15.0 +- // Signature help was invoked manually by the user or by a command. +- SigInvoked SignatureHelpTriggerKind = 1 +- // Signature help was triggered by a trigger character. +- SigTriggerCharacter SignatureHelpTriggerKind = 2 +- // Signature help was triggered by the cursor moving or by the document content changing. +- SigContentChange SignatureHelpTriggerKind = 3 +- // A symbol kind. +- File SymbolKind = 1 +- Module SymbolKind = 2 +- Namespace SymbolKind = 3 +- Package SymbolKind = 4 +- Class SymbolKind = 5 +- Method SymbolKind = 6 +- Property SymbolKind = 7 +- Field SymbolKind = 8 +- Constructor SymbolKind = 9 +- Enum SymbolKind = 10 +- Interface SymbolKind = 11 +- Function SymbolKind = 12 +- Variable SymbolKind = 13 +- Constant SymbolKind = 14 +- String SymbolKind = 15 +- Number SymbolKind = 16 +- Boolean SymbolKind = 17 +- Array SymbolKind = 18 +- Object SymbolKind = 19 +- Key SymbolKind = 20 +- Null SymbolKind = 21 +- EnumMember SymbolKind = 22 +- Struct SymbolKind = 23 +- Event SymbolKind = 24 +- Operator SymbolKind = 25 +- TypeParameter SymbolKind = 26 +- // Symbol tags are extra annotations that tweak the rendering of a symbol. +- // +- // @since 3.16 +- // Render a symbol as obsolete, usually using a strike-out. +- DeprecatedSymbol SymbolTag = 1 +- // Represents reasons why a text document is saved. +- // Manually triggered, e.g. by the user pressing save, by starting debugging, +- // or by an API call. +- Manual TextDocumentSaveReason = 1 +- // Automatic after a delay. +- AfterDelay TextDocumentSaveReason = 2 +- // When the editor lost focus. +- FocusOut TextDocumentSaveReason = 3 +- // Defines how the host (editor) should sync +- // document changes to the language server. +- // Documents should not be synced at all. +- None TextDocumentSyncKind = 0 +- // Documents are synced by always sending the full content +- // of the document. +- Full TextDocumentSyncKind = 1 +- // Documents are synced by sending the full content on open. +- // After that only incremental updates to the document are +- // send. +- Incremental TextDocumentSyncKind = 2 +- Relative TokenFormat = "relative" +- // Turn tracing off. +- Off TraceValue = "off" +- // Trace messages only. +- Messages TraceValue = "messages" +- // Verbose message tracing. +- Verbose TraceValue = "verbose" +- // Moniker uniqueness level to define scope of the moniker. 
+- // +- // @since 3.16.0 +- // The moniker is only unique inside a document +- Document UniquenessLevel = "document" +- // The moniker is unique inside a project for which a dump got created +- Project UniquenessLevel = "project" +- // The moniker is unique inside the group to which a project belongs +- Group UniquenessLevel = "group" +- // The moniker is unique inside the moniker scheme. +- Scheme UniquenessLevel = "scheme" +- // The moniker is globally unique +- Global UniquenessLevel = "global" +- // Interested in create events. +- WatchCreate WatchKind = 1 +- // Interested in change events +- WatchChange WatchKind = 2 +- // Interested in delete events +- WatchDelete WatchKind = 4 +-) +diff -urN a/gopls/internal/protocol/tsserver.go b/gopls/internal/protocol/tsserver.go +--- a/gopls/internal/protocol/tsserver.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/protocol/tsserver.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,1368 +0,0 @@ +-// Copyright 2023 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-// Code generated for LSP. DO NOT EDIT. +- +-package protocol +- +-// Code generated from protocol/metaModel.json at ref release/protocol/3.17.6-next.14 (hash 66a087310eea0d60495ba3578d78f70409c403d9). +-// https://github.com/microsoft/vscode-languageserver-node/blob/release/protocol/3.17.6-next.14/protocol/metaModel.json +-// LSP metaData.version = 3.17.0. +- +-import ( +- "context" +- "encoding/json" +- "fmt" +- +- "golang.org/x/tools/internal/jsonrpc2" +-) +- +-type Server interface { +- // See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#progress +- Progress(context.Context, *ProgressParams) error +- // See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#setTrace +- SetTrace(context.Context, *SetTraceParams) error +- // See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#callHierarchy_incomingCalls +- IncomingCalls(context.Context, *CallHierarchyIncomingCallsParams) ([]CallHierarchyIncomingCall, error) +- // See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#callHierarchy_outgoingCalls +- OutgoingCalls(context.Context, *CallHierarchyOutgoingCallsParams) ([]CallHierarchyOutgoingCall, error) +- // See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#codeAction_resolve +- ResolveCodeAction(context.Context, *CodeAction) (*CodeAction, error) +- // See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#codeLens_resolve +- ResolveCodeLens(context.Context, *CodeLens) (*CodeLens, error) +- // See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#completionItem_resolve +- ResolveCompletionItem(context.Context, *CompletionItem) (*CompletionItem, error) +- // See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#documentLink_resolve +- ResolveDocumentLink(context.Context, *DocumentLink) (*DocumentLink, error) +- // See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#exit +- Exit(context.Context) error +- // See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#initialize +- Initialize(context.Context, *ParamInitialize) (*InitializeResult, error) +- // See 
https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#initialized +- Initialized(context.Context, *InitializedParams) error +- // See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#inlayHint_resolve +- Resolve(context.Context, *InlayHint) (*InlayHint, error) +- // See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#notebookDocument_didChange +- DidChangeNotebookDocument(context.Context, *DidChangeNotebookDocumentParams) error +- // See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#notebookDocument_didClose +- DidCloseNotebookDocument(context.Context, *DidCloseNotebookDocumentParams) error +- // See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#notebookDocument_didOpen +- DidOpenNotebookDocument(context.Context, *DidOpenNotebookDocumentParams) error +- // See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#notebookDocument_didSave +- DidSaveNotebookDocument(context.Context, *DidSaveNotebookDocumentParams) error +- // See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#shutdown +- Shutdown(context.Context) error +- // See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#textDocument_codeAction +- CodeAction(context.Context, *CodeActionParams) ([]CodeAction, error) +- // See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#textDocument_codeLens +- CodeLens(context.Context, *CodeLensParams) ([]CodeLens, error) +- // See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#textDocument_colorPresentation +- ColorPresentation(context.Context, *ColorPresentationParams) ([]ColorPresentation, error) +- // See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#textDocument_completion +- Completion(context.Context, *CompletionParams) (*CompletionList, error) +- // See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#textDocument_declaration +- Declaration(context.Context, *DeclarationParams) (*Or_textDocument_declaration, error) +- // See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#textDocument_definition +- Definition(context.Context, *DefinitionParams) ([]Location, error) +- // See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#textDocument_diagnostic +- Diagnostic(context.Context, *DocumentDiagnosticParams) (*DocumentDiagnosticReport, error) +- // See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#textDocument_didChange +- DidChange(context.Context, *DidChangeTextDocumentParams) error +- // See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#textDocument_didClose +- DidClose(context.Context, *DidCloseTextDocumentParams) error +- // See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#textDocument_didOpen +- DidOpen(context.Context, *DidOpenTextDocumentParams) error +- // See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#textDocument_didSave +- DidSave(context.Context, *DidSaveTextDocumentParams) error +- // See 
https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#textDocument_documentColor +- DocumentColor(context.Context, *DocumentColorParams) ([]ColorInformation, error) +- // See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#textDocument_documentHighlight +- DocumentHighlight(context.Context, *DocumentHighlightParams) ([]DocumentHighlight, error) +- // See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#textDocument_documentLink +- DocumentLink(context.Context, *DocumentLinkParams) ([]DocumentLink, error) +- // See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#textDocument_documentSymbol +- DocumentSymbol(context.Context, *DocumentSymbolParams) ([]any, error) +- // See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#textDocument_foldingRange +- FoldingRange(context.Context, *FoldingRangeParams) ([]FoldingRange, error) +- // See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#textDocument_formatting +- Formatting(context.Context, *DocumentFormattingParams) ([]TextEdit, error) +- // See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#textDocument_hover +- Hover(context.Context, *HoverParams) (*Hover, error) +- // See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#textDocument_implementation +- Implementation(context.Context, *ImplementationParams) ([]Location, error) +- // See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#textDocument_inlayHint +- InlayHint(context.Context, *InlayHintParams) ([]InlayHint, error) +- // See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#textDocument_inlineCompletion +- InlineCompletion(context.Context, *InlineCompletionParams) (*Or_Result_textDocument_inlineCompletion, error) +- // See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#textDocument_inlineValue +- InlineValue(context.Context, *InlineValueParams) ([]InlineValue, error) +- // See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#textDocument_linkedEditingRange +- LinkedEditingRange(context.Context, *LinkedEditingRangeParams) (*LinkedEditingRanges, error) +- // See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#textDocument_moniker +- Moniker(context.Context, *MonikerParams) ([]Moniker, error) +- // See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#textDocument_onTypeFormatting +- OnTypeFormatting(context.Context, *DocumentOnTypeFormattingParams) ([]TextEdit, error) +- // See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#textDocument_prepareCallHierarchy +- PrepareCallHierarchy(context.Context, *CallHierarchyPrepareParams) ([]CallHierarchyItem, error) +- // See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#textDocument_prepareRename +- PrepareRename(context.Context, *PrepareRenameParams) (*PrepareRenameResult, error) +- // See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#textDocument_prepareTypeHierarchy +- PrepareTypeHierarchy(context.Context, *TypeHierarchyPrepareParams) 
([]TypeHierarchyItem, error) +- // See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#textDocument_rangeFormatting +- RangeFormatting(context.Context, *DocumentRangeFormattingParams) ([]TextEdit, error) +- // See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#textDocument_rangesFormatting +- RangesFormatting(context.Context, *DocumentRangesFormattingParams) ([]TextEdit, error) +- // See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#textDocument_references +- References(context.Context, *ReferenceParams) ([]Location, error) +- // See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#textDocument_rename +- Rename(context.Context, *RenameParams) (*WorkspaceEdit, error) +- // See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#textDocument_selectionRange +- SelectionRange(context.Context, *SelectionRangeParams) ([]SelectionRange, error) +- // See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#textDocument_semanticTokens_full +- SemanticTokensFull(context.Context, *SemanticTokensParams) (*SemanticTokens, error) +- // See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#textDocument_semanticTokens_full_delta +- SemanticTokensFullDelta(context.Context, *SemanticTokensDeltaParams) (any, error) +- // See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#textDocument_semanticTokens_range +- SemanticTokensRange(context.Context, *SemanticTokensRangeParams) (*SemanticTokens, error) +- // See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#textDocument_signatureHelp +- SignatureHelp(context.Context, *SignatureHelpParams) (*SignatureHelp, error) +- // See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#textDocument_typeDefinition +- TypeDefinition(context.Context, *TypeDefinitionParams) ([]Location, error) +- // See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#textDocument_willSave +- WillSave(context.Context, *WillSaveTextDocumentParams) error +- // See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#textDocument_willSaveWaitUntil +- WillSaveWaitUntil(context.Context, *WillSaveTextDocumentParams) ([]TextEdit, error) +- // See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#typeHierarchy_subtypes +- Subtypes(context.Context, *TypeHierarchySubtypesParams) ([]TypeHierarchyItem, error) +- // See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#typeHierarchy_supertypes +- Supertypes(context.Context, *TypeHierarchySupertypesParams) ([]TypeHierarchyItem, error) +- // See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#window_workDoneProgress_cancel +- WorkDoneProgressCancel(context.Context, *WorkDoneProgressCancelParams) error +- // See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#workspace_diagnostic +- DiagnosticWorkspace(context.Context, *WorkspaceDiagnosticParams) (*WorkspaceDiagnosticReport, error) +- // See 
https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#workspace_didChangeConfiguration +- DidChangeConfiguration(context.Context, *DidChangeConfigurationParams) error +- // See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#workspace_didChangeWatchedFiles +- DidChangeWatchedFiles(context.Context, *DidChangeWatchedFilesParams) error +- // See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#workspace_didChangeWorkspaceFolders +- DidChangeWorkspaceFolders(context.Context, *DidChangeWorkspaceFoldersParams) error +- // See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#workspace_didCreateFiles +- DidCreateFiles(context.Context, *CreateFilesParams) error +- // See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#workspace_didDeleteFiles +- DidDeleteFiles(context.Context, *DeleteFilesParams) error +- // See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#workspace_didRenameFiles +- DidRenameFiles(context.Context, *RenameFilesParams) error +- // See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#workspace_executeCommand +- ExecuteCommand(context.Context, *ExecuteCommandParams) (any, error) +- // See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#workspace_symbol +- Symbol(context.Context, *WorkspaceSymbolParams) ([]SymbolInformation, error) +- // See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#workspace_textDocumentContent +- TextDocumentContent(context.Context, *TextDocumentContentParams) (*TextDocumentContentResult, error) +- // See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#workspace_willCreateFiles +- WillCreateFiles(context.Context, *CreateFilesParams) (*WorkspaceEdit, error) +- // See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#workspace_willDeleteFiles +- WillDeleteFiles(context.Context, *DeleteFilesParams) (*WorkspaceEdit, error) +- // See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#workspace_willRenameFiles +- WillRenameFiles(context.Context, *RenameFilesParams) (*WorkspaceEdit, error) +- // See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#workspaceSymbol_resolve +- ResolveWorkspaceSymbol(context.Context, *WorkspaceSymbol) (*WorkspaceSymbol, error) +-} +- +-func serverDispatch(ctx context.Context, server Server, reply jsonrpc2.Replier, r jsonrpc2.Request) (bool, error) { +- resp, valid, err := ServerDispatchCall(ctx, server, r.Method(), r.Params()) +- if !valid { +- return false, nil +- } +- +- if err != nil { +- return valid, reply(ctx, nil, err) +- } else { +- return valid, reply(ctx, resp, nil) +- } +-} +- +-func ServerDispatchCall(ctx context.Context, server Server, method string, raw json.RawMessage) (resp any, _ bool, err error) { +- switch method { +- case "$/progress": +- var params ProgressParams +- if err := UnmarshalJSON(raw, ¶ms); err != nil { +- return nil, true, fmt.Errorf("%w: %s", jsonrpc2.ErrParse, err) +- } +- err := server.Progress(ctx, ¶ms) +- return nil, true, err +- +- case "$/setTrace": +- var params SetTraceParams +- if err := UnmarshalJSON(raw, ¶ms); err != nil { +- return nil, true, fmt.Errorf("%w: 
%s", jsonrpc2.ErrParse, err) +- } +- err := server.SetTrace(ctx, ¶ms) +- return nil, true, err +- +- case "callHierarchy/incomingCalls": +- var params CallHierarchyIncomingCallsParams +- if err := UnmarshalJSON(raw, ¶ms); err != nil { +- return nil, true, fmt.Errorf("%w: %s", jsonrpc2.ErrParse, err) +- } +- resp, err := server.IncomingCalls(ctx, ¶ms) +- if err != nil { +- return nil, true, err +- } +- return resp, true, nil +- +- case "callHierarchy/outgoingCalls": +- var params CallHierarchyOutgoingCallsParams +- if err := UnmarshalJSON(raw, ¶ms); err != nil { +- return nil, true, fmt.Errorf("%w: %s", jsonrpc2.ErrParse, err) +- } +- resp, err := server.OutgoingCalls(ctx, ¶ms) +- if err != nil { +- return nil, true, err +- } +- return resp, true, nil +- +- case "codeAction/resolve": +- var params CodeAction +- if err := UnmarshalJSON(raw, ¶ms); err != nil { +- return nil, true, fmt.Errorf("%w: %s", jsonrpc2.ErrParse, err) +- } +- resp, err := server.ResolveCodeAction(ctx, ¶ms) +- if err != nil { +- return nil, true, err +- } +- return resp, true, nil +- +- case "codeLens/resolve": +- var params CodeLens +- if err := UnmarshalJSON(raw, ¶ms); err != nil { +- return nil, true, fmt.Errorf("%w: %s", jsonrpc2.ErrParse, err) +- } +- resp, err := server.ResolveCodeLens(ctx, ¶ms) +- if err != nil { +- return nil, true, err +- } +- return resp, true, nil +- +- case "completionItem/resolve": +- var params CompletionItem +- if err := UnmarshalJSON(raw, ¶ms); err != nil { +- return nil, true, fmt.Errorf("%w: %s", jsonrpc2.ErrParse, err) +- } +- resp, err := server.ResolveCompletionItem(ctx, ¶ms) +- if err != nil { +- return nil, true, err +- } +- return resp, true, nil +- +- case "documentLink/resolve": +- var params DocumentLink +- if err := UnmarshalJSON(raw, ¶ms); err != nil { +- return nil, true, fmt.Errorf("%w: %s", jsonrpc2.ErrParse, err) +- } +- resp, err := server.ResolveDocumentLink(ctx, ¶ms) +- if err != nil { +- return nil, true, err +- } +- return resp, true, nil +- +- case "exit": +- err := server.Exit(ctx) +- return nil, true, err +- +- case "initialize": +- var params ParamInitialize +- if err := UnmarshalJSON(raw, ¶ms); err != nil { +- return nil, true, fmt.Errorf("%w: %s", jsonrpc2.ErrParse, err) +- } +- resp, err := server.Initialize(ctx, ¶ms) +- if err != nil { +- return nil, true, err +- } +- return resp, true, nil +- +- case "initialized": +- var params InitializedParams +- if err := UnmarshalJSON(raw, ¶ms); err != nil { +- return nil, true, fmt.Errorf("%w: %s", jsonrpc2.ErrParse, err) +- } +- err := server.Initialized(ctx, ¶ms) +- return nil, true, err +- +- case "inlayHint/resolve": +- var params InlayHint +- if err := UnmarshalJSON(raw, ¶ms); err != nil { +- return nil, true, fmt.Errorf("%w: %s", jsonrpc2.ErrParse, err) +- } +- resp, err := server.Resolve(ctx, ¶ms) +- if err != nil { +- return nil, true, err +- } +- return resp, true, nil +- +- case "notebookDocument/didChange": +- var params DidChangeNotebookDocumentParams +- if err := UnmarshalJSON(raw, ¶ms); err != nil { +- return nil, true, fmt.Errorf("%w: %s", jsonrpc2.ErrParse, err) +- } +- err := server.DidChangeNotebookDocument(ctx, ¶ms) +- return nil, true, err +- +- case "notebookDocument/didClose": +- var params DidCloseNotebookDocumentParams +- if err := UnmarshalJSON(raw, ¶ms); err != nil { +- return nil, true, fmt.Errorf("%w: %s", jsonrpc2.ErrParse, err) +- } +- err := server.DidCloseNotebookDocument(ctx, ¶ms) +- return nil, true, err +- +- case "notebookDocument/didOpen": +- var params 
DidOpenNotebookDocumentParams +- if err := UnmarshalJSON(raw, ¶ms); err != nil { +- return nil, true, fmt.Errorf("%w: %s", jsonrpc2.ErrParse, err) +- } +- err := server.DidOpenNotebookDocument(ctx, ¶ms) +- return nil, true, err +- +- case "notebookDocument/didSave": +- var params DidSaveNotebookDocumentParams +- if err := UnmarshalJSON(raw, ¶ms); err != nil { +- return nil, true, fmt.Errorf("%w: %s", jsonrpc2.ErrParse, err) +- } +- err := server.DidSaveNotebookDocument(ctx, ¶ms) +- return nil, true, err +- +- case "shutdown": +- err := server.Shutdown(ctx) +- return nil, true, err +- +- case "textDocument/codeAction": +- var params CodeActionParams +- if err := UnmarshalJSON(raw, ¶ms); err != nil { +- return nil, true, fmt.Errorf("%w: %s", jsonrpc2.ErrParse, err) +- } +- resp, err := server.CodeAction(ctx, ¶ms) +- if err != nil { +- return nil, true, err +- } +- return resp, true, nil +- +- case "textDocument/codeLens": +- var params CodeLensParams +- if err := UnmarshalJSON(raw, ¶ms); err != nil { +- return nil, true, fmt.Errorf("%w: %s", jsonrpc2.ErrParse, err) +- } +- resp, err := server.CodeLens(ctx, ¶ms) +- if err != nil { +- return nil, true, err +- } +- return resp, true, nil +- +- case "textDocument/colorPresentation": +- var params ColorPresentationParams +- if err := UnmarshalJSON(raw, ¶ms); err != nil { +- return nil, true, fmt.Errorf("%w: %s", jsonrpc2.ErrParse, err) +- } +- resp, err := server.ColorPresentation(ctx, ¶ms) +- if err != nil { +- return nil, true, err +- } +- return resp, true, nil +- +- case "textDocument/completion": +- var params CompletionParams +- if err := UnmarshalJSON(raw, ¶ms); err != nil { +- return nil, true, fmt.Errorf("%w: %s", jsonrpc2.ErrParse, err) +- } +- resp, err := server.Completion(ctx, ¶ms) +- if err != nil { +- return nil, true, err +- } +- return resp, true, nil +- +- case "textDocument/declaration": +- var params DeclarationParams +- if err := UnmarshalJSON(raw, ¶ms); err != nil { +- return nil, true, fmt.Errorf("%w: %s", jsonrpc2.ErrParse, err) +- } +- resp, err := server.Declaration(ctx, ¶ms) +- if err != nil { +- return nil, true, err +- } +- return resp, true, nil +- +- case "textDocument/definition": +- var params DefinitionParams +- if err := UnmarshalJSON(raw, ¶ms); err != nil { +- return nil, true, fmt.Errorf("%w: %s", jsonrpc2.ErrParse, err) +- } +- resp, err := server.Definition(ctx, ¶ms) +- if err != nil { +- return nil, true, err +- } +- return resp, true, nil +- +- case "textDocument/diagnostic": +- var params DocumentDiagnosticParams +- if err := UnmarshalJSON(raw, ¶ms); err != nil { +- return nil, true, fmt.Errorf("%w: %s", jsonrpc2.ErrParse, err) +- } +- resp, err := server.Diagnostic(ctx, ¶ms) +- if err != nil { +- return nil, true, err +- } +- return resp, true, nil +- +- case "textDocument/didChange": +- var params DidChangeTextDocumentParams +- if err := UnmarshalJSON(raw, ¶ms); err != nil { +- return nil, true, fmt.Errorf("%w: %s", jsonrpc2.ErrParse, err) +- } +- err := server.DidChange(ctx, ¶ms) +- return nil, true, err +- +- case "textDocument/didClose": +- var params DidCloseTextDocumentParams +- if err := UnmarshalJSON(raw, ¶ms); err != nil { +- return nil, true, fmt.Errorf("%w: %s", jsonrpc2.ErrParse, err) +- } +- err := server.DidClose(ctx, ¶ms) +- return nil, true, err +- +- case "textDocument/didOpen": +- var params DidOpenTextDocumentParams +- if err := UnmarshalJSON(raw, ¶ms); err != nil { +- return nil, true, fmt.Errorf("%w: %s", jsonrpc2.ErrParse, err) +- } +- err := server.DidOpen(ctx, ¶ms) +- return nil, 
true, err +- +- case "textDocument/didSave": +- var params DidSaveTextDocumentParams +- if err := UnmarshalJSON(raw, ¶ms); err != nil { +- return nil, true, fmt.Errorf("%w: %s", jsonrpc2.ErrParse, err) +- } +- err := server.DidSave(ctx, ¶ms) +- return nil, true, err +- +- case "textDocument/documentColor": +- var params DocumentColorParams +- if err := UnmarshalJSON(raw, ¶ms); err != nil { +- return nil, true, fmt.Errorf("%w: %s", jsonrpc2.ErrParse, err) +- } +- resp, err := server.DocumentColor(ctx, ¶ms) +- if err != nil { +- return nil, true, err +- } +- return resp, true, nil +- +- case "textDocument/documentHighlight": +- var params DocumentHighlightParams +- if err := UnmarshalJSON(raw, ¶ms); err != nil { +- return nil, true, fmt.Errorf("%w: %s", jsonrpc2.ErrParse, err) +- } +- resp, err := server.DocumentHighlight(ctx, ¶ms) +- if err != nil { +- return nil, true, err +- } +- return resp, true, nil +- +- case "textDocument/documentLink": +- var params DocumentLinkParams +- if err := UnmarshalJSON(raw, ¶ms); err != nil { +- return nil, true, fmt.Errorf("%w: %s", jsonrpc2.ErrParse, err) +- } +- resp, err := server.DocumentLink(ctx, ¶ms) +- if err != nil { +- return nil, true, err +- } +- return resp, true, nil +- +- case "textDocument/documentSymbol": +- var params DocumentSymbolParams +- if err := UnmarshalJSON(raw, ¶ms); err != nil { +- return nil, true, fmt.Errorf("%w: %s", jsonrpc2.ErrParse, err) +- } +- resp, err := server.DocumentSymbol(ctx, ¶ms) +- if err != nil { +- return nil, true, err +- } +- return resp, true, nil +- +- case "textDocument/foldingRange": +- var params FoldingRangeParams +- if err := UnmarshalJSON(raw, ¶ms); err != nil { +- return nil, true, fmt.Errorf("%w: %s", jsonrpc2.ErrParse, err) +- } +- resp, err := server.FoldingRange(ctx, ¶ms) +- if err != nil { +- return nil, true, err +- } +- return resp, true, nil +- +- case "textDocument/formatting": +- var params DocumentFormattingParams +- if err := UnmarshalJSON(raw, ¶ms); err != nil { +- return nil, true, fmt.Errorf("%w: %s", jsonrpc2.ErrParse, err) +- } +- resp, err := server.Formatting(ctx, ¶ms) +- if err != nil { +- return nil, true, err +- } +- return resp, true, nil +- +- case "textDocument/hover": +- var params HoverParams +- if err := UnmarshalJSON(raw, ¶ms); err != nil { +- return nil, true, fmt.Errorf("%w: %s", jsonrpc2.ErrParse, err) +- } +- resp, err := server.Hover(ctx, ¶ms) +- if err != nil { +- return nil, true, err +- } +- return resp, true, nil +- +- case "textDocument/implementation": +- var params ImplementationParams +- if err := UnmarshalJSON(raw, ¶ms); err != nil { +- return nil, true, fmt.Errorf("%w: %s", jsonrpc2.ErrParse, err) +- } +- resp, err := server.Implementation(ctx, ¶ms) +- if err != nil { +- return nil, true, err +- } +- return resp, true, nil +- +- case "textDocument/inlayHint": +- var params InlayHintParams +- if err := UnmarshalJSON(raw, ¶ms); err != nil { +- return nil, true, fmt.Errorf("%w: %s", jsonrpc2.ErrParse, err) +- } +- resp, err := server.InlayHint(ctx, ¶ms) +- if err != nil { +- return nil, true, err +- } +- return resp, true, nil +- +- case "textDocument/inlineCompletion": +- var params InlineCompletionParams +- if err := UnmarshalJSON(raw, ¶ms); err != nil { +- return nil, true, fmt.Errorf("%w: %s", jsonrpc2.ErrParse, err) +- } +- resp, err := server.InlineCompletion(ctx, ¶ms) +- if err != nil { +- return nil, true, err +- } +- return resp, true, nil +- +- case "textDocument/inlineValue": +- var params InlineValueParams +- if err := UnmarshalJSON(raw, ¶ms); err 
!= nil { +- return nil, true, fmt.Errorf("%w: %s", jsonrpc2.ErrParse, err) +- } +- resp, err := server.InlineValue(ctx, ¶ms) +- if err != nil { +- return nil, true, err +- } +- return resp, true, nil +- +- case "textDocument/linkedEditingRange": +- var params LinkedEditingRangeParams +- if err := UnmarshalJSON(raw, ¶ms); err != nil { +- return nil, true, fmt.Errorf("%w: %s", jsonrpc2.ErrParse, err) +- } +- resp, err := server.LinkedEditingRange(ctx, ¶ms) +- if err != nil { +- return nil, true, err +- } +- return resp, true, nil +- +- case "textDocument/moniker": +- var params MonikerParams +- if err := UnmarshalJSON(raw, ¶ms); err != nil { +- return nil, true, fmt.Errorf("%w: %s", jsonrpc2.ErrParse, err) +- } +- resp, err := server.Moniker(ctx, ¶ms) +- if err != nil { +- return nil, true, err +- } +- return resp, true, nil +- +- case "textDocument/onTypeFormatting": +- var params DocumentOnTypeFormattingParams +- if err := UnmarshalJSON(raw, ¶ms); err != nil { +- return nil, true, fmt.Errorf("%w: %s", jsonrpc2.ErrParse, err) +- } +- resp, err := server.OnTypeFormatting(ctx, ¶ms) +- if err != nil { +- return nil, true, err +- } +- return resp, true, nil +- +- case "textDocument/prepareCallHierarchy": +- var params CallHierarchyPrepareParams +- if err := UnmarshalJSON(raw, ¶ms); err != nil { +- return nil, true, fmt.Errorf("%w: %s", jsonrpc2.ErrParse, err) +- } +- resp, err := server.PrepareCallHierarchy(ctx, ¶ms) +- if err != nil { +- return nil, true, err +- } +- return resp, true, nil +- +- case "textDocument/prepareRename": +- var params PrepareRenameParams +- if err := UnmarshalJSON(raw, ¶ms); err != nil { +- return nil, true, fmt.Errorf("%w: %s", jsonrpc2.ErrParse, err) +- } +- resp, err := server.PrepareRename(ctx, ¶ms) +- if err != nil { +- return nil, true, err +- } +- return resp, true, nil +- +- case "textDocument/prepareTypeHierarchy": +- var params TypeHierarchyPrepareParams +- if err := UnmarshalJSON(raw, ¶ms); err != nil { +- return nil, true, fmt.Errorf("%w: %s", jsonrpc2.ErrParse, err) +- } +- resp, err := server.PrepareTypeHierarchy(ctx, ¶ms) +- if err != nil { +- return nil, true, err +- } +- return resp, true, nil +- +- case "textDocument/rangeFormatting": +- var params DocumentRangeFormattingParams +- if err := UnmarshalJSON(raw, ¶ms); err != nil { +- return nil, true, fmt.Errorf("%w: %s", jsonrpc2.ErrParse, err) +- } +- resp, err := server.RangeFormatting(ctx, ¶ms) +- if err != nil { +- return nil, true, err +- } +- return resp, true, nil +- +- case "textDocument/rangesFormatting": +- var params DocumentRangesFormattingParams +- if err := UnmarshalJSON(raw, ¶ms); err != nil { +- return nil, true, fmt.Errorf("%w: %s", jsonrpc2.ErrParse, err) +- } +- resp, err := server.RangesFormatting(ctx, ¶ms) +- if err != nil { +- return nil, true, err +- } +- return resp, true, nil +- +- case "textDocument/references": +- var params ReferenceParams +- if err := UnmarshalJSON(raw, ¶ms); err != nil { +- return nil, true, fmt.Errorf("%w: %s", jsonrpc2.ErrParse, err) +- } +- resp, err := server.References(ctx, ¶ms) +- if err != nil { +- return nil, true, err +- } +- return resp, true, nil +- +- case "textDocument/rename": +- var params RenameParams +- if err := UnmarshalJSON(raw, ¶ms); err != nil { +- return nil, true, fmt.Errorf("%w: %s", jsonrpc2.ErrParse, err) +- } +- resp, err := server.Rename(ctx, ¶ms) +- if err != nil { +- return nil, true, err +- } +- return resp, true, nil +- +- case "textDocument/selectionRange": +- var params SelectionRangeParams +- if err := UnmarshalJSON(raw, 
¶ms); err != nil { +- return nil, true, fmt.Errorf("%w: %s", jsonrpc2.ErrParse, err) +- } +- resp, err := server.SelectionRange(ctx, ¶ms) +- if err != nil { +- return nil, true, err +- } +- return resp, true, nil +- +- case "textDocument/semanticTokens/full": +- var params SemanticTokensParams +- if err := UnmarshalJSON(raw, ¶ms); err != nil { +- return nil, true, fmt.Errorf("%w: %s", jsonrpc2.ErrParse, err) +- } +- resp, err := server.SemanticTokensFull(ctx, ¶ms) +- if err != nil { +- return nil, true, err +- } +- return resp, true, nil +- +- case "textDocument/semanticTokens/full/delta": +- var params SemanticTokensDeltaParams +- if err := UnmarshalJSON(raw, ¶ms); err != nil { +- return nil, true, fmt.Errorf("%w: %s", jsonrpc2.ErrParse, err) +- } +- resp, err := server.SemanticTokensFullDelta(ctx, ¶ms) +- if err != nil { +- return nil, true, err +- } +- return resp, true, nil +- +- case "textDocument/semanticTokens/range": +- var params SemanticTokensRangeParams +- if err := UnmarshalJSON(raw, ¶ms); err != nil { +- return nil, true, fmt.Errorf("%w: %s", jsonrpc2.ErrParse, err) +- } +- resp, err := server.SemanticTokensRange(ctx, ¶ms) +- if err != nil { +- return nil, true, err +- } +- return resp, true, nil +- +- case "textDocument/signatureHelp": +- var params SignatureHelpParams +- if err := UnmarshalJSON(raw, ¶ms); err != nil { +- return nil, true, fmt.Errorf("%w: %s", jsonrpc2.ErrParse, err) +- } +- resp, err := server.SignatureHelp(ctx, ¶ms) +- if err != nil { +- return nil, true, err +- } +- return resp, true, nil +- +- case "textDocument/typeDefinition": +- var params TypeDefinitionParams +- if err := UnmarshalJSON(raw, ¶ms); err != nil { +- return nil, true, fmt.Errorf("%w: %s", jsonrpc2.ErrParse, err) +- } +- resp, err := server.TypeDefinition(ctx, ¶ms) +- if err != nil { +- return nil, true, err +- } +- return resp, true, nil +- +- case "textDocument/willSave": +- var params WillSaveTextDocumentParams +- if err := UnmarshalJSON(raw, ¶ms); err != nil { +- return nil, true, fmt.Errorf("%w: %s", jsonrpc2.ErrParse, err) +- } +- err := server.WillSave(ctx, ¶ms) +- return nil, true, err +- +- case "textDocument/willSaveWaitUntil": +- var params WillSaveTextDocumentParams +- if err := UnmarshalJSON(raw, ¶ms); err != nil { +- return nil, true, fmt.Errorf("%w: %s", jsonrpc2.ErrParse, err) +- } +- resp, err := server.WillSaveWaitUntil(ctx, ¶ms) +- if err != nil { +- return nil, true, err +- } +- return resp, true, nil +- +- case "typeHierarchy/subtypes": +- var params TypeHierarchySubtypesParams +- if err := UnmarshalJSON(raw, ¶ms); err != nil { +- return nil, true, fmt.Errorf("%w: %s", jsonrpc2.ErrParse, err) +- } +- resp, err := server.Subtypes(ctx, ¶ms) +- if err != nil { +- return nil, true, err +- } +- return resp, true, nil +- +- case "typeHierarchy/supertypes": +- var params TypeHierarchySupertypesParams +- if err := UnmarshalJSON(raw, ¶ms); err != nil { +- return nil, true, fmt.Errorf("%w: %s", jsonrpc2.ErrParse, err) +- } +- resp, err := server.Supertypes(ctx, ¶ms) +- if err != nil { +- return nil, true, err +- } +- return resp, true, nil +- +- case "window/workDoneProgress/cancel": +- var params WorkDoneProgressCancelParams +- if err := UnmarshalJSON(raw, ¶ms); err != nil { +- return nil, true, fmt.Errorf("%w: %s", jsonrpc2.ErrParse, err) +- } +- err := server.WorkDoneProgressCancel(ctx, ¶ms) +- return nil, true, err +- +- case "workspace/diagnostic": +- var params WorkspaceDiagnosticParams +- if err := UnmarshalJSON(raw, ¶ms); err != nil { +- return nil, true, fmt.Errorf("%w: 
%s", jsonrpc2.ErrParse, err) +- } +- resp, err := server.DiagnosticWorkspace(ctx, ¶ms) +- if err != nil { +- return nil, true, err +- } +- return resp, true, nil +- +- case "workspace/didChangeConfiguration": +- var params DidChangeConfigurationParams +- if err := UnmarshalJSON(raw, ¶ms); err != nil { +- return nil, true, fmt.Errorf("%w: %s", jsonrpc2.ErrParse, err) +- } +- err := server.DidChangeConfiguration(ctx, ¶ms) +- return nil, true, err +- +- case "workspace/didChangeWatchedFiles": +- var params DidChangeWatchedFilesParams +- if err := UnmarshalJSON(raw, ¶ms); err != nil { +- return nil, true, fmt.Errorf("%w: %s", jsonrpc2.ErrParse, err) +- } +- err := server.DidChangeWatchedFiles(ctx, ¶ms) +- return nil, true, err +- +- case "workspace/didChangeWorkspaceFolders": +- var params DidChangeWorkspaceFoldersParams +- if err := UnmarshalJSON(raw, ¶ms); err != nil { +- return nil, true, fmt.Errorf("%w: %s", jsonrpc2.ErrParse, err) +- } +- err := server.DidChangeWorkspaceFolders(ctx, ¶ms) +- return nil, true, err +- +- case "workspace/didCreateFiles": +- var params CreateFilesParams +- if err := UnmarshalJSON(raw, ¶ms); err != nil { +- return nil, true, fmt.Errorf("%w: %s", jsonrpc2.ErrParse, err) +- } +- err := server.DidCreateFiles(ctx, ¶ms) +- return nil, true, err +- +- case "workspace/didDeleteFiles": +- var params DeleteFilesParams +- if err := UnmarshalJSON(raw, ¶ms); err != nil { +- return nil, true, fmt.Errorf("%w: %s", jsonrpc2.ErrParse, err) +- } +- err := server.DidDeleteFiles(ctx, ¶ms) +- return nil, true, err +- +- case "workspace/didRenameFiles": +- var params RenameFilesParams +- if err := UnmarshalJSON(raw, ¶ms); err != nil { +- return nil, true, fmt.Errorf("%w: %s", jsonrpc2.ErrParse, err) +- } +- err := server.DidRenameFiles(ctx, ¶ms) +- return nil, true, err +- +- case "workspace/executeCommand": +- var params ExecuteCommandParams +- if err := UnmarshalJSON(raw, ¶ms); err != nil { +- return nil, true, fmt.Errorf("%w: %s", jsonrpc2.ErrParse, err) +- } +- resp, err := server.ExecuteCommand(ctx, ¶ms) +- if err != nil { +- return nil, true, err +- } +- return resp, true, nil +- +- case "workspace/symbol": +- var params WorkspaceSymbolParams +- if err := UnmarshalJSON(raw, ¶ms); err != nil { +- return nil, true, fmt.Errorf("%w: %s", jsonrpc2.ErrParse, err) +- } +- resp, err := server.Symbol(ctx, ¶ms) +- if err != nil { +- return nil, true, err +- } +- return resp, true, nil +- +- case "workspace/textDocumentContent": +- var params TextDocumentContentParams +- if err := UnmarshalJSON(raw, ¶ms); err != nil { +- return nil, true, fmt.Errorf("%w: %s", jsonrpc2.ErrParse, err) +- } +- resp, err := server.TextDocumentContent(ctx, ¶ms) +- if err != nil { +- return nil, true, err +- } +- return resp, true, nil +- +- case "workspace/willCreateFiles": +- var params CreateFilesParams +- if err := UnmarshalJSON(raw, ¶ms); err != nil { +- return nil, true, fmt.Errorf("%w: %s", jsonrpc2.ErrParse, err) +- } +- resp, err := server.WillCreateFiles(ctx, ¶ms) +- if err != nil { +- return nil, true, err +- } +- return resp, true, nil +- +- case "workspace/willDeleteFiles": +- var params DeleteFilesParams +- if err := UnmarshalJSON(raw, ¶ms); err != nil { +- return nil, true, fmt.Errorf("%w: %s", jsonrpc2.ErrParse, err) +- } +- resp, err := server.WillDeleteFiles(ctx, ¶ms) +- if err != nil { +- return nil, true, err +- } +- return resp, true, nil +- +- case "workspace/willRenameFiles": +- var params RenameFilesParams +- if err := UnmarshalJSON(raw, ¶ms); err != nil { +- return nil, true, 
fmt.Errorf("%w: %s", jsonrpc2.ErrParse, err) +- } +- resp, err := server.WillRenameFiles(ctx, ¶ms) +- if err != nil { +- return nil, true, err +- } +- return resp, true, nil +- +- case "workspaceSymbol/resolve": +- var params WorkspaceSymbol +- if err := UnmarshalJSON(raw, ¶ms); err != nil { +- return nil, true, fmt.Errorf("%w: %s", jsonrpc2.ErrParse, err) +- } +- resp, err := server.ResolveWorkspaceSymbol(ctx, ¶ms) +- if err != nil { +- return nil, true, err +- } +- return resp, true, nil +- +- default: +- return nil, false, nil +- } +-} +- +-func (s *serverDispatcher) Progress(ctx context.Context, params *ProgressParams) error { +- return s.sender.Notify(ctx, "$/progress", params) +-} +-func (s *serverDispatcher) SetTrace(ctx context.Context, params *SetTraceParams) error { +- return s.sender.Notify(ctx, "$/setTrace", params) +-} +-func (s *serverDispatcher) IncomingCalls(ctx context.Context, params *CallHierarchyIncomingCallsParams) ([]CallHierarchyIncomingCall, error) { +- var result []CallHierarchyIncomingCall +- if err := s.sender.Call(ctx, "callHierarchy/incomingCalls", params, &result); err != nil { +- return nil, err +- } +- return result, nil +-} +-func (s *serverDispatcher) OutgoingCalls(ctx context.Context, params *CallHierarchyOutgoingCallsParams) ([]CallHierarchyOutgoingCall, error) { +- var result []CallHierarchyOutgoingCall +- if err := s.sender.Call(ctx, "callHierarchy/outgoingCalls", params, &result); err != nil { +- return nil, err +- } +- return result, nil +-} +-func (s *serverDispatcher) ResolveCodeAction(ctx context.Context, params *CodeAction) (*CodeAction, error) { +- var result *CodeAction +- if err := s.sender.Call(ctx, "codeAction/resolve", params, &result); err != nil { +- return nil, err +- } +- return result, nil +-} +-func (s *serverDispatcher) ResolveCodeLens(ctx context.Context, params *CodeLens) (*CodeLens, error) { +- var result *CodeLens +- if err := s.sender.Call(ctx, "codeLens/resolve", params, &result); err != nil { +- return nil, err +- } +- return result, nil +-} +-func (s *serverDispatcher) ResolveCompletionItem(ctx context.Context, params *CompletionItem) (*CompletionItem, error) { +- var result *CompletionItem +- if err := s.sender.Call(ctx, "completionItem/resolve", params, &result); err != nil { +- return nil, err +- } +- return result, nil +-} +-func (s *serverDispatcher) ResolveDocumentLink(ctx context.Context, params *DocumentLink) (*DocumentLink, error) { +- var result *DocumentLink +- if err := s.sender.Call(ctx, "documentLink/resolve", params, &result); err != nil { +- return nil, err +- } +- return result, nil +-} +-func (s *serverDispatcher) Exit(ctx context.Context) error { +- return s.sender.Notify(ctx, "exit", nil) +-} +-func (s *serverDispatcher) Initialize(ctx context.Context, params *ParamInitialize) (*InitializeResult, error) { +- var result *InitializeResult +- if err := s.sender.Call(ctx, "initialize", params, &result); err != nil { +- return nil, err +- } +- return result, nil +-} +-func (s *serverDispatcher) Initialized(ctx context.Context, params *InitializedParams) error { +- return s.sender.Notify(ctx, "initialized", params) +-} +-func (s *serverDispatcher) Resolve(ctx context.Context, params *InlayHint) (*InlayHint, error) { +- var result *InlayHint +- if err := s.sender.Call(ctx, "inlayHint/resolve", params, &result); err != nil { +- return nil, err +- } +- return result, nil +-} +-func (s *serverDispatcher) DidChangeNotebookDocument(ctx context.Context, params *DidChangeNotebookDocumentParams) error { +- return 
s.sender.Notify(ctx, "notebookDocument/didChange", params) +-} +-func (s *serverDispatcher) DidCloseNotebookDocument(ctx context.Context, params *DidCloseNotebookDocumentParams) error { +- return s.sender.Notify(ctx, "notebookDocument/didClose", params) +-} +-func (s *serverDispatcher) DidOpenNotebookDocument(ctx context.Context, params *DidOpenNotebookDocumentParams) error { +- return s.sender.Notify(ctx, "notebookDocument/didOpen", params) +-} +-func (s *serverDispatcher) DidSaveNotebookDocument(ctx context.Context, params *DidSaveNotebookDocumentParams) error { +- return s.sender.Notify(ctx, "notebookDocument/didSave", params) +-} +-func (s *serverDispatcher) Shutdown(ctx context.Context) error { +- return s.sender.Call(ctx, "shutdown", nil, nil) +-} +-func (s *serverDispatcher) CodeAction(ctx context.Context, params *CodeActionParams) ([]CodeAction, error) { +- var result []CodeAction +- if err := s.sender.Call(ctx, "textDocument/codeAction", params, &result); err != nil { +- return nil, err +- } +- return result, nil +-} +-func (s *serverDispatcher) CodeLens(ctx context.Context, params *CodeLensParams) ([]CodeLens, error) { +- var result []CodeLens +- if err := s.sender.Call(ctx, "textDocument/codeLens", params, &result); err != nil { +- return nil, err +- } +- return result, nil +-} +-func (s *serverDispatcher) ColorPresentation(ctx context.Context, params *ColorPresentationParams) ([]ColorPresentation, error) { +- var result []ColorPresentation +- if err := s.sender.Call(ctx, "textDocument/colorPresentation", params, &result); err != nil { +- return nil, err +- } +- return result, nil +-} +-func (s *serverDispatcher) Completion(ctx context.Context, params *CompletionParams) (*CompletionList, error) { +- var result *CompletionList +- if err := s.sender.Call(ctx, "textDocument/completion", params, &result); err != nil { +- return nil, err +- } +- return result, nil +-} +-func (s *serverDispatcher) Declaration(ctx context.Context, params *DeclarationParams) (*Or_textDocument_declaration, error) { +- var result *Or_textDocument_declaration +- if err := s.sender.Call(ctx, "textDocument/declaration", params, &result); err != nil { +- return nil, err +- } +- return result, nil +-} +-func (s *serverDispatcher) Definition(ctx context.Context, params *DefinitionParams) ([]Location, error) { +- var result []Location +- if err := s.sender.Call(ctx, "textDocument/definition", params, &result); err != nil { +- return nil, err +- } +- return result, nil +-} +-func (s *serverDispatcher) Diagnostic(ctx context.Context, params *DocumentDiagnosticParams) (*DocumentDiagnosticReport, error) { +- var result *DocumentDiagnosticReport +- if err := s.sender.Call(ctx, "textDocument/diagnostic", params, &result); err != nil { +- return nil, err +- } +- return result, nil +-} +-func (s *serverDispatcher) DidChange(ctx context.Context, params *DidChangeTextDocumentParams) error { +- return s.sender.Notify(ctx, "textDocument/didChange", params) +-} +-func (s *serverDispatcher) DidClose(ctx context.Context, params *DidCloseTextDocumentParams) error { +- return s.sender.Notify(ctx, "textDocument/didClose", params) +-} +-func (s *serverDispatcher) DidOpen(ctx context.Context, params *DidOpenTextDocumentParams) error { +- return s.sender.Notify(ctx, "textDocument/didOpen", params) +-} +-func (s *serverDispatcher) DidSave(ctx context.Context, params *DidSaveTextDocumentParams) error { +- return s.sender.Notify(ctx, "textDocument/didSave", params) +-} +-func (s *serverDispatcher) DocumentColor(ctx context.Context, 
params *DocumentColorParams) ([]ColorInformation, error) { +- var result []ColorInformation +- if err := s.sender.Call(ctx, "textDocument/documentColor", params, &result); err != nil { +- return nil, err +- } +- return result, nil +-} +-func (s *serverDispatcher) DocumentHighlight(ctx context.Context, params *DocumentHighlightParams) ([]DocumentHighlight, error) { +- var result []DocumentHighlight +- if err := s.sender.Call(ctx, "textDocument/documentHighlight", params, &result); err != nil { +- return nil, err +- } +- return result, nil +-} +-func (s *serverDispatcher) DocumentLink(ctx context.Context, params *DocumentLinkParams) ([]DocumentLink, error) { +- var result []DocumentLink +- if err := s.sender.Call(ctx, "textDocument/documentLink", params, &result); err != nil { +- return nil, err +- } +- return result, nil +-} +-func (s *serverDispatcher) DocumentSymbol(ctx context.Context, params *DocumentSymbolParams) ([]any, error) { +- var result []any +- if err := s.sender.Call(ctx, "textDocument/documentSymbol", params, &result); err != nil { +- return nil, err +- } +- return result, nil +-} +-func (s *serverDispatcher) FoldingRange(ctx context.Context, params *FoldingRangeParams) ([]FoldingRange, error) { +- var result []FoldingRange +- if err := s.sender.Call(ctx, "textDocument/foldingRange", params, &result); err != nil { +- return nil, err +- } +- return result, nil +-} +-func (s *serverDispatcher) Formatting(ctx context.Context, params *DocumentFormattingParams) ([]TextEdit, error) { +- var result []TextEdit +- if err := s.sender.Call(ctx, "textDocument/formatting", params, &result); err != nil { +- return nil, err +- } +- return result, nil +-} +-func (s *serverDispatcher) Hover(ctx context.Context, params *HoverParams) (*Hover, error) { +- var result *Hover +- if err := s.sender.Call(ctx, "textDocument/hover", params, &result); err != nil { +- return nil, err +- } +- return result, nil +-} +-func (s *serverDispatcher) Implementation(ctx context.Context, params *ImplementationParams) ([]Location, error) { +- var result []Location +- if err := s.sender.Call(ctx, "textDocument/implementation", params, &result); err != nil { +- return nil, err +- } +- return result, nil +-} +-func (s *serverDispatcher) InlayHint(ctx context.Context, params *InlayHintParams) ([]InlayHint, error) { +- var result []InlayHint +- if err := s.sender.Call(ctx, "textDocument/inlayHint", params, &result); err != nil { +- return nil, err +- } +- return result, nil +-} +-func (s *serverDispatcher) InlineCompletion(ctx context.Context, params *InlineCompletionParams) (*Or_Result_textDocument_inlineCompletion, error) { +- var result *Or_Result_textDocument_inlineCompletion +- if err := s.sender.Call(ctx, "textDocument/inlineCompletion", params, &result); err != nil { +- return nil, err +- } +- return result, nil +-} +-func (s *serverDispatcher) InlineValue(ctx context.Context, params *InlineValueParams) ([]InlineValue, error) { +- var result []InlineValue +- if err := s.sender.Call(ctx, "textDocument/inlineValue", params, &result); err != nil { +- return nil, err +- } +- return result, nil +-} +-func (s *serverDispatcher) LinkedEditingRange(ctx context.Context, params *LinkedEditingRangeParams) (*LinkedEditingRanges, error) { +- var result *LinkedEditingRanges +- if err := s.sender.Call(ctx, "textDocument/linkedEditingRange", params, &result); err != nil { +- return nil, err +- } +- return result, nil +-} +-func (s *serverDispatcher) Moniker(ctx context.Context, params *MonikerParams) ([]Moniker, error) { +- var 
result []Moniker +- if err := s.sender.Call(ctx, "textDocument/moniker", params, &result); err != nil { +- return nil, err +- } +- return result, nil +-} +-func (s *serverDispatcher) OnTypeFormatting(ctx context.Context, params *DocumentOnTypeFormattingParams) ([]TextEdit, error) { +- var result []TextEdit +- if err := s.sender.Call(ctx, "textDocument/onTypeFormatting", params, &result); err != nil { +- return nil, err +- } +- return result, nil +-} +-func (s *serverDispatcher) PrepareCallHierarchy(ctx context.Context, params *CallHierarchyPrepareParams) ([]CallHierarchyItem, error) { +- var result []CallHierarchyItem +- if err := s.sender.Call(ctx, "textDocument/prepareCallHierarchy", params, &result); err != nil { +- return nil, err +- } +- return result, nil +-} +-func (s *serverDispatcher) PrepareRename(ctx context.Context, params *PrepareRenameParams) (*PrepareRenameResult, error) { +- var result *PrepareRenameResult +- if err := s.sender.Call(ctx, "textDocument/prepareRename", params, &result); err != nil { +- return nil, err +- } +- return result, nil +-} +-func (s *serverDispatcher) PrepareTypeHierarchy(ctx context.Context, params *TypeHierarchyPrepareParams) ([]TypeHierarchyItem, error) { +- var result []TypeHierarchyItem +- if err := s.sender.Call(ctx, "textDocument/prepareTypeHierarchy", params, &result); err != nil { +- return nil, err +- } +- return result, nil +-} +-func (s *serverDispatcher) RangeFormatting(ctx context.Context, params *DocumentRangeFormattingParams) ([]TextEdit, error) { +- var result []TextEdit +- if err := s.sender.Call(ctx, "textDocument/rangeFormatting", params, &result); err != nil { +- return nil, err +- } +- return result, nil +-} +-func (s *serverDispatcher) RangesFormatting(ctx context.Context, params *DocumentRangesFormattingParams) ([]TextEdit, error) { +- var result []TextEdit +- if err := s.sender.Call(ctx, "textDocument/rangesFormatting", params, &result); err != nil { +- return nil, err +- } +- return result, nil +-} +-func (s *serverDispatcher) References(ctx context.Context, params *ReferenceParams) ([]Location, error) { +- var result []Location +- if err := s.sender.Call(ctx, "textDocument/references", params, &result); err != nil { +- return nil, err +- } +- return result, nil +-} +-func (s *serverDispatcher) Rename(ctx context.Context, params *RenameParams) (*WorkspaceEdit, error) { +- var result *WorkspaceEdit +- if err := s.sender.Call(ctx, "textDocument/rename", params, &result); err != nil { +- return nil, err +- } +- return result, nil +-} +-func (s *serverDispatcher) SelectionRange(ctx context.Context, params *SelectionRangeParams) ([]SelectionRange, error) { +- var result []SelectionRange +- if err := s.sender.Call(ctx, "textDocument/selectionRange", params, &result); err != nil { +- return nil, err +- } +- return result, nil +-} +-func (s *serverDispatcher) SemanticTokensFull(ctx context.Context, params *SemanticTokensParams) (*SemanticTokens, error) { +- var result *SemanticTokens +- if err := s.sender.Call(ctx, "textDocument/semanticTokens/full", params, &result); err != nil { +- return nil, err +- } +- return result, nil +-} +-func (s *serverDispatcher) SemanticTokensFullDelta(ctx context.Context, params *SemanticTokensDeltaParams) (any, error) { +- var result any +- if err := s.sender.Call(ctx, "textDocument/semanticTokens/full/delta", params, &result); err != nil { +- return nil, err +- } +- return result, nil +-} +-func (s *serverDispatcher) SemanticTokensRange(ctx context.Context, params *SemanticTokensRangeParams) 
(*SemanticTokens, error) { +- var result *SemanticTokens +- if err := s.sender.Call(ctx, "textDocument/semanticTokens/range", params, &result); err != nil { +- return nil, err +- } +- return result, nil +-} +-func (s *serverDispatcher) SignatureHelp(ctx context.Context, params *SignatureHelpParams) (*SignatureHelp, error) { +- var result *SignatureHelp +- if err := s.sender.Call(ctx, "textDocument/signatureHelp", params, &result); err != nil { +- return nil, err +- } +- return result, nil +-} +-func (s *serverDispatcher) TypeDefinition(ctx context.Context, params *TypeDefinitionParams) ([]Location, error) { +- var result []Location +- if err := s.sender.Call(ctx, "textDocument/typeDefinition", params, &result); err != nil { +- return nil, err +- } +- return result, nil +-} +-func (s *serverDispatcher) WillSave(ctx context.Context, params *WillSaveTextDocumentParams) error { +- return s.sender.Notify(ctx, "textDocument/willSave", params) +-} +-func (s *serverDispatcher) WillSaveWaitUntil(ctx context.Context, params *WillSaveTextDocumentParams) ([]TextEdit, error) { +- var result []TextEdit +- if err := s.sender.Call(ctx, "textDocument/willSaveWaitUntil", params, &result); err != nil { +- return nil, err +- } +- return result, nil +-} +-func (s *serverDispatcher) Subtypes(ctx context.Context, params *TypeHierarchySubtypesParams) ([]TypeHierarchyItem, error) { +- var result []TypeHierarchyItem +- if err := s.sender.Call(ctx, "typeHierarchy/subtypes", params, &result); err != nil { +- return nil, err +- } +- return result, nil +-} +-func (s *serverDispatcher) Supertypes(ctx context.Context, params *TypeHierarchySupertypesParams) ([]TypeHierarchyItem, error) { +- var result []TypeHierarchyItem +- if err := s.sender.Call(ctx, "typeHierarchy/supertypes", params, &result); err != nil { +- return nil, err +- } +- return result, nil +-} +-func (s *serverDispatcher) WorkDoneProgressCancel(ctx context.Context, params *WorkDoneProgressCancelParams) error { +- return s.sender.Notify(ctx, "window/workDoneProgress/cancel", params) +-} +-func (s *serverDispatcher) DiagnosticWorkspace(ctx context.Context, params *WorkspaceDiagnosticParams) (*WorkspaceDiagnosticReport, error) { +- var result *WorkspaceDiagnosticReport +- if err := s.sender.Call(ctx, "workspace/diagnostic", params, &result); err != nil { +- return nil, err +- } +- return result, nil +-} +-func (s *serverDispatcher) DidChangeConfiguration(ctx context.Context, params *DidChangeConfigurationParams) error { +- return s.sender.Notify(ctx, "workspace/didChangeConfiguration", params) +-} +-func (s *serverDispatcher) DidChangeWatchedFiles(ctx context.Context, params *DidChangeWatchedFilesParams) error { +- return s.sender.Notify(ctx, "workspace/didChangeWatchedFiles", params) +-} +-func (s *serverDispatcher) DidChangeWorkspaceFolders(ctx context.Context, params *DidChangeWorkspaceFoldersParams) error { +- return s.sender.Notify(ctx, "workspace/didChangeWorkspaceFolders", params) +-} +-func (s *serverDispatcher) DidCreateFiles(ctx context.Context, params *CreateFilesParams) error { +- return s.sender.Notify(ctx, "workspace/didCreateFiles", params) +-} +-func (s *serverDispatcher) DidDeleteFiles(ctx context.Context, params *DeleteFilesParams) error { +- return s.sender.Notify(ctx, "workspace/didDeleteFiles", params) +-} +-func (s *serverDispatcher) DidRenameFiles(ctx context.Context, params *RenameFilesParams) error { +- return s.sender.Notify(ctx, "workspace/didRenameFiles", params) +-} +-func (s *serverDispatcher) ExecuteCommand(ctx context.Context, 
params *ExecuteCommandParams) (any, error) { +- var result any +- if err := s.sender.Call(ctx, "workspace/executeCommand", params, &result); err != nil { +- return nil, err +- } +- return result, nil +-} +-func (s *serverDispatcher) Symbol(ctx context.Context, params *WorkspaceSymbolParams) ([]SymbolInformation, error) { +- var result []SymbolInformation +- if err := s.sender.Call(ctx, "workspace/symbol", params, &result); err != nil { +- return nil, err +- } +- return result, nil +-} +-func (s *serverDispatcher) TextDocumentContent(ctx context.Context, params *TextDocumentContentParams) (*TextDocumentContentResult, error) { +- var result *TextDocumentContentResult +- if err := s.sender.Call(ctx, "workspace/textDocumentContent", params, &result); err != nil { +- return nil, err +- } +- return result, nil +-} +-func (s *serverDispatcher) WillCreateFiles(ctx context.Context, params *CreateFilesParams) (*WorkspaceEdit, error) { +- var result *WorkspaceEdit +- if err := s.sender.Call(ctx, "workspace/willCreateFiles", params, &result); err != nil { +- return nil, err +- } +- return result, nil +-} +-func (s *serverDispatcher) WillDeleteFiles(ctx context.Context, params *DeleteFilesParams) (*WorkspaceEdit, error) { +- var result *WorkspaceEdit +- if err := s.sender.Call(ctx, "workspace/willDeleteFiles", params, &result); err != nil { +- return nil, err +- } +- return result, nil +-} +-func (s *serverDispatcher) WillRenameFiles(ctx context.Context, params *RenameFilesParams) (*WorkspaceEdit, error) { +- var result *WorkspaceEdit +- if err := s.sender.Call(ctx, "workspace/willRenameFiles", params, &result); err != nil { +- return nil, err +- } +- return result, nil +-} +-func (s *serverDispatcher) ResolveWorkspaceSymbol(ctx context.Context, params *WorkspaceSymbol) (*WorkspaceSymbol, error) { +- var result *WorkspaceSymbol +- if err := s.sender.Call(ctx, "workspaceSymbol/resolve", params, &result); err != nil { +- return nil, err +- } +- return result, nil +-} +diff -urN a/gopls/internal/protocol/uri.go b/gopls/internal/protocol/uri.go +--- a/gopls/internal/protocol/uri.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/protocol/uri.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,246 +0,0 @@ +-// Copyright 2023 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package protocol +- +-// This file declares URI, DocumentURI, and its methods. +-// +-// For the LSP definition of these types, see +-// https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#uri +- +-import ( +- "fmt" +- "net/url" +- "path/filepath" +- "strings" +- "unicode" +- +- "golang.org/x/tools/gopls/internal/util/pathutil" +-) +- +-// A DocumentURI is the URI of a client editor document. +-// +-// According to the LSP specification: +-// +-// Care should be taken to handle encoding in URIs. For +-// example, some clients (such as VS Code) may encode colons +-// in drive letters while others do not. The URIs below are +-// both valid, but clients and servers should be consistent +-// with the form they use themselves to ensure the other party +-// doesn’t interpret them as distinct URIs. Clients and +-// servers should not assume that each other are encoding the +-// same way (for example a client encoding colons in drive +-// letters cannot assume server responses will have encoded +-// colons). 
The same applies to casing of drive letters - one +-// party should not assume the other party will return paths +-// with drive letters cased the same as it. +-// +-// file:///c:/project/readme.md +-// file:///C%3A/project/readme.md +-// +-// This is done during JSON unmarshalling; +-// see [DocumentURI.UnmarshalText] for details. +-type DocumentURI string +- +-// A URI is an arbitrary URL (e.g. https), not necessarily a file. +-type URI = string +- +-// UnmarshalText implements decoding of DocumentURI values. +-// +-// In particular, it implements a systematic correction of various odd +-// features of the definition of DocumentURI in the LSP spec that +-// appear to be workarounds for bugs in VS Code. For example, it may +-// URI-encode the URI itself, so that colon becomes %3A, and it may +-// send file://foo.go URIs that have two slashes (not three) and no +-// hostname. +-// +-// We use UnmarshalText, not UnmarshalJSON, because it is called even +-// for non-addressable values such as keys and values of map[K]V, +-// where there is no pointer of type *K or *V on which to call +-// UnmarshalJSON. (See Go issue #28189 for more detail.) +-// +-// Non-empty DocumentURIs are valid "file"-scheme URIs. +-// The empty DocumentURI is valid. +-func (uri *DocumentURI) UnmarshalText(data []byte) (err error) { +- *uri, err = ParseDocumentURI(string(data)) +- return +-} +- +-// Clean returns the cleaned uri by triggering filepath.Clean underlying. +-func (uri DocumentURI) Clean() DocumentURI { +- return URIFromPath(filepath.Clean(uri.Path())) +-} +- +-// Path returns the file path for the given URI. +-// +-// DocumentURI("").Path() returns the empty string. +-// +-// Path panics if called on a URI that is not a valid filename. +-func (uri DocumentURI) Path() string { +- filename, err := filename(uri) +- if err != nil { +- // e.g. ParseRequestURI failed. +- // +- // This can only affect DocumentURIs created by +- // direct string manipulation; all DocumentURIs +- // received from the client pass through +- // ParseRequestURI, which ensures validity. +- panic(err) +- } +- return filepath.FromSlash(filename) +-} +- +-// Base returns the base name of the file path of the given URI. +-func (uri DocumentURI) Base() string { +- return filepath.Base(uri.Path()) +-} +- +-// Dir returns the URI for the directory containing the receiver. +-func (uri DocumentURI) Dir() DocumentURI { +- // This function could be more efficiently implemented by avoiding any call +- // to Path(), but at least consolidates URI manipulation. +- return URIFromPath(uri.DirPath()) +-} +- +-// DirPath returns the file path to the directory containing this URI, which +-// must be a file URI. +-func (uri DocumentURI) DirPath() string { +- return filepath.Dir(uri.Path()) +-} +- +-// Encloses reports whether uri's path, considered as a sequence of segments, +-// is a prefix of file's path. +-func (uri DocumentURI) Encloses(file DocumentURI) bool { +- return pathutil.InDir(uri.Path(), file.Path()) +-} +- +-// Location returns the Location for the specified range of this URI's file. +-func (uri DocumentURI) Location(rng Range) Location { +- return Location{URI: uri, Range: rng} +-} +- +-func filename(uri DocumentURI) (string, error) { +- if uri == "" { +- return "", nil +- } +- +- // This conservative check for the common case +- // of a simple non-empty absolute POSIX filename +- // avoids the allocation of a net.URL. 
+- if strings.HasPrefix(string(uri), "file:///") { +- rest := string(uri)[len("file://"):] // leave one slash +- for i := 0; i < len(rest); i++ { +- b := rest[i] +- // Reject these cases: +- if b < ' ' || b == 0x7f || // control character +- b == '%' || b == '+' || // URI escape +- b == ':' || // Windows drive letter +- b == '&' || b == '?' { // authority or query +- goto slow +- } +- // We do not reject '@' as it cannot be part of the +- // authority (e.g. user:pass@example.com) in a +- // "file:///" URL, and '@' commonly appears in file +- // paths such as GOMODCACHE/module@version/... +- } +- return rest, nil +- } +-slow: +- +- u, err := url.ParseRequestURI(string(uri)) +- if err != nil { +- return "", err +- } +- if u.Scheme != fileScheme { +- return "", fmt.Errorf("only file URIs are supported, got %q from %q", u.Scheme, uri) +- } +- // If the URI is a Windows URI, we trim the leading "/" and uppercase +- // the drive letter, which will never be case sensitive. +- if isWindowsDriveURIPath(u.Path) { +- u.Path = strings.ToUpper(string(u.Path[1])) + u.Path[2:] +- } +- +- return u.Path, nil +-} +- +-// ParseDocumentURI interprets a string as a DocumentURI, applying VS +-// Code workarounds; see [DocumentURI.UnmarshalText] for details. +-// If "s" is a file name, use [URIFromPath] instead. +-func ParseDocumentURI(s string) (DocumentURI, error) { +- if s == "" { +- return "", nil +- } +- +- if !strings.HasPrefix(s, "file://") { +- return "", fmt.Errorf("DocumentURI scheme is not 'file': %s", s) +- } +- +- // VS Code sends URLs with only two slashes, +- // which are invalid. golang/go#39789. +- if !strings.HasPrefix(s, "file:///") { +- s = "file:///" + s[len("file://"):] +- } +- +- // Even though the input is a URI, it may not be in canonical form. VS Code +- // in particular over-escapes :, @, etc. Unescape and re-encode to canonicalize. +- path, err := url.PathUnescape(s[len("file://"):]) +- if err != nil { +- return "", err +- } +- +- // File URIs from Windows may have lowercase drive letters. +- // Since drive letters are guaranteed to be case insensitive, +- // we change them to uppercase to remain consistent. +- // For example, file:///c:/x/y/z becomes file:///C:/x/y/z. +- if isWindowsDriveURIPath(path) { +- path = path[:1] + strings.ToUpper(string(path[1])) + path[2:] +- } +- u := url.URL{Scheme: fileScheme, Path: path} +- return DocumentURI(u.String()), nil +-} +- +-// URIFromPath returns DocumentURI for the supplied file path. +-// Given "", it returns "". +-func URIFromPath(path string) DocumentURI { +- if path == "" { +- return "" +- } +- if !isWindowsDrivePath(path) { +- if abs, err := filepath.Abs(path); err == nil { +- path = abs +- } +- } +- // Check the file path again, in case it became absolute. +- if isWindowsDrivePath(path) { +- path = "/" + strings.ToUpper(string(path[0])) + path[1:] +- } +- path = filepath.ToSlash(path) +- u := url.URL{ +- Scheme: fileScheme, +- Path: path, +- } +- return DocumentURI(u.String()) +-} +- +-const fileScheme = "file" +- +-// isWindowsDrivePath returns true if the file path is of the form used by +-// Windows. We check if the path begins with a drive letter, followed by a ":". +-// For example: C:/x/y/z. +-func isWindowsDrivePath(path string) bool { +- if len(path) < 3 { +- return false +- } +- return unicode.IsLetter(rune(path[0])) && path[1] == ':' +-} +- +-// isWindowsDriveURIPath returns true if the file URI is of the format used by +-// Windows URIs. 
The url.Parse package does not specially handle Windows paths +-// (see golang/go#6027), so we check if the URI path has a drive prefix (e.g. "/C:"). +-func isWindowsDriveURIPath(uri string) bool { +- if len(uri) < 4 { +- return false +- } +- return uri[0] == '/' && unicode.IsLetter(rune(uri[1])) && uri[2] == ':' +-} +diff -urN a/gopls/internal/protocol/uri_test.go b/gopls/internal/protocol/uri_test.go +--- a/gopls/internal/protocol/uri_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/protocol/uri_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,134 +0,0 @@ +-// Copyright 2019 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-//go:build !windows +-// +build !windows +- +-package protocol_test +- +-import ( +- "testing" +- +- "golang.org/x/tools/gopls/internal/protocol" +-) +- +-// TestURIFromPath tests the conversion between URIs and filenames. The test cases +-// include Windows-style URIs and filepaths, but we avoid having OS-specific +-// tests by using only forward slashes, assuming that the standard library +-// functions filepath.ToSlash and filepath.FromSlash do not need testing. +-func TestURIFromPath(t *testing.T) { +- for _, test := range []struct { +- path, wantFile string +- wantURI protocol.DocumentURI +- }{ +- { +- path: ``, +- wantFile: ``, +- wantURI: protocol.DocumentURI(""), +- }, +- { +- path: `C:/Windows/System32`, +- wantFile: `C:/Windows/System32`, +- wantURI: protocol.DocumentURI("file:///C:/Windows/System32"), +- }, +- { +- path: `C:/Go/src/bob.go`, +- wantFile: `C:/Go/src/bob.go`, +- wantURI: protocol.DocumentURI("file:///C:/Go/src/bob.go"), +- }, +- { +- path: `c:/Go/src/bob.go`, +- wantFile: `C:/Go/src/bob.go`, +- wantURI: protocol.DocumentURI("file:///C:/Go/src/bob.go"), +- }, +- { +- path: `/path/to/dir`, +- wantFile: `/path/to/dir`, +- wantURI: protocol.DocumentURI("file:///path/to/dir"), +- }, +- { +- path: `/a/b/c/src/bob.go`, +- wantFile: `/a/b/c/src/bob.go`, +- wantURI: protocol.DocumentURI("file:///a/b/c/src/bob.go"), +- }, +- { +- path: `c:/Go/src/bob george/george/george.go`, +- wantFile: `C:/Go/src/bob george/george/george.go`, +- wantURI: protocol.DocumentURI("file:///C:/Go/src/bob%20george/george/george.go"), +- }, +- } { +- got := protocol.URIFromPath(test.path) +- if got != test.wantURI { +- t.Errorf("URIFromPath(%q): got %q, expected %q", test.path, got, test.wantURI) +- } +- gotFilename := got.Path() +- if gotFilename != test.wantFile { +- t.Errorf("Filename(%q): got %q, expected %q", got, gotFilename, test.wantFile) +- } +- } +-} +- +-func TestParseDocumentURI(t *testing.T) { +- for _, test := range []struct { +- input string +- want string // string(DocumentURI) on success or error.Error() on failure +- wantPath string // expected DocumentURI.Path on success +- }{ +- { +- input: `file:///c:/Go/src/bob%20george/george/george.go`, +- want: "file:///C:/Go/src/bob%20george/george/george.go", +- wantPath: `C:/Go/src/bob george/george/george.go`, +- }, +- { +- input: `file:///C%3A/Go/src/bob%20george/george/george.go`, +- want: "file:///C:/Go/src/bob%20george/george/george.go", +- wantPath: `C:/Go/src/bob george/george/george.go`, +- }, +- { +- input: `file:///path/to/%25p%25ercent%25/per%25cent.go`, +- want: `file:///path/to/%25p%25ercent%25/per%25cent.go`, +- wantPath: `/path/to/%p%ercent%/per%cent.go`, +- }, +- { +- input: `file:///C%3A/`, +- want: `file:///C:/`, +- wantPath: `C:/`, +- }, +- { +- input: `file:///`, +- want: 
`file:///`, +- wantPath: `/`, +- }, +- { +- input: `file://wsl%24/Ubuntu/home/wdcui/repo/VMEnclaves/cvm-runtime`, +- want: `file:///wsl$/Ubuntu/home/wdcui/repo/VMEnclaves/cvm-runtime`, +- wantPath: `/wsl$/Ubuntu/home/wdcui/repo/VMEnclaves/cvm-runtime`, +- }, +- { +- input: "", +- want: "", +- wantPath: "", +- }, +- // Errors: +- { +- input: "https://go.dev/", +- want: "DocumentURI scheme is not 'file': https://go.dev/", +- }, +- } { +- uri, err := protocol.ParseDocumentURI(test.input) +- var got string +- if err != nil { +- got = err.Error() +- } else { +- got = string(uri) +- } +- if got != test.want { +- t.Errorf("ParseDocumentURI(%q): got %q, want %q", test.input, got, test.want) +- } +- if err == nil && uri.Path() != test.wantPath { +- t.Errorf("DocumentURI(%s).Path = %q, want %q", uri, +- uri.Path(), test.wantPath) +- } +- } +-} +diff -urN a/gopls/internal/protocol/uri_windows_test.go b/gopls/internal/protocol/uri_windows_test.go +--- a/gopls/internal/protocol/uri_windows_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/protocol/uri_windows_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,139 +0,0 @@ +-// Copyright 2020 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-//go:build windows +-// +build windows +- +-package protocol_test +- +-import ( +- "path/filepath" +- "testing" +- +- "golang.org/x/tools/gopls/internal/protocol" +-) +- +-// TestURIFromPath tests the conversion between URIs and filenames. The test cases +-// include Windows-style URIs and filepaths, but we avoid having OS-specific +-// tests by using only forward slashes, assuming that the standard library +-// functions filepath.ToSlash and filepath.FromSlash do not need testing. 
+-func TestURIFromPath(t *testing.T) { +- rootPath, err := filepath.Abs("/") +- if err != nil { +- t.Fatal(err) +- } +- if len(rootPath) < 2 || rootPath[1] != ':' { +- t.Fatalf("malformed root path %q", rootPath) +- } +- driveLetter := string(rootPath[0]) +- +- for _, test := range []struct { +- path, wantFile string +- wantURI protocol.DocumentURI +- }{ +- { +- path: ``, +- wantFile: ``, +- wantURI: protocol.DocumentURI(""), +- }, +- { +- path: `C:\Windows\System32`, +- wantFile: `C:\Windows\System32`, +- wantURI: protocol.DocumentURI("file:///C:/Windows/System32"), +- }, +- { +- path: `C:\Go\src\bob.go`, +- wantFile: `C:\Go\src\bob.go`, +- wantURI: protocol.DocumentURI("file:///C:/Go/src/bob.go"), +- }, +- { +- path: `c:\Go\src\bob.go`, +- wantFile: `C:\Go\src\bob.go`, +- wantURI: protocol.DocumentURI("file:///C:/Go/src/bob.go"), +- }, +- { +- path: `\path\to\dir`, +- wantFile: driveLetter + `:\path\to\dir`, +- wantURI: protocol.DocumentURI("file:///" + driveLetter + ":/path/to/dir"), +- }, +- { +- path: `\a\b\c\src\bob.go`, +- wantFile: driveLetter + `:\a\b\c\src\bob.go`, +- wantURI: protocol.DocumentURI("file:///" + driveLetter + ":/a/b/c/src/bob.go"), +- }, +- { +- path: `c:\Go\src\bob george\george\george.go`, +- wantFile: `C:\Go\src\bob george\george\george.go`, +- wantURI: protocol.DocumentURI("file:///C:/Go/src/bob%20george/george/george.go"), +- }, +- } { +- got := protocol.URIFromPath(test.path) +- if got != test.wantURI { +- t.Errorf("URIFromPath(%q): got %q, expected %q", test.path, got, test.wantURI) +- } +- gotFilename := got.Path() +- if gotFilename != test.wantFile { +- t.Errorf("Filename(%q): got %q, expected %q", got, gotFilename, test.wantFile) +- } +- } +-} +- +-func TestParseDocumentURI(t *testing.T) { +- for _, test := range []struct { +- input string +- want string // string(DocumentURI) on success or error.Error() on failure +- wantPath string // expected DocumentURI.Path on success +- }{ +- { +- input: `file:///c:/Go/src/bob%20george/george/george.go`, +- want: "file:///C:/Go/src/bob%20george/george/george.go", +- wantPath: `C:\Go\src\bob george\george\george.go`, +- }, +- { +- input: `file:///C%3A/Go/src/bob%20george/george/george.go`, +- want: "file:///C:/Go/src/bob%20george/george/george.go", +- wantPath: `C:\Go\src\bob george\george\george.go`, +- }, +- { +- input: `file:///c:/path/to/%25p%25ercent%25/per%25cent.go`, +- want: `file:///C:/path/to/%25p%25ercent%25/per%25cent.go`, +- wantPath: `C:\path\to\%p%ercent%\per%cent.go`, +- }, +- { +- input: `file:///C%3A/`, +- want: `file:///C:/`, +- wantPath: `C:\`, +- }, +- { +- input: `file:///`, +- want: `file:///`, +- wantPath: `\`, +- }, +- { +- input: "", +- want: "", +- wantPath: "", +- }, +- // Errors: +- { +- input: "https://go.dev/", +- want: "DocumentURI scheme is not 'file': https://go.dev/", +- }, +- } { +- uri, err := protocol.ParseDocumentURI(test.input) +- var got string +- if err != nil { +- got = err.Error() +- } else { +- got = string(uri) +- } +- if got != test.want { +- t.Errorf("ParseDocumentURI(%q): got %q, want %q", test.input, got, test.want) +- } +- if err == nil && uri.Path() != test.wantPath { +- t.Errorf("DocumentURI(%s).Path = %q, want %q", uri, +- uri.Path(), test.wantPath) +- } +- } +-} +diff -urN a/gopls/internal/server/assets/common.css b/gopls/internal/server/assets/common.css +--- a/gopls/internal/server/assets/common.css 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/server/assets/common.css 1969-12-31 18:00:00.000000000 -0600 +@@ -1,116 +0,0 @@ +-/* Copyright 2024 The Go 
Authors. All rights reserved. +- * Use of this source code is governed by a BSD-style +- * license that can be found in the LICENSE file. +- */ +- +-/* inspired by pkg.go.dev's typography.css */ +- +-body { +- font-family: -apple-system, BlinkMacSystemFont, 'Segoe UI', Helvetica, Arial, sans-serif, 'Apple Color Emoji', 'Segoe UI Emoji'; +- font-size: 1rem; +- line-height: normal; +-} +- +-h1 { +- font-size: 1.5rem; +-} +- +-h2 { +- font-size: 1.375rem; +-} +- +-h3 { +- font-size: 1.25rem; +-} +- +-h4 { +- font-size: 1.125rem; +-} +- +-h5 { +- font-size: 1rem; +-} +- +-h6 { +- font-size: 0.875rem; +-} +- +-h1, +-h2, +-h3, +-h4 { +- font-weight: 600; +- line-height: 1.25em; +- word-break: break-word; +-} +- +-h5, +-h6 { +- font-weight: 500; +- line-height: 1.3em; +- word-break: break-word; +-} +- +-p { +- font-size: 1rem; +- line-height: 1.5rem; +- max-width: 60rem; +-} +- +-strong { +- font-weight: 600; +-} +- +-code, +-pre, +-textarea.code { +- font-family: Consolas, 'Liberation Mono', Menlo, monospace; +- font-size: 0.875rem; +- line-height: 1.5em; +-} +- +-pre, +-textarea.code { +- background-color: #eee; +- border: 3px; +- border-radius: 3px; +- color: black; +- overflow-x: auto; +- padding: 0.625rem; +- tab-size: 4; +- white-space: pre; +-} +- +-button, +-input, +-select, +-textarea { +- font: inherit; +-} +- +-a, +-a:link, +-a:visited { +- color: rgb(0, 125, 156); +- text-decoration: none; +-} +- +-a:hover, +-a:focus { +- color: rgb(0, 125, 156); +- text-decoration: underline; +-} +- +-a:hover > * { +- text-decoration: underline; +-} +- +-#disconnected { +- position: fixed; +- top: 1em; +- left: 1em; +- display: none; /* initially */ +- background-color: white; +- border: thick solid red; +- padding: 2em; +-} +diff -urN a/gopls/internal/server/assets/common.js b/gopls/internal/server/assets/common.js +--- a/gopls/internal/server/assets/common.js 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/server/assets/common.js 1969-12-31 18:00:00.000000000 -0600 +@@ -1,28 +0,0 @@ +-// Copyright 2024 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-// httpGET requests a URL for its effects only. +-// (It is needed for /open URLs; see objHTML.) +-function httpGET(url) { +- var x = new XMLHttpRequest(); +- x.open("GET", url, true); +- x.send(); +- return false; // disable usual behavior +-} +- +-// disconnect banner +-window.addEventListener('load', function() { +- // Create a hidden
    element. +- var banner = document.createElement("div"); +- banner.id = "disconnected"; +- banner.innerText = "Gopls server has terminated. Page is inactive."; +- document.body.appendChild(banner); +- +- // Start a GET /hang request. If it ever completes, the server +- // has disconnected. Reveal the banner in that case. +- var x = new XMLHttpRequest(); +- x.open("GET", "/hang", true); +- x.onloadend = () => { banner.style.display = "block"; }; +- x.send(); +-}); +Binary files a/gopls/internal/server/assets/favicon.ico and b/gopls/internal/server/assets/favicon.ico differ +diff -urN a/gopls/internal/server/assets/go-logo-blue.svg b/gopls/internal/server/assets/go-logo-blue.svg +--- a/gopls/internal/server/assets/go-logo-blue.svg 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/server/assets/go-logo-blue.svg 1969-12-31 18:00:00.000000000 -0600 +@@ -1 +0,0 @@ +- +\ No newline at end of file +diff -urN a/gopls/internal/server/assets/splitpkg.css b/gopls/internal/server/assets/splitpkg.css +--- a/gopls/internal/server/assets/splitpkg.css 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/server/assets/splitpkg.css 1969-12-31 18:00:00.000000000 -0600 +@@ -1,30 +0,0 @@ +-/* Copyright 2025 The Go Authors. All rights reserved. +- * Use of this source code is governed by a BSD-style +- * license that can be found in the LICENSE file. +- */ +- +-li { +- list-style: none; +-} +-span.component { +- display: inline-block; +- min-width: 8em; +-} +-span.delete { +- color: grey; +- opacity: 0.3; +-} +-span.delete:hover { +- opacity: 1; +-} +-hr { +- border-top: thin solid #EEE; +-} +-p.help { +- width: 6in; +- line-height: 120%; /* tighter than default */ +- font-size: 80%; +- text-align: justify; +- text-justify: inter-word; +- color: #AAA; +-} +diff -urN a/gopls/internal/server/assets/splitpkg.js b/gopls/internal/server/assets/splitpkg.js +--- a/gopls/internal/server/assets/splitpkg.js 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/server/assets/splitpkg.js 1969-12-31 18:00:00.000000000 -0600 +@@ -1,339 +0,0 @@ +-// Copyright 2025 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-// data holds the application state, of Go type splitpkg.ResultJSON. +-// Each reload() replaces it by the current server state. +-var data = null; +- +-// One-time setup. +-window.addEventListener('load', function() { +- document.getElementById('add-component').addEventListener('click', onClickAddComponent); +- document.getElementById('assign-apply').addEventListener('click', onClickApplyAssignment); +- +- reload(); +-}); +- +-/** +- * onClickAddComponent adds a new component, posts it to the server, +- * and reloads the page. +- */ +-function onClickAddComponent(event) { +- const name = document.getElementById('new-component').value.trim(); +- if (name == "") { +- alert("empty component name"); +- return; +- } +- if (data.Components.Names.includes(name)) { +- alert("duplicate component name"); +- return; +- } +- data.Components.Names.push(name); +- postComponents(); +- reload(); +-} +- +-/** +- * onClickDeleteComponent deletes a component, posts it to the server, +- * and reloads the page. 
+- */ +-function onClickDeleteComponent(event) { +- const li = event.target.parentNode; +- li.parentNode.removeChild(li); +- +- const index = li.index; // of deleted component +- +- const names = data.Components.Names; +- names.splice(index, 1); +- +- // Update assignments after implicit renumbering of components. +- const assignments = data.Components.Assignments; +- Object.entries(assignments).forEach(([k, compIndex]) => { +- if (compIndex == index) { +- assignments[k] = 0; // => default component +- } else if (compIndex > index) { +- assignments[k] = compIndex - 1; +- } +- }); +- +- postComponents(); +- reload(); +-} +- +-/** postComponents notifies the server of a change in the Components information. */ +-function postComponents() { +- // Post the updated components. +- const xhr = new XMLHttpRequest(); +- xhr.open("POST", makeURL('/splitpkg-components'), false); // false => synchronous +- xhr.setRequestHeader('Content-Type', 'application/json'); +- xhr.send(JSON.stringify(data.Components)); +- if (xhr.status === 200) { +- // ok +- } else { +- alert("failed to post new component list: " + xhr.statusText); +- return; +- } +-} +- +-/** +- * onClickApplyAssignment is called when the Apply button is clicked. +- * It updates the component assignment mapping. +- */ +-function onClickApplyAssignment(event) { +- const componentIndex = document.getElementById('assign-select').selectedIndex; +- +- // Update the component assignment of each spec +- // whose
  • checkbox is checked. +- document.querySelectorAll('li.spec-node').forEach((specItem) => { +- const checkbox = specItem.firstChild; +- if (checkbox.checked) { +- data.Components.Assignments[specItem.dataset.name] = componentIndex; +- } +- }); +- +- postComponents(); // update the server +- reload(); // recompute the page state +-} +- +-/** +- * reload requests the current server state, +- * updates the global 'data' variable, +- * and rebuilds the page DOM to reflect the state. +- */ +-function reload() { +- const xhr = new XMLHttpRequest(); +- xhr.open("GET", makeURL('/splitpkg-json'), false); // false => synchronous +- xhr.send(null); +- if (xhr.status === 200) { +- try { +- data = JSON.parse(xhr.responseText); +- } catch (e) { +- alert("error parsing JSON: " + e); +- return null; +- } +- } else { +- alert("request failed: " + xhr.statusText); +- return null; +- } +- +- // Ensure there is always a default component. +- if (!data.Components.Names) { // undefined, null, or empty +- data.Components.Names = ["default"]; +- } +- if (!data.Components.Assignments) { // undefined, null, or empty +- data.Components.Assignments = {}; +- } +- +- // Rebuild list of components. +- const componentsContainer = document.getElementById('components'); +- componentsContainer.replaceChildren(); // clear out previous state +- const assignSelect = document.getElementById('assign-select'); +- assignSelect.replaceChildren(); // clear out previous state +- const componentsList = document.createElement('ul'); +- componentsContainer.appendChild(componentsList); +- data.Components.Names.forEach((name, i) => { +- //
  • ■ name × +- const li = document.createElement('li'); +- li.index = i; // custom index field holds component index (for onClickDeleteComponent) +- componentsList.appendChild(li); +- +- // ■ name +- const span = document.createElement('span'); +- span.className = 'component'; +- span.style.color = componentColors[i % componentColors.length]; +- span.append('■ ', name) +- li.append(span); +- +- // × +- if (i > 0) { // the default component cannot be deleted +- const xSpan = document.createElement('span'); +- xSpan.className = 'delete'; +- xSpan.append(' ×') +- xSpan.addEventListener('click', onClickDeleteComponent); +- li.append(xSpan); +- } +- +- // Add component to the assignment dropdown. +- assignSelect.add(new Option(name, null)); +- }) +- +- // Rebuild list of decls grouped by file. +- const filesContainer = document.getElementById('files'); +- filesContainer.replaceChildren(); // clear out previous state +- data.Files.forEach(fileJson => { +- filesContainer.appendChild(createFileDiv(fileJson)); +- }); +- +- // Display strongly connected components. +- const deps = document.getElementById('deps'); +- deps.replaceChildren(); // clear out previous state +- const depsList = document.createElement('ul'); +- deps.append(depsList); +- +- // Be explicit if there are no component dependencies. +- if (data.Edges.length == 0) { +- const li = document.createElement('li'); +- depsList.append(li); +- li.append("No dependencies"); +- return; +- } +- +- // List all sets of mutually dependent components. +- data.Cycles.forEach((scc) => { +- const item = document.createElement('li'); +- item.append("⚠ Component cycle: " + +- scc.map(index => data.Components.Names[index]).join(', ')) +- depsList.append(item); +- }) +- +- // Show intercomponent edges. +- data.Edges.forEach((edge) => { +- const edgeItem = document.createElement('li'); +- depsList.append(edgeItem); +- +- // component edge +- const from = data.Components.Names[edge.From]; +- const to = data.Components.Names[edge.To]; +- edgeItem.append((edge.Cyclic ? "⚠ " : "") + from + " ➤ " + to); +- +- // sublist of symbol references that induced the edge +- const refsList = document.createElement('ul'); +- edgeItem.appendChild(refsList); +- refsList.className = 'refs-list'; +- edge.Refs.forEach(ref => { +- refsList.appendChild(createRefItem(ref)); +- }); +- }) +-} +- +-/** +- * makeURL returns a URL string with the specified path, +- * but preserving the current page's query parameters (view, pkg). +- */ +-function makeURL(path) { +- const url = new URL(window.location.href); +- url.pathname = url.pathname.substring(0, url.pathname.lastIndexOf('/')) + path; +- return url.href; +-} +- +-/** createFileDiv creates a
    for a fileJSON object. */ +-function createFileDiv(fileData) { +- // Create the main container for the file entry. +- const fileContainer = document.createElement('div'); +- fileContainer.className = 'file-node'; +- +- // Create and append the file's base name as a para. +- const para = document.createElement('p'); +- fileContainer.appendChild(para); +- +- // The file's checkbox applies in bulk to all specs within it. +- var specCheckboxes = []; +- const fileCheckbox = document.createElement('input'); +- fileCheckbox.type = 'checkbox'; +- fileCheckbox.addEventListener('click', (event) => { +- // Select/deselect all specs belonging to the file. +- const checked = event.target.checked; +- specCheckboxes.forEach(checkbox => { +- checkbox.checked = checked; +- }); +- }) +- para.appendChild(fileCheckbox); +- para.append("File "); +- +- // Link file name to start of file. +- const baseName = document.createElement('a'); +- para.appendChild(baseName); +- baseName.className = 'file-link'; +- baseName.textContent = fileData.Base; +- baseName.addEventListener('click', () => httpGET(fileData.URL)); +- +- // Process declarations if they exist. +- if (fileData.Decls && fileData.Decls.length > 0) { +- const declsList = document.createElement('ul'); +- declsList.className = 'decls-list'; +- // For now we flatten out the decl/spec grouping. +- fileData.Decls.forEach(decl => { +- if (decl.Specs && decl.Specs.length > 0) { +- decl.Specs.forEach(spec => { +- declsList.appendChild(createSpecItem(decl.Kind, spec, specCheckboxes)); +- }); +- } +- }); +- fileContainer.appendChild(declsList); +- } +- +- return fileContainer; +-} +- +-/** createSpecItem creates an
<li> element for a specJSON object (one declared name). */ +- //
  • myfunc... +- const specItem = document.createElement('li'); +- specItem.className = 'spec-node'; +- specItem.dataset.name = specData.Name; // custom .name field holds symbol's unique logical name +- +- // First child is a checkbox. +- const specCheckbox = document.createElement('input'); +- specCheckbox.type = 'checkbox'; +- specItem.appendChild(specCheckbox); +- checkboxes.push(specCheckbox); +- +- // Next is the component assignment color swatch. +- const assignSpan = document.createElement('span'); +- assignSpan.className = 'component-swatch'; +- assignSpan.textContent = "■"; +- { +- var index = data.Components.Assignments[specData.Name]; // may be undefined +- if (!index) { +- index = 0; // default +- } +- assignSpan.style.color = componentColors[index % componentColors.length]; +- assignSpan.title = "Component " + data.Components.Names[index]; // tooltip +- } +- specItem.appendChild(assignSpan); +- +- // Encircle the func/var/const/type indicator. +- const symbolSpan = document.createElement('span'); +- const symbol = String.fromCodePoint(kind.codePointAt(0) - 'a'.codePointAt(0) + 'ⓐ'.codePointAt(0)); +- symbolSpan.title = kind; // tooltip +- symbolSpan.append(`${symbol} `); +- specItem.append(symbolSpan); +- +- // Link name to declaration. +- const specName = document.createElement('a'); +- specItem.appendChild(specName); +- specName.textContent = ` ${specData.Name}`; +- specName.addEventListener('click', () => httpGET(specData.URL)); +- +- return specItem; +-} +- +-/** createRefItem creates an
  • element for a refJSON object (a reference). */ +-function createRefItem(refData) { +- const refItem = document.createElement('li'); +- refItem.className = 'ref-node'; +- +- // Link (from -> to) to the reference in from. +- const refLink = document.createElement('a'); +- refItem.appendChild(refLink); +- refLink.addEventListener('click', () => httpGET(refData.URL)); +- refLink.textContent = "${refData.From} ➤ ${refData.To}"; +- +- return refItem; +-} +- +-/** componentColors is a palette of dark, high-contrast colors. */ +-const componentColors = [ +- "#298429", +- "#4B4B8F", +- "#AD2C2C", +- "#A62CA6", +- "#6E65AF", +- "#D15050", +- "#2CA6A6", +- "#C55656", +- "#7B8C58", +- "#587676", +- "#B95EE1", +- "#AF6D41", +-]; +diff -urN a/gopls/internal/server/call_hierarchy.go b/gopls/internal/server/call_hierarchy.go +--- a/gopls/internal/server/call_hierarchy.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/server/call_hierarchy.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,62 +0,0 @@ +-// Copyright 2020 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package server +- +-import ( +- "context" +- +- "golang.org/x/tools/gopls/internal/file" +- "golang.org/x/tools/gopls/internal/golang" +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/internal/event" +-) +- +-func (s *server) PrepareCallHierarchy(ctx context.Context, params *protocol.CallHierarchyPrepareParams) ([]protocol.CallHierarchyItem, error) { +- ctx, done := event.Start(ctx, "server.PrepareCallHierarchy") +- defer done() +- +- fh, snapshot, release, err := s.session.FileOf(ctx, params.TextDocument.URI) +- if err != nil { +- return nil, err +- } +- defer release() +- switch snapshot.FileKind(fh) { +- case file.Go: +- return golang.PrepareCallHierarchy(ctx, snapshot, fh, params.Position) +- } +- return nil, nil // empty result +-} +- +-func (s *server) IncomingCalls(ctx context.Context, params *protocol.CallHierarchyIncomingCallsParams) ([]protocol.CallHierarchyIncomingCall, error) { +- ctx, done := event.Start(ctx, "server.IncomingCalls") +- defer done() +- +- fh, snapshot, release, err := s.session.FileOf(ctx, params.Item.URI) +- if err != nil { +- return nil, err +- } +- defer release() +- switch snapshot.FileKind(fh) { +- case file.Go: +- return golang.IncomingCalls(ctx, snapshot, fh, params.Item.Range.Start) +- } +- return nil, nil // empty result +-} +- +-func (s *server) OutgoingCalls(ctx context.Context, params *protocol.CallHierarchyOutgoingCallsParams) ([]protocol.CallHierarchyOutgoingCall, error) { +- ctx, done := event.Start(ctx, "server.OutgoingCalls") +- defer done() +- +- fh, snapshot, release, err := s.session.FileOf(ctx, params.Item.URI) +- if err != nil { +- return nil, err +- } +- defer release() +- switch snapshot.FileKind(fh) { +- case file.Go: +- return golang.OutgoingCalls(ctx, snapshot, fh, params.Item.Range.Start) +- } +- return nil, nil // empty result +-} +diff -urN a/gopls/internal/server/code_action.go b/gopls/internal/server/code_action.go +--- a/gopls/internal/server/code_action.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/server/code_action.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,369 +0,0 @@ +-// Copyright 2018 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. 
+- +-package server +- +-import ( +- "context" +- "fmt" +- "slices" +- "strings" +- +- "golang.org/x/tools/gopls/internal/cache" +- "golang.org/x/tools/gopls/internal/file" +- "golang.org/x/tools/gopls/internal/golang" +- "golang.org/x/tools/gopls/internal/mod" +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/gopls/internal/protocol/command" +- "golang.org/x/tools/gopls/internal/settings" +- "golang.org/x/tools/internal/event" +-) +- +-func (s *server) CodeAction(ctx context.Context, params *protocol.CodeActionParams) ([]protocol.CodeAction, error) { +- ctx, done := event.Start(ctx, "server.CodeAction") +- defer done() +- +- fh, snapshot, release, err := s.session.FileOf(ctx, params.TextDocument.URI) +- if err != nil { +- return nil, err +- } +- defer release() +- uri := fh.URI() +- kind := snapshot.FileKind(fh) +- +- // Determine the supported code action kinds for this file. +- // +- // We interpret CodeActionKinds hierarchically, so refactor.rewrite +- // subsumes refactor.rewrite.change_quote, for example, +- // and "" (protocol.Empty) subsumes all kinds. +- // See ../protocol/codeactionkind.go for some code action theory. +- // +- // The Context.Only field specifies which code actions +- // the client wants. According to LSP 3.18 textDocument_codeAction, +- // an Only=[] should be interpreted as Only=["quickfix"]: +- // +- // "In version 1.0 of the protocol, there weren’t any +- // source or refactoring code actions. Code actions +- // were solely used to (quick) fix code, not to +- // write/rewrite code. So if a client asks for code +- // actions without any kind, the standard quick fix +- // code actions should be returned." +- // +- // However, this would deny clients (e.g. Vim+coc.nvim, +- // Emacs+eglot, and possibly others) the easiest and most +- // natural way of querying the server for the entire set of +- // available code actions. But reporting all available code +- // actions would be a nuisance for VS Code, since mere cursor +- // motion into a region with a code action (~anywhere) would +- // trigger a lightbulb usually associated with quickfixes. +- // +- // As a compromise, we use the trigger kind as a heuristic: if +- // the query was triggered by cursor motion (Automatic), we +- // respond with only quick fixes; if the query was invoked +- // explicitly (Invoked), we respond with all available +- // actions. +- codeActionKinds := make(map[protocol.CodeActionKind]bool) +- if len(params.Context.Only) > 0 { +- for _, kind := range params.Context.Only { // kind may be "" (=> all) +- codeActionKinds[kind] = true +- } +- } else { +- // No explicit kind specified. +- // Heuristic: decide based on trigger. +- if triggerKind(params) == protocol.CodeActionAutomatic { +- // e.g. cursor motion: show only quick fixes +- codeActionKinds[protocol.QuickFix] = true +- } else { +- // e.g. a menu selection (or unknown trigger kind, +- // as in our tests): show all available code actions. +- codeActionKinds[protocol.Empty] = true +- } +- } +- +- // enabled reports whether the specified kind of code action is required. +- enabled := func(kind protocol.CodeActionKind) bool { +- // Given "refactor.rewrite.foo", check for it, +- // then "refactor.rewrite", "refactor", then "". +- // A false map entry prunes the search for ancestors. +- // +- // If codeActionKinds contains protocol.Empty (""), +- // all kinds are enabled. 
+- for { +- if v, ok := codeActionKinds[kind]; ok { +- return v +- } +- if kind == "" { +- return false +- } +- +- // The "source.test" code action shouldn't be +- // returned to the client unless requested by +- // an exact match in Only. +- // +- // This mechanism exists to avoid a distracting +- // lightbulb (code action) on each Test function. +- // These actions are unwanted in VS Code because it +- // has Test Explorer, and in other editors because +- // the UX of executeCommand is unsatisfactory for tests: +- // it doesn't show the complete streaming output. +- // See https://github.com/joaotavora/eglot/discussions/1402 +- // for a better solution. See also +- // https://github.com/golang/go/issues/67400. +- // +- // TODO(adonovan): consider instead switching on +- // codeActionTriggerKind. Perhaps other noisy Source +- // Actions should be guarded in the same way. +- if kind == settings.GoTest { +- return false // don't search ancestors +- } +- +- // Try the parent. +- if dot := strings.LastIndexByte(string(kind), '.'); dot >= 0 { +- kind = kind[:dot] // "refactor.foo" -> "refactor" +- } else { +- kind = "" // "refactor" -> "" +- } +- } +- } +- +- switch kind { +- case file.Mod: +- var actions []protocol.CodeAction +- +- fixes, err := s.codeActionsMatchingDiagnostics(ctx, fh.URI(), snapshot, params.Context.Diagnostics, enabled) +- if err != nil { +- return nil, err +- } +- +- // Group vulnerability fixes by their range, and select only the most +- // appropriate upgrades. +- // +- // TODO(rfindley): can this instead be accomplished on the diagnosis side, +- // so that code action handling remains uniform? +- vulnFixes := make(map[protocol.Range][]protocol.CodeAction) +- searchFixes: +- for _, fix := range fixes { +- for _, diag := range fix.Diagnostics { +- if diag.Source == string(cache.Govulncheck) || diag.Source == string(cache.Vulncheck) { +- vulnFixes[diag.Range] = append(vulnFixes[diag.Range], fix) +- continue searchFixes +- } +- } +- actions = append(actions, fix) +- } +- +- for _, fixes := range vulnFixes { +- fixes = mod.SelectUpgradeCodeActions(fixes) +- actions = append(actions, fixes...) +- } +- +- return actions, nil +- +- case file.Go: +- // diagnostic-bundled code actions +- // +- // The diagnostics already have a UI presence (e.g. squiggly underline); +- // the associated action may additionally show (in VS Code) as a lightbulb. +- // Note s.codeActionsMatchingDiagnostics returns only fixes +- // detected during the analysis phase. golang.CodeActions computes +- // extra changes that can address some diagnostics. +- actions, err := s.codeActionsMatchingDiagnostics(ctx, uri, snapshot, params.Context.Diagnostics, enabled) +- if err != nil { +- return nil, err +- } +- +- // computed code actions (may include quickfixes from diagnostics) +- moreActions, err := golang.CodeActions(ctx, snapshot, fh, params.Range, params.Context.Diagnostics, enabled, triggerKind(params)) +- if err != nil { +- return nil, err +- } +- actions = append(actions, moreActions...) +- +- // Don't suggest most fixes for generated files, since they are generally +- // not useful and some editors may apply them automatically on save. +- // (Unfortunately there's no reliable way to distinguish fixes from +- // queries, so we must list all kinds of queries here.) +- // +- // We make an exception for OrganizeImports, because +- // (a) it is needed when making temporary experimental +- // changes (e.g. 
adding logging) in generated files, and +- // (b) it doesn't report diagnostics on well-formed code, and +- // unedited generated files must be well formed. +- if golang.IsGenerated(ctx, snapshot, uri) { +- actions = slices.DeleteFunc(actions, func(a protocol.CodeAction) bool { +- switch a.Kind { +- case settings.GoTest, +- settings.GoDoc, +- settings.GoFreeSymbols, +- settings.GoSplitPackage, +- settings.GoAssembly, +- settings.GoplsDocFeatures, +- settings.GoToggleCompilerOptDetails: +- return false // read-only query +- case settings.OrganizeImports: +- return false // fix allowed in generated files (see #73959) +- } +- return true // potential write operation +- }) +- } +- +- return actions, nil +- +- default: +- // Unsupported file kind for a code action. +- return nil, nil +- } +-} +- +-func triggerKind(params *protocol.CodeActionParams) protocol.CodeActionTriggerKind { +- if kind := params.Context.TriggerKind; kind != nil { // (some clients omit it) +- return *kind +- } +- return protocol.CodeActionUnknownTrigger +-} +- +-// ResolveCodeAction resolves missing Edit information (that is, computes the +-// details of the necessary patch) in the given code action using the provided +-// Data field of the CodeAction, which should contain the raw json of a protocol.Command. +-// +-// This should be called by the client before applying code actions, when the +-// client has code action resolve support. +-// +-// This feature allows capable clients to preview and selectively apply the diff +-// instead of applying the whole thing unconditionally through workspace/applyEdit. +-func (s *server) ResolveCodeAction(ctx context.Context, ca *protocol.CodeAction) (*protocol.CodeAction, error) { +- ctx, done := event.Start(ctx, "server.ResolveCodeAction") +- defer done() +- +- // Only resolve the code action if there is Data provided. +- var cmd protocol.Command +- if ca.Data != nil { +- if err := protocol.UnmarshalJSON(*ca.Data, &cmd); err != nil { +- return nil, err +- } +- } +- if cmd.Command != "" { +- params := &protocol.ExecuteCommandParams{ +- Command: cmd.Command, +- Arguments: cmd.Arguments, +- } +- +- handler := &commandHandler{ +- s: s, +- params: params, +- } +- edit, err := command.Dispatch(ctx, params, handler) +- if err != nil { +- return nil, err +- } +- var ok bool +- if ca.Edit, ok = edit.(*protocol.WorkspaceEdit); !ok { +- return nil, fmt.Errorf("unable to resolve code action %q", ca.Title) +- } +- } +- return ca, nil +-} +- +-// codeActionsMatchingDiagnostics creates code actions for the +-// provided diagnostics, by unmarshalling actions bundled in the +-// protocol.Diagnostic.Data field or, if there were none, by creating +-// actions from edits associated with a matching Diagnostic from the +-// set of stored diagnostics for this file. +-func (s *server) codeActionsMatchingDiagnostics(ctx context.Context, uri protocol.DocumentURI, snapshot *cache.Snapshot, pds []protocol.Diagnostic, enabled func(protocol.CodeActionKind) bool) ([]protocol.CodeAction, error) { +- var actions []protocol.CodeAction +- var unbundled []protocol.Diagnostic // diagnostics without bundled code actions in their Data field +- for _, pd := range pds { +- bundled, err := cache.BundledLazyFixes(pd) +- if err != nil { +- return nil, err +- } +- if len(bundled) > 0 { +- for _, fix := range bundled { +- if enabled(fix.Kind) { +- actions = append(actions, fix) +- } +- } +- } else { +- // No bundled actions: keep searching for a match. 
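
The hierarchical matching performed by the enabled closure shown earlier amounts to an ancestor walk over dotted kind names. A standalone sketch under the same assumption (plain strings instead of protocol.CodeActionKind), leaving out the source.test special case:

package main

import (
	"fmt"
	"strings"
)

// enabled reports whether kind, or any ancestor of kind, appears in requested.
// "refactor.rewrite.foo" is checked, then "refactor.rewrite", "refactor",
// and finally "" (which, if present, enables everything).
func enabled(requested map[string]bool, kind string) bool {
	for {
		if v, ok := requested[kind]; ok {
			return v // a false entry prunes the search
		}
		if kind == "" {
			return false
		}
		if dot := strings.LastIndexByte(kind, '.'); dot >= 0 {
			kind = kind[:dot] // "refactor.rewrite.foo" -> "refactor.rewrite"
		} else {
			kind = "" // "refactor" -> ""
		}
	}
}

func main() {
	req := map[string]bool{"refactor.rewrite": true}
	fmt.Println(enabled(req, "refactor.rewrite.changeQuote")) // true
	fmt.Println(enabled(req, "quickfix"))                     // false
}
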
+- unbundled = append(unbundled, pd) +- } +- } +- +- for _, pd := range unbundled { +- for _, sd := range s.findMatchingDiagnostics(uri, pd) { +- diagActions, err := codeActionsForDiagnostic(ctx, snapshot, sd, &pd, enabled) +- if err != nil { +- return nil, err +- } +- actions = append(actions, diagActions...) +- } +- } +- return actions, nil +-} +- +-func codeActionsForDiagnostic(ctx context.Context, snapshot *cache.Snapshot, sd *cache.Diagnostic, pd *protocol.Diagnostic, enabled func(protocol.CodeActionKind) bool) ([]protocol.CodeAction, error) { +- var actions []protocol.CodeAction +- for _, fix := range sd.SuggestedFixes { +- if !enabled(fix.ActionKind) { +- continue +- } +- var changes []protocol.DocumentChange +- for uri, edits := range fix.Edits { +- fh, err := snapshot.ReadFile(ctx, uri) +- if err != nil { +- return nil, err +- } +- change := protocol.DocumentChangeEdit(fh, edits) +- changes = append(changes, change) +- } +- actions = append(actions, protocol.CodeAction{ +- Title: fix.Title, +- Kind: fix.ActionKind, +- Edit: protocol.NewWorkspaceEdit(changes...), +- Command: fix.Command, +- Diagnostics: []protocol.Diagnostic{*pd}, +- }) +- } +- return actions, nil +-} +- +-func (s *server) findMatchingDiagnostics(uri protocol.DocumentURI, pd protocol.Diagnostic) []*cache.Diagnostic { +- s.diagnosticsMu.Lock() +- defer s.diagnosticsMu.Unlock() +- +- var sds []*cache.Diagnostic +- if fileDiags := s.diagnostics[uri]; fileDiags != nil { +- for _, viewDiags := range fileDiags.byView { +- for _, sd := range viewDiags.diagnostics { +- // extra space may have been trimmed when +- // converting to protocol.Diagnostic +- sameDiagnostic := pd.Message == strings.TrimSpace(sd.Message) && +- protocol.CompareRange(pd.Range, sd.Range) == 0 && +- pd.Source == string(sd.Source) +- +- if sameDiagnostic { +- sds = append(sds, sd) +- } +- } +- } +- } +- return sds +-} +- +-func (s *server) getSupportedCodeActions() []protocol.CodeActionKind { +- allCodeActionKinds := make(map[protocol.CodeActionKind]struct{}) +- for _, kinds := range s.Options().SupportedCodeActions { +- for kind := range kinds { +- allCodeActionKinds[kind] = struct{}{} +- } +- } +- var result []protocol.CodeActionKind +- for kind := range allCodeActionKinds { +- result = append(result, kind) +- } +- slices.Sort(result) +- return result +-} +- +-type unit = struct{} +diff -urN a/gopls/internal/server/code_lens.go b/gopls/internal/server/code_lens.go +--- a/gopls/internal/server/code_lens.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/server/code_lens.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,66 +0,0 @@ +-// Copyright 2020 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package server +- +-import ( +- "context" +- "fmt" +- "sort" +- +- "golang.org/x/tools/gopls/internal/cache" +- "golang.org/x/tools/gopls/internal/file" +- "golang.org/x/tools/gopls/internal/golang" +- "golang.org/x/tools/gopls/internal/label" +- "golang.org/x/tools/gopls/internal/mod" +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/gopls/internal/settings" +- "golang.org/x/tools/internal/event" +-) +- +-// CodeLens reports the set of available CodeLenses +-// (range-associated commands) in the given file. 
+-func (s *server) CodeLens(ctx context.Context, params *protocol.CodeLensParams) ([]protocol.CodeLens, error) { +- ctx, done := event.Start(ctx, "server.CodeLens", label.URI.Of(params.TextDocument.URI)) +- defer done() +- +- fh, snapshot, release, err := s.session.FileOf(ctx, params.TextDocument.URI) +- if err != nil { +- return nil, err +- } +- defer release() +- +- var lensFuncs map[settings.CodeLensSource]cache.CodeLensSourceFunc +- switch snapshot.FileKind(fh) { +- case file.Mod: +- lensFuncs = mod.CodeLensSources() +- case file.Go: +- lensFuncs = golang.CodeLensSources() +- default: +- // Unsupported file kind for a code lens. +- return nil, nil +- } +- var lenses []protocol.CodeLens +- for kind, lensFunc := range lensFuncs { +- if !snapshot.Options().Codelenses[kind] { +- continue +- } +- added, err := lensFunc(ctx, snapshot, fh) +- // Code lens is called on every keystroke, so we should just operate in +- // a best-effort mode, ignoring errors. +- if err != nil { +- event.Error(ctx, fmt.Sprintf("code lens %s failed", kind), err) +- continue +- } +- lenses = append(lenses, added...) +- } +- sort.Slice(lenses, func(i, j int) bool { +- a, b := lenses[i], lenses[j] +- if cmp := protocol.CompareRange(a.Range, b.Range); cmp != 0 { +- return cmp < 0 +- } +- return a.Command.Command < b.Command.Command +- }) +- return lenses, nil +-} +diff -urN a/gopls/internal/server/command.go b/gopls/internal/server/command.go +--- a/gopls/internal/server/command.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/server/command.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,1881 +0,0 @@ +-// Copyright 2020 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package server +- +-import ( +- "bytes" +- "context" +- "encoding/json" +- "errors" +- "fmt" +- "io" +- "log" +- "maps" +- "os" +- "path/filepath" +- "regexp" +- "runtime" +- "runtime/pprof" +- "slices" +- "sort" +- "strings" +- "sync" +- +- "github.com/fatih/gomodifytags/modifytags" +- "golang.org/x/mod/modfile" +- "golang.org/x/telemetry/counter" +- "golang.org/x/tools/go/ast/astutil" +- "golang.org/x/tools/gopls/internal/cache" +- "golang.org/x/tools/gopls/internal/cache/metadata" +- "golang.org/x/tools/gopls/internal/cache/parsego" +- "golang.org/x/tools/gopls/internal/debug" +- "golang.org/x/tools/gopls/internal/file" +- "golang.org/x/tools/gopls/internal/golang" +- "golang.org/x/tools/gopls/internal/progress" +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/gopls/internal/protocol/command" +- "golang.org/x/tools/gopls/internal/settings" +- "golang.org/x/tools/gopls/internal/util/bug" +- "golang.org/x/tools/gopls/internal/util/tokeninternal" +- "golang.org/x/tools/gopls/internal/vulncheck" +- "golang.org/x/tools/gopls/internal/vulncheck/scan" +- "golang.org/x/tools/internal/diff" +- "golang.org/x/tools/internal/event" +- "golang.org/x/tools/internal/gocommand" +- "golang.org/x/tools/internal/jsonrpc2" +- "golang.org/x/tools/internal/xcontext" +-) +- +-func (s *server) ExecuteCommand(ctx context.Context, params *protocol.ExecuteCommandParams) (any, error) { +- ctx, done := event.Start(ctx, "server.ExecuteCommand") +- defer done() +- +- // For test synchronization, always create a progress notification. +- // +- // This may be in addition to user-facing progress notifications created in +- // the course of command execution. 
+- if s.Options().VerboseWorkDoneProgress { +- work := s.progress.Start(ctx, params.Command, "Verbose: running command...", nil, nil) +- defer work.End(ctx, "Done.") +- } +- +- if !slices.Contains(s.Options().SupportedCommands, params.Command) { +- return nil, fmt.Errorf("%s is not a supported command", params.Command) +- } +- +- handler := &commandHandler{ +- s: s, +- params: params, +- } +- return command.Dispatch(ctx, params, handler) +-} +- +-type commandHandler struct { +- s *server +- params *protocol.ExecuteCommandParams +-} +- +-func (h *commandHandler) Modules(ctx context.Context, args command.ModulesArgs) (command.ModulesResult, error) { +- // keepModule filters modules based on the command args +- keepModule := func(goMod protocol.DocumentURI) bool { +- // Does the directory enclose the view's go.mod file? +- if !args.Dir.Encloses(goMod) { +- return false +- } +- +- // Calculate the relative path +- rel, err := filepath.Rel(args.Dir.Path(), goMod.Path()) +- if err != nil { +- return false // "can't happen" (see prior Encloses check) +- } +- +- assert(goMod.Base() == "go.mod", fmt.Sprintf("invalid go.mod path: want go.mod, got %q", goMod.Path())) +- +- // Invariant: rel is a relative path without "../" segments and the last +- // segment is "go.mod" +- nparts := strings.Count(rel, string(filepath.Separator)) +- return args.MaxDepth < 0 || nparts <= args.MaxDepth +- } +- +- // Views may include: +- // - go.work views containing one or more modules each; +- // - go.mod views containing a single module each; +- // - GOPATH and/or ad hoc views containing no modules. +- // +- // Retrieving a view via the request path would only work for a +- // non-recursive query for a go.mod view, and even in that case +- // [Session.SnapshotOf] doesn't work on directories. Thus we check every +- // view. 
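
The MaxDepth filter in keepModule above reduces to counting path separators in the go.mod location relative to the requested directory. A small sketch with ordinary file paths standing in for protocol.DocumentURI; withinDepth is an illustrative name:

package main

import (
	"fmt"
	"path/filepath"
	"strings"
)

// withinDepth reports whether goMod is a go.mod file at most maxDepth
// directories below dir. A negative maxDepth means "no limit".
func withinDepth(dir, goMod string, maxDepth int) bool {
	rel, err := filepath.Rel(dir, goMod)
	if err != nil || strings.HasPrefix(rel, "..") {
		return false // not inside dir
	}
	if filepath.Base(rel) != "go.mod" {
		return false
	}
	// rel has no "../" segments and ends in "go.mod", so the number of
	// separators is the nesting depth of the module below dir.
	nparts := strings.Count(rel, string(filepath.Separator))
	return maxDepth < 0 || nparts <= maxDepth
}

func main() {
	fmt.Println(withinDepth("/ws", "/ws/go.mod", 0))      // true
	fmt.Println(withinDepth("/ws", "/ws/a/b/go.mod", 1))  // false
	fmt.Println(withinDepth("/ws", "/ws/a/b/go.mod", -1)) // true
}
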
+- var result command.ModulesResult +- seen := map[protocol.DocumentURI]bool{} +- for _, v := range h.s.session.Views() { +- s, release, err := v.Snapshot() +- if err != nil { +- return command.ModulesResult{}, err +- } +- defer release() +- +- for _, modFile := range v.ModFiles() { +- if !keepModule(modFile) { +- continue +- } +- +- // Deduplicate +- if seen[modFile] { +- continue +- } +- seen[modFile] = true +- +- fh, err := s.ReadFile(ctx, modFile) +- if err != nil { +- return command.ModulesResult{}, err +- } +- mod, err := s.ParseMod(ctx, fh) +- if err != nil { +- return command.ModulesResult{}, err +- } +- if mod.File.Module == nil { +- continue // syntax contains errors +- } +- result.Modules = append(result.Modules, command.Module{ +- Path: mod.File.Module.Mod.Path, +- Version: mod.File.Module.Mod.Version, +- GoMod: mod.URI, +- }) +- } +- } +- return result, nil +-} +- +-func (h *commandHandler) Packages(ctx context.Context, args command.PackagesArgs) (command.PackagesResult, error) { +- // Convert file arguments into directories +- dirs := make([]protocol.DocumentURI, len(args.Files)) +- for i, file := range args.Files { +- if filepath.Ext(file.Path()) == ".go" { +- dirs[i] = file.Dir() +- } else { +- dirs[i] = file +- } +- } +- +- keepPackage := func(pkg *metadata.Package) bool { +- for _, file := range pkg.GoFiles { +- for _, dir := range dirs { +- if file.Dir() == dir || args.Recursive && dir.Encloses(file) { +- return true +- } +- } +- } +- return false +- } +- +- result := command.PackagesResult{ +- Module: make(map[string]command.Module), +- } +- +- err := h.run(ctx, commandConfig{ +- progress: "Packages", +- }, func(ctx context.Context, _ commandDeps) error { +- for _, view := range h.s.session.Views() { +- snapshot, release, err := view.Snapshot() +- if err != nil { +- return err +- } +- defer release() +- +- metas, err := snapshot.WorkspaceMetadata(ctx) +- if err != nil { +- return err +- } +- +- // Filter out unwanted packages +- metas = slices.DeleteFunc(metas, func(meta *metadata.Package) bool { +- return meta.IsIntermediateTestVariant() || +- !keepPackage(meta) +- }) +- +- start := len(result.Packages) +- for _, meta := range metas { +- var mod command.Module +- if meta.Module != nil { +- mod = command.Module{ +- Path: meta.Module.Path, +- Version: meta.Module.Version, +- GoMod: protocol.URIFromPath(meta.Module.GoMod), +- } +- result.Module[mod.Path] = mod // Overwriting is ok +- } +- +- result.Packages = append(result.Packages, command.Package{ +- Path: string(meta.PkgPath), +- ForTest: string(meta.ForTest), +- ModulePath: mod.Path, +- }) +- } +- +- if args.Mode&command.NeedTests == 0 { +- continue +- } +- +- // Make a single request to the index (per snapshot) to minimize the +- // performance hit +- var ids []cache.PackageID +- for _, meta := range metas { +- ids = append(ids, meta.ID) +- } +- +- allTests, err := snapshot.Tests(ctx, ids...) 
+- if err != nil { +- return err +- } +- +- for i, tests := range allTests { +- pkg := &result.Packages[start+i] +- fileByPath := map[protocol.DocumentURI]*command.TestFile{} +- for _, test := range tests.All() { +- test := command.TestCase{ +- Name: test.Name, +- Loc: test.Location, +- } +- +- file, ok := fileByPath[test.Loc.URI] +- if !ok { +- f := command.TestFile{ +- URI: test.Loc.URI, +- } +- i := len(pkg.TestFiles) +- pkg.TestFiles = append(pkg.TestFiles, f) +- file = &pkg.TestFiles[i] +- fileByPath[test.Loc.URI] = file +- } +- file.Tests = append(file.Tests, test) +- } +- } +- } +- +- return nil +- }) +- return result, err +-} +- +-func (h *commandHandler) MaybePromptForTelemetry(ctx context.Context) error { +- // if the server's TelemetryPrompt is true, it's likely the server already +- // handled prompting for it. Don't try to prompt again. +- if !h.s.options.TelemetryPrompt { +- go h.s.maybePromptForTelemetry(ctx, true) +- } +- return nil +-} +- +-func (*commandHandler) AddTelemetryCounters(_ context.Context, args command.AddTelemetryCountersArgs) error { +- if len(args.Names) != len(args.Values) { +- return fmt.Errorf("Names and Values must have the same length") +- } +- // invalid counter update requests will be silently dropped. (no audience) +- for i, n := range args.Names { +- v := args.Values[i] +- if n == "" || v < 0 { +- continue +- } +- counter.Add("fwd/"+n, v) +- } +- return nil +-} +- +-func (c *commandHandler) AddTest(ctx context.Context, loc protocol.Location) (*protocol.WorkspaceEdit, error) { +- var result *protocol.WorkspaceEdit +- err := c.run(ctx, commandConfig{ +- forURI: loc.URI, +- }, func(ctx context.Context, deps commandDeps) error { +- if deps.snapshot.FileKind(deps.fh) != file.Go { +- return fmt.Errorf("can't add test for non-Go file") +- } +- docedits, show, err := golang.AddTestForFunc(ctx, deps.snapshot, loc) +- if err != nil { +- return err +- } +- if err := applyChanges(ctx, c.s.client, docedits); err != nil { +- return err +- } +- +- if show != nil { +- showDocumentImpl(ctx, c.s.client, protocol.URI(show.URI), &show.Range, c.s.options) +- } +- return nil +- }) +- return result, err +-} +- +-func (c *commandHandler) LSP(ctx context.Context, param command.LSPArgs) (any, error) { +- response, valid, err := protocol.ServerDispatchCall(ctx, c.s, param.Method, param.Param) +- if !valid { +- return nil, fmt.Errorf("method %s does not exist", param.Method) +- } +- +- return response, err +-} +- +-// commandConfig configures common command set-up and execution. +-type commandConfig struct { +- requireSave bool // whether all files must be saved for the command to work +- progress string // title to use for progress reporting. If empty, no progress will be reported. +- progressStyle settings.WorkDoneProgressStyle // style information for client-side progress display. +- forView string // view to resolve to a snapshot; incompatible with forURI +- forURI protocol.DocumentURI // URI to resolve to a snapshot. If unset, snapshot will be nil. +-} +- +-// commandDeps is evaluated from a commandConfig. Note that not all fields may +-// be populated, depending on which configuration is set. See comments in-line +-// for details. 
+-type commandDeps struct { +- snapshot *cache.Snapshot // present if cfg.forURI or forView was set +- fh file.Handle // present if cfg.forURI was set +- work *progress.WorkDone // present if cfg.progress was set +-} +- +-type commandFunc func(context.Context, commandDeps) error +- +-// These strings are reported as the final WorkDoneProgressEnd message +-// for each workspace/executeCommand request. +-const ( +- CommandCanceled = "canceled" +- CommandFailed = "failed" +- CommandCompleted = "completed" +-) +- +-// run performs command setup for command execution, and invokes the given run +-// function. If cfg.async is set, run executes the given func in a separate +-// goroutine, and returns as soon as setup is complete and the goroutine is +-// scheduled. +-// +-// Invariant: if the resulting error is non-nil, the given run func will +-// (eventually) be executed exactly once. +-func (c *commandHandler) run(ctx context.Context, cfg commandConfig, run commandFunc) (err error) { +- if cfg.requireSave { +- var unsaved []string +- for _, overlay := range c.s.session.Overlays() { +- if !overlay.SameContentsOnDisk() { +- unsaved = append(unsaved, overlay.URI().Path()) +- } +- } +- if len(unsaved) > 0 { +- return fmt.Errorf("All files must be saved first (unsaved: %v).", unsaved) +- } +- } +- var deps commandDeps +- var release func() +- if cfg.forURI != "" && cfg.forView != "" { +- return bug.Errorf("internal error: forURI=%q, forView=%q", cfg.forURI, cfg.forView) +- } +- if cfg.forURI != "" { +- deps.fh, deps.snapshot, release, err = c.s.session.FileOf(ctx, cfg.forURI) +- if err != nil { +- return err +- } +- +- } else if cfg.forView != "" { +- view, err := c.s.session.View(cfg.forView) +- if err != nil { +- return err +- } +- deps.snapshot, release, err = view.Snapshot() +- if err != nil { +- return err +- } +- +- } else { +- release = func() {} +- } +- // Inv: release() must be called exactly once after this point. +- // In the async case, runcmd may outlive run(). +- +- ctx, cancel := context.WithCancel(xcontext.Detach(ctx)) +- if cfg.progress != "" { +- header := "" +- if _, ok := c.s.options.SupportedWorkDoneProgressFormats[cfg.progressStyle]; ok && cfg.progressStyle != "" { +- header = fmt.Sprintf("style: %s\n\n", cfg.progressStyle) +- } +- deps.work = c.s.progress.Start(ctx, cfg.progress, header+"Running...", c.params.WorkDoneToken, cancel) +- } +- runcmd := func() error { +- defer release() +- defer cancel() +- err := run(ctx, deps) +- if deps.work != nil { +- switch { +- case errors.Is(err, context.Canceled): +- deps.work.End(ctx, CommandCanceled) +- case err != nil: +- event.Error(ctx, "command error", err) +- deps.work.End(ctx, CommandFailed) +- default: +- deps.work.End(ctx, CommandCompleted) +- } +- } +- return err +- } +- +- // For legacy reasons, gopls.run_govulncheck must run asynchronously. +- // TODO(golang/vscode-go#3572): remove this (along with the +- // gopls.run_govulncheck command entirely) once VS Code only uses the new +- // gopls.vulncheck command. 
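
Every command run through this wrapper ends its progress report with one of the three strings declared above, chosen from the command's error. That mapping can be isolated in a few lines; endStatus below is an illustrative helper, not part of gopls:

package main

import (
	"context"
	"errors"
	"fmt"
)

const (
	commandCanceled  = "canceled"
	commandFailed    = "failed"
	commandCompleted = "completed"
)

// endStatus picks the final WorkDoneProgressEnd message for a finished
// command: cancellation is reported distinctly from real failures.
func endStatus(err error) string {
	switch {
	case errors.Is(err, context.Canceled):
		return commandCanceled
	case err != nil:
		return commandFailed
	default:
		return commandCompleted
	}
}

func main() {
	fmt.Println(endStatus(nil))                           // completed
	fmt.Println(endStatus(context.Canceled))              // canceled
	fmt.Println(endStatus(errors.New("go test failed")))  // failed
}
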
+- if c.params.Command == "gopls.run_govulncheck" { +- if cfg.progress == "" { +- log.Fatalf("asynchronous command gopls.run_govulncheck does not enable progress reporting") +- } +- go func() { +- if err := runcmd(); err != nil { +- showMessage(ctx, c.s.client, protocol.Error, err.Error()) +- } +- }() +- return nil +- } +- +- return runcmd() +-} +- +-func (c *commandHandler) ApplyFix(ctx context.Context, args command.ApplyFixArgs) (*protocol.WorkspaceEdit, error) { +- var result *protocol.WorkspaceEdit +- err := c.run(ctx, commandConfig{ +- // Note: no progress here. Applying fixes should be quick. +- forURI: args.Location.URI, +- }, func(ctx context.Context, deps commandDeps) error { +- changes, err := golang.ApplyFix(ctx, args.Fix, deps.snapshot, deps.fh, args.Location.Range) +- if err != nil { +- return err +- } +- wsedit := protocol.NewWorkspaceEdit(changes...) +- if args.ResolveEdits { +- result = wsedit +- return nil +- } +- return applyChanges(ctx, c.s.client, changes) +- }) +- return result, err +-} +- +-func (c *commandHandler) RegenerateCgo(ctx context.Context, args command.URIArg) error { +- return c.run(ctx, commandConfig{ +- progress: "Regenerating Cgo", +- }, func(ctx context.Context, _ commandDeps) error { +- return c.modifyState(ctx, FromRegenerateCgo, func() (*cache.Snapshot, func(), error) { +- // Resetting the view causes cgo to be regenerated via `go list`. +- v, err := c.s.session.ResetView(ctx, args.URI) +- if err != nil { +- return nil, nil, err +- } +- return v.Snapshot() +- }) +- }) +-} +- +-// modifyState performs an operation that modifies the snapshot state. +-// +-// It causes a snapshot diagnosis for the provided ModificationSource. +-func (c *commandHandler) modifyState(ctx context.Context, source ModificationSource, work func() (*cache.Snapshot, func(), error)) error { +- var wg sync.WaitGroup // tracks work done on behalf of this function, incl. diagnostics +- wg.Add(1) +- defer wg.Done() +- +- // Track progress on this operation for testing. +- if c.s.Options().VerboseWorkDoneProgress { +- work := c.s.progress.Start(ctx, DiagnosticWorkTitle(source), "Calculating file diagnostics...", nil, nil) +- go func() { +- wg.Wait() +- work.End(ctx, "Done.") +- }() +- } +- snapshot, release, err := work() +- if err != nil { +- return err +- } +- wg.Go(func() { +- // Diagnosing with the background context ensures new snapshots are fully +- // diagnosed. 
+- c.s.diagnoseSnapshot(snapshot.BackgroundContext(), snapshot, nil, 0) +- release() +- }) +- return nil +-} +- +-func (c *commandHandler) CheckUpgrades(ctx context.Context, args command.CheckUpgradesArgs) error { +- return c.run(ctx, commandConfig{ +- forURI: args.URI, +- progress: "Checking for upgrades", +- }, func(ctx context.Context, deps commandDeps) error { +- return c.modifyState(ctx, FromCheckUpgrades, func() (*cache.Snapshot, func(), error) { +- upgrades, err := c.s.getUpgrades(ctx, deps.snapshot, args.URI, args.Modules) +- if err != nil { +- return nil, nil, err +- } +- return c.s.session.InvalidateView(ctx, deps.snapshot.View(), cache.StateChange{ +- ModuleUpgrades: map[protocol.DocumentURI]map[string]string{args.URI: upgrades}, +- }) +- }) +- }) +-} +- +-func (c *commandHandler) AddDependency(ctx context.Context, args command.DependencyArgs) error { +- return c.GoGetModule(ctx, args) +-} +- +-func (c *commandHandler) UpgradeDependency(ctx context.Context, args command.DependencyArgs) error { +- return c.GoGetModule(ctx, args) +-} +- +-func (c *commandHandler) ResetGoModDiagnostics(ctx context.Context, args command.ResetGoModDiagnosticsArgs) error { +- return c.run(ctx, commandConfig{ +- forURI: args.URI, +- }, func(ctx context.Context, deps commandDeps) error { +- return c.modifyState(ctx, FromResetGoModDiagnostics, func() (*cache.Snapshot, func(), error) { +- return c.s.session.InvalidateView(ctx, deps.snapshot.View(), cache.StateChange{ +- ModuleUpgrades: map[protocol.DocumentURI]map[string]string{ +- deps.fh.URI(): nil, +- }, +- Vulns: map[protocol.DocumentURI]*vulncheck.Result{ +- deps.fh.URI(): nil, +- }, +- }) +- }) +- }) +-} +- +-func (c *commandHandler) GoGetModule(ctx context.Context, args command.DependencyArgs) error { +- return c.run(ctx, commandConfig{ +- progress: "Running go get", +- forURI: args.URI, +- }, func(ctx context.Context, deps commandDeps) error { +- return c.s.runGoModUpdateCommands(ctx, deps.snapshot, args.URI, func(invoke func(...string) (*bytes.Buffer, error)) error { +- return runGoGetModule(invoke, args.AddRequire, args.GoCmdArgs) +- }) +- }) +-} +- +-// TODO(rFindley): UpdateGoSum, Tidy, and Vendor could probably all be one command. 
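
AddDependency and UpgradeDependency both funnel into GoGetModule, which ultimately invokes `go get` inside the module. Stripped of gopls' invocation plumbing, the underlying call is roughly the following sketch; the module path is only an example, and the handler above additionally passes -d:

package main

import (
	"fmt"
	"log"
	"os/exec"
)

func main() {
	// Run `go get` for a specific module version, with the working
	// directory set to the module that should receive the dependency.
	cmd := exec.Command("go", "get", "golang.org/x/sync@v0.8.0")
	cmd.Dir = "." // run from within a directory containing go.mod
	out, err := cmd.CombinedOutput()
	if err != nil {
		log.Fatalf("go get: %v\n%s", err, out)
	}
	fmt.Printf("%s", out)
}
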
+-func (c *commandHandler) UpdateGoSum(ctx context.Context, args command.URIArgs) error { +- return c.run(ctx, commandConfig{ +- progress: "Updating go.sum", +- }, func(ctx context.Context, _ commandDeps) error { +- for _, uri := range args.URIs { +- fh, snapshot, release, err := c.s.session.FileOf(ctx, uri) +- if err != nil { +- return err +- } +- defer release() +- if err := c.s.runGoModUpdateCommands(ctx, snapshot, fh.URI(), func(invoke func(...string) (*bytes.Buffer, error)) error { +- _, err := invoke("list", "all") +- return err +- }); err != nil { +- return err +- } +- } +- return nil +- }) +-} +- +-func (c *commandHandler) Tidy(ctx context.Context, args command.URIArgs) error { +- return c.run(ctx, commandConfig{ +- progress: "Running go mod tidy", +- }, func(ctx context.Context, _ commandDeps) error { +- for _, uri := range args.URIs { +- fh, snapshot, release, err := c.s.session.FileOf(ctx, uri) +- if err != nil { +- return err +- } +- defer release() +- if err := c.s.runGoModUpdateCommands(ctx, snapshot, fh.URI(), func(invoke func(...string) (*bytes.Buffer, error)) error { +- _, err := invoke("mod", "tidy") +- return err +- }); err != nil { +- return err +- } +- } +- return nil +- }) +-} +- +-func (c *commandHandler) Vendor(ctx context.Context, args command.URIArg) error { +- return c.run(ctx, commandConfig{ +- requireSave: true, // TODO(adonovan): probably not needed; but needs a test. +- progress: "Running go mod vendor", +- forURI: args.URI, +- }, func(ctx context.Context, deps commandDeps) error { +- // Use RunGoCommandPiped here so that we don't compete with any other go +- // command invocations. go mod vendor deletes modules.txt before recreating +- // it, and therefore can run into file locking issues on Windows if that +- // file is in use by another process, such as go list. +- // +- // If golang/go#44119 is resolved, go mod vendor will instead modify +- // modules.txt in-place. In that case we could theoretically allow this +- // command to run concurrently. +- stderr := new(bytes.Buffer) +- inv, cleanupInvocation, err := deps.snapshot.GoCommandInvocation(cache.NetworkOK, args.URI.DirPath(), "mod", []string{"vendor"}) +- if err != nil { +- return err +- } +- defer cleanupInvocation() +- err = deps.snapshot.View().GoCommandRunner().RunPiped(ctx, *inv, &bytes.Buffer{}, stderr) +- if err != nil { +- return fmt.Errorf("running go mod vendor failed: %v\nstderr:\n%s", err, stderr.String()) +- } +- return nil +- }) +-} +- +-func (c *commandHandler) EditGoDirective(ctx context.Context, args command.EditGoDirectiveArgs) error { +- return c.run(ctx, commandConfig{ +- requireSave: true, // if go.mod isn't saved it could cause a problem +- forURI: args.URI, +- }, func(ctx context.Context, _ commandDeps) error { +- fh, snapshot, release, err := c.s.session.FileOf(ctx, args.URI) +- if err != nil { +- return err +- } +- defer release() +- if err := c.s.runGoModUpdateCommands(ctx, snapshot, fh.URI(), func(invoke func(...string) (*bytes.Buffer, error)) error { +- _, err := invoke("mod", "edit", "-go", args.Version) +- return err +- }); err != nil { +- return err +- } +- return nil +- }) +-} +- +-func (c *commandHandler) RemoveDependency(ctx context.Context, args command.RemoveDependencyArgs) error { +- return c.run(ctx, commandConfig{ +- progress: "Removing dependency", +- forURI: args.URI, +- }, func(ctx context.Context, deps commandDeps) error { +- // See the documentation for OnlyDiagnostic. 
+- // +- // TODO(rfindley): In Go 1.17+, we will be able to use the go command +- // without checking if the module is tidy. +- if args.OnlyDiagnostic { +- return c.s.runGoModUpdateCommands(ctx, deps.snapshot, args.URI, func(invoke func(...string) (*bytes.Buffer, error)) error { +- if err := runGoGetModule(invoke, false, []string{args.ModulePath + "@none"}); err != nil { +- return err +- } +- _, err := invoke("mod", "tidy") +- return err +- }) +- } +- pm, err := deps.snapshot.ParseMod(ctx, deps.fh) +- if err != nil { +- return err +- } +- edits, err := dropDependency(pm, args.ModulePath) +- if err != nil { +- return err +- } +- return applyChanges(ctx, c.s.client, []protocol.DocumentChange{protocol.DocumentChangeEdit(deps.fh, edits)}) +- }) +-} +- +-// dropDependency returns the edits to remove the given require from the go.mod +-// file. +-func dropDependency(pm *cache.ParsedModule, modulePath string) ([]protocol.TextEdit, error) { +- // We need a private copy of the parsed go.mod file, since we're going to +- // modify it. +- copied, err := modfile.Parse("", pm.Mapper.Content, nil) +- if err != nil { +- return nil, err +- } +- if err := copied.DropRequire(modulePath); err != nil { +- return nil, err +- } +- copied.Cleanup() +- newContent, err := copied.Format() +- if err != nil { +- return nil, err +- } +- // Calculate the edits to be made due to the change. +- diff := diff.Bytes(pm.Mapper.Content, newContent) +- return protocol.EditsFromDiffEdits(pm.Mapper, diff) +-} +- +-func (c *commandHandler) Doc(ctx context.Context, args command.DocArgs) (protocol.URI, error) { +- if args.Location.URI == "" { +- return "", errors.New("missing location URI") +- } +- +- var result protocol.URI +- err := c.run(ctx, commandConfig{ +- progress: "", // the operation should be fast +- forURI: args.Location.URI, +- }, func(ctx context.Context, deps commandDeps) error { +- pkg, pgf, err := golang.NarrowestPackageForFile(ctx, deps.snapshot, args.Location.URI) +- if err != nil { +- return err +- } +- start, end, err := pgf.RangePos(args.Location.Range) +- if err != nil { +- return err +- } +- +- // Start web server. +- web, err := c.s.getWeb() +- if err != nil { +- return err +- } +- +- // Compute package path and optional symbol fragment +- // (e.g. "#Buffer.Len") from the selection. +- pkgpath, fragment, _ := golang.DocFragment(pkg, pgf, start, end) +- +- // Direct the client to open the /pkg page. +- result = web.PkgURL(deps.snapshot.View().ID(), pkgpath, fragment) +- if args.ShowDocument { +- openClientBrowser(ctx, c.s.client, "Doc", result, c.s.Options()) +- } +- +- return nil +- }) +- return result, err +-} +- +-func (c *commandHandler) RunTests(ctx context.Context, args command.RunTestsArgs) error { +- return c.run(ctx, commandConfig{ +- progress: "Running go test", // (asynchronous) +- requireSave: true, // go test honors overlays, but tests themselves cannot +- forURI: args.URI, +- }, func(ctx context.Context, deps commandDeps) error { +- jsonrpc2.Async(ctx) // don't block RPCs behind this command, since it can take a while +- return c.runTests(ctx, deps.snapshot, deps.work, args.URI, args.Tests, args.Benchmarks) +- }) +-} +- +-func (c *commandHandler) runTests(ctx context.Context, snapshot *cache.Snapshot, work *progress.WorkDone, uri protocol.DocumentURI, tests, benchmarks []string) error { +- // TODO: fix the error reporting when this runs async. 
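
dropDependency above edits a private copy of the parsed go.mod and then diffs the old content against the new. The golang.org/x/mod/modfile half of that is easy to exercise on its own; a sketch that drops one require and prints the reformatted file, with made-up go.mod contents:

package main

import (
	"fmt"
	"log"

	"golang.org/x/mod/modfile"
)

func main() {
	src := []byte(`module example.com/demo

go 1.22

require (
	github.com/pkg/errors v0.9.1
	golang.org/x/sync v0.7.0
)
`)
	f, err := modfile.Parse("go.mod", src, nil)
	if err != nil {
		log.Fatal(err)
	}
	// Remove the require directive for one module path, then tidy up the
	// syntax tree and format it back to bytes, as dropDependency does.
	if err := f.DropRequire("github.com/pkg/errors"); err != nil {
		log.Fatal(err)
	}
	f.Cleanup()
	out, err := f.Format()
	if err != nil {
		log.Fatal(err)
	}
	fmt.Print(string(out))
}
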
+- meta, err := snapshot.NarrowestMetadataForFile(ctx, uri) +- if err != nil { +- return err +- } +- pkgPath := string(meta.ForTest) +- +- // create output +- buf := &bytes.Buffer{} +- ew := progress.NewEventWriter(ctx, "test") +- out := io.MultiWriter(ew, progress.NewWorkDoneWriter(ctx, work), buf) +- +- // Run `go test -run Func` on each test. +- var failedTests int +- for _, funcName := range tests { +- args := []string{pkgPath, "-v", "-count=1", fmt.Sprintf("-run=^%s$", regexp.QuoteMeta(funcName))} +- inv, cleanupInvocation, err := snapshot.GoCommandInvocation(cache.NoNetwork, uri.DirPath(), "test", args) +- if err != nil { +- return err +- } +- defer cleanupInvocation() +- if err := snapshot.View().GoCommandRunner().RunPiped(ctx, *inv, out, out); err != nil { +- if errors.Is(err, context.Canceled) { +- return err +- } +- failedTests++ +- } +- } +- +- // Run `go test -run=^$ -bench Func` on each test. +- var failedBenchmarks int +- for _, funcName := range benchmarks { +- inv, cleanupInvocation, err := snapshot.GoCommandInvocation(cache.NoNetwork, uri.DirPath(), "test", []string{ +- pkgPath, "-v", "-run=^$", fmt.Sprintf("-bench=^%s$", regexp.QuoteMeta(funcName)), +- }) +- if err != nil { +- return err +- } +- defer cleanupInvocation() +- if err := snapshot.View().GoCommandRunner().RunPiped(ctx, *inv, out, out); err != nil { +- if errors.Is(err, context.Canceled) { +- return err +- } +- failedBenchmarks++ +- } +- } +- +- var title string +- if len(tests) > 0 && len(benchmarks) > 0 { +- title = "tests and benchmarks" +- } else if len(tests) > 0 { +- title = "tests" +- } else if len(benchmarks) > 0 { +- title = "benchmarks" +- } else { +- return errors.New("No functions were provided") +- } +- message := fmt.Sprintf("all %s passed", title) +- if failedTests > 0 && failedBenchmarks > 0 { +- message = fmt.Sprintf("%d / %d tests failed and %d / %d benchmarks failed", failedTests, len(tests), failedBenchmarks, len(benchmarks)) +- } else if failedTests > 0 { +- message = fmt.Sprintf("%d / %d tests failed", failedTests, len(tests)) +- } else if failedBenchmarks > 0 { +- message = fmt.Sprintf("%d / %d benchmarks failed", failedBenchmarks, len(benchmarks)) +- } +- if failedTests > 0 || failedBenchmarks > 0 { +- message += "\n" + buf.String() +- } +- +- showMessage(ctx, c.s.client, protocol.Info, message) +- +- if failedTests > 0 || failedBenchmarks > 0 { +- return errors.New("gopls.test command failed") +- } +- return nil +-} +- +-func (c *commandHandler) Generate(ctx context.Context, args command.GenerateArgs) error { +- title := "Running go generate ." +- if args.Recursive { +- title = "Running go generate ./..." +- } +- return c.run(ctx, commandConfig{ +- requireSave: true, // commands executed by go generate cannot honor overlays +- progress: title, +- forURI: args.Dir, +- }, func(ctx context.Context, deps commandDeps) error { +- er := progress.NewEventWriter(ctx, "generate") +- +- pattern := "." +- if args.Recursive { +- pattern = "./..." 
+- } +- inv, cleanupInvocation, err := deps.snapshot.GoCommandInvocation(cache.NetworkOK, args.Dir.Path(), "generate", []string{"-x", pattern}) +- if err != nil { +- return err +- } +- defer cleanupInvocation() +- stderr := io.MultiWriter(er, progress.NewWorkDoneWriter(ctx, deps.work)) +- if err := deps.snapshot.View().GoCommandRunner().RunPiped(ctx, *inv, er, stderr); err != nil { +- return err +- } +- return nil +- }) +-} +- +-func (c *commandHandler) GoGetPackage(ctx context.Context, args command.GoGetPackageArgs) error { +- return c.run(ctx, commandConfig{ +- forURI: args.URI, +- progress: "Running go get", +- }, func(ctx context.Context, deps commandDeps) error { +- snapshot := deps.snapshot +- modURI := snapshot.GoModForFile(args.URI) +- if modURI == "" { +- return fmt.Errorf("no go.mod file found for %s", args.URI) +- } +- tempDir, cleanupModDir, err := cache.TempModDir(ctx, snapshot, modURI) +- if err != nil { +- return fmt.Errorf("creating a temp go.mod: %v", err) +- } +- defer cleanupModDir() +- +- inv, cleanupInvocation, err := snapshot.GoCommandInvocation(cache.NetworkOK, modURI.DirPath(), "list", +- []string{"-f", "{{.Module.Path}}@{{.Module.Version}}", "-mod=mod", "-modfile=" + filepath.Join(tempDir, "go.mod"), args.Pkg}, +- "GOWORK=off", +- ) +- if err != nil { +- return err +- } +- defer cleanupInvocation() +- stdout, err := snapshot.View().GoCommandRunner().Run(ctx, *inv) +- if err != nil { +- return err +- } +- ver := strings.TrimSpace(stdout.String()) +- return c.s.runGoModUpdateCommands(ctx, snapshot, args.URI, func(invoke func(...string) (*bytes.Buffer, error)) error { +- if args.AddRequire { +- if err := addModuleRequire(invoke, []string{ver}); err != nil { +- return err +- } +- } +- _, err := invoke(append([]string{"get", "-d"}, args.Pkg)...) +- return err +- }) +- }) +-} +- +-func (s *server) runGoModUpdateCommands(ctx context.Context, snapshot *cache.Snapshot, uri protocol.DocumentURI, run func(invoke func(...string) (*bytes.Buffer, error)) error) error { +- // TODO(rfindley): can/should this use findRootPattern? +- modURI := snapshot.GoModForFile(uri) +- if modURI == "" { +- return fmt.Errorf("no go.mod file found for %s", uri.Path()) +- } +- newModBytes, newSumBytes, err := snapshot.RunGoModUpdateCommands(ctx, modURI, run) +- if err != nil { +- return err +- } +- sumURI := protocol.URIFromPath(strings.TrimSuffix(modURI.Path(), ".mod") + ".sum") +- +- modChange, err := computeEditChange(ctx, snapshot, modURI, newModBytes) +- if err != nil { +- return err +- } +- sumChange, err := computeEditChange(ctx, snapshot, sumURI, newSumBytes) +- if err != nil { +- return err +- } +- +- var changes []protocol.DocumentChange +- if modChange.Valid() { +- changes = append(changes, modChange) +- } +- if sumChange.Valid() { +- changes = append(changes, sumChange) +- } +- return applyChanges(ctx, s.client, changes) +-} +- +-// computeEditChange computes the edit change required to transform the +-// snapshot file specified by uri to the provided new content. +-// Beware: returns a DocumentChange that is !Valid() if none were necessary. +-// +-// If the file is not open, computeEditChange simply writes the new content to +-// disk. +-// +-// TODO(rfindley): fix this API asymmetry. It should be up to the caller to +-// write the file or apply the edits. 
+-func computeEditChange(ctx context.Context, snapshot *cache.Snapshot, uri protocol.DocumentURI, newContent []byte) (protocol.DocumentChange, error) { +- fh, err := snapshot.ReadFile(ctx, uri) +- if err != nil { +- return protocol.DocumentChange{}, err +- } +- oldContent, err := fh.Content() +- if err != nil && !os.IsNotExist(err) { +- return protocol.DocumentChange{}, err +- } +- +- if bytes.Equal(oldContent, newContent) { +- return protocol.DocumentChange{}, nil // note: result is !Valid() +- } +- +- // Sending a workspace edit to a closed file causes VS Code to open the +- // file and leave it unsaved. We would rather apply the changes directly, +- // especially to go.sum, which should be mostly invisible to the user. +- if !snapshot.IsOpen(uri) { +- err := os.WriteFile(uri.Path(), newContent, 0666) +- return protocol.DocumentChange{}, err +- } +- +- m := protocol.NewMapper(fh.URI(), oldContent) +- diff := diff.Bytes(oldContent, newContent) +- textedits, err := protocol.EditsFromDiffEdits(m, diff) +- if err != nil { +- return protocol.DocumentChange{}, err +- } +- return protocol.DocumentChangeEdit(fh, textedits), nil +-} +- +-func applyChanges(ctx context.Context, cli protocol.Client, changes []protocol.DocumentChange) error { +- if len(changes) == 0 { +- return nil +- } +- response, err := cli.ApplyEdit(ctx, &protocol.ApplyWorkspaceEditParams{ +- Edit: *protocol.NewWorkspaceEdit(changes...), +- }) +- if err != nil { +- return err +- } +- if !response.Applied { +- return fmt.Errorf("edits not applied because of %s", response.FailureReason) +- } +- return nil +-} +- +-func runGoGetModule(invoke func(...string) (*bytes.Buffer, error), addRequire bool, args []string) error { +- if addRequire { +- if err := addModuleRequire(invoke, args); err != nil { +- return err +- } +- } +- _, err := invoke(append([]string{"get", "-d"}, args...)...) +- return err +-} +- +-func addModuleRequire(invoke func(...string) (*bytes.Buffer, error), args []string) error { +- // Using go get to create a new dependency results in an +- // `// indirect` comment we may not want. The only way to avoid it +- // is to add the require as direct first. Then we can use go get to +- // update go.sum and tidy up. +- _, err := invoke(append([]string{"mod", "edit", "-require"}, args...)...) +- return err +-} +- +-// TODO(rfindley): inline. +-func (s *server) getUpgrades(ctx context.Context, snapshot *cache.Snapshot, uri protocol.DocumentURI, modules []string) (map[string]string, error) { +- args := append([]string{"-mod=readonly", "-m", "-u", "-json"}, modules...) +- inv, cleanup, err := snapshot.GoCommandInvocation(cache.NetworkOK, uri.DirPath(), "list", args) +- if err != nil { +- return nil, err +- } +- defer cleanup() +- stdout, err := snapshot.View().GoCommandRunner().Run(ctx, *inv) +- if err != nil { +- return nil, err +- } +- +- upgrades := map[string]string{} +- for dec := json.NewDecoder(stdout); dec.More(); { +- mod := &gocommand.ModuleJSON{} +- if err := dec.Decode(mod); err != nil { +- return nil, err +- } +- if mod.Update == nil { +- continue +- } +- upgrades[mod.Path] = mod.Update.Version +- } +- return upgrades, nil +-} +- +-func (c *commandHandler) GCDetails(ctx context.Context, uri protocol.DocumentURI) error { +- return c.run(ctx, commandConfig{ +- forURI: uri, +- }, func(ctx context.Context, deps commandDeps) error { +- return c.modifyState(ctx, FromToggleCompilerOptDetails, func() (*cache.Snapshot, func(), error) { +- // Don't blindly use "dir := deps.fh.URI().Dir()"; validate. 
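
getUpgrades above reads the output of `go list -m -u -json`, which is a stream of JSON objects rather than a single array, so it decodes in a loop with json.Decoder. A minimal sketch of that loop, using a trimmed-down module struct in place of gocommand.ModuleJSON and a hard-coded string in place of real go command output:

package main

import (
	"encoding/json"
	"fmt"
	"log"
	"strings"
)

type moduleJSON struct {
	Path   string
	Update *struct{ Version string }
}

func main() {
	// Two concatenated objects, as `go list -m -u -json` emits them.
	out := `{"Path":"golang.org/x/sync","Update":{"Version":"v0.8.0"}}
{"Path":"golang.org/x/mod"}`

	upgrades := map[string]string{}
	for dec := json.NewDecoder(strings.NewReader(out)); dec.More(); {
		var m moduleJSON
		if err := dec.Decode(&m); err != nil {
			log.Fatal(err)
		}
		if m.Update == nil {
			continue // already at the latest version
		}
		upgrades[m.Path] = m.Update.Version
	}
	fmt.Println(upgrades) // map[golang.org/x/sync:v0.8.0]
}
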
+- meta, err := deps.snapshot.NarrowestMetadataForFile(ctx, deps.fh.URI()) +- if err != nil { +- return nil, nil, err +- } +- if len(meta.CompiledGoFiles) == 0 { +- return nil, nil, fmt.Errorf("package %q does not compile file %q", meta.ID, deps.fh.URI()) +- } +- dir := meta.CompiledGoFiles[0].Dir() +- +- want := !deps.snapshot.WantCompilerOptDetails(dir) // toggle per-directory flag +- return c.s.session.InvalidateView(ctx, deps.snapshot.View(), cache.StateChange{ +- CompilerOptDetails: map[protocol.DocumentURI]bool{dir: want}, +- }) +- }) +- }) +-} +- +-func (c *commandHandler) ListKnownPackages(ctx context.Context, args command.URIArg) (command.ListKnownPackagesResult, error) { +- var result command.ListKnownPackagesResult +- err := c.run(ctx, commandConfig{ +- progress: "Listing packages", +- forURI: args.URI, +- }, func(ctx context.Context, deps commandDeps) error { +- pkgs, err := golang.KnownPackagePaths(ctx, deps.snapshot, deps.fh) +- for _, pkg := range pkgs { +- result.Packages = append(result.Packages, string(pkg)) +- } +- return err +- }) +- return result, err +-} +- +-func (c *commandHandler) ListImports(ctx context.Context, args command.URIArg) (command.ListImportsResult, error) { +- var result command.ListImportsResult +- err := c.run(ctx, commandConfig{ +- forURI: args.URI, +- }, func(ctx context.Context, deps commandDeps) error { +- fh, err := deps.snapshot.ReadFile(ctx, args.URI) +- if err != nil { +- return err +- } +- pgf, err := deps.snapshot.ParseGo(ctx, fh, parsego.Header) +- if err != nil { +- return err +- } +- fset := tokeninternal.FileSetFor(pgf.Tok) +- for _, group := range astutil.Imports(fset, pgf.File) { +- for _, imp := range group { +- if imp.Path == nil { +- continue +- } +- var name string +- if imp.Name != nil { +- name = imp.Name.Name +- } +- result.Imports = append(result.Imports, command.FileImport{ +- Path: string(metadata.UnquoteImportPath(imp)), +- Name: name, +- }) +- } +- } +- meta, err := deps.snapshot.NarrowestMetadataForFile(ctx, args.URI) +- if err != nil { +- return err // e.g. 
cancelled +- } +- for pkgPath := range meta.DepsByPkgPath { +- result.PackageImports = append(result.PackageImports, +- command.PackageImport{Path: string(pkgPath)}) +- } +- sort.Slice(result.PackageImports, func(i, j int) bool { +- return result.PackageImports[i].Path < result.PackageImports[j].Path +- }) +- return nil +- }) +- return result, err +-} +- +-func (c *commandHandler) AddImport(ctx context.Context, args command.AddImportArgs) error { +- return c.run(ctx, commandConfig{ +- progress: "Adding import", +- forURI: args.URI, +- }, func(ctx context.Context, deps commandDeps) error { +- edits, err := golang.AddImport(ctx, deps.snapshot, deps.fh, args.ImportPath) +- if err != nil { +- return fmt.Errorf("could not add import: %v", err) +- } +- return applyChanges(ctx, c.s.client, []protocol.DocumentChange{protocol.DocumentChangeEdit(deps.fh, edits)}) +- }) +-} +- +-func (c *commandHandler) ExtractToNewFile(ctx context.Context, args protocol.Location) error { +- return c.run(ctx, commandConfig{ +- progress: "Extract to a new file", +- forURI: args.URI, +- }, func(ctx context.Context, deps commandDeps) error { +- changes, err := golang.ExtractToNewFile(ctx, deps.snapshot, deps.fh, args.Range) +- if err != nil { +- return err +- } +- return applyChanges(ctx, c.s.client, changes) +- }) +-} +- +-func (c *commandHandler) StartDebugging(ctx context.Context, args command.DebuggingArgs) (result command.DebuggingResult, _ error) { +- addr := args.Addr +- if addr == "" { +- addr = "localhost:0" +- } +- di := debug.GetInstance(ctx) +- if di == nil { +- return result, errors.New("internal error: server has no debugging instance") +- } +- listenedAddr, err := di.Serve(ctx, addr) +- if err != nil { +- return result, fmt.Errorf("starting debug server: %w", err) +- } +- result.URLs = []string{"http://" + listenedAddr} +- openClientBrowser(ctx, c.s.client, "Debug", result.URLs[0], c.s.Options()) +- return result, nil +-} +- +-func (c *commandHandler) StartProfile(ctx context.Context, args command.StartProfileArgs) (result command.StartProfileResult, _ error) { +- file, err := os.CreateTemp("", "gopls-profile-*") +- if err != nil { +- return result, fmt.Errorf("creating temp profile file: %v", err) +- } +- +- c.s.ongoingProfileMu.Lock() +- defer c.s.ongoingProfileMu.Unlock() +- +- if c.s.ongoingProfile != nil { +- file.Close() // ignore error +- return result, fmt.Errorf("profile already started (for %q)", c.s.ongoingProfile.Name()) +- } +- +- if err := pprof.StartCPUProfile(file); err != nil { +- file.Close() // ignore error +- return result, fmt.Errorf("starting profile: %v", err) +- } +- +- c.s.ongoingProfile = file +- return result, nil +-} +- +-func (c *commandHandler) StopProfile(ctx context.Context, args command.StopProfileArgs) (result command.StopProfileResult, _ error) { +- c.s.ongoingProfileMu.Lock() +- defer c.s.ongoingProfileMu.Unlock() +- +- prof := c.s.ongoingProfile +- c.s.ongoingProfile = nil +- +- if prof == nil { +- return result, fmt.Errorf("no ongoing profile") +- } +- +- pprof.StopCPUProfile() +- if err := prof.Close(); err != nil { +- return result, fmt.Errorf("closing profile file: %v", err) +- } +- result.File = prof.Name() +- return result, nil +-} +- +-func (c *commandHandler) FetchVulncheckResult(ctx context.Context, arg command.URIArg) (map[protocol.DocumentURI]*vulncheck.Result, error) { +- ret := map[protocol.DocumentURI]*vulncheck.Result{} +- err := c.run(ctx, commandConfig{forURI: arg.URI}, func(ctx context.Context, deps commandDeps) error { +- if 
deps.snapshot.Options().Vulncheck == settings.ModeVulncheckImports { +- for _, modfile := range deps.snapshot.View().ModFiles() { +- res, err := deps.snapshot.ModVuln(ctx, modfile) +- if err != nil { +- return err +- } +- ret[modfile] = res +- } +- } +- // Overwrite if there is any govulncheck-based result. +- maps.Copy(ret, deps.snapshot.Vulnerabilities()) +- return nil +- }) +- return ret, err +-} +- +-const GoVulncheckCommandTitle = "govulncheck" +- +-func (c *commandHandler) Vulncheck(ctx context.Context, args command.VulncheckArgs) (command.VulncheckResult, error) { +- if args.URI == "" { +- return command.VulncheckResult{}, errors.New("VulncheckArgs is missing URI field") +- } +- +- var commandResult command.VulncheckResult +- err := c.run(ctx, commandConfig{ +- progress: GoVulncheckCommandTitle, +- progressStyle: settings.WorkDoneProgressStyleLog, +- requireSave: true, // govulncheck cannot honor overlays +- forURI: args.URI, +- }, func(ctx context.Context, deps commandDeps) error { +- jsonrpc2.Async(ctx) // run this in parallel with other requests: vulncheck can be slow. +- +- workDoneWriter := progress.NewWorkDoneWriter(ctx, deps.work) +- dir := args.URI.DirPath() +- pattern := args.Pattern +- +- result, err := scan.RunGovulncheck(ctx, pattern, deps.snapshot, dir, workDoneWriter) +- if err != nil { +- return err +- } +- commandResult.Result = result +- commandResult.Token = deps.work.Token() +- +- snapshot, release, err := c.s.session.InvalidateView(ctx, deps.snapshot.View(), cache.StateChange{ +- Vulns: map[protocol.DocumentURI]*vulncheck.Result{args.URI: result}, +- }) +- if err != nil { +- return err +- } +- defer release() +- +- // Diagnosing with the background context ensures new snapshots are fully +- // diagnosed. +- c.s.diagnoseSnapshot(snapshot.BackgroundContext(), snapshot, nil, 0) +- +- affecting := make(map[string]bool, len(result.Entries)) +- for _, finding := range result.Findings { +- if len(finding.Trace) > 1 { // at least 2 frames if callstack exists (vulnerability, entry) +- affecting[finding.OSV] = true +- } +- } +- if len(affecting) == 0 { +- showMessage(ctx, c.s.client, protocol.Info, "No vulnerabilities found") +- return nil +- } +- affectingOSVs := make([]string, 0, len(affecting)) +- for id := range affecting { +- affectingOSVs = append(affectingOSVs, id) +- } +- sort.Strings(affectingOSVs) +- +- showMessage(ctx, c.s.client, protocol.Warning, fmt.Sprintf("Found %v", strings.Join(affectingOSVs, ", "))) +- +- return nil +- }) +- if err != nil { +- return command.VulncheckResult{}, err +- } +- return commandResult, nil +-} +- +-// RunGovulncheck is like Vulncheck (in fact, a copy), but is tweaked slightly +-// to run asynchronously rather than return a result. +-// +-// This logic was copied, rather than factored out, as this implementation is +-// slated for deletion. +-// +-// TODO(golang/vscode-go#3572) +-// TODO(hxjiang): deprecate gopls.run_govulncheck. +-func (c *commandHandler) RunGovulncheck(ctx context.Context, args command.VulncheckArgs) (command.RunVulncheckResult, error) { +- // Deduplicate the RunGovulncheck command so only one is running at a time. 
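
The deduplication announced in the comment above is a compare-and-swap on an atomic flag, as the code that follows shows: only the caller that flips the flag from false to true proceeds, and everyone else returns immediately. A standalone sketch of the same pattern, with illustrative names:

package main

import (
	"fmt"
	"sync"
	"sync/atomic"
)

var scanInProgress atomic.Bool

// tryScan runs fn only if no other scan is in flight; concurrent callers
// that lose the CompareAndSwap race return false without running fn.
func tryScan(fn func()) bool {
	if !scanInProgress.CompareAndSwap(false, true) {
		return false // already running
	}
	defer scanInProgress.Store(false)
	fn()
	return true
}

func main() {
	var started atomic.Int32
	var wg sync.WaitGroup
	for i := 0; i < 5; i++ {
		wg.Add(1)
		go func() {
			defer wg.Done()
			tryScan(func() { started.Add(1) })
		}()
	}
	wg.Wait()
	// Between 1 and 5 scans ran, but never two at the same time.
	fmt.Println("scans run:", started.Load())
}
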
+- if !c.s.runGovulncheckInProgress.CompareAndSwap(false, true) { +- c.s.client.ShowMessage(ctx, &protocol.ShowMessageParams{ +- Type: protocol.Info, +- Message: "A govulncheck scan is already in progress.", +- }) +- return command.RunVulncheckResult{}, nil +- } +- +- defer c.s.runGovulncheckInProgress.Store(false) +- +- if args.URI == "" { +- return command.RunVulncheckResult{}, errors.New("VulncheckArgs is missing URI field") +- } +- +- // Return the workdone token so that clients can identify when this +- // vulncheck invocation is complete. +- // +- // Since the run function executes asynchronously, we use a channel to +- // synchronize the start of the run and return the token. +- tokenChan := make(chan protocol.ProgressToken, 1) +- err := c.run(ctx, commandConfig{ +- progress: GoVulncheckCommandTitle, +- requireSave: true, // govulncheck cannot honor overlays +- forURI: args.URI, +- }, func(ctx context.Context, deps commandDeps) error { +- tokenChan <- deps.work.Token() +- +- workDoneWriter := progress.NewWorkDoneWriter(ctx, deps.work) +- dir := filepath.Dir(args.URI.Path()) +- pattern := args.Pattern +- +- result, err := scan.RunGovulncheck(ctx, pattern, deps.snapshot, dir, workDoneWriter) +- if err != nil { +- return err +- } +- +- snapshot, release, err := c.s.session.InvalidateView(ctx, deps.snapshot.View(), cache.StateChange{ +- Vulns: map[protocol.DocumentURI]*vulncheck.Result{args.URI: result}, +- }) +- if err != nil { +- return err +- } +- defer release() +- +- // Diagnosing with the background context ensures new snapshots are fully +- // diagnosed. +- c.s.diagnoseSnapshot(snapshot.BackgroundContext(), snapshot, nil, 0) +- +- affecting := make(map[string]bool, len(result.Entries)) +- for _, finding := range result.Findings { +- if len(finding.Trace) > 1 { // at least 2 frames if callstack exists (vulnerability, entry) +- affecting[finding.OSV] = true +- } +- } +- if len(affecting) == 0 { +- showMessage(ctx, c.s.client, protocol.Info, "No vulnerabilities found") +- return nil +- } +- affectingOSVs := make([]string, 0, len(affecting)) +- for id := range affecting { +- affectingOSVs = append(affectingOSVs, id) +- } +- sort.Strings(affectingOSVs) +- +- showMessage(ctx, c.s.client, protocol.Warning, fmt.Sprintf("Found %v", strings.Join(affectingOSVs, ", "))) +- +- return nil +- }) +- if err != nil { +- return command.RunVulncheckResult{}, err +- } +- select { +- case <-ctx.Done(): +- return command.RunVulncheckResult{}, ctx.Err() +- case token := <-tokenChan: +- return command.RunVulncheckResult{Token: token}, nil +- } +-} +- +-// MemStats implements the MemStats command. It returns an error as a +-// future-proof API, but the resulting error is currently always nil. +-func (c *commandHandler) MemStats(ctx context.Context) (command.MemStatsResult, error) { +- // GC a few times for stable results. +- runtime.GC() +- runtime.GC() +- runtime.GC() +- var m runtime.MemStats +- runtime.ReadMemStats(&m) +- return command.MemStatsResult{ +- HeapAlloc: m.HeapAlloc, +- HeapInUse: m.HeapInuse, +- TotalAlloc: m.TotalAlloc, +- }, nil +-} +- +-// WorkspaceStats implements the WorkspaceStats command, reporting information +-// about the current state of the loaded workspace for the current session. 
+-func (c *commandHandler) WorkspaceStats(ctx context.Context) (command.WorkspaceStatsResult, error) { +- var res command.WorkspaceStatsResult +- res.Files = c.s.session.Cache().FileStats() +- +- for _, view := range c.s.session.Views() { +- vs, err := collectViewStats(ctx, view) +- if err != nil { +- return res, err +- } +- res.Views = append(res.Views, vs) +- } +- return res, nil +-} +- +-func collectViewStats(ctx context.Context, view *cache.View) (command.ViewStats, error) { +- s, release, err := view.Snapshot() +- if err != nil { +- return command.ViewStats{}, err +- } +- defer release() +- +- allMD, err := s.AllMetadata(ctx) +- if err != nil { +- return command.ViewStats{}, err +- } +- allPackages := collectPackageStats(allMD) +- +- wsMD, err := s.WorkspaceMetadata(ctx) +- if err != nil { +- return command.ViewStats{}, err +- } +- workspacePackages := collectPackageStats(wsMD) +- +- var ids []golang.PackageID +- for _, mp := range wsMD { +- ids = append(ids, mp.ID) +- } +- +- diags, err := s.PackageDiagnostics(ctx, ids...) +- if err != nil { +- return command.ViewStats{}, err +- } +- +- ndiags := 0 +- for _, d := range diags { +- ndiags += len(d) +- } +- +- return command.ViewStats{ +- GoCommandVersion: view.GoVersionString(), +- AllPackages: allPackages, +- WorkspacePackages: workspacePackages, +- Diagnostics: ndiags, +- }, nil +-} +- +-func collectPackageStats(mps []*metadata.Package) command.PackageStats { +- var stats command.PackageStats +- stats.Packages = len(mps) +- modules := make(map[string]bool) +- +- for _, mp := range mps { +- n := len(mp.CompiledGoFiles) +- stats.CompiledGoFiles += n +- if n > stats.LargestPackage { +- stats.LargestPackage = n +- } +- if mp.Module != nil { +- modules[mp.Module.Path] = true +- } +- } +- stats.Modules = len(modules) +- +- return stats +-} +- +-// RunGoWorkCommand invokes `go work ` with the provided arguments. +-// +-// args.InitFirst controls whether to first run `go work init`. This allows a +-// single command to both create and recursively populate a go.work file -- as +-// of writing there is no `go work init -r`. +-// +-// Some thought went into implementing this command. Unlike the go.mod commands +-// above, this command simply invokes the go command and relies on the client +-// to notify gopls of file changes via didChangeWatchedFile notifications. +-// We could instead run these commands with GOWORK set to a temp file, but that +-// poses the following problems: +-// - directory locations in the resulting temp go.work file will be computed +-// relative to the directory containing that go.work. If the go.work is in a +-// tempdir, the directories will need to be translated to/from that dir. +-// - it would be simpler to use a temp go.work file in the workspace +-// directory, or whichever directory contains the real go.work file, but +-// that sets a bad precedent of writing to a user-owned directory. We +-// shouldn't start doing that. +-// - Sending workspace edits to create a go.work file would require using +-// the CreateFile resource operation, which would need to be tested in every +-// client as we haven't used it before. We don't have time for that right +-// now. +-// +-// Therefore, we simply require that the current go.work file is saved (if it +-// exists), and delegate to the go command. 
+-func (c *commandHandler) RunGoWorkCommand(ctx context.Context, args command.RunGoWorkArgs) error { +- return c.run(ctx, commandConfig{ +- progress: "Running go work command", +- forView: args.ViewID, +- }, func(ctx context.Context, deps commandDeps) (runErr error) { +- snapshot := deps.snapshot +- view := snapshot.View() +- viewDir := snapshot.Folder().Path() +- +- if view.Type() != cache.GoWorkView && view.GoWork() != "" { +- // If we are not using an existing go.work file, GOWORK must be explicitly off. +- // TODO(rfindley): what about GO111MODULE=off? +- return fmt.Errorf("cannot modify go.work files when GOWORK=off") +- } +- +- var gowork string +- // If the user has explicitly set GOWORK=off, we should warn them +- // explicitly and avoid potentially misleading errors below. +- if view.GoWork() != "" { +- gowork = view.GoWork().Path() +- fh, err := snapshot.ReadFile(ctx, view.GoWork()) +- if err != nil { +- return err // e.g. canceled +- } +- if !fh.SameContentsOnDisk() { +- return fmt.Errorf("must save workspace file %s before running go work commands", view.GoWork()) +- } +- } else { +- if !args.InitFirst { +- // If go.work does not exist, we should have detected that and asked +- // for InitFirst. +- return bug.Errorf("internal error: cannot run go work command: required go.work file not found") +- } +- gowork = filepath.Join(viewDir, "go.work") +- if err := c.invokeGoWork(ctx, viewDir, gowork, []string{"init"}); err != nil { +- return fmt.Errorf("running `go work init`: %v", err) +- } +- } +- +- return c.invokeGoWork(ctx, viewDir, gowork, args.Args) +- }) +-} +- +-func (c *commandHandler) invokeGoWork(ctx context.Context, viewDir, gowork string, args []string) error { +- inv := gocommand.Invocation{ +- Verb: "work", +- Args: args, +- WorkingDir: viewDir, +- Env: append(os.Environ(), fmt.Sprintf("GOWORK=%s", gowork)), +- } +- if _, err := c.s.session.GoCommandRunner().Run(ctx, inv); err != nil { +- return fmt.Errorf("running go work command: %v", err) +- } +- return nil +-} +- +-// showMessage causes the client to show a progress or error message. +-// +-// It reports whether it succeeded. If it fails, it writes an error to +-// the server log, so most callers can safely ignore the result. +-func showMessage(ctx context.Context, cli protocol.Client, typ protocol.MessageType, message string) bool { +- err := cli.ShowMessage(ctx, &protocol.ShowMessageParams{ +- Type: typ, +- Message: message, +- }) +- if err != nil { +- event.Error(ctx, "client.showMessage: %v", err) +- return false +- } +- return true +-} +- +-// openClientBrowser causes the LSP client to open the specified URL +-// in an external browser. +-// +-// If the client does not support window/showDocument, a window/showMessage +-// request is instead used, with the format "$title: open your browser to $url". +-func openClientBrowser(ctx context.Context, cli protocol.Client, title string, url protocol.URI, opts *settings.Options) { +- if opts.ShowDocumentSupported { +- showDocumentImpl(ctx, cli, url, nil, opts) +- } else { +- params := &protocol.ShowMessageParams{ +- Type: protocol.Info, +- Message: fmt.Sprintf("%s: open your browser to %s", title, url), +- } +- if err := cli.ShowMessage(ctx, params); err != nil { +- event.Error(ctx, "failed to show browser url", err) +- } +- } +-} +- +-// openClientEditor causes the LSP client to open the specified document +-// and select the indicated range. 
+-// +-// Note that VS Code 1.87.2 doesn't currently raise the window; this is +-// https://github.com/microsoft/vscode/issues/207634 +-func openClientEditor(ctx context.Context, cli protocol.Client, loc protocol.Location, opts *settings.Options) { +- if !opts.ShowDocumentSupported { +- return // no op +- } +- showDocumentImpl(ctx, cli, protocol.URI(loc.URI), &loc.Range, opts) +-} +- +-func showDocumentImpl(ctx context.Context, cli protocol.Client, url protocol.URI, rangeOpt *protocol.Range, opts *settings.Options) { +- if !opts.ShowDocumentSupported { +- return // no op +- } +- // In principle we shouldn't send a showDocument request to a +- // client that doesn't support it, as reported by +- // ShowDocumentClientCapabilities. But even clients that do +- // support it may defer the real work of opening the document +- // asynchronously, to avoid deadlocks due to rentrancy. +- // +- // For example: client sends request to server; server sends +- // showDocument to client; client opens editor; editor causes +- // new RPC to be sent to server, which is still busy with +- // previous request. (This happens in eglot.) +- // +- // So we can't rely on the success/failure information. +- // That's the reason this function doesn't return an error. +- +- // "External" means run the system-wide handler (e.g. open(1) +- // on macOS or xdg-open(1) on Linux) for this URL, ignoring +- // TakeFocus and Selection. Note that this may still end up +- // opening the same editor (e.g. VSCode) for a file: URL. +- res, err := cli.ShowDocument(ctx, &protocol.ShowDocumentParams{ +- URI: url, +- External: rangeOpt == nil, +- TakeFocus: true, +- Selection: rangeOpt, // optional +- }) +- if err != nil { +- event.Error(ctx, "client.showDocument: %v", err) +- } else if res != nil && !res.Success { +- event.Log(ctx, fmt.Sprintf("client declined to open document %v", url)) +- } +-} +- +-func (c *commandHandler) ChangeSignature(ctx context.Context, args command.ChangeSignatureArgs) (*protocol.WorkspaceEdit, error) { +- countChangeSignature.Inc() +- var result *protocol.WorkspaceEdit +- err := c.run(ctx, commandConfig{ +- forURI: args.Location.URI, +- }, func(ctx context.Context, deps commandDeps) error { +- pkg, pgf, err := golang.NarrowestPackageForFile(ctx, deps.snapshot, args.Location.URI) +- if err != nil { +- return err +- } +- +- // For now, gopls only supports parameter permutation or removal. +- var perm []int +- for _, newParam := range args.NewParams { +- if newParam.NewField != "" { +- return fmt.Errorf("adding new parameters is currently unsupported") +- } +- perm = append(perm, newParam.OldIndex) +- } +- +- docedits, err := golang.ChangeSignature(ctx, deps.snapshot, pkg, pgf, args.Location.Range, perm) +- if err != nil { +- return err +- } +- wsedit := protocol.NewWorkspaceEdit(docedits...) +- if args.ResolveEdits { +- result = wsedit +- return nil +- } +- return applyChanges(ctx, c.s.client, docedits) +- }) +- return result, err +-} +- +-func (c *commandHandler) DiagnoseFiles(ctx context.Context, args command.DiagnoseFilesArgs) error { +- return c.run(ctx, commandConfig{ +- progress: "Diagnose files", +- }, func(ctx context.Context, _ commandDeps) error { +- +- // TODO(rfindley): even better would be textDocument/diagnostics (golang/go#60122). +- // Though note that implementing pull diagnostics may cause some servers to +- // request diagnostics in an ad-hoc manner, and break our intentional pacing. 
+- +- ctx, done := event.Start(ctx, "commandHandler.DiagnoseFiles") +- defer done() +- +- snapshots := make(map[*cache.Snapshot]bool) +- for _, uri := range args.Files { +- fh, snapshot, release, err := c.s.session.FileOf(ctx, uri) +- if err != nil { +- return err +- } +- if snapshots[snapshot] || snapshot.FileKind(fh) != file.Go { +- release() +- continue +- } +- defer release() +- snapshots[snapshot] = true +- } +- +- var wg sync.WaitGroup +- for snapshot := range snapshots { +- wg.Go(func() { +- +- // Use the operation context for diagnosis, rather than +- // snapshot.BackgroundContext, because this operation does not create +- // new snapshots (so they should also be diagnosed by other means). +- c.s.diagnoseSnapshot(ctx, snapshot, nil, 0) +- }) +- } +- wg.Wait() +- +- return nil +- }) +-} +- +-func (c *commandHandler) Views(ctx context.Context) ([]command.View, error) { +- var summaries []command.View +- for _, view := range c.s.session.Views() { +- summaries = append(summaries, command.View{ +- ID: view.ID(), +- Type: view.Type().String(), +- Root: view.Root(), +- Folder: view.Folder().Dir, +- EnvOverlay: view.EnvOverlay(), +- }) +- } +- return summaries, nil +-} +- +-func (c *commandHandler) FreeSymbols(ctx context.Context, viewID string, loc protocol.Location) error { +- web, err := c.s.getWeb() +- if err != nil { +- return err +- } +- url := web.freesymbolsURL(viewID, loc) +- openClientBrowser(ctx, c.s.client, "Free symbols", url, c.s.Options()) +- return nil +-} +- +-func (c *commandHandler) SplitPackage(ctx context.Context, viewID, packageID string) error { +- web, err := c.s.getWeb() +- if err != nil { +- return err +- } +- url := web.splitpkgURL(viewID, packageID) +- openClientBrowser(ctx, c.s.client, "SplitPackage", url, c.s.Options()) +- return nil +-} +- +-func (c *commandHandler) Assembly(ctx context.Context, viewID, packageID, symbol string) error { +- web, err := c.s.getWeb() +- if err != nil { +- return err +- } +- url := web.assemblyURL(viewID, packageID, symbol) +- openClientBrowser(ctx, c.s.client, "Assembly", url, c.s.Options()) +- return nil +-} +- +-func (c *commandHandler) ClientOpenURL(ctx context.Context, url string) error { +- // Fall back to "Gopls: open your browser..." if we must send a showMessage +- // request, since we don't know the context of this command. +- openClientBrowser(ctx, c.s.client, "Gopls", url, c.s.Options()) +- return nil +-} +- +-func (c *commandHandler) ScanImports(ctx context.Context) error { +- for _, v := range c.s.session.Views() { +- v.ScanImports() +- } +- return nil +-} +- +-func (c *commandHandler) PackageSymbols(ctx context.Context, args command.PackageSymbolsArgs) (command.PackageSymbolsResult, error) { +- var result command.PackageSymbolsResult +- err := c.run(ctx, commandConfig{ +- forURI: args.URI, +- }, func(ctx context.Context, deps commandDeps) error { +- if deps.snapshot.FileKind(deps.fh) != file.Go { +- // golang/vscode-go#3681: fail silently, to avoid spurious error popups. 
+- return nil +- } +- res, err := golang.PackageSymbols(ctx, deps.snapshot, args.URI) +- if err != nil { +- return err +- } +- result = res +- return nil +- }) +- +- // sort symbols for determinism +- sort.SliceStable(result.Symbols, func(i, j int) bool { +- iv, jv := result.Symbols[i], result.Symbols[j] +- if iv.Name == jv.Name { +- return iv.Range.Start.Line < jv.Range.Start.Line +- } +- return iv.Name < jv.Name +- }) +- +- return result, err +-} +- +-// optionsStringToMap transforms comma-separated options of the form +-// "foo=bar,baz=quux" to a go map. Returns nil if any options are malformed. +-func optionsStringToMap(options string) (map[string][]string, error) { +- optionsMap := make(map[string][]string) +- for item := range strings.SplitSeq(options, ",") { +- key, option, found := strings.Cut(item, "=") +- if !found { +- return nil, fmt.Errorf("invalid option %q", item) +- } +- optionsMap[key] = append(optionsMap[key], option) +- } +- return optionsMap, nil +-} +- +-func (c *commandHandler) ModifyTags(ctx context.Context, args command.ModifyTagsArgs) error { +- return c.run(ctx, commandConfig{ +- progress: "Modifying tags", +- forURI: args.URI, +- }, func(ctx context.Context, deps commandDeps) error { +- m := &modifytags.Modification{ +- Clear: args.Clear, +- ClearOptions: args.ClearOptions, +- ValueFormat: args.ValueFormat, +- Overwrite: args.Overwrite, +- } +- +- transform, err := parseTransform(args.Transform) +- if err != nil { +- return err +- } +- m.Transform = transform +- +- // Each command involves either adding or removing tags, depending on +- // whether Add or Clear is set. +- if args.Add != "" { +- countAddStructTags.Inc() +- m.Add = strings.Split(args.Add, ",") +- } else if args.Clear { +- countRemoveStructTags.Inc() +- } +- if args.AddOptions != "" { +- if options, err := optionsStringToMap(args.AddOptions); err != nil { +- return err +- } else { +- m.AddOptions = options +- } +- } +- if args.Remove != "" { +- m.Remove = strings.Split(args.Remove, ",") +- } +- if args.RemoveOptions != "" { +- if options, err := optionsStringToMap(args.RemoveOptions); err != nil { +- return err +- } else { +- m.RemoveOptions = options +- } +- } +- fh, err := deps.snapshot.ReadFile(ctx, args.URI) +- if err != nil { +- return err +- } +- changes, err := golang.ModifyTags(ctx, deps.snapshot, fh, args, m) +- if err != nil { +- return err +- } +- return applyChanges(ctx, c.s.client, changes) +- }) +-} +- +-func parseTransform(input string) (modifytags.Transform, error) { +- switch input { +- case "camelcase": +- return modifytags.CamelCase, nil +- case "lispcase": +- return modifytags.LispCase, nil +- case "pascalcase": +- return modifytags.PascalCase, nil +- case "titlecase": +- return modifytags.TitleCase, nil +- case "keep": +- return modifytags.Keep, nil +- case "": +- fallthrough +- case "snakecase": +- return modifytags.SnakeCase, nil +- default: +- return modifytags.SnakeCase, fmt.Errorf("invalid Transform value") +- } +-} +diff -urN a/gopls/internal/server/completion.go b/gopls/internal/server/completion.go +--- a/gopls/internal/server/completion.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/server/completion.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,207 +0,0 @@ +-// Copyright 2018 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. 
+- +-package server +- +-import ( +- "context" +- "fmt" +- "strings" +- +- "golang.org/x/tools/gopls/internal/file" +- "golang.org/x/tools/gopls/internal/golang" +- "golang.org/x/tools/gopls/internal/golang/completion" +- "golang.org/x/tools/gopls/internal/label" +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/gopls/internal/settings" +- "golang.org/x/tools/gopls/internal/telemetry" +- "golang.org/x/tools/gopls/internal/template" +- "golang.org/x/tools/gopls/internal/work" +- "golang.org/x/tools/internal/event" +-) +- +-func (s *server) Completion(ctx context.Context, params *protocol.CompletionParams) (_ *protocol.CompletionList, rerr error) { +- recordLatency := telemetry.StartLatencyTimer("completion") +- defer func() { +- recordLatency(ctx, rerr) +- }() +- +- ctx, done := event.Start(ctx, "server.Completion", label.URI.Of(params.TextDocument.URI)) +- defer done() +- +- fh, snapshot, release, err := s.session.FileOf(ctx, params.TextDocument.URI) +- if err != nil { +- return nil, err +- } +- defer release() +- +- var candidates []completion.CompletionItem +- var surrounding *completion.Selection +- switch snapshot.FileKind(fh) { +- case file.Go: +- candidates, surrounding, err = completion.Completion(ctx, snapshot, fh, params.Position, params.Context) +- case file.Mod: +- candidates, surrounding = nil, nil +- case file.Work: +- cl, err := work.Completion(ctx, snapshot, fh, params.Position) +- if err != nil { +- break +- } +- return cl, nil +- case file.Tmpl: +- var cl *protocol.CompletionList +- cl, err = template.Completion(ctx, snapshot, fh, params.Position, params.Context) +- if err != nil { +- break // use common error handling, candidates==nil +- } +- return cl, nil +- } +- if err != nil { +- event.Error(ctx, "no completions found", err, label.Position.Of(params.Position)) +- } +- if candidates == nil || surrounding == nil { +- complEmpty.Inc() +- return &protocol.CompletionList{ +- IsIncomplete: true, +- Items: []protocol.CompletionItem{}, +- }, nil +- } +- +- // When using deep completions/fuzzy matching, report results as incomplete so +- // client fetches updated completions after every key stroke. +- options := snapshot.Options() +- incompleteResults := options.DeepCompletion || options.Matcher == settings.Fuzzy +- +- items, err := toProtocolCompletionItems(candidates, surrounding, options) +- if err != nil { +- return nil, err +- } +- if snapshot.FileKind(fh) == file.Go { +- s.saveLastCompletion(fh.URI(), fh.Version(), items, params.Position) +- } +- +- if len(items) > 10 { +- // TODO(pjw): long completions are ok for field lists +- complLong.Inc() +- } else { +- complShort.Inc() +- } +- return &protocol.CompletionList{ +- IsIncomplete: incompleteResults, +- Items: items, +- }, nil +-} +- +-func (s *server) saveLastCompletion(uri protocol.DocumentURI, version int32, items []protocol.CompletionItem, pos protocol.Position) { +- s.efficacyMu.Lock() +- defer s.efficacyMu.Unlock() +- s.efficacyVersion = version +- s.efficacyURI = uri +- s.efficacyPos = pos +- s.efficacyItems = items +-} +- +-// toProtocolCompletionItems converts the candidates to the protocol completion items, +-// the candidates must be sorted based on score as it will be respected by client side. 
+-func toProtocolCompletionItems(candidates []completion.CompletionItem, surrounding *completion.Selection, options *settings.Options) ([]protocol.CompletionItem, error) { +- replaceRng, err := surrounding.Range() +- if err != nil { +- return nil, err +- } +- insertRng0, err := surrounding.PrefixRange() +- if err != nil { +- return nil, err +- } +- suffix := surrounding.Suffix() +- +- var ( +- items = make([]protocol.CompletionItem, 0, len(candidates)) +- numDeepCompletionsSeen int +- ) +- for i, candidate := range candidates { +- // Limit the number of deep completions to not overwhelm the user in cases +- // with dozens of deep completion matches. +- if candidate.Depth > 0 { +- if !options.DeepCompletion { +- continue +- } +- if numDeepCompletionsSeen >= completion.MaxDeepCompletions { +- continue +- } +- numDeepCompletionsSeen++ +- } +- insertText := candidate.InsertText +- if options.InsertTextFormat == protocol.SnippetTextFormat { +- insertText = candidate.Snippet() +- } +- +- // This can happen if the client has snippets disabled but the +- // candidate only supports snippet insertion. +- if insertText == "" { +- continue +- } +- +- var doc *protocol.Or_CompletionItem_documentation +- if candidate.Documentation != "" { +- var value any +- if options.PreferredContentFormat == protocol.Markdown { +- value = protocol.MarkupContent{ +- Kind: protocol.Markdown, +- Value: golang.DocCommentToMarkdown(candidate.Documentation, options), +- } +- } else { +- value = candidate.Documentation +- } +- doc = &protocol.Or_CompletionItem_documentation{Value: value} +- } +- var edits *protocol.Or_CompletionItem_textEdit +- if options.InsertReplaceSupported { +- insertRng := insertRng0 +- if suffix == "" || strings.Contains(insertText, suffix) { +- insertRng = replaceRng +- } +- // Insert and Replace ranges share the same start position and +- // the same text edit but the end position may differ. +- // See the comment for the CompletionItem's TextEdit field. +- // https://pkg.go.dev/golang.org/x/tools/gopls/internal/protocol#CompletionItem +- edits = &protocol.Or_CompletionItem_textEdit{ +- Value: protocol.InsertReplaceEdit{ +- NewText: insertText, +- Insert: insertRng, // replace up to the cursor position. +- Replace: replaceRng, +- }, +- } +- } else { +- edits = &protocol.Or_CompletionItem_textEdit{ +- Value: protocol.TextEdit{ +- NewText: insertText, +- Range: replaceRng, +- }, +- } +- } +- item := protocol.CompletionItem{ +- Label: candidate.Label, +- Detail: candidate.Detail, +- Kind: candidate.Kind, +- TextEdit: edits, +- InsertTextFormat: &options.InsertTextFormat, +- AdditionalTextEdits: candidate.AdditionalTextEdits, +- // This is a hack so that the client sorts completion results in the order +- // according to their score. This can be removed upon the resolution of +- // https://github.com/Microsoft/language-server-protocol/issues/348. +- SortText: fmt.Sprintf("%05d", i), +- +- // Trim operators (VSCode doesn't like weird characters in +- // filterText). 
+- FilterText: strings.TrimLeft(candidate.InsertText, "&*"), +- +- Preselect: i == 0, +- Documentation: doc, +- Tags: protocol.NonNilSlice(candidate.Tags), +- Deprecated: candidate.Deprecated, +- } +- items = append(items, item) +- } +- return items, nil +-} +diff -urN a/gopls/internal/server/counters.go b/gopls/internal/server/counters.go +--- a/gopls/internal/server/counters.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/server/counters.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,45 +0,0 @@ +-// Copyright 2024 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package server +- +-import "golang.org/x/telemetry/counter" +- +-// Proposed counters for evaluating gopls code completion. +-var ( +- complEmpty = counter.New("gopls/completion/len:0") // count empty suggestions +- complShort = counter.New("gopls/completion/len:<=10") // not empty, not long +- complLong = counter.New("gopls/completion/len:>10") // returning more than 10 items +- +- changeFull = counter.New("gopls/completion/used:unknown") // full file change in didChange +- complUnused = counter.New("gopls/completion/used:no") // did not use a completion +- complUsed = counter.New("gopls/completion/used:yes") // used a completion +- +- // exported so tests can verify that counters are incremented +- CompletionCounters = []*counter.Counter{ +- complEmpty, +- complShort, +- complLong, +- changeFull, +- complUnused, +- complUsed, +- } +-) +- +-// Proposed counters for evaluating gopls change signature and rename. These +-// counters increment when the user attempts to perform one of these operations, +-// regardless of whether it succeeds. +-var ( +- countChangeSignature = counter.New("gopls/changesig") +- +- countRename = counter.New("gopls/rename") +-) +- +-// Proposed counters for evaluating gopls refactoring codeactions add struct +-// tags and remove struct tags. +-var ( +- countAddStructTags = counter.New("gopls/structtags:add") +- +- countRemoveStructTags = counter.New("gopls/structtags:remove") +-) +diff -urN a/gopls/internal/server/debug.go b/gopls/internal/server/debug.go +--- a/gopls/internal/server/debug.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/server/debug.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,12 +0,0 @@ +-// Copyright 2024 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package server +- +-// assert panics with the given msg if cond is not true. +-func assert(cond bool, msg string) { +- if !cond { +- panic(msg) +- } +-} +diff -urN a/gopls/internal/server/definition.go b/gopls/internal/server/definition.go +--- a/gopls/internal/server/definition.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/server/definition.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,64 +0,0 @@ +-// Copyright 2019 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. 
+- +-package server +- +-import ( +- "context" +- "fmt" +- +- "golang.org/x/tools/gopls/internal/file" +- "golang.org/x/tools/gopls/internal/goasm" +- "golang.org/x/tools/gopls/internal/golang" +- "golang.org/x/tools/gopls/internal/label" +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/gopls/internal/telemetry" +- "golang.org/x/tools/gopls/internal/template" +- "golang.org/x/tools/internal/event" +-) +- +-func (s *server) Definition(ctx context.Context, params *protocol.DefinitionParams) (_ []protocol.Location, rerr error) { +- recordLatency := telemetry.StartLatencyTimer("definition") +- defer func() { +- recordLatency(ctx, rerr) +- }() +- +- ctx, done := event.Start(ctx, "server.Definition", label.URI.Of(params.TextDocument.URI)) +- defer done() +- +- // TODO(rfindley): definition requests should be multiplexed across all views. +- fh, snapshot, release, err := s.session.FileOf(ctx, params.TextDocument.URI) +- if err != nil { +- return nil, err +- } +- defer release() +- switch kind := snapshot.FileKind(fh); kind { +- case file.Tmpl: +- return template.Definition(snapshot, fh, params.Position) +- case file.Go: +- return golang.Definition(ctx, snapshot, fh, params.Position) +- case file.Asm: +- return goasm.Definition(ctx, snapshot, fh, params.Position) +- default: +- return nil, fmt.Errorf("can't find definitions for file type %s", kind) +- } +-} +- +-func (s *server) TypeDefinition(ctx context.Context, params *protocol.TypeDefinitionParams) ([]protocol.Location, error) { +- ctx, done := event.Start(ctx, "server.TypeDefinition", label.URI.Of(params.TextDocument.URI)) +- defer done() +- +- // TODO(rfindley): type definition requests should be multiplexed across all views. +- fh, snapshot, release, err := s.session.FileOf(ctx, params.TextDocument.URI) +- if err != nil { +- return nil, err +- } +- defer release() +- switch kind := snapshot.FileKind(fh); kind { +- case file.Go: +- return golang.TypeDefinition(ctx, snapshot, fh, params.Position) +- default: +- return nil, fmt.Errorf("can't find type definitions for file type %s", kind) +- } +-} +diff -urN a/gopls/internal/server/diagnostics.go b/gopls/internal/server/diagnostics.go +--- a/gopls/internal/server/diagnostics.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/server/diagnostics.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,931 +0,0 @@ +-// Copyright 2018 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package server +- +-import ( +- "context" +- "errors" +- "fmt" +- "os" +- "path/filepath" +- "runtime" +- "slices" +- "sort" +- "strings" +- "sync" +- "time" +- +- "golang.org/x/tools/gopls/internal/cache" +- "golang.org/x/tools/gopls/internal/cache/metadata" +- "golang.org/x/tools/gopls/internal/file" +- "golang.org/x/tools/gopls/internal/golang" +- "golang.org/x/tools/gopls/internal/label" +- "golang.org/x/tools/gopls/internal/mod" +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/gopls/internal/settings" +- "golang.org/x/tools/gopls/internal/template" +- "golang.org/x/tools/gopls/internal/util/moremaps" +- "golang.org/x/tools/gopls/internal/work" +- "golang.org/x/tools/internal/event" +- "golang.org/x/tools/internal/event/keys" +- "golang.org/x/tools/internal/jsonrpc2" +-) +- +-// Diagnostic implements the textDocument/diagnostic LSP request, reporting +-// diagnostics for the given file. +-// +-// This is a work in progress. 
+-// TODO(rfindley): +-// - support RelatedDocuments? If so, how? Maybe include other package diagnostics? +-// - support resultID (=snapshot ID) +-// - support multiple views +-// - add orphaned file diagnostics +-// - support go.mod, go.work files +-func (s *server) Diagnostic(ctx context.Context, params *protocol.DocumentDiagnosticParams) (*protocol.DocumentDiagnosticReport, error) { +- ctx, done := event.Start(ctx, "server.Diagnostic") +- defer done() +- +- fh, snapshot, release, err := s.session.FileOf(ctx, params.TextDocument.URI) +- if err != nil { +- return nil, err +- } +- defer release() +- +- jsonrpc2.Async(ctx) // allow asynchronous collection of diagnostics +- +- uri := fh.URI() +- kind := snapshot.FileKind(fh) +- var diagnostics []*cache.Diagnostic +- switch kind { +- case file.Go: +- diagnostics, err = golang.DiagnoseFile(ctx, snapshot, uri) +- if err != nil { +- return nil, err +- } +- default: +- return nil, fmt.Errorf("pull diagnostics not supported for this file kind") +- } +- return &protocol.DocumentDiagnosticReport{ +- Value: protocol.RelatedFullDocumentDiagnosticReport{ +- FullDocumentDiagnosticReport: protocol.FullDocumentDiagnosticReport{ +- Items: cache.ToProtocolDiagnostics(diagnostics...), +- }, +- }, +- }, nil +-} +- +-// fileDiagnostics holds the current state of published diagnostics for a file. +-type fileDiagnostics struct { +- publishedHash file.Hash // hash of the last set of diagnostics published for this URI +- mustPublish bool // if set, publish diagnostics even if they haven't changed +- +- // Orphaned file diagnostics are not necessarily associated with any *View +- // (since they are orphaned). Instead, keep track of the modification ID at +- // which they were orphaned (see server.lastModificationID). +- orphanedAt uint64 // modification ID at which this file was orphaned. +- orphanedFileDiagnostics []*cache.Diagnostic +- +- // Files may have their diagnostics computed by multiple views, and so +- // diagnostics are organized by View. See the documentation for update for more +- // details about how the set of file diagnostics evolves over time. +- byView map[*cache.View]viewDiagnostics +-} +- +-// viewDiagnostics holds a set of file diagnostics computed from a given View. +-type viewDiagnostics struct { +- snapshot uint64 // snapshot sequence ID +- version int32 // file version +- diagnostics []*cache.Diagnostic +-} +- +-// common types; for brevity +-type ( +- viewSet = map[*cache.View]unit +- diagMap = map[protocol.DocumentURI][]*cache.Diagnostic +-) +- +-func sortDiagnostics(d []*cache.Diagnostic) { +- sort.Slice(d, func(i int, j int) bool { +- a, b := d[i], d[j] +- if r := protocol.CompareRange(a.Range, b.Range); r != 0 { +- return r < 0 +- } +- if a.Source != b.Source { +- return a.Source < b.Source +- } +- return a.Message < b.Message +- }) +-} +- +-func (s *server) diagnoseChangedViews(ctx context.Context, modID uint64, lastChange map[*cache.View][]protocol.DocumentURI, cause ModificationSource) { +- // Collect views needing diagnosis. +- s.modificationMu.Lock() +- needsDiagnosis := moremaps.KeySlice(s.viewsToDiagnose) +- s.modificationMu.Unlock() +- +- // Diagnose views concurrently. +- var wg sync.WaitGroup +- for _, v := range needsDiagnosis { +- snapshot, release, err := v.Snapshot() +- if err != nil { +- s.modificationMu.Lock() +- // The View is shut down. Unlike below, no need to check +- // s.needsDiagnosis[v], since the view can never be diagnosed. 
+- delete(s.viewsToDiagnose, v) +- s.modificationMu.Unlock() +- continue +- } +- +- // Collect uris for fast diagnosis. We only care about the most recent +- // change here, because this is just an optimization for the case where the +- // user is actively editing a single file. +- uris := lastChange[v] +- if snapshot.Options().DiagnosticsTrigger == settings.DiagnosticsOnSave && cause == FromDidChange { +- // The user requested to update the diagnostics only on save. +- // Do not diagnose yet. +- release() +- continue +- } +- +- wg.Add(1) +- go func(snapshot *cache.Snapshot, uris []protocol.DocumentURI) { +- defer release() +- defer wg.Done() +- s.diagnoseSnapshot(ctx, snapshot, uris, snapshot.Options().DiagnosticsDelay) +- s.modificationMu.Lock() +- +- // Only remove v from s.viewsToDiagnose if the context is not cancelled. +- // This ensures that the snapshot was not cloned before its state was +- // fully evaluated, and therefore avoids missing a change that was +- // irrelevant to an incomplete snapshot. +- // +- // See the documentation for s.viewsToDiagnose for details. +- if ctx.Err() == nil && s.viewsToDiagnose[v] <= modID { +- delete(s.viewsToDiagnose, v) +- } +- s.modificationMu.Unlock() +- }(snapshot, uris) +- } +- +- wg.Wait() +- +- // Diagnose orphaned files for the session. +- orphanedFileDiagnostics, err := s.session.OrphanedFileDiagnostics(ctx) +- if err == nil { +- err = s.updateOrphanedFileDiagnostics(ctx, modID, orphanedFileDiagnostics) +- } +- if err != nil { +- if ctx.Err() == nil { +- event.Error(ctx, "warning: while diagnosing orphaned files", err) +- } +- } +-} +- +-// diagnoseSnapshot computes and publishes diagnostics for the given snapshot. +-// +-// If delay is non-zero, computing diagnostics does not start until after this +-// delay has expired, to allow work to be cancelled by subsequent changes. +-// +-// If changedURIs is non-empty, it is a set of recently changed files that +-// should be diagnosed immediately, and onDisk reports whether these file +-// changes came from a change to on-disk files. +-// +-// If the provided context is cancelled, diagnostics may be partially +-// published. Therefore, the provided context should only be cancelled if there +-// will be a subsequent operation to make diagnostics consistent. In general, +-// if an operation creates a new snapshot, it is responsible for ensuring that +-// snapshot (or a subsequent snapshot in the same View) is eventually +-// diagnosed. +-func (s *server) diagnoseSnapshot(ctx context.Context, snapshot *cache.Snapshot, changedURIs []protocol.DocumentURI, delay time.Duration) { +- ctx, done := event.Start(ctx, "server.diagnoseSnapshot", snapshot.Labels()...) +- defer done() +- +- if delay > 0 { +- // 2-phase diagnostics. +- // +- // The first phase just parses and type-checks (but +- // does not analyze) packages directly affected by +- // file modifications. +- // +- // The second phase runs after the delay, and does everything. +- +- if len(changedURIs) > 0 { +- diagnostics, err := diagnoseChangedFiles(ctx, snapshot, changedURIs) +- if err != nil { +- if ctx.Err() == nil { +- event.Error(ctx, "warning: while diagnosing changed files", err, snapshot.Labels()...) 
+- } +- return +- } +- s.updateDiagnostics(ctx, snapshot, diagnostics, false) +- } +- +- select { +- case <-time.After(delay): +- case <-ctx.Done(): +- return +- } +- } +- +- diagnostics, err := s.diagnose(ctx, snapshot) +- if err != nil { +- if ctx.Err() == nil { +- event.Error(ctx, "warning: while diagnosing snapshot", err, snapshot.Labels()...) +- } +- return +- } +- s.updateDiagnostics(ctx, snapshot, diagnostics, true) +-} +- +-func diagnoseChangedFiles(ctx context.Context, snapshot *cache.Snapshot, uris []protocol.DocumentURI) (diagMap, error) { +- ctx, done := event.Start(ctx, "server.diagnoseChangedFiles", snapshot.Labels()...) +- defer done() +- +- toDiagnose := make(map[metadata.PackageID]*metadata.Package) +- for _, uri := range uris { +- // If the file is not open, don't diagnose its package. +- // +- // We don't care about fast diagnostics for files that are no longer open, +- // because the user isn't looking at them. Also, explicitly requesting a +- // package can lead to "command-line-arguments" packages if the file isn't +- // covered by the current View. By avoiding requesting packages for e.g. +- // unrelated file movement, we can minimize these unnecessary packages. +- if !snapshot.IsOpen(uri) { +- continue +- } +- // If the file is not known to the snapshot (e.g., if it was deleted), +- // don't diagnose it. +- if snapshot.FindFile(uri) == nil { +- continue +- } +- +- // Don't request type-checking for builtin.go: it's not a real package. +- if snapshot.IsBuiltin(uri) { +- continue +- } +- +- // Don't diagnose files that are ignored by `go list` (e.g. testdata). +- if snapshot.IgnoredFile(uri) { +- continue +- } +- +- // Find all packages that include this file and diagnose them in parallel. +- meta, err := snapshot.NarrowestMetadataForFile(ctx, uri) +- if err != nil { +- if ctx.Err() != nil { +- return nil, ctx.Err() +- } +- // TODO(findleyr): we should probably do something with the error here, +- // but as of now this can fail repeatedly if load fails, so can be too +- // noisy to log (and we'll handle things later in the slow pass). +- continue +- } +- // golang/go#65801: only diagnose changes to workspace packages. Otherwise, +- // diagnostics will be unstable, as the slow-path diagnostics will erase +- // them. +- if snapshot.IsWorkspacePackage(meta.ID) { +- toDiagnose[meta.ID] = meta +- } +- } +- diags, err := snapshot.PackageDiagnostics(ctx, moremaps.KeySlice(toDiagnose)...) +- if err != nil { +- if ctx.Err() == nil { +- event.Error(ctx, "warning: diagnostics failed", err, snapshot.Labels()...) +- } +- return nil, err +- } +- // golang/go#59587: guarantee that we compute type-checking diagnostics +- // for every compiled package file, otherwise diagnostics won't be quickly +- // cleared following a fix. +- for _, meta := range toDiagnose { +- for _, uri := range meta.CompiledGoFiles { +- if _, ok := diags[uri]; !ok { +- diags[uri] = nil +- } +- } +- } +- return diags, nil +-} +- +-func (s *server) diagnose(ctx context.Context, snapshot *cache.Snapshot) (diagMap, error) { +- ctx, done := event.Start(ctx, "server.diagnose", snapshot.Labels()...) +- defer done() +- +- // Wait for a free diagnostics slot. +- // TODO(adonovan): opt: shouldn't it be the analysis implementation's +- // job to de-dup and limit resource consumption? In any case this +- // function spends most its time waiting for awaitLoaded, at +- // least initially. 
+- select { +- case <-ctx.Done(): +- return nil, ctx.Err() +- case s.diagnosticsSema <- struct{}{}: +- } +- defer func() { +- <-s.diagnosticsSema +- }() +- +- var ( +- diagnosticsMu sync.Mutex +- diagnostics = make(diagMap) +- ) +- // common code for dispatching diagnostics +- store := func(operation string, diagsByFile diagMap, err error) { +- if err != nil { +- if ctx.Err() == nil { +- event.Error(ctx, "warning: while "+operation, err, snapshot.Labels()...) +- } +- return +- } +- diagnosticsMu.Lock() +- defer diagnosticsMu.Unlock() +- for uri, diags := range diagsByFile { +- diagnostics[uri] = append(diagnostics[uri], diags...) +- } +- } +- +- // Diagnostics below are organized by increasing specificity: +- // go.work > mod > mod upgrade > mod vuln > package, etc. +- +- // Diagnose go.work file. +- workReports, workErr := work.Diagnostics(ctx, snapshot) +- if ctx.Err() != nil { +- return nil, ctx.Err() +- } +- store("diagnosing go.work file", workReports, workErr) +- +- // Diagnose go.mod file. +- modReports, modErr := mod.ParseDiagnostics(ctx, snapshot) +- if ctx.Err() != nil { +- return nil, ctx.Err() +- } +- store("diagnosing go.mod file", modReports, modErr) +- +- // Diagnose go.mod upgrades. +- upgradeReports, upgradeErr := mod.UpgradeDiagnostics(ctx, snapshot) +- if ctx.Err() != nil { +- return nil, ctx.Err() +- } +- store("diagnosing go.mod upgrades", upgradeReports, upgradeErr) +- +- // Diagnose vulnerabilities. +- vulnReports, vulnErr := mod.VulnerabilityDiagnostics(ctx, snapshot) +- if ctx.Err() != nil { +- return nil, ctx.Err() +- } +- store("diagnosing vulnerabilities", vulnReports, vulnErr) +- +- workspacePkgs, err := snapshot.WorkspaceMetadata(ctx) +- if s.shouldIgnoreError(snapshot, err) { +- return diagnostics, ctx.Err() +- } +- +- initialErr := snapshot.InitializationError() +- if ctx.Err() != nil { +- // Don't update initialization status if the context is cancelled. +- return nil, ctx.Err() +- } +- +- if initialErr != nil { +- store("critical error", initialErr.Diagnostics, nil) +- } +- +- // Show the error as a progress error report so that it appears in the +- // status bar. If a client doesn't support progress reports, the error +- // will still be shown as a ShowMessage. If there is no error, any running +- // error progress reports will be closed. +- statusErr := initialErr +- if len(snapshot.Overlays()) == 0 { +- // Don't report a hanging status message if there are no open files at this +- // snapshot. +- statusErr = nil +- } +- s.updateCriticalErrorStatus(ctx, snapshot, statusErr) +- +- // Diagnose template (.tmpl) files. +- tmplReports := template.Diagnostics(snapshot) +- // NOTE(rfindley): typeCheckSource is not accurate here. +- // (but this will be gone soon anyway). +- store("diagnosing templates", tmplReports, nil) +- +- // If there are no workspace packages, there is nothing to diagnose and +- // there are no orphaned files. +- if len(workspacePkgs) == 0 { +- return diagnostics, nil +- } +- +- var wg sync.WaitGroup // for potentially slow operations below +- +- // Maybe run go mod tidy (if it has been invalidated). +- // +- // Since go mod tidy can be slow, we run it concurrently to diagnostics. +- wg.Go(func() { +- modTidyReports, err := mod.TidyDiagnostics(ctx, snapshot) +- store("running go mod tidy", modTidyReports, err) +- }) +- +- // Run type checking and go/analysis diagnosis of packages in parallel. +- // +- // For analysis, we use the *widest* package for each open file, +- // for two reasons: +- // +- // - Correctness: some analyzers (e.g. 
unused{param,func}) depend +- // on it. If applied to a non-test package for which a +- // corresponding test package exists, they make assumptions +- // that are falsified in the test package, for example that +- // all references to unexported symbols are visible to the +- // analysis. +- // +- // - Efficiency: it may yield a smaller covering set of +- // PackageIDs for a given set of files. For example, {x.go, +- // x_test.go} is covered by the single package x_test using +- // "widest". (Using "narrowest", it would be covered only by +- // the pair of packages {x, x_test}, Originally we used all +- // covering packages, so {x.go} alone would be analyzed +- // twice.) +- var ( +- toDiagnose = make(map[metadata.PackageID]*metadata.Package) +- toAnalyze = make(map[metadata.PackageID]*metadata.Package) +- +- // secondary index, used to eliminate narrower packages. +- toAnalyzeWidest = make(map[golang.PackagePath]*metadata.Package) +- ) +- for _, mp := range workspacePkgs { +- var hasNonIgnored, hasOpenFile bool +- for _, uri := range mp.CompiledGoFiles { +- if !hasNonIgnored && !snapshot.IgnoredFile(uri) { +- hasNonIgnored = true +- } +- if !hasOpenFile && snapshot.IsOpen(uri) { +- hasOpenFile = true +- } +- } +- if hasNonIgnored { +- toDiagnose[mp.ID] = mp +- if hasOpenFile { +- if prev, ok := toAnalyzeWidest[mp.PkgPath]; ok { +- if len(prev.CompiledGoFiles) >= len(mp.CompiledGoFiles) { +- // Previous entry is not narrower; keep it. +- continue +- } +- // Evict previous (narrower) entry. +- delete(toAnalyze, prev.ID) +- } +- toAnalyze[mp.ID] = mp +- toAnalyzeWidest[mp.PkgPath] = mp +- } +- } +- } +- +- wg.Go(func() { +- compilerOptDetailsDiags, err := s.compilerOptDetailsDiagnostics(ctx, snapshot, toDiagnose) +- store("collecting compiler optimization details", compilerOptDetailsDiags, err) +- }) +- +- // Package diagnostics and analysis diagnostics must both be computed and +- // merged before they can be reported. +- var pkgDiags, analysisDiags diagMap +- // Collect package diagnostics. +- wg.Go(func() { +- var err error +- pkgDiags, err = snapshot.PackageDiagnostics(ctx, moremaps.KeySlice(toDiagnose)...) +- if err != nil { +- event.Error(ctx, "warning: diagnostics failed", err, snapshot.Labels()...) +- } +- }) +- +- // Get diagnostics from analysis framework. +- // This includes type-error analyzers, which suggest fixes to compiler errors. +- wg.Go(func() { +- var err error +- // TODO(rfindley): here and above, we should avoid using the first result +- // if err is non-nil (though as of today it's OK). +- analysisDiags, err = golang.Analyze(ctx, snapshot, toAnalyze, s.progress) +- +- // Filter out Hint diagnostics for closed files. +- // VS Code already omits Hint diagnostics in the Problems tab, but other +- // clients do not. This filter makes the visibility of Hints more similar +- // across clients. +- for uri, diags := range analysisDiags { +- if !snapshot.IsOpen(uri) { +- newDiags := slices.DeleteFunc(diags, func(diag *cache.Diagnostic) bool { +- return diag.Severity == protocol.SeverityHint +- }) +- if len(newDiags) == 0 { +- delete(analysisDiags, uri) +- } else { +- analysisDiags[uri] = newDiags +- } +- } +- } +- +- if err != nil { +- event.Error(ctx, "warning: analyzing package", err, append(snapshot.Labels(), label.Package.Of(keys.Join(moremaps.KeySlice(toDiagnose))))...) +- return +- } +- }) +- +- wg.Wait() +- +- // Merge analysis diagnostics with package diagnostics, and store the +- // resulting analysis diagnostics. 
+- combinedDiags := make(diagMap) +- for uri, adiags := range analysisDiags { +- tdiags := pkgDiags[uri] +- combinedDiags[uri] = golang.CombineDiagnostics(tdiags, adiags) +- } +- for uri, tdiags := range pkgDiags { +- if _, ok := combinedDiags[uri]; !ok { +- combinedDiags[uri] = tdiags +- } +- } +- store("type checking and analysing", combinedDiags, nil) // error reported above +- +- return diagnostics, nil +-} +- +-func (s *server) compilerOptDetailsDiagnostics(ctx context.Context, snapshot *cache.Snapshot, toDiagnose map[metadata.PackageID]*metadata.Package) (diagMap, error) { +- // Process requested diagnostics about compiler optimization details. +- // +- // TODO(rfindley): This should memoize its results if the package has not changed. +- // Consider that these points, in combination with the note below about +- // races, suggest that compiler optimization details should be tracked on the Snapshot. +- diagnostics := make(diagMap) +- seenDirs := make(map[protocol.DocumentURI]bool) +- for _, mp := range toDiagnose { +- if len(mp.CompiledGoFiles) == 0 { +- continue +- } +- dir := mp.CompiledGoFiles[0].Dir() +- if snapshot.WantCompilerOptDetails(dir) { +- if !seenDirs[dir] { +- seenDirs[dir] = true +- +- perFileDiags, err := golang.CompilerOptDetails(ctx, snapshot, dir) +- if err != nil { +- event.Error(ctx, "warning: compiler optimization details", err, append(snapshot.Labels(), label.URI.Of(dir))...) +- continue +- } +- for uri, diags := range perFileDiags { +- diagnostics[uri] = append(diagnostics[uri], diags...) +- } +- } +- } +- } +- return diagnostics, nil +-} +- +-// mustPublishDiagnostics marks the uri as needing publication, independent of +-// whether the published contents have changed. +-// +-// This can be used for ensuring gopls publishes diagnostics after certain file +-// events. +-func (s *server) mustPublishDiagnostics(uri protocol.DocumentURI) { +- s.diagnosticsMu.Lock() +- defer s.diagnosticsMu.Unlock() +- +- if s.diagnostics[uri] == nil { +- s.diagnostics[uri] = new(fileDiagnostics) +- } +- s.diagnostics[uri].mustPublish = true +-} +- +-const WorkspaceLoadFailure = "Error loading workspace" +- +-// updateCriticalErrorStatus updates the critical error progress notification +-// based on err. +-// +-// If err is nil, or if there are no open files, it clears any existing error +-// progress report. +-func (s *server) updateCriticalErrorStatus(ctx context.Context, snapshot *cache.Snapshot, err *cache.InitializationError) { +- s.criticalErrorStatusMu.Lock() +- defer s.criticalErrorStatusMu.Unlock() +- +- // Remove all newlines so that the error message can be formatted in a +- // status bar. +- var errMsg string +- if err != nil { +- errMsg = strings.ReplaceAll(err.MainError.Error(), "\n", " ") +- } +- +- if s.criticalErrorStatus == nil { +- if errMsg != "" { +- event.Error(ctx, "errors loading workspace", err.MainError, snapshot.Labels()...) +- s.criticalErrorStatus = s.progress.Start(ctx, WorkspaceLoadFailure, errMsg, nil, nil) +- } +- return +- } +- +- // If an error is already shown to the user, update it or mark it as +- // resolved. +- if errMsg == "" { +- s.criticalErrorStatus.End(ctx, "Done.") +- s.criticalErrorStatus = nil +- } else { +- s.criticalErrorStatus.Report(ctx, errMsg, 0) +- } +-} +- +-// updateDiagnostics records the result of diagnosing a snapshot, and publishes +-// any diagnostics that need to be updated on the client. 
+-func (s *server) updateDiagnostics(ctx context.Context, snapshot *cache.Snapshot, diagnostics diagMap, final bool) { +- ctx, done := event.Start(ctx, "server.publishDiagnostics") +- defer done() +- +- s.diagnosticsMu.Lock() +- defer s.diagnosticsMu.Unlock() +- +- // Before updating any diagnostics, check that the context (i.e. snapshot +- // background context) is not cancelled. +- // +- // If not, then we know that we haven't started diagnosing the next snapshot, +- // because the previous snapshot is cancelled before the next snapshot is +- // returned from Invalidate. +- // +- // Therefore, even if we publish stale diagnostics here, they should +- // eventually be overwritten with accurate diagnostics. +- // +- // TODO(rfindley): refactor the API to force that snapshots are diagnosed +- // after they are created. +- if ctx.Err() != nil { +- return +- } +- +- // golang/go#65312: since the set of diagnostics depends on the set of views, +- // we get the views *after* locking diagnosticsMu. This ensures that +- // updateDiagnostics does not incorrectly delete diagnostics that have been +- // set for an existing view that was created between the call to +- // s.session.Views() and updateDiagnostics. +- viewMap := make(viewSet) +- for _, v := range s.session.Views() { +- viewMap[v] = unit{} +- } +- +- // updateAndPublish updates diagnostics for a file, checking both the latest +- // diagnostics for the current snapshot, as well as reconciling the set of +- // views. +- updateAndPublish := func(uri protocol.DocumentURI, f *fileDiagnostics, diags []*cache.Diagnostic) error { +- current, ok := f.byView[snapshot.View()] +- // Update the stored diagnostics if: +- // 1. we've never seen diagnostics for this view, +- // 2. diagnostics are for an older snapshot, or +- // 3. we're overwriting with final diagnostics +- // +- // In other words, we shouldn't overwrite existing diagnostics for a +- // snapshot with non-final diagnostics. This avoids the race described at +- // https://github.com/golang/go/issues/64765#issuecomment-1890144575. +- if !ok || current.snapshot < snapshot.SequenceID() || (current.snapshot == snapshot.SequenceID() && final) { +- fh, err := snapshot.ReadFile(ctx, uri) +- if err != nil { +- return err +- } +- current = viewDiagnostics{ +- snapshot: snapshot.SequenceID(), +- version: fh.Version(), +- diagnostics: diags, +- } +- if f.byView == nil { +- f.byView = make(map[*cache.View]viewDiagnostics) +- } +- f.byView[snapshot.View()] = current +- } +- +- return s.publishFileDiagnosticsLocked(ctx, viewMap, uri, current.version, f) +- } +- +- seen := make(map[protocol.DocumentURI]bool) +- for uri, diags := range diagnostics { +- f, ok := s.diagnostics[uri] +- if !ok { +- f = new(fileDiagnostics) +- s.diagnostics[uri] = f +- } +- seen[uri] = true +- if err := updateAndPublish(uri, f, diags); err != nil { +- if ctx.Err() != nil { +- return +- } else { +- event.Error(ctx, "updateDiagnostics: failed to deliver diagnostics", err, label.URI.Of(uri)) +- } +- } +- } +- +- // TODO(rfindley): perhaps we should clean up files that have no diagnostics. +- // One could imagine a large operation generating diagnostics for a great +- // number of files, after which gopls has to do more bookkeeping into the +- // future. 
+- if final { +- for uri, f := range s.diagnostics { +- if !seen[uri] { +- if err := updateAndPublish(uri, f, nil); err != nil { +- if ctx.Err() != nil { +- return +- } else { +- event.Error(ctx, "updateDiagnostics: failed to deliver diagnostics", err, label.URI.Of(uri)) +- } +- } +- } +- } +- } +-} +- +-// updateOrphanedFileDiagnostics records and publishes orphaned file +-// diagnostics as a given modification time. +-func (s *server) updateOrphanedFileDiagnostics(ctx context.Context, modID uint64, diagnostics diagMap) error { +- views := s.session.Views() +- viewSet := make(viewSet) +- for _, v := range views { +- viewSet[v] = unit{} +- } +- +- s.diagnosticsMu.Lock() +- defer s.diagnosticsMu.Unlock() +- +- for uri, diags := range diagnostics { +- f, ok := s.diagnostics[uri] +- if !ok { +- f = new(fileDiagnostics) +- s.diagnostics[uri] = f +- } +- if f.orphanedAt > modID { +- continue +- } +- f.orphanedAt = modID +- f.orphanedFileDiagnostics = diags +- // TODO(rfindley): the version of this file is potentially inaccurate; +- // nevertheless, it should be eventually consistent, because all +- // modifications are diagnosed. +- fh, err := s.session.ReadFile(ctx, uri) +- if err != nil { +- return err +- } +- if err := s.publishFileDiagnosticsLocked(ctx, viewSet, uri, fh.Version(), f); err != nil { +- return err +- } +- } +- +- // Clear any stale orphaned file diagnostics. +- for uri, f := range s.diagnostics { +- if f.orphanedAt < modID { +- f.orphanedFileDiagnostics = nil +- } +- fh, err := s.session.ReadFile(ctx, uri) +- if err != nil { +- return err +- } +- if err := s.publishFileDiagnosticsLocked(ctx, viewSet, uri, fh.Version(), f); err != nil { +- return err +- } +- } +- return nil +-} +- +-// publishFileDiagnosticsLocked publishes a fileDiagnostics value, while holding s.diagnosticsMu. +-// +-// If the publication succeeds, it updates f.publishedHash and f.mustPublish. +-func (s *server) publishFileDiagnosticsLocked(ctx context.Context, views viewSet, uri protocol.DocumentURI, version int32, f *fileDiagnostics) error { +- // We add a disambiguating suffix (e.g. " [darwin,arm64]") to +- // each diagnostic that doesn't occur in the default view; +- // see golang/go#65496. +- type diagSuffix struct { +- diag *cache.Diagnostic +- suffix string // "" for default build (or orphans) +- } +- +- // diagSuffixes records the set of view suffixes for a given diagnostic. +- diagSuffixes := make(map[file.Hash][]diagSuffix) +- add := func(diag *cache.Diagnostic, suffix string) { +- h := diag.Hash() +- diagSuffixes[h] = append(diagSuffixes[h], diagSuffix{diag, suffix}) +- } +- +- // Construct the inverse mapping, from diagnostic (hash) to its suffixes (views). +- for _, diag := range f.orphanedFileDiagnostics { +- add(diag, "") +- } +- +- var allViews []*cache.View +- for view, viewDiags := range f.byView { +- if _, ok := views[view]; !ok { +- delete(f.byView, view) // view no longer exists +- continue +- } +- if viewDiags.version != version { +- continue // a payload of diagnostics applies to a specific file version +- } +- allViews = append(allViews, view) +- } +- +- // Only report diagnostics from relevant views for a file. This avoids +- // spurious import errors when a view has only a partial set of dependencies +- // for a package (golang/go#66425). +- // +- // It's ok to use the session to derive the eligible views, because we +- // publish diagnostics following any state change, so the set of relevant +- // views is eventually consistent. 
+- relevantViews, err := cache.RelevantViews(ctx, s.session, uri, allViews) +- if err != nil { +- return err +- } +- +- if len(relevantViews) == 0 { +- // If we have no preferred diagnostics for a given file (i.e., the file is +- // not naturally nested within a view), then all diagnostics should be +- // considered valid. +- // +- // This could arise if the user jumps to definition outside the workspace. +- // There is no view that owns the file, so its diagnostics are valid from +- // any view. +- relevantViews = allViews +- } +- +- for _, view := range relevantViews { +- viewDiags := f.byView[view] +- // Compute the view's suffix (e.g. " [darwin,arm64]"). +- var suffix string +- { +- var words []string +- if view.GOOS() != runtime.GOOS { +- words = append(words, view.GOOS()) +- } +- if view.GOARCH() != runtime.GOARCH { +- words = append(words, view.GOARCH()) +- } +- if len(words) > 0 { +- suffix = fmt.Sprintf(" [%s]", strings.Join(words, ",")) +- } +- } +- +- for _, diag := range viewDiags.diagnostics { +- add(diag, suffix) +- } +- } +- +- // De-dup diagnostics across views by hash, and sort. +- var ( +- hash file.Hash +- unique []*cache.Diagnostic +- ) +- for h, items := range diagSuffixes { +- // Sort the items by ascending suffix, so that the +- // default view (if present) is first. +- // (The others are ordered arbitrarily.) +- sort.Slice(items, func(i, j int) bool { +- return items[i].suffix < items[j].suffix +- }) +- +- // If the diagnostic was not present in +- // the default view, add the view suffix. +- first := items[0] +- if first.suffix != "" { +- diag2 := *first.diag // shallow copy +- diag2.Message += first.suffix +- first.diag = &diag2 +- h = diag2.Hash() // update the hash +- } +- +- hash.XORWith(h) +- unique = append(unique, first.diag) +- } +- sortDiagnostics(unique) +- +- // Publish, if necessary. +- if hash != f.publishedHash || f.mustPublish { +- if err := s.client.PublishDiagnostics(ctx, &protocol.PublishDiagnosticsParams{ +- Diagnostics: cache.ToProtocolDiagnostics(unique...), +- URI: uri, +- Version: version, // 0 ("on disk") => omitted from JSON encoding +- }); err != nil { +- return err +- } +- f.publishedHash = hash +- f.mustPublish = false +- } +- return nil +-} +- +-func (s *server) shouldIgnoreError(snapshot *cache.Snapshot, err error) bool { +- if err == nil { // if there is no error at all +- return false +- } +- if errors.Is(err, context.Canceled) { +- return true +- } +- // If the folder has no Go code in it, we shouldn't spam the user with a warning. +- // TODO(rfindley): surely it is not correct to walk the folder here just to +- // suppress diagnostics, every time we compute diagnostics. +- var hasGo bool +- _ = filepath.Walk(snapshot.Folder().Path(), func(_ string, info os.FileInfo, err error) error { +- if err != nil { +- return err +- } +- if !strings.HasSuffix(info.Name(), ".go") { +- return nil +- } +- hasGo = true +- return errors.New("done") +- }) +- return !hasGo +-} +diff -urN a/gopls/internal/server/folding_range.go b/gopls/internal/server/folding_range.go +--- a/gopls/internal/server/folding_range.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/server/folding_range.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,30 +0,0 @@ +-// Copyright 2019 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. 
+- +-package server +- +-import ( +- "context" +- +- "golang.org/x/tools/gopls/internal/file" +- "golang.org/x/tools/gopls/internal/golang" +- "golang.org/x/tools/gopls/internal/label" +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/internal/event" +-) +- +-func (s *server) FoldingRange(ctx context.Context, params *protocol.FoldingRangeParams) ([]protocol.FoldingRange, error) { +- ctx, done := event.Start(ctx, "server.FoldingRange", label.URI.Of(params.TextDocument.URI)) +- defer done() +- +- fh, snapshot, release, err := s.session.FileOf(ctx, params.TextDocument.URI) +- if err != nil { +- return nil, err +- } +- defer release() +- if snapshot.FileKind(fh) != file.Go { +- return nil, nil // empty result +- } +- return golang.FoldingRange(ctx, snapshot, fh, snapshot.Options().LineFoldingOnly) +-} +diff -urN a/gopls/internal/server/format.go b/gopls/internal/server/format.go +--- a/gopls/internal/server/format.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/server/format.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,38 +0,0 @@ +-// Copyright 2018 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package server +- +-import ( +- "context" +- +- "golang.org/x/tools/gopls/internal/file" +- "golang.org/x/tools/gopls/internal/golang" +- "golang.org/x/tools/gopls/internal/label" +- "golang.org/x/tools/gopls/internal/mod" +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/gopls/internal/work" +- "golang.org/x/tools/internal/event" +-) +- +-func (s *server) Formatting(ctx context.Context, params *protocol.DocumentFormattingParams) ([]protocol.TextEdit, error) { +- ctx, done := event.Start(ctx, "server.Formatting", label.URI.Of(params.TextDocument.URI)) +- defer done() +- +- fh, snapshot, release, err := s.session.FileOf(ctx, params.TextDocument.URI) +- if err != nil { +- return nil, err +- } +- defer release() +- +- switch snapshot.FileKind(fh) { +- case file.Mod: +- return mod.Format(ctx, snapshot, fh) +- case file.Go: +- return golang.Format(ctx, snapshot, fh) +- case file.Work: +- return work.Format(ctx, snapshot, fh) +- } +- return nil, nil // empty result +-} +diff -urN a/gopls/internal/server/general.go b/gopls/internal/server/general.go +--- a/gopls/internal/server/general.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/server/general.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,702 +0,0 @@ +-// Copyright 2019 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package server +- +-// This file defines server methods related to initialization, +-// options, shutdown, and exit. 
+- +-import ( +- "context" +- "encoding/json" +- "errors" +- "fmt" +- "go/build" +- "os" +- "path" +- "path/filepath" +- "sort" +- "strings" +- "sync" +- +- "golang.org/x/telemetry/counter" +- "golang.org/x/tools/gopls/internal/cache" +- "golang.org/x/tools/gopls/internal/debug" +- debuglog "golang.org/x/tools/gopls/internal/debug/log" +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/gopls/internal/protocol/semtok" +- "golang.org/x/tools/gopls/internal/settings" +- "golang.org/x/tools/gopls/internal/telemetry" +- "golang.org/x/tools/gopls/internal/util/bug" +- "golang.org/x/tools/gopls/internal/util/goversion" +- "golang.org/x/tools/gopls/internal/util/moremaps" +- "golang.org/x/tools/gopls/internal/util/moreslices" +- "golang.org/x/tools/internal/event" +- "golang.org/x/tools/internal/jsonrpc2" +-) +- +-func (s *server) Initialize(ctx context.Context, params *protocol.ParamInitialize) (*protocol.InitializeResult, error) { +- ctx, done := event.Start(ctx, "server.Initialize") +- defer done() +- +- var clientName string +- if params != nil && params.ClientInfo != nil { +- clientName = params.ClientInfo.Name +- } +- recordClientInfo(clientName) +- +- s.stateMu.Lock() +- if s.state >= serverInitializing { +- defer s.stateMu.Unlock() +- return nil, fmt.Errorf("%w: initialize called while server in %v state", jsonrpc2.ErrInvalidRequest, s.state) +- } +- s.state = serverInitializing +- s.stateMu.Unlock() +- +- s.progress.SetSupportsWorkDoneProgress(params.Capabilities.Window.WorkDoneProgress) +- +- options := s.Options().Clone() +- // TODO(rfindley): eliminate this defer. +- defer func() { s.SetOptions(options) }() +- +- // Process initialization options. +- { +- res, errs := options.Set(params.InitializationOptions) +- s.handleOptionResult(ctx, res, errs) +- } +- options.ForClientCapabilities(params.ClientInfo, params.Capabilities) +- +- if options.ShowBugReports { +- // Report the next bug that occurs on the server. +- bug.Handle(func(b bug.Bug) { +- msg := &protocol.ShowMessageParams{ +- Type: protocol.Error, +- Message: fmt.Sprintf("A bug occurred on the server: %s\nLocation:%s", b.Description, b.Key), +- } +- go s.eventuallyShowMessage(context.Background(), msg) +- }) +- } +- +- folders := params.WorkspaceFolders +- if len(folders) == 0 { +- if params.RootURI != "" { +- folders = []protocol.WorkspaceFolder{{ +- URI: string(params.RootURI), +- Name: path.Base(params.RootURI.Path()), +- }} +- } +- } +- s.pendingFolders = append(s.pendingFolders, folders...) +- +- var codeActionProvider any = true +- if ca := params.Capabilities.TextDocument.CodeAction; len(ca.CodeActionLiteralSupport.CodeActionKind.ValueSet) > 0 { +- // If the client has specified CodeActionLiteralSupport, +- // send the code actions we support. +- // +- // Using CodeActionOptions is only valid if codeActionLiteralSupport is set. 
+- codeActionProvider = &protocol.CodeActionOptions{ +- CodeActionKinds: s.getSupportedCodeActions(), +- ResolveProvider: true, +- } +- } +- +- var diagnosticProvider *protocol.Or_ServerCapabilities_diagnosticProvider +- if options.PullDiagnostics { +- diagnosticProvider = &protocol.Or_ServerCapabilities_diagnosticProvider{ +- Value: protocol.DiagnosticOptions{ +- InterFileDependencies: true, +- WorkspaceDiagnostics: false, // we don't support workspace/diagnostic +- }, +- } +- } +- +- var renameOpts any = true +- if r := params.Capabilities.TextDocument.Rename; r != nil && r.PrepareSupport { +- renameOpts = protocol.RenameOptions{ +- PrepareProvider: r.PrepareSupport, +- } +- } +- +- versionInfo := debug.VersionInfo() +- +- goplsVersion, err := json.Marshal(versionInfo) +- if err != nil { +- return nil, err +- } +- +- return &protocol.InitializeResult{ +- Capabilities: protocol.ServerCapabilities{ +- CallHierarchyProvider: &protocol.Or_ServerCapabilities_callHierarchyProvider{Value: true}, +- CodeActionProvider: codeActionProvider, +- CodeLensProvider: &protocol.CodeLensOptions{}, // must be non-nil to enable the code lens capability +- CompletionProvider: &protocol.CompletionOptions{ +- TriggerCharacters: []string{"."}, +- }, +- DefinitionProvider: &protocol.Or_ServerCapabilities_definitionProvider{Value: true}, +- TypeDefinitionProvider: &protocol.Or_ServerCapabilities_typeDefinitionProvider{Value: true}, +- ImplementationProvider: &protocol.Or_ServerCapabilities_implementationProvider{Value: true}, +- DocumentFormattingProvider: &protocol.Or_ServerCapabilities_documentFormattingProvider{Value: true}, +- DocumentSymbolProvider: &protocol.Or_ServerCapabilities_documentSymbolProvider{Value: true}, +- WorkspaceSymbolProvider: &protocol.Or_ServerCapabilities_workspaceSymbolProvider{Value: true}, +- ExecuteCommandProvider: &protocol.ExecuteCommandOptions{ +- Commands: protocol.NonNilSlice(options.SupportedCommands), +- }, +- FoldingRangeProvider: &protocol.Or_ServerCapabilities_foldingRangeProvider{Value: true}, +- HoverProvider: &protocol.Or_ServerCapabilities_hoverProvider{Value: true}, +- DocumentHighlightProvider: &protocol.Or_ServerCapabilities_documentHighlightProvider{Value: true}, +- DocumentLinkProvider: &protocol.DocumentLinkOptions{}, +- InlayHintProvider: protocol.InlayHintOptions{}, +- DiagnosticProvider: diagnosticProvider, +- ReferencesProvider: &protocol.Or_ServerCapabilities_referencesProvider{Value: true}, +- RenameProvider: renameOpts, +- SelectionRangeProvider: &protocol.Or_ServerCapabilities_selectionRangeProvider{Value: true}, +- SemanticTokensProvider: protocol.SemanticTokensOptions{ +- Range: &protocol.Or_SemanticTokensOptions_range{Value: true}, +- Full: &protocol.Or_SemanticTokensOptions_full{Value: true}, +- Legend: protocol.SemanticTokensLegend{ +- TokenTypes: moreslices.ConvertStrings[string](semtok.TokenTypes), +- TokenModifiers: moreslices.ConvertStrings[string](semtok.TokenModifiers), +- }, +- }, +- SignatureHelpProvider: &protocol.SignatureHelpOptions{ +- TriggerCharacters: []string{"(", ","}, +- // Used to update or dismiss signature help when it's already active, +- // typically after a call expression is closed. 
+- RetriggerCharacters: []string{")"}, +- }, +- TextDocumentSync: &protocol.TextDocumentSyncOptions{ +- Change: protocol.Incremental, +- OpenClose: true, +- Save: &protocol.SaveOptions{ +- IncludeText: false, +- }, +- }, +- TypeHierarchyProvider: &protocol.Or_ServerCapabilities_typeHierarchyProvider{Value: true}, +- Workspace: &protocol.WorkspaceOptions{ +- WorkspaceFolders: &protocol.WorkspaceFolders5Gn{ +- Supported: true, +- ChangeNotifications: "workspace/didChangeWorkspaceFolders", +- }, +- FileOperations: &protocol.FileOperationOptions{ +- DidCreate: &protocol.FileOperationRegistrationOptions{ +- Filters: []protocol.FileOperationFilter{{ +- Scheme: "file", +- // gopls is only interested with files in .go extension. +- Pattern: protocol.FileOperationPattern{Glob: "**/*.go"}, +- }}, +- }, +- }, +- }, +- }, +- ServerInfo: &protocol.ServerInfo{ +- Name: "gopls", +- Version: string(goplsVersion), +- }, +- }, nil +-} +- +-func (s *server) Initialized(ctx context.Context, params *protocol.InitializedParams) error { +- ctx, done := event.Start(ctx, "server.Initialized") +- defer done() +- +- s.stateMu.Lock() +- if s.state >= serverInitialized { +- defer s.stateMu.Unlock() +- return fmt.Errorf("%w: initialized called while server in %v state", jsonrpc2.ErrInvalidRequest, s.state) +- } +- s.state = serverInitialized +- s.stateMu.Unlock() +- +- for _, not := range s.notifications { +- s.client.ShowMessage(ctx, not) // ignore error +- } +- s.notifications = nil +- +- s.addFolders(ctx, s.pendingFolders) +- +- s.pendingFolders = nil +- s.checkViewGoVersions() +- +- var registrations []protocol.Registration +- options := s.Options() +- if options.ConfigurationSupported && options.DynamicConfigurationSupported { +- registrations = append(registrations, protocol.Registration{ +- ID: "workspace/didChangeConfiguration", +- Method: "workspace/didChangeConfiguration", +- }) +- } +- if len(registrations) > 0 { +- if err := s.client.RegisterCapability(ctx, &protocol.RegistrationParams{ +- Registrations: registrations, +- }); err != nil { +- return err +- } +- } +- +- // Ask (maybe) about enabling telemetry. Do this asynchronously, as it's OK +- // for users to ignore or dismiss the question. +- go s.maybePromptForTelemetry(ctx, options.TelemetryPrompt) +- +- return nil +-} +- +-// checkViewGoVersions checks whether any Go version used by a view is too old, +-// raising a showMessage notification if so. +-// +-// It should be called after views change. +-func (s *server) checkViewGoVersions() { +- oldestVersion, fromBuild := go1Point(), true +- for _, view := range s.session.Views() { +- viewVersion := view.GoVersion() +- if oldestVersion == -1 || viewVersion < oldestVersion { +- oldestVersion, fromBuild = viewVersion, false +- } +- if viewVersion >= 0 { +- counter.Inc(fmt.Sprintf("gopls/goversion:1.%d", viewVersion)) +- } +- } +- +- if msg, isError := goversion.Message(oldestVersion, fromBuild); msg != "" { +- mType := protocol.Warning +- if isError { +- mType = protocol.Error +- } +- s.eventuallyShowMessage(context.Background(), &protocol.ShowMessageParams{ +- Type: mType, +- Message: msg, +- }) +- } +-} +- +-// go1Point returns the x in Go 1.x. If an error occurs extracting the go +-// version, it returns -1. +-// +-// Copied from the testenv package. 
+-func go1Point() int {
+- for i := len(build.Default.ReleaseTags) - 1; i >= 0; i-- {
+- var version int
+- if _, err := fmt.Sscanf(build.Default.ReleaseTags[i], "go1.%d", &version); err != nil {
+- continue
+- }
+- return version
+- }
+- return -1
+-}
+-
+-// addFolders adds the specified list of "folders" (that's Windows for
+-// directories) to the session. It does not return an error, though it
+-// may report an error to the client over LSP if one or more folders
+-// had problems, for example, folders with an unsupported file system.
+-func (s *server) addFolders(ctx context.Context, folders []protocol.WorkspaceFolder) {
+- originalViews := len(s.session.Views())
+- viewErrors := make(map[protocol.URI]error)
+-
+- // Skip non-'file' scheme, or invalid workspace folders,
+- // and log them for error reports.
+- // VS Code's file system API
+- // (https://code.visualstudio.com/api/references/vscode-api#FileSystem)
+- // allows extensions to define their own schemes and register
+- // them with the workspace. We've seen gitlens://, decompileFs://, etc.,
+- // but the list can grow over time.
+- var filtered []protocol.WorkspaceFolder
+- for _, f := range folders {
+- uri, err := protocol.ParseDocumentURI(f.URI)
+- if err != nil {
+- debuglog.Warning.Logf(ctx, "skip adding virtual folder %q - invalid folder URI: %v", f.Name, err)
+- continue
+- }
+- if s.session.HasView(uri) {
+- debuglog.Warning.Logf(ctx, "skip adding the already added folder %q - its view has been created before", f.Name)
+- continue
+- }
+- filtered = append(filtered, f)
+- }
+- folders = filtered
+-
+- var ndiagnose sync.WaitGroup // number of unfinished diagnose calls
+- if s.Options().VerboseWorkDoneProgress {
+- work := s.progress.Start(ctx, DiagnosticWorkTitle(FromInitialWorkspaceLoad), "Calculating diagnostics for initial workspace load...", nil, nil)
+- defer func() {
+- go func() {
+- ndiagnose.Wait()
+- work.End(ctx, "Done.")
+- }()
+- }()
+- }
+- // Only one view gets to have a workspace.
+- var nsnapshots sync.WaitGroup // number of unfinished snapshot initializations
+- for _, folder := range folders {
+- uri, err := protocol.ParseDocumentURI(folder.URI)
+- if err != nil {
+- viewErrors[folder.URI] = fmt.Errorf("invalid folder URI: %v", err)
+- continue
+- }
+- work := s.progress.Start(ctx, "Setting up workspace", "Loading packages...", nil, nil)
+- snapshot, release, err := s.addView(ctx, folder.Name, uri)
+- if err != nil {
+- if err == cache.ErrViewExists {
+- continue
+- }
+- viewErrors[folder.URI] = err
+- work.End(ctx, fmt.Sprintf("Error loading packages: %s", err))
+- continue
+- }
+- // Inv: release() must be called once.
+-
+- // Initialize snapshot asynchronously.
+- initialized := make(chan struct{})
+- nsnapshots.Add(1)
+- go func() {
+- snapshot.AwaitInitialized(ctx)
+- work.End(ctx, "Finished loading packages.")
+- nsnapshots.Done()
+- close(initialized) // signal
+- }()
+-
+- // Diagnose the newly created view asynchronously.
+- ndiagnose.Go(func() {
+- s.diagnoseSnapshot(snapshot.BackgroundContext(), snapshot, nil, 0)
+- <-initialized
+- release()
+- })
+- }
+-
+- // Wait for snapshots to be initialized so that all files are known.
+- // (We don't need to wait for diagnosis to finish.)
+- nsnapshots.Wait()
+-
+- // Register for file watching notifications, if they are supported.
+- if err := s.updateWatchedDirectories(ctx); err != nil {
+- event.Error(ctx, "failed to register for file watching notifications", err)
+- }
+-
+- // Report any errors using the protocol.
+- if len(viewErrors) > 0 { +- var errMsg strings.Builder +- fmt.Fprintf(&errMsg, "Error loading workspace folders (expected %v, got %v)\n", len(folders), len(s.session.Views())-originalViews) +- for uri, err := range viewErrors { +- fmt.Fprintf(&errMsg, "failed to load view for %s: %v\n", uri, err) +- } +- showMessage(ctx, s.client, protocol.Error, errMsg.String()) +- } +-} +- +-// updateWatchedDirectories compares the current set of directories to watch +-// with the previously registered set of directories. If the set of directories +-// has changed, we unregister and re-register for file watching notifications. +-// updatedSnapshots is the set of snapshots that have been updated. +-func (s *server) updateWatchedDirectories(ctx context.Context) error { +- patterns := s.session.FileWatchingGlobPatterns(ctx) +- +- s.watchedGlobPatternsMu.Lock() +- defer s.watchedGlobPatternsMu.Unlock() +- +- // Nothing to do if the set of workspace directories is unchanged. +- if moremaps.SameKeys(s.watchedGlobPatterns, patterns) { +- return nil +- } +- +- // If the set of directories to watch has changed, register the updates and +- // unregister the previously watched directories. This ordering avoids a +- // period where no files are being watched. Still, if a user makes on-disk +- // changes before these updates are complete, we may miss them for the new +- // directories. +- prevID := s.watchRegistrationCount - 1 +- if err := s.registerWatchedDirectoriesLocked(ctx, patterns); err != nil { +- return err +- } +- if prevID >= 0 { +- return s.client.UnregisterCapability(ctx, &protocol.UnregistrationParams{ +- Unregisterations: []protocol.Unregistration{{ +- ID: watchedFilesCapabilityID(prevID), +- Method: "workspace/didChangeWatchedFiles", +- }}, +- }) +- } +- return nil +-} +- +-func watchedFilesCapabilityID(id int) string { +- return fmt.Sprintf("workspace/didChangeWatchedFiles-%d", id) +-} +- +-// registerWatchedDirectoriesLocked sends the workspace/didChangeWatchedFiles +-// registrations to the client and updates s.watchedDirectories. +-// The caller must not subsequently mutate patterns. +-func (s *server) registerWatchedDirectoriesLocked(ctx context.Context, patterns map[protocol.RelativePattern]unit) error { +- if !s.Options().DynamicWatchedFilesSupported { +- return nil +- } +- +- supportsRelativePatterns := s.Options().RelativePatternsSupported +- +- s.watchedGlobPatterns = patterns +- watchers := make([]protocol.FileSystemWatcher, 0, len(patterns)) // must be a slice +- val := protocol.WatchChange | protocol.WatchDelete | protocol.WatchCreate +- for pattern := range patterns { +- var value any +- if supportsRelativePatterns && pattern.BaseURI != "" { +- value = pattern +- } else { +- p := pattern.Pattern +- if pattern.BaseURI != "" { +- p = path.Join(filepath.ToSlash(pattern.BaseURI.Path()), p) +- } +- value = p +- } +- watchers = append(watchers, protocol.FileSystemWatcher{ +- GlobPattern: protocol.GlobPattern{Value: value}, +- Kind: &val, +- }) +- } +- +- if err := s.client.RegisterCapability(ctx, &protocol.RegistrationParams{ +- Registrations: []protocol.Registration{{ +- ID: watchedFilesCapabilityID(s.watchRegistrationCount), +- Method: "workspace/didChangeWatchedFiles", +- RegisterOptions: protocol.DidChangeWatchedFilesRegistrationOptions{ +- Watchers: watchers, +- }, +- }}, +- }); err != nil { +- return err +- } +- s.watchRegistrationCount++ +- return nil +-} +- +-// Options returns the current server options. +-// +-// The caller must not modify the result. 
+-func (s *server) Options() *settings.Options { +- s.optionsMu.Lock() +- defer s.optionsMu.Unlock() +- return s.options +-} +- +-// SetOptions sets the current server options. +-// +-// The caller must not subsequently modify the options. +-func (s *server) SetOptions(opts *settings.Options) { +- s.optionsMu.Lock() +- defer s.optionsMu.Unlock() +- s.options = opts +-} +- +-func (s *server) newFolder(ctx context.Context, folder protocol.DocumentURI, name string, opts *settings.Options) (*cache.Folder, error) { +- env, err := cache.FetchGoEnv(ctx, folder, opts) +- if err != nil { +- return nil, err +- } +- +- // Increment folder counters. +- switch { +- case env.GOTOOLCHAIN == "auto" || strings.Contains(env.GOTOOLCHAIN, "+auto"): +- counter.Inc("gopls/gotoolchain:auto") +- case env.GOTOOLCHAIN == "path" || strings.Contains(env.GOTOOLCHAIN, "+path"): +- counter.Inc("gopls/gotoolchain:path") +- case env.GOTOOLCHAIN == "local": // local+auto and local+path handled above +- counter.Inc("gopls/gotoolchain:local") +- default: +- counter.Inc("gopls/gotoolchain:other") +- } +- +- // Record whether a driver is in use so that it appears in the +- // user's telemetry upload. Although we can't correlate the +- // driver information with the crash or bug.Report at the +- // granularity of the process instance, users that use a +- // driver tend to do so most of the time, so we'll get a +- // strong clue. See #60890 for an example of an issue where +- // this information would have been helpful. +- if env.EffectiveGOPACKAGESDRIVER != "" { +- counter.Inc("gopls/gopackagesdriver") +- } +- +- return &cache.Folder{ +- Dir: folder, +- Name: name, +- Options: opts, +- Env: *env, +- }, nil +-} +- +-// fetchFolderOptions makes a workspace/configuration request for the given +-// folder, and populates options with the result. +-// +-// If folder is "", fetchFolderOptions makes an unscoped request. +-func (s *server) fetchFolderOptions(ctx context.Context, folder protocol.DocumentURI) (*settings.Options, error) { +- opts := s.Options() +- if !opts.ConfigurationSupported { +- return opts, nil +- } +- var scopeURI *string +- if folder != "" { +- scope := string(folder) +- scopeURI = &scope +- } +- configs, err := s.client.Configuration(ctx, &protocol.ParamConfiguration{ +- Items: []protocol.ConfigurationItem{{ +- ScopeURI: scopeURI, +- Section: "gopls", +- }}, +- }, +- ) +- if err != nil { +- return nil, fmt.Errorf("failed to get workspace configuration from client (%s): %v", folder, err) +- } +- +- opts = opts.Clone() +- for _, config := range configs { +- res, errs := opts.Set(config) +- s.handleOptionResult(ctx, res, errs) +- } +- return opts, nil +-} +- +-func (s *server) eventuallyShowMessage(ctx context.Context, msg *protocol.ShowMessageParams) { +- s.stateMu.Lock() +- defer s.stateMu.Unlock() +- if s.state == serverInitialized { +- _ = s.client.ShowMessage(ctx, msg) // ignore error +- } +- s.notifications = append(s.notifications, msg) +-} +- +-func (s *server) handleOptionResult(ctx context.Context, applied []telemetry.CounterPath, optionErrors []error) { +- for _, path := range applied { +- path = append(settings.CounterPath{"gopls", "setting"}, path...) 
+- counter.Inc(path.FullName()) +- } +- +- var warnings, errs []string +- for _, err := range optionErrors { +- if err == nil { +- panic("nil error passed to handleOptionErrors") +- } +- if errors.Is(err, new(settings.SoftError)) { +- warnings = append(warnings, err.Error()) +- } else { +- errs = append(errs, err.Error()) +- } +- } +- +- // Sort messages, but put errors first. +- // +- // Having stable content for the message allows clients to de-duplicate. This +- // matters because we may send duplicate warnings for clients that support +- // dynamic configuration: one for the initial settings, and then more for the +- // individual viewsettings. +- var msgs []string +- msgType := protocol.Warning +- if len(errs) > 0 { +- msgType = protocol.Error +- sort.Strings(errs) +- msgs = append(msgs, errs...) +- } +- if len(warnings) > 0 { +- sort.Strings(warnings) +- msgs = append(msgs, warnings...) +- } +- +- if len(msgs) > 0 { +- // Settings +- combined := "Invalid settings: " + strings.Join(msgs, "; ") +- params := &protocol.ShowMessageParams{ +- Type: msgType, +- Message: combined, +- } +- s.eventuallyShowMessage(ctx, params) +- } +-} +- +-// Shutdown implements the 'shutdown' LSP handler. It releases resources +-// associated with the server and waits for all ongoing work to complete. +-func (s *server) Shutdown(ctx context.Context) error { +- ctx, done := event.Start(ctx, "server.Shutdown") +- defer done() +- +- s.stateMu.Lock() +- defer s.stateMu.Unlock() +- if s.state < serverInitialized { +- event.Log(ctx, "server shutdown without initialization") +- } +- if s.state != serverShutDown { +- // Wait for the webserver (if any) to finish. +- if s.web != nil { +- s.web.server.Shutdown(ctx) // ignore error +- } +- +- // drop all the active views +- s.session.Shutdown(ctx) +- s.state = serverShutDown +- } +- return nil +-} +- +-func (s *server) Exit(ctx context.Context) error { +- ctx, done := event.Start(ctx, "server.Exit") +- defer done() +- +- s.stateMu.Lock() +- defer s.stateMu.Unlock() +- +- s.client.Close() // ignore error +- +- if s.state != serverShutDown { +- // TODO: We should be able to do better than this. +- os.Exit(1) +- } +- // We don't terminate the process on a normal exit, we just allow it to +- // close naturally if needed after the connection is closed. +- return nil +-} +- +-// recordClientInfo records gopls client info. 
+-func recordClientInfo(clientName string) { +- key := "gopls/client:other" +- switch clientName { +- case "Visual Studio Code": +- key = "gopls/client:vscode" +- case "Visual Studio Code - Insiders": +- key = "gopls/client:vscode-insiders" +- case "VSCodium": +- key = "gopls/client:vscodium" +- case "code-server": +- // https://github.com/coder/code-server/blob/3cb92edc76ecc2cfa5809205897d93d4379b16a6/ci/build/build-vscode.sh#L19 +- key = "gopls/client:code-server" +- case "Eglot": +- // https://lists.gnu.org/archive/html/bug-gnu-emacs/2023-03/msg00954.html +- key = "gopls/client:eglot" +- case "govim": +- // https://github.com/govim/govim/pull/1189 +- key = "gopls/client:govim" +- case "helix": +- // https://github.com/helix-editor/helix/blob/d0218f7e78bc0c3af4b0995ab8bda66b9c542cf3/helix-lsp/src/client.rs#L714 +- key = "gopls/client:helix" +- case "Neovim": +- // https://github.com/neovim/neovim/blob/42333ea98dfcd2994ee128a3467dfe68205154cd/runtime/lua/vim/lsp.lua#L1361 +- key = "gopls/client:neovim" +- case "coc.nvim": +- // https://github.com/neoclide/coc.nvim/blob/3dc6153a85ed0f185abec1deb972a66af3fbbfb4/src/language-client/client.ts#L994 +- key = "gopls/client:coc.nvim" +- case "Sublime Text LSP": +- // https://github.com/sublimelsp/LSP/blob/e608f878e7e9dd34aabe4ff0462540fadcd88fcc/plugin/core/sessions.py#L493 +- key = "gopls/client:sublimetext" +- case "Windsurf": +- key = "gopls/client:windsurf" +- case "Cursor": +- key = "gopls/client:cursor" +- case "Zed", "Zed Dev", "Zed Nightly", "Zed Preview": +- // https: //github.com/zed-industries/zed/blob/0ac17526687bf11007f0fbb5c3b2ff463ce47293/crates/release_channel/src/lib.rs#L147 +- key = "gopls/client:zed" +- default: +- // Accumulate at least a local counter for an unknown +- // client name, but also fall through to count it as +- // ":other" for collection. +- if clientName != "" { +- counter.New(fmt.Sprintf("gopls/client-other:%s", clientName)).Inc() +- } +- } +- counter.Inc(key) +-} +diff -urN a/gopls/internal/server/highlight.go b/gopls/internal/server/highlight.go +--- a/gopls/internal/server/highlight.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/server/highlight.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,39 +0,0 @@ +-// Copyright 2019 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. 
+-
+-package server
+-
+-import (
+- "context"
+-
+- "golang.org/x/tools/gopls/internal/file"
+- "golang.org/x/tools/gopls/internal/golang"
+- "golang.org/x/tools/gopls/internal/label"
+- "golang.org/x/tools/gopls/internal/protocol"
+- "golang.org/x/tools/gopls/internal/template"
+- "golang.org/x/tools/internal/event"
+-)
+-
+-func (s *server) DocumentHighlight(ctx context.Context, params *protocol.DocumentHighlightParams) ([]protocol.DocumentHighlight, error) {
+- ctx, done := event.Start(ctx, "server.DocumentHighlight", label.URI.Of(params.TextDocument.URI))
+- defer done()
+-
+- fh, snapshot, release, err := s.session.FileOf(ctx, params.TextDocument.URI)
+- if err != nil {
+- return nil, err
+- }
+- defer release()
+-
+- switch snapshot.FileKind(fh) {
+- case file.Tmpl:
+- return template.Highlight(ctx, snapshot, fh, params.Position)
+- case file.Go:
+- rngs, err := golang.Highlight(ctx, snapshot, fh, params.Position)
+- if err != nil {
+- event.Error(ctx, "no highlight", err)
+- }
+- return rngs, nil
+- }
+- return nil, nil // empty result
+-}
+diff -urN a/gopls/internal/server/hover.go b/gopls/internal/server/hover.go
+--- a/gopls/internal/server/hover.go 2000-01-01 00:00:00.000000000 -0000
++++ b/gopls/internal/server/hover.go 1969-12-31 18:00:00.000000000 -0600
+@@ -1,59 +0,0 @@
+-// Copyright 2019 The Go Authors. All rights reserved.
+-// Use of this source code is governed by a BSD-style
+-// license that can be found in the LICENSE file.
+-
+-package server
+-
+-import (
+- "context"
+-
+- "golang.org/x/tools/gopls/internal/file"
+- "golang.org/x/tools/gopls/internal/golang"
+- "golang.org/x/tools/gopls/internal/label"
+- "golang.org/x/tools/gopls/internal/mod"
+- "golang.org/x/tools/gopls/internal/protocol"
+- "golang.org/x/tools/gopls/internal/settings"
+- "golang.org/x/tools/gopls/internal/telemetry"
+- "golang.org/x/tools/gopls/internal/template"
+- "golang.org/x/tools/gopls/internal/work"
+- "golang.org/x/tools/internal/event"
+-)
+-
+-func (s *server) Hover(ctx context.Context, params *protocol.HoverParams) (_ *protocol.Hover, rerr error) {
+- recordLatency := telemetry.StartLatencyTimer("hover")
+- defer func() {
+- recordLatency(ctx, rerr)
+- }()
+-
+- ctx, done := event.Start(ctx, "server.Hover", label.URI.Of(params.TextDocument.URI))
+- defer done()
+-
+- fh, snapshot, release, err := s.session.FileOf(ctx, params.TextDocument.URI)
+- if err != nil {
+- return nil, err
+- }
+- defer release()
+-
+- switch snapshot.FileKind(fh) {
+- case file.Mod:
+- return mod.Hover(ctx, snapshot, fh, params.Position)
+- case file.Go:
+- var pkgURL func(path golang.PackagePath, fragment string) protocol.URI
+- if snapshot.Options().LinksInHover == settings.LinksInHover_Gopls {
+- web, err := s.getWeb()
+- if err != nil {
+- event.Error(ctx, "failed to start web server", err)
+- } else {
+- pkgURL = func(path golang.PackagePath, fragment string) protocol.URI {
+- return web.PkgURL(snapshot.View().ID(), path, fragment)
+- }
+- }
+- }
+- return golang.Hover(ctx, snapshot, fh, params.Position, pkgURL)
+- case file.Tmpl:
+- return template.Hover(ctx, snapshot, fh, params.Position)
+- case file.Work:
+- return work.Hover(ctx, snapshot, fh, params.Position)
+- }
+- return nil, nil // empty result
+-}
+diff -urN a/gopls/internal/server/implementation.go b/gopls/internal/server/implementation.go
+--- a/gopls/internal/server/implementation.go 2000-01-01 00:00:00.000000000 -0000
++++ b/gopls/internal/server/implementation.go 1969-12-31 18:00:00.000000000 -0600
+@@ -1,36 +0,0 @@
+-// Copyright 2019 The Go Authors. All rights reserved.
+-// Use of this source code is governed by a BSD-style
+-// license that can be found in the LICENSE file.
+-
+-package server
+-
+-import (
+- "context"
+-
+- "golang.org/x/tools/gopls/internal/file"
+- "golang.org/x/tools/gopls/internal/golang"
+- "golang.org/x/tools/gopls/internal/label"
+- "golang.org/x/tools/gopls/internal/protocol"
+- "golang.org/x/tools/gopls/internal/telemetry"
+- "golang.org/x/tools/internal/event"
+-)
+-
+-func (s *server) Implementation(ctx context.Context, params *protocol.ImplementationParams) (_ []protocol.Location, rerr error) {
+- recordLatency := telemetry.StartLatencyTimer("implementation")
+- defer func() {
+- recordLatency(ctx, rerr)
+- }()
+-
+- ctx, done := event.Start(ctx, "server.Implementation", label.URI.Of(params.TextDocument.URI))
+- defer done()
+-
+- fh, snapshot, release, err := s.session.FileOf(ctx, params.TextDocument.URI)
+- if err != nil {
+- return nil, err
+- }
+- defer release()
+- if snapshot.FileKind(fh) != file.Go {
+- return nil, nil // empty result
+- }
+- return golang.Implementation(ctx, snapshot, fh, params.Position)
+-}
+diff -urN a/gopls/internal/server/inlay_hint.go b/gopls/internal/server/inlay_hint.go
+--- a/gopls/internal/server/inlay_hint.go 2000-01-01 00:00:00.000000000 -0000
++++ b/gopls/internal/server/inlay_hint.go 1969-12-31 18:00:00.000000000 -0600
+@@ -1,35 +0,0 @@
+-// Copyright 2022 The Go Authors. All rights reserved.
+-// Use of this source code is governed by a BSD-style
+-// license that can be found in the LICENSE file.
+-
+-package server
+-
+-import (
+- "context"
+-
+- "golang.org/x/tools/gopls/internal/file"
+- "golang.org/x/tools/gopls/internal/golang"
+- "golang.org/x/tools/gopls/internal/label"
+- "golang.org/x/tools/gopls/internal/mod"
+- "golang.org/x/tools/gopls/internal/protocol"
+- "golang.org/x/tools/internal/event"
+-)
+-
+-func (s *server) InlayHint(ctx context.Context, params *protocol.InlayHintParams) ([]protocol.InlayHint, error) {
+- ctx, done := event.Start(ctx, "server.InlayHint", label.URI.Of(params.TextDocument.URI))
+- defer done()
+-
+- fh, snapshot, release, err := s.session.FileOf(ctx, params.TextDocument.URI)
+- if err != nil {
+- return nil, err
+- }
+- defer release()
+-
+- switch snapshot.FileKind(fh) {
+- case file.Mod:
+- return mod.InlayHint(ctx, snapshot, fh, params.Range)
+- case file.Go:
+- return golang.InlayHint(ctx, snapshot, fh, params.Range)
+- }
+- return nil, nil // empty result
+-}
+diff -urN a/gopls/internal/server/link.go b/gopls/internal/server/link.go
+--- a/gopls/internal/server/link.go 2000-01-01 00:00:00.000000000 -0000
++++ b/gopls/internal/server/link.go 1969-12-31 18:00:00.000000000 -0600
+@@ -1,322 +0,0 @@
+-// Copyright 2018 The Go Authors. All rights reserved.
+-// Use of this source code is governed by a BSD-style
+-// license that can be found in the LICENSE file.
+- +-package server +- +-import ( +- "bytes" +- "context" +- "fmt" +- "go/ast" +- "go/token" +- "net/url" +- "path/filepath" +- "regexp" +- "strings" +- "sync" +- +- "golang.org/x/mod/modfile" +- "golang.org/x/mod/module" +- "golang.org/x/tools/gopls/internal/cache" +- "golang.org/x/tools/gopls/internal/cache/metadata" +- "golang.org/x/tools/gopls/internal/cache/parsego" +- "golang.org/x/tools/gopls/internal/file" +- "golang.org/x/tools/gopls/internal/golang" +- "golang.org/x/tools/gopls/internal/label" +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/gopls/internal/util/safetoken" +- "golang.org/x/tools/internal/event" +- "mvdan.cc/xurls/v2" +-) +- +-func (s *server) DocumentLink(ctx context.Context, params *protocol.DocumentLinkParams) (links []protocol.DocumentLink, err error) { +- ctx, done := event.Start(ctx, "server.DocumentLink") +- defer done() +- +- fh, snapshot, release, err := s.session.FileOf(ctx, params.TextDocument.URI) +- if err != nil { +- return nil, err +- } +- defer release() +- +- switch snapshot.FileKind(fh) { +- case file.Mod: +- links, err = modLinks(ctx, snapshot, fh) +- case file.Go: +- links, err = goLinks(ctx, snapshot, fh) +- } +- // Don't return errors for document links. +- if err != nil { +- event.Error(ctx, "failed to compute document links", err, label.URI.Of(fh.URI())) +- return nil, nil // empty result +- } +- return links, nil // may be empty (for other file types) +-} +- +-func modLinks(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle) ([]protocol.DocumentLink, error) { +- pm, err := snapshot.ParseMod(ctx, fh) +- if err != nil { +- return nil, err +- } +- +- var links []protocol.DocumentLink +- for _, rep := range pm.File.Replace { +- if modfile.IsDirectoryPath(rep.New.Path) { +- // Have local replacement, such as 'replace A => ../'. +- dep := []byte(rep.New.Path) +- start, end := rep.Syntax.Start.Byte, rep.Syntax.End.Byte +- i := bytes.Index(pm.Mapper.Content[start:end], dep) +- if i < 0 { +- continue +- } +- path := rep.New.Path +- if !filepath.IsAbs(path) { +- path = filepath.Join(fh.URI().DirPath(), path) +- } +- // jump to the go.mod file of replaced module. +- path = filepath.Join(filepath.Clean(path), "go.mod") +- l, err := toProtocolLink(pm.Mapper, protocol.URIFromPath(path).Path(), start+i, start+i+len(dep)) +- if err != nil { +- return nil, err +- } +- links = append(links, l) +- continue +- } +- } +- +- for _, req := range pm.File.Require { +- if req.Syntax == nil { +- continue +- } +- // See golang/go#36998: don't link to modules matching GOPRIVATE. +- if snapshot.IsGoPrivatePath(req.Mod.Path) { +- continue +- } +- dep := []byte(req.Mod.Path) +- start, end := req.Syntax.Start.Byte, req.Syntax.End.Byte +- i := bytes.Index(pm.Mapper.Content[start:end], dep) +- if i == -1 { +- continue +- } +- +- mod := req.Mod +- // respect the replacement when constructing a module link. +- if m, ok := pm.ReplaceMap[req.Mod]; ok { +- // Have: 'replace A v1.2.3 => A vx.x.x' or 'replace A v1.2.3 => B vx.x.x'. +- mod = m +- } else if m, ok := pm.ReplaceMap[module.Version{Path: req.Mod.Path}]; ok && +- !modfile.IsDirectoryPath(m.Path) { // exclude local replacement. +- // Have: 'replace A => A vx.x.x' or 'replace A => B vx.x.x'. +- mod = m +- } +- +- // Shift the start position to the location of the +- // dependency within the require statement. 
+- target := cache.BuildLink(snapshot.Options().LinkTarget, "mod/"+mod.String(), "") +- l, err := toProtocolLink(pm.Mapper, target, start+i, start+i+len(dep)) +- if err != nil { +- return nil, err +- } +- links = append(links, l) +- } +- // TODO(ridersofrohan): handle links for replace and exclude directives. +- if syntax := pm.File.Syntax; syntax == nil { +- return links, nil +- } +- +- // Get all the links that are contained in the comments of the file. +- urlRegexp := xurls.Relaxed() +- for _, expr := range pm.File.Syntax.Stmt { +- comments := expr.Comment() +- if comments == nil { +- continue +- } +- for _, section := range [][]modfile.Comment{comments.Before, comments.Suffix, comments.After} { +- for _, comment := range section { +- l, err := findLinksInString(urlRegexp, comment.Token, comment.Start.Byte, pm.Mapper) +- if err != nil { +- return nil, err +- } +- links = append(links, l...) +- } +- } +- } +- return links, nil +-} +- +-// goLinks returns the set of hyperlink annotations for the specified Go file. +-func goLinks(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle) ([]protocol.DocumentLink, error) { +- +- pgf, err := snapshot.ParseGo(ctx, fh, parsego.Full) +- if err != nil { +- return nil, err +- } +- +- var links []protocol.DocumentLink +- +- // Create links for import specs. +- if snapshot.Options().ImportShortcut.ShowLinks() { +- +- // If links are to pkg.go.dev, append module version suffixes. +- // This requires the import map from the package metadata. Ignore errors. +- var depsByImpPath map[golang.ImportPath]golang.PackageID +- if strings.ToLower(snapshot.Options().LinkTarget) == "pkg.go.dev" { +- if meta, err := snapshot.NarrowestMetadataForFile(ctx, fh.URI()); err == nil { +- depsByImpPath = meta.DepsByImpPath +- } +- } +- +- for _, imp := range pgf.File.Imports { +- importPath := metadata.UnquoteImportPath(imp) +- if importPath == "" { +- continue // bad import +- } +- // See golang/go#36998: don't link to modules matching GOPRIVATE. +- if snapshot.IsGoPrivatePath(string(importPath)) { +- continue +- } +- +- urlPath := string(importPath) +- +- // For pkg.go.dev, append module version suffix to package import path. +- if mp := snapshot.Metadata(depsByImpPath[importPath]); mp != nil && mp.Module != nil && cache.ResolvedPath(mp.Module) != "" && cache.ResolvedVersion(mp.Module) != "" { +- urlPath = strings.Replace(urlPath, mp.Module.Path, cache.ResolvedString(mp.Module), 1) +- } +- +- start, end, err := safetoken.Offsets(pgf.Tok, imp.Path.Pos(), imp.Path.End()) +- if err != nil { +- return nil, err +- } +- targetURL := cache.BuildLink(snapshot.Options().LinkTarget, urlPath, "") +- // Account for the quotation marks in the positions. +- l, err := toProtocolLink(pgf.Mapper, targetURL, start+len(`"`), end-len(`"`)) +- if err != nil { +- return nil, err +- } +- links = append(links, l) +- } +- } +- +- urlRegexp := xurls.Relaxed() +- +- // Gather links found in string literals. +- var str []*ast.BasicLit +- for curLit := range pgf.Cursor.Preorder((*ast.BasicLit)(nil)) { +- lit := curLit.Node().(*ast.BasicLit) +- if lit.Kind == token.STRING { +- if _, ok := curLit.Parent().Node().(*ast.ImportSpec); ok { +- continue // ignore import strings +- } +- str = append(str, lit) +- } +- } +- for _, s := range str { +- strOffset, err := safetoken.Offset(pgf.Tok, s.Pos()) +- if err != nil { +- return nil, err +- } +- l, err := findLinksInString(urlRegexp, s.Value, strOffset, pgf.Mapper) +- if err != nil { +- return nil, err +- } +- links = append(links, l...) 
+- } +- +- // Gather links found in comments. +- for _, commentGroup := range pgf.File.Comments { +- for _, comment := range commentGroup.List { +- commentOffset, err := safetoken.Offset(pgf.Tok, comment.Pos()) +- if err != nil { +- return nil, err +- } +- l, err := findLinksInString(urlRegexp, comment.Text, commentOffset, pgf.Mapper) +- if err != nil { +- return nil, err +- } +- links = append(links, l...) +- } +- } +- +- return links, nil +-} +- +-// acceptedSchemes controls the schemes that URLs must have to be shown to the +-// user. Other schemes can't be opened by LSP clients, so linkifying them is +-// distracting. See golang/go#43990. +-var acceptedSchemes = map[string]bool{ +- "http": true, +- "https": true, +-} +- +-// findLinksInString is the user-supplied regular expression to match URL. +-// srcOffset is the start offset of 'src' within m's file. +-func findLinksInString(urlRegexp *regexp.Regexp, src string, srcOffset int, m *protocol.Mapper) ([]protocol.DocumentLink, error) { +- var links []protocol.DocumentLink +- for _, index := range urlRegexp.FindAllIndex([]byte(src), -1) { +- start, end := index[0], index[1] +- link := src[start:end] +- linkURL, err := url.Parse(link) +- // Fallback: Linkify IP addresses as suggested in golang/go#18824. +- if err != nil { +- linkURL, err = url.Parse("//" + link) +- // Not all potential links will be valid, so don't return this error. +- if err != nil { +- continue +- } +- } +- // If the URL has no scheme, use https. +- if linkURL.Scheme == "" { +- linkURL.Scheme = "https" +- } +- if !acceptedSchemes[linkURL.Scheme] { +- continue +- } +- +- l, err := toProtocolLink(m, linkURL.String(), srcOffset+start, srcOffset+end) +- if err != nil { +- return nil, err +- } +- links = append(links, l) +- } +- // Handle golang/go#1234-style links. +- r := getIssueRegexp() +- for _, index := range r.FindAllIndex([]byte(src), -1) { +- start, end := index[0], index[1] +- matches := r.FindStringSubmatch(src) +- if len(matches) < 4 { +- continue +- } +- org, repo, number := matches[1], matches[2], matches[3] +- targetURL := fmt.Sprintf("https://github.com/%s/%s/issues/%s", org, repo, number) +- l, err := toProtocolLink(m, targetURL, srcOffset+start, srcOffset+end) +- if err != nil { +- return nil, err +- } +- links = append(links, l) +- } +- return links, nil +-} +- +-func getIssueRegexp() *regexp.Regexp { +- once.Do(func() { +- issueRegexp = regexp.MustCompile(`(\w+)/([\w-]+)#([0-9]+)`) +- }) +- return issueRegexp +-} +- +-var ( +- once sync.Once +- issueRegexp *regexp.Regexp +-) +- +-func toProtocolLink(m *protocol.Mapper, targetURL string, start, end int) (protocol.DocumentLink, error) { +- rng, err := m.OffsetRange(start, end) +- if err != nil { +- return protocol.DocumentLink{}, err +- } +- return protocol.DocumentLink{ +- Range: rng, +- Target: &targetURL, +- }, nil +-} +diff -urN a/gopls/internal/server/prompt.go b/gopls/internal/server/prompt.go +--- a/gopls/internal/server/prompt.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/server/prompt.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,417 +0,0 @@ +-// Copyright 2023 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. 
+- +-package server +- +-import ( +- "context" +- "fmt" +- "math/rand" +- "os" +- "path/filepath" +- "strconv" +- "testing" +- "time" +- +- "golang.org/x/telemetry" +- "golang.org/x/telemetry/counter" +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/internal/event" +-) +- +-// promptTimeout is the amount of time we wait for an ongoing prompt before +-// prompting again. This gives the user time to reply. However, at some point +-// we must assume that the client is not displaying the prompt, the user is +-// ignoring it, or the prompt has been disrupted in some way (e.g. by a gopls +-// crash). +-const promptTimeout = 24 * time.Hour +- +-// gracePeriod is the amount of time we wait before sufficient telemetry data +-// is accumulated in the local directory, so users can have time to review +-// what kind of information will be collected and uploaded when prompting starts. +-const gracePeriod = 7 * 24 * time.Hour +- +-// samplesPerMille is the prompt probability. +-// Token is an integer between [1, 1000] and is assigned when maybePromptForTelemetry +-// is called first time. Only the user with a token ∈ [1, samplesPerMille] +-// will be considered for prompting. +-const samplesPerMille = 10 // 1% sample rate +- +-// The following constants are used for testing telemetry integration. +-const ( +- TelemetryPromptWorkTitle = "Checking telemetry prompt" // progress notification title, for awaiting in tests +- GoplsConfigDirEnvvar = "GOPLS_CONFIG_DIR" // overridden for testing +- FakeTelemetryModefileEnvvar = "GOPLS_FAKE_TELEMETRY_MODEFILE" // overridden for testing +- FakeSamplesPerMille = "GOPLS_FAKE_SAMPLES_PER_MILLE" // overridden for testing +- TelemetryYes = "Yes, I'd like to help." +- TelemetryNo = "No, thanks." +-) +- +-// The following environment variables may be set by the client. +-// Exported for testing telemetry integration. +-const ( +- GoTelemetryGoplsClientStartTimeEnvvar = "GOTELEMETRY_GOPLS_CLIENT_START_TIME" // telemetry start time recorded in client +- GoTelemetryGoplsClientTokenEnvvar = "GOTELEMETRY_GOPLS_CLIENT_TOKEN" // sampling token +-) +- +-// getenv returns the effective environment variable value for the provided +-// key, looking up the key in the session environment before falling back on +-// the process environment. +-func (s *server) getenv(key string) string { +- if v, ok := s.Options().Env[key]; ok { +- return v +- } +- return os.Getenv(key) +-} +- +-// telemetryMode returns the current effective telemetry mode. +-// By default this is x/telemetry.Mode(), but it may be overridden for tests. +-func (s *server) telemetryMode() string { +- if fake := s.getenv(FakeTelemetryModefileEnvvar); fake != "" { +- if data, err := os.ReadFile(fake); err == nil { +- return string(data) +- } +- return "local" +- } +- return telemetry.Mode() +-} +- +-// setTelemetryMode sets the current telemetry mode. +-// By default this calls x/telemetry.SetMode, but it may be overridden for +-// tests. +-func (s *server) setTelemetryMode(mode string) error { +- if fake := s.getenv(FakeTelemetryModefileEnvvar); fake != "" { +- return os.WriteFile(fake, []byte(mode), 0666) +- } +- return telemetry.SetMode(mode) +-} +- +-// maybePromptForTelemetry checks for the right conditions, and then prompts +-// the user to ask if they want to enable Go telemetry uploading. If the user +-// responds 'Yes', the telemetry mode is set to "on". +-// +-// The actual conditions for prompting are defensive, erring on the side of not +-// prompting. 
+-// If enabled is false, this will not prompt the user in any condition, +-// but will send work progress reports to help testing. +-func (s *server) maybePromptForTelemetry(ctx context.Context, enabled bool) { +- if s.Options().VerboseWorkDoneProgress { +- work := s.progress.Start(ctx, TelemetryPromptWorkTitle, "Checking if gopls should prompt about telemetry...", nil, nil) +- defer work.End(ctx, "Done.") +- } +- +- errorf := func(format string, args ...any) { +- err := fmt.Errorf(format, args...) +- event.Error(ctx, "telemetry prompt failed", err) +- } +- +- // Only prompt if we can read/write the prompt config file. +- configDir := s.getenv(GoplsConfigDirEnvvar) // set for testing +- if configDir == "" && testing.Testing() { +- // Unless tests set GoplsConfigDirEnvvar, the prompt is a no op. +- // We don't want tests to interact with os.UserConfigDir(). +- return +- } +- if configDir == "" { +- userDir, err := os.UserConfigDir() +- if err != nil { +- errorf("unable to determine user config dir: %v", err) +- return +- } +- configDir = filepath.Join(userDir, "gopls") +- } +- +- // Read the current prompt file. +- +- var ( +- promptDir = filepath.Join(configDir, "prompt") // prompt configuration directory +- promptFile = filepath.Join(promptDir, "telemetry") // telemetry prompt file +- ) +- +- // prompt states, stored in the prompt file +- const ( +- pUnknown = "" // first time +- pNotReady = "-" // user is not asked yet (either not sampled or not past the grace period) +- pYes = "yes" // user said yes +- pNo = "no" // user said no +- pPending = "pending" // current prompt is still pending +- pFailed = "failed" // prompt was asked but failed +- ) +- validStates := map[string]bool{ +- pNotReady: true, +- pYes: true, +- pNo: true, +- pPending: true, +- pFailed: true, +- } +- +- // Parse the current prompt file. +- var ( +- state = pUnknown +- attempts = 0 // number of times we've asked already +- +- // the followings are recorded after gopls v0.17+. +- token = 0 // valid token is [1, 1000] +- creationTime int64 // unix time sec +- ) +- if content, err := os.ReadFile(promptFile); err == nil { +- if n, _ := fmt.Sscanf(string(content), "%s %d %d %d", &state, &attempts, &creationTime, &token); (n == 2 || n == 4) && validStates[state] { +- // successfully parsed! +- // ~ v0.16: must have only two fields, state and attempts. +- // v0.17 ~: must have all four fields. +- } else { +- state, attempts, creationTime, token = pUnknown, 0, 0, 0 +- // TODO(hyangah): why do we want to present this as an error to user? +- errorf("malformed prompt result %q", string(content)) +- } +- } else if !os.IsNotExist(err) { +- errorf("reading prompt file: %v", err) +- // Something went wrong. Since we don't know how many times we've asked the +- // prompt, err on the side of not asking. +- // +- // But record this in telemetry, in case some users enable telemetry by +- // other means. +- counter.New("gopls/telemetryprompt/corrupted").Inc() +- return +- } +- +- counter.New(fmt.Sprintf("gopls/telemetryprompt/attempts:%d", attempts)).Inc() +- +- // Check terminal conditions. +- +- if state == pYes { +- // Prompt has been accepted. +- // +- // We record this counter for every gopls session, rather than when the +- // prompt actually accepted below, because if we only recorded it in the +- // counter file at the time telemetry is enabled, we'd never upload it, +- // because we exclude any counter files that overlap with a time period +- // that has telemetry uploading is disabled. 
+- counter.New("gopls/telemetryprompt/accepted").Inc() +- return +- } +- if state == pNo { +- // Prompt has been declined. In most cases, this means we'll never see the +- // counter below, but it's possible that the user may enable telemetry by +- // other means later on. If we see a significant number of users that have +- // accepted telemetry but declined the prompt, it may be an indication that +- // the prompt is not working well. +- counter.New("gopls/telemetryprompt/declined").Inc() +- return +- } +- if attempts >= 5 { // pPending or pFailed +- // We've tried asking enough; give up. Record that the prompt expired, in +- // case the user decides to enable telemetry by other means later on. +- // (see also the pNo case). +- counter.New("gopls/telemetryprompt/expired").Inc() +- return +- } +- +- // We only check enabled after (1) the work progress is started, and (2) the +- // prompt file has been read. (1) is for testing purposes, and (2) is so that +- // we record the "gopls/telemetryprompt/accepted" counter for every session. +- if !enabled { +- return // prompt is disabled +- } +- +- if s.telemetryMode() == "on" || s.telemetryMode() == "off" { +- // Telemetry is already on or explicitly off -- nothing to ask about. +- return +- } +- +- // Transition: pUnknown -> pNotReady +- if state == pUnknown { +- // First time; we need to make the prompt dir. +- if err := os.MkdirAll(promptDir, 0777); err != nil { +- errorf("creating prompt dir: %v", err) +- return +- } +- state = pNotReady +- } +- +- // Correct missing values. +- if creationTime == 0 { +- creationTime = time.Now().Unix() +- if v := s.getenv(GoTelemetryGoplsClientStartTimeEnvvar); v != "" { +- if sec, err := strconv.ParseInt(v, 10, 64); err == nil && sec > 0 { +- creationTime = sec +- } +- } +- } +- if token == 0 { +- token = rand.Intn(1000) + 1 +- if v := s.getenv(GoTelemetryGoplsClientTokenEnvvar); v != "" { +- if tok, err := strconv.Atoi(v); err == nil && 1 <= tok && tok <= 1000 { +- token = tok +- } +- } +- } +- +- // Transition: pNotReady -> pPending if sampled +- if state == pNotReady { +- threshold := samplesPerMille +- if v := s.getenv(FakeSamplesPerMille); v != "" { +- if t, err := strconv.Atoi(v); err == nil { +- threshold = t +- } +- } +- if token <= threshold && time.Now().Unix()-creationTime > gracePeriod.Milliseconds()/1000 { +- state = pPending +- } +- } +- +- // Acquire the lock and write the updated state to the prompt file before actually +- // prompting. +- // +- // This ensures that the prompt file is writeable, and that we increment the +- // attempt counter before we prompt, so that we don't end up in a failure +- // mode where we keep prompting and then failing to record the response. +- +- release, ok, err := acquireLockFile(promptFile) +- if err != nil { +- errorf("acquiring prompt: %v", err) +- return +- } +- if !ok { +- // Another process is making decision. +- return +- } +- defer release() +- +- if state != pNotReady { // pPending or pFailed +- attempts++ +- } +- +- pendingContent := fmt.Appendf(nil, "%s %d %d %d", state, attempts, creationTime, token) +- if err := os.WriteFile(promptFile, pendingContent, 0666); err != nil { +- errorf("writing pending state: %v", err) +- return +- } +- +- if state == pNotReady { +- return +- } +- +- var prompt = `Go telemetry helps us improve Go by periodically sending anonymous metrics and crash reports to the Go team. Learn more at https://go.dev/doc/telemetry. +- +-Would you like to enable Go telemetry? 
+-` +- if s.Options().LinkifyShowMessage { +- prompt = `Go telemetry helps us improve Go by periodically sending anonymous metrics and crash reports to the Go team. Learn more at [go.dev/doc/telemetry](https://go.dev/doc/telemetry). +- +-Would you like to enable Go telemetry? +-` +- } +- // TODO(rfindley): investigate a "tell me more" action in combination with ShowDocument. +- params := &protocol.ShowMessageRequestParams{ +- Type: protocol.Info, +- Message: prompt, +- Actions: []protocol.MessageActionItem{ +- {Title: TelemetryYes}, +- {Title: TelemetryNo}, +- }, +- } +- +- item, err := s.client.ShowMessageRequest(ctx, params) +- if err != nil { +- errorf("ShowMessageRequest failed: %v", err) +- // Defensive: ensure item == nil for the logic below. +- item = nil +- } +- +- message := func(typ protocol.MessageType, msg string) { +- if !showMessage(ctx, s.client, typ, msg) { +- // Make sure we record that "telemetry prompt failed". +- errorf("showMessage failed: %v", err) +- } +- } +- +- result := pFailed +- if item == nil { +- // e.g. dialog was dismissed +- errorf("no response") +- } else { +- // Response matches MessageActionItem.Title. +- switch item.Title { +- case TelemetryYes: +- result = pYes +- if err := s.setTelemetryMode("on"); err == nil { +- message(protocol.Info, telemetryOnMessage(s.Options().LinkifyShowMessage)) +- } else { +- errorf("enabling telemetry failed: %v", err) +- msg := fmt.Sprintf("Failed to enable Go telemetry: %v\nTo enable telemetry manually, please run `go run golang.org/x/telemetry/cmd/gotelemetry@latest on`", err) +- message(protocol.Error, msg) +- } +- +- case TelemetryNo: +- result = pNo +- default: +- errorf("unrecognized response %q", item.Title) +- message(protocol.Error, fmt.Sprintf("Unrecognized response %q", item.Title)) +- } +- } +- resultContent := fmt.Appendf(nil, "%s %d %d %d", result, attempts, creationTime, token) +- if err := os.WriteFile(promptFile, resultContent, 0666); err != nil { +- errorf("error writing result state to prompt file: %v", err) +- } +-} +- +-func telemetryOnMessage(linkify bool) string { +- format := `Thank you. Telemetry uploading is now enabled. +- +-To disable telemetry uploading, run %s. +-` +- var runCmd = "`go run golang.org/x/telemetry/cmd/gotelemetry@latest local`" +- if linkify { +- runCmd = "[gotelemetry local](https://golang.org/x/telemetry/cmd/gotelemetry)" +- } +- return fmt.Sprintf(format, runCmd) +-} +- +-// acquireLockFile attempts to "acquire a lock" for writing to path. +-// +-// This is achieved by creating an exclusive lock file at .lock. Lock +-// files expire after a period, at which point acquireLockFile will remove and +-// recreate the lock file. +-// +-// acquireLockFile fails if path is in a directory that doesn't exist. 
+-func acquireLockFile(path string) (func(), bool, error) { +- lockpath := path + ".lock" +- fi, err := os.Stat(lockpath) +- if err == nil { +- if time.Since(fi.ModTime()) > promptTimeout { +- _ = os.Remove(lockpath) // ignore error +- } else { +- return nil, false, nil +- } +- } else if !os.IsNotExist(err) { +- return nil, false, fmt.Errorf("statting lockfile: %v", err) +- } +- +- f, err := os.OpenFile(lockpath, os.O_CREATE|os.O_EXCL, 0666) +- if err != nil { +- if os.IsExist(err) { +- return nil, false, nil +- } +- return nil, false, fmt.Errorf("creating lockfile: %v", err) +- } +- fi, err = f.Stat() +- if err != nil { +- return nil, false, err +- } +- release := func() { +- _ = f.Close() // ignore error +- fi2, err := os.Stat(lockpath) +- if err == nil && os.SameFile(fi, fi2) { +- // Only clean up the lockfile if it's the same file we created. +- // Otherwise, our lock has expired and something else has the lock. +- // +- // There's a race here, in that the file could have changed since the +- // stat above; but given that we've already waited 24h this is extremely +- // unlikely, and acceptable. +- _ = os.Remove(lockpath) +- } +- } +- return release, true, nil +-} +diff -urN a/gopls/internal/server/prompt_test.go b/gopls/internal/server/prompt_test.go +--- a/gopls/internal/server/prompt_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/server/prompt_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,79 +0,0 @@ +-// Copyright 2023 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package server +- +-import ( +- "path/filepath" +- "sync" +- "sync/atomic" +- "testing" +-) +- +-func TestAcquireFileLock(t *testing.T) { +- name := filepath.Join(t.TempDir(), "config.json") +- +- const concurrency = 100 +- var acquired int32 +- var releasers [concurrency]func() +- defer func() { +- for _, r := range releasers { +- if r != nil { +- r() +- } +- } +- }() +- +- var wg sync.WaitGroup +- for i := range releasers { +- wg.Go(func() { +- +- release, ok, err := acquireLockFile(name) +- if err != nil { +- t.Errorf("Acquire failed: %v", err) +- return +- } +- if ok { +- atomic.AddInt32(&acquired, 1) +- releasers[i] = release +- } +- }) +- } +- +- wg.Wait() +- +- if acquired != 1 { +- t.Errorf("Acquire succeeded %d times, expected exactly 1", acquired) +- } +-} +- +-func TestReleaseAndAcquireFileLock(t *testing.T) { +- name := filepath.Join(t.TempDir(), "config.json") +- +- acquire := func() (func(), bool) { +- t.Helper() +- release, ok, err := acquireLockFile(name) +- if err != nil { +- t.Fatal(err) +- } +- return release, ok +- } +- +- release, ok := acquire() +- if !ok { +- t.Fatal("failed to Acquire") +- } +- if release2, ok := acquire(); ok { +- release() +- release2() +- t.Fatalf("Acquire succeeded unexpectedly") +- } +- +- release() +- release3, ok := acquire() +- release3() +- if !ok { +- t.Fatalf("failed to Acquire") +- } +-} +diff -urN a/gopls/internal/server/references.go b/gopls/internal/server/references.go +--- a/gopls/internal/server/references.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/server/references.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,40 +0,0 @@ +-// Copyright 2019 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. 
+- +-package server +- +-import ( +- "context" +- +- "golang.org/x/tools/gopls/internal/file" +- "golang.org/x/tools/gopls/internal/golang" +- "golang.org/x/tools/gopls/internal/label" +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/gopls/internal/telemetry" +- "golang.org/x/tools/gopls/internal/template" +- "golang.org/x/tools/internal/event" +-) +- +-func (s *server) References(ctx context.Context, params *protocol.ReferenceParams) (_ []protocol.Location, rerr error) { +- recordLatency := telemetry.StartLatencyTimer("references") +- defer func() { +- recordLatency(ctx, rerr) +- }() +- +- ctx, done := event.Start(ctx, "server.References", label.URI.Of(params.TextDocument.URI)) +- defer done() +- +- fh, snapshot, release, err := s.session.FileOf(ctx, params.TextDocument.URI) +- if err != nil { +- return nil, err +- } +- defer release() +- switch snapshot.FileKind(fh) { +- case file.Tmpl: +- return template.References(ctx, snapshot, fh, params) +- case file.Go: +- return golang.References(ctx, snapshot, fh, params.Position, params.Context.IncludeDeclaration) +- } +- return nil, nil // empty result +-} +diff -urN a/gopls/internal/server/rename.go b/gopls/internal/server/rename.go +--- a/gopls/internal/server/rename.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/server/rename.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,72 +0,0 @@ +-// Copyright 2019 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package server +- +-import ( +- "context" +- "fmt" +- +- "golang.org/x/tools/gopls/internal/file" +- "golang.org/x/tools/gopls/internal/golang" +- "golang.org/x/tools/gopls/internal/label" +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/internal/event" +-) +- +-func (s *server) Rename(ctx context.Context, params *protocol.RenameParams) (*protocol.WorkspaceEdit, error) { +- countRename.Inc() +- ctx, done := event.Start(ctx, "server.Rename", label.URI.Of(params.TextDocument.URI)) +- defer done() +- +- fh, snapshot, release, err := s.session.FileOf(ctx, params.TextDocument.URI) +- if err != nil { +- return nil, err +- } +- defer release() +- +- if kind := snapshot.FileKind(fh); kind != file.Go { +- return nil, fmt.Errorf("cannot rename in file of type %s", kind) +- } +- +- changes, err := golang.Rename(ctx, snapshot, fh, params.Position, params.NewName) +- if err != nil { +- return nil, err +- } +- return protocol.NewWorkspaceEdit(changes...), nil +-} +- +-// PrepareRename implements the textDocument/prepareRename handler. It may +-// return (nil, nil) if there is no rename at the cursor position, but it is +-// not desirable to display an error to the user. +-// +-// TODO(rfindley): why wouldn't we want to show an error to the user, if the +-// user initiated a rename request at the cursor? +-func (s *server) PrepareRename(ctx context.Context, params *protocol.PrepareRenameParams) (*protocol.PrepareRenamePlaceholder, error) { +- ctx, done := event.Start(ctx, "server.PrepareRename", label.URI.Of(params.TextDocument.URI)) +- defer done() +- +- fh, snapshot, release, err := s.session.FileOf(ctx, params.TextDocument.URI) +- if err != nil { +- return nil, err +- } +- defer release() +- +- if kind := snapshot.FileKind(fh); kind != file.Go { +- return nil, fmt.Errorf("cannot rename in file of type %s", kind) +- } +- +- // Do not return errors here, as it adds clutter. +- // Returning a nil result means there is not a valid rename. 
+- item, usererr, err := golang.PrepareRename(ctx, snapshot, fh, params.Position) +- if err != nil { +- // Return usererr here rather than err, to avoid cluttering the UI with +- // internal error details. +- return nil, usererr +- } +- return &protocol.PrepareRenamePlaceholder{ +- Range: item.Range, +- Placeholder: item.Text, +- }, nil +-} +diff -urN a/gopls/internal/server/selection_range.go b/gopls/internal/server/selection_range.go +--- a/gopls/internal/server/selection_range.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/server/selection_range.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,75 +0,0 @@ +-// Copyright 2022 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package server +- +-import ( +- "context" +- "fmt" +- +- "golang.org/x/tools/go/ast/astutil" +- "golang.org/x/tools/gopls/internal/cache/parsego" +- "golang.org/x/tools/gopls/internal/file" +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/internal/event" +-) +- +-// SelectionRange defines the textDocument/selectionRange feature, +-// which, given a list of positions within a file, +-// reports a linked list of enclosing syntactic blocks, innermost first. +-// +-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#textDocument_selectionRange. +-// +-// This feature can be used by a client to implement "expand selection" in a +-// language-aware fashion. Multiple input positions are supported to allow +-// for multiple cursors, and the entire path up to the whole document is +-// returned for each cursor to avoid multiple round-trips when the user is +-// likely to issue this command multiple times in quick succession. +-func (s *server) SelectionRange(ctx context.Context, params *protocol.SelectionRangeParams) ([]protocol.SelectionRange, error) { +- ctx, done := event.Start(ctx, "server.SelectionRange") +- defer done() +- +- fh, snapshot, release, err := s.session.FileOf(ctx, params.TextDocument.URI) +- if err != nil { +- return nil, err +- } +- defer release() +- +- if kind := snapshot.FileKind(fh); kind != file.Go { +- return nil, fmt.Errorf("SelectionRange not supported for file of type %s", kind) +- } +- +- pgf, err := snapshot.ParseGo(ctx, fh, parsego.Full) +- if err != nil { +- return nil, err +- } +- +- result := make([]protocol.SelectionRange, len(params.Positions)) +- for i, protocolPos := range params.Positions { +- pos, err := pgf.PositionPos(protocolPos) +- if err != nil { +- return nil, err +- } +- +- path, _ := astutil.PathEnclosingInterval(pgf.File, pos, pos) +- +- tail := &result[i] // tail of the Parent linked list, built head first +- +- for j, node := range path { +- rng, err := pgf.NodeRange(node) +- if err != nil { +- return nil, err +- } +- +- // Add node to tail. +- if j > 0 { +- tail.Parent = &protocol.SelectionRange{} +- tail = tail.Parent +- } +- tail.Range = rng +- } +- } +- +- return result, nil +-} +diff -urN a/gopls/internal/server/semantic.go b/gopls/internal/server/semantic.go +--- a/gopls/internal/server/semantic.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/server/semantic.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,56 +0,0 @@ +-// Copyright 2020 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. 
+- +-package server +- +-import ( +- "context" +- +- "golang.org/x/tools/gopls/internal/file" +- "golang.org/x/tools/gopls/internal/golang" +- "golang.org/x/tools/gopls/internal/label" +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/gopls/internal/template" +- "golang.org/x/tools/internal/event" +-) +- +-func (s *server) SemanticTokensFull(ctx context.Context, params *protocol.SemanticTokensParams) (*protocol.SemanticTokens, error) { +- return s.semanticTokens(ctx, params.TextDocument, nil) +-} +- +-func (s *server) SemanticTokensRange(ctx context.Context, params *protocol.SemanticTokensRangeParams) (*protocol.SemanticTokens, error) { +- return s.semanticTokens(ctx, params.TextDocument, ¶ms.Range) +-} +- +-func (s *server) semanticTokens(ctx context.Context, td protocol.TextDocumentIdentifier, rng *protocol.Range) (*protocol.SemanticTokens, error) { +- ctx, done := event.Start(ctx, "server.semanticTokens", label.URI.Of(td.URI)) +- defer done() +- +- fh, snapshot, release, err := s.session.FileOf(ctx, td.URI) +- if err != nil { +- return nil, err +- } +- defer release() +- +- if snapshot.Options().SemanticTokens { +- switch snapshot.FileKind(fh) { +- case file.Tmpl: +- return template.SemanticTokens(ctx, snapshot, fh.URI()) +- case file.Go: +- return golang.SemanticTokens(ctx, snapshot, fh, rng) +- } +- } +- +- // Not enabled, or unsupported file type: return empty result. +- // +- // Returning an empty response is necessary to invalidate +- // semantic tokens in VS Code (and perhaps other editors). +- // Previously, we returned an error, but that had the side effect +- // of noisy "semantictokens are disabled" logs on every keystroke. +- // +- // We must return a non-nil Data slice for JSON serialization. +- // We do not return an empty field with "omitempty" set, +- // as it is not marked optional in the protocol (golang/go#67885). +- return &protocol.SemanticTokens{Data: []uint32{}}, nil +-} +diff -urN a/gopls/internal/server/server.go b/gopls/internal/server/server.go +--- a/gopls/internal/server/server.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/server/server.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,626 +0,0 @@ +-// Copyright 2018 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-// Package server defines gopls' implementation of the LSP server +-// interface, [protocol.Server]. Call [New] to create an instance. +-package server +- +-import ( +- "context" +- "crypto/rand" +- "embed" +- "encoding/base64" +- "fmt" +- "io" +- "io/fs" +- "log" +- "net" +- "net/http" +- "net/url" +- "os" +- paths "path" +- "strconv" +- "strings" +- "sync" +- "sync/atomic" +- +- "golang.org/x/tools/gopls/internal/cache" +- "golang.org/x/tools/gopls/internal/cache/metadata" +- "golang.org/x/tools/gopls/internal/golang" +- "golang.org/x/tools/gopls/internal/golang/splitpkg" +- "golang.org/x/tools/gopls/internal/progress" +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/gopls/internal/settings" +- "golang.org/x/tools/gopls/internal/util/bug" +- "golang.org/x/tools/internal/event" +-) +- +-// New creates an LSP server and binds it to handle incoming client +-// messages on the supplied stream. 
+-func New(session *cache.Session, client protocol.ClientCloser, options *settings.Options) protocol.Server { +- const concurrentAnalyses = 1 +- // If this assignment fails to compile after a protocol +- // upgrade, it means that one or more new methods need new +- // stub declarations in unimplemented.go. +- return &server{ +- diagnostics: make(map[protocol.DocumentURI]*fileDiagnostics), +- watchedGlobPatterns: nil, // empty +- changedFiles: make(map[protocol.DocumentURI]unit), +- session: session, +- client: client, +- diagnosticsSema: make(chan unit, concurrentAnalyses), +- progress: progress.NewTracker(client), +- options: options, +- viewsToDiagnose: make(map[*cache.View]uint64), +- } +-} +- +-type serverState int +- +-const ( +- serverCreated = serverState(iota) +- serverInitializing // set once the server has received "initialize" request +- serverInitialized // set once the server has received "initialized" request +- serverShutDown +-) +- +-func (s serverState) String() string { +- switch s { +- case serverCreated: +- return "created" +- case serverInitializing: +- return "initializing" +- case serverInitialized: +- return "initialized" +- case serverShutDown: +- return "shutDown" +- } +- return fmt.Sprintf("(unknown state: %d)", int(s)) +-} +- +-// server implements the [protocol.Server] interface. +-// +-// A server holds the server-side state of a single client/server +-// session or connection; it conceptually corresponds to a single call +-// to accept(2), not to listen(2) as the name "server" might suggest. +-type server struct { +- client protocol.ClientCloser +- +- stateMu sync.Mutex +- state serverState +- // notifications generated before serverInitialized +- notifications []*protocol.ShowMessageParams +- +- session *cache.Session +- +- // changedFiles tracks files for which there has been a textDocument/didChange. +- changedFilesMu sync.Mutex +- changedFiles map[protocol.DocumentURI]unit +- +- // folders is only valid between initialize and initialized, and holds the +- // set of folders to build views for when we are ready. +- // Only the valid, non-empty 'file'-scheme URIs will be added. +- pendingFolders []protocol.WorkspaceFolder +- +- // watchedGlobPatterns is the set of glob patterns that we have requested +- // the client watch on disk. It will be updated as the set of directories +- // that the server should watch changes. +- // The map field may be reassigned but the map is immutable. +- watchedGlobPatternsMu sync.Mutex +- watchedGlobPatterns map[protocol.RelativePattern]unit +- watchRegistrationCount int +- +- diagnosticsMu sync.Mutex // guards map and its values +- diagnostics map[protocol.DocumentURI]*fileDiagnostics +- +- // diagnosticsSema limits the concurrency of diagnostics runs, which can be +- // expensive. +- diagnosticsSema chan unit +- +- progress *progress.Tracker +- +- // When the workspace fails to load, we show its status through a progress +- // report with an error message. +- criticalErrorStatusMu sync.Mutex +- criticalErrorStatus *progress.WorkDone +- +- // Track an ongoing CPU profile created with the StartProfile command and +- // terminated with the StopProfile command. +- ongoingProfileMu sync.Mutex +- ongoingProfile *os.File // if non-nil, an ongoing profile is writing to this file +- +- // Track most recently requested options. 
+- optionsMu sync.Mutex +- options *settings.Options +- +- // Track the most recent completion results, for measuring completion efficacy +- efficacyMu sync.Mutex +- efficacyURI protocol.DocumentURI +- efficacyVersion int32 +- efficacyItems []protocol.CompletionItem +- efficacyPos protocol.Position +- +- // Web server (for package documentation, etc) associated with this +- // LSP server. Opened on demand, and closed during LSP Shutdown. +- webOnce sync.Once +- web *web +- webErr error +- +- // # Modification tracking and diagnostics +- // +- // For the purpose of tracking diagnostics, we need a monotonically +- // increasing clock. Each time a change occurs on the server, this clock is +- // incremented and the previous diagnostics pass is cancelled. When the +- // changed is processed, the Session (via DidModifyFiles) determines which +- // Views are affected by the change and these views are added to the +- // viewsToDiagnose set. Then the server calls diagnoseChangedViews +- // in a separate goroutine. Any Views that successfully complete their +- // diagnostics are removed from the viewsToDiagnose set, provided they haven't +- // been subsequently marked for re-diagnosis (as determined by the latest +- // modificationID referenced by viewsToDiagnose). +- // +- // In this way, we enforce eventual completeness of the diagnostic set: any +- // views requiring diagnosis are diagnosed, though possibly at a later point +- // in time. Notably, the logic in Session.DidModifyFiles to determines if a +- // view needs diagnosis considers whether any packages in the view were +- // invalidated. Consider the following sequence of snapshots for a given view +- // V: +- // +- // C1 C2 +- // S1 -> S2 -> S3 +- // +- // In this case, suppose that S1 was fully type checked, and then two changes +- // C1 and C2 occur in rapid succession, to a file in their package graph but +- // perhaps not enclosed by V's root. In this case, the logic of +- // DidModifyFiles will detect that V needs to be reloaded following C1. In +- // order for our eventual consistency to be sound, we need to avoid the race +- // where S2 is being diagnosed, C2 arrives, and S3 is not detected as needing +- // diagnosis because the relevant package has not yet been computed in S2. To +- // achieve this, we only remove V from viewsToDiagnose if the diagnosis of S2 +- // completes before C2 is processed, which we can confirm by checking +- // S2.BackgroundContext(). +- modificationMu sync.Mutex +- cancelPrevDiagnostics func() +- viewsToDiagnose map[*cache.View]uint64 // View -> modification at which it last required diagnosis +- lastModificationID uint64 // incrementing clock +- +- runGovulncheckInProgress atomic.Bool +-} +- +-func (s *server) WorkDoneProgressCancel(ctx context.Context, params *protocol.WorkDoneProgressCancelParams) error { +- ctx, done := event.Start(ctx, "server.WorkDoneProgressCancel") +- defer done() +- +- return s.progress.Cancel(params.Token) +-} +- +-// web encapsulates the web server associated with an LSP server. +-// It is used for package documentation and other queries +-// where HTML makes more sense than a client editor UI. +-// +-// Example URL: +-// +-// http://127.0.0.1:PORT/gopls/SECRET/... +-// +-// where +-// - PORT is the random port number; +-// - "gopls" helps the reader guess which program is the server; +-// - SECRET is the 64-bit token; and +-// - ... is the material part of the endpoint. 
+-// +-// Valid endpoints: +-// +-// open?file=%s&line=%d&col=%d - open a file +-// pkg/PKGPATH?view=%s - show doc for package in a given view +-// assembly?pkg=%s&view=%s&symbol=%s - show assembly of specified func symbol +-// freesymbols?file=%s&range=%d:%d:%d:%d:&view=%s - show report of free symbols +-// splitpkg?pkg=%s&view=%s - show "split package" HTML for given package/view +-// splitpkg-json?pkg=%s&view=%s - query component dependency graph for given package/view +-// splitpkg-components?pkg=%s&view=%s - update component definitions for given package/view +-type web struct { +- server *http.Server +- addr url.URL // "http://127.0.0.1:PORT/gopls/SECRET" +- mux *http.ServeMux +-} +- +-// getWeb returns the web server associated with this +-// LSP server, creating it on first request. +-func (s *server) getWeb() (*web, error) { +- s.webOnce.Do(func() { +- s.web, s.webErr = s.initWeb() +- }) +- return s.web, s.webErr +-} +- +-// initWeb starts the local web server through which gopls +-// serves package documentation and suchlike. +-// +-// Clients should use [getWeb]. +-func (s *server) initWeb() (*web, error) { +- // Use 64 random bits as the base of the URL namespace. +- // This ensures that URLs are unguessable to any local +- // processes that connect to the server, preventing +- // exfiltration of source code. +- // +- // (Note: depending on the LSP client, URLs that are passed to +- // it via showDocument and that result in the opening of a +- // browser tab may be transiently published through the argv +- // array of the open(1) or xdg-open(1) command.) +- token := make([]byte, 8) +- if _, err := rand.Read(token); err != nil { +- return nil, fmt.Errorf("generating secret token: %v", err) +- } +- +- // Pick any free port. +- listener, err := net.Listen("tcp", "127.0.0.1:0") +- if err != nil { +- return nil, err +- } +- +- // -- There should be no early returns after this point. -- +- +- // The root mux is not authenticated. +- rootMux := http.NewServeMux() +- rootMux.HandleFunc("/", func(w http.ResponseWriter, req *http.Request) { +- http.Error(w, "request URI lacks authentication segment", http.StatusUnauthorized) +- }) +- rootMux.HandleFunc("/favicon.ico", func(w http.ResponseWriter, req *http.Request) { +- http.Redirect(w, req, "/assets/favicon.ico", http.StatusMovedPermanently) +- }) +- rootMux.HandleFunc("/hang", func(w http.ResponseWriter, req *http.Request) { +- // This endpoint hangs until cancelled. +- // It is used by JS to detect server disconnect. +- <-req.Context().Done() +- }) +- +- // Serve assets (JS, PNG, etc) from embedded data, +- // except during local development. +- fs := fs.FS(assets) +- // fs = os.DirFS("/Users/adonovan/w/xtools/gopls/internal/server") // uncomment during development +- rootMux.Handle("/assets/", http.FileServer(http.FS(fs))) +- +- secret := "/gopls/" + base64.RawURLEncoding.EncodeToString(token) +- webMux := http.NewServeMux() +- rootMux.Handle(secret+"/", withPanicHandler(http.StripPrefix(secret, webMux))) +- +- webServer := &http.Server{Addr: listener.Addr().String(), Handler: rootMux} +- go func() { +- // This should run until LSP Shutdown, at which point +- // it will return ErrServerClosed. Any other error +- // means it failed to start. 
+- if err := webServer.Serve(listener); err != nil { +- if err != http.ErrServerClosed { +- log.Print(err) +- } +- } +- }() +- +- web := &web{ +- server: webServer, +- addr: url.URL{Scheme: "http", Host: webServer.Addr, Path: secret}, +- mux: webMux, +- } +- +- // The /src handler allows the browser to request that the +- // LSP client editor open a file; see web.SrcURL. +- webMux.HandleFunc("/src", func(w http.ResponseWriter, req *http.Request) { +- if err := req.ParseForm(); err != nil { +- http.Error(w, err.Error(), http.StatusBadRequest) +- return +- } +- uri := protocol.URIFromPath(req.Form.Get("file")) +- line, _ := strconv.Atoi(req.Form.Get("line")) // 1-based +- col, _ := strconv.Atoi(req.Form.Get("col")) // 1-based UTF-8 +- posn := protocol.Position{ +- Line: uint32(line - 1), +- Character: uint32(col - 1), // TODO(adonovan): map to UTF-16 +- } +- openClientEditor(req.Context(), s.client, protocol.Location{ +- URI: uri, +- Range: protocol.Range{Start: posn, End: posn}, +- }, s.Options()) +- }) +- +- // getSnapshot returns the snapshot for the view=... request parameter. +- // On success, the caller must call the snapshot's release function; +- // callers may assume that req.ParseForm succeeded. +- // On failure, it reports an HTTP error. +- getSnapshot := func(w http.ResponseWriter, req *http.Request) (*cache.Snapshot, func(), bool) { +- if err := req.ParseForm(); err != nil { +- http.Error(w, err.Error(), http.StatusBadRequest) +- return nil, nil, false +- } +- viewID := req.Form.Get("view") +- if viewID == "" { +- http.Error(w, "no view=... parameter", http.StatusBadRequest) +- return nil, nil, false +- } +- view, err := s.session.View(viewID) +- if err != nil { +- http.Error(w, err.Error(), http.StatusNotFound) +- return nil, nil, false +- } +- snapshot, release, err := view.Snapshot() +- if err != nil { +- http.Error(w, err.Error(), http.StatusInternalServerError) +- return nil, nil, false +- } +- return snapshot, release, true +- } +- +- // The /pkg/PATH&view=... handler shows package documentation for PATH. +- webMux.Handle("/pkg/", http.StripPrefix("/pkg/", http.HandlerFunc(func(w http.ResponseWriter, req *http.Request) { +- snapshot, release, ok := getSnapshot(w, req) +- if !ok { +- return +- } +- defer release() +- +- // Find package by path. +- pkgPath := metadata.PackagePath(req.URL.Path) +- mps := snapshot.MetadataGraph().ForPackagePath[pkgPath] +- if len(mps) == 0 { +- // TODO(adonovan): what should we do for external test packages? +- http.Error(w, "package not found", http.StatusNotFound) +- return +- } +- found := mps[0] +- +- // Type-check the package and render its documentation. +- pkgs, err := snapshot.TypeCheck(req.Context(), found.ID) +- if err != nil { +- http.Error(w, err.Error(), http.StatusInternalServerError) +- return +- } +- content, err := golang.PackageDocHTML(snapshot.View().ID(), pkgs[0], web) +- if err != nil { +- http.Error(w, err.Error(), http.StatusInternalServerError) +- return +- } +- w.Write(content) +- }))) +- +- // The /freesymbols?file=...&range=...&view=... handler shows +- // free symbols referenced by the selection. +- webMux.HandleFunc("/freesymbols", func(w http.ResponseWriter, req *http.Request) { +- snapshot, release, ok := getSnapshot(w, req) +- if !ok { +- return +- } +- defer release() +- +- // Get selection range and type-check. 
+- loc := protocol.Location{ +- URI: protocol.DocumentURI(req.Form.Get("file")), +- } +- if _, err := fmt.Sscanf(req.Form.Get("range"), "%d:%d:%d:%d", +- &loc.Range.Start.Line, +- &loc.Range.Start.Character, +- &loc.Range.End.Line, +- &loc.Range.End.Character, +- ); err != nil { +- http.Error(w, "invalid range", http.StatusInternalServerError) +- return +- } +- pkg, pgf, err := golang.NarrowestPackageForFile(req.Context(), snapshot, loc.URI) +- if err != nil { +- http.Error(w, err.Error(), http.StatusInternalServerError) +- return +- } +- start, end, err := pgf.RangePos(loc.Range) +- if err != nil { +- http.Error(w, err.Error(), http.StatusInternalServerError) +- return +- } +- +- // Produce report. +- html := golang.FreeSymbolsHTML(snapshot.View().ID(), pkg, pgf, start, end, web) +- w.Write(html) +- }) +- +- // The /assembly?pkg=...&view=...&symbol=... handler shows +- // the assembly of the current function. +- webMux.HandleFunc("/assembly", func(w http.ResponseWriter, req *http.Request) { +- snapshot, release, ok := getSnapshot(w, req) +- if !ok { +- return +- } +- defer release() +- +- // Get other parameters. +- var ( +- pkgID = metadata.PackageID(req.Form.Get("pkg")) +- symbol = req.Form.Get("symbol") +- ) +- if pkgID == "" || symbol == "" { +- http.Error(w, "/assembly requires pkg, symbol", http.StatusBadRequest) +- return +- } +- +- ctx := req.Context() +- pkgs, err := snapshot.TypeCheck(ctx, pkgID) +- if err != nil { +- http.Error(w, err.Error(), http.StatusInternalServerError) +- return +- } +- pkg := pkgs[0] +- +- // Produce report. +- golang.AssemblyHTML(ctx, snapshot, w, pkg, symbol, web) +- }) +- +- // The /splitpkg?pkg=...&view=... handler shows +- // the "split package" tool (HTML) for the specified package/view. +- webMux.HandleFunc("/splitpkg", func(w http.ResponseWriter, req *http.Request) { +- snapshot, release, ok := getSnapshot(w, req) +- if !ok { +- return +- } +- defer release() +- +- // Get metadata for pkg. +- pkgID := metadata.PackageID(req.Form.Get("pkg")) +- if pkgID == "" { +- http.Error(w, "/splitpkg requires pkg", http.StatusBadRequest) +- return +- } +- mp := snapshot.Metadata(pkgID) +- if mp == nil { +- http.Error(w, "no such package: "+string(pkgID), http.StatusInternalServerError) +- return +- } +- +- w.Write(splitpkg.HTML(mp.PkgPath)) +- }) +- +- // The /splitpkg-json?pkg=...&view=... handler returns the symbol reference graph. +- webMux.HandleFunc("/splitpkg-json", func(w http.ResponseWriter, req *http.Request) { +- snapshot, release, ok := getSnapshot(w, req) +- if !ok { +- return +- } +- defer release() +- +- // Get type information for pkg. +- pkgID := metadata.PackageID(req.Form.Get("pkg")) +- if pkgID == "" { +- http.Error(w, "/splitpkg-json requires pkg", http.StatusBadRequest) +- return +- } +- pkgs, err := snapshot.TypeCheck(req.Context(), pkgID) +- if err != nil { +- http.Error(w, err.Error(), http.StatusInternalServerError) +- return +- } +- pkg := pkgs[0] +- +- data, err := splitpkg.JSON(pkg, web) +- if err != nil { +- http.Error(w, err.Error(), http.StatusInternalServerError) +- return +- } +- w.Write(data) +- }) +- +- // The /splitpkg-components?pkg=...&view=... handler updates the components mapping. +- // for the specified package, causing it to be saved persistently, and +- // returned by future /splitpkg-json queries. +- webMux.HandleFunc("/splitpkg-components", func(w http.ResponseWriter, req *http.Request) { +- snapshot, release, ok := getSnapshot(w, req) +- if !ok { +- return +- } +- defer release() +- +- // Get metadata for pkg. 
+- pkgID := metadata.PackageID(req.Form.Get("pkg")) +- if pkgID == "" { +- http.Error(w, "/splitpkg-components requires pkg", http.StatusBadRequest) +- return +- } +- mp := snapshot.Metadata(pkgID) +- if mp == nil { +- http.Error(w, "no such package: "+string(pkgID), http.StatusInternalServerError) +- return +- } +- +- data, err := io.ReadAll(req.Body) +- if err != nil { +- msg := fmt.Sprintf("reading request body: %v", err) +- http.Error(w, msg, http.StatusBadRequest) +- return +- } +- +- if err := splitpkg.UpdateComponentsJSON(pkgID, data); err != nil { +- http.Error(w, err.Error(), http.StatusBadRequest) +- return +- } +- }) +- +- return web, nil +-} +- +-// assets holds our static web server content. +-// +-//go:embed assets/* +-var assets embed.FS +- +-// SrcURL returns a /src URL that, when visited, causes the client +-// editor to open the specified file/line/column (in 1-based UTF-8 +-// coordinates). +-// +-// (Rendering may generate hundreds of positions across files of many +-// packages, so don't convert to LSP coordinates yet: wait until the +-// URL is opened.) +-func (w *web) SrcURL(filename string, line, col8 int) protocol.URI { +- query := fmt.Sprintf("file=%s&line=%d&col=%d", +- url.QueryEscape(filename), +- line, +- col8) +- return w.url("src", query, "") +-} +- +-// PkgURL returns a /pkg URL for the documentation of the specified package. +-// The optional fragment must be of the form "Println" or "Buffer.WriteString". +-func (w *web) PkgURL(viewID string, path golang.PackagePath, fragment string) protocol.URI { +- query := "view=" + url.QueryEscape(viewID) +- return w.url("pkg/"+string(path), query, fragment) +-} +- +-// freesymbolsURL returns a /freesymbols URL for a report +-// on the free symbols referenced within the selection span (loc). +-func (w *web) freesymbolsURL(viewID string, loc protocol.Location) protocol.URI { +- query := fmt.Sprintf("file=%s&range=%d:%d:%d:%d&view=%s", +- url.QueryEscape(string(loc.URI)), +- loc.Range.Start.Line, +- loc.Range.Start.Character, +- loc.Range.End.Line, +- loc.Range.End.Character, +- url.QueryEscape(viewID)) +- return w.url("freesymbols", query, "") +-} +- +-// assemblyURL returns the URL of an assembly listing of the specified function symbol. +-func (w *web) assemblyURL(viewID, packageID, symbol string) protocol.URI { +- query := fmt.Sprintf("view=%s&pkg=%s&symbol=%s", +- url.QueryEscape(viewID), +- url.QueryEscape(packageID), +- url.QueryEscape(symbol)) +- return w.url("assembly", query, "") +-} +- +-// splitpkgURL returns the URL of the "split package" HTML page for the specified package. +-func (w *web) splitpkgURL(viewID, packageID string) protocol.URI { +- query := fmt.Sprintf("view=%s&pkg=%s", +- url.QueryEscape(viewID), +- url.QueryEscape(packageID)) +- return w.url("splitpkg", query, "") +-} +- +-// url returns a URL by joining a relative path, an (encoded) query, +-// and an (unencoded) fragment onto the authenticated base URL of the +-// web server. +-func (w *web) url(path, query, fragment string) protocol.URI { +- url2 := w.addr +- url2.Path = paths.Join(url2.Path, strings.TrimPrefix(path, "/")) +- url2.RawQuery = query +- url2.Fragment = fragment +- return protocol.URI(url2.String()) +-} +- +-// withPanicHandler wraps an HTTP handler with telemetry-reporting of +-// panics that would otherwise be silently recovered by the net/http +-// root handler. 
+-func withPanicHandler(h http.Handler) http.HandlerFunc { +- return func(w http.ResponseWriter, req *http.Request) { +- panicked := true +- defer func() { +- if panicked { +- bug.Report("panic in HTTP handler") +- } +- }() +- h.ServeHTTP(w, req) +- panicked = false +- } +-} +diff -urN a/gopls/internal/server/signature_help.go b/gopls/internal/server/signature_help.go +--- a/gopls/internal/server/signature_help.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/server/signature_help.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,49 +0,0 @@ +-// Copyright 2018 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package server +- +-import ( +- "context" +- +- "golang.org/x/tools/gopls/internal/file" +- "golang.org/x/tools/gopls/internal/golang" +- "golang.org/x/tools/gopls/internal/label" +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/internal/event" +-) +- +-func (s *server) SignatureHelp(ctx context.Context, params *protocol.SignatureHelpParams) (*protocol.SignatureHelp, error) { +- ctx, done := event.Start(ctx, "server.SignatureHelp", label.URI.Of(params.TextDocument.URI)) +- defer done() +- +- fh, snapshot, release, err := s.session.FileOf(ctx, params.TextDocument.URI) +- if err != nil { +- return nil, err +- } +- defer release() +- +- if snapshot.FileKind(fh) != file.Go { +- return nil, nil // empty result +- } +- +- info, err := golang.SignatureHelp(ctx, snapshot, fh, params) +- if err != nil { +- // TODO(rfindley): is this correct? Apparently, returning an error from +- // signatureHelp is distracting in some editors, though I haven't confirmed +- // that recently. +- // +- // It's unclear whether we still need to avoid returning this error result. +- event.Error(ctx, "signature help failed", err, label.Position.Of(params.Position)) +- return nil, nil +- } +- if info == nil { +- return nil, nil +- } +- return &protocol.SignatureHelp{ +- Signatures: []protocol.SignatureInformation{*info}, +- ActiveSignature: 0, +- ActiveParameter: info.ActiveParameter, +- }, nil +-} +diff -urN a/gopls/internal/server/symbols.go b/gopls/internal/server/symbols.go +--- a/gopls/internal/server/symbols.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/server/symbols.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,59 +0,0 @@ +-// Copyright 2019 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. 
+- +-package server +- +-import ( +- "context" +- +- "golang.org/x/tools/gopls/internal/file" +- "golang.org/x/tools/gopls/internal/golang" +- "golang.org/x/tools/gopls/internal/label" +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/gopls/internal/template" +- "golang.org/x/tools/internal/event" +-) +- +-func (s *server) DocumentSymbol(ctx context.Context, params *protocol.DocumentSymbolParams) ([]any, error) { +- ctx, done := event.Start(ctx, "server.DocumentSymbol", label.URI.Of(params.TextDocument.URI)) +- defer done() +- +- fh, snapshot, release, err := s.session.FileOf(ctx, params.TextDocument.URI) +- if err != nil { +- return nil, err +- } +- defer release() +- +- var docSymbols []protocol.DocumentSymbol +- switch snapshot.FileKind(fh) { +- case file.Tmpl: +- docSymbols, err = template.DocumentSymbols(snapshot, fh) +- case file.Go: +- docSymbols, err = golang.DocumentSymbols(ctx, snapshot, fh) +- default: +- return nil, nil // empty result +- } +- if err != nil { +- event.Error(ctx, "DocumentSymbols failed", err) +- return nil, nil // empty result +- } +- // Convert the symbols to an interface array. +- // TODO: Remove this once the lsp deprecates SymbolInformation. +- symbols := make([]any, len(docSymbols)) +- for i, s := range docSymbols { +- if snapshot.Options().HierarchicalDocumentSymbolSupport { +- symbols[i] = s +- continue +- } +- // If the client does not support hierarchical document symbols, then +- // we need to be backwards compatible for now and return SymbolInformation. +- symbols[i] = protocol.SymbolInformation{ +- Name: s.Name, +- Kind: s.Kind, +- Deprecated: s.Deprecated, +- Location: params.TextDocument.URI.Location(s.Range), +- } +- } +- return symbols, nil +-} +diff -urN a/gopls/internal/server/text_synchronization.go b/gopls/internal/server/text_synchronization.go +--- a/gopls/internal/server/text_synchronization.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/server/text_synchronization.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,420 +0,0 @@ +-// Copyright 2019 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package server +- +-import ( +- "bytes" +- "context" +- "errors" +- "fmt" +- "path/filepath" +- "strings" +- "sync" +- +- "golang.org/x/tools/gopls/internal/cache" +- "golang.org/x/tools/gopls/internal/file" +- "golang.org/x/tools/gopls/internal/golang" +- "golang.org/x/tools/gopls/internal/label" +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/internal/event" +- "golang.org/x/tools/internal/jsonrpc2" +- "golang.org/x/tools/internal/xcontext" +-) +- +-// ModificationSource identifies the origin of a change. +-type ModificationSource int +- +-const ( +- // FromDidOpen is from a didOpen notification. +- FromDidOpen = ModificationSource(iota) +- +- // FromDidChange is from a didChange notification. +- FromDidChange +- +- // FromDidChangeWatchedFiles is from didChangeWatchedFiles notification. +- FromDidChangeWatchedFiles +- +- // FromDidSave is from a didSave notification. +- FromDidSave +- +- // FromDidClose is from a didClose notification. +- FromDidClose +- +- // FromDidChangeConfiguration is from a didChangeConfiguration notification. +- FromDidChangeConfiguration +- +- // FromRegenerateCgo refers to file modifications caused by regenerating +- // the cgo sources for the workspace. 
+- FromRegenerateCgo +- +- // FromInitialWorkspaceLoad refers to the loading of all packages in the +- // workspace when the view is first created. +- FromInitialWorkspaceLoad +- +- // FromCheckUpgrades refers to state changes resulting from the CheckUpgrades +- // command, which queries module upgrades. +- FromCheckUpgrades +- +- // FromResetGoModDiagnostics refers to state changes resulting from the +- // ResetGoModDiagnostics command. +- FromResetGoModDiagnostics +- +- // FromToggleCompilerOptDetails refers to state changes resulting from toggling +- // a package's compiler optimization details flag. +- FromToggleCompilerOptDetails +-) +- +-func (m ModificationSource) String() string { +- switch m { +- case FromDidOpen: +- return "opened files" +- case FromDidChange: +- return "changed files" +- case FromDidChangeWatchedFiles: +- return "files changed on disk" +- case FromDidSave: +- return "saved files" +- case FromDidClose: +- return "close files" +- case FromRegenerateCgo: +- return "regenerate cgo" +- case FromInitialWorkspaceLoad: +- return "initial workspace load" +- case FromCheckUpgrades: +- return "from check upgrades" +- case FromResetGoModDiagnostics: +- return "from resetting go.mod diagnostics" +- default: +- return "unknown file modification" +- } +-} +- +-func (s *server) DidOpen(ctx context.Context, params *protocol.DidOpenTextDocumentParams) error { +- ctx, done := event.Start(ctx, "server.DidOpen", label.URI.Of(params.TextDocument.URI)) +- defer done() +- +- uri := params.TextDocument.URI +- // There may not be any matching view in the current session. If that's +- // the case, try creating a new view based on the opened file path. +- // +- // TODO(golang/go#57979): revisit creating a folder here. We should separate +- // the logic for managing folders from the logic for managing views. But it +- // does make sense to ensure at least one workspace folder the first time a +- // file is opened, and we can't do that inside didModifyFiles because we +- // don't want to request configuration while holding a lock. +- if len(s.session.Views()) == 0 { +- dir := uri.DirPath() +- s.addFolders(ctx, []protocol.WorkspaceFolder{{ +- URI: string(protocol.URIFromPath(dir)), +- Name: filepath.Base(dir), +- }}) +- } +- return s.didModifyFiles(ctx, []file.Modification{{ +- URI: uri, +- Action: file.Open, +- Version: params.TextDocument.Version, +- Text: []byte(params.TextDocument.Text), +- LanguageID: params.TextDocument.LanguageID, +- }}, FromDidOpen) +-} +- +-func (s *server) DidChange(ctx context.Context, params *protocol.DidChangeTextDocumentParams) error { +- ctx, done := event.Start(ctx, "server.DidChange", label.URI.Of(params.TextDocument.URI)) +- defer done() +- +- uri := params.TextDocument.URI +- text, err := s.changedText(ctx, uri, params.ContentChanges) +- if err != nil { +- return err +- } +- c := file.Modification{ +- URI: uri, +- Action: file.Change, +- Version: params.TextDocument.Version, +- Text: text, +- } +- if err := s.didModifyFiles(ctx, []file.Modification{c}, FromDidChange); err != nil { +- return err +- } +- return s.warnAboutModifyingGeneratedFiles(ctx, uri) +-} +- +-// warnAboutModifyingGeneratedFiles shows a warning if a user tries to edit a +-// generated file for the first time. 
+-func (s *server) warnAboutModifyingGeneratedFiles(ctx context.Context, uri protocol.DocumentURI) error { +- s.changedFilesMu.Lock() +- _, ok := s.changedFiles[uri] +- if !ok { +- s.changedFiles[uri] = struct{}{} +- } +- s.changedFilesMu.Unlock() +- +- // This file has already been edited before. +- if ok { +- return nil +- } +- +- // Warn the user that they are editing a generated file, but +- // don't try to stop them: there are often good reasons to do +- // so, such as adding temporary logging, or evaluating changes +- // to the generated code without the trouble of modifying the +- // generator logic (see #73959). +- snapshot, release, err := s.session.SnapshotOf(ctx, uri) +- if err != nil { +- return err +- } +- isGenerated := golang.IsGenerated(ctx, snapshot, uri) +- release() +- if isGenerated { +- msg := fmt.Sprintf("Warning: editing %s, a generated file.", uri.Base()) +- showMessage(ctx, s.client, protocol.Warning, msg) +- } +- return nil +-} +- +-func (s *server) DidChangeWatchedFiles(ctx context.Context, params *protocol.DidChangeWatchedFilesParams) error { +- ctx, done := event.Start(ctx, "server.DidChangeWatchedFiles") +- defer done() +- +- var modifications []file.Modification +- for _, change := range params.Changes { +- action := changeTypeToFileAction(change.Type) +- modifications = append(modifications, file.Modification{ +- URI: change.URI, +- Action: action, +- OnDisk: true, +- }) +- } +- return s.didModifyFiles(ctx, modifications, FromDidChangeWatchedFiles) +-} +- +-func (s *server) DidSave(ctx context.Context, params *protocol.DidSaveTextDocumentParams) error { +- ctx, done := event.Start(ctx, "server.DidSave", label.URI.Of(params.TextDocument.URI)) +- defer done() +- +- c := file.Modification{ +- URI: params.TextDocument.URI, +- Action: file.Save, +- } +- if params.Text != nil { +- c.Text = []byte(*params.Text) +- } +- return s.didModifyFiles(ctx, []file.Modification{c}, FromDidSave) +-} +- +-func (s *server) DidClose(ctx context.Context, params *protocol.DidCloseTextDocumentParams) error { +- ctx, done := event.Start(ctx, "server.DidClose", label.URI.Of(params.TextDocument.URI)) +- defer done() +- +- return s.didModifyFiles(ctx, []file.Modification{ +- { +- URI: params.TextDocument.URI, +- Action: file.Close, +- Version: -1, +- Text: nil, +- }, +- }, FromDidClose) +-} +- +-func (s *server) didModifyFiles(ctx context.Context, modifications []file.Modification, cause ModificationSource) error { +- // wg guards two conditions: +- // 1. didModifyFiles is complete +- // 2. the goroutine diagnosing changes on behalf of didModifyFiles is +- // complete, if it was started +- // +- // Both conditions must be satisfied for the purpose of testing: we don't +- // want to observe the completion of change processing until we have received +- // all diagnostics as well as all server->client notifications done on behalf +- // of this function. +- var wg sync.WaitGroup +- wg.Add(1) +- defer wg.Done() +- +- if s.Options().VerboseWorkDoneProgress { +- work := s.progress.Start(ctx, DiagnosticWorkTitle(cause), "Calculating file diagnostics...", nil, nil) +- go func() { +- wg.Wait() +- work.End(ctx, "Done.") +- }() +- } +- +- s.stateMu.Lock() +- if s.state >= serverShutDown { +- // This state check does not prevent races below, and exists only to +- // produce a better error message. The actual race to the cache should be +- // guarded by Session.viewMu. 
+- s.stateMu.Unlock() +- return errors.New("server is shut down") +- } +- s.stateMu.Unlock() +- +- // If the set of changes included directories, expand those directories +- // to their files. +- modifications = s.session.ExpandModificationsToDirectories(ctx, modifications) +- +- viewsToDiagnose, err := s.session.DidModifyFiles(ctx, modifications) +- if err != nil { +- return err +- } +- +- // golang/go#50267: diagnostics should be re-sent after each change. +- for _, mod := range modifications { +- s.mustPublishDiagnostics(mod.URI) +- } +- +- modCtx, modID := s.needsDiagnosis(ctx, viewsToDiagnose) +- +- wg.Go(func() { +- s.diagnoseChangedViews(modCtx, modID, viewsToDiagnose, cause) +- }) +- +- // After any file modifications, we need to update our watched files, +- // in case something changed. Compute the new set of directories to watch, +- // and if it differs from the current set, send updated registrations. +- return s.updateWatchedDirectories(ctx) +-} +- +-// needsDiagnosis records the given views as needing diagnosis, returning the +-// context and modification id to use for said diagnosis. +-// +-// Only the keys of viewsToDiagnose are used; the changed files are irrelevant. +-func (s *server) needsDiagnosis(ctx context.Context, viewsToDiagnose map[*cache.View][]protocol.DocumentURI) (context.Context, uint64) { +- s.modificationMu.Lock() +- defer s.modificationMu.Unlock() +- if s.cancelPrevDiagnostics != nil { +- s.cancelPrevDiagnostics() +- } +- modCtx := xcontext.Detach(ctx) +- modCtx, s.cancelPrevDiagnostics = context.WithCancel(modCtx) +- s.lastModificationID++ +- modID := s.lastModificationID +- +- for v := range viewsToDiagnose { +- if needs, ok := s.viewsToDiagnose[v]; !ok || needs < modID { +- s.viewsToDiagnose[v] = modID +- } +- } +- return modCtx, modID +-} +- +-// DiagnosticWorkTitle returns the title of the diagnostic work resulting from a +-// file change originating from the given cause. +-func DiagnosticWorkTitle(cause ModificationSource) string { +- return fmt.Sprintf("diagnosing %v", cause) +-} +- +-func (s *server) changedText(ctx context.Context, uri protocol.DocumentURI, changes []protocol.TextDocumentContentChangeEvent) ([]byte, error) { +- if len(changes) == 0 { +- return nil, fmt.Errorf("%w: no content changes provided", jsonrpc2.ErrInternal) +- } +- +- // Check if the client sent the full content of the file. +- // We accept a full content change even if the server expected incremental changes. +- if len(changes) == 1 && changes[0].Range == nil && changes[0].RangeLength == nil { +- changeFull.Inc() +- return []byte(changes[0].Text), nil +- } +- return s.applyIncrementalChanges(ctx, uri, changes) +-} +- +-func (s *server) applyIncrementalChanges(ctx context.Context, uri protocol.DocumentURI, changes []protocol.TextDocumentContentChangeEvent) ([]byte, error) { +- fh, err := s.session.ReadFile(ctx, uri) +- if err != nil { +- return nil, err +- } +- content, err := fh.Content() +- if err != nil { +- return nil, fmt.Errorf("%w: file not found (%v)", jsonrpc2.ErrInternal, err) +- } +- for i, change := range changes { +- // TODO(adonovan): refactor to use diff.Apply, which is robust w.r.t. +- // out-of-order or overlapping changes---and much more efficient. +- +- // Make sure to update mapper along with the content. 
+- m := protocol.NewMapper(uri, content) +- if change.Range == nil { +- return nil, fmt.Errorf("%w: unexpected nil range for change", jsonrpc2.ErrInternal) +- } +- start, end, err := m.RangeOffsets(*change.Range) +- if err != nil { +- return nil, err +- } +- if end < start { +- return nil, fmt.Errorf("%w: invalid range for content change", jsonrpc2.ErrInternal) +- } +- var buf bytes.Buffer +- buf.Write(content[:start]) +- buf.WriteString(change.Text) +- buf.Write(content[end:]) +- content = buf.Bytes() +- if i == 0 { // only look at the first change if there are seversl +- // TODO(pjw): understand multi-change) +- s.checkEfficacy(fh.URI(), fh.Version(), change) +- } +- } +- return content, nil +-} +- +-// increment counters if any of the completions look like there were used +-func (s *server) checkEfficacy(uri protocol.DocumentURI, version int32, change protocol.TextDocumentContentChangePartial) { +- s.efficacyMu.Lock() +- defer s.efficacyMu.Unlock() +- if s.efficacyURI != uri { +- return +- } +- // gopls increments the version, the test client does not +- if version != s.efficacyVersion && version != s.efficacyVersion+1 { +- return +- } +- // does any change at pos match a proposed completion item? +- for _, item := range s.efficacyItems { +- if item.TextEdit == nil { +- continue +- } +- // CompletionTextEdit may have both insert/replace mode ranges. +- // According to the LSP spec, if an `InsertReplaceEdit` is returned +- // the edit's insert range must be a prefix of the edit's replace range, +- // that means it must be contained and starting at the same position. +- // The efficacy computation uses only the start range, so it is not +- // affected by whether the client applied the suggestion in insert +- // or replace mode. Let's just use the replace mode that was the default +- // in gopls for a while. +- edit, err := protocol.SelectCompletionTextEdit(item, false) +- if err != nil { +- continue +- } +- if edit.Range.Start == change.Range.Start { +- // the change and the proposed completion start at the same +- if (change.RangeLength == nil || *change.RangeLength == 0) && len(change.Text) == 1 { +- // a single character added it does not count as a completion +- continue +- } +- ix := strings.Index(edit.NewText, "$") +- if ix < 0 && strings.HasPrefix(change.Text, edit.NewText) { +- // not a snippet, suggested completion is a prefix of the change +- complUsed.Inc() +- return +- } +- if ix > 1 && strings.HasPrefix(change.Text, edit.NewText[:ix]) { +- // a snippet, suggested completion up to $ marker is a prefix of the change +- complUsed.Inc() +- return +- } +- } +- } +- complUnused.Inc() +-} +- +-func changeTypeToFileAction(ct protocol.FileChangeType) file.Action { +- switch ct { +- case protocol.Changed: +- return file.Change +- case protocol.Created: +- return file.Create +- case protocol.Deleted: +- return file.Delete +- } +- return file.UnknownAction +-} +diff -urN a/gopls/internal/server/type_hierarchy.go b/gopls/internal/server/type_hierarchy.go +--- a/gopls/internal/server/type_hierarchy.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/server/type_hierarchy.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,63 +0,0 @@ +-// Copyright 2025 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. 
+- +-package server +- +-import ( +- "context" +- "fmt" +- +- "golang.org/x/tools/gopls/internal/file" +- "golang.org/x/tools/gopls/internal/golang" +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/internal/event" +-) +- +-func (s *server) PrepareTypeHierarchy(ctx context.Context, params *protocol.TypeHierarchyPrepareParams) ([]protocol.TypeHierarchyItem, error) { +- ctx, done := event.Start(ctx, "server.PrepareTypeHierarchy") +- defer done() +- +- fh, snapshot, release, err := s.session.FileOf(ctx, params.TextDocument.URI) +- if err != nil { +- return nil, err +- } +- defer release() +- switch snapshot.FileKind(fh) { +- case file.Go: +- return golang.PrepareTypeHierarchy(ctx, snapshot, fh, params.Position) +- } +- return nil, fmt.Errorf("unsupported file type: %v", fh) +-} +- +-func (s *server) Subtypes(ctx context.Context, params *protocol.TypeHierarchySubtypesParams) ([]protocol.TypeHierarchyItem, error) { +- ctx, done := event.Start(ctx, "server.Subtypes") +- defer done() +- +- fh, snapshot, release, err := s.session.FileOf(ctx, params.Item.URI) +- if err != nil { +- return nil, err +- } +- defer release() +- switch snapshot.FileKind(fh) { +- case file.Go: +- return golang.Subtypes(ctx, snapshot, fh, params.Item) +- } +- return nil, fmt.Errorf("unsupported file type: %v", fh) +-} +- +-func (s *server) Supertypes(ctx context.Context, params *protocol.TypeHierarchySupertypesParams) ([]protocol.TypeHierarchyItem, error) { +- ctx, done := event.Start(ctx, "server.Supertypes") +- defer done() +- +- fh, snapshot, release, err := s.session.FileOf(ctx, params.Item.URI) +- if err != nil { +- return nil, err +- } +- defer release() +- switch snapshot.FileKind(fh) { +- case file.Go: +- return golang.Supertypes(ctx, snapshot, fh, params.Item) +- } +- return nil, fmt.Errorf("unsupported file type: %v", fh) +-} +diff -urN a/gopls/internal/server/unimplemented.go b/gopls/internal/server/unimplemented.go +--- a/gopls/internal/server/unimplemented.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/server/unimplemented.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,143 +0,0 @@ +-// Copyright 2023 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package server +- +-// This file defines the LSP server methods that gopls does not currently implement. 
+- +-import ( +- "context" +- "fmt" +- +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/internal/jsonrpc2" +-) +- +-func (s *server) ColorPresentation(context.Context, *protocol.ColorPresentationParams) ([]protocol.ColorPresentation, error) { +- return nil, notImplemented("ColorPresentation") +-} +- +-func (s *server) Declaration(context.Context, *protocol.DeclarationParams) (*protocol.Or_textDocument_declaration, error) { +- return nil, notImplemented("Declaration") +-} +- +-func (s *server) DiagnosticWorkspace(context.Context, *protocol.WorkspaceDiagnosticParams) (*protocol.WorkspaceDiagnosticReport, error) { +- return nil, notImplemented("DiagnosticWorkspace") +-} +- +-func (s *server) DidChangeNotebookDocument(context.Context, *protocol.DidChangeNotebookDocumentParams) error { +- return notImplemented("DidChangeNotebookDocument") +-} +- +-func (s *server) DidCloseNotebookDocument(context.Context, *protocol.DidCloseNotebookDocumentParams) error { +- return notImplemented("DidCloseNotebookDocument") +-} +- +-func (s *server) DidDeleteFiles(context.Context, *protocol.DeleteFilesParams) error { +- return notImplemented("DidDeleteFiles") +-} +- +-func (s *server) DidOpenNotebookDocument(context.Context, *protocol.DidOpenNotebookDocumentParams) error { +- return notImplemented("DidOpenNotebookDocument") +-} +- +-func (s *server) DidRenameFiles(context.Context, *protocol.RenameFilesParams) error { +- return notImplemented("DidRenameFiles") +-} +- +-func (s *server) DidSaveNotebookDocument(context.Context, *protocol.DidSaveNotebookDocumentParams) error { +- return notImplemented("DidSaveNotebookDocument") +-} +- +-func (s *server) DocumentColor(context.Context, *protocol.DocumentColorParams) ([]protocol.ColorInformation, error) { +- return nil, notImplemented("DocumentColor") +-} +- +-func (s *server) InlineCompletion(context.Context, *protocol.InlineCompletionParams) (*protocol.Or_Result_textDocument_inlineCompletion, error) { +- return nil, notImplemented("InlineCompletion") +-} +- +-func (s *server) InlineValue(context.Context, *protocol.InlineValueParams) ([]protocol.InlineValue, error) { +- return nil, notImplemented("InlineValue") +-} +- +-func (s *server) LinkedEditingRange(context.Context, *protocol.LinkedEditingRangeParams) (*protocol.LinkedEditingRanges, error) { +- return nil, notImplemented("LinkedEditingRange") +-} +- +-func (s *server) Moniker(context.Context, *protocol.MonikerParams) ([]protocol.Moniker, error) { +- return nil, notImplemented("Moniker") +-} +- +-func (s *server) OnTypeFormatting(context.Context, *protocol.DocumentOnTypeFormattingParams) ([]protocol.TextEdit, error) { +- return nil, notImplemented("OnTypeFormatting") +-} +- +-func (s *server) Progress(context.Context, *protocol.ProgressParams) error { +- return notImplemented("Progress") +-} +- +-func (s *server) RangeFormatting(context.Context, *protocol.DocumentRangeFormattingParams) ([]protocol.TextEdit, error) { +- return nil, notImplemented("RangeFormatting") +-} +- +-func (s *server) RangesFormatting(context.Context, *protocol.DocumentRangesFormattingParams) ([]protocol.TextEdit, error) { +- return nil, notImplemented("RangesFormatting") +-} +- +-func (s *server) Resolve(context.Context, *protocol.InlayHint) (*protocol.InlayHint, error) { +- return nil, notImplemented("Resolve") +-} +- +-func (s *server) ResolveCodeLens(context.Context, *protocol.CodeLens) (*protocol.CodeLens, error) { +- return nil, notImplemented("ResolveCodeLens") +-} +- +-func (s *server) 
ResolveCompletionItem(context.Context, *protocol.CompletionItem) (*protocol.CompletionItem, error) { +- return nil, notImplemented("ResolveCompletionItem") +-} +- +-func (s *server) ResolveDocumentLink(context.Context, *protocol.DocumentLink) (*protocol.DocumentLink, error) { +- return nil, notImplemented("ResolveDocumentLink") +-} +- +-func (s *server) ResolveWorkspaceSymbol(context.Context, *protocol.WorkspaceSymbol) (*protocol.WorkspaceSymbol, error) { +- return nil, notImplemented("ResolveWorkspaceSymbol") +-} +- +-func (s *server) SemanticTokensFullDelta(context.Context, *protocol.SemanticTokensDeltaParams) (any, error) { +- return nil, notImplemented("SemanticTokensFullDelta") +-} +- +-func (s *server) SetTrace(context.Context, *protocol.SetTraceParams) error { +- return notImplemented("SetTrace") +-} +- +-func (s *server) WillCreateFiles(context.Context, *protocol.CreateFilesParams) (*protocol.WorkspaceEdit, error) { +- return nil, notImplemented("WillCreateFiles") +-} +- +-func (s *server) WillDeleteFiles(context.Context, *protocol.DeleteFilesParams) (*protocol.WorkspaceEdit, error) { +- return nil, notImplemented("WillDeleteFiles") +-} +- +-func (s *server) WillRenameFiles(context.Context, *protocol.RenameFilesParams) (*protocol.WorkspaceEdit, error) { +- return nil, notImplemented("WillRenameFiles") +-} +- +-func (s *server) WillSave(context.Context, *protocol.WillSaveTextDocumentParams) error { +- return notImplemented("WillSave") +-} +- +-func (s *server) WillSaveWaitUntil(context.Context, *protocol.WillSaveTextDocumentParams) ([]protocol.TextEdit, error) { +- return nil, notImplemented("WillSaveWaitUntil") +-} +- +-func (s *server) TextDocumentContent(context.Context, *protocol.TextDocumentContentParams) (*protocol.TextDocumentContentResult, error) { +- return nil, notImplemented("TextDocumentContent") +-} +- +-func notImplemented(method string) error { +- return fmt.Errorf("%w: %q not yet implemented", jsonrpc2.ErrMethodNotFound, method) +-} +diff -urN a/gopls/internal/server/workspace.go b/gopls/internal/server/workspace.go +--- a/gopls/internal/server/workspace.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/server/workspace.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,172 +0,0 @@ +-// Copyright 2019 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package server +- +-import ( +- "context" +- "fmt" +- "reflect" +- "strings" +- "sync" +- +- "golang.org/x/tools/gopls/internal/cache" +- "golang.org/x/tools/gopls/internal/file" +- "golang.org/x/tools/gopls/internal/golang/completion" +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/gopls/internal/settings" +- "golang.org/x/tools/internal/event" +-) +- +-func (s *server) DidChangeWorkspaceFolders(ctx context.Context, params *protocol.DidChangeWorkspaceFoldersParams) error { +- for _, folder := range params.Event.Removed { +- if !strings.HasPrefix(folder.URI, "file://") { +- // Some clients that support virtual file systems may send workspace change messages +- // about workspace folders in the virtual file systems. addFolders must not add +- // those folders, so they don't need to be removed either. 
+- continue +- } +- dir, err := protocol.ParseDocumentURI(folder.URI) +- if err != nil { +- return fmt.Errorf("invalid folder %q: %v", folder.URI, err) +- } +- if !s.session.RemoveView(ctx, dir) { +- return fmt.Errorf("view %q for %v not found", folder.Name, folder.URI) +- } +- } +- s.addFolders(ctx, params.Event.Added) +- return nil +-} +- +-// addView returns a Snapshot and a release function that must be +-// called when it is no longer needed. +-func (s *server) addView(ctx context.Context, name string, dir protocol.DocumentURI) (*cache.Snapshot, func(), error) { +- s.stateMu.Lock() +- state := s.state +- s.stateMu.Unlock() +- if state < serverInitialized { +- return nil, nil, fmt.Errorf("addView called before server initialized") +- } +- opts, err := s.fetchFolderOptions(ctx, dir) +- if err != nil { +- return nil, nil, err +- } +- folder, err := s.newFolder(ctx, dir, name, opts) +- if err != nil { +- return nil, nil, err +- } +- _, snapshot, release, err := s.session.NewView(ctx, folder) +- return snapshot, release, err +-} +- +-func (s *server) DidChangeConfiguration(ctx context.Context, _ *protocol.DidChangeConfigurationParams) error { +- ctx, done := event.Start(ctx, "server.DidChangeConfiguration") +- defer done() +- +- var wg sync.WaitGroup +- wg.Add(1) +- defer wg.Done() +- if s.Options().VerboseWorkDoneProgress { +- work := s.progress.Start(ctx, DiagnosticWorkTitle(FromDidChangeConfiguration), "Calculating diagnostics...", nil, nil) +- go func() { +- wg.Wait() +- work.End(ctx, "Done.") +- }() +- } +- +- // Apply any changes to the session-level settings. +- options, err := s.fetchFolderOptions(ctx, "") +- if err != nil { +- return err +- } +- s.SetOptions(options) +- +- // Collect options for all workspace folders. +- // If none have changed, this is a no op. +- folderOpts := make(map[protocol.DocumentURI]*settings.Options) +- changed := false +- // The set of views is implicitly guarded by the fact that gopls processes +- // didChange notifications synchronously. +- // +- // TODO(rfindley): investigate this assumption: perhaps we should hold viewMu +- // here. +- views := s.session.Views() +- for _, view := range views { +- folder := view.Folder() +- if folderOpts[folder.Dir] != nil { +- continue +- } +- opts, err := s.fetchFolderOptions(ctx, folder.Dir) +- if err != nil { +- return err +- } +- +- if !reflect.DeepEqual(folder.Options, opts) { +- changed = true +- } +- folderOpts[folder.Dir] = opts +- } +- if !changed { +- return nil +- } +- +- var newFolders []*cache.Folder +- for _, view := range views { +- folder := view.Folder() +- opts := folderOpts[folder.Dir] +- newFolder, err := s.newFolder(ctx, folder.Dir, folder.Name, opts) +- if err != nil { +- return err +- } +- newFolders = append(newFolders, newFolder) +- } +- s.session.UpdateFolders(ctx, newFolders) // ignore error +- +- // The view set may have been updated above. +- viewsToDiagnose := make(map[*cache.View][]protocol.DocumentURI) +- for _, view := range s.session.Views() { +- viewsToDiagnose[view] = nil +- } +- +- modCtx, modID := s.needsDiagnosis(ctx, viewsToDiagnose) +- wg.Go(func() { +- s.diagnoseChangedViews(modCtx, modID, viewsToDiagnose, FromDidChangeConfiguration) +- }) +- +- // An options change may have affected the detected Go version. 
+- s.checkViewGoVersions() +- +- return nil +-} +- +-func (s *server) DidCreateFiles(ctx context.Context, params *protocol.CreateFilesParams) error { +- ctx, done := event.Start(ctx, "server.DidCreateFiles") +- defer done() +- +- var allChanges []protocol.DocumentChange +- for _, createdFile := range params.Files { +- uri := protocol.DocumentURI(createdFile.URI) +- fh, snapshot, release, err := s.session.FileOf(ctx, uri) +- if err != nil { +- event.Error(ctx, "fail to call fileOf", err) +- continue +- } +- defer release() +- +- switch snapshot.FileKind(fh) { +- case file.Go: +- change, err := completion.NewFile(ctx, snapshot, fh) +- if err != nil { +- // any error, including "it's not a new file" +- continue +- } +- if change != nil { +- allChanges = append(allChanges, *change) +- } +- default: +- } +- } +- +- return applyChanges(ctx, s.client, allChanges) +-} +diff -urN a/gopls/internal/server/workspace_symbol.go b/gopls/internal/server/workspace_symbol.go +--- a/gopls/internal/server/workspace_symbol.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/server/workspace_symbol.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,41 +0,0 @@ +-// Copyright 2020 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package server +- +-import ( +- "context" +- +- "golang.org/x/tools/gopls/internal/cache" +- "golang.org/x/tools/gopls/internal/golang" +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/gopls/internal/telemetry" +- "golang.org/x/tools/internal/event" +-) +- +-func (s *server) Symbol(ctx context.Context, params *protocol.WorkspaceSymbolParams) (_ []protocol.SymbolInformation, rerr error) { +- recordLatency := telemetry.StartLatencyTimer("symbol") +- defer func() { +- recordLatency(ctx, rerr) +- }() +- +- ctx, done := event.Start(ctx, "server.Symbol") +- defer done() +- +- views := s.session.Views() +- matcher := s.Options().SymbolMatcher +- style := s.Options().SymbolStyle +- +- var snapshots []*cache.Snapshot +- for _, v := range views { +- snapshot, release, err := v.Snapshot() +- if err != nil { +- continue // snapshot is shutting down +- } +- // If err is non-nil, the snapshot is shutting down. Skip it. +- defer release() +- snapshots = append(snapshots, snapshot) +- } +- return golang.WorkspaceSymbols(ctx, matcher, style, snapshots, params.Query) +-} +diff -urN a/gopls/internal/settings/analysis.go b/gopls/internal/settings/analysis.go +--- a/gopls/internal/settings/analysis.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/settings/analysis.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,281 +0,0 @@ +-// Copyright 2024 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. 
+- +-package settings +- +-import ( +- "slices" +- +- "golang.org/x/tools/go/analysis" +- "golang.org/x/tools/go/analysis/passes/appends" +- "golang.org/x/tools/go/analysis/passes/asmdecl" +- "golang.org/x/tools/go/analysis/passes/assign" +- "golang.org/x/tools/go/analysis/passes/atomic" +- "golang.org/x/tools/go/analysis/passes/atomicalign" +- "golang.org/x/tools/go/analysis/passes/bools" +- "golang.org/x/tools/go/analysis/passes/buildtag" +- "golang.org/x/tools/go/analysis/passes/cgocall" +- "golang.org/x/tools/go/analysis/passes/composite" +- "golang.org/x/tools/go/analysis/passes/copylock" +- "golang.org/x/tools/go/analysis/passes/deepequalerrors" +- "golang.org/x/tools/go/analysis/passes/defers" +- "golang.org/x/tools/go/analysis/passes/directive" +- "golang.org/x/tools/go/analysis/passes/errorsas" +- "golang.org/x/tools/go/analysis/passes/framepointer" +- "golang.org/x/tools/go/analysis/passes/hostport" +- "golang.org/x/tools/go/analysis/passes/httpresponse" +- "golang.org/x/tools/go/analysis/passes/ifaceassert" +- "golang.org/x/tools/go/analysis/passes/inline" +- "golang.org/x/tools/go/analysis/passes/loopclosure" +- "golang.org/x/tools/go/analysis/passes/lostcancel" +- "golang.org/x/tools/go/analysis/passes/modernize" +- "golang.org/x/tools/go/analysis/passes/nilfunc" +- "golang.org/x/tools/go/analysis/passes/nilness" +- "golang.org/x/tools/go/analysis/passes/printf" +- "golang.org/x/tools/go/analysis/passes/shadow" +- "golang.org/x/tools/go/analysis/passes/shift" +- "golang.org/x/tools/go/analysis/passes/sigchanyzer" +- "golang.org/x/tools/go/analysis/passes/slog" +- "golang.org/x/tools/go/analysis/passes/sortslice" +- "golang.org/x/tools/go/analysis/passes/stdmethods" +- "golang.org/x/tools/go/analysis/passes/stdversion" +- "golang.org/x/tools/go/analysis/passes/stringintconv" +- "golang.org/x/tools/go/analysis/passes/structtag" +- "golang.org/x/tools/go/analysis/passes/testinggoroutine" +- "golang.org/x/tools/go/analysis/passes/tests" +- "golang.org/x/tools/go/analysis/passes/timeformat" +- "golang.org/x/tools/go/analysis/passes/unmarshal" +- "golang.org/x/tools/go/analysis/passes/unreachable" +- "golang.org/x/tools/go/analysis/passes/unsafeptr" +- "golang.org/x/tools/go/analysis/passes/unusedresult" +- "golang.org/x/tools/go/analysis/passes/unusedwrite" +- "golang.org/x/tools/go/analysis/passes/waitgroup" +- "golang.org/x/tools/gopls/internal/analysis/deprecated" +- "golang.org/x/tools/gopls/internal/analysis/embeddirective" +- "golang.org/x/tools/gopls/internal/analysis/fillreturns" +- "golang.org/x/tools/gopls/internal/analysis/infertypeargs" +- "golang.org/x/tools/gopls/internal/analysis/maprange" +- "golang.org/x/tools/gopls/internal/analysis/nonewvars" +- "golang.org/x/tools/gopls/internal/analysis/noresultvalues" +- "golang.org/x/tools/gopls/internal/analysis/recursiveiter" +- "golang.org/x/tools/gopls/internal/analysis/simplifycompositelit" +- "golang.org/x/tools/gopls/internal/analysis/simplifyrange" +- "golang.org/x/tools/gopls/internal/analysis/simplifyslice" +- "golang.org/x/tools/gopls/internal/analysis/unusedfunc" +- "golang.org/x/tools/gopls/internal/analysis/unusedparams" +- "golang.org/x/tools/gopls/internal/analysis/unusedvariable" +- "golang.org/x/tools/gopls/internal/analysis/yield" +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/internal/goplsexport" +- "honnef.co/go/tools/analysis/lint" +-) +- +-var AllAnalyzers = slices.Concat(DefaultAnalyzers, StaticcheckAnalyzers) +- +-// Analyzer augments an [analysis.Analyzer] with additional LSP 
configuration. +-// +-// Analyzers are immutable, since they are shared across multiple LSP sessions. +-type Analyzer struct { +- analyzer *analysis.Analyzer +- staticcheck *lint.RawDocumentation // only for staticcheck analyzers +- nonDefault bool // (sense is negated so we can mostly omit it) +- actionKinds []protocol.CodeActionKind +- severity protocol.DiagnosticSeverity +- tags []protocol.DiagnosticTag +-} +- +-// Analyzer returns the [analysis.Analyzer] that this Analyzer wraps. +-func (a *Analyzer) Analyzer() *analysis.Analyzer { return a.analyzer } +- +-// Enabled reports whether the analyzer is enabled by the options. +-// This value can be configured per-analysis in user settings. +-func (a *Analyzer) Enabled(o *Options) bool { +- // An explicit setting by name takes precedence. +- if v, found := o.Analyses[a.Analyzer().Name]; found { +- return v +- } +- if a.staticcheck != nil { +- // An explicit staticcheck={true,false} setting +- // enables/disables all staticcheck analyzers. +- if o.StaticcheckProvided { +- return o.Staticcheck +- } +- // Respect staticcheck's off-by-default options too. +- // (This applies to only a handful of analyzers.) +- if a.staticcheck.NonDefault { +- return false +- } +- } +- // Respect gopls' default setting. +- return !a.nonDefault +-} +- +-// ActionKinds is the set of kinds of code action this analyzer produces. +-// +-// If left unset, it defaults to QuickFix. +-// TODO(rfindley): revisit. +-func (a *Analyzer) ActionKinds() []protocol.CodeActionKind { return a.actionKinds } +- +-// Severity is the severity set for diagnostics reported by this analyzer. +-// The default severity is SeverityWarning. +-// +-// While the LSP spec does not specify how severity should be used, here are +-// some guiding heuristics: +-// - Error: for parse and type errors, which would stop the build. +-// - Warning: for analyzer diagnostics reporting likely bugs. +-// - Info: for analyzer diagnostics that do not indicate bugs, but may +-// suggest inaccurate or superfluous code. +-// - Hint: for analyzer diagnostics that do not indicate mistakes, but offer +-// simplifications or modernizations. By their nature, hints should +-// generally carry quick fixes. +-// +-// The difference between Info and Hint is particularly subtle. Importantly, +-// Hint diagnostics do not appear in the Problems tab in VS Code, so they are +-// less intrusive than Info diagnostics. The rule of thumb is this: use Info if +-// the diagnostic is not a bug, but the author probably didn't mean to write +-// the code that way. Use Hint if the diagnostic is not a bug and the author +-// intended to write the code that way, but there is a simpler or more modern +-// way to express the same logic. An 'unused' diagnostic is Info level, since +-// the author probably didn't mean to check in unreachable code. A 'modernize' +-// or 'deprecated' diagnostic is Hint level, since the author intended to write +-// the code that way, but now there is a better way. +-func (a *Analyzer) Severity() protocol.DiagnosticSeverity { +- if a.severity == 0 { +- return protocol.SeverityWarning +- } +- return a.severity +-} +- +-// Tags is extra tags (unnecessary, deprecated, etc) for diagnostics +-// reported by this analyzer. +-func (a *Analyzer) Tags() []protocol.DiagnosticTag { return a.tags } +- +-// String returns the name of this analyzer. 
+-func (a *Analyzer) String() string { return a.analyzer.String() } +- +-// DefaultAnalyzers holds the list of Analyzers available to all gopls +-// sessions, independent of build version. It is the source from which +-// gopls/doc/analyzers.md is generated. +-var DefaultAnalyzers = []*Analyzer{ +- // See [Analyzer.Severity] for guidance on setting analyzer severity below. +- +- // The traditional vet suite: +- {analyzer: appends.Analyzer}, +- {analyzer: asmdecl.Analyzer}, +- {analyzer: assign.Analyzer}, +- {analyzer: atomic.Analyzer}, +- {analyzer: bools.Analyzer}, +- {analyzer: buildtag.Analyzer}, +- {analyzer: cgocall.Analyzer}, +- {analyzer: composite.Analyzer}, +- {analyzer: copylock.Analyzer}, +- {analyzer: defers.Analyzer}, +- { +- analyzer: deprecated.Analyzer, +- severity: protocol.SeverityHint, +- tags: []protocol.DiagnosticTag{protocol.Deprecated}, +- }, +- {analyzer: directive.Analyzer}, +- {analyzer: errorsas.Analyzer}, +- {analyzer: framepointer.Analyzer}, +- {analyzer: httpresponse.Analyzer}, +- {analyzer: ifaceassert.Analyzer}, +- {analyzer: loopclosure.Analyzer}, +- {analyzer: lostcancel.Analyzer}, +- {analyzer: nilfunc.Analyzer}, +- {analyzer: printf.Analyzer}, +- {analyzer: shift.Analyzer}, +- {analyzer: sigchanyzer.Analyzer}, +- {analyzer: slog.Analyzer}, +- {analyzer: stdmethods.Analyzer}, +- {analyzer: stdversion.Analyzer}, +- {analyzer: stringintconv.Analyzer}, +- {analyzer: structtag.Analyzer}, +- {analyzer: testinggoroutine.Analyzer}, +- {analyzer: tests.Analyzer}, +- {analyzer: timeformat.Analyzer}, +- {analyzer: unmarshal.Analyzer}, +- {analyzer: unreachable.Analyzer}, +- {analyzer: unsafeptr.Analyzer}, +- {analyzer: unusedresult.Analyzer}, +- +- // not suitable for vet: +- // - some (nilness, yield) use go/ssa; see #59714. +- // - others don't meet the "frequency" criterion; +- // see GOROOT/src/cmd/vet/README. +- {analyzer: atomicalign.Analyzer}, +- {analyzer: deepequalerrors.Analyzer}, +- {analyzer: nilness.Analyzer}, // uses go/ssa +- {analyzer: yield.Analyzer}, // uses go/ssa +- {analyzer: sortslice.Analyzer}, +- {analyzer: embeddirective.Analyzer}, +- {analyzer: waitgroup.Analyzer}, // to appear in cmd/vet@go1.25 +- {analyzer: hostport.Analyzer}, // to appear in cmd/vet@go1.25 +- {analyzer: recursiveiter.Analyzer}, // under evaluation +- +- // disabled due to high false positives +- {analyzer: shadow.Analyzer, severity: protocol.SeverityHint, nonDefault: true}, // very noisy +- // fieldalignment is not even off-by-default; see #67762. +- +- // simplifiers and modernizers +- // +- // These analyzers offer mere style fixes on correct code, +- // thus they will never appear in cmd/vet and +- // their severity level is "information". 
+- // +- // gofmt -s suite +- { +- analyzer: simplifycompositelit.Analyzer, +- actionKinds: []protocol.CodeActionKind{protocol.SourceFixAll, protocol.QuickFix}, +- severity: protocol.SeverityInformation, +- }, +- { +- analyzer: simplifyrange.Analyzer, +- actionKinds: []protocol.CodeActionKind{protocol.SourceFixAll, protocol.QuickFix}, +- severity: protocol.SeverityInformation, +- }, +- { +- analyzer: simplifyslice.Analyzer, +- actionKinds: []protocol.CodeActionKind{protocol.SourceFixAll, protocol.QuickFix}, +- severity: protocol.SeverityInformation, +- }, +- // other simplifiers +- {analyzer: inline.Analyzer, severity: protocol.SeverityHint}, +- {analyzer: infertypeargs.Analyzer, severity: protocol.SeverityInformation}, +- {analyzer: maprange.Analyzer, severity: protocol.SeverityHint}, +- {analyzer: unusedparams.Analyzer, severity: protocol.SeverityInformation}, +- {analyzer: unusedfunc.Analyzer, severity: protocol.SeverityInformation}, +- {analyzer: unusedwrite.Analyzer, severity: protocol.SeverityInformation}, // uses go/ssa +- // the modernize suite +- {analyzer: modernize.AnyAnalyzer, severity: protocol.SeverityHint}, +- {analyzer: modernize.AppendClippedAnalyzer, severity: protocol.SeverityHint, nonDefault: true}, // not nil-preserving +- {analyzer: modernize.BLoopAnalyzer, severity: protocol.SeverityHint}, +- {analyzer: goplsexport.ErrorsAsTypeModernizer, severity: protocol.SeverityHint}, +- {analyzer: modernize.FmtAppendfAnalyzer, severity: protocol.SeverityHint}, +- {analyzer: modernize.ForVarAnalyzer, severity: protocol.SeverityHint}, +- {analyzer: goplsexport.StdIteratorsModernizer, severity: protocol.SeverityHint}, +- {analyzer: modernize.MapsLoopAnalyzer, severity: protocol.SeverityHint}, +- {analyzer: modernize.MinMaxAnalyzer, severity: protocol.SeverityHint}, +- {analyzer: modernize.NewExprAnalyzer, severity: protocol.SeverityHint}, +- {analyzer: modernize.OmitZeroAnalyzer, severity: protocol.SeverityHint}, +- {analyzer: modernize.RangeIntAnalyzer, severity: protocol.SeverityHint}, +- {analyzer: modernize.ReflectTypeForAnalyzer, severity: protocol.SeverityHint}, +- {analyzer: modernize.SlicesContainsAnalyzer, severity: protocol.SeverityHint}, +- {analyzer: modernize.SlicesDeleteAnalyzer, severity: protocol.SeverityHint, nonDefault: true}, // not nil-preserving +- {analyzer: modernize.SlicesSortAnalyzer, severity: protocol.SeverityHint}, +- {analyzer: modernize.StringsBuilderAnalyzer, severity: protocol.SeverityHint}, +- {analyzer: modernize.StringsCutPrefixAnalyzer, severity: protocol.SeverityHint}, +- {analyzer: modernize.StringsSeqAnalyzer, severity: protocol.SeverityHint}, +- {analyzer: modernize.TestingContextAnalyzer, severity: protocol.SeverityHint}, +- {analyzer: modernize.WaitGroupAnalyzer, severity: protocol.SeverityHint}, +- +- // type-error analyzers +- // These analyzers enrich go/types errors with suggested fixes. +- // Since they exist only to attach their fixes to type errors, their +- // severity is irrelevant. +- {analyzer: fillreturns.Analyzer}, +- {analyzer: nonewvars.Analyzer}, +- {analyzer: noresultvalues.Analyzer}, +- {analyzer: unusedvariable.Analyzer}, +-} +diff -urN a/gopls/internal/settings/codeactionkind.go b/gopls/internal/settings/codeactionkind.go +--- a/gopls/internal/settings/codeactionkind.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/settings/codeactionkind.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,121 +0,0 @@ +-// Copyright 2020 The Go Authors. All rights reserved. 
+-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package settings +- +-import "golang.org/x/tools/gopls/internal/protocol" +- +-// This file defines constants for non-standard CodeActions. +- +-// CodeAction kinds specific to gopls +-// +-// See ../protocol/tsprotocol.go for LSP standard kinds, including +-// +-// quickfix +-// refactor +-// refactor.extract +-// refactor.inline +-// refactor.move +-// refactor.rewrite +-// source +-// source.organizeImports +-// source.fixAll +-// notebook +-// +-// Kinds are hierarchical: "refactor" subsumes "refactor.inline", +-// which subsumes "refactor.inline.call". This rule implies that the +-// empty string, confusingly named protocol.Empty, subsumes all kinds. +-// The "Only" field in a CodeAction request may specify a category +-// such as "refactor"; any matching code action will be returned. +-// +-// All CodeActions returned by gopls use a specific leaf kind such as +-// "refactor.inline.call", except for quick fixes, which all use +-// "quickfix". TODO(adonovan): perhaps quick fixes should also be +-// hierarchical (e.g. quickfix.govulncheck.{reset,upgrade})? +-// +-// # VS Code +-// +-// The effects of CodeActionKind on the behavior of VS Code are +-// baffling and undocumented. Here's what we have observed. +-// +-// Clicking on the "Refactor..." menu item shows a submenu of actions +-// with kind="refactor.*", and clicking on "Source action..." shows +-// actions with kind="source.*". A lightbulb appears in both cases. +-// +-// A third menu, "Quick fix...", not found on the usual context +-// menu but accessible through the command palette or "⌘.", +-// does not set the Only field in its request, so the set of +-// kinds is determined by how the server interprets the default. +-// The LSP 3.18 guidance is that this should be treated +-// equivalent to Only=["quickfix"], and that is what gopls +-// now does. (If the server responds with more kinds, they will +-// be displayed in menu subsections.) +-// +-// All of these CodeAction requests have triggerkind=Invoked. +-// +-// Cursor motion also performs a CodeAction request, but with +-// triggerkind=Automatic. Even if this returns a mix of action kinds, +-// only the "refactor" and "quickfix" actions seem to matter. +-// A lightbulb appears if that subset of actions is non-empty, and the +-// menu displays them. (This was noisy--see #65167--so gopls now only +-// reports diagnostic-associated code actions if kind is Invoked or +-// missing.) +-// +-// None of these CodeAction requests specifies a "kind" restriction; +-// the filtering is done on the response, by the client. +-// +-// In all these menus, VS Code organizes the actions' menu items +-// into groups based on their kind, with hardwired captions such as +-// "Refactor...", "Extract", "Inline", "More actions", and "Quick fix". +-// +-// The special category "source.fixAll" is intended for actions that +-// are unambiguously safe to apply so that clients may automatically +-// apply all actions matching this category on save. (That said, this +-// is not VS Code's default behavior; see editor.codeActionsOnSave.) 
+-const ( +- // source +- GoAssembly protocol.CodeActionKind = "source.assembly" +- GoDoc protocol.CodeActionKind = "source.doc" +- GoFreeSymbols protocol.CodeActionKind = "source.freesymbols" +- GoSplitPackage protocol.CodeActionKind = "source.splitPackage" +- GoTest protocol.CodeActionKind = "source.test" +- GoToggleCompilerOptDetails protocol.CodeActionKind = "source.toggleCompilerOptDetails" +- AddTest protocol.CodeActionKind = "source.addTest" +- OrganizeImports protocol.CodeActionKind = "source.organizeImports" +- +- // gopls +- GoplsDocFeatures protocol.CodeActionKind = "gopls.doc.features" +- +- // refactor.rewrite +- RefactorRewriteChangeQuote protocol.CodeActionKind = "refactor.rewrite.changeQuote" +- RefactorRewriteFillStruct protocol.CodeActionKind = "refactor.rewrite.fillStruct" +- RefactorRewriteFillSwitch protocol.CodeActionKind = "refactor.rewrite.fillSwitch" +- RefactorRewriteInvertIf protocol.CodeActionKind = "refactor.rewrite.invertIf" +- RefactorRewriteJoinLines protocol.CodeActionKind = "refactor.rewrite.joinLines" +- RefactorRewriteRemoveUnusedParam protocol.CodeActionKind = "refactor.rewrite.removeUnusedParam" +- RefactorRewriteMoveParamLeft protocol.CodeActionKind = "refactor.rewrite.moveParamLeft" +- RefactorRewriteMoveParamRight protocol.CodeActionKind = "refactor.rewrite.moveParamRight" +- RefactorRewriteSplitLines protocol.CodeActionKind = "refactor.rewrite.splitLines" +- RefactorRewriteEliminateDotImport protocol.CodeActionKind = "refactor.rewrite.eliminateDotImport" +- RefactorRewriteAddTags protocol.CodeActionKind = "refactor.rewrite.addTags" +- RefactorRewriteRemoveTags protocol.CodeActionKind = "refactor.rewrite.removeTags" +- +- // refactor.inline +- RefactorInlineCall protocol.CodeActionKind = "refactor.inline.call" +- RefactorInlineVariable protocol.CodeActionKind = "refactor.inline.variable" +- +- // refactor.extract +- RefactorExtractConstant protocol.CodeActionKind = "refactor.extract.constant" +- RefactorExtractConstantAll protocol.CodeActionKind = "refactor.extract.constant-all" +- RefactorExtractFunction protocol.CodeActionKind = "refactor.extract.function" +- RefactorExtractMethod protocol.CodeActionKind = "refactor.extract.method" +- RefactorExtractVariable protocol.CodeActionKind = "refactor.extract.variable" +- RefactorExtractVariableAll protocol.CodeActionKind = "refactor.extract.variable-all" +- RefactorExtractToNewFile protocol.CodeActionKind = "refactor.extract.toNewFile" +- +- // Note: add new kinds to: +- // - the SupportedCodeActions map in default.go +- // - the codeActionProducers table in ../golang/codeaction.go +- // - the docs in ../../doc/features/transformation.md +-) +diff -urN a/gopls/internal/settings/default.go b/gopls/internal/settings/default.go +--- a/gopls/internal/settings/default.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/settings/default.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,159 +0,0 @@ +-// Copyright 2023 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. 
+- +-package settings +- +-import ( +- "sync" +- "time" +- +- "golang.org/x/tools/gopls/internal/file" +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/gopls/internal/protocol/command" +-) +- +-var ( +- optionsOnce sync.Once +- defaultOptions *Options +-) +- +-// DefaultOptions is the options that are used for Gopls execution independent +-// of any externally provided configuration (LSP initialization, command +-// invocation, etc.). +-// +-// It is the source from which gopls/doc/settings.md is generated. +-func DefaultOptions(overrides ...func(*Options)) *Options { +- optionsOnce.Do(func() { +- var commands []string +- for _, c := range command.Commands { +- commands = append(commands, c.String()) +- } +- defaultOptions = &Options{ +- ClientOptions: ClientOptions{ +- InsertTextFormat: protocol.PlainTextTextFormat, +- PreferredContentFormat: protocol.Markdown, +- ConfigurationSupported: true, +- DynamicConfigurationSupported: true, +- DynamicRegistrationSemanticTokensSupported: true, +- DynamicWatchedFilesSupported: true, +- LineFoldingOnly: false, +- HierarchicalDocumentSymbolSupport: true, +- ImportsSource: ImportsSourceGopls, +- }, +- ServerOptions: ServerOptions{ +- SupportedCodeActions: map[file.Kind]map[protocol.CodeActionKind]bool{ +- file.Go: { +- // This should include specific leaves in the tree, +- // (e.g. refactor.inline.call) not generic branches +- // (e.g. refactor.inline or refactor). +- protocol.SourceFixAll: true, +- protocol.SourceOrganizeImports: true, +- protocol.QuickFix: true, +- GoAssembly: true, +- GoDoc: true, +- GoFreeSymbols: true, +- GoSplitPackage: true, +- GoplsDocFeatures: true, +- RefactorRewriteChangeQuote: true, +- RefactorRewriteFillStruct: true, +- RefactorRewriteFillSwitch: true, +- RefactorRewriteInvertIf: true, +- RefactorRewriteJoinLines: true, +- RefactorRewriteRemoveUnusedParam: true, +- RefactorRewriteSplitLines: true, +- RefactorInlineCall: true, +- RefactorInlineVariable: true, +- RefactorExtractConstant: true, +- RefactorExtractConstantAll: true, +- RefactorExtractFunction: true, +- RefactorExtractMethod: true, +- RefactorExtractVariable: true, +- RefactorExtractVariableAll: true, +- RefactorExtractToNewFile: true, +- // Not GoTest: it must be explicit in CodeActionParams.Context.Only +- }, +- file.Mod: { +- protocol.SourceOrganizeImports: true, +- protocol.QuickFix: true, +- }, +- file.Work: {}, +- file.Sum: {}, +- file.Tmpl: {}, +- }, +- SupportedCommands: commands, +- }, +- UserOptions: UserOptions{ +- BuildOptions: BuildOptions{ +- ExpandWorkspaceToModule: true, +- DirectoryFilters: []string{"-**/node_modules"}, +- TemplateExtensions: []string{}, +- StandaloneTags: []string{"ignore"}, +- WorkspaceFiles: []string{}, +- }, +- UIOptions: UIOptions{ +- DiagnosticOptions: DiagnosticOptions{ +- Annotations: map[Annotation]bool{ +- Bounds: true, +- Escape: true, +- Inline: true, +- Nil: true, +- }, +- Vulncheck: ModeVulncheckOff, +- DiagnosticsDelay: 1 * time.Second, +- DiagnosticsTrigger: DiagnosticsOnEdit, +- AnalysisProgressReporting: true, +- }, +- InlayHintOptions: InlayHintOptions{ +- Hints: map[InlayHint]bool{}, +- }, +- DocumentationOptions: DocumentationOptions{ +- HoverKind: FullDocumentation, +- LinkTarget: "pkg.go.dev", +- LinksInHover: LinksInHover_LinkTarget, +- }, +- NavigationOptions: NavigationOptions{ +- ImportShortcut: BothShortcuts, +- SymbolMatcher: SymbolFastFuzzy, +- SymbolStyle: DynamicSymbols, +- SymbolScope: AllSymbolScope, +- }, +- CompletionOptions: CompletionOptions{ +- Matcher: Fuzzy, +- 
CompletionBudget: 100 * time.Millisecond, +- ExperimentalPostfixCompletions: true, +- CompleteFunctionCalls: true, +- }, +- Codelenses: map[CodeLensSource]bool{ +- CodeLensGenerate: true, +- CodeLensRegenerateCgo: true, +- CodeLensTidy: true, +- CodeLensUpgradeDependency: true, +- CodeLensVendor: true, +- CodeLensRunGovulncheck: true, +- }, +- NewGoFileHeader: true, +- }, +- }, +- InternalOptions: InternalOptions{ +- CompleteUnimported: true, +- CompletionDocumentation: true, +- DeepCompletion: true, +- SubdirWatchPatterns: SubdirWatchPatternsAuto, +- ReportAnalysisProgressAfter: 5 * time.Second, +- TelemetryPrompt: false, +- LinkifyShowMessage: false, +- IncludeReplaceInWorkspace: false, +- ZeroConfig: true, +- }, +- } +- }) +- options := defaultOptions.Clone() +- for _, override := range overrides { +- if override != nil { +- override(options) +- } +- } +- +- return options +-} +diff -urN a/gopls/internal/settings/settings.go b/gopls/internal/settings/settings.go +--- a/gopls/internal/settings/settings.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/settings/settings.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,1672 +0,0 @@ +-// Copyright 2019 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package settings +- +-import ( +- "fmt" +- "maps" +- "path/filepath" +- "reflect" +- "strings" +- "time" +- +- "golang.org/x/tools/gopls/internal/file" +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/gopls/internal/protocol/semtok" +- "golang.org/x/tools/gopls/internal/telemetry" +- "golang.org/x/tools/gopls/internal/util/frob" +-) +- +-// An Annotation is a category of Go compiler optimization diagnostic. +-type Annotation string +- +-const ( +- // Nil controls nil checks. +- Nil Annotation = "nil" +- +- // Escape controls diagnostics about escape choices. +- Escape Annotation = "escape" +- +- // Inline controls diagnostics about inlining choices. +- Inline Annotation = "inline" +- +- // Bounds controls bounds checking diagnostics. +- Bounds Annotation = "bounds" +-) +- +-// Options holds various configuration that affects Gopls execution, organized +-// by the nature or origin of the settings. +-// +-// Options must be comparable with reflect.DeepEqual, and serializable with +-// [frob.Codec]. +-// +-// This type defines both the logic of LSP-supplied option parsing +-// (see [SetOptions]), and the public documentation of options in +-// ../../doc/settings.md (generated by gopls/doc/generate). +-// +-// Each exported field of each embedded type such as "ClientOptions" +-// contributes a user-visible option setting. The option name is the +-// field name rendered in camelCase. Unlike most Go doc comments, +-// these fields should be documented using GitHub markdown. +-type Options struct { +- ClientOptions +- ServerOptions +- UserOptions +- InternalOptions +-} +- +-// Debug returns a list of "name = value" strings for each Options field. 
+-func (o *Options) Debug() []string { +- var res []string +- +- var visitStruct func(v reflect.Value, path []string) +- visitStruct = func(v reflect.Value, path []string) { +- for i := range v.NumField() { +- f := v.Field(i) +- ftyp := v.Type().Field(i) +- path := append(path, ftyp.Name) +- if ftyp.Type.Kind() == reflect.Struct { +- visitStruct(f, path) +- } else { +- res = append(res, fmt.Sprintf("%s = %#v", +- strings.Join(path, "."), +- f.Interface())) +- } +- } +- } +- visitStruct(reflect.ValueOf(o).Elem(), nil) +- +- return res +-} +- +-// ClientOptions holds LSP-specific configuration that is provided by the +-// client. +-// +-// ClientOptions must be comparable with reflect.DeepEqual. +-type ClientOptions struct { +- ClientInfo protocol.ClientInfo +- InsertTextFormat protocol.InsertTextFormat +- InsertReplaceSupported bool +- ConfigurationSupported bool +- DynamicConfigurationSupported bool +- DynamicRegistrationSemanticTokensSupported bool +- DynamicWatchedFilesSupported bool +- RelativePatternsSupported bool +- PreferredContentFormat protocol.MarkupKind +- LineFoldingOnly bool +- HierarchicalDocumentSymbolSupport bool +- ImportsSource ImportsSourceEnum `status:"experimental"` +- SemanticTypes []string +- SemanticMods []string +- RelatedInformationSupported bool +- CompletionTags bool +- CompletionDeprecated bool +- SupportedResourceOperations []protocol.ResourceOperationKind +- CodeActionResolveOptions []string +- ShowDocumentSupported bool +- // SupportedWorkDoneProgressFormats specifies the formats supported by the +- // client for handling workdone progress metadata. +- SupportedWorkDoneProgressFormats map[WorkDoneProgressStyle]bool +-} +- +-// ServerOptions holds LSP-specific configuration that is provided by the +-// server. +-// +-// ServerOptions must be comparable with reflect.DeepEqual. +-type ServerOptions struct { +- SupportedCodeActions map[file.Kind]map[protocol.CodeActionKind]bool +- SupportedCommands []string +-} +- +-// Note: BuildOptions must be comparable with reflect.DeepEqual. +-type BuildOptions struct { +- // BuildFlags is the set of flags passed on to the build system when invoked. +- // It is applied to queries like `go list`, which is used when discovering files. +- // The most common use is to set `-tags`. +- BuildFlags []string +- +- // Env adds environment variables to external commands run by `gopls`, most notably `go list`. +- Env map[string]string +- +- // DirectoryFilters can be used to exclude unwanted directories from the +- // workspace. By default, all directories are included. Filters are an +- // operator, `+` to include and `-` to exclude, followed by a path prefix +- // relative to the workspace folder. They are evaluated in order, and +- // the last filter that applies to a path controls whether it is included. +- // The path prefix can be empty, so an initial `-` excludes everything. +- // +- // DirectoryFilters also supports the `**` operator to match 0 or more directories. +- // +- // Examples: +- // +- // Exclude node_modules at current depth: `-node_modules` +- // +- // Exclude node_modules at any depth: `-**/node_modules` +- // +- // Include only project_a: `-` (exclude everything), `+project_a` +- // +- // Include only project_a, but not node_modules inside it: `-`, `+project_a`, `-project_a/node_modules` +- DirectoryFilters []string +- +- // TemplateExtensions gives the extensions of file names that are treated +- // as template files. (The extension +- // is the part of the file name after the final dot.) 
+- TemplateExtensions []string +- +- // obsolete, no effect +- MemoryMode string `status:"experimental"` +- +- // ExpandWorkspaceToModule determines which packages are considered +- // "workspace packages" when the workspace is using modules. +- // +- // Workspace packages affect the scope of workspace-wide operations. Notably, +- // gopls diagnoses all packages considered to be part of the workspace after +- // every keystroke, so by setting "ExpandWorkspaceToModule" to false, and +- // opening a nested workspace directory, you can reduce the amount of work +- // gopls has to do to keep your workspace up to date. +- ExpandWorkspaceToModule bool `status:"experimental"` +- +- // StandaloneTags specifies a set of build constraints that identify +- // individual Go source files that make up the entire main package of an +- // executable. +- // +- // A common example of standalone main files is the convention of using the +- // directive `//go:build ignore` to denote files that are not intended to be +- // included in any package, for example because they are invoked directly by +- // the developer using `go run`. +- // +- // Gopls considers a file to be a standalone main file if and only if it has +- // package name "main" and has a build directive of the exact form +- // "//go:build tag" or "// +build tag", where tag is among the list of tags +- // configured by this setting. Notably, if the build constraint is more +- // complicated than a simple tag (such as the composite constraint +- // `//go:build tag && go1.18`), the file is not considered to be a standalone +- // main file. +- // +- // This setting is only supported when gopls is built with Go 1.16 or later. +- StandaloneTags []string +- +- // WorkspaceFiles configures the set of globs that match files defining the +- // logical build of the current workspace. Any on-disk changes to any files +- // matching a glob specified here will trigger a reload of the workspace. +- // +- // This setting need only be customized in environments with a custom +- // GOPACKAGESDRIVER. +- WorkspaceFiles []string +-} +- +-// Note: UIOptions must be comparable with reflect.DeepEqual. +-type UIOptions struct { +- DocumentationOptions +- CompletionOptions +- NavigationOptions +- DiagnosticOptions +- InlayHintOptions +- +- // Codelenses overrides the enabled/disabled state of each of gopls' +- // sources of [Code Lenses](codelenses.md). +- // +- // Example Usage: +- // +- // ```json5 +- // "gopls": { +- // ... +- // "codelenses": { +- // "generate": false, // Don't show the `go generate` lens. +- // } +- // ... +- // } +- // ``` +- Codelenses map[CodeLensSource]bool +- +- // SemanticTokens controls whether the LSP server will send +- // semantic tokens to the client. +- SemanticTokens bool `status:"experimental"` +- +- // NoSemanticString turns off the sending of the semantic token 'string' +- // +- // Deprecated: Use SemanticTokenTypes["string"] = false instead. See +- // golang/vscode-go#3632 +- NoSemanticString bool `status:"experimental"` +- +- // NoSemanticNumber turns off the sending of the semantic token 'number' +- // +- // Deprecated: Use SemanticTokenTypes["number"] = false instead. See +- // golang/vscode-go#3632. +- NoSemanticNumber bool `status:"experimental"` +- +- // SemanticTokenTypes configures the semantic token types. It allows +- // disabling types by setting each value to false. +- // By default, all types are enabled. 
+- SemanticTokenTypes map[string]bool `status:"experimental"` +- +- // SemanticTokenModifiers configures the semantic token modifiers. It allows +- // disabling modifiers by setting each value to false. +- // By default, all modifiers are enabled. +- SemanticTokenModifiers map[string]bool `status:"experimental"` +- +- // NewGoFileHeader enables automatic insertion of the copyright comment +- // and package declaration in a newly created Go file. +- NewGoFileHeader bool +- +- // PackageMove enables PrepareRename to send the full package path +- // and allows users to move a package via renaming. +- PackageMove bool `status:"experimental"` +-} +- +-// A CodeLensSource identifies an (algorithmic) source of code lenses. +-type CodeLensSource string +- +-// CodeLens sources +-// +-// These identifiers appear in the "codelenses" configuration setting, +-// and in the user documentation thereof, which is generated by +-// gopls/doc/generate/generate.go parsing this file. +-// +-// Doc comments should use GitHub Markdown. +-// The first line becomes the title. +-// +-// (For historical reasons, each code lens source identifier typically +-// matches the name of one of the command.Commands returned by it, +-// but that isn't essential.) +-const ( +- // Run `go generate` +- // +- // This codelens source annotates any `//go:generate` comments +- // with commands to run `go generate` in this directory, on +- // all directories recursively beneath this one. +- // +- // See [Generating code](https://go.dev/blog/generate) for +- // more details. +- CodeLensGenerate CodeLensSource = "generate" +- +- // Re-generate cgo declarations +- // +- // This codelens source annotates an `import "C"` declaration +- // with a command to re-run the [cgo +- // command](https://pkg.go.dev/cmd/cgo) to regenerate the +- // corresponding Go declarations. +- // +- // Use this after editing the C code in comments attached to +- // the import, or in C header files included by it. +- CodeLensRegenerateCgo CodeLensSource = "regenerate_cgo" +- +- // Run govulncheck +- // +- // This codelens source annotates the `module` directive in a go.mod file +- // with a command to run govulncheck synchronously. +- // +- // [Govulncheck](https://go.dev/blog/vuln) is a static analysis tool that +- // computes the set of functions reachable within your application, including +- // dependencies; queries a database of known security vulnerabilities; and +- // reports any potential problems it finds. +- // +- //gopls:status experimental +- CodeLensVulncheck CodeLensSource = "vulncheck" +- +- // Run govulncheck (legacy) +- // +- // This codelens source annotates the `module` directive in a go.mod file +- // with a command to run Govulncheck asynchronously. +- // +- // [Govulncheck](https://go.dev/blog/vuln) is a static analysis tool that +- // computes the set of functions reachable within your application, including +- // dependencies; queries a database of known security vulnerabilities; and +- // reports any potential problems it finds. +- CodeLensRunGovulncheck CodeLensSource = "run_govulncheck" +- +- // Run tests and benchmarks +- // +- // This codelens source annotates each `Test` and `Benchmark` +- // function in a `*_test.go` file with a command to run it. +- // +- // This source is off by default because VS Code has +- // a client-side custom UI for testing, and because progress +- // notifications are not a great UX for streamed test output. +- // See: +- // - golang/go#67400 for a discussion of this feature. 
+- // - https://github.com/joaotavora/eglot/discussions/1402 +- // for an alternative approach. +- CodeLensTest CodeLensSource = "test" +- +- // Tidy go.mod file +- // +- // This codelens source annotates the `module` directive in a +- // go.mod file with a command to run [`go mod +- // tidy`](https://go.dev/ref/mod#go-mod-tidy), which ensures +- // that the go.mod file matches the source code in the module. +- CodeLensTidy CodeLensSource = "tidy" +- +- // Update dependencies +- // +- // This codelens source annotates the `module` directive in a +- // go.mod file with commands to: +- // +- // - check for available upgrades, +- // - upgrade direct dependencies, and +- // - upgrade all dependencies transitively. +- CodeLensUpgradeDependency CodeLensSource = "upgrade_dependency" +- +- // Update vendor directory +- // +- // This codelens source annotates the `module` directive in a +- // go.mod file with a command to run [`go mod +- // vendor`](https://go.dev/ref/mod#go-mod-vendor), which +- // creates or updates the directory named `vendor` in the +- // module root so that it contains an up-to-date copy of all +- // necessary package dependencies. +- CodeLensVendor CodeLensSource = "vendor" +-) +- +-// Note: CompletionOptions must be comparable with reflect.DeepEqual. +-type CompletionOptions struct { +- // Placeholders enables placeholders for function parameters or struct +- // fields in completion responses. +- UsePlaceholders bool +- +- // CompletionBudget is the soft latency goal for completion requests. Most +- // requests finish in a couple milliseconds, but in some cases deep +- // completions can take much longer. As we use up our budget we +- // dynamically reduce the search scope to ensure we return timely +- // results. Zero means unlimited. +- CompletionBudget time.Duration `status:"debug"` +- +- // Matcher sets the algorithm that is used when calculating completion +- // candidates. +- Matcher Matcher `status:"advanced"` +- +- // ExperimentalPostfixCompletions enables artificial method snippets +- // such as "someSlice.sort!". +- ExperimentalPostfixCompletions bool `status:"experimental"` +- +- // CompleteFunctionCalls enables function call completion. +- // +- // When completing a statement, or when a function return type matches the +- // expected of the expression being completed, completion may suggest call +- // expressions (i.e. may include parentheses). +- CompleteFunctionCalls bool +-} +- +-// Note: DocumentationOptions must be comparable with reflect.DeepEqual. +-type DocumentationOptions struct { +- // HoverKind controls the information that appears in the hover text. +- // SingleLine is intended for use only by authors of editor plugins. +- HoverKind HoverKind +- +- // LinkTarget is the base URL for links to Go package +- // documentation returned by LSP operations such as Hover and +- // DocumentLinks and in the CodeDescription field of each +- // Diagnostic. +- // +- // It might be one of: +- // +- // * `"godoc.org"` +- // * `"pkg.go.dev"` +- // +- // If company chooses to use its own `godoc.org`, its address can be used as well. +- // +- // Modules matching the GOPRIVATE environment variable will not have +- // documentation links in hover. +- LinkTarget string +- +- // LinksInHover controls the presence of documentation links in hover markdown. 
+- LinksInHover LinksInHoverEnum +-} +- +-// LinksInHoverEnum has legal values: +-// +-// - `false`, for no links; +-// - `true`, for links to the `linkTarget` domain; or +-// - `"gopls"`, for links to gopls' internal documentation viewer. +-// +-// Note: this type has special logic in loadEnums in generate.go. +-// Be sure to reflect enum and doc changes there! +-type LinksInHoverEnum int +- +-const ( +- LinksInHover_None LinksInHoverEnum = iota +- LinksInHover_LinkTarget +- LinksInHover_Gopls +-) +- +-// MarshalJSON implements the json.Marshaler interface, so that the default +-// values are formatted correctly in documentation. (See [Options.setOne] for +-// the flexible custom unmarshalling behavior). +-func (l LinksInHoverEnum) MarshalJSON() ([]byte, error) { +- switch l { +- case LinksInHover_None: +- return []byte("false"), nil +- case LinksInHover_LinkTarget: +- return []byte("true"), nil +- case LinksInHover_Gopls: +- return []byte(`"gopls"`), nil +- default: +- return nil, fmt.Errorf("invalid LinksInHover value %d", l) +- } +-} +- +-// Note: FormattingOptions must be comparable with reflect.DeepEqual. +-type FormattingOptions struct { +- // Local is the equivalent of the `goimports -local` flag, which puts +- // imports beginning with this string after third-party packages. It should +- // be the prefix of the import path whose imports should be grouped +- // separately. +- // +- // It is used when tidying imports (during an LSP Organize +- // Imports request) or when inserting new ones (for example, +- // during completion); an LSP Formatting request merely sorts the +- // existing imports. +- Local string +- +- // Gofumpt indicates if we should run gofumpt formatting. +- Gofumpt bool +-} +- +-// Note: DiagnosticOptions must be comparable with reflect.DeepEqual, +-// and frob-encodable (no interfaces). +-type DiagnosticOptions struct { +- // Analyses specify analyses that the user would like to enable or disable. +- // A map of the names of analysis passes that should be enabled/disabled. +- // A full list of analyzers that gopls uses can be found in +- // [analyzers.md](https://github.com/golang/tools/blob/master/gopls/doc/analyzers.md). +- // +- // Example Usage: +- // +- // ```json5 +- // ... +- // "analyses": { +- // "unreachable": false, // Disable the unreachable analyzer. +- // "unusedvariable": true // Enable the unusedvariable analyzer. +- // } +- // ... +- // ``` +- Analyses map[string]bool +- +- // Staticcheck configures the default set of analyses staticcheck.io. +- // These analyses are documented on +- // [Staticcheck's website](https://staticcheck.io/docs/checks/). +- // +- // The "staticcheck" option has three values: +- // - false: disable all staticcheck analyzers +- // - true: enable all staticcheck analyzers +- // - unset: enable a subset of staticcheck analyzers +- // selected by gopls maintainers for runtime efficiency +- // and analytic precision. +- // +- // Regardless of this setting, individual analyzers can be +- // selectively enabled or disabled using the `analyses` setting. +- Staticcheck bool `status:"experimental"` +- StaticcheckProvided bool `status:"experimental"` // = "staticcheck" was explicitly provided +- +- // Annotations specifies the various kinds of compiler +- // optimization details that should be reported as diagnostics +- // when enabled for a package by the "Toggle compiler +- // optimization details" (`gopls.gc_details`) command. +- // +- // (Some users care only about one kind of annotation in their +- // profiling efforts. 
More importantly, in large packages, the +- // number of annotations can sometimes overwhelm the user +- // interface and exceed the per-file diagnostic limit.) +- // +- // TODO(adonovan): rename this field to CompilerOptDetail. +- Annotations map[Annotation]bool +- +- // Vulncheck enables vulnerability scanning. +- Vulncheck VulncheckMode `status:"experimental"` +- +- // DiagnosticsDelay controls the amount of time that gopls waits +- // after the most recent file modification before computing deep diagnostics. +- // Simple diagnostics (parsing and type-checking) are always run immediately +- // on recently modified packages. +- // +- // This option must be set to a valid duration string, for example `"250ms"`. +- DiagnosticsDelay time.Duration `status:"advanced"` +- +- // DiagnosticsTrigger controls when to run diagnostics. +- DiagnosticsTrigger DiagnosticsTrigger `status:"experimental"` +- +- // AnalysisProgressReporting controls whether gopls sends progress +- // notifications when construction of its index of analysis facts is taking a +- // long time. Cancelling these notifications will cancel the indexing task, +- // though it will restart after the next change in the workspace. +- // +- // When a package is opened for the first time and heavyweight analyses such as +- // staticcheck are enabled, it can take a while to construct the index of +- // analysis facts for all its dependencies. The index is cached in the +- // filesystem, so subsequent analysis should be faster. +- AnalysisProgressReporting bool +-} +- +-type InlayHintOptions struct { +- // Hints specify inlay hints that users want to see. A full list of hints +- // that gopls uses can be found in +- // [inlayHints.md](https://github.com/golang/tools/blob/master/gopls/doc/inlayHints.md). +- Hints map[InlayHint]bool `status:"experimental"` +-} +- +-// An InlayHint identifies a category of hint that may be +-// independently requested through the "hints" setting. +-type InlayHint string +- +-// This is the source from which gopls/doc/inlayHints.md is generated. 
+-const ( +- // ParameterNames controls inlay hints for parameter names: +- // ```go +- // parseInt(/* str: */ "123", /* radix: */ 8) +- // ``` +- ParameterNames InlayHint = "parameterNames" +- +- // AssignVariableTypes controls inlay hints for variable types in assign statements: +- // ```go +- // i/* int*/, j/* int*/ := 0, len(r)-1 +- // ``` +- AssignVariableTypes InlayHint = "assignVariableTypes" +- +- // ConstantValues controls inlay hints for constant values: +- // ```go +- // const ( +- // KindNone Kind = iota/* = 0*/ +- // KindPrint/* = 1*/ +- // KindPrintf/* = 2*/ +- // KindErrorf/* = 3*/ +- // ) +- // ``` +- ConstantValues InlayHint = "constantValues" +- +- // RangeVariableTypes controls inlay hints for variable types in range statements: +- // ```go +- // for k/* int*/, v/* string*/ := range []string{} { +- // fmt.Println(k, v) +- // } +- // ``` +- RangeVariableTypes InlayHint = "rangeVariableTypes" +- +- // CompositeLiteralTypes controls inlay hints for composite literal types: +- // ```go +- // for _, c := range []struct { +- // in, want string +- // }{ +- // /*struct{ in string; want string }*/{"Hello, world", "dlrow ,olleH"}, +- // } +- // ``` +- CompositeLiteralTypes InlayHint = "compositeLiteralTypes" +- +- // CompositeLiteralFieldNames inlay hints for composite literal field names: +- // ```go +- // {/*in: */"Hello, world", /*want: */"dlrow ,olleH"} +- // ``` +- CompositeLiteralFieldNames InlayHint = "compositeLiteralFields" +- +- // FunctionTypeParameters inlay hints for implicit type parameters on generic functions: +- // ```go +- // myFoo/*[int, string]*/(1, "hello") +- // ``` +- FunctionTypeParameters InlayHint = "functionTypeParameters" +- +- // IgnoredError inlay hints for implicitly discarded errors: +- // ```go +- // f.Close() // ignore error +- // ``` +- // This check inserts an `// ignore error` hint following any +- // statement that is a function call whose error result is +- // implicitly ignored. +- // +- // To suppress the hint, write an actual comment containing +- // "ignore error" following the call statement, or explictly +- // assign the result to a blank variable. A handful of common +- // functions such as `fmt.Println` are excluded from the +- // check. +- IgnoredError InlayHint = "ignoredError" +-) +- +-type NavigationOptions struct { +- // ImportShortcut specifies whether import statements should link to +- // documentation or go to definitions. +- ImportShortcut ImportShortcut +- +- // SymbolMatcher sets the algorithm that is used when finding workspace symbols. +- SymbolMatcher SymbolMatcher `status:"advanced"` +- +- // SymbolStyle controls how symbols are qualified in symbol responses. +- // +- // Example Usage: +- // +- // ```json5 +- // "gopls": { +- // ... +- // "symbolStyle": "Dynamic", +- // ... +- // } +- // ``` +- SymbolStyle SymbolStyle `status:"advanced"` +- +- // SymbolScope controls which packages are searched for workspace/symbol +- // requests. When the scope is "workspace", gopls searches only workspace +- // packages. When the scope is "all", gopls searches all loaded packages, +- // including dependencies and the standard library. +- SymbolScope SymbolScope +-} +- +-// UserOptions holds custom Gopls configuration (not part of the LSP) that is +-// modified by the client. +-// +-// UserOptions must be comparable with reflect.DeepEqual. +-type UserOptions struct { +- BuildOptions +- UIOptions +- FormattingOptions +- +- // VerboseOutput enables additional debug logging. 
+- VerboseOutput bool `status:"debug"` +-} +- +-// EnvSlice returns Env as a slice of k=v strings. +-func (u *UserOptions) EnvSlice() []string { +- var result []string +- for k, v := range u.Env { +- result = append(result, fmt.Sprintf("%v=%v", k, v)) +- } +- return result +-} +- +-// SetEnvSlice sets Env from a slice of k=v strings. +-func (u *UserOptions) SetEnvSlice(env []string) { +- u.Env = map[string]string{} +- for _, kv := range env { +- split := strings.SplitN(kv, "=", 2) +- if len(split) != 2 { +- continue +- } +- u.Env[split[0]] = split[1] +- } +-} +- +-type WorkDoneProgressStyle string +- +-const WorkDoneProgressStyleLog WorkDoneProgressStyle = "log" +- +-// InternalOptions contains settings that are not intended for use by the +-// average user. These may be settings used by tests or outdated settings that +-// will soon be deprecated. Some of these settings may not even be configurable +-// by the user. +-// +-// TODO(rfindley): even though these settings are not intended for +-// modification, some of them should be surfaced in our documentation. +-type InternalOptions struct { +- // MCPTools configures enabled tools (by tool name), overriding the defaults. +- MCPTools map[string]bool +- +- // VerboseWorkDoneProgress controls whether the LSP server should send +- // progress reports for all work done outside the scope of an RPC. +- // Used by the regression tests. +- VerboseWorkDoneProgress bool +- +- // The following options were previously available to users, but they +- // really shouldn't be configured by anyone other than "power users". +- +- // CompletionDocumentation enables documentation with completion results. +- CompletionDocumentation bool +- +- // CompleteUnimported enables completion for packages that you do not +- // currently import. +- CompleteUnimported bool +- +- // DeepCompletion enables the ability to return completions from deep +- // inside relevant entities, rather than just the locally accessible ones. +- // +- // Consider this example: +- // +- // ```go +- // package main +- // +- // import "fmt" +- // +- // type wrapString struct { +- // str string +- // } +- // +- // func main() { +- // x := wrapString{"hello world"} +- // fmt.Printf(<>) +- // } +- // ``` +- // +- // At the location of the `<>` in this program, deep completion would suggest +- // the result `x.str`. +- DeepCompletion bool +- +- // ShowBugReports causes a message to be shown when the first bug is reported +- // on the server. +- // This option applies only during initialization. +- ShowBugReports bool +- +- // SubdirWatchPatterns configures the file watching glob patterns registered +- // by gopls. +- // +- // Some clients (namely VS Code) do not send workspace/didChangeWatchedFile +- // notifications for files contained in a directory when that directory is +- // deleted: +- // https://github.com/microsoft/vscode/issues/109754 +- // +- // In this case, gopls would miss important notifications about deleted +- // packages. To work around this, gopls registers a watch pattern for each +- // directory containing Go files. +- // +- // Unfortunately, other clients experience performance problems with this +- // many watch patterns, so there is no single behavior that works well for +- // all clients. +- // +- // The "subdirWatchPatterns" setting allows configuring this behavior. Its +- // default value of "auto" attempts to guess the correct behavior based on +- // the client name. 
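The EnvSlice/SetEnvSlice helpers removed earlier in this hunk round-trip the Env map through "k=v" strings. A small standalone sketch of that behaviour (the main wrapper and the values are assumptions, not taken from the patch):

    package main

    import (
        "fmt"

        "golang.org/x/tools/gopls/internal/settings"
    )

    func main() {
        var u settings.UserOptions
        // Entries without '=' are silently skipped; the rest populate u.Env.
        u.SetEnvSlice([]string{"CGO_ENABLED=0", "GOFLAGS=-mod=mod", "malformed"})
        // EnvSlice re-emits the map as k=v strings (order is not guaranteed).
        fmt.Println(u.EnvSlice())
    }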
We'd love to avoid this specialization, but as described +- // above there is no single value that works for all clients. +- // +- // If any LSP client does not behave well with the default value (for +- // example, if like VS Code it drops file notifications), please file an +- // issue. +- SubdirWatchPatterns SubdirWatchPatterns +- +- // ReportAnalysisProgressAfter sets the duration for gopls to wait before starting +- // progress reporting for ongoing go/analysis passes. +- // +- // It is intended to be used for testing only. +- ReportAnalysisProgressAfter time.Duration +- +- // TelemetryPrompt controls whether gopls prompts about enabling Go telemetry. +- // +- // Once the prompt is answered, gopls doesn't ask again, but TelemetryPrompt +- // can prevent the question from ever being asked in the first place. +- TelemetryPrompt bool +- +- // LinkifyShowMessage controls whether the client wants gopls +- // to linkify links in showMessage. e.g. [go.dev](https://go.dev). +- LinkifyShowMessage bool +- +- // IncludeReplaceInWorkspace controls whether locally replaced modules in a +- // go.mod file are treated like workspace modules. +- // Or in other words, if a go.mod file with local replaces behaves like a +- // go.work file. +- IncludeReplaceInWorkspace bool +- +- // ZeroConfig enables the zero-config algorithm for workspace layout, +- // dynamically creating build configurations for different modules, +- // directories, and GOOS/GOARCH combinations to cover open files. +- ZeroConfig bool +- +- // PullDiagnostics enables support for pull diagnostics. +- // +- // TODO(rfindley): make pull diagnostics robust, and remove this option, +- // allowing pull diagnostics by default. +- PullDiagnostics bool +-} +- +-type SubdirWatchPatterns string +- +-const ( +- SubdirWatchPatternsOn SubdirWatchPatterns = "on" +- SubdirWatchPatternsOff SubdirWatchPatterns = "off" +- SubdirWatchPatternsAuto SubdirWatchPatterns = "auto" +-) +- +-type ImportShortcut string +- +-const ( +- BothShortcuts ImportShortcut = "Both" +- LinkShortcut ImportShortcut = "Link" +- DefinitionShortcut ImportShortcut = "Definition" +-) +- +-func (s ImportShortcut) ShowLinks() bool { +- return s == BothShortcuts || s == LinkShortcut +-} +- +-func (s ImportShortcut) ShowDefinition() bool { +- return s == BothShortcuts || s == DefinitionShortcut +-} +- +-// ImportsSourceEnum has legal values: +-// +-// - `off` to disable searching the file system for imports +-// - `gopls` to use the metadata graph and module cache index +-// - `goimports` for the old behavior, to be deprecated +-type ImportsSourceEnum string +- +-const ( +- ImportsSourceOff ImportsSourceEnum = "off" +- ImportsSourceGopls ImportsSourceEnum = "gopls" +- ImportsSourceGoimports ImportsSourceEnum = "goimports" +-) +- +-type Matcher string +- +-const ( +- Fuzzy Matcher = "Fuzzy" +- CaseInsensitive Matcher = "CaseInsensitive" +- CaseSensitive Matcher = "CaseSensitive" +-) +- +-// A SymbolMatcher controls the matching of symbols for workspace/symbol +-// requests. +-type SymbolMatcher string +- +-const ( +- SymbolFuzzy SymbolMatcher = "Fuzzy" +- SymbolFastFuzzy SymbolMatcher = "FastFuzzy" +- SymbolCaseInsensitive SymbolMatcher = "CaseInsensitive" +- SymbolCaseSensitive SymbolMatcher = "CaseSensitive" +-) +- +-// A SymbolStyle controls the formatting of symbols in workspace/symbol results. +-type SymbolStyle string +- +-const ( +- // PackageQualifiedSymbols is package qualified symbols i.e. +- // "pkg.Foo.Field". 
+- PackageQualifiedSymbols SymbolStyle = "Package" +- // FullyQualifiedSymbols is fully qualified symbols, i.e. +- // "path/to/pkg.Foo.Field". +- FullyQualifiedSymbols SymbolStyle = "Full" +- // DynamicSymbols uses whichever qualifier results in the highest scoring +- // match for the given symbol query. Here a "qualifier" is any "/" or "." +- // delimited suffix of the fully qualified symbol. i.e. "to/pkg.Foo.Field" or +- // just "Foo.Field". +- DynamicSymbols SymbolStyle = "Dynamic" +-) +- +-// A SymbolScope controls the search scope for workspace/symbol requests. +-type SymbolScope string +- +-const ( +- // WorkspaceSymbolScope matches symbols in workspace packages only. +- WorkspaceSymbolScope SymbolScope = "workspace" +- // AllSymbolScope matches symbols in any loaded package, including +- // dependencies. +- AllSymbolScope SymbolScope = "all" +-) +- +-type HoverKind string +- +-const ( +- SingleLine HoverKind = "SingleLine" +- NoDocumentation HoverKind = "NoDocumentation" +- SynopsisDocumentation HoverKind = "SynopsisDocumentation" +- FullDocumentation HoverKind = "FullDocumentation" +- +- // Structured is a misguided experimental setting that returns a JSON +- // hover format. This setting should not be used, as it will be removed in a +- // future release of gopls. +- Structured HoverKind = "Structured" +-) +- +-type VulncheckMode string +- +-const ( +- // Disable vulnerability analysis. +- ModeVulncheckOff VulncheckMode = "Off" +- // In Imports mode, `gopls` will report vulnerabilities that affect packages +- // directly and indirectly used by the analyzed main module. +- ModeVulncheckImports VulncheckMode = "Imports" +- +- // TODO: VulncheckRequire, VulncheckCallgraph +-) +- +-type DiagnosticsTrigger string +- +-const ( +- // Trigger diagnostics on file edit and save. (default) +- DiagnosticsOnEdit DiagnosticsTrigger = "Edit" +- // Trigger diagnostics only on file save. Events like initial workspace load +- // or configuration change will still trigger diagnostics. +- DiagnosticsOnSave DiagnosticsTrigger = "Save" +- // TODO: support "Manual"? +-) +- +-type CounterPath = telemetry.CounterPath +- +-// Set updates *Options based on the provided JSON value: +-// null, bool, string, number, array, or object. +-// +-// The applied result describes settings that were applied. Each CounterPath +-// contains at least the name of the setting, but may also include sub-setting +-// names for settings that are themselves maps, and/or a non-empty bucket name +-// when bucketing is desirable. +-// +-// On failure, it returns one or more non-nil errors. +-func (o *Options) Set(value any) (applied []CounterPath, errs []error) { +- switch value := value.(type) { +- case nil: +- case map[string]any: +- seen := make(map[string]struct{}) +- for name, value := range value { +- // Use only the last segment of a dotted name such as +- // ui.navigation.symbolMatcher. The other segments +- // are discarded, even without validation (!). +- // (They are supported to enable hierarchical names +- // in the VS Code graphical configuration UI.) 
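As the comment above notes, Options.Set keeps only the last segment of a dotted setting name. A standalone sketch of that behaviour, assuming the exported settings API shown in the removed code (the main wrapper and values are illustrative only):

    package main

    import (
        "fmt"

        "golang.org/x/tools/gopls/internal/settings"
    )

    func main() {
        opts := settings.DefaultOptions()
        // "ui.navigation.symbolMatcher" is treated as "symbolMatcher".
        applied, errs := opts.Set(map[string]any{
            "ui.navigation.symbolMatcher": "FastFuzzy",
        })
        for _, p := range applied {
            fmt.Println(p.FullName()) // symbolMatcher:FastFuzzy
        }
        fmt.Println(opts.SymbolMatcher == settings.SymbolFastFuzzy, errs) // true []
    }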
+- split := strings.Split(name, ".") +- name = split[len(split)-1] +- +- if _, ok := seen[name]; ok { +- errs = append(errs, fmt.Errorf("duplicate value for %s", name)) +- } +- seen[name] = struct{}{} +- +- paths, err := o.setOne(name, value) +- if err != nil { +- err := fmt.Errorf("setting option %q: %w", name, err) +- errs = append(errs, err) +- } +- _, soft := err.(*SoftError) +- if err == nil || soft { +- if len(paths) == 0 { +- path := CounterPath{name, ""} +- applied = append(applied, path) +- } else { +- for _, subpath := range paths { +- path := append(CounterPath{name}, subpath...) +- applied = append(applied, path) +- } +- } +- } +- } +- default: +- errs = append(errs, fmt.Errorf("invalid options type %T (want JSON null or object)", value)) +- } +- return applied, errs +-} +- +-func (o *Options) ForClientCapabilities(clientInfo *protocol.ClientInfo, caps protocol.ClientCapabilities) { +- if clientInfo != nil { +- o.ClientInfo = *clientInfo +- } +- if caps.Workspace.WorkspaceEdit != nil { +- o.SupportedResourceOperations = caps.Workspace.WorkspaceEdit.ResourceOperations +- } +- // Check if the client supports snippets in completion items. +- if c := caps.TextDocument.Completion; c.CompletionItem.SnippetSupport { +- o.InsertTextFormat = protocol.SnippetTextFormat +- } +- o.InsertReplaceSupported = caps.TextDocument.Completion.CompletionItem.InsertReplaceSupport +- if caps.Window.ShowDocument != nil { +- o.ShowDocumentSupported = caps.Window.ShowDocument.Support +- } +- // Check if the client supports configuration messages. +- o.ConfigurationSupported = caps.Workspace.Configuration +- o.DynamicConfigurationSupported = caps.Workspace.DidChangeConfiguration.DynamicRegistration +- o.DynamicRegistrationSemanticTokensSupported = caps.TextDocument.SemanticTokens.DynamicRegistration +- o.DynamicWatchedFilesSupported = caps.Workspace.DidChangeWatchedFiles.DynamicRegistration +- o.RelativePatternsSupported = caps.Workspace.DidChangeWatchedFiles.RelativePatternSupport +- +- // Check which types of content format are supported by this client. +- if hover := caps.TextDocument.Hover; hover != nil && len(hover.ContentFormat) > 0 { +- o.PreferredContentFormat = hover.ContentFormat[0] +- } +- // Check if the client supports only line folding. +- +- if fr := caps.TextDocument.FoldingRange; fr != nil { +- // TODO(pjw): add telemetry +- o.LineFoldingOnly = fr.LineFoldingOnly +- } +- // Check if the client supports hierarchical document symbols. +- o.HierarchicalDocumentSymbolSupport = caps.TextDocument.DocumentSymbol.HierarchicalDocumentSymbolSupport +- +- // Client's semantic tokens +- o.SemanticTypes = caps.TextDocument.SemanticTokens.TokenTypes +- o.SemanticMods = caps.TextDocument.SemanticTokens.TokenModifiers +- // we don't need Requests, as we support full functionality +- // we don't need Formats, as there is only one, for now +- +- // Check if the client supports diagnostic related information. +- o.RelatedInformationSupported = caps.TextDocument.PublishDiagnostics.RelatedInformation +- // Check if the client completion support includes tags (preferred) or deprecation +- if caps.TextDocument.Completion.CompletionItem.TagSupport != nil && +- caps.TextDocument.Completion.CompletionItem.TagSupport.ValueSet != nil { +- o.CompletionTags = true +- } else if caps.TextDocument.Completion.CompletionItem.DeprecatedSupport { +- o.CompletionDeprecated = true +- } +- +- // Check if the client supports code actions resolving. 
+- if caps.TextDocument.CodeAction.DataSupport && caps.TextDocument.CodeAction.ResolveSupport != nil { +- o.CodeActionResolveOptions = caps.TextDocument.CodeAction.ResolveSupport.Properties +- } +- +- // Client experimental capabilities. +- if experimental, ok := caps.Experimental.(map[string]any); ok { +- if formats, ok := experimental["progressMessageStyles"].([]any); ok { +- o.SupportedWorkDoneProgressFormats = make(map[WorkDoneProgressStyle]bool, len(formats)) +- for _, f := range formats { +- o.SupportedWorkDoneProgressFormats[WorkDoneProgressStyle(f.(string))] = true +- } +- } +- } +-} +- +-var codec = frob.CodecFor[*Options]() +- +-func (o *Options) Clone() *Options { +- data := codec.Encode(o) +- var clone *Options +- codec.Decode(data, &clone) +- return clone +-} +- +-// validateDirectoryFilter validates if the filter string +-// - is not empty +-// - start with either + or - +-// - doesn't contain currently unsupported glob operators: *, ? +-func validateDirectoryFilter(ifilter string) (string, error) { +- filter := fmt.Sprint(ifilter) +- if filter == "" || (filter[0] != '+' && filter[0] != '-') { +- return "", fmt.Errorf("invalid filter %v, must start with + or -", filter) +- } +- segs := strings.Split(filter[1:], "/") +- unsupportedOps := [...]string{"?", "*"} +- for _, seg := range segs { +- if seg != "**" { +- for _, op := range unsupportedOps { +- if strings.Contains(seg, op) { +- return "", fmt.Errorf("invalid filter %v, operator %v not supported. If you want to have this operator supported, consider filing an issue", filter, op) +- } +- } +- } +- } +- +- return strings.TrimRight(filepath.FromSlash(filter), "/"), nil +-} +- +-// setOne updates a field of o based on the name and value. +-// +-// The applied result describes the counter values to be updated as a result of +-// the applied setting. If the result is nil, the default counter for this +-// setting should be updated. +-// +-// For example, if the setting name is "foo", +-// - If applied is nil, update the count for "foo". +-// - If applied is []CounterPath{{"bucket"}}, update the count for +-// foo:bucket. +-// - If applied is []CounterPath{{"a","b"}, {"c","d"}}, update foo/a:b and +-// foo/c:d. +-// +-// It returns an error if the value was invalid or duplicate. +-// It is the caller's responsibility to augment the error with 'name'. +-func (o *Options) setOne(name string, value any) (applied []CounterPath, _ error) { +- switch name { +- case "env": +- env, ok := value.(map[string]any) +- if !ok { +- return nil, fmt.Errorf("invalid type %T (want JSON object)", value) +- } +- if o.Env == nil { +- o.Env = make(map[string]string) +- } +- for k, v := range env { +- // For historic compatibility, we accept int too (e.g. CGO_ENABLED=1). 
+- switch v.(type) { +- case string, int: +- o.Env[k] = fmt.Sprint(v) +- default: +- return nil, fmt.Errorf("invalid map value %T (want string)", v) +- } +- } +- return nil, nil +- +- case "buildFlags": +- return nil, setStringSlice(&o.BuildFlags, value) +- +- case "directoryFilters": +- filterStrings, err := asStringSlice(value) +- if err != nil { +- return nil, err +- } +- var filters []string +- for _, filterStr := range filterStrings { +- filter, err := validateDirectoryFilter(filterStr) +- if err != nil { +- return nil, err +- } +- filters = append(filters, strings.TrimRight(filepath.FromSlash(filter), "/")) +- } +- o.DirectoryFilters = filters +- return nil, nil +- +- case "workspaceFiles": +- return nil, setStringSlice(&o.WorkspaceFiles, value) +- case "completionDocumentation": +- return setBool(&o.CompletionDocumentation, value) +- case "usePlaceholders": +- return setBool(&o.UsePlaceholders, value) +- case "deepCompletion": +- return setBool(&o.DeepCompletion, value) +- case "completeUnimported": +- return setBool(&o.CompleteUnimported, value) +- case "completionBudget": +- return nil, setDuration(&o.CompletionBudget, value) +- case "importsSource": +- return setEnum(&o.ImportsSource, value, +- ImportsSourceOff, +- ImportsSourceGopls, +- ImportsSourceGoimports) +- case "matcher": +- return setEnum(&o.Matcher, value, +- Fuzzy, +- CaseSensitive, +- CaseInsensitive) +- +- case "symbolMatcher": +- return setEnum(&o.SymbolMatcher, value, +- SymbolFuzzy, +- SymbolFastFuzzy, +- SymbolCaseInsensitive, +- SymbolCaseSensitive) +- +- case "symbolStyle": +- return setEnum(&o.SymbolStyle, value, +- FullyQualifiedSymbols, +- PackageQualifiedSymbols, +- DynamicSymbols) +- +- case "symbolScope": +- return setEnum(&o.SymbolScope, value, +- WorkspaceSymbolScope, +- AllSymbolScope) +- +- case "hoverKind": +- // TODO(rfindley): reinstate the deprecation of Structured hover by making +- // it a warning in gopls v0.N+1, and removing it in gopls v0.N+2. 
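A standalone sketch of the directoryFilters validation shown above: each filter must start with '+' or '-', and the '*'/'?' glob operators are rejected except for a bare "**" segment (the main wrapper and example paths are assumptions for illustration):

    package main

    import (
        "fmt"

        "golang.org/x/tools/gopls/internal/settings"
    )

    func main() {
        opts := settings.DefaultOptions()
        if _, errs := opts.Set(map[string]any{
            "directoryFilters": []any{"-node_modules", "+internal/**/testdata"},
        }); len(errs) == 0 {
            fmt.Println(opts.DirectoryFilters) // both filters accepted
        }
        // A filter without a leading '+' or '-' is rejected with an error.
        _, errs := opts.Set(map[string]any{"directoryFilters": []any{"node_modules"}})
        fmt.Println(errs)
    }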
+- return setEnum(&o.HoverKind, value, +- NoDocumentation, +- SingleLine, +- SynopsisDocumentation, +- FullDocumentation, +- Structured, +- ) +- +- case "linkTarget": +- return nil, setString(&o.LinkTarget, value) +- +- case "linksInHover": +- switch value { +- case false: +- o.LinksInHover = LinksInHover_None +- case true: +- o.LinksInHover = LinksInHover_LinkTarget +- case "gopls": +- o.LinksInHover = LinksInHover_Gopls +- default: +- return nil, fmt.Errorf(`invalid value %s; expect false, true, or "gopls"`, value) +- } +- return nil, nil +- +- case "importShortcut": +- return setEnum(&o.ImportShortcut, value, +- BothShortcuts, +- LinkShortcut, +- DefinitionShortcut) +- +- case "analyses": +- counts, err := setBoolMap(&o.Analyses, value) +- if err != nil { +- return nil, err +- } +- if o.Analyses["fieldalignment"] { +- return counts, &SoftError{"the 'fieldalignment' analyzer was removed in gopls/v0.17.0; instead, hover over struct fields to see size/offset information (https://go.dev/issue/66861)"} +- } +- return counts, nil +- +- case "hints": +- return setBoolMap(&o.Hints, value) +- +- case "annotations": +- return setAnnotationMap(&o.Annotations, value) +- +- case "vulncheck": +- return setEnum(&o.Vulncheck, value, +- ModeVulncheckOff, +- ModeVulncheckImports) +- +- case "codelenses", "codelens": +- lensOverrides, err := asBoolMap[CodeLensSource](value) +- if err != nil { +- return nil, err +- } +- if o.Codelenses == nil { +- o.Codelenses = make(map[CodeLensSource]bool) +- } +- o.Codelenses = maps.Clone(o.Codelenses) +- maps.Copy(o.Codelenses, lensOverrides) +- +- var counts []CounterPath +- for k, v := range lensOverrides { +- counts = append(counts, CounterPath{string(k), fmt.Sprint(v)}) +- } +- +- var errs []string +- if name == "codelens" { +- errs = append(errs, deprecatedError("codelenses").Error()) +- } +- if lensOverrides[CodeLensRunGovulncheck] && lensOverrides[CodeLensVulncheck] { +- errs = append(errs, "The 'run_govulncheck' codelens is superseded by the 'vulncheck' codelens. Only 'vulncheck' should be set.") +- } +- if len(errs) > 0 { +- return counts, &SoftError{msg: strings.Join(errs, "\n")} +- } +- return counts, nil +- +- case "staticcheck": +- o.StaticcheckProvided = true +- return setBool(&o.Staticcheck, value) +- +- case "local": +- return nil, setString(&o.Local, value) +- +- case "verboseOutput": +- return setBool(&o.VerboseOutput, value) +- +- case "verboseWorkDoneProgress": +- return setBool(&o.VerboseWorkDoneProgress, value) +- +- case "showBugReports": +- return setBool(&o.ShowBugReports, value) +- +- case "gofumpt": +- return setBool(&o.Gofumpt, value) +- +- case "completeFunctionCalls": +- return setBool(&o.CompleteFunctionCalls, value) +- +- case "semanticTokens": +- return setBool(&o.SemanticTokens, value) +- +- // TODO(hxjiang): deprecate noSemanticString and noSemanticNumber. 
+- case "noSemanticString": +- counts, err := setBool(&o.NoSemanticString, value) +- if err != nil { +- return nil, err +- } +- return counts, &SoftError{"noSemanticString setting is deprecated, use semanticTokenTypes instead (though you can continue to apply them for the time being)."} +- +- case "noSemanticNumber": +- counts, err := setBool(&o.NoSemanticNumber, value) +- if err != nil { +- return nil, err +- } +- return counts, &SoftError{"noSemanticNumber setting is deprecated, use semanticTokenTypes instead (though you can continue to apply them for the time being)."} +- +- case "semanticTokenTypes": +- return setBoolMap(&o.SemanticTokenTypes, value) +- +- case "semanticTokenModifiers": +- return setBoolMap(&o.SemanticTokenModifiers, value) +- +- case "newGoFileHeader": +- return setBool(&o.NewGoFileHeader, value) +- +- case "expandWorkspaceToModule": +- // See golang/go#63536: we can consider deprecating +- // expandWorkspaceToModule, but probably need to change the default +- // behavior in that case to *not* expand to the module. +- return setBool(&o.ExpandWorkspaceToModule, value) +- +- case "experimentalPostfixCompletions": +- return setBool(&o.ExperimentalPostfixCompletions, value) +- +- case "templateExtensions": +- switch value := value.(type) { +- case []any: +- return nil, setStringSlice(&o.TemplateExtensions, value) +- case nil: +- o.TemplateExtensions = nil +- default: +- return nil, fmt.Errorf("unexpected type %T (want JSON array of string)", value) +- } +- return nil, nil +- +- case "diagnosticsDelay": +- return nil, setDuration(&o.DiagnosticsDelay, value) +- +- case "diagnosticsTrigger": +- return setEnum(&o.DiagnosticsTrigger, value, +- DiagnosticsOnEdit, +- DiagnosticsOnSave) +- +- case "analysisProgressReporting": +- return setBool(&o.AnalysisProgressReporting, value) +- +- case "standaloneTags": +- return nil, setStringSlice(&o.StandaloneTags, value) +- +- case "subdirWatchPatterns": +- return setEnum(&o.SubdirWatchPatterns, value, +- SubdirWatchPatternsOn, +- SubdirWatchPatternsOff, +- SubdirWatchPatternsAuto) +- +- case "reportAnalysisProgressAfter": +- return nil, setDuration(&o.ReportAnalysisProgressAfter, value) +- +- case "telemetryPrompt": +- return setBool(&o.TelemetryPrompt, value) +- +- case "linkifyShowMessage": +- return setBool(&o.LinkifyShowMessage, value) +- +- case "includeReplaceInWorkspace": +- return setBool(&o.IncludeReplaceInWorkspace, value) +- +- case "zeroConfig": +- return setBool(&o.ZeroConfig, value) +- +- case "pullDiagnostics": +- return setBool(&o.PullDiagnostics, value) +- +- case "mcpTools": +- return setBoolMap(&o.MCPTools, value) +- +- case "packageMove": +- return setBool(&o.PackageMove, value) +- +- // deprecated and renamed settings +- // +- // These should never be deleted: there is essentially no cost +- // to providing a better error message indefinitely; it's not +- // as if we would ever want to recycle the name of a setting. 
+- +- // renamed +- case "experimentalDisabledAnalyses": +- return nil, deprecatedError("analyses") +- +- case "disableDeepCompletion": +- return nil, deprecatedError("deepCompletion") +- +- case "disableFuzzyMatching": +- return nil, deprecatedError("fuzzyMatching") +- +- case "wantCompletionDocumentation": +- return nil, deprecatedError("completionDocumentation") +- +- case "wantUnimportedCompletions": +- return nil, deprecatedError("completeUnimported") +- +- case "fuzzyMatching": +- return nil, deprecatedError("matcher") +- +- case "caseSensitiveCompletion": +- return nil, deprecatedError("matcher") +- +- case "experimentalDiagnosticsDelay": +- return nil, deprecatedError("diagnosticsDelay") +- +- // deprecated +- +- case "allowImplicitNetworkAccess": +- return nil, deprecatedError("") +- +- case "memoryMode": +- return nil, deprecatedError("") +- +- case "tempModFile": +- return nil, deprecatedError("") +- +- case "experimentalWorkspaceModule": +- return nil, deprecatedError("") +- +- case "experimentalTemplateSupport": +- return nil, deprecatedError("") +- +- case "experimentalWatchedFileDelay": +- return nil, deprecatedError("") +- +- case "experimentalPackageCacheKey": +- return nil, deprecatedError("") +- +- case "allowModfileModifications": +- return nil, deprecatedError("") +- +- case "allExperiments": +- // golang/go#65548: this setting is a no-op, but we fail don't report it as +- // deprecated, since the nightly VS Code injects it. +- // +- // If, in the future, VS Code stops injecting this, we could theoretically +- // report an error here, but it also seems harmless to keep ignoring this +- // setting forever. +- return nil, nil +- +- case "experimentalUseInvalidMetadata": +- return nil, deprecatedError("") +- +- case "newDiff": +- return nil, deprecatedError("") +- +- case "wantSuggestedFixes": +- return nil, deprecatedError("") +- +- case "noIncrementalSync": +- return nil, deprecatedError("") +- +- case "watchFileChanges": +- return nil, deprecatedError("") +- +- case "go-diff": +- return nil, deprecatedError("") +- +- default: +- return nil, fmt.Errorf("unexpected setting") +- } +-} +- +-// EnabledSemanticTokenModifiers returns a map of modifiers to boolean. +-func (o *Options) EnabledSemanticTokenModifiers() map[semtok.Modifier]bool { +- copy := make(map[semtok.Modifier]bool, len(o.SemanticTokenModifiers)) +- for k, v := range o.SemanticTokenModifiers { +- copy[semtok.Modifier(k)] = v +- } +- return copy +-} +- +-// EnabledSemanticTokenTypes returns a map of types to boolean. +-func (o *Options) EnabledSemanticTokenTypes() map[semtok.Type]bool { +- copy := make(map[semtok.Type]bool, len(o.SemanticTokenTypes)) +- for k, v := range o.SemanticTokenTypes { +- copy[semtok.Type(k)] = v +- } +- if o.NoSemanticString { +- copy[semtok.TokString] = false +- } +- if o.NoSemanticNumber { +- copy[semtok.TokNumber] = false +- } +- return copy +-} +- +-// A SoftError is an error that does not affect the functionality of gopls. +-type SoftError struct { +- msg string +-} +- +-func (e *SoftError) Error() string { +- return e.msg +-} +- +-// deprecatedError reports the current setting as deprecated. +-// The optional replacement is suggested to the user. 
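Renamed settings such as "experimentalDiagnosticsDelay" are routed through the deprecatedError helper whose definition follows, so they surface as soft errors rather than silently disappearing. A small usage sketch (standalone wrapper assumed, not from the patch):

    package main

    import (
        "fmt"

        "golang.org/x/tools/gopls/internal/settings"
    )

    func main() {
        opts := settings.DefaultOptions()
        _, errs := opts.Set(map[string]any{"experimentalDiagnosticsDelay": "500ms"})
        // Prints something like:
        //   setting option "experimentalDiagnosticsDelay": this setting is
        //   deprecated, use "diagnosticsDelay" instead
        for _, err := range errs {
            fmt.Println(err)
        }
    }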
+-func deprecatedError(replacement string) error { +- msg := "this setting is deprecated" +- if replacement != "" { +- msg = fmt.Sprintf("%s, use %q instead", msg, replacement) +- } +- return &SoftError{msg} +-} +- +-// setT() and asT() helpers: the setT forms write to the 'dest *T' +-// variable only on success, to reduce boilerplate in Option.set. +- +-func setBool(dest *bool, value any) ([]CounterPath, error) { +- b, err := asBool(value) +- if err != nil { +- return nil, err +- } +- *dest = b +- return []CounterPath{{fmt.Sprint(b)}}, nil +-} +- +-func asBool(value any) (bool, error) { +- b, ok := value.(bool) +- if !ok { +- return false, fmt.Errorf("invalid type %T (want bool)", value) +- } +- return b, nil +-} +- +-func setDuration(dest *time.Duration, value any) error { +- str, err := asString(value) +- if err != nil { +- return err +- } +- parsed, err := time.ParseDuration(str) +- if err != nil { +- return err +- } +- *dest = parsed +- return nil +-} +- +-func setAnnotationMap(dest *map[Annotation]bool, value any) ([]CounterPath, error) { +- all, err := asBoolMap[string](value) +- if err != nil { +- return nil, err +- } +- var counters []CounterPath +- // Default to everything enabled by default. +- m := make(map[Annotation]bool) +- for k, enabled := range all { +- var a Annotation +- cnts, err := setEnum(&a, k, +- Nil, +- Escape, +- Inline, +- Bounds) +- if err != nil { +- // In case of an error, process any legacy values. +- switch k { +- case "noEscape": +- m[Escape] = false +- return nil, fmt.Errorf(`"noEscape" is deprecated, set "Escape: false" instead`) +- +- case "noNilcheck": +- m[Nil] = false +- return nil, fmt.Errorf(`"noNilcheck" is deprecated, set "Nil: false" instead`) +- +- case "noInline": +- m[Inline] = false +- return nil, fmt.Errorf(`"noInline" is deprecated, set "Inline: false" instead`) +- +- case "noBounds": +- m[Bounds] = false +- return nil, fmt.Errorf(`"noBounds" is deprecated, set "Bounds: false" instead`) +- +- default: +- return nil, err +- } +- } +- counters = append(counters, cnts...) 
+- m[a] = enabled +- } +- *dest = m +- return counters, nil +-} +- +-func setBoolMap[K ~string](dest *map[K]bool, value any) ([]CounterPath, error) { +- m, err := asBoolMap[K](value) +- if err != nil { +- return nil, err +- } +- *dest = m +- var counts []CounterPath +- for k, v := range m { +- counts = append(counts, CounterPath{string(k), fmt.Sprint(v)}) +- } +- return counts, nil +-} +- +-func asBoolMap[K ~string](value any) (map[K]bool, error) { +- all, ok := value.(map[string]any) +- if !ok { +- return nil, fmt.Errorf("invalid type %T (want JSON object)", value) +- } +- m := make(map[K]bool) +- for a, enabled := range all { +- b, ok := enabled.(bool) +- if !ok { +- return nil, fmt.Errorf("invalid type %T for object field %q", enabled, a) +- } +- m[K(a)] = b +- } +- return m, nil +-} +- +-func setString(dest *string, value any) error { +- str, err := asString(value) +- if err != nil { +- return err +- } +- *dest = str +- return nil +-} +- +-func asString(value any) (string, error) { +- str, ok := value.(string) +- if !ok { +- return "", fmt.Errorf("invalid type %T (want string)", value) +- } +- return str, nil +-} +- +-func setStringSlice(dest *[]string, value any) error { +- slice, err := asStringSlice(value) +- if err != nil { +- return err +- } +- *dest = slice +- return nil +-} +- +-func asStringSlice(value any) ([]string, error) { +- array, ok := value.([]any) +- if !ok { +- return nil, fmt.Errorf("invalid type %T (want JSON array of string)", value) +- } +- var slice []string +- for _, elem := range array { +- str, ok := elem.(string) +- if !ok { +- return nil, fmt.Errorf("invalid array element type %T (want string)", elem) +- } +- slice = append(slice, str) +- } +- return slice, nil +-} +- +-func setEnum[S ~string](dest *S, value any, options ...S) ([]CounterPath, error) { +- enum, err := asEnum(value, options...) +- if err != nil { +- return nil, err +- } +- *dest = enum +- return []CounterPath{{string(enum)}}, nil +-} +- +-func asEnum[S ~string](value any, options ...S) (S, error) { +- str, err := asString(value) +- if err != nil { +- return "", err +- } +- for _, opt := range options { +- if strings.EqualFold(str, string(opt)) { +- return opt, nil +- } +- } +- return "", fmt.Errorf("invalid option %q for enum", str) +-} +diff -urN a/gopls/internal/settings/settings_test.go b/gopls/internal/settings/settings_test.go +--- a/gopls/internal/settings/settings_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/settings/settings_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,258 +0,0 @@ +-// Copyright 2020 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package settings_test +- +-import ( +- "reflect" +- "testing" +- "time" +- +- "github.com/google/go-cmp/cmp" +- "golang.org/x/tools/gopls/internal/clonetest" +- . 
"golang.org/x/tools/gopls/internal/settings" +-) +- +-func TestDefaultsEquivalence(t *testing.T) { +- opts1 := DefaultOptions() +- opts2 := DefaultOptions() +- if !reflect.DeepEqual(opts1, opts2) { +- t.Fatal("default options are not equivalent using reflect.DeepEqual") +- } +-} +- +-func TestOptions_Set(t *testing.T) { +- type testCase struct { +- name string +- value any +- wantError bool +- check func(Options) bool +- } +- tests := []testCase{ +- { +- name: "symbolStyle", +- value: "Dynamic", +- check: func(o Options) bool { return o.SymbolStyle == DynamicSymbols }, +- }, +- { +- name: "symbolStyle", +- value: "", +- wantError: true, +- check: func(o Options) bool { return o.SymbolStyle == "" }, +- }, +- { +- name: "symbolStyle", +- value: false, +- wantError: true, +- check: func(o Options) bool { return o.SymbolStyle == "" }, +- }, +- { +- name: "symbolMatcher", +- value: "caseInsensitive", +- check: func(o Options) bool { return o.SymbolMatcher == SymbolCaseInsensitive }, +- }, +- { +- name: "completionBudget", +- value: "2s", +- check: func(o Options) bool { return o.CompletionBudget == 2*time.Second }, +- }, +- { +- name: "codelenses", +- value: map[string]any{"generate": true}, +- check: func(o Options) bool { return o.Codelenses["generate"] }, +- }, +- { +- name: "allExperiments", +- value: true, +- check: func(o Options) bool { +- return true // just confirm that we handle this setting +- }, +- }, +- { +- name: "hoverKind", +- value: "FullDocumentation", +- check: func(o Options) bool { +- return o.HoverKind == FullDocumentation +- }, +- }, +- { +- name: "hoverKind", +- value: "NoDocumentation", +- check: func(o Options) bool { +- return o.HoverKind == NoDocumentation +- }, +- }, +- { +- name: "hoverKind", +- value: "SingleLine", +- check: func(o Options) bool { +- return o.HoverKind == SingleLine +- }, +- }, +- { +- name: "hoverKind", +- value: "Structured", +- // wantError: true, // TODO(rfindley): reinstate this error +- check: func(o Options) bool { +- return o.HoverKind == Structured +- }, +- }, +- { +- name: "ui.documentation.hoverKind", +- value: "Structured", +- // wantError: true, // TODO(rfindley): reinstate this error +- check: func(o Options) bool { +- return o.HoverKind == Structured +- }, +- }, +- { +- name: "hoverKind", +- value: "FullDocumentation", +- check: func(o Options) bool { +- return o.HoverKind == FullDocumentation +- }, +- }, +- { +- name: "ui.documentation.hoverKind", +- value: "FullDocumentation", +- check: func(o Options) bool { +- return o.HoverKind == FullDocumentation +- }, +- }, +- { +- name: "matcher", +- value: "Fuzzy", +- check: func(o Options) bool { +- return o.Matcher == Fuzzy +- }, +- }, +- { +- name: "matcher", +- value: "CaseSensitive", +- check: func(o Options) bool { +- return o.Matcher == CaseSensitive +- }, +- }, +- { +- name: "matcher", +- value: "CaseInsensitive", +- check: func(o Options) bool { +- return o.Matcher == CaseInsensitive +- }, +- }, +- { +- name: "env", +- value: map[string]any{"testing": "true"}, +- check: func(o Options) bool { +- v, found := o.Env["testing"] +- return found && v == "true" +- }, +- }, +- { +- name: "env", +- value: []string{"invalid", "input"}, +- wantError: true, +- check: func(o Options) bool { +- return o.Env == nil +- }, +- }, +- { +- name: "directoryFilters", +- value: []any{"-node_modules", "+project_a"}, +- check: func(o Options) bool { +- return len(o.DirectoryFilters) == 2 +- }, +- }, +- { +- name: "directoryFilters", +- value: []any{"invalid"}, +- wantError: true, +- check: func(o Options) 
bool { +- return len(o.DirectoryFilters) == 0 +- }, +- }, +- { +- name: "directoryFilters", +- value: []string{"-invalid", "+type"}, +- wantError: true, +- check: func(o Options) bool { +- return len(o.DirectoryFilters) == 0 +- }, +- }, +- { +- name: "annotations", +- value: map[string]any{ +- "Nil": false, +- "noBounds": true, +- }, +- wantError: true, +- check: func(o Options) bool { +- return !o.Annotations[Nil] && !o.Annotations[Bounds] +- }, +- }, +- { +- name: "vulncheck", +- value: []any{"invalid"}, +- wantError: true, +- check: func(o Options) bool { +- return o.Vulncheck == "" // For invalid value, default to 'off'. +- }, +- }, +- { +- name: "vulncheck", +- value: "Imports", +- check: func(o Options) bool { +- return o.Vulncheck == ModeVulncheckImports // For invalid value, default to 'off'. +- }, +- }, +- { +- name: "vulncheck", +- value: "imports", +- check: func(o Options) bool { +- return o.Vulncheck == ModeVulncheckImports +- }, +- }, +- } +- +- for _, test := range tests { +- var opts Options +- _, err := opts.Set(map[string]any{test.name: test.value}) +- if err != nil { +- if !test.wantError { +- t.Errorf("Options.set(%q, %v) failed: %v", +- test.name, test.value, err) +- } +- continue +- } else if test.wantError { +- t.Fatalf("Options.set(%q, %v) succeeded unexpectedly", +- test.name, test.value) +- } +- +- // TODO: this could be made much better using cmp.Diff, if that becomes +- // available in this module. +- if !test.check(opts) { +- t.Errorf("Options.set(%q, %v): unexpected result %+v", test.name, test.value, opts) +- } +- } +-} +- +-func TestOptions_Clone(t *testing.T) { +- // Test that the Options.Clone actually performs a deep clone of the Options +- // struct. +- +- golden := clonetest.NonZero[*Options]() +- opts := clonetest.NonZero[*Options]() +- opts2 := opts.Clone() +- +- // The clone should be equivalent to the original. +- if diff := cmp.Diff(golden, opts2); diff != "" { +- t.Errorf("Clone() does not match original (-want +got):\n%s", diff) +- } +- +- // Mutating the clone should not mutate the original. +- clonetest.ZeroOut(opts2) +- if diff := cmp.Diff(golden, opts); diff != "" { +- t.Errorf("Mutating clone mutated the original (-want +got):\n%s", diff) +- } +-} +diff -urN a/gopls/internal/settings/staticcheck.go b/gopls/internal/settings/staticcheck.go +--- a/gopls/internal/settings/staticcheck.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/settings/staticcheck.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,450 +0,0 @@ +-// Copyright 2019 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. 
+- +-package settings +- +-import ( +- "fmt" +- "log" +- +- "golang.org/x/tools/go/analysis" +- "golang.org/x/tools/gopls/internal/protocol" +- "honnef.co/go/tools/analysis/lint" +- "honnef.co/go/tools/quickfix" +- "honnef.co/go/tools/quickfix/qf1001" +- "honnef.co/go/tools/quickfix/qf1002" +- "honnef.co/go/tools/quickfix/qf1003" +- "honnef.co/go/tools/quickfix/qf1004" +- "honnef.co/go/tools/quickfix/qf1005" +- "honnef.co/go/tools/quickfix/qf1006" +- "honnef.co/go/tools/quickfix/qf1007" +- "honnef.co/go/tools/quickfix/qf1008" +- "honnef.co/go/tools/quickfix/qf1009" +- "honnef.co/go/tools/quickfix/qf1010" +- "honnef.co/go/tools/quickfix/qf1011" +- "honnef.co/go/tools/quickfix/qf1012" +- "honnef.co/go/tools/simple" +- "honnef.co/go/tools/simple/s1000" +- "honnef.co/go/tools/simple/s1001" +- "honnef.co/go/tools/simple/s1002" +- "honnef.co/go/tools/simple/s1003" +- "honnef.co/go/tools/simple/s1004" +- "honnef.co/go/tools/simple/s1005" +- "honnef.co/go/tools/simple/s1006" +- "honnef.co/go/tools/simple/s1007" +- "honnef.co/go/tools/simple/s1008" +- "honnef.co/go/tools/simple/s1009" +- "honnef.co/go/tools/simple/s1010" +- "honnef.co/go/tools/simple/s1011" +- "honnef.co/go/tools/simple/s1012" +- "honnef.co/go/tools/simple/s1016" +- "honnef.co/go/tools/simple/s1017" +- "honnef.co/go/tools/simple/s1018" +- "honnef.co/go/tools/simple/s1019" +- "honnef.co/go/tools/simple/s1020" +- "honnef.co/go/tools/simple/s1021" +- "honnef.co/go/tools/simple/s1023" +- "honnef.co/go/tools/simple/s1024" +- "honnef.co/go/tools/simple/s1025" +- "honnef.co/go/tools/simple/s1028" +- "honnef.co/go/tools/simple/s1029" +- "honnef.co/go/tools/simple/s1030" +- "honnef.co/go/tools/simple/s1031" +- "honnef.co/go/tools/simple/s1032" +- "honnef.co/go/tools/simple/s1033" +- "honnef.co/go/tools/simple/s1034" +- "honnef.co/go/tools/simple/s1035" +- "honnef.co/go/tools/simple/s1036" +- "honnef.co/go/tools/simple/s1037" +- "honnef.co/go/tools/simple/s1038" +- "honnef.co/go/tools/simple/s1039" +- "honnef.co/go/tools/simple/s1040" +- "honnef.co/go/tools/staticcheck" +- "honnef.co/go/tools/staticcheck/sa1000" +- "honnef.co/go/tools/staticcheck/sa1001" +- "honnef.co/go/tools/staticcheck/sa1002" +- "honnef.co/go/tools/staticcheck/sa1003" +- "honnef.co/go/tools/staticcheck/sa1004" +- "honnef.co/go/tools/staticcheck/sa1005" +- "honnef.co/go/tools/staticcheck/sa1006" +- "honnef.co/go/tools/staticcheck/sa1007" +- "honnef.co/go/tools/staticcheck/sa1008" +- "honnef.co/go/tools/staticcheck/sa1010" +- "honnef.co/go/tools/staticcheck/sa1011" +- "honnef.co/go/tools/staticcheck/sa1012" +- "honnef.co/go/tools/staticcheck/sa1013" +- "honnef.co/go/tools/staticcheck/sa1014" +- "honnef.co/go/tools/staticcheck/sa1015" +- "honnef.co/go/tools/staticcheck/sa1016" +- "honnef.co/go/tools/staticcheck/sa1017" +- "honnef.co/go/tools/staticcheck/sa1018" +- "honnef.co/go/tools/staticcheck/sa1019" +- "honnef.co/go/tools/staticcheck/sa1020" +- "honnef.co/go/tools/staticcheck/sa1021" +- "honnef.co/go/tools/staticcheck/sa1023" +- "honnef.co/go/tools/staticcheck/sa1024" +- "honnef.co/go/tools/staticcheck/sa1025" +- "honnef.co/go/tools/staticcheck/sa1026" +- "honnef.co/go/tools/staticcheck/sa1027" +- "honnef.co/go/tools/staticcheck/sa1028" +- "honnef.co/go/tools/staticcheck/sa1029" +- "honnef.co/go/tools/staticcheck/sa1030" +- "honnef.co/go/tools/staticcheck/sa1031" +- "honnef.co/go/tools/staticcheck/sa1032" +- "honnef.co/go/tools/staticcheck/sa2000" +- "honnef.co/go/tools/staticcheck/sa2001" +- "honnef.co/go/tools/staticcheck/sa2002" +- 
"honnef.co/go/tools/staticcheck/sa2003" +- "honnef.co/go/tools/staticcheck/sa3000" +- "honnef.co/go/tools/staticcheck/sa3001" +- "honnef.co/go/tools/staticcheck/sa4000" +- "honnef.co/go/tools/staticcheck/sa4001" +- "honnef.co/go/tools/staticcheck/sa4003" +- "honnef.co/go/tools/staticcheck/sa4004" +- "honnef.co/go/tools/staticcheck/sa4005" +- "honnef.co/go/tools/staticcheck/sa4006" +- "honnef.co/go/tools/staticcheck/sa4008" +- "honnef.co/go/tools/staticcheck/sa4009" +- "honnef.co/go/tools/staticcheck/sa4010" +- "honnef.co/go/tools/staticcheck/sa4011" +- "honnef.co/go/tools/staticcheck/sa4012" +- "honnef.co/go/tools/staticcheck/sa4013" +- "honnef.co/go/tools/staticcheck/sa4014" +- "honnef.co/go/tools/staticcheck/sa4015" +- "honnef.co/go/tools/staticcheck/sa4016" +- "honnef.co/go/tools/staticcheck/sa4017" +- "honnef.co/go/tools/staticcheck/sa4018" +- "honnef.co/go/tools/staticcheck/sa4019" +- "honnef.co/go/tools/staticcheck/sa4020" +- "honnef.co/go/tools/staticcheck/sa4021" +- "honnef.co/go/tools/staticcheck/sa4022" +- "honnef.co/go/tools/staticcheck/sa4023" +- "honnef.co/go/tools/staticcheck/sa4024" +- "honnef.co/go/tools/staticcheck/sa4025" +- "honnef.co/go/tools/staticcheck/sa4026" +- "honnef.co/go/tools/staticcheck/sa4027" +- "honnef.co/go/tools/staticcheck/sa4028" +- "honnef.co/go/tools/staticcheck/sa4029" +- "honnef.co/go/tools/staticcheck/sa4030" +- "honnef.co/go/tools/staticcheck/sa4031" +- "honnef.co/go/tools/staticcheck/sa4032" +- "honnef.co/go/tools/staticcheck/sa5000" +- "honnef.co/go/tools/staticcheck/sa5001" +- "honnef.co/go/tools/staticcheck/sa5002" +- "honnef.co/go/tools/staticcheck/sa5003" +- "honnef.co/go/tools/staticcheck/sa5004" +- "honnef.co/go/tools/staticcheck/sa5005" +- "honnef.co/go/tools/staticcheck/sa5007" +- "honnef.co/go/tools/staticcheck/sa5008" +- "honnef.co/go/tools/staticcheck/sa5009" +- "honnef.co/go/tools/staticcheck/sa5010" +- "honnef.co/go/tools/staticcheck/sa5011" +- "honnef.co/go/tools/staticcheck/sa5012" +- "honnef.co/go/tools/staticcheck/sa6000" +- "honnef.co/go/tools/staticcheck/sa6001" +- "honnef.co/go/tools/staticcheck/sa6002" +- "honnef.co/go/tools/staticcheck/sa6003" +- "honnef.co/go/tools/staticcheck/sa6005" +- "honnef.co/go/tools/staticcheck/sa6006" +- "honnef.co/go/tools/staticcheck/sa9001" +- "honnef.co/go/tools/staticcheck/sa9002" +- "honnef.co/go/tools/staticcheck/sa9003" +- "honnef.co/go/tools/staticcheck/sa9004" +- "honnef.co/go/tools/staticcheck/sa9005" +- "honnef.co/go/tools/staticcheck/sa9006" +- "honnef.co/go/tools/staticcheck/sa9007" +- "honnef.co/go/tools/staticcheck/sa9008" +- "honnef.co/go/tools/staticcheck/sa9009" +- "honnef.co/go/tools/stylecheck" +- "honnef.co/go/tools/stylecheck/st1000" +- "honnef.co/go/tools/stylecheck/st1001" +- "honnef.co/go/tools/stylecheck/st1003" +- "honnef.co/go/tools/stylecheck/st1005" +- "honnef.co/go/tools/stylecheck/st1006" +- "honnef.co/go/tools/stylecheck/st1008" +- "honnef.co/go/tools/stylecheck/st1011" +- "honnef.co/go/tools/stylecheck/st1012" +- "honnef.co/go/tools/stylecheck/st1013" +- "honnef.co/go/tools/stylecheck/st1015" +- "honnef.co/go/tools/stylecheck/st1016" +- "honnef.co/go/tools/stylecheck/st1017" +- "honnef.co/go/tools/stylecheck/st1018" +- "honnef.co/go/tools/stylecheck/st1019" +- "honnef.co/go/tools/stylecheck/st1020" +- "honnef.co/go/tools/stylecheck/st1021" +- "honnef.co/go/tools/stylecheck/st1022" +- "honnef.co/go/tools/stylecheck/st1023" +-) +- +-// StaticcheckAnalyzers lists available Staticcheck analyzers. 
+-var StaticcheckAnalyzers = initStaticcheckAnalyzers() +- +-func initStaticcheckAnalyzers() (res []*Analyzer) { +- +- mapSeverity := func(severity lint.Severity) protocol.DiagnosticSeverity { +- switch severity { +- case lint.SeverityError: +- return protocol.SeverityError +- case lint.SeverityDeprecated: +- // TODO(dh): in LSP, deprecated is a tag, not a severity. +- // We'll want to support this once we enable SA5011. +- return protocol.SeverityWarning +- case lint.SeverityWarning: +- return protocol.SeverityWarning +- case lint.SeverityInfo: +- return protocol.SeverityInformation +- case lint.SeverityHint: +- return protocol.SeverityHint +- default: +- return protocol.SeverityWarning +- } +- } +- +- // We can't import buildir.Analyzer directly, so grab it from another analyzer. +- buildir := sa1000.SCAnalyzer.Analyzer.Requires[0] +- if buildir.Name != "buildir" { +- panic("sa1000.Requires[0] is not buildir") +- } +- +- add := func(a *lint.Analyzer, dflt bool) { +- // Assert that no analyzer that requires "buildir", +- // even indirectly, is enabled by default. +- if dflt { +- var visit func(aa *analysis.Analyzer) +- visit = func(aa *analysis.Analyzer) { +- if aa == buildir { +- log.Fatalf("%s requires buildir (perhaps indirectly) yet is enabled by default", a.Analyzer.Name) +- } +- for _, req := range aa.Requires { +- visit(req) +- } +- } +- visit(a.Analyzer) +- } +- res = append(res, &Analyzer{ +- analyzer: a.Analyzer, +- staticcheck: a.Doc, +- nonDefault: !dflt, +- severity: mapSeverity(a.Doc.Severity), +- }) +- } +- +- type M = map[*lint.Analyzer]any // value = true|false|nil +- +- addAll := func(suite string, upstream []*lint.Analyzer, config M) { +- for _, a := range upstream { +- v, ok := config[a] +- if !ok { +- panic(fmt.Sprintf("%s.Analyzers includes %s but config mapping does not; settings audit required", suite, a.Analyzer.Name)) +- } +- if v != nil { +- add(a, v.(bool)) +- } +- } +- } +- +- // For each analyzer in the four suites provided by +- // staticcheck, we provide a complete configuration, mapping +- // it to a boolean, indicating whether it should be on by +- // default in gopls, or nil to indicate explicitly that it has +- // been excluded (e.g. because it is redundant with an +- // existing vet analyzer such as printf, waitgroup, appends). +- // +- // This approach ensures that as suites grow, we make an +- // affirmative decision, positive or negative, about adding +- // new items. +- // +- // An analyzer may be off by default if: +- // - it requires, even indirectly, "buildir", which is like +- // buildssa but uses facts, making it expensive; +- // - it has significant false positives; +- // - it reports on non-problematic style issues; +- // - its fixes are lossy (e.g. of comments) or not always sound; +- // - it reports "maybes", not "definites" (e.g. sa9001). +- // - it reports on harmless stylistic choices that may have +- // been chosen deliberately for clarity or emphasis (e.g. s1005). +- // - it makes deductions from build tags that are not true +- // for all configurations. 
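A standalone sketch of turning the Staticcheck suite on through the removed "staticcheck" setting; analyzers marked false or nil in the tables that follow remain off by default even then (the main wrapper is an assumption, and toggling individual checks by name is not shown here):

    package main

    import (
        "fmt"

        "golang.org/x/tools/gopls/internal/settings"
    )

    func main() {
        opts := settings.DefaultOptions()
        _, errs := opts.Set(map[string]any{"staticcheck": true})
        fmt.Println(opts.Staticcheck, opts.StaticcheckProvided, errs) // true true []
    }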
+- +- addAll("simple", simple.Analyzers, M{ +- s1000.SCAnalyzer: true, +- s1001.SCAnalyzer: true, +- s1002.SCAnalyzer: false, // makes unsound deductions from build tags +- s1003.SCAnalyzer: true, +- s1004.SCAnalyzer: true, +- s1005.SCAnalyzer: false, // not a correctness/style issue +- s1006.SCAnalyzer: false, // makes unsound deductions from build tags +- s1007.SCAnalyzer: true, +- s1008.SCAnalyzer: false, // may lose important comments +- s1009.SCAnalyzer: true, +- s1010.SCAnalyzer: true, +- s1011.SCAnalyzer: false, // requires buildir +- s1012.SCAnalyzer: true, +- s1016.SCAnalyzer: false, // may rely on coincidental structural subtyping +- s1017.SCAnalyzer: true, +- s1018.SCAnalyzer: true, +- s1019.SCAnalyzer: true, +- s1020.SCAnalyzer: true, +- s1021.SCAnalyzer: false, // may lose important comments +- s1023.SCAnalyzer: true, +- s1024.SCAnalyzer: true, +- s1025.SCAnalyzer: false, // requires buildir +- s1028.SCAnalyzer: true, +- s1029.SCAnalyzer: false, // requires buildir +- s1030.SCAnalyzer: true, // (tentative: see docs, +- s1031.SCAnalyzer: true, +- s1032.SCAnalyzer: true, +- s1033.SCAnalyzer: true, +- s1034.SCAnalyzer: true, +- s1035.SCAnalyzer: true, +- s1036.SCAnalyzer: true, +- s1037.SCAnalyzer: true, +- s1038.SCAnalyzer: true, +- s1039.SCAnalyzer: true, +- s1040.SCAnalyzer: true, +- }) +- +- addAll("stylecheck", stylecheck.Analyzers, M{ +- // These are all slightly too opinionated to be on by default. +- st1000.SCAnalyzer: false, +- st1001.SCAnalyzer: false, +- st1003.SCAnalyzer: false, +- st1005.SCAnalyzer: false, +- st1006.SCAnalyzer: false, +- st1008.SCAnalyzer: false, +- st1011.SCAnalyzer: false, +- st1012.SCAnalyzer: false, +- st1013.SCAnalyzer: false, +- st1015.SCAnalyzer: false, +- st1016.SCAnalyzer: false, +- st1017.SCAnalyzer: false, +- st1018.SCAnalyzer: false, +- st1019.SCAnalyzer: false, +- st1020.SCAnalyzer: false, +- st1021.SCAnalyzer: false, +- st1022.SCAnalyzer: false, +- st1023.SCAnalyzer: false, +- }) +- +- // These are not bug fixes but code transformations: some +- // reversible and value-neutral, of the kind typically listed +- // on the VS Code's Refactor/Source Action/Quick Fix menus. +- // +- // TODO(adonovan): plumb these to the appropriate menu, +- // as we do for code actions such as split/join lines. 
+- addAll("quickfix", quickfix.Analyzers, M{ +- qf1001.SCAnalyzer: false, // not always a style improvement +- qf1002.SCAnalyzer: true, +- qf1003.SCAnalyzer: true, +- qf1004.SCAnalyzer: true, +- qf1005.SCAnalyzer: false, // not always a style improvement +- qf1006.SCAnalyzer: false, // may lose important comments +- qf1007.SCAnalyzer: false, // may lose important comments +- qf1008.SCAnalyzer: false, // not always a style improvement +- qf1009.SCAnalyzer: true, +- qf1010.SCAnalyzer: true, +- qf1011.SCAnalyzer: false, // not always a style improvement +- qf1012.SCAnalyzer: true, +- }) +- +- addAll("staticcheck", staticcheck.Analyzers, M{ +- sa1000.SCAnalyzer: false, // requires buildir +- sa1001.SCAnalyzer: true, +- sa1002.SCAnalyzer: false, // requires buildir +- sa1003.SCAnalyzer: false, // requires buildir +- sa1004.SCAnalyzer: true, +- sa1005.SCAnalyzer: true, +- sa1006.SCAnalyzer: nil, // redundant wrt 'printf' +- sa1007.SCAnalyzer: false, // requires buildir +- sa1008.SCAnalyzer: true, +- sa1010.SCAnalyzer: false, // requires buildir +- sa1011.SCAnalyzer: false, // requires buildir +- sa1012.SCAnalyzer: true, +- sa1013.SCAnalyzer: true, +- sa1014.SCAnalyzer: false, // requires buildir +- sa1015.SCAnalyzer: false, // requires buildir +- sa1016.SCAnalyzer: true, +- sa1017.SCAnalyzer: false, // requires buildir +- sa1018.SCAnalyzer: false, // requires buildir +- sa1019.SCAnalyzer: nil, // redundant wrt 'deprecated' +- sa1020.SCAnalyzer: false, // requires buildir +- sa1021.SCAnalyzer: false, // requires buildir +- sa1023.SCAnalyzer: false, // requires buildir +- sa1024.SCAnalyzer: false, // requires buildir +- sa1025.SCAnalyzer: false, // requires buildir +- sa1026.SCAnalyzer: false, // requires buildir +- sa1027.SCAnalyzer: false, // requires buildir +- sa1028.SCAnalyzer: false, // requires buildir +- sa1029.SCAnalyzer: false, // requires buildir +- sa1030.SCAnalyzer: false, // requires buildir +- sa1031.SCAnalyzer: false, // requires buildir +- sa1032.SCAnalyzer: false, // requires buildir +- sa2000.SCAnalyzer: nil, // redundant wrt 'waitgroup' +- sa2001.SCAnalyzer: true, +- sa2002.SCAnalyzer: false, // requires buildir +- sa2003.SCAnalyzer: false, // requires buildir +- sa3000.SCAnalyzer: true, +- sa3001.SCAnalyzer: true, +- sa4000.SCAnalyzer: true, +- sa4001.SCAnalyzer: true, +- sa4003.SCAnalyzer: true, +- sa4004.SCAnalyzer: true, +- sa4005.SCAnalyzer: false, // requires buildir +- sa4006.SCAnalyzer: false, // requires buildir +- sa4008.SCAnalyzer: false, // requires buildir +- sa4009.SCAnalyzer: false, // requires buildir +- sa4010.SCAnalyzer: false, // requires buildir +- sa4011.SCAnalyzer: true, +- sa4012.SCAnalyzer: false, // requires buildir +- sa4013.SCAnalyzer: true, +- sa4014.SCAnalyzer: true, +- sa4015.SCAnalyzer: false, // requires buildir +- sa4016.SCAnalyzer: true, +- sa4017.SCAnalyzer: false, // requires buildir +- sa4018.SCAnalyzer: false, // requires buildir +- sa4019.SCAnalyzer: true, +- sa4020.SCAnalyzer: true, +- sa4021.SCAnalyzer: nil, // redundant wrt 'appends' +- sa4022.SCAnalyzer: true, +- sa4023.SCAnalyzer: false, // requires buildir +- sa4024.SCAnalyzer: true, +- sa4025.SCAnalyzer: true, +- sa4026.SCAnalyzer: true, +- sa4027.SCAnalyzer: true, +- sa4028.SCAnalyzer: true, +- sa4029.SCAnalyzer: true, +- sa4030.SCAnalyzer: true, +- sa4031.SCAnalyzer: false, // requires buildir +- sa4032.SCAnalyzer: true, +- sa5000.SCAnalyzer: false, // requires buildir +- sa5001.SCAnalyzer: true, +- sa5002.SCAnalyzer: false, // makes unsound deductions from build tags +- 
sa5003.SCAnalyzer: true, +- sa5004.SCAnalyzer: true, +- sa5005.SCAnalyzer: false, // requires buildir +- sa5007.SCAnalyzer: false, // requires buildir +- sa5008.SCAnalyzer: true, +- sa5009.SCAnalyzer: nil, // requires buildir; redundant wrt 'printf' (#34494) +- sa5010.SCAnalyzer: false, // requires buildir +- sa5011.SCAnalyzer: false, // requires buildir +- sa5012.SCAnalyzer: false, // requires buildir +- sa6000.SCAnalyzer: false, // requires buildir +- sa6001.SCAnalyzer: false, // requires buildir +- sa6002.SCAnalyzer: false, // requires buildir +- sa6003.SCAnalyzer: false, // requires buildir +- sa6005.SCAnalyzer: true, +- sa6006.SCAnalyzer: true, +- sa9001.SCAnalyzer: false, // reports a "maybe" bug (low signal/noise) +- sa9002.SCAnalyzer: true, +- sa9003.SCAnalyzer: false, // requires buildir; NonDefault +- sa9004.SCAnalyzer: true, +- sa9005.SCAnalyzer: false, // requires buildir +- sa9006.SCAnalyzer: true, +- sa9007.SCAnalyzer: false, // requires buildir +- sa9008.SCAnalyzer: false, // requires buildir +- sa9009.SCAnalyzer: true, +- }) +- +- return res +-} +diff -urN a/gopls/internal/settings/vet_test.go b/gopls/internal/settings/vet_test.go +--- a/gopls/internal/settings/vet_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/settings/vet_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,50 +0,0 @@ +-// Copyright 2024 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package settings_test +- +-import ( +- "encoding/json" +- "fmt" +- "os/exec" +- "strings" +- "testing" +- +- "golang.org/x/tools/gopls/internal/doc" +- "golang.org/x/tools/internal/testenv" +-) +- +-// TestVetSuite ensures that gopls's analyser suite is a superset of vet's. +-// +-// This test may fail spuriously if gopls/doc/generate.TestGenerated +-// fails. In that case retry after re-running the JSON generator. +-func TestVetSuite(t *testing.T) { +- testenv.NeedsTool(t, "go") +- +- // Read gopls' suite from the API JSON. +- goplsAnalyzers := make(map[string]bool) +- var api doc.API +- if err := json.Unmarshal([]byte(doc.JSON), &api); err != nil { +- t.Fatal(err) +- } +- for _, a := range api.Analyzers { +- goplsAnalyzers[a.Name] = true +- } +- +- // Read vet's suite by parsing its help message. +- cmd := exec.Command("go", "tool", "vet", "help") +- cmd.Stdout = new(strings.Builder) +- if err := cmd.Run(); err != nil { +- t.Fatalf("failed to run vet: %v", err) +- } +- out := fmt.Sprint(cmd.Stdout) +- _, out, _ = strings.Cut(out, "Registered analyzers:\n\n") +- out, _, _ = strings.Cut(out, "\n\n") +- for line := range strings.SplitSeq(out, "\n") { +- name := strings.Fields(line)[0] +- if !goplsAnalyzers[name] { +- t.Errorf("gopls lacks vet analyzer %q", name) +- } +- } +-} +diff -urN a/gopls/internal/telemetry/counterpath.go b/gopls/internal/telemetry/counterpath.go +--- a/gopls/internal/telemetry/counterpath.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/telemetry/counterpath.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,30 +0,0 @@ +-// Copyright 2025 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package telemetry +- +-import "strings" +- +-// A CounterPath represents the components of a telemetry counter name. +-// +-// By convention, counter names follow the format path/to/counter:bucket. 
The +-// CounterPath holds the '/'-separated components of this path, along with a +-// final element representing the bucket. +-// +-// CounterPaths may be used to build up counters incrementally, such as when a +-// set of observed counters shared a common prefix, to be controlled by the +-// caller. +-type CounterPath []string +- +-// FullName returns the counter name for the receiver. +-func (p CounterPath) FullName() string { +- if len(p) == 0 { +- return "" +- } +- name := strings.Join([]string(p[:len(p)-1]), "/") +- if bucket := p[len(p)-1]; bucket != "" { +- name += ":" + bucket +- } +- return name +-} +diff -urN a/gopls/internal/telemetry/counterpath_test.go b/gopls/internal/telemetry/counterpath_test.go +--- a/gopls/internal/telemetry/counterpath_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/telemetry/counterpath_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,47 +0,0 @@ +-// Copyright 2025 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package telemetry +- +-import ( +- "testing" +-) +- +-// TestCounterPath tests the formatting of various counter paths. +-func TestCounterPath(t *testing.T) { +- tests := []struct { +- path CounterPath +- want string +- }{ +- { +- path: CounterPath{}, +- want: "", +- }, +- { +- path: CounterPath{"counter"}, +- want: ":counter", +- }, +- { +- path: CounterPath{"counter", "bucket"}, +- want: "counter:bucket", +- }, +- { +- path: CounterPath{"path", "to", "counter"}, +- want: "path/to:counter", +- }, +- { +- path: CounterPath{"multi", "component", "path", "bucket"}, +- want: "multi/component/path:bucket", +- }, +- { +- path: CounterPath{"path", ""}, +- want: "path", +- }, +- } +- for _, tt := range tests { +- if got := tt.path.FullName(); got != tt.want { +- t.Errorf("CounterPath(%v).FullName() = %v, want %v", tt.path, got, tt.want) +- } +- } +-} +diff -urN a/gopls/internal/telemetry/latency.go b/gopls/internal/telemetry/latency.go +--- a/gopls/internal/telemetry/latency.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/telemetry/latency.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,102 +0,0 @@ +-// Copyright 2023 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package telemetry +- +-import ( +- "context" +- "errors" +- "fmt" +- "sort" +- "sync" +- "time" +- +- "golang.org/x/telemetry/counter" +-) +- +-// latencyKey is used for looking up latency counters. +-type latencyKey struct { +- operation, bucket string +- isError bool +-} +- +-var ( +- latencyBuckets = []struct { +- end time.Duration +- name string +- }{ +- {10 * time.Millisecond, "<10ms"}, +- {50 * time.Millisecond, "<50ms"}, +- {100 * time.Millisecond, "<100ms"}, +- {200 * time.Millisecond, "<200ms"}, +- {500 * time.Millisecond, "<500ms"}, +- {1 * time.Second, "<1s"}, +- {5 * time.Second, "<5s"}, +- {24 * time.Hour, "<24h"}, +- } +- +- latencyCounterMu sync.Mutex +- latencyCounters = make(map[latencyKey]*counter.Counter) // lazily populated +-) +- +-// ForEachLatencyCounter runs the provided function for each current latency +-// counter measuring the given operation. +-// +-// Exported for testing. 
+-func ForEachLatencyCounter(operation string, isError bool, f func(*counter.Counter)) { +- latencyCounterMu.Lock() +- defer latencyCounterMu.Unlock() +- +- for k, v := range latencyCounters { +- if k.operation == operation && k.isError == isError { +- f(v) +- } +- } +-} +- +-// getLatencyCounter returns the counter used to record latency of the given +-// operation in the given bucket. +-func getLatencyCounter(operation, bucket string, isError bool) *counter.Counter { +- latencyCounterMu.Lock() +- defer latencyCounterMu.Unlock() +- +- key := latencyKey{operation, bucket, isError} +- c, ok := latencyCounters[key] +- if !ok { +- var name string +- if isError { +- name = fmt.Sprintf("gopls/%s/error-latency:%s", operation, bucket) +- } else { +- name = fmt.Sprintf("gopls/%s/latency:%s", operation, bucket) +- } +- c = counter.New(name) +- latencyCounters[key] = c +- } +- return c +-} +- +-// StartLatencyTimer starts a timer for the gopls operation with the given +-// name, and returns a func to stop the timer and record the latency sample. +-// +-// If the context provided to the resulting func is done, no observation is +-// recorded. +-func StartLatencyTimer(operation string) func(context.Context, error) { +- start := time.Now() +- return func(ctx context.Context, err error) { +- if errors.Is(ctx.Err(), context.Canceled) { +- // Ignore timing where the operation is cancelled, it may be influenced +- // by client behavior. +- return +- } +- latency := time.Since(start) +- bucketIdx := sort.Search(len(latencyBuckets), func(i int) bool { +- bucket := latencyBuckets[i] +- return latency < bucket.end +- }) +- if bucketIdx < len(latencyBuckets) { // ignore latency longer than a day :) +- bucketName := latencyBuckets[bucketIdx].name +- getLatencyCounter(operation, bucketName, err != nil).Inc() +- } +- } +-} +diff -urN a/gopls/internal/telemetry/telemetry_test.go b/gopls/internal/telemetry/telemetry_test.go +--- a/gopls/internal/telemetry/telemetry_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/telemetry/telemetry_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,276 +0,0 @@ +-// Copyright 2023 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-//go:build go1.21 && !openbsd && !js && !wasip1 && !solaris && !android && !386 +-// +build go1.21,!openbsd,!js,!wasip1,!solaris,!android,!386 +- +-package telemetry_test +- +-import ( +- "context" +- "errors" +- "os" +- "strconv" +- "strings" +- "testing" +- "time" +- +- "golang.org/x/telemetry/counter" +- "golang.org/x/telemetry/counter/countertest" // requires go1.21+ +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/gopls/internal/protocol/command" +- "golang.org/x/tools/gopls/internal/telemetry" +- . "golang.org/x/tools/gopls/internal/test/integration" +- "golang.org/x/tools/gopls/internal/util/bug" +-) +- +-func TestMain(m *testing.M) { +- tmp, err := os.MkdirTemp("", "gopls-telemetry-test-counters") +- if err != nil { +- panic(err) +- } +- countertest.Open(tmp) +- code := Main(m) +- os.RemoveAll(tmp) // ignore error (cleanup fails on Windows; golang/go#68243) +- os.Exit(code) +-} +- +-func TestTelemetry(t *testing.T) { +- var ( +- goversion = "" +- editor = "vscode" // We set ClientName("Visual Studio Code") below. +- ) +- +- // Run gopls once to determine the Go version. 
+- WithOptions( +- Modes(Default), +- ).Run(t, "", func(_ *testing.T, env *Env) { +- goversion = strconv.Itoa(env.GoVersion()) +- }) +- +- // counters that should be incremented once per session +- sessionCounters := []*counter.Counter{ +- counter.New("gopls/client:" + editor), +- counter.New("gopls/goversion:1." + goversion), +- counter.New("fwd/vscode/linter:a"), +- counter.New("gopls/gotoolchain:local"), +- } +- initialCounts := make([]uint64, len(sessionCounters)) +- for i, c := range sessionCounters { +- count, err := countertest.ReadCounter(c) +- if err != nil { +- continue // counter db not open, or counter not found +- } +- initialCounts[i] = count +- } +- +- // Verify that a properly configured session gets notified of a bug on the +- // server. +- WithOptions( +- Modes(Default), // must be in-process to receive the bug report below +- Settings{"showBugReports": true}, +- ClientName("Visual Studio Code"), +- EnvVars{ +- "GOTOOLCHAIN": "local", // so that the local counter is incremented +- }, +- ).Run(t, "", func(_ *testing.T, env *Env) { +- goversion = strconv.Itoa(env.GoVersion()) +- addForwardedCounters(env, []string{"vscode/linter:a"}, []int64{1}) +- const desc = "got a bug" +- +- // This will increment a counter named something like: +- // +- // `gopls/bug +- // golang.org/x/tools/gopls/internal/util/bug.report:+35 +- // golang.org/x/tools/gopls/internal/util/bug.Report:=68 +- // golang.org/x/tools/gopls/internal/telemetry_test.TestTelemetry.func2:+4 +- // golang.org/x/tools/gopls/internal/test/integration.(*Runner).Run.func1:+87 +- // testing.tRunner:+150 +- // runtime.goexit:+0` +- // +- bug.Report(desc) // want a stack counter with the trace starting from here. +- +- env.Await(ShownMessage(desc)) +- }) +- +- // gopls/editor:client +- // gopls/goversion:1.x +- // fwd/vscode/linter:a +- // gopls/gotoolchain:local +- for i, c := range sessionCounters { +- want := initialCounts[i] + 1 +- got, err := countertest.ReadCounter(c) +- if err != nil || got != want { +- t.Errorf("ReadCounter(%q) = (%v, %v), want (%v, nil)", c.Name(), got, err, want) +- t.Logf("Current timestamp = %v", time.Now().UTC()) +- } +- } +- +- // gopls/bug +- bugcount := bug.BugReportCount +- counts, err := countertest.ReadStackCounter(bugcount) +- if err != nil { +- t.Fatalf("ReadStackCounter(bugreportcount) failed - %v", err) +- } +- if len(counts) != 1 || !hasEntry(counts, t.Name(), 1) { +- t.Errorf("read stackcounter(%q) = (%#v, %v), want one entry", "gopls/bug", counts, err) +- t.Logf("Current timestamp = %v", time.Now().UTC()) +- } +-} +- +-func TestSettingTelemetry(t *testing.T) { +- // counters that should be incremented by each session +- sessionCounters := []*counter.Counter{ +- counter.New("gopls/setting/diagnosticsDelay"), +- counter.New("gopls/setting/staticcheck:true"), +- counter.New("gopls/setting/noSemanticString:true"), +- counter.New("gopls/setting/analyses/deprecated:false"), +- } +- +- initialCounts := make([]uint64, len(sessionCounters)) +- for i, c := range sessionCounters { +- count, err := countertest.ReadCounter(c) +- if err != nil { +- continue // counter db not open, or counter not found +- } +- initialCounts[i] = count +- } +- +- // Run gopls. 
+- WithOptions( +- Modes(Default), +- Settings{ +- "staticcheck": true, +- "analyses": map[string]bool{ +- "deprecated": false, +- }, +- "diagnosticsDelay": "0s", +- "noSemanticString": true, +- }, +- ).Run(t, "", func(_ *testing.T, env *Env) { +- }) +- +- for i, c := range sessionCounters { +- count, err := countertest.ReadCounter(c) +- if err != nil { +- t.Errorf("ReadCounter(%q) failed: %v", c.Name(), err) +- continue +- } +- if count <= initialCounts[i] { +- t.Errorf("ReadCounter(%q) = %d, want > %d", c.Name(), count, initialCounts[i]) +- } +- } +-} +- +-func addForwardedCounters(env *Env, names []string, values []int64) { +- args, err := command.MarshalArgs(command.AddTelemetryCountersArgs{ +- Names: names, Values: values, +- }) +- if err != nil { +- env.TB.Fatal(err) +- } +- var res error +- env.ExecuteCommand(&protocol.ExecuteCommandParams{ +- Command: command.AddTelemetryCounters.String(), +- Arguments: args, +- }, &res) +- if res != nil { +- env.TB.Errorf("%v failed - %v", command.AddTelemetryCounters, res) +- } +-} +- +-func hasEntry(counts map[string]uint64, pattern string, want uint64) bool { +- for k, v := range counts { +- if strings.Contains(k, pattern) && v == want { +- return true +- } +- } +- return false +-} +- +-func TestLatencyCounter(t *testing.T) { +- const operation = "TestLatencyCounter" // a unique operation name +- +- stop := telemetry.StartLatencyTimer(operation) +- stop(context.Background(), nil) +- +- for isError, want := range map[bool]uint64{false: 1, true: 0} { +- if got := totalLatencySamples(t, operation, isError); got != want { +- t.Errorf("totalLatencySamples(operation=%v, isError=%v) = %d, want %d", operation, isError, got, want) +- } +- } +-} +- +-func TestLatencyCounter_Error(t *testing.T) { +- const operation = "TestLatencyCounter_Error" // a unique operation name +- +- stop := telemetry.StartLatencyTimer(operation) +- stop(context.Background(), errors.New("bad")) +- +- for isError, want := range map[bool]uint64{false: 0, true: 1} { +- if got := totalLatencySamples(t, operation, isError); got != want { +- t.Errorf("totalLatencySamples(operation=%v, isError=%v) = %d, want %d", operation, isError, got, want) +- } +- } +-} +- +-func TestLatencyCounter_Cancellation(t *testing.T) { +- const operation = "TestLatencyCounter_Cancellation" +- +- stop := telemetry.StartLatencyTimer(operation) +- ctx, cancel := context.WithCancel(context.Background()) +- cancel() +- stop(ctx, nil) +- +- for isError, want := range map[bool]uint64{false: 0, true: 0} { +- if got := totalLatencySamples(t, operation, isError); got != want { +- t.Errorf("totalLatencySamples(operation=%v, isError=%v) = %d, want %d", operation, isError, got, want) +- } +- } +-} +- +-func totalLatencySamples(t *testing.T, operation string, isError bool) uint64 { +- var total uint64 +- telemetry.ForEachLatencyCounter(operation, isError, func(c *counter.Counter) { +- count, err := countertest.ReadCounter(c) +- if err != nil { +- t.Errorf("ReadCounter(%s) failed: %v", c.Name(), err) +- } else { +- total += count +- } +- }) +- return total +-} +- +-func TestLatencyInstrumentation(t *testing.T) { +- const files = ` +--- go.mod -- +-module mod.test/a +-go 1.18 +--- a.go -- +-package a +- +-func _() { +- x := 0 +- _ = x +-} +-` +- +- // Verify that a properly configured session gets notified of a bug on the +- // server. 
+- WithOptions( +- Modes(Default), // must be in-process to receive the bug report below +- ).Run(t, files, func(_ *testing.T, env *Env) { +- env.OpenFile("a.go") +- before := totalLatencySamples(t, "completion", false) +- loc := env.RegexpSearch("a.go", "x") +- for i := 0; i < 10; i++ { +- env.Completion(loc) +- } +- after := totalLatencySamples(t, "completion", false) +- if after-before < 10 { +- t.Errorf("after 10 completions, completion counter went from %d to %d", before, after) +- } +- }) +-} +diff -urN a/gopls/internal/template/completion.go b/gopls/internal/template/completion.go +--- a/gopls/internal/template/completion.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/template/completion.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,266 +0,0 @@ +-// Copyright 2021 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package template +- +-import ( +- "bytes" +- "context" +- "fmt" +- "go/scanner" +- gotoken "go/token" +- "strings" +- +- "golang.org/x/tools/gopls/internal/cache" +- "golang.org/x/tools/gopls/internal/file" +- "golang.org/x/tools/gopls/internal/protocol" +-) +- +-// information needed for completion +-type completer struct { +- p *parsed +- pos protocol.Position +- offset int // offset of the start of the Token +- ctx protocol.CompletionContext +- syms map[string]symbol +-} +- +-func Completion(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle, pos protocol.Position, context protocol.CompletionContext) (*protocol.CompletionList, error) { +- all := parseSet(snapshot.Templates()) +- var start int // the beginning of the Token (completed or not) +- syms := make(map[string]symbol) +- var p *parsed +- for uri, fc := range all.files { +- // collect symbols from all template files +- filterSyms(syms, fc.symbols) +- if uri.Path() != fh.URI().Path() { +- continue +- } +- offset, err := enclosingTokenStart(fc, pos) +- if err != nil { +- return nil, err +- } +- start = offset +- p = fc +- } +- if p == nil { +- // this cannot happen unless the search missed a template file +- return nil, fmt.Errorf("%s not found", fh.Identity().URI.Path()) +- } +- c := completer{ +- p: p, +- pos: pos, +- offset: start + len(lbraces), +- ctx: context, +- syms: syms, +- } +- return c.complete() +-} +- +-func filterSyms(syms map[string]symbol, ns []symbol) { +- for _, xsym := range ns { +- switch xsym.kind { +- case protocol.Method, protocol.Package, protocol.Boolean, protocol.Namespace, +- protocol.Function: +- syms[xsym.name] = xsym // we don't care which symbol we get +- case protocol.Variable: +- if xsym.name != "dot" { +- syms[xsym.name] = xsym +- } +- case protocol.Constant: +- if xsym.name == "nil" { +- syms[xsym.name] = xsym +- } +- } +- } +-} +- +-// enclosingTokenStart returns the start offset of the enclosing token. +-// A (-1, non-nil) result indicates "no enclosing token". +-func enclosingTokenStart(fc *parsed, pos protocol.Position) (int, error) { +- // pos is the pos-th character. if the cursor is at the beginning +- // of the file, pos is 0. That is, we've only seen characters before pos +- // 1. pos might be in a Token, return tk.Start +- // 2. pos might be after an elided but before a Token, return elided +- // 3. 
return -1 for false +- offset, err := fc.mapper.PositionOffset(pos) +- if err != nil { +- return 0, err +- } +- +- // TODO: opt: this could be a binary search, as the tokens are ordered +- for _, tk := range fc.tokens { +- if tk.start+len(lbraces) <= offset && offset+len(rbraces) <= tk.end { +- return tk.start, nil +- } +- } +- +- for _, x := range fc.elided { +- if x+len(lbraces) > offset { +- // fc.elided is sorted, and x is the position where a '{{' was replaced +- // by ' '. We consider only cases where the replaced {{ is to the left +- // of the cursor. +- break +- } +- // If the interval [x,offset] does not contain Left or Right +- // then provide completions. (do we need the test for Right?) +- if !bytes.Contains(fc.buf[x:offset], lbraces) && !bytes.Contains(fc.buf[x:offset], rbraces) { +- return x, nil +- } +- } +- return -1, fmt.Errorf("no token enclosing %d", pos) +-} +- +-var ( +- keywords = []string{"if", "with", "else", "block", "range", "template", "end}}", "end"} +- globals = []string{"and", "call", "html", "index", "slice", "js", "len", "not", "or", +- "urlquery", "printf", "println", "print", "eq", "ne", "le", "lt", "ge", "gt"} +-) +- +-// find the completions. start is the offset of either the Token enclosing pos, or where +-// the incomplete token starts. +-// The error return is always nil. +-func (c *completer) complete() (*protocol.CompletionList, error) { +- ans := &protocol.CompletionList{IsIncomplete: true, Items: []protocol.CompletionItem{}} +- start, err := c.p.mapper.PositionOffset(c.pos) +- if err != nil { +- return ans, err +- } +- sofar := c.p.buf[c.offset:start] +- if len(sofar) == 0 || sofar[len(sofar)-1] == ' ' || sofar[len(sofar)-1] == '\t' { +- return ans, nil +- } +- // sofar could be parsed by either c.analyzer() or scan(). The latter is precise +- // and slower, but fast enough +- words := scan(sofar) +- // 1. if pattern starts $, show variables +- // 2. if pattern starts ., show methods (and . by itself?) +- // 3. if len(words) == 1, show firstWords (but if it were a |, show functions and globals) +- // 4. ...? (parenthetical expressions, arguments, ...) (packages, namespaces, nil?) +- if len(words) == 0 { +- return nil, nil // if this happens, why were we called? +- } +- pattern := words[len(words)-1] +- if pattern[0] == '$' { +- // should we also return a raw "$"? +- for _, s := range c.syms { +- if s.kind == protocol.Variable && weakMatch(s.name, pattern) > 0 { +- ans.Items = append(ans.Items, protocol.CompletionItem{ +- Label: s.name, +- Kind: protocol.VariableCompletion, +- Detail: "Variable", +- }) +- } +- } +- return ans, nil +- } +- if pattern[0] == '.' { +- for _, s := range c.syms { +- if s.kind == protocol.Method && weakMatch("."+s.name, pattern) > 0 { +- ans.Items = append(ans.Items, protocol.CompletionItem{ +- Label: s.name, +- Kind: protocol.MethodCompletion, +- Detail: "Method/member", +- }) +- } +- } +- return ans, nil +- } +- // could we get completion attempts in strings or numbers, and if so, do we care? 
+- // globals +- for _, kw := range globals { +- if weakMatch(kw, pattern) != 0 { +- ans.Items = append(ans.Items, protocol.CompletionItem{ +- Label: kw, +- Kind: protocol.KeywordCompletion, +- Detail: "Function", +- }) +- } +- } +- // and functions +- for _, s := range c.syms { +- if s.kind == protocol.Function && weakMatch(s.name, pattern) != 0 { +- ans.Items = append(ans.Items, protocol.CompletionItem{ +- Label: s.name, +- Kind: protocol.FunctionCompletion, +- Detail: "Function", +- }) +- } +- } +- // keywords if we're at the beginning +- if len(words) <= 1 || len(words[len(words)-2]) == 1 && words[len(words)-2][0] == '|' { +- for _, kw := range keywords { +- if weakMatch(kw, pattern) != 0 { +- ans.Items = append(ans.Items, protocol.CompletionItem{ +- Label: kw, +- Kind: protocol.KeywordCompletion, +- Detail: "keyword", +- }) +- } +- } +- } +- return ans, nil +-} +- +-// version of c.analyze that uses go/scanner. +-func scan(buf []byte) []string { +- fset := gotoken.NewFileSet() +- fp := fset.AddFile("", -1, len(buf)) +- var sc scanner.Scanner +- sc.Init(fp, buf, func(pos gotoken.Position, msg string) {}, scanner.ScanComments) +- ans := make([]string, 0, 10) // preallocating gives a measurable savings +- for { +- _, tok, lit := sc.Scan() // tok is an int +- if tok == gotoken.EOF { +- break // done +- } else if tok == gotoken.SEMICOLON && lit == "\n" { +- continue // don't care, but probably can't happen +- } else if tok == gotoken.PERIOD { +- ans = append(ans, ".") // lit is empty +- } else if tok == gotoken.IDENT && len(ans) > 0 && ans[len(ans)-1] == "." { +- ans[len(ans)-1] = "." + lit +- } else if tok == gotoken.IDENT && len(ans) > 0 && ans[len(ans)-1] == "$" { +- ans[len(ans)-1] = "$" + lit +- } else if lit != "" { +- ans = append(ans, lit) +- } +- } +- return ans +-} +- +-// pattern is what the user has typed +-func weakMatch(choice, pattern string) float64 { +- lower := strings.ToLower(choice) +- // for now, use only lower-case everywhere +- pattern = strings.ToLower(pattern) +- // The first char has to match +- if pattern[0] != lower[0] { +- return 0 +- } +- // If they start with ., then the second char has to match +- from := 1 +- if pattern[0] == '.' { +- if len(pattern) < 2 { +- return 1 // pattern just a ., so it matches +- } +- if pattern[1] != lower[1] { +- return 0 +- } +- from = 2 +- } +- // check that all the characters of pattern occur as a subsequence of choice +- i, j := from, from +- for ; i < len(lower) && j < len(pattern); j++ { +- if pattern[j] == lower[i] { +- i++ +- if i >= len(lower) { +- return 0 +- } +- } +- } +- if j < len(pattern) { +- return 0 +- } +- return 1 +-} +diff -urN a/gopls/internal/template/completion_test.go b/gopls/internal/template/completion_test.go +--- a/gopls/internal/template/completion_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/template/completion_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,105 +0,0 @@ +-// Copyright 2021 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package template +- +-import ( +- "log" +- "sort" +- "strings" +- "testing" +- +- "golang.org/x/tools/gopls/internal/protocol" +-) +- +-func init() { +- log.SetFlags(log.Lshortfile) +-} +- +-type tparse struct { +- marked string // ^ shows where to ask for completions. (The user just typed the following character.) 
+- wanted []string // expected completions; nil => no enclosing token +-} +- +-// Test completions in templates that parse enough (if completion needs symbols) +-// Seen characters up to the ^ +-func TestParsed(t *testing.T) { +- for _, test := range []tparse{ +- {"{{x}}{{12. xx^", nil}, // https://github.com/golang/go/issues/50430 +- {``, nil}, +- {"{{i^f}}", []string{"index", "if"}}, +- {"{{if .}}{{e^ {{end}}", []string{"eq", "end}}", "else", "end"}}, +- {"{{foo}}{{f^", []string{"foo"}}, +- {"{{$^}}", []string{"$"}}, +- {"{{$x:=4}}{{$^", []string{"$x"}}, +- {"{{$x:=4}}{{$ ^ ", []string{}}, +- {"{{len .Modified}}{{.^Mo", []string{"Modified"}}, +- {"{{len .Modified}}{{.mf^", []string{"Modified"}}, +- {"{{$^ }}", []string{"$"}}, +- {"{{$a =3}}{{$^", []string{"$a"}}, +- // .two is not good here: fix someday +- {`{{.Modified}}{{.^{{if $.one.two}}xxx{{end}}`, []string{"Modified", "one", "two"}}, +- {`{{.Modified}}{{.o^{{if $.one.two}}xxx{{end}}`, []string{"one"}}, +- {"{{.Modiifed}}{{.one.t^{{if $.one.two}}xxx{{end}}", []string{"two"}}, +- {`{{block "foo" .}}{{i^`, []string{"index", "if"}}, +- {"{{in^{{Internal}}", []string{"index", "Internal", "if"}}, +- // simple number has no completions +- {"{{4^e", []string{}}, +- // simple string has no completions +- {"{{`e^", []string{}}, +- {"{{`No i^", []string{}}, // example of why go/scanner is used +- {"{{xavier}}{{12. x^", []string{"xavier"}}, +- } { +- t.Run("", func(t *testing.T) { +- var got []string +- if c := testCompleter(t, test); c != nil { +- ans, _ := c.complete() +- for _, a := range ans.Items { +- got = append(got, a.Label) +- } +- } +- if len(got) != len(test.wanted) { +- t.Fatalf("%q: got %q, wanted %q %d,%d", test.marked, got, test.wanted, len(got), len(test.wanted)) +- } +- sort.Strings(test.wanted) +- sort.Strings(got) +- for i := 0; i < len(got); i++ { +- if test.wanted[i] != got[i] { +- t.Fatalf("%q at %d: got %v, wanted %v", test.marked, i, got, test.wanted) +- } +- } +- }) +- } +-} +- +-func testCompleter(t *testing.T, tx tparse) *completer { +- // seen chars up to ^ +- offset := strings.Index(tx.marked, "^") +- buf := strings.Replace(tx.marked, "^", "", 1) +- p := parseBuffer("", []byte(buf)) +- if p.parseErr != nil { +- t.Logf("%q: %v", tx.marked, p.parseErr) +- } +- pos, err := p.mapper.OffsetPosition(offset) +- if err != nil { +- t.Fatal(err) +- } +- +- start, err := enclosingTokenStart(p, pos) +- if err != nil { +- if start == -1 { +- return nil // no enclosing token +- } +- t.Fatal(err) +- } +- syms := make(map[string]symbol) +- filterSyms(syms, p.symbols) +- return &completer{ +- p: p, +- pos: pos, +- offset: start + len(lbraces), +- ctx: protocol.CompletionContext{TriggerKind: protocol.Invoked}, +- syms: syms, +- } +-} +diff -urN a/gopls/internal/template/highlight.go b/gopls/internal/template/highlight.go +--- a/gopls/internal/template/highlight.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/template/highlight.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,112 +0,0 @@ +-// Copyright 2021 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. 
+- +-package template +- +-import ( +- "context" +- "fmt" +- "regexp" +- +- "golang.org/x/tools/gopls/internal/cache" +- "golang.org/x/tools/gopls/internal/file" +- "golang.org/x/tools/gopls/internal/protocol" +-) +- +-func Highlight(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle, loc protocol.Position) ([]protocol.DocumentHighlight, error) { +- buf, err := fh.Content() +- if err != nil { +- return nil, err +- } +- p := parseBuffer(fh.URI(), buf) +- pos, err := p.mapper.PositionOffset(loc) +- if err != nil { +- return nil, err +- } +- +- if p.parseErr == nil { +- for _, s := range p.symbols { +- if s.start <= pos && pos < s.start+s.len { +- return markSymbols(p, s) +- } +- } +- } +- +- // these tokens exist whether or not there was a parse error +- // (symbols require a successful parse) +- for _, tok := range p.tokens { +- if tok.start <= pos && pos < tok.end { +- wordAt := wordAt(p.buf, pos) +- if len(wordAt) > 0 { +- return markWordInToken(p, wordAt) +- } +- } +- } +- +- // TODO: find the 'word' at pos, etc: someday +- // until then we get the default action, which doesn't respect word boundaries +- return nil, nil +-} +- +-func markSymbols(p *parsed, sym symbol) ([]protocol.DocumentHighlight, error) { +- var ans []protocol.DocumentHighlight +- for _, s := range p.symbols { +- if s.name == sym.name { +- kind := protocol.Read +- if s.vardef { +- kind = protocol.Write +- } +- rng, err := p.mapper.OffsetRange(s.offsets()) +- if err != nil { +- return nil, err +- } +- ans = append(ans, protocol.DocumentHighlight{ +- Range: rng, +- Kind: kind, +- }) +- } +- } +- return ans, nil +-} +- +-// A token is {{...}}, and this marks words in the token that equal the give word +-func markWordInToken(p *parsed, wordAt string) ([]protocol.DocumentHighlight, error) { +- var ans []protocol.DocumentHighlight +- pat, err := regexp.Compile(fmt.Sprintf(`\b%s\b`, wordAt)) +- if err != nil { +- return nil, fmt.Errorf("%q: unmatchable word (%v)", wordAt, err) +- } +- for _, tok := range p.tokens { +- matches := pat.FindAllIndex(p.buf[tok.start:tok.end], -1) +- for _, match := range matches { +- rng, err := p.mapper.OffsetRange(match[0], match[1]) +- if err != nil { +- return nil, err +- } +- ans = append(ans, protocol.DocumentHighlight{ +- Range: rng, +- Kind: protocol.Text, +- }) +- } +- } +- return ans, nil +-} +- +-// wordAt returns the word the cursor is in (meaning in or just before) +-func wordAt(buf []byte, pos int) string { +- if pos >= len(buf) { +- return "" +- } +- after := moreRe.Find(buf[pos:]) +- if len(after) == 0 { +- return "" // end of the word +- } +- got := wordRe.Find(buf[:pos+len(after)]) +- return string(got) +-} +- +-var ( +- wordRe = regexp.MustCompile(`[$]?\w+$`) +- moreRe = regexp.MustCompile(`^[$]?\w+`) +-) +diff -urN a/gopls/internal/template/implementations.go b/gopls/internal/template/implementations.go +--- a/gopls/internal/template/implementations.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/template/implementations.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,257 +0,0 @@ +-// Copyright 2021 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. 
+- +-package template +- +-import ( +- "context" +- "fmt" +- "regexp" +- "strconv" +- "time" +- +- "golang.org/x/tools/gopls/internal/cache" +- "golang.org/x/tools/gopls/internal/file" +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/gopls/internal/protocol/semtok" +-) +- +-// line number (1-based) and message +-var errRe = regexp.MustCompile(`template.*:(\d+): (.*)`) +- +-// Diagnostics returns parse errors. There is only one per file. +-// The errors are not always helpful. For instance { {end}} +-// will likely point to the end of the file. +-func Diagnostics(snapshot *cache.Snapshot) map[protocol.DocumentURI][]*cache.Diagnostic { +- diags := make(map[protocol.DocumentURI][]*cache.Diagnostic) +- for uri, fh := range snapshot.Templates() { +- diags[uri] = diagnoseOne(fh) +- } +- return diags +-} +- +-func diagnoseOne(fh file.Handle) []*cache.Diagnostic { +- // no need for skipTemplate check, as Diagnose is called on the +- // snapshot's template files +- buf, err := fh.Content() +- if err != nil { +- // TODO: Is a Diagnostic with no Range useful? event.Error also? +- msg := fmt.Sprintf("failed to read %s (%v)", fh.URI().Path(), err) +- return []*cache.Diagnostic{{ +- Message: msg, +- Severity: protocol.SeverityError, +- URI: fh.URI(), +- Source: cache.TemplateError, +- }} +- } +- p := parseBuffer(fh.URI(), buf) +- if p.parseErr == nil { +- return nil +- } +- +- errorf := func(format string, args ...any) []*cache.Diagnostic { +- msg := fmt.Sprintf("malformed template error %q: %s", +- p.parseErr.Error(), +- fmt.Sprintf(format, args...)) +- rng, err := p.mapper.OffsetRange(0, 1) // first UTF-16 code +- if err != nil { +- rng = protocol.Range{} // start of file +- } +- return []*cache.Diagnostic{{ +- Message: msg, +- Severity: protocol.SeverityError, +- Range: rng, +- URI: fh.URI(), +- Source: cache.TemplateError, +- }} +- } +- +- // errors look like `template: :40: unexpected "}" in operand` +- // so the string needs to be parsed +- matches := errRe.FindStringSubmatch(p.parseErr.Error()) +- if len(matches) != 3 { +- return errorf("expected 3 matches, got %d (%v)", len(matches), matches) +- } +- lineno, err := strconv.Atoi(matches[1]) +- if err != nil { +- return errorf("couldn't convert %q to int, %v", matches[1], err) +- } +- msg := matches[2] +- +- // Compute the range for the whole (1-based) line. +- rng, err := lineRange(p.mapper, lineno) +- if err != nil { +- return errorf("invalid position: %v", err) +- } +- +- return []*cache.Diagnostic{{ +- Message: msg, +- Severity: protocol.SeverityError, +- Range: rng, +- Source: cache.TemplateError, +- }} +-} +- +-// Definition finds the definitions of the symbol at loc. It +-// does not understand scoping (if any) in templates. This code is +-// for definitions, type definitions, and implementations. +-// Results only for variables and templates. 
+-func Definition(snapshot *cache.Snapshot, fh file.Handle, loc protocol.Position) ([]protocol.Location, error) { +- x, _, err := symAtPosition(fh, loc) +- if err != nil { +- return nil, err +- } +- sym := x.name +- ans := []protocol.Location{} +- // PJW: this is probably a pattern to abstract +- a := parseSet(snapshot.Templates()) +- for _, p := range a.files { +- for _, s := range p.symbols { +- if !s.vardef || s.name != sym { +- continue +- } +- loc, err := p.mapper.OffsetLocation(s.offsets()) +- if err != nil { +- return nil, err +- } +- ans = append(ans, loc) +- } +- } +- return ans, nil +-} +- +-func Hover(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle, position protocol.Position) (*protocol.Hover, error) { +- sym, p, err := symAtPosition(fh, position) +- if err != nil { +- return nil, err +- } +- +- var value string +- switch sym.kind { +- case protocol.Function: +- value = fmt.Sprintf("function: %s", sym.name) +- case protocol.Variable: +- value = fmt.Sprintf("variable: %s", sym.name) +- case protocol.Constant: +- value = fmt.Sprintf("constant %s", sym.name) +- case protocol.Method: // field or method +- value = fmt.Sprintf("%s: field or method", sym.name) +- case protocol.Package: // template use, template def (PJW: do we want two?) +- value = fmt.Sprintf("template %s\n(add definition)", sym.name) +- case protocol.Namespace: +- value = fmt.Sprintf("template %s defined", sym.name) +- case protocol.Number: +- value = "number" +- case protocol.String: +- value = "string" +- case protocol.Boolean: +- value = "boolean" +- default: +- value = fmt.Sprintf("oops, sym=%#v", sym) +- } +- +- rng, err := p.mapper.OffsetRange(sym.offsets()) +- if err != nil { +- return nil, err +- } +- +- return &protocol.Hover{ +- Range: rng, +- Contents: protocol.MarkupContent{ +- Kind: protocol.Markdown, +- Value: value, +- }, +- }, nil +-} +- +-func References(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle, params *protocol.ReferenceParams) ([]protocol.Location, error) { +- sym, _, err := symAtPosition(fh, params.Position) +- if err != nil { +- return nil, err +- } +- if sym.name == "" { +- return nil, fmt.Errorf("no symbol at position") +- } +- ans := []protocol.Location{} +- +- a := parseSet(snapshot.Templates()) +- for _, p := range a.files { +- for _, s := range p.symbols { +- if s.name != sym.name { +- continue +- } +- if s.vardef && !params.Context.IncludeDeclaration { +- continue +- } +- loc, err := p.mapper.OffsetLocation(s.offsets()) +- if err != nil { +- return nil, err +- } +- ans = append(ans, loc) +- } +- } +- // TODO: do these need to be sorted? (a.files is a map) +- return ans, nil +-} +- +-func SemanticTokens(ctx context.Context, snapshot *cache.Snapshot, spn protocol.DocumentURI) (*protocol.SemanticTokens, error) { +- fh, err := snapshot.ReadFile(ctx, spn) +- if err != nil { +- return nil, err +- } +- buf, err := fh.Content() +- if err != nil { +- return nil, err +- } +- p := parseBuffer(fh.URI(), buf) +- +- var items []semtok.Token +- for _, t := range p.tokens { +- if t.start == t.end { +- continue // vscode doesn't like 0-length tokens +- } +- pos, err := p.mapper.OffsetPosition(t.start) +- if err != nil { +- return nil, err +- } +- // TODO(adonovan): don't ignore the rng restriction, if any. 
+- items = append(items, semtok.Token{ +- Line: pos.Line, +- Start: pos.Character, +- Len: uint32(protocol.UTF16Len(p.buf[t.start:t.end])), +- Type: semtok.TokMacro, +- }) +- } +- return &protocol.SemanticTokens{ +- Data: semtok.Encode(items, nil, nil), +- // for small cache, some day. for now, the LSP client ignores this +- // (that is, when the LSP client starts returning these, we can cache) +- ResultID: fmt.Sprintf("%v", time.Now()), +- }, nil +-} +- +-// TODO: still need to do rename, etc +- +-func symAtPosition(fh file.Handle, posn protocol.Position) (*symbol, *parsed, error) { +- buf, err := fh.Content() +- if err != nil { +- return nil, nil, err +- } +- p := parseBuffer(fh.URI(), buf) +- offset, err := p.mapper.PositionOffset(posn) +- if err != nil { +- return nil, nil, err +- } +- var syms []symbol +- for _, s := range p.symbols { +- if s.start <= offset && offset < s.start+s.len { +- syms = append(syms, s) +- } +- } +- if len(syms) == 0 { +- return nil, p, fmt.Errorf("no symbol found") +- } +- sym := syms[0] +- return &sym, p, nil +-} +diff -urN a/gopls/internal/template/parse.go b/gopls/internal/template/parse.go +--- a/gopls/internal/template/parse.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/template/parse.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,344 +0,0 @@ +-// Copyright 2021 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-// Package template contains code for dealing with templates +-package template +- +-// template files are small enough that the code reprocesses them each time +-// this may be a bad choice for projects with lots of template files. +- +-import ( +- "bytes" +- "fmt" +- "io" +- "log" +- "regexp" +- "sort" +- "text/template" +- "text/template/parse" +- +- "golang.org/x/tools/gopls/internal/file" +- "golang.org/x/tools/gopls/internal/protocol" +-) +- +-var ( +- lbraces = []byte("{{") +- rbraces = []byte("}}") +-) +- +-type parsed struct { +- buf []byte // contents +- mapper *protocol.Mapper +- elided []int // offsets where lbraces was replaced by blanks +- +- // tokens are matched lbraces-rbraces pairs, computed before trying to parse +- tokens []token +- +- // result of parsing +- named []*template.Template // the template and embedded templates +- parseErr error +- symbols []symbol +- stack []parse.Node // used while computing symbols +-} +- +-// A token is a single {{...}}. +-type token struct { +- start, end int // 0-based byte offset from start of template +-} +- +-// set contains the Parse of all the template files +-type set struct { +- files map[protocol.DocumentURI]*parsed +-} +- +-// parseSet returns the set of the snapshot's tmpl files +-// (maybe cache these, but then avoiding import cycles needs code rearrangements) +-// +-// TODO(adonovan): why doesn't parseSet return an error? 
+-func parseSet(tmpls map[protocol.DocumentURI]file.Handle) *set { +- all := make(map[protocol.DocumentURI]*parsed) +- for uri, fh := range tmpls { +- buf, err := fh.Content() +- if err != nil { +- // TODO(pjw): decide what to do with these errors +- log.Printf("failed to read %s (%v)", fh.URI().Path(), err) +- continue +- } +- all[uri] = parseBuffer(uri, buf) +- } +- return &set{files: all} +-} +- +-func parseBuffer(uri protocol.DocumentURI, buf []byte) *parsed { +- ans := &parsed{ +- buf: buf, +- mapper: protocol.NewMapper(uri, buf), +- } +- if len(buf) == 0 { +- return ans +- } +- ans.setTokens() // ans.buf may be a new []byte +- t, err := template.New("").Parse(string(ans.buf)) +- if err != nil { +- funcs := make(template.FuncMap) +- for t == nil && ans.parseErr == nil { +- // in 1.17 it may be possible to avoid getting this error +- // template: :2: function "foo" not defined +- matches := parseErrR.FindStringSubmatch(err.Error()) +- if len(matches) == 2 { +- // suppress the error by giving it a function with the right name +- funcs[matches[1]] = func() any { return nil } +- t, err = template.New("").Funcs(funcs).Parse(string(ans.buf)) +- continue +- } +- ans.parseErr = err // unfixed error +- return ans +- } +- } +- ans.named = t.Templates() +- // set the symbols +- for _, t := range ans.named { +- ans.stack = append(ans.stack, t.Root) +- ans.findSymbols() +- if t.Name() != "" { +- // defining a template. The pos is just after {{define...}} (or {{block...}}?) +- at, sz := ans.findLiteralBefore(int(t.Root.Pos)) +- s := symbol{start: at, len: sz, name: t.Name(), kind: protocol.Namespace, vardef: true} +- ans.symbols = append(ans.symbols, s) +- } +- } +- +- sort.Slice(ans.symbols, func(i, j int) bool { +- left, right := ans.symbols[i], ans.symbols[j] +- if left.start != right.start { +- return left.start < right.start +- } +- if left.vardef != right.vardef { +- return left.vardef +- } +- return left.kind < right.kind +- }) +- return ans +-} +- +-// findLiteralBefore locates the first preceding string literal +-// returning its offset and length in buf or (-1, 0) if there is none. +-// Assume double-quoted string rather than backquoted string for now. +-func (p *parsed) findLiteralBefore(pos int) (int, int) { +- left, right := -1, -1 +- for i := pos - 1; i >= 0; i-- { +- if p.buf[i] != '"' { +- continue +- } +- if right == -1 { +- right = i +- continue +- } +- left = i +- break +- } +- if left == -1 { +- return -1, 0 +- } +- return left + 1, right - left - 1 +-} +- +-var ( +- parseErrR = regexp.MustCompile(`template:.*function "([^"]+)" not defined`) +-) +- +-func (p *parsed) setTokens() { +- const ( +- // InRaw and InString only occur inside an action (SeenLeft) +- Start = iota +- InRaw +- InString +- SeenLeft +- ) +- state := Start +- var left, oldState int +- for n := 0; n < len(p.buf); n++ { +- c := p.buf[n] +- switch state { +- case InRaw: +- if c == '`' { +- state = oldState +- } +- case InString: +- if c == '"' && !isEscaped(p.buf[:n]) { +- state = oldState +- } +- case SeenLeft: +- if c == '`' { +- oldState = state // it's SeenLeft, but a little clearer this way +- state = InRaw +- continue +- } +- if c == '"' { +- oldState = state +- state = InString +- continue +- } +- if bytes.HasPrefix(p.buf[n:], rbraces) { +- right := n + len(rbraces) +- tok := token{start: left, end: right} +- p.tokens = append(p.tokens, tok) +- state = Start +- } +- // If we see (unquoted) lbraces then the original left is probably the user +- // typing. 
Suppress the original left +- if bytes.HasPrefix(p.buf[n:], lbraces) { +- p.elideAt(left) +- left = n +- n += len(lbraces) - 1 // skip the rest +- } +- case Start: +- if bytes.HasPrefix(p.buf[n:], lbraces) { +- left = n +- state = SeenLeft +- n += len(lbraces) - 1 // skip the rest (avoids {{{ bug) +- } +- } +- } +- // this error occurs after typing {{ at the end of the file +- if state != Start { +- // Unclosed lbraces. remove the lbraces at left +- p.elideAt(left) +- } +-} +- +-func (p *parsed) elideAt(left int) { +- if p.elided == nil { +- // p.buf is the same buffer that v.Read() returns, so copy it. +- // (otherwise the next time it's parsed, elided information is lost) +- p.buf = bytes.Clone(p.buf) +- } +- for i := range lbraces { +- p.buf[left+i] = ' ' +- } +- p.elided = append(p.elided, left) +-} +- +-// isEscaped reports whether the byte after buf is escaped +-func isEscaped(buf []byte) bool { +- backSlashes := 0 +- for j := len(buf) - 1; j >= 0 && buf[j] == '\\'; j-- { +- backSlashes++ +- } +- return backSlashes%2 == 1 +-} +- +-// lineRange returns the range for the entire specified (1-based) line. +-func lineRange(m *protocol.Mapper, line int) (protocol.Range, error) { +- posn := protocol.Position{Line: uint32(line - 1)} +- +- // start of line +- start, err := m.PositionOffset(posn) +- if err != nil { +- return protocol.Range{}, err +- } +- +- // end of line (or file) +- posn.Line++ +- end := len(m.Content) // EOF +- if offset, err := m.PositionOffset(posn); err != nil { +- end = offset - len("\n") +- } +- +- return m.OffsetRange(start, end) +-} +- +-// -- debugging -- +- +-func (p *parsed) writeNode(w io.Writer, n parse.Node) { +- wr := wrNode{p: p, w: w} +- wr.writeNode(n, "") +-} +- +-type wrNode struct { +- p *parsed +- w io.Writer +-} +- +-func (wr wrNode) writeNode(n parse.Node, indent string) { +- if n == nil { +- return +- } +- at := func(pos parse.Pos) string { +- offset := int(pos) +- posn, err := wr.p.mapper.OffsetPosition(offset) +- if err != nil { +- return fmt.Sprintf("", pos, err) +- } +- return fmt.Sprintf("(%d)%v:%v", pos, posn.Line, posn.Character) +- } +- switch x := n.(type) { +- case *parse.ActionNode: +- fmt.Fprintf(wr.w, "%sActionNode at %s\n", indent, at(x.Pos)) +- wr.writeNode(x.Pipe, indent+". ") +- case *parse.BoolNode: +- fmt.Fprintf(wr.w, "%sBoolNode at %s, %v\n", indent, at(x.Pos), x.True) +- case *parse.BranchNode: +- fmt.Fprintf(wr.w, "%sBranchNode at %s\n", indent, at(x.Pos)) +- wr.writeNode(x.Pipe, indent+"Pipe. ") +- wr.writeNode(x.List, indent+"List. ") +- wr.writeNode(x.ElseList, indent+"Else. ") +- case *parse.ChainNode: +- fmt.Fprintf(wr.w, "%sChainNode at %s, %v\n", indent, at(x.Pos), x.Field) +- case *parse.CommandNode: +- fmt.Fprintf(wr.w, "%sCommandNode at %s, %d children\n", indent, at(x.Pos), len(x.Args)) +- for _, a := range x.Args { +- wr.writeNode(a, indent+". ") +- } +- //case *parse.CommentNode: // 1.16 +- case *parse.DotNode: +- fmt.Fprintf(wr.w, "%sDotNode at %s\n", indent, at(x.Pos)) +- case *parse.FieldNode: +- fmt.Fprintf(wr.w, "%sFieldNode at %s, %v\n", indent, at(x.Pos), x.Ident) +- case *parse.IdentifierNode: +- fmt.Fprintf(wr.w, "%sIdentifierNode at %s, %v\n", indent, at(x.Pos), x.Ident) +- case *parse.IfNode: +- fmt.Fprintf(wr.w, "%sIfNode at %s\n", indent, at(x.Pos)) +- wr.writeNode(&x.BranchNode, indent+". 
") +- case *parse.ListNode: +- if x == nil { +- return // nil BranchNode.ElseList +- } +- fmt.Fprintf(wr.w, "%sListNode at %s, %d children\n", indent, at(x.Pos), len(x.Nodes)) +- for _, n := range x.Nodes { +- wr.writeNode(n, indent+". ") +- } +- case *parse.NilNode: +- fmt.Fprintf(wr.w, "%sNilNode at %s\n", indent, at(x.Pos)) +- case *parse.NumberNode: +- fmt.Fprintf(wr.w, "%sNumberNode at %s, %s\n", indent, at(x.Pos), x.Text) +- case *parse.PipeNode: +- if x == nil { +- return // {{template "xxx"}} +- } +- fmt.Fprintf(wr.w, "%sPipeNode at %s, %d vars, %d cmds, IsAssign:%v\n", +- indent, at(x.Pos), len(x.Decl), len(x.Cmds), x.IsAssign) +- for _, d := range x.Decl { +- wr.writeNode(d, indent+"Decl. ") +- } +- for _, c := range x.Cmds { +- wr.writeNode(c, indent+"Cmd. ") +- } +- case *parse.RangeNode: +- fmt.Fprintf(wr.w, "%sRangeNode at %s\n", indent, at(x.Pos)) +- wr.writeNode(&x.BranchNode, indent+". ") +- case *parse.StringNode: +- fmt.Fprintf(wr.w, "%sStringNode at %s, %s\n", indent, at(x.Pos), x.Quoted) +- case *parse.TemplateNode: +- fmt.Fprintf(wr.w, "%sTemplateNode at %s, %s\n", indent, at(x.Pos), x.Name) +- wr.writeNode(x.Pipe, indent+". ") +- case *parse.TextNode: +- fmt.Fprintf(wr.w, "%sTextNode at %s, len %d\n", indent, at(x.Pos), len(x.Text)) +- case *parse.VariableNode: +- fmt.Fprintf(wr.w, "%sVariableNode at %s, %v\n", indent, at(x.Pos), x.Ident) +- case *parse.WithNode: +- fmt.Fprintf(wr.w, "%sWithNode at %s\n", indent, at(x.Pos)) +- wr.writeNode(&x.BranchNode, indent+". ") +- } +-} +diff -urN a/gopls/internal/template/parse_test.go b/gopls/internal/template/parse_test.go +--- a/gopls/internal/template/parse_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/template/parse_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,107 +0,0 @@ +-// Copyright 2021 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package template +- +-import "testing" +- +-func TestSymbols(t *testing.T) { +- for i, test := range []struct { +- buf string +- wantNamed int // expected number of named templates +- syms []string // expected symbols (start, len, name, kind, def?) 
+- }{ +- {` +-{{if (foo .X.Y)}}{{$A := "hi"}}{{.Z $A}}{{else}} +-{{$A.X 12}} +-{{foo (.X.Y) 23 ($A.Zü)}} +-{{end}}`, 1, []string{ +- "{7,3,foo,Function,false}", +- "{12,1,X,Method,false}", +- "{14,1,Y,Method,false}", +- "{21,2,$A,Variable,true}", +- "{26,4,,String,false}", +- "{35,1,Z,Method,false}", +- "{38,2,$A,Variable,false}", +- "{53,2,$A,Variable,false}", +- "{56,1,X,Method,false}", +- "{57,2,,Number,false}", +- "{64,3,foo,Function,false}", +- "{70,1,X,Method,false}", +- "{72,1,Y,Method,false}", +- "{75,2,,Number,false}", +- "{80,2,$A,Variable,false}", +- "{83,3,Zü,Method,false}", +- "{94,3,,Constant,false}", +- }}, +- {`{{define "zzz"}}{{.}}{{end}} +-{{template "zzz"}}`, 2, []string{ +- "{10,3,zzz,Namespace,true}", +- "{18,1,dot,Variable,false}", +- "{41,3,zzz,Package,false}", +- }}, +- {`{{block "aaa" foo}}b{{end}}`, 2, []string{ +- "{9,3,aaa,Namespace,true}", +- "{9,3,aaa,Package,false}", +- "{14,3,foo,Function,false}", +- "{19,1,,Constant,false}", +- }}, +- {"", 0, nil}, +- {`{{/* this is +-a comment */}}`, 1, nil}, // https://go.dev/issue/74635 +- } { +- got := parseBuffer("", []byte(test.buf)) +- if got.parseErr != nil { +- t.Error(got.parseErr) +- continue +- } +- if len(got.named) != test.wantNamed { +- t.Errorf("%d: got %d, expected %d", i, len(got.named), test.wantNamed) +- } +- for n, s := range got.symbols { +- if s.String() != test.syms[n] { +- t.Errorf("%d: got %s, expected %s", i, s.String(), test.syms[n]) +- } +- } +- } +-} +- +-func TestWordAt(t *testing.T) { +- want := []string{"", "", "$A", "$A", "", "", "", "", "", "", +- "", "", "", "if", "if", "", "$A", "$A", "", "", +- "B", "", "", "end", "end", "end", "", "", ""} +- buf := []byte("{{$A := .}}{{if $A}}B{{end}}") +- for i := range buf { +- got := wordAt(buf, i) +- if got != want[i] { +- t.Errorf("for %d, got %q, wanted %q", i, got, want[i]) +- } +- } +-} +- +-func TestQuotes(t *testing.T) { +- for _, s := range []struct { +- tmpl string +- tokCnt int +- elidedCnt int8 +- }{ +- {"{{- /*comment*/ -}}", 1, 0}, +- {"{{/*`\ncomment\n`*/}}", 1, 0}, +- //{"{{foo\nbar}}\n", 1, 0}, // this action spanning lines parses in 1.16 +- {"{{\"{{foo}}{{\"}}", 1, 0}, +- {"{{\n{{- when}}", 1, 1}, // corrected +- {"{{{{if .}}xx{{\n{{end}}", 2, 2}, // corrected +- } { +- p := parseBuffer("", []byte(s.tmpl)) +- if len(p.tokens) != s.tokCnt { +- t.Errorf("%q: got %d tokens, expected %d", s, len(p.tokens), s.tokCnt) +- } +- if p.parseErr != nil { +- t.Errorf("%q: %v", string(p.buf), p.parseErr) +- } +- if len(p.elided) != int(s.elidedCnt) { +- t.Errorf("%q: elided %d, expected %d", s, len(p.elided), s.elidedCnt) +- } +- } +-} +diff -urN a/gopls/internal/template/symbols.go b/gopls/internal/template/symbols.go +--- a/gopls/internal/template/symbols.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/template/symbols.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,271 +0,0 @@ +-// Copyright 2021 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. 
+- +-package template +- +-import ( +- "bytes" +- "context" +- "fmt" +- "text/template/parse" +- +- "golang.org/x/tools/gopls/internal/cache" +- "golang.org/x/tools/gopls/internal/file" +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/internal/event" +-) +- +-// in local coordinates, to be translated to protocol.DocumentSymbol +-type symbol struct { +- start int // 0-based byte offset, for sorting +- len int // of source, in bytes +- name string +- kind protocol.SymbolKind +- vardef bool // is this a variable definition? +- // do we care about selection range, or children? +- // no children yet, and selection range is the same as range +-} +- +-func (s symbol) offsets() (start, end int) { +- return s.start, s.start + s.len +-} +- +-func (s symbol) String() string { +- return fmt.Sprintf("{%d,%d,%s,%s,%v}", s.start, s.len, s.name, s.kind, s.vardef) +-} +- +-// for FieldNode or VariableNode (or ChainNode?) +-func (p *parsed) fields(flds []string, x parse.Node) []symbol { +- ans := []symbol{} +- // guessing that there are no embedded blanks allowed. The doc is unclear +- lookfor := "" +- switch x.(type) { +- case *parse.FieldNode: +- for _, f := range flds { +- lookfor += "." + f // quadratic, but probably ok +- } +- case *parse.VariableNode: +- lookfor = flds[0] +- for i := 1; i < len(flds); i++ { +- lookfor += "." + flds[i] +- } +- case *parse.ChainNode: // PJW, what are these? +- for _, f := range flds { +- lookfor += "." + f // quadratic, but probably ok +- } +- default: +- // If these happen they will happen even if gopls is restarted +- // and the users does the same thing, so it is better not to panic. +- // context.Background() is used because we don't have access +- // to any other context. [we could, but it would be complicated] +- event.Log(context.Background(), fmt.Sprintf("%T unexpected in fields()", x)) +- return nil +- } +- if len(lookfor) == 0 { +- event.Log(context.Background(), fmt.Sprintf("no strings in fields() %#v", x)) +- return nil +- } +- startsAt := int(x.Position()) +- ix := bytes.Index(p.buf[startsAt:], []byte(lookfor)) // HasPrefix? PJW? +- if ix < 0 || ix > len(lookfor) { // lookfor expected to be at start (or so) +- // probably golang.go/#43388, so back up +- startsAt -= len(flds[0]) + 1 +- ix = bytes.Index(p.buf[startsAt:], []byte(lookfor)) // ix might be 1? PJW +- if ix < 0 { +- return ans +- } +- } +- at := ix + startsAt +- for _, f := range flds { +- at += 1 // . 
+- kind := protocol.Method +- if f[0] == '$' { +- kind = protocol.Variable +- } +- sym := symbol{name: f, kind: kind, start: at, len: len(f)} +- if kind == protocol.Variable && len(p.stack) > 1 { +- if pipe, ok := p.stack[len(p.stack)-2].(*parse.PipeNode); ok { +- for _, y := range pipe.Decl { +- if x == y { +- sym.vardef = true +- } +- } +- } +- } +- ans = append(ans, sym) +- at += len(f) +- } +- return ans +-} +- +-func (p *parsed) findSymbols() { +- if len(p.stack) == 0 { +- return +- } +- n := p.stack[len(p.stack)-1] +- pop := func() { +- p.stack = p.stack[:len(p.stack)-1] +- } +- if n == nil { // allowing nil simplifies the code +- pop() +- return +- } +- nxt := func(nd parse.Node) { +- p.stack = append(p.stack, nd) +- p.findSymbols() +- } +- switch x := n.(type) { +- case *parse.ActionNode: +- nxt(x.Pipe) +- case *parse.BoolNode: +- // need to compute the length from the value +- msg := fmt.Sprintf("%v", x.True) +- p.symbols = append(p.symbols, symbol{start: int(x.Pos), len: len(msg), kind: protocol.Boolean}) +- case *parse.BranchNode: +- nxt(x.Pipe) +- nxt(x.List) +- nxt(x.ElseList) +- case *parse.ChainNode: +- p.symbols = append(p.symbols, p.fields(x.Field, x)...) +- nxt(x.Node) +- case *parse.CommandNode: +- for _, a := range x.Args { +- nxt(a) +- } +- //case *parse.CommentNode: // go 1.16 +- // log.Printf("implement %d", x.Type()) +- case *parse.DotNode: +- sym := symbol{name: "dot", kind: protocol.Variable, start: int(x.Pos), len: 1} +- p.symbols = append(p.symbols, sym) +- case *parse.FieldNode: +- p.symbols = append(p.symbols, p.fields(x.Ident, x)...) +- case *parse.IdentifierNode: +- sym := symbol{name: x.Ident, kind: protocol.Function, start: int(x.Pos), len: len(x.Ident)} +- p.symbols = append(p.symbols, sym) +- case *parse.IfNode: +- nxt(&x.BranchNode) +- case *parse.ListNode: +- if x != nil { // wretched typed nils. Node should have an IfNil +- for _, nd := range x.Nodes { +- nxt(nd) +- } +- } +- case *parse.NilNode: +- sym := symbol{name: "nil", kind: protocol.Constant, start: int(x.Pos), len: 3} +- p.symbols = append(p.symbols, sym) +- case *parse.NumberNode: +- // no name; ascii +- p.symbols = append(p.symbols, symbol{start: int(x.Pos), len: len(x.Text), kind: protocol.Number}) +- case *parse.PipeNode: +- if x == nil { // {{template "foo"}} +- return +- } +- for _, d := range x.Decl { +- nxt(d) +- } +- for _, c := range x.Cmds { +- nxt(c) +- } +- case *parse.RangeNode: +- nxt(&x.BranchNode) +- case *parse.StringNode: +- // no name +- p.symbols = append(p.symbols, symbol{start: int(x.Pos), len: len(x.Quoted), kind: protocol.String}) +- case *parse.TemplateNode: +- // invoking a template, e.g. {{define "foo"}} +- // x.Pos is the index of "foo". +- // The logic below assumes that the literal is trivial. +- p.symbols = append(p.symbols, symbol{name: x.Name, kind: protocol.Package, start: int(x.Pos) + len(`"`), len: len(x.Name)}) +- nxt(x.Pipe) +- case *parse.TextNode: +- if len(x.Text) == 1 && x.Text[0] == '\n' { +- break +- } +- // nothing to report, but build one for hover +- p.symbols = append(p.symbols, symbol{start: int(x.Pos), len: len(x.Text), kind: protocol.Constant}) +- case *parse.VariableNode: +- p.symbols = append(p.symbols, p.fields(x.Ident, x)...) +- case *parse.WithNode: +- nxt(&x.BranchNode) +- } +- pop() +-} +- +-// DocumentSymbols returns a hierarchy of the symbols defined in a template file. +-// (The hierarchy is flat. SymbolInformation might be better.) 
+-func DocumentSymbols(snapshot *cache.Snapshot, fh file.Handle) ([]protocol.DocumentSymbol, error) { +- buf, err := fh.Content() +- if err != nil { +- return nil, err +- } +- p := parseBuffer(fh.URI(), buf) +- if p.parseErr != nil { +- return nil, p.parseErr +- } +- var ans []protocol.DocumentSymbol +- for _, sym := range p.symbols { +- if sym.kind == protocol.Constant { +- continue +- } +- detail := kindStr(sym.kind) +- if detail == "Namespace" { +- detail = "Template" +- } +- if sym.vardef { +- detail += "(def)" +- } else { +- detail += "(use)" +- } +- rng, err := p.mapper.OffsetRange(sym.offsets()) +- if err != nil { +- return nil, err +- } +- ans = append(ans, protocol.DocumentSymbol{ +- Name: sym.name, +- Detail: detail, +- Kind: sym.kind, +- Range: rng, +- SelectionRange: rng, // or should this be the entire {{...}}? +- }) +- } +- return ans, nil +-} +- +-func kindStr(k protocol.SymbolKind) string { +- n := int(k) +- if n < 1 || n >= len(kindNames) { +- return fmt.Sprintf("?SymbolKind %d?", n) +- } +- return kindNames[n] +-} +- +-var kindNames = []string{ +- "", +- "File", +- "Module", +- "Namespace", +- "Package", +- "Class", +- "Method", +- "Property", +- "Field", +- "Constructor", +- "Enum", +- "Interface", +- "Function", +- "Variable", +- "Constant", +- "String", +- "Number", +- "Boolean", +- "Array", +- "Object", +- "Key", +- "Null", +- "EnumMember", +- "Struct", +- "Event", +- "Operator", +- "TypeParameter", +-} +diff -urN a/gopls/internal/test/compare/text.go b/gopls/internal/test/compare/text.go +--- a/gopls/internal/test/compare/text.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/compare/text.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,49 +0,0 @@ +-// Copyright 2022 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package compare +- +-import ( +- "bytes" +- +- "golang.org/x/tools/internal/diff" +-) +- +-// Text returns a formatted unified diff of the edits to go from want to +-// got, returning "" if and only if want == got. +-// +-// This function is intended for use in testing, and panics if any error occurs +-// while computing the diff. It is not sufficiently tested for production use. +-func Text(want, got string) string { +- return NamedText("want", "got", want, got) +-} +- +-// NamedText is like text, but allows passing custom names of the 'want' and +-// 'got' content. +-func NamedText(wantName, gotName, want, got string) string { +- if want == got { +- return "" +- } +- +- // Add newlines to avoid verbose newline messages ("No newline at end of file"). +- unified := diff.Unified(wantName, gotName, want+"\n", got+"\n") +- +- // Defensively assert that we get an actual diff, so that we guarantee the +- // invariant that we return "" if and only if want == got. +- // +- // This is probably unnecessary, but convenient. +- if unified == "" { +- panic("empty diff for non-identical input") +- } +- +- return unified +-} +- +-// Bytes is like Text but using byte slices. +-func Bytes(want, got []byte) string { +- if bytes.Equal(want, got) { +- return "" // common case +- } +- return Text(string(want), string(got)) +-} +diff -urN a/gopls/internal/test/compare/text_test.go b/gopls/internal/test/compare/text_test.go +--- a/gopls/internal/test/compare/text_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/compare/text_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,28 +0,0 @@ +-// Copyright 2022 The Go Authors. All rights reserved. 
+-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package compare_test +- +-import ( +- "testing" +- +- "golang.org/x/tools/gopls/internal/test/compare" +-) +- +-func TestText(t *testing.T) { +- tests := []struct { +- got, want, wantDiff string +- }{ +- {"", "", ""}, +- {"equal", "equal", ""}, +- {"a", "b", "--- want\n+++ got\n@@ -1 +1 @@\n-b\n+a\n"}, +- {"a\nd\nc\n", "a\nb\nc\n", "--- want\n+++ got\n@@ -1,4 +1,4 @@\n a\n-b\n+d\n c\n \n"}, +- } +- +- for _, test := range tests { +- if gotDiff := compare.Text(test.want, test.got); gotDiff != test.wantDiff { +- t.Errorf("compare.Text(%q, %q) =\n%q, want\n%q", test.want, test.got, gotDiff, test.wantDiff) +- } +- } +-} +diff -urN a/gopls/internal/test/integration/bench/bench_test.go b/gopls/internal/test/integration/bench/bench_test.go +--- a/gopls/internal/test/integration/bench/bench_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/integration/bench/bench_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,354 +0,0 @@ +-// Copyright 2020 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package bench +- +-import ( +- "bytes" +- "compress/gzip" +- "context" +- "flag" +- "fmt" +- "io" +- "log" +- "os" +- "os/exec" +- "path/filepath" +- "strings" +- "sync" +- "testing" +- "time" +- +- "golang.org/x/tools/gopls/internal/cmd" +- "golang.org/x/tools/gopls/internal/protocol/command" +- "golang.org/x/tools/gopls/internal/test/integration" +- "golang.org/x/tools/gopls/internal/test/integration/fake" +- "golang.org/x/tools/gopls/internal/util/bug" +- "golang.org/x/tools/gopls/internal/util/fakenet" +- "golang.org/x/tools/internal/event" +- "golang.org/x/tools/internal/jsonrpc2" +- "golang.org/x/tools/internal/jsonrpc2/servertest" +- "golang.org/x/tools/internal/pprof" +- "golang.org/x/tools/internal/tool" +-) +- +-var ( +- goplsPath = flag.String("gopls_path", "", "if set, use this gopls for testing; incompatible with -gopls_commit") +- +- installGoplsOnce sync.Once // guards installing gopls at -gopls_commit +- goplsCommit = flag.String("gopls_commit", "", "if set, install and use gopls at this commit for testing; incompatible with -gopls_path") +- +- cpuProfile = flag.String("gopls_cpuprofile", "", "if set, the cpu profile file suffix; see \"Profiling\" in the package doc") +- memProfile = flag.String("gopls_memprofile", "", "if set, the mem profile file suffix; see \"Profiling\" in the package doc") +- allocProfile = flag.String("gopls_allocprofile", "", "if set, the alloc profile file suffix; see \"Profiling\" in the package doc") +- blockProfile = flag.String("gopls_blockprofile", "", "if set, the block profile file suffix; see \"Profiling\" in the package doc") +- trace = flag.String("gopls_trace", "", "if set, the trace file suffix; see \"Profiling\" in the package doc") +- +- // If non-empty, tempDir is a temporary working dir that was created by this +- // test suite. +- makeTempDirOnce sync.Once // guards creation of the temp dir +- tempDir string +-) +- +-// if runAsGopls is "true", run the gopls command instead of the testing.M. 
+-const runAsGopls = "_GOPLS_BENCH_RUN_AS_GOPLS" +- +-func TestMain(m *testing.M) { +- bug.PanicOnBugs = true +- if os.Getenv(runAsGopls) == "true" { +- tool.Main(context.Background(), cmd.New(), os.Args[1:]) +- os.Exit(0) +- } +- event.SetExporter(nil) // don't log to stderr +- code := m.Run() +- if err := cleanup(); err != nil { +- fmt.Fprintf(os.Stderr, "cleaning up after benchmarks: %v\n", err) +- if code == 0 { +- code = 1 +- } +- } +- os.Exit(code) +-} +- +-// getTempDir returns the temporary directory to use for benchmark files, +-// creating it if necessary. +-func getTempDir() string { +- makeTempDirOnce.Do(func() { +- var err error +- tempDir, err = os.MkdirTemp("", "gopls-bench") +- if err != nil { +- log.Fatal(err) +- } +- }) +- return tempDir +-} +- +-// shallowClone performs a shallow clone of repo into dir at the given +-// 'commitish' ref (any commit reference understood by git). +-// +-// The directory dir must not already exist. +-func shallowClone(dir, repo, commitish string) error { +- if err := os.Mkdir(dir, 0750); err != nil { +- return fmt.Errorf("creating dir for %s: %v", repo, err) +- } +- +- // Set a timeout for git fetch. If this proves flaky, it can be removed. +- ctx, cancel := context.WithTimeout(context.Background(), 1*time.Minute) +- defer cancel() +- +- // Use a shallow fetch to download just the relevant commit. +- shInit := fmt.Sprintf("git init && git fetch --depth=1 %q %q && git checkout FETCH_HEAD", repo, commitish) +- initCmd := exec.CommandContext(ctx, "/bin/sh", "-c", shInit) +- initCmd.Dir = dir +- if output, err := initCmd.CombinedOutput(); err != nil { +- return fmt.Errorf("checking out %s: %v\n%s", repo, err, output) +- } +- return nil +-} +- +-// connectEditor connects a fake editor session in the given dir, using the +-// given editor config. +-func connectEditor(dir string, config fake.EditorConfig, ts servertest.Connector) (*fake.Sandbox, *fake.Editor, *integration.Awaiter, error) { +- s, err := fake.NewSandbox(&fake.SandboxConfig{ +- Workdir: dir, +- GOPROXY: "https://proxy.golang.org", +- }) +- if err != nil { +- return nil, nil, nil, err +- } +- +- a := integration.NewAwaiter(s.Workdir) +- editor, err := fake.NewEditor(s, config).Connect(context.Background(), ts, a.Hooks()) +- if err != nil { +- return nil, nil, nil, err +- } +- +- return s, editor, a, nil +-} +- +-// newGoplsConnector returns a connector that connects to a new gopls process, +-// executed with the provided arguments. +-func newGoplsConnector(args []string) (servertest.Connector, error) { +- if *goplsPath != "" && *goplsCommit != "" { +- panic("can't set both -gopls_path and -gopls_commit") +- } +- var ( +- goplsPath = *goplsPath +- env []string +- ) +- if *goplsCommit != "" { +- goplsPath = getInstalledGopls() +- } +- if goplsPath == "" { +- var err error +- goplsPath, err = os.Executable() +- if err != nil { +- return nil, err +- } +- env = []string{fmt.Sprintf("%s=true", runAsGopls)} +- } +- return &SidecarServer{ +- goplsPath: goplsPath, +- env: env, +- args: args, +- }, nil +-} +- +-// profileArgs returns additional command-line arguments to use when invoking +-// gopls, to enable the user-requested profiles. +-// +-// If wantCPU is set, CPU profiling is enabled as well. Some tests may want to +-// instrument profiling around specific critical sections of the benchmark, +-// rather than the entire process. +-// +-// TODO(rfindley): like CPU, all of these would be better served by a custom +-// command. 
Very rarely do we care about memory usage as the process exits: we +-// care about specific points in time during the benchmark. mem and alloc +-// should be snapshotted, and tracing should be bracketed around critical +-// sections. +-func profileArgs(name string, wantCPU bool) []string { +- var args []string +- if wantCPU && *cpuProfile != "" { +- args = append(args, fmt.Sprintf("-profile.cpu=%s", qualifiedName(name, *cpuProfile))) +- } +- if *memProfile != "" { +- args = append(args, fmt.Sprintf("-profile.mem=%s", qualifiedName(name, *memProfile))) +- } +- if *allocProfile != "" { +- args = append(args, fmt.Sprintf("-profile.alloc=%s", qualifiedName(name, *allocProfile))) +- } +- if *blockProfile != "" { +- args = append(args, fmt.Sprintf("-profile.block=%s", qualifiedName(name, *blockProfile))) +- } +- if *trace != "" { +- args = append(args, fmt.Sprintf("-profile.trace=%s", qualifiedName(name, *trace))) +- } +- return args +-} +- +-func qualifiedName(args ...string) string { +- return strings.Join(args, ".") +-} +- +-// getInstalledGopls builds gopls at the given -gopls_commit, returning the +-// path to the gopls binary. +-func getInstalledGopls() string { +- if *goplsCommit == "" { +- panic("must provide -gopls_commit") +- } +- toolsDir := filepath.Join(getTempDir(), "gopls_build") +- goplsPath := filepath.Join(toolsDir, "gopls", "gopls") +- +- installGoplsOnce.Do(func() { +- log.Printf("installing gopls: checking out x/tools@%s into %s\n", *goplsCommit, toolsDir) +- if err := shallowClone(toolsDir, "https://go.googlesource.com/tools", *goplsCommit); err != nil { +- log.Fatal(err) +- } +- +- log.Println("installing gopls: building...") +- bld := exec.Command("go", "build", ".") +- bld.Dir = filepath.Join(toolsDir, "gopls") +- if output, err := bld.CombinedOutput(); err != nil { +- log.Fatalf("building gopls: %v\n%s", err, output) +- } +- +- // Confirm that the resulting path now exists. +- if _, err := os.Stat(goplsPath); err != nil { +- log.Fatalf("os.Stat(%s): %v", goplsPath, err) +- } +- }) +- return goplsPath +-} +- +-// A SidecarServer starts (and connects to) a separate gopls process at the +-// given path. +-type SidecarServer struct { +- goplsPath string +- env []string // additional environment bindings +- args []string // command-line arguments +-} +- +-// Connect creates new io.Pipes and binds them to the underlying StreamServer. +-// +-// It implements the servertest.Connector interface. +-func (s *SidecarServer) Connect(ctx context.Context) jsonrpc2.Conn { +- // Note: don't use CommandContext here, as we want gopls to exit gracefully +- // in order to write out profile data. +- // +- // We close the connection on context cancellation below. +- cmd := exec.Command(s.goplsPath, s.args...) +- +- stdin, err := cmd.StdinPipe() +- if err != nil { +- log.Fatal(err) +- } +- stdout, err := cmd.StdoutPipe() +- if err != nil { +- log.Fatal(err) +- } +- cmd.Stderr = os.Stderr +- cmd.Env = append(os.Environ(), s.env...) +- if err := cmd.Start(); err != nil { +- log.Fatalf("starting gopls: %v", err) +- } +- +- go func() { +- // If we don't log.Fatal here, benchmarks may hang indefinitely if gopls +- // exits abnormally. +- // +- // TODO(rfindley): ideally we would shut down the connection gracefully, +- // but that doesn't currently work. 
+- if err := cmd.Wait(); err != nil { +- log.Fatalf("gopls invocation failed with error: %v", err) +- } +- }() +- +- clientStream := jsonrpc2.NewHeaderStream(fakenet.NewConn("stdio", stdout, stdin)) +- clientConn := jsonrpc2.NewConn(clientStream) +- +- go func() { +- select { +- case <-ctx.Done(): +- clientConn.Close() // ignore error +- clientStream.Close() // ignore error +- case <-clientConn.Done(): +- } +- }() +- +- return clientConn +-} +- +-// startProfileIfSupported checks to see if the remote gopls instance supports +-// the start/stop profiling commands. If so, it starts profiling and returns a +-// function that stops profiling and records the total CPU seconds sampled in the +-// cpu_seconds benchmark metric. +-// +-// If the remote gopls instance does not support profiling commands, this +-// function returns nil. +-// +-// If the supplied userSuffix is non-empty, the profile is written to +-// ., and not deleted when the benchmark exits. Otherwise, +-// the profile is written to a temp file that is deleted after the cpu_seconds +-// metric has been computed. +-func startProfileIfSupported(b *testing.B, env *integration.Env, name string) func() { +- if !env.Editor.HasCommand(command.StartProfile) { +- return nil +- } +- b.StopTimer() +- stopProfile := env.StartProfile() +- b.StartTimer() +- return func() { +- b.StopTimer() +- profFile := stopProfile() +- totalCPU, err := totalCPUForProfile(profFile) +- if err != nil { +- b.Fatalf("reading profile: %v", err) +- } +- b.ReportMetric(totalCPU.Seconds()/float64(b.N), "cpu_seconds/op") +- if *cpuProfile != "" { +- // Read+write to avoid exdev errors. +- data, err := os.ReadFile(profFile) +- if err != nil { +- b.Fatalf("reading profile: %v", err) +- } +- name := qualifiedName(name, *cpuProfile) +- if err := os.WriteFile(name, data, 0666); err != nil { +- b.Fatalf("writing profile: %v", err) +- } +- } +- if err := os.Remove(profFile); err != nil { +- b.Errorf("removing profile file: %v", err) +- } +- } +-} +- +-// totalCPUForProfile reads the pprof profile with the given file name, parses, +-// and aggregates the total CPU sampled during the profile. +-func totalCPUForProfile(filename string) (time.Duration, error) { +- protoGz, err := os.ReadFile(filename) +- if err != nil { +- return 0, err +- } +- rd, err := gzip.NewReader(bytes.NewReader(protoGz)) +- if err != nil { +- return 0, fmt.Errorf("creating gzip reader for %s: %v", filename, err) +- } +- data, err := io.ReadAll(rd) +- if err != nil { +- return 0, fmt.Errorf("reading %s: %v", filename, err) +- } +- return pprof.TotalTime(data) +-} +- +-// closeBuffer stops the benchmark timer and closes the buffer with the given +-// name. +-// +-// It may be used to clean up files opened in the shared environment during +-// benchmarking. +-func closeBuffer(b *testing.B, env *integration.Env, name string) { +- b.StopTimer() +- env.CloseBuffer(name) +- env.AfterChange() +- b.StartTimer() +-} +diff -urN a/gopls/internal/test/integration/bench/codeaction_test.go b/gopls/internal/test/integration/bench/codeaction_test.go +--- a/gopls/internal/test/integration/bench/codeaction_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/integration/bench/codeaction_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,69 +0,0 @@ +-// Copyright 2023 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. 
+- +-package bench +- +-import ( +- "fmt" +- "sync/atomic" +- "testing" +- +- "golang.org/x/tools/gopls/internal/protocol" +-) +- +-func BenchmarkCodeAction(b *testing.B) { +- for _, test := range didChangeTests { +- b.Run(test.repo, func(b *testing.B) { +- env := getRepo(b, test.repo).sharedEnv(b) +- env.OpenFile(test.file) +- defer closeBuffer(b, env, test.file) +- env.AfterChange() +- +- env.CodeActionForFile(test.file, nil) // pre-warm +- +- b.ResetTimer() +- +- if stopAndRecord := startProfileIfSupported(b, env, qualifiedName(test.repo, "hover")); stopAndRecord != nil { +- defer stopAndRecord() +- } +- +- for b.Loop() { +- env.CodeActionForFile(test.file, nil) +- } +- }) +- } +-} +- +-func BenchmarkCodeActionFollowingEdit(b *testing.B) { +- for _, test := range didChangeTests { +- b.Run(test.repo, func(b *testing.B) { +- env := getRepo(b, test.repo).sharedEnv(b) +- env.OpenFile(test.file) +- defer closeBuffer(b, env, test.file) +- env.EditBuffer(test.file, protocol.TextEdit{NewText: "// __TEST_PLACEHOLDER_0__\n"}) +- env.AfterChange() +- +- env.CodeActionForFile(test.file, nil) // pre-warm +- +- b.ResetTimer() +- +- if stopAndRecord := startProfileIfSupported(b, env, qualifiedName(test.repo, "hover")); stopAndRecord != nil { +- defer stopAndRecord() +- } +- +- for b.Loop() { +- edits := atomic.AddInt64(&editID, 1) +- env.EditBuffer(test.file, protocol.TextEdit{ +- Range: protocol.Range{ +- Start: protocol.Position{Line: 0, Character: 0}, +- End: protocol.Position{Line: 1, Character: 0}, +- }, +- // Increment the placeholder text, to ensure cache misses. +- NewText: fmt.Sprintf("// __TEST_PLACEHOLDER_%d__\n", edits), +- }) +- env.CodeActionForFile(test.file, nil) +- } +- }) +- } +-} +diff -urN a/gopls/internal/test/integration/bench/completion_test.go b/gopls/internal/test/integration/bench/completion_test.go +--- a/gopls/internal/test/integration/bench/completion_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/integration/bench/completion_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,328 +0,0 @@ +-// Copyright 2020 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package bench +- +-import ( +- "flag" +- "fmt" +- "sync/atomic" +- "testing" +- +- "golang.org/x/tools/gopls/internal/protocol" +- . "golang.org/x/tools/gopls/internal/test/integration" +- "golang.org/x/tools/gopls/internal/test/integration/fake" +-) +- +-var completionGOPATH = flag.String("completion_gopath", "", "if set, use this GOPATH for BenchmarkCompletion") +- +-type completionBenchOptions struct { +- file, locationRegexp string +- +- // Hooks to run edits before initial completion +- setup func(*Env) // run before the benchmark starts +- beforeCompletion func(*Env) // run before each completion +-} +- +-// Deprecated: new tests should be expressed in BenchmarkCompletion. +-func benchmarkCompletion(options completionBenchOptions, b *testing.B) { +- repo := getRepo(b, "tools") +- _ = repo.sharedEnv(b) // ensure cache is warm +- env := repo.newEnv(b, fake.EditorConfig{}, "completion", false) +- defer env.Close() +- +- // Run edits required for this completion. +- if options.setup != nil { +- options.setup(env) +- } +- +- // Run a completion to make sure the system is warm. 
+- loc := env.RegexpSearch(options.file, options.locationRegexp) +- completions := env.Completion(loc) +- +- if testing.Verbose() { +- fmt.Println("Results:") +- for i := 0; i < len(completions.Items); i++ { +- fmt.Printf("\t%d. %v\n", i, completions.Items[i]) +- } +- } +- +- b.Run("tools", func(b *testing.B) { +- if stopAndRecord := startProfileIfSupported(b, env, qualifiedName("tools", "completion")); stopAndRecord != nil { +- defer stopAndRecord() +- } +- +- for b.Loop() { +- if options.beforeCompletion != nil { +- options.beforeCompletion(env) +- } +- env.Completion(loc) +- } +- }) +-} +- +-// endRangeInBuffer returns the position for last character in the buffer for +-// the given file. +-func endRangeInBuffer(env *Env, name string) protocol.Range { +- buffer := env.BufferText(name) +- m := protocol.NewMapper("", []byte(buffer)) +- rng, err := m.OffsetRange(len(buffer), len(buffer)) +- if err != nil { +- env.TB.Fatal(err) +- } +- return rng +-} +- +-// Benchmark struct completion in tools codebase. +-func BenchmarkStructCompletion(b *testing.B) { +- file := "internal/lsp/cache/session.go" +- +- setup := func(env *Env) { +- env.OpenFile(file) +- env.EditBuffer(file, protocol.TextEdit{ +- Range: endRangeInBuffer(env, file), +- NewText: "\nvar testVariable map[string]bool = Session{}.\n", +- }) +- } +- +- benchmarkCompletion(completionBenchOptions{ +- file: file, +- locationRegexp: `var testVariable map\[string\]bool = Session{}(\.)`, +- setup: setup, +- }, b) +-} +- +-// Benchmark import completion in tools codebase. +-func BenchmarkImportCompletion(b *testing.B) { +- const file = "internal/lsp/source/completion/completion.go" +- benchmarkCompletion(completionBenchOptions{ +- file: file, +- locationRegexp: `go\/()`, +- setup: func(env *Env) { env.OpenFile(file) }, +- }, b) +-} +- +-// Benchmark slice completion in tools codebase. +-func BenchmarkSliceCompletion(b *testing.B) { +- file := "internal/lsp/cache/session.go" +- +- setup := func(env *Env) { +- env.OpenFile(file) +- env.EditBuffer(file, protocol.TextEdit{ +- Range: endRangeInBuffer(env, file), +- NewText: "\nvar testVariable []byte = \n", +- }) +- } +- +- benchmarkCompletion(completionBenchOptions{ +- file: file, +- locationRegexp: `var testVariable \[\]byte (=)`, +- setup: setup, +- }, b) +-} +- +-// Benchmark deep completion in function call in tools codebase. +-func BenchmarkFuncDeepCompletion(b *testing.B) { +- file := "internal/lsp/source/completion/completion.go" +- fileContent := ` +-func (c *completer) _() { +- c.inference.kindMatches(c.) +-} +-` +- setup := func(env *Env) { +- env.OpenFile(file) +- originalBuffer := env.BufferText(file) +- env.EditBuffer(file, protocol.TextEdit{ +- Range: endRangeInBuffer(env, file), +- // TODO(rfindley): this is a bug: it should just be fileContent. +- NewText: originalBuffer + fileContent, +- }) +- } +- +- benchmarkCompletion(completionBenchOptions{ +- file: file, +- locationRegexp: `func \(c \*completer\) _\(\) {\n\tc\.inference\.kindMatches\((c)`, +- setup: setup, +- }, b) +-} +- +-type completionTest struct { +- repo string +- name string +- file string // repo-relative file to create +- content string // file content +- locationRegexp string // regexp for completion +-} +- +-var completionTests = []completionTest{ +- { +- "tools", +- "selector", +- "internal/lsp/source/completion/completion2.go", +- ` +-package completion +- +-func (c *completer) _() { +- c.inference.kindMatches(c.) 
+-} +-`, +- `func \(c \*completer\) _\(\) {\n\tc\.inference\.kindMatches\((c)`, +- }, +- { +- "tools", +- "unimportedident", +- "internal/lsp/source/completion/completion2.go", +- ` +-package completion +- +-func (c *completer) _() { +- lo +-} +-`, +- `lo()`, +- }, +- { +- "tools", +- "unimportedselector", +- "internal/lsp/source/completion/completion2.go", +- ` +-package completion +- +-func (c *completer) _() { +- log. +-} +-`, +- `log\.()`, +- }, +- { +- "kubernetes", +- "selector", +- "pkg/kubelet/kubelet2.go", +- ` +-package kubelet +- +-func (kl *Kubelet) _() { +- kl. +-} +-`, +- `kl\.()`, +- }, +- { +- "kubernetes", +- "identifier", +- "pkg/kubelet/kubelet2.go", +- ` +-package kubelet +- +-func (kl *Kubelet) _() { +- k // here +-} +-`, +- `k() // here`, +- }, +- { +- "oracle", +- "selector", +- "dataintegration/pivot2.go", +- ` +-package dataintegration +- +-func (p *Pivot) _() { +- p. +-} +-`, +- `p\.()`, +- }, +-} +- +-// Benchmark completion following an arbitrary edit. +-// +-// Edits force type-checked packages to be invalidated, so we want to measure +-// how long it takes before completion results are available. +-func BenchmarkCompletion(b *testing.B) { +- for _, test := range completionTests { +- b.Run(fmt.Sprintf("%s_%s", test.repo, test.name), func(b *testing.B) { +- for _, followingEdit := range []bool{true, false} { +- b.Run(fmt.Sprintf("edit=%v", followingEdit), func(b *testing.B) { +- for _, completeUnimported := range []bool{true, false} { +- b.Run(fmt.Sprintf("unimported=%v", completeUnimported), func(b *testing.B) { +- for _, budget := range []string{"0s", "100ms"} { +- b.Run(fmt.Sprintf("budget=%s", budget), func(b *testing.B) { +- runCompletion(b, test, followingEdit, completeUnimported, budget) +- }) +- } +- }) +- } +- }) +- } +- }) +- } +-} +- +-// For optimizing unimported completion, it can be useful to benchmark with a +-// huge GOMODCACHE. +-var gomodcache = flag.String("gomodcache", "", "optional GOMODCACHE for unimported completion benchmarks") +- +-func runCompletion(b *testing.B, test completionTest, followingEdit, completeUnimported bool, budget string) { +- repo := getRepo(b, test.repo) +- gopath := *completionGOPATH +- if gopath == "" { +- // use a warm GOPATH +- sharedEnv := repo.sharedEnv(b) +- gopath = sharedEnv.Sandbox.GOPATH() +- } +- envvars := map[string]string{ +- "GOPATH": gopath, +- } +- +- if *gomodcache != "" { +- envvars["GOMODCACHE"] = *gomodcache +- } +- +- env := repo.newEnv(b, fake.EditorConfig{ +- Env: envvars, +- Settings: map[string]any{ +- "completeUnimported": completeUnimported, +- "completionBudget": budget, +- }, +- }, "completion", false) +- defer env.Close() +- +- env.CreateBuffer(test.file, "// __TEST_PLACEHOLDER_0__\n"+test.content) +- editPlaceholder := func() { +- edits := atomic.AddInt64(&editID, 1) +- env.EditBuffer(test.file, protocol.TextEdit{ +- Range: protocol.Range{ +- Start: protocol.Position{Line: 0, Character: 0}, +- End: protocol.Position{Line: 1, Character: 0}, +- }, +- // Increment the placeholder text, to ensure cache misses. +- NewText: fmt.Sprintf("// __TEST_PLACEHOLDER_%d__\n", edits), +- }) +- } +- env.AfterChange() +- +- // Run a completion to make sure the system is warm. +- loc := env.RegexpSearch(test.file, test.locationRegexp) +- completions := env.Completion(loc) +- +- if testing.Verbose() { +- fmt.Println("Results:") +- for i, item := range completions.Items { +- fmt.Printf("\t%d. 
%v\n", i, item) +- } +- } +- +- if stopAndRecord := startProfileIfSupported(b, env, qualifiedName(test.repo, "completion")); stopAndRecord != nil { +- defer stopAndRecord() +- } +- +- for b.Loop() { +- if followingEdit { +- editPlaceholder() +- } +- loc := env.RegexpSearch(test.file, test.locationRegexp) +- env.Completion(loc) +- } +-} +diff -urN a/gopls/internal/test/integration/bench/definition_test.go b/gopls/internal/test/integration/bench/definition_test.go +--- a/gopls/internal/test/integration/bench/definition_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/integration/bench/definition_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,46 +0,0 @@ +-// Copyright 2023 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package bench +- +-import ( +- "testing" +-) +- +-func BenchmarkDefinition(b *testing.B) { +- tests := []struct { +- repo string +- file string +- regexp string +- }{ +- {"istio", "pkg/config/model.go", `gogotypes\.(MarshalAny)`}, +- {"google-cloud-go", "httpreplay/httpreplay.go", `proxy\.(ForRecording)`}, +- {"kubernetes", "pkg/controller/lookup_cache.go", `hashutil\.(DeepHashObject)`}, +- {"kuma", "api/generic/insights.go", `proto\.(Message)`}, +- {"pkgsite", "internal/log/log.go", `derrors\.(Wrap)`}, +- {"starlark", "starlark/eval.go", "prog.compiled.(Encode)"}, +- {"tools", "internal/lsp/cache/check.go", `(snapshot)\) buildKey`}, +- } +- +- for _, test := range tests { +- b.Run(test.repo, func(b *testing.B) { +- env := getRepo(b, test.repo).sharedEnv(b) +- env.OpenFile(test.file) +- defer closeBuffer(b, env, test.file) +- +- loc := env.RegexpSearch(test.file, test.regexp) +- env.Await(env.DoneWithOpen()) +- env.FirstDefinition(loc) // pre-warm the query, and open the target file +- b.ResetTimer() +- +- if stopAndRecord := startProfileIfSupported(b, env, qualifiedName(test.repo, "definition")); stopAndRecord != nil { +- defer stopAndRecord() +- } +- +- for b.Loop() { +- env.FirstDefinition(loc) // pre-warm the query +- } +- }) +- } +-} +diff -urN a/gopls/internal/test/integration/bench/diagnostic_test.go b/gopls/internal/test/integration/bench/diagnostic_test.go +--- a/gopls/internal/test/integration/bench/diagnostic_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/integration/bench/diagnostic_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,76 +0,0 @@ +-// Copyright 2024 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package bench +- +-import ( +- "sync" +- "testing" +- +- "golang.org/x/tools/gopls/internal/protocol" +- . "golang.org/x/tools/gopls/internal/test/integration" +- "golang.org/x/tools/gopls/internal/test/integration/fake" +-) +- +-// BenchmarkDiagnosePackageFiles measures how long it takes to request +-// diagnostics for 10 files in a single package, following a change to that +-// package. +-// +-// This can be used to measure the efficiency of pull diagnostics +-// (golang/go#53275). +-func BenchmarkDiagnosePackageFiles(b *testing.B) { +- if testing.Short() { +- b.Skip("pull diagnostics are not supported by the benchmark dashboard baseline") +- } +- +- env := getRepo(b, "kubernetes").newEnv(b, fake.EditorConfig{ +- Settings: map[string]any{ +- "pullDiagnostics": true, // currently required for pull diagnostic support +- }, +- }, "diagnosePackageFiles", false) +- +- // 10 arbitrary files in a single package. 
+- files := []string{ +- "pkg/kubelet/active_deadline.go", // 98 lines +- "pkg/kubelet/active_deadline_test.go", // 95 lines +- "pkg/kubelet/kubelet.go", // 2439 lines +- "pkg/kubelet/kubelet_pods.go", // 2061 lines +- "pkg/kubelet/kubelet_network.go", // 70 lines +- "pkg/kubelet/kubelet_network_test.go", // 46 lines +- "pkg/kubelet/pod_workers.go", // 1323 lines +- "pkg/kubelet/pod_workers_test.go", // 1758 lines +- "pkg/kubelet/runonce.go", // 175 lines +- "pkg/kubelet/volume_host.go", // 297 lines +- } +- +- env.Await(InitialWorkspaceLoad) +- +- for _, file := range files { +- env.OpenFile(file) +- } +- +- env.AfterChange() +- +- edit := makeEditFunc(env, files[0]) +- +- if stopAndRecord := startProfileIfSupported(b, env, qualifiedName("kubernetes", "diagnosePackageFiles")); stopAndRecord != nil { +- defer stopAndRecord() +- } +- +- for b.Loop() { +- edit() +- var wg sync.WaitGroup +- for _, file := range files { +- wg.Go(func() { +- fileDiags := env.Diagnostics(file) +- for _, d := range fileDiags { +- if d.Severity == protocol.SeverityError { +- b.Errorf("unexpected error diagnostic: %s", d.Message) +- } +- } +- }) +- } +- wg.Wait() +- } +-} +diff -urN a/gopls/internal/test/integration/bench/didchange_test.go b/gopls/internal/test/integration/bench/didchange_test.go +--- a/gopls/internal/test/integration/bench/didchange_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/integration/bench/didchange_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,163 +0,0 @@ +-// Copyright 2022 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package bench +- +-import ( +- "fmt" +- "sync/atomic" +- "testing" +- "time" +- +- "golang.org/x/tools/gopls/internal/protocol" +- . "golang.org/x/tools/gopls/internal/test/integration" +- "golang.org/x/tools/gopls/internal/test/integration/fake" +-) +- +-// Use a global edit counter as bench function may execute multiple times, and +-// we want to avoid cache hits. Use time.Now to also avoid cache hits from the +-// shared file cache. +-var editID int64 = time.Now().UnixNano() +- +-type changeTest struct { +- repo string // repo identifier + optional disambiguating ".foo" suffix +- file string +- canSave bool +-} +- +-var didChangeTests = []changeTest{ +- {"google-cloud-go", "internal/annotate.go", true}, +- {"istio", "pkg/fuzz/util.go", true}, +- {"kubernetes", "pkg/controller/lookup_cache.go", true}, +- {"kubernetes.types", "staging/src/k8s.io/api/core/v1/types.go", true}, // results in 25K file batch! +- {"kuma", "api/generic/insights.go", true}, +- {"oracle", "dataintegration/data_type.go", false}, // diagnoseSave fails because this package is generated +- {"pkgsite", "internal/frontend/server.go", true}, +- {"starlark", "starlark/eval.go", true}, +- {"tools", "internal/lsp/cache/snapshot.go", true}, +-} +- +-// BenchmarkDidChange benchmarks modifications of a single file by making +-// synthetic modifications in a comment. It controls pacing by waiting for the +-// server to actually start processing the didChange notification before +-// proceeding. Notably it does not wait for diagnostics to complete. +-func BenchmarkDidChange(b *testing.B) { +- for _, test := range didChangeTests { +- b.Run(test.repo, func(b *testing.B) { +- env := getRepo(b, test.repo).sharedEnv(b) +- env.OpenFile(test.file) +- defer closeBuffer(b, env, test.file) +- +- // Insert the text we'll be modifying at the top of the file. 
+- edit := makeEditFunc(env, test.file) +- +- if stopAndRecord := startProfileIfSupported(b, env, qualifiedName(test.repo, "didchange")); stopAndRecord != nil { +- defer stopAndRecord() +- } +- b.ResetTimer() +- +- for b.Loop() { +- edit() +- env.Await(env.StartedChange()) +- } +- }) +- } +-} +- +-// makeEditFunc prepares the given file for incremental editing, by inserting a +-// placeholder comment that will be overwritten with a new unique value by each +-// call to the resulting function. While makeEditFunc awaits gopls to finish +-// processing the initial edit, the callback for incremental edits does not +-// await any gopls state. +-// +-// This is used for benchmarks that must repeatedly invalidate a file's +-// contents. +-// +-// TODO(rfindley): use this throughout. +-func makeEditFunc(env *Env, file string) func() { +- // Insert the text we'll be modifying at the top of the file. +- env.EditBuffer(file, protocol.TextEdit{NewText: "// __TEST_PLACEHOLDER_0__\n"}) +- env.AfterChange() +- +- return func() { +- edits := atomic.AddInt64(&editID, 1) +- env.EditBuffer(file, protocol.TextEdit{ +- Range: protocol.Range{ +- Start: protocol.Position{Line: 0, Character: 0}, +- End: protocol.Position{Line: 1, Character: 0}, +- }, +- // Increment the placeholder text, to ensure cache misses. +- NewText: fmt.Sprintf("// __TEST_PLACEHOLDER_%d__\n", edits), +- }) +- } +-} +- +-func BenchmarkDiagnoseChange(b *testing.B) { +- for _, test := range didChangeTests { +- runChangeDiagnosticsBenchmark(b, test, false, "diagnoseChange") +- } +-} +- +-// TODO(rfindley): add a benchmark for with a metadata-affecting change, when +-// this matters. +-func BenchmarkDiagnoseSave(b *testing.B) { +- for _, test := range didChangeTests { +- runChangeDiagnosticsBenchmark(b, test, true, "diagnoseSave") +- } +-} +- +-// runChangeDiagnosticsBenchmark runs a benchmark to edit the test file and +-// await the resulting diagnostics pass. If save is set, the file is also saved. +-func runChangeDiagnosticsBenchmark(b *testing.B, test changeTest, save bool, operation string) { +- b.Run(test.repo, func(b *testing.B) { +- if !test.canSave { +- b.Skipf("skipping as %s cannot be saved", test.file) +- } +- sharedEnv := getRepo(b, test.repo).sharedEnv(b) +- config := fake.EditorConfig{ +- Env: map[string]string{ +- "GOPATH": sharedEnv.Sandbox.GOPATH(), +- }, +- Settings: map[string]any{ +- "diagnosticsDelay": "0s", +- }, +- } +- // Use a new env to avoid the diagnostic delay: we want to measure how +- // long it takes to produce the diagnostics. +- env := getRepo(b, test.repo).newEnv(b, config, operation, false) +- defer env.Close() +- env.OpenFile(test.file) +- // Insert the text we'll be modifying at the top of the file. +- env.EditBuffer(test.file, protocol.TextEdit{NewText: "// __TEST_PLACEHOLDER_0__\n"}) +- if save { +- env.SaveBuffer(test.file) +- } +- env.AfterChange() +- b.ResetTimer() +- +- // We must use an extra subtest layer here, so that we only set up the +- // shared env once (otherwise we pay additional overhead and the profiling +- // flags don't work). 
+- b.Run("diagnose", func(b *testing.B) { +- if stopAndRecord := startProfileIfSupported(b, env, qualifiedName(test.repo, operation)); stopAndRecord != nil { +- defer stopAndRecord() +- } +- for b.Loop() { +- edits := atomic.AddInt64(&editID, 1) +- env.EditBuffer(test.file, protocol.TextEdit{ +- Range: protocol.Range{ +- Start: protocol.Position{Line: 0, Character: 0}, +- End: protocol.Position{Line: 1, Character: 0}, +- }, +- // Increment the placeholder text, to ensure cache misses. +- NewText: fmt.Sprintf("// __TEST_PLACEHOLDER_%d__\n", edits), +- }) +- if save { +- env.SaveBuffer(test.file) +- } +- env.AfterChange() +- } +- }) +- }) +-} +diff -urN a/gopls/internal/test/integration/bench/doc.go b/gopls/internal/test/integration/bench/doc.go +--- a/gopls/internal/test/integration/bench/doc.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/integration/bench/doc.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,41 +0,0 @@ +-// Copyright 2023 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-// The bench package implements benchmarks for various LSP operations. +-// +-// Benchmarks check out specific commits of popular and/or exemplary +-// repositories, and script an external gopls process via a fake text editor. +-// By default, benchmarks run the test executable as gopls (using a special +-// "gopls mode" environment variable). A different gopls binary may be used by +-// setting the -gopls_path or -gopls_commit flags. +-// +-// This package is a work in progress. +-// +-// # Profiling +-// +-// Benchmark functions run gopls in a separate process, which means the normal +-// test flags for profiling aren't useful. Instead the -gopls_cpuprofile, +-// -gopls_memprofile, -gopls_allocprofile, -gopls_blockprofile, and +-// -gopls_trace flags may be used to pass through profiling to the gopls +-// subproces. +-// +-// Each of these flags sets a suffix for the respective gopls profile, which is +-// named according to the schema ... For example, +-// setting -gopls_cpuprofile=cpu will result in profiles named tools.iwl.cpu, +-// tools.rename.cpu, etc. In some cases, these profiles are for the entire +-// gopls subprocess (as in the initial workspace load), whereas in others they +-// span only the critical section of the benchmark. It is up to each benchmark +-// to implement profiling as appropriate. +-// +-// # Integration with perf.golang.org +-// +-// Benchmarks that run with -short are automatically tracked by +-// perf.golang.org, at +-// https://perf.golang.org/dashboard/?benchmark=all&repository=tools&branch=release-branch.go1.20 +-// +-// # TODO +-// - add more benchmarks, and more repositories +-// - fix the perf dashboard to not require the branch= parameter +-// - improve this documentation +-package bench +diff -urN a/gopls/internal/test/integration/bench/hover_test.go b/gopls/internal/test/integration/bench/hover_test.go +--- a/gopls/internal/test/integration/bench/hover_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/integration/bench/hover_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,47 +0,0 @@ +-// Copyright 2023 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. 
+- +-package bench +- +-import ( +- "testing" +-) +- +-func BenchmarkHover(b *testing.B) { +- tests := []struct { +- repo string +- file string +- regexp string +- }{ +- {"google-cloud-go", "httpreplay/httpreplay.go", `proxy\.(ForRecording)`}, +- {"istio", "pkg/config/model.go", `gogotypes\.(MarshalAny)`}, +- {"kubernetes", "pkg/apis/core/types.go", "type (Pod)"}, +- {"kuma", "api/generic/insights.go", `proto\.(Message)`}, +- {"pkgsite", "internal/log/log.go", `derrors\.(Wrap)`}, +- {"starlark", "starlark/eval.go", "prog.compiled.(Encode)"}, +- {"tools", "internal/lsp/cache/check.go", `(snapshot)\) buildKey`}, +- } +- +- for _, test := range tests { +- b.Run(test.repo, func(b *testing.B) { +- env := getRepo(b, test.repo).sharedEnv(b) +- env.OpenFile(test.file) +- defer closeBuffer(b, env, test.file) +- +- loc := env.RegexpSearch(test.file, test.regexp) +- env.AfterChange() +- +- env.Hover(loc) // pre-warm the query +- b.ResetTimer() +- +- if stopAndRecord := startProfileIfSupported(b, env, qualifiedName(test.repo, "hover")); stopAndRecord != nil { +- defer stopAndRecord() +- } +- +- for b.Loop() { +- env.Hover(loc) // pre-warm the query +- } +- }) +- } +-} +diff -urN a/gopls/internal/test/integration/bench/implementations_test.go b/gopls/internal/test/integration/bench/implementations_test.go +--- a/gopls/internal/test/integration/bench/implementations_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/integration/bench/implementations_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,44 +0,0 @@ +-// Copyright 2023 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package bench +- +-import "testing" +- +-func BenchmarkImplementations(b *testing.B) { +- tests := []struct { +- repo string +- file string +- regexp string +- }{ +- {"google-cloud-go", "httpreplay/httpreplay.go", `type (Recorder)`}, +- {"istio", "pkg/config/mesh/watcher.go", `type (Watcher)`}, +- {"kubernetes", "pkg/controller/lookup_cache.go", `objectWithMeta`}, +- {"kuma", "api/generic/insights.go", `type (Insight)`}, +- {"pkgsite", "internal/datasource.go", `type (DataSource)`}, +- {"starlark", "syntax/syntax.go", `type (Expr)`}, +- {"tools", "internal/lsp/source/view.go", `type (Snapshot)`}, +- } +- +- for _, test := range tests { +- b.Run(test.repo, func(b *testing.B) { +- env := getRepo(b, test.repo).sharedEnv(b) +- env.OpenFile(test.file) +- defer closeBuffer(b, env, test.file) +- +- loc := env.RegexpSearch(test.file, test.regexp) +- env.AfterChange() +- env.Implementations(loc) // pre-warm the query +- b.ResetTimer() +- +- if stopAndRecord := startProfileIfSupported(b, env, qualifiedName(test.repo, "implementations")); stopAndRecord != nil { +- defer stopAndRecord() +- } +- +- for b.Loop() { +- env.Implementations(loc) +- } +- }) +- } +-} +diff -urN a/gopls/internal/test/integration/bench/imports_test.go b/gopls/internal/test/integration/bench/imports_test.go +--- a/gopls/internal/test/integration/bench/imports_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/integration/bench/imports_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,87 +0,0 @@ +-// Copyright 2024 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. 
+- +-package bench +- +-import ( +- "context" +- "flag" +- "testing" +- "time" +- +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/gopls/internal/protocol/command" +- . "golang.org/x/tools/gopls/internal/test/integration" +- "golang.org/x/tools/gopls/internal/test/integration/fake" +-) +- +-var gopath = flag.String("gopath", "", "if set, run goimports scan with this GOPATH value") +- +-func BenchmarkInitialGoimportsScan(b *testing.B) { +- if *gopath == "" { +- // This test doesn't make much sense with a tiny module cache. +- // For now, don't bother trying to construct a huge cache, since it likely +- // wouldn't work well on the perf builder. Instead, this benchmark only +- // runs with a pre-existing GOPATH. +- b.Skip("imports scan requires an explicit GOPATH to be set with -gopath") +- } +- +- repo := getRepo(b, "tools") // since this a test of module cache scanning, any repo will do +- +- for b.Loop() { +- func() { +- // Unfortunately we (intentionally) don't support resetting the module +- // cache scan state, so in order to have an accurate benchmark we must +- // effectively restart gopls on every iteration. +- // +- // Warning: this can cause this benchmark to run quite slowly if the +- // observed time (when the timer is running) is a tiny fraction of the +- // actual time. +- b.StopTimer() +- config := fake.EditorConfig{ +- Env: map[string]string{"GOPATH": *gopath}, +- } +- env := repo.newEnv(b, config, "imports", false) +- defer env.Close() +- env.Await(InitialWorkspaceLoad) +- +- // Create a buffer with a dangling selector where the receiver is a single +- // character ('a') that matches a large fraction of the module cache. +- env.CreateBuffer("internal/lsp/cache/temp.go", ` +-// This is a temp file to exercise goimports scan of the module cache. +-package cache +- +-func _() { +- _ = a.B // a dangling selector causes goimports to scan many packages +-} +-`) +- env.AfterChange() +- +- // Force a scan of the imports cache, so that the goimports algorithm +- // observes all directories. +- env.ExecuteCommand(&protocol.ExecuteCommandParams{ +- Command: command.ScanImports.String(), +- }, nil) +- +- if stopAndRecord := startProfileIfSupported(b, env, "importsscan"); stopAndRecord != nil { +- defer stopAndRecord() +- } +- +- b.StartTimer() +- if false { +- // golang/go#67923: testing resuming imports scanning after a +- // cancellation. +- // +- // Cancelling and then resuming the scan should take around the same +- // amount of time. +- ctx, cancel := context.WithTimeout(env.Ctx, 50*time.Millisecond) +- defer cancel() +- if err := env.Editor.OrganizeImports(ctx, "internal/lsp/cache/temp.go"); err != nil { +- b.Logf("organize imports failed: %v", err) +- } +- } +- env.OrganizeImports("internal/lsp/cache/temp.go") +- }() +- } +-} +diff -urN a/gopls/internal/test/integration/bench/iwl_test.go b/gopls/internal/test/integration/bench/iwl_test.go +--- a/gopls/internal/test/integration/bench/iwl_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/integration/bench/iwl_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,102 +0,0 @@ +-// Copyright 2022 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package bench +- +-import ( +- "testing" +- +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/gopls/internal/protocol/command" +- . 
"golang.org/x/tools/gopls/internal/test/integration" +- "golang.org/x/tools/gopls/internal/test/integration/fake" +-) +- +-// BenchmarkInitialWorkspaceLoad benchmarks the initial workspace load time for +-// a new editing session. +-// +-// The OpenFiles variant of this test is more realistic: who cares if gopls is +-// initialized if you can't use it? However, this test is left as is to +-// preserve the validity of historical data, and to represent the baseline +-// performance of validating the workspace state. +-func BenchmarkInitialWorkspaceLoad(b *testing.B) { +- repoNames := []string{ +- "google-cloud-go", +- "istio", +- "kubernetes", +- "kuma", +- "oracle", +- "pkgsite", +- "starlark", +- "tools", +- "hashiform", +- } +- for _, repoName := range repoNames { +- b.Run(repoName, func(b *testing.B) { +- repo := getRepo(b, repoName) +- // get the (initialized) shared env to ensure the cache is warm. +- // Reuse its GOPATH so that we get cache hits for things in the module +- // cache. +- sharedEnv := repo.sharedEnv(b) +- b.ResetTimer() +- +- for b.Loop() { +- doIWL(b, sharedEnv.Sandbox.GOPATH(), repo, nil) +- } +- }) +- } +-} +- +-// BenchmarkInitialWorkspaceLoadOpenFiles benchmarks the initial workspace load +-// after opening one or more files. +-// +-// It may differ significantly from [BenchmarkInitialWorkspaceLoad], since +-// there is various active state that is proportional to the number of open +-// files. +-func BenchmarkInitialWorkspaceLoadOpenFiles(b *testing.B) { +- for _, t := range didChangeTests { +- b.Run(t.repo, func(b *testing.B) { +- repo := getRepo(b, t.repo) +- sharedEnv := repo.sharedEnv(b) +- b.ResetTimer() +- +- for b.Loop() { +- doIWL(b, sharedEnv.Sandbox.GOPATH(), repo, []string{t.file}) +- } +- }) +- } +-} +- +-func doIWL(b *testing.B, gopath string, repo *repo, openfiles []string) { +- // Exclude the time to set up the env from the benchmark time, as this may +- // involve installing gopls and/or checking out the repo dir. +- b.StopTimer() +- config := fake.EditorConfig{Env: map[string]string{"GOPATH": gopath}} +- env := repo.newEnv(b, config, "iwl", true) +- defer env.Close() +- b.StartTimer() +- +- // TODO(rfindley): not awaiting the IWL here leads to much more volatile +- // results. Investigate. +- env.Await(InitialWorkspaceLoad) +- +- for _, f := range openfiles { +- env.OpenFile(f) +- } +- +- env.AfterChange() +- +- if env.Editor.HasCommand(command.MemStats) { +- b.StopTimer() +- params := &protocol.ExecuteCommandParams{ +- Command: command.MemStats.String(), +- } +- var memstats command.MemStatsResult +- env.ExecuteCommand(params, &memstats) +- b.ReportMetric(float64(memstats.HeapAlloc), "alloc_bytes") +- b.ReportMetric(float64(memstats.HeapInUse), "in_use_bytes") +- b.ReportMetric(float64(memstats.TotalAlloc), "total_alloc_bytes") +- b.StartTimer() +- } +-} +diff -urN a/gopls/internal/test/integration/bench/references_test.go b/gopls/internal/test/integration/bench/references_test.go +--- a/gopls/internal/test/integration/bench/references_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/integration/bench/references_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,44 +0,0 @@ +-// Copyright 2023 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. 
+- +-package bench +- +-import "testing" +- +-func BenchmarkReferences(b *testing.B) { +- tests := []struct { +- repo string +- file string +- regexp string +- }{ +- {"google-cloud-go", "httpreplay/httpreplay.go", `func (NewRecorder)`}, +- {"istio", "pkg/config/model.go", "type (Meta)"}, +- {"kubernetes", "pkg/controller/lookup_cache.go", "type (objectWithMeta)"}, // TODO: choose an exported identifier +- {"kuma", "pkg/events/interfaces.go", "type (Event)"}, +- {"pkgsite", "internal/log/log.go", "func (Infof)"}, +- {"starlark", "syntax/syntax.go", "type (Ident)"}, +- {"tools", "internal/lsp/source/view.go", "type (Snapshot)"}, +- } +- +- for _, test := range tests { +- b.Run(test.repo, func(b *testing.B) { +- env := getRepo(b, test.repo).sharedEnv(b) +- env.OpenFile(test.file) +- defer closeBuffer(b, env, test.file) +- +- loc := env.RegexpSearch(test.file, test.regexp) +- env.AfterChange() +- env.References(loc) // pre-warm the query +- b.ResetTimer() +- +- if stopAndRecord := startProfileIfSupported(b, env, qualifiedName(test.repo, "references")); stopAndRecord != nil { +- defer stopAndRecord() +- } +- +- for b.Loop() { +- env.References(loc) +- } +- }) +- } +-} +diff -urN a/gopls/internal/test/integration/bench/reload_test.go b/gopls/internal/test/integration/bench/reload_test.go +--- a/gopls/internal/test/integration/bench/reload_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/integration/bench/reload_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,71 +0,0 @@ +-// Copyright 2023 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +-package bench +- +-import ( +- "fmt" +- "path" +- "regexp" +- "testing" +- +- . "golang.org/x/tools/gopls/internal/test/integration" +-) +- +-// BenchmarkReload benchmarks reloading a file metadata after a change to an import. +-// +-// This ensures we are able to diagnose a changed file without reloading all +-// invalidated packages. See also golang/go#61344 +-func BenchmarkReload(b *testing.B) { +- type replace map[string]string +- tests := []struct { +- repo string +- file string +- // replacements must be 'reversible', in the sense that the replacing +- // string is unique. +- replace replace +- }{ +- // pkg/util/hash is transitively imported by a large number of packages. We +- // should not need to reload those packages to get a diagnostic. +- {"kubernetes", "pkg/util/hash/hash.go", replace{`"hash"`: `"hashx"`}}, +- {"kubernetes", "pkg/kubelet/kubelet.go", replace{ +- `"k8s.io/kubernetes/pkg/kubelet/config"`: `"k8s.io/kubernetes/pkg/kubelet/configx"`, +- }}, +- } +- +- for _, test := range tests { +- b.Run(fmt.Sprintf("%s/%s", test.repo, path.Base(test.file)), func(b *testing.B) { +- env := getRepo(b, test.repo).sharedEnv(b) +- +- env.OpenFile(test.file) +- defer closeBuffer(b, env, test.file) +- +- env.AfterChange() +- +- profileName := qualifiedName("reload", test.repo, path.Base(test.file)) +- if stopAndRecord := startProfileIfSupported(b, env, profileName); stopAndRecord != nil { +- defer stopAndRecord() +- } +- +- b.ResetTimer() +- for b.Loop() { +- // Mutate the file. This may result in cache hits, but that's OK: the +- // goal is to ensure that we don't reload more than just the current +- // package. +- for k, v := range test.replace { +- env.RegexpReplace(test.file, regexp.QuoteMeta(k), v) +- } +- // Note: don't use env.AfterChange() here: we only want to await the +- // first diagnostic. 
+- // +- // Awaiting a full diagnosis would await diagnosing everything, which +- // would require reloading everything. +- env.Await(Diagnostics(ForFile(test.file))) +- for k, v := range test.replace { +- env.RegexpReplace(test.file, regexp.QuoteMeta(v), k) +- } +- env.Await(NoDiagnostics(ForFile(test.file))) +- } +- }) +- } +-} +diff -urN a/gopls/internal/test/integration/bench/rename_test.go b/gopls/internal/test/integration/bench/rename_test.go +--- a/gopls/internal/test/integration/bench/rename_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/integration/bench/rename_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,49 +0,0 @@ +-// Copyright 2023 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package bench +- +-import ( +- "fmt" +- "testing" +-) +- +-func BenchmarkRename(b *testing.B) { +- tests := []struct { +- repo string +- file string +- regexp string +- baseName string +- }{ +- {"google-cloud-go", "httpreplay/httpreplay.go", `func (NewRecorder)`, "NewRecorder"}, +- {"istio", "pkg/config/model.go", `(Namespace) string`, "Namespace"}, +- {"kubernetes", "pkg/controller/lookup_cache.go", `hashutil\.(DeepHashObject)`, "DeepHashObject"}, +- {"kuma", "pkg/events/interfaces.go", `Delete`, "Delete"}, +- {"pkgsite", "internal/log/log.go", `func (Infof)`, "Infof"}, +- {"starlark", "starlark/eval.go", `Program\) (Filename)`, "Filename"}, +- {"tools", "internal/lsp/cache/snapshot.go", `meta \*(metadataGraph)`, "metadataGraph"}, +- } +- +- for _, test := range tests { +- names := 0 // bench function may execute multiple times +- b.Run(test.repo, func(b *testing.B) { +- env := getRepo(b, test.repo).sharedEnv(b) +- env.OpenFile(test.file) +- loc := env.RegexpSearch(test.file, test.regexp) +- env.Await(env.DoneWithOpen()) +- env.Rename(loc, test.baseName+"X") // pre-warm the query +- b.ResetTimer() +- +- if stopAndRecord := startProfileIfSupported(b, env, qualifiedName(test.repo, "rename")); stopAndRecord != nil { +- defer stopAndRecord() +- } +- +- for b.Loop() { +- names++ +- newName := fmt.Sprintf("%s%d", test.baseName, names) +- env.Rename(loc, newName) +- } +- }) +- } +-} +diff -urN a/gopls/internal/test/integration/bench/repo_test.go b/gopls/internal/test/integration/bench/repo_test.go +--- a/gopls/internal/test/integration/bench/repo_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/integration/bench/repo_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,296 +0,0 @@ +-// Copyright 2023 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package bench +- +-import ( +- "bytes" +- "context" +- "errors" +- "flag" +- "fmt" +- "log" +- "os" +- "path/filepath" +- "strings" +- "sync" +- "testing" +- "time" +- +- . "golang.org/x/tools/gopls/internal/test/integration" +- "golang.org/x/tools/gopls/internal/test/integration/fake" +-) +- +-// repos holds shared repositories for use in benchmarks. +-// +-// These repos were selected to represent a variety of different types of +-// codebases. +-var repos = map[string]*repo{ +- // google-cloud-go has 145 workspace modules (!), and is quite large. 
+- "google-cloud-go": { +- name: "google-cloud-go", +- url: "https://github.com/googleapis/google-cloud-go.git", +- commit: "07da765765218debf83148cc7ed8a36d6e8921d5", +- inDir: flag.String("cloud_go_dir", "", "if set, reuse this directory as google-cloud-go@07da7657"), +- }, +- +- // Used by x/benchmarks; large. +- "istio": { +- name: "istio", +- url: "https://github.com/istio/istio", +- commit: "1.17.0", +- inDir: flag.String("istio_dir", "", "if set, reuse this directory as istio@v1.17.0"), +- }, +- +- // Kubernetes is a large repo with many dependencies, and in the past has +- // been about as large a repo as gopls could handle. +- "kubernetes": { +- name: "kubernetes", +- url: "https://github.com/kubernetes/kubernetes", +- commit: "v1.24.0", +- short: true, +- inDir: flag.String("kubernetes_dir", "", "if set, reuse this directory as kubernetes@v1.24.0"), +- }, +- +- // A large, industrial application. +- "kuma": { +- name: "kuma", +- url: "https://github.com/kumahq/kuma", +- commit: "2.1.1", +- inDir: flag.String("kuma_dir", "", "if set, reuse this directory as kuma@v2.1.1"), +- }, +- +- // A repo containing a very large package (./dataintegration). +- "oracle": { +- name: "oracle", +- url: "https://github.com/oracle/oci-go-sdk.git", +- commit: "v65.43.0", +- short: true, +- inDir: flag.String("oracle_dir", "", "if set, reuse this directory as oracle/oci-go-sdk@v65.43.0"), +- }, +- +- // x/pkgsite is familiar and represents a common use case (a webserver). It +- // also has a number of static non-go files and template files. +- "pkgsite": { +- name: "pkgsite", +- url: "https://go.googlesource.com/pkgsite", +- commit: "81f6f8d4175ad0bf6feaa03543cc433f8b04b19b", +- short: true, +- inDir: flag.String("pkgsite_dir", "", "if set, reuse this directory as pkgsite@81f6f8d4"), +- }, +- +- // A tiny self-contained project. +- "starlark": { +- name: "starlark", +- url: "https://github.com/google/starlark-go", +- commit: "3f75dec8e4039385901a30981e3703470d77e027", +- short: true, +- inDir: flag.String("starlark_dir", "", "if set, reuse this directory as starlark@3f75dec8"), +- }, +- +- // The current repository, which is medium-small and has very few dependencies. +- "tools": { +- name: "tools", +- url: "https://go.googlesource.com/tools", +- commit: "gopls/v0.9.0", +- short: true, +- inDir: flag.String("tools_dir", "", "if set, reuse this directory as x/tools@v0.9.0"), +- }, +- +- // A repo of similar size to kubernetes, but with substantially more +- // complex types that led to a serious performance regression (issue #60621). +- "hashiform": { +- name: "hashiform", +- url: "https://github.com/hashicorp/terraform-provider-aws", +- commit: "ac55de2b1950972d93feaa250d7505d9ed829c7c", +- inDir: flag.String("hashiform_dir", "", "if set, reuse this directory as hashiform@ac55de2"), +- }, +-} +- +-// getRepo gets the requested repo, and skips the test if -short is set and +-// repo is not configured as a short repo. +-// +-// The name may include an optional ".foo" suffix after the repo +-// identifier. This allows several tests to use the same repo but have +-// distinct test names and associated file names. 
+-func getRepo(tb testing.TB, name string) *repo { +- tb.Helper() +- name, _, _ = strings.Cut(name, ".") // remove ".foo" suffix +- repo := repos[name] +- if repo == nil { +- tb.Fatalf("repo %s does not exist", name) +- } +- if !repo.short && testing.Short() { +- tb.Skipf("large repo %s does not run with -short", repo.name) +- } +- return repo +-} +- +-// A repo represents a working directory for a repository checked out at a +-// specific commit. +-// +-// Repos are used for sharing state across benchmarks that operate on the same +-// codebase. +-type repo struct { +- // static configuration +- name string // must be unique, used for subdirectory +- url string // repo url +- commit string // full commit hash or tag +- short bool // whether this repo runs with -short +- inDir *string // if set, use this dir as url@commit, and don't delete +- +- dirOnce sync.Once +- dir string // directory containing source code checked out to url@commit +- +- // shared editor state +- editorOnce sync.Once +- editor *fake.Editor +- sandbox *fake.Sandbox +- awaiter *Awaiter +-} +- +-// reusableDir return a reusable directory for benchmarking, or "". +-// +-// If the user specifies a directory, the test will create and populate it +-// on the first run and re-use it on subsequent runs. Otherwise it will +-// create, populate, and delete a temporary directory. +-func (r *repo) reusableDir() string { +- if r.inDir == nil { +- return "" +- } +- return *r.inDir +-} +- +-// getDir returns directory containing repo source code, creating it if +-// necessary. It is safe for concurrent use. +-func (r *repo) getDir() string { +- r.dirOnce.Do(func() { +- if r.dir = r.reusableDir(); r.dir == "" { +- r.dir = filepath.Join(getTempDir(), r.name) +- } +- +- _, err := os.Stat(r.dir) +- switch { +- case os.IsNotExist(err): +- log.Printf("cloning %s@%s into %s", r.url, r.commit, r.dir) +- if err := shallowClone(r.dir, r.url, r.commit); err != nil { +- log.Fatal(err) +- } +- case err != nil: +- log.Fatal(err) +- default: +- log.Printf("reusing %s as %s@%s", r.dir, r.url, r.commit) +- } +- }) +- return r.dir +-} +- +-// sharedEnv returns a shared benchmark environment. It is safe for concurrent +-// use. +-// +-// Every call to sharedEnv uses the same editor and sandbox, as a means to +-// avoid reinitializing the editor for large repos. Calling repo.Close cleans +-// up the shared environment. +-// +-// Repos in the package-local Repos var are closed at the end of the test main +-// function. +-func (r *repo) sharedEnv(tb testing.TB) *Env { +- r.editorOnce.Do(func() { +- dir := r.getDir() +- +- start := time.Now() +- log.Printf("starting initial workspace load for %s", r.name) +- ts, err := newGoplsConnector(profileArgs(r.name, false)) +- if err != nil { +- log.Fatal(err) +- } +- r.sandbox, r.editor, r.awaiter, err = connectEditor(dir, fake.EditorConfig{}, ts) +- if err != nil { +- log.Fatalf("connecting editor: %v", err) +- } +- +- if err := r.awaiter.Await(context.Background(), InitialWorkspaceLoad); err != nil { +- log.Fatal(err) +- } +- log.Printf("initial workspace load (cold) for %s took %v", r.name, time.Since(start)) +- }) +- +- return &Env{ +- TB: tb, +- Ctx: context.Background(), +- Editor: r.editor, +- Sandbox: r.sandbox, +- Awaiter: r.awaiter, +- } +-} +- +-// newEnv returns a new Env connected to a new gopls process communicating +-// over stdin/stdout. It is safe for concurrent use. +-// +-// It is the caller's responsibility to call Close on the resulting Env when it +-// is no longer needed. 
+-func (r *repo) newEnv(tb testing.TB, config fake.EditorConfig, forOperation string, cpuProfile bool) *Env { +- dir := r.getDir() +- +- args := profileArgs(qualifiedName(r.name, forOperation), cpuProfile) +- ts, err := newGoplsConnector(args) +- if err != nil { +- tb.Fatal(err) +- } +- sandbox, editor, awaiter, err := connectEditor(dir, config, ts) +- if err != nil { +- log.Fatalf("connecting editor: %v", err) +- } +- +- return &Env{ +- TB: tb, +- Ctx: context.Background(), +- Editor: editor, +- Sandbox: sandbox, +- Awaiter: awaiter, +- } +-} +- +-// Close cleans up shared state referenced by the repo. +-func (r *repo) Close() error { +- var errBuf bytes.Buffer +- if r.editor != nil { +- if err := r.editor.Close(context.Background()); err != nil { +- fmt.Fprintf(&errBuf, "closing editor: %v", err) +- } +- } +- if r.sandbox != nil { +- if err := r.sandbox.Close(); err != nil { +- fmt.Fprintf(&errBuf, "closing sandbox: %v", err) +- } +- } +- if r.dir != "" && r.reusableDir() == "" { +- if err := os.RemoveAll(r.dir); err != nil { +- fmt.Fprintf(&errBuf, "cleaning dir: %v", err) +- } +- } +- if errBuf.Len() > 0 { +- return errors.New(errBuf.String()) +- } +- return nil +-} +- +-// cleanup cleans up state that is shared across benchmark functions. +-func cleanup() error { +- var errBuf bytes.Buffer +- for _, repo := range repos { +- if err := repo.Close(); err != nil { +- fmt.Fprintf(&errBuf, "closing %q: %v", repo.name, err) +- } +- } +- if tempDir != "" { +- if err := os.RemoveAll(tempDir); err != nil { +- fmt.Fprintf(&errBuf, "cleaning tempDir: %v", err) +- } +- } +- if errBuf.Len() > 0 { +- return errors.New(errBuf.String()) +- } +- return nil +-} +diff -urN a/gopls/internal/test/integration/bench/stress_test.go b/gopls/internal/test/integration/bench/stress_test.go +--- a/gopls/internal/test/integration/bench/stress_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/integration/bench/stress_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,91 +0,0 @@ +-// Copyright 2020 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package bench +- +-import ( +- "context" +- "flag" +- "fmt" +- "testing" +- "time" +- +- "golang.org/x/tools/gopls/internal/cache" +- "golang.org/x/tools/gopls/internal/lsprpc" +- "golang.org/x/tools/gopls/internal/test/integration/fake" +- "golang.org/x/tools/internal/jsonrpc2" +- "golang.org/x/tools/internal/jsonrpc2/servertest" +-) +- +-// github.com/pilosa/pilosa is a repository that has historically caused +-// significant memory problems for Gopls. We use it for a simple stress test +-// that types arbitrarily in a file with lots of dependents. +- +-var pilosaPath = flag.String("pilosa_path", "", "Path to a directory containing "+ +- "github.com/pilosa/pilosa, for stress testing. Do not set this unless you "+ +- "know what you're doing!") +- +-func TestPilosaStress(t *testing.T) { +- // TODO(rfindley): revisit this test and make it is hermetic: it should check +- // out pilosa into a directory. +- // +- // Note: This stress test has not been run recently, and may no longer +- // function properly. 
+- if *pilosaPath == "" { +- t.Skip("-pilosa_path not configured") +- } +- +- sandbox, err := fake.NewSandbox(&fake.SandboxConfig{ +- Workdir: *pilosaPath, +- GOPROXY: "https://proxy.golang.org", +- }) +- if err != nil { +- t.Fatal(err) +- } +- server := lsprpc.NewStreamServer(cache.New(nil), false, nil) +- ts := servertest.NewPipeServer(server, jsonrpc2.NewRawStream) +- +- ctx := context.Background() +- editor, err := fake.NewEditor(sandbox, fake.EditorConfig{}).Connect(ctx, ts, fake.ClientHooks{}) +- if err != nil { +- t.Fatal(err) +- } +- +- files := []string{ +- "cmd.go", +- "internal/private.pb.go", +- "roaring/roaring.go", +- "roaring/roaring_internal_test.go", +- "server/handler_test.go", +- } +- for _, file := range files { +- if err := editor.OpenFile(ctx, file); err != nil { +- t.Fatal(err) +- } +- } +- ctx, cancel := context.WithTimeout(ctx, 10*time.Minute) +- defer cancel() +- +- i := 1 +- // MagicNumber is an identifier that occurs in roaring.go. Just change it +- // arbitrarily. +- if err := editor.RegexpReplace(ctx, "roaring/roaring.go", "MagicNumber", fmt.Sprintf("MagicNumber%d", 1)); err != nil { +- t.Fatal(err) +- } +- for { +- select { +- case <-ctx.Done(): +- return +- default: +- } +- if err := editor.RegexpReplace(ctx, "roaring/roaring.go", fmt.Sprintf("MagicNumber%d", i), fmt.Sprintf("MagicNumber%d", i+1)); err != nil { +- t.Fatal(err) +- } +- // Simulate (very fast) typing. +- // +- // Typing 80 wpm ~150ms per keystroke. +- time.Sleep(150 * time.Millisecond) +- i++ +- } +-} +diff -urN a/gopls/internal/test/integration/bench/tests_test.go b/gopls/internal/test/integration/bench/tests_test.go +--- a/gopls/internal/test/integration/bench/tests_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/integration/bench/tests_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,96 +0,0 @@ +-// Copyright 2023 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +-package bench +- +-import ( +- "encoding/json" +- "testing" +- +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/gopls/internal/protocol/command" +- "golang.org/x/tools/gopls/internal/test/integration" +-) +- +-func BenchmarkPackagesCommand(b *testing.B) { +- // By convention, x/benchmarks runs the gopls benchmarks with -short, so that +- // we can use this flag to filter out benchmarks that should not be run by +- // the perf builder. +- // +- // In this case, the benchmark must be skipped because the current baseline +- // (gopls@v0.11.0) lacks the gopls.package command. 
+- if testing.Short() { +- b.Skip("not supported by the benchmark dashboard baseline") +- } +- +- tests := []struct { +- repo string +- files []string +- recurse bool +- }{ +- {"tools", []string{"internal/lsp/debounce_test.go"}, false}, +- } +- for _, test := range tests { +- b.Run(test.repo, func(b *testing.B) { +- args := command.PackagesArgs{ +- Mode: command.NeedTests, +- } +- +- env := getRepo(b, test.repo).sharedEnv(b) +- for _, file := range test.files { +- env.OpenFile(file) +- defer closeBuffer(b, env, file) +- args.Files = append(args.Files, env.Editor.DocumentURI(file)) +- } +- env.AfterChange() +- +- result := executePackagesCmd(b, env, args) // pre-warm +- +- // sanity check JSON {en,de}coding +- var pkgs command.PackagesResult +- data, err := json.Marshal(result) +- if err != nil { +- b.Fatal(err) +- } +- err = json.Unmarshal(data, &pkgs) +- if err != nil { +- b.Fatal(err) +- } +- var haveTest bool +- for _, pkg := range pkgs.Packages { +- for _, file := range pkg.TestFiles { +- if len(file.Tests) > 0 { +- haveTest = true +- break +- } +- } +- } +- if !haveTest { +- b.Fatalf("Expected tests") +- } +- +- b.ResetTimer() +- +- if stopAndRecord := startProfileIfSupported(b, env, qualifiedName(test.repo, "packages")); stopAndRecord != nil { +- defer stopAndRecord() +- } +- +- for b.Loop() { +- executePackagesCmd(b, env, args) +- } +- }) +- } +-} +- +-func executePackagesCmd(t testing.TB, env *integration.Env, args command.PackagesArgs) any { +- t.Helper() +- cmd := command.NewPackagesCommand("Packages", args) +- result, err := env.Editor.Server.ExecuteCommand(env.Ctx, &protocol.ExecuteCommandParams{ +- Command: command.Packages.String(), +- Arguments: cmd.Arguments, +- }) +- if err != nil { +- t.Fatal(err) +- } +- return result +-} +diff -urN a/gopls/internal/test/integration/bench/typing_test.go b/gopls/internal/test/integration/bench/typing_test.go +--- a/gopls/internal/test/integration/bench/typing_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/integration/bench/typing_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,63 +0,0 @@ +-// Copyright 2023 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package bench +- +-import ( +- "fmt" +- "sync/atomic" +- "testing" +- "time" +- +- "golang.org/x/tools/gopls/internal/protocol" +-) +- +-// BenchmarkTyping simulates typing steadily in a single file at different +-// paces. +-// +-// The key metric for this benchmark is not latency, but cpu_seconds per +-// operation. +-func BenchmarkTyping(b *testing.B) { +- for _, test := range didChangeTests { +- b.Run(test.repo, func(b *testing.B) { +- env := getRepo(b, test.repo).sharedEnv(b) +- env.OpenFile(test.file) +- defer closeBuffer(b, env, test.file) +- +- // Insert the text we'll be modifying at the top of the file. +- env.EditBuffer(test.file, protocol.TextEdit{NewText: "// __TEST_PLACEHOLDER_0__\n"}) +- env.AfterChange() +- +- delays := []time.Duration{ +- 10 * time.Millisecond, // automated changes +- 50 * time.Millisecond, // very fast mashing, or fast key sequences +- 150 * time.Millisecond, // avg interval for 80wpm typing. 
+- } +- +- for _, delay := range delays { +- b.Run(delay.String(), func(b *testing.B) { +- if stopAndRecord := startProfileIfSupported(b, env, qualifiedName(test.repo, "typing")); stopAndRecord != nil { +- defer stopAndRecord() +- } +- ticker := time.NewTicker(delay) +- for b.Loop() { +- edits := atomic.AddInt64(&editID, 1) +- env.EditBuffer(test.file, protocol.TextEdit{ +- Range: protocol.Range{ +- Start: protocol.Position{Line: 0, Character: 0}, +- End: protocol.Position{Line: 1, Character: 0}, +- }, +- // Increment the placeholder text, to ensure cache misses. +- NewText: fmt.Sprintf("// __TEST_PLACEHOLDER_%d__\n", edits), +- }) +- <-ticker.C +- } +- b.StopTimer() +- ticker.Stop() +- env.AfterChange() // wait for all change processing to complete +- }) +- } +- }) +- } +-} +diff -urN a/gopls/internal/test/integration/bench/unimported_test.go b/gopls/internal/test/integration/bench/unimported_test.go +--- a/gopls/internal/test/integration/bench/unimported_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/integration/bench/unimported_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,158 +0,0 @@ +-// Copyright 2025 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package bench +- +-import ( +- "context" +- "fmt" +- "go/token" +- "os" +- "os/exec" +- "path/filepath" +- "strings" +- "testing" +- +- . "golang.org/x/tools/gopls/internal/test/integration" +- "golang.org/x/tools/gopls/internal/test/integration/fake" +- "golang.org/x/tools/internal/modindex" +-) +- +-// experiments show the new code about 15 times faster than the old, +-// and the old code sometimes fails to find the completion +-func BenchmarkLocalModcache(b *testing.B) { +- budgets := []string{"0s", "100ms", "200ms", "500ms", "1s", "5s"} +- sources := []string{"gopls", "goimports"} +- for _, budget := range budgets { +- b.Run(fmt.Sprintf("budget=%s", budget), func(b *testing.B) { +- for _, source := range sources { +- b.Run(fmt.Sprintf("source=%s", source), func(b *testing.B) { +- runModcacheCompletion(b, budget, source) +- }) +- } +- }) +- } +-} +- +-func runModcacheCompletion(b *testing.B, budget, source string) { +- // First set up the program to be edited +- gomod := ` +-module mod.com +- +-go 1.21 +-` +- pat := ` +-package main +-var _ = %s.%s +-` +- pkg, name, modcache := findSym(b) +- name, _, _ = strings.Cut(name, " ") +- mainfile := fmt.Sprintf(pat, pkg, name) +- // Second, create the Env and start gopls +- dir := getTempDir() +- if err := os.Mkdir(dir, 0750); err != nil { +- if !os.IsExist(err) { +- b.Fatal(err) +- } +- } +- defer os.RemoveAll(dir) // is this right? needed? +- if err := os.WriteFile(filepath.Join(dir, "go.mod"), []byte(gomod), 0644); err != nil { +- b.Fatal(err) +- } +- if err := os.WriteFile(filepath.Join(dir, "main.go"), []byte(mainfile), 0644); err != nil { +- b.Fatal(err) +- } +- ts, err := newGoplsConnector(nil) +- if err != nil { +- b.Fatal(err) +- } +- // PJW: put better EditorConfig here +- envvars := map[string]string{ +- "GOMODCACHE": modcache, +- //"GOPATH": sandbox.GOPATH(), // do we need a GOPATH? 
+- } +- fc := fake.EditorConfig{ +- Env: envvars, +- Settings: map[string]any{ +- "completeUnimported": true, +- "completionBudget": budget, // "0s", "100ms" +- "importsSource": source, // "gopls" or "goimports" +- }, +- } +- sandbox, editor, awaiter, err := connectEditor(dir, fc, ts) +- if err != nil { +- b.Fatal(err) +- } +- defer sandbox.Close() +- defer editor.Close(context.Background()) +- if err := awaiter.Await(context.Background(), InitialWorkspaceLoad); err != nil { +- b.Fatal(err) +- } +- env := &Env{ +- TB: b, +- Ctx: context.Background(), +- Editor: editor, +- Sandbox: sandbox, +- Awaiter: awaiter, +- } +- // Check that completion works as expected +- env.CreateBuffer("main.go", mainfile) +- env.AfterChange() +- if false { // warm up? or not? +- loc := env.RegexpSearch("main.go", name) +- completions := env.Completion(loc) +- if len(completions.Items) == 0 { +- b.Fatal("no completions") +- } +- } +- +- // run benchmark +- for b.Loop() { +- loc := env.RegexpSearch("main.go", name) +- env.Completion(loc) +- } +-} +- +-// find some symbol in the module cache +-func findSym(t testing.TB) (pkg, name, gomodcache string) { +- initForTest(t) +- cmd := exec.Command("go", "env", "GOMODCACHE") +- out, err := cmd.Output() +- if err != nil { +- t.Fatal(err) +- } +- modcache := strings.TrimSpace(string(out)) +- ix, err := modindex.Read(modcache) +- if err != nil { +- t.Fatal(err) +- } +- if ix == nil { +- t.Fatal("nil index") +- } +- nth := 100 // or something +- for _, e := range ix.Entries { +- if token.IsExported(e.PkgName) || strings.HasPrefix(e.PkgName, "_") { +- continue // weird stuff in module cache +- } +- +- for _, nm := range e.Names { +- nth-- +- if nth == 0 { +- return e.PkgName, nm, modcache +- } +- } +- } +- t.Fatalf("index doesn't have enough usable names, need another %d", nth) +- return "", "", modcache +-} +- +-// Set IndexDir, avoiding the special case for tests, +-func initForTest(t testing.TB) { +- dir, err := os.UserCacheDir() +- if err != nil { +- t.Fatalf("os.UserCacheDir: %v", err) +- } +- dir = filepath.Join(dir, "go", "imports") +- modindex.IndexDir = dir +-} +diff -urN a/gopls/internal/test/integration/bench/workspace_symbols_test.go b/gopls/internal/test/integration/bench/workspace_symbols_test.go +--- a/gopls/internal/test/integration/bench/workspace_symbols_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/integration/bench/workspace_symbols_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,43 +0,0 @@ +-// Copyright 2022 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package bench +- +-import ( +- "flag" +- "fmt" +- "testing" +- "time" +-) +- +-var symbolQuery = flag.String("symbol_query", "test", "symbol query to use in benchmark") +- +-// BenchmarkWorkspaceSymbols benchmarks the time to execute a workspace symbols +-// request (controlled by the -symbol_query flag). +-func BenchmarkWorkspaceSymbols(b *testing.B) { +- for name := range repos { +- b.Run(name, func(b *testing.B) { +- env := getRepo(b, name).sharedEnv(b) +- start := time.Now() +- symbols := env.Symbol(*symbolQuery) // warm the cache +- +- if testing.Verbose() { +- fmt.Printf("Results (after %s):\n", time.Since(start)) +- for i, symbol := range symbols { +- fmt.Printf("\t%d. 
%s (%s)\n", i, symbol.Name, symbol.ContainerName) +- } +- } +- +- b.ResetTimer() +- +- if stopAndRecord := startProfileIfSupported(b, env, qualifiedName(name, "workspaceSymbols")); stopAndRecord != nil { +- defer stopAndRecord() +- } +- +- for b.Loop() { +- env.Symbol(*symbolQuery) +- } +- }) +- } +-} +diff -urN a/gopls/internal/test/integration/codelens/codelens_test.go b/gopls/internal/test/integration/codelens/codelens_test.go +--- a/gopls/internal/test/integration/codelens/codelens_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/integration/codelens/codelens_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,408 +0,0 @@ +-// Copyright 2020 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package codelens +- +-import ( +- "fmt" +- "os" +- "testing" +- +- "golang.org/x/tools/gopls/internal/server" +- "golang.org/x/tools/gopls/internal/settings" +- "golang.org/x/tools/gopls/internal/test/compare" +- . "golang.org/x/tools/gopls/internal/test/integration" +- "golang.org/x/tools/gopls/internal/util/bug" +- +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/gopls/internal/protocol/command" +- "golang.org/x/tools/internal/testenv" +-) +- +-func TestMain(m *testing.M) { +- bug.PanicOnBugs = true +- os.Exit(Main(m)) +-} +- +-func TestDisablingCodeLens(t *testing.T) { +- const workspace = ` +--- go.mod -- +-module codelens.test +- +-go 1.12 +--- lib.go -- +-package lib +- +-type Number int +- +-const ( +- Zero Number = iota +- One +- Two +-) +- +-//` + `go:generate stringer -type=Number +-` +- tests := []struct { +- label string +- enabled map[string]bool +- wantCodeLens bool +- }{ +- { +- label: "default", +- wantCodeLens: true, +- }, +- { +- label: "generate disabled", +- enabled: map[string]bool{string(settings.CodeLensGenerate): false}, +- wantCodeLens: false, +- }, +- } +- for _, test := range tests { +- t.Run(test.label, func(t *testing.T) { +- WithOptions( +- Settings{"codelenses": test.enabled}, +- ).Run(t, workspace, func(t *testing.T, env *Env) { +- env.OpenFile("lib.go") +- lens := env.CodeLens("lib.go") +- if gotCodeLens := len(lens) > 0; gotCodeLens != test.wantCodeLens { +- t.Errorf("got codeLens: %t, want %t", gotCodeLens, test.wantCodeLens) +- } +- }) +- }) +- } +-} +- +-const proxyWithLatest = ` +--- golang.org/x/hello@v1.3.3/go.mod -- +-module golang.org/x/hello +- +-go 1.12 +--- golang.org/x/hello@v1.3.3/hi/hi.go -- +-package hi +- +-var Goodbye error +--- golang.org/x/hello@v1.2.3/go.mod -- +-module golang.org/x/hello +- +-go 1.12 +--- golang.org/x/hello@v1.2.3/hi/hi.go -- +-package hi +- +-var Goodbye error +-` +- +-// This test confirms the full functionality of the code lenses for updating +-// dependencies in a go.mod file, when using a go.work file. It checks for the +-// code lens that suggests an update and then executes the command associated +-// with that code lens. A regression test for golang/go#39446. It also checks +-// that these code lenses only affect the diagnostics and contents of the +-// containing go.mod file. 
+-func TestUpgradeCodelens_Workspace(t *testing.T) { +- const shouldUpdateDep = ` +--- go.work -- +-go 1.18 +- +-use ( +- ./a +- ./b +-) +--- a/go.mod -- +-module mod.com/a +- +-go 1.14 +- +-require golang.org/x/hello v1.2.3 +--- a/go.sum -- +-golang.org/x/hello v1.2.3 h1:7Wesfkx/uBd+eFgPrq0irYj/1XfmbvLV8jZ/W7C2Dwg= +-golang.org/x/hello v1.2.3/go.mod h1:OgtlzsxVMUUdsdQCIDYgaauCTH47B8T8vofouNJfzgY= +--- a/main.go -- +-package main +- +-import "golang.org/x/hello/hi" +- +-func main() { +- _ = hi.Goodbye +-} +--- b/go.mod -- +-module mod.com/b +- +-go 1.14 +- +-require golang.org/x/hello v1.2.3 +--- b/go.sum -- +-golang.org/x/hello v1.2.3 h1:7Wesfkx/uBd+eFgPrq0irYj/1XfmbvLV8jZ/W7C2Dwg= +-golang.org/x/hello v1.2.3/go.mod h1:OgtlzsxVMUUdsdQCIDYgaauCTH47B8T8vofouNJfzgY= +--- b/main.go -- +-package main +- +-import ( +- "golang.org/x/hello/hi" +-) +- +-func main() { +- _ = hi.Goodbye +-} +-` +- +- const wantGoModA = `module mod.com/a +- +-go 1.14 +- +-require golang.org/x/hello v1.3.3 +-` +- // Applying the diagnostics or running the codelenses for a/go.mod +- // should not change the contents of b/go.mod +- const wantGoModB = `module mod.com/b +- +-go 1.14 +- +-require golang.org/x/hello v1.2.3 +-` +- +- for _, commandTitle := range []string{ +- "Upgrade transitive dependencies", +- "Upgrade direct dependencies", +- } { +- t.Run(commandTitle, func(t *testing.T) { +- WithOptions( +- ProxyFiles(proxyWithLatest), +- ).Run(t, shouldUpdateDep, func(t *testing.T, env *Env) { +- env.OpenFile("a/go.mod") +- env.OpenFile("b/go.mod") +- var lens protocol.CodeLens +- var found bool +- for _, l := range env.CodeLens("a/go.mod") { +- if l.Command.Title == commandTitle { +- lens = l +- found = true +- } +- } +- if !found { +- t.Fatalf("found no command with the title %s", commandTitle) +- } +- if err := env.Editor.ExecuteCommand(env.Ctx, &protocol.ExecuteCommandParams{ +- Command: lens.Command.Command, +- Arguments: lens.Command.Arguments, +- }, nil); err != nil { +- t.Fatal(err) +- } +- env.AfterChange() +- if got := env.BufferText("a/go.mod"); got != wantGoModA { +- t.Fatalf("a/go.mod upgrade failed:\n%s", compare.Text(wantGoModA, got)) +- } +- if got := env.BufferText("b/go.mod"); got != wantGoModB { +- t.Fatalf("b/go.mod changed unexpectedly:\n%s", compare.Text(wantGoModB, got)) +- } +- }) +- }) +- } +- for _, vendoring := range []bool{false, true} { +- t.Run(fmt.Sprintf("Upgrade individual dependency vendoring=%v", vendoring), func(t *testing.T) { +- WithOptions( +- ProxyFiles(proxyWithLatest), +- ).Run(t, shouldUpdateDep, func(t *testing.T, env *Env) { +- if vendoring { +- env.RunGoCommandInDirWithEnv("a", []string{"GOWORK=off"}, "mod", "vendor") +- } +- env.AfterChange() +- env.OpenFile("a/go.mod") +- env.OpenFile("b/go.mod") +- +- env.ExecuteCodeLensCommand("a/go.mod", command.CheckUpgrades, nil) +- d := &protocol.PublishDiagnosticsParams{} +- env.OnceMet( +- CompletedWork(server.DiagnosticWorkTitle(server.FromCheckUpgrades), 1, true), +- Diagnostics(env.AtRegexp("a/go.mod", `require`), WithMessage("can be upgraded")), +- ReadDiagnostics("a/go.mod", d), +- // We do not want there to be a diagnostic for b/go.mod, +- // but there may be some subtlety in timing here, where this +- // should always succeed, but may not actually test the correct +- // behavior. +- NoDiagnostics(env.AtRegexp("b/go.mod", `require`)), +- ) +- // Check for upgrades in b/go.mod and then clear them. 
+- env.ExecuteCodeLensCommand("b/go.mod", command.CheckUpgrades, nil) +- env.OnceMet( +- CompletedWork(server.DiagnosticWorkTitle(server.FromCheckUpgrades), 2, true), +- Diagnostics(env.AtRegexp("b/go.mod", `require`), WithMessage("can be upgraded")), +- ) +- env.ExecuteCodeLensCommand("b/go.mod", command.ResetGoModDiagnostics, nil) +- env.OnceMet( +- CompletedWork(server.DiagnosticWorkTitle(server.FromResetGoModDiagnostics), 1, true), +- NoDiagnostics(ForFile("b/go.mod")), +- ) +- +- // Apply the diagnostics to a/go.mod. +- env.ApplyQuickFixes("a/go.mod", d.Diagnostics) +- env.AfterChange() +- if got := env.BufferText("a/go.mod"); got != wantGoModA { +- t.Fatalf("a/go.mod upgrade failed:\n%s", compare.Text(wantGoModA, got)) +- } +- if got := env.BufferText("b/go.mod"); got != wantGoModB { +- t.Fatalf("b/go.mod changed unexpectedly:\n%s", compare.Text(wantGoModB, got)) +- } +- }) +- }) +- } +-} +- +-func TestUpgradeCodelens_ModVendor(t *testing.T) { +- // This test checks the regression of golang/go#66055. The upgrade codelens +- // should work in a mod vendor context (the test above using a go.work file +- // was not broken). +- testenv.NeedsGoCommand1Point(t, 22) +- +- const shouldUpdateDep = ` +--- go.mod -- +-module mod.com/a +- +-go 1.22 +- +-require golang.org/x/hello v1.2.3 +--- main.go -- +-package main +- +-import "golang.org/x/hello/hi" +- +-func main() { +- _ = hi.Goodbye +-} +-` +- +- const wantGoModA = `module mod.com/a +- +-go 1.22 +- +-require golang.org/x/hello v1.3.3 +-` +- +- WithOptions( +- WriteGoSum("."), +- ProxyFiles(proxyWithLatest), +- ).Run(t, shouldUpdateDep, func(t *testing.T, env *Env) { +- env.RunGoCommand("mod", "vendor") +- env.AfterChange() +- env.OpenFile("go.mod") +- +- env.ExecuteCodeLensCommand("go.mod", command.CheckUpgrades, nil) +- d := &protocol.PublishDiagnosticsParams{} +- env.OnceMet( +- CompletedWork(server.DiagnosticWorkTitle(server.FromCheckUpgrades), 1, true), +- Diagnostics(env.AtRegexp("go.mod", `require`), WithMessage("can be upgraded")), +- ReadDiagnostics("go.mod", d), +- ) +- +- // Apply the diagnostics to a/go.mod. 
+- env.ApplyQuickFixes("go.mod", d.Diagnostics) +- env.AfterChange() +- if got := env.BufferText("go.mod"); got != wantGoModA { +- t.Fatalf("go.mod upgrade failed:\n%s", compare.Text(wantGoModA, got)) +- } +- }) +-} +- +-func TestUnusedDependenciesCodelens(t *testing.T) { +- const proxy = ` +--- golang.org/x/hello@v1.0.0/go.mod -- +-module golang.org/x/hello +- +-go 1.14 +--- golang.org/x/hello@v1.0.0/hi/hi.go -- +-package hi +- +-var Goodbye error +--- golang.org/x/unused@v1.0.0/go.mod -- +-module golang.org/x/unused +- +-go 1.14 +--- golang.org/x/unused@v1.0.0/nouse/nouse.go -- +-package nouse +- +-var NotUsed error +-` +- +- const shouldRemoveDep = ` +--- go.mod -- +-module mod.com +- +-go 1.14 +- +-require golang.org/x/hello v1.0.0 +-require golang.org/x/unused v1.0.0 +- +-// EOF +--- main.go -- +-package main +- +-import "golang.org/x/hello/hi" +- +-func main() { +- _ = hi.Goodbye +-} +-` +- WithOptions( +- WriteGoSum("."), +- ProxyFiles(proxy), +- ).Run(t, shouldRemoveDep, func(t *testing.T, env *Env) { +- env.OpenFile("go.mod") +- env.RegexpReplace("go.mod", "// EOF", "// EOF unsaved edit") // unsaved edits ok +- env.ExecuteCodeLensCommand("go.mod", command.Tidy, nil) +- env.AfterChange() +- got := env.BufferText("go.mod") +- const wantGoMod = `module mod.com +- +-go 1.14 +- +-require golang.org/x/hello v1.0.0 +- +-// EOF unsaved edit +-` +- if got != wantGoMod { +- t.Fatalf("go.mod tidy failed:\n%s", compare.Text(wantGoMod, got)) +- } +- }) +-} +- +-func TestRegenerateCgo(t *testing.T) { +- testenv.NeedsTool(t, "cgo") +- const workspace = ` +--- go.mod -- +-module example.com +- +-go 1.12 +--- cgo.go -- +-package x +- +-/* +-int fortythree() { return 42; } +-*/ +-import "C" +- +-func Foo() { +- print(C.fortytwo()) +-} +-` +- Run(t, workspace, func(t *testing.T, env *Env) { +- // Open the file. We have a nonexistant symbol that will break cgo processing. +- env.OpenFile("cgo.go") +- env.AfterChange( +- Diagnostics(env.AtRegexp("cgo.go", ``), WithMessage("go list failed to return CompiledGoFiles")), +- ) +- +- // Fix the C function name. We haven't regenerated cgo, so nothing should be fixed. +- env.RegexpReplace("cgo.go", `int fortythree`, "int fortytwo") +- env.SaveBuffer("cgo.go") +- env.AfterChange( +- Diagnostics(env.AtRegexp("cgo.go", ``), WithMessage("go list failed to return CompiledGoFiles")), +- ) +- +- // Regenerate cgo, fixing the diagnostic. +- env.ExecuteCodeLensCommand("cgo.go", command.RegenerateCgo, nil) +- env.OnceMet( +- CompletedWork(server.DiagnosticWorkTitle(server.FromRegenerateCgo), 1, true), +- NoDiagnostics(ForFile("cgo.go")), +- ) +- }) +-} +diff -urN a/gopls/internal/test/integration/completion/completion18_test.go b/gopls/internal/test/integration/completion/completion18_test.go +--- a/gopls/internal/test/integration/completion/completion18_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/integration/completion/completion18_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,132 +0,0 @@ +-// Copyright 2021 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package completion +- +-// This file is misnamed; it has no version constraints. +-// TODO(adonovan): fold into completion_test.go +- +-import ( +- "testing" +- +- "golang.org/x/tools/gopls/internal/protocol" +- . 
"golang.org/x/tools/gopls/internal/test/integration" +- "golang.org/x/tools/internal/testenv" +-) +- +-// test generic receivers +-func TestGenericReceiver(t *testing.T) { +- const files = ` +--- go.mod -- +-module mod.com +- +-go 1.18 +--- main.go -- +-package main +-type SyncMap[K any, V comparable] struct {} +-func (s *SyncMap[K,V]) f() {} +-type XX[T any] struct {} +-type UU[T any] struct {} +-func (s SyncMap[XX,string]) g(v UU) {} +-` +- +- tests := []struct { +- pat string +- want []string +- }{ +- {"s .Syn", []string{"SyncMap[K, V]"}}, +- {"Map.X", []string{}}, // This is probably wrong, Maybe "XX"? +- {"v U", []string{"UU", "uint", "uint16", "uint32", "uint64", "uint8", "uintptr"}}, // not U[T] +- } +- Run(t, files, func(t *testing.T, env *Env) { +- env.OpenFile("main.go") +- env.Await(env.DoneWithOpen()) +- for _, tst := range tests { +- loc := env.RegexpSearch("main.go", tst.pat) +- loc.Range.Start.Character += uint32(protocol.UTF16Len([]byte(tst.pat))) +- completions := env.Completion(loc) +- result := compareCompletionLabels(tst.want, completions.Items) +- if result != "" { +- t.Errorf("%s: wanted %v", result, tst.want) +- for i, g := range completions.Items { +- t.Errorf("got %d %s %s", i, g.Label, g.Detail) +- } +- } +- } +- }) +-} +- +-func TestFuzzFunc(t *testing.T) { +- // The behavior under test is derived from the std module, +- // not the x/tools/internal/stdlib linked into gopls. +- testenv.NeedsGoCommand1Point(t, 25) // go1.25 added TBF.Attr +- +- // use the example from the package documentation +- modfile := ` +--- go.mod -- +-module mod.com +- +-go 1.25 +-` +- part0 := `package foo +-import "testing" +-func FuzzNone(f *testing.F) { +- f.Add(12) // better not find this f.Add +-} +-func FuzzHex(f *testing.F) { +- for _, seed := range [][]byte{{}, {0}, {9}, {0xa}, {0xf}, {1, 2, 3, 4}} { +- f.Ad` +- part1 := `d(seed) +- } +- f.F` +- part2 := `uzz(func(t *testing.T, in []byte) { +- enc := hex.EncodeToString(in) +- out, err := hex.DecodeString(enc) +- if err != nil { +- f.Failed() +- } +- if !bytes.Equal(in, out) { +- t.Fatalf("%v: round trip: %v, %s", in, out, f.Name()) +- } +- }) +-} +-` +- data := modfile + `-- a_test.go -- +-` + part0 + ` +--- b_test.go -- +-` + part0 + part1 + ` +--- c_test.go -- +-` + part0 + part1 + part2 +- +- tests := []struct { +- file string +- pat string +- offset uint32 // UTF16 length from the beginning of pat to what the user just typed +- want []string +- }{ +- // To avoid breaking these assertions as the "testing" package evolves, +- // use an optional (?) suffix for newer symbols. +- {"a_test.go", "f.Ad", 3, []string{"Add", "ArtifactDir?", "Attr"}}, // Attr is 1.25, Artifact is 1.26 +- {"c_test.go", " f.F", 4, []string{"Failed"}}, +- {"c_test.go", "f.N", 3, []string{"Name"}}, +- {"b_test.go", "f.F", 3, []string{"Fuzz(func(t *testing.T, a []byte)", "Fail", "FailNow", +- "Failed", "Fatal", "Fatalf"}}, +- } +- Run(t, data, func(t *testing.T, env *Env) { +- for _, test := range tests { +- env.OpenFile(test.file) +- env.Await(env.DoneWithOpen()) +- loc := env.RegexpSearch(test.file, test.pat) +- loc.Range.Start.Character += test.offset // character user just typed? will type? 
+- completions := env.Completion(loc) +- result := compareCompletionLabels(test.want, completions.Items) +- if result != "" { +- t.Errorf("pat=%q <<%s>>", test.pat, result) +- for i, it := range completions.Items { +- t.Errorf("%d got %q %q", i, it.Label, it.Detail) +- } +- } +- } +- }) +-} +diff -urN a/gopls/internal/test/integration/completion/completion_test.go b/gopls/internal/test/integration/completion/completion_test.go +--- a/gopls/internal/test/integration/completion/completion_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/integration/completion/completion_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,1626 +0,0 @@ +-// Copyright 2020 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package completion +- +-import ( +- "fmt" +- "os" +- "path/filepath" +- "slices" +- "sort" +- "strings" +- "testing" +- "time" +- +- "github.com/google/go-cmp/cmp" +- "golang.org/x/telemetry/counter" +- "golang.org/x/telemetry/counter/countertest" +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/gopls/internal/server" +- "golang.org/x/tools/gopls/internal/settings" +- . "golang.org/x/tools/gopls/internal/test/integration" +- "golang.org/x/tools/gopls/internal/test/integration/fake" +- "golang.org/x/tools/gopls/internal/util/bug" +- "golang.org/x/tools/internal/testenv" +-) +- +-func TestMain(m *testing.M) { +- bug.PanicOnBugs = true +- os.Exit(Main(m)) +-} +- +-const proxy = ` +--- example.com@v1.2.3/go.mod -- +-module example.com +- +-go 1.12 +--- example.com@v1.2.3/blah/blah.go -- +-package blah +- +-const Name = "Blah" +--- random.org@v1.2.3/go.mod -- +-module random.org +- +-go 1.12 +--- random.org@v1.2.3/blah/blah.go -- +-package hello +- +-const Name = "Hello" +-` +- +-func TestPackageCompletion(t *testing.T) { +- const files = ` +--- go.mod -- +-module mod.com +- +-go 1.12 +--- cmd/main.go -- +-package main +--- cmd/testfile.go -- +-package +--- fruits/apple.go -- +-package apple +- +-fun apple() int { +- return 0 +-} +- +--- fruits/testfile.go -- +-// this is a comment +- +-/* +- this is a multiline comment +-*/ +- +-import "fmt" +- +-func test() {} +- +--- fruits/testfile2.go -- +-package +- +--- fruits/testfile3.go -- +-pac +--- 123f_r.u~its-123/testfile.go -- +-package +- +--- .invalid-dir@-name/testfile.go -- +-package +-` +- var ( +- testfile4 = "" +- testfile5 = "/*a comment*/ " +- testfile6 = "/*a comment*/\n" +- ) +- for _, tc := range []struct { +- name string +- filename string +- content *string +- triggerRegexp string +- want []string +- editRegexp string +- }{ +- { +- name: "main package completion after package keyword", +- filename: "cmd/testfile.go", +- triggerRegexp: "package()", +- want: []string{"package main", "package cmd", "package cmd_test"}, +- editRegexp: "package", +- }, +- { +- name: "package completion at valid position", +- filename: "fruits/testfile.go", +- triggerRegexp: "\n()", +- want: []string{"package apple", "package apple_test", "package fruits", "package fruits_test", "package main"}, +- editRegexp: "\n()", +- }, +- { +- name: "package completion in a comment", +- filename: "fruits/testfile.go", +- triggerRegexp: "th(i)s", +- want: nil, +- }, +- { +- name: "package completion in a multiline comment", +- filename: "fruits/testfile.go", +- triggerRegexp: `\/\*\n()`, +- want: nil, +- }, +- { +- name: "package completion at invalid position", +- filename: "fruits/testfile.go", +- triggerRegexp: "import 
\"fmt\"\n()", +- want: nil, +- }, +- { +- name: "package completion after package keyword", +- filename: "fruits/testfile2.go", +- triggerRegexp: "package()", +- want: []string{"package apple", "package apple_test", "package fruits", "package fruits_test", "package main"}, +- editRegexp: "package", +- }, +- { +- name: "package completion with 'pac' prefix", +- filename: "fruits/testfile3.go", +- triggerRegexp: "pac()", +- want: []string{"package apple", "package apple_test", "package fruits", "package fruits_test", "package main"}, +- editRegexp: "pac", +- }, +- { +- name: "package completion for empty file", +- filename: "fruits/testfile4.go", +- triggerRegexp: "^$", +- content: &testfile4, +- want: []string{"package apple", "package apple_test", "package fruits", "package fruits_test", "package main"}, +- editRegexp: "^$", +- }, +- { +- name: "package completion without terminal newline", +- filename: "fruits/testfile5.go", +- triggerRegexp: `\*\/ ()`, +- content: &testfile5, +- want: []string{"package apple", "package apple_test", "package fruits", "package fruits_test", "package main"}, +- editRegexp: `\*\/ ()`, +- }, +- { +- name: "package completion on terminal newline", +- filename: "fruits/testfile6.go", +- triggerRegexp: `\*\/\n()`, +- content: &testfile6, +- want: []string{"package apple", "package apple_test", "package fruits", "package fruits_test", "package main"}, +- editRegexp: `\*\/\n()`, +- }, +- // Issue golang/go#44680 +- { +- name: "package completion for dir name with punctuation", +- filename: "123f_r.u~its-123/testfile.go", +- triggerRegexp: "package()", +- want: []string{"package fruits123", "package fruits123_test", "package main"}, +- editRegexp: "package", +- }, +- { +- name: "package completion for invalid dir name", +- filename: ".invalid-dir@-name/testfile.go", +- triggerRegexp: "package()", +- want: []string{"package main"}, +- editRegexp: "package", +- }, +- } { +- t.Run(tc.name, func(t *testing.T) { +- Run(t, files, func(t *testing.T, env *Env) { +- if tc.content != nil { +- env.WriteWorkspaceFile(tc.filename, *tc.content) +- env.Await(env.DoneWithChangeWatchedFiles()) +- } +- env.OpenFile(tc.filename) +- completions := env.Completion(env.RegexpSearch(tc.filename, tc.triggerRegexp)) +- +- // Check that the completion item suggestions are in the range +- // of the file. {Start,End}.Line are zero-based. 
+- lineCount := len(strings.Split(env.BufferText(tc.filename), "\n")) +- for _, item := range completions.Items { +- for _, mode := range []string{"replace", "insert"} { +- edit, err := protocol.SelectCompletionTextEdit(item, mode == "replace") +- if err != nil { +- t.Fatalf("unexpected text edit in completion item (%v): %v", mode, err) +- } +- if start := int(edit.Range.Start.Line); start > lineCount { +- t.Fatalf("unexpected text edit range (%v) start line number: got %d, want <= %d", mode, start, lineCount) +- } +- if end := int(edit.Range.End.Line); end > lineCount { +- t.Fatalf("unexpected text edit range (%v) end line number: got %d, want <= %d", mode, end, lineCount) +- } +- } +- } +- +- if tc.want != nil { +- expectedLoc := env.RegexpSearch(tc.filename, tc.editRegexp) +- for _, item := range completions.Items { +- for _, mode := range []string{"replace", "insert"} { +- edit, _ := protocol.SelectCompletionTextEdit(item, mode == "replace") +- gotRng := edit.Range +- if expectedLoc.Range != gotRng { +- t.Errorf("unexpected completion range (%v) for completion item %s: got %v, want %v", +- mode, item.Label, gotRng, expectedLoc.Range) +- } +- } +- } +- } +- +- diff := compareCompletionLabels(tc.want, completions.Items) +- if diff != "" { +- t.Error(diff) +- } +- }) +- }) +- } +-} +- +-func TestPackageNameCompletion(t *testing.T) { +- const files = ` +--- go.mod -- +-module mod.com +- +-go 1.12 +--- math/add.go -- +-package ma +-` +- +- want := []string{"ma", "ma_test", "main", "math", "math_test"} +- Run(t, files, func(t *testing.T, env *Env) { +- env.OpenFile("math/add.go") +- completions := env.Completion(env.RegexpSearch("math/add.go", "package ma()")) +- +- diff := compareCompletionLabels(want, completions.Items) +- if diff != "" { +- t.Fatal(diff) +- } +- }) +-} +- +-// compareCompletionLabels returns a non-empty string reporting the +-// difference (if any) between the labels of the actual completion +-// items (gotItems) and the expected list (want). +-// +-// A want item with a "?" suffix is optional. +-// +-// TODO(rfindley): audit/clean up call sites for this helper, to ensure +-// consistent test errors. +-func compareCompletionLabels(want []string, gotItems []protocol.CompletionItem) string { +- var got []string +- for _, item := range gotItems { +- got = append(got, item.Label) +- if item.Label != item.InsertText && item.TextEdit == nil { +- // Label should be the same as InsertText, if InsertText is to be used +- return fmt.Sprintf("label not the same as InsertText %#v", item) +- } +- } +- +- if len(got) == 0 && len(want) == 0 { +- return "" // treat nil and the empty slice as equivalent +- } +- +- // A 'want' item with a '?' suffix is optional, to ease +- // migration across versions. Remove any that are not present +- // in the 'got' set. +- var out []string +- for _, item := range want { +- item, optional := strings.CutSuffix(item, "?") +- if optional && !slices.Contains(got, item) { +- continue // optional item is missing +- } +- out = append(out, item) +- } +- want = out +- +- if diff := cmp.Diff(want, got); diff != "" { +- return fmt.Sprintf("completion item mismatch (-want +got):\n%s", diff) +- } +- return "" +-} +- +-func TestIssue74611(t *testing.T) { +- // Completions should not offer symbols from unimported packages +- // that cannot be imported because they are "internal". +- // +- // Ideally, we should test std case as well but because mocking +- // a fake stdlib is hard, we just test the 3rd user case. +- // std test is done interactively. 
+- const files = `-- go.mod -- +-module mod.com/cmd +- +-go 1.21 +- +--- a.go -- +-package pkg +-import "fmt" +- +-func main() { +- fmt.Println("call Println to use fmt") +- _ = maps +-} // (completion requested at start of line) +-` +- +- WithOptions().Run(t, files, func(t *testing.T, env *Env) { +- filename := "a.go" +- // Trigger unimported completions for the maps package. +- env.OpenFile(filename) +- env.Await(env.DoneWithOpen()) +- loc := env.RegexpSearch(filename, "\n}") +- completions := env.Completion(loc) +- if len(completions.Items) == 0 { +- t.Fatalf("no completion items") +- } +- runtimeMaps := `"internal/runtime/maps"` +- found := slices.ContainsFunc(completions.Items, func(item protocol.CompletionItem) bool { +- return item.Detail == runtimeMaps +- }) +- +- if found { +- t.Fatalf("unwanted completion: %s", runtimeMaps) +- } +- }) +-} +- +-func TestUnimportedCompletion(t *testing.T) { +- const mod = ` +--- go.mod -- +-module mod.com +- +-go 1.14 +- +-require example.com v1.2.3 +--- main.go -- +-package main +- +-func main() { +- _ = blah +-} +--- main2.go -- +-package main +- +-import "example.com/blah" +- +-func _() { +- _ = blah.Hello +-} +-` +- WithOptions( +- WriteGoSum("."), +- ProxyFiles(proxy), +- Settings{"importsSource": settings.ImportsSourceGopls}, +- ).Run(t, mod, func(t *testing.T, env *Env) { +- // Make sure the dependency is in the module cache and accessible for +- // unimported completions, and then remove it before proceeding. +- env.RemoveWorkspaceFile("main2.go") +- env.RunGoCommand("mod", "tidy") +- env.Await(env.DoneWithChangeWatchedFiles()) +- +- // Trigger unimported completions for the example.com/blah package. +- env.OpenFile("main.go") +- env.Await(env.DoneWithOpen()) +- loc := env.RegexpSearch("main.go", "ah") +- completions := env.Completion(loc) +- if len(completions.Items) == 0 { +- t.Fatalf("no completion items") +- } +- env.AcceptCompletion(loc, completions.Items[0]) // adds blah import to main.go +- env.Await(env.DoneWithChange()) +- +- // Trigger completions once again for the blah.<> selector. +- env.RegexpReplace("main.go", "_ = blah", "_ = blah.") +- env.Await(env.DoneWithChange()) +- loc = env.RegexpSearch("main.go", "\n}") +- completions = env.Completion(loc) +- if len(completions.Items) != 1 { +- t.Fatalf("expected 1 completion item, got %v", len(completions.Items)) +- } +- item := completions.Items[0] +- if item.Label != "Name" { +- t.Fatalf("expected completion item blah.Name, got %v", item.Label) +- } +- env.AcceptCompletion(loc, item) +- +- // Await the diagnostics to add example.com/blah to the go.mod file. +- env.AfterChange( +- Diagnostics(env.AtRegexp("main.go", `"example.com/blah"`)), +- ) +- }) +-} +- +-// Test that completions still work with an undownloaded module, golang/go#43333. +-func TestUndownloadedModule(t *testing.T) { +- // mod.com depends on example.com, but only in a file that's hidden by a +- // build tag, so the IWL won't download example.com. That will cause errors +- // in the go list -m call performed by the imports package. 
+- const files = ` +--- go.mod -- +-module mod.com +- +-go 1.14 +- +-require example.com v1.2.3 +--- useblah.go -- +-// +build hidden +- +-package pkg +-import "example.com/blah" +-var _ = blah.Name +--- mainmod/mainmod.go -- +-package mainmod +- +-const Name = "mainmod" +-` +- WithOptions( +- WriteGoSum("."), +- Settings{"importsSource": settings.ImportsSourceGopls}, +- ProxyFiles(proxy)).Run(t, files, func(t *testing.T, env *Env) { +- env.CreateBuffer("import.go", "package pkg\nvar _ = mainmod.Name\n") +- env.SaveBuffer("import.go") +- content := env.ReadWorkspaceFile("import.go") +- if !strings.Contains(content, `import "mod.com/mainmod`) { +- t.Errorf("expected import of mod.com/mainmod in %q", content) +- } +- }) +-} +- +-// Test that we can doctor the source code enough so the file is +-// parseable and completion works as expected. +-func TestSourceFixup(t *testing.T) { +- // This example relies on the fixer to turn "s." into "s._" so +- // that it parses as a SelectorExpr with only local problems, +- // instead of snarfing up the following declaration of S +- // looking for an identifier; thus completion offers s.i. +- const files = ` +--- go.mod -- +-module mod.com +- +-go 1.12 +--- foo.go -- +-package foo +- +-func _() { +- var s S +- if s. +-} +- +-type S struct { +- i int +-} +-` +- +- Run(t, files, func(t *testing.T, env *Env) { +- env.OpenFile("foo.go") +- completions := env.Completion(env.RegexpSearch("foo.go", `if s\.()`)) +- diff := compareCompletionLabels([]string{"i"}, completions.Items) +- if diff != "" { +- t.Fatal(diff) +- } +- }) +-} +- +-func TestCompletion_Issue45510(t *testing.T) { +- const files = ` +--- go.mod -- +-module mod.com +- +-go 1.12 +--- main.go -- +-package main +- +-func _() { +- type a *a +- var aaaa1, aaaa2 a +- var _ a = aaaa +- +- type b a +- var bbbb1, bbbb2 b +- var _ b = bbbb +-} +- +-type ( +- c *d +- d *e +- e **c +-) +- +-func _() { +- var ( +- xxxxc c +- xxxxd d +- xxxxe e +- ) +- +- var _ c = xxxx +- var _ d = xxxx +- var _ e = xxxx +-} +-` +- +- Run(t, files, func(t *testing.T, env *Env) { +- env.OpenFile("main.go") +- +- tests := []struct { +- re string +- want []string +- }{ +- {`var _ a = aaaa()`, []string{"aaaa1", "aaaa2"}}, +- {`var _ b = bbbb()`, []string{"bbbb1", "bbbb2"}}, +- {`var _ c = xxxx()`, []string{"xxxxc", "xxxxd", "xxxxe"}}, +- {`var _ d = xxxx()`, []string{"xxxxc", "xxxxd", "xxxxe"}}, +- {`var _ e = xxxx()`, []string{"xxxxc", "xxxxd", "xxxxe"}}, +- } +- for _, tt := range tests { +- completions := env.Completion(env.RegexpSearch("main.go", tt.re)) +- diff := compareCompletionLabels(tt.want, completions.Items) +- if diff != "" { +- t.Errorf("%s: %s", tt.re, diff) +- } +- } +- }) +-} +- +-func TestCompletionDeprecation(t *testing.T) { +- const files = ` +--- go.mod -- +-module test.com +- +-go 1.16 +--- prog.go -- +-package waste +-// Deprecated: use newFoof. +-func fooFunc() bool { +- return false +-} +- +-// Deprecated: bad. 
+-const badPi = 3.14 +- +-func doit() { +- if fooF +- panic() +- x := badP +-} +-` +- Run(t, files, func(t *testing.T, env *Env) { +- env.OpenFile("prog.go") +- loc := env.RegexpSearch("prog.go", "if fooF") +- loc.Range.Start.Character += uint32(protocol.UTF16Len([]byte("if fooF"))) +- completions := env.Completion(loc) +- diff := compareCompletionLabels([]string{"fooFunc"}, completions.Items) +- if diff != "" { +- t.Error(diff) +- } +- if completions.Items[0].Tags == nil { +- t.Errorf("expected Tags to show deprecation %#v", completions.Items[0].Tags) +- } +- loc = env.RegexpSearch("prog.go", "= badP") +- loc.Range.Start.Character += uint32(protocol.UTF16Len([]byte("= badP"))) +- completions = env.Completion(loc) +- diff = compareCompletionLabels([]string{"badPi"}, completions.Items) +- if diff != "" { +- t.Error(diff) +- } +- if completions.Items[0].Tags == nil { +- t.Errorf("expected Tags to show deprecation %#v", completions.Items[0].Tags) +- } +- }) +-} +- +-func TestUnimportedCompletion_VSCodeIssue1489(t *testing.T) { +- const src = ` +--- go.mod -- +-module mod.com +- +-go 1.14 +- +--- main.go -- +-package main +- +-import "fmt" +- +-func main() { +- fmt.Println("a") +- math.Sqr +-} +-` +- WithOptions( +- WindowsLineEndings(), +- Settings{"ui.completion.usePlaceholders": true}, +- Settings{"importsSource": settings.ImportsSourceGopls}, +- ).Run(t, src, func(t *testing.T, env *Env) { +- // Trigger unimported completions for the mod.com package. +- env.OpenFile("main.go") +- env.Await(env.DoneWithOpen()) +- loc := env.RegexpSearch("main.go", "Sqr()") +- completions := env.Completion(loc) +- if len(completions.Items) == 0 { +- t.Fatalf("no completion items") +- } +- env.AcceptCompletion(loc, completions.Items[0]) +- env.Await(env.DoneWithChange()) +- got := env.BufferText("main.go") +- want := "package main\r\n\r\nimport (\r\n\t\"fmt\"\r\n\t\"math\"\r\n)\r\n\r\nfunc main() {\r\n\tfmt.Println(\"a\")\r\n\tmath.Sqrt(${1:x float64})\r\n}\r\n" +- if diff := cmp.Diff(want, got); diff != "" { +- t.Errorf("unimported completion (-want +got):\n%s", diff) +- } +- }) +-} +- +-func TestUnimportedCompletion_VSCodeIssue3365(t *testing.T) { +- const src = ` +--- go.mod -- +-module mod.com +- +-go 1.19 +- +--- main.go -- +-package main +- +-func main() { +- println(strings.TLower) +-} +- +-var Lower = "" +-` +- find := func(t *testing.T, completions *protocol.CompletionList, name string) protocol.CompletionItem { +- t.Helper() +- if completions == nil || len(completions.Items) == 0 { +- t.Fatalf("no completion items") +- } +- for _, i := range completions.Items { +- if i.Label == name { +- return i +- } +- } +- t.Fatalf("no item with label %q", name) +- return protocol.CompletionItem{} +- } +- +- for _, supportInsertReplace := range []bool{true, false} { +- t.Run(fmt.Sprintf("insertReplaceSupport=%v", supportInsertReplace), func(t *testing.T) { +- capabilities := fmt.Sprintf(`{ "textDocument": { "completion": { "completionItem": {"insertReplaceSupport":%t, "snippetSupport": false } } } }`, supportInsertReplace) +- runner := WithOptions( +- CapabilitiesJSON([]byte(capabilities)), +- Settings{"importsSource": settings.ImportsSourceGopls}, +- ) +- runner.Run(t, src, func(t *testing.T, env *Env) { +- env.OpenFile("main.go") +- env.Await(env.DoneWithOpen()) +- orig := env.BufferText("main.go") +- +- // We try to trigger completion at "println(strings.T<>Lower)" +- // and accept the completion candidate that matches the 'accept' label. 
+- insertModeWant := "println(strings.ToUpperLower)" +- if !supportInsertReplace { +- insertModeWant = "println(strings.ToUpper)" +- } +- testcases := []struct { +- mode string +- accept string +- want string +- }{ +- { +- mode: "insert", +- accept: "ToUpper", +- want: insertModeWant, +- }, +- { +- mode: "insert", +- accept: "ToLower", +- want: "println(strings.ToLower)", // The suffix 'Lower' is included in the text edit. +- }, +- { +- mode: "replace", +- accept: "ToUpper", +- want: "println(strings.ToUpper)", +- }, +- { +- mode: "replace", +- accept: "ToLower", +- want: "println(strings.ToLower)", +- }, +- } +- +- for _, tc := range testcases { +- t.Run(fmt.Sprintf("%v/%v", tc.mode, tc.accept), func(t *testing.T) { +- +- env.SetSuggestionInsertReplaceMode(tc.mode == "replace") +- env.SetBufferContent("main.go", orig) +- loc := env.RegexpSearch("main.go", `Lower\)`) +- completions := env.Completion(loc) +- item := find(t, completions, tc.accept) +- env.AcceptCompletion(loc, item) +- env.Await(env.DoneWithChange()) +- got := env.BufferText("main.go") +- if !strings.Contains(got, tc.want) { +- t.Errorf("unexpected state after completion:\n%v\nwanted %v", got, tc.want) +- } +- }) +- } +- }) +- }) +- } +-} +-func TestUnimportedCompletionHasPlaceholders60269(t *testing.T) { +- // We can't express this as a marker test because it doesn't support AcceptCompletion. +- const src = ` +--- go.mod -- +-module example.com +-go 1.12 +- +--- a/a.go -- +-package a +- +-var _ = b.F +- +--- b/b.go -- +-package b +- +-func F0(a, b int, c float64) {} +-func F1(int, chan *string) {} +-func F2[K, V any](map[K]V, chan V) {} // missing type parameters was issue #60959 +-func F3[K comparable, V any](map[K]V, chan V) {} +-` +- WithOptions( +- WindowsLineEndings(), +- Settings{"ui.completion.usePlaceholders": true, +- "importsSource": settings.ImportsSourceGopls}, +- ).Run(t, src, func(t *testing.T, env *Env) { +- env.OpenFile("a/a.go") +- env.Await(env.DoneWithOpen()) +- +- // The table lists the expected completions of b.F as they appear in Items. +- const common = "package a\r\n\r\nimport \"example.com/b\"\r\n\r\nvar _ = " +- for i, want := range []string{ +- common + "b.F0(${1:a int}, ${2:b int}, ${3:c float64})\r\n", +- common + "b.F1(${1:_ int}, ${2:_ chan *string})\r\n", +- common + "b.F2(${1:_ map[K]V}, ${2:_ chan V})\r\n", +- common + "b.F3(${1:_ map[K]V}, ${2:_ chan V})\r\n", +- } { +- loc := env.RegexpSearch("a/a.go", "b.F()") +- completions := env.Completion(loc) +- if len(completions.Items) == 0 { +- t.Fatalf("no completion items") +- } +- saved := env.BufferText("a/a.go") +- env.AcceptCompletion(loc, completions.Items[i]) +- env.Await(env.DoneWithChange()) +- got := env.BufferText("a/a.go") +- if diff := cmp.Diff(want, got); diff != "" { +- t.Errorf("%d: unimported completion (-want +got):\n%s", i, diff) +- } +- env.SetBufferContent("a/a.go", saved) // restore +- } +- }) +-} +- +-func TestPackageMemberCompletionAfterSyntaxError(t *testing.T) { +- // This test documents the current broken behavior due to golang/go#58833. 
+- const src = ` +--- go.mod -- +-module mod.com +- +-go 1.14 +- +--- main.go -- +-package main +- +-import "math" +- +-func main() { +- math.Sqrt(,0) +- math.Ldex +-} +-` +- Run(t, src, func(t *testing.T, env *Env) { +- env.OpenFile("main.go") +- env.Await(env.DoneWithOpen()) +- loc := env.RegexpSearch("main.go", "Ldex()") +- completions := env.Completion(loc) +- if len(completions.Items) == 0 { +- t.Fatalf("no completion items") +- } +- env.AcceptCompletion(loc, completions.Items[0]) +- env.Await(env.DoneWithChange()) +- got := env.BufferText("main.go") +- // The completion of math.Ldex after the syntax error on the +- // previous line is not "math.Ldexp" but "math.Ldexmath.Abs". +- // (In VSCode, "Abs" wrongly appears in the completion menu.) +- // This is a consequence of poor error recovery in the parser +- // causing "math.Ldex" to become a BadExpr. +- want := "package main\n\nimport \"math\"\n\nfunc main() {\n\tmath.Sqrt(,0)\n\tmath.Ldexmath.Abs(${1:})\n}\n" +- if diff := cmp.Diff(want, got); diff != "" { +- t.Errorf("unimported completion (-want +got):\n%s", diff) +- } +- }) +-} +- +-func TestCompleteAllFields(t *testing.T) { +- // This test verifies that completion results always include all struct fields. +- // See golang/go#53992. +- +- const src = ` +--- go.mod -- +-module mod.com +- +-go 1.18 +- +--- p/p.go -- +-package p +- +-import ( +- "fmt" +- +- . "net/http" +- . "runtime" +- . "go/types" +- . "go/parser" +- . "go/ast" +-) +- +-type S struct { +- a, b, c, d, e, f, g, h, i, j, k, l, m int +- n, o, p, q, r, s, t, u, v, w, x, y, z int +-} +- +-func _() { +- var s S +- fmt.Println(s.) +-} +-` +- +- WithOptions(Settings{ +- "completionBudget": "1ns", // must be non-zero as 0 => infinity +- }).Run(t, src, func(t *testing.T, env *Env) { +- wantFields := make(map[string]bool) +- for c := 'a'; c <= 'z'; c++ { +- wantFields[string(c)] = true +- } +- +- env.OpenFile("p/p.go") +- // Make an arbitrary edit to ensure we're not hitting the cache. +- env.EditBuffer("p/p.go", fake.NewEdit(0, 0, 0, 0, fmt.Sprintf("// current time: %v\n", time.Now()))) +- loc := env.RegexpSearch("p/p.go", `s\.()`) +- completions := env.Completion(loc) +- gotFields := make(map[string]bool) +- for _, item := range completions.Items { +- if item.Kind == protocol.FieldCompletion { +- gotFields[item.Label] = true +- } +- } +- +- if diff := cmp.Diff(wantFields, gotFields); diff != "" { +- t.Errorf("Completion(...) 
returned mismatching fields (-want +got):\n%s", diff) +- } +- }) +-} +- +-func TestDefinition(t *testing.T) { +- files := ` +--- go.mod -- +-module mod.com +- +-go 1.18 +--- a_test.go -- +-package foo +-` +- tests := []struct { +- line string // the sole line in the buffer after the package statement +- pat string // the pattern to search for +- want []string // expected completions +- }{ +- {"func T", "T", []string{"TestXxx(t *testing.T)", "TestMain(m *testing.M)"}}, +- {"func T()", "T", []string{"TestMain", "Test"}}, +- {"func TestM", "TestM", []string{"TestMain(m *testing.M)", "TestM(t *testing.T)"}}, +- {"func TestM()", "TestM", []string{"TestMain"}}, +- {"func TestMi", "TestMi", []string{"TestMi(t *testing.T)"}}, +- {"func TestMi()", "TestMi", nil}, +- {"func TestG", "TestG", []string{"TestG(t *testing.T)"}}, +- {"func TestG(", "TestG", nil}, +- {"func Ben", "B", []string{"BenchmarkXxx(b *testing.B)"}}, +- {"func Ben(", "Ben", []string{"Benchmark"}}, +- {"func BenchmarkFoo", "BenchmarkFoo", []string{"BenchmarkFoo(b *testing.B)"}}, +- {"func BenchmarkFoo(", "BenchmarkFoo", nil}, +- {"func Fuz", "F", []string{"FuzzXxx(f *testing.F)"}}, +- {"func Fuz(", "Fuz", []string{"Fuzz"}}, +- {"func Testx", "Testx", nil}, +- {"func TestMe(t *testing.T)", "TestMe", nil}, +- {"func Te(t *testing.T)", "Te", []string{"TestMain", "Test"}}, +- } +- fname := "a_test.go" +- Run(t, files, func(t *testing.T, env *Env) { +- env.OpenFile(fname) +- env.Await(env.DoneWithOpen()) +- for _, test := range tests { +- env.SetBufferContent(fname, "package foo\n"+test.line) +- loc := env.RegexpSearch(fname, test.pat) +- loc.Range.Start.Character += uint32(protocol.UTF16Len([]byte(test.pat))) +- completions := env.Completion(loc) +- if diff := compareCompletionLabels(test.want, completions.Items); diff != "" { +- t.Error(diff) +- } +- } +- }) +-} +- +-// Test that completing a definition replaces source text when applied, golang/go#56852. +-func TestDefinitionReplaceRange(t *testing.T) { +- const mod = ` +--- go.mod -- +-module mod.com +- +-go 1.17 +-` +- +- tests := []struct { +- name string +- before, after string +- }{ +- { +- name: "func TestMa", +- before: ` +-package foo_test +- +-func TestMa +-`, +- after: ` +-package foo_test +- +-func TestMain(m *testing.M) +-`, +- }, +- { +- name: "func TestSome", +- before: ` +-package foo_test +- +-func TestSome +-`, +- after: ` +-package foo_test +- +-func TestSome(t *testing.T) +-`, +- }, +- { +- name: "func Bench", +- before: ` +-package foo_test +- +-func Bench +-`, +- // Note: Snippet with escaped }. 
+- after: ` +-package foo_test +- +-func Benchmark${1:Xxx}(b *testing.B) { +- $0 +-\} +-`, +- }, +- } +- +- Run(t, mod, func(t *testing.T, env *Env) { +- env.CreateBuffer("foo_test.go", "") +- +- for _, tst := range tests { +- tst.before = strings.Trim(tst.before, "\n") +- tst.after = strings.Trim(tst.after, "\n") +- env.SetBufferContent("foo_test.go", tst.before) +- +- loc := env.RegexpSearch("foo_test.go", tst.name) +- loc.Range.Start.Character = uint32(protocol.UTF16Len([]byte(tst.name))) +- completions := env.Completion(loc) +- if len(completions.Items) == 0 { +- t.Fatalf("no completion items") +- } +- +- env.AcceptCompletion(loc, completions.Items[0]) +- env.Await(env.DoneWithChange()) +- if buf := env.BufferText("foo_test.go"); buf != tst.after { +- t.Errorf("%s:incorrect completion: got %q, want %q", tst.name, buf, tst.after) +- } +- } +- }) +-} +- +-func TestGoWorkCompletion(t *testing.T) { +- const files = ` +--- go.work -- +-go 1.18 +- +-use ./a +-use ./a/ba +-use ./a/b/ +-use ./dir/foo +-use ./dir/foobar/ +-use ./missing/ +--- a/go.mod -- +--- go.mod -- +--- a/bar/go.mod -- +--- a/b/c/d/e/f/go.mod -- +--- dir/bar -- +--- dir/foobar/go.mod -- +-` +- +- Run(t, files, func(t *testing.T, env *Env) { +- env.OpenFile("go.work") +- +- tests := []struct { +- re string +- want []string +- }{ +- {`use ()\.`, []string{".", "./a", "./a/bar", "./dir/foobar"}}, +- {`use \.()`, []string{"", "/a", "/a/bar", "/dir/foobar"}}, +- {`use \./()`, []string{"a", "a/bar", "dir/foobar"}}, +- {`use ./a()`, []string{"", "/b/c/d/e/f", "/bar"}}, +- {`use ./a/b()`, []string{"/c/d/e/f", "ar"}}, +- {`use ./a/b/()`, []string{`c/d/e/f`}}, +- {`use ./a/ba()`, []string{"r"}}, +- {`use ./dir/foo()`, []string{"bar"}}, +- {`use ./dir/foobar/()`, []string{}}, +- {`use ./missing/()`, []string{}}, +- } +- for _, tt := range tests { +- completions := env.Completion(env.RegexpSearch("go.work", tt.re)) +- diff := compareCompletionLabels(tt.want, completions.Items) +- if diff != "" { +- t.Errorf("%s: %s", tt.re, diff) +- } +- } +- }) +-} +- +-const reverseInferenceSrcPrelude = ` +--- go.mod -- +-module mod.com +- +-go 1.18 +--- a.go -- +-package a +- +-type InterfaceA interface { +- implA() +-} +- +-type InterfaceB interface { +- implB() +-} +- +- +-type TypeA struct{} +- +-func (TypeA) implA() {} +- +-type TypeX string +- +-func (TypeX) implB() {} +- +-type TypeB struct{} +- +-func (TypeB) implB() {} +- +-type TypeC struct{} // should have no impact +- +-type Wrap[T any] struct { +- inner *T +-} +- +-func NewWrap[T any](x T) Wrap[T] { +- return Wrap[T]{inner: &x} +-} +- +-func DoubleWrap[T any, U any](t T, u U) (Wrap[T], Wrap[U]) { +- return Wrap[T]{inner: &t}, Wrap[U]{inner: &u} +-} +- +-func IntWrap[T int32 | int64](x T) Wrap[T] { +- return Wrap[T]{inner: &x} +-} +- +-var ia InterfaceA +-var ib InterfaceB +- +-var avar TypeA +-var bvar TypeB +- +-var i int +-var i32 int32 +-var i64 int64 +-` +- +-func TestReverseInferCompletion(t *testing.T) { +- src := reverseInferenceSrcPrelude + ` +- func main() { +- var _ Wrap[int64] = IntWrap() +- } +- ` +- Run(t, src, func(t *testing.T, env *Env) { +- compl := env.RegexpSearch("a.go", `IntWrap\(()\)`) +- +- env.OpenFile("a.go") +- result := env.Completion(compl) +- +- wantLabel := []string{"i64", "i", "i32", "int64()"} +- +- // only check the prefix due to formatting differences with escaped characters +- wantText := []string{"i64", "int64(i", "int64(i32", "int64("} +- +- for i, item := range result.Items[:len(wantLabel)] { +- if diff := cmp.Diff(wantLabel[i], item.Label); diff != 
"" { +- t.Errorf("Completion: unexpected label mismatch (-want +got):\n%s", diff) +- } +- +- if insertText, ok := item.TextEdit.Value.(protocol.InsertReplaceEdit); ok { +- if diff := cmp.Diff(wantText[i], insertText.NewText[:len(wantText[i])]); diff != "" { +- t.Errorf("Completion: unexpected insertText mismatch (checks prefix only) (-want +got):\n%s", diff) +- } +- } +- } +- }) +-} +- +-func TestInterfaceReverseInferCompletion(t *testing.T) { +- src := reverseInferenceSrcPrelude + ` +- func main() { +- var wa Wrap[InterfaceA] +- var wb Wrap[InterfaceB] +- wb = NewWrap() // wb is of type Wrap[InterfaceB] +- } +- ` +- +- Run(t, src, func(t *testing.T, env *Env) { +- compl := env.RegexpSearch("a.go", `NewWrap\(()\)`) +- +- env.OpenFile("a.go") +- result := env.Completion(compl) +- +- wantLabel := []string{"ib", "bvar", "wb.inner", "TypeB{}", "TypeX()", "nil"} +- +- // only check the prefix due to formatting differences with escaped characters +- wantText := []string{"ib", "InterfaceB(", "*wb.inner", "InterfaceB(", "InterfaceB(", "nil"} +- +- for i, item := range result.Items[:len(wantLabel)] { +- if diff := cmp.Diff(wantLabel[i], item.Label); diff != "" { +- t.Errorf("Completion: unexpected label mismatch (-want +got):\n%s", diff) +- } +- +- if insertText, ok := item.TextEdit.Value.(protocol.InsertReplaceEdit); ok { +- if diff := cmp.Diff(wantText[i], insertText.NewText[:len(wantText[i])]); diff != "" { +- t.Errorf("Completion: unexpected insertText mismatch (checks prefix only) (-want +got):\n%s", diff) +- } +- } +- } +- }) +-} +- +-func TestInvalidReverseInferenceDefaultsToConstraintCompletion(t *testing.T) { +- src := reverseInferenceSrcPrelude + ` +- func main() { +- var wa Wrap[InterfaceA] +- // This is ambiguous, so default to the constraint rather the inference. +- wa = IntWrap() +- } +- ` +- Run(t, src, func(t *testing.T, env *Env) { +- compl := env.RegexpSearch("a.go", `IntWrap\(()\)`) +- +- env.OpenFile("a.go") +- result := env.Completion(compl) +- +- wantLabel := []string{"i32", "i64", "nil"} +- +- for i, item := range result.Items[:len(wantLabel)] { +- if diff := cmp.Diff(wantLabel[i], item.Label); diff != "" { +- t.Errorf("Completion: unexpected label mismatch (-want +got):\n%s", diff) +- } +- } +- }) +-} +- +-func TestInterfaceReverseInferTypeParamCompletion(t *testing.T) { +- src := reverseInferenceSrcPrelude + ` +- func main() { +- var wa Wrap[InterfaceA] +- var wb Wrap[InterfaceB] +- wb = NewWrap[]() +- } +- ` +- +- Run(t, src, func(t *testing.T, env *Env) { +- compl := env.RegexpSearch("a.go", `NewWrap\[()\]\(\)`) +- +- env.OpenFile("a.go") +- result := env.Completion(compl) +- want := []string{"InterfaceB", "TypeB", "TypeX", "InterfaceA", "TypeA"} +- for i, item := range result.Items[:len(want)] { +- if diff := cmp.Diff(want[i], item.Label); diff != "" { +- t.Errorf("Completion: unexpected mismatch (-want +got):\n%s", diff) +- } +- } +- }) +-} +- +-func TestInvalidReverseInferenceTypeParamDefaultsToConstraintCompletion(t *testing.T) { +- src := reverseInferenceSrcPrelude + ` +- func main() { +- var wa Wrap[InterfaceA] +- // This is ambiguous, so default to the constraint rather the inference. 
+- wb = IntWrap[]() +- } +- ` +- +- Run(t, src, func(t *testing.T, env *Env) { +- compl := env.RegexpSearch("a.go", `IntWrap\[()\]\(\)`) +- +- env.OpenFile("a.go") +- result := env.Completion(compl) +- want := []string{"int32", "int64"} +- for i, item := range result.Items[:len(want)] { +- if diff := cmp.Diff(want[i], item.Label); diff != "" { +- t.Errorf("Completion: unexpected mismatch (-want +got):\n%s", diff) +- } +- } +- }) +-} +- +-func TestReverseInferDoubleTypeParamCompletion(t *testing.T) { +- src := reverseInferenceSrcPrelude + ` +- func main() { +- var wa Wrap[InterfaceA] +- var wb Wrap[InterfaceB] +- +- wa, wb = DoubleWrap[]() +- // _ is necessary to trick the parser into an index list expression +- wa, wb = DoubleWrap[InterfaceA, _]() +- } +- ` +- Run(t, src, func(t *testing.T, env *Env) { +- env.OpenFile("a.go") +- +- compl := env.RegexpSearch("a.go", `DoubleWrap\[()\]\(\)`) +- result := env.Completion(compl) +- +- wantLabel := []string{"InterfaceA", "TypeA", "InterfaceB", "TypeB", "TypeC"} +- +- for i, item := range result.Items[:len(wantLabel)] { +- if diff := cmp.Diff(wantLabel[i], item.Label); diff != "" { +- t.Errorf("Completion: unexpected label mismatch (-want +got):\n%s", diff) +- } +- } +- +- compl = env.RegexpSearch("a.go", `DoubleWrap\[InterfaceA, (_)\]\(\)`) +- result = env.Completion(compl) +- +- wantLabel = []string{"InterfaceB", "TypeB", "TypeX", "InterfaceA", "TypeA"} +- +- for i, item := range result.Items[:len(wantLabel)] { +- if diff := cmp.Diff(wantLabel[i], item.Label); diff != "" { +- t.Errorf("Completion: unexpected label mismatch (-want +got):\n%s", diff) +- } +- } +- }) +-} +- +-func TestDoubleParamReturnCompletion(t *testing.T) { +- src := reverseInferenceSrcPrelude + ` +- func concrete() (Wrap[InterfaceA], Wrap[InterfaceB]) { +- return DoubleWrap[]() +- } +- +- func concrete2() (Wrap[InterfaceA], Wrap[InterfaceB]) { +- return DoubleWrap[InterfaceA, _]() +- } +- ` +- +- Run(t, src, func(t *testing.T, env *Env) { +- env.OpenFile("a.go") +- +- tests := map[string][]string{ +- `DoubleWrap\[()\]\(\)`: {"InterfaceA", "TypeA", "InterfaceB", "TypeB", "TypeC"}, +- `DoubleWrap\[InterfaceA, (_)\]\(\)`: {"InterfaceB", "TypeB", "TypeX", "InterfaceA", "TypeA"}, +- } +- +- for re, wantLabels := range tests { +- compl := env.RegexpSearch("a.go", re) +- result := env.Completion(compl) +- if len(result.Items) < len(wantLabels) { +- t.Fatalf("Completion(%q) returned mismatching labels: got %v, want at least labels %v", re, result.Items, wantLabels) +- } +- for i, item := range result.Items[:len(wantLabels)] { +- if diff := cmp.Diff(wantLabels[i], item.Label); diff != "" { +- t.Errorf("Completion(%q): unexpected label mismatch (-want +got):\n%s", re, diff) +- } +- } +- } +- }) +-} +- +-func TestBuiltinCompletion(t *testing.T) { +- const files = ` +--- go.mod -- +-module mod.com +- +-go 1.18 +--- a.go -- +-package a +- +-func _() { +- // here +-} +-` +- +- Run(t, files, func(t *testing.T, env *Env) { +- env.OpenFile("a.go") +- result := env.Completion(env.RegexpSearch("a.go", `// here`)) +- builtins := []string{ +- "any", "append", "bool", "byte", "cap", "close", +- "comparable", "complex", "complex128", "complex64", "copy", "delete", +- "error", "false", "float32", "float64", "imag", "int", "int16", "int32", +- "int64", "int8", "len", "make", "new", "panic", "print", "println", "real", +- "recover", "rune", "string", "true", "uint", "uint16", "uint32", "uint64", +- "uint8", "uintptr", "nil", +- } +- if testenv.Go1Point() >= 21 { +- builtins = append(builtins, "clear", 
"max", "min") +- } +- sort.Strings(builtins) +- var got []string +- +- for _, item := range result.Items { +- // TODO(rfindley): for flexibility, ignore zero while it is being +- // implemented. Remove this if/when zero lands. +- if item.Label != "zero" { +- got = append(got, item.Label) +- } +- } +- sort.Strings(got) +- +- if diff := cmp.Diff(builtins, got); diff != "" { +- t.Errorf("Completion: unexpected mismatch (-want +got):\n%s", diff) +- } +- }) +-} +- +-func TestOverlayCompletion(t *testing.T) { +- const files = ` +--- go.mod -- +-module foo.test +- +-go 1.18 +- +--- foo/foo.go -- +-package foo +- +-type Foo struct{} +-` +- +- Run(t, files, func(t *testing.T, env *Env) { +- env.CreateBuffer("nodisk/nodisk.go", ` +-package nodisk +- +-import ( +- "foo.test/foo" +-) +- +-func _() { +- foo.Foo() +-} +-`) +- list := env.Completion(env.RegexpSearch("nodisk/nodisk.go", "foo.(Foo)")) +- want := []string{"Foo"} +- var got []string +- for _, item := range list.Items { +- got = append(got, item.Label) +- } +- if diff := cmp.Diff(want, got); diff != "" { +- t.Errorf("Completion: unexpected mismatch (-want +got):\n%s", diff) +- } +- }) +-} +- +-// Fix for golang/go#60062: unimported completion included "golang.org/toolchain" results. +-// and check that functions (from the standard library) have snippets +-func TestToolchainCompletions(t *testing.T) { +- const files = ` +--- go.mod -- +-module foo.test/foo +- +-go 1.21 +- +--- foo.go -- +-package foo +- +-func _() { +- os.Open +-} +- +-func _() { +- strings +-} +-` +- +- const proxy = ` +--- golang.org/toolchain@v0.0.1-go1.21.1.linux-amd64/go.mod -- +-module golang.org/toolchain +--- golang.org/toolchain@v0.0.1-go1.21.1.linux-amd64/src/os/os.go -- +-package os +- +-func Open() {} +--- golang.org/toolchain@v0.0.1-go1.21.1.linux-amd64/src/strings/strings.go -- +-package strings +- +-func Join() {} +-` +- +- WithOptions( +- ProxyFiles(proxy), +- Settings{"importsSource": settings.ImportsSourceGopls}, +- ).Run(t, files, func(t *testing.T, env *Env) { +- env.RunGoCommand("mod", "download", "golang.org/toolchain@v0.0.1-go1.21.1.linux-amd64") +- env.OpenFile("foo.go") +- +- for _, pattern := range []string{"os.Open()", "string()"} { +- loc := env.RegexpSearch("foo.go", pattern) +- res := env.Completion(loc) +- for _, item := range res.Items { +- if strings.Contains(item.Detail, "golang.org/toolchain") { +- t.Errorf("Completion(...) returned toolchain item %#v", item) +- } +- if strings.HasPrefix(item.Detail, "func") { +- // check that there are snippets +- x, ok := item.TextEdit.Value.(protocol.InsertReplaceEdit) +- if !ok { +- t.Errorf("item.TextEdit.Value unexpected type %T", item.TextEdit.Value) +- } +- if !strings.Contains(x.NewText, "${1") { +- t.Errorf("expected snippet in %q", x.NewText) +- } +- } +- } +- } +- }) +-} +- +-// when completing using the module cache, prefer things mentioned +-// in the go.mod file. +-func TestIssue61208(t *testing.T) { +- +- const cache = ` +--- example.com@v1.2.3/go.mod -- +-module example.com +- +-go 1.22 +--- example.com@v1.2.3/blah/blah.go -- +-package blah +- +-const Name = "Blah" +--- random.org@v1.2.3/go.mod -- +-module random.org +- +-go 1.22 +--- random.org@v1.2.3/blah/blah.go -- +-package blah +- +-const Name = "Hello" +-` +- const files = ` +--- go.mod -- +-module mod.com +-go 1.22 +-require random.org v1.2.3 +--- main.go -- +-package main +-var _ = blah. 
+-` +- modcache := t.TempDir() +- defer CleanModCache(t, modcache) +- mx := fake.UnpackTxt(cache) +- for k, v := range mx { +- fname := filepath.Join(modcache, k) +- dir := filepath.Dir(fname) +- os.MkdirAll(dir, 0777) // ignore error +- if err := os.WriteFile(fname, v, 0644); err != nil { +- t.Fatal(err) +- } +- } +- +- WithOptions( +- EnvVars{"GOMODCACHE": modcache}, +- WriteGoSum("."), +- Settings{"importsSource": settings.ImportsSourceGopls}, +- NoLogsOnError(), +- ).Run(t, files, func(t *testing.T, env *Env) { +- env.OpenFile("main.go") +- env.Await(env.DoneWithOpen()) +- loc := env.RegexpSearch("main.go", "blah.()") +- completions := env.Completion(loc) +- if len(completions.Items) != 1 { +- t.Errorf("got %d, expected 1", len(completions.Items)) +- for _, x := range completions.Items { +- t.Logf("%#v", x.AdditionalTextEdits[0].NewText) +- } +- } +- if got := completions.Items[0].AdditionalTextEdits[0].NewText; !strings.Contains(got, `"random.org`) { +- t.Errorf("got %q, expected a `random.org`", got) +- } +- }) +-} +- +-// show that the efficacy counters get exercised. Fortuntely a small program +-// exercises them all +-func TestCounters(t *testing.T) { +- const files = ` +--- go.mod -- +-module foo +-go 1.21 +--- x.go -- +-package foo +- +-func main() { +-} +- +-` +- WithOptions( +- Modes(Default), +- ).Run(t, files, func(t *testing.T, env *Env) { +- cts := func() map[*counter.Counter]uint64 { +- ans := make(map[*counter.Counter]uint64) +- for _, c := range server.CompletionCounters { +- ans[c], _ = countertest.ReadCounter(c) +- } +- return ans +- } +- before := cts() +- env.OpenFile("x.go") +- env.Await(env.DoneWithOpen()) +- saved := env.BufferText("x.go") +- lines := strings.Split(saved, "\n") +- // make sure the unused counter is exercised +- loc := env.RegexpSearch("x.go", "main") +- loc.Range.End = loc.Range.Start +- env.Completion(loc) // ignore the proposed completions +- env.RegexpReplace("x.go", "main", "Main") // completions are unused +- env.SetBufferContent("x.go", saved) // restore x.go +- // used:no +- +- // all the action is after 4 characters on line 2 (counting from 0) +- for i := 2; i < len(lines); i++ { +- l := lines[i] +- loc.Range.Start.Line = uint32(i) +- for j := 4; j < len(l); j++ { +- loc.Range.Start.Character = uint32(j) +- loc.Range.End = loc.Range.Start +- res := env.Completion(loc) +- if len(res.Items) > 0 { +- r := res.Items[0] +- env.AcceptCompletion(loc, r) +- env.SetBufferContent("x.go", saved) +- } +- } +- } +- after := cts() +- for c := range after { +- if after[c] <= before[c] { +- t.Errorf("%s did not increase", c.Name()) +- } +- } +- }) +-} +- +-// find import foo "bar" for foo.xxx +-func TestImportAlias(t *testing.T) { +- testenv.NeedsGoCommand1Point(t, 24) // we will find math/rand/v2 +- const files = ` +--- a.go -- +-package x +-var _ = xrand. 
+--- b.go -- +-package x +- +-import xrand "math/rand" +- +-var _ = xrand.Int() +- +--- go.mod -- +-module foo.com +- +-go 1.24.2 +-` +- Run(t, files, func(t *testing.T, env *Env) { +- env.OpenFile("a.go") +- env.Await(env.DoneWithOpen()) +- loc := env.RegexpSearch("a.go", "xrand.()") +- compls := env.Completion(loc) +- if len(compls.Items) == 0 { +- t.Fatal("no completions") +- } +- one := compls.Items[0].AdditionalTextEdits[0].NewText +- if one != "\nimport xrand \"math/rand\"\n" { +- t.Errorf("wrong import %q", one) +- } +- }) +-} +diff -urN a/gopls/internal/test/integration/completion/fixedbugs_test.go b/gopls/internal/test/integration/completion/fixedbugs_test.go +--- a/gopls/internal/test/integration/completion/fixedbugs_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/integration/completion/fixedbugs_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,57 +0,0 @@ +-// Copyright 2024 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package completion +- +-import ( +- "testing" +- +- . "golang.org/x/tools/gopls/internal/test/integration" +-) +- +-func TestPackageCompletionCrash_Issue68169(t *testing.T) { +- // This test reproduces the scenario of golang/go#68169, a crash in +- // completion.Selection.Suffix. +- // +- // The file content here is extracted from the issue. +- const files = ` +--- go.mod -- +-module example.com +- +-go 1.18 +--- playdos/play.go -- +-package // comment (to preserve spaces) +-` +- +- Run(t, files, func(t *testing.T, env *Env) { +- env.OpenFile("playdos/play.go") +- // Previously, this call would crash gopls as it was incorrectly computing +- // the surrounding completion suffix. +- completions := env.Completion(env.RegexpSearch("playdos/play.go", "package ()")) +- if len(completions.Items) == 0 { +- t.Fatal("Completion() returned empty results") +- } +- // Sanity check: we should get package clause completion. +- if got, want := completions.Items[0].Label, "package playdos"; got != want { +- t.Errorf("Completion()[0].Label == %s, want %s", got, want) +- } +- }) +-} +- +-func TestFixInitStatementCrash_Issue72026(t *testing.T) { +- // This test checks that we don't crash when the if condition overflows the +- // file (as is possible with a malformed struct type). +- +- const files = ` +--- go.mod -- +-module example.com +- +-go 1.18 +-` +- +- Run(t, files, func(t *testing.T, env *Env) { +- env.CreateBuffer("p.go", "package p\nfunc _() {\n\tfor i := struct") +- env.AfterChange() +- }) +-} +diff -urN a/gopls/internal/test/integration/completion/postfix_snippet_test.go b/gopls/internal/test/integration/completion/postfix_snippet_test.go +--- a/gopls/internal/test/integration/completion/postfix_snippet_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/integration/completion/postfix_snippet_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,762 +0,0 @@ +-// Copyright 2021 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package completion +- +-import ( +- "strings" +- "testing" +- +- . 
"golang.org/x/tools/gopls/internal/test/integration" +-) +- +-func TestPostfixSnippetCompletion(t *testing.T) { +- const mod = ` +--- go.mod -- +-module mod.com +- +-go 1.12 +-` +- +- cases := []struct { +- name string +- before, after string +- allowMultipleItem bool +- }{ +- { +- name: "sort", +- before: ` +-package foo +- +-func _() { +- var foo []int +- foo.sort +-} +-`, +- after: ` +-package foo +- +-import "sort" +- +-func _() { +- var foo []int +- sort.Slice(foo, func(i, j int) bool { +- $0 +-}) +-} +-`, +- }, +- { +- name: "sort_renamed_sort_package", +- before: ` +-package foo +- +-import blahsort "sort" +- +-var j int +- +-func _() { +- var foo []int +- foo.sort +-} +-`, +- after: ` +-package foo +- +-import blahsort "sort" +- +-var j int +- +-func _() { +- var foo []int +- blahsort.Slice(foo, func(i, j2 int) bool { +- $0 +-}) +-} +-`, +- }, +- { +- name: "last", +- before: ` +-package foo +- +-func _() { +- var s struct { i []int } +- s.i.last +-} +-`, +- after: ` +-package foo +- +-func _() { +- var s struct { i []int } +- s.i[len(s.i)-1] +-} +-`, +- }, +- { +- name: "reverse", +- before: ` +-package foo +- +-func _() { +- var foo []int +- foo.reverse +-} +-`, +- after: ` +-package foo +- +-import "slices" +- +-func _() { +- var foo []int +- slices.Reverse(foo) +-} +-`, +- }, +- { +- name: "slice_range", +- before: ` +-package foo +- +-func _() { +- type myThing struct{} +- var foo []myThing +- foo.range +-} +-`, +- after: ` +-package foo +- +-func _() { +- type myThing struct{} +- var foo []myThing +- for ${1:}, ${2:} := range foo { +- $0 +-} +-} +-`, +- }, +- { +- name: "append_stmt", +- before: ` +-package foo +- +-func _() { +- var foo []int +- foo.append +-} +-`, +- after: ` +-package foo +- +-func _() { +- var foo []int +- foo = append(foo, $0) +-} +-`, +- }, +- { +- name: "append_expr", +- before: ` +-package foo +- +-func _() { +- var foo []int +- var _ []int = foo.append +-} +-`, +- after: ` +-package foo +- +-func _() { +- var foo []int +- var _ []int = append(foo, $0) +-} +-`, +- }, +- { +- name: "slice_copy", +- before: ` +-package foo +- +-func _() { +- var foo []int +- foo.copy +-} +-`, +- after: ` +-package foo +- +-func _() { +- var foo []int +- fooCopy := make([]int, len(foo)) +-copy(fooCopy, foo) +- +-} +-`, +- }, +- { +- name: "map_range", +- before: ` +-package foo +- +-func _() { +- var foo map[string]int +- foo.range +-} +-`, +- after: ` +-package foo +- +-func _() { +- var foo map[string]int +- for ${1:}, ${2:} := range foo { +- $0 +-} +-} +-`, +- }, +- { +- name: "map_clear", +- before: ` +-package foo +- +-func _() { +- var foo map[string]int +- foo.clear +-} +-`, +- after: ` +-package foo +- +-func _() { +- var foo map[string]int +- for k := range foo { +- delete(foo, k) +-} +- +-} +-`, +- }, +- { +- name: "map_keys", +- before: ` +-package foo +- +-func _() { +- var foo map[string]int +- foo.keys +-} +-`, +- after: ` +-package foo +- +-func _() { +- var foo map[string]int +- keys := make([]string, 0, len(foo)) +-for k := range foo { +- keys = append(keys, k) +-} +- +-} +-`, +- }, +- { +- name: "channel_range", +- before: ` +-package foo +- +-func _() { +- foo := make(chan int) +- foo.range +-} +-`, +- after: ` +-package foo +- +-func _() { +- foo := make(chan int) +- for ${1:} := range foo { +- $0 +-} +-} +-`, +- }, +- { +- name: "var", +- before: ` +-package foo +- +-func foo() (int, error) { return 0, nil } +- +-func _() { +- foo().var +-} +-`, +- after: ` +-package foo +- +-func foo() (int, error) { return 0, nil } +- +-func _() { +- ${1:}, ${2:} 
:= foo() +-} +-`, +- allowMultipleItem: true, +- }, +- { +- name: "var_single_value", +- before: ` +-package foo +- +-func foo() error { return nil } +- +-func _() { +- foo().var +-} +-`, +- allowMultipleItem: true, +- after: ` +-package foo +- +-func foo() error { return nil } +- +-func _() { +- ${1:} := foo() +-} +-`, +- }, +- { +- name: "var_same_type", +- before: ` +-package foo +- +-func foo() (int, int) { return 0, 0 } +- +-func _() { +- foo().var +-} +-`, +- after: ` +-package foo +- +-func foo() (int, int) { return 0, 0 } +- +-func _() { +- ${1:}, ${2:} := foo() +-} +-`, +- }, +- { +- name: "print_scalar", +- before: ` +-package foo +- +-func _() { +- var foo int +- foo.print +-} +-`, +- after: ` +-package foo +- +-import "fmt" +- +-func _() { +- var foo int +- fmt.Printf("foo: %v\n", foo) +-} +-`, +- }, +- { +- name: "print_multi", +- before: ` +-package foo +- +-func foo() (int, error) { return 0, nil } +- +-func _() { +- foo().print +-} +-`, +- after: ` +-package foo +- +-import "fmt" +- +-func foo() (int, error) { return 0, nil } +- +-func _() { +- fmt.Println(foo()) +-} +-`, +- }, +- { +- name: "string split", +- before: ` +-package foo +- +-func foo() []string { +- x := "test" +- return x.split +-}`, +- after: ` +-package foo +- +-import "strings" +- +-func foo() []string { +- x := "test" +- return strings.Split(x, "$0") +-}`, +- }, +- { +- name: "string slice join", +- before: ` +-package foo +- +-func foo() string { +- x := []string{"a", "test"} +- return x.join +-}`, +- after: ` +-package foo +- +-import "strings" +- +-func foo() string { +- x := []string{"a", "test"} +- return strings.Join(x, "$0") +-}`, +- }, +- { +- name: "if not nil interface", +- before: ` +-package foo +- +-func _() { +- var foo error +- foo.ifnotnil +-} +-`, +- after: ` +-package foo +- +-func _() { +- var foo error +- if foo != nil { +- $0 +-} +-} +-`, +- }, +- { +- name: "if not nil pointer", +- before: ` +-package foo +- +-func _() { +- var foo *int +- foo.ifnotnil +-} +-`, +- after: ` +-package foo +- +-func _() { +- var foo *int +- if foo != nil { +- $0 +-} +-} +-`, +- }, +- { +- name: "if not nil slice", +- before: ` +-package foo +- +-func _() { +- var foo []int +- foo.ifnotnil +-} +-`, +- after: ` +-package foo +- +-func _() { +- var foo []int +- if foo != nil { +- $0 +-} +-} +-`, +- }, +- { +- name: "if not nil map", +- before: ` +-package foo +- +-func _() { +- var foo map[string]any +- foo.ifnotnil +-} +-`, +- after: ` +-package foo +- +-func _() { +- var foo map[string]any +- if foo != nil { +- $0 +-} +-} +-`, +- }, +- { +- name: "if not nil channel", +- before: ` +-package foo +- +-func _() { +- var foo chan int +- foo.ifnotnil +-} +-`, +- after: ` +-package foo +- +-func _() { +- var foo chan int +- if foo != nil { +- $0 +-} +-} +-`, +- }, +- { +- name: "if not nil function", +- before: ` +-package foo +- +-func _() { +- var foo func() +- foo.ifnotnil +-} +-`, +- after: ` +-package foo +- +-func _() { +- var foo func() +- if foo != nil { +- $0 +-} +-} +-`, +- }, +- { +- name: "slice_len", +- before: ` +-package foo +- +-func _() { +- var foo []int +- foo.len +-} +-`, +- after: ` +-package foo +- +-func _() { +- var foo []int +- len(foo) +-} +-`, +- }, +- { +- name: "map_len", +- before: ` +-package foo +- +-func _() { +- var foo map[string]int +- foo.len +-} +-`, +- after: ` +-package foo +- +-func _() { +- var foo map[string]int +- len(foo) +-} +-`, +- }, +- { +- name: "slice_for", +- allowMultipleItem: true, +- before: ` +-package foo +- +-func _() { +- var foo []int +- foo.for +-} 
+-`, +- after: ` +-package foo +- +-func _() { +- var foo []int +- for ${1:} := range foo { +- $0 +-} +-} +-`, +- }, +- { +- name: "map_for", +- allowMultipleItem: true, +- before: ` +-package foo +- +-func _() { +- var foo map[string]int +- foo.for +-} +-`, +- after: ` +-package foo +- +-func _() { +- var foo map[string]int +- for ${1:} := range foo { +- $0 +-} +-} +-`, +- }, +- { +- name: "chan_for", +- allowMultipleItem: true, +- before: ` +-package foo +- +-func _() { +- var foo chan int +- foo.for +-} +-`, +- after: ` +-package foo +- +-func _() { +- var foo chan int +- for ${1:} := range foo { +- $0 +-} +-} +-`, +- }, +- { +- name: "slice_forr", +- before: ` +-package foo +- +-func _() { +- var foo []int +- foo.forr +-} +-`, +- after: ` +-package foo +- +-func _() { +- var foo []int +- for ${1:}, ${2:} := range foo { +- $0 +-} +-} +-`, +- }, +- { +- name: "slice_forr", +- before: ` +-package foo +- +-func _() { +- var foo []int +- foo.forr +-} +-`, +- after: ` +-package foo +- +-func _() { +- var foo []int +- for ${1:}, ${2:} := range foo { +- $0 +-} +-} +-`, +- }, +- { +- name: "map_forr", +- before: ` +-package foo +- +-func _() { +- var foo map[string]int +- foo.forr +-} +-`, +- after: ` +-package foo +- +-func _() { +- var foo map[string]int +- for ${1:}, ${2:} := range foo { +- $0 +-} +-} +-`, +- }, +- } +- +- r := WithOptions( +- Settings{ +- "experimentalPostfixCompletions": true, +- }, +- ) +- r.Run(t, mod, func(t *testing.T, env *Env) { +- env.CreateBuffer("foo.go", "") +- +- for _, c := range cases { +- t.Run(c.name, func(t *testing.T) { +- c.before = strings.Trim(c.before, "\n") +- c.after = strings.Trim(c.after, "\n") +- +- env.SetBufferContent("foo.go", c.before) +- +- loc := env.RegexpSearch("foo.go", "\n}") +- completions := env.Completion(loc) +- if len(completions.Items) < 1 { +- t.Fatalf("expected at least one completion, got %v", completions.Items) +- } +- if !c.allowMultipleItem && len(completions.Items) > 1 { +- t.Fatalf("expected one completion, got %v", completions.Items) +- } +- +- env.AcceptCompletion(loc, completions.Items[0]) +- +- if buf := env.BufferText("foo.go"); buf != c.after { +- t.Errorf("\nGOT:\n%s\nEXPECTED:\n%s", buf, c.after) +- } +- }) +- } +- }) +-} +diff -urN a/gopls/internal/test/integration/debug/debug_test.go b/gopls/internal/test/integration/debug/debug_test.go +--- a/gopls/internal/test/integration/debug/debug_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/integration/debug/debug_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,101 +0,0 @@ +-// Copyright 2022 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package debug +- +-import ( +- "context" +- "encoding/json" +- "io" +- "net/http" +- "os" +- "strings" +- "testing" +- +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/gopls/internal/protocol/command" +- . "golang.org/x/tools/gopls/internal/test/integration" +- "golang.org/x/tools/gopls/internal/util/bug" +-) +- +-func TestMain(m *testing.M) { +- os.Exit(Main(m)) +-} +- +-func TestBugNotification(t *testing.T) { +- // Verify that a properly configured session gets notified of a bug on the +- // server. 
+- WithOptions( +- Modes(Default), // must be in-process to receive the bug report below +- Settings{"showBugReports": true}, +- ).Run(t, "", func(t *testing.T, env *Env) { +- const desc = "got a bug" +- bug.Report(desc) +- env.Await(ShownMessage(desc)) +- }) +-} +- +-// TestStartDebugging executes a gopls.start_debugging command to +-// start the internal web server. +-func TestStartDebugging(t *testing.T) { +- WithOptions( +- Modes(Default), // doesn't work in Forwarded mode +- ).Run(t, "", func(t *testing.T, env *Env) { +- // Start a debugging server. +- res, err := startDebugging(env.Ctx, env.Editor.Server, &command.DebuggingArgs{ +- Addr: "", // any free port +- }) +- if err != nil { +- t.Fatalf("startDebugging: %v", err) +- } +- +- // Assert that the server requested that the +- // client show the debug page in a browser. +- debugURL := res.URLs[0] +- env.Await(ShownDocument(debugURL)) +- +- // Send a request to the debug server and ensure it responds. +- resp, err := http.Get(debugURL) +- if err != nil { +- t.Fatal(err) +- } +- defer resp.Body.Close() +- data, err := io.ReadAll(resp.Body) +- if err != nil { +- t.Fatalf("reading HTTP response body: %v", err) +- } +- const want = "Gopls" +- if !strings.Contains(string(data), want) { +- t.Errorf("GET %s response does not contain %q: <<%s>>", debugURL, want, data) +- } +- }) +-} +- +-// startDebugging starts a debugging server. +-// TODO(adonovan): move into command package? +-func startDebugging(ctx context.Context, server protocol.Server, args *command.DebuggingArgs) (*command.DebuggingResult, error) { +- rawArgs, err := command.MarshalArgs(args) +- if err != nil { +- return nil, err +- } +- res0, err := server.ExecuteCommand(ctx, &protocol.ExecuteCommandParams{ +- Command: command.StartDebugging.String(), +- Arguments: rawArgs, +- }) +- if err != nil { +- return nil, err +- } +- // res0 is the result of a schemaless (map[string]any) JSON decoding. +- // Re-encode and decode into the correct Go struct type. +- // TODO(adonovan): fix (*serverDispatcher).ExecuteCommand. +- data, err := json.Marshal(res0) +- if err != nil { +- return nil, err +- } +- var res *command.DebuggingResult +- if err := json.Unmarshal(data, &res); err != nil { +- return nil, err +- } +- return res, nil +-} +diff -urN a/gopls/internal/test/integration/diagnostics/analysis_test.go b/gopls/internal/test/integration/diagnostics/analysis_test.go +--- a/gopls/internal/test/integration/diagnostics/analysis_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/integration/diagnostics/analysis_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,199 +0,0 @@ +-// Copyright 2022 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package diagnostics +- +-import ( +- "fmt" +- "testing" +- +- "golang.org/x/tools/gopls/internal/cache" +- "golang.org/x/tools/gopls/internal/protocol" +- . "golang.org/x/tools/gopls/internal/test/integration" +- "golang.org/x/tools/internal/testenv" +-) +- +-// Test for the timeformat analyzer, following golang/vscode-go#2406. +-// +-// This test checks that applying the suggested fix from the analyzer resolves +-// the diagnostic warning. 
+-func TestTimeFormatAnalyzer(t *testing.T) { +- const files = ` +--- go.mod -- +-module mod.com +- +-go 1.18 +--- main.go -- +-package main +- +-import ( +- "fmt" +- "time" +-) +- +-func main() { +- now := time.Now() +- fmt.Println(now.Format("2006-02-01")) +-}` +- +- Run(t, files, func(t *testing.T, env *Env) { +- env.OpenFile("main.go") +- +- var d protocol.PublishDiagnosticsParams +- env.AfterChange( +- Diagnostics(env.AtRegexp("main.go", "2006-02-01")), +- ReadDiagnostics("main.go", &d), +- ) +- +- env.ApplyQuickFixes("main.go", d.Diagnostics) +- env.AfterChange(NoDiagnostics(ForFile("main.go"))) +- }) +-} +- +-func TestAnalysisProgressReporting(t *testing.T) { +- const files = ` +--- go.mod -- +-module mod.com +- +-go 1.18 +- +--- main.go -- +-package main +- +-func main() { +-}` +- +- tests := []struct { +- setting bool +- want Expectation +- }{ +- {true, CompletedWork(cache.AnalysisProgressTitle, 1, true)}, +- {false, Not(CompletedWork(cache.AnalysisProgressTitle, 1, true))}, +- } +- +- for _, test := range tests { +- t.Run(fmt.Sprint(test.setting), func(t *testing.T) { +- WithOptions( +- Settings{ +- "reportAnalysisProgressAfter": "0s", +- "analysisProgressReporting": test.setting, +- }, +- ).Run(t, files, func(t *testing.T, env *Env) { +- env.OpenFile("main.go") +- env.AfterChange(test.want) +- }) +- }) +- } +-} +- +-// Test the embed directive analyzer. +-// +-// There is a fix for missing imports, but it should not trigger for other +-// kinds of issues reported by the analayzer, here the variable +-// declaration following the embed directive is wrong. +-func TestNoSuggestedFixesForEmbedDirectiveDeclaration(t *testing.T) { +- const generated = ` +--- go.mod -- +-module mod.com +- +-go 1.20 +- +--- foo.txt -- +-FOO +- +--- main.go -- +-package main +- +-import _ "embed" +- +-//go:embed foo.txt +-var foo, bar string +- +-func main() { +- _ = foo +-} +-` +- Run(t, generated, func(t *testing.T, env *Env) { +- env.OpenFile("main.go") +- var d protocol.PublishDiagnosticsParams +- env.AfterChange( +- Diagnostics(env.AtRegexp("main.go", "//go:embed")), +- ReadDiagnostics("main.go", &d), +- ) +- if fixes := env.GetQuickFixes("main.go", d.Diagnostics); len(fixes) != 0 { +- t.Errorf("got quick fixes %v, wanted none", fixes) +- } +- }) +-} +- +-func TestAnalysisFiltering(t *testing.T) { +- // This test checks that hint level diagnostics are only surfaced for open +- // files. +- +- const src = ` +--- go.mod -- +-module mod.com +- +-go 1.20 +- +--- a.go -- +-package p +- +-var X interface{} +- +--- b.go -- +-package p +- +-var Y interface{} +-` +- Run(t, src, func(t *testing.T, env *Env) { +- env.OpenFile("a.go") +- env.AfterChange( +- Diagnostics(ForFile("a.go"), WithMessage("replaced by any")), +- NoDiagnostics(ForFile("b.go")), +- ) +- }) +-} +- +-func TestModernizationConsistency_Issue75000(t *testing.T) { +- testenv.SkipAfterGoCommand1Point(t, 24) +- testenv.NeedsGoCommand1Point(t, 22) // uses range-over-int +- +- // This test checks that we don't offer modernization suggestions when the +- // ambient Go version is older than the modernized APIs. +- // +- // The code is from golang/go#75000, where gopls suggested to use +- // `waitgroup.Go` even though the user was on 1.24. 
+- +- const src = ` +--- main.go -- +-package main +- +-import ( +- "fmt" +- "sync" +-) +- +-func doit(i int) { +- fmt.Println("i = ", i) +-} +- +-func main() { +- var wg sync.WaitGroup +- for i := range 5 { +- wg.Add(1) +- go func() { +- defer wg.Done() +- doit(i) +- }() +- } +- +- wg.Wait() +-} +-` +- Run(t, src, func(t *testing.T, env *Env) { +- env.OpenFile("main.go") +- env.AfterChange(NoDiagnostics()) +- }) +-} +diff -urN a/gopls/internal/test/integration/diagnostics/builtin_test.go b/gopls/internal/test/integration/diagnostics/builtin_test.go +--- a/gopls/internal/test/integration/diagnostics/builtin_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/integration/diagnostics/builtin_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,35 +0,0 @@ +-// Copyright 2021 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package diagnostics +- +-import ( +- "strings" +- "testing" +- +- . "golang.org/x/tools/gopls/internal/test/integration" +-) +- +-func TestIssue44866(t *testing.T) { +- src := ` +--- go.mod -- +-module mod.com +- +-go 1.12 +--- a.go -- +-package a +- +-const ( +- c = iota +-) +-` +- Run(t, src, func(t *testing.T, env *Env) { +- env.OpenFile("a.go") +- loc := env.FirstDefinition(env.RegexpSearch("a.go", "iota")) +- if !strings.HasSuffix(string(loc.URI), "builtin.go") { +- t.Fatalf("jumped to %q, want builtin.go", loc.URI) +- } +- env.AfterChange(NoDiagnostics(ForFile("builtin.go"))) +- }) +-} +diff -urN a/gopls/internal/test/integration/diagnostics/diagnostics_test.go b/gopls/internal/test/integration/diagnostics/diagnostics_test.go +--- a/gopls/internal/test/integration/diagnostics/diagnostics_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/integration/diagnostics/diagnostics_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,2277 +0,0 @@ +-// Copyright 2020 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package diagnostics +- +-import ( +- "context" +- "fmt" +- "os" +- "os/exec" +- "testing" +- +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/gopls/internal/server" +- . "golang.org/x/tools/gopls/internal/test/integration" +- "golang.org/x/tools/gopls/internal/test/integration/fake" +- "golang.org/x/tools/gopls/internal/util/bug" +- "golang.org/x/tools/internal/testenv" +-) +- +-func TestMain(m *testing.M) { +- bug.PanicOnBugs = true +- os.Exit(Main(m)) +-} +- +-// Use mod.com for all go.mod files due to golang/go#35230. +-const exampleProgram = ` +--- go.mod -- +-module mod.com +- +-go 1.12 +--- main.go -- +-package main +- +-import "fmt" +- +-func main() { +- fmt.Println("Hello World.") +-}` +- +-func TestDiagnosticErrorInEditedFile(t *testing.T) { +- // This test is very basic: start with a clean Go program, make an error, and +- // get a diagnostic for that error. However, it also demonstrates how to +- // combine Expectations to await more complex state in the editor. +- RunMultiple{ +- {"golist", WithOptions(Modes(Default))}, +- {"gopackages", WithOptions( +- Modes(Default), +- FakeGoPackagesDriver(t), +- )}, +- }.Run(t, exampleProgram, func(t *testing.T, env *Env) { +- // Deleting the 'n' at the end of Println should generate a single error +- // diagnostic. 
+- env.OpenFile("main.go") +- env.RegexpReplace("main.go", "Printl(n)", "") +- env.AfterChange( +- Diagnostics(env.AtRegexp("main.go", "Printl")), +- // Assert that this test has sent no error logs to the client. This is not +- // strictly necessary for testing this regression, but is included here +- // as an example of using the NoErrorLogs() expectation. Feel free to +- // delete. +- NoErrorLogs(), +- ) +- }) +-} +- +-func TestMissingImportDiagsClearOnFirstFile(t *testing.T) { +- const onlyMod = ` +--- go.mod -- +-module mod.com +- +-go 1.12 +-` +- WithOptions( +- Settings{ +- "pullDiagnostics": true, +- }, +- ).Run(t, onlyMod, func(t *testing.T, env *Env) { +- env.CreateBuffer("main.go", `package main +- +-func _() { +- log.Println() +-} +-`) +- env.AfterChange(Diagnostics(env.AtRegexp("main.go", "log"))) +- env.SaveBuffer("main.go") +- if got := env.Diagnostics("main.go"); len(got) != 0 { +- t.Errorf("got %d diagnostics, want 0", len(got)) +- } +- env.AfterChange(NoDiagnostics(ForFile("main.go"))) +- }) +-} +- +-func TestDiagnosticErrorInNewFile(t *testing.T) { +- const brokenFile = `package main +- +-const Foo = "abc +-` +- RunMultiple{ +- {"golist", WithOptions(Modes(Default))}, +- // Since this test requires loading an overlay, +- // it verifies that the fake go/packages driver honors overlays. +- {"gopackages", WithOptions( +- Modes(Default), +- FakeGoPackagesDriver(t), +- )}, +- }.Run(t, brokenFile, func(t *testing.T, env *Env) { +- env.CreateBuffer("broken.go", brokenFile) +- env.AfterChange(Diagnostics(env.AtRegexp("broken.go", "\"abc"))) +- }) +-} +- +-// badPackage contains a duplicate definition of the 'A' const. +-const badPackage = ` +--- go.mod -- +-module mod.com +- +-go 1.12 +--- a.go -- +-package consts +- +-const A = 1 +--- b.go -- +-package consts +- +-const A = 2 +-` +- +-func TestDiagnosticClearingOnEdit(t *testing.T) { +- WithOptions( +- Settings{ +- "pullDiagnostics": true, +- }, +- ).Run(t, badPackage, func(t *testing.T, env *Env) { +- env.OpenFile("b.go") +- +- for _, f := range []string{"a.go", "b.go"} { +- if got := env.Diagnostics(f); len(got) != 1 { +- t.Errorf("textDocument/diagnostic(%s) returned %d diagnostics, want 1. Got %v", f, len(got), got) +- } +- } +- env.AfterChange( +- Diagnostics(env.AtRegexp("a.go", "A = 1")), +- Diagnostics(env.AtRegexp("b.go", "A = 2")), +- ) +- +- // Fix the error by editing the const name A in b.go to `B`. +- env.RegexpReplace("b.go", "(A) = 2", "B") +- for _, f := range []string{"a.go", "b.go"} { +- if got := env.Diagnostics(f); len(got) != 0 { +- t.Errorf("textDocument/diagnostic(%s) returned %d diagnostics, want 0. 
Got %v", f, len(got), got) +- } +- } +- env.AfterChange( +- NoDiagnostics(ForFile("a.go")), +- NoDiagnostics(ForFile("b.go")), +- ) +- }) +-} +- +-func TestDiagnosticClearingOnDelete_Issue37049(t *testing.T) { +- Run(t, badPackage, func(t *testing.T, env *Env) { +- env.OpenFile("a.go") +- env.AfterChange( +- Diagnostics(env.AtRegexp("a.go", "A = 1")), +- Diagnostics(env.AtRegexp("b.go", "A = 2")), +- ) +- env.RemoveWorkspaceFile("b.go") +- +- env.AfterChange( +- NoDiagnostics(ForFile("a.go")), +- NoDiagnostics(ForFile("b.go")), +- ) +- }) +-} +- +-func TestDiagnosticClearingOnClose(t *testing.T) { +- Run(t, badPackage, func(t *testing.T, env *Env) { +- env.CreateBuffer("c.go", `package consts +- +-const A = 3`) +- env.AfterChange( +- Diagnostics(env.AtRegexp("a.go", "A = 1")), +- Diagnostics(env.AtRegexp("b.go", "A = 2")), +- Diagnostics(env.AtRegexp("c.go", "A = 3")), +- ) +- env.CloseBuffer("c.go") +- env.AfterChange( +- Diagnostics(env.AtRegexp("a.go", "A = 1")), +- Diagnostics(env.AtRegexp("b.go", "A = 2")), +- NoDiagnostics(ForFile("c.go")), +- ) +- }) +-} +- +-// Tests golang/go#37978. +-func TestIssue37978(t *testing.T) { +- Run(t, exampleProgram, func(t *testing.T, env *Env) { +- // Create a new workspace-level directory and empty file. +- env.CreateBuffer("c/c.go", "") +- +- // Write the file contents with a missing import. +- env.EditBuffer("c/c.go", protocol.TextEdit{ +- NewText: `package c +- +-const A = http.MethodGet +-`, +- }) +- env.AfterChange( +- Diagnostics(env.AtRegexp("c/c.go", "http.MethodGet")), +- ) +- // Save file, which will organize imports, adding the expected import. +- // Expect the diagnostics to clear. +- env.SaveBuffer("c/c.go") +- env.AfterChange( +- NoDiagnostics(ForFile("c/c.go")), +- ) +- }) +-} +- +-// Tests golang/go#38878: good a.go, bad a_test.go, remove a_test.go but its errors remain +-// If the file is open in the editor, this is working as intended +-// If the file is not open in the editor, the errors go away +-const test38878 = ` +--- go.mod -- +-module foo +- +-go 1.12 +--- a.go -- +-package x +- +-// import "fmt" +- +-func f() {} +- +--- a_test.go -- +-package x +- +-import "testing" +- +-func TestA(t *testing.T) { +- f(3) +-} +-` +- +-// Tests golang/go#38878: deleting a test file should clear its errors, and +-// not break the workspace. +-func TestDeleteTestVariant(t *testing.T) { +- Run(t, test38878, func(t *testing.T, env *Env) { +- env.AfterChange(Diagnostics(env.AtRegexp("a_test.go", `f\((3)\)`))) +- env.RemoveWorkspaceFile("a_test.go") +- env.AfterChange(NoDiagnostics(ForFile("a_test.go"))) +- +- // Make sure the test variant has been removed from the workspace by +- // triggering a metadata load. +- env.OpenFile("a.go") +- env.RegexpReplace("a.go", `// import`, "import") +- env.AfterChange(Diagnostics(env.AtRegexp("a.go", `"fmt"`))) +- }) +-} +- +-// Tests golang/go#38878: deleting a test file on disk while it's still open +-// should not clear its errors. +-func TestDeleteTestVariant_DiskOnly(t *testing.T) { +- Run(t, test38878, func(t *testing.T, env *Env) { +- env.OpenFile("a_test.go") +- env.AfterChange(Diagnostics(AtPosition("a_test.go", 5, 3))) +- env.Sandbox.Workdir.RemoveFile(context.Background(), "a_test.go") // ignore error +- env.AfterChange(Diagnostics(AtPosition("a_test.go", 5, 3))) +- }) +-} +- +-// TestNoMod confirms that gopls continues to work when a user adds a go.mod +-// file to their workspace. 
+-func TestNoMod(t *testing.T) { +- const noMod = ` +--- main.go -- +-package main +- +-import "mod.com/bob" +- +-func main() { +- bob.Hello() +-} +--- bob/bob.go -- +-package bob +- +-func Hello() { +- var x int +-} +-` +- +- t.Run("manual", func(t *testing.T) { +- Run(t, noMod, func(t *testing.T, env *Env) { +- env.OnceMet( +- InitialWorkspaceLoad, +- Diagnostics(env.AtRegexp("main.go", `"mod.com/bob"`)), +- ) +- env.CreateBuffer("go.mod", `module mod.com +- +- go 1.12 +-`) +- env.SaveBuffer("go.mod") +- var d protocol.PublishDiagnosticsParams +- env.AfterChange( +- NoDiagnostics(ForFile("main.go")), +- Diagnostics(env.AtRegexp("bob/bob.go", "x")), +- ReadDiagnostics("bob/bob.go", &d), +- ) +- if len(d.Diagnostics) != 1 { +- t.Fatalf("expected 1 diagnostic, got %v", len(d.Diagnostics)) +- } +- }) +- }) +- t.Run("initialized", func(t *testing.T) { +- Run(t, noMod, func(t *testing.T, env *Env) { +- env.OnceMet( +- InitialWorkspaceLoad, +- Diagnostics(env.AtRegexp("main.go", `"mod.com/bob"`)), +- ) +- env.RunGoCommand("mod", "init", "mod.com") +- env.AfterChange( +- NoDiagnostics(ForFile("main.go")), +- Diagnostics(env.AtRegexp("bob/bob.go", "x")), +- ) +- }) +- }) +- +- t.Run("without workspace module", func(t *testing.T) { +- WithOptions( +- Modes(Default), +- ).Run(t, noMod, func(t *testing.T, env *Env) { +- env.OnceMet( +- InitialWorkspaceLoad, +- Diagnostics(env.AtRegexp("main.go", `"mod.com/bob"`)), +- ) +- if _, err := env.Sandbox.RunGoCommand(env.Ctx, "", "mod", []string{"init", "mod.com"}, nil, true); err != nil { +- t.Fatal(err) +- } +- env.AfterChange( +- NoDiagnostics(ForFile("main.go")), +- Diagnostics(env.AtRegexp("bob/bob.go", "x")), +- ) +- }) +- }) +-} +- +-// Tests golang/go#38267. +-func TestIssue38267(t *testing.T) { +- const testPackage = ` +--- go.mod -- +-module mod.com +- +-go 1.12 +--- lib.go -- +-package lib +- +-func Hello(x string) { +- _ = x +-} +--- lib_test.go -- +-package lib +- +-import "testing" +- +-type testStruct struct{ +- name string +-} +- +-func TestHello(t *testing.T) { +- testStructs := []*testStruct{ +- &testStruct{"hello"}, +- &testStruct{"goodbye"}, +- } +- for y := range testStructs { +- _ = y +- } +-} +-` +- +- Run(t, testPackage, func(t *testing.T, env *Env) { +- env.OpenFile("lib_test.go") +- env.AfterChange( +- Diagnostics(AtPosition("lib_test.go", 10, 2)), +- Diagnostics(AtPosition("lib_test.go", 11, 2)), +- ) +- env.OpenFile("lib.go") +- env.RegexpReplace("lib.go", "_ = x", "var y int") +- env.AfterChange( +- Diagnostics(env.AtRegexp("lib.go", "y int")), +- NoDiagnostics(ForFile("lib_test.go")), +- ) +- }) +-} +- +-// Tests golang/go#38328. 
+-func TestPackageChange_Issue38328(t *testing.T) { +- const packageChange = ` +--- go.mod -- +-module fake +- +-go 1.12 +--- a.go -- +-package foo +-func main() {} +-` +- Run(t, packageChange, func(t *testing.T, env *Env) { +- env.OpenFile("a.go") +- env.RegexpReplace("a.go", "foo", "foox") +- env.AfterChange( +- NoDiagnostics(ForFile("a.go")), +- ) +- }) +-} +- +-const testPackageWithRequire = ` +--- go.mod -- +-module mod.com +- +-go 1.12 +- +-require foo.test v1.2.3 +--- print.go -- +-package lib +- +-import ( +- "fmt" +- +- "foo.test/bar" +-) +- +-func PrintAnswer() { +- fmt.Printf("answer: %s", bar.Answer) +-} +-` +- +-const testPackageWithRequireProxy = ` +--- foo.test@v1.2.3/go.mod -- +-module foo.test +- +-go 1.12 +--- foo.test@v1.2.3/bar/const.go -- +-package bar +- +-const Answer = 42 +-` +- +-func TestResolveDiagnosticWithDownload(t *testing.T) { +- WithOptions( +- WriteGoSum("."), +- ProxyFiles(testPackageWithRequireProxy), +- ).Run(t, testPackageWithRequire, func(t *testing.T, env *Env) { +- env.OpenFile("print.go") +- // Check that gopackages correctly loaded this dependency. We should get a +- // diagnostic for the wrong formatting type. +- env.AfterChange( +- Diagnostics( +- env.AtRegexp("print.go", "%s"), +- WithMessage("wrong type int"), +- ), +- ) +- }) +-} +- +-func TestMissingDependency(t *testing.T) { +- Run(t, testPackageWithRequire, func(t *testing.T, env *Env) { +- env.OpenFile("print.go") +- env.Await( +- // Log messages are asynchronous to other events on the LSP stream, so we +- // can't use OnceMet or AfterChange here. +- LogMatching(protocol.Error, "initial workspace load failed", 1, false), +- ) +- }) +-} +- +-// Tests golang/go#36951. +-func TestAdHocPackages_Issue36951(t *testing.T) { +- const adHoc = ` +--- b/b.go -- +-package b +- +-func Hello() { +- var x int +-} +-` +- Run(t, adHoc, func(t *testing.T, env *Env) { +- env.OpenFile("b/b.go") +- env.AfterChange( +- Diagnostics(env.AtRegexp("b/b.go", "x")), +- ) +- }) +-} +- +-// Tests golang/go#37984: GOPATH should be read from the go command. +-func TestNoGOPATH_Issue37984(t *testing.T) { +- const files = ` +--- main.go -- +-package main +- +-func _() { +- fmt.Println("Hello World") +-} +-` +- WithOptions( +- EnvVars{ +- "GOPATH": "", +- "GO111MODULE": "off", +- }, +- ).Run(t, files, func(t *testing.T, env *Env) { +- env.OpenFile("main.go") +- env.AfterChange(Diagnostics(env.AtRegexp("main.go", "fmt"))) +- env.SaveBuffer("main.go") +- env.AfterChange(NoDiagnostics(ForFile("main.go"))) +- }) +-} +- +-// Tests golang/go#38669. +-func TestEqualInEnv_Issue38669(t *testing.T) { +- const files = ` +--- go.mod -- +-module mod.com +- +-go 1.12 +--- main.go -- +-package main +- +-var _ = x.X +--- x/x.go -- +-package x +- +-var X = 0 +-` +- WithOptions( +- EnvVars{"GOFLAGS": "-tags=foo"}, +- ).Run(t, files, func(t *testing.T, env *Env) { +- env.OpenFile("main.go") +- env.OrganizeImports("main.go") +- env.AfterChange(NoDiagnostics(ForFile("main.go"))) +- }) +-} +- +-// Tests golang/go#38467. +-func TestNoSuggestedFixesForGeneratedFiles_Issue38467(t *testing.T) { +- // This test ensures that gopls' CodeAction handler suppresses +- // diagnostics in generated code. Beware that many analyzers +- // themselves suppress diagnostics in generated files, in +- // particular the low-status "simplifiers" (modernize, +- // simplify{range,slice,compositelit}), so we use the hostport +- // analyzer here. +- const generated = ` +--- go.mod -- +-module mod.com +- +-go 1.12 +--- main.go -- +-// Code generated by generator.go. 
DO NOT EDIT. +- +-package main +- +-import ("fmt"; "net") +- +-func _() { +- addr := fmt.Sprintf("%s:%d", "localhost", 12345) +- net.Dial("tcp", addr) +-} +-` +- Run(t, generated, func(t *testing.T, env *Env) { +- env.OpenFile("main.go") +- var d protocol.PublishDiagnosticsParams +- env.AfterChange( +- Diagnostics(AtPosition("main.go", 7, 21)), +- ReadDiagnostics("main.go", &d), +- ) +- if fixes := env.GetQuickFixes("main.go", d.Diagnostics); len(fixes) != 0 { +- t.Errorf("got quick fixes %v, wanted none", fixes) +- } +- }) +-} +- +-// Expect a module/GOPATH error if there is an error in the file at startup. +-// Tests golang/go#37279. +-func TestBrokenWorkspace_OutsideModule(t *testing.T) { +- const noModule = ` +--- a.go -- +-package foo +- +-import "mod.com/hello" +- +-func f() { +- hello.Goodbye() +-} +-` +- Run(t, noModule, func(t *testing.T, env *Env) { +- env.OpenFile("a.go") +- env.AfterChange( +- // AdHoc views are not critical errors, but their missing import +- // diagnostics should specifically mention GOROOT or GOPATH (and not +- // modules). +- NoOutstandingWork(IgnoreTelemetryPromptWork), +- Diagnostics( +- env.AtRegexp("a.go", `"mod.com`), +- WithMessage("in GOROOT"), +- ), +- ) +- // Deleting the import dismisses the warning. +- env.RegexpReplace("a.go", `import "mod.com/hello"`, "") +- env.AfterChange( +- NoOutstandingWork(IgnoreTelemetryPromptWork), +- ) +- }) +-} +- +-func TestNonGoFolder(t *testing.T) { +- const files = ` +--- hello.txt -- +-hi mom +-` +- for _, go111module := range []string{"on", "off", ""} { +- t.Run(fmt.Sprintf("GO111MODULE_%v", go111module), func(t *testing.T) { +- WithOptions( +- EnvVars{"GO111MODULE": go111module}, +- ).Run(t, files, func(t *testing.T, env *Env) { +- env.OnceMet( +- InitialWorkspaceLoad, +- NoOutstandingWork(IgnoreTelemetryPromptWork), +- ) +- }) +- }) +- } +-} +- +-// Tests the repro case from golang/go#38602. Diagnostics are now handled properly, +-// which blocks type checking. +-func TestConflictingMainPackageErrors(t *testing.T) { +- const collision = ` +--- x/x.go -- +-package x +- +-import "x/hello" +- +-func Hello() { +- hello.HiThere() +-} +--- x/main.go -- +-package main +- +-func main() { +- fmt.Println("") +-} +-` +- WithOptions( +- InGOPATH(), +- EnvVars{"GO111MODULE": "off"}, +- ).Run(t, collision, func(t *testing.T, env *Env) { +- env.OpenFile("x/x.go") +- env.AfterChange( +- Diagnostics(env.AtRegexp("x/x.go", `^`), WithMessage("found packages main (main.go) and x (x.go)")), +- Diagnostics(env.AtRegexp("x/main.go", `^`), WithMessage("found packages main (main.go) and x (x.go)")), +- ) +- +- // We don't recover cleanly from the errors without good overlay support. +- if testenv.Go1Point() >= 16 { +- env.RegexpReplace("x/x.go", `package x`, `package main`) +- env.AfterChange( +- Diagnostics(env.AtRegexp("x/main.go", `fmt`)), +- ) +- } +- }) +-} +- +-const ardanLabsProxy = ` +--- github.com/ardanlabs/conf@v1.2.3/go.mod -- +-module github.com/ardanlabs/conf +- +-go 1.12 +--- github.com/ardanlabs/conf@v1.2.3/conf.go -- +-package conf +- +-var ErrHelpWanted error +-` +- +-// Test for golang/go#38211. 
+-func Test_issue38211(t *testing.T) { +- const ardanLabs = ` +--- go.mod -- +-module mod.com +- +-go 1.14 +--- main.go -- +-package main +- +-import "github.com/ardanlabs/conf" +- +-func main() { +- _ = conf.ErrHelpWanted +-} +-` +- +- modcache := t.TempDir() +- defer CleanModCache(t, modcache) +- +- opts := []RunOption{ +- EnvVars{"GOMODCACHE": modcache}, +- ProxyFiles(ardanLabsProxy), +- //WriteGoSum("."), // TODO(golang/go#74594): uncommenting this causes mysterious failure; investigate and make the error clearer (go list?) +- } +- +- t.Run("setup", func(t *testing.T) { +- // Forcibly populate GOMODCACHE +- // so OrganizeImports can later rely on it. +- WithOptions(opts...).Run(t, ardanLabs, func(t *testing.T, env *Env) { +- // TODO(adonovan): why doesn't RunGoCommand respect EnvVars?? +- // (That was the motivation to use Sandbox.RunGoCommand +- // rather than execute go mod download directly!) +- // See comment at CleanModCache and golang/go#74595. +- environ := []string{"GOMODCACHE=" + modcache} +- _, err := env.Sandbox.RunGoCommand(env.Ctx, "", "get", []string{"github.com/ardanlabs/conf@v1.2.3"}, environ, false) +- if err != nil { +- t.Error(err) +- } +- }) +- }) +- +- WithOptions(opts...).Run(t, ardanLabs, func(t *testing.T, env *Env) { +- // Expect a "no module provides package" diagnostic. +- env.OpenFile("go.mod") +- env.OpenFile("main.go") +- var d protocol.PublishDiagnosticsParams +- env.AfterChange( +- Diagnostics( +- env.AtRegexp("main.go", `"github.com/ardanlabs/conf"`), +- WithMessage("no required module provides package")), +- ReadDiagnostics("main.go", &d), +- ) +- +- // Apply the suggested fix to make the go.mod file +- // require "github.com/ardanlabs/conf". +- env.ApplyQuickFixes("main.go", d.Diagnostics) +- env.SaveBuffer("go.mod") +- env.AfterChange( +- NoDiagnostics(ForFile("main.go")), +- ) +- +- // Comment out the sole use of conf, +- // causing an "unused import" diagnostic. +- env.RegexpReplace("main.go", "_ = conf.ErrHelpWanted", "//_ = conf.ErrHelpWanted") +- env.AfterChange( +- Diagnostics( +- env.AtRegexp("main.go", `"github.com/ardanlabs/conf"`), +- WithMessage("imported and not used")), +- ) +- +- // Remove the import using OrganizeImports, leading +- // to an "unused require" diagnostic in the go.mod. +- env.SaveBuffer("main.go") // => OrganizeImports +- env.AfterChange( +- NoDiagnostics(ForFile("main.go")), +- Diagnostics( +- env.AtRegexp("go.mod", "require github.com/ardanlabs/conf"), +- WithMessage("not used in this module")), +- ReadDiagnostics("go.mod", &d), +- ) +- +- // Apply the suggested fix to remove the "require" directive. +- env.ApplyQuickFixes("go.mod", d.Diagnostics) +- env.SaveBuffer("go.mod") +- env.AfterChange( +- NoDiagnostics(ForFile("go.mod")), +- ) +- +- // Uncomment the use of the import. +- // OrganizeImports should add the import. +- // Expect another "no required module provides package" +- // diagnostic, bringing us full circle. +- env.RegexpReplace("main.go", "//_ = conf.ErrHelpWanted", "_ = conf.ErrHelpWanted") +- env.SaveBuffer("main.go") // => OrganizeImports +- env.AfterChange( +- Diagnostics( +- env.AtRegexp("main.go", `"github.com/ardanlabs/conf"`), +- WithMessage("no required module provides package")), +- ) +- }) +-} +- +-// Test for golang/go#38207. 
+-func TestNewModule_Issue38207(t *testing.T) { +- const emptyFile = ` +--- go.mod -- +-module mod.com +- +-go 1.12 +--- main.go -- +-` +- WithOptions( +- ProxyFiles(ardanLabsProxy), +- ).Run(t, emptyFile, func(t *testing.T, env *Env) { +- env.CreateBuffer("main.go", `package main +- +-import "github.com/ardanlabs/conf" +- +-func main() { +- _ = conf.ErrHelpWanted +-} +-`) +- env.SaveBuffer("main.go") +- var d protocol.PublishDiagnosticsParams +- env.AfterChange( +- Diagnostics(env.AtRegexp("main.go", `"github.com/ardanlabs/conf"`), WithMessage("no required module")), +- ReadDiagnostics("main.go", &d), +- ) +- env.ApplyQuickFixes("main.go", d.Diagnostics) +- env.AfterChange( +- NoDiagnostics(ForFile("main.go")), +- ) +- }) +-} +- +-// Test for golang/go#36960. +-func TestNewFileBadImports_Issue36960(t *testing.T) { +- const simplePackage = ` +--- go.mod -- +-module mod.com +- +-go 1.14 +--- a/a1.go -- +-package a +- +-import "fmt" +- +-func _() { +- fmt.Println("hi") +-} +-` +- Run(t, simplePackage, func(t *testing.T, env *Env) { +- env.OpenFile("a/a1.go") +- env.CreateBuffer("a/a2.go", ``) +- env.SaveBufferWithoutActions("a/a2.go") +- env.AfterChange( +- NoDiagnostics(ForFile("a/a1.go")), +- ) +- env.EditBuffer("a/a2.go", fake.NewEdit(0, 0, 0, 0, `package a`)) +- env.AfterChange( +- NoDiagnostics(ForFile("a/a1.go")), +- ) +- }) +-} +- +-// This test tries to replicate the workflow of a user creating a new x test. +-// It also tests golang/go#39315. +-func TestManuallyCreatingXTest(t *testing.T) { +- // Create a package that already has a test variant (in-package test). +- const testVariant = ` +--- go.mod -- +-module mod.com +- +-go 1.15 +--- hello/hello.go -- +-package hello +- +-func Hello() { +- var x int +-} +--- hello/hello_test.go -- +-package hello +- +-import "testing" +- +-func TestHello(t *testing.T) { +- var x int +- Hello() +-} +-` +- Run(t, testVariant, func(t *testing.T, env *Env) { +- // Open the file, triggering the workspace load. +- // There are errors in the code to ensure all is working as expected. +- env.OpenFile("hello/hello.go") +- env.AfterChange( +- Diagnostics(env.AtRegexp("hello/hello.go", "x")), +- Diagnostics(env.AtRegexp("hello/hello_test.go", "x")), +- ) +- +- // Create an empty file with the intention of making it an x test. +- // This resembles a typical flow in an editor like VS Code, in which +- // a user would create an empty file and add content, saving +- // intermittently. +- // TODO(rstambler): There might be more edge cases here, as file +- // content can be added incrementally. +- env.CreateBuffer("hello/hello_x_test.go", ``) +- +- // Save the empty file (no actions since formatting will fail). +- env.SaveBufferWithoutActions("hello/hello_x_test.go") +- +- // Add the content. The missing import is for the package under test. +- env.EditBuffer("hello/hello_x_test.go", fake.NewEdit(0, 0, 0, 0, `package hello_test +- +-import ( +- "testing" +-) +- +-func TestHello(t *testing.T) { +- hello.Hello() +-} +-`)) +- // Expect a diagnostic for the missing import. Save, which should +- // trigger import organization. The diagnostic should clear. +- env.AfterChange( +- Diagnostics(env.AtRegexp("hello/hello_x_test.go", "hello.Hello")), +- ) +- env.SaveBuffer("hello/hello_x_test.go") +- env.AfterChange( +- NoDiagnostics(ForFile("hello/hello_x_test.go")), +- ) +- }) +-} +- +-// Reproduce golang/go#40690. 
+-func TestCreateOnlyXTest(t *testing.T) { +- const mod = ` +--- go.mod -- +-module mod.com +- +-go 1.12 +--- foo/foo.go -- +-package foo +--- foo/bar_test.go -- +-` +- Run(t, mod, func(t *testing.T, env *Env) { +- env.OpenFile("foo/bar_test.go") +- env.EditBuffer("foo/bar_test.go", fake.NewEdit(0, 0, 0, 0, "package foo")) +- env.Await(env.DoneWithChange()) +- env.RegexpReplace("foo/bar_test.go", "package foo", `package foo_test +- +-import "testing" +- +-func TestX(t *testing.T) { +- var x int +-} +-`) +- env.AfterChange( +- Diagnostics(env.AtRegexp("foo/bar_test.go", "x")), +- ) +- }) +-} +- +-func TestChangePackageName(t *testing.T) { +- const mod = ` +--- go.mod -- +-module mod.com +- +-go 1.12 +--- foo/foo.go -- +-package foo +--- foo/bar_test.go -- +-package foo_ +-` +- Run(t, mod, func(t *testing.T, env *Env) { +- env.OpenFile("foo/bar_test.go") +- env.AfterChange() +- env.RegexpReplace("foo/bar_test.go", "package foo_", "package foo_test") +- env.AfterChange( +- NoDiagnostics(ForFile("foo/bar_test.go")), +- NoDiagnostics(ForFile("foo/foo.go")), +- ) +- }) +-} +- +-func TestIgnoredFiles(t *testing.T) { +- const ws = ` +--- go.mod -- +-module mod.com +- +-go 1.12 +--- _foo/x.go -- +-package x +- +-var _ = foo.Bar +-` +- Run(t, ws, func(t *testing.T, env *Env) { +- env.OpenFile("_foo/x.go") +- env.AfterChange( +- NoDiagnostics(ForFile("_foo/x.go")), +- ) +- }) +-} +- +-// Partially reproduces golang/go#38977, moving a file between packages. +-// It also gets hit by some go command bug fixed in 1.15, but we don't +-// care about that so much here. +-func TestDeletePackage(t *testing.T) { +- const ws = ` +--- go.mod -- +-module mod.com +- +-go 1.15 +--- a/a.go -- +-package a +- +-const A = 1 +- +--- b/b.go -- +-package b +- +-import "mod.com/a" +- +-const B = a.A +- +--- c/c.go -- +-package c +- +-import "mod.com/a" +- +-const C = a.A +-` +- Run(t, ws, func(t *testing.T, env *Env) { +- env.OpenFile("b/b.go") +- env.Await(env.DoneWithOpen()) +- // Delete c/c.go, the only file in package c. +- env.RemoveWorkspaceFile("c/c.go") +- +- // We should still get diagnostics for files that exist. +- env.RegexpReplace("b/b.go", `a.A`, "a.Nonexistant") +- env.AfterChange( +- Diagnostics(env.AtRegexp("b/b.go", `Nonexistant`)), +- ) +- }) +-} +- +-// This is a copy of the scenario_default/quickfix_empty_files.txt test from +-// govim. Reproduces golang/go#39646. +-func TestQuickFixEmptyFiles(t *testing.T) { +- const mod = ` +--- go.mod -- +-module mod.com +- +-go 1.12 +-` +- // To fully recreate the govim tests, we create files by inserting +- // a newline, adding to the file, and then deleting the newline. +- // Wait for each event to process to avoid cancellations and force +- // package loads. +- writeGoVim := func(env *Env, name, content string) { +- env.WriteWorkspaceFile(name, "") +- env.Await(env.DoneWithChangeWatchedFiles()) +- +- env.CreateBuffer(name, "\n") +- env.Await(env.DoneWithOpen()) +- +- env.EditBuffer(name, fake.NewEdit(1, 0, 1, 0, content)) +- env.Await(env.DoneWithChange()) +- +- env.EditBuffer(name, fake.NewEdit(0, 0, 1, 0, "")) +- env.Await(env.DoneWithChange()) +- } +- +- const p = `package p; func DoIt(s string) {};` +- const main = `package main +- +-import "mod.com/p" +- +-func main() { +- p.DoIt(5) +-} +-` +- // A simple version of the test that reproduces most of the problems it +- // exposes. 
+- t.Run("short", func(t *testing.T) { +- Run(t, mod, func(t *testing.T, env *Env) { +- writeGoVim(env, "p/p.go", p) +- writeGoVim(env, "main.go", main) +- env.AfterChange( +- Diagnostics(env.AtRegexp("main.go", "5")), +- ) +- }) +- }) +- +- // A full version that replicates the whole flow of the test. +- t.Run("full", func(t *testing.T) { +- Run(t, mod, func(t *testing.T, env *Env) { +- writeGoVim(env, "p/p.go", p) +- writeGoVim(env, "main.go", main) +- writeGoVim(env, "p/p_test.go", `package p +- +-import "testing" +- +-func TestDoIt(t *testing.T) { +- DoIt(5) +-} +-`) +- writeGoVim(env, "p/x_test.go", `package p_test +- +-import ( +- "testing" +- +- "mod.com/p" +-) +- +-func TestDoIt(t *testing.T) { +- p.DoIt(5) +-} +-`) +- env.AfterChange( +- Diagnostics(env.AtRegexp("main.go", "5")), +- Diagnostics(env.AtRegexp("p/p_test.go", "5")), +- Diagnostics(env.AtRegexp("p/x_test.go", "5")), +- ) +- env.RegexpReplace("p/p.go", "s string", "i int") +- env.AfterChange( +- NoDiagnostics(ForFile("main.go")), +- NoDiagnostics(ForFile("p/p_test.go")), +- NoDiagnostics(ForFile("p/x_test.go")), +- ) +- }) +- }) +-} +- +-func TestSingleFile(t *testing.T) { +- const mod = ` +--- go.mod -- +-module mod.com +- +-go 1.13 +--- a/a.go -- +-package a +- +-func _() { +- var x int +-} +-` +- WithOptions( +- // Empty workspace folders. +- WorkspaceFolders(), +- ).Run(t, mod, func(t *testing.T, env *Env) { +- env.OpenFile("a/a.go") +- env.AfterChange( +- Diagnostics(env.AtRegexp("a/a.go", "x")), +- ) +- }) +-} +- +-// Reproduces the case described in +-// https://github.com/golang/go/issues/39296#issuecomment-652058883. +-func TestPkgm(t *testing.T) { +- const basic = ` +--- go.mod -- +-module mod.com +- +-go 1.15 +--- foo/foo.go -- +-package foo +- +-import "fmt" +- +-func Foo() { +- fmt.Println("") +-} +-` +- Run(t, basic, func(t *testing.T, env *Env) { +- env.WriteWorkspaceFile("foo/foo_test.go", `package main +- +-func main() { +- +-}`) +- env.OpenFile("foo/foo_test.go") +- env.RegexpReplace("foo/foo_test.go", `package main`, `package foo`) +- env.AfterChange(NoDiagnostics(ForFile("foo/foo.go"))) +- }) +-} +- +-func TestClosingBuffer(t *testing.T) { +- const basic = ` +--- go.mod -- +-module mod.com +- +-go 1.14 +--- main.go -- +-package main +- +-func main() {} +-` +- Run(t, basic, func(t *testing.T, env *Env) { +- env.CreateBuffer("foo.go", `package main`) +- env.AfterChange() +- env.CloseBuffer("foo.go") +- env.AfterChange(NoLogMatching(protocol.Info, "packages=0")) +- }) +-} +- +-// Reproduces golang/go#38424. +-func TestCutAndPaste(t *testing.T) { +- const basic = ` +--- go.mod -- +-module mod.com +- +-go 1.14 +--- main2.go -- +-package main +-` +- Run(t, basic, func(t *testing.T, env *Env) { +- env.CreateBuffer("main.go", "") +- env.Await(env.DoneWithOpen()) +- +- env.SaveBufferWithoutActions("main.go") +- env.Await(env.DoneWithSave(), env.DoneWithChangeWatchedFiles()) +- +- env.EditBuffer("main.go", fake.NewEdit(0, 0, 0, 0, `package main +- +-func main() { +-} +-`)) +- env.Await(env.DoneWithChange()) +- +- env.SaveBuffer("main.go") +- env.Await(env.DoneWithSave(), env.DoneWithChangeWatchedFiles()) +- +- env.EditBuffer("main.go", fake.NewEdit(0, 0, 4, 0, "")) +- env.Await(env.DoneWithChange()) +- +- env.EditBuffer("main.go", fake.NewEdit(0, 0, 0, 0, `package main +- +-func main() { +- var x int +-} +-`)) +- env.AfterChange( +- Diagnostics(env.AtRegexp("main.go", "x")), +- ) +- }) +-} +- +-// Reproduces golang/go#39763. 
+-func TestInvalidPackageName(t *testing.T) { +- const pkgDefault = ` +--- go.mod -- +-module mod.com +- +-go 1.12 +--- main.go -- +-package default +- +-func main() {} +-` +- Run(t, pkgDefault, func(t *testing.T, env *Env) { +- env.OpenFile("main.go") +- env.AfterChange( +- Diagnostics( +- env.AtRegexp("main.go", "default"), +- WithMessage("expected 'IDENT'"), +- ), +- ) +- }) +-} +- +-// This test verifies that the workspace scope is effectively limited to the +-// workspace folder, if expandWorkspaceToModule is set. +-func TestExpandWorkspaceToModule(t *testing.T) { +- const mod = ` +--- go.mod -- +-module mod.com +- +-go 1.12 +--- a/main.go -- +-package main +- +-func main() {} +--- main.go -- +-package main +- +-func main() { +- var x int +-} +-` +- WithOptions( +- WorkspaceFolders("a"), +- ).Run(t, mod, func(t *testing.T, env *Env) { +- env.OpenFile("a/main.go") +- env.AfterChange( +- Diagnostics(env.AtRegexp("main.go", "x")), +- ) +- }) +- WithOptions( +- WorkspaceFolders("a"), +- Settings{"expandWorkspaceToModule": false}, +- ).Run(t, mod, func(t *testing.T, env *Env) { +- env.OpenFile("a/main.go") +- env.AfterChange( +- NoDiagnostics(ForFile("main.go")), +- ) +- }) +-} +- +-// This test verifies that the workspace scope is effectively limited to the +-// set of active modules. +-// +-// We should not get diagnostics or file watching patterns for paths outside of +-// the active workspace. +-func TestWorkspaceModules(t *testing.T) { +- const mod = ` +--- go.work -- +-go 1.18 +- +-use a +--- a/go.mod -- +-module mod.com/a +- +-go 1.12 +--- a/a.go -- +-package a +- +-func _() { +- var x int +-} +--- b/go.mod -- +-module mod.com/b +- +-go 1.18 +-` +- WithOptions( +- Settings{ +- "subdirWatchPatterns": "on", +- }, +- ).Run(t, mod, func(t *testing.T, env *Env) { +- env.OpenFile("a/a.go") +- // Writing this file may cause the snapshot to 'know' about the file b, but +- // that shouldn't cause it to watch the 'b' directory. 
+- env.WriteWorkspaceFile("b/b.go", `package b +- +-func _() { +- var x int +-} +-`) +- env.AfterChange( +- Diagnostics(env.AtRegexp("a/a.go", "x")), +- NoDiagnostics(ForFile("b/b.go")), +- FileWatchMatching("a$"), +- NoFileWatchMatching("b$"), +- ) +- }) +-} +- +-func TestSimplifyCompositeLitDiagnostic(t *testing.T) { +- const files = ` +--- go.mod -- +-module mod.com +- +-go 1.12 +--- main.go -- +-package main +- +-import "fmt" +- +-type t struct { +- msg string +-} +- +-func main() { +- x := []t{t{"msg"}} +- fmt.Println(x) +-} +-` +- +- WithOptions( +- Settings{"staticcheck": true}, +- ).Run(t, files, func(t *testing.T, env *Env) { +- env.OpenFile("main.go") +- var d protocol.PublishDiagnosticsParams +- env.AfterChange( +- Diagnostics(env.AtRegexp("main.go", `t{"msg"}`), WithMessage("redundant type")), +- ReadDiagnostics("main.go", &d), +- ) +- if tags := d.Diagnostics[0].Tags; len(tags) == 0 || tags[0] != protocol.Unnecessary { +- t.Errorf("wanted Unnecessary tag on diagnostic, got %v", tags) +- } +- env.ApplyQuickFixes("main.go", d.Diagnostics) +- env.AfterChange(NoDiagnostics(ForFile("main.go"))) +- }) +-} +- +-// Test some secondary diagnostics +-func TestSecondaryDiagnostics(t *testing.T) { +- const dir = ` +--- go.mod -- +-module mod.com +- +-go 1.12 +--- main.go -- +-package main +-func main() { +- panic("not here") +-} +--- other.go -- +-package main +-func main() {} +-` +- Run(t, dir, func(t *testing.T, env *Env) { +- env.OpenFile("main.go") +- env.OpenFile("other.go") +- var mainDiags, otherDiags protocol.PublishDiagnosticsParams +- env.AfterChange( +- ReadDiagnostics("main.go", &mainDiags), +- ReadDiagnostics("other.go", &otherDiags), +- ) +- if len(mainDiags.Diagnostics) != 1 { +- t.Fatalf("main.go, got %d diagnostics, expected 1", len(mainDiags.Diagnostics)) +- } +- keep := mainDiags.Diagnostics[0] +- if len(otherDiags.Diagnostics) != 1 { +- t.Fatalf("other.go: got %d diagnostics, expected 1", len(otherDiags.Diagnostics)) +- } +- if len(otherDiags.Diagnostics[0].RelatedInformation) != 1 { +- t.Fatalf("got %d RelatedInformations, expected 1", len(otherDiags.Diagnostics[0].RelatedInformation)) +- } +- // check that the RelatedInformation matches the error from main.go +- c := otherDiags.Diagnostics[0].RelatedInformation[0] +- if c.Location.Range != keep.Range { +- t.Errorf("locations don't match. Got %v expected %v", c.Location.Range, keep.Range) +- } +- }) +-} +- +-func TestOrphanedFiles(t *testing.T) { +- const files = ` +--- go.mod -- +-module mod.com +- +-go 1.12 +--- a/a.go -- +-package a +- +-func main() { +- var x int +-} +--- a/a_exclude.go -- +-// +build exclude +- +-package a +- +-func _() { +- var x int +-} +-` +- Run(t, files, func(t *testing.T, env *Env) { +- env.OpenFile("a/a.go") +- env.AfterChange( +- Diagnostics(env.AtRegexp("a/a.go", "x")), +- ) +- env.OpenFile("a/a_exclude.go") +- +- loadOnce := LogMatching(protocol.Info, "query=.*file=.*a_exclude.go", 1, false) +- +- // can't use OnceMet or AfterChange as logs are async +- env.Await(loadOnce) +- // ...but ensure that the change has been fully processed before editing. +- // Otherwise, there may be a race where the snapshot is cloned before all +- // state changes resulting from the load have been processed +- // (golang/go#61521). +- env.AfterChange() +- +- // Check that orphaned files are not reloaded, by making a change in +- // a.go file and confirming that the workspace diagnosis did not reload +- // a_exclude.go. 
+- // +- // This is racy (but fails open) because logs are asynchronous to other LSP +- // operations. There's a chance gopls _did_ log, and we just haven't seen +- // it yet. +- env.RegexpReplace("a/a.go", "package a", "package a // arbitrary comment") +- env.AfterChange(loadOnce) +- }) +-} +- +-func TestSwig(t *testing.T) { +- if _, err := exec.LookPath("swig"); err != nil { +- t.Skip("skipping test: swig not available") +- } +- if _, err := exec.LookPath("g++"); err != nil { +- t.Skip("skipping test: g++ not available") +- } +- +- const mod = ` +--- go.mod -- +-module mod.com +- +-go 1.12 +--- pkg/simple/export_swig.go -- +-package simple +- +-func ExportSimple(x, y int) int { +- return Gcd(x, y) +-} +--- pkg/simple/simple.swigcxx -- +-%module simple +- +-%inline %{ +-extern int gcd(int x, int y) +-{ +- int g; +- g = y; +- while (x > 0) { +- g = x; +- x = y % x; +- y = g; +- } +- return g; +-} +-%} +--- main.go -- +-package a +- +-func main() { +- var x int +-} +-` +- Run(t, mod, func(t *testing.T, env *Env) { +- env.OnceMet( +- InitialWorkspaceLoad, +- NoDiagnostics(WithMessage("illegal character U+0023 '#'")), +- ) +- }) +-} +- +-// When foo_test.go is opened, gopls will object to the borked package name. +-// This test asserts that when the package name is fixed, gopls will soon after +-// have no more complaints about it. +-// https://github.com/golang/go/issues/41061 +-func TestRenamePackage(t *testing.T) { +- const proxy = ` +--- example.com@v1.2.3/go.mod -- +-module example.com +- +-go 1.12 +--- example.com@v1.2.3/blah/blah.go -- +-package blah +- +-const Name = "Blah" +--- random.org@v1.2.3/go.mod -- +-module random.org +- +-go 1.12 +--- random.org@v1.2.3/blah/blah.go -- +-package hello +- +-const Name = "Hello" +-` +- +- const contents = ` +--- go.mod -- +-module mod.com +- +-go 1.12 +--- main.go -- +-package main +- +-import "example.com/blah" +- +-func main() { +- blah.Hello() +-} +--- bob.go -- +-package main +--- foo/foo.go -- +-package foo +--- foo/foo_test.go -- +-package foo_ +-` +- +- WithOptions( +- ProxyFiles(proxy), +- InGOPATH(), +- EnvVars{"GO111MODULE": "off"}, +- ).Run(t, contents, func(t *testing.T, env *Env) { +- // Simulate typing character by character. +- env.OpenFile("foo/foo_test.go") +- env.Await(env.DoneWithOpen()) +- env.RegexpReplace("foo/foo_test.go", "_", "_t") +- env.Await(env.DoneWithChange()) +- env.RegexpReplace("foo/foo_test.go", "_t", "_test") +- env.AfterChange( +- NoDiagnostics(ForFile("foo/foo_test.go")), +- NoOutstandingWork(IgnoreTelemetryPromptWork), +- ) +- }) +-} +- +-// TestProgressBarErrors confirms that critical workspace load errors are shown +-// and updated via progress reports. +-func TestProgressBarErrors(t *testing.T) { +- const pkg = ` +--- go.mod -- +-modul mod.com +- +-go 1.12 +--- main.go -- +-package main +-` +- Run(t, pkg, func(t *testing.T, env *Env) { +- env.OpenFile("go.mod") +- env.AfterChange( +- OutstandingWork(server.WorkspaceLoadFailure, "unknown directive"), +- ) +- env.EditBuffer("go.mod", fake.NewEdit(0, 0, 3, 0, `module mod.com +- +-go 1.hello +-`)) +- // As of golang/go#42529, go.mod changes do not reload the workspace until +- // they are saved. 
+- env.SaveBufferWithoutActions("go.mod") +- env.AfterChange( +- OutstandingWork(server.WorkspaceLoadFailure, "invalid go version"), +- ) +- env.RegexpReplace("go.mod", "go 1.hello", "go 1.12") +- env.SaveBufferWithoutActions("go.mod") +- env.AfterChange( +- NoOutstandingWork(IgnoreTelemetryPromptWork), +- ) +- }) +-} +- +-func TestDeleteDirectory(t *testing.T) { +- const mod = ` +--- bob/bob.go -- +-package bob +- +-func Hello() { +- var x int +-} +--- go.mod -- +-module mod.com +--- cmd/main.go -- +-package main +- +-import "mod.com/bob" +- +-func main() { +- bob.Hello() +-} +-` +- WithOptions( +- Settings{ +- // Now that we don't watch subdirs by default (except for VS Code), +- // we must explicitly ask gopls to requests subdir watch patterns. +- "subdirWatchPatterns": "on", +- }, +- ).Run(t, mod, func(t *testing.T, env *Env) { +- env.OnceMet( +- InitialWorkspaceLoad, +- FileWatchMatching("bob"), +- ) +- env.RemoveWorkspaceFile("bob") +- env.AfterChange( +- Diagnostics(env.AtRegexp("cmd/main.go", `"mod.com/bob"`)), +- NoDiagnostics(ForFile("bob/bob.go")), +- NoFileWatchMatching("bob"), +- ) +- }) +-} +- +-// Confirms that circular imports are tested and reported. +-func TestCircularImports(t *testing.T) { +- const mod = ` +--- go.mod -- +-module mod.com +- +-go 1.12 +--- self/self.go -- +-package self +- +-import _ "mod.com/self" +-func Hello() {} +--- double/a/a.go -- +-package a +- +-import _ "mod.com/double/b" +--- double/b/b.go -- +-package b +- +-import _ "mod.com/double/a" +--- triple/a/a.go -- +-package a +- +-import _ "mod.com/triple/b" +--- triple/b/b.go -- +-package b +- +-import _ "mod.com/triple/c" +--- triple/c/c.go -- +-package c +- +-import _ "mod.com/triple/a" +-` +- Run(t, mod, func(t *testing.T, env *Env) { +- env.OnceMet( +- InitialWorkspaceLoad, +- Diagnostics(env.AtRegexp("self/self.go", `_ "mod.com/self"`), WithMessage("import cycle not allowed")), +- Diagnostics(env.AtRegexp("double/a/a.go", `_ "mod.com/double/b"`), WithMessage("import cycle not allowed")), +- Diagnostics(env.AtRegexp("triple/a/a.go", `_ "mod.com/triple/b"`), WithMessage("import cycle not allowed")), +- ) +- }) +-} +- +-// Tests golang/go#46667: deleting a problematic import path should resolve +-// import cycle errors. +-func TestResolveImportCycle(t *testing.T) { +- const mod = ` +--- go.mod -- +-module mod.test +- +-go 1.16 +--- a/a.go -- +-package a +- +-import "mod.test/b" +- +-const A = b.A +-const B = 2 +--- b/b.go -- +-package b +- +-import "mod.test/a" +- +-const A = 1 +-const B = a.B +- ` +- Run(t, mod, func(t *testing.T, env *Env) { +- env.OpenFile("a/a.go") +- env.OpenFile("b/b.go") +- env.AfterChange( +- // The Go command sometimes tells us about only one of the import cycle +- // errors below. Also, sometimes we get an error during type checking +- // instead of during list, due to missing metadata. This is likely due to +- // a race. +- // For robustness of this test, succeed if we get any reasonable error. +- // +- // TODO(golang/go#52904): we should get *both* of these errors. +- // TODO(golang/go#64899): we should always get an import cycle error +- // rather than a missing metadata error. 
+- AnyOf( +- Diagnostics(env.AtRegexp("a/a.go", `"mod.test/b"`)), +- Diagnostics(env.AtRegexp("b/b.go", `"mod.test/a"`)), +- ), +- ) +- env.RegexpReplace("b/b.go", `const B = a\.B`, "") +- env.SaveBuffer("b/b.go") +- env.AfterChange( +- NoDiagnostics(ForFile("a/a.go")), +- NoDiagnostics(ForFile("b/b.go")), +- ) +- }) +-} +- +-func TestBadImport(t *testing.T) { +- const mod = ` +--- go.mod -- +-module mod.com +- +-go 1.12 +--- main.go -- +-package main +- +-import ( +- _ "nosuchpkg" +-) +-` +- t.Run("module", func(t *testing.T) { +- Run(t, mod, func(t *testing.T, env *Env) { +- env.OnceMet( +- InitialWorkspaceLoad, +- Diagnostics(env.AtRegexp("main.go", `"nosuchpkg"`), WithMessage(`could not import nosuchpkg (no required module provides package "nosuchpkg"`)), +- ) +- }) +- }) +- t.Run("GOPATH", func(t *testing.T) { +- WithOptions( +- InGOPATH(), +- EnvVars{"GO111MODULE": "off"}, +- Modes(Default), +- ).Run(t, mod, func(t *testing.T, env *Env) { +- env.OnceMet( +- InitialWorkspaceLoad, +- Diagnostics(env.AtRegexp("main.go", `"nosuchpkg"`), WithMessage(`cannot find package "nosuchpkg"`)), +- ) +- }) +- }) +-} +- +-func TestNestedModules(t *testing.T) { +- const proxy = ` +--- nested.com@v1.0.0/go.mod -- +-module nested.com +- +-go 1.12 +--- nested.com@v1.0.0/hello/hello.go -- +-package hello +- +-func Hello() {} +-` +- +- const nested = ` +--- go.mod -- +-module mod.com +- +-go 1.12 +- +-require nested.com v1.0.0 +--- main.go -- +-package main +- +-import "nested.com/hello" +- +-func main() { +- hello.Hello() +-} +--- nested/go.mod -- +-module nested.com +- +--- nested/hello/hello.go -- +-package hello +- +-func Hello() { +- helloHelper() +-} +--- nested/hello/hello_helper.go -- +-package hello +- +-func helloHelper() {} +-` +- WithOptions( +- WriteGoSum("."), +- ProxyFiles(proxy), +- Modes(Default), +- ).Run(t, nested, func(t *testing.T, env *Env) { +- // Expect a diagnostic in a nested module. 
+- env.OpenFile("nested/hello/hello.go") +- env.AfterChange( +- NoDiagnostics(ForFile("nested/hello/hello.go")), +- ) +- loc := env.FirstDefinition(env.RegexpSearch("nested/hello/hello.go", "helloHelper")) +- want := "nested/hello/hello_helper.go" +- if got := env.Sandbox.Workdir.URIToPath(loc.URI); got != want { +- t.Errorf("Definition() returned %q, want %q", got, want) +- } +- }) +-} +- +-func TestAdHocPackagesReloading(t *testing.T) { +- const nomod = ` +--- main.go -- +-package main +- +-func main() {} +-` +- Run(t, nomod, func(t *testing.T, env *Env) { +- env.OpenFile("main.go") +- env.RegexpReplace("main.go", "{}", "{ var x int; }") // simulate typing +- env.AfterChange(NoLogMatching(protocol.Info, "packages=1")) +- }) +-} +- +-func TestBuildTagChange(t *testing.T) { +- const files = ` +--- go.mod -- +-module mod.com +- +-go 1.12 +--- foo.go -- +-// decoy comment +-// +build hidden +-// decoy comment +- +-package foo +-var Foo = 1 +--- bar.go -- +-package foo +-var Bar = Foo +-` +- +- Run(t, files, func(t *testing.T, env *Env) { +- env.OpenFile("foo.go") +- env.AfterChange(Diagnostics(env.AtRegexp("bar.go", `Foo`))) +- env.RegexpReplace("foo.go", `\+build`, "") +- env.AfterChange(NoDiagnostics(ForFile("bar.go"))) +- }) +- +-} +- +-func TestIssue44736(t *testing.T) { +- const files = ` +- -- go.mod -- +-module blah.com +- +-go 1.16 +--- main.go -- +-package main +- +-import "fmt" +- +-func main() { +- asdf +- fmt.Printf("This is a test %v") +- fdas +-} +--- other.go -- +-package main +- +-` +- Run(t, files, func(t *testing.T, env *Env) { +- env.OpenFile("main.go") +- env.OpenFile("other.go") +- env.AfterChange( +- Diagnostics(env.AtRegexp("main.go", "asdf")), +- Diagnostics(env.AtRegexp("main.go", "fdas")), +- ) +- env.SetBufferContent("other.go", "package main\n\nasdf") +- // The new diagnostic in other.go should not suppress diagnostics in main.go. +- env.AfterChange( +- Diagnostics(env.AtRegexp("other.go", "asdf"), WithMessage("expected declaration")), +- Diagnostics(env.AtRegexp("main.go", "asdf")), +- ) +- }) +-} +- +-func TestInitialization(t *testing.T) { +- const files = ` +--- go.mod -- +-module mod.com +- +-go 1.16 +--- main.go -- +-package main +-` +- Run(t, files, func(t *testing.T, env *Env) { +- env.OpenFile("go.mod") +- env.Await(env.DoneWithOpen()) +- env.RegexpReplace("go.mod", "module", "modul") +- env.SaveBufferWithoutActions("go.mod") +- env.AfterChange( +- NoLogMatching(protocol.Error, "initial workspace load failed"), +- ) +- }) +-} +- +-// This test confirms that the view does not reinitialize when a go.mod file is +-// opened. +-func TestNoReinitialize(t *testing.T) { +- const files = ` +--- go.mod -- +-module mod.com +- +-go 1.12 +--- main.go -- +-package main +- +-func main() {} +-` +- Run(t, files, func(t *testing.T, env *Env) { +- env.OpenFile("go.mod") +- env.Await( +- // Check that we have only loaded "<dir>/..." once. +- // Log messages are asynchronous to other events on the LSP stream, so we +- // can't use OnceMet or AfterChange here. 
+- LogMatching(protocol.Info, `.*query=.*\.\.\..*`, 1, false), +- ) +- }) +-} +- +-func TestLangVersion(t *testing.T) { +- const files = ` +--- go.mod -- +-module mod.com +- +-go 1.12 +--- main.go -- +-package main +- +-const C = 0b10 +-` +- Run(t, files, func(t *testing.T, env *Env) { +- env.OnceMet( +- InitialWorkspaceLoad, +- Diagnostics(env.AtRegexp("main.go", `0b10`), WithMessage("go1.13 or later")), +- ) +- env.WriteWorkspaceFile("go.mod", "module mod.com \n\ngo 1.13\n") +- env.AfterChange( +- NoDiagnostics(ForFile("main.go")), +- ) +- }) +-} +- +-func TestNoQuickFixForUndeclaredConstraint(t *testing.T) { +- const files = ` +--- go.mod -- +-module mod.com +- +-go 1.18 +--- main.go -- +-package main +- +-func F[T C](_ T) { +-} +-` +- +- Run(t, files, func(t *testing.T, env *Env) { +- var d protocol.PublishDiagnosticsParams +- env.OnceMet( +- InitialWorkspaceLoad, +- Diagnostics(env.AtRegexp("main.go", `C`)), +- ReadDiagnostics("main.go", &d), +- ) +- if fixes := env.GetQuickFixes("main.go", d.Diagnostics); len(fixes) != 0 { +- t.Errorf("got quick fixes %v, wanted none", fixes) +- } +- }) +-} +- +-func TestEditGoDirective(t *testing.T) { +- const files = ` +--- go.mod -- +-module mod.com +- +-go 1.16 +--- main.go -- +-package main +- +-func F[T any](_ T) { +-} +-` +- Run(t, files, func(_ *testing.T, env *Env) { // Create a new workspace-level directory and empty file. +- var d protocol.PublishDiagnosticsParams +- env.OnceMet( +- InitialWorkspaceLoad, +- Diagnostics(env.AtRegexp("main.go", `T any`), WithMessage("type parameter")), +- ReadDiagnostics("main.go", &d), +- ) +- +- env.ApplyQuickFixes("main.go", d.Diagnostics) +- env.AfterChange( +- NoDiagnostics(ForFile("main.go")), +- ) +- }) +-} +- +-func TestEditGoDirectiveWorkspace(t *testing.T) { +- const files = ` +--- go.mod -- +-module mod.com +- +-go 1.16 +--- go.work -- +-go 1.18 +- +-use . +--- main.go -- +-package main +- +-func F[T any](_ T) { +-} +-` +- Run(t, files, func(_ *testing.T, env *Env) { // Create a new workspace-level directory and empty file. +- var d protocol.PublishDiagnosticsParams +- +- // We should have a diagnostic because generics are not supported at 1.16. +- env.OnceMet( +- InitialWorkspaceLoad, +- Diagnostics(env.AtRegexp("main.go", `T any`), WithMessage("type parameter")), +- ReadDiagnostics("main.go", &d), +- ) +- +- // This diagnostic should have a quick fix to edit the go version. +- env.ApplyQuickFixes("main.go", d.Diagnostics) +- +- // Once the edit is applied, the problematic diagnostics should be +- // resolved. +- env.AfterChange( +- NoDiagnostics(ForFile("main.go")), +- ) +- }) +-} +- +-// This test demonstrates that analysis facts are correctly propagated +-// across packages. +-func TestInterpackageAnalysis(t *testing.T) { +- const src = ` +--- go.mod -- +-module example.com +--- a/a.go -- +-package a +- +-import "example.com/b" +- +-func _() { +- new(b.B).Printf("%d", "s") // printf error +-} +- +--- b/b.go -- +-package b +- +-import "example.com/c" +- +-type B struct{} +- +-func (B) Printf(format string, args ...interface{}) { +- c.MyPrintf(format, args...) +-} +- +--- c/c.go -- +-package c +- +-import "fmt" +- +-func MyPrintf(format string, args ...interface{}) { +- fmt.Printf(format, args...) 
+-} +-` +- Run(t, src, func(t *testing.T, env *Env) { +- env.OpenFile("a/a.go") +- env.AfterChange( +- Diagnostics( +- env.AtRegexp("a/a.go", "%d"), +- WithMessage("format %d has arg \"s\" of wrong type string"), +- ), +- ) +- }) +-} +- +-// This test ensures that only Analyzers with RunDespiteErrors=true +-// are invoked on a package that would not compile, even if the errors +-// are distant and localized. +-func TestErrorsThatPreventAnalysis(t *testing.T) { +- const src = ` +--- go.mod -- +-module example.com +--- a/a.go -- +-package a +- +-import "fmt" +-import "sync" +-import _ "example.com/b" +- +-func _() { +- // The copylocks analyzer (RunDespiteErrors, FactTypes={}) does run. +- var mu sync.Mutex +- mu2 := mu // copylocks error, reported +- _ = &mu2 +- +- // The printf analyzer (!RunDespiteErrors, FactTypes!={}) does not run: +- // (c, printf) failed because of type error in c +- // (b, printf) and (a, printf) do not run because of failed prerequisites. +- fmt.Printf("%d", "s") // printf error, unreported +- +- // The bools analyzer (!RunDespiteErrors, FactTypes={}) does not run: +- var cond bool +- _ = cond != true && cond != true // bools error, unreported +-} +- +--- b/b.go -- +-package b +- +-import _ "example.com/c" +- +--- c/c.go -- +-package c +- +-var _ = 1 / "" // type error +- +-` +- Run(t, src, func(t *testing.T, env *Env) { +- var diags protocol.PublishDiagnosticsParams +- env.OpenFile("a/a.go") +- env.AfterChange( +- Diagnostics(env.AtRegexp("a/a.go", "mu2 := (mu)"), WithMessage("assignment copies lock value")), +- ReadDiagnostics("a/a.go", &diags)) +- +- // Assert that there were no other diagnostics. +- // In particular: +- // - "fmt.Printf" does not trigger a [printf] finding; +- // - "cond != true" does not trigger a [bools] finding. +- // +- // We use this check in preference to NoDiagnosticAtRegexp +- // as it is robust in case of minor mistakes in the position +- // regexp, and because it reports unexpected diagnostics. +- if got, want := len(diags.Diagnostics), 1; got != want { +- t.Errorf("got %d diagnostics in a/a.go, want %d:", got, want) +- for i, diag := range diags.Diagnostics { +- t.Logf("Diagnostics[%d] = %+v", i, diag) +- } +- } +- }) +-} +- +-// This test demonstrates the deprecated symbol analyzer +-// produces deprecation notices with expected severity and tags. +-func TestDeprecatedAnalysis(t *testing.T) { +- const src = ` +--- go.mod -- +-module example.com +--- a/a.go -- +-package a +- +-import "example.com/b" +- +-func _() { +- new(b.B).Obsolete() // deprecated +-} +- +--- b/b.go -- +-package b +- +-type B struct{} +- +-// Deprecated: use New instead. +-func (B) Obsolete() {} +- +-func (B) New() {} +-` +- Run(t, src, func(t *testing.T, env *Env) { +- env.OpenFile("a/a.go") +- env.AfterChange( +- Diagnostics( +- env.AtRegexp("a/a.go", "new.*Obsolete"), +- WithMessage("use New instead."), +- WithSeverityTags("deprecated", protocol.SeverityHint, []protocol.DiagnosticTag{protocol.Deprecated}), +- ), +- ) +- }) +-} +- +-func TestDiagnosticsOnlyOnSaveFile(t *testing.T) { +- // This functionality is broken because the new orphaned file diagnostics +- // logic wants to publish diagnostics for changed files, independent of any +- // snapshot diagnostics pass, and this causes stale diagnostics to be +- // invalidated. +- // +- // We can fix this behavior more correctly by also honoring the +- // diagnosticsTrigger in DiagnoseOrphanedFiles, but that would require +- // resolving configuration that is independent of the snapshot. 
In other +- // words, we need to figure out which cache.Folder.Options applies to the +- // changed file, even if it does not have a snapshot. +- t.Skip("temporary skip for golang/go#57979: revisit after zero-config logic is in place") +- +- const onlyMod = ` +--- go.mod -- +-module mod.com +- +-go 1.12 +--- main.go -- +-package main +- +-func main() { +- Foo() +-} +--- foo.go -- +-package main +- +-func Foo() {} +-` +- WithOptions( +- Settings{ +- "diagnosticsTrigger": "Save", +- }, +- ).Run(t, onlyMod, func(t *testing.T, env *Env) { +- env.OpenFile("foo.go") +- env.RegexpReplace("foo.go", "(Foo)", "Bar") // Makes reference to Foo undefined/undeclared. +- env.AfterChange(NoDiagnostics()) // No diagnostics update until file save. +- +- env.SaveBuffer("foo.go") +- // Compiler's error message about undeclared names vary depending on the version, +- // but must be explicit about the problematic name. +- env.AfterChange(Diagnostics(env.AtRegexp("main.go", "Foo"), WithMessage("Foo"))) +- +- env.OpenFile("main.go") +- env.RegexpReplace("main.go", "(Foo)", "Bar") +- // No diagnostics update until file save. That results in outdated diagnostic. +- env.AfterChange(Diagnostics(env.AtRegexp("main.go", "Bar"), WithMessage("Foo"))) +- +- env.SaveBuffer("main.go") +- env.AfterChange(NoDiagnostics()) +- }) +-} +diff -urN a/gopls/internal/test/integration/diagnostics/golist_test.go b/gopls/internal/test/integration/diagnostics/golist_test.go +--- a/gopls/internal/test/integration/diagnostics/golist_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/integration/diagnostics/golist_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,71 +0,0 @@ +-// Copyright 2023 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package diagnostics +- +-import ( +- "testing" +- +- "golang.org/x/tools/gopls/internal/cache" +- . "golang.org/x/tools/gopls/internal/test/integration" +- "golang.org/x/tools/internal/testenv" +-) +- +-func TestGoListErrors(t *testing.T) { +- testenv.NeedsTool(t, "cgo") +- +- const src = ` +--- go.mod -- +-module a.com +- +-go 1.18 +--- a/a.go -- +-package a +- +-import +--- c/c.go -- +-package c +- +-/* +-int fortythree() { return 42; } +-*/ +-import "C" +- +-func Foo() { +- print(C.fortytwo()) +-} +--- p/p.go -- +-package p +- +-import "a.com/q" +- +-const P = q.Q + 1 +--- q/q.go -- +-package q +- +-import "a.com/p" +- +-const Q = p.P + 1 +-` +- +- Run(t, src, func(t *testing.T, env *Env) { +- env.OnceMet( +- InitialWorkspaceLoad, +- Diagnostics( +- env.AtRegexp("a/a.go", "import\n()"), +- FromSource(string(cache.ParseError)), +- ), +- Diagnostics( +- AtPosition("c/c.go", 0, 0), +- FromSource(string(cache.ListError)), +- WithMessage("may indicate failure to perform cgo processing"), +- ), +- Diagnostics( +- env.AtRegexp("p/p.go", `"a.com/q"`), +- FromSource(string(cache.ListError)), +- WithMessage("import cycle not allowed"), +- ), +- ) +- }) +-} +diff -urN a/gopls/internal/test/integration/diagnostics/gopackagesdriver_test.go b/gopls/internal/test/integration/diagnostics/gopackagesdriver_test.go +--- a/gopls/internal/test/integration/diagnostics/gopackagesdriver_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/integration/diagnostics/gopackagesdriver_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,82 +0,0 @@ +-// Copyright 2024 The Go Authors. All rights reserved. 
+-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package diagnostics +- +-import ( +- "testing" +- +- . "golang.org/x/tools/gopls/internal/test/integration" +-) +- +-// Test that the import error does not mention GOPATH when building with +-// go/packages driver. +-func TestBrokenWorkspace_GOPACKAGESDRIVER(t *testing.T) { +- // A go.mod file is actually needed here, because the fake go/packages driver +- // uses go list behind the scenes, and we load go/packages driver workspaces +- // with ./... +- const files = ` +--- go.mod -- +-module m +-go 1.12 +- +--- a.go -- +-package foo +- +-import "mod.com/hello" +-` +- WithOptions( +- FakeGoPackagesDriver(t), +- ).Run(t, files, func(t *testing.T, env *Env) { +- env.OpenFile("a.go") +- env.AfterChange( +- Diagnostics( +- env.AtRegexp("a.go", `"mod.com`), +- WithMessage("go/packages driver"), +- ), +- ) +- // Deleting the import removes the error. +- env.RegexpReplace("a.go", `import "mod.com/hello"`, "") +- env.AfterChange( +- NoDiagnostics(ForFile("a.go")), +- ) +- }) +-} +- +-func TestValidImportCheck_GoPackagesDriver(t *testing.T) { +- const files = ` +--- go.work -- +-use . +- +--- go.mod -- +-module example.com +-go 1.0 +- +--- a/a.go -- +-package a +-import _ "example.com/b/internal/c" +- +--- b/internal/c/c.go -- +-package c +-` +- +- // Note that 'go list' produces an error ("use of internal package %q not allowed") +- // and gopls produces another ("invalid use of internal package %q") with source=compiler. +- // Here we assert that the second one is not reported with a go/packages driver. +- // (We don't assert that the first is missing, because the test driver wraps go list!) +- +- // go list +- Run(t, files, func(t *testing.T, env *Env) { +- env.OpenFile("a/a.go") +- env.AfterChange(Diagnostics(WithMessage(`invalid use of internal package "example.com/b/internal/c"`))) +- }) +- +- // test driver +- WithOptions( +- FakeGoPackagesDriver(t), +- ).Run(t, files, func(t *testing.T, env *Env) { +- env.OpenFile("a/a.go") +- env.AfterChange(NoDiagnostics(WithMessage(`invalid use of internal package "example.com/b/internal/c"`))) +- }) +-} +diff -urN a/gopls/internal/test/integration/diagnostics/invalidation_test.go b/gopls/internal/test/integration/diagnostics/invalidation_test.go +--- a/gopls/internal/test/integration/diagnostics/invalidation_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/integration/diagnostics/invalidation_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,141 +0,0 @@ +-// Copyright 2022 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package diagnostics +- +-import ( +- "fmt" +- "testing" +- +- "golang.org/x/tools/gopls/internal/protocol" +- . "golang.org/x/tools/gopls/internal/test/integration" +-) +- +-// Test for golang/go#50267: diagnostics should be re-sent after a file is +-// opened. +-func TestDiagnosticsAreResentAfterCloseOrOpen(t *testing.T) { +- const files = ` +--- go.mod -- +-module mod.com +- +-go 1.16 +--- main.go -- +-package main +- +-func _() { +- x := 2 +-} +-` +- Run(t, files, func(t *testing.T, env *Env) { // Create a new workspace-level directory and empty file. 
+- env.OpenFile("main.go") +- var afterOpen protocol.PublishDiagnosticsParams +- env.AfterChange( +- ReadDiagnostics("main.go", &afterOpen), +- ) +- env.CloseBuffer("main.go") +- var afterClose protocol.PublishDiagnosticsParams +- env.AfterChange( +- ReadDiagnostics("main.go", &afterClose), +- ) +- if afterOpen.Version == afterClose.Version { +- t.Errorf("publishDiagnostics: got the same version after closing (%d) as after opening", afterOpen.Version) +- } +- env.OpenFile("main.go") +- var afterReopen protocol.PublishDiagnosticsParams +- env.AfterChange( +- ReadDiagnostics("main.go", &afterReopen), +- ) +- if afterReopen.Version == afterClose.Version { +- t.Errorf("pubslishDiagnostics: got the same version after reopening (%d) as after closing", afterClose.Version) +- } +- }) +-} +- +-// Test for the "chatty" diagnostics: gopls should re-send diagnostics for +-// changed files after every file change, even if diagnostics did not change. +-func TestChattyDiagnostics(t *testing.T) { +- const files = ` +--- go.mod -- +-module mod.com +- +-go 1.16 +--- main.go -- +-package main +- +-func _() { +- x := 2 +-} +- +-// Irrelevant comment #0 +-` +- +- Run(t, files, func(t *testing.T, env *Env) { // Create a new workspace-level directory and empty file. +- env.OpenFile("main.go") +- var d protocol.PublishDiagnosticsParams +- env.AfterChange( +- ReadDiagnostics("main.go", &d), +- ) +- +- if len(d.Diagnostics) != 1 { +- t.Fatalf("len(Diagnostics) = %d, want 1", len(d.Diagnostics)) +- } +- msg := d.Diagnostics[0].Message +- +- for i := range 5 { +- before := d.Version +- env.RegexpReplace("main.go", "Irrelevant comment #.", fmt.Sprintf("Irrelevant comment #%d", i)) +- env.AfterChange( +- ReadDiagnostics("main.go", &d), +- ) +- +- if d.Version == before { +- t.Errorf("after change, got version %d, want new version", d.Version) +- } +- +- // As a sanity check, make sure we have the same diagnostic. +- if len(d.Diagnostics) != 1 { +- t.Fatalf("len(Diagnostics) = %d, want 1", len(d.Diagnostics)) +- } +- newMsg := d.Diagnostics[0].Message +- if newMsg != msg { +- t.Errorf("after change, got message %q, want %q", newMsg, msg) +- } +- } +- }) +-} +- +-func TestCreatingPackageInvalidatesDiagnostics_Issue66384(t *testing.T) { +- const files = ` +--- go.mod -- +-module example.com +- +-go 1.15 +--- main.go -- +-package main +- +-import "example.com/pkg" +- +-func main() { +- var _ pkg.Thing +-} +-` +- Run(t, files, func(t *testing.T, env *Env) { +- env.OnceMet( +- InitialWorkspaceLoad, +- Diagnostics(env.AtRegexp("main.go", `"example.com/pkg"`)), +- ) +- // In order for this test to reproduce golang/go#66384, we have to create +- // the buffer, wait for loads, and *then* "type out" the contents. Doing so +- // reproduces the conditions of the bug report, that typing the package +- // name itself doesn't invalidate the broken import. 
+- env.CreateBuffer("pkg/pkg.go", "") +- env.AfterChange() +- env.EditBuffer("pkg/pkg.go", protocol.TextEdit{NewText: "package pkg\ntype Thing struct{}\n"}) +- env.AfterChange() +- env.SaveBuffer("pkg/pkg.go") +- env.AfterChange(NoDiagnostics()) +- env.SetBufferContent("pkg/pkg.go", "package pkg") +- env.AfterChange(Diagnostics(env.AtRegexp("main.go", "Thing"))) +- }) +-} +diff -urN a/gopls/internal/test/integration/diagnostics/undeclared_test.go b/gopls/internal/test/integration/diagnostics/undeclared_test.go +--- a/gopls/internal/test/integration/diagnostics/undeclared_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/integration/diagnostics/undeclared_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,69 +0,0 @@ +-// Copyright 2021 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package diagnostics +- +-import ( +- "slices" +- "testing" +- +- "golang.org/x/tools/gopls/internal/protocol" +- . "golang.org/x/tools/gopls/internal/test/integration" +-) +- +-func TestUndeclaredDiagnostics(t *testing.T) { +- src := ` +--- go.mod -- +-module mod.com +- +-go 1.12 +--- a/a.go -- +-package a +- +-func _() int { +- return x +-} +--- b/b.go -- +-package b +- +-func _() int { +- var y int +- y = y +- return y +-} +-` +- Run(t, src, func(t *testing.T, env *Env) { +- isUnnecessary := func(diag protocol.Diagnostic) bool { +- return slices.Contains(diag.Tags, protocol.Unnecessary) +- } +- +- // 'x' is undeclared, but still necessary. +- env.OpenFile("a/a.go") +- var adiags protocol.PublishDiagnosticsParams +- env.AfterChange( +- Diagnostics(env.AtRegexp("a/a.go", "x")), +- ReadDiagnostics("a/a.go", &adiags), +- ) +- if got := len(adiags.Diagnostics); got != 1 { +- t.Errorf("len(Diagnostics) = %d, want 1", got) +- } +- if diag := adiags.Diagnostics[0]; isUnnecessary(diag) { +- t.Errorf("%v tagged unnecessary, want necessary", diag) +- } +- +- // 'y = y' is pointless, and should be detected as unnecessary. +- env.OpenFile("b/b.go") +- var bdiags protocol.PublishDiagnosticsParams +- env.AfterChange( +- Diagnostics(env.AtRegexp("b/b.go", "y = y")), +- ReadDiagnostics("b/b.go", &bdiags), +- ) +- if got := len(bdiags.Diagnostics); got != 1 { +- t.Errorf("len(Diagnostics) = %d, want 1", got) +- } +- if diag := bdiags.Diagnostics[0]; !isUnnecessary(diag) { +- t.Errorf("%v tagged necessary, want unnecessary", diag) +- } +- }) +-} +diff -urN a/gopls/internal/test/integration/doc.go b/gopls/internal/test/integration/doc.go +--- a/gopls/internal/test/integration/doc.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/integration/doc.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,156 +0,0 @@ +-// Copyright 2020 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-// Package integration provides a framework for writing integration tests of gopls. +-// +-// The behaviors that matter to users, and the scenarios they +-// typically describe in bug report, are usually expressed in terms of +-// editor interactions. For example: "When I open my editor in this +-// directory, navigate to this file, and change this line, I get a +-// diagnostic that doesn't make sense". The integration package +-// provides an API for gopls maintainers to express these types of +-// user interactions in ordinary Go tests, validate them, and run them +-// in a variety of execution modes. 
+-// +-// # Test package setup +-// +-// The integration test package uses a couple of uncommon patterns to reduce +-// boilerplate in test bodies. First, it is intended to be imported as "." so +-// that helpers do not need to be qualified. Second, it requires some setup +-// that is currently implemented in the integration.Main function, which must be +-// invoked by TestMain. Therefore, a minimal integration testing package looks +-// like this: +-// +-// package feature +-// +-// import ( +-// "fmt" +-// "testing" +-// +-// "golang.org/x/tools/gopls/internal/hooks" +-// . "golang.org/x/tools/gopls/internal/test/integration" +-// ) +-// +-// func TestMain(m *testing.M) { +-// os.Exit(Main(m, hooks.Options)) +-// } +-// +-// # Writing a simple integration test +-// +-// To run an integration test use the integration.Run function, which accepts a +-// txtar-encoded archive defining the initial workspace state. This function +-// sets up the workspace in a temporary directory, creates a fake text editor, +-// starts gopls, and initializes an LSP session. It then invokes the provided +-// test function with an *Env encapsulating the newly created +-// environment. Because gopls may be run in various modes (as a sidecar or +-// daemon process, with different settings), the test runner may perform this +-// process multiple times, re-running the test function each time with a new +-// environment. +-// +-// func TestOpenFile(t *testing.T) { +-// const files = ` +-// -- go.mod -- +-// module mod.com +-// +-// go 1.12 +-// -- foo.go -- +-// package foo +-// ` +-// Run(t, files, func(t *testing.T, env *Env) { +-// env.OpenFile("foo.go") +-// }) +-// } +-// +-// # Configuring integration test execution +-// +-// The integration package exposes several options that affect the setup process +-// described above. To use these options, use the WithOptions function: +-// +-// WithOptions(opts...).Run(...) +-// +-// See options.go for a full list of available options. +-// +-// # Operating on editor state +-// +-// To operate on editor state within the test body, the Env type provides +-// access to the workspace directory (Env.SandBox), text editor (Env.Editor), +-// LSP server (Env.Server), and 'awaiter' (Env.Awaiter). +-// +-// In most cases, operations on these primitive building blocks of the +-// integration test environment expect a Context (which should be a child of +-// env.Ctx), and return an error. To avoid boilerplate, the Env exposes a set +-// of wrappers in wrappers.go for use in scripting: +-// +-// env.CreateBuffer("c/c.go", "") +-// env.EditBuffer("c/c.go", editor.Edit{ +-// Text: `package c`, +-// }) +-// +-// These wrappers thread through Env.Ctx, and call t.Fatal on any errors. +-// +-// # Expressing expectations +-// +-// The general pattern for an integration test is to script interactions with the +-// fake editor and sandbox, and assert that gopls behaves correctly after each +-// state change. Unfortunately, this is complicated by the fact that state +-// changes are communicated to gopls via unidirectional client->server +-// notifications (didOpen, didChange, etc.), and resulting gopls behavior such +-// as diagnostics, logs, or messages is communicated back via server->client +-// notifications. Therefore, within integration tests we must be able to say "do +-// this, and then eventually gopls should do that". To achieve this, the +-// integration package provides a framework for expressing conditions that must +-// eventually be met, in terms of the Expectation type. 
+-// +-// To express the assertion that "eventually gopls must meet these +-// expectations", use env.Await(...): +-// +-// env.RegexpReplace("x/x.go", `package x`, `package main`) +-// env.Await(env.DiagnosticAtRegexp("x/main.go", `fmt`)) +-// +-// Await evaluates the provided expectations atomically, whenever the client +-// receives a state-changing notification from gopls. See expectation.go for a +-// full list of available expectations. +-// +-// A problem with this model is that if gopls never meets the provided +-// expectations, the test runner will hang until the test timeout +-// (which defaults to 10m). There are two ways to work around this +-// poor behavior: +-// +-// 1. Use a precondition to define precisely when we expect conditions to be +-// met. Gopls provides the OnceMet(precondition, expectations...) pattern +-// to express ("once this precondition is met, the following expectations +-// must all hold"). To instrument preconditions, gopls uses verbose +-// progress notifications to inform the client about ongoing work (see +-// CompletedWork). The most common precondition is to wait for gopls to be +-// done processing all change notifications, for which the integration package +-// provides the AfterChange helper. For example: +-// +-// // We expect diagnostics to be cleared after gopls is done processing the +-// // didSave notification. +-// env.SaveBuffer("a/go.mod") +-// env.AfterChange(EmptyDiagnostics("a/go.mod")) +-// +-// 2. Set a shorter timeout during development, if you expect to be breaking +-// tests. By setting the environment variable GOPLS_INTEGRATION_TEST_TIMEOUT=5s, +-// integration tests will time out after 5 seconds. +-// +-// # Tips & Tricks +-// +-// Here are some tips and tricks for working with integration tests: +-// +-// 1. Set the environment variable GOPLS_INTEGRRATION_TEST_TIMEOUT=5s during development. +-// 2. Run tests with -short. This will only run integration tests in the +-// default gopls execution mode. +-// 3. Use capture groups to narrow regexp positions. All regular-expression +-// based positions (such as DiagnosticAtRegexp) will match the position of +-// the first capture group, if any are provided. This can be used to +-// identify a specific position in the code for a pattern that may occur in +-// multiple places. For example `var (mu) sync.Mutex` matches the position +-// of "mu" within the variable declaration. +-// 4. Read diagnostics into a variable to implement more complicated +-// assertions about diagnostic state in the editor. To do this, use the +-// pattern OnceMet(precondition, ReadDiagnostics("file.go", &d)) to capture +-// the current diagnostics as soon as the precondition is met. This is +-// preferable to accessing the diagnostics directly, as it avoids races. +-package integration +diff -urN a/gopls/internal/test/integration/env.go b/gopls/internal/test/integration/env.go +--- a/gopls/internal/test/integration/env.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/integration/env.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,399 +0,0 @@ +-// Copyright 2020 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. 
+- +-package integration +- +-import ( +- "context" +- "fmt" +- "net/http/httptest" +- "os" +- "os/exec" +- "strings" +- "sync" +- "sync/atomic" +- "testing" +- +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/gopls/internal/test/integration/fake" +- "golang.org/x/tools/internal/jsonrpc2/servertest" +- "golang.org/x/tools/internal/mcp" +-) +- +-// Env holds the building blocks of an editor testing environment, providing +-// wrapper methods that hide the boilerplate of plumbing contexts and checking +-// errors. +-// Call [Env.Shutdown] for cleaning up resources after the test. +-type Env struct { +- TB testing.TB +- Ctx context.Context +- +- // Most tests should not need to access the scratch area, editor, server, or +- // connection, but they are available if needed. +- Sandbox *fake.Sandbox +- Server servertest.Connector +- +- // Editor is owned by the Env, and shut down +- Editor *fake.Editor +- +- Awaiter *Awaiter +- +- // MCPServer, MCPSession and EventChan is owned by the Env, and shut down. +- // Only available if the test enables MCP Server. +- MCPServer *httptest.Server +- MCPSession *mcp.ClientSession +-} +- +-// nextAwaiterRegistration is used to create unique IDs for various Awaiter +-// registrations. +-var nextAwaiterRegistration atomic.Uint64 +- +-// An Awaiter keeps track of relevant LSP state, so that it may be asserted +-// upon with Expectations. +-// +-// Wire it into a fake.Editor using Awaiter.Hooks(). +-// +-// TODO(rfindley): consider simply merging Awaiter with the fake.Editor. It +-// probably is not worth its own abstraction. +-type Awaiter struct { +- workdir *fake.Workdir +- +- mu sync.Mutex +- // For simplicity, each waiter gets a unique ID. +- state State +- waiters map[uint64]*condition +- +- // collectors map a registration to the collection of messages that have been +- // received since the registration was created. +- docCollectors map[uint64][]*protocol.ShowDocumentParams +- messageCollectors map[uint64][]*protocol.ShowMessageParams +-} +- +-func NewAwaiter(workdir *fake.Workdir) *Awaiter { +- return &Awaiter{ +- workdir: workdir, +- state: State{ +- diagnostics: make(map[string]*protocol.PublishDiagnosticsParams), +- work: make(map[protocol.ProgressToken]*workProgress), +- startedWork: make(map[string]uint64), +- completedWork: make(map[string]uint64), +- }, +- waiters: make(map[uint64]*condition), +- } +-} +- +-// Hooks returns LSP client hooks required for awaiting asynchronous expectations. 
+-func (a *Awaiter) Hooks() fake.ClientHooks { +- return fake.ClientHooks{ +- OnDiagnostics: a.onDiagnostics, +- OnLogMessage: a.onLogMessage, +- OnWorkDoneProgressCreate: a.onWorkDoneProgressCreate, +- OnProgress: a.onProgress, +- OnShowDocument: a.onShowDocument, +- OnShowMessage: a.onShowMessage, +- OnShowMessageRequest: a.onShowMessageRequest, +- OnRegisterCapability: a.onRegisterCapability, +- OnUnregisterCapability: a.onUnregisterCapability, +- } +-} +- +-// State encapsulates the server state TODO: explain more +-type State struct { +- // diagnostics are a map of relative path->diagnostics params +- diagnostics map[string]*protocol.PublishDiagnosticsParams +- logs []*protocol.LogMessageParams +- showDocument []*protocol.ShowDocumentParams +- showMessage []*protocol.ShowMessageParams +- showMessageRequest []*protocol.ShowMessageRequestParams +- +- registrations []*protocol.RegistrationParams +- registeredCapabilities map[string]protocol.Registration +- unregistrations []*protocol.UnregistrationParams +- +- // outstandingWork is a map of token->work summary. All tokens are assumed to +- // be string, though the spec allows for numeric tokens as well. +- work map[protocol.ProgressToken]*workProgress +- startedWork map[string]uint64 // title -> count of 'begin' +- completedWork map[string]uint64 // title -> count of 'end' +-} +- +-type workProgress struct { +- title, msg, endMsg string +- percent float64 +- complete bool // seen 'end' +-} +- +-type awaitResult struct { +- verdict Verdict +- reason string +-} +- +-// A condition is satisfied when its expectation is [Met] or [Unmeetable]. The +-// result is sent on the verdict channel. +-type condition struct { +- expectation Expectation +- verdict chan awaitResult +-} +- +-func (a *Awaiter) onDiagnostics(_ context.Context, d *protocol.PublishDiagnosticsParams) error { +- a.mu.Lock() +- defer a.mu.Unlock() +- +- pth := a.workdir.URIToPath(d.URI) +- a.state.diagnostics[pth] = d +- a.checkConditionsLocked() +- return nil +-} +- +-func (a *Awaiter) onShowDocument(_ context.Context, params *protocol.ShowDocumentParams) error { +- a.mu.Lock() +- defer a.mu.Unlock() +- +- // Update any outstanding listeners. +- for id, s := range a.docCollectors { +- a.docCollectors[id] = append(s, params) +- } +- +- a.state.showDocument = append(a.state.showDocument, params) +- a.checkConditionsLocked() +- return nil +-} +- +-// ListenToShownDocuments registers a listener to incoming showDocument +-// notifications. Call the resulting func to deregister the listener and +-// receive all notifications that have occurred since the listener was +-// registered. +-func (a *Awaiter) ListenToShownDocuments() func() []*protocol.ShowDocumentParams { +- id := nextAwaiterRegistration.Add(1) +- +- a.mu.Lock() +- defer a.mu.Unlock() +- +- if a.docCollectors == nil { +- a.docCollectors = make(map[uint64][]*protocol.ShowDocumentParams) +- } +- a.docCollectors[id] = nil +- +- return func() []*protocol.ShowDocumentParams { +- a.mu.Lock() +- defer a.mu.Unlock() +- params := a.docCollectors[id] +- delete(a.docCollectors, id) +- return params +- } +-} +- +-func (a *Awaiter) onShowMessage(_ context.Context, params *protocol.ShowMessageParams) error { +- a.mu.Lock() +- defer a.mu.Unlock() +- +- // Update any outstanding listeners. 
+- for id, s := range a.messageCollectors { +- a.messageCollectors[id] = append(s, params) +- } +- +- a.state.showMessage = append(a.state.showMessage, params) +- a.checkConditionsLocked() +- return nil +-} +- +-// ListenToShownMessages registers a listener to incoming showMessage +-// notifications. Call the resulting func to deregister the listener and +-// receive all notifications that have occurred since the listener was +-// registered. +-// +-// ListenToShownMessages should be called before the operation that +-// generates the showMessage event to ensure that the event is +-// reliably collected. +-func (a *Awaiter) ListenToShownMessages() func() []*protocol.ShowMessageParams { +- id := nextAwaiterRegistration.Add(1) +- +- a.mu.Lock() +- defer a.mu.Unlock() +- +- if a.messageCollectors == nil { +- a.messageCollectors = make(map[uint64][]*protocol.ShowMessageParams) +- } +- a.messageCollectors[id] = nil +- +- return func() []*protocol.ShowMessageParams { +- a.mu.Lock() +- defer a.mu.Unlock() +- params := a.messageCollectors[id] +- delete(a.messageCollectors, id) +- return params +- } +-} +- +-func (a *Awaiter) onShowMessageRequest(_ context.Context, m *protocol.ShowMessageRequestParams) error { +- a.mu.Lock() +- defer a.mu.Unlock() +- +- a.state.showMessageRequest = append(a.state.showMessageRequest, m) +- a.checkConditionsLocked() +- return nil +-} +- +-func (a *Awaiter) onLogMessage(_ context.Context, m *protocol.LogMessageParams) error { +- a.mu.Lock() +- defer a.mu.Unlock() +- +- a.state.logs = append(a.state.logs, m) +- a.checkConditionsLocked() +- return nil +-} +- +-func (a *Awaiter) onWorkDoneProgressCreate(_ context.Context, m *protocol.WorkDoneProgressCreateParams) error { +- a.mu.Lock() +- defer a.mu.Unlock() +- +- a.state.work[m.Token] = &workProgress{} +- return nil +-} +- +-func (a *Awaiter) onProgress(_ context.Context, m *protocol.ProgressParams) error { +- a.mu.Lock() +- defer a.mu.Unlock() +- work, ok := a.state.work[m.Token] +- if !ok { +- panic(fmt.Sprintf("got progress report for unknown report %v: %v", m.Token, m)) +- } +- v := m.Value.(map[string]any) +- switch kind := v["kind"]; kind { +- case "begin": +- work.title = v["title"].(string) +- a.state.startedWork[work.title]++ +- if msg, ok := v["message"]; ok { +- work.msg = msg.(string) +- } +- case "report": +- if pct, ok := v["percentage"]; ok { +- work.percent = pct.(float64) +- } +- if msg, ok := v["message"]; ok { +- work.msg = msg.(string) +- } +- case "end": +- work.complete = true +- a.state.completedWork[work.title]++ +- if msg, ok := v["message"]; ok { +- work.endMsg = msg.(string) +- } +- } +- a.checkConditionsLocked() +- return nil +-} +- +-func (a *Awaiter) onRegisterCapability(_ context.Context, m *protocol.RegistrationParams) error { +- a.mu.Lock() +- defer a.mu.Unlock() +- +- a.state.registrations = append(a.state.registrations, m) +- if a.state.registeredCapabilities == nil { +- a.state.registeredCapabilities = make(map[string]protocol.Registration) +- } +- for _, reg := range m.Registrations { +- a.state.registeredCapabilities[reg.Method] = reg +- } +- a.checkConditionsLocked() +- return nil +-} +- +-func (a *Awaiter) onUnregisterCapability(_ context.Context, m *protocol.UnregistrationParams) error { +- a.mu.Lock() +- defer a.mu.Unlock() +- +- a.state.unregistrations = append(a.state.unregistrations, m) +- a.checkConditionsLocked() +- return nil +-} +- +-func (a *Awaiter) checkConditionsLocked() { +- for id, condition := range a.waiters { +- if v, why := condition.expectation.Check(a.state); 
v != Unmet { +- delete(a.waiters, id) +- condition.verdict <- awaitResult{v, why} +- } +- } +-} +- +-// Await blocks until the given expectations are all simultaneously met. +-// +-// Generally speaking Await should be avoided because it blocks indefinitely if +-// gopls ends up in a state where the expectations are never going to be met. +-// Use AfterChange or OnceMet instead, so that the runner knows when to stop +-// waiting. +-func (e *Env) Await(expectations ...Expectation) { +- e.TB.Helper() +- if err := e.Awaiter.Await(e.Ctx, AllOf(expectations...)); err != nil { +- e.TB.Fatal(err) +- } +-} +- +-// OnceMet blocks until the precondition is met by the state or becomes +-// unmeetable. If it was met, OnceMet checks that the state meets all +-// expectations in mustMeets. +-func (e *Env) OnceMet(pre Expectation, mustMeets ...Expectation) { +- e.TB.Helper() +- e.Await(OnceMet(pre, AllOf(mustMeets...))) +-} +- +-// Await waits for all expectations to simultaneously be met. It should only be +-// called from the main test goroutine. +-func (a *Awaiter) Await(ctx context.Context, expectation Expectation) error { +- a.mu.Lock() +- // Before adding the waiter, we check if the condition is currently met or +- // failed to avoid a race where the condition was realized before Await was +- // called. +- switch verdict, why := expectation.Check(a.state); verdict { +- case Met: +- a.mu.Unlock() +- return nil +- case Unmeetable: +- err := fmt.Errorf("unmeetable expectation:\n%s\nreason:\n%s", indent(expectation.Description), indent(why)) +- a.mu.Unlock() +- return err +- } +- cond := &condition{ +- expectation: expectation, +- verdict: make(chan awaitResult), +- } +- a.waiters[nextAwaiterRegistration.Add(1)] = cond +- a.mu.Unlock() +- +- var err error +- select { +- case <-ctx.Done(): +- err = ctx.Err() +- case res := <-cond.verdict: +- if res.verdict != Met { +- err = fmt.Errorf("the following condition is %s:\n%s\nreason:\n%s", +- res.verdict, indent(expectation.Description), indent(res.reason)) +- } +- } +- return err +-} +- +-// indent indents all lines of msg, including the first. +-func indent(msg string) string { +- const prefix = " " +- return prefix + strings.ReplaceAll(msg, "\n", "\n"+prefix) +-} +- +-// CleanModCache cleans the specified GOMODCACHE. +-// +-// TODO(golang/go#74595): this is only necessary as the module cache cleaning of the +-// sandbox does not respect GOMODCACHE set via EnvVars. We should fix this, but +-// that is probably part of a larger refactoring of the sandbox that I'm not +-// inclined to undertake. --rfindley. +-// +-// (For similar problems caused by the same bug, see Test_issue38211; see also +-// comment in Sandbox.Env.) +-func CleanModCache(t *testing.T, modcache string) { +- cmd := exec.Command("go", "clean", "-modcache") +- cmd.Env = append(os.Environ(), "GOMODCACHE="+modcache, "GOTOOLCHAIN=local") +- if output, err := cmd.CombinedOutput(); err != nil { +- t.Errorf("cleaning modcache: %v\noutput:\n%s", err, string(output)) +- } +-} +diff -urN a/gopls/internal/test/integration/env_test.go b/gopls/internal/test/integration/env_test.go +--- a/gopls/internal/test/integration/env_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/integration/env_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,71 +0,0 @@ +-// Copyright 2020 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. 
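As a rough sketch of how the Awaiter is wired up outside the Run helper (sandbox, connector, ctx, and t are assumed to exist; their construction is elided here):

	// Feed server notifications into the Awaiter so that expectations can be
	// checked against the accumulated client state.
	awaiter := NewAwaiter(sandbox.Workdir)
	editor, err := fake.NewEditor(sandbox, fake.EditorConfig{}).Connect(ctx, connector, awaiter.Hooks())
	if err != nil {
		t.Fatal(err)
	}
	// Block until no $/progress work is outstanding, ignoring the telemetry prompt.
	if err := awaiter.Await(ctx, NoOutstandingWork(IgnoreTelemetryPromptWork)); err != nil {
		t.Fatal(err)
	}
	if err := editor.Close(ctx); err != nil {
		t.Fatal(err)
	}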
+- +-package integration +- +-import ( +- "context" +- "encoding/json" +- "testing" +- +- "golang.org/x/tools/gopls/internal/protocol" +-) +- +-func TestProgressUpdating(t *testing.T) { +- a := &Awaiter{ +- state: State{ +- work: make(map[protocol.ProgressToken]*workProgress), +- startedWork: make(map[string]uint64), +- completedWork: make(map[string]uint64), +- }, +- } +- ctx := context.Background() +- if err := a.onWorkDoneProgressCreate(ctx, &protocol.WorkDoneProgressCreateParams{ +- Token: "foo", +- }); err != nil { +- t.Fatal(err) +- } +- if err := a.onWorkDoneProgressCreate(ctx, &protocol.WorkDoneProgressCreateParams{ +- Token: "bar", +- }); err != nil { +- t.Fatal(err) +- } +- updates := []struct { +- token string +- value any +- }{ +- {"foo", protocol.WorkDoneProgressBegin{Kind: "begin", Title: "foo work"}}, +- {"bar", protocol.WorkDoneProgressBegin{Kind: "begin", Title: "bar work"}}, +- {"foo", protocol.WorkDoneProgressEnd{Kind: "end"}}, +- {"bar", protocol.WorkDoneProgressReport{Kind: "report", Percentage: varOf[uint32](42)}}, +- } +- for _, update := range updates { +- params := &protocol.ProgressParams{ +- Token: update.token, +- Value: update.value, +- } +- data, err := json.Marshal(params) +- if err != nil { +- t.Fatal(err) +- } +- var unmarshaled protocol.ProgressParams +- if err := json.Unmarshal(data, &unmarshaled); err != nil { +- t.Fatal(err) +- } +- if err := a.onProgress(ctx, &unmarshaled); err != nil { +- t.Fatal(err) +- } +- } +- if got, want := a.state.completedWork["foo work"], uint64(1); got != want { +- t.Errorf(`completedWork["foo work"] = %d, want %d`, got, want) +- } +- got := *a.state.work["bar"] +- want := workProgress{title: "bar work", percent: 42} +- if got != want { +- t.Errorf("work progress for \"bar\": %v, want %v", got, want) +- } +-} +- +-// varOf returns a new variable whose value is x. +-func varOf[T any](x T) *T { return &x } +diff -urN a/gopls/internal/test/integration/expectation.go b/gopls/internal/test/integration/expectation.go +--- a/gopls/internal/test/integration/expectation.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/integration/expectation.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,894 +0,0 @@ +-// Copyright 2020 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package integration +- +-import ( +- "bytes" +- "fmt" +- "maps" +- "regexp" +- "slices" +- "strings" +- +- "github.com/google/go-cmp/cmp" +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/gopls/internal/server" +- "golang.org/x/tools/gopls/internal/util/constraints" +-) +- +-var ( +- // InitialWorkspaceLoad is an expectation that the workspace initial load has +- // completed. It is verified via workdone reporting. +- InitialWorkspaceLoad = CompletedWork(server.DiagnosticWorkTitle(server.FromInitialWorkspaceLoad), 1, false) +-) +- +-// A Verdict is the result of checking an expectation against the current +-// editor state. +-type Verdict int +- +-// Order matters for the following constants: verdicts are sorted in order of +-// decisiveness. +-const ( +- // Met indicates that an expectation is satisfied by the current state. +- Met Verdict = iota +- // Unmet indicates that an expectation is not currently met, but could be met +- // in the future. +- Unmet +- // Unmeetable indicates that an expectation cannot be satisfied in the +- // future. 
+- Unmeetable +-) +- +-func (v Verdict) String() string { +- switch v { +- case Met: +- return "Met" +- case Unmet: +- return "Unmet" +- case Unmeetable: +- return "Unmeetable" +- } +- return fmt.Sprintf("unrecognized verdict %d", v) +-} +- +-// An Expectation is an expected property of the state of the LSP client. +-// The Check function reports whether the property is met. +-// +-// Expectations are combinators. By composing them, tests may express +-// complex expectations in terms of simpler ones. +-type Expectation struct { +- // Check returns the verdict of this expectation for the given state. +- // If the vertict is not [Met], the second result should return a reason +- // that the verdict is not (yet) met. +- Check func(State) (Verdict, string) +- +- // Description holds a noun-phrase identifying what the expectation checks. +- // +- // TODO(rfindley): revisit existing descriptions to ensure they compose nicely. +- Description string +-} +- +-// OnceMet returns an Expectation that, once the precondition is met, asserts +-// that mustMeet is met. +-func OnceMet(pre, post Expectation) Expectation { +- check := func(s State) (Verdict, string) { +- switch v, why := pre.Check(s); v { +- case Unmeetable, Unmet: +- return v, fmt.Sprintf("precondition is %s: %s", v, why) +- case Met: +- v, why := post.Check(s) +- if v != Met { +- return Unmeetable, fmt.Sprintf("postcondition is not met:\n%s", indent(why)) +- } +- return Met, "" +- default: +- panic(fmt.Sprintf("unknown precondition verdict %s", v)) +- } +- } +- desc := fmt.Sprintf("once the following is met:\n%s\nmust have:\n%s", +- indent(pre.Description), indent(post.Description)) +- return Expectation{ +- Check: check, +- Description: desc, +- } +-} +- +-// Not inverts the sense of an expectation: a met expectation is unmet, and an +-// unmet expectation is met. +-func Not(e Expectation) Expectation { +- check := func(s State) (Verdict, string) { +- switch v, _ := e.Check(s); v { +- case Met: +- return Unmet, "condition unexpectedly satisfied" +- case Unmet, Unmeetable: +- return Met, "" +- default: +- panic(fmt.Sprintf("unexpected verdict %v", v)) +- } +- } +- return Expectation{ +- Check: check, +- Description: fmt.Sprintf("not: %s", e.Description), +- } +-} +- +-// AnyOf returns an expectation that is satisfied when any of the given +-// expectations is met. +-func AnyOf(anyOf ...Expectation) Expectation { +- if len(anyOf) == 1 { +- return anyOf[0] // avoid unnecessary boilerplate +- } +- check := func(s State) (Verdict, string) { +- for _, e := range anyOf { +- verdict, _ := e.Check(s) +- if verdict == Met { +- return Met, "" +- } +- } +- return Unmet, "none of the expectations were met" +- } +- description := describeExpectations(anyOf...) +- return Expectation{ +- Check: check, +- Description: fmt.Sprintf("any of:\n%s", description), +- } +-} +- +-// AllOf expects that all given expectations are met. +-func AllOf(allOf ...Expectation) Expectation { +- if len(allOf) == 1 { +- return allOf[0] // avoid unnecessary boilerplate +- } +- check := func(s State) (Verdict, string) { +- var ( +- verdict = Met +- reason string +- ) +- for _, e := range allOf { +- v, why := e.Check(s) +- if v > verdict { +- verdict = v +- reason = why +- } +- } +- return verdict, reason +- } +- desc := describeExpectations(allOf...) 
+- return Expectation{ +- Check: check, +- Description: fmt.Sprintf("all of:\n%s", indent(desc)), +- } +-} +- +-func describeExpectations(expectations ...Expectation) string { +- var descriptions []string +- for _, e := range expectations { +- descriptions = append(descriptions, e.Description) +- } +- return strings.Join(descriptions, "\n") +-} +- +-// ReadDiagnostics is an Expectation that stores the current diagnostics for +-// fileName in into, whenever it is evaluated. +-// +-// It can be used in combination with OnceMet or AfterChange to capture the +-// state of diagnostics when other expectations are satisfied. +-func ReadDiagnostics(fileName string, into *protocol.PublishDiagnosticsParams) Expectation { +- check := func(s State) (Verdict, string) { +- diags, ok := s.diagnostics[fileName] +- if !ok { +- return Unmeetable, fmt.Sprintf("no diagnostics for %q", fileName) +- } +- *into = *diags +- return Met, "" +- } +- return Expectation{ +- Check: check, +- Description: fmt.Sprintf("read diagnostics for %q", fileName), +- } +-} +- +-// ReadAllDiagnostics is an expectation that stores all published diagnostics +-// into the provided map, whenever it is evaluated. +-// +-// It can be used in combination with OnceMet or AfterChange to capture the +-// state of diagnostics when other expectations are satisfied. +-func ReadAllDiagnostics(into *map[string]*protocol.PublishDiagnosticsParams) Expectation { +- check := func(s State) (Verdict, string) { +- allDiags := maps.Clone(s.diagnostics) +- *into = allDiags +- return Met, "" +- } +- return Expectation{ +- Check: check, +- Description: "read all diagnostics", +- } +-} +- +-// ShownDocument asserts that the client has received a +-// ShowDocumentRequest for the given URI. +-func ShownDocument(uri protocol.URI) Expectation { +- check := func(s State) (Verdict, string) { +- for _, params := range s.showDocument { +- if params.URI == uri { +- return Met, "" +- } +- } +- return Unmet, fmt.Sprintf("no ShowDocumentRequest received for %s", uri) +- } +- return Expectation{ +- Check: check, +- Description: fmt.Sprintf("received window/showDocument for URI %s", uri), +- } +-} +- +-// ShownDocuments is an expectation that appends each showDocument +-// request into the provided slice, whenever it is evaluated. +-// +-// It can be used in combination with OnceMet or AfterChange to +-// capture the set of showDocument requests when other expectations +-// are satisfied. +-func ShownDocuments(into *[]*protocol.ShowDocumentParams) Expectation { +- check := func(s State) (Verdict, string) { +- *into = append(*into, s.showDocument...) +- return Met, "" +- } +- return Expectation{ +- Check: check, +- Description: "read shown documents", +- } +-} +- +-// NoShownMessage asserts that the editor has not received a ShowMessage. +-func NoShownMessage(containing string) Expectation { +- check := func(s State) (Verdict, string) { +- for _, m := range s.showMessage { +- if strings.Contains(m.Message, containing) { +- // Format the message (which may contain newlines) as a block quote. 
+- msg := fmt.Sprintf("\"\"\"\n%s\n\"\"\"", strings.TrimSpace(m.Message)) +- return Unmeetable, fmt.Sprintf("observed the following message:\n%s", indent(msg)) +- } +- } +- return Met, "" +- } +- var desc string +- if containing != "" { +- desc = fmt.Sprintf("received no ShowMessage containing %q", containing) +- } else { +- desc = "received no ShowMessage requests" +- } +- return Expectation{ +- Check: check, +- Description: desc, +- } +-} +- +-// ShownMessage asserts that the editor has received a ShowMessageRequest +-// containing the given substring. +-func ShownMessage(containing string) Expectation { +- check := func(s State) (Verdict, string) { +- for _, m := range s.showMessage { +- if strings.Contains(m.Message, containing) { +- return Met, "" +- } +- } +- return Unmet, fmt.Sprintf("no ShowMessage containing %q", containing) +- } +- return Expectation{ +- Check: check, +- Description: fmt.Sprintf("received window/showMessage containing %q", containing), +- } +-} +- +-// ShownMessageRequest asserts that the editor has received a +-// ShowMessageRequest with message matching the given regular expression. +-func ShownMessageRequest(matchingRegexp string) Expectation { +- msgRE := regexp.MustCompile(matchingRegexp) +- check := func(s State) (Verdict, string) { +- if len(s.showMessageRequest) == 0 { +- return Unmet, "no ShowMessageRequest have been received" +- } +- for _, m := range s.showMessageRequest { +- if msgRE.MatchString(m.Message) { +- return Met, "" +- } +- } +- return Unmet, fmt.Sprintf("no ShowMessageRequest (out of %d) match %q", len(s.showMessageRequest), matchingRegexp) +- } +- return Expectation{ +- Check: check, +- Description: fmt.Sprintf("ShowMessageRequest matching %q", matchingRegexp), +- } +-} +- +-// DoneDiagnosingChanges expects that diagnostics are complete from common +-// change notifications: didOpen, didChange, didSave, didChangeWatchedFiles, +-// and didClose. +-// +-// This can be used when multiple notifications may have been sent, such as +-// when a didChange is immediately followed by a didSave. It is insufficient to +-// simply await NoOutstandingWork, because the LSP client has no control over +-// when the server starts processing a notification. Therefore, we must keep +-// track of +-func (e *Env) DoneDiagnosingChanges() Expectation { +- stats := e.Editor.Stats() +- statsBySource := map[server.ModificationSource]uint64{ +- server.FromDidOpen: stats.DidOpen, +- server.FromDidChange: stats.DidChange, +- server.FromDidSave: stats.DidSave, +- server.FromDidChangeWatchedFiles: stats.DidChangeWatchedFiles, +- server.FromDidClose: stats.DidClose, +- server.FromDidChangeConfiguration: stats.DidChangeConfiguration, +- } +- +- var expected []server.ModificationSource +- for k, v := range statsBySource { +- if v > 0 { +- expected = append(expected, k) +- } +- } +- +- // Sort for stability. +- slices.Sort(expected) +- +- var all []Expectation +- for _, source := range expected { +- all = append(all, CompletedWork(server.DiagnosticWorkTitle(source), statsBySource[source], true)) +- } +- +- return AllOf(all...) +-} +- +-// AfterChange expects that the given expectations will be met after all +-// state-changing notifications have been processed by the server. 
+-// Specifically, it awaits the awaits completion of the process of diagnosis +-// after the following notifications, before checking the given expectations: +-// - textDocument/didOpen +-// - textDocument/didChange +-// - textDocument/didSave +-// - textDocument/didClose +-// - workspace/didChangeWatchedFiles +-// - workspace/didChangeConfiguration +-func (e *Env) AfterChange(expectations ...Expectation) { +- e.TB.Helper() +- e.OnceMet( +- e.DoneDiagnosingChanges(), +- expectations..., +- ) +-} +- +-// DoneWithOpen expects all didOpen notifications currently sent by the editor +-// to be completely processed. +-func (e *Env) DoneWithOpen() Expectation { +- opens := e.Editor.Stats().DidOpen +- return CompletedWork(server.DiagnosticWorkTitle(server.FromDidOpen), opens, true) +-} +- +-// StartedChange expects that the server has at least started processing all +-// didChange notifications sent from the client. +-func (e *Env) StartedChange() Expectation { +- changes := e.Editor.Stats().DidChange +- return StartedWork(server.DiagnosticWorkTitle(server.FromDidChange), changes) +-} +- +-// DoneWithChange expects all didChange notifications currently sent by the +-// editor to be completely processed. +-func (e *Env) DoneWithChange() Expectation { +- changes := e.Editor.Stats().DidChange +- return CompletedWork(server.DiagnosticWorkTitle(server.FromDidChange), changes, true) +-} +- +-// DoneWithSave expects all didSave notifications currently sent by the editor +-// to be completely processed. +-func (e *Env) DoneWithSave() Expectation { +- saves := e.Editor.Stats().DidSave +- return CompletedWork(server.DiagnosticWorkTitle(server.FromDidSave), saves, true) +-} +- +-// StartedChangeWatchedFiles expects that the server has at least started +-// processing all didChangeWatchedFiles notifications sent from the client. +-func (e *Env) StartedChangeWatchedFiles() Expectation { +- changes := e.Editor.Stats().DidChangeWatchedFiles +- return StartedWork(server.DiagnosticWorkTitle(server.FromDidChangeWatchedFiles), changes) +-} +- +-// DoneWithChangeWatchedFiles expects all didChangeWatchedFiles notifications +-// currently sent by the editor to be completely processed. +-func (e *Env) DoneWithChangeWatchedFiles() Expectation { +- changes := e.Editor.Stats().DidChangeWatchedFiles +- return CompletedWork(server.DiagnosticWorkTitle(server.FromDidChangeWatchedFiles), changes, true) +-} +- +-// DoneWithClose expects all didClose notifications currently sent by the +-// editor to be completely processed. +-func (e *Env) DoneWithClose() Expectation { +- changes := e.Editor.Stats().DidClose +- return CompletedWork(server.DiagnosticWorkTitle(server.FromDidClose), changes, true) +-} +- +-// StartedWork expect a work item to have been started >= atLeast times. +-// +-// See CompletedWork. +-func StartedWork(title string, atLeast uint64) Expectation { +- check := func(s State) (Verdict, string) { +- started := s.startedWork[title] +- if started >= atLeast { +- return Met, "" +- } +- return Unmet, fmt.Sprintf("started work %d %s", started, pluralize("time", started)) +- } +- return Expectation{ +- Check: check, +- Description: fmt.Sprintf("started work %q at least %d %s", title, atLeast, pluralize("time", atLeast)), +- } +-} +- +-// CompletedWork expects a work item to have been completed >= atLeast times. +-// +-// Since the Progress API doesn't include any hidden metadata, we must use the +-// progress notification title to identify the work we expect to be completed. 
+-func CompletedWork(title string, count uint64, atLeast bool) Expectation { +- check := func(s State) (Verdict, string) { +- completed := s.completedWork[title] +- if completed == count || atLeast && completed > count { +- return Met, "" +- } +- return Unmet, fmt.Sprintf("completed %d %s", completed, pluralize("time", completed)) +- } +- desc := fmt.Sprintf("completed work %q %v %s", title, count, pluralize("time", count)) +- if atLeast { +- desc = fmt.Sprintf("completed work %q at least %d %s", title, count, pluralize("time", count)) +- } +- return Expectation{ +- Check: check, +- Description: desc, +- } +-} +- +-// pluralize adds an 's' suffix to name if n > 1. +-func pluralize[T constraints.Integer](name string, n T) string { +- if n > 1 { +- return name + "s" +- } +- return name +-} +- +-type WorkStatus struct { +- // Last seen message from either `begin` or `report` progress. +- Msg string +- // Message sent with `end` progress message. +- EndMsg string +-} +- +-// CompletedProgressToken expects that workDone progress is complete for the given +-// progress token. When non-nil WorkStatus is provided, it will be filled +-// when the expectation is met. +-// +-// If the token is not a progress token that the client has seen, this +-// expectation is Unmeetable. +-func CompletedProgressToken(token protocol.ProgressToken, into *WorkStatus) Expectation { +- check := func(s State) (Verdict, string) { +- work, ok := s.work[token] +- if !ok { +- return Unmeetable, "no matching work items" +- } +- if work.complete { +- if into != nil { +- into.Msg = work.msg +- into.EndMsg = work.endMsg +- } +- return Met, "" +- } +- return Unmet, fmt.Sprintf("work is not complete; last message: %q", work.msg) +- } +- return Expectation{ +- Check: check, +- Description: fmt.Sprintf("completed work for token %v", token), +- } +-} +- +-// CompletedProgress expects that there is exactly one workDone progress with +-// the given title, and is satisfied when that progress completes. If it is +-// met, the corresponding status is written to the into argument. +-// +-// TODO(rfindley): refactor to eliminate the redundancy with CompletedWork. +-// This expectation is a vestige of older workarounds for asynchronous command +-// execution. +-func CompletedProgress(title string, into *WorkStatus) Expectation { +- check := func(s State) (Verdict, string) { +- var work *workProgress +- for _, w := range s.work { +- if w.title == title { +- if work != nil { +- return Unmeetable, "multiple matching work items" +- } +- work = w +- } +- } +- if work == nil { +- return Unmeetable, "no matching work items" +- } +- if work.complete { +- if into != nil { +- into.Msg = work.msg +- into.EndMsg = work.endMsg +- } +- return Met, "" +- } +- return Unmet, fmt.Sprintf("work is not complete; last message: %q", work.msg) +- } +- desc := fmt.Sprintf("exactly 1 completed workDoneProgress with title %v", title) +- return Expectation{ +- Check: check, +- Description: desc, +- } +-} +- +-// OutstandingWork expects a work item to be outstanding. The given title must +-// be an exact match, whereas the given msg must only be contained in the work +-// item's message. 
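For example, a test might wait for a work item to have both started and completed at least once; the title "Running go vet" is a placeholder, since real titles come from the server's progress notifications:

	env.Await(AllOf(
		StartedWork("Running go vet", 1),
		CompletedWork("Running go vet", 1, true),
	))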
+-func OutstandingWork(title, msg string) Expectation { +- check := func(s State) (Verdict, string) { +- for _, work := range s.work { +- if work.complete { +- continue +- } +- if work.title == title && strings.Contains(work.msg, msg) { +- return Met, "" +- } +- } +- return Unmet, "no matching work" +- } +- return Expectation{ +- Check: check, +- Description: fmt.Sprintf("outstanding work: %q containing %q", title, msg), +- } +-} +- +-// NoOutstandingWork asserts that there is no work initiated using the LSP +-// $/progress API that has not completed. +-// +-// If non-nil, the ignore func is used to ignore certain work items for the +-// purpose of this check. +-// +-// TODO(rfindley): consider refactoring to treat outstanding work the same way +-// we treat diagnostics: with an algebra of filters. +-func NoOutstandingWork(ignore func(title, msg string) bool) Expectation { +- check := func(s State) (Verdict, string) { +- for _, w := range s.work { +- if w.complete { +- continue +- } +- if w.title == "" { +- // A token that has been created but not yet used. +- // +- // TODO(rfindley): this should be separated in the data model: until +- // the "begin" notification, work should not be in progress. +- continue +- } +- if ignore != nil && ignore(w.title, w.msg) { +- continue +- } +- return Unmet, fmt.Sprintf("found outstanding work %q: %q", w.title, w.msg) +- } +- return Met, "" +- } +- return Expectation{ +- Check: check, +- Description: "no outstanding work", +- } +-} +- +-// IgnoreTelemetryPromptWork may be used in conjunction with NoOutStandingWork +-// to ignore the telemetry prompt. +-func IgnoreTelemetryPromptWork(title, msg string) bool { +- return title == server.TelemetryPromptWorkTitle +-} +- +-// NoErrorLogs asserts that the client has not received any log messages of +-// error severity. +-func NoErrorLogs() Expectation { +- return NoLogMatching(protocol.Error, "") +-} +- +-// LogMatching asserts that the client has received a log message +-// of type typ matching the regexp re a certain number of times. +-// +-// The count argument specifies the expected number of matching logs. If +-// atLeast is set, this is a lower bound, otherwise there must be exactly count +-// matching logs. +-// +-// Logs are asynchronous to other LSP messages, so this expectation should not +-// be used with combinators such as OnceMet or AfterChange that assert on +-// ordering with respect to other operations. +-func LogMatching(typ protocol.MessageType, re string, count int, atLeast bool) Expectation { +- rec, err := regexp.Compile(re) +- if err != nil { +- panic(err) +- } +- check := func(state State) (Verdict, string) { +- var found int +- for _, msg := range state.logs { +- if msg.Type == typ && rec.Match([]byte(msg.Message)) { +- found++ +- } +- } +- // Check for an exact or "at least" match. +- if found == count || (found >= count && atLeast) { +- return Met, "" +- } +- // If we require an exact count, and have received more than expected, the +- // expectation can never be met. 
+- verdict := Unmet +- if found > count && !atLeast { +- verdict = Unmeetable +- } +- return verdict, fmt.Sprintf("found %d matching logs", found) +- } +- desc := fmt.Sprintf("log message matching %q expected %v times", re, count) +- if atLeast { +- desc = fmt.Sprintf("log message matching %q expected at least %v times", re, count) +- } +- return Expectation{ +- Check: check, +- Description: desc, +- } +-} +- +-// NoLogMatching asserts that the client has not received a log message +-// of type typ matching the regexp re. If re is an empty string, any log +-// message is considered a match. +-func NoLogMatching(typ protocol.MessageType, re string) Expectation { +- var r *regexp.Regexp +- if re != "" { +- var err error +- r, err = regexp.Compile(re) +- if err != nil { +- panic(err) +- } +- } +- check := func(state State) (Verdict, string) { +- for _, msg := range state.logs { +- if msg.Type != typ { +- continue +- } +- if r == nil || r.Match([]byte(msg.Message)) { +- return Unmeetable, fmt.Sprintf("found matching log %q", msg.Message) +- } +- } +- return Met, "" +- } +- desc := fmt.Sprintf("no %s log messages", typ) +- if re != "" { +- desc += fmt.Sprintf(" matching %q", re) +- } +- return Expectation{ +- Check: check, +- Description: desc, +- } +-} +- +-// FileWatchMatching expects that a file registration matches re. +-func FileWatchMatching(re string) Expectation { +- return Expectation{ +- Check: checkFileWatch(re, Met, Unmet), +- Description: fmt.Sprintf("file watch matching %q", re), +- } +-} +- +-// NoFileWatchMatching expects that no file registration matches re. +-func NoFileWatchMatching(re string) Expectation { +- return Expectation{ +- Check: checkFileWatch(re, Unmet, Met), +- Description: fmt.Sprintf("no file watch matching %q", re), +- } +-} +- +-func checkFileWatch(re string, onMatch, onNoMatch Verdict) func(State) (Verdict, string) { +- rec := regexp.MustCompile(re) +- return func(s State) (Verdict, string) { +- r := s.registeredCapabilities["workspace/didChangeWatchedFiles"] +- watchers := jsonProperty(r.RegisterOptions, "watchers").([]any) +- for _, watcher := range watchers { +- pattern := jsonProperty(watcher, "globPattern").(string) +- if rec.MatchString(pattern) { +- return onMatch, fmt.Sprintf("matches watcher pattern %q", pattern) +- } +- } +- return onNoMatch, "no matching watchers" +- } +-} +- +-// jsonProperty extracts a value from a path of JSON property names, assuming +-// the default encoding/json unmarshaling to the empty interface (i.e.: that +-// JSON objects are unmarshalled as map[string]interface{}) +-// +-// For example, if obj is unmarshalled from the following json: +-// +-// { +-// "foo": { "bar": 3 } +-// } +-// +-// Then jsonProperty(obj, "foo", "bar") will be 3. +-func jsonProperty(obj any, path ...string) any { +- if len(path) == 0 || obj == nil { +- return obj +- } +- m := obj.(map[string]any) +- return jsonProperty(m[path[0]], path[1:]...) +-} +- +-func formatDiagnostic(d protocol.Diagnostic) string { +- return fmt.Sprintf("%d:%d [%s]: %s\n", d.Range.Start.Line, d.Range.Start.Character, d.Source, d.Message) +-} +- +-// Diagnostics asserts that there is at least one diagnostic matching the given +-// filters. 
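For instance, a sketch that combines several filters and also checks that another file is clean (the paths, source, and message substring are placeholders):

	env.AfterChange(
		// At least one diagnostic in a/a.go from the given source and message.
		Diagnostics(ForFile("a/a.go"), FromSource("compiler"), WithMessage("undefined")),
		// And no diagnostics at all for b/b.go.
		NoDiagnostics(ForFile("b/b.go")),
	)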
+-func Diagnostics(filters ...DiagnosticFilter) Expectation { +- check := func(s State) (Verdict, string) { +- diags := flattenDiagnostics(s) +- for _, filter := range filters { +- var filtered []flatDiagnostic +- for _, d := range diags { +- if filter.check(d.name, d.diag) { +- filtered = append(filtered, d) +- } +- } +- if len(filtered) == 0 { +- // Reprinting the description of the filters is too verbose. +- // +- // We can probably do better here, but for now just format the +- // diagnostics. +- var b bytes.Buffer +- for name, params := range s.diagnostics { +- fmt.Fprintf(&b, "\t%s (version %d):\n", name, params.Version) +- for _, d := range params.Diagnostics { +- fmt.Fprintf(&b, "\t\t%s", formatDiagnostic(d)) +- } +- } +- return Unmet, fmt.Sprintf("diagnostics:\n%s", b.String()) +- } +- diags = filtered +- } +- return Met, "" +- } +- var descs []string +- for _, filter := range filters { +- descs = append(descs, filter.desc) +- } +- return Expectation{ +- Check: check, +- Description: "any diagnostics " + strings.Join(descs, ", "), +- } +-} +- +-// NoDiagnostics asserts that there are no diagnostics matching the given +-// filters. Notably, if no filters are supplied this assertion checks that +-// there are no diagnostics at all, for any file. +-func NoDiagnostics(filters ...DiagnosticFilter) Expectation { +- check := func(s State) (Verdict, string) { +- diags := flattenDiagnostics(s) +- for _, filter := range filters { +- var filtered []flatDiagnostic +- for _, d := range diags { +- if filter.check(d.name, d.diag) { +- filtered = append(filtered, d) +- } +- } +- diags = filtered +- } +- if len(diags) > 0 { +- d := diags[0] +- why := fmt.Sprintf("have diagnostic: %s: %v", d.name, formatDiagnostic(d.diag)) +- return Unmet, why +- } +- return Met, "" +- } +- var descs []string +- for _, filter := range filters { +- descs = append(descs, filter.desc) +- } +- return Expectation{ +- Check: check, +- Description: "no diagnostics " + strings.Join(descs, ", "), +- } +-} +- +-type flatDiagnostic struct { +- name string +- diag protocol.Diagnostic +-} +- +-func flattenDiagnostics(state State) []flatDiagnostic { +- var result []flatDiagnostic +- for name, diags := range state.diagnostics { +- for _, diag := range diags.Diagnostics { +- result = append(result, flatDiagnostic{name, diag}) +- } +- } +- return result +-} +- +-// -- Diagnostic filters -- +- +-// A DiagnosticFilter filters the set of diagnostics, for assertion with +-// Diagnostics or NoDiagnostics. +-type DiagnosticFilter struct { +- desc string +- check func(name string, _ protocol.Diagnostic) bool +-} +- +-// ForFile filters to diagnostics matching the sandbox-relative file name. +-func ForFile(name string) DiagnosticFilter { +- return DiagnosticFilter{ +- desc: fmt.Sprintf("for file %q", name), +- check: func(diagName string, _ protocol.Diagnostic) bool { +- return diagName == name +- }, +- } +-} +- +-// FromSource filters to diagnostics matching the given diagnostics source. +-func FromSource(source string) DiagnosticFilter { +- return DiagnosticFilter{ +- desc: fmt.Sprintf("with source %q", source), +- check: func(_ string, d protocol.Diagnostic) bool { +- return d.Source == source +- }, +- } +-} +- +-// AtRegexp filters to diagnostics in the file with sandbox-relative path name, +-// at the first position matching the given regexp pattern. +-// +-// TODO(rfindley): pass in the editor to expectations, so that they may depend +-// on editor state and AtRegexp can be a function rather than a method. 
+-func (e *Env) AtRegexp(name, pattern string) DiagnosticFilter { +- loc := e.RegexpSearch(name, pattern) +- return DiagnosticFilter{ +- desc: fmt.Sprintf("at the first position (%v) matching %#q in %q", loc.Range.Start, pattern, name), +- check: func(diagName string, d protocol.Diagnostic) bool { +- return diagName == name && d.Range.Start == loc.Range.Start +- }, +- } +-} +- +-// AtPosition filters to diagnostics at location name:line:character, for a +-// sandbox-relative path name. +-// +-// Line and character are 0-based, and character measures UTF-16 codes. +-// +-// Note: prefer the more readable AtRegexp. +-func AtPosition(name string, line, character uint32) DiagnosticFilter { +- pos := protocol.Position{Line: line, Character: character} +- return DiagnosticFilter{ +- desc: fmt.Sprintf("at %s:%d:%d", name, line, character), +- check: func(diagName string, d protocol.Diagnostic) bool { +- return diagName == name && d.Range.Start == pos +- }, +- } +-} +- +-// WithMessage filters to diagnostics whose message contains the given +-// substring. +-func WithMessage(substring string) DiagnosticFilter { +- return DiagnosticFilter{ +- desc: fmt.Sprintf("with message containing %q", substring), +- check: func(_ string, d protocol.Diagnostic) bool { +- return strings.Contains(d.Message, substring) +- }, +- } +-} +- +-// WithSeverityTags filters to diagnostics whose severity and tags match +-// the given expectation. +-func WithSeverityTags(diagName string, severity protocol.DiagnosticSeverity, tags []protocol.DiagnosticTag) DiagnosticFilter { +- return DiagnosticFilter{ +- desc: fmt.Sprintf("with diagnostic %q with severity %q and tag %#q", diagName, severity, tags), +- check: func(_ string, d protocol.Diagnostic) bool { +- return d.Source == diagName && d.Severity == severity && cmp.Equal(d.Tags, tags) +- }, +- } +-} +diff -urN a/gopls/internal/test/integration/fake/client.go b/gopls/internal/test/integration/fake/client.go +--- a/gopls/internal/test/integration/fake/client.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/integration/fake/client.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,225 +0,0 @@ +-// Copyright 2020 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package fake +- +-import ( +- "context" +- "encoding/json" +- "fmt" +- "path" +- "path/filepath" +- "sync/atomic" +- +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/gopls/internal/test/integration/fake/glob" +-) +- +-// ClientHooks are a set of optional hooks called during handling of +-// the corresponding client method (see protocol.Client for the +-// LSP server-to-client RPCs) in order to make test expectations +-// awaitable. 
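As a minimal sketch, a test could supply only the hooks it cares about and leave the rest nil; the logging below is purely illustrative, and the resulting value would be passed to Editor.Connect, as the integration package's Awaiter does via Awaiter.Hooks:

	hooks := fake.ClientHooks{
		OnDiagnostics: func(_ context.Context, p *protocol.PublishDiagnosticsParams) error {
			// Observe each publishDiagnostics notification from the server.
			log.Printf("%d diagnostics for %s", len(p.Diagnostics), p.URI)
			return nil
		},
	}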
+-type ClientHooks struct { +- OnLogMessage func(context.Context, *protocol.LogMessageParams) error +- OnDiagnostics func(context.Context, *protocol.PublishDiagnosticsParams) error +- OnWorkDoneProgressCreate func(context.Context, *protocol.WorkDoneProgressCreateParams) error +- OnProgress func(context.Context, *protocol.ProgressParams) error +- OnShowDocument func(context.Context, *protocol.ShowDocumentParams) error +- OnShowMessage func(context.Context, *protocol.ShowMessageParams) error +- OnShowMessageRequest func(context.Context, *protocol.ShowMessageRequestParams) error +- OnRegisterCapability func(context.Context, *protocol.RegistrationParams) error +- OnUnregisterCapability func(context.Context, *protocol.UnregistrationParams) error +-} +- +-// Client is an implementation of the [protocol.Client] interface +-// based on the test's fake [Editor]. It mostly delegates +-// functionality to hooks that can be configured by tests. +-type Client struct { +- editor *Editor +- hooks ClientHooks +- onApplyEdit atomic.Pointer[ApplyEditHandler] // hook for marker tests to intercept edits +-} +- +-type ApplyEditHandler = func(context.Context, *protocol.WorkspaceEdit) error +- +-// SetApplyEditHandler sets the (non-nil) handler for ApplyEdit +-// downcalls, and returns a function to restore the previous one. +-// Use it around client-to-server RPCs to capture the edits. +-// The default handler is c.Editor.onApplyEdit +-func (c *Client) SetApplyEditHandler(h ApplyEditHandler) func() { +- if h == nil { +- panic("h is nil") +- } +- prev := c.onApplyEdit.Swap(&h) +- return func() { +- if c.onApplyEdit.Swap(prev) != &h { +- panic("improper nesting of SetApplyEditHandler, restore") +- } +- } +-} +- +-func (c *Client) CodeLensRefresh(context.Context) error { return nil } +- +-func (c *Client) InlayHintRefresh(context.Context) error { return nil } +- +-func (c *Client) DiagnosticRefresh(context.Context) error { return nil } +- +-func (c *Client) FoldingRangeRefresh(context.Context) error { return nil } +- +-func (c *Client) InlineValueRefresh(context.Context) error { return nil } +- +-func (c *Client) SemanticTokensRefresh(context.Context) error { return nil } +- +-func (c *Client) LogTrace(context.Context, *protocol.LogTraceParams) error { return nil } +- +-func (c *Client) TextDocumentContentRefresh(context.Context, *protocol.TextDocumentContentRefreshParams) error { +- return nil +-} +- +-func (c *Client) ShowMessage(ctx context.Context, params *protocol.ShowMessageParams) error { +- if c.hooks.OnShowMessage != nil { +- return c.hooks.OnShowMessage(ctx, params) +- } +- return nil +-} +- +-func (c *Client) ShowMessageRequest(ctx context.Context, params *protocol.ShowMessageRequestParams) (*protocol.MessageActionItem, error) { +- if c.hooks.OnShowMessageRequest != nil { +- if err := c.hooks.OnShowMessageRequest(ctx, params); err != nil { +- return nil, err +- } +- } +- if c.editor.config.MessageResponder != nil { +- return c.editor.config.MessageResponder(params) +- } +- return nil, nil // don't choose, which is effectively dismissing the message +-} +- +-func (c *Client) LogMessage(ctx context.Context, params *protocol.LogMessageParams) error { +- if c.hooks.OnLogMessage != nil { +- return c.hooks.OnLogMessage(ctx, params) +- } +- return nil +-} +- +-func (c *Client) Event(ctx context.Context, event *any) error { +- return nil +-} +- +-func (c *Client) PublishDiagnostics(ctx context.Context, params *protocol.PublishDiagnosticsParams) error { +- if c.hooks.OnDiagnostics != nil { +- return 
c.hooks.OnDiagnostics(ctx, params) +- } +- return nil +-} +- +-func (c *Client) WorkspaceFolders(context.Context) ([]protocol.WorkspaceFolder, error) { +- return []protocol.WorkspaceFolder{}, nil +-} +- +-func (c *Client) Configuration(_ context.Context, p *protocol.ParamConfiguration) ([]any, error) { +- results := make([]any, len(p.Items)) +- for i, item := range p.Items { +- if item.ScopeURI != nil && *item.ScopeURI == "" { +- return nil, fmt.Errorf(`malformed ScopeURI ""`) +- } +- if item.Section == "gopls" { +- config := c.editor.Config() +- results[i] = makeSettings(c.editor.sandbox, config, item.ScopeURI) +- } +- } +- return results, nil +-} +- +-func (c *Client) RegisterCapability(ctx context.Context, params *protocol.RegistrationParams) error { +- if c.hooks.OnRegisterCapability != nil { +- if err := c.hooks.OnRegisterCapability(ctx, params); err != nil { +- return err +- } +- } +- // Update file watching patterns. +- // +- // TODO(rfindley): We could verify more here, like verify that the +- // registration ID is distinct, and that the capability is not currently +- // registered. +- for _, registration := range params.Registrations { +- if registration.Method == "workspace/didChangeWatchedFiles" { +- // Marshal and unmarshal to interpret RegisterOptions as +- // DidChangeWatchedFilesRegistrationOptions. +- raw, err := json.Marshal(registration.RegisterOptions) +- if err != nil { +- return fmt.Errorf("marshaling registration options: %v", err) +- } +- var opts protocol.DidChangeWatchedFilesRegistrationOptions +- if err := json.Unmarshal(raw, &opts); err != nil { +- return fmt.Errorf("unmarshaling registration options: %v", err) +- } +- var globs []*glob.Glob +- for _, watcher := range opts.Watchers { +- var globPattern string +- switch pattern := watcher.GlobPattern.Value.(type) { +- case protocol.Pattern: +- globPattern = pattern +- case protocol.RelativePattern: +- globPattern = path.Join(filepath.ToSlash(pattern.BaseURI.Path()), pattern.Pattern) +- } +- // TODO(rfindley): honor the watch kind. 
+- g, err := glob.Parse(globPattern) +- if err != nil { +- return fmt.Errorf("error parsing glob pattern %q: %v", watcher.GlobPattern, err) +- } +- globs = append(globs, g) +- } +- c.editor.mu.Lock() +- c.editor.watchPatterns = globs +- c.editor.mu.Unlock() +- } +- } +- return nil +-} +- +-func (c *Client) UnregisterCapability(ctx context.Context, params *protocol.UnregistrationParams) error { +- if c.hooks.OnUnregisterCapability != nil { +- return c.hooks.OnUnregisterCapability(ctx, params) +- } +- return nil +-} +- +-func (c *Client) Progress(ctx context.Context, params *protocol.ProgressParams) error { +- if c.hooks.OnProgress != nil { +- return c.hooks.OnProgress(ctx, params) +- } +- return nil +-} +- +-func (c *Client) WorkDoneProgressCreate(ctx context.Context, params *protocol.WorkDoneProgressCreateParams) error { +- if c.hooks.OnWorkDoneProgressCreate != nil { +- return c.hooks.OnWorkDoneProgressCreate(ctx, params) +- } +- return nil +-} +- +-func (c *Client) ShowDocument(ctx context.Context, params *protocol.ShowDocumentParams) (*protocol.ShowDocumentResult, error) { +- if c.hooks.OnShowDocument != nil { +- if err := c.hooks.OnShowDocument(ctx, params); err != nil { +- return nil, err +- } +- return &protocol.ShowDocumentResult{Success: true}, nil +- } +- return nil, nil +-} +- +-func (c *Client) ApplyEdit(ctx context.Context, params *protocol.ApplyWorkspaceEditParams) (*protocol.ApplyWorkspaceEditResult, error) { +- if len(params.Edit.Changes) > 0 { +- return &protocol.ApplyWorkspaceEditResult{FailureReason: "Edit.Changes is unsupported"}, nil +- } +- onApplyEdit := c.editor.applyWorkspaceEdit +- if ptr := c.onApplyEdit.Load(); ptr != nil { +- onApplyEdit = *ptr +- } +- if err := onApplyEdit(ctx, ¶ms.Edit); err != nil { +- return nil, err +- } +- return &protocol.ApplyWorkspaceEditResult{Applied: true}, nil +-} +diff -urN a/gopls/internal/test/integration/fake/doc.go b/gopls/internal/test/integration/fake/doc.go +--- a/gopls/internal/test/integration/fake/doc.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/integration/fake/doc.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,20 +0,0 @@ +-// Copyright 2020 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-// Package fake provides a fake implementation of an LSP-enabled +-// text editor, its LSP client plugin, and a Sandbox environment for +-// use in integration tests. +-// +-// The Editor type provides a high level API for text editor operations +-// (open/modify/save/close a buffer, jump to definition, etc.), and the Client +-// type exposes an LSP client for the editor that can be connected to a +-// language server. By default, the Editor and Client should be compliant with +-// the LSP spec: their intended use is to verify server compliance with the +-// spec in a variety of environment. Possible future enhancements of these +-// types may allow them to misbehave in configurable ways, but that is not +-// their primary use. +-// +-// The Sandbox type provides a facility for executing tests with a temporary +-// directory, module proxy, and GOPATH. +-package fake +diff -urN a/gopls/internal/test/integration/fake/edit.go b/gopls/internal/test/integration/fake/edit.go +--- a/gopls/internal/test/integration/fake/edit.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/integration/fake/edit.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,42 +0,0 @@ +-// Copyright 2020 The Go Authors. All rights reserved. 
+-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package fake +- +-import ( +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/internal/diff" +-) +- +-// NewEdit creates an edit replacing all content between the 0-based +-// (startLine, startColumn) and (endLine, endColumn) with text. +-// +-// Columns measure UTF-16 codes. +-func NewEdit(startLine, startColumn, endLine, endColumn uint32, text string) protocol.TextEdit { +- return protocol.TextEdit{ +- Range: protocol.Range{ +- Start: protocol.Position{Line: startLine, Character: startColumn}, +- End: protocol.Position{Line: endLine, Character: endColumn}, +- }, +- NewText: text, +- } +-} +- +-// applyEdits applies the edits to a file with the specified lines, +-// and returns a new slice containing the lines of the patched file. +-// It is a wrapper around diff.Apply; see that function for preconditions. +-func applyEdits(mapper *protocol.Mapper, edits []protocol.TextEdit, windowsLineEndings bool) ([]byte, error) { +- diffEdits, err := protocol.EditsToDiffEdits(mapper, edits) +- if err != nil { +- return nil, err +- } +- patched, err := diff.ApplyBytes(mapper.Content, diffEdits) +- if err != nil { +- return nil, err +- } +- if windowsLineEndings { +- patched = toWindowsLineEndings(patched) +- } +- return patched, nil +-} +diff -urN a/gopls/internal/test/integration/fake/editor.go b/gopls/internal/test/integration/fake/editor.go +--- a/gopls/internal/test/integration/fake/editor.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/integration/fake/editor.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,1823 +0,0 @@ +-// Copyright 2020 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package fake +- +-import ( +- "bytes" +- "context" +- "encoding/json" +- "errors" +- "fmt" +- "maps" +- "math/rand/v2" +- "os" +- "path" +- "path/filepath" +- "regexp" +- "slices" +- "strings" +- "sync" +- "time" +- +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/gopls/internal/protocol/command" +- "golang.org/x/tools/gopls/internal/test/integration/fake/glob" +- "golang.org/x/tools/gopls/internal/util/bug" +- "golang.org/x/tools/gopls/internal/util/pathutil" +- "golang.org/x/tools/internal/jsonrpc2" +- "golang.org/x/tools/internal/jsonrpc2/servertest" +- "golang.org/x/tools/internal/xcontext" +-) +- +-// Editor is a fake client editor. It keeps track of client state and can be +-// used for writing LSP tests. +-type Editor struct { +- +- // Server, client, and sandbox are concurrency safe and written only +- // at construction time, so do not require synchronization. +- Server protocol.Server +- cancelConn func() +- serverConn jsonrpc2.Conn +- client *Client +- sandbox *Sandbox +- +- // TODO(rfindley): buffers should be keyed by protocol.DocumentURI. +- mu sync.Mutex +- config EditorConfig // editor configuration +- buffers map[string]buffer // open buffers (relative path -> buffer content) +- watchPatterns []*glob.Glob // glob patterns to watch +- suggestionUseReplaceMode bool +- +- // These fields are populated by Connect. +- serverCapabilities protocol.ServerCapabilities +- semTokOpts protocol.SemanticTokensOptions +- +- // Call metrics for the purpose of expectations. This is done in an ad-hoc +- // manner for now. Perhaps in the future we should do something more +- // systematic. 
Guarded with a separate mutex as calls may need to be accessed +- // asynchronously via callbacks into the Editor. +- callsMu sync.Mutex +- calls CallCounts +-} +- +-// CallCounts tracks the number of protocol notifications of different types. +-type CallCounts struct { +- DidOpen, DidChange, DidSave, DidChangeWatchedFiles, DidClose, DidChangeConfiguration uint64 +-} +- +-// buffer holds information about an open buffer in the editor. +-type buffer struct { +- version int // monotonic version; incremented on edits +- path string // relative path in the workspace +- mapper *protocol.Mapper // buffer content +- dirty bool // if true, content is unsaved (TODO(rfindley): rename this field) +-} +- +-func (b buffer) text() string { +- return string(b.mapper.Content) +-} +- +-// EditorConfig configures the editor's LSP session. This is similar to +-// golang.UserOptions, but we use a separate type here so that we expose only +-// that configuration which we support. +-// +-// The zero value for EditorConfig is the default configuration. +-type EditorConfig struct { +- // ClientName sets the clientInfo.name for the LSP session (in the initialize request). +- // +- // Since this can only be set during initialization, changing this field via +- // Editor.ChangeConfiguration has no effect. +- // +- // If empty, "fake.Editor" is used. +- ClientName string +- +- // Env holds environment variables to apply on top of the default editor +- // environment. When applying these variables, the special string +- // $SANDBOX_WORKDIR is replaced by the absolute path to the sandbox working +- // directory. +- Env map[string]string +- +- // WorkspaceFolders is the workspace folders to configure on the LSP server. +- // Each workspace folder is a file path relative to the sandbox workdir, or +- // a uri (used when testing behavior with virtual file system or non-'file' +- // scheme document uris). +- // +- // As special cases, if WorkspaceFolders is nil the editor defaults to +- // configuring a single workspace folder corresponding to the workdir root. +- // To explicitly send no workspace folders, use an empty (non-nil) slice. +- WorkspaceFolders []string +- +- // NoDefaultWorkspaceFiles is used to specify whether the fake editor +- // should give a default workspace folder when WorkspaceFolders is nil. +- // When it's true, the editor will pass original WorkspaceFolders as is to the LSP server. +- NoDefaultWorkspaceFiles bool +- +- // RelRootPath is the root path which will be converted to rootUri to configure on the LSP server. +- RelRootPath string +- +- // Whether to edit files with windows line endings. +- WindowsLineEndings bool +- +- // Map of language ID -> regexp to match, used to set the file type of new +- // buffers. Applied as an overlay on top of the following defaults: +- // "go" -> ".*\.go" +- // "go.mod" -> "go\.mod" +- // "go.sum" -> "go\.sum" +- // "gotmpl" -> ".*tmpl" +- // "go.s" -> ".*\.s" +- FileAssociations map[protocol.LanguageKind]string +- +- // Settings holds user-provided configuration for the LSP server. +- Settings map[string]any +- +- // FolderSettings holds user-provided per-folder configuration, if any. +- // +- // It maps each folder (as a relative path to the sandbox workdir) to its +- // configuration mapping (like Settings). +- FolderSettings map[string]map[string]any +- +- // CapabilitiesJSON holds JSON client capabilities to overlay over the +- // editor's default client capabilities. 
+- // +- // Specifically, this JSON string will be unmarshalled into the editor's +- // client capabilities struct, before sending to the server. +- CapabilitiesJSON []byte +- +- // If non-nil, MessageResponder is used to respond to ShowMessageRequest +- // messages. +- MessageResponder func(params *protocol.ShowMessageRequestParams) (*protocol.MessageActionItem, error) +- +- // MaxMessageDelay is used for fuzzing message delivery to reproduce test +- // flakes. +- MaxMessageDelay time.Duration +-} +- +-// NewEditor creates a new Editor. +-func NewEditor(sandbox *Sandbox, config EditorConfig) *Editor { +- return &Editor{ +- buffers: make(map[string]buffer), +- sandbox: sandbox, +- config: config, +- } +-} +- +-// Connect configures the editor to communicate with an LSP server on conn. It +-// is not concurrency safe, and should be called at most once, before using the +-// editor. +-// +-// It returns the editor, so that it may be called as follows: +-// +-// editor, err := NewEditor(s).Connect(ctx, conn, hooks) +-func (e *Editor) Connect(ctx context.Context, connector servertest.Connector, hooks ClientHooks) (*Editor, error) { +- bgCtx, cancelConn := context.WithCancel(xcontext.Detach(ctx)) +- conn := connector.Connect(bgCtx) +- e.cancelConn = cancelConn +- +- e.serverConn = conn +- e.Server = protocol.ServerDispatcher(conn) +- e.client = &Client{editor: e, hooks: hooks} +- handler := protocol.ClientHandler(e.client, jsonrpc2.MethodNotFound) +- if e.config.MaxMessageDelay > 0 { +- handler = DelayedHandler(e.config.MaxMessageDelay, handler) +- } +- conn.Go(bgCtx, protocol.Handlers(handler)) +- +- if err := e.initialize(ctx); err != nil { +- return nil, err +- } +- e.sandbox.Workdir.AddWatcher(e.onFileChanges) +- return e, nil +-} +- +-// DelayedHandler waits [0, maxDelay) before handling each message. +-func DelayedHandler(maxDelay time.Duration, handler jsonrpc2.Handler) jsonrpc2.Handler { +- return func(ctx context.Context, reply jsonrpc2.Replier, req jsonrpc2.Request) error { +- delay := time.Duration(rand.Int64N(int64(maxDelay))) +- select { +- case <-ctx.Done(): +- case <-time.After(delay): +- } +- return handler(ctx, reply, req) +- } +-} +- +-func (e *Editor) Stats() CallCounts { +- e.callsMu.Lock() +- defer e.callsMu.Unlock() +- return e.calls +-} +- +-// Shutdown issues the 'shutdown' LSP notification. +-func (e *Editor) Shutdown(ctx context.Context) error { +- if e.Server != nil { +- if err := e.Server.Shutdown(ctx); err != nil { +- return fmt.Errorf("Shutdown: %w", err) +- } +- } +- return nil +-} +- +-// Exit issues the 'exit' LSP notification. +-func (e *Editor) Exit(ctx context.Context) error { +- if e.Server != nil { +- // Not all LSP clients issue the exit RPC, but we do so here to ensure that +- // we gracefully handle it on multi-session servers. +- if err := e.Server.Exit(ctx); err != nil { +- return fmt.Errorf("Exit: %w", err) +- } +- } +- return nil +-} +- +-// Close disconnects the LSP client session. +-// TODO(rfindley): rename to 'Disconnect'. +-func (e *Editor) Close(ctx context.Context) error { +- if err := e.Shutdown(ctx); err != nil { +- return err +- } +- if err := e.Exit(ctx); err != nil { +- return err +- } +- defer func() { +- e.cancelConn() +- }() +- +- // called close on the editor should result in the connection closing +- select { +- case <-e.serverConn.Done(): +- // connection closed itself +- return nil +- case <-ctx.Done(): +- return fmt.Errorf("connection not closed: %w", ctx.Err()) +- } +-} +- +-// Client returns the LSP client for this editor. 
+-func (e *Editor) Client() *Client { +- return e.client +-} +- +-// makeSettings builds the settings map for use in LSP settings RPCs. +-func makeSettings(sandbox *Sandbox, config EditorConfig, scopeURI *protocol.URI) map[string]any { +- env := make(map[string]string) +- maps.Copy(env, sandbox.GoEnv()) +- maps.Copy(env, config.Env) +- for k, v := range env { +- v = strings.ReplaceAll(v, "$SANDBOX_WORKDIR", sandbox.Workdir.RootURI().Path()) +- env[k] = v +- } +- +- settings := map[string]any{ +- "env": env, +- +- // Use verbose progress reporting so that integration tests can assert on +- // asynchronous operations being completed (such as diagnosing a snapshot). +- "verboseWorkDoneProgress": true, +- +- // Set an unlimited completion budget, so that tests don't flake because +- // completions are too slow. +- "completionBudget": "0s", +- } +- +- for k, v := range config.Settings { +- if k == "env" { +- panic("must not provide env via the EditorConfig.Settings field: use the EditorConfig.Env field instead") +- } +- settings[k] = v +- } +- +- // If the server is requesting configuration for a specific scope, apply +- // settings for the nearest folder that has customized settings, if any. +- if scopeURI != nil { +- var ( +- scopePath = protocol.DocumentURI(*scopeURI).Path() +- closestDir string // longest dir with settings containing the scope, if any +- closestSettings map[string]any // settings for that dir, if any +- ) +- for relPath, settings := range config.FolderSettings { +- dir := sandbox.Workdir.AbsPath(relPath) +- if strings.HasPrefix(scopePath+string(filepath.Separator), dir+string(filepath.Separator)) && len(dir) > len(closestDir) { +- closestDir = dir +- closestSettings = settings +- } +- } +- if closestSettings != nil { +- maps.Copy(settings, closestSettings) +- } +- } +- +- return settings +-} +- +-func (e *Editor) initialize(ctx context.Context) error { +- config := e.Config() +- +- clientName := config.ClientName +- if clientName == "" { +- clientName = "fake.Editor" +- } +- +- params := &protocol.ParamInitialize{} +- params.ClientInfo = &protocol.ClientInfo{ +- Name: clientName, +- Version: "v1.0.0", +- } +- params.InitializationOptions = makeSettings(e.sandbox, config, nil) +- +- params.WorkspaceFolders = makeWorkspaceFolders(e.sandbox, config.WorkspaceFolders, config.NoDefaultWorkspaceFiles) +- params.RootURI = protocol.URIFromPath(config.RelRootPath) +- if !uriRE.MatchString(config.RelRootPath) { // relative file path +- params.RootURI = e.sandbox.Workdir.URI(config.RelRootPath) +- } +- +- capabilities, err := clientCapabilities(config) +- if err != nil { +- return fmt.Errorf("unmarshalling EditorConfig.CapabilitiesJSON: %v", err) +- } +- params.Capabilities = capabilities +- +- trace := protocol.TraceValue("messages") +- params.Trace = &trace +- // TODO: support workspace folders. +- if e.Server != nil { +- resp, err := e.Server.Initialize(ctx, params) +- if err != nil { +- return fmt.Errorf("initialize: %w", err) +- } +- semTokOpts, err := marshalUnmarshal[protocol.SemanticTokensOptions](resp.Capabilities.SemanticTokensProvider) +- if err != nil { +- return fmt.Errorf("unmarshalling semantic tokens options: %v", err) +- } +- e.serverCapabilities = resp.Capabilities +- e.semTokOpts = semTokOpts +- +- if err := e.Server.Initialized(ctx, &protocol.InitializedParams{}); err != nil { +- return fmt.Errorf("initialized: %w", err) +- } +- } +- // TODO: await initial configuration here, or expect gopls to manage that? 
+- return nil +-} +- +-func clientCapabilities(cfg EditorConfig) (protocol.ClientCapabilities, error) { +- var capabilities protocol.ClientCapabilities +- // Set various client capabilities that are sought by gopls. +- capabilities.Workspace.Configuration = true // support workspace/configuration +- capabilities.TextDocument.Completion.CompletionItem.TagSupport = &protocol.CompletionItemTagOptions{} +- capabilities.TextDocument.Completion.CompletionItem.TagSupport.ValueSet = []protocol.CompletionItemTag{protocol.ComplDeprecated} +- capabilities.TextDocument.Completion.CompletionItem.SnippetSupport = true +- capabilities.TextDocument.Completion.CompletionItem.InsertReplaceSupport = true +- capabilities.TextDocument.SemanticTokens.Requests.Full = &protocol.Or_ClientSemanticTokensRequestOptions_full{Value: true} +- capabilities.Window.WorkDoneProgress = true // support window/workDoneProgress +- capabilities.Window.ShowDocument = &protocol.ShowDocumentClientCapabilities{Support: true} // support window/showDocument +- capabilities.TextDocument.SemanticTokens.TokenTypes = []string{ +- "namespace", "type", "class", "enum", "interface", +- "struct", "typeParameter", "parameter", "variable", "property", "enumMember", +- "event", "function", "method", "macro", "keyword", "modifier", "comment", +- "string", "number", "regexp", "operator", +- // Additional types supported by this client: +- "label", +- } +- capabilities.TextDocument.SemanticTokens.TokenModifiers = []string{ +- "declaration", "definition", "readonly", "static", +- "deprecated", "abstract", "async", "modification", "documentation", "defaultLibrary", +- // Additional modifiers supported by this client: +- "interface", "struct", "signature", "pointer", "array", "map", "slice", "chan", "string", "number", "bool", "invalid", +- } +- // Request that the server provide its complete list of code action kinds. +- capabilities.TextDocument.CodeAction = protocol.CodeActionClientCapabilities{ +- DataSupport: true, +- ResolveSupport: &protocol.ClientCodeActionResolveOptions{ +- Properties: []string{"edit"}, +- }, +- CodeActionLiteralSupport: protocol.ClientCodeActionLiteralOptions{ +- CodeActionKind: protocol.ClientCodeActionKindOptions{ +- ValueSet: []protocol.CodeActionKind{protocol.Empty}, // => all +- }, +- }, +- } +- // The LSP tests have historically enabled this flag, +- // but really we should test both ways for older editors. +- capabilities.TextDocument.DocumentSymbol.HierarchicalDocumentSymbolSupport = true +- // Glob pattern watching is enabled. +- capabilities.Workspace.DidChangeWatchedFiles.DynamicRegistration = true +- // "rename" operations are used for package renaming. +- // +- // TODO(rfindley): add support for other resource operations (create, delete, ...) +- capabilities.Workspace.WorkspaceEdit = &protocol.WorkspaceEditClientCapabilities{ +- ResourceOperations: []protocol.ResourceOperationKind{ +- "rename", +- }, +- } +- +- // Apply capabilities overlay. +- if cfg.CapabilitiesJSON != nil { +- if err := json.Unmarshal(cfg.CapabilitiesJSON, &capabilities); err != nil { +- return protocol.ClientCapabilities{}, fmt.Errorf("unmarshalling EditorConfig.CapabilitiesJSON: %v", err) +- } +- } +- return capabilities, nil +-} +- +-// Returns the connected LSP server's capabilities. +-// Only populated after a call to [Editor.Connect]. +-func (e *Editor) ServerCapabilities() protocol.ServerCapabilities { +- return e.serverCapabilities +-} +- +-// marshalUnmarshal is a helper to json Marshal and then Unmarshal as a +-// different type. 
Used to work around cases where our protocol types are not +-// specific. +-func marshalUnmarshal[T any](v any) (T, error) { +- var t T +- data, err := json.Marshal(v) +- if err != nil { +- return t, err +- } +- err = json.Unmarshal(data, &t) +- return t, err +-} +- +-// HasCommand reports whether the connected server supports the command with the given ID. +-func (e *Editor) HasCommand(cmd command.Command) bool { +- return slices.Contains(e.serverCapabilities.ExecuteCommandProvider.Commands, cmd.String()) +-} +- +-// Examples: https://www.iana.org/assignments/uri-schemes/uri-schemes.xhtml +-var uriRE = regexp.MustCompile(`^[a-z][a-z0-9+\-.]*://\S+`) +- +-// makeWorkspaceFolders creates a slice of workspace folders to use for +-// this editing session, based on the editor configuration. +-func makeWorkspaceFolders(sandbox *Sandbox, paths []string, useEmpty bool) (folders []protocol.WorkspaceFolder) { +- if len(paths) == 0 { +- if useEmpty { +- return nil +- } +- paths = []string{string(sandbox.Workdir.RelativeTo)} +- } +- +- for _, path := range paths { +- uri := path +- if !uriRE.MatchString(path) { // relative file path +- uri = string(sandbox.Workdir.URI(path)) +- } +- folders = append(folders, protocol.WorkspaceFolder{ +- URI: uri, +- Name: filepath.Base(uri), +- }) +- } +- +- return folders +-} +- +-// onFileChanges is registered to be called by the Workdir on any writes that +-// go through the Workdir API. It is called synchronously by the Workdir. +-func (e *Editor) onFileChanges(ctx context.Context, evts []protocol.FileEvent) { +- if e.Server == nil { +- return +- } +- +- // e may be locked when onFileChanges is called, but it is important that we +- // synchronously increment this counter so that we can subsequently assert on +- // the number of expected DidChangeWatchedFiles calls. +- e.callsMu.Lock() +- e.calls.DidChangeWatchedFiles++ +- e.callsMu.Unlock() +- +- // Since e may be locked, we must run this mutation asynchronously. +- go func() { +- e.mu.Lock() +- defer e.mu.Unlock() +- for _, evt := range evts { +- // Always send an on-disk change, even for events that seem useless +- // because they're shadowed by an open buffer. +- path := e.sandbox.Workdir.URIToPath(evt.URI) +- if buf, ok := e.buffers[path]; ok { +- // Following VS Code, don't honor deletions or changes to dirty buffers. +- if buf.dirty || evt.Type == protocol.Deleted { +- continue +- } +- +- content, err := e.sandbox.Workdir.ReadFile(path) +- if err != nil { +- continue // A race with some other operation. +- } +- // No need to update if the buffer content hasn't changed. +- if string(content) == buf.text() { +- continue +- } +- // During shutdown, this call will fail. Ignore the error. +- _ = e.setBufferContentLocked(ctx, path, false, content, nil) +- } +- } +- var matchedEvts []protocol.FileEvent +- for _, evt := range evts { +- filename := filepath.ToSlash(evt.URI.Path()) +- for _, g := range e.watchPatterns { +- if g.Match(filename) { +- matchedEvts = append(matchedEvts, evt) +- break +- } +- } +- } +- +- // TODO(rfindley): don't send notifications while locked. +- e.Server.DidChangeWatchedFiles(ctx, &protocol.DidChangeWatchedFilesParams{ +- Changes: matchedEvts, +- }) +- }() +-} +- +-// OpenFile creates a buffer for the given workdir-relative file. +-// +-// If the file is already open, it is a no-op. 
+-func (e *Editor) OpenFile(ctx context.Context, path string) error { +- if e.HasBuffer(path) { +- return nil +- } +- content, err := e.sandbox.Workdir.ReadFile(path) +- if err != nil { +- return err +- } +- if e.Config().WindowsLineEndings { +- content = toWindowsLineEndings(content) +- } +- return e.createBuffer(ctx, path, false, content) +-} +- +-// toWindowsLineEndings checks whether content has windows line endings. +-// +-// If so, it returns content unmodified. If not, it returns a new byte slice modified to use CRLF line endings. +-func toWindowsLineEndings(content []byte) []byte { +- abnormal := false +- for i, b := range content { +- if b == '\n' && (i == 0 || content[i-1] != '\r') { +- abnormal = true +- break +- } +- } +- if !abnormal { +- return content +- } +- var buf bytes.Buffer +- for i, b := range content { +- if b == '\n' && (i == 0 || content[i-1] != '\r') { +- buf.WriteByte('\r') +- } +- buf.WriteByte(b) +- } +- return buf.Bytes() +-} +- +-// CreateBuffer creates a new unsaved buffer corresponding to the workdir path, +-// containing the given textual content. +-func (e *Editor) CreateBuffer(ctx context.Context, path, content string) error { +- return e.createBuffer(ctx, path, true, []byte(content)) +-} +- +-func (e *Editor) createBuffer(ctx context.Context, path string, dirty bool, content []byte) error { +- e.mu.Lock() +- +- if _, ok := e.buffers[path]; ok { +- e.mu.Unlock() +- return fmt.Errorf("buffer %q already exists", path) +- } +- +- uri := e.sandbox.Workdir.URI(path) +- buf := buffer{ +- version: 1, +- path: path, +- mapper: protocol.NewMapper(uri, content), +- dirty: dirty, +- } +- e.buffers[path] = buf +- +- item := e.textDocumentItem(buf) +- e.mu.Unlock() +- +- return e.sendDidOpen(ctx, item) +-} +- +-// textDocumentItem builds a protocol.TextDocumentItem for the given buffer. +-// +-// Precondition: e.mu must be held. +-func (e *Editor) textDocumentItem(buf buffer) protocol.TextDocumentItem { +- return protocol.TextDocumentItem{ +- URI: e.sandbox.Workdir.URI(buf.path), +- LanguageID: languageID(buf.path, e.config.FileAssociations), +- Version: int32(buf.version), +- Text: buf.text(), +- } +-} +- +-func (e *Editor) sendDidOpen(ctx context.Context, item protocol.TextDocumentItem) error { +- if e.Server != nil { +- if err := e.Server.DidOpen(ctx, &protocol.DidOpenTextDocumentParams{ +- TextDocument: item, +- }); err != nil { +- return fmt.Errorf("DidOpen: %w", err) +- } +- e.callsMu.Lock() +- e.calls.DidOpen++ +- e.callsMu.Unlock() +- } +- return nil +-} +- +-var defaultFileAssociations = map[protocol.LanguageKind]*regexp.Regexp{ +- "go": regexp.MustCompile(`^.*\.go$`), // '$' is important: don't match .gotmpl! +- "go.mod": regexp.MustCompile(`^go\.mod$`), +- "go.sum": regexp.MustCompile(`^go(\.work)?\.sum$`), +- "go.work": regexp.MustCompile(`^go\.work$`), +- "gotmpl": regexp.MustCompile(`^.*tmpl$`), +- "go.s": regexp.MustCompile(`\.s$`), +-} +- +-// languageID returns the language identifier for the path p given the user +-// configured fileAssociations. +-func languageID(p string, fileAssociations map[protocol.LanguageKind]string) protocol.LanguageKind { +- base := path.Base(p) +- for lang, re := range fileAssociations { +- re := regexp.MustCompile(re) +- if re.MatchString(base) { +- return lang +- } +- } +- for lang, re := range defaultFileAssociations { +- if re.MatchString(base) { +- return lang +- } +- } +- return "" +-} +- +-// CloseBuffer removes the current buffer (regardless of whether it is saved). 
+-// CloseBuffer returns an error if the buffer is not open. +-func (e *Editor) CloseBuffer(ctx context.Context, path string) error { +- e.mu.Lock() +- _, ok := e.buffers[path] +- if !ok { +- e.mu.Unlock() +- return ErrUnknownBuffer +- } +- delete(e.buffers, path) +- e.mu.Unlock() +- +- return e.sendDidClose(ctx, e.TextDocumentIdentifier(path)) +-} +- +-func (e *Editor) sendDidClose(ctx context.Context, doc protocol.TextDocumentIdentifier) error { +- if e.Server != nil { +- if err := e.Server.DidClose(ctx, &protocol.DidCloseTextDocumentParams{ +- TextDocument: doc, +- }); err != nil { +- return fmt.Errorf("DidClose: %w", err) +- } +- e.callsMu.Lock() +- e.calls.DidClose++ +- e.callsMu.Unlock() +- } +- return nil +-} +- +-func (e *Editor) DocumentURI(path string) protocol.DocumentURI { +- return e.sandbox.Workdir.URI(path) +-} +- +-func (e *Editor) TextDocumentIdentifier(path string) protocol.TextDocumentIdentifier { +- return protocol.TextDocumentIdentifier{ +- URI: e.DocumentURI(path), +- } +-} +- +-// SaveBuffer writes the content of the buffer specified by the given path to +-// the filesystem. +-func (e *Editor) SaveBuffer(ctx context.Context, path string) error { +- if err := e.OrganizeImports(ctx, path); err != nil { +- return fmt.Errorf("organizing imports before save: %w", err) +- } +- if err := e.FormatBuffer(ctx, path); err != nil { +- return fmt.Errorf("formatting before save: %w", err) +- } +- return e.SaveBufferWithoutActions(ctx, path) +-} +- +-func (e *Editor) SaveBufferWithoutActions(ctx context.Context, path string) error { +- e.mu.Lock() +- defer e.mu.Unlock() +- buf, ok := e.buffers[path] +- if !ok { +- return fmt.Errorf("unknown buffer: %q", path) +- } +- content := buf.text() +- includeText := false +- syncOptions, ok := e.serverCapabilities.TextDocumentSync.(protocol.TextDocumentSyncOptions) +- if ok { +- includeText = syncOptions.Save.IncludeText +- } +- +- docID := e.TextDocumentIdentifier(buf.path) +- if e.Server != nil { +- if err := e.Server.WillSave(ctx, &protocol.WillSaveTextDocumentParams{ +- TextDocument: docID, +- Reason: protocol.Manual, +- }); err != nil { +- return fmt.Errorf("WillSave: %w", err) +- } +- } +- if err := e.sandbox.Workdir.WriteFile(ctx, path, content); err != nil { +- return fmt.Errorf("writing %q: %w", path, err) +- } +- +- buf.dirty = false +- e.buffers[path] = buf +- +- if e.Server != nil { +- params := &protocol.DidSaveTextDocumentParams{ +- TextDocument: docID, +- } +- if includeText { +- params.Text = &content +- } +- if err := e.Server.DidSave(ctx, params); err != nil { +- return fmt.Errorf("DidSave: %w", err) +- } +- e.callsMu.Lock() +- e.calls.DidSave++ +- e.callsMu.Unlock() +- } +- return nil +-} +- +-// ErrNoMatch is returned if a regexp search fails. +-var ( +- ErrNoMatch = errors.New("no match") +- ErrUnknownBuffer = errors.New("unknown buffer") +-) +- +-// regexpLocation returns the location of the first occurrence of either re +-// or its singular subgroup. It returns ErrNoMatch if the regexp doesn't match. 
+-func regexpLocation(mapper *protocol.Mapper, re string) (protocol.Location, error) { +- var start, end int +- rec, err := regexp.Compile(re) +- if err != nil { +- return protocol.Location{}, err +- } +- indexes := rec.FindSubmatchIndex(mapper.Content) +- if indexes == nil { +- return protocol.Location{}, ErrNoMatch +- } +- switch len(indexes) { +- case 2: +- // no subgroups: return the range of the regexp expression +- start, end = indexes[0], indexes[1] +- case 4: +- // one subgroup: return its range +- start, end = indexes[2], indexes[3] +- default: +- return protocol.Location{}, fmt.Errorf("invalid search regexp %q: expect either 0 or 1 subgroups, got %d", re, len(indexes)/2-1) +- } +- return mapper.OffsetLocation(start, end) +-} +- +-// RegexpSearch returns the Location of the first match for re in the buffer +-// bufName. For convenience, RegexpSearch supports the following two modes: +-// 1. If re has no subgroups, return the position of the match for re itself. +-// 2. If re has one subgroup, return the position of the first subgroup. +-// +-// It returns an error re is invalid, has more than one subgroup, or doesn't +-// match the buffer. +-func (e *Editor) RegexpSearch(bufName, re string) (protocol.Location, error) { +- e.mu.Lock() +- buf, ok := e.buffers[bufName] +- e.mu.Unlock() +- if !ok { +- return protocol.Location{}, ErrUnknownBuffer +- } +- return regexpLocation(buf.mapper, re) +-} +- +-// RegexpReplace edits the buffer corresponding to path by replacing the first +-// instance of re, or its first subgroup, with the replace text. See +-// RegexpSearch for more explanation of these two modes. +-// It returns an error if re is invalid, has more than one subgroup, or doesn't +-// match the buffer. +-func (e *Editor) RegexpReplace(ctx context.Context, path, re, replace string) error { +- e.mu.Lock() +- defer e.mu.Unlock() +- buf, ok := e.buffers[path] +- if !ok { +- return ErrUnknownBuffer +- } +- loc, err := regexpLocation(buf.mapper, re) +- if err != nil { +- return err +- } +- edits := []protocol.TextEdit{{ +- Range: loc.Range, +- NewText: replace, +- }} +- patched, err := applyEdits(buf.mapper, edits, e.config.WindowsLineEndings) +- if err != nil { +- return fmt.Errorf("editing %q: %v", path, err) +- } +- return e.setBufferContentLocked(ctx, path, true, patched, edits) +-} +- +-// EditBuffer applies the given test edits to the buffer identified by path. +-func (e *Editor) EditBuffer(ctx context.Context, path string, edits []protocol.TextEdit) error { +- e.mu.Lock() +- defer e.mu.Unlock() +- return e.editBufferLocked(ctx, path, edits) +-} +- +-func (e *Editor) SetBufferContent(ctx context.Context, path, content string) error { +- e.mu.Lock() +- defer e.mu.Unlock() +- return e.setBufferContentLocked(ctx, path, true, []byte(content), nil) +-} +- +-// HasBuffer reports whether the file name is open in the editor. +-func (e *Editor) HasBuffer(name string) bool { +- e.mu.Lock() +- defer e.mu.Unlock() +- _, ok := e.buffers[name] +- return ok +-} +- +-// BufferText returns the content of the buffer with the given name, or "" if +-// the file at that path is not open. The second return value reports whether +-// the file is open. +-func (e *Editor) BufferText(name string) (string, bool) { +- e.mu.Lock() +- defer e.mu.Unlock() +- buf, ok := e.buffers[name] +- if !ok { +- return "", false +- } +- return buf.text(), true +-} +- +-// Mapper returns the protocol.Mapper for the given buffer name, if it is open. 
+-func (e *Editor) Mapper(name string) (*protocol.Mapper, error) { +- e.mu.Lock() +- defer e.mu.Unlock() +- buf, ok := e.buffers[name] +- if !ok { +- return nil, fmt.Errorf("no mapper for %q", name) +- } +- return buf.mapper, nil +-} +- +-// BufferVersion returns the current version of the buffer corresponding to +-// name (or 0 if it is not being edited). +-func (e *Editor) BufferVersion(name string) int { +- e.mu.Lock() +- defer e.mu.Unlock() +- return e.buffers[name].version +-} +- +-func (e *Editor) editBufferLocked(ctx context.Context, path string, edits []protocol.TextEdit) error { +- buf, ok := e.buffers[path] +- if !ok { +- return fmt.Errorf("unknown buffer %q", path) +- } +- content, err := applyEdits(buf.mapper, edits, e.config.WindowsLineEndings) +- if err != nil { +- return fmt.Errorf("editing %q: %v; edits:\n%v", path, err, edits) +- } +- return e.setBufferContentLocked(ctx, path, true, content, edits) +-} +- +-func (e *Editor) setBufferContentLocked(ctx context.Context, path string, dirty bool, content []byte, fromEdits []protocol.TextEdit) error { +- buf, ok := e.buffers[path] +- if !ok { +- return fmt.Errorf("unknown buffer %q", path) +- } +- buf.mapper = protocol.NewMapper(buf.mapper.URI, content) +- buf.version++ +- buf.dirty = dirty +- e.buffers[path] = buf +- +- // A simple heuristic: if there is only one edit, send it incrementally. +- // Otherwise, send the entire content. +- var evt protocol.TextDocumentContentChangeEvent +- if len(fromEdits) == 1 { +- evt.Range = &fromEdits[0].Range +- evt.Text = fromEdits[0].NewText +- } else { +- evt.Text = buf.text() +- } +- params := &protocol.DidChangeTextDocumentParams{ +- TextDocument: protocol.VersionedTextDocumentIdentifier{ +- Version: int32(buf.version), +- TextDocumentIdentifier: e.TextDocumentIdentifier(buf.path), +- }, +- ContentChanges: []protocol.TextDocumentContentChangeEvent{evt}, +- } +- if e.Server != nil { +- if err := e.Server.DidChange(ctx, params); err != nil { +- return fmt.Errorf("DidChange: %w", err) +- } +- e.callsMu.Lock() +- e.calls.DidChange++ +- e.callsMu.Unlock() +- } +- return nil +-} +- +-// Definitions returns the definitions of the symbol at the given +-// location in an open buffer. +-func (e *Editor) Definitions(ctx context.Context, loc protocol.Location) ([]protocol.Location, error) { +- if err := e.checkBufferLocation(loc); err != nil { +- return nil, err +- } +- params := &protocol.DefinitionParams{} +- params.TextDocument.URI = loc.URI +- params.Position = loc.Range.Start +- +- return e.Server.Definition(ctx, params) +-} +- +-// TypeDefinitions returns the type definitions of the symbol at the +-// given location in an open buffer. +-func (e *Editor) TypeDefinitions(ctx context.Context, loc protocol.Location) ([]protocol.Location, error) { +- if err := e.checkBufferLocation(loc); err != nil { +- return nil, err +- } +- params := &protocol.TypeDefinitionParams{} +- params.TextDocument.URI = loc.URI +- params.Position = loc.Range.Start +- +- return e.Server.TypeDefinition(ctx, params) +-} +- +-// Symbol performs a workspace symbol search using query +-func (e *Editor) Symbol(ctx context.Context, query string) ([]protocol.SymbolInformation, error) { +- params := &protocol.WorkspaceSymbolParams{Query: query} +- return e.Server.Symbol(ctx, params) +-} +- +-// OrganizeImports requests and performs the source.organizeImports codeAction. 
+-func (e *Editor) OrganizeImports(ctx context.Context, path string) error { +- loc := e.sandbox.Workdir.EntireFile(path) +- _, err := e.applyCodeActions(ctx, loc, nil, protocol.SourceOrganizeImports) +- return err +-} +- +-// RefactorRewrite requests and performs the source.refactorRewrite codeAction. +-func (e *Editor) RefactorRewrite(ctx context.Context, loc protocol.Location) error { +- applied, err := e.applyCodeActions(ctx, loc, nil, protocol.RefactorRewrite) +- if err != nil { +- return err +- } +- if applied == 0 { +- return fmt.Errorf("no refactorings were applied") +- } +- return nil +-} +- +-// ApplyQuickFixes requests and performs the quickfix codeAction. +-func (e *Editor) ApplyQuickFixes(ctx context.Context, loc protocol.Location, diagnostics []protocol.Diagnostic) error { +- applied, err := e.applyCodeActions(ctx, loc, diagnostics, protocol.SourceFixAll, protocol.QuickFix) +- if applied == 0 { +- return fmt.Errorf("no quick fixes were applied") +- } +- return err +-} +- +-// ApplyCodeAction applies the given code action. +-func (e *Editor) ApplyCodeAction(ctx context.Context, action protocol.CodeAction) error { +- // Resolve the code actions if necessary and supported. +- if action.Edit == nil { +- editSupport, err := e.EditResolveSupport() +- if err != nil { +- return err +- } +- if editSupport { +- ca, err := e.Server.ResolveCodeAction(ctx, &action) +- if err != nil { +- return err +- } +- action.Edit = ca.Edit +- } +- } +- +- if action.Edit != nil { +- for _, change := range action.Edit.DocumentChanges { +- if change.TextDocumentEdit != nil { +- path := e.sandbox.Workdir.URIToPath(change.TextDocumentEdit.TextDocument.URI) +- if int32(e.buffers[path].version) != change.TextDocumentEdit.TextDocument.Version { +- // Skip edits for old versions. +- continue +- } +- if err := e.EditBuffer(ctx, path, protocol.AsTextEdits(change.TextDocumentEdit.Edits)); err != nil { +- return fmt.Errorf("editing buffer %q: %w", path, err) +- } +- } +- } +- } +- // Execute any commands. The specification says that commands are +- // executed after edits are applied. +- if action.Command != nil { +- if err := e.ExecuteCommand(ctx, &protocol.ExecuteCommandParams{ +- Command: action.Command.Command, +- Arguments: action.Command.Arguments, +- }, nil); err != nil { +- return err +- } +- } +- // Some commands may edit files on disk. +- return e.sandbox.Workdir.CheckForFileChanges(ctx) +-} +- +-func (e *Editor) Diagnostics(ctx context.Context, path string) ([]protocol.Diagnostic, error) { +- if e.Server == nil { +- return nil, errors.New("not connected") +- } +- e.mu.Lock() +- capabilities := e.serverCapabilities.DiagnosticProvider +- e.mu.Unlock() +- +- if capabilities == nil { +- return nil, errors.New("server does not support pull diagnostics") +- } +- switch capabilities.Value.(type) { +- case nil: +- return nil, errors.New("server does not support pull diagnostics") +- case protocol.DiagnosticOptions: +- case protocol.DiagnosticRegistrationOptions: +- // We could optionally check TextDocumentRegistrationOptions here to +- // see if any filters apply to path. 
+- default: +- panic(fmt.Sprintf("unknown DiagnosticsProvider type %T", capabilities.Value)) +- } +- +- params := &protocol.DocumentDiagnosticParams{ +- TextDocument: e.TextDocumentIdentifier(path), +- } +- result, err := e.Server.Diagnostic(ctx, params) +- if err != nil { +- return nil, err +- } +- report, ok := result.Value.(protocol.RelatedFullDocumentDiagnosticReport) +- if !ok { +- return nil, fmt.Errorf("unexpected diagnostics report type %T", result) +- } +- return report.Items, nil +-} +- +-// GetQuickFixes returns the available quick fix code actions. +-func (e *Editor) GetQuickFixes(ctx context.Context, loc protocol.Location, diagnostics []protocol.Diagnostic) ([]protocol.CodeAction, error) { +- return e.CodeActions(ctx, loc, diagnostics, protocol.QuickFix, protocol.SourceFixAll) +-} +- +-func (e *Editor) applyCodeActions(ctx context.Context, loc protocol.Location, diagnostics []protocol.Diagnostic, only ...protocol.CodeActionKind) (int, error) { +- actions, err := e.CodeActions(ctx, loc, diagnostics, only...) +- if err != nil { +- return 0, err +- } +- applied := 0 +- for _, action := range actions { +- if action.Title == "" { +- return 0, fmt.Errorf("empty title for code action") +- } +- applied++ +- if err := e.ApplyCodeAction(ctx, action); err != nil { +- return 0, err +- } +- } +- return applied, nil +-} +- +-// TODO(rfindley): add missing documentation to exported methods here. +- +-func (e *Editor) CodeActions(ctx context.Context, loc protocol.Location, diagnostics []protocol.Diagnostic, only ...protocol.CodeActionKind) ([]protocol.CodeAction, error) { +- if e.Server == nil { +- return nil, nil +- } +- params := &protocol.CodeActionParams{} +- params.TextDocument.URI = loc.URI +- params.Context.Only = only +- params.Range = loc.Range // may be zero => whole file +- if diagnostics != nil { +- params.Context.Diagnostics = diagnostics +- } +- return e.Server.CodeAction(ctx, params) +-} +- +-func (e *Editor) ExecuteCodeLensCommand(ctx context.Context, path string, cmd command.Command, result any) error { +- lenses, err := e.CodeLens(ctx, path) +- if err != nil { +- return err +- } +- var lens protocol.CodeLens +- var found bool +- for _, l := range lenses { +- if l.Command.Command == cmd.String() { +- lens = l +- found = true +- } +- } +- if !found { +- return fmt.Errorf("found no command with the ID %s", cmd) +- } +- return e.ExecuteCommand(ctx, &protocol.ExecuteCommandParams{ +- Command: lens.Command.Command, +- Arguments: lens.Command.Arguments, +- }, result) +-} +- +-// ExecuteCommand makes a workspace/executeCommand request to the connected LSP +-// server, if any. +-// +-// Result contains a pointer to a variable to be populated by json.Unmarshal. +-func (e *Editor) ExecuteCommand(ctx context.Context, params *protocol.ExecuteCommandParams, result any) error { +- if e.Server == nil { +- return nil +- } +- var match bool +- if e.serverCapabilities.ExecuteCommandProvider != nil { +- // Ensure that this command was actually listed as a supported command. +- if slices.Contains(e.serverCapabilities.ExecuteCommandProvider.Commands, params.Command) { +- match = true +- } +- } +- if !match { +- return fmt.Errorf("unsupported command %q", params.Command) +- } +- response, err := e.Server.ExecuteCommand(ctx, params) +- if err != nil { +- return err +- } +- // Some commands use the go command, which writes directly to disk. +- // For convenience, check for those changes. 
+- if err := e.sandbox.Workdir.CheckForFileChanges(ctx); err != nil { +- return fmt.Errorf("checking for file changes: %v", err) +- } +- if result != nil { +- // ExecuteCommand already unmarshalled the response without knowing +- // its schema, using the generic map[string]any representation. +- // Encode and decode again, this time into a typed variable. +- // +- // This could be improved by generating a jsonrpc2 command client from the +- // command.Interface, but that should only be done if we're consolidating +- // this part of the tsprotocol generation. +- // +- // TODO(rfindley): we could also improve this by having ExecuteCommand return +- // a json.RawMessage, similar to what we do with arguments. +- data, err := json.Marshal(response) +- if err != nil { +- return bug.Errorf("marshalling response: %v", err) +- } +- if err := json.Unmarshal(data, result); err != nil { +- return fmt.Errorf("unmarshalling response: %v", err) +- } +- } +- return nil +-} +- +-// FormatBuffer gofmts a Go file. +-func (e *Editor) FormatBuffer(ctx context.Context, path string) error { +- if e.Server == nil { +- return nil +- } +- e.mu.Lock() +- version := e.buffers[path].version +- e.mu.Unlock() +- params := &protocol.DocumentFormattingParams{} +- params.TextDocument.URI = e.sandbox.Workdir.URI(path) +- edits, err := e.Server.Formatting(ctx, params) +- if err != nil { +- return fmt.Errorf("textDocument/formatting: %w", err) +- } +- e.mu.Lock() +- defer e.mu.Unlock() +- if versionAfter := e.buffers[path].version; versionAfter != version { +- return fmt.Errorf("before receipt of formatting edits, buffer version changed from %d to %d", version, versionAfter) +- } +- if len(edits) == 0 { +- return nil +- } +- return e.editBufferLocked(ctx, path, edits) +-} +- +-func (e *Editor) checkBufferLocation(loc protocol.Location) error { +- e.mu.Lock() +- defer e.mu.Unlock() +- path := e.sandbox.Workdir.URIToPath(loc.URI) +- buf, ok := e.buffers[path] +- if !ok { +- return fmt.Errorf("buffer %q is not open", path) +- } +- +- _, _, err := buf.mapper.RangeOffsets(loc.Range) +- return err +-} +- +-// RunGenerate runs `go generate` non-recursively in the workdir-relative dir +-// path. It does not report any resulting file changes as a watched file +-// change, so must be followed by a call to Workdir.CheckForFileChanges once +-// the generate command has completed. +-// TODO(rFindley): this shouldn't be necessary anymore. Delete it. +-func (e *Editor) RunGenerate(ctx context.Context, dir string) error { +- if e.Server == nil { +- return nil +- } +- absDir := e.sandbox.Workdir.AbsPath(dir) +- cmd := command.NewGenerateCommand("", command.GenerateArgs{ +- Dir: protocol.URIFromPath(absDir), +- Recursive: false, +- }) +- params := &protocol.ExecuteCommandParams{ +- Command: cmd.Command, +- Arguments: cmd.Arguments, +- } +- if err := e.ExecuteCommand(ctx, params, nil); err != nil { +- return fmt.Errorf("running generate: %v", err) +- } +- // Unfortunately we can't simply poll the workdir for file changes here, +- // because server-side command may not have completed. In integration tests, we can +- // Await this state change, but here we must delegate that responsibility to +- // the caller. +- return nil +-} +- +-// CodeLens executes a codelens request on the server. 
+-func (e *Editor) CodeLens(ctx context.Context, path string) ([]protocol.CodeLens, error) { +- if e.Server == nil { +- return nil, nil +- } +- e.mu.Lock() +- _, ok := e.buffers[path] +- e.mu.Unlock() +- if !ok { +- return nil, fmt.Errorf("buffer %q is not open", path) +- } +- params := &protocol.CodeLensParams{ +- TextDocument: e.TextDocumentIdentifier(path), +- } +- lens, err := e.Server.CodeLens(ctx, params) +- if err != nil { +- return nil, err +- } +- return lens, nil +-} +- +-// Completion executes a completion request on the server. +-func (e *Editor) Completion(ctx context.Context, loc protocol.Location) (*protocol.CompletionList, error) { +- if e.Server == nil { +- return nil, nil +- } +- path := e.sandbox.Workdir.URIToPath(loc.URI) +- e.mu.Lock() +- _, ok := e.buffers[path] +- e.mu.Unlock() +- if !ok { +- return nil, fmt.Errorf("buffer %q is not open", path) +- } +- params := &protocol.CompletionParams{ +- TextDocumentPositionParams: protocol.LocationTextDocumentPositionParams(loc), +- } +- completions, err := e.Server.Completion(ctx, params) +- if err != nil { +- return nil, err +- } +- return completions, nil +-} +- +-func (e *Editor) DidCreateFiles(ctx context.Context, files ...protocol.DocumentURI) error { +- if e.Server == nil { +- return nil +- } +- params := &protocol.CreateFilesParams{} +- for _, file := range files { +- params.Files = append(params.Files, protocol.FileCreate{ +- URI: string(file), +- }) +- } +- return e.Server.DidCreateFiles(ctx, params) +-} +- +-func (e *Editor) SetSuggestionInsertReplaceMode(_ context.Context, useReplaceMode bool) { +- e.mu.Lock() +- defer e.mu.Unlock() +- e.suggestionUseReplaceMode = useReplaceMode +-} +- +-// AcceptCompletion accepts a completion for the given item +-// at the given position based on the editor's suggestion insert mode. +-// The server provides separate insert/replace ranges only if the +-// Editor declares `InsertReplaceSupport` capability during initialization. +-// Otherwise, it returns a single range and the insert/replace mode is ignored. +-func (e *Editor) AcceptCompletion(ctx context.Context, loc protocol.Location, item protocol.CompletionItem) error { +- if e.Server == nil { +- return nil +- } +- e.mu.Lock() +- defer e.mu.Unlock() +- path := e.sandbox.Workdir.URIToPath(loc.URI) +- _, ok := e.buffers[path] +- if !ok { +- return fmt.Errorf("buffer %q is not open", path) +- } +- edit, err := protocol.SelectCompletionTextEdit(item, e.suggestionUseReplaceMode) +- if err != nil { +- return err +- } +- return e.editBufferLocked(ctx, path, append([]protocol.TextEdit{ +- edit, +- }, item.AdditionalTextEdits...)) +-} +- +-// Symbols executes a workspace/symbols request on the server. +-func (e *Editor) Symbols(ctx context.Context, sym string) ([]protocol.SymbolInformation, error) { +- if e.Server == nil { +- return nil, nil +- } +- params := &protocol.WorkspaceSymbolParams{Query: sym} +- ans, err := e.Server.Symbol(ctx, params) +- return ans, err +-} +- +-// CodeLens executes a codelens request on the server. 
+-func (e *Editor) InlayHint(ctx context.Context, path string) ([]protocol.InlayHint, error) { +- if e.Server == nil { +- return nil, nil +- } +- e.mu.Lock() +- _, ok := e.buffers[path] +- e.mu.Unlock() +- if !ok { +- return nil, fmt.Errorf("buffer %q is not open", path) +- } +- params := &protocol.InlayHintParams{ +- TextDocument: e.TextDocumentIdentifier(path), +- } +- hints, err := e.Server.InlayHint(ctx, params) +- if err != nil { +- return nil, err +- } +- return hints, nil +-} +- +-// References returns references to the object at loc, as returned by +-// the connected LSP server. If no server is connected, it returns (nil, nil). +-func (e *Editor) References(ctx context.Context, loc protocol.Location) ([]protocol.Location, error) { +- if e.Server == nil { +- return nil, nil +- } +- path := e.sandbox.Workdir.URIToPath(loc.URI) +- e.mu.Lock() +- _, ok := e.buffers[path] +- e.mu.Unlock() +- if !ok { +- return nil, fmt.Errorf("buffer %q is not open", path) +- } +- params := &protocol.ReferenceParams{ +- TextDocumentPositionParams: protocol.LocationTextDocumentPositionParams(loc), +- Context: protocol.ReferenceContext{ +- IncludeDeclaration: true, +- }, +- } +- locations, err := e.Server.References(ctx, params) +- if err != nil { +- return nil, err +- } +- return locations, nil +-} +- +-// Rename performs a rename of the object at loc to newName, using the +-// connected LSP server. If no server is connected, it returns nil. +-func (e *Editor) Rename(ctx context.Context, loc protocol.Location, newName string) error { +- if e.Server == nil { +- return nil +- } +- path := e.sandbox.Workdir.URIToPath(loc.URI) +- +- // Verify that PrepareRename succeeds. +- prepareParams := &protocol.PrepareRenameParams{} +- prepareParams.TextDocument = e.TextDocumentIdentifier(path) +- prepareParams.Position = loc.Range.Start +- if _, err := e.Server.PrepareRename(ctx, prepareParams); err != nil { +- return fmt.Errorf("preparing rename: %v", err) +- } +- +- params := &protocol.RenameParams{ +- TextDocument: e.TextDocumentIdentifier(path), +- Position: loc.Range.Start, +- NewName: newName, +- } +- wsedit, err := e.Server.Rename(ctx, params) +- if err != nil { +- return err +- } +- return e.applyWorkspaceEdit(ctx, wsedit) +-} +- +-// Implementations returns implementations for the object at loc, as +-// returned by the connected LSP server. If no server is connected, it returns +-// (nil, nil). 
+-func (e *Editor) Implementations(ctx context.Context, loc protocol.Location) ([]protocol.Location, error) { +- if e.Server == nil { +- return nil, nil +- } +- path := e.sandbox.Workdir.URIToPath(loc.URI) +- e.mu.Lock() +- _, ok := e.buffers[path] +- e.mu.Unlock() +- if !ok { +- return nil, fmt.Errorf("buffer %q is not open", path) +- } +- params := &protocol.ImplementationParams{ +- TextDocumentPositionParams: protocol.LocationTextDocumentPositionParams(loc), +- } +- return e.Server.Implementation(ctx, params) +-} +- +-func (e *Editor) SignatureHelp(ctx context.Context, loc protocol.Location) (*protocol.SignatureHelp, error) { +- if e.Server == nil { +- return nil, nil +- } +- path := e.sandbox.Workdir.URIToPath(loc.URI) +- e.mu.Lock() +- _, ok := e.buffers[path] +- e.mu.Unlock() +- if !ok { +- return nil, fmt.Errorf("buffer %q is not open", path) +- } +- params := &protocol.SignatureHelpParams{ +- TextDocumentPositionParams: protocol.LocationTextDocumentPositionParams(loc), +- } +- return e.Server.SignatureHelp(ctx, params) +-} +- +-func (e *Editor) RenameFile(ctx context.Context, oldPath, newPath string) error { +- closed, opened, err := e.renameBuffers(oldPath, newPath) +- if err != nil { +- return err +- } +- +- for _, c := range closed { +- if err := e.sendDidClose(ctx, c); err != nil { +- return err +- } +- } +- for _, o := range opened { +- if err := e.sendDidOpen(ctx, o); err != nil { +- return err +- } +- } +- +- // Finally, perform the renaming on disk. +- if err := e.sandbox.Workdir.RenameFile(ctx, oldPath, newPath); err != nil { +- return fmt.Errorf("renaming sandbox file: %w", err) +- } +- return nil +-} +- +-// renameBuffers renames in-memory buffers affected by the renaming of +-// oldPath->newPath, returning the resulting text documents that must be closed +-// and opened over the LSP. +-func (e *Editor) renameBuffers(oldPath, newPath string) (closed []protocol.TextDocumentIdentifier, opened []protocol.TextDocumentItem, _ error) { +- e.mu.Lock() +- defer e.mu.Unlock() +- +- // In case either oldPath or newPath is absolute, convert to absolute paths +- // before checking for containment. +- oldAbs := e.sandbox.Workdir.AbsPath(oldPath) +- newAbs := e.sandbox.Workdir.AbsPath(newPath) +- +- // Collect buffers that are affected by the given file or directory renaming. +- buffersToRename := make(map[string]string) // old path -> new path +- +- for path := range e.buffers { +- abs := e.sandbox.Workdir.AbsPath(path) +- if oldAbs == abs || pathutil.InDir(oldAbs, abs) { +- rel, err := filepath.Rel(oldAbs, abs) +- if err != nil { +- return nil, nil, fmt.Errorf("filepath.Rel(%q, %q): %v", oldAbs, abs, err) +- } +- nabs := filepath.Join(newAbs, rel) +- newPath := e.sandbox.Workdir.RelPath(nabs) +- buffersToRename[path] = newPath +- } +- } +- +- // Update buffers, and build protocol changes. +- for old, new := range buffersToRename { +- buf := e.buffers[old] +- delete(e.buffers, old) +- buf.version = 1 +- buf.path = new +- e.buffers[new] = buf +- +- closed = append(closed, e.TextDocumentIdentifier(old)) +- opened = append(opened, e.textDocumentItem(buf)) +- } +- +- return closed, opened, nil +-} +- +-// applyWorkspaceEdit applies the sequence of document changes in +-// wsedit to the Editor. +-// +-// See also: +-// - changedFiles in ../../marker/marker_test.go for the +-// handler used by the marker test to intercept edits. +-// - client.applyWorkspaceEdit in ../../../cmd/cmd.go for the +-// CLI variant. 
+-func (e *Editor) applyWorkspaceEdit(ctx context.Context, wsedit *protocol.WorkspaceEdit) error {
+- uriToPath := e.sandbox.Workdir.URIToPath
+-
+- for _, change := range wsedit.DocumentChanges {
+- switch {
+- case change.TextDocumentEdit != nil:
+- if err := e.applyTextDocumentEdit(ctx, *change.TextDocumentEdit); err != nil {
+- return err
+- }
+-
+- case change.RenameFile != nil:
+- old := uriToPath(change.RenameFile.OldURI)
+- new := uriToPath(change.RenameFile.NewURI)
+- return e.RenameFile(ctx, old, new)
+-
+- case change.CreateFile != nil:
+- path := uriToPath(change.CreateFile.URI)
+- if err := e.CreateBuffer(ctx, path, ""); err != nil {
+- return err // e.g. already exists
+- }
+-
+- case change.DeleteFile != nil:
+- path := uriToPath(change.CreateFile.URI)
+- _ = e.CloseBuffer(ctx, path) // returns error if not open
+- if err := e.sandbox.Workdir.RemoveFile(ctx, path); err != nil {
+- return err // e.g. doesn't exist
+- }
+-
+- default:
+- return bug.Errorf("invalid DocumentChange")
+- }
+- }
+- return nil
+-}
+-
+-func (e *Editor) applyTextDocumentEdit(ctx context.Context, change protocol.TextDocumentEdit) error {
+- path := e.sandbox.Workdir.URIToPath(change.TextDocument.URI)
+- if ver := int32(e.BufferVersion(path)); ver != change.TextDocument.Version {
+- return fmt.Errorf("buffer versions for %q do not match: have %d, editing %d", path, ver, change.TextDocument.Version)
+- }
+- if !e.HasBuffer(path) {
+- err := e.OpenFile(ctx, path)
+- if os.IsNotExist(err) {
+- // TODO: it's unclear if this is correct. Here we create the buffer (with
+- // version 1), then apply edits. Perhaps we should apply the edits before
+- // sending the didOpen notification.
+- err = e.CreateBuffer(ctx, path, "")
+- }
+- if err != nil {
+- return err
+- }
+- }
+- return e.EditBuffer(ctx, path, protocol.AsTextEdits(change.Edits))
+-}
+-
+-// Config returns the current editor configuration.
+-func (e *Editor) Config() EditorConfig {
+- e.mu.Lock()
+- defer e.mu.Unlock()
+- return e.config
+-}
+-
+-func (e *Editor) SetConfig(cfg EditorConfig) {
+- e.mu.Lock()
+- e.config = cfg
+- e.mu.Unlock()
+-}
+-
+-// ChangeConfiguration sets the new editor configuration, and if applicable
+-// sends a didChangeConfiguration notification.
+-//
+-// An error is returned if the change notification failed to send.
+-func (e *Editor) ChangeConfiguration(ctx context.Context, newConfig EditorConfig) error {
+- e.SetConfig(newConfig)
+- if e.Server != nil {
+- var params protocol.DidChangeConfigurationParams // empty: gopls ignores the Settings field
+- if err := e.Server.DidChangeConfiguration(ctx, &params); err != nil {
+- return err
+- }
+- e.callsMu.Lock()
+- e.calls.DidChangeConfiguration++
+- e.callsMu.Unlock()
+- }
+- return nil
+-}
+-
+-// ChangeWorkspaceFolders sets the new workspace folders, and sends a
+-// didChangeWorkspaceFolders notification to the server.
+-//
+-// The given folders must all be unique.
+-func (e *Editor) ChangeWorkspaceFolders(ctx context.Context, folders []string) error {
+- config := e.Config()
+-
+- // capture existing folders so that we can compute the change.
+- oldFolders := makeWorkspaceFolders(e.sandbox, config.WorkspaceFolders, config.NoDefaultWorkspaceFiles)
+- newFolders := makeWorkspaceFolders(e.sandbox, folders, config.NoDefaultWorkspaceFiles)
+- config.WorkspaceFolders = folders
+- e.SetConfig(config)
+-
+- if e.Server == nil {
+- return nil
+- }
+-
+- var params protocol.DidChangeWorkspaceFoldersParams
+-
+- // Keep track of old workspace folders that must be removed.
+- toRemove := make(map[protocol.URI]protocol.WorkspaceFolder)
+- for _, folder := range oldFolders {
+- toRemove[folder.URI] = folder
+- }
+-
+- // Sanity check: if we see a folder twice the algorithm below doesn't work,
+- // so track seen folders to ensure that we panic in that case.
+- seen := make(map[protocol.URI]protocol.WorkspaceFolder)
+- for _, folder := range newFolders {
+- if _, ok := seen[folder.URI]; ok {
+- panic(fmt.Sprintf("folder %s seen twice", folder.URI))
+- }
+-
+- // If this folder already exists, we don't want to remove it.
+- // Otherwise, we need to add it.
+- if _, ok := toRemove[folder.URI]; ok {
+- delete(toRemove, folder.URI)
+- } else {
+- params.Event.Added = append(params.Event.Added, folder)
+- }
+- }
+-
+- for _, v := range toRemove {
+- params.Event.Removed = append(params.Event.Removed, v)
+- }
+-
+- return e.Server.DidChangeWorkspaceFolders(ctx, &params)
+-}
+-
+-// CodeAction executes a codeAction request on the server.
+-// If loc.Range is zero, the whole file is implied.
+-// To reduce distraction, the trigger action (unknown, automatic, invoked)
+-// may affect what actions are offered.
+-func (e *Editor) CodeAction(ctx context.Context, loc protocol.Location, diagnostics []protocol.Diagnostic, trigger protocol.CodeActionTriggerKind) ([]protocol.CodeAction, error) {
+- if e.Server == nil {
+- return nil, nil
+- }
+- path := e.sandbox.Workdir.URIToPath(loc.URI)
+- e.mu.Lock()
+- _, ok := e.buffers[path]
+- e.mu.Unlock()
+- if !ok {
+- return nil, fmt.Errorf("buffer %q is not open", path)
+- }
+- params := &protocol.CodeActionParams{
+- TextDocument: e.TextDocumentIdentifier(path),
+- Context: protocol.CodeActionContext{
+- Diagnostics: diagnostics,
+- TriggerKind: &trigger,
+- Only: []protocol.CodeActionKind{protocol.Empty}, // => all
+- },
+- Range: loc.Range, // may be zero
+- }
+- lens, err := e.Server.CodeAction(ctx, params)
+- if err != nil {
+- return nil, err
+- }
+- return lens, nil
+-}
+-
+-func (e *Editor) EditResolveSupport() (bool, error) {
+- capabilities, err := clientCapabilities(e.Config())
+- if err != nil {
+- return false, err
+- }
+- return capabilities.TextDocument.CodeAction.ResolveSupport != nil && slices.Contains(capabilities.TextDocument.CodeAction.ResolveSupport.Properties, "edit"), nil
+-}
+-
+-// Hover triggers a hover at the given position in an open buffer.
+-// It may return (nil, zero) if no symbol was selected.
+-func (e *Editor) Hover(ctx context.Context, loc protocol.Location) (*protocol.MarkupContent, protocol.Location, error) {
+- if err := e.checkBufferLocation(loc); err != nil {
+- return nil, protocol.Location{}, err
+- }
+- params := &protocol.HoverParams{}
+- params.TextDocument.URI = loc.URI
+- params.Position = loc.Range.Start
+-
+- resp, err := e.Server.Hover(ctx, params)
+- if err != nil {
+- return nil, protocol.Location{}, fmt.Errorf("hover: %w", err)
+- }
+- if resp == nil {
+- return nil, protocol.Location{}, nil // e.g.
no selected symbol +- } +- return &resp.Contents, loc.URI.Location(resp.Range), nil +-} +- +-func (e *Editor) DocumentLink(ctx context.Context, path string) ([]protocol.DocumentLink, error) { +- if e.Server == nil { +- return nil, nil +- } +- params := &protocol.DocumentLinkParams{} +- params.TextDocument.URI = e.sandbox.Workdir.URI(path) +- return e.Server.DocumentLink(ctx, params) +-} +- +-func (e *Editor) DocumentHighlight(ctx context.Context, loc protocol.Location) ([]protocol.DocumentHighlight, error) { +- if e.Server == nil { +- return nil, nil +- } +- if err := e.checkBufferLocation(loc); err != nil { +- return nil, err +- } +- params := &protocol.DocumentHighlightParams{} +- params.TextDocument.URI = loc.URI +- params.Position = loc.Range.Start +- +- return e.Server.DocumentHighlight(ctx, params) +-} +- +-// SemanticTokensFull invokes textDocument/semanticTokens/full, and interprets +-// its result. +-func (e *Editor) SemanticTokensFull(ctx context.Context, path string) ([]SemanticToken, error) { +- p := &protocol.SemanticTokensParams{ +- TextDocument: protocol.TextDocumentIdentifier{ +- URI: e.sandbox.Workdir.URI(path), +- }, +- } +- resp, err := e.Server.SemanticTokensFull(ctx, p) +- if err != nil { +- return nil, err +- } +- content, ok := e.BufferText(path) +- if !ok { +- return nil, fmt.Errorf("buffer %s is not open", path) +- } +- return e.interpretTokens(resp.Data, content), nil +-} +- +-// SemanticTokensRange invokes textDocument/semanticTokens/range, and +-// interprets its result. +-func (e *Editor) SemanticTokensRange(ctx context.Context, loc protocol.Location) ([]SemanticToken, error) { +- p := &protocol.SemanticTokensRangeParams{ +- TextDocument: protocol.TextDocumentIdentifier{URI: loc.URI}, +- Range: loc.Range, +- } +- resp, err := e.Server.SemanticTokensRange(ctx, p) +- if err != nil { +- return nil, err +- } +- path := e.sandbox.Workdir.URIToPath(loc.URI) +- // As noted above: buffers should be keyed by protocol.DocumentURI. +- content, ok := e.BufferText(path) +- if !ok { +- return nil, fmt.Errorf("buffer %s is not open", path) +- } +- return e.interpretTokens(resp.Data, content), nil +-} +- +-// A SemanticToken is an interpreted semantic token value. +-type SemanticToken struct { +- Token string +- TokenType string +- Mod string +-} +- +-// Note: previously this function elided comment, string, and number tokens. +-// Instead, filtering of token types should be done by the caller. +-func (e *Editor) interpretTokens(x []uint32, contents string) []SemanticToken { +- legend := e.semTokOpts.Legend +- lines := strings.Split(contents, "\n") +- ans := []SemanticToken{} +- line, col := 1, 1 +- for i := 0; i < len(x); i += 5 { +- line += int(x[i]) +- col += int(x[i+1]) +- if x[i] != 0 { // new line +- col = int(x[i+1]) + 1 // 1-based column numbers +- } +- sz := x[i+2] +- t := legend.TokenTypes[x[i+3]] +- l := x[i+4] +- var mods []string +- for i, mod := range legend.TokenModifiers { +- if l&(1<<i) != 0 { +- mods = append(mods, mod) +- } +- } +- // Preexisting note: "col is a utf-8 offset" +- // TODO(rfindley): is that true? Or is it UTF-16, like other columns in the LSP? 
+- tok := lines[line-1][col-1 : col-1+int(sz)] +- ans = append(ans, SemanticToken{tok, t, strings.Join(mods, " ")}) +- } +- return ans +-} +diff -urN a/gopls/internal/test/integration/fake/editor_test.go b/gopls/internal/test/integration/fake/editor_test.go +--- a/gopls/internal/test/integration/fake/editor_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/integration/fake/editor_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,61 +0,0 @@ +-// Copyright 2020 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package fake +- +-import ( +- "context" +- "testing" +- +- "golang.org/x/tools/gopls/internal/protocol" +-) +- +-const exampleProgram = ` +--- go.mod -- +-go 1.12 +--- main.go -- +-package main +- +-import "fmt" +- +-func main() { +- fmt.Println("Hello World.") +-} +-` +- +-func TestClientEditing(t *testing.T) { +- ws, err := NewSandbox(&SandboxConfig{Files: UnpackTxt(exampleProgram)}) +- if err != nil { +- t.Fatal(err) +- } +- defer ws.Close() +- ctx := context.Background() +- editor := NewEditor(ws, EditorConfig{}) +- if err := editor.OpenFile(ctx, "main.go"); err != nil { +- t.Fatal(err) +- } +- if err := editor.EditBuffer(ctx, "main.go", []protocol.TextEdit{ +- { +- Range: protocol.Range{ +- Start: protocol.Position{Line: 5, Character: 14}, +- End: protocol.Position{Line: 5, Character: 26}, +- }, +- NewText: "Hola, mundo.", +- }, +- }); err != nil { +- t.Fatal(err) +- } +- got := editor.buffers["main.go"].text() +- want := `package main +- +-import "fmt" +- +-func main() { +- fmt.Println("Hola, mundo.") +-} +-` +- if got != want { +- t.Errorf("got text %q, want %q", got, want) +- } +-} +diff -urN a/gopls/internal/test/integration/fake/edit_test.go b/gopls/internal/test/integration/fake/edit_test.go +--- a/gopls/internal/test/integration/fake/edit_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/integration/fake/edit_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,95 +0,0 @@ +-// Copyright 2020 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. 
+- +-package fake +- +-import ( +- "testing" +- +- "golang.org/x/tools/gopls/internal/protocol" +-) +- +-func TestApplyEdits(t *testing.T) { +- tests := []struct { +- label string +- content string +- edits []protocol.TextEdit +- want string +- wantErr bool +- }{ +- { +- label: "empty content", +- }, +- { +- label: "empty edit", +- content: "hello", +- edits: []protocol.TextEdit{}, +- want: "hello", +- }, +- { +- label: "unicode edit", +- content: "hello, 日本語", +- edits: []protocol.TextEdit{ +- NewEdit(0, 7, 0, 10, "world"), +- }, +- want: "hello, world", +- }, +- { +- label: "range edit", +- content: "ABC\nDEF\nGHI\nJKL", +- edits: []protocol.TextEdit{ +- NewEdit(1, 1, 2, 3, "12\n345"), +- }, +- want: "ABC\nD12\n345\nJKL", +- }, +- { +- label: "regression test for issue #57627", +- content: "go 1.18\nuse moda/a", +- edits: []protocol.TextEdit{ +- NewEdit(1, 0, 1, 0, "\n"), +- NewEdit(2, 0, 2, 0, "\n"), +- }, +- want: "go 1.18\n\nuse moda/a\n", +- }, +- { +- label: "end before start", +- content: "ABC\nDEF\nGHI\nJKL", +- edits: []protocol.TextEdit{ +- NewEdit(2, 3, 1, 1, "12\n345"), +- }, +- wantErr: true, +- }, +- { +- label: "out of bounds line", +- content: "ABC\nDEF\nGHI\nJKL", +- edits: []protocol.TextEdit{ +- NewEdit(1, 1, 4, 3, "12\n345"), +- }, +- wantErr: true, +- }, +- { +- label: "out of bounds column", +- content: "ABC\nDEF\nGHI\nJKL", +- edits: []protocol.TextEdit{ +- NewEdit(1, 4, 2, 3, "12\n345"), +- }, +- wantErr: true, +- }, +- } +- +- for _, test := range tests { +- t.Run(test.label, func(t *testing.T) { +- got, err := applyEdits(protocol.NewMapper("", []byte(test.content)), test.edits, false) +- if (err != nil) != test.wantErr { +- t.Errorf("got err %v, want error: %t", err, test.wantErr) +- } +- if err != nil { +- return +- } +- if got := string(got); got != test.want { +- t.Errorf("got %q, want %q", got, test.want) +- } +- }) +- } +-} +diff -urN a/gopls/internal/test/integration/fake/glob/glob.go b/gopls/internal/test/integration/fake/glob/glob.go +--- a/gopls/internal/test/integration/fake/glob/glob.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/integration/fake/glob/glob.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,349 +0,0 @@ +-// Copyright 2023 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-// Package glob implements an LSP-compliant glob pattern matcher for testing. +-package glob +- +-import ( +- "errors" +- "fmt" +- "strings" +- "unicode/utf8" +-) +- +-// A Glob is an LSP-compliant glob pattern, as defined by the spec: +-// https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#documentFilter +-// +-// NOTE: this implementation is currently only intended for testing. In order +-// to make it production ready, we'd need to: +-// - verify it against the VS Code implementation +-// - add more tests +-// - microbenchmark, likely avoiding the element interface +-// - resolve the question of what is meant by "character". If it's a UTF-16 +-// code (as we suspect) it'll be a bit more work. +-// +-// Quoting from the spec: +-// Glob patterns can have the following syntax: +-// - `*` to match one or more characters in a path segment +-// - `?` to match on one character in a path segment +-// - `**` to match any number of path segments, including none +-// - `{}` to group sub patterns into an OR expression. (e.g. 
`**/*.{ts,js}` +-// matches all TypeScript and JavaScript files) +-// - `[]` to declare a range of characters to match in a path segment +-// (e.g., `example.[0-9]` to match on `example.0`, `example.1`, …) +-// - `[!...]` to negate a range of characters to match in a path segment +-// (e.g., `example.[!0-9]` to match on `example.a`, `example.b`, but +-// not `example.0`) +-// +-// Expanding on this: +-// - '/' matches one or more literal slashes. +-// - any other character matches itself literally. +-type Glob struct { +- elems []element // pattern elements +-} +- +-// Parse builds a Glob for the given pattern, returning an error if the pattern +-// is invalid. +-func Parse(pattern string) (*Glob, error) { +- g, _, err := parse(pattern, false) +- return g, err +-} +- +-func parse(pattern string, nested bool) (*Glob, string, error) { +- g := new(Glob) +- for len(pattern) > 0 { +- switch pattern[0] { +- case '/': +- pattern = pattern[1:] +- g.elems = append(g.elems, slash{}) +- +- case '*': +- if len(pattern) > 1 && pattern[1] == '*' { +- if (len(g.elems) > 0 && g.elems[len(g.elems)-1] != slash{}) || (len(pattern) > 2 && pattern[2] != '/') { +- return nil, "", errors.New("** may only be adjacent to '/'") +- } +- pattern = pattern[2:] +- g.elems = append(g.elems, starStar{}) +- break +- } +- pattern = pattern[1:] +- g.elems = append(g.elems, star{}) +- +- case '?': +- pattern = pattern[1:] +- g.elems = append(g.elems, anyChar{}) +- +- case '{': +- var gs group +- for pattern[0] != '}' { +- pattern = pattern[1:] +- g, pat, err := parse(pattern, true) +- if err != nil { +- return nil, "", err +- } +- if len(pat) == 0 { +- return nil, "", errors.New("unmatched '{'") +- } +- pattern = pat +- gs = append(gs, g) +- } +- pattern = pattern[1:] +- g.elems = append(g.elems, gs) +- +- case '}', ',': +- if nested { +- return g, pattern, nil +- } +- pattern = g.parseLiteral(pattern, false) +- +- case '[': +- pattern = pattern[1:] +- if len(pattern) == 0 { +- return nil, "", errBadRange +- } +- negate := false +- if pattern[0] == '!' { +- pattern = pattern[1:] +- negate = true +- } +- low, sz, err := readRangeRune(pattern) +- if err != nil { +- return nil, "", err +- } +- pattern = pattern[sz:] +- if len(pattern) == 0 || pattern[0] != '-' { +- return nil, "", errBadRange +- } +- pattern = pattern[1:] +- high, sz, err := readRangeRune(pattern) +- if err != nil { +- return nil, "", err +- } +- pattern = pattern[sz:] +- if len(pattern) == 0 || pattern[0] != ']' { +- return nil, "", errBadRange +- } +- pattern = pattern[1:] +- g.elems = append(g.elems, charRange{negate, low, high}) +- +- default: +- pattern = g.parseLiteral(pattern, nested) +- } +- } +- return g, "", nil +-} +- +-// helper for decoding a rune in range elements, e.g. [a-z] +-func readRangeRune(input string) (rune, int, error) { +- r, sz := utf8.DecodeRuneInString(input) +- var err error +- if r == utf8.RuneError { +- // See the documentation for DecodeRuneInString. 
+- switch sz { +- case 0: +- err = errBadRange +- case 1: +- err = errInvalidUTF8 +- } +- } +- return r, sz, err +-} +- +-var ( +- errBadRange = errors.New("'[' patterns must be of the form [x-y]") +- errInvalidUTF8 = errors.New("invalid UTF-8 encoding") +-) +- +-func (g *Glob) parseLiteral(pattern string, nested bool) string { +- var specialChars string +- if nested { +- specialChars = "*?{[/}," +- } else { +- specialChars = "*?{[/" +- } +- end := strings.IndexAny(pattern, specialChars) +- if end == -1 { +- end = len(pattern) +- } +- g.elems = append(g.elems, literal(pattern[:end])) +- return pattern[end:] +-} +- +-func (g *Glob) String() string { +- var b strings.Builder +- for _, e := range g.elems { +- fmt.Fprint(&b, e) +- } +- return b.String() +-} +- +-// element holds a glob pattern element, as defined below. +-type element fmt.Stringer +- +-// element types. +-type ( +- slash struct{} // One or more '/' separators +- literal string // string literal, not containing /, *, ?, {}, or [] +- star struct{} // * +- anyChar struct{} // ? +- starStar struct{} // ** +- group []*Glob // {foo, bar, ...} grouping +- charRange struct { // [a-z] character range +- negate bool +- low, high rune +- } +-) +- +-func (s slash) String() string { return "/" } +-func (l literal) String() string { return string(l) } +-func (s star) String() string { return "*" } +-func (a anyChar) String() string { return "?" } +-func (s starStar) String() string { return "**" } +-func (g group) String() string { +- var parts []string +- for _, g := range g { +- parts = append(parts, g.String()) +- } +- return "{" + strings.Join(parts, ",") + "}" +-} +-func (r charRange) String() string { +- return "[" + string(r.low) + "-" + string(r.high) + "]" +-} +- +-// Match reports whether the input string matches the glob pattern. +-func (g *Glob) Match(input string) bool { +- return match(g.elems, input) +-} +- +-func match(elems []element, input string) (ok bool) { +- var elem any +- for len(elems) > 0 { +- elem, elems = elems[0], elems[1:] +- switch elem := elem.(type) { +- case slash: +- if len(input) == 0 || input[0] != '/' { +- return false +- } +- for input[0] == '/' { +- input = input[1:] +- } +- +- case starStar: +- // Special cases: +- // - **/a matches "a" +- // - **/ matches everything +- // +- // Note that if ** is followed by anything, it must be '/' (this is +- // enforced by Parse). +- if len(elems) > 0 { +- elems = elems[1:] +- } +- +- // A trailing ** matches anything. +- if len(elems) == 0 { +- return true +- } +- +- // Backtracking: advance pattern segments until the remaining pattern +- // elements match. +- for len(input) != 0 { +- if match(elems, input) { +- return true +- } +- _, input = split(input) +- } +- return false +- +- case literal: +- if !strings.HasPrefix(input, string(elem)) { +- return false +- } +- input = input[len(elem):] +- +- case star: +- var segInput string +- segInput, input = split(input) +- +- elemEnd := len(elems) +- for i, e := range elems { +- if e == (slash{}) { +- elemEnd = i +- break +- } +- } +- segElems := elems[:elemEnd] +- elems = elems[elemEnd:] +- +- // A trailing * matches the entire segment. +- if len(segElems) == 0 { +- break +- } +- +- // Backtracking: advance characters until remaining subpattern elements +- // match. 
+- matched := false +- for i := range segInput { +- if match(segElems, segInput[i:]) { +- matched = true +- break +- } +- } +- if !matched { +- return false +- } +- +- case anyChar: +- if len(input) == 0 || input[0] == '/' { +- return false +- } +- input = input[1:] +- +- case group: +- // Append remaining pattern elements to each group member looking for a +- // match. +- var branch []element +- for _, m := range elem { +- branch = branch[:0] +- branch = append(branch, m.elems...) +- branch = append(branch, elems...) +- if match(branch, input) { +- return true +- } +- } +- return false +- +- case charRange: +- if len(input) == 0 || input[0] == '/' { +- return false +- } +- c, sz := utf8.DecodeRuneInString(input) +- if c < elem.low || c > elem.high { +- return false +- } +- input = input[sz:] +- +- default: +- panic(fmt.Sprintf("segment type %T not implemented", elem)) +- } +- } +- +- return len(input) == 0 +-} +- +-// split returns the portion before and after the first slash +-// (or sequence of consecutive slashes). If there is no slash +-// it returns (input, nil). +-func split(input string) (first, rest string) { +- i := strings.IndexByte(input, '/') +- if i < 0 { +- return input, "" +- } +- first = input[:i] +- for j := i; j < len(input); j++ { +- if input[j] != '/' { +- return first, input[j:] +- } +- } +- return first, "" +-} +diff -urN a/gopls/internal/test/integration/fake/glob/glob_test.go b/gopls/internal/test/integration/fake/glob/glob_test.go +--- a/gopls/internal/test/integration/fake/glob/glob_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/integration/fake/glob/glob_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,118 +0,0 @@ +-// Copyright 2023 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package glob_test +- +-import ( +- "testing" +- +- "golang.org/x/tools/gopls/internal/test/integration/fake/glob" +-) +- +-func TestParseErrors(t *testing.T) { +- tests := []string{ +- "***", +- "ab{c", +- "[]", +- "[a-]", +- "ab{c{d}", +- } +- +- for _, test := range tests { +- _, err := glob.Parse(test) +- if err == nil { +- t.Errorf("Parse(%q) succeeded unexpectedly", test) +- } +- } +-} +- +-func TestMatch(t *testing.T) { +- tests := []struct { +- pattern, input string +- want bool +- }{ +- // Basic cases. +- {"", "", true}, +- {"", "a", false}, +- {"", "/", false}, +- {"abc", "abc", true}, +- +- // ** behavior +- {"**", "abc", true}, +- {"**/abc", "abc", true}, +- {"**", "abc/def", true}, +- {"{a/**/c,a/**/d}", "a/b/c", true}, +- {"{a/**/c,a/**/d}", "a/b/c/d", true}, +- {"{a/**/c,a/**/e}", "a/b/c/d", false}, +- {"{a/**/c,a/**/e,a/**/d}", "a/b/c/d", true}, +- {"{/a/**/c,a/**/e,a/**/d}", "a/b/c/d", true}, +- {"{/a/**/c,a/**/e,a/**/d}", "/a/b/c/d", false}, +- {"{/a/**/c,a/**/e,a/**/d}", "/a/b/c", true}, +- {"{/a/**/e,a/**/e,a/**/d}", "/a/b/c", false}, +- +- // * and ? 
behavior +- {"/*", "/a", true}, +- {"*", "foo", true}, +- {"*o", "foo", true}, +- {"*o", "foox", false}, +- {"f*o", "foo", true}, +- {"f*o", "fo", true}, +- {"fo?", "foo", true}, +- {"fo?", "fox", true}, +- {"fo?", "fooo", false}, +- {"fo?", "fo", false}, +- {"?", "a", true}, +- {"?", "ab", false}, +- {"?", "", false}, +- {"*?", "", false}, +- {"?b", "ab", true}, +- {"?c", "ab", false}, +- +- // {} behavior +- {"ab{c,d}e", "abce", true}, +- {"ab{c,d}e", "abde", true}, +- {"ab{c,d}e", "abxe", false}, +- {"ab{c,d}e", "abe", false}, +- {"{a,b}c", "ac", true}, +- {"{a,b}c", "bc", true}, +- {"{a,b}c", "ab", false}, +- {"a{b,c}", "ab", true}, +- {"a{b,c}", "ac", true}, +- {"a{b,c}", "bc", false}, +- {"ab{c{1,2},d}e", "abc1e", true}, +- {"ab{c{1,2},d}e", "abde", true}, +- {"ab{c{1,2},d}e", "abc1f", false}, +- {"ab{c{1,2},d}e", "abce", false}, +- {"ab{c[}-~]}d", "abc}d", true}, +- {"ab{c[}-~]}d", "abc~d", true}, +- {"ab{c[}-~],y}d", "abcxd", false}, +- {"ab{c[}-~],y}d", "abyd", true}, +- {"ab{c[}-~],y}d", "abd", false}, +- {"{a/b/c,d/e/f}", "a/b/c", true}, +- {"/ab{/c,d}e", "/ab/ce", true}, +- {"/ab{/c,d}e", "/ab/cf", false}, +- +- // [-] behavior +- {"[a-c]", "a", true}, +- {"[a-c]", "b", true}, +- {"[a-c]", "c", true}, +- {"[a-c]", "d", false}, +- {"[a-c]", " ", false}, +- +- // Realistic examples. +- {"**/*.{ts,js}", "path/to/foo.ts", true}, +- {"**/*.{ts,js}", "path/to/foo.js", true}, +- {"**/*.{ts,js}", "path/to/foo.go", false}, +- } +- +- for _, test := range tests { +- g, err := glob.Parse(test.pattern) +- if err != nil { +- t.Fatalf("New(%q) failed unexpectedly: %v", test.pattern, err) +- } +- if got := g.Match(test.input); got != test.want { +- t.Errorf("New(%q).Match(%q) = %t, want %t", test.pattern, test.input, got, test.want) +- } +- } +-} +diff -urN a/gopls/internal/test/integration/fake/proxy.go b/gopls/internal/test/integration/fake/proxy.go +--- a/gopls/internal/test/integration/fake/proxy.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/integration/fake/proxy.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,41 +0,0 @@ +-// Copyright 2020 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package fake +- +-import ( +- "fmt" +- "strings" +- +- "golang.org/x/tools/internal/proxydir" +-) +- +-// WriteProxy creates a new proxy file tree using the txtar-encoded content, +-// and returns its URL. +-func WriteProxy(tmpdir string, files map[string][]byte) (string, error) { +- type moduleVersion struct { +- modulePath, version string +- } +- // Transform into the format expected by the proxydir package. +- filesByModule := make(map[moduleVersion]map[string][]byte) +- for name, data := range files { +- modulePath, version, suffix := splitModuleVersionPath(name) +- mv := moduleVersion{modulePath, version} +- if _, ok := filesByModule[mv]; !ok { +- filesByModule[mv] = make(map[string][]byte) +- } +- filesByModule[mv][suffix] = data +- } +- for mv, files := range filesByModule { +- // Don't hoist this check out of the loop: +- // the problem is benign if filesByModule is empty. +- if strings.Contains(tmpdir, "#") { +- return "", fmt.Errorf("WriteProxy's tmpdir contains '#', which is unsuitable for GOPROXY. 
(If tmpdir was derived from testing.T.Name, use t.Run to ensure that each subtest has a unique name.)") +- } +- if err := proxydir.WriteModuleVersion(tmpdir, mv.modulePath, mv.version, files); err != nil { +- return "", fmt.Errorf("error writing %s@%s: %v", mv.modulePath, mv.version, err) +- } +- } +- return proxydir.ToURL(tmpdir), nil +-} +diff -urN a/gopls/internal/test/integration/fake/sandbox.go b/gopls/internal/test/integration/fake/sandbox.go +--- a/gopls/internal/test/integration/fake/sandbox.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/integration/fake/sandbox.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,285 +0,0 @@ +-// Copyright 2020 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package fake +- +-import ( +- "context" +- "errors" +- "fmt" +- "os" +- "path/filepath" +- "strings" +- +- "golang.org/x/tools/internal/gocommand" +- "golang.org/x/tools/internal/robustio" +- "golang.org/x/tools/txtar" +-) +- +-// Sandbox holds a collection of temporary resources to use for working with Go +-// code in tests. +-type Sandbox struct { +- gopath string +- rootdir string +- goproxy string +- Workdir *Workdir +- goCommandRunner gocommand.Runner +-} +- +-// SandboxConfig controls the behavior of a test sandbox. The zero value +-// defines a reasonable default. +-type SandboxConfig struct { +- // RootDir sets the base directory to use when creating temporary +- // directories. If not specified, defaults to a new temporary directory. +- RootDir string +- // Files holds a txtar-encoded archive of files to populate the initial state +- // of the working directory. +- // +- // For convenience, the special substring "$SANDBOX_WORKDIR" is replaced with +- // the sandbox's resolved working directory before writing files. +- Files map[string][]byte +- // InGoPath specifies that the working directory should be within the +- // temporary GOPATH. +- InGoPath bool +- // Workdir configures the working directory of the Sandbox. It behaves as +- // follows: +- // - if set to an absolute path, use that path as the working directory. +- // - if set to a relative path, create and use that path relative to the +- // sandbox. +- // - if unset, default to a the 'work' subdirectory of the sandbox. +- // +- // This option is incompatible with InGoPath or Files. +- Workdir string +- // ProxyFiles holds a txtar-encoded archive of files to populate a file-based +- // Go proxy. +- ProxyFiles map[string][]byte +- // GOPROXY is the explicit GOPROXY value that should be used for the sandbox. +- // +- // This option is incompatible with ProxyFiles. +- GOPROXY string +-} +- +-// NewSandbox creates a collection of named temporary resources, with a +-// working directory populated by the txtar-encoded content in srctxt, and a +-// file-based module proxy populated with the txtar-encoded content in +-// proxytxt. +-// +-// If rootDir is non-empty, it will be used as the root of temporary +-// directories created for the sandbox. Otherwise, a new temporary directory +-// will be used as root. +-// +-// TODO(rfindley): the sandbox abstraction doesn't seem to carry its weight. +-// Sandboxes should be composed out of their building-blocks, rather than via a +-// monolithic configuration. 
+-func NewSandbox(config *SandboxConfig) (_ *Sandbox, err error) { +- if config == nil { +- config = new(SandboxConfig) +- } +- if err := validateConfig(*config); err != nil { +- return nil, fmt.Errorf("invalid SandboxConfig: %v", err) +- } +- +- sb := &Sandbox{} +- defer func() { +- // Clean up if we fail at any point in this constructor. +- if err != nil { +- sb.Close() // ignore error +- } +- }() +- +- rootDir := config.RootDir +- if rootDir == "" { +- rootDir, err = os.MkdirTemp(config.RootDir, "gopls-sandbox-") +- if err != nil { +- return nil, fmt.Errorf("creating temporary workdir: %v", err) +- } +- } +- sb.rootdir = rootDir +- sb.gopath = filepath.Join(sb.rootdir, "gopath") +- if err := os.Mkdir(sb.gopath, 0755); err != nil { +- return nil, err +- } +- if config.GOPROXY != "" { +- sb.goproxy = config.GOPROXY +- } else { +- proxydir := filepath.Join(sb.rootdir, "proxy") +- if err := os.Mkdir(proxydir, 0755); err != nil { +- return nil, err +- } +- sb.goproxy, err = WriteProxy(proxydir, config.ProxyFiles) +- if err != nil { +- return nil, err +- } +- } +- // Short-circuit writing the workdir if we're given an absolute path, since +- // this is used for running in an existing directory. +- // TODO(findleyr): refactor this to be less of a workaround. +- if filepath.IsAbs(config.Workdir) { +- sb.Workdir, err = NewWorkdir(config.Workdir, nil) +- if err != nil { +- return nil, err +- } +- return sb, nil +- } +- var workdir string +- if config.Workdir == "" { +- if config.InGoPath { +- // Set the working directory as $GOPATH/src. +- workdir = filepath.Join(sb.gopath, "src") +- } else if workdir == "" { +- workdir = filepath.Join(sb.rootdir, "work") +- } +- } else { +- // relative path +- workdir = filepath.Join(sb.rootdir, config.Workdir) +- } +- if err := os.MkdirAll(workdir, 0755); err != nil { +- return nil, err +- } +- sb.Workdir, err = NewWorkdir(workdir, config.Files) +- if err != nil { +- return nil, err +- } +- return sb, nil +-} +- +-func UnpackTxt(txt string) map[string][]byte { +- dataMap := make(map[string][]byte) +- archive := txtar.Parse([]byte(txt)) +- for _, f := range archive.Files { +- if _, ok := dataMap[f.Name]; ok { +- panic(fmt.Sprintf("found file %q twice", f.Name)) +- } +- dataMap[f.Name] = f.Data +- } +- return dataMap +-} +- +-func validateConfig(config SandboxConfig) error { +- if filepath.IsAbs(config.Workdir) && (len(config.Files) > 0 || config.InGoPath) { +- return errors.New("absolute Workdir cannot be set in conjunction with Files or InGoPath") +- } +- if config.Workdir != "" && config.InGoPath { +- return errors.New("Workdir cannot be set in conjunction with InGoPath") +- } +- if config.GOPROXY != "" && config.ProxyFiles != nil { +- return errors.New("GOPROXY cannot be set in conjunction with ProxyFiles") +- } +- return nil +-} +- +-// splitModuleVersionPath extracts module information from files stored in the +-// directory structure modulePath@version/suffix. +-// For example: +-// +-// splitModuleVersionPath("mod.com@v1.2.3/package") = ("mod.com", "v1.2.3", "package") +-func splitModuleVersionPath(path string) (modulePath, version, suffix string) { +- parts := strings.Split(path, "/") +- var modulePathParts []string +- for i, p := range parts { +- if strings.Contains(p, "@") { +- mv := strings.SplitN(p, "@", 2) +- modulePathParts = append(modulePathParts, mv[0]) +- return strings.Join(modulePathParts, "/"), mv[1], strings.Join(parts[i+1:], "/") +- } +- modulePathParts = append(modulePathParts, p) +- } +- // Default behavior: this is just a module path. 
+- return path, "", "" +-} +- +-func (sb *Sandbox) RootDir() string { +- return sb.rootdir +-} +- +-// GOPATH returns the value of the Sandbox GOPATH. +-func (sb *Sandbox) GOPATH() string { +- return sb.gopath +-} +- +-// GoEnv returns the default environment variables that can be used for +-// invoking Go commands in the sandbox. +-func (sb *Sandbox) GoEnv() map[string]string { +- return map[string]string{ +- "GOPATH": sb.GOPATH(), +- "GOPROXY": sb.goproxy, +- "GO111MODULE": "", +- "GOSUMDB": "off", +- "GOPACKAGESDRIVER": "off", +- "GOTOOLCHAIN": "local", // tests should not download toolchains +- // TODO(golang/go#74595): Why don't we respect GOMODCACHE in the +- // settings.env? See comment at env.CleanModCache. +- "GOMODCACHE": "", +- } +-} +- +-// goCommandInvocation returns a new gocommand.Invocation initialized with the +-// sandbox environment variables and working directory. +-func (sb *Sandbox) goCommandInvocation() gocommand.Invocation { +- var vars []string +- for k, v := range sb.GoEnv() { +- vars = append(vars, fmt.Sprintf("%s=%s", k, v)) +- } +- inv := gocommand.Invocation{ +- Env: vars, +- } +- // sb.Workdir may be nil if we exited the constructor with errors (we call +- // Close to clean up any partial state from the constructor, which calls +- // RunGoCommand). +- if sb.Workdir != nil { +- inv.WorkingDir = string(sb.Workdir.RelativeTo) +- } +- return inv +-} +- +-// RunGoCommand executes a go command in the sandbox and returns its standard +-// output. If checkForFileChanges is true, the sandbox scans the working +-// directory and emits file change events for any file changes it finds. +-func (sb *Sandbox) RunGoCommand(ctx context.Context, dir, verb string, args, env []string, checkForFileChanges bool) ([]byte, error) { +- inv := sb.goCommandInvocation() +- inv.Verb = verb +- inv.Args = args +- inv.Env = append(inv.Env, env...) +- if dir != "" { +- inv.WorkingDir = sb.Workdir.AbsPath(dir) +- } +- stdout, stderr, _, err := sb.goCommandRunner.RunRaw(ctx, inv) +- if err != nil { +- return nil, fmt.Errorf("go command failed (stdout: %s) (stderr: %s): %v", stdout.String(), stderr.String(), err) +- } +- // Since running a go command may result in changes to workspace files, +- // check if we need to send any "watched" file events. +- // +- // TODO(rFindley): this side-effect can impact the usability of the sandbox +- // for benchmarks. Consider refactoring. +- if sb.Workdir != nil && checkForFileChanges { +- if err := sb.Workdir.CheckForFileChanges(ctx); err != nil { +- return nil, fmt.Errorf("checking for file changes: %w", err) +- } +- } +- return stdout.Bytes(), nil +-} +- +-// GoVersion checks the version of the go command. +-// It returns the X in Go 1.X. +-func (sb *Sandbox) GoVersion(ctx context.Context) (int, error) { +- inv := sb.goCommandInvocation() +- return gocommand.GoVersion(ctx, inv, &sb.goCommandRunner) +-} +- +-// Close removes all state associated with the sandbox. +-func (sb *Sandbox) Close() error { +- var goCleanErr error +- // Careful: sb may not be fully initialized. 
+- if sb.gopath != "" && sb.Workdir != nil { +- // Important: run this command in RootDir so that it doesn't interact with +- // any toolchain downloads that may occur +- _, goCleanErr = sb.RunGoCommand(context.Background(), sb.RootDir(), "clean", []string{"-modcache"}, nil, false) +- } +- err := robustio.RemoveAll(sb.rootdir) +- if err != nil || goCleanErr != nil { +- return fmt.Errorf("error(s) cleaning sandbox: cleaning modcache: %v; removing files: %v", goCleanErr, err) +- } +- return nil +-} +diff -urN a/gopls/internal/test/integration/fake/workdir.go b/gopls/internal/test/integration/fake/workdir.go +--- a/gopls/internal/test/integration/fake/workdir.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/integration/fake/workdir.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,429 +0,0 @@ +-// Copyright 2020 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package fake +- +-import ( +- "bytes" +- "context" +- "crypto/sha256" +- "fmt" +- "io/fs" +- "os" +- "path/filepath" +- "runtime" +- "slices" +- "sort" +- "strings" +- "sync" +- "time" +- +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/internal/robustio" +-) +- +-// RelativeTo is a helper for operations relative to a given directory. +-type RelativeTo string +- +-// AbsPath returns an absolute filesystem path for the workdir-relative path. +-func (r RelativeTo) AbsPath(path string) string { +- fp := filepath.FromSlash(path) +- if filepath.IsAbs(fp) { +- return fp +- } +- return filepath.Join(string(r), filepath.FromSlash(path)) +-} +- +-// RelPath returns a '/'-encoded path relative to the working directory (or an +-// absolute path if the file is outside of workdir) +-func (r RelativeTo) RelPath(fp string) string { +- root := string(r) +- if rel, err := filepath.Rel(root, fp); err == nil && !strings.HasPrefix(rel, "..") { +- return filepath.ToSlash(rel) +- } +- return filepath.ToSlash(fp) +-} +- +-// writeFileData writes content to the relative path, replacing the special +-// token $SANDBOX_WORKDIR with the relative root given by rel. It does not +-// trigger any file events. +-func writeFileData(path string, content []byte, rel RelativeTo) error { +- content = bytes.ReplaceAll(content, []byte("$SANDBOX_WORKDIR"), []byte(rel)) +- fp := rel.AbsPath(path) +- if err := os.MkdirAll(filepath.Dir(fp), 0755); err != nil { +- return fmt.Errorf("creating nested directory: %w", err) +- } +- backoff := 1 * time.Millisecond +- for { +- err := os.WriteFile(fp, content, 0644) +- if err != nil { +- // This lock file violation is not handled by the robustio package, as it +- // indicates a real race condition that could be avoided. +- if isWindowsErrLockViolation(err) { +- time.Sleep(backoff) +- backoff *= 2 +- continue +- } +- return fmt.Errorf("writing %q: %w", path, err) +- } +- return nil +- } +-} +- +-// isWindowsErrLockViolation reports whether err is ERROR_LOCK_VIOLATION +-// on Windows. +-var isWindowsErrLockViolation = func(error) bool { return false } +- +-// Workdir is a temporary working directory for tests. It exposes file +-// operations in terms of relative paths, and fakes file watching by triggering +-// events on file operations. +-type Workdir struct { +- RelativeTo +- +- watcherMu sync.Mutex +- watchers []func(context.Context, []protocol.FileEvent) +- +- fileMu sync.Mutex +- // File identities we know about, for the purpose of detecting changes. 
+- // +- // Since files is only used for detecting _changes_, we are tolerant of +- // fileIDs that may have hash and mtime coming from different states of the +- // file: if either are out of sync, then the next poll should detect a +- // discrepancy. It is OK if we detect too many changes, but not OK if we miss +- // changes. +- // +- // For that matter, this mechanism for detecting changes can still be flaky +- // on platforms where mtime is very coarse (such as older versions of WSL). +- // It would be much better to use a proper fs event library, but we can't +- // currently import those into x/tools. +- // +- // TODO(golang/go#52284): replace this polling mechanism with a +- // cross-platform library for filesystem notifications. +- files map[string]fileID +-} +- +-// NewWorkdir writes the txtar-encoded file data in txt to dir, and returns a +-// Workir for operating on these files using +-func NewWorkdir(dir string, files map[string][]byte) (*Workdir, error) { +- w := &Workdir{RelativeTo: RelativeTo(dir)} +- for name, data := range files { +- if err := writeFileData(name, data, w.RelativeTo); err != nil { +- return nil, fmt.Errorf("writing to workdir: %w", err) +- } +- } +- _, err := w.pollFiles() // poll files to populate the files map. +- return w, err +-} +- +-// fileID identifies a file version on disk. +-type fileID struct { +- mtime time.Time +- hash string // empty if mtime is old enough to be reliable; otherwise a file digest +-} +- +-func hashFile(data []byte) string { +- return fmt.Sprintf("%x", sha256.Sum256(data)) +-} +- +-// RootURI returns the root URI for this working directory of this scratch +-// environment. +-func (w *Workdir) RootURI() protocol.DocumentURI { +- return protocol.URIFromPath(string(w.RelativeTo)) +-} +- +-// AddWatcher registers the given func to be called on any file change. +-func (w *Workdir) AddWatcher(watcher func(context.Context, []protocol.FileEvent)) { +- w.watcherMu.Lock() +- w.watchers = append(w.watchers, watcher) +- w.watcherMu.Unlock() +-} +- +-// URI returns the URI to a the workdir-relative path. +-func (w *Workdir) URI(path string) protocol.DocumentURI { +- return protocol.URIFromPath(w.AbsPath(path)) +-} +- +-// URIToPath converts a uri to a workdir-relative path (or an absolute path, +-// if the uri is outside of the workdir). +-func (w *Workdir) URIToPath(uri protocol.DocumentURI) string { +- return w.RelPath(uri.Path()) +-} +- +-// EntireFile returns the entire extent of the file named by the workdir-relative path. +-func (w *Workdir) EntireFile(path string) protocol.Location { +- return protocol.Location{URI: w.URI(path)} +-} +- +-// ReadFile reads a text file specified by a workdir-relative path. +-func (w *Workdir) ReadFile(path string) ([]byte, error) { +- backoff := 1 * time.Millisecond +- for { +- b, err := os.ReadFile(w.AbsPath(path)) +- if err != nil { +- if runtime.GOOS == "plan9" && strings.HasSuffix(err.Error(), " exclusive use file already open") { +- // Plan 9 enforces exclusive access to locked files. +- // Give the owner time to unlock it and retry. +- time.Sleep(backoff) +- backoff *= 2 +- continue +- } +- return nil, err +- } +- return b, nil +- } +-} +- +-// RegexpSearch searches the file corresponding to path for the first position +-// matching re. 
+-func (w *Workdir) RegexpSearch(path string, re string) (protocol.Location, error) { +- content, err := w.ReadFile(path) +- if err != nil { +- return protocol.Location{}, err +- } +- mapper := protocol.NewMapper(w.URI(path), content) +- return regexpLocation(mapper, re) +-} +- +-// RemoveFile removes a workdir-relative file path and notifies watchers of the +-// change. +-func (w *Workdir) RemoveFile(ctx context.Context, path string) error { +- fp := w.AbsPath(path) +- if err := robustio.RemoveAll(fp); err != nil { +- return fmt.Errorf("removing %q: %w", path, err) +- } +- +- return w.CheckForFileChanges(ctx) +-} +- +-// WriteFiles writes the text file content to workdir-relative paths and +-// notifies watchers of the changes. +-func (w *Workdir) WriteFiles(ctx context.Context, files map[string]string) error { +- for path, content := range files { +- fp := w.AbsPath(path) +- _, err := os.Stat(fp) +- if err != nil && !os.IsNotExist(err) { +- return fmt.Errorf("checking if %q exists: %w", path, err) +- } +- if err := writeFileData(path, []byte(content), w.RelativeTo); err != nil { +- return err +- } +- } +- return w.CheckForFileChanges(ctx) +-} +- +-// WriteFile writes text file content to a workdir-relative path and notifies +-// watchers of the change. +-func (w *Workdir) WriteFile(ctx context.Context, path, content string) error { +- return w.WriteFiles(ctx, map[string]string{path: content}) +-} +- +-// RenameFile performs an on disk-renaming of the workdir-relative oldPath to +-// workdir-relative newPath, and notifies watchers of the changes. +-// +-// oldPath must either be a regular file or in the same directory as newPath. +-func (w *Workdir) RenameFile(ctx context.Context, oldPath, newPath string) error { +- oldAbs := w.AbsPath(oldPath) +- newAbs := w.AbsPath(newPath) +- +- // For os.Rename, “OS-specific restrictions may apply when oldpath and newpath +- // are in different directories.” If that applies here, we may fall back to +- // ReadFile, WriteFile, and RemoveFile to perform the rename non-atomically. +- // +- // However, the fallback path only works for regular files: renaming a +- // directory would be much more complex and isn't needed for our tests. +- fallbackOk := false +- if filepath.Dir(oldAbs) != filepath.Dir(newAbs) { +- fi, err := os.Stat(oldAbs) +- if err == nil && !fi.Mode().IsRegular() { +- return &os.PathError{ +- Op: "RenameFile", +- Path: oldPath, +- Err: fmt.Errorf("%w: file is not regular and not in the same directory as %s", os.ErrInvalid, newPath), +- } +- } +- fallbackOk = true +- } +- +- var renameErr error +- const debugFallback = false +- if fallbackOk && debugFallback { +- renameErr = fmt.Errorf("%w: debugging fallback path", os.ErrInvalid) +- } else { +- renameErr = robustio.Rename(oldAbs, newAbs) +- } +- if renameErr != nil { +- if !fallbackOk { +- return renameErr // The OS-specific Rename restrictions do not apply. +- } +- +- content, err := w.ReadFile(oldPath) +- if err != nil { +- // If we can't even read the file, the error from Rename may be accurate. +- return renameErr +- } +- fi, err := os.Stat(newAbs) +- if err == nil { +- if fi.IsDir() { +- // “If newpath already exists and is not a directory, Rename replaces it.” +- // But if it is a directory, maybe not? +- return renameErr +- } +- // On most platforms, Rename replaces the named file with a new file, +- // rather than overwriting the existing file it in place. Mimic that +- // behavior here. 
+- if err := robustio.RemoveAll(newAbs); err != nil { +- // Maybe we don't have permission to replace newPath? +- return renameErr +- } +- } else if !os.IsNotExist(err) { +- // If the destination path already exists or there is some problem with it, +- // the error from Rename may be accurate. +- return renameErr +- } +- if writeErr := writeFileData(newPath, content, w.RelativeTo); writeErr != nil { +- // At this point we have tried to actually write the file. +- // If it still doesn't exist, assume that the error from Rename was accurate: +- // for example, maybe we don't have permission to create the new path. +- // Otherwise, return the error from the write, which may indicate some +- // other problem (such as a full disk). +- if _, statErr := os.Stat(newAbs); !os.IsNotExist(statErr) { +- return writeErr +- } +- return renameErr +- } +- if err := robustio.RemoveAll(oldAbs); err != nil { +- // If we failed to remove the old file, that may explain the Rename error too. +- // Make a best effort to back out the write to the new path. +- robustio.RemoveAll(newAbs) // ignore error +- return renameErr +- } +- } +- +- return w.CheckForFileChanges(ctx) +-} +- +-// ListFiles returns a new sorted list of the relative paths of files in dir, +-// recursively. +-func (w *Workdir) ListFiles(dir string) ([]string, error) { +- absDir := w.AbsPath(dir) +- var paths []string +- if err := filepath.Walk(absDir, func(fp string, info os.FileInfo, err error) error { +- if err != nil { +- return err +- } +- if info.Mode()&(fs.ModeDir|fs.ModeSymlink) == 0 { +- paths = append(paths, w.RelPath(fp)) +- } +- return nil +- }); err != nil { +- return nil, err +- } +- sort.Strings(paths) +- return paths, nil +-} +- +-// CheckForFileChanges walks the working directory and checks for any files +-// that have changed since the last poll. +-func (w *Workdir) CheckForFileChanges(ctx context.Context) error { +- evts, err := w.pollFiles() +- if err != nil { +- return err +- } +- if len(evts) == 0 { +- return nil +- } +- w.watcherMu.Lock() +- watchers := slices.Clone(w.watchers) +- w.watcherMu.Unlock() +- for _, w := range watchers { +- w(ctx, evts) +- } +- return nil +-} +- +-// pollFiles updates w.files and calculates FileEvents corresponding to file +-// state changes since the last poll. It does not call sendEvents. +-func (w *Workdir) pollFiles() ([]protocol.FileEvent, error) { +- w.fileMu.Lock() +- defer w.fileMu.Unlock() +- +- newFiles := make(map[string]fileID) +- var evts []protocol.FileEvent +- if err := filepath.Walk(string(w.RelativeTo), func(fp string, info os.FileInfo, err error) error { +- if err != nil { +- return err +- } +- // Skip directories and symbolic links (which may be links to directories). +- // +- // The latter matters for repos like Kubernetes, which use symlinks. +- if info.Mode()&(fs.ModeDir|fs.ModeSymlink) != 0 { +- return nil +- } +- +- // Opt: avoid reading the file if mtime is sufficiently old to be reliable. +- // +- // If mtime is recent, it may not sufficiently identify the file contents: +- // a subsequent write could result in the same mtime. For these cases, we +- // must read the file contents. 
+- id := fileID{mtime: info.ModTime()} +- if time.Since(info.ModTime()) < 2*time.Second { +- data, err := os.ReadFile(fp) +- if err != nil { +- return err +- } +- id.hash = hashFile(data) +- } +- path := w.RelPath(fp) +- newFiles[path] = id +- +- if w.files != nil { +- oldID, ok := w.files[path] +- delete(w.files, path) +- switch { +- case !ok: +- evts = append(evts, protocol.FileEvent{ +- URI: w.URI(path), +- Type: protocol.Created, +- }) +- case oldID != id: +- changed := true +- +- // Check whether oldID and id do not match because oldID was polled at +- // a recent enough to time such as to require hashing. +- // +- // In this case, read the content to check whether the file actually +- // changed. +- if oldID.mtime.Equal(id.mtime) && oldID.hash != "" && id.hash == "" { +- data, err := os.ReadFile(fp) +- if err != nil { +- return err +- } +- if hashFile(data) == oldID.hash { +- changed = false +- } +- } +- if changed { +- evts = append(evts, protocol.FileEvent{ +- URI: w.URI(path), +- Type: protocol.Changed, +- }) +- } +- } +- } +- +- return nil +- }); err != nil { +- return nil, err +- } +- +- // Any remaining files must have been deleted. +- for path := range w.files { +- evts = append(evts, protocol.FileEvent{ +- URI: w.URI(path), +- Type: protocol.Deleted, +- }) +- } +- w.files = newFiles +- return evts, nil +-} +diff -urN a/gopls/internal/test/integration/fake/workdir_test.go b/gopls/internal/test/integration/fake/workdir_test.go +--- a/gopls/internal/test/integration/fake/workdir_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/integration/fake/workdir_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,219 +0,0 @@ +-// Copyright 2020 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package fake +- +-import ( +- "context" +- "os" +- "sync" +- "testing" +- +- "github.com/google/go-cmp/cmp" +- "golang.org/x/tools/gopls/internal/protocol" +-) +- +-const sharedData = ` +--- go.mod -- +-go 1.12 +--- nested/README.md -- +-Hello World! +-` +- +-// newWorkdir sets up a temporary Workdir with the given txtar-encoded content. +-// It also configures an eventBuffer to receive file event notifications. These +-// notifications are sent synchronously for each operation, such that once a +-// workdir file operation has returned the caller can expect that any relevant +-// file notifications are present in the buffer. +-// +-// It is the caller's responsibility to call the returned cleanup function. +-func newWorkdir(t *testing.T, txt string) (*Workdir, *eventBuffer, func()) { +- t.Helper() +- +- tmpdir, err := os.MkdirTemp("", "goplstest-workdir-") +- if err != nil { +- t.Fatal(err) +- } +- wd, err := NewWorkdir(tmpdir, UnpackTxt(txt)) +- if err != nil { +- t.Fatal(err) +- } +- cleanup := func() { +- if err := os.RemoveAll(tmpdir); err != nil { +- t.Error(err) +- } +- } +- +- buf := new(eventBuffer) +- wd.AddWatcher(buf.onEvents) +- return wd, buf, cleanup +-} +- +-// eventBuffer collects events from a file watcher. +-type eventBuffer struct { +- mu sync.Mutex +- events []protocol.FileEvent +-} +- +-// onEvents collects adds events to the buffer; to be used with Workdir.AddWatcher. +-func (c *eventBuffer) onEvents(_ context.Context, events []protocol.FileEvent) { +- c.mu.Lock() +- defer c.mu.Unlock() +- +- c.events = append(c.events, events...) +-} +- +-// take empties the buffer, returning its previous contents. 
+-func (c *eventBuffer) take() []protocol.FileEvent { +- c.mu.Lock() +- defer c.mu.Unlock() +- +- evts := c.events +- c.events = nil +- return evts +-} +- +-func TestWorkdir_ReadFile(t *testing.T) { +- wd, _, cleanup := newWorkdir(t, sharedData) +- defer cleanup() +- +- got, err := wd.ReadFile("nested/README.md") +- if err != nil { +- t.Fatal(err) +- } +- want := "Hello World!\n" +- if got := string(got); got != want { +- t.Errorf("reading workdir file, got %q, want %q", got, want) +- } +-} +- +-func TestWorkdir_WriteFile(t *testing.T) { +- wd, events, cleanup := newWorkdir(t, sharedData) +- defer cleanup() +- ctx := context.Background() +- +- tests := []struct { +- path string +- wantType protocol.FileChangeType +- }{ +- {"data.txt", protocol.Created}, +- {"nested/README.md", protocol.Changed}, +- } +- +- for _, test := range tests { +- if err := wd.WriteFile(ctx, test.path, "42"); err != nil { +- t.Fatal(err) +- } +- es := events.take() +- if got := len(es); got != 1 { +- t.Fatalf("len(events) = %d, want 1", got) +- } +- path := wd.URIToPath(es[0].URI) +- if path != test.path { +- t.Errorf("event path = %q, want %q", path, test.path) +- } +- if es[0].Type != test.wantType { +- t.Errorf("event type = %v, want %v", es[0].Type, test.wantType) +- } +- got, err := wd.ReadFile(test.path) +- if err != nil { +- t.Fatal(err) +- } +- want := "42" +- if got := string(got); got != want { +- t.Errorf("ws.ReadFile(%q) = %q, want %q", test.path, got, want) +- } +- } +-} +- +-// Test for file notifications following file operations. +-func TestWorkdir_FileWatching(t *testing.T) { +- wd, events, cleanup := newWorkdir(t, "") +- defer cleanup() +- ctx := context.Background() +- +- must := func(err error) { +- if err != nil { +- t.Fatal(err) +- } +- } +- +- type changeMap map[string]protocol.FileChangeType +- checkEvent := func(wantChanges changeMap) { +- gotChanges := make(changeMap) +- for _, e := range events.take() { +- gotChanges[wd.URIToPath(e.URI)] = e.Type +- } +- if diff := cmp.Diff(wantChanges, gotChanges); diff != "" { +- t.Errorf("mismatching file events (-want +got):\n%s", diff) +- } +- } +- +- must(wd.WriteFile(ctx, "foo.go", "package foo")) +- checkEvent(changeMap{"foo.go": protocol.Created}) +- +- must(wd.RenameFile(ctx, "foo.go", "bar.go")) +- checkEvent(changeMap{"foo.go": protocol.Deleted, "bar.go": protocol.Created}) +- +- must(wd.RemoveFile(ctx, "bar.go")) +- checkEvent(changeMap{"bar.go": protocol.Deleted}) +-} +- +-func TestWorkdir_CheckForFileChanges(t *testing.T) { +- t.Skip("broken on darwin-amd64-10_12") +- wd, events, cleanup := newWorkdir(t, sharedData) +- defer cleanup() +- ctx := context.Background() +- +- checkChange := func(wantPath string, wantType protocol.FileChangeType) { +- if err := wd.CheckForFileChanges(ctx); err != nil { +- t.Fatal(err) +- } +- ev := events.take() +- if len(ev) == 0 { +- t.Fatal("no file events received") +- } +- gotEvt := ev[0] +- gotPath := wd.URIToPath(gotEvt.URI) +- // Only check relative path and Type +- if gotPath != wantPath || gotEvt.Type != wantType { +- t.Errorf("file events: got %v, want {Path: %s, Type: %v}", gotEvt, wantPath, wantType) +- } +- } +- // Sleep some positive amount of time to ensure a distinct mtime. 
+- if err := writeFileData("go.mod", []byte("module foo.test\n"), wd.RelativeTo); err != nil { +- t.Fatal(err) +- } +- checkChange("go.mod", protocol.Changed) +- if err := writeFileData("newFile", []byte("something"), wd.RelativeTo); err != nil { +- t.Fatal(err) +- } +- checkChange("newFile", protocol.Created) +- fp := wd.AbsPath("newFile") +- if err := os.Remove(fp); err != nil { +- t.Fatal(err) +- } +- checkChange("newFile", protocol.Deleted) +-} +- +-func TestSplitModuleVersionPath(t *testing.T) { +- tests := []struct { +- path string +- wantModule, wantVersion, wantSuffix string +- }{ +- {"foo.com@v1.2.3/bar", "foo.com", "v1.2.3", "bar"}, +- {"foo.com/module@v1.2.3/bar", "foo.com/module", "v1.2.3", "bar"}, +- {"foo.com@v1.2.3", "foo.com", "v1.2.3", ""}, +- {"std@v1.14.0", "std", "v1.14.0", ""}, +- {"another/module/path", "another/module/path", "", ""}, +- } +- +- for _, test := range tests { +- module, version, suffix := splitModuleVersionPath(test.path) +- if module != test.wantModule || version != test.wantVersion || suffix != test.wantSuffix { +- t.Errorf("splitModuleVersionPath(%q) =\n\t(%q, %q, %q)\nwant\n\t(%q, %q, %q)", +- test.path, module, version, suffix, test.wantModule, test.wantVersion, test.wantSuffix) +- } +- } +-} +diff -urN a/gopls/internal/test/integration/fake/workdir_windows.go b/gopls/internal/test/integration/fake/workdir_windows.go +--- a/gopls/internal/test/integration/fake/workdir_windows.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/integration/fake/workdir_windows.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,21 +0,0 @@ +-// Copyright 2021 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package fake +- +-import ( +- "errors" +- "syscall" +-) +- +-func init() { +- // constants copied from GOROOT/src/internal/syscall/windows/syscall_windows.go +- const ( +- ERROR_LOCK_VIOLATION syscall.Errno = 33 +- ) +- +- isWindowsErrLockViolation = func(err error) bool { +- return errors.Is(err, ERROR_LOCK_VIOLATION) +- } +-} +diff -urN a/gopls/internal/test/integration/inlayhints/inlayhints_test.go b/gopls/internal/test/integration/inlayhints/inlayhints_test.go +--- a/gopls/internal/test/integration/inlayhints/inlayhints_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/integration/inlayhints/inlayhints_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,69 +0,0 @@ +-// Copyright 2022 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +-package inlayhint +- +-import ( +- "os" +- "testing" +- +- "golang.org/x/tools/gopls/internal/settings" +- . 
"golang.org/x/tools/gopls/internal/test/integration" +- "golang.org/x/tools/gopls/internal/util/bug" +-) +- +-func TestMain(m *testing.M) { +- bug.PanicOnBugs = true +- os.Exit(Main(m)) +-} +- +-func TestEnablingInlayHints(t *testing.T) { +- const workspace = ` +--- go.mod -- +-module inlayHint.test +-go 1.12 +--- lib.go -- +-package lib +-type Number int +-const ( +- Zero Number = iota +- One +- Two +-) +-` +- tests := []struct { +- label string +- enabled map[string]bool +- wantInlayHint bool +- }{ +- { +- label: "default", +- wantInlayHint: false, +- }, +- { +- label: "enable const", +- enabled: map[string]bool{string(settings.ConstantValues): true}, +- wantInlayHint: true, +- }, +- { +- label: "enable parameter names", +- enabled: map[string]bool{string(settings.ParameterNames): true}, +- wantInlayHint: false, +- }, +- } +- for _, test := range tests { +- t.Run(test.label, func(t *testing.T) { +- WithOptions( +- Settings{ +- "hints": test.enabled, +- }, +- ).Run(t, workspace, func(t *testing.T, env *Env) { +- env.OpenFile("lib.go") +- lens := env.InlayHints("lib.go") +- if gotInlayHint := len(lens) > 0; gotInlayHint != test.wantInlayHint { +- t.Errorf("got inlayHint: %t, want %t", gotInlayHint, test.wantInlayHint) +- } +- }) +- }) +- } +-} +diff -urN a/gopls/internal/test/integration/misc/addtest_test.go b/gopls/internal/test/integration/misc/addtest_test.go +--- a/gopls/internal/test/integration/misc/addtest_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/integration/misc/addtest_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,120 +0,0 @@ +-// Copyright 2025 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package misc +- +-import ( +- "testing" +- +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/gopls/internal/settings" +- "golang.org/x/tools/gopls/internal/test/compare" +- . "golang.org/x/tools/gopls/internal/test/integration" +-) +- +-// TestAddTest is a basic test of interaction with the "gopls.add_test" code action. +-func TestAddTest(t *testing.T) { +- const files = ` +--- go.mod -- +-module example.com +- +--- a/a.go -- +-package a +- +-import( +- "context" +-) +- +-func Foo(ctx context.Context, in string) string {return in} +- +--- a/a_test.go -- +-package a_test +- +-import( +- "testing" +-) +- +-func TestExisting(t *testing.T) {} +-` +- const want = `package a_test +- +-import ( +- "context" +- "testing" +- +- "example.com/a" +-) +- +-func TestExisting(t *testing.T) {} +- +-func TestFoo(t *testing.T) { +- tests := []struct { +- name string // description of this test case +- // Named input parameters for target function. +- in string +- want string +- }{ +- // TODO: Add test cases. +- } +- for _, tt := range tests { +- t.Run(tt.name, func(t *testing.T) { +- got := a.Foo(context.Background(), tt.in) +- // TODO: update the condition below to compare got with tt.want. +- if true { +- t.Errorf("Foo() = %v, want %v", got, tt.want) +- } +- }) +- } +-} +-` +- Run(t, files, func(t *testing.T, env *Env) { +- env.OpenFile("a/a.go") +- +- loc := env.RegexpSearch("a/a.go", "Foo") +- actions, err := env.Editor.CodeAction(env.Ctx, loc, nil, protocol.CodeActionUnknownTrigger) +- if err != nil { +- t.Fatalf("CodeAction: %v", err) +- } +- action, err := codeActionByKind(actions, settings.AddTest) +- if err != nil { +- t.Fatal(err) +- } +- +- // Execute the command. +- // Its side effect should be a single showDocument request. 
+- params := &protocol.ExecuteCommandParams{
+- Command: action.Command.Command,
+- Arguments: action.Command.Arguments,
+- }
+-
+- listen := env.Awaiter.ListenToShownDocuments()
+- env.ExecuteCommand(params, nil)
+- // Wait until we finish writing to the file.
+- env.AfterChange()
+- if got := env.BufferText("a/a_test.go"); got != want {
+- t.Errorf("gopls.add_test returned unexpected diff (-want +got):\n%s", compare.Text(want, got))
+- }
+-
+- got := listen()
+- if len(got) != 1 {
+- t.Errorf("gopls.add_test: got %d showDocument requests, want 1: %v", len(got), got)
+- } else {
+- if want := protocol.URI(env.Sandbox.Workdir.URI("a/a_test.go")); got[0].URI != want {
+- t.Errorf("gopls.add_test: got showDocument requests for %v, want %v", got[0].URI, want)
+- }
+-
+- // Pointing to the line of test function declaration.
+- if want := (protocol.Range{
+- Start: protocol.Position{
+- Line: 11,
+- },
+- End: protocol.Position{
+- Line: 11,
+- },
+- }); *got[0].Selection != want {
+- t.Errorf("gopls.add_test: got showDocument requests selection for %v, want %v", *got[0].Selection, want)
+- }
+- }
+- })
+-}
+diff -urN a/gopls/internal/test/integration/misc/call_hierarchy_test.go b/gopls/internal/test/integration/misc/call_hierarchy_test.go
+--- a/gopls/internal/test/integration/misc/call_hierarchy_test.go 2000-01-01 00:00:00.000000000 -0000
++++ b/gopls/internal/test/integration/misc/call_hierarchy_test.go 1969-12-31 18:00:00.000000000 -0600
+@@ -1,36 +0,0 @@
+-// Copyright 2021 The Go Authors. All rights reserved.
+-// Use of this source code is governed by a BSD-style
+-// license that can be found in the LICENSE file.
+-
+-package misc
+-
+-import (
+- "testing"
+-
+- "golang.org/x/tools/gopls/internal/protocol"
+- . "golang.org/x/tools/gopls/internal/test/integration"
+-)
+-
+-// Test for golang/go#49125
+-func TestCallHierarchy_Issue49125(t *testing.T) {
+- const files = `
+--- go.mod --
+-module mod.com
+-
+-go 1.12
+--- p.go --
+-package pkg
+-`
+- // TODO(rfindley): this could probably just be a marker test.
+- Run(t, files, func(t *testing.T, env *Env) {
+- env.OpenFile("p.go")
+- loc := env.RegexpSearch("p.go", "pkg")
+-
+- var params protocol.CallHierarchyPrepareParams
+- params.TextDocument.URI = loc.URI
+- params.Position = loc.Range.Start
+-
+- // Check that this doesn't panic.
+- env.Editor.Server.PrepareCallHierarchy(env.Ctx, &params)
+- })
+-}
+diff -urN a/gopls/internal/test/integration/misc/codeactions_test.go b/gopls/internal/test/integration/misc/codeactions_test.go
+--- a/gopls/internal/test/integration/misc/codeactions_test.go 2000-01-01 00:00:00.000000000 -0000
++++ b/gopls/internal/test/integration/misc/codeactions_test.go 1969-12-31 18:00:00.000000000 -0600
+@@ -1,177 +0,0 @@
+-// Copyright 2024 The Go Authors. All rights reserved.
+-// Use of this source code is governed by a BSD-style
+-// license that can be found in the LICENSE file.
+-
+-package misc
+-
+-import (
+- "fmt"
+- "slices"
+- "strings"
+- "testing"
+-
+- "github.com/google/go-cmp/cmp"
+- "golang.org/x/tools/gopls/internal/protocol"
+- "golang.org/x/tools/gopls/internal/settings"
+- . "golang.org/x/tools/gopls/internal/test/integration"
+-)
+-
+-// This test exercises the filtering of code actions in generated files.
+-// Most code actions, being potential edits, are discarded, but
+-// some (GoTest, GoDoc) are pure queries, and so are allowed.
+-func TestCodeActionsInGeneratedFiles(t *testing.T) { +- const src = ` +--- go.mod -- +-module example.com +-go 1.19 +- +--- src/a.go -- +-package a +- +-func f() { g() } +-func g() {} +--- gen/a.go -- +-// Code generated by hand; DO NOT EDIT. +-package a +- +-func f() { g() } +-func g() {} +- +--- issue72742/a.go -- +-package main +- +-func main(){ +- fmt.Println("helloworld") +-} +-` +- +- Run(t, src, func(t *testing.T, env *Env) { +- check := func(filename string, re string, want []protocol.CodeActionKind) { +- env.OpenFile(filename) +- loc := env.RegexpSearch(filename, re) +- actions, err := env.Editor.CodeAction(env.Ctx, loc, nil, protocol.CodeActionUnknownTrigger) +- if err != nil { +- t.Fatal(err) +- } +- +- type kinds = []protocol.CodeActionKind +- got := make(kinds, 0) +- for _, act := range actions { +- got = append(got, act.Kind) +- } +- +- if diff := cmp.Diff(want, got); diff != "" { +- t.Errorf("%s: unexpected CodeActionKinds: (-want +got):\n%s", +- filename, diff) +- t.Log(actions) +- } +- } +- +- check("src/a.go", `g\(\)`, []protocol.CodeActionKind{ +- settings.AddTest, +- settings.GoAssembly, +- settings.GoDoc, +- settings.GoFreeSymbols, +- settings.GoSplitPackage, +- settings.GoToggleCompilerOptDetails, +- settings.RefactorInlineCall, +- settings.GoplsDocFeatures, +- }) +- +- check("gen/a.go", `g\(\)`, []protocol.CodeActionKind{ +- settings.GoAssembly, +- settings.GoDoc, +- settings.GoFreeSymbols, +- settings.GoSplitPackage, +- settings.GoToggleCompilerOptDetails, +- settings.GoplsDocFeatures, +- }) +- +- check("issue72742/a.go", `fmt`, []protocol.CodeActionKind{ +- settings.OrganizeImports, +- settings.AddTest, +- settings.GoAssembly, +- settings.GoDoc, +- settings.GoFreeSymbols, +- settings.GoSplitPackage, +- settings.GoToggleCompilerOptDetails, +- settings.GoplsDocFeatures, +- }) +- }) +-} +- +-// Test refactor.inline.call is not included in automatically triggered code action +-// unless users want refactoring. +-// +-// (The mechanism behind this behavior has changed. It was added when +-// we used to interpret CodeAction(Only=[]) as "all kinds", which was +-// a distracting nuisance (too many lightbulbs); this was fixed by +-// adding special logic to refactor.inline.call to respect the trigger +-// kind; but now we do this for all actions (for similar reasons) and +-// interpret Only=[] as Only=[quickfix] unless triggerKind=invoked; +-// except that the test client always requests CodeAction(Only=[""]). +-// So, we should remove the special logic from refactorInlineCall +-// and vary the Only parameter used by the test client.) +-func TestVSCodeIssue65167(t *testing.T) { +- const vim1 = `package main +- +-func main() { +- Func() // range to be selected +-} +- +-func Func() int { return 0 } +-` +- +- Run(t, "", func(t *testing.T, env *Env) { +- env.CreateBuffer("main.go", vim1) +- for _, trigger := range []protocol.CodeActionTriggerKind{ +- protocol.CodeActionUnknownTrigger, +- protocol.CodeActionInvoked, +- protocol.CodeActionAutomatic, +- } { +- t.Run(fmt.Sprintf("trigger=%v", trigger), func(t *testing.T) { +- for _, selectedRange := range []bool{false, true} { +- t.Run(fmt.Sprintf("range=%t", selectedRange), func(t *testing.T) { +- loc := env.RegexpSearch("main.go", "Func") +- if !selectedRange { +- // assume the cursor is placed at the beginning of `Func`, so end==start. 
+- loc.Range.End = loc.Range.Start +- } +- actions := env.CodeAction(loc, nil, trigger) +- want := trigger != protocol.CodeActionAutomatic || selectedRange +- if got := slices.ContainsFunc(actions, func(act protocol.CodeAction) bool { +- return act.Kind == settings.RefactorInlineCall +- }); got != want { +- t.Errorf("got refactor.inline.call = %t, want %t", got, want) +- } +- }) +- } +- }) +- } +- }) +-} +- +-// TestDescendingRange isn't really a test of CodeAction at all: it +-// merely tests the response of the server to any (start, end) range +-// that is descending. See #74394. +-func TestDescendingRange(t *testing.T) { +- const src = ` +--- go.mod -- +-module example.com +-go 1.19 +- +--- a/a.go -- +-package a +-` +- Run(t, src, func(t *testing.T, env *Env) { +- env.OpenFile("a/a.go") +- loc := env.RegexpSearch("a/a.go", "package") +- rng := &loc.Range +- rng.Start, rng.End = rng.End, rng.Start +- _, err := env.Editor.CodeAction(env.Ctx, loc, nil, protocol.CodeActionUnknownTrigger) +- got, wantSubstr := fmt.Sprint(err), "start (offset 7) > end (offset 0)" +- if !strings.Contains(got, wantSubstr) { +- t.Fatalf("CodeAction error: got %q, want substring %q", got, wantSubstr) +- } +- }) +-} +diff -urN a/gopls/internal/test/integration/misc/compileropt_test.go b/gopls/internal/test/integration/misc/compileropt_test.go +--- a/gopls/internal/test/integration/misc/compileropt_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/integration/misc/compileropt_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,243 +0,0 @@ +-// Copyright 2024 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package misc +- +-import ( +- "fmt" +- "runtime" +- "testing" +- +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/gopls/internal/server" +- "golang.org/x/tools/gopls/internal/settings" +- . "golang.org/x/tools/gopls/internal/test/integration" +-) +- +-// TestCompilerOptDetails exercises the "{Show,Hide} compiler optimization details" code action. +-func TestCompilerOptDetails(t *testing.T) { +- if runtime.GOOS == "android" { +- t.Skipf("the compiler optimization details code action doesn't work on Android") +- } +- +- const mod = ` +--- go.mod -- +-module mod.com +- +-go 1.18 +- +--- main.go -- +-package main +- +-import "fmt" +- +-func main() { +- fmt.Println(42) +-} +-` +- Run(t, mod, func(t *testing.T, env *Env) { +- env.OpenFile("main.go") +- actions := env.CodeActionForFile("main.go", nil) +- +- // Execute the "Show compiler optimization details" command. +- docAction, err := codeActionByKind(actions, settings.GoToggleCompilerOptDetails) +- if err != nil { +- t.Fatal(err) +- } +- +- params := &protocol.ExecuteCommandParams{ +- Command: docAction.Command.Command, +- Arguments: docAction.Command.Arguments, +- } +- env.ExecuteCommand(params, nil) +- +- env.OnceMet( +- CompletedWork(server.DiagnosticWorkTitle(server.FromToggleCompilerOptDetails), 1, true), +- Diagnostics( +- ForFile("main.go"), +- AtPosition("main.go", 5, 13), // (LSP coordinates) +- WithMessage("42 escapes"), +- WithSeverityTags("optimizer details", protocol.SeverityInformation, nil), +- ), +- ) +- +- // Diagnostics should be reported even on unsaved +- // edited buffers, thanks to the magic of overlays. 
+- env.SetBufferContent("main.go", ` +-package main +-func main() { _ = f } +-func f(x int) *int { return &x }`) +- env.AfterChange(Diagnostics( +- ForFile("main.go"), +- WithMessage("x escapes"), +- WithSeverityTags("optimizer details", protocol.SeverityInformation, nil), +- )) +- +- // Toggle the flag again so now it should be off. +- env.ExecuteCommand(params, nil) +- env.OnceMet( +- CompletedWork(server.DiagnosticWorkTitle(server.FromToggleCompilerOptDetails), 2, true), +- NoDiagnostics(ForFile("main.go")), +- ) +- }) +-} +- +-// TestCompilerOptDetails_perDirectory exercises that the "want +-// optimization details" flag has per-directory cardinality. +-func TestCompilerOptDetails_perDirectory(t *testing.T) { +- if runtime.GOOS == "android" { +- t.Skipf("the compiler optimization details code action doesn't work on Android") +- } +- +- const mod = ` +--- go.mod -- +-module mod.com +-go 1.18 +- +--- a/a.go -- +-package a +- +-func F(x int) any { return &x } +- +--- a/a_test.go -- +-package a +- +-func G(x int) any { return &x } +- +--- a/a_x_test.go -- +-package a_test +- +-func H(x int) any { return &x } +-` +- +- Run(t, mod, func(t *testing.T, env *Env) { +- // toggle executes the "Toggle compiler optimization details" +- // command within a file, and asserts that it has the specified title. +- toggle := func(filename, wantTitle string) { +- env.OpenFile(filename) +- actions := env.CodeActionForFile(filename, nil) +- +- docAction, err := codeActionByKind(actions, settings.GoToggleCompilerOptDetails) +- if err != nil { +- t.Fatal(err) +- } +- if docAction.Title != wantTitle { +- t.Errorf("CodeAction.Title = %q, want %q", docAction.Title, wantTitle) +- } +- params := &protocol.ExecuteCommandParams{ +- Command: docAction.Command.Command, +- Arguments: docAction.Command.Arguments, +- } +- env.ExecuteCommand(params, nil) +- } +- +- // Show diagnostics for directory a/ from one file. +- // Diagnostics are reported for all three packages. +- toggle("a/a.go", `Show compiler optimization details for "a"`) +- env.OnceMet( +- CompletedWork(server.DiagnosticWorkTitle(server.FromToggleCompilerOptDetails), 1, true), +- Diagnostics( +- ForFile("a/a.go"), +- AtPosition("a/a.go", 2, 7), +- WithMessage("x escapes to heap"), +- WithSeverityTags("optimizer details", protocol.SeverityInformation, nil), +- ), +- Diagnostics( +- ForFile("a/a_test.go"), +- AtPosition("a/a_test.go", 2, 7), +- WithMessage("x escapes to heap"), +- WithSeverityTags("optimizer details", protocol.SeverityInformation, nil), +- ), +- Diagnostics( +- ForFile("a/a_x_test.go"), +- AtPosition("a/a_x_test.go", 2, 7), +- WithMessage("x escapes to heap"), +- WithSeverityTags("optimizer details", protocol.SeverityInformation, nil), +- ), +- ) +- +- // Hide diagnostics for the directory from a different file. +- // All diagnostics disappear. +- toggle("a/a_test.go", `Hide compiler optimization details for "a"`) +- env.OnceMet( +- CompletedWork(server.DiagnosticWorkTitle(server.FromToggleCompilerOptDetails), 2, true), +- NoDiagnostics(ForFile("a/a.go")), +- NoDiagnostics(ForFile("a/a_test.go")), +- NoDiagnostics(ForFile("a/a_x_test.go")), +- ) +- }) +-} +- +-// TestCompilerOptDetails_config exercises that the "want optimization +-// details" flag honors the "annotation" configuration setting. 
+-func TestCompilerOptDetails_config(t *testing.T) { +- if runtime.GOOS == "android" { +- t.Skipf("the compiler optimization details code action doesn't work on Android") +- } +- +- const mod = ` +--- go.mod -- +-module mod.com +-go 1.18 +- +--- a/a.go -- +-package a +- +-func F(x int) any { return &x } // escape(x escapes to heap) +-func G() { defer func(){} () } // cannotInlineFunction(unhandled op DEFER) +-` +- +- for _, escape := range []bool{true, false} { +- WithOptions( +- Settings{"annotations": map[string]any{"inline": true, "escape": escape}}, +- ).Run(t, mod, func(t *testing.T, env *Env) { +- env.OpenFile("a/a.go") +- actions := env.CodeActionForFile("a/a.go", nil) +- +- docAction, err := codeActionByKind(actions, settings.GoToggleCompilerOptDetails) +- if err != nil { +- t.Fatal(err) +- } +- params := &protocol.ExecuteCommandParams{ +- Command: docAction.Command.Command, +- Arguments: docAction.Command.Arguments, +- } +- env.ExecuteCommand(params, nil) +- +- env.OnceMet( +- CompletedWork(server.DiagnosticWorkTitle(server.FromToggleCompilerOptDetails), 1, true), +- cond(escape, Diagnostics, NoDiagnostics)( +- ForFile("a/a.go"), +- AtPosition("a/a.go", 2, 7), +- WithMessage("x escapes to heap"), +- WithSeverityTags("optimizer details", protocol.SeverityInformation, nil), +- ), +- Diagnostics( +- ForFile("a/a.go"), +- AtPosition("a/a.go", 3, 5), +- WithMessage("cannotInlineFunction(unhandled op DEFER)"), +- WithSeverityTags("optimizer details", protocol.SeverityInformation, nil), +- ), +- ) +- }) +- } +-} +- +-func cond[T any](cond bool, x, y T) T { +- if cond { +- return x +- } else { +- return y +- } +-} +- +-// codeActionByKind returns the first action of (exactly) the specified kind, or an error. +-func codeActionByKind(actions []protocol.CodeAction, kind protocol.CodeActionKind) (*protocol.CodeAction, error) { +- for _, act := range actions { +- if act.Kind == kind { +- return &act, nil +- } +- } +- return nil, fmt.Errorf("can't find action with kind %s, only %#v", kind, actions) +-} +diff -urN a/gopls/internal/test/integration/misc/configuration_test.go b/gopls/internal/test/integration/misc/configuration_test.go +--- a/gopls/internal/test/integration/misc/configuration_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/integration/misc/configuration_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,248 +0,0 @@ +-// Copyright 2020 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package misc +- +-import ( +- "testing" +- +- . "golang.org/x/tools/gopls/internal/test/integration" +- +- "golang.org/x/tools/internal/testenv" +-) +- +-// Test that enabling and disabling produces the expected results of showing +-// and hiding staticcheck analysis results. 
+-func TestChangeConfiguration(t *testing.T) { +- const files = ` +--- go.mod -- +-module mod.com +- +-go 1.12 +--- a/a.go -- +-package a +- +-import "errors" +- +-// FooErr should be called ErrFoo (ST1012) +-var FooErr = errors.New("foo") +-` +- Run(t, files, func(t *testing.T, env *Env) { +- env.OpenFile("a/a.go") +- env.AfterChange( +- NoDiagnostics(ForFile("a/a.go")), +- ) +- cfg := env.Editor.Config() +- cfg.Settings = map[string]any{ +- "staticcheck": true, +- } +- env.ChangeConfiguration(cfg) +- env.AfterChange( +- Diagnostics(env.AtRegexp("a/a.go", "var (FooErr)")), +- ) +- }) +-} +- +-func TestIdenticalConfiguration(t *testing.T) { +- // This test checks that changing configuration does not cause views to be +- // recreated if there is no configuration change. +- const files = ` +--- a.go -- +-package p +- +-func _() { +- var x *int +- y := *x +- _ = y +-} +-` +- Run(t, files, func(t *testing.T, env *Env) { +- // Sanity check: before disabling the nilness analyzer, we should have a +- // diagnostic for the nil dereference. +- env.OpenFile("a.go") +- env.AfterChange( +- Diagnostics( +- ForFile("a.go"), +- WithMessage("nil dereference"), +- ), +- ) +- +- // Collect the view ID before changing configuration. +- viewID := func() string { +- t.Helper() +- views := env.Views() +- if len(views) != 1 { +- t.Fatalf("got %d views, want 1", len(views)) +- } +- return views[0].ID +- } +- before := viewID() +- +- // Now disable the nilness analyzer. +- cfg := env.Editor.Config() +- cfg.Settings = map[string]any{ +- "analyses": map[string]any{ +- "nilness": false, +- }, +- } +- +- // This should cause the diagnostic to disappear... +- env.ChangeConfiguration(cfg) +- env.AfterChange( +- NoDiagnostics(), +- ) +- // ...and we should be on the second view. +- after := viewID() +- if after == before { +- t.Errorf("after configuration change, got view %q (same as before), want new view", after) +- } +- +- // Now change configuration again, this time with the same configuration as +- // before. We should still have no diagnostics... +- env.ChangeConfiguration(cfg) +- env.AfterChange( +- NoDiagnostics(), +- ) +- // ...and we should still be on the second view. +- if got := viewID(); got != after { +- t.Errorf("after second configuration change, got view %q, want %q", got, after) +- } +- }) +-} +- +-// Test that clients can configure per-workspace configuration, which is +-// queried via the scopeURI of a workspace/configuration request. +-// (this was broken in golang/go#65519). +-func TestWorkspaceConfiguration(t *testing.T) { +- const files = ` +--- go.mod -- +-module example.com/config +- +-go 1.18 +- +--- a/a.go -- +-package a +- +-import "example.com/config/b" +- +-func _() { +- _ = b.B{2} +-} +- +--- b/b.go -- +-package b +- +-type B struct { +- F int +-} +-` +- +- WithOptions( +- WorkspaceFolders("a"), +- FolderSettings{ +- "a": { +- "analyses": map[string]bool{ +- "composites": false, +- }, +- }, +- }, +- ).Run(t, files, func(t *testing.T, env *Env) { +- env.OpenFile("a/a.go") +- env.AfterChange(NoDiagnostics()) +- }) +-} +- +-// TestMajorOptionsChange is like TestChangeConfiguration, but modifies an +-// an open buffer before making a major (but inconsequential) change that +-// causes gopls to recreate the view. +-// +-// Gopls should not get confused about buffer content when recreating the view. 
+-func TestMajorOptionsChange(t *testing.T) { +- const files = ` +--- go.mod -- +-module mod.com +- +-go 1.12 +--- a/a.go -- +-package a +- +-import "errors" +- +-var ErrFoo = errors.New("foo") +-` +- Run(t, files, func(t *testing.T, env *Env) { +- env.OpenFile("a/a.go") +- // Introduce a staticcheck diagnostic. It should be detected when we enable +- // staticcheck later. +- env.RegexpReplace("a/a.go", "ErrFoo", "FooErr") +- env.AfterChange( +- NoDiagnostics(ForFile("a/a.go")), +- ) +- cfg := env.Editor.Config() +- // Any change to environment recreates the view, but this should not cause +- // gopls to get confused about the content of a/a.go: we should get the +- // staticcheck diagnostic below. +- cfg.Env = map[string]string{ +- "AN_ARBITRARY_VAR": "FOO", +- } +- cfg.Settings = map[string]any{ +- "staticcheck": true, +- } +- env.ChangeConfiguration(cfg) +- env.AfterChange( +- Diagnostics(env.AtRegexp("a/a.go", "var (FooErr)")), +- ) +- }) +-} +- +-func TestStaticcheckWarning(t *testing.T) { +- // Note: keep this in sync with TestChangeConfiguration. +- testenv.SkipAfterGo1Point(t, 19) +- +- const files = ` +--- go.mod -- +-module mod.com +- +-go 1.12 +--- a/a.go -- +-package a +- +-import "errors" +- +-// FooErr should be called ErrFoo (ST1012) +-var FooErr = errors.New("foo") +-` +- +- WithOptions( +- Settings{"staticcheck": true}, +- ).Run(t, files, func(t *testing.T, env *Env) { +- env.OnceMet( +- InitialWorkspaceLoad, +- ShownMessage("staticcheck is not supported"), +- ) +- }) +-} +- +-func TestDeprecatedSettings(t *testing.T) { +- WithOptions( +- Settings{ +- "experimentalUseInvalidMetadata": true, +- "experimentalWatchedFileDelay": "1s", +- "experimentalWorkspaceModule": true, +- "tempModfile": true, +- "allowModfileModifications": true, +- "allowImplicitNetworkAccess": true, +- }, +- ).Run(t, "", func(t *testing.T, env *Env) { +- env.OnceMet( +- InitialWorkspaceLoad, +- ShownMessage("experimentalWorkspaceModule"), +- ShownMessage("experimentalUseInvalidMetadata"), +- ShownMessage("experimentalWatchedFileDelay"), +- ShownMessage("tempModfile"), +- ShownMessage("allowModfileModifications"), +- ShownMessage("allowImplicitNetworkAccess"), +- ) +- }) +-} +diff -urN a/gopls/internal/test/integration/misc/debugserver_test.go b/gopls/internal/test/integration/misc/debugserver_test.go +--- a/gopls/internal/test/integration/misc/debugserver_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/integration/misc/debugserver_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,46 +0,0 @@ +-// Copyright 2021 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package misc +- +-import ( +- "net/http" +- "testing" +- +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/gopls/internal/protocol/command" +- +- . 
"golang.org/x/tools/gopls/internal/test/integration" +-) +- +-func TestStartDebugging(t *testing.T) { +- WithOptions( +- Modes(Forwarded), +- ).Run(t, "", func(t *testing.T, env *Env) { +- args, err := command.MarshalArgs(command.DebuggingArgs{}) +- if err != nil { +- t.Fatal(err) +- } +- params := &protocol.ExecuteCommandParams{ +- Command: command.StartDebugging.String(), +- Arguments: args, +- } +- var result command.DebuggingResult +- env.ExecuteCommand(params, &result) +- if got, want := len(result.URLs), 2; got != want { +- t.Fatalf("got %d urls, want %d; urls: %#v", got, want, result.URLs) +- } +- for i, u := range result.URLs { +- resp, err := http.Get(u) +- if err != nil { +- t.Errorf("getting url #%d (%q): %v", i, u, err) +- continue +- } +- defer resp.Body.Close() +- if got, want := resp.StatusCode, http.StatusOK; got != want { +- t.Errorf("debug server #%d returned HTTP %d, want %d", i, got, want) +- } +- } +- }) +-} +diff -urN a/gopls/internal/test/integration/misc/definition_test.go b/gopls/internal/test/integration/misc/definition_test.go +--- a/gopls/internal/test/integration/misc/definition_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/integration/misc/definition_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,752 +0,0 @@ +-// Copyright 2020 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package misc +- +-import ( +- "fmt" +- "os" +- "path" +- "path/filepath" +- "regexp" +- "strings" +- "testing" +- +- "github.com/google/go-cmp/cmp" +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/gopls/internal/test/compare" +- . "golang.org/x/tools/gopls/internal/test/integration" +-) +- +-const internalDefinition = ` +--- go.mod -- +-module mod.com +- +-go 1.12 +--- main.go -- +-package main +- +-import "fmt" +- +-func main() { +- fmt.Println(message) +-} +--- const.go -- +-package main +- +-const message = "Hello World." +-` +- +-func TestGoToInternalDefinition(t *testing.T) { +- Run(t, internalDefinition, func(t *testing.T, env *Env) { +- env.OpenFile("main.go") +- loc := env.FirstDefinition(env.RegexpSearch("main.go", "message")) +- name := env.Sandbox.Workdir.URIToPath(loc.URI) +- if want := "const.go"; name != want { +- t.Errorf("Definition: got file %q, want %q", name, want) +- } +- if want := env.RegexpSearch("const.go", "message"); loc != want { +- t.Errorf("Definition: got location %v, want %v", loc, want) +- } +- }) +-} +- +-const linknameDefinition = ` +--- go.mod -- +-module mod.com +- +--- upper/upper.go -- +-package upper +- +-import ( +- _ "unsafe" +- +- _ "mod.com/middle" +-) +- +-//go:linkname foo mod.com/lower.bar +-func foo() string +- +--- middle/middle.go -- +-package middle +- +-import ( +- _ "mod.com/lower" +-) +- +--- lower/lower.s -- +- +--- lower/lower.go -- +-package lower +- +-func bar() string { +- return "bar as foo" +-}` +- +-func TestGoToLinknameDefinition(t *testing.T) { +- Run(t, linknameDefinition, func(t *testing.T, env *Env) { +- env.OpenFile("upper/upper.go") +- +- // Jump from directives 2nd arg. 
+- start := env.RegexpSearch("upper/upper.go", `lower.bar`) +- loc := env.FirstDefinition(start) +- name := env.Sandbox.Workdir.URIToPath(loc.URI) +- if want := "lower/lower.go"; name != want { +- t.Errorf("Definition: got file %q, want %q", name, want) +- } +- if want := env.RegexpSearch("lower/lower.go", `bar`); loc != want { +- t.Errorf("Definition: got position %v, want %v", loc, want) +- } +- }) +-} +- +-const linknameDefinitionReverse = ` +--- go.mod -- +-module mod.com +- +--- upper/upper.s -- +- +--- upper/upper.go -- +-package upper +- +-import ( +- _ "mod.com/middle" +-) +- +-func foo() string +- +--- middle/middle.go -- +-package middle +- +-import ( +- _ "mod.com/lower" +-) +- +--- lower/lower.go -- +-package lower +- +-import _ "unsafe" +- +-//go:linkname bar mod.com/upper.foo +-func bar() string { +- return "bar as foo" +-}` +- +-func TestGoToLinknameDefinitionInReverseDep(t *testing.T) { +- Run(t, linknameDefinitionReverse, func(t *testing.T, env *Env) { +- env.OpenFile("lower/lower.go") +- +- // Jump from directives 2nd arg. +- start := env.RegexpSearch("lower/lower.go", `upper.foo`) +- loc := env.FirstDefinition(start) +- name := env.Sandbox.Workdir.URIToPath(loc.URI) +- if want := "upper/upper.go"; name != want { +- t.Errorf("Definition: got file %q, want %q", name, want) +- } +- if want := env.RegexpSearch("upper/upper.go", `foo`); loc != want { +- t.Errorf("Definition: got position %v, want %v", loc, want) +- } +- }) +-} +- +-// The linkname directive connects two packages not related in the import graph. +-const linknameDefinitionDisconnected = ` +--- go.mod -- +-module mod.com +- +--- a/a.go -- +-package a +- +-import ( +- _ "unsafe" +-) +- +-//go:linkname foo mod.com/b.bar +-func foo() string +- +--- b/b.go -- +-package b +- +-func bar() string { +- return "bar as foo" +-}` +- +-func TestGoToLinknameDefinitionDisconnected(t *testing.T) { +- Run(t, linknameDefinitionDisconnected, func(t *testing.T, env *Env) { +- env.OpenFile("a/a.go") +- +- // Jump from directives 2nd arg. +- start := env.RegexpSearch("a/a.go", `b.bar`) +- loc := env.FirstDefinition(start) +- name := env.Sandbox.Workdir.URIToPath(loc.URI) +- if want := "b/b.go"; name != want { +- t.Errorf("Definition: got file %q, want %q", name, want) +- } +- if want := env.RegexpSearch("b/b.go", `bar`); loc != want { +- t.Errorf("Definition: got position %v, want %v", loc, want) +- } +- }) +-} +- +-const stdlibDefinition = ` +--- go.mod -- +-module mod.com +- +-go 1.12 +--- main.go -- +-package main +- +-import "fmt" +- +-func main() { +- fmt.Printf() +-}` +- +-func TestGoToStdlibDefinition_Issue37045(t *testing.T) { +- Run(t, stdlibDefinition, func(t *testing.T, env *Env) { +- env.OpenFile("main.go") +- loc := env.FirstDefinition(env.RegexpSearch("main.go", `fmt.(Printf)`)) +- name := env.Sandbox.Workdir.URIToPath(loc.URI) +- if got, want := path.Base(name), "print.go"; got != want { +- t.Errorf("Definition: got file %q, want %q", name, want) +- } +- env.OpenFile(name) +- +- // Test that we can jump to definition from outside our workspace. +- // See golang.org/issues/37045. 
+- newLoc := env.FirstDefinition(loc) +- newName := env.Sandbox.Workdir.URIToPath(newLoc.URI) +- if newName != name { +- t.Errorf("Definition is not idempotent: got %q, want %q", newName, name) +- } +- if newLoc != loc { +- t.Errorf("Definition is not idempotent: got %v, want %v", newLoc, loc) +- } +- }) +-} +- +-func TestUnexportedStdlib_Issue40809(t *testing.T) { +- Run(t, stdlibDefinition, func(t *testing.T, env *Env) { +- env.OpenFile("main.go") +- loc := env.FirstDefinition(env.RegexpSearch("main.go", `fmt.(Printf)`)) +- name := env.Sandbox.Workdir.URIToPath(loc.URI) +- env.OpenFile(name) +- +- loc = env.RegexpSearch(name, `:=\s*(newPrinter)\(\)`) +- +- // Check that we can find references on a reference +- refs := env.References(loc) +- if len(refs) < 5 { +- t.Errorf("expected 5+ references to newPrinter, found: %#v", refs) +- } +- +- loc = env.FirstDefinition(loc) +- content, _ := env.Hover(loc) +- if !strings.Contains(content.Value, "newPrinter") { +- t.Fatal("definition of newPrinter went to the incorrect place") +- } +- // And on the definition too. +- refs = env.References(loc) +- if len(refs) < 5 { +- t.Errorf("expected 5+ references to newPrinter, found: %#v", refs) +- } +- }) +-} +- +-// Test the hover on an error's Error function. +-// This can't be done via the marker tests because Error is a builtin. +-func TestHoverOnError(t *testing.T) { +- const mod = ` +--- go.mod -- +-module mod.com +- +-go 1.12 +--- main.go -- +-package main +- +-func main() { +- var err error +- err.Error() +-}` +- Run(t, mod, func(t *testing.T, env *Env) { +- env.OpenFile("main.go") +- content, _ := env.Hover(env.RegexpSearch("main.go", "Error")) +- if content == nil { +- t.Fatalf("nil hover content for Error") +- } +- want := "```go\nfunc (error).Error() string\n```" +- if content.Value != want { +- t.Fatalf("hover failed:\n%s", compare.Text(want, content.Value)) +- } +- }) +-} +- +-func TestImportShortcut(t *testing.T) { +- const mod = ` +--- go.mod -- +-module mod.com +- +-go 1.12 +--- main.go -- +-package main +- +-import "fmt" +- +-func main() {} +-` +- for _, tt := range []struct { +- wantLinks int +- importShortcut string +- }{ +- {1, "Link"}, +- {0, "Definition"}, +- {1, "Both"}, +- } { +- t.Run(tt.importShortcut, func(t *testing.T) { +- WithOptions( +- Settings{"importShortcut": tt.importShortcut}, +- ).Run(t, mod, func(t *testing.T, env *Env) { +- env.OpenFile("main.go") +- loc := env.FirstDefinition(env.RegexpSearch("main.go", `"fmt"`)) +- if loc == (protocol.Location{}) { +- t.Fatalf("expected definition, got none") +- } +- links := env.DocumentLink("main.go") +- if len(links) != tt.wantLinks { +- t.Fatalf("expected %v links, got %v", tt.wantLinks, len(links)) +- } +- }) +- }) +- } +-} +- +-func TestGoToTypeDefinition_Issue38589(t *testing.T) { +- const mod = ` +--- go.mod -- +-module mod.com +- +-go 1.12 +--- main.go -- +-package main +- +-type Int int +- +-type Struct struct{} +- +-func F1() {} +-func F2() (int, error) { return 0, nil } +-func F3() (**Struct, bool, *Int, error) { return nil, false, nil, nil } +-func F4() (**Struct, bool, *float64, error) { return nil, false, nil, nil } +- +-func main() {} +-` +- +- for _, tt := range []struct { +- re string +- wantError bool +- wantTypeRe string +- }{ +- {re: `F1`, wantError: true}, +- {re: `F2`, wantError: true}, +- {re: `F3`, wantError: true}, +- {re: `F4`, wantError: false, wantTypeRe: `type (Struct)`}, +- } { +- t.Run(tt.re, func(t *testing.T) { +- Run(t, mod, func(t *testing.T, env *Env) { +- env.OpenFile("main.go") +- +- locs, 
err := env.Editor.TypeDefinitions(env.Ctx, env.RegexpSearch("main.go", tt.re)) +- if tt.wantError { +- if err == nil { +- t.Fatal("expected error, got nil") +- } +- return +- } +- if err != nil { +- t.Fatalf("expected nil error, got %s", err) +- } +- if len(locs) == 0 { +- t.Fatalf("TypeDefinitions: empty result") +- } +- +- typeLoc := env.RegexpSearch("main.go", tt.wantTypeRe) +- if locs[0] != typeLoc { +- t.Errorf("invalid pos: want %+v, got %+v", typeLoc, locs[0]) +- } +- }) +- }) +- } +-} +- +-func TestGoToTypeDefinition_Issue60544(t *testing.T) { +- const mod = ` +--- go.mod -- +-module mod.com +- +-go 1.19 +--- main.go -- +-package main +- +-func F[T comparable]() {} +-` +- +- Run(t, mod, func(t *testing.T, env *Env) { +- env.OpenFile("main.go") +- +- // TypeDefinition of comparable should not panic. +- loc := env.RegexpSearch("main.go", "comparable") +- locs, err := env.Editor.TypeDefinitions(env.Ctx, loc) // doesn't panic +- if err != nil { +- t.Fatal(err) +- } +- +- // For extra credit, check the actual location. +- got := fmt.Sprint(locs) +- wantSubstr := "builtin.go" +- if !strings.Contains(got, wantSubstr) { +- t.Errorf("TypeDefinitions('comparable') = %v, want substring %q", got, wantSubstr) +- } +- }) +-} +- +-// Test for golang/go#47825. +-func TestImportTestVariant(t *testing.T) { +- const mod = ` +--- go.mod -- +-module mod.com +- +-go 1.12 +--- client/test/role.go -- +-package test +- +-import _ "mod.com/client" +- +-type RoleSetup struct{} +--- client/client_role_test.go -- +-package client_test +- +-import ( +- "testing" +- _ "mod.com/client" +- ctest "mod.com/client/test" +-) +- +-func TestClient(t *testing.T) { +- _ = ctest.RoleSetup{} +-} +--- client/client_test.go -- +-package client +- +-import "testing" +- +-func TestClient(t *testing.T) {} +--- client.go -- +-package client +-` +- Run(t, mod, func(t *testing.T, env *Env) { +- env.OpenFile("client/client_role_test.go") +- env.FirstDefinition(env.RegexpSearch("client/client_role_test.go", "RoleSetup")) +- }) +-} +- +-// This test exercises a crashing pattern from golang/go#49223. +-func TestGoToCrashingDefinition_Issue49223(t *testing.T) { +- Run(t, "", func(t *testing.T, env *Env) { +- params := &protocol.DefinitionParams{} +- params.TextDocument.URI = protocol.DocumentURI("fugitive%3A///Users/user/src/mm/ems/.git//0/pkg/domain/treasury/provider.go") +- params.Position.Character = 18 +- params.Position.Line = 0 +- env.Editor.Server.Definition(env.Ctx, params) // ignore error +- }) +-} +- +-// TestVendoringInvalidatesMetadata ensures that gopls uses the +-// correct metadata even after an external 'go mod vendor' command +-// causes packages to move; see issue #55995. +-// See also TestImplementationsInVendor, which tests the same fix. +-func TestVendoringInvalidatesMetadata(t *testing.T) { +- t.Skip("golang/go#56169: file watching does not capture vendor dirs") +- +- const proxy = ` +--- other.com/b@v1.0.0/go.mod -- +-module other.com/b +-go 1.14 +- +--- other.com/b@v1.0.0/b.go -- +-package b +-const K = 0 +-` +- const src = ` +--- go.mod -- +-module example.com/a +-go 1.14 +-require other.com/b v1.0.0 +- +--- a.go -- +-package a +-import "other.com/b" +-const _ = b.K +- +-` +- WithOptions( +- WriteGoSum("."), +- ProxyFiles(proxy), +- Modes(Default), // fails in 'experimental' mode +- ).Run(t, src, func(t *testing.T, env *Env) { +- // Enable to debug go.sum mismatch, which may appear as +- // "module lookup disabled by GOPROXY=off", confusingly. 
+- if false { +- env.DumpGoSum(".") +- } +- +- env.OpenFile("a.go") +- refLoc := env.RegexpSearch("a.go", "K") // find "b.K" reference +- +- // Initially, b.K is defined in the module cache. +- gotLoc := env.FirstDefinition(refLoc) +- gotFile := env.Sandbox.Workdir.URIToPath(gotLoc.URI) +- wantCache := filepath.ToSlash(env.Sandbox.GOPATH()) + "/pkg/mod/other.com/b@v1.0.0/b.go" +- if gotFile != wantCache { +- t.Errorf("Definition, before: got file %q, want %q", gotFile, wantCache) +- } +- +- // Run 'go mod vendor' outside the editor. +- env.RunGoCommand("mod", "vendor") +- +- // Synchronize changes to watched files. +- env.Await(env.DoneWithChangeWatchedFiles()) +- +- // Now, b.K is defined in the vendor tree. +- gotLoc = env.FirstDefinition(refLoc) +- wantVendor := "vendor/other.com/b/b.go" +- if gotFile != wantVendor { +- t.Errorf("Definition, after go mod vendor: got file %q, want %q", gotFile, wantVendor) +- } +- +- // Delete the vendor tree. +- if err := os.RemoveAll(env.Sandbox.Workdir.AbsPath("vendor")); err != nil { +- t.Fatal(err) +- } +- // Notify the server of the deletion. +- if err := env.Sandbox.Workdir.CheckForFileChanges(env.Ctx); err != nil { +- t.Fatal(err) +- } +- +- // Synchronize again. +- env.Await(env.DoneWithChangeWatchedFiles()) +- +- // b.K is once again defined in the module cache. +- gotLoc = env.FirstDefinition(gotLoc) +- gotFile = env.Sandbox.Workdir.URIToPath(gotLoc.URI) +- if gotFile != wantCache { +- t.Errorf("Definition, after rm -rf vendor: got file %q, want %q", gotFile, wantCache) +- } +- }) +-} +- +-const embedDefinition = ` +--- go.mod -- +-module mod.com +- +--- main.go -- +-package main +- +-import ( +- "embed" +-) +- +-//go:embed *.txt +-var foo embed.FS +- +-func main() {} +- +--- skip.sql -- +-SKIP +- +--- foo.txt -- +-FOO +- +--- skip.bat -- +-SKIP +-` +- +-func TestEmbedDefinition(t *testing.T) { +- Run(t, embedDefinition, func(t *testing.T, env *Env) { +- env.OpenFile("main.go") +- +- start := env.RegexpSearch("main.go", `\*.txt`) +- loc := env.FirstDefinition(start) +- +- name := env.Sandbox.Workdir.URIToPath(loc.URI) +- if want := "foo.txt"; name != want { +- t.Errorf("Definition: got file %q, want %q", name, want) +- } +- }) +-} +- +-func TestDefinitionOfErrorErrorMethod(t *testing.T) { +- const src = `Regression test for a panic in definition of error.Error (of course). +-golang/go#64086 +- +--- go.mod -- +-module mod.com +-go 1.18 +- +--- a.go -- +-package a +- +-func _(err error) { +- _ = err.Error() +-} +- +-` +- Run(t, src, func(t *testing.T, env *Env) { +- env.OpenFile("a.go") +- +- start := env.RegexpSearch("a.go", `Error`) +- loc := env.FirstDefinition(start) +- +- if !strings.HasSuffix(string(loc.URI), "builtin.go") { +- t.Errorf("Definition(err.Error) = %#v, want builtin.go", loc) +- } +- }) +-} +- +-func TestAssemblyDefinition(t *testing.T) { +- // This test cannot be expressed as a marker test because +- // the expect package ignores markers (@loc) within a .s file. +- const src = ` +--- go.mod -- +-module mod.com +- +--- foo_darwin_arm64.s -- +- +-// assembly implementation +-TEXT ·foo(SB),NOSPLIT,$0 +- RET +- +--- a.go -- +-//go:build darwin && arm64 +- +-package a +- +-// Go declaration +-func foo(int) int +- +-var _ = foo(123) // call +-` +- Run(t, src, func(t *testing.T, env *Env) { +- env.OpenFile("a.go") +- +- locString := func(loc protocol.Location) string { +- return fmt.Sprintf("%s:%s", loc.URI.Base(), loc.Range) +- } +- +- // Definition at the call"foo(123)" takes us to the Go declaration. 
+- callLoc := env.RegexpSearch("a.go", regexp.QuoteMeta("foo(123)")) +- declLoc := env.FirstDefinition(callLoc) +- if got, want := locString(declLoc), "a.go:5:5-5:8"; got != want { +- t.Errorf("Definition(call): got %s, want %s", got, want) +- } +- +- // Definition a second time takes us to the assembly implementation. +- implLoc := env.FirstDefinition(declLoc) +- if got, want := locString(implLoc), "foo_darwin_arm64.s:2:6-2:9"; got != want { +- t.Errorf("Definition(go decl): got %s, want %s", got, want) +- } +- }) +-} +- +-func TestPackageKeyInvalidationAfterSave(t *testing.T) { +- // This test is a little subtle, but catches a bug that slipped through +- // testing of https://go.dev/cl/614165, which moved active packages to the +- // packageHandle. +- // +- // The bug was that after a format-and-save operation, the save marks the +- // package as dirty but doesn't change its identity. In other words, this is +- // the sequence of change: +- // +- // S_0 --format--> S_1 --save--> S_2 +- // +- // A package is computed on S_0, invalidated in S_1 and immediately +- // invalidated again in S_2. Due to an invalidation bug, the validity of the +- // package from S_0 was checked by comparing the identical keys of S_1 and +- // S_2, and so the stale package from S_0 was marked as valid. +- const src = ` +--- go.mod -- +-module mod.com +- +--- a.go -- +-package a +- +-func Foo() { +-} +-` +- Run(t, src, func(t *testing.T, env *Env) { +- env.OpenFile("a.go") +- +- fooLoc := env.RegexpSearch("a.go", "()Foo") +- loc0 := env.FirstDefinition(fooLoc) +- +- // Insert a space that will be removed by formatting. +- env.EditBuffer("a.go", protocol.TextEdit{ +- Range: fooLoc.Range, +- NewText: " ", +- }) +- env.SaveBuffer("a.go") // reformats the file before save +- env.AfterChange() +- loc1 := env.FirstDefinition(env.RegexpSearch("a.go", "Foo")) +- if diff := cmp.Diff(loc0, loc1); diff != "" { +- t.Errorf("mismatching locations (-want +got):\n%s", diff) +- } +- }) +-} +- +-func TestCommentDefinition_Issue69616(t *testing.T) { +- // This test exercises a few edge cases discovered by telemetry in +- // golang/go#69616, namely situations where a parsed Go file might +- // not have an associated scope in the package types.Info. +- // +- // The files below set up two types of edge cases: +- // - a 'compiled' Go file that isn't actually type-checked, because it has +- // the wrong package name +- // - a non-compiled Go file (unsafe.go). +- const src = ` +--- go.mod -- +-module mod.com +- +-go 1.21 +--- cmd/main.go -- +-package main +- +-import "unsafe" +- +-var _ = unsafe.Offsetof +- +-func Foo() {} +--- cmd/x.go -- +-package x +- +-// Bar is like [Foo] +-func Bar() {} +-` +- +- Run(t, src, func(t *testing.T, env *Env) { +- // First, check that we don't produce a crash or bug when +- // finding definitions in a 'compiled' go file that isn't actually type +- // checked. +- env.OpenFile("cmd/x.go") +- _, _ = env.Editor.Definitions(env.Ctx, env.RegexpSearch("cmd/x.go", "()Foo")) +- +- // Next, go to the unsafe package, and find the doc link to [Sizeof]. +- // It will also fail to resolve, because unsafe.go isn't compiled, but +- // again should not panic or result in a bug. 
+- env.OpenFile("cmd/main.go") +- loc := env.FirstDefinition(env.RegexpSearch("cmd/main.go", `unsafe\.(Offsetof)`)) +- unsafePath := loc.URI.Path() +- env.OpenFile(unsafePath) +- _, _ = env.Editor.Definitions(env.Ctx, +- env.RegexpSearch(unsafePath, `\[()Sizeof\]`)) +- }) +-} +diff -urN a/gopls/internal/test/integration/misc/embed_test.go b/gopls/internal/test/integration/misc/embed_test.go +--- a/gopls/internal/test/integration/misc/embed_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/integration/misc/embed_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,41 +0,0 @@ +-// Copyright 2021 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package misc +- +-import ( +- "testing" +- +- . "golang.org/x/tools/gopls/internal/test/integration" +-) +- +-func TestMissingPatternDiagnostic(t *testing.T) { +- const files = ` +--- go.mod -- +-module example.com +--- x.go -- +-package x +- +-import ( +- _ "embed" +-) +- +-// Issue 47436 +-func F() {} +- +-//go:embed NONEXISTENT +-var Foo string +-` +- Run(t, files, func(t *testing.T, env *Env) { +- env.OpenFile("x.go") +- env.AfterChange( +- Diagnostics( +- env.AtRegexp("x.go", `NONEXISTENT`), +- WithMessage("no matching files found"), +- ), +- ) +- env.RegexpReplace("x.go", `NONEXISTENT`, "x.go") +- env.AfterChange(NoDiagnostics(ForFile("x.go"))) +- }) +-} +diff -urN a/gopls/internal/test/integration/misc/extract_test.go b/gopls/internal/test/integration/misc/extract_test.go +--- a/gopls/internal/test/integration/misc/extract_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/integration/misc/extract_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,67 +0,0 @@ +-// Copyright 2022 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package misc +- +-import ( +- "testing" +- +- "golang.org/x/tools/gopls/internal/settings" +- "golang.org/x/tools/gopls/internal/test/compare" +- . "golang.org/x/tools/gopls/internal/test/integration" +- +- "golang.org/x/tools/gopls/internal/protocol" +-) +- +-func TestExtractFunction(t *testing.T) { +- const files = ` +--- go.mod -- +-module mod.com +- +-go 1.12 +--- main.go -- +-package main +- +-func Foo() int { +- a := 5 +- return a +-} +-` +- Run(t, files, func(t *testing.T, env *Env) { +- env.OpenFile("main.go") +- loc := env.RegexpSearch("main.go", `a := 5\n.*return a`) +- actions, err := env.Editor.CodeAction(env.Ctx, loc, nil, protocol.CodeActionUnknownTrigger) +- if err != nil { +- t.Fatal(err) +- } +- +- // Find the extract function code action. 
+- var extractFunc *protocol.CodeAction +- for _, action := range actions { +- if action.Kind == settings.RefactorExtractFunction { +- extractFunc = &action +- break +- } +- } +- if extractFunc == nil { +- t.Fatal("could not find extract function action") +- } +- +- env.ApplyCodeAction(*extractFunc) +- want := `package main +- +-func Foo() int { +- return newFunction() +-} +- +-func newFunction() int { +- a := 5 +- return a +-} +-` +- if got := env.BufferText("main.go"); got != want { +- t.Fatalf("TestFillStruct failed:\n%s", compare.Text(want, got)) +- } +- }) +-} +diff -urN a/gopls/internal/test/integration/misc/failures_test.go b/gopls/internal/test/integration/misc/failures_test.go +--- a/gopls/internal/test/integration/misc/failures_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/integration/misc/failures_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,82 +0,0 @@ +-// Copyright 2020 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package misc +- +-import ( +- "testing" +- +- "golang.org/x/tools/gopls/internal/test/compare" +- . "golang.org/x/tools/gopls/internal/test/integration" +-) +- +-// This is a slight variant of TestHoverOnError in definition_test.go +-// that includes a line directive, which makes no difference since +-// gopls ignores line directives. +-func TestHoverFailure(t *testing.T) { +- const mod = ` +--- go.mod -- +-module mod.com +- +-go 1.12 +--- a.y -- +-DWIM(main) +- +--- main.go -- +-//line a.y:1 +-package main +- +-func main() { +- var err error +- err.Error() +-}` +- Run(t, mod, func(t *testing.T, env *Env) { +- env.OpenFile("main.go") +- content, _ := env.Hover(env.RegexpSearch("main.go", "Error")) +- if content == nil { +- t.Fatalf("Hover('Error') returned nil") +- } +- want := "```go\nfunc (error).Error() string\n```" +- if content.Value != want { +- t.Fatalf("wrong Hover('Error') content:\n%s", compare.Text(want, content.Value)) +- } +- }) +-} +- +-// This test demonstrates a case where gopls is not at all confused by +-// line directives, because it completely ignores them. +-func TestFailingDiagnosticClearingOnEdit(t *testing.T) { +- // badPackageDup contains a duplicate definition of the 'A' const. +- // This is a minor variant of TestDiagnosticClearingOnEdit from +- // diagnostics_test.go, with a line directive, which makes no difference. +- const badPackageDup = ` +--- go.mod -- +-module mod.com +- +-go 1.12 +--- a.go -- +-package consts +- +-const A = 1 +--- b.go -- +-package consts +-//line gen.go:5 +-const A = 2 +-` +- +- Run(t, badPackageDup, func(t *testing.T, env *Env) { +- env.OpenFile("b.go") +- env.AfterChange( +- Diagnostics(env.AtRegexp("b.go", `A = 2`), WithMessage("A redeclared")), +- Diagnostics(env.AtRegexp("a.go", `A = 1`), WithMessage("other declaration")), +- ) +- +- // Fix the error by editing the const name A in b.go to `B`. +- env.RegexpReplace("b.go", "(A) = 2", "B") +- env.AfterChange( +- NoDiagnostics(ForFile("a.go")), +- NoDiagnostics(ForFile("b.go")), +- ) +- }) +-} +diff -urN a/gopls/internal/test/integration/misc/fix_test.go b/gopls/internal/test/integration/misc/fix_test.go +--- a/gopls/internal/test/integration/misc/fix_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/integration/misc/fix_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,163 +0,0 @@ +-// Copyright 2020 The Go Authors. All rights reserved. 
+-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package misc +- +-import ( +- "testing" +- +- "golang.org/x/tools/gopls/internal/settings" +- "golang.org/x/tools/gopls/internal/test/compare" +- . "golang.org/x/tools/gopls/internal/test/integration" +- +- "golang.org/x/tools/gopls/internal/protocol" +-) +- +-// A basic test for fillstruct, now that it uses a command and supports resolve edits. +-func TestFillStruct(t *testing.T) { +- tc := []struct { +- name string +- capabilities string +- wantCommand bool +- }{ +- {"default", "{}", false}, +- {"no data support", `{"textDocument": {"codeAction": {"dataSupport": false, "resolveSupport": {"properties": ["edit"]}}}}`, true}, +- {"no resolve support", `{"textDocument": {"codeAction": {"dataSupport": true, "resolveSupport": {"properties": []}}}}`, true}, +- {"data and resolve support", `{"textDocument": {"codeAction": {"dataSupport": true, "resolveSupport": {"properties": ["edit"]}}}}`, false}, +- } +- +- const basic = ` +--- go.mod -- +-module mod.com +- +-go 1.14 +--- main.go -- +-package main +- +-type Info struct { +- WordCounts map[string]int +- Words []string +-} +- +-func Foo() { +- _ = Info{} +-} +-` +- +- for _, tt := range tc { +- t.Run(tt.name, func(t *testing.T) { +- runner := WithOptions(CapabilitiesJSON([]byte(tt.capabilities))) +- +- runner.Run(t, basic, func(t *testing.T, env *Env) { +- env.OpenFile("main.go") +- fixes, err := env.Editor.CodeActions(env.Ctx, env.RegexpSearch("main.go", "Info{}"), nil, settings.RefactorRewriteFillStruct) +- if err != nil { +- t.Fatal(err) +- } +- +- if len(fixes) != 1 { +- t.Fatalf("expected 1 code action, got %v", len(fixes)) +- } +- if tt.wantCommand { +- if fixes[0].Command == nil || fixes[0].Data != nil { +- t.Errorf("expected code action to have command not data, got %v", fixes[0]) +- } +- } else { +- if fixes[0].Command != nil || fixes[0].Data == nil { +- t.Errorf("expected code action to have command not data, got %v", fixes[0]) +- } +- } +- +- // Apply the code action (handles resolving the code action), and check that the result is correct. +- if err := env.Editor.RefactorRewrite(env.Ctx, env.RegexpSearch("main.go", "Info{}")); err != nil { +- t.Fatal(err) +- } +- want := `package main +- +-type Info struct { +- WordCounts map[string]int +- Words []string +-} +- +-func Foo() { +- _ = Info{ +- WordCounts: map[string]int{}, +- Words: []string{}, +- } +-} +-` +- if got := env.BufferText("main.go"); got != want { +- t.Fatalf("TestFillStruct failed:\n%s", compare.Text(want, got)) +- } +- }) +- }) +- } +-} +- +-func TestFillReturns(t *testing.T) { +- const files = ` +--- go.mod -- +-module mod.com +- +-go 1.12 +--- main.go -- +-package main +- +-func Foo() error { +- return +-} +-` +- Run(t, files, func(t *testing.T, env *Env) { +- env.OpenFile("main.go") +- var d protocol.PublishDiagnosticsParams +- env.AfterChange( +- // The error message here changed in 1.18; "return values" covers both forms. 
+- Diagnostics(env.AtRegexp("main.go", `return`), WithMessage("return values")), +- ReadDiagnostics("main.go", &d), +- ) +- var quickFixes []*protocol.CodeAction +- for _, act := range env.CodeActionForFile("main.go", d.Diagnostics) { +- if act.Kind == protocol.QuickFix { +- act := act // remove in go1.22 +- quickFixes = append(quickFixes, &act) +- } +- } +- if len(quickFixes) != 1 { +- t.Fatalf("expected 1 quick fix, got %d:\n%v", len(quickFixes), quickFixes) +- } +- env.ApplyQuickFixes("main.go", d.Diagnostics) +- env.AfterChange(NoDiagnostics(ForFile("main.go"))) +- }) +-} +- +-func TestUnusedParameter_Issue63755(t *testing.T) { +- // This test verifies the fix for #63755, where codeActions panicked on parameters +- // of functions with no function body. +- +- // We should not detect parameters as unused for external functions. +- +- const files = ` +--- go.mod -- +-module unused.mod +- +-go 1.18 +- +--- external.go -- +-package external +- +-func External(z int) +- +-func _() { +- External(1) +-} +- ` +- Run(t, files, func(t *testing.T, env *Env) { +- env.OpenFile("external.go") +- _, err := env.Editor.CodeAction(env.Ctx, env.RegexpSearch("external.go", "z"), nil, protocol.CodeActionUnknownTrigger) +- if err != nil { +- t.Fatal(err) +- } +- // yay, no panic +- }) +-} +diff -urN a/gopls/internal/test/integration/misc/formatting_test.go b/gopls/internal/test/integration/misc/formatting_test.go +--- a/gopls/internal/test/integration/misc/formatting_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/integration/misc/formatting_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,357 +0,0 @@ +-// Copyright 2020 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package misc +- +-import ( +- "strings" +- "testing" +- +- "golang.org/x/tools/gopls/internal/test/compare" +- . "golang.org/x/tools/gopls/internal/test/integration" +-) +- +-const unformattedProgram = ` +--- main.go -- +-package main +-import "fmt" +-func main( ) { +- fmt.Println("Hello World.") +-} +--- main.go.golden -- +-package main +- +-import "fmt" +- +-func main() { +- fmt.Println("Hello World.") +-} +-` +- +-func TestFormatting(t *testing.T) { +- Run(t, unformattedProgram, func(t *testing.T, env *Env) { +- env.OpenFile("main.go") +- env.FormatBuffer("main.go") +- got := env.BufferText("main.go") +- want := env.ReadWorkspaceFile("main.go.golden") +- if got != want { +- t.Errorf("unexpected formatting result:\n%s", compare.Text(want, got)) +- } +- }) +-} +- +-// Tests golang/go#36824. +-func TestFormattingOneLine36824(t *testing.T) { +- const onelineProgram = ` +--- a.go -- +-package main; func f() {} +- +--- a.go.formatted -- +-package main +- +-func f() {} +-` +- Run(t, onelineProgram, func(t *testing.T, env *Env) { +- env.OpenFile("a.go") +- env.FormatBuffer("a.go") +- got := env.BufferText("a.go") +- want := env.ReadWorkspaceFile("a.go.formatted") +- if got != want { +- t.Errorf("unexpected formatting result:\n%s", compare.Text(want, got)) +- } +- }) +-} +- +-// Tests golang/go#36824. 
+-func TestFormattingOneLineImports36824(t *testing.T) { +- const onelineProgramA = ` +--- a.go -- +-package x; func f() {fmt.Println()} +- +--- a.go.imported -- +-package x +- +-import "fmt" +- +-func f() { fmt.Println() } +-` +- Run(t, onelineProgramA, func(t *testing.T, env *Env) { +- env.OpenFile("a.go") +- env.OrganizeImports("a.go") +- got := env.BufferText("a.go") +- want := env.ReadWorkspaceFile("a.go.imported") +- if got != want { +- t.Errorf("unexpected formatting result:\n%s", compare.Text(want, got)) +- } +- }) +-} +- +-func TestFormattingOneLineRmImports36824(t *testing.T) { +- const onelineProgramB = ` +--- a.go -- +-package x; import "os"; func f() {} +- +--- a.go.imported -- +-package x +- +-func f() {} +-` +- Run(t, onelineProgramB, func(t *testing.T, env *Env) { +- env.OpenFile("a.go") +- env.OrganizeImports("a.go") +- got := env.BufferText("a.go") +- want := env.ReadWorkspaceFile("a.go.imported") +- if got != want { +- t.Errorf("unexpected formatting result:\n%s", compare.Text(want, got)) +- } +- }) +-} +- +-const disorganizedProgram = ` +--- main.go -- +-package main +- +-import ( +- "fmt" +- "errors" +-) +-func main( ) { +- fmt.Println(errors.New("bad")) +-} +--- main.go.organized -- +-package main +- +-import ( +- "errors" +- "fmt" +-) +-func main( ) { +- fmt.Println(errors.New("bad")) +-} +--- main.go.formatted -- +-package main +- +-import ( +- "errors" +- "fmt" +-) +- +-func main() { +- fmt.Println(errors.New("bad")) +-} +-` +- +-func TestOrganizeImports(t *testing.T) { +- Run(t, disorganizedProgram, func(t *testing.T, env *Env) { +- env.OpenFile("main.go") +- env.OrganizeImports("main.go") +- got := env.BufferText("main.go") +- want := env.ReadWorkspaceFile("main.go.organized") +- if got != want { +- t.Errorf("unexpected formatting result:\n%s", compare.Text(want, got)) +- } +- }) +-} +- +-func TestFormattingOnSave(t *testing.T) { +- Run(t, disorganizedProgram, func(t *testing.T, env *Env) { +- env.OpenFile("main.go") +- env.SaveBuffer("main.go") +- got := env.BufferText("main.go") +- want := env.ReadWorkspaceFile("main.go.formatted") +- if got != want { +- t.Errorf("unexpected formatting result:\n%s", compare.Text(want, got)) +- } +- }) +-} +- +-// Tests various possibilities for comments in files with CRLF line endings. +-// Import organization in these files has historically been a source of bugs. +-func TestCRLFLineEndings(t *testing.T) { +- for _, tt := range []struct { +- issue, input, want string +- }{ +- { +- issue: "41057", +- want: `package main +- +-/* +-Hi description +-*/ +-func Hi() { +-} +-`, +- }, +- { +- issue: "42646", +- want: `package main +- +-import ( +- "fmt" +-) +- +-/* +-func upload(c echo.Context) error { +- if err := r.ParseForm(); err != nil { +- fmt.Fprintf(w, "ParseForm() err: %v", err) +- return +- } +- fmt.Fprintf(w, "POST request successful") +- path_ver := r.FormValue("path_ver") +- ukclin_ver := r.FormValue("ukclin_ver") +- +- fmt.Fprintf(w, "Name = %s\n", path_ver) +- fmt.Fprintf(w, "Address = %s\n", ukclin_ver) +-} +-*/ +- +-func main() { +- const server_port = 8080 +- fmt.Printf("port: %d\n", server_port) +-} +-`, +- }, +- { +- issue: "42923", +- want: `package main +- +-// Line 1. 
+-// aa +-type Tree struct { +- arr []string +-} +-`, +- }, +- { +- issue: "47200", +- input: `package main +- +-import "fmt" +- +-func main() { +- math.Sqrt(9) +- fmt.Println("hello") +-} +-`, +- want: `package main +- +-import ( +- "fmt" +- "math" +-) +- +-func main() { +- math.Sqrt(9) +- fmt.Println("hello") +-} +-`, +- }, +- } { +- t.Run(tt.issue, func(t *testing.T) { +- Run(t, "-- main.go --", func(t *testing.T, env *Env) { +- input := tt.input +- if input == "" { +- input = tt.want +- } +- crlf := strings.ReplaceAll(input, "\n", "\r\n") +- env.CreateBuffer("main.go", crlf) +- env.Await(env.DoneWithOpen()) +- env.OrganizeImports("main.go") +- got := env.BufferText("main.go") +- got = strings.ReplaceAll(got, "\r\n", "\n") // convert everything to LF for simplicity +- if tt.want != got { +- t.Errorf("unexpected content after save:\n%s", compare.Text(tt.want, got)) +- } +- }) +- }) +- } +-} +- +-func TestGofumptFormatting(t *testing.T) { +- // Exercise some gofumpt formatting rules: +- // - No empty lines following an assignment operator +- // - Octal integer literals should use the 0o prefix on modules using Go +- // 1.13 and later. Requires LangVersion to be correctly resolved. +- // - std imports must be in a separate group at the top. Requires ModulePath +- // to be correctly resolved. +- const input = ` +--- go.mod -- +-module foo +- +-go 1.17 +--- foo.go -- +-package foo +- +-import ( +- "foo/bar" +- "fmt" +-) +- +-const perm = 0755 +- +-func foo() { +- foo := +- "bar" +- fmt.Println(foo, bar.Bar) +-} +--- foo.go.formatted -- +-package foo +- +-import ( +- "fmt" +- +- "foo/bar" +-) +- +-const perm = 0o755 +- +-func foo() { +- foo := "bar" +- fmt.Println(foo, bar.Bar) +-} +--- bar/bar.go -- +-package bar +- +-const Bar = 42 +-` +- +- WithOptions( +- Settings{ +- "gofumpt": true, +- }, +- ).Run(t, input, func(t *testing.T, env *Env) { +- env.OpenFile("foo.go") +- env.FormatBuffer("foo.go") +- got := env.BufferText("foo.go") +- want := env.ReadWorkspaceFile("foo.go.formatted") +- if got != want { +- t.Errorf("unexpected formatting result:\n%s", compare.Text(want, got)) +- } +- }) +-} +- +-func TestGofumpt_Issue61692(t *testing.T) { +- const input = ` +--- go.mod -- +-module foo +- +-go 1.21rc3 +--- foo.go -- +-package foo +- +-func _() { +- foo := +- "bar" +-} +-` +- +- WithOptions( +- Settings{ +- "gofumpt": true, +- }, +- ).Run(t, input, func(t *testing.T, env *Env) { +- env.OpenFile("foo.go") +- env.FormatBuffer("foo.go") // golang/go#61692: must not panic +- }) +-} +diff -urN a/gopls/internal/test/integration/misc/generate_test.go b/gopls/internal/test/integration/misc/generate_test.go +--- a/gopls/internal/test/integration/misc/generate_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/integration/misc/generate_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,135 +0,0 @@ +-// Copyright 2020 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-// TODO(rfindley): figure out why go generate fails on android builders. +- +-//go:build !android +-// +build !android +- +-package misc +- +-import ( +- "testing" +- +- . 
"golang.org/x/tools/gopls/internal/test/integration" +-) +- +-func TestGenerateProgress(t *testing.T) { +- const generatedWorkspace = ` +--- go.mod -- +-module fake.test +- +-go 1.14 +--- generate.go -- +-// +build ignore +- +-package main +- +-import ( +- "os" +-) +- +-func main() { +- os.WriteFile("generated.go", []byte("package " + os.Args[1] + "\n\nconst Answer = 21"), 0644) +-} +- +--- lib1/lib.go -- +-package lib1 +- +-//` + `go:generate go run ../generate.go lib1 +- +--- lib2/lib.go -- +-package lib2 +- +-//` + `go:generate go run ../generate.go lib2 +- +--- main.go -- +-package main +- +-import ( +- "fake.test/lib1" +- "fake.test/lib2" +-) +- +-func main() { +- println(lib1.Answer + lib2.Answer) +-} +-` +- +- Run(t, generatedWorkspace, func(t *testing.T, env *Env) { +- env.OnceMet( +- InitialWorkspaceLoad, +- Diagnostics(env.AtRegexp("main.go", "lib1.(Answer)")), +- ) +- env.RunGenerate("./lib1") +- env.RunGenerate("./lib2") +- env.AfterChange( +- NoDiagnostics(ForFile("main.go")), +- ) +- }) +-} +- +-func TestGenerateUseNetwork(t *testing.T) { +- const proxy = ` +--- example.com@v1.2.3/go.mod -- +-module example.com +- +-go 1.21 +--- example.com@v1.2.3/main.go -- +-package main +- +-func main() { +- println("hello world") +-} +-` +- const generatedWorkspace = ` +--- go.mod -- +-module fake.test +- +-go 1.21 +--- main.go -- +- +-package main +- +-//go:` + /* hide this string from the go command */ `generate go run example.com@latest +- +-` +- WithOptions(ProxyFiles(proxy)). +- Run(t, generatedWorkspace, func(t *testing.T, env *Env) { +- env.OnceMet( +- InitialWorkspaceLoad, +- ) +- env.RunGenerate("./") +- }) +-} +- +-func TestEditingGeneratedFileWarning(t *testing.T) { +- const src = ` +--- go.mod -- +-module example.com +-go 1.21 +- +--- a/a.go -- +-// Code generated by me. DO NOT EDIT. +- +-package a +- +-var x = 1 +-` +- Run(t, src, func(t *testing.T, env *Env) { +- env.OpenFile("a/a.go") +- collectMessages := env.Awaiter.ListenToShownMessages() +- env.RegexpReplace("a/a.go", "var", "const") +- env.Await(env.DoneWithChange()) +- messages := collectMessages() +- +- const want = "Warning: editing a.go, a generated file." +- if len(messages) != 1 || messages[0].Message != want { +- for _, message := range messages { +- t.Errorf("got message %q", message.Message) +- } +- t.Errorf("no %q warning", want) +- } +- }) +-} +diff -urN a/gopls/internal/test/integration/misc/highlight_test.go b/gopls/internal/test/integration/misc/highlight_test.go +--- a/gopls/internal/test/integration/misc/highlight_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/integration/misc/highlight_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,154 +0,0 @@ +-// Copyright 2021 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package misc +- +-import ( +- "sort" +- "testing" +- +- "golang.org/x/tools/gopls/internal/protocol" +- . 
"golang.org/x/tools/gopls/internal/test/integration" +-) +- +-func TestWorkspacePackageHighlight(t *testing.T) { +- const mod = ` +--- go.mod -- +-module mod.com +- +-go 1.12 +--- main.go -- +-package main +- +-func main() { +- var A string = "A" +- x := "x-" + A +- println(A, x) +-}` +- +- Run(t, mod, func(t *testing.T, env *Env) { +- const file = "main.go" +- env.OpenFile(file) +- loc := env.FirstDefinition(env.RegexpSearch(file, `var (A) string`)) +- +- checkHighlights(env, loc, 3) +- }) +-} +- +-func TestStdPackageHighlight_Issue43511(t *testing.T) { +- const mod = ` +--- go.mod -- +-module mod.com +- +-go 1.12 +--- main.go -- +-package main +- +-import "fmt" +- +-func main() { +- fmt.Printf() +-}` +- +- Run(t, mod, func(t *testing.T, env *Env) { +- env.OpenFile("main.go") +- defLoc := env.FirstDefinition(env.RegexpSearch("main.go", `fmt\.(Printf)`)) +- file := env.Sandbox.Workdir.URIToPath(defLoc.URI) +- env.OpenFile(file) +- loc := env.RegexpSearch(file, `func Printf\((format) string`) +- +- checkHighlights(env, loc, 2) +- }) +-} +- +-func TestThirdPartyPackageHighlight_Issue43511(t *testing.T) { +- const proxy = ` +--- example.com@v1.2.3/go.mod -- +-module example.com +- +-go 1.12 +--- example.com@v1.2.3/global/global.go -- +-package global +- +-const A = 1 +- +-func foo() { +- _ = A +-} +- +-func bar() int { +- return A + A +-} +--- example.com@v1.2.3/local/local.go -- +-package local +- +-func foo() int { +- const b = 2 +- +- return b * b * (b+1) + b +-}` +- +- const mod = ` +--- go.mod -- +-module mod.com +- +-go 1.12 +- +-require example.com v1.2.3 +--- main.go -- +-package main +- +-import ( +- _ "example.com/global" +- _ "example.com/local" +-) +- +-func main() {}` +- +- WithOptions( +- ProxyFiles(proxy), +- WriteGoSum("."), +- ).Run(t, mod, func(t *testing.T, env *Env) { +- env.OpenFile("main.go") +- +- defLoc := env.FirstDefinition(env.RegexpSearch("main.go", `"example.com/global"`)) +- file := env.Sandbox.Workdir.URIToPath(defLoc.URI) +- env.OpenFile(file) +- loc := env.RegexpSearch(file, `const (A)`) +- checkHighlights(env, loc, 4) +- +- defLoc = env.FirstDefinition(env.RegexpSearch("main.go", `"example.com/local"`)) +- file = env.Sandbox.Workdir.URIToPath(defLoc.URI) +- env.OpenFile(file) +- loc = env.RegexpSearch(file, `const (b)`) +- checkHighlights(env, loc, 5) +- }) +-} +- +-func checkHighlights(env *Env, loc protocol.Location, highlightCount int) { +- t := env.TB +- t.Helper() +- +- highlights := env.DocumentHighlight(loc) +- if len(highlights) != highlightCount { +- t.Fatalf("expected %v highlight(s), got %v", highlightCount, len(highlights)) +- } +- +- references := env.References(loc) +- if len(highlights) != len(references) { +- t.Fatalf("number of highlights and references is expected to be equal: %v != %v", len(highlights), len(references)) +- } +- +- sort.Slice(highlights, func(i, j int) bool { +- return protocol.CompareRange(highlights[i].Range, highlights[j].Range) < 0 +- }) +- sort.Slice(references, func(i, j int) bool { +- return protocol.CompareRange(references[i].Range, references[j].Range) < 0 +- }) +- for i := range highlights { +- if highlights[i].Range != references[i].Range { +- t.Errorf("highlight and reference ranges are expected to be equal: %v != %v", highlights[i].Range, references[i].Range) +- } +- } +-} +diff -urN a/gopls/internal/test/integration/misc/hover_test.go b/gopls/internal/test/integration/misc/hover_test.go +--- a/gopls/internal/test/integration/misc/hover_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ 
b/gopls/internal/test/integration/misc/hover_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,806 +0,0 @@ +-// Copyright 2021 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package misc +- +-import ( +- "fmt" +- "regexp" +- "strings" +- "testing" +- +- "github.com/google/go-cmp/cmp" +- "golang.org/x/tools/gopls/internal/protocol" +- . "golang.org/x/tools/gopls/internal/test/integration" +- "golang.org/x/tools/gopls/internal/test/integration/fake" +-) +- +-func TestHoverUnexported(t *testing.T) { +- const proxy = ` +--- golang.org/x/structs@v1.0.0/go.mod -- +-module golang.org/x/structs +- +-go 1.21 +- +--- golang.org/x/structs@v1.0.0/types.go -- +-package structs +- +-type Mixed struct { +- // Exported comment +- Exported int +- unexported string +-} +- +-func printMixed(m Mixed) { +- println(m) +-} +-` +- const mod = ` +--- go.mod -- +-module mod.com +- +-go 1.21 +- +-require golang.org/x/structs v1.0.0 +--- main.go -- +-package main +- +-import "golang.org/x/structs" +- +-func main() { +- var m structs.Mixed +- _ = m.Exported +-} +-` +- +- // TODO: use a nested workspace folder here. +- WithOptions( +- ProxyFiles(proxy), +- WriteGoSum("."), +- ).Run(t, mod, func(t *testing.T, env *Env) { +- env.OpenFile("main.go") +- mixedLoc := env.RegexpSearch("main.go", "Mixed") +- got, _ := env.Hover(mixedLoc) +- if !strings.Contains(got.Value, "unexported") { +- t.Errorf("Workspace hover: missing expected field 'unexported'. Got:\n%q", got.Value) +- } +- +- cacheLoc := env.FirstDefinition(mixedLoc) +- cacheFile := env.Sandbox.Workdir.URIToPath(cacheLoc.URI) +- env.OpenFile(cacheFile) +- argLoc := env.RegexpSearch(cacheFile, "printMixed.*(Mixed)") +- got, _ = env.Hover(argLoc) +- if !strings.Contains(got.Value, "unexported") { +- t.Errorf("Non-workspace hover: missing expected field 'unexported'. Got:\n%q", got.Value) +- } +- +- exportedFieldLoc := env.RegexpSearch("main.go", "Exported") +- got, _ = env.Hover(exportedFieldLoc) +- if !strings.Contains(got.Value, "comment") { +- t.Errorf("Workspace hover: missing comment for field 'Exported'. Got:\n%q", got.Value) +- } +- }) +-} +- +-func TestHoverIntLiteral(t *testing.T) { +- const source = ` +--- main.go -- +-package main +- +-var ( +- bigBin = 0b1001001 +-) +- +-var hex = 0xe34e +- +-func main() { +-} +-` +- Run(t, source, func(t *testing.T, env *Env) { +- env.OpenFile("main.go") +- hexExpected := "58190" +- got, _ := env.Hover(env.RegexpSearch("main.go", "0xe")) +- if got != nil && !strings.Contains(got.Value, hexExpected) { +- t.Errorf("Hover: missing expected field '%s'. Got:\n%q", hexExpected, got.Value) +- } +- +- binExpected := "73" +- got, _ = env.Hover(env.RegexpSearch("main.go", "0b1")) +- if got != nil && !strings.Contains(got.Value, binExpected) { +- t.Errorf("Hover: missing expected field '%s'. Got:\n%q", binExpected, got.Value) +- } +- }) +-} +- +-// Tests that hovering does not trigger the panic in golang/go#48249. +-func TestPanicInHoverBrokenCode(t *testing.T) { +- // Note: this test can not be expressed as a marker test, as it must use +- // content without a trailing newline. 
+- const source = ` +--- main.go -- +-package main +- +-type Example struct` +- Run(t, source, func(t *testing.T, env *Env) { +- env.OpenFile("main.go") +- env.Editor.Hover(env.Ctx, env.RegexpSearch("main.go", "Example")) +- }) +-} +- +-func TestHoverRune_48492(t *testing.T) { +- const files = ` +--- go.mod -- +-module mod.com +- +-go 1.18 +--- main.go -- +-package main +-` +- Run(t, files, func(t *testing.T, env *Env) { +- env.OpenFile("main.go") +- env.EditBuffer("main.go", fake.NewEdit(0, 0, 1, 0, "package main\nfunc main() {\nconst x = `\nfoo\n`\n}")) +- env.Editor.Hover(env.Ctx, env.RegexpSearch("main.go", "foo")) +- }) +-} +- +-func TestHoverImport(t *testing.T) { +- const packageDoc1 = "Package lib1 hover documentation" +- const packageDoc2 = "Package lib2 hover documentation" +- tests := []struct { +- hoverPackage string +- want string +- wantError bool +- }{ +- { +- "mod.com/lib1", +- packageDoc1, +- false, +- }, +- { +- "mod.com/lib2", +- packageDoc2, +- false, +- }, +- { +- "mod.com/lib3", +- "", +- false, +- }, +- { +- "mod.com/lib4", +- "", +- true, +- }, +- } +- source := fmt.Sprintf(` +--- go.mod -- +-module mod.com +- +-go 1.12 +--- lib1/a.go -- +-// %s +-package lib1 +- +-const C = 1 +- +--- lib1/b.go -- +-package lib1 +- +-const D = 1 +- +--- lib2/a.go -- +-// %s +-package lib2 +- +-const E = 1 +- +--- lib3/a.go -- +-package lib3 +- +-const F = 1 +- +--- main.go -- +-package main +- +-import ( +- "mod.com/lib1" +- "mod.com/lib2" +- "mod.com/lib3" +- "mod.com/lib4" +-) +- +-func main() { +- println("Hello") +-} +- `, packageDoc1, packageDoc2) +- Run(t, source, func(t *testing.T, env *Env) { +- env.OpenFile("main.go") +- for _, test := range tests { +- got, _, err := env.Editor.Hover(env.Ctx, env.RegexpSearch("main.go", test.hoverPackage)) +- if test.wantError { +- if err == nil { +- t.Errorf("Hover(%q) succeeded unexpectedly", test.hoverPackage) +- } +- } else if !strings.Contains(got.Value, test.want) { +- t.Errorf("Hover(%q): got:\n%q\nwant:\n%q", test.hoverPackage, got.Value, test.want) +- } +- } +- }) +-} +- +-func TestHoverPackageIdent(t *testing.T) { +- const packageDoc1 = "Package lib1 hover documentation" +- const packageDoc2 = "Package lib2 hover documentation" +- tests := []struct { +- hoverIdent string +- want string +- wantError bool +- }{ +- { +- "lib1", +- packageDoc1, +- false, +- }, +- { +- "lib2", +- packageDoc2, +- false, +- }, +- { +- "lib3", +- "", +- false, +- }, +- { +- "lib4", +- "", +- true, +- }, +- } +- source := fmt.Sprintf(` +--- go.mod -- +-module mod.com +- +-go 1.12 +--- lib1/a.go -- +-// %s +-package lib1 +- +-const C = 1 +- +--- lib1/b.go -- +-package lib1 +- +-const D = 1 +- +--- lib2/a.go -- +-// %s +-package lib2 +- +-const E = 1 +- +--- lib3/a.go -- +-package lib3 +- +-const F = 1 +- +--- main.go -- +-package main +- +-import ( +- "mod.com/lib1" +- "mod.com/lib2" +- "mod.com/lib3" +- "mod.com/lib4" +-) +- +-func main() { +- println(lib1.C) +- println(lib2.E) +- println(lib3.F) +- println(lib4.Z) +-} +- `, packageDoc1, packageDoc2) +- Run(t, source, func(t *testing.T, env *Env) { +- env.OpenFile("main.go") +- for _, test := range tests { +- got, _, err := env.Editor.Hover(env.Ctx, env.RegexpSearch("main.go", "("+test.hoverIdent+")\\.")) +- if test.wantError { +- if err == nil { +- t.Errorf("Hover(%q) succeeded unexpectedly", test.hoverIdent) +- } +- } else if !strings.Contains(got.Value, test.want) { +- t.Errorf("Hover(%q): got:\n%q\nwant:\n%q", test.hoverIdent, got.Value, test.want) +- } +- } +- }) +-} +- +-// for x/tools/gopls: 
unhandled named anchor on the hover #57048 +-func TestHoverTags(t *testing.T) { +- const source = ` +--- go.mod -- +-module mod.com +- +-go 1.19 +- +--- lib/a.go -- +- +-// variety of execution modes. +-// +-// # Test package setup +-// +-// The regression test package uses a couple of uncommon patterns to reduce +-package lib +- +--- a.go -- +- package main +- import "mod.com/lib" +- +- const A = 1 +- +-} +-` +- Run(t, source, func(t *testing.T, env *Env) { +- t.Run("tags", func(t *testing.T) { +- env.OpenFile("a.go") +- z := env.RegexpSearch("a.go", "lib") +- t.Logf("%#v", z) +- got, _ := env.Hover(env.RegexpSearch("a.go", "lib")) +- if strings.Contains(got.Value, "{#hdr-") { +- t.Errorf("Hover: got {#hdr- tag:\n%q", got) +- } +- }) +- }) +-} +- +-// This is a regression test for Go issue #57625. +-func TestHoverModMissingModuleStmt(t *testing.T) { +- const source = ` +--- go.mod -- +-go 1.16 +-` +- Run(t, source, func(t *testing.T, env *Env) { +- env.OpenFile("go.mod") +- env.Hover(env.RegexpSearch("go.mod", "go")) // no panic +- }) +-} +- +-func TestHoverCompletionMarkdown(t *testing.T) { +- const source = ` +--- go.mod -- +-module mod.com +-go 1.19 +--- main.go -- +-package main +-// Just says [hello]. +-// +-// [hello]: https://en.wikipedia.org/wiki/Hello +-func Hello() string { +- Hello() //Here +- return "hello" +-} +-` +- Run(t, source, func(t *testing.T, env *Env) { +- // Hover, Completion, and SignatureHelp should all produce markdown +- // check that the markdown for SignatureHelp and Completion are +- // the same, and contained in that for Hover (up to trailing \n) +- env.OpenFile("main.go") +- loc := env.RegexpSearch("main.go", "func (Hello)") +- hover, _ := env.Hover(loc) +- hoverContent := hover.Value +- +- loc = env.RegexpSearch("main.go", "//Here") +- loc.Range.Start.Character -= 3 // Hello(_) //Here +- completions := env.Completion(loc) +- signatures := env.SignatureHelp(loc) +- +- if len(completions.Items) != 1 { +- t.Errorf("got %d completions, expected 1", len(completions.Items)) +- } +- if len(signatures.Signatures) != 1 { +- t.Errorf("got %d signatures, expected 1", len(signatures.Signatures)) +- } +- item := completions.Items[0].Documentation.Value +- var itemContent string +- if x, ok := item.(protocol.MarkupContent); !ok || x.Kind != protocol.Markdown { +- t.Fatalf("%#v is not markdown", item) +- } else { +- itemContent = strings.Trim(x.Value, "\n") +- } +- sig := signatures.Signatures[0].Documentation.Value +- var sigContent string +- if x, ok := sig.(protocol.MarkupContent); !ok || x.Kind != protocol.Markdown { +- t.Fatalf("%#v is not markdown", item) +- } else { +- sigContent = x.Value +- } +- if itemContent != sigContent { +- t.Errorf("item:%q not sig:%q", itemContent, sigContent) +- } +- if !strings.Contains(hoverContent, itemContent) { +- t.Errorf("hover:%q does not contain sig;%q", hoverContent, sigContent) +- } +- }) +-} +- +-// Test that the generated markdown contains links for Go references. 
+-// https://github.com/golang/go/issues/58352 +-func TestHoverLinks(t *testing.T) { +- const input = ` +--- go.mod -- +-go 1.19 +-module mod.com +--- main.go -- +-package main +-// [fmt] +-var A int +-// [fmt.Println] +-var B int +-// [golang.org/x/tools/go/packages.Package.String] +-var C int +-` +- var tests = []struct { +- pat string +- ans string +- }{ +- {"A", "fmt"}, +- {"B", "fmt#Println"}, +- {"C", "golang.org/x/tools/go/packages#Package.String"}, +- } +- for _, test := range tests { +- Run(t, input, func(t *testing.T, env *Env) { +- env.OpenFile("main.go") +- loc := env.RegexpSearch("main.go", test.pat) +- hover, _ := env.Hover(loc) +- hoverContent := hover.Value +- want := fmt.Sprintf("%s/%s", "https://pkg.go.dev", test.ans) +- if !strings.Contains(hoverContent, want) { +- t.Errorf("hover:%q does not contain link %q", hoverContent, want) +- } +- }) +- } +-} +- +-const linknameHover = ` +--- go.mod -- +-module mod.com +- +--- upper/upper.go -- +-package upper +- +-import ( +- _ "unsafe" +- _ "mod.com/lower" +-) +- +-//go:linkname foo mod.com/lower.bar +-func foo() string +- +--- lower/lower.go -- +-package lower +- +-// bar does foo. +-func bar() string { +- return "foo by bar" +-}` +- +-func TestHoverLinknameDirective(t *testing.T) { +- Run(t, linknameHover, func(t *testing.T, env *Env) { +- // Jump from directives 2nd arg. +- env.OpenFile("upper/upper.go") +- from := env.RegexpSearch("upper/upper.go", `lower.bar`) +- +- hover, _ := env.Hover(from) +- content := hover.Value +- +- expect := "bar does foo" +- if !strings.Contains(content, expect) { +- t.Errorf("hover: %q does not contain: %q", content, expect) +- } +- }) +-} +- +-func TestHoverGoWork_Issue60821(t *testing.T) { +- const files = ` +--- go.work -- +-go 1.19 +- +-use ( +- moda +- modb +-) +--- moda/go.mod -- +- +-` +- Run(t, files, func(t *testing.T, env *Env) { +- env.OpenFile("go.work") +- // Neither of the requests below should crash gopls. +- _, _, _ = env.Editor.Hover(env.Ctx, env.RegexpSearch("go.work", "moda")) +- _, _, _ = env.Editor.Hover(env.Ctx, env.RegexpSearch("go.work", "modb")) +- }) +-} +- +-const embedHover = ` +--- go.mod -- +-module mod.com +-go 1.19 +--- main.go -- +-package main +- +-import "embed" +- +-//go:embed *.txt +-var foo embed.FS +- +-func main() { +-} +--- foo.txt -- +-FOO +--- bar.txt -- +-BAR +--- baz.txt -- +-BAZ +--- other.sql -- +-SKIPPED +--- dir.txt/skip.txt -- +-SKIPPED +-` +- +-func TestHoverEmbedDirective(t *testing.T) { +- Run(t, embedHover, func(t *testing.T, env *Env) { +- env.OpenFile("main.go") +- from := env.RegexpSearch("main.go", `\*.txt`) +- +- got, _ := env.Hover(from) +- if got == nil { +- t.Fatalf("hover over //go:embed arg not found") +- } +- content := got.Value +- +- wants := []string{"foo.txt", "bar.txt", "baz.txt"} +- for _, want := range wants { +- if !strings.Contains(content, want) { +- t.Errorf("hover: %q does not contain: %q", content, want) +- } +- } +- +- // A directory should never be matched, even if it happens to have a matching name. +- // Content in subdirectories should not match on only one asterisk. 
+- skips := []string{"other.sql", "dir.txt", "skip.txt"} +- for _, skip := range skips { +- if strings.Contains(content, skip) { +- t.Errorf("hover: %q should not contain: %q", content, skip) +- } +- } +- }) +-} +- +-func TestHoverBrokenImport_Issue60592(t *testing.T) { +- const files = ` +--- go.mod -- +-module testdata +-go 1.18 +- +--- p.go -- +-package main +- +-import foo "a" +- +-func _() { +- foo.Print() +-} +- +-` +- Run(t, files, func(t *testing.T, env *Env) { +- env.OpenFile("p.go") +- // This request should not crash gopls. +- _, _, _ = env.Editor.Hover(env.Ctx, env.RegexpSearch("p.go", "foo[.]")) +- }) +-} +- +-func TestHoverInternalLinks(t *testing.T) { +- const src = ` +--- main.go -- +-package main +- +-import "errors" +- +-func main() { +- errors.New("oops") +-} +-` +- for _, test := range []struct { +- linksInHover any // JSON configuration value +- wantRE string // pattern to match the Hover Markdown output +- }{ +- { +- true, // default: use options.LinkTarget domain +- regexp.QuoteMeta("[`errors.New` on pkg.go.dev](https://pkg.go.dev/errors#New)"), +- }, +- { +- "gopls", // use gopls' internal viewer +- "\\[`errors.New` in gopls doc viewer\\]\\(http://127.0.0.1:[0-9]+/gopls/[^/]+/pkg/errors\\?view=[0-9]+#New\\)", +- }, +- } { +- WithOptions( +- Settings{"linksInHover": test.linksInHover}, +- ).Run(t, src, func(t *testing.T, env *Env) { +- env.OpenFile("main.go") +- got, _ := env.Hover(env.RegexpSearch("main.go", "New")) +- if m, err := regexp.MatchString(test.wantRE, got.Value); err != nil { +- t.Fatalf("bad regexp in test: %v", err) +- } else if !m { +- t.Fatalf("hover output does not match %q; got:\n\n%s", test.wantRE, got.Value) +- } +- }) +- } +-} +- +-func TestHoverInternalLinksIssue68116(t *testing.T) { +- // Links for the internal viewer should not include a module version suffix: +- // the package path and the view are an unambiguous key; see #68116. +- +- const proxy = ` +--- example.com@v1.2.3/go.mod -- +-module example.com +- +-go 1.12 +- +--- example.com@v1.2.3/a/a.go -- +-package a +- +-// F is a function. +-func F() +-` +- +- const mod = ` +--- go.mod -- +-module main +- +-go 1.12 +- +-require example.com v1.2.3 +- +--- main.go -- +-package main +- +-import "example.com/a" +- +-func main() { +- a.F() +-} +-` +- WithOptions( +- ProxyFiles(proxy), +- Settings{"linksInHover": "gopls"}, +- WriteGoSum("."), +- ).Run(t, mod, func(t *testing.T, env *Env) { +- env.OpenFile("main.go") +- got, _ := env.Hover(env.RegexpSearch("main.go", "F")) +- const wantRE = "\\[`a.F` in gopls doc viewer\\]\\(http://127.0.0.1:[0-9]+/gopls/[^/]+/pkg/example.com/a\\?view=[0-9]+#F\\)" // no version +- if m, err := regexp.MatchString(wantRE, got.Value); err != nil { +- t.Fatalf("bad regexp in test: %v", err) +- } else if !m { +- t.Fatalf("hover output does not match %q; got:\n\n%s", wantRE, got.Value) +- } +- }) +-} +- +-func TestHoverBuiltinFile(t *testing.T) { +- // This test verifies that hovering in the builtin file provides the same +- // hover content as hovering over a use of a builtin. +- +- const src = ` +--- p.go -- +-package p +- +-func _() { +- const ( +- _ = iota +- _ = true +- ) +- var ( +- _ any +- err error = e{} // avoid nil deref warning +- ) +- _ = err.Error +- println("Hello") +- _ = min(1, 2) +-} +- +-// e implements Error, for use above. +-type e struct{} +-func (e) Error() string +-` +- +- // Test hovering over various builtins with different kinds of declarations. 
+- tests := []string{ +- "iota", +- "true", +- "any", +- "error", +- "Error", +- "println", +- "min", +- } +- +- Run(t, src, func(t *testing.T, env *Env) { +- env.OpenFile("p.go") +- env.AfterChange(NoDiagnostics()) // avoid accidental compiler errors +- +- for _, builtin := range tests { +- useLocation := env.RegexpSearch("p.go", builtin) +- calleeHover, _ := env.Hover(useLocation) +- declLocation := env.FirstDefinition(useLocation) +- env.OpenFile(env.Sandbox.Workdir.URIToPath(declLocation.URI)) +- declHover, _ := env.Hover(declLocation) +- if diff := cmp.Diff(calleeHover, declHover); diff != "" { +- t.Errorf("Hover mismatch (-callee hover +decl hover):\n%s", diff) +- } +- } +- }) +-} +- +-func TestHoverStdlibWithAvailableVersion(t *testing.T) { +- const src = ` +--- stdlib.go -- +-package stdlib +- +-import "fmt" +-import "context" +-import "crypto" +-import "regexp" +-import "go/doc/comment" +- +-type testRegexp = *regexp.Regexp +- +-func _() { +- var ctx context.Context +- ctx = context.Background() +- if ctx.Err(); e == context.Canceled { +- fmt.Println("Canceled") +- fmt.Printf("%v", crypto.SHA512_224) +- } +- _ := fmt.Appendf(make([]byte, 100), "world, %d", 23) +- +- var re = regexp.MustCompile("\n{2,}") +- copy := re.Copy() +- var testRE testRegexp +- testRE.Longest() +- +- var pr comment.Printer +- pr.HeadingID = func(*comment.Heading) string { return "" } +-} +-` +- +- testcases := []struct { +- symbolRE string // regexp matching symbol to hover over +- shouldContain bool +- targetString string +- }{ +- {"Println", false, "go1.0"}, // package-level func +- {"Appendf", true, "go1.19"}, // package-level func +- {"Background", true, "go1.7"}, // package-level func +- {"Canceled", true, "go1.7"}, // package-level var +- {"Context", true, "go1.7"}, // package-level type +- {"SHA512_224", true, "go1.5"}, // package-level const +- {"Copy", true, "go1.6"}, // method +- {"Longest", true, "go1.1"}, // method with alias receiver +- {"HeadingID", true, "go1.19"}, // field +- } +- +- Run(t, src, func(t *testing.T, env *Env) { +- env.OpenFile("stdlib.go") +- for _, tc := range testcases { +- content, _ := env.Hover(env.RegexpSearch("stdlib.go", tc.symbolRE)) +- if tc.shouldContain && !strings.Contains(content.Value, tc.targetString) { +- t.Errorf("Hover(%q) should contain string %s", tc.symbolRE, tc.targetString) +- } +- if !tc.shouldContain && strings.Contains(content.Value, tc.targetString) { +- t.Errorf("Hover(%q) should not contain string %s", tc.symbolRE, tc.targetString) +- } +- } +- }) +-} +diff -urN a/gopls/internal/test/integration/misc/imports_test.go b/gopls/internal/test/integration/misc/imports_test.go +--- a/gopls/internal/test/integration/misc/imports_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/integration/misc/imports_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,762 +0,0 @@ +-// Copyright 2020 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package misc +- +-import ( +- "os" +- "path/filepath" +- "runtime" +- "strings" +- "testing" +- +- "golang.org/x/tools/gopls/internal/settings" +- "golang.org/x/tools/gopls/internal/test/compare" +- . "golang.org/x/tools/gopls/internal/test/integration" +- "golang.org/x/tools/gopls/internal/test/integration/fake" +- "golang.org/x/tools/internal/modindex" +- +- "golang.org/x/tools/gopls/internal/protocol" +-) +- +-// Tests golang/go#38815. 
+-func TestIssue38815(t *testing.T) { +- const needs = ` +--- go.mod -- +-module foo +- +-go 1.12 +--- a.go -- +-package main +-func f() {} +-` +- const ntest = `package main +-func TestZ(t *testing.T) { +- f() +-} +-` +- const want = `package main +- +-import "testing" +- +-func TestZ(t *testing.T) { +- f() +-} +-` +- +- // it was returning +- // "package main\nimport \"testing\"\npackage main..." +- Run(t, needs, func(t *testing.T, env *Env) { +- env.CreateBuffer("a_test.go", ntest) +- env.SaveBuffer("a_test.go") +- got := env.BufferText("a_test.go") +- if want != got { +- t.Errorf("got\n%q, wanted\n%q", got, want) +- } +- }) +-} +- +-func TestIssue59124(t *testing.T) { +- const stuff = ` +--- go.mod -- +-module foo +-go 1.19 +--- a.go -- +-//line foo.y:102 +-package main +- +-import "fmt" +- +-//this comment is necessary for failure +-func _() { +- fmt.Println("hello") +-} +-` +- Run(t, stuff, func(t *testing.T, env *Env) { +- env.OpenFile("a.go") +- was := env.BufferText("a.go") +- env.AfterChange(NoDiagnostics()) +- env.OrganizeImports("a.go") +- is := env.BufferText("a.go") +- if diff := compare.Text(was, is); diff != "" { +- t.Errorf("unexpected diff after organizeImports:\n%s", diff) +- } +- }) +-} +- +-func TestIssue66407(t *testing.T) { +- const files = ` +--- go.mod -- +-module foo +-go 1.21 +--- a.go -- +-package foo +- +-func f(x float64) float64 { +- return x + rand.Float64() +-} +--- b.go -- +-package foo +- +-func _() { +- _ = rand.Int63() +-} +-` +- WithOptions(Modes(Default)). +- Run(t, files, func(t *testing.T, env *Env) { +- env.OpenFile("a.go") +- was := env.BufferText("a.go") +- env.OrganizeImports("a.go") +- is := env.BufferText("a.go") +- // expect complaint that module is before 1.22 +- env.AfterChange(Diagnostics(ForFile("a.go"))) +- diff := compare.Text(was, is) +- // check that it found the 'right' rand +- if !strings.Contains(diff, `import "math/rand/v2"`) { +- t.Errorf("expected rand/v2, got %q", diff) +- } +- env.OpenFile("b.go") +- was = env.BufferText("b.go") +- env.OrganizeImports("b.go") +- // a.go still has its module problem but b.go is fine +- env.AfterChange(Diagnostics(ForFile("a.go")), +- NoDiagnostics(ForFile("b.go"))) +- is = env.BufferText("b.go") +- diff = compare.Text(was, is) +- if !strings.Contains(diff, `import "math/rand"`) { +- t.Errorf("expected math/rand, got %q", diff) +- } +- }) +-} +- +-func TestVim1(t *testing.T) { +- const vim1 = `package main +- +-import "fmt" +- +-var foo = 1 +-var bar = 2 +- +-func main() { +- fmt.Printf("This is a test %v\n", foo) +- fmt.Printf("This is another test %v\n", foo) +- fmt.Printf("This is also a test %v\n", foo) +-} +-` +- +- // The file remains unchanged, but if there any quick fixes +- // are returned, they confuse vim (according to CL 233117). +- // Therefore check for no QuickFix CodeActions. +- Run(t, "", func(t *testing.T, env *Env) { +- env.CreateBuffer("main.go", vim1) +- env.OrganizeImports("main.go") +- +- // Assert no quick fixes. 
+- for _, act := range env.CodeActionForFile("main.go", nil) { +- if act.Kind == protocol.QuickFix { +- t.Errorf("unexpected quick fix action: %#v", act) +- } +- } +- if t.Failed() { +- got := env.BufferText("main.go") +- if got == vim1 { +- t.Errorf("no changes") +- } else { +- t.Errorf("got\n%q", got) +- t.Errorf("was\n%q", vim1) +- } +- } +- }) +-} +- +-func TestVim2(t *testing.T) { +- const vim2 = `package main +- +-import ( +- "fmt" +- +- "example.com/blah" +- +- "rubbish.com/useless" +-) +- +-func main() { +- fmt.Println(blah.Name, useless.Name) +-} +-` +- +- Run(t, "", func(t *testing.T, env *Env) { +- env.CreateBuffer("main.go", vim2) +- env.OrganizeImports("main.go") +- +- // Assert no quick fixes. +- for _, act := range env.CodeActionForFile("main.go", nil) { +- if act.Kind == protocol.QuickFix { +- t.Errorf("unexpected quick-fix action: %#v", act) +- } +- } +- }) +-} +- +-const exampleProxy = ` +--- example.com@v1.2.3/go.mod -- +-module example.com +- +-go 1.12 +--- example.com@v1.2.3/x/x.go -- +-package x +- +-const X = 1 +--- example.com@v1.2.3/y/y.go -- +-package y +- +-const Y = 2 +-` +- +-func TestGOMODCACHE(t *testing.T) { +- const files = ` +--- go.mod -- +-module mod.com +- +-go 1.12 +- +-require example.com v1.2.3 +--- main.go -- +-package main +- +-import "example.com/x" +- +-var _, _ = x.X, y.Y +-` +- modcache := t.TempDir() +- defer CleanModCache(t, modcache) +- +- opts := []RunOption{ +- EnvVars{"GOMODCACHE": modcache}, +- ProxyFiles(exampleProxy), +- WriteGoSum("."), +- } +- +- // Force go list to populate GOMODCACHE +- // so OrganizeImports can later rely on it. +- t.Run("setup", func(t *testing.T) { +- WithOptions(opts...).Run(t, files, func(t *testing.T, env *Env) {}) +- }) +- +- WithOptions(opts...).Run(t, files, func(t *testing.T, env *Env) { +- // Expect y is undefined. +- env.OpenFile("main.go") +- env.AfterChange( +- Diagnostics( +- env.AtRegexp("main.go", `y.Y`), +- WithMessage("undefined")), +- ) +- +- // Apply suggested fix via OrganizeImports. +- env.SaveBuffer("main.go") // => OrganizeImports +- env.AfterChange(NoDiagnostics(ForFile("main.go"))) +- +- // Verify that y.Y is defined within the module cache. +- loc := env.FirstDefinition(env.RegexpSearch("main.go", `y.(Y)`)) +- path := env.Sandbox.Workdir.URIToPath(loc.URI) +- if !strings.HasPrefix(path, filepath.ToSlash(modcache)) { +- t.Errorf("found module dependency outside of GOMODCACHE: got %v, wanted subdir of %v", path, filepath.ToSlash(modcache)) +- } +- }) +-} +- +-// make sure it gets the v2 +-/* marker test? +- +-Add proxy data with the special proxy/ prefix (see gopls/internal/test/marker/testdata/quickfix/unusedrequire.txt). +-Invoke the organizeImports codeaction directly (see gopls/internal/test/marker/testdata/codeaction/imports.txt, but use the edit=golden named argument instead of result= to minimize the size of the golden output. 
+-*/ +-func Test58382(t *testing.T) { +- files := `-- main.go -- +-package main +-import "fmt" +-func main() { +- fmt.Println(xurls.Relaxed().FindAllString()) +-} +--- go.mod -- +-module demo +-go 1.20 +-` +- cache := `-- mvdan.cc/xurls@v2.5.0/xurls.go -- +-package xurls +-func Relaxed() *regexp.Regexp { +-return nil +-} +--- github.com/mvdan/xurls/v2@v1.1.0/xurls.go -- +-package xurls +-func Relaxed() *regexp.Regexp { +-return nil +-} +-` +- modcache := t.TempDir() +- defer CleanModCache(t, modcache) +- mx := fake.UnpackTxt(cache) +- for k, v := range mx { +- fname := filepath.Join(modcache, k) +- dir := filepath.Dir(fname) +- os.MkdirAll(dir, 0777) // ignore error +- if err := os.WriteFile(fname, v, 0644); err != nil { +- t.Fatal(err) +- } +- } +- WithOptions( +- EnvVars{"GOMODCACHE": modcache}, +- WriteGoSum("."), +- Settings{"importsSource": settings.ImportsSourceGopls}, +- ).Run(t, files, func(t *testing.T, env *Env) { +- +- env.OpenFile("main.go") +- env.SaveBuffer("main.go") +- out := env.BufferText("main.go") +- if !strings.Contains(out, "xurls/v2") { +- t.Errorf("did not get v2 in %q", out) +- } +- }) +-} +- +-// get the version requested in the go.mod file, not /v2 +-func Test61208(t *testing.T) { +- files := `-- main.go -- +-package main +-import "fmt" +-func main() { +- fmt.Println(xurls.Relaxed().FindAllString()) +-} +--- go.mod -- +-module demo +-go 1.20 +-require github.com/mvdan/xurls v1.1.0 +-` +- cache := `-- mvdan.cc/xurls/v2@v2.5.0/a/xurls.go -- +-package xurls +-func Relaxed() *regexp.Regexp { +-return nil +-} +--- github.com/mvdan/xurls@v1.1.0/a/xurls.go -- +-package xurls +-func Relaxed() *regexp.Regexp { +-return nil +-} +-` +- modcache := t.TempDir() +- defer CleanModCache(t, modcache) +- mx := fake.UnpackTxt(cache) +- for k, v := range mx { +- fname := filepath.Join(modcache, k) +- dir := filepath.Dir(fname) +- os.MkdirAll(dir, 0777) // ignore error +- if err := os.WriteFile(fname, v, 0644); err != nil { +- t.Fatal(err) +- } +- } +- WithOptions( +- EnvVars{"GOMODCACHE": modcache}, +- WriteGoSum("."), +- ).Run(t, files, func(t *testing.T, env *Env) { +- env.OpenFile("main.go") +- env.SaveBuffer("main.go") +- out := env.BufferText("main.go") +- if !strings.Contains(out, "github.com/mvdan/xurls") { +- t.Errorf("did not get github.com/mvdan/xurls in %q", out) +- } +- }) +-} +- +-// get the version already used in the module +-func Test60663(t *testing.T) { +- files := `-- main.go -- +-package main +-import "fmt" +-func main() { +- fmt.Println(xurls.Relaxed().FindAllString()) +-} +--- go.mod -- +-module demo +-go 1.20 +--- a.go -- +-package main +-import "github.com/mvdan/xurls" +-var _ = xurls.Relaxed() +-` +- cache := `-- mvdan.cc/xurls/v2@v2.5.0/xurls.go -- +-package xurls +-func Relaxed() *regexp.Regexp { +-return nil +-} +--- github.com/mvdan/xurls@v1.1.0/xurls.go -- +-package xurls +-func Relaxed() *regexp.Regexp { +-return nil +-} +-` +- modcache := t.TempDir() +- defer CleanModCache(t, modcache) +- mx := fake.UnpackTxt(cache) +- for k, v := range mx { +- fname := filepath.Join(modcache, k) +- dir := filepath.Dir(fname) +- os.MkdirAll(dir, 0777) // ignore error +- if err := os.WriteFile(fname, v, 0644); err != nil { +- t.Fatal(err) +- } +- } +- WithOptions( +- EnvVars{"GOMODCACHE": modcache}, +- WriteGoSum("."), +- ).Run(t, files, func(t *testing.T, env *Env) { +- env.OpenFile("main.go") +- env.SaveBuffer("main.go") +- out := env.BufferText("main.go") +- if !strings.Contains(out, "github.com/mvdan/xurls") { +- t.Errorf("did not get github.com/mvdan/xurls in %q", 
out) +- } +- }) +-} +- +-// use the import from a different package in the same module +-func Test44510(t *testing.T) { +- const files = `-- go.mod -- +-module test +-go 1.19 +--- foo/foo.go -- +-package main +-import strs "strings" +-var _ = strs.Count +--- bar/bar.go -- +-package main +-var _ = strs.Builder +-` +- WithOptions( +- WriteGoSum("."), +- ).Run(t, files, func(T *testing.T, env *Env) { +- env.OpenFile("bar/bar.go") +- env.SaveBuffer("bar/bar.go") +- buf := env.BufferText("bar/bar.go") +- if !strings.Contains(buf, "strs") { +- t.Error(buf) +- } +- }) +-} +- +-func TestIssue67156(t *testing.T) { +- const files = ` +--- go.mod -- +-module mod.com/a +- +-go 1.20 +- +-require example.com v1.2.3 +- +--- main.go -- +-package main +- +-import "example.com/x" +- +-var _, _ = x.X, y.Y +-` +- modcache := t.TempDir() +- base := filepath.Base(modcache) +- defer CleanModCache(t, modcache) +- +- // Construct a very unclean module cache whose length exceeds the length of +- // the clean directory path, to reproduce the crash in golang/go#67156 +- const sep = string(filepath.Separator) +- modcache += strings.Repeat(sep+".."+sep+base, 10) +- +- opts := []RunOption{ +- EnvVars{"GOMODCACHE": modcache}, +- ProxyFiles(exampleProxy), +- WriteGoSum("."), +- } +- +- t.Run("setup", func(t *testing.T) { +- // Force go list to populate GOMODCACHE. +- WithOptions(opts...).Run(t, files, func(t *testing.T, env *Env) {}) +- +- // Update module index. +- if ix, err := modindex.Update(modcache); err != nil { +- t.Fatalf("failed to obtain updated module index: %v", err) +- } else if len(ix.Entries) != 2 { +- t.Fatalf("got %v, want 2 entries", ix) +- } +- }) +- +- WithOptions(opts...).Run(t, files, func(t *testing.T, env *Env) { +- env.OpenFile("main.go") +- env.AfterChange(Diagnostics(env.AtRegexp("main.go", `y.Y`))) +- env.SaveBuffer("main.go") // => OrganizeImports +- env.AfterChange(NoDiagnostics(ForFile("main.go"))) +- }) +-} +- +-// Tests golang/go#40685. +-func TestAcceptImportsQuickFixTestVariant(t *testing.T) { +- const pkg = ` +--- go.mod -- +-module mod.com +- +-go 1.12 +--- a/a.go -- +-package a +- +-import ( +- "fmt" +-) +- +-func _() { +- fmt.Println("") +- os.Stat("") +-} +--- a/a_test.go -- +-package a +- +-import ( +- "os" +- "testing" +-) +- +-func TestA(t *testing.T) { +- os.Stat("") +-} +-` +- Run(t, pkg, func(t *testing.T, env *Env) { +- env.OpenFile("a/a.go") +- var d protocol.PublishDiagnosticsParams +- env.AfterChange( +- Diagnostics(env.AtRegexp("a/a.go", "os.Stat")), +- ReadDiagnostics("a/a.go", &d), +- ) +- env.ApplyQuickFixes("a/a.go", d.Diagnostics) +- env.AfterChange( +- NoDiagnostics(ForFile("a/a.go")), +- ) +- }) +-} +- +-// Test of golang/go#70755 +-func TestQuickFixIssue70755(t *testing.T) { +- const files = ` +--- go.mod -- +-module mod.com +-go 1.19.0 // with go 1.23.0 this fails on some builders +--- bar/bar.go -- +-package notbar +-type NotBar struct {} +--- baz/baz.go -- +-package baz +-type Baz struct {} +--- foo/foo.go -- +-package foo +-type Foo struct { +- bar notbar.NotBar +- baz baz.Baz +-}` +- WithOptions( +- Settings{"importsSource": settings.ImportsSourceGopls}). 
+- Run(t, files, func(t *testing.T, env *Env) { +- env.OpenFile("foo/foo.go") +- var d protocol.PublishDiagnosticsParams +- env.AfterChange(ReadDiagnostics("foo/foo.go", &d)) +- env.ApplyQuickFixes("foo/foo.go", d.Diagnostics) +- // at this point 'import notbar "mod.com/bar"' has been added +- // but it's still missing the import of "mod.com/baz" +- y := env.BufferText("foo/foo.go") +- if !strings.Contains(y, `notbar "mod.com/bar"`) { +- t.Error("quick fix did not find notbar") +- } +- env.SaveBuffer("foo/foo.go") +- env.AfterChange(NoDiagnostics(ForFile("foo/foo.go"))) +- }) +-} +- +-// Test for golang/go#52784 +-func TestGoWorkImports(t *testing.T) { +- const pkg = ` +--- go.work -- +-go 1.19 +- +-use ( +- ./caller +- ./mod +-) +--- caller/go.mod -- +-module caller.com +- +-go 1.18 +- +-require mod.com v0.0.0 +- +-replace mod.com => ../mod +--- caller/caller.go -- +-package main +- +-func main() { +- a.Test() +-} +--- mod/go.mod -- +-module mod.com +- +-go 1.18 +--- mod/a/a.go -- +-package a +- +-func Test() { +-} +-` +- Run(t, pkg, func(t *testing.T, env *Env) { +- env.OpenFile("caller/caller.go") +- env.AfterChange(Diagnostics(env.AtRegexp("caller/caller.go", "a.Test"))) +- +- // Saving caller.go should trigger goimports, which should find a.Test in +- // the mod.com module, thanks to the go.work file. +- env.SaveBuffer("caller/caller.go") +- env.AfterChange(NoDiagnostics(ForFile("caller/caller.go"))) +- }) +-} +- +-// prefer the undeprecated alternative 70736 +-func TestDeprecated70736(t *testing.T) { +- t.Logf("GOOS %s, GARCH %s version %s", runtime.GOOS, runtime.GOARCH, runtime.Version()) +- files := `-- main.go -- +-package main +-func main() { +- var v = xurls.Relaxed().FindAllString() +- var w = xurls.A +-} +--- go.mod -- +-module demo +-go 1.20 +-` +- cache := `-- mvdan.cc/xurls/v2@v2.5.0/xurls.go -- +-package xurls +-// Deprecated: +-func Relaxed() *regexp.Regexp { +-return nil +-} +-var A int +--- github.com/mvdan/xurls@v1.1.0/xurls.go -- +-package xurls +-func Relaxed() *regexp.Regexp { +-return nil +-} +-var A int +-` +- modcache := t.TempDir() +- defer CleanModCache(t, modcache) +- mx := fake.UnpackTxt(cache) +- for k, v := range mx { +- fname := filepath.Join(modcache, k) +- dir := filepath.Dir(fname) +- os.MkdirAll(dir, 0777) // ignore error +- if err := os.WriteFile(fname, v, 0644); err != nil { +- t.Fatal(err) +- } +- } +- WithOptions( +- EnvVars{"GOMODCACHE": modcache}, +- WriteGoSum("."), +- Settings{"importsSource": settings.ImportsSourceGopls}, +- ).Run(t, files, func(t *testing.T, env *Env) { +- env.OpenFile("main.go") +- env.SaveBuffer("main.go") +- out := env.BufferText("main.go") +- if strings.Contains(out, "xurls/v2") { +- t.Errorf("chose deprecated v2 in %q", out) +- } +- }) +-} +- +-// Find the non-test package asked for in a test +-func TestTestImports(t *testing.T) { +- const pkg = ` +--- go.work -- +-go 1.19 +- +-use ( +- ./caller +- ./mod +- ./xxx +-) +--- caller/go.mod -- +-module caller.com +- +-go 1.18 +- +-require mod.com v0.0.0 +-require xxx.com v0.0.0 +- +-replace mod.com => ../mod +-replace xxx.com => ../xxx +--- caller/caller_test.go -- +-package main +- +-var _ = a.Test +--- xxx/go.mod -- +-module xxx.com +- +-go 1.18 +--- xxx/a/a_test.go -- +-package a +- +-func Test() { +-} +--- mod/go.mod -- +-module mod.com +- +-go 1.18 +--- mod/a/a.go -- +-package a +- +-func Test() { +-} +-` +- WithOptions(Modes(Default)).Run(t, pkg, func(t *testing.T, env *Env) { +- env.OpenFile("caller/caller_test.go") +- 
env.AfterChange(Diagnostics(env.AtRegexp("caller/caller_test.go", "a.Test"))) +- +- // Saving caller_test.go should trigger goimports, which should find a.Test in +- // the mod.com module, thanks to the go.work file. +- env.SaveBuffer("caller/caller_test.go") +- env.AfterChange(NoDiagnostics(ForFile("caller/caller_test.go"))) +- buf := env.BufferText("caller/caller_test.go") +- if !strings.Contains(buf, "mod.com/a") { +- t.Errorf("got %q, expected a mod.com/a", buf) +- } +- }) +-} +- +-// this test replaces 'package bar' with 'package foo' +-// saves the file, and then looks for the import in the main package.s +-func Test67973(t *testing.T) { +- const files = `-- go.mod -- +-module hello +-go 1.19 +--- hello.go -- +-package main +-var _ = foo.Bar +--- internal/foo/foo.go -- +-package bar +-func Bar() {} +-` +- WithOptions( +- Settings{"importsSource": settings.ImportsSourceGopls}, +- ).Run(t, files, func(t *testing.T, env *Env) { +- env.OpenFile("hello.go") +- env.AfterChange(env.DoneWithOpen()) +- env.SaveBuffer("hello.go") +- env.OpenFile("internal/foo/foo.go") +- env.RegexpReplace("internal/foo/foo.go", "bar", "foo") +- env.SaveBuffer("internal/foo/foo.go") +- env.SaveBuffer("hello.go") +- buf := env.BufferText("hello.go") +- if !strings.Contains(buf, "internal/foo") { +- t.Errorf(`expected import "hello/internal/foo" but got %q`, buf) +- } +- }) +-} +diff -urN a/gopls/internal/test/integration/misc/import_test.go b/gopls/internal/test/integration/misc/import_test.go +--- a/gopls/internal/test/integration/misc/import_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/integration/misc/import_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,127 +0,0 @@ +-// Copyright 2021 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package misc +- +-import ( +- "testing" +- +- "github.com/google/go-cmp/cmp" +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/gopls/internal/protocol/command" +- "golang.org/x/tools/gopls/internal/test/compare" +- . 
"golang.org/x/tools/gopls/internal/test/integration" +-) +- +-func TestAddImport(t *testing.T) { +- const before = `package main +- +-import "fmt" +- +-func main() { +- fmt.Println("hello world") +-} +-` +- +- const want = `package main +- +-import ( +- "bytes" +- "fmt" +-) +- +-func main() { +- fmt.Println("hello world") +-} +-` +- +- Run(t, "", func(t *testing.T, env *Env) { +- env.CreateBuffer("main.go", before) +- cmd := command.NewAddImportCommand("Add Import", command.AddImportArgs{ +- URI: env.Sandbox.Workdir.URI("main.go"), +- ImportPath: "bytes", +- }) +- env.ExecuteCommand(&protocol.ExecuteCommandParams{ +- Command: command.AddImport.String(), +- Arguments: cmd.Arguments, +- }, nil) +- got := env.BufferText("main.go") +- if got != want { +- t.Fatalf("gopls.add_import failed\n%s", compare.Text(want, got)) +- } +- }) +-} +- +-func TestListImports(t *testing.T) { +- const files = ` +--- go.mod -- +-module mod.com +- +-go 1.12 +--- foo.go -- +-package foo +-const C = 1 +--- import_strings_test.go -- +-package foo +-import ( +- x "strings" +- "testing" +-) +- +-func TestFoo(t *testing.T) {} +--- import_testing_test.go -- +-package foo +- +-import "testing" +- +-func TestFoo2(t *testing.T) {} +-` +- tests := []struct { +- filename string +- want command.ListImportsResult +- }{ +- { +- filename: "import_strings_test.go", +- want: command.ListImportsResult{ +- Imports: []command.FileImport{ +- {Name: "x", Path: "strings"}, +- {Path: "testing"}, +- }, +- PackageImports: []command.PackageImport{ +- {Path: "strings"}, +- {Path: "testing"}, +- }, +- }, +- }, +- { +- filename: "import_testing_test.go", +- want: command.ListImportsResult{ +- Imports: []command.FileImport{ +- {Path: "testing"}, +- }, +- PackageImports: []command.PackageImport{ +- {Path: "strings"}, +- {Path: "testing"}, +- }, +- }, +- }, +- } +- +- Run(t, files, func(t *testing.T, env *Env) { +- for _, tt := range tests { +- cmd := command.NewListImportsCommand("List Imports", command.URIArg{ +- URI: env.Sandbox.Workdir.URI(tt.filename), +- }) +- var result command.ListImportsResult +- env.ExecuteCommand(&protocol.ExecuteCommandParams{ +- Command: command.ListImports.String(), +- Arguments: cmd.Arguments, +- }, &result) +- if diff := cmp.Diff(tt.want, result); diff != "" { +- t.Errorf("unexpected list imports result for %q (-want +got):\n%s", tt.filename, diff) +- } +- } +- +- }) +-} +diff -urN a/gopls/internal/test/integration/misc/link_test.go b/gopls/internal/test/integration/misc/link_test.go +--- a/gopls/internal/test/integration/misc/link_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/integration/misc/link_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,215 +0,0 @@ +-// Copyright 2020 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package misc +- +-import ( +- "path/filepath" +- "slices" +- "strings" +- "testing" +- +- "golang.org/x/tools/gopls/internal/protocol" +- . 
"golang.org/x/tools/gopls/internal/test/integration" +-) +- +-func TestHoverAndDocumentLink(t *testing.T) { +- const program = ` +--- go.mod -- +-module mod.test +- +-go 1.12 +- +-require import.test v1.2.3 +- +-require replace.test v1.2.3 +-replace replace.test => replace.test v1.2.4 +- +-require replace.fixed.test v1.2.3 +-replace replace.fixed.test v1.2.3 => replace.fixed.test v1.2.4 +- +-require replace.another.test v1.2.3 +-replace replace.another.test => another.test v1.2.3 +- +- +-replace example.com/non-exist => ./ +-replace example.com/non-exist1 => ../work/ +- +--- main.go -- +-package main +- +-import "import.test/pkg" +-import "replace.test/replace" +-import "replace.fixed.test/fixed" +-import "replace.another.test/another" +- +-func main() { +- // Issue 43990: this is not a link that most users can open from an LSP +- // client: mongodb://not.a.link.com +- println(pkg.Hello) +- println(replace.Hello) +- println(fixed.Hello) +- println(another.Hello) +-}` +- +- const proxy = ` +--- import.test@v1.2.3/go.mod -- +-module import.test +- +-go 1.12 +--- import.test@v1.2.3/pkg/const.go -- +-// package documentation +-package pkg +- +- +--- replace.test@v1.2.4/go.mod -- +-module replace.test +- +-go 1.12 +--- replace.test@v1.2.4/replace/const.go -- +-package replace +- +-const Hello = "Hello" +- +--- replace.fixed.test@v1.2.4/go.mod -- +-module replace.fixed.test +- +-go 1.12 +--- replace.fixed.test@v1.2.4/fixed/const.go -- +-package fixed +- +-const Hello = "Hello" +- +--- another.test@v1.2.3/go.mod -- +-module another.test +- +-go 1.12 +--- another.test@v1.2.3/another/const.go -- +-package another +- +-const Hello = "Hello" +-` +- WithOptions( +- ProxyFiles(proxy), +- WriteGoSum("."), +- ).Run(t, program, func(t *testing.T, env *Env) { +- env.OpenFile("main.go") +- env.OpenFile("go.mod") +- +- const ( +- modImportLink = "https://pkg.go.dev/mod/import.test@v1.2.3" +- modReplaceLink = "https://pkg.go.dev/mod/replace.test@v1.2.4" +- modReplaceFixedeLink = "https://pkg.go.dev/mod/replace.fixed.test@v1.2.4" +- modAnotherLink = "https://pkg.go.dev/mod/another.test@v1.2.3" +- +- pkgImportLink = "https://pkg.go.dev/import.test@v1.2.3/pkg" +- pkgReplaceLink = "https://pkg.go.dev/replace.test@v1.2.4/replace" +- pkgReplaceFixedLink = "https://pkg.go.dev/replace.fixed.test@v1.2.4/fixed" +- pkgAnotherLink = "https://pkg.go.dev/another.test@v1.2.3/another" +- pkgDoc = "package documentation" +- ) +- +- // First, check that we get the expected links via hover and documentLink. 
+- content, _ := env.Hover(env.RegexpSearch("main.go", "pkg.Hello")) +- if content == nil || !strings.Contains(content.Value, pkgImportLink) { +- t.Errorf("hover: got %v in main.go, want contains %q", content, pkgImportLink) +- } +- content, _ = env.Hover(env.RegexpSearch("main.go", "replace.Hello")) +- if content == nil || !strings.Contains(content.Value, pkgReplaceLink) { +- t.Errorf("hover: got %v in main.go, want contains %q", content, pkgReplaceLink) +- } +- content, _ = env.Hover(env.RegexpSearch("main.go", "fixed.Hello")) +- if content == nil || !strings.Contains(content.Value, pkgReplaceFixedLink) { +- t.Errorf("hover: got %v in main.go, want contains %q", content, pkgReplaceFixedLink) +- } +- content, _ = env.Hover(env.RegexpSearch("main.go", "another.Hello")) +- if content == nil || !strings.Contains(content.Value, pkgAnotherLink) { +- t.Errorf("hover: got %v in main.go, want contains %q", content, pkgAnotherLink) +- } +- +- content, _ = env.Hover(env.RegexpSearch("go.mod", "import.test")) +- if content == nil || !strings.Contains(content.Value, pkgImportLink) { +- t.Errorf("hover: got %v in main.go, want contains %q", content, pkgImportLink) +- } +- content, _ = env.Hover(env.RegexpSearch("go.mod", "replace.test")) +- if content == nil || !strings.Contains(content.Value, pkgReplaceLink) { +- t.Errorf("hover: got %v in main.go, want contains %q", content, pkgReplaceLink) +- } +- content, _ = env.Hover(env.RegexpSearch("go.mod", "replace.fixed.test")) +- if content == nil || !strings.Contains(content.Value, pkgReplaceFixedLink) { +- t.Errorf("hover: got %v in main.go, want contains %q", content, pkgReplaceFixedLink) +- } +- content, _ = env.Hover(env.RegexpSearch("go.mod", "replace.another.test")) +- if content == nil || !strings.Contains(content.Value, pkgAnotherLink) { +- t.Errorf("hover: got %v in main.go, want contains %q", content, pkgAnotherLink) +- } +- +- getLinks := func(links []protocol.DocumentLink) []string { +- var got []string +- for i := range links { +- got = append(got, *links[i].Target) +- } +- return got +- } +- links := env.DocumentLink("main.go") +- got, want := getLinks(links), []string{ +- pkgImportLink, +- pkgReplaceLink, +- pkgReplaceFixedLink, +- pkgAnotherLink, +- } +- if !slices.Equal(got, want) { +- t.Errorf("documentLink: got links %v for main.go, want links %v", got, want) +- } +- +- links = env.DocumentLink("go.mod") +- localReplacePath := filepath.Join(env.Sandbox.Workdir.RootURI().Path(), "go.mod") +- got, want = getLinks(links), []string{ +- localReplacePath, localReplacePath, +- modImportLink, +- modReplaceLink, +- modReplaceFixedeLink, +- modAnotherLink, +- } +- if !slices.Equal(got, want) { +- t.Errorf("documentLink: got links %v for go.mod, want links %v", got, want) +- } +- +- // Then change the environment to make these links private. +- cfg := env.Editor.Config() +- cfg.Env = map[string]string{"GOPRIVATE": "import.test"} +- env.ChangeConfiguration(cfg) +- +- // Finally, verify that the links are gone. 
+- content, _ = env.Hover(env.RegexpSearch("main.go", "pkg.Hello")) +- if content == nil || strings.Contains(content.Value, pkgImportLink) { +- t.Errorf("hover: got %v in main.go, want non-empty hover without %q", content, pkgImportLink) +- } +- content, _ = env.Hover(env.RegexpSearch("go.mod", "import.test")) +- if content == nil || strings.Contains(content.Value, modImportLink) { +- t.Errorf("hover: got %v in go.mod, want contains %q", content, modImportLink) +- } +- +- links = env.DocumentLink("main.go") +- got, want = getLinks(links), []string{ +- pkgReplaceLink, +- pkgReplaceFixedLink, +- pkgAnotherLink, +- } +- if !slices.Equal(got, want) { +- t.Errorf("documentLink: got links %v for main.go, want links %v", got, want) +- } +- +- links = env.DocumentLink("go.mod") +- got, want = getLinks(links), []string{ +- localReplacePath, localReplacePath, +- modReplaceLink, +- modReplaceFixedeLink, +- modAnotherLink, +- } +- if !slices.Equal(got, want) { +- t.Errorf("documentLink: got links %v for go.mod, want links %v", got, want) +- } +- }) +-} +diff -urN a/gopls/internal/test/integration/misc/misc_test.go b/gopls/internal/test/integration/misc/misc_test.go +--- a/gopls/internal/test/integration/misc/misc_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/integration/misc/misc_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,72 +0,0 @@ +-// Copyright 2020 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package misc +- +-import ( +- "os" +- "strings" +- "testing" +- +- "golang.org/x/telemetry/counter/countertest" +- "golang.org/x/tools/gopls/internal/protocol" +- . "golang.org/x/tools/gopls/internal/test/integration" +- "golang.org/x/tools/gopls/internal/util/bug" +-) +- +-func TestMain(m *testing.M) { +- bug.PanicOnBugs = true +- tmp, err := os.MkdirTemp("", "gopls-misc-test-counters") +- if err != nil { +- panic(err) +- } +- countertest.Open(tmp) +- code := Main(m) +- os.RemoveAll(tmp) // ignore error (cleanup fails on Windows; golang/go#68243) +- os.Exit(code) +-} +- +-// TestDocumentURIFix ensures that a DocumentURI supplied by the +-// client is subject to the "fixing" operation documented at +-// [protocol.DocumentURI.UnmarshalText]. The details of the fixing are +-// tested in the protocol package; here we aim to test only that it +-// occurs at all. +-func TestDocumentURIFix(t *testing.T) { +- const mod = ` +--- go.mod -- +-module testdata +-go 1.18 +- +--- a.go -- +-package a +- +-const K = 1 +-` +- Run(t, mod, func(t *testing.T, env *Env) { +- env.OpenFile("a.go") +- loc := env.RegexpSearch("a.go", "K") +- path := strings.TrimPrefix(string(loc.URI), "file://") // (absolute) +- +- check := func() { +- t.Helper() +- t.Logf("URI = %s", loc.URI) +- content, _ := env.Hover(loc) // must succeed +- if content == nil || !strings.Contains(content.Value, "const K") { +- t.Errorf("wrong content: %#v", content) +- } +- } +- +- // Regular URI (e.g. file://$TMPDIR/TestDocumentURIFix/default/work/a.go) +- check() +- +- // URL-encoded path (e.g. contains %2F instead of last /) +- loc.URI = protocol.DocumentURI("file://" + strings.Replace(path, "/a.go", "%2Fa.go", 1)) +- check() +- +- // We intentionally do not test further cases (e.g. +- // file:// without a third slash) as it would quickly +- // get bogged down in irrelevant details of the +- // fake editor's own handling of URIs. 
+- }) +-} +diff -urN a/gopls/internal/test/integration/misc/modify_tags_test.go b/gopls/internal/test/integration/misc/modify_tags_test.go +--- a/gopls/internal/test/integration/misc/modify_tags_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/integration/misc/modify_tags_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,159 +0,0 @@ +-// Copyright 2025 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package misc +- +-import ( +- "testing" +- +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/gopls/internal/protocol/command" +- "golang.org/x/tools/gopls/internal/test/compare" +- "golang.org/x/tools/gopls/internal/test/integration" +-) +- +-func TestModifyTags(t *testing.T) { +- const files = ` +--- go.mod -- +-module example.com +- +-go 1.20 +- +--- a.go -- +-package a +- +-type A struct { +- B string +- C int +- D bool +- E string +-} +- +--- b.go -- +-package b +- +-type B struct { +- B string ` + "`json:\"b,omitempty\"`" + ` +- C int ` + "`json:\"c,omitempty\"`" + ` +- D bool ` + "`json:\"d,omitempty\"`" + ` +- E string ` + "`json:\"e,omitempty\"`" + ` +-} +- +--- c.go -- +-package c +- +-type C struct { +- B string +- C int +- D bool ` + "`json:\"d,omitempty\"`" + ` +- E string +-} +-` +- +- const wantAddTagsEntireStruct = `package a +- +-type A struct { +- B string ` + "`json:\"b,omitempty\"`" + ` +- C int ` + "`json:\"c,omitempty\"`" + ` +- D bool ` + "`json:\"d,omitempty\"`" + ` +- E string ` + "`json:\"e,omitempty\"`" + ` +-} +-` +- +- const wantRemoveTags = `package b +- +-type B struct { +- B string +- C int +- D bool ` + "`json:\"d,omitempty\"`" + ` +- E string ` + "`json:\"e,omitempty\"`" + ` +-} +-` +- +- const wantAddTagsSingleLine = `package a +- +-type A struct { +- B string +- C int +- D bool ` + "`json:\"d,omitempty\"`" + ` +- E string +-} +-` +- +- const wantRemoveOptions = `package c +- +-type C struct { +- B string +- C int +- D bool ` + "`json:\"d\"`" + ` +- E string +-} +-` +- +- tests := []struct { +- file string +- args command.ModifyTagsArgs +- want string +- }{ +- {file: "a.go", args: command.ModifyTagsArgs{ +- Range: protocol.Range{ +- Start: protocol.Position{Line: 2, Character: 0}, +- End: protocol.Position{Line: 8, Character: 0}, +- }, +- Add: "json", +- AddOptions: "json=omitempty", +- }, want: wantAddTagsEntireStruct}, +- {file: "b.go", args: command.ModifyTagsArgs{ +- Range: protocol.Range{ +- Start: protocol.Position{Line: 3, Character: 2}, +- End: protocol.Position{Line: 4, Character: 6}, +- }, +- Remove: "json", +- }, want: wantRemoveTags}, +- {file: "a.go", args: command.ModifyTagsArgs{ +- Range: protocol.Range{ +- Start: protocol.Position{Line: 5, Character: 0}, +- End: protocol.Position{Line: 5, Character: 7}, +- }, +- Add: "json", +- AddOptions: "json=omitempty", +- }, want: wantAddTagsSingleLine}, +- {file: "c.go", args: command.ModifyTagsArgs{ +- Range: protocol.Range{ +- Start: protocol.Position{Line: 3, Character: 0}, +- End: protocol.Position{Line: 7, Character: 0}, +- }, +- RemoveOptions: "json=omitempty", +- }, want: wantRemoveOptions}, +- } +- +- for _, test := range tests { +- integration.Run(t, files, func(t *testing.T, env *integration.Env) { +- uri := env.Sandbox.Workdir.URI(test.file) +- args, err := command.MarshalArgs( +- command.ModifyTagsArgs{ +- URI: uri, +- Range: test.args.Range, +- Add: test.args.Add, +- AddOptions: test.args.AddOptions, +- Remove: test.args.Remove, +- RemoveOptions: 
test.args.RemoveOptions, +- }, +- ) +- if err != nil { +- t.Fatal(err) +- } +- var res any +- env.ExecuteCommand(&protocol.ExecuteCommandParams{ +- Command: command.ModifyTags.String(), +- Arguments: args, +- }, &res) +- // Wait until we finish writing to the file. +- env.AfterChange() +- if got := env.BufferText(test.file); got != test.want { +- t.Errorf("modify_tags returned unexpected diff (-want +got):\n%s", compare.Text(test.want, got)) +- } +- }) +- } +-} +diff -urN a/gopls/internal/test/integration/misc/multiple_adhoc_test.go b/gopls/internal/test/integration/misc/multiple_adhoc_test.go +--- a/gopls/internal/test/integration/misc/multiple_adhoc_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/integration/misc/multiple_adhoc_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,44 +0,0 @@ +-// Copyright 2021 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package misc +- +-import ( +- "testing" +- +- . "golang.org/x/tools/gopls/internal/test/integration" +-) +- +-func TestMultipleAdHocPackages(t *testing.T) { +- Run(t, ` +--- a/a.go -- +-package main +- +-import "fmt" +- +-func main() { +- fmt.Println("") +-} +--- a/b.go -- +-package main +- +-import "fmt" +- +-func main() () { +- fmt.Println("") +-} +-`, func(t *testing.T, env *Env) { +- env.OpenFile("a/a.go") +- if list := env.Completion(env.RegexpSearch("a/a.go", "Println")); list == nil || len(list.Items) == 0 { +- t.Fatal("expected completions, got none") +- } +- env.OpenFile("a/b.go") +- if list := env.Completion(env.RegexpSearch("a/b.go", "Println")); list == nil || len(list.Items) == 0 { +- t.Fatal("expected completions, got none") +- } +- if list := env.Completion(env.RegexpSearch("a/a.go", "Println")); list == nil || len(list.Items) == 0 { +- t.Fatal("expected completions, got none") +- } +- }) +-} +diff -urN a/gopls/internal/test/integration/misc/package_symbols_test.go b/gopls/internal/test/integration/misc/package_symbols_test.go +--- a/gopls/internal/test/integration/misc/package_symbols_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/integration/misc/package_symbols_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,111 +0,0 @@ +-// Copyright 2025 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. 
+- +-package misc +- +-import ( +- "testing" +- +- "github.com/google/go-cmp/cmp" +- "github.com/google/go-cmp/cmp/cmpopts" +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/gopls/internal/protocol/command" +- "golang.org/x/tools/gopls/internal/test/integration" +-) +- +-func TestPackageSymbols(t *testing.T) { +- const files = ` +--- go.mod -- +-module example.com +- +-go 1.20 +- +--- a.go -- +-package a +- +-var A = "var" +-type S struct{} +- +-func (s *S) M1() {} +--- b.go -- +-package a +- +-var b = 1 +- +-func (s *S) M2() {} +- +-func (s *S) M3() {} +- +-func F() {} +--- unloaded.go -- +-//go:build unloaded +- +-package a +- +-var Unloaded int +-` +- integration.Run(t, files, func(t *testing.T, env *integration.Env) { +- aURI := env.Sandbox.Workdir.URI("a.go") +- bURI := env.Sandbox.Workdir.URI("b.go") +- args, err := command.MarshalArgs(command.PackageSymbolsArgs{ +- URI: aURI, +- }) +- if err != nil { +- t.Fatal(err) +- } +- +- var res command.PackageSymbolsResult +- env.ExecuteCommand(&protocol.ExecuteCommandParams{ +- Command: command.PackageSymbols.String(), +- Arguments: args, +- }, &res) +- +- want := command.PackageSymbolsResult{ +- PackageName: "a", +- Files: []protocol.DocumentURI{aURI, bURI}, +- Symbols: []command.PackageSymbol{ +- {Name: "A", Kind: protocol.Variable, File: 0}, +- {Name: "F", Kind: protocol.Function, File: 1}, +- {Name: "S", Kind: protocol.Struct, File: 0, Children: []command.PackageSymbol{ +- {Name: "M1", Kind: protocol.Method, File: 0}, +- {Name: "M2", Kind: protocol.Method, File: 1}, +- {Name: "M3", Kind: protocol.Method, File: 1}, +- }}, +- {Name: "b", Kind: protocol.Variable, File: 1}, +- }, +- } +- ignore := cmpopts.IgnoreFields(command.PackageSymbol{}, "Range", "SelectionRange", "Detail") +- if diff := cmp.Diff(want, res, ignore); diff != "" { +- t.Errorf("package_symbols returned unexpected diff (-want +got):\n%s", diff) +- } +- +- for file, want := range map[string]command.PackageSymbolsResult{ +- "go.mod": {}, +- "unloaded.go": { +- PackageName: "a", +- Files: []protocol.DocumentURI{env.Sandbox.Workdir.URI("unloaded.go")}, +- Symbols: []command.PackageSymbol{ +- {Name: "Unloaded", Kind: protocol.Variable, File: 0}, +- }, +- }, +- } { +- uri := env.Sandbox.Workdir.URI(file) +- args, err := command.MarshalArgs(command.PackageSymbolsArgs{ +- URI: uri, +- }) +- if err != nil { +- t.Fatal(err) +- } +- var res command.PackageSymbolsResult +- env.ExecuteCommand(&protocol.ExecuteCommandParams{ +- Command: command.PackageSymbols.String(), +- Arguments: args, +- }, &res) +- +- if diff := cmp.Diff(want, res, ignore); diff != "" { +- t.Errorf("package_symbols returned unexpected diff (-want +got):\n%s", diff) +- } +- } +- }) +-} +diff -urN a/gopls/internal/test/integration/misc/prompt_test.go b/gopls/internal/test/integration/misc/prompt_test.go +--- a/gopls/internal/test/integration/misc/prompt_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/integration/misc/prompt_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,501 +0,0 @@ +-// Copyright 2023 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. 
+- +-package misc +- +-import ( +- "fmt" +- "os" +- "path/filepath" +- "regexp" +- "strconv" +- "testing" +- "time" +- +- "github.com/google/go-cmp/cmp" +- "golang.org/x/telemetry/counter" +- "golang.org/x/telemetry/counter/countertest" +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/gopls/internal/protocol/command" +- "golang.org/x/tools/gopls/internal/server" +- . "golang.org/x/tools/gopls/internal/test/integration" +-) +- +-// Test prompt file in old and new formats are handled as expected. +-func TestTelemetryPrompt_PromptFile(t *testing.T) { +- const src = ` +--- go.mod -- +-module mod.com +- +-go 1.12 +--- main.go -- +-package main +- +-func main() {} +-` +- +- defaultTelemetryStartTime := "1714521600" // 2024-05-01 +- defaultToken := "7" +- samplesPerMille := "500" +- +- testCases := []struct { +- name, in, want string +- wantPrompt bool +- }{ +- { +- name: "empty", +- in: "", +- want: "failed 1 1714521600 7", +- wantPrompt: true, +- }, +- { +- name: "v0.15-format/invalid", +- in: "pending", +- want: "failed 1 1714521600 7", +- wantPrompt: true, +- }, +- { +- name: "v0.15-format/pPending", +- in: "pending 1", +- want: "failed 2 1714521600 7", +- wantPrompt: true, +- }, +- { +- name: "v0.15-format/pPending", +- in: "failed 1", +- want: "failed 2 1714521600 7", +- wantPrompt: true, +- }, +- { +- name: "v0.15-format/pYes", +- in: "yes 1", +- want: "yes 1", // untouched since short-circuited +- }, +- { +- name: "v0.16-format/pNotReady", +- in: "- 0 1714521600 1000", +- want: "- 0 1714521600 1000", +- }, +- { +- name: "v0.16-format/pPending", +- in: "pending 1 1714521600 1", +- want: "failed 2 1714521600 1", +- wantPrompt: true, +- }, +- { +- name: "v0.16-format/pFailed", +- in: "failed 2 1714521600 1", +- want: "failed 3 1714521600 1", +- wantPrompt: true, +- }, +- { +- name: "v0.16-format/invalid", +- in: "xxx 0 12345 678", +- want: "failed 1 1714521600 7", +- wantPrompt: true, +- }, +- { +- name: "v0.16-format/extra", +- in: "- 0 1714521600 1000 7777 xxx", +- want: "- 0 1714521600 1000", // drop extra +- }, +- } +- for _, tc := range testCases { +- t.Run(tc.name, func(t *testing.T) { +- modeFile := filepath.Join(t.TempDir(), "mode") +- goplsConfigDir := t.TempDir() +- promptDir := filepath.Join(goplsConfigDir, "prompt") +- promptFile := filepath.Join(promptDir, "telemetry") +- +- if err := os.MkdirAll(promptDir, 0777); err != nil { +- t.Fatal(err) +- } +- if err := os.WriteFile(promptFile, []byte(tc.in), 0666); err != nil { +- t.Fatal(err) +- } +- WithOptions( +- Modes(Default), // no need to run this in all modes +- EnvVars{ +- server.GoplsConfigDirEnvvar: goplsConfigDir, +- server.FakeTelemetryModefileEnvvar: modeFile, +- server.GoTelemetryGoplsClientStartTimeEnvvar: defaultTelemetryStartTime, +- server.GoTelemetryGoplsClientTokenEnvvar: defaultToken, +- server.FakeSamplesPerMille: samplesPerMille, +- }, +- Settings{ +- "telemetryPrompt": true, +- }, +- ).Run(t, src, func(t *testing.T, env *Env) { +- expectation := ShownMessageRequest(".*Would you like to enable Go telemetry?") +- if !tc.wantPrompt { +- expectation = Not(expectation) +- } +- env.OnceMet( +- CompletedWork(server.TelemetryPromptWorkTitle, 1, true), +- expectation, +- ) +- if got, err := os.ReadFile(promptFile); err != nil || string(got) != tc.want { +- t.Fatalf("(%q) -> (%q, %v), want %q", tc.in, got, err, tc.want) +- } +- }) +- }) +- } +-} +- +-// Test that gopls prompts for telemetry only when it is supposed to. 
+-func TestTelemetryPrompt_Conditions_Mode(t *testing.T) { +- const src = ` +--- go.mod -- +-module mod.com +- +-go 1.12 +--- main.go -- +-package main +- +-func main() { +-} +-` +- +- for _, enabled := range []bool{true, false} { +- t.Run(fmt.Sprintf("telemetryPrompt=%v", enabled), func(t *testing.T) { +- for _, initialMode := range []string{"", "local", "off", "on"} { +- t.Run(fmt.Sprintf("initial_mode=%s", initialMode), func(t *testing.T) { +- modeFile := filepath.Join(t.TempDir(), "mode") +- if initialMode != "" { +- if err := os.WriteFile(modeFile, []byte(initialMode), 0666); err != nil { +- t.Fatal(err) +- } +- } +- telemetryStartTime := time.Now().Add(-8 * 24 * time.Hour) // telemetry started a while ago +- WithOptions( +- Modes(Default), // no need to run this in all modes +- EnvVars{ +- server.GoplsConfigDirEnvvar: t.TempDir(), +- server.FakeTelemetryModefileEnvvar: modeFile, +- server.GoTelemetryGoplsClientStartTimeEnvvar: strconv.FormatInt(telemetryStartTime.Unix(), 10), +- server.GoTelemetryGoplsClientTokenEnvvar: "1", // always sample because samplingPerMille >= 1. +- }, +- Settings{ +- "telemetryPrompt": enabled, +- }, +- ).Run(t, src, func(t *testing.T, env *Env) { +- wantPrompt := enabled && (initialMode == "" || initialMode == "local") +- expectation := ShownMessageRequest(".*Would you like to enable Go telemetry?") +- if !wantPrompt { +- expectation = Not(expectation) +- } +- env.OnceMet( +- CompletedWork(server.TelemetryPromptWorkTitle, 1, true), +- expectation, +- ) +- }) +- }) +- } +- }) +- } +-} +- +-// Test that gopls prompts for telemetry only after instrumenting for a while, and +-// when the token is within the range for sample. +-func TestTelemetryPrompt_Conditions_StartTimeAndSamplingToken(t *testing.T) { +- const src = ` +--- go.mod -- +-module mod.com +- +-go 1.12 +--- main.go -- +-package main +- +-func main() { +-} +-` +- day := 24 * time.Hour +- samplesPerMille := 50 +- for _, token := range []int{1, samplesPerMille, samplesPerMille + 1} { +- wantSampled := token <= samplesPerMille +- t.Run(fmt.Sprintf("to_sample=%t/tokens=%d", wantSampled, token), func(t *testing.T) { +- for _, elapsed := range []time.Duration{8 * day, 1 * day, 0} { +- telemetryStartTimeOrEmpty := "" +- if elapsed > 0 { +- telemetryStartTimeOrEmpty = strconv.FormatInt(time.Now().Add(-elapsed).Unix(), 10) +- } +- t.Run(fmt.Sprintf("elapsed=%s", elapsed), func(t *testing.T) { +- modeFile := filepath.Join(t.TempDir(), "mode") +- WithOptions( +- Modes(Default), // no need to run this in all modes +- EnvVars{ +- server.GoplsConfigDirEnvvar: t.TempDir(), +- server.FakeTelemetryModefileEnvvar: modeFile, +- server.GoTelemetryGoplsClientStartTimeEnvvar: telemetryStartTimeOrEmpty, +- server.GoTelemetryGoplsClientTokenEnvvar: strconv.Itoa(token), +- server.FakeSamplesPerMille: strconv.Itoa(samplesPerMille), // want token ∈ [1, 50] is always sampled. +- }, +- Settings{ +- "telemetryPrompt": true, +- }, +- ).Run(t, src, func(t *testing.T, env *Env) { +- wantPrompt := wantSampled && elapsed > 7*day +- expectation := ShownMessageRequest(".*Would you like to enable Go telemetry?") +- if !wantPrompt { +- expectation = Not(expectation) +- } +- env.OnceMet( +- CompletedWork(server.TelemetryPromptWorkTitle, 1, true), +- expectation, +- ) +- }) +- }) +- } +- }) +- } +-} +- +-// Test that responding to the telemetry prompt results in the expected state. 
+-func TestTelemetryPrompt_Response(t *testing.T) { +- if !countertest.SupportedPlatform { +- t.Skip("requires counter support") +- } +- +- const src = ` +--- go.mod -- +-module mod.com +- +-go 1.12 +--- main.go -- +-package main +- +-func main() { +-} +-` +- +- var ( +- acceptanceCounter = "gopls/telemetryprompt/accepted" +- declinedCounter = "gopls/telemetryprompt/declined" +- attempt1Counter = "gopls/telemetryprompt/attempts:1" +- allCounters = []string{acceptanceCounter, declinedCounter, attempt1Counter} +- ) +- +- // To avoid (but not prevent) the flakes encountered in golang/go#68659, we +- // need to perform our first read before starting to increment counters. +- // +- // ReadCounter checks to see if the counter file needs to be rotated before +- // reading. When files are rotated, all previous counts are lost. Calling +- // ReadCounter here reduces the window for a flake due to this rotation (the +- // file was originally was located during countertest.Open in TestMain). +- // +- // golang/go#71590 tracks the larger problems with the countertest library. +- // +- // (The counter name below is arbitrary.) +- _, _ = countertest.ReadCounter(counter.New("issue68659")) +- +- // We must increment counters in order for the initial reads below to +- // succeed. +- // +- // TODO(rfindley): ReadCounter should simply return 0 for uninitialized +- // counters. +- for _, name := range allCounters { +- counter.New(name).Inc() +- } +- +- readCounts := func(t *testing.T) map[string]uint64 { +- t.Helper() +- counts := make(map[string]uint64) +- for _, name := range allCounters { +- count, err := countertest.ReadCounter(counter.New(name)) +- if err != nil { +- t.Fatalf("ReadCounter(%q) failed: %v", name, err) +- } +- counts[name] = count +- } +- return counts +- } +- +- tests := []struct { +- name string // subtest name +- response string // response to choose for the telemetry dialog +- wantMode string // resulting telemetry mode +- wantMsg string // substring contained in the follow-up popup (if empty, no popup is expected) +- wantInc uint64 // expected 'prompt accepted' counter increment +- wantCounts map[string]uint64 +- }{ +- {"yes", server.TelemetryYes, "on", "uploading is now enabled", 1, map[string]uint64{ +- acceptanceCounter: 1, +- declinedCounter: 0, +- attempt1Counter: 1, +- }}, +- {"no", server.TelemetryNo, "", "", 0, map[string]uint64{ +- acceptanceCounter: 0, +- declinedCounter: 1, +- attempt1Counter: 1, +- }}, +- {"empty", "", "", "", 0, map[string]uint64{ +- acceptanceCounter: 0, +- declinedCounter: 0, +- attempt1Counter: 1, +- }}, +- } +- +- for _, test := range tests { +- t.Run(test.name, func(t *testing.T) { +- initialCounts := readCounts(t) +- modeFile := filepath.Join(t.TempDir(), "mode") +- telemetryStartTime := time.Now().Add(-8 * 24 * time.Hour) +- msgRE := regexp.MustCompile(".*Would you like to enable Go telemetry?") +- respond := func(m *protocol.ShowMessageRequestParams) (*protocol.MessageActionItem, error) { +- if msgRE.MatchString(m.Message) { +- for _, item := range m.Actions { +- if item.Title == test.response { +- return &item, nil +- } +- } +- if test.response != "" { +- t.Errorf("action item %q not found", test.response) +- } +- } +- return nil, nil +- } +- WithOptions( +- Modes(Default), // no need to run this in all modes +- EnvVars{ +- server.GoplsConfigDirEnvvar: t.TempDir(), +- server.FakeTelemetryModefileEnvvar: modeFile, +- server.GoTelemetryGoplsClientStartTimeEnvvar: strconv.FormatInt(telemetryStartTime.Unix(), 10), +- 
server.GoTelemetryGoplsClientTokenEnvvar: "1", // always sample because samplingPerMille >= 1. +- }, +- Settings{ +- "telemetryPrompt": true, +- }, +- MessageResponder(respond), +- ).Run(t, src, func(t *testing.T, env *Env) { +- var postConditions []Expectation +- if test.wantMsg != "" { +- postConditions = append(postConditions, ShownMessage(test.wantMsg)) +- } +- env.OnceMet( +- CompletedWork(server.TelemetryPromptWorkTitle, 1, true), +- postConditions..., +- ) +- gotMode := "" +- if contents, err := os.ReadFile(modeFile); err == nil { +- gotMode = string(contents) +- } else if !os.IsNotExist(err) { +- t.Fatal(err) +- } +- if gotMode != test.wantMode { +- t.Errorf("after prompt, mode=%s, want %s", gotMode, test.wantMode) +- } +- +- // We increment the acceptance counter when checking the prompt file +- // before prompting, so start a second, transient gopls session and +- // verify that the acceptance counter is incremented. +- env2 := ConnectGoplsEnv(t, env.Ctx, env.Sandbox, env.Editor.Config(), env.Server) +- env2.Await(CompletedWork(server.TelemetryPromptWorkTitle, 1, true)) +- if err := env2.Editor.Close(env2.Ctx); err != nil { +- t.Errorf("closing second editor: %v", err) +- } +- +- gotCounts := readCounts(t) +- for k := range gotCounts { +- gotCounts[k] -= initialCounts[k] +- } +- if diff := cmp.Diff(test.wantCounts, gotCounts); diff != "" { +- t.Errorf("counter mismatch (-want +got):\n%s", diff) +- } +- }) +- }) +- } +-} +- +-// Test that we stop asking about telemetry after the user ignores the question +-// 5 times. +-func TestTelemetryPrompt_GivingUp(t *testing.T) { +- const src = ` +--- go.mod -- +-module mod.com +- +-go 1.12 +--- main.go -- +-package main +- +-func main() { +-} +-` +- +- // For this test, we want to share state across gopls sessions. +- modeFile := filepath.Join(t.TempDir(), "mode") +- telemetryStartTime := time.Now().Add(-30 * 24 * time.Hour) +- configDir := t.TempDir() +- +- const maxPrompts = 5 // internal prompt limit defined by gopls +- +- for i := range maxPrompts + 1 { +- WithOptions( +- Modes(Default), // no need to run this in all modes +- EnvVars{ +- server.GoplsConfigDirEnvvar: configDir, +- server.FakeTelemetryModefileEnvvar: modeFile, +- server.GoTelemetryGoplsClientStartTimeEnvvar: strconv.FormatInt(telemetryStartTime.Unix(), 10), +- server.GoTelemetryGoplsClientTokenEnvvar: "1", // always sample because samplingPerMille >= 1. +- }, +- Settings{ +- "telemetryPrompt": true, +- }, +- ).Run(t, src, func(t *testing.T, env *Env) { +- wantPrompt := i < maxPrompts +- expectation := ShownMessageRequest(".*Would you like to enable Go telemetry?") +- if !wantPrompt { +- expectation = Not(expectation) +- } +- env.OnceMet( +- CompletedWork(server.TelemetryPromptWorkTitle, 1, true), +- expectation, +- ) +- }) +- } +-} +- +-// Test that gopls prompts for telemetry only when it is supposed to. +-func TestTelemetryPrompt_Conditions_Command(t *testing.T) { +- const src = ` +--- go.mod -- +-module mod.com +- +-go 1.12 +--- main.go -- +-package main +- +-func main() { +-} +-` +- modeFile := filepath.Join(t.TempDir(), "mode") +- telemetryStartTime := time.Now().Add(-8 * 24 * time.Hour) +- WithOptions( +- Modes(Default), // no need to run this in all modes +- EnvVars{ +- server.GoplsConfigDirEnvvar: t.TempDir(), +- server.FakeTelemetryModefileEnvvar: modeFile, +- server.GoTelemetryGoplsClientStartTimeEnvvar: fmt.Sprintf("%d", telemetryStartTime.Unix()), +- server.GoTelemetryGoplsClientTokenEnvvar: "1", // always sample because samplingPerMille >= 1. 
+- }, +- Settings{ +- // off because we are testing +- // if we can trigger the prompt with command. +- "telemetryPrompt": false, +- }, +- ).Run(t, src, func(t *testing.T, env *Env) { +- cmd := command.NewMaybePromptForTelemetryCommand("prompt") +- var err error +- env.ExecuteCommand(&protocol.ExecuteCommandParams{ +- Command: cmd.Command, +- }, &err) +- if err != nil { +- t.Fatal(err) +- } +- expectation := ShownMessageRequest(".*Would you like to enable Go telemetry?") +- env.OnceMet( +- CompletedWork(server.TelemetryPromptWorkTitle, 2, true), +- expectation, +- ) +- }) +-} +diff -urN a/gopls/internal/test/integration/misc/references_test.go b/gopls/internal/test/integration/misc/references_test.go +--- a/gopls/internal/test/integration/misc/references_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/integration/misc/references_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,572 +0,0 @@ +-// Copyright 2020 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package misc +- +-import ( +- "fmt" +- "os" +- "path/filepath" +- "reflect" +- "sort" +- "strings" +- "testing" +- +- "github.com/google/go-cmp/cmp" +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/gopls/internal/test/integration" +- . "golang.org/x/tools/gopls/internal/test/integration" +-) +- +-func TestStdlibReferences(t *testing.T) { +- const files = ` +--- go.mod -- +-module mod.com +- +-go 1.12 +--- main.go -- +-package main +- +-import "fmt" +- +-func main() { +- fmt.Print() +-} +-` +- +- Run(t, files, func(t *testing.T, env *Env) { +- env.OpenFile("main.go") +- loc := env.FirstDefinition(env.RegexpSearch("main.go", `fmt.(Print)`)) +- env.OpenFile(env.Sandbox.Workdir.URIToPath(loc.URI)) +- refs, err := env.Editor.References(env.Ctx, loc) +- if err != nil { +- t.Fatal(err) +- } +- if len(refs) != 2 { +- // TODO(adonovan): make this assertion less maintainer-hostile. +- t.Fatalf("got %v reference(s), want 2", len(refs)) +- } +- // The first reference is guaranteed to be the definition. +- if got, want := refs[1].URI, env.Sandbox.Workdir.URI("main.go"); got != want { +- t.Errorf("found reference in %v, wanted %v", got, want) +- } +- }) +-} +- +-// This is a regression test for golang/go#48400 (a panic). +-func TestReferencesOnErrorMethod(t *testing.T) { +- // Ideally this would actually return the correct answer, +- // instead of merely failing gracefully. +- const files = ` +--- go.mod -- +-module mod.com +- +-go 1.12 +--- main.go -- +-package main +- +-type t interface { +- error +-} +- +-type s struct{} +- +-func (*s) Error() string { +- return "" +-} +- +-func _() { +- var s s +- _ = s.Error() +-} +-` +- Run(t, files, func(t *testing.T, env *Env) { +- env.OpenFile("main.go") +- loc := env.FirstDefinition(env.RegexpSearch("main.go", `Error`)) +- refs, err := env.Editor.References(env.Ctx, loc) +- if err != nil { +- t.Fatalf("references on (*s).Error failed: %v", err) +- } +- // TODO(adonovan): this test is crying out for marker support in integration tests. 
+- var buf strings.Builder +- for _, ref := range refs { +- fmt.Fprintf(&buf, "%s %s\n", env.Sandbox.Workdir.URIToPath(ref.URI), ref.Range) +- } +- got := buf.String() +- want := "main.go 8:10-8:15\n" + // (*s).Error decl +- "main.go 14:7-14:12\n" // s.Error() call +- if diff := cmp.Diff(want, got); diff != "" { +- t.Errorf("unexpected references on (*s).Error (-want +got):\n%s", diff) +- } +- }) +-} +- +-func TestDefsRefsBuiltins(t *testing.T) { +- // TODO(adonovan): add unsafe.{SliceData,String,StringData} in later go versions. +- const files = ` +--- go.mod -- +-module example.com +-go 1.16 +- +--- a.go -- +-package a +- +-import "unsafe" +- +-const _ = iota +-var _ error +-var _ int +-var _ = append() +-var _ = unsafe.Pointer(nil) +-var _ = unsafe.Add(nil, nil) +-var _ = unsafe.Sizeof(0) +-var _ = unsafe.Alignof(0) +-var _ = unsafe.Slice(nil, 0) +-` +- +- Run(t, files, func(t *testing.T, env *Env) { +- env.OpenFile("a.go") +- for name := range strings.FieldsSeq( +- "iota error int nil append iota Pointer Sizeof Alignof Add Slice") { +- loc := env.RegexpSearch("a.go", `\b`+name+`\b`) +- +- // definition -> {builtin,unsafe}.go +- def := env.FirstDefinition(loc) +- if (!strings.HasSuffix(string(def.URI), "builtin.go") && +- !strings.HasSuffix(string(def.URI), "unsafe.go")) || +- def.Range.Start.Line == 0 { +- t.Errorf("definition(%q) = %v, want {builtin,unsafe}.go", +- name, def) +- } +- +- // "references to (builtin "Foo"|unsafe.Foo) are not supported" +- _, err := env.Editor.References(env.Ctx, loc) +- gotErr := fmt.Sprint(err) +- if !strings.Contains(gotErr, "references to") || +- !strings.Contains(gotErr, "not supported") || +- !strings.Contains(gotErr, name) { +- t.Errorf("references(%q) error: got %q, want %q", +- name, gotErr, "references to ... are not supported") +- } +- } +- }) +-} +- +-func TestPackageReferences(t *testing.T) { +- tests := []struct { +- packageName string +- wantRefCount int +- wantFiles []string +- }{ +- { +- "lib1", +- 3, +- []string{ +- "main.go", +- "lib1/a.go", +- "lib1/b.go", +- }, +- }, +- { +- "lib2", +- 2, +- []string{ +- "main.go", +- "lib2/a.go", +- }, +- }, +- } +- +- const files = ` +--- go.mod -- +-module mod.com +- +-go 1.18 +--- lib1/a.go -- +-package lib1 +- +-const A = 1 +- +--- lib1/b.go -- +-package lib1 +- +-const B = 1 +- +--- lib2/a.go -- +-package lib2 +- +-const C = 1 +- +--- main.go -- +-package main +- +-import ( +- "mod.com/lib1" +- "mod.com/lib2" +-) +- +-func main() { +- println("Hello") +-} +-` +- Run(t, files, func(t *testing.T, env *Env) { +- for _, test := range tests { +- file := fmt.Sprintf("%s/a.go", test.packageName) +- env.OpenFile(file) +- loc := env.RegexpSearch(file, test.packageName) +- refs := env.References(loc) +- if len(refs) != test.wantRefCount { +- // TODO(adonovan): make this assertion less maintainer-hostile. +- t.Fatalf("got %v reference(s), want %d", len(refs), test.wantRefCount) +- } +- var refURIs []string +- for _, ref := range refs { +- refURIs = append(refURIs, string(ref.URI)) +- } +- for _, base := range test.wantFiles { +- hasBase := false +- for _, ref := range refURIs { +- if strings.HasSuffix(ref, base) { +- hasBase = true +- break +- } +- } +- if !hasBase { +- t.Fatalf("got [%v], want reference ends with \"%v\"", strings.Join(refURIs, ","), base) +- } +- } +- } +- }) +-} +- +-// Test for golang/go#43144. +-// +-// Verify that we search for references and implementations in intermediate +-// test variants. 
+-func TestReferencesInTestVariants(t *testing.T) { +- const files = ` +--- go.mod -- +-module foo.mod +- +-go 1.12 +--- foo/foo.go -- +-package foo +- +-import "foo.mod/bar" +- +-const Foo = 42 +- +-type T int +-type InterfaceM interface{ M() } +-type InterfaceF interface{ F() } +- +-func _() { +- _ = bar.Blah +-} +- +--- foo/foo_test.go -- +-package foo +- +-type Fer struct{} +-func (Fer) F() {} +- +--- bar/bar.go -- +-package bar +- +-var Blah = 123 +- +--- bar/bar_test.go -- +-package bar +- +-type Mer struct{} +-func (Mer) M() {} +- +-func TestBar() { +- _ = Blah +-} +--- bar/bar_x_test.go -- +-package bar_test +- +-import ( +- "foo.mod/bar" +- "foo.mod/foo" +-) +- +-type Mer struct{} +-func (Mer) M() {} +- +-func _() { +- _ = bar.Blah +- _ = foo.Foo +-} +-` +- +- Run(t, files, func(t *testing.T, env *Env) { +- env.OpenFile("foo/foo.go") +- +- refTests := []struct { +- re string +- wantRefs []string +- }{ +- // Blah is referenced: +- // - inside the foo.mod/bar (ordinary) package +- // - inside the foo.mod/bar [foo.mod/bar.test] test variant package +- // - from the foo.mod/bar_test [foo.mod/bar.test] x_test package +- // - from the foo.mod/foo package +- {"Blah", []string{"bar/bar.go:3", "bar/bar_test.go:7", "bar/bar_x_test.go:12", "foo/foo.go:12"}}, +- +- // Foo is referenced in bar_x_test.go via the intermediate test variant +- // foo.mod/foo [foo.mod/bar.test]. +- {"Foo", []string{"bar/bar_x_test.go:13", "foo/foo.go:5"}}, +- } +- +- for _, test := range refTests { +- loc := env.RegexpSearch("foo/foo.go", test.re) +- refs := env.References(loc) +- +- got := fileLocations(env, refs) +- if diff := cmp.Diff(test.wantRefs, got); diff != "" { +- t.Errorf("References(%q) returned unexpected diff (-want +got):\n%s", test.re, diff) +- } +- } +- +- implTests := []struct { +- re string +- wantImpls []string +- }{ +- // InterfaceM is implemented both in foo.mod/bar [foo.mod/bar.test] (which +- // doesn't import foo), and in foo.mod/bar_test [foo.mod/bar.test], which +- // imports the test variant of foo. +- {"InterfaceM", []string{"bar/bar_test.go:3", "bar/bar_x_test.go:8"}}, +- +- // A search within the ordinary package to should find implementations +- // (Fer) within the augmented test package. +- {"InterfaceF", []string{"foo/foo_test.go:3"}}, +- } +- +- for _, test := range implTests { +- loc := env.RegexpSearch("foo/foo.go", test.re) +- impls := env.Implementations(loc) +- +- got := fileLocations(env, impls) +- if diff := cmp.Diff(test.wantImpls, got); diff != "" { +- t.Errorf("Implementations(%q) returned unexpected diff (-want +got):\n%s", test.re, diff) +- } +- } +- }) +-} +- +-// This is a regression test for Issue #56169, in which interface +-// implementations in vendored modules were not found. The actual fix +-// was the same as for #55995; see TestVendoringInvalidatesMetadata. +-func TestImplementationsInVendor(t *testing.T) { +- const proxy = ` +--- other.com/b@v1.0.0/go.mod -- +-module other.com/b +-go 1.14 +- +--- other.com/b@v1.0.0/b.go -- +-package b +-type B int +-func (B) F() {} +-` +- const src = ` +--- go.mod -- +-module example.com/a +-go 1.14 +-require other.com/b v1.0.0 +- +--- a.go -- +-package a +-import "other.com/b" +-type I interface { F() } +-var _ b.B +- +-` +- WithOptions( +- WriteGoSum("."), +- ProxyFiles(proxy), +- Modes(Default), // fails in 'experimental' mode +- ).Run(t, src, func(t *testing.T, env *Env) { +- // Enable to debug go.sum mismatch, which may appear as +- // "module lookup disabled by GOPROXY=off", confusingly. 
+- if false { +- env.DumpGoSum(".") +- } +- +- checkVendor := func(locs []protocol.Location, wantVendor bool) { +- if len(locs) != 1 { +- t.Errorf("got %d locations, want 1", len(locs)) +- } else if strings.Contains(string(locs[0].URI), "/vendor/") != wantVendor { +- t.Errorf("got location %s, wantVendor=%t", locs[0], wantVendor) +- } +- } +- +- env.OpenFile("a.go") +- refLoc := env.RegexpSearch("a.go", "I") // find "I" reference +- +- // Initially, a.I has one implementation b.B in +- // the module cache, not the vendor tree. +- checkVendor(env.Implementations(refLoc), false) +- +- // Run 'go mod vendor' outside the editor. +- env.RunGoCommand("mod", "vendor") +- +- // Synchronize changes to watched files. +- env.Await(env.DoneWithChangeWatchedFiles()) +- +- // Now, b.B is found in the vendor tree. +- checkVendor(env.Implementations(refLoc), true) +- +- // Delete the vendor tree. +- if err := os.RemoveAll(env.Sandbox.Workdir.AbsPath("vendor")); err != nil { +- t.Fatal(err) +- } +- // Notify the server of the deletion. +- if err := env.Sandbox.Workdir.CheckForFileChanges(env.Ctx); err != nil { +- t.Fatal(err) +- } +- +- // Synchronize again. +- env.Await(env.DoneWithChangeWatchedFiles()) +- +- // b.B is once again defined in the module cache. +- checkVendor(env.Implementations(refLoc), false) +- }) +-} +- +-// This test can't be expressed as a marker test because the marker +-// test framework opens all files (which is a bit of a hack), creating +-// a <command-line-arguments> package for packages that otherwise +-// wouldn't be found from the go.work file. +-func TestReferencesFromWorkspacePackages59674(t *testing.T) { +- const src = ` +--- a/go.mod -- +-module example.com/a +-go 1.12 +- +--- b/go.mod -- +-module example.com/b +-go 1.12 +- +--- c/go.mod -- +-module example.com/c +-go 1.12 +- +--- lib/go.mod -- +-module example.com/lib +-go 1.12 +- +--- go.work -- +-use ./a +-use ./b +-// don't use ./c +-use ./lib +- +--- a/a.go -- +-package a +- +-import "example.com/lib" +- +-var _ = lib.F // query here +- +--- b/b.go -- +-package b +- +-import "example.com/lib" +- +-var _ = lib.F // also found by references +- +--- c/c.go -- +-package c +- +-import "example.com/lib" +- +-var _ = lib.F // this reference should not be reported +- +--- lib/lib.go -- +-package lib +- +-func F() {} // declaration +-` +- Run(t, src, func(t *testing.T, env *Env) { +- env.OpenFile("a/a.go") +- refLoc := env.RegexpSearch("a/a.go", "F") +- got := fileLocations(env, env.References(refLoc)) +- want := []string{"a/a.go:5", "b/b.go:5", "lib/lib.go:3"} +- if diff := cmp.Diff(want, got); diff != "" { +- t.Errorf("incorrect References (-want +got):\n%s", diff) +- } +- }) +-} +- +-// Test an 'implementation' query on a type that implements 'error'. +-// (Unfortunately builtin locations cannot be expressed using @loc +-// in the marker test framework.) 
+-func TestImplementationsOfError(t *testing.T) { +- const src = ` +--- go.mod -- +-module example.com +-go 1.12 +- +--- a.go -- +-package a +- +-type Error2 interface { +- Error() string +-} +- +-type MyError int +-func (MyError) Error() string { return "" } +- +-type MyErrorPtr int +-func (*MyErrorPtr) Error() string { return "" } +-` +- Run(t, src, func(t *testing.T, env *Env) { +- env.OpenFile("a.go") +- +- for _, test := range []struct { +- re string +- want []string +- }{ +- // error type +- {"Error2", []string{"a.go:10", "a.go:7", "std:builtin/builtin.go"}}, +- {"MyError", []string{"a.go:3", "std:builtin/builtin.go"}}, +- {"MyErrorPtr", []string{"a.go:3", "std:builtin/builtin.go"}}, +- // error.Error method +- {"(Error).. string", []string{"a.go:11", "a.go:8", "std:builtin/builtin.go"}}, +- {"MyError. (Error)", []string{"a.go:4", "std:builtin/builtin.go"}}, +- {"MyErrorPtr. (Error)", []string{"a.go:4", "std:builtin/builtin.go"}}, +- } { +- matchLoc := env.RegexpSearch("a.go", test.re) +- impls := env.Implementations(matchLoc) +- got := fileLocations(env, impls) +- if !reflect.DeepEqual(got, test.want) { +- t.Errorf("Implementations(%q) = %q, want %q", +- test.re, got, test.want) +- } +- } +- }) +-} +- +-// fileLocations returns a new sorted array of the +-// relative file name and line number of each location. +-// Duplicates are not removed. +-// Standard library filenames are abstracted for robustness. +-func fileLocations(env *integration.Env, locs []protocol.Location) []string { +- got := make([]string, 0, len(locs)) +- for _, loc := range locs { +- path := env.Sandbox.Workdir.URIToPath(loc.URI) // (slashified) +- if i := strings.LastIndex(path, "/src/"); i >= 0 && filepath.IsAbs(path) { +- // Absolute path with "src" segment: assume it's in GOROOT. +- // Strip directory and don't add line/column since they are fragile. +- path = "std:" + path[i+len("/src/"):] +- } else { +- path = fmt.Sprintf("%s:%d", path, loc.Range.Start.Line+1) +- } +- got = append(got, path) +- } +- sort.Strings(got) +- return got +-} +diff -urN a/gopls/internal/test/integration/misc/rename_test.go b/gopls/internal/test/integration/misc/rename_test.go +--- a/gopls/internal/test/integration/misc/rename_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/integration/misc/rename_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,921 +0,0 @@ +-// Copyright 2021 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package misc +- +-import ( +- "fmt" +- "strings" +- "testing" +- +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/gopls/internal/test/compare" +- . 
"golang.org/x/tools/gopls/internal/test/integration" +-) +- +-func TestPrepareRenameMainPackage(t *testing.T) { +- const files = ` +--- go.mod -- +-module mod.com +- +-go 1.18 +--- main.go -- +-package main +- +-import ( +- "fmt" +-) +- +-func main() { +- fmt.Println(1) +-} +-` +- const wantErr = "can't rename package \"main\"" +- Run(t, files, func(t *testing.T, env *Env) { +- env.OpenFile("main.go") +- loc := env.RegexpSearch("main.go", `main`) +- params := &protocol.PrepareRenameParams{ +- TextDocumentPositionParams: protocol.LocationTextDocumentPositionParams(loc), +- } +- _, err := env.Editor.Server.PrepareRename(env.Ctx, params) +- if err == nil { +- t.Errorf("missing can't rename package main error from PrepareRename") +- } +- +- if err.Error() != wantErr { +- t.Errorf("got %v, want %v", err.Error(), wantErr) +- } +- }) +-} +- +-// Test case for golang/go#56227 +-func TestRenameWithUnsafeSlice(t *testing.T) { +- const files = ` +--- go.mod -- +-module mod.com +- +-go 1.18 +--- p.go -- +-package p +- +-import "unsafe" +- +-type T struct{} +- +-func (T) M() {} +- +-func _() { +- x := [3]int{1, 2, 3} +- ptr := unsafe.Pointer(&x) +- _ = unsafe.Slice((*int)(ptr), 3) +-} +-` +- +- Run(t, files, func(t *testing.T, env *Env) { +- env.OpenFile("p.go") +- env.Rename(env.RegexpSearch("p.go", "M"), "N") // must not panic +- }) +-} +- +-func TestPrepareRenameWithNoPackageDeclaration(t *testing.T) { +- const files = ` +-go 1.14 +--- lib/a.go -- +-import "fmt" +- +-const A = 1 +- +-func bar() { +- fmt.Println("Bar") +-} +- +--- main.go -- +-package main +- +-import "fmt" +- +-func main() { +- fmt.Println("Hello") +-} +-` +- Run(t, files, func(t *testing.T, env *Env) { +- env.OpenFile("lib/a.go") +- err := env.Editor.Rename(env.Ctx, env.RegexpSearch("lib/a.go", "fmt"), "fmt1") +- if got, want := fmt.Sprint(err), "no identifier found"; got != want { +- t.Errorf("Rename: got error %v, want %v", got, want) +- } +- }) +-} +- +-func TestPrepareRenameFailWithUnknownModule(t *testing.T) { +- const files = ` +-go 1.14 +--- lib/a.go -- +-package lib +- +-const A = 1 +- +--- main.go -- +-package main +- +-import ( +- "mod.com/lib" +-) +- +-func main() { +- println("Hello") +-} +-` +- const wantErr = "can't rename package: missing module information for package" +- Run(t, files, func(t *testing.T, env *Env) { +- loc := env.RegexpSearch("lib/a.go", "lib") +- params := &protocol.PrepareRenameParams{ +- TextDocumentPositionParams: protocol.LocationTextDocumentPositionParams(loc), +- } +- _, err := env.Editor.Server.PrepareRename(env.Ctx, params) +- if err == nil || !strings.Contains(err.Error(), wantErr) { +- t.Errorf("missing cannot rename packages with unknown module from PrepareRename") +- } +- }) +-} +- +-// This test ensures that each import of a renamed package +-// is also renamed if it would otherwise create a conflict. +-func TestRenamePackageWithConflicts(t *testing.T) { +- const files = ` +--- go.mod -- +-module mod.com +- +-go 1.18 +--- lib/a.go -- +-package lib +- +-const A = 1 +- +--- lib/nested/a.go -- +-package nested +- +-const B = 1 +- +--- lib/x/a.go -- +-package nested1 +- +-const C = 1 +- +--- main.go -- +-package main +- +-import ( +- "mod.com/lib" +- "mod.com/lib/nested" +- nested1 "mod.com/lib/x" +-) +- +-func main() { +- println("Hello") +-} +-` +- Run(t, files, func(t *testing.T, env *Env) { +- env.OpenFile("lib/a.go") +- env.Rename(env.RegexpSearch("lib/a.go", "lib"), "nested") +- +- // Check if the new package name exists. 
+- env.RegexpSearch("nested/a.go", "package nested") +- env.RegexpSearch("main.go", `nested2 "mod.com/nested"`) +- env.RegexpSearch("main.go", "mod.com/nested/nested") +- env.RegexpSearch("main.go", `nested1 "mod.com/nested/x"`) +- }) +-} +- +-func TestRenamePackageWithAlias(t *testing.T) { +- const files = ` +--- go.mod -- +-module mod.com +- +-go 1.18 +--- lib/a.go -- +-package lib +- +-const A = 1 +- +--- lib/nested/a.go -- +-package nested +- +-const B = 1 +- +--- main.go -- +-package main +- +-import ( +- "mod.com/lib" +- lib1 "mod.com/lib/nested" +-) +- +-func main() { +- println("Hello") +-} +-` +- Run(t, files, func(t *testing.T, env *Env) { +- env.OpenFile("lib/a.go") +- env.Rename(env.RegexpSearch("lib/a.go", "lib"), "nested") +- +- // Check if the new package name exists. +- env.RegexpSearch("nested/a.go", "package nested") +- env.RegexpSearch("main.go", "mod.com/nested") +- env.RegexpSearch("main.go", `lib1 "mod.com/nested/nested"`) +- }) +-} +- +-func TestRenamePackageWithDifferentDirectoryPath(t *testing.T) { +- const files = ` +--- go.mod -- +-module mod.com +- +-go 1.18 +--- lib/a.go -- +-package lib +- +-const A = 1 +- +--- lib/nested/a.go -- +-package foo +- +-const B = 1 +- +--- main.go -- +-package main +- +-import ( +- "mod.com/lib" +- foo "mod.com/lib/nested" +-) +- +-func main() { +- println("Hello") +-} +-` +- Run(t, files, func(t *testing.T, env *Env) { +- env.OpenFile("lib/a.go") +- env.Rename(env.RegexpSearch("lib/a.go", "lib"), "nested") +- +- // Check if the new package name exists. +- env.RegexpSearch("nested/a.go", "package nested") +- env.RegexpSearch("main.go", "mod.com/nested") +- env.RegexpSearch("main.go", `foo "mod.com/nested/nested"`) +- }) +-} +- +-func TestRenamePackage(t *testing.T) { +- const files = ` +--- go.mod -- +-module mod.com +- +-go 1.18 +--- lib/a.go -- +-package lib +- +-const A = 1 +- +--- lib/b.go -- +-package lib +- +-const B = 1 +- +--- lib/nested/a.go -- +-package nested +- +-const C = 1 +- +--- main.go -- +-package main +- +-import ( +- "mod.com/lib" +- "mod.com/lib/nested" +-) +- +-func main() { +- println("Hello") +-} +-` +- Run(t, files, func(t *testing.T, env *Env) { +- env.OpenFile("lib/a.go") +- env.Rename(env.RegexpSearch("lib/a.go", "lib"), "lib1") +- +- // Check if the new package name exists. +- env.RegexpSearch("lib1/a.go", "package lib1") +- env.RegexpSearch("lib1/b.go", "package lib1") +- env.RegexpSearch("main.go", "mod.com/lib1") +- env.RegexpSearch("main.go", "mod.com/lib1/nested") +- }) +-} +- +-// Test for golang/go#47564. 
+-func TestRenameInTestVariant(t *testing.T) { +- const files = ` +--- go.mod -- +-module mod.com +- +-go 1.12 +--- stringutil/stringutil.go -- +-package stringutil +- +-func Identity(s string) string { +- return s +-} +--- stringutil/stringutil_test.go -- +-package stringutil +- +-func TestIdentity(t *testing.T) { +- if got := Identity("foo"); got != "foo" { +- t.Errorf("bad") +- } +-} +--- main.go -- +-package main +- +-import ( +- "fmt" +- +- "mod.com/stringutil" +-) +- +-func main() { +- fmt.Println(stringutil.Identity("hello world")) +-} +-` +- +- Run(t, files, func(t *testing.T, env *Env) { +- env.OpenFile("main.go") +- env.Rename(env.RegexpSearch("main.go", `stringutil\.(Identity)`), "Identityx") +- env.OpenFile("stringutil/stringutil_test.go") +- text := env.BufferText("stringutil/stringutil_test.go") +- if !strings.Contains(text, "Identityx") { +- t.Errorf("stringutil/stringutil_test.go: missing expected token `Identityx` after rename:\n%s", text) +- } +- }) +-} +- +-// This is a test that rename operation initiated by the editor function as expected. +-func TestRenameFileFromEditor(t *testing.T) { +- const files = ` +--- go.mod -- +-module mod.com +- +-go 1.16 +--- a/a.go -- +-package a +- +-const X = 1 +--- a/x.go -- +-package a +- +-const X = 2 +--- b/b.go -- +-package b +-` +- +- Run(t, files, func(t *testing.T, env *Env) { +- // Rename files and verify that diagnostics are affected accordingly. +- +- // Initially, we should have diagnostics on both X's, for their duplicate declaration. +- env.OnceMet( +- InitialWorkspaceLoad, +- Diagnostics(env.AtRegexp("a/a.go", "X")), +- Diagnostics(env.AtRegexp("a/x.go", "X")), +- ) +- +- // Moving x.go should make the diagnostic go away. +- env.RenameFile("a/x.go", "b/x.go") +- env.AfterChange( +- NoDiagnostics(ForFile("a/a.go")), // no more duplicate declarations +- Diagnostics(env.AtRegexp("b/b.go", "package")), // as package names mismatch +- ) +- +- // Renaming should also work on open buffers. +- env.OpenFile("b/x.go") +- +- // Moving x.go back to a/ should cause the diagnostics to reappear. +- env.RenameFile("b/x.go", "a/x.go") +- env.AfterChange( +- Diagnostics(env.AtRegexp("a/a.go", "X")), +- Diagnostics(env.AtRegexp("a/x.go", "X")), +- ) +- +- // Renaming the entire directory should move both the open and closed file. +- env.RenameFile("a", "x") +- env.AfterChange( +- Diagnostics(env.AtRegexp("x/a.go", "X")), +- Diagnostics(env.AtRegexp("x/x.go", "X")), +- ) +- +- // As a sanity check, verify that x/x.go is open. +- if text := env.BufferText("x/x.go"); text == "" { +- t.Fatal("got empty buffer for x/x.go") +- } +- }) +-} +- +-func TestRenamePackage_Tests(t *testing.T) { +- const files = ` +--- go.mod -- +-module mod.com +- +-go 1.18 +--- lib/a.go -- +-package lib +- +-const A = 1 +- +--- lib/b.go -- +-package lib +- +-const B = 1 +- +--- lib/a_test.go -- +-package lib_test +- +-import ( +- "mod.com/lib" +- "fmt +-) +- +-const C = 1 +- +--- lib/b_test.go -- +-package lib +- +-import ( +- "fmt +-) +- +-const D = 1 +- +--- lib/nested/a.go -- +-package nested +- +-const D = 1 +- +--- main.go -- +-package main +- +-import ( +- "mod.com/lib" +- "mod.com/lib/nested" +-) +- +-func main() { +- println("Hello") +-} +-` +- Run(t, files, func(t *testing.T, env *Env) { +- env.OpenFile("lib/a.go") +- env.Rename(env.RegexpSearch("lib/a.go", "lib"), "lib1") +- +- // Check if the new package name exists. 
+- env.RegexpSearch("lib1/a.go", "package lib1") +- env.RegexpSearch("lib1/b.go", "package lib1") +- env.RegexpSearch("main.go", "mod.com/lib1") +- env.RegexpSearch("main.go", "mod.com/lib1/nested") +- +- // Check if the test package is renamed +- env.RegexpSearch("lib1/a_test.go", "package lib1_test") +- env.RegexpSearch("lib1/b_test.go", "package lib1") +- }) +-} +- +-func TestRenamePackage_NestedModule(t *testing.T) { +- const files = ` +--- go.work -- +-go 1.18 +-use ( +- . +- ./foo/bar +- ./foo/baz +-) +- +--- go.mod -- +-module mod.com +- +-go 1.18 +- +-require ( +- mod.com/foo/bar v0.0.0 +-) +- +-replace ( +- mod.com/foo/bar => ./foo/bar +- mod.com/foo/baz => ./foo/baz +-) +--- foo/foo.go -- +-package foo +- +-import "fmt" +- +-func Bar() { +- fmt.Println("In foo before renamed to foox.") +-} +- +--- foo/bar/go.mod -- +-module mod.com/foo/bar +- +--- foo/bar/bar.go -- +-package bar +- +-const Msg = "Hi from package bar" +- +--- foo/baz/go.mod -- +-module mod.com/foo/baz +- +--- foo/baz/baz.go -- +-package baz +- +-const Msg = "Hi from package baz" +- +--- main.go -- +-package main +- +-import ( +- "fmt" +- "mod.com/foo/bar" +- "mod.com/foo/baz" +- "mod.com/foo" +-) +- +-func main() { +- foo.Bar() +- fmt.Println(bar.Msg) +- fmt.Println(baz.Msg) +-} +-` +- Run(t, files, func(t *testing.T, env *Env) { +- env.OpenFile("foo/foo.go") +- env.Rename(env.RegexpSearch("foo/foo.go", "foo"), "foox") +- +- env.RegexpSearch("foox/foo.go", "package foox") +- env.OpenFile("foox/bar/bar.go") +- env.OpenFile("foox/bar/go.mod") +- +- env.RegexpSearch("main.go", "mod.com/foo/bar") +- env.RegexpSearch("main.go", "mod.com/foox") +- env.RegexpSearch("main.go", "foox.Bar()") +- +- env.RegexpSearch("go.mod", "./foox/bar") +- env.RegexpSearch("go.mod", "./foox/baz") +- }) +-} +- +-func TestRenamePackage_DuplicateImport(t *testing.T) { +- const files = ` +--- go.mod -- +-module mod.com +- +-go 1.18 +--- lib/a.go -- +-package lib +- +-const A = 1 +- +--- lib/nested/a.go -- +-package nested +- +-const B = 1 +- +--- main.go -- +-package main +- +-import ( +- "mod.com/lib" +- lib1 "mod.com/lib" +- lib2 "mod.com/lib/nested" +-) +- +-func main() { +- println("Hello") +-} +-` +- Run(t, files, func(t *testing.T, env *Env) { +- env.OpenFile("lib/a.go") +- env.Rename(env.RegexpSearch("lib/a.go", "lib"), "nested") +- +- // Check if the new package name exists. +- env.RegexpSearch("nested/a.go", "package nested") +- env.RegexpSearch("main.go", "mod.com/nested") +- env.RegexpSearch("main.go", `lib1 "mod.com/nested"`) +- env.RegexpSearch("main.go", `lib2 "mod.com/nested/nested"`) +- }) +-} +- +-func TestRenamePackage_DuplicateBlankImport(t *testing.T) { +- const files = ` +--- go.mod -- +-module mod.com +- +-go 1.18 +--- lib/a.go -- +-package lib +- +-const A = 1 +- +--- lib/nested/a.go -- +-package nested +- +-const B = 1 +- +--- main.go -- +-package main +- +-import ( +- "mod.com/lib" +- _ "mod.com/lib" +- lib1 "mod.com/lib/nested" +-) +- +-func main() { +- println("Hello") +-} +-` +- Run(t, files, func(t *testing.T, env *Env) { +- env.OpenFile("lib/a.go") +- env.Rename(env.RegexpSearch("lib/a.go", "lib"), "nested") +- +- // Check if the new package name exists. 
+- env.RegexpSearch("nested/a.go", "package nested") +- env.RegexpSearch("main.go", "mod.com/nested") +- env.RegexpSearch("main.go", `_ "mod.com/nested"`) +- env.RegexpSearch("main.go", `lib1 "mod.com/nested/nested"`) +- }) +-} +- +-func TestRenamePackage_TestVariant(t *testing.T) { +- const files = ` +--- go.mod -- +-module mod.com +- +-go 1.12 +--- foo/foo.go -- +-package foo +- +-const Foo = 42 +--- bar/bar.go -- +-package bar +- +-import "mod.com/foo" +- +-const Bar = foo.Foo +--- bar/bar_test.go -- +-package bar +- +-import "mod.com/foo" +- +-const Baz = foo.Foo +--- testdata/bar/bar.go -- +-package bar +- +-import "mod.com/foox" +- +-const Bar = foox.Foo +--- testdata/bar/bar_test.go -- +-package bar +- +-import "mod.com/foox" +- +-const Baz = foox.Foo +-` +- Run(t, files, func(t *testing.T, env *Env) { +- env.OpenFile("foo/foo.go") +- env.Rename(env.RegexpSearch("foo/foo.go", "package (foo)"), "foox") +- +- checkTestdata(t, env) +- }) +-} +- +-func TestRenamePackage_IntermediateTestVariant(t *testing.T) { +- // In this test set up, we have the following import edges: +- // bar_test -> baz -> foo -> bar +- // bar_test -> foo -> bar +- // bar_test -> bar +- // +- // As a consequence, bar_x_test.go is in the reverse closure of both +- // `foo [bar.test]` and `baz [bar.test]`. This test confirms that we don't +- // produce duplicate edits in this case. +- const files = ` +--- go.mod -- +-module foo.mod +- +-go 1.12 +--- foo/foo.go -- +-package foo +- +-import "foo.mod/bar" +- +-const Foo = 42 +- +-const _ = bar.Bar +--- baz/baz.go -- +-package baz +- +-import "foo.mod/foo" +- +-const Baz = foo.Foo +--- bar/bar.go -- +-package bar +- +-var Bar = 123 +--- bar/bar_test.go -- +-package bar +- +-const _ = Bar +--- bar/bar_x_test.go -- +-package bar_test +- +-import ( +- "foo.mod/bar" +- "foo.mod/baz" +- "foo.mod/foo" +-) +- +-const _ = bar.Bar + baz.Baz + foo.Foo +--- testdata/foox/foo.go -- +-package foox +- +-import "foo.mod/bar" +- +-const Foo = 42 +- +-const _ = bar.Bar +--- testdata/baz/baz.go -- +-package baz +- +-import "foo.mod/foox" +- +-const Baz = foox.Foo +--- testdata/bar/bar_x_test.go -- +-package bar_test +- +-import ( +- "foo.mod/bar" +- "foo.mod/baz" +- "foo.mod/foox" +-) +- +-const _ = bar.Bar + baz.Baz + foox.Foo +-` +- +- Run(t, files, func(t *testing.T, env *Env) { +- env.OpenFile("foo/foo.go") +- env.Rename(env.RegexpSearch("foo/foo.go", "package (foo)"), "foox") +- +- checkTestdata(t, env) +- }) +-} +- +-func TestRenamePackage_Nesting(t *testing.T) { +- const files = ` +--- go.mod -- +-module mod.com +- +-go 1.18 +--- lib/a.go -- +-package lib +- +-import "mod.com/lib/nested" +- +-const A = 1 + nested.B +--- lib/nested/a.go -- +-package nested +- +-const B = 1 +--- other/other.go -- +-package other +- +-import ( +- "mod.com/lib" +- "mod.com/lib/nested" +-) +- +-const C = lib.A + nested.B +--- testdata/libx/a.go -- +-package libx +- +-import "mod.com/libx/nested" +- +-const A = 1 + nested.B +--- testdata/other/other.go -- +-package other +- +-import ( +- "mod.com/libx" +- "mod.com/libx/nested" +-) +- +-const C = libx.A + nested.B +-` +- Run(t, files, func(t *testing.T, env *Env) { +- env.OpenFile("lib/a.go") +- env.Rename(env.RegexpSearch("lib/a.go", "package (lib)"), "libx") +- +- checkTestdata(t, env) +- }) +-} +- +-func TestRenamePackage_InvalidName(t *testing.T) { +- const files = ` +--- go.mod -- +-module mod.com +- +-go 1.18 +--- lib/a.go -- +-package lib +- +-import "mod.com/lib/nested" +- +-const A = 1 + nested.B +-` +- +- Run(t, files, func(t *testing.T, env 
*Env) { +- env.OpenFile("lib/a.go") +- loc := env.RegexpSearch("lib/a.go", "package (lib)") +- +- for _, badName := range []string{"$$$", "lib_test"} { +- if err := env.Editor.Rename(env.Ctx, loc, badName); err == nil { +- t.Errorf("Rename(lib, libx) succeeded, want non-nil error") +- } +- } +- }) +-} +- +-func TestRenamePackage_InternalPackage(t *testing.T) { +- const files = ` +--- go.mod -- +-module mod.com +- +-go 1.18 +--- lib/a.go -- +-package lib +- +-import ( +- "fmt" +- "mod.com/lib/internal/x" +-) +- +-const A = 1 +- +-func print() { +- fmt.Println(x.B) +-} +- +--- lib/internal/x/a.go -- +-package x +- +-const B = 1 +- +--- main.go -- +-package main +- +-import "mod.com/lib" +- +-func main() { +- lib.print() +-} +-` +- Run(t, files, func(t *testing.T, env *Env) { +- env.OpenFile("lib/internal/x/a.go") +- env.Rename(env.RegexpSearch("lib/internal/x/a.go", "x"), "utils") +- +- // Check if the new package name exists. +- env.RegexpSearch("lib/a.go", "mod.com/lib/internal/utils") +- env.RegexpSearch("lib/a.go", "utils.B") +- +- // Check if the test package is renamed +- env.RegexpSearch("lib/internal/utils/a.go", "package utils") +- +- env.OpenFile("lib/a.go") +- env.Rename(env.RegexpSearch("lib/a.go", "lib"), "lib1") +- +- // Check if the new package name exists. +- env.RegexpSearch("lib1/a.go", "package lib1") +- env.RegexpSearch("lib1/a.go", "mod.com/lib1/internal/utils") +- env.RegexpSearch("main.go", `import "mod.com/lib1"`) +- env.RegexpSearch("main.go", "lib1.print()") +- }) +-} +- +-// checkTestdata checks that current buffer contents match their corresponding +-// expected content in the testdata directory. +-func checkTestdata(t *testing.T, env *Env) { +- t.Helper() +- files := env.ListFiles("testdata") +- if len(files) == 0 { +- t.Fatal("no files in testdata directory") +- } +- for _, file := range files { +- suffix := strings.TrimPrefix(file, "testdata/") +- got := env.BufferText(suffix) +- want := env.ReadWorkspaceFile(file) +- if diff := compare.Text(want, got); diff != "" { +- t.Errorf("Rename: unexpected buffer content for %s (-want +got):\n%s", suffix, diff) +- } +- } +-} +diff -urN a/gopls/internal/test/integration/misc/semantictokens_test.go b/gopls/internal/test/integration/misc/semantictokens_test.go +--- a/gopls/internal/test/integration/misc/semantictokens_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/integration/misc/semantictokens_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,238 +0,0 @@ +-// Copyright 2021 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package misc +- +-import ( +- "fmt" +- "strings" +- "testing" +- +- "github.com/google/go-cmp/cmp" +- "golang.org/x/tools/gopls/internal/protocol" +- . "golang.org/x/tools/gopls/internal/test/integration" +- "golang.org/x/tools/gopls/internal/test/integration/fake" +-) +- +-func TestBadURICrash_VSCodeIssue1498(t *testing.T) { +- const src = ` +--- go.mod -- +-module example.com +- +-go 1.12 +- +--- main.go -- +-package main +- +-func main() {} +- +-` +- WithOptions( +- Modes(Default), +- ).Run(t, src, func(t *testing.T, env *Env) { +- params := &protocol.SemanticTokensParams{} +- const badURI = "http://foo" +- params.TextDocument.URI = badURI +- // This call panicked in the past: golang/vscode-go#1498. +- _, err := env.Editor.Server.SemanticTokensFull(env.Ctx, params) +- +- // Requests to an invalid URI scheme now result in an LSP error. 
+- got := fmt.Sprint(err) +- want := `DocumentURI scheme is not 'file': http://foo` +- if !strings.Contains(got, want) { +- t.Errorf("SemanticTokensFull error is %v, want substring %q", got, want) +- } +- }) +-} +- +-// fix bug involving type parameters and regular parameters +-// (golang/vscode-go#2527) +-func TestSemantic_2527(t *testing.T) { +- // these are the expected types of identifiers in text order +- want := []fake.SemanticToken{ +- {Token: "package", TokenType: "keyword"}, +- {Token: "foo", TokenType: "namespace"}, +- {Token: "// comment", TokenType: "comment"}, +- {Token: "func", TokenType: "keyword"}, +- {Token: "Add", TokenType: "function", Mod: "definition signature"}, +- {Token: "T", TokenType: "typeParameter", Mod: "definition"}, +- {Token: "int", TokenType: "type", Mod: "defaultLibrary number"}, +- {Token: "target", TokenType: "parameter", Mod: "definition"}, +- {Token: "T", TokenType: "typeParameter"}, +- {Token: "l", TokenType: "parameter", Mod: "definition slice"}, +- {Token: "T", TokenType: "typeParameter"}, +- {Token: "T", TokenType: "typeParameter"}, +- {Token: "return", TokenType: "keyword"}, +- {Token: "append", TokenType: "function", Mod: "defaultLibrary"}, +- {Token: "l", TokenType: "parameter", Mod: "slice"}, +- {Token: "target", TokenType: "parameter"}, +- {Token: "for", TokenType: "keyword"}, +- {Token: "range", TokenType: "keyword"}, +- {Token: "l", TokenType: "parameter", Mod: "slice"}, +- {Token: "// test coverage", TokenType: "comment"}, +- {Token: "return", TokenType: "keyword"}, +- {Token: "nil", TokenType: "variable", Mod: "readonly defaultLibrary"}, +- } +- src := ` +--- go.mod -- +-module example.com +- +-go 1.19 +--- main.go -- +-package foo +-// comment +-func Add[T int](target T, l []T) []T { +- return append(l, target) +- for range l {} // test coverage +- return nil +-} +-` +- WithOptions( +- Modes(Default), +- Settings{"semanticTokens": true}, +- ).Run(t, src, func(t *testing.T, env *Env) { +- env.OpenFile("main.go") +- env.AfterChange( +- Diagnostics(env.AtRegexp("main.go", "for range")), +- ) +- seen := env.SemanticTokensFull("main.go") +- if x := cmp.Diff(want, seen); x != "" { +- t.Errorf("Semantic tokens do not match (-want +got):\n%s", x) +- } +- }) +- +-} +- +-// fix inconsistency in TypeParameters +-// https://github.com/golang/go/issues/57619 +-func TestSemantic_57619(t *testing.T) { +- src := ` +--- go.mod -- +-module example.com +- +-go 1.19 +--- main.go -- +-package foo +-type Smap[K int, V any] struct { +- Store map[K]V +-} +-func (s *Smap[K, V]) Get(k K) (V, bool) { +- v, ok := s.Store[k] +- return v, ok +-} +-func New[K int, V any]() Smap[K, V] { +- return Smap[K, V]{Store: make(map[K]V)} +-} +-` +- WithOptions( +- Modes(Default), +- Settings{"semanticTokens": true}, +- ).Run(t, src, func(t *testing.T, env *Env) { +- env.OpenFile("main.go") +- seen := env.SemanticTokensFull("main.go") +- for i, s := range seen { +- if (s.Token == "K" || s.Token == "V") && s.TokenType != "typeParameter" { +- t.Errorf("%d: expected K and V to be type parameters, but got %v", i, s) +- } +- } +- }) +-} +- +-func TestSemanticGoDirectives(t *testing.T) { +- src := ` +--- go.mod -- +-module example.com +- +-go 1.19 +--- main.go -- +-package foo +- +-//go:linkname now time.Now +-func now() +- +-//go:noinline +-func foo() {} +- +-// Mentioning go:noinline should not tokenize. 
+- +-//go:notadirective +-func bar() {} +-` +- want := []fake.SemanticToken{ +- {Token: "package", TokenType: "keyword"}, +- {Token: "foo", TokenType: "namespace"}, +- +- {Token: "//", TokenType: "comment"}, +- {Token: "go:linkname", TokenType: "namespace"}, +- {Token: "now time.Now", TokenType: "comment"}, +- {Token: "func", TokenType: "keyword"}, +- {Token: "now", TokenType: "function", Mod: "definition signature"}, +- +- {Token: "//", TokenType: "comment"}, +- {Token: "go:noinline", TokenType: "namespace"}, +- {Token: "func", TokenType: "keyword"}, +- {Token: "foo", TokenType: "function", Mod: "definition signature"}, +- +- {Token: "// Mentioning go:noinline should not tokenize.", TokenType: "comment"}, +- +- {Token: "//go:notadirective", TokenType: "comment"}, +- {Token: "func", TokenType: "keyword"}, +- {Token: "bar", TokenType: "function", Mod: "definition signature"}, +- } +- +- WithOptions( +- Modes(Default), +- Settings{"semanticTokens": true}, +- ).Run(t, src, func(t *testing.T, env *Env) { +- env.OpenFile("main.go") +- seen := env.SemanticTokensFull("main.go") +- if x := cmp.Diff(want, seen); x != "" { +- t.Errorf("Semantic tokens do not match (-want +got):\n%s", x) +- } +- }) +-} +- +-// Make sure no zero-length tokens occur +-func TestSemantic_65254(t *testing.T) { +- src := ` +--- go.mod -- +-module example.com +- +-go 1.21 +--- main.go -- +-package main +- +-/* a comment with an +- +-empty line +-*/ +- +-const bad = ` +- +- src += "`foo" + ` +- ` + "bar`" +- want := []fake.SemanticToken{ +- {Token: "package", TokenType: "keyword"}, +- {Token: "main", TokenType: "namespace"}, +- {Token: "/* a comment with an", TokenType: "comment"}, +- // --- Note that the zero length line does not show up +- {Token: "empty line", TokenType: "comment"}, +- {Token: "*/", TokenType: "comment"}, +- {Token: "const", TokenType: "keyword"}, +- {Token: "bad", TokenType: "variable", Mod: "definition readonly"}, +- {Token: "`foo", TokenType: "string"}, +- // --- Note the zero length line does not show up +- {Token: "\tbar`", TokenType: "string"}, +- } +- WithOptions( +- Modes(Default), +- Settings{"semanticTokens": true}, +- ).Run(t, src, func(t *testing.T, env *Env) { +- env.OpenFile("main.go") +- seen := env.SemanticTokensFull("main.go") +- if x := cmp.Diff(want, seen); x != "" { +- t.Errorf("Semantic tokens do not match (-want +got):\n%s", x) +- } +- }) +-} +diff -urN a/gopls/internal/test/integration/misc/settings_test.go b/gopls/internal/test/integration/misc/settings_test.go +--- a/gopls/internal/test/integration/misc/settings_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/integration/misc/settings_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,32 +0,0 @@ +-// Copyright 2022 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package misc +- +-import ( +- "testing" +- +- . "golang.org/x/tools/gopls/internal/test/integration" +-) +- +-func TestEmptyDirectoryFilters_Issue51843(t *testing.T) { +- const src = ` +--- go.mod -- +-module mod.com +- +-go 1.12 +--- main.go -- +-package main +- +-func main() { +-} +-` +- +- WithOptions( +- Settings{"directoryFilters": []string{""}}, +- ).Run(t, src, func(t *testing.T, env *Env) { +- // No need to do anything. Issue golang/go#51843 is triggered by the empty +- // directory filter above. 
+- }) +-} +diff -urN a/gopls/internal/test/integration/misc/shared_test.go b/gopls/internal/test/integration/misc/shared_test.go +--- a/gopls/internal/test/integration/misc/shared_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/integration/misc/shared_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,58 +0,0 @@ +-// Copyright 2020 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package misc +- +-import ( +- "testing" +- +- . "golang.org/x/tools/gopls/internal/test/integration" +-) +- +-// Smoke test that simultaneous editing sessions in the same workspace works. +-func TestSimultaneousEdits(t *testing.T) { +- const sharedProgram = ` +--- go.mod -- +-module mod +- +-go 1.12 +--- main.go -- +-package main +- +-import "fmt" +- +-func main() { +- fmt.Println("Hello World.") +-}` +- +- WithOptions( +- Modes(DefaultModes()&(Forwarded|SeparateProcess)), +- ).Run(t, sharedProgram, func(t *testing.T, env1 *Env) { +- // Create a second test session connected to the same workspace and server +- // as the first. +- env2 := ConnectGoplsEnv(t, env1.Ctx, env1.Sandbox, env1.Editor.Config(), env1.Server) +- env2.Await(InitialWorkspaceLoad) +- // In editor #1, break fmt.Println as before. +- env1.OpenFile("main.go") +- env1.RegexpReplace("main.go", "Printl(n)", "") +- // In editor #2 remove the closing brace. +- env2.OpenFile("main.go") +- env2.RegexpReplace("main.go", "\\)\n(})", "") +- +- // Now check that we got different diagnostics in each environment. +- env1.AfterChange(Diagnostics(env1.AtRegexp("main.go", "Printl"))) +- env2.AfterChange(Diagnostics(env2.AtRegexp("main.go", "$"))) +- +- // Now close editor #2, and verify that operation in editor #1 is +- // unaffected. +- if err := env2.Editor.Close(env2.Ctx); err != nil { +- t.Errorf("closing second editor: %v", err) +- } +- +- env1.RegexpReplace("main.go", "Printl", "Println") +- env1.AfterChange( +- NoDiagnostics(ForFile("main.go")), +- ) +- }) +-} +diff -urN a/gopls/internal/test/integration/misc/signature_help_test.go b/gopls/internal/test/integration/misc/signature_help_test.go +--- a/gopls/internal/test/integration/misc/signature_help_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/integration/misc/signature_help_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,69 +0,0 @@ +-// Copyright 2023 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package misc +- +-import ( +- "testing" +- +- "github.com/google/go-cmp/cmp" +- "golang.org/x/tools/gopls/internal/protocol" +- . 
"golang.org/x/tools/gopls/internal/test/integration" +-) +- +-func TestSignatureHelpInNonWorkspacePackage(t *testing.T) { +- const files = ` +--- a/go.mod -- +-module a.com +- +-go 1.18 +--- a/a/a.go -- +-package a +- +-func DoSomething(int) {} +- +-func _() { +- DoSomething() +-} +--- b/go.mod -- +-module b.com +-go 1.18 +- +-require a.com v1.0.0 +- +-replace a.com => ../a +--- b/b/b.go -- +-package b +- +-import "a.com/a" +- +-func _() { +- a.DoSomething() +-} +-` +- +- WithOptions( +- WorkspaceFolders("a"), +- ).Run(t, files, func(t *testing.T, env *Env) { +- env.OpenFile("a/a/a.go") +- env.OpenFile("b/b/b.go") +- signatureHelp := func(filename string) *protocol.SignatureHelp { +- loc := env.RegexpSearch(filename, `DoSomething\(()\)`) +- var params protocol.SignatureHelpParams +- params.TextDocument.URI = loc.URI +- params.Position = loc.Range.Start +- help, err := env.Editor.Server.SignatureHelp(env.Ctx, ¶ms) +- if err != nil { +- t.Fatal(err) +- } +- return help +- } +- ahelp := signatureHelp("a/a/a.go") +- bhelp := signatureHelp("b/b/b.go") +- +- if diff := cmp.Diff(ahelp, bhelp); diff != "" { +- t.Fatal(diff) +- } +- }) +-} +diff -urN a/gopls/internal/test/integration/misc/staticcheck_test.go b/gopls/internal/test/integration/misc/staticcheck_test.go +--- a/gopls/internal/test/integration/misc/staticcheck_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/integration/misc/staticcheck_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,119 +0,0 @@ +-// Copyright 2022 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package misc +- +-import ( +- "testing" +- +- . "golang.org/x/tools/gopls/internal/test/integration" +-) +- +-func TestStaticcheckGenerics(t *testing.T) { +- // CL 583778 causes buildir not to run on packages that use +- // range-over-func, since it might otherwise crash. But nearly +- // all packages will soon meet this description, so the +- // analyzers in this test will not run, and the test will fail. +- // TODO(adonovan): reenable once dominikh/go-tools#1494 is fixed. +- t.Skip("disabled until buildir supports range-over-func (dominikh/go-tools#1494)") +- +- const files = ` +--- go.mod -- +-module mod.com +- +-go 1.18 +--- a/a.go -- +-package a +- +-import ( +- "errors" +- "sort" +- "strings" +-) +- +-func Zero[P any]() P { +- var p P +- return p +-} +- +-type Inst[P any] struct { +- Field P +-} +- +-func testGenerics[P *T, T any](p P) { +- // Calls to instantiated functions should not break checks. +- slice := Zero[string]() +- sort.Slice(slice, func(i, j int) bool { +- return slice[i] < slice[j] +- }) +- +- // Usage of instantiated fields should not break checks. +- g := Inst[string]{"hello"} +- g.Field = strings.TrimLeft(g.Field, "12234") +- +- // Use of type parameters should not break checks. 
+- var q P +- p = q // SA4009: p is overwritten before its first use +- q = &*p // SA4001: &* will be simplified +-} +- +- +-// FooErr should be called ErrFoo (ST1012) +-var FooErr error = errors.New("foo") +-` +- +- WithOptions( +- Settings{"staticcheck": true}, +- ).Run(t, files, func(t *testing.T, env *Env) { +- env.OpenFile("a/a.go") +- env.AfterChange( +- Diagnostics(env.AtRegexp("a/a.go", "sort.Slice"), FromSource("sortslice")), +- Diagnostics(env.AtRegexp("a/a.go", "sort.Slice.(slice)"), FromSource("SA1028")), +- Diagnostics(env.AtRegexp("a/a.go", "var (FooErr)"), FromSource("ST1012")), +- Diagnostics(env.AtRegexp("a/a.go", `"12234"`), FromSource("SA1024")), +- Diagnostics(env.AtRegexp("a/a.go", "testGenerics.*(p P)"), FromSource("SA4009")), +- Diagnostics(env.AtRegexp("a/a.go", "q = (&\\*p)"), FromSource("SA4001")), +- ) +- }) +-} +- +-// Test for golang/go#56270: an analysis with related info should not panic if +-// analysis.RelatedInformation.End is not set. +-func TestStaticcheckRelatedInfo(t *testing.T) { +- // CL 583778 causes buildir not to run on packages that use +- // range-over-func, since it might otherwise crash. But nearly +- // all packages will soon meet this description, so the +- // analyzers in this test will not run, and the test will fail. +- // TODO(adonovan): reenable once dominikh/go-tools#1494 is fixed. +- t.Skip("disabled until buildir supports range-over-func (dominikh/go-tools#1494)") +- +- const files = ` +--- go.mod -- +-module mod.test +- +-go 1.18 +--- p.go -- +-package p +- +-import ( +- "fmt" +-) +- +-func Foo(enabled interface{}) { +- if enabled, ok := enabled.(bool); ok { +- } else { +- _ = fmt.Sprintf("invalid type %T", enabled) // enabled is always bool here +- } +-} +-` +- +- WithOptions( +- Settings{"staticcheck": true}, +- ).Run(t, files, func(t *testing.T, env *Env) { +- env.OpenFile("p.go") +- env.AfterChange( +- Diagnostics(env.AtRegexp("p.go", ", (enabled)"), FromSource("SA9008")), +- ) +- }) +-} +diff -urN a/gopls/internal/test/integration/misc/test_test.go b/gopls/internal/test/integration/misc/test_test.go +--- a/gopls/internal/test/integration/misc/test_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/integration/misc/test_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,82 +0,0 @@ +-// Copyright 2025 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package misc +- +-// This file defines tests of the source.test ("Run tests and +-// benchmarks") code action. +- +-import ( +- "os" +- "path/filepath" +- "testing" +- +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/gopls/internal/settings" +- . "golang.org/x/tools/gopls/internal/test/integration" +-) +- +-func TestRunTestsAndBenchmarks(t *testing.T) { +- file := filepath.Join(t.TempDir(), "out") +- os.Setenv("TESTFILE", file) // ignore error +- +- const src = ` +--- go.mod -- +-module example.com +-go 1.19 +- +--- a/a.go -- +-package a +- +--- a/a_test.go -- +-package a +- +-import ( +- "os" +- "testing" +-) +- +-func Test(t *testing.T) { +- os.WriteFile(os.Getenv("TESTFILE"), []byte("ok"), 0644) +-} +- +-` +- Run(t, src, func(t *testing.T, env *Env) { +- env.OpenFile("a/a_test.go") +- loc := env.RegexpSearch("a/a_test.go", "WriteFile") +- +- // Request code actions. (settings.GoTest is special: +- // it is returned only when explicitly requested.) 
+- actions, err := env.Editor.Server.CodeAction(env.Ctx, &protocol.CodeActionParams{ +- TextDocument: protocol.TextDocumentIdentifier{URI: loc.URI}, +- Range: loc.Range, +- Context: protocol.CodeActionContext{ +- Only: []protocol.CodeActionKind{settings.GoTest}, +- }, +- }) +- if err != nil { +- t.Fatal(err) +- } +- if len(actions) != 1 { +- t.Fatalf("CodeAction returned %#v, want one source.test action", actions) +- } +- if actions[0].Command == nil { +- t.Fatalf("CodeActions()[0] has no Command") +- } +- +- // Execute test. +- // (ExecuteCommand fails if the test fails.) +- t.Logf("Running %s...", actions[0].Title) +- env.ExecuteCommand(&protocol.ExecuteCommandParams{ +- Command: actions[0].Command.Command, +- Arguments: actions[0].Command.Arguments, +- }, nil) +- +- // Check test had expected side effect. +- data, err := os.ReadFile(file) +- if string(data) != "ok" { +- t.Fatalf("Test did not write expected content of %s; ReadFile returned (%q, %v)", file, data, err) +- } +- }) +-} +diff -urN a/gopls/internal/test/integration/misc/vendor_test.go b/gopls/internal/test/integration/misc/vendor_test.go +--- a/gopls/internal/test/integration/misc/vendor_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/integration/misc/vendor_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,98 +0,0 @@ +-// Copyright 2020 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package misc +- +-import ( +- "testing" +- +- . "golang.org/x/tools/gopls/internal/test/integration" +- +- "golang.org/x/tools/gopls/internal/protocol" +-) +- +-const basicProxy = ` +--- golang.org/x/hello@v1.2.3/go.mod -- +-module golang.org/x/hello +- +-go 1.14 +--- golang.org/x/hello@v1.2.3/hi/hi.go -- +-package hi +- +-var Goodbye error +-` +- +-func TestInconsistentVendoring(t *testing.T) { +- const pkgThatUsesVendoring = ` +--- go.mod -- +-module mod.com +- +-go 1.14 +- +-require golang.org/x/hello v1.2.3 +--- vendor/modules.txt -- +--- a/a1.go -- +-package a +- +-import "golang.org/x/hello/hi" +- +-func _() { +- _ = hi.Goodbye +- var q int // hardcode a diagnostic +-} +-` +- WithOptions( +- Modes(Default), +- ProxyFiles(basicProxy), +- WriteGoSum("."), +- ).Run(t, pkgThatUsesVendoring, func(t *testing.T, env *Env) { +- env.OpenFile("a/a1.go") +- d := &protocol.PublishDiagnosticsParams{} +- env.AfterChange( +- Diagnostics(env.AtRegexp("go.mod", "module mod.com"), WithMessage("Inconsistent vendoring")), +- ReadDiagnostics("go.mod", d), +- ) +- env.ApplyQuickFixes("go.mod", d.Diagnostics) +- +- env.AfterChange( +- Diagnostics(env.AtRegexp("a/a1.go", `q int`), WithMessage("not used")), +- ) +- }) +-} +- +-func TestWindowsVendoring_Issue56291(t *testing.T) { +- const src = ` +--- go.mod -- +-module mod.com +- +-go 1.14 +- +-require golang.org/x/hello v1.2.3 +--- main.go -- +-package main +- +-import "golang.org/x/hello/hi" +- +-func main() { +- _ = hi.Goodbye +-} +-` +- WithOptions( +- Modes(Default), +- ProxyFiles(basicProxy), +- WriteGoSum("."), +- ).Run(t, src, func(t *testing.T, env *Env) { +- env.OpenFile("main.go") +- env.AfterChange(NoDiagnostics()) +- env.RunGoCommand("mod", "tidy") +- env.RunGoCommand("mod", "vendor") +- env.AfterChange(NoDiagnostics()) +- env.RegexpReplace("main.go", `import "golang.org/x/hello/hi"`, "") +- env.AfterChange( +- Diagnostics(env.AtRegexp("main.go", "hi.Goodbye")), +- ) +- env.SaveBuffer("main.go") +- env.AfterChange(NoDiagnostics()) +- }) +-} +diff -urN 
a/gopls/internal/test/integration/misc/vuln_test.go b/gopls/internal/test/integration/misc/vuln_test.go +--- a/gopls/internal/test/integration/misc/vuln_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/integration/misc/vuln_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,981 +0,0 @@ +-// Copyright 2022 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package misc +- +-import ( +- "context" +- "encoding/json" +- "fmt" +- "sort" +- "strings" +- "testing" +- "time" +- +- "github.com/google/go-cmp/cmp" +- +- "golang.org/x/tools/gopls/internal/cache" +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/gopls/internal/protocol/command" +- "golang.org/x/tools/gopls/internal/server" +- "golang.org/x/tools/gopls/internal/test/compare" +- . "golang.org/x/tools/gopls/internal/test/integration" +- "golang.org/x/tools/gopls/internal/vulncheck" +- "golang.org/x/tools/gopls/internal/vulncheck/vulntest" +-) +- +-func TestRunGovulncheckError(t *testing.T) { +- const files = ` +--- go.mod -- +-module mod.com +- +-go 1.12 +--- foo.go -- +-package foo +-` +- Run(t, files, func(t *testing.T, env *Env) { +- cmd := command.NewRunGovulncheckCommand("Run Vulncheck Exp", command.VulncheckArgs{ +- URI: "/invalid/file/url", // invalid arg +- }) +- params := &protocol.ExecuteCommandParams{ +- Command: command.RunGovulncheck.String(), +- Arguments: cmd.Arguments, +- } +- +- var result any +- err := env.Editor.ExecuteCommand(env.Ctx, params, &result) +- // We want an error! +- if err == nil { +- t.Errorf("got success, want invalid file URL error. Result: %v", result) +- } +- }) +-} +- +-func TestVulncheckError(t *testing.T) { +- // This test checks an error of the gopls.vulncheck command, which should be +- // returned synchronously. +- +- const files = ` +--- go.mod -- +-module mod.com +- +-go 1.12 +--- foo.go -- +-package foo +- +-func F() { // build error incomplete +-` +- WithOptions( +- EnvVars{ +- "_GOPLS_TEST_BINARY_RUN_AS_GOPLS": "true", // needed to run `gopls vulncheck`. 
+- }, +- Settings{ +- "codelenses": map[string]bool{ +- "run_govulncheck": true, +- "vulncheck": true, +- }, +- }, +- ).Run(t, files, func(t *testing.T, env *Env) { +- env.OpenFile("go.mod") +- var result command.VulncheckResult +- err := env.Editor.ExecuteCodeLensCommand(env.Ctx, "go.mod", command.Vulncheck, &result) +- if err == nil { +- t.Fatalf("govulncheck succeeded unexpectedly: %v", result) +- } +- var ws WorkStatus +- env.Await( +- CompletedProgress(server.GoVulncheckCommandTitle, &ws), +- ) +- wantEndMsg, wantMsgPart := "failed", "There are errors with the provided package patterns:" +- if ws.EndMsg != "failed" || !strings.Contains(ws.Msg, wantMsgPart) || !strings.Contains(err.Error(), wantMsgPart) { +- t.Errorf("work status = %+v, want {EndMessage: %q, Message: %q}", ws, wantEndMsg, wantMsgPart) +- } +- }) +-} +- +-const vulnsData = ` +--- GO-2022-01.yaml -- +-modules: +- - module: golang.org/amod +- versions: +- - introduced: 1.0.0 +- - fixed: 1.0.4 +- packages: +- - package: golang.org/amod/avuln +- symbols: +- - VulnData.Vuln1 +- - VulnData.Vuln2 +-description: > +- vuln in amod is found +-summary: vuln in amod +-references: +- - href: pkg.go.dev/vuln/GO-2022-01 +--- GO-2022-03.yaml -- +-modules: +- - module: golang.org/amod +- versions: +- - introduced: 1.0.0 +- - fixed: 1.0.6 +- packages: +- - package: golang.org/amod/avuln +- symbols: +- - nonExisting +-description: > +- unaffecting vulnerability is found +-summary: unaffecting vulnerability +--- GO-2022-02.yaml -- +-modules: +- - module: golang.org/bmod +- packages: +- - package: golang.org/bmod/bvuln +- symbols: +- - Vuln +-description: | +- vuln in bmod is found. +- +- This is a long description +- of this vulnerability. +-summary: vuln in bmod (no fix) +-references: +- - href: pkg.go.dev/vuln/GO-2022-03 +--- GO-2022-04.yaml -- +-modules: +- - module: golang.org/bmod +- packages: +- - package: golang.org/bmod/unused +- symbols: +- - Vuln +-description: | +- vuln in bmod/somethingelse is found +-summary: vuln in bmod/somethingelse +-references: +- - href: pkg.go.dev/vuln/GO-2022-04 +--- GOSTDLIB.yaml -- +-modules: +- - module: stdlib +- versions: +- - introduced: 1.18.0 +- packages: +- - package: archive/zip +- symbols: +- - OpenReader +-summary: vuln in GOSTDLIB +-references: +- - href: pkg.go.dev/vuln/GOSTDLIB +-` +- +-func TestRunGovulncheckStd(t *testing.T) { +- const files = ` +--- go.mod -- +-module mod.com +- +-go 1.19 +--- main.go -- +-package main +- +-import ( +- "archive/zip" +- "fmt" +-) +- +-func main() { +- _, err := zip.OpenReader("file.zip") // vulnerability id: GOSTDLIB +- fmt.Println(err) +-} +-` +- +- db, err := vulntest.NewDatabase(context.Background(), []byte(vulnsData)) +- if err != nil { +- t.Fatal(err) +- } +- defer db.Clean() +- +- for _, legacy := range []bool{false, true} { +- t.Run(fmt.Sprintf("legacy=%v", legacy), func(t *testing.T) { +- lenses := map[string]bool{"vulncheck": !legacy, "run_govulncheck": legacy} +- WithOptions( +- EnvVars{ +- // Let the analyzer read vulnerabilities data from the testdata/vulndb. +- "GOVULNDB": db.URI(), +- // When fetchinging stdlib package vulnerability info, +- // behave as if our go version is go1.19 for this testing. +- // The default behavior is to run `go env GOVERSION` (which isn't mutable env var). +- cache.GoVersionForVulnTest: "go1.19", +- "_GOPLS_TEST_BINARY_RUN_AS_GOPLS": "true", // needed to run `gopls vulncheck`. 
+- }, +- Settings{ +- "codelenses": lenses, +- }, +- ).Run(t, files, func(t *testing.T, env *Env) { +- env.OpenFile("go.mod") +- +- // Run Command included in the codelens. +- +- var result *vulncheck.Result +- var expectation Expectation +- if legacy { +- var r command.RunVulncheckResult +- env.ExecuteCodeLensCommand("go.mod", command.RunGovulncheck, &r) +- expectation = CompletedProgressToken(r.Token, nil) +- } else { +- var r command.VulncheckResult +- env.ExecuteCodeLensCommand("go.mod", command.Vulncheck, &r) +- result = r.Result +- expectation = CompletedProgress(server.GoVulncheckCommandTitle, nil) +- } +- +- env.OnceMet( +- expectation, +- ShownMessage("Found GOSTDLIB"), +- NoDiagnostics(ForFile("go.mod")), +- ) +- testFetchVulncheckResult(t, env, "go.mod", result, map[string]fetchVulncheckResult{ +- "go.mod": {IDs: []string{"GOSTDLIB"}, Mode: vulncheck.ModeGovulncheck}, +- }) +- }) +- }) +- } +-} +- +-func TestFetchVulncheckResultStd(t *testing.T) { +- const files = ` +--- go.mod -- +-module mod.com +- +-go 1.18 +--- main.go -- +-package main +- +-import ( +- "archive/zip" +- "fmt" +-) +- +-func main() { +- _, err := zip.OpenReader("file.zip") // vulnerability id: GOSTDLIB +- fmt.Println(err) +-} +-` +- +- db, err := vulntest.NewDatabase(context.Background(), []byte(vulnsData)) +- if err != nil { +- t.Fatal(err) +- } +- defer db.Clean() +- WithOptions( +- EnvVars{ +- // Let the analyzer read vulnerabilities data from the testdata/vulndb. +- "GOVULNDB": db.URI(), +- // When fetchinging stdlib package vulnerability info, +- // behave as if our go version is go1.18 for this testing. +- cache.GoVersionForVulnTest: "go1.18", +- "_GOPLS_TEST_BINARY_RUN_AS_GOPLS": "true", // needed to run `gopls vulncheck`. +- }, +- Settings{"ui.diagnostic.vulncheck": "Imports"}, +- ).Run(t, files, func(t *testing.T, env *Env) { +- env.OpenFile("go.mod") +- env.AfterChange( +- NoDiagnostics(ForFile("go.mod")), +- // we don't publish diagnostics for standard library vulnerability yet. +- ) +- testFetchVulncheckResult(t, env, "", nil, map[string]fetchVulncheckResult{ +- "go.mod": { +- IDs: []string{"GOSTDLIB"}, +- Mode: vulncheck.ModeImports, +- }, +- }) +- }) +-} +- +-// fetchVulncheckResult summarizes a vulncheck result for a single file. +-type fetchVulncheckResult struct { +- IDs []string +- Mode vulncheck.AnalysisMode +-} +- +-// testFetchVulncheckResult checks that calling gopls.fetch_vulncheck_result +-// returns the expected summarized results contained in the want argument. +-// +-// If fromRun is non-nil, it is the result of running running vulncheck for +-// runPath, and testFetchVulncheckResult also checks that the fetched result +-// for runPath matches fromRun. +-// +-// This awkward factoring is an artifact of a transition from fetching +-// vulncheck results asynchronously, to allowing the command to run +-// asynchronously, yet returning the result synchronously from the client's +-// perspective. +-// +-// TODO(rfindley): once VS Code no longer depends on fetching results +-// asynchronously, we can remove gopls.fetch_vulncheck_result, and simplify or +-// remove this helper. 
+-func testFetchVulncheckResult(t *testing.T, env *Env, runPath string, fromRun *vulncheck.Result, want map[string]fetchVulncheckResult) { +- t.Helper() +- +- var result map[protocol.DocumentURI]*vulncheck.Result +- fetchCmd := command.NewFetchVulncheckResultCommand("fetch", command.URIArg{ +- URI: env.Sandbox.Workdir.URI("go.mod"), +- }) +- env.ExecuteCommand(&protocol.ExecuteCommandParams{ +- Command: fetchCmd.Command, +- Arguments: fetchCmd.Arguments, +- }, &result) +- +- for _, v := range want { +- sort.Strings(v.IDs) +- } +- summarize := func(r *vulncheck.Result) fetchVulncheckResult { +- osv := map[string]bool{} +- for _, v := range r.Findings { +- osv[v.OSV] = true +- } +- ids := make([]string, 0, len(osv)) +- for id := range osv { +- ids = append(ids, id) +- } +- sort.Strings(ids) +- return fetchVulncheckResult{ +- IDs: ids, +- Mode: r.Mode, +- } +- } +- got := map[string]fetchVulncheckResult{} +- for k, r := range result { +- modfile := env.Sandbox.Workdir.RelPath(k.Path()) +- got[modfile] = summarize(r) +- } +- if fromRun != nil { +- if diff := cmp.Diff(want, got); diff != "" { +- t.Errorf("fetch vulncheck result = got %v, want %v: diff %v", got, want, diff) +- } +- if diff := cmp.Diff(summarize(fromRun), got[runPath]); diff != "" { +- t.Errorf("fetched vulncheck result differs from returned (-returned, +fetched):\n%s", diff) +- } +- } +-} +- +-const workspace1 = ` +--- go.mod -- +-module golang.org/entry +- +-go 1.18 +- +-require golang.org/cmod v1.1.3 +- +-require ( +- golang.org/amod v1.0.0 // indirect +- golang.org/bmod v0.5.0 // indirect +-) +--- x/x.go -- +-package x +- +-import ( +- "golang.org/cmod/c" +- "golang.org/entry/y" +-) +- +-func X() { +- c.C1().Vuln1() // vuln use: X -> Vuln1 +-} +- +-func CallY() { +- y.Y() // vuln use: CallY -> y.Y -> bvuln.Vuln +-} +- +--- y/y.go -- +-package y +- +-import "golang.org/cmod/c" +- +-func Y() { +- c.C2()() // vuln use: Y -> bvuln.Vuln +-} +-` +- +-// cmod/c imports amod/avuln and bmod/bvuln. 
+-const proxy1 = ` +--- golang.org/cmod@v1.1.3/go.mod -- +-module golang.org/cmod +- +-go 1.12 +--- golang.org/cmod@v1.1.3/c/c.go -- +-package c +- +-import ( +- "golang.org/amod/avuln" +- "golang.org/bmod/bvuln" +-) +- +-type I interface { +- Vuln1() +-} +- +-func C1() I { +- v := avuln.VulnData{} +- v.Vuln2() // vuln use +- return v +-} +- +-func C2() func() { +- return bvuln.Vuln +-} +--- golang.org/amod@v1.0.0/go.mod -- +-module golang.org/amod +- +-go 1.14 +--- golang.org/amod@v1.0.0/avuln/avuln.go -- +-package avuln +- +-type VulnData struct {} +-func (v VulnData) Vuln1() {} +-func (v VulnData) Vuln2() {} +--- golang.org/amod@v1.0.4/go.mod -- +-module golang.org/amod +- +-go 1.14 +--- golang.org/amod@v1.0.4/avuln/avuln.go -- +-package avuln +- +-type VulnData struct {} +-func (v VulnData) Vuln1() {} +-func (v VulnData) Vuln2() {} +- +--- golang.org/bmod@v0.5.0/go.mod -- +-module golang.org/bmod +- +-go 1.14 +--- golang.org/bmod@v0.5.0/bvuln/bvuln.go -- +-package bvuln +- +-func Vuln() { +- // something evil +-} +--- golang.org/bmod@v0.5.0/unused/unused.go -- +-package unused +- +-func Vuln() { +- // something evil +-} +--- golang.org/amod@v1.0.6/go.mod -- +-module golang.org/amod +- +-go 1.14 +--- golang.org/amod@v1.0.6/avuln/avuln.go -- +-package avuln +- +-type VulnData struct {} +-func (v VulnData) Vuln1() {} +-func (v VulnData) Vuln2() {} +-` +- +-func vulnTestEnv(proxyData string) (*vulntest.DB, []RunOption, error) { +- db, err := vulntest.NewDatabase(context.Background(), []byte(vulnsData)) +- if err != nil { +- return nil, nil, nil +- } +- settings := Settings{ +- "codelenses": map[string]bool{ +- "run_govulncheck": true, +- }, +- } +- ev := EnvVars{ +- // Let the analyzer read vulnerabilities data from the testdata/vulndb. +- "GOVULNDB": db.URI(), +- // When fetching stdlib package vulnerability info, +- // behave as if our go version is go1.18 for this testing. +- // The default behavior is to run `go env GOVERSION` (which isn't mutable env var). +- cache.GoVersionForVulnTest: "go1.18", +- "_GOPLS_TEST_BINARY_RUN_AS_GOPLS": "true", // needed to run `gopls vulncheck`. 
+- "GOSUMDB": "off", +- } +- return db, []RunOption{ProxyFiles(proxyData), ev, settings, WriteGoSum(".")}, nil +-} +- +-func TestRunVulncheckPackageDiagnostics(t *testing.T) { +- db, opts0, err := vulnTestEnv(proxy1) +- if err != nil { +- t.Fatal(err) +- } +- defer db.Clean() +- +- checkVulncheckDiagnostics := func(env *Env, t *testing.T) { +- env.OpenFile("go.mod") +- +- gotDiagnostics := &protocol.PublishDiagnosticsParams{} +- env.AfterChange( +- Diagnostics(env.AtRegexp("go.mod", `golang.org/amod`)), +- ReadDiagnostics("go.mod", gotDiagnostics), +- ) +- +- testFetchVulncheckResult(t, env, "", nil, map[string]fetchVulncheckResult{ +- "go.mod": { +- IDs: []string{"GO-2022-01", "GO-2022-02", "GO-2022-03"}, +- Mode: vulncheck.ModeImports, +- }, +- }) +- +- wantVulncheckDiagnostics := map[string]vulnDiagExpectation{ +- "golang.org/amod": { +- diagnostics: []vulnDiag{ +- { +- msg: "golang.org/amod has known vulnerabilities GO-2022-01, GO-2022-03.", +- severity: protocol.SeverityInformation, +- source: string(cache.Vulncheck), +- codeActions: []string{ +- "Run govulncheck to verify", +- "Upgrade to v1.0.6", +- "Upgrade to latest", +- }, +- }, +- }, +- codeActions: []string{ +- "Run govulncheck to verify", +- "Upgrade to v1.0.6", +- "Upgrade to latest", +- }, +- hover: []string{"GO-2022-01", "Fixed in v1.0.4.", "GO-2022-03"}, +- }, +- "golang.org/bmod": { +- diagnostics: []vulnDiag{ +- { +- msg: "golang.org/bmod has a vulnerability GO-2022-02.", +- severity: protocol.SeverityInformation, +- source: string(cache.Vulncheck), +- codeActions: []string{ +- "Run govulncheck to verify", +- }, +- }, +- }, +- codeActions: []string{ +- "Run govulncheck to verify", +- }, +- hover: []string{"GO-2022-02", "vuln in bmod (no fix)", "No fix is available."}, +- }, +- } +- +- for pattern, want := range wantVulncheckDiagnostics { +- modPathDiagnostics := testVulnDiagnostics(t, env, pattern, want, gotDiagnostics) +- +- gotActions := env.CodeActionForFile("go.mod", modPathDiagnostics) +- if diff := diffCodeActions(gotActions, want.codeActions); diff != "" { +- t.Errorf("code actions for %q do not match, got %v, want %v\n%v\n", pattern, gotActions, want.codeActions, diff) +- continue +- } +- } +- } +- +- wantNoVulncheckDiagnostics := func(env *Env, t *testing.T) { +- env.OpenFile("go.mod") +- +- gotDiagnostics := &protocol.PublishDiagnosticsParams{} +- env.AfterChange( +- ReadDiagnostics("go.mod", gotDiagnostics), +- ) +- +- if len(gotDiagnostics.Diagnostics) > 0 { +- t.Errorf("Unexpected diagnostics: %v", stringify(gotDiagnostics)) +- } +- testFetchVulncheckResult(t, env, "", nil, map[string]fetchVulncheckResult{}) +- } +- +- for _, tc := range []struct { +- name string +- setting Settings +- wantDiagnostics bool +- }{ +- {"imports", Settings{"ui.diagnostic.vulncheck": "Imports"}, true}, +- {"default", Settings{}, false}, +- {"invalid", Settings{"ui.diagnostic.vulncheck": "invalid"}, false}, +- } { +- t.Run(tc.name, func(t *testing.T) { +- // override the settings options to enable diagnostics +- opts := append(opts0, tc.setting) +- WithOptions(opts...).Run(t, workspace1, func(t *testing.T, env *Env) { +- // TODO(hyangah): implement it, so we see GO-2022-01, GO-2022-02, and GO-2022-03. 
+- // Check that the actions we get when including all diagnostics at a location return the same result +- if tc.wantDiagnostics { +- checkVulncheckDiagnostics(env, t) +- } else { +- wantNoVulncheckDiagnostics(env, t) +- } +- +- if tc.name == "imports" && tc.wantDiagnostics { +- // test we get only govulncheck-based diagnostics after "run govulncheck". +- var result command.RunVulncheckResult +- env.ExecuteCodeLensCommand("go.mod", command.RunGovulncheck, &result) +- gotDiagnostics := &protocol.PublishDiagnosticsParams{} +- env.OnceMet( +- CompletedProgressToken(result.Token, nil), +- ShownMessage("Found"), +- ) +- env.OnceMet( +- Diagnostics(env.AtRegexp("go.mod", "golang.org/bmod")), +- ReadDiagnostics("go.mod", gotDiagnostics), +- ) +- // We expect only one diagnostic for GO-2022-02. +- count := 0 +- for _, diag := range gotDiagnostics.Diagnostics { +- if strings.Contains(diag.Message, "GO-2022-02") { +- count++ +- if got, want := diag.Severity, protocol.SeverityWarning; got != want { +- t.Errorf("Diagnostic for GO-2022-02 = %v, want %v", got, want) +- } +- } +- } +- if count != 1 { +- t.Errorf("Unexpected number of diagnostics about GO-2022-02 = %v, want 1:\n%+v", count, stringify(gotDiagnostics)) +- } +- } +- }) +- }) +- } +-} +- +-// TestRunGovulncheck_Expiry checks that govulncheck results expire after a +-// certain amount of time. +-func TestRunGovulncheck_Expiry(t *testing.T) { +- // For this test, set the max age to a duration smaller than the sleep below. +- defer func(prev time.Duration) { +- cache.MaxGovulncheckResultAge = prev +- }(cache.MaxGovulncheckResultAge) +- cache.MaxGovulncheckResultAge = 99 * time.Millisecond +- +- db, opts0, err := vulnTestEnv(proxy1) +- if err != nil { +- t.Fatal(err) +- } +- defer db.Clean() +- +- WithOptions(opts0...).Run(t, workspace1, func(t *testing.T, env *Env) { +- env.OpenFile("go.mod") +- env.OpenFile("x/x.go") +- +- var result command.RunVulncheckResult +- env.ExecuteCodeLensCommand("go.mod", command.RunGovulncheck, &result) +- env.OnceMet( +- CompletedProgressToken(result.Token, nil), +- ShownMessage("Found"), +- ) +- // Sleep long enough for the results to expire. +- time.Sleep(100 * time.Millisecond) +- // Make an arbitrary edit to force re-diagnosis of the workspace. +- env.RegexpReplace("x/x.go", "package x", "package x ") +- env.AfterChange( +- NoDiagnostics(env.AtRegexp("go.mod", "golang.org/bmod")), +- ) +- }) +-} +- +-func stringify(a any) string { +- data, _ := json.Marshal(a) +- return string(data) +-} +- +-func TestRunVulncheckWarning(t *testing.T) { +- db, opts, err := vulnTestEnv(proxy1) +- if err != nil { +- t.Fatal(err) +- } +- defer db.Clean() +- WithOptions(opts...).Run(t, workspace1, func(t *testing.T, env *Env) { +- env.OpenFile("go.mod") +- +- var result command.RunVulncheckResult +- env.ExecuteCodeLensCommand("go.mod", command.RunGovulncheck, &result) +- gotDiagnostics := &protocol.PublishDiagnosticsParams{} +- env.OnceMet( +- CompletedProgressToken(result.Token, nil), +- ShownMessage("Found"), +- ) +- // Vulncheck diagnostics asynchronous to the vulncheck command. +- env.OnceMet( +- Diagnostics(env.AtRegexp("go.mod", `golang.org/amod`)), +- ReadDiagnostics("go.mod", gotDiagnostics), +- ) +- +- testFetchVulncheckResult(t, env, "go.mod", nil, map[string]fetchVulncheckResult{ +- // All vulnerabilities (symbol-level, import-level, module-level) are reported. 
+- "go.mod": {IDs: []string{"GO-2022-01", "GO-2022-02", "GO-2022-03", "GO-2022-04"}, Mode: vulncheck.ModeGovulncheck}, +- }) +- env.OpenFile("x/x.go") +- env.OpenFile("y/y.go") +- wantDiagnostics := map[string]vulnDiagExpectation{ +- "golang.org/amod": { +- applyAction: "Upgrade to v1.0.6", +- diagnostics: []vulnDiag{ +- { +- msg: "golang.org/amod has a vulnerability used in the code: GO-2022-01.", +- severity: protocol.SeverityWarning, +- source: string(cache.Govulncheck), +- codeActions: []string{ +- "Upgrade to v1.0.4", +- "Upgrade to latest", +- "Reset govulncheck result", +- }, +- }, +- { +- msg: "golang.org/amod has a vulnerability GO-2022-03 that is not used in the code.", +- severity: protocol.SeverityInformation, +- source: string(cache.Govulncheck), +- codeActions: []string{ +- "Upgrade to v1.0.6", +- "Upgrade to latest", +- "Reset govulncheck result", +- }, +- }, +- }, +- codeActions: []string{ +- "Upgrade to v1.0.6", +- "Upgrade to latest", +- "Reset govulncheck result", +- }, +- hover: []string{"GO-2022-01", "Fixed in v1.0.4.", "GO-2022-03"}, +- }, +- "golang.org/bmod": { +- diagnostics: []vulnDiag{ +- { +- msg: "golang.org/bmod has a vulnerability used in the code: GO-2022-02.", +- severity: protocol.SeverityWarning, +- source: string(cache.Govulncheck), +- codeActions: []string{ +- "Reset govulncheck result", // no fix, but we should give an option to reset. +- }, +- }, +- }, +- codeActions: []string{ +- "Reset govulncheck result", // no fix, but we should give an option to reset. +- }, +- hover: []string{"GO-2022-02", "vuln in bmod (no fix)", "No fix is available."}, +- }, +- } +- +- for mod, want := range wantDiagnostics { +- modPathDiagnostics := testVulnDiagnostics(t, env, mod, want, gotDiagnostics) +- +- // Check that the actions we get when including all diagnostics at a location return the same result +- gotActions := env.CodeActionForFile("go.mod", modPathDiagnostics) +- if diff := diffCodeActions(gotActions, want.codeActions); diff != "" { +- t.Errorf("code actions for %q do not match, expected %v, got %v\n%v\n", mod, want.codeActions, gotActions, diff) +- continue +- } +- +- // Apply the code action matching applyAction. +- if want.applyAction == "" { +- continue +- } +- for _, action := range gotActions { +- if action.Title == want.applyAction { +- env.ApplyCodeAction(action) +- break +- } +- } +- } +- +- env.Await(env.DoneWithChangeWatchedFiles()) +- wantGoMod := `module golang.org/entry +- +-go 1.18 +- +-require golang.org/cmod v1.1.3 +- +-require ( +- golang.org/amod v1.0.6 // indirect +- golang.org/bmod v0.5.0 // indirect +-) +-` +- if got := env.BufferText("go.mod"); got != wantGoMod { +- t.Fatalf("go.mod vulncheck fix failed:\n%s", compare.Text(wantGoMod, got)) +- } +- }) +-} +- +-func diffCodeActions(gotActions []protocol.CodeAction, want []string) string { +- var gotTitles []string +- for _, ca := range gotActions { +- gotTitles = append(gotTitles, ca.Title) +- } +- return cmp.Diff(want, gotTitles) +-} +- +-const workspace2 = ` +--- go.mod -- +-module golang.org/entry +- +-go 1.18 +- +-require golang.org/bmod v0.5.0 +- +--- x/x.go -- +-package x +- +-import "golang.org/bmod/bvuln" +- +-func F() { +- // Calls a benign func in bvuln. +- bvuln.OK() +-} +-` +- +-const proxy2 = ` +--- golang.org/bmod@v0.5.0/bvuln/bvuln.go -- +-package bvuln +- +-func Vuln() {} // vulnerable. +-func OK() {} // ok. 
+-` +- +-func TestGovulncheckInfo(t *testing.T) { +- db, opts, err := vulnTestEnv(proxy2) +- if err != nil { +- t.Fatal(err) +- } +- defer db.Clean() +- WithOptions(opts...).Run(t, workspace2, func(t *testing.T, env *Env) { +- env.OpenFile("go.mod") +- var result command.RunVulncheckResult +- env.ExecuteCodeLensCommand("go.mod", command.RunGovulncheck, &result) +- gotDiagnostics := &protocol.PublishDiagnosticsParams{} +- env.OnceMet( +- CompletedProgressToken(result.Token, nil), +- ShownMessage("No vulnerabilities found"), // only count affecting vulnerabilities. +- ) +- +- // Vulncheck diagnostics asynchronous to the vulncheck command. +- env.OnceMet( +- Diagnostics(env.AtRegexp("go.mod", "golang.org/bmod")), +- ReadDiagnostics("go.mod", gotDiagnostics), +- ) +- +- testFetchVulncheckResult(t, env, "go.mod", nil, map[string]fetchVulncheckResult{ +- "go.mod": {IDs: []string{"GO-2022-02", "GO-2022-04"}, Mode: vulncheck.ModeGovulncheck}, +- }) +- // wantDiagnostics maps a module path in the require +- // section of a go.mod to diagnostics that will be returned +- // when running vulncheck. +- wantDiagnostics := map[string]vulnDiagExpectation{ +- "golang.org/bmod": { +- diagnostics: []vulnDiag{ +- { +- msg: "golang.org/bmod has a vulnerability GO-2022-02 that is not used in the code.", +- severity: protocol.SeverityInformation, +- source: string(cache.Govulncheck), +- codeActions: []string{ +- "Reset govulncheck result", +- }, +- }, +- }, +- codeActions: []string{ +- "Reset govulncheck result", +- }, +- hover: []string{"GO-2022-02", "vuln in bmod (no fix)", "No fix is available."}, +- }, +- } +- +- var allActions []protocol.CodeAction +- for mod, want := range wantDiagnostics { +- modPathDiagnostics := testVulnDiagnostics(t, env, mod, want, gotDiagnostics) +- // Check that the actions we get when including all diagnostics at a location return the same result +- gotActions := env.CodeActionForFile("go.mod", modPathDiagnostics) +- allActions = append(allActions, gotActions...) +- if diff := diffCodeActions(gotActions, want.codeActions); diff != "" { +- t.Errorf("code actions for %q do not match, expected %v, got %v\n%v\n", mod, want.codeActions, gotActions, diff) +- continue +- } +- } +- +- // Clear Diagnostics by using one of the reset code actions. +- var reset protocol.CodeAction +- for _, a := range allActions { +- if a.Title == "Reset govulncheck result" { +- reset = a +- break +- } +- } +- if reset.Title != "Reset govulncheck result" { +- t.Errorf("failed to find a 'Reset govulncheck result' code action, got %v", allActions) +- } +- env.ApplyCodeAction(reset) +- +- env.Await(NoDiagnostics(ForFile("go.mod"))) +- }) +-} +- +-// testVulnDiagnostics finds the require or module statement line for the requireMod in go.mod file +-// and runs checks if diagnostics and code actions associated with the line match expectation. +-func testVulnDiagnostics(t *testing.T, env *Env, pattern string, want vulnDiagExpectation, got *protocol.PublishDiagnosticsParams) []protocol.Diagnostic { +- t.Helper() +- loc := env.RegexpSearch("go.mod", pattern) +- var modPathDiagnostics []protocol.Diagnostic +- for _, w := range want.diagnostics { +- // Find the diagnostics at loc.start. 
+- var diag *protocol.Diagnostic +- for _, g := range got.Diagnostics { +- if g.Range.Start == loc.Range.Start && w.msg == g.Message { +- modPathDiagnostics = append(modPathDiagnostics, g) +- diag = &g +- break +- } +- } +- if diag == nil { +- t.Errorf("no diagnostic at %q matching %q found\n", pattern, w.msg) +- continue +- } +- if diag.Severity != w.severity || diag.Source != w.source { +- t.Errorf("incorrect (severity, source) for %q, want (%s, %s) got (%s, %s)\n", w.msg, w.severity, w.source, diag.Severity, diag.Source) +- } +- // Check expected code actions appear. +- gotActions := env.CodeActionForFile("go.mod", []protocol.Diagnostic{*diag}) +- if diff := diffCodeActions(gotActions, w.codeActions); diff != "" { +- t.Errorf("code actions for %q do not match, want %v, got %v\n%v\n", w.msg, w.codeActions, gotActions, diff) +- continue +- } +- } +- // Check that useful info is supplemented as hover. +- if len(want.hover) > 0 { +- hover, _ := env.Hover(loc) +- for _, part := range want.hover { +- if !strings.Contains(hover.Value, part) { +- t.Errorf("hover contents for %q do not match, want %v, got %v\n", pattern, strings.Join(want.hover, ","), hover.Value) +- break +- } +- } +- } +- return modPathDiagnostics +-} +- +-type vulnRelatedInfo struct { +- Filename string +- Line uint32 +- Message string +-} +- +-type vulnDiag struct { +- msg string +- severity protocol.DiagnosticSeverity +- // codeActions is a list titles of code actions that we get with this +- // diagnostics as the context. +- codeActions []string +- // relatedInfo is related info message prefixed by the file base. +- // See summarizeRelatedInfo. +- relatedInfo []vulnRelatedInfo +- // diagnostic source. +- source string +-} +- +-// vulnDiagExpectation maps a module path in the require +-// section of a go.mod to diagnostics that will be returned +-// when running vulncheck. +-type vulnDiagExpectation struct { +- // applyAction is the title of the code action to run for this module. +- // If empty, no code actions will be executed. +- applyAction string +- // diagnostics is the list of diagnostics we expect at the require line for +- // the module path. +- diagnostics []vulnDiag +- // codeActions is a list titles of code actions that we get with context +- // diagnostics. +- codeActions []string +- // hover message is the list of expected hover message parts for this go.mod require line. +- // all parts must appear in the hover message. +- hover []string +-} +diff -urN a/gopls/internal/test/integration/misc/workspace_symbol_test.go b/gopls/internal/test/integration/misc/workspace_symbol_test.go +--- a/gopls/internal/test/integration/misc/workspace_symbol_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/integration/misc/workspace_symbol_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,114 +0,0 @@ +-// Copyright 2022 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package misc +- +-import ( +- "testing" +- +- "github.com/google/go-cmp/cmp" +- "golang.org/x/tools/gopls/internal/settings" +- . 
"golang.org/x/tools/gopls/internal/test/integration" +-) +- +-func TestWorkspaceSymbolMissingMetadata(t *testing.T) { +- const files = ` +--- go.mod -- +-module mod.com +- +-go 1.17 +--- a.go -- +-package p +- +-const K1 = "a.go" +--- exclude.go -- +- +-//go:build exclude +-// +build exclude +- +-package exclude +- +-const K2 = "exclude.go" +-` +- +- Run(t, files, func(t *testing.T, env *Env) { +- env.OpenFile("a.go") +- checkSymbols(env, "K", "K1") +- +- // Opening up an ignored file will result in an overlay with missing +- // metadata, but this shouldn't break workspace symbols requests. +- env.OpenFile("exclude.go") +- checkSymbols(env, "K", "K1") +- }) +-} +- +-func TestWorkspaceSymbolSorting(t *testing.T) { +- const files = ` +--- go.mod -- +-module mod.com +- +-go 1.17 +--- a/a.go -- +-package a +- +-const ( +- Foo = iota +- FooBar +- Fooey +- Fooex +- Fooest +-) +-` +- +- var symbolMatcher = string(settings.SymbolFastFuzzy) +- WithOptions( +- Settings{"symbolMatcher": symbolMatcher}, +- ).Run(t, files, func(t *testing.T, env *Env) { +- checkSymbols(env, "Foo", +- "Foo", // prefer exact segment matches first +- "FooBar", // ...followed by exact word matches +- "Fooex", // shorter than Fooest, FooBar, lexically before Fooey +- "Fooey", // shorter than Fooest, Foobar +- "Fooest", +- ) +- }) +-} +- +-func TestWorkspaceSymbolSpecialPatterns(t *testing.T) { +- const files = ` +--- go.mod -- +-module mod.com +- +-go 1.17 +--- a/a.go -- +-package a +- +-const ( +- AxxBxxCxx +- ABC +-) +-` +- +- var symbolMatcher = string(settings.SymbolFastFuzzy) +- WithOptions( +- Settings{"symbolMatcher": symbolMatcher}, +- ).Run(t, files, func(t *testing.T, env *Env) { +- checkSymbols(env, "ABC", "ABC", "AxxBxxCxx") +- checkSymbols(env, "'ABC", "ABC") +- checkSymbols(env, "^mod.com", "mod.com/a.ABC", "mod.com/a.AxxBxxCxx") +- checkSymbols(env, "^mod.com Axx", "mod.com/a.AxxBxxCxx") +- checkSymbols(env, "C$", "ABC") +- }) +-} +- +-func checkSymbols(env *Env, query string, want ...string) { +- env.TB.Helper() +- var got []string +- for _, info := range env.Symbol(query) { +- got = append(got, info.Name) +- } +- if diff := cmp.Diff(got, want); diff != "" { +- env.TB.Errorf("unexpected Symbol(%q) result (+want -got):\n%s", query, diff) +- } +-} +diff -urN a/gopls/internal/test/integration/modfile/modfile_test.go b/gopls/internal/test/integration/modfile/modfile_test.go +--- a/gopls/internal/test/integration/modfile/modfile_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/integration/modfile/modfile_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,1204 +0,0 @@ +-// Copyright 2020 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package modfile +- +-import ( +- "os" +- "runtime" +- "strings" +- "testing" +- +- "golang.org/x/tools/gopls/internal/test/compare" +- . 
"golang.org/x/tools/gopls/internal/test/integration" +- "golang.org/x/tools/gopls/internal/util/bug" +- +- "golang.org/x/tools/gopls/internal/protocol" +-) +- +-func TestMain(m *testing.M) { +- bug.PanicOnBugs = true +- os.Exit(Main(m)) +-} +- +-const workspaceProxy = ` +--- example.com@v1.2.3/go.mod -- +-module example.com +- +-go 1.12 +--- example.com@v1.2.3/blah/blah.go -- +-package blah +- +-func SaySomething() { +- fmt.Println("something") +-} +--- random.org@v1.2.3/go.mod -- +-module random.org +- +-go 1.12 +--- random.org@v1.2.3/bye/bye.go -- +-package bye +- +-func Goodbye() { +- println("Bye") +-} +-` +- +-const proxy = ` +--- example.com@v1.2.3/go.mod -- +-module example.com +- +-go 1.12 +--- example.com@v1.2.3/blah/blah.go -- +-package blah +- +-const Name = "Blah" +--- random.org@v1.2.3/go.mod -- +-module random.org +- +-go 1.12 +--- random.org@v1.2.3/blah/blah.go -- +-package hello +- +-const Name = "Hello" +-` +- +-func TestModFileModification(t *testing.T) { +- const untidyModule = ` +--- a/go.mod -- +-module mod.com +- +--- a/main.go -- +-package main +- +-import "example.com/blah" +- +-func main() { +- println(blah.Name) +-} +-` +- +- runner := RunMultiple{ +- {"default", WithOptions(ProxyFiles(proxy), WorkspaceFolders("a"))}, +- {"nested", WithOptions(ProxyFiles(proxy))}, +- } +- +- t.Run("basic", func(t *testing.T) { +- runner.Run(t, untidyModule, func(t *testing.T, env *Env) { +- // Open the file and make sure that the initial workspace load does not +- // modify the go.mod file. +- goModContent := env.ReadWorkspaceFile("a/go.mod") +- env.OpenFile("a/main.go") +- env.AfterChange( +- Diagnostics(env.AtRegexp("a/main.go", "\"example.com/blah\"")), +- ) +- if got := env.ReadWorkspaceFile("a/go.mod"); got != goModContent { +- t.Fatalf("go.mod changed on disk:\n%s", compare.Text(goModContent, got)) +- } +- // Save the buffer, which will format and organize imports. +- // Confirm that the go.mod file still does not change. +- env.SaveBuffer("a/main.go") +- env.AfterChange( +- Diagnostics(env.AtRegexp("a/main.go", "\"example.com/blah\"")), +- ) +- if got := env.ReadWorkspaceFile("a/go.mod"); got != goModContent { +- t.Fatalf("go.mod changed on disk:\n%s", compare.Text(goModContent, got)) +- } +- }) +- }) +- +- // Reproduce golang/go#40269 by deleting and recreating main.go. +- t.Run("delete main.go", func(t *testing.T) { +- runner.Run(t, untidyModule, func(t *testing.T, env *Env) { +- goModContent := env.ReadWorkspaceFile("a/go.mod") +- mainContent := env.ReadWorkspaceFile("a/main.go") +- env.OpenFile("a/main.go") +- env.SaveBuffer("a/main.go") +- +- // Ensure that we're done processing all the changes caused by opening +- // and saving above. If not, we may run into a file locking issue on +- // windows. +- // +- // If this proves insufficient, env.RemoveWorkspaceFile can be updated to +- // retry file lock errors on windows. +- env.AfterChange() +- env.RemoveWorkspaceFile("a/main.go") +- +- // TODO(rfindley): awaiting here shouldn't really be necessary. We should +- // be consistent eventually. +- // +- // Probably this was meant to exercise a race with the change below. 
+- env.AfterChange() +- +- env.WriteWorkspaceFile("a/main.go", mainContent) +- env.AfterChange( +- Diagnostics(env.AtRegexp("a/main.go", "\"example.com/blah\"")), +- ) +- if got := env.ReadWorkspaceFile("a/go.mod"); got != goModContent { +- t.Fatalf("go.mod changed on disk:\n%s", compare.Text(goModContent, got)) +- } +- }) +- }) +-} +- +-func TestGoGetFix(t *testing.T) { +- const mod = ` +--- a/go.mod -- +-module mod.com +- +-go 1.12 +- +--- a/main.go -- +-package main +- +-import "example.com/blah" +- +-var _ = blah.Name +-` +- +- const want = `module mod.com +- +-go 1.12 +- +-require example.com v1.2.3 +-` +- +- RunMultiple{ +- {"default", WithOptions(ProxyFiles(proxy), WorkspaceFolders("a"))}, +- {"nested", WithOptions(ProxyFiles(proxy))}, +- }.Run(t, mod, func(t *testing.T, env *Env) { +- if strings.Contains(t.Name(), "workspace_module") { +- t.Skip("workspace module mode doesn't set -mod=readonly") +- } +- env.OpenFile("a/main.go") +- var d protocol.PublishDiagnosticsParams +- env.AfterChange( +- Diagnostics(env.AtRegexp("a/main.go", `"example.com/blah"`)), +- ReadDiagnostics("a/main.go", &d), +- ) +- var goGetDiag protocol.Diagnostic +- for _, diag := range d.Diagnostics { +- if strings.Contains(diag.Message, "could not import") { +- goGetDiag = diag +- } +- } +- env.ApplyQuickFixes("a/main.go", []protocol.Diagnostic{goGetDiag}) +- if got := env.ReadWorkspaceFile("a/go.mod"); got != want { +- t.Fatalf("unexpected go.mod content:\n%s", compare.Text(want, got)) +- } +- }) +-} +- +-// Tests that multiple missing dependencies gives good single fixes. +-func TestMissingDependencyFixes(t *testing.T) { +- const mod = ` +--- a/go.mod -- +-module mod.com +- +-go 1.12 +- +--- a/main.go -- +-package main +- +-import "example.com/blah" +-import "random.org/blah" +- +-var _, _ = blah.Name, hello.Name +-` +- +- const want = `module mod.com +- +-go 1.12 +- +-require random.org v1.2.3 +-` +- +- RunMultiple{ +- {"default", WithOptions(ProxyFiles(proxy), WorkspaceFolders("a"))}, +- {"nested", WithOptions(ProxyFiles(proxy))}, +- }.Run(t, mod, func(t *testing.T, env *Env) { +- env.OpenFile("a/main.go") +- var d protocol.PublishDiagnosticsParams +- env.AfterChange( +- Diagnostics(env.AtRegexp("a/main.go", `"random.org/blah"`)), +- ReadDiagnostics("a/main.go", &d), +- ) +- var randomDiag protocol.Diagnostic +- for _, diag := range d.Diagnostics { +- if strings.Contains(diag.Message, "random.org") { +- randomDiag = diag +- } +- } +- env.ApplyQuickFixes("a/main.go", []protocol.Diagnostic{randomDiag}) +- if got := env.ReadWorkspaceFile("a/go.mod"); got != want { +- t.Fatalf("unexpected go.mod content:\n%s", compare.Text(want, got)) +- } +- }) +-} +- +-// Tests that multiple missing dependencies gives good single fixes. 
+-func TestMissingDependencyFixesWithGoWork(t *testing.T) { +- const mod = ` +--- go.work -- +-go 1.18 +- +-use ( +- ./a +-) +--- a/go.mod -- +-module mod.com +- +-go 1.12 +- +--- a/main.go -- +-package main +- +-import "example.com/blah" +-import "random.org/blah" +- +-var _, _ = blah.Name, hello.Name +-` +- +- const want = `module mod.com +- +-go 1.12 +- +-require random.org v1.2.3 +-` +- +- RunMultiple{ +- {"default", WithOptions(ProxyFiles(proxy), WorkspaceFolders("a"))}, +- {"nested", WithOptions(ProxyFiles(proxy))}, +- }.Run(t, mod, func(t *testing.T, env *Env) { +- env.OpenFile("a/main.go") +- var d protocol.PublishDiagnosticsParams +- env.AfterChange( +- Diagnostics(env.AtRegexp("a/main.go", `"random.org/blah"`)), +- ReadDiagnostics("a/main.go", &d), +- ) +- var randomDiag protocol.Diagnostic +- for _, diag := range d.Diagnostics { +- if strings.Contains(diag.Message, "random.org") { +- randomDiag = diag +- } +- } +- env.ApplyQuickFixes("a/main.go", []protocol.Diagnostic{randomDiag}) +- if got := env.ReadWorkspaceFile("a/go.mod"); got != want { +- t.Fatalf("unexpected go.mod content:\n%s", compare.Text(want, got)) +- } +- }) +-} +- +-func TestIndirectDependencyFix(t *testing.T) { +- const mod = ` +--- a/go.mod -- +-module mod.com +- +-go 1.12 +- +-require example.com v1.2.3 // indirect +--- a/go.sum -- +-example.com v1.2.3 h1:ihBTGWGjTU3V4ZJ9OmHITkU9WQ4lGdQkMjgyLFk0FaY= +-example.com v1.2.3/go.mod h1:Y2Rc5rVWjWur0h3pd9aEvK5Pof8YKDANh9gHA2Maujo= +--- a/main.go -- +-package main +- +-import "example.com/blah" +- +-func main() { +- fmt.Println(blah.Name) +-` +- const want = `module mod.com +- +-go 1.12 +- +-require example.com v1.2.3 +-` +- +- RunMultiple{ +- {"default", WithOptions(ProxyFiles(proxy), WorkspaceFolders("a"))}, +- {"nested", WithOptions(ProxyFiles(proxy))}, +- }.Run(t, mod, func(t *testing.T, env *Env) { +- env.OpenFile("a/go.mod") +- var d protocol.PublishDiagnosticsParams +- env.AfterChange( +- Diagnostics(env.AtRegexp("a/go.mod", "// indirect")), +- ReadDiagnostics("a/go.mod", &d), +- ) +- env.ApplyQuickFixes("a/go.mod", d.Diagnostics) +- if got := env.BufferText("a/go.mod"); got != want { +- t.Fatalf("unexpected go.mod content:\n%s", compare.Text(want, got)) +- } +- }) +-} +- +-// Test to reproduce golang/go#39041. It adds a new require to a go.mod file +-// that already has an unused require. 
+-func TestNewDepWithUnusedDep(t *testing.T) { +- +- const proxy = ` +--- github.com/esimov/caire@v1.2.5/go.mod -- +-module github.com/esimov/caire +- +-go 1.12 +--- github.com/esimov/caire@v1.2.5/caire.go -- +-package caire +- +-func RemoveTempImage() {} +--- google.golang.org/protobuf@v1.20.0/go.mod -- +-module google.golang.org/protobuf +- +-go 1.12 +--- google.golang.org/protobuf@v1.20.0/hello/hello.go -- +-package hello +-` +- const repro = ` +--- a/go.mod -- +-module mod.com +- +-go 1.14 +- +-require google.golang.org/protobuf v1.20.0 +--- a/go.sum -- +-github.com/esimov/caire v1.2.5 h1:OcqDII/BYxcBYj3DuwDKjd+ANhRxRqLa2n69EGje7qw= +-github.com/esimov/caire v1.2.5/go.mod h1:mXnjRjg3+WUtuhfSC1rKRmdZU9vJZyS1ZWU0qSvJhK8= +-google.golang.org/protobuf v1.20.0 h1:y9T1vAtFKQg0faFNMOxJU7WuEqPWolVkjIkU6aI8qCY= +-google.golang.org/protobuf v1.20.0/go.mod h1:FcqsytGClbtLv1ot8NvsJHjBi0h22StKVP+K/j2liKA= +--- a/main.go -- +-package main +- +-import ( +- "github.com/esimov/caire" +-) +- +-func _() { +- caire.RemoveTempImage() +-}` +- +- RunMultiple{ +- {"default", WithOptions(ProxyFiles(proxy), WorkspaceFolders("a"))}, +- {"nested", WithOptions(ProxyFiles(proxy))}, +- }.Run(t, repro, func(t *testing.T, env *Env) { +- env.OpenFile("a/main.go") +- var d protocol.PublishDiagnosticsParams +- env.AfterChange( +- Diagnostics(env.AtRegexp("a/main.go", `"github.com/esimov/caire"`)), +- ReadDiagnostics("a/main.go", &d), +- ) +- env.ApplyQuickFixes("a/main.go", d.Diagnostics) +- want := `module mod.com +- +-go 1.14 +- +-require ( +- github.com/esimov/caire v1.2.5 +- google.golang.org/protobuf v1.20.0 +-) +-` +- if got := env.ReadWorkspaceFile("a/go.mod"); got != want { +- t.Fatalf("TestNewDepWithUnusedDep failed:\n%s", compare.Text(want, got)) +- } +- }) +-} +- +-// TODO: For this test to be effective, the sandbox's file watcher must respect +-// the file watching GlobPattern in the capability registration. See +-// golang/go#39384. +-func TestModuleChangesOnDisk(t *testing.T) { +- const mod = ` +--- a/go.mod -- +-module mod.com +- +-go 1.12 +- +-require example.com v1.2.3 +--- a/go.sum -- +-example.com v1.2.3 h1:ihBTGWGjTU3V4ZJ9OmHITkU9WQ4lGdQkMjgyLFk0FaY= +-example.com v1.2.3/go.mod h1:Y2Rc5rVWjWur0h3pd9aEvK5Pof8YKDANh9gHA2Maujo= +--- a/main.go -- +-package main +- +-func main() { +- fmt.Println(blah.Name) +-` +- RunMultiple{ +- {"default", WithOptions(ProxyFiles(proxy), WorkspaceFolders("a"))}, +- {"nested", WithOptions(ProxyFiles(proxy))}, +- }.Run(t, mod, func(t *testing.T, env *Env) { +- // With zero-config gopls, we must open a/main.go to have a View including a/go.mod. +- env.OpenFile("a/main.go") +- env.AfterChange( +- Diagnostics(env.AtRegexp("a/go.mod", "require")), +- ) +- env.RunGoCommandInDir("a", "mod", "tidy") +- env.AfterChange( +- NoDiagnostics(ForFile("a/go.mod")), +- ) +- }) +-} +- +-// Tests golang/go#39784: a missing indirect dependency, necessary +-// due to blah@v2.0.0's incomplete go.mod file. 
+-func TestBadlyVersionedModule(t *testing.T) { +- const proxy = ` +--- example.com/blah/@v/v1.0.0.mod -- +-module example.com +- +-go 1.12 +--- example.com/blah@v1.0.0/blah.go -- +-package blah +- +-const Name = "Blah" +--- example.com/blah/v2/@v/v2.0.0.mod -- +-module example.com +- +-go 1.12 +--- example.com/blah/v2@v2.0.0/blah.go -- +-package blah +- +-import "example.com/blah" +- +-var V1Name = blah.Name +-const Name = "Blah" +-` +- const files = ` +--- a/go.mod -- +-module mod.com +- +-go 1.12 +- +-require example.com/blah/v2 v2.0.0 +--- a/go.sum -- +-example.com/blah v1.0.0 h1:kGPlWJbMsn1P31H9xp/q2mYI32cxLnCvauHN0AVaHnc= +-example.com/blah v1.0.0/go.mod h1:PZUQaGFeVjyDmAE8ywmLbmDn3fj4Ws8epg4oLuDzW3M= +-example.com/blah/v2 v2.0.0 h1:DNPsFPkKtTdxclRheaMCiYAoYizp6PuBzO0OmLOO0pY= +-example.com/blah/v2 v2.0.0/go.mod h1:UZiKbTwobERo/hrqFLvIQlJwQZQGxWMVY4xere8mj7w= +--- a/main.go -- +-package main +- +-import "example.com/blah/v2" +- +-var _ = blah.Name +-` +- RunMultiple{ +- {"default", WithOptions(ProxyFiles(proxy), WorkspaceFolders("a"))}, +- {"nested", WithOptions(ProxyFiles(proxy))}, +- }.Run(t, files, func(t *testing.T, env *Env) { +- env.OpenFile("a/main.go") +- env.OpenFile("a/go.mod") +- var modDiags protocol.PublishDiagnosticsParams +- env.AfterChange( +- // We would like for the error to appear in the v2 module, but +- // as of writing non-workspace packages are not diagnosed. +- Diagnostics(env.AtRegexp("a/main.go", `"example.com/blah/v2"`), WithMessage("no required module provides")), +- Diagnostics(env.AtRegexp("a/go.mod", `require example.com/blah/v2`), WithMessage("no required module provides")), +- ReadDiagnostics("a/go.mod", &modDiags), +- ) +- +- env.ApplyQuickFixes("a/go.mod", modDiags.Diagnostics) +- const want = `module mod.com +- +-go 1.12 +- +-require ( +- example.com/blah v1.0.0 // indirect +- example.com/blah/v2 v2.0.0 +-) +-` +- env.SaveBuffer("a/go.mod") +- env.AfterChange(NoDiagnostics(ForFile("a/main.go"))) +- if got := env.BufferText("a/go.mod"); got != want { +- t.Fatalf("suggested fixes failed:\n%s", compare.Text(want, got)) +- } +- }) +-} +- +-// Reproduces golang/go#38232. +-func TestUnknownRevision(t *testing.T) { +- if runtime.GOOS == "plan9" { +- t.Skipf("skipping test that fails for unknown reasons on plan9; see https://go.dev/issue/50477") +- } +- const unknown = ` +--- a/go.mod -- +-module mod.com +- +-require ( +- example.com v1.2.2 +-) +--- a/main.go -- +-package main +- +-import "example.com/blah" +- +-func main() { +- var x = blah.Name +-} +-` +- +- runner := RunMultiple{ +- {"default", WithOptions(ProxyFiles(proxy), WorkspaceFolders("a"))}, +- {"nested", WithOptions(ProxyFiles(proxy))}, +- } +- // Start from a bad state/bad IWL, and confirm that we recover. +- t.Run("bad", func(t *testing.T) { +- runner.Run(t, unknown, func(t *testing.T, env *Env) { +- env.OpenFile("a/go.mod") +- env.AfterChange( +- Diagnostics(env.AtRegexp("a/go.mod", "example.com v1.2.2")), +- ) +- env.RegexpReplace("a/go.mod", "v1.2.2", "v1.2.3") +- env.SaveBuffer("a/go.mod") // Save to trigger diagnostics. +- +- d := protocol.PublishDiagnosticsParams{} +- env.AfterChange( +- // Make sure the diagnostic mentions the new version -- the old diagnostic is in the same place. 
+- Diagnostics(env.AtRegexp("a/go.mod", "example.com v1.2.3"), WithMessage("example.com@v1.2.3")), +- ReadDiagnostics("a/go.mod", &d), +- ) +- qfs := env.GetQuickFixes("a/go.mod", d.Diagnostics) +- if len(qfs) == 0 { +- t.Fatalf("got 0 code actions to fix %v, wanted at least 1", d.Diagnostics) +- } +- env.ApplyCodeAction(qfs[0]) // Arbitrarily pick a single fix to apply. Applying all of them seems to cause trouble in this particular test. +- env.SaveBuffer("a/go.mod") // Save to trigger diagnostics. +- env.AfterChange( +- NoDiagnostics(ForFile("a/go.mod")), +- Diagnostics(env.AtRegexp("a/main.go", "x = ")), +- ) +- }) +- }) +- +- const known = ` +--- a/go.mod -- +-module mod.com +- +-require ( +- example.com v1.2.3 +-) +--- a/go.sum -- +-example.com v1.2.3 h1:ihBTGWGjTU3V4ZJ9OmHITkU9WQ4lGdQkMjgyLFk0FaY= +-example.com v1.2.3/go.mod h1:Y2Rc5rVWjWur0h3pd9aEvK5Pof8YKDANh9gHA2Maujo= +--- a/main.go -- +-package main +- +-import "example.com/blah" +- +-func main() { +- var x = blah.Name +-} +-` +- // Start from a good state, transform to a bad state, and confirm that we +- // still recover. +- t.Run("good", func(t *testing.T) { +- runner.Run(t, known, func(t *testing.T, env *Env) { +- env.OpenFile("a/go.mod") +- env.AfterChange( +- Diagnostics(env.AtRegexp("a/main.go", "x = ")), +- ) +- env.RegexpReplace("a/go.mod", "v1.2.3", "v1.2.2") +- env.SaveBuffer("a/go.mod") // go.mod changes must be on disk +- env.AfterChange( +- Diagnostics(env.AtRegexp("a/go.mod", "example.com v1.2.2")), +- ) +- env.RegexpReplace("a/go.mod", "v1.2.2", "v1.2.3") +- env.SaveBuffer("a/go.mod") // go.mod changes must be on disk +- env.AfterChange( +- Diagnostics(env.AtRegexp("a/main.go", "x = ")), +- ) +- }) +- }) +-} +- +-// Confirm that an error in an indirect dependency of a requirement is surfaced +-// as a diagnostic in the go.mod file. +-func TestErrorInIndirectDependency(t *testing.T) { +- const badProxy = ` +--- example.com@v1.2.3/go.mod -- +-module example.com +- +-go 1.12 +- +-require random.org v1.2.3 // indirect +--- example.com@v1.2.3/blah/blah.go -- +-package blah +- +-const Name = "Blah" +--- random.org@v1.2.3/go.mod -- +-module bob.org +- +-go 1.12 +--- random.org@v1.2.3/blah/blah.go -- +-package hello +- +-const Name = "Hello" +-` +- const module = ` +--- a/go.mod -- +-module mod.com +- +-go 1.14 +- +-require example.com v1.2.3 +--- a/main.go -- +-package main +- +-import "example.com/blah" +- +-func main() { +- println(blah.Name) +-} +-` +- RunMultiple{ +- {"default", WithOptions(ProxyFiles(badProxy), WorkspaceFolders("a"))}, +- {"nested", WithOptions(ProxyFiles(badProxy))}, +- }.Run(t, module, func(t *testing.T, env *Env) { +- env.OpenFile("a/go.mod") +- env.AfterChange( +- Diagnostics(env.AtRegexp("a/go.mod", "require example.com v1.2.3")), +- ) +- }) +-} +- +-// A copy of govim's config_set_env_goflags_mod_readonly test. 
+-func TestGovimModReadonly(t *testing.T) { +- const mod = ` +--- go.mod -- +-module mod.com +- +-go 1.13 +--- main.go -- +-package main +- +-import "example.com/blah" +- +-func main() { +- println(blah.Name) +-} +-` +- WithOptions( +- EnvVars{"GOFLAGS": "-mod=readonly"}, +- ProxyFiles(proxy), +- Modes(Default), +- ).Run(t, mod, func(t *testing.T, env *Env) { +- env.OpenFile("main.go") +- original := env.ReadWorkspaceFile("go.mod") +- env.AfterChange( +- Diagnostics(env.AtRegexp("main.go", `"example.com/blah"`)), +- ) +- got := env.ReadWorkspaceFile("go.mod") +- if got != original { +- t.Fatalf("go.mod file modified:\n%s", compare.Text(original, got)) +- } +- env.RunGoCommand("get", "example.com/blah@v1.2.3") +- env.RunGoCommand("mod", "tidy") +- env.AfterChange( +- NoDiagnostics(ForFile("main.go")), +- ) +- }) +-} +- +-func TestMultiModuleModDiagnostics(t *testing.T) { +- const mod = ` +--- go.work -- +-go 1.18 +- +-use ( +- a +- b +-) +--- a/go.mod -- +-module moda.com +- +-go 1.14 +- +-require ( +- example.com v1.2.3 +-) +--- a/go.sum -- +-example.com v1.2.3 h1:Yryq11hF02fEf2JlOS2eph+ICE2/ceevGV3C9dl5V/c= +-example.com v1.2.3/go.mod h1:Y2Rc5rVWjWur0h3pd9aEvK5Pof8YKDANh9gHA2Maujo= +--- a/main.go -- +-package main +- +-func main() {} +--- b/go.mod -- +-module modb.com +- +-require example.com v1.2.3 +- +-go 1.14 +--- b/main.go -- +-package main +- +-import "example.com/blah" +- +-func main() { +- blah.SaySomething() +-} +-` +- WithOptions( +- ProxyFiles(workspaceProxy), +- ).Run(t, mod, func(t *testing.T, env *Env) { +- env.AfterChange( +- Diagnostics( +- env.AtRegexp("a/go.mod", "example.com v1.2.3"), +- WithMessage("is not used"), +- ), +- ) +- }) +-} +- +-func TestModTidyWithBuildTags(t *testing.T) { +- const mod = ` +--- go.mod -- +-module mod.com +- +-go 1.14 +--- main.go -- +-// +build bob +- +-package main +- +-import "example.com/blah" +- +-func main() { +- blah.SaySomething() +-} +-` +- WithOptions( +- ProxyFiles(workspaceProxy), +- Settings{"buildFlags": []string{"-tags", "bob"}}, +- ).Run(t, mod, func(t *testing.T, env *Env) { +- env.OnceMet( +- InitialWorkspaceLoad, +- Diagnostics(env.AtRegexp("main.go", `"example.com/blah"`)), +- ) +- }) +-} +- +-func TestModTypoDiagnostic(t *testing.T) { +- const mod = ` +--- go.mod -- +-module mod.com +- +-go 1.12 +--- main.go -- +-package main +- +-func main() {} +-` +- Run(t, mod, func(t *testing.T, env *Env) { +- env.OpenFile("go.mod") +- env.RegexpReplace("go.mod", "module", "modul") +- env.AfterChange( +- Diagnostics(env.AtRegexp("go.mod", "modul")), +- ) +- }) +-} +- +-func TestSumUpdateFixesDiagnostics(t *testing.T) { +- const mod = ` +--- go.mod -- +-module mod.com +- +-go 1.12 +- +-require ( +- example.com v1.2.3 +-) +--- main.go -- +-package main +- +-import ( +- "example.com/blah" +-) +- +-func main() { +- println(blah.Name) +-} +-` +- WithOptions( +- ProxyFiles(workspaceProxy), +- ).Run(t, mod, func(t *testing.T, env *Env) { +- d := &protocol.PublishDiagnosticsParams{} +- env.OpenFile("go.mod") +- env.AfterChange( +- Diagnostics( +- env.AtRegexp("go.mod", `example.com v1.2.3`), +- WithMessage("go.sum is out of sync"), +- ), +- ReadDiagnostics("go.mod", d), +- ) +- env.ApplyQuickFixes("go.mod", d.Diagnostics) +- env.SaveBuffer("go.mod") // Save to trigger diagnostics. +- env.AfterChange( +- NoDiagnostics(ForFile("go.mod")), +- ) +- }) +-} +- +-// This test confirms that editing a go.mod file only causes metadata +-// to be invalidated when it's saved. 
+-func TestGoModInvalidatesOnSave(t *testing.T) { +- const mod = ` +--- go.mod -- +-module mod.com +- +-go 1.12 +--- main.go -- +-package main +- +-func main() { +- hello() +-} +--- hello.go -- +-package main +- +-func hello() {} +-` +- WithOptions( +- // TODO(rFindley) this doesn't work in multi-module workspace mode, because +- // it keeps around the last parsing modfile. Update this test to also +- // exercise the workspace module. +- Modes(Default), +- ).Run(t, mod, func(t *testing.T, env *Env) { +- env.OpenFile("go.mod") +- env.Await(env.DoneWithOpen()) +- env.RegexpReplace("go.mod", "module", "modul") +- // Confirm that we still have metadata with only on-disk edits. +- env.OpenFile("main.go") +- loc := env.FirstDefinition(env.RegexpSearch("main.go", "hello")) +- if loc.URI.Base() != "hello.go" { +- t.Fatalf("expected definition in hello.go, got %s", loc.URI) +- } +- // Confirm that we no longer have metadata when the file is saved. +- env.SaveBufferWithoutActions("go.mod") +- _, err := env.Editor.Definitions(env.Ctx, env.RegexpSearch("main.go", "hello")) +- if err == nil { +- t.Fatalf("expected error, got none") +- } +- }) +-} +- +-func TestRemoveUnusedDependency(t *testing.T) { +- const proxy = ` +--- hasdep.com@v1.2.3/go.mod -- +-module hasdep.com +- +-go 1.12 +- +-require example.com v1.2.3 +--- hasdep.com@v1.2.3/a/a.go -- +-package a +--- example.com@v1.2.3/go.mod -- +-module example.com +- +-go 1.12 +--- example.com@v1.2.3/blah/blah.go -- +-package blah +- +-const Name = "Blah" +--- random.com@v1.2.3/go.mod -- +-module random.com +- +-go 1.12 +--- random.com@v1.2.3/blah/blah.go -- +-package blah +- +-const Name = "Blah" +-` +- t.Run("almost tidied", func(t *testing.T) { +- const mod = ` +--- go.mod -- +-module mod.com +- +-go 1.12 +- +-require hasdep.com v1.2.3 +--- main.go -- +-package main +- +-func main() {} +-` +- WithOptions( +- ProxyFiles(proxy), +- ).Run(t, mod, func(t *testing.T, env *Env) { +- env.OpenFile("go.mod") +- d := &protocol.PublishDiagnosticsParams{} +- env.AfterChange( +- Diagnostics(env.AtRegexp("go.mod", "require hasdep.com v1.2.3")), +- ReadDiagnostics("go.mod", d), +- ) +- const want = `module mod.com +- +-go 1.12 +-` +- env.ApplyQuickFixes("go.mod", d.Diagnostics) +- if got := env.BufferText("go.mod"); got != want { +- t.Fatalf("unexpected content in go.mod:\n%s", compare.Text(want, got)) +- } +- }) +- }) +- +- t.Run("not tidied", func(t *testing.T) { +- const mod = ` +--- go.mod -- +-module mod.com +- +-go 1.12 +- +-require hasdep.com v1.2.3 +-require random.com v1.2.3 +--- main.go -- +-package main +- +-func main() {} +-` +- WithOptions( +- WriteGoSum("."), +- ProxyFiles(proxy), +- ).Run(t, mod, func(t *testing.T, env *Env) { +- d := &protocol.PublishDiagnosticsParams{} +- env.OpenFile("go.mod") +- pos := env.RegexpSearch("go.mod", "require hasdep.com v1.2.3").Range.Start +- env.AfterChange( +- Diagnostics(AtPosition("go.mod", pos.Line, pos.Character)), +- ReadDiagnostics("go.mod", d), +- ) +- const want = `module mod.com +- +-go 1.12 +- +-require random.com v1.2.3 +-` +- var diagnostics []protocol.Diagnostic +- for _, d := range d.Diagnostics { +- if d.Range.Start.Line != pos.Line { +- continue +- } +- diagnostics = append(diagnostics, d) +- } +- env.ApplyQuickFixes("go.mod", diagnostics) +- if got := env.BufferText("go.mod"); got != want { +- t.Fatalf("unexpected content in go.mod:\n%s", compare.Text(want, got)) +- } +- }) +- }) +-} +- +-func TestSumUpdateQuickFix(t *testing.T) { +- const mod = ` +--- go.mod -- +-module mod.com +- +-go 1.12 +- 
+-require ( +- example.com v1.2.3 +-) +--- main.go -- +-package main +- +-import ( +- "example.com/blah" +-) +- +-func main() { +- blah.Hello() +-} +-` +- WithOptions( +- ProxyFiles(workspaceProxy), +- Modes(Default), +- ).Run(t, mod, func(t *testing.T, env *Env) { +- env.OpenFile("go.mod") +- params := &protocol.PublishDiagnosticsParams{} +- env.AfterChange( +- Diagnostics( +- env.AtRegexp("go.mod", `example.com`), +- WithMessage("go.sum is out of sync"), +- ), +- ReadDiagnostics("go.mod", params), +- ) +- env.ApplyQuickFixes("go.mod", params.Diagnostics) +- const want = `example.com v1.2.3 h1:Yryq11hF02fEf2JlOS2eph+ICE2/ceevGV3C9dl5V/c= +-example.com v1.2.3/go.mod h1:Y2Rc5rVWjWur0h3pd9aEvK5Pof8YKDANh9gHA2Maujo= +-` +- if got := env.ReadWorkspaceFile("go.sum"); got != want { +- t.Fatalf("unexpected go.sum contents:\n%s", compare.Text(want, got)) +- } +- }) +-} +- +-func TestDownloadDeps(t *testing.T) { +- const proxy = ` +--- example.com@v1.2.3/go.mod -- +-module example.com +- +-go 1.12 +- +-require random.org v1.2.3 +--- example.com@v1.2.3/blah/blah.go -- +-package blah +- +-import "random.org/bye" +- +-func SaySomething() { +- bye.Goodbye() +-} +--- random.org@v1.2.3/go.mod -- +-module random.org +- +-go 1.12 +--- random.org@v1.2.3/bye/bye.go -- +-package bye +- +-func Goodbye() { +- println("Bye") +-} +-` +- +- const mod = ` +--- go.mod -- +-module mod.com +- +-go 1.12 +--- main.go -- +-package main +- +-import ( +- "example.com/blah" +-) +- +-func main() { +- blah.SaySomething() +-} +-` +- WithOptions( +- ProxyFiles(proxy), +- Modes(Default), +- ).Run(t, mod, func(t *testing.T, env *Env) { +- env.OpenFile("main.go") +- d := &protocol.PublishDiagnosticsParams{} +- env.AfterChange( +- Diagnostics( +- env.AtRegexp("main.go", `"example.com/blah"`), +- WithMessage(`could not import example.com/blah (no required module provides package "example.com/blah")`), +- ), +- ReadDiagnostics("main.go", d), +- ) +- env.ApplyQuickFixes("main.go", d.Diagnostics) +- env.AfterChange( +- NoDiagnostics(ForFile("main.go")), +- NoDiagnostics(ForFile("go.mod")), +- ) +- }) +-} +- +-func TestInvalidGoVersion(t *testing.T) { +- const files = ` +--- go.mod -- +-module mod.com +- +-go foo +--- main.go -- +-package main +-` +- Run(t, files, func(t *testing.T, env *Env) { +- env.OnceMet( +- InitialWorkspaceLoad, +- Diagnostics(env.AtRegexp("go.mod", `go foo`), WithMessage("invalid go version")), +- ) +- env.WriteWorkspaceFile("go.mod", "module mod.com \n\ngo 1.12\n") +- env.AfterChange(NoDiagnostics(ForFile("go.mod"))) +- }) +-} +- +-// This is a regression test for a bug in the line-oriented implementation +-// of the "apply diffs" operation used by the fake editor. 
+-func TestIssue57627(t *testing.T) { +- const files = ` +--- go.work -- +-package main +-` +- Run(t, files, func(t *testing.T, env *Env) { +- env.OpenFile("go.work") +- env.SetBufferContent("go.work", "go 1.18\nuse moda/a") +- env.SaveBuffer("go.work") // doesn't fail +- }) +-} +- +-func TestInconsistentMod(t *testing.T) { +- const proxy = ` +--- golang.org/x/mod@v0.7.0/go.mod -- +-go 1.20 +-module golang.org/x/mod +--- golang.org/x/mod@v0.7.0/a.go -- +-package mod +-func AutoQuote(string) string { return ""} +--- golang.org/x/mod@v0.9.0/go.mod -- +-go 1.20 +-module golang.org/x/mod +--- golang.org/x/mod@v0.9.0/a.go -- +-package mod +-func AutoQuote(string) string { return ""} +-` +- const files = ` +--- go.work -- +-go 1.20 +-use ( +- ./a +- ./b +-) +- +--- a/go.mod -- +-module a.mod.com +-go 1.20 +-require golang.org/x/mod v0.6.0 // yyy +-replace golang.org/x/mod v0.6.0 => golang.org/x/mod v0.7.0 +--- a/main.go -- +-package main +-import "golang.org/x/mod" +-import "fmt" +-func main() {fmt.Println(mod.AutoQuote(""))} +- +--- b/go.mod -- +-module b.mod.com +-go 1.20 +-require golang.org/x/mod v0.9.0 // xxx +--- b/main.go -- +-package aaa +-import "golang.org/x/mod" +-import "fmt" +-func main() {fmt.Println(mod.AutoQuote(""))} +-var A int +- +--- b/c/go.mod -- +-module c.b.mod.com +-go 1.20 +-require b.mod.com v0.4.2 +-replace b.mod.com => ../ +--- b/c/main.go -- +-package main +-import "b.mod.com/aaa" +-import "fmt" +-func main() {fmt.Println(aaa.A)} +-` +- WithOptions( +- ProxyFiles(proxy), +- Modes(Default), +- ).Run(t, files, func(t *testing.T, env *Env) { +- env.OpenFile("a/go.mod") +- ahints := env.InlayHints("a/go.mod") +- if len(ahints) != 1 { +- t.Errorf("expected exactly one hint, got %d: %#v", len(ahints), ahints) +- } +- env.OpenFile("b/c/go.mod") +- bhints := env.InlayHints("b/c/go.mod") +- if len(bhints) != 0 { +- t.Errorf("expected no hints, got %d: %#v", len(bhints), bhints) +- } +- }) +- +-} +diff -urN a/gopls/internal/test/integration/modfile/tempmodfile_test.go b/gopls/internal/test/integration/modfile/tempmodfile_test.go +--- a/gopls/internal/test/integration/modfile/tempmodfile_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/integration/modfile/tempmodfile_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,41 +0,0 @@ +-// Copyright 2023 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package modfile +- +-import ( +- "testing" +- +- . "golang.org/x/tools/gopls/internal/test/integration" +-) +- +-// This test replaces an older, problematic test (golang/go#57784). But it has +-// been a long time since the go command would mutate go.mod files. +-// +-// TODO(golang/go#61970): the tempModfile setting should be removed entirely. +-func TestTempModfileUnchanged(t *testing.T) { +- // badMod has a go.mod file that is missing a go directive. 
+- const badMod = ` +--- go.mod -- +-module badmod.test/p +--- p.go -- +-package p +-` +- +- WithOptions( +- Modes(Default), // no reason to test this with a remote gopls +- ProxyFiles(workspaceProxy), +- Settings{ +- "tempModfile": true, +- }, +- ).Run(t, badMod, func(t *testing.T, env *Env) { +- env.OpenFile("p.go") +- env.AfterChange() +- want := "module badmod.test/p\n" +- got := env.ReadWorkspaceFile("go.mod") +- if got != want { +- t.Errorf("go.mod content:\n%s\nwant:\n%s", got, want) +- } +- }) +-} +diff -urN a/gopls/internal/test/integration/options.go b/gopls/internal/test/integration/options.go +--- a/gopls/internal/test/integration/options.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/integration/options.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,219 +0,0 @@ +-// Copyright 2022 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package integration +- +-import ( +- "maps" +- "strings" +- "testing" +- "time" +- +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/gopls/internal/test/integration/fake" +- "golang.org/x/tools/internal/drivertest" +-) +- +-type runConfig struct { +- editor fake.EditorConfig +- sandbox fake.SandboxConfig +- modes Mode +- noLogsOnError bool +- writeGoSum []string +-} +- +-func defaultConfig() runConfig { +- return runConfig{ +- editor: fake.EditorConfig{ +- Settings: map[string]any{ +- // Shorten the diagnostic delay to speed up test execution (else we'd add +- // the default delay to each assertion about diagnostics) +- "diagnosticsDelay": "10ms", +- }, +- }, +- } +-} +- +-// A RunOption augments the behavior of the test runner. +-type RunOption interface { +- set(*runConfig) +-} +- +-type optionSetter func(*runConfig) +- +-func (f optionSetter) set(opts *runConfig) { +- f(opts) +-} +- +-// ProxyFiles configures a file proxy using the given txtar-encoded string. +-func ProxyFiles(txt string) RunOption { +- return optionSetter(func(opts *runConfig) { +- opts.sandbox.ProxyFiles = fake.UnpackTxt(txt) +- }) +-} +- +-// WriteGoSum causes the environment to write a go.sum file for the requested +-// relative directories (via `go list -mod=mod`), before starting gopls. +-// +-// Useful for tests that use ProxyFiles, but don't care about crafting the +-// go.sum content. +-func WriteGoSum(dirs ...string) RunOption { +- return optionSetter(func(opts *runConfig) { +- opts.writeGoSum = dirs +- }) +-} +- +-// Modes configures the execution modes that the test should run in. +-// +-// By default, modes are configured by the test runner. If this option is set, +-// it overrides the set of default modes and the test runs in exactly these +-// modes. +-func Modes(modes Mode) RunOption { +- return optionSetter(func(opts *runConfig) { +- if opts.modes != 0 { +- panic("modes set more than once") +- } +- opts.modes = modes +- }) +-} +- +-// NoLogsOnError turns off dumping the LSP logs on test failures. +-func NoLogsOnError() RunOption { +- return optionSetter(func(opts *runConfig) { +- opts.noLogsOnError = true +- }) +-} +- +-// WindowsLineEndings configures the editor to use windows line endings. +-func WindowsLineEndings() RunOption { +- return optionSetter(func(opts *runConfig) { +- opts.editor.WindowsLineEndings = true +- }) +-} +- +-// ClientName sets the LSP client name. 
+-func ClientName(name string) RunOption { +- return optionSetter(func(opts *runConfig) { +- opts.editor.ClientName = name +- }) +-} +- +-// CapabilitiesJSON sets the capabalities json. +-func CapabilitiesJSON(capabilities []byte) RunOption { +- return optionSetter(func(opts *runConfig) { +- opts.editor.CapabilitiesJSON = capabilities +- }) +-} +- +-// Settings sets user-provided configuration for the LSP server. +-// +-// As a special case, the env setting must not be provided via Settings: use +-// EnvVars instead. +-type Settings map[string]any +- +-func (s Settings) set(opts *runConfig) { +- if opts.editor.Settings == nil { +- opts.editor.Settings = make(map[string]any) +- } +- maps.Copy(opts.editor.Settings, s) +-} +- +-// WorkspaceFolders configures the workdir-relative workspace folders or uri +-// to send to the LSP server. By default the editor sends a single workspace folder +-// corresponding to the workdir root. To explicitly configure no workspace +-// folders, use WorkspaceFolders with no arguments. +-func WorkspaceFolders(relFolders ...string) RunOption { +- if len(relFolders) == 0 { +- // Use an empty non-nil slice to signal explicitly no folders. +- relFolders = []string{} +- } +- +- return optionSetter(func(opts *runConfig) { +- opts.editor.WorkspaceFolders = relFolders +- }) +-} +- +-// NoDefaultWorkspaceFiles is used to specify whether the fake editor +-// should give a default workspace folder to the LSP server. +-// When it's true, the editor will pass original WorkspaceFolders to the LSP server. +-func NoDefaultWorkspaceFiles() RunOption { +- return optionSetter(func(opts *runConfig) { +- opts.editor.NoDefaultWorkspaceFiles = true +- }) +-} +- +-// RootPath configures the roo path which will be converted to rootUri and sent to the LSP server. +-func RootPath(relpath string) RunOption { +- return optionSetter(func(opts *runConfig) { +- opts.editor.RelRootPath = relpath +- }) +-} +- +-// FolderSettings defines per-folder workspace settings, keyed by relative path +-// to the folder. +-// +-// Use in conjunction with WorkspaceFolders to have different settings for +-// different folders. +-type FolderSettings map[string]Settings +- +-func (fs FolderSettings) set(opts *runConfig) { +- // Re-use the Settings type, for symmetry, but translate back into maps for +- // the editor config. +- folders := make(map[string]map[string]any) +- for k, v := range fs { +- folders[k] = v +- } +- opts.editor.FolderSettings = folders +-} +- +-// EnvVars sets environment variables for the LSP session. When applying these +-// variables to the session, the special string $SANDBOX_WORKDIR is replaced by +-// the absolute path to the sandbox working directory. +-type EnvVars map[string]string +- +-func (e EnvVars) set(opts *runConfig) { +- if opts.editor.Env == nil { +- opts.editor.Env = make(map[string]string) +- } +- maps.Copy(opts.editor.Env, e) +-} +- +-// FakeGoPackagesDriver configures gopls to run with a fake GOPACKAGESDRIVER +-// environment variable. +-func FakeGoPackagesDriver(t *testing.T) RunOption { +- env := drivertest.Env(t) +- vars := make(EnvVars) +- for _, e := range env { +- kv := strings.SplitN(e, "=", 2) +- vars[kv[0]] = kv[1] +- } +- return vars +-} +- +-// InGOPATH configures the workspace working directory to be GOPATH, rather +-// than a separate working directory for use with modules. 
+-func InGOPATH() RunOption { +- return optionSetter(func(opts *runConfig) { +- opts.sandbox.InGoPath = true +- }) +-} +- +-// MessageResponder configures the editor to respond to +-// window/showMessageRequest messages using the provided function. +-func MessageResponder(f func(*protocol.ShowMessageRequestParams) (*protocol.MessageActionItem, error)) RunOption { +- return optionSetter(func(opts *runConfig) { +- opts.editor.MessageResponder = f +- }) +-} +- +-// DelayMessages can be used to fuzz message delivery delays for the purpose of +-// reproducing test flakes. +-// +-// (Even though this option may be unused, keep it around to aid in debugging +-// future flakes.) +-func DelayMessages(upto time.Duration) RunOption { +- return optionSetter(func(opts *runConfig) { +- opts.editor.MaxMessageDelay = upto +- }) +-} +diff -urN a/gopls/internal/test/integration/regtest.go b/gopls/internal/test/integration/regtest.go +--- a/gopls/internal/test/integration/regtest.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/integration/regtest.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,247 +0,0 @@ +-// Copyright 2020 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package integration +- +-import ( +- "context" +- "flag" +- "fmt" +- "os" +- "os/exec" +- "path/filepath" +- "runtime" +- "strings" +- "testing" +- "time" +- +- "golang.org/x/tools/gopls/internal/cache" +- "golang.org/x/tools/gopls/internal/cmd" +- "golang.org/x/tools/gopls/internal/util/memoize" +- "golang.org/x/tools/internal/drivertest" +- "golang.org/x/tools/internal/gocommand" +- "golang.org/x/tools/internal/testenv" +- "golang.org/x/tools/internal/tool" +-) +- +-var ( +- runSubprocessTests = flag.Bool("enable_gopls_subprocess_tests", false, "run integration tests against a gopls subprocess (default: in-process)") +- goplsBinaryPath = flag.String("gopls_test_binary", "", "path to the gopls binary for use as a remote, for use with the -enable_gopls_subprocess_tests flag") +- timeout = flag.Duration("timeout", defaultTimeout(), "if nonzero, default timeout for each integration test; defaults to GOPLS_INTEGRATION_TEST_TIMEOUT") +- skipCleanup = flag.Bool("skip_cleanup", false, "whether to skip cleaning up temp directories") +- printGoroutinesOnFailure = flag.Bool("print_goroutines", false, "whether to print goroutines info on failure") +- printLogs = flag.Bool("print_logs", false, "whether to print LSP logs") +-) +- +-func defaultTimeout() time.Duration { +- s := os.Getenv("GOPLS_INTEGRATION_TEST_TIMEOUT") +- if s == "" { +- return 0 +- } +- d, err := time.ParseDuration(s) +- if err != nil { +- fmt.Fprintf(os.Stderr, "invalid GOPLS_INTEGRATION_TEST_TIMEOUT %q: %v\n", s, err) +- os.Exit(2) +- } +- return d +-} +- +-var runner *Runner +- +-func Run(t *testing.T, files string, f TestFunc) { +- runner.Run(t, files, f) +-} +- +-func WithOptions(opts ...RunOption) configuredRunner { +- return configuredRunner{opts: opts} +-} +- +-type configuredRunner struct { +- opts []RunOption +-} +- +-func (r configuredRunner) Run(t *testing.T, files string, f TestFunc) { +- // Print a warning if the test's temporary directory is not +- // suitable as a workspace folder, as this may lead to +- // otherwise-cryptic failures. This situation typically occurs +- // when an arbitrary string (e.g. "foo.") is used as a subtest +- // name, on a platform with filename restrictions (e.g. no +- // trailing period on Windows). 
+- tmp := t.TempDir() +- if err := cache.CheckPathValid(tmp); err != nil { +- t.Logf("Warning: testing.T.TempDir(%s) is not valid as a workspace folder: %s", +- tmp, err) +- } +- +- runner.Run(t, files, f, r.opts...) +-} +- +-// RunMultiple runs a test multiple times, with different options. +-// The runner should be constructed with [WithOptions]. +-// +-// TODO(rfindley): replace Modes with selective use of RunMultiple. +-type RunMultiple []struct { +- Name string +- Runner interface { +- Run(t *testing.T, files string, f TestFunc) +- } +-} +- +-func (r RunMultiple) Run(t *testing.T, files string, f TestFunc) { +- for _, runner := range r { +- t.Run(runner.Name, func(t *testing.T) { +- runner.Runner.Run(t, files, f) +- }) +- } +-} +- +-// DefaultModes returns the default modes to run for each regression test (they +-// may be reconfigured by the tests themselves). +-func DefaultModes() Mode { +- modes := Default +- if !testing.Short() { +- // TODO(rfindley): we should just run a few select integration tests in +- // "Forwarded" mode, and call it a day. No need to run every single test in +- // two ways. +- modes |= Forwarded +- } +- if *runSubprocessTests { +- modes |= SeparateProcess +- } +- return modes +-} +- +-var runFromMain = false // true if Main has been called +- +-// Main sets up and tears down the shared integration test state. +-func Main(m *testing.M) (code int) { +- // Provide an entrypoint for tests that use a fake go/packages driver. +- drivertest.RunIfChild() +- +- defer func() { +- if runner != nil { +- if err := runner.Close(); err != nil { +- fmt.Fprintf(os.Stderr, "closing test runner: %v\n", err) +- // Cleanup is broken in go1.12 and earlier, and sometimes flakes on +- // Windows due to file locking, but this is OK for our CI. +- // +- // Fail on go1.13+, except for windows and android which have shutdown problems. +- if testenv.Go1Point() >= 13 && runtime.GOOS != "windows" && runtime.GOOS != "android" { +- if code == 0 { +- code = 1 +- } +- } +- } +- } +- }() +- +- runFromMain = true +- +- // golang/go#54461: enable additional debugging around hanging Go commands. +- gocommand.DebugHangingGoCommands = true +- +- // If this magic environment variable is set, run gopls instead of the test +- // suite. See the documentation for runTestAsGoplsEnvvar for more details. +- if os.Getenv(runTestAsGoplsEnvvar) == "true" { +- tool.Main(context.Background(), cmd.New(), os.Args[1:]) +- return 0 +- } +- +- if !testenv.HasExec() { +- fmt.Printf("skipping all tests: exec not supported on %s/%s\n", runtime.GOOS, runtime.GOARCH) +- return 0 +- } +- testenv.ExitIfSmallMachine() +- +- flag.Parse() +- +- // Disable GOPACKAGESDRIVER, as it can cause spurious test failures. 
+- os.Setenv("GOPACKAGESDRIVER", "off") // ignore error +- +- if skipReason := checkBuilder(); skipReason != "" { +- fmt.Printf("Skipping all tests: %s\n", skipReason) +- return 0 +- } +- +- if err := testenv.HasTool("go"); err != nil { +- fmt.Println("Missing go command") +- return 1 +- } +- +- runner = &Runner{ +- DefaultModes: DefaultModes(), +- Timeout: *timeout, +- PrintGoroutinesOnFailure: *printGoroutinesOnFailure, +- SkipCleanup: *skipCleanup, +- store: memoize.NewStore(memoize.NeverEvict), +- } +- +- runner.goplsPath = *goplsBinaryPath +- if runner.goplsPath == "" { +- var err error +- runner.goplsPath, err = os.Executable() +- if err != nil { +- panic(fmt.Sprintf("finding test binary path: %v", err)) +- } +- } +- +- dir, err := os.MkdirTemp("", "gopls-test-") +- if err != nil { +- panic(fmt.Errorf("creating temp directory: %v", err)) +- } +- runner.tempDir = dir +- +- FilterToolchainPathAndGOROOT() +- +- return m.Run() +-} +- +-// FilterToolchainPathAndGOROOT updates the PATH and GOROOT environment +-// variables for the current process to effectively revert the changes made by +-// the go command when performing a toolchain switch in the context of `go +-// test` (see golang/go#68005). +-// +-// It does this by looking through PATH for a go command that is NOT a +-// toolchain go command, and adjusting PATH to find that go command. Then it +-// unsets GOROOT in order to use the default GOROOT for that go command. +-// +-// TODO(rfindley): this is very much a hack, so that our 1.21 and 1.22 builders +-// actually exercise integration with older go commands. In golang/go#69321, we +-// hope to do better. +-func FilterToolchainPathAndGOROOT() { +- if localGo, first := findLocalGo(); localGo != "" && !first { +- dir := filepath.Dir(localGo) +- path := os.Getenv("PATH") +- os.Setenv("PATH", dir+string(os.PathListSeparator)+path) // ignore error +- os.Unsetenv("GOROOT") // Remove the GOROOT value that was added by toolchain switch. +- } +-} +- +-// findLocalGo returns a path to a local (=non-toolchain) Go version, or the +-// empty string if none is found. +-// +-// The second result reports if path matches the result of exec.LookPath. +-func findLocalGo() (path string, first bool) { +- paths := filepath.SplitList(os.Getenv("PATH")) +- for _, path := range paths { +- // Use a simple heuristic to filter out toolchain paths. +- if strings.Contains(path, "toolchain@v0.0.1-go") && filepath.Base(path) == "bin" { +- continue // toolchain path +- } +- fullPath := filepath.Join(path, "go") +- fi, err := os.Stat(fullPath) +- if err != nil { +- continue +- } +- if fi.Mode()&0111 != 0 { +- first := false +- pathGo, err := exec.LookPath("go") +- if err == nil { +- first = fullPath == pathGo +- } +- return fullPath, first +- } +- } +- return "", false +-} +diff -urN a/gopls/internal/test/integration/runner.go b/gopls/internal/test/integration/runner.go +--- a/gopls/internal/test/integration/runner.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/integration/runner.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,434 +0,0 @@ +-// Copyright 2020 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. 
+- +-package integration +- +-import ( +- "bytes" +- "context" +- "fmt" +- "io" +- "net" +- "os" +- "os/exec" +- "path/filepath" +- "runtime" +- "runtime/pprof" +- "strings" +- "sync" +- "testing" +- "time" +- +- "golang.org/x/tools/gopls/internal/cache" +- "golang.org/x/tools/gopls/internal/debug" +- "golang.org/x/tools/gopls/internal/lsprpc" +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/gopls/internal/test/integration/fake" +- "golang.org/x/tools/gopls/internal/util/memoize" +- "golang.org/x/tools/internal/jsonrpc2" +- "golang.org/x/tools/internal/jsonrpc2/servertest" +- "golang.org/x/tools/internal/testenv" +- "golang.org/x/tools/internal/xcontext" +-) +- +-// Mode is a bitmask that defines for which execution modes a test should run. +-// +-// Each mode controls several aspects of gopls' configuration: +-// - Which server options to use for gopls sessions +-// - Whether to use a shared cache +-// - Whether to use a shared server +-// - Whether to run the server in-process or in a separate process +-// +-// The behavior of each mode with respect to these aspects is summarized below. +-// TODO(rfindley, cleanup): rather than using arbitrary names for these modes, +-// we can compose them explicitly out of the features described here, allowing +-// individual tests more freedom in constructing problematic execution modes. +-// For example, a test could assert on a certain behavior when running on a +-// separate process. Moreover, we could unify 'Modes' with 'Options', and use +-// RunMultiple rather than a hard-coded loop through modes. +-// +-// Mode | Options | Shared Cache? | Shared Server? | In-process? +-// --------------------------------------------------------------------------- +-// Default | Default | Y | N | Y +-// Forwarded | Default | Y | Y | Y +-// SeparateProcess | Default | Y | Y | N +-type Mode int +- +-const ( +- // Default mode runs gopls with the default options, communicating over pipes +- // to emulate the lsp sidecar execution mode, which communicates over +- // stdin/stdout. +- // +- // It uses separate servers for each test, but a shared cache, to avoid +- // duplicating work when processing GOROOT. +- Default Mode = 1 << iota +- +- // Forwarded uses the default options, but forwards connections to a shared +- // in-process gopls server. +- Forwarded +- +- // SeparateProcess uses the default options, but forwards connection to an +- // external gopls daemon. +- // +- // Only supported on GOOS=linux. +- SeparateProcess +-) +- +-func (m Mode) String() string { +- switch m { +- case Default: +- return "default" +- case Forwarded: +- return "forwarded" +- case SeparateProcess: +- return "separate process" +- default: +- return "unknown mode" +- } +-} +- +-// A Runner runs tests in gopls execution environments, as specified by its +-// modes. For modes that share state (for example, a shared cache or common +-// remote), any tests that execute on the same Runner will share the same +-// state. 
+-type Runner struct { +- // Configuration +- DefaultModes Mode // modes to run for each test +- Timeout time.Duration // per-test timeout, if set +- PrintGoroutinesOnFailure bool // whether to dump goroutines on test failure +- SkipCleanup bool // if set, don't delete test data directories when the test exits +- +- // Immutable state shared across test invocations +- goplsPath string // path to the gopls executable (for SeparateProcess mode) +- tempDir string // shared parent temp directory +- store *memoize.Store // shared store +- +- // Lazily allocated resources +- tsOnce sync.Once +- ts *servertest.TCPServer // shared in-process test server ("forwarded" mode) +- +- startRemoteOnce sync.Once +- remoteSocket string // unix domain socket for shared daemon ("separate process" mode) +- remoteErr error +- cancelRemote func() +-} +- +-type TestFunc func(t *testing.T, env *Env) +- +-// Run executes the test function in the default configured gopls execution +-// modes. For each a test run, a new workspace is created containing the +-// un-txtared files specified by filedata. +-func (r *Runner) Run(t *testing.T, files string, test TestFunc, opts ...RunOption) { +- // TODO(rfindley): this function has gotten overly complicated, and warrants +- // refactoring. +- +- if !runFromMain { +- // Main performs various setup precondition checks. +- // While it could theoretically be made OK for a Runner to be used outside +- // of Main, it is simpler to enforce that we only use the Runner from +- // integration test suites. +- t.Fatal("integration.Runner.Run must be run from integration.Main") +- } +- +- tests := []struct { +- name string +- mode Mode +- getServer func() jsonrpc2.StreamServer +- }{ +- {"default", Default, r.defaultServer}, +- {"forwarded", Forwarded, r.forwardedServer}, +- {"separate_process", SeparateProcess, r.separateProcessServer}, +- } +- +- for _, tc := range tests { +- config := defaultConfig() +- for _, opt := range opts { +- opt.set(&config) +- } +- modes := r.DefaultModes +- if config.modes != 0 { +- modes = config.modes +- } +- if modes&tc.mode == 0 { +- continue +- } +- +- t.Run(tc.name, func(t *testing.T) { +- // TODO(rfindley): once jsonrpc2 shutdown is fixed, we should not leak +- // goroutines in this test function. +- // stacktest.NoLeak(t) +- +- ctx := context.Background() +- if r.Timeout != 0 { +- var cancel context.CancelFunc +- ctx, cancel = context.WithTimeout(ctx, r.Timeout) +- defer cancel() +- } else if d, ok := testenv.Deadline(t); ok { +- timeout := time.Until(d) * 19 / 20 // Leave an arbitrary 5% for cleanup. +- var cancel context.CancelFunc +- ctx, cancel = context.WithTimeout(ctx, timeout) +- defer cancel() +- } +- +- // TODO(rfindley): do we need an instance at all? Can it be removed? 
+- ctx = debug.WithInstance(ctx) +- +- rootDir := filepath.Join(r.tempDir, filepath.FromSlash(t.Name())) +- if err := os.MkdirAll(rootDir, 0755); err != nil { +- t.Fatal(err) +- } +- +- files := fake.UnpackTxt(files) +- if config.editor.WindowsLineEndings { +- for name, data := range files { +- files[name] = bytes.ReplaceAll(data, []byte("\n"), []byte("\r\n")) +- } +- } +- config.sandbox.Files = files +- config.sandbox.RootDir = rootDir +- sandbox, err := fake.NewSandbox(&config.sandbox) +- if err != nil { +- t.Fatal(err) +- } +- defer func() { +- if !r.SkipCleanup { +- if err := sandbox.Close(); err != nil { +- pprof.Lookup("goroutine").WriteTo(os.Stderr, 1) // ignore error +- t.Errorf("closing the sandbox: %v", err) +- } +- } +- }() +- +- // Write the go.sum file for the requested directories, before starting the server. +- for _, dir := range config.writeGoSum { +- if _, err := sandbox.RunGoCommand(context.Background(), dir, "list", []string{"-mod=mod", "./..."}, []string{"GOWORK=off"}, true); err != nil { +- t.Fatal(err) +- } +- } +- +- ss := tc.getServer() +- +- framer := jsonrpc2.NewRawStream +- ls := &loggingFramer{} +- framer = ls.framer(jsonrpc2.NewRawStream) +- ts := servertest.NewPipeServer(ss, framer) +- +- env := ConnectGoplsEnv(t, ctx, sandbox, config.editor, ts) +- defer func() { +- if t.Failed() && r.PrintGoroutinesOnFailure { +- pprof.Lookup("goroutine").WriteTo(os.Stderr, 1) // ignore error +- } +- if (t.Failed() && !config.noLogsOnError) || *printLogs { +- ls.printBuffers(t.Name(), os.Stderr) +- } +- // For tests that failed due to a timeout, don't fail to shutdown +- // because ctx is done. +- // +- // There is little point to setting an arbitrary timeout for closing +- // the editor: in general we want to clean up before proceeding to the +- // next test, and if there is a deadlock preventing closing it will +- // eventually be handled by the `go test` timeout. +- if err := env.Editor.Close(xcontext.Detach(ctx)); err != nil { +- t.Errorf("closing editor: %v", err) +- } +- }() +- // Always await the initial workspace load. +- env.Await(InitialWorkspaceLoad) +- test(t, env) +- }) +- } +-} +- +-// ConnectGoplsEnv creates a new Gopls environment for the given sandbox, +-// editor config, and server connector. +-// +-// TODO(rfindley): significantly refactor the way testing environments are +-// constructed. +-func ConnectGoplsEnv(t testing.TB, ctx context.Context, sandbox *fake.Sandbox, config fake.EditorConfig, connector servertest.Connector) *Env { +- awaiter := NewAwaiter(sandbox.Workdir) +- editor, err := fake.NewEditor(sandbox, config).Connect(ctx, connector, awaiter.Hooks()) +- if err != nil { +- t.Fatal(err) +- } +- env := &Env{ +- TB: t, +- Ctx: ctx, +- Sandbox: sandbox, +- Server: connector, +- Editor: editor, +- Awaiter: awaiter, +- } +- return env +-} +- +-// longBuilders maps builders that are skipped when -short is set to a +-// (possibly empty) justification. +-var longBuilders = map[string]string{ +- "x_tools-gotip-openbsd-amd64": "go.dev/issue/72145", +- "x_tools-go1.24-openbsd-amd64": "go.dev/issue/72145", +- "x_tools-go1.23-openbsd-amd64": "go.dev/issue/72145", +- +- "darwin-amd64-10_12": "", +- "freebsd-amd64-race": "", +- "illumos-amd64": "", +- "netbsd-arm-bsiegert": "", +- "solaris-amd64-oraclerel": "", +- "windows-arm-zx2c4": "", +- "linux-ppc64le-power9osu": "go.dev/issue/66748", +-} +- +-// TODO(rfindley): inline into Main. 
+-func checkBuilder() string { +- builder := os.Getenv("GO_BUILDER_NAME") +- if reason, ok := longBuilders[builder]; ok && testing.Short() { +- if reason != "" { +- return fmt.Sprintf("skipping %s with -short due to %s", builder, reason) +- } else { +- return fmt.Sprintf("skipping %s with -short", builder) +- } +- } +- return "" +-} +- +-type loggingFramer struct { +- mu sync.Mutex +- buf *safeBuffer +-} +- +-// safeBuffer is a threadsafe buffer for logs. +-type safeBuffer struct { +- mu sync.Mutex +- buf bytes.Buffer +-} +- +-func (b *safeBuffer) Write(p []byte) (int, error) { +- b.mu.Lock() +- defer b.mu.Unlock() +- return b.buf.Write(p) +-} +- +-func (s *loggingFramer) framer(f jsonrpc2.Framer) jsonrpc2.Framer { +- return func(nc net.Conn) jsonrpc2.Stream { +- s.mu.Lock() +- framed := false +- if s.buf == nil { +- s.buf = &safeBuffer{buf: bytes.Buffer{}} +- framed = true +- } +- s.mu.Unlock() +- stream := f(nc) +- if framed { +- return protocol.LoggingStream(stream, s.buf) +- } +- return stream +- } +-} +- +-func (s *loggingFramer) printBuffers(testname string, w io.Writer) { +- s.mu.Lock() +- defer s.mu.Unlock() +- +- if s.buf == nil { +- return +- } +- fmt.Fprintf(os.Stderr, "#### Start Gopls Test Logs for %q\n", testname) +- s.buf.mu.Lock() +- io.Copy(w, &s.buf.buf) +- s.buf.mu.Unlock() +- fmt.Fprintf(os.Stderr, "#### End Gopls Test Logs for %q\n", testname) +-} +- +-// defaultServer handles the Default execution mode. +-func (r *Runner) defaultServer() jsonrpc2.StreamServer { +- return lsprpc.NewStreamServer(cache.New(r.store), false, nil) +-} +- +-// forwardedServer handles the Forwarded execution mode. +-func (r *Runner) forwardedServer() jsonrpc2.StreamServer { +- r.tsOnce.Do(func() { +- ctx := context.Background() +- ctx = debug.WithInstance(ctx) +- ss := lsprpc.NewStreamServer(cache.New(nil), false, nil) +- r.ts = servertest.NewTCPServer(ctx, ss, nil) +- }) +- return newForwarder("tcp", r.ts.Addr) +-} +- +-// runTestAsGoplsEnvvar triggers TestMain to run gopls instead of running +-// tests. It's a trick to allow tests to find a binary to use to start a gopls +-// subprocess. +-const runTestAsGoplsEnvvar = "_GOPLS_TEST_BINARY_RUN_AS_GOPLS" +- +-// separateProcessServer handles the SeparateProcess execution mode. +-func (r *Runner) separateProcessServer() jsonrpc2.StreamServer { +- if runtime.GOOS != "linux" { +- panic("separate process execution mode is only supported on linux") +- } +- +- r.startRemoteOnce.Do(func() { +- socketDir, err := os.MkdirTemp(r.tempDir, "gopls-test-socket") +- if err != nil { +- r.remoteErr = err +- return +- } +- r.remoteSocket = filepath.Join(socketDir, "gopls-test-daemon") +- +- // The server should be killed by when the test runner exits, but to be +- // conservative also set a listen timeout. +- args := []string{"serve", "-listen", "unix;" + r.remoteSocket, "-listen.timeout", "1m"} +- +- ctx, cancel := context.WithCancel(context.Background()) +- cmd := exec.CommandContext(ctx, r.goplsPath, args...) +- cmd.Env = append(os.Environ(), runTestAsGoplsEnvvar+"=true") +- +- // Start the external gopls process. This is still somewhat racy, as we +- // don't know when gopls binds to the socket, but the gopls forwarder +- // client has built-in retry behavior that should mostly mitigate this +- // problem (and if it doesn't, we probably want to improve the retry +- // behavior). 
+- if err := cmd.Start(); err != nil { +- cancel() +- r.remoteSocket = "" +- r.remoteErr = err +- } else { +- r.cancelRemote = cancel +- // Spin off a goroutine to wait, so that we free up resources when the +- // server exits. +- go cmd.Wait() +- } +- }) +- +- return newForwarder("unix", r.remoteSocket) +-} +- +-func newForwarder(network, address string) jsonrpc2.StreamServer { +- server, err := lsprpc.NewForwarder(network+";"+address, nil) +- if err != nil { +- // This should never happen, as we are passing an explicit address. +- panic(fmt.Sprintf("internal error: unable to create forwarder: %v", err)) +- } +- return server +-} +- +-// Close cleans up resource that have been allocated to this workspace. +-func (r *Runner) Close() error { +- var errmsgs []string +- if r.ts != nil { +- if err := r.ts.Close(); err != nil { +- errmsgs = append(errmsgs, err.Error()) +- } +- } +- if r.cancelRemote != nil { +- r.cancelRemote() +- } +- if !r.SkipCleanup { +- if err := os.RemoveAll(r.tempDir); err != nil { +- errmsgs = append(errmsgs, err.Error()) +- } +- } +- if len(errmsgs) > 0 { +- return fmt.Errorf("errors closing the test runner:\n\t%s", strings.Join(errmsgs, "\n\t")) +- } +- return nil +-} +diff -urN a/gopls/internal/test/integration/template/template_test.go b/gopls/internal/test/integration/template/template_test.go +--- a/gopls/internal/test/integration/template/template_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/integration/template/template_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,287 +0,0 @@ +-// Copyright 2022 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package template +- +-import ( +- "os" +- "strings" +- "testing" +- +- "golang.org/x/tools/gopls/internal/protocol" +- . "golang.org/x/tools/gopls/internal/test/integration" +- "golang.org/x/tools/gopls/internal/util/bug" +-) +- +-func TestMain(m *testing.M) { +- bug.PanicOnBugs = true +- os.Exit(Main(m)) +-} +- +-func TestMultilineTokens(t *testing.T) { +- // 51731: panic: runtime error: slice bounds out of range [38:3] +- const files = ` +--- go.mod -- +-module mod.com +- +-go 1.17 +--- hi.tmpl -- +-{{if (foÜx .X.Y)}}😀{{$A := +- "hi" +- }}{{.Z $A}}{{else}} +-{{$A.X 12}} +-{{foo (.X.Y) 23 ($A.Z)}} +-{{end}} +-` +- WithOptions( +- Settings{ +- "templateExtensions": []string{"tmpl"}, +- "semanticTokens": true, +- }, +- ).Run(t, files, func(t *testing.T, env *Env) { +- var p protocol.SemanticTokensParams +- p.TextDocument.URI = env.Sandbox.Workdir.URI("hi.tmpl") +- toks, err := env.Editor.Server.SemanticTokensFull(env.Ctx, &p) +- if err != nil { +- t.Errorf("semantic token failed: %v", err) +- } +- if toks == nil || len(toks.Data) == 0 { +- t.Errorf("got no semantic tokens") +- } +- }) +-} +- +-func TestMultilineTokensAgain(t *testing.T) { +- // Regression tests for a crash; see go.dev/issue/74635. 
+- const files = ` +--- go.mod -- +-module mod.com +- +-go 1.17 +--- hi.tmpl -- +-{{/* this is +-a comment */}} +-` +- WithOptions( +- Settings{ +- "templateExtensions": []string{"tmpl"}, +- "semanticTokens": true, +- }, +- ).Run(t, files, func(t *testing.T, env *Env) { +- var p protocol.SemanticTokensParams +- p.TextDocument.URI = env.Sandbox.Workdir.URI("hi.tmpl") +- toks, err := env.Editor.Server.SemanticTokensFull(env.Ctx, &p) +- if err != nil { +- t.Errorf("semantic token failed: %v", err) +- } +- if toks == nil || len(toks.Data) == 0 { +- t.Errorf("got no semantic tokens") +- } +- }) +-} +- +-func TestTemplatesFromExtensions(t *testing.T) { +- const files = ` +--- go.mod -- +-module mod.com +- +-go 1.12 +--- hello.tmpl -- +-{{range .Planets}} +-Hello {{}} <-- missing body +-{{end}} +-` +- WithOptions( +- Settings{ +- "templateExtensions": []string{"tmpl"}, +- "semanticTokens": true, +- }, +- ).Run(t, files, func(t *testing.T, env *Env) { +- // TODO: can we move this diagnostic onto {{}}? +- var diags protocol.PublishDiagnosticsParams +- env.OnceMet( +- InitialWorkspaceLoad, +- Diagnostics(env.AtRegexp("hello.tmpl", "()Hello {{}}")), +- ReadDiagnostics("hello.tmpl", &diags), +- ) +- d := diags.Diagnostics // issue 50786: check for Source +- if len(d) != 1 { +- t.Errorf("expected 1 diagnostic, got %d", len(d)) +- return +- } +- if d[0].Source != "template" { +- t.Errorf("expected Source 'template', got %q", d[0].Source) +- } +- // issue 50801 (even broken templates could return some semantic tokens) +- var p protocol.SemanticTokensParams +- p.TextDocument.URI = env.Sandbox.Workdir.URI("hello.tmpl") +- toks, err := env.Editor.Server.SemanticTokensFull(env.Ctx, &p) +- if err != nil { +- t.Errorf("semantic token failed: %v", err) +- } +- if toks == nil || len(toks.Data) == 0 { +- t.Errorf("got no semantic tokens") +- } +- +- env.WriteWorkspaceFile("hello.tmpl", "{{range .Planets}}\nHello {{.}}\n{{end}}") +- env.AfterChange(NoDiagnostics(ForFile("hello.tmpl"))) +- }) +-} +- +-func TestTemplatesObserveDirectoryFilters(t *testing.T) { +- const files = ` +--- go.mod -- +-module mod.com +- +-go 1.12 +--- a/a.tmpl -- +-A {{}} <-- missing body +--- b/b.tmpl -- +-B {{}} <-- missing body +-` +- +- WithOptions( +- Settings{ +- "directoryFilters": []string{"-b"}, +- "templateExtensions": []string{"tmpl"}, +- }, +- ).Run(t, files, func(t *testing.T, env *Env) { +- env.OnceMet( +- InitialWorkspaceLoad, +- Diagnostics(env.AtRegexp("a/a.tmpl", "()A")), +- NoDiagnostics(ForFile("b/b.tmpl")), +- ) +- }) +-} +- +-func TestTemplatesFromLangID(t *testing.T) { +- const files = ` +--- go.mod -- +-module mod.com +- +-go 1.12 +-` +- +- Run(t, files, func(t *testing.T, env *Env) { +- env.CreateBuffer("hello.tmpl", "") +- env.AfterChange( +- NoDiagnostics(ForFile("hello.tmpl")), // Don't get spurious errors for empty templates. 
+- ) +- env.SetBufferContent("hello.tmpl", "{{range .Planets}}\nHello {{}}\n{{end}}") +- env.Await(Diagnostics(env.AtRegexp("hello.tmpl", "()Hello {{}}"))) +- env.RegexpReplace("hello.tmpl", "{{}}", "{{.}}") +- env.Await(NoDiagnostics(ForFile("hello.tmpl"))) +- }) +-} +- +-func TestClosingTemplatesMakesDiagnosticsDisappear(t *testing.T) { +- const files = ` +--- go.mod -- +-module mod.com +- +-go 1.12 +--- hello.tmpl -- +-{{range .Planets}} +-Hello {{}} <-- missing body +-{{end}} +-` +- +- Run(t, files, func(t *testing.T, env *Env) { +- env.OpenFile("hello.tmpl") +- env.AfterChange( +- Diagnostics(env.AtRegexp("hello.tmpl", "()Hello {{}}")), +- ) +- // Since we don't have templateExtensions configured, closing hello.tmpl +- // should make its diagnostics disappear. +- env.CloseBuffer("hello.tmpl") +- env.AfterChange( +- NoDiagnostics(ForFile("hello.tmpl")), +- ) +- }) +-} +- +-func TestMultipleSuffixes(t *testing.T) { +- const files = ` +--- go.mod -- +-module mod.com +- +-go 1.12 +--- b.gotmpl -- +-{{define "A"}}goo{{end}} +--- a.tmpl -- +-{{template "A"}} +-` +- +- WithOptions( +- Settings{ +- "templateExtensions": []string{"tmpl", "gotmpl"}, +- }, +- ).Run(t, files, func(t *testing.T, env *Env) { +- env.OpenFile("a.tmpl") +- x := env.RegexpSearch("a.tmpl", `A`) +- loc := env.FirstDefinition(x) +- env.OpenFile(env.Sandbox.Workdir.URIToPath(loc.URI)) +- refs := env.References(loc) +- if len(refs) != 2 { +- t.Fatalf("got %v reference(s), want 2", len(refs)) +- } +- // make sure we got one from b.gotmpl +- want := env.Sandbox.Workdir.URI("b.gotmpl") +- if refs[0].URI != want && refs[1].URI != want { +- t.Errorf("failed to find reference to %s", shorten(want)) +- for i, r := range refs { +- t.Logf("%d: URI:%s %v", i, shorten(r.URI), r.Range) +- } +- } +- +- content, nloc := env.Hover(loc) +- if loc != nloc { +- t.Errorf("loc? got %v, wanted %v", nloc, loc) +- } +- if content.Value != "template A defined" { +- t.Errorf("got %s, wanted 'template A defined", content.Value) +- } +- }) +-} +- +-// shorten long URIs +-func shorten(fn protocol.DocumentURI) string { +- if len(fn) <= 20 { +- return string(fn) +- } +- pieces := strings.Split(string(fn), "/") +- if len(pieces) < 2 { +- return string(fn) +- } +- j := len(pieces) +- return pieces[j-2] + "/" + pieces[j-1] +-} +- +-func TestCompletionPanic_Issue57621(t *testing.T) { +- const src = ` +--- go.mod -- +-module mod.com +- +-go 1.12 +--- hello.tmpl -- +-{{range .Planets}} +-Hello {{ +-{{end}} +-` +- Run(t, src, func(t *testing.T, env *Env) { +- env.OpenFile("hello.tmpl") +- // None of these should panic. 
+- env.Completion(env.RegexpSearch("hello.tmpl", `Hello ()\{\{`)) +- env.Completion(env.RegexpSearch("hello.tmpl", `Hello \{()\{`)) +- env.Completion(env.RegexpSearch("hello.tmpl", `Hello \{\{()`)) +- env.Completion(env.RegexpSearch("hello.tmpl", `()\{\{range`)) +- env.Completion(env.RegexpSearch("hello.tmpl", `\{()\{range`)) +- env.Completion(env.RegexpSearch("hello.tmpl", `\{\{()range`)) +- env.Completion(env.RegexpSearch("hello.tmpl", `Planets()}}`)) +- env.Completion(env.RegexpSearch("hello.tmpl", `Planets}()}`)) +- env.Completion(env.RegexpSearch("hello.tmpl", `Planets}}()`)) +- }) +-} +- +-// Hover needs tests +diff -urN a/gopls/internal/test/integration/watch/setting_test.go b/gopls/internal/test/integration/watch/setting_test.go +--- a/gopls/internal/test/integration/watch/setting_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/integration/watch/setting_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,85 +0,0 @@ +-// Copyright 2023 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package watch +- +-import ( +- "fmt" +- "testing" +- +- . "golang.org/x/tools/gopls/internal/test/integration" +-) +- +-func TestSubdirWatchPatterns(t *testing.T) { +- const files = ` +--- go.mod -- +-module mod.test +- +-go 1.18 +--- subdir/subdir.go -- +-package subdir +-` +- +- tests := []struct { +- clientName string +- subdirWatchPatterns string +- wantWatched bool +- }{ +- {"other client", "on", true}, +- {"other client", "off", false}, +- {"other client", "auto", false}, +- {"Visual Studio Code", "auto", true}, +- } +- +- for _, test := range tests { +- t.Run(fmt.Sprintf("%s_%s", test.clientName, test.subdirWatchPatterns), func(t *testing.T) { +- WithOptions( +- ClientName(test.clientName), +- Settings{ +- "subdirWatchPatterns": test.subdirWatchPatterns, +- }, +- ).Run(t, files, func(t *testing.T, env *Env) { +- var expectation Expectation +- if test.wantWatched { +- expectation = FileWatchMatching("subdir") +- } else { +- expectation = NoFileWatchMatching("subdir") +- } +- env.OnceMet( +- InitialWorkspaceLoad, +- expectation, +- ) +- }) +- }) +- } +-} +- +-// This test checks that we surface errors for invalid subdir watch patterns, +-// as the triple of ("off"|"on"|"auto") may be confusing to users inclined to +-// use (true|false) or some other truthy value. +-func TestSubdirWatchPatterns_BadValues(t *testing.T) { +- tests := []struct { +- badValue any +- wantMessage string +- }{ +- {true, "invalid type bool (want string)"}, +- {false, "invalid type bool (want string)"}, +- {"yes", `invalid option "yes"`}, +- } +- +- for _, test := range tests { +- t.Run(fmt.Sprint(test.badValue), func(t *testing.T) { +- WithOptions( +- Settings{ +- "subdirWatchPatterns": test.badValue, +- }, +- ).Run(t, "", func(t *testing.T, env *Env) { +- env.OnceMet( +- InitialWorkspaceLoad, +- ShownMessage(test.wantMessage), +- ) +- }) +- }) +- } +-} +diff -urN a/gopls/internal/test/integration/watch/watch_test.go b/gopls/internal/test/integration/watch/watch_test.go +--- a/gopls/internal/test/integration/watch/watch_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/integration/watch/watch_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,712 +0,0 @@ +-// Copyright 2020 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package watch +- +-import ( +- "os" +- "testing" +- +- . 
"golang.org/x/tools/gopls/internal/test/integration" +- "golang.org/x/tools/gopls/internal/util/bug" +- +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/gopls/internal/test/integration/fake" +-) +- +-func TestMain(m *testing.M) { +- bug.PanicOnBugs = true +- os.Exit(Main(m)) +-} +- +-func TestEditFile(t *testing.T) { +- const pkg = ` +--- go.mod -- +-module mod.com +- +-go 1.14 +--- a/a.go -- +-package a +- +-func _() { +- var x int +-} +-` +- // Edit the file when it's *not open* in the workspace, and check that +- // diagnostics are updated. +- t.Run("unopened", func(t *testing.T) { +- Run(t, pkg, func(t *testing.T, env *Env) { +- env.OnceMet( +- InitialWorkspaceLoad, +- Diagnostics(env.AtRegexp("a/a.go", "x")), +- ) +- env.WriteWorkspaceFile("a/a.go", `package a; func _() {};`) +- env.AfterChange( +- NoDiagnostics(ForFile("a/a.go")), +- ) +- }) +- }) +- +- // Edit the file when it *is open* in the workspace, and check that +- // diagnostics are *not* updated. +- t.Run("opened", func(t *testing.T) { +- Run(t, pkg, func(t *testing.T, env *Env) { +- env.OpenFile("a/a.go") +- // Insert a trivial edit so that we don't automatically update the buffer +- // (see CL 267577). +- env.EditBuffer("a/a.go", fake.NewEdit(0, 0, 0, 0, " ")) +- env.AfterChange() +- env.WriteWorkspaceFile("a/a.go", `package a; func _() {};`) +- env.AfterChange( +- Diagnostics(env.AtRegexp("a/a.go", "x")), +- ) +- }) +- }) +-} +- +-// Edit a dependency on disk and expect a new diagnostic. +-func TestEditDependency(t *testing.T) { +- const pkg = ` +--- go.mod -- +-module mod.com +- +-go 1.14 +--- b/b.go -- +-package b +- +-func B() int { return 0 } +--- a/a.go -- +-package a +- +-import ( +- "mod.com/b" +-) +- +-func _() { +- _ = b.B() +-} +-` +- Run(t, pkg, func(t *testing.T, env *Env) { +- env.OpenFile("a/a.go") +- env.AfterChange() +- env.WriteWorkspaceFile("b/b.go", `package b; func B() {};`) +- env.AfterChange( +- Diagnostics(env.AtRegexp("a/a.go", "b.B")), +- ) +- }) +-} +- +-// Edit both the current file and one of its dependencies on disk and +-// expect diagnostic changes. +-func TestEditFileAndDependency(t *testing.T) { +- const pkg = ` +--- go.mod -- +-module mod.com +- +-go 1.14 +--- b/b.go -- +-package b +- +-func B() int { return 0 } +--- a/a.go -- +-package a +- +-import ( +- "mod.com/b" +-) +- +-func _() { +- var x int +- _ = b.B() +-} +-` +- Run(t, pkg, func(t *testing.T, env *Env) { +- env.OnceMet( +- InitialWorkspaceLoad, +- Diagnostics(env.AtRegexp("a/a.go", "x")), +- ) +- env.WriteWorkspaceFiles(map[string]string{ +- "b/b.go": `package b; func B() {};`, +- "a/a.go": `package a +- +-import "mod.com/b" +- +-func _() { +- b.B() +-}`, +- }) +- env.AfterChange( +- NoDiagnostics(ForFile("a/a.go")), +- NoDiagnostics(ForFile("b/b.go")), +- ) +- }) +-} +- +-// Delete a dependency and expect a new diagnostic. +-func TestDeleteDependency(t *testing.T) { +- const pkg = ` +--- go.mod -- +-module mod.com +- +-go 1.14 +--- b/b.go -- +-package b +- +-func B() int { return 0 } +--- a/a.go -- +-package a +- +-import ( +- "mod.com/b" +-) +- +-func _() { +- _ = b.B() +-} +-` +- Run(t, pkg, func(t *testing.T, env *Env) { +- env.OpenFile("a/a.go") +- env.AfterChange() +- env.RemoveWorkspaceFile("b/b.go") +- env.AfterChange( +- Diagnostics(env.AtRegexp("a/a.go", "\"mod.com/b\"")), +- ) +- }) +-} +- +-// Create a dependency on disk and expect the diagnostic to go away. 
+-func TestCreateDependency(t *testing.T) { +- const missing = ` +--- go.mod -- +-module mod.com +- +-go 1.14 +--- b/b.go -- +-package b +- +-func B() int { return 0 } +--- a/a.go -- +-package a +- +-import ( +- "mod.com/c" +-) +- +-func _() { +- c.C() +-} +-` +- Run(t, missing, func(t *testing.T, env *Env) { +- env.OnceMet( +- InitialWorkspaceLoad, +- Diagnostics(env.AtRegexp("a/a.go", "\"mod.com/c\"")), +- ) +- env.WriteWorkspaceFile("c/c.go", `package c; func C() {};`) +- env.AfterChange( +- NoDiagnostics(ForFile("a/a.go")), +- ) +- }) +-} +- +-// Create a new dependency and add it to the file on disk. +-// This is similar to what might happen if you switch branches. +-func TestCreateAndAddDependency(t *testing.T) { +- const original = ` +--- go.mod -- +-module mod.com +- +-go 1.14 +--- a/a.go -- +-package a +- +-func _() {} +-` +- Run(t, original, func(t *testing.T, env *Env) { +- env.WriteWorkspaceFile("c/c.go", `package c; func C() {};`) +- env.WriteWorkspaceFile("a/a.go", `package a; import "mod.com/c"; func _() { c.C() }`) +- env.AfterChange( +- NoDiagnostics(ForFile("a/a.go")), +- ) +- }) +-} +- +-// Create a new file that defines a new symbol, in the same package. +-func TestCreateFile(t *testing.T) { +- const pkg = ` +--- go.mod -- +-module mod.com +- +-go 1.14 +--- a/a.go -- +-package a +- +-func _() { +- hello() +-} +-` +- Run(t, pkg, func(t *testing.T, env *Env) { +- env.OnceMet( +- InitialWorkspaceLoad, +- Diagnostics(env.AtRegexp("a/a.go", "hello")), +- ) +- env.WriteWorkspaceFile("a/a2.go", `package a; func hello() {};`) +- env.AfterChange( +- NoDiagnostics(ForFile("a/a.go")), +- ) +- }) +-} +- +-// Add a new method to an interface and implement it. +-// Inspired by the structure of internal/golang and internal/cache. +-func TestCreateImplementation(t *testing.T) { +- const pkg = ` +--- go.mod -- +-module mod.com +- +-go 1.14 +--- b/b.go -- +-package b +- +-type B interface{ +- Hello() string +-} +- +-func SayHello(bee B) { +- println(bee.Hello()) +-} +--- a/a.go -- +-package a +- +-import "mod.com/b" +- +-type X struct {} +- +-func (_ X) Hello() string { +- return "" +-} +- +-func _() { +- x := X{} +- b.SayHello(x) +-} +-` +- const newMethod = `package b +-type B interface{ +- Hello() string +- Bye() string +-} +- +-func SayHello(bee B) { +- println(bee.Hello()) +-}` +- const implementation = `package a +- +-import "mod.com/b" +- +-type X struct {} +- +-func (_ X) Hello() string { +- return "" +-} +- +-func (_ X) Bye() string { +- return "" +-} +- +-func _() { +- x := X{} +- b.SayHello(x) +-}` +- +- // Add the new method before the implementation. Expect diagnostics. +- t.Run("method before implementation", func(t *testing.T) { +- Run(t, pkg, func(t *testing.T, env *Env) { +- env.WriteWorkspaceFile("b/b.go", newMethod) +- env.AfterChange( +- Diagnostics(AtPosition("a/a.go", 12, 12)), +- ) +- env.WriteWorkspaceFile("a/a.go", implementation) +- env.AfterChange( +- NoDiagnostics(ForFile("a/a.go")), +- ) +- }) +- }) +- // Add the new implementation before the new method. Expect no diagnostics. +- t.Run("implementation before method", func(t *testing.T) { +- Run(t, pkg, func(t *testing.T, env *Env) { +- env.WriteWorkspaceFile("a/a.go", implementation) +- env.AfterChange( +- NoDiagnostics(ForFile("a/a.go")), +- ) +- env.WriteWorkspaceFile("b/b.go", newMethod) +- env.AfterChange( +- NoDiagnostics(ForFile("a/a.go")), +- ) +- }) +- }) +- // Add both simultaneously. Expect no diagnostics. 
+- t.Run("implementation and method simultaneously", func(t *testing.T) { +- Run(t, pkg, func(t *testing.T, env *Env) { +- env.WriteWorkspaceFiles(map[string]string{ +- "a/a.go": implementation, +- "b/b.go": newMethod, +- }) +- env.AfterChange( +- NoDiagnostics(ForFile("a/a.go")), +- NoDiagnostics(ForFile("b/b.go")), +- ) +- }) +- }) +-} +- +-// Tests golang/go#38498. Delete a file and then force a reload. +-// Assert that we no longer try to load the file. +-func TestDeleteFiles(t *testing.T) { +- // TODO(rfindley): this test is brittle, because it depends on underspecified +- // logging behavior around loads. +- // +- // We should have a robust way to test loads. It should be possible to assert +- // on the specific loads that have occurred, and without the synchronization +- // problems associated with logging. +- +- const pkg = ` +--- go.mod -- +-module mod.com +- +-go 1.14 +--- a/a.go -- +-package a +- +-func _() { +- var _ int +-} +--- a/a_unneeded.go -- +-package a +-` +- t.Run("close then delete", func(t *testing.T) { +- WithOptions( +- // verboseOutput causes Snapshot.load to log package files. +- // (see the TODO above: this is brittle) +- Settings{"verboseOutput": true}, +- ).Run(t, pkg, func(t *testing.T, env *Env) { +- env.OpenFile("a/a.go") +- env.OpenFile("a/a_unneeded.go") +- env.Await( +- // Log messages are asynchronous to other events on the LSP stream, so we +- // can't use OnceMet or AfterChange here. +- LogMatching(protocol.Info, "a_unneeded.go", 1, false), +- ) +- +- // Close and delete the open file, mimicking what an editor would do. +- env.CloseBuffer("a/a_unneeded.go") +- env.RemoveWorkspaceFile("a/a_unneeded.go") +- env.RegexpReplace("a/a.go", "var _ int", "fmt.Println(\"\")") +- env.AfterChange( +- Diagnostics(env.AtRegexp("a/a.go", "fmt")), +- ) +- env.SaveBuffer("a/a.go") +- env.Await( +- // There should only be one log message containing +- // a_unneeded.go, from the initial workspace load, which we +- // check for earlier. If there are more, there's a bug. +- LogMatching(protocol.Info, "a_unneeded.go", 1, false), +- NoDiagnostics(ForFile("a/a.go")), +- ) +- }) +- }) +- +- t.Run("delete then close", func(t *testing.T) { +- WithOptions( +- Settings{"verboseOutput": true}, +- ).Run(t, pkg, func(t *testing.T, env *Env) { +- env.OpenFile("a/a.go") +- env.OpenFile("a/a_unneeded.go") +- env.Await( +- LogMatching(protocol.Info, "a_unneeded.go", 1, false), +- ) +- +- // Delete and then close the file. +- env.RemoveWorkspaceFile("a/a_unneeded.go") +- env.CloseBuffer("a/a_unneeded.go") +- env.RegexpReplace("a/a.go", "var _ int", "fmt.Println(\"\")") +- env.AfterChange( +- Diagnostics(env.AtRegexp("a/a.go", "fmt")), +- ) +- env.SaveBuffer("a/a.go") +- env.Await( +- // There should only be one log message containing +- // a_unneeded.go, from the initial workspace load, which we +- // check for earlier. If there are more, there's a bug. +- LogMatching(protocol.Info, "a_unneeded.go", 1, false), +- NoDiagnostics(ForFile("a/a.go")), +- ) +- }) +- }) +-} +- +-// This change reproduces the behavior of switching branches, with multiple +-// files being created and deleted. The key change here is the movement of a +-// symbol from one file to another in a given package through a deletion and +-// creation. To reproduce an issue with metadata invalidation in batched +-// changes, the last change in the batch is an on-disk file change that doesn't +-// require metadata invalidation. 
+-func TestMoveSymbol(t *testing.T) { +- const pkg = ` +--- go.mod -- +-module mod.com +- +-go 1.14 +--- main.go -- +-package main +- +-import "mod.com/a" +- +-func main() { +- var x int +- x = a.Hello +- println(x) +-} +--- a/a1.go -- +-package a +- +-var Hello int +--- a/a2.go -- +-package a +- +-func _() {} +-` +- Run(t, pkg, func(t *testing.T, env *Env) { +- env.WriteWorkspaceFile("a/a3.go", "package a\n\nvar Hello int\n") +- env.RemoveWorkspaceFile("a/a1.go") +- env.WriteWorkspaceFile("a/a2.go", "package a; func _() {};") +- env.AfterChange( +- NoDiagnostics(ForFile("main.go")), +- ) +- }) +-} +- +-// Reproduce golang/go#40456. +-func TestChangeVersion(t *testing.T) { +- const proxy = ` +--- example.com@v1.2.3/go.mod -- +-module example.com +- +-go 1.12 +--- example.com@v1.2.3/blah/blah.go -- +-package blah +- +-const Name = "Blah" +- +-func X(x int) {} +--- example.com@v1.2.2/go.mod -- +-module example.com +- +-go 1.12 +--- example.com@v1.2.2/blah/blah.go -- +-package blah +- +-const Name = "Blah" +- +-func X() {} +--- random.org@v1.2.3/go.mod -- +-module random.org +- +-go 1.12 +--- random.org@v1.2.3/blah/blah.go -- +-package hello +- +-const Name = "Hello" +-` +- const mod = ` +--- go.mod -- +-module mod.com +- +-go 1.12 +- +-require example.com v1.2.2 +--- main.go -- +-package main +- +-import "example.com/blah" +- +-func main() { +- blah.X() +-} +-` +- WithOptions( +- WriteGoSum("."), +- ProxyFiles(proxy)).Run(t, mod, func(t *testing.T, env *Env) { +- env.WriteWorkspaceFiles(map[string]string{ +- "go.mod": `module mod.com +- +-go 1.12 +- +-require example.com v1.2.3 +-`, +- "main.go": `package main +- +-import ( +- "example.com/blah" +-) +- +-func main() { +- blah.X(1) +-} +-`, +- }) +- env.AfterChange( +- env.DoneWithChangeWatchedFiles(), +- NoDiagnostics(ForFile("main.go")), +- ) +- }) +-} +- +-// Reproduces golang/go#40340. +-func TestSwitchFromGOPATHToModuleMode(t *testing.T) { +- const files = ` +--- foo/blah/blah.go -- +-package blah +- +-const Name = "" +--- main.go -- +-package main +- +-import "foo/blah" +- +-func main() { +- _ = blah.Name +-} +-` +- WithOptions( +- InGOPATH(), +- Modes(Default), // golang/go#57521: this test is temporarily failing in 'experimental' mode +- EnvVars{"GO111MODULE": "auto"}, +- ).Run(t, files, func(t *testing.T, env *Env) { +- env.OpenFile("main.go") +- env.AfterChange( +- NoDiagnostics(ForFile("main.go")), +- ) +- if _, err := env.Sandbox.RunGoCommand(env.Ctx, "", "mod", []string{"init", "mod.com"}, nil, true); err != nil { +- t.Fatal(err) +- } +- +- // TODO(golang/go#57558, golang/go#57512): file watching is asynchronous, +- // and we must wait for the view to be reconstructed before touching +- // main.go, so that the new view "knows" about main.go. This is a bug, but +- // awaiting the change here avoids it. +- env.AfterChange() +- +- env.RegexpReplace("main.go", `"foo/blah"`, `"mod.com/foo/blah"`) +- env.AfterChange( +- NoDiagnostics(ForFile("main.go")), +- ) +- }) +-} +- +-// Reproduces golang/go#40487. 
+-func TestSwitchFromModulesToGOPATH(t *testing.T) { +- const files = ` +--- foo/go.mod -- +-module mod.com +- +-go 1.14 +--- foo/blah/blah.go -- +-package blah +- +-const Name = "" +--- foo/main.go -- +-package main +- +-import "mod.com/blah" +- +-func main() { +- _ = blah.Name +-} +-` +- WithOptions( +- InGOPATH(), +- EnvVars{"GO111MODULE": "auto"}, +- ).Run(t, files, func(t *testing.T, env *Env) { +- env.OpenFile("foo/main.go") +- env.RemoveWorkspaceFile("foo/go.mod") +- env.AfterChange( +- Diagnostics(env.AtRegexp("foo/main.go", `"mod.com/blah"`)), +- ) +- env.RegexpReplace("foo/main.go", `"mod.com/blah"`, `"foo/blah"`) +- env.AfterChange( +- NoDiagnostics(ForFile("foo/main.go")), +- ) +- }) +-} +- +-func TestNewSymbolInTestVariant(t *testing.T) { +- const files = ` +--- go.mod -- +-module mod.com +- +-go 1.12 +--- a/a.go -- +-package a +- +-func bob() {} +--- a/a_test.go -- +-package a +- +-import "testing" +- +-func TestBob(t *testing.T) { +- bob() +-} +-` +- Run(t, files, func(t *testing.T, env *Env) { +- // Add a new symbol to the package under test and use it in the test +- // variant. Expect no diagnostics. +- env.WriteWorkspaceFiles(map[string]string{ +- "a/a.go": `package a +- +-func bob() {} +-func george() {} +-`, +- "a/a_test.go": `package a +- +-import "testing" +- +-func TestAll(t *testing.T) { +- bob() +- george() +-} +-`, +- }) +- env.AfterChange( +- NoDiagnostics(ForFile("a/a.go")), +- NoDiagnostics(ForFile("a/a_test.go")), +- ) +- // Now, add a new file to the test variant and use its symbol in the +- // original test file. Expect no diagnostics. +- env.WriteWorkspaceFiles(map[string]string{ +- "a/a_test.go": `package a +- +-import "testing" +- +-func TestAll(t *testing.T) { +- bob() +- george() +- hi() +-} +-`, +- "a/a2_test.go": `package a +- +-import "testing" +- +-func hi() {} +- +-func TestSomething(t *testing.T) {} +-`, +- }) +- env.AfterChange( +- NoDiagnostics(ForFile("a/a_test.go")), +- NoDiagnostics(ForFile("a/a2_test.go")), +- ) +- }) +-} +diff -urN a/gopls/internal/test/integration/web/assembly_test.go b/gopls/internal/test/integration/web/assembly_test.go +--- a/gopls/internal/test/integration/web/assembly_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/integration/web/assembly_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,156 +0,0 @@ +-// Copyright 2025 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package web_test +- +-import ( +- "regexp" +- "runtime" +- "testing" +- +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/gopls/internal/settings" +- . "golang.org/x/tools/gopls/internal/test/integration" +- "golang.org/x/tools/internal/testenv" +-) +- +-// TestAssembly is a basic test of the web-based assembly listing. +-func TestAssembly(t *testing.T) { +- testenv.NeedsGoCommand1Point(t, 22) // for up-to-date assembly listing +- +- const files = ` +--- go.mod -- +-module example.com +- +--- a/a.go -- +-package a +- +-func f(x int) int { +- println("hello") +- defer println("world") +- return x +-} +- +-func g() { +- println("goodbye") +-} +- +-var v = [...]int{ +- f(123), +- f(456), +-} +- +-func init() { +- f(789) +-} +-` +- Run(t, files, func(t *testing.T, env *Env) { +- env.OpenFile("a/a.go") +- +- // Get the report and do some minimal checks for sensible results. +- // +- // Use only portable instructions below! 
Remember that +- // this is a test of plumbing, not compilation, so +- // it's better to skip the tests, rather than refine +- // them, on any architecture that gives us trouble +- // (e.g. uses JAL for CALL, or BL<cc> for RET). +- // We conservatively test only on the two most popular +- // architectures. +- { +- loc := env.RegexpSearch("a/a.go", "println") +- report := asmFor(t, env, loc) +- checkMatch(t, true, report, `TEXT.*example.com/a.f`) +- switch runtime.GOARCH { +- case "amd64", "arm64": +- checkMatch(t, true, report, `CALL runtime.printlock`) +- checkMatch(t, true, report, `CALL runtime.printstring`) +- checkMatch(t, true, report, `CALL runtime.printunlock`) +- checkMatch(t, true, report, `CALL example.com/a.f.deferwrap`) +- checkMatch(t, true, report, `RET`) +- checkMatch(t, true, report, `CALL runtime.morestack_noctxt`) +- } +- +- // Nested functions are also shown. +- // +- // The condition here was relaxed to unblock go.dev/cl/639515. +- checkMatch(t, true, report, `example.com/a.f.deferwrap`) +- +- // But other functions are not. +- checkMatch(t, false, report, `TEXT.*example.com/a.g`) +- } +- +- // Check that code in a package-level var initializer is found too. +- { +- loc := env.RegexpSearch("a/a.go", `f\(123\)`) +- report := asmFor(t, env, loc) +- switch runtime.GOARCH { +- case "amd64", "arm64": +- checkMatch(t, true, report, `TEXT.*example.com/a.init`) +- checkMatch(t, true, report, `MOV.? \$123`) +- checkMatch(t, true, report, `MOV.? \$456`) +- checkMatch(t, true, report, `CALL example.com/a.f`) +- } +- } +- +- // And code in a source-level init function. +- { +- loc := env.RegexpSearch("a/a.go", `f\(789\)`) +- report := asmFor(t, env, loc) +- switch runtime.GOARCH { +- case "amd64", "arm64": +- checkMatch(t, true, report, `TEXT.*example.com/a.init`) +- checkMatch(t, true, report, `MOV.? \$789`) +- checkMatch(t, true, report, `CALL example.com/a.f`) +- } +- } +- }) +-} +- +-// TestTestAssembly exercises assembly listing of tests. +-func TestTestAssembly(t *testing.T) { +- testenv.NeedsGoCommand1Point(t, 22) // for up-to-date assembly listing +- +- const files = ` +--- go.mod -- +-module example.com +- +--- a/a_test.go -- +-package a +- +-import "testing" +- +-func Test1(*testing.T) { println(0) } +- +--- a/a_x_test.go -- +-package a_test +- +-import "testing" +- +-func Test2(*testing.T) { println(0) } +-` +- Run(t, files, func(t *testing.T, env *Env) { +- for _, test := range []struct { +- filename, symbol string +- }{ +- {"a/a_test.go", "example.com/a.Test1"}, +- {"a/a_x_test.go", "example.com/a_test.Test2"}, +- } { +- env.OpenFile(test.filename) +- loc := env.RegexpSearch(test.filename, `println`) +- report := asmFor(t, env, loc) +- checkMatch(t, true, report, `TEXT.*`+regexp.QuoteMeta(test.symbol)) +- switch runtime.GOARCH { +- case "amd64", "arm64": +- checkMatch(t, true, report, `CALL runtime.printint`) +- } +- } +- }) +-} +- +-// asmFor returns the HTML document served by gopls for a "Browse assembly" +-// command at the specified location in an open file. 
+-func asmFor(t *testing.T, env *Env, loc protocol.Location) []byte { +- _, content := codeActionWebPage(t, env, settings.GoAssembly, loc) +- return content +-} +diff -urN a/gopls/internal/test/integration/web/flight_test.go b/gopls/internal/test/integration/web/flight_test.go +--- a/gopls/internal/test/integration/web/flight_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/integration/web/flight_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,69 +0,0 @@ +-// Copyright 2025 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package web_test +- +-import ( +- "encoding/json" +- "runtime" +- "testing" +- +- "golang.org/x/tools/gopls/internal/debug" +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/gopls/internal/protocol/command" +- . "golang.org/x/tools/gopls/internal/test/integration" +-) +- +-// TestFlightRecorder checks that the flight recorder is minimally functional. +-func TestFlightRecorder(t *testing.T) { +- // The usual UNIX mechanisms cause timely termination of the +- // cmd/trace process, but this doesn't happen on Windows, +- // leading to CI failures because of process->file locking. +- // Rather than invent a complex mechanism, skip the test: +- // this feature is only for gopls developers anyway. +- // Better long term solutions are CL 677262 and issue #66843. +- if runtime.GOOS == "windows" { +- t.Skip("not reliable on windows") +- } +- +- // This is a global hammer; it won't play nicely with +- // multiple concurrent tests of Flight Recorder. +- t.Cleanup(debug.KillTraceViewers) +- +- const files = ` +--- go.mod -- +-module example.com +- +--- a/a.go -- +-package a +- +-const A = 1 +-` +- +- Run(t, files, func(t *testing.T, env *Env) { +- env.OpenFile("a/a.go") +- +- // Start the debug server. +- var result command.DebuggingResult +- env.ExecuteCommand(&protocol.ExecuteCommandParams{ +- Command: command.StartDebugging.String(), +- Arguments: []json.RawMessage{json.RawMessage("{}")}, // no args -> pick port +- }, &result) +- uri := result.URLs[0] +- t.Logf("StartDebugging: URLs[0] = %s", uri) +- +- // Check the debug server page is sensible. +- doc1 := get(t, uri) +- checkMatch(t, true, doc1, "Gopls server information") +- checkMatch(t, true, doc1, `<a href="/flightrecorder">Flight recorder</a>`) +- +- // "Click" the Flight Recorder link. +- // It should redirect to the web server +- // of a "go tool trace" process. +- // The resulting web page is entirely programmatic, +- // so we check for an arbitrary expected symbol. +- doc2 := get(t, uri+"/flightrecorder") +- checkMatch(t, true, doc2, `onTraceViewerImportFail`) +- }) +-} +diff -urN a/gopls/internal/test/integration/web/freesymbols_test.go b/gopls/internal/test/integration/web/freesymbols_test.go +--- a/gopls/internal/test/integration/web/freesymbols_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/integration/web/freesymbols_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,76 +0,0 @@ +-// Copyright 2025 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package web_test +- +-import ( +- "testing" +- +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/gopls/internal/protocol/command" +- "golang.org/x/tools/gopls/internal/settings" +- . 
"golang.org/x/tools/gopls/internal/test/integration" +-) +- +-// TestFreeSymbols is a basic test of interaction with the "free symbols" web report. +-func TestFreeSymbols(t *testing.T) { +- const files = ` +--- go.mod -- +-module example.com +- +--- a/a.go -- +-package a +- +-import "fmt" +-import "bytes" +- +-func f(buf bytes.Buffer, greeting string) { +-/* « */ +- fmt.Fprintf(&buf, "%s", greeting) +- buf.WriteString(fmt.Sprint("foo")) +- buf.WriteByte(0) +-/* » */ +- buf.Write(nil) +-} +-` +- Run(t, files, func(t *testing.T, env *Env) { +- env.OpenFile("a/a.go") +- +- // Invoke the "Browse free symbols" code +- // action to start the server. +- loc := env.RegexpSearch("a/a.go", "«((?:.|\n)*)»") +- actions, err := env.Editor.CodeAction(env.Ctx, loc, nil, protocol.CodeActionUnknownTrigger) +- if err != nil { +- t.Fatalf("CodeAction: %v", err) +- } +- action, err := codeActionByKind(actions, settings.GoFreeSymbols) +- if err != nil { +- t.Fatal(err) +- } +- +- // Execute the command. +- // Its side effect should be a single showDocument request. +- params := &protocol.ExecuteCommandParams{ +- Command: action.Command.Command, +- Arguments: action.Command.Arguments, +- } +- var result command.DebuggingResult +- collectDocs := env.Awaiter.ListenToShownDocuments() +- env.ExecuteCommand(params, &result) +- doc := shownDocument(t, collectDocs(), "http:") +- if doc == nil { +- t.Fatalf("no showDocument call had 'file:' prefix") +- } +- t.Log("showDocument(package doc) URL:", doc.URI) +- +- // Get the report and do some minimal checks for sensible results. +- report := get(t, doc.URI) +- checkMatch(t, true, report, `<li>import "<a .*'>fmt</a>" // for Fprintf, Sprint</li>`) +- checkMatch(t, true, report, `<li>var <a .*>buf</a> bytes.Buffer</li>`) +- checkMatch(t, true, report, `<li>func <a .*>WriteByte</a> func\(c byte\) error</li>`) +- checkMatch(t, true, report, `<li>func <a .*>WriteString</a> func\(s string\) \(n int, err error\)</li>`) +- checkMatch(t, false, report, `<li>func <a .*>Write</a>`) // not in selection +- checkMatch(t, true, report, `<li>var <a .*>greeting</a> string</li>`) +- }) +-} +diff -urN a/gopls/internal/test/integration/web/pkdoc_test.go b/gopls/internal/test/integration/web/pkdoc_test.go +--- a/gopls/internal/test/integration/web/pkdoc_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/integration/web/pkdoc_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,485 +0,0 @@ +-// Copyright 2025 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package web_test +- +-import ( +- "fmt" +- "html" +- "regexp" +- "strings" +- "testing" +- +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/gopls/internal/settings" +- . "golang.org/x/tools/gopls/internal/test/integration" +-) +- +-// TODO(adonovan): define marker test verbs for checking package docs. +- +-// TestBrowsePkgDoc provides basic coverage of the "Browse package +-// documentation", which creates a web server on demand. +-func TestBrowsePkgDoc(t *testing.T) { +- const files = ` +--- go.mod -- +-module example.com +- +--- a/a.go -- +-package a +- +-const A = 1 +- +-type G[T any] int +-func (G[T]) F(int, int, int, int, int, int, int, ...int) {} +- +-// EOF +-` +- Run(t, files, func(t *testing.T, env *Env) { +- // Assert that the HTML page contains the expected const declaration. +- // (We may need to make allowances for HTML markup.) 
+- env.OpenFile("a/a.go") +- uri1 := viewPkgDoc(t, env, env.Sandbox.Workdir.EntireFile("a/a.go")) +- doc1 := get(t, uri1) +- checkMatch(t, true, doc1, "const A =.*1") +- +- // Regression test for signature truncation (#67287, #67294). +- checkMatch(t, true, doc1, regexp.QuoteMeta("func (G[T]) F(int, int, int, ...)")) +- +- // Check that edits to the buffer (even unsaved) are +- // reflected in the HTML document. +- env.RegexpReplace("a/a.go", "// EOF", "func NewFunc() {}") +- env.Await(env.DoneDiagnosingChanges()) +- doc2 := get(t, uri1) +- checkMatch(t, true, doc2, "func NewFunc") +- +- // TODO(adonovan): assert some basic properties of the +- // HTML document using something like +- // golang.org/x/pkgsite/internal/testing/htmlcheck. +- +- // Grab the URL in the HTML source link for NewFunc. +- // (We don't have a DOM or JS interpreter so we have +- // to know something of the document internals here.) +- rx := regexp.MustCompile(`<h3 id='NewFunc'.*httpGET\("(.*)"\)`) +- srcURL := html.UnescapeString(string(rx.FindSubmatch(doc2)[1])) +- +- // Fetch the document. Its result isn't important, +- // but it must have the side effect of another showDocument +- // downcall, this time for a "file:" URL, causing the +- // client editor to navigate to the source file. +- t.Log("extracted /src URL", srcURL) +- collectDocs := env.Awaiter.ListenToShownDocuments() +- get(t, srcURL) +- +- // Check that shown location is that of NewFunc. +- shownSource := shownDocument(t, collectDocs(), "file:") +- gotLoc := protocol.Location{ +- URI: protocol.DocumentURI(shownSource.URI), // fishy conversion +- Range: *shownSource.Selection, +- } +- t.Log("showDocument(source file) URL:", gotLoc) +- wantLoc := env.RegexpSearch("a/a.go", `func ()NewFunc`) +- if gotLoc != wantLoc { +- t.Errorf("got location %v, want %v", gotLoc, wantLoc) +- } +- }) +-} +- +-func TestShowDocumentUnsupported(t *testing.T) { +- const files = ` +--- go.mod -- +-module example.com +- +--- a.go -- +-package a +- +-const A = 1 +-` +- +- for _, supported := range []bool{false, true} { +- t.Run(fmt.Sprintf("supported=%v", supported), func(t *testing.T) { +- opts := []RunOption{Modes(Default)} +- if !supported { +- opts = append(opts, CapabilitiesJSON([]byte(` +-{ +- "window": { +- "showDocument": { +- "support": false +- } +- } +-}`))) +- } +- WithOptions(opts...).Run(t, files, func(t *testing.T, env *Env) { +- env.OpenFile("a.go") +- // Invoke the "Browse package documentation" code +- // action to start the server. +- actions := env.CodeAction(env.Sandbox.Workdir.EntireFile("a.go"), nil, 0) +- docAction, err := codeActionByKind(actions, settings.GoDoc) +- if err != nil { +- t.Fatal(err) +- } +- +- // Execute the command. +- // Its side effect should be a single showDocument request. +- params := &protocol.ExecuteCommandParams{ +- Command: docAction.Command.Command, +- Arguments: docAction.Command.Arguments, +- } +- var result any +- collectDocs := env.Awaiter.ListenToShownDocuments() +- collectMessages := env.Awaiter.ListenToShownMessages() +- env.ExecuteCommand(params, &result) +- +- // golang/go#70342: just because the command has finished does not mean +- // that we will have received the necessary notifications. Synchronize +- // using progress reports. 
+- env.Await(CompletedWork(params.Command, 1, false)) +- +- wantDocs, wantMessages := 0, 1 +- if supported { +- wantDocs, wantMessages = 1, 0 +- } +- +- docs := collectDocs() +- messages := collectMessages() +- +- if gotDocs := len(docs); gotDocs != wantDocs { +- t.Errorf("gopls.doc: got %d showDocument requests, want %d", gotDocs, wantDocs) +- } +- if gotMessages := len(messages); gotMessages != wantMessages { +- t.Errorf("gopls.doc: got %d showMessage requests, want %d", gotMessages, wantMessages) +- } +- }) +- }) +- } +-} +- +-func TestPkgDocNoPanic66449(t *testing.T) { +- // This particular input triggered a latent bug in doc.New +- // that would corrupt the AST while filtering out unexported +- // symbols such as b, causing nodeHTML to panic. +- // Now it doesn't crash. +- // +- // We also check cross-reference anchors for all symbols. +- const files = ` +--- go.mod -- +-module example.com +- +--- a/a.go -- +-package a +- +-// The 'π' suffix is to elimimate spurious matches with other HTML substrings, +-// in particular the random base64 secret tokens that appear in gopls URLs. +- +-var Vπ, vπ = 0, 0 +-const Cπ, cπ = 0, 0 +- +-func Fπ() +-func fπ() +- +-type Tπ int +-type tπ int +- +-func (Tπ) Mπ() {} +-func (Tπ) mπ() {} +- +-func (tπ) Mπ() {} +-func (tπ) mπ() {} +-` +- Run(t, files, func(t *testing.T, env *Env) { +- env.OpenFile("a/a.go") +- uri1 := viewPkgDoc(t, env, env.Sandbox.Workdir.EntireFile("a/a.go")) +- +- doc := get(t, uri1) +- // (Ideally our code rendering would also +- // eliminate unexported symbols...) +- checkMatch(t, true, doc, "var Vπ, vπ = .*0.*0") +- checkMatch(t, true, doc, "const Cπ, cπ = .*0.*0") +- +- // Unexported funcs/types/... must still be discarded. +- checkMatch(t, true, doc, "Fπ") +- checkMatch(t, false, doc, "fπ") +- checkMatch(t, true, doc, "Tπ") +- checkMatch(t, false, doc, "tπ") +- +- // Also, check that anchors exist (only) for exported symbols. +- // exported: +- checkMatch(t, true, doc, "<a id='Vπ'") +- checkMatch(t, true, doc, "<a id='Cπ'") +- checkMatch(t, true, doc, "<h3 id='Tπ'") +- checkMatch(t, true, doc, "<h3 id='Fπ'") +- checkMatch(t, true, doc, "<h4 id='Tπ.Mπ'") +- // unexported: +- checkMatch(t, false, doc, "<a id='vπ'") +- checkMatch(t, false, doc, "<a id='cπ'") +- checkMatch(t, false, doc, "<h3 id='tπ'") +- checkMatch(t, false, doc, "<h3 id='fπ'") +- checkMatch(t, false, doc, "<h4 id='Tπ.mπ'") +- checkMatch(t, false, doc, "<h4 id='tπ.Mπ'") +- checkMatch(t, false, doc, "<h4 id='tπ.mπ'") +- }) +-} +- +-// TestPkgDocNavigation tests that the symbol selector and index of +-// symbols are well formed. 
+-func TestPkgDocNavigation(t *testing.T) { +- const files = ` +--- go.mod -- +-module example.com +- +--- a/a.go -- +-package a +- +-func Func1(int, string, bool, []string) (int, error) +-func Func2(x, y int, a, b string) (int, error) +- +-type Type struct {} +-func (t Type) Method() {} +-func (p *Type) PtrMethod() {} +- +-func Constructor() Type +-` +- Run(t, files, func(t *testing.T, env *Env) { +- env.OpenFile("a/a.go") +- uri1 := viewPkgDoc(t, env, env.Sandbox.Workdir.EntireFile("a/a.go")) +- doc := get(t, uri1) +- +- q := regexp.QuoteMeta +- +- // selector +- checkMatch(t, true, doc, q(`<option label='Func1(_, _, _, _)' value='#Func1'/>`)) +- checkMatch(t, true, doc, q(`<option label='Func2(x, y, a, b)' value='#Func2'/>`)) +- checkMatch(t, true, doc, q(`<option label='Type' value='#Type'/>`)) +- checkMatch(t, true, doc, q(`<option label='Constructor()' value='#Constructor'/>`)) +- checkMatch(t, true, doc, q(`<option label='(t) Method()' value='#Type.Method'/>`)) +- checkMatch(t, true, doc, q(`<option label='(p) PtrMethod()' value='#Type.PtrMethod'/>`)) +- +- // index +- checkMatch(t, true, doc, q(`<li><a href='#Func1'>func Func1(int, string, bool, ...) (int, error)</a></li>`)) +- checkMatch(t, true, doc, q(`<li><a href='#Func2'>func Func2(x int, y int, a string, ...) (int, error)</a></li>`)) +- checkMatch(t, true, doc, q(`<li><a href='#Type'>type Type</a></li>`)) +- checkMatch(t, true, doc, q(`<li><a href='#Constructor'>func Constructor() Type</a></li>`)) +- checkMatch(t, true, doc, q(`<li><a href='#Type.Method'>func (t Type) Method()</a></li>`)) +- checkMatch(t, true, doc, q(`<li><a href='#Type.PtrMethod'>func (p *Type) PtrMethod()</a></li>`)) +- }) +-} +- +-// TestPkgDocContext tests that the gopls.doc command title and /pkg +-// URL are appropriate for the current selection. It is effectively a +-// test of golang.DocFragment. +-func TestPkgDocContext(t *testing.T) { +- const files = ` +--- go.mod -- +-module example.com +- +--- a/a.go -- +-package a +- +-import "fmt" +-import "bytes" +- +-func A() { +- fmt.Println() +- new(bytes.Buffer).Write(nil) +-} +- +-const K = 123 +- +-type T int +-func (*T) M() { /*in T.M*/} +- +-` +- +- viewRE := regexp.MustCompile("view=[0-9]*") +- Run(t, files, func(t *testing.T, env *Env) { +- env.OpenFile("a/a.go") +- for _, test := range []struct { +- re string // regexp indicating selected portion of input file +- want string // suffix of expected URL after /pkg/ +- }{ +- // current package +- {"package a", "example.com/a?view=1"}, // outside any decl +- {"in T.M", "example.com/a?view=1#T.M"}, // inside method (*T).M +- {"123", "example.com/a?view=1#K"}, // inside const/var decl +- {"T int", "example.com/a?view=1#T"}, // inside type decl +- +- // imported +- {"\"fmt\"", "fmt?view=1"}, // in import spec +- {"fmt[.]", "fmt?view=1"}, // use of PkgName +- {"Println", "fmt?view=1#Println"}, // use of imported pkg-level symbol +- {"fmt.Println", "fmt?view=1#Println"}, // qualified identifier +- {"Write", "bytes?view=1#Buffer.Write"}, // use of imported method +- +- // TODO(adonovan): +- // - xtest package -> ForTest +- // - field of imported struct -> nope +- // - exported method of nonexported type from another package +- // (e.g. types.Named.Obj) -> nope +- // Also: assert that Command.Title looks nice. 
+- } { +- uri := viewPkgDoc(t, env, env.RegexpSearch("a/a.go", test.re)) +- _, got, ok := strings.Cut(uri, "/pkg/") +- if !ok { +- t.Errorf("pattern %q => %s (invalid /pkg URL)", test.re, uri) +- continue +- } +- +- // Normalize the view ID, which varies by integration test mode. +- got = viewRE.ReplaceAllString(got, "view=1") +- +- if got != test.want { +- t.Errorf("pattern %q => %s; want %s", test.re, got, test.want) +- } +- } +- }) +-} +- +-// TestPkgDocFileImports tests that the doc links are rendered +-// as URLs based on the correct import mapping for the file in +-// which they appear. +-func TestPkgDocFileImports(t *testing.T) { +- const files = ` +--- go.mod -- +-module mod.com +-go 1.20 +- +--- a/a1.go -- +-// Package a refers to [b.T] [b.U] [alias.D] [d.D] [c.T] [c.U] [nope.Nope] +-package a +- +-import "mod.com/b" +-import alias "mod.com/d" +- +-// [b.T] indeed refers to b.T. +-// +-// [alias.D] refers to d.D +-// but [d.D] also refers to d.D. +-type A1 int +- +--- a/a2.go -- +-package a +- +-import b "mod.com/c" +- +-// [b.U] actually refers to c.U. +-type A2 int +- +--- b/b.go -- +-package b +- +-type T int +-type U int +- +--- c/c.go -- +-package c +- +-type T int +-type U int +- +--- d/d.go -- +-package d +- +-type D int +-` +- Run(t, files, func(t *testing.T, env *Env) { +- env.OpenFile("a/a1.go") +- uri1 := viewPkgDoc(t, env, env.Sandbox.Workdir.EntireFile("a/a1.go")) +- doc := get(t, uri1) +- +- // Check that the doc links are resolved using the +- // appropriate import mapping for the file in which +- // they appear. +- checkMatch(t, true, doc, `pkg/mod.com/b\?.*#T">b.T</a> indeed refers to b.T`) +- checkMatch(t, true, doc, `pkg/mod.com/c\?.*#U">b.U</a> actually refers to c.U`) +- +- // Check that doc links can be resolved using either +- // the original or the local name when they refer to a +- // renaming import. (Local names are preferred.) +- checkMatch(t, true, doc, `pkg/mod.com/d\?.*#D">alias.D</a> refers to d.D`) +- checkMatch(t, true, doc, `pkg/mod.com/d\?.*#D">d.D</a> also refers to d.D`) +- +- // Check that links in the package doc comment are +- // resolved, and relative to the correct file (a1.go). +- checkMatch(t, true, doc, `Package a refers to.*pkg/mod.com/b\?.*#T">b.T</a>`) +- checkMatch(t, true, doc, `Package a refers to.*pkg/mod.com/b\?.*#U">b.U</a>`) +- checkMatch(t, true, doc, `Package a refers to.*pkg/mod.com/d\?.*#D">alias.D</a>`) +- checkMatch(t, true, doc, `Package a refers to.*pkg/mod.com/d\?.*#D">d.D</a>`) +- checkMatch(t, true, doc, `Package a refers to.*pkg/mod.com/c\?.*#T">c.T</a>`) +- checkMatch(t, true, doc, `Package a refers to.*pkg/mod.com/c\?.*#U">c.U</a>`) +- checkMatch(t, true, doc, `Package a refers to.* \[nope.Nope\]`) +- }) +-} +- +-// TestPkgDocConstructorOfUnexported tests that exported constructor +-// functions (NewT) whose result type (t) is unexported are not +-// discarded but are presented as ordinary top-level functions (#69553). 
+-func TestPkgDocConstructorOfUnexported(t *testing.T) { +- const files = ` +--- go.mod -- +-module mod.com +-go 1.20 +- +--- a/a.go -- +-package a +- +-func A() {} +-func Z() {} +- +-type unexported int +-func NewUnexported() unexported // exported constructor of unexported type +- +-type Exported int +-func NewExported() Exported // exported constructor of exported type +-` +- Run(t, files, func(t *testing.T, env *Env) { +- env.OpenFile("a/a.go") +- uri1 := viewPkgDoc(t, env, env.Sandbox.Workdir.EntireFile("a/a.go")) +- doc := get(t, uri1) +- +- want := regexp.QuoteMeta(` +-<optgroup label='Functions'> +- <option label='A()' value='#A'/> +- <option label='NewUnexported()' value='#NewUnexported'/> +- <option label='Z()' value='#Z'/> +-</optgroup> +-<optgroup label='Types'> +- <option label='Exported' value='#Exported'/> +-</optgroup> +-<optgroup label='type Exported'> +- <option label='NewExported()' value='#NewExported'/> +-</optgroup>`) +- checkMatch(t, true, doc, want) +- }) +-} +- +-// viewPkgDoc invokes the "Browse package documentation" code action +-// at the specified location. It returns the URI of the document, or +-// fails the test. +-func viewPkgDoc(t *testing.T, env *Env, loc protocol.Location) protocol.URI { +- // Invoke the "Browse package documentation" code +- // action to start the server. +- actions := env.CodeAction(loc, nil, 0) +- docAction, err := codeActionByKind(actions, settings.GoDoc) +- if err != nil { +- t.Fatal(err) +- } +- +- // Execute the command. +- // Its side effect should be a single showDocument request. +- params := &protocol.ExecuteCommandParams{ +- Command: docAction.Command.Command, +- Arguments: docAction.Command.Arguments, +- } +- var result any +- collectDocs := env.Awaiter.ListenToShownDocuments() +- env.ExecuteCommand(params, &result) +- +- doc := shownDocument(t, collectDocs(), "http:") +- if doc == nil { +- t.Fatalf("no showDocument call had 'http:' prefix") +- } +- if false { +- t.Log("showDocument(package doc) URL:", doc.URI) +- } +- return doc.URI +-} +diff -urN a/gopls/internal/test/integration/web/splitpkg_test.go b/gopls/internal/test/integration/web/splitpkg_test.go +--- a/gopls/internal/test/integration/web/splitpkg_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/integration/web/splitpkg_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,181 +0,0 @@ +-// Copyright 2025 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package web_test +- +-import ( +- "bytes" +- "encoding/json" +- "fmt" +- "io" +- "net/http" +- "net/url" +- "slices" +- "strings" +- "testing" +- +- "github.com/google/go-cmp/cmp" +- "golang.org/x/tools/gopls/internal/golang/splitpkg" +- "golang.org/x/tools/gopls/internal/settings" +- . "golang.org/x/tools/gopls/internal/test/integration" +-) +- +-// TestSplitPackage is a basic test of the web-based split package tool. +-func TestSplitPackage(t *testing.T) { +- const files = ` +--- go.mod -- +-module example.com +- +--- a/a.go -- +-package a +- +-func a() { b1() } +- +-func b1() { b2() } +-func b2() { b1(); c() } +- +-// EOF +--- a/b.go -- +-package a +- +-func c() { d() } +- +-func d() {} +-` +- Run(t, files, func(t *testing.T, env *Env) { +- env.OpenFile("a/a.go") +- +- // Get the web page and do some rudimentary checks. +- // Most of the action happens in *.js (which we can't test) +- // and in interaction with the JSON endpoints (which we can). 
+- loc := env.RegexpSearch("a/a.go", "package") +- uri, page := codeActionWebPage(t, env, settings.GoSplitPackage, loc) +- +- checkMatch(t, true, page, `<h1>Split package example.com/a</h1>`) +- +- // Now we interact using JSON, basically a trivial Go +- // version of the splitpkg.js code. +- +- // jsonHTTP performs a JSON-over-HTTP request to the specified path. +- jsonHTTP := func(method, path string, in, out any) { +- // Replace the /splitpkg portion of the main page's URL, +- // keeping everything else. +- u, err := url.Parse(uri) +- if err != nil { +- t.Fatalf("parsing URL: %v", err) +- } +- u.Path = strings.ReplaceAll(u.Path, "/splitpkg", path) +- +- // HTTP +- inJSON, err := json.Marshal(in) +- if err != nil { +- t.Fatalf("encoding input: %v", err) +- } +- t.Logf("%s: in=%s", path, inJSON) +- req, err := http.NewRequest(method, u.String(), bytes.NewReader(inJSON)) +- if err != nil { +- t.Fatalf("NewRequest: %v", err) +- } +- req.Header.Set("Content-Type", "application/json") +- resp, err := http.DefaultClient.Do(req) +- if err != nil { +- t.Fatalf("HTTP request: %v", err) +- } +- defer resp.Body.Close() +- outJSON, err := io.ReadAll(resp.Body) +- if err != nil { +- t.Fatalf("reading output: %v", err) +- } +- t.Logf("%s: out=%s", path, outJSON) +- if out != nil { +- if err := json.Unmarshal(outJSON, out); err != nil { +- t.Fatalf("decoding output: %v", err) +- } +- } +- } +- +- // checkFileDecls queries the current package's decls grouped by file +- // and asserts that they match the description of the wanted state. +- checkFileDecls := func(want string) { +- var res splitpkg.ResultJSON +- jsonHTTP("GET", "/splitpkg-json", nil, &res) +- +- var lines []string +- for _, file := range res.Files { +- var buf strings.Builder +- fmt.Fprintf(&buf, "file %s:", file.Base) +- for _, decl := range file.Decls { +- for _, spec := range decl.Specs { +- fmt.Fprintf(&buf, " %s %s;", decl.Kind, spec.Name) +- } +- } +- lines = append(lines, buf.String()) +- } +- slices.Sort(lines) +- got := strings.Join(lines, "\n") +- +- if diff := cmp.Diff(want, got); diff != "" { +- t.Errorf("unexpected file decls:\ngot:\n%s\nwant:\n%s\ndiff:\n%s", got, want, diff) +- } +- } +- +- // checkEdges queries the current decomposition and asserts +- // that it matches the description of the wanted state. +- checkEdges := func(want string) { +- var res splitpkg.ResultJSON +- jsonHTTP("GET", "/splitpkg-json", nil, &res) +- +- var lines []string +- for _, edge := range res.Edges { +- var buf strings.Builder +- fmt.Fprintf(&buf, "edge %s -> %s:", res.Components.Names[edge.From], res.Components.Names[edge.To]) +- if edge.Cyclic { +- buf.WriteString(" ⚠") +- } +- for _, ref := range edge.Refs { +- fmt.Fprintf(&buf, " %s -> %s;", ref.From, ref.To) +- } +- lines = append(lines, buf.String()) +- } +- slices.Sort(lines) +- got := strings.Join(lines, "\n") +- if diff := cmp.Diff(want, got); diff != "" { +- t.Errorf("unexpected component edges:\ngot:\n%s\nwant:\n%s\ndiff:\n%s", got, want, diff) +- } +- } +- +- // Check the initial file/decl state. +- checkFileDecls(` +-file a.go: func a; func b1; func b2; +-file b.go: func c; func d;`[1:]) +- +- // Check that the set of decls updates as we edit the files. +- env.RegexpReplace("a/a.go", "// EOF", "func b3() {}") +- env.Await(env.DoneDiagnosingChanges()) +- checkFileDecls(` +-file a.go: func a; func b1; func b2; func b3; +-file b.go: func c; func d;`[1:]) +- +- // Post a cyclic decomposition. Check the report. 
+- jsonHTTP("POST", "/splitpkg-components", splitpkg.ComponentsJSON{ +- Names: []string{"zero", "one", "two", "three"}, +- Assignments: map[string]int{"a": 0, "b1": 1, "b2": 2, "c": 3, "d": 3}, +- }, nil) +- checkEdges(` +-edge one -> two: ⚠ b1 -> b2; +-edge two -> one: ⚠ b2 -> b1; +-edge two -> three: b2 -> c; +-edge zero -> one: a -> b1;`[1:]) +- +- // Post an acyclic decomposition. Check the report. +- jsonHTTP("POST", "/splitpkg-components", splitpkg.ComponentsJSON{ +- Names: []string{"zero", "one", "two", "three"}, +- Assignments: map[string]int{"a": 0, "b1": 1, "b2": 1, "c": 2, "d": 3}, +- }, nil) +- checkEdges(` +-edge one -> two: b2 -> c; +-edge two -> three: c -> d; +-edge zero -> one: a -> b1;`[1:]) +- }) +-} +diff -urN a/gopls/internal/test/integration/web/util_test.go b/gopls/internal/test/integration/web/util_test.go +--- a/gopls/internal/test/integration/web/util_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/integration/web/util_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,111 +0,0 @@ +-// Copyright 2025 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package web_test +- +-// This file defines web server testing utilities. +- +-import ( +- "fmt" +- "io" +- "net/http" +- "os" +- "regexp" +- "strings" +- "testing" +- +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/gopls/internal/protocol/command" +- "golang.org/x/tools/gopls/internal/test/integration" +- "golang.org/x/tools/gopls/internal/util/bug" +-) +- +-func TestMain(m *testing.M) { +- bug.PanicOnBugs = true +- os.Exit(integration.Main(m)) +-} +- +-// shownDocument returns the first shown document matching the URI prefix. +-// It may be nil. +-// As a side effect, it clears the list of accumulated shown documents. +-func shownDocument(t *testing.T, shown []*protocol.ShowDocumentParams, prefix string) *protocol.ShowDocumentParams { +- t.Helper() +- var first *protocol.ShowDocumentParams +- for _, sd := range shown { +- if strings.HasPrefix(sd.URI, prefix) { +- if first != nil { +- t.Errorf("got multiple showDocument requests: %#v", shown) +- break +- } +- first = sd +- } +- } +- return first +-} +- +-// get fetches the content of a document over HTTP. +-func get(t *testing.T, url string) []byte { +- t.Helper() +- resp, err := http.Get(url) +- if err != nil { +- t.Fatal(err) +- } +- defer resp.Body.Close() +- got, err := io.ReadAll(resp.Body) +- if err != nil { +- t.Fatal(err) +- } +- return got +-} +- +-// checkMatch asserts that got matches (or doesn't match, if !want) the pattern. +-func checkMatch(t *testing.T, want bool, got []byte, pattern string) { +- t.Helper() +- if regexp.MustCompile(pattern).Match(got) != want { +- if want { +- t.Errorf("input did not match wanted pattern %q; got:\n%s", pattern, got) +- } else { +- t.Errorf("input matched unwanted pattern %q; got:\n%s", pattern, got) +- } +- } +-} +- +-// codeActionByKind returns the first action of (exactly) the specified kind, or an error. +-func codeActionByKind(actions []protocol.CodeAction, kind protocol.CodeActionKind) (*protocol.CodeAction, error) { +- for _, act := range actions { +- if act.Kind == kind { +- return &act, nil +- } +- } +- return nil, fmt.Errorf("can't find action with kind %s, only %#v", kind, actions) +-} +- +-// codeActionWebPage returns the URL and content of the page opened by the specified code action. 
+-func codeActionWebPage(t *testing.T, env *integration.Env, kind protocol.CodeActionKind, loc protocol.Location) (string, []byte) { +- actions, err := env.Editor.CodeAction(env.Ctx, loc, nil, protocol.CodeActionUnknownTrigger) +- if err != nil { +- t.Fatalf("CodeAction: %v", err) +- } +- action, err := codeActionByKind(actions, kind) +- if err != nil { +- t.Fatal(err) +- } +- +- // Execute the command. +- // Its side effect should be a single showDocument request. +- params := &protocol.ExecuteCommandParams{ +- Command: action.Command.Command, +- Arguments: action.Command.Arguments, +- } +- var result command.DebuggingResult +- collectDocs := env.Awaiter.ListenToShownDocuments() +- env.ExecuteCommand(params, &result) +- doc := shownDocument(t, collectDocs(), "http:") +- if doc == nil { +- t.Fatalf("no showDocument call had 'file:' prefix") +- } +- t.Log("showDocument(package doc) URL:", doc.URI) +- +- return doc.URI, get(t, doc.URI) +-} +diff -urN a/gopls/internal/test/integration/workspace/adhoc_test.go b/gopls/internal/test/integration/workspace/adhoc_test.go +--- a/gopls/internal/test/integration/workspace/adhoc_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/integration/workspace/adhoc_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,39 +0,0 @@ +-// Copyright 2022 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package workspace +- +-import ( +- "testing" +- +- . "golang.org/x/tools/gopls/internal/test/integration" +-) +- +-// Test for golang/go#57209: editing a file in an ad-hoc package should not +-// trigger conflicting diagnostics. +-func TestAdhoc_Edits(t *testing.T) { +- const files = ` +--- a.go -- +-package foo +- +-const X = 1 +- +--- b.go -- +-package foo +- +-// import "errors" +- +-const Y = X +-` +- +- Run(t, files, func(t *testing.T, env *Env) { +- env.OpenFile("b.go") +- +- for range 10 { +- env.RegexpReplace("b.go", `// import "errors"`, `import "errors"`) +- env.RegexpReplace("b.go", `import "errors"`, `// import "errors"`) +- env.AfterChange(NoDiagnostics()) +- } +- }) +-} +diff -urN a/gopls/internal/test/integration/workspace/broken_test.go b/gopls/internal/test/integration/workspace/broken_test.go +--- a/gopls/internal/test/integration/workspace/broken_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/integration/workspace/broken_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,260 +0,0 @@ +-// Copyright 2022 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package workspace +- +-import ( +- "strings" +- "testing" +- +- "golang.org/x/tools/gopls/internal/server" +- . "golang.org/x/tools/gopls/internal/test/integration" +-) +- +-// This file holds various tests for UX with respect to broken workspaces. +-// +-// TODO: consolidate other tests here. +-// +-// TODO: write more tests: +-// - an explicit GOWORK value that doesn't exist +-// - using modules and/or GOWORK inside of GOPATH? +- +-// Test for golang/go#53933 +-func TestBrokenWorkspace_DuplicateModules(t *testing.T) { +- // This proxy module content is replaced by the workspace, but is still +- // required for module resolution to function in the Go command. 
+- const proxy = ` +--- example.com/foo@v0.0.1/go.mod -- +-module example.com/foo +- +-go 1.12 +-` +- +- const src = ` +--- go.work -- +-go 1.18 +- +-use ( +- ./package1 +- ./package1/vendor/example.com/foo +- ./package2 +- ./package2/vendor/example.com/foo +-) +- +--- package1/go.mod -- +-module mod.test +- +-go 1.18 +- +-require example.com/foo v0.0.1 +--- package1/main.go -- +-package main +- +-import "example.com/foo" +- +-func main() { +- _ = foo.CompleteMe +-} +--- package1/vendor/example.com/foo/go.mod -- +-module example.com/foo +- +-go 1.18 +--- package1/vendor/example.com/foo/foo.go -- +-package foo +- +-const CompleteMe = 111 +--- package2/go.mod -- +-module mod2.test +- +-go 1.18 +- +-require example.com/foo v0.0.1 +--- package2/main.go -- +-package main +- +-import "example.com/foo" +- +-func main() { +- _ = foo.CompleteMe +-} +--- package2/vendor/example.com/foo/go.mod -- +-module example.com/foo +- +-go 1.18 +--- package2/vendor/example.com/foo/foo.go -- +-package foo +- +-const CompleteMe = 222 +-` +- +- WithOptions( +- ProxyFiles(proxy), +- ).Run(t, src, func(t *testing.T, env *Env) { +- env.OpenFile("package1/main.go") +- env.AfterChange( +- OutstandingWork(server.WorkspaceLoadFailure, `module example.com/foo appears multiple times in workspace`), +- ) +- +- // Remove the redundant vendored copy of example.com. +- env.WriteWorkspaceFile("go.work", `go 1.18 +- use ( +- ./package1 +- ./package2 +- ./package2/vendor/example.com/foo +- ) +- `) +- env.AfterChange(NoOutstandingWork(IgnoreTelemetryPromptWork)) +- +- // Check that definitions in package1 go to the copy vendored in package2. +- location := string(env.FirstDefinition(env.RegexpSearch("package1/main.go", "CompleteMe")).URI) +- const wantLocation = "package2/vendor/example.com/foo/foo.go" +- if !strings.HasSuffix(location, wantLocation) { +- t.Errorf("got definition of CompleteMe at %q, want %q", location, wantLocation) +- } +- }) +-} +- +-// Test for golang/go#43186: correcting the module path should fix errors +-// without restarting gopls. +-func TestBrokenWorkspace_WrongModulePath(t *testing.T) { +- const files = ` +--- go.mod -- +-module mod.testx +- +-go 1.18 +--- p/internal/foo/foo.go -- +-package foo +- +-const C = 1 +--- p/internal/bar/bar.go -- +-package bar +- +-import "mod.test/p/internal/foo" +- +-const D = foo.C + 1 +--- p/internal/bar/bar_test.go -- +-package bar_test +- +-import ( +- "mod.test/p/internal/foo" +- . 
"mod.test/p/internal/bar" +-) +- +-const E = D + foo.C +--- p/internal/baz/baz_test.go -- +-package baz_test +- +-import ( +- named "mod.test/p/internal/bar" +-) +- +-const F = named.D - 3 +-` +- +- Run(t, files, func(t *testing.T, env *Env) { +- env.OpenFile("p/internal/bar/bar.go") +- env.AfterChange( +- Diagnostics(env.AtRegexp("p/internal/bar/bar.go", "\"mod.test/p/internal/foo\"")), +- ) +- env.OpenFile("go.mod") +- env.RegexpReplace("go.mod", "mod.testx", "mod.test") +- env.SaveBuffer("go.mod") // saving triggers a reload +- env.AfterChange(NoDiagnostics()) +- }) +-} +- +-func TestMultipleModules_Warning(t *testing.T) { +- t.Skip("temporary skip for golang/go#57979: revisit after zero-config logic is in place") +- +- msgForVersion := func(ver int) string { +- if ver >= 18 { +- return `gopls was not able to find modules in your workspace.` +- } else { +- return `gopls requires a module at the root of your workspace.` +- } +- } +- +- const modules = ` +--- a/go.mod -- +-module a.com +- +-go 1.12 +--- a/a.go -- +-package a +--- a/empty.go -- +-// an empty file +--- b/go.mod -- +-module b.com +- +-go 1.12 +--- b/b.go -- +-package b +-` +- for _, go111module := range []string{"on", "auto"} { +- t.Run("GO111MODULE="+go111module, func(t *testing.T) { +- WithOptions( +- Modes(Default), +- EnvVars{"GO111MODULE": go111module}, +- ).Run(t, modules, func(t *testing.T, env *Env) { +- ver := env.GoVersion() +- msg := msgForVersion(ver) +- env.OpenFile("a/a.go") +- env.OpenFile("a/empty.go") +- env.OpenFile("b/go.mod") +- env.AfterChange( +- Diagnostics(env.AtRegexp("a/a.go", "package a")), +- Diagnostics(env.AtRegexp("b/go.mod", "module b.com")), +- OutstandingWork(server.WorkspaceLoadFailure, msg), +- ) +- +- // Changing the workspace folders to the valid modules should resolve +- // the workspace errors and diagnostics. +- // +- // TODO(rfindley): verbose work tracking doesn't follow changing the +- // workspace folder, therefore we can't invoke AfterChange here. +- env.ChangeWorkspaceFolders("a", "b") +- env.Await( +- NoDiagnostics(ForFile("a/a.go")), +- NoDiagnostics(ForFile("b/go.mod")), +- NoOutstandingWork(IgnoreTelemetryPromptWork), +- ) +- +- env.ChangeWorkspaceFolders(".") +- +- // TODO(rfindley): when GO111MODULE=auto, we need to open or change a +- // file here in order to detect a critical error. This is because gopls +- // has forgotten about a/a.go, and therefore doesn't hit the heuristic +- // "all packages are command-line-arguments". +- // +- // This is broken, and could be fixed by adjusting the heuristic to +- // account for the scenario where there are *no* workspace packages, or +- // (better) trying to get workspace packages for each open file. See +- // also golang/go#54261. +- env.OpenFile("b/b.go") +- env.AfterChange( +- // TODO(rfindley): fix these missing diagnostics. +- // Diagnostics(env.AtRegexp("a/a.go", "package a")), +- // Diagnostics(env.AtRegexp("b/go.mod", "module b.com")), +- Diagnostics(env.AtRegexp("b/b.go", "package b")), +- OutstandingWork(server.WorkspaceLoadFailure, msg), +- ) +- }) +- }) +- } +- +- // Expect no warning if GO111MODULE=auto in a directory in GOPATH. 
+- t.Run("GOPATH_GO111MODULE_auto", func(t *testing.T) { +- WithOptions( +- Modes(Default), +- EnvVars{"GO111MODULE": "auto"}, +- InGOPATH(), +- ).Run(t, modules, func(t *testing.T, env *Env) { +- env.OpenFile("a/a.go") +- env.AfterChange( +- NoDiagnostics(ForFile("a/a.go")), +- NoOutstandingWork(IgnoreTelemetryPromptWork), +- ) +- }) +- }) +-} +diff -urN a/gopls/internal/test/integration/workspace/didcreatefiles_test.go b/gopls/internal/test/integration/workspace/didcreatefiles_test.go +--- a/gopls/internal/test/integration/workspace/didcreatefiles_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/integration/workspace/didcreatefiles_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,146 +0,0 @@ +-// Copyright 2025 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package workspace +- +-import ( +- "context" +- "fmt" +- "testing" +- +- . "golang.org/x/tools/gopls/internal/test/integration" +-) +- +-// TestAutoFillPackageDecl tests that creation of a new .go file causes +-// gopls to choose a sensible package name and fill in the package declaration. +-func TestAutoFillPackageDecl(t *testing.T) { +- const existFiles = ` +--- go.mod -- +-module mod.com +- +-go 1.12 +- +--- dog/a_test.go -- +-package dog +--- fruits/apple.go -- +-package apple +- +-fun apple() int { +- return 0 +-} +- +--- license/license.go -- +-/* Copyright 2025 The Go Authors. All rights reserved. +-Use of this source code is governed by a BSD-style +-license that can be found in the LICENSE file. */ +- +-package license +- +--- license1/license.go -- +-// Copyright 2025 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package license1 +- +--- cmd/main.go -- +-package main +- +--- integration/a_test.go -- +-package integration_test +- +--- nopkg/testfile.go -- +-package +-` +- for _, tc := range []struct { +- name string +- newfile string +- want string +- }{ +- { +- name: "new file in folder with a_test.go", +- newfile: "dog/newfile.go", +- want: "package dog\n", +- }, +- { +- name: "new file in folder with go file", +- newfile: "fruits/newfile.go", +- want: "package apple\n", +- }, +- { +- name: "new test file in folder with go file", +- newfile: "fruits/newfile_test.go", +- want: "package apple\n", +- }, +- { +- name: "new file in folder with go file that contains license comment", +- newfile: "license/newfile.go", +- want: `/* Copyright 2025 The Go Authors. All rights reserved. +-Use of this source code is governed by a BSD-style +-license that can be found in the LICENSE file. */ +- +-package license +-`, +- }, +- { +- name: "new file in folder with go file that contains license comment", +- newfile: "license1/newfile.go", +- want: `// Copyright 2025 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. 
+- +-package license1 +-`, +- }, +- { +- name: "new file in folder with main package", +- newfile: "cmd/newfile.go", +- want: "package main\n", +- }, +- { +- name: "new file in empty folder", +- newfile: "empty_folder/newfile.go", +- want: "package emptyfolder\n", +- }, +- { +- name: "new file in folder with integration_test package", +- newfile: "integration/newfile.go", +- want: "package integration\n", +- }, +- { +- name: "new test file in folder with integration_test package", +- newfile: "integration/newfile_test.go", +- want: "package integration\n", +- }, +- { +- name: "new file in folder with incomplete package clause", +- newfile: "incomplete/newfile.go", +- want: "package incomplete\n", +- }, +- { +- name: "package completion for dir name with punctuation", +- newfile: "123f_r.u~its-123/newfile.go", +- want: "package fruits123\n", +- }, +- { +- name: "package completion for dir name with invalid dir name", +- newfile: "123f_r.u~its-123/newfile.go", +- want: "package fruits123\n", +- }, +- } { +- t.Run(tc.name, func(t *testing.T) { +- createFiles := fmt.Sprintf("%s\n-- %s --", existFiles, tc.newfile) +- Run(t, createFiles, func(t *testing.T, env *Env) { +- env.DidCreateFiles(env.Editor.DocumentURI(tc.newfile)) +- // save buffer to ensure the edits take effects in the file system. +- if err := env.Editor.SaveBuffer(context.Background(), tc.newfile); err != nil { +- t.Fatal(err) +- } +- if got := env.FileContent(tc.newfile); tc.want != got { +- t.Fatalf("want '%s' but got '%s'", tc.want, got) +- } +- }) +- }) +- } +-} +diff -urN a/gopls/internal/test/integration/workspace/directoryfilters_test.go b/gopls/internal/test/integration/workspace/directoryfilters_test.go +--- a/gopls/internal/test/integration/workspace/directoryfilters_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/integration/workspace/directoryfilters_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,207 +0,0 @@ +-// Copyright 2022 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package workspace +- +-import ( +- "sort" +- "strings" +- "testing" +- +- . "golang.org/x/tools/gopls/internal/test/integration" +-) +- +-// This file contains regression tests for the directoryFilters setting. +-// +-// TODO: +-// - consolidate some of these tests into a single test +-// - add more tests for changing directory filters +- +-func TestDirectoryFilters(t *testing.T) { +- WithOptions( +- ProxyFiles(workspaceProxy), +- WorkspaceFolders("pkg"), +- Settings{ +- "directoryFilters": []string{"-inner"}, +- }, +- ).Run(t, workspaceModule, func(t *testing.T, env *Env) { +- syms := env.Symbol("Hi") +- sort.Slice(syms, func(i, j int) bool { return syms[i].ContainerName < syms[j].ContainerName }) +- for _, s := range syms { +- if strings.Contains(s.ContainerName, "inner") { +- t.Errorf("WorkspaceSymbol: found symbol %q with container %q, want \"inner\" excluded", s.Name, s.ContainerName) +- } +- } +- }) +-} +- +-func TestDirectoryFiltersLoads(t *testing.T) { +- // exclude, and its error, should be excluded from the workspace. 
+- const files = ` +--- go.mod -- +-module example.com +- +-go 1.12 +--- exclude/exclude.go -- +-package exclude +- +-const _ = Nonexistant +-` +- +- WithOptions( +- Settings{"directoryFilters": []string{"-exclude"}}, +- ).Run(t, files, func(t *testing.T, env *Env) { +- env.OnceMet( +- InitialWorkspaceLoad, +- NoDiagnostics(ForFile("exclude/x.go")), +- ) +- }) +-} +- +-func TestDirectoryFiltersTransitiveDep(t *testing.T) { +- // Even though exclude is excluded from the workspace, it should +- // still be importable as a non-workspace package. +- const files = ` +--- go.mod -- +-module example.com +- +-go 1.12 +--- include/include.go -- +-package include +-import "example.com/exclude" +- +-const _ = exclude.X +--- exclude/exclude.go -- +-package exclude +- +-const _ = Nonexistant // should be ignored, since this is a non-workspace package +-const X = 1 +-` +- +- WithOptions( +- Settings{"directoryFilters": []string{"-exclude"}}, +- ).Run(t, files, func(t *testing.T, env *Env) { +- env.OnceMet( +- InitialWorkspaceLoad, +- NoDiagnostics(ForFile("exclude/exclude.go")), // filtered out +- NoDiagnostics(ForFile("include/include.go")), // successfully builds +- ) +- }) +-} +- +-// Test for golang/go#46438: support for '**' in directory filters. +-func TestDirectoryFilters_Wildcard(t *testing.T) { +- filters := []string{"-**/bye"} +- WithOptions( +- ProxyFiles(workspaceProxy), +- WorkspaceFolders("pkg"), +- Settings{ +- "directoryFilters": filters, +- }, +- ).Run(t, workspaceModule, func(t *testing.T, env *Env) { +- syms := env.Symbol("Bye") +- sort.Slice(syms, func(i, j int) bool { return syms[i].ContainerName < syms[j].ContainerName }) +- for _, s := range syms { +- if strings.Contains(s.ContainerName, "bye") { +- t.Errorf("WorkspaceSymbol: found symbol %q with container %q with filters %v", s.Name, s.ContainerName, filters) +- } +- } +- }) +-} +- +-// Test for golang/go#52993: wildcard directoryFilters should apply to +-// goimports scanning as well. +-func TestDirectoryFilters_ImportScanning(t *testing.T) { +- const files = ` +--- go.mod -- +-module mod.test +- +-go 1.12 +--- main.go -- +-package main +- +-func main() { +- bye.Goodbye() +- hi.Hello() +-} +--- p/bye/bye.go -- +-package bye +- +-func Goodbye() {} +--- hi/hi.go -- +-package hi +- +-func Hello() {} +-` +- +- WithOptions( +- Settings{ +- "directoryFilters": []string{"-**/bye", "-hi"}, +- }, +- ).Run(t, files, func(t *testing.T, env *Env) { +- env.OpenFile("main.go") +- beforeSave := env.BufferText("main.go") +- env.OrganizeImports("main.go") +- got := env.BufferText("main.go") +- if got != beforeSave { +- t.Errorf("after organizeImports code action, got modified buffer:\n%s", got) +- } +- }) +-} +- +-// Test for golang/go#52993: non-wildcard directoryFilters should still be +-// applied relative to the workspace folder, not the module root. 
+-func TestDirectoryFilters_MultiRootImportScanning(t *testing.T) { +- const files = ` +--- go.work -- +-go 1.18 +- +-use ( +- a +- b +-) +--- a/go.mod -- +-module mod1.test +- +-go 1.18 +--- a/main.go -- +-package main +- +-func main() { +- hi.Hi() +-} +--- a/hi/hi.go -- +-package hi +- +-func Hi() {} +--- b/go.mod -- +-module mod2.test +- +-go 1.18 +--- b/main.go -- +-package main +- +-func main() { +- hi.Hi() +-} +--- b/hi/hi.go -- +-package hi +- +-func Hi() {} +-` +- +- WithOptions( +- Settings{ +- "directoryFilters": []string{"-hi"}, // this test fails with -**/hi +- }, +- ).Run(t, files, func(t *testing.T, env *Env) { +- env.OpenFile("a/main.go") +- beforeSave := env.BufferText("a/main.go") +- env.OrganizeImports("a/main.go") +- got := env.BufferText("a/main.go") +- if got == beforeSave { +- t.Errorf("after organizeImports code action, got identical buffer:\n%s", got) +- } +- }) +-} +diff -urN a/gopls/internal/test/integration/workspace/fromenv_test.go b/gopls/internal/test/integration/workspace/fromenv_test.go +--- a/gopls/internal/test/integration/workspace/fromenv_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/integration/workspace/fromenv_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,76 +0,0 @@ +-// Copyright 2022 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package workspace +- +-import ( +- "fmt" +- "path/filepath" +- "testing" +- +- . "golang.org/x/tools/gopls/internal/test/integration" +-) +- +-// Test that setting go.work via environment variables or settings works. +-func TestUseGoWorkOutsideTheWorkspace(t *testing.T) { +- // As discussed in +- // https://github.com/golang/go/issues/59458#issuecomment-1513794691, we must +- // use \-separated paths in go.work use directives for this test to work +- // correctly on windows. +- var files = fmt.Sprintf(` +--- work/a/go.mod -- +-module a.com +- +-go 1.12 +--- work/a/a.go -- +-package a +--- work/b/go.mod -- +-module b.com +- +-go 1.12 +--- work/b/b.go -- +-package b +- +-func _() { +- x := 1 // unused +-} +--- other/c/go.mod -- +-module c.com +- +-go 1.18 +--- other/c/c.go -- +-package c +--- config/go.work -- +-go 1.18 +- +-use ( +- %s +- %s +- %s +-) +-`, +- filepath.Join("$SANDBOX_WORKDIR", "work", "a"), +- filepath.Join("$SANDBOX_WORKDIR", "work", "b"), +- filepath.Join("$SANDBOX_WORKDIR", "other", "c"), +- ) +- +- WithOptions( +- WorkspaceFolders("work"), // use a nested workspace dir, so that GOWORK is outside the workspace +- EnvVars{"GOWORK": filepath.Join("$SANDBOX_WORKDIR", "config", "go.work")}, +- ).Run(t, files, func(t *testing.T, env *Env) { +- // When we have an explicit GOWORK set, we should get a file watch request. +- env.OnceMet( +- InitialWorkspaceLoad, +- FileWatchMatching(`other`), +- FileWatchMatching(`config.go\.work`), +- ) +- env.Await(FileWatchMatching(`config.go\.work`)) +- // Even though work/b is not open, we should get its diagnostics as it is +- // included in the workspace. 
+- env.OpenFile("work/a/a.go") +- env.AfterChange( +- Diagnostics(env.AtRegexp("work/b/b.go", "x := 1"), WithMessage("not used")), +- ) +- }) +-} +diff -urN a/gopls/internal/test/integration/workspace/goversion_test.go b/gopls/internal/test/integration/workspace/goversion_test.go +--- a/gopls/internal/test/integration/workspace/goversion_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/integration/workspace/goversion_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,126 +0,0 @@ +-// Copyright 2024 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package workspace +- +-import ( +- "flag" +- "os" +- "os/exec" +- "runtime" +- "strings" +- "testing" +- +- . "golang.org/x/tools/gopls/internal/test/integration" +- "golang.org/x/tools/internal/testenv" +-) +- +-var go121bin = flag.String("go121bin", "", "bin directory containing go 1.21 or later") +- +-// TODO(golang/go#65917): delete this test once we no longer support building +-// gopls with older Go versions. +-func TestCanHandlePatchVersions(t *testing.T) { +- // This test verifies the fixes for golang/go#66195 and golang/go#66636 -- +- // that gopls does not crash when encountering a go version with a patch +- // number in the go.mod file. +- // +- // This is tricky to test, because the regression requires that gopls is +- // built with an older go version, and then the environment is upgraded to +- // have a more recent go. To set up this scenario, the test requires a path +- // to a bin directory containing go1.21 or later. +- if *go121bin == "" { +- t.Skip("-go121bin directory is not set") +- } +- +- if runtime.GOOS != "linux" && runtime.GOOS != "darwin" { +- t.Skip("requires linux or darwin") // for PATH separator +- } +- +- path := os.Getenv("PATH") +- t.Setenv("PATH", *go121bin+":"+path) +- +- const files = ` +--- go.mod -- +-module example.com/bar +- +-go 1.21.1 +- +--- p.go -- +-package bar +- +-type I interface { string } +-` +- +- WithOptions( +- EnvVars{ +- "PATH": path, +- }, +- ).Run(t, files, func(t *testing.T, env *Env) { +- env.OpenFile("p.go") +- env.AfterChange( +- NoDiagnostics(ForFile("p.go")), +- ) +- }) +-} +- +-func TestTypeCheckingFutureVersions(t *testing.T) { +- // This test checks the regression in golang/go#66677, where go/types fails +- // silently when the language version is 1.22. +- // +- // It does this by recreating the scenario of a toolchain upgrade to 1.22, as +- // reported in the issue. For this to work, the test must be able to download +- // toolchains from proxy.golang.org. +- // +- // This is really only a problem for Go 1.21, because with Go 1.23, the bug +- // is fixed, and starting with 1.23 we're going to *require* 1.23 to build +- // gopls. +- // +- // TODO(golang/go#65917): delete this test after Go 1.23 is released and +- // gopls requires the latest Go to build. +- testenv.SkipAfterGo1Point(t, 21) +- +- if testing.Short() { +- t.Skip("skipping with -short, as this test uses the network") +- } +- +- // If go 1.22.2 is already available in the module cache, reuse it rather +- // than downloading it anew. 
+- out, err := exec.Command("go", "env", "GOPATH").Output() +- if err != nil { +- t.Fatal(err) +- } +- gopath := strings.TrimSpace(string(out)) // use the ambient 1.22.2 toolchain if available +- +- const files = ` +--- go.mod -- +-module example.com/foo +- +-go 1.22.2 +- +--- main.go -- +-package main +- +-func main() { +- x := 1 +-} +-` +- +- WithOptions( +- Modes(Default), // slow test, only run in one mode +- EnvVars{ +- "GOPATH": gopath, +- "GOTOOLCHAIN": "", // not local +- "GOPROXY": "https://proxy.golang.org", +- "GOSUMDB": "sum.golang.org", +- }, +- ).Run(t, files, func(t *testing.T, env *Env) { +- env.OpenFile("main.go") +- env.AfterChange( +- Diagnostics( +- env.AtRegexp("main.go", "x"), +- WithMessage("not used"), +- ), +- ) +- }) +-} +diff -urN a/gopls/internal/test/integration/workspace/metadata_test.go b/gopls/internal/test/integration/workspace/metadata_test.go +--- a/gopls/internal/test/integration/workspace/metadata_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/integration/workspace/metadata_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,236 +0,0 @@ +-// Copyright 2022 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package workspace +- +-import ( +- "strings" +- "testing" +- +- . "golang.org/x/tools/gopls/internal/test/integration" +-) +- +-// TODO(rfindley): move workspace tests related to metadata bugs into this +-// file. +- +-func TestFixImportDecl(t *testing.T) { +- const src = ` +--- go.mod -- +-module mod.test +- +-go 1.12 +--- p.go -- +-package p +- +-import ( +- _ "fmt" +- +-const C = 42 +-` +- +- Run(t, src, func(t *testing.T, env *Env) { +- env.OpenFile("p.go") +- env.RegexpReplace("p.go", "\"fmt\"", "\"fmt\"\n)") +- env.AfterChange( +- NoDiagnostics(ForFile("p.go")), +- ) +- }) +-} +- +-// Test that moving ignoring a file via build constraints causes diagnostics to +-// be resolved. +-func TestIgnoreFile(t *testing.T) { +- const src = ` +--- go.mod -- +-module mod.test +- +-go 1.12 +--- foo.go -- +-package main +- +-func main() {} +--- bar.go -- +-package main +- +-func main() {} +- ` +- +- Run(t, src, func(t *testing.T, env *Env) { +- env.OpenFile("foo.go") +- env.OpenFile("bar.go") +- env.OnceMet( +- env.DoneWithOpen(), +- Diagnostics(env.AtRegexp("foo.go", "func (main)")), +- Diagnostics(env.AtRegexp("bar.go", "func (main)")), +- ) +- +- // Ignore bar.go. This should resolve diagnostics. +- env.RegexpReplace("bar.go", "package main", "//go:build ignore\n\npackage main") +- +- // To make this test pass with experimentalUseInvalidMetadata, we could make +- // an arbitrary edit that invalidates the snapshot, at which point the +- // orphaned diagnostics will be invalidated. +- // +- // But of course, this should not be necessary: we should invalidate stale +- // information when fresh metadata arrives. 
+- // env.RegexpReplace("foo.go", "package main", "package main // test") +- env.AfterChange( +- NoDiagnostics(ForFile("foo.go")), +- NoDiagnostics(ForFile("bar.go")), +- ) +- +- // If instead of 'ignore' (which gopls treats as a standalone package) we +- // used a different build tag, we should get a warning about having no +- // packages for bar.go +- env.RegexpReplace("bar.go", "ignore", "excluded") +- env.AfterChange( +- Diagnostics(env.AtRegexp("bar.go", "package (main)"), WithMessage("excluded due to its build tags")), +- ) +- }) +-} +- +-func TestReinitializeRepeatedly(t *testing.T) { +- const multiModule = ` +--- go.work -- +-go 1.18 +- +-use ( +- moda/a +- modb +-) +--- moda/a/go.mod -- +-module a.com +- +-require b.com v1.2.3 +--- moda/a/go.sum -- +-b.com v1.2.3 h1:tXrlXP0rnjRpKNmkbLYoWBdq0ikb3C3bKK9//moAWBI= +-b.com v1.2.3/go.mod h1:D+J7pfFBZK5vdIdZEFquR586vKKIkqG7Qjw9AxG5BQ8= +--- moda/a/a.go -- +-package a +- +-import ( +- "b.com/b" +-) +- +-func main() { +- var x int +- _ = b.Hello() +- // AAA +-} +--- modb/go.mod -- +-module b.com +- +--- modb/b/b.go -- +-package b +- +-func Hello() int { +- var x int +-} +-` +- WithOptions( +- ProxyFiles(workspaceModuleProxy), +- Settings{ +- // For this test, we want workspace diagnostics to start immediately +- // during change processing. +- "diagnosticsDelay": "0", +- }, +- ).Run(t, multiModule, func(t *testing.T, env *Env) { +- env.OpenFile("moda/a/a.go") +- env.AfterChange() +- +- // This test verifies that we fully process workspace reinitialization +- // (which allows GOPROXY), even when the reinitialized snapshot is +- // invalidated by subsequent changes. +- // +- // First, update go.work to remove modb. This will cause reinitialization +- // to fetch b.com from the proxy. +- env.WriteWorkspaceFile("go.work", "go 1.18\nuse moda/a") +- // Next, wait for gopls to start processing the change. Because we've set +- // diagnosticsDelay to zero, this will start diagnosing the workspace (and +- // try to reinitialize on the snapshot context). +- env.Await(env.StartedChangeWatchedFiles()) +- // Finally, immediately make a file change to cancel the previous +- // operation. This is racy, but will usually cause initialization to be +- // canceled. +- env.RegexpReplace("moda/a/a.go", "AAA", "BBB") +- env.AfterChange() +- // Now, to satisfy a definition request, gopls will try to reload moda. But +- // without access to the proxy (because this is no longer a +- // reinitialization), this loading will fail. +- loc := env.FirstDefinition(env.RegexpSearch("moda/a/a.go", "Hello")) +- got := env.Sandbox.Workdir.URIToPath(loc.URI) +- if want := "b.com@v1.2.3/b/b.go"; !strings.HasSuffix(got, want) { +- t.Errorf("expected %s, got %v", want, got) +- } +- }) +-} +- +-// Test for golang/go#59458. With lazy module loading, we may not need +-// transitively required modules. +-func TestNestedModuleLoading_Issue59458(t *testing.T) { +- // In this test, module b.com/nested requires b.com/other, which in turn +- // requires b.com, but b.com/nested does not reach b.com through the package +- // graph. Therefore, b.com/nested does not need b.com on 1.17 and later, +- // thanks to graph pruning. +- // +- // We verify that we can load b.com/nested successfully. Previously, we +- // couldn't, because loading the pattern b.com/nested/... matched the module +- // b.com, which exists in the module graph but does not have a go.sum entry. 
+- +- const proxy = ` +--- b.com@v1.2.3/go.mod -- +-module b.com +- +-go 1.18 +--- b.com@v1.2.3/b/b.go -- +-package b +- +-func Hello() {} +- +--- b.com/other@v1.4.6/go.mod -- +-module b.com/other +- +-go 1.18 +- +-require b.com v1.2.3 +--- b.com/other@v1.4.6/go.sun -- +-b.com v1.2.3 h1:AGjCxWRJLUuJiZ21IUTByr9buoa6+B6Qh5LFhVLKpn4= +--- b.com/other@v1.4.6/bar/bar.go -- +-package bar +- +-import "b.com/b" +- +-func _() { +- b.Hello() +-} +--- b.com/other@v1.4.6/foo/foo.go -- +-package foo +- +-const Foo = 0 +-` +- +- const files = ` +--- go.mod -- +-module b.com/nested +- +-go 1.18 +- +-require b.com/other v1.4.6 +--- nested.go -- +-package nested +- +-import "b.com/other/foo" +- +-const C = foo.Foo +-` +- WithOptions( +- WriteGoSum("."), +- ProxyFiles(proxy), +- ).Run(t, files, func(t *testing.T, env *Env) { +- env.OnceMet( +- InitialWorkspaceLoad, +- NoDiagnostics(), +- ) +- }) +-} +diff -urN a/gopls/internal/test/integration/workspace/misspelling_test.go b/gopls/internal/test/integration/workspace/misspelling_test.go +--- a/gopls/internal/test/integration/workspace/misspelling_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/integration/workspace/misspelling_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,80 +0,0 @@ +-// Copyright 2023 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package workspace +- +-import ( +- "runtime" +- "testing" +- +- "golang.org/x/tools/gopls/internal/test/compare" +- . "golang.org/x/tools/gopls/internal/test/integration" +-) +- +-// Test for golang/go#57081. +-func TestFormattingMisspelledURI(t *testing.T) { +- if runtime.GOOS != "windows" && runtime.GOOS != "darwin" { +- t.Skip("golang/go#57081 only reproduces on case-insensitive filesystems.") +- } +- const files = ` +--- go.mod -- +-module mod.test +- +-go 1.19 +--- foo.go -- +-package foo +- +-const C = 2 // extra space is intentional +-` +- +- Run(t, files, func(t *testing.T, env *Env) { +- env.OpenFile("Foo.go") +- env.FormatBuffer("Foo.go") +- want := env.BufferText("Foo.go") +- +- if want == "" { +- t.Fatalf("Foo.go is empty") +- } +- +- // In golang/go#57081, we observed that if overlay cases don't match, gopls +- // will find (and format) the on-disk contents rather than the overlay, +- // resulting in invalid edits. +- // +- // Verify that this doesn't happen, by confirming that formatting is +- // idempotent. +- env.FormatBuffer("Foo.go") +- got := env.BufferText("Foo.go") +- if diff := compare.Text(want, got); diff != "" { +- t.Errorf("invalid content after second formatting:\n%s", diff) +- } +- }) +-} +- +-// Test that we can find packages for open files with different spelling on +-// case-insensitive file systems. 
+-func TestPackageForMisspelledURI(t *testing.T) { +- t.Skip("golang/go#57081: this test fails because the Go command does not load Foo.go correctly") +- if runtime.GOOS != "windows" && runtime.GOOS != "darwin" { +- t.Skip("golang/go#57081 only reproduces on case-insensitive filesystems.") +- } +- const files = ` +--- go.mod -- +-module mod.test +- +-go 1.19 +--- foo.go -- +-package foo +- +-const C = D +--- bar.go -- +-package foo +- +-const D = 2 +-` +- +- Run(t, files, func(t *testing.T, env *Env) { +- env.OpenFile("Foo.go") +- env.AfterChange(NoDiagnostics()) +- }) +-} +diff -urN a/gopls/internal/test/integration/workspace/modules_test.go b/gopls/internal/test/integration/workspace/modules_test.go +--- a/gopls/internal/test/integration/workspace/modules_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/integration/workspace/modules_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,161 +0,0 @@ +-// Copyright 2024 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package workspace +- +-import ( +- "sort" +- "strings" +- "testing" +- +- "github.com/google/go-cmp/cmp" +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/gopls/internal/protocol/command" +- . "golang.org/x/tools/gopls/internal/test/integration" +-) +- +-func TestModulesCmd(t *testing.T) { +- const goModView = ` +--- go.mod -- +-module foo +- +--- pkg/pkg.go -- +-package pkg +-func Pkg() +- +--- bar/bar.go -- +-package bar +-func Bar() +- +--- bar/baz/go.mod -- +-module baz +- +--- bar/baz/baz.go -- +-package baz +-func Baz() +-` +- +- const goWorkView = ` +--- go.work -- +-use ./foo +-use ./bar +- +--- foo/go.mod -- +-module foo +- +--- foo/foo.go -- +-package foo +-func Foo() +- +--- bar/go.mod -- +-module bar +- +--- bar/bar.go -- +-package bar +-func Bar() +-` +- +- t.Run("go.mod view", func(t *testing.T) { +- // If baz isn't loaded, it will not be included +- t.Run("unloaded", func(t *testing.T) { +- Run(t, goModView, func(t *testing.T, env *Env) { +- checkModules(t, env, env.Editor.DocumentURI(""), -1, []command.Module{ +- { +- Path: "foo", +- GoMod: env.Editor.DocumentURI("go.mod"), +- }, +- }) +- }) +- }) +- +- // With baz loaded and recursion enabled, baz will be included +- t.Run("recurse", func(t *testing.T) { +- Run(t, goModView, func(t *testing.T, env *Env) { +- env.OpenFile("bar/baz/baz.go") +- checkModules(t, env, env.Editor.DocumentURI(""), -1, []command.Module{ +- { +- Path: "baz", +- GoMod: env.Editor.DocumentURI("bar/baz/go.mod"), +- }, +- { +- Path: "foo", +- GoMod: env.Editor.DocumentURI("go.mod"), +- }, +- }) +- }) +- }) +- +- // With recursion=1, baz will not be included +- t.Run("depth", func(t *testing.T) { +- Run(t, goModView, func(t *testing.T, env *Env) { +- env.OpenFile("bar/baz/baz.go") +- checkModules(t, env, env.Editor.DocumentURI(""), 1, []command.Module{ +- { +- Path: "foo", +- GoMod: env.Editor.DocumentURI("go.mod"), +- }, +- }) +- }) +- }) +- +- // Baz will be included if it is requested specifically +- t.Run("nested", func(t *testing.T) { +- Run(t, goModView, func(t *testing.T, env *Env) { +- env.OpenFile("bar/baz/baz.go") +- checkModules(t, env, env.Editor.DocumentURI("bar/baz"), 0, []command.Module{ +- { +- Path: "baz", +- GoMod: env.Editor.DocumentURI("bar/baz/go.mod"), +- }, +- }) +- }) +- }) +- }) +- +- t.Run("go.work view", func(t *testing.T) { +- t.Run("base", func(t *testing.T) { +- Run(t, goWorkView, func(t *testing.T, env *Env) { +- 
checkModules(t, env, env.Editor.DocumentURI(""), 0, nil) +- }) +- }) +- +- t.Run("recursive", func(t *testing.T) { +- Run(t, goWorkView, func(t *testing.T, env *Env) { +- checkModules(t, env, env.Editor.DocumentURI(""), -1, []command.Module{ +- { +- Path: "bar", +- GoMod: env.Editor.DocumentURI("bar/go.mod"), +- }, +- { +- Path: "foo", +- GoMod: env.Editor.DocumentURI("foo/go.mod"), +- }, +- }) +- }) +- }) +- }) +-} +- +-func checkModules(t testing.TB, env *Env, dir protocol.DocumentURI, maxDepth int, want []command.Module) { +- t.Helper() +- +- cmd := command.NewModulesCommand("Modules", command.ModulesArgs{Dir: dir, MaxDepth: maxDepth}) +- var result command.ModulesResult +- env.ExecuteCommand(&protocol.ExecuteCommandParams{ +- Command: command.Modules.String(), +- Arguments: cmd.Arguments, +- }, &result) +- +- // The ordering of results is undefined and modules from a go.work view are +- // retrieved from a map, so sort the results to ensure consistency +- sort.Slice(result.Modules, func(i, j int) bool { +- a, b := result.Modules[i], result.Modules[j] +- return strings.Compare(a.Path, b.Path) < 0 +- }) +- +- diff := cmp.Diff(want, result.Modules) +- if diff != "" { +- t.Errorf("Modules(%v) returned unexpected diff (-want +got):\n%s", dir, diff) +- } +-} +diff -urN a/gopls/internal/test/integration/workspace/multi_folder_test.go b/gopls/internal/test/integration/workspace/multi_folder_test.go +--- a/gopls/internal/test/integration/workspace/multi_folder_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/integration/workspace/multi_folder_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,128 +0,0 @@ +-// Copyright 2023 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package workspace +- +-import ( +- "testing" +- +- . "golang.org/x/tools/gopls/internal/test/integration" +-) +- +-// TODO(rfindley): update the marker tests to support the concept of multiple +-// workspace folders, and move this there. +-func TestMultiView_Diagnostics(t *testing.T) { +- // In the past, gopls would only diagnose one View at a time +- // (the last to have changed). +- // +- // This test verifies that gopls can maintain diagnostics for multiple Views. +- const files = ` +- +--- a/go.mod -- +-module golang.org/lsptests/a +- +-go 1.20 +--- a/a.go -- +-package a +- +-func _() { +- x := 1 // unused +-} +--- b/go.mod -- +-module golang.org/lsptests/b +- +-go 1.20 +--- b/b.go -- +-package b +- +-func _() { +- y := 2 // unused +-} +-` +- +- WithOptions( +- WorkspaceFolders("a", "b"), +- ).Run(t, files, func(t *testing.T, env *Env) { +- env.OnceMet( +- InitialWorkspaceLoad, +- Diagnostics(env.AtRegexp("a/a.go", "x")), +- Diagnostics(env.AtRegexp("b/b.go", "y")), +- ) +- }) +-} +- +-func TestMultiView_LocalReplace(t *testing.T) { +- // This is a regression test for #66145, where gopls attempted to load a +- // package in a locally replaced module as a workspace package, resulting in +- // spurious import diagnostics because the module graph had been pruned. +- +- const proxy = ` +--- example.com/c@v1.2.3/go.mod -- +-module example.com/c +- +-go 1.20 +- +--- example.com/c@v1.2.3/c.go -- +-package c +- +-const C = 3 +- +-` +- // In the past, gopls would only diagnose one View at a time +- // (the last to have changed). +- // +- // This test verifies that gopls can maintain diagnostics for multiple Views. 
+- const files = ` +--- a/go.mod -- +-module golang.org/lsptests/a +- +-go 1.20 +- +-require golang.org/lsptests/b v1.2.3 +- +-replace golang.org/lsptests/b => ../b +- +--- a/a.go -- +-package a +- +-import "golang.org/lsptests/b" +- +-const A = b.B - 1 +- +--- b/go.mod -- +-module golang.org/lsptests/b +- +-go 1.20 +- +-require example.com/c v1.2.3 +- +--- b/go.sum -- +-example.com/c v1.2.3 h1:hsOPhoHQLZPEn7l3kNya3fR3SfqW0/rafZMP8ave6fg= +-example.com/c v1.2.3/go.mod h1:4uG6Y5qX88LrEd4KfRoiguHZIbdLKUEHD1wXqPyrHcA= +--- b/b.go -- +-package b +- +-const B = 2 +- +--- b/unrelated/u.go -- +-package unrelated +- +-import "example.com/c" +- +-const U = c.C +-` +- +- WithOptions( +- WorkspaceFolders("a", "b"), +- ProxyFiles(proxy), +- ).Run(t, files, func(t *testing.T, env *Env) { +- // Opening unrelated first ensures that when we compute workspace packages +- // for the "a" workspace, it includes the unrelated package, which will be +- // unloadable from a as there is no a/go.sum. +- env.OpenFile("b/unrelated/u.go") +- env.AfterChange() +- env.OpenFile("a/a.go") +- env.AfterChange(NoDiagnostics()) +- }) +-} +diff -urN a/gopls/internal/test/integration/workspace/packages_test.go b/gopls/internal/test/integration/workspace/packages_test.go +--- a/gopls/internal/test/integration/workspace/packages_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/integration/workspace/packages_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,549 +0,0 @@ +-// Copyright 2024 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package workspace +- +-import ( +- "sort" +- "strings" +- "testing" +- +- "github.com/google/go-cmp/cmp" +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/gopls/internal/protocol/command" +- . 
"golang.org/x/tools/gopls/internal/test/integration" +-) +- +-func TestPackages(t *testing.T) { +- const files = ` +--- go.mod -- +-module foo +- +--- foo.go -- +-package foo +-func Foo() +- +--- bar/bar.go -- +-package bar +-func Bar() +- +--- baz/go.mod -- +-module baz +- +--- baz/baz.go -- +-package baz +-func Baz() +-` +- +- t.Run("file", func(t *testing.T) { +- Run(t, files, func(t *testing.T, env *Env) { +- checkPackages(t, env, []protocol.DocumentURI{env.Editor.DocumentURI("foo.go")}, false, 0, []command.Package{ +- { +- Path: "foo", +- ModulePath: "foo", +- }, +- }, map[string]command.Module{ +- "foo": { +- Path: "foo", +- GoMod: env.Editor.DocumentURI("go.mod"), +- }, +- }, []string{}) +- }) +- }) +- +- t.Run("package", func(t *testing.T) { +- Run(t, files, func(t *testing.T, env *Env) { +- checkPackages(t, env, []protocol.DocumentURI{env.Editor.DocumentURI("bar")}, false, 0, []command.Package{ +- { +- Path: "foo/bar", +- ModulePath: "foo", +- }, +- }, map[string]command.Module{ +- "foo": { +- Path: "foo", +- GoMod: env.Editor.DocumentURI("go.mod"), +- }, +- }, []string{}) +- }) +- }) +- +- t.Run("workspace", func(t *testing.T) { +- Run(t, files, func(t *testing.T, env *Env) { +- checkPackages(t, env, []protocol.DocumentURI{env.Editor.DocumentURI("")}, true, 0, []command.Package{ +- { +- Path: "foo", +- ModulePath: "foo", +- }, +- { +- Path: "foo/bar", +- ModulePath: "foo", +- }, +- }, map[string]command.Module{ +- "foo": { +- Path: "foo", +- GoMod: env.Editor.DocumentURI("go.mod"), +- }, +- }, []string{}) +- }) +- }) +- +- t.Run("nested module", func(t *testing.T) { +- Run(t, files, func(t *testing.T, env *Env) { +- // Load the nested module +- env.OpenFile("baz/baz.go") +- +- // Request packages using the URI of the nested module _directory_ +- checkPackages(t, env, []protocol.DocumentURI{env.Editor.DocumentURI("baz")}, true, 0, []command.Package{ +- { +- Path: "baz", +- ModulePath: "baz", +- }, +- }, map[string]command.Module{ +- "baz": { +- Path: "baz", +- GoMod: env.Editor.DocumentURI("baz/go.mod"), +- }, +- }, []string{}) +- }) +- }) +-} +- +-func TestPackagesWithTests(t *testing.T) { +- const files = ` +--- go.mod -- +-module foo +- +--- foo.go -- +-package foo +-import "testing" +-func Foo() +-func TestFoo2(t *testing.T) +-func foo() +- +--- foo_test.go -- +-package foo +-import "testing" +-func TestFoo(t *testing.T) +-func Issue70927(*error) +-func Test_foo(t *testing.T) +- +--- foo2_test.go -- +-package foo_test +-import "testing" +-func TestBar(t *testing.T) {} +- +--- baz/baz_test.go -- +-package baz +-import "testing" +-func TestBaz(*testing.T) +-func BenchmarkBaz(*testing.B) +-func FuzzBaz(*testing.F) +-func ExampleBaz() +- +--- bat/go.mod -- +-module bat +- +--- bat/bat_test.go -- +-package bat +-import "testing" +-func Test(*testing.T) +-` +- +- t.Run("file", func(t *testing.T) { +- Run(t, files, func(t *testing.T, env *Env) { +- checkPackages(t, env, []protocol.DocumentURI{env.Editor.DocumentURI("foo_test.go")}, false, command.NeedTests, []command.Package{ +- { +- Path: "foo", +- ModulePath: "foo", +- }, +- { +- Path: "foo", +- ForTest: "foo", +- ModulePath: "foo", +- TestFiles: []command.TestFile{ +- { +- URI: env.Editor.DocumentURI("foo_test.go"), +- Tests: []command.TestCase{ +- {Name: "TestFoo"}, +- {Name: "Test_foo"}, +- }, +- }, +- }, +- }, +- { +- Path: "foo_test", +- ForTest: "foo", +- ModulePath: "foo", +- TestFiles: []command.TestFile{ +- { +- URI: env.Editor.DocumentURI("foo2_test.go"), +- Tests: []command.TestCase{ +- {Name: "TestBar"}, +- }, +- }, +- 
}, +- }, +- }, map[string]command.Module{ +- "foo": { +- Path: "foo", +- GoMod: env.Editor.DocumentURI("go.mod"), +- }, +- }, []string{ +- "func TestFoo(t *testing.T)", +- "func Test_foo(t *testing.T)", +- "func TestBar(t *testing.T) {}", +- }) +- }) +- }) +- +- t.Run("package", func(t *testing.T) { +- Run(t, files, func(t *testing.T, env *Env) { +- checkPackages(t, env, []protocol.DocumentURI{env.Editor.DocumentURI("baz")}, false, command.NeedTests, []command.Package{ +- { +- Path: "foo/baz", +- ForTest: "foo/baz", +- ModulePath: "foo", +- TestFiles: []command.TestFile{ +- { +- URI: env.Editor.DocumentURI("baz/baz_test.go"), +- Tests: []command.TestCase{ +- {Name: "TestBaz"}, +- {Name: "BenchmarkBaz"}, +- {Name: "FuzzBaz"}, +- {Name: "ExampleBaz"}, +- }, +- }, +- }, +- }, +- }, map[string]command.Module{ +- "foo": { +- Path: "foo", +- GoMod: env.Editor.DocumentURI("go.mod"), +- }, +- }, []string{ +- "func TestBaz(*testing.T)", +- "func BenchmarkBaz(*testing.B)", +- "func FuzzBaz(*testing.F)", +- "func ExampleBaz()", +- }) +- }) +- }) +- +- t.Run("workspace", func(t *testing.T) { +- Run(t, files, func(t *testing.T, env *Env) { +- checkPackages(t, env, []protocol.DocumentURI{env.Editor.DocumentURI(".")}, true, command.NeedTests, []command.Package{ +- { +- Path: "foo", +- ModulePath: "foo", +- }, +- { +- Path: "foo", +- ForTest: "foo", +- ModulePath: "foo", +- TestFiles: []command.TestFile{ +- { +- URI: env.Editor.DocumentURI("foo_test.go"), +- Tests: []command.TestCase{ +- {Name: "TestFoo"}, +- {Name: "Test_foo"}, +- }, +- }, +- }, +- }, +- { +- Path: "foo/baz", +- ForTest: "foo/baz", +- ModulePath: "foo", +- TestFiles: []command.TestFile{ +- { +- URI: env.Editor.DocumentURI("baz/baz_test.go"), +- Tests: []command.TestCase{ +- {Name: "TestBaz"}, +- {Name: "BenchmarkBaz"}, +- {Name: "FuzzBaz"}, +- {Name: "ExampleBaz"}, +- }, +- }, +- }, +- }, +- { +- Path: "foo_test", +- ForTest: "foo", +- ModulePath: "foo", +- TestFiles: []command.TestFile{ +- { +- URI: env.Editor.DocumentURI("foo2_test.go"), +- Tests: []command.TestCase{ +- {Name: "TestBar"}, +- }, +- }, +- }, +- }, +- }, map[string]command.Module{ +- "foo": { +- Path: "foo", +- GoMod: env.Editor.DocumentURI("go.mod"), +- }, +- }, []string{ +- "func TestFoo(t *testing.T)", +- "func Test_foo(t *testing.T)", +- "func TestBaz(*testing.T)", +- "func BenchmarkBaz(*testing.B)", +- "func FuzzBaz(*testing.F)", +- "func ExampleBaz()", +- "func TestBar(t *testing.T) {}", +- }) +- }) +- }) +- +- t.Run("nested module", func(t *testing.T) { +- Run(t, files, func(t *testing.T, env *Env) { +- // Load the nested module +- env.OpenFile("bat/bat_test.go") +- +- // Request packages using the URI of the nested module _directory_ +- checkPackages(t, env, []protocol.DocumentURI{env.Editor.DocumentURI("bat")}, true, command.NeedTests, []command.Package{ +- { +- Path: "bat", +- ForTest: "bat", +- ModulePath: "bat", +- TestFiles: []command.TestFile{ +- { +- URI: env.Editor.DocumentURI("bat/bat_test.go"), +- Tests: []command.TestCase{ +- {Name: "Test"}, +- }, +- }, +- }, +- }, +- }, map[string]command.Module{ +- "bat": { +- Path: "bat", +- GoMod: env.Editor.DocumentURI("bat/go.mod"), +- }, +- }, []string{ +- "func Test(*testing.T)", +- }) +- }) +- }) +-} +- +-func TestPackagesWithSubtests(t *testing.T) { +- const files = ` +--- go.mod -- +-module foo +- +--- foo_test.go -- +-package foo +- +-import "testing" +- +-// Verify that examples don't break subtest detection +-func ExampleFoo() {} +- +-func TestFoo(t *testing.T) { +- t.Run("Bar", func(t *testing.T) { +- 
t.Run("Baz", func(t *testing.T) {}) +- }) +- t.Run("Bar", func(t *testing.T) {}) +- t.Run("Bar", func(t *testing.T) {}) +- t.Run("with space", func(t *testing.T) {}) +- +- var x X +- y := func(t *testing.T) { +- t.Run("VarSub", func(t *testing.T) {}) +- } +- t.Run("SubtestFunc", SubtestFunc) +- t.Run("SubtestMethod", x.SubtestMethod) +- t.Run("SubtestVar", y) +-} +- +-func SubtestFunc(t *testing.T) { +- t.Run("FuncSub", func(t *testing.T) {}) +-} +- +-type X int +-func (X) SubtestMethod(t *testing.T) { +- t.Run("MethodSub", func(t *testing.T) {}) +-} +-` +- +- Run(t, files, func(t *testing.T, env *Env) { +- checkPackages(t, env, []protocol.DocumentURI{env.Editor.DocumentURI("foo_test.go")}, false, command.NeedTests, []command.Package{ +- { +- Path: "foo", +- ForTest: "foo", +- ModulePath: "foo", +- TestFiles: []command.TestFile{ +- { +- URI: env.Editor.DocumentURI("foo_test.go"), +- Tests: []command.TestCase{ +- {Name: "ExampleFoo"}, +- {Name: "TestFoo"}, +- {Name: "TestFoo/Bar"}, +- {Name: "TestFoo/Bar/Baz"}, +- {Name: "TestFoo/Bar#01"}, +- {Name: "TestFoo/Bar#02"}, +- {Name: "TestFoo/with_space"}, +- {Name: "TestFoo/SubtestFunc"}, +- {Name: "TestFoo/SubtestFunc/FuncSub"}, +- {Name: "TestFoo/SubtestMethod"}, +- {Name: "TestFoo/SubtestMethod/MethodSub"}, +- {Name: "TestFoo/SubtestVar"}, +- // {Name: "TestFoo/SubtestVar/VarSub"}, // TODO +- }, +- }, +- }, +- }, +- }, map[string]command.Module{ +- "foo": { +- Path: "foo", +- GoMod: env.Editor.DocumentURI("go.mod"), +- }, +- }, []string{ +- "func ExampleFoo() {}", +- `func TestFoo(t *testing.T) { +- t.Run("Bar", func(t *testing.T) { +- t.Run("Baz", func(t *testing.T) {}) +- }) +- t.Run("Bar", func(t *testing.T) {}) +- t.Run("Bar", func(t *testing.T) {}) +- t.Run("with space", func(t *testing.T) {}) +- +- var x X +- y := func(t *testing.T) { +- t.Run("VarSub", func(t *testing.T) {}) +- } +- t.Run("SubtestFunc", SubtestFunc) +- t.Run("SubtestMethod", x.SubtestMethod) +- t.Run("SubtestVar", y) +-}`, +- "t.Run(\"Bar\", func(t *testing.T) {\n\t\tt.Run(\"Baz\", func(t *testing.T) {})\n\t})", +- `t.Run("Baz", func(t *testing.T) {})`, +- `t.Run("Bar", func(t *testing.T) {})`, +- `t.Run("Bar", func(t *testing.T) {})`, +- `t.Run("with space", func(t *testing.T) {})`, +- `t.Run("SubtestFunc", SubtestFunc)`, +- `t.Run("FuncSub", func(t *testing.T) {})`, +- `t.Run("SubtestMethod", x.SubtestMethod)`, +- `t.Run("MethodSub", func(t *testing.T) {})`, +- `t.Run("SubtestVar", y)`, +- }) +- }) +-} +- +-func TestRecursiveSubtest(t *testing.T) { +- const files = ` +--- go.mod -- +-module foo +- +--- foo_test.go -- +-package foo +- +-import "testing" +- +-func TestFoo(t *testing.T) { t.Run("Foo", TestFoo) } +-func TestBar(t *testing.T) { t.Run("Foo", TestFoo) } +- +-func TestBaz(t *testing.T) { +- var sub func(t *testing.T) +- sub = func(t *testing.T) { t.Run("Sub", sub) } +- t.Run("Sub", sub) +-} +-` +- +- Run(t, files, func(t *testing.T, env *Env) { +- checkPackages(t, env, []protocol.DocumentURI{env.Editor.DocumentURI("foo_test.go")}, false, command.NeedTests, []command.Package{ +- { +- Path: "foo", +- ForTest: "foo", +- ModulePath: "foo", +- TestFiles: []command.TestFile{ +- { +- URI: env.Editor.DocumentURI("foo_test.go"), +- Tests: []command.TestCase{ +- {Name: "TestFoo"}, +- {Name: "TestFoo/Foo"}, +- {Name: "TestBar"}, +- {Name: "TestBar/Foo"}, +- {Name: "TestBaz"}, +- {Name: "TestBaz/Sub"}, +- }, +- }, +- }, +- }, +- }, map[string]command.Module{ +- "foo": { +- Path: "foo", +- GoMod: env.Editor.DocumentURI("go.mod"), +- }, +- }, []string{ +- `func TestFoo(t 
*testing.T) { t.Run("Foo", TestFoo) }`, +- `t.Run("Foo", TestFoo)`, +- `func TestBar(t *testing.T) { t.Run("Foo", TestFoo) }`, +- `t.Run("Foo", TestFoo)`, +- `func TestBaz(t *testing.T) { +- var sub func(t *testing.T) +- sub = func(t *testing.T) { t.Run("Sub", sub) } +- t.Run("Sub", sub) +-}`, +- `t.Run("Sub", sub)`, +- }) +- }) +-} +- +-func checkPackages(t testing.TB, env *Env, files []protocol.DocumentURI, recursive bool, mode command.PackagesMode, wantPkg []command.Package, wantModule map[string]command.Module, wantSource []string) { +- t.Helper() +- +- cmd := command.NewPackagesCommand("Packages", command.PackagesArgs{Files: files, Recursive: recursive, Mode: mode}) +- var result command.PackagesResult +- env.ExecuteCommand(&protocol.ExecuteCommandParams{ +- Command: command.Packages.String(), +- Arguments: cmd.Arguments, +- }, &result) +- +- // The ordering of packages is undefined so sort the results to ensure +- // consistency +- sort.Slice(result.Packages, func(i, j int) bool { +- a, b := result.Packages[i], result.Packages[j] +- c := strings.Compare(a.Path, b.Path) +- if c != 0 { +- return c < 0 +- } +- return strings.Compare(a.ForTest, b.ForTest) < 0 +- }) +- +- // Instead of testing the exact values of the test locations (which would +- // make these tests significantly more trouble to maintain), verify the +- // source range they refer to. +- gotSource := []string{} // avoid issues with comparing null to [] +- for i := range result.Packages { +- pkg := &result.Packages[i] +- for i := range pkg.TestFiles { +- file := &pkg.TestFiles[i] +- env.OpenFile(file.URI.Path()) +- +- for i := range file.Tests { +- test := &file.Tests[i] +- gotSource = append(gotSource, env.FileContentAt(test.Loc)) +- test.Loc = protocol.Location{} +- } +- } +- } +- +- if diff := cmp.Diff(wantPkg, result.Packages); diff != "" { +- t.Errorf("Packages(%v) returned unexpected packages (-want +got):\n%s", files, diff) +- } +- +- if diff := cmp.Diff(wantModule, result.Module); diff != "" { +- t.Errorf("Packages(%v) returned unexpected modules (-want +got):\n%s", files, diff) +- } +- +- // Don't check the source if the response is incorrect +- if !t.Failed() { +- if diff := cmp.Diff(wantSource, gotSource); diff != "" { +- t.Errorf("Packages(%v) returned unexpected test case ranges (-want +got):\n%s", files, diff) +- } +- } +-} +diff -urN a/gopls/internal/test/integration/workspace/quickfix_test.go b/gopls/internal/test/integration/workspace/quickfix_test.go +--- a/gopls/internal/test/integration/workspace/quickfix_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/integration/workspace/quickfix_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,508 +0,0 @@ +-// Copyright 2023 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package workspace +- +-import ( +- "strings" +- "testing" +- +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/gopls/internal/test/compare" +- +- . 
"golang.org/x/tools/gopls/internal/test/integration" +-) +- +-func TestQuickFix_UseModule(t *testing.T) { +- t.Skip("temporary skip for golang/go#57979: with zero-config gopls these files are no longer orphaned") +- +- const files = ` +--- go.work -- +-go 1.20 +- +-use ( +- ./a +-) +--- a/go.mod -- +-module mod.com/a +- +-go 1.18 +- +--- a/main.go -- +-package main +- +-import "mod.com/a/lib" +- +-func main() { +- _ = lib.C +-} +- +--- a/lib/lib.go -- +-package lib +- +-const C = "b" +--- b/go.mod -- +-module mod.com/b +- +-go 1.18 +- +--- b/main.go -- +-package main +- +-import "mod.com/b/lib" +- +-func main() { +- _ = lib.C +-} +- +--- b/lib/lib.go -- +-package lib +- +-const C = "b" +-` +- +- for _, title := range []string{ +- "Use this module", +- "Use all modules", +- } { +- t.Run(title, func(t *testing.T) { +- Run(t, files, func(t *testing.T, env *Env) { +- env.OpenFile("b/main.go") +- var d protocol.PublishDiagnosticsParams +- env.AfterChange(ReadDiagnostics("b/main.go", &d)) +- fixes := env.GetQuickFixes("b/main.go", d.Diagnostics) +- var toApply []protocol.CodeAction +- for _, fix := range fixes { +- if strings.Contains(fix.Title, title) { +- toApply = append(toApply, fix) +- } +- } +- if len(toApply) != 1 { +- t.Fatalf("codeAction: got %d quick fixes matching %q, want 1; got: %v", len(toApply), title, toApply) +- } +- env.ApplyCodeAction(toApply[0]) +- env.AfterChange(NoDiagnostics()) +- want := `go 1.20 +- +-use ( +- ./a +- ./b +-) +-` +- got := env.ReadWorkspaceFile("go.work") +- if diff := compare.Text(want, got); diff != "" { +- t.Errorf("unexpected go.work content:\n%s", diff) +- } +- }) +- }) +- } +-} +- +-func TestQuickFix_AddGoWork(t *testing.T) { +- t.Skip("temporary skip for golang/go#57979: with zero-config gopls these files are no longer orphaned") +- +- const files = ` +--- a/go.mod -- +-module mod.com/a +- +-go 1.18 +- +--- a/main.go -- +-package main +- +-import "mod.com/a/lib" +- +-func main() { +- _ = lib.C +-} +- +--- a/lib/lib.go -- +-package lib +- +-const C = "b" +--- b/go.mod -- +-module mod.com/b +- +-go 1.18 +- +--- b/main.go -- +-package main +- +-import "mod.com/b/lib" +- +-func main() { +- _ = lib.C +-} +- +--- b/lib/lib.go -- +-package lib +- +-const C = "b" +-` +- +- tests := []struct { +- name string +- file string +- title string +- want string // expected go.work content, excluding go directive line +- }{ +- { +- "use b", +- "b/main.go", +- "Add a go.work file using this module", +- ` +-use ./b +-`, +- }, +- { +- "use a", +- "a/main.go", +- "Add a go.work file using this module", +- ` +-use ./a +-`, +- }, +- { +- "use all", +- "a/main.go", +- "Add a go.work file using all modules", +- ` +-use ( +- ./a +- ./b +-) +-`, +- }, +- } +- +- for _, test := range tests { +- t.Run(test.name, func(t *testing.T) { +- Run(t, files, func(t *testing.T, env *Env) { +- env.OpenFile(test.file) +- var d protocol.PublishDiagnosticsParams +- env.AfterChange(ReadDiagnostics(test.file, &d)) +- fixes := env.GetQuickFixes(test.file, d.Diagnostics) +- var toApply []protocol.CodeAction +- for _, fix := range fixes { +- if strings.Contains(fix.Title, test.title) { +- toApply = append(toApply, fix) +- } +- } +- if len(toApply) != 1 { +- t.Fatalf("codeAction: got %d quick fixes matching %q, want 1; got: %v", len(toApply), test.title, toApply) +- } +- env.ApplyCodeAction(toApply[0]) +- env.AfterChange( +- NoDiagnostics(ForFile(test.file)), +- ) +- +- got := env.ReadWorkspaceFile("go.work") +- // Ignore the `go` directive, which we assume is on the first line of +- // the go.work 
file. This allows the test to be independent of go version. +- got = strings.Join(strings.Split(got, "\n")[1:], "\n") +- if diff := compare.Text(test.want, got); diff != "" { +- t.Errorf("unexpected go.work content:\n%s", diff) +- } +- }) +- }) +- } +-} +- +-func TestQuickFix_UnsavedGoWork(t *testing.T) { +- t.Skip("temporary skip for golang/go#57979: with zero-config gopls these files are no longer orphaned") +- +- const files = ` +--- go.work -- +-go 1.21 +- +-use ( +- ./a +-) +--- a/go.mod -- +-module mod.com/a +- +-go 1.18 +- +--- a/main.go -- +-package main +- +-func main() {} +--- b/go.mod -- +-module mod.com/b +- +-go 1.18 +- +--- b/main.go -- +-package main +- +-func main() {} +-` +- +- for _, title := range []string{ +- "Use this module", +- "Use all modules", +- } { +- t.Run(title, func(t *testing.T) { +- Run(t, files, func(t *testing.T, env *Env) { +- env.OpenFile("go.work") +- env.OpenFile("b/main.go") +- env.RegexpReplace("go.work", "go 1.21", "go 1.21 // arbitrary comment") +- var d protocol.PublishDiagnosticsParams +- env.AfterChange(ReadDiagnostics("b/main.go", &d)) +- fixes := env.GetQuickFixes("b/main.go", d.Diagnostics) +- var toApply []protocol.CodeAction +- for _, fix := range fixes { +- if strings.Contains(fix.Title, title) { +- toApply = append(toApply, fix) +- } +- } +- if len(toApply) != 1 { +- t.Fatalf("codeAction: got %d quick fixes matching %q, want 1; got: %v", len(toApply), title, toApply) +- } +- fix := toApply[0] +- err := env.Editor.ApplyCodeAction(env.Ctx, fix) +- if err == nil { +- t.Fatalf("codeAction(%q) succeeded unexpectedly", fix.Title) +- } +- +- if got := err.Error(); !strings.Contains(got, "must save") { +- t.Errorf("codeAction(%q) returned error %q, want containing \"must save\"", fix.Title, err) +- } +- }) +- }) +- } +-} +- +-func TestQuickFix_GOWORKOff(t *testing.T) { +- t.Skip("temporary skip for golang/go#57979: with zero-config gopls these files are no longer orphaned") +- +- const files = ` +--- go.work -- +-go 1.21 +- +-use ( +- ./a +-) +--- a/go.mod -- +-module mod.com/a +- +-go 1.18 +- +--- a/main.go -- +-package main +- +-func main() {} +--- b/go.mod -- +-module mod.com/b +- +-go 1.18 +- +--- b/main.go -- +-package main +- +-func main() {} +-` +- +- for _, title := range []string{ +- "Use this module", +- "Use all modules", +- } { +- t.Run(title, func(t *testing.T) { +- WithOptions( +- EnvVars{"GOWORK": "off"}, +- ).Run(t, files, func(t *testing.T, env *Env) { +- env.OpenFile("go.work") +- env.OpenFile("b/main.go") +- var d protocol.PublishDiagnosticsParams +- env.AfterChange(ReadDiagnostics("b/main.go", &d)) +- fixes := env.GetQuickFixes("b/main.go", d.Diagnostics) +- var toApply []protocol.CodeAction +- for _, fix := range fixes { +- if strings.Contains(fix.Title, title) { +- toApply = append(toApply, fix) +- } +- } +- if len(toApply) != 1 { +- t.Fatalf("codeAction: got %d quick fixes matching %q, want 1; got: %v", len(toApply), title, toApply) +- } +- fix := toApply[0] +- err := env.Editor.ApplyCodeAction(env.Ctx, fix) +- if err == nil { +- t.Fatalf("codeAction(%q) succeeded unexpectedly", fix.Title) +- } +- +- if got := err.Error(); !strings.Contains(got, "GOWORK=off") { +- t.Errorf("codeAction(%q) returned error %q, want containing \"GOWORK=off\"", fix.Title, err) +- } +- }) +- }) +- } +-} +- +-func TestStubMethods64087(t *testing.T) { +- // We can't use the @fix or @quickfixerr or @codeaction +- // because the error now reported by the corrected logic +- // is internal and silently causes no fix to be offered. 
+- // +- // See also the similar TestStubMethods64545 below. +- +- const files = ` +-This is a regression test for a panic (issue #64087) in stub methods. +- +-The illegal expression int("") caused a "cannot convert" error that +-spuriously triggered the "stub methods" in a function whose return +-statement had too many operands, leading to an out-of-bounds index. +- +--- go.mod -- +-module mod.com +-go 1.18 +- +--- a.go -- +-package a +- +-func f() error { +- return nil, myerror{int("")} +-} +- +-type myerror struct{any} +-` +- Run(t, files, func(t *testing.T, env *Env) { +- env.OpenFile("a.go") +- +- // Expect a "wrong result count" diagnostic. +- var d protocol.PublishDiagnosticsParams +- env.AfterChange(ReadDiagnostics("a.go", &d)) +- +- // In no particular order, we expect: +- // "...too many return values..." (compiler) +- // "...cannot convert..." (compiler) +- // and possibly: +- // "...too many return values..." (fillreturns) +- // We check only for the first of these. +- found := false +- for i, diag := range d.Diagnostics { +- t.Logf("Diagnostics[%d] = %q (%s)", i, diag.Message, diag.Source) +- if strings.Contains(diag.Message, "too many return") { +- found = true +- } +- } +- if !found { +- t.Fatalf("Expected WrongResultCount diagnostic not found.") +- } +- +- // GetQuickFixes should not panic (the original bug). +- fixes := env.GetQuickFixes("a.go", d.Diagnostics) +- +- // We should not be offered a "stub methods" fix. +- for _, fix := range fixes { +- if strings.Contains(fix.Title, "Implement error") { +- t.Errorf("unexpected 'stub methods' fix: %#v", fix) +- } +- } +- }) +-} +- +-func TestStubMethods64545(t *testing.T) { +- // We can't use the @fix or @quickfixerr or @codeaction +- // because the error now reported by the corrected logic +- // is internal and silently causes no fix to be offered. +- // +- // TODO(adonovan): we may need to generalize this test and +- // TestStubMethods64087 if this happens a lot. +- +- const files = ` +-This is a regression test for a panic (issue #64545) in stub methods. +- +-The illegal expression int("") caused a "cannot convert" error that +-spuriously triggered the "stub methods" in a function whose var +-spec had no RHS values, leading to an out-of-bounds index. +- +--- go.mod -- +-module mod.com +-go 1.18 +- +--- a.go -- +-package a +- +-var _ [int("")]byte +-` +- Run(t, files, func(t *testing.T, env *Env) { +- env.OpenFile("a.go") +- +- // Expect a "cannot convert" diagnostic, and perhaps others. +- var d protocol.PublishDiagnosticsParams +- env.AfterChange(ReadDiagnostics("a.go", &d)) +- +- found := false +- for i, diag := range d.Diagnostics { +- t.Logf("Diagnostics[%d] = %q (%s)", i, diag.Message, diag.Source) +- if strings.Contains(diag.Message, "cannot convert") { +- found = true +- } +- } +- if !found { +- t.Fatalf("Expected 'cannot convert' diagnostic not found.") +- } +- +- // GetQuickFixes should not panic (the original bug). +- fixes := env.GetQuickFixes("a.go", d.Diagnostics) +- +- // We should not be offered a "stub methods" fix. 
+- for _, fix := range fixes { +- if strings.Contains(fix.Title, "Implement error") { +- t.Errorf("unexpected 'stub methods' fix: %#v", fix) +- } +- } +- }) +-} +- +-// quick fix didn't offer add imports +-func TestIssue70755(t *testing.T) { +- files := ` +--- go.mod -- +-module failure.com +-go 1.23 +--- bar/bar.go -- +-package notbar +-type NotBar struct{} +--- baz/baz.go -- +-package baz +-type Baz struct{} +--- foo/foo.go -- +-package foo +-type foo struct { +-bar notbar.NotBar +-bzz baz.Baz +-} +-` +- +- Run(t, files, func(t *testing.T, env *Env) { +- env.OpenFile("foo/foo.go") +- var d protocol.PublishDiagnosticsParams +- env.AfterChange(ReadDiagnostics("foo/foo.go", &d)) +- // should get two, one for undefined notbar +- // and one for undefined baz +- fixes := env.GetQuickFixes("foo/foo.go", d.Diagnostics) +- if len(fixes) != 2 { +- t.Fatalf("got %v, want 2 quick fixes", fixes) +- } +- good := 0 +- failures := "" +- for _, f := range fixes { +- ti := f.Title +- // these may be overly white-space sensitive +- if ti == "Add import: notbar \"failure.com/bar\"" || +- ti == "Add import: \"failure.com/baz\"" { +- good++ +- } else { +- failures += ti +- } +- } +- if good != 2 { +- t.Errorf("failed to find\n%q, got\n%q\n%q", failures, fixes[0].Title, +- fixes[1].Title) +- } +- +- }) +-} +diff -urN a/gopls/internal/test/integration/workspace/standalone_test.go b/gopls/internal/test/integration/workspace/standalone_test.go +--- a/gopls/internal/test/integration/workspace/standalone_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/integration/workspace/standalone_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,203 +0,0 @@ +-// Copyright 2022 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package workspace +- +-import ( +- "sort" +- "testing" +- +- "github.com/google/go-cmp/cmp" +- "golang.org/x/tools/gopls/internal/protocol" +- . "golang.org/x/tools/gopls/internal/test/integration" +-) +- +-func TestStandaloneFiles(t *testing.T) { +- const files = ` +--- go.mod -- +-module mod.test +- +-go 1.16 +--- lib/lib.go -- +-package lib +- +-const K = 0 +- +-type I interface { +- M() +-} +--- lib/ignore.go -- +-//go:build ignore +- +-package main +- +-import ( +- "mod.test/lib" +-) +- +-const K = 1 +- +-type Mer struct{} +-func (Mer) M() +- +-func main() { +- println(lib.K + K) +-} +-` +- WithOptions( +- // On Go 1.17 and earlier, this test fails with +- // experimentalWorkspaceModule. Not investigated, as +- // experimentalWorkspaceModule will be removed. +- Modes(Default), +- ).Run(t, files, func(t *testing.T, env *Env) { +- // Initially, gopls should not know about the standalone file as it hasn't +- // been opened. Therefore, we should only find one symbol 'K'. +- // +- // (The choice of "K" is a little sleazy: it was originally "C" until +- // we started adding "unsafe" to the workspace unconditionally, which +- // caused a spurious match of "unsafe.Slice". But in practice every +- // workspace depends on unsafe.) +- syms := env.Symbol("K") +- if got, want := len(syms), 1; got != want { +- t.Errorf("got %d symbols, want %d (%+v)", got, want, syms) +- } +- +- // Similarly, we should only find one reference to "K", and no +- // implementations of I. 
+- checkLocations := func(method string, gotLocations []protocol.Location, wantFiles ...string) { +- var gotFiles []string +- for _, l := range gotLocations { +- gotFiles = append(gotFiles, env.Sandbox.Workdir.URIToPath(l.URI)) +- } +- sort.Strings(gotFiles) +- sort.Strings(wantFiles) +- if diff := cmp.Diff(wantFiles, gotFiles); diff != "" { +- t.Errorf("%s(...): unexpected locations (-want +got):\n%s", method, diff) +- } +- } +- +- env.OpenFile("lib/lib.go") +- env.AfterChange(NoDiagnostics()) +- +- // Replacing K with D should not cause any workspace diagnostics, since we +- // haven't yet opened the standalone file. +- env.RegexpReplace("lib/lib.go", "K", "D") +- env.AfterChange(NoDiagnostics()) +- env.RegexpReplace("lib/lib.go", "D", "K") +- env.AfterChange(NoDiagnostics()) +- +- refs := env.References(env.RegexpSearch("lib/lib.go", "K")) +- checkLocations("References", refs, "lib/lib.go") +- +- impls := env.Implementations(env.RegexpSearch("lib/lib.go", "I")) +- checkLocations("Implementations", impls) // no implementations +- +- // Opening the standalone file should not result in any diagnostics. +- env.OpenFile("lib/ignore.go") +- env.AfterChange(NoDiagnostics()) +- +- // Having opened the standalone file, we should find its symbols in the +- // workspace. +- syms = env.Symbol("K") +- if got, want := len(syms), 2; got != want { +- t.Fatalf("got %d symbols, want %d", got, want) +- } +- +- foundMainK := false +- var symNames []string +- for _, sym := range syms { +- symNames = append(symNames, sym.Name) +- if sym.Name == "main.K" { +- foundMainK = true +- } +- } +- if !foundMainK { +- t.Errorf("WorkspaceSymbol(\"K\") = %v, want containing main.K", symNames) +- } +- +- // We should resolve workspace definitions in the standalone file. +- fileLoc := env.FirstDefinition(env.RegexpSearch("lib/ignore.go", "lib.(K)")) +- file := env.Sandbox.Workdir.URIToPath(fileLoc.URI) +- if got, want := file, "lib/lib.go"; got != want { +- t.Errorf("Definition(lib.K) = %v, want %v", got, want) +- } +- +- // ...as well as intra-file definitions +- loc := env.FirstDefinition(env.RegexpSearch("lib/ignore.go", "\\+ (K)")) +- wantLoc := env.RegexpSearch("lib/ignore.go", "const (K)") +- if loc != wantLoc { +- t.Errorf("Definition(K) = %v, want %v", loc, wantLoc) +- } +- +- // Renaming "lib.K" to "lib.D" should cause a diagnostic in the standalone +- // file. +- env.RegexpReplace("lib/lib.go", "K", "D") +- env.AfterChange(Diagnostics(env.AtRegexp("lib/ignore.go", "lib.(K)"))) +- +- // Undoing the replacement should fix diagnostics +- env.RegexpReplace("lib/lib.go", "D", "K") +- env.AfterChange(NoDiagnostics()) +- +- // Now that our workspace has no errors, we should be able to find +- // references and rename. +- refs = env.References(env.RegexpSearch("lib/lib.go", "K")) +- checkLocations("References", refs, "lib/lib.go", "lib/ignore.go") +- +- impls = env.Implementations(env.RegexpSearch("lib/lib.go", "I")) +- checkLocations("Implementations", impls, "lib/ignore.go") +- +- // Renaming should rename in the standalone package. +- env.Rename(env.RegexpSearch("lib/lib.go", "K"), "D") +- env.RegexpSearch("lib/ignore.go", "lib.D") +- }) +-} +- +-func TestStandaloneFiles_Configuration(t *testing.T) { +- const files = ` +--- go.mod -- +-module mod.test +- +-go 1.18 +--- lib.go -- +-package lib // without this package, files are loaded as command-line-arguments +--- ignore.go -- +-//go:build ignore +- +-package main +- +-// An arbitrary comment. 
+- +-func main() {} +--- standalone.go -- +-//go:build standalone +- +-package main +- +-func main() {} +-` +- +- WithOptions( +- Settings{ +- "standaloneTags": []string{"standalone", "script"}, +- }, +- ).Run(t, files, func(t *testing.T, env *Env) { +- env.OpenFile("ignore.go") +- env.OpenFile("standalone.go") +- +- env.AfterChange( +- Diagnostics(env.AtRegexp("ignore.go", "package (main)")), +- NoDiagnostics(ForFile("standalone.go")), +- ) +- +- cfg := env.Editor.Config() +- cfg.Settings = map[string]any{ +- "standaloneTags": []string{"ignore"}, +- } +- env.ChangeConfiguration(cfg) +- env.AfterChange( +- NoDiagnostics(ForFile("ignore.go")), +- Diagnostics(env.AtRegexp("standalone.go", "package (main)")), +- ) +- }) +-} +diff -urN a/gopls/internal/test/integration/workspace/std_test.go b/gopls/internal/test/integration/workspace/std_test.go +--- a/gopls/internal/test/integration/workspace/std_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/integration/workspace/std_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,79 +0,0 @@ +-// Copyright 2024 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package workspace +- +-import ( +- "os" +- "os/exec" +- "path/filepath" +- "runtime" +- "strings" +- "testing" +- +- . "golang.org/x/tools/gopls/internal/test/integration" +-) +- +-func TestStdWorkspace(t *testing.T) { +- // This test checks that we actually load workspace packages when opening +- // GOROOT. +- // +- // In golang/go#65801, we failed to do this because go/packages returns nil +- // Module for std and cmd. +- // +- // Because this test loads std as a workspace, it may be slow on smaller +- // builders. +- if testing.Short() { +- t.Skip("skipping with -short: loads GOROOT") +- } +- +- // The test also fails on Windows because an absolute path does not match +- // (likely a misspelling due to slashes). +- // TODO(rfindley): investigate and fix this on windows. +- if runtime.GOOS == "windows" { +- t.Skip("skipping on windows: fails to misspelled paths") +- } +- +- // Query GOROOT. This is slightly more precise than e.g. runtime.GOROOT, as +- // it queries the go command in the environment. +- cmd := exec.Command("go", "env", "GOROOT") +- // Run with GOTOOLCHAIN=local so as to not be affected by toolchain upgrades +- // in the current directory (which is affected by gopls' go.mod file). +- // This was golang/go#70187 +- cmd.Env = append(os.Environ(), "GOTOOLCHAIN=local") +- goroot, err := cmd.Output() +- if err != nil { +- t.Fatal(err) +- } +- stdDir := filepath.Join(strings.TrimSpace(string(goroot)), "src") +- WithOptions( +- Modes(Default), // This test may be slow. No reason to run it multiple times. +- WorkspaceFolders(stdDir), +- ).Run(t, "", func(t *testing.T, env *Env) { +- // Find parser.ParseFile. Query with `'` to get an exact match. +- syms := env.Symbol("'go/parser.ParseFile") +- if len(syms) != 1 { +- t.Fatalf("got %d symbols, want exactly 1. Symbols:\n%v", len(syms), syms) +- } +- parserPath := syms[0].Location.URI.Path() +- env.OpenFile(parserPath) +- +- // Find the reference to ast.File from the signature of ParseFile. This +- // helps guard against matching a comment. +- astFile := env.RegexpSearch(parserPath, `func ParseFile\(.*ast\.(File)`) +- refs := env.References(astFile) +- +- // If we've successfully loaded workspace packages for std, we should find +- // a reference in go/types. 
+- foundGoTypesReference := false +- for _, ref := range refs { +- if strings.Contains(string(ref.URI), "go/types") { +- foundGoTypesReference = true +- } +- } +- if !foundGoTypesReference { +- t.Errorf("references(ast.File) did not return a go/types reference. Refs:\n%v", refs) +- } +- }) +-} +diff -urN a/gopls/internal/test/integration/workspace/vendor_test.go b/gopls/internal/test/integration/workspace/vendor_test.go +--- a/gopls/internal/test/integration/workspace/vendor_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/integration/workspace/vendor_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,65 +0,0 @@ +-// Copyright 2024 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package workspace +- +-import ( +- "testing" +- +- . "golang.org/x/tools/gopls/internal/test/integration" +-) +- +-func TestWorkspacePackagesExcludesVendor(t *testing.T) { +- // This test verifies that packages in the vendor directory are not workspace +- // packages. This would be an easy mistake for gopls to make, since mod +- // vendoring excludes go.mod files, and therefore the nearest go.mod file for +- // vendored packages is often the workspace mod file. +- const proxy = ` +--- other.com/b@v1.0.0/go.mod -- +-module other.com/b +- +-go 1.18 +- +--- other.com/b@v1.0.0/b.go -- +-package b +- +-type B int +- +-func _() { +- var V int // unused +-} +-` +- const src = ` +--- go.mod -- +-module example.com/a +-go 1.14 +-require other.com/b v1.0.0 +- +--- a.go -- +-package a +- +-import "other.com/b" +- +-var _ b.B +- +-` +- WithOptions( +- WriteGoSum("."), +- ProxyFiles(proxy), +- Modes(Default), +- ).Run(t, src, func(t *testing.T, env *Env) { +- env.RunGoCommand("mod", "vendor") +- // Uncomment for updated go.sum contents. +- // env.DumpGoSum(".") +- env.OpenFile("a.go") +- env.AfterChange( +- NoDiagnostics(), // as b is not a workspace package +- ) +- loc := env.FirstDefinition(env.RegexpSearch("a.go", `b\.(B)`)) +- env.OpenFile(env.Sandbox.Workdir.URIToPath(loc.URI)) +- env.AfterChange( +- Diagnostics(env.AtRegexp("vendor/other.com/b/b.go", "V"), WithMessage("not used")), +- ) +- }) +-} +diff -urN a/gopls/internal/test/integration/workspace/workspace_test.go b/gopls/internal/test/integration/workspace/workspace_test.go +--- a/gopls/internal/test/integration/workspace/workspace_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/integration/workspace/workspace_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,1521 +0,0 @@ +-// Copyright 2020 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package workspace +- +-import ( +- "context" +- "fmt" +- "os" +- "sort" +- "strings" +- "testing" +- +- "github.com/google/go-cmp/cmp" +- "github.com/google/go-cmp/cmp/cmpopts" +- "golang.org/x/tools/gopls/internal/cache" +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/gopls/internal/protocol/command" +- "golang.org/x/tools/gopls/internal/test/integration/fake" +- "golang.org/x/tools/gopls/internal/util/bug" +- "golang.org/x/tools/gopls/internal/util/goversion" +- "golang.org/x/tools/internal/gocommand" +- "golang.org/x/tools/internal/testenv" +- +- . 
"golang.org/x/tools/gopls/internal/test/integration" +-) +- +-func TestMain(m *testing.M) { +- bug.PanicOnBugs = true +- os.Exit(Main(m)) +-} +- +-const workspaceProxy = ` +--- example.com@v1.2.3/go.mod -- +-module example.com +- +-go 1.12 +--- example.com@v1.2.3/blah/blah.go -- +-package blah +- +-import "fmt" +- +-func SaySomething() { +- fmt.Println("something") +-} +--- random.org@v1.2.3/go.mod -- +-module random.org +- +-go 1.12 +--- random.org@v1.2.3/bye/bye.go -- +-package bye +- +-func Goodbye() { +- println("Bye") +-} +-` +- +-// TODO: Add a replace directive. +-const workspaceModule = ` +--- pkg/go.mod -- +-module mod.com +- +-go 1.14 +- +-require ( +- example.com v1.2.3 +- random.org v1.2.3 +-) +--- pkg/go.sum -- +-example.com v1.2.3 h1:veRD4tUnatQRgsULqULZPjeoBGFr2qBhevSCZllD2Ds= +-example.com v1.2.3/go.mod h1:Y2Rc5rVWjWur0h3pd9aEvK5Pof8YKDANh9gHA2Maujo= +-random.org v1.2.3 h1:+JE2Fkp7gS0zsHXGEQJ7hraom3pNTlkxC4b2qPfA+/Q= +-random.org v1.2.3/go.mod h1:E9KM6+bBX2g5ykHZ9H27w16sWo3QwgonyjM44Dnej3I= +--- pkg/main.go -- +-package main +- +-import ( +- "example.com/blah" +- "mod.com/inner" +- "random.org/bye" +-) +- +-func main() { +- blah.SaySomething() +- inner.Hi() +- bye.Goodbye() +-} +--- pkg/main2.go -- +-package main +- +-import "fmt" +- +-func _() { +- fmt.Print("%s") +-} +--- pkg/inner/inner.go -- +-package inner +- +-import "example.com/blah" +- +-func Hi() { +- blah.SaySomething() +-} +--- goodbye/bye/bye.go -- +-package bye +- +-func Bye() {} +--- goodbye/go.mod -- +-module random.org +- +-go 1.12 +-` +- +-// Confirm that find references returns all of the references in the module, +-// regardless of what the workspace root is. +-func TestReferences(t *testing.T) { +- for _, tt := range []struct { +- name, rootPath string +- }{ +- { +- name: "module root", +- rootPath: "pkg", +- }, +- { +- name: "subdirectory", +- rootPath: "pkg/inner", +- }, +- } { +- t.Run(tt.name, func(t *testing.T) { +- opts := []RunOption{ProxyFiles(workspaceProxy)} +- if tt.rootPath != "" { +- opts = append(opts, WorkspaceFolders(tt.rootPath)) +- } +- WithOptions(opts...).Run(t, workspaceModule, func(t *testing.T, env *Env) { +- f := "pkg/inner/inner.go" +- env.OpenFile(f) +- locations := env.References(env.RegexpSearch(f, `SaySomething`)) +- want := 3 +- if got := len(locations); got != want { +- t.Fatalf("expected %v locations, got %v", want, got) +- } +- }) +- }) +- } +-} +- +-// Make sure that analysis diagnostics are cleared for the whole package when +-// the only opened file is closed. This test was inspired by the experience in +-// VS Code, where clicking on a reference result triggers a +-// textDocument/didOpen without a corresponding textDocument/didClose. +-func TestClearAnalysisDiagnostics(t *testing.T) { +- WithOptions( +- ProxyFiles(workspaceProxy), +- WorkspaceFolders("pkg/inner"), +- ).Run(t, workspaceModule, func(t *testing.T, env *Env) { +- env.OpenFile("pkg/main.go") +- env.AfterChange( +- Diagnostics(env.AtRegexp("pkg/main2.go", "fmt.Print")), +- ) +- env.CloseBuffer("pkg/main.go") +- env.AfterChange( +- NoDiagnostics(ForFile("pkg/main2.go")), +- ) +- }) +-} +- +-// TestReloadOnlyOnce checks that changes to the go.mod file do not result in +-// redundant package loads (golang/go#54473). +-// +-// Note that this test may be fragile, as it depends on specific structure to +-// log messages around reinitialization. Nevertheless, it is important for +-// guarding against accidentally duplicate reloading. 
+-func TestReloadOnlyOnce(t *testing.T) { +- WithOptions( +- ProxyFiles(workspaceProxy), +- WorkspaceFolders("pkg"), +- ).Run(t, workspaceModule, func(t *testing.T, env *Env) { +- dir := env.Sandbox.Workdir.URI("goodbye").Path() +- goModWithReplace := fmt.Sprintf(`%s +-replace random.org => %s +-`, env.ReadWorkspaceFile("pkg/go.mod"), dir) +- env.WriteWorkspaceFile("pkg/go.mod", goModWithReplace) +- env.Await( +- LogMatching(protocol.Info, `packages\.Load #\d+\n`, 2, false), +- ) +- }) +-} +- +-const workspaceModuleProxy = ` +--- example.com@v1.2.3/go.mod -- +-module example.com +- +-go 1.12 +--- example.com@v1.2.3/blah/blah.go -- +-package blah +- +-import "fmt" +- +-func SaySomething() { +- fmt.Println("something") +-} +--- b.com@v1.2.3/go.mod -- +-module b.com +- +-go 1.12 +--- b.com@v1.2.3/b/b.go -- +-package b +- +-func Hello() {} +-` +- +-const multiModule = ` +--- moda/a/go.mod -- +-module a.com +- +-require b.com v1.2.3 +--- moda/a/go.sum -- +-b.com v1.2.3 h1:tXrlXP0rnjRpKNmkbLYoWBdq0ikb3C3bKK9//moAWBI= +-b.com v1.2.3/go.mod h1:D+J7pfFBZK5vdIdZEFquR586vKKIkqG7Qjw9AxG5BQ8= +--- moda/a/a.go -- +-package a +- +-import ( +- "b.com/b" +-) +- +-func main() { +- var x int +- _ = b.Hello() +-} +--- modb/go.mod -- +-module b.com +- +--- modb/b/b.go -- +-package b +- +-func Hello() int { +- var x int +-} +-` +- +-func TestAutomaticWorkspaceModule_Interdependent(t *testing.T) { +- WithOptions( +- ProxyFiles(workspaceModuleProxy), +- ).Run(t, multiModule, func(t *testing.T, env *Env) { +- env.RunGoCommand("work", "init") +- env.RunGoCommand("work", "use", "-r", ".") +- env.AfterChange( +- Diagnostics(env.AtRegexp("moda/a/a.go", "x")), +- Diagnostics(env.AtRegexp("modb/b/b.go", "x")), +- NoDiagnostics(env.AtRegexp("moda/a/a.go", `"b.com/b"`)), +- ) +- }) +-} +- +-func TestWorkspaceVendoring(t *testing.T) { +- testenv.NeedsGoCommand1Point(t, 22) +- WithOptions( +- ProxyFiles(workspaceModuleProxy), +- ).Run(t, multiModule, func(t *testing.T, env *Env) { +- env.RunGoCommand("work", "init") +- env.RunGoCommand("work", "use", "moda/a") +- env.AfterChange() +- env.OpenFile("moda/a/a.go") +- env.RunGoCommand("work", "vendor") +- env.AfterChange() +- loc := env.FirstDefinition(env.RegexpSearch("moda/a/a.go", "b.(Hello)")) +- const want = "vendor/b.com/b/b.go" +- if got := env.Sandbox.Workdir.URIToPath(loc.URI); got != want { +- t.Errorf("Definition: got location %q, want %q", got, want) +- } +- }) +-} +- +-func TestModuleWithExclude(t *testing.T) { +- const proxy = ` +--- c.com@v1.2.3/go.mod -- +-module c.com +- +-go 1.12 +- +-require b.com v1.2.3 +--- c.com@v1.2.3/blah/blah.go -- +-package blah +- +-import "fmt" +- +-func SaySomething() { +- fmt.Println("something") +-} +--- b.com@v1.2.3/go.mod -- +-module b.com +- +-go 1.12 +--- b.com@v1.2.4/b/b.go -- +-package b +- +-func Hello() {} +--- b.com@v1.2.4/go.mod -- +-module b.com +- +-go 1.12 +-` +- const files = ` +--- go.mod -- +-module a.com +- +-require c.com v1.2.3 +- +-exclude b.com v1.2.3 +--- main.go -- +-package a +- +-func main() { +- var x int +-} +-` +- WithOptions( +- WriteGoSum("."), +- ProxyFiles(proxy), +- ).Run(t, files, func(t *testing.T, env *Env) { +- env.OnceMet( +- InitialWorkspaceLoad, +- Diagnostics(env.AtRegexp("main.go", "x")), +- ) +- }) +-} +- +-// This change tests that the version of the module used changes after it has +-// been deleted from the workspace. +-// +-// TODO(golang/go#55331): delete this placeholder along with experimental +-// workspace module. 
+-func TestDeleteModule_Interdependent(t *testing.T) { +- const multiModule = ` +--- go.work -- +-go 1.18 +- +-use ( +- moda/a +- modb +-) +--- moda/a/go.mod -- +-module a.com +- +-require b.com v1.2.3 +--- moda/a/go.sum -- +-b.com v1.2.3 h1:tXrlXP0rnjRpKNmkbLYoWBdq0ikb3C3bKK9//moAWBI= +-b.com v1.2.3/go.mod h1:D+J7pfFBZK5vdIdZEFquR586vKKIkqG7Qjw9AxG5BQ8= +--- moda/a/a.go -- +-package a +- +-import ( +- "b.com/b" +-) +- +-func main() { +- var x int +- _ = b.Hello() +-} +--- modb/go.mod -- +-module b.com +- +--- modb/b/b.go -- +-package b +- +-func Hello() int { +- var x int +-} +-` +- WithOptions( +- ProxyFiles(workspaceModuleProxy), +- ).Run(t, multiModule, func(t *testing.T, env *Env) { +- env.OpenFile("moda/a/a.go") +- env.Await(env.DoneWithOpen()) +- +- originalLoc := env.FirstDefinition(env.RegexpSearch("moda/a/a.go", "Hello")) +- original := env.Sandbox.Workdir.URIToPath(originalLoc.URI) +- if want := "modb/b/b.go"; !strings.HasSuffix(original, want) { +- t.Errorf("expected %s, got %v", want, original) +- } +- env.AfterChange() +- +- env.RemoveWorkspaceFile("modb/b/b.go") +- env.RemoveWorkspaceFile("modb/go.mod") +- env.WriteWorkspaceFile("go.work", "go 1.18\nuse moda/a") +- env.AfterChange() +- +- gotLoc := env.FirstDefinition(env.RegexpSearch("moda/a/a.go", "Hello")) +- got := env.Sandbox.Workdir.URIToPath(gotLoc.URI) +- if want := "b.com@v1.2.3/b/b.go"; !strings.HasSuffix(got, want) { +- t.Errorf("expected %s, got %v", want, got) +- } +- }) +-} +- +-// Tests that the version of the module used changes after it has been added +-// to the workspace. +-func TestCreateModule_Interdependent(t *testing.T) { +- const multiModule = ` +--- go.work -- +-go 1.18 +- +-use ( +- moda/a +-) +--- moda/a/go.mod -- +-module a.com +- +-require b.com v1.2.3 +--- moda/a/go.sum -- +-b.com v1.2.3 h1:tXrlXP0rnjRpKNmkbLYoWBdq0ikb3C3bKK9//moAWBI= +-b.com v1.2.3/go.mod h1:D+J7pfFBZK5vdIdZEFquR586vKKIkqG7Qjw9AxG5BQ8= +--- moda/a/a.go -- +-package a +- +-import ( +- "b.com/b" +-) +- +-func main() { +- var x int +- _ = b.Hello() +-} +-` +- WithOptions( +- ProxyFiles(workspaceModuleProxy), +- ).Run(t, multiModule, func(t *testing.T, env *Env) { +- env.OpenFile("moda/a/a.go") +- loc := env.FirstDefinition(env.RegexpSearch("moda/a/a.go", "Hello")) +- original := env.Sandbox.Workdir.URIToPath(loc.URI) +- if want := "b.com@v1.2.3/b/b.go"; !strings.HasSuffix(original, want) { +- t.Errorf("expected %s, got %v", want, original) +- } +- env.WriteWorkspaceFiles(map[string]string{ +- "go.work": `go 1.18 +- +-use ( +- moda/a +- modb +-) +-`, +- "modb/go.mod": "module b.com", +- "modb/b/b.go": `package b +- +-func Hello() int { +- var x int +-} +-`, +- }) +- env.AfterChange(Diagnostics(env.AtRegexp("modb/b/b.go", "x"))) +- gotLoc := env.FirstDefinition(env.RegexpSearch("moda/a/a.go", "Hello")) +- got := env.Sandbox.Workdir.URIToPath(gotLoc.URI) +- if want := "modb/b/b.go"; !strings.HasSuffix(got, want) { +- t.Errorf("expected %s, got %v", want, original) +- } +- }) +-} +- +-// This test confirms that a gopls workspace can recover from initialization +-// with one invalid module. 
+-func TestOneBrokenModule(t *testing.T) { +- const multiModule = ` +--- go.work -- +-go 1.18 +- +-use ( +- moda/a +- modb +-) +--- moda/a/go.mod -- +-module a.com +- +-require b.com v1.2.3 +- +--- moda/a/a.go -- +-package a +- +-import ( +- "b.com/b" +-) +- +-func main() { +- var x int +- _ = b.Hello() +-} +--- modb/go.mod -- +-modul b.com // typo here +- +--- modb/b/b.go -- +-package b +- +-func Hello() int { +- var x int +-} +-` +- WithOptions( +- ProxyFiles(workspaceModuleProxy), +- ).Run(t, multiModule, func(t *testing.T, env *Env) { +- env.OpenFile("modb/go.mod") +- env.AfterChange( +- Diagnostics(AtPosition("modb/go.mod", 0, 0)), +- ) +- env.RegexpReplace("modb/go.mod", "modul", "module") +- env.SaveBufferWithoutActions("modb/go.mod") +- env.AfterChange( +- Diagnostics(env.AtRegexp("modb/b/b.go", "x")), +- ) +- }) +-} +- +-// TestBadGoWork exercises the panic from golang/vscode-go#2121. +-func TestBadGoWork(t *testing.T) { +- const files = ` +--- go.work -- +-use ./bar +--- bar/go.mod -- +-module example.com/bar +-` +- Run(t, files, func(t *testing.T, env *Env) { +- env.OpenFile("go.work") +- }) +-} +- +-func TestUseGoWork(t *testing.T) { +- // This test validates certain functionality related to using a go.work +- // file to specify workspace modules. +- const multiModule = ` +--- moda/a/go.mod -- +-module a.com +- +-require b.com v1.2.3 +--- moda/a/go.sum -- +-b.com v1.2.3 h1:tXrlXP0rnjRpKNmkbLYoWBdq0ikb3C3bKK9//moAWBI= +-b.com v1.2.3/go.mod h1:D+J7pfFBZK5vdIdZEFquR586vKKIkqG7Qjw9AxG5BQ8= +--- moda/a/a.go -- +-package a +- +-import ( +- "b.com/b" +-) +- +-func main() { +- var x int +- _ = b.Hello() +-} +--- modb/go.mod -- +-module b.com +- +-require example.com v1.2.3 +--- modb/go.sum -- +-example.com v1.2.3 h1:Yryq11hF02fEf2JlOS2eph+ICE2/ceevGV3C9dl5V/c= +-example.com v1.2.3/go.mod h1:Y2Rc5rVWjWur0h3pd9aEvK5Pof8YKDANh9gHA2Maujo= +--- modb/b/b.go -- +-package b +- +-func Hello() int { +- var x int +-} +--- go.work -- +-go 1.17 +- +-use ( +- ./moda/a +-) +-` +- WithOptions( +- ProxyFiles(workspaceModuleProxy), +- Settings{ +- "subdirWatchPatterns": "on", +- }, +- ).Run(t, multiModule, func(t *testing.T, env *Env) { +- // Initially, the go.work should cause only the a.com module to be loaded, +- // so we shouldn't get any file watches for modb. Further validate this by +- // jumping to a definition in b.com and ensuring that we go to the module +- // cache. +- env.OnceMet( +- InitialWorkspaceLoad, +- NoFileWatchMatching("modb"), +- ) +- env.OpenFile("moda/a/a.go") +- env.Await(env.DoneWithOpen()) +- +- // To verify which modules are loaded, we'll jump to the definition of +- // b.Hello. +- checkHelloLocation := func(want string) error { +- loc := env.FirstDefinition(env.RegexpSearch("moda/a/a.go", "Hello")) +- file := env.Sandbox.Workdir.URIToPath(loc.URI) +- if !strings.HasSuffix(file, want) { +- return fmt.Errorf("expected %s, got %v", want, file) +- } +- return nil +- } +- +- // Initially this should be in the module cache, as b.com is not replaced. +- if err := checkHelloLocation("b.com@v1.2.3/b/b.go"); err != nil { +- t.Fatal(err) +- } +- +- // Now, modify the go.work file on disk to activate the b.com module in +- // the workspace. +- env.WriteWorkspaceFile("go.work", ` +-go 1.17 +- +-use ( +- ./moda/a +- ./modb +-) +-`) +- +- // As of golang/go#54069, writing go.work to the workspace triggers a +- // workspace reload, and new file watches. 
+- env.AfterChange( +- Diagnostics(env.AtRegexp("modb/b/b.go", "x")), +- // TODO(golang/go#60340): we don't get a file watch yet, because +- // updateWatchedDirectories runs before snapshot.load. Instead, we get it +- // after the next change (the didOpen below). +- // FileWatchMatching("modb"), +- ) +- +- // Jumping to definition should now go to b.com in the workspace. +- if err := checkHelloLocation("modb/b/b.go"); err != nil { +- t.Fatal(err) +- } +- +- // Now, let's modify the go.work *overlay* (not on disk), and verify that +- // this change is only picked up once it is saved. +- env.OpenFile("go.work") +- env.AfterChange( +- // TODO(golang/go#60340): delete this expectation in favor of +- // the commented-out expectation above, once we fix the evaluation order +- // of file watches. We should not have to wait for a second change to get +- // the correct watches. +- FileWatchMatching("modb"), +- ) +- env.SetBufferContent("go.work", `go 1.17 +- +-use ( +- ./moda/a +-)`) +- +- // Simply modifying the go.work file does not cause a reload, so we should +- // still jump within the workspace. +- // +- // TODO: should editing the go.work above cause modb diagnostics to be +- // suppressed? +- env.AfterChange() +- if err := checkHelloLocation("modb/b/b.go"); err != nil { +- t.Fatal(err) +- } +- +- // Saving should reload the workspace. +- env.SaveBufferWithoutActions("go.work") +- if err := checkHelloLocation("b.com@v1.2.3/b/b.go"); err != nil { +- t.Fatal(err) +- } +- +- // Since no file in modb is open, there should be no view containing +- // modb/go.mod, and we should clear its diagnostics. +- env.AfterChange(NoDiagnostics(ForFile("modb/go.mod"))) +- +- // Test Formatting. +- env.SetBufferContent("go.work", `go 1.18 +- use ( +- +- +- +- ./moda/a +-) +-`) // TODO(matloob): For some reason there's a "start position 7:0 is out of bounds" error when the ")" is on the last character/line in the file. Rob probably knows what's going on. +- env.SaveBuffer("go.work") +- env.AfterChange() +- gotWorkContents := env.ReadWorkspaceFile("go.work") +- wantWorkContents := `go 1.18 +- +-use ( +- ./moda/a +-) +-` +- if gotWorkContents != wantWorkContents { +- t.Fatalf("formatted contents of workspace: got %q; want %q", gotWorkContents, wantWorkContents) +- } +- }) +-} +- +-func TestUseGoWorkDiagnosticMissingModule(t *testing.T) { +- const files = ` +--- go.work -- +-go 1.18 +- +-use ./foo +--- bar/go.mod -- +-module example.com/bar +-` +- Run(t, files, func(t *testing.T, env *Env) { +- env.OpenFile("go.work") +- env.AfterChange( +- Diagnostics(env.AtRegexp("go.work", "use"), WithMessage("directory ./foo does not contain a module")), +- ) +- // The following tests is a regression test against an issue where we weren't +- // copying the workFile struct field on workspace when a new one was created in +- // (*workspace).invalidate. Set the buffer content to a working file so that +- // invalidate recognizes the workspace to be change and copies over the workspace +- // struct, and then set the content back to the old contents to make sure +- // the diagnostic still shows up. 
+- env.SetBufferContent("go.work", "go 1.18 \n\n use ./bar\n") +- env.AfterChange( +- NoDiagnostics(env.AtRegexp("go.work", "use")), +- ) +- env.SetBufferContent("go.work", "go 1.18 \n\n use ./foo\n") +- env.AfterChange( +- Diagnostics(env.AtRegexp("go.work", "use"), WithMessage("directory ./foo does not contain a module")), +- ) +- }) +-} +- +-func TestUseGoWorkDiagnosticSyntaxError(t *testing.T) { +- const files = ` +--- go.work -- +-go 1.18 +- +-usa ./foo +-replace +-` +- Run(t, files, func(t *testing.T, env *Env) { +- env.OpenFile("go.work") +- env.AfterChange( +- Diagnostics(env.AtRegexp("go.work", "usa"), WithMessage("unknown directive: usa")), +- Diagnostics(env.AtRegexp("go.work", "replace"), WithMessage("usage: replace")), +- ) +- }) +-} +- +-func TestUseGoWorkHover(t *testing.T) { +- const files = ` +--- go.work -- +-go 1.18 +- +-use ./foo +-use ( +- ./bar +- ./bar/baz +-) +--- foo/go.mod -- +-module example.com/foo +--- bar/go.mod -- +-module example.com/bar +--- bar/baz/go.mod -- +-module example.com/bar/baz +-` +- Run(t, files, func(t *testing.T, env *Env) { +- env.OpenFile("go.work") +- +- tcs := map[string]string{ +- `\./foo`: "example.com/foo", +- `(?m)\./bar$`: "example.com/bar", +- `\./bar/baz`: "example.com/bar/baz", +- } +- +- for hoverRE, want := range tcs { +- got, _ := env.Hover(env.RegexpSearch("go.work", hoverRE)) +- if got.Value != want { +- t.Errorf(`hover on %q: got %q, want %q`, hoverRE, got, want) +- } +- } +- }) +-} +- +-func TestExpandToGoWork(t *testing.T) { +- const workspace = ` +--- moda/a/go.mod -- +-module a.com +- +-require b.com v1.2.3 +--- moda/a/a.go -- +-package a +- +-import ( +- "b.com/b" +-) +- +-func main() { +- var x int +- _ = b.Hello() +-} +--- modb/go.mod -- +-module b.com +- +-require example.com v1.2.3 +--- modb/b/b.go -- +-package b +- +-func Hello() int { +- var x int +-} +--- go.work -- +-go 1.17 +- +-use ( +- ./moda/a +- ./modb +-) +-` +- WithOptions( +- WorkspaceFolders("moda/a"), +- ).Run(t, workspace, func(t *testing.T, env *Env) { +- env.OpenFile("moda/a/a.go") +- env.Await(env.DoneWithOpen()) +- loc := env.FirstDefinition(env.RegexpSearch("moda/a/a.go", "Hello")) +- file := env.Sandbox.Workdir.URIToPath(loc.URI) +- want := "modb/b/b.go" +- if !strings.HasSuffix(file, want) { +- t.Errorf("expected %s, got %v", want, file) +- } +- }) +-} +- +-func TestInnerGoWork(t *testing.T) { +- // This test checks that gopls honors a go.work file defined +- // inside a go module (golang/go#63917). +- const workspace = ` +--- go.mod -- +-module a.com +- +-require b.com v1.2.3 +--- a/go.work -- +-go 1.18 +- +-use ( +- .. +- ../b +-) +--- a/a.go -- +-package a +- +-import "b.com/b" +- +-var _ = b.B +--- b/go.mod -- +-module b.com/b +- +--- b/b.go -- +-package b +- +-const B = 0 +-` +- WithOptions( +- // This doesn't work if we open the outer module. I'm not sure it should, +- // since the go.work file does not apply to the entire module, just a +- // subdirectory. 
+- WorkspaceFolders("a"), +- ).Run(t, workspace, func(t *testing.T, env *Env) { +- env.OpenFile("a/a.go") +- loc := env.FirstDefinition(env.RegexpSearch("a/a.go", "b.(B)")) +- got := env.Sandbox.Workdir.URIToPath(loc.URI) +- want := "b/b.go" +- if got != want { +- t.Errorf("Definition(b.B): got %q, want %q", got, want) +- } +- }) +-} +- +-func TestNonWorkspaceFileCreation(t *testing.T) { +- const files = ` +--- work/go.mod -- +-module mod.com +- +-go 1.12 +--- work/x.go -- +-package x +-` +- +- const code = ` +-package foo +-import "fmt" +-var _ = fmt.Printf +-` +- WithOptions( +- WorkspaceFolders("work"), // so that outside/... is outside the workspace +- ).Run(t, files, func(t *testing.T, env *Env) { +- env.CreateBuffer("outside/foo.go", "") +- env.EditBuffer("outside/foo.go", fake.NewEdit(0, 0, 0, 0, code)) +- env.FirstDefinition(env.RegexpSearch("outside/foo.go", `Printf`)) +- }) +-} +- +-func TestGoWork_V2Module(t *testing.T) { +- // When using a go.work, we must have proxy content even if it is replaced. +- const proxy = ` +--- b.com/v2@v2.1.9/go.mod -- +-module b.com/v2 +- +-go 1.12 +--- b.com/v2@v2.1.9/b/b.go -- +-package b +- +-func Ciao()() int { +- return 0 +-} +-` +- +- const multiModule = ` +--- go.work -- +-go 1.18 +- +-use ( +- moda/a +- modb +- modb/v2 +- modc +-) +--- moda/a/go.mod -- +-module a.com +- +-require b.com/v2 v2.1.9 +--- moda/a/a.go -- +-package a +- +-import ( +- "b.com/v2/b" +-) +- +-func main() { +- var x int +- _ = b.Hi() +-} +--- modb/go.mod -- +-module b.com +- +--- modb/b/b.go -- +-package b +- +-func Hello() int { +- var x int +-} +--- modb/v2/go.mod -- +-module b.com/v2 +- +--- modb/v2/b/b.go -- +-package b +- +-func Hi() int { +- var x int +-} +--- modc/go.mod -- +-module gopkg.in/yaml.v1 // test gopkg.in versions +--- modc/main.go -- +-package main +- +-func main() { +- var x int +-} +-` +- +- WithOptions( +- ProxyFiles(proxy), +- ).Run(t, multiModule, func(t *testing.T, env *Env) { +- env.OnceMet( +- InitialWorkspaceLoad, +- // TODO(rfindley): assert on the full set of diagnostics here. We +- // should ensure that we don't have a diagnostic at b.Hi in a.go. +- Diagnostics(env.AtRegexp("moda/a/a.go", "x")), +- Diagnostics(env.AtRegexp("modb/b/b.go", "x")), +- Diagnostics(env.AtRegexp("modb/v2/b/b.go", "x")), +- Diagnostics(env.AtRegexp("modc/main.go", "x")), +- ) +- }) +-} +- +-// Confirm that a fix for a tidy module will correct all modules in the +-// workspace. +-func TestMultiModule_OneBrokenModule(t *testing.T) { +- // In the earlier 'experimental workspace mode', gopls would aggregate go.sum +- // entries for the workspace module, allowing it to correctly associate +- // missing go.sum with diagnostics. With go.work files, this doesn't work: +- // the go.command will happily write go.work.sum. 
+- t.Skip("golang/go#57509: go.mod diagnostics do not work in go.work mode") +- const files = ` +--- go.work -- +-go 1.18 +- +-use ( +- a +- b +-) +--- go.work.sum -- +--- a/go.mod -- +-module a.com +- +-go 1.12 +--- a/main.go -- +-package main +--- b/go.mod -- +-module b.com +- +-go 1.12 +- +-require ( +- example.com v1.2.3 +-) +--- b/go.sum -- +--- b/main.go -- +-package b +- +-import "example.com/blah" +- +-func main() { +- blah.Hello() +-} +-` +- WithOptions( +- ProxyFiles(workspaceProxy), +- ).Run(t, files, func(t *testing.T, env *Env) { +- params := &protocol.PublishDiagnosticsParams{} +- env.OpenFile("b/go.mod") +- env.AfterChange( +- Diagnostics( +- env.AtRegexp("go.mod", `example.com v1.2.3`), +- WithMessage("go.sum is out of sync"), +- ), +- ReadDiagnostics("b/go.mod", params), +- ) +- for _, d := range params.Diagnostics { +- if !strings.Contains(d.Message, "go.sum is out of sync") { +- continue +- } +- actions := env.GetQuickFixes("b/go.mod", []protocol.Diagnostic{d}) +- if len(actions) != 2 { +- t.Fatalf("expected 2 code actions, got %v", len(actions)) +- } +- env.ApplyQuickFixes("b/go.mod", []protocol.Diagnostic{d}) +- } +- env.AfterChange( +- NoDiagnostics(ForFile("b/go.mod")), +- ) +- }) +-} +- +-// Tests the fix for golang/go#52500. +-func TestChangeTestVariant_Issue52500(t *testing.T) { +- const src = ` +--- go.mod -- +-module mod.test +- +-go 1.12 +--- main_test.go -- +-package main_test +- +-type Server struct{} +- +-const MainConst = otherConst +--- other_test.go -- +-package main_test +- +-const otherConst = 0 +- +-func (Server) Foo() {} +-` +- +- Run(t, src, func(t *testing.T, env *Env) { +- env.OpenFile("other_test.go") +- env.RegexpReplace("other_test.go", "main_test", "main") +- +- // For this test to function, it is necessary to wait on both of the +- // expectations below: the bug is that when switching the package name in +- // other_test.go from main->main_test, metadata for main_test is not marked +- // as invalid. So we need to wait for the metadata of main_test.go to be +- // updated before moving other_test.go back to the main_test package. +- env.Await( +- Diagnostics(env.AtRegexp("other_test.go", "Server")), +- Diagnostics(env.AtRegexp("main_test.go", "otherConst")), +- ) +- env.RegexpReplace("other_test.go", "main", "main_test") +- env.AfterChange( +- NoDiagnostics(ForFile("other_test.go")), +- NoDiagnostics(ForFile("main_test.go")), +- ) +- +- // This will cause a test failure if other_test.go is not in any package. +- _ = env.FirstDefinition(env.RegexpSearch("other_test.go", "Server")) +- }) +-} +- +-// Test for golang/go#48929. +-func TestClearNonWorkspaceDiagnostics(t *testing.T) { +- const ws = ` +--- go.work -- +-go 1.18 +- +-use ( +- ./b +-) +--- a/go.mod -- +-module a +- +-go 1.17 +--- a/main.go -- +-package main +- +-func main() { +- var V string +-} +--- b/go.mod -- +-module b +- +-go 1.17 +--- b/main.go -- +-package b +- +-import ( +- _ "fmt" +-) +-` +- Run(t, ws, func(t *testing.T, env *Env) { +- env.OpenFile("b/main.go") +- env.AfterChange( +- NoDiagnostics(ForFile("a/main.go")), +- ) +- env.OpenFile("a/main.go") +- env.AfterChange( +- Diagnostics(env.AtRegexp("a/main.go", "V"), WithMessage("not used")), +- ) +- // Here, diagnostics are added because of zero-config gopls. +- // In the past, they were added simply due to diagnosing changed files. +- // (see TestClearNonWorkspaceDiagnostics_NoView below for a +- // reimplementation of that test). 
+- if got, want := len(env.Views()), 2; got != want { +- t.Errorf("after opening a/main.go, got %d views, want %d", got, want) +- } +- env.CloseBuffer("a/main.go") +- env.AfterChange( +- NoDiagnostics(ForFile("a/main.go")), +- ) +- if got, want := len(env.Views()), 1; got != want { +- t.Errorf("after closing a/main.go, got %d views, want %d", got, want) +- } +- }) +-} +- +-// This test is like TestClearNonWorkspaceDiagnostics, but bypasses the +-// zero-config algorithm by opening a nested workspace folder. +-// +-// We should still compute diagnostics correctly for open packages. +-func TestClearNonWorkspaceDiagnostics_NoView(t *testing.T) { +- const ws = ` +--- a/go.mod -- +-module example.com/a +- +-go 1.18 +- +-require example.com/b v1.2.3 +- +-replace example.com/b => ../b +- +--- a/a.go -- +-package a +- +-import "example.com/b" +- +-func _() { +- V := b.B // unused +-} +- +--- b/go.mod -- +-module b +- +-go 1.18 +- +--- b/b.go -- +-package b +- +-const B = 2 +- +-func _() { +- var V int // unused +-} +- +--- b/b2.go -- +-package b +- +-const B2 = B +- +--- c/c.go -- +-package main +- +-func main() { +- var V int // unused +-} +-` +- WithOptions( +- WorkspaceFolders("a"), +- ).Run(t, ws, func(t *testing.T, env *Env) { +- env.OpenFile("a/a.go") +- env.AfterChange( +- Diagnostics(env.AtRegexp("a/a.go", "V"), WithMessage("not used")), +- NoDiagnostics(ForFile("b/b.go")), +- NoDiagnostics(ForFile("c/c.go")), +- ) +- env.OpenFile("b/b.go") +- env.AfterChange( +- Diagnostics(env.AtRegexp("a/a.go", "V"), WithMessage("not used")), +- Diagnostics(env.AtRegexp("b/b.go", "V"), WithMessage("not used")), +- NoDiagnostics(ForFile("c/c.go")), +- ) +- +- // Opening b/b.go should not result in a new view, because b is not +- // contained in a workspace folder. +- // +- // Yet we should get diagnostics for b, because it is open. +- if got, want := len(env.Views()), 1; got != want { +- t.Errorf("after opening b/b.go, got %d views, want %d", got, want) +- } +- env.CloseBuffer("b/b.go") +- env.AfterChange( +- Diagnostics(env.AtRegexp("a/a.go", "V"), WithMessage("not used")), +- NoDiagnostics(ForFile("b/b.go")), +- NoDiagnostics(ForFile("c/c.go")), +- ) +- +- // We should get references in the b package. +- bUse := env.RegexpSearch("a/a.go", `b\.(B)`) +- refs := env.References(bUse) +- wantRefs := []string{"a/a.go", "b/b.go", "b/b2.go"} +- var gotRefs []string +- for _, ref := range refs { +- gotRefs = append(gotRefs, env.Sandbox.Workdir.URIToPath(ref.URI)) +- } +- sort.Strings(gotRefs) +- if diff := cmp.Diff(wantRefs, gotRefs); diff != "" { +- t.Errorf("references(b.B) mismatch (-want +got)\n%s", diff) +- } +- +- // Opening c/c.go should also not result in a new view, yet we should get +- // orphaned file diagnostics. 
+- env.OpenFile("c/c.go") +- env.AfterChange( +- Diagnostics(env.AtRegexp("a/a.go", "V"), WithMessage("not used")), +- NoDiagnostics(ForFile("b/b.go")), +- Diagnostics(env.AtRegexp("c/c.go", "V"), WithMessage("not used")), +- ) +- if got, want := len(env.Views()), 1; got != want { +- t.Errorf("after opening b/b.go, got %d views, want %d", got, want) +- } +- +- env.CloseBuffer("c/c.go") +- env.AfterChange( +- Diagnostics(env.AtRegexp("a/a.go", "V"), WithMessage("not used")), +- NoDiagnostics(ForFile("b/b.go")), +- NoDiagnostics(ForFile("c/c.go")), +- ) +- env.CloseBuffer("a/a.go") +- env.AfterChange( +- Diagnostics(env.AtRegexp("a/a.go", "V"), WithMessage("not used")), +- NoDiagnostics(ForFile("b/b.go")), +- NoDiagnostics(ForFile("c/c.go")), +- ) +- }) +-} +- +-// Test that we don't get a version warning when the Go version in PATH is +-// supported. +-func TestOldGoNotification_SupportedVersion(t *testing.T) { +- v := goVersion(t) +- if v < goversion.OldestSupported() { +- t.Skipf("go version 1.%d is unsupported", v) +- } +- +- Run(t, "", func(t *testing.T, env *Env) { +- env.OnceMet( +- InitialWorkspaceLoad, +- NoShownMessage("upgrade"), +- ) +- }) +-} +- +-// Test that we do get a version warning when the Go version in PATH is +-// unsupported, though this test may never execute if we stop running CI at +-// legacy Go versions (see also TestOldGoNotification_Fake) +-func TestOldGoNotification_UnsupportedVersion(t *testing.T) { +- v := goVersion(t) +- if v >= goversion.OldestSupported() { +- t.Skipf("go version 1.%d is supported", v) +- } +- +- Run(t, "", func(t *testing.T, env *Env) { +- env.Await( +- // Note: cannot use OnceMet(InitialWorkspaceLoad, ...) here, as the +- // upgrade message may race with the IWL. +- ShownMessage("Please upgrade"), +- ) +- }) +-} +- +-func TestOldGoNotification_Fake(t *testing.T) { +- // Get the Go version from path, and make sure it's unsupported. +- // +- // In the future we'll stop running CI on legacy Go versions. By mutating the +- // oldest supported Go version here, we can at least ensure that the +- // ShowMessage pop-up works. +- ctx := context.Background() +- version, err := gocommand.GoVersion(ctx, gocommand.Invocation{}, &gocommand.Runner{}) +- if err != nil { +- t.Fatal(err) +- } +- defer func(t []goversion.Support) { +- goversion.Supported = t +- }(goversion.Supported) +- goversion.Supported = []goversion.Support{ +- {GoVersion: version, InstallGoplsVersion: "v1.0.0"}, +- } +- +- Run(t, "", func(t *testing.T, env *Env) { +- env.Await( +- // Note: cannot use OnceMet(InitialWorkspaceLoad, ...) here, as the +- // upgrade message may race with the IWL. +- ShownMessage("Please upgrade"), +- ) +- }) +-} +- +-// goVersion returns the version of the Go command in PATH. 
+-func goVersion(t *testing.T) int { +- t.Helper() +- ctx := context.Background() +- goversion, err := gocommand.GoVersion(ctx, gocommand.Invocation{}, &gocommand.Runner{}) +- if err != nil { +- t.Fatal(err) +- } +- return goversion +-} +- +-func TestGoworkMutation(t *testing.T) { +- WithOptions( +- ProxyFiles(workspaceModuleProxy), +- ).Run(t, multiModule, func(t *testing.T, env *Env) { +- env.RunGoCommand("work", "init") +- env.RunGoCommand("work", "use", "-r", ".") +- env.AfterChange( +- Diagnostics(env.AtRegexp("moda/a/a.go", "x")), +- Diagnostics(env.AtRegexp("modb/b/b.go", "x")), +- NoDiagnostics(env.AtRegexp("moda/a/a.go", `b\.Hello`)), +- ) +- env.RunGoCommand("work", "edit", "-dropuse", "modb") +- env.Await( +- Diagnostics(env.AtRegexp("moda/a/a.go", "x")), +- NoDiagnostics(env.AtRegexp("modb/b/b.go", "x")), +- Diagnostics(env.AtRegexp("moda/a/a.go", `b\.Hello`)), +- ) +- }) +-} +- +-func TestInitializeWithNonFileWorkspaceFolders(t *testing.T) { +- for _, tt := range []struct { +- name string +- folders []string +- wantViewRoots []string +- }{ +- { +- name: "real,virtual", +- folders: []string{"modb", "virtual:///virtualpath"}, +- wantViewRoots: []string{"./modb"}, +- }, +- { +- name: "virtual,real", +- folders: []string{"virtual:///virtualpath", "modb"}, +- wantViewRoots: []string{"./modb"}, +- }, +- { +- name: "real,virtual,real", +- folders: []string{"moda/a", "virtual:///virtualpath", "modb"}, +- wantViewRoots: []string{"./moda/a", "./modb"}, +- }, +- { +- name: "virtual", +- folders: []string{"virtual:///virtualpath"}, +- wantViewRoots: nil, +- }, +- } { +- +- t.Run(tt.name, func(t *testing.T) { +- opts := []RunOption{ProxyFiles(workspaceProxy), WorkspaceFolders(tt.folders...)} +- WithOptions(opts...).Run(t, multiModule, func(t *testing.T, env *Env) { +- summary := func(typ cache.ViewType, root, folder string) command.View { +- return command.View{ +- Type: typ.String(), +- Root: env.Sandbox.Workdir.URI(root), +- Folder: env.Sandbox.Workdir.URI(folder), +- } +- } +- checkViews := func(want ...command.View) { +- got := env.Views() +- if diff := cmp.Diff(want, got, cmpopts.IgnoreFields(command.View{}, "ID")); diff != "" { +- t.Errorf("SummarizeViews() mismatch (-want +got):\n%s", diff) +- } +- } +- var wantViews []command.View +- for _, root := range tt.wantViewRoots { +- wantViews = append(wantViews, summary(cache.GoModView, root, root)) +- } +- env.Await( +- LogMatching(protocol.Warning, "skip adding virtual folder", 1, false), +- ) +- checkViews(wantViews...) +- }) +- }) +- } +-} +- +-// TestChangeAddedWorkspaceFolders tests issue71967 which an editor sends the following requests. +-// +-// 1. send an initialization request with rootURI but no workspaceFolders, +-// which gopls helps to find a workspaceFolders for it. +-// 2. send a DidChangeWorkspaceFolders request with the exact the same folder gopls helps to find. +-// +-// It uses the same approach to simulate the scenario, and ensure we can skip the already added file. 
+-func TestChangeAddedWorkspaceFolders(t *testing.T) { +- for _, tt := range []struct { +- name string +- after []string +- wantViewRoots []string +- }{ +- { +- name: "add an already added file", +- after: []string{"modb"}, +- wantViewRoots: []string{"./modb"}, +- }, +- { +- name: "add an already added file but with an ending slash", +- after: []string{"modb/"}, +- wantViewRoots: []string{"./modb"}, +- }, +- { +- name: "add an already added file and a new file", +- after: []string{"modb", "moda/a"}, +- wantViewRoots: []string{"./modb", "moda/a"}, +- }, +- } { +- t.Run(tt.name, func(t *testing.T) { +- opts := []RunOption{ProxyFiles(workspaceProxy), RootPath("modb"), NoDefaultWorkspaceFiles()} +- WithOptions(opts...).Run(t, multiModule, func(t *testing.T, env *Env) { +- summary := func(typ cache.ViewType, root, folder string) command.View { +- return command.View{ +- Type: typ.String(), +- Root: env.Sandbox.Workdir.URI(root), +- Folder: env.Sandbox.Workdir.URI(folder), +- } +- } +- checkViews := func(want ...command.View) { +- got := env.Views() +- if diff := cmp.Diff(want, got, cmpopts.IgnoreFields(command.View{}, "ID")); diff != "" { +- t.Errorf("SummarizeViews() mismatch (-want +got):\n%s", diff) +- } +- } +- var wantViews []command.View +- for _, root := range tt.wantViewRoots { +- wantViews = append(wantViews, summary(cache.GoModView, root, root)) +- } +- env.ChangeWorkspaceFolders(tt.after...) +- env.Await( +- LogMatching(protocol.Warning, "skip adding the already added folder", 1, false), +- NoOutstandingWork(IgnoreTelemetryPromptWork), +- ) +- checkViews(wantViews...) +- }) +- }) +- } +-} +- +-// Test that non-file scheme Document URIs in ChangeWorkspaceFolders +-// notification does not produce errors. +-func TestChangeNonFileWorkspaceFolders(t *testing.T) { +- for _, tt := range []struct { +- name string +- before []string +- after []string +- wantViewRoots []string +- }{ +- { +- name: "add", +- before: []string{"modb"}, +- after: []string{"modb", "moda/a", "virtual:///virtualpath"}, +- wantViewRoots: []string{"./modb", "moda/a"}, +- }, +- { +- name: "remove", +- before: []string{"modb", "virtual:///virtualpath", "moda/a"}, +- after: []string{"modb"}, +- wantViewRoots: []string{"./modb"}, +- }, +- } { +- t.Run(tt.name, func(t *testing.T) { +- opts := []RunOption{ProxyFiles(workspaceProxy), WorkspaceFolders(tt.before...)} +- WithOptions(opts...).Run(t, multiModule, func(t *testing.T, env *Env) { +- summary := func(typ cache.ViewType, root, folder string) command.View { +- return command.View{ +- Type: typ.String(), +- Root: env.Sandbox.Workdir.URI(root), +- Folder: env.Sandbox.Workdir.URI(folder), +- } +- } +- checkViews := func(want ...command.View) { +- got := env.Views() +- if diff := cmp.Diff(want, got, cmpopts.IgnoreFields(command.View{}, "ID")); diff != "" { +- t.Errorf("SummarizeViews() mismatch (-want +got):\n%s", diff) +- } +- } +- var wantViews []command.View +- for _, root := range tt.wantViewRoots { +- wantViews = append(wantViews, summary(cache.GoModView, root, root)) +- } +- env.ChangeWorkspaceFolders(tt.after...) +- env.Await( +- LogMatching(protocol.Warning, "skip adding virtual folder", 1, false), +- NoOutstandingWork(IgnoreTelemetryPromptWork), +- ) +- checkViews(wantViews...) 
+- }) +- }) +- } +-} +diff -urN a/gopls/internal/test/integration/workspace/zero_config_test.go b/gopls/internal/test/integration/workspace/zero_config_test.go +--- a/gopls/internal/test/integration/workspace/zero_config_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/integration/workspace/zero_config_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,328 +0,0 @@ +-// Copyright 2023 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package workspace +- +-import ( +- "strings" +- "testing" +- +- "github.com/google/go-cmp/cmp" +- "github.com/google/go-cmp/cmp/cmpopts" +- "golang.org/x/tools/gopls/internal/cache" +- "golang.org/x/tools/gopls/internal/protocol/command" +- +- . "golang.org/x/tools/gopls/internal/test/integration" +-) +- +-func TestAddAndRemoveGoWork(t *testing.T) { +- // Use a workspace with a module in the root directory to exercise the case +- // where a go.work is added to the existing root directory. This verifies +- // that we're detecting changes to the module source, not just the root +- // directory. +- const nomod = ` +--- go.mod -- +-module a.com +- +-go 1.16 +--- main.go -- +-package main +- +-func main() {} +--- b/go.mod -- +-module b.com +- +-go 1.16 +--- b/main.go -- +-package main +- +-func main() {} +-` +- WithOptions( +- Modes(Default), +- ).Run(t, nomod, func(t *testing.T, env *Env) { +- env.OpenFile("main.go") +- env.OpenFile("b/main.go") +- +- summary := func(typ cache.ViewType, root, folder string) command.View { +- return command.View{ +- Type: typ.String(), +- Root: env.Sandbox.Workdir.URI(root), +- Folder: env.Sandbox.Workdir.URI(folder), +- } +- } +- checkViews := func(want ...command.View) { +- got := env.Views() +- if diff := cmp.Diff(want, got, cmpopts.IgnoreFields(command.View{}, "ID")); diff != "" { +- t.Errorf("SummarizeViews() mismatch (-want +got):\n%s", diff) +- } +- } +- +- // Zero-config gopls makes this work. +- env.AfterChange( +- NoDiagnostics(ForFile("main.go")), +- NoDiagnostics(env.AtRegexp("b/main.go", "package (main)")), +- ) +- checkViews(summary(cache.GoModView, ".", "."), summary(cache.GoModView, "b", ".")) +- +- env.WriteWorkspaceFile("go.work", `go 1.16 +- +-use ( +- . +- b +-) +-`) +- env.AfterChange(NoDiagnostics()) +- checkViews(summary(cache.GoWorkView, ".", ".")) +- +- // Removing the go.work file should put us back where we started. +- env.RemoveWorkspaceFile("go.work") +- +- // Again, zero-config gopls makes this work. +- env.AfterChange( +- NoDiagnostics(ForFile("main.go")), +- NoDiagnostics(env.AtRegexp("b/main.go", "package (main)")), +- ) +- checkViews(summary(cache.GoModView, ".", "."), summary(cache.GoModView, "b", ".")) +- +- // Close and reopen b, to ensure the views are adjusted accordingly. +- env.CloseBuffer("b/main.go") +- env.AfterChange() +- checkViews(summary(cache.GoModView, ".", ".")) +- +- env.OpenFile("b/main.go") +- env.AfterChange() +- checkViews(summary(cache.GoModView, ".", "."), summary(cache.GoModView, "b", ".")) +- }) +-} +- +-func TestOpenAndClosePorts(t *testing.T) { +- // This test checks that as we open and close files requiring a different +- // port, the set of Views is adjusted accordingly. 
+- const files = ` +--- go.mod -- +-module a.com/a +- +-go 1.20 +- +--- a_linux.go -- +-package a +- +--- a_darwin.go -- +-package a +- +--- a_windows.go -- +-package a +-` +- +- WithOptions( +- EnvVars{ +- "GOOS": "linux", // assume that linux is the default GOOS +- }, +- ).Run(t, files, func(t *testing.T, env *Env) { +- summary := func(envOverlay ...string) command.View { +- return command.View{ +- Type: cache.GoModView.String(), +- Root: env.Sandbox.Workdir.URI("."), +- Folder: env.Sandbox.Workdir.URI("."), +- EnvOverlay: envOverlay, +- } +- } +- checkViews := func(want ...command.View) { +- got := env.Views() +- if diff := cmp.Diff(want, got, cmpopts.IgnoreFields(command.View{}, "ID")); diff != "" { +- t.Errorf("SummarizeViews() mismatch (-want +got):\n%s", diff) +- } +- } +- checkViews(summary()) +- env.OpenFile("a_linux.go") +- checkViews(summary()) +- env.OpenFile("a_darwin.go") +- checkViews( +- summary(), +- summary("GOARCH=amd64", "GOOS=darwin"), +- ) +- env.OpenFile("a_windows.go") +- checkViews( +- summary(), +- summary("GOARCH=amd64", "GOOS=darwin"), +- summary("GOARCH=amd64", "GOOS=windows"), +- ) +- env.CloseBuffer("a_darwin.go") +- checkViews( +- summary(), +- summary("GOARCH=amd64", "GOOS=windows"), +- ) +- env.CloseBuffer("a_linux.go") +- checkViews( +- summary(), +- summary("GOARCH=amd64", "GOOS=windows"), +- ) +- env.CloseBuffer("a_windows.go") +- checkViews(summary()) +- }) +-} +- +-func TestCriticalErrorsInOrphanedFiles(t *testing.T) { +- // This test checks that as we open and close files requiring a different +- // port, the set of Views is adjusted accordingly. +- const files = ` +--- go.mod -- +-modul golang.org/lsptests/broken +- +-go 1.20 +- +--- a.go -- +-package broken +- +-const C = 0 +-` +- +- Run(t, files, func(t *testing.T, env *Env) { +- env.OpenFile("a.go") +- env.AfterChange( +- Diagnostics(env.AtRegexp("go.mod", "modul")), +- Diagnostics(env.AtRegexp("a.go", "broken"), WithMessage("initialization failed")), +- ) +- }) +-} +- +-func TestGoModReplace(t *testing.T) { +- // This test checks that we treat locally replaced modules as workspace +- // modules, according to the "includeReplaceInWorkspace" setting. +- const files = ` +--- moda/go.mod -- +-module golang.org/a +- +-require golang.org/b v1.2.3 +- +-replace golang.org/b => ../modb +- +-go 1.20 +- +--- moda/a.go -- +-package a +- +-import "golang.org/b" +- +-const A = b.B +- +--- modb/go.mod -- +-module golang.org/b +- +-go 1.20 +- +--- modb/b.go -- +-package b +- +-const B = 1 +-` +- +- for useReplace, expectation := range map[bool]Expectation{ +- true: FileWatchMatching("modb"), +- false: NoFileWatchMatching("modb"), +- } { +- WithOptions( +- WorkspaceFolders("moda"), +- Settings{ +- "includeReplaceInWorkspace": useReplace, +- }, +- ).Run(t, files, func(t *testing.T, env *Env) { +- env.OnceMet( +- InitialWorkspaceLoad, +- expectation, +- ) +- }) +- } +-} +- +-func TestDisableZeroConfig(t *testing.T) { +- // This test checks that we treat locally replaced modules as workspace +- // modules, according to the "includeReplaceInWorkspace" setting. 
+- const files = ` +--- moda/go.mod -- +-module golang.org/a +- +-go 1.20 +- +--- moda/a.go -- +-package a +- +--- modb/go.mod -- +-module golang.org/b +- +-go 1.20 +- +--- modb/b.go -- +-package b +- +-` +- +- WithOptions( +- Settings{"zeroConfig": false}, +- ).Run(t, files, func(t *testing.T, env *Env) { +- env.OpenFile("moda/a.go") +- env.OpenFile("modb/b.go") +- env.AfterChange() +- if got := env.Views(); len(got) != 1 || got[0].Type != cache.AdHocView.String() { +- t.Errorf("Views: got %v, want one adhoc view", got) +- } +- }) +-} +- +-func TestVendorExcluded(t *testing.T) { +- // Test that we don't create Views for vendored modules. +- // +- // We construct the vendor directory manually here, as `go mod vendor` will +- // omit the go.mod file. This synthesizes the setup of Kubernetes, where the +- // entire module is vendored through a symlinked directory. +- const src = ` +--- go.mod -- +-module example.com/a +- +-go 1.18 +- +-require other.com/b v1.0.0 +- +--- a.go -- +-package a +-import "other.com/b" +-var _ b.B +- +--- vendor/modules.txt -- +-# other.com/b v1.0.0 +-## explicit; go 1.14 +-other.com/b +- +--- vendor/other.com/b/go.mod -- +-module other.com/b +-go 1.14 +- +--- vendor/other.com/b/b.go -- +-package b +-type B int +- +-func _() { +- var V int // unused +-} +-` +- WithOptions( +- Modes(Default), +- ).Run(t, src, func(t *testing.T, env *Env) { +- env.OpenFile("a.go") +- env.AfterChange(NoDiagnostics()) +- loc := env.FirstDefinition(env.RegexpSearch("a.go", `b\.(B)`)) +- if !strings.Contains(string(loc.URI), "/vendor/") { +- t.Fatalf("Definition(b.B) = %v, want vendored location", loc.URI) +- } +- env.OpenFile(env.Sandbox.Workdir.URIToPath(loc.URI)) +- env.AfterChange( +- Diagnostics(env.AtRegexp("vendor/other.com/b/b.go", "V"), WithMessage("not used")), +- ) +- +- if views := env.Views(); len(views) != 1 { +- t.Errorf("After opening /vendor/, got %d views, want 1. Views:\n%v", len(views), views) +- } +- }) +-} +diff -urN a/gopls/internal/test/integration/wrappers.go b/gopls/internal/test/integration/wrappers.go +--- a/gopls/internal/test/integration/wrappers.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/integration/wrappers.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,636 +0,0 @@ +-// Copyright 2020 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package integration +- +-import ( +- "errors" +- "os" +- "path" +- +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/gopls/internal/protocol/command" +- "golang.org/x/tools/gopls/internal/test/integration/fake" +- "golang.org/x/tools/internal/xcontext" +-) +- +-// RemoveWorkspaceFile deletes a file on disk but does nothing in the +-// editor. It calls t.Fatal on any error. +-func (e *Env) RemoveWorkspaceFile(name string) { +- e.TB.Helper() +- if err := e.Sandbox.Workdir.RemoveFile(e.Ctx, name); err != nil { +- e.TB.Fatal(err) +- } +-} +- +-// ReadWorkspaceFile reads a file from the workspace, calling t.Fatal on any +-// error. +-func (e *Env) ReadWorkspaceFile(name string) string { +- e.TB.Helper() +- content, err := e.Sandbox.Workdir.ReadFile(name) +- if err != nil { +- e.TB.Fatal(err) +- } +- return string(content) +-} +- +-// WriteWorkspaceFile writes a file to disk but does nothing in the editor. +-// It calls t.Fatal on any error. 
+-func (e *Env) WriteWorkspaceFile(name, content string) { +- e.TB.Helper() +- if err := e.Sandbox.Workdir.WriteFile(e.Ctx, name, content); err != nil { +- e.TB.Fatal(err) +- } +-} +- +-// WriteWorkspaceFiles deletes a file on disk but does nothing in the +-// editor. It calls t.Fatal on any error. +-func (e *Env) WriteWorkspaceFiles(files map[string]string) { +- e.TB.Helper() +- if err := e.Sandbox.Workdir.WriteFiles(e.Ctx, files); err != nil { +- e.TB.Fatal(err) +- } +-} +- +-// ListFiles lists relative paths to files in the given directory. +-// It calls t.Fatal on any error. +-func (e *Env) ListFiles(dir string) []string { +- e.TB.Helper() +- paths, err := e.Sandbox.Workdir.ListFiles(dir) +- if err != nil { +- e.TB.Fatal(err) +- } +- return paths +-} +- +-// OpenFile opens a file in the editor, calling t.Fatal on any error. +-func (e *Env) OpenFile(name string) { +- e.TB.Helper() +- if err := e.Editor.OpenFile(e.Ctx, name); err != nil { +- e.TB.Fatal(err) +- } +-} +- +-// CreateBuffer creates a buffer in the editor, calling t.Fatal on any error. +-func (e *Env) CreateBuffer(name string, content string) { +- e.TB.Helper() +- if err := e.Editor.CreateBuffer(e.Ctx, name, content); err != nil { +- e.TB.Fatal(err) +- } +-} +- +-// BufferText returns the current buffer contents for the file with the given +-// relative path, calling t.Fatal if the file is not open in a buffer. +-func (e *Env) BufferText(name string) string { +- e.TB.Helper() +- text, ok := e.Editor.BufferText(name) +- if !ok { +- e.TB.Fatalf("buffer %q is not open", name) +- } +- return text +-} +- +-// CloseBuffer closes an editor buffer without saving, calling t.Fatal on any +-// error. +-func (e *Env) CloseBuffer(name string) { +- e.TB.Helper() +- if err := e.Editor.CloseBuffer(e.Ctx, name); err != nil { +- e.TB.Fatal(err) +- } +-} +- +-// EditBuffer applies edits to an editor buffer, calling t.Fatal on any error. +-func (e *Env) EditBuffer(name string, edits ...protocol.TextEdit) { +- e.TB.Helper() +- if err := e.Editor.EditBuffer(e.Ctx, name, edits); err != nil { +- e.TB.Fatal(err) +- } +-} +- +-func (e *Env) SetBufferContent(name string, content string) { +- e.TB.Helper() +- if err := e.Editor.SetBufferContent(e.Ctx, name, content); err != nil { +- e.TB.Fatal(err) +- } +-} +- +-// FileContent returns the file content for name that applies to the current +-// editing session: it returns the buffer content for an open file, the +-// on-disk content for an unopened file, or "" for a non-existent file. +-func (e *Env) FileContent(name string) string { +- e.TB.Helper() +- text, ok := e.Editor.BufferText(name) +- if ok { +- return text +- } +- content, err := e.Sandbox.Workdir.ReadFile(name) +- if err != nil { +- if errors.Is(err, os.ErrNotExist) { +- return "" +- } else { +- e.TB.Fatal(err) +- } +- } +- return string(content) +-} +- +-// FileContentAt returns the file content at the given location, using the +-// file's mapper. +-func (e *Env) FileContentAt(location protocol.Location) string { +- e.TB.Helper() +- mapper, err := e.Editor.Mapper(location.URI.Path()) +- if err != nil { +- e.TB.Fatal(err) +- } +- start, end, err := mapper.RangeOffsets(location.Range) +- if err != nil { +- e.TB.Fatal(err) +- } +- return string(mapper.Content[start:end]) +-} +- +-// RegexpSearch returns the starting position of the first match for re in the +-// buffer specified by name, calling t.Fatal on any error. It first searches +-// for the position in open buffers, then in workspace files. 
+-func (e *Env) RegexpSearch(name, re string) protocol.Location { +- e.TB.Helper() +- loc, err := e.Editor.RegexpSearch(name, re) +- if err == fake.ErrUnknownBuffer { +- loc, err = e.Sandbox.Workdir.RegexpSearch(name, re) +- } +- if err != nil { +- e.TB.Fatalf("RegexpSearch: %v, %v for %q", name, err, re) +- } +- return loc +-} +- +-// RegexpReplace replaces the first group in the first match of regexpStr with +-// the replace text, calling t.Fatal on any error. +-func (e *Env) RegexpReplace(name, regexpStr, replace string) { +- e.TB.Helper() +- if err := e.Editor.RegexpReplace(e.Ctx, name, regexpStr, replace); err != nil { +- e.TB.Fatalf("RegexpReplace: %v", err) +- } +-} +- +-// SaveBuffer saves an editor buffer, calling t.Fatal on any error. +-func (e *Env) SaveBuffer(name string) { +- e.TB.Helper() +- if err := e.Editor.SaveBuffer(e.Ctx, name); err != nil { +- e.TB.Fatal(err) +- } +-} +- +-func (e *Env) SaveBufferWithoutActions(name string) { +- e.TB.Helper() +- if err := e.Editor.SaveBufferWithoutActions(e.Ctx, name); err != nil { +- e.TB.Fatal(err) +- } +-} +- +-// FirstDefinition returns the first definition of the symbol at the +-// selected location, calling t.Fatal on error. +-func (e *Env) FirstDefinition(loc protocol.Location) protocol.Location { +- e.TB.Helper() +- locs, err := e.Editor.Definitions(e.Ctx, loc) +- if err != nil { +- e.TB.Fatal(err) +- } +- if len(locs) == 0 { +- e.TB.Fatalf("no definitions") +- } +- return locs[0] +-} +- +-// FirstTypeDefinition returns the first type definition of the symbol +-// at the selected location, calling t.Fatal on error. +-func (e *Env) FirstTypeDefinition(loc protocol.Location) protocol.Location { +- e.TB.Helper() +- locs, err := e.Editor.TypeDefinitions(e.Ctx, loc) +- if err != nil { +- e.TB.Fatal(err) +- } +- if len(locs) == 0 { +- e.TB.Fatalf("no type definitions") +- } +- return locs[0] +-} +- +-// FormatBuffer formats the editor buffer, calling t.Fatal on any error. +-func (e *Env) FormatBuffer(name string) { +- e.TB.Helper() +- if err := e.Editor.FormatBuffer(e.Ctx, name); err != nil { +- e.TB.Fatal(err) +- } +-} +- +-// OrganizeImports processes the source.organizeImports codeAction, calling +-// t.Fatal on any error. +-func (e *Env) OrganizeImports(name string) { +- e.TB.Helper() +- if err := e.Editor.OrganizeImports(e.Ctx, name); err != nil { +- e.TB.Fatal(err) +- } +-} +- +-// ApplyQuickFixes processes the quickfix codeAction, calling t.Fatal on any error. +-func (e *Env) ApplyQuickFixes(path string, diagnostics []protocol.Diagnostic) { +- e.TB.Helper() +- loc := e.Sandbox.Workdir.EntireFile(path) +- if err := e.Editor.ApplyQuickFixes(e.Ctx, loc, diagnostics); err != nil { +- e.TB.Fatal(err) +- } +-} +- +-// ApplyCodeAction applies the given code action, calling t.Fatal on any error. +-func (e *Env) ApplyCodeAction(action protocol.CodeAction) { +- e.TB.Helper() +- if err := e.Editor.ApplyCodeAction(e.Ctx, action); err != nil { +- e.TB.Fatal(err) +- } +-} +- +-// Diagnostics returns diagnostics for the given file, calling t.Fatal on any +-// error. +-func (e *Env) Diagnostics(name string) []protocol.Diagnostic { +- e.TB.Helper() +- diags, err := e.Editor.Diagnostics(e.Ctx, name) +- if err != nil { +- e.TB.Fatal(err) +- } +- return diags +-} +- +-// GetQuickFixes returns the available quick fix code actions, calling t.Fatal +-// on any error. 
+-func (e *Env) GetQuickFixes(path string, diagnostics []protocol.Diagnostic) []protocol.CodeAction { +- e.TB.Helper() +- loc := e.Sandbox.Workdir.EntireFile(path) +- actions, err := e.Editor.GetQuickFixes(e.Ctx, loc, diagnostics) +- if err != nil { +- e.TB.Fatal(err) +- } +- return actions +-} +- +-// Hover in the editor, calling t.Fatal on any error. +-// It may return (nil, zero) even on success. +-func (e *Env) Hover(loc protocol.Location) (*protocol.MarkupContent, protocol.Location) { +- e.TB.Helper() +- c, loc, err := e.Editor.Hover(e.Ctx, loc) +- if err != nil { +- e.TB.Fatal(err) +- } +- return c, loc +-} +- +-func (e *Env) DocumentLink(name string) []protocol.DocumentLink { +- e.TB.Helper() +- links, err := e.Editor.DocumentLink(e.Ctx, name) +- if err != nil { +- e.TB.Fatal(err) +- } +- return links +-} +- +-func (e *Env) DocumentHighlight(loc protocol.Location) []protocol.DocumentHighlight { +- e.TB.Helper() +- highlights, err := e.Editor.DocumentHighlight(e.Ctx, loc) +- if err != nil { +- e.TB.Fatal(err) +- } +- return highlights +-} +- +-// RunGenerate runs "go generate" in the given dir, calling t.Fatal on any error. +-// It waits for the generate command to complete and checks for file changes +-// before returning. +-func (e *Env) RunGenerate(dir string) { +- e.TB.Helper() +- if err := e.Editor.RunGenerate(e.Ctx, dir); err != nil { +- e.TB.Fatal(err) +- } +- e.Await(NoOutstandingWork(IgnoreTelemetryPromptWork)) +- // Ideally the editor.Workspace would handle all synthetic file watching, but +- // we help it out here as we need to wait for the generate command to +- // complete before checking the filesystem. +- e.CheckForFileChanges() +-} +- +-// RunGoCommand runs the given command in the sandbox's default working +-// directory. +-func (e *Env) RunGoCommand(verb string, args ...string) []byte { +- e.TB.Helper() +- out, err := e.Sandbox.RunGoCommand(e.Ctx, "", verb, args, nil, true) +- if err != nil { +- e.TB.Fatal(err) +- } +- return out +-} +- +-// RunGoCommandInDir is like RunGoCommand, but executes in the given +-// relative directory of the sandbox. +-func (e *Env) RunGoCommandInDir(dir, verb string, args ...string) { +- e.TB.Helper() +- if _, err := e.Sandbox.RunGoCommand(e.Ctx, dir, verb, args, nil, true); err != nil { +- e.TB.Fatal(err) +- } +-} +- +-// RunGoCommandInDirWithEnv is like RunGoCommand, but executes in the given +-// relative directory of the sandbox with the given additional environment variables. +-func (e *Env) RunGoCommandInDirWithEnv(dir string, env []string, verb string, args ...string) { +- e.TB.Helper() +- if _, err := e.Sandbox.RunGoCommand(e.Ctx, dir, verb, args, env, true); err != nil { +- e.TB.Fatal(err) +- } +-} +- +-// GoVersion checks the version of the go command. +-// It returns the X in Go 1.X. +-func (e *Env) GoVersion() int { +- e.TB.Helper() +- v, err := e.Sandbox.GoVersion(e.Ctx) +- if err != nil { +- e.TB.Fatal(err) +- } +- return v +-} +- +-// DumpGoSum prints the correct go.sum contents for dir in txtar format, +-// for use in creating integration tests. +-func (e *Env) DumpGoSum(dir string) { +- e.TB.Helper() +- +- if _, err := e.Sandbox.RunGoCommand(e.Ctx, dir, "list", []string{"-mod=mod", "./..."}, nil, true); err != nil { +- e.TB.Fatal(err) +- } +- sumFile := path.Join(dir, "go.sum") +- e.TB.Log("\n\n-- " + sumFile + " --\n" + e.ReadWorkspaceFile(sumFile)) +- e.TB.Fatal("see contents above") +-} +- +-// CheckForFileChanges triggers a manual poll of the workspace for any file +-// changes since creation, or since last polling. 
It is a workaround for the +-// lack of true file watching support in the fake workspace. +-func (e *Env) CheckForFileChanges() { +- e.TB.Helper() +- if err := e.Sandbox.Workdir.CheckForFileChanges(e.Ctx); err != nil { +- e.TB.Fatal(err) +- } +-} +- +-// CodeLens calls textDocument/codeLens for the given path, calling t.Fatal on +-// any error. +-func (e *Env) CodeLens(path string) []protocol.CodeLens { +- e.TB.Helper() +- lens, err := e.Editor.CodeLens(e.Ctx, path) +- if err != nil { +- e.TB.Fatal(err) +- } +- return lens +-} +- +-// ExecuteCodeLensCommand executes the command for the code lens matching the +-// given command name. +-// +-// result is a pointer to a variable to be populated by json.Unmarshal. +-func (e *Env) ExecuteCodeLensCommand(path string, cmd command.Command, result any) { +- e.TB.Helper() +- if err := e.Editor.ExecuteCodeLensCommand(e.Ctx, path, cmd, result); err != nil { +- e.TB.Fatal(err) +- } +-} +- +-// ExecuteCommand executes the requested command in the editor, calling t.Fatal +-// on any error. +-// +-// result is a pointer to a variable to be populated by json.Unmarshal. +-func (e *Env) ExecuteCommand(params *protocol.ExecuteCommandParams, result any) { +- e.TB.Helper() +- if err := e.Editor.ExecuteCommand(e.Ctx, params, result); err != nil { +- e.TB.Fatal(err) +- } +-} +- +-// Views returns the server's views. +-func (e *Env) Views() []command.View { +- var summaries []command.View +- cmd := command.NewViewsCommand("") +- e.ExecuteCommand(&protocol.ExecuteCommandParams{ +- Command: cmd.Command, +- Arguments: cmd.Arguments, +- }, &summaries) +- return summaries +-} +- +-// StartProfile starts a CPU profile with the given name, using the +-// gopls.start_profile custom command. It calls t.Fatal on any error. +-// +-// The resulting stop function must be called to stop profiling (using the +-// gopls.stop_profile custom command). +-func (e *Env) StartProfile() (stop func() string) { +- // TODO(golang/go#61217): revisit the ergonomics of these command APIs. +- // +- // This would be a lot simpler if we generated params constructors. +- args, err := command.MarshalArgs(command.StartProfileArgs{}) +- if err != nil { +- e.TB.Fatal(err) +- } +- params := &protocol.ExecuteCommandParams{ +- Command: command.StartProfile.String(), +- Arguments: args, +- } +- var result command.StartProfileResult +- e.ExecuteCommand(params, &result) +- +- return func() string { +- stopArgs, err := command.MarshalArgs(command.StopProfileArgs{}) +- if err != nil { +- e.TB.Fatal(err) +- } +- stopParams := &protocol.ExecuteCommandParams{ +- Command: command.StopProfile.String(), +- Arguments: stopArgs, +- } +- var result command.StopProfileResult +- e.ExecuteCommand(stopParams, &result) +- return result.File +- } +-} +- +-// InlayHints calls textDocument/inlayHints for the given path, calling t.Fatal on +-// any error. +-func (e *Env) InlayHints(path string) []protocol.InlayHint { +- e.TB.Helper() +- hints, err := e.Editor.InlayHint(e.Ctx, path) +- if err != nil { +- e.TB.Fatal(err) +- } +- return hints +-} +- +-// Symbol calls workspace/symbol +-func (e *Env) Symbol(query string) []protocol.SymbolInformation { +- e.TB.Helper() +- ans, err := e.Editor.Symbols(e.Ctx, query) +- if err != nil { +- e.TB.Fatal(err) +- } +- return ans +-} +- +-// References wraps Editor.References, calling t.Fatal on any error. 
+-func (e *Env) References(loc protocol.Location) []protocol.Location { +- e.TB.Helper() +- locations, err := e.Editor.References(e.Ctx, loc) +- if err != nil { +- e.TB.Fatal(err) +- } +- return locations +-} +- +-// Rename wraps Editor.Rename, calling t.Fatal on any error. +-func (e *Env) Rename(loc protocol.Location, newName string) { +- e.TB.Helper() +- if err := e.Editor.Rename(e.Ctx, loc, newName); err != nil { +- e.TB.Fatal(err) +- } +-} +- +-// Implementations wraps Editor.Implementations, calling t.Fatal on any error. +-func (e *Env) Implementations(loc protocol.Location) []protocol.Location { +- e.TB.Helper() +- locations, err := e.Editor.Implementations(e.Ctx, loc) +- if err != nil { +- e.TB.Fatal(err) +- } +- return locations +-} +- +-// RenameFile wraps Editor.RenameFile, calling t.Fatal on any error. +-func (e *Env) RenameFile(oldPath, newPath string) { +- e.TB.Helper() +- if err := e.Editor.RenameFile(e.Ctx, oldPath, newPath); err != nil { +- e.TB.Fatal(err) +- } +-} +- +-// SignatureHelp wraps Editor.SignatureHelp, calling t.Fatal on error +-func (e *Env) SignatureHelp(loc protocol.Location) *protocol.SignatureHelp { +- e.TB.Helper() +- sighelp, err := e.Editor.SignatureHelp(e.Ctx, loc) +- if err != nil { +- e.TB.Fatal(err) +- } +- return sighelp +-} +- +-// Completion executes a completion request on the server. +-func (e *Env) Completion(loc protocol.Location) *protocol.CompletionList { +- e.TB.Helper() +- completions, err := e.Editor.Completion(e.Ctx, loc) +- if err != nil { +- e.TB.Fatal(err) +- } +- return completions +-} +- +-func (e *Env) DidCreateFiles(files ...protocol.DocumentURI) { +- e.TB.Helper() +- err := e.Editor.DidCreateFiles(e.Ctx, files...) +- if err != nil { +- e.TB.Fatal(err) +- } +-} +- +-func (e *Env) SetSuggestionInsertReplaceMode(useReplaceMode bool) { +- e.TB.Helper() +- e.Editor.SetSuggestionInsertReplaceMode(e.Ctx, useReplaceMode) +-} +- +-// AcceptCompletion accepts a completion for the given item at the given +-// position. +-func (e *Env) AcceptCompletion(loc protocol.Location, item protocol.CompletionItem) { +- e.TB.Helper() +- if err := e.Editor.AcceptCompletion(e.Ctx, loc, item); err != nil { +- e.TB.Fatal(err) +- } +-} +- +-// CodeActionForFile calls textDocument/codeAction for the entire +-// file, and calls t.Fatal if there were errors. +-func (e *Env) CodeActionForFile(path string, diagnostics []protocol.Diagnostic) []protocol.CodeAction { +- return e.CodeAction(e.Sandbox.Workdir.EntireFile(path), diagnostics, protocol.CodeActionUnknownTrigger) +-} +- +-// CodeAction calls textDocument/codeAction for a selection, +-// and calls t.Fatal if there were errors. +-func (e *Env) CodeAction(loc protocol.Location, diagnostics []protocol.Diagnostic, trigger protocol.CodeActionTriggerKind) []protocol.CodeAction { +- e.TB.Helper() +- actions, err := e.Editor.CodeAction(e.Ctx, loc, diagnostics, trigger) +- if err != nil { +- e.TB.Fatal(err) +- } +- return actions +-} +- +-// ChangeConfiguration updates the editor config, calling t.Fatal on any error. +-func (e *Env) ChangeConfiguration(newConfig fake.EditorConfig) { +- e.TB.Helper() +- if err := e.Editor.ChangeConfiguration(e.Ctx, newConfig); err != nil { +- e.TB.Fatal(err) +- } +-} +- +-// ChangeWorkspaceFolders updates the editor workspace folders, calling t.Fatal +-// on any error. 
+-func (e *Env) ChangeWorkspaceFolders(newFolders ...string) { +- e.TB.Helper() +- if err := e.Editor.ChangeWorkspaceFolders(e.Ctx, newFolders); err != nil { +- e.TB.Fatal(err) +- } +-} +- +-// SemanticTokensFull invokes textDocument/semanticTokens/full, calling t.Fatal +-// on any error. +-func (e *Env) SemanticTokensFull(path string) []fake.SemanticToken { +- e.TB.Helper() +- toks, err := e.Editor.SemanticTokensFull(e.Ctx, path) +- if err != nil { +- e.TB.Fatal(err) +- } +- return toks +-} +- +-// SemanticTokensRange invokes textDocument/semanticTokens/range, calling t.Fatal +-// on any error. +-func (e *Env) SemanticTokensRange(loc protocol.Location) []fake.SemanticToken { +- e.TB.Helper() +- toks, err := e.Editor.SemanticTokensRange(e.Ctx, loc) +- if err != nil { +- e.TB.Fatal(err) +- } +- return toks +-} +- +-// Close shuts down resources associated with the environment, calling t.Error +-// on any error. +-func (e *Env) Close() { +- ctx := xcontext.Detach(e.Ctx) +- if e.MCPSession != nil { +- if err := e.MCPSession.Close(); err != nil { +- e.TB.Errorf("closing MCP session: %v", err) +- } +- } +- if e.MCPServer != nil { +- e.MCPServer.Close() +- } +- if err := e.Editor.Close(ctx); err != nil { +- e.TB.Errorf("closing editor: %v", err) +- } +- if err := e.Sandbox.Close(); err != nil { +- e.TB.Errorf("cleaning up sandbox: %v", err) +- } +-} +diff -urN a/gopls/internal/test/marker/doc.go b/gopls/internal/test/marker/doc.go +--- a/gopls/internal/test/marker/doc.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/doc.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,423 +0,0 @@ +-// Copyright 2023 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-/* +-Package marker defines a framework for running "marker" tests, each +-defined by a file in the testdata subdirectory. +- +-Use this command to run the tests, from the gopls module: +- +- $ go test ./internal/test/marker [-update] +- +-A marker test uses the '//@' syntax of the x/tools/internal/expect package to +-annotate source code with various information such as locations and arguments +-of LSP operations to be executed by the test. The syntax following '@' is +-parsed as a comma-separated list of Go-like function calls, which we refer to +-as 'markers' (or sometimes 'marks'), for example +- +- //@ foo(a, "b", 3), bar(0) +- +-Unlike ordinary Go, the marker syntax also supports optional named arguments +-using the syntax name=value. If provided, named arguments must appear after all +-positional arguments, though their ordering with respect to other named +-arguments does not matter. For example +- +- //@ foo(a, "b", d=4, c=3) +- +-Each marker causes a corresponding function to be called in the test. Some +-markers are declarations; for example, @loc declares a name for a source +-location. Others have effects, such as executing an LSP operation and asserting +-that it behaved as expected. See the Marker types documentation below for the +-list of all supported markers. +- +-Each call argument is converted to the type of the corresponding parameter of +-the designated function. The conversion logic may use the surrounding context, +-such as the position or nearby text. See the Argument conversion section below +-for the full set of special conversions. As a special case, the blank +-identifier '_' is treated as the zero value of the parameter type. 
+- +-The test runner collects test cases by searching the given directory for +-files with the .txt extension. Each file is interpreted as a txtar archive, +-which is extracted to a temporary directory. The relative path to the .txt +-file is used as the subtest name. The preliminary section of the file +-(before the first archive entry) is a free-form comment. +- +-# Special files +- +-There are several types of file within the test archive that are given special +-treatment by the test runner: +- +- - "skip": the presence of this file causes the test to be skipped, with +- its content used as the skip message. +- +- - "flags": this file is treated as a whitespace-separated list of flags +- that configure the MarkerTest instance. Supported flags: +- +- -{min,max}_go=go1.20 sets the {min,max}imum Go runtime version for the test +- (inclusive). +- -{min,max}_go_command=go1.20 sets the {min,max}imum Go command version for +- the test (inclusive). +- -cgo requires that CGO_ENABLED is set and the cgo tool is available. +- -write_sumfile=a,b,c instructs the test runner to generate go.sum files +- in these directories before running the test. +- -skip_goos=a,b,c instructs the test runner to skip the test for the +- listed GOOS values. +- -skip_goarch=a,b,c does the same for GOARCH. +- TODO(rfindley): using build constraint expressions for -skip_go{os,arch} would +- be clearer. +- -ignore_extra_diags suppresses errors for unmatched diagnostics +- -filter_builtins=false disables the filtering of builtins from +- completion results. +- -filter_keywords=false disables the filtering of keywords from +- completion results. +- -errors_ok=true suppresses errors for Error level log entries. +- +- TODO(rfindley): support flag values containing whitespace. +- +- - "settings.json": this file is parsed as JSON, and used as the +- session configuration (see gopls/doc/settings.md) +- +- - "capabilities.json": this file is parsed as JSON client capabilities, +- and applied as an overlay over the default editor client capabilities. +- see https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#clientCapabilities +- for more details. +- +- - "env": this file is parsed as a list of VAR=VALUE fields specifying the +- editor environment. +- +- - Golden files: Within the archive, file names starting with '@' are +- treated as "golden" content, and are not written to disk, but instead are +- made available to test methods expecting an argument of type *Golden, +- using the identifier following '@'. For example, if the first parameter of +- Foo were of type *Golden, the test runner would convert the identifier a +- in the call @foo(a, "b", 3) into a *Golden by collecting golden file +- data starting with "@a/". As a special case, for tests that only need one +- golden file, the data contained in the file "@a" is indexed in the *Golden +- value by the empty string "". +- +- - proxy files: any file starting with proxy/ is treated as a Go proxy +- file. If present, these files are written to a separate temporary +- directory and GOPROXY is set to file://<proxy directory>. +- +-# Marker types +- +-Markers are of two kinds: "value markers" and "action markers". Value markers +-are processed in a first pass, and define named values that may be referred to +-as arguments to action markers. For example, the @loc marker defines a named +-location that may be used wherever a location is expected. Value markers cannot +-refer to names defined by other value markers. 
Action markers are processed in +-a second pass and perform some action such as testing an LSP operation. +- +-Below, we list supported markers using function signatures, augmented with the +-named argument support name=value, as described above. The types referred to in +-the signatures below are described in the Argument conversion section. +- +-Here is the list of supported value markers: +- +- - loc(name, location): specifies the name for a location in the source. These +- locations may be referenced by other markers. Naturally, the location +- argument may be specified only as a string or regular expression in the +- first pass. +- The predeclared locations BUILTIN and UNSAFE match any location in the +- GOROOT/src/{builtin/builtin,unsafe/unsafe}.go file, regardless of GOROOT. +- +- - defloc(name, location): performs a textDocument/definition request at the +- src location, and binds the result to the given name. This may be used to +- refer to positions in the standard library. +- +- - hiloc(name, location, kind): defines a documentHighlight value of the +- given location and kind. Use its label in a @highlightall marker to +- indicate the expected result of a highlight query. +- +- - item(name, details, kind): defines a completionItem with the provided +- fields. This information is not positional, and therefore @item markers +- may occur anywhere in the source. Use in conjunction with @complete, +- @snippet, or @rank. +- +- TODO(rfindley): rethink whether floating @item annotations are the best +- way to specify completion results. +- +-Here is the list of supported action markers: +- +- - acceptcompletion(location, label, golden): specifies that accepting the +- completion candidate produced at the given location with provided label +- results in the given golden state. +- +- - codeaction(start location, kind string, end=location, edit=golden, result=golden, err=stringMatcher) +- +- Specifies a code action to request at the location, with given kind. +- +- If end is set, the location is defined to be between start.Start and end.End. +- +- Exactly one of edit, result, or err must be set. If edit is set, it is a +- golden reference to the edits resulting from the code action. If result is +- set, it is a golden reference to the full set of changed files resulting +- from the code action. If err is set, it is the code action error. +- +- - codelens(location, title): specifies that a codelens is expected at the +- given location, with given title. Must be used in conjunction with +- @codelenses. +- +- - codelenses(): specifies that textDocument/codeLens should be run for the +- current document, with results compared to the @codelens annotations in +- the current document. +- +- - complete(location, ...items): specifies expected completion results at +- the given location. Must be used in conjunction with @item. +- +- - diag(location, regexp, exact=bool): specifies an expected diagnostic +- matching the given regexp at the given location. The test runner requires a +- 1:1 correspondence between observed diagnostics and diag annotations. The +- diagnostics source and kind fields are ignored, to reduce fuss. +- +- The specified location must match the start position of the diagnostic, +- but end positions are ignored unless exact=true. +- +- TODO(adonovan): in the older marker framework, the annotation asserted two +- additional fields (source="compiler", kind="error"). Restore them using +- optional named arguments. 
+- +- - def(src, want ...location): performs a textDocument/definition request at +- the src location, and checks that the results equals want. +- +- - documentLink(golden): asserts that textDocument/documentLink returns +- links as described by the golden file. +- +- - foldingrange(golden): performs a textDocument/foldingRange for the +- current document, and compare with the golden content, which is the +- original source annotated with numbered tags delimiting the resulting +- ranges (e.g. <1 kind="..."> ... </1>). +- +- - format(golden): performs a textDocument/format request for the enclosing +- file, and compare against the named golden file. If the formatting +- request succeeds, the golden file must contain the resulting formatted +- source. If the formatting request fails, the golden file must contain +- the error message. +- +- - highlightall(all ...documentHighlight): makes a textDocument/highlight +- request at each location in "all" and checks that the result is "all". +- In other words, given highlightall(X1, X2, ..., Xn), it checks that +- highlight(X1) = highlight(X2) = ... = highlight(Xn) = {X1, X2, ..., Xn}. +- In general, highlight sets are not equivalence classes; for asymmetric +- cases, use @highlight instead. +- Each element of "all" is the label of a @hiloc marker. +- +- - highlight(src location, dsts ...documentHighlight): makes a +- textDocument/highlight request at the given src location, which should +- highlight the provided dst locations and kinds. +- +- - hover(src, dst location, sm stringMatcher): performs a textDocument/hover +- at the src location, and checks that the result is the dst location, with +- matching hover content. +- +- - hovererr(src, sm stringMatcher): performs a textDocument/hover at the src +- location, and checks that the error matches the given stringMatcher. +- +- - implementation(src location, want ...location, err=stringMatcher): +- makes a textDocument/implementation query at the src location and +- checks that the resulting set of locations matches want. If err is +- set, the implementation query must fail with the expected error. +- +- - incomingcalls(src location, want ...location): makes a +- callHierarchy/incomingCalls query at the src location, and checks that +- the set of call.From locations matches want. +- (These locations are the declarations of the functions enclosing +- the calls, not the calls themselves.) +- +- - outgoingcalls(src location, want ...location): makes a +- callHierarchy/outgoingCalls query at the src location, and checks that +- the set of call.To locations matches want. +- +- - preparerename(src location, placeholder string, span=location): asserts +- that a textDocument/prepareRename request at the src location has the given +- placeholder text. If present, the optional span argument is verified to be +- the span of the prepareRename result. If placeholder is "", this is treated +- as a negative assertion and prepareRename should return nil. +- +- - quickfix(location, regexp, golden): like diag, the location and +- regexp identify an expected diagnostic, which must have exactly one +- associated "quickfix" code action. +- This action is executed for its editing effects on the source files. +- Like rename, the golden directory contains the expected transformed files. +- +- - quickfixerr(location, regexp, wantError): specifies that the +- quickfix operation should fail with an error that matches the expectation. 
+- (Failures in the computation to offer a fix do not generally result +- in LSP errors, so this marker is not appropriate for testing them.) +- +- - rank(location, ...string OR completionItem): executes a +- textDocument/completion request at the given location, and verifies that +- each expected completion item occurs in the results, in the expected order. +- Items may be specified as string literal completion labels, or as +- references to a completion item created with the @item marker. +- Other unexpected completion items are allowed to occur in the results, and +- are ignored. A "!" prefix on a label asserts that the symbol is not a +- completion candidate. +- +- - refs(location, want ...location): executes a textDocument/references +- request at the first location and asserts that the result is the set of +- 'want' locations. The first want location must be the declaration +- (assumedly unique). +- +- - rename(location, new, golden): specifies a renaming of the +- identifier at the specified location to the new name. +- The golden directory contains the transformed files. +- +- - renameerr(location, new, wantError): specifies a renaming that +- fails with an error that matches the expectation. +- +- - signature(location, label, active): specifies that +- signatureHelp at the given location should match the provided string, with +- the active parameter (an index) highlighted. +- +- - snippet(location, string OR completionItem, snippet): executes a +- textDocument/completion request at the location, and searches for a result +- with label matching that its second argument, which may be a string literal +- or a reference to a completion item created by the @item marker (in which +- case the item's label is used). It checks that the resulting snippet +- matches the provided snippet. +- +- - subtypes (src location, want ...location), +- supertypes(src location, want ...location): +- execute a textDocument/prepareTypeHierarchy request at the src +- location, followed by a typeHierarchy/{sub,super}types request on +- the first response, and check that the result contains the list +- of wanted locations in order. +- +- - symbol(golden): makes a textDocument/documentSymbol request +- for the enclosing file, formats the response with one symbol +- per line, sorts it, and compares against the named golden file. +- Each line is of the form: +- +- dotted.symbol.name kind "detail" +n lines +- +- where the "+n lines" part indicates that the declaration spans +- several lines. The test otherwise makes no attempt to check +- location information. There is no point to using more than one +- @symbol marker in a given file. +- +- - token(location, tokenType, mod): makes a textDocument/semanticTokens/range +- request at the given location, and asserts that the result includes +- exactly one token with the given token type and modifier string. +- +- - workspacesymbol(query, golden): makes a workspace/symbol request for the +- given query, formats the response with one symbol per line, and compares +- against the named golden file. As workspace symbols are by definition a +- workspace-wide request, the location of the workspace symbol marker does +- not matter. Each line is of the form: +- +- location name kind +- +- - mcptool(name string, arg string, location=location, output=golden): +- Executes an MCP tool call using the provided tool name and args (a +- JSON-encoded value). 
Any string or []string values in the JSON input object +- are modified to replace the substring '$WORKDIR' with the actual working +- directory of the test. Furthermore, if 'location' is provided, it is used +- to populate the 'location' property of the JSON input with the given LSP +- source location. The test then asserts that the MCP server's response +- matches the content of the golden file identified by output. For +- portability, all filepath separators in the output are normalized to '/', +- even if they occur outside of a path context. +- +-# Argument conversion +- +-Marker arguments are first parsed by the internal/expect package, which accepts +-the following tokens as defined by the Go spec: +- - string, int64, float64, and rune literals +- - true and false +- - nil +- - identifiers (type expect.Identifier) +- - regular expressions, denoted the two tokens re"abc" (type *regexp.Regexp) +- +-These values are passed as arguments to the corresponding parameter of the +-test function. Additional value conversions may occur for these argument -> +-parameter type pairs: +- +- - string->regexp: the argument is parsed as a regular expressions. +- +- - string->location: the argument is converted to the location of the first +- instance of the argument in the file content starting from the beginning of +- the line containing the note. Multi-line matches are permitted, but the +- match must begin before the note. +- +- - regexp->location: the argument is converted to the location of the first +- match for the argument in the file content starting from the beginning of +- the line containing the note. Multi-line matches are permitted, but the +- match must begin before the note. If the regular expression contains +- exactly one subgroup, the position of the subgroup is used rather than the +- position of the submatch. +- +- - name->location: the argument is replaced by the named location. +- +- - name->Golden: the argument is used to look up golden content prefixed by +- @<argument>. +- +- - {string,regexp,identifier}->stringMatcher: a stringMatcher type +- specifies an expected string, either in the form of a substring +- that must be present, a regular expression that it must match, or an +- identifier (e.g. foo) such that the archive entry @foo exists and +- contains the exact expected string. +- stringMatchers are used by some markers to match positive results +- (outputs) and by other markers to match error messages. +- +-# Example +- +-Here is a complete example: +- +- This test checks hovering over constants. +- +- -- a.go -- +- package a +- +- const abc = 0x2a //@hover("b", "abc", abc),hover(" =", "abc", abc) +- +- -- @abc -- +- ```go +- const abc untyped int = 42 +- ``` +- +- @hover("b", "abc", abc),hover(" =", "abc", abc) +- +-In this example, the @hover annotation tells the test runner to run the +-hoverMarker function, which has parameters: +- +- (mark marker, src, dst protocol.Location, g *Golden). +- +-The first argument holds the test context, including fake editor with open +-files, and sandboxed directory. +- +-Argument converters translate the "b" and "abc" arguments into locations by +-interpreting each one as a substring (or as a regular expression, if of the +-form re"a|b") and finding the location of its first occurrence starting on the +-preceding portion of the line, and the abc identifier into a the golden content +-contained in the file @abc. 
Then the hoverMarker method executes a +-textDocument/hover LSP request at the src position, and ensures the result +-spans "abc", with the markdown content from @abc. (Note that the markdown +-content includes the expect annotation as the doc comment.) +- +-The next hover on the same line asserts the same result, but initiates the +-hover immediately after "abc" in the source. This tests that we find the +-preceding identifier when hovering. +- +-# Updating golden files +- +-To update golden content in the test archive, it is easier to regenerate +-content automatically rather than edit it by hand. To do this, run the +-tests with the -update flag. Only tests that actually run will be updated. +- +-In some cases, golden content will vary by Go version (for example, gopls +-produces different markdown at Go versions before the 1.19 go/doc update). +-By convention, the golden content in test archives should match the output +-at Go tip. Each test function can normalize golden content for older Go +-versions. +- +-Note that -update does not cause missing @diag or @loc markers to be added. +- +-# TODO +- +- - Rename the files .txtar. +- - Eliminate all *err markers, preferring named arguments. +- - In failed assertions, display locations using symbolic @loc names where available. +-*/ +-package marker +diff -urN a/gopls/internal/test/marker/marker_test.go b/gopls/internal/test/marker/marker_test.go +--- a/gopls/internal/test/marker/marker_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/marker_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,2864 +0,0 @@ +-// Copyright 2023 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package marker +- +-// This file defines the marker test framework. +-// See doc.go for extensive documentation. +- +-import ( +- "bytes" +- "context" +- "encoding/json" +- "errors" +- "flag" +- "fmt" +- "go/token" +- "go/types" +- "io/fs" +- "log" +- "net/http/httptest" +- "os" +- "path" +- "path/filepath" +- "reflect" +- "regexp" +- "runtime" +- "slices" +- "sort" +- "strings" +- "testing" +- +- "github.com/google/go-cmp/cmp" +- "github.com/google/go-cmp/cmp/cmpopts" +- "golang.org/x/tools/gopls/internal/cache" +- "golang.org/x/tools/gopls/internal/debug" +- "golang.org/x/tools/gopls/internal/lsprpc" +- internalmcp "golang.org/x/tools/gopls/internal/mcp" +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/gopls/internal/test/compare" +- "golang.org/x/tools/gopls/internal/test/integration" +- "golang.org/x/tools/gopls/internal/test/integration/fake" +- "golang.org/x/tools/gopls/internal/util/bug" +- "golang.org/x/tools/gopls/internal/util/safetoken" +- "golang.org/x/tools/internal/diff" +- "golang.org/x/tools/internal/diff/myers" +- "golang.org/x/tools/internal/expect" +- "golang.org/x/tools/internal/jsonrpc2" +- "golang.org/x/tools/internal/jsonrpc2/servertest" +- "golang.org/x/tools/internal/mcp" +- "golang.org/x/tools/internal/testenv" +- "golang.org/x/tools/txtar" +-) +- +-var update = flag.Bool("update", false, "if set, update test data during marker tests") +- +-func TestMain(m *testing.M) { +- bug.PanicOnBugs = true +- testenv.ExitIfSmallMachine() +- // Disable GOPACKAGESDRIVER, as it can cause spurious test failures. +- os.Setenv("GOPACKAGESDRIVER", "off") // ignore error +- integration.FilterToolchainPathAndGOROOT() +- os.Exit(m.Run()) +-} +- +-// Test runs the marker tests from the testdata directory. 
+-// +-// See package documentation for details on how marker tests work. +-// +-// These tests were inspired by (and in many places copied from) a previous +-// iteration of the marker tests built on top of the packagestest framework. +-// Key design decisions motivating this reimplementation are as follows: +-// - The old tests had a single global session, causing interaction at a +-// distance and several awkward workarounds. +-// - The old tests could not be safely parallelized, because certain tests +-// manipulated the server options +-// - Relatedly, the old tests did not have a logic grouping of assertions into +-// a single unit, resulting in clusters of files serving clusters of +-// entangled assertions. +-// - The old tests used locations in the source as test names and as the +-// identity of golden content, meaning that a single edit could change the +-// name of an arbitrary number of subtests, and making it difficult to +-// manually edit golden content. +-// - The old tests did not hew closely to LSP concepts, resulting in, for +-// example, each marker implementation doing its own position +-// transformations, and inventing its own mechanism for configuration. +-// - The old tests had an ad-hoc session initialization process. The integration +-// test environment has had more time devoted to its initialization, and has a +-// more convenient API. +-// - The old tests lacked documentation, and often had failures that were hard +-// to understand. By starting from scratch, we can revisit these aspects. +-func Test(t *testing.T) { +- if testing.Short() { +- builder := os.Getenv("GO_BUILDER_NAME") +- // Note that HasPrefix(builder, "darwin-" only matches legacy builders. +- // LUCI builder names start with x_tools-goN.NN. +- // We want to exclude solaris on both legacy and LUCI builders, as +- // it is timing out. +- if strings.HasPrefix(builder, "darwin-") || strings.Contains(builder, "solaris") { +- t.Skip("golang/go#64473: skipping with -short: this test is too slow on darwin and solaris builders") +- } +- if strings.HasSuffix(builder, "freebsd-amd64-race") { +- t.Skip("golang/go#71731: the marker tests are too slow to run on the amd64-race builder") +- } +- } +- // The marker tests must be able to run go/packages.Load. +- testenv.NeedsGoPackages(t) +- +- const dir = "testdata" +- tests, err := loadMarkerTests(dir) +- if err != nil { +- t.Fatal(err) +- } +- +- // Opt: use a shared cache. +- cache := cache.New(nil) +- +- for _, test := range tests { +- t.Run(test.name, func(t *testing.T) { +- t.Parallel() +- +- if test.skipReason != "" { +- t.Skip(test.skipReason) +- } +- if slices.Contains(test.skipGOOS, runtime.GOOS) { +- t.Skipf("skipping on %s due to -skip_goos", runtime.GOOS) +- } +- if slices.Contains(test.skipGOARCH, runtime.GOARCH) { +- t.Skipf("skipping on %s due to -skip_goarch", runtime.GOARCH) +- } +- +- // TODO(rfindley): it may be more useful to have full support for build +- // constraints. +- if test.minGoVersion != "" { +- var go1point int +- if _, err := fmt.Sscanf(test.minGoVersion, "go1.%d", &go1point); err != nil { +- t.Fatalf("parsing -min_go version: %v", err) +- } +- testenv.NeedsGo1Point(t, go1point) +- } +- if test.maxGoVersion != "" { +- // A max Go version may be useful when (e.g.) a recent go/types +- // fix makes it impossible to reproduce a certain older crash. 
+- var go1point int +- if _, err := fmt.Sscanf(test.maxGoVersion, "go1.%d", &go1point); err != nil { +- t.Fatalf("parsing -max_go version: %v", err) +- } +- testenv.SkipAfterGo1Point(t, go1point) +- } +- if test.minGoCommandVersion != "" { +- var go1point int +- if _, err := fmt.Sscanf(test.minGoCommandVersion, "go1.%d", &go1point); err != nil { +- t.Fatalf("parsing -min_go_command version: %v", err) +- } +- testenv.NeedsGoCommand1Point(t, go1point) +- } +- if test.maxGoCommandVersion != "" { +- var go1point int +- if _, err := fmt.Sscanf(test.maxGoCommandVersion, "go1.%d", &go1point); err != nil { +- t.Fatalf("parsing -max_go_command version: %v", err) +- } +- testenv.SkipAfterGoCommand1Point(t, go1point) +- } +- if test.cgo { +- if os.Getenv("CGO_ENABLED") == "0" { +- // NeedsTool causes the test to fail if cgo is available but disabled +- // on the current platform through the environment. I'm not sure why it +- // behaves this way, but if CGO_ENABLED=0 is set, we want to skip. +- t.Skip("skipping due to CGO_ENABLED=0") +- } +- testenv.NeedsTool(t, "cgo") +- } +- +- config := fake.EditorConfig{ +- Settings: test.settings, +- CapabilitiesJSON: test.capabilities, +- Env: test.env, +- } +- +- if _, ok := config.Settings["diagnosticsDelay"]; !ok { +- if config.Settings == nil { +- config.Settings = make(map[string]any) +- } +- config.Settings["diagnosticsDelay"] = "10ms" +- } +- +- // inv: config.Settings != nil +- +- run := &markerTestRun{ +- test: test, +- env: newEnv(t, cache, test.files, test.proxyFiles, test.writeGoSum, config, test.mcp), +- settings: config.Settings, +- values: make(map[expect.Identifier]any), +- diags: make(map[protocol.Location][]protocol.Diagnostic), +- extraNotes: make(map[protocol.DocumentURI]map[string][]*expect.Note), +- } +- defer run.env.Close() +- +- // Support built-in pseudo-locations here. +- // fmtLoc coerces "got" Locations to these forms +- // so they can be compared without depending +- // on line numbers in external files. +- // +- // (We could actually define "builtin:int" etc +- // with the correct file/line/col for each +- // builtin symbol, but that doesn't seem to be +- // where the bugs are.) +- run.values["BUILTIN"] = protocol.Location{ +- URI: "file:///pseudo/builtin/builtin.go", +- } +- run.values["UNSAFE"] = protocol.Location{ +- URI: "file:///pseudo/unsafe/unsafe.go", +- } +- +- // Open all files so that we operate consistently with LSP clients, and +- // (pragmatically) so that we have a Mapper available via the fake +- // editor. +- // +- // This also allows avoiding mutating the editor state in tests. +- for file := range test.files { +- run.env.OpenFile(file) +- } +- +- allDiags := make(map[string][]protocol.Diagnostic) +- if run.env.Editor.ServerCapabilities().DiagnosticProvider != nil { +- for name := range test.files { +- // golang/go#53275: support pull diagnostics for go.mod and go.work +- // files. +- if strings.HasSuffix(name, ".go") { +- allDiags[name] = run.env.Diagnostics(name) +- } +- } +- } else { +- // Wait for the didOpen notifications to be processed, then collect +- // diagnostics. 
+- +- run.env.AfterChange() +- var diags map[string]*protocol.PublishDiagnosticsParams +- run.env.AfterChange(integration.ReadAllDiagnostics(&diags)) +- for path, params := range diags { +- allDiags[path] = params.Diagnostics +- } +- } +- +- for path, diags := range allDiags { +- uri := run.env.Sandbox.Workdir.URI(path) +- for _, diag := range diags { +- loc := protocol.Location{ +- URI: uri, +- Range: protocol.Range{ +- Start: diag.Range.Start, +- End: diag.Range.Start, // ignore end positions +- }, +- } +- run.diags[loc] = append(run.diags[loc], diag) +- } +- } +- +- var markers []marker +- for _, note := range test.notes { +- mark := marker{run: run, note: note} +- if fn, ok := valueMarkerFuncs[note.Name]; ok { +- fn(mark) +- } else if _, ok := actionMarkerFuncs[note.Name]; ok { +- markers = append(markers, mark) // save for later +- } else { +- uri := mark.uri() +- if run.extraNotes[uri] == nil { +- run.extraNotes[uri] = make(map[string][]*expect.Note) +- } +- run.extraNotes[uri][note.Name] = append(run.extraNotes[uri][note.Name], note) +- } +- } +- +- // Invoke each remaining marker in the test. +- for _, mark := range markers { +- actionMarkerFuncs[mark.note.Name](mark) +- } +- +- // Any remaining (un-eliminated) diagnostics are an error. +- if !test.ignoreExtraDiags { +- for loc, diags := range run.diags { +- for _, diag := range diags { +- // Note that loc is collapsed (start==end). +- // For formatting, show the exact span. +- exactLoc := protocol.Location{ +- URI: loc.URI, +- Range: diag.Range, +- } +- t.Errorf("%s: unexpected diagnostic: %q", run.fmtLoc(exactLoc), diag.Message) +- } +- } +- } +- +- // TODO(rfindley): use these for whole-file marker tests. +- for uri, extras := range run.extraNotes { +- for name, extra := range extras { +- if len(extra) > 0 { +- t.Errorf("%s: %d unused %q markers", run.env.Sandbox.Workdir.URIToPath(uri), len(extra), name) +- } +- } +- } +- +- // Now that all markers have executed, check whether there where any +- // unexpected error logs. +- // This guards against noisiness: see golang/go#66746) +- if !test.errorsOK { +- run.env.AfterChange(integration.NoErrorLogs()) +- } +- +- formatted, err := formatTest(test) +- if err != nil { +- t.Errorf("formatTest: %v", err) +- } else if *update { +- filename := filepath.Join(dir, test.name) +- if err := os.WriteFile(filename, formatted, 0o644); err != nil { +- t.Error(err) +- } +- } else if !t.Failed() { +- // Verify that the testdata has not changed. +- // +- // Only check this if the test hasn't already failed, otherwise we'd +- // report duplicate mismatches of golden data. +- // Otherwise, verify that formatted content matches. +- if diff := compare.NamedText("formatted", "on-disk", string(formatted), string(test.content)); diff != "" { +- t.Errorf("formatted test does not match on-disk content:\n%s", diff) +- } +- } +- }) +- } +- +- if abs, err := filepath.Abs(dir); err == nil && t.Failed() { +- t.Logf("(Filenames are relative to %s.)", abs) +- } +-} +- +-// A marker holds state for the execution of a single @marker +-// annotation in the source. +-type marker struct { +- run *markerTestRun +- note *expect.Note +-} +- +-// ctx returns the mark context. +-func (m marker) ctx() context.Context { return m.run.env.Ctx } +- +-// T returns the testing.TB for this mark. +-func (m marker) T() testing.TB { return m.run.env.TB } +- +-// server returns the LSP server for the marker test run. 
+-func (m marker) editor() *fake.Editor { return m.run.env.Editor } +- +-// server returns the LSP server for the marker test run. +-func (m marker) server() protocol.Server { return m.run.env.Editor.Server } +- +-// uri returns the URI of the file containing the marker. +-func (mark marker) uri() protocol.DocumentURI { +- return mark.run.env.Sandbox.Workdir.URI(mark.run.test.fset.File(mark.note.Pos).Name()) +-} +- +-// document returns a protocol.TextDocumentIdentifier for the current file. +-func (mark marker) document() protocol.TextDocumentIdentifier { +- return protocol.TextDocumentIdentifier{URI: mark.uri()} +-} +- +-// path returns the relative path to the file containing the marker. +-func (mark marker) path() string { +- return mark.run.env.Sandbox.Workdir.RelPath(mark.run.test.fset.File(mark.note.Pos).Name()) +-} +- +-// mapper returns a *protocol.Mapper for the current file. +-func (mark marker) mapper() *protocol.Mapper { +- mapper, err := mark.editor().Mapper(mark.path()) +- if err != nil { +- mark.T().Fatalf("failed to get mapper for current mark: %v", err) +- } +- return mapper +-} +- +-// error reports an error with a prefix indicating the position of the marker +-// note. +-func (mark marker) error(args ...any) { +- mark.T().Helper() +- msg := fmt.Sprint(args...) +- mark.T().Errorf("%s: %s", mark.run.fmtPos(mark.note.Pos), msg) +-} +- +-// errorf reports a formatted error with a prefix indicating the position of +-// the marker note. +-// +-// It formats the error message using mark.sprintf. +-func (mark marker) errorf(format string, args ...any) { +- mark.T().Helper() +- msg := mark.sprintf(format, args...) +- // TODO(adonovan): consider using fmt.Fprintf(os.Stderr)+t.Fail instead of +- // t.Errorf to avoid reporting uninteresting positions in the Go source of +- // the driver. However, this loses the order of stderr wrt "FAIL: TestFoo" +- // subtest dividers. +- mark.T().Errorf("%s: %s", mark.run.fmtPos(mark.note.Pos), msg) +-} +- +-// valueMarkerFunc returns a wrapper around a function that allows it to be +-// called during the processing of value markers (e.g. @value(v, 123)) with marker +-// arguments converted to function parameters. The provided function's first +-// parameter must be of type 'marker', and it must return a value. +-// +-// Unlike action markers, which are executed for actions such as test +-// assertions, value markers are all evaluated first, and each computes +-// a value that is recorded by its identifier, which is the marker's first +-// argument. These values may be referred to from an action marker by +-// this identifier, e.g. @action(... , v, ...). +-// +-// For example, given a fn with signature +-// +-// func(mark marker, label, details, kind string) CompletionItem +-// +-// The result of valueMarkerFunc can associated with @item notes, and invoked +-// as follows: +-// +-// //@item(FooCompletion, "Foo", "func() int", "func") +-// +-// The provided fn should not mutate the test environment. 
+-func valueMarkerFunc(fn any) func(marker) { +- ftype := reflect.TypeOf(fn) +- if ftype.NumIn() == 0 || ftype.In(0) != markerType { +- panic(fmt.Sprintf("value marker function %#v must accept marker as its first argument", ftype)) +- } +- if ftype.NumOut() != 1 { +- panic(fmt.Sprintf("value marker function %#v must have exactly 1 result", ftype)) +- } +- +- return func(mark marker) { +- if len(mark.note.Args) == 0 || !is[expect.Identifier](mark.note.Args[0]) { +- mark.errorf("first argument to a value marker function must be an identifier") +- return +- } +- id := mark.note.Args[0].(expect.Identifier) +- if alt, ok := mark.run.values[id]; ok { +- mark.errorf("%s already declared as %T", id, alt) +- return +- } +- args := append([]any{mark}, mark.note.Args[1:]...) +- argValues, err := convertArgs(mark, ftype, args) +- if err != nil { +- mark.error(err) +- return +- } +- results := reflect.ValueOf(fn).Call(argValues) +- mark.run.values[id] = results[0].Interface() +- } +-} +- +-// actionMarkerFunc returns a wrapper around a function that allows it to be +-// called during the processing of action markers (e.g. @action("abc", 123)) +-// with marker arguments converted to function parameters. The provided +-// function's first parameter must be of type 'marker', and it must not return +-// any values. Any named arguments that may be used by the marker func must be +-// listed in allowedNames. +-// +-// The provided fn should not mutate the test environment. +-func actionMarkerFunc(fn any, allowedNames ...string) func(marker) { +- ftype := reflect.TypeOf(fn) +- if ftype.NumIn() == 0 || ftype.In(0) != markerType { +- panic(fmt.Sprintf("action marker function %#v must accept marker as its first argument", ftype)) +- } +- if ftype.NumOut() != 0 { +- panic(fmt.Sprintf("action marker function %#v cannot have results", ftype)) +- } +- +- var allowed map[string]bool +- if len(allowedNames) > 0 { +- allowed = make(map[string]bool) +- for _, name := range allowedNames { +- allowed[name] = true +- } +- } +- +- return func(mark marker) { +- for name := range mark.note.NamedArgs { +- if !allowed[name] { +- mark.errorf("unexpected named argument %q", name) +- } +- } +- +- args := append([]any{mark}, mark.note.Args...) +- argValues, err := convertArgs(mark, ftype, args) +- if err != nil { +- mark.error(err) +- return +- } +- reflect.ValueOf(fn).Call(argValues) +- } +-} +- +-func convertArgs(mark marker, ftype reflect.Type, args []any) ([]reflect.Value, error) { +- var ( +- argValues []reflect.Value +- pnext int // next param index +- p reflect.Type // current param +- ) +- for i, arg := range args { +- if i < ftype.NumIn() { +- p = ftype.In(pnext) +- pnext++ +- } else if p == nil || !ftype.IsVariadic() { +- // The actual number of arguments expected by the mark varies, depending +- // on whether this is a value marker or an action marker. +- // +- // Since this error indicates a bug, probably OK to have an imprecise +- // error message here. +- return nil, fmt.Errorf("too many arguments to %s", mark.note.Name) +- } +- elemType := p +- if ftype.IsVariadic() && pnext == ftype.NumIn() { +- elemType = p.Elem() +- } +- var v reflect.Value +- if id, ok := arg.(expect.Identifier); ok && id == "_" { +- v = reflect.Zero(elemType) +- } else { +- a, err := convert(mark, arg, elemType) +- if err != nil { +- return nil, err +- } +- v = reflect.ValueOf(a) +- } +- argValues = append(argValues, v) +- } +- // Check that we have sufficient arguments. 
If the function is variadic, we +- // do not need arguments for the final parameter. +- if pnext < ftype.NumIn()-1 || pnext == ftype.NumIn()-1 && !ftype.IsVariadic() { +- // Same comment as above: OK to be vague here. +- return nil, fmt.Errorf("not enough arguments to %s", mark.note.Name) +- } +- return argValues, nil +-} +- +-// namedArg returns the named argument for name, or the default value. +-func namedArg[T any](mark marker, name string, dflt T) T { +- if v, ok := mark.note.NamedArgs[name]; ok { +- if e, ok := v.(T); ok { +- return e +- } else { +- v, err := convert(mark, v, reflect.TypeOf(dflt)) +- if err != nil { +- mark.errorf("invalid value for %q: could not convert %v (%T) to %T", name, v, v, dflt) +- return dflt +- } +- return v.(T) +- } +- } +- return dflt +-} +- +-func namedArgFunc[T any](mark marker, name string, f func(marker, any) (T, error), dflt T) T { +- if v, ok := mark.note.NamedArgs[name]; ok { +- if v2, err := f(mark, v); err == nil { +- return v2 +- } else { +- mark.errorf("invalid value for %q: %v: %v", name, v, err) +- } +- } +- return dflt +-} +- +-func exactlyOneNamedArg(mark marker, names ...string) bool { +- var found []string +- for _, name := range names { +- if _, ok := mark.note.NamedArgs[name]; ok { +- found = append(found, name) +- } +- } +- if len(found) != 1 { +- mark.errorf("need exactly one of %v to be set, got %v", names, found) +- return false +- } +- return true +-} +- +-// is reports whether arg is a T. +-func is[T any](arg any) bool { +- _, ok := arg.(T) +- return ok +-} +- +-// Supported value marker functions. See [valueMarkerFunc] for more details. +-var valueMarkerFuncs = map[string]func(marker){ +- "loc": valueMarkerFunc(locMarker), +- "item": valueMarkerFunc(completionItemMarker), +- "hiloc": valueMarkerFunc(highlightLocationMarker), +- "defloc": valueMarkerFunc(defLocMarker), +-} +- +-// Supported action marker functions. See [actionMarkerFunc] for more details. +-// +-// See doc.go for marker documentation. 
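+-//
+-// For illustration only (a hypothetical test line, not from this archive),
+-// an action marker is written as a note on the line it exercises, e.g.:
+-//
+-//	var _ = undefinedName //@diag("undefinedName", re"undeclared|undefined")
+-//
+-// which asserts (and consumes) a diagnostic whose message matches the given
+-// regular expression at the location of "undefinedName".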
+-var actionMarkerFuncs = map[string]func(marker){ +- "acceptcompletion": actionMarkerFunc(acceptCompletionMarker), +- "codeaction": actionMarkerFunc(codeActionMarker, "end", "result", "edit", "err"), +- "codelenses": actionMarkerFunc(codeLensesMarker), +- "complete": actionMarkerFunc(completeMarker), +- "def": actionMarkerFunc(defMarker), +- "diag": actionMarkerFunc(diagMarker, "exact"), +- "documentlink": actionMarkerFunc(documentLinkMarker), +- "foldingrange": actionMarkerFunc(foldingRangeMarker), +- "format": actionMarkerFunc(formatMarker), +- "highlight": actionMarkerFunc(highlightMarker), +- "highlightall": actionMarkerFunc(highlightAllMarker), +- "hover": actionMarkerFunc(hoverMarker), +- "hovererr": actionMarkerFunc(hoverErrMarker), +- "implementation": actionMarkerFunc(implementationMarker, "err"), +- "incomingcalls": actionMarkerFunc(incomingCallsMarker), +- "inlayhints": actionMarkerFunc(inlayhintsMarker), +- "outgoingcalls": actionMarkerFunc(outgoingCallsMarker), +- "preparerename": actionMarkerFunc(prepareRenameMarker, "span"), +- "rank": actionMarkerFunc(rankMarker), +- "refs": actionMarkerFunc(refsMarker), +- "rename": actionMarkerFunc(renameMarker), +- "renameerr": actionMarkerFunc(renameErrMarker), +- "selectionrange": actionMarkerFunc(selectionRangeMarker), +- "signature": actionMarkerFunc(signatureMarker), +- "snippet": actionMarkerFunc(snippetMarker), +- "subtypes": actionMarkerFunc(subtypesMarker), +- "supertypes": actionMarkerFunc(supertypesMarker), +- "quickfix": actionMarkerFunc(quickfixMarker), +- "quickfixerr": actionMarkerFunc(quickfixErrMarker), +- "symbol": actionMarkerFunc(symbolMarker), +- "token": actionMarkerFunc(tokenMarker), +- "typedef": actionMarkerFunc(typedefMarker), +- "workspacesymbol": actionMarkerFunc(workspaceSymbolMarker), +- "mcptool": actionMarkerFunc(mcpToolMarker, "location", "output"), +-} +- +-// markerTest holds all the test data extracted from a test txtar archive. +-// +-// See the documentation for RunMarkerTests for more information on the archive +-// format. +-type markerTest struct { +- name string // relative path to the txtar file in the testdata dir +- fset *token.FileSet // fileset used for parsing notes +- content []byte // raw test content +- archive *txtar.Archive // original test archive +- settings map[string]any // gopls settings +- capabilities []byte // content of capabilities.json file +- env map[string]string // editor environment +- proxyFiles map[string][]byte // proxy content +- files map[string][]byte // data files from the archive (excluding special files) +- notes []*expect.Note // extracted notes from data files +- golden map[expect.Identifier]*Golden // extracted golden content, by identifier name +- +- skipReason string // the skip reason extracted from the "skip" archive file +- flags []string // flags extracted from the special "flags" archive file. +- +- // Parsed flags values. See the flag definitions below for documentation. +- minGoVersion, maxGoVersion string // min/max version of Go runtime +- minGoCommandVersion, maxGoCommandVersion string // min/max version of ambient go command +- +- cgo bool +- writeGoSum []string +- skipGOOS []string +- skipGOARCH []string +- ignoreExtraDiags bool +- filterBuiltins bool +- filterKeywords bool +- errorsOK bool +- mcp bool +-} +- +-// flagSet returns the flagset used for parsing the special "flags" file in the +-// test archive. 
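+-//
+-// For example, a test archive could carry a "flags" section such as the
+-// following (hypothetical values):
+-//
+-//	-- flags --
+-//	-min_go=go1.22 -cgo -ignore_extra_diags
+-//
+-// Its content is split with strings.Fields and parsed against this flag set.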
+-func (t *markerTest) flagSet() *flag.FlagSet { +- flags := flag.NewFlagSet(t.name, flag.ContinueOnError) +- flags.StringVar(&t.minGoVersion, "min_go", "", "if set, the minimum go1.X version required for this test") +- flags.StringVar(&t.maxGoVersion, "max_go", "", "if set, the maximum go1.X version required for this test") +- flags.StringVar(&t.minGoCommandVersion, "min_go_command", "", "if set, the minimum go1.X go command version required for this test") +- flags.StringVar(&t.maxGoCommandVersion, "max_go_command", "", "if set, the maximum go1.X go command version required for this test") +- flags.BoolVar(&t.cgo, "cgo", false, "if set, requires cgo (both the cgo tool and CGO_ENABLED=1)") +- flags.Var((*stringListValue)(&t.writeGoSum), "write_sumfile", "if set, write the sumfile for these directories") +- flags.Var((*stringListValue)(&t.skipGOOS), "skip_goos", "if set, skip this test on these GOOS values") +- flags.Var((*stringListValue)(&t.skipGOARCH), "skip_goarch", "if set, skip this test on these GOARCH values") +- flags.BoolVar(&t.ignoreExtraDiags, "ignore_extra_diags", false, "if set, suppress errors for unmatched diagnostics") +- flags.BoolVar(&t.filterBuiltins, "filter_builtins", true, "if set, filter builtins from completion results") +- flags.BoolVar(&t.filterKeywords, "filter_keywords", true, "if set, filter keywords from completion results") +- flags.BoolVar(&t.errorsOK, "errors_ok", false, "if set, Error level log messages are acceptable in this test") +- flags.BoolVar(&t.mcp, "mcp", false, "if set, enable model context protocol client and server in this test") +- return flags +-} +- +-// stringListValue implements flag.Value. +-type stringListValue []string +- +-func (l *stringListValue) Set(s string) error { +- if s != "" { +- for d := range strings.SplitSeq(s, ",") { +- *l = append(*l, strings.TrimSpace(d)) +- } +- } +- return nil +-} +- +-func (l stringListValue) String() string { +- return strings.Join([]string(l), ",") +-} +- +-func (mark *marker) getGolden(id expect.Identifier) *Golden { +- t := mark.run.test +- golden, ok := t.golden[id] +- // If there was no golden content for this identifier, we must create one +- // to handle the case where -update is set: we need a place to store +- // the updated content. +- if !ok { +- golden = &Golden{id: id} +- +- // TODO(adonovan): the separation of markerTest (the +- // static aspects) from markerTestRun (the dynamic +- // ones) is evidently bogus because here we modify +- // markerTest during execution. Let's merge the two. +- t.golden[id] = golden +- } +- if golden.firstReference == "" { +- golden.firstReference = mark.path() +- } +- return golden +-} +- +-// Golden holds extracted golden content for a single @<name> prefix. +-// +-// When -update is set, golden captures the updated golden contents for later +-// writing. +-type Golden struct { +- id expect.Identifier +- firstReference string // file name first referencing this golden content +- data map[string][]byte // key "" => @id itself +- updated map[string][]byte +-} +- +-// Get returns golden content for the given name, which corresponds to the +-// relative path following the golden prefix @<name>/. For example, to access +-// the content of @foo/path/to/result.json from the Golden associated with +-// @foo, name should be "path/to/result.json". +-// +-// If -update is set, the given update function will be called to get the +-// updated golden content that should be written back to testdata. 
+-// +-// Marker functions must use this method instead of accessing data entries +-// directly otherwise the -update operation will delete those entries. +-// +-// TODO(rfindley): rethink the logic here. We may want to separate Get and Set, +-// and not delete golden content that isn't set. +-func (g *Golden) Get(t testing.TB, name string, updated []byte) ([]byte, bool) { +- if existing, ok := g.updated[name]; ok { +- // Multiple tests may reference the same golden data, but if they do they +- // must agree about its expected content. +- if diff := compare.NamedText("existing", "updated", string(existing), string(updated)); diff != "" { +- t.Errorf("conflicting updates for golden data %s/%s:\n%s", g.id, name, diff) +- } +- } +- if g.updated == nil { +- g.updated = make(map[string][]byte) +- } +- g.updated[name] = updated +- if *update { +- return updated, true +- } +- +- res, ok := g.data[name] +- return res, ok +-} +- +-// loadMarkerTests walks the given dir looking for .txt files, which it +-// interprets as a txtar archive. +-// +-// See the documentation for RunMarkerTests for more details on the test data +-// archive. +-func loadMarkerTests(dir string) ([]*markerTest, error) { +- var tests []*markerTest +- err := filepath.WalkDir(dir, func(path string, _ fs.DirEntry, err error) error { +- if strings.HasSuffix(path, ".txt") { +- content, err := os.ReadFile(path) +- if err != nil { +- return err +- } +- +- name := filepath.ToSlash(strings.TrimPrefix(path, dir+string(filepath.Separator))) +- test, err := loadMarkerTest(name, content) +- if err != nil { +- return fmt.Errorf("%s: %v", path, err) +- } +- tests = append(tests, test) +- } +- return err +- }) +- return tests, err +-} +- +-func loadMarkerTest(name string, content []byte) (*markerTest, error) { +- archive := txtar.Parse(content) +- if len(archive.Files) == 0 { +- return nil, fmt.Errorf("txtar file has no '-- filename --' sections") +- } +- if bytes.Contains(archive.Comment, []byte("\n-- ")) { +- // This check is conservative, but the comment is only a comment. 
+- return nil, fmt.Errorf("ill-formed '-- filename --' header in comment") +- } +- test := &markerTest{ +- name: name, +- fset: token.NewFileSet(), +- content: content, +- archive: archive, +- files: make(map[string][]byte), +- golden: make(map[expect.Identifier]*Golden), +- } +- seen := make(map[string]bool) +- for _, file := range archive.Files { +- if seen[file.Name] { +- return nil, fmt.Errorf("duplicate archive section %q", file.Name) +- } +- seen[file.Name] = true +- switch { +- case file.Name == "skip": +- reason := strings.ReplaceAll(string(file.Data), "\n", " ") +- reason = strings.TrimSpace(reason) +- test.skipReason = reason +- +- case file.Name == "flags": +- test.flags = strings.Fields(string(file.Data)) +- +- case file.Name == "settings.json": +- if err := json.Unmarshal(file.Data, &test.settings); err != nil { +- return nil, err +- } +- +- case file.Name == "capabilities.json": +- test.capabilities = file.Data // lazily unmarshalled by the editor +- +- case file.Name == "env": +- test.env = make(map[string]string) +- fields := strings.Fields(string(file.Data)) +- for _, field := range fields { +- key, value, ok := strings.Cut(field, "=") +- if !ok { +- return nil, fmt.Errorf("env vars must be formatted as var=value, got %q", field) +- } +- test.env[key] = value +- } +- +- case strings.HasPrefix(file.Name, "@"): // golden content +- idstring, name, _ := strings.Cut(file.Name[len("@"):], "/") +- id := expect.Identifier(idstring) +- // Note that a file.Name of just "@id" gives (id, name) = ("id", ""). +- if _, ok := test.golden[id]; !ok { +- test.golden[id] = &Golden{ +- id: id, +- data: make(map[string][]byte), +- } +- } +- test.golden[id].data[name] = file.Data +- +- case strings.HasPrefix(file.Name, "proxy/"): +- name := file.Name[len("proxy/"):] +- if test.proxyFiles == nil { +- test.proxyFiles = make(map[string][]byte) +- } +- test.proxyFiles[name] = file.Data +- +- default: // ordinary file content +- notes, err := expect.Parse(test.fset, file.Name, file.Data) +- if err != nil { +- return nil, fmt.Errorf("parsing notes in %q: %v", file.Name, err) +- } +- +- // Reject common misspelling: "// @mark". +- // TODO(adonovan): permit "// @" within a string. Detect multiple spaces. +- if i := bytes.Index(file.Data, []byte("// @")); i >= 0 { +- line := 1 + bytes.Count(file.Data[:i], []byte("\n")) +- return nil, fmt.Errorf("%s:%d: unwanted space before marker (// @)", file.Name, line) +- } +- +- // The 'go list' command doesn't work correct with modules named +- // testdata", so don't allow it as a module name (golang/go#65406). +- // (Otherwise files within it will end up in an ad hoc +- // package, "command-line-arguments/$TMPDIR/...".) +- if filepath.Base(file.Name) == "go.mod" && +- bytes.Contains(file.Data, []byte("module testdata")) { +- return nil, fmt.Errorf("'testdata' is not a valid module name") +- } +- +- test.notes = append(test.notes, notes...) +- test.files[file.Name] = file.Data +- } +- +- // Print a warning if we see what looks like "-- filename --" +- // without the second "--". It's not necessarily wrong, +- // but it should almost never appear in our test inputs. +- if bytes.Contains(file.Data, []byte("\n-- ")) { +- log.Printf("ill-formed '-- filename --' header in %s?", file.Name) +- } +- } +- +- // Parse flags after loading files, as they may have been set by the "flags" +- // file. 
+- if err := test.flagSet().Parse(test.flags); err != nil { +- return nil, fmt.Errorf("parsing flags: %v", err) +- } +- +- return test, nil +-} +- +-// formatTest formats the test as a txtar archive. +-func formatTest(test *markerTest) ([]byte, error) { +- arch := &txtar.Archive{ +- Comment: test.archive.Comment, +- } +- +- updatedGolden := make(map[string][]byte) +- firstReferences := make(map[string]string) +- for id, g := range test.golden { +- for name, data := range g.updated { +- filename := "@" + path.Join(string(id), name) // name may be "" +- updatedGolden[filename] = data +- firstReferences[filename] = g.firstReference +- } +- } +- +- // Preserve the original ordering of archive files. +- for _, file := range test.archive.Files { +- switch file.Name { +- // Preserve configuration files exactly as they were. They must have parsed +- // if we got this far. +- case "skip", "flags", "settings.json", "capabilities.json", "env": +- arch.Files = append(arch.Files, file) +- default: +- if _, ok := test.files[file.Name]; ok { // ordinary file +- arch.Files = append(arch.Files, file) +- } else if strings.HasPrefix(file.Name, "proxy/") { // proxy file +- arch.Files = append(arch.Files, file) +- } else if data, ok := updatedGolden[file.Name]; ok { // golden file +- arch.Files = append(arch.Files, txtar.File{Name: file.Name, Data: data}) +- delete(updatedGolden, file.Name) +- } +- } +- } +- +- // ...but insert new golden files after their first reference. +- var newGoldenFiles []txtar.File +- for filename, data := range updatedGolden { +- // TODO(rfindley): it looks like this implicitly removes trailing newlines +- // from golden content. Is there any way to fix that? Perhaps we should +- // just make the diff tolerant of missing newlines? +- newGoldenFiles = append(newGoldenFiles, txtar.File{Name: filename, Data: data}) +- } +- // Sort new golden files lexically. +- sort.Slice(newGoldenFiles, func(i, j int) bool { +- return newGoldenFiles[i].Name < newGoldenFiles[j].Name +- }) +- for _, g := range newGoldenFiles { +- insertAt := len(arch.Files) +- if firstRef := firstReferences[g.Name]; firstRef != "" { +- for i, f := range arch.Files { +- if f.Name == firstRef { +- // Insert alphabetically among golden files following the test file. +- for i++; i < len(arch.Files); i++ { +- f := arch.Files[i] +- if !strings.HasPrefix(f.Name, "@") || f.Name >= g.Name { +- insertAt = i +- break +- } +- } +- break +- } +- } +- } +- arch.Files = slices.Insert(arch.Files, insertAt, g) +- } +- +- return txtar.Format(arch), nil +-} +- +-// newEnv creates a new environment for a marker test. +-// +-// TODO(rfindley): simplify and refactor the construction of testing +-// environments across integration tests, marker tests, and benchmarks. +-func newEnv(t *testing.T, cache *cache.Cache, files, proxyFiles map[string][]byte, writeGoSum []string, config fake.EditorConfig, enableMCP bool) *integration.Env { +- sandbox, err := fake.NewSandbox(&fake.SandboxConfig{ +- RootDir: t.TempDir(), +- Files: files, +- ProxyFiles: proxyFiles, +- }) +- if err != nil { +- t.Fatal(err) +- } +- +- for _, dir := range writeGoSum { +- if _, err := sandbox.RunGoCommand(context.Background(), dir, "list", []string{"-mod=mod", "..."}, []string{"GOWORK=off"}, true); err != nil { +- t.Fatal(err) +- } +- } +- +- // Put a debug instance in the context to prevent logging to stderr. +- // See associated TODO in runner.go: we should revisit this pattern. 
+- ctx := context.Background() +- ctx = debug.WithInstance(ctx) +- +- awaiter := integration.NewAwaiter(sandbox.Workdir) +- +- ss := lsprpc.NewStreamServer(cache, false, nil) +- +- var mcpServer *httptest.Server +- if enableMCP { +- mcpServer = httptest.NewServer(internalmcp.HTTPHandler(ss, false)) +- } +- +- server := servertest.NewPipeServer(ss, jsonrpc2.NewRawStream) +- editor, err := fake.NewEditor(sandbox, config).Connect(ctx, server, awaiter.Hooks()) +- if err != nil { +- sandbox.Close() // ignore error +- t.Fatal(err) +- } +- +- if err := awaiter.Await(ctx, integration.OnceMet( +- integration.InitialWorkspaceLoad, +- integration.NoShownMessage(""), +- )); err != nil { +- sandbox.Close() // ignore error +- t.Fatal(err) +- } +- +- var mcpSession *mcp.ClientSession +- if enableMCP { +- client := mcp.NewClient("test", "v1.0.0", nil) +- mcpSession, err = client.Connect(ctx, mcp.NewSSEClientTransport(mcpServer.URL, nil)) +- if err != nil { +- t.Fatalf("fail to connect to mcp server: %v", err) +- } +- } +- +- return &integration.Env{ +- TB: t, +- Ctx: ctx, +- Editor: editor, +- Sandbox: sandbox, +- Awaiter: awaiter, +- MCPSession: mcpSession, +- MCPServer: mcpServer, +- } +-} +- +-// A markerTestRun holds the state of one run of a marker test archive. +-type markerTestRun struct { +- test *markerTest +- env *integration.Env +- settings map[string]any +- +- // Collected information. +- // Each @diag/@quickfix marker eliminates an entry from diags. +- values map[expect.Identifier]any +- diags map[protocol.Location][]protocol.Diagnostic // diagnostics by position; location end == start +- +- // Notes that weren't associated with a top-level marker func. They may be +- // consumed by another marker (e.g. @codelenses collects @codelens markers). +- // Any notes that aren't consumed are flagged as an error. +- extraNotes map[protocol.DocumentURI]map[string][]*expect.Note +-} +- +-// sprintf returns a formatted string after applying pre-processing to +-// arguments of the following types: +-// - token.Pos: formatted using (*markerTestRun).fmtPos +-// - protocol.Location: formatted using (*markerTestRun).fmtLoc +-func (c *marker) sprintf(format string, args ...any) string { +- if false { +- _ = fmt.Sprintf(format, args...) // enable vet printf checker +- } +- var args2 []any +- for _, arg := range args { +- switch arg := arg.(type) { +- case token.Pos: +- args2 = append(args2, c.run.fmtPos(arg)) +- case protocol.Location: +- args2 = append(args2, c.run.fmtLoc(arg)) +- default: +- args2 = append(args2, arg) +- } +- } +- return fmt.Sprintf(format, args2...) +-} +- +-// fmtPos formats the given pos in the context of the test, using +-// archive-relative paths for files and including the line number in the full +-// archive file. +-func (run *markerTestRun) fmtPos(pos token.Pos) string { +- file := run.test.fset.File(pos) +- if file == nil { +- run.env.TB.Errorf("position %d not in test fileset", pos) +- return "<invalid location>" +- } +- m, err := run.env.Editor.Mapper(file.Name()) +- if err != nil { +- run.env.TB.Errorf("%s", err) +- return "<invalid location>" +- } +- loc, err := m.PosLocation(file, pos, pos) +- if err != nil { +- run.env.TB.Errorf("Mapper(%s).PosLocation failed: %v", file.Name(), err) +- } +- return run.fmtLoc(loc) +-} +- +-// fmtLoc formats the given location in the context of the test, using +-// archive-relative paths for files and including the line number in the full +-// archive file. 
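+-//
+-// For example (hypothetical file names and numbers), a same-line range might
+-// be rendered as:
+-//
+-//	a.go:4:6-10 (hover/basic.txt:17:6-10)
+-//
+-// where the first span is relative to the archive file a.go and the second to
+-// the enclosing txtar archive as a whole.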
+-func (run *markerTestRun) fmtLoc(loc protocol.Location) string { +- if loc == (protocol.Location{}) { +- run.env.TB.Errorf("unable to find %s in test archive", loc) +- return "<invalid location>" +- } +- +- // Format builtin locations as "file:///pseudo/builtin/builtin.go:0:0", +- // which is the predefined BUILTIN marker; ditto UNSAFE. +- if strings.HasSuffix(string(loc.URI), "src/unsafe/unsafe.go") || +- strings.HasSuffix(string(loc.URI), "src/builtin/builtin.go") { +- name := filepath.Base(filepath.Dir(loc.URI.Path())) // "unsafe" or "builtin" +- loc.URI = protocol.DocumentURI("file:///pseudo/" + name + "/" + name + ".go") +- loc.Range = protocol.Range{} +- } +- +- lines := bytes.Count(run.test.archive.Comment, []byte("\n")) +- var name string +- for _, f := range run.test.archive.Files { +- lines++ // -- separator -- +- uri := run.env.Sandbox.Workdir.URI(f.Name) +- if uri == loc.URI { +- name = f.Name +- break +- } +- lines += bytes.Count(f.Data, []byte("\n")) +- } +- if name == "" { +- // Fall back to formatting the "lsp" location. +- // These will be in UTF-16, but we probably don't need to clarify that, +- // since it will be implied by the file:// URI format. +- return summarizeLoc(string(loc.URI), +- int(loc.Range.Start.Line), int(loc.Range.Start.Character), +- int(loc.Range.End.Line), int(loc.Range.End.Character)) +- } +- name, startLine, startCol, endLine, endCol := run.mapLocation(loc) +- innerSpan := summarizeLoc(name, startLine, startCol, endLine, endCol) +- outerSpan := summarizeLoc(run.test.name, lines+startLine, startCol, lines+endLine, endCol) +- return fmt.Sprintf("%s (%s)", innerSpan, outerSpan) +-} +- +-// mapLocation returns the relative path and utf8 span of the corresponding +-// location, which must be a valid location in an archive file. +-func (run *markerTestRun) mapLocation(loc protocol.Location) (name string, startLine, startCol, endLine, endCol int) { +- // Note: Editor.Mapper fails if loc.URI is not open, but we always open all +- // archive files, so this is probably OK. +- // +- // In the future, we may want to have the editor read contents from disk if +- // the URI is not open. +- name = run.env.Sandbox.Workdir.URIToPath(loc.URI) +- m, err := run.env.Editor.Mapper(name) +- if err != nil { +- run.env.TB.Errorf("internal error: %v", err) +- return +- } +- start, end, err := m.RangeOffsets(loc.Range) +- if err != nil { +- run.env.TB.Errorf("error formatting location %s: %v", loc, err) +- return +- } +- startLine, startCol = m.OffsetLineCol8(start) +- endLine, endCol = m.OffsetLineCol8(end) +- return name, startLine, startCol, endLine, endCol +-} +- +-// fmtLocForGolden is like fmtLoc, but chooses more succinct and stable +-// formatting, such as would be used for formatting locations in Golden +-// content. +-func (run *markerTestRun) fmtLocForGolden(loc protocol.Location) string { +- if loc == (protocol.Location{}) { +- return "<invalid location>" +- } +- name := run.env.Sandbox.Workdir.URIToPath(loc.URI) +- // Note: we check IsAbs on filepaths rather than the slash-ified name for +- // accurate handling of windows drive letters. +- if filepath.IsAbs(filepath.FromSlash(name)) { +- // Don't format any position information in this case, since it will be +- // volatile. 
+- return "<external>" +- } +- return summarizeLoc(run.mapLocation(loc)) +-} +- +-// summarizeLoc formats a summary of the given location, in the form +-// +-// <name>:<startLine>:<startCol>[-[<endLine>:]endCol] +-func summarizeLoc(name string, startLine, startCol, endLine, endCol int) string { +- span := fmt.Sprintf("%s:%d:%d", name, startLine, startCol) +- if startLine != endLine || startCol != endCol { +- span += "-" +- if endLine != startLine { +- span += fmt.Sprintf("%d:", endLine) +- } +- span += fmt.Sprintf("%d", endCol) +- } +- return span +-} +- +-// ---- converters ---- +- +-// Types with special handling. +-var ( +- goldenType = reflect.TypeOf(&Golden{}) +- markerType = reflect.TypeOf(marker{}) +- stringMatcherType = reflect.TypeOf(stringMatcher{}) +-) +- +-// Custom conversions. +-// +-// These functions are called after valueMarkerFuncs have run to convert +-// arguments into the desired parameter types. +-// +-// Converters should return an error rather than calling marker.errorf(). +-var customConverters = map[reflect.Type]func(marker, any) (any, error){ +- reflect.TypeOf(protocol.Location{}): converter(convertLocation), +- reflect.TypeOf(completionLabel("")): converter(convertCompletionLabel), +-} +- +-// converter transforms a typed argument conversion function to an untyped +-// conversion function. +-func converter[T any](f func(marker, any) (T, error)) func(marker, any) (any, error) { +- return func(m marker, arg any) (any, error) { +- return f(m, arg) +- } +-} +- +-func convert(mark marker, arg any, paramType reflect.Type) (any, error) { +- // Handle stringMatcher and golden parameters before resolving identifiers, +- // because golden content lives in a separate namespace from other +- // identifiers. +- // TODO(rfindley): simplify by flattening the namespace. This interacts +- // poorly with named argument resolution. +- switch paramType { +- case stringMatcherType: +- return convertStringMatcher(mark, arg) +- case goldenType: +- id, ok := arg.(expect.Identifier) +- if !ok { +- return nil, fmt.Errorf("invalid input type %T: golden key must be an identifier", arg) +- } +- return mark.getGolden(id), nil +- } +- if id, ok := arg.(expect.Identifier); ok { +- if arg2, ok := mark.run.values[id]; ok { +- arg = arg2 +- } +- } +- if converter, ok := customConverters[paramType]; ok { +- arg2, err := converter(mark, arg) +- if err != nil { +- return nil, err +- } +- arg = arg2 +- } +- if reflect.TypeOf(arg).AssignableTo(paramType) { +- return arg, nil // no conversion required +- } +- return nil, fmt.Errorf("cannot convert %v (%T) to %s", arg, arg, paramType) +-} +- +-// convertNamedArgLocation is a workaround for converting locations referenced +-// by a named argument. See the TODO in [convert]: this wouldn't be necessary +-// if we flattened the namespace such that golden content lived in the same +-// namespace as values. +-func convertNamedArgLocation(mark marker, arg any) (protocol.Location, error) { +- if id, ok := arg.(expect.Identifier); ok { +- if v, ok := mark.run.values[id]; ok { +- if loc, ok := v.(protocol.Location); ok { +- return loc, nil +- } +- return protocol.Location{}, fmt.Errorf("invalid location value %v", v) +- } +- } +- return convertLocation(mark, arg) +-} +- +-// convertLocation converts a string or regexp argument into the protocol +-// location corresponding to the first position of the string (or first match +-// of the regexp) in the line preceding the note. 
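+-//
+-// For example (a hypothetical test line, not from this archive):
+-//
+-//	sum := x + y //@loc(sumExpr, re"x \+ y")
+-//
+-// binds the location of the match "x + y" on that line to the identifier
+-// sumExpr; a plain string argument such as "sum" would instead match the
+-// first occurrence of that substring on the same line.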
+-func convertLocation(mark marker, arg any) (protocol.Location, error) { +- // matchContent is used to match the given argument against the file content +- // starting at the marker line. +- var matchContent func([]byte) (int, int, error) +- +- switch arg := arg.(type) { +- case protocol.Location: +- return arg, nil // nothing to do +- case string: +- matchContent = func(content []byte) (int, int, error) { +- idx := bytes.Index(content, []byte(arg)) +- if idx < 0 { +- return 0, 0, fmt.Errorf("substring %q not found", arg) +- } +- return idx, idx + len(arg), nil +- } +- case *regexp.Regexp: +- matchContent = func(content []byte) (int, int, error) { +- matches := arg.FindSubmatchIndex(content) +- if len(matches) == 0 { +- return 0, 0, fmt.Errorf("no match for regexp %q", arg) +- } +- switch len(matches) { +- case 2: +- // no subgroups: return the range of the regexp expression +- return matches[0], matches[1], nil +- case 4: +- // one subgroup: return its range +- return matches[2], matches[3], nil +- default: +- return 0, 0, fmt.Errorf("invalid location regexp %q: expect either 0 or 1 subgroups, got %d", arg, len(matches)/2-1) +- } +- } +- default: +- return protocol.Location{}, fmt.Errorf("cannot convert argument type %T to location (must be a string or regexp to match the preceding line)", arg) +- } +- +- // Now use matchFunc to match a range starting on the marker line. +- +- file := mark.run.test.fset.File(mark.note.Pos) +- posn := safetoken.Position(file, mark.note.Pos) +- lineStart := file.LineStart(posn.Line) +- lineStartOff, lineEndOff, err := safetoken.Offsets(file, lineStart, mark.note.Pos) +- if err != nil { +- return protocol.Location{}, err +- } +- m := mark.mapper() +- start, end, err := matchContent(m.Content[lineStartOff:]) +- if err != nil { +- return protocol.Location{}, err +- } +- startOff, endOff := lineStartOff+start, lineStartOff+end +- if startOff > lineEndOff { +- // The start of the match must be between the start of the line and the +- // marker position (inclusive). +- return protocol.Location{}, fmt.Errorf("no matching range found starting on the current line") +- } +- return m.OffsetLocation(startOff, endOff) +-} +- +-// completionLabel is a special parameter type that may be converted from a +-// string literal, or extracted from a completion item. +-// +-// See [convertCompletionLabel]. +-type completionLabel string +- +-// convertCompletionLabel coerces an argument to a [completionLabel] parameter +-// type. +-// +-// If the arg is a string, it is trivially converted. If the arg is a +-// completionItem, its label is extracted. +-// +-// This allows us to stage a migration of the "snippet" marker to a simpler +-// model where the completion label can just be listed explicitly. +-func convertCompletionLabel(mark marker, arg any) (completionLabel, error) { +- switch arg := arg.(type) { +- case string: +- return completionLabel(arg), nil +- case completionItem: +- return completionLabel(arg.Label), nil +- default: +- return "", fmt.Errorf("cannot convert argument type %T to completion label (must be a string or completion item)", arg) +- } +-} +- +-// convertStringMatcher converts a string, regexp, or identifier +-// argument into a stringMatcher. The string is a substring of the +-// expected error, the regexp is a pattern than matches the expected +-// error, and the identifier is a golden file containing the expected +-// error. 
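+-//
+-// For example (hypothetical notes), each of the following supplies the
+-// stringMatcher argument of @renameerr in one of the three accepted forms:
+-//
+-//	//@renameerr("x", "y", "cannot rename")       // substring
+-//	//@renameerr("x", "y", re"cannot rename .*")  // regular expression
+-//	//@renameerr("x", "y", wantErr)               // golden section @wantErr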
+-func convertStringMatcher(mark marker, arg any) (stringMatcher, error) { +- switch arg := arg.(type) { +- case string: +- return stringMatcher{substr: arg}, nil +- case *regexp.Regexp: +- return stringMatcher{pattern: arg}, nil +- case expect.Identifier: +- golden := mark.getGolden(arg) +- return stringMatcher{golden: golden}, nil +- default: +- return stringMatcher{}, fmt.Errorf("cannot convert %T to wantError (want: string, regexp, or identifier)", arg) +- } +-} +- +-// A stringMatcher represents an expectation of a specific string value. +-// +-// It may be indicated in one of three ways, in 'expect' notation: +-// - an identifier 'foo', to compare (exactly) with the contents of the golden +-// section @foo; +-// - a pattern expression re"ab.*c", to match against a regular expression; +-// - a string literal "abc", to check for a substring. +-type stringMatcher struct { +- golden *Golden +- pattern *regexp.Regexp +- substr string +-} +- +-// empty reports whether the receiver is an empty stringMatcher. +-func (sm stringMatcher) empty() bool { +- return sm.golden == nil && sm.pattern == nil && sm.substr == "" +-} +- +-func (sm stringMatcher) String() string { +- if sm.golden != nil { +- return fmt.Sprintf("content from @%s entry", sm.golden.id) +- } else if sm.pattern != nil { +- return fmt.Sprintf("content matching %#q", sm.pattern) +- } else { +- return fmt.Sprintf("content with substring %q", sm.substr) +- } +-} +- +-// checkErr asserts that the given error matches the stringMatcher's expectations. +-func (sm stringMatcher) checkErr(mark marker, err error) { +- if err == nil { +- mark.errorf("@%s succeeded unexpectedly, want %v", mark.note.Name, sm) +- return +- } +- sm.check(mark, err.Error()) +-} +- +-// check asserts that the given content matches the stringMatcher's expectations. +-func (sm stringMatcher) check(mark marker, got string) { +- if sm.golden != nil { +- compareGolden(mark, []byte(got), sm.golden) +- } else if sm.pattern != nil { +- // Content must match the regular expression pattern. +- if !sm.pattern.MatchString(got) { +- mark.errorf("got %q, does not match pattern %#q", got, sm.pattern) +- } +- } else if !strings.Contains(got, sm.substr) { +- // Content must contain the expected substring. +- mark.errorf("got %q, want substring %q", got, sm.substr) +- } +-} +- +-// checkChangedFiles compares the files changed by an operation with their expected (golden) state. +-func checkChangedFiles(mark marker, changed map[string][]byte, golden *Golden) { +- // Check changed files match expectations. +- for filename, got := range changed { +- if want, ok := golden.Get(mark.T(), filename, got); !ok { +- mark.errorf("%s: unexpected change to file %s; got:\n%s", +- mark.note.Name, filename, got) +- } else if string(got) != string(want) { +- mark.errorf("%s: wrong file content for %s: got:\n%s\nwant:\n%s\ndiff:\n%s", +- mark.note.Name, filename, got, want, +- compare.Bytes(want, got)) +- } +- } +- +- // Report unmet expectations. +- for filename := range golden.data { +- if _, ok := changed[filename]; !ok { +- want, _ := golden.Get(mark.T(), filename, nil) +- mark.errorf("%s: missing change to file %s; want:\n%s", +- mark.note.Name, filename, want) +- } +- } +-} +- +-// checkDiffs computes unified diffs for each changed file, and compares with +-// the diff content stored in the given golden directory. 
+-func checkDiffs(mark marker, changed map[string][]byte, golden *Golden) { +- diffs := make(map[string]string) +- for name, after := range changed { +- before := mark.run.env.FileContent(name) +- // TODO(golang/go#64023): switch back to diff.Strings. +- // The attached issue is only one obstacle to switching. +- // Another is that different diff algorithms produce +- // different results, so if we commit diffs in test +- // expectations, then we need to either (1) state +- // which diff implementation they use and never change +- // it, or (2) don't compare diffs, but instead apply +- // the "want" diff and check that it produces the +- // "got" output. Option 2 is more robust, as it allows +- // the test expectation to use any valid diff. +- edits := myers.ComputeEdits(before, string(after)) +- d, err := diff.ToUnified("before", "after", before, edits, 0) +- if err != nil { +- // Can't happen: edits are consistent. +- log.Fatalf("internal error in diff.ToUnified: %v", err) +- } +- // Trim the unified header from diffs, as it is unnecessary and repetitive. +- difflines := strings.Split(d, "\n") +- if len(difflines) >= 2 && strings.HasPrefix(difflines[1], "+++") { +- diffs[name] = strings.Join(difflines[2:], "\n") +- } else { +- diffs[name] = d +- } +- } +- // Check changed files match expectations. +- for filename, got := range diffs { +- if want, ok := golden.Get(mark.T(), filename, []byte(got)); !ok { +- mark.errorf("%s: unexpected change to file %s; got diff:\n%s", +- mark.note.Name, filename, got) +- } else if got != string(want) { +- mark.errorf("%s: wrong diff for %s:\n\ngot:\n%s\n\nwant:\n%s\n", +- mark.note.Name, filename, got, want) +- } +- } +- // Report unmet expectations. +- for filename := range golden.data { +- if _, ok := changed[filename]; !ok { +- want, _ := golden.Get(mark.T(), filename, nil) +- mark.errorf("%s: missing change to file %s; want:\n%s", +- mark.note.Name, filename, want) +- } +- } +-} +- +-// ---- marker functions ---- +- +-// TODO(rfindley): consolidate documentation of these markers. They are already +-// documented above, so much of the documentation here is redundant. +- +-// completionItem is a simplified summary of a completion item. +-type completionItem struct { +- Label, Detail, Kind, Documentation string +-} +- +-func completionItemMarker(mark marker, label string, other ...string) completionItem { +- if len(other) > 3 { +- mark.errorf("too many arguments to @item: expect at most 4") +- } +- item := completionItem{ +- Label: label, +- } +- if len(other) > 0 { +- item.Detail = other[0] +- } +- if len(other) > 1 { +- item.Kind = other[1] +- } +- if len(other) > 2 { +- item.Documentation = other[2] +- } +- return item +-} +- +-func rankMarker(mark marker, src protocol.Location, items ...completionLabel) { +- // Separate positive and negative items (expectations). +- var pos, neg []completionLabel +- for _, item := range items { +- if strings.HasPrefix(string(item), "!") { +- neg = append(neg, item) +- } else { +- pos = append(pos, item) +- } +- } +- +- // Collect results that are present in items, preserving their order. 
+- list := mark.run.env.Completion(src) +- var got []string +- for _, g := range list.Items { +- for _, w := range pos { +- if g.Label == string(w) { +- got = append(got, g.Label) +- break +- } +- } +- for _, w := range neg { +- if g.Label == string(w[len("!"):]) { +- mark.errorf("got unwanted completion: %s", g.Label) +- break +- } +- } +- } +- var want []string +- for _, w := range pos { +- want = append(want, string(w)) +- } +- if diff := cmp.Diff(want, got); diff != "" { +- mark.errorf("completion rankings do not match (-want +got):\n%s", diff) +- } +-} +- +-func snippetMarker(mark marker, src protocol.Location, label completionLabel, want string) { +- list := mark.run.env.Completion(src) +- var ( +- found bool +- got string +- all []string // for errors +- ) +- items := filterBuiltinsAndKeywords(mark, list.Items) +- for _, i := range items { +- all = append(all, i.Label) +- if i.Label == string(label) { +- found = true +- if i.TextEdit != nil { +- if edit, err := protocol.SelectCompletionTextEdit(i, false); err == nil { +- got = edit.NewText +- } +- } +- break +- } +- } +- if !found { +- mark.errorf("no completion item found matching %s (got: %v)", label, all) +- return +- } +- if got != want { +- mark.errorf("snippets do not match: got:\n%q\nwant:\n%q", got, want) +- } +-} +- +-// completeMarker implements the @complete marker, running +-// textDocument/completion at the given src location and asserting that the +-// results match the expected results. +-func completeMarker(mark marker, src protocol.Location, want ...completionItem) { +- list := mark.run.env.Completion(src) +- items := filterBuiltinsAndKeywords(mark, list.Items) +- var got []completionItem +- for i, item := range items { +- simplified := completionItem{ +- Label: item.Label, +- Detail: item.Detail, +- Kind: fmt.Sprint(item.Kind), +- } +- if item.Documentation != nil { +- switch v := item.Documentation.Value.(type) { +- case string: +- simplified.Documentation = v +- case protocol.MarkupContent: +- simplified.Documentation = strings.TrimSpace(v.Value) // trim newlines +- } +- } +- // Support short-hand notation: if Detail, Kind, or Documentation are omitted from the +- // item, don't match them. +- if i < len(want) { +- if want[i].Detail == "" { +- simplified.Detail = "" +- } +- if want[i].Kind == "" { +- simplified.Kind = "" +- } +- if want[i].Documentation == "" { +- simplified.Documentation = "" +- } +- } +- got = append(got, simplified) +- } +- if len(want) == 0 { +- want = nil // got is nil if empty +- } +- if diff := cmp.Diff(want, got); diff != "" { +- mark.errorf("Completion(...) returned unexpected results (-want +got):\n%s", diff) +- } +-} +- +-// filterBuiltinsAndKeywords filters out builtins and keywords from completion +-// results. +-// +-// It over-approximates, and does not detect if builtins are shadowed. +-func filterBuiltinsAndKeywords(mark marker, items []protocol.CompletionItem) []protocol.CompletionItem { +- keep := 0 +- for _, item := range items { +- if mark.run.test.filterKeywords && item.Kind == protocol.KeywordCompletion { +- continue +- } +- if mark.run.test.filterBuiltins && types.Universe.Lookup(item.Label) != nil { +- continue +- } +- items[keep] = item +- keep++ +- } +- return items[:keep] +-} +- +-// acceptCompletionMarker implements the @acceptCompletion marker, running +-// textDocument/completion at the given src location and accepting the +-// candidate with the given label. The resulting source must match the provided +-// golden content. 
+-func acceptCompletionMarker(mark marker, src protocol.Location, label string, golden *Golden) { +- list := mark.run.env.Completion(src) +- var selected *protocol.CompletionItem +- for _, item := range list.Items { +- if item.Label == label { +- selected = &item +- break +- } +- } +- if selected == nil { +- mark.errorf("Completion(...) did not return an item labeled %q", label) +- return +- } +- edit, err := protocol.SelectCompletionTextEdit(*selected, false) +- if err != nil { +- mark.errorf("Completion(...) did not return a valid edit: %v", err) +- return +- } +- filename := mark.path() +- mapper := mark.mapper() +- patched, _, err := protocol.ApplyEdits(mapper, append([]protocol.TextEdit{edit}, selected.AdditionalTextEdits...)) +- if err != nil { +- mark.errorf("ApplyProtocolEdits failed: %v", err) +- return +- } +- changes := map[string][]byte{filename: patched} +- // Check the file state. +- checkChangedFiles(mark, changes, golden) +-} +- +-// defMarker implements the @def marker, running textDocument/definition at +-// the given location and asserting that there the results match want. +-func defMarker(mark marker, loc protocol.Location, want ...protocol.Location) { +- env := mark.run.env +- got, err := env.Editor.Definitions(env.Ctx, loc) +- if err != nil { +- mark.errorf("definition request failed: %v", err) +- return +- } +- +- if err := compareLocations(mark, got, want); err != nil { +- mark.errorf("def failed: %v", err) +- } +-} +- +-func typedefMarker(mark marker, src, dst protocol.Location) { +- got := mark.run.env.FirstTypeDefinition(src) +- if got != dst { +- mark.errorf("type definition location does not match:\n\tgot: %s\n\twant %s", +- mark.run.fmtLoc(got), mark.run.fmtLoc(dst)) +- } +-} +- +-func foldingRangeMarker(mark marker, g *Golden) { +- env := mark.run.env +- ranges, err := mark.server().FoldingRange(env.Ctx, &protocol.FoldingRangeParams{ +- TextDocument: mark.document(), +- }) +- if err != nil { +- mark.errorf("foldingRange failed: %v", err) +- return +- } +- var edits []protocol.TextEdit +- insert := func(line, char uint32, text string) { +- pos := protocol.Position{Line: line, Character: char} +- edits = append(edits, protocol.TextEdit{ +- Range: protocol.Range{ +- Start: pos, +- End: pos, +- }, +- NewText: text, +- }) +- } +- for i, rng := range ranges { +- // We assume the server populates these optional fields. +- insert(*rng.StartLine, *rng.StartCharacter, fmt.Sprintf("<%d kind=%q>", i, rng.Kind)) +- insert(*rng.EndLine, *rng.EndCharacter, fmt.Sprintf("</%d>", i)) +- } +- filename := mark.path() +- mapper, err := env.Editor.Mapper(filename) +- if err != nil { +- mark.errorf("Editor.Mapper(%s) failed: %v", filename, err) +- return +- } +- got, _, err := protocol.ApplyEdits(mapper, edits) +- if err != nil { +- mark.errorf("ApplyProtocolEdits failed: %v", err) +- return +- } +- want, _ := g.Get(mark.T(), "", got) +- if diff := compare.Bytes(want, got); diff != "" { +- mark.errorf("foldingRange mismatch:\n%s", diff) +- } +-} +- +-// formatMarker implements the @format marker. 
+-func formatMarker(mark marker, golden *Golden) { +- edits, err := mark.server().Formatting(mark.ctx(), &protocol.DocumentFormattingParams{ +- TextDocument: mark.document(), +- }) +- var got []byte +- if err != nil { +- got = []byte(err.Error() + "\n") // all golden content is newline terminated +- } else { +- env := mark.run.env +- filename := mark.path() +- mapper, err := env.Editor.Mapper(filename) +- if err != nil { +- mark.errorf("Editor.Mapper(%s) failed: %v", filename, err) +- } +- +- got, _, err = protocol.ApplyEdits(mapper, edits) +- if err != nil { +- mark.errorf("ApplyProtocolEdits failed: %v", err) +- return +- } +- } +- +- compareGolden(mark, got, golden) +-} +- +-func highlightLocationMarker(mark marker, loc protocol.Location, kindName expect.Identifier) protocol.DocumentHighlight { +- var kind protocol.DocumentHighlightKind +- switch kindName { +- case "read": +- kind = protocol.Read +- case "write": +- kind = protocol.Write +- case "text": +- kind = protocol.Text +- default: +- mark.errorf("invalid highlight kind: %q", kindName) +- } +- +- return protocol.DocumentHighlight{ +- Range: loc.Range, +- Kind: kind, +- } +-} +- +-func sortDocumentHighlights(s []protocol.DocumentHighlight) { +- sort.Slice(s, func(i, j int) bool { +- return protocol.CompareRange(s[i].Range, s[j].Range) < 0 +- }) +-} +- +-// highlightAllMarker makes textDocument/highlight +-// requests at locations of equivalence classes. Given input +-// highlightall(X1, X2, ..., Xn), the marker checks +-// highlight(X1) = highlight(X2) = ... = highlight(Xn) = {X1, X2, ..., Xn}. +-// It is not the general rule for all highlighting, and use @highlight +-// for asymmetric cases. +-// +-// TODO(b/288111111): this is a bit of a hack. We should probably +-// have a more general way of testing that a function is idempotent. +-func highlightAllMarker(mark marker, all ...protocol.DocumentHighlight) { +- sortDocumentHighlights(all) +- for _, src := range all { +- loc := mark.uri().Location(src.Range) +- got := mark.run.env.DocumentHighlight(loc) +- sortDocumentHighlights(got) +- +- if d := cmp.Diff(all, got); d != "" { +- mark.errorf("DocumentHighlight(%v) mismatch (-want +got):\n%s", loc, d) +- } +- } +-} +- +-func highlightMarker(mark marker, src protocol.DocumentHighlight, dsts ...protocol.DocumentHighlight) { +- loc := mark.uri().Location(src.Range) +- got := mark.run.env.DocumentHighlight(loc) +- +- sortDocumentHighlights(got) +- sortDocumentHighlights(dsts) +- +- if diff := cmp.Diff(dsts, got, cmpopts.EquateEmpty()); diff != "" { +- mark.errorf("DocumentHighlight(%v) mismatch (-want +got):\n%s", src, diff) +- } +-} +- +-func hoverMarker(mark marker, src, dst protocol.Location, sc stringMatcher) { +- content, gotDst := mark.run.env.Hover(src) +- if gotDst != dst { +- mark.errorf("hover location does not match:\n\tgot: %s\n\twant %s)", mark.run.fmtLoc(gotDst), mark.run.fmtLoc(dst)) +- } +- gotMD := "" +- if content != nil { +- gotMD = content.Value +- } +- sc.check(mark, gotMD) +-} +- +-func hoverErrMarker(mark marker, src protocol.Location, em stringMatcher) { +- _, _, err := mark.editor().Hover(mark.ctx(), src) +- em.checkErr(mark, err) +-} +- +-// locMarker implements the @loc marker. +-func locMarker(mark marker, loc protocol.Location) protocol.Location { return loc } +- +-// defLocMarker implements the @defloc marker, which binds a location to the +-// (first) result of a jump-to-definition request. 
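+-//
+-// For example (hypothetical), given a function f declared elsewhere in the
+-// archive:
+-//
+-//	g := f() //@defloc(fdecl, "f")
+-//
+-// binds the location of f's declaration (the first definition result for the
+-// reference "f") to the identifier fdecl.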
+-func defLocMarker(mark marker, loc protocol.Location) protocol.Location { +- return mark.run.env.FirstDefinition(loc) +-} +- +-// diagMarker implements the @diag marker. It eliminates diagnostics from +-// the observed set in mark.test. +-func diagMarker(mark marker, loc protocol.Location, re *regexp.Regexp) { +- exact := namedArg(mark, "exact", false) +- if _, ok := removeDiagnostic(mark, loc, exact, re); !ok { +- mark.errorf("no diagnostic at %v matches %q", loc, re) +- } +-} +- +-// removeDiagnostic looks for a diagnostic matching loc at the given position. +-// +-// If found, it returns (diag, true), and eliminates the matched diagnostic +-// from the unmatched set. +-// +-// If not found, it returns (protocol.Diagnostic{}, false). +-func removeDiagnostic(mark marker, loc protocol.Location, matchEnd bool, re *regexp.Regexp) (protocol.Diagnostic, bool) { +- key := loc +- key.Range.End = key.Range.Start // diagnostics ignore end position. +- diags := mark.run.diags[key] +- for i, diag := range diags { +- if re.MatchString(diag.Message) && (!matchEnd || diag.Range.End == loc.Range.End) { +- mark.run.diags[key] = slices.Delete(diags, i, i+1) +- return diag, true +- } +- } +- return protocol.Diagnostic{}, false +-} +- +-// renameMarker implements the @rename(location, new, golden) marker. +-func renameMarker(mark marker, loc protocol.Location, newName string, golden *Golden) { +- changed, err := rename(mark.run.env, loc, newName) +- if err != nil { +- mark.errorf("rename failed: %v. (Use @renameerr for expected errors.)", err) +- return +- } +- checkDiffs(mark, changed, golden) +-} +- +-// renameErrMarker implements the @renamererr(location, new, error) marker. +-func renameErrMarker(mark marker, loc protocol.Location, newName string, wantErr stringMatcher) { +- _, err := rename(mark.run.env, loc, newName) +- wantErr.checkErr(mark, err) +-} +- +-func selectionRangeMarker(mark marker, loc protocol.Location, g *Golden) { +- ranges, err := mark.server().SelectionRange(mark.ctx(), &protocol.SelectionRangeParams{ +- TextDocument: mark.document(), +- Positions: []protocol.Position{loc.Range.Start}, +- }) +- if err != nil { +- mark.errorf("SelectionRange failed: %v", err) +- return +- } +- var buf bytes.Buffer +- m := mark.mapper() +- for i, path := range ranges { +- fmt.Fprintf(&buf, "Ranges %d:", i) +- rng := path +- for { +- s, e, err := m.RangeOffsets(rng.Range) +- if err != nil { +- mark.errorf("RangeOffsets failed: %v", err) +- return +- } +- +- var snippet string +- if e-s < 30 { +- snippet = string(m.Content[s:e]) +- } else { +- snippet = string(m.Content[s:s+15]) + "..." 
+ string(m.Content[e-15:e]) +- } +- +- fmt.Fprintf(&buf, "\n\t%v %q", rng.Range, strings.ReplaceAll(snippet, "\n", "\\n")) +- +- if rng.Parent == nil { +- break +- } +- rng = *rng.Parent +- } +- buf.WriteRune('\n') +- } +- compareGolden(mark, buf.Bytes(), g) +-} +- +-func tokenMarker(mark marker, loc protocol.Location, tokenType, mod string) { +- tokens := mark.run.env.SemanticTokensRange(loc) +- if len(tokens) != 1 { +- mark.errorf("got %d tokens, want 1", len(tokens)) +- return +- } +- tok := tokens[0] +- if tok.TokenType != tokenType { +- mark.errorf("token type = %q, want %q", tok.TokenType, tokenType) +- } +- if tok.Mod != mod { +- mark.errorf("token mod = %q, want %q", tok.Mod, mod) +- } +-} +- +-func signatureMarker(mark marker, src protocol.Location, label string, active int64) { +- got := mark.run.env.SignatureHelp(src) +- var gotLabels []string // for better error messages +- if got != nil { +- for _, s := range got.Signatures { +- gotLabels = append(gotLabels, s.Label) +- } +- } +- if label == "" { +- // A null result is expected. +- // (There's no point having a @signatureerr marker +- // because the server handler suppresses all errors.) +- if got != nil && len(gotLabels) > 0 { +- mark.errorf("signatureHelp = %v, want 0 signatures", gotLabels) +- } +- return +- } +- if got == nil || len(got.Signatures) != 1 { +- mark.errorf("signatureHelp = %v, want exactly 1 signature", gotLabels) +- return +- } +- if got := gotLabels[0]; got != label { +- mark.errorf("signatureHelp: got label %q, want %q", got, label) +- } +- gotActiveParameter := int64(-1) // => missing +- if got.ActiveParameter != nil { +- gotActiveParameter = int64(*got.ActiveParameter) +- } +- if gotActiveParameter != active { +- mark.errorf("signatureHelp: got active parameter %d, want %d", gotActiveParameter, active) +- } +-} +- +-// rename returns the new contents of the files that would be modified +-// by renaming the identifier at loc to newName. +-func rename(env *integration.Env, loc protocol.Location, newName string) (map[string][]byte, error) { +- // We call Server.Rename directly, instead of +- // env.Editor.Rename(env.Ctx, loc, newName) +- // to isolate Rename from PrepareRename, and because we don't +- // want to modify the file system in a scenario with multiple +- // @rename markers. +- +- wsedit, err := env.Editor.Server.Rename(env.Ctx, &protocol.RenameParams{ +- TextDocument: protocol.TextDocumentIdentifier{URI: loc.URI}, +- Position: loc.Range.Start, +- NewName: newName, +- }) +- if err != nil { +- return nil, err +- } +- return changedFiles(env, wsedit.DocumentChanges) +-} +- +-// changedFiles applies the given sequence of document changes to the +-// editor buffer content, recording the final contents in the returned map. +-// The actual editor state is not changed. +-// Deleted files are indicated by a content of []byte(nil). +-// +-// See also: +-// - Editor.applyWorkspaceEdit ../integration/fake/editor.go for the +-// implementation of this operation used in normal testing. +-// - client.applyWorkspaceEdit in ../../../cmd/cmd.go for the +-// CLI variant. +-func changedFiles(env *integration.Env, changes []protocol.DocumentChange) (map[string][]byte, error) { +- uriToPath := env.Sandbox.Workdir.URIToPath +- +- // latest maps each updated file name to a mapper holding its +- // current contents, or nil if the file has been deleted. +- latest := make(map[protocol.DocumentURI]*protocol.Mapper) +- +- // read reads a file. It returns an error if the file never +- // existed or was deleted. 
+- read := func(uri protocol.DocumentURI) (*protocol.Mapper, error) { +- if m, ok := latest[uri]; ok { +- if m == nil { +- return nil, fmt.Errorf("read: file %s was deleted", uri) +- } +- return m, nil +- } +- return env.Editor.Mapper(uriToPath(uri)) +- } +- +- // write (over)writes a file. A nil content indicates a deletion. +- write := func(uri protocol.DocumentURI, content []byte) { +- var m *protocol.Mapper +- if content != nil { +- m = protocol.NewMapper(uri, content) +- } +- latest[uri] = m +- } +- +- // Process the sequence of changes. +- for _, change := range changes { +- switch { +- case change.TextDocumentEdit != nil: +- uri := change.TextDocumentEdit.TextDocument.URI +- m, err := read(uri) +- if err != nil { +- return nil, err // missing +- } +- patched, _, err := protocol.ApplyEdits(m, protocol.AsTextEdits(change.TextDocumentEdit.Edits)) +- if err != nil { +- return nil, err // bad edit +- } +- write(uri, patched) +- +- case change.RenameFile != nil: +- old := change.RenameFile.OldURI +- new := change.RenameFile.NewURI +- info, err := os.Stat(old.Path()) +- if err != nil { +- return nil, err +- } +- if info.IsDir() { +- // Walk through all the files in the old directory and copy +- // their content to the new directory. +- // TODO(mkalil): This currently only handles renaming the file's +- // innermost directory. Need to handle renames of outer directories +- // directories when implementing package move refactoring. +- for _, file := range env.ListFiles(old.Path()) { +- oldFile := protocol.URIFromPath(path.Join(old.Path(), path.Base(file))) +- m, err := read(oldFile) +- if err != nil { +- return nil, err // missing +- } +- write(oldFile, nil) +- +- newFile := protocol.URIFromPath(path.Join(new.Path(), path.Base(file))) +- if _, err := read(oldFile); err == nil { +- return nil, fmt.Errorf("RenameFile: destination %s exists", new) +- } +- write(newFile, m.Content) +- } +- } else { +- m, err := read(old) +- if err != nil { +- return nil, err // missing +- } +- write(old, nil) +- +- if _, err := read(old); err == nil { +- return nil, fmt.Errorf("RenameFile: destination %s exists", new) +- } +- write(new, m.Content) +- } +- +- case change.CreateFile != nil: +- uri := change.CreateFile.URI +- if _, err := read(uri); err == nil { +- return nil, fmt.Errorf("CreateFile %s: file exists", uri) +- } +- write(uri, []byte("")) // initially empty +- +- case change.DeleteFile != nil: +- uri := change.DeleteFile.URI +- if _, err := read(uri); err != nil { +- return nil, fmt.Errorf("DeleteFile %s: file does not exist", uri) +- } +- write(uri, nil) +- +- default: +- return nil, fmt.Errorf("invalid DocumentChange") +- } +- } +- +- // Convert into result form. 
+- result := make(map[string][]byte) +- for uri, mapper := range latest { +- var content []byte +- if mapper != nil { +- content = mapper.Content +- } +- result[uriToPath(uri)] = content +- } +- +- return result, nil +-} +- +-func codeActionMarker(mark marker, loc protocol.Location, kind string) { +- if !exactlyOneNamedArg(mark, "edit", "result", "err") { +- return +- } +- +- if end := namedArgFunc(mark, "end", convertNamedArgLocation, protocol.Location{}); end.URI != "" { +- if end.URI != loc.URI { +- mark.errorf("end marker is in a different file (%s)", filepath.Base(loc.URI.Path())) +- return +- } +- loc.Range.End = end.Range.End +- } +- +- var ( +- edit = namedArg(mark, "edit", expect.Identifier("")) +- result = namedArg(mark, "result", expect.Identifier("")) +- wantErr = namedArgFunc(mark, "err", convertStringMatcher, stringMatcher{}) +- ) +- +- changed, err := codeAction(mark.run.env, loc.URI, loc.Range, protocol.CodeActionKind(kind), nil) +- if err != nil && wantErr.empty() { +- mark.errorf("codeAction failed: %v", err) +- return +- } +- +- switch { +- case edit != "": +- g := mark.getGolden(edit) +- checkDiffs(mark, changed, g) +- case result != "": +- g := mark.getGolden(result) +- // Check the file state. +- checkChangedFiles(mark, changed, g) +- case !wantErr.empty(): +- wantErr.checkErr(mark, err) +- default: +- panic("unreachable") +- } +-} +- +-// codeLensesMarker runs the @codelenses() marker, collecting @codelens marks +-// in the current file and comparing with the result of the +-// textDocument/codeLens RPC. +-func codeLensesMarker(mark marker) { +- type codeLens struct { +- Range protocol.Range +- Title string +- } +- +- lenses := mark.run.env.CodeLens(mark.path()) +- var got []codeLens +- for _, lens := range lenses { +- title := "" +- if lens.Command != nil { +- title = lens.Command.Title +- } +- got = append(got, codeLens{lens.Range, title}) +- } +- +- var want []codeLens +- mark.consumeExtraNotes("codelens", actionMarkerFunc(func(_ marker, loc protocol.Location, title string) { +- want = append(want, codeLens{loc.Range, title}) +- })) +- +- for _, s := range [][]codeLens{got, want} { +- sort.Slice(s, func(i, j int) bool { +- li, lj := s[i], s[j] +- if c := protocol.CompareRange(li.Range, lj.Range); c != 0 { +- return c < 0 +- } +- return li.Title < lj.Title +- }) +- } +- +- if diff := cmp.Diff(want, got); diff != "" { +- mark.errorf("codelenses: unexpected diff (-want +got):\n%s", diff) +- } +-} +- +-func documentLinkMarker(mark marker, g *Golden) { +- var b bytes.Buffer +- links := mark.run.env.DocumentLink(mark.path()) +- for _, l := range links { +- if l.Target == nil { +- mark.errorf("%s: nil link target", l.Range) +- continue +- } +- loc := mark.uri().Location(l.Range) +- fmt.Fprintln(&b, mark.run.fmtLocForGolden(loc), *l.Target) +- } +- +- compareGolden(mark, b.Bytes(), g) +-} +- +-// consumeExtraNotes runs the provided func for each extra note with the given +-// name, and deletes all matching notes. +-func (mark marker) consumeExtraNotes(name string, f func(marker)) { +- uri := mark.uri() +- notes := mark.run.extraNotes[uri][name] +- delete(mark.run.extraNotes[uri], name) +- +- for _, note := range notes { +- f(marker{run: mark.run, note: note}) +- } +-} +- +-// quickfixMarker implements the @quickfix(location, regexp, +-// kind, golden) marker. It acts like @diag(location, regexp), to set +-// the expectation of a diagnostic, but then it applies the "quickfix" +-// code action (which must be unique) suggested by the matched diagnostic. 
+-func quickfixMarker(mark marker, loc protocol.Location, re *regexp.Regexp, golden *Golden) { +- loc.Range.End = loc.Range.Start // diagnostics ignore end position. +- // Find and remove the matching diagnostic. +- diag, ok := removeDiagnostic(mark, loc, false, re) +- if !ok { +- mark.errorf("no diagnostic at %v matches %q", loc, re) +- return +- } +- +- // Apply the fix it suggests. +- changed, err := codeAction(mark.run.env, loc.URI, diag.Range, "quickfix", &diag) +- if err != nil { +- mark.errorf("quickfix failed: %v. (Use @quickfixerr for expected errors.)", err) +- return +- } +- +- // Check the file state. +- checkDiffs(mark, changed, golden) +-} +- +-func quickfixErrMarker(mark marker, loc protocol.Location, re *regexp.Regexp, wantErr stringMatcher) { +- loc.Range.End = loc.Range.Start // diagnostics ignore end position. +- // Find and remove the matching diagnostic. +- diag, ok := removeDiagnostic(mark, loc, false, re) +- if !ok { +- mark.errorf("no diagnostic at %v matches %q", loc, re) +- return +- } +- +- // Apply the fix it suggests. +- _, err := codeAction(mark.run.env, loc.URI, diag.Range, "quickfix", &diag) +- wantErr.checkErr(mark, err) +-} +- +-// codeAction executes a textDocument/codeAction request for the specified +-// location and kind. If diag is non-nil, it is used as the code action +-// context. +-// +-// The resulting map contains resulting file contents after the code action is +-// applied. Currently, this function does not support code actions that return +-// edits directly; it only supports code action commands. +-func codeAction(env *integration.Env, uri protocol.DocumentURI, rng protocol.Range, kind protocol.CodeActionKind, diag *protocol.Diagnostic) (map[string][]byte, error) { +- changes, err := codeActionChanges(env, uri, rng, kind, diag) +- if err != nil { +- return nil, err +- } +- return changedFiles(env, changes) +-} +- +-// codeActionChanges executes a textDocument/codeAction request for the +-// specified location and kind, and captures the resulting document changes. +-// If diag is non-nil, it is used as the code action context. +-func codeActionChanges(env *integration.Env, uri protocol.DocumentURI, rng protocol.Range, kind protocol.CodeActionKind, diag *protocol.Diagnostic) ([]protocol.DocumentChange, error) { +- // Collect any server-initiated changes created by workspace/applyEdit. +- // +- // We set up this handler immediately, not right before executing the code +- // action command, so we can assert that neither the codeAction request nor +- // codeAction resolve request cause edits as a side effect (golang/go#71405). +- var changes []protocol.DocumentChange +- restore := env.Editor.Client().SetApplyEditHandler(func(ctx context.Context, wsedit *protocol.WorkspaceEdit) error { +- changes = append(changes, wsedit.DocumentChanges...) +- return nil +- }) +- defer restore() +- +- // Request all code actions that apply to the diagnostic. +- // A production client would set Only=[kind], +- // but we can give a better error if we don't filter. +- params := &protocol.CodeActionParams{ +- TextDocument: protocol.TextDocumentIdentifier{URI: uri}, +- Range: rng, +- Context: protocol.CodeActionContext{ +- Only: []protocol.CodeActionKind{protocol.Empty}, // => all +- }, +- } +- if diag != nil { +- params.Context.Diagnostics = []protocol.Diagnostic{*diag} +- } +- +- actions, err := env.Editor.Server.CodeAction(env.Ctx, params) +- if err != nil { +- return nil, err +- } +- +- // Find the sole candidate CodeAction of exactly the specified kind +- // (e.g. 
refactor.inline.call). +- var candidates []protocol.CodeAction +- for _, act := range actions { +- if act.Kind == kind { +- candidates = append(candidates, act) +- } +- } +- if len(candidates) != 1 { +- var msg bytes.Buffer +- fmt.Fprintf(&msg, "found %d CodeActions of kind %s for this diagnostic, want 1", len(candidates), kind) +- for _, act := range actions { +- fmt.Fprintf(&msg, "\n\tfound %q (%s)", act.Title, act.Kind) +- } +- return nil, errors.New(msg.String()) +- } +- action := candidates[0] +- +- // Apply the codeAction. +- // +- // Spec: +- // "If a code action provides an edit and a command, first the edit is +- // executed and then the command." +- // An action may specify an edit and/or a command, to be +- // applied in that order. But since applyDocumentChanges(env, +- // action.Edit.DocumentChanges) doesn't compose, for now we +- // assert that actions return one or the other. +- +- // Resolve code action edits first if the client has resolve support +- // and the code action has no edits. +- if action.Edit == nil { +- editSupport, err := env.Editor.EditResolveSupport() +- if err != nil { +- return nil, err +- } +- if editSupport { +- resolved, err := env.Editor.Server.ResolveCodeAction(env.Ctx, &action) +- if err != nil { +- return nil, err +- } +- action.Edit = resolved.Edit +- } +- } +- +- if action.Edit != nil { +- if len(action.Edit.Changes) > 0 { +- env.TB.Errorf("internal error: discarding unexpected CodeAction{Kind=%s, Title=%q}.Edit.Changes", action.Kind, action.Title) +- } +- if action.Edit.DocumentChanges != nil { +- if action.Command != nil { +- env.TB.Errorf("internal error: discarding unexpected CodeAction{Kind=%s, Title=%q}.Command", action.Kind, action.Title) +- } +- return action.Edit.DocumentChanges, nil +- } +- } +- +- if action.Command != nil { +- // This is a typical CodeAction command: +- // +- // Title: "Implement error" +- // Command: gopls.apply_fix +- // Arguments: [{"Fix":"stub_methods","URI":".../a.go","Range":...}}] +- // +- // The client makes an ExecuteCommand RPC to the server, +- // which dispatches it to the ApplyFix handler. +- // ApplyFix dispatches to the "stub_methods" fixer (the meat). +- // The server then makes an ApplyEdit RPC to the client, +- // whose WorkspaceEditFunc hook temporarily gathers the edits +- // instead of applying them. +- +- if _, err := env.Editor.Server.ExecuteCommand(env.Ctx, &protocol.ExecuteCommandParams{ +- Command: action.Command.Command, +- Arguments: action.Command.Arguments, +- }); err != nil { +- return nil, err +- } +- return changes, nil // populated as a side effect of ExecuteCommand +- } +- +- return nil, nil +-} +- +-// refsMarker implements the @refs marker. +-func refsMarker(mark marker, src protocol.Location, want ...protocol.Location) { +- refs := func(includeDeclaration bool, want []protocol.Location) error { +- got, err := mark.server().References(mark.ctx(), &protocol.ReferenceParams{ +- TextDocumentPositionParams: protocol.LocationTextDocumentPositionParams(src), +- Context: protocol.ReferenceContext{ +- IncludeDeclaration: includeDeclaration, +- }, +- }) +- if err != nil { +- return err +- } +- +- return compareLocations(mark, got, want) +- } +- +- for _, includeDeclaration := range []bool{false, true} { +- // Ignore first 'want' location if we didn't request the declaration. +- // TODO(adonovan): don't assume a single declaration: +- // there may be >1 if corresponding methods are considered. 
+- want := want +- if !includeDeclaration && len(want) > 0 { +- want = want[1:] +- } +- if err := refs(includeDeclaration, want); err != nil { +- mark.errorf("refs(includeDeclaration=%t) failed: %v", +- includeDeclaration, err) +- } +- } +-} +- +-// implementationMarker implements the @implementation marker. +-func implementationMarker(mark marker, src protocol.Location, want ...protocol.Location) { +- wantErr := namedArgFunc(mark, "err", convertStringMatcher, stringMatcher{}) +- +- got, err := mark.server().Implementation(mark.ctx(), &protocol.ImplementationParams{ +- TextDocumentPositionParams: protocol.LocationTextDocumentPositionParams(src), +- }) +- if err != nil && wantErr.empty() { +- mark.errorf("implementation at %s failed: %v", src, err) +- return +- } +- if !wantErr.empty() { +- wantErr.checkErr(mark, err) +- return +- } +- if err := compareLocations(mark, got, want); err != nil { +- mark.errorf("implementation: %v", err) +- } +-} +- +-func mcpToolMarker(mark marker, tool string, rawArgs string) { +- if !mark.run.test.mcp { +- mark.errorf("mcp not enabled: add -mcp") +- return +- } +- args := make(map[string]any) +- if err := json.Unmarshal([]byte(rawArgs), &args); err != nil { +- mark.errorf("fail to unmarshal arguments to map[string]any: %v", err) +- return +- } +- +- // substitutePaths replaces instances of $WORKDIR in string or []string values with +- // the actual working directory. +- var substitutePaths func(any) any +- substitutePaths = func(v any) any { +- switch v := v.(type) { +- case string: +- return strings.ReplaceAll(v, "$WORKDIR", mark.run.env.Sandbox.Workdir.RootURI().Path()) +- +- case []any: +- for i, e := range v { +- if _, ok := e.(string); ok { +- v[i] = substitutePaths(e).(string) +- } +- } +- } +- return v +- } +- // Hack: replace '$WORKDIR' in string arguments with the actual sandbox +- // working directory. +- for k, v := range args { +- args[k] = substitutePaths(v) +- } +- +- if loc := namedArg(mark, "location", protocol.Location{}); loc != (protocol.Location{}) { +- args["location"] = loc +- } +- +- res, err := mark.run.env.MCPSession.CallTool(mark.ctx(), &mcp.CallToolParams{ +- Name: tool, +- Arguments: args, +- }) +- if err != nil { +- mark.errorf("failed to call mcp tool: %v", err) +- return +- } +- +- var buf bytes.Buffer +- for i, c := range res.Content { +- if c.Type != "text" { +- mark.errorf("unsupported return content[%v] type: %s", i, c.Type) +- continue +- } +- buf.WriteString(c.Text) +- } +- if !bytes.HasSuffix(buf.Bytes(), []byte{'\n'}) { +- buf.WriteString("\n") // all golden content is newline terminated +- } +- +- got := buf.String() +- // For portability, replace all (potential) filepath separators with "/". +- got = strings.ReplaceAll(got, string(filepath.Separator), "/") +- // To ensure consistent unified diff output, the working directory path +- // is replaced with "$WORKDIR". This addresses cases where MCP tools +- // include absolute file paths in generated diffs. 
+- got = strings.ReplaceAll(got, filepath.ToSlash(mark.run.env.Sandbox.Workdir.RootURI().Path()), "$WORKDIR") +- +- output := namedArg(mark, "output", expect.Identifier("")) +- golden := mark.getGolden(output) +- want, _ := golden.Get(mark.T(), "", []byte(got)) +- if diff := compare.Text(string(want), got); diff != "" { +- mark.errorf("unexpected mcp tools call %s return: diff:\n%s", tool, diff) +- } +-} +- +-func incomingCallsMarker(mark marker, src protocol.Location, want ...protocol.Location) { +- getCalls := func(item protocol.CallHierarchyItem) ([]protocol.Location, error) { +- calls, err := mark.server().IncomingCalls(mark.ctx(), &protocol.CallHierarchyIncomingCallsParams{Item: item}) +- if err != nil { +- return nil, err +- } +- var locs []protocol.Location +- for _, call := range calls { +- locs = append(locs, call.From.URI.Location(call.From.Range)) +- } +- return locs, nil +- } +- callHierarchy(mark, src, getCalls, want) +-} +- +-func outgoingCallsMarker(mark marker, src protocol.Location, want ...protocol.Location) { +- getCalls := func(item protocol.CallHierarchyItem) ([]protocol.Location, error) { +- calls, err := mark.server().OutgoingCalls(mark.ctx(), &protocol.CallHierarchyOutgoingCallsParams{Item: item}) +- if err != nil { +- return nil, err +- } +- var locs []protocol.Location +- for _, call := range calls { +- locs = append(locs, call.To.URI.Location(call.To.Range)) +- } +- return locs, nil +- } +- callHierarchy(mark, src, getCalls, want) +-} +- +-type callHierarchyFunc = func(protocol.CallHierarchyItem) ([]protocol.Location, error) +- +-func callHierarchy(mark marker, src protocol.Location, getCalls callHierarchyFunc, want []protocol.Location) { +- items, err := mark.server().PrepareCallHierarchy(mark.ctx(), &protocol.CallHierarchyPrepareParams{ +- TextDocumentPositionParams: protocol.LocationTextDocumentPositionParams(src), +- }) +- if err != nil { +- mark.errorf("PrepareCallHierarchy failed: %v", err) +- return +- } +- if nitems := len(items); nitems != 1 { +- mark.errorf("PrepareCallHierarchy returned %d items, want exactly 1", nitems) +- return +- } +- item := items[0] +- if loc := item.URI.Location(item.Range); loc != src { +- mark.errorf("PrepareCallHierarchy found call %v, want %v", loc, src) +- return +- } +- calls, err := getCalls(items[0]) +- if err != nil { +- mark.errorf("call hierarchy failed: %v", err) +- return +- } +- if err := compareLocations(mark, calls, want); err != nil { +- mark.errorf("%s failed: %v", mark.note.Name, err) +- } +-} +- +-func inlayhintsMarker(mark marker, g *Golden) { +- hints := mark.run.env.InlayHints(mark.path()) +- +- // Map inlay hints to text edits. 
+- edits := make([]protocol.TextEdit, len(hints)) +- for i, hint := range hints { +- var paddingLeft, paddingRight string +- if hint.PaddingLeft { +- paddingLeft = " " +- } +- if hint.PaddingRight { +- paddingRight = " " +- } +- edits[i] = protocol.TextEdit{ +- Range: protocol.Range{Start: hint.Position, End: hint.Position}, +- NewText: fmt.Sprintf("<%s%s%s>", paddingLeft, hint.Label[0].Value, paddingRight), +- } +- } +- +- m := mark.mapper() +- got, _, err := protocol.ApplyEdits(m, edits) +- if err != nil { +- mark.errorf("ApplyProtocolEdits: %v", err) +- return +- } +- +- compareGolden(mark, got, g) +-} +- +-func prepareRenameMarker(mark marker, src protocol.Location, placeholder string) { +- params := &protocol.PrepareRenameParams{ +- TextDocumentPositionParams: protocol.LocationTextDocumentPositionParams(src), +- } +- got, err := mark.server().PrepareRename(mark.ctx(), params) +- if err != nil { +- mark.T().Fatal(err) +- } +- if placeholder == "" { +- if got != nil { +- mark.errorf("PrepareRename(...) = %v, want nil", got) +- } +- return +- } +- +- want := &protocol.PrepareRenameResult{ +- Placeholder: placeholder, +- } +- if span := namedArg(mark, "span", protocol.Location{}); span != (protocol.Location{}) { +- want.Range = span.Range +- } else { +- got.Range = protocol.Range{} // ignore Range +- } +- if diff := cmp.Diff(want, got); diff != "" { +- mark.errorf("mismatching PrepareRename result:\n%s", diff) +- } +-} +- +-func subtypesMarker(mark marker, src protocol.Location, want ...protocol.Location) { +- typeHierarchy(mark, src, want, func(item protocol.TypeHierarchyItem) ([]protocol.TypeHierarchyItem, error) { +- return mark.server().Subtypes(mark.ctx(), &protocol.TypeHierarchySubtypesParams{Item: item}) +- }) +-} +- +-func supertypesMarker(mark marker, src protocol.Location, want ...protocol.Location) { +- typeHierarchy(mark, src, want, func(item protocol.TypeHierarchyItem) ([]protocol.TypeHierarchyItem, error) { +- return mark.server().Supertypes(mark.ctx(), &protocol.TypeHierarchySupertypesParams{Item: item}) +- }) +-} +- +-type typeHierarchyFunc = func(item protocol.TypeHierarchyItem) ([]protocol.TypeHierarchyItem, error) +- +-func typeHierarchy(mark marker, src protocol.Location, want []protocol.Location, get typeHierarchyFunc) { +- items, err := mark.server().PrepareTypeHierarchy(mark.ctx(), &protocol.TypeHierarchyPrepareParams{ +- TextDocumentPositionParams: protocol.LocationTextDocumentPositionParams(src), +- }) +- if err != nil { +- mark.errorf("PrepareTypeHierarchy failed: %v", err) +- return +- } +- if nitems := len(items); nitems != 1 { +- mark.errorf("PrepareTypeHierarchy returned %d items, want exactly 1", nitems) +- return +- } +- if loc := (protocol.Location{URI: items[0].URI, Range: items[0].Range}); loc != src { +- mark.errorf("PrepareTypeHierarchy found type %v, want %v", loc, src) +- return +- } +- items, err = get(items[0]) +- if err != nil { +- mark.errorf("type hierarchy failed: %v", err) +- return +- } +- got := []protocol.Location{} // non-nil; cmp.Diff cares +- for _, item := range items { +- got = append(got, item.URI.Location(item.Range)) +- } +- if d := cmp.Diff(want, got); d != "" { +- mark.errorf("type hierarchy: unexpected results (-want +got):\n%s", d) +- } +-} +- +-// symbolMarker implements the @symbol marker. +-func symbolMarker(mark marker, golden *Golden) { +- // Retrieve information about all symbols in this file. 
+- symbols, err := mark.server().DocumentSymbol(mark.ctx(), &protocol.DocumentSymbolParams{ +- TextDocument: protocol.TextDocumentIdentifier{URI: mark.uri()}, +- }) +- if err != nil { +- mark.errorf("DocumentSymbol request failed: %v", err) +- return +- } +- +- // Format symbols one per line, sorted (in effect) by first column, a dotted name. +- var lines []string +- for _, symbol := range symbols { +- // Each result element is a union of (legacy) +- // SymbolInformation and (new) DocumentSymbol, +- // so we ascertain which one and then transcode. +- data, err := json.Marshal(symbol) +- if err != nil { +- mark.T().Fatal(err) +- } +- if _, ok := symbol.(map[string]any)["location"]; ok { +- // This case is not reached because Editor initialization +- // enables HierarchicalDocumentSymbolSupport. +- // TODO(adonovan): test this too. +- var sym protocol.SymbolInformation +- if err := json.Unmarshal(data, &sym); err != nil { +- mark.T().Fatal(err) +- } +- mark.errorf("fake Editor doesn't support SymbolInformation") +- +- } else { +- var sym protocol.DocumentSymbol // new hierarchical hotness +- if err := json.Unmarshal(data, &sym); err != nil { +- mark.T().Fatal(err) +- } +- +- // Print each symbol in the response tree. +- var visit func(sym protocol.DocumentSymbol, prefix []string) +- visit = func(sym protocol.DocumentSymbol, prefix []string) { +- var out strings.Builder +- out.WriteString(strings.Join(prefix, ".")) +- fmt.Fprintf(&out, " %q", sym.Detail) +- if delta := sym.Range.End.Line - sym.Range.Start.Line; delta > 0 { +- fmt.Fprintf(&out, " +%d lines", delta) +- } +- lines = append(lines, out.String()) +- +- for _, child := range sym.Children { +- visit(child, append(prefix, child.Name)) +- } +- } +- visit(sym, []string{sym.Name}) +- } +- } +- sort.Strings(lines) +- lines = append(lines, "") // match trailing newline in .txtar file +- got := []byte(strings.Join(lines, "\n")) +- +- // Compare with golden. +- want, ok := golden.Get(mark.T(), "", got) +- if !ok { +- mark.errorf("%s: missing golden file @%s", mark.note.Name, golden.id) +- } else if diff := cmp.Diff(string(got), string(want)); diff != "" { +- mark.errorf("%s: unexpected output: got:\n%s\nwant:\n%s\ndiff:\n%s", +- mark.note.Name, got, want, diff) +- } +-} +- +-// compareLocations returns an error message if got and want are not +-// the same set of locations. The marker is used only for fmtLoc. +-func compareLocations(mark marker, got, want []protocol.Location) error { +- toStrings := func(locs []protocol.Location) []string { +- strs := make([]string, len(locs)) +- for i, loc := range locs { +- strs[i] = mark.run.fmtLoc(loc) +- } +- sort.Strings(strs) +- return strs +- } +- if diff := cmp.Diff(toStrings(want), toStrings(got)); diff != "" { +- return fmt.Errorf("incorrect result locations: (got %d, want %d):\n%s", +- len(got), len(want), diff) +- } +- return nil +-} +- +-func workspaceSymbolMarker(mark marker, query string, golden *Golden) { +- params := &protocol.WorkspaceSymbolParams{ +- Query: query, +- } +- +- gotSymbols, err := mark.server().Symbol(mark.ctx(), params) +- if err != nil { +- mark.errorf("Symbol(%q) failed: %v", query, err) +- return +- } +- var got bytes.Buffer +- for _, s := range gotSymbols { +- // Omit the txtar position of the symbol location; otherwise edits to the +- // txtar archive lead to unexpected failures. 
+- loc := mark.run.fmtLocForGolden(s.Location) +- if loc == "" { +- loc = "<unknown>" +- } +- fmt.Fprintf(&got, "%s %s %s\n", loc, s.Name, s.Kind) +- } +- +- compareGolden(mark, got.Bytes(), golden) +-} +- +-// compareGolden compares the content of got with that of g.Get(""), reporting +-// errors on any mismatch. +-// +-// TODO(rfindley): use this helper in more places. +-func compareGolden(mark marker, got []byte, g *Golden) { +- want, ok := g.Get(mark.T(), "", got) +- if !ok { +- mark.errorf("missing golden file @%s", g.id) +- return +- } +- // Normalize newline termination: archive files (i.e. Golden content) can't +- // contain non-newline terminated files, except in the special case where the +- // file is completely empty. +- // +- // Note that txtar partitions a contiguous byte slice, so we must copy before +- // appending. +- normalize := func(s []byte) []byte { +- if n := len(s); n > 0 && s[n-1] != '\n' { +- s = append(s[:n:n], '\n') // don't mutate array +- } +- return s +- } +- got = normalize(got) +- want = normalize(want) +- if diff := compare.Bytes(want, got); diff != "" { +- mark.errorf("%s does not match @%s:\n%s", mark.note.Name, g.id, diff) +- } +-} +diff -urN a/gopls/internal/test/marker/testdata/callhierarchy/callhierarchy.txt b/gopls/internal/test/marker/testdata/callhierarchy/callhierarchy.txt +--- a/gopls/internal/test/marker/testdata/callhierarchy/callhierarchy.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/callhierarchy/callhierarchy.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,96 +0,0 @@ +-This test checks call hierarchy queries. +- +--ignore_extra_diags due to the initialization cycle. +- +--- flags -- +--ignore_extra_diags +- +--- go.mod -- +-module golang.org/lsptests/callhierarchy +- +--- incoming/incoming.go -- +-package incoming +- +-import "golang.org/lsptests/callhierarchy" +- +-// A is exported to test incoming calls across packages +-func A() { //@loc(incomingA, "A") +- callhierarchy.D() +-} +- +--- outgoing/outgoing.go -- +-package outgoing +- +-// B is exported to test outgoing calls across packages +-func B() { //@loc(outgoingB, "B") +-} +- +--- hierarchy.go -- +-package callhierarchy //@loc(hPkg, "callhierarchy") +- +-import "golang.org/lsptests/callhierarchy/outgoing" +- +-func a() { //@loc(hA, "a") +- D() +-} +- +-func b() { //@loc(hB, "b") +- D() +-} +- +-// C is an exported function +-func C() { //@loc(hC, "C") +- D() +- D() +-} +- +-// To test hierarchy across function literals +-var x = func() { D() } //@loc(hX, "x"),loc(hXGlobal, "x") +- +-// D is exported to test incoming/outgoing calls across packages +-func D() { //@ loc(hD, "D"), incomingcalls(hD, hA, hB, hC, hXGlobal, incomingA), outgoingcalls(hD, hE, hF, hG, hH, hI, Generic, outgoingB) +- e() +- x() +- F() +- outgoing.B() +- foo := func() {} //@ loc(hFoo, "foo"), incomingcalls(hFoo, hD), outgoingcalls(hFoo) +- foo() +- +- func() { +- g() +- }() +- +- var i Interface = impl{} +- i.H() +- i.I() +- +- s := Struct{} +- s.J() +- s.K() +- +- Generic[string]() +-} +- +-func e() {} //@loc(hE, "e") +- +-// F is an exported function +-func F() {} //@loc(hF, "F") +- +-func g() {} //@loc(hG, "g") +- +-type Interface interface { +- H() //@loc(hH, "H") +- I() //@loc(hI, "I") +-} +- +-type impl struct{} +- +-func (i impl) H() {} +-func (i impl) I() {} +- +-type Struct struct { +- J func() //@loc(hJ, "J") +- K func() //@loc(hK, "K") +-} +- +-func Generic[T any]() //@loc(Generic, "Generic") +diff -urN 
a/gopls/internal/test/marker/testdata/callhierarchy/issue64451.txt b/gopls/internal/test/marker/testdata/callhierarchy/issue64451.txt +--- a/gopls/internal/test/marker/testdata/callhierarchy/issue64451.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/callhierarchy/issue64451.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,51 +0,0 @@ +-This test checks call hierarchy queries involving lambdas, which are +-treated as mere statements of their enclosing name function, since +-we can't track calls to them. +- +-Calls from a global var decl are reported at the ValueSpec.Names. +- +-See golang/go#64451. +- +--- go.mod -- +-module example.com +-go 1.0 +- +--- a/a.go -- +-package a +- +-func Foo() { //@ loc(Foo, "Foo") +- bar() +-} +- +-func bar() { //@ loc(bar, "bar") +- go func() { baz() }() +-} +- +-func baz() { //@ loc(baz, "baz") +- bluh() +-} +- +-func bluh() { //@ loc(bluh, "bluh") +- print() +-} +- +-var _ = func() int { //@ loc(global, "_") +- baz() +- return 0 +-}() +- +-func init() { //@ loc(init, "init") +- baz() +-} +- +-//@ outgoingcalls(Foo, bar) +-//@ outgoingcalls(bar, baz) +-//@ outgoingcalls(baz, bluh) +-//@ outgoingcalls(bluh) +-//@ outgoingcalls(init, baz) +- +-//@ incomingcalls(Foo) +-//@ incomingcalls(bar, Foo) +-//@ incomingcalls(baz, bar, global, init) +-//@ incomingcalls(bluh, baz) +-//@ incomingcalls(init) +diff -urN a/gopls/internal/test/marker/testdata/callhierarchy/issue66923.txt b/gopls/internal/test/marker/testdata/callhierarchy/issue66923.txt +--- a/gopls/internal/test/marker/testdata/callhierarchy/issue66923.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/callhierarchy/issue66923.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,15 +0,0 @@ +-Regression test for a crash (#66923) in outgoing calls +-to a built-in function (unsafe.Slice). +- +--- go.mod -- +-module example.com +-go 1.17 +- +--- a/a.go -- +-package a +- +-import "unsafe" +- +-func A() []int { //@ loc(A, "A"), outgoingcalls(A, UNSAFE) +- return unsafe.Slice(new(int), 1) +-} +diff -urN a/gopls/internal/test/marker/testdata/callhierarchy/issue75230.txt b/gopls/internal/test/marker/testdata/callhierarchy/issue75230.txt +--- a/gopls/internal/test/marker/testdata/callhierarchy/issue75230.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/callhierarchy/issue75230.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,16 +0,0 @@ +-Regression test for a crash (golang/go#75230) in outgoing calls +-to a built-in method (error.Error). +- +-We (arbitrarily) don't show calls to built-ins without a package, +-such as error.Error, hence the empty result asserted below. +- +--- go.mod -- +-module example.com +-go 1.17 +- +--- a/a.go -- +-package a +- +-func A(err error) string { //@ loc(A, "A"), outgoingcalls(A) +- return err.Error() +-} +diff -urN a/gopls/internal/test/marker/testdata/codeaction/add_struct_tags.txt b/gopls/internal/test/marker/testdata/codeaction/add_struct_tags.txt +--- a/gopls/internal/test/marker/testdata/codeaction/add_struct_tags.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/codeaction/add_struct_tags.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,31 +0,0 @@ +-This test checks the behavior of the 'Add struct tags' code action. 
+- +--- flags -- +--ignore_extra_diags +- +--- addtags.go -- +-package addtags +- +-type A struct { +- x int //@codeaction("x", "refactor.rewrite.addTags", edit=singleline) +- y int //@codeaction(re`(?s)y.*.z int`, "refactor.rewrite.addTags", edit=twolines) +- z int //@codeaction(re`()n`, "refactor.rewrite.addTags", edit=entirestruct) +-} +--- @entirestruct/addtags.go -- +-@@ -4,3 +4,3 @@ +-- x int //@codeaction("x", "refactor.rewrite.addTags", edit=singleline) +-- y int //@codeaction(re`(?s)y.*.z int`, "refactor.rewrite.addTags", edit=twolines) +-- z int //@codeaction(re`()n`, "refactor.rewrite.addTags", edit=entirestruct) +-+ x int `json:"x"` //@codeaction("x", "refactor.rewrite.addTags", edit=singleline) +-+ y int `json:"y"` //@codeaction(re`(?s)y.*.z int`, "refactor.rewrite.addTags", edit=twolines) +-+ z int `json:"z"` //@codeaction(re`()n`, "refactor.rewrite.addTags", edit=entirestruct) +--- @singleline/addtags.go -- +-@@ -4 +4 @@ +-- x int //@codeaction("x", "refactor.rewrite.addTags", edit=singleline) +-+ x int `json:"x"` //@codeaction("x", "refactor.rewrite.addTags", edit=singleline) +--- @twolines/addtags.go -- +-@@ -5,2 +5,2 @@ +-- y int //@codeaction(re`(?s)y.*.z int`, "refactor.rewrite.addTags", edit=twolines) +-- z int //@codeaction(re`()n`, "refactor.rewrite.addTags", edit=entirestruct) +-+ y int `json:"y"` //@codeaction(re`(?s)y.*.z int`, "refactor.rewrite.addTags", edit=twolines) +-+ z int `json:"z"` //@codeaction(re`()n`, "refactor.rewrite.addTags", edit=entirestruct) +diff -urN a/gopls/internal/test/marker/testdata/codeaction/addtest.txt b/gopls/internal/test/marker/testdata/codeaction/addtest.txt +--- a/gopls/internal/test/marker/testdata/codeaction/addtest.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/codeaction/addtest.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,1542 +0,0 @@ +-This test checks the behavior of the 'add test for FUNC' code action. +- +--- flags -- +--ignore_extra_diags +- +--- go.mod -- +-module golang.org/lsptests/addtest +- +-go 1.18 +- +--- copyrightandbuildconstraint/copyrightandbuildconstraint.go -- +-// Copyright 2020 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-//go:build go1.18 +- +-// Package main is for lsp test. +-package main +- +-func Foo(in string) string {return in} //@codeaction("Foo", "source.addTest", edit=with_copyright_build_constraint) +- +--- @with_copyright_build_constraint/copyrightandbuildconstraint/copyrightandbuildconstraint_test.go -- +-@@ -0,0 +1,32 @@ +-+// Copyright 2020 The Go Authors. All rights reserved. +-+// Use of this source code is governed by a BSD-style +-+// license that can be found in the LICENSE file. +-+ +-+//go:build go1.18 +-+ +-+package main_test +-+ +-+import( +-+ "golang.org/lsptests/addtest/copyrightandbuildconstraint" +-+ "testing" +-+) +-+ +-+func TestFoo(t *testing.T) { +-+ tests := []struct { +-+ name string // description of this test case +-+ // Named input parameters for target function. +-+ in string +-+ want string +-+ }{ +-+ // TODO: Add test cases. +-+ } +-+ for _, tt := range tests { +-+ t.Run(tt.name, func(t *testing.T) { +-+ got := main.Foo(tt.in) +-+ // TODO: update the condition below to compare got with tt.want. +-+ if true { +-+ t.Errorf("Foo() = %v, want %v", got, tt.want) +-+ } +-+ }) +-+ } +-+} +--- buildconstraint/buildconstraint.go -- +-//go:build go1.18 +- +-// Package copyright is for lsp test. 
+-package copyright +- +-func Foo(in string) string {return in} //@codeaction("Foo", "source.addTest", edit=with_build_constraint) +- +--- @with_build_constraint/buildconstraint/buildconstraint_test.go -- +-@@ -0,0 +1,28 @@ +-+//go:build go1.18 +-+ +-+package copyright_test +-+ +-+import( +-+ "golang.org/lsptests/addtest/buildconstraint" +-+ "testing" +-+) +-+ +-+func TestFoo(t *testing.T) { +-+ tests := []struct { +-+ name string // description of this test case +-+ // Named input parameters for target function. +-+ in string +-+ want string +-+ }{ +-+ // TODO: Add test cases. +-+ } +-+ for _, tt := range tests { +-+ t.Run(tt.name, func(t *testing.T) { +-+ got := copyright.Foo(tt.in) +-+ // TODO: update the condition below to compare got with tt.want. +-+ if true { +-+ t.Errorf("Foo() = %v, want %v", got, tt.want) +-+ } +-+ }) +-+ } +-+} +--- missingtestfile/missingtestfile.go -- +-package main +- +-type Bar struct {} +- +-type foo struct {} +- +-func ExportedFunction(in string) string {return in} //@codeaction("ExportedFunction", "source.addTest", edit=missing_test_file_exported_function) +- +-func UnexportedInputParam(in string, f foo) string {return in} //@codeaction("UnexportedInputParam", "source.addTest", edit=missing_test_file_function_unexported_input) +- +-func unexportedFunction(in string) string {return in} //@codeaction("unexportedFunction", "source.addTest", edit=missing_test_file_unexported_function) +- +-func (*Bar) ExportedMethod(in string) string {return in} //@codeaction("ExportedMethod", "source.addTest", edit=missing_test_file_exported_recv_exported_method) +- +-func (*Bar) UnexportedInputParam(in string, f foo) string {return in} //@codeaction("UnexportedInputParam", "source.addTest", edit=missing_test_file_method_unexported_input) +- +-func (*foo) ExportedMethod(in string) string {return in} //@codeaction("ExportedMethod", "source.addTest", edit=missing_test_file_unexported_recv) +- +--- @missing_test_file_exported_function/missingtestfile/missingtestfile_test.go -- +-@@ -0,0 +1,26 @@ +-+package main_test +-+ +-+import( +-+ "golang.org/lsptests/addtest/missingtestfile" +-+ "testing" +-+) +-+ +-+func TestExportedFunction(t *testing.T) { +-+ tests := []struct { +-+ name string // description of this test case +-+ // Named input parameters for target function. +-+ in string +-+ want string +-+ }{ +-+ // TODO: Add test cases. +-+ } +-+ for _, tt := range tests { +-+ t.Run(tt.name, func(t *testing.T) { +-+ got := main.ExportedFunction(tt.in) +-+ // TODO: update the condition below to compare got with tt.want. +-+ if true { +-+ t.Errorf("ExportedFunction() = %v, want %v", got, tt.want) +-+ } +-+ }) +-+ } +-+} +--- @missing_test_file_exported_recv_exported_method/missingtestfile/missingtestfile_test.go -- +-@@ -0,0 +1,28 @@ +-+package main_test +-+ +-+import( +-+ "golang.org/lsptests/addtest/missingtestfile" +-+ "testing" +-+) +-+ +-+func TestBar_ExportedMethod(t *testing.T) { +-+ tests := []struct { +-+ name string // description of this test case +-+ // Named input parameters for target function. +-+ in string +-+ want string +-+ }{ +-+ // TODO: Add test cases. +-+ } +-+ for _, tt := range tests { +-+ t.Run(tt.name, func(t *testing.T) { +-+ // TODO: construct the receiver type. +-+ var b main.Bar +-+ got := b.ExportedMethod(tt.in) +-+ // TODO: update the condition below to compare got with tt.want. 
+-+ if true { +-+ t.Errorf("ExportedMethod() = %v, want %v", got, tt.want) +-+ } +-+ }) +-+ } +-+} +--- @missing_test_file_function_unexported_input/missingtestfile/missingtestfile_test.go -- +-@@ -0,0 +1,24 @@ +-+package main +-+ +-+import "testing" +-+ +-+func TestUnexportedInputParam(t *testing.T) { +-+ tests := []struct { +-+ name string // description of this test case +-+ // Named input parameters for target function. +-+ in string +-+ f foo +-+ want string +-+ }{ +-+ // TODO: Add test cases. +-+ } +-+ for _, tt := range tests { +-+ t.Run(tt.name, func(t *testing.T) { +-+ got := UnexportedInputParam(tt.in, tt.f) +-+ // TODO: update the condition below to compare got with tt.want. +-+ if true { +-+ t.Errorf("UnexportedInputParam() = %v, want %v", got, tt.want) +-+ } +-+ }) +-+ } +-+} +--- @missing_test_file_method_unexported_input/missingtestfile/missingtestfile_test.go -- +-@@ -0,0 +1,26 @@ +-+package main +-+ +-+import "testing" +-+ +-+func TestBar_UnexportedInputParam(t *testing.T) { +-+ tests := []struct { +-+ name string // description of this test case +-+ // Named input parameters for target function. +-+ in string +-+ f foo +-+ want string +-+ }{ +-+ // TODO: Add test cases. +-+ } +-+ for _, tt := range tests { +-+ t.Run(tt.name, func(t *testing.T) { +-+ // TODO: construct the receiver type. +-+ var b Bar +-+ got := b.UnexportedInputParam(tt.in, tt.f) +-+ // TODO: update the condition below to compare got with tt.want. +-+ if true { +-+ t.Errorf("UnexportedInputParam() = %v, want %v", got, tt.want) +-+ } +-+ }) +-+ } +-+} +--- @missing_test_file_unexported_function/missingtestfile/missingtestfile_test.go -- +-@@ -0,0 +1,23 @@ +-+package main +-+ +-+import "testing" +-+ +-+func Test_unexportedFunction(t *testing.T) { +-+ tests := []struct { +-+ name string // description of this test case +-+ // Named input parameters for target function. +-+ in string +-+ want string +-+ }{ +-+ // TODO: Add test cases. +-+ } +-+ for _, tt := range tests { +-+ t.Run(tt.name, func(t *testing.T) { +-+ got := unexportedFunction(tt.in) +-+ // TODO: update the condition below to compare got with tt.want. +-+ if true { +-+ t.Errorf("unexportedFunction() = %v, want %v", got, tt.want) +-+ } +-+ }) +-+ } +-+} +--- @missing_test_file_unexported_recv/missingtestfile/missingtestfile_test.go -- +-@@ -0,0 +1,25 @@ +-+package main +-+ +-+import "testing" +-+ +-+func Test_foo_ExportedMethod(t *testing.T) { +-+ tests := []struct { +-+ name string // description of this test case +-+ // Named input parameters for target function. +-+ in string +-+ want string +-+ }{ +-+ // TODO: Add test cases. +-+ } +-+ for _, tt := range tests { +-+ t.Run(tt.name, func(t *testing.T) { +-+ // TODO: construct the receiver type. +-+ var f foo +-+ got := f.ExportedMethod(tt.in) +-+ // TODO: update the condition below to compare got with tt.want. 
+-+ if true { +-+ t.Errorf("ExportedMethod() = %v, want %v", got, tt.want) +-+ } +-+ }) +-+ } +-+} +--- xpackagetestfile/xpackagetestfile.go -- +-package main +- +-func ExportedFunction(in string) string {return in} //@codeaction("ExportedFunction", "source.addTest", edit=xpackage_exported_function) +-func unexportedFunction(in string) string {return in} //@codeaction("unexportedFunction", "source.addTest", edit=xpackage_unexported_function) +- +-type Bar struct {} +- +-func (*Bar) ExportedMethod(in string) string {return in} //@codeaction("ExportedMethod", "source.addTest", edit=xpackage_exported_recv_exported_method) +-func (*Bar) unexportedMethod(in string) string {return in} //@codeaction("unexportedMethod", "source.addTest", edit=xpackage_exported_recv_unexported_method) +- +-type foo struct {} +- +-func (*foo) ExportedMethod(in string) string {return in} //@codeaction("ExportedMethod", "source.addTest", edit=xpackage_unexported_recv_exported_method) +-func (*foo) unexportedMethod(in string) string {return in} //@codeaction("unexportedMethod", "source.addTest", edit=xpackage_unexported_recv_unexported_method) +- +--- xpackagetestfile/xpackagetestfile_test.go -- +-package main +- +--- @xpackage_exported_function/xpackagetestfile/xpackagetestfile_test.go -- +-@@ -3 +3,22 @@ +-+import "testing" +-+ +-+ +-+func TestExportedFunction(t *testing.T) { +-+ tests := []struct { +-+ name string // description of this test case +-+ // Named input parameters for target function. +-+ in string +-+ want string +-+ }{ +-+ // TODO: Add test cases. +-+ } +-+ for _, tt := range tests { +-+ t.Run(tt.name, func(t *testing.T) { +-+ got := ExportedFunction(tt.in) +-+ // TODO: update the condition below to compare got with tt.want. +-+ if true { +-+ t.Errorf("ExportedFunction() = %v, want %v", got, tt.want) +-+ } +-+ }) +-+ } +-+} +--- @xpackage_unexported_function/xpackagetestfile/xpackagetestfile_test.go -- +-@@ -3 +3,22 @@ +-+import "testing" +-+ +-+ +-+func Test_unexportedFunction(t *testing.T) { +-+ tests := []struct { +-+ name string // description of this test case +-+ // Named input parameters for target function. +-+ in string +-+ want string +-+ }{ +-+ // TODO: Add test cases. +-+ } +-+ for _, tt := range tests { +-+ t.Run(tt.name, func(t *testing.T) { +-+ got := unexportedFunction(tt.in) +-+ // TODO: update the condition below to compare got with tt.want. +-+ if true { +-+ t.Errorf("unexportedFunction() = %v, want %v", got, tt.want) +-+ } +-+ }) +-+ } +-+} +--- @xpackage_exported_recv_exported_method/xpackagetestfile/xpackagetestfile_test.go -- +-@@ -3 +3,24 @@ +-+import "testing" +-+ +-+ +-+func TestBar_ExportedMethod(t *testing.T) { +-+ tests := []struct { +-+ name string // description of this test case +-+ // Named input parameters for target function. +-+ in string +-+ want string +-+ }{ +-+ // TODO: Add test cases. +-+ } +-+ for _, tt := range tests { +-+ t.Run(tt.name, func(t *testing.T) { +-+ // TODO: construct the receiver type. +-+ var b Bar +-+ got := b.ExportedMethod(tt.in) +-+ // TODO: update the condition below to compare got with tt.want. +-+ if true { +-+ t.Errorf("ExportedMethod() = %v, want %v", got, tt.want) +-+ } +-+ }) +-+ } +-+} +--- @xpackage_exported_recv_unexported_method/xpackagetestfile/xpackagetestfile_test.go -- +-@@ -3 +3,24 @@ +-+import "testing" +-+ +-+ +-+func TestBar_unexportedMethod(t *testing.T) { +-+ tests := []struct { +-+ name string // description of this test case +-+ // Named input parameters for target function. 
+-+ in string +-+ want string +-+ }{ +-+ // TODO: Add test cases. +-+ } +-+ for _, tt := range tests { +-+ t.Run(tt.name, func(t *testing.T) { +-+ // TODO: construct the receiver type. +-+ var b Bar +-+ got := b.unexportedMethod(tt.in) +-+ // TODO: update the condition below to compare got with tt.want. +-+ if true { +-+ t.Errorf("unexportedMethod() = %v, want %v", got, tt.want) +-+ } +-+ }) +-+ } +-+} +--- @xpackage_unexported_recv_exported_method/xpackagetestfile/xpackagetestfile_test.go -- +-@@ -3 +3,24 @@ +-+import "testing" +-+ +-+ +-+func Test_foo_ExportedMethod(t *testing.T) { +-+ tests := []struct { +-+ name string // description of this test case +-+ // Named input parameters for target function. +-+ in string +-+ want string +-+ }{ +-+ // TODO: Add test cases. +-+ } +-+ for _, tt := range tests { +-+ t.Run(tt.name, func(t *testing.T) { +-+ // TODO: construct the receiver type. +-+ var f foo +-+ got := f.ExportedMethod(tt.in) +-+ // TODO: update the condition below to compare got with tt.want. +-+ if true { +-+ t.Errorf("ExportedMethod() = %v, want %v", got, tt.want) +-+ } +-+ }) +-+ } +-+} +--- @xpackage_unexported_recv_unexported_method/xpackagetestfile/xpackagetestfile_test.go -- +-@@ -3 +3,24 @@ +-+import "testing" +-+ +-+ +-+func Test_foo_unexportedMethod(t *testing.T) { +-+ tests := []struct { +-+ name string // description of this test case +-+ // Named input parameters for target function. +-+ in string +-+ want string +-+ }{ +-+ // TODO: Add test cases. +-+ } +-+ for _, tt := range tests { +-+ t.Run(tt.name, func(t *testing.T) { +-+ // TODO: construct the receiver type. +-+ var f foo +-+ got := f.unexportedMethod(tt.in) +-+ // TODO: update the condition below to compare got with tt.want. +-+ if true { +-+ t.Errorf("unexportedMethod() = %v, want %v", got, tt.want) +-+ } +-+ }) +-+ } +-+} +--- aliasreceiver/aliasreceiver.go -- +-package main +- +-type bar0 struct {} +-type bar1 = bar0 +-type Bar = bar1 +- +-func (*Bar) ExportedMethod(in string) string {return in} //@codeaction("ExportedMethod", "source.addTest", edit=pointer_receiver_exported_method) +-func (*Bar) unexportedMethod(in string) string {return in} //@codeaction("unexportedMethod", "source.addTest", edit=pointer_receiver_unexported_method) +- +-type foo0 struct {} +-type foo1 = foo0 +-type foo = foo1 +- +-func (foo) ExportedMethod(in string) string {return in} //@codeaction("ExportedMethod", "source.addTest", edit=alias_receiver_exported_method) +-func (foo) unexportedMethod(in string) string {return in} //@codeaction("unexportedMethod", "source.addTest", edit=alias_receiver_unexported_method) +- +-type baz0 struct{} +-type baz1 = baz0 +-type baz = baz1 +- +-func newBaz0() baz0 {return baz0{}} +- +-func (baz) method(in string) string {return in} //@codeaction("method", "source.addTest", edit=alias_constructor_on_underlying_type) +- +-type qux0 struct{} +-type qux1 = qux0 +-type qux2 = qux1 +-type Qux = *qux2 +- +-func newQux1() (qux1, error) {return qux1{}, nil} +- +-func (Qux) method(in string) string {return in} //@codeaction("method", "source.addTest", edit=alias_constructor_on_different_alias_type) +- +--- aliasreceiver/aliasreceiver_test.go -- +-package main +- +--- @pointer_receiver_exported_method/aliasreceiver/aliasreceiver_test.go -- +-@@ -3 +3,24 @@ +-+import "testing" +-+ +-+ +-+func TestBar_ExportedMethod(t *testing.T) { +-+ tests := []struct { +-+ name string // description of this test case +-+ // Named input parameters for target function. 
+-+ in string +-+ want string +-+ }{ +-+ // TODO: Add test cases. +-+ } +-+ for _, tt := range tests { +-+ t.Run(tt.name, func(t *testing.T) { +-+ // TODO: construct the receiver type. +-+ var b Bar +-+ got := b.ExportedMethod(tt.in) +-+ // TODO: update the condition below to compare got with tt.want. +-+ if true { +-+ t.Errorf("ExportedMethod() = %v, want %v", got, tt.want) +-+ } +-+ }) +-+ } +-+} +--- @pointer_receiver_unexported_method/aliasreceiver/aliasreceiver_test.go -- +-@@ -3 +3,24 @@ +-+import "testing" +-+ +-+ +-+func TestBar_unexportedMethod(t *testing.T) { +-+ tests := []struct { +-+ name string // description of this test case +-+ // Named input parameters for target function. +-+ in string +-+ want string +-+ }{ +-+ // TODO: Add test cases. +-+ } +-+ for _, tt := range tests { +-+ t.Run(tt.name, func(t *testing.T) { +-+ // TODO: construct the receiver type. +-+ var b Bar +-+ got := b.unexportedMethod(tt.in) +-+ // TODO: update the condition below to compare got with tt.want. +-+ if true { +-+ t.Errorf("unexportedMethod() = %v, want %v", got, tt.want) +-+ } +-+ }) +-+ } +-+} +--- @alias_receiver_exported_method/aliasreceiver/aliasreceiver_test.go -- +-@@ -3 +3,24 @@ +-+import "testing" +-+ +-+ +-+func Test_foo_ExportedMethod(t *testing.T) { +-+ tests := []struct { +-+ name string // description of this test case +-+ // Named input parameters for target function. +-+ in string +-+ want string +-+ }{ +-+ // TODO: Add test cases. +-+ } +-+ for _, tt := range tests { +-+ t.Run(tt.name, func(t *testing.T) { +-+ // TODO: construct the receiver type. +-+ var f foo +-+ got := f.ExportedMethod(tt.in) +-+ // TODO: update the condition below to compare got with tt.want. +-+ if true { +-+ t.Errorf("ExportedMethod() = %v, want %v", got, tt.want) +-+ } +-+ }) +-+ } +-+} +--- @alias_receiver_unexported_method/aliasreceiver/aliasreceiver_test.go -- +-@@ -3 +3,24 @@ +-+import "testing" +-+ +-+ +-+func Test_foo_unexportedMethod(t *testing.T) { +-+ tests := []struct { +-+ name string // description of this test case +-+ // Named input parameters for target function. +-+ in string +-+ want string +-+ }{ +-+ // TODO: Add test cases. +-+ } +-+ for _, tt := range tests { +-+ t.Run(tt.name, func(t *testing.T) { +-+ // TODO: construct the receiver type. +-+ var f foo +-+ got := f.unexportedMethod(tt.in) +-+ // TODO: update the condition below to compare got with tt.want. +-+ if true { +-+ t.Errorf("unexportedMethod() = %v, want %v", got, tt.want) +-+ } +-+ }) +-+ } +-+} +--- @alias_constructor_on_underlying_type/aliasreceiver/aliasreceiver_test.go -- +-@@ -3 +3,23 @@ +-+import "testing" +-+ +-+ +-+func Test_baz_method(t *testing.T) { +-+ tests := []struct { +-+ name string // description of this test case +-+ // Named input parameters for target function. +-+ in string +-+ want string +-+ }{ +-+ // TODO: Add test cases. +-+ } +-+ for _, tt := range tests { +-+ t.Run(tt.name, func(t *testing.T) { +-+ b := newBaz0() +-+ got := b.method(tt.in) +-+ // TODO: update the condition below to compare got with tt.want. +-+ if true { +-+ t.Errorf("method() = %v, want %v", got, tt.want) +-+ } +-+ }) +-+ } +-+} +--- @alias_constructor_on_different_alias_type/aliasreceiver/aliasreceiver_test.go -- +-@@ -3 +3,26 @@ +-+import "testing" +-+ +-+ +-+func TestQux_method(t *testing.T) { +-+ tests := []struct { +-+ name string // description of this test case +-+ // Named input parameters for target function. +-+ in string +-+ want string +-+ }{ +-+ // TODO: Add test cases. 
+-+ } +-+ for _, tt := range tests { +-+ t.Run(tt.name, func(t *testing.T) { +-+ q, err := newQux1() +-+ if err != nil { +-+ t.Fatalf("could not construct receiver type: %v", err) +-+ } +-+ got := q.method(tt.in) +-+ // TODO: update the condition below to compare got with tt.want. +-+ if true { +-+ t.Errorf("method() = %v, want %v", got, tt.want) +-+ } +-+ }) +-+ } +-+} +--- multiinputoutput/multiinputoutput.go -- +-package main +- +-func Foo(in, in2, in3, in4 string) (out, out1, out2 string) {return "", "", ""} //@codeaction("Foo", "source.addTest", edit=multi_input_output) +- +--- @multi_input_output/multiinputoutput/multiinputoutput_test.go -- +-@@ -0,0 +1,37 @@ +-+package main_test +-+ +-+import( +-+ "golang.org/lsptests/addtest/multiinputoutput" +-+ "testing" +-+) +-+ +-+func TestFoo(t *testing.T) { +-+ tests := []struct { +-+ name string // description of this test case +-+ // Named input parameters for target function. +-+ in string +-+ in2 string +-+ in3 string +-+ in4 string +-+ want string +-+ want2 string +-+ want3 string +-+ }{ +-+ // TODO: Add test cases. +-+ } +-+ for _, tt := range tests { +-+ t.Run(tt.name, func(t *testing.T) { +-+ got, got2, got3 := main.Foo(tt.in, tt.in2, tt.in3, tt.in4) +-+ // TODO: update the condition below to compare got with tt.want. +-+ if true { +-+ t.Errorf("Foo() = %v, want %v", got, tt.want) +-+ } +-+ if true { +-+ t.Errorf("Foo() = %v, want %v", got2, tt.want2) +-+ } +-+ if true { +-+ t.Errorf("Foo() = %v, want %v", got3, tt.want3) +-+ } +-+ }) +-+ } +-+} +--- xpackagerename/xpackagerename.go -- +-package main +- +-import ( +- mytime "time" +- myast "go/ast" +- mytest "testing" +-) +- +-var local mytest.T +- +-func Foo(t mytime.Time, a *myast.Node) (mytime.Time, *myast.Node) {return t, a} //@codeaction("Foo", "source.addTest", edit=xpackage_rename) +- +--- @xpackage_rename/xpackagerename/xpackagerename_test.go -- +-@@ -0,0 +1,33 @@ +-+package main_test +-+ +-+import( +-+ myast "go/ast" +-+ "golang.org/lsptests/addtest/xpackagerename" +-+ mytest "testing" +-+ mytime "time" +-+) +-+ +-+func TestFoo(t *mytest.T) { +-+ tests := []struct { +-+ name string // description of this test case +-+ // Named input parameters for target function. +-+ t mytime.Time +-+ a *myast.Node +-+ want mytime.Time +-+ want2 *myast.Node +-+ }{ +-+ // TODO: Add test cases. +-+ } +-+ for _, tt := range tests { +-+ t.Run(tt.name, func(t *mytest.T) { +-+ got, got2 := main.Foo(tt.t, tt.a) +-+ // TODO: update the condition below to compare got with tt.want. 
+-+ if true { +-+ t.Errorf("Foo() = %v, want %v", got, tt.want) +-+ } +-+ if true { +-+ t.Errorf("Foo() = %v, want %v", got2, tt.want2) +-+ } +-+ }) +-+ } +-+} +--- xtestpackagerename/xtestpackagerename.go -- +-package main +- +-import ( +- mytime "time" +- myast "go/ast" +- mytest "testing" +-) +- +-var local mytest.T +- +-func Foo(t mytime.Time, a *myast.Node) (mytime.Time, *myast.Node) {return t, a} //@codeaction("Foo", "source.addTest", edit=xtest_package_rename) +- +--- xtestpackagerename/xtestpackagerename_test.go -- +-package main_test +- +-import ( +- yourast "go/ast" +- yourtest "testing" +- yourtime "time" +-) +- +-var fooTime = yourtime.Time{} +-var fooNode = yourast.Node{} +-var fooT yourtest.T +- +--- @xtest_package_rename/xtestpackagerename/xtestpackagerename_test.go -- +-@@ -7 +7,2 @@ +-+ +-+ "golang.org/lsptests/addtest/xtestpackagerename" +-@@ -13 +15,25 @@ +-+ +-+func TestFoo(t *yourtest.T) { +-+ tests := []struct { +-+ name string // description of this test case +-+ // Named input parameters for target function. +-+ t yourtime.Time +-+ a *yourast.Node +-+ want yourtime.Time +-+ want2 *yourast.Node +-+ }{ +-+ // TODO: Add test cases. +-+ } +-+ for _, tt := range tests { +-+ t.Run(tt.name, func(t *yourtest.T) { +-+ got, got2 := main.Foo(tt.t, tt.a) +-+ // TODO: update the condition below to compare got with tt.want. +-+ if true { +-+ t.Errorf("Foo() = %v, want %v", got, tt.want) +-+ } +-+ if true { +-+ t.Errorf("Foo() = %v, want %v", got2, tt.want2) +-+ } +-+ }) +-+ } +-+} +--- returnwitherror/returnwitherror.go -- +-package main +- +-func OnlyErr() error {return nil} //@codeaction("OnlyErr", "source.addTest", edit=return_only_error) +-func StringErr() (string, error) {return "", nil} //@codeaction("StringErr", "source.addTest", edit=return_string_error) +-func MultipleStringErr() (string, string, string, error) {return "", "", "", nil} //@codeaction("MultipleStringErr", "source.addTest", edit=return_multiple_string_error) +- +--- @return_only_error/returnwitherror/returnwitherror_test.go -- +-@@ -0,0 +1,29 @@ +-+package main_test +-+ +-+import( +-+ "golang.org/lsptests/addtest/returnwitherror" +-+ "testing" +-+) +-+ +-+func TestOnlyErr(t *testing.T) { +-+ tests := []struct { +-+ name string // description of this test case +-+ wantErr bool +-+ }{ +-+ // TODO: Add test cases. +-+ } +-+ for _, tt := range tests { +-+ t.Run(tt.name, func(t *testing.T) { +-+ gotErr := main.OnlyErr() +-+ if gotErr != nil { +-+ if !tt.wantErr { +-+ t.Errorf("OnlyErr() failed: %v", gotErr) +-+ } +-+ return +-+ } +-+ if tt.wantErr { +-+ t.Fatal("OnlyErr() succeeded unexpectedly") +-+ } +-+ }) +-+ } +-+} +--- @return_string_error/returnwitherror/returnwitherror_test.go -- +-@@ -0,0 +1,34 @@ +-+package main_test +-+ +-+import( +-+ "golang.org/lsptests/addtest/returnwitherror" +-+ "testing" +-+) +-+ +-+func TestStringErr(t *testing.T) { +-+ tests := []struct { +-+ name string // description of this test case +-+ want string +-+ wantErr bool +-+ }{ +-+ // TODO: Add test cases. +-+ } +-+ for _, tt := range tests { +-+ t.Run(tt.name, func(t *testing.T) { +-+ got, gotErr := main.StringErr() +-+ if gotErr != nil { +-+ if !tt.wantErr { +-+ t.Errorf("StringErr() failed: %v", gotErr) +-+ } +-+ return +-+ } +-+ if tt.wantErr { +-+ t.Fatal("StringErr() succeeded unexpectedly") +-+ } +-+ // TODO: update the condition below to compare got with tt.want. 
+-+ if true { +-+ t.Errorf("StringErr() = %v, want %v", got, tt.want) +-+ } +-+ }) +-+ } +-+} +--- @return_multiple_string_error/returnwitherror/returnwitherror_test.go -- +-@@ -0,0 +1,42 @@ +-+package main_test +-+ +-+import( +-+ "golang.org/lsptests/addtest/returnwitherror" +-+ "testing" +-+) +-+ +-+func TestMultipleStringErr(t *testing.T) { +-+ tests := []struct { +-+ name string // description of this test case +-+ want string +-+ want2 string +-+ want3 string +-+ wantErr bool +-+ }{ +-+ // TODO: Add test cases. +-+ } +-+ for _, tt := range tests { +-+ t.Run(tt.name, func(t *testing.T) { +-+ got, got2, got3, gotErr := main.MultipleStringErr() +-+ if gotErr != nil { +-+ if !tt.wantErr { +-+ t.Errorf("MultipleStringErr() failed: %v", gotErr) +-+ } +-+ return +-+ } +-+ if tt.wantErr { +-+ t.Fatal("MultipleStringErr() succeeded unexpectedly") +-+ } +-+ // TODO: update the condition below to compare got with tt.want. +-+ if true { +-+ t.Errorf("MultipleStringErr() = %v, want %v", got, tt.want) +-+ } +-+ if true { +-+ t.Errorf("MultipleStringErr() = %v, want %v", got2, tt.want2) +-+ } +-+ if true { +-+ t.Errorf("MultipleStringErr() = %v, want %v", got3, tt.want3) +-+ } +-+ }) +-+ } +-+} +--- constructor/constructor.go -- +-package main +- +-// Constructor returns the type T. +-func NewReturnType() ReturnType {return ReturnType{}} +- +-type ReturnType struct {} +- +-func (*ReturnType) Method(in string) string {return in} //@codeaction("Method", "source.addTest", edit=constructor_return_type) +- +-// Constructor returns the type T and an error. +-func NewReturnTypeError() (ReturnTypeError, error) {return ReturnTypeError{}, nil} +- +-type ReturnTypeError struct {} +- +-func (*ReturnTypeError) Method(in string) string {return in} //@codeaction("Method", "source.addTest", edit=constructor_return_type_error) +- +-// Constructor returns the type *T. +-func NewReturnPtr() *ReturnPtr {return nil} +- +-type ReturnPtr struct {} +- +-func (*ReturnPtr) Method(in string) string {return in} //@codeaction("Method", "source.addTest", edit=constructor_return_ptr) +- +-// Constructor returns the type *T and an error. +-func NewReturnPtrError() (*ReturnPtrError, error) {return nil, nil} +- +-type ReturnPtrError struct {} +- +-func (*ReturnPtrError) Method(in string) string {return in} //@codeaction("Method", "source.addTest", edit=constructor_return_ptr_error) +- +--- @constructor_return_type/constructor/constructor_test.go -- +-@@ -0,0 +1,27 @@ +-+package main_test +-+ +-+import( +-+ "golang.org/lsptests/addtest/constructor" +-+ "testing" +-+) +-+ +-+func TestReturnType_Method(t *testing.T) { +-+ tests := []struct { +-+ name string // description of this test case +-+ // Named input parameters for target function. +-+ in string +-+ want string +-+ }{ +-+ // TODO: Add test cases. +-+ } +-+ for _, tt := range tests { +-+ t.Run(tt.name, func(t *testing.T) { +-+ r := main.NewReturnType() +-+ got := r.Method(tt.in) +-+ // TODO: update the condition below to compare got with tt.want. +-+ if true { +-+ t.Errorf("Method() = %v, want %v", got, tt.want) +-+ } +-+ }) +-+ } +-+} +--- @constructor_return_type_error/constructor/constructor_test.go -- +-@@ -0,0 +1,30 @@ +-+package main_test +-+ +-+import( +-+ "golang.org/lsptests/addtest/constructor" +-+ "testing" +-+) +-+ +-+func TestReturnTypeError_Method(t *testing.T) { +-+ tests := []struct { +-+ name string // description of this test case +-+ // Named input parameters for target function. +-+ in string +-+ want string +-+ }{ +-+ // TODO: Add test cases. 
+-+ } +-+ for _, tt := range tests { +-+ t.Run(tt.name, func(t *testing.T) { +-+ r, err := main.NewReturnTypeError() +-+ if err != nil { +-+ t.Fatalf("could not construct receiver type: %v", err) +-+ } +-+ got := r.Method(tt.in) +-+ // TODO: update the condition below to compare got with tt.want. +-+ if true { +-+ t.Errorf("Method() = %v, want %v", got, tt.want) +-+ } +-+ }) +-+ } +-+} +--- @constructor_return_ptr/constructor/constructor_test.go -- +-@@ -0,0 +1,27 @@ +-+package main_test +-+ +-+import( +-+ "golang.org/lsptests/addtest/constructor" +-+ "testing" +-+) +-+ +-+func TestReturnPtr_Method(t *testing.T) { +-+ tests := []struct { +-+ name string // description of this test case +-+ // Named input parameters for target function. +-+ in string +-+ want string +-+ }{ +-+ // TODO: Add test cases. +-+ } +-+ for _, tt := range tests { +-+ t.Run(tt.name, func(t *testing.T) { +-+ r := main.NewReturnPtr() +-+ got := r.Method(tt.in) +-+ // TODO: update the condition below to compare got with tt.want. +-+ if true { +-+ t.Errorf("Method() = %v, want %v", got, tt.want) +-+ } +-+ }) +-+ } +-+} +--- @constructor_return_ptr_error/constructor/constructor_test.go -- +-@@ -0,0 +1,30 @@ +-+package main_test +-+ +-+import( +-+ "golang.org/lsptests/addtest/constructor" +-+ "testing" +-+) +-+ +-+func TestReturnPtrError_Method(t *testing.T) { +-+ tests := []struct { +-+ name string // description of this test case +-+ // Named input parameters for target function. +-+ in string +-+ want string +-+ }{ +-+ // TODO: Add test cases. +-+ } +-+ for _, tt := range tests { +-+ t.Run(tt.name, func(t *testing.T) { +-+ r, err := main.NewReturnPtrError() +-+ if err != nil { +-+ t.Fatalf("could not construct receiver type: %v", err) +-+ } +-+ got := r.Method(tt.in) +-+ // TODO: update the condition below to compare got with tt.want. +-+ if true { +-+ t.Errorf("Method() = %v, want %v", got, tt.want) +-+ } +-+ }) +-+ } +-+} +--- constructorcomparison/constructorcomparison.go -- +-package main +- +-// Foo have two constructors. NewFoo is prefered over others. +-func CreateAFoo() Foo {return Foo{}} +-func NewFoo() Foo {return Foo{}} +- +-type Foo struct{} +- +-func (*Foo) Method(in string) string {return in} //@codeaction("Method", "source.addTest", edit=constructor_comparison_new) +- +-// Bar have two constructors. Bar is preferred due to alphabetical ordering. +-func ABar() (Bar, error) {return Bar{}, nil} +-// func CreateABar() Bar {return Bar{}} +- +-type Bar struct{} +- +-func (*Bar) Method(in string) string {return in} //@codeaction("Method", "source.addTest", edit=constructor_comparison_alphabetical) +- +--- @constructor_comparison_new/constructorcomparison/constructorcomparison_test.go -- +-@@ -0,0 +1,27 @@ +-+package main_test +-+ +-+import( +-+ "golang.org/lsptests/addtest/constructorcomparison" +-+ "testing" +-+) +-+ +-+func TestFoo_Method(t *testing.T) { +-+ tests := []struct { +-+ name string // description of this test case +-+ // Named input parameters for target function. +-+ in string +-+ want string +-+ }{ +-+ // TODO: Add test cases. +-+ } +-+ for _, tt := range tests { +-+ t.Run(tt.name, func(t *testing.T) { +-+ f := main.NewFoo() +-+ got := f.Method(tt.in) +-+ // TODO: update the condition below to compare got with tt.want. 
+-+ if true { +-+ t.Errorf("Method() = %v, want %v", got, tt.want) +-+ } +-+ }) +-+ } +-+} +--- @constructor_comparison_alphabetical/constructorcomparison/constructorcomparison_test.go -- +-@@ -0,0 +1,30 @@ +-+package main_test +-+ +-+import( +-+ "golang.org/lsptests/addtest/constructorcomparison" +-+ "testing" +-+) +-+ +-+func TestBar_Method(t *testing.T) { +-+ tests := []struct { +-+ name string // description of this test case +-+ // Named input parameters for target function. +-+ in string +-+ want string +-+ }{ +-+ // TODO: Add test cases. +-+ } +-+ for _, tt := range tests { +-+ t.Run(tt.name, func(t *testing.T) { +-+ b, err := main.ABar() +-+ if err != nil { +-+ t.Fatalf("could not construct receiver type: %v", err) +-+ } +-+ got := b.Method(tt.in) +-+ // TODO: update the condition below to compare got with tt.want. +-+ if true { +-+ t.Errorf("Method() = %v, want %v", got, tt.want) +-+ } +-+ }) +-+ } +-+} +--- unnamedparam/unnamedparam.go -- +-package main +- +-import "time" +- +-func FooInputBasic(one, two, _ string, _ int) (out, out1, out2 string) {return "", "", ""} //@codeaction("Foo", "source.addTest", edit=function_basic_type) +- +-func FooInputStruct(one string, _ time.Time) (out, out1, out2 string) {return "", "", ""} //@codeaction("Foo", "source.addTest", edit=function_struct_type) +- +-func FooInputPtr(one string, _ *time.Time) (out, out1, out2 string) {return "", "", ""} //@codeaction("Foo", "source.addTest", edit=function_ptr_type) +- +-func FooInputFunc(one string, _ func(time.Time) *time.Time) (out, out1, out2 string) {return "", "", ""} //@codeaction("Foo", "source.addTest", edit=function_func_type) +- +-type BarInputBasic struct{} +- +-func NewBarInputBasic(one, two, _ string, _ int) *BarInputBasic {return nil} +- +-func (r *BarInputBasic) Method(one, two, _ string, _ int) {} //@codeaction("Method", "source.addTest", edit=constructor_basic_type) +- +-type BarInputStruct struct{} +- +-func NewBarInputStruct(one string, _ time.Time) *BarInputStruct {return nil} +- +-func (r *BarInputStruct) Method(one string, _ time.Time) {} //@codeaction("Method", "source.addTest", edit=constructor_struct_type) +- +-type BarInputPtr struct{} +- +-func NewBarInputPtr(one string, _ *time.Time) *BarInputPtr {return nil} +- +-func (r *BarInputPtr) Method(one string, _ *time.Time) {} //@codeaction("Method", "source.addTest", edit=constructor_ptr_type) +- +-type BarInputFunction struct{} +- +-func NewBarInputFunction(one string, _ func(time.Time) *time.Time) *BarInputFunction {return nil} +- +-func (r *BarInputFunction) Method(one string, _ func(time.Time) *time.Time) {} //@codeaction("Method", "source.addTest", edit=constructor_func_type) +- +--- @function_basic_type/unnamedparam/unnamedparam_test.go -- +-@@ -0,0 +1,35 @@ +-+package main_test +-+ +-+import( +-+ "golang.org/lsptests/addtest/unnamedparam" +-+ "testing" +-+) +-+ +-+func TestFooInputBasic(t *testing.T) { +-+ tests := []struct { +-+ name string // description of this test case +-+ // Named input parameters for target function. +-+ one string +-+ two string +-+ want string +-+ want2 string +-+ want3 string +-+ }{ +-+ // TODO: Add test cases. +-+ } +-+ for _, tt := range tests { +-+ t.Run(tt.name, func(t *testing.T) { +-+ got, got2, got3 := main.FooInputBasic(tt.one, tt.two, "", 0) +-+ // TODO: update the condition below to compare got with tt.want. 
+-+ if true { +-+ t.Errorf("FooInputBasic() = %v, want %v", got, tt.want) +-+ } +-+ if true { +-+ t.Errorf("FooInputBasic() = %v, want %v", got2, tt.want2) +-+ } +-+ if true { +-+ t.Errorf("FooInputBasic() = %v, want %v", got3, tt.want3) +-+ } +-+ }) +-+ } +-+} +--- @function_func_type/unnamedparam/unnamedparam_test.go -- +-@@ -0,0 +1,35 @@ +-+package main_test +-+ +-+import( +-+ "golang.org/lsptests/addtest/unnamedparam" +-+ "testing" +-+ "time" +-+) +-+ +-+func TestFooInputFunc(t *testing.T) { +-+ tests := []struct { +-+ name string // description of this test case +-+ // Named input parameters for target function. +-+ one string +-+ want string +-+ want2 string +-+ want3 string +-+ }{ +-+ // TODO: Add test cases. +-+ } +-+ for _, tt := range tests { +-+ t.Run(tt.name, func(t *testing.T) { +-+ got, got2, got3 := main.FooInputFunc(tt.one, nil) +-+ // TODO: update the condition below to compare got with tt.want. +-+ if true { +-+ t.Errorf("FooInputFunc() = %v, want %v", got, tt.want) +-+ } +-+ if true { +-+ t.Errorf("FooInputFunc() = %v, want %v", got2, tt.want2) +-+ } +-+ if true { +-+ t.Errorf("FooInputFunc() = %v, want %v", got3, tt.want3) +-+ } +-+ }) +-+ } +-+} +--- @function_ptr_type/unnamedparam/unnamedparam_test.go -- +-@@ -0,0 +1,35 @@ +-+package main_test +-+ +-+import( +-+ "golang.org/lsptests/addtest/unnamedparam" +-+ "testing" +-+ "time" +-+) +-+ +-+func TestFooInputPtr(t *testing.T) { +-+ tests := []struct { +-+ name string // description of this test case +-+ // Named input parameters for target function. +-+ one string +-+ want string +-+ want2 string +-+ want3 string +-+ }{ +-+ // TODO: Add test cases. +-+ } +-+ for _, tt := range tests { +-+ t.Run(tt.name, func(t *testing.T) { +-+ got, got2, got3 := main.FooInputPtr(tt.one, nil) +-+ // TODO: update the condition below to compare got with tt.want. +-+ if true { +-+ t.Errorf("FooInputPtr() = %v, want %v", got, tt.want) +-+ } +-+ if true { +-+ t.Errorf("FooInputPtr() = %v, want %v", got2, tt.want2) +-+ } +-+ if true { +-+ t.Errorf("FooInputPtr() = %v, want %v", got3, tt.want3) +-+ } +-+ }) +-+ } +-+} +--- @function_struct_type/unnamedparam/unnamedparam_test.go -- +-@@ -0,0 +1,35 @@ +-+package main_test +-+ +-+import( +-+ "golang.org/lsptests/addtest/unnamedparam" +-+ "testing" +-+ "time" +-+) +-+ +-+func TestFooInputStruct(t *testing.T) { +-+ tests := []struct { +-+ name string // description of this test case +-+ // Named input parameters for target function. +-+ one string +-+ want string +-+ want2 string +-+ want3 string +-+ }{ +-+ // TODO: Add test cases. +-+ } +-+ for _, tt := range tests { +-+ t.Run(tt.name, func(t *testing.T) { +-+ got, got2, got3 := main.FooInputStruct(tt.one, time.Time{}) +-+ // TODO: update the condition below to compare got with tt.want. +-+ if true { +-+ t.Errorf("FooInputStruct() = %v, want %v", got, tt.want) +-+ } +-+ if true { +-+ t.Errorf("FooInputStruct() = %v, want %v", got2, tt.want2) +-+ } +-+ if true { +-+ t.Errorf("FooInputStruct() = %v, want %v", got3, tt.want3) +-+ } +-+ }) +-+ } +-+} +--- @constructor_basic_type/unnamedparam/unnamedparam_test.go -- +-@@ -0,0 +1,26 @@ +-+package main_test +-+ +-+import( +-+ "golang.org/lsptests/addtest/unnamedparam" +-+ "testing" +-+) +-+ +-+func TestBarInputBasic_Method(t *testing.T) { +-+ tests := []struct { +-+ name string // description of this test case +-+ // Named input parameters for receiver constructor. +-+ cone string +-+ ctwo string +-+ // Named input parameters for target function. 
+-+ one string +-+ two string +-+ }{ +-+ // TODO: Add test cases. +-+ } +-+ for _, tt := range tests { +-+ t.Run(tt.name, func(t *testing.T) { +-+ r := main.NewBarInputBasic(tt.cone, tt.ctwo, "", 0) +-+ r.Method(tt.one, tt.two, "", 0) +-+ }) +-+ } +-+} +--- @constructor_func_type/unnamedparam/unnamedparam_test.go -- +-@@ -0,0 +1,25 @@ +-+package main_test +-+ +-+import( +-+ "golang.org/lsptests/addtest/unnamedparam" +-+ "testing" +-+ "time" +-+) +-+ +-+func TestBarInputFunction_Method(t *testing.T) { +-+ tests := []struct { +-+ name string // description of this test case +-+ // Named input parameters for receiver constructor. +-+ cone string +-+ // Named input parameters for target function. +-+ one string +-+ }{ +-+ // TODO: Add test cases. +-+ } +-+ for _, tt := range tests { +-+ t.Run(tt.name, func(t *testing.T) { +-+ r := main.NewBarInputFunction(tt.cone, nil) +-+ r.Method(tt.one, nil) +-+ }) +-+ } +-+} +--- @constructor_ptr_type/unnamedparam/unnamedparam_test.go -- +-@@ -0,0 +1,25 @@ +-+package main_test +-+ +-+import( +-+ "golang.org/lsptests/addtest/unnamedparam" +-+ "testing" +-+ "time" +-+) +-+ +-+func TestBarInputPtr_Method(t *testing.T) { +-+ tests := []struct { +-+ name string // description of this test case +-+ // Named input parameters for receiver constructor. +-+ cone string +-+ // Named input parameters for target function. +-+ one string +-+ }{ +-+ // TODO: Add test cases. +-+ } +-+ for _, tt := range tests { +-+ t.Run(tt.name, func(t *testing.T) { +-+ r := main.NewBarInputPtr(tt.cone, nil) +-+ r.Method(tt.one, nil) +-+ }) +-+ } +-+} +--- @constructor_struct_type/unnamedparam/unnamedparam_test.go -- +-@@ -0,0 +1,25 @@ +-+package main_test +-+ +-+import( +-+ "golang.org/lsptests/addtest/unnamedparam" +-+ "testing" +-+ "time" +-+) +-+ +-+func TestBarInputStruct_Method(t *testing.T) { +-+ tests := []struct { +-+ name string // description of this test case +-+ // Named input parameters for receiver constructor. +-+ cone string +-+ // Named input parameters for target function. +-+ one string +-+ }{ +-+ // TODO: Add test cases. +-+ } +-+ for _, tt := range tests { +-+ t.Run(tt.name, func(t *testing.T) { +-+ r := main.NewBarInputStruct(tt.cone, time.Time{}) +-+ r.Method(tt.one, time.Time{}) +-+ }) +-+ } +-+} +--- contextinput/contextinput.go -- +-package main +- +-import "context" +- +-func Function(ctx context.Context, _, _ string) (out, out1, out2 string) {return "", "", ""} //@codeaction("Function", "source.addTest", edit=function_context) +- +-type Foo struct {} +- +-func NewFoo(ctx context.Context) (*Foo, error) {return nil, nil} +- +-func (*Foo) Method(ctx context.Context, _, _ string) (out, out1, out2 string) {return "", "", ""} //@codeaction("Method", "source.addTest", edit=method_context) +--- contextinput/contextinput_test.go -- +-package main_test +- +-import renamedctx "context" +- +-var local renamedctx.Context +- +--- @function_context/contextinput/contextinput_test.go -- +-@@ -3 +3,3 @@ +--import renamedctx "context" +-+import ( +-+ renamedctx "context" +-+ "testing" +-@@ -5 +7,3 @@ +-+ "golang.org/lsptests/addtest/contextinput" +-+) +-+ +-@@ -7 +12,26 @@ +-+ +-+func TestFunction(t *testing.T) { +-+ tests := []struct { +-+ name string // description of this test case +-+ want string +-+ want2 string +-+ want3 string +-+ }{ +-+ // TODO: Add test cases. 
+-+ } +-+ for _, tt := range tests { +-+ t.Run(tt.name, func(t *testing.T) { +-+ got, got2, got3 := main.Function(renamedctx.Background(), "", "") +-+ // TODO: update the condition below to compare got with tt.want. +-+ if true { +-+ t.Errorf("Function() = %v, want %v", got, tt.want) +-+ } +-+ if true { +-+ t.Errorf("Function() = %v, want %v", got2, tt.want2) +-+ } +-+ if true { +-+ t.Errorf("Function() = %v, want %v", got3, tt.want3) +-+ } +-+ }) +-+ } +-+} +--- @method_context/contextinput/contextinput_test.go -- +-@@ -3 +3,3 @@ +--import renamedctx "context" +-+import ( +-+ renamedctx "context" +-+ "testing" +-@@ -5 +7,3 @@ +-+ "golang.org/lsptests/addtest/contextinput" +-+) +-+ +-@@ -7 +12,30 @@ +-+ +-+func TestFoo_Method(t *testing.T) { +-+ tests := []struct { +-+ name string // description of this test case +-+ want string +-+ want2 string +-+ want3 string +-+ }{ +-+ // TODO: Add test cases. +-+ } +-+ for _, tt := range tests { +-+ t.Run(tt.name, func(t *testing.T) { +-+ f, err := main.NewFoo(renamedctx.Background()) +-+ if err != nil { +-+ t.Fatalf("could not construct receiver type: %v", err) +-+ } +-+ got, got2, got3 := f.Method(renamedctx.Background(), "", "") +-+ // TODO: update the condition below to compare got with tt.want. +-+ if true { +-+ t.Errorf("Method() = %v, want %v", got, tt.want) +-+ } +-+ if true { +-+ t.Errorf("Method() = %v, want %v", got2, tt.want2) +-+ } +-+ if true { +-+ t.Errorf("Method() = %v, want %v", got3, tt.want3) +-+ } +-+ }) +-+ } +-+} +--- typeparameter/typeparameter.go -- +-package main +- +-func Function[T any] () {} // no suggested fix +- +-type Foo struct {} +- +-func NewFoo() +- +-func (*Foo) Method[T any]() {} // no suggested fix +diff -urN a/gopls/internal/test/marker/testdata/codeaction/change_quote.txt b/gopls/internal/test/marker/testdata/codeaction/change_quote.txt +--- a/gopls/internal/test/marker/testdata/codeaction/change_quote.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/codeaction/change_quote.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,69 +0,0 @@ +-This test checks the behavior of the 'change quote' code action. 
+- +--- flags -- +--ignore_extra_diags +- +--- go.mod -- +-module golang.org/lsptests/changequote +- +-go 1.18 +- +--- a.go -- +-package changequote +- +-import ( +- "fmt" +-) +- +-func foo() { +- var s string +- s = "hello" //@codeaction(`"`, "refactor.rewrite.changeQuote", edit=a1) +- s = `hello` //@codeaction("`", "refactor.rewrite.changeQuote", edit=a2) +- s = "hello\tworld" //@codeaction(`"`, "refactor.rewrite.changeQuote", edit=a3) +- s = `hello world` //@codeaction("`", "refactor.rewrite.changeQuote", edit=a4) +- s = "hello\nworld" //@codeaction(`"`, "refactor.rewrite.changeQuote", edit=a5) +- // add a comment to avoid affect diff compute +- s = `hello +-world` //@codeaction("`", "refactor.rewrite.changeQuote", edit=a6) +- s = "hello\"world" //@codeaction(`"`, "refactor.rewrite.changeQuote", edit=a7) +- s = `hello"world` //@codeaction("`", "refactor.rewrite.changeQuote", edit=a8) +- s = "hello\x1bworld" //@codeaction(`"`, "refactor.rewrite.changeQuote", err=re"found 0 CodeActions") +- s = "hello`world" //@codeaction(`"`, "refactor.rewrite.changeQuote", err=re"found 0 CodeActions") +- s = "hello\x7fworld" //@codeaction(`"`, "refactor.rewrite.changeQuote", err=re"found 0 CodeActions") +- fmt.Println(s) +-} +- +--- @a1/a.go -- +-@@ -9 +9 @@ +-- s = "hello" //@codeaction(`"`, "refactor.rewrite.changeQuote", edit=a1) +-+ s = `hello` //@codeaction(`"`, "refactor.rewrite.changeQuote", edit=a1) +--- @a2/a.go -- +-@@ -10 +10 @@ +-- s = `hello` //@codeaction("`", "refactor.rewrite.changeQuote", edit=a2) +-+ s = "hello" //@codeaction("`", "refactor.rewrite.changeQuote", edit=a2) +--- @a3/a.go -- +-@@ -11 +11 @@ +-- s = "hello\tworld" //@codeaction(`"`, "refactor.rewrite.changeQuote", edit=a3) +-+ s = `hello world` //@codeaction(`"`, "refactor.rewrite.changeQuote", edit=a3) +--- @a4/a.go -- +-@@ -12 +12 @@ +-- s = `hello world` //@codeaction("`", "refactor.rewrite.changeQuote", edit=a4) +-+ s = "hello\tworld" //@codeaction("`", "refactor.rewrite.changeQuote", edit=a4) +--- @a5/a.go -- +-@@ -13 +13,2 @@ +-- s = "hello\nworld" //@codeaction(`"`, "refactor.rewrite.changeQuote", edit=a5) +-+ s = `hello +-+world` //@codeaction(`"`, "refactor.rewrite.changeQuote", edit=a5) +--- @a6/a.go -- +-@@ -15,2 +15 @@ +-- s = `hello +--world` //@codeaction("`", "refactor.rewrite.changeQuote", edit=a6) +-+ s = "hello\nworld" //@codeaction("`", "refactor.rewrite.changeQuote", edit=a6) +--- @a7/a.go -- +-@@ -17 +17 @@ +-- s = "hello\"world" //@codeaction(`"`, "refactor.rewrite.changeQuote", edit=a7) +-+ s = `hello"world` //@codeaction(`"`, "refactor.rewrite.changeQuote", edit=a7) +--- @a8/a.go -- +-@@ -18 +18 @@ +-- s = `hello"world` //@codeaction("`", "refactor.rewrite.changeQuote", edit=a8) +-+ s = "hello\"world" //@codeaction("`", "refactor.rewrite.changeQuote", edit=a8) +diff -urN a/gopls/internal/test/marker/testdata/codeaction/eliminate_dot_import.txt b/gopls/internal/test/marker/testdata/codeaction/eliminate_dot_import.txt +--- a/gopls/internal/test/marker/testdata/codeaction/eliminate_dot_import.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/codeaction/eliminate_dot_import.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,50 +0,0 @@ +-This test checks the behavior of the 'remove dot import' code action. +- +--- go.mod -- +-module example.com +- +-go 1.18 +- +--- a.go -- +-package dotimport +- +-// Base case: action is OK. +- +-import ( +- . "fmt" //@codeaction(`.`, "refactor.rewrite.eliminateDotImport", edit=a1) +- . 
"bytes" //@codeaction(`.`, "refactor.rewrite.eliminateDotImport", edit=a2) +- . "time" //@codeaction(`.`, "refactor.rewrite.eliminateDotImport", edit=a3) +-) +- +-var _ = a +- +-func a() { +- Println("hello") +- +- buf := NewBuffer(nil) +- buf.Grow(10) +- +- _ = Ticker{C: nil} +-} +- +--- @a1/a.go -- +-@@ -6 +6 @@ +-- . "fmt" //@codeaction(`.`, "refactor.rewrite.eliminateDotImport", edit=a1) +-+ "fmt" //@codeaction(`.`, "refactor.rewrite.eliminateDotImport", edit=a1) +-@@ -14 +14 @@ +-- Println("hello") +-+ fmt.Println("hello") +--- @a2/a.go -- +-@@ -7 +7 @@ +-- . "bytes" //@codeaction(`.`, "refactor.rewrite.eliminateDotImport", edit=a2) +-+ "bytes" //@codeaction(`.`, "refactor.rewrite.eliminateDotImport", edit=a2) +-@@ -16 +16 @@ +-- buf := NewBuffer(nil) +-+ buf := bytes.NewBuffer(nil) +--- @a3/a.go -- +-@@ -8 +8 @@ +-- . "time" //@codeaction(`.`, "refactor.rewrite.eliminateDotImport", edit=a3) +-+ "time" //@codeaction(`.`, "refactor.rewrite.eliminateDotImport", edit=a3) +-@@ -19 +19 @@ +-- _ = Ticker{C: nil} +-+ _ = time.Ticker{C: nil} +diff -urN a/gopls/internal/test/marker/testdata/codeaction/extract_anonymous_struct.txt b/gopls/internal/test/marker/testdata/codeaction/extract_anonymous_struct.txt +--- a/gopls/internal/test/marker/testdata/codeaction/extract_anonymous_struct.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/codeaction/extract_anonymous_struct.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,222 +0,0 @@ +-This test checks of the behavior of extract function when the extracted block includes anonymous structs. +--- go.mod -- +-module mod.com +- +-go 1.12 +--- a/a.go -- +-package a +- +-func _() { +- var x struct{ y int } //@codeaction("var", "refactor.extract.function", end=endA, result=anonA) +- println(x.y) //@loc(endA, ")") +-} +- +--- b/b.go -- +-package b +- +-func _() { +- type T struct { +- y int +- } +- var x T //@codeaction("var", "refactor.extract.function", end=endB, err="the code refers to a local type") +- println(x.y) //@loc(endB, ")") +-} +- +--- @anonA/a/a.go -- +-package a +- +-func _() { +- newFunction() //@loc(endA, ")") +-} +- +-func newFunction() { +- var x struct{ y int } //@codeaction("var", "refactor.extract.function", end=endA, result=anonA) +- println(x.y) +-} +- +--- d/d.go -- +-package d +- +-func _() { +- s := []struct{ y int }{ +- {y: 1}, +- {y: 2}, +- } +- for _, v := range s { //@codeaction("for", "refactor.extract.function", end=endD, result=anonD) +- println(v.y) +- } //@loc(endD, "}") +-} +- +--- @anonD/d/d.go -- +-package d +- +-func _() { +- s := []struct{ y int }{ +- {y: 1}, +- {y: 2}, +- } +- newFunction(s) //@loc(endD, "}") +-} +- +-func newFunction(s []struct{y int}) { +- for _, v := range s { //@codeaction("for", "refactor.extract.function", end=endD, result=anonD) +- println(v.y) +- } +-} +- +--- e/e.go -- +-package e +- +-func _() { +- var x int +- s := []struct { //@codeaction("s", "refactor.extract.function", end=endE, result=anonE) +- y int +- }{ +- {y: 1}, +- {y: 2}, +- } +- x = s[0].y //@loc(endE, "x = s[0].y") +- println(x) +-} +- +--- @anonE/e/e.go -- +-package e +- +-func _() { +- var x int +- x = newFunction(x) //@loc(endE, "x = s[0].y") +- println(x) +-} +- +-func newFunction(x int) int { +- s := []struct { //@codeaction("s", "refactor.extract.function", end=endE, result=anonE) +- y int +- }{ +- {y: 1}, +- {y: 2}, +- } +- x = s[0].y +- return x +-} +- +--- f/f.go -- +-package f +-func _() int { +- x := struct{ y int } { y: 1 } //@codeaction("x", "refactor.extract.function", end=endF, 
result=anonF) +- return x.y //@loc(endF, "y") +-} +- +--- @anonF/f/f.go -- +-package f +-func _() int { +- return newFunction() //@loc(endF, "y") +-} +- +-func newFunction() int { +- x := struct{ y int }{y: 1} //@codeaction("x", "refactor.extract.function", end=endF, result=anonF) +- return x.y +-} +- +--- g/g.go -- +-package g +- +-import "fmt" +- +-func _() error { +- x := struct{ y error }{fmt.Errorf("test error")} +- return x.y //@ loc(endG, "y"), codeaction("return", "refactor.extract.function", end=endG, result=anonG) +-} +- +--- @anonG/g/g.go -- +-package g +- +-import "fmt" +- +-func _() error { +- x := struct{ y error }{fmt.Errorf("test error")} +- return newFunction(x) //@ loc(endG, "y"), codeaction("return", "refactor.extract.function", end=endG, result=anonG) +-} +- +-func newFunction(x struct{y error}) error { +- return x.y +-} +- +--- h/h.go -- +-package h +- +-import "fmt" +- +-func _() string { +- type A error +- type B struct { +- A +- } +- a := B{A: fmt.Errorf("test error")} //@codeaction("a", "refactor.extract.function", end=endH, err="the code refers to a local type") +- return a.Error() //@loc(endH, "Error()") +-} +- +--- i/i.go -- +-package i +- +-import "fmt" +- +-func _() string { +- var a struct{ e error } //@codeaction("var", "refactor.extract.function", end=endI, result=anonI) +- a.e = fmt.Errorf("test error") +- return a.e.Error() //@loc(endI, "Error()") +-} +- +--- @anonI/i/i.go -- +-package i +- +-import "fmt" +- +-func _() string { +- return newFunction() //@loc(endI, "Error()") +-} +- +-func newFunction() string { +- var a struct{ e error } //@codeaction("var", "refactor.extract.function", end=endI, result=anonI) +- a.e = fmt.Errorf("test error") +- return a.e.Error() +-} +- +--- j/j.go -- +-package j +- +-import "unsafe" +- +-func _() uintptr { +- var x struct{ p unsafe.Pointer } +- y := uintptr(x.p) //@codeaction("y", "refactor.extract.function", end=endJ, result=anonJ) +- return y //@loc(endJ, "y") +-} +- +--- @anonJ/j/j.go -- +-package j +- +-import "unsafe" +- +-func _() uintptr { +- var x struct{ p unsafe.Pointer } +- return newFunction(x) //@loc(endJ, "y") +-} +- +-func newFunction(x struct{p unsafe.Pointer}) uintptr { +- y := uintptr(x.p) //@codeaction("y", "refactor.extract.function", end=endJ, result=anonJ) +- return y +-} +- +--- k/k.go -- +-package k +- +-import "unsafe" +- +-func _(x int) unsafe.Pointer { +- type A struct { +- p unsafe.Pointer +- } +- c := A{p: unsafe.Pointer(&x)} //@codeaction("c", "refactor.extract.function", end=endK, err="the code refers to a local type") +- return c.p //@loc(endK, "c.p") +-} +- +diff -urN a/gopls/internal/test/marker/testdata/codeaction/extract_control.txt b/gopls/internal/test/marker/testdata/codeaction/extract_control.txt +--- a/gopls/internal/test/marker/testdata/codeaction/extract_control.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/codeaction/extract_control.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,259 +0,0 @@ +-This test verifies various behaviors of function extraction involving free control statements. 
+- +--- go.mod -- +-module mod.test/extract +- +-go 1.18 +- +--- freecontrol.go -- +-package extract +- +-//@codeaction(ifCondContinue, "refactor.extract.function", edit=freeControl1) +-//@codeaction(ifCondGotoLabel, "refactor.extract.function", edit=freeControl2) +-//@codeaction(ifCondGotoLabelWithLabel, "refactor.extract.function", edit=freeControl3) +-//@codeaction(multipleCtrl, "refactor.extract.function", edit=freeControl4) +-//@codeaction(multipleCtrlNotAllSelected, "refactor.extract.function", edit=freeControl5) +-//@codeaction(ctrlVarExists, "refactor.extract.function", edit=freeControl6) +-//@codeaction(twoReturns, "refactor.extract.function", edit=freeControl7) +-//@codeaction(forWithLabel, "refactor.extract.function", edit=freeControl8) +- +-func FuncContinue(cond bool) { +- for range "abc" { +- if cond { //@ loc(ifCondContinue, re`(?s)if.*println.0.`) +- continue +- } +- println(0) +- } +-} +- +-func FuncGoTo(cond bool) { +- for range "abc" { +- if cond { //@ loc(ifCondGotoLabel, re`(?s)if.*println.1.`), loc(ifCondGotoLabelWithLabel, re`(?s)if.*goto.label1....`) +- goto label1 +- } +- label1: +- println(1) +- } +-} +- +-func FuncMultipleCtrl(x int) { +- for range "abc" { +- if x < 10 { //@ loc(multipleCtrl, re`(?s)if.x.*return...next1....`), loc(multipleCtrlNotAllSelected, re`(?s)if.x.*break....`) +- continue +- } +- if x > 2 { +- break +- } +- if x == 1 { +- return //next1 +- } +- } +-} +- +-func FuncCtrlVarExists(x int) { +- ctrl := "abc" +- for range ctrl { +- if x < 10 { //@ loc(ctrlVarExists, re`(?s)if.x.*continue...next2....`) +- continue //next2 +- } +- } +-} +- +-func FuncTwoReturns(x int) int { +- outer: +- for range "abc" { +- if x < 10 { //@ loc(twoReturns, re`(?s)if.x.*return.1....`) +- return 0 +- } +- test := x - 4 +- if test > 2 { +- continue +- } +- if test == 10 { +- return 1 +- } +- +- for range "def" { //@ loc(forWithLabel, re`(?s)for.*outer.........`) +- if x < 2 { +- continue +- } +- if x > 10 { +- continue outer +- } +- } +- } +- return 0 +-} +--- @freeControl1/freecontrol.go -- +-@@ -14 +14,3 @@ +-- if cond { //@ loc(ifCondContinue, re`(?s)if.*println.0.`) +-+ ctrl := newFunction(cond) +-+ switch ctrl { +-+ case 1: +-@@ -17 +19 @@ +-- println(0) +-@@ -21 +22,8 @@ +-+func newFunction(cond bool) int { +-+ if cond { //@ loc(ifCondContinue, re`(?s)if.*println.0.`) +-+ return 1 +-+ } +-+ println(0) +-+ return 0 +-+} +-+ +--- @freeControl2/freecontrol.go -- +-@@ -23,5 +23 @@ +-- if cond { //@ loc(ifCondGotoLabel, re`(?s)if.*println.1.`), loc(ifCondGotoLabelWithLabel, re`(?s)if.*goto.label1....`) +-- goto label1 +-- } +-- label1: +-- println(1) +-+ newFunction(cond) +-@@ -31 +27,8 @@ +-+func newFunction(cond bool) { +-+ if cond { //@ loc(ifCondGotoLabel, re`(?s)if.*println.1.`), loc(ifCondGotoLabelWithLabel, re`(?s)if.*goto.label1....`) +-+ goto label1 +-+ } +-+label1: +-+ println(1) +-+} +-+ +--- @freeControl3/freecontrol.go -- +-@@ -23 +23,3 @@ +-- if cond { //@ loc(ifCondGotoLabel, re`(?s)if.*println.1.`), loc(ifCondGotoLabelWithLabel, re`(?s)if.*goto.label1....`) +-+ ctrl := newFunction(cond) +-+ switch ctrl { +-+ case 1: +-@@ -31 +33,7 @@ +-+func newFunction(cond bool) int { +-+ if cond { //@ loc(ifCondGotoLabel, re`(?s)if.*println.1.`), loc(ifCondGotoLabelWithLabel, re`(?s)if.*goto.label1....`) +-+ return 1 +-+ } +-+ return 0 +-+} +-+ +--- @freeControl4/freecontrol.go -- +-@@ -33,2 +33,3 @@ +-- if x < 10 { //@ loc(multipleCtrl, re`(?s)if.x.*return...next1....`), loc(multipleCtrlNotAllSelected, re`(?s)if.x.*break....`) +-- continue +-+ shouldReturn, ctrl := 
newFunction(x) +-+ if shouldReturn { +-+ return +-@@ -36 +37,4 @@ +-- if x > 2 { +-+ switch ctrl { +-+ case 1: +-+ continue +-+ case 2: +-@@ -39,3 +43 @@ +-- if x == 1 { +-- return //next1 +-- } +-@@ -45 +46,14 @@ +-+func newFunction(x int) (bool, int) { +-+ if x < 10 { //@ loc(multipleCtrl, re`(?s)if.x.*return...next1....`), loc(multipleCtrlNotAllSelected, re`(?s)if.x.*break....`) +-+ return false, 1 +-+ } +-+ if x > 2 { +-+ return false, 2 +-+ } +-+ if x == 1 { +-+ return true, //next1 +-+ 0 +-+ } +-+ return false, 0 +-+} +-+ +--- @freeControl5/freecontrol.go -- +-@@ -33 +33,3 @@ +-- if x < 10 { //@ loc(multipleCtrl, re`(?s)if.x.*return...next1....`), loc(multipleCtrlNotAllSelected, re`(?s)if.x.*break....`) +-+ ctrl := newFunction(x) +-+ switch ctrl { +-+ case 1: +-@@ -35,2 +37 @@ +-- } +-- if x > 2 { +-+ case 2: +-@@ -45 +46,10 @@ +-+func newFunction(x int) int { +-+ if x < 10 { //@ loc(multipleCtrl, re`(?s)if.x.*return...next1....`), loc(multipleCtrlNotAllSelected, re`(?s)if.x.*break....`) +-+ return 1 +-+ } +-+ if x > 2 { +-+ return 2 +-+ } +-+ return 0 +-+} +-+ +--- @freeControl6/freecontrol.go -- +-@@ -48,2 +48,4 @@ +-- if x < 10 { //@ loc(ctrlVarExists, re`(?s)if.x.*continue...next2....`) +-- continue //next2 +-+ ctrl1 := newFunction(x) +-+ switch ctrl1 { +-+ case 1: +-+ continue +-@@ -54 +56,7 @@ +-+func newFunction(x int) int { +-+ if x < 10 { //@ loc(ctrlVarExists, re`(?s)if.x.*continue...next2....`) +-+ return 1 //next2 +-+ } +-+ return 0 +-+} +-+ +--- @freeControl7/freecontrol.go -- +-@@ -57,2 +57,3 @@ +-- if x < 10 { //@ loc(twoReturns, re`(?s)if.x.*return.1....`) +-- return 0 +-+ i, shouldReturn, ctrl := newFunction(x) +-+ if shouldReturn { +-+ return i +-@@ -60,2 +61,2 @@ +-- test := x - 4 +-- if test > 2 { +-+ switch ctrl { +-+ case 1: +-@@ -64,3 +65 @@ +-- if test == 10 { +-- return 1 +-- } +-@@ -79 +77,14 @@ +-+ +-+func newFunction(x int) (int, bool, int) { +-+ if x < 10 { //@ loc(twoReturns, re`(?s)if.x.*return.1....`) +-+ return 0, true, 0 +-+ } +-+ test := x - 4 +-+ if test > 2 { +-+ return 0, false, 1 +-+ } +-+ if test == 10 { +-+ return 1, true, 0 +-+ } +-+ return 0, false, 0 +-+} +--- @freeControl8/freecontrol.go -- +-@@ -68,5 +68,3 @@ +-- for range "def" { //@ loc(forWithLabel, re`(?s)for.*outer.........`) +-- if x < 2 { +-- continue +-- } +-- if x > 10 { +-+ ctrl := newFunction(x) +-+ switch ctrl { +-+ case 1: +-@@ -74 +72 @@ +-- } +-@@ -79 +76,12 @@ +-+ +-+func newFunction(x int) int { +-+ for range "def" { //@ loc(forWithLabel, re`(?s)for.*outer.........`) +-+ if x < 2 { +-+ continue +-+ } +-+ if x > 10 { +-+ return 1 +-+ } +-+ } +-+ return 0 +-+} +diff -urN a/gopls/internal/test/marker/testdata/codeaction/extract_method.txt b/gopls/internal/test/marker/testdata/codeaction/extract_method.txt +--- a/gopls/internal/test/marker/testdata/codeaction/extract_method.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/codeaction/extract_method.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,250 +0,0 @@ +-This test exercises function and method extraction. 
+- +--- flags -- +--ignore_extra_diags +- +--- basic.go -- +-package extract +- +-//@codeaction(A_XLessThanYP, "refactor.extract.method", edit=meth1) +-//@codeaction(A_XLessThanYP, "refactor.extract.function", edit=func1) +-//@codeaction(A_AddP1, "refactor.extract.method", edit=meth2) +-//@codeaction(A_AddP1, "refactor.extract.function", edit=func2) +-//@codeaction(A_AddP2, "refactor.extract.method", edit=meth3) +-//@codeaction(A_AddP2, "refactor.extract.function", edit=func3) +-//@codeaction(A_XLessThanY, "refactor.extract.method", edit=meth4) +-//@codeaction(A_XLessThanY, "refactor.extract.function", edit=func4) +-//@codeaction(A_Add1, "refactor.extract.method", edit=meth5) +-//@codeaction(A_Add1, "refactor.extract.function", edit=func5) +-//@codeaction(A_Add2, "refactor.extract.method", edit=meth6) +-//@codeaction(A_Add2, "refactor.extract.function", edit=func6) +- +-type A struct { +- x int +- y int +-} +- +-func (a *A) XLessThanYP() bool { +- return a.x < a.y //@loc(A_XLessThanYP, re`return.*a\.y`) +-} +- +-func (a *A) AddP() int { +- sum := a.x + a.y //@loc(A_AddP1, re`sum.*a\.y`) +- return sum //@loc(A_AddP2, re`return.*?sum`) +-} +- +-func (a A) XLessThanY() bool { +- return a.x < a.y //@loc(A_XLessThanY, re`return.*a\.y`) +-} +- +-func (a A) Add() int { +- sum := a.x + a.y //@loc(A_Add1, re`sum.*a\.y`) +- return sum //@loc(A_Add2, re`return.*?sum`) +-} +- +--- @func1/basic.go -- +-@@ -22 +22 @@ +-- return a.x < a.y //@loc(A_XLessThanYP, re`return.*a\.y`) +-+ return newFunction(a) //@loc(A_XLessThanYP, re`return.*a\.y`) +-@@ -25 +25,4 @@ +-+func newFunction(a *A) bool { +-+ return a.x < a.y +-+} +-+ +--- @func2/basic.go -- +-@@ -26 +26 @@ +-- sum := a.x + a.y //@loc(A_AddP1, re`sum.*a\.y`) +-+ sum := newFunction(a) //@loc(A_AddP1, re`sum.*a\.y`) +-@@ -30 +30,5 @@ +-+func newFunction(a *A) int { +-+ sum := a.x + a.y +-+ return sum +-+} +-+ +--- @func3/basic.go -- +-@@ -27 +27 @@ +-- return sum //@loc(A_AddP2, re`return.*?sum`) +-+ return newFunction(sum) //@loc(A_AddP2, re`return.*?sum`) +-@@ -30 +30,4 @@ +-+func newFunction(sum int) int { +-+ return sum +-+} +-+ +--- @func4/basic.go -- +-@@ -31 +31 @@ +-- return a.x < a.y //@loc(A_XLessThanY, re`return.*a\.y`) +-+ return newFunction(a) //@loc(A_XLessThanY, re`return.*a\.y`) +-@@ -34 +34,4 @@ +-+func newFunction(a A) bool { +-+ return a.x < a.y +-+} +-+ +--- @func5/basic.go -- +-@@ -35 +35 @@ +-- sum := a.x + a.y //@loc(A_Add1, re`sum.*a\.y`) +-+ sum := newFunction(a) //@loc(A_Add1, re`sum.*a\.y`) +-@@ -39 +39,5 @@ +-+func newFunction(a A) int { +-+ sum := a.x + a.y +-+ return sum +-+} +-+ +--- @func6/basic.go -- +-@@ -36 +36 @@ +-- return sum //@loc(A_Add2, re`return.*?sum`) +-+ return newFunction(sum) //@loc(A_Add2, re`return.*?sum`) +-@@ -39 +39,4 @@ +-+func newFunction(sum int) int { +-+ return sum +-+} +-+ +--- @meth1/basic.go -- +-@@ -22 +22 @@ +-- return a.x < a.y //@loc(A_XLessThanYP, re`return.*a\.y`) +-+ return a.newMethod() //@loc(A_XLessThanYP, re`return.*a\.y`) +-@@ -25 +25,4 @@ +-+func (a *A) newMethod() bool { +-+ return a.x < a.y +-+} +-+ +--- @meth2/basic.go -- +-@@ -26 +26 @@ +-- sum := a.x + a.y //@loc(A_AddP1, re`sum.*a\.y`) +-+ sum := a.newMethod() //@loc(A_AddP1, re`sum.*a\.y`) +-@@ -30 +30,5 @@ +-+func (a *A) newMethod() int { +-+ sum := a.x + a.y +-+ return sum +-+} +-+ +--- @meth3/basic.go -- +-@@ -27 +27 @@ +-- return sum //@loc(A_AddP2, re`return.*?sum`) +-+ return a.newMethod(sum) //@loc(A_AddP2, re`return.*?sum`) +-@@ -30 +30,4 @@ +-+func (*A) newMethod(sum int) int { +-+ return sum +-+} +-+ +--- 
@meth4/basic.go -- +-@@ -31 +31 @@ +-- return a.x < a.y //@loc(A_XLessThanY, re`return.*a\.y`) +-+ return a.newMethod() //@loc(A_XLessThanY, re`return.*a\.y`) +-@@ -34 +34,4 @@ +-+func (a A) newMethod() bool { +-+ return a.x < a.y +-+} +-+ +--- @meth5/basic.go -- +-@@ -35 +35 @@ +-- sum := a.x + a.y //@loc(A_Add1, re`sum.*a\.y`) +-+ sum := a.newMethod() //@loc(A_Add1, re`sum.*a\.y`) +-@@ -39 +39,5 @@ +-+func (a A) newMethod() int { +-+ sum := a.x + a.y +-+ return sum +-+} +-+ +--- @meth6/basic.go -- +-@@ -36 +36 @@ +-- return sum //@loc(A_Add2, re`return.*?sum`) +-+ return a.newMethod(sum) //@loc(A_Add2, re`return.*?sum`) +-@@ -39 +39,4 @@ +-+func (A) newMethod(sum int) int { +-+ return sum +-+} +-+ +--- context.go -- +-package extract +- +-import ( +- "context" +- "testing" +-) +- +-//@codeaction(B_AddP, "refactor.extract.method", edit=contextMeth1) +-//@codeaction(B_AddP, "refactor.extract.function", edit=contextFunc1) +-//@codeaction(B_LongList, "refactor.extract.method", edit=contextMeth2) +-//@codeaction(B_LongList, "refactor.extract.function", edit=contextFunc2) +-//@codeaction(B_AddPWithB, "refactor.extract.function", edit=contextFuncB) +-//@codeaction(B_LongListWithT, "refactor.extract.function", edit=contextFuncT) +- +-type B struct { +- x int +- y int +-} +- +-func (b *B) AddP(ctx context.Context) (int, error) { +- sum := b.x + b.y +- return sum, ctx.Err() //@loc(B_AddP, re`return.*ctx\.Err\(\)`) +-} +- +-func (b *B) LongList(ctx context.Context) (int, error) { +- p1 := 1 +- p2 := 1 +- p3 := 1 +- return p1 + p2 + p3, ctx.Err() //@loc(B_LongList, re`return.*ctx\.Err\(\)`) +-} +- +-func (b *B) AddPWithB(ctx context.Context, tB *testing.B) (int, error) { +- sum := b.x + b.y //@loc(B_AddPWithB, re`(?s:^.*?Err\(\))`) +- tB.Skip() +- return sum, ctx.Err() +-} +- +-func (b *B) LongListWithT(ctx context.Context, t *testing.T) (int, error) { +- p1 := 1 +- p2 := 1 +- p3 := 1 +- p4 := p1 + p2 //@loc(B_LongListWithT, re`(?s:^.*?Err\(\))`) +- t.Skip() +- return p4 + p3, ctx.Err() +-} +--- @contextMeth1/context.go -- +-@@ -22 +22 @@ +-- return sum, ctx.Err() //@loc(B_AddP, re`return.*ctx\.Err\(\)`) +-+ return b.newMethod(ctx, sum) //@loc(B_AddP, re`return.*ctx\.Err\(\)`) +-@@ -25 +25,4 @@ +-+func (*B) newMethod(ctx context.Context, sum int) (int, error) { +-+ return sum, ctx.Err() +-+} +-+ +--- @contextMeth2/context.go -- +-@@ -29 +29 @@ +-- return p1 + p2 + p3, ctx.Err() //@loc(B_LongList, re`return.*ctx\.Err\(\)`) +-+ return b.newMethod(ctx, p1, p2, p3) //@loc(B_LongList, re`return.*ctx\.Err\(\)`) +-@@ -32 +32,4 @@ +-+func (*B) newMethod(ctx context.Context, p1 int, p2 int, p3 int) (int, error) { +-+ return p1 + p2 + p3, ctx.Err() +-+} +-+ +--- @contextFunc2/context.go -- +-@@ -29 +29 @@ +-- return p1 + p2 + p3, ctx.Err() //@loc(B_LongList, re`return.*ctx\.Err\(\)`) +-+ return newFunction(ctx, p1, p2, p3) //@loc(B_LongList, re`return.*ctx\.Err\(\)`) +-@@ -32 +32,4 @@ +-+func newFunction(ctx context.Context, p1 int, p2 int, p3 int) (int, error) { +-+ return p1 + p2 + p3, ctx.Err() +-+} +-+ +--- @contextFunc1/context.go -- +-@@ -22 +22 @@ +-- return sum, ctx.Err() //@loc(B_AddP, re`return.*ctx\.Err\(\)`) +-+ return newFunction(ctx, sum) //@loc(B_AddP, re`return.*ctx\.Err\(\)`) +-@@ -25 +25,4 @@ +-+func newFunction(ctx context.Context, sum int) (int, error) { +-+ return sum, ctx.Err() +-+} +-+ +--- @contextFuncB/context.go -- +-@@ -33 +33,4 @@ +-+ return newFunction(ctx, tB, b) +-+} +-+ +-+func newFunction(ctx context.Context, tB *testing.B, b *B) (int, error) { +--- @contextFuncT/context.go -- 
+-@@ -42 +42,4 @@ +-+ return newFunction(ctx, t, p1, p2, p3) +-+} +-+ +-+func newFunction(ctx context.Context, t *testing.T, p1 int, p2 int, p3 int) (int, error) { +diff -urN a/gopls/internal/test/marker/testdata/codeaction/extract_return_err.txt b/gopls/internal/test/marker/testdata/codeaction/extract_return_err.txt +--- a/gopls/internal/test/marker/testdata/codeaction/extract_return_err.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/codeaction/extract_return_err.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,140 +0,0 @@ +-This test verifies various behaviors of function extraction when every return statement in the extracted block is an error handling return. +- +--- go.mod -- +-module mod.test/extract +- +-go 1.18 +- +--- errhandling.go -- +-package err_handling +-import ( +- "encoding/json" +- "fmt" +-) +- +-//@codeaction(errHandlingBlk1, "refactor.extract.function", edit=err_handling_1) +-//@codeaction(errHandlingBlk2, "refactor.extract.function", edit=err_handling_2) +-//@codeaction(errHandlingBlk3, "refactor.extract.function", edit=err_handling_3) +-//@codeaction(errHandlingBlk4, "refactor.extract.function", edit=err_handling_4) +-//@codeaction(errHandlingBlk5, "refactor.extract.function", edit=err_handling_5) +- +-func Func() error { +- a, err := json.Marshal(0) //@loc(errHandlingBlk1, re`(?s)a.*err1....`) +- if err != nil { +- return fmt.Errorf("1: %w", err) +- } +- b, err1 := json.Marshal(0) +- if err1 != nil { +- return fmt.Errorf("2: %w", err1) +- } +- fmt.Println(string(a), string(b)) +- return nil +-} +- +-func FuncReturnsInt() (int, error) { +- a, err := json.Marshal(0) //@loc(errHandlingBlk2, re`(?s)a.*err2....`) +- if err != nil { +- return 0, fmt.Errorf("1: %w", err) +- } +- b, err2 := json.Marshal(0) +- if err2 != nil { +- return 1, fmt.Errorf("2: %w", err2) +- } +- fmt.Println(string(a), string(b)) +- return 3, nil +-} +- +-func FuncHasNilReturns() error { +- if _, err := json.Marshal(0); err != nil { //@loc(errHandlingBlk3, re`(?s)if.*return.nil`) +- return err +- } +- if _, err := json.Marshal(1); err != nil { +- return err +- } +- return nil +-} +- +-func FuncHasOtherReturns() ([]byte, error) { +- if a, err := json.Marshal(0); err != nil { //@loc(errHandlingBlk4, re`(?s)if.*Marshal.1.`) +- return a, err +- } +- return json.Marshal(1) +-} +- +-func FuncErrNameAlreadyExists(err error) ([]byte, error) { +- if a, err := json.Marshal(0); err != nil { //@loc(errHandlingBlk5, re`(?s)if.*a,.err...`) +- return a, err +- } +- if a, err := json.Marshal(3); err != nil { +- return a, err +- } +- return []byte{}, nil +-} +- +--- @err_handling_1/errhandling.go -- +-@@ -14 +14,9 @@ +-+ a, b, err := newFunction() +-+ if err != nil { +-+ return err +-+ } +-+ fmt.Println(string(a), string(b)) +-+ return nil +-+} +-+ +-+func newFunction() ([]byte, []byte, error) { +-@@ -16 +25 @@ +-- return fmt.Errorf("1: %w", err) +-+ return nil, nil, fmt.Errorf("1: %w", err) +-@@ -20 +29 @@ +-- return fmt.Errorf("2: %w", err1) +-+ return nil, nil, fmt.Errorf("2: %w", err1) +-@@ -22,2 +31 @@ +-- fmt.Println(string(a), string(b)) +-- return nil +-+ return a, b, nil +--- @err_handling_2/errhandling.go -- +-@@ -27 +27,9 @@ +-+ a, b, i, err := newFunction() +-+ if err != nil { +-+ return i, err +-+ } +-+ fmt.Println(string(a), string(b)) +-+ return 3, nil +-+} +-+ +-+func newFunction() ([]byte, []byte, int, error) { +-@@ -29 +38 @@ +-- return 0, fmt.Errorf("1: %w", err) +-+ return nil, nil, 0, fmt.Errorf("1: %w", err) +-@@ -33 +42 @@ +-- return 1, fmt.Errorf("2: %w", 
err2) +-+ return nil, nil, 1, fmt.Errorf("2: %w", err2) +-@@ -35,2 +44 @@ +-- fmt.Println(string(a), string(b)) +-- return 3, nil +-+ return a, b, 0, nil +--- @err_handling_3/errhandling.go -- +-@@ -40 +40,4 @@ +-+ return newFunction() +-+} +-+ +-+func newFunction() error { +--- @err_handling_4/errhandling.go -- +-@@ -50 +50,4 @@ +-+ return newFunction() +-+} +-+ +-+func newFunction() ([]byte, error) { +--- @err_handling_5/errhandling.go -- +-@@ -57 +57,8 @@ +-+ result, err1 := newFunction() +-+ if err1 != nil { +-+ return result, err1 +-+ } +-+ return []byte{}, nil +-+} +-+ +-+func newFunction() ([]byte, error) { +-@@ -63 +71 @@ +-- return []byte{}, nil +-+ return nil, nil +diff -urN a/gopls/internal/test/marker/testdata/codeaction/extracttofile.txt b/gopls/internal/test/marker/testdata/codeaction/extracttofile.txt +--- a/gopls/internal/test/marker/testdata/codeaction/extracttofile.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/codeaction/extracttofile.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,351 +0,0 @@ +-This test checks the behavior of the 'extract to a new file' code action. +- +--- flags -- +--ignore_extra_diags +- +--- go.mod -- +-module golang.org/lsptests/extracttofile +- +-go 1.18 +- +--- a.go -- +-package main +- +-// docs +-func fn() {} //@codeaction("func", "refactor.extract.toNewFile", edit=function_declaration) +- +-func fn2() {} //@codeaction("fn2", "refactor.extract.toNewFile", edit=only_select_func_name) +- +-func fn3() {} //@codeaction(re`()fn3`, "refactor.extract.toNewFile", edit=zero_width_selection_on_func_name) +- +-// docs +-type T int //@codeaction("type", "refactor.extract.toNewFile", edit=type_declaration) +- +-// docs +-var V int //@codeaction("var", "refactor.extract.toNewFile", edit=var_declaration) +- +-// docs +-const K = "" //@codeaction("const", "refactor.extract.toNewFile", edit=const_declaration) +- +-const ( //@codeaction("const", "refactor.extract.toNewFile", edit=const_declaration_multiple_specs) +- P = iota +- Q +- R +-) +- +-func fnA () {} //@codeaction("func", "refactor.extract.toNewFile", end=mdEnd, result=multiple_declarations) +- +-// unattached comment +- +-func fnB () {} //@loc(mdEnd, "}") +- +--- @const_declaration_multiple_specs/p.go -- +-@@ -0,0 +1,7 @@ +-+package main +-+ +-+const ( //@codeaction("const", "refactor.extract.toNewFile", edit=const_declaration_multiple_specs) +-+ P = iota +-+ Q +-+ R +-+) +--- @multiple_declarations/fna.go -- +-package main +- +-func fnA() {} //@codeaction("func", "refactor.extract.toNewFile", end=mdEnd, result=multiple_declarations) +- +-// unattached comment +- +-func fnB() {} +--- @multiple_declarations/a.go -- +-package main +- +-// docs +-func fn() {} //@codeaction("func", "refactor.extract.toNewFile", edit=function_declaration) +- +-func fn2() {} //@codeaction("fn2", "refactor.extract.toNewFile", edit=only_select_func_name) +- +-func fn3() {} //@codeaction(re`()fn3`, "refactor.extract.toNewFile", edit=zero_width_selection_on_func_name) +- +-// docs +-type T int //@codeaction("type", "refactor.extract.toNewFile", edit=type_declaration) +- +-// docs +-var V int //@codeaction("var", "refactor.extract.toNewFile", edit=var_declaration) +- +-// docs +-const K = "" //@codeaction("const", "refactor.extract.toNewFile", edit=const_declaration) +- +-const ( //@codeaction("const", "refactor.extract.toNewFile", edit=const_declaration_multiple_specs) +- P = iota +- Q +- R +-) +- +-//@loc(mdEnd, "}") +- +--- @const_declaration_multiple_specs/a.go -- +-@@ -19,6 +19 @@ +--const 
( //@codeaction("const", "refactor.extract.toNewFile", edit=const_declaration_multiple_specs) +-- P = iota +-- Q +-- R +--) +-- +--- existing.go -- +--- existing2.go -- +--- existing2.1.go -- +--- b.go -- +-package main +-func existing() {} //@codeaction("func", "refactor.extract.toNewFile", edit=file_name_conflict) +-func existing2() {} //@codeaction("func", "refactor.extract.toNewFile", edit=file_name_conflict_again) +- +--- single_import.go -- +-package main +-import "fmt" +-func F() { //@codeaction("func", "refactor.extract.toNewFile", edit=single_import) +- fmt.Println() +-} +- +--- multiple_imports.go -- +-package main +-import ( +- "fmt" +- "log" +- time1 "time" +-) +-func init(){ +- log.Println() +-} +-func F() { //@codeaction("func", "refactor.extract.toNewFile", edit=multiple_imports) +- fmt.Println() +-} +-func g() string{ //@codeaction("func", "refactor.extract.toNewFile", edit=renamed_import) +- return time1.Now().string() +-} +- +--- blank_import.go -- +-package main +-import _ "fmt" +-func F() {} //@codeaction("func", "refactor.extract.toNewFile", edit=blank_import) +- +- +- +--- @blank_import/blank_import.go -- +-@@ -3 +3 @@ +--func F() {} //@codeaction("func", "refactor.extract.toNewFile", edit=blank_import) +-+//@codeaction("func", "refactor.extract.toNewFile", edit=blank_import) +--- @blank_import/f.go -- +-@@ -0,0 +1,3 @@ +-+package main +-+ +-+func F() {} +--- @const_declaration/a.go -- +-@@ -16,2 +16 @@ +--// docs +--const K = "" //@codeaction("const", "refactor.extract.toNewFile", edit=const_declaration) +-+//@codeaction("const", "refactor.extract.toNewFile", edit=const_declaration) +--- @const_declaration/k.go -- +-@@ -0,0 +1,4 @@ +-+package main +-+ +-+// docs +-+const K = "" +--- @file_name_conflict/b.go -- +-@@ -2 +2 @@ +--func existing() {} //@codeaction("func", "refactor.extract.toNewFile", edit=file_name_conflict) +-+//@codeaction("func", "refactor.extract.toNewFile", edit=file_name_conflict) +--- @file_name_conflict/existing.1.go -- +-@@ -0,0 +1,3 @@ +-+package main +-+ +-+func existing() {} +--- @file_name_conflict_again/b.go -- +-@@ -3 +3 @@ +--func existing2() {} //@codeaction("func", "refactor.extract.toNewFile", edit=file_name_conflict_again) +-+//@codeaction("func", "refactor.extract.toNewFile", edit=file_name_conflict_again) +--- @file_name_conflict_again/existing2.2.go -- +-@@ -0,0 +1,3 @@ +-+package main +-+ +-+func existing2() {} +--- @function_declaration/a.go -- +-@@ -3,2 +3 @@ +--// docs +--func fn() {} //@codeaction("func", "refactor.extract.toNewFile", edit=function_declaration) +-+//@codeaction("func", "refactor.extract.toNewFile", edit=function_declaration) +--- @function_declaration/fn.go -- +-@@ -0,0 +1,4 @@ +-+package main +-+ +-+// docs +-+func fn() {} +--- @multiple_imports/f.go -- +-@@ -0,0 +1,9 @@ +-+package main +-+ +-+import ( +-+ "fmt" +-+) +-+ +-+func F() { //@codeaction("func", "refactor.extract.toNewFile", edit=multiple_imports) +-+ fmt.Println() +-+} +--- @multiple_imports/multiple_imports.go -- +-@@ -3 +3 @@ +-- "fmt" +-+ +-@@ -10,3 +10 @@ +--func F() { //@codeaction("func", "refactor.extract.toNewFile", edit=multiple_imports) +-- fmt.Println() +--} +--- @only_select_func_name/a.go -- +-@@ -6 +6 @@ +--func fn2() {} //@codeaction("fn2", "refactor.extract.toNewFile", edit=only_select_func_name) +-+//@codeaction("fn2", "refactor.extract.toNewFile", edit=only_select_func_name) +--- @only_select_func_name/fn2.go -- +-@@ -0,0 +1,3 @@ +-+package main +-+ +-+func fn2() {} +--- @single_import/f.go -- +-@@ -0,0 +1,9 @@ +-+package main 
+-+ +-+import ( +-+ "fmt" +-+) +-+ +-+func F() { //@codeaction("func", "refactor.extract.toNewFile", edit=single_import) +-+ fmt.Println() +-+} +--- @single_import/single_import.go -- +-@@ -2,4 +2 @@ +--import "fmt" +--func F() { //@codeaction("func", "refactor.extract.toNewFile", edit=single_import) +-- fmt.Println() +--} +--- @type_declaration/a.go -- +-@@ -10,2 +10 @@ +--// docs +--type T int //@codeaction("type", "refactor.extract.toNewFile", edit=type_declaration) +-+//@codeaction("type", "refactor.extract.toNewFile", edit=type_declaration) +--- @type_declaration/t.go -- +-@@ -0,0 +1,4 @@ +-+package main +-+ +-+// docs +-+type T int +--- @var_declaration/a.go -- +-@@ -13,2 +13 @@ +--// docs +--var V int //@codeaction("var", "refactor.extract.toNewFile", edit=var_declaration) +-+//@codeaction("var", "refactor.extract.toNewFile", edit=var_declaration) +--- @var_declaration/v.go -- +-@@ -0,0 +1,4 @@ +-+package main +-+ +-+// docs +-+var V int +--- @zero_width_selection_on_func_name/a.go -- +-@@ -8 +8 @@ +--func fn3() {} //@codeaction(re`()fn3`, "refactor.extract.toNewFile", edit=zero_width_selection_on_func_name) +-+//@codeaction(re`()fn3`, "refactor.extract.toNewFile", edit=zero_width_selection_on_func_name) +--- @zero_width_selection_on_func_name/fn3.go -- +-@@ -0,0 +1,3 @@ +-+package main +-+ +-+func fn3() {} +--- @renamed_import/g.go -- +-@@ -0,0 +1,9 @@ +-+package main +-+ +-+import ( +-+ time1 "time" +-+) +-+ +-+func g() string { //@codeaction("func", "refactor.extract.toNewFile", edit=renamed_import) +-+ return time1.Now().string() +-+} +--- @renamed_import/multiple_imports.go -- +-@@ -5 +5 @@ +-- time1 "time" +-+ +-@@ -13,4 +13 @@ +--func g() string{ //@codeaction("func", "refactor.extract.toNewFile", edit=renamed_import) +-- return time1.Now().string() +--} +-- +--- copyright.go -- +-// Copyright 2020 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package main +- +-// docs +-const C = "" //@codeaction("const", "refactor.extract.toNewFile", edit=copyright) +- +--- @copyright/c.go -- +-@@ -0,0 +1,8 @@ +-+// Copyright 2020 The Go Authors. All rights reserved. +-+// Use of this source code is governed by a BSD-style +-+// license that can be found in the LICENSE file. +-+ +-+package main +-+ +-+// docs +-+const C = "" +--- @copyright/copyright.go -- +-@@ -7,2 +7 @@ +--// docs +--const C = "" //@codeaction("const", "refactor.extract.toNewFile", edit=copyright) +-+//@codeaction("const", "refactor.extract.toNewFile", edit=copyright) +--- buildconstraint.go -- +-//go:build go1.18 +- +-package main +- +-// docs +-const C = "" //@codeaction("const", "refactor.extract.toNewFile", edit=buildconstraint) +- +--- @buildconstraint/buildconstraint.go -- +-@@ -5,2 +5 @@ +--// docs +--const C = "" //@codeaction("const", "refactor.extract.toNewFile", edit=buildconstraint) +-+//@codeaction("const", "refactor.extract.toNewFile", edit=buildconstraint) +--- @buildconstraint/c.go -- +-@@ -0,0 +1,6 @@ +-+//go:build go1.18 +-+ +-+package main +-+ +-+// docs +-+const C = "" +--- copyrightandbuildconstraint.go -- +-// Copyright 2020 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. 
+- +-//go:build go1.18 +- +-package main +- +-// docs +-const C = "" //@codeaction("const", "refactor.extract.toNewFile", edit=copyrightandbuildconstraint) +--- @copyrightandbuildconstraint/c.go -- +-@@ -0,0 +1,10 @@ +-+// Copyright 2020 The Go Authors. All rights reserved. +-+// Use of this source code is governed by a BSD-style +-+// license that can be found in the LICENSE file. +-+ +-+//go:build go1.18 +-+ +-+package main +-+ +-+// docs +-+const C = "" +--- @copyrightandbuildconstraint/copyrightandbuildconstraint.go -- +-@@ -9,2 +9 @@ +--// docs +--const C = "" //@codeaction("const", "refactor.extract.toNewFile", edit=copyrightandbuildconstraint) +-+//@codeaction("const", "refactor.extract.toNewFile", edit=copyrightandbuildconstraint) +diff -urN a/gopls/internal/test/marker/testdata/codeaction/extract_variable-67905.txt b/gopls/internal/test/marker/testdata/codeaction/extract_variable-67905.txt +--- a/gopls/internal/test/marker/testdata/codeaction/extract_variable-67905.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/codeaction/extract_variable-67905.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,29 +0,0 @@ +-This test verifies the fix for golang/go#67905: Extract variable from type +-switch produces invalid code +- +--- go.mod -- +-module mod.test/extract +- +-go 1.18 +- +--- extract_switch.go -- +-package extract +- +-import ( +- "io" +-) +- +-func f() io.Reader +- +-func main() { +- switch r := f().(type) { //@codeaction("f()", "refactor.extract.variable", edit=type_switch_func_call) +- default: +- _ = r +- } +-} +- +--- @type_switch_func_call/extract_switch.go -- +-@@ -10 +10,2 @@ +-- switch r := f().(type) { //@codeaction("f()", "refactor.extract.variable", edit=type_switch_func_call) +-+ newVar := f() +-+ switch r := newVar.(type) { //@codeaction("f()", "refactor.extract.variable", edit=type_switch_func_call) +diff -urN a/gopls/internal/test/marker/testdata/codeaction/extract_variable-70563.txt b/gopls/internal/test/marker/testdata/codeaction/extract_variable-70563.txt +--- a/gopls/internal/test/marker/testdata/codeaction/extract_variable-70563.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/codeaction/extract_variable-70563.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,50 +0,0 @@ +-This test verifies the fix for golang/go#70563: refactor.extract.variable +-inserts new statement before the scope of its free symbols. 
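For orientation, the golang/go#67905 fixture above reduces to the following before/after shape in ordinary Go source. This is a minimal illustrative sketch, not fixture content; the f and newVar names simply mirror the fixture's conventions.

package extract

import "io"

func f() io.Reader { return nil }

func before() {
	// Extracting "f()" from the type-switch header used to produce a
	// declaration in a position where it cannot parse (go#67905).
	switch r := f().(type) {
	default:
		_ = r
	}
}

func after() {
	// The fix hoists the extracted expression into its own statement
	// immediately before the switch, then switches on the new variable.
	newVar := f()
	switch r := newVar.(type) {
	default:
		_ = r
	}
}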
+- +--- flags -- +--ignore_extra_diags +- +--- inside_else.go -- +-package extract +- +-func _() { +- if x := 1; true { +- +- } else if y := x + 1; true { //@codeaction("x + 1", "refactor.extract.variable", err=re"Else's init statement has free variable declaration") +- +- } +-} +--- inside_case.go -- +-package extract +- +-func _() { +- switch x := 1; x { +- case x + 1: //@codeaction("x + 1", "refactor.extract.variable-all", err=re"SwitchStmt's init statement has free variable declaration") +- y := x + 1 //@codeaction("x + 1", "refactor.extract.variable-all", err=re"SwitchStmt's init statement has free variable declaration") +- _ = y +- case 3: +- y := x + 1 //@codeaction("x + 1", "refactor.extract.variable-all", err=re"SwitchStmt's init statement has free variable declaration") +- _ = y +- } +-} +--- parent_if.go -- +-package extract +- +-func _() { +- if x := 1; x > 0 { +- y = x + 1 //@codeaction("x + 1", "refactor.extract.variable-all", err=re"IfStmt's init statement has free variable declaration") +- } else { +- y = x + 1 //@codeaction("x + 1", "refactor.extract.variable-all", err=re"IfStmt's init statement has free variable declaration") +- } +-} +--- parent_switch.go -- +-package extract +- +-func _() { +- switch x := 1; x { +- case 1: +- y = x + 1 //@codeaction("x + 1", "refactor.extract.variable-all", err=re"SwitchStmt's init statement has free variable declaration") +- case 3: +- y = x + 1 //@codeaction("x + 1", "refactor.extract.variable-all", err=re"SwitchStmt's init statement has free variable declaration") +- } +-} +diff -urN a/gopls/internal/test/marker/testdata/codeaction/extract_variable_all_resolve.txt b/gopls/internal/test/marker/testdata/codeaction/extract_variable_all_resolve.txt +--- a/gopls/internal/test/marker/testdata/codeaction/extract_variable_all_resolve.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/codeaction/extract_variable_all_resolve.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,249 +0,0 @@ +-This test checks the behavior of the 'replace all occurrences of expression' code action, with resolve support. +-See extract_variable_all.txt for the same test without resolve support. 
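The golang/go#70563 fixtures above refuse the extraction rather than emit broken code, because the selected expression's free variable is declared in the init statement of the enclosing if/switch. An illustrative reconstruction of why, assuming nothing beyond what those fixtures show:

package extract

func _() {
	if x := 1; true {
		_ = x
	} else if y := x + 1; true { // extracting "x + 1" is rejected here
		// A naive extraction would have to hoist the expression above the if:
		//
		//     newVar := x + 1   // invalid: x is not declared yet
		//     if x := 1; true {
		//     } else if y := newVar; true { ... }
		//
		// so gopls reports that the init statement has a free variable declaration.
		_ = y
	}
}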
+- +--- capabilities.json -- +-{ +- "textDocument": { +- "codeAction": { +- "dataSupport": true, +- "resolveSupport": { +- "properties": ["edit"] +- } +- } +- } +-} +--- flags -- +--ignore_extra_diags +- +--- basic_lit.go -- +-package extract_all +- +-func _() { +- var _ = 1 + 2 + 3 //@codeaction("1 + 2", "refactor.extract.constant-all", edit=basic_lit) +- var _ = 1 + 2 + 3 //@codeaction("1 + 2", "refactor.extract.constant-all", edit=basic_lit) +-} +--- @basic_lit/basic_lit.go -- +-@@ -4,2 +4,3 @@ +-- var _ = 1 + 2 + 3 //@codeaction("1 + 2", "refactor.extract.constant-all", edit=basic_lit) +-- var _ = 1 + 2 + 3 //@codeaction("1 + 2", "refactor.extract.constant-all", edit=basic_lit) +-+ const newConst = 1 + 2 +-+ var _ = newConst + 3 //@codeaction("1 + 2", "refactor.extract.constant-all", edit=basic_lit) +-+ var _ = newConst + 3 //@codeaction("1 + 2", "refactor.extract.constant-all", edit=basic_lit) +--- nested_scope.go -- +-package extract_all +- +-func _() { +- newConst1 := 0 +- if true { +- x := 1 + 2 + 3 //@codeaction("1 + 2 + 3", "refactor.extract.constant-all", edit=nested_scope) +- } +- if true { +- newConst := 0 +- if false { +- y := 1 + 2 + 3 //@codeaction("1 + 2 + 3", "refactor.extract.constant-all", edit=nested_scope) +- } +- } +- z := 1 + 2 + 3 //@codeaction("1 + 2 + 3", "refactor.extract.constant-all", edit=nested_scope) +-} +--- @nested_scope/nested_scope.go -- +-@@ -5 +5 @@ +-+ const newConst2 = 1 + 2 + 3 +-@@ -6 +7 @@ +-- x := 1 + 2 + 3 //@codeaction("1 + 2 + 3", "refactor.extract.constant-all", edit=nested_scope) +-+ x := newConst2 //@codeaction("1 + 2 + 3", "refactor.extract.constant-all", edit=nested_scope) +-@@ -11 +12 @@ +-- y := 1 + 2 + 3 //@codeaction("1 + 2 + 3", "refactor.extract.constant-all", edit=nested_scope) +-+ y := newConst2 //@codeaction("1 + 2 + 3", "refactor.extract.constant-all", edit=nested_scope) +-@@ -14 +15 @@ +-- z := 1 + 2 + 3 //@codeaction("1 + 2 + 3", "refactor.extract.constant-all", edit=nested_scope) +-+ z := newConst2 //@codeaction("1 + 2 + 3", "refactor.extract.constant-all", edit=nested_scope) +--- function_call.go -- +-package extract_all +- +-import "fmt" +- +-func _() { +- result := fmt.Sprintf("%d", 42) //@codeaction(`fmt.Sprintf("%d", 42)`, "refactor.extract.variable-all", edit=replace_func_call) +- if result != "" { +- anotherResult := fmt.Sprintf("%d", 42) //@codeaction(`fmt.Sprintf("%d", 42)`, "refactor.extract.variable-all", edit=replace_func_call) +- _ = anotherResult +- } +-} +--- @replace_func_call/function_call.go -- +-@@ -6 +6,2 @@ +-- result := fmt.Sprintf("%d", 42) //@codeaction(`fmt.Sprintf("%d", 42)`, "refactor.extract.variable-all", edit=replace_func_call) +-+ newVar := fmt.Sprintf("%d", 42) +-+ result := newVar //@codeaction(`fmt.Sprintf("%d", 42)`, "refactor.extract.variable-all", edit=replace_func_call) +-@@ -8 +9 @@ +-- anotherResult := fmt.Sprintf("%d", 42) //@codeaction(`fmt.Sprintf("%d", 42)`, "refactor.extract.variable-all", edit=replace_func_call) +-+ anotherResult := newVar //@codeaction(`fmt.Sprintf("%d", 42)`, "refactor.extract.variable-all", edit=replace_func_call) +--- composite_literals.go -- +-package extract_all +- +-func _() { +- data := []int{1, 2, 3} //@codeaction("[]int{1, 2, 3}", "refactor.extract.variable-all", edit=composite) +- processData(data) +- moreData := []int{1, 2, 3} //@codeaction("[]int{1, 2, 3}", "refactor.extract.variable-all", edit=composite) +- processData(moreData) +-} +- +-func processData(d []int) {} +--- @composite/composite_literals.go -- +-@@ -4 +4,2 @@ +-- data := []int{1, 2, 3} 
//@codeaction("[]int{1, 2, 3}", "refactor.extract.variable-all", edit=composite) +-+ newVar := []int{1, 2, 3} +-+ data := newVar //@codeaction("[]int{1, 2, 3}", "refactor.extract.variable-all", edit=composite) +-@@ -6 +7 @@ +-- moreData := []int{1, 2, 3} //@codeaction("[]int{1, 2, 3}", "refactor.extract.variable-all", edit=composite) +-+ moreData := newVar //@codeaction("[]int{1, 2, 3}", "refactor.extract.variable-all", edit=composite) +--- selector.go -- +-package extract_all +- +-type MyStruct struct { +- Value int +-} +- +-func _() { +- s := MyStruct{Value: 10} +- v := s.Value //@codeaction("s.Value", "refactor.extract.variable-all", edit=sel) +- if v > 0 { +- w := s.Value //@codeaction("s.Value", "refactor.extract.variable-all", edit=sel) +- _ = w +- } +-} +--- @sel/selector.go -- +-@@ -9 +9,2 @@ +-- v := s.Value //@codeaction("s.Value", "refactor.extract.variable-all", edit=sel) +-+ newVar := s.Value +-+ v := newVar //@codeaction("s.Value", "refactor.extract.variable-all", edit=sel) +-@@ -11 +12 @@ +-- w := s.Value //@codeaction("s.Value", "refactor.extract.variable-all", edit=sel) +-+ w := newVar //@codeaction("s.Value", "refactor.extract.variable-all", edit=sel) +--- index.go -- +-package extract_all +- +-func _() { +- arr := []int{1, 2, 3} +- val := arr[0] //@codeaction("arr[0]", "refactor.extract.variable-all", edit=index) +- val2 := arr[0] //@codeaction("arr[0]", "refactor.extract.variable-all", edit=index) +-} +--- @index/index.go -- +-@@ -5,2 +5,3 @@ +-- val := arr[0] //@codeaction("arr[0]", "refactor.extract.variable-all", edit=index) +-- val2 := arr[0] //@codeaction("arr[0]", "refactor.extract.variable-all", edit=index) +-+ newVar := arr[0] +-+ val := newVar //@codeaction("arr[0]", "refactor.extract.variable-all", edit=index) +-+ val2 := newVar //@codeaction("arr[0]", "refactor.extract.variable-all", edit=index) +--- slice_expr.go -- +-package extract_all +- +-func _() { +- data := []int{1, 2, 3, 4, 5} +- part := data[1:3] //@codeaction("data[1:3]", "refactor.extract.variable-all", edit=slice) +- anotherPart := data[1:3] //@codeaction("data[1:3]", "refactor.extract.variable-all", edit=slice) +-} +--- @slice/slice_expr.go -- +-@@ -5,2 +5,3 @@ +-- part := data[1:3] //@codeaction("data[1:3]", "refactor.extract.variable-all", edit=slice) +-- anotherPart := data[1:3] //@codeaction("data[1:3]", "refactor.extract.variable-all", edit=slice) +-+ newVar := data[1:3] +-+ part := newVar //@codeaction("data[1:3]", "refactor.extract.variable-all", edit=slice) +-+ anotherPart := newVar //@codeaction("data[1:3]", "refactor.extract.variable-all", edit=slice) +--- nested_func.go -- +-package extract_all +- +-func outer() { +- inner := func() { +- val := 100 + 200 //@codeaction("100 + 200", "refactor.extract.constant-all", edit=nested) +- _ = val +- } +- inner() +- val := 100 + 200 //@codeaction("100 + 200", "refactor.extract.constant-all", edit=nested) +- _ = val +-} +--- @nested/nested_func.go -- +-@@ -4 +4 @@ +-+ const newConst = 100 + 200 +-@@ -5 +6 @@ +-- val := 100 + 200 //@codeaction("100 + 200", "refactor.extract.constant-all", edit=nested) +-+ val := newConst //@codeaction("100 + 200", "refactor.extract.constant-all", edit=nested) +-@@ -9 +10 @@ +-- val := 100 + 200 //@codeaction("100 + 200", "refactor.extract.constant-all", edit=nested) +-+ val := newConst //@codeaction("100 + 200", "refactor.extract.constant-all", edit=nested) +--- switch.go -- +-package extract_all +- +-func _() { +- value := 2 +- switch value { +- case 1: +- result := value * 10 //@codeaction("value * 10", 
"refactor.extract.variable-all", edit=switch) +- _ = result +- case 2: +- result := value * 10 //@codeaction("value * 10", "refactor.extract.variable-all", edit=switch) +- _ = result +- default: +- result := value * 10 //@codeaction("value * 10", "refactor.extract.variable-all", edit=switch) +- _ = result +- } +-} +--- @switch/switch.go -- +-@@ -5 +5 @@ +-+ newVar := value * 10 +-@@ -7 +8 @@ +-- result := value * 10 //@codeaction("value * 10", "refactor.extract.variable-all", edit=switch) +-+ result := newVar //@codeaction("value * 10", "refactor.extract.variable-all", edit=switch) +-@@ -10 +11 @@ +-- result := value * 10 //@codeaction("value * 10", "refactor.extract.variable-all", edit=switch) +-+ result := newVar //@codeaction("value * 10", "refactor.extract.variable-all", edit=switch) +-@@ -13 +14 @@ +-- result := value * 10 //@codeaction("value * 10", "refactor.extract.variable-all", edit=switch) +-+ result := newVar //@codeaction("value * 10", "refactor.extract.variable-all", edit=switch) +--- switch_single.go -- +-package extract_all +- +-func _() { +- value := 2 +- switch value { +- case 1: +- result := value * 10 +- _ = result +- case 2: +- result := value * 10 +- _ = result +- default: +- result := value * 10 //@codeaction("value * 10", "refactor.extract.variable", edit=switch_single) +- _ = result +- } +-} +--- @switch_single/switch_single.go -- +-@@ -13 +13,2 @@ +-- result := value * 10 //@codeaction("value * 10", "refactor.extract.variable", edit=switch_single) +-+ newVar := value * 10 +-+ result := newVar //@codeaction("value * 10", "refactor.extract.variable", edit=switch_single) +--- func_list.go -- +-package extract_all +- +-func _() { +- x := func(a int) int { //@codeaction("func", "refactor.extract.variable-all", end=closeBracket1, edit=func_list) +- b := 1 +- return b + a +- } //@loc(closeBracket1, "}") +- y := func(a int) int { //@codeaction("func", "refactor.extract.variable-all", end=closeBracket2, edit=func_list) +- b := 1 +- return b + a +- }//@loc(closeBracket2, "}") +-} +--- @func_list/func_list.go -- +-@@ -4 +4 @@ +-- x := func(a int) int { //@codeaction("func", "refactor.extract.variable-all", end=closeBracket1, edit=func_list) +-+ newVar := func(a int) int { +-@@ -7,5 +7,3 @@ +-- } //@loc(closeBracket1, "}") +-- y := func(a int) int { //@codeaction("func", "refactor.extract.variable-all", end=closeBracket2, edit=func_list) +-- b := 1 +-- return b + a +-- }//@loc(closeBracket2, "}") +-+ } +-+ x := newVar //@loc(closeBracket1, "}") +-+ y := newVar//@loc(closeBracket2, "}") +diff -urN a/gopls/internal/test/marker/testdata/codeaction/extract_variable_all.txt b/gopls/internal/test/marker/testdata/codeaction/extract_variable_all.txt +--- a/gopls/internal/test/marker/testdata/codeaction/extract_variable_all.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/codeaction/extract_variable_all.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,248 +0,0 @@ +-This test checks the behavior of the 'replace all occurrences of expression' code action, with resolve support. +-See extract_variable_all_resolve.txt for the same test with resolve support. 
+- +--- capabilities.json -- +-{ +- "textDocument": { +- "codeAction": { +- "dataSupport": false, +- "resolveSupport": {} +- } +- } +-} +- +--- flags -- +--ignore_extra_diags +- +--- basic_lit.go -- +-package extract_all +- +-func _() { +- var _ = 1 + 2 + 3 //@codeaction("1 + 2", "refactor.extract.constant-all", edit=basic_lit) +- var _ = 1 + 2 + 3 //@codeaction("1 + 2", "refactor.extract.constant-all", edit=basic_lit) +-} +--- @basic_lit/basic_lit.go -- +-@@ -4,2 +4,3 @@ +-- var _ = 1 + 2 + 3 //@codeaction("1 + 2", "refactor.extract.constant-all", edit=basic_lit) +-- var _ = 1 + 2 + 3 //@codeaction("1 + 2", "refactor.extract.constant-all", edit=basic_lit) +-+ const newConst = 1 + 2 +-+ var _ = newConst + 3 //@codeaction("1 + 2", "refactor.extract.constant-all", edit=basic_lit) +-+ var _ = newConst + 3 //@codeaction("1 + 2", "refactor.extract.constant-all", edit=basic_lit) +--- nested_scope.go -- +-package extract_all +- +-func _() { +- newConst1 := 0 +- if true { +- x := 1 + 2 + 3 //@codeaction("1 + 2 + 3", "refactor.extract.constant-all", edit=nested_scope) +- } +- if true { +- newConst := 0 +- if false { +- y := 1 + 2 + 3 //@codeaction("1 + 2 + 3", "refactor.extract.constant-all", edit=nested_scope) +- } +- } +- z := 1 + 2 + 3 //@codeaction("1 + 2 + 3", "refactor.extract.constant-all", edit=nested_scope) +-} +--- @nested_scope/nested_scope.go -- +-@@ -5 +5 @@ +-+ const newConst2 = 1 + 2 + 3 +-@@ -6 +7 @@ +-- x := 1 + 2 + 3 //@codeaction("1 + 2 + 3", "refactor.extract.constant-all", edit=nested_scope) +-+ x := newConst2 //@codeaction("1 + 2 + 3", "refactor.extract.constant-all", edit=nested_scope) +-@@ -11 +12 @@ +-- y := 1 + 2 + 3 //@codeaction("1 + 2 + 3", "refactor.extract.constant-all", edit=nested_scope) +-+ y := newConst2 //@codeaction("1 + 2 + 3", "refactor.extract.constant-all", edit=nested_scope) +-@@ -14 +15 @@ +-- z := 1 + 2 + 3 //@codeaction("1 + 2 + 3", "refactor.extract.constant-all", edit=nested_scope) +-+ z := newConst2 //@codeaction("1 + 2 + 3", "refactor.extract.constant-all", edit=nested_scope) +--- function_call.go -- +-package extract_all +- +-import "fmt" +- +-func _() { +- result := fmt.Sprintf("%d", 42) //@codeaction(`fmt.Sprintf("%d", 42)`, "refactor.extract.variable-all", edit=replace_func_call) +- if result != "" { +- anotherResult := fmt.Sprintf("%d", 42) //@codeaction(`fmt.Sprintf("%d", 42)`, "refactor.extract.variable-all", edit=replace_func_call) +- _ = anotherResult +- } +-} +--- @replace_func_call/function_call.go -- +-@@ -6 +6,2 @@ +-- result := fmt.Sprintf("%d", 42) //@codeaction(`fmt.Sprintf("%d", 42)`, "refactor.extract.variable-all", edit=replace_func_call) +-+ newVar := fmt.Sprintf("%d", 42) +-+ result := newVar //@codeaction(`fmt.Sprintf("%d", 42)`, "refactor.extract.variable-all", edit=replace_func_call) +-@@ -8 +9 @@ +-- anotherResult := fmt.Sprintf("%d", 42) //@codeaction(`fmt.Sprintf("%d", 42)`, "refactor.extract.variable-all", edit=replace_func_call) +-+ anotherResult := newVar //@codeaction(`fmt.Sprintf("%d", 42)`, "refactor.extract.variable-all", edit=replace_func_call) +--- composite_literals.go -- +-package extract_all +- +-func _() { +- data := []int{1, 2, 3} //@codeaction("[]int{1, 2, 3}", "refactor.extract.variable-all", edit=composite) +- processData(data) +- moreData := []int{1, 2, 3} //@codeaction("[]int{1, 2, 3}", "refactor.extract.variable-all", edit=composite) +- processData(moreData) +-} +- +-func processData(d []int) {} +--- @composite/composite_literals.go -- +-@@ -4 +4,2 @@ +-- data := []int{1, 2, 3} //@codeaction("[]int{1, 2, 
3}", "refactor.extract.variable-all", edit=composite) +-+ newVar := []int{1, 2, 3} +-+ data := newVar //@codeaction("[]int{1, 2, 3}", "refactor.extract.variable-all", edit=composite) +-@@ -6 +7 @@ +-- moreData := []int{1, 2, 3} //@codeaction("[]int{1, 2, 3}", "refactor.extract.variable-all", edit=composite) +-+ moreData := newVar //@codeaction("[]int{1, 2, 3}", "refactor.extract.variable-all", edit=composite) +--- selector.go -- +-package extract_all +- +-type MyStruct struct { +- Value int +-} +- +-func _() { +- s := MyStruct{Value: 10} +- v := s.Value //@codeaction("s.Value", "refactor.extract.variable-all", edit=sel) +- if v > 0 { +- w := s.Value //@codeaction("s.Value", "refactor.extract.variable-all", edit=sel) +- _ = w +- } +-} +--- @sel/selector.go -- +-@@ -9 +9,2 @@ +-- v := s.Value //@codeaction("s.Value", "refactor.extract.variable-all", edit=sel) +-+ newVar := s.Value +-+ v := newVar //@codeaction("s.Value", "refactor.extract.variable-all", edit=sel) +-@@ -11 +12 @@ +-- w := s.Value //@codeaction("s.Value", "refactor.extract.variable-all", edit=sel) +-+ w := newVar //@codeaction("s.Value", "refactor.extract.variable-all", edit=sel) +--- index.go -- +-package extract_all +- +-func _() { +- arr := []int{1, 2, 3} +- val := arr[0] //@codeaction("arr[0]", "refactor.extract.variable-all", edit=index) +- val2 := arr[0] //@codeaction("arr[0]", "refactor.extract.variable-all", edit=index) +-} +--- @index/index.go -- +-@@ -5,2 +5,3 @@ +-- val := arr[0] //@codeaction("arr[0]", "refactor.extract.variable-all", edit=index) +-- val2 := arr[0] //@codeaction("arr[0]", "refactor.extract.variable-all", edit=index) +-+ newVar := arr[0] +-+ val := newVar //@codeaction("arr[0]", "refactor.extract.variable-all", edit=index) +-+ val2 := newVar //@codeaction("arr[0]", "refactor.extract.variable-all", edit=index) +--- slice_expr.go -- +-package extract_all +- +-func _() { +- data := []int{1, 2, 3, 4, 5} +- part := data[1:3] //@codeaction("data[1:3]", "refactor.extract.variable-all", edit=slice) +- anotherPart := data[1:3] //@codeaction("data[1:3]", "refactor.extract.variable-all", edit=slice) +-} +--- @slice/slice_expr.go -- +-@@ -5,2 +5,3 @@ +-- part := data[1:3] //@codeaction("data[1:3]", "refactor.extract.variable-all", edit=slice) +-- anotherPart := data[1:3] //@codeaction("data[1:3]", "refactor.extract.variable-all", edit=slice) +-+ newVar := data[1:3] +-+ part := newVar //@codeaction("data[1:3]", "refactor.extract.variable-all", edit=slice) +-+ anotherPart := newVar //@codeaction("data[1:3]", "refactor.extract.variable-all", edit=slice) +--- nested_func.go -- +-package extract_all +- +-func outer() { +- inner := func() { +- val := 100 + 200 //@codeaction("100 + 200", "refactor.extract.constant-all", edit=nested) +- _ = val +- } +- inner() +- val := 100 + 200 //@codeaction("100 + 200", "refactor.extract.constant-all", edit=nested) +- _ = val +-} +--- @nested/nested_func.go -- +-@@ -4 +4 @@ +-+ const newConst = 100 + 200 +-@@ -5 +6 @@ +-- val := 100 + 200 //@codeaction("100 + 200", "refactor.extract.constant-all", edit=nested) +-+ val := newConst //@codeaction("100 + 200", "refactor.extract.constant-all", edit=nested) +-@@ -9 +10 @@ +-- val := 100 + 200 //@codeaction("100 + 200", "refactor.extract.constant-all", edit=nested) +-+ val := newConst //@codeaction("100 + 200", "refactor.extract.constant-all", edit=nested) +--- switch.go -- +-package extract_all +- +-func _() { +- value := 2 +- switch value { +- case 1: +- result := value * 10 //@codeaction("value * 10", "refactor.extract.variable-all", 
edit=switch) +- _ = result +- case 2: +- result := value * 10 //@codeaction("value * 10", "refactor.extract.variable-all", edit=switch) +- _ = result +- default: +- result := value * 10 //@codeaction("value * 10", "refactor.extract.variable-all", edit=switch) +- _ = result +- } +-} +--- @switch/switch.go -- +-@@ -5 +5 @@ +-+ newVar := value * 10 +-@@ -7 +8 @@ +-- result := value * 10 //@codeaction("value * 10", "refactor.extract.variable-all", edit=switch) +-+ result := newVar //@codeaction("value * 10", "refactor.extract.variable-all", edit=switch) +-@@ -10 +11 @@ +-- result := value * 10 //@codeaction("value * 10", "refactor.extract.variable-all", edit=switch) +-+ result := newVar //@codeaction("value * 10", "refactor.extract.variable-all", edit=switch) +-@@ -13 +14 @@ +-- result := value * 10 //@codeaction("value * 10", "refactor.extract.variable-all", edit=switch) +-+ result := newVar //@codeaction("value * 10", "refactor.extract.variable-all", edit=switch) +--- switch_single.go -- +-package extract_all +- +-func _() { +- value := 2 +- switch value { +- case 1: +- result := value * 10 +- _ = result +- case 2: +- result := value * 10 +- _ = result +- default: +- result := value * 10 //@codeaction("value * 10", "refactor.extract.variable", edit=switch_single) +- _ = result +- } +-} +--- @switch_single/switch_single.go -- +-@@ -13 +13,2 @@ +-- result := value * 10 //@codeaction("value * 10", "refactor.extract.variable", edit=switch_single) +-+ newVar := value * 10 +-+ result := newVar //@codeaction("value * 10", "refactor.extract.variable", edit=switch_single) +--- func_list.go -- +-package extract_all +- +-func _() { +- x := func(a int) int { //@codeaction("func", "refactor.extract.variable-all", end=closeBracket1, edit=func_list) +- b := 1 +- return b + a +- } //@loc(closeBracket1, "}") +- y := func(a int) int { //@codeaction("func", "refactor.extract.variable-all", end=closeBracket2, edit=func_list) +- b := 1 +- return b + a +- }//@loc(closeBracket2, "}") +-} +--- @func_list/func_list.go -- +-@@ -4 +4 @@ +-- x := func(a int) int { //@codeaction("func", "refactor.extract.variable-all", end=closeBracket1, edit=func_list) +-+ newVar := func(a int) int { +-@@ -7,5 +7,3 @@ +-- } //@loc(closeBracket1, "}") +-- y := func(a int) int { //@codeaction("func", "refactor.extract.variable-all", end=closeBracket2, edit=func_list) +-- b := 1 +-- return b + a +-- }//@loc(closeBracket2, "}") +-+ } +-+ x := newVar //@loc(closeBracket1, "}") +-+ y := newVar//@loc(closeBracket2, "}") +diff -urN a/gopls/internal/test/marker/testdata/codeaction/extract_variable-if.txt b/gopls/internal/test/marker/testdata/codeaction/extract_variable-if.txt +--- a/gopls/internal/test/marker/testdata/codeaction/extract_variable-if.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/codeaction/extract_variable-if.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,41 +0,0 @@ +-This test checks the behavior of the 'extract variable/constant' code actions +-when the optimal place for the new declaration is within the "if" statement, +-like so: +- +- if x := 1 + 2 or y + y ; true { +- } else if x > 0 { +- } +- +-A future refactor.variable implementation that does this should avoid +-using a 'const' declaration, which is not legal at that location. 
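Stepping back from the '-all' goldens above: those variants rewrite every occurrence of the selected expression in one pass and place the single new declaration before the first occurrence, in the smallest scope that contains them all. A minimal compilable sketch of the constant-all case, reusing the goldens' newConst naming:

package extract_all

func before() int {
	a := 1 + 2 + 3
	b := 1 + 2 + 3 // selecting "1 + 2" with refactor.extract.constant-all
	return a + b
}

func after() int {
	const newConst = 1 + 2 // declared once, before the first occurrence
	a := newConst + 3
	b := newConst + 3
	return a + b
}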
+- +--- flags -- +--ignore_extra_diags +- +--- a.go -- +-package a +- +-func constant() { +- if true { +- } else if 1 + 2 > 0 { //@ codeaction("1 + 2", "refactor.extract.constant", edit=constant) +- } +-} +- +-func variable(y int) { +- if true { +- } else if y + y > 0 { //@ codeaction("y + y", "refactor.extract.variable", edit=variable) +- } +-} +- +--- @constant/a.go -- +-@@ -4 +4 @@ +-+ const newConst = 1 + 2 +-@@ -5 +6 @@ +-- } else if 1 + 2 > 0 { //@ codeaction("1 + 2", "refactor.extract.constant", edit=constant) +-+ } else if newConst > 0 { //@ codeaction("1 + 2", "refactor.extract.constant", edit=constant) +--- @variable/a.go -- +-@@ -10 +10 @@ +-+ newVar := y + y +-@@ -11 +12 @@ +-- } else if y + y > 0 { //@ codeaction("y + y", "refactor.extract.variable", edit=variable) +-+ } else if newVar > 0 { //@ codeaction("y + y", "refactor.extract.variable", edit=variable) +diff -urN a/gopls/internal/test/marker/testdata/codeaction/extract_variable-inexact.txt b/gopls/internal/test/marker/testdata/codeaction/extract_variable-inexact.txt +--- a/gopls/internal/test/marker/testdata/codeaction/extract_variable-inexact.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/codeaction/extract_variable-inexact.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,36 +0,0 @@ +-This test checks that extract variable/constant permits: +-- extraneous whitespace in the selection +-- function literals +-- pointer dereference expressions +-- parenthesized expressions +- +--- a.go -- +-package a +- +-func _(ptr *int) { +- var _ = 1 + 2 + 3 //@codeaction("1 + 2 ", "refactor.extract.constant", edit=spaces) +- var _ = func() {} //@codeaction("func() {}", "refactor.extract.variable", edit=funclit) +- var _ = *ptr //@codeaction("*ptr", "refactor.extract.variable", edit=ptr) +- var _ = (ptr) //@codeaction("(ptr)", "refactor.extract.variable", edit=paren) +-} +- +--- @spaces/a.go -- +-@@ -4 +4,2 @@ +-- var _ = 1 + 2 + 3 //@codeaction("1 + 2 ", "refactor.extract.constant", edit=spaces) +-+ const newConst = 1 + 2 +-+ var _ = newConst + 3 //@codeaction("1 + 2 ", "refactor.extract.constant", edit=spaces) +--- @funclit/a.go -- +-@@ -5 +5,2 @@ +-- var _ = func() {} //@codeaction("func() {}", "refactor.extract.variable", edit=funclit) +-+ newVar := func() {} +-+ var _ = newVar //@codeaction("func() {}", "refactor.extract.variable", edit=funclit) +--- @ptr/a.go -- +-@@ -6 +6,2 @@ +-- var _ = *ptr //@codeaction("*ptr", "refactor.extract.variable", edit=ptr) +-+ newVar := *ptr +-+ var _ = newVar //@codeaction("*ptr", "refactor.extract.variable", edit=ptr) +--- @paren/a.go -- +-@@ -7 +7,2 @@ +-- var _ = (ptr) //@codeaction("(ptr)", "refactor.extract.variable", edit=paren) +-+ newVar := (ptr) +-+ var _ = newVar //@codeaction("(ptr)", "refactor.extract.variable", edit=paren) +diff -urN a/gopls/internal/test/marker/testdata/codeaction/extract_variable_resolve.txt b/gopls/internal/test/marker/testdata/codeaction/extract_variable_resolve.txt +--- a/gopls/internal/test/marker/testdata/codeaction/extract_variable_resolve.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/codeaction/extract_variable_resolve.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,70 +0,0 @@ +-This test checks the behavior of the 'extract variable/constant' code action, with resolve support. +-See extract_variable.txt for the same test without resolve support. 
+- +--- flags -- +--ignore_extra_diags +- +--- basic_lit.go -- +-package extract +- +-func _() { +- var _ = 1 + 2 //@codeaction("1", "refactor.extract.constant", edit=basic_lit1) +- var _ = 3 + 4 //@codeaction("3 + 4", "refactor.extract.constant", edit=basic_lit2) +-} +- +--- @basic_lit1/basic_lit.go -- +-@@ -4 +4,2 @@ +-- var _ = 1 + 2 //@codeaction("1", "refactor.extract.constant", edit=basic_lit1) +-+ const newConst = 1 +-+ var _ = newConst + 2 //@codeaction("1", "refactor.extract.constant", edit=basic_lit1) +--- @basic_lit2/basic_lit.go -- +-@@ -5 +5,2 @@ +-- var _ = 3 + 4 //@codeaction("3 + 4", "refactor.extract.constant", edit=basic_lit2) +-+ const newConst = 3 + 4 +-+ var _ = newConst //@codeaction("3 + 4", "refactor.extract.constant", edit=basic_lit2) +--- func_call.go -- +-package extract +- +-import "strconv" +- +-func _() { +- x0 := append([]int{}, 1) //@codeaction("append([]int{}, 1)", "refactor.extract.variable", edit=func_call1) +- str := "1" +- b, err := strconv.Atoi(str) //@codeaction("strconv.Atoi(str)", "refactor.extract.variable", edit=func_call2) +-} +- +--- @func_call1/func_call.go -- +-@@ -6 +6,2 @@ +-- x0 := append([]int{}, 1) //@codeaction("append([]int{}, 1)", "refactor.extract.variable", edit=func_call1) +-+ newVar := append([]int{}, 1) +-+ x0 := newVar //@codeaction("append([]int{}, 1)", "refactor.extract.variable", edit=func_call1) +--- @func_call2/func_call.go -- +-@@ -8 +8,2 @@ +-- b, err := strconv.Atoi(str) //@codeaction("strconv.Atoi(str)", "refactor.extract.variable", edit=func_call2) +-+ newVar, newVar1 := strconv.Atoi(str) +-+ b, err := newVar, newVar1 //@codeaction("strconv.Atoi(str)", "refactor.extract.variable", edit=func_call2) +--- scope.go -- +-package extract +- +-import "go/ast" +- +-func _() { +- x0 := 0 +- if true { +- y := ast.CompositeLit{} //@codeaction("ast.CompositeLit{}", "refactor.extract.variable", edit=scope1) +- } +- if true { +- x := !false //@codeaction("!false", "refactor.extract.constant", edit=scope2) +- } +-} +- +--- @scope1/scope.go -- +-@@ -8 +8,2 @@ +-- y := ast.CompositeLit{} //@codeaction("ast.CompositeLit{}", "refactor.extract.variable", edit=scope1) +-+ newVar := ast.CompositeLit{} +-+ y := newVar //@codeaction("ast.CompositeLit{}", "refactor.extract.variable", edit=scope1) +--- @scope2/scope.go -- +-@@ -11 +11,2 @@ +-- x := !false //@codeaction("!false", "refactor.extract.constant", edit=scope2) +-+ const newConst = !false +-+ x := newConst //@codeaction("!false", "refactor.extract.constant", edit=scope2) +diff -urN a/gopls/internal/test/marker/testdata/codeaction/extract_variable-toplevel.txt b/gopls/internal/test/marker/testdata/codeaction/extract_variable-toplevel.txt +--- a/gopls/internal/test/marker/testdata/codeaction/extract_variable-toplevel.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/codeaction/extract_variable-toplevel.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,51 +0,0 @@ +-This test checks the behavior of the 'extract variable/constant' code action +-at top level (outside any function). See issue #70665. 
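One detail worth calling out from the func_call2 golden above: when the extracted call has multiple results, a fresh variable is introduced per result. A compilable sketch of the rewritten form (the newVar and newVar1 names follow the golden):

package extract

import "strconv"

func after(str string) (int, error) {
	// One new variable per result value of the extracted call.
	newVar, newVar1 := strconv.Atoi(str)
	b, err := newVar, newVar1
	return b, err
}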
+- +--- a.go -- +-package a +- +-const Length = len("hello") + 2 //@codeaction(`len("hello")`, "refactor.extract.constant", edit=lenhello) +- +-var Slice = append([]int{}, 1, 2, 3) //@codeaction("[]int{}", "refactor.extract.variable", edit=sliceliteral) +- +-type SHA256 [32]byte //@codeaction("32", "refactor.extract.constant", edit=arraylen) +- +-func F([2]int) {} //@codeaction("2", "refactor.extract.constant", edit=paramtypearraylen) +- +--- @lenhello/a.go -- +-@@ -3 +3,2 @@ +--const Length = len("hello") + 2 //@codeaction(`len("hello")`, "refactor.extract.constant", edit=lenhello) +-+const newConst = len("hello") +-+const Length = newConst + 2 //@codeaction(`len("hello")`, "refactor.extract.constant", edit=lenhello) +--- @sliceliteral/a.go -- +-@@ -5 +5,2 @@ +--var Slice = append([]int{}, 1, 2, 3) //@codeaction("[]int{}", "refactor.extract.variable", edit=sliceliteral) +-+var newVar = []int{} +-+var Slice = append(newVar, 1, 2, 3) //@codeaction("[]int{}", "refactor.extract.variable", edit=sliceliteral) +--- @arraylen/a.go -- +-@@ -7 +7,2 @@ +--type SHA256 [32]byte //@codeaction("32", "refactor.extract.constant", edit=arraylen) +-+const newConst = 32 +-+type SHA256 [newConst]byte //@codeaction("32", "refactor.extract.constant", edit=arraylen) +--- @paramtypearraylen/a.go -- +-@@ -9 +9,2 @@ +--func F([2]int) {} //@codeaction("2", "refactor.extract.constant", edit=paramtypearraylen) +-+const newConst = 2 +-+func F([newConst]int) {} //@codeaction("2", "refactor.extract.constant", edit=paramtypearraylen) +--- b/b.go -- +-package b +- +-// Check that package- and file-level name collisions are avoided. +- +-import newVar3 "errors" +- +-var newVar, newVar1, newVar2 any // these names are taken already +-var _ = newVar3.New("") +-var a, b int +-var C = a + b //@codeaction("a + b", "refactor.extract.variable", edit=fresh) +- +--- @fresh/b/b.go -- +-@@ -10 +10,2 @@ +--var C = a + b //@codeaction("a + b", "refactor.extract.variable", edit=fresh) +-+var newVar4 = a + b +-+var C = newVar4 //@codeaction("a + b", "refactor.extract.variable", edit=fresh) +diff -urN a/gopls/internal/test/marker/testdata/codeaction/extract_variable.txt b/gopls/internal/test/marker/testdata/codeaction/extract_variable.txt +--- a/gopls/internal/test/marker/testdata/codeaction/extract_variable.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/codeaction/extract_variable.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,80 +0,0 @@ +-This test checks the behavior of the 'extract variable/constant' code action. +-See extract_variable_resolve.txt for the same test with resolve support. 
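At top level (the issue #70665 fixtures above), the action emits a package-level declaration and picks a fresh name that does not collide with existing package- or file-level names. A minimal sketch of the const case, using the goldens' newConst naming:

package a

// Before: `type SHA256 [32]byte` with "32" selected for refactor.extract.constant.
const newConst = 32

// After: the array length refers to the new package-level constant.
type SHA256 [newConst]byte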
+- +--- capabilities.json -- +-{ +- "textDocument": { +- "codeAction": { +- "dataSupport": false, +- "resolveSupport": {} +- } +- } +-} +- +--- flags -- +--ignore_extra_diags +- +--- basic_lit.go -- +-package extract +- +-func _() { +- var _ = 1 + 2 //@codeaction("1", "refactor.extract.constant", edit=basic_lit1) +- var _ = 3 + 4 //@codeaction("3 + 4", "refactor.extract.constant", edit=basic_lit2) +-} +- +--- @basic_lit1/basic_lit.go -- +-@@ -4 +4,2 @@ +-- var _ = 1 + 2 //@codeaction("1", "refactor.extract.constant", edit=basic_lit1) +-+ const newConst = 1 +-+ var _ = newConst + 2 //@codeaction("1", "refactor.extract.constant", edit=basic_lit1) +--- @basic_lit2/basic_lit.go -- +-@@ -5 +5,2 @@ +-- var _ = 3 + 4 //@codeaction("3 + 4", "refactor.extract.constant", edit=basic_lit2) +-+ const newConst = 3 + 4 +-+ var _ = newConst //@codeaction("3 + 4", "refactor.extract.constant", edit=basic_lit2) +--- func_call.go -- +-package extract +- +-import "strconv" +- +-func _() { +- x0 := append([]int{}, 1) //@codeaction("append([]int{}, 1)", "refactor.extract.variable", edit=func_call1) +- str := "1" +- b, err := strconv.Atoi(str) //@codeaction("strconv.Atoi(str)", "refactor.extract.variable", edit=func_call2) +-} +- +--- @func_call1/func_call.go -- +-@@ -6 +6,2 @@ +-- x0 := append([]int{}, 1) //@codeaction("append([]int{}, 1)", "refactor.extract.variable", edit=func_call1) +-+ newVar := append([]int{}, 1) +-+ x0 := newVar //@codeaction("append([]int{}, 1)", "refactor.extract.variable", edit=func_call1) +--- @func_call2/func_call.go -- +-@@ -8 +8,2 @@ +-- b, err := strconv.Atoi(str) //@codeaction("strconv.Atoi(str)", "refactor.extract.variable", edit=func_call2) +-+ newVar, newVar1 := strconv.Atoi(str) +-+ b, err := newVar, newVar1 //@codeaction("strconv.Atoi(str)", "refactor.extract.variable", edit=func_call2) +--- scope.go -- +-package extract +- +-import "go/ast" +- +-func _() { +- x0 := 0 +- if true { +- y := ast.CompositeLit{} //@codeaction("ast.CompositeLit{}", "refactor.extract.variable", edit=scope1) +- } +- if true { +- x := !false //@codeaction("!false", "refactor.extract.constant", edit=scope2) +- } +-} +- +--- @scope1/scope.go -- +-@@ -8 +8,2 @@ +-- y := ast.CompositeLit{} //@codeaction("ast.CompositeLit{}", "refactor.extract.variable", edit=scope1) +-+ newVar := ast.CompositeLit{} +-+ y := newVar //@codeaction("ast.CompositeLit{}", "refactor.extract.variable", edit=scope1) +--- @scope2/scope.go -- +-@@ -11 +11,2 @@ +-- x := !false //@codeaction("!false", "refactor.extract.constant", edit=scope2) +-+ const newConst = !false +-+ x := newConst //@codeaction("!false", "refactor.extract.constant", edit=scope2) +diff -urN a/gopls/internal/test/marker/testdata/codeaction/extract-variadic-63287.txt b/gopls/internal/test/marker/testdata/codeaction/extract-variadic-63287.txt +--- a/gopls/internal/test/marker/testdata/codeaction/extract-variadic-63287.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/codeaction/extract-variadic-63287.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,27 +0,0 @@ +-This test exercises extract on a variadic function. +-It is a regression test for bug #63287 in which +-the final parameter's "..." would go missing. 
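A compilable sketch of what the #63287 fixture that follows guards against: when an extracted block mentions a variadic function value, the generated helper must keep the final parameter's "..." (the bug dropped it, yielding func(string, any)). Names follow the fixture's newFunction convention.

package a

func _() {
	var logf func(string, ...any)
	newFunction(logf)
}

// The extracted helper's parameter keeps the variadic element type.
func newFunction(logf func(string, ...any)) {
	println(logf)
}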
+- +--- go.mod -- +-module example.com +-go 1.18 +- +--- a/a.go -- +-package a +- +-//@codeaction(block, "refactor.extract.function", edit=out) +- +-func _() { +- var logf func(string, ...any) +- { println(logf) } //@loc(block, re`{[^}]*}`) +-} +- +--- @out/a/a.go -- +-@@ -7 +7 @@ +-- { println(logf) } //@loc(block, re`{[^}]*}`) +-+ { newFunction(logf) } //@loc(block, re`{[^}]*}`) +-@@ -10 +10,4 @@ +-+func newFunction(logf func( string, ...any)) { +-+ println(logf) +-+} +-+ +diff -urN a/gopls/internal/test/marker/testdata/codeaction/fill_struct_resolve.txt b/gopls/internal/test/marker/testdata/codeaction/fill_struct_resolve.txt +--- a/gopls/internal/test/marker/testdata/codeaction/fill_struct_resolve.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/codeaction/fill_struct_resolve.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,721 +0,0 @@ +-This test checks the behavior of the 'fill struct' code action, with resolve support. +-See fill_struct.txt for same test without resolve support. +- +--- flags -- +--ignore_extra_diags +- +--- go.mod -- +-module golang.org/lsptests/fillstruct +- +-go 1.18 +- +--- data/data.go -- +-package data +- +-type B struct { +- ExportedInt int +- unexportedInt int +-} +- +--- a.go -- +-package fillstruct +- +-import ( +- "golang.org/lsptests/fillstruct/data" +-) +- +-type basicStruct struct { +- foo int +-} +- +-var _ = basicStruct{} //@codeaction("}", "refactor.rewrite.fillStruct", edit=a1) +- +-type twoArgStruct struct { +- foo int +- bar string +-} +- +-var _ = twoArgStruct{} //@codeaction("}", "refactor.rewrite.fillStruct", edit=a2) +- +-type nestedStruct struct { +- bar string +- basic basicStruct +-} +- +-var _ = nestedStruct{} //@codeaction("}", "refactor.rewrite.fillStruct", edit=a3) +- +-var _ = data.B{} //@codeaction("}", "refactor.rewrite.fillStruct", edit=a4) +--- @a1/a.go -- +-@@ -11 +11,3 @@ +--var _ = basicStruct{} //@codeaction("}", "refactor.rewrite.fillStruct", edit=a1) +-+var _ = basicStruct{ +-+ foo: 0, +-+} //@codeaction("}", "refactor.rewrite.fillStruct", edit=a1) +--- @a2/a.go -- +-@@ -18 +18,4 @@ +--var _ = twoArgStruct{} //@codeaction("}", "refactor.rewrite.fillStruct", edit=a2) +-+var _ = twoArgStruct{ +-+ foo: 0, +-+ bar: "", +-+} //@codeaction("}", "refactor.rewrite.fillStruct", edit=a2) +--- @a3/a.go -- +-@@ -25 +25,4 @@ +--var _ = nestedStruct{} //@codeaction("}", "refactor.rewrite.fillStruct", edit=a3) +-+var _ = nestedStruct{ +-+ bar: "", +-+ basic: basicStruct{}, +-+} //@codeaction("}", "refactor.rewrite.fillStruct", edit=a3) +--- @a4/a.go -- +-@@ -27 +27,3 @@ +--var _ = data.B{} //@codeaction("}", "refactor.rewrite.fillStruct", edit=a4) +-+var _ = data.B{ +-+ ExportedInt: 0, +-+} //@codeaction("}", "refactor.rewrite.fillStruct", edit=a4) +--- a2.go -- +-package fillstruct +- +-type typedStruct struct { +- m map[string]int +- s []int +- c chan int +- c1 <-chan int +- a [2]string +-} +- +-var _ = typedStruct{} //@codeaction("}", "refactor.rewrite.fillStruct", edit=a21) +- +-type funStruct struct { +- fn func(i int) int +-} +- +-var _ = funStruct{} //@codeaction("}", "refactor.rewrite.fillStruct", edit=a22) +- +-type funStructComplex struct { +- fn func(i int, s string) (string, int) +-} +- +-var _ = funStructComplex{} //@codeaction("}", "refactor.rewrite.fillStruct", edit=a23) +- +-type funStructEmpty struct { +- fn func() +-} +- +-var _ = funStructEmpty{} //@codeaction("}", "refactor.rewrite.fillStruct", edit=a24) +- +--- @a21/a2.go -- +-@@ -11 +11,7 @@ +--var _ = typedStruct{} //@codeaction("}", 
"refactor.rewrite.fillStruct", edit=a21) +-+var _ = typedStruct{ +-+ m: map[string]int{}, +-+ s: []int{}, +-+ c: make(chan int), +-+ c1: make(<-chan int), +-+ a: [2]string{}, +-+} //@codeaction("}", "refactor.rewrite.fillStruct", edit=a21) +--- @a22/a2.go -- +-@@ -17 +17,5 @@ +--var _ = funStruct{} //@codeaction("}", "refactor.rewrite.fillStruct", edit=a22) +-+var _ = funStruct{ +-+ fn: func(i int) int { +-+ panic("TODO") +-+ }, +-+} //@codeaction("}", "refactor.rewrite.fillStruct", edit=a22) +--- @a23/a2.go -- +-@@ -23 +23,5 @@ +--var _ = funStructComplex{} //@codeaction("}", "refactor.rewrite.fillStruct", edit=a23) +-+var _ = funStructComplex{ +-+ fn: func(i int, s string) (string, int) { +-+ panic("TODO") +-+ }, +-+} //@codeaction("}", "refactor.rewrite.fillStruct", edit=a23) +--- @a24/a2.go -- +-@@ -29 +29,5 @@ +--var _ = funStructEmpty{} //@codeaction("}", "refactor.rewrite.fillStruct", edit=a24) +-+var _ = funStructEmpty{ +-+ fn: func() { +-+ panic("TODO") +-+ }, +-+} //@codeaction("}", "refactor.rewrite.fillStruct", edit=a24) +--- a3.go -- +-package fillstruct +- +-import ( +- "go/ast" +- "go/token" +-) +- +-type Foo struct { +- A int +-} +- +-type Bar struct { +- X *Foo +- Y *Foo +-} +- +-var _ = Bar{} //@codeaction("}", "refactor.rewrite.fillStruct", edit=a31) +- +-type importedStruct struct { +- m map[*ast.CompositeLit]ast.Field +- s []ast.BadExpr +- a [3]token.Token +- c chan ast.EmptyStmt +- fn func(ast_decl ast.DeclStmt) ast.Ellipsis +- st ast.CompositeLit +-} +- +-var _ = importedStruct{} //@codeaction("}", "refactor.rewrite.fillStruct", edit=a32) +- +-type pointerBuiltinStruct struct { +- b *bool +- s *string +- i *int +-} +- +-var _ = pointerBuiltinStruct{} //@codeaction("}", "refactor.rewrite.fillStruct", edit=a33) +- +-var _ = []ast.BasicLit{ +- {}, //@codeaction("}", "refactor.rewrite.fillStruct", edit=a34) +-} +- +-var _ = []ast.BasicLit{{}} //@codeaction("}", "refactor.rewrite.fillStruct", edit=a35) +--- @a31/a3.go -- +-@@ -17 +17,4 @@ +--var _ = Bar{} //@codeaction("}", "refactor.rewrite.fillStruct", edit=a31) +-+var _ = Bar{ +-+ X: &Foo{}, +-+ Y: &Foo{}, +-+} //@codeaction("}", "refactor.rewrite.fillStruct", edit=a31) +--- @a32/a3.go -- +-@@ -28 +28,10 @@ +--var _ = importedStruct{} //@codeaction("}", "refactor.rewrite.fillStruct", edit=a32) +-+var _ = importedStruct{ +-+ m: map[*ast.CompositeLit]ast.Field{}, +-+ s: []ast.BadExpr{}, +-+ a: [3]token.Token{}, +-+ c: make(chan ast.EmptyStmt), +-+ fn: func(ast_decl ast.DeclStmt) ast.Ellipsis { +-+ panic("TODO") +-+ }, +-+ st: ast.CompositeLit{}, +-+} //@codeaction("}", "refactor.rewrite.fillStruct", edit=a32) +--- @a33/a3.go -- +-@@ -36 +36,5 @@ +--var _ = pointerBuiltinStruct{} //@codeaction("}", "refactor.rewrite.fillStruct", edit=a33) +-+var _ = pointerBuiltinStruct{ +-+ b: new(bool), +-+ s: new(string), +-+ i: new(int), +-+} //@codeaction("}", "refactor.rewrite.fillStruct", edit=a33) +--- @a34/a3.go -- +-@@ -39 +39,5 @@ +-- {}, //@codeaction("}", "refactor.rewrite.fillStruct", edit=a34) +-+ { +-+ ValuePos: 0, +-+ Kind: 0, +-+ Value: "", +-+ }, //@codeaction("}", "refactor.rewrite.fillStruct", edit=a34) +--- @a35/a3.go -- +-@@ -42 +42,5 @@ +--var _ = []ast.BasicLit{{}} //@codeaction("}", "refactor.rewrite.fillStruct", edit=a35) +-+var _ = []ast.BasicLit{{ +-+ ValuePos: 0, +-+ Kind: 0, +-+ Value: "", +-+}} //@codeaction("}", "refactor.rewrite.fillStruct", edit=a35) +--- a4.go -- +-package fillstruct +- +-import "go/ast" +- +-type iStruct struct { +- X int +-} +- +-type sStruct struct { +- str string +-} +- +-type 
multiFill struct { +- num int +- strin string +- arr []int +-} +- +-type assignStruct struct { +- n ast.Node +-} +- +-func fill() { +- var x int +- var _ = iStruct{} //@codeaction("}", "refactor.rewrite.fillStruct", edit=a41) +- +- var s string +- var _ = sStruct{} //@codeaction("}", "refactor.rewrite.fillStruct", edit=a42) +- +- var n int +- _ = []int{} +- if true { +- arr := []int{1, 2} +- } +- var _ = multiFill{} //@codeaction("}", "refactor.rewrite.fillStruct", edit=a43) +- +- var node *ast.CompositeLit +- var _ = assignStruct{} //@codeaction("}", "refactor.rewrite.fillStruct", edit=a45) +-} +- +--- @a41/a4.go -- +-@@ -25 +25,3 @@ +-- var _ = iStruct{} //@codeaction("}", "refactor.rewrite.fillStruct", edit=a41) +-+ var _ = iStruct{ +-+ X: x, +-+ } //@codeaction("}", "refactor.rewrite.fillStruct", edit=a41) +--- @a42/a4.go -- +-@@ -28 +28,3 @@ +-- var _ = sStruct{} //@codeaction("}", "refactor.rewrite.fillStruct", edit=a42) +-+ var _ = sStruct{ +-+ str: s, +-+ } //@codeaction("}", "refactor.rewrite.fillStruct", edit=a42) +--- @a43/a4.go -- +-@@ -35 +35,5 @@ +-- var _ = multiFill{} //@codeaction("}", "refactor.rewrite.fillStruct", edit=a43) +-+ var _ = multiFill{ +-+ num: n, +-+ strin: s, +-+ arr: []int{}, +-+ } //@codeaction("}", "refactor.rewrite.fillStruct", edit=a43) +--- @a45/a4.go -- +-@@ -38 +38,3 @@ +-- var _ = assignStruct{} //@codeaction("}", "refactor.rewrite.fillStruct", edit=a45) +-+ var _ = assignStruct{ +-+ n: node, +-+ } //@codeaction("}", "refactor.rewrite.fillStruct", edit=a45) +--- fillStruct.go -- +-package fillstruct +- +-type StructA struct { +- unexportedIntField int +- ExportedIntField int +- MapA map[int]string +- Array []int +- StructB +-} +- +-type StructA2 struct { +- B *StructB +-} +- +-type StructA3 struct { +- B StructB +-} +- +-func fill() { +- a := StructA{} //@codeaction("}", "refactor.rewrite.fillStruct", edit=fillStruct1) +- b := StructA2{} //@codeaction("}", "refactor.rewrite.fillStruct", edit=fillStruct2) +- c := StructA3{} //@codeaction("}", "refactor.rewrite.fillStruct", edit=fillStruct3) +- if true { +- _ = StructA3{} //@codeaction("}", "refactor.rewrite.fillStruct", edit=fillStruct4) +- } +-} +- +--- @fillStruct1/fillStruct.go -- +-@@ -20 +20,7 @@ +-- a := StructA{} //@codeaction("}", "refactor.rewrite.fillStruct", edit=fillStruct1) +-+ a := StructA{ +-+ unexportedIntField: 0, +-+ ExportedIntField: 0, +-+ MapA: map[int]string{}, +-+ Array: []int{}, +-+ StructB: StructB{}, +-+ } //@codeaction("}", "refactor.rewrite.fillStruct", edit=fillStruct1) +--- @fillStruct2/fillStruct.go -- +-@@ -21 +21,3 @@ +-- b := StructA2{} //@codeaction("}", "refactor.rewrite.fillStruct", edit=fillStruct2) +-+ b := StructA2{ +-+ B: &StructB{}, +-+ } //@codeaction("}", "refactor.rewrite.fillStruct", edit=fillStruct2) +--- @fillStruct3/fillStruct.go -- +-@@ -22 +22,3 @@ +-- c := StructA3{} //@codeaction("}", "refactor.rewrite.fillStruct", edit=fillStruct3) +-+ c := StructA3{ +-+ B: StructB{}, +-+ } //@codeaction("}", "refactor.rewrite.fillStruct", edit=fillStruct3) +--- @fillStruct4/fillStruct.go -- +-@@ -24 +24,3 @@ +-- _ = StructA3{} //@codeaction("}", "refactor.rewrite.fillStruct", edit=fillStruct4) +-+ _ = StructA3{ +-+ B: StructB{}, +-+ } //@codeaction("}", "refactor.rewrite.fillStruct", edit=fillStruct4) +--- fillStruct_anon.go -- +-package fillstruct +- +-type StructAnon struct { +- a struct{} +- b map[string]any +- c map[string]struct { +- d int +- e bool +- } +-} +- +-func fill() { +- _ := StructAnon{} //@codeaction("}", "refactor.rewrite.fillStruct", 
edit=fillStruct_anon) +-} +--- @fillStruct_anon/fillStruct_anon.go -- +-@@ -13 +13,8 @@ +-- _ := StructAnon{} //@codeaction("}", "refactor.rewrite.fillStruct", edit=fillStruct_anon) +-+ _ := StructAnon{ +-+ a: struct{}{}, +-+ b: map[string]any{}, +-+ c: map[string]struct { +-+ d int +-+ e bool +-+ }{}, +-+ } //@codeaction("}", "refactor.rewrite.fillStruct", edit=fillStruct_anon) +--- fillStruct_nested.go -- +-package fillstruct +- +-type StructB struct { +- StructC +-} +- +-type StructC struct { +- unexportedInt int +-} +- +-func nested() { +- c := StructB{ +- StructC: StructC{}, //@codeaction("}", "refactor.rewrite.fillStruct", edit=fill_nested) +- } +-} +- +--- @fill_nested/fillStruct_nested.go -- +-@@ -13 +13,3 @@ +-- StructC: StructC{}, //@codeaction("}", "refactor.rewrite.fillStruct", edit=fill_nested) +-+ StructC: StructC{ +-+ unexportedInt: 0, +-+ }, //@codeaction("}", "refactor.rewrite.fillStruct", edit=fill_nested) +--- fillStruct_package.go -- +-package fillstruct +- +-import ( +- h2 "net/http" +- +- "golang.org/lsptests/fillstruct/data" +-) +- +-func unexported() { +- a := data.B{} //@codeaction("}", "refactor.rewrite.fillStruct", edit=fillStruct_package1) +- _ = h2.Client{} //@codeaction("}", "refactor.rewrite.fillStruct", edit=fillStruct_package2) +-} +--- @fillStruct_package1/fillStruct_package.go -- +-@@ -10 +10,3 @@ +-- a := data.B{} //@codeaction("}", "refactor.rewrite.fillStruct", edit=fillStruct_package1) +-+ a := data.B{ +-+ ExportedInt: 0, +-+ } //@codeaction("}", "refactor.rewrite.fillStruct", edit=fillStruct_package1) +--- @fillStruct_package2/fillStruct_package.go -- +-@@ -11 +11,8 @@ +-- _ = h2.Client{} //@codeaction("}", "refactor.rewrite.fillStruct", edit=fillStruct_package2) +-+ _ = h2.Client{ +-+ Transport: nil, +-+ CheckRedirect: func(req *h2.Request, via []*h2.Request) error { +-+ panic("TODO") +-+ }, +-+ Jar: nil, +-+ Timeout: 0, +-+ } //@codeaction("}", "refactor.rewrite.fillStruct", edit=fillStruct_package2) +--- fillStruct_partial.go -- +-package fillstruct +- +-type StructPartialA struct { +- PrefilledInt int +- UnfilledInt int +- StructPartialB +-} +- +-type StructPartialB struct { +- PrefilledInt int +- UnfilledInt int +-} +- +-func fill() { +- a := StructPartialA{ +- PrefilledInt: 5, +- } //@codeaction("}", "refactor.rewrite.fillStruct", edit=fillStruct_partial1) +- b := StructPartialB{ +- /* this comment should be preserved */ +- PrefilledInt: 7, // This comment should be preserved. 
+- /* As should +- this one */ +- } //@codeaction("}", "refactor.rewrite.fillStruct", edit=fillStruct_partial2) +-} +- +--- @fillStruct_partial1/fillStruct_partial.go -- +-@@ -16 +16,3 @@ +-- PrefilledInt: 5, +-+ PrefilledInt: 5, +-+ UnfilledInt: 0, +-+ StructPartialB: StructPartialB{}, +--- @fillStruct_partial2/fillStruct_partial.go -- +-@@ -23 +23 @@ +-+ UnfilledInt: 0, +--- fillStruct_spaces.go -- +-package fillstruct +- +-type StructD struct { +- ExportedIntField int +-} +- +-func spaces() { +- d := StructD{} //@codeaction("}", "refactor.rewrite.fillStruct", edit=fillStruct_spaces) +-} +- +--- @fillStruct_spaces/fillStruct_spaces.go -- +-@@ -8 +8,3 @@ +-- d := StructD{} //@codeaction("}", "refactor.rewrite.fillStruct", edit=fillStruct_spaces) +-+ d := StructD{ +-+ ExportedIntField: 0, +-+ } //@codeaction("}", "refactor.rewrite.fillStruct", edit=fillStruct_spaces) +--- fillStruct_unsafe.go -- +-package fillstruct +- +-import "unsafe" +- +-type unsafeStruct struct { +- x int +- p unsafe.Pointer +-} +- +-func fill() { +- _ := unsafeStruct{} //@codeaction("}", "refactor.rewrite.fillStruct", edit=fillStruct_unsafe) +-} +- +--- @fillStruct_unsafe/fillStruct_unsafe.go -- +-@@ -11 +11,4 @@ +-- _ := unsafeStruct{} //@codeaction("}", "refactor.rewrite.fillStruct", edit=fillStruct_unsafe) +-+ _ := unsafeStruct{ +-+ x: 0, +-+ p: nil, +-+ } //@codeaction("}", "refactor.rewrite.fillStruct", edit=fillStruct_unsafe) +--- typeparams.go -- +-package fillstruct +- +-type emptyStructWithTypeParams[A any] struct{} +- +-var _ = emptyStructWithTypeParams[int]{} // no suggested fix +- +-type basicStructWithTypeParams[T any] struct { +- foo T +-} +- +-var _ = basicStructWithTypeParams[int]{} //@codeaction("}", "refactor.rewrite.fillStruct", edit=typeparams1) +- +-type twoArgStructWithTypeParams[F, B any] struct { +- foo F +- bar B +-} +- +-var _ = twoArgStructWithTypeParams[string, int]{} //@codeaction("}", "refactor.rewrite.fillStruct", edit=typeparams2) +- +-var _ = twoArgStructWithTypeParams[int, string]{ +- bar: "bar", +-} //@codeaction("}", "refactor.rewrite.fillStruct", edit=typeparams3) +- +-type nestedStructWithTypeParams struct { +- bar string +- basic basicStructWithTypeParams[int] +-} +- +-var _ = nestedStructWithTypeParams{} //@codeaction("}", "refactor.rewrite.fillStruct", edit=typeparams4) +- +-func _[T any]() { +- type S struct{ t T } +- _ = S{} //@codeaction("}", "refactor.rewrite.fillStruct", edit=typeparams5) +- +- type P struct{ t *T } +- _ = P{} //@codeaction("}", "refactor.rewrite.fillStruct", edit=typeparams6) +- +- type Alias[u any] = struct { +- x u +- y *T +- } +- _ = Alias[string]{} //@codeaction("}", "refactor.rewrite.fillStruct", edit=typeparams7) +- +- type Named[u any] struct { +- x u +- y T +- } +- _ = Named[int]{} //@codeaction("}", "refactor.rewrite.fillStruct", edit=typeparams8) +-} +--- @typeparams1/typeparams.go -- +-@@ -11 +11,3 @@ +--var _ = basicStructWithTypeParams[int]{} //@codeaction("}", "refactor.rewrite.fillStruct", edit=typeparams1) +-+var _ = basicStructWithTypeParams[int]{ +-+ foo: 0, +-+} //@codeaction("}", "refactor.rewrite.fillStruct", edit=typeparams1) +--- @typeparams2/typeparams.go -- +-@@ -18 +18,4 @@ +--var _ = twoArgStructWithTypeParams[string, int]{} //@codeaction("}", "refactor.rewrite.fillStruct", edit=typeparams2) +-+var _ = twoArgStructWithTypeParams[string, int]{ +-+ foo: "", +-+ bar: 0, +-+} //@codeaction("}", "refactor.rewrite.fillStruct", edit=typeparams2) +--- @typeparams3/typeparams.go -- +-@@ -22 +22 @@ +-+ foo: 0, +--- 
@typeparams4/typeparams.go -- +-@@ -29 +29,4 @@ +--var _ = nestedStructWithTypeParams{} //@codeaction("}", "refactor.rewrite.fillStruct", edit=typeparams4) +-+var _ = nestedStructWithTypeParams{ +-+ bar: "", +-+ basic: basicStructWithTypeParams[int]{}, +-+} //@codeaction("}", "refactor.rewrite.fillStruct", edit=typeparams4) +--- @typeparams5/typeparams.go -- +-@@ -33 +33,3 @@ +-- _ = S{} //@codeaction("}", "refactor.rewrite.fillStruct", edit=typeparams5) +-+ _ = S{ +-+ t: *new(T), +-+ } //@codeaction("}", "refactor.rewrite.fillStruct", edit=typeparams5) +--- @typeparams6/typeparams.go -- +-@@ -36 +36,3 @@ +-- _ = P{} //@codeaction("}", "refactor.rewrite.fillStruct", edit=typeparams6) +-+ _ = P{ +-+ t: new(T), +-+ } //@codeaction("}", "refactor.rewrite.fillStruct", edit=typeparams6) +--- @typeparams7/typeparams.go -- +-@@ -42 +42,4 @@ +-- _ = Alias[string]{} //@codeaction("}", "refactor.rewrite.fillStruct", edit=typeparams7) +-+ _ = Alias[string]{ +-+ x: "", +-+ y: new(T), +-+ } //@codeaction("}", "refactor.rewrite.fillStruct", edit=typeparams7) +--- @typeparams8/typeparams.go -- +-@@ -48 +48,4 @@ +-- _ = Named[int]{} //@codeaction("}", "refactor.rewrite.fillStruct", edit=typeparams8) +-+ _ = Named[int]{ +-+ x: 0, +-+ y: *new(T), +-+ } //@codeaction("}", "refactor.rewrite.fillStruct", edit=typeparams8) +--- issue63921.go -- +-package fillstruct +- +-// Test for golang/go#63921: fillstruct panicked with invalid fields. +-type invalidStruct struct { +- F int +- Undefined +-} +- +-func _() { +- // Note: the golden content for issue63921 is empty: fillstruct produces no +- // edits, but does not panic. +- invalidStruct{} //@codeaction("}", "refactor.rewrite.fillStruct", edit=issue63921) +-} +--- named/named.go -- +-package named +- +-type foo struct {} +-type aliasFoo = foo +- +-func _() { +- type namedInt int +- type namedString string +- type namedBool bool +- type namedPointer *foo +- type namedSlice []foo +- type namedInterface interface{ Error() string } +- type namedChan chan int +- type namedMap map[string]foo +- type namedSignature func(string) string +- type namedStruct struct{} +- type namedArray [3]foo +- type namedAlias aliasFoo +- +- type bar struct { +- namedInt namedInt +- namedString namedString +- namedBool namedBool +- namedPointer namedPointer +- namedSlice namedSlice +- namedInterface namedInterface +- namedChan namedChan +- namedMap namedMap +- namedSignature namedSignature +- namedStruct namedStruct +- namedArray namedArray +- namedAlias namedAlias +- } +- +- bar{} //@codeaction("}", "refactor.rewrite.fillStruct", edit=named) +-} +--- @named/named/named.go -- +-@@ -35 +35,14 @@ +-- bar{} //@codeaction("}", "refactor.rewrite.fillStruct", edit=named) +-+ bar{ +-+ namedInt: 0, +-+ namedString: "", +-+ namedBool: false, +-+ namedPointer: nil, +-+ namedSlice: namedSlice{}, +-+ namedInterface: nil, +-+ namedChan: nil, +-+ namedMap: namedMap{}, +-+ namedSignature: nil, +-+ namedStruct: namedStruct{}, +-+ namedArray: namedArray{}, +-+ namedAlias: namedAlias{}, +-+ } //@codeaction("}", "refactor.rewrite.fillStruct", edit=named) +--- alias/alias.go -- +-package alias +- +-type foo struct {} +-type aliasFoo = foo +- +-func _() { +- type aliasInt = int +- type aliasString = string +- type aliasBool = bool +- type aliasPointer = *foo +- type aliasSlice = []foo +- type aliasInterface = interface{ Error() string } +- type aliasChan = chan int +- type aliasMap = map[string]foo +- type aliasSignature = func(string) string +- type aliasStruct = struct{ bar string } +- type aliasArray = 
[3]foo +- type aliasNamed = foo +- +- type bar struct { +- aliasInt aliasInt +- aliasString aliasString +- aliasBool aliasBool +- aliasPointer aliasPointer +- aliasSlice aliasSlice +- aliasInterface aliasInterface +- aliasChan aliasChan +- aliasMap aliasMap +- aliasSignature aliasSignature +- aliasStruct aliasStruct +- aliasArray aliasArray +- aliasNamed aliasNamed +- } +- +- bar{} //@codeaction("}", "refactor.rewrite.fillStruct", edit=alias) +-} +--- @alias/alias/alias.go -- +-@@ -35 +35,14 @@ +-- bar{} //@codeaction("}", "refactor.rewrite.fillStruct", edit=alias) +-+ bar{ +-+ aliasInt: 0, +-+ aliasString: "", +-+ aliasBool: false, +-+ aliasPointer: nil, +-+ aliasSlice: aliasSlice{}, +-+ aliasInterface: nil, +-+ aliasChan: nil, +-+ aliasMap: aliasMap{}, +-+ aliasSignature: nil, +-+ aliasStruct: aliasStruct{}, +-+ aliasArray: aliasArray{}, +-+ aliasNamed: aliasNamed{}, +-+ } //@codeaction("}", "refactor.rewrite.fillStruct", edit=alias) +diff -urN a/gopls/internal/test/marker/testdata/codeaction/fill_struct.txt b/gopls/internal/test/marker/testdata/codeaction/fill_struct.txt +--- a/gopls/internal/test/marker/testdata/codeaction/fill_struct.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/codeaction/fill_struct.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,763 +0,0 @@ +-This test checks the behavior of the 'fill struct' code action. +-See fill_struct_resolve.txt for same test with resolve support. +- +--- capabilities.json -- +-{ +- "textDocument": { +- "codeAction": { +- "dataSupport": false, +- "resolveSupport": {} +- } +- } +-} +- +--- flags -- +--ignore_extra_diags +- +--- go.mod -- +-module golang.org/lsptests/fillstruct +- +-go 1.18 +- +--- data/data.go -- +-package data +- +-type B struct { +- ExportedInt int +- unexportedInt int +-} +- +--- a.go -- +-package fillstruct +- +-import ( +- "golang.org/lsptests/fillstruct/data" +-) +- +-type basicStruct struct { +- foo int +-} +- +-var _ = basicStruct{} //@codeaction("}", "refactor.rewrite.fillStruct", edit=a1) +- +-type twoArgStruct struct { +- foo int +- bar string +-} +- +-var _ = twoArgStruct{} //@codeaction("}", "refactor.rewrite.fillStruct", edit=a2) +- +-type nestedStruct struct { +- bar string +- basic basicStruct +-} +- +-var _ = nestedStruct{} //@codeaction("}", "refactor.rewrite.fillStruct", edit=a3) +- +-var _ = data.B{} //@codeaction("}", "refactor.rewrite.fillStruct", edit=a4) +--- @a1/a.go -- +-@@ -11 +11,3 @@ +--var _ = basicStruct{} //@codeaction("}", "refactor.rewrite.fillStruct", edit=a1) +-+var _ = basicStruct{ +-+ foo: 0, +-+} //@codeaction("}", "refactor.rewrite.fillStruct", edit=a1) +--- @a2/a.go -- +-@@ -18 +18,4 @@ +--var _ = twoArgStruct{} //@codeaction("}", "refactor.rewrite.fillStruct", edit=a2) +-+var _ = twoArgStruct{ +-+ foo: 0, +-+ bar: "", +-+} //@codeaction("}", "refactor.rewrite.fillStruct", edit=a2) +--- @a3/a.go -- +-@@ -25 +25,4 @@ +--var _ = nestedStruct{} //@codeaction("}", "refactor.rewrite.fillStruct", edit=a3) +-+var _ = nestedStruct{ +-+ bar: "", +-+ basic: basicStruct{}, +-+} //@codeaction("}", "refactor.rewrite.fillStruct", edit=a3) +--- @a4/a.go -- +-@@ -27 +27,3 @@ +--var _ = data.B{} //@codeaction("}", "refactor.rewrite.fillStruct", edit=a4) +-+var _ = data.B{ +-+ ExportedInt: 0, +-+} //@codeaction("}", "refactor.rewrite.fillStruct", edit=a4) +--- a2.go -- +-package fillstruct +- +-type typedStruct struct { +- m map[string]int +- s []int +- c chan int +- c1 <-chan int +- a [2]string +-} +- +-var _ = typedStruct{} //@codeaction("}", 
"refactor.rewrite.fillStruct", edit=a21) +- +-type funStruct struct { +- fn func(i int) int +-} +- +-var _ = funStruct{} //@codeaction("}", "refactor.rewrite.fillStruct", edit=a22) +- +-type funStructComplex struct { +- fn func(i int, s string) (string, int) +-} +- +-var _ = funStructComplex{} //@codeaction("}", "refactor.rewrite.fillStruct", edit=a23) +- +-type funStructEmpty struct { +- fn func() +-} +- +-var _ = funStructEmpty{} //@codeaction("}", "refactor.rewrite.fillStruct", edit=a24) +- +--- @a21/a2.go -- +-@@ -11 +11,7 @@ +--var _ = typedStruct{} //@codeaction("}", "refactor.rewrite.fillStruct", edit=a21) +-+var _ = typedStruct{ +-+ m: map[string]int{}, +-+ s: []int{}, +-+ c: make(chan int), +-+ c1: make(<-chan int), +-+ a: [2]string{}, +-+} //@codeaction("}", "refactor.rewrite.fillStruct", edit=a21) +--- @a22/a2.go -- +-@@ -17 +17,5 @@ +--var _ = funStruct{} //@codeaction("}", "refactor.rewrite.fillStruct", edit=a22) +-+var _ = funStruct{ +-+ fn: func(i int) int { +-+ panic("TODO") +-+ }, +-+} //@codeaction("}", "refactor.rewrite.fillStruct", edit=a22) +--- @a23/a2.go -- +-@@ -23 +23,5 @@ +--var _ = funStructComplex{} //@codeaction("}", "refactor.rewrite.fillStruct", edit=a23) +-+var _ = funStructComplex{ +-+ fn: func(i int, s string) (string, int) { +-+ panic("TODO") +-+ }, +-+} //@codeaction("}", "refactor.rewrite.fillStruct", edit=a23) +--- @a24/a2.go -- +-@@ -29 +29,5 @@ +--var _ = funStructEmpty{} //@codeaction("}", "refactor.rewrite.fillStruct", edit=a24) +-+var _ = funStructEmpty{ +-+ fn: func() { +-+ panic("TODO") +-+ }, +-+} //@codeaction("}", "refactor.rewrite.fillStruct", edit=a24) +--- a3.go -- +-package fillstruct +- +-import ( +- "go/ast" +- "go/token" +-) +- +-type Foo struct { +- A int +-} +- +-type Bar struct { +- X *Foo +- Y *Foo +-} +- +-var _ = Bar{} //@codeaction("}", "refactor.rewrite.fillStruct", edit=a31) +- +-type importedStruct struct { +- m map[*ast.CompositeLit]ast.Field +- s []ast.BadExpr +- a [3]token.Token +- c chan ast.EmptyStmt +- fn func(ast_decl ast.DeclStmt) ast.Ellipsis +- st ast.CompositeLit +-} +- +-var _ = importedStruct{} //@codeaction("}", "refactor.rewrite.fillStruct", edit=a32) +- +-type pointerBuiltinStruct struct { +- b *bool +- s *string +- i *int +-} +- +-var _ = pointerBuiltinStruct{} //@codeaction("}", "refactor.rewrite.fillStruct", edit=a33) +- +-var _ = []ast.BasicLit{ +- {}, //@codeaction("}", "refactor.rewrite.fillStruct", edit=a34) +-} +- +-var _ = []ast.BasicLit{{}} //@codeaction("}", "refactor.rewrite.fillStruct", edit=a35) +--- @a31/a3.go -- +-@@ -17 +17,4 @@ +--var _ = Bar{} //@codeaction("}", "refactor.rewrite.fillStruct", edit=a31) +-+var _ = Bar{ +-+ X: &Foo{}, +-+ Y: &Foo{}, +-+} //@codeaction("}", "refactor.rewrite.fillStruct", edit=a31) +--- @a32/a3.go -- +-@@ -28 +28,10 @@ +--var _ = importedStruct{} //@codeaction("}", "refactor.rewrite.fillStruct", edit=a32) +-+var _ = importedStruct{ +-+ m: map[*ast.CompositeLit]ast.Field{}, +-+ s: []ast.BadExpr{}, +-+ a: [3]token.Token{}, +-+ c: make(chan ast.EmptyStmt), +-+ fn: func(ast_decl ast.DeclStmt) ast.Ellipsis { +-+ panic("TODO") +-+ }, +-+ st: ast.CompositeLit{}, +-+} //@codeaction("}", "refactor.rewrite.fillStruct", edit=a32) +--- @a33/a3.go -- +-@@ -36 +36,5 @@ +--var _ = pointerBuiltinStruct{} //@codeaction("}", "refactor.rewrite.fillStruct", edit=a33) +-+var _ = pointerBuiltinStruct{ +-+ b: new(bool), +-+ s: new(string), +-+ i: new(int), +-+} //@codeaction("}", "refactor.rewrite.fillStruct", edit=a33) +--- @a34/a3.go -- +-@@ -39 +39,5 @@ +-- {}, //@codeaction("}", 
"refactor.rewrite.fillStruct", edit=a34) +-+ { +-+ ValuePos: 0, +-+ Kind: 0, +-+ Value: "", +-+ }, //@codeaction("}", "refactor.rewrite.fillStruct", edit=a34) +--- @a35/a3.go -- +-@@ -42 +42,5 @@ +--var _ = []ast.BasicLit{{}} //@codeaction("}", "refactor.rewrite.fillStruct", edit=a35) +-+var _ = []ast.BasicLit{{ +-+ ValuePos: 0, +-+ Kind: 0, +-+ Value: "", +-+}} //@codeaction("}", "refactor.rewrite.fillStruct", edit=a35) +--- a4.go -- +-package fillstruct +- +-import "go/ast" +- +-type iStruct struct { +- X int +-} +- +-type sStruct struct { +- str string +-} +- +-type multiFill struct { +- num int +- strin string +- arr []int +-} +- +-type assignStruct struct { +- n ast.Node +-} +- +-func fill() { +- var x int +- var _ = iStruct{} //@codeaction("}", "refactor.rewrite.fillStruct", edit=a41) +- +- var s string +- var _ = sStruct{} //@codeaction("}", "refactor.rewrite.fillStruct", edit=a42) +- +- var n int +- _ = []int{} +- if true { +- arr := []int{1, 2} +- } +- var _ = multiFill{} //@codeaction("}", "refactor.rewrite.fillStruct", edit=a43) +- +- var node *ast.CompositeLit +- var _ = assignStruct{} //@codeaction("}", "refactor.rewrite.fillStruct", edit=a45) +-} +- +--- @a41/a4.go -- +-@@ -25 +25,3 @@ +-- var _ = iStruct{} //@codeaction("}", "refactor.rewrite.fillStruct", edit=a41) +-+ var _ = iStruct{ +-+ X: x, +-+ } //@codeaction("}", "refactor.rewrite.fillStruct", edit=a41) +--- @a42/a4.go -- +-@@ -28 +28,3 @@ +-- var _ = sStruct{} //@codeaction("}", "refactor.rewrite.fillStruct", edit=a42) +-+ var _ = sStruct{ +-+ str: s, +-+ } //@codeaction("}", "refactor.rewrite.fillStruct", edit=a42) +--- @a43/a4.go -- +-@@ -35 +35,5 @@ +-- var _ = multiFill{} //@codeaction("}", "refactor.rewrite.fillStruct", edit=a43) +-+ var _ = multiFill{ +-+ num: n, +-+ strin: s, +-+ arr: []int{}, +-+ } //@codeaction("}", "refactor.rewrite.fillStruct", edit=a43) +--- @a45/a4.go -- +-@@ -38 +38,3 @@ +-- var _ = assignStruct{} //@codeaction("}", "refactor.rewrite.fillStruct", edit=a45) +-+ var _ = assignStruct{ +-+ n: node, +-+ } //@codeaction("}", "refactor.rewrite.fillStruct", edit=a45) +--- fillStruct.go -- +-package fillstruct +- +-type StructB struct{} +- +-type StructA struct { +- unexportedIntField int +- ExportedIntField int +- MapA map[int]string +- Array []int +- StructB +-} +- +-type StructA2 struct { +- B *StructB +-} +- +-type StructA3 struct { +- B StructB +-} +- +-func fill() { +- a := StructA{} //@codeaction("}", "refactor.rewrite.fillStruct", edit=fillStruct1) +- b := StructA2{} //@codeaction("}", "refactor.rewrite.fillStruct", edit=fillStruct2) +- c := StructA3{} //@codeaction("}", "refactor.rewrite.fillStruct", edit=fillStruct3) +- if true { +- _ = StructA3{} //@codeaction("}", "refactor.rewrite.fillStruct", edit=fillStruct4) +- } +-} +- +--- @fillStruct1/fillStruct.go -- +-@@ -22 +22,7 @@ +-- a := StructA{} //@codeaction("}", "refactor.rewrite.fillStruct", edit=fillStruct1) +-+ a := StructA{ +-+ unexportedIntField: 0, +-+ ExportedIntField: 0, +-+ MapA: map[int]string{}, +-+ Array: []int{}, +-+ StructB: StructB{}, +-+ } //@codeaction("}", "refactor.rewrite.fillStruct", edit=fillStruct1) +--- @fillStruct2/fillStruct.go -- +-@@ -23 +23,3 @@ +-- b := StructA2{} //@codeaction("}", "refactor.rewrite.fillStruct", edit=fillStruct2) +-+ b := StructA2{ +-+ B: &StructB{}, +-+ } //@codeaction("}", "refactor.rewrite.fillStruct", edit=fillStruct2) +--- @fillStruct3/fillStruct.go -- +-@@ -24 +24,3 @@ +-- c := StructA3{} //@codeaction("}", "refactor.rewrite.fillStruct", edit=fillStruct3) +-+ c := StructA3{ +-+ 
B: StructB{}, +-+ } //@codeaction("}", "refactor.rewrite.fillStruct", edit=fillStruct3) +--- @fillStruct4/fillStruct.go -- +-@@ -26 +26,3 @@ +-- _ = StructA3{} //@codeaction("}", "refactor.rewrite.fillStruct", edit=fillStruct4) +-+ _ = StructA3{ +-+ B: StructB{}, +-+ } //@codeaction("}", "refactor.rewrite.fillStruct", edit=fillStruct4) +--- fillStruct_anon.go -- +-package fillstruct +- +-type StructAnon struct { +- a struct{} +- b map[string]any +- c map[string]struct { +- d int +- e bool +- } +-} +- +-func fill() { +- _ := StructAnon{} //@codeaction("}", "refactor.rewrite.fillStruct", edit=fillStruct_anon) +-} +--- @fillStruct_anon/fillStruct_anon.go -- +-@@ -13 +13,8 @@ +-- _ := StructAnon{} //@codeaction("}", "refactor.rewrite.fillStruct", edit=fillStruct_anon) +-+ _ := StructAnon{ +-+ a: struct{}{}, +-+ b: map[string]any{}, +-+ c: map[string]struct { +-+ d int +-+ e bool +-+ }{}, +-+ } //@codeaction("}", "refactor.rewrite.fillStruct", edit=fillStruct_anon) +--- fillStruct_nested.go -- +-package fillstruct +- +-type StructB struct { +- StructC +-} +- +-type StructC struct { +- unexportedInt int +-} +- +-func nested() { +- c := StructB{ +- StructC: StructC{}, //@codeaction("}", "refactor.rewrite.fillStruct", edit=fill_nested) +- } +-} +- +--- @fill_nested/fillStruct_nested.go -- +-@@ -13 +13,3 @@ +-- StructC: StructC{}, //@codeaction("}", "refactor.rewrite.fillStruct", edit=fill_nested) +-+ StructC: StructC{ +-+ unexportedInt: 0, +-+ }, //@codeaction("}", "refactor.rewrite.fillStruct", edit=fill_nested) +--- fillStruct_package.go -- +-package fillstruct +- +-import ( +- h2 "net/http" +- +- "golang.org/lsptests/fillstruct/data" +-) +- +-func unexported() { +- a := data.B{} //@codeaction("}", "refactor.rewrite.fillStruct", edit=fillStruct_package1) +- _ = h2.Client{} //@codeaction("}", "refactor.rewrite.fillStruct", edit=fillStruct_package2) +-} +--- @fillStruct_package1/fillStruct_package.go -- +-@@ -10 +10,3 @@ +-- a := data.B{} //@codeaction("}", "refactor.rewrite.fillStruct", edit=fillStruct_package1) +-+ a := data.B{ +-+ ExportedInt: 0, +-+ } //@codeaction("}", "refactor.rewrite.fillStruct", edit=fillStruct_package1) +--- @fillStruct_package2/fillStruct_package.go -- +-@@ -11 +11,8 @@ +-- _ = h2.Client{} //@codeaction("}", "refactor.rewrite.fillStruct", edit=fillStruct_package2) +-+ _ = h2.Client{ +-+ Transport: nil, +-+ CheckRedirect: func(req *h2.Request, via []*h2.Request) error { +-+ panic("TODO") +-+ }, +-+ Jar: nil, +-+ Timeout: 0, +-+ } //@codeaction("}", "refactor.rewrite.fillStruct", edit=fillStruct_package2) +--- fillStruct_partial.go -- +-package fillstruct +- +-type StructPartialA struct { +- PrefilledInt int +- UnfilledInt int +- StructPartialB +-} +- +-type StructPartialB struct { +- PrefilledInt int +- UnfilledInt int +-} +- +-func fill() { +- a := StructPartialA{ +- PrefilledInt: 5, +- } //@codeaction("}", "refactor.rewrite.fillStruct", edit=fillStruct_partial1) +- b := StructPartialB{ +- /* this comment should disappear */ +- PrefilledInt: 7, // This comment should be blown away. 
+- /* As should +- this one */ +- } //@codeaction("}", "refactor.rewrite.fillStruct", edit=fillStruct_partial2) +-} +- +--- @fillStruct_partial1/fillStruct_partial.go -- +-@@ -16 +16,3 @@ +-- PrefilledInt: 5, +-+ PrefilledInt: 5, +-+ UnfilledInt: 0, +-+ StructPartialB: StructPartialB{}, +--- @fillStruct_partial2/fillStruct_partial.go -- +-@@ -23 +23 @@ +-+ UnfilledInt: 0, +--- fillStruct_spaces.go -- +-package fillstruct +- +-type StructD struct { +- ExportedIntField int +-} +- +-func spaces() { +- d := StructD{} //@codeaction("}", "refactor.rewrite.fillStruct", edit=fillStruct_spaces) +-} +- +--- @fillStruct_spaces/fillStruct_spaces.go -- +-@@ -8 +8,3 @@ +-- d := StructD{} //@codeaction("}", "refactor.rewrite.fillStruct", edit=fillStruct_spaces) +-+ d := StructD{ +-+ ExportedIntField: 0, +-+ } //@codeaction("}", "refactor.rewrite.fillStruct", edit=fillStruct_spaces) +--- fillStruct_unsafe.go -- +-package fillstruct +- +-import "unsafe" +- +-type unsafeStruct struct { +- x int +- p unsafe.Pointer +-} +- +-func fill() { +- _ := unsafeStruct{} //@codeaction("}", "refactor.rewrite.fillStruct", edit=fillStruct_unsafe) +-} +- +--- @fillStruct_unsafe/fillStruct_unsafe.go -- +-@@ -11 +11,4 @@ +-- _ := unsafeStruct{} //@codeaction("}", "refactor.rewrite.fillStruct", edit=fillStruct_unsafe) +-+ _ := unsafeStruct{ +-+ x: 0, +-+ p: nil, +-+ } //@codeaction("}", "refactor.rewrite.fillStruct", edit=fillStruct_unsafe) +--- typeparams.go -- +-package fillstruct +- +-type emptyStructWithTypeParams[A any] struct{} +- +-var _ = emptyStructWithTypeParams[int]{} // no suggested fix +- +-type basicStructWithTypeParams[T any] struct { +- foo T +-} +- +-var _ = basicStructWithTypeParams[int]{} //@codeaction("}", "refactor.rewrite.fillStruct", edit=typeparams1) +- +-type twoArgStructWithTypeParams[F, B any] struct { +- foo F +- bar B +-} +- +-var _ = twoArgStructWithTypeParams[string, int]{} //@codeaction("}", "refactor.rewrite.fillStruct", edit=typeparams2) +- +-var _ = twoArgStructWithTypeParams[int, string]{ +- bar: "bar", +-} //@codeaction("}", "refactor.rewrite.fillStruct", edit=typeparams3) +- +-type nestedStructWithTypeParams struct { +- bar string +- basic basicStructWithTypeParams[int] +-} +- +-var _ = nestedStructWithTypeParams{} //@codeaction("}", "refactor.rewrite.fillStruct", edit=typeparams4) +- +-func _[T any]() { +- type S struct{ t T } +- _ = S{} //@codeaction("}", "refactor.rewrite.fillStruct", edit=typeparams5) +- +- type P struct{ t *T } +- _ = P{} //@codeaction("}", "refactor.rewrite.fillStruct", edit=typeparams6) +- +- type Alias[u any] = struct { +- x u +- y *T +- } +- _ = Alias[string]{} //@codeaction("}", "refactor.rewrite.fillStruct", edit=typeparams7) +- +- type Named[u any] struct { +- x u +- y T +- } +- _ = Named[int]{} //@codeaction("}", "refactor.rewrite.fillStruct", edit=typeparams8) +-} +--- @typeparams1/typeparams.go -- +-@@ -11 +11,3 @@ +--var _ = basicStructWithTypeParams[int]{} //@codeaction("}", "refactor.rewrite.fillStruct", edit=typeparams1) +-+var _ = basicStructWithTypeParams[int]{ +-+ foo: 0, +-+} //@codeaction("}", "refactor.rewrite.fillStruct", edit=typeparams1) +--- @typeparams2/typeparams.go -- +-@@ -18 +18,4 @@ +--var _ = twoArgStructWithTypeParams[string, int]{} //@codeaction("}", "refactor.rewrite.fillStruct", edit=typeparams2) +-+var _ = twoArgStructWithTypeParams[string, int]{ +-+ foo: "", +-+ bar: 0, +-+} //@codeaction("}", "refactor.rewrite.fillStruct", edit=typeparams2) +--- @typeparams3/typeparams.go -- +-@@ -22 +22 @@ +-+ foo: 0, +--- 
@typeparams4/typeparams.go -- +-@@ -29 +29,4 @@ +--var _ = nestedStructWithTypeParams{} //@codeaction("}", "refactor.rewrite.fillStruct", edit=typeparams4) +-+var _ = nestedStructWithTypeParams{ +-+ bar: "", +-+ basic: basicStructWithTypeParams[int]{}, +-+} //@codeaction("}", "refactor.rewrite.fillStruct", edit=typeparams4) +--- @typeparams5/typeparams.go -- +-@@ -33 +33,3 @@ +-- _ = S{} //@codeaction("}", "refactor.rewrite.fillStruct", edit=typeparams5) +-+ _ = S{ +-+ t: *new(T), +-+ } //@codeaction("}", "refactor.rewrite.fillStruct", edit=typeparams5) +--- @typeparams6/typeparams.go -- +-@@ -36 +36,3 @@ +-- _ = P{} //@codeaction("}", "refactor.rewrite.fillStruct", edit=typeparams6) +-+ _ = P{ +-+ t: new(T), +-+ } //@codeaction("}", "refactor.rewrite.fillStruct", edit=typeparams6) +--- @typeparams7/typeparams.go -- +-@@ -42 +42,4 @@ +-- _ = Alias[string]{} //@codeaction("}", "refactor.rewrite.fillStruct", edit=typeparams7) +-+ _ = Alias[string]{ +-+ x: "", +-+ y: new(T), +-+ } //@codeaction("}", "refactor.rewrite.fillStruct", edit=typeparams7) +--- @typeparams8/typeparams.go -- +-@@ -48 +48,4 @@ +-- _ = Named[int]{} //@codeaction("}", "refactor.rewrite.fillStruct", edit=typeparams8) +-+ _ = Named[int]{ +-+ x: 0, +-+ y: *new(T), +-+ } //@codeaction("}", "refactor.rewrite.fillStruct", edit=typeparams8) +--- issue63921.go -- +-package fillstruct +- +-// Test for golang/go#63921: fillstruct panicked with invalid fields. +-type invalidStruct struct { +- F int +- Undefined +-} +- +-func _() { +- // Note: the golden content for issue63921 is empty: fillstruct produces no +- // edits, but does not panic. +- invalidStruct{} //@codeaction("}", "refactor.rewrite.fillStruct", edit=issue63921) +-} +--- named/named.go -- +-package named +- +-type foo struct {} +-type aliasFoo = foo +- +-func _() { +- type namedInt int +- type namedString string +- type namedBool bool +- type namedPointer *foo +- type namedSlice []foo +- type namedInterface interface{ Error() string } +- type namedChan chan int +- type namedMap map[string]foo +- type namedSignature func(string) string +- type namedStruct struct{} +- type namedArray [3]foo +- type namedAlias aliasFoo +- +- type bar struct { +- namedInt namedInt +- namedString namedString +- namedBool namedBool +- namedPointer namedPointer +- namedSlice namedSlice +- namedInterface namedInterface +- namedChan namedChan +- namedMap namedMap +- namedSignature namedSignature +- namedStruct namedStruct +- namedArray namedArray +- namedAlias namedAlias +- } +- +- bar{} //@codeaction("}", "refactor.rewrite.fillStruct", edit=named) +-} +--- @named/named/named.go -- +-@@ -35 +35,14 @@ +-- bar{} //@codeaction("}", "refactor.rewrite.fillStruct", edit=named) +-+ bar{ +-+ namedInt: 0, +-+ namedString: "", +-+ namedBool: false, +-+ namedPointer: nil, +-+ namedSlice: namedSlice{}, +-+ namedInterface: nil, +-+ namedChan: nil, +-+ namedMap: namedMap{}, +-+ namedSignature: nil, +-+ namedStruct: namedStruct{}, +-+ namedArray: namedArray{}, +-+ namedAlias: namedAlias{}, +-+ } //@codeaction("}", "refactor.rewrite.fillStruct", edit=named) +--- alias/alias.go -- +-package alias +- +-type foo struct {} +-type aliasFoo = foo +- +-func _() { +- type aliasInt = int +- type aliasString = string +- type aliasBool = bool +- type aliasPointer = *foo +- type aliasSlice = []foo +- type aliasInterface = interface{ Error() string } +- type aliasChan = chan int +- type aliasMap = map[string]foo +- type aliasSignature = func(string) string +- type aliasStruct = struct{ bar string } +- type aliasArray = 
[3]foo +- type aliasNamed = foo +- +- type bar struct { +- aliasInt aliasInt +- aliasString aliasString +- aliasBool aliasBool +- aliasPointer aliasPointer +- aliasSlice aliasSlice +- aliasInterface aliasInterface +- aliasChan aliasChan +- aliasMap aliasMap +- aliasSignature aliasSignature +- aliasStruct aliasStruct +- aliasArray aliasArray +- aliasNamed aliasNamed +- } +- +- bar{} //@codeaction("}", "refactor.rewrite.fillStruct", edit=alias) +-} +--- @alias/alias/alias.go -- +-@@ -35 +35,14 @@ +-- bar{} //@codeaction("}", "refactor.rewrite.fillStruct", edit=alias) +-+ bar{ +-+ aliasInt: 0, +-+ aliasString: "", +-+ aliasBool: false, +-+ aliasPointer: nil, +-+ aliasSlice: aliasSlice{}, +-+ aliasInterface: nil, +-+ aliasChan: nil, +-+ aliasMap: aliasMap{}, +-+ aliasSignature: nil, +-+ aliasStruct: aliasStruct{}, +-+ aliasArray: aliasArray{}, +-+ aliasNamed: aliasNamed{}, +-+ } //@codeaction("}", "refactor.rewrite.fillStruct", edit=alias) +--- preserveformat/preserveformat.go -- +-package preserveformat +- +-type ( +- Node struct { +- Value int +- } +- Graph struct { +- Nodes []*Node `json:""` +- Edges map[*Node]*Node +- Other string +- } +-) +- +-func _() { +- _ := &Graph{ +- // comments at the start preserved +- Nodes: []*Node{ +- {Value: 0}, // comments in the middle preserved +- // between lines +- {Value: 0}, +- }, // another comment +- // comment group +- // below +- } //@codeaction("}", "refactor.rewrite.fillStruct", edit=preserveformat) +-} +--- @preserveformat/preserveformat/preserveformat.go -- +-@@ -24 +24,2 @@ +-+ Edges: map[*Node]*Node{}, +-+ Other: "", +diff -urN a/gopls/internal/test/marker/testdata/codeaction/fill_switch_resolve.txt b/gopls/internal/test/marker/testdata/codeaction/fill_switch_resolve.txt +--- a/gopls/internal/test/marker/testdata/codeaction/fill_switch_resolve.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/codeaction/fill_switch_resolve.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,105 +0,0 @@ +-This test checks the behavior of the 'fill switch' code action, with resolve support. +-See fill_switch.txt for same test without resolve support. 
+- +--- flags -- +--ignore_extra_diags +- +--- go.mod -- +-module golang.org/lsptests/fillswitch +- +-go 1.18 +- +--- data/data.go -- +-package data +- +-type TypeB int +- +-const ( +- TypeBOne TypeB = iota +- TypeBTwo +- TypeBThree +-) +- +--- a.go -- +-package fillswitch +- +-import ( +- "golang.org/lsptests/fillswitch/data" +-) +- +-type typeA int +- +-const ( +- typeAOne typeA = iota +- typeATwo +- typeAThree +-) +- +-type notification interface { +- isNotification() +-} +- +-type notificationOne struct{} +- +-func (notificationOne) isNotification() {} +- +-type notificationTwo struct{} +- +-func (notificationTwo) isNotification() {} +- +-func doSwitch() { +- var b data.TypeB +- switch b { +- case data.TypeBOne: //@codeaction(":", "refactor.rewrite.fillSwitch", edit=a1) +- } +- +- var a typeA +- switch a { +- case typeAThree: //@codeaction(":", "refactor.rewrite.fillSwitch", edit=a2) +- } +- +- var n notification +- switch n.(type) { //@codeaction("{", "refactor.rewrite.fillSwitch", edit=a3) +- } +- +- switch nt := n.(type) { //@codeaction("{", "refactor.rewrite.fillSwitch", edit=a4) +- } +- +- var s struct { +- a typeA +- } +- +- switch s.a { +- case typeAThree: //@codeaction(":", "refactor.rewrite.fillSwitch", edit=a5) +- } +-} +--- @a1/a.go -- +-@@ -31 +31,4 @@ +-+ case data.TypeBThree: +-+ case data.TypeBTwo: +-+ default: +-+ panic(fmt.Sprintf("unexpected data.TypeB: %#v", b)) +--- @a2/a.go -- +-@@ -36 +36,4 @@ +-+ case typeAOne: +-+ case typeATwo: +-+ default: +-+ panic(fmt.Sprintf("unexpected fillswitch.typeA: %#v", a)) +--- @a3/a.go -- +-@@ -40 +40,4 @@ +-+ case notificationOne: +-+ case notificationTwo: +-+ default: +-+ panic(fmt.Sprintf("unexpected fillswitch.notification: %#v", n)) +--- @a4/a.go -- +-@@ -43 +43,4 @@ +-+ case notificationOne: +-+ case notificationTwo: +-+ default: +-+ panic(fmt.Sprintf("unexpected fillswitch.notification: %#v", nt)) +--- @a5/a.go -- +-@@ -51 +51,4 @@ +-+ case typeAOne: +-+ case typeATwo: +-+ default: +-+ panic(fmt.Sprintf("unexpected fillswitch.typeA: %#v", s.a)) +diff -urN a/gopls/internal/test/marker/testdata/codeaction/fill_switch.txt b/gopls/internal/test/marker/testdata/codeaction/fill_switch.txt +--- a/gopls/internal/test/marker/testdata/codeaction/fill_switch.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/codeaction/fill_switch.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,114 +0,0 @@ +-This test checks the behavior of the 'fill switch' code action. +-See fill_switch_resolve.txt for same test with resolve support. 
+- +--- capabilities.json -- +-{ +- "textDocument": { +- "codeAction": { +- "dataSupport": false, +- "resolveSupport": {} +- } +- } +-} +--- flags -- +--ignore_extra_diags +- +--- go.mod -- +-module golang.org/lsptests/fillswitch +- +-go 1.18 +- +--- data/data.go -- +-package data +- +-type TypeB int +- +-const ( +- TypeBOne TypeB = iota +- TypeBTwo +- TypeBThree +-) +- +--- a.go -- +-package fillswitch +- +-import ( +- "golang.org/lsptests/fillswitch/data" +-) +- +-type typeA int +- +-const ( +- typeAOne typeA = iota +- typeATwo +- typeAThree +-) +- +-type notification interface { +- isNotification() +-} +- +-type notificationOne struct{} +- +-func (notificationOne) isNotification() {} +- +-type notificationTwo struct{} +- +-func (notificationTwo) isNotification() {} +- +-func doSwitch() { +- var b data.TypeB +- switch b { +- case data.TypeBOne: //@codeaction(":", "refactor.rewrite.fillSwitch", edit=a1) +- } +- +- var a typeA +- switch a { +- case typeAThree: //@codeaction(":", "refactor.rewrite.fillSwitch", edit=a2) +- } +- +- var n notification +- switch n.(type) { //@codeaction("{", "refactor.rewrite.fillSwitch", edit=a3) +- } +- +- switch nt := n.(type) { //@codeaction("{", "refactor.rewrite.fillSwitch", edit=a4) +- } +- +- var s struct { +- a typeA +- } +- +- switch s.a { +- case typeAThree: //@codeaction(":", "refactor.rewrite.fillSwitch", edit=a5) +- } +-} +--- @a1/a.go -- +-@@ -31 +31,4 @@ +-+ case data.TypeBThree: +-+ case data.TypeBTwo: +-+ default: +-+ panic(fmt.Sprintf("unexpected data.TypeB: %#v", b)) +--- @a2/a.go -- +-@@ -36 +36,4 @@ +-+ case typeAOne: +-+ case typeATwo: +-+ default: +-+ panic(fmt.Sprintf("unexpected fillswitch.typeA: %#v", a)) +--- @a3/a.go -- +-@@ -40 +40,4 @@ +-+ case notificationOne: +-+ case notificationTwo: +-+ default: +-+ panic(fmt.Sprintf("unexpected fillswitch.notification: %#v", n)) +--- @a4/a.go -- +-@@ -43 +43,4 @@ +-+ case notificationOne: +-+ case notificationTwo: +-+ default: +-+ panic(fmt.Sprintf("unexpected fillswitch.notification: %#v", nt)) +--- @a5/a.go -- +-@@ -51 +51,4 @@ +-+ case typeAOne: +-+ case typeATwo: +-+ default: +-+ panic(fmt.Sprintf("unexpected fillswitch.typeA: %#v", s.a)) +diff -urN a/gopls/internal/test/marker/testdata/codeaction/functionextraction_issue44813.txt b/gopls/internal/test/marker/testdata/codeaction/functionextraction_issue44813.txt +--- a/gopls/internal/test/marker/testdata/codeaction/functionextraction_issue44813.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/codeaction/functionextraction_issue44813.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,41 +0,0 @@ +-This test verifies the fix for golang/go#44813: extraction failure when there +-are blank identifiers. 
+- +--- go.mod -- +-module mod.test/extract +- +-go 1.18 +- +--- p.go -- +-package extract +- +-import "fmt" +- +-func main() { +- x := []rune{} //@codeaction("x", "refactor.extract.function", end=end, result=ext) +- s := "HELLO" +- for _, c := range s { +- x = append(x, c) +- } //@loc(end, "}") +- fmt.Printf("%x\n", x) +-} +- +--- @ext/p.go -- +-package extract +- +-import "fmt" +- +-func main() { +- x := newFunction() //@loc(end, "}") +- fmt.Printf("%x\n", x) +-} +- +-func newFunction() []rune { +- x := []rune{} //@codeaction("x", "refactor.extract.function", end=end, result=ext) +- s := "HELLO" +- for _, c := range s { +- x = append(x, c) +- } +- return x +-} +- +diff -urN a/gopls/internal/test/marker/testdata/codeaction/functionextraction_issue50851.txt b/gopls/internal/test/marker/testdata/codeaction/functionextraction_issue50851.txt +--- a/gopls/internal/test/marker/testdata/codeaction/functionextraction_issue50851.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/codeaction/functionextraction_issue50851.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,35 +0,0 @@ +-This test checks that function extraction moves comments along with the +-extracted code. +- +--- main.go -- +-package main +- +-type F struct{} +- +-func (f *F) _() { +- println("a") +- +- println("b") //@ codeaction("print", "refactor.extract.function", end=end, result=result) +- // This line prints the third letter of the alphabet. +- println("c") //@loc(end, ")") +- +- println("d") +-} +--- @result/main.go -- +-package main +- +-type F struct{} +- +-func (f *F) _() { +- println("a") +- +- newFunction() //@loc(end, ")") +- +- println("d") +-} +- +-func newFunction() { +- println("b") //@ codeaction("print", "refactor.extract.function", end=end, result=result) +- // This line prints the third letter of the alphabet. 
+- println("c") +-} +diff -urN a/gopls/internal/test/marker/testdata/codeaction/functionextraction_issue66289.txt b/gopls/internal/test/marker/testdata/codeaction/functionextraction_issue66289.txt +--- a/gopls/internal/test/marker/testdata/codeaction/functionextraction_issue66289.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/codeaction/functionextraction_issue66289.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,97 +0,0 @@ +- +--- a.go -- +-package a +- +-import ( +- "fmt" +- "encoding/json" +-) +- +-func F() error { +- a, err := json.Marshal(0) //@codeaction("a", "refactor.extract.function", end=endF, result=F) +- if err != nil { +- return fmt.Errorf("1: %w", err) +- } +- b, err := json.Marshal(0) +- if err != nil { +- return fmt.Errorf("2: %w", err) +- } //@loc(endF, "}") +- fmt.Printf("%s %s", a, b) +- return nil +-} +- +--- @F/a.go -- +-package a +- +-import ( +- "fmt" +- "encoding/json" +-) +- +-func F() error { +- a, b, err := newFunction() +- if err != nil { +- return err +- } //@loc(endF, "}") +- fmt.Printf("%s %s", a, b) +- return nil +-} +- +-func newFunction() ([]byte, []byte, error) { +- a, err := json.Marshal(0) //@codeaction("a", "refactor.extract.function", end=endF, result=F) +- if err != nil { +- return nil, nil, fmt.Errorf("1: %w", err) +- } +- b, err := json.Marshal(0) +- if err != nil { +- return nil, nil, fmt.Errorf("2: %w", err) +- } +- return a, b, nil +-} +- +--- b.go -- +-package a +- +-import ( +- "fmt" +- "math/rand" +-) +- +-func G() (x, y int) { +- v := rand.Int() //@codeaction("v", "refactor.extract.function", end=endG, result=G) +- if v < 0 { +- return 1, 2 +- } +- if v > 0 { +- return 3, 4 +- } //@loc(endG, "}") +- fmt.Println(v) +- return 5, 6 +-} +--- @G/b.go -- +-package a +- +-import ( +- "fmt" +- "math/rand" +-) +- +-func G() (x, y int) { +- v, x1, y1, shouldReturn := newFunction() +- if shouldReturn { +- return x1, y1 +- } //@loc(endG, "}") +- fmt.Println(v) +- return 5, 6 +-} +- +-func newFunction() (int, int, int, bool) { +- v := rand.Int() //@codeaction("v", "refactor.extract.function", end=endG, result=G) +- if v < 0 { +- return 0, 1, 2, true +- } +- if v > 0 { +- return 0, 3, 4, true +- } +- return v, 0, 0, false +-} +diff -urN a/gopls/internal/test/marker/testdata/codeaction/functionextraction_issue73972.txt b/gopls/internal/test/marker/testdata/codeaction/functionextraction_issue73972.txt +--- a/gopls/internal/test/marker/testdata/codeaction/functionextraction_issue73972.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/codeaction/functionextraction_issue73972.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,52 +0,0 @@ +-This test verifies the fix for golang/go#73972: extraction should +-not modify the return statements of anonymous functions. 
+- +--- go.mod -- +-module mod.test/extract +- +-go 1.18 +- +--- a.go -- +-package extract +- +-import ( +- "fmt" +- "strings" +-) +- +-func main() { +- b := strings.ContainsFunc("a", func(_ rune) bool { //@codeaction("b", "refactor.extract.function", end=end, result=ext) +- return false +- }) +- if b { +- return +- } //@loc(end, "}") +- fmt.Println(b) +-} +- +--- @ext/a.go -- +-package extract +- +-import ( +- "fmt" +- "strings" +-) +- +-func main() { +- b, shouldReturn := newFunction() +- if shouldReturn { +- return +- } //@loc(end, "}") +- fmt.Println(b) +-} +- +-func newFunction() (bool, bool) { +- b := strings.ContainsFunc("a", func(_ rune) bool { //@codeaction("b", "refactor.extract.function", end=end, result=ext) +- return false +- }) +- if b { +- return false, true +- } +- return b, false +-} +- +diff -urN a/gopls/internal/test/marker/testdata/codeaction/functionextraction.txt b/gopls/internal/test/marker/testdata/codeaction/functionextraction.txt +--- a/gopls/internal/test/marker/testdata/codeaction/functionextraction.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/codeaction/functionextraction.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,601 +0,0 @@ +-This test verifies various behaviors of function extraction. +- +--- go.mod -- +-module mod.test/extract +- +-go 1.18 +- +--- basic.go -- +-package extract +- +-func _() { //@codeaction("{", "refactor.extract.function", end=closeBracket, result=outer) +- a := 1 //@codeaction("a", "refactor.extract.function", end=end, result=inner) +- _ = a + 4 //@loc(end, "4") +-} //@loc(closeBracket, "}") +- +--- @outer/basic.go -- +-package extract +- +-func _() { //@codeaction("{", "refactor.extract.function", end=closeBracket, result=outer) +- newFunction() //@loc(end, "4") +-} +- +-func newFunction() { +- a := 1 //@codeaction("a", "refactor.extract.function", end=end, result=inner) +- _ = a + 4 +-} //@loc(closeBracket, "}") +- +--- @inner/basic.go -- +-package extract +- +-func _() { //@codeaction("{", "refactor.extract.function", end=closeBracket, result=outer) +- newFunction() //@loc(end, "4") +-} +- +-func newFunction() { +- a := 1 //@codeaction("a", "refactor.extract.function", end=end, result=inner) +- _ = a + 4 +-} //@loc(closeBracket, "}") +- +--- return.go -- +-package extract +- +-func _() bool { +- x := 1 +- if x == 0 { //@codeaction("if", "refactor.extract.function", end=ifend, result=return) +- return true +- } //@loc(ifend, "}") +- return false +-} +- +--- @return/return.go -- +-package extract +- +-func _() bool { +- x := 1 +- b, shouldReturn := newFunction(x) +- if shouldReturn { +- return b +- } //@loc(ifend, "}") +- return false +-} +- +-func newFunction(x int) (bool, bool) { +- if x == 0 { //@codeaction("if", "refactor.extract.function", end=ifend, result=return) +- return true, true +- } +- return false, false +-} +- +--- return_nonnested.go -- +-package extract +- +-func _() bool { +- x := 1 //@codeaction("x", "refactor.extract.function", end=rnnEnd, result=rnn) +- if x == 0 { +- return true +- } +- return false //@loc(rnnEnd, "false") +-} +- +--- @rnn/return_nonnested.go -- +-package extract +- +-func _() bool { +- return newFunction() //@loc(rnnEnd, "false") +-} +- +-func newFunction() bool { +- x := 1 //@codeaction("x", "refactor.extract.function", end=rnnEnd, result=rnn) +- if x == 0 { +- return true +- } +- return false +-} +- +--- return_complex.go -- +-package extract +- +-import "fmt" +- +-func _() (int, string, error) { +- x := 1 +- y := "hello" +- z := "bye" //@codeaction("z", 
"refactor.extract.function", end=rcEnd, result=rc) +- if y == z { +- return x, y, fmt.Errorf("same") +- } else if false { +- z = "hi" +- return x, z, nil +- } //@loc(rcEnd, "}") +- return x, z, nil +-} +- +--- @rc/return_complex.go -- +-package extract +- +-import "fmt" +- +-func _() (int, string, error) { +- x := 1 +- y := "hello" +- z, i, s, err, shouldReturn := newFunction(y, x) +- if shouldReturn { +- return i, s, err +- } //@loc(rcEnd, "}") +- return x, z, nil +-} +- +-func newFunction(y string, x int) (string, int, string, error, bool) { +- z := "bye" //@codeaction("z", "refactor.extract.function", end=rcEnd, result=rc) +- if y == z { +- return "", x, y, fmt.Errorf("same"), true +- } else if false { +- z = "hi" +- return "", x, z, nil, true +- } +- return z, 0, "", nil, false +-} +- +--- return_complex_nonnested.go -- +-package extract +- +-import "fmt" +- +-func _() (int, string, error) { +- x := 1 +- y := "hello" +- z := "bye" //@codeaction("z", "refactor.extract.function", end=rcnnEnd, result=rcnn) +- if y == z { +- return x, y, fmt.Errorf("same") +- } else if false { +- z = "hi" +- return x, z, nil +- } +- return x, z, nil //@loc(rcnnEnd, "nil") +-} +- +--- @rcnn/return_complex_nonnested.go -- +-package extract +- +-import "fmt" +- +-func _() (int, string, error) { +- x := 1 +- y := "hello" +- return newFunction(y, x) //@loc(rcnnEnd, "nil") +-} +- +-func newFunction(y string, x int) (int, string, error) { +- z := "bye" //@codeaction("z", "refactor.extract.function", end=rcnnEnd, result=rcnn) +- if y == z { +- return x, y, fmt.Errorf("same") +- } else if false { +- z = "hi" +- return x, z, nil +- } +- return x, z, nil +-} +- +--- return_func_lit.go -- +-package extract +- +-import "go/ast" +- +-func _() { +- ast.Inspect(ast.NewIdent("a"), func(n ast.Node) bool { +- if n == nil { //@codeaction("if", "refactor.extract.function", end=rflEnd, result=rfl) +- return true +- } //@loc(rflEnd, "}") +- return false +- }) +-} +- +--- @rfl/return_func_lit.go -- +-package extract +- +-import "go/ast" +- +-func _() { +- ast.Inspect(ast.NewIdent("a"), func(n ast.Node) bool { +- b, shouldReturn := newFunction(n) +- if shouldReturn { +- return b +- } //@loc(rflEnd, "}") +- return false +- }) +-} +- +-func newFunction(n ast.Node) (bool, bool) { +- if n == nil { //@codeaction("if", "refactor.extract.function", end=rflEnd, result=rfl) +- return true, true +- } +- return false, false +-} +- +--- return_func_lit_nonnested.go -- +-package extract +- +-import "go/ast" +- +-func _() { +- ast.Inspect(ast.NewIdent("a"), func(n ast.Node) bool { +- if n == nil { //@codeaction("if", "refactor.extract.function", end=rflnnEnd, result=rflnn) +- return true +- } +- return false //@loc(rflnnEnd, "false") +- }) +-} +- +--- @rflnn/return_func_lit_nonnested.go -- +-package extract +- +-import "go/ast" +- +-func _() { +- ast.Inspect(ast.NewIdent("a"), func(n ast.Node) bool { +- return newFunction(n) //@loc(rflnnEnd, "false") +- }) +-} +- +-func newFunction(n ast.Node) bool { +- if n == nil { //@codeaction("if", "refactor.extract.function", end=rflnnEnd, result=rflnn) +- return true +- } +- return false +-} +- +--- return_init.go -- +-package extract +- +-func _() string { +- x := 1 +- if x == 0 { //@codeaction("if", "refactor.extract.function", end=riEnd, result=ri) +- x = 3 +- return "a" +- } //@loc(riEnd, "}") +- x = 2 +- return "b" +-} +- +--- @ri/return_init.go -- +-package extract +- +-func _() string { +- x := 1 +- s, shouldReturn := newFunction(x) +- if shouldReturn { +- return s +- } //@loc(riEnd, "}") +- x = 2 
+- return "b" +-} +- +-func newFunction(x int) (string, bool) { +- if x == 0 { //@codeaction("if", "refactor.extract.function", end=riEnd, result=ri) +- x = 3 +- return "a", true +- } +- return "", false +-} +- +--- return_init_nonnested.go -- +-package extract +- +-func _() string { +- x := 1 +- if x == 0 { //@codeaction("if", "refactor.extract.function", end=rinnEnd, result=rinn) +- x = 3 +- return "a" +- } +- x = 2 +- return "b" //@loc(rinnEnd, "\"b\"") +-} +- +--- @rinn/return_init_nonnested.go -- +-package extract +- +-func _() string { +- x := 1 +- return newFunction(x) //@loc(rinnEnd, "\"b\"") +-} +- +-func newFunction(x int) string { +- if x == 0 { //@codeaction("if", "refactor.extract.function", end=rinnEnd, result=rinn) +- x = 3 +- return "a" +- } +- x = 2 +- return "b" +-} +- +--- args_returns.go -- +-package extract +- +-func _() { +- a := 1 +- a = 5 //@codeaction("a", "refactor.extract.function", end=araend, result=ara) +- a = a + 2 //@loc(araend, "2") +- +- b := a * 2 //@codeaction("b", "refactor.extract.function", end=arbend, result=arb) +- _ = b + 4 //@loc(arbend, "4") +-} +- +--- @ara/args_returns.go -- +-package extract +- +-func _() { +- a := 1 +- a = newFunction(a) //@loc(araend, "2") +- +- b := a * 2 //@codeaction("b", "refactor.extract.function", end=arbend, result=arb) +- _ = b + 4 //@loc(arbend, "4") +-} +- +-func newFunction(a int) int { +- a = 5 //@codeaction("a", "refactor.extract.function", end=araend, result=ara) +- a = a + 2 +- return a +-} +- +--- @arb/args_returns.go -- +-package extract +- +-func _() { +- a := 1 +- a = 5 //@codeaction("a", "refactor.extract.function", end=araend, result=ara) +- a = a + 2 //@loc(araend, "2") +- +- newFunction(a) //@loc(arbend, "4") +-} +- +-func newFunction(a int) { +- b := a * 2 //@codeaction("b", "refactor.extract.function", end=arbend, result=arb) +- _ = b + 4 +-} +- +--- scope.go -- +-package extract +- +-func _() { +- newFunction := 1 +- a := newFunction //@codeaction("a", "refactor.extract.function", end="newFunction", result=scope) +- _ = a // avoid diagnostic +-} +- +-func newFunction1() int { +- return 1 +-} +- +-var _ = newFunction1 +- +--- @scope/scope.go -- +-package extract +- +-func _() { +- newFunction := 1 +- a := newFunction2(newFunction) //@codeaction("a", "refactor.extract.function", end="newFunction", result=scope) +- _ = a // avoid diagnostic +-} +- +-func newFunction2(newFunction int) int { +- a := newFunction +- return a +-} +- +-func newFunction1() int { +- return 1 +-} +- +-var _ = newFunction1 +- +--- smart_initialization.go -- +-package extract +- +-func _() { +- var a []int +- a = append(a, 2) //@codeaction("a", "refactor.extract.function", end=siEnd, result=si) +- b := 4 //@loc(siEnd, "4") +- a = append(a, b) +-} +- +--- @si/smart_initialization.go -- +-package extract +- +-func _() { +- var a []int +- a, b := newFunction(a) //@loc(siEnd, "4") +- a = append(a, b) +-} +- +-func newFunction(a []int) ([]int, int) { +- a = append(a, 2) //@codeaction("a", "refactor.extract.function", end=siEnd, result=si) +- b := 4 +- return a, b +-} +- +--- smart_return.go -- +-package extract +- +-func _() { +- var b []int +- var a int +- a = 2 //@codeaction("a", "refactor.extract.function", end=srEnd, result=sr) +- b = []int{} +- b = append(b, a) //@loc(srEnd, ")") +- b[0] = 1 +-} +- +--- @sr/smart_return.go -- +-package extract +- +-func _() { +- var b []int +- var a int +- b = newFunction(a, b) //@loc(srEnd, ")") +- b[0] = 1 +-} +- +-func newFunction(a int, b []int) []int { +- a = 2 //@codeaction("a", 
"refactor.extract.function", end=srEnd, result=sr) +- b = []int{} +- b = append(b, a) +- return b +-} +- +--- unnecessary_param.go -- +-package extract +- +-func _() { +- var b []int +- a := 2 //@codeaction("a", "refactor.extract.function", end=upEnd, result=up) +- b = []int{} +- b = append(b, a) //@loc(upEnd, ")") +- b[0] = 1 +- if a == 2 { +- return +- } +-} +- +--- @up/unnecessary_param.go -- +-package extract +- +-func _() { +- var b []int +- a, b := newFunction(b) //@loc(upEnd, ")") +- b[0] = 1 +- if a == 2 { +- return +- } +-} +- +-func newFunction(b []int) (int, []int) { +- a := 2 //@codeaction("a", "refactor.extract.function", end=upEnd, result=up) +- b = []int{} +- b = append(b, a) +- return a, b +-} +- +--- comment.go -- +-package extract +- +-func _() { +- a := /* comment in the middle of a line */ 1 //@codeaction("a", "refactor.extract.function", end=commentEnd, result=comment1) +- // Comment on its own line //@codeaction("Comment", "refactor.extract.function", end=commentEnd, result=comment2) +- _ = a + 4 //@loc(commentEnd, "4"),codeaction("_", "refactor.extract.function", end=lastComment, result=comment3) +- // Comment right after 3 + 4 +- +- // Comment after with space //@loc(lastComment, "Comment") +-} +- +--- @comment1/comment.go -- +-package extract +- +-func _() { +- newFunction() //@loc(commentEnd, "4"),codeaction("_", "refactor.extract.function", end=lastComment, result=comment3) +- // Comment right after 3 + 4 +- +- // Comment after with space //@loc(lastComment, "Comment") +-} +- +-func newFunction() { +- a := /* comment in the middle of a line */ 1 //@codeaction("a", "refactor.extract.function", end=commentEnd, result=comment1) +- // Comment on its own line //@codeaction("Comment", "refactor.extract.function", end=commentEnd, result=comment2) +- _ = a + 4 +-} +- +--- @comment2/comment.go -- +-package extract +- +-func _() { +- a := /* comment in the middle of a line */ 1 //@codeaction("a", "refactor.extract.function", end=commentEnd, result=comment1) +- // Comment on its own line //@codeaction("Comment", "refactor.extract.function", end=commentEnd, result=comment2) +- newFunction(a) //@loc(commentEnd, "4"),codeaction("_", "refactor.extract.function", end=lastComment, result=comment3) +- // Comment right after 3 + 4 +- +- // Comment after with space //@loc(lastComment, "Comment") +-} +- +-func newFunction(a int) { +- _ = a + 4 +-} +- +--- @comment3/comment.go -- +-package extract +- +-func _() { +- a := /* comment in the middle of a line */ 1 //@codeaction("a", "refactor.extract.function", end=commentEnd, result=comment1) +- // Comment on its own line //@codeaction("Comment", "refactor.extract.function", end=commentEnd, result=comment2) +- newFunction(a) //@loc(commentEnd, "4"),codeaction("_", "refactor.extract.function", end=lastComment, result=comment3) +- // Comment right after 3 + 4 +- +- // Comment after with space //@loc(lastComment, "Comment") +-} +- +-func newFunction(a int) { +- _ = a + 4 +-} +- +--- redefine.go -- +-package extract +- +-import "strconv" +- +-func _() { +- i, err := strconv.Atoi("1") +- u, err := strconv.Atoi("2") //@codeaction(re`u.*\)`, "refactor.extract.function", result=redefine) +- if i == u || err == nil { +- return +- } +-} +- +--- @redefine/redefine.go -- +-package extract +- +-import "strconv" +- +-func _() { +- i, err := strconv.Atoi("1") +- u, err := newFunction() //@codeaction(re`u.*\)`, "refactor.extract.function", result=redefine) +- if i == u || err == nil { +- return +- } +-} +- +-func newFunction() (int, error) { +- u, err 
:= strconv.Atoi("2") +- return u, err +-} +- +--- anonymousfunc.go -- +-package extract +-import "cmp" +-import "slices" +- +-// issue go#64821 +-func _() { +- var s []string //@codeaction("var", "refactor.extract.function", end=anonEnd, result=anon1) +- slices.SortFunc(s, func(a, b string) int { +- return cmp.Compare(a, b) +- }) +- println(s) //@loc(anonEnd, ")") +-} +- +--- @anon1/anonymousfunc.go -- +-package extract +-import "cmp" +-import "slices" +- +-// issue go#64821 +-func _() { +- newFunction() //@loc(anonEnd, ")") +-} +- +-func newFunction() { +- var s []string //@codeaction("var", "refactor.extract.function", end=anonEnd, result=anon1) +- slices.SortFunc(s, func(a, b string) int { +- return cmp.Compare(a, b) +- }) +- println(s) +-} +- +diff -urN a/gopls/internal/test/marker/testdata/codeaction/grouplines.txt b/gopls/internal/test/marker/testdata/codeaction/grouplines.txt +--- a/gopls/internal/test/marker/testdata/codeaction/grouplines.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/codeaction/grouplines.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,206 +0,0 @@ +-This test exercises the refactoring of putting arguments, return values, and composite literal elements into a +-single line. +- +--- go.mod -- +-module unused.mod +- +-go 1.18 +- +--- func_arg/func_arg.go -- +-package func_arg +- +-func A( +- a string, +- b, c int64, +- x int /*@codeaction("x", "refactor.rewrite.joinLines", result=func_arg)*/, +- y int, +-) (r1 string, r2, r3 int64, r4 int, r5 int) { +- return a, b, c, x, y +-} +- +--- @func_arg/func_arg/func_arg.go -- +-package func_arg +- +-func A(a string, b, c int64, x int /*@codeaction("x", "refactor.rewrite.joinLines", result=func_arg)*/, y int) (r1 string, r2, r3 int64, r4 int, r5 int) { +- return a, b, c, x, y +-} +- +--- func_ret/func_ret.go -- +-package func_ret +- +-func A(a string, b, c int64, x int, y int) ( +- r1 string /*@codeaction("r1", "refactor.rewrite.joinLines", result=func_ret)*/, +- r2, r3 int64, +- r4 int, +- r5 int, +-) { +- return a, b, c, x, y +-} +- +--- @func_ret/func_ret/func_ret.go -- +-package func_ret +- +-func A(a string, b, c int64, x int, y int) (r1 string /*@codeaction("r1", "refactor.rewrite.joinLines", result=func_ret)*/, r2, r3 int64, r4 int, r5 int) { +- return a, b, c, x, y +-} +- +--- functype_arg/functype_arg.go -- +-package functype_arg +- +-type A func( +- a string, +- b, c int64, +- x int /*@codeaction("x", "refactor.rewrite.joinLines", result=functype_arg)*/, +- y int, +-) (r1 string, r2, r3 int64, r4 int, r5 int) +- +--- @functype_arg/functype_arg/functype_arg.go -- +-package functype_arg +- +-type A func(a string, b, c int64, x int /*@codeaction("x", "refactor.rewrite.joinLines", result=functype_arg)*/, y int) (r1 string, r2, r3 int64, r4 int, r5 int) +- +--- functype_ret/functype_ret.go -- +-package functype_ret +- +-type A func(a string, b, c int64, x int, y int) ( +- r1 string /*@codeaction("r1", "refactor.rewrite.joinLines", result=functype_ret)*/, +- r2, r3 int64, +- r4 int, +- r5 int, +-) +- +--- @functype_ret/functype_ret/functype_ret.go -- +-package functype_ret +- +-type A func(a string, b, c int64, x int, y int) (r1 string /*@codeaction("r1", "refactor.rewrite.joinLines", result=functype_ret)*/, r2, r3 int64, r4 int, r5 int) +- +--- func_call/func_call.go -- +-package func_call +- +-import "fmt" +- +-func F() { +- fmt.Println( +- 1 /*@codeaction("1", "refactor.rewrite.joinLines", result=func_call)*/, +- 2, +- 3, +- fmt.Sprintf("hello %d", 4), +- ) +-} +- +--- 
@func_call/func_call/func_call.go -- +-package func_call +- +-import "fmt" +- +-func F() { +- fmt.Println(1 /*@codeaction("1", "refactor.rewrite.joinLines", result=func_call)*/, 2, 3, fmt.Sprintf("hello %d", 4)) +-} +- +--- indent/indent.go -- +-package indent +- +-import "fmt" +- +-func F() { +- fmt.Println( +- 1, +- 2, +- 3, +- fmt.Sprintf( +- "hello %d" /*@codeaction("hello", "refactor.rewrite.joinLines", result=indent)*/, +- 4, +- )) +-} +- +--- @indent/indent/indent.go -- +-package indent +- +-import "fmt" +- +-func F() { +- fmt.Println( +- 1, +- 2, +- 3, +- fmt.Sprintf("hello %d" /*@codeaction("hello", "refactor.rewrite.joinLines", result=indent)*/, 4)) +-} +- +--- structelts/structelts.go -- +-package structelts +- +-type A struct{ +- a int +- b int +-} +- +-func F() { +- _ = A{ +- a: 1, +- b: 2 /*@codeaction("b", "refactor.rewrite.joinLines", result=structelts)*/, +- } +-} +- +--- @structelts/structelts/structelts.go -- +-package structelts +- +-type A struct{ +- a int +- b int +-} +- +-func F() { +- _ = A{a: 1, b: 2 /*@codeaction("b", "refactor.rewrite.joinLines", result=structelts)*/} +-} +- +--- sliceelts/sliceelts.go -- +-package sliceelts +- +-func F() { +- _ = []int{ +- 1 /*@codeaction("1", "refactor.rewrite.joinLines", result=sliceelts)*/, +- 2, +- } +-} +- +--- @sliceelts/sliceelts/sliceelts.go -- +-package sliceelts +- +-func F() { +- _ = []int{1 /*@codeaction("1", "refactor.rewrite.joinLines", result=sliceelts)*/, 2} +-} +- +--- mapelts/mapelts.go -- +-package mapelts +- +-func F() { +- _ = map[string]int{ +- "a": 1 /*@codeaction("1", "refactor.rewrite.joinLines", result=mapelts)*/, +- "b": 2, +- } +-} +- +--- @mapelts/mapelts/mapelts.go -- +-package mapelts +- +-func F() { +- _ = map[string]int{"a": 1 /*@codeaction("1", "refactor.rewrite.joinLines", result=mapelts)*/, "b": 2} +-} +- +--- starcomment/starcomment.go -- +-package starcomment +- +-func A( +- /*1*/ x /*2*/ string /*3*/ /*@codeaction("x", "refactor.rewrite.joinLines", result=starcomment)*/, +- /*4*/ y /*5*/ int /*6*/, +-) (string, int) { +- return x, y +-} +- +--- @starcomment/starcomment/starcomment.go -- +-package starcomment +- +-func A(/*1*/ x /*2*/ string /*3*/ /*@codeaction("x", "refactor.rewrite.joinLines", result=starcomment)*/, /*4*/ y /*5*/ int /*6*/) (string, int) { +- return x, y +-} +- +diff -urN a/gopls/internal/test/marker/testdata/codeaction/imports-generated.txt b/gopls/internal/test/marker/testdata/codeaction/imports-generated.txt +--- a/gopls/internal/test/marker/testdata/codeaction/imports-generated.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/codeaction/imports-generated.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,27 +0,0 @@ +-This test verifies that the 'source.organizeImports' code action +-is offered in generated files (see #73959). +- +--- go.mod -- +-module example.com +-go 1.21 +- +--- a.go -- +-// Code generated by me. DO NOT EDIT. +- +-package a //@codeaction("a", "source.organizeImports", result=out) +- +-func _() { +- fmt.Println("hello") //@diag("fmt", re"undefined") +-} +- +--- @out/a.go -- +-// Code generated by me. DO NOT EDIT. 
+- +-package a //@codeaction("a", "source.organizeImports", result=out) +- +-import "fmt" +- +-func _() { +- fmt.Println("hello") //@diag("fmt", re"undefined") +-} +- +diff -urN a/gopls/internal/test/marker/testdata/codeaction/import-shadows-builtin.txt b/gopls/internal/test/marker/testdata/codeaction/import-shadows-builtin.txt +--- a/gopls/internal/test/marker/testdata/codeaction/import-shadows-builtin.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/codeaction/import-shadows-builtin.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,55 +0,0 @@ +-This is a regression test for bug #63592 in "organize imports" whereby +-the new imports would shadow predeclared names. +- +-In the original example, the conflict was between predeclared error +-type and the unfortunately named package github.com/coreos/etcd/error, +-but this example uses a package with the ludicrous name of complex128. +- +-The new behavior is that we will not attempt to import packages +-that shadow predeclared names. (Ideally we would do that only if +-the predeclared name is actually referenced in the file, which +-complex128 happens to be in this example, but that's a trickier +-analysis than the internal/imports package is game for.) +- +-The name complex127 works as usual. +- +--- go.mod -- +-module example.com +-go 1.18 +- +--- complex128/a.go -- +-package complex128 +- +-var V int +- +--- complex127/a.go -- +-package complex127 +- +-var V int +- +--- main.go -- +-package main +- +-import () //@codeaction("import", "source.organizeImports", result=out) +- +-func main() { +- complex128.V() //@diag("V", re"type complex128 has no field") +- complex127.V() //@diag("complex127", re"(undeclared|undefined)") +-} +- +-func _() { +- var _ complex128 = 1 + 2i +-} +--- @out/main.go -- +-package main +- +-import "example.com/complex127" //@codeaction("import", "source.organizeImports", result=out) +- +-func main() { +- complex128.V() //@diag("V", re"type complex128 has no field") +- complex127.V() //@diag("complex127", re"(undeclared|undefined)") +-} +- +-func _() { +- var _ complex128 = 1 + 2i +-} +diff -urN a/gopls/internal/test/marker/testdata/codeaction/imports.txt b/gopls/internal/test/marker/testdata/codeaction/imports.txt +--- a/gopls/internal/test/marker/testdata/codeaction/imports.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/codeaction/imports.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,175 +0,0 @@ +-This test verifies the behavior of the 'source.organizeImports' code action. 
+- +--- go.mod -- +-module mod.test/imports +- +-go 1.18 +- +--- add.go -- +-package imports //@codeaction("imports", "source.organizeImports", result=add) +- +-import ( +- "fmt" +-) +- +-func _() { +- fmt.Println("") +- bytes.NewBuffer(nil) //@diag("bytes", re"(undeclared|undefined)") +-} +- +--- @add/add.go -- +-package imports //@codeaction("imports", "source.organizeImports", result=add) +- +-import ( +- "bytes" +- "fmt" +-) +- +-func _() { +- fmt.Println("") +- bytes.NewBuffer(nil) //@diag("bytes", re"(undeclared|undefined)") +-} +- +--- good.go -- +-package imports //@codeaction("imports", "source.organizeImports", err=re"found 0 CodeActions") +- +-import "fmt" +- +-func _() { +-fmt.Println("") +-} +- +--- issue35458.go -- +- +- +- +- +- +-// package doc +-package imports //@codeaction("imports", "source.organizeImports", result=issue35458) +- +- +- +- +- +- +-func _() { +- println("Hello, world!") +-} +- +- +- +- +- +- +- +- +--- @issue35458/issue35458.go -- +-// package doc +-package imports //@codeaction("imports", "source.organizeImports", result=issue35458) +- +- +- +- +- +- +-func _() { +- println("Hello, world!") +-} +- +- +- +- +- +- +- +- +--- multi.go -- +-package imports //@codeaction("imports", "source.organizeImports", result=multi) +- +-import "fmt" +- +-import "bytes" //@diag("\"bytes\"", re"not used") +- +-func _() { +- fmt.Println("") +-} +- +--- @multi/multi.go -- +-package imports //@codeaction("imports", "source.organizeImports", result=multi) +- +-import "fmt" +- +-//@diag("\"bytes\"", re"not used") +- +-func _() { +- fmt.Println("") +-} +- +--- needs.go -- +-package imports //@codeaction("package", "source.organizeImports", result=needs) +- +-func goodbye() { +- fmt.Printf("HI") //@diag("fmt", re"(undeclared|undefined)") +- log.Printf("byeeeee") //@diag("log", re"(undeclared|undefined)") +-} +- +--- @needs/needs.go -- +-package imports //@codeaction("package", "source.organizeImports", result=needs) +- +-import ( +- "fmt" +- "log" +-) +- +-func goodbye() { +- fmt.Printf("HI") //@diag("fmt", re"(undeclared|undefined)") +- log.Printf("byeeeee") //@diag("log", re"(undeclared|undefined)") +-} +- +--- remove.go -- +-package imports //@codeaction("package", "source.organizeImports", result=remove) +- +-import ( +- "bytes" //@diag("\"bytes\"", re"not used") +- "fmt" +-) +- +-func _() { +- fmt.Println("") +-} +- +--- @remove/remove.go -- +-package imports //@codeaction("package", "source.organizeImports", result=remove) +- +-import ( +- "fmt" +-) +- +-func _() { +- fmt.Println("") +-} +- +--- removeall.go -- +-package imports //@codeaction("package", "source.organizeImports", result=removeall) +- +-import ( +- "bytes" //@diag("\"bytes\"", re"not used") +- "fmt" //@diag("\"fmt\"", re"not used") +- +-) +- +-func _() { +-} +- +--- @removeall/removeall.go -- +-package imports //@codeaction("package", "source.organizeImports", result=removeall) +- +-//@diag("\"fmt\"", re"not used") +- +-func _() { +-} +- +--- twolines.go -- +-package imports +-func main() {} //@codeaction("main", "source.organizeImports", err=re"found 0") +diff -urN a/gopls/internal/test/marker/testdata/codeaction/inline_issue67336.txt b/gopls/internal/test/marker/testdata/codeaction/inline_issue67336.txt +--- a/gopls/internal/test/marker/testdata/codeaction/inline_issue67336.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/codeaction/inline_issue67336.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,71 +0,0 @@ +-This is the test case from golang/go#67335, where the inlining 
resulted in bad +-formatting. +- +--- go.mod -- +-module example.com +- +-go 1.20 +- +--- define/my/typ/foo.go -- +-package typ +-type T int +- +--- some/other/pkg/foo.go -- +-package pkg +-import "context" +-import "example.com/define/my/typ" +-func Foo(typ.T) context.Context{ return nil } +- +--- one/more/pkg/foo.go -- +-package pkg +-func Bar() {} +- +--- to/be/inlined/foo.go -- +-package inlined +- +-import "context" +-import "example.com/some/other/pkg" +-import "example.com/define/my/typ" +- +-func Baz(ctx context.Context) context.Context { +- return pkg.Foo(typ.T(5)) +-} +- +--- b/c/foo.go -- +-package c +-import ( +- "context" +- "example.com/to/be/inlined" +- "example.com/one/more/pkg" +-) +- +-const ( +- // This is a constant +- SomeConst = 5 +-) +- +-func _() { +- inlined.Baz(context.TODO()) //@ codeaction("Baz", "refactor.inline.call", result=inline) +- pkg.Bar() +-} +- +--- @inline/b/c/foo.go -- +-package c +- +-import ( +- "context" +- "example.com/define/my/typ" +- "example.com/one/more/pkg" +- pkg0 "example.com/some/other/pkg" +-) +- +-const ( +- // This is a constant +- SomeConst = 5 +-) +- +-func _() { +- var _ context.Context = context.TODO() +- pkg0.Foo(typ.T(5)) //@ codeaction("Baz", "refactor.inline.call", result=inline) +- pkg.Bar() +-} +diff -urN a/gopls/internal/test/marker/testdata/codeaction/inline_issue68554.txt b/gopls/internal/test/marker/testdata/codeaction/inline_issue68554.txt +--- a/gopls/internal/test/marker/testdata/codeaction/inline_issue68554.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/codeaction/inline_issue68554.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,38 +0,0 @@ +-This test checks that inlining removes unnecessary interface conversions. +- +--- main.go -- +-package main +- +-import ( +- "fmt" +- "io" +-) +- +-func _(d discard) { +- g(d) //@codeaction("g", "refactor.inline.call", result=out) +-} +- +-func g(w io.Writer) { fmt.Println(w) } +- +-var _ discard +-type discard struct{} +-func (discard) Write(p []byte) (int, error) { return len(p), nil } +--- @out/main.go -- +-package main +- +-import ( +- "fmt" +- "io" +-) +- +-func _(d discard) { +- fmt.Println(d) //@codeaction("g", "refactor.inline.call", result=out) +-} +- +-func g(w io.Writer) { fmt.Println(w) } +- +-var _ discard +- +-type discard struct{} +- +-func (discard) Write(p []byte) (int, error) { return len(p), nil } +diff -urN a/gopls/internal/test/marker/testdata/codeaction/inline-lhs-var-method.txt b/gopls/internal/test/marker/testdata/codeaction/inline-lhs-var-method.txt +--- a/gopls/internal/test/marker/testdata/codeaction/inline-lhs-var-method.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/codeaction/inline-lhs-var-method.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,101 +0,0 @@ +-We should not offer a code action to inline 'v' where it appears on the +-left-hand side of an assignment: method with pointer receiver. +- +-Regression test for issue #75200. 
+- +--- go.mod -- +-module example.com/a +-go 1.18 +- +--- c/c.go -- +-package c +- +-import "fmt" +- +-type V int +- +-func (V) Method() { } +- +-func (*V) PointerMethod() { } +- +-func _() { +- var v V = V(123) +- v = V(13) +- v.Method() //@codeaction("v", "refactor.inline.variable", result=inlineV) +- v.PointerMethod() //@codeaction("v", "refactor.inline.variable", err="0 CodeActions of kind refactor.inline.variable") +- (v).PointerMethod() //@codeaction("v", "refactor.inline.variable", err="0 CodeActions of kind refactor.inline.variable") +- var vptr *V = &v +- vptr.PointerMethod() //@codeaction("vptr", "refactor.inline.variable", result=inlintVptr) +- (vptr).PointerMethod() //@codeaction("vptr", "refactor.inline.variable", result=inlintVptrpar) +- fmt.Println(v, vptr) +-} +- +--- @inlineV/c/c.go -- +-package c +- +-import "fmt" +- +-type V int +- +-func (V) Method() { } +- +-func (*V) PointerMethod() { } +- +-func _() { +- var v V = V(123) +- v = V(13) +- V(123).Method() //@codeaction("v", "refactor.inline.variable", result=inlineV) +- v.PointerMethod() //@codeaction("v", "refactor.inline.variable", err="0 CodeActions of kind refactor.inline.variable") +- (v).PointerMethod() //@codeaction("v", "refactor.inline.variable", err="0 CodeActions of kind refactor.inline.variable") +- var vptr *V = &v +- vptr.PointerMethod() //@codeaction("vptr", "refactor.inline.variable", result=inlintVptr) +- (vptr).PointerMethod() //@codeaction("vptr", "refactor.inline.variable", result=inlintVptrpar) +- fmt.Println(v, vptr) +-} +- +--- @inlintVptr/c/c.go -- +-package c +- +-import "fmt" +- +-type V int +- +-func (V) Method() { } +- +-func (*V) PointerMethod() { } +- +-func _() { +- var v V = V(123) +- v = V(13) +- v.Method() //@codeaction("v", "refactor.inline.variable", result=inlineV) +- v.PointerMethod() //@codeaction("v", "refactor.inline.variable", err="0 CodeActions of kind refactor.inline.variable") +- (v).PointerMethod() //@codeaction("v", "refactor.inline.variable", err="0 CodeActions of kind refactor.inline.variable") +- var vptr *V = &v +- &v.PointerMethod() //@codeaction("vptr", "refactor.inline.variable", result=inlintVptr) +- (vptr).PointerMethod() //@codeaction("vptr", "refactor.inline.variable", result=inlintVptrpar) +- fmt.Println(v, vptr) +-} +- +--- @inlintVptrpar/c/c.go -- +-package c +- +-import "fmt" +- +-type V int +- +-func (V) Method() { } +- +-func (*V) PointerMethod() { } +- +-func _() { +- var v V = V(123) +- v = V(13) +- v.Method() //@codeaction("v", "refactor.inline.variable", result=inlineV) +- v.PointerMethod() //@codeaction("v", "refactor.inline.variable", err="0 CodeActions of kind refactor.inline.variable") +- (v).PointerMethod() //@codeaction("v", "refactor.inline.variable", err="0 CodeActions of kind refactor.inline.variable") +- var vptr *V = &v +- vptr.PointerMethod() //@codeaction("vptr", "refactor.inline.variable", result=inlintVptr) +- (&v).PointerMethod() //@codeaction("vptr", "refactor.inline.variable", result=inlintVptrpar) +- fmt.Println(v, vptr) +-} +- +diff -urN a/gopls/internal/test/marker/testdata/codeaction/inline-lhs-var.txt b/gopls/internal/test/marker/testdata/codeaction/inline-lhs-var.txt +--- a/gopls/internal/test/marker/testdata/codeaction/inline-lhs-var.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/codeaction/inline-lhs-var.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,33 +0,0 @@ +-We should not offer a code action to inline 'v' where it appears on the +-left-hand side of an assignment +- +-Regression test for 
issue #75200. +- +--- go.mod -- +-module example.com/a +-go 1.18 +- +--- c/c.go -- +-package c +- +-import "fmt" +- +-func _() { +- v := 13 +- v = 78 //@codeaction("v", "refactor.inline.variable", err="0 CodeActions of kind refactor.inline.variable") +- +- v += 78 //@codeaction("v", "refactor.inline.variable", err="0 CodeActions of kind refactor.inline.variable") +- v -= 78 //@codeaction("v", "refactor.inline.variable", err="0 CodeActions of kind refactor.inline.variable") +- v *= 78 //@codeaction("v", "refactor.inline.variable", err="0 CodeActions of kind refactor.inline.variable") +- +- v++ //@codeaction("v", "refactor.inline.variable", err="0 CodeActions of kind refactor.inline.variable") +- v-- //@codeaction("v", "refactor.inline.variable", err="0 CodeActions of kind refactor.inline.variable") +- +- fmt.Println(v) +- +- x := &v //@codeaction("v", "refactor.inline.variable", err="0 CodeActions of kind refactor.inline.variable") +- x = (&v) //@codeaction("v", "refactor.inline.variable", err="0 CodeActions of kind refactor.inline.variable") +- x = &(v) //@codeaction("v", "refactor.inline.variable", err="0 CodeActions of kind refactor.inline.variable") +- +- fmt.Println(x) +-} +diff -urN a/gopls/internal/test/marker/testdata/codeaction/inline_resolve.txt b/gopls/internal/test/marker/testdata/codeaction/inline_resolve.txt +--- a/gopls/internal/test/marker/testdata/codeaction/inline_resolve.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/codeaction/inline_resolve.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,24 +0,0 @@ +-This is a minimal test of the refactor.inline.call code actions, with resolve support. +-See inline.txt for same test without resolve support. +- +--- go.mod -- +-module example.com/codeaction +-go 1.18 +- +--- a/a.go -- +-package a +- +-func _() { +- println(add(1, 2)) //@codeaction("add", "refactor.inline.call", end=")", result=inline) +-} +- +-func add(x, y int) int { return x + y } +- +--- @inline/a/a.go -- +-package a +- +-func _() { +- println(1 + 2) //@codeaction("add", "refactor.inline.call", end=")", result=inline) +-} +- +-func add(x, y int) int { return x + y } +diff -urN a/gopls/internal/test/marker/testdata/codeaction/inline.txt b/gopls/internal/test/marker/testdata/codeaction/inline.txt +--- a/gopls/internal/test/marker/testdata/codeaction/inline.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/codeaction/inline.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,34 +0,0 @@ +-This is a minimal test of the refactor.inline.call code action, without resolve support. +-See inline_resolve.txt for same test with resolve support. 
+- +--- capabilities.json -- +-{ +- "textDocument": { +- "codeAction": { +- "dataSupport": false, +- "resolveSupport": {} +- } +- } +-} +- +--- go.mod -- +-module example.com/codeaction +-go 1.18 +- +--- a/a.go -- +-package a +- +-func _() { +- println(add(1, 2)) //@codeaction("add", "refactor.inline.call", end=")", result=inline) +-} +- +-func add(x, y int) int { return x + y } +- +--- @inline/a/a.go -- +-package a +- +-func _() { +- println(1 + 2) //@codeaction("add", "refactor.inline.call", end=")", result=inline) +-} +- +-func add(x, y int) int { return x + y } +diff -urN a/gopls/internal/test/marker/testdata/codeaction/inline-var-74347.txt b/gopls/internal/test/marker/testdata/codeaction/inline-var-74347.txt +--- a/gopls/internal/test/marker/testdata/codeaction/inline-var-74347.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/codeaction/inline-var-74347.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,20 +0,0 @@ +-This is a regressoon test of a crash in refactor.inline.variable. +- +--- go.mod -- +-module example.com/a +-go 1.18 +- +--- a/a.go -- +-package a +- +-func _() { +- x := func(notfree int) { _ = notfree } +- println(x) //@codeaction("x", "refactor.inline.variable", result=out) +-} +--- @out/a/a.go -- +-package a +- +-func _() { +- x := func(notfree int) { _ = notfree } +- println(func(notfree int) { _ = notfree }) //@codeaction("x", "refactor.inline.variable", result=out) +-} +diff -urN a/gopls/internal/test/marker/testdata/codeaction/inline-var.txt b/gopls/internal/test/marker/testdata/codeaction/inline-var.txt +--- a/gopls/internal/test/marker/testdata/codeaction/inline-var.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/codeaction/inline-var.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,38 +0,0 @@ +-This is a test of the refactor.inline.variable code action. +- +--- go.mod -- +-module example.com/a +-go 1.18 +- +--- a/a.go -- +-package a +- +-import "fmt" +- +-func _(x int) { +- s := fmt.Sprintf("+%d", x) +- println(s) //@codeaction("s", "refactor.inline.variable", result=inlineS) +-} +- +--- @inlineS/a/a.go -- +-package a +- +-import "fmt" +- +-func _(x int) { +- s := fmt.Sprintf("+%d", x) +- println(fmt.Sprintf("+%d", x)) //@codeaction("s", "refactor.inline.variable", result=inlineS) +-} +- +--- b/b.go -- +-package b +- +-import "fmt" +- +-func _(x int) { +- s2 := fmt.Sprintf("+%d", x) +- { +- x := "shadow" +- println(s2, x) //@codeaction("s2", "refactor.inline.variable", err=re`refers to "x".*shadowed.*at line 8`) +- } +-} +diff -urN a/gopls/internal/test/marker/testdata/codeaction/invertif.txt b/gopls/internal/test/marker/testdata/codeaction/invertif.txt +--- a/gopls/internal/test/marker/testdata/codeaction/invertif.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/codeaction/invertif.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,218 +0,0 @@ +-This test exercises the 'invert if condition' code action. +- +--- p.go -- +-package invertif +- +-import ( +- "fmt" +- "os" +-) +- +-func Boolean() { +- b := true +- if b { //@codeaction("if b", "refactor.rewrite.invertIf", edit=boolean) +- fmt.Println("A") +- } else { +- fmt.Println("B") +- } +-} +- +-func BooleanFn() { +- if os.IsPathSeparator('X') { //@codeaction("if os.IsPathSeparator('X')", "refactor.rewrite.invertIf", edit=boolean_fn) +- fmt.Println("A") +- } else { +- fmt.Println("B") +- } +-} +- +-// Note that the comment here jumps to the wrong location. 
+-func DontRemoveParens() { +- a := false +- b := true +- if !(a || +- b) { //@codeaction("b", "refactor.rewrite.invertIf", edit=dont_remove_parens) +- fmt.Println("A") +- } else { +- fmt.Println("B") +- } +-} +- +-func ElseIf() { +- // No inversion expected when there's not else clause +- if len(os.Args) > 2 { +- fmt.Println("A") +- } +- +- // No inversion expected for else-if, that would become unreadable +- if len(os.Args) > 2 { +- fmt.Println("A") +- } else if os.Args[0] == "X" { //@codeaction(re"if os.Args.0. == .X.", "refactor.rewrite.invertIf", edit=else_if) +- fmt.Println("B") +- } else { +- fmt.Println("C") +- } +-} +- +-func GreaterThan() { +- if len(os.Args) > 2 { //@codeaction("i", "refactor.rewrite.invertIf", edit=greater_than) +- fmt.Println("A") +- } else { +- fmt.Println("B") +- } +-} +- +-func NotBoolean() { +- b := true +- if !b { //@codeaction("if !b", "refactor.rewrite.invertIf", edit=not_boolean) +- fmt.Println("A") +- } else { +- fmt.Println("B") +- } +-} +- +-func RemoveElse() { +- if true { //@codeaction("if true", "refactor.rewrite.invertIf", edit=remove_else) +- fmt.Println("A") +- } else { +- fmt.Println("B") +- return +- } +- +- fmt.Println("C") +-} +- +-func RemoveParens() { +- b := true +- if !(b) { //@codeaction("if", "refactor.rewrite.invertIf", edit=remove_parens) +- fmt.Println("A") +- } else { +- fmt.Println("B") +- } +-} +- +-func Semicolon() { +- if _, err := fmt.Println("x"); err != nil { //@codeaction("if", "refactor.rewrite.invertIf", edit=semicolon) +- fmt.Println("A") +- } else { +- fmt.Println("B") +- } +-} +- +-func SemicolonAnd() { +- if n, err := fmt.Println("x"); err != nil && n > 0 { //@codeaction("f", "refactor.rewrite.invertIf", edit=semicolon_and) +- fmt.Println("A") +- } else { +- fmt.Println("B") +- } +-} +- +-func SemicolonOr() { +- if n, err := fmt.Println("x"); err != nil || n < 5 { //@codeaction(re"if n, err := fmt.Println..x..; err != nil .. n < 5", "refactor.rewrite.invertIf", edit=semicolon_or) +- fmt.Println("A") +- } else { +- fmt.Println("B") +- } +-} +- +--- @boolean/p.go -- +-@@ -10,3 +10 @@ +-- if b { //@codeaction("if b", "refactor.rewrite.invertIf", edit=boolean) +-- fmt.Println("A") +-- } else { +-+ if !b { +-@@ -14 +12,2 @@ +-+ } else { //@codeaction("if b", "refactor.rewrite.invertIf", edit=boolean) +-+ fmt.Println("A") +--- @boolean_fn/p.go -- +-@@ -18,3 +18 @@ +-- if os.IsPathSeparator('X') { //@codeaction("if os.IsPathSeparator('X')", "refactor.rewrite.invertIf", edit=boolean_fn) +-- fmt.Println("A") +-- } else { +-+ if !os.IsPathSeparator('X') { +-@@ -22 +20,2 @@ +-+ } else { //@codeaction("if os.IsPathSeparator('X')", "refactor.rewrite.invertIf", edit=boolean_fn) +-+ fmt.Println("A") +--- @dont_remove_parens/p.go -- +-@@ -29,4 +29,2 @@ +-- if !(a || +-- b) { //@codeaction("b", "refactor.rewrite.invertIf", edit=dont_remove_parens) +-- fmt.Println("A") +-- } else { +-+ if (a || +-+ b) { +-@@ -34 +32,2 @@ +-+ } else { //@codeaction("b", "refactor.rewrite.invertIf", edit=dont_remove_parens) +-+ fmt.Println("A") +--- @else_if/p.go -- +-@@ -46,3 +46 @@ +-- } else if os.Args[0] == "X" { //@codeaction(re"if os.Args.0. == .X.", "refactor.rewrite.invertIf", edit=else_if) +-- fmt.Println("B") +-- } else { +-+ } else if os.Args[0] != "X" { +-@@ -50 +48,2 @@ +-+ } else { //@codeaction(re"if os.Args.0. 
== .X.", "refactor.rewrite.invertIf", edit=else_if) +-+ fmt.Println("B") +--- @greater_than/p.go -- +-@@ -54,3 +54 @@ +-- if len(os.Args) > 2 { //@codeaction("i", "refactor.rewrite.invertIf", edit=greater_than) +-- fmt.Println("A") +-- } else { +-+ if len(os.Args) <= 2 { +-@@ -58 +56,2 @@ +-+ } else { //@codeaction("i", "refactor.rewrite.invertIf", edit=greater_than) +-+ fmt.Println("A") +--- @not_boolean/p.go -- +-@@ -63,3 +63 @@ +-- if !b { //@codeaction("if !b", "refactor.rewrite.invertIf", edit=not_boolean) +-- fmt.Println("A") +-- } else { +-+ if b { +-@@ -67 +65,2 @@ +-+ } else { //@codeaction("if !b", "refactor.rewrite.invertIf", edit=not_boolean) +-+ fmt.Println("A") +--- @remove_else/p.go -- +-@@ -71,3 +71 @@ +-- if true { //@codeaction("if true", "refactor.rewrite.invertIf", edit=remove_else) +-- fmt.Println("A") +-- } else { +-+ if false { +-@@ -78 +76,3 @@ +-+ //@codeaction("if true", "refactor.rewrite.invertIf", edit=remove_else) +-+ fmt.Println("A") +-+ +--- @remove_parens/p.go -- +-@@ -83,3 +83 @@ +-- if !(b) { //@codeaction("if", "refactor.rewrite.invertIf", edit=remove_parens) +-- fmt.Println("A") +-- } else { +-+ if b { +-@@ -87 +85,2 @@ +-+ } else { //@codeaction("if", "refactor.rewrite.invertIf", edit=remove_parens) +-+ fmt.Println("A") +--- @semicolon/p.go -- +-@@ -91,3 +91 @@ +-- if _, err := fmt.Println("x"); err != nil { //@codeaction("if", "refactor.rewrite.invertIf", edit=semicolon) +-- fmt.Println("A") +-- } else { +-+ if _, err := fmt.Println("x"); err == nil { +-@@ -95 +93,2 @@ +-+ } else { //@codeaction("if", "refactor.rewrite.invertIf", edit=semicolon) +-+ fmt.Println("A") +--- @semicolon_and/p.go -- +-@@ -99,3 +99 @@ +-- if n, err := fmt.Println("x"); err != nil && n > 0 { //@codeaction("f", "refactor.rewrite.invertIf", edit=semicolon_and) +-- fmt.Println("A") +-- } else { +-+ if n, err := fmt.Println("x"); err == nil || n <= 0 { +-@@ -103 +101,2 @@ +-+ } else { //@codeaction("f", "refactor.rewrite.invertIf", edit=semicolon_and) +-+ fmt.Println("A") +--- @semicolon_or/p.go -- +-@@ -107,3 +107 @@ +-- if n, err := fmt.Println("x"); err != nil || n < 5 { //@codeaction(re"if n, err := fmt.Println..x..; err != nil .. n < 5", "refactor.rewrite.invertIf", edit=semicolon_or) +-- fmt.Println("A") +-- } else { +-+ if n, err := fmt.Println("x"); err == nil && n >= 5 { +-@@ -111 +109,2 @@ +-+ } else { //@codeaction(re"if n, err := fmt.Println..x..; err != nil .. n < 5", "refactor.rewrite.invertIf", edit=semicolon_or) +-+ fmt.Println("A") +diff -urN a/gopls/internal/test/marker/testdata/codeaction/issue64558.txt b/gopls/internal/test/marker/testdata/codeaction/issue64558.txt +--- a/gopls/internal/test/marker/testdata/codeaction/issue64558.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/codeaction/issue64558.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,14 +0,0 @@ +-Test of an inlining failure due to an ill-typed input program (#64558). 
+- +--- go.mod -- +-module example.com +-go 1.18 +- +--- a/a.go -- +-package a +- +-func _() { +- f(1, 2) //@ diag("2", re"too many arguments"), codeaction("f", "refactor.inline.call", end=")", err=re`inlining failed \("too many arguments"\), likely because inputs were ill-typed`) +-} +- +-func f(int) {} +diff -urN a/gopls/internal/test/marker/testdata/codeaction/issue70268.txt b/gopls/internal/test/marker/testdata/codeaction/issue70268.txt +--- a/gopls/internal/test/marker/testdata/codeaction/issue70268.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/codeaction/issue70268.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,33 +0,0 @@ +-This test verifies the remove of unused parameters in case of syntax errors. +-Issue golang/go#70268. +- +--- go.mod -- +-module unused.mod +- +-go 1.21 +- +--- a/a.go -- +-package a +- +-func A(x, unused int) int { //@codeaction("unused", "refactor.rewrite.removeUnusedParam", result=a) +- return x +-} +- +--- @a/a/a.go -- +-package a +- +-func A(x int) int { //@codeaction("unused", "refactor.rewrite.removeUnusedParam", result=a) +- return x +-} +- +--- b/b.go -- +-package b +- +-import "unused.mod/a" +- +-func main(){ +- a.A/*dsdd*/(/*cccc*/ 1, +- +- +- ) //@diag(")", re"not enough arguments") +-} +diff -urN a/gopls/internal/test/marker/testdata/codeaction/moveparam_issue70599.txt b/gopls/internal/test/marker/testdata/codeaction/moveparam_issue70599.txt +--- a/gopls/internal/test/marker/testdata/codeaction/moveparam_issue70599.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/codeaction/moveparam_issue70599.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,99 +0,0 @@ +-This test checks the fixes for bugs encountered while bug-bashing on the +-movement refactoring. +- +--- go.mod -- +-module example.com +- +-go 1.21 +- +--- unnecessaryconversion.go -- +-package a +- +-// We should not add unnecessary conversions to concrete arguments to concrete +-// parameters when the parameter use is in assignment context. +- +-type Hash [32]byte +- +-func Cache(key [32]byte, value any) { //@codeaction("key", "refactor.rewrite.moveParamRight", result=conversion) +- // Not implemented. +-} +- +-func _() { +- var k Hash +- Cache(k, 0) +- Cache(Hash{}, 1) +- Cache([32]byte{}, 2) +-} +- +--- @conversion/unnecessaryconversion.go -- +-package a +- +-// We should not add unnecessary conversions to concrete arguments to concrete +-// parameters when the parameter use is in assignment context. +- +-type Hash [32]byte +- +-func Cache(value any, key [32]byte) { //@codeaction("key", "refactor.rewrite.moveParamRight", result=conversion) +- // Not implemented. 
+-} +- +-func _() { +- var k Hash +- Cache(0, k) +- Cache(1, Hash{}) +- Cache(2, [32]byte{}) +-} +--- shortvardecl.go -- +-package a +- +-func Short(x, y int) (int, int) { //@codeaction("x", "refactor.rewrite.moveParamRight", result=short) +- return x, y +-} +- +-func _() { +- x, y := Short(0, 1) +- _, _ = x, y +-} +- +-func _() { +- var x, y int +- x, y = Short(0, 1) +- _, _ = x, y +-} +- +-func _() { +- _, _ = Short(0, 1) +-} +--- @short/shortvardecl.go -- +-package a +- +-func Short(y, x int) (int, int) { //@codeaction("x", "refactor.rewrite.moveParamRight", result=short) +- return x, y +-} +- +-func _() { +- x, y := Short(1, 0) +- _, _ = x, y +-} +- +-func _() { +- var x, y int +- x, y = Short(1, 0) +- _, _ = x, y +-} +- +-func _() { +- _, _ = Short(1, 0) +-} +--- variadic.go -- +-package a +- +-// We should not offer movement involving variadic parameters if it is not well +-// supported. +- +-func Variadic(x int, y ...string) { //@codeaction("x", "refactor.rewrite.moveParamRight", err="0 CodeActions"), codeaction("y", "refactor.rewrite.moveParamLeft", err="0 CodeActions") +-} +- +-func _() { +- Variadic(1, "a", "b") +-} +diff -urN a/gopls/internal/test/marker/testdata/codeaction/moveparam.txt b/gopls/internal/test/marker/testdata/codeaction/moveparam.txt +--- a/gopls/internal/test/marker/testdata/codeaction/moveparam.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/codeaction/moveparam.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,178 +0,0 @@ +-This test checks basic functionality of the "move parameter left/right" code +-action. +- +-Note that in many of these tests, a permutation can either be expressed as +-a parameter move left or right. In these cases, the codeaction assertions +-deliberately share the same golden data. +- +--- go.mod -- +-module example.com/moveparam +- +-go 1.19 +- +--- basic/basic.go -- +-package basic +- +-func Foo(a, b int) int { //@codeaction("a", "refactor.rewrite.moveParamRight", result=basic), codeaction("b", "refactor.rewrite.moveParamLeft", result=basic) +- return a + b +-} +- +-func _() { +- x, y := 1, 2 +- z := Foo(x, y) +- _ = z +-} +- +--- basic/caller/caller.go -- +-package caller +- +-import "example.com/moveparam/basic" +- +-func a() int { return 1 } +-func b() int { return 2 } +- +-// Check that we can refactor a call in a toplevel var decl. +-var _ = basic.Foo(1, 2) +- +-// Check that we can refactor a call with effects in a toplevel var decl. +-var _ = basic.Foo(a(), b()) +- +-func _() { +- // check various refactorings in a function body, and comment handling. +- _ = basic.Foo(1, 2) // with comments +- // another comment +- _ = basic.Foo(3, 4) +- x := 4 +- x = basic.Foo(x /* this is an inline comment */, 5) +-} +- +--- @basic/basic/basic.go -- +-package basic +- +-func Foo(b, a int) int { //@codeaction("a", "refactor.rewrite.moveParamRight", result=basic), codeaction("b", "refactor.rewrite.moveParamLeft", result=basic) +- return a + b +-} +- +-func _() { +- x, y := 1, 2 +- z := Foo(y, x) +- _ = z +-} +--- @basic/basic/caller/caller.go -- +-package caller +- +-import "example.com/moveparam/basic" +- +-func a() int { return 1 } +-func b() int { return 2 } +- +-// Check that we can refactor a call in a toplevel var decl. +-var _ = basic.Foo(2, 1) +- +-// Check that we can refactor a call with effects in a toplevel var decl. +-var _ = basic.Foo(b(), a()) +- +-func _() { +- // check various refactorings in a function body, and comment handling. 
+- _ = basic.Foo(2, 1) // with comments +- // another comment +- _ = basic.Foo(4, 3) +- x := 4 +- x = basic.Foo(5, x) +-} +--- method/method.go -- +-package method +- +-type T struct{} +- +-func (T) Foo(a, b int) {} //@codeaction("a", "refactor.rewrite.moveParamRight", result=method), codeaction("b", "refactor.rewrite.moveParamLeft", result=method) +- +-func _() { +- var t T +- t.Foo(1, 2) +- // TODO(rfindley): test method expressions here, once they are handled. +-} +- +--- method/caller/caller.go -- +-package caller +- +-import "example.com/moveparam/method" +- +-func _() { +- var t method.T +- t.Foo(1, 2) +-} +- +--- @method/method/caller/caller.go -- +-package caller +- +-import "example.com/moveparam/method" +- +-func _() { +- var t method.T +- t.Foo(2, 1) +-} +--- @method/method/method.go -- +-package method +- +-type T struct{} +- +-func (T) Foo(b, a int) {} //@codeaction("a", "refactor.rewrite.moveParamRight", result=method), codeaction("b", "refactor.rewrite.moveParamLeft", result=method) +- +-func _() { +- var t T +- t.Foo(2, 1) +- // TODO(rfindley): test method expressions here, once they are handled. +-} +--- fieldlist/joinfield.go -- +-package fieldlist +- +-func JoinField(a int, b string, c int) {} //@codeaction("a", "refactor.rewrite.moveParamRight", result=joinfield), codeaction("b", "refactor.rewrite.moveParamLeft", result=joinfield) +- +-func _() { +- JoinField(1, "2", 3) +-} +- +--- @joinfield/fieldlist/joinfield.go -- +-package fieldlist +- +-func JoinField(b string, a, c int) {} //@codeaction("a", "refactor.rewrite.moveParamRight", result=joinfield), codeaction("b", "refactor.rewrite.moveParamLeft", result=joinfield) +- +-func _() { +- JoinField("2", 1, 3) +-} +--- fieldlist/splitfield.go -- +-package fieldlist +- +-func SplitField(a int, b, c string) {} //@codeaction("a", "refactor.rewrite.moveParamRight", result=splitfield), codeaction("b", "refactor.rewrite.moveParamLeft", result=splitfield) +- +-func _() { +- SplitField(1, "2", "3") +-} +- +--- @splitfield/fieldlist/splitfield.go -- +-package fieldlist +- +-func SplitField(b string, a int, c string) {} //@codeaction("a", "refactor.rewrite.moveParamRight", result=splitfield), codeaction("b", "refactor.rewrite.moveParamLeft", result=splitfield) +- +-func _() { +- SplitField("2", 1, "3") +-} +--- unnamed/unnamed.go -- +-package unnamed +- +-func Unnamed(int, string) { //@codeaction("int", "refactor.rewrite.moveParamRight", result=unnamed) +-} +- +-func _() { +- Unnamed(1, "hi") +-} +--- @unnamed/unnamed/unnamed.go -- +-package unnamed +- +-func Unnamed(string, int) { //@codeaction("int", "refactor.rewrite.moveParamRight", result=unnamed) +-} +- +-func _() { +- Unnamed("hi", 1) +-} +diff -urN a/gopls/internal/test/marker/testdata/codeaction/removeparam_formatting.txt b/gopls/internal/test/marker/testdata/codeaction/removeparam_formatting.txt +--- a/gopls/internal/test/marker/testdata/codeaction/removeparam_formatting.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/codeaction/removeparam_formatting.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,55 +0,0 @@ +-This test exercises behavior of change signature refactoring with respect to +-comments. +- +-Currently, inline comments around arguments or parameters are dropped, which is +-probably acceptable. Fixing this is likely intractible without fixing comment +-representation in the AST. +- +--- go.mod -- +-module unused.mod +- +-go 1.18 +- +--- a/a.go -- +-package a +- +-// A doc comment. 
+-func A(x /* used parameter */, unused int /* unused parameter */ ) int { //@codeaction("unused", "refactor.rewrite.removeUnusedParam", result=a) +- // about to return +- return x // returning +- // just returned +-} +- +-// This function makes calls. +-func _() { +- // about to call +- A(one() /* used arg */, 2 /* unused arg */) // calling +- // just called +-} +- +-func one() int { +- // I should be unaffected! +- return 1 +-} +- +--- @a/a/a.go -- +-package a +- +-// A doc comment. +-func A(x int) int { //@codeaction("unused", "refactor.rewrite.removeUnusedParam", result=a) +- // about to return +- return x // returning +- // just returned +-} +- +-// This function makes calls. +-func _() { +- // about to call +- A(one()) // calling +- // just called +-} +- +-func one() int { +- // I should be unaffected! +- return 1 +-} +diff -urN a/gopls/internal/test/marker/testdata/codeaction/removeparam_funcvalue.txt b/gopls/internal/test/marker/testdata/codeaction/removeparam_funcvalue.txt +--- a/gopls/internal/test/marker/testdata/codeaction/removeparam_funcvalue.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/codeaction/removeparam_funcvalue.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,19 +0,0 @@ +-This test exercises change signature refactoring handling of function values. +- +-TODO(rfindley): use a literalization strategy to allow these references. +- +--- go.mod -- +-module unused.mod +- +-go 1.18 +- +--- a/a.go -- +-package a +- +-func A(x, unused int) int { //@codeaction("unused", "refactor.rewrite.removeUnusedParam", err=re"non-call function reference") +- return x +-} +- +-func _() { +- _ = A +-} +diff -urN a/gopls/internal/test/marker/testdata/codeaction/removeparam_imports.txt b/gopls/internal/test/marker/testdata/codeaction/removeparam_imports.txt +--- a/gopls/internal/test/marker/testdata/codeaction/removeparam_imports.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/codeaction/removeparam_imports.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,144 +0,0 @@ +-This test checks the behavior of removing a parameter with respect to various +-import scenarios. +- +--- go.mod -- +-module mod.test +- +-go 1.21 +- +- +--- a/a1.go -- +-package a +- +-import "mod.test/b" +- +-func _() { +- b.B(<-b.Chan, <-b.Chan) +-} +- +--- a/a2.go -- +-package a +- +-import "mod.test/b" +- +-func _() { +- b.B(<-b.Chan, <-b.Chan) +- b.B(<-b.Chan, <-b.Chan) +-} +- +--- a/a3.go -- +-package a +- +-import "mod.test/b" +- +-func _() { +- b.B(<-b.Chan, <-b.Chan) +-} +- +-func _() { +- b.B(<-b.Chan, <-b.Chan) +-} +- +--- a/a4.go -- +-package a +- +-// TODO(rfindley/adonovan): inlining here adds an additional import of +-// mod.test/b. Can we do better? +-import ( +- . "mod.test/b" +-) +- +-func _() { +- B(<-Chan, <-Chan) +-} +- +--- b/b.go -- +-package b +- +-import "mod.test/c" +- +-var Chan chan c.C +- +-func B(x, y c.C) { //@codeaction("x", "refactor.rewrite.removeUnusedParam", result=b) +-} +- +--- @b/a/a3.go -- +-package a +- +-import "mod.test/b" +- +-func _() { +- b.B(<-b.Chan) +-} +- +-func _() { +- b.B(<-b.Chan) +-} +--- @b/a/a2.go -- +-package a +- +-import "mod.test/b" +- +-func _() { +- b.B(<-b.Chan) +- b.B(<-b.Chan) +-} +--- @b/a/a1.go -- +-package a +- +-import "mod.test/b" +- +-func _() { +- b.B(<-b.Chan) +-} +--- @b/a/a4.go -- +-package a +- +-// TODO(rfindley/adonovan): inlining here adds an additional import of +-// mod.test/b. Can we do better? +-import ( +- "mod.test/b" +- . 
"mod.test/b" +-) +- +-func _() { +- b.B(<-Chan) +-} +--- @b/b/b.go -- +-package b +- +-import "mod.test/c" +- +-var Chan chan c.C +- +-func B(y c.C) { //@codeaction("x", "refactor.rewrite.removeUnusedParam", result=b) +-} +--- c/c.go -- +-package c +- +-type C int +- +--- d/d.go -- +-package d +- +-// Removing the parameter should remove this import. +-import "mod.test/c" +- +-func D(x c.C) { //@codeaction("x", "refactor.rewrite.removeUnusedParam", result=d) +-} +- +-func _() { +- D(1) +-} +- +--- @d/d/d.go -- +-package d +- +-// Removing the parameter should remove this import. +- +-func D() { //@codeaction("x", "refactor.rewrite.removeUnusedParam", result=d) +-} +- +-func _() { +- D() +-} +diff -urN a/gopls/internal/test/marker/testdata/codeaction/removeparam_issue65217.txt b/gopls/internal/test/marker/testdata/codeaction/removeparam_issue65217.txt +--- a/gopls/internal/test/marker/testdata/codeaction/removeparam_issue65217.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/codeaction/removeparam_issue65217.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,57 +0,0 @@ +-This test reproduces condition of golang/go#65217, where the inliner created an +-unnecessary eta abstraction. +- +--- go.mod -- +-module unused.mod +- +-go 1.18 +- +--- a/a.go -- +-package a +- +-type S struct{} +- +-func (S) Int() int { return 0 } +- +-func _() { +- var s S +- _ = f(s, s.Int()) +- var j int +- j = f(s, s.Int()) +- _ = j +-} +- +-func _() { +- var s S +- i := f(s, s.Int()) +- _ = i +-} +- +-func f(unused S, i int) int { //@codeaction("unused", "refactor.rewrite.removeUnusedParam", result=rewrite), diag("unused", re`unused`) +- return i +-} +- +--- @rewrite/a/a.go -- +-package a +- +-type S struct{} +- +-func (S) Int() int { return 0 } +- +-func _() { +- var s S +- _ = f(s.Int()) +- var j int +- j = f(s.Int()) +- _ = j +-} +- +-func _() { +- var s S +- i := f(s.Int()) +- _ = i +-} +- +-func f(i int) int { //@codeaction("unused", "refactor.rewrite.removeUnusedParam", result=rewrite), diag("unused", re`unused`) +- return i +-} +diff -urN a/gopls/internal/test/marker/testdata/codeaction/removeparam_method.txt b/gopls/internal/test/marker/testdata/codeaction/removeparam_method.txt +--- a/gopls/internal/test/marker/testdata/codeaction/removeparam_method.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/codeaction/removeparam_method.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,139 +0,0 @@ +-This test verifies that gopls can remove unused parameters from methods. +- +-Specifically, check +-1. basic removal of unused parameters, when the receiver is named, locally and +- across package boundaries +-2. handling of unnamed receivers +-3. no panics related to references through interface satisfaction +- +--- go.mod -- +-module example.com/rm +- +-go 1.20 +- +--- basic.go -- +-package rm +- +-type Basic int +- +-func (t Basic) Foo(x int) { //@codeaction("x", "refactor.rewrite.removeUnusedParam", result=basic) +-} +- +-func _(b Basic) { +- b.Foo(1) +- // TODO(rfindley): methodexprs should not get rewritten as methods. +- Basic.Foo(1, 2) +-} +- +--- basicuse/p.go -- +-package basicuse +- +-import "example.com/rm" +- +-func _() { +- x := new(rm.Basic) +- x.Foo(sideEffects()) +- rm.Basic.Foo(1,2) +-} +- +-func sideEffects() int +- +-type Fooer interface { +- Foo(int) +-} +- +-// Dynamic calls aren't rewritten. +-// Previously, this would cause a bug report or crash (golang/go#69896). 
+-func _(f Fooer) { +- f.Foo(1) +-} +- +--- @basic/basic.go -- +-package rm +- +-type Basic int +- +-func (t Basic) Foo() { //@codeaction("x", "refactor.rewrite.removeUnusedParam", result=basic) +-} +- +-func _(b Basic) { +- b.Foo() +- // TODO(rfindley): methodexprs should not get rewritten as methods. +- Basic(1).Foo() +-} +--- @basic/basicuse/p.go -- +-package basicuse +- +-import "example.com/rm" +- +-func _() { +- x := new(rm.Basic) +- x.Foo() +- rm.Basic(1).Foo() +-} +- +-func sideEffects() int +- +-type Fooer interface { +- Foo(int) +-} +- +-// Dynamic calls aren't rewritten. +-// Previously, this would cause a bug report or crash (golang/go#69896). +-func _(f Fooer) { +- f.Foo(1) +-} +--- missingrecv.go -- +-package rm +- +-type Missing struct{} +- +-var R2 int +- +-func (Missing) M(a, b, c, r0 int) (r1 int) { //@codeaction("b", "refactor.rewrite.removeUnusedParam", result=missingrecv) +- return a + c +-} +- +-func _() { +- m := &Missing{} +- _ = m.M(1, 2, 3, 4) +-} +- +--- missingrecvuse/p.go -- +-package missingrecvuse +- +-import "example.com/rm" +- +-func _() { +- x := rm.Missing{} +- x.M(1, sideEffects(), 3, 4) +-} +- +-func sideEffects() int +- +--- @missingrecv/missingrecv.go -- +-package rm +- +-type Missing struct{} +- +-var R2 int +- +-func (Missing) M(a, c, r0 int) (r1 int) { //@codeaction("b", "refactor.rewrite.removeUnusedParam", result=missingrecv) +- return a + c +-} +- +-func _() { +- m := &Missing{} +- _ = m.M(1, 3, 4) +-} +--- @missingrecv/missingrecvuse/p.go -- +-package missingrecvuse +- +-import "example.com/rm" +- +-func _() { +- x := rm.Missing{} +- x.M(1, 3, 4) +-} +- +-func sideEffects() int +diff -urN a/gopls/internal/test/marker/testdata/codeaction/removeparam_resolve.txt b/gopls/internal/test/marker/testdata/codeaction/removeparam_resolve.txt +--- a/gopls/internal/test/marker/testdata/codeaction/removeparam_resolve.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/codeaction/removeparam_resolve.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,245 +0,0 @@ +-This test exercises the refactoring to remove unused parameters, with resolve support. +-See removeparam.txt for same test without resolve support. 
+- +--- go.mod -- +-module unused.mod +- +-go 1.18 +- +--- a/a.go -- +-package a +- +-func A(x, unused int) int { //@codeaction("unused", "refactor.rewrite.removeUnusedParam", result=a) +- return x +-} +- +--- @a/a/a.go -- +-package a +- +-func A(x int) int { //@codeaction("unused", "refactor.rewrite.removeUnusedParam", result=a) +- return x +-} +- +--- a/a2.go -- +-package a +- +-func _() { +- A(1, 2) +-} +- +--- a/a_test.go -- +-package a +- +-func _() { +- A(1, 2) +-} +- +--- a/a_x_test.go -- +-package a_test +- +-import "unused.mod/a" +- +-func _() { +- a.A(1, 2) +-} +- +--- b/b.go -- +-package b +- +-import "unused.mod/a" +- +-func f() int { +- return 1 +-} +- +-func g() int { +- return 2 +-} +- +-func _() { +- a.A(f(), 1) +-} +- +-var _ = g +- +--- @a/a/a2.go -- +-package a +- +-func _() { +- A(1) +-} +--- @a/a/a_test.go -- +-package a +- +-func _() { +- A(1) +-} +--- @a/a/a_x_test.go -- +-package a_test +- +-import "unused.mod/a" +- +-func _() { +- a.A(1) +-} +--- @a/b/b.go -- +-package b +- +-import "unused.mod/a" +- +-func f() int { +- return 1 +-} +- +-func g() int { +- return 2 +-} +- +-func _() { +- a.A(f()) +-} +- +-var _ = g +--- field/field.go -- +-package field +- +-func Field(x int, field int) { //@codeaction("int", "refactor.rewrite.removeUnusedParam", result=field) +-} +- +-func _() { +- Field(1, 2) +-} +--- @field/field/field.go -- +-package field +- +-func Field(field int) { //@codeaction("int", "refactor.rewrite.removeUnusedParam", result=field) +-} +- +-func _() { +- Field(2) +-} +--- ellipsis/ellipsis.go -- +-package ellipsis +- +-func Ellipsis(...any) { //@codeaction("any", "refactor.rewrite.removeUnusedParam", result=ellipsis) +-} +- +-func _() { +- // TODO(rfindley): investigate the broken formatting resulting from these inlinings. +- Ellipsis() +- Ellipsis(1) +- Ellipsis(1, 2) +- Ellipsis(1, f(), g()) +- Ellipsis(h()) +- Ellipsis(i()...) +-} +- +-func f() int +-func g() int +-func h() (int, int) +-func i() []any +- +--- @ellipsis/ellipsis/ellipsis.go -- +-package ellipsis +- +-func Ellipsis() { //@codeaction("any", "refactor.rewrite.removeUnusedParam", result=ellipsis) +-} +- +-func _() { +- // TODO(rfindley): investigate the broken formatting resulting from these inlinings. +- Ellipsis() +- Ellipsis() +- Ellipsis() +- Ellipsis() +- func(_ ...any) { +- Ellipsis() +- }(h()) +- Ellipsis() +-} +- +-func f() int +-func g() int +-func h() (int, int) +-func i() []any +--- ellipsis2/ellipsis2.go -- +-package ellipsis2 +- +-func Ellipsis2(_, _ int, rest ...int) { //@codeaction("_", "refactor.rewrite.removeUnusedParam", result=ellipsis2) +-} +- +-func _() { +- Ellipsis2(1,2,3) +- Ellipsis2(h()) +- Ellipsis2(1,2, []int{3, 4}...) +-} +- +-func h() (int, int) +- +--- @ellipsis2/ellipsis2/ellipsis2.go -- +-package ellipsis2 +- +-func Ellipsis2(_ int, rest ...int) { //@codeaction("_", "refactor.rewrite.removeUnusedParam", result=ellipsis2) +-} +- +-func _() { +- Ellipsis2(2, 3) +- func(_, blank0 int, rest ...int) { +- Ellipsis2(blank0, rest...) +- }(h()) +- Ellipsis2(2, []int{3, 4}...) 
+-} +- +-func h() (int, int) +--- overlapping/overlapping.go -- +-package overlapping +- +-func Overlapping(i int) int { //@codeaction(re"(i) int", "refactor.rewrite.removeUnusedParam", err=re"overlapping") +- return 0 +-} +- +-func _() { +- x := Overlapping(Overlapping(0)) +- _ = x +-} +- +--- effects/effects.go -- +-package effects +- +-func effects(x, y int) int { //@codeaction("y", "refactor.rewrite.removeUnusedParam", result=effects), diag("y", re"unused") +- return x +-} +- +-func f() int +-func g() int +- +-func _() { +- effects(f(), g()) +- effects(f(), g()) +-} +--- @effects/effects/effects.go -- +-package effects +- +-func effects(x int) int { //@codeaction("y", "refactor.rewrite.removeUnusedParam", result=effects), diag("y", re"unused") +- return x +-} +- +-func f() int +-func g() int +- +-func _() { +- effects(f()) +- effects(f()) +-} +--- recursive/recursive.go -- +-package recursive +- +-func Recursive(x int) int { //@codeaction("x", "refactor.rewrite.removeUnusedParam", result=recursive) +- return Recursive(1) +-} +- +--- @recursive/recursive/recursive.go -- +-package recursive +- +-func Recursive() int { //@codeaction("x", "refactor.rewrite.removeUnusedParam", result=recursive) +- return Recursive() +-} +diff -urN a/gopls/internal/test/marker/testdata/codeaction/removeparam_satisfies.txt b/gopls/internal/test/marker/testdata/codeaction/removeparam_satisfies.txt +--- a/gopls/internal/test/marker/testdata/codeaction/removeparam_satisfies.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/codeaction/removeparam_satisfies.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,61 +0,0 @@ +-This test verifies that gopls can remove unused parameters from methods, +-when that method satisfies an interface. +- +-For now, we just update static calls. In the future, we should compute the set +-of dynamic calls that must change (and therefore, the set of concrete functions +-that must be modified), in order to produce the desired outcome for our users. +- +-Doing so would be more complicated, so for now this test simply records the +-current behavior. +- +--- go.mod -- +-module example.com/rm +- +-go 1.20 +- +--- p.go -- +-package rm +- +-type T int +- +-func (t T) Foo(x int) { //@codeaction("x", "refactor.rewrite.removeUnusedParam", result=basic) +-} +- +--- @basic/p.go -- +-package rm +- +-type T int +- +-func (t T) Foo() { //@codeaction("x", "refactor.rewrite.removeUnusedParam", result=basic) +-} +- +--- @basic/use/use.go -- +-package use +- +-import "example.com/rm" +- +-type Fooer interface { +- Foo(int) +-} +- +-var _ Fooer = rm.T(0) +- +-func _() { +- var x rm.T +- x.Foo() +-} +--- use/use.go -- +-package use +- +-import "example.com/rm" +- +-type Fooer interface{ +- Foo(int) +-} +- +-var _ Fooer = rm.T(0) +- +-func _() { +- var x rm.T +- x.Foo(1) +-} +diff -urN a/gopls/internal/test/marker/testdata/codeaction/removeparam.txt b/gopls/internal/test/marker/testdata/codeaction/removeparam.txt +--- a/gopls/internal/test/marker/testdata/codeaction/removeparam.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/codeaction/removeparam.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,255 +0,0 @@ +-This test exercises the refactoring to remove unused parameters. +-See removeparam_resolve.txt for same test with resolve support. 
+- +--- capabilities.json -- +-{ +- "textDocument": { +- "codeAction": { +- "dataSupport": false, +- "resolveSupport": {} +- } +- } +-} +- +--- go.mod -- +-module unused.mod +- +-go 1.18 +- +--- a/a.go -- +-package a +- +-func A(x, unused int) int { //@codeaction("unused", "refactor.rewrite.removeUnusedParam", result=a) +- return x +-} +- +--- @a/a/a.go -- +-package a +- +-func A(x int) int { //@codeaction("unused", "refactor.rewrite.removeUnusedParam", result=a) +- return x +-} +- +--- a/a2.go -- +-package a +- +-func _() { +- A(1, 2) +-} +- +--- a/a_test.go -- +-package a +- +-func _() { +- A(1, 2) +-} +- +--- a/a_x_test.go -- +-package a_test +- +-import "unused.mod/a" +- +-func _() { +- a.A(1, 2) +-} +- +--- b/b.go -- +-package b +- +-import "unused.mod/a" +- +-func f() int { +- return 1 +-} +- +-func g() int { +- return 2 +-} +- +-func _() { +- a.A(f(), 1) +-} +- +-var _ = g +- +--- @a/a/a2.go -- +-package a +- +-func _() { +- A(1) +-} +--- @a/a/a_test.go -- +-package a +- +-func _() { +- A(1) +-} +--- @a/a/a_x_test.go -- +-package a_test +- +-import "unused.mod/a" +- +-func _() { +- a.A(1) +-} +--- @a/b/b.go -- +-package b +- +-import "unused.mod/a" +- +-func f() int { +- return 1 +-} +- +-func g() int { +- return 2 +-} +- +-func _() { +- a.A(f()) +-} +- +-var _ = g +--- field/field.go -- +-package field +- +-func Field(x int, field int) { //@codeaction("int", "refactor.rewrite.removeUnusedParam", result=field) +-} +- +-func _() { +- Field(1, 2) +-} +--- @field/field/field.go -- +-package field +- +-func Field(field int) { //@codeaction("int", "refactor.rewrite.removeUnusedParam", result=field) +-} +- +-func _() { +- Field(2) +-} +--- ellipsis/ellipsis.go -- +-package ellipsis +- +-func Ellipsis(...any) { //@codeaction("any", "refactor.rewrite.removeUnusedParam", result=ellipsis) +-} +- +-func _() { +- // TODO(rfindley): investigate the broken formatting resulting from these inlinings. +- Ellipsis() +- Ellipsis(1) +- Ellipsis(1, 2) +- Ellipsis(1, f(), g()) +- Ellipsis(h()) +- Ellipsis(i()...) +-} +- +-func f() int +-func g() int +-func h() (int, int) +-func i() []any +- +--- @ellipsis/ellipsis/ellipsis.go -- +-package ellipsis +- +-func Ellipsis() { //@codeaction("any", "refactor.rewrite.removeUnusedParam", result=ellipsis) +-} +- +-func _() { +- // TODO(rfindley): investigate the broken formatting resulting from these inlinings. +- Ellipsis() +- Ellipsis() +- Ellipsis() +- Ellipsis() +- func(_ ...any) { +- Ellipsis() +- }(h()) +- Ellipsis() +-} +- +-func f() int +-func g() int +-func h() (int, int) +-func i() []any +--- ellipsis2/ellipsis2.go -- +-package ellipsis2 +- +-func Ellipsis2(_, _ int, rest ...int) { //@codeaction("_", "refactor.rewrite.removeUnusedParam", result=ellipsis2) +-} +- +-func _() { +- Ellipsis2(1,2,3) +- Ellipsis2(h()) +- Ellipsis2(1,2, []int{3, 4}...) +-} +- +-func h() (int, int) +- +--- @ellipsis2/ellipsis2/ellipsis2.go -- +-package ellipsis2 +- +-func Ellipsis2(_ int, rest ...int) { //@codeaction("_", "refactor.rewrite.removeUnusedParam", result=ellipsis2) +-} +- +-func _() { +- Ellipsis2(2, 3) +- func(_, blank0 int, rest ...int) { +- Ellipsis2(blank0, rest...) +- }(h()) +- Ellipsis2(2, []int{3, 4}...) 
+-} +- +-func h() (int, int) +--- overlapping/overlapping.go -- +-package overlapping +- +-func Overlapping(i int) int { //@codeaction(re"(i) int", "refactor.rewrite.removeUnusedParam", err=re"overlapping") +- return 0 +-} +- +-func _() { +- x := Overlapping(Overlapping(0)) +- _ = x +-} +- +--- effects/effects.go -- +-package effects +- +-func effects(x, y int) int { //@ diag("y", re"unused"), codeaction("y", "refactor.rewrite.removeUnusedParam", result=effects) +- return x +-} +- +-func f() int +-func g() int +- +-func _() { +- effects(f(), g()) +- effects(f(), g()) +-} +--- @effects/effects/effects.go -- +-package effects +- +-func effects(x int) int { //@ diag("y", re"unused"), codeaction("y", "refactor.rewrite.removeUnusedParam", result=effects) +- return x +-} +- +-func f() int +-func g() int +- +-func _() { +- effects(f()) +- effects(f()) +-} +--- recursive/recursive.go -- +-package recursive +- +-func Recursive(x int) int { //@codeaction("x", "refactor.rewrite.removeUnusedParam", result=recursive) +- return Recursive(1) +-} +- +--- @recursive/recursive/recursive.go -- +-package recursive +- +-func Recursive() int { //@codeaction("x", "refactor.rewrite.removeUnusedParam", result=recursive) +- return Recursive() +-} +diff -urN a/gopls/internal/test/marker/testdata/codeaction/removeparam_witherrs.txt b/gopls/internal/test/marker/testdata/codeaction/removeparam_witherrs.txt +--- a/gopls/internal/test/marker/testdata/codeaction/removeparam_witherrs.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/codeaction/removeparam_witherrs.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,11 +0,0 @@ +-This test checks that we can't remove parameters for packages with errors. +- +--- p.go -- +-package p +- +-func foo(unused int) { //@codeaction("unused", "refactor.rewrite.removeUnusedParam", err=re"found 0") +-} +- +-func _() { +- foo("") //@diag(`""`, re"cannot use") +-} +diff -urN a/gopls/internal/test/marker/testdata/codeaction/remove_struct_tags.txt b/gopls/internal/test/marker/testdata/codeaction/remove_struct_tags.txt +--- a/gopls/internal/test/marker/testdata/codeaction/remove_struct_tags.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/codeaction/remove_struct_tags.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,31 +0,0 @@ +-This test checks the behavior of the 'Remove struct tags' code action. 
+- +--- flags -- +--ignore_extra_diags +- +--- removetags.go -- +-package removetags +- +-type A struct { +- x int `json:"x"` //@codeaction("x", "refactor.rewrite.removeTags", edit=singleline) +- y int `json:"y"` //@codeaction(re`(?s)y.*.z int`, "refactor.rewrite.removeTags", edit=twolines) +- z int `json:"z"` //@codeaction(re`()n`, "refactor.rewrite.removeTags", edit=entirestruct) +-} +--- @entirestruct/removetags.go -- +-@@ -4,3 +4,3 @@ +-- x int `json:"x"` //@codeaction("x", "refactor.rewrite.removeTags", edit=singleline) +-- y int `json:"y"` //@codeaction(re`(?s)y.*.z int`, "refactor.rewrite.removeTags", edit=twolines) +-- z int `json:"z"` //@codeaction(re`()n`, "refactor.rewrite.removeTags", edit=entirestruct) +-+ x int //@codeaction("x", "refactor.rewrite.removeTags", edit=singleline) +-+ y int //@codeaction(re`(?s)y.*.z int`, "refactor.rewrite.removeTags", edit=twolines) +-+ z int //@codeaction(re`()n`, "refactor.rewrite.removeTags", edit=entirestruct) +--- @singleline/removetags.go -- +-@@ -4 +4 @@ +-- x int `json:"x"` //@codeaction("x", "refactor.rewrite.removeTags", edit=singleline) +-+ x int //@codeaction("x", "refactor.rewrite.removeTags", edit=singleline) +--- @twolines/removetags.go -- +-@@ -5,2 +5,2 @@ +-- y int `json:"y"` //@codeaction(re`(?s)y.*.z int`, "refactor.rewrite.removeTags", edit=twolines) +-- z int `json:"z"` //@codeaction(re`()n`, "refactor.rewrite.removeTags", edit=entirestruct) +-+ y int //@codeaction(re`(?s)y.*.z int`, "refactor.rewrite.removeTags", edit=twolines) +-+ z int //@codeaction(re`()n`, "refactor.rewrite.removeTags", edit=entirestruct) +diff -urN a/gopls/internal/test/marker/testdata/codeaction/splitlines.txt b/gopls/internal/test/marker/testdata/codeaction/splitlines.txt +--- a/gopls/internal/test/marker/testdata/codeaction/splitlines.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/codeaction/splitlines.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,223 +0,0 @@ +-This test exercises the refactoring of putting arguments, results, and composite literal elements +-into separate lines. 
+- +--- go.mod -- +-module unused.mod +- +-go 1.18 +- +--- func_arg/func_arg.go -- +-package func_arg +- +-func A(a string, b, c int64, x int, y int) (r1 string, r2, r3 int64, r4 int, r5 int) { //@codeaction("x", "refactor.rewrite.splitLines", result=func_arg) +- return a, b, c, x, y +-} +- +--- @func_arg/func_arg/func_arg.go -- +-package func_arg +- +-func A( +- a string, +- b, c int64, +- x int, +- y int, +-) (r1 string, r2, r3 int64, r4 int, r5 int) { //@codeaction("x", "refactor.rewrite.splitLines", result=func_arg) +- return a, b, c, x, y +-} +- +--- func_ret/func_ret.go -- +-package func_ret +- +-func A(a string, b, c int64, x int, y int) (r1 string, r2, r3 int64, r4 int, r5 int) { //@codeaction("r1", "refactor.rewrite.splitLines", result=func_ret) +- return a, b, c, x, y +-} +- +--- @func_ret/func_ret/func_ret.go -- +-package func_ret +- +-func A(a string, b, c int64, x int, y int) ( +- r1 string, +- r2, r3 int64, +- r4 int, +- r5 int, +-) { //@codeaction("r1", "refactor.rewrite.splitLines", result=func_ret) +- return a, b, c, x, y +-} +- +--- functype_arg/functype_arg.go -- +-package functype_arg +- +-type A func(a string, b, c int64, x int, y int) (r1 string, r2, r3 int64, r4 int, r5 int) //@codeaction("x", "refactor.rewrite.splitLines", result=functype_arg) +- +--- @functype_arg/functype_arg/functype_arg.go -- +-package functype_arg +- +-type A func( +- a string, +- b, c int64, +- x int, +- y int, +-) (r1 string, r2, r3 int64, r4 int, r5 int) //@codeaction("x", "refactor.rewrite.splitLines", result=functype_arg) +- +--- functype_ret/functype_ret.go -- +-package functype_ret +- +-type A func(a string, b, c int64, x int, y int) (r1 string, r2, r3 int64, r4 int, r5 int) //@codeaction("r1", "refactor.rewrite.splitLines", result=functype_ret) +- +--- @functype_ret/functype_ret/functype_ret.go -- +-package functype_ret +- +-type A func(a string, b, c int64, x int, y int) ( +- r1 string, +- r2, r3 int64, +- r4 int, +- r5 int, +-) //@codeaction("r1", "refactor.rewrite.splitLines", result=functype_ret) +- +--- func_call/func_call.go -- +-package func_call +- +-import "fmt" +- +-func F() { +- fmt.Println(1, 2, 3, fmt.Sprintf("hello %d", 4)) //@codeaction("1", "refactor.rewrite.splitLines", result=func_call) +-} +- +--- @func_call/func_call/func_call.go -- +-package func_call +- +-import "fmt" +- +-func F() { +- fmt.Println( +- 1, +- 2, +- 3, +- fmt.Sprintf("hello %d", 4), +- ) //@codeaction("1", "refactor.rewrite.splitLines", result=func_call) +-} +- +--- indent/indent.go -- +-package indent +- +-import "fmt" +- +-func F() { +- fmt.Println(1, 2, 3, fmt.Sprintf("hello %d", 4)) //@codeaction("hello", "refactor.rewrite.splitLines", result=indent) +-} +- +--- @indent/indent/indent.go -- +-package indent +- +-import "fmt" +- +-func F() { +- fmt.Println(1, 2, 3, fmt.Sprintf( +- "hello %d", +- 4, +- )) //@codeaction("hello", "refactor.rewrite.splitLines", result=indent) +-} +- +--- indent2/indent2.go -- +-package indent2 +- +-import "fmt" +- +-func F() { +- fmt. +- Println(1, 2, 3, fmt.Sprintf("hello %d", 4)) //@codeaction("1", "refactor.rewrite.splitLines", result=indent2) +-} +- +--- @indent2/indent2/indent2.go -- +-package indent2 +- +-import "fmt" +- +-func F() { +- fmt. 
+- Println( +- 1, +- 2, +- 3, +- fmt.Sprintf("hello %d", 4), +- ) //@codeaction("1", "refactor.rewrite.splitLines", result=indent2) +-} +- +--- structelts/structelts.go -- +-package structelts +- +-type A struct{ +- a int +- b int +-} +- +-func F() { +- _ = A{a: 1, b: 2} //@codeaction("b", "refactor.rewrite.splitLines", result=structelts) +-} +- +--- @structelts/structelts/structelts.go -- +-package structelts +- +-type A struct{ +- a int +- b int +-} +- +-func F() { +- _ = A{ +- a: 1, +- b: 2, +- } //@codeaction("b", "refactor.rewrite.splitLines", result=structelts) +-} +- +--- sliceelts/sliceelts.go -- +-package sliceelts +- +-func F() { +- _ = []int{1, 2} //@codeaction("1", "refactor.rewrite.splitLines", result=sliceelts) +-} +- +--- @sliceelts/sliceelts/sliceelts.go -- +-package sliceelts +- +-func F() { +- _ = []int{ +- 1, +- 2, +- } //@codeaction("1", "refactor.rewrite.splitLines", result=sliceelts) +-} +- +--- mapelts/mapelts.go -- +-package mapelts +- +-func F() { +- _ = map[string]int{"a": 1, "b": 2} //@codeaction("1", "refactor.rewrite.splitLines", result=mapelts) +-} +- +--- @mapelts/mapelts/mapelts.go -- +-package mapelts +- +-func F() { +- _ = map[string]int{ +- "a": 1, +- "b": 2, +- } //@codeaction("1", "refactor.rewrite.splitLines", result=mapelts) +-} +- +--- starcomment/starcomment.go -- +-package starcomment +- +-func A(/*1*/ x /*2*/ string /*3*/, /*4*/ y /*5*/ int /*6*/) (string, int) { //@codeaction("x", "refactor.rewrite.splitLines", result=starcomment) +- return x, y +-} +- +--- @starcomment/starcomment/starcomment.go -- +-package starcomment +- +-func A( +- /*1*/ x /*2*/ string /*3*/, +- /*4*/ y /*5*/ int /*6*/, +-) (string, int) { //@codeaction("x", "refactor.rewrite.splitLines", result=starcomment) +- return x, y +-} +- +diff -urN a/gopls/internal/test/marker/testdata/codeaction/splitlines-variadic.txt b/gopls/internal/test/marker/testdata/codeaction/splitlines-variadic.txt +--- a/gopls/internal/test/marker/testdata/codeaction/splitlines-variadic.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/codeaction/splitlines-variadic.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,55 +0,0 @@ +-This is a regression test for #70519, in which the ellipsis +-of a variadic call would go missing after split/join lines. +- +--- go.mod -- +-module example.com +-go 1.18 +- +--- a/a.go -- +-package a +- +-var a, b, c []any +-func f(any, any, ...any) +- +-func _() { +- f(a, b, c...) //@codeaction("a", "refactor.rewrite.splitLines", result=split) +- +- f( +- a, +- b, +- c..., /*@codeaction("c", "refactor.rewrite.joinLines", result=joined)*/ +- ) +-} +- +--- @split/a/a.go -- +-package a +- +-var a, b, c []any +-func f(any, any, ...any) +- +-func _() { +- f( +- a, +- b, +- c..., +- ) //@codeaction("a", "refactor.rewrite.splitLines", result=split) +- +- f( +- a, +- b, +- c..., /*@codeaction("c", "refactor.rewrite.joinLines", result=joined)*/ +- ) +-} +- +--- @joined/a/a.go -- +-package a +- +-var a, b, c []any +-func f(any, any, ...any) +- +-func _() { +- f(a, b, c...) 
//@codeaction("a", "refactor.rewrite.splitLines", result=split) +- +- f(a, b, c..., /*@codeaction("c", "refactor.rewrite.joinLines", result=joined)*/) +-} +- +diff -urN a/gopls/internal/test/marker/testdata/codelens/generate.txt b/gopls/internal/test/marker/testdata/codelens/generate.txt +--- a/gopls/internal/test/marker/testdata/codelens/generate.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/codelens/generate.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,9 +0,0 @@ +-This test exercises the "generate" codelens. +- +--- generate.go -- +-//@codelenses() +- +-package generate +- +-//go:generate echo Hi //@ codelens("//go:generate", "run go generate"), codelens("//go:generate", "run go generate ./...") +-//go:generate echo I shall have no CodeLens +diff -urN a/gopls/internal/test/marker/testdata/codelens/test.txt b/gopls/internal/test/marker/testdata/codelens/test.txt +--- a/gopls/internal/test/marker/testdata/codelens/test.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/codelens/test.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,38 +0,0 @@ +-This file tests codelenses for test functions. +- +-TODO: for some reason these code lens have zero width. Does that affect their +-utility/visibility in various LSP clients? +- +--- settings.json -- +-{ +- "codelenses": { +- "test": true +- } +-} +- +--- p_test.go -- +-//@codelenses() +- +-package codelens //@codelens(re"()package codelens", "run file benchmarks") +- +-import "testing" +- +-func TestMain(m *testing.M) {} // no code lens for TestMain +- +-func TestFuncWithCodeLens(t *testing.T) { //@codelens(re"()func", "run test") +-} +- +-func thisShouldNotHaveACodeLens(t *testing.T) { //@diag("t ", re"unused parameter") +- println() // nonempty body => "unused parameter" +-} +- +-func BenchmarkFuncWithCodeLens(b *testing.B) { //@codelens(re"()func", "run benchmark") +-} +- +-func helper() {} // expect no code lens +- +-func _() { +- // pacify unusedfunc +- thisShouldNotHaveACodeLens(nil) +- helper() +-} +diff -urN a/gopls/internal/test/marker/testdata/completion/address.txt b/gopls/internal/test/marker/testdata/completion/address.txt +--- a/gopls/internal/test/marker/testdata/completion/address.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/completion/address.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,92 +0,0 @@ +-This test exercises the reference and dereference completion modifiers. +- +-TODO: remove the need to set "literalCompletions" here, as this is one of the +-few places this setting is needed. 
+- +--- flags -- +--ignore_extra_diags +- +--- go.mod -- +-module golang.org/lsptests +- +-go 1.18 +- +--- address/address.go -- +-package address +- +-func wantsPtr(*int) {} +-func wantsVariadicPtr(...*int) {} +- +-func wantsVariadic(...int) {} +- +-type foo struct{ c int } //@item(addrFieldC, "c", "int", "field") +- +-func _() { +- var ( +- a string //@item(addrA, "a", "string", "var") +- b int //@item(addrB, "b", "int", "var") +- ) +- +- wantsPtr() //@rank(")", addrB, addrA),snippet(")", addrB, "&b") +- wantsPtr(&b) //@snippet(")", addrB, "b") +- +- wantsVariadicPtr() //@rank(")", addrB, addrA),snippet(")", addrB, "&b") +- +- var s foo +- s.c //@item(addrDeepC, "s.c", "int", "field") +- wantsPtr() //@snippet(")", addrDeepC, "&s.c") +- wantsPtr(s) //@snippet(")", addrDeepC, "&s.c") +- wantsPtr(&s) //@snippet(")", addrDeepC, "s.c") +- +- // don't add "&" in item (it gets added as an additional edit) +- wantsPtr(&s.c) //@snippet(")", addrFieldC, "c") +- +- // check dereferencing as well +- var c *int //@item(addrCPtr, "c", "*int", "var") +- var _ int = _ //@rank("_ //", addrCPtr, addrA),snippet("_ //", addrCPtr, "*c") +- +- wantsVariadic() //@rank(")", addrCPtr, addrA),snippet(")", addrCPtr, "*c") +- +- var d **int //@item(addrDPtr, "d", "**int", "var") +- var _ int = _ //@rank("_ //", addrDPtr, addrA),snippet("_ //", addrDPtr, "**d") +- +- type namedPtr *int +- var np namedPtr //@item(addrNamedPtr, "np", "namedPtr", "var") +- +- var _ int = _ //@rank("_ //", addrNamedPtr, addrA) +- +- // don't get tripped up by recursive pointer type +- type dontMessUp *dontMessUp //@item(dontMessUp, "dontMessUp", "*dontMessUp", "type") +- var dmu *dontMessUp //@item(addrDMU, "dmu", "*dontMessUp", "var") +- +- var _ int = dmu //@complete(" //", addrDMU, dontMessUp) +-} +- +-func (f foo) ptr() *foo { return &f } +- +-func _() { +- getFoo := func() foo { return foo{} } +- +- // not addressable +- getFoo().c //@item(addrGetFooC, "getFoo().c", "int", "field") +- +- // addressable +- getFoo().ptr().c //@item(addrGetFooPtrC, "getFoo().ptr().c", "int", "field") +- +- wantsPtr() //@snippet(")", addrGetFooPtrC, "&getFoo().ptr().c") +- wantsPtr(&g) //@snippet(")", addrGetFooPtrC, "getFoo().ptr().c") +-} +- +-type nested struct { +- f foo +-} +- +-func _() { +- getNested := func() nested { return nested{} } +- +- getNested().f.c //@item(addrNestedC, "getNested().f.c", "int", "field") +- getNested().f.ptr().c //@item(addrNestedPtrC, "getNested().f.ptr().c", "int", "field") +- +- // addrNestedC is not addressable, so rank lower +- wantsPtr(getNestedfc) //@complete(")", addrNestedPtrC, addrNestedC) +-} +diff -urN a/gopls/internal/test/marker/testdata/completion/alias.txt b/gopls/internal/test/marker/testdata/completion/alias.txt +--- a/gopls/internal/test/marker/testdata/completion/alias.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/completion/alias.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,41 +0,0 @@ +-This test checks completion related to aliases. +- +--- flags -- +--ignore_extra_diags +- +--- aliases.go -- +-package aliases +- +-// Copied from the old builtins.go, which has been ported to the new marker tests. 
+-/* string */ //@item(string, "string", "", "type") +-/* int */ //@item(int, "int", "", "type") +-/* float32 */ //@item(float32, "float32", "", "type") +-/* float64 */ //@item(float64, "float64", "", "type") +- +-type p struct{} +- +-type s[a int | string] = p +- +-func _() { +- s[]{} //@rank("]", int, float64) +-} +- +-func takesGeneric[a int | string](s[a]) { +- "s[a]{}" //@item(tpInScopeLit, "s[a]{}", "", "var") +- takesGeneric() //@rank(")", tpInScopeLit),snippet(")", tpInScopeLit, "s[a]{\\}") +-} +- +-func _() { +- s[int]{} //@item(tpInstLit, "s[int]{}", "", "var") +- takesGeneric[int]() //@rank(")", tpInstLit),snippet(")", tpInstLit, "s[int]{\\}") +- +- "s[...]{}" //@item(tpUninstLit, "s[...]{}", "", "var") +- takesGeneric() //@rank(")", tpUninstLit),snippet(")", tpUninstLit, "s[${1:}]{\\}") +-} +- +- +-type myType int //@item(flType, "myType", "int", "type") +- +-type myt[T int] myType //@item(aflType, "myt[T]", "int", "type") +- +-func (my myt) _() {} //@complete(") _", flType, aflType) +diff -urN a/gopls/internal/test/marker/testdata/completion/anon.txt b/gopls/internal/test/marker/testdata/completion/anon.txt +--- a/gopls/internal/test/marker/testdata/completion/anon.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/completion/anon.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,37 +0,0 @@ +-This test checks completion related to anonymous structs. +- +--- flags -- +--ignore_extra_diags +- +--- settings.json -- +-{ +- "deepCompletion": false +-} +- +--- anon.go -- +-package anon +- +-// Literal completion results. +-/* int() */ //@item(int, "int()", "int", "var") +- +-func _() { +- for _, _ := range []struct { +- i, j int //@item(anonI, "i", "int", "field"),item(anonJ, "j", "int", "field") +- }{ +- { +- i: 1, +- //@complete("", anonJ) +- }, +- { +- //@complete("", anonI, anonJ, int) +- }, +- } { +- continue +- } +- +- s := struct{ f int }{ } //@item(anonF, "f", "int", "field"),item(structS, "s", "struct{...}", "var"),complete(" }", anonF, int) +- +- _ = map[struct{ x int }]int{ //@item(anonX, "x", "int", "field") +- struct{ x int }{ }: 1, //@complete(" }", anonX, int, structS) +- } +-} +diff -urN a/gopls/internal/test/marker/testdata/completion/append.txt b/gopls/internal/test/marker/testdata/completion/append.txt +--- a/gopls/internal/test/marker/testdata/completion/append.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/completion/append.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,58 +0,0 @@ +-This test checks behavior of completion within append expressions. 
+- +--- flags -- +--ignore_extra_diags +- +--- go.mod -- +-module golang.org/lsptests/append +- +-go 1.18 +- +--- append.go -- +-package append +- +-func foo([]string) {} +-func bar(...string) {} +- +-func _() { +- var ( +- aInt []int //@item(appendInt, "aInt", "[]int", "var") +- aStrings []string //@item(appendStrings, "aStrings", "[]string", "var") +- aString string //@item(appendString, "aString", "string", "var") +- ) +- +- append(aStrings, a) //@rank(")", appendString, appendInt) +- var _ any = append(aStrings, a) //@rank(")", appendString, appendInt) +- var _ []string = append(oops, a) //@rank(")", appendString, appendInt) +- +- foo(append()) //@rank("))", appendStrings, appendInt),rank("))", appendStrings, appendString) +- foo(append([]string{}, a)) //@rank("))", appendStrings, appendInt),rank("))", appendString, appendInt),snippet("))", appendStrings, "aStrings...") +- foo(append([]string{}, "", a)) //@rank("))", appendString, appendInt),rank("))", appendString, appendStrings) +- +- // Don't add "..." to append() argument. +- bar(append()) //@snippet("))", appendStrings, "aStrings") +- +- type baz struct{} +- baz{} //@item(appendBazLiteral, "baz{}", "", "var") +- var bazzes []baz //@item(appendBazzes, "bazzes", "[]baz", "var") +- var bazzy baz //@item(appendBazzy, "bazzy", "baz", "var") +- bazzes = append(bazzes, ba) //@rank(")", appendBazzy, appendBazLiteral, appendBazzes) +- +- var b struct{ b []baz } +- b.b //@item(appendNestedBaz, "b.b", "[]baz", "field") +- b.b = append(b.b, b) //@rank(")", appendBazzy, appendBazLiteral, appendNestedBaz) +- +- var aStringsPtr *[]string //@item(appendStringsPtr, "aStringsPtr", "*[]string", "var") +- foo(append([]string{}, a)) //@snippet("))", appendStringsPtr, "*aStringsPtr...") +- +- foo(append([]string{}, *a)) //@snippet("))", appendStringsPtr, "aStringsPtr...") +-} +- +--- append2.go -- +-package append +- +-func _() { +- _ = append(a, struct) //@complete(")", structs) +-} +- +-//@item(structs, "structs", `"structs"`) +diff -urN a/gopls/internal/test/marker/testdata/completion/assign.txt b/gopls/internal/test/marker/testdata/completion/assign.txt +--- a/gopls/internal/test/marker/testdata/completion/assign.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/completion/assign.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,47 +0,0 @@ +-This test checks that completion considers assignability when ranking results. 
+- +--- flags -- +--ignore_extra_diags +- +--- go.mod -- +-module golang.org/lsptests/assign +- +-go 1.18 +- +--- settings.json -- +-{ +- "completeUnimported": false +-} +- +--- assign.go -- +-package assign +- +-import "golang.org/lsptests/assign/internal/secret" +- +-func _() { +- secret.Hello() +- var ( +- myInt int //@item(assignInt, "myInt", "int", "var") +- myStr string //@item(assignStr, "myStr", "string", "var") +- ) +- +- var _ string = my //@rank(" //", assignStr, assignInt) +- var _ string = //@rank(" //", assignStr, assignInt) +-} +- +-func _() { +- var a string = a //@complete(" //") +-} +- +-func _() { +- fooBar := fooBa //@complete(" //"),item(assignFooBar, "fooBar", "", "var") +- abc, fooBar := 123, fooBa //@complete(" //", assignFooBar) +- { +- fooBar := fooBa //@complete(" //", assignFooBar) +- } +-} +- +--- internal/secret/secret.go -- +-package secret +- +-func Hello() {} +diff -urN a/gopls/internal/test/marker/testdata/completion/bad.txt b/gopls/internal/test/marker/testdata/completion/bad.txt +--- a/gopls/internal/test/marker/testdata/completion/bad.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/completion/bad.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,68 +0,0 @@ +-This test exercises completion in the presence of type errors. +- +-Note: this test was ported from the old marker tests, which did not enable +-unimported completion. Enabling it causes matches in e.g. crypto/rand. +- +--- settings.json -- +-{ +- "completeUnimported": false +-} +- +--- go.mod -- +-module bad.test +- +-go 1.18 +- +--- bad/bad0.go -- +-package bad +- +-func stuff() { //@item(stuff, "stuff", "func()", "func") +- x := "heeeeyyyy" +- random2(x) //@diag("x", re"cannot use x \\(variable of type string\\) as int value in argument to random2") +- random2(1) //@complete("dom", random, random2, random3) +- y := 3 //@diag("y", re"declared (and|but) not used") +-} +- +-type bob struct { //@item(bob, "bob", "struct{...}", "struct") +- x int +-} +- +-func _() { +- var q int +- _ = &bob{ +- f: q, //@diag("f: q", re"unknown field f in struct literal") +- } +-} +- +--- bad/bad1.go -- +-package bad +- +-// See #36637 +-type stateFunc func() stateFunc //@item(stateFunc, "stateFunc", "func() stateFunc", "type") +- +-var a unknown //@item(global_a, "a", "unknown", "var"),diag("unknown", re"(undeclared name|undefined): unknown") +- +-func random() int { //@item(random, "random", "func() int", "func") +- //@complete("", global_a, bob, random, random2, random3, stateFunc, stuff) +- return 0 +-} +- +-func random2(y int) int { //@item(random2, "random2", "func(y int) int", "func"),item(bad_y_param, "y", "int", "var") +- x := 6 //@item(x, "x", "int", "var"),diag("x", re"declared (and|but) not used") +- var q blah //@item(q, "q", "blah", "var"),diag("q", re"declared (and|but) not used"),diag("blah", re"(undeclared name|undefined): blah") +- var t **blob //@item(t, "t", "**blob", "var"),diag("t", re"declared (and|but) not used"),diag("blob", re"(undeclared name|undefined): blob") +- //@complete("", q, t, x, bad_y_param, global_a, bob, random, random2, random3, stateFunc, stuff) +- +- return y +-} +- +-func random3(y ...int) { //@item(random3, "random3", "func(y ...int)", "func"),item(y_variadic_param, "y", "[]int", "var") +- //@complete("", y_variadic_param, global_a, bob, random, random2, random3, stateFunc, stuff) +- +- var ch chan (favType1) //@item(ch, "ch", "chan (favType1)", "var"),diag("ch", re"declared (and|but) not used"),diag("favType1", re"(undeclared name|undefined): 
favType1") +- var m map[keyType]int //@item(m, "m", "map[keyType]int", "var"),diag("m", re"declared (and|but) not used"),diag("keyType", re"(undeclared name|undefined): keyType") +- var arr []favType2 //@item(arr, "arr", "[]favType2", "var"),diag("arr", re"declared (and|but) not used"),diag("favType2", re"(undeclared name|undefined): favType2") +- var fn1 func() badResult //@item(fn1, "fn1", "func() badResult", "var"),diag("fn1", re"declared (and|but) not used"),diag("badResult", re"(undeclared name|undefined): badResult") +- var fn2 func(badParam) //@item(fn2, "fn2", "func(badParam)", "var"),diag("fn2", re"declared (and|but) not used"),diag("badParam", re"(undeclared name|undefined): badParam") +- //@complete("", arr, ch, fn1, fn2, m, y_variadic_param, global_a, bob, random, random2, random3, stateFunc, stuff) +-} +diff -urN a/gopls/internal/test/marker/testdata/completion/basic_lit.txt b/gopls/internal/test/marker/testdata/completion/basic_lit.txt +--- a/gopls/internal/test/marker/testdata/completion/basic_lit.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/completion/basic_lit.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,19 +0,0 @@ +-This test checks completion related to basic literals. +- +--- flags -- +--ignore_extra_diags +- +--- basiclit.go -- +-package basiclit +- +-func _() { +- var a int // something for lexical completions +- +- _ = "hello." //@complete(".") +- +- _ = 1 //@complete(" //") +- +- _ = 1. //@complete(".") +- +- _ = 'a' //@complete("' ") +-} +diff -urN a/gopls/internal/test/marker/testdata/completion/builtins.txt b/gopls/internal/test/marker/testdata/completion/builtins.txt +--- a/gopls/internal/test/marker/testdata/completion/builtins.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/completion/builtins.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,118 +0,0 @@ +-This test checks completion of Go builtins. +- +--- flags -- +--ignore_extra_diags +--filter_builtins=false +- +--- builtin_args.go -- +-package builtins +- +-func _() { +- var ( +- aSlice []int //@item(builtinSlice, "aSlice", "[]int", "var") +- aMap map[string]int //@item(builtinMap, "aMap", "map[string]int", "var") +- aString string //@item(builtinString, "aString", "string", "var") +- aArray [0]int //@item(builtinArray, "aArray", "[0]int", "var") +- aArrayPtr *[0]int //@item(builtinArrayPtr, "aArrayPtr", "*[0]int", "var") +- aChan chan int //@item(builtinChan, "aChan", "chan int", "var") +- aPtr *int //@item(builtinPtr, "aPtr", "*int", "var") +- aInt int //@item(builtinInt, "aInt", "int", "var") +- ) +- +- type ( +- aSliceType []int //@item(builtinSliceType, "aSliceType", "[]int", "type") +- aChanType chan int //@item(builtinChanType, "aChanType", "chan int", "type") +- aMapType map[string]int //@item(builtinMapType, "aMapType", "map[string]int", "type") +- ) +- +- close() //@rank(")", builtinChan, builtinSlice) +- +- append() //@rank(")", builtinSlice, builtinChan) +- +- var _ []byte = append([]byte(nil), ""...) 
//@rank(") //") +- +- copy() //@rank(")", builtinSlice, builtinChan) +- copy(aSlice, aS) //@rank(")", builtinSlice, builtinString) +- copy(aS, aSlice) //@rank(",", builtinSlice, builtinString) +- +- delete() //@rank(")", builtinMap, builtinChan) +- delete(aMap, aS) //@rank(")", builtinString, builtinSlice) +- +- aMapFunc := func() map[int]int { //@item(builtinMapFunc, "aMapFunc", "func() map[int]int", "var") +- return nil +- } +- delete() //@rank(")", builtinMapFunc, builtinSlice) +- +- len() //@rank(")", builtinSlice, builtinInt),rank(")", builtinMap, builtinInt),rank(")", builtinString, builtinInt),rank(")", builtinArray, builtinInt),rank(")", builtinArrayPtr, builtinPtr),rank(")", builtinChan, builtinInt) +- +- cap() //@rank(")", builtinSlice, builtinMap),rank(")", builtinArray, builtinString),rank(")", builtinArrayPtr, builtinPtr),rank(")", builtinChan, builtinInt) +- +- make() //@rank(")", builtinMapType, int),rank(")", builtinChanType, int),rank(")", builtinSliceType, int),rank(")", builtinMapType, int) +- make(aSliceType, a) //@rank(")", builtinInt, builtinSlice) +- +- type myInt int +- var mi myInt //@item(builtinMyInt, "mi", "myInt", "var") +- make(aSliceType, m) //@snippet(")", builtinMyInt, "mi") +- +- var _ []int = make() //@rank(")", builtinSliceType, builtinMapType) +- +- type myStruct struct{} //@item(builtinStructType, "myStruct", "struct{...}", "struct") +- var _ *myStruct = new() //@rank(")", builtinStructType, int) +- +- for k := range a { //@rank(" {", builtinSlice, builtinInt),rank(" {", builtinString, builtinInt),rank(" {", builtinChan, builtinInt),rank(" {", builtinArray, builtinInt),rank(" {", builtinArrayPtr, builtinInt),rank(" {", builtinMap, builtinInt), +- } +- +- for k, v := range a { //@rank(" {", builtinSlice, builtinChan) +- } +- +- <-a //@rank(" //", builtinChan, builtinInt) +-} +- +--- builtin_types.go -- +-package builtins +- +-func _() { +- var _ []bool //@item(builtinBoolSliceType, "[]bool", "[]bool", "type") +- +- var _ []bool = make() //@rank(")", builtinBoolSliceType, int) +- +- var _ []bool = make([], 0) //@rank(",", bool, int) +- +- var _ [][]bool = make([][], 0) //@rank(",", bool, int) +-} +- +--- builtins.go -- +-package builtins +- +-// Definitions of builtin completion items that are still used in tests. 
+- +-/* bool */ //@item(bool, "bool", "", "type") +-/* complex(r float64, i float64) */ //@item(complex, "complex", "func(r float64, i float64) complex128", "func") +-/* float32 */ //@item(float32, "float32", "", "type") +-/* float64 */ //@item(float64, "float64", "", "type") +-/* imag(c complex128) float64 */ //@item(imag, "imag", "func(c complex128) float64", "func") +-/* int */ //@item(int, "int", "", "type") +-/* iota */ //@item(iota, "iota", "", "const") +-/* string */ //@item(string, "string", "", "type") +-/* true */ //@item(_true, "true", "", "const") +- +--- constants.go -- +-package builtins +- +-func _() { +- const ( +- foo = iota //@complete(" //", iota) +- ) +- +- iota //@complete(" //") +- +- var iota int //@item(iotaVar, "iota", "int", "var") +- +- iota //@complete(" //", iotaVar) +-} +- +-func _() { +- var twoRedUpEnd bool //@item(TRUEVar, "twoRedUpEnd", "bool", "var") +- +- var _ bool = true //@rank(" //", _true, TRUEVar) +-} +diff -urN a/gopls/internal/test/marker/testdata/completion/casesensitive.txt b/gopls/internal/test/marker/testdata/completion/casesensitive.txt +--- a/gopls/internal/test/marker/testdata/completion/casesensitive.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/completion/casesensitive.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,24 +0,0 @@ +-This test exercises the caseSensitive completion matcher. +- +--- flags -- +--ignore_extra_diags +- +--- settings.json -- +-{ +- "completeUnimported": false, +- "matcher": "caseSensitive" +-} +- +--- casesensitive.go -- +-package casesensitive +- +-func _() { +- var lower int //@item(lower, "lower", "int", "var") +- var Upper int //@item(upper, "Upper", "int", "var") +- +- l //@complete(" //", lower) +- U //@complete(" //", upper) +- +- L //@complete(" //") +- u //@complete(" //") +-} +diff -urN a/gopls/internal/test/marker/testdata/completion/cast.txt b/gopls/internal/test/marker/testdata/completion/cast.txt +--- a/gopls/internal/test/marker/testdata/completion/cast.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/completion/cast.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,17 +0,0 @@ +-This test checks completion related to casts. +- +--- flags -- +--ignore_extra_diags +- +--- cast.go -- +-package cast +- +-func _() { +- foo := struct{x int}{x: 1} //@item(x_field, "x", "int", "field") +- _ = float64(foo.x) //@complete("x", x_field) +-} +- +-func _() { +- foo := struct{x int}{x: 1} +- _ = float64(foo. //@complete(" /", x_field) +-} +diff -urN a/gopls/internal/test/marker/testdata/completion/channel.txt b/gopls/internal/test/marker/testdata/completion/channel.txt +--- a/gopls/internal/test/marker/testdata/completion/channel.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/completion/channel.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,36 +0,0 @@ +-This test checks completion related to channels. 
+- +--- flags -- +--ignore_extra_diags +- +--- settings.json -- +-{ +- "completeUnimported": false +-} +- +--- channel.go -- +-package channel +- +-func _() { +- var ( +- aa = "123" //@item(channelAA, "aa", "string", "var") +- ab = 123 //@item(channelAB, "ab", "int", "var") +- ) +- +- { +- type myChan chan int +- var mc myChan +- mc <- a //@complete(" //", channelAB, channelAA) +- } +- +- { +- var ac chan int //@item(channelAC, "ac", "chan int", "var") +- a <- a //@complete(" <-", channelAC, channelAA, channelAB) +- } +- +- { +- var foo chan int //@item(channelFoo, "foo", "chan int", "var") +- wantsInt := func(int) {} //@item(channelWantsInt, "wantsInt", "func(int)", "var") +- wantsInt(<-) //@rank(")", channelFoo, channelAB) +- } +-} +diff -urN a/gopls/internal/test/marker/testdata/completion/comment.txt b/gopls/internal/test/marker/testdata/completion/comment.txt +--- a/gopls/internal/test/marker/testdata/completion/comment.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/completion/comment.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,87 +0,0 @@ +-This test checks behavior of completion within comments. +- +--- flags -- +--ignore_extra_diags +- +--- go.mod -- +-module golang.org/lsptests/comment +- +-go 1.18 +- +--- p.go -- +-package comment_completion +- +-var p bool +- +-//@complete(re"//()") +- +-func _() { +- var a int +- +- switch a { +- case 1: +- //@complete(re"//()") +- _ = a +- } +- +- var b chan int +- select { +- case <-b: +- //@complete(re"//()") +- _ = b +- } +- +- var ( +- //@complete(re"//()") +- _ = a +- ) +-} +- +-// //@complete(" ", variableC) +-var C string //@item(variableC, "C", "string", "var") //@complete(" ", variableC) +- +-// //@complete(" ", constant) +-const Constant = "example" //@item(constant, "Constant", "string", "const") //@complete(" ", constant) +- +-// //@complete(" ", structType, fieldB, fieldA) +-type StructType struct { //@item(structType, "StructType", "struct{...}", "struct") //@complete(" ", structType, fieldA, fieldB) +- // //@complete(" ", fieldA, structType, fieldB) +- A string //@item(fieldA, "A", "string", "field") //@complete(" ", fieldA, structType, fieldB) +- b int //@item(fieldB, "b", "int", "field") //@complete(" ", fieldB, structType, fieldA) +-} +- +-// //@complete(" ", method, structRecv, paramX, resultY, fieldB, fieldA) +-func (structType *StructType) Method(X int) (Y int) { //@item(structRecv, "structType", "*StructType", "var"),item(method, "Method", "func(X int) (Y int)", "method"),item(paramX, "X", "int", "var"),item(resultY, "Y", "int", "var") +- // //@complete(" ", method, structRecv, paramX, resultY, fieldB, fieldA) +- return +-} +- +-// //@complete(" ", newType) +-type NewType string //@item(newType, "NewType", "string", "type") //@complete(" ", newType) +- +-// //@complete(" ", testInterface, testA, testB) +-type TestInterface interface { //@item(testInterface, "TestInterface", "interface{...}", "interface") +- // //@complete(" ", testA, testInterface, testB) +- TestA(L string) (M int) //@item(testA, "TestA", "func(L string) (M int)", "method"),item(paramL, "L", "var", "string"),item(resM, "M", "var", "int") //@complete(" ", testA, testInterface, testB) +- TestB(N int) bool //@item(testB, "TestB", "func(N int) bool", "method"),item(paramN, "N", "var", "int") //@complete(" ", testB, testInterface, testA) +-} +- +-// //@complete(" ", function) +-func Function() int { //@item(function, "Function", "func() int", "func") //@complete(" ", function) +- // //@complete(" ", function) +- return 0 
+-} +- +-// This tests multiline block comments and completion with prefix +-// Lorem Ipsum Multili//@complete("Multi", multiline) +-// Lorem ipsum dolor sit ametom +-func Multiline() int { //@item(multiline, "Multiline", "func() int", "func") +- // //@complete(" ", multiline) +- return 0 +-} +- +-// This test checks that gopls does not panic if the receiver is syntactically +-// present but empty. +-// +-// //@complete(" ") +-func () _() {} +diff -urN a/gopls/internal/test/marker/testdata/completion/complit.txt b/gopls/internal/test/marker/testdata/completion/complit.txt +--- a/gopls/internal/test/marker/testdata/completion/complit.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/completion/complit.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,104 +0,0 @@ +-This test checks completion related to composite literals. +- +--- flags -- +--ignore_extra_diags +- +--- settings.json -- +-{ +- "completeUnimported": false +-} +- +--- complit.go -- +-package complit +- +-// Literal completion results. +-/* int() */ //@item(int, "int()", "int", "var") +- +-// general completions +- +-type position struct { //@item(structPosition, "position", "struct{...}", "struct") +- X, Y int //@item(fieldX, "X", "int", "field"),item(fieldY, "Y", "int", "field") +-} +- +-func _() { +- _ = position{ +- //@complete("", fieldX, fieldY, int, structPosition) +- } +- _ = position{ +- X: 1, +- //@complete("", fieldY) +- } +- _ = position{ +- //@complete("", fieldX) +- Y: 1, +- } +- _ = []*position{ +- { +- //@complete("", fieldX, fieldY, int, structPosition) +- }, +- } +-} +- +-func _() { +- var ( +- aa string //@item(aaVar, "aa", "string", "var") +- ab int //@item(abVar, "ab", "int", "var") +- ) +- +- _ = map[int]int{ +- a: a, //@complete(":", abVar, aaVar),complete(",", abVar, aaVar) +- } +- +- _ = map[int]int{ +- //@complete("", abVar, int, aaVar, structPosition) +- } +- +- _ = []string{a: ""} //@complete(":", abVar, aaVar) +- _ = [1]string{a: ""} //@complete(":", abVar, aaVar) +- +- _ = position{X: a} //@complete("}", abVar, aaVar) +- _ = position{a} //@complete("}", abVar, aaVar) +- _ = position{a, } //@complete("}", abVar, int, aaVar, structPosition) +- +- _ = []int{a} //@complete("}", abVar, aaVar) +- _ = [1]int{a} //@complete("}", abVar, aaVar) +- +- type myStruct struct { +- AA int //@item(fieldAA, "AA", "int", "field") +- AB string //@item(fieldAB, "AB", "string", "field") +- } +- +- _ = myStruct{ +- AB: a, //@complete(",", aaVar, abVar) +- } +- +- var s myStruct +- +- _ = map[int]string{1: "" + s.A} //@complete("}", fieldAB, fieldAA) +- _ = map[int]string{1: (func(i int) string { return "" })(s.A)} //@complete(")}", fieldAA, fieldAB) +- _ = map[int]string{1: func() string { s.A }} //@complete(" }", fieldAA, fieldAB) +- +- _ = position{s.A} //@complete("}", fieldAA, fieldAB) +- +- var X int //@item(varX, "X", "int", "var") +- _ = position{X} //@complete("}", fieldX, varX) +-} +- +-func _() { +- type foo struct{} //@item(complitFoo, "foo", "struct{...}", "struct") +- +- var _ *foo = &fo{} //@snippet("{", complitFoo, "foo") +- var _ *foo = fo{} //@snippet("{", complitFoo, "&foo") +- +- struct { a, b *foo }{ +- a: &fo{}, //@rank("{", complitFoo) +- b: fo{}, //@snippet("{", complitFoo, "&foo") +- } +-} +- +-func _() { +- _ := position{ +- X: 1, //@complete("X", fieldX),complete(" 1", int, structPosition) +- Y: , //@complete(":", fieldY),complete(" ,", int, structPosition) +- } +-} +diff -urN a/gopls/internal/test/marker/testdata/completion/constant.txt 
b/gopls/internal/test/marker/testdata/completion/constant.txt +--- a/gopls/internal/test/marker/testdata/completion/constant.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/completion/constant.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,20 +0,0 @@ +-This test checks completion related to constants. +- +--- flags -- +--ignore_extra_diags +- +--- constant.go -- +-package constant +- +-const x = 1 //@item(constX, "x", "int", "const") +- +-const ( +- a int = iota << 2 //@item(constA, "a", "int", "const") +- b //@item(constB, "b", "int", "const") +- c //@item(constC, "c", "int", "const") +-) +- +-func _() { +- const y = "hi" //@item(constY, "y", "string", "const") +- //@complete("", constY, constA, constB, constC, constX) +-} +diff -urN a/gopls/internal/test/marker/testdata/completion/danglingstmt.txt b/gopls/internal/test/marker/testdata/completion/danglingstmt.txt +--- a/gopls/internal/test/marker/testdata/completion/danglingstmt.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/completion/danglingstmt.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,158 +0,0 @@ +-This test checks that completion works as expected in the presence of +-incomplete statements that may affect parser recovery. +- +--- flags -- +--ignore_extra_diags +- +--- go.mod -- +-module golang.org/lsptests/dangling +- +-go 1.18 +- +--- settings.json -- +-{ +- "completeUnimported": false, +- "deepCompletion": false +-} +- +--- dangling_for.go -- +-package danglingstmt +- +-func _() { +- for bar //@rank(" //", danglingBar) +-} +- +-func bar() bool { //@item(danglingBar, "bar", "func() bool", "func") +- return true +-} +- +--- dangling_for_init.go -- +-package danglingstmt +- +-func _() { +- for i := bar //@rank(" //", danglingBar2) +-} +- +-func bar2() int { //@item(danglingBar2, "bar2", "func() int", "func") +- return 0 +-} +- +--- dangling_for_init_cond.go -- +-package danglingstmt +- +-func _() { +- for i := bar3(); i > bar //@rank(" //", danglingBar3) +-} +- +-func bar3() int { //@item(danglingBar3, "bar3", "func() int", "func") +- return 0 +-} +- +--- dangling_for_init_cond_post.go -- +-package danglingstmt +- +-func _() { +- for i := bar4(); i > bar4(); i += bar //@rank(" //", danglingBar4) +-} +- +-func bar4() int { //@item(danglingBar4, "bar4", "func() int", "func") +- return 0 +-} +- +--- dangling_if.go -- +-package danglingstmt +- +-func _() { +- if foo //@rank(" //", danglingFoo) +-} +- +-func foo() bool { //@item(danglingFoo, "foo", "func() bool", "func") +- return true +-} +- +--- dangling_if_eof.go -- +-package danglingstmt +- +-func bar5() bool { //@item(danglingBar5, "bar5", "func() bool", "func") +- return true +-} +- +-func _() { +- if b //@rank(" //", danglingBar5) +- +--- dangling_if_init.go -- +-package danglingstmt +- +-func _() { +- if i := foo //@rank(" //", danglingFoo2) +-} +- +-func foo2() bool { //@item(danglingFoo2, "foo2", "func() bool", "func") +- return true +-} +- +--- dangling_if_init_cond.go -- +-package danglingstmt +- +-func _() { +- if i := 123; foo //@rank(" //", danglingFoo3) +-} +- +-func foo3() bool { //@item(danglingFoo3, "foo3", "func() bool", "func") +- return true +-} +- +--- dangling_multiline_if.go -- +-package danglingstmt +- +-func walrus() bool { //@item(danglingWalrus, "walrus", "func() bool", "func") +- return true +-} +- +-func _() { +- if true && +- walrus //@complete(" //", danglingWalrus) +-} +- +--- dangling_selector_1.go -- +-package danglingstmt +- +-func _() { +- x. 
//@rank(" //", danglingI) +-} +- +-var x struct { i int } //@item(danglingI, "i", "int", "field") +- +--- dangling_selector_2.go -- +-package danglingstmt +- +-// TODO: re-enable this test, which was broken when the foo package was removed. +-// (we can replicate the relevant definitions in the new marker test) +-// import "golang.org/lsptests/foo" +- +-func _() { +- foo. // rank(" //", Foo) +- var _ = []string{foo.} // rank("}", Foo) +-} +- +--- dangling_switch_init.go -- +-package danglingstmt +- +-func _() { +- switch i := baz //@rank(" //", danglingBaz) +-} +- +-func baz() int { //@item(danglingBaz, "baz", "func() int", "func") +- return 0 +-} +- +--- dangling_switch_init_tag.go -- +-package danglingstmt +- +-func _() { +- switch i := 0; baz //@rank(" //", danglingBaz2) +-} +- +-func baz2() int { //@item(danglingBaz2, "baz2", "func() int", "func") +- return 0 +-} +diff -urN a/gopls/internal/test/marker/testdata/completion/deep2.txt b/gopls/internal/test/marker/testdata/completion/deep2.txt +--- a/gopls/internal/test/marker/testdata/completion/deep2.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/completion/deep2.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,65 +0,0 @@ +-This test exercises deep completion. +- +-It was originally bundled with deep.go, but is split into a separate test as +-the new marker tests do not permit mutating server options for individual +-marks. +- +--- flags -- +--ignore_extra_diags +- +--- go.mod -- +-module golang.org/lsptests +- +-go 1.18 +- +--- deep/deep2.go -- +-package deep +- +-type foo struct { +- b bar +-} +- +-func (f foo) bar() bar { +- return f.b +-} +- +-func (f foo) barPtr() *bar { +- return &f.b +-} +- +-type bar struct{} +- +-func (b bar) valueReceiver() int { +- return 0 +-} +- +-func (b *bar) ptrReceiver() int { +- return 0 +-} +- +-func _() { +- var ( +- i int +- f foo +- ) +- +- f.bar().valueReceiver //@item(deepBarValue, "f.bar().valueReceiver", "func() int", "method") +- f.barPtr().ptrReceiver //@item(deepBarPtrPtr, "f.barPtr().ptrReceiver", "func() int", "method") +- f.barPtr().valueReceiver //@item(deepBarPtrValue, "f.barPtr().valueReceiver", "func() int", "method") +- +- i = fbar //@complete(" //", deepBarValue, deepBarPtrPtr, deepBarPtrValue) +-} +- +-func (b baz) Thing() struct{ val int } { +- return b.thing +-} +- +-type baz struct { +- thing struct{ val int } +-} +- +-func (b baz) _() { +- b.Thing().val //@item(deepBazMethVal, "b.Thing().val", "int", "field") +- b.thing.val //@item(deepBazFieldVal, "b.thing.val", "int", "field") +- var _ int = bval //@rank(" //", deepBazFieldVal, deepBazMethVal) +-} +diff -urN a/gopls/internal/test/marker/testdata/completion/deep.txt b/gopls/internal/test/marker/testdata/completion/deep.txt +--- a/gopls/internal/test/marker/testdata/completion/deep.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/completion/deep.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,110 +0,0 @@ +-This test exercises deep completion. 
+- +--- settings.json -- +-{ +- "completeUnimported": false, +- "matcher": "caseInsensitive" +-} +- +--- flags -- +--ignore_extra_diags +- +--- go.mod -- +-module golang.org/lsptests +- +-go 1.18 +- +--- deep/deep.go -- +-package deep +- +-import "context" +- +-type deepA struct { +- b deepB //@item(deepBField, "b", "deepB", "field") +-} +- +-type deepB struct { +-} +- +-func wantsDeepB(deepB) {} +- +-func _() { +- var a deepA //@item(deepAVar, "a", "deepA", "var") +- a.b //@item(deepABField, "a.b", "deepB", "field") +- wantsDeepB(a) //@complete(")", deepABField, deepAVar) +- +- deepA{a} //@snippet("}", deepABField, "a.b") +-} +- +-func wantsContext(context.Context) {} +- +-func _() { +- context.Background() //@item(ctxBackground, "context.Background", "func() context.Context", "func", "Background returns a non-nil, empty Context.") +- context.TODO() //@item(ctxTODO, "context.TODO", "func() context.Context", "func", "TODO returns a non-nil, empty Context.") +- +- wantsContext(c) //@rank(")", ctxBackground),rank(")", ctxTODO) +-} +- +-func _() { +- var cork struct{ err error } +- cork.err //@item(deepCorkErr, "cork.err", "error", "field") +- context //@item(deepContextPkg, "context", "\"context\"", "package") +- var _ error = co // rank(" //", deepCorkErr, deepContextPkg) +-} +- +-func _() { +- // deepCircle is circular. +- type deepCircle struct { +- *deepCircle +- } +- var circle deepCircle //@item(deepCircle, "circle", "deepCircle", "var") +- circle.deepCircle //@item(deepCircleField, "circle.deepCircle", "*deepCircle", "field") +- var _ deepCircle = circ //@complete(" //", deepCircle, deepCircleField),snippet(" //", deepCircleField, "*circle.deepCircle") +-} +- +-func _() { +- type deepEmbedC struct { +- } +- type deepEmbedB struct { +- deepEmbedC +- } +- type deepEmbedA struct { +- deepEmbedB +- } +- +- wantsC := func(deepEmbedC) {} +- +- var a deepEmbedA //@item(deepEmbedA, "a", "deepEmbedA", "var") +- a.deepEmbedB //@item(deepEmbedB, "a.deepEmbedB", "deepEmbedB", "field") +- a.deepEmbedC //@item(deepEmbedC, "a.deepEmbedC", "deepEmbedC", "field") +- wantsC(a) //@complete(")", deepEmbedC, deepEmbedA, deepEmbedB) +-} +- +-func _() { +- type nested struct { +- a int +- n *nested //@item(deepNestedField, "n", "*nested", "field") +- } +- +- nested{ +- a: 123, //@complete(" //", deepNestedField) +- } +-} +- +-func _() { +- var a struct { +- b struct { +- c int +- } +- d int +- } +- +- a.d //@item(deepAD, "a.d", "int", "field") +- a.b.c //@item(deepABC, "a.b.c", "int", "field") +- a.b //@item(deepAB, "a.b", "struct{...}", "field") +- a //@item(deepA, "a", "struct{...}", "var") +- +- // "a.d" should be ranked above the deeper "a.b.c" +- var i int +- i = a //@complete(" //", deepAD, deepABC, deepA, deepAB) +-} +diff -urN a/gopls/internal/test/marker/testdata/completion/errors.txt b/gopls/internal/test/marker/testdata/completion/errors.txt +--- a/gopls/internal/test/marker/testdata/completion/errors.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/completion/errors.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,33 +0,0 @@ +-This test checks completion related to errors. 
+- +--- flags -- +--ignore_extra_diags +- +--- settings.json -- +-{ +- "deepCompletion": false +-} +- +--- go.mod -- +-module golang.org/lsptests +- +-go 1.18 +- +--- errors.go -- +-package errors +- +-import ( +- "golang.org/lsptests/types" +-) +- +-func _() { +- bob.Bob() //@complete(".") +- types.b //@complete(" //", Bob_interface) +-} +- +--- types/types.go -- +-package types +- +-type Bob interface { //@item(Bob_interface, "Bob", "interface{...}", "interface") +- Bobby() +-} +diff -urN a/gopls/internal/test/marker/testdata/completion/field_list.txt b/gopls/internal/test/marker/testdata/completion/field_list.txt +--- a/gopls/internal/test/marker/testdata/completion/field_list.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/completion/field_list.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,38 +0,0 @@ +-This test checks completion related to field lists. +- +--- flags -- +--ignore_extra_diags +- +--- settings.json -- +-{ +- "completeUnimported": false +-} +- +--- field_list.go -- +-package fieldlist +- +-var myInt int //@item(flVar, "myInt", "int", "var") +-type myType int //@item(flType, "myType", "int", "type") +- +-func (my) _() {} //@complete(") _", flType) +-func (my my) _() {} //@complete(" my)"),complete(") _", flType) +- +-func (myType) _() {} //@complete(") {", flType) +- +-func (myType) _(my my) {} //@complete(" my)"),complete(") {", flType) +- +-func (myType) _() my {} //@complete(" {", flType) +- +-func (myType) _() (my my) {} //@complete(" my"),complete(") {", flType) +- +-func _() { +- var _ struct { +- //@complete("", flType) +- m my //@complete(" my"),complete(" //", flType) +- } +- +- var _ interface { +- //@complete("", flType) +- m() my //@complete("("),complete(" //", flType) +- } +-} +diff -urN a/gopls/internal/test/marker/testdata/completion/foobarbaz.txt b/gopls/internal/test/marker/testdata/completion/foobarbaz.txt +--- a/gopls/internal/test/marker/testdata/completion/foobarbaz.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/completion/foobarbaz.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,540 +0,0 @@ +-This test ports some arbitrary tests from the old marker framework, that were +-*mostly* about completion. +- +--- flags -- +--ignore_extra_diags +- +--- settings.json -- +-{ +- "completeUnimported": false, +- "deepCompletion": false, +- "experimentalPostfixCompletions": false +-} +- +--- go.mod -- +-module foobar.test +- +-go 1.18 +- +--- foo/foo.go -- +-package foo //@loc(PackageFoo, "foo"),item(PackageFooItem, "foo", "\"foobar.test/foo\"", "package") +- +-type StructFoo struct { //@loc(StructFooLoc, "StructFoo"), item(StructFoo, "StructFoo", "struct{...}", "struct") +- Value int //@item(Value, "Value", "int", "field") +-} +- +-// Pre-set this marker, as we don't have a "source" for it in this package. 
+-/* Error() */ //@item(Error, "Error", "func() string", "method") +- +-func Foo() { //@item(Foo, "Foo", "func()", "func") +- var err error +- err.Error() //@complete("E", Error) +-} +- +-func _() { +- var sFoo StructFoo //@complete("t", StructFoo) +- if x := sFoo; x.Value == 1 { //@complete("V", Value), typedef("sFoo", StructFooLoc) +- return +- } +-} +- +-func _() { +- shadowed := 123 +- { +- shadowed := "hi" //@item(shadowed, "shadowed", "string", "var") +- sha //@complete("a", shadowed), diag("sha", re"(undefined|undeclared)") +- _ = shadowed +- } +-} +- +-type IntFoo int //@loc(IntFooLoc, "IntFoo"), item(IntFoo, "IntFoo", "int", "type") +- +--- bar/bar.go -- +-package bar +- +-import ( +- "foobar.test/foo" //@item(foo, "foo", "\"foobar.test/foo\"", "package") +-) +- +-func helper(i foo.IntFoo) {} //@item(helper, "helper", "func(i foo.IntFoo)", "func") +- +-func _() { +- help //@complete("l", helper) +- _ = foo.StructFoo{} //@complete("S", IntFoo, StructFoo) +-} +- +-// Bar is a function. +-func Bar() { //@item(Bar, "Bar", "func()", "func", "Bar is a function.") +- foo.Foo() //@complete("F", Foo, IntFoo, StructFoo) +- var _ foo.IntFoo //@complete("I", IntFoo, StructFoo) +- foo.() //@complete("(", Foo, IntFoo, StructFoo), diag(")", re"expected type") +-} +- +-// These items weren't present in the old marker tests (due to settings), but +-// we may as well include them. +-//@item(intConversion, "int()"), item(fooFoo, "foo.Foo") +-//@item(fooIntFoo, "foo.IntFoo"), item(fooStructFoo, "foo.StructFoo") +- +-func _() { +- var Valentine int //@item(Valentine, "Valentine", "int", "var") +- +- _ = foo.StructFoo{ //@diag("foo", re"unkeyed fields") +- Valu //@complete(" //", Value) +- } +- _ = foo.StructFoo{ //@diag("foo", re"unkeyed fields") +- Va //@complete("a", Value, Valentine) +- +- } +- _ = foo.StructFoo{ +- Value: 5, //@complete("a", Value) +- } +- _ = foo.StructFoo{ +- //@complete("//", Value, Valentine, intConversion, foo, helper, Bar) +- } +- _ = foo.StructFoo{ +- Value: Valen //@complete("le", Valentine) +- } +- _ = foo.StructFoo{ +- Value: //@complete(" //", Valentine, intConversion, foo, helper, Bar) +- } +- _ = foo.StructFoo{ +- Value: //@complete(" ", Valentine, intConversion, foo, helper, Bar) +- } +-} +- +--- baz/baz.go -- +-package baz +- +-import ( +- "foobar.test/bar" +- +- f "foobar.test/foo" +-) +- +-var FooStruct f.StructFoo +- +-func Baz() { +- defer bar.Bar() //@complete("B", Bar) +- // TODO: Test completion here. +- defer bar.B //@diag(re"bar.B()", re"must be function call") +- var x f.IntFoo //@complete("n", IntFoo), typedef("x", IntFooLoc) +- bar.Bar() //@complete("B", Bar) +-} +- +-func _() { +- bob := f.StructFoo{Value: 5} +- if x := bob. //@complete(" //", Value) +- switch true == false { +- case true: +- if x := bob. 
//@complete(" //", Value) +- case false: +- } +- if x := bob.Va //@complete("a", Value) +- switch true == true { +- default: +- } +-} +- +--- arraytype/arraytype.go -- +-package arraytype +- +-import ( +- "foobar.test/foo" +-) +- +-func _() { +- var ( +- val string //@item(atVal, "val", "string", "var") +- ) +- +- [] //@complete(" //", atVal, PackageFooItem) +- +- []val //@complete(" //") +- +- []foo.StructFoo //@complete(" //", StructFoo) +- +- []foo.StructFoo(nil) //@complete("(", StructFoo) +- +- []*foo.StructFoo //@complete(" //", StructFoo) +- +- [...]foo.StructFoo //@complete(" //", StructFoo) +- +- [2][][4]foo.StructFoo //@complete(" //", StructFoo) +- +- []struct { f []foo.StructFoo } //@complete(" }", StructFoo) +-} +- +-func _() { +- type myInt int //@item(atMyInt, "myInt", "int", "type") +- +- var mark []myInt //@item(atMark, "mark", "[]myInt", "var") +- +- var s []myInt //@item(atS, "s", "[]myInt", "var") +- s = []m //@complete(" //", atMyInt) +- +- var a [1]myInt +- a = [1]m //@complete(" //", atMyInt) +- +- var ds [][]myInt +- ds = [][]m //@complete(" //", atMyInt) +-} +- +-func _() { +- var b [0]byte //@item(atByte, "b", "[0]byte", "var") +- var _ []byte = b //@snippet(" //", atByte, "b[:]") +-} +- +--- badstmt/badstmt.go -- +-package badstmt +- +-import ( +- "foobar.test/foo" +-) +- +-// (The syntax error causes suppression of diagnostics for type errors. +-// See issue #59888.) +- +-func _(x int) { +- defer foo.F //@complete(" //", Foo, IntFoo, StructFoo) +- defer foo.F //@complete(" //", Foo, IntFoo, StructFoo) +-} +- +-func _() { +- switch true { +- case true: +- go foo.F //@complete(" //", Foo, IntFoo, StructFoo) +- } +-} +- +-func _() { +- defer func() { +- foo.F //@complete(" //", Foo, IntFoo, StructFoo), snippet(" //", Foo, "Foo()") +- +- foo. //@rank(" //", Foo) +- } +-} +- +--- badstmt/badstmt_2.go -- +-package badstmt +- +-import ( +- "foobar.test/foo" +-) +- +-func _() { +- defer func() { foo. } //@rank(" }", Foo) +-} +- +--- badstmt/badstmt_3.go -- +-package badstmt +- +-import ( +- "foobar.test/foo" +-) +- +-func _() { +- go foo. //@rank(" //", Foo, IntFoo), snippet(" //", Foo, "Foo()") +-} +- +--- badstmt/badstmt_4.go -- +-package badstmt +- +-import ( +- "foobar.test/foo" +-) +- +-func _() { +- go func() { +- defer foo. //@rank(" //", Foo, IntFoo) +- } +-} +- +--- selector/selector.go -- +-package selector +- +-import ( +- "foobar.test/bar" +-) +- +-type S struct { +- B, A, C int //@item(Bf, "B", "int", "field"),item(Af, "A", "int", "field"),item(Cf, "C", "int", "field") +-} +- +-func _() { +- _ = S{}.; //@complete(";", Af, Bf, Cf) +-} +- +-type bob struct { a int } //@item(a, "a", "int", "field") +-type george struct { b int } +-type jack struct { c int } //@item(c, "c", "int", "field") +-type jill struct { d int } +- +-func (b *bob) george() *george {} //@item(george, "george", "func() *george", "method") +-func (g *george) jack() *jack {} +-func (j *jack) jill() *jill {} //@item(jill, "jill", "func() *jill", "method") +- +-func _() { +- b := &bob{} +- y := b.george(). +- jack(); +- y.; //@complete(";", c, jill) +-} +- +-func _() { +- bar. //@complete(" /", Bar) +- x := 5 +- +- var b *bob +- b. //@complete(" /", a, george) +- y, z := 5, 6 +- +- b. //@complete(" /", a, george) +- y, z, a, b, c := 5, 6 +-} +- +-func _() { +- bar. //@complete(" /", Bar) +- bar.Bar() +- +- bar. //@complete(" /", Bar) +- go f() +-} +- +-func _() { +- var b *bob +- if y != b. //@complete(" /", a, george) +- z := 5 +- +- if z + y + 1 + b. 
//@complete(" /", a, george) +- r, s, t := 4, 5 +- +- if y != b. //@complete(" /", a, george) +- z = 5 +- +- if z + y + 1 + b. //@complete(" /", a, george) +- r = 4 +-} +- +--- literal_snippets/literal_snippets.go -- +-package literal_snippets +- +-import ( +- "bytes" +- "context" +- "go/ast" +- "net/http" +- "sort" +- +- "golang.org/lsptests/foo" +-) +- +-func _() { +- []int{} //@item(litIntSlice, "[]int{}", "", "var") +- &[]int{} //@item(litIntSliceAddr, "&[]int{}", "", "var") +- make([]int, 0) //@item(makeIntSlice, "make([]int, 0)", "", "func") +- +- var _ *[]int = in //@snippet(" //", litIntSliceAddr, "&[]int{$0\\}") +- var _ **[]int = in //@complete(" //") +- +- var slice []int +- slice = i //@snippet(" //", litIntSlice, "[]int{$0\\}") +- slice = m //@snippet(" //", makeIntSlice, "make([]int, ${1:})") +-} +- +-func _() { +- type namedInt []int +- +- namedInt{} //@item(litNamedSlice, "namedInt{}", "", "var") +- make(namedInt, 0) //@item(makeNamedSlice, "make(namedInt, 0)", "", "func") +- +- var namedSlice namedInt +- namedSlice = n //@snippet(" //", litNamedSlice, "namedInt{$0\\}") +- namedSlice = m //@snippet(" //", makeNamedSlice, "make(namedInt, ${1:})") +-} +- +-func _() { +- make(chan int) //@item(makeChan, "make(chan int)", "", "func") +- +- var ch chan int +- ch = m //@snippet(" //", makeChan, "make(chan int)") +-} +- +-func _() { +- map[string]struct{}{} //@item(litMap, "map[string]struct{}{}", "", "var") +- make(map[string]struct{}) //@item(makeMap, "make(map[string]struct{})", "", "func") +- +- var m map[string]struct{} +- m = m //@snippet(" //", litMap, "map[string]struct{\\}{$0\\}") +- m = m //@snippet(" //", makeMap, "make(map[string]struct{\\})") +- +- struct{}{} //@item(litEmptyStruct, "struct{}{}", "", "var") +- +- m["hi"] = s //@snippet(" //", litEmptyStruct, "struct{\\}{\\}") +-} +- +-func _() { +- type myStruct struct{ i int } //@item(myStructType, "myStruct", "struct{...}", "struct") +- +- myStruct{} //@item(litStruct, "myStruct{}", "", "var") +- &myStruct{} //@item(litStructPtr, "&myStruct{}", "", "var") +- +- var ms myStruct +- ms = m //@snippet(" //", litStruct, "myStruct{$0\\}") +- +- var msPtr *myStruct +- msPtr = m //@snippet(" //", litStructPtr, "&myStruct{$0\\}") +- +- msPtr = &m //@snippet(" //", litStruct, "myStruct{$0\\}") +- +- type myStructCopy struct { i int } //@item(myStructCopyType, "myStructCopy", "struct{...}", "struct") +- +- // Don't offer literal completion for convertible structs. 
+- ms = myStruct //@complete(" //", litStruct, myStructType, myStructCopyType) +-} +- +-type myImpl struct{} +- +-func (myImpl) foo() {} +- +-func (*myImpl) bar() {} +- +-type myBasicImpl string +- +-func (myBasicImpl) foo() {} +- +-func _() { +- type myIntf interface { +- foo() +- } +- +- myImpl{} //@item(litImpl, "myImpl{}", "", "var") +- +- var mi myIntf +- mi = m //@snippet(" //", litImpl, "myImpl{\\}") +- +- myBasicImpl() //@item(litBasicImpl, "myBasicImpl()", "string", "var") +- +- mi = m //@snippet(" //", litBasicImpl, "myBasicImpl($0)") +- +- // only satisfied by pointer to myImpl +- type myPtrIntf interface { +- bar() +- } +- +- &myImpl{} //@item(litImplPtr, "&myImpl{}", "", "var") +- +- var mpi myPtrIntf +- mpi = m //@snippet(" //", litImplPtr, "&myImpl{\\}") +-} +- +-func _() { +- var s struct{ i []int } //@item(litSliceField, "i", "[]int", "field") +- var foo []int +- // no literal completions after selector +- foo = s.i //@complete(" //", litSliceField) +-} +- +-func _() { +- type myStruct struct{ i int } //@item(litMyStructType, "myStruct", "struct{...}", "struct") +- myStruct{} //@item(litMyStruct, "myStruct{}", "", "var") +- +- foo := func(s string, args ...myStruct) {} +- // Don't give literal slice candidate for variadic arg. +- // Do give literal candidates for variadic element. +- foo("", myStruct) //@complete(")", litMyStruct, litMyStructType) +-} +- +-func _() { +- Buffer{} //@item(litBuffer, "Buffer{}", "", "var") +- +- var b *bytes.Buffer +- b = bytes.Bu //@snippet(" //", litBuffer, "Buffer{\\}") +-} +- +-func _() { +- _ = "func(...) {}" //@item(litFunc, "func(...) {}", "", "var") +- +- // no literal "func" completions +- http.Handle("", fun) //@complete(")") +- +- var namedReturn func(s string) (b bool) +- namedReturn = f //@snippet(" //", litFunc, "func(s string) (b bool) {$0\\}") +- +- var multiReturn func() (bool, int) +- multiReturn = f //@snippet(" //", litFunc, "func() (bool, int) {$0\\}") +- +- var multiNamedReturn func() (b bool, i int) +- multiNamedReturn = f //@snippet(" //", litFunc, "func() (b bool, i int) {$0\\}") +- +- var duplicateParams func(myImpl, int, myImpl) +- duplicateParams = f //@snippet(" //", litFunc, "func(mi1 myImpl, i int, mi2 myImpl) {$0\\}") +- +- type aliasImpl = myImpl +- var aliasParams func(aliasImpl) aliasImpl +- aliasParams = f //@snippet(" //", litFunc, "func(ai aliasImpl) aliasImpl {$0\\}") +- +- const two = 2 +- var builtinTypes func([]int, [two]bool, map[string]string, struct{ i int }, interface{ foo() }, <-chan int) +- builtinTypes = f //@snippet(" //", litFunc, "func(i1 []int, b [2]bool, m map[string]string, s struct{i int\\}, i2 interface{foo()\\}, c <-chan int) {$0\\}") +- +- var _ func(ast.Node) = f //@snippet(" //", litFunc, "func(n ast.Node) {$0\\}") +- var _ func(error) = f //@snippet(" //", litFunc, "func(err error) {$0\\}") +- var _ func(context.Context) = f //@snippet(" //", litFunc, "func(ctx context.Context) {$0\\}") +- +- type context struct {} +- var _ func(context) = f //@snippet(" //", litFunc, "func(ctx context) {$0\\}") +-} +- +-func _() { +- float64() //@item(litFloat64, "float64()", "float64", "var") +- +- // don't complete to "&float64()" +- var _ *float64 = float64 //@complete(" //") +- +- var f float64 +- f = fl //@complete(" //", litFloat64),snippet(" //", litFloat64, "float64($0)") +- +- type myInt int +- myInt() //@item(litMyInt, "myInt()", "", "var") +- +- var mi myInt +- mi = my //@snippet(" //", litMyInt, "myInt($0)") +-} +- +-func _() { +- type ptrStruct struct { +- p *ptrStruct +- } +- +- 
ptrStruct{} //@item(litPtrStruct, "ptrStruct{}", "", "var") +- +- ptrStruct{ +- p: &ptrSt, //@rank(",", litPtrStruct) +- } +- +- &ptrStruct{} //@item(litPtrStructPtr, "&ptrStruct{}", "", "var") +- +- &ptrStruct{ +- p: ptrSt, //@rank(",", litPtrStructPtr) +- } +-} +- +-func _() { +- f := func(...[]int) {} +- f() //@snippet(")", litIntSlice, "[]int{$0\\}") +-} +- +- +-func _() { +- // don't complete to "untyped int()" +- []int{}[untyped] //@complete("] //") +-} +- +-type Tree[T any] struct{} +- +-func (tree Tree[T]) Do(f func(s T)) {} +- +-func _() { +- var t Tree[string] +- t.Do(fun) //@complete(")", litFunc), snippet(")", litFunc, "func(s string) {$0\\}") +-} +diff -urN a/gopls/internal/test/marker/testdata/completion/func_rank.txt b/gopls/internal/test/marker/testdata/completion/func_rank.txt +--- a/gopls/internal/test/marker/testdata/completion/func_rank.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/completion/func_rank.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,83 +0,0 @@ +-This test checks various ranking of completion results within function call +-context. +- +--- flags -- +--ignore_extra_diags +- +--- settings.json -- +-{ +- "completeUnimported": false, +- "deepCompletion": false +-} +- +--- func_rank.go -- +-package func_rank +- +-import "net/http" +- +-var stringAVar = "var" //@item(stringAVar, "stringAVar", "string", "var") +-func stringBFunc() string { return "str" } //@item(stringBFunc, "stringBFunc", "func() string", "func") +-type stringer struct{} //@item(stringer, "stringer", "struct{...}", "struct") +- +-func _() stringer //@complete("tr", stringer) +- +-func _(val stringer) {} //@complete("tr", stringer) +- +-func (stringer) _() {} //@complete("tr", stringer) +- +-func _() { +- var s struct { +- AA int //@item(rankAA, "AA", "int", "field") +- AB string //@item(rankAB, "AB", "string", "field") +- AC int //@item(rankAC, "AC", "int", "field") +- } +- fnStr := func(string) {} +- fnStr(s.A) //@complete(")", rankAB, rankAA, rankAC) +- fnStr("" + s.A) //@complete(")", rankAB, rankAA, rankAC) +- +- fnInt := func(int) {} +- fnInt(-s.A) //@complete(")", rankAA, rankAC, rankAB) +- +- // no expected type +- fnInt(func() int { s.A }) //@complete(" }", rankAA, rankAB, rankAC) +- fnInt(s.A()) //@complete("()", rankAA, rankAC, rankAB) +- fnInt([]int{}[s.A]) //@complete("])", rankAA, rankAC, rankAB) +- fnInt([]int{}[:s.A]) //@complete("])", rankAA, rankAC, rankAB) +- +- fnInt(s.A.(int)) //@complete(".(", rankAA, rankAC, rankAB) +- +- fnPtr := func(*string) {} +- fnPtr(&s.A) //@complete(")", rankAB, rankAA, rankAC) +- +- var aaPtr *string //@item(rankAAPtr, "aaPtr", "*string", "var") +- var abPtr *int //@item(rankABPtr, "abPtr", "*int", "var") +- fnInt(*a) //@complete(")", rankABPtr, rankAAPtr, stringAVar) +- +- _ = func() string { +- return s.A //@complete(" //", rankAB, rankAA, rankAC) +- } +-} +- +-type foo struct { +- fooPrivateField int //@item(rankFooPrivField, "fooPrivateField", "int", "field") +- FooPublicField int //@item(rankFooPubField, "FooPublicField", "int", "field") +-} +- +-func (foo) fooPrivateMethod() int { //@item(rankFooPrivMeth, "fooPrivateMethod", "func() int", "method") +- return 0 +-} +- +-func (foo) FooPublicMethod() int { //@item(rankFooPubMeth, "FooPublicMethod", "func() int", "method") +- return 0 +-} +- +-func _() { +- var _ int = foo{}. 
//@rank(" //", rankFooPrivField, rankFooPubField),rank(" //", rankFooPrivMeth, rankFooPubMeth),rank(" //", rankFooPrivField, rankFooPrivMeth) +-} +- +-func _() { +- HandleFunc //@item(httpHandleFunc, "HandleFunc", "func(pattern string, handler func(http.ResponseWriter, *http.Request))", "func") +- HandlerFunc //@item(httpHandlerFunc, "HandlerFunc", "func(http.ResponseWriter, *http.Request)", "type") +- +- http.HandleFunc //@rank(" //", httpHandleFunc, httpHandlerFunc) +-} +diff -urN a/gopls/internal/test/marker/testdata/completion/func_sig.txt b/gopls/internal/test/marker/testdata/completion/func_sig.txt +--- a/gopls/internal/test/marker/testdata/completion/func_sig.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/completion/func_sig.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,15 +0,0 @@ +-This test checks completion related to function signatures. +- +--- flags -- +--ignore_extra_diags +- +--- func_sig.go -- +-package funcsig +- +-type someType int //@item(sigSomeType, "someType", "int", "type") +- +-// Don't complete "foo" in signature. +-func (foo someType) _() { //@item(sigFoo, "foo", "someType", "var"),complete(") {", sigSomeType) +- +- //@complete("", sigFoo, sigSomeType) +-} +diff -urN a/gopls/internal/test/marker/testdata/completion/func_snippets.txt b/gopls/internal/test/marker/testdata/completion/func_snippets.txt +--- a/gopls/internal/test/marker/testdata/completion/func_snippets.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/completion/func_snippets.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,32 +0,0 @@ +-This test exercises function snippets using generics. +- +--- flags -- +--ignore_extra_diags +- +--- settings.json -- +-{ +- "usePlaceholders": true +-} +- +--- go.mod -- +-module golang.org/lsptests/snippets +- +-go 1.18 +- +--- funcsnippets.go -- +-package snippets +- +-type SyncMap[K comparable, V any] struct{} +- +-func NewSyncMap[K comparable, V any]() (result *SyncMap[K, V]) { //@item(NewSyncMap, "NewSyncMap", "", "") +- return +-} +- +-func Identity[P ~int](p P) P { //@item(Identity, "Identity", "", "") +- return p +-} +- +-func _() { +- _ = NewSyncM //@snippet(" //", NewSyncMap, "NewSyncMap[${1:K comparable}, ${2:V any}]()") +- _ = Identi //@snippet(" //", Identity, "Identity(${1:p P})") +-} +diff -urN a/gopls/internal/test/marker/testdata/completion/func_value.txt b/gopls/internal/test/marker/testdata/completion/func_value.txt +--- a/gopls/internal/test/marker/testdata/completion/func_value.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/completion/func_value.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,61 +0,0 @@ +-This test checks completion related to function values. 
+- +--- flags -- +--ignore_extra_diags +- +--- func_value.go -- +-package funcvalue +- +-func fooFunc() int { //@item(fvFooFunc, "fooFunc", "func() int", "func") +- return 0 +-} +- +-var _ = fooFunc() //@item(fvFooFuncCall, "fooFunc", "func() int", "func") +- +-var fooVar = func() int { //@item(fvFooVar, "fooVar", "func() int", "var") +- return 0 +-} +- +-var _ = fooVar() //@item(fvFooVarCall, "fooVar", "func() int", "var") +- +-type myFunc func() int +- +-var fooType myFunc = fooVar //@item(fvFooType, "fooType", "myFunc", "var") +- +-var _ = fooType() //@item(fvFooTypeCall, "fooType", "func() int", "var") +- +-func _() { +- var f func() int +- f = foo //@complete(" //", fvFooFunc, fvFooType, fvFooVar) +- +- var i int +- i = foo //@complete(" //", fvFooFuncCall, fvFooTypeCall, fvFooVarCall) +-} +- +--- generic/func_value.go -- +-package funcvalue +- +-type bar struct{} +- +-func (b bar) Num() int { +- return 0 +-} +- +-func Bar[T any]() bar { +- return bar{} +-} +- +-func BarWithArg[T any](a int) bar { +- return bar{} +-} +- +-func (b bar) Bar2() bar { +- return b +-} +- +-func _() { +- Bar[T].Num //@item(bar, "Bar[T]().Num", "func() int", "method") +- Bar[T].Bar2().Num //@item(bar2, "Bar[T]().Bar2().Num", "func() int", "method") +- var i int +- i = Num //@complete(" //", bar, bar2) +-} +diff -urN a/gopls/internal/test/marker/testdata/completion/fuzzy.txt b/gopls/internal/test/marker/testdata/completion/fuzzy.txt +--- a/gopls/internal/test/marker/testdata/completion/fuzzy.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/completion/fuzzy.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,55 +0,0 @@ +-This test exercises fuzzy completion matching. +- +--- flags -- +--ignore_extra_diags +- +--- go.mod -- +-module golang.org/lsptests +- +-go 1.18 +- +--- fuzzy/fuzzy.go -- +-package fuzzy +- +-func _() { +- var a struct { +- fabar int +- fooBar string +- } +- +- a.fabar //@item(fuzzFabarField, "a.fabar", "int", "field") +- a.fooBar //@item(fuzzFooBarField, "a.fooBar", "string", "field") +- +- afa //@complete(" //", fuzzFabarField, fuzzFooBarField) +- afb //@complete(" //", fuzzFooBarField, fuzzFabarField) +- +- fab //@complete(" //", fuzzFabarField) +- +- var myString string +- myString = af //@complete(" //", fuzzFooBarField, fuzzFabarField) +- +- var b struct { +- c struct { +- d struct { +- e struct { +- abc string +- } +- abc float32 +- } +- abc bool +- } +- abc int +- } +- +- b.abc //@item(fuzzABCInt, "b.abc", "int", "field") +- b.c.abc //@item(fuzzABCbool, "b.c.abc", "bool", "field") +- b.c.d.abc //@item(fuzzABCfloat, "b.c.d.abc", "float32", "field") +- b.c.d.e.abc //@item(fuzzABCstring, "b.c.d.e.abc", "string", "field") +- +- // in depth order by default +- abc //@complete(" //", fuzzABCInt, fuzzABCbool, fuzzABCfloat) +- +- // deep candidate that matches expected type should still ranked first +- var s string +- s = abc //@complete(" //", fuzzABCstring, fuzzABCInt, fuzzABCbool) +-} +diff -urN a/gopls/internal/test/marker/testdata/completion/imported-std.txt b/gopls/internal/test/marker/testdata/completion/imported-std.txt +--- a/gopls/internal/test/marker/testdata/completion/imported-std.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/completion/imported-std.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,62 +0,0 @@ +-Test of imported completions respecting the effective Go version of the file. +- +-(See "un-" prefixed file for same test of unimported completions.) 
+- +-These symbols below were introduced to go/types in go1.22: +- +- Alias +- Info.FileVersions +- (Checker).PkgNameOf +- +-The underlying logic depends on versions.FileVersion, which only +-behaves correctly in go1.22. (When go1.22 is assured, we can remove +-the min_go flag but leave the test inputs unchanged.) +- +--- flags -- +--ignore_extra_diags +--min_go_command=go1.22 +- +--- go.mod -- +-module example.com +- +-go 1.21 +- +--- a/a.go -- +-package a +- +-import "go/ast" +-import "go/token" +-import "go/types" +- +-// package-level decl +-var _ = types.Sat //@rank("Sat", "Satisfies") +-var _ = types.Ali //@rank("Ali", "!Alias") +- +-// field +-var _ = new(types.Info).Use //@rank("Use", "Uses") +-var _ = new(types.Info).Fil //@rank("Fil", "!FileVersions") +- +-// method +-var _ = new(types.Checker).Obje //@rank("Obje", "ObjectOf") +-var _ = new(types.Checker).PkgN //@rank("PkgN", "!PkgNameOf") +- +--- b/b.go -- +-//go:build go1.22 +- +-package a +- +-import "go/ast" +-import "go/token" +-import "go/types" +- +-// package-level decl +-var _ = types.Sat //@rank("Sat", "Satisfies") +-var _ = types.Ali //@rank("Ali", "Alias") +- +-// field +-var _ = new(types.Info).Use //@rank("Use", "Uses") +-var _ = new(types.Info).Fil //@rank("Fil", "FileVersions") +- +-// method +-var _ = new(types.Checker).Obje //@rank("Obje", "ObjectOf") +-var _ = new(types.Checker).PkgN //@rank("PkgN", "PkgNameOf") +diff -urN a/gopls/internal/test/marker/testdata/completion/index.txt b/gopls/internal/test/marker/testdata/completion/index.txt +--- a/gopls/internal/test/marker/testdata/completion/index.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/completion/index.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,36 +0,0 @@ +-This test checks completion related to index expressions. +- +--- flags -- +--ignore_extra_diags +- +--- settings.json -- +-{ +- "completeUnimported": false +-} +- +--- index.go -- +-package index +- +-func _() { +- var ( +- aa = "123" //@item(indexAA, "aa", "string", "var") +- ab = 123 //@item(indexAB, "ab", "int", "var") +- ) +- +- var foo [1]int +- foo[a] //@complete("]", indexAB, indexAA) +- foo[:a] //@complete("]", indexAB, indexAA) +- a[:a] //@complete("[", indexAA, indexAB) +- a[a] //@complete("[", indexAA, indexAB) +- +- var bar map[string]int +- bar[a] //@complete("]", indexAA, indexAB) +- +- type myMap map[string]int +- var baz myMap +- baz[a] //@complete("]", indexAA, indexAB) +- +- type myInt int +- var mi myInt //@item(indexMyInt, "mi", "myInt", "var") +- foo[m] //@snippet("]", indexMyInt, "mi") +-} +diff -urN a/gopls/internal/test/marker/testdata/completion/interfacerank.txt b/gopls/internal/test/marker/testdata/completion/interfacerank.txt +--- a/gopls/internal/test/marker/testdata/completion/interfacerank.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/completion/interfacerank.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,36 +0,0 @@ +-This test checks that completion ranking accounts for interface assignability. 
+- +--- flags -- +--ignore_extra_diags +- +--- settings.json -- +-{ +- "completeUnimported": false, +- "deepCompletion": false +-} +- +--- p.go -- +- +-package interfacerank +- +-type foo interface { +- foo() +-} +- +-type fooImpl int +- +-func (*fooImpl) foo() {} +- +-func wantsFoo(foo) {} +- +-func _() { +- var ( +- aa string //@item(irAA, "aa", "string", "var") +- ab *fooImpl //@item(irAB, "ab", "*fooImpl", "var") +- ) +- +- wantsFoo(a) //@complete(")", irAB, irAA) +- +- var ac fooImpl //@item(irAC, "ac", "fooImpl", "var") +- wantsFoo(&a) //@complete(")", irAC, irAA, irAB) +-} +diff -urN a/gopls/internal/test/marker/testdata/completion/issue51783.txt b/gopls/internal/test/marker/testdata/completion/issue51783.txt +--- a/gopls/internal/test/marker/testdata/completion/issue51783.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/completion/issue51783.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,47 +0,0 @@ +-Regression test for "completion gives unneeded generic type +-instantiation snippet", #51783. +- +-Type parameters that can be inferred from the arguments +-are not part of the offered completion snippet. +- +--- flags -- +--ignore_extra_diags +- +--- a.go -- +-package a +- +-// identity has a single simple type parameter. +-// The completion omits the instantiation. +-func identity[T any](x T) T +- +-// clone has a second type parameter that is nonetheless constrained by the parameter. +-// The completion omits the instantiation. +-func clone[S ~[]E, E any](s S) S +- +-// unconstrained has a type parameter constrained only by the result. +-// The completion suggests instantiation. +-func unconstrained[X, Y any](x X) Y +- +-// partial has three type parameters, +-// only the last two of which may be omitted as they +-// are constrained by the arguments. +-func partial[R any, S ~[]E, E any](s S) R +- +-//@item(identity, "identity", "details", "kind") +-//@item(clone, "clone", "details", "kind") +-//@item(unconstrained, "unconstrained", "details", "kind") +-//@item(partial, "partial", "details", "kind") +- +-func _() { +- _ = identity //@snippet("identity", identity, "identity(${1:})") +- +- _ = clone //@snippet("clone", clone, "clone(${1:})") +- +- _ = unconstrained //@snippet("unconstrained", unconstrained, "unconstrained[${1:}](${2:})") +- +- _ = partial //@snippet("partial", partial, "partial[${1:}](${2:})") +- +- // Result-type inference permits us to omit Y in this (rare) case, +- // but completion doesn't support that. +- var _ int = unconstrained //@snippet("unconstrained", unconstrained, "unconstrained[${1:}](${2:})") +-} +diff -urN a/gopls/internal/test/marker/testdata/completion/issue56505.txt b/gopls/internal/test/marker/testdata/completion/issue56505.txt +--- a/gopls/internal/test/marker/testdata/completion/issue56505.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/completion/issue56505.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,13 +0,0 @@ +-Test for golang/go#56505: completion on variables of type *error should not +-panic. 
+- +--- flags -- +--ignore_extra_diags +- +--- issue.go -- +-package issues +- +-func _() { +- var e *error +- e.x //@complete(" //") +-} +diff -urN a/gopls/internal/test/marker/testdata/completion/issue59096.txt b/gopls/internal/test/marker/testdata/completion/issue59096.txt +--- a/gopls/internal/test/marker/testdata/completion/issue59096.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/completion/issue59096.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,25 +0,0 @@ +-This test exercises the panic in golang/go#59096: completing at a syntactic +-type-assert expression was panicking because gopls was translating it into +-a (malformed) selector expr. +- +--- settings.json -- +-{ +- "importsSource": "gopls" +-} +- +--- go.mod -- +-module example.com +- +--- a/a.go -- +-package a +- +-func _() { +- b.(foo) //@complete(re"b.()", B), diag("b", re"(undefined|undeclared name): b") +-} +- +-//@item(B, "B", "const (from \"example.com/b\")", "const") +- +--- b/b.go -- +-package b +- +-const B = 0 +diff -urN a/gopls/internal/test/marker/testdata/completion/issue60545.txt b/gopls/internal/test/marker/testdata/completion/issue60545.txt +--- a/gopls/internal/test/marker/testdata/completion/issue60545.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/completion/issue60545.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,33 +0,0 @@ +-This test checks that unimported completion is case-insensitive. +- +--- go.mod -- +-module mod.test +- +-go 1.18 +- +--- settings.json -- +-{ +- "importsSource": "gopls" +-} +- +--- main.go -- +-package main +- +-//@item(Print, "Print", "func (from \"fmt\")", "func") +-//@item(Printf, "Printf", "func (from \"fmt\")", "func") +-//@item(Println, "Println", "func (from \"fmt\")", "func") +- +-func main() { +- fmt.p //@complete(re"fmt.p()", Print, Printf, Println), diag("fmt", re"(undefined|undeclared)") +-} +- +--- other.go -- +-package main +- +-// Including another package that imports "fmt" causes completion to use the +-// existing metadata, which is the codepath leading to golang/go#60545. +-import "fmt" +- +-func _() { +- fmt.Println() +-} +diff -urN a/gopls/internal/test/marker/testdata/completion/issue62141.txt b/gopls/internal/test/marker/testdata/completion/issue62141.txt +--- a/gopls/internal/test/marker/testdata/completion/issue62141.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/completion/issue62141.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,39 +0,0 @@ +-This test checks that we don't suggest completion to an untyped conversion such +-as "untyped float(abcdef)". +- +--- main.go -- +-package main +- +-func main() { +- abcdef := 32 //@diag("abcdef", re"not used") +- x := 1.0 / abcd //@acceptcompletion(re"abcd()", "abcdef", int), diag("x", re"not used"), diag("abcd", re"(undefined|undeclared)") +- +- // Verify that we don't suggest converting compatible untyped constants. +- const untypedConst = 42 +- y := 1.1 / untypedC //@acceptcompletion(re"untypedC()", "untypedConst", untyped), diag("y", re"not used"), diag("untypedC", re"(undefined|undeclared)") +-} +- +--- @int/main.go -- +-package main +- +-func main() { +- abcdef := 32 //@diag("abcdef", re"not used") +- x := 1.0 / float64(abcdef) //@acceptcompletion(re"abcd()", "abcdef", int), diag("x", re"not used"), diag("abcd", re"(undefined|undeclared)") +- +- // Verify that we don't suggest converting compatible untyped constants. 
+- const untypedConst = 42 +- y := 1.1 / untypedC //@acceptcompletion(re"untypedC()", "untypedConst", untyped), diag("y", re"not used"), diag("untypedC", re"(undefined|undeclared)") +-} +- +--- @untyped/main.go -- +-package main +- +-func main() { +- abcdef := 32 //@diag("abcdef", re"not used") +- x := 1.0 / abcd //@acceptcompletion(re"abcd()", "abcdef", int), diag("x", re"not used"), diag("abcd", re"(undefined|undeclared)") +- +- // Verify that we don't suggest converting compatible untyped constants. +- const untypedConst = 42 +- y := 1.1 / untypedConst //@acceptcompletion(re"untypedC()", "untypedConst", untyped), diag("y", re"not used"), diag("untypedC", re"(undefined|undeclared)") +-} +- +diff -urN a/gopls/internal/test/marker/testdata/completion/issue62560.txt b/gopls/internal/test/marker/testdata/completion/issue62560.txt +--- a/gopls/internal/test/marker/testdata/completion/issue62560.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/completion/issue62560.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,19 +0,0 @@ +-This test verifies that completion of package members in unimported packages +-reflects their fuzzy score, even when those members are present in the +-transitive import graph of the main module. (For technical reasons, this was +-the nature of the regression in golang/go#62560.) +- +--- go.mod -- +-module mod.test +- +--- foo/foo.go -- +-package foo +- +-func _() { +- json.U //@rank(re"U()", "Unmarshal", "InvalidUTF8Error"), diag("json", re"(undefined|undeclared)") +-} +- +--- bar/bar.go -- +-package bar +- +-import _ "encoding/json" +diff -urN a/gopls/internal/test/marker/testdata/completion/issue62676.txt b/gopls/internal/test/marker/testdata/completion/issue62676.txt +--- a/gopls/internal/test/marker/testdata/completion/issue62676.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/completion/issue62676.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,63 +0,0 @@ +-This test verifies that unimported completion respects the usePlaceholders setting. +- +--- flags -- +--ignore_extra_diags +- +--- settings.json -- +-{ +- "usePlaceholders": false +-} +- +--- go.mod -- +-module mod.test +- +-go 1.21 +- +--- foo/foo.go -- +-package foo +- +-func _() { +- // This uses goimports-based completion; TODO: this should insert snippets. +- os.Open //@acceptcompletion(re"Open()", "Open", open) +-} +- +-func _() { +- // This uses metadata-based completion. +- errors.New //@acceptcompletion(re"New()", "New", new) +-} +- +--- bar/bar.go -- +-package bar +- +-import _ "errors" // important: doesn't transitively import os. +- +--- @new/foo/foo.go -- +-package foo +- +-import "errors" +- +-func _() { +- // This uses goimports-based completion; TODO: this should insert snippets. +- os.Open //@acceptcompletion(re"Open()", "Open", open) +-} +- +-func _() { +- // This uses metadata-based completion. +- errors.New(${1:}) //@acceptcompletion(re"New()", "New", new) +-} +- +--- @open/foo/foo.go -- +-package foo +- +-import "os" +- +-func _() { +- // This uses goimports-based completion; TODO: this should insert snippets. +- os.Open(${1:}) //@acceptcompletion(re"Open()", "Open", open) +-} +- +-func _() { +- // This uses metadata-based completion. 
+- errors.New //@acceptcompletion(re"New()", "New", new) +-} +- +diff -urN a/gopls/internal/test/marker/testdata/completion/issue70636.txt b/gopls/internal/test/marker/testdata/completion/issue70636.txt +--- a/gopls/internal/test/marker/testdata/completion/issue70636.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/completion/issue70636.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,23 +0,0 @@ +-This test reproduces the crash of golang/go#70636, an out of bounds error when +-analyzing a return statement with more results than the signature expects. +- +--- flags -- +--ignore_extra_diags +- +--- go.mod -- +-module example.com +- +-go 1.21 +- +--- p.go -- +-package p +- +-var xx int +-var xy string +- +- +-func _() { +- return Foo(x) //@ rank(re"x()", "xx", "xy") +-} +- +-func Foo[T any](t T) T {} +diff -urN a/gopls/internal/test/marker/testdata/completion/issue72753.txt b/gopls/internal/test/marker/testdata/completion/issue72753.txt +--- a/gopls/internal/test/marker/testdata/completion/issue72753.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/completion/issue72753.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,86 +0,0 @@ +-This test checks that completion gives correct completion for +-incomplete AssignStmt with multiple left-hand vars. +- +--- flags -- +--ignore_extra_diags +- +--- settings.json -- +-{ +- "usePlaceholders": false +-} +- +--- go.mod -- +-module mod.test +- +-go 1.21 +- +--- string.go -- +-package a +- +-func _(left, right string){ +- left, ri //@acceptcompletion(re"ri()", "right", string) +-} +- +--- @string/string.go -- +-package a +- +-func _(left, right string){ +- left, right //@acceptcompletion(re"ri()", "right", string) +-} +- +--- array.go -- +-package a +-func _(right string) { +- var left [3]int +- left[0], ri //@acceptcompletion(re"ri()", "right", array) +-} +- +--- @array/array.go -- +-package a +-func _(right string) { +- var left [3]int +- left[0], right //@acceptcompletion(re"ri()", "right", array) +-} +- +--- slice.go -- +-package a +-func _(right string) { +- var left []int +- left[0], ri //@acceptcompletion(re"ri()", "right", slice) +-} +- +--- @slice/slice.go -- +-package a +-func _(right string) { +- var left []int +- left[0], right //@acceptcompletion(re"ri()", "right", slice) +-} +- +--- map.go -- +-package a +-func _(right string) { +- var left map[int]int +- left[0], ri //@acceptcompletion(re"ri()", "right", map) +-} +- +--- @map/map.go -- +-package a +-func _(right string) { +- var left map[int]int +- left[0], right //@acceptcompletion(re"ri()", "right", map) +-} +- +--- star.go -- +-package a +-func _(right string) { +- var left *int +- *left, ri //@acceptcompletion(re"ri()", "right", star) +-} +- +--- @star/star.go -- +-package a +-func _(right string) { +- var left *int +- *left, right //@acceptcompletion(re"ri()", "right", star) +-} +- +diff -urN a/gopls/internal/test/marker/testdata/completion/keywords.txt b/gopls/internal/test/marker/testdata/completion/keywords.txt +--- a/gopls/internal/test/marker/testdata/completion/keywords.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/completion/keywords.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,260 +0,0 @@ +-This test checks completion of Go keywords. 
+- +--- flags -- +--ignore_extra_diags +--filter_keywords=false +- +--- settings.json -- +-{ +- "completeUnimported": false, +- "matcher": "caseInsensitive", +- "experimentalPostfixCompletions": false +-} +- +--- keywords.go -- +-package keywords +- +-//@rank("", type),rank("", func),rank("", var),rank("", const),rank("", import) +- +-func _() { +- var test int //@rank(" //", int, interface) +- var tChan chan int +- var _ m //@complete(" //", map) +- var _ f //@complete(" //", func) +- var _ c //@complete(" //", chan) +- +- var _ str //@rank(" //", string, struct) +- +- type _ int //@rank(" //", interface, int) +- +- type _ str //@rank(" //", struct, string) +- +- switch test { +- case 1: // TODO: trying to complete case here will break because the parser won't return *ast.Ident +- b //@complete(" //", break) +- case 2: +- f //@complete(" //", fallthrough, for) +- r //@complete(" //", return) +- d //@complete(" //", default, defer) +- c //@complete(" //", case, const) +- } +- +- switch test.(type) { +- case fo: //@complete(":") +- case int: +- b //@complete(" //", break) +- case int32: +- f //@complete(" //", for) +- d //@complete(" //", default, defer) +- r //@complete(" //", return) +- c //@complete(" //", case, const) +- } +- +- select { +- case <-tChan: +- b //@complete(" //", break) +- c //@complete(" //", case, const) +- } +- +- for index := 0; index < test; index++ { +- c //@complete(" //", const, continue) +- b //@complete(" //", break) +- } +- +- for range []int{} { +- c //@complete(" //", const, continue) +- b //@complete(" //", break) +- } +- +- // Test function level keywords +- +- //Using 2 characters to test because map output order is random +- sw //@complete(" //", switch) +- se //@complete(" //", select) +- +- f //@complete(" //", for) +- d //@complete(" //", defer) +- g //@rank(" //", go),rank(" //", goto) +- r //@complete(" //", return) +- i //@complete(" //", if) +- e //@complete(" //", else) +- v //@complete(" //", var) +- c //@complete(" //", const) +- +- for i := r //@complete(" //", range) +-} +- +-/* package */ //@item(package, "package", "", "keyword") +-/* import */ //@item(import, "import", "", "keyword") +-/* func */ //@item(func, "func", "", "keyword") +-/* type */ //@item(type, "type", "", "keyword") +-/* var */ //@item(var, "var", "", "keyword") +-/* const */ //@item(const, "const", "", "keyword") +-/* break */ //@item(break, "break", "", "keyword") +-/* default */ //@item(default, "default", "", "keyword") +-/* case */ //@item(case, "case", "", "keyword") +-/* defer */ //@item(defer, "defer", "", "keyword") +-/* go */ //@item(go, "go", "", "keyword") +-/* for */ //@item(for, "for", "", "keyword") +-/* if */ //@item(if, "if", "", "keyword") +-/* else */ //@item(else, "else", "", "keyword") +-/* switch */ //@item(switch, "switch", "", "keyword") +-/* select */ //@item(select, "select", "", "keyword") +-/* fallthrough */ //@item(fallthrough, "fallthrough", "", "keyword") +-/* continue */ //@item(continue, "continue", "", "keyword") +-/* return */ //@item(return, "return", "", "keyword") +-/* goto */ //@item(goto, "goto", "", "keyword") +-/* struct */ //@item(struct, "struct", "", "keyword") +-/* interface */ //@item(interface, "interface", "", "keyword") +-/* map */ //@item(map, "map", "", "keyword") +-/* chan */ //@item(chan, "chan", "", "keyword") +-/* range */ //@item(range, "range", "", "keyword") +-/* string */ //@item(string, "string", "", "type") +-/* int */ //@item(int, "int", "", "type") +- +--- accidental_keywords.go -- +-package keywords +- +-// 
non-matching candidate - shouldn't show up as completion +-var apple = "apple" +- +-func _() { +- foo.bar() // insert some extra statements to exercise our AST surgery +- variance := 123 //@item(kwVariance, "variance", "int", "var") +- foo.bar() +- println(var) //@complete(")", kwVariance) +-} +- +-func _() { +- foo.bar() +- var s struct { variance int } //@item(kwVarianceField, "variance", "int", "field") +- foo.bar() +- s.var //@complete(" //", kwVarianceField) +-} +- +-func _() { +- channel := 123 //@item(kwChannel, "channel", "int", "var") +- chan //@complete(" //", kwChannel) +- foo.bar() +-} +- +-func _() { +- foo.bar() +- var typeName string //@item(kwTypeName, "typeName", "string", "var") +- foo.bar() +- type //@complete(" //", kwTypeName) +-} +--- empty_select.go -- +-package keywords +- +-func _() { +- select { +- c //@complete(" //", case) +- } +-} +--- empty_switch.go -- +-package keywords +- +-func _() { +- switch { +- //@complete("", case, default) +- } +- +- switch test.(type) { +- d //@complete(" //", default) +- } +-} +- +--- default_name_var_switch.go -- +-package keywords +- +-func _() { +- var defaultVar int //@item(defaultVar, "defaultVar", "int", "var") +- switch defaultVar { +- case 1: +- println("helloworld") +- d //@complete(" //", default, defaultVar, defer) +- } +- switch defaultVar { +- default: +- d //@complete(" //", defaultVar, defer) +- } +- var nested int +- switch defaultVar { +- case 1: +- switch nested { +- default: +- println("") +- } +- d //@complete(" //", default, defaultVar, defer) +- } +-} +- +--- return_different_func.go -- +-package keywords +- +-/* return */ //@item(returnWithSpace, "return ", "", "keyword") +- +- +-func _ () int { +- r //@complete(" //", returnWithSpace) +-} +- +-func _ () (int, int) { +- r //@complete(" //", returnWithSpace) +-} +- +-func _ () (_ int) { +- r //@complete(" //", returnWithSpace) +-} +- +-func _ () (_ int) { +- r //@complete(" //", returnWithSpace) +-} +- +-func _ () (_, _ int) { +- r //@complete(" //", returnWithSpace) +-} +- +-func _ () (_, a int) { +- r //@complete(" //", return) +-} +- +-func _ () { +- r //@complete(" //", return) +-} +- +-func _ () (a int) { +- r //@complete(" //", return) +-} +- +-func _ () (a, b int) { +- r //@complete(" //", return) +-} +- +-func _ () (a, b int, c string) { +- r //@complete(" //", return) +-} +- +-func _ () (a int) { +- _ = func (){ +- r //@complete(" //", return) +- } +- return +-} +- +-func _ () int { +- _ = func () (a int) { +- // func lit will be affected by outer function. +- r //@complete(" //", returnWithSpace) +- } +- return +-} +- +-func _ () { +- _ = func () int { +- // func lit will be affected by outer function. +- r //@complete(" //", return) +- } +- return +-} +diff -urN a/gopls/internal/test/marker/testdata/completion/labels.txt b/gopls/internal/test/marker/testdata/completion/labels.txt +--- a/gopls/internal/test/marker/testdata/completion/labels.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/completion/labels.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,55 +0,0 @@ +-This test checks completion of labels. 
+- +--- flags -- +--ignore_extra_diags +- +--- labels.go -- +-package labels +- +-func _() { +- goto F //@complete(" //", label1, label5) +- +-Foo1: //@item(label1, "Foo1", "label", "const") +- for a, b := range []int{} { +- Foo2: //@item(label2, "Foo2", "label", "const") +- switch { +- case true: +- break F //@complete(" //", label2, label1) +- +- continue F //@complete(" //", label1) +- +- { +- FooUnjumpable: +- } +- +- goto F //@complete(" //", label1, label2, label4, label5) +- +- func() { +- goto F //@complete(" //", label3) +- +- break F //@complete(" //") +- +- continue F //@complete(" //") +- +- Foo3: //@item(label3, "Foo3", "label", "const") +- }() +- } +- +- Foo4: //@item(label4, "Foo4", "label", "const") +- switch any(a).(type) { +- case int: +- break F //@complete(" //", label4, label1) +- } +- } +- +- break F //@complete(" //") +- +- continue F //@complete(" //") +- +-Foo5: //@item(label5, "Foo5", "label", "const") +- for { +- break F //@complete(" //", label5) +- } +- +- return +-} +diff -urN a/gopls/internal/test/marker/testdata/completion/lit.txt b/gopls/internal/test/marker/testdata/completion/lit.txt +--- a/gopls/internal/test/marker/testdata/completion/lit.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/completion/lit.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,49 +0,0 @@ +- +--- flags -- +--ignore_extra_diags +- +--- go.mod -- +-module mod.test +- +-go 1.18 +- +--- foo/foo.go -- +-package foo +- +-type StructFoo struct{ F int } +- +--- a.go -- +-package a +- +-import "mod.test/foo" +- +-func _() { +- StructFoo{} //@item(litStructFoo, "StructFoo{}", "struct{...}", "struct") +- +- var sfp *foo.StructFoo +- // Don't insert the "&" before "StructFoo{}". +- sfp = foo.Str //@snippet(" //", litStructFoo, "StructFoo{$0\\}") +- +- var sf foo.StructFoo +- sf = foo.Str //@snippet(" //", litStructFoo, "StructFoo{$0\\}") +- sf = foo. //@snippet(" //", litStructFoo, "StructFoo{$0\\}") +-} +- +--- http.go -- +-package a +- +-import ( +- "net/http" +- "sort" +-) +- +-func _() { +- sort.Slice(nil, fun) //@snippet(")", litFunc, "func(i, j int) bool {$0\\}") +- +- http.HandleFunc("", f) //@snippet(")", litFunc, "func(w http.ResponseWriter, r *http.Request) {$0\\}") +- +- //@item(litFunc, "func(...) {}", "", "var") +- http.HandlerFunc() //@item(handlerFunc, "http.HandlerFunc()", "", "var") +- http.Handle("", http.HandlerFunc()) //@snippet("))", litFunc, "func(w http.ResponseWriter, r *http.Request) {$0\\}") +- http.Handle("", h) //@snippet(")", handlerFunc, "http.HandlerFunc($0)") +-} +diff -urN a/gopls/internal/test/marker/testdata/completion/maps.txt b/gopls/internal/test/marker/testdata/completion/maps.txt +--- a/gopls/internal/test/marker/testdata/completion/maps.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/completion/maps.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,29 +0,0 @@ +-This test checks completion of map keys and values. 
+- +--- flags -- +--ignore_extra_diags +- +--- settings.json -- +-{ +- "completeUnimported": false +-} +- +--- maps.go -- +-package maps +- +-func _() { +- var aVar int //@item(mapVar, "aVar", "int", "var") +- +- // not comparabale +- type aSlice []int //@item(mapSliceType, "aSlice", "[]int", "type") +- +- *aSlice //@item(mapSliceTypePtr, "*aSlice", "[]int", "type") +- +- // comparable +- type aStruct struct{} //@item(mapStructType, "aStruct", "struct{...}", "struct") +- +- map[]a{} //@complete("]", mapSliceType, mapStructType),snippet("]", mapSliceType, "*aSlice") +- +- map[a]a{} //@complete("]", mapSliceType, mapStructType) +- map[a]a{} //@complete("{", mapSliceType, mapStructType) +-} +diff -urN a/gopls/internal/test/marker/testdata/completion/multi_return.txt b/gopls/internal/test/marker/testdata/completion/multi_return.txt +--- a/gopls/internal/test/marker/testdata/completion/multi_return.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/completion/multi_return.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,55 +0,0 @@ +-This test checks various ranking of completion results related to functions +-with multiple return values. +- +--- flags -- +--ignore_extra_diags +- +--- multireturn.go -- +-package multireturn +- +-func f0() {} //@item(multiF0, "f0", "func()", "func") +- +-func f1(int) int { return 0 } //@item(multiF1, "f1", "func(int) int", "func") +- +-func f2(int, int) (int, int) { return 0, 0 } //@item(multiF2, "f2", "func(int, int) (int, int)", "func") +- +-func f2Str(string, string) (string, string) { return "", "" } //@item(multiF2Str, "f2Str", "func(string, string) (string, string)", "func") +- +-func f3(int, int, int) (int, int, int) { return 0, 0, 0 } //@item(multiF3, "f3", "func(int, int, int) (int, int, int)", "func") +- +-func _() { +- _ := f //@rank(" //", multiF1, multiF2) +- +- _, _ := f //@rank(" //", multiF2, multiF0),rank(" //", multiF1, multiF0) +- +- _, _ := _, f //@rank(" //", multiF1, multiF2),rank(" //", multiF1, multiF0) +- +- _, _ := f, abc //@rank(", abc", multiF1, multiF2) +- +- f1() //@rank(")", multiF1, multiF0) +- f1(f) //@rank(")", multiF1, multiF2) +- f2(f) //@rank(")", multiF2, multiF3),rank(")", multiF1, multiF3) +- f2(1, f) //@rank(")", multiF1, multiF2),rank(")", multiF1, multiF0) +- f2(1, ) //@rank(")", multiF1, multiF2),rank(")", multiF1, multiF0) +- f2Str() //@rank(")", multiF2Str, multiF2) +- +- var i int +- i, _ := f //@rank(" //", multiF2, multiF2Str) +- +- var s string +- _, s := f //@rank(" //", multiF2Str, multiF2) +- +- banana, s = f //@rank(" //", multiF2, multiF3) +- +- var variadic func(int, ...int) +- variadic() //@rank(")", multiF1, multiF0),rank(")", multiF2, multiF0),rank(")", multiF3, multiF0) +-} +- +-func _() { +- var baz func(...any) +- +- var otterNap func() (int, int) //@item(multiTwo, "otterNap", "func() (int, int)", "var") +- var one int //@item(multiOne, "one", "int", "var") +- +- baz(on) //@rank(")", multiOne, multiTwo) +-} +diff -urN a/gopls/internal/test/marker/testdata/completion/nested_complit.txt b/gopls/internal/test/marker/testdata/completion/nested_complit.txt +--- a/gopls/internal/test/marker/testdata/completion/nested_complit.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/completion/nested_complit.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,25 +0,0 @@ +-This test checks completion of nested composite literals; +- +-Parser recovery changed in Go 1.20, so this test requires at least that +-version for consistency. 
+- +--- flags -- +--ignore_extra_diags +- +--- nested_complit.go -- +-package nested_complit +- +-type ncFoo struct {} //@item(structNCFoo, "ncFoo", "struct{...}", "struct") +- +-type ncBar struct { //@item(structNCBar, "ncBar", "struct{...}", "struct") +- baz []ncFoo +-} +- +-func _() { +- _ = []ncFoo{} //@item(litNCFoo, "[]ncFoo{}", "", "var") +- _ = make([]ncFoo, 0) //@item(makeNCFoo, "make([]ncFoo, 0)", "", "func") +- +- _ := ncBar{ +- baz: [] //@complete(" //", litNCFoo, makeNCFoo, structNCBar, structNCFoo) +- } +-} +diff -urN a/gopls/internal/test/marker/testdata/completion/postfix_placeholder.txt b/gopls/internal/test/marker/testdata/completion/postfix_placeholder.txt +--- a/gopls/internal/test/marker/testdata/completion/postfix_placeholder.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/completion/postfix_placeholder.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,83 +0,0 @@ +-These tests check that postfix completions when enable usePlaceholders +- +--- flags -- +--ignore_extra_diags +- +--- settings.json -- +-{ +- "usePlaceholders": true +-} +- +--- go.mod -- +-module golang.org/lsptests/snippets +- +-go 1.18 +- +--- postfix.go -- +-package snippets +- +-import ( +- "strconv" +-) +- +-func _() { +- /* for! */ //@item(postfixFor, "for!", "range over slice by index", "snippet") +- /* forr! */ //@item(postfixForr, "forr!", "range over slice by index and value", "snippet") +- /* range! */ //@item(postfixRange, "range!", "range over slice", "snippet") +- /* var! */ //@item(postfixVar, "var!", "assign to variable", "snippet") +- +- var foo []int +- +- foo.fo //@snippet(" //", postfixFor, "for ${1:i} := range foo {\n\t$0\n}") +- foo.forr //@snippet(" //", postfixForr, "for ${1:i}, ${2:v} := range foo {\n\t$0\n}") +- foo.rang //@snippet(" //", postfixRange, "for ${1:i}, ${2:v} := range foo {\n\t$0\n}") +- foo.va //@snippet(" //", postfixVar, "${1:i} := foo") +-} +- +-func _() { +- /* for! */ //@item(postfixForMap, "for!", "range over map by key", "snippet") +- /* forr! */ //@item(postfixForrMap, "forr!", "range over map by key and value", "snippet") +- /* range! */ //@item(postfixRangeMap, "range!", "range over map", "snippet") +- +- var foo map[int]int +- +- foo.fo //@snippet(" //", postfixFor, "for ${1:k} := range foo {\n\t$0\n}") +- foo.forr //@snippet(" //", postfixForr, "for ${1:k}, ${2:v} := range foo {\n\t$0\n}") +- foo.rang //@snippet(" //", postfixRange, "for ${1:k}, ${2:v} := range foo {\n\t$0\n}") +-} +- +-func _() { +- /* for! */ //@item(postfixForChannel, "for!", "range over channel", "snippet") +- /* range! */ //@item(postfixRangeChannel, "range!", "range over channel", "snippet") +- +- var foo chan int +- +- foo.fo //@snippet(" //", postfixForChannel, "for ${1:e} := range foo {\n\t$0\n}") +- foo.rang //@snippet(" //", postfixRangeChannel, "for ${1:e} := range foo {\n\t$0\n}") +-} +- +-type T struct { +- Name string +-} +- +-func _() (string, T, map[string]string, error) { +- /* iferr! */ //@item(postfixIfErr, "iferr!", "check error and return", "snippet") +- /* variferr! */ //@item(postfixVarIfErr, "variferr!", "assign variables and check error", "snippet") +- /* var! 
*/ //@item(postfixVars, "var!", "assign to variables", "snippet") +- +- +- var err error +- err.iferr //@snippet(" //", postfixIfErr, "if err != nil {\n\treturn \"\", T{}, nil, ${1:err}\n}\n") +- strconv.Atoi("32").iferr //@snippet(" //", postfixIfErr, "if _, err := strconv.Atoi(\"32\"); err != nil {\n\treturn \"\", T{}, nil, ${1:err}\n}\n") +- strconv.Atoi("32").variferr //@snippet(" //", postfixVarIfErr, "${1:i}, ${2:err} := strconv.Atoi(\"32\")\nif ${2:err} != nil {\n\treturn \"\", T{}, nil, ${3:${2:err}}\n}\n") +- +- // test function return multiple errors +- var foo func() (error, error) +- foo().iferr //@snippet(" //", postfixIfErr, "if _, err := foo(); err != nil {\n\treturn \"\", T{}, nil, ${1:err}\n}\n") +- foo().variferr //@snippet(" //", postfixVarIfErr, "${1:err2}, ${2:err} := foo()\nif ${2:err} != nil {\n\treturn \"\", T{}, nil, ${3:${2:err}}\n}\n") +- +- // test function just return error +- var bar func() error +- bar().iferr //@snippet(" //", postfixIfErr, "if err := bar(); err != nil {\n\treturn \"\", T{}, nil, ${1:err}\n}\n") +- bar().variferr //@snippet(" //", postfixVarIfErr, "${1:err2} := bar()\nif ${1:err2} != nil {\n\treturn \"\", T{}, nil, ${2:${1:err2}}\n}\n") +-} +diff -urN a/gopls/internal/test/marker/testdata/completion/postfix.txt b/gopls/internal/test/marker/testdata/completion/postfix.txt +--- a/gopls/internal/test/marker/testdata/completion/postfix.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/completion/postfix.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,149 +0,0 @@ +-These tests check that postfix completions do and do not show up in certain +-cases. Tests for the postfix completion contents are implemented as ad-hoc +-integration tests. +- +--- flags -- +--ignore_extra_diags +- +--- go.mod -- +-module golang.org/lsptests/snippets +- +-go 1.18 +- +--- postfix.go -- +-package snippets +- +-import ( +- "strconv" +-) +- +-func _() { +- var foo []int +- foo.append //@rank(" //", postfixAppend) +- +- []int{}.append //@complete(" //") +- +- []int{}.last //@complete(" //") +- +- +- foo.copy //@rank(" //", postfixCopy) +- +- var s struct{ i []int } +- s.i.copy //@rank(" //", postfixCopy) +- +- var _ []int = s.i.copy //@complete(" //") +- +- var blah func() []int +- blah().append //@complete(" //") +-} +- +-func _() { +- /* append! */ //@item(postfixAppend, "append!", "append and re-assign slice", "snippet") +- /* copy! */ //@item(postfixCopy, "copy!", "duplicate slice", "snippet") +- /* for! */ //@item(postfixFor, "for!", "range over slice by index", "snippet") +- /* forr! */ //@item(postfixForr, "forr!", "range over slice by index and value", "snippet") +- /* last! */ //@item(postfixLast, "last!", "s[len(s)-1]", "snippet") +- /* len! */ //@item(postfixLen, "len!", "len(s)", "snippet") +- /* print! */ //@item(postfixPrint, "print!", "print to stdout", "snippet") +- /* range! */ //@item(postfixRange, "range!", "range over slice", "snippet") +- /* reverse! */ //@item(postfixReverse, "reverse!", "reverse slice", "snippet") +- /* sort! */ //@item(postfixSort, "sort!", "sort.Slice()", "snippet") +- /* var! */ //@item(postfixVar, "var!", "assign to variable", "snippet") +- /* ifnotnil! */ //@item(postfixIfNotNil, "ifnotnil!", "if expr != nil", "snippet") +- +- var foo []int +- foo. 
//@complete(" //", postfixAppend, postfixCopy, postfixFor, postfixForr, postfixIfNotNil, postfixLast, postfixLen, postfixPrint, postfixRange, postfixReverse, postfixSort, postfixVar) +- foo = nil +- +- foo.append //@snippet(" //", postfixAppend, "foo = append(foo, $0)") +- foo.copy //snippet(" //", postfixCopy, "fooCopy := make([]int, len(foo))\ncopy($fooCopy, foo)\n") +- foo.fo //@snippet(" //", postfixFor, "for ${1:} := range foo {\n\t$0\n}") +- foo.forr //@snippet(" //", postfixForr, "for ${1:}, ${2:} := range foo {\n\t$0\n}") +- foo.last //@snippet(" //", postfixLast, "foo[len(foo)-1]") +- foo.len //@snippet(" //", postfixLen, "len(foo)") +- foo.print //@snippet(" //", postfixPrint, `fmt.Printf("foo: %v\n", foo)`) +- foo.rang //@snippet(" //", postfixRange, "for ${1:}, ${2:} := range foo {\n\t$0\n}") +- foo.reverse //@snippet(" //", postfixReverse, "slices.Reverse(foo)") +- foo.sort //@snippet(" //", postfixSort, "sort.Slice(foo, func(i, j int) bool {\n\t$0\n})") +- foo.va //@snippet(" //", postfixVar, "${1:} := foo") +- foo.ifnotnil //@snippet(" //", postfixIfNotNil, "if foo != nil {\n\t$0\n}") +-} +- +-func _() { +- /* for! */ //@item(postfixForMap, "for!", "range over map by key", "snippet") +- /* forr! */ //@item(postfixForrMap, "forr!", "range over map by key and value", "snippet") +- /* range! */ //@item(postfixRangeMap, "range!", "range over map", "snippet") +- /* clear! */ //@item(postfixClear, "clear!", "clear map contents", "snippet") +- /* keys! */ //@item(postfixKeys, "keys!", "create slice of keys", "snippet") +- +- var foo map[int]int +- foo. //@complete(" //", postfixClear, postfixForMap, postfixForrMap, postfixIfNotNil, postfixKeys, postfixLen, postfixPrint, postfixRangeMap, postfixVar) +- +- foo = nil +- +- foo.fo //@snippet(" //", postfixFor, "for ${1:} := range foo {\n\t$0\n}") +- foo.forr //@snippet(" //", postfixForr, "for ${1:}, ${2:} := range foo {\n\t$0\n}") +- foo.rang //@snippet(" //", postfixRange, "for ${1:}, ${2:} := range foo {\n\t$0\n}") +- foo.clear //@snippet(" //", postfixClear, "for k := range foo {\n\tdelete(foo, k)\n}\n") +- foo.keys //@snippet(" //", postfixKeys, "keys := make([]int, 0, len(foo))\nfor k := range foo {\n\tkeys = append(keys, k)\n}\n") +-} +- +-func _() { +- /* for! */ //@item(postfixForChannel, "for!", "range over channel", "snippet") +- /* range! */ //@item(postfixRangeChannel, "range!", "range over channel", "snippet") +- +- var foo chan int +- foo. //@complete(" //", postfixForChannel, postfixIfNotNil, postfixLen, postfixPrint, postfixRangeChannel, postfixVar) +- +- foo = nil +- +- foo.fo //@snippet(" //", postfixForChannel, "for ${1:} := range foo {\n\t$0\n}") +- foo.rang //@snippet(" //", postfixRangeChannel, "for ${1:} := range foo {\n\t$0\n}") +-} +- +-type T struct { +- Name string +-} +- +-func _() (string, T, map[string]string, error) { +- /* iferr! */ //@item(postfixIfErr, "iferr!", "check error and return", "snippet") +- /* variferr! */ //@item(postfixVarIfErr, "variferr!", "assign variables and check error", "snippet") +- /* var! */ //@item(postfixVars, "var!", "assign to variables", "snippet") +- +- strconv.Atoi("32"). 
//@complete(" //", postfixIfErr, postfixPrint, postfixVars, postfixVarIfErr) +- +- var err error +- err.iferr //@snippet(" //", postfixIfErr, "if err != nil {\n\treturn \"\", T{}, nil, ${1:}\n}\n") +- +- strconv.Atoi("32").iferr //@snippet(" //", postfixIfErr, "if _, err := strconv.Atoi(\"32\"); err != nil {\n\treturn \"\", T{}, nil, ${1:}\n}\n") +- +- strconv.Atoi("32").variferr //@snippet(" //", postfixVarIfErr, "${1:}, ${2:} := strconv.Atoi(\"32\")\nif ${2:} != nil {\n\treturn \"\", T{}, nil, ${3:}\n}\n") +- +- // test function return multiple errors +- var foo func() (error, error) +- foo().iferr //@snippet(" //", postfixIfErr, "if _, err := foo(); err != nil {\n\treturn \"\", T{}, nil, ${1:}\n}\n") +- foo().variferr //@snippet(" //", postfixVarIfErr, "${1:}, ${2:} := foo()\nif ${2:} != nil {\n\treturn \"\", T{}, nil, ${3:}\n}\n") +- +- // test function just return error +- var bar func() error +- bar().iferr //@snippet(" //", postfixIfErr, "if err := bar(); err != nil {\n\treturn \"\", T{}, nil, ${1:}\n}\n") +- bar().variferr //@snippet(" //", postfixVarIfErr, "${1:} := bar()\nif ${1:} != nil {\n\treturn \"\", T{}, nil, ${2:}\n}\n") +-} +- +-func _(){ +- /* tostring! */ //@item(postfixToString, "tostring!", "[]byte to string", "snippet") +- var bs []byte +- bs. //@complete(" //", postfixAppend, postfixCopy, postfixFor, postfixForr, postfixIfNotNil, postfixLast, postfixLen, postfixPrint, postfixRange, postfixReverse, postfixSort, postfixToString, postfixVar) +- bs = nil +- +- /* tobytes! */ //@item(postfixToBytes, "tobytes!", "string to []byte", "snippet") +- /* split! */ //@item(postfixSplit, "split!", "split string", "snippet") +- var s string +- s. //@complete(" //", postfixPrint, postfixSplit, postfixToBytes, postfixVar) +- s = "" +- +- /* tostring! */ //@item(postfixIntToString, "tostring!", "int to string", "snippet") +- var i int +- i. //@complete(" //", postfixPrint, postfixIntToString, postfixVar) +- i = 0 +-} +diff -urN a/gopls/internal/test/marker/testdata/completion/printf.txt b/gopls/internal/test/marker/testdata/completion/printf.txt +--- a/gopls/internal/test/marker/testdata/completion/printf.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/completion/printf.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,42 +0,0 @@ +-This test checks various ranking of completion results related to printf. 
+- +--- flags -- +--ignore_extra_diags +- +--- printf.go -- +-package printf +- +-import "fmt" +- +-func myPrintf(string, ...any) {} +- +-func _() { +- var ( +- aInt int //@item(printfInt, "aInt", "int", "var") +- aFloat float64 //@item(printfFloat, "aFloat", "float64", "var") +- aString string //@item(printfString, "aString", "string", "var") +- aBytes []byte //@item(printfBytes, "aBytes", "[]byte", "var") +- aStringer fmt.Stringer //@item(printfStringer, "aStringer", "fmt.Stringer", "var") +- aError error //@item(printfError, "aError", "error", "var") +- aBool bool //@item(printfBool, "aBool", "bool", "var") +- ) +- +- myPrintf("%d", a) //@rank(")", printfInt, printfFloat) +- myPrintf("%s", a) //@rank(")", printfString, printfInt),rank(")", printfBytes, printfInt),rank(")", printfStringer, printfInt),rank(")", printfError, printfInt) +- myPrintf("%w", a) //@rank(")", printfError, printfInt) +- myPrintf("%x %[1]b", a) //@rank(")", printfInt, printfString) +- +- fmt.Printf("%t", a) //@rank(")", printfBool, printfInt) +- +- fmt.Fprintf(nil, "%f", a) //@rank(")", printfFloat, printfInt) +- +- fmt.Sprintf("%[2]q %[1]*.[3]*[4]f", +- a, //@rank(",", printfInt, printfFloat) +- a, //@rank(",", printfString, printfFloat) +- a, //@rank(",", printfInt, printfFloat) +- a, //@rank(",", printfFloat, printfInt) +- ) +- +- // Don't insert as "&aStringer" +- fmt.Printf("%p", a) //@snippet(")", printfStringer, "aStringer") +-} +diff -urN a/gopls/internal/test/marker/testdata/completion/randv2.txt b/gopls/internal/test/marker/testdata/completion/randv2.txt +--- a/gopls/internal/test/marker/testdata/completion/randv2.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/completion/randv2.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,24 +0,0 @@ +-Unimported completions has to find math/rand/v2 +--- flags -- +--min_go_command=go1.22 +- +--- settings.json -- +-{ +- "importsSource": "gopls" +-} +- +--- go.mod -- +-module unimported.test +- +-go 1.22 +- +--- main.go -- +-package main +-var _ = rand.Int64 //@complete(re"Int64", Int64, Int64N, x64, Uint64, Uint64N), diag("rand", re"undefined: rand") +-// ordering of these requires completion order be deterministic +-// for now, we do not know the types. Awaiting CL 665335 +-//@item(Int64, "Int64", "func (from \"math/rand/v2\")", "func") +-//@item(Int64N, "Int64N", "func (from \"math/rand/v2\")", "func") +-//@item(x64, "Uint64", "func (from \"math/rand\")", "func") +-//@item(Uint64, "Uint64", "func (from \"math/rand/v2\")", "func") +-//@item(Uint64N, "Uint64N", "func (from \"math/rand/v2\")", "func") +diff -urN a/gopls/internal/test/marker/testdata/completion/range_func.txt b/gopls/internal/test/marker/testdata/completion/range_func.txt +--- a/gopls/internal/test/marker/testdata/completion/range_func.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/completion/range_func.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,23 +0,0 @@ +-This test shows we prefer rangeable funcs in range statements. 
+- +--- flags -- +--ignore_extra_diags +- +--- range_func.go -- +-package rangefunc +- +-func iterNot(func(int)) {} +-func iter0(func() bool) {} +-func iter1(func(int) bool) {} +-func iter2(func(int, int) bool) +- +-func _() { +- for range i { //@rank(" {", "iter0", "iterNot"),rank(" {", "iter1", "iterNot"),rank(" {", "iter2", "iterNot") +- } +- +- for k := range i { //@rank(" {", "iter1", "iterNot"),rank(" {", "iter1", "iter0"),rank(" {", "iter2", "iter0") +- } +- +- for k, v := range i { //@rank(" {", "iter2", "iterNot"),rank(" {", "iter2", "iter0"),rank(" {", "iter2", "iter1") +- } +-} +diff -urN a/gopls/internal/test/marker/testdata/completion/rank.txt b/gopls/internal/test/marker/testdata/completion/rank.txt +--- a/gopls/internal/test/marker/testdata/completion/rank.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/completion/rank.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,212 +0,0 @@ +-This test checks various ranking of completion results. +- +--- flags -- +--ignore_extra_diags +- +--- settings.json -- +-{ +- "completeUnimported": false, +- "deepCompletion": false +-} +- +--- go.mod -- +-module golang.org/lsptests/rank +- +-go 1.18 +- +--- struct/struct_rank.go -- +-package struct_rank +- +-type foo struct { +- c int //@item(c_rank, "c", "int", "field") +- b int //@item(b_rank, "b", "int", "field") +- a int //@item(a_rank, "a", "int", "field") +-} +- +-func f() { +- foo := foo{} //@rank("}", c_rank, b_rank, a_rank) +-} +- +--- assign_rank.go -- +-package rank +- +-// Literal completion results. +-/* int() */ //@item(int, "int()", "int", "var") +-/* string() */ //@item(string, "string()", "string", "var") +- +-var ( +- apple int = 3 //@item(apple, "apple", "int", "var") +- pear string = "hello" //@item(pear, "pear", "string", "var") +-) +- +-func _() { +- orange := 1 //@item(orange, "orange", "int", "var") +- grape := "hello" //@item(grape, "grape", "string", "var") +- orange, grape = 2, "hello" //@complete(" \"", grape, pear, string, orange, apple) +-} +- +-func _() { +- var pineapple int //@item(pineapple, "pineapple", "int", "var") +- pineapple = 1 //@complete(" 1", pineapple, apple, int, pear) +- +- y := //@complete(" /", pineapple, apple, pear) +-} +- +--- binexpr_rank.go -- +-package rank +- +-func _() { +- _ = 5 + ; //@complete(" ;", apple, pear) +- y := + 5; //@complete(" +", apple, pear) +- +- if 6 == {} //@complete(" {", apple, pear) +-} +- +--- boolexpr_rank.go -- +-package rank +- +-func _() { +- someRandomBoolFunc := func() bool { //@item(boolExprFunc, "someRandomBoolFunc", "func() bool", "var") +- return true +- } +- +- var foo, bar int //@item(boolExprBar, "bar", "int", "var") +- if foo == 123 && b { //@rank(" {", boolExprBar, boolExprFunc) +- } +-} +- +--- convert_rank.go -- +-package rank +- +-import "time" +- +-// Copied from the old builtins.go, which has been ported to the new marker tests. 
+-/* complex(r float64, i float64) */ //@item(complex, "complex", "func(r float64, i float64) complex128", "func") +- +-func _() { +- type strList []string +- wantsStrList := func(strList) {} +- +- var ( +- convA string //@item(convertA, "convA", "string", "var") +- convB []string //@item(convertB, "convB", "[]string", "var") +- ) +- wantsStrList(strList(conv)) //@complete("))", convertB, convertA) +-} +- +-func _() { +- type myInt int +- +- const ( +- convC = "hi" //@item(convertC, "convC", "string", "const") +- convD = 123 //@item(convertD, "convD", "int", "const") +- convE int = 123 //@item(convertE, "convE", "int", "const") +- convF string = "there" //@item(convertF, "convF", "string", "const") +- convG myInt = 123 //@item(convertG, "convG", "myInt", "const") +- ) +- +- var foo int +- foo = conv //@rank(" //", convertE, convertD) +- +- var mi myInt +- mi = conv //@rank(" //", convertG, convertD, convertE) +- mi + conv //@rank(" //", convertG, convertD, convertE) +- +- 1 + conv //@rank(" //", convertD, convertC),rank(" //", convertE, convertC),rank(" //", convertG, convertC) +- +- type myString string +- var ms myString +- ms = conv //@rank(" //", convertC, convertF) +- +- type myUint uint32 +- var mu myUint +- mu = conv //@rank(" //", convertD, convertE) +- +- // don't downrank constants when assigning to any +- var _ any = c //@rank(" //", convertD, complex) +- +- var _ time.Duration = conv //@rank(" //", convertD, convertE),snippet(" //", convertE, "time.Duration(convE)") +- +- var convP myInt //@item(convertP, "convP", "myInt", "var") +- var _ *int = conv //@snippet(" //", convertP, "(*int)(&convP)") +- +- var ff float64 //@item(convertFloat, "ff", "float64", "var") +- f == convD //@snippet(" =", convertFloat, "ff") +-} +- +--- switch_rank.go -- +-package rank +- +-import "time" +- +-func _() { +- switch pear { +- case _: //@rank("_", pear, apple) +- } +- +- time.Monday //@item(timeMonday, "time.Monday", "time.Weekday", "const"),item(monday ,"Monday", "time.Weekday", "const") +- time.Friday //@item(timeFriday, "time.Friday", "time.Weekday", "const"),item(friday ,"Friday", "time.Weekday", "const") +- +- now := time.Now() +- now.Weekday //@item(nowWeekday, "now.Weekday", "func() time.Weekday", "method") +- +- then := time.Now() +- then.Weekday //@item(thenWeekday, "then.Weekday", "func() time.Weekday", "method") +- +- switch time.Weekday(0) { +- case time.Monday, time.Tuesday: +- case time.Wednesday, time.Thursday: +- case time.Saturday, time.Sunday: +- // TODO: these tests were disabled because they require deep completion +- // (which would break other tests) +- case t: // rank(":", timeFriday, timeMonday) +- case time.: //@rank(":", friday, monday) +- +- case now.Weekday(): +- case week: // rank(":", thenWeekday, nowWeekday) +- } +-} +- +--- type_assert_rank.go -- +-package rank +- +-func _() { +- type flower int //@item(flower, "flower", "int", "type") +- var fig string //@item(fig, "fig", "string", "var") +- +- _ = interface{}(nil).(f) //@complete(") //", flower) +-} +- +--- type_switch_rank.go -- +-package rank +- +-import ( +- "fmt" +- "go/ast" +-) +- +-func _() { +- type basket int //@item(basket, "basket", "int", "type") +- var banana string //@item(banana, "banana", "string", "var") +- +- switch interface{}(pear).(type) { +- case b: //@complete(":", basket) +- b //@complete(" //", banana, basket) +- } +- +- Ident //@item(astIdent, "Ident", "struct{...}", "struct") +- IfStmt //@item(astIfStmt, "IfStmt", "struct{...}", "struct") +- +- switch ast.Node(nil).(type) { +- case 
*ast.Ident: +- case *ast.I: //@rank(":", astIfStmt, astIdent) +- } +- +- Stringer //@item(fmtStringer, "Stringer", "interface{...}", "interface") +- GoStringer //@item(fmtGoStringer, "GoStringer", "interface{...}", "interface") +- +- switch interface{}(nil).(type) { +- case fmt.Stringer: //@rank(":", fmtStringer, fmtGoStringer) +- } +-} +- +diff -urN a/gopls/internal/test/marker/testdata/completion/snippet_placeholder.txt b/gopls/internal/test/marker/testdata/completion/snippet_placeholder.txt +--- a/gopls/internal/test/marker/testdata/completion/snippet_placeholder.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/completion/snippet_placeholder.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,82 +0,0 @@ +-This test checks basic completion snippet support, using placeholders. +- +-Unlike the old marker tests, the new marker tests assume static configuration +-(as defined by settings.json), and therefore there is duplication between this +-test and snippet.txt. This is a price we pay so that we don't have to mutate +-the server during testing. +- +--- flags -- +--ignore_extra_diags +- +--- settings.json -- +-{ +- "usePlaceholders": true +-} +- +--- go.mod -- +-module golang.org/lsptests/snippet +- +--- snippet.go -- +-package snippets +- +-// Pre-set this marker, as we don't have a "source" for it in this package. +-/* Error() */ //@item(Error, "Error", "func() string", "method") +- +-type AliasType = int //@item(sigAliasType, "AliasType", "AliasType", "type") +- +-func foo(i int, b bool) {} //@item(snipFoo, "foo", "func(i int, b bool)", "func") +-func bar(fn func()) func() {} //@item(snipBar, "bar", "func(fn func())", "func") +-func baz(at AliasType, b bool) {} //@item(snipBaz, "baz", "func(at AliasType, b bool)", "func") +- +-type Foo struct { +- Bar int //@item(snipFieldBar, "Bar", "int", "field") +- Func func(at AliasType) error //@item(snipFieldFunc, "Func", "func(at AliasType) error", "field") +-} +- +-func (Foo) Baz() func() {} //@item(snipMethodBaz, "Baz", "func() func()", "method") +-func (Foo) BazBar() func() {} //@item(snipMethodBazBar, "BazBar", "func() func()", "method") +-func (Foo) BazBaz(at AliasType) func() {} //@item(snipMethodBazBaz, "BazBaz", "func(at AliasType) func()", "method") +- +-func _() { +- f //@snippet(" //", snipFoo, "foo(${1:i int}, ${2:b bool})") +- +- bar //@snippet(" //", snipBar, "bar(${1:fn func()})") +- +- baz //@snippet(" //", snipBaz, "baz(${1:at AliasType}, ${2:b bool})") +- +- bar(nil) //@snippet("(", snipBar, "bar") +- bar(ba) //@snippet(")", snipBar, "bar(${1:fn func()})") +- var f Foo +- bar(f.Ba) //@snippet(")", snipMethodBaz, "Baz()") +- (bar)(nil) //@snippet(")", snipBar, "bar(${1:fn func()})") +- (f.Ba)() //@snippet(")", snipMethodBaz, "Baz()") +- +- Foo{ +- B //@snippet(" //", snipFieldBar, "Bar: ${1:int},") +- } +- +- Foo{ +- F //@snippet(" //", snipFieldFunc, "Func: ${1:func(at AliasType) error},") +- } +- +- Foo{B} //@snippet("}", snipFieldBar, "Bar: ${1:int}") +- Foo{} //@snippet("}", snipFieldBar, "Bar: ${1:int}") +- +- Foo{Foo{}.B} //@snippet("} ", snipFieldBar, "Bar") +- +- var err error +- err.Error() //@snippet("E", Error, "Error()") +- f.Baz() //@snippet("B", snipMethodBaz, "Baz()") +- +- f.Baz() //@snippet("(", snipMethodBazBar, "BazBar") +- +- f.Baz() //@snippet("B", snipMethodBazBaz, "BazBaz(${1:at AliasType})") +-} +- +-func _() { +- type bar struct { +- a int +- b float64 //@item(snipBarB, "b", "field") +- } +- bar{b} //@snippet("}", snipBarB, "b: ${1:float64}") +-} +diff -urN 
a/gopls/internal/test/marker/testdata/completion/snippet.txt b/gopls/internal/test/marker/testdata/completion/snippet.txt +--- a/gopls/internal/test/marker/testdata/completion/snippet.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/completion/snippet.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,76 +0,0 @@ +-This test checks basic completion snippet support. +- +--- flags -- +--ignore_extra_diags +- +--- go.mod -- +-module golang.org/lsptests/snippet +- +--- snippet.go -- +-package snippets +- +-// Pre-set this marker, as we don't have a "source" for it in this package. +-// The comment is used to create a synthetic completion item. +-// +-// TODO(rfindley): allow completion markers to refer to ad-hoc items inline, +-// without this trick. +-/* Error() */ //@item(Error, "Error", "func() string", "method") +- +-type AliasType = int //@item(sigAliasType, "AliasType", "AliasType", "type") +- +-func foo(i int, b bool) {} //@item(snipFoo, "foo", "func(i int, b bool)", "func") +-func bar(fn func()) func() {} //@item(snipBar, "bar", "func(fn func())", "func") +-func baz(at AliasType, b bool) {} //@item(snipBaz, "baz", "func(at AliasType, b bool)", "func") +- +-type Foo struct { +- Bar int //@item(snipFieldBar, "Bar", "int", "field") +- Func func(at AliasType) error //@item(snipFieldFunc, "Func", "func(at AliasType) error", "field") +-} +- +-func (Foo) Baz() func() {} //@item(snipMethodBaz, "Baz", "func() func()", "method") +-func (Foo) BazBar() func() {} //@item(snipMethodBazBar, "BazBar", "func() func()", "method") +-func (Foo) BazBaz(at AliasType) func() {} //@item(snipMethodBazBaz, "BazBaz", "func(at AliasType) func()", "method") +- +-func _() { +- f //@snippet(" //", snipFoo, "foo(${1:})") +- +- bar //@snippet(" //", snipBar, "bar(${1:})") +- +- baz() //@snippet("(", snipBaz, "baz") +- +- bar(nil) //@snippet("(", snipBar, "bar") +- bar(ba) //@snippet(")", snipBar, "bar(${1:})") +- var f Foo +- bar(f.Ba) //@snippet(")", snipMethodBaz, "Baz()") +- (bar)(nil) //@snippet(")", snipBar, "bar(${1:})") +- (f.Ba)() //@snippet(")", snipMethodBaz, "Baz()") +- +- Foo{ +- B //@snippet(" //", snipFieldBar, "Bar: ${1:},") +- } +- +- Foo{ +- F //@snippet(" //", snipFieldFunc, "Func: ${1:},") +- } +- +- Foo{B} //@snippet("}", snipFieldBar, "Bar: ${1:}") +- Foo{} //@snippet("}", snipFieldBar, "Bar: ${1:}") +- +- Foo{Foo{}.B} //@snippet("} ", snipFieldBar, "Bar") +- +- var err error +- err.Error() //@snippet("E", Error, "Error()") +- f.Baz() //@snippet("B", snipMethodBaz, "Baz()") +- +- f.Baz() //@snippet("(", snipMethodBazBar, "BazBar") +- +- f.Baz() //@snippet("B", snipMethodBazBaz, "BazBaz(${1:})") +-} +- +-func _() { +- type bar struct { +- a int +- b float64 //@item(snipBarB, "b", "float64") +- } +- bar{b} //@snippet("}", snipBarB, "b: ${1:}") +-} +diff -urN a/gopls/internal/test/marker/testdata/completion/statements.txt b/gopls/internal/test/marker/testdata/completion/statements.txt +--- a/gopls/internal/test/marker/testdata/completion/statements.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/completion/statements.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,155 +0,0 @@ +-This test exercises completion around various statements. 
+- +--- flags -- +--ignore_extra_diags +- +--- settings.json -- +-{ +- "usePlaceholders": true +-} +- +--- go.mod -- +-module golang.org/lsptests/statements +- +--- append.go -- +-package statements +- +-func _() { +- type mySlice []int +- +- var ( +- abc []int //@item(stmtABC, "abc", "[]int", "var") +- abcdef mySlice //@item(stmtABCDEF, "abcdef", "mySlice", "var") +- ) +- +- /* abcdef = append(abcdef, ) */ //@item(stmtABCDEFAssignAppend, "abcdef = append(abcdef, )", "", "func") +- +- // don't offer "abc = append(abc, )" because "abc" isn't necessarily +- // better than "abcdef". +- abc //@complete(" //", stmtABC, stmtABCDEF) +- +- abcdef //@complete(" //", stmtABCDEF, stmtABCDEFAssignAppend) +- +- /* append(abc, ) */ //@item(stmtABCAppend, "append(abc, )", "", "func") +- +- abc = app //@snippet(" //", stmtABCAppend, "append(abc, ${1:})") +-} +- +-func _() { +- var s struct{ xyz []int } +- +- /* xyz = append(s.xyz, ) */ //@item(stmtXYZAppend, "xyz = append(s.xyz, )", "", "func") +- +- s.x //@snippet(" //", stmtXYZAppend, "xyz = append(s.xyz, ${1:})") +- +- /* s.xyz = append(s.xyz, ) */ //@item(stmtDeepXYZAppend, "s.xyz = append(s.xyz, )", "", "func") +- +- sx //@snippet(" //", stmtDeepXYZAppend, "s.xyz = append(s.xyz, ${1:})") +-} +- +-func _() { +- var foo [][]int +- +- /* append(foo[0], ) */ //@item(stmtFooAppend, "append(foo[0], )", "", "func") +- +- foo[0] = app //@complete(" //", stmtFooAppend),snippet(" //", stmtFooAppend, "append(foo[0], ${1:})") +-} +- +--- if_err_check_return.go -- +-package statements +- +-import ( +- "bytes" +- "io" +- "os" +-) +- +-func one() (int, float32, io.Writer, *int, []int, bytes.Buffer, error) { +- /* if err != nil { return err } */ //@item(stmtOneIfErrReturn, "if err != nil { return err }", "", "") +- /* err != nil { return err } */ //@item(stmtOneErrReturn, "err != nil { return err }", "", "") +- +- _, err := os.Open("foo") +- //@snippet("", stmtOneIfErrReturn, "if err != nil {\n\treturn 0, 0, nil, nil, nil, bytes.Buffer{\\}, ${1:err}\n\\}") +- +- _, err = os.Open("foo") +- i //@snippet(" //", stmtOneIfErrReturn, "if err != nil {\n\treturn 0, 0, nil, nil, nil, bytes.Buffer{\\}, ${1:err}\n\\}") +- +- _, err = os.Open("foo") +- if er //@snippet(" //", stmtOneErrReturn, "err != nil {\n\treturn 0, 0, nil, nil, nil, bytes.Buffer{\\}, ${1:err}\n\\}") +- +- _, err = os.Open("foo") +- if //@snippet(" //", stmtOneIfErrReturn, "if err != nil {\n\treturn 0, 0, nil, nil, nil, bytes.Buffer{\\}, ${1:err}\n\\}") +- +- _, err = os.Open("foo") +- if //@snippet("//", stmtOneIfErrReturn, "if err != nil {\n\treturn 0, 0, nil, nil, nil, bytes.Buffer{\\}, ${1:err}\n\\}") +-} +- +--- if_err_check_return2.go -- +-package statements +- +-import "os" +- +-func two() error { +- var s struct{ err error } +- +- /* if s.err != nil { return s.err } */ //@item(stmtTwoIfErrReturn, "if s.err != nil { return s.err }", "", "") +- +- _, s.err = os.Open("foo") +- //@snippet("", stmtTwoIfErrReturn, "if s.err != nil {\n\treturn ${1:s.err}\n\\}") +-} +- +--- if_err_check_return3.go -- +-package statements +- +-import "os" +- +-// Check that completion logic handles an invalid return type. 
+-func badReturn() (NotAType, error) { +- _, err := os.Open("foo") +- //@snippet("", stmtOneIfErrReturn, "if err != nil {\n\treturn , ${1:err}\n\\}") +- +- _, err = os.Open("foo") +- if er //@snippet(" //", stmtOneErrReturn, "err != nil {\n\treturn , ${1:err}\n\\}") +-} +- +--- if_err_check_test.go -- +-package statements +- +-import ( +- "os" +- "testing" +-) +- +-func TestErr(t *testing.T) { +- /* if err != nil { t.Fatal(err) } */ //@item(stmtOneIfErrTFatal, "if err != nil { t.Fatal(err) }", "", "") +- +- _, err := os.Open("foo") +- //@snippet("", stmtOneIfErrTFatal, "if err != nil {\n\tt.Fatal(err)\n\\}") +-} +- +-func BenchmarkErr(b *testing.B) { +- /* if err != nil { b.Fatal(err) } */ //@item(stmtOneIfErrBFatal, "if err != nil { b.Fatal(err) }", "", "") +- +- _, err := os.Open("foo") +- //@snippet("", stmtOneIfErrBFatal, "if err != nil {\n\tb.Fatal(err)\n\\}") +-} +- +--- return.go -- +-package statements +- +-//@item(stmtReturnZeroValues, `return 0, "", nil`) +- +-func foo() (int, string, error) { +- ret //@snippet(" ", stmtReturnZeroValues, "return ${1:0}, ${2:\"\"}, ${3:nil}") +-} +- +-func bar() (int, string, error) { +- return //@snippet(" ", stmtReturnZeroValues, "return ${1:0}, ${2:\"\"}, ${3:nil}") +-} +- +- +-//@item(stmtReturnInvalidValues, `return `) +- +-func invalidReturnStatement() NotAType { +- return //@snippet(" ", stmtReturnInvalidValues, "return ${1:}") +-} +diff -urN a/gopls/internal/test/marker/testdata/completion/testy.txt b/gopls/internal/test/marker/testdata/completion/testy.txt +--- a/gopls/internal/test/marker/testdata/completion/testy.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/completion/testy.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,61 +0,0 @@ +- +--- flags -- +--ignore_extra_diags +- +--- go.mod -- +-module testy.test +- +-go 1.18 +- +--- types/types.go -- +-package types +- +- +--- signature/signature.go -- +-package signature +- +-type Alias = int +- +--- snippets/snippets.go -- +-package snippets +- +-import ( +- "testy.test/signature" +- t "testy.test/types" +-) +- +-func X(_ map[signature.Alias]t.CoolAlias) (map[signature.Alias]t.CoolAlias) { +- return nil +-} +- +--- testy/testy.go -- +-package testy +- +-func a() { //@item(funcA, "a", "func()", "func") +- //@complete("", funcA) +-} +- +- +--- testy/testy_test.go -- +-package testy +- +-import ( +- "testing" +- +- sig "testy.test/signature" +- "testy.test/snippets" +-) +- +-func TestSomething(t *testing.T) { //@item(TestSomething, "TestSomething(t *testing.T)", "", "func") +- var x int //@loc(testyX, "x"), diag("x", re"declared (and|but) not used") +- a() //@loc(testyA, "a") +-} +- +-func _() { +- _ = snippets.X(nil) //@signature("nil", "X(_ map[sig.Alias]types.CoolAlias) map[sig.Alias]types.CoolAlias", 0) +- var _ sig.Alias +-} +- +-func issue63578(err error) { +- err.Error() //@signature(")", "Error() string", -1) +-} +diff -urN a/gopls/internal/test/marker/testdata/completion/type_assert.txt b/gopls/internal/test/marker/testdata/completion/type_assert.txt +--- a/gopls/internal/test/marker/testdata/completion/type_assert.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/completion/type_assert.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,30 +0,0 @@ +-This test checks completion related to type assertions. 
+- +--- flags -- +--ignore_extra_diags +- +--- type_assert.go -- +-package typeassert +- +-type abc interface { //@item(abcIntf, "abc", "interface{...}", "interface") +- abc() +-} +- +-type abcImpl struct{} //@item(abcImpl, "abcImpl", "struct{...}", "struct") +-func (abcImpl) abc() +- +-type abcPtrImpl struct{} //@item(abcPtrImpl, "abcPtrImpl", "struct{...}", "struct") +-func (*abcPtrImpl) abc() +- +-type abcNotImpl struct{} //@item(abcNotImpl, "abcNotImpl", "struct{...}", "struct") +- +-func _() { +- var a abc +- switch a.(type) { +- case ab: //@complete(":", abcImpl, abcPtrImpl, abcIntf, abcNotImpl) +- case *ab: //@complete(":", abcImpl, abcPtrImpl, abcIntf, abcNotImpl) +- } +- +- a.(ab) //@complete(")", abcImpl, abcPtrImpl, abcIntf, abcNotImpl) +- a.(*ab) //@complete(")", abcImpl, abcPtrImpl, abcIntf, abcNotImpl) +-} +diff -urN a/gopls/internal/test/marker/testdata/completion/type_mods.txt b/gopls/internal/test/marker/testdata/completion/type_mods.txt +--- a/gopls/internal/test/marker/testdata/completion/type_mods.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/completion/type_mods.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,27 +0,0 @@ +-This test check completion snippets with type modifiers. +- +--- flags -- +--ignore_extra_diags +- +--- typemods.go -- +-package typemods +- +-func fooFunc() func() int { +- return func() int { +- return 0 +- } +-} +- +-func fooPtr() *int { +- return nil +-} +- +-func _() { +- var _ int = foo //@snippet(" //", "fooFunc", "fooFunc()()"),snippet(" //", "fooPtr", "*fooPtr()") +-} +- +-func _() { +- var m map[int][]chan int +- +- var _ int = m //@snippet(" //", "m", "<-m[${1:}][${2:}]") +-} +diff -urN a/gopls/internal/test/marker/testdata/completion/type_params_reverse_infer.txt b/gopls/internal/test/marker/testdata/completion/type_params_reverse_infer.txt +--- a/gopls/internal/test/marker/testdata/completion/type_params_reverse_infer.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/completion/type_params_reverse_infer.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,78 +0,0 @@ +--- flags -- +--ignore_extra_diags +- +--- declarations.go -- +-package x +- +-import ( +- "cmp" +- "io" +- "os" +-) +- +-var File *os.File +- +-func A[T cmp.Ordered](T) int { return 0 } +- +-func B[T comparable](T) int { return 0 } +- +-func C[T int | string](T) int { return 0 } +- +-func D[T io.Reader](T) int { return 0 } +- +--- a.go -- +-package x +- +-func _(i int) { +- i = A(File.Nam) //@acceptcompletion(re"Nam()", "Name", A) +-} +- +--- @A/a.go -- +-package x +- +-func _(i int) { +- i = A(File.Name()) //@acceptcompletion(re"Nam()", "Name", A) +-} +- +--- b.go -- +-package x +- +-func _(i int) { +- i = B(File.Nam) //@acceptcompletion(re"Nam()", "Name", B) +-} +- +--- @B/b.go -- +-package x +- +-func _(i int) { +- i = B(File.Name()) //@acceptcompletion(re"Nam()", "Name", B) +-} +- +--- c.go -- +-package x +- +-func _(i int) { +- i = C(File.Nam) //@acceptcompletion(re"Nam()", "Name", C) +-} +- +--- @C/c.go -- +-package x +- +-func _(i int) { +- i = C(File.Name()) //@acceptcompletion(re"Nam()", "Name", C) +-} +- +--- d.go -- +-package x +- +-func _(i int) { +- i = D(Fil) //@acceptcompletion(re"Fil()", "File", D) +-} +- +--- @D/d.go -- +-package x +- +-func _(i int) { +- i = D(File) //@acceptcompletion(re"Fil()", "File", D) +-} +- +diff -urN a/gopls/internal/test/marker/testdata/completion/type_params.txt b/gopls/internal/test/marker/testdata/completion/type_params.txt +--- 
a/gopls/internal/test/marker/testdata/completion/type_params.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/completion/type_params.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,72 +0,0 @@ +-This test checks various ranking of completion results related to type +-parameters. +- +--- flags -- +--ignore_extra_diags +- +--- type_params.go -- +-package typeparams +- +-// Copied from the old builtins.go, which has been ported to the new marker tests. +-/* string */ //@item(string, "string", "", "type") +-/* float32 */ //@item(float32, "float32", "", "type") +-/* float64 */ //@item(float64, "float64", "", "type") +-/* int */ //@item(int, "int", "", "type") +- +-func one[a int | string]() {} +-func two[a int | string, b float64 | int]() {} +-type three[a any] int +- +-func _() { +- one[]() //@rank("]", string, float64) +- two[]() //@rank("]", int, float64) +- two[int, f]() //@rank("]", float64, float32) +- int(three[]) //@rank("]") // must not crash (golang/go#70889) +-} +- +-func slices[a []int | []float64]() {} //@item(tpInts, "[]int", "[]int", "type"),item(tpFloats, "[]float64", "[]float64", "type") +- +-func _() { +- slices[]() //@rank("]", tpInts),rank("]", tpFloats) +-} +- +-type s[a int | string] struct{} +- +-func _() { +- s[]{} //@rank("]", int, float64) +-} +- +-func takesGeneric[a int | string](s[a]) { +- "s[a]{}" //@item(tpInScopeLit, "s[a]{}", "", "var") +- takesGeneric() //@rank(")", tpInScopeLit),snippet(")", tpInScopeLit, "s[a]{\\}") +-} +- +-func _() { +- s[int]{} //@item(tpInstLit, "s[int]{}", "", "var") +- takesGeneric[int]() //@rank(")", tpInstLit),snippet(")", tpInstLit, "s[int]{\\}") +- +- "s[...]{}" //@item(tpUninstLit, "s[...]{}", "", "var") +- takesGeneric() //@rank(")", tpUninstLit),snippet(")", tpUninstLit, "s[${1:}]{\\}") +-} +- +-func returnTP[A int | float64](a A) A { //@item(returnTP, "returnTP", "something", "func") +- return a +-} +- +-func _() { +- var _ int = returnTP //@snippet(" //", returnTP, "returnTP(${1:})") +- +- var aa int //@item(tpInt, "aa", "int", "var") +- var ab float64 //@item(tpFloat, "ab", "float64", "var") +- returnTP[int](a) //@rank(")", tpInt, tpFloat) +-} +- +-func takesFunc[T any](func(T) T) { +- var _ func(t T) T = f //@snippet(" //", tpLitFunc, "func(t T) T {$0\\}") +-} +- +-func _() { +- _ = "func(...) {}" //@item(tpLitFunc, "func(...) {}", "", "var") +- takesFunc() //@snippet(")", tpLitFunc, "func(${1:}) ${2:} {$0\\}") +- takesFunc[int]() //@snippet(")", tpLitFunc, "func(i int) int {$0\\}") +-} +diff -urN a/gopls/internal/test/marker/testdata/completion/unimported-std.txt b/gopls/internal/test/marker/testdata/completion/unimported-std.txt +--- a/gopls/internal/test/marker/testdata/completion/unimported-std.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/completion/unimported-std.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,49 +0,0 @@ +-Test of unimported completions respecting the effective Go version of the file. +- +-(See unprefixed file for same test of imported completions.) +- +-These symbols below were introduced to go/types in go1.22: +- +- Alias +- Info.FileVersions +- (Checker).PkgNameOf +- +-The underlying logic depends on versions.FileVersion, which only +-behaves correctly in go1.22. (When go1.22 is assured, we can remove +-the min_go flag but leave the test inputs unchanged.) 
+- +--- flags -- +--ignore_extra_diags +- +--- go.mod -- +-module example.com +- +-go 1.21 +- +--- a/a.go -- +-package a +- +-// package-level func +-var _ = types.Sat //@rank("Sat", "Satisfies") +-var _ = types.Ali //@rank("Ali", "!Alias") +- +-// (We don't offer completions of methods +-// of types from unimported packages, so the fact that +-// we don't implement std version filtering isn't evident.) +- +-// field +-var _ = new(types.Info).Use //@rank("Use", "!Uses") +-var _ = new(types.Info).Fil //@rank("Fil", "!FileVersions") +- +-// method +-var _ = new(types.Checker).Obje //@rank("Obje", "!ObjectOf") +-var _ = new(types.Checker).PkgN //@rank("PkgN", "!PkgNameOf") +- +--- b/b.go -- +-//go:build go1.22 +- +-package a +- +-// package-level decl +-var _ = types.Sat //@rank("Sat", "Satisfies") +-var _ = types.Ali //@rank("Ali", "Alias") +diff -urN a/gopls/internal/test/marker/testdata/completion/unimported.txt b/gopls/internal/test/marker/testdata/completion/unimported.txt +--- a/gopls/internal/test/marker/testdata/completion/unimported.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/completion/unimported.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,93 +0,0 @@ +- +--- flags -- +--ignore_extra_diags +- +--- settings.json -- +-{ +- "importsSource": "gopls" +-} +- +--- go.mod -- +-module unimported.test +- +-go 1.18 +- +--- unimported/export_test.go -- +-package unimported +- +-var TestExport int //@item(testexport, "TestExport", "var (from \"unimported.test/unimported\")", "var") +- +--- signature/signature.go -- +-package signature +- +-func Foo() {} +- +--- foo/foo.go -- +-package foo +- +-type StructFoo struct{ F int } +- +--- baz/baz.go -- +-package baz +- +-import ( +- f "unimported.test/foo" +-) +- +-var FooStruct f.StructFoo +- +--- unimported/unimported.go -- +-package unimported +- +-func _() { +- http //@complete("p", http, httptest, httptrace, httputil) +- // container/ring is extremely unlikely to be imported by anything, so shouldn't have type information. +- ring.Ring //@complete(re"R()ing", ringring) +- signature.Foo //@complete("Foo", signaturefoo) +- +- context.Bac //@complete(" //", contextBackground) +-} +- +-// Create markers for unimported std lib packages. Only for use by this test. +-/* http */ //@item(http, "http", "\"net/http\"", "package") +-/* httptest */ //@item(httptest, "httptest", "\"net/http/httptest\"", "package") +-/* httptrace */ //@item(httptrace, "httptrace", "\"net/http/httptrace\"", "package") +-/* httputil */ //@item(httputil, "httputil", "\"net/http/httputil\"", "package") +- +-/* ring.Ring */ //@item(ringring, "Ring", "type (from \"container/ring\")", "var") +- +-/* signature.Foo */ //@item(signaturefoo, "Foo", "func (from \"unimported.test/signature\")", "func") +- +-/* context.Background */ //@item(contextBackground, "Background", "func (from \"context\")", "func") +- +-// Now that we no longer type-check imported completions, +-// we don't expect the context.Background().Err method (see golang/go#58663). +-/* context.Background().Err */ //@item(contextBackgroundErr, "Background().Err", "func (from \"context\")", "method") +- +--- unimported/unimported_cand_type.go -- +-package unimported +- +-import ( +- _ "context" +- +- "unimported.test/baz" +-) +- +-func _() { +- foo.StructFoo{} //@item(litFooStructFoo, "foo.StructFoo{}", "struct{...}", "struct") +- +- // We get the literal completion for "foo.StructFoo{}" even though we haven't +- // imported "foo" yet. 
+- baz.FooStruct = f //@snippet(" //", litFooStructFoo, "foo.StructFoo{$0\\}") +-} +- +--- unimported/x_test.go -- +-package unimported_test +- +-import ( +- "testing" +-) +- +-func TestSomething(t *testing.T) { +- _ = unimported.TestExport //@complete("TestExport", testexport) +-} +diff -urN a/gopls/internal/test/marker/testdata/completion/unresolved.txt b/gopls/internal/test/marker/testdata/completion/unresolved.txt +--- a/gopls/internal/test/marker/testdata/completion/unresolved.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/completion/unresolved.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,16 +0,0 @@ +-This test verifies gopls does not crash on fake "resolved" types. +- +--- flags -- +--ignore_extra_diags +- +--- settings.json -- +-{ +- "completeUnimported": false +-} +- +--- unresolved.go -- +-package unresolved +- +-func foo(any) { +- foo(func(i, j f //@complete(" //") +-} +diff -urN a/gopls/internal/test/marker/testdata/completion/unsafe.txt b/gopls/internal/test/marker/testdata/completion/unsafe.txt +--- a/gopls/internal/test/marker/testdata/completion/unsafe.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/completion/unsafe.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,24 +0,0 @@ +-This test checks completion of symbols in the 'unsafe' package. +- +--- flags -- +--ignore_extra_diags +- +--- settings.json -- +-{ +- "matcher": "caseinsensitive" +-} +- +--- unsafe.go -- +-package unsafe +- +-import ( +- "unsafe" +-) +- +-// Pre-set this marker, as we don't have a "source" for it in this package. +-/* unsafe.Sizeof */ //@item(Sizeof, "Sizeof", "invalid type", "text") +- +-func _() { +- x := struct{}{} +- _ = unsafe.Sizeof(x) //@complete("z", Sizeof) +-} +diff -urN a/gopls/internal/test/marker/testdata/completion/variadic.txt b/gopls/internal/test/marker/testdata/completion/variadic.txt +--- a/gopls/internal/test/marker/testdata/completion/variadic.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/completion/variadic.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,67 +0,0 @@ +-This test checks completion related to variadic functions. +- +--- flags -- +--ignore_extra_diags +- +--- variadic.go -- +-package variadic +- +-func foo(i int, strs ...string) {} +- +-func bar() []string { //@item(vFunc, "bar", "func() []string", "func") +- return nil +-} +- +-func _() { +- var ( +- i int //@item(vInt, "i", "int", "var") +- s string //@item(vStr, "s", "string", "var") +- ss []string //@item(vStrSlice, "ss", "[]string", "var") +- v any //@item(vIntf, "v", "any", "var") +- ) +- +- foo() //@rank(")", vInt, vStr),rank(")", vInt, vStrSlice) +- foo(123, ) //@rank(")", vStr, vInt),rank(")", vStrSlice, vInt) +- foo(123, "", ) //@rank(")", vStr, vInt),rank(")", vStr, vStrSlice) +- foo(123, s, "") //@rank(", \"", vStr, vStrSlice) +- +- // snippet will add the "..." for you +- foo(123, ) //@snippet(")", vStrSlice, "ss..."),snippet(")", vFunc, "bar()..."),snippet(")", vStr, "s") +- +- // don't add "..." for any +- foo(123, ) //@snippet(")", vIntf, "v") +-} +- +-func qux(...func()) {} +-func f() {} //@item(vVarArg, "f", "func()", "func") +- +-func _() { +- qux(f) //@snippet(")", vVarArg, "f") +-} +- +-func _() { +- foo(0, []string{}...) 
//@complete(")") +-} +- +--- variadic_intf.go -- +-package variadic +- +-type baz interface { +- baz() +-} +- +-func wantsBaz(...baz) {} +- +-type bazImpl int +- +-func (bazImpl) baz() {} +- +-func _() { +- var ( +- impls []bazImpl //@item(vImplSlice, "impls", "[]bazImpl", "var") +- impl bazImpl //@item(vImpl, "impl", "bazImpl", "var") +- bazes []baz //@item(vIntfSlice, "bazes", "[]baz", "var") +- ) +- +- wantsBaz() //@rank(")", vImpl, vImplSlice),rank(")", vIntfSlice, vImplSlice) +-} +diff -urN a/gopls/internal/test/marker/testdata/configuration/static.txt b/gopls/internal/test/marker/testdata/configuration/static.txt +--- a/gopls/internal/test/marker/testdata/configuration/static.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/configuration/static.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,41 +0,0 @@ +-This test confirms that gopls honors configuration even if the client does not +-support dynamic configuration. +- +--- capabilities.json -- +-{ +- "configuration": false +-} +- +--- settings.json -- +-{ +- "usePlaceholders": true, +- "analyses": { +- "composites": false +- } +-} +- +--- go.mod -- +-module example.com/config +- +-go 1.18 +- +--- a/a.go -- +-package a +- +-import "example.com/config/b" +- +-func Identity[P ~int](p P) P { //@item(Identity, "Identity", "", "") +- return p +-} +- +-func _() { +- _ = b.B{2} +- _ = Identi //@snippet(" //", Identity, "Identity(${1:p P})"), diag("Ident", re"(undefined|undeclared)") +-} +- +--- b/b.go -- +-package b +- +-type B struct { +- F int +-} +diff -urN a/gopls/internal/test/marker/testdata/definition/asm.txt b/gopls/internal/test/marker/testdata/definition/asm.txt +--- a/gopls/internal/test/marker/testdata/definition/asm.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/definition/asm.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,36 +0,0 @@ +-This test exercises the Definition request in a Go assembly file. +- +-For now we support only references to package-level symbols defined in +-the same package or a dependency. +- +-Repeatedly jumping to Definition on ff ping-pongs between the Go and +-assembly declarations. +- +--- go.mod -- +-module example.com +-go 1.18 +- +--- a/a.go -- +-package a +- +-import _ "fmt" +-import _ "example.com/b" +- +-func ff() //@ loc(ffgo, "ff"), def("ff", ffasm) +- +-var _ = ff // pacify unusedfunc analyzer +- +--- a/asm.s -- +-// portable assembly +- +-TEXT ·ff(SB), $16 //@ loc(ffasm, "ff"), def("ff", ffgo) +- CALL example·com∕b·B //@ def("com", bB) +- JMP ·ff //@ def("ff", ffgo) +- JMP label //@ def("label", label) +-label: //@ loc(label,"label") +- RET +- +--- b/b.go -- +-package b +- +-func B() {} //@ loc(bB, "B") +diff -urN a/gopls/internal/test/marker/testdata/definition/branch_issue73797_go124.txt b/gopls/internal/test/marker/testdata/definition/branch_issue73797_go124.txt +--- a/gopls/internal/test/marker/testdata/definition/branch_issue73797_go124.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/definition/branch_issue73797_go124.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,25 +0,0 @@ +-This test checks the case of a definition operation on a "continue" with an invalid label. +-In gotip, the typechecker no longer associates the continue statement with its invalid label, +-so this test case should only be run for go1.24 or earlier. 
+-See the related change in go/types: https://go-review.git.corp.google.com/c/go/+/638257 +- +--- flags -- +--max_go=go1.24 +--ignore_extra_diags +- +--- go.mod -- +-module mod.com +- +-go 1.18 +- +--- a/a.go -- +-package a +- +-func InvalidLabel() { +- label: +- for i := 0; i < 10; i++ { +- } +- for i := 0; i < 10; i++ { +- continue label //@def("continue") +- } +-} +diff -urN a/gopls/internal/test/marker/testdata/definition/branch_issue73797.txt b/gopls/internal/test/marker/testdata/definition/branch_issue73797.txt +--- a/gopls/internal/test/marker/testdata/definition/branch_issue73797.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/definition/branch_issue73797.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,24 +0,0 @@ +-This test checks the case of a definition operation on a "continue" with an invalid label. +-In gotip, the typechecker no longer associates the continue statement with its invalid label, +-so this test case should only be run for go1.24 or earlier. +-See the related change in go/types: https://go-review.git.corp.google.com/c/go/+/638257 +- +--- flags -- +--ignore_extra_diags +- +--- go.mod -- +-module mod.com +- +-go 1.18 +- +--- a/a.go -- +-package a +- +-func InvalidLabel() { +- label: +- for i := 0; i < 10; i++ { +- } +- for i := 0; i < 10; i++ { //@loc(for, "for") +- continue label //@def("continue", for) +- } +-} +diff -urN a/gopls/internal/test/marker/testdata/definition/branch.txt b/gopls/internal/test/marker/testdata/definition/branch.txt +--- a/gopls/internal/test/marker/testdata/definition/branch.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/definition/branch.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,176 +0,0 @@ +-This test checks definition operations in branch statements break, goto and continue. +- +-We suppress staticheck since it also gives a diagnostic +-about the break being ineffective. 
+- +--- settings.json -- +-{ +- "staticcheck": false +-} +- +--- go.mod -- +-module mod.com +- +-go 1.18 +- +--- a/a.go -- +-package a +- +-import "log" +- +-func BreakLoop() { +- for i := 0; i < 10; i++ { +- if i > 6 { +- break //@def("break", rbrace1) +- } +- } //@loc(rbrace1, `}`) +-} +- +-func BreakNestedLoop() { +- for i := 0; i < 10; i++ { +- for j := 0; j < 5; j++ { +- if j > 1 { +- break //@def("break", rbrace2) +- } +- } //@loc(rbrace2, `}`) +- } +-} +- +-func BreakNestedLoopWithLabel() { +- Outer: +- for i := 0; i < 10; i++ { +- for j := 0; j < 5; j++ { +- if j > 1 { +- break Outer//@def("break", outerparen) +- } +- } +- } //@loc(outerparen, `}`) +-} +- +-func BreakSwitch(i int) { +- switch i { +- case 1: +- break //@def("break", rbrace4) +- case 2: +- log.Printf("2") +- case 3: +- log.Printf("3") +- } //@loc(rbrace4, `}`) +-} +- +-func BreakSwitchLabel(i int) { +-loop: +- for { +- switch i { +- case 1: +- break loop //@def("break", loopparen) +- case 2: +- log.Printf("2") +- case 3: +- continue loop +- } +- } //@loc(loopparen, `}`) +-} +- +-func BreakSelect(c, quit chan int) { +- x, y := 0, 1 +- for { +- select { +- case c <- x: +- x, y = y, x+y +- break //@def("break", rbrace5) +- case <-quit: +- log.Println("quit") +- return +- } //@loc(rbrace5, `}`) +- } +-} +- +-func BreakWithContinue() { +- for j := 0; j < 5; j++ { +- if (j < 4) { +- continue +- } +- break //@def("break", rbrace6) +- } //@loc(rbrace6, `}`) +-} +- +-func GotoNestedLoop() { +- Outer: //@loc(outer, "Outer") +- for i := 0; i < 10; i++ { +- for j := 0; j < 5; j++ { +- if (j > 1) { +- goto Outer//@def("goto", outer) +- } +- } +- } +-} +- +-func ContinueLoop() { +- for j := 0; j < 5; j++ { //@loc(for3, `for`) +- if (j < 4) { +- continue //@def("continue", for3) +- } +- break +- } +-} +- +-func ContinueDoubleLoop() { +- for i := 0; i < 10; i++ { //@loc(for4, `for`) +- for j := 0; j < 5; j++ { +- if (j > 1) { +- break +- } +- } +- if (i > 7) { +- continue//@def("continue", for4) +- } +- } +-} +- +-func BreakInBlockStmt() { +- for { +- if 0 < 10 { +- { +- break //@def("break", rbrace9) +- } +- } +- } //@loc(rbrace9, `}`) +-} +- +-func BreakInLabeledStmt() { +- outer: +- for { +- goto inner +- inner: +- break outer //@def("break", for5) +- } //@loc(for5, `}`) +-} +- +-func BreakToLabel(n int) { +- outer1: +- switch n { +- case 1: +- print("1") +- for i := 0; i < 10; i++ { +- if i > 3 { +- break outer1 //@def("break", outer1) +- } +- } +- } //@loc(outer1, "}") +-} +- +-func ContinueToLabel(n int) { +- outer1: +- for { //@loc(outer2, "for") +- switch n { +- case 1: +- print("1") +- for i := 0; i < 10; i++ { +- if i > 3 { +- continue outer1 //@def("continue", outer2) +- } +- } +- } +- } +-} +diff -urN a/gopls/internal/test/marker/testdata/definition/cgo.txt b/gopls/internal/test/marker/testdata/definition/cgo.txt +--- a/gopls/internal/test/marker/testdata/definition/cgo.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/definition/cgo.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,66 +0,0 @@ +-This test is ported from the old marker tests. +-It tests hover and definition for cgo declarations. 
+- +--- flags -- +--cgo +- +--- go.mod -- +-module cgo.test +- +-go 1.18 +- +--- cgo/cgo.go -- +-package cgo +- +-/* +-#include <stdio.h> +-#include <stdlib.h> +- +-void myprint(char* s) { +- printf("%s\n", s); +-} +-*/ +-import "C" +- +-import ( +- "fmt" +- "unsafe" +-) +- +-func Example() { //@loc(cgoexample, "Example"), item(cgoexampleItem, "Example", "func()", "func") +- fmt.Println() +- cs := C.CString("Hello from stdio\n") +- C.myprint(cs) +- C.free(unsafe.Pointer(cs)) +-} +- +-func _() { +- Example() //@hover("ample", "Example", hoverExample), def("ample", cgoexample), complete("ample", cgoexampleItem) +-} +- +--- @hoverExample -- +-```go +-func Example() +-``` +- +---- +- +-[`cgo.Example` on pkg.go.dev](https://pkg.go.dev/cgo.test/cgo#Example) +--- usecgo/usecgo.go -- +-package cgoimport +- +-import ( +- "cgo.test/cgo" +-) +- +-func _() { +- cgo.Example() //@hover("ample", "Example", hoverImportedExample), def("ample", cgoexample), complete("ample", cgoexampleItem) +-} +--- @hoverImportedExample -- +-```go +-func cgo.Example() +-``` +- +---- +- +-[`cgo.Example` on pkg.go.dev](https://pkg.go.dev/cgo.test/cgo#Example) +diff -urN a/gopls/internal/test/marker/testdata/definition/comment.txt b/gopls/internal/test/marker/testdata/definition/comment.txt +--- a/gopls/internal/test/marker/testdata/definition/comment.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/definition/comment.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,52 +0,0 @@ +-This test executes definition requests over doc links. +- +--- go.mod -- +-module mod.com +- +-go 1.19 +- +--- path/path.go -- +-package path +- +-func Join() //@loc(Join, "Join") +- +--- a.go -- +-package p +- +-import "strconv" //@loc(strconv, `"strconv"`) +-import pathpkg "mod.com/path" +-import _ "unsafe" +- +-const NumberBase = 10 //@loc(NumberBase, "NumberBase") +- +-// [Conv] converts s to an int. //@def("Conv", Conv) +-func Conv(s string) int { //@loc(Conv, "Conv") +- // [strconv.ParseInt] parses s and returns the integer corresponding to it. //@def("strconv", strconv) +- // [NumberBase] is the base to use for number parsing. //@def("NumberBase", NumberBase) +- i, _ := strconv.ParseInt(s, NumberBase, 64) +- return int(i) +-} +- +-type T struct { +- Field int //@ loc(Field, "Field") +-} +- +-func (T) Method() {} //@ loc(Method, "Method") +- +-// The declared and imported names of the package both work: +-// [path.Join] //@ def("Join", Join) +-// [pathpkg.Join] //@ def("Join", Join) +-// +-// Also, both [T.Field] and //@ def("Field", Field) +-// [T.Method] are supported. //@ def("Method", Method) +-func _() { +- pathpkg.Join() +-} +- +-// Built-in and unsafe symbols work too. +-// [unsafe.Pointer] //@def("Pointer", UNSAFE) +-// [unsafe.Slice] //@def("Slice", UNSAFE) +-// [int] //@def("int", BUILTIN) +-// [error] //@def("error", BUILTIN) +-// [error.Error] //@def("Error", BUILTIN) +-func _() +diff -urN a/gopls/internal/test/marker/testdata/definition/embed.txt b/gopls/internal/test/marker/testdata/definition/embed.txt +--- a/gopls/internal/test/marker/testdata/definition/embed.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/definition/embed.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,336 +0,0 @@ +-This test checks definition and hover operations over embedded fields and methods. +- +-Its size expectations assume a 64-bit machine, +-and correct sizes information requires go1.21. 
+- +--- flags -- +--skip_goarch=386,arm +- +--- go.mod -- +-module mod.com +- +-go 1.18 +- +--- a/a.go -- +-package a +- +-type A string //@loc(AString, "A") +- +-func (_ A) Hi() {} //@loc(AHi, "Hi") +- +-type S struct { +- Field int //@loc(SField, "Field") +- R // embed a struct +- H // embed an interface +-} +- +-type R struct { +- Field2 int //@loc(RField2, "Field2") +-} +- +-func (r R) Hey() {} //@loc(RHey, "Hey") +- +-type H interface { //@loc(H, "H") +- Goodbye() //@loc(HGoodbye, "Goodbye") +-} +- +-type I interface { //@loc(I, "I") +- B() //@loc(IB, "B") +- J +-} +- +-type J interface { //@loc(J, "J") +- Hello() //@loc(JHello, "Hello") +-} +- +--- b/b.go -- +-package b +- +-import "mod.com/a" //@loc(AImport, re"\"[^\"]*\"") +- +-type embed struct { +- F int //@loc(F, "F") +-} +- +-func (embed) M() //@loc(M, "M") +- +-type Embed struct { +- embed +- *a.A +- a.I +- a.S +-} +- +-func _() { +- e := Embed{} +- e.Hi() //@def("Hi", AHi),hover("Hi", "Hi", AHi) +- e.B() //@def("B", IB),hover("B", "B", IB) +- _ = e.Field //@def("Field", SField),hover("Field", "Field", SField) +- _ = e.Field2 //@def("Field2", RField2),hover("Field2", "Field2", RField2) +- e.Hello() //@def("Hello", JHello),hover("Hello", "Hello",JHello) +- e.Hey() //@def("Hey", RHey),hover("Hey", "Hey", RHey) +- e.Goodbye() //@def("Goodbye", HGoodbye),hover("Goodbye", "Goodbye", HGoodbye) +- e.M() //@def("M", M),hover("M", "M", M) +- _ = e.F //@def("F", F),hover("F", "F", F) +-} +- +-type aAlias = a.A //@loc(aAlias, "aAlias") +- +-type S1 struct { //@loc(S1, "S1") +- F1 int //@loc(S1F1, "F1") +- S2 //@loc(S1S2, "S2"),def("S2", S2),hover("S2", "S2", S2) +- a.A //@def("A", AString),hover("A", "A", aA) +- aAlias //@def("a", aAlias),hover("a", "aAlias", aAlias) +-} +- +-type S2 struct { //@loc(S2, "S2") +- F1 string //@loc(S2F1, "F1") +- F2 int //@loc(S2F2, "F2") +- *a.A //@def("A", AString),def("a",AImport) +-} +- +-type S3 struct { +- F1 struct { +- a.A //@def("A", AString) +- } +-} +- +-func Bar() { +- var x S1 //@def("S1", S1),hover("S1", "S1", S1) +- _ = x.S2 //@def("S2", S1S2),hover("S2", "S2", S1S2) +- _ = x.F1 //@def("F1", S1F1),hover("F1", "F1", S1F1) +- _ = x.F2 //@def("F2", S2F2),hover("F2", "F2", S2F2) +- _ = x.S2.F1 //@def("F1", S2F1),hover("F1", "F1", S2F1) +-} +- +--- b/c.go -- +-package b +- +-var _ = S1{ //@def("S1", S1),hover("S1", "S1", S1) +- F1: 99, //@def("F1", S1F1),hover("F1", "F1", S1F1) +-} +- +--- @AHi -- +-```go +-func (a.A) Hi() +-``` +- +---- +- +-[`(a.A).Hi` on pkg.go.dev](https://pkg.go.dev/mod.com/a#A.Hi) +--- @F -- +-```go +-field F int // through embed +-``` +- +---- +- +-@loc(F, "F") +- +- +---- +- +-[`(b.Embed).F` on pkg.go.dev](https://pkg.go.dev/mod.com/b#Embed.F) +--- @HGoodbye -- +-```go +-func (a.H) Goodbye() +-``` +- +---- +- +-@loc(HGoodbye, "Goodbye") +- +- +---- +- +-[`(a.H).Goodbye` on pkg.go.dev](https://pkg.go.dev/mod.com/a#H.Goodbye) +--- @IB -- +-```go +-func (a.I) B() +-``` +- +---- +- +-@loc(IB, "B") +- +- +---- +- +-[`(a.I).B` on pkg.go.dev](https://pkg.go.dev/mod.com/a#I.B) +--- @JHello -- +-```go +-func (a.J) Hello() +-``` +- +---- +- +-@loc(JHello, "Hello") +- +- +---- +- +-[`(a.J).Hello` on pkg.go.dev](https://pkg.go.dev/mod.com/a#J.Hello) +--- @M -- +-```go +-func (embed) M() +-``` +- +---- +- +-[`(b.Embed).M` on pkg.go.dev](https://pkg.go.dev/mod.com/b#Embed.M) +--- @RField2 -- +-```go +-field Field2 int // through S, R +-``` +- +---- +- +-@loc(RField2, "Field2") +- +- +---- +- +-[`(a.R).Field2` on pkg.go.dev](https://pkg.go.dev/mod.com/a#R.Field2) +--- @RHey -- +-```go 
+-func (r a.R) Hey() +-``` +- +---- +- +-[`(a.R).Hey` on pkg.go.dev](https://pkg.go.dev/mod.com/a#R.Hey) +--- @S1 -- +-```go +-type S1 struct { +- F1 int //@loc(S1F1, "F1") +- S2 //@loc(S1S2, "S2"),def("S2", S2),hover("S2", "S2", S2) +- a.A //@def("A", AString),hover("A", "A", aA) +- aAlias //@def("a", aAlias),hover("a", "aAlias", aAlias) +-} +-``` +- +---- +- +-```go +-// Embedded fields: +-F2 int // through S2 +-``` +- +---- +- +-[`b.S1` on pkg.go.dev](https://pkg.go.dev/mod.com/b#S1) +--- @S1F1 -- +-```go +-field F1 int +-``` +- +---- +- +-@loc(S1F1, "F1") +- +- +---- +- +-[`(b.S1).F1` on pkg.go.dev](https://pkg.go.dev/mod.com/b#S1.F1) +--- @S1S2 -- +-```go +-field S2 S2 +-``` +- +---- +- +-@loc(S1S2, "S2"),def("S2", S2),hover("S2", "S2", S2) +- +- +---- +- +-[`(b.S1).S2` on pkg.go.dev](https://pkg.go.dev/mod.com/b#S1.S2) +--- @S2 -- +-```go +-type S2 struct { // size=32 (0x20) +- F1 string //@loc(S2F1, "F1") +- F2 int //@loc(S2F2, "F2") +- *a.A //@def("A", AString),def("a",AImport) +-} +-``` +- +---- +- +-```go +-func (a.A) Hi() +-``` +- +---- +- +-[`b.S2` on pkg.go.dev](https://pkg.go.dev/mod.com/b#S2) +--- @S2F1 -- +-```go +-field F1 string +-``` +- +---- +- +-@loc(S2F1, "F1") +- +- +---- +- +-[`(b.S2).F1` on pkg.go.dev](https://pkg.go.dev/mod.com/b#S2.F1) +--- @S2F2 -- +-```go +-field F2 int // through S2 +-``` +- +---- +- +-@loc(S2F2, "F2") +- +- +---- +- +-[`(b.S2).F2` on pkg.go.dev](https://pkg.go.dev/mod.com/b#S2.F2) +--- @SField -- +-```go +-field Field int // through S +-``` +- +---- +- +-@loc(SField, "Field") +- +- +---- +- +-[`(a.S).Field` on pkg.go.dev](https://pkg.go.dev/mod.com/a#S.Field) +--- @aA -- +-```go +-type A string // size=16 (0x10) +-``` +- +---- +- +-@loc(AString, "A") +- +- +-```go +-func (a.A) Hi() +-``` +- +---- +- +-[`a.A` on pkg.go.dev](https://pkg.go.dev/mod.com/a#A) +--- @aAlias -- +-```go +-type aAlias = a.A // size=16 (0x10) +- +-type A string +-``` +- +---- +- +-@loc(aAlias, "aAlias") +- +- +-```go +-func (a.A) Hi() +-``` +diff -urN a/gopls/internal/test/marker/testdata/definition/import.txt b/gopls/internal/test/marker/testdata/definition/import.txt +--- a/gopls/internal/test/marker/testdata/definition/import.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/definition/import.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,61 +0,0 @@ +-This test checks definition and hover over imports. +- +--- go.mod -- +-module mod.com +- +-go 1.18 +--- foo/foo.go -- +-package foo +- +-type Foo struct{} +- +-// DoFoo does foo. +-func DoFoo() {} //@loc(DoFoo, "DoFoo") +--- bar/bar.go -- +-package bar +- +-import ( +- myFoo "mod.com/foo" //@loc(myFoo, "myFoo") +-) +- +-var _ *myFoo.Foo //@def("myFoo", myFoo),hover("myFoo", "myFoo", myFoo) +--- bar/dotimport.go -- +-package bar +- +-import . "mod.com/foo" +- +-func _() { +- // variable of type foo.Foo +- var _ Foo //@hover("_", "_", FooVar) +- +- DoFoo() //@hover("DoFoo", "DoFoo", DoFoo) +-} +--- @DoFoo -- +-```go +-func DoFoo() +-``` +- +---- +- +-DoFoo does foo. 
+- +- +---- +- +-[`foo.DoFoo` on pkg.go.dev](https://pkg.go.dev/mod.com/foo#DoFoo) +--- @FooVar -- +-```go +-var _ Foo +-``` +- +---- +- +-variable of type foo.Foo +--- @myFoo -- +-```go +-package foo +-``` +- +---- +- +-[`myFoo` on pkg.go.dev](https://pkg.go.dev/mod.com/foo) +diff -urN a/gopls/internal/test/marker/testdata/definition/misc.txt b/gopls/internal/test/marker/testdata/definition/misc.txt +--- a/gopls/internal/test/marker/testdata/definition/misc.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/definition/misc.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,283 +0,0 @@ +-This test exercises miscellaneous definition and hover requests. +- +-Its size expectations assume a 64-bit machine. +- +--- settings.json -- +-{"analyses": {"unusedfunc": false}} +- +--- go.mod -- +-module mod.com +- +-go 1.16 +- +--- flags -- +--skip_goarch=386,arm +- +--- a.go -- +-package a //@loc(aPackage, re"package (a)"),hover(aPackage, aPackage, aPackage) +- +-var ( +- // x is a variable. +- x string //@loc(x, "x"),hover(x, x, hoverx) +-) +- +-// Constant block. When I hover on h, I should see this comment. +-const ( +- // When I hover on g, I should see this comment. +- g = 1 //@hover("g", "g", hoverg) +- +- h = 2 //@hover("h", "h", hoverh) +-) +- +-// z is a variable too. +-var z string //@loc(z, "z"),hover(z, z, hoverz) +- +-func AStuff() { //@loc(AStuff, "AStuff") +- x := 5 +- Random2(x) //@def("dom2", Random2) +- Random() //@def("()", Random) +-} +- +-type H interface { //@loc(H, "H") +- Goodbye() +-} +- +-type I interface { //@loc(I, "I") +- B() +- J +-} +- +-type J interface { //@loc(J, "J") +- Hello() +-} +- +-func _() { +- // 1st type declaration block +- type ( +- a struct { //@hover("a", "a", hoverDeclBlocka) +- x string +- } +- ) +- +- // 2nd type declaration block +- type ( +- // b has a comment +- b struct{} //@hover("b", "b", hoverDeclBlockb) +- ) +- +- // 3rd type declaration block +- type ( +- // c is a struct +- c struct { //@hover("c", "c", hoverDeclBlockc) +- f string +- } +- +- d string //@hover("d", "d", hoverDeclBlockd) +- ) +- +- type ( +- e struct { //@hover("e", "e", hoverDeclBlocke) +- f float64 +- } // e has a comment +- ) +-} +- +-var ( +- hh H //@hover("H", "H", hoverH) +- ii I //@hover("I", "I", hoverI) +- jj J //@hover("J", "J", hoverJ) +-) +--- a_test.go -- +-package a +- +-import ( +- "testing" +-) +- +-func TestA(t *testing.T) { //@hover("TestA", "TestA", hoverTestA) +-} +--- random.go -- +-package a +- +-func Random() int { //@loc(Random, "Random") +- y := 6 + 7 +- return y +-} +- +-func Random2(y int) int { //@loc(Random2, "Random2"),loc(RandomParamY, "y") +- return y //@def("y", RandomParamY),hover("y", "y", hovery) +-} +- +-type Pos struct { +- x, y int //@loc(PosX, "x"),loc(PosY, "y") +-} +- +-// Typ has a comment. Its fields do not. 
+-type Typ struct{ field string } //@loc(TypField, "field") +- +-func _() { +- x := &Typ{} +- _ = x.field //@def("field", TypField),hover("field", "field", hoverfield) +-} +- +-func (p *Pos) Sum() int { //@loc(PosSum, "Sum") +- return p.x + p.y //@hover("x", "x", hoverpx) +-} +- +-func _() { +- var p Pos +- _ = p.Sum() //@def("()", PosSum),hover("()", `Sum`, hoverSum) +-} +--- @aPackage -- +-```go +-package a +-``` +- +---- +- +- - Package path: mod.com +- - Module: mod.com +- - Language version: go1.16 +--- @hoverDeclBlocka -- +-```go +-type a struct { // size=16 (0x10) +- x string +-} +-``` +- +---- +- +-1st type declaration block +--- @hoverDeclBlockb -- +-```go +-type b struct{} // size=0 +-``` +- +---- +- +-b has a comment +--- @hoverDeclBlockc -- +-```go +-type c struct { // size=16 (0x10) +- f string +-} +-``` +- +---- +- +-c is a struct +--- @hoverDeclBlockd -- +-```go +-type d string // size=16 (0x10) +-``` +- +---- +- +-3rd type declaration block +--- @hoverDeclBlocke -- +-```go +-type e struct { // size=8 +- f float64 +-} +-``` +- +---- +- +-e has a comment +--- @hoverH -- +-```go +-type H interface { +- Goodbye() +-} +-``` +- +---- +- +-[`a.H` on pkg.go.dev](https://pkg.go.dev/mod.com#H) +--- @hoverI -- +-```go +-type I interface { +- B() +- J +-} +-``` +- +---- +- +-```go +-func (J) Hello() +-``` +- +---- +- +-[`a.I` on pkg.go.dev](https://pkg.go.dev/mod.com#I) +--- @hoverJ -- +-```go +-type J interface { +- Hello() +-} +-``` +- +---- +- +-[`a.J` on pkg.go.dev](https://pkg.go.dev/mod.com#J) +--- @hoverSum -- +-```go +-func (p *Pos) Sum() int +-``` +- +---- +- +-[`(a.Pos).Sum` on pkg.go.dev](https://pkg.go.dev/mod.com#Pos.Sum) +--- @hoverTestA -- +-```go +-func TestA(t *testing.T) +-``` +--- @hoverfield -- +-```go +-field field string +-``` +--- @hoverg -- +-```go +-const g untyped int = 1 +-``` +- +---- +- +-When I hover on g, I should see this comment. +--- @hoverh -- +-```go +-const h untyped int = 2 +-``` +- +---- +- +-Constant block. When I hover on h, I should see this comment. +--- @hoverpx -- +-```go +-field x int +-``` +- +---- +- +-@loc(PosX, "x"),loc(PosY, "y") +--- @hoverx -- +-```go +-var x string +-``` +- +---- +- +-x is a variable. +--- @hovery -- +-```go +-var y int +-``` +--- @hoverz -- +-```go +-var z string +-``` +- +---- +- +-z is a variable too. +diff -urN a/gopls/internal/test/marker/testdata/definition/return.txt b/gopls/internal/test/marker/testdata/definition/return.txt +--- a/gopls/internal/test/marker/testdata/definition/return.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/definition/return.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,23 +0,0 @@ +-This test checks definition operations in function return statements. +-Go to definition on 'return' should go to the result parameter list. 
+- +--- go.mod -- +-module mod.com +- +-go 1.18 +- +--- a/a.go -- +-package a +- +-func Hi() string { //@loc(HiReturn, "string") +- return "Hello" //@def("return", HiReturn) +-} +- +-func Bye() (int, int, int) { //@loc(ByeReturn, "(int, int, int)") +- return 1, 2, 3 //@def("return", ByeReturn) +-} +- +-func TestLit() { +- f := func(a, b int) bool { return a*b < 100 } //@loc(FuncLitReturn, "bool"),def("return", FuncLitReturn) +- f(1, 2) +-} +diff -urN a/gopls/internal/test/marker/testdata/definition/standalone_issue64557.txt b/gopls/internal/test/marker/testdata/definition/standalone_issue64557.txt +--- a/gopls/internal/test/marker/testdata/definition/standalone_issue64557.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/definition/standalone_issue64557.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,30 +0,0 @@ +-This test checks that we can load standalone files that use cgo. +- +--- flags -- +--cgo +- +--- go.mod -- +-module example.com +- +--- main.go -- +-//go:build ignore +- +-package main +- +-import ( +- "C" +- +- "example.com/a" +-) +- +-func F() {} //@loc(F, "F") +- +-func main() { +- F() //@def("F", F) +- println(a.A) //@def("A", A) +-} +- +--- a/a.go -- +-package a +- +-const A = 0 //@loc(A, "A") +diff -urN a/gopls/internal/test/marker/testdata/definition/standalone.txt b/gopls/internal/test/marker/testdata/definition/standalone.txt +--- a/gopls/internal/test/marker/testdata/definition/standalone.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/definition/standalone.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,43 +0,0 @@ +-This test checks the behavior of standalone packages, in particular documenting +-our failure to support test files as standalone packages (golang/go#64233). +- +--- go.mod -- +-module golang.org/lsptests/a +- +-go 1.20 +- +--- a.go -- +-package a +- +-func F() {} //@loc(F, "F") +- +--- standalone.go -- +-//go:build ignore +-package main +- +-import "golang.org/lsptests/a" +- +-func main() { +- a.F() //@def("F", F) +-} +- +--- standalone_test.go -- +-//go:build ignore +-package main //@diag("main", re"No packages found") +- +-import "golang.org/lsptests/a" +- +-func main() { +- a.F() //@hovererr("F", "no package") +-} +- +--- standalone_x_test.go -- +-//go:build ignore +-package main_test //@diag("main", re"No packages found") +- +-import "golang.org/lsptests/a" +- +-func main() { +- a.F() //@hovererr("F", "no package") +-} +- +diff -urN a/gopls/internal/test/marker/testdata/diagnostics/addgowork.txt b/gopls/internal/test/marker/testdata/diagnostics/addgowork.txt +--- a/gopls/internal/test/marker/testdata/diagnostics/addgowork.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/diagnostics/addgowork.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,51 +0,0 @@ +-This test demonstrates diagnostics for adding a go.work file. +- +-Quick-fixes change files on disk, so are tested by integration tests. +- +-TODO(rfindley): improve the "cannot find package" import errors. +- +--- skip -- +-These diagnostics are no longer produced, because in golang/go#57979 +-(zero-config gopls) we made gopls function independent of a go.work file. +-Preserving this test as we may want to re-enable the code actions go manage +-a go.work file. +- +-Note that in go.dev/issue/60584#issuecomment-1622238115, this test was flaky. +-However, critical error logic has since been rewritten. 
+- +--- a/go.mod -- +-module mod.com/a +- +-go 1.18 +- +--- a/main.go -- +-package main //@diag("main", re"add a go.work file") +- +-import "mod.com/a/lib" //@diag("\"mod.com", re"cannot find package") +- +-func main() { +- _ = lib.C +-} +- +--- a/lib/lib.go -- +-package lib //@diag("lib", re"add a go.work file") +- +-const C = "b" +--- b/go.mod -- +-module mod.com/b +- +-go 1.18 +- +--- b/main.go -- +-package main //@diag("main", re"add a go.work file") +- +-import "mod.com/b/lib" //@diag("\"mod.com", re"cannot find package") +- +-func main() { +- _ = lib.C +-} +- +--- b/lib/lib.go -- +-package lib //@diag("lib", re"add a go.work file") +- +-const C = "b" +diff -urN a/gopls/internal/test/marker/testdata/diagnostics/analyzers.txt b/gopls/internal/test/marker/testdata/diagnostics/analyzers.txt +--- a/gopls/internal/test/marker/testdata/diagnostics/analyzers.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/diagnostics/analyzers.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,141 +0,0 @@ +-Test of warning diagnostics from various analyzers: +-copylocks, printf, slog, tests, timeformat, nilness, and cgocall. +- +--- settings.json -- +-{ +- "pullDiagnostics": true +-} +- +--- go.mod -- +-module example.com +-go 1.23 +- +--- flags -- +--min_go_command=go1.23 +--cgo +- +--- bad/bad_test.go -- +-package bad +- +-import ( +- "fmt" +- "iter" +- "log/slog" +- "sync" +- "testing" +- "time" +-) +- +-// copylocks +-func _() { +- var x sync.Mutex +- _ = x //@diag("x", re"assignment copies lock value to _: sync.Mutex") +-} +- +-// printf +-func _() { +- printfWrapper("%s") //@diag(re`%s`, re"example.com/bad.printfWrapper format %s reads arg #1, but call has 0 args") +-} +- +-func printfWrapper(format string, args ...any) { +- fmt.Printf(format, args...) +-} +- +-// tests +-func Testbad(t *testing.T) { //@diag("Testbad", re"Testbad has malformed name: first letter after 'Test' must not be lowercase") +-} +- +-// timeformat +-func _() { +- now := time.Now() +- fmt.Println(now.Format("2006-02-01")) //@diag("2006-02-01", re"2006-02-01 should be 2006-01-02") +-} +- +-// nilness +-func _(ptr *int) { +- if ptr == nil { +- _ = *ptr //@diag("*ptr", re"nil dereference in load") +- } +-} +- +-// unusedwrite +-func _(s struct{x int}) { +- s.x = 1 //@diag("x", re"unused write to field x") +-} +- +-// slog +-func _() { +- slog.Info("msg", 1) //@diag("1", re`slog.Info arg "1" should be a string or a slog.Attr`) +-} +- +-// waitgroup +-func _() { +- var wg sync.WaitGroup +- go func() { +- wg.Add(1) //@diag("(", re"WaitGroup.Add called from inside new goroutine") +- }() +-} +- +-// inline +-func _() { +- f() //@diag("f", re"Call of bad.f should be inlined") +-} +- +-//go:fix inline +-func f() { fmt.Println(1) } +- +-// recursiveiter +-func F() iter.Seq[int] { +- return func(yield func(int) bool) { +- for range F() {} //@ diag("range", re"inefficient recursion in iterator F") +- } +-} +- +--- cgocall/cgocall.go -- +-package cgocall +- +-// Note: this test must be in a separate package, as the unsafe import +-// silences the unusedwrite analyzer. 
+-import "unsafe" +- +-// void f(void *ptr) {} +-import "C" +- +-// cgocall +-func _(c chan bool) { +- C.f(unsafe.Pointer(&c)) //@ diag("unsafe", re"passing Go type with embedded pointer to C") +-} +- +--- maprange/maprange.go -- +-package maprange +- +-import "maps" +- +-func _(m map[int]int) { +- for range maps.Keys(m) {} //@ diag("maps.Keys", re"unnecessary and inefficient call of maps.Keys") +-} +- +--- unusedresult/unusedresult.go -- +-package unusedresult +- +-import "fmt" +- +-func _() { +- fmt.Appendf(nil, "%d", 1) //@ diag("fmt.Appendf", re"result.*not used") +-} +- +--- staticcheck/staticcheck.go -- +-package staticcheck +- +-// staticcheck includes hundreds of other analyzers. +-// Here we test only two: one enabled by default, one disabled. +- +-func S1000(ch chan int) { +- select { case <-ch: } //@ diag("select", re"use .*receive instead of select") +-} +- +-func S1011(x, y []int) { +- for _, e := range y { +- x = append(x, e) // no "replace loop with append" diagnostic +- } +-} +diff -urN a/gopls/internal/test/marker/testdata/diagnostics/excludedfile.txt b/gopls/internal/test/marker/testdata/diagnostics/excludedfile.txt +--- a/gopls/internal/test/marker/testdata/diagnostics/excludedfile.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/diagnostics/excludedfile.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,36 +0,0 @@ +-This test demonstrates diagnostics for various forms of file exclusion. +- +-Note: this test used to also check the errors when a file was excluded due to +-an inactive module, or mismatching GOOS/GOARCH, comment, but with zero-config +-gopls (golang/go#57979) and improved build tag support (golang/go#29202), we no +-longer get these errors. +- +--- go.work -- +-go 1.21 +- +-use ( +- ./a +-) +--- a/go.mod -- +-module mod.com/a +- +-go 1.18 +- +--- a/a.go -- +-package a +- +--- a/a_plan9.go -- +-package a // Not excluded, due to improved build tag support. +- +--- a/a_ignored.go -- +-//go:build skip +-package a //@diag(re"package (a)", re"excluded due to its build tags") +- +--- b/go.mod -- +-module mod.com/b +- +-go 1.18 +- +--- b/b.go -- +-package b // Not excluded, due to zero-config gopls. +- +diff -urN a/gopls/internal/test/marker/testdata/diagnostics/generated.txt b/gopls/internal/test/marker/testdata/diagnostics/generated.txt +--- a/gopls/internal/test/marker/testdata/diagnostics/generated.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/diagnostics/generated.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,26 +0,0 @@ +-Test of "undeclared" diagnostic in generated code. +- +--- settings.json -- +-{ +- "pullDiagnostics": true +-} +- +--- go.mod -- +-module example.com +-go 1.12 +- +--- generated.go -- +-// Code generated by generator.go. DO NOT EDIT. +- +-package generated +- +-func _() { +- var y int //@diag("y", re"declared (and|but) not used") +-} +- +--- generator.go -- +-package generated +- +-func _() { +- var x int //@diag("x", re"declared (and|but) not used") +-} +diff -urN a/gopls/internal/test/marker/testdata/diagnostics/initcycle.txt b/gopls/internal/test/marker/testdata/diagnostics/initcycle.txt +--- a/gopls/internal/test/marker/testdata/diagnostics/initcycle.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/diagnostics/initcycle.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,17 +0,0 @@ +-This test verifies that gopls spreads initialization cycle errors across +-multiple declarations. 
+- +-We set -ignore_extra_diags due to golang/go#65877: gopls produces redundant +-diagnostics for initialization cycles. +- +--- flags -- +--ignore_extra_diags +- +--- p.go -- +-package p +- +-var X = Y //@diag("X", re"initialization cycle") +- +-var Y = Z //@diag("Y", re"initialization cycle") +- +-var Z = X //@diag("Z", re"initialization cycle") +diff -urN a/gopls/internal/test/marker/testdata/diagnostics/issue56943.txt b/gopls/internal/test/marker/testdata/diagnostics/issue56943.txt +--- a/gopls/internal/test/marker/testdata/diagnostics/issue56943.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/diagnostics/issue56943.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,27 +0,0 @@ +-This test verifies that we produce diagnostics related to mismatching +-unexported interface methods in non-workspace packages. +- +-Previously, we would fail to produce a diagnostic because we trimmed the AST. +-See golang/go#56943. +--- settings.json -- +-{ +- "pullDiagnostics": true +-} +- +--- main.go -- +-package main +- +-import ( +- "go/ast" +- "go/token" +-) +- +-func main() { +- var a int //@diag(re"(a) int", re"declared.*not used") +- var _ ast.Expr = node{} //@diag("node{}", re"missing.*exprNode") +-} +- +-type node struct{} +- +-func (node) Pos() token.Pos { return 0 } +-func (node) End() token.Pos { return 0 } +diff -urN a/gopls/internal/test/marker/testdata/diagnostics/issue59005.txt b/gopls/internal/test/marker/testdata/diagnostics/issue59005.txt +--- a/gopls/internal/test/marker/testdata/diagnostics/issue59005.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/diagnostics/issue59005.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,20 +0,0 @@ +-This test verifies that we don't drop type checking errors on the floor when we +-fail to compute positions for their related errors. +- +--- go.mod -- +-module play.ground +- +--- p.go -- +-package p +- +-import ( +- . "play.ground/foo" +-) +- +-const C = 1 //@diag("C", re"C already declared through dot-import") +-var _ = C +- +--- foo/foo.go -- +-package foo +- +-const C = 2 +diff -urN a/gopls/internal/test/marker/testdata/diagnostics/issue60544.txt b/gopls/internal/test/marker/testdata/diagnostics/issue60544.txt +--- a/gopls/internal/test/marker/testdata/diagnostics/issue60544.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/diagnostics/issue60544.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,9 +0,0 @@ +-This test exercises a crash due to treatment of "comparable" in methodset +-calculation (golang/go#60544). +- +--- main.go -- +-package main +- +-type X struct{} +- +-func (X) test(x comparable) {} //@diag("comparable", re"outside a type constraint") +diff -urN a/gopls/internal/test/marker/testdata/diagnostics/issue60605.txt b/gopls/internal/test/marker/testdata/diagnostics/issue60605.txt +--- a/gopls/internal/test/marker/testdata/diagnostics/issue60605.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/diagnostics/issue60605.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,12 +0,0 @@ +-This test verifies that we can export constants with unknown kind. +-Previously, the exporter would panic while attempting to convert such constants +-to their target type (float64, in this case). 
+- +--- go.mod -- +-module mod.txt/p +- +-go 1.20 +--- p.go -- +-package p +- +-const EPSILON float64 = 1e- //@diag(re"1e-()", re"exponent has no digits") +diff -urN a/gopls/internal/test/marker/testdata/diagnostics/issue64547.txt b/gopls/internal/test/marker/testdata/diagnostics/issue64547.txt +--- a/gopls/internal/test/marker/testdata/diagnostics/issue64547.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/diagnostics/issue64547.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,14 +0,0 @@ +-This test checks the fix for golang/go#64547: the lostcancel analyzer reports +-diagnostics that overflow the file. +- +--- p.go -- +-package p +- +-import "context" +- +-func _() { +- _, cancel := context.WithCancel(context.Background()) //@diag("_, cancel", re"not used on all paths") +- if false { +- cancel() +- } +-} //@diag("}", re"may be reached without using the cancel") +diff -urN a/gopls/internal/test/marker/testdata/diagnostics/issue67360.txt b/gopls/internal/test/marker/testdata/diagnostics/issue67360.txt +--- a/gopls/internal/test/marker/testdata/diagnostics/issue67360.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/diagnostics/issue67360.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,13 +0,0 @@ +-Regression test for #67360. +- +-This file causes go list to report a "use of internal package +-cmd/internal/browser" error. (It is important that this be a real +-internal std package.) The line directive caused the position of the +-error to lack a column. A bug in the error parser filled in 0, not 1, +-for the missing information, and this is an invalid value in the +-1-based UTF-8 domain, leading to a panic. +- +--- foo.go -- +-//line foo.go:1 +-package main //@ diag(re"package", re"internal package.*not allowed") +-import _ "cmd/internal/browser" //@ diag(re`"`, re"could not import") +diff -urN a/gopls/internal/test/marker/testdata/diagnostics/issue69505.txt b/gopls/internal/test/marker/testdata/diagnostics/issue69505.txt +--- a/gopls/internal/test/marker/testdata/diagnostics/issue69505.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/diagnostics/issue69505.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,22 +0,0 @@ +-This test checks that diagnostics ranges computed with the TypeErrorEndPos +-heuristic span at least a full token. +- +--- go.mod -- +-module example.com +- +-go 1.21 +- +--- main.go -- +-package main +- +-import "example.com/foo-bar" //@ diag(re`"[^"]*"`, re`not used`, exact=true) +- +-func f(int) {} +- +-func main() { +- var x int +- _ = x + 1.e+0i //@ diag("1.e+0i", re`truncated`, exact=true) +-} +- +--- foo-bar/baz.go -- +-package foo +diff -urN a/gopls/internal/test/marker/testdata/diagnostics/issue70791.txt b/gopls/internal/test/marker/testdata/diagnostics/issue70791.txt +--- a/gopls/internal/test/marker/testdata/diagnostics/issue70791.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/diagnostics/issue70791.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,29 +0,0 @@ +-In addition to the Diagnostic, the SA4023 analyzer reports a +-RelatedInformation at the position of b.B, in an another package. +-Since this is in a dependency package, we cannot resolve to +-protocol.Location coordinates. This used to trigger an assertion, but +-now we resolve the location approximately. +- +-This is a regression test for #70791. 
+- +--- settings.json -- +-{"analyses": {"SA4023": true}} +- +--- go.mod -- +-module example.com +-go 1.18 +- +--- a/a.go -- +-package a +- +-import "example.com/b" +- +-var _ = b.B() == nil //@ diag("b.B", re"comparison is never true") +- +--- b/b.go -- +-package b +- +-func B() any { return (*int)(nil) } +- +- +- +diff -urN a/gopls/internal/test/marker/testdata/diagnostics/issue71812.txt b/gopls/internal/test/marker/testdata/diagnostics/issue71812.txt +--- a/gopls/internal/test/marker/testdata/diagnostics/issue71812.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/diagnostics/issue71812.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,17 +0,0 @@ +-This input causes the unreachable analyzer to report a diagnostic +-about the var decl statement. Since the computed End pos of +-ast.StructType is beyond EOF, validation of SuggestedFixes fails. +-This used to trigger an assertion in gopls' analysis driver. +- +-See golang/go#71659 (and also #71812). +- +--- flags -- +--ignore_extra_diags +- +--- go.mod -- +-module example.com +-go 1.18 +- +--- a/a.go -- +-package a +-func _() { return; var x struct{ +diff -urN a/gopls/internal/test/marker/testdata/diagnostics/osarch_suffix.txt b/gopls/internal/test/marker/testdata/diagnostics/osarch_suffix.txt +--- a/gopls/internal/test/marker/testdata/diagnostics/osarch_suffix.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/diagnostics/osarch_suffix.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,46 +0,0 @@ +-This test verifies that we add an [os,arch] suffix to each diagnostic +-that doesn't appear in the default build (=runtime.{GOOS,GOARCH}). +- +-See golang/go#65496. +- +-The two p/*.go files below are written to trigger the same diagnostic +-(range, message, source, etc) but varying only by URI. +- +-In the q test, a single location in the common code q.go has two +-diagnostics, one of which is tagged. +- +-This test would fail on openbsd/mips64 because it will be +-the same as the default build, so we skip that platform. +- +--- flags -- +--skip_goos=openbsd +- +--- go.mod -- +-module example.com +- +--- p/p.go -- +-package p +- +-var _ fmt.Stringer //@diag("fmt", re"unde.*: fmt$") +- +--- p/p_openbsd_mips64.go -- +-package p +- +-var _ fmt.Stringer //@diag("fmt", re"unde.*: fmt \\[openbsd,mips64\\]") +- +--- q/q_default.go -- +-//+build !openbsd && !mips64 +- +-package q +- +-func f(int) int +- +--- q/q_openbsd_mips64.go -- +-package q +- +-func f(string) int +- +--- q/q.go -- +-package q +- +-var _ = f() //@ diag(")", re`.*want \(string\) \[openbsd,mips64\]`), diag(")", re`.*want \(int\)$`) +diff -urN a/gopls/internal/test/marker/testdata/diagnostics/parseerr.txt b/gopls/internal/test/marker/testdata/diagnostics/parseerr.txt +--- a/gopls/internal/test/marker/testdata/diagnostics/parseerr.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/diagnostics/parseerr.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,27 +0,0 @@ +- +-This test exercises diagnostics produced for syntax errors. +- +-Because parser error recovery can be quite lossy, diagnostics +-for type errors are suppressed in files with syntax errors; +-see issue #59888. But diagnostics are reported for type errors +-in well-formed files of the same package. 
+- +--- go.mod -- +-module example.com +-go 1.12 +- +--- bad.go -- +-package p +- +-func f() { +- append("") // no diagnostic for type error in file containing syntax error +-} +- +-func .() {} //@diag(re"func ().", re"expected 'IDENT', found '.'") +- +--- good.go -- +-package p +- +-func g() { +- append("") //@diag(re`""`, re"a slice") +-} +diff -urN a/gopls/internal/test/marker/testdata/diagnostics/rundespiteerrors.txt b/gopls/internal/test/marker/testdata/diagnostics/rundespiteerrors.txt +--- a/gopls/internal/test/marker/testdata/diagnostics/rundespiteerrors.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/diagnostics/rundespiteerrors.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,21 +0,0 @@ +-This test verifies that analyzers without RunDespiteErrors are not +-executed on a package containing type errors (see issue #54762). +- +--- go.mod -- +-module example.com +-go 1.12 +- +--- a.go -- +-package a +- +-func _() { +- // A type error. +- _ = 1 + "" //@diag(`1 + ""`, re"mismatched types|cannot convert") +- +- // A violation of an analyzer for which RunDespiteErrors=false: +- // no (simplifyrange, warning) diagnostic is produced; the diag +- // comment is merely illustrative. +- for _ = range "" { //diag("for _", "simplify range expression", ) +- +- } +-} +diff -urN a/gopls/internal/test/marker/testdata/diagnostics/stdversion.txt b/gopls/internal/test/marker/testdata/diagnostics/stdversion.txt +--- a/gopls/internal/test/marker/testdata/diagnostics/stdversion.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/diagnostics/stdversion.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,89 +0,0 @@ +-Test of "too new" diagnostics from the stdversion analyzer. +- +-This test references go1.21 symbols from std, but the analyzer itself +-depends on the go1.22 behavior of versions.FileVersion. +- +-See also go/analysis/passes/stdversion/testdata/test.txtar, +-which runs the same test in the analysistest framework. 
+- +--- flags -- +--min_go_command=go1.22 +- +--- go.mod -- +-module example.com +- +-go 1.21 +- +--- a/a.go -- +-package a +- +-import "go/types" +- +-func _() { +- // old package-level type +- var _ types.Info // ok: defined by go1.0 +- +- // new field of older type +- _ = new(types.Info).FileVersions //@diag("FileVersions", re`types.FileVersions requires go1.22 or later \(module is go1.21\)`) +- +- // new method of older type +- _ = new(types.Info).PkgNameOf //@diag("PkgNameOf", re`types.PkgNameOf requires go1.22 or later \(module is go1.21\)`) +- +- // new package-level type +- var a types.Alias //@diag("Alias", re`types.Alias requires go1.22 or later \(module is go1.21\)`) +- +- // new method of new type +- a.Underlying() // no diagnostic +-} +- +--- sub/go.mod -- +-module example.com/sub +- +-go 1.21 +- +--- sub/sub.go -- +-package sub +- +-import "go/types" +- +-func _() { +- // old package-level type +- var _ types.Info // ok: defined by go1.0 +- +- // new field of older type +- _ = new(types.Info).FileVersions //@diag("FileVersions", re`types.FileVersions requires go1.22 or later \(module is go1.21\)`) +- +- // new method of older type +- _ = new(types.Info).PkgNameOf //@diag("PkgNameOf", re`types.PkgNameOf requires go1.22 or later \(module is go1.21\)`) +- +- // new package-level type +- var a types.Alias //@diag("Alias", re`types.Alias requires go1.22 or later \(module is go1.21\)`) +- +- // new method of new type +- a.Underlying() // no diagnostic +-} +- +--- sub/tagged.go -- +-//go:build go1.22 +- +-package sub +- +-import "go/types" +- +-func _() { +- // old package-level type +- var _ types.Info +- +- // new field of older type +- _ = new(types.Info).FileVersions +- +- // new method of older type +- _ = new(types.Info).PkgNameOf +- +- // new package-level type +- var a types.Alias +- +- // new method of new type +- a.Underlying() +-} +- +diff -urN a/gopls/internal/test/marker/testdata/diagnostics/strangefiles.txt b/gopls/internal/test/marker/testdata/diagnostics/strangefiles.txt +--- a/gopls/internal/test/marker/testdata/diagnostics/strangefiles.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/diagnostics/strangefiles.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,22 +0,0 @@ +-This test checks diagnostics on files that are strange for one reason or +-another. +- +-Note(rfindley): ported from the old marker tests. I'm not sure why these were +-written originally. +- +--ignore_extra_diags is required because the marker framework fails for +-noparse.go, and we therefore can't match the EOF error. +- +--- flags -- +--ignore_extra_diags +--errors_ok +- +--- go.mod -- +-module golang.org/lsptests +- +-go 1.18 +--- %percent/perc%ent.go -- +-package percent //@diag("percent", re"No packages") +- +--- noparse/noparse.go -- +- +diff -urN a/gopls/internal/test/marker/testdata/diagnostics/typeerr.txt b/gopls/internal/test/marker/testdata/diagnostics/typeerr.txt +--- a/gopls/internal/test/marker/testdata/diagnostics/typeerr.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/diagnostics/typeerr.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,28 +0,0 @@ +- +-This test exercises diagnostics produced for type errors +-in the absence of syntax errors. +- +-The type error was chosen to exercise the 'nonewvars' type-error analyzer. +-(The 'undeclaredname' analyzer depends on the text of the go/types +-"undeclared name" error, which changed in go1.20.) 
+- +-The append() type error was also carefully chosen to have text and +-position that are invariant across all versions of Go run by the builders. +- +--- go.mod -- +-module example.com +-go 1.12 +- +--- typeerr.go -- +-package a +- +-func f(x int) { +- append("") //@diag(re`""`, re"a slice") +- +- x := 123 //@diag(re"x := 123", re"no new variables"), quickfix(re"():", re"no new variables", fix) +-} +- +--- @fix/typeerr.go -- +-@@ -6 +6 @@ +-- x := 123 //@diag(re"x := 123", re"no new variables"), quickfix(re"():", re"no new variables", fix) +-+ x = 123 //@diag(re"x := 123", re"no new variables"), quickfix(re"():", re"no new variables", fix) +diff -urN a/gopls/internal/test/marker/testdata/diagnostics/useinternal.txt b/gopls/internal/test/marker/testdata/diagnostics/useinternal.txt +--- a/gopls/internal/test/marker/testdata/diagnostics/useinternal.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/diagnostics/useinternal.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,21 +0,0 @@ +-This test checks a diagnostic for invalid use of internal packages. +- +-This list error changed in Go 1.21. +- +-See TestValidImportCheck_GoPackagesDriver for a test that no diagnostic +-is produced when using a GOPACKAGESDRIVER (such as for Bazel). +- +--- go.mod -- +-module bad.test +- +-go 1.18 +- +--- assign/internal/secret/secret.go -- +-package secret +- +-func Hello() {} +- +--- bad/bad.go -- +-package bad +- +-import _ "bad.test/assign/internal/secret" //@diag("\"bad.test/assign/internal/secret\"", re"could not import bad.test/assign/internal/secret \\(invalid use of internal package \"bad.test/assign/internal/secret\"\\)"),diag("_", re"use of internal package bad.test/assign/internal/secret not allowed") +diff -urN a/gopls/internal/test/marker/testdata/diagnostics/usemodule.txt b/gopls/internal/test/marker/testdata/diagnostics/usemodule.txt +--- a/gopls/internal/test/marker/testdata/diagnostics/usemodule.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/diagnostics/usemodule.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,52 +0,0 @@ +-This test demonstrates diagnostics for a module that is missing from the +-go.work file. +- +-Quick-fixes change files on disk, so are tested by integration tests. +- +--- skip -- +-Temporary skip due to golang/go#57979, with zero-config gopls, these modules +-are no longer orphaned. +- +--- go.work -- +-go 1.21 +- +-use ( +- ./a +-) +- +--- a/go.mod -- +-module mod.com/a +- +-go 1.18 +- +--- a/main.go -- +-package main +- +-import "mod.com/a/lib" +- +-func main() { +- _ = lib.C +-} +- +--- a/lib/lib.go -- +-package lib +- +-const C = "b" +--- b/go.mod -- +-module mod.com/b +- +-go 1.18 +- +--- b/main.go -- +-package main //@diag("main", re"add this module to your go.work") +- +-import "mod.com/b/lib" //@diag("\"mod.com", re"not included in a workspace module") +- +-func main() { +- _ = lib.C +-} +- +--- b/lib/lib.go -- +-package lib //@diag("lib", re"add this module to your go.work") +- +-const C = "b" +diff -urN a/gopls/internal/test/marker/testdata/fixedbugs/issue59318.txt b/gopls/internal/test/marker/testdata/fixedbugs/issue59318.txt +--- a/gopls/internal/test/marker/testdata/fixedbugs/issue59318.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/fixedbugs/issue59318.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,20 +0,0 @@ +-Previously, this test verifies that we can load multiple orphaned files as +-command-line-arguments packages. 
In the distant past, we would load only one +-because go/packages returns at most one command-line-arguments package per +-query. +- +-With zero-config gopls, these packages are successfully loaded as ad-hoc +-packages. +- +--- a/main.go -- +-package main +- +-func main() { +- var a int //@diag(re"var (a)", re"not used") +-} +--- b/main.go -- +-package main +- +-func main() { +- var b int //@diag(re"var (b)", re"not used") +-} +diff -urN a/gopls/internal/test/marker/testdata/fixedbugs/issue59944.txt b/gopls/internal/test/marker/testdata/fixedbugs/issue59944.txt +--- a/gopls/internal/test/marker/testdata/fixedbugs/issue59944.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/fixedbugs/issue59944.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,37 +0,0 @@ +-This test verifies that gopls does not panic when encountering the go/types +-bug described in golang/go#59944: the Bindingf function is not included in +-the methodset of its receiver type. +- +-Adapted from the code in question from the issue. +- +-The flag -ignore_extra_diags is included, as this bug was fixed in Go 1.24, so +-that now the code below may produce a diagnostic. +- +--- flags -- +--cgo +--ignore_extra_diags +- +--- go.mod -- +-module example.com +- +-go 1.12 +- +--- cgo.go -- +-package x +- +-import "fmt" +- +-/* +-struct layout { +- int field; +-}; +-*/ +-import "C" +- +-type Layout = C.struct_layout +- +-// Bindingf is a printf wrapper. This was necessary to trigger the panic in +-// objectpath while encoding facts. +-func (l *Layout) Bindingf(format string, args ...any) { +- fmt.Printf(format, args...) +-} +diff -urN a/gopls/internal/test/marker/testdata/fixedbugs/issue61543.txt b/gopls/internal/test/marker/testdata/fixedbugs/issue61543.txt +--- a/gopls/internal/test/marker/testdata/fixedbugs/issue61543.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/fixedbugs/issue61543.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,13 +0,0 @@ +-This test verifies that we fail loudly if a module name contains +-command-line-arguments. +- +--- flags -- +--errors_ok +- +--- go.mod -- +-module command-line-arguments //@diag("module", re`command-line-arguments.*disallowed`) +- +-go 1.12 +- +--- x/x.go -- +-package x //@diag("x", re`command-line-arguments.*disallowed`) +diff -urN a/gopls/internal/test/marker/testdata/fixedbugs/issue66109.txt b/gopls/internal/test/marker/testdata/fixedbugs/issue66109.txt +--- a/gopls/internal/test/marker/testdata/fixedbugs/issue66109.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/fixedbugs/issue66109.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,27 +0,0 @@ +-This test exercises the crash in golang/go#66109: a dangling reference due to +-test variants of a command-line-arguments package. +- +-Depends on go1.22+ go list errors. 
+- +--- flags -- +--min_go_command=go1.22 +- +--- go.mod -- +-module example.com/tools +- +-go 1.22 +- +--- tools_test.go -- +-//go:build tools +- +-package tools //@diag("tools", re"No packages found") +- +-import ( +- _ "example.com/tools/tool" +-) +- +--- tool/tool.go -- +-package main +- +-func main() { +-} +diff -urN a/gopls/internal/test/marker/testdata/fixedbugs/issue66250.txt b/gopls/internal/test/marker/testdata/fixedbugs/issue66250.txt +--- a/gopls/internal/test/marker/testdata/fixedbugs/issue66250.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/fixedbugs/issue66250.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,17 +0,0 @@ +-This bug checks the fix for golang/go#66250. Package references should not +-crash when one package file lacks a package name. +- +-TODO(rfindley): the -ignore_extra_diags flag is only necessary because of +-problems matching diagnostics in the broken file, likely due to poor parser +-recovery. +- +--- flags -- +--ignore_extra_diags +- +--- a.go -- +-package x //@refs("x", "x") +- +--- b.go -- +- +-func _() { +-} +diff -urN a/gopls/internal/test/marker/testdata/fixedbugs/issue66876.txt b/gopls/internal/test/marker/testdata/fixedbugs/issue66876.txt +--- a/gopls/internal/test/marker/testdata/fixedbugs/issue66876.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/fixedbugs/issue66876.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,27 +0,0 @@ +-This test checks that gopls successfully suppresses loopclosure diagnostics +-when the go.mod go version is set to a 1.22 toolchain version (1.22.x). +- +-In golang/go#66876, gopls failed to handle this correctly. +- +--- flags -- +--min_go_command=go1.22 +- +--- go.mod -- +-module example.com/loopclosure +- +-go 1.22.0 +- +--- p.go -- +-package main +- +-var x int //@loc(x, "x") +- +-func main() { +- // Verify that type checking actually succeeded by jumping to +- // an arbitrary definition. +- _ = x //@def("x", x) +- +- for i := range 10 { +- go func() { println(i) }() +- } +-} +diff -urN a/gopls/internal/test/marker/testdata/fixedbugs/issue71044.txt b/gopls/internal/test/marker/testdata/fixedbugs/issue71044.txt +--- a/gopls/internal/test/marker/testdata/fixedbugs/issue71044.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/fixedbugs/issue71044.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,18 +0,0 @@ +-This test checks that we don't crash while completing receivers that may happen +-to be builtin types (due to invalid code). This crash was reported by telemetry +-in golang/go#71044. +- +--- flags -- +--ignore_extra_diags +- +--- go.mod -- +-module example.com/amap +- +-go 1.18 +- +--- a.go -- +-package amap +- +-import "unsafe" +- +-func (unsafe.Pointer) _() {} //@ rank("unsafe") +diff -urN a/gopls/internal/test/marker/testdata/foldingrange/a_lineonly.txt b/gopls/internal/test/marker/testdata/foldingrange/a_lineonly.txt +--- a/gopls/internal/test/marker/testdata/foldingrange/a_lineonly.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/foldingrange/a_lineonly.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,269 +0,0 @@ +-This test checks basic behavior of the textDocument/foldingRange, when the +-editor only supports line folding. 
+- +--- capabilities.json -- +-{ +- "textDocument": { +- "foldingRange": { +- "lineFoldingOnly": true +- } +- } +-} +--- a.go -- +-package folding //@foldingrange(raw) +- +-import ( +- "fmt" +- _ "log" +- "sort" +- "time" +-) +- +-import _ "os" +- +-// Bar is a function. +-// With a multiline doc comment. +-func Bar() string { +- /* This is a single line comment */ +- switch { +- case true: +- if true { +- fmt.Println("true") +- } else { +- fmt.Println("false") +- } +- case false: +- fmt.Println("false") +- default: +- fmt.Println("default") +- } +- /* This is a multiline +- block +- comment */ +- +- /* This is a multiline +- block +- comment */ +- // Followed by another comment. +- _ = []int{ +- 1, +- 2, +- 3, +- } +- _ = [2]string{"d", +- "e", +- } +- _ = map[string]int{ +- "a": 1, +- "b": 2, +- "c": 3, +- } +- type T struct { +- f string +- g int +- h string +- } +- _ = T{ +- f: "j", +- g: 4, +- h: "i", +- } +- x, y := make(chan bool), make(chan bool) +- select { +- case val := <-x: +- if val { +- fmt.Println("true from x") +- } else { +- fmt.Println("false from x") +- } +- case <-y: +- fmt.Println("y") +- default: +- fmt.Println("default") +- } +- // This is a multiline comment +- // that is not a doc comment. +- return ` +-this string +-is not indented` +-} +- +-func _() { +- slice := []int{1, 2, 3} +- sort.Slice(slice, func(i, j int) bool { +- a, b := slice[i], slice[j] +- return a > b +- }) +- +- sort.Slice(slice, func(i, j int) bool { return slice[i] > slice[j] }) +- +- sort.Slice( +- slice, +- func(i, j int) bool { +- return slice[i] > slice[j] +- }, +- ) +- +- fmt.Println( +- 1, 2, 3, +- 4, +- ) +- +- fmt.Println(1, 2, 3, +- 4, 5, 6, +- 7, 8, 9, +- 10) +- +- // Call with ellipsis. +- _ = fmt.Errorf( +- "test %d %d", +- []any{1, 2, 3}..., +- ) +- +- // Check multiline string. +- fmt.Println( +- `multi +- line +- string +- `, +- 1, 2, 3, +- ) +- +- // Call without arguments. 
+- _ = time.Now() +-} +- +-func _( +- a int, b int, +- c int, +-) { +-} +--- @raw -- +-package folding //@foldingrange(raw) +- +-import (<0 kind="imports"> +- "fmt" +- _ "log" +- "sort" +- "time"</0> +-) +- +-import _ "os" +- +-// Bar is a function.<1 kind="comment"> +-// With a multiline doc comment.</1> +-func Bar() string {<2 kind=""> +- /* This is a single line comment */ +- switch {<3 kind=""> +- case true:<4 kind=""> +- if true {<5 kind=""> +- fmt.Println("true")</5> +- } else {<6 kind=""> +- fmt.Println("false")</6> +- }</4> +- case false:<7 kind=""> +- fmt.Println("false")</7> +- default:<8 kind=""> +- fmt.Println("default")</3></8> +- } +- /* This is a multiline<9 kind="comment"> +- block +- comment */</9> +- +- /* This is a multiline<10 kind="comment"> +- block +- comment */ +- // Followed by another comment.</10> +- _ = []int{<11 kind=""> +- 1, +- 2, +- 3,</11> +- } +- _ = [2]string{<12 kind="">"d", +- "e",</12> +- } +- _ = map[string]int{<13 kind=""> +- "a": 1, +- "b": 2, +- "c": 3,</13> +- } +- type T struct {<14 kind=""> +- f string +- g int +- h string</14> +- } +- _ = T{<15 kind=""> +- f: "j", +- g: 4, +- h: "i",</15> +- } +- x, y := make(chan bool), make(chan bool) +- select {<16 kind=""> +- case val := <-x:<17 kind=""> +- if val {<18 kind=""> +- fmt.Println("true from x")</18> +- } else {<19 kind=""> +- fmt.Println("false from x")</19> +- }</17> +- case <-y:<20 kind=""> +- fmt.Println("y")</20> +- default:<21 kind=""> +- fmt.Println("default")</16></21> +- } +- // This is a multiline comment<22 kind="comment"> +- // that is not a doc comment.</22> +- return <23 kind="">` +-this string +-is not indented`</2></23> +-} +- +-func _() {<24 kind=""> +- slice := []int{1, 2, 3} +- sort.Slice(<25 kind="">slice, func(i, j int) bool {<26 kind=""> +- a, b := slice[i], slice[j] +- return a > b</25></26> +- }) +- +- sort.Slice(slice, func(i, j int) bool { return slice[i] > slice[j] }) +- +- sort.Slice(<27 kind=""> +- slice, +- func(i, j int) bool {<28 kind=""> +- return slice[i] > slice[j]</28> +- },</27> +- ) +- +- fmt.Println(<29 kind=""> +- 1, 2, 3, +- 4,</29> +- ) +- +- fmt.Println(<30 kind="">1, 2, 3, +- 4, 5, 6, +- 7, 8, 9,</30> +- 10) +- +- // Call with ellipsis. +- _ = fmt.Errorf(<31 kind=""> +- "test %d %d", +- []any{1, 2, 3}...,</31> +- ) +- +- // Check multiline string. +- fmt.Println(<32 kind=""> +- <33 kind="">`multi +- line +- string +- `</33>, +- 1, 2, 3,</32> +- ) +- +- // Call without arguments. +- _ = time.Now()</24> +-} +- +-func _(<34 kind=""> +- a int, b int, +- c int,</34> +-) { +-} +diff -urN a/gopls/internal/test/marker/testdata/foldingrange/a.txt b/gopls/internal/test/marker/testdata/foldingrange/a.txt +--- a/gopls/internal/test/marker/testdata/foldingrange/a.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/foldingrange/a.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,274 +0,0 @@ +-This test checks basic behavior of textDocument/foldingRange. +- +--- a.go -- +-package folding //@foldingrange(raw) +- +-import ( +- "fmt" +- _ "log" +- "sort" +- "time" +-) +- +-import _ "os" +- +-// Bar is a function. +-// With a multiline doc comment. +-func Bar() ( +- string, +-) { +- /* This is a single line comment */ +- switch { +- case true: +- if true { +- fmt.Println("true") +- } else { +- fmt.Println("false") +- } +- case false: +- fmt.Println("false") +- default: +- fmt.Println("default") +- } +- /* This is a multiline +- block +- comment */ +- +- /* This is a multiline +- block +- comment */ +- // Followed by another comment. 
+- _ = []int{ +- 1, +- 2, +- 3, +- } +- _ = [2]string{"d", +- "e", +- } +- _ = map[string]int{ +- "a": 1, +- "b": 2, +- "c": 3, +- } +- type T struct { +- f string +- g int +- h string +- } +- _ = T{ +- f: "j", +- g: 4, +- h: "i", +- } +- x, y := make(chan bool), make(chan bool) +- select { +- case val := <-x: +- if val { +- fmt.Println("true from x") +- } else { +- fmt.Println("false from x") +- } +- case <-y: +- fmt.Println("y") +- default: +- fmt.Println("default") +- } +- // This is a multiline comment +- // that is not a doc comment. +- return ` +-this string +-is not indented` +-} +- +-func _() { +- slice := []int{1, 2, 3} +- sort.Slice(slice, func(i, j int) bool { +- a, b := slice[i], slice[j] +- return a > b +- }) +- +- sort.Slice(slice, func(i, j int) bool { return slice[i] > slice[j] }) +- +- sort.Slice( +- slice, +- func(i, j int) bool { +- return slice[i] > slice[j] +- }, +- ) +- +- fmt.Println( +- 1, 2, 3, +- 4, +- ) +- +- fmt.Println(1, 2, 3, +- 4, 5, 6, +- 7, 8, 9, +- 10) +- +- // Call with ellipsis. +- _ = fmt.Errorf( +- "test %d %d", +- []any{1, 2, 3}..., +- ) +- +- // Check multiline string. +- fmt.Println( +- `multi +- line +- string +- `, +- 1, 2, 3, +- ) +- +- // Call without arguments. +- _ = time.Now() +-} +- +-func _( +- a int, b int, +- c int, +-) { +-} +- +-func _() { // comment +- +-} +- +--- @raw -- +-package folding //@foldingrange(raw) +- +-import (<0 kind="imports"> +- "fmt" +- _ "log" +- "sort" +- "time" +-</0>) +- +-import _ "os" +- +-// Bar is a function.<1 kind="comment"> +-// With a multiline doc comment.</1> +-func Bar() (<2 kind=""> +- string, +-</2>) {<3 kind=""> +- /* This is a single line comment */ +- switch {<4 kind=""> +- case true:<5 kind=""> +- if true {<6 kind=""> +- fmt.Println(<7 kind="">"true"</7>) +- </6>} else {<8 kind=""> +- fmt.Println(<9 kind="">"false"</9>) +- </8>}</5> +- case false:<10 kind=""> +- fmt.Println(<11 kind="">"false"</11>)</10> +- default:<12 kind=""> +- fmt.Println(<13 kind="">"default"</13>)</12> +- </4>} +- /* This is a multiline<14 kind="comment"> +- block +- comment */</14> +- +- /* This is a multiline<15 kind="comment"> +- block +- comment */ +- // Followed by another comment.</15> +- _ = []int{<16 kind=""> +- 1, +- 2, +- 3, +- </16>} +- _ = [2]string{<17 kind="">"d", +- "e", +- </17>} +- _ = map[string]int{<18 kind=""> +- "a": 1, +- "b": 2, +- "c": 3, +- </18>} +- type T struct {<19 kind=""> +- f string +- g int +- h string +- </19>} +- _ = T{<20 kind=""> +- f: "j", +- g: 4, +- h: "i", +- </20>} +- x, y := make(<21 kind="">chan bool</21>), make(<22 kind="">chan bool</22>) +- select {<23 kind=""> +- case val := <-x:<24 kind=""> +- if val {<25 kind=""> +- fmt.Println(<26 kind="">"true from x"</26>) +- </25>} else {<27 kind=""> +- fmt.Println(<28 kind="">"false from x"</28>) +- </27>}</24> +- case <-y:<29 kind=""> +- fmt.Println(<30 kind="">"y"</30>)</29> +- default:<31 kind=""> +- fmt.Println(<32 kind="">"default"</32>)</31> +- </23>} +- // This is a multiline comment<33 kind="comment"> +- // that is not a doc comment.</33> +- return <34 kind="">` +-this string +-is not indented`</34> +-</3>} +- +-func _() {<35 kind=""> +- slice := []int{<36 kind="">1, 2, 3</36>} +- sort.Slice(<37 kind="">slice, func(<38 kind="">i, j int</38>) bool {<39 kind=""> +- a, b := slice[i], slice[j] +- return a > b +- </39>}</37>) +- +- sort.Slice(<40 kind="">slice, func(<41 kind="">i, j int</41>) bool {<42 kind=""> return slice[i] > slice[j] </42>}</40>) +- +- sort.Slice(<43 kind=""> +- slice, +- func(<44 kind="">i, j int</44>) bool {<45 
kind=""> +- return slice[i] > slice[j] +- </45>}, +- </43>) +- +- fmt.Println(<46 kind=""> +- 1, 2, 3, +- 4, +- </46>) +- +- fmt.Println(<47 kind="">1, 2, 3, +- 4, 5, 6, +- 7, 8, 9, +- 10</47>) +- +- // Call with ellipsis. +- _ = fmt.Errorf(<48 kind=""> +- "test %d %d", +- []any{<49 kind="">1, 2, 3</49>}..., +- </48>) +- +- // Check multiline string. +- fmt.Println(<50 kind=""> +- <51 kind="">`multi +- line +- string +- `</51>, +- 1, 2, 3, +- </50>) +- +- // Call without arguments. +- _ = time.Now() +-</35>} +- +-func _(<52 kind=""> +- a int, b int, +- c int, +-</52>) {<53 kind=""> +-</53>} +- +-func _() {<54 kind=""> // comment +- +-</54>} +- +diff -urN a/gopls/internal/test/marker/testdata/foldingrange/bad.txt b/gopls/internal/test/marker/testdata/foldingrange/bad.txt +--- a/gopls/internal/test/marker/testdata/foldingrange/bad.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/foldingrange/bad.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,41 +0,0 @@ +-This test verifies behavior of textDocument/foldingRange in the presence of +-unformatted syntax. +- +--- a.go -- +-package folding //@foldingrange(raw) +- +-import ( "fmt" +- _ "log" +-) +- +-import ( +- _ "os" ) +- +-// BadBar is a function. +-func BadBar() string { x := true +- if x { +- // This is the only foldable thing in this file when lineFoldingOnly +- fmt.Println("true") +- } else { +- fmt.Println("false") } +- return "" +-} +--- @raw -- +-package folding //@foldingrange(raw) +- +-import (<0 kind="imports"> "fmt" +- _ "log" +-</0>) +- +-import (<1 kind="imports"> +- _ "os" </1>) +- +-// BadBar is a function. +-func BadBar() string {<2 kind=""> x := true +- if x {<3 kind=""> +- // This is the only foldable thing in this file when lineFoldingOnly +- fmt.Println(<4 kind="">"true"</4>) +- </3>} else {<5 kind=""> +- fmt.Println(<6 kind="">"false"</6>) </5>} +- return "" +-</2>} +diff -urN a/gopls/internal/test/marker/testdata/foldingrange/parse_errors.txt b/gopls/internal/test/marker/testdata/foldingrange/parse_errors.txt +--- a/gopls/internal/test/marker/testdata/foldingrange/parse_errors.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/foldingrange/parse_errors.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,26 +0,0 @@ +-This test verifies that textDocument/foldingRange does not panic +-and produces no folding ranges if a file contains errors. +- +--- flags -- +--ignore_extra_diags +- +--- a.go -- +-package folding //@foldingrange(raw) +- +-// No comma. +-func _( +- a string +-) {} +- +-// Extra brace. +-func _() {}} +--- @raw -- +-package folding //@foldingrange(raw) +- +-// No comma. +-func _( +- a string +-) {} +- +-// Extra brace. +-func _() {}} +diff -urN a/gopls/internal/test/marker/testdata/format/format.txt b/gopls/internal/test/marker/testdata/format/format.txt +--- a/gopls/internal/test/marker/testdata/format/format.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/format/format.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,80 +0,0 @@ +-This test checks basic behavior of textDocument/formatting requests. 
+- +--- go.mod -- +-module mod.com +- +-go 1.18 +--- good.go -- +-package format //@format(good) +- +-import ( +- "log" +-) +- +-func goodbye() { +- log.Printf("byeeeee") +-} +- +--- @good -- +-package format //@format(good) +- +-import ( +- "log" +-) +- +-func goodbye() { +- log.Printf("byeeeee") +-} +--- bad.go -- +-package format //@format(bad) +- +-import ( +- "runtime" +- "fmt" +- "log" +-) +- +-func hello() { +- +- +- +- +- var x int //@diag("x", re"declared (and|but) not used") +-} +- +-func hi() { +- runtime.NumCPU() +- fmt.Printf("") +- +- log.Printf("") +-} +--- @bad -- +-package format //@format(bad) +- +-import ( +- "fmt" +- "log" +- "runtime" +-) +- +-func hello() { +- +- var x int //@diag("x", re"declared (and|but) not used") +-} +- +-func hi() { +- runtime.NumCPU() +- fmt.Printf("") +- +- log.Printf("") +-} +--- newline.go -- +-package format //@format(newline) +-func _() {} +--- @newline -- +-package format //@format(newline) +-func _() {} +--- oneline.go -- +-package format //@format(oneline) +--- @oneline -- +-package format //@format(oneline) +diff -urN a/gopls/internal/test/marker/testdata/format/generated.txt b/gopls/internal/test/marker/testdata/format/generated.txt +--- a/gopls/internal/test/marker/testdata/format/generated.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/format/generated.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,29 +0,0 @@ +-This test checks that formatting includes generated files too +-(reversing https://go.dev/cl/365295 to address issue #49555). +- +-See https://github.com/golang/go/issues/73959. +- +--- flags -- +--ignore_extra_diags +- +--- go.mod -- +-module example.com +-go 1.21 +- +--- a/a.go -- +-// Code generated by me. DO NOT EDIT. +- +-package a; import "fmt"; func main() { fmt.Println("hello") } +- +-//@format(out) +- +--- @out -- +-// Code generated by me. DO NOT EDIT. +- +-package a +- +-import "fmt" +- +-func main() { fmt.Println("hello") } +- +-//@format(out) +diff -urN a/gopls/internal/test/marker/testdata/format/issue59554.txt b/gopls/internal/test/marker/testdata/format/issue59554.txt +--- a/gopls/internal/test/marker/testdata/format/issue59554.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/format/issue59554.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,29 +0,0 @@ +-Test case for golang/go#59554: data corruption on formatting due to line +-directives. +- +-Note that gofumpt is needed for this test case, as it reformats var decls into +-short var decls. +- +--- settings.json -- +-{ +- "formatting.gofumpt": true +-} +- +--- main.go -- +-package main //@format(main) +- +-func Match(data []byte) int { +-//line :1 +- var idx = ^uint(0) +- _ = idx +- return -1 +-} +--- @main -- +-package main //@format(main) +- +-func Match(data []byte) int { +-//line :1 +- idx := ^uint(0) +- _ = idx +- return -1 +-} +diff -urN a/gopls/internal/test/marker/testdata/format/noparse.txt b/gopls/internal/test/marker/testdata/format/noparse.txt +--- a/gopls/internal/test/marker/testdata/format/noparse.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/format/noparse.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,27 +0,0 @@ +-This test checks that formatting does not run on code that has parse errors. 
+- +--- parse.go -- +-package noparse_format //@format(parse) +- +-func _() { +-f() //@diag("f", re"(undefined|undeclared name): f") +-} +--- @parse -- +-package noparse_format //@format(parse) +- +-func _() { +- f() //@diag("f", re"(undefined|undeclared name): f") +-} +--- noparse.go -- +-package noparse_format //@format(noparse) +- +-// The nonewvars expectation asserts that the go/analysis framework ran. +- +-func what() { +- var hi func() +- if { hi() //@diag(re"(){", re".*missing.*") +- } +- hi := nil +-} +--- @noparse -- +-7:5: missing condition in if statement +diff -urN a/gopls/internal/test/marker/testdata/highlight/controlflow.txt b/gopls/internal/test/marker/testdata/highlight/controlflow.txt +--- a/gopls/internal/test/marker/testdata/highlight/controlflow.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/highlight/controlflow.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,73 +0,0 @@ +-This test verifies document highlighting for control flow. +- +--- go.mod -- +-module mod.com +- +-go 1.18 +- +--- p.go -- +-package p +- +--- issue60589.go -- +-package p +- +-// This test verifies that control flow highlighting correctly +-// accounts for multi-name result parameters. +-// In golang/go#60589, it did not. +- +-func _() (foo int, bar, baz string) { //@ hiloc(func, "func", text), hiloc(foo, "foo", text), hiloc(fooint, "foo int", text), hiloc(int, "int", text), hiloc(bar, "bar", text), hiloc(beforebaz, " baz", text), hiloc(baz, "baz", text), hiloc(barbazstring, "bar, baz string", text), hiloc(beforestring, re`() string`, text), hiloc(string, "string", text) +- return 0, "1", "2" //@ hiloc(return, `return 0, "1", "2"`, text), hiloc(l0, "0", text), hiloc(l1, `"1"`, text), hiloc(l2, `"2"`, text) +-} +- +-// Assertions, expressed here to avoid clutter above. +-// Note that when the cursor is over the field type, there is some +-// (likely harmless) redundancy. +- +-//@ highlight(func, func, return) +-//@ highlight(foo, foo, l0) +-//@ highlight(int, fooint, int, l0) +-//@ highlight(bar, bar, l1) +-//@ highlight(beforebaz) +-//@ highlight(baz, baz, l2) +-//@ highlight(beforestring, baz, l2) +-//@ highlight(string, barbazstring, string, l1, l2) +-//@ highlight(l0, foo, l0) +-//@ highlight(l1, bar, l1) +-//@ highlight(l2, baz, l2) +- +-// Check that duplicate result names do not cause +-// inaccurate highlighting. +- +-func _() (x, x int32) { //@ loc(locx1, re`\((x)`), loc(locx2, re`(x) int`), hiloc(x1, re`\((x)`, text), hiloc(x2, re`(x) int`, text), diag(locx1, re"redeclared"), diag(locx2, re"redeclared") +- return 1, 2 //@ hiloc(one, "1", text), hiloc(two, "2", text) +-} +- +-//@ highlight(one, one, x1) +-//@ highlight(two, two, x2) +-//@ highlight(x1, x1, one) +-//@ highlight(x2, x2, two) +- +--- issue65516.go -- +-package p +- +-// This test checks that gopls doesn't crash while highlighting +-// functions with no body (golang/go#65516). +- +-func Foo() (int, string) //@hiloc(noBodyInt, "int", text), hiloc(noBodyFunc, "func", text) +-//@highlight(noBodyInt, noBodyInt), highlight(noBodyFunc, noBodyFunc) +- +--- issue65952.go -- +-package p +- +-// This test checks that gopls doesn't crash while highlighting +-// return values in functions with no results. 
+- +-func _() { +- return 0 //@hiloc(ret1, "0", text), diag("0", re"too many return") +- //@highlight(ret1, ret1) +-} +- +-func _() () { +- return 0 //@hiloc(ret2, "0", text), diag("0", re"too many return") +- //@highlight(ret2, ret2) +-} +diff -urN a/gopls/internal/test/marker/testdata/highlight/highlight_kind.txt b/gopls/internal/test/marker/testdata/highlight/highlight_kind.txt +--- a/gopls/internal/test/marker/testdata/highlight/highlight_kind.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/highlight/highlight_kind.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,88 +0,0 @@ +-This test checks textDocument/highlight with highlight kinds. +-For example, a use of a variable is reported as a "read", +-and an assignment to a variable is reported as a "write". +-(Note that the details don't align exactly with the Go +-type-checker notions of values versus addressable variables). +- +- +--- highlight_kind.go -- +-package a +- +-type Nest struct { +- nest *Nest //@hiloc(fNest, "nest", text) +-} +-type MyMap map[string]string +- +-type NestMap map[Nest]Nest +- +-func _() { +- const constIdent = 1 //@hiloc(constIdent, "constIdent", write) +- //@highlightall(constIdent) +- var varNoInit int //@hiloc(varNoInit, "varNoInit", write) +- (varNoInit) = 1 //@hiloc(varNoInitAssign, "varNoInit", write) +- _ = varNoInit //@hiloc(varNoInitRead, "varNoInit", read) +- //@highlightall(varNoInit, varNoInitAssign, varNoInitRead) +- +- str, num := "hello", 2 //@hiloc(str, "str", write), hiloc(num, "num", write) +- _, _ = str, num //@hiloc(strRead, "str", read), hiloc(numRead, "num", read) +- //@highlightall(str, strRead, strMapKey, strMapVal, strMyMapKey, strMyMapVal, strMyMapSliceKey, strMyMapSliceVal, strMyMapPtrSliceKey, strMyMapPtrSliceVal) +- //@highlightall(num, numRead, numAddr, numIncr, numMul) +- nest := &Nest{nest: nil} //@hiloc(nest, "nest", write),hiloc(fNestComp, re`(nest):`, write) +- nest.nest = &Nest{} //@hiloc(nestSelX, "nest", read), hiloc(fNestSel, re`(nest) =`, write) +- *nest.nest = Nest{} //@hiloc(nestSelXStar, "nest", read), hiloc(fNestSelStar, re`(nest) =`, write) +- //@highlightall(nest, nestSelX, nestSelXStar, nestMapVal) +- //@highlightall(fNest, fNestComp, fNestSel, fNestSelStar, fNestSliceComp, fNestPtrSliceComp, fNestMapKey) +- +- pInt := &num //@hiloc(pInt, "pInt", write),hiloc(numAddr, "num", read) +- // StarExpr is reported as "write" in GoLand and Rust Analyzer +- *pInt = 3 //@hiloc(pIntStar, "pInt", write) +- var ppInt **int = &pInt //@hiloc(ppInt, "ppInt", write),hiloc(pIntAddr, re`&(pInt)`, read) +- **ppInt = 4 //@hiloc(ppIntStar, "ppInt", write) +- *(*ppInt) = 4 //@hiloc(ppIntParen, "ppInt", write) +- //@highlightall(pInt, pIntStar, pIntAddr) +- //@highlightall(ppInt, ppIntStar, ppIntParen) +- +- num++ //@hiloc(numIncr, "num", write) +- num *= 1 //@hiloc(numMul, "num", write) +- +- var ch chan int = make(chan int, 10) //@hiloc(ch, "ch", write) +- ch <- 3 //@hiloc(chSend, "ch", write) +- <-ch //@hiloc(chRecv, "ch", read) +- //@highlightall(ch, chSend, chRecv) +- +- var nums []int = []int{1, 2} //@hiloc(nums, "nums", write) +- // IndexExpr is reported as "read" in GoLand, Rust Analyzer and Java JDT +- nums[0] = 1 //@hiloc(numsIndex, "nums", read) +- //@highlightall(nums, numsIndex) +- +- mapLiteral := map[string]string{ //@hiloc(mapLiteral, "mapLiteral", write) +- str: str, //@hiloc(strMapKey, "str", read),hiloc(strMapVal, re`(str),`, read) +- } +- for key, value := range mapLiteral { //@hiloc(mapKey, "key", write), hiloc(mapVal, "value", 
write), hiloc(mapLiteralRange, "mapLiteral", read) +- _, _ = key, value //@hiloc(mapKeyRead, "key", read), hiloc(mapValRead, "value", read) +- } +- //@highlightall(mapLiteral, mapLiteralRange) +- //@highlightall(mapKey, mapKeyRead) +- //@highlightall(mapVal, mapValRead) +- +- nestSlice := []Nest{ +- {nest: nil}, //@hiloc(fNestSliceComp, "nest", write) +- } +- nestPtrSlice := []*Nest{ +- {nest: nil}, //@hiloc(fNestPtrSliceComp, "nest", write) +- } +- myMap := MyMap{ +- str: str, //@hiloc(strMyMapKey, "str", read),hiloc(strMyMapVal, re`(str),`, read) +- } +- myMapSlice := []MyMap{ +- {str: str}, //@hiloc(strMyMapSliceKey, "str", read),hiloc(strMyMapSliceVal, re`: (str)`, read) +- } +- myMapPtrSlice := []*MyMap{ +- {str: str}, //@hiloc(strMyMapPtrSliceKey, "str", read),hiloc(strMyMapPtrSliceVal, re`: (str)`, read) +- } +- nestMap := NestMap{ +- Nest{nest: nil}: *nest, //@hiloc(fNestMapKey, "nest", write), hiloc(nestMapVal, re`(nest),`, read) +- } +- +- _, _, _, _, _, _ = myMap, nestSlice, nestPtrSlice, myMapSlice, myMapPtrSlice, nestMap +-} +diff -urN a/gopls/internal/test/marker/testdata/highlight/highlight_printf.txt b/gopls/internal/test/marker/testdata/highlight/highlight_printf.txt +--- a/gopls/internal/test/marker/testdata/highlight/highlight_printf.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/highlight/highlight_printf.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,62 +0,0 @@ +- +-This test checks functionality of the printf-like directives and operands highlight. +--- flags -- +--ignore_extra_diags +--- highlights.go -- +-package highlightprintf +-import ( +- "fmt" +-) +- +-func BasicPrintfHighlights() { +- fmt.Printf("Hello %s, you have %d new messages!", "Alice", 5) //@hiloc(normals, "%s", write),hiloc(normalarg0, "\"Alice\"", read),highlightall(normals, normalarg0) +- fmt.Printf("Hello %s, you have %d new messages!", "Alice", 5) //@hiloc(normald, "%d", write),hiloc(normalargs1, "5", read),highlightall(normald, normalargs1) +-} +- +-func ComplexPrintfHighlights() { +- fmt.Printf("Hello %#3.4s, you have %-2.3d new messages!", "Alice", 5) //@hiloc(complexs, "%#3.4s", write),hiloc(complexarg0, "\"Alice\"", read),highlightall(complexs, complexarg0) +- fmt.Printf("Hello %#3.4s, you have %-2.3d new messages!", "Alice", 5) //@hiloc(complexd, "%-2.3d", write),hiloc(complexarg1, "5", read),highlightall(complexd, complexarg1) +-} +- +-func MissingDirectives() { +- fmt.Printf("Hello %s, you have 5 new messages!", "Alice", 5) //@hiloc(missings, "%s", write),hiloc(missingargs0, "\"Alice\"", read),highlightall(missings, missingargs0) +-} +- +-func TooManyDirectives() { +- fmt.Printf("Hello %s, you have %d new %s %q messages!", "Alice", 5) //@hiloc(toomanys, "%s", write),hiloc(toomanyargs0, "\"Alice\"", read),highlightall(toomanys, toomanyargs0) +- fmt.Printf("Hello %s, you have %d new %s %q messages!", "Alice", 5) //@hiloc(toomanyd, "%d", write),hiloc(toomanyargs1, "5", read),highlightall(toomanyd, toomanyargs1) +-} +- +-func VerbIsPercentage() { +- fmt.Printf("%4.2% %d", 6) //@hiloc(z1, "%d", write),hiloc(z2, "6", read),highlightall(z1, z2) +-} +- +-func SpecialChars() { +- fmt.Printf("Hello \n %s, you \t \n have %d new messages!", "Alice", 5) //@hiloc(specials, "%s", write),hiloc(specialargs0, "\"Alice\"", read),highlightall(specials, specialargs0) +- fmt.Printf("Hello \n %s, you \t \n have %d new messages!", "Alice", 5) //@hiloc(speciald, "%d", write),hiloc(specialargs1, "5", read),highlightall(speciald, specialargs1) +-} +- +-func Escaped() { +- 
fmt.Printf("Hello %% \n %s, you \t%% \n have %d new m%%essages!", "Alice", 5) //@hiloc(escapeds, "%s", write),hiloc(escapedargs0, "\"Alice\"", read),highlightall(escapeds, escapedargs0) +- fmt.Printf("Hello %% \n %s, you \t%% \n have %d new m%%essages!", "Alice", 5) //@hiloc(escapedd, "%s", write),hiloc(escapedargs1, "\"Alice\"", read),highlightall(escapedd, escapedargs1) +- fmt.Printf("%d \nss \x25[2]d", 234, 123) //@hiloc(zz1, "%d", write),hiloc(zz2, "234", read),highlightall(zz1,zz2) +- fmt.Printf("%d \nss \x25[2]d", 234, 123) //@hiloc(zz3, "\\x25[2]d", write),hiloc(zz4, "123", read),highlightall(zz3,zz4) +-} +- +-func Indexed() { +- fmt.Printf("%[1]d", 3) //@hiloc(i1, "%[1]d", write),hiloc(i2, "3", read),highlightall(i1, i2) +- fmt.Printf("%[1]*d", 3, 6) //@hiloc(i3, "[1]*", write),hiloc(i4, "3", read),hiloc(i5, "d", write),hiloc(i6, "6", read),highlightall(i3, i4),highlightall(i5, i6) +- fmt.Printf("%[2]*[1]d", 3, 4) //@hiloc(i7, "[2]*", write),hiloc(i8, "4", read),hiloc(i9, "[1]d", write),hiloc(i10, "3", read),highlightall(i7, i8),highlightall(i9, i10) +- fmt.Printf("%[2]*.[1]*[3]d", 4, 5, 6) //@hiloc(i11, "[2]*", write),hiloc(i12, "5", read),hiloc(i13, ".[1]*", write),hiloc(i14, "4", read),hiloc(i15, "[3]d", write),hiloc(i16, "6", read),highlightall(i11, i12),highlightall(i13, i14),highlightall(i15, i16) +-} +- +-func MultipleIndexed() { +- fmt.Printf("%[1]d %[1].2d", 3) //@hiloc(m1, "%[1]d", write),hiloc(m2, "3", read),hiloc(m3, "%[1].2d", write),highlightall(m1, m2, m3) +-} +- +-// This test checks that gopls doesn't crash (index out of bounds) +-// while haven't fill the last non-variadic argument. +-func NoEffectOnUnfinishedArg() { +- var s string //@hiloc(var, "s", write) +- fmt.Fprintf(s) //@hiloc(firstArg, "s", read),highlightall(var, firstArg) +-} +diff -urN a/gopls/internal/test/marker/testdata/highlight/highlight.txt b/gopls/internal/test/marker/testdata/highlight/highlight.txt +--- a/gopls/internal/test/marker/testdata/highlight/highlight.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/highlight/highlight.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,158 +0,0 @@ +-This test checks basic functionality of the textDocument/highlight request. 
+- +--- highlights.go -- +-package highlights +- +-import ( +- "fmt" //@hiloc(fmtImp, "\"fmt\"", text),highlightall(fmtImp, fmt1, fmt2, fmt3, fmt4) +- h2 "net/http" //@hiloc(hImp, "h2", text),highlightall(hImp, hUse) +- "sort" +-) +- +-type F struct{ bar int } //@hiloc(barDeclaration, "bar", text),highlightall(barDeclaration, bar1, bar2, bar3) +- +-func _() F { +- return F{ +- bar: 123, //@hiloc(bar1, "bar", write) +- } +-} +- +-var foo = F{bar: 52} //@hiloc(fooDeclaration, "foo", write),hiloc(bar2, "bar", write),highlightall(fooDeclaration, fooUse) +- +-func Print() { //@hiloc(printFunc, "Print", text),highlightall(printFunc, printTest) +- _ = h2.Client{} //@hiloc(hUse, "h2", text) +- +- fmt.Println(foo) //@hiloc(fooUse, "foo", read),hiloc(fmt1, "fmt", text) +- fmt.Print("yo") //@hiloc(printSep, "Print", text),highlightall(printSep, print1, print2),hiloc(fmt2, "fmt", text) +-} +- +-func (x *F) Inc() { //@hiloc(xRightDecl, "x", text),hiloc(xLeftDecl, " *", text),highlightall(xRightDecl, xUse),highlight(xLeftDecl, xRightDecl, xUse) +- x.bar++ //@hiloc(xUse, "x", read),hiloc(bar3, "bar", write) +-} +- +-func testFunctions() { +- fmt.Print("main start") //@hiloc(print1, "Print", text),hiloc(fmt3, "fmt", text) +- fmt.Print("ok") //@hiloc(print2, "Print", text),hiloc(fmt4, "fmt", text) +- Print() //@hiloc(printTest, "Print", text) +-} +- +-// DocumentHighlight is undefined, so its uses below are type errors. +-// Nevertheless, document highlighting should still work. +-//@diag(locdoc1, re"undefined|undeclared"), diag(locdoc2, re"undefined|undeclared"), diag(locdoc3, re"undefined|undeclared") +- +-func toProtocolHighlight(rngs []int) []DocumentHighlight { //@loc(locdoc1, "DocumentHighlight"), hiloc(doc1, "DocumentHighlight", text),hiloc(docRet1, "[]DocumentHighlight", text),highlight(doc1, docRet1, doc1, doc2, doc3, result) +- result := make([]DocumentHighlight, 0, len(rngs)) //@loc(locdoc2, "DocumentHighlight"), hiloc(doc2, "DocumentHighlight", text),highlight(doc2, doc1, doc2, doc3) +- for _, rng := range rngs { +- result = append(result, DocumentHighlight{ //@loc(locdoc3, "DocumentHighlight"), hiloc(doc3, "DocumentHighlight", text),highlight(doc3, doc1, doc2, doc3) +- Range: rng, +- }) +- } +- return result //@hiloc(result, "result", text) +-} +- +-func testForLoops() { +- for i := 0; i < 10; i++ { //@hiloc(forDecl1, "for", text),highlightall(forDecl1, brk1, cont1) +- if i > 8 { +- break //@hiloc(brk1, "break", text) +- } +- if i < 2 { +- for j := 1; j < 10; j++ { //@hiloc(forDecl2, "for", text),highlightall(forDecl2, cont2) +- if j < 3 { +- for k := 1; k < 10; k++ { //@hiloc(forDecl3, "for", text),highlightall(forDecl3, cont3) +- if k < 3 { +- continue //@hiloc(cont3, "continue", text) +- } +- } +- continue //@hiloc(cont2, "continue", text) +- } +- } +- continue //@hiloc(cont1, "continue", text) +- } +- } +- +- arr := []int{} +- for i := range arr { //@hiloc(forDecl4, "for", text),highlightall(forDecl4, brk4, cont4) +- if i > 8 { +- break //@hiloc(brk4, "break", text) +- } +- if i < 4 { +- continue //@hiloc(cont4, "continue", text) +- } +- } +- +-Outer: +- for i := 0; i < 10; i++ { //@hiloc(forDecl5, "for", text),highlightall(forDecl5, brk5, brk6, brk8) +- break //@hiloc(brk5, "break", text) +- for { //@hiloc(forDecl6, "for", text),highlightall(forDecl6, cont5), diag("for", re"unreachable") +- if i == 1 { +- break Outer //@hiloc(brk6, "break Outer", text) +- } +- switch i { //@hiloc(switch1, "switch", text),highlightall(switch1, brk7) +- case 5: +- break //@hiloc(brk7, "break", text) +- case 
6: +- continue //@hiloc(cont5, "continue", text) +- case 7: +- break Outer //@hiloc(brk8, "break Outer", text) +- } +- } +- } +-} +- +-func testSwitch() { +- var i, j int +- +-L1: +- for { //@hiloc(forDecl7, "for", text),highlightall(forDecl7, brk10, cont6) +- L2: +- switch i { //@hiloc(switch2, "switch", text),highlightall(switch2, brk11, brk12, brk13) +- case 1: +- switch j { //@hiloc(switch3, "switch", text),highlightall(switch3, brk9) +- case 1: +- break //@hiloc(brk9, "break", text) +- case 2: +- break L1 //@hiloc(brk10, "break L1", text) +- case 3: +- break L2 //@hiloc(brk11, "break L2", text) +- default: +- continue //@hiloc(cont6, "continue", text) +- } +- case 2: +- break //@hiloc(brk12, "break", text) +- default: +- break L2 //@hiloc(brk13, "break L2", text) +- } +- } +-} +- +-func testReturn() bool { //@hiloc(func1, "func", text),hiloc(bool1, "bool", text),highlight(func1, func1, fullRet11, fullRet12),highlight(bool1, bool1, false1, bool2, true1) +- if 1 < 2 { +- return false //@hiloc(ret11, "return", text),hiloc(fullRet11, "return false", text),hiloc(false1, "false", text),highlight(ret11, func1, fullRet11, fullRet12) +- } +- candidates := []int{} +- sort.SliceStable(candidates, func(i, j int) bool { //@hiloc(func2, "func", text),hiloc(bool2, "bool", text),highlight(func2, func2, fullRet2) +- return candidates[i] > candidates[j] //@hiloc(ret2, "return", text),hiloc(fullRet2, "return candidates[i] > candidates[j]", text),highlight(ret2, func2, fullRet2) +- }) +- return true //@hiloc(ret12, "return", text),hiloc(fullRet12, "return true", text),hiloc(true1, "true", text),highlight(ret12, func1, fullRet11, fullRet12) +-} +- +-func testReturnFields() float64 { //@hiloc(retVal1, "float64", text),highlight(retVal1, retVal1, retVal11, retVal21) +- if 1 < 2 { +- return 20.1 //@hiloc(retVal11, "20.1", text),highlight(retVal11, retVal1, retVal11, retVal21) +- } +- z := 4.3 //@hiloc(zDecl, "z", write) +- return z //@hiloc(retVal21, "z", text),highlight(retVal21, retVal1, retVal11, zDecl, retVal21) +-} +- +-func testReturnMultipleFields() (float32, string) { //@hiloc(retVal31, "float32", text),hiloc(retVal32, "string", text),highlight(retVal31, retVal31, retVal41, retVal51),highlight(retVal32, retVal32, retVal42, retVal52) +- y := "im a var" //@hiloc(yDecl, "y", write), +- if 1 < 2 { +- return 20.1, y //@hiloc(retVal41, "20.1", text),hiloc(retVal42, "y", text),highlight(retVal41, retVal31, retVal41, retVal51),highlight(retVal42, retVal32, yDecl, retVal42, retVal52) +- } +- return 4.9, "test" //@hiloc(retVal51, "4.9", text),hiloc(retVal52, "\"test\"", text),highlight(retVal51, retVal31, retVal41, retVal51),highlight(retVal52, retVal32, retVal42, retVal52) +-} +- +-func testReturnFunc() int32 { //@hiloc(retCall, "int32", text) +- mulch := 1 //@hiloc(mulchDec, "mulch", write),highlight(mulchDec, mulchDec, mulchRet) +- return int32(mulch) //@hiloc(mulchRet, "mulch", read),hiloc(retFunc, "int32", text),hiloc(retTotal, "int32(mulch)", text),highlight(mulchRet, mulchDec, mulchRet),highlight(retFunc, retCall, retFunc, retTotal) +-} +diff -urN a/gopls/internal/test/marker/testdata/highlight/issue60435.txt b/gopls/internal/test/marker/testdata/highlight/issue60435.txt +--- a/gopls/internal/test/marker/testdata/highlight/issue60435.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/highlight/issue60435.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,15 +0,0 @@ +-This is a regression test for issue 60435: +-Highlighting "net/http" shouldn't have any effect +-on an import 
path that contains it as a substring, +-such as httptest. +- +--- highlights.go -- +-package highlights +- +-import ( +- "net/http" //@hiloc(httpImp, `"net/http"`, text) +- "net/http/httptest" //@hiloc(httptestImp, `"net/http/httptest"`, text) +-) +- +-var _ = httptest.NewRequest +-var _ = http.NewRequest //@hiloc(here, "http", text), highlight(here, here, httpImp) +diff -urN a/gopls/internal/test/marker/testdata/highlight/issue68918.txt b/gopls/internal/test/marker/testdata/highlight/issue68918.txt +--- a/gopls/internal/test/marker/testdata/highlight/issue68918.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/highlight/issue68918.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,15 +0,0 @@ +-Regression test for https://github.com/golang/go/issues/68918: +-crash due to missing type information in CompositeLit. +- +-The corresponding go/types fix in Go 1.24 introduces a +-new error message, hence the -ignore_extra_diags flag. +- +--- flags -- +--ignore_extra_diags +- +--- a.go -- +-package a +- +-var _ = T{{ x }} //@hiloc(x, "x", text), diag("T", re"undefined"), diag("{ ", re"missing type") +- +-//@highlight(x, x) +diff -urN a/gopls/internal/test/marker/testdata/highlight/switchbreak.txt b/gopls/internal/test/marker/testdata/highlight/switchbreak.txt +--- a/gopls/internal/test/marker/testdata/highlight/switchbreak.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/highlight/switchbreak.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,29 +0,0 @@ +-This is a regression test for issue 65752: a break in a switch should +-highlight the switch, not the enclosing loop. +- +-We suppress staticheck since it also gives a diagnostic +-about the break being ineffective. +- +--- settings.json -- +-{ +- "staticcheck": false +-} +- +--- a.go -- +-package a +- +-func _(x any) { +- for { +- // type switch +- switch x.(type) { //@hiloc(tswitch, "switch", text) +- default: +- break //@hiloc(tbreak, "break", text),highlight(tbreak, tswitch, tbreak) +- } +- +- // value switch +- switch { //@hiloc(vswitch, "switch", text) +- default: +- break //@hiloc(vbreak, "break", text), highlight(vbreak, vswitch, vbreak) +- } +- } +-} +diff -urN a/gopls/internal/test/marker/testdata/hover/basiclit.txt b/gopls/internal/test/marker/testdata/hover/basiclit.txt +--- a/gopls/internal/test/marker/testdata/hover/basiclit.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/hover/basiclit.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,87 +0,0 @@ +-This test checks gopls behavior when hovering over basic literals. +- +-Skipped on ppc64 as there appears to be a bug on aix-ppc64: golang/go#67526. 
+- +--- flags -- +--skip_goarch=ppc64 +- +--- basiclit.go -- +-package basiclit +- +-func _() { +- _ = 'a' //@hover("'a'", "'a'", latinA) +- _ = 0x61 //@hover("0x61", "0x61", latinAHex) +- +- _ = '\u2211' //@hover("'\\u2211'", "'\\u2211'", summation) +- _ = 0x2211 //@hover("0x2211", "0x2211", summationHex) +- _ = "foo \u2211 bar" //@hover("\\u2211", "\\u2211", summation) +- +- _ = '\a' //@hover("'\\a'", "'\\a'", control) +- _ = "foo \a bar" //@hover("\\a", "\\a", control) +- +- _ = '\U0001F30A' //@hover("'\\U0001F30A'", "'\\U0001F30A'", waterWave) +- _ = 0x0001F30A //@hover("0x0001F30A", "0x0001F30A", waterWaveHex) +- _ = 0X0001F30A //@hover("0X0001F30A", "0X0001F30A", waterWaveHex) +- _ = "foo \U0001F30A bar" //@hover("\\U0001F30A", "\\U0001F30A", waterWave) +- +- _ = '\x7E' //@hover("'\\x7E'", "'\\x7E'", tilde) +- _ = "foo \x7E bar" //@hover("\\x7E", "\\x7E", tilde) +- _ = "foo \a bar" //@hover("\\a", "\\a", control) +- +- _ = '\173' //@hover("'\\173'", "'\\173'", leftCurly) +- _ = "foo \173 bar" //@hover("\\173","\\173", leftCurly) +- _ = "foo \173 bar \u2211 baz" //@hover("\\173","\\173", leftCurly) +- _ = "foo \173 bar \u2211 baz" //@hover("\\u2211","\\u2211", summation) +- _ = "foo\173bar\u2211baz" //@hover("\\173","\\173", leftCurly) +- _ = "foo\173bar\u2211baz" //@hover("\\u2211","\\u2211", summation) +- +- // search for runes in string only if there is an escaped sequence +- _ = "hello" //@hover(`"hello"`, _, _) +- +- // incorrect escaped rune sequences +- _ = '\0' //@hover("'\\0'", _, _),diag(re`\\0()'`, re"illegal character") +- _ = '\u22111' //@hover("'\\u22111'", _, _) +- _ = '\U00110000' //@hover("'\\U00110000'", _, _) +- _ = '\u12e45'//@hover("'\\u12e45'", _, _) +- _ = '\xa' //@hover("'\\xa'", _, _) +- _ = 'aa' //@hover("'aa'", _, _) +- +- // other basic lits +- _ = 1 //@hover("1", _, _) +- _ = 1.2 //@hover("1.2", _, _) +- _ = 1.2i //@hover("1.2i", _, _) +- _ = 0123 //@hover("0123", _, _) +- _ = 0b1001 //@hover("0b", "0b1001", binaryNumber) +- _ = 0B1001 //@hover("0B", "0B1001", binaryNumber) +- _ = 0o77 //@hover("0o", "0o77", octalNumber) +- _ = 0O77 //@hover("0O", "0O77", octalNumber) +- _ = 0x1234567890 //@hover("0x1234567890", "0x1234567890", hexNumber) +- _ = 0X1234567890 //@hover("0X1234567890", "0X1234567890", hexNumber) +- _ = 0x1000000000000000000 //@hover("0x1", "0x1000000000000000000", bigHex) +-) +--- @bigHex -- +-4722366482869645213696 +--- @binaryNumber -- +-9 +--- @control -- +-U+0007, control +--- @hexNumber -- +-78187493520 +--- @latinA -- +-'a', U+0061, LATIN SMALL LETTER A +--- @latinAHex -- +-97, 'a', U+0061, LATIN SMALL LETTER A +--- @leftCurly -- +-'{', U+007B, LEFT CURLY BRACKET +--- @octalNumber -- +-63 +--- @summation -- +-'∑', U+2211, N-ARY SUMMATION +--- @summationHex -- +-8721, '∑', U+2211, N-ARY SUMMATION +--- @tilde -- +-'~', U+007E, TILDE +--- @waterWave -- +-'🌊', U+1F30A, WATER WAVE +--- @waterWaveHex -- +-127754, '🌊', U+1F30A, WATER WAVE +diff -urN a/gopls/internal/test/marker/testdata/hover/comment.txt b/gopls/internal/test/marker/testdata/hover/comment.txt +--- a/gopls/internal/test/marker/testdata/hover/comment.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/hover/comment.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,101 +0,0 @@ +-This test checks hovering over doc links in comments. 
+- +--- go.mod -- +-module mod.com +- +-go 1.20 +- +--- a.go -- +-package p +- +-import ( +- "unsafe" +- +- "mod.com/util" //@hover(`"mod.com/util"`, `"mod.com/util"`, strconv) +-) +- +-// [NumberBase] is the base to use for number parsing. //@hover("NumberBase", "NumberBase", NumberBase) +-const NumberBase = 10 +- +-// [Conv] converts s to an int. //@hover("Conv", "Conv", Conv) +-func Conv(s string) int { +- // [util.ParseInt] parses s and returns the integer corresponding to it. //@hover("util", "util", util),hover("ParseInt", "ParseInt", strconvParseInt) +- // [NumberBase] is the base to use for number parsing. +- i, _ := util.ParseInt(s, NumberBase, 64) +- return int(i) +-} +- +-// UnsafeConv converts s to a byte slice using [unsafe.Pointer]. hover("Pointer", "Pointer", unsafePointer) +-func UnsafeConv(s string) []byte { +- p := unsafe.StringData(s) +- b := unsafe.Slice(p, len(s)) +- return b +-} +- +--- util/conv.go -- +-// Package util provides utility functions. +-package util +- +-import "strconv" +- +-// ParseInt interprets a string s in the given base (0, 2 to 36) and +-// bit size (0 to 64) and returns the corresponding value i. +-func ParseInt(s string, base int, bitSize int) (int64, error) { +- return strconv.ParseInt(s, base, bitSize) +-} +- +--- @Conv -- +-```go +-func Conv(s string) int +-``` +- +---- +- +-\[Conv] converts s to an int. //@hover("Conv", "Conv", Conv) +- +- +---- +- +-[`p.Conv` on pkg.go.dev](https://pkg.go.dev/mod.com#Conv) +--- @NumberBase -- +-```go +-const NumberBase untyped int = 10 +-``` +- +---- +- +-\[NumberBase] is the base to use for number parsing. //@hover("NumberBase", "NumberBase", NumberBase) +- +- +---- +- +-[`p.NumberBase` on pkg.go.dev](https://pkg.go.dev/mod.com#NumberBase) +--- @strconv -- +-```go +-package util +-``` +- +---- +- +-Package util provides utility functions. +--- @strconvParseInt -- +-```go +-func ParseInt(s string, base int, bitSize int) (int64, error) +-``` +- +---- +- +-ParseInt interprets a string s in the given base (0, 2 to 36) and bit size (0 to 64) and returns the corresponding value i. +- +- +---- +- +-[`util.ParseInt` on pkg.go.dev](https://pkg.go.dev/mod.com/util#ParseInt) +--- @util -- +-```go +-package util +-``` +- +---- +- +-Package util provides utility functions. +diff -urN a/gopls/internal/test/marker/testdata/hover/const.txt b/gopls/internal/test/marker/testdata/hover/const.txt +--- a/gopls/internal/test/marker/testdata/hover/const.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/hover/const.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,168 +0,0 @@ +-This test checks hovering over constants. +- +--- go.mod -- +-module mod.com +- +-go 1.17 +- +--- c.go -- +-package c +- +-import ( +- "math" +- "time" +-) +- +-const X = 0 //@hover("X", "X", bX) +- +-// dur is a constant of type time.Duration. +-const dur = 15*time.Minute + 10*time.Second + 350*time.Millisecond //@hover("dur", "dur", dur) +- +-const _ = dur // pacify unusedfunc +- +-// MaxFloat32 is used in another package. +-const MaxFloat32 = 0x1p127 * (1 + (1 - 0x1p-23)) +- +-// Numbers. 
+-func _() { +- const hex, bin = 0xe34e, 0b1001001 +- +- const ( +- // no inline comment +- decimal = 153 +- +- numberWithUnderscore int64 = 10_000_000_000 +- octal = 0o777 +- expr = 2 << (0b111&0b101 - 2) +- boolean = (55 - 3) == (26 * 2) +- ) +- +- _ = decimal //@hover("decimal", "decimal", decimalConst) +- _ = hex //@hover("hex", "hex", hexConst) +- _ = bin //@hover("bin", "bin", binConst) +- _ = numberWithUnderscore //@hover("numberWithUnderscore", "numberWithUnderscore", numberWithUnderscoreConst) +- _ = octal //@hover("octal", "octal", octalConst) +- _ = expr //@hover("expr", "expr", exprConst) +- _ = boolean //@hover("boolean", "boolean", boolConst) +- +- const ln10 = 2.30258509299404568401799145468436420760110148862877297603332790 +- +- _ = ln10 //@hover("ln10", "ln10", ln10Const) +-} +- +-// Iota. +-func _() { +- const ( +- a = 1 << iota +- b +- ) +- +- _ = a //@hover("a", "a", aIota) +- _ = b //@hover("b", "b", bIota) +-} +- +-// Strings. +-func _() { +- const ( +- str = "hello" + " " + "world" +- longStr = `Lorem ipsum dolor sit amet, consectetur adipiscing elit. Curabitur eget ipsum non nunc +-molestie mattis id quis augue. Mauris dictum tincidunt ipsum, in auctor arcu congue eu. +-Morbi hendrerit fringilla libero commodo varius. Vestibulum in enim rutrum, rutrum tellus +-aliquet, luctus enim. Nunc sem ex, consectetur id porta nec, placerat vel urna.` +- ) +- +- _ = str //@hover("str", "str", strConst) +- _ = longStr //@hover("longStr", "longStr", longStrConst) +-} +- +-// Constants from other packages. +-func _() { +- _ = math.Log2E //@hover("Log2E", "Log2E", log2eConst) +-} +- +--- @bX -- +-```go +-const X untyped int = 0 +-``` +- +---- +- +-@hover("X", "X", bX) +- +- +---- +- +-[`c.X` on pkg.go.dev](https://pkg.go.dev/mod.com#X) +--- @dur -- +-```go +-const dur time.Duration = 15*time.Minute + 10*time.Second + 350*time.Millisecond // 15m10.35s +-``` +- +---- +- +-dur is a constant of type time.Duration. +--- @decimalConst -- +-```go +-const decimal untyped int = 153 +-``` +- +---- +- +-no inline comment +--- @hexConst -- +-```go +-const hex untyped int = 0xe34e // 58190 +-``` +--- @binConst -- +-```go +-const bin untyped int = 0b1001001 // 73 +-``` +--- @numberWithUnderscoreConst -- +-```go +-const numberWithUnderscore int64 = 10_000_000_000 // 10000000000 +-``` +--- @octalConst -- +-```go +-const octal untyped int = 0o777 // 511 +-``` +--- @exprConst -- +-```go +-const expr untyped int = 2 << (0b111&0b101 - 2) // 16 +-``` +--- @boolConst -- +-```go +-const boolean untyped bool = (55 - 3) == (26 * 2) // true +-``` +--- @ln10Const -- +-```go +-const ln10 untyped float = 2.30258509299404568401799145468436420760110148862877297603332790 // 2.30259 +-``` +--- @aIota -- +-```go +-const a untyped int = 1 << iota // 1 +-``` +--- @bIota -- +-```go +-const b untyped int = 2 +-``` +--- @strConst -- +-```go +-const str untyped string = "hello world" +-``` +--- @longStrConst -- +-```go +-const longStr untyped string = "Lorem ipsum dolor sit amet, consectetur adipiscing elit. Curabitur e... +-``` +--- @log2eConst -- +-```go +-const math.Log2E untyped float = 1 / Ln2 // 1.4427 +-``` +- +---- +- +-Mathematical constants. 
+- +- +---- +- +-[`math.Log2E` on pkg.go.dev](https://pkg.go.dev/math#Log2E) +diff -urN a/gopls/internal/test/marker/testdata/hover/embed.txt b/gopls/internal/test/marker/testdata/hover/embed.txt +--- a/gopls/internal/test/marker/testdata/hover/embed.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/hover/embed.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,134 +0,0 @@ +-This test checks that hover reports accessible embedded fields +-(after the doc comment and before the accessible methods). +- +--- go.mod -- +-module example.com +- +-go 1.18 +- +--- q/q.go -- +-package q +- +-type Q struct { +- One int +- two string +- q2[chan int] +-} +- +-type q2[T any] struct { +- Three *T +- four string +-} +- +--- p.go -- +-package p +- +-import "example.com/q" +- +-// doc +-type P struct { +- q.Q +-} +- +-func (P) m() {} +- +-var p P //@hover("P", "P", P) +- +-var _, _ = P.m, p // pacify unusedfunc +- +-type A struct { +- *B +-} +- +-type B struct { +- *C +-} +- +-type C struct { +- *D +-} +- +-type D struct { +- E int +-} +- +-type X struct{ +- *Y +-} +- +-type Y struct { +- *Z +-} +- +-type Z struct{ +- z int +-} +- +-var a A +-var _ = a.E //@hover("E", "E", E) +- +-var x struct { +- *X +-} +-var _ = x.z //@hover("z", "z", Z) +- +-type Al2 = int +-type N struct{ +- x Al2 +- y struct{ ZA } +-} +-type Al = *N +-type S struct{ Al } +-type ZA = *Z +-var _ = new(S).x //@hover("x", "x", X) +-var _ = new(S).y.z //@hover("z", "z", Zz), hover("y", "y", y) +- +--- @P -- +-```go +-type P struct { +- q.Q +-} +-``` +- +---- +- +-doc +- +- +-```go +-// Embedded fields: +-One int // through Q +-Three *chan int // through Q.q2 +-``` +- +-```go +-func (P) m() +-``` +- +---- +- +-[`p.P` on pkg.go.dev](https://pkg.go.dev/example.com#P) +--- @E -- +-```go +-field E int // through *B, *C, *D +-``` +- +---- +- +-[`(p.D).E` on pkg.go.dev](https://pkg.go.dev/example.com#D.E) +--- @Z -- +-```go +-field z int // through *X, *Y, *Z +-``` +--- @X -- +-```go +-field x Al2 // through Al +-``` +--- @Zz -- +-```go +-field z int // through ZA +-``` +--- @y -- +-```go +-field y struct{ZA} // through Al +-``` +diff -urN a/gopls/internal/test/marker/testdata/hover/generics.txt b/gopls/internal/test/marker/testdata/hover/generics.txt +--- a/gopls/internal/test/marker/testdata/hover/generics.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/hover/generics.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,127 +0,0 @@ +-This file contains tests for hovering over generic Go code. +- +-Requires go1.20+ for the new go/doc/comment package, and a change in Go 1.20 +-that affected the formatting of constraint interfaces. +- +-Its size expectations assume a 64-bit machine. +- +--- settings.json -- +-{"analyses": {"unusedfunc": false}} +- +--- flags -- +--skip_goarch=386,arm +- +--- go.mod -- +-// A go.mod is require for correct pkgsite links. +-// TODO(rfindley): don't link to ad-hoc or command-line-arguments packages! +-module mod.com +- +-go 1.18 +- +--- issue68213.go -- +-package generics +- +-// Hovering over an interface with empty type set must not panic. +-type empty interface { //@hover("empty", "empty", empty) +- int +- string +-} +- +--- @empty -- +-```go +-type empty interface { // size=16 (0x10) +- int +- string +-} +-``` +- +---- +- +-Hovering over an interface with empty type set must not panic. 
+--- generics.go -- +-package generics +- +-type value[T any] struct { //@hover("lue", "value", value),hover("T", "T", valueT) +- val T //@hover("T", "T", valuevalT) +- Q int64 //@hover("Q", "Q", valueQ) +-} +- +-type Value[T any] struct { //@hover("T", "T", ValueT) +- val T //@hover("T", "T", ValuevalT) +- Q int64 //@hover("Q", "Q", ValueQ) +-} +- +-func F[P interface{ ~int | string }]() { //@hover("P", "P", Ptparam) +- var _ P //@hover("P","P",Pvar) +-} +- +--- @value -- +-```go +-type value[T any] struct { +- val T //@hover("T", "T", valuevalT) +- Q int64 //@hover("Q", "Q", valueQ) +-} +-``` +--- @valueT -- +-```go +-type parameter T any +-``` +--- @valuevalT -- +-```go +-type parameter T any +-``` +--- @valueQ -- +-```go +-field Q int64 // size=8 +-``` +- +---- +- +-@hover("Q", "Q", valueQ) +--- @ValueT -- +-```go +-type parameter T any +-``` +--- @ValuevalT -- +-```go +-type parameter T any +-``` +--- @ValueQ -- +-```go +-field Q int64 // size=8 +-``` +- +---- +- +-@hover("Q", "Q", ValueQ) +- +- +---- +- +-[`(generics.Value).Q` on pkg.go.dev](https://pkg.go.dev/mod.com#Value.Q) +--- @Ptparam -- +-```go +-type parameter P interface{~int | string} +-``` +--- @Pvar -- +-```go +-type parameter P interface{~int | string} +-``` +--- inferred.go -- +-package generics +- +-func app[S interface{ ~[]E }, E any](s S, e E) S { +- return append(s, e) +-} +- +-func _() { +- _ = app[[]int] //@hover("app", "app", appint) +- _ = app[[]int, int] //@hover("app", "app", appint) +- _ = app[[]int]([]int{}, 0) //@hover("app", "app", appint), diag("[[]int]", re"unnecessary") +- _ = app([]int{}, 0) //@hover("app", "app", appint) +-} +- +--- @appint -- +-```go +-func app(s []int, e int) []int // func[S interface{~[]E}, E any](s S, e E) S +-``` +diff -urN a/gopls/internal/test/marker/testdata/hover/godef.txt b/gopls/internal/test/marker/testdata/hover/godef.txt +--- a/gopls/internal/test/marker/testdata/hover/godef.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/hover/godef.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,447 +0,0 @@ +-This test was ported from 'godef' in the old marker tests. +-It tests various hover and definition requests. 
+- +--- go.mod -- +-module godef.test +- +-go 1.18 +- +--- a/a_x_test.go -- +-package a_test +- +-import ( +- "testing" +-) +- +-func TestA2(t *testing.T) { //@hover("TestA2", "TestA2", TestA2) +- Nonexistant() //@diag("Nonexistant", re"(undeclared name|undefined): Nonexistant") +-} +- +--- @TestA2 -- +-```go +-func TestA2(t *testing.T) +-``` +--- @ember -- +-```go +-field Member string +-``` +- +---- +- +-@loc(Member, "Member") +- +- +---- +- +-[`(a.Thing).Member` on pkg.go.dev](https://pkg.go.dev/godef.test/a#Thing.Member) +--- a/d.go -- +-package a //@hover("a", _, a) +- +-import "fmt" +- +-type Thing struct { //@loc(Thing, "Thing") +- Member string //@loc(Member, "Member") +-} +- +-var Other Thing //@loc(Other, "Other") +- +-func Things(val []string) []Thing { //@loc(Things, "Things") +- return nil +-} +- +-func (t Thing) Method(i int) string { //@loc(Method, "Method") +- return t.Member +-} +- +-func (t Thing) Method3() { +-} +- +-func (t *Thing) Method2(i int, j int) (error, string) { +- return nil, t.Member +-} +- +-func (t *Thing) private() { +-} +- +-func useThings() { +- t := Thing{ //@hover("ing", "Thing", ing) +- Member: "string", //@hover("ember", "Member", ember), def("ember", Member) +- } +- fmt.Print(t.Member) //@hover("ember", "Member", ember), def("ember", Member) +- fmt.Print(Other) //@hover("ther", "Other", ther), def("ther", Other) +- Things(nil) //@hover("ings", "Things", ings), def("ings", Things) +- t.Method(0) //@hover("eth", "Method", eth), def("eth", Method) +-} +- +-type NextThing struct { //@loc(NextThing, "NextThing") +- Thing +- Value int +-} +- +-func (n NextThing) another() string { +- return n.Member +-} +- +-// Shadows Thing.Method3 +-func (n *NextThing) Method3() int { +- return n.Value +-} +- +-var nextThing NextThing //@hover("NextThing", "NextThing", NextThing), def("NextThing", NextThing) +- +--- @ings -- +-```go +-func Things(val []string) []Thing +-``` +- +---- +- +-[`a.Things` on pkg.go.dev](https://pkg.go.dev/godef.test/a#Things) +--- @ther -- +-```go +-var Other Thing +-``` +- +---- +- +-@loc(Other, "Other") +- +- +---- +- +-[`a.Other` on pkg.go.dev](https://pkg.go.dev/godef.test/a#Other) +--- @a -- +--- @ing -- +-```go +-type Thing struct { +- Member string //@loc(Member, "Member") +-} +-``` +- +---- +- +-```go +-func (t Thing) Method(i int) string +-func (t *Thing) Method2(i int, j int) (error, string) +-func (t Thing) Method3() +-func (t *Thing) private() +-``` +- +---- +- +-[`a.Thing` on pkg.go.dev](https://pkg.go.dev/godef.test/a#Thing) +--- @NextThing -- +-```go +-type NextThing struct { +- Thing +- Value int +-} +-``` +- +---- +- +-```go +-// Embedded fields: +-Member string // through Thing +-``` +- +-```go +-func (t Thing) Method(i int) string +-func (t *Thing) Method2(i int, j int) (error, string) +-func (n *NextThing) Method3() int +-func (n NextThing) another() string +-func (t *Thing) private() +-``` +- +---- +- +-[`a.NextThing` on pkg.go.dev](https://pkg.go.dev/godef.test/a#NextThing) +--- @eth -- +-```go +-func (t Thing) Method(i int) string +-``` +- +---- +- +-[`(a.Thing).Method` on pkg.go.dev](https://pkg.go.dev/godef.test/a#Thing.Method) +--- a/f.go -- +-// Package a is a package for testing go to definition. 
+-package a +- +-import "fmt" +- +-func TypeStuff() { +- var x string +- +- switch y := any(x).(type) { //@loc(y, "y"), hover("y", "y", y) , def("y", y) +- case int: //@loc(intY, "int") +- fmt.Printf("%v", y) //@hover("y", "y", inty), def("y", y) +- case string: //@loc(stringY, "string") +- fmt.Printf("%v", y) //@hover("y", "y", stringy), def("y", y) +- } +- +-} +--- @inty -- +-```go +-var y int +-``` +--- @stringy -- +-```go +-var y string +-``` +--- @y -- +-```go +-var y any +-``` +--- a/h.go -- +-package a +- +-func _() { +- type s struct { +- nested struct { +- // nested number +- number int64 //@loc(nestedNumber, "number") +- } +- nested2 []struct { +- // nested string +- str string //@loc(nestedString, "str") +- } +- x struct { +- x struct { +- x struct { +- x struct { +- x struct { +- // nested map +- m map[string]float64 //@loc(nestedMap, "m") +- } +- } +- } +- } +- } +- } +- +- var t s +- _ = t.nested.number //@hover("number", "number", nestedNumber), def("number", nestedNumber) +- _ = t.nested2[0].str //@hover("str", "str", nestedString), def("str", nestedString) +- _ = t.x.x.x.x.x.m //@hover("m", "m", nestedMap), def("m", nestedMap) +-} +- +-func _() { +- var s struct { +- // a field +- a int //@loc(structA, "a") +- // b nested struct +- b struct { //@loc(structB, "b") +- // c field of nested struct +- c int //@loc(structC, "c") +- } +- } +- _ = s.a //@def("a", structA) +- _ = s.b //@def("b", structB) +- _ = s.b.c //@def("c", structC) +- +- var arr []struct { +- // d field +- d int //@loc(arrD, "d") +- // e nested struct +- e struct { //@loc(arrE, "e") +- // f field of nested struct +- f int //@loc(arrF, "f") +- } +- } +- _ = arr[0].d //@def("d", arrD) +- _ = arr[0].e //@def("e", arrE) +- _ = arr[0].e.f //@def("f", arrF) +- +- var complex []struct { +- c <-chan map[string][]struct { +- // h field +- h int //@loc(complexH, "h") +- // i nested struct +- i struct { //@loc(complexI, "i") +- // j field of nested struct +- j int //@loc(complexJ, "j") +- } +- } +- } +- _ = (<-complex[0].c)["0"][0].h //@def("h", complexH) +- _ = (<-complex[0].c)["0"][0].i //@def("i", complexI) +- _ = (<-complex[0].c)["0"][0].i.j //@def("j", complexJ) +- +- var mapWithStructKey map[struct { //@diag("struct", re"invalid map key") +- // X key field +- x []string //@loc(mapStructKeyX, "x") +- }]int +- for k := range mapWithStructKey { +- _ = k.x //@def("x", mapStructKeyX) +- } +- +- var mapWithStructKeyAndValue map[struct { +- // Y key field +- y string //@loc(mapStructKeyY, "y") +- }]struct { +- // X value field +- x string //@loc(mapStructValueX, "x") +- } +- for k, v := range mapWithStructKeyAndValue { +- // TODO: we don't show docs for y field because both map key and value +- // are structs. 
And in this case, we parse only map value +- _ = k.y //@hover("y", "y", hoverStructKeyY), def("y", mapStructKeyY) +- _ = v.x //@hover("x", "x", hoverStructKeyX), def("x", mapStructValueX) +- } +- +- var i []map[string]interface { +- // open method comment +- open() error //@loc(openMethod, "open") +- } +- i[0]["1"].open() //@hover("pen","open", openMethod), def("open", openMethod) +-} +- +-func _() { +- test := struct { +- // test description +- desc string //@loc(testDescription, "desc") +- }{} +- _ = test.desc //@def("desc", testDescription) +- +- for _, tt := range []struct { +- // test input +- in map[string][]struct { //@loc(testInput, "in") +- // test key +- key string //@loc(testInputKey, "key") +- // test value +- value any //@loc(testInputValue, "value") +- } +- result struct { +- v <-chan struct { +- // expected test value +- value int //@loc(testResultValue, "value") +- } +- } +- }{} { +- _ = tt.in //@def("in", testInput) +- _ = tt.in["0"][0].key //@def("key", testInputKey) +- _ = tt.in["0"][0].value //@def("value", testInputValue) +- +- _ = (<-tt.result.v).value //@def("value", testResultValue) +- } +-} +- +-func _() { +- getPoints := func() []struct { +- // X coord +- x int //@loc(returnX, "x") +- // Y coord +- y int //@loc(returnY, "y") +- } { +- return nil +- } +- +- r := getPoints() +- _ = r[0].x //@def("x", returnX) +- _ = r[0].y //@def("y", returnY) +-} +--- @hoverStructKeyX -- +-```go +-field x string +-``` +- +---- +- +-X value field +--- @hoverStructKeyY -- +-```go +-field y string +-``` +- +---- +- +-Y key field +--- @nestedNumber -- +-```go +-field number int64 +-``` +- +---- +- +-nested number +--- @nestedString -- +-```go +-field str string +-``` +- +---- +- +-nested string +--- @openMethod -- +-```go +-func (interface) open() error +-``` +- +---- +- +-open method comment +--- @nestedMap -- +-```go +-field m map[string]float64 +-``` +- +---- +- +-nested map +--- b/e.go -- +-package b +- +-import ( +- "fmt" +- +- "godef.test/a" +-) +- +-func useThings() { +- t := a.Thing{} //@loc(bStructType, "ing") +- fmt.Print(t.Member) //@loc(bMember, "ember") +- fmt.Print(a.Other) //@loc(bVar, "ther") +- a.Things(nil) //@loc(bFunc, "ings") +-} +- +-/*@ +-def(bStructType, Thing) +-def(bMember, Member) +-def(bVar, Other) +-def(bFunc, Things) +-*/ +- +-func _() { +- var x any +- switch x := x.(type) { //@hover("x", "x", xInterface) +- case string: //@loc(eString, "string") +- fmt.Println(x) //@hover("x", "x", xString) +- case int: //@loc(eInt, "int") +- fmt.Println(x) //@hover("x", "x", xInt) +- } +-} +--- @xInt -- +-```go +-var x int +-``` +--- @xInterface -- +-```go +-var x any +-``` +--- @xString -- +-```go +-var x string +-``` +--- broken/unclosedIf.go -- +-package broken +- +-import "fmt" +- +-func unclosedIf() { +- if false { +- var myUnclosedIf string //@loc(myUnclosedIf, "myUnclosedIf") +- fmt.Printf("s = %v\n", myUnclosedIf) //@def("my", myUnclosedIf) +-} +- +-func _() {} //@diag("_", re"expected") +diff -urN a/gopls/internal/test/marker/testdata/hover/goprivate.txt b/gopls/internal/test/marker/testdata/hover/goprivate.txt +--- a/gopls/internal/test/marker/testdata/hover/goprivate.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/hover/goprivate.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,32 +0,0 @@ +-This test checks that links in hover obey GOPRIVATE. +- +--- env -- +-GOPRIVATE=mod.com +--- go.mod -- +-module mod.com +--- p.go -- +-package p +- +-// T should not be linked, as it is private. 
+-type T struct{} //@hover("T", "T", T) +--- lib/lib.go -- +-package lib +- +-// GOPRIVATE should also match nested packages. +-type L struct{} //@hover("L", "L", L) +--- @L -- +-```go +-type L struct{} // size=0 +-``` +- +---- +- +-GOPRIVATE should also match nested packages. +--- @T -- +-```go +-type T struct{} // size=0 +-``` +- +---- +- +-T should not be linked, as it is private. +diff -urN a/gopls/internal/test/marker/testdata/hover/hover-74351.txt b/gopls/internal/test/marker/testdata/hover/hover-74351.txt +--- a/gopls/internal/test/marker/testdata/hover/hover-74351.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/hover/hover-74351.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,36 +0,0 @@ +-Regression test for crash in hover on an alias to a built-in named type. +- +--- flags -- +--skip_goarch=386,arm +- +--- go.mod -- +-module example.com +- +-go 1.18 +- +--- a/a.go -- +-package a +- +-type A = error //@hover("A", "A", out) +- +--- @out -- +-```go +-type A = error // size=16 (0x10) +- +-type error interface { +- Error() string +-} +-``` +- +---- +- +-@hover("A", "A", out) +- +- +-```go +-func (error) Error() string +-``` +- +---- +- +-[`a.A` on pkg.go.dev](https://pkg.go.dev/example.com/a#A) +diff -urN a/gopls/internal/test/marker/testdata/hover/hover_alias.txt b/gopls/internal/test/marker/testdata/hover/hover_alias.txt +--- a/gopls/internal/test/marker/testdata/hover/hover_alias.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/hover/hover_alias.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,104 +0,0 @@ +-This test checks gopls behavior when hovering over alias type. +- +--- flags -- +--skip_goarch=386,arm +- +--- go.mod -- +-module mod.com +- +-go 1.18 +- +--- main.go -- +-package main +- +-import "mod.com/a" +-import "mod.com/b" +- +-type ToTypeDecl = b.RealType //@hover("ToTypeDecl", "ToTypeDecl", ToTypeDecl) +- +-type ToAlias = a.Alias //@hover("ToAlias", "ToAlias", ToAlias) +- +-type ToAliasWithComment = a.AliasWithComment //@hover("ToAliasWithComment", "ToAliasWithComment", ToAliasWithComment) +- +--- a/a.go -- +-package a +-import "mod.com/b" +- +-type Alias = b.RealType +- +-// AliasWithComment is a type alias with comments. +-type AliasWithComment = b.RealType +- +--- b/b.go -- +-package b +-// RealType is a real type rather than an alias type. 
+-type RealType struct { +- Name string +- Age int +-} +- +--- generic/a.go -- +-package generic +-func generic[T any]() {} +- +-type Named string +-type Alias = Named +- +-func _(){ +- generic[Alias]() //@hover("Alias", "Alias", Alias) +-} +- +--- @ToTypeDecl -- +-```go +-type ToTypeDecl = b.RealType // size=24 (0x18) +- +-type RealType struct { +- Name string +- Age int +-} +-``` +- +---- +- +-@hover("ToTypeDecl", "ToTypeDecl", ToTypeDecl) +- +- +---- +- +-[`main.ToTypeDecl` on pkg.go.dev](https://pkg.go.dev/mod.com#ToTypeDecl) +--- @ToAlias -- +-```go +-type ToAlias = a.Alias // size=24 (0x18) +-``` +- +---- +- +-@hover("ToAlias", "ToAlias", ToAlias) +- +- +---- +- +-[`main.ToAlias` on pkg.go.dev](https://pkg.go.dev/mod.com#ToAlias) +--- @ToAliasWithComment -- +-```go +-type ToAliasWithComment = a.AliasWithComment // size=24 (0x18) +-``` +- +---- +- +-@hover("ToAliasWithComment", "ToAliasWithComment", ToAliasWithComment) +- +- +---- +- +-[`main.ToAliasWithComment` on pkg.go.dev](https://pkg.go.dev/mod.com#ToAliasWithComment) +--- @Alias -- +-```go +-type Alias = Named +- +-type Named string +-``` +- +---- +- +-[`generic.Alias` on pkg.go.dev](https://pkg.go.dev/mod.com/generic#Alias) +diff -urN a/gopls/internal/test/marker/testdata/hover/hover.txt b/gopls/internal/test/marker/testdata/hover/hover.txt +--- a/gopls/internal/test/marker/testdata/hover/hover.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/hover/hover.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,108 +0,0 @@ +-This test demonstrates some basic features of hover. +- +-Needs go1.22 for the gotypesalias godebug value. +- +--- settings.json -- +-{"analyses": {"unusedfunc": false}} +- +--- flags -- +--min_go_command=go1.22 +- +--- go.mod -- +-module example.com +- +-go 1.18 +- +--- a.go -- +-// package comment +-package aa //@hover("aa", "aa", aa) +- +-const abc = 0x2a //@hover("b", "abc", abc),hover(" =", "abc", abc) +- +--- a2.go -- +- +-//go:build go1.21 +- +-package aa //@hover("aa", "aa", aa2) +- +--- typeswitch.go -- +-package aa +- +-func _() { +- var y any +- switch x := y.(type) { //@hover("x", "x", x) +- case int: +- println(x) //@hover("x", "x", xint),hover(")", "x", xint) +- } +-} +--- cmd/main.go -- +-//go:debug gotypesalias=0 +- +-// Note that since GODEBUG shows only settings that differ from +-// the current toolchain, the output here depends on the toolchain used. +-package main //@hover("main", "main", main) +- +-func main() { +-} +- +--- @abc -- +-```go +-const abc untyped int = 0x2a // 42 +-``` +- +---- +- +-@hover("b", "abc", abc),hover(" =", "abc", abc) +--- @x -- +-```go +-var x any +-``` +--- @xint -- +-```go +-var x int +-``` +--- @aa -- +-```go +-package aa +-``` +- +---- +- +-package comment +- +- +---- +- +- - Package path: example.com +- - Module: example.com +- - Language version: go1.18 +--- @aa2 -- +-```go +-package aa +-``` +- +---- +- +-package comment +- +- +---- +- +- - Package path: example.com +- - Module: example.com +- - Language version (current file): go1.21 +--- @main -- +-```go +-package main +-``` +- +---- +- +-Note that since GODEBUG shows only settings that differ from the current toolchain, the output here depends on the toolchain used. 
+- +- +---- +- +- - Package path: example.com/cmd +- - Module: example.com +- - Language version: go1.18 +diff -urN a/gopls/internal/test/marker/testdata/hover/issue74361.txt b/gopls/internal/test/marker/testdata/hover/issue74361.txt +--- a/gopls/internal/test/marker/testdata/hover/issue74361.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/hover/issue74361.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,40 +0,0 @@ +--- flags -- +--skip_goarch=386,arm +- +--- settings.json -- +-{"analyses": {"unusedfunc": false}} +- +--- go.mod -- +-module mod.com +- +--- a/a.go -- +-package a +- +-type ( +- Named int +- Alias = Named +- Alias2 = Alias +-) +- +-var ( +- named Named +- alias Alias //@hover("alias", "alias", alias) +- alias2 Alias2 //@hover("alias2", "alias2", alias2) +-) +- +--- @alias -- +-```go +-var alias Alias +-``` +- +---- +- +-@hover("alias", "alias", alias) +--- @alias2 -- +-```go +-var alias2 Alias2 +-``` +- +---- +- +-@hover("alias2", "alias2", alias2) +diff -urN a/gopls/internal/test/marker/testdata/hover/issues.txt b/gopls/internal/test/marker/testdata/hover/issues.txt +--- a/gopls/internal/test/marker/testdata/hover/issues.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/hover/issues.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,34 +0,0 @@ +-This test verifies fixes for various issues reported for hover. +- +--- go.mod -- +-module golang.org/lsptests +- +--- issue64239/p.go -- +-package issue64239 +- +-// golang/go#64239: hover fails for objects in the unsafe package. +- +-import "unsafe" +- +-var _ = unsafe.Sizeof(struct{}{}) //@hover("Sizeof", "Sizeof", "`Sizeof` on pkg.go.dev") +- +--- issue64237/p.go -- +-package issue64237 +- +-// golang/go#64237: hover panics for broken imports. +- +-import "golang.org/lsptests/nonexistant" //@diag("\"golang", re"could not import") +- +-var _ = nonexistant.Value //@hovererr("nonexistant", "no package data") +- +--- issue69362/p.go -- +-package issue69362 +- +-// golang/go#69362: hover panics over undefined implicits. +- +-func _() { +- switch x := y.(type) { //@diag("y", re"undefined"), hover("x", "x", "") +- case int: +- _ = x +- } +-} +diff -urN a/gopls/internal/test/marker/testdata/hover/json.txt b/gopls/internal/test/marker/testdata/hover/json.txt +--- a/gopls/internal/test/marker/testdata/hover/json.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/hover/json.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,33 +0,0 @@ +-This test demonstrates support for "hoverKind": "Structured". +- +-Its size expectations assume a 64-bit machine. +- +--- flags -- +--skip_goarch=386,arm +- +--- go.mod -- +-module example.com/p +- +-go 1.18 +- +--- settings.json -- +-{ +- "hoverKind": "Structured" +-} +--- p.go -- +-package p +- +-// MyType is a type. +-type MyType struct { //@ hover("MyType", "MyType", MyType) +- F int // a field +- S string // a string field +-} +- +-// MyFunc is a function. 
+-func MyFunc(i int) string { //@ hover("MyFunc", "MyFunc", MyFunc) +- return "" +-} +--- @MyFunc -- +-{"synopsis":"MyFunc is a function.","fullDocumentation":"MyFunc is a function.\n","signature":"func MyFunc(i int) string","singleLine":"func MyFunc(i int) string","symbolName":"p.MyFunc","linkPath":"example.com/p","linkAnchor":"MyFunc"} +--- @MyType -- +-{"synopsis":"MyType is a type.","fullDocumentation":"MyType is a type.\n","signature":"type MyType struct { // size=24 (0x18)\n\tF int // a field\n\tS string // a string field\n}\n","singleLine":"type MyType struct{F int; S string}","symbolName":"p.MyType","linkPath":"example.com/p","linkAnchor":"MyType"} +diff -urN a/gopls/internal/test/marker/testdata/hover/linkable_generics.txt b/gopls/internal/test/marker/testdata/hover/linkable_generics.txt +--- a/gopls/internal/test/marker/testdata/hover/linkable_generics.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/hover/linkable_generics.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,176 +0,0 @@ +-This file contains tests for documentation links to generic code in hover. +- +--- go.mod -- +-module mod.com +- +-go 1.19 +- +--- a.go -- +-package a +- +-import "mod.com/generic" +- +-func _() { +- // Hovering over instantiated object should produce accurate type +- // information, but link to the generic declarations. +- +- var x generic.GT[int] //@hover("GT", "GT", xGT) +- _ = x.F //@hover("x", "x", x),hover("F", "F", xF) +- +- f := generic.GF[int] //@hover("GF", "GF", fGF) +- _ = f //@hover("f", "f", f) +-} +- +--- generic/generic.go -- +-package generic +- +-// Hovering over type parameters should link to documentation. +-// +-// TODO(rfindley): should it? We should probably link to the type. +-type GT[P any] struct{ //@hover("GT", "GT", GT),hover("P", "P", GTP) +- F P //@hover("F", "F", F),hover("P", "P", FP) +-} +- +-func (GT[P]) M(p P) { //@hover("GT", "GT", GTrecv),hover("M","M", M),hover(re"p (P)", re"p (P)", pP) +-} +- +-func GF[P any] (p P) { //@hover("GF", "GF", GF) +-} +- +--- @F -- +-```go +-field F P +-``` +- +---- +- +-@hover("F", "F", F),hover("P", "P", FP) +- +- +---- +- +-[`(generic.GT).F` on pkg.go.dev](https://pkg.go.dev/mod.com/generic#GT.F) +--- @FP -- +-```go +-type parameter P any +-``` +--- @GF -- +-```go +-func GF[P any](p P) +-``` +- +---- +- +-[`generic.GF` on pkg.go.dev](https://pkg.go.dev/mod.com/generic#GF) +--- @GT -- +-```go +-type GT[P any] struct { +- F P //@hover("F", "F", F),hover("P", "P", FP) +-} +-``` +- +---- +- +-Hovering over type parameters should link to documentation. +- +-TODO(rfindley): should it? We should probably link to the type. +- +- +-```go +-func (GT[P]) M(p P) +-``` +- +---- +- +-[`generic.GT` on pkg.go.dev](https://pkg.go.dev/mod.com/generic#GT) +--- @GTP -- +-```go +-type parameter P any +-``` +--- @GTrecv -- +-```go +-type GT[P any] struct { +- F P //@hover("F", "F", F),hover("P", "P", FP) +-} +-``` +- +---- +- +-Hovering over type parameters should link to documentation. +- +-TODO(rfindley): should it? We should probably link to the type. 
+- +- +-```go +-func (GT[P]) M(p P) +-``` +- +---- +- +-[`generic.GT` on pkg.go.dev](https://pkg.go.dev/mod.com/generic#GT) +--- @M -- +-```go +-func (GT[P]) M(p P) +-``` +- +---- +- +-[`(generic.GT).M` on pkg.go.dev](https://pkg.go.dev/mod.com/generic#GT.M) +--- @f -- +-```go +-var f func(p int) +-``` +--- @fGF -- +-```go +-func generic.GF(p int) // func[P any](p P) +-``` +- +---- +- +-[`generic.GF` on pkg.go.dev](https://pkg.go.dev/mod.com/generic#GF) +--- @pP -- +-```go +-type parameter P any +-``` +--- @x -- +-```go +-var x generic.GT[int] +-``` +- +---- +- +-@hover("GT", "GT", xGT) +--- @xF -- +-```go +-field F int +-``` +- +---- +- +-@hover("F", "F", F),hover("P", "P", FP) +- +- +---- +- +-[`(generic.GT).F` on pkg.go.dev](https://pkg.go.dev/mod.com/generic#GT.F) +--- @xGT -- +-```go +-type GT[P any] struct { +- F P //@hover("F", "F", F),hover("P", "P", FP) +-} +-``` +- +---- +- +-Hovering over type parameters should link to documentation. +- +-TODO(rfindley): should it? We should probably link to the type. +- +- +-```go +-func (generic.GT[P]) M(p P) +-``` +- +---- +- +-[`generic.GT` on pkg.go.dev](https://pkg.go.dev/mod.com/generic#GT) +diff -urN a/gopls/internal/test/marker/testdata/hover/linkable.txt b/gopls/internal/test/marker/testdata/hover/linkable.txt +--- a/gopls/internal/test/marker/testdata/hover/linkable.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/hover/linkable.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,154 +0,0 @@ +-This test checks that we correctly determine pkgsite links for various +-identifiers. +- +-We should only produce links that work, meaning the object is reachable via the +-package's public API. +- +--- go.mod -- +-module mod.com +- +-go 1.18 +--- p.go -- +-package p +- +-type E struct { +- Embed int64 +-} +- +-// T is in the package scope, and so should be linkable. +-type T struct{ //@hover("T", "T", T) +- // Only exported fields should be linkable +- +- f int64 //@hover("f", "f", f) +- F int64 //@hover("F", "F", F) +- +- E +- +- // TODO(rfindley): is the link here correct? It ignores N. +- N struct { +- // Nested fields should also be linkable. +- Nested int64 //@hover("Nested", "Nested", Nested) +- } +-} +-// M is an exported method, and so should be linkable. +-func (T) M() {} +- +-// m is not exported, and so should not be linkable. +-func (T) m() {} +- +-var _ = T.m +- +-func _() { +- var t T +- +- // Embedded fields should be linkable. +- _ = t.Embed //@hover("Embed", "Embed", Embed) +- +- // Local variables should not be linkable, even if they are capitalized. +- var X int64 //@hover("X", "X", X) +- _ = X +- +- // Local types should not be linkable, even if they are capitalized. +- type Local struct { //@hover("Local", "Local", Local) +- E +- } +- +- // But the embedded field should still be linkable. +- var l Local +- _ = l.Embed //@hover("Embed", "Embed", Embed) +-} +--- @Embed -- +-```go +-field Embed int64 // through E +-``` +- +---- +- +-[`(p.E).Embed` on pkg.go.dev](https://pkg.go.dev/mod.com#E.Embed) +--- @F -- +-```go +-field F int64 // size=8, offset=8 +-``` +- +---- +- +-@hover("F", "F", F) +- +- +---- +- +-[`(p.T).F` on pkg.go.dev](https://pkg.go.dev/mod.com#T.F) +--- @Local -- +-```go +-type Local struct { // size=8 +- E +-} +-``` +- +---- +- +-Local types should not be linkable, even if they are capitalized. 
+- +- +-```go +-// Embedded fields: +-Embed int64 // through E +-``` +--- @Nested -- +-```go +-field Nested int64 // size=8, offset=0 +-``` +- +---- +- +-Nested fields should also be linkable. +--- @T -- +-```go +-type T struct { // size=32 (0x20) +- f int64 //@hover("f", "f", f) +- F int64 //@hover("F", "F", F) +- +- E +- +- // TODO(rfindley): is the link here correct? It ignores N. +- N struct { +- // Nested fields should also be linkable. +- Nested int64 //@hover("Nested", "Nested", Nested) +- } +-} +-``` +- +---- +- +-T is in the package scope, and so should be linkable. +- +- +-```go +-// Embedded fields: +-Embed int64 // through E +-``` +- +-```go +-func (T) M() +-func (T) m() +-``` +- +---- +- +-[`p.T` on pkg.go.dev](https://pkg.go.dev/mod.com#T) +--- @X -- +-```go +-var X int64 +-``` +- +---- +- +-Local variables should not be linkable, even if they are capitalized. +--- @f -- +-```go +-field f int64 // size=8, offset=0 +-``` +- +---- +- +-@hover("f", "f", f) +diff -urN a/gopls/internal/test/marker/testdata/hover/linkname.txt b/gopls/internal/test/marker/testdata/hover/linkname.txt +--- a/gopls/internal/test/marker/testdata/hover/linkname.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/hover/linkname.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,34 +0,0 @@ +-This test check hover on the 2nd argument in go:linkname directives. +- +--- go.mod -- +-module mod.com +- +--- upper/upper.go -- +-package upper +- +-import ( +- _ "unsafe" +- _ "mod.com/lower" +-) +- +-//go:linkname foo mod.com/lower.bar //@hover("mod.com/lower.bar", "mod.com/lower.bar", bar) +-func foo() string +- +--- lower/lower.go -- +-package lower +- +-// bar does foo. +-func bar() string { +- return "foo by bar" +-} +- +-var _ = bar +- +--- @bar -- +-```go +-func bar() string +-``` +- +---- +- +-bar does foo. +diff -urN a/gopls/internal/test/marker/testdata/hover/methods.txt b/gopls/internal/test/marker/testdata/hover/methods.txt +--- a/gopls/internal/test/marker/testdata/hover/methods.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/hover/methods.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,83 +0,0 @@ +-This test checks the formatting of the list of accessible methods. +- +-Observe that: +-- interface methods that appear in the syntax are not repeated +- in the method set of the type; +-- promoted methods of structs are shown; +-- receiver variables are correctly named; +-- receiver variables have a pointer type if appropriate; +-- only accessible methods are shown. 
+- +--- go.mod -- +-module example.com +- +--- lib/lib.go -- +-package lib +- +-type I interface { +- A() +- b() +- J +-} +- +-type J interface { C() } +- +-type S struct { I } +-func (s S) A() {} +-func (s S) b() {} +-func (s *S) PA() {} +-func (s *S) pb() {} +- +-var _ = (*S).pb +- +--- a/a.go -- +-package a +- +-import "example.com/lib" +- +-var _ lib.I //@hover("I", "I", I) +-var _ lib.J //@hover("J", "J", J) +-var _ lib.S //@hover("S", "S", S) +- +--- @I -- +-```go +-type I interface { +- A() +- b() +- J +-} +-``` +- +---- +- +-```go +-func (lib.J) C() +-``` +- +---- +- +-[`lib.I` on pkg.go.dev](https://pkg.go.dev/example.com/lib#I) +--- @J -- +-```go +-type J interface{ C() } +-``` +- +---- +- +-[`lib.J` on pkg.go.dev](https://pkg.go.dev/example.com/lib#J) +--- @S -- +-```go +-type S struct{ I } +-``` +- +---- +- +-```go +-func (s lib.S) A() +-func (lib.J) C() +-func (s *lib.S) PA() +-``` +- +---- +- +-[`lib.S` on pkg.go.dev](https://pkg.go.dev/example.com/lib#S) +diff -urN a/gopls/internal/test/marker/testdata/hover/return.txt b/gopls/internal/test/marker/testdata/hover/return.txt +--- a/gopls/internal/test/marker/testdata/hover/return.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/hover/return.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,12 +0,0 @@ +-This test checks that hovering over a return statement reveals the result type. +- +--- a.go -- +-package a +- +-func _() int { +- return 1 //@hover("return", "return 1", "returns (int)") +-} +- +-func _() (int, int) { +- return 1, 2 //@hover("return", "return 1, 2", "returns (int, int)") +-} +diff -urN a/gopls/internal/test/marker/testdata/hover/sizeoffset.txt b/gopls/internal/test/marker/testdata/hover/sizeoffset.txt +--- a/gopls/internal/test/marker/testdata/hover/sizeoffset.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/hover/sizeoffset.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,149 +0,0 @@ +-This test checks that hover reports the sizes of vars/types, +-and the offsets of struct fields. +- +-Notes: +-- this only works on the declaring identifier, not on refs. +-- the size of a type is undefined if it depends on type parameters. +-- the offset of a field is undefined if it or any preceding field +- has undefined size/alignment. +-- the test's size expectations assumes a 64-bit machine. +-- requires go1.22 because size information was inaccurate before. 
+- +--- settings.json -- +-{"analyses": {"unusedfunc": false}} +- +--- flags -- +--skip_goarch=386,arm +- +--- go.mod -- +-module example.com +- +-go 1.18 +--- a.go -- +-package a +- +-type T struct { //@ hover("T", "T", T) +- a int //@ hover("a", "a", a) +- U U //@ hover("U", "U", U) +- y, z int //@ hover("y", "y", y), hover("z", "z", z) +-} +- +-type U struct { +- slice []string +-} +- +-type G[T any] struct { +- p T //@ hover("p", "p", p) +- q int //@ hover("q", "q", q) +-} +- +-var _ struct { +- Gint G[int] //@ hover("Gint", "Gint", Gint) +- Gstring G[string] //@ hover("Gstring", "Gstring", Gstring) +-} +- +-type wasteful struct { //@ hover("wasteful", "wasteful", wasteful) +- a bool +- b [2]string +- c bool +-} +- +-type sizeclass struct { //@ hover("sizeclass", "sizeclass", sizeclass) +- a [5]*int +-} +- +--- @T -- +-```go +-type T struct { // size=48 (0x30) +- a int //@ hover("a", "a", a) +- U U //@ hover("U", "U", U) +- y, z int //@ hover("y", "y", y), hover("z", "z", z) +-} +-``` +- +---- +- +-[`a.T` on pkg.go.dev](https://pkg.go.dev/example.com#T) +--- @wasteful -- +-```go +-type wasteful struct { // size=48 (0x30) (29% wasted) +- a bool +- b [2]string +- c bool +-} +-``` +--- @sizeclass -- +-```go +-type sizeclass struct { // size=40 (0x28), class=48 (0x30) +- a [5]*int +-} +-``` +--- @a -- +-```go +-field a int // size=8, offset=0 +-``` +- +---- +- +-@ hover("a", "a", a) +--- @U -- +-```go +-field U U // size=24 (0x18), offset=8 +-``` +- +---- +- +-@ hover("U", "U", U) +- +- +---- +- +-[`(a.T).U` on pkg.go.dev](https://pkg.go.dev/example.com#T.U) +--- @y -- +-```go +-field y int // size=8, offset=32 (0x20) +-``` +- +---- +- +-@ hover("y", "y", y), hover("z", "z", z) +--- @z -- +-```go +-field z int // size=8, offset=40 (0x28) +-``` +- +---- +- +-@ hover("y", "y", y), hover("z", "z", z) +--- @p -- +-```go +-field p T +-``` +- +---- +- +-@ hover("p", "p", p) +--- @q -- +-```go +-field q int // size=8 +-``` +- +---- +- +-@ hover("q", "q", q) +--- @Gint -- +-```go +-field Gint G[int] // size=16 (0x10), offset=0 +-``` +- +---- +- +-@ hover("Gint", "Gint", Gint) +--- @Gstring -- +-```go +-field Gstring G[string] // size=24 (0x18), offset=16 (0x10) +-``` +- +---- +- +-@ hover("Gstring", "Gstring", Gstring) +diff -urN a/gopls/internal/test/marker/testdata/hover/std.txt b/gopls/internal/test/marker/testdata/hover/std.txt +--- a/gopls/internal/test/marker/testdata/hover/std.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/hover/std.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,106 +0,0 @@ +-This test checks hover results for built-in or standard library symbols. +- +-It uses synopsis documentation as full documentation for some of these +-built-ins varies across Go versions, where as it just so happens that the +-synopsis does not. +- +-In the future we may need to limit this test to the latest Go version to avoid +-documentation churn. 
+- +--- settings.json -- +-{ +- "hoverKind": "SynopsisDocumentation" +-} +- +--- go.mod -- +-module mod.com +- +-go 1.18 +- +--- std.go -- +-package std +- +-import ( +- "fmt" +- "go/types" +- "sync" +-) +- +-func _() { +- var err error //@loc(err, "err") +- fmt.Printf("%v", err) //@def("err", err) +- +- var _ string //@hover("string", "string", hoverstring) +- _ = make([]int, 0) //@hover("make", "make", hovermake) +- +- var mu sync.Mutex +- mu.Lock() //@hover("Lock", "Lock", hoverLock) +- +- var typ *types.Named //@hover("types", "types", hoverTypes) +- typ.Obj().Name() //@hover("Name", "Name", hoverName) +-} +--- @hoverLock -- +-```go +-func (m *sync.Mutex) Lock() +-``` +- +---- +- +-Lock locks m. +- +- +---- +- +-[`(sync.Mutex).Lock` on pkg.go.dev](https://pkg.go.dev/sync#Mutex.Lock) +--- @hoverName -- +-```go +-func (obj *types.object) Name() string +-``` +- +---- +- +-Name returns the object's (package-local, unqualified) name. +- +- +---- +- +-[`(types.TypeName).Name` on pkg.go.dev](https://pkg.go.dev/go/types#TypeName.Name) +--- @hoverTypes -- +-```go +-package types +-``` +- +---- +- +-Package types declares the data types and implements the algorithms for type-checking of Go packages. +- +- +---- +- +-[`types` on pkg.go.dev](https://pkg.go.dev/go/types) +--- @hovermake -- +-```go +-func make(t Type, size ...int) Type +-``` +- +---- +- +-The make built-in function allocates and initializes an object of type slice, map, or chan (only). +- +- +---- +- +-[`make` on pkg.go.dev](https://pkg.go.dev/builtin#make) +--- @hoverstring -- +-```go +-type string string +-``` +- +---- +- +-string is the set of all strings of 8-bit bytes, conventionally but not necessarily representing UTF-8-encoded text. +- +- +---- +- +-[`string` on pkg.go.dev](https://pkg.go.dev/builtin#string) +diff -urN a/gopls/internal/test/marker/testdata/hover/structfield.txt b/gopls/internal/test/marker/testdata/hover/structfield.txt +--- a/gopls/internal/test/marker/testdata/hover/structfield.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/hover/structfield.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,33 +0,0 @@ +-This test checks that the complete struct field is +-shown on hover (including struct tags and comments). +- +--- go.mod -- +-module example.com +- +--- lib/lib.go -- +-package lib +- +-type Something struct { +- // Field with a tag +- Field int `json:"field"` +-} +- +-func DoSomething() Something { +- var s Something +- s.Field = 42 //@hover("i", "Field", field) +- return s +-} +- +--- @field -- +-```go +-field Field int `json:"field"` +-``` +- +---- +- +-Field with a tag +- +- +---- +- +-[`(lib.Something).Field` on pkg.go.dev](https://pkg.go.dev/example.com/lib#Something.Field) +diff -urN a/gopls/internal/test/marker/testdata/implementation/basic.txt b/gopls/internal/test/marker/testdata/implementation/basic.txt +--- a/gopls/internal/test/marker/testdata/implementation/basic.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/implementation/basic.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,79 +0,0 @@ +-Basic test of implementation query. 
+- +--- go.mod -- +-module example.com +-go 1.18 +- +--- implementation/implementation.go -- +-package implementation +- +-import "example.com/other" +- +-type ImpP struct{} //@loc(ImpP, "ImpP"),implementation("ImpP", Laugher, OtherLaugher) +- +-func (*ImpP) Laugh() { //@loc(LaughP, "Laugh"),implementation("Laugh", Laugh, OtherLaugh) +-} +- +-type ImpS struct{} //@loc(ImpS, "ImpS"),implementation("ImpS", Laugher, OtherLaugher) +- +-func (ImpS) Laugh() { //@loc(LaughS, "Laugh"),implementation("Laugh", Laugh, OtherLaugh) +-} +- +-type Laugher interface { //@loc(Laugher, "Laugher"),implementation("Laugher", ImpP, OtherImpP, ImpS, OtherLaugher, OtherImpS, EmbedsImpP) +- Laugh() //@loc(Laugh, "Laugh"),implementation("Laugh", LaughP, OtherLaughP, LaughS, OtherLaugh, OtherLaughS) +-} +- +-type Foo struct { //@implementation("Foo", Joker) +- other.Foo +-} +- +-type Joker interface { //@loc(Joker, "Joker") +- Joke() //@loc(Joke, "Joke"),implementation("Joke", ImpJoker) +-} +- +-type cryer int //@implementation("cryer", Cryer) +- +-func (cryer) Cry(other.CryType) {} //@loc(CryImpl, "Cry"),implementation("Cry", Cry) +- +-type Empty any //@implementation("Empty") +- +-var _ interface{ Joke() } //@implementation("Joke", Joke, ImpJoker) +- +-type EmbedsImpP struct { //@loc(EmbedsImpP, "EmbedsImpP") +- ImpP //@implementation("ImpP", Laugher, OtherLaugher) +-} +- +-var _ error //@defloc(StdError, "error") +- +-type MyError struct {} //@implementation("MyError", StdError) +- +-func (MyError) Error() string { return "bah" } +- +--- other/other.go -- +-package other +- +-type ImpP struct{} //@loc(OtherImpP, "ImpP") +- +-func (*ImpP) Laugh() { //@loc(OtherLaughP, "Laugh") +-} +- +-type ImpS struct{} //@loc(OtherImpS, "ImpS") +- +-func (ImpS) Laugh() { //@loc(OtherLaughS, "Laugh") +-} +- +-type ImpI interface { //@loc(OtherLaugher, "ImpI") +- Laugh() //@loc(OtherLaugh, "Laugh") +-} +- +-type Foo struct { //@implementation("Foo", Joker) +-} +- +-func (Foo) Joke() { //@loc(ImpJoker, "Joke"),implementation("Joke", Joke) +-} +- +-type CryType int +- +-type Cryer interface { //@loc(Cryer, "Cryer") +- Cry(CryType) //@loc(Cry, "Cry"),implementation("Cry", CryImpl) +-} +diff -urN a/gopls/internal/test/marker/testdata/implementation/generics-basicalias.txt b/gopls/internal/test/marker/testdata/implementation/generics-basicalias.txt +--- a/gopls/internal/test/marker/testdata/implementation/generics-basicalias.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/implementation/generics-basicalias.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,26 +0,0 @@ +-Test of special case of 'implementation' query: aliases of basic types +-(rune vs int32) in the "tricky" (=generic) algorithm for unifying +-method signatures. +- +-We test both the local (intra-package) and global (cross-package) +-algorithms. 
+-
+--- go.mod --
+-module example.com
+-go 1.18
+-
+--- a/a.go --
+-package a
+-
+-type C[T any] struct{}
+-func (C[T]) F(rune, T) {} //@ loc(aCF, "F"), implementation("F", aIF, bIF)
+-
+-type I[T any] interface{ F(int32, T) } //@ loc(aIF, "F"), implementation("F", aCF, bCF, bIF)
+-
+--- b/b.go --
+-package b
+-
+-type C[T any] struct{}
+-func (C[T]) F(rune, T) {} //@ loc(bCF, "F"), implementation("F", aIF, bIF)
+-
+-type I[T any] interface{ F(int32, T) } //@ loc(bIF, "F"), implementation("F", aCF, aIF, bCF)
+diff -urN a/gopls/internal/test/marker/testdata/implementation/generics.txt b/gopls/internal/test/marker/testdata/implementation/generics.txt
+--- a/gopls/internal/test/marker/testdata/implementation/generics.txt 2000-01-01 00:00:00.000000000 -0000
++++ b/gopls/internal/test/marker/testdata/implementation/generics.txt 1969-12-31 18:00:00.000000000 -0600
+@@ -1,31 +0,0 @@
+-Test of 'implementation' query on generic types.
+-
+--- go.mod --
+-module example.com
+-go 1.18
+-
+--- implementation/implementation.go --
+-package implementation
+-
+-type GenIface[T any] interface { //@loc(GenIface, "GenIface"),implementation("GenIface", GC, GenConc, GI, GIString, GenConcString)
+-	F(int, string, T) //@loc(GenIfaceF, "F"),implementation("F", GCF, GenConcF, GIF)
+-}
+-
+-type GenConc[U any] int //@loc(GenConc, "GenConc"),implementation("GenConc", GI, GIString, GenIface)
+-
+-func (GenConc[V]) F(int, string, V) {} //@loc(GenConcF, "F"),implementation("F", GIF, GenIfaceF)
+-
+-type GenConcString struct{ GenConc[string] } //@loc(GenConcString, "GenConcString"),implementation(GenConcString, GIString, GI, GenIface)
+-
+--- other/other.go --
+-package other
+-
+-type GI[T any] interface { //@loc(GI, "GI"),implementation("GI", GenConc, GenIface, GenConcString, GIString, GC)
+-	F(int, string, T) //@loc(GIF, "F"),implementation("F", GenIfaceF, GenConcF, GCF)
+-}
+-
+-type GIString GI[string] //@loc(GIString, "GIString"),implementation("GIString", GenConcString, GenIface, GenConc, GI, GC)
+-
+-type GC[U any] int //@loc(GC, "GC"),implementation("GC", GenIface, GI, GIString)
+-
+-func (GC[V]) F(int, string, V) {} //@loc(GCF, "F"),implementation("F", GenIfaceF, GIF)
+diff -urN a/gopls/internal/test/marker/testdata/implementation/issue43655.txt b/gopls/internal/test/marker/testdata/implementation/issue43655.txt
+--- a/gopls/internal/test/marker/testdata/implementation/issue43655.txt 2000-01-01 00:00:00.000000000 -0000
++++ b/gopls/internal/test/marker/testdata/implementation/issue43655.txt 1969-12-31 18:00:00.000000000 -0600
+@@ -1,22 +0,0 @@
+-This test verifies that we find implementations of the built-in error interface.
+- +--- go.mod -- +-module example.com +-go 1.18 +- +--- p.go -- +-package p +- +-type errA struct{ error } //@loc(errA, "errA") +- +-type errB struct{} //@loc(errB, "errB") +-func (errB) Error() string{ return "" } //@loc(errBError, "Error") +- +-type notAnError struct{} +-func (notAnError) Error() int { return 0 } +- +-func _() { +- var _ error //@implementation("error", errA, errB) +- var a errA +- _ = a.Error //@implementation("Error", errBError) +-} +diff -urN a/gopls/internal/test/marker/testdata/implementation/issue67041.txt b/gopls/internal/test/marker/testdata/implementation/issue67041.txt +--- a/gopls/internal/test/marker/testdata/implementation/issue67041.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/implementation/issue67041.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,37 +0,0 @@ +-This test verifies that Implementations uses the correct object when querying +-local implementations. As described in golang/go#67041, a bug led to it +-comparing types from different realms. +- +--- go.mod -- +-module example.com +- +-go 1.18 +- +--- a/a.go -- +-package a +- +-type A struct{} +- +-type Aer interface { //@loc(Aer, "Aer") +- GetA() A +-} +- +-type X struct{} //@loc(X, "X") +- +-func (X) GetA() A +- +--- a/a_test.go -- +-package a +- +-// Verify that we also find implementations in a test variant. +-type Y struct{} //@loc(Y, "Y") +- +-func (Y) GetA() A +--- b/b.go -- +-package b +- +-import "example.com/a" +- +-var _ a.X //@implementation("X", Aer) +- +-var _ a.Aer //@implementation("Aer", X, Y) +diff -urN a/gopls/internal/test/marker/testdata/implementation/issue68641.txt b/gopls/internal/test/marker/testdata/implementation/issue68641.txt +--- a/gopls/internal/test/marker/testdata/implementation/issue68641.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/implementation/issue68641.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,64 +0,0 @@ +-Regression test that Implementation(I) returns J even when I and J are +-both interfaces; see issue #68641. Previously, interface/interface +-matches were never reported. +- +-However, the direction of the query is determined by the concreteness +-of the query type: Implements on a.B, an interface, reports types that +-are assignable to it, a.C; but Implements on concrete a.impl reports +-only interface types to which it may be assigned, and there is no way +-to query from interface B to find the (wider) interface A. (This would +-be a useful feature of LSP though; see +-https://github.com/microsoft/language-server-protocol/issues/2037.) +- +-The test exercises both the local (intra-) and global (cross-package) +-algorithms and checks that they are consistent. 
+- +--- go.mod -- +-module example.com +-go 1.12 +- +--- a/a.go -- +-package a +- +-type A interface { //@ loc(aA, "A"), implementation("A", aB, aC, aimpl, bA, bB, bC, bimpl) +- A() //@ loc(aAA, "A"), implementation("A", aimplA, bimplA, bAA) +-} +- +-type B interface { //@ loc(aB, "B"), implementation("B", aC, aimpl, bB, bC, bimpl) +- A +- B() +-} +- +-type C interface { //@ loc(aC, "C"), implementation("C", aimpl, bC, bimpl) +- B +- C() +-} +- +-type impl int //@ loc(aimpl, "impl"), implementation("impl", aA, aB, aC, bA, bB, bC) +- +-func (impl) A() //@ loc(aimplA, "A") +-func (impl) B() +-func (impl) C() +- +--- b/b.go -- +-package b +- +-type A interface { //@ loc(bA, "A"), implementation("A", aA, aB, aC, aimpl, bB, bC, bimpl) +- A() //@ loc(bAA, "A") +-} +- +-type B interface { //@ loc(bB, "B"), implementation("B", aB, aC, aimpl, bC, bimpl) +- A +- B() +-} +- +-type C interface { //@ loc(bC, "C"), implementation("C", aC, aimpl, bimpl) +- B +- C() +-} +- +-type impl int //@ loc(bimpl, "impl"), implementation("impl", aA, aB, aC, bA, bB, bC) +- +-func (impl) A() //@ loc(bimplA, "A") +-func (impl) B() +-func (impl) C() +diff -urN a/gopls/internal/test/marker/testdata/implementation/issue74305.txt b/gopls/internal/test/marker/testdata/implementation/issue74305.txt +--- a/gopls/internal/test/marker/testdata/implementation/issue74305.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/implementation/issue74305.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,11 +0,0 @@ +-Regression test for a crash, #74305. +- +--- go.mod -- +-module example.com +- +-go 1.18 +- +--- a/a.go -- +-package a +- +-var _ = undefined() //@ diag("undefined", re"undefined"), implementation("(", err="not a dynamic function call") +diff -urN a/gopls/internal/test/marker/testdata/implementation/signature.txt b/gopls/internal/test/marker/testdata/implementation/signature.txt +--- a/gopls/internal/test/marker/testdata/implementation/signature.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/implementation/signature.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,80 +0,0 @@ +-Test of local Implementation queries using function signatures. +- +-Assertions: +-- Query on "func" of a function type returns the corresponding concrete functions. +-- Query on "func" of a concrete function returns corresponding function types. +-- Query on "(" of a dynamic function call returns corresponding function types. +-- Different signatures (Nullary vs Handler) don't correspond. +- +-The @loc markers use the suffixes Func, Type, Call for the three kinds. +-Each query maps between these two sets: {Func} <=> {Type,Call}. +- +--- go.mod -- +-module example.com +-go 1.18 +- +--- a/a.go -- +-package a +- +-// R is short for Record. +-type R struct{} +- +-// H is short for Handler. 
+-type H func(*R) //@ loc(HType, "func"), implementation("func", aFunc, bFunc, cFunc) +- +-func aFunc(*R) {} //@ loc(aFunc, "func"), implementation("func", HType, hParamType, hCall) +- +-var bFunc = func(*R) {} //@ loc(bFunc, "func"), implementation("func", hParamType, hCall, HType) +- +-func nullary() { //@ loc(nullaryFunc, "func"), implementation("func", Nullary, fieldCall) +- cFunc := func(*R) {} //@ loc(cFunc, "func"), implementation("func", hParamType, hCall, HType) +- _ = cFunc +-} +- +-type Nullary func() //@ loc(Nullary, "func") +- +-func _( +- h func(*R)) { //@ loc(hParamType, "func"), implementation("func", aFunc, bFunc, cFunc) +- +- _ = aFunc // pacify unusedfunc +- _ = bFunc // pacify unusedfunc +- _ = nullary // pacify unusedfunc +- _ = h +- +- h(nil) //@ loc(hCall, "("), implementation("(", aFunc, bFunc, cFunc) +-} +- +-// generics: +- +-func _[T any](complex128) { +- f1 := func(T) int { return 0 } //@ loc(f1Func, "func"), implementation("func", fParamType, fCall, f1Call, f2Call) +- f2 := func(string) int { return 0 } //@ loc(f2Func, "func"), implementation("func", fParamType, fCall, f1Call, f2Call) +- f3 := func(int) int { return 0 } //@ loc(f3Func, "func"), implementation("func", f1Call) +- +- f1(*new(T)) //@ loc(f1Call, "("), implementation("(", f1Func, f2Func, f3Func, f4Func) +- f2("") //@ loc(f2Call, "("), implementation("(", f1Func, f2Func, f4Func) +- _ = f3 // not called +-} +- +-func f4[T any](T) int { return 0 } //@ loc(f4Func, "func"), implementation("func", fParamType, fCall, f1Call, f2Call) +- +-var _ = f4[string] // pacify unusedfunc +- +-func _( +- f func(string) int, //@ loc(fParamType, "func"), implementation("func", f1Func, f2Func, f4Func) +- err error) { +- +- f("") //@ loc(fCall, "("), implementation("(", f1Func, f2Func, f4Func) +- +- struct{x Nullary}{}.x() //@ loc(fieldCall, "("), implementation("(", nullaryFunc) +- +- // Calls that are not dynamic function calls: +- _ = len("") //@ implementation("(", err="not a dynamic function call") +- _ = int(0) //@ implementation("(", err="not a dynamic function call") +- _ = error.Error(nil) //@ implementation("(", err="not a dynamic function call") +- _ = err.Error() //@ implementation("(", err="not a dynamic function call") +- _ = f4(0) //@ implementation("(", err="not a dynamic function call"), loc(f4Call, "(") +-} +- +- +- +diff -urN a/gopls/internal/test/marker/testdata/inlayhints/ignored-error.txt b/gopls/internal/test/marker/testdata/inlayhints/ignored-error.txt +--- a/gopls/internal/test/marker/testdata/inlayhints/ignored-error.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/inlayhints/ignored-error.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,73 +0,0 @@ +-Test of "ignore error" inlay hint (#73930). +- +-- f.Close() generates a hint, except when followed by +- an "// ignore error" comment, or in a "_ = f.Close()" stmt. +-- fmt.Println() is exempted. +- +--- settings.json -- +-{"hints": {"ignoredError": true}} +- +--- p/p.go -- +-package p //@inlayhints(out) +- +-import ( "os"; "fmt" ) +- +-func _(f *os.File) { +- f.WriteString("hello") +- f.Close() +-} +- +-func _(f *os.File) { +- f.Close() // irrelevant comment +-} +- +-func _(f *os.File) { +- f.Close() // ignore error +-} +- +-func _(f *os.File) { +- _ = f.Close() +-} +- +-func _() { +- fmt.Println() +-} +- +-func _(f *os.File) { +- // Allow horizontal space before comment. 
+- new(os.File).Close() // ignore error +- f.Close() // ignore error +-} +- +--- @out -- +-package p //@inlayhints(out) +- +-import ( "os"; "fmt" ) +- +-func _(f *os.File) { +- f.WriteString("hello")< // ignore error> +- f.Close()< // ignore error> +-} +- +-func _(f *os.File) { +- f.Close()< // ignore error> // irrelevant comment +-} +- +-func _(f *os.File) { +- f.Close() // ignore error +-} +- +-func _(f *os.File) { +- _ = f.Close() +-} +- +-func _() { +- fmt.Println() +-} +- +-func _(f *os.File) { +- // Allow horizontal space before comment. +- new(os.File).Close() // ignore error +- f.Close() // ignore error +-} +- +diff -urN a/gopls/internal/test/marker/testdata/inlayhints/inlayhints.txt b/gopls/internal/test/marker/testdata/inlayhints/inlayhints.txt +--- a/gopls/internal/test/marker/testdata/inlayhints/inlayhints.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/inlayhints/inlayhints.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,409 +0,0 @@ +- +--- flags -- +--ignore_extra_diags +- +--- settings.json -- +-{ +- "hints": { +- "assignVariableTypes": true, +- "compositeLiteralFields": true, +- "compositeLiteralTypes": true, +- "constantValues": true, +- "functionTypeParameters": true, +- "parameterNames": true, +- "rangeVariabletypes": true +- } +-} +- +--- composite_literals.go -- +-package inlayHint //@inlayhints(complit) +- +-import "fmt" +- +-func fieldNames() { +- for _, c := range []struct { +- in, want string +- }{ +- struct{ in, want string }{"Hello, world", "dlrow ,olleH"}, +- {"Hello, 世界", "界世 ,olleH"}, +- {"", ""}, +- } { +- fmt.Println(c.in == c.want) +- } +-} +- +-func fieldNamesPointers() { +- for _, c := range []*struct { +- in, want string +- }{ +- &struct{ in, want string }{"Hello, world", "dlrow ,olleH"}, +- {"Hello, 世界", "界世 ,olleH"}, +- {"", ""}, +- } { +- fmt.Println(c.in == c.want) +- } +-} +- +--- @complit -- +-package inlayHint //@inlayhints(complit) +- +-import "fmt" +- +-func fieldNames() { +- for _, c := range []struct { +- in, want string +- }{ +- struct{ in, want string }{<in: >"Hello, world", <want: >"dlrow ,olleH"}, +- <struct{in string; want string}>{<in: >"Hello, 世界", <want: >"界世 ,olleH"}, +- <struct{in string; want string}>{<in: >"", <want: >""}, +- } { +- fmt.Println(<a...: >c.in == c.want) +- } +-} +- +-func fieldNamesPointers() { +- for _, c := range []*struct { +- in, want string +- }{ +- &struct{ in, want string }{<in: >"Hello, world", <want: >"dlrow ,olleH"}, +- <&struct{in string; want string}>{<in: >"Hello, 世界", <want: >"界世 ,olleH"}, +- <&struct{in string; want string}>{<in: >"", <want: >""}, +- } { +- fmt.Println(<a...: >c.in == c.want) +- } +-} +- +--- constant_values.go -- +-package inlayHint //@inlayhints(values) +- +-const True = true +- +-type Kind int +- +-const ( +- KindNone Kind = iota +- KindPrint +- KindPrintf +- KindErrorf +-) +- +-const ( +- u = iota * 4 +- v float64 = iota * 42 +- w = iota * 42 +-) +- +-const ( +- a, b = 1, 2 +- c, d +- e, f = 5 * 5, "hello" + "world" +- g, h +- i, j = true, f +-) +- +-// No hint +-const ( +- Int = 3 +- Float = 3.14 +- Bool = true +- Rune = '3' +- Complex = 2.7i +- String = "Hello, world!" +-) +- +-var ( +- varInt = 3 +- varFloat = 3.14 +- varBool = true +- varRune = '3' + '4' +- varComplex = 2.7i +- varString = "Hello, world!" 
+-) +- +--- @values -- +-package inlayHint //@inlayhints(values) +- +-const True = true +- +-type Kind int +- +-const ( +- KindNone Kind = iota< = 0> +- KindPrint< = 1> +- KindPrintf< = 2> +- KindErrorf< = 3> +-) +- +-const ( +- u = iota * 4< = 0> +- v float64 = iota * 42< = 42> +- w = iota * 42< = 84> +-) +- +-const ( +- a, b = 1, 2 +- c, d< = 1, 2> +- e, f = 5 * 5, "hello" + "world"< = 25, "helloworld"> +- g, h< = 25, "helloworld"> +- i, j = true, f< = true, "helloworld"> +-) +- +-// No hint +-const ( +- Int = 3 +- Float = 3.14 +- Bool = true +- Rune = '3' +- Complex = 2.7i +- String = "Hello, world!" +-) +- +-var ( +- varInt = 3 +- varFloat = 3.14 +- varBool = true +- varRune = '3' + '4' +- varComplex = 2.7i +- varString = "Hello, world!" +-) +- +--- parameter_names.go -- +-package inlayHint //@inlayhints(parameters) +- +-import "fmt" +- +-func hello(name string) string { +- return "Hello " + name +-} +- +-func helloWorld() string { +- return hello("World") +-} +- +-type foo struct{} +- +-func (*foo) bar(baz string, qux int) int { +- if baz != "" { +- return qux + 1 +- } +- return qux +-} +- +-func kase(foo int, bar bool, baz ...string) { +- fmt.Println(foo, bar, baz) +-} +- +-func kipp(foo string, bar, baz string) { +- fmt.Println(foo, bar, baz) +-} +- +-func plex(foo, bar string, baz string) { +- fmt.Println(foo, bar, baz) +-} +- +-func tars(foo string, bar, baz string) { +- fmt.Println(foo, bar, baz) +-} +- +-func foobar() { +- var x foo +- x.bar("", 1) +- kase(0, true, "c", "d", "e") +- kipp("a", "b", "c") +- plex("a", "b", "c") +- tars("a", "b", "c") +- foo, bar, baz := "a", "b", "c" +- kipp(foo, bar, baz) +- plex("a", bar, baz) +- tars(foo+foo, (bar), "c") +- +-} +- +--- @parameters -- +-package inlayHint //@inlayhints(parameters) +- +-import "fmt" +- +-func hello(name string) string { +- return "Hello " + name +-} +- +-func helloWorld() string { +- return hello(<name: >"World") +-} +- +-type foo struct{} +- +-func (*foo) bar(baz string, qux int) int { +- if baz != "" { +- return qux + 1 +- } +- return qux +-} +- +-func kase(foo int, bar bool, baz ...string) { +- fmt.Println(<a...: >foo, bar, baz) +-} +- +-func kipp(foo string, bar, baz string) { +- fmt.Println(<a...: >foo, bar, baz) +-} +- +-func plex(foo, bar string, baz string) { +- fmt.Println(<a...: >foo, bar, baz) +-} +- +-func tars(foo string, bar, baz string) { +- fmt.Println(<a...: >foo, bar, baz) +-} +- +-func foobar() { +- var x foo +- x.bar(<baz: >"", <qux: >1) +- kase(<foo: >0, <bar: >true, <baz...: >"c", "d", "e") +- kipp(<foo: >"a", <bar: >"b", <baz: >"c") +- plex(<foo: >"a", <bar: >"b", <baz: >"c") +- tars(<foo: >"a", <bar: >"b", <baz: >"c") +- foo< string>, bar< string>, baz< string> := "a", "b", "c" +- kipp(foo, bar, baz) +- plex(<foo: >"a", bar, baz) +- tars(<foo: >foo+foo, <bar: >(bar), <baz: >"c") +- +-} +- +--- type_params.go -- +-package inlayHint //@inlayhints(typeparams) +- +-func main() { +- ints := map[string]int64{ +- "first": 34, +- "second": 12, +- } +- +- floats := map[string]float64{ +- "first": 35.98, +- "second": 26.99, +- } +- +- SumIntsOrFloats[string, int64](ints) +- SumIntsOrFloats[string, float64](floats) +- +- SumIntsOrFloats(ints) +- SumIntsOrFloats(floats) +- +- SumNumbers(ints) +- SumNumbers(floats) +-} +- +-type Number interface { +- int64 | float64 +-} +- +-func SumIntsOrFloats[K comparable, V int64 | float64](m map[K]V) V { +- var s V +- for _, v := range m { +- s += v +- } +- return s +-} +- +-func SumNumbers[K comparable, V Number](m map[K]V) V { +- var s V +- for _, v := range m { 
+- s += v +- } +- return s +-} +- +--- @typeparams -- +-package inlayHint //@inlayhints(typeparams) +- +-func main() { +- ints< map[string]int64> := map[string]int64{ +- "first": 34, +- "second": 12, +- } +- +- floats< map[string]float64> := map[string]float64{ +- "first": 35.98, +- "second": 26.99, +- } +- +- SumIntsOrFloats[string, int64](<m: >ints) +- SumIntsOrFloats[string, float64](<m: >floats) +- +- SumIntsOrFloats<[string, int64]>(<m: >ints) +- SumIntsOrFloats<[string, float64]>(<m: >floats) +- +- SumNumbers<[string, int64]>(<m: >ints) +- SumNumbers<[string, float64]>(<m: >floats) +-} +- +-type Number interface { +- int64 | float64 +-} +- +-func SumIntsOrFloats[K comparable, V int64 | float64](m map[K]V) V { +- var s V +- for _, v := range m { +- s += v +- } +- return s +-} +- +-func SumNumbers[K comparable, V Number](m map[K]V) V { +- var s V +- for _, v := range m { +- s += v +- } +- return s +-} +- +--- variable_types.go -- +-package inlayHint //@inlayhints(vartypes) +- +-func assignTypes() { +- var x string +- var y = "" +- i, j := 0, len([]string{})-1 +- println(i, j) +-} +- +-func rangeTypes() { +- for k, v := range []string{} { +- println(k, v) +- } +-} +- +-func funcLitType() { +- myFunc := func(a string) string { return "" } +-} +- +-func compositeLitType() { +- foo := map[string]any{"": ""} +-} +- +--- @vartypes -- +-package inlayHint //@inlayhints(vartypes) +- +-func assignTypes() { +- var x string +- var y = "" +- i< int>, j< int> := 0, len([]string{})-1 +- println(i, j) +-} +- +-func rangeTypes() { +- for k, v := range []string{} { +- println(k, v) +- } +-} +- +-func funcLitType() { +- myFunc< func(a string) string> := func(a string) string { return "" } +-} +- +-func compositeLitType() { +- foo< map[string]any> := map[string]any{"": ""} +-} +- +diff -urN a/gopls/internal/test/marker/testdata/inlayhints/issue67142.txt b/gopls/internal/test/marker/testdata/inlayhints/issue67142.txt +--- a/gopls/internal/test/marker/testdata/inlayhints/issue67142.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/inlayhints/issue67142.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,35 +0,0 @@ +-Regression test for golang/go#67142. +- +--- flags -- +--ignore_extra_diags +- +--- settings.json -- +-{ +- "hints": { +- "assignVariableTypes": true, +- "compositeLiteralFields": true, +- "compositeLiteralTypes": true, +- "constantValues": true, +- "functionTypeParameters": true, +- "parameterNames": true, +- "rangeVariabletypes": true +- } +-} +- +--- go.mod -- +-module w +- +-go 1.21.9 +- +--- p.go -- +-//@inlayhints(out) +-package p +- +-var _ = rand.Float64() +- +--- @out -- +-//@inlayhints(out) +-package p +- +-var _ = rand.Float64() +- +diff -urN a/gopls/internal/test/marker/testdata/links/links.txt b/gopls/internal/test/marker/testdata/links/links.txt +--- a/gopls/internal/test/marker/testdata/links/links.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/links/links.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,47 +0,0 @@ +-This test verifies behavior of textDocument/documentLink. 
+- +--- go.mod -- +-module golang.org/lsptests +- +-go 1.18 +--- foo/foo.go -- +-package foo +- +-type StructFoo struct {} +- +--- links/links.go -- +-package links //@documentlink(links) +- +-import ( +- "fmt" +- +- "golang.org/lsptests/foo" +- +- _ "database/sql" +-) +- +-var ( +- _ fmt.Formatter +- _ foo.StructFoo +- _ errors.Formatter //@diag("errors", re"(undeclared|undefined)") +-) +- +-// Foo function +-func Foo() string { +- /*https://example.com/comment */ +- +- url := "https://example.com/string_literal" +- return url +- +- // TODO(golang/go#1234): Link the relevant issue. +- // TODO(microsoft/vscode-go#12): Another issue. +-} +- +--- @links -- +-links/links.go:4:3-6 https://pkg.go.dev/fmt +-links/links.go:6:3-26 https://pkg.go.dev/golang.org/lsptests/foo +-links/links.go:8:5-17 https://pkg.go.dev/database/sql +-links/links.go:21:10-44 https://example.com/string_literal +-links/links.go:19:4-31 https://example.com/comment +-links/links.go:24:10-24 https://github.com/golang/go/issues/1234 +-links/links.go:25:10-32 https://github.com/microsoft/vscode-go/issues/12 +diff -urN a/gopls/internal/test/marker/testdata/mcptools/context.txt b/gopls/internal/test/marker/testdata/mcptools/context.txt +--- a/gopls/internal/test/marker/testdata/mcptools/context.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/mcptools/context.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,513 +0,0 @@ +-This test exercises the "go_context" MCP tool. +- +--- flags -- +--mcp +--ignore_extra_diags +- +--- settings.json -- +-{ +- "mcpTools": { +- "go_context": true +- } +-} +- +--- go.mod -- +-module example.com +- +--- a/main.go -- +-// File doc for main.go part 1. +-package main +- +-// File doc for main.go part 2. +-import( +- "example.com/a/comment" +-) +- +-// File doc for main.go part 3. +- +-// doc comment for func foo. +-func foo() {//@mcptool("go_context", `{"file": "$WORKDIR/a/main.go"}`, output=withComment) +- comment.Foo("", 0) +-} +- +--- a/a.go -- +-// File doc for a.go. +-package main +- +-// doc comment for func a. +-func a () {} +- +-// doc comment for type b. +-type b struct {} +- +-// doc comment for const c. +-const c = "" +- +-// doc comment for var d. +-var d int +- +--- a/comment/doc.go -- +-// Copyright 2025 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-/* +-Package doc for package comment. +-*/ +-package comment +- +--- a/comment/comment.go -- +-// File doc for comment.go part 1. +-package comment +- +-// File doc for comment.go part 2. +-import ( +- // comment for package renaming. +- myfmt "fmt" +-) +- +-// File doc for comment.go part 3. +- +-// doc comment for comment.Foo +-func Foo(foo string, _ int) { +- myfmt.Printf("%s", foo) +-} +- +-// Random comment floating around. +- +--- @withComment -- +-Current package "example.com/a" (package main): +- +-main.go (current file): +-```go +-// File doc for main.go part 1. +-package main +- +-// File doc for main.go part 2. +-import( +- "example.com/a/comment" +-) +- +-// File doc for main.go part 3. +- +-// doc comment for func foo. +-func foo() +- +-``` +- +-a.go: +-```go +-// File doc for a.go. +-package main +- +-// doc comment for func a. +-func a () +- +-// doc comment for type b. +-type b struct {} +-// doc comment for const c. +-const c = "" +-// doc comment for var d. 
+-var d int +-``` +- +-Current file "main.go" contains this import declaration: +-```go +-import( +- "example.com/a/comment" +-) +-``` +- +-The imported packages declare the following symbols: +- +-"example.com/a/comment" (package comment) +-comment.go: +-```go +-// File doc for comment.go part 1. +-package comment +- +-// File doc for comment.go part 2. +-import ( +- // comment for package renaming. +- myfmt "fmt" +-) +- +-// File doc for comment.go part 3. +- +-// doc comment for comment.Foo +-func Foo(foo string, _ int) +- +-``` +- +-doc.go: +-```go +-/* +-Package doc for package comment. +-*/ +-package comment +- +-``` +- +--- b/main.go -- +-package main +- +-import( +- "example.com/b/function" +-) +- +-func testFunction() {//@mcptool("go_context", `{"file":"$WORKDIR/b/main.go"}`, output=withFunction) +- function.Foo(0, "") +-} +- +--- b/function/function.go -- +-package function +- +-func Foo(int, string) {} +- +-func foo(string, int) {} +- +-type unexported struct {} +- +-func (*unexported) unexported(int) {} +- +-func (*unexported) Exported(int) {} +- +-type Exported struct{} +- +-func (*Exported) unexported(int) {} +- +-func (*Exported) Exported(int) {} +- +--- @withFunction -- +-Current package "example.com/b" (package main): +- +-main.go (current file): +-```go +-package main +- +-import( +- "example.com/b/function" +-) +- +-func testFunction() +- +-``` +- +-Current file "main.go" contains this import declaration: +-```go +-import( +- "example.com/b/function" +-) +-``` +- +-The imported packages declare the following symbols: +- +-"example.com/b/function" (package function) +-function.go: +-```go +-package function +- +-func Foo(int, string) +- +-type Exported struct{} +- +-func (*Exported) Exported(int) +- +-``` +- +--- c/main.go -- +-package main +- +-import( +- "example.com/c/types" +-) +- +-var x types.Exported //@mcptool("go_context", `{"file":"$WORKDIR/c/main.go"}`, output=withType) +- +--- c/types/types.go -- +-package types +- +-// Doc for exported. +-type Exported struct { +- // Doc for exported. +- Exported string +- // Doc for unexported. +- unexported string +-} +- +-// Doc for types. +-type ( +- // Doc for Foo first line. +- // Doc for Foo second line. +- Foo struct { +- foo string +- } +- +- // Doc for foo. +- foo struct {} +- +- // Doc for Bar. +- Bar struct { +- bar string +- } +- +- // Doc for bar. +- bar struct {} +-) +- +--- @withType -- +-Current package "example.com/c" (package main): +- +-main.go (current file): +-```go +-package main +- +-import( +- "example.com/c/types" +-) +- +-var x types.Exported +-``` +- +-Current file "main.go" contains this import declaration: +-```go +-import( +- "example.com/c/types" +-) +-``` +- +-The imported packages declare the following symbols: +- +-"example.com/c/types" (package types) +-types.go: +-```go +-package types +- +-// Doc for exported. +-type Exported struct { +- // Doc for exported. +- Exported string +- // Doc for unexported. +- unexported string +-} +- +-// Doc for types. +-type ( +- // Doc for Foo first line. +- // Doc for Foo second line. +- Foo struct { +- foo string +- } +- // Doc for Bar. 
+- Bar struct { +- bar string +- } +-) +- +-``` +- +--- d/main.go -- +-package main +- +-import( +- "example.com/d/values" +-) +- +-var y values.ConstFoo //@mcptool("go_context", `{"file":"$WORKDIR/d/main.go"}`, output=withValue) +- +--- d/values/consts.go -- +-package values +- +-const ( +- // doc for ConstFoo +- ConstFoo = "Foo" // comment for ConstFoo +- // doc for constFoo +- constFoo = "foo" // comment for constFoo +- // doc for ConstBar +- ConstBar = "Bar" // comment for ConstBar +- // doc for constBar +- constBar = "bar" // comment for constBar +-) +- +-// doc for ConstExported +-const ConstExported = "Exported" // comment for ConstExported +- +-// doc for constUnexported +-var constUnexported = "unexported" // comment for constUnexported +- +--- d/values/vars.go -- +-package values +- +-var ( +- // doc for VarFoo +- VarFoo = "Foo" // comment for VarFoo +- // doc for varFoo +- varFoo = "foo" // comment for varFoo +- // doc for VarBar +- VarBar = "Bar" // comment for VarBar +- // doc for varBar +- varBar = "bar" // comment for varBar +-) +- +-// doc for VarExported +-var VarExported = "Exported" // comment for VarExported +- +-// doc for varUnexported +-var varUnexported = "unexported" // comment for varUnexported +- +--- @withValue -- +-Current package "example.com/d" (package main): +- +-main.go (current file): +-```go +-package main +- +-import( +- "example.com/d/values" +-) +- +-var y values.ConstFoo +-``` +- +-Current file "main.go" contains this import declaration: +-```go +-import( +- "example.com/d/values" +-) +-``` +- +-The imported packages declare the following symbols: +- +-"example.com/d/values" (package values) +-consts.go: +-```go +-package values +- +-const ( +- // doc for ConstFoo +- ConstFoo = "Foo" // comment for ConstFoo +- // doc for ConstBar +- ConstBar = "Bar" // comment for ConstBar +-) +- +-// doc for ConstExported +-const ConstExported = "Exported" // comment for ConstExported +- +-``` +- +-vars.go: +-```go +-package values +- +-var ( +- // doc for VarFoo +- VarFoo = "Foo" // comment for VarFoo +- // doc for VarBar +- VarBar = "Bar" // comment for VarBar +-) +- +-// doc for VarExported +-var VarExported = "Exported" // comment for VarExported +- +-``` +- +--- e/main.go -- +-package main +- +-func main() {} //@mcptool("go_context", `{"file":"$WORKDIR/e/main.go"}`, output=samePackage) +- +--- e/foo.go -- +-package main +- +-var ( +- foo string +- Foo string +-) +- +--- e/bar.go -- +-package main +- +-const ( +- bar = "" +- Bar = "" +-) +- +--- e/baz.go -- +-package main +- +-func baz(int) string { +- return "" +-} +- +-func Baz(string) int { +- return 0 +-} +- +--- @samePackage -- +-Current package "example.com/e" (package main): +- +-main.go (current file): +-```go +-package main +- +-func main() +- +-``` +- +-bar.go: +-```go +-package main +- +-const ( +- bar = "" +- Bar = "" +-) +-``` +- +-baz.go: +-```go +-package main +- +-func baz(int) string +- +-func Baz(string) int +- +-``` +- +-foo.go: +-```go +-package main +- +-var ( +- foo string +- Foo string +-) +-``` +- +--- f/main.go -- +-package main +- +-import "fmt" +- +-func Foo() { //@mcptool("go_context", `{"file":"$WORKDIR/f/main.go"}`, output=withoutStdLib) +- fmt.Println("foo") +-} +- +--- @withoutStdLib -- +-Current package "example.com/f" (package main): +- +-main.go (current file): +-```go +-package main +- +-import "fmt" +- +-func Foo() +- +-``` +- +-Current file "main.go" contains this import declaration: +-```go +-import "fmt" +-``` +- +diff -urN 
a/gopls/internal/test/marker/testdata/mcptools/file_context.txt b/gopls/internal/test/marker/testdata/mcptools/file_context.txt +--- a/gopls/internal/test/marker/testdata/mcptools/file_context.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/mcptools/file_context.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,43 +0,0 @@ +-This test exercises the "go_file_context" MCP tool. +- +--- flags -- +--mcp +--ignore_extra_diags +- +--- go.mod -- +-module example.com +- +--- a/main.go -- +-package main +- +-import "example.com/a/other" +- +-func main() { //@mcptool("go_file_context", `{"file": "$WORKDIR/a/main.go"}`, output=content) +- other.Foo() +- _ = other.Bar +-} +- +--- a/other/other.go -- +-package other +- +-// Foo should have a doc comment. +-func Foo() { +- // The body should be ignored +-} +- +-var Bar int // line comments get dropped +- +-var Baz string // Baz is not referenced +- +--- @content -- +-File `$WORKDIR/a/main.go` is in package "example.com/a". +-Below is a summary of the APIs it uses from other files. +-To read the full API of any package, use go_package_api. +-Referenced declarations from $WORKDIR/a/other/other.go (package "example.com/a/other"): +-```go +-// Foo should have a doc comment. +-func Foo() +- +-var Bar int +-``` +- +diff -urN a/gopls/internal/test/marker/testdata/mcptools/file_diagnostics.txt b/gopls/internal/test/marker/testdata/mcptools/file_diagnostics.txt +--- a/gopls/internal/test/marker/testdata/mcptools/file_diagnostics.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/mcptools/file_diagnostics.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,89 +0,0 @@ +-This test exercises the "go_file_diagnostics" MCP tool. +- +--- flags -- +--mcp +- +--- settings.json -- +-{ +- "mcpTools": { +- "go_file_diagnostics": true +- } +-} +- +--- go.mod -- +-module example.com +- +--- a/main.go -- +-package main +- +-func foo() {} //@loc(foo, "foo") +- +-//@mcptool("go_file_diagnostics", `{"file":"$WORKDIR/a/main.go"}`, output=unused) +-//@diag(foo, re"unused") +--- @unused -- +-2:5-2:8: [Information] function "foo" is unused +-Fix: +---- $WORKDIR/a/main.go +-+++ $WORKDIR/a/main.go +-@@ -1,6 +1,6 @@ +- package main +- +--func foo() {} //@loc(foo, "foo") +-+ //@loc(foo, "foo") +- +- //@mcptool("go_file_diagnostics", `{"file":"$WORKDIR/a/main.go"}`, output=unused) +- //@diag(foo, re"unused") +- +- +--- b/main.go -- +-package main +- +-func _() { +- _ = deprecated([]string{"a"}, "a") //@loc(inline, "deprecated") +- +- _ = deprecated([]string{"a"}, "a") //@loc(inline2, "deprecated") +-} +- +-//go:fix inline +-func deprecated(slice []string, s string) bool { +- return proposed(slice, s, true) +-} +- +-func proposed(_ []string, _ string, _ bool) bool { +- return false // fake +-} +- +-//@mcptool("go_file_diagnostics", `{"file":"$WORKDIR/b/main.go"}`, output=diagnoseInline) +-//@diag(inline, re"inline") +-//@diag(inline2, re"inline") +--- @diagnoseInline -- +-3:5-3:35: [Hint] Call of main.deprecated should be inlined +-Fix: +---- $WORKDIR/b/main.go +-+++ $WORKDIR/b/main.go +-@@ -1,7 +1,7 @@ +- package main +- +- func _() { +-- _ = deprecated([]string{"a"}, "a") //@loc(inline, "deprecated") +-+ _ = proposed([]string{"a"}, "a", true) //@loc(inline, "deprecated") +- +- _ = deprecated([]string{"a"}, "a") //@loc(inline2, "deprecated") +- } +- +- +-5:5-5:35: [Hint] Call of main.deprecated should be inlined +-Fix: +---- $WORKDIR/b/main.go +-+++ $WORKDIR/b/main.go +-@@ -3,7 +3,7 @@ +- func _() { +- _ = deprecated([]string{"a"}, 
"a") //@loc(inline, "deprecated") +- +-- _ = deprecated([]string{"a"}, "a") //@loc(inline2, "deprecated") +-+ _ = proposed([]string{"a"}, "a", true) //@loc(inline2, "deprecated") +- } +- +- //go:fix inline +- +- +diff -urN a/gopls/internal/test/marker/testdata/mcptools/file_metadata.txt b/gopls/internal/test/marker/testdata/mcptools/file_metadata.txt +--- a/gopls/internal/test/marker/testdata/mcptools/file_metadata.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/mcptools/file_metadata.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,27 +0,0 @@ +-This test exercises the "go_file_metadata" MCP tool. +- +--- flags -- +--mcp +- +--- settings.json -- +-{ +- "mcpTools": { +- "go_file_metadata": true +- } +-} +--- go.mod -- +-module example.com/cmd +- +-go 1.21 +- +--- main.go -- +-package main +- +-//@mcptool("go_file_metadata", `{"file":"$WORKDIR/main.go"}`, output=metadata) +- +-func main() { +- +-} +--- @metadata -- +-File `$WORKDIR/main.go` is in package "example.com/cmd", which has the following files: +- $WORKDIR/main.go +diff -urN a/gopls/internal/test/marker/testdata/mcptools/package_api.txt b/gopls/internal/test/marker/testdata/mcptools/package_api.txt +--- a/gopls/internal/test/marker/testdata/mcptools/package_api.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/mcptools/package_api.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,49 +0,0 @@ +-This test exercises the "go_package_api" MCP tool. +- +--- flags -- +--mcp +- +--- go.mod -- +-module example.com/mod +- +-//@mcptool("go_package_api", `{"packagePaths":["example.com/mod/lib"]}`, output=outline) +- +-go 1.21 +- +--- main.go -- +-package main +- +-import "example.com/mod/lib" +- +-func main() { +- println(lib.Foo(0)) +-} +--- lib/lib.go -- +-package lib +- +-type T int +- +-func Foo(int) string { +- return "" +-} +- +--- lib/lib_test.go -- +-package lib +- +-import "testing" +- +-func Test(*testing.T) { +-} +- +--- @outline -- +-"example.com/mod/lib" (package lib) +-lib.go: +-```go +-package lib +- +-type T int +- +-func Foo(int) string +- +-``` +- +diff -urN a/gopls/internal/test/marker/testdata/mcptools/references.txt b/gopls/internal/test/marker/testdata/mcptools/references.txt +--- a/gopls/internal/test/marker/testdata/mcptools/references.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/mcptools/references.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,50 +0,0 @@ +-This test exercises the "go_references" MCP tool. +- +--- flags -- +--mcp +--ignore_extra_diags +- +--- go.mod -- +-module example.com +- +--- settings.json -- +-{ +- "mcpTools": { +- "go_references": true +- } +-} +- +--- a/a.go -- +-package a +- +-func Foo() {} //@loc(Foo, "Foo") +- +-func callFoo() { +- Foo() +-} +- +--- b/b.go -- +-package b +- +-import "example.com/a" +- +-func callFoo() { +- a.Foo() +-} +- +-//@mcptool("go_references", `{}`, location=Foo, output=threeref) +- +--- @threeref -- +-The object has 3 references. 
Their locations are listed below +-Reference 1 +-Located in the file: $WORKDIR/a/a.go +-The reference is located on line 2, which has content `func Foo() {} //@loc(Foo, "Foo")` +- +-Reference 2 +-Located in the file: $WORKDIR/a/a.go +-The reference is located on line 5, which has content `Foo()` +- +-Reference 3 +-Located in the file: $WORKDIR/b/b.go +-The reference is located on line 5, which has content `a.Foo()` +- +diff -urN a/gopls/internal/test/marker/testdata/mcptools/search.txt b/gopls/internal/test/marker/testdata/mcptools/search.txt +--- a/gopls/internal/test/marker/testdata/mcptools/search.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/mcptools/search.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,66 +0,0 @@ +-This test exercises the "go_search" MCP tool. +- +--- flags -- +--mcp +--ignore_extra_diags +- +--- go.mod -- +-module example.com +- +-go 1.21 +- +-//@mcptool("go_search", `{"query": "foo"}`, output=foo) +-//@mcptool("go_search", `{"query": "bar"}`, output=bar) +-//@mcptool("go_search", `{"query": "baz"}`, output=baz) +- +--- main.go -- +-package main +- +-import ( +- "example.com/a" +- "example.com/b" +-) +- +-func FOO() {} +- +-type baz int +- +-func main() { +- println(a.Foo + b.Ar) +-} +--- a/a.go -- +-package a +- +-const ( +- Foo = 1 +- Bar = 2 +-) +- +-type B struct { +- ar string +-} +- +--- b/b.go -- +-package b +- +-const ( +- aZ = 1 +- Ar = 2 +-) +- +-var fOo = 3 +- +--- @foo -- +-Top symbol matches: +- FOO (Function in `$WORKDIR/main.go`) +- Foo (Constant in `$WORKDIR/a/a.go`) +- fOo (Variable in `$WORKDIR/b/b.go`) +--- @bar -- +-Top symbol matches: +- Bar (Constant in `$WORKDIR/a/a.go`) +- b.Ar (Constant in `$WORKDIR/b/b.go`) +- B.ar (Field in `$WORKDIR/a/a.go`) +--- @baz -- +-Top symbol matches: +- baz (Type in `$WORKDIR/main.go`) +- b.aZ (Constant in `$WORKDIR/b/b.go`) +diff -urN a/gopls/internal/test/marker/testdata/mcptools/symbol_references.txt b/gopls/internal/test/marker/testdata/mcptools/symbol_references.txt +--- a/gopls/internal/test/marker/testdata/mcptools/symbol_references.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/mcptools/symbol_references.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,58 +0,0 @@ +-This test exercises the "go_symbol_references" MCP tool. +- +--- flags -- +--mcp +--ignore_extra_diags +- +--- go.mod -- +-module example.com +- +-// TODO(rfindley): add error assertions here. +- +-//@mcptool("go_symbol_references", `{"file":"$WORKDIR/a/a.go","symbol":"Foo"}`, output=aFoo) +-//@mcptool("go_symbol_references", `{"file":"$WORKDIR/b/b.go","symbol":"a.Foo"}`, output=aFoo) +-//@mcptool("go_symbol_references", `{"file":"$WORKDIR/a/a.go","symbol":"T.Bar"}`, output=aBar) +-//@mcptool("go_symbol_references", `{"file":"$WORKDIR/b/b.go","symbol":"a.T.Bar"}`, output=aBar) +- +--- a/a.go -- +-package a +- +-func Foo() {} +- +-func callFoo() { +- Foo() +-} +- +-type T int +- +-func (T) Bar() {} +- +--- b/b.go -- +-package b +- +-import "example.com/a" +- +-func callFoo() { +- a.Foo() +-} +- +--- @aFoo -- +-The object has 3 references. 
Their locations are listed below +-Reference 1 +-Located in the file: $WORKDIR/a/a.go +-The reference is located on line 2, which has content `func Foo() {}` +- +-Reference 2 +-Located in the file: $WORKDIR/a/a.go +-The reference is located on line 5, which has content `Foo()` +- +-Reference 3 +-Located in the file: $WORKDIR/b/b.go +-The reference is located on line 5, which has content `a.Foo()` +- +--- @aBar -- +-The object has 1 references. Their locations are listed below +-Reference 1 +-Located in the file: $WORKDIR/a/a.go +-The reference is located on line 10, which has content `func (T) Bar() {}` +- +diff -urN a/gopls/internal/test/marker/testdata/mcptools/workspace_diagnostics_empty.txt b/gopls/internal/test/marker/testdata/mcptools/workspace_diagnostics_empty.txt +--- a/gopls/internal/test/marker/testdata/mcptools/workspace_diagnostics_empty.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/mcptools/workspace_diagnostics_empty.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,23 +0,0 @@ +-This test exercises the "go_diagnostics" MCP tool. +- +--- flags -- +--mcp +- +--- go.mod -- +-module example.com +- +-go 1.21 +- +-//@mcptool("go_diagnostics", `{"files":["$WORKDIR/main.go"]}`, output=diagnostics) +-//@mcptool("go_diagnostics", `{"files":[]}`, output=diagnostics) +-//@mcptool("go_diagnostics", `{}`, output=diagnostics) +- +--- main.go -- +-package main +- +-func main() { +- println("Hello world") +-} +- +--- @diagnostics -- +-No diagnostics. +diff -urN a/gopls/internal/test/marker/testdata/mcptools/workspace_diagnostics.txt b/gopls/internal/test/marker/testdata/mcptools/workspace_diagnostics.txt +--- a/gopls/internal/test/marker/testdata/mcptools/workspace_diagnostics.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/mcptools/workspace_diagnostics.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,52 +0,0 @@ +-This test exercises the "go_diagnostics" MCP tool. +- +--- flags -- +--mcp +--ignore_extra_diags +- +--- go.mod -- +-module example.com +- +-go 1.21 +- +-//@mcptool("go_diagnostics", `{"files":["$WORKDIR/a/a.go"]}`, output=diagnostics) +-//@mcptool("go_diagnostics", `{"files":["$WORKDIR/b/b.go"]}`, output=diagnostics) +-//@mcptool("go_diagnostics", `{"files":["$WORKDIR/main.go"]}`, output=diagnostics) +- +--- main.go -- +-package main +- +-import ( +- "example.com/a" +- "example.com/b" +-) +- +-func main() int { +- a.Print(b.B) +- return 0 +-} +- +--- a/a.go -- +-package a +- +-func Print(x string) { +- println(x) +-} +- +--- b/b.go -- +-package b +- +-const B = 1 +- +--- b/b2.go -- +- +-const B = 2 +- +--- @diagnostics -- +-File `$WORKDIR/b/b2.go` has the following diagnostics: +-1:0-1:0: [Error] expected 'package', found 'const' +- +-File `$WORKDIR/main.go` has the following diagnostics: +-7:5-7:9: [Error] func main must have no arguments and no return values +-8:9-8:12: [Error] cannot use b.B (untyped int constant 1) as string value in argument to a.Print +- +diff -urN a/gopls/internal/test/marker/testdata/mcptools/workspace.txt b/gopls/internal/test/marker/testdata/mcptools/workspace.txt +--- a/gopls/internal/test/marker/testdata/mcptools/workspace.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/mcptools/workspace.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,37 +0,0 @@ +-This test exercises the "go_workspace" MCP tool. 
+- +--- flags -- +--mcp +--min_go_command=go1.23 +- +--- go.work -- +-go 1.23.0 +- +-//@mcptool("go_workspace", `{}`, output=workspace) +- +-use ( +- ./a +- ./b +-) +- +--- a/go.mod -- +-module example.com/a +- +-go 1.23.0 +- +--- a/a.go -- +-package a +- +--- b/go.mod -- +-module example.com/b +- +-go 1.23.0 +- +--- b/b.go -- +-package b +- +--- @workspace -- +-The `$WORKDIR` directory is in the go workspace defined by `$WORKDIR/go.work`, with the following main modules: +- $WORKDIR/a/go.mod (module example.com/a) +- $WORKDIR/b/go.mod (module example.com/b) +- +diff -urN a/gopls/internal/test/marker/testdata/modfile/godebug_bad.txt b/gopls/internal/test/marker/testdata/modfile/godebug_bad.txt +--- a/gopls/internal/test/marker/testdata/modfile/godebug_bad.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/modfile/godebug_bad.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,17 +0,0 @@ +-This test checks that we surface the error for unexpected godebug values. +- +-TODO(golang/go#67623): the diagnostic should be on the bad godebug value. +- +--- flags -- +--min_go_command=go1.23 +--errors_ok +- +--- go.mod -- +-module example.com/m //@diag("module", re`unknown godebug "gotypealias"`) +- +-go 1.23 +- +-godebug ( +- gotypealias=0 // misspelled +-) +-godebug gotypesalias=1 +diff -urN a/gopls/internal/test/marker/testdata/modfile/godebug.txt b/gopls/internal/test/marker/testdata/modfile/godebug.txt +--- a/gopls/internal/test/marker/testdata/modfile/godebug.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/modfile/godebug.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,43 +0,0 @@ +-This test basic gopls functionality in a workspace with a godebug +-directive in its modfile. +- +--- flags -- +--min_go_command=go1.23 +- +--- go.mod -- +-module example.com/m +- +-go 1.23 +- +-godebug ( +- gotypesalias=0 +-) +-godebug gotypesalias=1 +- +--- a/a.go -- +-package a +- +-import "example.com/m/b" +- +-const A = b.B //@def("B", B) +- +--- b/b.go -- +-package b +- +-const B = 42 //@loc(B, "B") +- +--- format/go.mod -- +-module example.com/m/format //@format(formatted) +- +-godebug ( +-gotypesalias=0 +-) +-godebug gotypesalias=1 +--- @formatted -- +-module example.com/m/format //@format(formatted) +- +-godebug ( +- gotypesalias=0 +-) +- +-godebug gotypesalias=1 +diff -urN a/gopls/internal/test/marker/testdata/quickfix/embeddirective.txt b/gopls/internal/test/marker/testdata/quickfix/embeddirective.txt +--- a/gopls/internal/test/marker/testdata/quickfix/embeddirective.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/quickfix/embeddirective.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,22 +0,0 @@ +-This test checks the quick fix to add a missing "embed" import. +- +--- embed.txt -- +-text +--- fix_import.go -- +-package embeddirective +- +-import ( +- "io" +- "os" +-) +- +-//go:embed embed.txt //@quickfix("//go:embed", re`must import "embed"`, fix_import) +-var T string +- +-func _() { +- _ = os.Stdin +- _ = io.EOF +-} +--- @fix_import/fix_import.go -- +-@@ -4 +4 @@ +-+ _ "embed" +diff -urN a/gopls/internal/test/marker/testdata/quickfix/infertypeargs.txt b/gopls/internal/test/marker/testdata/quickfix/infertypeargs.txt +--- a/gopls/internal/test/marker/testdata/quickfix/infertypeargs.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/quickfix/infertypeargs.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,25 +0,0 @@ +-This test verifies the infertypeargs refactoring. 
+- +--- go.mod -- +-module mod.test/infertypeargs +- +-go 1.18 +- +--- p.go -- +-package infertypeargs +- +-func app[S interface{ ~[]E }, E any](s S, e E) S { +- return append(s, e) +-} +- +-func _() { +- _ = app[[]int] +- _ = app[[]int, int] +- _ = app[[]int]([]int{}, 0) //@quickfix("[[]int]", re"unnecessary type arguments", infer) +- _ = app([]int{}, 0) +-} +- +--- @infer/p.go -- +-@@ -10 +10 @@ +-- _ = app[[]int]([]int{}, 0) //@quickfix("[[]int]", re"unnecessary type arguments", infer) +-+ _ = app([]int{}, 0) //@quickfix("[[]int]", re"unnecessary type arguments", infer) +diff -urN a/gopls/internal/test/marker/testdata/quickfix/issue65024.txt b/gopls/internal/test/marker/testdata/quickfix/issue65024.txt +--- a/gopls/internal/test/marker/testdata/quickfix/issue65024.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/quickfix/issue65024.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,78 +0,0 @@ +-Regression example.com for #65024, "incorrect package qualification when +-stubbing method in v2 module". +- +-The second test (a-a) ensures that we don't use path-based heuristics +-to guess the PkgName of an import. +- +--- a/v2/go.mod -- +-module example.com/a/v2 +-go 1.18 +- +--- a/v2/a.go -- +-package a +- +-type I interface { F() T } +- +-type T struct {} +- +--- a/v2/b/b.go -- +-package b +- +-import "example.com/a/v2" +- +-type B struct{} +- +-var _ a.I = &B{} //@ quickfix("&B{}", re"does not implement", out) +- +-// This line makes the diff tidier. +- +--- @out/a/v2/b/b.go -- +-@@ -7 +7,5 @@ +-+// F implements [a.I]. +-+func (b *B) F() a.T { +-+ panic("unimplemented") +-+} +-+ +-@@ -10 +15 @@ +-- +--- a-a/v2/go.mod -- +-// This module has a hyphenated name--how posh. +-// It won't do to use it as an identifier. +-// The correct name is the one in the package decl, +-// which in this case is not what the path heuristic would guess. +-module example.com/a-a/v2 +-go 1.18 +- +--- a-a/v2/a.go -- +-package a +-type I interface { F() T } +-type T struct {} +- +--- a-a/v2/b/b.go -- +-package b +- +-// Note: no existing import of a. +- +-type B struct{} +- +-var _ I = &B{} //@ quickfix("&B{}", re"does not implement", out2) +- +-// This line makes the diff tidier. +- +--- a-a/v2/b/import-a-I.go -- +-package b +-import "example.com/a-a/v2" +-type I = a.I +- +--- @out2/a-a/v2/b/b.go -- +-@@ -3 +3,2 @@ +-+import a "example.com/a-a/v2" +-+ +-@@ -7 +9,5 @@ +-+// F implements [a.I]. +-+func (b *B) F() a.T { +-+ panic("unimplemented") +-+} +-+ +-@@ -10 +17 @@ +-- +diff -urN a/gopls/internal/test/marker/testdata/quickfix/noresultvalues.txt b/gopls/internal/test/marker/testdata/quickfix/noresultvalues.txt +--- a/gopls/internal/test/marker/testdata/quickfix/noresultvalues.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/quickfix/noresultvalues.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,18 +0,0 @@ +-This test checks the quick fix for removing extra return values. +- +-Note: gopls should really discard unnecessary return statements. 
+- +--- noresultvalues.go -- +-package typeerrors +- +-func x() { return nil } //@quickfix("nil", re"too many return", x) +- +-func y() { return nil, "hello" } //@quickfix("nil", re"too many return", y) +--- @x/noresultvalues.go -- +-@@ -3 +3 @@ +--func x() { return nil } //@quickfix("nil", re"too many return", x) +-+func x() { return } //@quickfix("nil", re"too many return", x) +--- @y/noresultvalues.go -- +-@@ -5 +5 @@ +--func y() { return nil, "hello" } //@quickfix("nil", re"too many return", y) +-+func y() { return } //@quickfix("nil", re"too many return", y) +diff -urN a/gopls/internal/test/marker/testdata/quickfix/self_assignment.txt b/gopls/internal/test/marker/testdata/quickfix/self_assignment.txt +--- a/gopls/internal/test/marker/testdata/quickfix/self_assignment.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/quickfix/self_assignment.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,19 +0,0 @@ +-Test of the suggested fix to remove unnecessary assignments. +- +--- a.go -- +-package quickfix +- +-import ( +- "log" +-) +- +-func _() { +- s := "hiiiiiii" +- s = s //@quickfix("s = s", re"self-assignment", fix) +- log.Print(s) +-} +- +--- @fix/a.go -- +-@@ -9 +9 @@ +-- s = s //@quickfix("s = s", re"self-assignment", fix) +-+ //@quickfix("s = s", re"self-assignment", fix) +diff -urN a/gopls/internal/test/marker/testdata/quickfix/stubmethods/basic_resolve.txt b/gopls/internal/test/marker/testdata/quickfix/stubmethods/basic_resolve.txt +--- a/gopls/internal/test/marker/testdata/quickfix/stubmethods/basic_resolve.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/quickfix/stubmethods/basic_resolve.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,20 +0,0 @@ +-This test exercises basic 'stub methods' functionality, with resolve support. +-See basic.txt for the same test without resolve support. +- +--- go.mod -- +-module example.com +-go 1.12 +- +--- a/a.go -- +-package a +- +-type C int +- +-var _ error = C(0) //@quickfix(re"C.0.", re"missing method Error", stub) +--- @stub/a/a.go -- +-@@ -5 +5,5 @@ +-+// Error implements [error]. +-+func (c C) Error() string { +-+ panic("unimplemented") +-+} +-+ +diff -urN a/gopls/internal/test/marker/testdata/quickfix/stubmethods/basic.txt b/gopls/internal/test/marker/testdata/quickfix/stubmethods/basic.txt +--- a/gopls/internal/test/marker/testdata/quickfix/stubmethods/basic.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/quickfix/stubmethods/basic.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,30 +0,0 @@ +-This test exercises basic 'stub methods' functionality. +-See basic_resolve.txt for the same test with resolve support. +- +--- capabilities.json -- +-{ +- "textDocument": { +- "codeAction": { +- "dataSupport": false, +- "resolveSupport": {} +- } +- } +-} +- +--- go.mod -- +-module example.com +-go 1.12 +- +--- a/a.go -- +-package a +- +-type C int +- +-var _ error = C(0) //@quickfix(re"C.0.", re"missing method Error", stub) +--- @stub/a/a.go -- +-@@ -5 +5,5 @@ +-+// Error implements [error]. 
+-+func (c C) Error() string { +-+ panic("unimplemented") +-+} +-+ +diff -urN a/gopls/internal/test/marker/testdata/quickfix/stubmethods/fromcall_basic.txt b/gopls/internal/test/marker/testdata/quickfix/stubmethods/fromcall_basic.txt +--- a/gopls/internal/test/marker/testdata/quickfix/stubmethods/fromcall_basic.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/quickfix/stubmethods/fromcall_basic.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,68 +0,0 @@ +-This test checks the basic of 'Declare missing method T.f' quick fix. +- +--- basic_stub.go -- +-package fromcallbasic +- +-type Basic struct{} +- +-func basic() { +- i := 1 +- b := Basic{} +- f(b.basic(i)) //@quickfix("basic", re"has no field or method", basic) +-} +- +-func f(i int) string { return "s" } +--- @basic/basic_stub.go -- +-@@ -5 +5,4 @@ +-+func (b Basic) basic(i int) int { +-+ panic("unimplemented") +-+} +-+ +--- pointer.go -- +-package fromcallbasic +- +-type P struct{} +- +-func recv_param_pointer() { +- p := &P{} +- i := 42 +- p.pointer(&i) //@quickfix("pointer", re"has no field or method", pointer) +-} +--- @pointer/pointer.go -- +-@@ -5 +5,4 @@ +-+func (p *P) pointer(i *int) { +-+ panic("unimplemented") +-+} +-+ +--- other.go -- +-package fromcallbasic +- +-type TypeDeclInOtherFile struct{} +- +--- this.go -- +-package fromcallbasic +- +-func fun() { +- i := 1 +- t := TypeDeclInOtherFile{} +- t.other(i) //@quickfix("other", re"has no field or method", del_other) +-} +--- @del_other/other.go -- +-@@ -5 +5,3 @@ +-+func (t TypeDeclInOtherFile) other(i int) { +-+ panic("unimplemented") +-+} +--- should_insert_after.go -- +-package fromcallbasic +- +-type HasMethod struct{} +- +-func (h *HasMethod) m() { +- h.should_insert_after() //@quickfix("should_insert_after", re"has no field or method", insert) +-} +--- @insert/should_insert_after.go -- +-@@ -8 +8,4 @@ +-+ +-+func (h *HasMethod) should_insert_after() { +-+ panic("unimplemented") +-+} +diff -urN a/gopls/internal/test/marker/testdata/quickfix/stubmethods/fromcall_params.txt b/gopls/internal/test/marker/testdata/quickfix/stubmethods/fromcall_params.txt +--- a/gopls/internal/test/marker/testdata/quickfix/stubmethods/fromcall_params.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/quickfix/stubmethods/fromcall_params.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,84 +0,0 @@ +-This test checks the param name and type of the generated missing method based on CallExpr. 
+- +--- basic_stub.go -- +-package fromcallparams +- +-type A struct{} +- +-func untypedParams() { +- a := A{} +- a.untyped("s", 42, 4.12, make(map[string]int), []int{1}, [1]int{1}, make(chan string)) //@quickfix("untyped", re"has no field or method", basic) +-} +--- @basic/basic_stub.go -- +-@@ -5 +5,4 @@ +-+func (a A) untyped(s string, i int, f float64, m map[string]int, param5 []int, param6 [1]int, ch chan string) { +-+ panic("unimplemented") +-+} +-+ +--- nonexistent_type.go -- +-package fromcallparams +- +-type B struct{} +- +-func invalidBasicKindParam() { +- b := B{} +- b.basicKind(NonExistentType{}) //@quickfix("basicKind", re"has no field or method", nonexistent),diag(re"NonExistentType",re"undefined: NonExistentType") +-} +--- @nonexistent/nonexistent_type.go -- +-@@ -5 +5,4 @@ +-+func (b B) basicKind(param any) { +-+ panic("unimplemented") +-+} +-+ +--- pass_param_by_ident.go -- +-package fromcallparams +- +-type C struct{} +- +-func passParamByIdent() { +- c := C{} +- stringVar := "some string" +- intVar := 1 +- sliceVar := []int{1} +- c.ident(stringVar, intVar, sliceVar) //@quickfix("ident", re"has no field or method", ident) +-} +--- @ident/pass_param_by_ident.go -- +-@@ -5 +5,4 @@ +-+func (c C) ident(stringVar string, intVar int, sliceVar []int) { +-+ panic("unimplemented") +-+} +-+ +--- tail_param_name.go -- +-package fromcallparams +- +-type Tail struct{} +- +-type TypeWithLongName struct{} +- +-func TailParamName() { +- t := Tail{} +- t.longName(TypeWithLongName{}) //@quickfix("longName", re"has no field or method", trail) +-} +--- @trail/tail_param_name.go -- +-@@ -5 +5,4 @@ +-+func (t Tail) longName(name TypeWithLongName) { +-+ panic("unimplemented") +-+} +-+ +--- selector_param_name.go -- +-package fromcallparams +- +-import "net/http" +- +-type Select struct{} +- +-func selectExpr() { +- s := Select{} +- s.sel(http.ErrNotMultipart) //@quickfix("sel", re"has no field or method", select) +-} +--- @select/selector_param_name.go -- +-@@ -7 +7,4 @@ +-+func (s Select) sel(multipart *http.ProtocolError) { +-+ panic("unimplemented") +-+} +-+ +diff -urN a/gopls/internal/test/marker/testdata/quickfix/stubmethods/fromcall_returns.txt b/gopls/internal/test/marker/testdata/quickfix/stubmethods/fromcall_returns.txt +--- a/gopls/internal/test/marker/testdata/quickfix/stubmethods/fromcall_returns.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/quickfix/stubmethods/fromcall_returns.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,278 +0,0 @@ +-This test checks the return type of the generated missing method based on CallExpr. 
+- +--- param.go -- +-package fromcallreturns +- +-type A struct{} +- +-func inferFromParam() { +- a := A{} +- f(a.as_param()) //@quickfix("as_param", re"has no field or method", infer_param) +-} +- +-func f(i int) {} +--- @infer_param/param.go -- +-@@ -5 +5,4 @@ +-+func (a A) as_param() int { +-+ panic("unimplemented") +-+} +-+ +--- assign.go -- +-package fromcallreturns +- +-type Assign struct{} +- +-func inferReturnfromAssign() { +- var assign int //@diag("assign",re"not used") +- a := Assign{} +- assign = a.as_assign() //@quickfix("as_assign", re"has no field or method", infer_assign) +-} +--- @infer_assign/assign.go -- +-@@ -5 +5,4 @@ +-+func (a Assign) as_assign() int { +-+ panic("unimplemented") +-+} +-+ +--- multiple_assign.go -- +-package fromcallreturns +- +-type MultiAssign struct{} +- +-func inferReturnfromMultipleAssign() { +- var assign1 int //@diag("assign1",re"not used") +- var assign2 int //@diag("assign2",re"not used") +- m := MultiAssign{} +- assign1, assign2 = m.multi_assign() //@quickfix("multi_assign", re"has no field or method", infer_multiple_assign) +-} +--- @infer_multiple_assign/multiple_assign.go -- +-@@ -5 +5,4 @@ +-+func (m MultiAssign) multi_assign() (int, int) { +-+ panic("unimplemented") +-+} +-+ +--- multiple_return_in_param.go -- +-package fromcallreturns +- +-type MultiReturn struct{} +- +-func inferMultipleReturnInParam() { +- m := MultiReturn{} +- m.param_has_multi_return(multiReturn()) //@quickfix("param_has_multi_return", re"has no field or method", multiple_return) +-} +- +-func multiReturn() (int, int) { +- return 1, 1 +-} +--- @multiple_return/multiple_return_in_param.go -- +-@@ -5 +5,4 @@ +-+func (m MultiReturn) param_has_multi_return(i int, param2 int) { +-+ panic("unimplemented") +-+} +-+ +--- error_nodes.go -- +-package fromcallreturns +- +-type E struct{} +- +-func all_error() { +- e := E{} +- errorFunc(e.errors(undefined1(), undefined2(), undefined3{})) //@quickfix("errors", re"has no field or method", all_error),diag("undefined1",re"undefined"),diag("undefined2",re"undefined"),diag("undefined3",re"undefined") +-} +-func errorFunc(u undefined4) {} //@diag("undefined4",re"undefined") +--- @all_error/error_nodes.go -- +-@@ -5 +5,4 @@ +-+func (e E) errors(param any, param2 any, param3 any) any { +-+ panic("unimplemented") +-+} +-+ +--- paren.go -- +-package fromcallreturns +- +-type Paren struct{} +- +-func paren() { +- p := Paren{} +- fn()((p.surroundingParen())) //@quickfix("surroundingParen", re"has no field or method", surrounding_paren) +-} +- +-func fn() func(i int) { +- return func(i int) {} +-} +--- @surrounding_paren/paren.go -- +-@@ -5 +5,4 @@ +-+func (p Paren) surroundingParen() int { +-+ panic("unimplemented") +-+} +-+ +--- if_stmt.go -- +-package fromcallreturns +- +-type IfStruct struct{} +- +-func testIfStmt() { +- i := IfStruct{} +- if i.isValid() { //@quickfix("isValid", re"has no field or method", infer_if_stmt) +- // do something +- } +-} +--- @infer_if_stmt/if_stmt.go -- +-@@ -5 +5,4 @@ +-+func (i IfStruct) isValid() bool { +-+ panic("unimplemented") +-+} +-+ +--- for_stmt.go -- +-package fromcallreturns +- +-type ForStruct struct{} +- +-func testForStmt() { +- f := ForStruct{} +- for f.hasNext() { //@quickfix("hasNext", re"has no field or method", infer_for_stmt1) +- // do something +- } +- for i := 0; f.inside(); i++ { //@quickfix("inside", re"has no field or method", infer_for_stmt2) +- // do something +- } +-} +--- @infer_for_stmt1/for_stmt.go -- +-@@ -5 +5,4 @@ +-+func (f ForStruct) hasNext() bool { +-+ 
panic("unimplemented") +-+} +-+ +--- @infer_for_stmt2/for_stmt.go -- +-@@ -5 +5,4 @@ +-+func (f ForStruct) inside() bool { +-+ panic("unimplemented") +-+} +-+ +--- unary.go -- +-package fromcallreturns +- +-type Unary struct{} +- +-func testUnaryExpr() { +- u := Unary{} +- a, b, c, d := !u.Boolean(), -u.Minus(), +u.Plus(), ^u.Xor() //@quickfix("Boolean", re"has no field or method", infer_unary_expr1),quickfix("Minus", re"has no field or method", infer_unary_expr2),quickfix("Plus", re"has no field or method", infer_unary_expr3),quickfix("Xor", re"has no field or method", infer_unary_expr4) +- _, _, _, _ = a, b, c, d +-} +--- @infer_unary_expr1/unary.go -- +-@@ -5 +5,4 @@ +-+func (u Unary) Boolean() bool { +-+ panic("unimplemented") +-+} +-+ +--- @infer_unary_expr2/unary.go -- +-@@ -5 +5,4 @@ +-+func (u Unary) Minus() int { +-+ panic("unimplemented") +-+} +-+ +--- @infer_unary_expr3/unary.go -- +-@@ -5 +5,4 @@ +-+func (u Unary) Plus() int { +-+ panic("unimplemented") +-+} +-+ +--- @infer_unary_expr4/unary.go -- +-@@ -5 +5,4 @@ +-+func (u Unary) Xor() int { +-+ panic("unimplemented") +-+} +-+ +--- binary.go -- +-package fromcallreturns +- +-type Binary struct{} +- +-func testBinaryExpr() { +- b := Binary{} +- _ = 1 + b.Num() //@quickfix("Num", re"has no field or method", infer_binary_expr1) +- _ = "s" + b.Str() //@quickfix("Str", re"has no field or method", infer_binary_expr2) +-} +--- @infer_binary_expr1/binary.go -- +-@@ -5 +5,4 @@ +-+func (b Binary) Num() int { +-+ panic("unimplemented") +-+} +-+ +--- @infer_binary_expr2/binary.go -- +-@@ -5 +5,4 @@ +-+func (b Binary) Str() string { +-+ panic("unimplemented") +-+} +-+ +--- value.go -- +-package fromcallreturns +- +-type Value struct{} +- +-func v() { +- v := Value{} +- var a, b int = v.Multi() //@quickfix("Multi", re"has no field or method", infer_value_expr1) +- var c, d int = 4, v.Single() //@quickfix("Single", re"has no field or method", infer_value_expr2) +- _, _, _, _ = a, b, c, d +-} +--- @infer_value_expr1/value.go -- +-@@ -5 +5,4 @@ +-+func (v Value) Multi() (int, int) { +-+ panic("unimplemented") +-+} +-+ +--- @infer_value_expr2/value.go -- +-@@ -5 +5,4 @@ +-+func (v Value) Single() int { +-+ panic("unimplemented") +-+} +-+ +--- return.go -- +-package fromcallreturns +- +-type Return struct{} +- +-func r() { +- r := Return{} +- _ = func() (int, int) { +- return r.Multi() //@quickfix("Multi", re"has no field or method", infer_retrun_expr1) +- } +- _ = func() string { +- return r.Single() //@quickfix("Single", re"has no field or method", infer_retrun_expr2) +- } +-} +--- @infer_retrun_expr1/return.go -- +-@@ -5 +5,4 @@ +-+func (r Return) Multi() (int, int) { +-+ panic("unimplemented") +-+} +-+ +--- @infer_retrun_expr2/return.go -- +-@@ -5 +5,4 @@ +-+func (r Return) Single() string { +-+ panic("unimplemented") +-+} +-+ +--- successive_return.go -- +-package fromcallreturns +- +-type R struct{} +- +-func _() (x int, y, z string, k int64) { +- r := R{} +- _ = func() (a, b float32, c int) { +- return r.Multi() //@quickfix("Multi", re"has no field or method", successive1) +- } +- return 3, "", r.Single(), 6 //@quickfix("Single", re"has no field or method", successive2) +-} +--- @successive1/successive_return.go -- +-@@ -5 +5,4 @@ +-+func (r R) Multi() (float32, float32, int) { +-+ panic("unimplemented") +-+} +-+ +--- @successive2/successive_return.go -- +-@@ -5 +5,4 @@ +-+func (r R) Single() string { +-+ panic("unimplemented") +-+} +-+ +diff -urN a/gopls/internal/test/marker/testdata/quickfix/stubmethods/issue61693.txt 
b/gopls/internal/test/marker/testdata/quickfix/stubmethods/issue61693.txt +--- a/gopls/internal/test/marker/testdata/quickfix/stubmethods/issue61693.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/quickfix/stubmethods/issue61693.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,26 +0,0 @@ +-This test exercises stub methods functionality with variadic parameters. +- +-In golang/go#61693 stubmethods was panicking in this case. +- +--- go.mod -- +-module mod.com +- +-go 1.18 +--- main.go -- +-package main +- +-type C int +- +-func F(err ...error) {} +- +-func _() { +- var x error +- F(x, C(0)) //@quickfix(re"C.0.", re"missing method Error", stub) +-} +--- @stub/main.go -- +-@@ -5 +5,5 @@ +-+// Error implements [error]. +-+func (c C) Error() string { +-+ panic("unimplemented") +-+} +-+ +diff -urN a/gopls/internal/test/marker/testdata/quickfix/stubmethods/issue61830.txt b/gopls/internal/test/marker/testdata/quickfix/stubmethods/issue61830.txt +--- a/gopls/internal/test/marker/testdata/quickfix/stubmethods/issue61830.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/quickfix/stubmethods/issue61830.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,24 +0,0 @@ +-This test verifies that method stubbing qualifies types relative to the current +-package. +- +--- p.go -- +-package p +- +-import "io" +- +-type B struct{} +- +-type I interface { +- M(io.Reader, B) +-} +- +-type A struct{} +- +-var _ I = &A{} //@quickfix(re"&A..", re"missing method M", stub) +--- @stub/p.go -- +-@@ -13 +13,5 @@ +-+// M implements [I]. +-+func (a *A) M(io.Reader, B) { +-+ panic("unimplemented") +-+} +-+ +diff -urN a/gopls/internal/test/marker/testdata/quickfix/stubmethods/issue64078.txt b/gopls/internal/test/marker/testdata/quickfix/stubmethods/issue64078.txt +--- a/gopls/internal/test/marker/testdata/quickfix/stubmethods/issue64078.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/quickfix/stubmethods/issue64078.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,36 +0,0 @@ +-This test verifies that the named receiver is generated. +- +--- p.go -- +-package p +- +-type A struct{} +- +-func (aa *A) M1() { +- panic("unimplemented") +-} +- +-type I interface { +- M1() +- M2(aa string) +- M3(bb string) +- M4() (aa string) +-} +- +-var _ I = &A{} //@quickfix(re"&A..", re"missing method M", stub) +--- @stub/p.go -- +-@@ -5 +5,15 @@ +-+// M2 implements [I]. +-+func (*A) M2(aa string) { +-+ panic("unimplemented") +-+} +-+ +-+// M3 implements [I]. +-+func (aa *A) M3(bb string) { +-+ panic("unimplemented") +-+} +-+ +-+// M4 implements [I]. +-+func (*A) M4() (aa string) { +-+ panic("unimplemented") +-+} +-+ +diff -urN a/gopls/internal/test/marker/testdata/quickfix/stubmethods/issue64114.txt b/gopls/internal/test/marker/testdata/quickfix/stubmethods/issue64114.txt +--- a/gopls/internal/test/marker/testdata/quickfix/stubmethods/issue64114.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/quickfix/stubmethods/issue64114.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,37 +0,0 @@ +-This test verifies that the embedded field has a method with the same name. +- +--- issue64114.go -- +-package stub +- +-// Regression test for issue #64114: code action "implement" is not listed. +- +-var _ WriteTest = (*WriteStruct)(nil) //@quickfix("(", re"does not implement", issue64114) +- +-type WriterTwoStruct struct{} +- +-// Write implements [io.ReadWriter]. 
+-func (t *WriterTwoStruct) RRRR(str string) error { +- panic("unimplemented") +-} +- +-type WriteTest interface { +- RRRR() +- WWWW() +-} +- +-type WriteStruct struct { +- WriterTwoStruct +-} +--- @issue64114/issue64114.go -- +-@@ -22 +22,11 @@ +-+ +-+// RRRR implements [WriteTest]. +-+// Subtle: this method shadows the method (WriterTwoStruct).RRRR of WriteStruct.WriterTwoStruct. +-+func (w *WriteStruct) RRRR() { +-+ panic("unimplemented") +-+} +-+ +-+// WWWW implements [WriteTest]. +-+func (w *WriteStruct) WWWW() { +-+ panic("unimplemented") +-+} +diff -urN a/gopls/internal/test/marker/testdata/quickfix/stub.txt b/gopls/internal/test/marker/testdata/quickfix/stub.txt +--- a/gopls/internal/test/marker/testdata/quickfix/stub.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/quickfix/stub.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,348 +0,0 @@ +-This test checks the 'implement interface' quick fix. +- +--- go.mod -- +-module golang.org/lsptests/stub +- +-go 1.18 +- +--- other/other.go -- +-package other +- +-import ( +- "bytes" +- renamed_context "context" +-) +- +-type Interface interface { +- Get(renamed_context.Context) *bytes.Buffer +-} +- +--- add_selector.go -- +-package stub +- +-import "io" +- +-// This file tests that if an interface +-// method references a type from its own package +-// then our implementation must add the import/package selector +-// in the concrete method if the concrete type is outside of the interface +-// package +-var _ io.ReaderFrom = &readerFrom{} //@quickfix("&readerFrom", re"cannot use", readerFrom) +- +-type readerFrom struct{} +--- @readerFrom/add_selector.go -- +-@@ -13 +13,5 @@ +-+ +-+// ReadFrom implements [io.ReaderFrom]. +-+func (*readerFrom) ReadFrom(r io.Reader) (n int64, err error) { +-+ panic("unimplemented") +-+} +--- assign.go -- +-package stub +- +-import "io" +- +-func _() { +- var br io.ByteWriter +- br = &byteWriter{} //@quickfix("&", re"does not implement", assign) +- _ = br +-} +- +-type byteWriter struct{} +--- @assign/assign.go -- +-@@ -12 +12,5 @@ +-+ +-+// WriteByte implements [io.ByteWriter]. +-+func (b *byteWriter) WriteByte(c byte) error { +-+ panic("unimplemented") +-+} +--- assign_multivars.go -- +-package stub +- +-import "io" +- +-func _() { +- var br io.ByteWriter +- var i int +- i, br = 1, &multiByteWriter{} //@quickfix("&", re"does not implement", assign_multivars) +- _, _ = i, br +-} +- +-type multiByteWriter struct{} +--- @assign_multivars/assign_multivars.go -- +-@@ -13 +13,5 @@ +-+ +-+// WriteByte implements [io.ByteWriter]. +-+func (m *multiByteWriter) WriteByte(c byte) error { +-+ panic("unimplemented") +-+} +--- call_expr.go -- +-package stub +- +-func main() { +- check(&callExpr{}) //@quickfix("&", re"does not implement", call_expr) +-} +- +-func check(err error) { +- if err != nil { +- panic(err) +- } +-} +- +-type callExpr struct{} +--- @call_expr/call_expr.go -- +-@@ -14 +14,5 @@ +-+ +-+// Error implements [error]. +-+func (c *callExpr) Error() string { +-+ panic("unimplemented") +-+} +--- embedded.go -- +-package stub +- +-import ( +- "io" +- "sort" +-) +- +-var _ embeddedInterface = (*embeddedConcrete)(nil) //@quickfix("(", re"does not implement", embedded) +- +-type embeddedConcrete struct{} +- +-type embeddedInterface interface { +- sort.Interface +- io.Reader +-} +--- @embedded/embedded.go -- +-@@ -12 +12,20 @@ +-+// Len implements [embeddedInterface]. 
+-+func (e *embeddedConcrete) Len() int { +-+ panic("unimplemented") +-+} +-+ +-+// Less implements [embeddedInterface]. +-+func (e *embeddedConcrete) Less(i int, j int) bool { +-+ panic("unimplemented") +-+} +-+ +-+// Read implements [embeddedInterface]. +-+func (e *embeddedConcrete) Read(p []byte) (n int, err error) { +-+ panic("unimplemented") +-+} +-+ +-+// Swap implements [embeddedInterface]. +-+func (e *embeddedConcrete) Swap(i int, j int) { +-+ panic("unimplemented") +-+} +-+ +--- err.go -- +-package stub +- +-func _() { +- var br error = &customErr{} //@quickfix("&", re"does not implement", err) +- _ = br +-} +- +-type customErr struct{} +--- @err/err.go -- +-@@ -9 +9,5 @@ +-+ +-+// Error implements [error]. +-+func (c *customErr) Error() string { +-+ panic("unimplemented") +-+} +--- function_return.go -- +-package stub +- +-import ( +- "io" +-) +- +-func newCloser() io.Closer { +- return closer{} //@quickfix("c", re"does not implement", function_return) +-} +- +-type closer struct{} +--- @function_return/function_return.go -- +-@@ -12 +12,5 @@ +-+ +-+// Close implements [io.Closer]. +-+func (c closer) Close() error { +-+ panic("unimplemented") +-+} +--- successive_function_return.go -- +-package stub +- +-import ( +- "io" +-) +- +-func _() (a, b int, c io.Closer) { +- return 1, 2, closer2{} //@quickfix("c", re"does not implement", successive) +-} +- +-type closer2 struct{} +--- @successive/successive_function_return.go -- +-@@ -12 +12,5 @@ +-+ +-+// Close implements [io.Closer]. +-+func (c closer2) Close() error { +-+ panic("unimplemented") +-+} +--- generic_receiver.go -- +-package stub +- +-import "io" +- +-// This file tests that the stub method generator accounts for concrete +-// types that have type parameters defined. +-var _ io.ReaderFrom = &genReader[string, int]{} //@quickfix("&genReader", re"does not implement", generic_receiver) +- +-type genReader[T, Y any] struct { +- T T +- Y Y +-} +--- @generic_receiver/generic_receiver.go -- +-@@ -13 +13,5 @@ +-+ +-+// ReadFrom implements [io.ReaderFrom]. +-+func (g *genReader[T, Y]) ReadFrom(r io.Reader) (n int64, err error) { +-+ panic("unimplemented") +-+} +--- ignored_imports.go -- +-package stub +- +-import ( +- "compress/zlib" +- . "io" +- _ "io" +-) +- +-// This file tests that dot-imports and underscore imports +-// are properly ignored and that a new import is added to +-// reference method types +- +-var ( +- _ Reader +- _ zlib.Resetter = (*ignoredResetter)(nil) //@quickfix("(", re"does not implement", ignored_imports) +-) +- +-type ignoredResetter struct{} +--- @ignored_imports/ignored_imports.go -- +-@@ -19 +19,5 @@ +-+ +-+// Reset implements [zlib.Resetter]. +-+func (i *ignoredResetter) Reset(r Reader, dict []byte) error { +-+ panic("unimplemented") +-+} +--- issue2606.go -- +-package stub +- +-type I interface{ error } +- +-type C int +- +-var _ I = C(0) //@quickfix("C", re"does not implement", issue2606) +--- @issue2606/issue2606.go -- +-@@ -7 +7,5 @@ +-+// Error implements [I]. +-+func (c C) Error() string { +-+ panic("unimplemented") +-+} +-+ +--- multi_var.go -- +-package stub +- +-import "io" +- +-// This test ensures that a variable declaration that +-// has multiple values on the same line can still be +-// analyzed correctly to target the interface implementation +-// diagnostic. +-var one, two, three io.Reader = nil, &multiVar{}, nil //@quickfix("&", re"does not implement", multi_var) +- +-type multiVar struct{} +--- @multi_var/multi_var.go -- +-@@ -12 +12,5 @@ +-+ +-+// Read implements [io.Reader]. 
+-+func (m *multiVar) Read(p []byte) (n int, err error) { +-+ panic("unimplemented") +-+} +--- pointer.go -- +-package stub +- +-import "io" +- +-func getReaderFrom() io.ReaderFrom { +- return &pointerImpl{} //@quickfix("&", re"does not implement", pointer) +-} +- +-type pointerImpl struct{} +--- @pointer/pointer.go -- +-@@ -10 +10,5 @@ +-+ +-+// ReadFrom implements [io.ReaderFrom]. +-+func (p *pointerImpl) ReadFrom(r io.Reader) (n int64, err error) { +-+ panic("unimplemented") +-+} +--- renamed_import.go -- +-package stub +- +-import ( +- "compress/zlib" +- myio "io" +-) +- +-var _ zlib.Resetter = &myIO{} //@quickfix("&", re"does not implement", renamed_import) +-var _ myio.Reader +- +-type myIO struct{} +--- @renamed_import/renamed_import.go -- +-@@ -12 +12,5 @@ +-+ +-+// Reset implements [zlib.Resetter]. +-+func (m *myIO) Reset(r myio.Reader, dict []byte) error { +-+ panic("unimplemented") +-+} +--- renamed_import_iface.go -- +-package stub +- +-import ( +- "golang.org/lsptests/stub/other" +-) +- +-// This file tests that if an interface +-// method references an import from its own package +-// that the concrete type does not yet import, and that import happens +-// to be renamed, then we prefer the renaming of the interface. +-var _ other.Interface = &otherInterfaceImpl{} //@quickfix("&otherInterfaceImpl", re"does not implement", renamed_import_iface) +- +-type otherInterfaceImpl struct{} +--- @renamed_import_iface/renamed_import_iface.go -- +-@@ -4 +4,2 @@ +-+ "bytes" +-+ "context" +-@@ -14 +16,5 @@ +-+ +-+// Get implements [other.Interface]. +-+func (o *otherInterfaceImpl) Get(context.Context) *bytes.Buffer { +-+ panic("unimplemented") +-+} +--- stdlib.go -- +-package stub +- +-import ( +- "io" +-) +- +-var _ io.Writer = writer{} //@quickfix("w", re"does not implement", stdlib) +- +-type writer struct{} +--- @stdlib/stdlib.go -- +-@@ -10 +10,5 @@ +-+ +-+// Write implements [io.Writer]. +-+func (w writer) Write(p []byte) (n int, err error) { +-+ panic("unimplemented") +-+} +diff -urN a/gopls/internal/test/marker/testdata/quickfix/undeclared/diag.txt b/gopls/internal/test/marker/testdata/quickfix/undeclared/diag.txt +--- a/gopls/internal/test/marker/testdata/quickfix/undeclared/diag.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/quickfix/undeclared/diag.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,97 +0,0 @@ +-This test checks @diag reports for undeclared variables and functions. 
+- +--- x.go -- +-package undeclared +- +-func x() int { +- var z int +- z = y //@diag("y", re"(undeclared name|undefined): y") +- if z == m { //@diag("m", re"(undeclared name|undefined): m") +- z = 1 +- } +- +- if z == 1 { +- z = 1 +- } else if z == n+1 { //@diag("n", re"(undeclared name|undefined): n") +- z = 1 +- } +- +- switch z { +- case 10: +- z = 1 +- case aa: //@diag("aa", re"(undeclared name|undefined): aa") +- z = 1 +- } +- return z +-} +--- channels.go -- +-package undeclared +- +-func channels(s string) { +- undefinedChannels(c()) //@diag("undefinedChannels", re"(undeclared name|undefined): undefinedChannels") +-} +- +-func c() (<-chan string, chan string) { +- return make(<-chan string), make(chan string) +-} +--- consecutive_params.go -- +-package undeclared +- +-func consecutiveParams() { +- var s string +- undefinedConsecutiveParams(s, s) //@diag("undefinedConsecutiveParams", re"(undeclared name|undefined): undefinedConsecutiveParams") +-} +--- error_param.go -- +-package undeclared +- +-func errorParam() { +- var err error +- undefinedErrorParam(err) //@diag("undefinedErrorParam", re"(undeclared name|undefined): undefinedErrorParam") +-} +--- literals.go -- +-package undeclared +- +-type T struct{} +- +-func literals() { +- undefinedLiterals("hey compiler", T{}, &T{}) //@diag("undefinedLiterals", re"(undeclared name|undefined): undefinedLiterals") +-} +--- operation.go -- +-package undeclared +- +-import "time" +- +-func operation() { +- undefinedOperation(10 * time.Second) //@diag("undefinedOperation", re"(undeclared name|undefined): undefinedOperation") +-} +--- selector.go -- +-package undeclared +- +-func selector() { +- m := map[int]bool{} +- undefinedSelector(m[1]) //@diag("undefinedSelector", re"(undeclared name|undefined): undefinedSelector") +-} +--- slice.go -- +-package undeclared +- +-func slice() { +- undefinedSlice([]int{1, 2}) //@diag("undefinedSlice", re"(undeclared name|undefined): undefinedSlice") +-} +--- tuple.go -- +-package undeclared +- +-func tuple() { +- undefinedTuple(b()) //@diag("undefinedTuple", re"(undeclared name|undefined): undefinedTuple") +-} +- +-func b() (string, error) { +- return "", nil +-} +--- unique.go -- +-package undeclared +- +-func uniqueArguments() { +- var s string +- var i int +- undefinedUniqueArguments(s, i, s) //@diag("undefinedUniqueArguments", re"(undeclared name|undefined): undefinedUniqueArguments") +-} +diff -urN a/gopls/internal/test/marker/testdata/quickfix/undeclared/missingfunction.txt b/gopls/internal/test/marker/testdata/quickfix/undeclared/missingfunction.txt +--- a/gopls/internal/test/marker/testdata/quickfix/undeclared/missingfunction.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/quickfix/undeclared/missingfunction.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,155 +0,0 @@ +-This test checks the quick fix for undefined functions. 
+- +--- channels.go -- +-package missingfunction +- +-func channels(s string) { +- undefinedChannels(c()) //@quickfix("undefinedChannels", re"(undeclared|undefined)", channels) +-} +- +-func c() (<-chan string, chan string) { +- return make(<-chan string), make(chan string) +-} +--- @channels/channels.go -- +-@@ -7 +7,4 @@ +-+func undefinedChannels(ch1 <-chan string, ch2 chan string) { +-+ panic("unimplemented") +-+} +-+ +--- consecutive.go -- +-package missingfunction +- +-func consecutiveParams() { +- var s string +- undefinedConsecutiveParams(s, s) //@quickfix("undefinedConsecutiveParams", re"(undeclared|undefined)", consecutive) +-} +--- @consecutive/consecutive.go -- +-@@ -7 +7,4 @@ +-+ +-+func undefinedConsecutiveParams(s1, s2 string) { +-+ panic("unimplemented") +-+} +--- error.go -- +-package missingfunction +- +-func errorParam() { +- var err error +- undefinedErrorParam(err) //@quickfix("undefinedErrorParam", re"(undeclared|undefined)", error) +-} +--- @error/error.go -- +-@@ -7 +7,4 @@ +-+ +-+func undefinedErrorParam(err error) { +-+ panic("unimplemented") +-+} +--- literals.go -- +-package missingfunction +- +-type T struct{} +- +-func literals() { +- undefinedLiterals("hey compiler", T{}, &T{}) //@quickfix("undefinedLiterals", re"(undeclared|undefined)", literals) +-} +--- @literals/literals.go -- +-@@ -8 +8,4 @@ +-+ +-+func undefinedLiterals(s string, t1 T, t2 *T) { +-+ panic("unimplemented") +-+} +--- operation.go -- +-package missingfunction +- +-import "time" +- +-func operation() { +- undefinedOperation(10 * time.Second) //@quickfix("undefinedOperation", re"(undeclared|undefined)", operation) +-} +--- @operation/operation.go -- +-@@ -8 +8,4 @@ +-+ +-+func undefinedOperation(duration time.Duration) { +-+ panic("unimplemented") +-+} +--- selector.go -- +-package missingfunction +- +-func selector() { +- m := map[int]bool{} +- undefinedSelector(m[1]) //@quickfix("undefinedSelector", re"(undeclared|undefined)", selector) +-} +--- @selector/selector.go -- +-@@ -7 +7,4 @@ +-+ +-+func undefinedSelector(b bool) { +-+ panic("unimplemented") +-+} +--- slice.go -- +-package missingfunction +- +-func slice() { +- undefinedSlice([]int{1, 2}) //@quickfix("undefinedSlice", re"(undeclared|undefined)", slice) +-} +--- @slice/slice.go -- +-@@ -6 +6,4 @@ +-+ +-+func undefinedSlice(i []int) { +-+ panic("unimplemented") +-+} +--- tuple.go -- +-package missingfunction +- +-func tuple() { +- undefinedTuple(b()) //@quickfix("undefinedTuple", re"(undeclared|undefined)", tuple) +-} +- +-func b() (string, error) { +- return "", nil +-} +--- @tuple/tuple.go -- +-@@ -7 +7,4 @@ +-+func undefinedTuple(s string, err error) { +-+ panic("unimplemented") +-+} +-+ +--- unique_params.go -- +-package missingfunction +- +-func uniqueArguments() { +- var s string +- var i int +- undefinedUniqueArguments(s, i, s) //@quickfix("undefinedUniqueArguments", re"(undeclared|undefined)", unique) +-} +--- @unique/unique_params.go -- +-@@ -8 +8,4 @@ +-+ +-+func undefinedUniqueArguments(s1 string, i int, s2 string) { +-+ panic("unimplemented") +-+} +--- param.go -- +-package missingfunction +- +-func inferFromParam() { +- f(as_param()) //@quickfix("as_param", re"undefined", infer_param) +-} +- +-func f(i int) {} +--- @infer_param/param.go -- +-@@ -7 +7,4 @@ +-+func as_param() int { +-+ panic("unimplemented") +-+} +-+ +--- assign.go -- +-package missingfunction +- +-func inferFromAssign() { +- i := 42 +- i = i +- i = assign() //@quickfix("assign", re"undefined", infer_assign) +-} +--- @infer_assign/assign.go -- +-@@ -8 +8,4 
@@ +-+ +-+func assign() int { +-+ panic("unimplemented") +-+} +diff -urN a/gopls/internal/test/marker/testdata/quickfix/undeclared/undeclaredfunc.txt b/gopls/internal/test/marker/testdata/quickfix/undeclared/undeclaredfunc.txt +--- a/gopls/internal/test/marker/testdata/quickfix/undeclared/undeclaredfunc.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/quickfix/undeclared/undeclaredfunc.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,17 +0,0 @@ +-This test checks the quick fix for "undeclared: f" that declares the +-missing function. See #47558. +- +--- a.go -- +-package a +- +-func _() int { return f(1, "") } //@quickfix(re"f.1", re"unde(fined|clared name): f", x) +- +--- @x/a.go -- +-@@ -3 +3 @@ +--func _() int { return f(1, "") } //@quickfix(re"f.1", re"unde(fined|clared name): f", x) +-+func _() int { return f(1, "") } +-@@ -5 +5,4 @@ +-+func f(i int, s string) int { +-+ panic("unimplemented") +-+} //@quickfix(re"f.1", re"unde(fined|clared name): f", x) +-+ +diff -urN a/gopls/internal/test/marker/testdata/quickfix/undeclared/undeclared_variable.txt b/gopls/internal/test/marker/testdata/quickfix/undeclared/undeclared_variable.txt +--- a/gopls/internal/test/marker/testdata/quickfix/undeclared/undeclared_variable.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/quickfix/undeclared/undeclared_variable.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,108 +0,0 @@ +-Tests of suggested fixes for "undeclared name" diagnostics, +-which are of ("compiler", "error") type. +- +--- flags -- +--ignore_extra_diags +- +--- go.mod -- +-module example.com +-go 1.12 +- +--- a.go -- +-package undeclared_var +- +-func a() { +- z, _ := 1+y, 11 //@quickfix("y", re"(undeclared name|undefined): y", a) +- _ = z +-} +- +--- @a/a.go -- +-@@ -4 +4 @@ +-+ y := 0 +--- b.go -- +-package undeclared_var +- +-func b() { +- if 100 < 90 { +- } else if 100 > n+2 { //@quickfix("n", re"(undeclared name|undefined): n", b) +- } +-} +- +--- @b/b.go -- +-@@ -4 +4 @@ +-+ n := 0 +--- c.go -- +-package undeclared_var +- +-func c() { +- for i < 200 { //@quickfix("i", re"(undeclared name|undefined): i", c) +- } +- r() //@diag("r", re"(undeclared name|undefined): r") +-} +- +--- @c/c.go -- +-@@ -4 +4 @@ +-+ i := 0 +--- add_colon.go -- +-package undeclared_var +- +-func addColon() { +- ac = 1 //@quickfix("ac", re"(undeclared name|undefined): ac", add_colon) +-} +- +--- @add_colon/add_colon.go -- +-@@ -4 +4 @@ +-- ac = 1 //@quickfix("ac", re"(undeclared name|undefined): ac", add_colon) +-+ ac := 1 //@quickfix("ac", re"(undeclared name|undefined): ac", add_colon) +--- add_colon_first.go -- +-package undeclared_var +- +-func addColonAtFirstStmt() { +- ac = 1 +- ac = 2 +- ac = 3 +- b := ac //@quickfix("ac", re"(undeclared name|undefined): ac", add_colon_first) +-} +- +--- @add_colon_first/add_colon_first.go -- +-@@ -4 +4 @@ +-- ac = 1 +-+ ac := 1 +--- self_assign.go -- +-package undeclared_var +- +-func selfAssign() { +- ac = ac + 1 +- ac = ac + 2 //@quickfix("ac", re"(undeclared name|undefined): ac", lhs) +- ac = ac + 3 //@quickfix("ac + 3", re"(undeclared name|undefined): ac", rhs) +-} +- +--- @lhs/self_assign.go -- +-@@ -4 +4 @@ +-+ ac := nil +--- @rhs/self_assign.go -- +-@@ -4 +4 @@ +-+ ac := 0 +--- correct_type.go -- +-package undeclared_var +-import "fmt" +-func selfAssign() { +- fmt.Printf(ac) //@quickfix("ac", re"(undeclared name|undefined): ac", string) +-} +--- @string/correct_type.go -- +-@@ -4 +4 @@ +-+ ac := "" +--- ignore.go -- +-package undeclared_var +-import 
"fmt" +-type Foo struct { +- bar int +-} +-func selfAssign() { +- f := Foo{} +- b = f.bar +- c := bar //@quickfix("bar", re"(undeclared name|undefined): bar", ignore) +-} +--- @ignore/ignore.go -- +-@@ -9 +9 @@ +-+ bar := nil +diff -urN a/gopls/internal/test/marker/testdata/quickfix/unusedrequire_gowork.txt b/gopls/internal/test/marker/testdata/quickfix/unusedrequire_gowork.txt +--- a/gopls/internal/test/marker/testdata/quickfix/unusedrequire_gowork.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/quickfix/unusedrequire_gowork.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,48 +0,0 @@ +-This test checks the suggested fix to remove unused require statements from +-go.mod files, when a go.work file is used. +- +-Note that unlike unusedrequire.txt, we need not write go.sum files when +-a go.work file is used. +- +--- proxy/example.com@v1.0.0/x.go -- +-package pkg +-const X = 1 +- +--- go.work -- +-go 1.21 +- +-use ( +- ./a +- ./b +-) +--- a/go.mod -- +-module mod.com/a +- +-go 1.14 +- +-require example.com v1.0.0 //@quickfix("require", re"not used", a) +- +--- @a/a/go.mod -- +-@@ -4,3 +4 @@ +-- +--require example.com v1.0.0 //@quickfix("require", re"not used", a) +-- +--- a/main.go -- +-package main +-func main() {} +- +--- b/go.mod -- +-module mod.com/b +- +-go 1.14 +- +-require example.com v1.0.0 //@quickfix("require", re"not used", b) +- +--- @b/b/go.mod -- +-@@ -4,3 +4 @@ +-- +--require example.com v1.0.0 //@quickfix("require", re"not used", b) +-- +--- b/main.go -- +-package main +-func main() {} +diff -urN a/gopls/internal/test/marker/testdata/quickfix/unusedrequire.txt b/gopls/internal/test/marker/testdata/quickfix/unusedrequire.txt +--- a/gopls/internal/test/marker/testdata/quickfix/unusedrequire.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/quickfix/unusedrequire.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,25 +0,0 @@ +-This test checks the suggested fix to remove unused require statements from +-go.mod files. +- +--- flags -- +--write_sumfile=a +- +--- proxy/example.com@v1.0.0/x.go -- +-package pkg +-const X = 1 +- +--- a/go.mod -- +-module mod.com +- +-go 1.14 +- +-require example.com v1.0.0 //@quickfix("require", re"not used", a) +- +--- @a/a/go.mod -- +-@@ -4,3 +4 @@ +-- +--require example.com v1.0.0 //@quickfix("require", re"not used", a) +-- +--- a/main.go -- +-package main +-func main() {} +diff -urN a/gopls/internal/test/marker/testdata/references/crosspackage.txt b/gopls/internal/test/marker/testdata/references/crosspackage.txt +--- a/gopls/internal/test/marker/testdata/references/crosspackage.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/references/crosspackage.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,37 +0,0 @@ +-Test of basic cross-package references. 
+- +--- go.mod -- +-module example.com +-go 1.12 +- +--- a/a.go -- +-package a +- +-type X struct { +- Y int //@loc(typeXY, "Y") +-} +- +--- b/b.go -- +-package b +- +-import "example.com/a" +- +-func GetXes() []a.X { +- return []a.X{ +- { +- Y: 1, //@loc(GetXesY, "Y"), refs("Y", typeXY, GetXesY, anotherXY) +- }, +- } +-} +- +--- c/c.go -- +-package c +- +-import "example.com/b" +- +-func _() { +- xes := b.GetXes() +- for _, x := range xes { //@loc(defX, "x") +- _ = x.Y //@loc(useX, "x"), loc(anotherXY, "Y"), refs("Y", typeXY, anotherXY, GetXesY), refs(".", defX, useX), refs("x", defX, useX) +- } +-} +diff -urN a/gopls/internal/test/marker/testdata/references/imports.txt b/gopls/internal/test/marker/testdata/references/imports.txt +--- a/gopls/internal/test/marker/testdata/references/imports.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/references/imports.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,17 +0,0 @@ +-Test of references to local package names (imports). +- +--- go.mod -- +-module example.com +-go 1.12 +- +--- a/a.go -- +-package a +- +-import "os" //@loc(osDef, `"os"`), refs("os", osDef, osUse) +- +-import fmt2 "fmt" //@loc(fmt2Def, `fmt2`), refs("fmt2", fmt2Def, fmt2Use) +- +-func _() { +- os.Getwd() //@loc(osUse, "os") +- fmt2.Println() //@loc(fmt2Use, "fmt2") +-} +diff -urN a/gopls/internal/test/marker/testdata/references/interfaces.txt b/gopls/internal/test/marker/testdata/references/interfaces.txt +--- a/gopls/internal/test/marker/testdata/references/interfaces.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/references/interfaces.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,42 +0,0 @@ +-Test of references applied to concrete and interface types that are +-related by assignability. The result includes references to both. 
+- +--- go.mod -- +-module example.com +-go 1.12 +- +--- a/a.go -- +-package a +- +-type first interface { +- common() //@loc(firCommon, "common"), refs("common", firCommon, xCommon, zCommon) +- firstMethod() //@loc(firMethod, "firstMethod"), refs("firstMethod", firMethod, xfMethod, zfMethod) +-} +- +-type second interface { +- common() //@loc(secCommon, "common"), refs("common", secCommon, yCommon, zCommon) +- secondMethod() //@loc(secMethod, "secondMethod"), refs("secondMethod", secMethod, ysMethod, zsMethod) +-} +- +-type s struct {} +- +-func (*s) common() {} //@loc(sCommon, "common"), refs("common", sCommon, xCommon, yCommon, zCommon) +- +-func (*s) firstMethod() {} //@loc(sfMethod, "firstMethod"), refs("firstMethod", sfMethod, xfMethod, zfMethod) +- +-func (*s) secondMethod() {} //@loc(ssMethod, "secondMethod"), refs("secondMethod", ssMethod, ysMethod, zsMethod) +- +-func main() { +- var x first = &s{} +- var y second = &s{} +- +- x.common() //@loc(xCommon, "common"), refs("common", firCommon, xCommon, zCommon) +- x.firstMethod() //@loc(xfMethod, "firstMethod"), refs("firstMethod", firMethod, xfMethod, zfMethod) +- y.common() //@loc(yCommon, "common"), refs("common", secCommon, yCommon, zCommon) +- y.secondMethod() //@loc(ysMethod, "secondMethod"), refs("secondMethod", secMethod, ysMethod, zsMethod) +- +- var z *s = &s{} +- z.firstMethod() //@loc(zfMethod, "firstMethod"), refs("firstMethod", sfMethod, xfMethod, zfMethod) +- z.secondMethod() //@loc(zsMethod, "secondMethod"), refs("secondMethod", ssMethod, ysMethod, zsMethod) +- z.common() //@loc(zCommon, "common"), refs("common", sCommon, xCommon, yCommon, zCommon) +-} +diff -urN a/gopls/internal/test/marker/testdata/references/intrapackage.txt b/gopls/internal/test/marker/testdata/references/intrapackage.txt +--- a/gopls/internal/test/marker/testdata/references/intrapackage.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/references/intrapackage.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,36 +0,0 @@ +-Basic test of references within a single package. +- +--- go.mod -- +-module example.com +-go 1.12 +- +--- a/a.go -- +-package a +- +-type i int //@loc(decli, "i"), refs("i", decli, argi, returni, embeddedi) +- +-func _(_ i) []bool { //@loc(argi, "i") +- return nil +-} +- +-func _(_ []byte) i { //@loc(returni, "i") +- return 0 +-} +- +-var q string //@loc(declq, "q"), refs("q", declq, assignq, bobq) +- +-var Q string //@loc(declQ, "Q"), refs("Q", declQ) +- +-func _() { +- q = "hello" //@loc(assignq, "q") +- bob := func(_ string) {} +- bob(q) //@loc(bobq, "q") +-} +- +-type e struct { +- i //@loc(embeddedi, "i"), refs("i", embeddedi, embeddediref) +-} +- +-func _() { +- _ = e{}.i //@loc(embeddediref, "i") +-} +diff -urN a/gopls/internal/test/marker/testdata/references/issue58506.txt b/gopls/internal/test/marker/testdata/references/issue58506.txt +--- a/gopls/internal/test/marker/testdata/references/issue58506.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/references/issue58506.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,56 +0,0 @@ +-Regression test for 'references' bug golang/go#58506. +- +-The 'references' query below, applied to method A.F, implicitly uses +-the 'implementation' operation. The correct response includes two +-references to B.F, one from package b and one from package d. +-However, the incremental 'implementation' algorithm had a bug that +-cause it to fail to report the reference from package b. 
+- +-The reason was that the incremental implementation uses different +-algorithms for the local and global cases (with disjoint results), and +-that when it discovered that type A satisfies interface B and thus +-that B.F must be included among the global search targets, the +-implementation forgot to also search package b for local references +-to B.F. +- +--- go.mod -- +-module example.com +-go 1.18 +- +--- a/a.go -- +-package a +- +-type A int +- +-func (A) F() {} //@loc(refa, "F"), refs("F", refa, refb, refd) +- +--- b/b.go -- +-package b +- +-import ( +- "example.com/a" +- "example.com/c" +-) +- +-type B interface{ F() } +- +-var _ B = a.A(0) +-var _ B = c.C(0) +- +-var _ = B.F //@loc(refb, "F") +- +--- c/c.go -- +-package c +- +-type C int +- +-// Even though C.F is "rename coupled" to A.F by B.F, +-// it should not be among the results. +-func (C) F() {} +- +--- d/d.go -- +-package d +- +-import "example.com/b" +- +-var _ any = b.B.F //@loc(refd, "F") +diff -urN a/gopls/internal/test/marker/testdata/references/issue59851.txt b/gopls/internal/test/marker/testdata/references/issue59851.txt +--- a/gopls/internal/test/marker/testdata/references/issue59851.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/references/issue59851.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,29 +0,0 @@ +-Regression test for 'references' bug golang/go#59851. +- +--- go.mod -- +-module example.com +-go 1.12 +- +--- a/a.go -- +-package a +- +-type Iface interface { +- Method() +-} +- +-type implOne struct{} +- +-func (implOne) Method() {} //@loc(def1, "Method"), refs(def1, def1, ref1, iref, ireftest) +- +-var _ = implOne.Method //@loc(ref1, "Method") +-var _ = Iface(nil).Method //@loc(iref, "Method") +- +--- a/a_test.go -- +-package a +- +-type implTwo struct{} +- +-func (implTwo) Method() {} //@loc(def2, "Method"), refs(def2, def2, iref, ref2, ireftest) +- +-var _ = implTwo.Method //@loc(ref2, "Method") +-var _ = Iface(nil).Method //@loc(ireftest, "Method") +diff -urN a/gopls/internal/test/marker/testdata/references/issue60369.txt b/gopls/internal/test/marker/testdata/references/issue60369.txt +--- a/gopls/internal/test/marker/testdata/references/issue60369.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/references/issue60369.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,26 +0,0 @@ +-Regression test for 'references' bug golang/go#60369: a references +-query on the embedded type name T in struct{p.T} instead reports all +-references to the package name p. +- +-The bug was fixed in release go1.21 of go/types. +- +--- go.mod -- +-module example.com +-go 1.12 +- +--- a/a.go -- +-package a +- +-type A struct{} +-const C = 0 +- +--- b/b.go -- +-package b +- +-import a "example.com/a" //@loc(adef, "a") +-type s struct { +- a.A //@loc(Aref1, "A"), loc(aref1, "a"), refs(Aref1, Aref1, Aref3), refs(aref1, adef, aref1, aref2, aref3) +-} +-var _ a.A //@loc(aref2, re" (a)"), loc(Aref2, "A") +-var _ = s{}.A //@loc(Aref3, "A") +-const _ = a.C //@loc(aref3, "a") +diff -urN a/gopls/internal/test/marker/testdata/references/issue60622.txt b/gopls/internal/test/marker/testdata/references/issue60622.txt +--- a/gopls/internal/test/marker/testdata/references/issue60622.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/references/issue60622.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,22 +0,0 @@ +-Regression test for 'references' bug golang/go#60622: +-references to methods of generics were missing. 
+- +--- go.mod -- +-module example.com +-go 1.18 +- +--- a/a.go -- +-package a +- +-type G[T any] struct{} +- +-func (G[T]) M() {} //@loc(Mdef, "M"), refs(Mdef, Mdef, Mref) +- +--- b/b.go -- +-package b +- +-import "example.com/a" +- +-func _() { +- new(a.G[int]).M() //@loc(Mref, "M") +-} +diff -urN a/gopls/internal/test/marker/testdata/references/issue60676.txt b/gopls/internal/test/marker/testdata/references/issue60676.txt +--- a/gopls/internal/test/marker/testdata/references/issue60676.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/references/issue60676.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,68 +0,0 @@ +-This test verifies that even after importing from export data, the references +-algorithm is able to find all references to struct fields or methods that are +-shared by types from multiple packages. See golang/go#60676. +- +-Note that the marker test runner awaits the initial workspace load, so export +-data should be populated at the time references are requested. +- +--- go.mod -- +-module mod.test +- +-go 1.18 +- +--- a/a.go -- +-package a +- +-type A struct { +- F int //@loc(FDef, "F") +- E //@loc(EDef, "E") +-} +- +-type E struct { +- G string //@loc(GDef, "G") +-} +- +-type AI interface { +- M() //@loc(MDef, "M") +- EI +- error +-} +- +-type EI interface { +- N() //@loc(NDef, "N") +-} +- +-type T[P any] struct{ f P } +- +-type Error error +- +- +--- b/b.go -- +-package b +- +-import "mod.test/a" +- +-type B a.A +- +-type BI a.AI +- +-type T a.T[int] // must not panic +- +--- c/c.go -- +-package c +- +-import "mod.test/b" +- +-func _() { +- x := b.B{ +- F: 42, //@refs("F", FDef, "F", Fuse) +- } +- x.G = "hi" //@refs("G", GDef, "G") +- _ = x.E //@refs("E", EDef, "E") +- _ = x.F //@loc(Fuse, "F") +-} +- +-func _(y b.BI) { +- _ = y.M //@refs("M", MDef, "M") +- _ = y.N //@refs("N", NDef, "N") +-} +diff -urN a/gopls/internal/test/marker/testdata/references/issue61618.txt b/gopls/internal/test/marker/testdata/references/issue61618.txt +--- a/gopls/internal/test/marker/testdata/references/issue61618.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/references/issue61618.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,36 +0,0 @@ +-Regression test for 'references' bug golang/go#61618: +-references to instantiated fields were missing. +- +--- go.mod -- +-module example.com +-go 1.18 +- +--- a.go -- +-package a +- +-// This file is adapted from the example in the issue. 
+- +-type builder[S ~[]F, F ~string] struct { +- name string +- elements S //@loc(def, "elements"), refs(def, def, assign, use) +- elemData map[F][]ElemData[F] +-} +- +-type ElemData[F ~string] struct { +- Name F +-} +- +-type BuilderImpl[S ~[]F, F ~string] struct{ builder[S, F] } +- +-func NewBuilderImpl[S ~[]F, F ~string](name string) *BuilderImpl[S, F] { +- impl := &BuilderImpl[S,F]{ +- builder[S, F]{ +- name: name, +- elements: S{}, //@loc(assign, "elements"), refs(assign, def, assign, use) +- elemData: map[F][]ElemData[F]{}, +- }, +- } +- +- _ = impl.elements //@loc(use, "elements"), refs(use, def, assign, use) +- return impl +-} +diff -urN a/gopls/internal/test/marker/testdata/references/issue67978.txt b/gopls/internal/test/marker/testdata/references/issue67978.txt +--- a/gopls/internal/test/marker/testdata/references/issue67978.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/references/issue67978.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,18 +0,0 @@ +- +-This test exercises a references query on an exported method that +-conflicts with a field name. This ill-typed input violates the +-assumption that if type T has a method, then the method set of T is +-nonempty, which led to a crash. +- +-See https://github.com/golang/go/issues/67978. +- +--- a.go -- +-package p +- +-type E struct { X int } //@ diag(re"()X", re"field.*same name") +- +-func (E) X() {} //@ loc(a, "X"), refs("X", a, b), diag(re"()X", re"method.*same name") +- +-var _ = new(E).X //@ loc(b, "X") +- +- +diff -urN a/gopls/internal/test/marker/testdata/references/shadow.txt b/gopls/internal/test/marker/testdata/references/shadow.txt +--- a/gopls/internal/test/marker/testdata/references/shadow.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/references/shadow.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,17 +0,0 @@ +-Test of references in the presence of shadowing. +- +--- go.mod -- +-module example.com +-go 1.12 +- +--- a/a.go -- +-package a +- +-func _() { +- x := 123 //@loc(x1, "x"), refs("x", x1, x1ref) +- _ = x //@loc(x1ref, "x") +- { +- x := "hi" //@loc(x2, "x"), refs("x", x2, x2ref) +- _ = x //@loc(x2ref, "x") +- } +-} +diff -urN a/gopls/internal/test/marker/testdata/references/test.txt b/gopls/internal/test/marker/testdata/references/test.txt +--- a/gopls/internal/test/marker/testdata/references/test.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/references/test.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,29 +0,0 @@ +-Test of references between the extra files of a test variant +-and the regular package. +- +--- go.mod -- +-module example.com +-go 1.12 +- +--- a/a.go -- +-package a +- +-func fn() {} //@loc(def, "fn"), refs("fn", def, use) +- +-type t struct { g int } //@loc(gdef, "g") +-type u struct { t } +- +-var _ = new(u).g //@loc(gref, "g"), refs("g", gdef, gref) +-// TODO(adonovan): fix: gref2 and gdef2 are missing. +- +--- a/a_test.go -- +-package a +- +-func _() { +- fn() //@loc(use, "fn") +- +- _ = new(u).g //@loc(gref2, "g"), refs("g", gdef2, gref, gref2) +-} +- +-// This declaration changes the meaning of u.t in the test. 
+-func (u) g() {} //@loc(gdef2, "g") +diff -urN a/gopls/internal/test/marker/testdata/references/typeswitch.txt b/gopls/internal/test/marker/testdata/references/typeswitch.txt +--- a/gopls/internal/test/marker/testdata/references/typeswitch.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/references/typeswitch.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,18 +0,0 @@ +-Tests of reference to implicit type switch vars, which are +-a special case in go/types.Info{Def,Use,Implicits}. +- +--- go.mod -- +-module example.com +-go 1.18 +- +--- a/a.go -- +-package a +- +-func _(x any) { +- switch y := x.(type) { //@loc(yDecl, "y"), refs("y", yDecl, yInt, yDefault) +- case int: +- println(y) //@loc(yInt, "y"), refs("y", yDecl, yInt, yDefault) +- default: +- println(y) //@loc(yDefault, "y") +- } +-} +diff -urN a/gopls/internal/test/marker/testdata/rename/bad.txt b/gopls/internal/test/marker/testdata/rename/bad.txt +--- a/gopls/internal/test/marker/testdata/rename/bad.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/rename/bad.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,29 +0,0 @@ +-This test checks that rename fails in the presence of errors. +- +--- go.mod -- +-module golang.org/lsptests/bad +- +-go 1.18 +- +--- bad.go -- +-package bad +- +-type myStruct struct { +-} +- +-func (s *myStruct) sFunc() bool { //@renameerr("sFunc", "rFunc", "not possible because \"bad.go\" in \"golang.org/lsptests/bad\" has errors") +- return s.Bad //@diag("Bad", re"no field or method") +-} +- +--- bad_test.go -- +-package bad +- +- +--- badsyntax/badsyntax.go -- +-package badsyntax +- +-type S struct {} +- +-func (s *S) sFunc() bool { //@renameerr("sFunc", "rFunc", "not possible because \"badsyntax.go\" in \"golang.org/lsptests/bad/badsyntax\" has errors") +- # //@diag("#", re"expected statement, found") +-} +diff -urN a/gopls/internal/test/marker/testdata/rename/basic.txt b/gopls/internal/test/marker/testdata/rename/basic.txt +--- a/gopls/internal/test/marker/testdata/rename/basic.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/rename/basic.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,35 +0,0 @@ +-This test performs basic coverage of 'rename' within a single package. 
+- +--- basic.go -- +-package p +- +-func _(x int) { println(x) } //@rename("x", "y", xToy) +- +--- @xToy/basic.go -- +-@@ -3 +3 @@ +--func _(x int) { println(x) } //@rename("x", "y", xToy) +-+func _(y int) { println(y) } //@rename("x", "y", xToy) +--- alias.go -- +-package p +- +-// from golang/go#61625 +-type LongNameHere struct{} +-type A = LongNameHere //@rename("A", "B", AToB) +-func Foo() A +- +--- errors.go -- +-package p +- +-func _(x []int) { //@renameerr("_", "blank", `can't rename "_"`) +- x = append(x, 1) //@renameerr("append", "blank", "built in and cannot be renamed") +- x = nil //@renameerr("nil", "blank", "built in and cannot be renamed") +- x = nil //@renameerr("x", "x", "old and new names are the same: x") +- _ = 1 //@renameerr("1", "x", "no identifier found") +-} +- +--- @AToB/alias.go -- +-@@ -5,2 +5,2 @@ +--type A = LongNameHere //@rename("A", "B", AToB) +--func Foo() A +-+type B = LongNameHere //@rename("A", "B", AToB) +-+func Foo() B +diff -urN a/gopls/internal/test/marker/testdata/rename/conflict.txt b/gopls/internal/test/marker/testdata/rename/conflict.txt +--- a/gopls/internal/test/marker/testdata/rename/conflict.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/rename/conflict.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,62 +0,0 @@ +-This test exercises some renaming conflict scenarios +-and ensures that the errors are informative. +- +--- settings.json -- +-{"analyses": {"unusedfunc": false}} +- +--- go.mod -- +-module example.com +-go 1.12 +- +--- super/p.go -- +-package super +- +-var x int +- +-func _(y int) { +- println(x) +- println(y) //@renameerr("y", "x", errSuperBlockConflict) +-} +- +--- @errSuperBlockConflict -- +-super/p.go:5:8: renaming this var "y" to "x" +-super/p.go:6:10: would shadow this reference +-super/p.go:3:5: to the var declared here +--- sub/p.go -- +-package sub +- +-var a int +- +-func _(b int) { +- println(a) //@renameerr("a", "b", errSubBlockConflict) +- println(b) +-} +- +--- @errSubBlockConflict -- +-sub/p.go:3:5: renaming this var "a" to "b" +-sub/p.go:6:10: would cause this reference to become shadowed +-sub/p.go:5:8: by this intervening var definition +--- pkgname/p.go -- +-package pkgname +- +-import e1 "errors" //@renameerr("e1", "errors", errImportConflict) +-import "errors" +- +-var _ = errors.New +-var _ = e1.New +- +--- @errImportConflict -- +-pkgname/p.go:3:8: renaming this imported package name "e1" to "errors" +-pkgname/p.go:4:8: conflicts with imported package name in same block +--- pkgname2/p1.go -- +-package pkgname2 +-var x int +- +--- pkgname2/p2.go -- +-package pkgname2 +-import "errors" //@renameerr("errors", "x", errImportConflict2) +-var _ = errors.New +- +--- @errImportConflict2 -- +-pkgname2/p2.go:2:8: renaming this imported package name "errors" to "x" would conflict +-pkgname2/p1.go:2:5: with this package member var +diff -urN a/gopls/internal/test/marker/testdata/rename/crosspkg.txt b/gopls/internal/test/marker/testdata/rename/crosspkg.txt +--- a/gopls/internal/test/marker/testdata/rename/crosspkg.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/rename/crosspkg.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,74 +0,0 @@ +-This test checks cross-package renaming. 
+- +--- go.mod -- +-module golang.org/lsptests/rename +- +-go 1.18 +- +--- crosspkg/crosspkg.go -- +-package crosspkg +- +-func Foo() { //@rename("Foo", "Dolphin", FooToDolphin) +- +-} +- +-var Bar int //@rename("Bar", "Tomato", BarToTomato) +- +--- crosspkg/another/another.go -- +-package another +- +-type ( +- I interface{ F() } +- C struct{ I } +-) +- +-func (C) g() +- +-func _() { +- var x I = C{} +- x.F() //@rename("F", "G", FToG) +-} +- +-var _ = C.g +- +--- crosspkg/other/other.go -- +-package other +- +-import "golang.org/lsptests/rename/crosspkg" +- +-func Other() { +- crosspkg.Bar //@diag("crosspkg", re"not used") +- crosspkg.Foo() //@rename("Foo", "Flamingo", FooToFlamingo) +-} +- +--- @BarToTomato/crosspkg/crosspkg.go -- +-@@ -7 +7 @@ +--var Bar int //@rename("Bar", "Tomato", BarToTomato) +-+var Tomato int //@rename("Bar", "Tomato", BarToTomato) +--- @BarToTomato/crosspkg/other/other.go -- +-@@ -6 +6 @@ +-- crosspkg.Bar //@diag("crosspkg", re"not used") +-+ crosspkg.Tomato //@diag("crosspkg", re"not used") +--- @FToG/crosspkg/another/another.go -- +-@@ -4 +4 @@ +-- I interface{ F() } +-+ I interface{ G() } +-@@ -12 +12 @@ +-- x.F() //@rename("F", "G", FToG) +-+ x.G() //@rename("F", "G", FToG) +--- @FooToDolphin/crosspkg/crosspkg.go -- +-@@ -3 +3 @@ +--func Foo() { //@rename("Foo", "Dolphin", FooToDolphin) +-+func Dolphin() { //@rename("Foo", "Dolphin", FooToDolphin) +--- @FooToDolphin/crosspkg/other/other.go -- +-@@ -7 +7 @@ +-- crosspkg.Foo() //@rename("Foo", "Flamingo", FooToFlamingo) +-+ crosspkg.Dolphin() //@rename("Foo", "Flamingo", FooToFlamingo) +--- @FooToFlamingo/crosspkg/crosspkg.go -- +-@@ -3 +3 @@ +--func Foo() { //@rename("Foo", "Dolphin", FooToDolphin) +-+func Flamingo() { //@rename("Foo", "Dolphin", FooToDolphin) +--- @FooToFlamingo/crosspkg/other/other.go -- +-@@ -7 +7 @@ +-- crosspkg.Foo() //@rename("Foo", "Flamingo", FooToFlamingo) +-+ crosspkg.Flamingo() //@rename("Foo", "Flamingo", FooToFlamingo) +diff -urN a/gopls/internal/test/marker/testdata/rename/doclink.txt b/gopls/internal/test/marker/testdata/rename/doclink.txt +--- a/gopls/internal/test/marker/testdata/rename/doclink.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/rename/doclink.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,180 +0,0 @@ +-This test checks that doc links are also handled correctly (golang/go#64495). 
+- +--- go.mod -- +-module example.com +- +-go 1.21 +- +--- a/a.go -- +-package a +- +-// Foo just for test [Foo] +-// reference others objects [A] [B] [C] [C.F] [C.PF] +-func Foo() {} //@rename("Foo", "Bar", FooToBar) +- +-const A = 1 //@rename("A", "AA", AToAA) +- +-var B = 1 //@rename("B", "BB", BToBB) +- +-type C int //@rename("C", "CC", CToCC) +- +-func (C) F() {} //@rename("F", "FF", FToFF) +- +-func (*C) PF() {} //@rename("PF", "PFF", PFToPFF) +- +-// D just for test [*D] +-type D int //@rename("D", "DD", DToDD) +- +-// E test generic type doc link [E] [E.Foo] +-type E[T any] struct { //@rename("E", "EE", EToEE) +- Field T +-} +- +-func (E[T]) Foo() {} //@rename("Foo", "Bar", EFooToEBar) +- +--- b/b.go -- +-package b +- +-import aa "example.com/a" //@rename("aa", "a", pkgRename) +- +-// FooBar just for test [aa.Foo] [aa.A] [aa.B] [aa.C] [aa.C.F] [aa.C.PF] +-// reference pointer type [*aa.D] +-// reference generic type links [aa.E] [aa.E.Foo] +-func FooBar() { +- aa.Foo() +- var e aa.E[int] +- e.Foo() +-} +- +- +--- @FooToBar/a/a.go -- +-@@ -3 +3 @@ +--// Foo just for test [Foo] +-+// Bar just for test [Bar] +-@@ -5 +5 @@ +--func Foo() {} //@rename("Foo", "Bar", FooToBar) +-+func Bar() {} //@rename("Foo", "Bar", FooToBar) +--- @FooToBar/b/b.go -- +-@@ -5 +5 @@ +--// FooBar just for test [aa.Foo] [aa.A] [aa.B] [aa.C] [aa.C.F] [aa.C.PF] +-+// FooBar just for test [aa.Bar] [aa.A] [aa.B] [aa.C] [aa.C.F] [aa.C.PF] +-@@ -9 +9 @@ +-- aa.Foo() +-+ aa.Bar() +--- @AToAA/a/a.go -- +-@@ -4 +4 @@ +--// reference others objects [A] [B] [C] [C.F] [C.PF] +-+// reference others objects [AA] [B] [C] [C.F] [C.PF] +-@@ -7 +7 @@ +--const A = 1 //@rename("A", "AA", AToAA) +-+const AA = 1 //@rename("A", "AA", AToAA) +--- @AToAA/b/b.go -- +-@@ -5 +5 @@ +--// FooBar just for test [aa.Foo] [aa.A] [aa.B] [aa.C] [aa.C.F] [aa.C.PF] +-+// FooBar just for test [aa.Foo] [aa.AA] [aa.B] [aa.C] [aa.C.F] [aa.C.PF] +--- @BToBB/a/a.go -- +-@@ -4 +4 @@ +--// reference others objects [A] [B] [C] [C.F] [C.PF] +-+// reference others objects [A] [BB] [C] [C.F] [C.PF] +-@@ -9 +9 @@ +--var B = 1 //@rename("B", "BB", BToBB) +-+var BB = 1 //@rename("B", "BB", BToBB) +--- @BToBB/b/b.go -- +-@@ -5 +5 @@ +--// FooBar just for test [aa.Foo] [aa.A] [aa.B] [aa.C] [aa.C.F] [aa.C.PF] +-+// FooBar just for test [aa.Foo] [aa.A] [aa.BB] [aa.C] [aa.C.F] [aa.C.PF] +--- @CToCC/a/a.go -- +-@@ -4 +4 @@ +--// reference others objects [A] [B] [C] [C.F] [C.PF] +-+// reference others objects [A] [B] [CC] [CC.F] [CC.PF] +-@@ -11 +11 @@ +--type C int //@rename("C", "CC", CToCC) +-+type CC int //@rename("C", "CC", CToCC) +-@@ -13 +13 @@ +--func (C) F() {} //@rename("F", "FF", FToFF) +-+func (CC) F() {} //@rename("F", "FF", FToFF) +-@@ -15 +15 @@ +--func (*C) PF() {} //@rename("PF", "PFF", PFToPFF) +-+func (*CC) PF() {} //@rename("PF", "PFF", PFToPFF) +--- @CToCC/b/b.go -- +-@@ -5 +5 @@ +--// FooBar just for test [aa.Foo] [aa.A] [aa.B] [aa.C] [aa.C.F] [aa.C.PF] +-+// FooBar just for test [aa.Foo] [aa.A] [aa.B] [aa.CC] [aa.CC.F] [aa.CC.PF] +--- @FToFF/a/a.go -- +-@@ -4 +4 @@ +--// reference others objects [A] [B] [C] [C.F] [C.PF] +-+// reference others objects [A] [B] [C] [C.FF] [C.PF] +-@@ -13 +13 @@ +--func (C) F() {} //@rename("F", "FF", FToFF) +-+func (C) FF() {} //@rename("F", "FF", FToFF) +--- @FToFF/b/b.go -- +-@@ -5 +5 @@ +--// FooBar just for test [aa.Foo] [aa.A] [aa.B] [aa.C] [aa.C.F] [aa.C.PF] +-+// FooBar just for test [aa.Foo] [aa.A] [aa.B] [aa.C] [aa.C.FF] [aa.C.PF] +--- @PFToPFF/a/a.go -- +-@@ -4 +4 @@ +--// reference others objects [A] 
[B] [C] [C.F] [C.PF] +-+// reference others objects [A] [B] [C] [C.F] [C.PFF] +-@@ -15 +15 @@ +--func (*C) PF() {} //@rename("PF", "PFF", PFToPFF) +-+func (*C) PFF() {} //@rename("PF", "PFF", PFToPFF) +--- @PFToPFF/b/b.go -- +-@@ -5 +5 @@ +--// FooBar just for test [aa.Foo] [aa.A] [aa.B] [aa.C] [aa.C.F] [aa.C.PF] +-+// FooBar just for test [aa.Foo] [aa.A] [aa.B] [aa.C] [aa.C.F] [aa.C.PFF] +--- @pkgRename/b/b.go -- +-@@ -3 +3 @@ +--import aa "example.com/a" //@rename("aa", "a", pkgRename) +-+import "example.com/a" //@rename("aa", "a", pkgRename) +-@@ -5,3 +5,3 @@ +--// FooBar just for test [aa.Foo] [aa.A] [aa.B] [aa.C] [aa.C.F] [aa.C.PF] +--// reference pointer type [*aa.D] +--// reference generic type links [aa.E] [aa.E.Foo] +-+// FooBar just for test [a.Foo] [a.A] [a.B] [a.C] [a.C.F] [a.C.PF] +-+// reference pointer type [*a.D] +-+// reference generic type links [a.E] [a.E.Foo] +-@@ -9,2 +9,2 @@ +-- aa.Foo() +-- var e aa.E[int] +-+ a.Foo() +-+ var e a.E[int] +--- @DToDD/a/a.go -- +-@@ -17,2 +17,2 @@ +--// D just for test [*D] +--type D int //@rename("D", "DD", DToDD) +-+// DD just for test [*DD] +-+type DD int //@rename("D", "DD", DToDD) +--- @DToDD/b/b.go -- +-@@ -6 +6 @@ +--// reference pointer type [*aa.D] +-+// reference pointer type [*aa.DD] +--- @EToEE/a/a.go -- +-@@ -20,2 +20,2 @@ +--// E test generic type doc link [E] [E.Foo] +--type E[T any] struct { //@rename("E", "EE", EToEE) +-+// EE test generic type doc link [EE] [EE.Foo] +-+type EE[T any] struct { //@rename("E", "EE", EToEE) +-@@ -25 +25 @@ +--func (E[T]) Foo() {} //@rename("Foo", "Bar", EFooToEBar) +-+func (EE[T]) Foo() {} //@rename("Foo", "Bar", EFooToEBar) +--- @EToEE/b/b.go -- +-@@ -7 +7 @@ +--// reference generic type links [aa.E] [aa.E.Foo] +-+// reference generic type links [aa.EE] [aa.EE.Foo] +-@@ -10 +10 @@ +-- var e aa.E[int] +-+ var e aa.EE[int] +--- @EFooToEBar/a/a.go -- +-@@ -20 +20 @@ +--// E test generic type doc link [E] [E.Foo] +-+// E test generic type doc link [E] [E.Bar] +-@@ -25 +25 @@ +--func (E[T]) Foo() {} //@rename("Foo", "Bar", EFooToEBar) +-+func (E[T]) Bar() {} //@rename("Foo", "Bar", EFooToEBar) +--- @EFooToEBar/b/b.go -- +-@@ -7 +7 @@ +--// reference generic type links [aa.E] [aa.E.Foo] +-+// reference generic type links [aa.E] [aa.E.Bar] +-@@ -11 +11 @@ +-- e.Foo() +-+ e.Bar() +diff -urN a/gopls/internal/test/marker/testdata/rename/embed.txt b/gopls/internal/test/marker/testdata/rename/embed.txt +--- a/gopls/internal/test/marker/testdata/rename/embed.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/rename/embed.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,57 +0,0 @@ +-This test exercises renaming of types used as embedded fields. 
+- +--- go.mod -- +-module example.com +-go 1.12 +- +--- a/a.go -- +-package a +- +-type A int //@rename("A", "A2", type) +- +--- b/b.go -- +-package b +- +-import "example.com/a" +- +-type B struct { a.A } //@rename("A", "A3", embedA) +- +-var _ = new(B).A //@renameerr("A", "A4", errAnonField) +- +-type C int +- +-type D struct { +- C //@rename("C", "C2", embedC) +-} +- +--- @errAnonField -- +-an embedded field must be renamed at its declaration (since it renames the type too) +--- @type/a/a.go -- +-@@ -3 +3 @@ +--type A int //@rename("A", "A2", type) +-+type A2 int //@rename("A", "A2", type) +--- @type/b/b.go -- +-@@ -5 +5 @@ +--type B struct { a.A } //@rename("A", "A3", embedA) +-+type B struct { a.A2 } //@rename("A", "A3", embedA) +-@@ -7 +7 @@ +--var _ = new(B).A //@renameerr("A", "A4", errAnonField) +-+var _ = new(B).A2 //@renameerr("A", "A4", errAnonField) +--- @embedA/a/a.go -- +-@@ -3 +3 @@ +--type A int //@rename("A", "A2", type) +-+type A3 int //@rename("A", "A2", type) +--- @embedA/b/b.go -- +-@@ -5 +5 @@ +--type B struct { a.A } //@rename("A", "A3", embedA) +-+type B struct { a.A3 } //@rename("A", "A3", embedA) +-@@ -7 +7 @@ +--var _ = new(B).A //@renameerr("A", "A4", errAnonField) +-+var _ = new(B).A3 //@renameerr("A", "A4", errAnonField) +--- @embedC/b/b.go -- +-@@ -9 +9 @@ +--type C int +-+type C2 int +-@@ -12 +12 @@ +-- C //@rename("C", "C2", embedC) +-+ C2 //@rename("C", "C2", embedC) +diff -urN a/gopls/internal/test/marker/testdata/rename/func.txt b/gopls/internal/test/marker/testdata/rename/func.txt +--- a/gopls/internal/test/marker/testdata/rename/func.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/rename/func.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,55 +0,0 @@ +-This test checks basic functionality for renaming (=changing) a function +-signature. 
+- +--- go.mod -- +-module example.com +- +-go 1.20 +- +--- a/a.go -- +-package a +- +-//@rename(Foo, "func(i int, s string)", unchanged) +-//@rename(Foo, "func(s string, i int)", reverse) +-//@rename(Foo, "func(s string)", dropi) +-//@rename(Foo, "func(i int)", drops) +-//@rename(Foo, "func()", dropboth) +-//@renameerr(Foo, "func(i int, s string, t bool)", "not yet supported") +-//@renameerr(Foo, "func(i string)", "not yet supported") +-//@renameerr(Foo, "func(i int, s string) int", "not yet supported") +- +-func Foo(i int, s string) { //@loc(Foo, "func") +-} +- +-func _() { +- Foo(0, "hi") +-} +--- @dropboth/a/a.go -- +-@@ -12 +12 @@ +--func Foo(i int, s string) { //@loc(Foo, "func") +-+func Foo() { //@loc(Foo, "func") +-@@ -16 +16 @@ +-- Foo(0, "hi") +-+ Foo() +--- @dropi/a/a.go -- +-@@ -12 +12 @@ +--func Foo(i int, s string) { //@loc(Foo, "func") +-+func Foo(s string) { //@loc(Foo, "func") +-@@ -16 +16 @@ +-- Foo(0, "hi") +-+ Foo("hi") +--- @drops/a/a.go -- +-@@ -12 +12 @@ +--func Foo(i int, s string) { //@loc(Foo, "func") +-+func Foo(i int) { //@loc(Foo, "func") +-@@ -16 +16 @@ +-- Foo(0, "hi") +-+ Foo(0) +--- @reverse/a/a.go -- +-@@ -12 +12 @@ +--func Foo(i int, s string) { //@loc(Foo, "func") +-+func Foo(s string, i int) { //@loc(Foo, "func") +-@@ -16 +16 @@ +-- Foo(0, "hi") +-+ Foo("hi", 0) +--- @unchanged/a/a.go -- +diff -urN a/gopls/internal/test/marker/testdata/rename/generics_basic.txt b/gopls/internal/test/marker/testdata/rename/generics_basic.txt +--- a/gopls/internal/test/marker/testdata/rename/generics_basic.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/rename/generics_basic.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,107 +0,0 @@ +-This test exercise basic renaming of generic code. +- +--- embedded.go -- +-package a +- +-type foo[P any] int //@rename("foo", "bar", fooTobar) +- +-var x struct{ foo[int] } +- +-var _ = x.foo +- +--- @fooTobar/embedded.go -- +-@@ -3 +3 @@ +--type foo[P any] int //@rename("foo", "bar", fooTobar) +-+type bar[P any] int //@rename("foo", "bar", fooTobar) +-@@ -5 +5 @@ +--var x struct{ foo[int] } +-+var x struct{ bar[int] } +-@@ -7 +7 @@ +--var _ = x.foo +-+var _ = x.bar +--- generics.go -- +-package a +- +-type G[P any] struct { +- F int +-} +- +-func (G[_]) M() {} +- +-func F[P any](P) { +- var p P //@rename("P", "Q", PToQ) +- _ = p +-} +- +-func _() { +- var x G[int] //@rename("G", "H", GToH) +- _ = x.F //@rename("F", "K", FToK) +- x.M() //@rename("M", "N", MToN) +- +- var y G[string] +- _ = y.F +- y.M() +-} +- +--- @FToK/generics.go -- +-@@ -4 +4 @@ +-- F int +-+ K int +-@@ -16 +16 @@ +-- _ = x.F //@rename("F", "K", FToK) +-+ _ = x.K //@rename("F", "K", FToK) +-@@ -20 +20 @@ +-- _ = y.F +-+ _ = y.K +--- @GToH/generics.go -- +-@@ -3 +3 @@ +--type G[P any] struct { +-+type H[P any] struct { +-@@ -7 +7 @@ +--func (G[_]) M() {} +-+func (H[_]) M() {} +-@@ -15 +15 @@ +-- var x G[int] //@rename("G", "H", GToH) +-+ var x H[int] //@rename("G", "H", GToH) +-@@ -19 +19 @@ +-- var y G[string] +-+ var y H[string] +--- @MToN/generics.go -- +-@@ -7 +7 @@ +--func (G[_]) M() {} +-+func (G[_]) N() {} +-@@ -17 +17 @@ +-- x.M() //@rename("M", "N", MToN) +-+ x.N() //@rename("M", "N", MToN) +-@@ -21 +21 @@ +-- y.M() +-+ y.N() +--- @PToQ/generics.go -- +-@@ -9,2 +9,2 @@ +--func F[P any](P) { +-- var p P //@rename("P", "Q", PToQ) +-+func F[Q any](Q) { +-+ var p Q //@rename("P", "Q", PToQ) +--- unions.go -- +-package a +- +-type T string //@rename("T", "R", TToR) +- +-type C interface { +- T | ~int //@rename("T", "S", TToS) +-} +- 
+--- @TToR/unions.go -- +-@@ -3 +3 @@ +--type T string //@rename("T", "R", TToR) +-+type R string //@rename("T", "R", TToR) +-@@ -6 +6 @@ +-- T | ~int //@rename("T", "S", TToS) +-+ R | ~int //@rename("T", "S", TToS) +--- @TToS/unions.go -- +-@@ -3 +3 @@ +--type T string //@rename("T", "R", TToR) +-+type S string //@rename("T", "R", TToR) +-@@ -6 +6 @@ +-- T | ~int //@rename("T", "S", TToS) +-+ S | ~int //@rename("T", "S", TToS) +diff -urN a/gopls/internal/test/marker/testdata/rename/generics.txt b/gopls/internal/test/marker/testdata/rename/generics.txt +--- a/gopls/internal/test/marker/testdata/rename/generics.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/rename/generics.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,190 +0,0 @@ +-This test exercises various renaming features on generic code. +- +-Fixed bugs: +- +-- golang/go#61614: renaming a method of a type in a package that uses type +- parameter composite lits used to panic, because previous iterations of the +- satisfy analysis did not account for this language feature. +- +-- golang/go#61635: renaming type parameters did not work when they were +- capitalized and the package was imported by another package. +- +--- go.mod -- +-module example.com +-go 1.20 +- +--- a.go -- +-package a +- +-type I int +- +-func (I) m() {} //@rename("m", "M", mToM) +- +-func _[P ~[]int]() { +- _ = P{} +-} +- +-var _ = I.m +- +--- @mToM/a.go -- +-@@ -5 +5 @@ +--func (I) m() {} //@rename("m", "M", mToM) +-+func (I) M() {} //@rename("m", "M", mToM) +-@@ -11 +11 @@ +--var _ = I.m +-+var _ = I.M +--- g.go -- +-package a +- +-type S[P any] struct { //@rename("P", "Q", PToQ) +- P P +- F func(P) P +-} +- +-func F[R any](r R) { +- var _ R //@rename("R", "S", RToS) +-} +- +--- @PToQ/g.go -- +-@@ -3,3 +3,3 @@ +--type S[P any] struct { //@rename("P", "Q", PToQ) +-- P P +-- F func(P) P +-+type S[Q any] struct { //@rename("P", "Q", PToQ) +-+ P Q +-+ F func(Q) Q +--- @RToS/g.go -- +-@@ -8,2 +8,2 @@ +--func F[R any](r R) { +-- var _ R //@rename("R", "S", RToS) +-+func F[S any](r S) { +-+ var _ S //@rename("R", "S", RToS) +--- issue61635/p.go -- +-package issue61635 +- +-type builder[S ~[]F, F ~string] struct { //@rename("S", "T", SToT) +- name string +- elements S +- elemData map[F][]ElemData[F] +- // other fields... +-} +- +-type ElemData[F ~string] struct { +- Name F +- // other fields... 
+-} +- +-type BuilderImpl[S ~[]F, F ~string] struct{ builder[S, F] } +- +--- importer/i.go -- +-package importer +- +-import "example.com/issue61635" // importing is necessary to repro golang/go#61635 +- +-var _ issue61635.ElemData[string] +- +--- @SToT/issue61635/p.go -- +-@@ -3 +3 @@ +--type builder[S ~[]F, F ~string] struct { //@rename("S", "T", SToT) +-+type builder[T ~[]F, F ~string] struct { //@rename("S", "T", SToT) +-@@ -5 +5 @@ +-- elements S +-+ elements T +--- instances/type.go -- +-package instances +- +-type R[P any] struct { //@rename("R", "u", Rtou) +- Next *R[P] //@rename("R", "s", RTos) +-} +- +-func (rv R[P]) Do(R[P]) R[P] { //@rename("Do", "Do1", DoToDo1) +- var x R[P] +- return rv.Do(x) //@rename("Do", "Do2", DoToDo2) +-} +- +-func _() { +- var x R[int] //@rename("R", "r", RTor) +- x = x.Do(x) +-} +- +--- @RTos/instances/type.go -- +-@@ -3,2 +3,2 @@ +--type R[P any] struct { //@rename("R", "u", Rtou) +-- Next *R[P] //@rename("R", "s", RTos) +-+type s[P any] struct { //@rename("R", "u", Rtou) +-+ Next *s[P] //@rename("R", "s", RTos) +-@@ -7,2 +7,2 @@ +--func (rv R[P]) Do(R[P]) R[P] { //@rename("Do", "Do1", DoToDo1) +-- var x R[P] +-+func (rv s[P]) Do(s[P]) s[P] { //@rename("Do", "Do1", DoToDo1) +-+ var x s[P] +-@@ -13 +13 @@ +-- var x R[int] //@rename("R", "r", RTor) +-+ var x s[int] //@rename("R", "r", RTor) +--- @Rtou/instances/type.go -- +-@@ -3,2 +3,2 @@ +--type R[P any] struct { //@rename("R", "u", Rtou) +-- Next *R[P] //@rename("R", "s", RTos) +-+type u[P any] struct { //@rename("R", "u", Rtou) +-+ Next *u[P] //@rename("R", "s", RTos) +-@@ -7,2 +7,2 @@ +--func (rv R[P]) Do(R[P]) R[P] { //@rename("Do", "Do1", DoToDo1) +-- var x R[P] +-+func (rv u[P]) Do(u[P]) u[P] { //@rename("Do", "Do1", DoToDo1) +-+ var x u[P] +-@@ -13 +13 @@ +-- var x R[int] //@rename("R", "r", RTor) +-+ var x u[int] //@rename("R", "r", RTor) +--- @DoToDo1/instances/type.go -- +-@@ -7 +7 @@ +--func (rv R[P]) Do(R[P]) R[P] { //@rename("Do", "Do1", DoToDo1) +-+func (rv R[P]) Do1(R[P]) R[P] { //@rename("Do", "Do1", DoToDo1) +-@@ -9 +9 @@ +-- return rv.Do(x) //@rename("Do", "Do2", DoToDo2) +-+ return rv.Do1(x) //@rename("Do", "Do2", DoToDo2) +-@@ -14 +14 @@ +-- x = x.Do(x) +-+ x = x.Do1(x) +--- @DoToDo2/instances/type.go -- +-@@ -7 +7 @@ +--func (rv R[P]) Do(R[P]) R[P] { //@rename("Do", "Do1", DoToDo1) +-+func (rv R[P]) Do2(R[P]) R[P] { //@rename("Do", "Do1", DoToDo1) +-@@ -9 +9 @@ +-- return rv.Do(x) //@rename("Do", "Do2", DoToDo2) +-+ return rv.Do2(x) //@rename("Do", "Do2", DoToDo2) +-@@ -14 +14 @@ +-- x = x.Do(x) +-+ x = x.Do2(x) +--- instances/func.go -- +-package instances +- +-func Foo[P any](p P) { //@rename("Foo", "Bar", FooToBar) +- Foo(p) //@rename("Foo", "Baz", FooToBaz) +-} +- +--- @FooToBar/instances/func.go -- +-@@ -3,2 +3,2 @@ +--func Foo[P any](p P) { //@rename("Foo", "Bar", FooToBar) +-- Foo(p) //@rename("Foo", "Baz", FooToBaz) +-+func Bar[P any](p P) { //@rename("Foo", "Bar", FooToBar) +-+ Bar(p) //@rename("Foo", "Baz", FooToBaz) +--- @FooToBaz/instances/func.go -- +-@@ -3,2 +3,2 @@ +--func Foo[P any](p P) { //@rename("Foo", "Bar", FooToBar) +-- Foo(p) //@rename("Foo", "Baz", FooToBaz) +-+func Baz[P any](p P) { //@rename("Foo", "Bar", FooToBar) +-+ Baz(p) //@rename("Foo", "Baz", FooToBaz) +--- @RTor/instances/type.go -- +-@@ -3,2 +3,2 @@ +--type R[P any] struct { //@rename("R", "u", Rtou) +-- Next *R[P] //@rename("R", "s", RTos) +-+type r[P any] struct { //@rename("R", "u", Rtou) +-+ Next *r[P] //@rename("R", "s", RTos) +-@@ -7,2 +7,2 @@ +--func (rv R[P]) Do(R[P]) R[P] { 
//@rename("Do", "Do1", DoToDo1) +-- var x R[P] +-+func (rv r[P]) Do(r[P]) r[P] { //@rename("Do", "Do1", DoToDo1) +-+ var x r[P] +-@@ -13 +13 @@ +-- var x R[int] //@rename("R", "r", RTor) +-+ var x r[int] //@rename("R", "r", RTor) +diff -urN a/gopls/internal/test/marker/testdata/rename/issue39614.txt b/gopls/internal/test/marker/testdata/rename/issue39614.txt +--- a/gopls/internal/test/marker/testdata/rename/issue39614.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/rename/issue39614.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,18 +0,0 @@ +- +--- flags -- +--ignore_extra_diags +- +--- p.go -- +-package issue39614 +- +-func fn() { +- var foo bool //@rename("foo", "bar", fooTobar) +- make(map[string]bool +- if true { +- } +-} +- +--- @fooTobar/p.go -- +-@@ -4 +4 @@ +-- var foo bool //@rename("foo", "bar", fooTobar) +-+ var bar bool //@rename("foo", "bar", fooTobar) +diff -urN a/gopls/internal/test/marker/testdata/rename/issue42134.txt b/gopls/internal/test/marker/testdata/rename/issue42134.txt +--- a/gopls/internal/test/marker/testdata/rename/issue42134.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/rename/issue42134.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,80 +0,0 @@ +-Regression test for #42134, +-"rename fails to update doc comment for local variable of function type" +- +--- 1.go -- +-package issue42134 +- +-func _() { +- // foo computes things. +- foo := func() {} +- +- foo() //@rename("foo", "bar", fooTobar) +-} +--- @fooTobar/1.go -- +-@@ -4,2 +4,2 @@ +-- // foo computes things. +-- foo := func() {} +-+ // bar computes things. +-+ bar := func() {} +-@@ -7 +7 @@ +-- foo() //@rename("foo", "bar", fooTobar) +-+ bar() //@rename("foo", "bar", fooTobar) +--- 2.go -- +-package issue42134 +- +-import "fmt" +- +-func _() { +- // minNumber is a min number. +- // Second line. +- minNumber := min(1, 2) +- fmt.Println(minNumber) //@rename("minNumber", "res", minNumberTores) +-} +- +-func min(a, b int) int { return a + b } +--- @minNumberTores/2.go -- +-@@ -6 +6 @@ +-- // minNumber is a min number. +-+ // res is a min number. +-@@ -8,2 +8,2 @@ +-- minNumber := min(1, 2) +-- fmt.Println(minNumber) //@rename("minNumber", "res", minNumberTores) +-+ res := min(1, 2) +-+ fmt.Println(res) //@rename("minNumber", "res", minNumberTores) +--- 3.go -- +-package issue42134 +- +-func _() { +- /* +- tests contains test cases +- */ +- tests := []struct { //@rename("tests", "testCases", testsTotestCases) +- in, out string +- }{} +- _ = tests +-} +--- @testsTotestCases/3.go -- +-@@ -5 +5 @@ +-- tests contains test cases +-+ testCases contains test cases +-@@ -7 +7 @@ +-- tests := []struct { //@rename("tests", "testCases", testsTotestCases) +-+ testCases := []struct { //@rename("tests", "testCases", testsTotestCases) +-@@ -10 +10 @@ +-- _ = tests +-+ _ = testCases +--- 4.go -- +-package issue42134 +- +-func _() { +- // a is equal to 5. 
Comment must stay the same +- +- a := 5 +- _ = a //@rename("a", "b", aTob) +-} +--- @aTob/4.go -- +-@@ -6,2 +6,2 @@ +-- a := 5 +-- _ = a //@rename("a", "b", aTob) +-+ b := 5 +-+ _ = b //@rename("a", "b", aTob) +diff -urN a/gopls/internal/test/marker/testdata/rename/issue42301.txt b/gopls/internal/test/marker/testdata/rename/issue42301.txt +--- a/gopls/internal/test/marker/testdata/rename/issue42301.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/rename/issue42301.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,63 +0,0 @@ +-This test verifies the fix for golang/go#42301: renaming an ident inside its doc +-comment should also rename the ident. +- +--- go.mod -- +-module example.com +- +-go 1.21 +--- a/a.go -- +-package a +- +-// Foo doesn't do anything, Foo is just an empty function. //@rename("Foo", "Bar", fooToBar), renameerr("anything", "Bar", "no identifier found") +-func Foo() {} +- +-func _() { +- Foo() +-} +- +--- b/b.go -- +-package b +- +-import "example.com/a" +- +-func _() { +- a.Foo() +-} +- +--- c/c.go -- +-package c +- +-// A is an empty struct. //@rename("A", "B", aToB) +-type A struct {} +- +--- d/d.go -- +-package d +- +-// Bar doesn't do anything, Bar is just an empty function. //@loc(Bar, re`^.*?\bBar\b.*?\b(Bar)\b.*`), rename(Bar, "Foo", barToFoo) +-func Bar() {} +- +--- @aToB/c/c.go -- +-@@ -3,2 +3,2 @@ +--// A is an empty struct. //@rename("A", "B", aToB) +--type A struct {} +-+// B is an empty struct. //@rename("B", "B", aToB) +-+type B struct {} +--- @barToFoo/d/d.go -- +-@@ -3,2 +3,2 @@ +--// Bar doesn't do anything, Bar is just an empty function. //@loc(Bar, re`^.*?\bBar\b.*?\b(Bar)\b.*`), rename(Bar, "Foo", barToFoo) +--func Bar() {} +-+// Foo doesn't do anything, Foo is just an empty function. //@loc(Foo, re`^.*?\bBar\b.*?\b(Foo)\b.*`), rename(Foo, "Foo", barToFoo) +-+func Foo() {} +--- @fooToBar/a/a.go -- +-@@ -3,2 +3,2 @@ +--// Foo doesn't do anything, Foo is just an empty function. //@rename("Foo", "Bar", fooToBar), renameerr("anything", "Bar", "no identifier found") +--func Foo() {} +-+// Bar doesn't do anything, Bar is just an empty function. //@rename("Bar", "Bar", fooToBar), renameerr("anything", "Bar", "no identifier found") +-+func Bar() {} +-@@ -7 +7 @@ +-- Foo() +-+ Bar() +--- @fooToBar/b/b.go -- +-@@ -6 +6 @@ +-- a.Foo() +-+ a.Bar() +diff -urN a/gopls/internal/test/marker/testdata/rename/issue43616.txt b/gopls/internal/test/marker/testdata/rename/issue43616.txt +--- a/gopls/internal/test/marker/testdata/rename/issue43616.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/rename/issue43616.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,31 +0,0 @@ +-This test verifies the fix for golang/go#43616: renaming mishandles embedded +-fields. 
+- +--- p.go -- +-package issue43616 +- +-type foo int //@rename("foo", "bar", fooToBar),preparerename("oo","foo",span="foo") +- +-var x struct{ foo } //@rename("foo", "baz", fooToBaz) +- +-var _ = x.foo //@renameerr("foo", "quux", "must be renamed at its declaration") +--- @fooToBar/p.go -- +-@@ -3 +3 @@ +--type foo int //@rename("foo", "bar", fooToBar),preparerename("oo","foo",span="foo") +-+type bar int //@rename("foo", "bar", fooToBar),preparerename("oo","foo",span="foo") +-@@ -5 +5 @@ +--var x struct{ foo } //@rename("foo", "baz", fooToBaz) +-+var x struct{ bar } //@rename("foo", "baz", fooToBaz) +-@@ -7 +7 @@ +--var _ = x.foo //@renameerr("foo", "quux", "must be renamed at its declaration") +-+var _ = x.bar //@renameerr("foo", "quux", "must be renamed at its declaration") +--- @fooToBaz/p.go -- +-@@ -3 +3 @@ +--type foo int //@rename("foo", "bar", fooToBar),preparerename("oo","foo",span="foo") +-+type baz int //@rename("foo", "bar", fooToBar),preparerename("oo","foo",span="foo") +-@@ -5 +5 @@ +--var x struct{ foo } //@rename("foo", "baz", fooToBaz) +-+var x struct{ baz } //@rename("foo", "baz", fooToBaz) +-@@ -7 +7 @@ +--var _ = x.foo //@renameerr("foo", "quux", "must be renamed at its declaration") +-+var _ = x.baz //@renameerr("foo", "quux", "must be renamed at its declaration") +diff -urN a/gopls/internal/test/marker/testdata/rename/issue57479.txt b/gopls/internal/test/marker/testdata/rename/issue57479.txt +--- a/gopls/internal/test/marker/testdata/rename/issue57479.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/rename/issue57479.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,34 +0,0 @@ +-Test renaming a parameter to the name of an imported package +-referenced by one of the function parameters. +- +-See golang/go#57479 +- +--- go.mod -- +-module golang.org/lsptests/rename +- +-go 1.18 +--- a/a.go -- +-package a +- +-import ( +- "fmt" +- "math" +-) +- +-func _(x fmt.Stringer) {} //@rename("x", "fmt", xToFmt) +- +-func _(x int, y fmt.Stringer) {} //@rename("x", "fmt", xyToFmt) +- +-func _(x [math.MaxInt]bool) {} //@rename("x", "math", xToMath) +--- @xToFmt/a/a.go -- +-@@ -8 +8 @@ +--func _(x fmt.Stringer) {} //@rename("x", "fmt", xToFmt) +-+func _(fmt fmt.Stringer) {} //@rename("x", "fmt", xToFmt) +--- @xToMath/a/a.go -- +-@@ -12 +12 @@ +--func _(x [math.MaxInt]bool) {} //@rename("x", "math", xToMath) +-+func _(math [math.MaxInt]bool) {} //@rename("x", "math", xToMath) +--- @xyToFmt/a/a.go -- +-@@ -10 +10 @@ +--func _(x int, y fmt.Stringer) {} //@rename("x", "fmt", xyToFmt) +-+func _(fmt int, y fmt.Stringer) {} //@rename("x", "fmt", xyToFmt) +diff -urN a/gopls/internal/test/marker/testdata/rename/issue60752.txt b/gopls/internal/test/marker/testdata/rename/issue60752.txt +--- a/gopls/internal/test/marker/testdata/rename/issue60752.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/rename/issue60752.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,54 +0,0 @@ +- +-This test renames a receiver, type parameter, parameter or result var +-whose name matches a package-level decl. Prior to go1.22, this used to +-cause a spurious shadowing error because of an edge case in the +-behavior of types.Scope for function parameters and results. +- +-This is a regression test for issue #60752, a bug in the type checker. 
+- +--- go.mod -- +-module example.com +-go 1.18 +- +--- a/type.go -- +-package a +- +-type t int +- +--- a/recv.go -- +-package a +- +-func (v t) _() {} //@ rename("v", "t", recv) +- +--- a/param.go -- +-package a +- +-func _(v t) {} //@ rename("v", "t", param) +- +--- a/result.go -- +-package a +- +-func _() (v t) { return } //@ rename("v", "t", result) +- +--- a/typeparam.go -- +-package a +- +-func _[v t]() {} //@ renameerr("v", "t", re"would shadow (.|\n)*type.go:3:6") +- +--- b/b.go -- +-package b +- +-import _ "example.com/a" +- +--- @param/a/param.go -- +-@@ -3 +3 @@ +--func _(v t) {} //@ rename("v", "t", param) +-+func _(t t) {} //@ rename("v", "t", param) +--- @recv/a/recv.go -- +-@@ -3 +3 @@ +--func (v t) _() {} //@ rename("v", "t", recv) +-+func (t t) _() {} //@ rename("v", "t", recv) +--- @result/a/result.go -- +-@@ -3 +3 @@ +--func _() (v t) { return } //@ rename("v", "t", result) +-+func _() (t t) { return } //@ rename("v", "t", result) +diff -urN a/gopls/internal/test/marker/testdata/rename/issue60789.txt b/gopls/internal/test/marker/testdata/rename/issue60789.txt +--- a/gopls/internal/test/marker/testdata/rename/issue60789.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/rename/issue60789.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,35 +0,0 @@ +- +-This test renames an exported method of an unexported type, +-which is an edge case for objectpath, since it computes a path +-from a syntax package that is no good when applied to an +-export data package. +- +-See issue #60789. +- +--- go.mod -- +-module example.com +-go 1.12 +- +--- a/a.go -- +-package a +- +-type unexported int +-func (unexported) F() {} //@rename("F", "G", fToG) +- +-var _ = unexported(0).F +- +--- b/b.go -- +-package b +- +-// The existence of this package is sufficient to exercise +-// the bug even though it cannot reference a.unexported. +- +-import _ "example.com/a" +- +--- @fToG/a/a.go -- +-@@ -4 +4 @@ +--func (unexported) F() {} //@rename("F", "G", fToG) +-+func (unexported) G() {} //@rename("F", "G", fToG) +-@@ -6 +6 @@ +--var _ = unexported(0).F +-+var _ = unexported(0).G +diff -urN a/gopls/internal/test/marker/testdata/rename/issue61294.txt b/gopls/internal/test/marker/testdata/rename/issue61294.txt +--- a/gopls/internal/test/marker/testdata/rename/issue61294.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/rename/issue61294.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,26 +0,0 @@ +- +-This test renames a parameter var whose name is the same as a +-package-level var, which revealed a bug in isLocal. +- +-This is a regression test for issue #61294. +- +--- go.mod -- +-module example.com +-go 1.18 +- +--- a/a.go -- +-package a +- +-func One() +- +-func Two(One int) //@rename("One", "Three", OneToThree) +- +--- b/b.go -- +-package b +- +-import _ "example.com/a" +- +--- @OneToThree/a/a.go -- +-@@ -5 +5 @@ +--func Two(One int) //@rename("One", "Three", OneToThree) +-+func Two(Three int) //@rename("One", "Three", OneToThree) +diff -urN a/gopls/internal/test/marker/testdata/rename/issue61640.txt b/gopls/internal/test/marker/testdata/rename/issue61640.txt +--- a/gopls/internal/test/marker/testdata/rename/issue61640.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/rename/issue61640.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,33 +0,0 @@ +-This test verifies that gopls can rename instantiated fields. +- +--- a.go -- +-package a +- +-// This file is adapted from the example in the issue. 
+- +-type builder[S ~[]int] struct { +- elements S //@rename("elements", "elements2", OneToTwo) +-} +- +-type BuilderImpl[S ~[]int] struct{ builder[S] } +- +-func NewBuilderImpl[S ~[]int](name string) *BuilderImpl[S] { +- impl := &BuilderImpl[S]{ +- builder[S]{ +- elements: S{}, +- }, +- } +- +- _ = impl.elements +- return impl +-} +--- @OneToTwo/a.go -- +-@@ -6 +6 @@ +-- elements S //@rename("elements", "elements2", OneToTwo) +-+ elements2 S //@rename("elements", "elements2", OneToTwo) +-@@ -14 +14 @@ +-- elements: S{}, +-+ elements2: S{}, +-@@ -18 +18 @@ +-- _ = impl.elements +-+ _ = impl.elements2 +diff -urN a/gopls/internal/test/marker/testdata/rename/issue61813.txt b/gopls/internal/test/marker/testdata/rename/issue61813.txt +--- a/gopls/internal/test/marker/testdata/rename/issue61813.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/rename/issue61813.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,14 +0,0 @@ +-This test exercises the panic reported in golang/go#61813. +- +--- p.go -- +-package p +- +-type P struct{} +- +-func (P) M() {} //@rename("M", "N", MToN) +- +-var _ = []*P{{}} +--- @MToN/p.go -- +-@@ -5 +5 @@ +--func (P) M() {} //@rename("M", "N", MToN) +-+func (P) N() {} //@rename("M", "N", MToN) +diff -urN a/gopls/internal/test/marker/testdata/rename/issue65098.txt b/gopls/internal/test/marker/testdata/rename/issue65098.txt +--- a/gopls/internal/test/marker/testdata/rename/issue65098.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/rename/issue65098.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,49 +0,0 @@ +-This is a test for issue 65098: a renaming in package a does not +-propagate to package b, even though the two packages are coupled via +-an assignment in c, which is renamed. +- +- c +- / \ +- a b +- +-The bug (a dup of #58461) is not yet fixed, so the golden file records +-the wrong behavior (i.e. no changes to package b). +-TODO(adonovan): fix. +- +--- go.mod -- +-module example.com +-go 1.12 +- +--- a/a.go -- +-package a +- +-type I interface { +- F() //@ rename("F", "FF", fToFF) +-} +- +--- b/b.go -- +-package b +- +-type S struct{} +- +-func (s S) F() {} +- +--- c/c.go -- +-package c +- +-import ( +- "example.com/a" +- "example.com/b" +-) +- +-var _ a.I = b.S{} +-var _ = a.I.F +- +--- @fToFF/a/a.go -- +-@@ -4 +4 @@ +-- F() //@ rename("F", "FF", fToFF) +-+ FF() //@ rename("F", "FF", fToFF) +--- @fToFF/c/c.go -- +-@@ -9 +9 @@ +--var _ = a.I.F +-+var _ = a.I.FF +diff -urN a/gopls/internal/test/marker/testdata/rename/issue67069.txt b/gopls/internal/test/marker/testdata/rename/issue67069.txt +--- a/gopls/internal/test/marker/testdata/rename/issue67069.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/rename/issue67069.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,57 +0,0 @@ +-This test verifies spurious pkgname conflicts. +-Issue golang/go#67069. 
+- +--- settings.json -- +-{"analyses": {"unusedfunc": false}} +- +--- go.mod -- +-module example +-go 1.19 +- +--- aa/a.go -- +-package aa +- +-var cc int //@rename("cc", "aa", CToA) +-const C = 0 +-const D = 0 +- +--- aa/a_test.go -- +-package aa_test +- +-import "example/aa" +- +-var _ = aa.C //@rename("aa", "bb", AToB) +--- @CToA/aa/a.go -- +-@@ -3 +3 @@ +--var cc int //@rename("cc", "aa", CToA) +-+var aa int //@rename("cc", "aa", CToA) +--- @AToB/aa/a_test.go -- +-@@ -3 +3 @@ +--import "example/aa" +-+import bb "example/aa" +-@@ -5 +5 @@ +--var _ = aa.C //@rename("aa", "bb", AToB) +-+var _ = bb.C //@rename("aa", "bb", AToB) +--- bb/b.go -- +-package bb +- +-import "example/aa" +- +-var _ = aa.C +-var bb int //@renameerr("bb", "aa", errImportConflict) +- +--- @errImportConflict -- +-bb/b.go:6:5: renaming this var "bb" to "aa" would conflict +-bb/b.go:3:8: with this imported package name +--- aa/a_internal_test.go -- +-package aa +- +-var _ = D //@rename("D", "aa", DToA) +--- @DToA/aa/a_internal_test.go -- +-@@ -3 +3 @@ +--var _ = D //@rename("D", "aa", DToA) +-+var _ = aa //@rename("D", "aa", DToA) +--- @DToA/aa/a.go -- +-@@ -5 +5 @@ +--const D = 0 +-+const aa = 0 +diff -urN a/gopls/internal/test/marker/testdata/rename/issue70968.txt b/gopls/internal/test/marker/testdata/rename/issue70968.txt +--- a/gopls/internal/test/marker/testdata/rename/issue70968.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/rename/issue70968.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,20 +0,0 @@ +-Test that an (ill-typed) redeclaration of a name, which causes +-types.Info.Defs to lack an entry, doesn't lead to gopls to crash in +-renaming. Now, it proceeds with a partial rename. +- +-See golang/go#70968 +- +--- go.mod -- +-module example.com +-go 1.18 +- +--- a/a.go -- +-package a +- +-type T int //@ diag("T", re"T redeclared") +-type T struct { f int } //@ diag("T", re"T redeclared"), rename("f", "g", out) +- +--- @out/a/a.go -- +-@@ -4 +4 @@ +--type T struct { f int } //@ diag("T", re"T redeclared"), rename("f", "g", out) +-+type T struct { g int } //@ diag("T", re"T redeclared"), rename("f", "g", out) +diff -urN a/gopls/internal/test/marker/testdata/rename/methods.txt b/gopls/internal/test/marker/testdata/rename/methods.txt +--- a/gopls/internal/test/marker/testdata/rename/methods.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/rename/methods.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,59 +0,0 @@ +-This test exercises renaming of interface methods. 
+- +--- go.mod -- +-module example.com +-go 1.12 +- +--- a/a.go -- +-package a +- +-type A int +- +-func (A) F() {} //@renameerr("F", "G", errAfToG) +- +--- b/b.go -- +-package b +- +-import "example.com/a" +-import "example.com/c" +- +-type B interface { F() } //@rename("F", "G", BfToG) +- +-var _ B = a.A(0) +-var _ B = c.C(0) +- +-var _ = B.F +- +--- c/c.go -- +-package c +- +-type C int +- +-func (C) F() {} //@renameerr("F", "G", errCfToG) +- +--- d/d.go -- +-package d +- +-import "example.com/b" +- +-var _ = b.B.F +- +--- @errAfToG -- +-a/a.go:5:10: renaming this method "F" to "G" +-b/b.go:6:6: would make example.com/a.A no longer assignable to interface B +-b/b.go:6:20: (rename example.com/b.B.F if you intend to change both types) +--- @BfToG/b/b.go -- +-@@ -6 +6 @@ +--type B interface { F() } //@rename("F", "G", BfToG) +-+type B interface { G() } //@rename("F", "G", BfToG) +-@@ -11 +11 @@ +--var _ = B.F +-+var _ = B.G +--- @BfToG/d/d.go -- +-@@ -5 +5 @@ +--var _ = b.B.F +-+var _ = b.B.G +--- @errCfToG -- +-c/c.go:5:10: renaming this method "F" to "G" +-b/b.go:6:6: would make example.com/c.C no longer assignable to interface B +-b/b.go:6:20: (rename example.com/b.B.F if you intend to change both types) +diff -urN a/gopls/internal/test/marker/testdata/rename/packagedecl.txt b/gopls/internal/test/marker/testdata/rename/packagedecl.txt +--- a/gopls/internal/test/marker/testdata/rename/packagedecl.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/rename/packagedecl.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,26 +0,0 @@ +-This test verifies the behavior of renaming a package declaration. +- +--- settings.json -- +-{ +- "packageMove": true +-} +- +--- flags -- +--ignore_extra_diags +- +--- go.mod -- +-module golang.org/lsptests/rename +- +-go 1.20 +--- one/one.go -- +-package one //@ rename("one", "golang.org/lsptests/rename/one", sameName), rename("one", "golang.org/lsptests/rename/two", newNameSameDir), renameerr("one", "golang.org/lsptests/otherdir/one", re"invalid package path") +- +--- @newNameSameDir/one/one.go -- +-@@ -1,2 +0,0 @@ +--package one //@ rename("one", "golang.org/lsptests/rename/one", sameName), rename("one", "golang.org/lsptests/rename/two", newNameSameDir), renameerr("one", "golang.org/lsptests/otherdir/one", re"invalid package path") +-- +--- @newNameSameDir/two/one.go -- +-@@ -0,0 +1,2 @@ +-+package two //@ rename("one", "golang.org/lsptests/rename/one", sameName), rename("one", "golang.org/lsptests/rename/two", newNameSameDir), renameerr("one", "golang.org/lsptests/otherdir/one", re"invalid package path") +-+ +--- @sameName/one/one.go -- +diff -urN a/gopls/internal/test/marker/testdata/rename/prepare_func.txt b/gopls/internal/test/marker/testdata/rename/prepare_func.txt +--- a/gopls/internal/test/marker/testdata/rename/prepare_func.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/rename/prepare_func.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,44 +0,0 @@ +-This test verifies the behavior of textDocument/prepareRename on function declarations. 
+- +--- settings.json -- +-{ +- "deepCompletion": false +-} +- +--- go.mod -- +-module golang.org/lsptests +- +-go 1.18 +- +--- main.go -- +-package main +- +-func _(i int) //@ preparerename("unc", "func(i int)", span="func") +- +-func _(i int) //@ preparerename("func", "func(i int)") +- +-func _(a, b int) //@ preparerename("func", "func(a, b int)") +- +-func _(a, _ int) //@ preparerename("func", "func(a, _0 int)") +- +-func _(a, _, _ int) //@ preparerename("func", "func(a, _0, _1 int)") +- +-func _(a, _, _, d int, _ string) //@ preparerename("func", "func(a, _0, _1, d int, _2 string)") +- +-func _(a int, b string) //@ preparerename("func", "func(a int, b string)") +- +-func _(a int, b ...string) //@ preparerename("func", "func(a int, b ...string)") +- +-func _(a int, b string) error //@ preparerename("func", "func(a int, b string) error") +- +-func _(a int, b string) (int, error) //@ preparerename("func", "func(a int, b string) (int, error)") +- +-func _( //@ preparerename("func", "func(a int, b string)") +- a int, +- b string, +-) +- +-func _( //@ preparerename("func", "func(a int, b string) (int, error)") +- a int, +- b string, +-) (int, error) +diff -urN a/gopls/internal/test/marker/testdata/rename/prepare_move.txt b/gopls/internal/test/marker/testdata/rename/prepare_move.txt +--- a/gopls/internal/test/marker/testdata/rename/prepare_move.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/rename/prepare_move.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,17 +0,0 @@ +-This test verifies the behavior of textDocument/prepareRename when the experimental package move setting is enabled. +- +--- settings.json -- +-{ +- "packageMove": true +-} +- +--- go.mod -- +-module golang.org/lsptests +- +-go 1.20 +- +--- b/b.go -- +-package b //@ preparerename("b", "golang.org/lsptests/b") +- +--- a/other.go -- +-package other //@ preparerename("other", "other") // package move disabled when the package name does not match its directory base name +diff -urN a/gopls/internal/test/marker/testdata/rename/prepare.txt b/gopls/internal/test/marker/testdata/rename/prepare.txt +--- a/gopls/internal/test/marker/testdata/rename/prepare.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/rename/prepare.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,66 +0,0 @@ +-This test verifies the behavior of textDocument/prepareRename. 
+- +--- settings.json -- +-{ +- "deepCompletion": false +-} +- +--- go.mod -- +-module golang.org/lsptests +- +-go 1.18 +--- types/types.go -- +-package types +- +-type CoolAlias = int //@item(CoolAlias, "CoolAlias", "int", "type") +- +-type X struct { //@item(X_struct, "X", "struct{...}", "struct") +- x int +-} +- +-type Y struct { //@item(Y_struct, "Y", "struct{...}", "struct") +- y int +-} +- +- +-type Bob interface { //@item(Bob_interface, "Bob", "interface{...}", "interface") +- Bobby() +-} +- +-func (*X) Bobby() {} +-func (*Y) Bobby() {} +- +--- good/good0.go -- +-package good +- +-var _ = stuff +- +-func stuff() { //@item(good_stuff, "stuff", "func()", "func"),preparerename("stu", "stuff", span="stuff") +- x := 5 +- random2(x) //@preparerename("dom", "random2", span="random2") +-} +- +--- good/good1.go -- +-package good +- +-import ( +- "golang.org/lsptests/types" //@item(types_import, "types", "\"golang.org/lsptests/types\"", "package") +-) +- +-var _ = random +- +-func random() int { //@item(good_random, "random", "func() int", "func") +- _ = "random() int" //@preparerename("random", "") +- y := 6 + 7 //@preparerename("7", "") +- return y //@preparerename("return", "", span="") +-} +- +-func random2(y int) int { //@item(good_random2, "random2", "func(y int) int", "func"),item(good_y_param, "y", "int", "var") +- //@complete("", good_y_param, types_import, good_random, good_random2, good_stuff) +- var b types.Bob = &types.X{} //@preparerename("ypes","types", span="types") +- if _, ok := b.(*types.X); ok { //@complete("X", X_struct, Y_struct, Bob_interface, CoolAlias) +- _ = 0 // suppress "empty branch" diagnostic +- } +- +- return y +-} +diff -urN a/gopls/internal/test/marker/testdata/rename/random.txt b/gopls/internal/test/marker/testdata/rename/random.txt +--- a/gopls/internal/test/marker/testdata/rename/random.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/rename/random.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,238 +0,0 @@ +-This test ports some "random" rename tests from the old marker tests. 
+- +--- flags -- +--ignore_extra_diags +- +--- go.mod -- +-module golang.org/lsptests/rename +- +-go 1.18 +--- a/a.go -- +-package a +- +-import ( +- lg "log" +- "fmt" //@rename("fmt", "fmty", fmtTofmty) +- f2 "fmt" //@rename("f2", "f2name", f2Tof2name),rename("fmt", "f2y", fmtTof2y) +-) +- +-func Random() int { +- y := 6 + 7 +- return y +-} +- +-func Random2(y int) int { //@rename("y", "z", yToz) +- return y +-} +- +-type Pos struct { +- x, y int +-} +- +-func (p *Pos) Sum() int { +- return p.x + p.y //@rename("x", "myX", xTomyX) +-} +- +-func _() { +- var p Pos //@rename("p", "pos", pTopos) +- _ = p.Sum() //@rename("Sum", "GetSum", SumToGetSum) +-} +- +-func sw() { +- var x any +- +- switch y := x.(type) { //@rename("y", "y0", yToy0) +- case int: +- fmt.Printf("%d", y) //@rename("y", "y1", yToy1),rename("fmt", "format", fmtToformat) +- case string: +- lg.Printf("%s", y) //@rename("y", "y2", yToy2),rename("lg", "log", lgTolog) +- default: +- f2.Printf("%v", y) //@rename("y", "y3", yToy3),rename("f2", "fmt2", f2Tofmt2) +- } +-} +--- @SumToGetSum/a/a.go -- +-@@ -22 +22 @@ +--func (p *Pos) Sum() int { +-+func (p *Pos) GetSum() int { +-@@ -28 +28 @@ +-- _ = p.Sum() //@rename("Sum", "GetSum", SumToGetSum) +-+ _ = p.GetSum() //@rename("Sum", "GetSum", SumToGetSum) +--- @f2Tof2name/a/a.go -- +-@@ -6 +6 @@ +-- f2 "fmt" //@rename("f2", "f2name", f2Tof2name),rename("fmt", "f2y", fmtTof2y) +-+ f2name "fmt" //@rename("f2", "f2name", f2Tof2name),rename("fmt", "f2y", fmtTof2y) +-@@ -40 +40 @@ +-- f2.Printf("%v", y) //@rename("y", "y3", yToy3),rename("f2", "fmt2", f2Tofmt2) +-+ f2name.Printf("%v", y) //@rename("y", "y3", yToy3),rename("f2", "fmt2", f2Tofmt2) +--- @f2Tofmt2/a/a.go -- +-@@ -6 +6 @@ +-- f2 "fmt" //@rename("f2", "f2name", f2Tof2name),rename("fmt", "f2y", fmtTof2y) +-+ fmt2 "fmt" //@rename("f2", "f2name", f2Tof2name),rename("fmt", "f2y", fmtTof2y) +-@@ -40 +40 @@ +-- f2.Printf("%v", y) //@rename("y", "y3", yToy3),rename("f2", "fmt2", f2Tofmt2) +-+ fmt2.Printf("%v", y) //@rename("y", "y3", yToy3),rename("f2", "fmt2", f2Tofmt2) +--- @fmtTof2y/a/a.go -- +-@@ -6 +6 @@ +-- f2 "fmt" //@rename("f2", "f2name", f2Tof2name),rename("fmt", "f2y", fmtTof2y) +-+ f2y "fmt" //@rename("f2", "f2name", f2Tof2name),rename("fmt", "f2y", fmtTof2y) +-@@ -40 +40 @@ +-- f2.Printf("%v", y) //@rename("y", "y3", yToy3),rename("f2", "fmt2", f2Tofmt2) +-+ f2y.Printf("%v", y) //@rename("y", "y3", yToy3),rename("f2", "fmt2", f2Tofmt2) +--- @fmtTofmty/a/a.go -- +-@@ -5 +5 @@ +-- "fmt" //@rename("fmt", "fmty", fmtTofmty) +-+ fmty "fmt" //@rename("fmt", "fmty", fmtTofmty) +-@@ -36 +36 @@ +-- fmt.Printf("%d", y) //@rename("y", "y1", yToy1),rename("fmt", "format", fmtToformat) +-+ fmty.Printf("%d", y) //@rename("y", "y1", yToy1),rename("fmt", "format", fmtToformat) +--- @fmtToformat/a/a.go -- +-@@ -5 +5 @@ +-- "fmt" //@rename("fmt", "fmty", fmtTofmty) +-+ format "fmt" //@rename("fmt", "fmty", fmtTofmty) +-@@ -36 +36 @@ +-- fmt.Printf("%d", y) //@rename("y", "y1", yToy1),rename("fmt", "format", fmtToformat) +-+ format.Printf("%d", y) //@rename("y", "y1", yToy1),rename("fmt", "format", fmtToformat) +--- @lgTolog/a/a.go -- +-@@ -4 +4 @@ +-- lg "log" +-+ "log" +-@@ -38 +38 @@ +-- lg.Printf("%s", y) //@rename("y", "y2", yToy2),rename("lg", "log", lgTolog) +-+ log.Printf("%s", y) //@rename("y", "y2", yToy2),rename("lg", "log", lgTolog) +--- @pTopos/a/a.go -- +-@@ -27,2 +27,2 @@ +-- var p Pos //@rename("p", "pos", pTopos) +-- _ = p.Sum() //@rename("Sum", "GetSum", SumToGetSum) +-+ var pos Pos //@rename("p", "pos", pTopos) +-+ _ = 
pos.Sum() //@rename("Sum", "GetSum", SumToGetSum) +--- @xTomyX/a/a.go -- +-@@ -19 +19 @@ +-- x, y int +-+ myX, y int +-@@ -23 +23 @@ +-- return p.x + p.y //@rename("x", "myX", xTomyX) +-+ return p.myX + p.y //@rename("x", "myX", xTomyX) +--- @yToy0/a/a.go -- +-@@ -34 +34 @@ +-- switch y := x.(type) { //@rename("y", "y0", yToy0) +-+ switch y0 := x.(type) { //@rename("y", "y0", yToy0) +-@@ -36 +36 @@ +-- fmt.Printf("%d", y) //@rename("y", "y1", yToy1),rename("fmt", "format", fmtToformat) +-+ fmt.Printf("%d", y0) //@rename("y", "y1", yToy1),rename("fmt", "format", fmtToformat) +-@@ -38 +38 @@ +-- lg.Printf("%s", y) //@rename("y", "y2", yToy2),rename("lg", "log", lgTolog) +-+ lg.Printf("%s", y0) //@rename("y", "y2", yToy2),rename("lg", "log", lgTolog) +-@@ -40 +40 @@ +-- f2.Printf("%v", y) //@rename("y", "y3", yToy3),rename("f2", "fmt2", f2Tofmt2) +-+ f2.Printf("%v", y0) //@rename("y", "y3", yToy3),rename("f2", "fmt2", f2Tofmt2) +--- @yToy1/a/a.go -- +-@@ -34 +34 @@ +-- switch y := x.(type) { //@rename("y", "y0", yToy0) +-+ switch y1 := x.(type) { //@rename("y", "y0", yToy0) +-@@ -36 +36 @@ +-- fmt.Printf("%d", y) //@rename("y", "y1", yToy1),rename("fmt", "format", fmtToformat) +-+ fmt.Printf("%d", y1) //@rename("y", "y1", yToy1),rename("fmt", "format", fmtToformat) +-@@ -38 +38 @@ +-- lg.Printf("%s", y) //@rename("y", "y2", yToy2),rename("lg", "log", lgTolog) +-+ lg.Printf("%s", y1) //@rename("y", "y2", yToy2),rename("lg", "log", lgTolog) +-@@ -40 +40 @@ +-- f2.Printf("%v", y) //@rename("y", "y3", yToy3),rename("f2", "fmt2", f2Tofmt2) +-+ f2.Printf("%v", y1) //@rename("y", "y3", yToy3),rename("f2", "fmt2", f2Tofmt2) +--- @yToy2/a/a.go -- +-@@ -34 +34 @@ +-- switch y := x.(type) { //@rename("y", "y0", yToy0) +-+ switch y2 := x.(type) { //@rename("y", "y0", yToy0) +-@@ -36 +36 @@ +-- fmt.Printf("%d", y) //@rename("y", "y1", yToy1),rename("fmt", "format", fmtToformat) +-+ fmt.Printf("%d", y2) //@rename("y", "y1", yToy1),rename("fmt", "format", fmtToformat) +-@@ -38 +38 @@ +-- lg.Printf("%s", y) //@rename("y", "y2", yToy2),rename("lg", "log", lgTolog) +-+ lg.Printf("%s", y2) //@rename("y", "y2", yToy2),rename("lg", "log", lgTolog) +-@@ -40 +40 @@ +-- f2.Printf("%v", y) //@rename("y", "y3", yToy3),rename("f2", "fmt2", f2Tofmt2) +-+ f2.Printf("%v", y2) //@rename("y", "y3", yToy3),rename("f2", "fmt2", f2Tofmt2) +--- @yToy3/a/a.go -- +-@@ -34 +34 @@ +-- switch y := x.(type) { //@rename("y", "y0", yToy0) +-+ switch y3 := x.(type) { //@rename("y", "y0", yToy0) +-@@ -36 +36 @@ +-- fmt.Printf("%d", y) //@rename("y", "y1", yToy1),rename("fmt", "format", fmtToformat) +-+ fmt.Printf("%d", y3) //@rename("y", "y1", yToy1),rename("fmt", "format", fmtToformat) +-@@ -38 +38 @@ +-- lg.Printf("%s", y) //@rename("y", "y2", yToy2),rename("lg", "log", lgTolog) +-+ lg.Printf("%s", y3) //@rename("y", "y2", yToy2),rename("lg", "log", lgTolog) +-@@ -40 +40 @@ +-- f2.Printf("%v", y) //@rename("y", "y3", yToy3),rename("f2", "fmt2", f2Tofmt2) +-+ f2.Printf("%v", y3) //@rename("y", "y3", yToy3),rename("f2", "fmt2", f2Tofmt2) +--- @yToz/a/a.go -- +-@@ -14,2 +14,2 @@ +--func Random2(y int) int { //@rename("y", "z", yToz) +-- return y +-+func Random2(z int) int { //@rename("y", "z", yToz) +-+ return z +--- b/b.go -- +-package b +- +-var c int //@renameerr("int", "uint", re"cannot be renamed") +- +-func _() { +- a := 1 //@rename("a", "error", aToerror) +- a = 2 +- _ = a +-} +- +-var ( +- // Hello there. +- // Foo does the thing. 
+- Foo int //@rename("Foo", "Bob", FooToBob) +-) +- +-/* +-Hello description +-*/ +-func Hello() {} //@rename("Hello", "Goodbye", HelloToGoodbye) +- +--- c/c.go -- +-package c +- +-import "golang.org/lsptests/rename/b" +- +-func _() { +- b.Hello() //@rename("Hello", "Goodbye", HelloToGoodbye) +-} +- +--- c/c2.go -- +-package c +- +-//go:embed Static/* +-var Static embed.FS //@rename("Static", "static", StaticTostatic) +- +--- @FooToBob/b/b.go -- +-@@ -13,2 +13,2 @@ +-- // Foo does the thing. +-- Foo int //@rename("Foo", "Bob", FooToBob) +-+ // Bob does the thing. +-+ Bob int //@rename("Foo", "Bob", FooToBob) +--- @HelloToGoodbye/b/b.go -- +-@@ -18 +18 @@ +--Hello description +-+Goodbye description +-@@ -20 +20 @@ +--func Hello() {} //@rename("Hello", "Goodbye", HelloToGoodbye) +-+func Goodbye() {} //@rename("Hello", "Goodbye", HelloToGoodbye) +--- @aToerror/b/b.go -- +-@@ -6,3 +6,3 @@ +-- a := 1 //@rename("a", "error", aToerror) +-- a = 2 +-- _ = a +-+ error := 1 //@rename("a", "error", aToerror) +-+ error = 2 +-+ _ = error +--- @HelloToGoodbye/c/c.go -- +-@@ -6 +6 @@ +-- b.Hello() //@rename("Hello", "Goodbye", HelloToGoodbye) +-+ b.Goodbye() //@rename("Hello", "Goodbye", HelloToGoodbye) +--- @StaticTostatic/c/c2.go -- +-@@ -4 +4 @@ +--var Static embed.FS //@rename("Static", "static", StaticTostatic) +-+var static embed.FS //@rename("Static", "static", StaticTostatic) +diff -urN a/gopls/internal/test/marker/testdata/rename/recv.txt b/gopls/internal/test/marker/testdata/rename/recv.txt +--- a/gopls/internal/test/marker/testdata/rename/recv.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/rename/recv.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,74 +0,0 @@ +-This test exercises renaming of method receivers (golang/go#41892). +- +-Notes: +-- x to print fails for A.J because it would shadow the built-in print; +- that renaming is quietly skipped. +-- various combinations of named, aliases, and pointers are tested. +-- package b exercises generics. +-- renaming a receiver declaration causes the broader renaming; +- renaming a receiver use (see vrefz) effects only a local renaming. 
+- +--- a/a.go -- +-package a +- +-type T int +-type A = T +- +-func (T) F() {} +-func (t T) G() {} //@rename("t", "x", tx) +-func (U T) H() {} //@rename("U", "v", Uv) +-func (_ T) I() {} +-func (v A) J() { print(-v) } //@rename(re"-(v)", "z", vrefz) +-func (w *T) K() {} +-func (x *A) L() {} //@rename("x", "print", xprint) +- +--- @tx/a/a.go -- +-@@ -7,2 +7,2 @@ +--func (t T) G() {} //@rename("t", "x", tx) +--func (U T) H() {} //@rename("U", "v", Uv) +-+func (x T) G() {} //@rename("t", "x", tx) +-+func (x T) H() {} //@rename("U", "v", Uv) +-@@ -10,2 +10,2 @@ +--func (v A) J() { print(-v) } //@rename(re"-(v)", "z", vrefz) +--func (w *T) K() {} +-+func (x A) J() { print(-x) } //@rename(re"-(v)", "z", vrefz) +-+func (x *T) K() {} +--- @Uv/a/a.go -- +-@@ -7,2 +7,2 @@ +--func (t T) G() {} //@rename("t", "x", tx) +--func (U T) H() {} //@rename("U", "v", Uv) +-+func (v T) G() {} //@rename("t", "x", tx) +-+func (v T) H() {} //@rename("U", "v", Uv) +-@@ -11,2 +11,2 @@ +--func (w *T) K() {} +--func (x *A) L() {} //@rename("x", "print", xprint) +-+func (v *T) K() {} +-+func (v *A) L() {} //@rename("x", "print", xprint) +--- @xprint/a/a.go -- +-@@ -7,2 +7,2 @@ +--func (t T) G() {} //@rename("t", "x", tx) +--func (U T) H() {} //@rename("U", "v", Uv) +-+func (print T) G() {} //@rename("t", "x", tx) +-+func (print T) H() {} //@rename("U", "v", Uv) +-@@ -11,2 +11,2 @@ +--func (w *T) K() {} +--func (x *A) L() {} //@rename("x", "print", xprint) +-+func (print *T) K() {} +-+func (print *A) L() {} //@rename("x", "print", xprint) +--- @vrefz/a/a.go -- +-@@ -10 +10 @@ +--func (v A) J() { print(-v) } //@rename(re"-(v)", "z", vrefz) +-+func (z A) J() { print(-z) } //@rename(re"-(v)", "z", vrefz) +--- b/b.go -- +-package b +- +-type C[T any] int +-func (r C[T]) F() {} //@rename("r", "c", rc) +-func (r C[T]) G() {} +- +--- @rc/b/b.go -- +-@@ -4,2 +4,2 @@ +--func (r C[T]) F() {} //@rename("r", "c", rc) +--func (r C[T]) G() {} +-+func (c C[T]) F() {} //@rename("r", "c", rc) +-+func (c C[T]) G() {} +diff -urN a/gopls/internal/test/marker/testdata/rename/shadow.txt b/gopls/internal/test/marker/testdata/rename/shadow.txt +--- a/gopls/internal/test/marker/testdata/rename/shadow.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/rename/shadow.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,36 +0,0 @@ +- +--- shadow.go -- +-package shadow +- +-func _() { +- a := true +- b, c, _ := A(), B(), D() //@renameerr("A", "a", re"shadowed"),rename("B", "b", BTob),renameerr("b", "c", re"conflict"),rename("D", "d", DTod) +- d := false +- _, _, _, _ = a, b, c, d +-} +- +-func A() int { +- return 0 +-} +- +-func B() int { +- return 0 +-} +- +-func D() int { +- return 0 +-} +--- @BTob/shadow.go -- +-@@ -5 +5 @@ +-- b, c, _ := A(), B(), D() //@renameerr("A", "a", re"shadowed"),rename("B", "b", BTob),renameerr("b", "c", re"conflict"),rename("D", "d", DTod) +-+ b, c, _ := A(), b(), D() //@renameerr("A", "a", re"shadowed"),rename("B", "b", BTob),renameerr("b", "c", re"conflict"),rename("D", "d", DTod) +-@@ -14 +14 @@ +--func B() int { +-+func b() int { +--- @DTod/shadow.go -- +-@@ -5 +5 @@ +-- b, c, _ := A(), B(), D() //@renameerr("A", "a", re"shadowed"),rename("B", "b", BTob),renameerr("b", "c", re"conflict"),rename("D", "d", DTod) +-+ b, c, _ := A(), B(), d() //@renameerr("A", "a", re"shadowed"),rename("B", "b", BTob),renameerr("b", "c", re"conflict"),rename("D", "d", DTod) +-@@ -18 +18 @@ +--func D() int { +-+func d() int { +diff -urN a/gopls/internal/test/marker/testdata/rename/testy.txt 
b/gopls/internal/test/marker/testdata/rename/testy.txt +--- a/gopls/internal/test/marker/testdata/rename/testy.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/rename/testy.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,41 +0,0 @@ +- +--- flags -- +--ignore_extra_diags +- +--- testy.go -- +-package testy +- +-type tt int //@rename("tt", "testyType", ttTotestyType) +- +-func a() { +- foo := 42 //@rename("foo", "bar", fooTobar) +-} +--- testy_test.go -- +-package testy +- +-import "testing" +- +-func TestSomething(t *testing.T) { +- var x int //@rename("x", "testyX", xTotestyX) +- a() //@rename("a", "b", aTob) +-} +--- @aTob/testy.go -- +-@@ -5 +5 @@ +--func a() { +-+func b() { +--- @aTob/testy_test.go -- +-@@ -7 +7 @@ +-- a() //@rename("a", "b", aTob) +-+ b() //@rename("a", "b", aTob) +--- @fooTobar/testy.go -- +-@@ -6 +6 @@ +-- foo := 42 //@rename("foo", "bar", fooTobar) +-+ bar := 42 //@rename("foo", "bar", fooTobar) +--- @ttTotestyType/testy.go -- +-@@ -3 +3 @@ +--type tt int //@rename("tt", "testyType", ttTotestyType) +-+type testyType int //@rename("tt", "testyType", ttTotestyType) +--- @xTotestyX/testy_test.go -- +-@@ -6 +6 @@ +-- var x int //@rename("x", "testyX", xTotestyX) +-+ var testyX int //@rename("x", "testyX", xTotestyX) +diff -urN a/gopls/internal/test/marker/testdata/rename/typeswitch.txt b/gopls/internal/test/marker/testdata/rename/typeswitch.txt +--- a/gopls/internal/test/marker/testdata/rename/typeswitch.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/rename/typeswitch.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,24 +0,0 @@ +-This test covers the special case of renaming a type switch var. +- +--- p.go -- +-package p +- +-func _(x any) { +- switch y := x.(type) { //@rename("y", "z", yToZ) +- case string: +- print(y) //@rename("y", "z", yToZ) +- default: +- print(y) //@rename("y", "z", yToZ) +- } +-} +- +--- @yToZ/p.go -- +-@@ -4 +4 @@ +-- switch y := x.(type) { //@rename("y", "z", yToZ) +-+ switch z := x.(type) { //@rename("y", "z", yToZ) +-@@ -6 +6 @@ +-- print(y) //@rename("y", "z", yToZ) +-+ print(z) //@rename("y", "z", yToZ) +-@@ -8 +8 @@ +-- print(y) //@rename("y", "z", yToZ) +-+ print(z) //@rename("y", "z", yToZ) +diff -urN a/gopls/internal/test/marker/testdata/rename/unexported.txt b/gopls/internal/test/marker/testdata/rename/unexported.txt +--- a/gopls/internal/test/marker/testdata/rename/unexported.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/rename/unexported.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,25 +0,0 @@ +- +-This test attempts to rename a.S.X to x, which would make it +-inaccessible from its external test package. The rename tool +-should report an error rather than wrecking the program. +-See issue #59403. 
+- +--- go.mod -- +-module example.com +-go 1.12 +- +--- a/a.go -- +-package a +- +-var S struct{ X int } //@renameerr("X", "x", oops) +- +--- a/a_test.go -- +-package a_test +- +-import "example.com/a" +- +-var Y = a.S.X +- +--- @oops -- +-a/a.go:3:15: renaming "X" to "x" would make it unexported +-a/a_test.go:5:13: breaking references from packages such as "example.com/a_test" +diff -urN a/gopls/internal/test/marker/testdata/selectionrange/selectionrange.txt b/gopls/internal/test/marker/testdata/selectionrange/selectionrange.txt +--- a/gopls/internal/test/marker/testdata/selectionrange/selectionrange.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/selectionrange/selectionrange.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,42 +0,0 @@ +-This test checks selection range functionality. +- +--- foo.go -- +-package foo +- +-import "time" +- +-func Bar(x, y int, t time.Time) int { +- zs := []int{1, 2, 3} //@selectionrange("1", a) +- +- for _, z := range zs { +- x = x + z + y + zs[1] //@selectionrange("1", b) +- } +- +- return x + y //@selectionrange("+", c) +-} +--- @a -- +-Ranges 0: +- 5:13-5:14 "1" +- 5:7-5:21 "[]int{1, 2, 3}" +- 5:1-5:21 "zs := []int{1, 2, 3}" +- 4:36-12:1 "{\\n\tzs := []int{...range(\"+\", c)\\n}" +- 4:0-12:1 "func Bar(x, y i...range(\"+\", c)\\n}" +- 0:0-12:1 "package foo\\n\\nim...range(\"+\", c)\\n}" +--- @b -- +-Ranges 0: +- 8:21-8:22 "1" +- 8:18-8:23 "zs[1]" +- 8:6-8:23 "x + z + y + zs[1]" +- 8:2-8:23 "x = x + z + y + zs[1]" +- 7:22-9:2 "{\\n\t\tx = x + z +...ange(\"1\", b)\\n\t}" +- 7:1-9:2 "for _, z := ran...ange(\"1\", b)\\n\t}" +- 4:36-12:1 "{\\n\tzs := []int{...range(\"+\", c)\\n}" +- 4:0-12:1 "func Bar(x, y i...range(\"+\", c)\\n}" +- 0:0-12:1 "package foo\\n\\nim...range(\"+\", c)\\n}" +--- @c -- +-Ranges 0: +- 11:8-11:13 "x + y" +- 11:1-11:13 "return x + y" +- 4:36-12:1 "{\\n\tzs := []int{...range(\"+\", c)\\n}" +- 4:0-12:1 "func Bar(x, y i...range(\"+\", c)\\n}" +- 0:0-12:1 "package foo\\n\\nim...range(\"+\", c)\\n}" +diff -urN a/gopls/internal/test/marker/testdata/signature/generic.txt b/gopls/internal/test/marker/testdata/signature/generic.txt +--- a/gopls/internal/test/marker/testdata/signature/generic.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/signature/generic.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,21 +0,0 @@ +-This test checks signature help on generic signatures. +- +--- g.go -- +-package g +- +-type M[K comparable, V any] map[K]V +- +-// golang/go#61189: signatureHelp must handle pointer receivers. +-func (m *M[K, V]) Get(k K) V { +- return (*m)[k] +-} +- +-func Get[K comparable, V any](m M[K, V], k K) V { +- return m[k] +-} +- +-func _() { +- var m M[int, string] +- _ = m.Get(0) //@signature("(", "Get(k int) string", -1) +- _ = Get(m, 0) //@signature("0", "Get(m M[int, string], k int) string", 1) +-} +diff -urN a/gopls/internal/test/marker/testdata/signature/issue63804.txt b/gopls/internal/test/marker/testdata/signature/issue63804.txt +--- a/gopls/internal/test/marker/testdata/signature/issue63804.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/signature/issue63804.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,13 +0,0 @@ +-Regresson test for #63804: conversion to built-in type caused panic. +- +-the server's Signature method never returns an actual error, +-so the best we can assert is that there is no result. 
+- +--- go.mod -- +-module example.com +-go 1.18 +- +--- a/a.go -- +-package a +- +-var _ = int(123) //@signature("123", "", 0) +diff -urN a/gopls/internal/test/marker/testdata/signature/issue69552.txt b/gopls/internal/test/marker/testdata/signature/issue69552.txt +--- a/gopls/internal/test/marker/testdata/signature/issue69552.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/signature/issue69552.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,14 +0,0 @@ +-Regresson test for #69552: panic in activeParam of a builtin, when requesting +-signature help outside of the argument list. +- +--- go.mod -- +-module example.com +-go 1.18 +- +--- a/a.go -- +-package a +- +-func _() { +- _ = len([]int{}) //@signature("en", "len(v Type) int", -1) +-} +- +diff -urN a/gopls/internal/test/marker/testdata/signature/signature.txt b/gopls/internal/test/marker/testdata/signature/signature.txt +--- a/gopls/internal/test/marker/testdata/signature/signature.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/signature/signature.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,259 +0,0 @@ +-This test exercises basic tests for signature help. +- +--- flags -- +--ignore_extra_diags +- +--- go.mod -- +-module golang.org/lsptests +- +-go 1.18 +- +--- signature/signature.go -- +-// Package signature has tests for signature help. +-package signature +- +-import ( +- "bytes" +- "encoding/json" +- "math/big" +- "fmt" +-) +- +-func Foo(a string, b int) (c bool) { +- return +-} +- +-func Bar(float64, ...byte) { +-} +- +-func FooArr(a []int) { +- +-} +- +-func NoArgs() { +-} +- +-type myStruct struct{} +- +-type Bar struct { +- A, B, C, D string +-} +- +-func (*myStruct) foo(e *json.Decoder) (*big.Int, error) { +- return nil, nil +-} +- +-type MyType struct{} +- +-type MyFunc func(foo int) string +- +-type Alias = int +-type OtherAlias = int +-type StringAlias = string +- +-func AliasSlice(a []*Alias) (b Alias) { return 0 } +-func AliasMap(a map[*Alias]StringAlias) (b, c map[*Alias]StringAlias) { return nil, nil } +-func OtherAliasMap(a, b map[Alias]OtherAlias) map[Alias]OtherAlias { return nil } +- +-func Qux() { +- Foo("foo", 123) //@signature("(", "Foo(a string, b int) (c bool)", -1) +- Foo("foo", 123) //@signature("123", "Foo(a string, b int) (c bool)", 1) +- Foo("foo", 123) //@signature(",", "Foo(a string, b int) (c bool)", 0) +- Foo("foo", 123) //@signature(" 1", "Foo(a string, b int) (c bool)", 1) +- Foo("foo", 123) //@signature(")", "Foo(a string, b int) (c bool)", 1) +- Foo("foo", 123) //@signature("o", "Foo(a string, b int) (c bool)", -1) +- _ = Foo //@signature("o", "Foo(a string, b int) (c bool)", -1) +- Foo //@signature("o", "Foo(a string, b int) (c bool)", -1) +- Foo() //@signature("(", "Foo(a string, b int) (c bool)", -1) +- +- Bar(13.37, 0x13) //@signature("13.37", "Bar(float64, ...byte)", 0) +- Bar(13.37, 0x37) //@signature("0x37", "Bar(float64, ...byte)", 1) +- Bar(13.37, 1, 2, 3, 4) //@signature("4", "Bar(float64, ...byte)", 1) +- +- NoArgs() //@signature("(", "NoArgs()", -1) +- NoArgs //@signature("s", "NoArgs()", -1) +- +- fn := func(hi, there string) func(i int) rune { +- return func(int) rune { return 0 } +- } +- +- fn("hi", "there") //@signature("hi", "", 0) +- fn("hi", "there") //@signature(",", "fn(hi string, there string) func(i int) rune", 0) +- fn("hi", "there")(1) //@signature("1", "func(i int) rune", 0) +- +- fnPtr := &fn +- (*fnPtr)("hi", "there") //@signature(",", "func(hi string, there string) func(i int) rune", 0) +- +- var fnIntf 
any = Foo +- fnIntf.(func(string, int) bool)("hi", 123) //@signature("123", "func(string, int) bool", 1) +- +- (&bytes.Buffer{}).Next(2) //@signature("2", "Next(n int) []byte", 0) +- +- myFunc := MyFunc(func(n int) string { return "" }) +- myFunc(123) //@signature("123", "myFunc(foo int) string", 0) +- +- var ms myStruct +- ms.foo(nil) //@signature("nil", "foo(e *json.Decoder) (*big.Int, error)", 0) +- +- _ = make([]int, 1, 2) //@signature("2", "make(t Type, size ...int) Type", 1) +- +- Foo(myFunc(123), 456) //@signature("o(", "Foo(a string, b int) (c bool)", -1) +- Foo(myFunc(123), 456) //@signature("(m", "Foo(a string, b int) (c bool)", -1) +- Foo( myFunc(123), 456) //@signature(" m", "Foo(a string, b int) (c bool)", 0) +- Foo(myFunc(123), 456) //@signature(", ", "Foo(a string, b int) (c bool)", 0) +- Foo(myFunc(123), 456) //@signature("456", "Foo(a string, b int) (c bool)", 1) +- Foo(myFunc) //@signature(")", "Foo(a string, b int) (c bool)", 0) +- Foo(myFunc(123), 456) //@signature("(1", "myFunc(foo int) string", -1) +- Foo(myFunc(123), 456) //@signature("123", "myFunc(foo int) string", 0) +- +- fmt.Println //@signature("ln", "Println(a ...any) (n int, err error)", -1) +- fmt.Println(myFunc) //@signature("ln", "Println(a ...any) (n int, err error)", -1) +- fmt.Println(myFunc) //@signature("Func", "myFunc(foo int) string", -1) +- +- var hi string = "hello" +- var wl string = " world: %s" +- fmt.Println(fmt.Sprintf(wl, myFunc)) //@signature("Func", "myFunc(foo int) string", -1) +- fmt.Println(fmt.Sprintf(wl, myFunc)) //@signature("wl", "Sprintf(format string, a ...any) string", 0) +- fmt.Println(fmt.Sprintf(wl, myFunc)) //@signature(" m", "Sprintf(format string, a ...any) string", 1) +- fmt.Println(hi, fmt.Sprintf(wl, myFunc)) //@signature("Sprint", "Sprintf(format string, a ...any) string", -1) +- fmt.Println(hi, fmt.Sprintf(wl, myFunc)) //@signature(" fmt", "Println(a ...any) (n int, err error)", 0) +- fmt.Println(hi, fmt.Sprintf(wl, myFunc)) //@signature("hi", "Println(a ...any) (n int, err error)", 0) +- +- panic("oops!") //@signature(")", "panic(v any)", 0) +- println("hello", "world") //@signature(",", "println(args ...Type)", 0) +- +- Hello(func() { +- //@signature("//", "", 0) +- }) +- +- AliasSlice() //@signature(")", "AliasSlice(a []*Alias) (b Alias)", 0) +- AliasMap() //@signature(")", "AliasMap(a map[*Alias]StringAlias) (b map[*Alias]StringAlias, c map[*Alias]StringAlias)", 0) +- OtherAliasMap() //@signature(")", "OtherAliasMap(a map[Alias]OtherAlias, b map[Alias]OtherAlias) map[Alias]OtherAlias", 0) +- +- var l []Foo +- l = append(l, Foo{ //@signature(",", "append(slice []Type, elems ...Type) []Type", 0) +- A: "hello", //@signature(",", "", 0) +- B: "world", //@signature(",", "", 0) +- }) +- +- FooArr([]int{1, 2, 3, 4, 5}) //@signature("1", "", 0) +-} +- +-func Hello(func()) {} +- +--- signature/signature2.go -- +-package signature +- +-func _() { +- Foo(//@signature("//", "Foo(a string, b int) (c bool)", 0) +- Foo.//@signature("//", "Foo(a string, b int) (c bool)", 0) +- Foo.//@signature("oo", "Foo(a string, b int) (c bool)", 0) +-} +- +--- signature/signature3.go -- +-package signature +- +-func _() { +- Foo("hello",//@signature("//", "Foo(a string, b int) (c bool)", 1) +-} +- +--- signature/nonsignature.go -- +-package signature +- +-var x = (1) //@signature("1)", "", 0) +- +--- signature/signature_test.go -- +-package signature_test +- +-import ( +- "testing" +- +- sig "golang.org/lsptests/signature" +-) +- +-func TestSignature(t *testing.T) { +- sig.AliasSlice() 
//@signature(")", "AliasSlice(a []*sig.Alias) (b sig.Alias)", 0) +- sig.AliasMap() //@signature(")", "AliasMap(a map[*sig.Alias]sig.StringAlias) (b map[*sig.Alias]sig.StringAlias, c map[*sig.Alias]sig.StringAlias)", 0) +- sig.OtherAliasMap() //@signature(")", "OtherAliasMap(a map[sig.Alias]sig.OtherAlias, b map[sig.Alias]sig.OtherAlias) map[sig.Alias]sig.OtherAlias", 0) +-} +- +--- snippets/snippets.go -- +-package snippets +- +-import ( +- "golang.org/lsptests/signature" +-) +- +-type CoolAlias = int //@item(CoolAlias, "CoolAlias", "int", "type") +- +-type structy struct { +- x signature.MyType +-} +- +-func X(_ map[signature.Alias]CoolAlias) (map[signature.Alias]CoolAlias) { +- return nil +-} +- +-func _() { +- X() //@signature(")", "X(_ map[signature.Alias]CoolAlias) map[signature.Alias]CoolAlias", 0) +- _ = signature.MyType{} //@item(literalMyType, "signature.MyType{}", "", "var") +- s := structy{ +- x: //@snippet(" //", literalMyType, "signature.MyType{\\}") +- } +-} +- +--- importedcomplit/importedcomplit.go -- +-package importedcomplit +- +-import ( +- // TODO(rfindley): re-enable after moving to new framework +- // "golang.org/lsptests/foo" +- +- // import completions (separate blocks to avoid comment alignment) +- "crypto/elli" //@complete("\" //", cryptoImport) +- +- "fm" //@complete("\" //", fmtImport) +- +- "go/pars" //@complete("\" //", parserImport) +- +- namedParser "go/pars" //@complete("\" //", parserImport) +- +- "golang.org/lspte" //@complete("\" //", lsptestsImport) +- +- "golang.org/lsptests/sign" //@complete("\" //", signatureImport) +- +- "golang.org/lsptests/sign" //@complete("ests", lsptestsImport) +- +- "golang.org/lsptests/signa" //@complete("na\" //", signatureImport) +-) +- +-func _() { +- var V int //@item(icVVar, "V", "int", "var") +- +- // TODO(rfindley): re-enable after moving to new framework +- // _ = foo.StructFoo{V} // complete("}", Value, icVVar) +-} +- +-func _() { +- var ( +- aa string //@item(icAAVar, "aa", "string", "var") +- ab int //@item(icABVar, "ab", "int", "var") +- ) +- +- // TODO(rfindley): re-enable after moving to new framework +- // _ = foo.StructFoo{a} // complete("}", abVar, aaVar) +- +- var s struct { +- AA string //@item(icFieldAA, "AA", "string", "field") +- AB int //@item(icFieldAB, "AB", "int", "field") +- } +- +- // TODO(rfindley): re-enable after moving to new framework +- //_ = foo.StructFoo{s.} // complete("}", icFieldAB, icFieldAA) +-} +- +-/* "fmt" */ //@item(fmtImport, "fmt", "\"fmt\"", "package") +-/* "go/parser" */ //@item(parserImport, "parser", "\"go/parser\"", "package") +-/* "golang.org/lsptests/signature" */ //@item(signatureImport, "signature", "\"golang.org/lsptests/signature\"", "package") +-/* "golang.org/lsptests/" */ //@item(lsptestsImport, "lsptests/", "\"golang.org/lsptests/\"", "package") +-/* "crypto/elliptic" */ //@item(cryptoImport, "elliptic", "\"crypto/elliptic\"", "package") +diff -urN a/gopls/internal/test/marker/testdata/symbol/basic.txt b/gopls/internal/test/marker/testdata/symbol/basic.txt +--- a/gopls/internal/test/marker/testdata/symbol/basic.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/symbol/basic.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,119 +0,0 @@ +-Basic tests of textDocument/documentSymbols. 
+- +--- settings.json -- +-{"analyses": {"unusedfunc": false}} +- +--- symbol.go -- +-package main +- +-//@symbol(want) +- +-import "io" +- +-var _ = 1 +- +-var x = 42 +- +-var nested struct { +- nestedField struct { +- f int +- } +-} +- +-const y = 43 +- +-type Number int +- +-type Alias = string +- +-type NumberAlias = Number +- +-type ( +- Boolean bool +- BoolAlias = bool +-) +- +-type Foo struct { +- Quux +- W io.Writer +- Bar int +- baz string +- funcField func(int) int +-} +- +-type Quux struct { +- X, Y float64 +-} +- +-type EmptyStruct struct{} +- +-func (f Foo) Baz() string { +- return f.baz +-} +- +-func _() {} +- +-func (q *Quux) Do() {} +- +-func main() { +-} +- +-type Stringer interface { +- String() string +-} +- +-type ABer interface { +- B() +- A() string +-} +- +-type WithEmbeddeds interface { +- Do() +- ABer +- io.Writer +-} +- +-type EmptyInterface any +- +-func Dunk() int { return 0 } +- +-func dunk() {} +- +-var _ = dunk +- +--- @want -- +-(*Quux).Do "func()" +-(Foo).Baz "func() string" +2 lines +-ABer "interface{...}" +3 lines +-ABer.A "func() string" +-ABer.B "func()" +-Alias "string" +-BoolAlias "bool" +-Boolean "bool" +-Dunk "func() int" +-EmptyInterface "any" +-EmptyStruct "struct{}" +-Foo "struct{...}" +6 lines +-Foo.Bar "int" +-Foo.Quux "Quux" +-Foo.W "io.Writer" +-Foo.baz "string" +-Foo.funcField "func(int) int" +-Number "int" +-NumberAlias "Number" +-Quux "struct{...}" +2 lines +-Quux.X "float64" +-Quux.Y "float64" +-Stringer "interface{...}" +2 lines +-Stringer.String "func() string" +-WithEmbeddeds "interface{...}" +4 lines +-WithEmbeddeds.ABer "ABer" +-WithEmbeddeds.Do "func()" +-WithEmbeddeds.Writer "io.Writer" +-dunk "func()" +-main "func()" +1 lines +-nested "struct{...}" +4 lines +-nested.nestedField "struct{...}" +2 lines +-nested.nestedField.f "int" +-x "" +-y "" +diff -urN a/gopls/internal/test/marker/testdata/symbol/generic.txt b/gopls/internal/test/marker/testdata/symbol/generic.txt +--- a/gopls/internal/test/marker/testdata/symbol/generic.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/symbol/generic.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,23 +0,0 @@ +-Basic tests of textDocument/documentSymbols with generics. +- +--- symbol.go -- +-//@symbol(want) +- +-package main +- +-type T[P any] struct { +- F P +-} +- +-type Constraint interface { +- ~int | struct{ int } +- interface{ M() } +-} +- +--- @want -- +-Constraint "interface{...}" +3 lines +-Constraint.interface{...} "" +-Constraint.interface{...}.M "func()" +-Constraint.~int | struct{int} "" +-T "struct{...}" +2 lines +-T.F "P" +diff -urN a/gopls/internal/test/marker/testdata/token/comment.txt b/gopls/internal/test/marker/testdata/token/comment.txt +--- a/gopls/internal/test/marker/testdata/token/comment.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/token/comment.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,55 +0,0 @@ +-This test checks the semantic tokens in comments (golang/go#64648). +- +-There will be doc links in the comments to reference other objects. Parse these +-links and output tokens according to the referenced object types, so that the +-editor can highlight them. This will help in checking the doc link errors and +-reading comments in the code. +- +--- settings.json -- +-{ +- "semanticTokens": true +-} +- +--- a.go -- +-package p +- +-import "strconv" +- +-const A = 1 +-var B = 2 +- +-type Foo int +- +- +-// [F] accept a [Foo], and print it. 
//@token("F", "function", "signature"),token("Foo", "type", "number") +-func F(v Foo) { +- println(v) +- +-} +- +-/* +- [F1] print [A] and [B] //@token("F1", "function", "signature"),token("A", "variable", "readonly number"),token("B", "variable", "number") +-*/ +-func F1() { +- // print [A] and [B]. //@token("A", "variable", "readonly number"),token("B", "variable", "number") +- println(A, B) +-} +- +-// [F2] use [strconv.Atoi] convert s, then print it //@token("F2", "function", "signature"),token("strconv", "namespace", ""),token("Atoi", "function", "signature") +-func F2(s string) { +- a, _ := strconv.Atoi("42") +- b, _ := strconv.Atoi("42") +- println(a, b) // this is a tail comment in F2 //hover(F2, "F2", F2) +-} +--- b.go -- +-package p +- +-// [F3] accept [*Foo] //@token("F3", "function", "signature"),token("Foo", "type", "number") +-func F3(v *Foo) { +- println(*v) +-} +- +-// [F4] equal [strconv.Atoi] //@token("F4", "function", "signature"),token("strconv", "namespace", ""),token("Atoi", "function", "signature") +-func F4(s string) (int, error) { +- return 0, nil +-} +diff -urN a/gopls/internal/test/marker/testdata/token/format.txt b/gopls/internal/test/marker/testdata/token/format.txt +--- a/gopls/internal/test/marker/testdata/token/format.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/token/format.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,26 +0,0 @@ +-This test checks semanticTokens for format string placeholders. +- +--- settings.json -- +-{ +- "semanticTokens": true +-} +- +--- flags -- +--ignore_extra_diags +- +--- format.go -- +-package format +- +-import "fmt" +- +-func PrintfTests() { +- var i int +- var x float64 +- fmt.Printf("%b %d %f", 3, i, x) //@ token("%b", "string", "format"), token("%d", "string", "format"),token("%f", "string", "format"), +- fmt.Printf("lit1%blit2%dlit3%flit4", 3, i, x) //@ token("%b", "string", "format"), token("%d", "string", "format"),token("%f", "string", "format"),token("lit1", "string", ""),token("lit2", "string", ""),token("lit3", "string", ""), +- fmt.Printf("%% %d lit2", 3, i, x) //@ token("%d", "string", "format"),token("%%", "string", ""),token("lit2", "string", ""), +- fmt.Printf("Hello %% \n %s, you \t%% \n have %d new m%%essages!", "Alice", 5) //@ token("%s", "string", "format"),token("%d", "string", "format") +- fmt.Printf("%d \nss \x25[2]d", 234, 123) //@ token("%d", "string", "format"),token("\\x25[2]d", "string", "format") +- fmt.Printf("start%[2]*.[1]*[3]dmiddle%send", 4, 5, 6) //@ token("%[2]*.[1]*[3]d", "string", "format"),token("start", "string", ""),token("%s", "string", "format"),token("middle", "string", ""),token("end", "string", "") +-} +- +diff -urN a/gopls/internal/test/marker/testdata/token/illformed.txt b/gopls/internal/test/marker/testdata/token/illformed.txt +--- a/gopls/internal/test/marker/testdata/token/illformed.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/token/illformed.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,15 +0,0 @@ +-This test checks semanticTokens on ill-formed code. +-(Regression test for #68205.) 
+- +--- settings.json -- +-{ +- "semanticTokens": true +-} +- +--- flags -- +--ignore_extra_diags +- +--- a.go -- +-package p +- +-type _ <-<-chan int //@ token("<-", "operator", ""), token("chan", "keyword", "") +diff -urN a/gopls/internal/test/marker/testdata/token/issue66809.txt b/gopls/internal/test/marker/testdata/token/issue66809.txt +--- a/gopls/internal/test/marker/testdata/token/issue66809.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/token/issue66809.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,16 +0,0 @@ +-This is a regression test for #66809 (missing modifiers for +-declarations of function-type variables). +- +--- settings.json -- +-{ +- "semanticTokens": true +-} +- +--- main.go -- +-package main +- +-func main() { +- foo := func(x string) string { return x } //@token("foo", "variable", "definition signature") +- _ = foo //@token("foo", "variable", "signature") +- foo("hello") //@token("foo", "variable", "signature") +-} +diff -urN a/gopls/internal/test/marker/testdata/token/issue70251.txt b/gopls/internal/test/marker/testdata/token/issue70251.txt +--- a/gopls/internal/test/marker/testdata/token/issue70251.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/token/issue70251.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,13 +0,0 @@ +-This is a regression test for #70251 (missing modifiers for +-predeclared interfaces). +- +--- settings.json -- +-{ +- "semanticTokens": true +-} +- +--- a/a.go -- +-package a +- +-var _ any //@token("any", "type", "defaultLibrary interface") +-var _ error //@token("error", "type", "defaultLibrary interface") +diff -urN a/gopls/internal/test/marker/testdata/token/modifiers.txt b/gopls/internal/test/marker/testdata/token/modifiers.txt +--- a/gopls/internal/test/marker/testdata/token/modifiers.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/token/modifiers.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,61 +0,0 @@ +-This test checks the output of semanticTokens modifiers. +-(including test for #70219.) 
+- +--- settings.json -- +-{ +- "semanticTokens": true +-} +- +--- flags -- +--ignore_extra_diags +- +--- standard.go -- +-package modifiers +- +-func _() { +- a, b := false, true //@ token("false", "variable", "readonly defaultLibrary"), token("true", "variable", "readonly defaultLibrary") +-} +- +-const ( +- c = iota //@ token("iota", "variable", "readonly defaultLibrary number") +-) +- +--- custom.go -- +-package modifiers +- +-type Foo struct{} +- +-func _() { +- var array [2]string //@ token("array", "variable", "definition array") +- array = [2]string{"", ""} //@ token("array", "variable", "array") +- +- var b bool //@ token("b", "variable", "definition bool") +- b = true //@ token("b", "variable", "bool") +- +- var c chan string //@ token("c", "variable", "definition chan") +- c = make(chan string) //@ token("c", "variable", "chan") +- +- type inter interface{} //@ token("inter", "type", "definition interface") +- +- var m map[string]string //@ token("m", "variable", "definition map") +- m = make(map[string]string) //@ token("m", "variable", "map") +- +- var number int //@ token("number", "variable", "definition number") +- number = 1 //@ token("number", "variable", "number") +- +- var ptr *Foo //@ token("ptr", "variable", "definition pointer") +- ptr = nil //@ token("ptr", "variable", "pointer") +- +- var sig func(string) //@ token("sig", "variable", "definition signature") +- sig = nil //@ token("sig", "variable", "signature") +- +- var slice []string //@ token("slice", "variable", "definition slice") +- slice = nil //@ token("slice", "variable", "slice") +- +- var str string //@ token("str", "variable", "definition string") +- str = "" //@ token("str", "variable", "string") +- +- var foo Foo //@ token("foo", "variable", "definition struct") +- foo = Foo{} //@ token("foo", "variable", "struct") +-} +- +diff -urN a/gopls/internal/test/marker/testdata/token/range.txt b/gopls/internal/test/marker/testdata/token/range.txt +--- a/gopls/internal/test/marker/testdata/token/range.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/token/range.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,29 +0,0 @@ +-This test checks the output of textDocument/semanticTokens/range. +- +-TODO: add more assertions. +- +--- settings.json -- +-{ +- "semanticTokens": true +-} +- +--- a.go -- +-package p //@token("package", "keyword", "") +- +-const C = 42 //@token("C", "variable", "definition readonly number") +- +-func F() { //@token("F", "function", "definition signature") +- x := 2 + 3//@token("x", "variable", "definition number"),token("2", "number", ""),token("+", "operator", "") +- _ = x //@token("x", "variable", "number") +- _ = F //@token("F", "function", "signature") +-} +- +-func _() { +- // A goto's label cannot be found by ascending the syntax tree. +- goto loop //@ token("goto", "keyword", ""), token("loop", "label", "") +- +-loop: //@token("loop", "label", "definition") +- for { +- continue loop //@ token("continue", "keyword", ""), token("loop", "label", "") +- } +-} +diff -urN a/gopls/internal/test/marker/testdata/typedef/typedef.txt b/gopls/internal/test/marker/testdata/typedef/typedef.txt +--- a/gopls/internal/test/marker/testdata/typedef/typedef.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/typedef/typedef.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,68 +0,0 @@ +-This test exercises the textDocument/typeDefinition action. 
+- +--- typedef.go -- +-package typedef +- +-type Struct struct { //@loc(Struct, "Struct"), +- Field string +-} +- +-type Int int //@loc(Int, "Int") +- +-func _() { +- var ( +- value Struct +- point *Struct +- ) +- _ = value //@typedef("value", Struct) +- _ = point //@typedef("point", Struct) +- +- var ( +- array [3]Struct +- slice []Struct +- ch chan Struct +- complex [3]chan *[5][]Int +- ) +- _ = array //@typedef("array", Struct) +- _ = slice //@typedef("slice", Struct) +- _ = ch //@typedef("ch", Struct) +- _ = complex //@typedef("complex", Int) +- +- var s struct { +- x struct { +- xx struct { +- field1 []Struct +- field2 []Int +- } +- } +- } +- _ = s.x.xx.field1 //@typedef("field1", Struct) +- _ = s.x.xx.field2 //@typedef("field2", Int) +-} +- +-func F1() Int { return 0 } +-func F2() (Int, float64) { return 0, 0 } +-func F3() (Struct, int, bool, error) { return Struct{}, 0, false, nil } +-func F4() (**int, Int, bool, *error) { return nil, 0, false, nil } +-func F5() (int, float64, error, Struct) { return 0, 0, nil, Struct{} } +-func F6() (int, float64, ***Struct, error) { return 0, 0, nil, nil } +- +-func _() { +- F1() //@typedef("F1", Int) +- F2() //@typedef("F2", Int) +- F3() //@typedef("F3", Struct) +- F4() //@typedef("F4", Int) +- F5() //@typedef("F5", Struct) +- F6() //@typedef("F6", Struct) +- +- f := func() Int { return 0 } +- f() //@typedef("f", Int) +-} +- +-// https://github.com/golang/go/issues/38589#issuecomment-620350922 +-func _() { +- type myFunc func(int) Int //@loc(myFunc, "myFunc") +- +- var foo myFunc +- _ = foo() //@typedef("foo", myFunc), diag(")", re"not enough arguments") +-} +diff -urN a/gopls/internal/test/marker/testdata/typehierarchy/basic.txt b/gopls/internal/test/marker/testdata/typehierarchy/basic.txt +--- a/gopls/internal/test/marker/testdata/typehierarchy/basic.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/typehierarchy/basic.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,50 +0,0 @@ +-Basic test of type hierarchy. +- +-We pose the same queries across two identical packages to exercise +-the local and global algorithms. +- +-TODO(adonovan): test other properties of the result, such as kind. +- +--- go.mod -- +-module example.com +-go 1.18 +- +--- a/a.go -- +-package a +- +-type I interface { F() } //@ loc(I, "I") +- +-type J interface { F(); G() } //@ loc(J, "J") +- +-type S int //@ loc(S, "S") +- +-func (S) F() {} +-func (S) G() {} +- +-//@subtypes(S) +-//@subtypes(I, J, S, BI, BJ, BS) +-//@subtypes(J, S, BJ, BS) +- +-//@supertypes(S, I, J, BI, BJ) +-//@supertypes(I, BI) +-//@supertypes(J, I, BI, BJ) +- +--- b/b.go -- +-package b +- +-type BI interface { F() } //@ loc(BI, "BI") +- +-type BJ interface { F(); G() } //@ loc(BJ, "BJ") +- +-type BS int //@ loc(BS, "BS") +- +-func (BS) F() {} +-func (BS) G() {} +- +-//@subtypes(BS) +-//@subtypes(BI, BJ, BS, I, J, S) +-//@subtypes(BJ, BS, J, S) +- +-//@supertypes(BS, BI, BJ, I, J) +-//@supertypes(BI, I) +-//@supertypes(BJ, BI, I, J) +diff -urN a/gopls/internal/test/marker/testdata/workfile/godebug_bad.txt b/gopls/internal/test/marker/testdata/workfile/godebug_bad.txt +--- a/gopls/internal/test/marker/testdata/workfile/godebug_bad.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/workfile/godebug_bad.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,22 +0,0 @@ +-This test checks that we surface the error for unexpected godebug values. +- +-TODO(golang/go#67623): the diagnostic should be on the bad godebug value. 
+- +--- flags -- +--min_go_command=go1.23 +--errors_ok +- +--- go.work -- +-go 1.23 +- +-use . +- +-godebug ( +- gotypealias=0 // misspelled +-) +-godebug gotypesalias=1 +- +--- go.mod -- +-module example.com/m //@diag("module", re`unknown godebug "gotypealias"`) +- +-go 1.23 +diff -urN a/gopls/internal/test/marker/testdata/workfile/godebug.txt b/gopls/internal/test/marker/testdata/workfile/godebug.txt +--- a/gopls/internal/test/marker/testdata/workfile/godebug.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/workfile/godebug.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,60 +0,0 @@ +-This test basic gopls functionality in a workspace with a godebug +-directive in its modfile. +- +--- flags -- +--min_go_command=go1.23 +- +--- a/go.work -- +-go 1.23 +- +-use . +- +-godebug ( +- gotypesalias=0 +-) +-godebug gotypesalias=1 +- +--- a/go.mod -- +-module example.com/a +- +-go 1.23 +- +--- a/a.go -- +-package a +- +-import "example.com/a/b" +- +-const A = b.B //@def("B", B) +- +--- a/b/b.go -- +-package b +- +-const B = 42 //@loc(B, "B") +- +--- format/go.work -- +-go 1.23 //@format(formatted) +- +-use . +- +-godebug ( +-gotypesalias=0 +-) +-godebug gotypesalias=1 +- +--- @formatted -- +-go 1.23 //@format(formatted) +- +-use . +- +-godebug ( +- gotypesalias=0 +-) +- +-godebug gotypesalias=1 +--- format/go.mod -- +-module example.com/format +- +-go 1.23 +- +--- format/p.go -- +-package format +diff -urN a/gopls/internal/test/marker/testdata/workspacesymbol/allscope.txt b/gopls/internal/test/marker/testdata/workspacesymbol/allscope.txt +--- a/gopls/internal/test/marker/testdata/workspacesymbol/allscope.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/workspacesymbol/allscope.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,30 +0,0 @@ +-This test verifies behavior when "symbolScope" is set to "all". +- +--- settings.json -- +-{ +- "symbolStyle": "full", +- "symbolMatcher": "casesensitive", +- "symbolScope": "all" +-} +- +--- go.mod -- +-module mod.test/symbols +- +-go 1.18 +- +--- query.go -- +-package symbols +- +-//@workspacesymbol("fmt.Println", println) +- +--- fmt/fmt.go -- +-package fmt +- +-import "fmt" +- +-func Println(s string) { +- fmt.Println(s) +-} +--- @println -- +-fmt/fmt.go:5:6-13 mod.test/symbols/fmt.Println Function +-<external> fmt.Println Function +diff -urN a/gopls/internal/test/marker/testdata/workspacesymbol/caseinsensitive.txt b/gopls/internal/test/marker/testdata/workspacesymbol/caseinsensitive.txt +--- a/gopls/internal/test/marker/testdata/workspacesymbol/caseinsensitive.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/workspacesymbol/caseinsensitive.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,28 +0,0 @@ +-This file contains test for symbol matches using the caseinsensitive matcher. 
+- +--- settings.json -- +-{ +- "symbolMatcher": "caseinsensitive" +-} +- +--- go.mod -- +-module mod.test/caseinsensitive +- +-go 1.18 +- +--- p.go -- +-package caseinsensitive +- +-//@workspacesymbol("", blank) +-//@workspacesymbol("randomgophervar", randomgophervar) +- +-var RandomGopherVariableA int +-var randomgopherVariableB int +-var RandomGopherOtherVariable int +- +-var _ = randomgopherVariableB // pacify unusedfunc +- +--- @blank -- +--- @randomgophervar -- +-p.go:6:5-26 RandomGopherVariableA Variable +-p.go:7:5-26 randomgopherVariableB Variable +diff -urN a/gopls/internal/test/marker/testdata/workspacesymbol/casesensitive.txt b/gopls/internal/test/marker/testdata/workspacesymbol/casesensitive.txt +--- a/gopls/internal/test/marker/testdata/workspacesymbol/casesensitive.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/workspacesymbol/casesensitive.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,119 +0,0 @@ +-This file contains tests for symbol matches using the casesensitive matcher. +- +-For historical reasons, it also verifies general behavior of the symbol search. +- +--- settings.json -- +-{ +- "symbolMatcher": "casesensitive", +- "analyses": {"unusedfunc": false} +-} +- +--- go.mod -- +-module mod.test/casesensitive +- +-go 1.18 +- +--- main.go -- +-package main +- +-//@workspacesymbol("main.main", main) +-//@workspacesymbol("p.Message", Message) +-//@workspacesymbol("main.myvar", myvar) +-//@workspacesymbol("main.myType", myType) +-//@workspacesymbol("main.myType.Blahblah", blahblah) +-//@workspacesymbol("main.myStruct", myStruct) +-//@workspacesymbol("main.myStruct.myStructField", myStructField) +-//@workspacesymbol("main.myInterface", myInterface) +-//@workspacesymbol("main.myInterface.DoSomeCoolStuff", DoSomeCoolStuff) +-//@workspacesymbol("main.embed.myStruct", embeddedStruct) +-//@workspacesymbol("main.embed.nestedStruct.nestedStruct2.int", int) +-//@workspacesymbol("main.embed.nestedInterface.myInterface", nestedInterface) +-//@workspacesymbol("main.embed.nestedInterface.nestedMethod", nestedMethod) +-//@workspacesymbol("dunk", dunk) +-//@workspacesymbol("Dunk", Dunk) +- +-import ( +- "encoding/json" +- "fmt" +-) +- +-func main() { // function +- fmt.Println("Hello") +-} +- +-var myvar int // variable +- +-type myType string // basic type +- +-type myDecoder json.Decoder // to use the encoding/json import +- +-func (m *myType) Blahblah() {} // method +- +-type myStruct struct { // struct type +- myStructField int // struct field +-} +- +-type myInterface interface { // interface +- DoSomeCoolStuff() string // interface method +-} +- +-type embed struct { +- myStruct +- +- nestedStruct struct { +- nestedField int +- +- nestedStruct2 struct { +- int +- } +- } +- +- nestedInterface interface { +- myInterface +- nestedMethod() +- } +-} +- +-func Dunk() int { return 0 } +- +-func dunk() {} +- +-var _ = dunk +- +--- p/p.go -- +-package p +- +-const Message = "Hello World." 
// constant +--- @DoSomeCoolStuff -- +-main.go:41:2-17 main.myInterface.DoSomeCoolStuff Method +--- @Dunk -- +-main.go:61:6-10 Dunk Function +--- @Message -- +-p/p.go:3:7-14 p.Message Constant +--- @blahblah -- +-main.go:34:18-26 main.myType.Blahblah Method +--- @dunk -- +-main.go:63:6-10 dunk Function +--- @int -- +-main.go:51:4-7 main.embed.nestedStruct.nestedStruct2.int Field +--- @main -- +-main.go:24:6-10 main.main Function +--- @myInterface -- +-main.go:40:6-17 main.myInterface Interface +-main.go:41:2-17 main.myInterface.DoSomeCoolStuff Method +--- @myStruct -- +-main.go:36:6-14 main.myStruct Struct +-main.go:37:2-15 main.myStruct.myStructField Field +--- @myStructField -- +-main.go:37:2-15 main.myStruct.myStructField Field +--- @myType -- +-main.go:30:6-12 main.myType Class +-main.go:34:18-26 main.myType.Blahblah Method +--- @myvar -- +-main.go:28:5-10 main.myvar Variable +--- @nestedInterface -- +-main.go:56:3-14 main.embed.nestedInterface.myInterface Interface +--- @nestedMethod -- +-main.go:57:3-15 main.embed.nestedInterface.nestedMethod Method +--- @embeddedStruct -- +-main.go:45:2-10 main.embed.myStruct Field +diff -urN a/gopls/internal/test/marker/testdata/workspacesymbol/issue44806.txt b/gopls/internal/test/marker/testdata/workspacesymbol/issue44806.txt +--- a/gopls/internal/test/marker/testdata/workspacesymbol/issue44806.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/workspacesymbol/issue44806.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,27 +0,0 @@ +-This test verifies the fix for the crash encountered in golang/go#44806. +- +--- go.mod -- +-module mod.test/symbol +- +-go 1.18 +--- symbol.go -- +-package symbol +- +-//@workspacesymbol("M", M) +- +-type T struct{} +- +-// We should accept all valid receiver syntax when scanning symbols. +-func (*(T)) M1() {} +-func (*T) M2() {} +-func (T) M3() {} +-func ((T)) M4() {} +-func ((*T)) M5() {} +- +--- @M -- +-symbol.go:8:13-15 T.M1 Method +-symbol.go:9:11-13 T.M2 Method +-symbol.go:10:10-12 T.M3 Method +-symbol.go:11:12-14 T.M4 Method +-symbol.go:12:13-15 T.M5 Method +-symbol.go:5:6-7 symbol.T Struct +diff -urN a/gopls/internal/test/marker/testdata/workspacesymbol/workspacesymbol.txt b/gopls/internal/test/marker/testdata/workspacesymbol/workspacesymbol.txt +--- a/gopls/internal/test/marker/testdata/workspacesymbol/workspacesymbol.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/workspacesymbol/workspacesymbol.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,72 +0,0 @@ +-This test contains tests for basic functionality of the workspace/symbol +-request. +- +-TODO(rfindley): add a test for the legacy 'fuzzy' symbol matcher using setting ("symbolMatcher": "fuzzy"). This test uses the default matcher ("fastFuzzy"). 
+- +--- go.mod -- +-module mod.test/symbols +- +-go 1.18 +- +--- query.go -- +-package symbols +- +-//@workspacesymbol("rgop", rgop) +-//@workspacesymbol("randoma", randoma) +-//@workspacesymbol("randomb", randomb) +- +--- a/a.go -- +-package a +- +-var RandomGopherVariableA = "a" +- +-const RandomGopherConstantA = "a" +- +-const ( +- randomgopherinvariable = iota +-) +- +--- a/a_test.go -- +-package a +- +-var RandomGopherTestVariableA = "a" +- +--- a/a_x_test.go -- +-package a_test +- +-var RandomGopherXTestVariableA = "a" +- +--- b/b.go -- +-package b +- +-var RandomGopherVariableB = "b" +- +-type RandomGopherStructB struct { +- Bar int +-} +- +--- @rgop -- +-b/b.go:5:6-25 RandomGopherStructB Struct +-a/a.go:5:7-28 RandomGopherConstantA Constant +-a/a.go:3:5-26 RandomGopherVariableA Variable +-b/b.go:3:5-26 RandomGopherVariableB Variable +-a/a_test.go:3:5-30 RandomGopherTestVariableA Variable +-a/a_x_test.go:3:5-31 RandomGopherXTestVariableA Variable +-a/a.go:8:2-24 randomgopherinvariable Constant +-b/b.go:6:2-5 RandomGopherStructB.Bar Field +--- @randoma -- +-a/a.go:5:7-28 RandomGopherConstantA Constant +-a/a.go:3:5-26 RandomGopherVariableA Variable +-b/b.go:3:5-26 RandomGopherVariableB Variable +-a/a.go:8:2-24 randomgopherinvariable Constant +-a/a_test.go:3:5-30 RandomGopherTestVariableA Variable +-a/a_x_test.go:3:5-31 RandomGopherXTestVariableA Variable +-b/b.go:6:2-5 RandomGopherStructB.Bar Field +--- @randomb -- +-b/b.go:5:6-25 RandomGopherStructB Struct +-a/a.go:3:5-26 RandomGopherVariableA Variable +-b/b.go:3:5-26 RandomGopherVariableB Variable +-a/a.go:8:2-24 randomgopherinvariable Constant +-a/a_test.go:3:5-30 RandomGopherTestVariableA Variable +-a/a_x_test.go:3:5-31 RandomGopherXTestVariableA Variable +-b/b.go:6:2-5 RandomGopherStructB.Bar Field +diff -urN a/gopls/internal/test/marker/testdata/workspacesymbol/wsscope.txt b/gopls/internal/test/marker/testdata/workspacesymbol/wsscope.txt +--- a/gopls/internal/test/marker/testdata/workspacesymbol/wsscope.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/workspacesymbol/wsscope.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,29 +0,0 @@ +-This test verifies behavior when "symbolScope" is set to "workspace". +- +--- settings.json -- +-{ +- "symbolStyle": "full", +- "symbolMatcher": "casesensitive", +- "symbolScope": "workspace" +-} +- +--- go.mod -- +-module mod.test/symbols +- +-go 1.18 +- +--- query.go -- +-package symbols +- +-//@workspacesymbol("fmt.Println", println) +- +--- fmt/fmt.go -- +-package fmt +- +-import "fmt" +- +-func Println(s string) { +- fmt.Println(s) +-} +--- @println -- +-fmt/fmt.go:5:6-13 mod.test/symbols/fmt.Println Function +diff -urN a/gopls/internal/test/marker/testdata/zeroconfig/adhoc.txt b/gopls/internal/test/marker/testdata/zeroconfig/adhoc.txt +--- a/gopls/internal/test/marker/testdata/zeroconfig/adhoc.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/zeroconfig/adhoc.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,49 +0,0 @@ +-This test checks that gopls works with multiple ad-hoc packages, which lack +-a go.mod file. +- +-We should be able to import standard library packages, get diagnostics, and +-reference symbols defined in the same directory. 
+- +--- main.go -- +-package main +- +-import "fmt" +- +-func main() { +- fmt.Println(mainMsg) //@def("mainMsg", mainMsg) +- fmt.Println(undef) //@diag("undef", re"undefined|undeclared") +-} +--- main2.go -- +-package main +- +-const mainMsg = "main" //@loc(mainMsg, "mainMsg") +- +--- a/a.go -- +-package a +- +-import "fmt" +- +-func _() { +- fmt.Println(aMsg) //@def("aMsg", aMsg) +- fmt.Println(undef) //@diag("undef", re"undefined|undeclared") +-} +- +--- a/a2.go -- +-package a +- +-const aMsg = "a" //@loc(aMsg, "aMsg") +- +--- b/b.go -- +-package b +- +-import "fmt" +- +-func _() { +- fmt.Println(bMsg) //@def("bMsg", bMsg) +- fmt.Println(undef) //@diag("undef", re"undefined|undeclared") +-} +- +--- b/b2.go -- +-package b +- +-const bMsg = "b" //@loc(bMsg, "bMsg") +diff -urN a/gopls/internal/test/marker/testdata/zeroconfig/dynamicports.txt b/gopls/internal/test/marker/testdata/zeroconfig/dynamicports.txt +--- a/gopls/internal/test/marker/testdata/zeroconfig/dynamicports.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/zeroconfig/dynamicports.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,118 +0,0 @@ +-This test checks that the zero-config algorithm selects Views to cover first +-class ports. +- +-In this test, package a imports b, and b imports c. Package a contains files +-constrained by go:build directives, package b contains files constrained by the +-GOOS matching their file name, and package c is unconstrained. Various +-assertions check that diagnostics and navigation work as expected. +- +--- go.mod -- +-module golang.org/lsptests +- +--- a/a.go -- +-package a +- +-import "golang.org/lsptests/b" +- +-var _ = b.F //@loc(F, "F") +- +--- a/linux64.go -- +-//go:build (linux && amd64) +- +-package a +- +-import "golang.org/lsptests/b" +- +-var _ int = 1<<32 -1 // OK on 64 bit platforms. Compare linux32.go below. 
+- +-var ( +- _ = b.LinuxOnly //@def("LinuxOnly", LinuxOnly) +- _ = b.DarwinOnly //@diag("DarwinOnly", re"(undefined|declared)") +- _ = b.WindowsOnly //@diag("WindowsOnly", re"(undefined|declared)") +-) +- +--- a/linux32.go -- +-//go:build (linux && 386) +- +-package a +- +-import "golang.org/lsptests/b" +- +-var _ int = 1<<32 -1 //@diag("1<<32", re"overflows") +- +-var ( +- _ = b.LinuxOnly //@def("LinuxOnly", LinuxOnly) +- _ = b.DarwinOnly //@diag("DarwinOnly", re"(undefined|declared)") +- _ = b.WindowsOnly //@diag("WindowsOnly", re"(undefined|declared)") +-) +- +--- a/darwin64.go -- +-//go:build (darwin && amd64) +- +-package a +- +-import "golang.org/lsptests/b" +- +-var ( +- _ = b.LinuxOnly //@diag("LinuxOnly", re"(undefined|declared)") +- _ = b.DarwinOnly //@def("DarwinOnly", DarwinOnly) +- _ = b.WindowsOnly //@diag("WindowsOnly", re"(undefined|declared)") +-) +- +--- a/windows64.go -- +-//go:build (windows && amd64) +- +-package a +- +-import "golang.org/lsptests/b" +- +-var ( +- _ = b.LinuxOnly //@diag("LinuxOnly", re"(undefined|declared)") +- _ = b.DarwinOnly //@diag("DarwinOnly", re"(undefined|declared)") +- _ = b.WindowsOnly //@def("WindowsOnly", WindowsOnly) +-) +- +--- b/b_other.go -- +-//go:build !linux && !darwin && !windows +-package b +- +-func F() {} +- +--- b/b_linux.go -- +-package b +- +-import "golang.org/lsptests/c" +- +-func F() { //@refs("F", "F", F) +- x := c.Common //@diag("x", re"not used"),def("Common", Common) +-} +- +-const LinuxOnly = "darwin" //@loc(LinuxOnly, "LinuxOnly") +- +--- b/b_darwin.go -- +-package b +- +-import "golang.org/lsptests/c" +- +-func F() { //@refs("F", "F", F) +- x := c.Common //@diag("x", re"not used"),def("Common", Common) +-} +- +-const DarwinOnly = "darwin" //@loc(DarwinOnly, "DarwinOnly") +- +--- b/b_windows.go -- +-package b +- +-import "golang.org/lsptests/c" +- +-func F() { //@refs("F", "F", F) +- x := c.Common //@diag("x", re"not used"),def("Common", Common) +-} +- +-const WindowsOnly = "windows" //@loc(WindowsOnly, "WindowsOnly") +- +--- c/c.go -- +-package c +- +-const Common = 0 //@loc(Common, "Common") +- +diff -urN a/gopls/internal/test/marker/testdata/zeroconfig/nested.txt b/gopls/internal/test/marker/testdata/zeroconfig/nested.txt +--- a/gopls/internal/test/marker/testdata/zeroconfig/nested.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/zeroconfig/nested.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,69 +0,0 @@ +-This test checks that gopls works with nested modules, including multiple +-nested modules. +- +--- main.go -- +-package main +- +-import "fmt" +- +-func main() { +- fmt.Println(mainMsg) //@def("mainMsg", mainMsg) +- fmt.Println(undef) //@diag("undef", re"undefined|undeclared") +-} +--- main2.go -- +-package main +- +-const mainMsg = "main" //@loc(mainMsg, "mainMsg") +- +--- mod1/go.mod -- +-module golang.org/lsptests/mod1 +- +-go 1.20 +- +--- mod1/a/a.go -- +-package a +- +-import ( +- "fmt" +- "golang.org/lsptests/mod1/b" +-) +- +-func _() { +- fmt.Println(b.Msg) //@def("Msg", Msg) +- fmt.Println(undef) //@diag("undef", re"undefined|undeclared") +-} +- +--- mod1/a/tagged.go -- +-//go:build tag1 +- +-// golang/go#60776: verify that we get an accurate error about build tags +-// here, rather than an inaccurate error suggesting to add a go.work +-// file (which won't help). 
+-package a //@diag(re`package (a)`, re`excluded due to its build tags`) +- +--- mod1/b/b.go -- +-package b +- +-const Msg = "1" //@loc(Msg, "Msg") +- +--- mod2/go.mod -- +-module golang.org/lsptests/mod2 +- +-require golang.org/lsptests/mod1 v0.0.1 +- +-replace golang.org/lsptests/mod1 => ../mod1 +- +-go 1.20 +- +--- mod2/c/c.go -- +-package c +- +-import ( +- "fmt" +- "golang.org/lsptests/mod1/b" +-) +- +-func _() { +- fmt.Println(b.Msg) //@def("Msg", Msg) +- fmt.Println(undef) //@diag("undef", re"undefined|undeclared") +-} +diff -urN a/gopls/internal/test/marker/testdata/zeroconfig/nonworkspacemodule.txt b/gopls/internal/test/marker/testdata/zeroconfig/nonworkspacemodule.txt +--- a/gopls/internal/test/marker/testdata/zeroconfig/nonworkspacemodule.txt 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/test/marker/testdata/zeroconfig/nonworkspacemodule.txt 1969-12-31 18:00:00.000000000 -0600 +@@ -1,79 +0,0 @@ +-This test checks that gopls works with modules that aren't included in the +-workspace file. +- +--- go.work -- +-go 1.20 +- +-use ( +- ./a +- ./b +-) +- +--- a/go.mod -- +-module golang.org/lsptests/a +- +-go 1.18 +- +--- a/a.go -- +-package a +- +-import ( +- "fmt" +- "golang.org/lsptests/a/lib" +-) +- +-func _() { +- fmt.Println(lib.Msg) //@def("Msg", aMsg) +- fmt.Println(undef) //@diag("undef", re"undefined|undeclared") +-} +- +--- a/lib/lib.go -- +-package lib +- +-const Msg = "hi" //@loc(aMsg, "Msg") +- +--- b/go.mod -- +-module golang.org/lsptests/b +- +-go 1.18 +- +--- b/b.go -- +-package b +- +-import ( +- "fmt" +- "golang.org/lsptests/b/lib" +-) +- +-func main() { +- fmt.Println(lib.Msg) //@def("Msg", bMsg) +- fmt.Println(undef) //@diag("undef", re"undefined|undeclared") +-} +- +--- b/lib/lib.go -- +-package lib +- +-const Msg = "hi" //@loc(bMsg, "Msg") +- +--- c/go.mod -- +-module golang.org/lsptests/c +- +-go 1.18 +- +--- c/c.go -- +-package c +- +-import ( +- "fmt" +- "golang.org/lsptests/c/lib" +-) +- +-func main() { +- fmt.Println(lib.Msg) //@def("Msg", cMsg) +- fmt.Println(undef) //@diag("undef", re"undefined|undeclared") +-} +- +--- c/lib/lib.go -- +-package lib +- +-const Msg = "hi" //@loc(cMsg, "Msg") +diff -urN a/gopls/internal/util/asm/parse.go b/gopls/internal/util/asm/parse.go +--- a/gopls/internal/util/asm/parse.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/util/asm/parse.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,245 +0,0 @@ +-// Copyright 2025 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-// Package asm provides a simple parser for Go assembly files. +-package asm +- +-import ( +- "bufio" +- "bytes" +- "fmt" +- "strings" +- "unicode" +-) +- +-// Kind describes the nature of an identifier in an assembly file. +-type Kind uint8 +- +-const ( +- Invalid Kind = iota // reserved zero value; not used by Ident +- Ref // arbitrary reference to symbol or control label +- Text // definition of TEXT (function) symbol +- Global // definition of GLOBL (var) symbol +- Data // initialization of GLOBL (var) symbol; effectively a reference +- Label // definition of control label +-) +- +-func (k Kind) String() string { +- if int(k) < len(kindString) { +- return kindString[k] +- } +- return fmt.Sprintf("Kind(%d)", k) +-} +- +-var kindString = [...]string{ +- Invalid: "invalid", +- Ref: "ref", +- Text: "text", +- Global: "global", +- Data: "data", +- Label: "label", +-} +- +-// A file represents a parsed file of Go assembly language. 
+-type File struct { +- Idents []Ident +- +- // TODO(adonovan): use token.File? This may be important in a +- // future in which analyzers can report diagnostics in .s files. +-} +- +-// Ident represents an identifier in an assembly file. +-type Ident struct { +- Name string // symbol name (after correcting [·∕]); Name[0]='.' => current package +- Offset int // zero-based byte offset +- Kind Kind +-} +- +-// End returns the identifier's end offset. +-func (id Ident) End() int { return id.Offset + len(id.Name) } +- +-// Parse extracts identifiers from Go assembly files. +-// Since it is a best-effort parser, it never returns an error. +-func Parse(content []byte) *File { +- var idents []Ident +- offset := 0 // byte offset of start of current line +- +- // TODO(adonovan) use a proper tokenizer that respects +- // comments, string literals, line continuations, etc. +- scan := bufio.NewScanner(bytes.NewReader(content)) +- for ; scan.Scan(); offset += len(scan.Bytes()) + len("\n") { +- line := scan.Text() +- +- // Strip comments. +- if idx := strings.Index(line, "//"); idx >= 0 { +- line = line[:idx] +- } +- +- // Skip blank lines. +- if strings.TrimSpace(line) == "" { +- continue +- } +- +- // Check for label definitions (ending with colon). +- if colon := strings.IndexByte(line, ':'); colon > 0 { +- label := strings.TrimSpace(line[:colon]) +- if isIdent(label) { +- idents = append(idents, Ident{ +- Name: label, +- Offset: offset + strings.Index(line, label), +- Kind: Label, +- }) +- continue +- } +- } +- +- // Split line into words. +- words := strings.Fields(line) +- if len(words) == 0 { +- continue +- } +- +- // A line of the form +- // TEXT ·sym<ABIInternal>(SB),NOSPLIT,$12 +- // declares a text symbol "·sym". +- if len(words) > 1 { +- kind := Invalid +- switch words[0] { +- case "TEXT": +- kind = Text +- case "GLOBL": +- kind = Global +- case "DATA": +- kind = Data +- } +- if kind != Invalid { +- sym := words[1] +- sym = cutBefore(sym, ",") // strip ",NOSPLIT,$12" etc +- sym = cutBefore(sym, "(") // "sym(SB)" -> "sym" +- sym = cutBefore(sym, "<") // "sym<ABIInternal>" -> "sym" +- sym = strings.TrimSpace(sym) +- if isIdent(sym) { +- // (The Index call assumes sym is not itself "TEXT" etc.) +- idents = append(idents, Ident{ +- Name: cleanup(sym), +- Kind: kind, +- Offset: offset + strings.Index(line, sym), +- }) +- } +- continue +- } +- } +- +- // Find references in the rest of the line. +- pos := 0 +- for _, word := range words { +- // Find actual position of word within line. +- tokenPos := strings.Index(line[pos:], word) +- if tokenPos < 0 { +- panic(line) +- } +- tokenPos += pos +- pos = tokenPos + len(word) +- +- // Reject probable instruction mnemonics (e.g. MOV). +- if len(word) >= 2 && word[0] != '·' && +- !strings.ContainsFunc(word, unicode.IsLower) { +- continue +- } +- +- if word[0] == '$' { +- word = word[1:] +- tokenPos++ +- +- // Reject probable immediate values (e.g. "$123"). +- if !strings.ContainsFunc(word, isNonDigit) { +- continue +- } +- } +- +- // Reject probably registers (e.g. "PC"). +- if len(word) <= 3 && !strings.ContainsFunc(word, unicode.IsLower) { +- continue +- } +- +- // Probable identifier reference. +- // +- // TODO(adonovan): handle FP symbols correctly; +- // sym+8(FP) is essentially a comment about +- // stack slot 8, not a reference to a symbol +- // with a declaration somewhere; so they form +- // an equivalence class without a canonical +- // declaration. 
+- // +- // TODO(adonovan): handle pseudoregisters and field +- // references such as: +- // MOVD $runtime·g0(SB), g // pseudoreg +- // MOVD R0, g_stackguard0(g) // field ref +- +- sym := cutBefore(word, "(") // "·sym(SB)" => "sym" +- sym = cutBefore(sym, "+") // "sym+8(FP)" => "sym" +- sym = cutBefore(sym, "<") // "sym<ABIInternal>" =>> "sym" +- if isIdent(sym) { +- idents = append(idents, Ident{ +- Name: cleanup(sym), +- Kind: Ref, +- Offset: offset + tokenPos, +- }) +- } +- } +- } +- +- _ = scan.Err() // ignore scan errors +- +- return &File{Idents: idents} +-} +- +-// isIdent reports whether s is a valid Go assembly identifier. +-func isIdent(s string) bool { +- for i, r := range s { +- if !isIdentRune(r, i) { +- return false +- } +- } +- return len(s) > 0 +-} +- +-// cutBefore returns the portion of s before the first occurrence of sep, if any. +-func cutBefore(s, sep string) string { +- if before, _, ok := strings.Cut(s, sep); ok { +- return before +- } +- return s +-} +- +-// cleanup converts a symbol name from assembler syntax to linker syntax. +-func cleanup(sym string) string { +- return repl.Replace(sym) +-} +- +-var repl = strings.NewReplacer( +- "·", ".", // (U+00B7 MIDDLE DOT) +- "∕", "/", // (U+2215 DIVISION SLASH) +-) +- +-func isNonDigit(r rune) bool { return !unicode.IsDigit(r) } +- +-// -- plundered from GOROOT/src/cmd/asm/internal/asm/parse.go -- +- +-// We want center dot (·) and division slash (∕) to work as identifier characters. +-func isIdentRune(ch rune, i int) bool { +- if unicode.IsLetter(ch) { +- return true +- } +- switch ch { +- case '_': // Underscore; traditional. +- return true +- case '\u00B7': // Represents the period in runtime.exit. U+00B7 '·' middle dot +- return true +- case '\u2215': // Represents the slash in runtime/debug.setGCPercent. U+2215 '∕' division slash +- return true +- } +- // Digits are OK only after the first character. +- return i > 0 && unicode.IsDigit(ch) +-} +diff -urN a/gopls/internal/util/asm/parse_test.go b/gopls/internal/util/asm/parse_test.go +--- a/gopls/internal/util/asm/parse_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/util/asm/parse_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,67 +0,0 @@ +-// Copyright 2025 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package asm_test +- +-import ( +- "bytes" +- "fmt" +- "testing" +- +- "github.com/google/go-cmp/cmp" +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/gopls/internal/util/asm" +-) +- +-// TestIdents checks that (likely) identifiers are extracted in the expected places. +-func TestIdents(t *testing.T) { +- src := []byte(` +-// This is a nonsense file containing a variety of syntax. 
+- +-#include "foo.h" +-#ifdef MACRO +-DATA hello<>+0x00(SB)/64, $"Hello" +-GLOBL hello<(SB), RODATA, $64 +-#endif +- +-TEXT mypkg·f(SB),NOSPLIT,$0 +- MOVD R1, 16(RSP) // another comment +- MOVD $otherpkg·data(SB), R2 +- JMP label +-label: +- BL ·g(SB) +- +-TEXT ·g(SB),NOSPLIT,$0 +- MOVD $runtime·g0(SB), g +- MOVD R0, g_stackguard0(g) +- MOVD R0, (g_stack+stack_lo)(g) +-`[1:]) +- const filename = "asm.s" +- m := protocol.NewMapper(protocol.URIFromPath(filename), src) +- file := asm.Parse(src) +- +- want := ` +-asm.s:5:6-11: data "hello" +-asm.s:6:7-12: global "hello" +-asm.s:9:6-13: text "mypkg.f" +-asm.s:11:8-21: ref "otherpkg.data" +-asm.s:12:6-11: ref "label" +-asm.s:13:1-6: label "label" +-asm.s:14:5-7: ref ".g" +-asm.s:16:6-8: text ".g" +-asm.s:17:8-18: ref "runtime.g0" +-asm.s:17:25-26: ref "g" +-asm.s:18:11-24: ref "g_stackguard0" +-`[1:] +- var buf bytes.Buffer +- for _, id := range file.Idents { +- line, col := m.OffsetLineCol8(id.Offset) +- _, endCol := m.OffsetLineCol8(id.Offset + len(id.Name)) +- fmt.Fprintf(&buf, "%s:%d:%d-%d:\t%s %q\n", filename, line, col, endCol, id.Kind, id.Name) +- } +- got := buf.String() +- if got != want { +- t.Errorf("got:\n%s\nwant:\n%s\ndiff:\n%s", got, want, cmp.Diff(want, got)) +- } +-} +diff -urN a/gopls/internal/util/browser/browser.go b/gopls/internal/util/browser/browser.go +--- a/gopls/internal/util/browser/browser.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/util/browser/browser.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,67 +0,0 @@ +-// Copyright 2016 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-// Package browser provides utilities for interacting with users' browsers. +-package browser +- +-import ( +- "os" +- "os/exec" +- "runtime" +- "time" +-) +- +-// Commands returns a list of possible commands to use to open a url. +-func Commands() [][]string { +- var cmds [][]string +- if exe := os.Getenv("BROWSER"); exe != "" { +- cmds = append(cmds, []string{exe}) +- } +- switch runtime.GOOS { +- case "darwin": +- cmds = append(cmds, []string{"/usr/bin/open"}) +- case "windows": +- cmds = append(cmds, []string{"cmd", "/c", "start"}) +- default: +- if os.Getenv("DISPLAY") != "" { +- // xdg-open is only for use in a desktop environment. +- cmds = append(cmds, []string{"xdg-open"}) +- } +- } +- cmds = append(cmds, +- []string{"chrome"}, +- []string{"google-chrome"}, +- []string{"chromium"}, +- []string{"firefox"}, +- ) +- return cmds +-} +- +-// Open tries to open url in a browser and reports whether it succeeded. +-func Open(url string) bool { +- for _, args := range Commands() { +- cmd := exec.Command(args[0], append(args[1:], url)...) +- if cmd.Start() == nil && appearsSuccessful(cmd, 3*time.Second) { +- return true +- } +- } +- return false +-} +- +-// appearsSuccessful reports whether the command appears to have run successfully. +-// If the command runs longer than the timeout, it's deemed successful. +-// If the command runs within the timeout, it's deemed successful if it exited cleanly. 
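A minimal sketch of the removed browser helper in use (hypothetical caller; the package is internal to gopls, and the URL is only an example):

package main

import (
	"fmt"

	"golang.org/x/tools/gopls/internal/util/browser"
)

func main() {
	// Tries $BROWSER first, then platform-specific openers, then common browsers.
	if !browser.Open("https://go.dev/s/gopls-support-policy") {
		fmt.Println("could not open a browser; please open the URL manually")
	}
}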
+-func appearsSuccessful(cmd *exec.Cmd, timeout time.Duration) bool { +- errc := make(chan error, 1) +- go func() { +- errc <- cmd.Wait() +- }() +- +- select { +- case <-time.After(timeout): +- return true +- case err := <-errc: +- return err == nil +- } +-} +diff -urN a/gopls/internal/util/browser/README.md b/gopls/internal/util/browser/README.md +--- a/gopls/internal/util/browser/README.md 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/util/browser/README.md 1969-12-31 18:00:00.000000000 -0600 +@@ -1 +0,0 @@ +-This package is a copy of cmd/internal/browser from the go distribution +\ No newline at end of file +diff -urN a/gopls/internal/util/bug/bug.go b/gopls/internal/util/bug/bug.go +--- a/gopls/internal/util/bug/bug.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/util/bug/bug.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,145 +0,0 @@ +-// Copyright 2022 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-// Package bug provides utilities for reporting internal bugs, and being +-// notified when they occur. +-// +-// Philosophically, because gopls runs as a sidecar process that the user does +-// not directly control, sometimes it keeps going on broken invariants rather +-// than panicking. In those cases, bug reports provide a mechanism to alert +-// developers and capture relevant metadata. +-package bug +- +-import ( +- "fmt" +- "runtime" +- "runtime/debug" +- "sort" +- "sync" +- "time" +- +- "golang.org/x/telemetry/counter" +-) +- +-// PanicOnBugs controls whether to panic when bugs are reported. +-// +-// It may be set to true during testing. +-// +-// TODO(adonovan): should we make the default true, and +-// suppress it only in the product (gopls/main.go)? +-var PanicOnBugs = false +- +-var ( +- mu sync.Mutex +- exemplars map[string]Bug +- handlers []func(Bug) +-) +- +-// A Bug represents an unexpected event or broken invariant. They are used for +-// capturing metadata that helps us understand the event. +-// +-// Bugs are JSON-serializable. +-type Bug struct { +- File string // file containing the call to bug.Report +- Line int // line containing the call to bug.Report +- Description string // description of the bug +- Key string // key identifying the bug (file:line if available) +- Stack string // call stack +- AtTime time.Time // time the bug was reported +-} +- +-// Reportf reports a formatted bug message. +-func Reportf(format string, args ...any) { +- report(fmt.Sprintf(format, args...)) +-} +- +-// Errorf calls fmt.Errorf for the given arguments, and reports the resulting +-// error message as a bug. +-func Errorf(format string, args ...any) error { +- err := fmt.Errorf(format, args...) +- report(err.Error()) +- return err +-} +- +-// Report records a new bug encountered on the server. +-// It uses reflection to report the position of the immediate caller. +-func Report(description string) { +- report(description) +-} +- +-// BugReportCount is a telemetry counter that tracks # of bug reports. 
+-var BugReportCount = counter.NewStack("gopls/bug", 16) +- +-func report(description string) { +- _, file, line, ok := runtime.Caller(2) // all exported reporting functions call report directly +- +- key := "<missing callsite>" +- if ok { +- key = fmt.Sprintf("%s:%d", file, line) +- } +- +- if PanicOnBugs { +- panic(fmt.Sprintf("%s: %s", key, description)) +- } +- +- bug := Bug{ +- File: file, +- Line: line, +- Description: description, +- Key: key, +- Stack: string(debug.Stack()), +- AtTime: time.Now(), +- } +- +- newBug := false +- mu.Lock() +- if _, ok := exemplars[key]; !ok { +- if exemplars == nil { +- exemplars = make(map[string]Bug) +- } +- exemplars[key] = bug // capture one exemplar per key +- newBug = true +- } +- hh := handlers +- handlers = nil +- mu.Unlock() +- +- if newBug { +- BugReportCount.Inc() +- } +- // Call the handlers outside the critical section since a +- // handler may itself fail and call bug.Report. Since handlers +- // are one-shot, the inner call should be trivial. +- for _, handle := range hh { +- handle(bug) +- } +-} +- +-// Handle adds a handler function that will be called with the next +-// bug to occur on the server. The handler only ever receives one bug. +-// It is called synchronously, and should return in a timely manner. +-func Handle(h func(Bug)) { +- mu.Lock() +- defer mu.Unlock() +- handlers = append(handlers, h) +-} +- +-// List returns a slice of bug exemplars -- the first bugs to occur at each +-// callsite. +-func List() []Bug { +- mu.Lock() +- defer mu.Unlock() +- +- var bugs []Bug +- +- for _, bug := range exemplars { +- bugs = append(bugs, bug) +- } +- +- sort.Slice(bugs, func(i, j int) bool { +- return bugs[i].Key < bugs[j].Key +- }) +- +- return bugs +-} +diff -urN a/gopls/internal/util/bug/bug_test.go b/gopls/internal/util/bug/bug_test.go +--- a/gopls/internal/util/bug/bug_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/util/bug/bug_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,91 +0,0 @@ +-// Copyright 2022 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package bug +- +-import ( +- "encoding/json" +- "fmt" +- "testing" +- "time" +- +- "github.com/google/go-cmp/cmp" +-) +- +-func resetForTesting() { +- exemplars = nil +- handlers = nil +-} +- +-func TestListBugs(t *testing.T) { +- defer resetForTesting() +- +- Report("bad") +- +- wantBugs(t, "bad") +- +- for i := range 3 { +- Report(fmt.Sprintf("index:%d", i)) +- } +- +- wantBugs(t, "bad", "index:0") +-} +- +-func wantBugs(t *testing.T, want ...string) { +- t.Helper() +- +- bugs := List() +- if got, want := len(bugs), len(want); got != want { +- t.Errorf("List(): got %d bugs, want %d", got, want) +- return +- } +- +- for i, b := range bugs { +- if got, want := b.Description, want[i]; got != want { +- t.Errorf("bug.List()[%d] = %q, want %q", i, got, want) +- } +- } +-} +- +-func TestBugHandler(t *testing.T) { +- defer resetForTesting() +- +- Report("unseen") +- +- // Both handlers are called, in order of registration, only once. 
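A sketch of the reporting flow in the bug package above (hypothetical caller; the handler body and message text are assumptions):

package main

import (
	"fmt"

	"golang.org/x/tools/gopls/internal/util/bug"
)

func main() {
	// A handler receives at most one bug: the next one reported.
	bug.Handle(func(b bug.Bug) {
		fmt.Printf("bug at %s: %s\n", b.Key, b.Description)
	})

	bug.Reportf("broken invariant: %d != %d", 1, 2) // records an exemplar for this call site

	for _, b := range bug.List() { // one exemplar per call site, sorted by Key
		fmt.Println(b.Key, b.Description)
	}
}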
+- var got string +- Handle(func(b Bug) { got += "1:" + b.Description }) +- Handle(func(b Bug) { got += "2:" + b.Description }) +- +- Report("seen") +- +- Report("again") +- +- if want := "1:seen2:seen"; got != want { +- t.Errorf("got %q, want %q", got, want) +- } +-} +- +-func TestBugJSON(t *testing.T) { +- b1 := Bug{ +- File: "foo.go", +- Line: 1, +- Description: "a bug", +- Key: "foo.go:1", +- Stack: "<stack>", +- AtTime: time.Now(), +- } +- +- data, err := json.Marshal(b1) +- if err != nil { +- t.Fatal(err) +- } +- var b2 Bug +- if err := json.Unmarshal(data, &b2); err != nil { +- t.Fatal(err) +- } +- if diff := cmp.Diff(b1, b2); diff != "" { +- t.Errorf("bugs differ after JSON Marshal/Unmarshal (-b1 +b2):\n%s", diff) +- } +-} +diff -urN a/gopls/internal/util/constraints/constraint.go b/gopls/internal/util/constraints/constraint.go +--- a/gopls/internal/util/constraints/constraint.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/util/constraints/constraint.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,52 +0,0 @@ +-// Copyright 2021 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-// Package constraints defines a set of useful constraints to be used +-// with type parameters. +-package constraints +- +-// Copied from x/exp/constraints. +- +-// Signed is a constraint that permits any signed integer type. +-// If future releases of Go add new predeclared signed integer types, +-// this constraint will be modified to include them. +-type Signed interface { +- ~int | ~int8 | ~int16 | ~int32 | ~int64 +-} +- +-// Unsigned is a constraint that permits any unsigned integer type. +-// If future releases of Go add new predeclared unsigned integer types, +-// this constraint will be modified to include them. +-type Unsigned interface { +- ~uint | ~uint8 | ~uint16 | ~uint32 | ~uint64 | ~uintptr +-} +- +-// Integer is a constraint that permits any integer type. +-// If future releases of Go add new predeclared integer types, +-// this constraint will be modified to include them. +-type Integer interface { +- Signed | Unsigned +-} +- +-// Float is a constraint that permits any floating-point type. +-// If future releases of Go add new predeclared floating-point types, +-// this constraint will be modified to include them. +-type Float interface { +- ~float32 | ~float64 +-} +- +-// Complex is a constraint that permits any complex numeric type. +-// If future releases of Go add new predeclared complex numeric types, +-// this constraint will be modified to include them. +-type Complex interface { +- ~complex64 | ~complex128 +-} +- +-// Ordered is a constraint that permits any ordered type: any type +-// that supports the operators < <= >= >. +-// If future releases of Go add new ordered types, +-// this constraint will be modified to include them. +-type Ordered interface { +- Integer | Float | ~string +-} +diff -urN a/gopls/internal/util/cursorutil/util.go b/gopls/internal/util/cursorutil/util.go +--- a/gopls/internal/util/cursorutil/util.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/util/cursorutil/util.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,31 +0,0 @@ +-// Copyright 2025 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-// Package cursorutil provides utility functions for working with [inspector.Cursor]. 
+-package cursorutil +- +-import ( +- "go/ast" +- +- "golang.org/x/tools/go/ast/inspector" +-) +- +-// FirstEnclosing returns the first value from [cursor.Enclosing] as +-// both a designated type and a [inspector.Cursor] pointing to it. +-// +-// It returns the zero value if it is not found. +-// +-// A common usage is: +-// +-// call, callCur := cursorutil.FirstEnclosing[*ast.CallExpr](cur) +-// if call == nil { +-// // Not Found +-// } +-func FirstEnclosing[N ast.Node](cur inspector.Cursor) (N, inspector.Cursor) { +- var typ N +- for cur := range cur.Enclosing(typ) { +- return cur.Node().(N), cur +- } +- return typ, inspector.Cursor{} +-} +diff -urN a/gopls/internal/util/fakenet/conn.go b/gopls/internal/util/fakenet/conn.go +--- a/gopls/internal/util/fakenet/conn.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/util/fakenet/conn.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,129 +0,0 @@ +-// Copyright 2018 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package fakenet +- +-import ( +- "io" +- "net" +- "sync" +- "time" +-) +- +-// NewConn returns a net.Conn built on top of the supplied reader and writer. +-// It decouples the read and write on the conn from the underlying stream +-// to enable Close to abort ones that are in progress. +-// It's primary use is to fake a network connection from stdin and stdout. +-func NewConn(name string, in io.ReadCloser, out io.WriteCloser) net.Conn { +- c := &fakeConn{ +- name: name, +- reader: newFeeder(in.Read), +- writer: newFeeder(out.Write), +- in: in, +- out: out, +- } +- go c.reader.run() +- go c.writer.run() +- return c +-} +- +-type fakeConn struct { +- name string +- reader *connFeeder +- writer *connFeeder +- in io.ReadCloser +- out io.WriteCloser +-} +- +-type fakeAddr string +- +-// connFeeder serializes calls to the source function (io.Reader.Read or +-// io.Writer.Write) by delegating them to a channel. This also allows calls to +-// be intercepted when the connection is closed, and cancelled early if the +-// connection is closed while the calls are still outstanding. 
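A sketch of the intended use of fakenet.NewConn above, wrapping stdin/stdout as a net.Conn (hypothetical caller):

package main

import (
	"os"

	"golang.org/x/tools/gopls/internal/util/fakenet"
)

func main() {
	// Reads come from stdin, writes go to stdout; Close aborts both.
	conn := fakenet.NewConn("stdio", os.Stdin, os.Stdout)
	defer conn.Close()

	_, _ = conn.Write([]byte("hello\n"))
}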
+-type connFeeder struct { +- source func([]byte) (int, error) +- input chan []byte +- result chan feedResult +- mu sync.Mutex +- closed bool +- done chan struct{} +-} +- +-type feedResult struct { +- n int +- err error +-} +- +-func (c *fakeConn) Close() error { +- c.reader.close() +- c.writer.close() +- c.in.Close() +- c.out.Close() +- return nil +-} +- +-func (c *fakeConn) Read(b []byte) (n int, err error) { return c.reader.do(b) } +-func (c *fakeConn) Write(b []byte) (n int, err error) { return c.writer.do(b) } +-func (c *fakeConn) LocalAddr() net.Addr { return fakeAddr(c.name) } +-func (c *fakeConn) RemoteAddr() net.Addr { return fakeAddr(c.name) } +-func (c *fakeConn) SetDeadline(t time.Time) error { return nil } +-func (c *fakeConn) SetReadDeadline(t time.Time) error { return nil } +-func (c *fakeConn) SetWriteDeadline(t time.Time) error { return nil } +-func (a fakeAddr) Network() string { return "fake" } +-func (a fakeAddr) String() string { return string(a) } +- +-func newFeeder(source func([]byte) (int, error)) *connFeeder { +- return &connFeeder{ +- source: source, +- input: make(chan []byte), +- result: make(chan feedResult), +- done: make(chan struct{}), +- } +-} +- +-func (f *connFeeder) close() { +- f.mu.Lock() +- if !f.closed { +- f.closed = true +- close(f.done) +- } +- f.mu.Unlock() +-} +- +-func (f *connFeeder) do(b []byte) (n int, err error) { +- // send the request to the worker +- select { +- case f.input <- b: +- case <-f.done: +- return 0, io.EOF +- } +- // get the result from the worker +- select { +- case r := <-f.result: +- return r.n, r.err +- case <-f.done: +- return 0, io.EOF +- } +-} +- +-func (f *connFeeder) run() { +- var b []byte +- for { +- // wait for an input request +- select { +- case b = <-f.input: +- case <-f.done: +- return +- } +- // invoke the underlying method +- n, err := f.source(b) +- // send the result back to the requester +- select { +- case f.result <- feedResult{n: n, err: err}: +- case <-f.done: +- return +- } +- } +-} +diff -urN a/gopls/internal/util/fingerprint/fingerprint.go b/gopls/internal/util/fingerprint/fingerprint.go +--- a/gopls/internal/util/fingerprint/fingerprint.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/util/fingerprint/fingerprint.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,466 +0,0 @@ +-// Copyright 2025 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-// Package fingerprint defines a function to [Encode] types as strings +-// with the property that identical types have equal string encodings, +-// in most cases. In the remaining cases (mostly involving generic +-// types), the encodings can be parsed using [Parse] into [Tree] form +-// and matched using [Matches]. +-package fingerprint +- +-import ( +- "fmt" +- "go/types" +- "reflect" +- "strconv" +- "strings" +- "text/scanner" +-) +- +-// Encode returns an encoding of a [types.Type] such that, in +-// most cases, Encode(x) == Encode(y) iff [types.Identical](x, y). +-// +-// For a minority of types, mostly involving type parameters, identity +-// cannot be reduced to string comparison; these types are called +-// "tricky", and are indicated by the boolean result. +-// +-// In general, computing identity correctly for tricky types requires +-// the type checker. 
However, the fingerprint encoding can be parsed +-// by [Parse] into a [Tree] form that permits simple matching sufficient +-// to allow a type parameter to unify with any subtree; see [Match]. +-// +-// In the standard library, 99.8% of package-level types have a +-// non-tricky method-set. The most common exceptions are due to type +-// parameters. +-// +-// fingerprint.Encode is defined only for the signature types of functions +-// and methods. It must not be called for "untyped" basic types, nor +-// the type of a generic function. +-func Encode(t types.Type) (_ string, tricky bool) { return fingerprint(t) } +- +-// A Tree is a parsed form of a fingerprint for use with [Matches]. +-type Tree struct{ tree sexpr } +- +-// String returns the tree in an unspecified human-readable form. +-func (tree Tree) String() string { +- var out strings.Builder +- writeSexpr(&out, tree.tree) +- return out.String() +-} +- +-// Parse parses a fingerprint into tree form. +-// +-// The input must have been produced by [Encode] at the same source +-// version; parsing is thus infallible. +-func Parse(fp string) Tree { +- return Tree{parseFingerprint(fp)} +-} +- +-// Matches reports whether two fingerprint trees match, meaning that +-// under some conditions (for example, particular instantiations of +-// type parameters) the two types may be identical. +-func Matches(x, y Tree) bool { +- return unify(x.tree, y.tree) +-} +- +-// Fingerprint syntax +-// +-// The lexical syntax is essentially Lisp S-expressions: +-// +-// expr = STRING | INTEGER | IDENT | '(' expr... ')' +-// +-// where the tokens are as defined by text/scanner. +-// +-// The grammar of expression forms is: +-// +-// τ = IDENT -- named or basic type +-// | (qual STRING IDENT) -- qualified named type +-// | (array INTEGER τ) +-// | (slice τ) +-// | (ptr τ) +-// | (chan IDENT τ) +-// | (func τ v? τ) -- signature params, results, variadic? +-// | (map τ τ) +-// | (struct field*) +-// | (tuple τ*) +-// | (interface) -- nonempty interface (lossy) +-// | (typeparam INTEGER) +-// | (inst τ τ...) 
-- instantiation of a named type +-// +-// field = IDENT IDENT STRING τ -- name, embedded?, tag, type +- +-func fingerprint(t types.Type) (string, bool) { +- var buf strings.Builder +- tricky := false +- var print func(t types.Type) +- print = func(t types.Type) { +- switch t := t.(type) { +- case *types.Alias: +- print(types.Unalias(t)) +- +- case *types.Named: +- targs := t.TypeArgs() +- if targs != nil { +- buf.WriteString("(inst ") +- } +- tname := t.Obj() +- if tname.Pkg() != nil { +- fmt.Fprintf(&buf, "(qual %q %s)", tname.Pkg().Path(), tname.Name()) +- } else if tname.Name() != "error" && tname.Name() != "comparable" { +- panic(tname) // error and comparable the only named types with no package +- } else { +- buf.WriteString(tname.Name()) +- } +- if targs != nil { +- for i := range targs.Len() { +- buf.WriteByte(' ') +- print(targs.At(i)) +- } +- buf.WriteString(")") +- } +- +- case *types.Array: +- fmt.Fprintf(&buf, "(array %d ", t.Len()) +- print(t.Elem()) +- buf.WriteByte(')') +- +- case *types.Slice: +- buf.WriteString("(slice ") +- print(t.Elem()) +- buf.WriteByte(')') +- +- case *types.Pointer: +- buf.WriteString("(ptr ") +- print(t.Elem()) +- buf.WriteByte(')') +- +- case *types.Map: +- buf.WriteString("(map ") +- print(t.Key()) +- buf.WriteByte(' ') +- print(t.Elem()) +- buf.WriteByte(')') +- +- case *types.Chan: +- fmt.Fprintf(&buf, "(chan %d ", t.Dir()) +- print(t.Elem()) +- buf.WriteByte(')') +- +- case *types.Tuple: +- buf.WriteString("(tuple") +- for i := range t.Len() { +- buf.WriteByte(' ') +- print(t.At(i).Type()) +- } +- buf.WriteByte(')') +- +- case *types.Basic: +- // Print byte/uint8 as "byte" instead of calling +- // BasicType.String, which prints the two distinctly +- // (even though their Kinds are numerically equal). +- // Ditto for rune/int32. +- switch t.Kind() { +- case types.Byte: +- buf.WriteString("byte") +- case types.Rune: +- buf.WriteString("rune") +- case types.UnsafePointer: +- buf.WriteString(`(qual "unsafe" Pointer)`) +- default: +- if t.Info()&types.IsUntyped != 0 { +- panic("fingerprint of untyped type") +- } +- buf.WriteString(t.String()) +- } +- +- case *types.Signature: +- buf.WriteString("(func ") +- print(t.Params()) +- if t.Variadic() { +- buf.WriteString(" v") +- } +- buf.WriteByte(' ') +- print(t.Results()) +- buf.WriteByte(')') +- +- case *types.Struct: +- // Non-empty unnamed struct types in method +- // signatures are vanishingly rare. +- buf.WriteString("(struct") +- for i := range t.NumFields() { +- f := t.Field(i) +- name := f.Name() +- if !f.Exported() { +- name = fmt.Sprintf("(qual %q %s)", f.Pkg().Path(), name) +- } +- +- // This isn't quite right for embedded type aliases. +- // (See types.TypeString(StructType) and #44410 for context.) +- // But this is vanishingly rare. +- fmt.Fprintf(&buf, " %s %t %q ", name, f.Embedded(), t.Tag(i)) +- print(f.Type()) +- } +- buf.WriteByte(')') +- +- case *types.Interface: +- if t.NumMethods() == 0 { +- buf.WriteString("any") // common case +- } else { +- // Interface assignability is particularly +- // tricky due to the possibility of recursion. +- // However, nontrivial interface type literals +- // are exceedingly rare in function signatures. +- // +- // TODO(adonovan): add disambiguating precision +- // (e.g. number of methods, their IDs and arities) +- // as needs arise (i.e. collisions are observed). +- tricky = true +- buf.WriteString("(interface)") +- } +- +- case *types.TypeParam: +- // Matching of type parameters will require +- // parsing fingerprints and unification. 
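A sketch of the public surface above (Encode, Parse, Matches), using go/types to build two identical non-generic signatures; everything outside those three calls is an assumption:

package main

import (
	"fmt"
	"go/token"
	"go/types"

	"golang.org/x/tools/gopls/internal/util/fingerprint"
)

func main() {
	// func(int) string, built twice as distinct but identical types.
	mk := func() *types.Signature {
		params := types.NewTuple(types.NewVar(token.NoPos, nil, "", types.Typ[types.Int]))
		results := types.NewTuple(types.NewVar(token.NoPos, nil, "", types.Typ[types.String]))
		return types.NewSignatureType(nil, nil, nil, params, results, false)
	}
	fp1, tricky := fingerprint.Encode(mk())
	fp2, _ := fingerprint.Encode(mk())
	fmt.Println(fp1 == fp2, tricky) // identical non-tricky types share an encoding

	// For tricky (generic) cases, parse and match the trees instead.
	fmt.Println(fingerprint.Matches(fingerprint.Parse(fp1), fingerprint.Parse(fp2)))
}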
+- tricky = true +- fmt.Fprintf(&buf, "(%s %d)", symTypeparam, t.Index()) +- +- default: // incl. *types.Union +- panic(t) +- } +- } +- +- print(t) +- +- return buf.String(), tricky +-} +- +-// sexpr defines the representation of a fingerprint tree. +-type ( +- sexpr any // = string | int | symbol | *cons | nil +- symbol string +- cons struct{ car, cdr sexpr } +-) +- +-// parseFingerprint returns the type encoded by fp in tree form. +-// +-// The input must have been produced by [fingerprint] at the same +-// source version; parsing is thus infallible. +-func parseFingerprint(fp string) sexpr { +- var scan scanner.Scanner +- scan.Error = func(scan *scanner.Scanner, msg string) { panic(msg) } +- scan.Init(strings.NewReader(fp)) +- +- // next scans a token and updates tok. +- var tok rune +- next := func() { tok = scan.Scan() } +- +- next() +- +- // parse parses a fingerprint and returns its tree. +- var parse func() sexpr +- parse = func() sexpr { +- if tok == '(' { +- next() // consume '(' +- var head sexpr // empty list +- tailcdr := &head +- for tok != ')' { +- cell := &cons{car: parse()} +- *tailcdr = cell +- tailcdr = &cell.cdr +- } +- next() // consume ')' +- return head +- } +- +- s := scan.TokenText() +- switch tok { +- case scanner.Ident: +- next() // consume IDENT +- return symbol(s) +- +- case scanner.Int: +- next() // consume INT +- i, err := strconv.Atoi(s) +- if err != nil { +- panic(err) +- } +- return i +- +- case scanner.String: +- next() // consume STRING +- s, err := strconv.Unquote(s) +- if err != nil { +- panic(err) +- } +- return s +- +- default: +- panic(tok) +- } +- } +- +- return parse() +-} +- +-// writeSexpr formats an S-expression. +-// It is provided for debugging. +-func writeSexpr(out *strings.Builder, x sexpr) { +- switch x := x.(type) { +- case nil: +- out.WriteString("()") +- case string: +- fmt.Fprintf(out, "%q", x) +- case int: +- fmt.Fprintf(out, "%d", x) +- case symbol: +- out.WriteString(string(x)) +- case *cons: +- out.WriteString("(") +- for { +- writeSexpr(out, x.car) +- if x.cdr == nil { +- break +- } else if cdr, ok := x.cdr.(*cons); ok { +- x = cdr +- out.WriteByte(' ') +- } else { +- // Dotted list: should never happen, +- // but support it for debugging. +- out.WriteString(" . ") +- print(x.cdr) +- break +- } +- } +- out.WriteString(")") +- default: +- panic(x) +- } +-} +- +-// unify reports whether x and y match, in the presence of type parameters. +-// The constraints on type parameters are ignored, but each type parameter must +-// have a consistent binding. +-func unify(x, y sexpr) bool { +- +- // maxTypeParam returns the maximum type parameter index in x. +- var maxTypeParam func(x sexpr) int +- maxTypeParam = func(x sexpr) int { +- if i := typeParamIndex(x); i >= 0 { +- return i +- } +- if c, ok := x.(*cons); ok { +- return max(maxTypeParam(c.car), maxTypeParam(c.cdr)) +- } +- return -1 +- } +- +- // xBindings[i] is the binding for type parameter #i in x, and similarly for y. +- // Although type parameters are nominally bound to sexprs, each bindings[i] +- // is a *sexpr, so unbound variables can share a binding. +- xBindings := make([]*sexpr, maxTypeParam(x)+1) +- for i := range len(xBindings) { +- xBindings[i] = new(sexpr) +- } +- yBindings := make([]*sexpr, maxTypeParam(y)+1) +- for i := range len(yBindings) { +- yBindings[i] = new(sexpr) +- } +- +- // bind sets binding b to s from bindings if it does not occur in s. +- bind := func(b *sexpr, s sexpr, bindings []*sexpr) bool { +- // occurs reports whether b is present in s. 
+- var occurs func(s sexpr) bool +- occurs = func(s sexpr) bool { +- if j := typeParamIndex(s); j >= 0 { +- return b == bindings[j] +- } +- if c, ok := s.(*cons); ok { +- return occurs(c.car) || occurs(c.cdr) +- } +- return false +- } +- +- if occurs(s) { +- return false +- } +- *b = s +- return true +- } +- +- var uni func(x, y sexpr) bool +- uni = func(x, y sexpr) bool { +- var bx, by *sexpr +- ix := typeParamIndex(x) +- if ix >= 0 { +- bx = xBindings[ix] +- } +- iy := typeParamIndex(y) +- if iy >= 0 { +- by = yBindings[iy] +- } +- +- if bx != nil || by != nil { +- // If both args are type params and neither is bound, have them share a binding. +- if bx != nil && by != nil && *bx == nil && *by == nil { +- xBindings[ix] = yBindings[iy] +- return true +- } +- // Treat param bindings like original args in what follows. +- if bx != nil && *bx != nil { +- x = *bx +- } +- if by != nil && *by != nil { +- y = *by +- } +- // If the x param is unbound, bind it to y. +- if bx != nil && *bx == nil { +- return bind(bx, y, yBindings) +- } +- // If the y param is unbound, bind it to x. +- if by != nil && *by == nil { +- return bind(by, x, xBindings) +- } +- // Unify the binding of a bound parameter. +- return uni(x, y) +- } +- +- // Neither arg is a type param. +- if reflect.TypeOf(x) != reflect.TypeOf(y) { +- return false // type mismatch +- } +- switch x := x.(type) { +- case nil, string, int, symbol: +- return x == y +- case *cons: +- y := y.(*cons) +- if !uni(x.car, y.car) { +- return false +- } +- if x.cdr == nil { +- return y.cdr == nil +- } +- if y.cdr == nil { +- return false +- } +- return uni(x.cdr, y.cdr) +- default: +- panic(fmt.Sprintf("unify %T %T", x, y)) +- } +- } +- // At least one param is bound. Unify its binding with the other. +- return uni(x, y) +-} +- +-// typeParamIndex returns the index of the type parameter, +-// if x has the form "(typeparam INTEGER)", otherwise -1. +-func typeParamIndex(x sexpr) int { +- if x, ok := x.(*cons); ok { +- if sym, ok := x.car.(symbol); ok && sym == symTypeparam { +- return x.cdr.(*cons).car.(int) +- } +- } +- return -1 +-} +- +-const symTypeparam = "typeparam" +diff -urN a/gopls/internal/util/fingerprint/fingerprint_test.go b/gopls/internal/util/fingerprint/fingerprint_test.go +--- a/gopls/internal/util/fingerprint/fingerprint_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/util/fingerprint/fingerprint_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,204 +0,0 @@ +-// Copyright 2025 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package fingerprint_test +- +-import ( +- "go/types" +- "testing" +- +- "golang.org/x/tools/go/packages" +- "golang.org/x/tools/go/types/typeutil" +- "golang.org/x/tools/gopls/internal/util/fingerprint" +- "golang.org/x/tools/internal/testfiles" +- "golang.org/x/tools/txtar" +-) +- +-// Test runs the fingerprint encoder, decoder, and printer +-// on the types of all package-level symbols in gopls, and ensures +-// that parse+print is lossless. +-func Test(t *testing.T) { +- if testing.Short() { +- t.Skip("skipping slow test") +- } +- +- cfg := &packages.Config{Mode: packages.NeedTypes} +- pkgs, err := packages.Load(cfg, "std", "golang.org/x/tools/gopls/...") +- if err != nil { +- t.Fatal(err) +- } +- +- // Record the fingerprint of each logical type (equivalence +- // class of types.Types) and assert that they are all equal. +- // (Non-tricky types only.) 
+- var fingerprints typeutil.Map +- +- for _, pkg := range pkgs { +- switch pkg.Types.Path() { +- case "unsafe", "builtin": +- continue +- } +- scope := pkg.Types.Scope() +- for _, name := range scope.Names() { +- obj := scope.Lookup(name) +- typ := obj.Type() +- +- if basic, ok := typ.(*types.Basic); ok && +- basic.Info()&types.IsUntyped != 0 { +- continue // untyped constant +- } +- +- fp, tricky := fingerprint.Encode(typ) // check Type encoder doesn't panic +- +- // All equivalent (non-tricky) types have the same fingerprint. +- if !tricky { +- if prevfp, ok := fingerprints.At(typ).(string); !ok { +- fingerprints.Set(typ, fp) +- } else if fp != prevfp { +- t.Errorf("inconsistent fingerprints for type %v:\n- old: %s\n- new: %s", +- typ, fp, prevfp) +- } +- } +- +- tree := fingerprint.Parse(fp) // check parser doesn't panic +- fp2 := tree.String() // check formatter doesn't pannic +- +- // A parse+print round-trip should be lossless. +- if fp != fp2 { +- t.Errorf("%s: %v: parse+print changed fingerprint:\n"+ +- "was: %s\ngot: %s\ntype: %v", +- pkg.Fset.Position(obj.Pos()), obj, fp, fp2, typ) +- } +- } +- } +-} +- +-// TestMatches exercises the matching algorithm for generic types. +-func TestMatches(t *testing.T) { +- const src = ` +--- go.mod -- +-module example.com +-go 1.24 +- +--- a/a.go -- +-package a +- +-type Int = int +-type String = string +- +-// Eq.Equal matches casefold.Equal. +-type Eq[T any] interface { Equal(T, T) bool } +-type casefold struct{} +-func (casefold) Equal(x, y string) bool +- +-// A matches AString. +-type A[T any] = struct { x T } +-type AString = struct { x string } +- +-// B matches anything! +-type B[T any] = T +- +-func C1[T any](int, T, ...string) T { panic(0) } +-func C2[U any](int, int, ...U) bool { panic(0) } +-func C3(int, bool, ...string) rune +-func C4(int, bool, ...string) +-func C5(int, float64, bool, string) bool +-func C6(int, bool, ...string) bool +- +-func DAny[T any](Named[T]) { panic(0) } +-func DString(Named[string]) +-func DInt(Named[int]) +- +-type Named[T any] struct { x T } +- +-func E1(byte) rune +-func E2(uint8) int32 +-func E3(int8) uint32 +- +-// generic vs. generic +-func F1[T any](T) { panic(0) } +-func F2[T any](*T) { panic(0) } +-func F3[T any](T, T) { panic(0) } +-func F4[U any](U, *U) { panic(0) } +-func F5[T, U any](T, U, U) { panic(0) } +-func F6[T any](T, int, T) { panic(0) } +-func F7[T any](bool, T, T) { panic(0) } +-func F8[V any](*V, int, int) { panic(0) } +-func F9[V any](V, *V, V) { panic(0) } +-` +- pkg := testfiles.LoadPackages(t, txtar.Parse([]byte(src)), "./a")[0] +- scope := pkg.Types.Scope() +- for _, test := range []struct { +- a, b string +- method string // optional field or method +- want bool +- }{ +- {"Eq", "casefold", "Equal", true}, +- {"A", "AString", "", true}, +- {"A", "Eq", "", false}, // completely unrelated +- {"B", "String", "", true}, +- {"B", "Int", "", true}, +- {"B", "A", "", true}, +- {"C1", "C2", "", false}, +- {"C1", "C3", "", false}, +- {"C1", "C4", "", false}, +- {"C1", "C5", "", false}, +- {"C1", "C6", "", true}, +- {"C2", "C3", "", false}, +- {"C2", "C4", "", false}, +- {"C3", "C4", "", false}, +- {"DAny", "DString", "", true}, +- {"DAny", "DInt", "", true}, +- {"DString", "DInt", "", false}, // different instantiations of Named +- {"E1", "E2", "", true}, // byte and rune are just aliases +- {"E2", "E3", "", false}, +- // The following tests cover all of the type param cases of unify. 
+- {"F1", "F2", "", true}, // F1[*int] = F2[int] +- {"F3", "F4", "", false}, // would require U identical to *U, prevented by occur check +- {"F5", "F6", "", true}, // one param is bound, the other is not +- {"F6", "F7", "", false}, // both are bound +- {"F5", "F8", "", true}, // T=*int, U=int, V=int +- {"F5", "F9", "", false}, // T is unbound, V is bound, and T occurs in V +- } { +- lookup := func(name string) types.Type { +- obj := scope.Lookup(name) +- if obj == nil { +- t.Fatalf("Lookup %s failed", name) +- } +- if test.method != "" { +- obj, _, _ = types.LookupFieldOrMethod(obj.Type(), true, pkg.Types, test.method) +- if obj == nil { +- t.Fatalf("Lookup %s.%s failed", name, test.method) +- } +- } +- return obj.Type() +- } +- +- check := func(sa, sb string, want bool) { +- t.Helper() +- +- a := lookup(sa) +- b := lookup(sb) +- +- afp, _ := fingerprint.Encode(a) +- bfp, _ := fingerprint.Encode(b) +- +- atree := fingerprint.Parse(afp) +- btree := fingerprint.Parse(bfp) +- +- got := fingerprint.Matches(atree, btree) +- if got != want { +- t.Errorf("a=%s b=%s method=%s: unify returned %t for these inputs:\n- %s\n- %s", +- sa, sb, test.method, got, a, b) +- } +- } +- +- check(test.a, test.b, test.want) +- // Matches is symmetric +- check(test.b, test.a, test.want) +- // Matches is reflexive +- check(test.a, test.a, true) +- check(test.b, test.b, true) +- } +-} +diff -urN a/gopls/internal/util/frob/frob.go b/gopls/internal/util/frob/frob.go +--- a/gopls/internal/util/frob/frob.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/util/frob/frob.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,402 +0,0 @@ +-// Copyright 2023 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-// Package frob is a fast restricted object encoder/decoder in the +-// spirit of encoding/gob. +-// +-// As with gob, types that recursively contain functions, channels, +-// and unsafe.Pointers cannot be encoded, but frob has these +-// additional restrictions: +-// +-// - Interface values are not supported; this avoids the need for +-// the encoding to describe types. +-// +-// - Private struct fields are ignored. +-// +-// - The encoding is unspecified and subject to change, so the encoder +-// and decoder must exactly agree on their implementation and on the +-// definitions of the target types. +-// +-// - Lengths (of arrays, slices, and maps) are currently assumed to +-// fit in 32 bits. +-// +-// - There is no error handling. All errors are reported by panicking. +-// +-// - Values are serialized as trees, not graphs, so shared subgraphs +-// are encoded repeatedly. +-// +-// - No attempt is made to detect cyclic data structures. +-package frob +- +-import ( +- "encoding/binary" +- "fmt" +- "math" +- "reflect" +- "sync" +-) +- +-// A Codec[T] is an immutable encoder and decoder for values of type T. +-type Codec[T any] struct{ frob *frob } +- +-// CodecFor[T] returns a codec for values of type T. +-// It panics if type T is unsuitable. +-func CodecFor[T any]() Codec[T] { +- frobsMu.Lock() +- defer frobsMu.Unlock() +- return Codec[T]{frobFor(reflect.TypeOf((*T)(nil)).Elem())} +-} +- +-func (codec Codec[T]) Encode(v T) []byte { return codec.frob.Encode(v) } +-func (codec Codec[T]) Decode(data []byte, ptr *T) { codec.frob.Decode(data, ptr) } +- +-var ( +- frobsMu sync.Mutex +- frobs = make(map[reflect.Type]*frob) +-) +- +-// A frob is an encoder/decoder for a specific type. 
+-type frob struct { +- t reflect.Type +- kind reflect.Kind +- elems []*frob // elem (array/slice/ptr), key+value (map), fields (struct) +-} +- +-// frobFor returns the frob for a particular type. +-// Precondition: caller holds frobsMu. +-func frobFor(t reflect.Type) *frob { +- fr, ok := frobs[t] +- if !ok { +- fr = &frob{t: t, kind: t.Kind()} +- frobs[t] = fr +- +- switch fr.kind { +- case reflect.Bool, +- reflect.Int, +- reflect.Int8, +- reflect.Int16, +- reflect.Int32, +- reflect.Int64, +- reflect.Uint, +- reflect.Uint8, +- reflect.Uint16, +- reflect.Uint32, +- reflect.Uint64, +- reflect.Uintptr, +- reflect.Float32, +- reflect.Float64, +- reflect.Complex64, +- reflect.Complex128, +- reflect.String: +- +- case reflect.Array, +- reflect.Slice, +- reflect.Pointer: +- fr.addElem(fr.t.Elem()) +- +- case reflect.Map: +- fr.addElem(fr.t.Key()) +- fr.addElem(fr.t.Elem()) +- +- case reflect.Struct: +- for i := 0; i < fr.t.NumField(); i++ { +- field := fr.t.Field(i) +- if field.PkgPath != "" { +- continue // skip unexported field +- } +- fr.addElem(field.Type) +- } +- +- default: +- // chan, func, interface, unsafe.Pointer +- panic(fmt.Sprintf("type %v is not supported by frob", fr.t)) +- } +- } +- return fr +-} +- +-func (fr *frob) addElem(t reflect.Type) { +- fr.elems = append(fr.elems, frobFor(t)) +-} +- +-const magic = "frob" +- +-func (fr *frob) Encode(v any) []byte { +- rv := reflect.ValueOf(v) +- if rv.Type() != fr.t { +- panic(fmt.Sprintf("got %v, want %v", rv.Type(), fr.t)) +- } +- w := &writer{} +- w.bytes([]byte(magic)) +- fr.encode(w, rv) +- if uint64(len(w.data))>>32 != 0 { +- panic("too large") // includes all cases where len doesn't fit in 32 bits +- } +- return w.data +-} +- +-// encode appends the encoding of value v, whose type must be fr.t. 
+-func (fr *frob) encode(out *writer, v reflect.Value) { +- switch fr.kind { +- case reflect.Bool: +- var b byte +- if v.Bool() { +- b = 1 +- } +- out.uint8(b) +- case reflect.Int: +- out.uint64(uint64(v.Int())) +- case reflect.Int8: +- out.uint8(uint8(v.Int())) +- case reflect.Int16: +- out.uint16(uint16(v.Int())) +- case reflect.Int32: +- out.uint32(uint32(v.Int())) +- case reflect.Int64: +- out.uint64(uint64(v.Int())) +- case reflect.Uint: +- out.uint64(v.Uint()) +- case reflect.Uint8: +- out.uint8(uint8(v.Uint())) +- case reflect.Uint16: +- out.uint16(uint16(v.Uint())) +- case reflect.Uint32: +- out.uint32(uint32(v.Uint())) +- case reflect.Uint64: +- out.uint64(v.Uint()) +- case reflect.Uintptr: +- out.uint64(v.Uint()) +- case reflect.Float32: +- out.uint32(math.Float32bits(float32(v.Float()))) +- case reflect.Float64: +- out.uint64(math.Float64bits(v.Float())) +- case reflect.Complex64: +- z := complex64(v.Complex()) +- out.uint32(math.Float32bits(real(z))) +- out.uint32(math.Float32bits(imag(z))) +- case reflect.Complex128: +- z := v.Complex() +- out.uint64(math.Float64bits(real(z))) +- out.uint64(math.Float64bits(imag(z))) +- +- case reflect.Array: +- len := v.Type().Len() +- elem := fr.elems[0] +- for i := range len { +- elem.encode(out, v.Index(i)) +- } +- +- case reflect.Slice: +- len := v.Len() +- out.uint32(uint32(len)) +- if len > 0 { +- elem := fr.elems[0] +- if elem.kind == reflect.Uint8 { +- // []byte fast path +- out.bytes(v.Bytes()) +- } else { +- for i := range len { +- elem.encode(out, v.Index(i)) +- } +- } +- } +- +- case reflect.Map: +- len := v.Len() +- out.uint32(uint32(len)) +- if len > 0 { +- kfrob, vfrob := fr.elems[0], fr.elems[1] +- for iter := v.MapRange(); iter.Next(); { +- kfrob.encode(out, iter.Key()) +- vfrob.encode(out, iter.Value()) +- } +- } +- +- case reflect.Pointer: +- if v.IsNil() { +- out.uint8(0) +- } else { +- out.uint8(1) +- fr.elems[0].encode(out, v.Elem()) +- } +- +- case reflect.String: +- len := v.Len() +- out.uint32(uint32(len)) +- if len > 0 { +- out.data = append(out.data, v.String()...) +- } +- +- case reflect.Struct: +- for i, elem := range fr.elems { +- elem.encode(out, v.Field(i)) +- } +- +- default: +- panic(fr.t) +- } +-} +- +-func (fr *frob) Decode(data []byte, ptr any) { +- rv := reflect.ValueOf(ptr).Elem() +- if rv.Type() != fr.t { +- panic(fmt.Sprintf("got %v, want %v", rv.Type(), fr.t)) +- } +- rd := &reader{data} +- if len(data) < len(magic) || string(rd.bytes(len(magic))) != magic { +- panic("not a frob-encoded message") // (likely an empty message) +- } +- fr.decode(rd, rv) +- if len(rd.data) > 0 { +- panic("surplus bytes") +- } +-} +- +-// decode reads from in, decodes a value, and sets addr to it. +-// addr must be a zero-initialized addressable variable of type fr.t. 
+-func (fr *frob) decode(in *reader, addr reflect.Value) { +- switch fr.kind { +- case reflect.Bool: +- addr.SetBool(in.uint8() != 0) +- case reflect.Int: +- addr.SetInt(int64(in.uint64())) +- case reflect.Int8: +- addr.SetInt(int64(in.uint8())) +- case reflect.Int16: +- addr.SetInt(int64(in.uint16())) +- case reflect.Int32: +- addr.SetInt(int64(in.uint32())) +- case reflect.Int64: +- addr.SetInt(int64(in.uint64())) +- case reflect.Uint: +- addr.SetUint(in.uint64()) +- case reflect.Uint8: +- addr.SetUint(uint64(in.uint8())) +- case reflect.Uint16: +- addr.SetUint(uint64(in.uint16())) +- case reflect.Uint32: +- addr.SetUint(uint64(in.uint32())) +- case reflect.Uint64: +- addr.SetUint(in.uint64()) +- case reflect.Uintptr: +- addr.SetUint(in.uint64()) +- case reflect.Float32: +- addr.SetFloat(float64(math.Float32frombits(in.uint32()))) +- case reflect.Float64: +- addr.SetFloat(math.Float64frombits(in.uint64())) +- case reflect.Complex64: +- addr.SetComplex(complex128(complex( +- math.Float32frombits(in.uint32()), +- math.Float32frombits(in.uint32()), +- ))) +- case reflect.Complex128: +- addr.SetComplex(complex( +- math.Float64frombits(in.uint64()), +- math.Float64frombits(in.uint64()), +- )) +- +- case reflect.Array: +- len := fr.t.Len() +- for i := range len { +- fr.elems[0].decode(in, addr.Index(i)) +- } +- +- case reflect.Slice: +- len := int(in.uint32()) +- if len > 0 { +- elem := fr.elems[0] +- if elem.kind == reflect.Uint8 { +- // []byte fast path +- // (Not addr.SetBytes: we must make a copy.) +- addr.Set(reflect.AppendSlice(addr, reflect.ValueOf(in.bytes(len)))) +- } else { +- addr.Set(reflect.MakeSlice(fr.t, len, len)) +- for i := range len { +- elem.decode(in, addr.Index(i)) +- } +- } +- } +- +- case reflect.Map: +- len := int(in.uint32()) +- if len > 0 { +- m := reflect.MakeMapWithSize(fr.t, len) +- addr.Set(m) +- kfrob, vfrob := fr.elems[0], fr.elems[1] +- k := reflect.New(kfrob.t).Elem() +- v := reflect.New(vfrob.t).Elem() +- for range len { +- k.SetZero() +- v.SetZero() +- kfrob.decode(in, k) +- vfrob.decode(in, v) +- m.SetMapIndex(k, v) +- } +- } +- +- case reflect.Pointer: +- isNil := in.uint8() == 0 +- if !isNil { +- ptr := reflect.New(fr.elems[0].t) +- addr.Set(ptr) +- fr.elems[0].decode(in, ptr.Elem()) +- } +- +- case reflect.String: +- len := int(in.uint32()) +- if len > 0 { +- addr.SetString(string(in.bytes(len))) +- } +- +- case reflect.Struct: +- for i, elem := range fr.elems { +- elem.decode(in, addr.Field(i)) +- } +- +- default: +- panic(fr.t) +- } +-} +- +-var le = binary.LittleEndian +- +-type reader struct{ data []byte } +- +-func (r *reader) uint8() uint8 { +- v := r.data[0] +- r.data = r.data[1:] +- return v +-} +- +-func (r *reader) uint16() uint16 { +- v := le.Uint16(r.data) +- r.data = r.data[2:] +- return v +-} +- +-func (r *reader) uint32() uint32 { +- v := le.Uint32(r.data) +- r.data = r.data[4:] +- return v +-} +- +-func (r *reader) uint64() uint64 { +- v := le.Uint64(r.data) +- r.data = r.data[8:] +- return v +-} +- +-func (r *reader) bytes(n int) []byte { +- v := r.data[:n] +- r.data = r.data[n:] +- return v +-} +- +-type writer struct{ data []byte } +- +-func (w *writer) uint8(v uint8) { w.data = append(w.data, v) } +-func (w *writer) uint16(v uint16) { w.data = le.AppendUint16(w.data, v) } +-func (w *writer) uint32(v uint32) { w.data = le.AppendUint32(w.data, v) } +-func (w *writer) uint64(v uint64) { w.data = le.AppendUint64(w.data, v) } +-func (w *writer) bytes(v []byte) { w.data = append(w.data, v...) 
} +diff -urN a/gopls/internal/util/frob/frob_test.go b/gopls/internal/util/frob/frob_test.go +--- a/gopls/internal/util/frob/frob_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/util/frob/frob_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,119 +0,0 @@ +-// Copyright 2023 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package frob_test +- +-import ( +- "math" +- "reflect" +- "testing" +- +- "golang.org/x/tools/gopls/internal/util/frob" +-) +- +-func TestBasics(t *testing.T) { +- type Basics struct { +- A []*string +- B [2]int +- C *Basics +- D map[string]int +- E []byte +- F []string +- } +- codec := frob.CodecFor[Basics]() +- +- s1, s2 := "hello", "world" +- x := Basics{ +- A: []*string{&s1, nil, &s2}, +- B: [...]int{1, 2}, +- C: &Basics{ +- B: [...]int{3, 4}, +- D: map[string]int{"one": 1}, +- }, +- E: []byte("hello"), +- F: []string{s1, s2}, +- } +- var y Basics +- codec.Decode(codec.Encode(x), &y) +- if !reflect.DeepEqual(x, y) { +- t.Fatalf("bad roundtrip: got %#v, want %#v", y, x) +- } +-} +- +-func TestInts(t *testing.T) { +- type Ints struct { +- U uint +- U8 uint8 +- U16 uint16 +- U32 uint32 +- U64 uint64 +- UP uintptr +- I int +- I8 int8 +- I16 int16 +- I32 int32 +- I64 int64 +- F32 float32 +- F64 float64 +- C64 complex64 +- C128 complex128 +- } +- codec := frob.CodecFor[Ints]() +- +- // maxima +- max1 := Ints{ +- U: math.MaxUint, +- U8: math.MaxUint8, +- U16: math.MaxUint16, +- U32: math.MaxUint32, +- U64: math.MaxUint64, +- UP: math.MaxUint, +- I: math.MaxInt, +- I8: math.MaxInt8, +- I16: math.MaxInt16, +- I32: math.MaxInt32, +- I64: math.MaxInt64, +- F32: math.MaxFloat32, +- F64: math.MaxFloat64, +- C64: complex(math.MaxFloat32, math.MaxFloat32), +- C128: complex(math.MaxFloat64, math.MaxFloat64), +- } +- var max2 Ints +- codec.Decode(codec.Encode(max1), &max2) +- if !reflect.DeepEqual(max1, max2) { +- t.Fatalf("max: bad roundtrip: got %#v, want %#v", max2, max1) +- } +- +- // minima +- min1 := Ints{ +- I: math.MinInt, +- I8: math.MinInt8, +- I16: math.MinInt16, +- I32: math.MinInt32, +- I64: math.MinInt64, +- F32: -math.MaxFloat32, +- F64: -math.MaxFloat32, +- C64: complex(-math.MaxFloat32, -math.MaxFloat32), +- C128: complex(-math.MaxFloat64, -math.MaxFloat64), +- } +- var min2 Ints +- codec.Decode(codec.Encode(min1), &min2) +- if !reflect.DeepEqual(min1, min2) { +- t.Fatalf("min: bad roundtrip: got %#v, want %#v", min2, min1) +- } +- +- // negatives (other than MinInt), to exercise conversions +- neg1 := Ints{ +- I: -1, +- I8: -1, +- I16: -1, +- I32: -1, +- I64: -1, +- } +- var neg2 Ints +- codec.Decode(codec.Encode(neg1), &neg2) +- if !reflect.DeepEqual(neg1, neg2) { +- t.Fatalf("neg: bad roundtrip: got %#v, want %#v", neg2, neg1) +- } +-} +diff -urN a/gopls/internal/util/goversion/goversion.go b/gopls/internal/util/goversion/goversion.go +--- a/gopls/internal/util/goversion/goversion.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/util/goversion/goversion.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,95 +0,0 @@ +-// Copyright 2023 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-// Package goversions defines gopls's policy for which versions of Go it supports. +-package goversion +- +-import ( +- "fmt" +- "strings" +-) +- +-// Support holds information about end-of-life Go version support. +-// +-// Exposed for testing. 
+-type Support struct { +- // GoVersion is the Go version to which these settings relate. +- GoVersion int +- +- // DeprecatedVersion is the first version of gopls that no longer supports +- // this Go version. +- // +- // If unset, the version is already deprecated. +- DeprecatedVersion string +- +- // InstallGoplsVersion is the latest gopls version that supports this Go +- // version without warnings. +- InstallGoplsVersion string +-} +- +-// Supported maps Go versions to the gopls version in which support will +-// be deprecated, and the final gopls version supporting them without warnings. +-// Keep this in sync with gopls/doc/index.md. +-// +-// Must be sorted in ascending order of Go version. +-// +-// Exposed (and mutable) for testing. +-var Supported = []Support{ +- {12, "", "v0.7.5"}, +- {15, "", "v0.9.5"}, +- {16, "", "v0.11.0"}, +- {17, "", "v0.11.0"}, +- {18, "", "v0.14.2"}, +- {19, "v0.17.0", "v0.15.3"}, +- {20, "v0.17.0", "v0.15.3"}, +-} +- +-// OldestSupported is the last X in Go 1.X that this version of gopls +-// supports without warnings. +-// +-// Exported for testing. +-func OldestSupported() int { +- return Supported[len(Supported)-1].GoVersion + 1 +-} +- +-// Message returns the message to display if the user has the given Go +-// version, if any. The goVersion variable is the X in Go 1.X. If +-// fromBuild is set, the Go version is the version used to build +-// gopls. Otherwise, it is the go command version. +-// +-// The second component of the result indicates whether the message is +-// an error, not a mere warning. +-// +-// If goVersion is invalid (< 0), it returns "", false. +-func Message(goVersion int, fromBuild bool) (string, bool) { +- if goVersion < 0 { +- return "", false +- } +- +- for _, v := range Supported { +- if goVersion <= v.GoVersion { +- var msgBuilder strings.Builder +- +- isError := true +- if fromBuild { +- fmt.Fprintf(&msgBuilder, "Gopls was built with Go version 1.%d", goVersion) +- } else { +- fmt.Fprintf(&msgBuilder, "Found Go version 1.%d", goVersion) +- } +- if v.DeprecatedVersion != "" { +- // not deprecated yet, just a warning +- fmt.Fprintf(&msgBuilder, ", which will be unsupported by gopls %s. ", v.DeprecatedVersion) +- isError = false // warning +- } else { +- fmt.Fprint(&msgBuilder, ", which is not supported by this version of gopls. ") +- } +- fmt.Fprintf(&msgBuilder, "Please upgrade to Go 1.%d or later and reinstall gopls. ", OldestSupported()) +- fmt.Fprintf(&msgBuilder, "If you can't upgrade and want this message to go away, please install gopls %s. ", v.InstallGoplsVersion) +- fmt.Fprint(&msgBuilder, "See https://go.dev/s/gopls-support-policy for more details.") +- +- return msgBuilder.String(), isError +- } +- } +- return "", false +-} +diff -urN a/gopls/internal/util/goversion/goversion_test.go b/gopls/internal/util/goversion/goversion_test.go +--- a/gopls/internal/util/goversion/goversion_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/util/goversion/goversion_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,74 +0,0 @@ +-// Copyright 2022 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package goversion_test +- +-import ( +- "fmt" +- "strings" +- "testing" +- +- "golang.org/x/tools/gopls/internal/util/goversion" +-) +- +-func TestMessage(t *testing.T) { +- // Note(rfindley): this test is a change detector, as it must be updated +- // whenever we deprecate a version. 
+- // +- // However, I chose to leave it as is since it gives us confidence in error +- // messages served for Go versions that we no longer support (and therefore +- // no longer run in CI). +- type test struct { +- goVersion int +- fromBuild bool +- wantContains []string // string fragments that we expect to see +- wantIsError bool // an error, not a mere warning +- } +- +- deprecated := func(goVersion int, lastVersion string) test { +- return test{ +- goVersion: goVersion, +- fromBuild: false, +- wantContains: []string{ +- fmt.Sprintf("Found Go version 1.%d", goVersion), +- "not supported", +- fmt.Sprintf("upgrade to Go 1.%d", goversion.OldestSupported()), +- fmt.Sprintf("install gopls %s", lastVersion), +- }, +- wantIsError: true, +- } +- } +- +- tests := []test{ +- {-1, false, nil, false}, +- deprecated(12, "v0.7.5"), +- deprecated(13, "v0.9.5"), +- deprecated(15, "v0.9.5"), +- deprecated(16, "v0.11.0"), +- deprecated(17, "v0.11.0"), +- deprecated(18, "v0.14.2"), +- {19, false, []string{"Found Go version 1.19", "unsupported by gopls v0.17.0", "upgrade to Go 1.21", "install gopls v0.15.3"}, false}, +- {19, true, []string{"Gopls was built with Go version 1.19", "unsupported by gopls v0.17.0", "upgrade to Go 1.21", "install gopls v0.15.3"}, false}, +- {20, false, []string{"Found Go version 1.20", "unsupported by gopls v0.17.0", "upgrade to Go 1.21", "install gopls v0.15.3"}, false}, +- {20, true, []string{"Gopls was built with Go version 1.20", "unsupported by gopls v0.17.0", "upgrade to Go 1.21", "install gopls v0.15.3"}, false}, +- } +- +- for _, test := range tests { +- gotMsg, gotIsError := goversion.Message(test.goVersion, test.fromBuild) +- +- if len(test.wantContains) == 0 && gotMsg != "" { +- t.Errorf("versionMessage(%d) = %q, want \"\"", test.goVersion, gotMsg) +- } +- +- for _, want := range test.wantContains { +- if !strings.Contains(gotMsg, want) { +- t.Errorf("versionMessage(%d) = %q, want containing %q", test.goVersion, gotMsg, want) +- } +- } +- +- if gotIsError != test.wantIsError { +- t.Errorf("versionMessage(%d) isError = %v, want %v", test.goVersion, gotIsError, test.wantIsError) +- } +- } +-} +diff -urN a/gopls/internal/util/immutable/immutable.go b/gopls/internal/util/immutable/immutable.go +--- a/gopls/internal/util/immutable/immutable.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/util/immutable/immutable.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,44 +0,0 @@ +-// Copyright 2023 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-// The immutable package defines immutable wrappers around common data +-// structures. These are used for additional type safety inside gopls. +-// +-// See the "persistent" package for copy-on-write data structures. +-package immutable +- +-import ( +- "iter" +- "maps" +-) +- +-// Map is an immutable wrapper around an ordinary Go map. +-type Map[K comparable, V any] struct { +- m map[K]V +-} +- +-// MapOf wraps the given Go map. +-// +-// The caller must not subsequently mutate the map. +-func MapOf[K comparable, V any](m map[K]V) Map[K, V] { +- return Map[K, V]{m} +-} +- +-// Value returns the mapped value for k. +-// It is equivalent to the commaok form of an ordinary go map, and returns +-// (zero, false) if the key is not present. +-func (m Map[K, V]) Value(k K) (V, bool) { +- v, ok := m.m[k] +- return v, ok +-} +- +-// Len returns the number of entries in the Map. 
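A sketch of the immutable wrapper above (the map contents are arbitrary examples):

package main

import (
	"fmt"

	"golang.org/x/tools/gopls/internal/util/immutable"
)

func main() {
	m := immutable.MapOf(map[string]int{"a": 1, "b": 2}) // caller must not mutate the map afterwards

	if v, ok := m.Value("a"); ok {
		fmt.Println(v, m.Len())
	}
	for k, v := range m.All() { // iterator over (key, value) pairs
		fmt.Println(k, v)
	}
}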
+-func (m Map[K, V]) Len() int { +- return len(m.m) +-} +- +-// All returns an iterator over each mapped (key, value) pair. +-func (m Map[K, V]) All() iter.Seq2[K, V] { +- return maps.All(m.m) +-} +diff -urN a/gopls/internal/util/lru/lru_fuzz_test.go b/gopls/internal/util/lru/lru_fuzz_test.go +--- a/gopls/internal/util/lru/lru_fuzz_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/util/lru/lru_fuzz_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,38 +0,0 @@ +-// Copyright 2023 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package lru_test +- +-import ( +- "testing" +- +- "golang.org/x/tools/gopls/internal/util/lru" +-) +- +-// Simple fuzzing test for consistency. +-func FuzzCache(f *testing.F) { +- type op struct { +- set bool +- key, value byte +- } +- f.Fuzz(func(t *testing.T, data []byte) { +- var ops []op +- for len(data) >= 3 { +- ops = append(ops, op{data[0]%2 == 0, data[1], data[2]}) +- data = data[3:] +- } +- cache := lru.New[byte, byte](100) +- var reference [256]byte +- for _, op := range ops { +- if op.set { +- reference[op.key] = op.value +- cache.Set(op.key, op.value, 1) +- } else { +- if v, ok := cache.Get(op.key); ok && v != reference[op.key] { +- t.Fatalf("cache.Get(%d) = %d, want %d", op.key, v, reference[op.key]) +- } +- } +- } +- }) +-} +diff -urN a/gopls/internal/util/lru/lru.go b/gopls/internal/util/lru/lru.go +--- a/gopls/internal/util/lru/lru.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/util/lru/lru.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,179 +0,0 @@ +-// Copyright 2023 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-// The lru package implements a fixed-size in-memory LRU cache. +-package lru +- +-import ( +- "container/heap" +- "fmt" +- "sync" +-) +- +-// A Cache is a fixed-size in-memory LRU cache, storing values of type V keyed +-// by keys of type K. +-type Cache[K comparable, V any] struct { +- impl *cache +-} +- +-// Get retrieves the value for the specified key. +-// If the key is found, its access time is updated. +-// +-// The second result reports whether the key was found. +-func (c *Cache[K, V]) Get(key K) (V, bool) { +- v, ok := c.impl.get(key) +- if !ok { +- var zero V +- return zero, false +- } +- // Handle untyped nil explicitly to avoid a panic in the type assertion +- // below. +- if v == nil { +- var zero V +- return zero, true +- } +- return v.(V), true +-} +- +-// Set stores a value for the specified key, using its given size to update the +-// current cache size, evicting old entries as necessary to fit in the cache +-// capacity. +-// +-// Size must be a non-negative value. If size is larger than the cache +-// capacity, the value is not stored and the cache is not modified. +-func (c *Cache[K, V]) Set(key K, value V, size int) { +- c.impl.set(key, value, size) +-} +- +-// New creates a new Cache with the given capacity, which must be positive. +-// +-// The cache capacity uses arbitrary units, which are specified during the Set +-// operation. +-func New[K comparable, V any](capacity int) *Cache[K, V] { +- if capacity == 0 { +- panic("zero capacity") +- } +- +- return &Cache[K, V]{&cache{ +- capacity: capacity, +- m: make(map[any]*entry), +- }} +-} +- +-// cache is the non-generic implementation of [Cache]. 
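A sketch of the generic cache API above, mirroring the deleted tests; keys, values, and sizes are arbitrary examples:

package main

import (
	"fmt"

	"golang.org/x/tools/gopls/internal/util/lru"
)

func main() {
	cache := lru.New[string, string](10) // capacity in caller-defined units

	cache.Set("a", "123", len("123")) // size counts against the capacity
	cache.Set("b", "456", len("456"))

	if v, ok := cache.Get("a"); ok { // Get refreshes the entry's access time
		fmt.Println(v)
	}
}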
+-// +-// (Using a generic wrapper around a non-generic impl avoids unnecessary +-// "stenciling" or code duplication.) +-type cache struct { +- capacity int +- +- mu sync.Mutex +- used int // used capacity, in user-specified units +- m map[any]*entry // k/v lookup +- lru queue // min-atime priority queue of *entry +- clock int64 // clock time, incremented whenever the cache is updated +-} +- +-type entry struct { +- key any +- value any +- size int // caller-specified size +- atime int64 // last access / set time +- index int // index of entry in the heap slice +-} +- +-func (c *cache) get(key any) (any, bool) { +- c.mu.Lock() +- defer c.mu.Unlock() +- +- c.clock++ // every access updates the clock +- +- if e, ok := c.m[key]; ok { // cache hit +- e.atime = c.clock +- heap.Fix(&c.lru, e.index) +- return e.value, true +- } +- +- return nil, false +-} +- +-func (c *cache) set(key, value any, size int) { +- if size < 0 { +- panic(fmt.Sprintf("size must be non-negative, got %d", size)) +- } +- if size > c.capacity { +- return // uncacheable +- } +- +- c.mu.Lock() +- defer c.mu.Unlock() +- +- c.clock++ +- +- // Remove the existing cache entry for key, if it exists. +- e, ok := c.m[key] +- if ok { +- c.used -= e.size +- heap.Remove(&c.lru, e.index) +- delete(c.m, key) +- } +- +- // Evict entries until the new value will fit. +- newUsed := c.used + size +- if newUsed < 0 { +- return // integer overflow; return silently +- } +- c.used = newUsed +- for c.used > c.capacity { +- // evict oldest entry +- e = heap.Pop(&c.lru).(*entry) +- c.used -= e.size +- delete(c.m, e.key) +- } +- +- // Store the new value. +- // Opt: e is evicted, so it can be reused to reduce allocation. +- if e == nil { +- e = new(entry) +- } +- e.key = key +- e.value = value +- e.size = size +- e.atime = c.clock +- c.m[e.key] = e +- heap.Push(&c.lru, e) +- +- if len(c.m) != len(c.lru) { +- panic("map and LRU are inconsistent") +- } +-} +- +-// -- priority queue boilerplate -- +- +-// queue is a min-atime priority queue of cache entries. +-type queue []*entry +- +-func (q queue) Len() int { return len(q) } +- +-func (q queue) Less(i, j int) bool { return q[i].atime < q[j].atime } +- +-func (q queue) Swap(i, j int) { +- q[i], q[j] = q[j], q[i] +- q[i].index = i +- q[j].index = j +-} +- +-func (q *queue) Push(x any) { +- e := x.(*entry) +- e.index = len(*q) +- *q = append(*q, e) +-} +- +-func (q *queue) Pop() any { +- last := len(*q) - 1 +- e := (*q)[last] +- (*q)[last] = nil // aid GC +- *q = (*q)[:last] +- return e +-} +diff -urN a/gopls/internal/util/lru/lru_nil_test.go b/gopls/internal/util/lru/lru_nil_test.go +--- a/gopls/internal/util/lru/lru_nil_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/util/lru/lru_nil_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,19 +0,0 @@ +-// Copyright 2024 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. 
+- +-package lru_test +- +-import ( +- "testing" +- +- "golang.org/x/tools/gopls/internal/util/lru" +-) +- +-func TestSetUntypedNil(t *testing.T) { +- cache := lru.New[any, any](100 * 1e6) +- cache.Set(nil, nil, 1) +- if got, ok := cache.Get(nil); !ok || got != nil { +- t.Errorf("cache.Get(nil) = %v, %v, want nil, true", got, ok) +- } +-} +diff -urN a/gopls/internal/util/lru/lru_test.go b/gopls/internal/util/lru/lru_test.go +--- a/gopls/internal/util/lru/lru_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/util/lru/lru_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,152 +0,0 @@ +-// Copyright 2023 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package lru_test +- +-import ( +- "bytes" +- cryptorand "crypto/rand" +- "fmt" +- "log" +- mathrand "math/rand" +- "strings" +- "testing" +- +- "golang.org/x/sync/errgroup" +- "golang.org/x/tools/gopls/internal/util/lru" +-) +- +-func TestCache(t *testing.T) { +- type get struct { +- key string +- want string +- } +- type set struct { +- key, value string +- } +- +- tests := []struct { +- label string +- steps []any +- }{ +- {"empty cache", []any{ +- get{"a", ""}, +- get{"b", ""}, +- }}, +- {"zero-length string", []any{ +- set{"a", ""}, +- get{"a", ""}, +- }}, +- {"under capacity", []any{ +- set{"a", "123"}, +- set{"b", "456"}, +- get{"a", "123"}, +- get{"b", "456"}, +- }}, +- {"over capacity", []any{ +- set{"a", "123"}, +- set{"b", "456"}, +- set{"c", "78901"}, +- get{"a", ""}, +- get{"b", "456"}, +- get{"c", "78901"}, +- }}, +- {"access ordering", []any{ +- set{"a", "123"}, +- set{"b", "456"}, +- get{"a", "123"}, +- set{"c", "78901"}, +- get{"a", "123"}, +- get{"b", ""}, +- get{"c", "78901"}, +- }}, +- } +- +- for _, test := range tests { +- t.Run(test.label, func(t *testing.T) { +- c := lru.New[string, string](10) +- for i, step := range test.steps { +- switch step := step.(type) { +- case get: +- if got, _ := c.Get(step.key); got != step.want { +- t.Errorf("#%d: c.Get(%q) = %q, want %q", i, step.key, got, step.want) +- } +- case set: +- c.Set(step.key, step.value, len(step.value)) +- } +- } +- }) +- } +-} +- +-// TestConcurrency exercises concurrent access to the same entry. +-// +-// It is a copy of TestConcurrency from the filecache package. +-func TestConcurrency(t *testing.T) { +- key := uniqueKey() +- const N = 100 // concurrency level +- +- // Construct N distinct values, each larger +- // than a typical 4KB OS file buffer page. +- var values [N][8192]byte +- for i := range values { +- if _, err := mathrand.Read(values[i][:]); err != nil { +- t.Fatalf("rand: %v", err) +- } +- } +- +- cache := lru.New[[32]byte, []byte](100 * 1e6) // 100MB cache +- +- // get calls Get and verifies that the cache entry +- // matches one of the values passed to Set. +- get := func(mustBeFound bool) error { +- got, ok := cache.Get(key) +- if !ok { +- if !mustBeFound { +- return nil +- } +- return fmt.Errorf("Get did not return a value") +- } +- for _, want := range values { +- if bytes.Equal(want[:], got) { +- return nil // a match +- } +- } +- return fmt.Errorf("Get returned a value that was never Set") +- } +- +- // Perform N concurrent calls to Set and Get. +- // All sets must succeed. +- // All gets must return nothing, or one of the Set values; +- // there is no third possibility. 
+- var group errgroup.Group +- for i := range values { +- v := values[i][:] +- group.Go(func() error { +- cache.Set(key, v, len(v)) +- return nil +- }) +- group.Go(func() error { return get(false) }) +- } +- if err := group.Wait(); err != nil { +- if strings.Contains(err.Error(), "operation not supported") || +- strings.Contains(err.Error(), "not implemented") { +- t.Skipf("skipping: %v", err) +- } +- t.Fatal(err) +- } +- +- // A final Get must report one of the values that was Set. +- if err := get(true); err != nil { +- t.Fatalf("final Get failed: %v", err) +- } +-} +- +-// uniqueKey returns a key that has never been used before. +-func uniqueKey() (key [32]byte) { +- if _, err := cryptorand.Read(key[:]); err != nil { +- log.Fatalf("rand: %v", err) +- } +- return +-} +diff -urN a/gopls/internal/util/memoize/memoize.go b/gopls/internal/util/memoize/memoize.go +--- a/gopls/internal/util/memoize/memoize.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/util/memoize/memoize.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,335 +0,0 @@ +-// Copyright 2019 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-// Package memoize defines a "promise" abstraction that enables +-// memoization of the result of calling an expensive but idempotent +-// function. +-// +-// Call p = NewPromise(f) to obtain a promise for the future result of +-// calling f(), and call p.Get() to obtain that result. All calls to +-// p.Get return the result of a single call of f(). +-// Get blocks if the function has not finished (or started). +-// +-// A Store is a map of arbitrary keys to promises. Use Store.Promise +-// to create a promise in the store. All calls to Handle(k) return the +-// same promise as long as it is in the store. These promises are +-// reference-counted and must be explicitly released. Once the last +-// reference is released, the promise is removed from the store. +-package memoize +- +-import ( +- "context" +- "fmt" +- "reflect" +- "runtime/trace" +- "sync" +- "sync/atomic" +- +- "golang.org/x/tools/internal/xcontext" +-) +- +-// Function is the type of a function that can be memoized. +-// +-// If the arg is a RefCounted, its Acquire/Release operations are called. +-// +-// The argument must not materially affect the result of the function +-// in ways that are not captured by the promise's key, since if +-// Promise.Get is called twice concurrently, with the same (implicit) +-// key but different arguments, the Function is called only once but +-// its result must be suitable for both callers. +-// +-// The main purpose of the argument is to avoid the Function closure +-// needing to retain large objects (in practice: the snapshot) in +-// memory that can be supplied at call time by any caller. +-type Function func(ctx context.Context, arg any) any +- +-// A RefCounted is a value whose functional lifetime is determined by +-// reference counting. +-// +-// Its Acquire method is called before the Function is invoked, and +-// the corresponding release is called when the Function returns. +-// Usually both events happen within a single call to Get, so Get +-// would be fine with a "borrowed" reference, but if the context is +-// cancelled, Get may return before the Function is complete, causing +-// the argument to escape, and potential premature destruction of the +-// value. For a reference-counted type, this requires a pair of +-// increment/decrement operations to extend its life. 
+-type RefCounted interface { +- // Acquire prevents the value from being destroyed until the +- // returned function is called. +- Acquire() func() +-} +- +-// A Promise represents the future result of a call to a function. +-type Promise struct { +- debug string // for observability +- +- // refcount is the reference count in the containing Store, used by +- // Store.Promise. It is guarded by Store.promisesMu on the containing Store. +- refcount int32 +- +- mu sync.Mutex +- +- // A Promise starts out IDLE, waiting for something to demand +- // its evaluation. It then transitions into RUNNING state. +- // +- // While RUNNING, waiters tracks the number of Get calls +- // waiting for a result, and the done channel is used to +- // notify waiters of the next state transition. Once +- // evaluation finishes, value is set, state changes to +- // COMPLETED, and done is closed, unblocking waiters. +- // +- // Alternatively, as Get calls are cancelled, they decrement +- // waiters. If it drops to zero, the inner context is +- // cancelled, computation is abandoned, and state resets to +- // IDLE to start the process over again. +- state state +- // done is set in running state, and closed when exiting it. +- done chan struct{} +- // cancel is set in running state. It cancels computation. +- cancel context.CancelFunc +- // waiters is the number of Gets outstanding. +- waiters uint +- // the function that will be used to populate the value +- function Function +- // value is set in completed state. +- value any +-} +- +-// NewPromise returns a promise for the future result of calling the +-// specified function. +-// +-// The debug string is used to classify promises in logs and metrics. +-// It should be drawn from a small set. +-func NewPromise(debug string, function Function) *Promise { +- if function == nil { +- panic("nil function") +- } +- return &Promise{ +- debug: debug, +- function: function, +- } +-} +- +-type state int +- +-const ( +- stateIdle = iota // newly constructed, or last waiter was cancelled +- stateRunning // start was called and not cancelled +- stateCompleted // function call ran to completion +-) +- +-// Cached returns the value associated with a promise. +-// +-// It will never cause the value to be generated. +-// It will return the cached value, if present. +-func (p *Promise) Cached() any { +- p.mu.Lock() +- defer p.mu.Unlock() +- if p.state == stateCompleted { +- return p.value +- } +- return nil +-} +- +-// Get returns the value associated with a promise. +-// +-// All calls to Promise.Get on a given promise return the +-// same result but the function is called (to completion) at most once. +-// +-// If the value is not yet ready, the underlying function will be invoked. +-// +-// If ctx is cancelled, Get returns (nil, Canceled). +-// If all concurrent calls to Get are cancelled, the context provided +-// to the function is cancelled. A later call to Get may attempt to +-// call the function again. +-func (p *Promise) Get(ctx context.Context, arg any) (any, error) { +- if ctx.Err() != nil { +- return nil, ctx.Err() +- } +- p.mu.Lock() +- switch p.state { +- case stateIdle: +- return p.run(ctx, arg) +- case stateRunning: +- return p.wait(ctx) +- case stateCompleted: +- defer p.mu.Unlock() +- return p.value, nil +- default: +- panic("unknown state") +- } +-} +- +-// run starts p.function and returns the result. p.mu must be locked. 
+-func (p *Promise) run(ctx context.Context, arg any) (any, error) { +- childCtx, cancel := context.WithCancel(xcontext.Detach(ctx)) +- p.cancel = cancel +- p.state = stateRunning +- p.done = make(chan struct{}) +- function := p.function // Read under the lock +- +- // Make sure that the argument isn't destroyed while we're running in it. +- release := func() {} +- if rc, ok := arg.(RefCounted); ok { +- release = rc.Acquire() +- } +- +- go func() { +- trace.WithRegion(childCtx, fmt.Sprintf("Promise.run %s", p.debug), func() { +- defer release() +- // Just in case the function does something expensive without checking +- // the context, double-check we're still alive. +- if childCtx.Err() != nil { +- return +- } +- v := function(childCtx, arg) +- if childCtx.Err() != nil { +- return +- } +- +- p.mu.Lock() +- defer p.mu.Unlock() +- // It's theoretically possible that the promise has been cancelled out +- // of the run that started us, and then started running again since we +- // checked childCtx above. Even so, that should be harmless, since each +- // run should produce the same results. +- if p.state != stateRunning { +- return +- } +- +- p.value = v +- p.function = nil // aid GC +- p.state = stateCompleted +- close(p.done) +- }) +- }() +- +- return p.wait(ctx) +-} +- +-// wait waits for the value to be computed, or ctx to be cancelled. p.mu must be locked. +-func (p *Promise) wait(ctx context.Context) (any, error) { +- p.waiters++ +- done := p.done +- p.mu.Unlock() +- +- select { +- case <-done: +- p.mu.Lock() +- defer p.mu.Unlock() +- if p.state == stateCompleted { +- return p.value, nil +- } +- return nil, nil +- case <-ctx.Done(): +- p.mu.Lock() +- defer p.mu.Unlock() +- p.waiters-- +- if p.waiters == 0 && p.state == stateRunning { +- p.cancel() +- close(p.done) +- p.state = stateIdle +- p.done = nil +- p.cancel = nil +- } +- return nil, ctx.Err() +- } +-} +- +-// An EvictionPolicy controls the eviction behavior of keys in a Store when +-// they no longer have any references. +-type EvictionPolicy int +- +-const ( +- // ImmediatelyEvict evicts keys as soon as they no longer have references. +- ImmediatelyEvict EvictionPolicy = iota +- +- // NeverEvict does not evict keys. +- NeverEvict +-) +- +-// A Store maps arbitrary keys to reference-counted promises. +-// +-// The zero value is a valid Store, though a store may also be created via +-// NewStore if a custom EvictionPolicy is required. +-type Store struct { +- evictionPolicy EvictionPolicy +- +- promisesMu sync.Mutex +- promises map[any]*Promise +-} +- +-// NewStore creates a new store with the given eviction policy. +-func NewStore(policy EvictionPolicy) *Store { +- return &Store{evictionPolicy: policy} +-} +- +-// Promise returns a reference-counted promise for the future result of +-// calling the specified function. +-// +-// Calls to Promise with the same key return the same promise, incrementing its +-// reference count. The caller must call the returned function to decrement +-// the promise's reference count when it is no longer needed. The returned +-// function must not be called more than once. +-// +-// Once the last reference has been released, the promise is removed from the +-// store. 
+-func (store *Store) Promise(key any, function Function) (*Promise, func()) { +- store.promisesMu.Lock() +- p, ok := store.promises[key] +- if !ok { +- p = NewPromise(reflect.TypeOf(key).String(), function) +- if store.promises == nil { +- store.promises = map[any]*Promise{} +- } +- store.promises[key] = p +- } +- p.refcount++ +- store.promisesMu.Unlock() +- +- var released int32 +- release := func() { +- if !atomic.CompareAndSwapInt32(&released, 0, 1) { +- panic("release called more than once") +- } +- store.promisesMu.Lock() +- +- p.refcount-- +- if p.refcount == 0 && store.evictionPolicy != NeverEvict { +- // Inv: if p.refcount > 0, then store.promises[key] == p. +- delete(store.promises, key) +- } +- store.promisesMu.Unlock() +- } +- +- return p, release +-} +- +-// Stats returns the number of each type of key in the store. +-func (s *Store) Stats() map[reflect.Type]int { +- result := map[reflect.Type]int{} +- +- s.promisesMu.Lock() +- defer s.promisesMu.Unlock() +- +- for k := range s.promises { +- result[reflect.TypeOf(k)]++ +- } +- return result +-} +- +-// DebugOnlyIterate iterates through the store and, for each completed +-// promise, calls f(k, v) for the map key k and function result v. It +-// should only be used for debugging purposes. +-func (s *Store) DebugOnlyIterate(f func(k, v any)) { +- s.promisesMu.Lock() +- defer s.promisesMu.Unlock() +- +- for k, p := range s.promises { +- if v := p.Cached(); v != nil { +- f(k, v) +- } +- } +-} +diff -urN a/gopls/internal/util/memoize/memoize_test.go b/gopls/internal/util/memoize/memoize_test.go +--- a/gopls/internal/util/memoize/memoize_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/util/memoize/memoize_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,166 +0,0 @@ +-// Copyright 2019 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package memoize_test +- +-import ( +- "context" +- "sync" +- "testing" +- "time" +- +- "golang.org/x/tools/gopls/internal/util/memoize" +-) +- +-func TestGet(t *testing.T) { +- var store memoize.Store +- +- evaled := 0 +- +- h, release := store.Promise("key", func(context.Context, any) any { +- evaled++ +- return "res" +- }) +- defer release() +- expectGet(t, h, "res") +- expectGet(t, h, "res") +- if evaled != 1 { +- t.Errorf("got %v calls to function, wanted 1", evaled) +- } +-} +- +-func expectGet(t *testing.T, h *memoize.Promise, wantV any) { +- t.Helper() +- gotV, gotErr := h.Get(context.Background(), nil) +- if gotV != wantV || gotErr != nil { +- t.Fatalf("Get() = %v, %v, wanted %v, nil", gotV, gotErr, wantV) +- } +-} +- +-func TestNewPromise(t *testing.T) { +- calls := 0 +- f := func(context.Context, any) any { +- calls++ +- return calls +- } +- +- // All calls to Get on the same promise return the same result. +- p1 := memoize.NewPromise("debug", f) +- expectGet(t, p1, 1) +- expectGet(t, p1, 1) +- +- // A new promise calls the function again. +- p2 := memoize.NewPromise("debug", f) +- expectGet(t, p2, 2) +- expectGet(t, p2, 2) +- +- // The original promise is unchanged. 
+- expectGet(t, p1, 1) +-} +- +-func TestStoredPromiseRefCounting(t *testing.T) { +- var store memoize.Store +- v1 := false +- v2 := false +- p1, release1 := store.Promise("key1", func(context.Context, any) any { +- return &v1 +- }) +- p2, release2 := store.Promise("key2", func(context.Context, any) any { +- return &v2 +- }) +- expectGet(t, p1, &v1) +- expectGet(t, p2, &v2) +- +- expectGet(t, p1, &v1) +- expectGet(t, p2, &v2) +- +- p2Copy, release2Copy := store.Promise("key2", func(context.Context, any) any { +- return &v1 +- }) +- if p2 != p2Copy { +- t.Error("Promise returned a new value while old is not destroyed yet") +- } +- expectGet(t, p2Copy, &v2) +- +- release2() +- if got, want := v2, false; got != want { +- t.Errorf("after destroying first v2 ref, got %v, want %v", got, want) +- } +- release2Copy() +- if got, want := v1, false; got != want { +- t.Errorf("after destroying v2, got %v, want %v", got, want) +- } +- release1() +- +- p2Copy, release2Copy = store.Promise("key2", func(context.Context, any) any { +- return &v2 +- }) +- if p2 == p2Copy { +- t.Error("Promise returned previously destroyed value") +- } +- release2Copy() +-} +- +-func TestPromiseDestroyedWhileRunning(t *testing.T) { +- // Test that calls to Promise.Get return even if the promise is destroyed while running. +- +- var store memoize.Store +- c := make(chan int) +- +- var v int +- h, release := store.Promise("key", func(ctx context.Context, _ any) any { +- <-c +- <-c +- if err := ctx.Err(); err != nil { +- t.Errorf("ctx.Err() = %v, want nil", err) +- } +- return &v +- }) +- +- ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second) // arbitrary timeout; may be removed if it causes flakes +- defer cancel() +- +- var wg sync.WaitGroup +- wg.Add(1) +- var got any +- var err error +- go func() { +- got, err = h.Get(ctx, nil) +- wg.Done() +- }() +- +- c <- 0 // send once to enter the promise function +- release() // release before the promise function returns +- c <- 0 // let the promise function proceed +- +- wg.Wait() +- +- if err != nil { +- t.Errorf("Get() failed: %v", err) +- } +- if got != &v { +- t.Errorf("Get() = %v, want %v", got, v) +- } +-} +- +-func TestDoubleReleasePanics(t *testing.T) { +- var store memoize.Store +- _, release := store.Promise("key", func(ctx context.Context, _ any) any { return 0 }) +- +- panicked := false +- +- func() { +- defer func() { +- if recover() != nil { +- panicked = true +- } +- }() +- release() +- release() +- }() +- +- if !panicked { +- t.Errorf("calling release() twice did not panic") +- } +-} +diff -urN a/gopls/internal/util/moremaps/maps.go b/gopls/internal/util/moremaps/maps.go +--- a/gopls/internal/util/moremaps/maps.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/util/moremaps/maps.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,84 +0,0 @@ +-// Copyright 2023 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package moremaps +- +-import ( +- "cmp" +- "iter" +- "maps" +- "slices" +-) +- +-// Arbitrary returns an arbitrary (key, value) entry from the map and ok is true, if +-// the map is not empty. Otherwise, it returns zero values for K and V, and false. +-func Arbitrary[K comparable, V any](m map[K]V) (_ K, _ V, ok bool) { +- for k, v := range m { +- return k, v, true +- } +- return +-} +- +-// Group returns a new non-nil map containing the elements of s grouped by the +-// keys returned from the key func. 
+-func Group[K comparable, V any](s []V, key func(V) K) map[K][]V { +- m := make(map[K][]V) +- for _, v := range s { +- k := key(v) +- m[k] = append(m[k], v) +- } +- return m +-} +- +-// KeySlice returns the keys of the map M, like slices.Collect(maps.Keys(m)). +-func KeySlice[M ~map[K]V, K comparable, V any](m M) []K { +- r := make([]K, 0, len(m)) +- for k := range m { +- r = append(r, k) +- } +- return r +-} +- +-// ValueSlice returns the values of the map M, like slices.Collect(maps.Values(m)). +-func ValueSlice[M ~map[K]V, K comparable, V any](m M) []V { +- r := make([]V, 0, len(m)) +- for _, v := range m { +- r = append(r, v) +- } +- return r +-} +- +-// SameKeys reports whether x and y have equal sets of keys. +-func SameKeys[K comparable, V1, V2 any](x map[K]V1, y map[K]V2) bool { +- ignoreValues := func(V1, V2) bool { return true } +- return maps.EqualFunc(x, y, ignoreValues) +-} +- +-// Sorted returns an iterator over the entries of m in key order. +-func Sorted[M ~map[K]V, K cmp.Ordered, V any](m M) iter.Seq2[K, V] { +- // TODO(adonovan): use maps.Sorted if proposal #68598 is accepted. +- return func(yield func(K, V) bool) { +- keys := KeySlice(m) +- slices.Sort(keys) +- for _, k := range keys { +- if !yield(k, m[k]) { +- break +- } +- } +- } +-} +- +-// SortedFunc returns an iterator over the entries of m in the key order determined by cmp. +-func SortedFunc[M ~map[K]V, K comparable, V any](m M, cmp func(x, y K) int) iter.Seq2[K, V] { +- // TODO(adonovan): use maps.SortedFunc if proposal #68598 is accepted. +- return func(yield func(K, V) bool) { +- keys := KeySlice(m) +- slices.SortFunc(keys, cmp) +- for _, k := range keys { +- if !yield(k, m[k]) { +- break +- } +- } +- } +-} +diff -urN a/gopls/internal/util/moreslices/slices.go b/gopls/internal/util/moreslices/slices.go +--- a/gopls/internal/util/moreslices/slices.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/util/moreslices/slices.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,30 +0,0 @@ +-// Copyright 2024 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package moreslices +- +-// Remove removes all values equal to elem from slice. +-// +-// The closest equivalent in the standard slices package is: +-// +-// DeleteFunc(func(x T) bool { return x == elem }) +-func Remove[T comparable](slice []T, elem T) []T { +- out := slice[:0] +- for _, v := range slice { +- if v != elem { +- out = append(out, v) +- } +- } +- return out +-} +- +-// ConvertStrings converts a slice of type A (with underlying type string) +-// to a slice of type B (with underlying type string). +-func ConvertStrings[B, A ~string](input []A) []B { +- result := make([]B, len(input)) +- for i, v := range input { +- result[i] = B(string(v)) +- } +- return result +-} +diff -urN a/gopls/internal/util/morestrings/strings.go b/gopls/internal/util/morestrings/strings.go +--- a/gopls/internal/util/morestrings/strings.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/util/morestrings/strings.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,15 +0,0 @@ +-// Copyright 2025 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package morestrings +- +-import "strings" +- +-// CutLast is the "last" analogue of [strings.Cut]. 
+-func CutLast(s, sep string) (before, after string, ok bool) { +- if i := strings.LastIndex(s, sep); i >= 0 { +- return s[:i], s[i+len(sep):], true +- } +- return s, "", false +-} +diff -urN a/gopls/internal/util/pathutil/util.go b/gopls/internal/util/pathutil/util.go +--- a/gopls/internal/util/pathutil/util.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/util/pathutil/util.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,49 +0,0 @@ +-// Copyright 2023 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package pathutil +- +-import ( +- "path/filepath" +- "strings" +-) +- +-// InDir checks whether path is in the file tree rooted at dir. +-// It checks only the lexical form of the file names. +-// It does not consider symbolic links. +-// +-// Copied from go/src/cmd/go/internal/search/search.go. +-func InDir(dir, path string) bool { +- pv := strings.ToUpper(filepath.VolumeName(path)) +- dv := strings.ToUpper(filepath.VolumeName(dir)) +- path = path[len(pv):] +- dir = dir[len(dv):] +- switch { +- default: +- return false +- case pv != dv: +- return false +- case len(path) == len(dir): +- if path == dir { +- return true +- } +- return false +- case dir == "": +- return path != "" +- case len(path) > len(dir): +- if dir[len(dir)-1] == filepath.Separator { +- if path[:len(dir)] == dir { +- return path[len(dir):] != "" +- } +- return false +- } +- if path[len(dir)] == filepath.Separator && path[:len(dir)] == dir { +- if len(path) == len(dir)+1 { +- return true +- } +- return path[len(dir)+1:] != "" +- } +- return false +- } +-} +diff -urN a/gopls/internal/util/persistent/map.go b/gopls/internal/util/persistent/map.go +--- a/gopls/internal/util/persistent/map.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/util/persistent/map.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,328 +0,0 @@ +-// Copyright 2022 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-// The persistent package defines various persistent data structures; +-// that is, data structures that can be efficiently copied and modified +-// in sublinear time. +-package persistent +- +-import ( +- "fmt" +- "iter" +- "math/rand" +- "strings" +- "sync/atomic" +- +- "golang.org/x/tools/gopls/internal/util/constraints" +-) +- +-// Implementation details: +-// * Each value is reference counted by nodes which hold it. +-// * Each node is reference counted by its parent nodes. +-// * Each map is considered a top-level parent node from reference counting perspective. +-// * Each change does always effectively produce a new top level node. +-// +-// Functions which operate directly with nodes do have a notation in form of +-// `foo(arg1:+n1, arg2:+n2) (ret1:+n3)`. +-// Each argument is followed by a delta change to its reference counter. +-// In case if no change is expected, the delta will be `-0`. +-// +-// TODO(rfindley): add Update(K, func(V, bool) V), as we have several instances +-// of the Get-<check>-Set pattern that could be optimized. +- +-// Map is an associative mapping from keys to values. +-// +-// Maps can be Cloned in constant time. +-// Get, Set, and Delete operations are done on average in logarithmic time. +-// Maps can be merged (via SetAll) in O(m log(n/m)) time for maps of size n and m, where m < n. 
+-// +-// Values are reference counted, and a client-supplied release function +-// is called when a value is no longer referenced by a map or any clone. +-// +-// Internally the implementation is based on a randomized persistent treap: +-// https://en.wikipedia.org/wiki/Treap. +-// +-// The zero value is ready to use. +-type Map[K constraints.Ordered, V any] struct { +- // Map is a generic wrapper around a non-generic implementation to avoid a +- // significant increase in the size of the executable. +- root *mapNode +-} +- +-func (*Map[K, V]) less(l, r any) bool { +- return l.(K) < r.(K) +-} +- +-func (m *Map[K, V]) String() string { +- var buf strings.Builder +- buf.WriteByte('{') +- var sep string +- for k, v := range m.All() { +- fmt.Fprintf(&buf, "%s%v: %v", sep, k, v) +- sep = ", " +- } +- buf.WriteByte('}') +- return buf.String() +-} +- +-type mapNode struct { +- key any +- value *refValue +- weight uint64 +- refCount int32 +- left, right *mapNode +-} +- +-type refValue struct { +- refCount int32 +- value any +- release func(key, value any) +-} +- +-func newNodeWithRef[K constraints.Ordered, V any](key K, value V, release func(key, value any)) *mapNode { +- return &mapNode{ +- key: key, +- value: &refValue{ +- value: value, +- release: release, +- refCount: 1, +- }, +- refCount: 1, +- weight: rand.Uint64(), +- } +-} +- +-func (node *mapNode) shallowCloneWithRef() *mapNode { +- atomic.AddInt32(&node.value.refCount, 1) +- return &mapNode{ +- key: node.key, +- value: node.value, +- weight: node.weight, +- refCount: 1, +- } +-} +- +-func (node *mapNode) incref() *mapNode { +- if node != nil { +- atomic.AddInt32(&node.refCount, 1) +- } +- return node +-} +- +-func (node *mapNode) decref() { +- if node == nil { +- return +- } +- if atomic.AddInt32(&node.refCount, -1) == 0 { +- if atomic.AddInt32(&node.value.refCount, -1) == 0 { +- if node.value.release != nil { +- node.value.release(node.key, node.value.value) +- } +- node.value.value = nil +- node.value.release = nil +- } +- node.left.decref() +- node.right.decref() +- } +-} +- +-// Clone returns a copy of the given map. It is a responsibility of the caller +-// to Destroy it at later time. +-func (pm *Map[K, V]) Clone() *Map[K, V] { +- return &Map[K, V]{ +- root: pm.root.incref(), +- } +-} +- +-// Destroy destroys the map. +-// +-// After Destroy, the Map should not be used again. +-func (pm *Map[K, V]) Destroy() { +- // The implementation of these two functions is the same, +- // but their intent is different. +- pm.Clear() +-} +- +-// Clear removes all entries from the map. +-func (pm *Map[K, V]) Clear() { +- pm.root.decref() +- pm.root = nil +-} +- +-// Keys returns the ascending sequence of keys present in the map. +-func (pm *Map[K, V]) Keys() iter.Seq[K] { +- return func(yield func(K) bool) { +- pm.root.forEach(func(k, _ any) bool { +- return yield(k.(K)) +- }) +- } +-} +- +-// All returns the sequence of map entries in ascending key order. +-func (pm *Map[K, V]) All() iter.Seq2[K, V] { +- return func(yield func(K, V) bool) { +- pm.root.forEach(func(k, v any) bool { +- return yield(k.(K), v.(V)) +- }) +- } +-} +- +-func (node *mapNode) forEach(yield func(key, value any) bool) bool { +- return node == nil || +- node.left.forEach(yield) && +- yield(node.key, node.value.value) && +- node.right.forEach(yield) +-} +- +-// Get returns the map value associated with the specified key. +-// The ok result indicates whether an entry was found in the map. 
+-func (pm *Map[K, V]) Get(key K) (V, bool) { +- node := pm.root +- for node != nil { +- if key < node.key.(K) { +- node = node.left +- } else if node.key.(K) < key { +- node = node.right +- } else { +- return node.value.value.(V), true +- } +- } +- var zero V +- return zero, false +-} +- +-// SetAll updates the map with key/value pairs from the other map, overwriting existing keys. +-// It is equivalent to calling Set for each entry in the other map but is more efficient. +-func (pm *Map[K, V]) SetAll(other *Map[K, V]) { +- root := pm.root +- pm.root = union(root, other.root, pm.less, true) +- root.decref() +-} +- +-// Set updates the value associated with the specified key. +-// If release is non-nil, it will be called with entry's key and value once the +-// key is no longer contained in the map or any clone. +-// +-// TODO(adonovan): fix release, which has the wrong type. +-func (pm *Map[K, V]) Set(key K, value V, release func(key, value any)) { +- first := pm.root +- second := newNodeWithRef(key, value, release) +- pm.root = union(first, second, pm.less, true) +- first.decref() +- second.decref() +-} +- +-// union returns a new tree which is a union of first and second one. +-// If overwrite is set to true, second one would override a value for any duplicate keys. +-// +-// union(first:-0, second:-0) (result:+1) +-// Union borrows both subtrees without affecting their refcount and returns a +-// new reference that the caller is expected to call decref. +-func union(first, second *mapNode, less func(any, any) bool, overwrite bool) *mapNode { +- if first == nil { +- return second.incref() +- } +- if second == nil { +- return first.incref() +- } +- +- if first.weight < second.weight { +- second, first, overwrite = first, second, !overwrite +- } +- +- left, mid, right := split(second, first.key, less, false) +- var result *mapNode +- if overwrite && mid != nil { +- result = mid.shallowCloneWithRef() +- } else { +- result = first.shallowCloneWithRef() +- } +- result.weight = first.weight +- result.left = union(first.left, left, less, overwrite) +- result.right = union(first.right, right, less, overwrite) +- left.decref() +- mid.decref() +- right.decref() +- return result +-} +- +-// split the tree midway by the key into three different ones. +-// Return three new trees: left with all nodes with smaller than key, mid with +-// the node matching the key, right with all nodes larger than key. +-// If there are no nodes in one of trees, return nil instead of it. +-// If requireMid is set (such as during deletion), then all return arguments +-// are nil if mid is not found. +-// +-// split(n:-0) (left:+1, mid:+1, right:+1) +-// Split borrows n without affecting its refcount, and returns three +-// new references that the caller is expected to call decref. 
+-func split(n *mapNode, key any, less func(any, any) bool, requireMid bool) (left, mid, right *mapNode) { +- if n == nil { +- return nil, nil, nil +- } +- +- if less(n.key, key) { +- left, mid, right := split(n.right, key, less, requireMid) +- if requireMid && mid == nil { +- return nil, nil, nil +- } +- newN := n.shallowCloneWithRef() +- newN.left = n.left.incref() +- newN.right = left +- return newN, mid, right +- } else if less(key, n.key) { +- left, mid, right := split(n.left, key, less, requireMid) +- if requireMid && mid == nil { +- return nil, nil, nil +- } +- newN := n.shallowCloneWithRef() +- newN.left = right +- newN.right = n.right.incref() +- return left, mid, newN +- } +- mid = n.shallowCloneWithRef() +- return n.left.incref(), mid, n.right.incref() +-} +- +-// Delete deletes the value for a key. +-// +-// The result reports whether the key was present in the map. +-func (pm *Map[K, V]) Delete(key K) bool { +- root := pm.root +- left, mid, right := split(root, key, pm.less, true) +- if mid == nil { +- return false +- } +- pm.root = merge(left, right) +- left.decref() +- mid.decref() +- right.decref() +- root.decref() +- return true +-} +- +-// merge two trees while preserving the weight invariant. +-// All nodes in left must have smaller keys than any node in right. +-// +-// merge(left:-0, right:-0) (result:+1) +-// Merge borrows its arguments without affecting their refcount +-// and returns a new reference that the caller is expected to call decref. +-func merge(left, right *mapNode) *mapNode { +- switch { +- case left == nil: +- return right.incref() +- case right == nil: +- return left.incref() +- case left.weight > right.weight: +- root := left.shallowCloneWithRef() +- root.left = left.left.incref() +- root.right = merge(left.right, right) +- return root +- default: +- root := right.shallowCloneWithRef() +- root.left = merge(left, right.left) +- root.right = right.right.incref() +- return root +- } +-} +diff -urN a/gopls/internal/util/persistent/map_test.go b/gopls/internal/util/persistent/map_test.go +--- a/gopls/internal/util/persistent/map_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/util/persistent/map_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,349 +0,0 @@ +-// Copyright 2022 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package persistent +- +-import ( +- "fmt" +- "maps" +- "math/rand" +- "reflect" +- "sync/atomic" +- "testing" +-) +- +-type mapEntry struct { +- key int +- value int +-} +- +-type validatedMap struct { +- impl *Map[int, int] +- expected map[int]int // current key-value mapping. 
+- deleted map[mapEntry]int // maps deleted entries to their clock time of last deletion +- seen map[mapEntry]int // maps seen entries to their clock time of last insertion +- clock int +-} +- +-func TestSimpleMap(t *testing.T) { +- deletedEntries := make(map[mapEntry]int) +- seenEntries := make(map[mapEntry]int) +- +- m1 := &validatedMap{ +- impl: new(Map[int, int]), +- expected: make(map[int]int), +- deleted: deletedEntries, +- seen: seenEntries, +- } +- +- m3 := m1.clone() +- validateRef(t, m1, m3) +- m3.set(t, 8, 8) +- validateRef(t, m1, m3) +- m3.destroy() +- +- assertSameMap(t, entrySet(deletedEntries), map[mapEntry]struct{}{ +- {key: 8, value: 8}: {}, +- }) +- +- validateRef(t, m1) +- m1.set(t, 1, 1) +- validateRef(t, m1) +- m1.set(t, 2, 2) +- validateRef(t, m1) +- m1.set(t, 3, 3) +- validateRef(t, m1) +- m1.remove(t, 2) +- validateRef(t, m1) +- m1.set(t, 6, 6) +- validateRef(t, m1) +- +- assertSameMap(t, entrySet(deletedEntries), map[mapEntry]struct{}{ +- {key: 2, value: 2}: {}, +- {key: 8, value: 8}: {}, +- }) +- +- m2 := m1.clone() +- validateRef(t, m1, m2) +- m1.set(t, 6, 60) +- validateRef(t, m1, m2) +- m1.remove(t, 1) +- validateRef(t, m1, m2) +- +- gotAllocs := int(testing.AllocsPerRun(10, func() { +- m1.impl.Delete(100) +- m1.impl.Delete(1) +- })) +- wantAllocs := 0 +- if gotAllocs != wantAllocs { +- t.Errorf("wanted %d allocs, got %d", wantAllocs, gotAllocs) +- } +- +- for i := 10; i < 14; i++ { +- m1.set(t, i, i) +- validateRef(t, m1, m2) +- } +- +- m1.set(t, 10, 100) +- validateRef(t, m1, m2) +- +- m1.remove(t, 12) +- validateRef(t, m1, m2) +- +- m2.set(t, 4, 4) +- validateRef(t, m1, m2) +- m2.set(t, 5, 5) +- validateRef(t, m1, m2) +- +- m1.destroy() +- +- assertSameMap(t, entrySet(deletedEntries), map[mapEntry]struct{}{ +- {key: 2, value: 2}: {}, +- {key: 6, value: 60}: {}, +- {key: 8, value: 8}: {}, +- {key: 10, value: 10}: {}, +- {key: 10, value: 100}: {}, +- {key: 11, value: 11}: {}, +- {key: 12, value: 12}: {}, +- {key: 13, value: 13}: {}, +- }) +- +- m2.set(t, 7, 7) +- validateRef(t, m2) +- +- m2.destroy() +- +- assertSameMap(t, entrySet(seenEntries), entrySet(deletedEntries)) +-} +- +-func TestRandomMap(t *testing.T) { +- deletedEntries := make(map[mapEntry]int) +- seenEntries := make(map[mapEntry]int) +- +- m := &validatedMap{ +- impl: new(Map[int, int]), +- expected: make(map[int]int), +- deleted: deletedEntries, +- seen: seenEntries, +- } +- +- keys := make([]int, 0, 1000) +- for i := range 1000 { +- key := rand.Intn(10000) +- m.set(t, key, key) +- keys = append(keys, key) +- +- if i%10 == 1 { +- index := rand.Intn(len(keys)) +- last := len(keys) - 1 +- key = keys[index] +- keys[index], keys[last] = keys[last], keys[index] +- keys = keys[:last] +- +- m.remove(t, key) +- } +- } +- +- m.destroy() +- assertSameMap(t, entrySet(seenEntries), entrySet(deletedEntries)) +-} +- +-func entrySet(m map[mapEntry]int) map[mapEntry]struct{} { +- set := make(map[mapEntry]struct{}) +- for k := range m { +- set[k] = struct{}{} +- } +- return set +-} +- +-func TestUpdate(t *testing.T) { +- deletedEntries := make(map[mapEntry]int) +- seenEntries := make(map[mapEntry]int) +- +- m1 := &validatedMap{ +- impl: new(Map[int, int]), +- expected: make(map[int]int), +- deleted: deletedEntries, +- seen: seenEntries, +- } +- m2 := m1.clone() +- +- m1.set(t, 1, 1) +- m1.set(t, 2, 2) +- m2.set(t, 2, 20) +- m2.set(t, 3, 3) +- m1.setAll(t, m2) +- +- m1.destroy() +- m2.destroy() +- assertSameMap(t, entrySet(seenEntries), entrySet(deletedEntries)) +-} +- +-func validateRef(t *testing.T, maps 
...*validatedMap) { +- t.Helper() +- +- actualCountByEntry := make(map[mapEntry]int32) +- nodesByEntry := make(map[mapEntry]map[*mapNode]struct{}) +- expectedCountByEntry := make(map[mapEntry]int32) +- for i, m := range maps { +- dfsRef(m.impl.root, actualCountByEntry, nodesByEntry) +- dumpMap(t, fmt.Sprintf("%d:", i), m.impl.root) +- } +- for entry, nodes := range nodesByEntry { +- expectedCountByEntry[entry] = int32(len(nodes)) +- } +- assertSameMap(t, expectedCountByEntry, actualCountByEntry) +-} +- +-func dfsRef(node *mapNode, countByEntry map[mapEntry]int32, nodesByEntry map[mapEntry]map[*mapNode]struct{}) { +- if node == nil { +- return +- } +- +- entry := mapEntry{key: node.key.(int), value: node.value.value.(int)} +- countByEntry[entry] = atomic.LoadInt32(&node.value.refCount) +- +- nodes, ok := nodesByEntry[entry] +- if !ok { +- nodes = make(map[*mapNode]struct{}) +- nodesByEntry[entry] = nodes +- } +- nodes[node] = struct{}{} +- +- dfsRef(node.left, countByEntry, nodesByEntry) +- dfsRef(node.right, countByEntry, nodesByEntry) +-} +- +-func dumpMap(t *testing.T, prefix string, n *mapNode) { +- if n == nil { +- t.Logf("%s nil", prefix) +- return +- } +- t.Logf("%s {key: %v, value: %v (ref: %v), ref: %v, weight: %v}", prefix, n.key, n.value.value, n.value.refCount, n.refCount, n.weight) +- dumpMap(t, prefix+"l", n.left) +- dumpMap(t, prefix+"r", n.right) +-} +- +-func (vm *validatedMap) validate(t *testing.T) { +- t.Helper() +- +- validateNode(t, vm.impl.root) +- +- // Note: this validation may not make sense if maps were constructed using +- // SetAll operations. If this proves to be problematic, remove the clock, +- // deleted, and seen fields. +- for key, value := range vm.expected { +- entry := mapEntry{key: key, value: value} +- if deleteAt := vm.deleted[entry]; deleteAt > vm.seen[entry] { +- t.Fatalf("entry is deleted prematurely, key: %d, value: %d", key, value) +- } +- } +- +- actualMap := make(map[int]int, len(vm.expected)) +- for key, value := range vm.impl.All() { +- if other, ok := actualMap[key]; ok { +- t.Fatalf("key is present twice, key: %d, first value: %d, second value: %d", key, value, other) +- } +- actualMap[key] = value +- } +- +- assertSameMap(t, actualMap, vm.expected) +-} +- +-func validateNode(t *testing.T, node *mapNode) { +- if node == nil { +- return +- } +- +- if node.left != nil { +- if node.key.(int) < node.left.key.(int) { +- t.Fatalf("left child has larger key: %v vs %v", node.left.key, node.key) +- } +- if node.left.weight > node.weight { +- t.Fatalf("left child has larger weight: %v vs %v", node.left.weight, node.weight) +- } +- } +- +- if node.right != nil { +- if node.right.key.(int) < node.key.(int) { +- t.Fatalf("right child has smaller key: %v vs %v", node.right.key, node.key) +- } +- if node.right.weight > node.weight { +- t.Fatalf("right child has larger weight: %v vs %v", node.right.weight, node.weight) +- } +- } +- +- validateNode(t, node.left) +- validateNode(t, node.right) +-} +- +-func (vm *validatedMap) setAll(t *testing.T, other *validatedMap) { +- vm.impl.SetAll(other.impl) +- +- // Note: this is buggy because we are not updating vm.clock, vm.deleted, or +- // vm.seen. 
+- maps.Copy(vm.expected, other.expected) +- vm.validate(t) +-} +- +-func (vm *validatedMap) set(t *testing.T, key, value int) { +- entry := mapEntry{key: key, value: value} +- +- vm.clock++ +- vm.seen[entry] = vm.clock +- +- vm.impl.Set(key, value, func(deletedKey, deletedValue any) { +- if deletedKey != key || deletedValue != value { +- t.Fatalf("unexpected passed in deleted entry: %v/%v, expected: %v/%v", deletedKey, deletedValue, key, value) +- } +- // Not safe if closure shared between two validatedMaps. +- vm.deleted[entry] = vm.clock +- }) +- vm.expected[key] = value +- vm.validate(t) +- +- gotValue, ok := vm.impl.Get(key) +- if !ok || gotValue != value { +- t.Fatalf("unexpected get result after insertion, key: %v, expected: %v, got: %v (%v)", key, value, gotValue, ok) +- } +-} +- +-func (vm *validatedMap) remove(t *testing.T, key int) { +- vm.clock++ +- deleted := vm.impl.Delete(key) +- if _, ok := vm.expected[key]; ok != deleted { +- t.Fatalf("Delete(%d) = %t, want %t", key, deleted, ok) +- } +- delete(vm.expected, key) +- vm.validate(t) +- +- gotValue, ok := vm.impl.Get(key) +- if ok { +- t.Fatalf("unexpected get result after removal, key: %v, got: %v", key, gotValue) +- } +-} +- +-func (vm *validatedMap) clone() *validatedMap { +- expected := make(map[int]int, len(vm.expected)) +- maps.Copy(expected, vm.expected) +- +- return &validatedMap{ +- impl: vm.impl.Clone(), +- expected: expected, +- deleted: vm.deleted, +- seen: vm.seen, +- } +-} +- +-func (vm *validatedMap) destroy() { +- vm.impl.Destroy() +-} +- +-func assertSameMap(t *testing.T, map1, map2 any) { +- t.Helper() +- +- if !reflect.DeepEqual(map1, map2) { +- t.Fatalf("different maps:\n%v\nvs\n%v", map1, map2) +- } +-} +diff -urN a/gopls/internal/util/persistent/race_test.go b/gopls/internal/util/persistent/race_test.go +--- a/gopls/internal/util/persistent/race_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/util/persistent/race_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,66 +0,0 @@ +-// Copyright 2025 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-//go:build race +- +-package persistent +- +-import ( +- "context" +- "maps" +- "testing" +- "time" +- +- "golang.org/x/sync/errgroup" +-) +- +-// TestConcurrency exercises concurrent map access. +-// It doesn't assert anything, but it runs under the race detector. +-func TestConcurrency(t *testing.T) { +- ctx, cancel := context.WithTimeout(t.Context(), 5*time.Second) +- defer cancel() +- var orig Map[int, int] // maps subset of [0-10] to itself (values aren't interesting) +- for i := range 10 { +- orig.Set(i, i, func(k, v any) { /* just for good measure*/ }) +- } +- g, ctx := errgroup.WithContext(ctx) +- const N = 10 // concurrency level +- g.SetLimit(N) +- for range N { +- g.Go(func() error { +- // Each thread has its own clone of the original, +- // sharing internal structures. Each map is accessed +- // only by a single thread; the shared data is immutable. +- m := orig.Clone() +- +- // Run until the timeout. 
+- for ctx.Err() == nil { +- for i := range 1000 { +- key := i % 10 +- +- switch { +- case i%2 == 0: +- _, _ = m.Get(key) +- case i%11 == 0: +- m.Set(key, key, func(key, value any) {}) +- case i%13 == 0: +- _ = maps.Collect(m.All()) +- case i%17 == 0: +- _ = m.Delete(key) +- case i%19 == 0: +- _ = m.Keys() +- case i%31 == 0: +- _ = m.String() +- case i%23 == 0: +- _ = m.Clone() +- } +- // Don't call m.Clear(), as it would +- // disentangle the various maps from each other. +- } +- } +- return nil +- }) +- } +- g.Wait() // no errors +-} +diff -urN a/gopls/internal/util/persistent/set.go b/gopls/internal/util/persistent/set.go +--- a/gopls/internal/util/persistent/set.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/util/persistent/set.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,84 +0,0 @@ +-// Copyright 2023 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package persistent +- +-import ( +- "iter" +- +- "golang.org/x/tools/gopls/internal/util/constraints" +-) +- +-// Set is a collection of elements of type K. +-// +-// It uses immutable data structures internally, so that sets can be cloned in +-// constant time. +-// +-// The zero value is a valid empty set. +-type Set[K constraints.Ordered] struct { +- impl *Map[K, struct{}] +-} +- +-// Clone creates a copy of the receiver. +-func (s *Set[K]) Clone() *Set[K] { +- clone := new(Set[K]) +- if s.impl != nil { +- clone.impl = s.impl.Clone() +- } +- return clone +-} +- +-// Destroy destroys the set. +-// +-// After Destroy, the Set should not be used again. +-func (s *Set[K]) Destroy() { +- if s.impl != nil { +- s.impl.Destroy() +- } +-} +- +-// Contains reports whether s contains the given key. +-func (s *Set[K]) Contains(key K) bool { +- if s.impl == nil { +- return false +- } +- _, ok := s.impl.Get(key) +- return ok +-} +- +-// All returns the sequence of set elements in ascending order. +-func (s *Set[K]) All() iter.Seq[K] { +- return func(yield func(K) bool) { +- if s.impl != nil { +- s.impl.root.forEach(func(k, _ any) bool { +- return yield(k.(K)) +- }) +- } +- } +-} +- +-// AddAll adds all elements from other to the receiver set. +-func (s *Set[K]) AddAll(other *Set[K]) { +- if other.impl != nil { +- if s.impl == nil { +- s.impl = new(Map[K, struct{}]) +- } +- s.impl.SetAll(other.impl) +- } +-} +- +-// Add adds an element to the set. +-func (s *Set[K]) Add(key K) { +- if s.impl == nil { +- s.impl = new(Map[K, struct{}]) +- } +- s.impl.Set(key, struct{}{}, nil) +-} +- +-// Remove removes an element from the set. +-func (s *Set[K]) Remove(key K) { +- if s.impl != nil { +- s.impl.Delete(key) +- } +-} +diff -urN a/gopls/internal/util/persistent/set_test.go b/gopls/internal/util/persistent/set_test.go +--- a/gopls/internal/util/persistent/set_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/util/persistent/set_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,132 +0,0 @@ +-// Copyright 2023 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. 
+- +-package persistent_test +- +-import ( +- "fmt" +- "strings" +- "testing" +- +- "golang.org/x/tools/gopls/internal/util/constraints" +- "golang.org/x/tools/gopls/internal/util/persistent" +-) +- +-func TestSet(t *testing.T) { +- const ( +- add = iota +- remove +- ) +- type op struct { +- op int +- v int +- } +- +- tests := []struct { +- label string +- ops []op +- want []int +- }{ +- {"empty", nil, nil}, +- {"singleton", []op{{add, 1}}, []int{1}}, +- {"add and remove", []op{ +- {add, 1}, +- {remove, 1}, +- }, nil}, +- {"interleaved and remove", []op{ +- {add, 1}, +- {add, 2}, +- {remove, 1}, +- {add, 3}, +- }, []int{2, 3}}, +- } +- +- for _, test := range tests { +- t.Run(test.label, func(t *testing.T) { +- var s persistent.Set[int] +- for _, op := range test.ops { +- switch op.op { +- case add: +- s.Add(op.v) +- case remove: +- s.Remove(op.v) +- } +- } +- +- if d := diff(&s, test.want); d != "" { +- t.Errorf("unexpected diff:\n%s", d) +- } +- }) +- } +-} +- +-func TestSet_Clone(t *testing.T) { +- s1 := new(persistent.Set[int]) +- s1.Add(1) +- s1.Add(2) +- s2 := s1.Clone() +- s1.Add(3) +- s2.Add(4) +- if d := diff(s1, []int{1, 2, 3}); d != "" { +- t.Errorf("s1: unexpected diff:\n%s", d) +- } +- if d := diff(s2, []int{1, 2, 4}); d != "" { +- t.Errorf("s2: unexpected diff:\n%s", d) +- } +-} +- +-func TestSet_AddAll(t *testing.T) { +- s1 := new(persistent.Set[int]) +- s1.Add(1) +- s1.Add(2) +- s2 := new(persistent.Set[int]) +- s2.Add(2) +- s2.Add(3) +- s2.Add(4) +- s3 := new(persistent.Set[int]) +- +- s := new(persistent.Set[int]) +- s.AddAll(s1) +- s.AddAll(s2) +- s.AddAll(s3) +- +- if d := diff(s1, []int{1, 2}); d != "" { +- t.Errorf("s1: unexpected diff:\n%s", d) +- } +- if d := diff(s2, []int{2, 3, 4}); d != "" { +- t.Errorf("s2: unexpected diff:\n%s", d) +- } +- if d := diff(s3, nil); d != "" { +- t.Errorf("s3: unexpected diff:\n%s", d) +- } +- if d := diff(s, []int{1, 2, 3, 4}); d != "" { +- t.Errorf("s: unexpected diff:\n%s", d) +- } +-} +- +-func diff[K constraints.Ordered](got *persistent.Set[K], want []K) string { +- wantSet := make(map[K]struct{}) +- for _, w := range want { +- wantSet[w] = struct{}{} +- } +- var diff []string +- for key := range got.All() { +- if _, ok := wantSet[key]; !ok { +- diff = append(diff, fmt.Sprintf("+%v", key)) +- } +- } +- for key := range wantSet { +- if !got.Contains(key) { +- diff = append(diff, fmt.Sprintf("-%v", key)) +- } +- } +- if len(diff) > 0 { +- d := new(strings.Builder) +- for _, l := range diff { +- fmt.Fprintln(d, l) +- } +- return d.String() +- } +- return "" +-} +diff -urN a/gopls/internal/util/README.md b/gopls/internal/util/README.md +--- a/gopls/internal/util/README.md 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/util/README.md 1969-12-31 18:00:00.000000000 -0600 +@@ -1,7 +0,0 @@ +-# util +- +-This directory is not a Go package. +- +-Its subdirectories are for utility packages, defined as implementation +-helpers (not core machinery) that are used in different ways across +-the gopls codebase. +\ No newline at end of file +diff -urN a/gopls/internal/util/safetoken/safetoken.go b/gopls/internal/util/safetoken/safetoken.go +--- a/gopls/internal/util/safetoken/safetoken.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/util/safetoken/safetoken.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,131 +0,0 @@ +-// Copyright 2022 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. 
+- +-// Package safetoken provides wrappers around methods in go/token, +-// that return errors rather than panicking. +-// +-// It also provides a central place for workarounds in the underlying +-// packages. The use of this package's functions instead of methods of +-// token.File (such as Offset, Position, and PositionFor) is mandatory +-// throughout the gopls codebase and enforced by a static check. +-package safetoken +- +-import ( +- "fmt" +- "go/token" +-) +- +-// Offset returns f.Offset(pos), but first checks that the file +-// contains the pos. +-// +-// The definition of "contains" here differs from that of token.File +-// in order to work around a bug in the parser (issue #57490): during +-// error recovery, the parser may create syntax nodes whose computed +-// End position is 1 byte beyond EOF, which would cause +-// token.File.Offset to panic. The workaround is that this function +-// accepts a Pos that is exactly 1 byte beyond EOF and maps it to the +-// EOF offset. +-func Offset(f *token.File, pos token.Pos) (int, error) { +- if !inRange(f, pos) { +- // Accept a Pos that is 1 byte beyond EOF, +- // and map it to the EOF offset. +- // (Workaround for #57490.) +- if int(pos) == f.Base()+f.Size()+1 { +- return f.Size(), nil +- } +- +- return -1, fmt.Errorf("pos %d is not in range [%d:%d] of file %s", +- pos, f.Base(), f.Base()+f.Size(), f.Name()) +- } +- return int(pos) - f.Base(), nil +-} +- +-// Offsets returns Offset(start) and Offset(end). +-// It returns an error if either failed, or if start > end. +-func Offsets(f *token.File, start, end token.Pos) (int, int, error) { +- startOffset, err := Offset(f, start) +- if err != nil { +- return 0, 0, fmt.Errorf("start: %v", err) +- } +- endOffset, err := Offset(f, end) +- if err != nil { +- return 0, 0, fmt.Errorf("end: %v", err) +- } +- if start > end { +- return 0, 0, fmt.Errorf("start (offset %d) > end (offset %d)", start, end) +- } +- return startOffset, endOffset, nil +-} +- +-// Pos returns f.Pos(offset), but first checks that the offset is +-// non-negative and not larger than the size of the file. +-func Pos(f *token.File, offset int) (token.Pos, error) { +- if !(0 <= offset && offset <= f.Size()) { +- return token.NoPos, fmt.Errorf("offset %d is not in range for file %s of size %d", offset, f.Name(), f.Size()) +- } +- return token.Pos(f.Base() + offset), nil +-} +- +-// inRange reports whether file f contains position pos, +-// according to the invariants of token.File. +-// +-// This function is not public because of the ambiguity it would +-// create w.r.t. the definition of "contains". Use Offset instead. +-func inRange(f *token.File, pos token.Pos) bool { +- return token.Pos(f.Base()) <= pos && pos <= token.Pos(f.Base()+f.Size()) +-} +- +-// Position returns the Position for the pos value in the given file. +-// +-// p must be NoPos, a valid Pos in the range of f, or exactly 1 byte +-// beyond the end of f. (See [Offset] for explanation.) +-// Any other value causes a panic. +-// +-// Line directives (//line comments) are ignored. +-func Position(f *token.File, pos token.Pos) token.Position { +- // Work around issue #57490. +- if int(pos) == f.Base()+f.Size()+1 { +- pos-- +- } +- +- // TODO(adonovan): centralize the workaround for +- // golang/go#41029 (newline at EOF) here too. +- +- return f.PositionFor(pos, false) +-} +- +-// Line returns the line number for the given offset in the given file. 
+-func Line(f *token.File, pos token.Pos) int { +- return Position(f, pos).Line +-} +- +-// StartPosition converts a start Pos in the FileSet into a Position. +-// +-// Call this function only if start represents the start of a token or +-// parse tree, such as the result of Node.Pos(). If start is the end of +-// an interval, such as Node.End(), call EndPosition instead, as it +-// may need the correction described at [Position]. +-func StartPosition(fset *token.FileSet, start token.Pos) (_ token.Position) { +- if f := fset.File(start); f != nil { +- return Position(f, start) +- } +- return +-} +- +-// EndPosition converts an end Pos in the FileSet into a Position. +-// +-// Call this function only if pos represents the end of +-// a non-empty interval, such as the result of Node.End(). +-func EndPosition(fset *token.FileSet, end token.Pos) (_ token.Position) { +- if f := fset.File(end); f != nil && int(end) > f.Base() { +- return Position(f, end) +- } +- +- // Work around issue #57490. +- if f := fset.File(end - 1); f != nil { +- return Position(f, end) +- } +- +- return +-} +diff -urN a/gopls/internal/util/safetoken/safetoken_test.go b/gopls/internal/util/safetoken/safetoken_test.go +--- a/gopls/internal/util/safetoken/safetoken_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/util/safetoken/safetoken_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,133 +0,0 @@ +-// Copyright 2021 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package safetoken_test +- +-import ( +- "fmt" +- "go/parser" +- "go/token" +- "go/types" +- "os" +- "testing" +- +- "golang.org/x/tools/go/packages" +- "golang.org/x/tools/gopls/internal/util/safetoken" +- "golang.org/x/tools/internal/testenv" +-) +- +-func TestWorkaroundIssue57490(t *testing.T) { +- // During error recovery the parser synthesizes various close +- // tokens at EOF, causing the End position of incomplete +- // syntax nodes, computed as Rbrace+len("}"), to be beyond EOF. +- src := `package p; func f() { var x struct` +- fset := token.NewFileSet() +- file, _ := parser.ParseFile(fset, "a.go", src, parser.SkipObjectResolution) +- tf := fset.File(file.FileStart) +- +- // Add another file to the FileSet. +- file2, _ := parser.ParseFile(fset, "b.go", "package q", parser.SkipObjectResolution) +- +- // This is the ambiguity of #57490... +- if file.End() != file2.Pos() { +- t.Errorf("file.End() %d != %d file2.Pos()", file.End(), file2.Pos()) +- } +- // ...which causes these statements to panic. +- if false { +- tf.Offset(file.End()) // panic: invalid Pos value 36 (should be in [1, 35]) +- tf.Position(file.End()) // panic: invalid Pos value 36 (should be in [1, 35]) +- } +- +- // The offset of the EOF position is the file size. +- offset, err := safetoken.Offset(tf, file.End()-1) +- if err != nil || offset != tf.Size() { +- t.Errorf("Offset(EOF) = (%d, %v), want token.File.Size %d", offset, err, tf.Size()) +- } +- +- // The offset of the file.End() position, 1 byte beyond EOF, +- // is also the size of the file. 
+- offset, err = safetoken.Offset(tf, file.End()) +- if err != nil || offset != tf.Size() { +- t.Errorf("Offset(ast.File.End()) = (%d, %v), want token.File.Size %d", offset, err, tf.Size()) +- } +- +- if got, want := safetoken.Position(tf, file.End()).String(), "a.go:1:35"; got != want { +- t.Errorf("Position(ast.File.End()) = %s, want %s", got, want) +- } +- +- if got, want := safetoken.EndPosition(fset, file.End()).String(), "a.go:1:35"; got != want { +- t.Errorf("EndPosition(ast.File.End()) = %s, want %s", got, want) +- } +- +- // Note that calling StartPosition on an end may yield the wrong file: +- if got, want := safetoken.StartPosition(fset, file.End()).String(), "b.go:1:1"; got != want { +- t.Errorf("StartPosition(ast.File.End()) = %s, want %s", got, want) +- } +-} +- +-// To reduce the risk of panic, or bugs for which this package +-// provides a workaround, this test statically reports references to +-// forbidden methods of token.File or FileSet throughout gopls and +-// suggests alternatives. +-func TestGoplsSourceDoesNotCallTokenFileMethods(t *testing.T) { +- testenv.NeedsGoPackages(t) +- testenv.NeedsLocalXTools(t) +- +- cfg := &packages.Config{ +- Mode: packages.NeedName | packages.NeedModule | packages.NeedCompiledGoFiles | packages.NeedTypes | packages.NeedTypesInfo | packages.NeedSyntax | packages.NeedImports | packages.NeedDeps, +- } +- cfg.Env = os.Environ() +- cfg.Env = append(cfg.Env, +- "GOPACKAGESDRIVER=off", +- "GOWORK=off", // necessary for -mod=mod below +- "GOFLAGS=-mod=mod", +- ) +- +- pkgs, err := packages.Load(cfg, "go/token", "golang.org/x/tools/gopls/...") +- if err != nil { +- t.Fatal(err) +- } +- var tokenPkg *packages.Package +- for _, pkg := range pkgs { +- if pkg.PkgPath == "go/token" { +- tokenPkg = pkg +- break +- } +- } +- if tokenPkg == nil { +- t.Fatal("missing package go/token") +- } +- +- File := tokenPkg.Types.Scope().Lookup("File") +- FileSet := tokenPkg.Types.Scope().Lookup("FileSet") +- +- alternative := make(map[types.Object]string) +- setAlternative := func(recv types.Object, old, new string) { +- oldMethod, _, _ := types.LookupFieldOrMethod(recv.Type(), true, recv.Pkg(), old) +- alternative[oldMethod] = new +- } +- setAlternative(File, "Line", "safetoken.Line") +- setAlternative(File, "Offset", "safetoken.Offset") +- setAlternative(File, "Position", "safetoken.Position") +- setAlternative(File, "PositionFor", "safetoken.Position") +- setAlternative(FileSet, "Position", "safetoken.StartPosition or EndPosition") +- setAlternative(FileSet, "PositionFor", "safetoken.StartPosition or EndPosition") +- +- for _, pkg := range pkgs { +- switch pkg.PkgPath { +- case "go/token", +- "golang.org/x/tools/gopls/internal/util/safetoken", // this package +- "golang.org/x/tools/gopls/internal/cache/parsego": // copies go/parser/resolver.go +- continue // allow calls within these packages +- } +- +- for ident, obj := range pkg.TypesInfo.Uses { +- if alt, ok := alternative[obj]; ok { +- posn := safetoken.StartPosition(pkg.Fset, ident.Pos()) +- fmt.Fprintf(os.Stderr, "%s: forbidden use of %v; use %s instead.\n", posn, obj, alt) +- t.Fail() +- } +- } +- } +-} +diff -urN a/gopls/internal/util/tokeninternal/tokeninternal.go b/gopls/internal/util/tokeninternal/tokeninternal.go +--- a/gopls/internal/util/tokeninternal/tokeninternal.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/util/tokeninternal/tokeninternal.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,30 +0,0 @@ +-// Copyright 2023 The Go Authors. All rights reserved. 
+-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-// package tokeninternal provides convenient helpers for the go/token package. +-package tokeninternal +- +-import ( +- "go/token" +- "slices" +-) +- +-// FileSetFor returns a new FileSet containing a sequence of new Files with +-// the same base, size, and line as the input files, for use in APIs that +-// require a FileSet. +-// +-// Precondition: the input files must be non-overlapping, and sorted in order +-// of their Base. +-func FileSetFor(files ...*token.File) *token.FileSet { +- fset := token.NewFileSet() +- fset.AddExistingFiles(files...) +- return fset +-} +- +-// CloneFileSet creates a new FileSet holding all files in fset. It does not +-// create copies of the token.Files in fset: they are added to the resulting +-// FileSet unmodified. +-func CloneFileSet(fset *token.FileSet) *token.FileSet { +- return FileSetFor(slices.Collect(fset.Iterate)...) +-} +diff -urN a/gopls/internal/util/typesutil/typesutil.go b/gopls/internal/util/typesutil/typesutil.go +--- a/gopls/internal/util/typesutil/typesutil.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/util/typesutil/typesutil.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,210 +0,0 @@ +-// Copyright 2023 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package typesutil +- +-import ( +- "bytes" +- "go/ast" +- "go/token" +- "go/types" +- "strings" +- +- "golang.org/x/tools/go/ast/edge" +- "golang.org/x/tools/go/ast/inspector" +-) +- +-// FormatTypeParams turns TypeParamList into its Go representation, such as: +-// [T, Y]. Note that it does not print constraints as this is mainly used for +-// formatting type params in method receivers. +-func FormatTypeParams(tparams *types.TypeParamList) string { +- if tparams == nil || tparams.Len() == 0 { +- return "" +- } +- var buf bytes.Buffer +- buf.WriteByte('[') +- for i := 0; i < tparams.Len(); i++ { +- if i > 0 { +- buf.WriteString(", ") +- } +- buf.WriteString(tparams.At(i).Obj().Name()) +- } +- buf.WriteByte(']') +- return buf.String() +-} +- +-// TypesFromContext returns the type (or perhaps zero or multiple types) +-// of the "hole" into which the expression identified by path must fit. +-// +-// For example, given +-// +-// s, i := "", 0 +-// s, i = EXPR +-// +-// the hole that must be filled by EXPR has type (string, int). +-// +-// It returns nil on failure. +- +-func TypesFromContext(info *types.Info, cur inspector.Cursor) []types.Type { +- anyType := types.Universe.Lookup("any").Type() +- var typs []types.Type +- +- // TODO: do cur = unparenEnclosing(cur), once CL 701035 lands. 
+- for { +- ek, _ := cur.ParentEdge() +- if ek != edge.ParenExpr_X { +- break +- } +- cur = cur.Parent() +- } +- +- validType := func(t types.Type) types.Type { +- if t != nil && !containsInvalid(t) { +- return types.Default(t) +- } else { +- return anyType +- } +- } +- +- ek, idx := cur.ParentEdge() +- switch ek { +- case edge.AssignStmt_Lhs, edge.AssignStmt_Rhs: +- assign := cur.Parent().Node().(*ast.AssignStmt) +- // Append all lhs's type +- if len(assign.Rhs) == 1 { +- for _, lhs := range assign.Lhs { +- t := info.TypeOf(lhs) +- typs = append(typs, validType(t)) +- } +- break +- } +- // Lhs and Rhs counts do not match, give up +- if len(assign.Lhs) != len(assign.Rhs) { +- break +- } +- // Append corresponding index of lhs's type +- if ek == edge.AssignStmt_Rhs { +- t := info.TypeOf(assign.Lhs[idx]) +- typs = append(typs, validType(t)) +- } +- case edge.ValueSpec_Names, edge.ValueSpec_Type, edge.ValueSpec_Values: +- spec := cur.Parent().Node().(*ast.ValueSpec) +- if len(spec.Values) == 1 { +- for _, lhs := range spec.Names { +- t := info.TypeOf(lhs) +- typs = append(typs, validType(t)) +- } +- break +- } +- if len(spec.Values) != len(spec.Names) { +- break +- } +- t := info.TypeOf(spec.Type) +- typs = append(typs, validType(t)) +- case edge.ReturnStmt_Results: +- returnstmt := cur.Parent().Node().(*ast.ReturnStmt) +- sig := EnclosingSignature(cur, info) +- if sig == nil || sig.Results() == nil { +- break +- } +- retsig := sig.Results() +- // Append all return declarations' type +- if len(returnstmt.Results) == 1 { +- for i := 0; i < retsig.Len(); i++ { +- t := retsig.At(i).Type() +- typs = append(typs, validType(t)) +- } +- break +- } +- // Return declaration and actual return counts do not match, give up +- if retsig.Len() != len(returnstmt.Results) { +- break +- } +- // Append corresponding index of return declaration's type +- t := retsig.At(idx).Type() +- typs = append(typs, validType(t)) +- +- case edge.CallExpr_Args: +- call := cur.Parent().Node().(*ast.CallExpr) +- t := info.TypeOf(call.Fun) +- if t == nil { +- break +- } +- +- if sig, ok := t.Underlying().(*types.Signature); ok { +- var paramType types.Type +- if sig.Variadic() && idx >= sig.Params().Len()-1 { +- v := sig.Params().At(sig.Params().Len() - 1) +- if s, _ := v.Type().(*types.Slice); s != nil { +- paramType = s.Elem() +- } +- } else if idx < sig.Params().Len() { +- paramType = sig.Params().At(idx).Type() +- } else { +- break +- } +- if paramType == nil || containsInvalid(paramType) { +- paramType = anyType +- } +- typs = append(typs, paramType) +- } +- case edge.IfStmt_Cond: +- typs = append(typs, types.Typ[types.Bool]) +- case edge.ForStmt_Cond: +- typs = append(typs, types.Typ[types.Bool]) +- case edge.UnaryExpr_X: +- unexpr := cur.Parent().Node().(*ast.UnaryExpr) +- var t types.Type +- switch unexpr.Op { +- case token.NOT: +- t = types.Typ[types.Bool] +- case token.ADD, token.SUB, token.XOR: +- t = types.Typ[types.Int] +- default: +- t = anyType +- } +- typs = append(typs, t) +- case edge.BinaryExpr_X, edge.BinaryExpr_Y: +- binexpr := cur.Parent().Node().(*ast.BinaryExpr) +- switch ek { +- case edge.BinaryExpr_X: +- t := info.TypeOf(binexpr.Y) +- typs = append(typs, validType(t)) +- case edge.BinaryExpr_Y: +- t := info.TypeOf(binexpr.X) +- typs = append(typs, validType(t)) +- } +- default: +- // TODO: support other kinds of "holes" as the need arises. +- } +- return typs +-} +- +-// containsInvalid checks if the type name contains "invalid type", +-// which is not a valid syntax to generate. 
+-func containsInvalid(t types.Type) bool { +- typeString := types.TypeString(t, nil) +- return strings.Contains(typeString, types.Typ[types.Invalid].String()) +-} +- +-// EnclosingSignature returns the signature of the innermost +-// function enclosing the syntax node denoted by cur +-// or nil if the node is not within a function. +-func EnclosingSignature(cur inspector.Cursor, info *types.Info) *types.Signature { +- for c := range cur.Enclosing((*ast.FuncDecl)(nil), (*ast.FuncLit)(nil)) { +- switch n := c.Node().(type) { +- case *ast.FuncDecl: +- if f, ok := info.Defs[n.Name]; ok { +- return f.Type().(*types.Signature) +- } +- return nil +- case *ast.FuncLit: +- if f, ok := info.Types[n]; ok { +- return f.Type.(*types.Signature) +- } +- return nil +- } +- } +- return nil +-} +diff -urN a/gopls/internal/version/version.go b/gopls/internal/version/version.go +--- a/gopls/internal/version/version.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/version/version.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,29 +0,0 @@ +-// Copyright 2024 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-// Package version manages the gopls version. +-// +-// The VersionOverride variable may be used to set the gopls version at link +-// time. +-package version +- +-import "runtime/debug" +- +-var VersionOverride = "" +- +-// Version returns the gopls version. +-// +-// By default, this is read from runtime/debug.ReadBuildInfo, but may be +-// overridden by the [VersionOverride] variable. +-func Version() string { +- if VersionOverride != "" { +- return VersionOverride +- } +- if info, ok := debug.ReadBuildInfo(); ok { +- if info.Main.Version != "" { +- return info.Main.Version +- } +- } +- return "(unknown)" +-} +diff -urN a/gopls/internal/vulncheck/copier.go b/gopls/internal/vulncheck/copier.go +--- a/gopls/internal/vulncheck/copier.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/vulncheck/copier.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,142 +0,0 @@ +-// Copyright 2023 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-//go:build ignore +-// +build ignore +- +-//go:generate go run ./copier.go +- +-// Copier is a tool to automate copy of govulncheck's internal files. 
+-// +-// - copy golang.org/x/vuln/internal/osv/ to osv +-// - copy golang.org/x/vuln/internal/govulncheck/ to govulncheck +-package main +- +-import ( +- "bytes" +- "encoding/json" +- "fmt" +- "go/parser" +- "go/token" +- "log" +- "os" +- "os/exec" +- "path/filepath" +- "strconv" +- "strings" +- +- "golang.org/x/tools/internal/edit" +-) +- +-func main() { +- log.SetPrefix("copier: ") +- log.SetFlags(log.Lshortfile) +- +- srcMod := "golang.org/x/vuln" +- srcModVers := "@latest" +- srcDir, srcVer := downloadModule(srcMod + srcModVers) +- +- cfg := rewrite{ +- banner: fmt.Sprintf("// Code generated by copying from %v@%v (go run copier.go); DO NOT EDIT.", srcMod, srcVer), +- srcImportPath: "golang.org/x/vuln/internal", +- dstImportPath: currentPackagePath(), +- } +- +- copyFiles("osv", filepath.Join(srcDir, "internal", "osv"), cfg) +- copyFiles("govulncheck", filepath.Join(srcDir, "internal", "govulncheck"), cfg) +-} +- +-type rewrite struct { +- // DO NOT EDIT marker to add at the beginning +- banner string +- // rewrite srcImportPath with dstImportPath +- srcImportPath string +- dstImportPath string +-} +- +-func copyFiles(dst, src string, cfg rewrite) { +- entries, err := os.ReadDir(src) +- if err != nil { +- log.Fatalf("failed to read dir: %v", err) +- } +- if err := os.MkdirAll(dst, 0777); err != nil { +- log.Fatalf("failed to create dir: %v", err) +- } +- +- for _, e := range entries { +- fname := e.Name() +- // we need only non-test go files. +- if e.IsDir() || !strings.HasSuffix(fname, ".go") || strings.HasSuffix(fname, "_test.go") { +- continue +- } +- data, err := os.ReadFile(filepath.Join(src, fname)) +- if err != nil { +- log.Fatal(err) +- } +- fset := token.NewFileSet() +- f, err := parser.ParseFile(fset, fname, data, parser.ParseComments|parser.ImportsOnly) +- if err != nil { +- log.Fatalf("parsing source module:\n%s", err) +- } +- +- buf := edit.NewBuffer(data) +- at := func(p token.Pos) int { +- return fset.File(p).Offset(p) +- } +- +- // Add banner right after the copyright statement (the first comment) +- bannerInsert, banner := f.FileStart, cfg.banner +- if len(f.Comments) > 0 && strings.HasPrefix(f.Comments[0].Text(), "Copyright ") { +- bannerInsert = f.Comments[0].End() +- banner = "\n\n" + banner +- } +- buf.Replace(at(bannerInsert), at(bannerInsert), banner) +- +- // Adjust imports +- for _, spec := range f.Imports { +- path, err := strconv.Unquote(spec.Path.Value) +- if err != nil { +- log.Fatal(err) +- } +- if strings.HasPrefix(path, cfg.srcImportPath) { +- newPath := strings.Replace(path, cfg.srcImportPath, cfg.dstImportPath, 1) +- buf.Replace(at(spec.Path.Pos()), at(spec.Path.End()), strconv.Quote(newPath)) +- } +- } +- data = buf.Bytes() +- +- if err := os.WriteFile(filepath.Join(dst, fname), data, 0666); err != nil { +- log.Fatal(err) +- } +- } +-} +- +-func downloadModule(srcModVers string) (dir, ver string) { +- var stdout, stderr bytes.Buffer +- cmd := exec.Command("go", "mod", "download", "-json", srcModVers) +- cmd.Stdout = &stdout +- cmd.Stderr = &stderr +- if err := cmd.Run(); err != nil { +- log.Fatalf("go mod download -json %s: %v\n%s%s", srcModVers, err, stderr.Bytes(), stdout.Bytes()) +- } +- var info struct { +- Dir string +- Version string +- } +- if err := json.Unmarshal(stdout.Bytes(), &info); err != nil { +- log.Fatalf("go mod download -json %s: invalid JSON output: %v\n%s%s", srcModVers, err, stderr.Bytes(), stdout.Bytes()) +- } +- return info.Dir, info.Version +-} +- +-func currentPackagePath() string { +- var stdout, stderr bytes.Buffer +- cmd 
:= exec.Command("go", "list", ".") +- cmd.Stdout = &stdout +- cmd.Stderr = &stderr +- if err := cmd.Run(); err != nil { +- log.Fatalf("go list: %v\n%s%s", err, stderr.Bytes(), stdout.Bytes()) +- } +- return strings.TrimSpace(stdout.String()) +-} +diff -urN a/gopls/internal/vulncheck/govulncheck/govulncheck.go b/gopls/internal/vulncheck/govulncheck/govulncheck.go +--- a/gopls/internal/vulncheck/govulncheck/govulncheck.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/vulncheck/govulncheck/govulncheck.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,160 +0,0 @@ +-// Copyright 2023 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-// Code generated by copying from golang.org/x/vuln@v1.0.1 (go run copier.go); DO NOT EDIT. +- +-// Package govulncheck contains the JSON output structs for govulncheck. +-package govulncheck +- +-import ( +- "time" +- +- "golang.org/x/tools/gopls/internal/vulncheck/osv" +-) +- +-const ( +- // ProtocolVersion is the current protocol version this file implements +- ProtocolVersion = "v1.0.0" +-) +- +-// Message is an entry in the output stream. It will always have exactly one +-// field filled in. +-type Message struct { +- Config *Config `json:"config,omitempty"` +- Progress *Progress `json:"progress,omitempty"` +- OSV *osv.Entry `json:"osv,omitempty"` +- Finding *Finding `json:"finding,omitempty"` +-} +- +-// Config must occur as the first message of a stream and informs the client +-// about the information used to generate the findings. +-// The only required field is the protocol version. +-type Config struct { +- // ProtocolVersion specifies the version of the JSON protocol. +- ProtocolVersion string `json:"protocol_version"` +- +- // ScannerName is the name of the tool, for example, govulncheck. +- // +- // We expect this JSON format to be used by other tools that wrap +- // govulncheck, which will have a different name. +- ScannerName string `json:"scanner_name,omitempty"` +- +- // ScannerVersion is the version of the tool. +- ScannerVersion string `json:"scanner_version,omitempty"` +- +- // DB is the database used by the tool, for example, +- // vuln.go.dev. +- DB string `json:"db,omitempty"` +- +- // LastModified is the last modified time of the data source. +- DBLastModified *time.Time `json:"db_last_modified,omitempty"` +- +- // GoVersion is the version of Go used for analyzing standard library +- // vulnerabilities. +- GoVersion string `json:"go_version,omitempty"` +- +- // ScanLevel instructs govulncheck to analyze at a specific level of detail. +- // Valid values include module, package and symbol. +- ScanLevel ScanLevel `json:"scan_level,omitempty"` +-} +- +-// Progress messages are informational only, intended to allow users to monitor +-// the progress of a long running scan. +-// A stream must remain fully valid and able to be interpreted with all progress +-// messages removed. +-type Progress struct { +- // A time stamp for the message. +- Timestamp *time.Time `json:"time,omitempty"` +- +- // Message is the progress message. +- Message string `json:"message,omitempty"` +-} +- +-// Vuln represents a single OSV entry. +-type Finding struct { +- // OSV is the id of the detected vulnerability. +- OSV string `json:"osv,omitempty"` +- +- // FixedVersion is the module version where the vulnerability was +- // fixed. This is empty if a fix is not available. 
+- // +- // If there are multiple fixed versions in the OSV report, this will +- // be the fixed version in the latest range event for the OSV report. +- // +- // For example, if the range events are +- // {introduced: 0, fixed: 1.0.0} and {introduced: 1.1.0}, the fixed version +- // will be empty. +- // +- // For the stdlib, we will show the fixed version closest to the +- // Go version that is used. For example, if a fix is available in 1.17.5 and +- // 1.18.5, and the GOVERSION is 1.17.3, 1.17.5 will be returned as the +- // fixed version. +- FixedVersion string `json:"fixed_version,omitempty"` +- +- // Trace contains an entry for each frame in the trace. +- // +- // Frames are sorted starting from the imported vulnerable symbol +- // until the entry point. The first frame in Frames should match +- // Symbol. +- // +- // In binary mode, trace will contain a single-frame with no position +- // information. +- // +- // When a package is imported but no vulnerable symbol is called, the trace +- // will contain a single-frame with no symbol or position information. +- Trace []*Frame `json:"trace,omitempty"` +-} +- +-// Frame represents an entry in a finding trace. +-type Frame struct { +- // Module is the module path of the module containing this symbol. +- // +- // Importable packages in the standard library will have the path "stdlib". +- Module string `json:"module"` +- +- // Version is the module version from the build graph. +- Version string `json:"version,omitempty"` +- +- // Package is the import path. +- Package string `json:"package,omitempty"` +- +- // Function is the function name. +- Function string `json:"function,omitempty"` +- +- // Receiver is the receiver type if the called symbol is a method. +- // +- // The client can create the final symbol name by +- // prepending Receiver to FuncName. +- Receiver string `json:"receiver,omitempty"` +- +- // Position describes an arbitrary source position +- // including the file, line, and column location. +- // A Position is valid if the line number is > 0. +- Position *Position `json:"position,omitempty"` +-} +- +-// Position represents arbitrary source position. +-type Position struct { +- Filename string `json:"filename,omitempty"` // filename, if any +- Offset int `json:"offset"` // byte offset, starting at 0 +- Line int `json:"line"` // line number, starting at 1 +- Column int `json:"column"` // column number, starting at 1 (byte count) +-} +- +-// ScanLevel represents the detail level at which a scan occurred. +-// This can be necessary to correctly interpret the findings, for instance if +-// a scan is at symbol level and a finding does not have a symbol it means the +-// vulnerability was imported but not called. If the scan however was at +-// "package" level, that determination cannot be made. +-type ScanLevel string +- +-const ( +- scanLevelModule = "module" +- scanLevelPackage = "package" +- scanLevelSymbol = "symbol" +-) +- +-// WantSymbols can be used to check whether the scan level is one that is able +-// to generate symbols called findings. +-func (l ScanLevel) WantSymbols() bool { return l == scanLevelSymbol } +diff -urN a/gopls/internal/vulncheck/govulncheck/handler.go b/gopls/internal/vulncheck/govulncheck/handler.go +--- a/gopls/internal/vulncheck/govulncheck/handler.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/vulncheck/govulncheck/handler.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,61 +0,0 @@ +-// Copyright 2023 The Go Authors. All rights reserved. 
+-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-// Code generated by copying from golang.org/x/vuln@v1.0.1 (go run copier.go); DO NOT EDIT. +- +-package govulncheck +- +-import ( +- "encoding/json" +- "io" +- +- "golang.org/x/tools/gopls/internal/vulncheck/osv" +-) +- +-// Handler handles messages to be presented in a vulnerability scan output +-// stream. +-type Handler interface { +- // Config communicates introductory message to the user. +- Config(config *Config) error +- +- // Progress is called to display a progress message. +- Progress(progress *Progress) error +- +- // OSV is invoked for each osv Entry in the stream. +- OSV(entry *osv.Entry) error +- +- // Finding is called for each vulnerability finding in the stream. +- Finding(finding *Finding) error +-} +- +-// HandleJSON reads the json from the supplied stream and hands the decoded +-// output to the handler. +-func HandleJSON(from io.Reader, to Handler) error { +- dec := json.NewDecoder(from) +- for dec.More() { +- msg := Message{} +- // decode the next message in the stream +- if err := dec.Decode(&msg); err != nil { +- return err +- } +- // dispatch the message +- var err error +- if msg.Config != nil { +- err = to.Config(msg.Config) +- } +- if msg.Progress != nil { +- err = to.Progress(msg.Progress) +- } +- if msg.OSV != nil { +- err = to.OSV(msg.OSV) +- } +- if msg.Finding != nil { +- err = to.Finding(msg.Finding) +- } +- if err != nil { +- return err +- } +- } +- return nil +-} +diff -urN a/gopls/internal/vulncheck/govulncheck/jsonhandler.go b/gopls/internal/vulncheck/govulncheck/jsonhandler.go +--- a/gopls/internal/vulncheck/govulncheck/jsonhandler.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/vulncheck/govulncheck/jsonhandler.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,46 +0,0 @@ +-// Copyright 2022 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-// Code generated by copying from golang.org/x/vuln@v1.0.1 (go run copier.go); DO NOT EDIT. +- +-package govulncheck +- +-import ( +- "encoding/json" +- +- "io" +- +- "golang.org/x/tools/gopls/internal/vulncheck/osv" +-) +- +-type jsonHandler struct { +- enc *json.Encoder +-} +- +-// NewJSONHandler returns a handler that writes govulncheck output as json. +-func NewJSONHandler(w io.Writer) Handler { +- enc := json.NewEncoder(w) +- enc.SetIndent("", " ") +- return &jsonHandler{enc: enc} +-} +- +-// Config writes config block in JSON to the underlying writer. +-func (h *jsonHandler) Config(config *Config) error { +- return h.enc.Encode(Message{Config: config}) +-} +- +-// Progress writes a progress message in JSON to the underlying writer. +-func (h *jsonHandler) Progress(progress *Progress) error { +- return h.enc.Encode(Message{Progress: progress}) +-} +- +-// OSV writes an osv entry in JSON to the underlying writer. +-func (h *jsonHandler) OSV(entry *osv.Entry) error { +- return h.enc.Encode(Message{OSV: entry}) +-} +- +-// Finding writes a finding in JSON to the underlying writer. +-func (h *jsonHandler) Finding(finding *Finding) error { +- return h.enc.Encode(Message{Finding: finding}) +-} +diff -urN a/gopls/internal/vulncheck/osv/osv.go b/gopls/internal/vulncheck/osv/osv.go +--- a/gopls/internal/vulncheck/osv/osv.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/vulncheck/osv/osv.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,240 +0,0 @@ +-// Copyright 2023 The Go Authors. 
All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-// Code generated by copying from golang.org/x/vuln@v1.0.1 (go run copier.go); DO NOT EDIT. +- +-// Package osv implements the Go OSV vulnerability format +-// (https://go.dev/security/vuln/database#schema), which is a subset of +-// the OSV shared vulnerability format +-// (https://ossf.github.io/osv-schema), with database and +-// ecosystem-specific meanings and fields. +-// +-// As this package is intended for use with the Go vulnerability +-// database, only the subset of features which are used by that +-// database are implemented (for instance, only the SEMVER affected +-// range type is implemented). +-package osv +- +-import "time" +- +-// RangeType specifies the type of version range being recorded and +-// defines the interpretation of the RangeEvent object's Introduced +-// and Fixed fields. +-// +-// In this implementation, only the "SEMVER" type is supported. +-// +-// See https://ossf.github.io/osv-schema/#affectedrangestype-field. +-type RangeType string +- +-// RangeTypeSemver indicates a semantic version as defined by +-// SemVer 2.0.0, with no leading "v" prefix. +-const RangeTypeSemver RangeType = "SEMVER" +- +-// Ecosystem identifies the overall library ecosystem. +-// In this implementation, only the "Go" ecosystem is supported. +-type Ecosystem string +- +-// GoEcosystem indicates the Go ecosystem. +-const GoEcosystem Ecosystem = "Go" +- +-// Pseudo-module paths used to describe vulnerabilities +-// in the Go standard library and toolchain. +-const ( +- // GoStdModulePath is the pseudo-module path string used +- // to describe vulnerabilities in the Go standard library. +- GoStdModulePath = "stdlib" +- // GoCmdModulePath is the pseudo-module path string used +- // to describe vulnerabilities in the go command. +- GoCmdModulePath = "toolchain" +-) +- +-// Module identifies the Go module containing the vulnerability. +-// Note that this field is called "package" in the OSV specification. +-// +-// See https://ossf.github.io/osv-schema/#affectedpackage-field. +-type Module struct { +- // The Go module path. Required. +- // For the Go standard library, this is "stdlib". +- // For the Go toolchain, this is "toolchain." +- Path string `json:"name"` +- // The ecosystem containing the module. Required. +- // This should always be "Go". +- Ecosystem Ecosystem `json:"ecosystem"` +-} +- +-// RangeEvent describes a single module version that either +-// introduces or fixes a vulnerability. +-// +-// Exactly one of Introduced and Fixed must be present. Other range +-// event types (e.g, "last_affected" and "limit") are not supported in +-// this implementation. +-// +-// See https://ossf.github.io/osv-schema/#affectedrangesevents-fields. +-type RangeEvent struct { +- // Introduced is a version that introduces the vulnerability. +- // A special value, "0", represents a version that sorts before +- // any other version, and should be used to indicate that the +- // vulnerability exists from the "beginning of time". +- Introduced string `json:"introduced,omitempty"` +- // Fixed is a version that fixes the vulnerability. +- Fixed string `json:"fixed,omitempty"` +-} +- +-// Range describes the affected versions of the vulnerable module. +-// +-// See https://ossf.github.io/osv-schema/#affectedranges-field. +-type Range struct { +- // Type is the version type that should be used to interpret the +- // versions in Events. Required. 
+- // In this implementation, only the "SEMVER" type is supported. +- Type RangeType `json:"type"` +- // Events is a list of versions representing the ranges in which +- // the module is vulnerable. Required. +- // The events should be sorted, and MUST represent non-overlapping +- // ranges. +- // There must be at least one RangeEvent containing a value for +- // Introduced. +- // See https://ossf.github.io/osv-schema/#examples for examples. +- Events []RangeEvent `json:"events"` +-} +- +-// Reference type is a reference (link) type. +-type ReferenceType string +- +-const ( +- // ReferenceTypeAdvisory is a published security advisory for +- // the vulnerability. +- ReferenceTypeAdvisory = ReferenceType("ADVISORY") +- // ReferenceTypeArticle is an article or blog post describing the vulnerability. +- ReferenceTypeArticle = ReferenceType("ARTICLE") +- // ReferenceTypeReport is a report, typically on a bug or issue tracker, of +- // the vulnerability. +- ReferenceTypeReport = ReferenceType("REPORT") +- // ReferenceTypeFix is a source code browser link to the fix (e.g., a GitHub commit). +- ReferenceTypeFix = ReferenceType("FIX") +- // ReferenceTypePackage is a home web page for the package. +- ReferenceTypePackage = ReferenceType("PACKAGE") +- // ReferenceTypeEvidence is a demonstration of the validity of a vulnerability claim. +- ReferenceTypeEvidence = ReferenceType("EVIDENCE") +- // ReferenceTypeWeb is a web page of some unspecified kind. +- ReferenceTypeWeb = ReferenceType("WEB") +-) +- +-// Reference is a reference URL containing additional information, +-// advisories, issue tracker entries, etc., about the vulnerability. +-// +-// See https://ossf.github.io/osv-schema/#references-field. +-type Reference struct { +- // The type of reference. Required. +- Type ReferenceType `json:"type"` +- // The fully-qualified URL of the reference. Required. +- URL string `json:"url"` +-} +- +-// Affected gives details about a module affected by the vulnerability. +-// +-// See https://ossf.github.io/osv-schema/#affected-fields. +-type Affected struct { +- // The affected Go module. Required. +- // Note that this field is called "package" in the OSV specification. +- Module Module `json:"package"` +- // The module version ranges affected by the vulnerability. +- Ranges []Range `json:"ranges,omitempty"` +- // Details on the affected packages and symbols within the module. +- EcosystemSpecific EcosystemSpecific `json:"ecosystem_specific"` +-} +- +-// Package contains additional information about an affected package. +-// This is an ecosystem-specific field for the Go ecosystem. +-type Package struct { +- // Path is the package import path. Required. +- Path string `json:"path,omitempty"` +- // GOOS is the execution operating system where the symbols appear, if +- // known. +- GOOS []string `json:"goos,omitempty"` +- // GOARCH specifies the execution architecture where the symbols appear, if +- // known. +- GOARCH []string `json:"goarch,omitempty"` +- // Symbols is a list of function and method names affected by +- // this vulnerability. Methods are listed as <recv>.<method>. +- // +- // If included, only programs which use these symbols will be marked as +- // vulnerable by `govulncheck`. If omitted, any program which imports this +- // package will be marked vulnerable. +- Symbols []string `json:"symbols,omitempty"` +-} +- +-// EcosystemSpecific contains additional information about the vulnerable +-// module for the Go ecosystem. +-// +-// See https://go.dev/security/vuln/database#schema. 
+-type EcosystemSpecific struct { +- // Packages is the list of affected packages within the module. +- Packages []Package `json:"imports,omitempty"` +-} +- +-// Entry represents a vulnerability in the Go OSV format, documented +-// in https://go.dev/security/vuln/database#schema. +-// It is a subset of the OSV schema (https://ossf.github.io/osv-schema). +-// Only fields that are published in the Go Vulnerability Database +-// are supported. +-type Entry struct { +- // SchemaVersion is the OSV schema version used to encode this +- // vulnerability. +- SchemaVersion string `json:"schema_version,omitempty"` +- // ID is a unique identifier for the vulnerability. Required. +- // The Go vulnerability database issues IDs of the form +- // GO-<YEAR>-<ENTRYID>. +- ID string `json:"id"` +- // Modified is the time the entry was last modified. Required. +- Modified time.Time `json:"modified,omitempty"` +- // Published is the time the entry should be considered to have +- // been published. +- Published time.Time `json:"published,omitempty"` +- // Withdrawn is the time the entry should be considered to have +- // been withdrawn. If the field is missing, then the entry has +- // not been withdrawn. +- Withdrawn *time.Time `json:"withdrawn,omitempty"` +- // Aliases is a list of IDs for the same vulnerability in other +- // databases. +- Aliases []string `json:"aliases,omitempty"` +- // Summary gives a one-line, English textual summary of the vulnerability. +- // It is recommended that this field be kept short, on the order of no more +- // than 120 characters. +- Summary string `json:"summary,omitempty"` +- // Details contains additional English textual details about the vulnerability. +- Details string `json:"details"` +- // Affected contains information on the modules and versions +- // affected by the vulnerability. +- Affected []Affected `json:"affected"` +- // References contains links to more information about the +- // vulnerability. +- References []Reference `json:"references,omitempty"` +- // Credits contains credits to entities that helped find or fix the +- // vulnerability. +- Credits []Credit `json:"credits,omitempty"` +- // DatabaseSpecific contains additional information about the +- // vulnerability, specific to the Go vulnerability database. +- DatabaseSpecific *DatabaseSpecific `json:"database_specific,omitempty"` +-} +- +-// Credit represents a credit for the discovery, confirmation, patch, or +-// other event in the life cycle of a vulnerability. +-// +-// See https://ossf.github.io/osv-schema/#credits-fields. +-type Credit struct { +- // Name is the name, label, or other identifier of the individual or +- // entity being credited. Required. +- Name string `json:"name"` +-} +- +-// DatabaseSpecific contains additional information about the +-// vulnerability, specific to the Go vulnerability database. +-// +-// See https://go.dev/security/vuln/database#schema. +-type DatabaseSpecific struct { +- // The URL of the Go advisory for this vulnerability, of the form +- // "https://pkg.go.dev/GO-YYYY-XXXX". +- URL string `json:"url,omitempty"` +-} +diff -urN a/gopls/internal/vulncheck/scan/command.go b/gopls/internal/vulncheck/scan/command.go +--- a/gopls/internal/vulncheck/scan/command.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/vulncheck/scan/command.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,165 +0,0 @@ +-// Copyright 2022 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. 
+- +-package scan +- +-import ( +- "bytes" +- "context" +- "fmt" +- "io" +- "os" +- "os/exec" +- "sort" +- "time" +- +- "golang.org/x/sync/errgroup" +- "golang.org/x/tools/gopls/internal/cache" +- "golang.org/x/tools/gopls/internal/vulncheck" +- "golang.org/x/tools/gopls/internal/vulncheck/govulncheck" +- "golang.org/x/tools/gopls/internal/vulncheck/osv" +- "golang.org/x/vuln/scan" +-) +- +-// Main implements gopls vulncheck. +-func Main(ctx context.Context, args ...string) error { +- // wrapping govulncheck. +- cmd := scan.Command(ctx, args...) +- if err := cmd.Start(); err != nil { +- return err +- } +- return cmd.Wait() +-} +- +-// RunGovulncheck implements the codelens "Run Govulncheck" +-// that runs 'gopls vulncheck' and converts the output to gopls's internal data +-// used for diagnostics and hover message construction. +-// +-// TODO(rfindley): this should accept a *View (which exposes) Options, rather +-// than a snapshot. +-func RunGovulncheck(ctx context.Context, pattern string, snapshot *cache.Snapshot, dir string, log io.Writer) (*vulncheck.Result, error) { +- vulncheckargs := []string{ +- "vulncheck", "--", +- "-json", +- "-mode", "source", +- "-scan", "symbol", +- } +- if dir != "" { +- vulncheckargs = append(vulncheckargs, "-C", dir) +- } +- if db := cache.GetEnv(snapshot, "GOVULNDB"); db != "" { +- vulncheckargs = append(vulncheckargs, "-db", db) +- } +- vulncheckargs = append(vulncheckargs, pattern) +- // TODO: support -tags. need to compute tags args from opts.BuildFlags. +- // TODO: support -test. +- +- ir, iw := io.Pipe() +- handler := &govulncheckHandler{logger: log, osvs: map[string]*osv.Entry{}} +- +- stderr := new(bytes.Buffer) +- var g errgroup.Group +- // We run the govulncheck's analysis in a separate process as it can +- // consume a lot of CPUs and memory, and terminates: a separate process +- // is a perfect garbage collector and affords us ways to limit its resource usage. +- g.Go(func() error { +- defer iw.Close() +- +- cmd := exec.CommandContext(ctx, os.Args[0], vulncheckargs...) +- cmd.Env = getEnvSlices(snapshot) +- if goversion := cache.GetEnv(snapshot, cache.GoVersionForVulnTest); goversion != "" { +- // Let govulncheck API use a different Go version using the (undocumented) hook +- // in https://go.googlesource.com/vuln/+/v1.0.1/internal/scan/run.go#76 +- cmd.Env = append(cmd.Env, "GOVERSION="+goversion) +- } +- cmd.Stderr = stderr // stream vulncheck's STDERR as progress reports +- cmd.Stdout = iw // let the other goroutine parses the result. +- +- if err := cmd.Start(); err != nil { +- return fmt.Errorf("failed to start govulncheck: %v", err) +- } +- if err := cmd.Wait(); err != nil { +- return fmt.Errorf("failed to run govulncheck: %v", err) +- } +- return nil +- }) +- g.Go(func() error { +- return govulncheck.HandleJSON(ir, handler) +- }) +- if err := g.Wait(); err != nil { +- if stderr.Len() > 0 { +- log.Write(stderr.Bytes()) +- } +- return nil, fmt.Errorf("failed to read govulncheck output: %v: stderr:\n%s", err, stderr) +- } +- +- findings := handler.findings // sort so the findings in the result is deterministic. 
+- sort.Slice(findings, func(i, j int) bool { +- x, y := findings[i], findings[j] +- if x.OSV != y.OSV { +- return x.OSV < y.OSV +- } +- return x.Trace[0].Package < y.Trace[0].Package +- }) +- result := &vulncheck.Result{ +- Mode: vulncheck.ModeGovulncheck, +- AsOf: time.Now(), +- Entries: handler.osvs, +- Findings: findings, +- } +- return result, nil +-} +- +-type govulncheckHandler struct { +- logger io.Writer // forward progress reports to logger. +- +- osvs map[string]*osv.Entry +- findings []*govulncheck.Finding +-} +- +-// Config implements vulncheck.Handler. +-func (h *govulncheckHandler) Config(config *govulncheck.Config) error { +- if config.GoVersion != "" { +- fmt.Fprintf(h.logger, "Go: %v\n", config.GoVersion) +- } +- if config.ScannerName != "" { +- scannerName := fmt.Sprintf("Scanner: %v", config.ScannerName) +- if config.ScannerVersion != "" { +- scannerName += "@" + config.ScannerVersion +- } +- fmt.Fprintln(h.logger, scannerName) +- } +- if config.DB != "" { +- dbInfo := fmt.Sprintf("DB: %v", config.DB) +- if config.DBLastModified != nil { +- dbInfo += fmt.Sprintf(" (DB updated: %v)", config.DBLastModified.String()) +- } +- fmt.Fprintln(h.logger, dbInfo) +- } +- return nil +-} +- +-// Finding implements vulncheck.Handler. +-func (h *govulncheckHandler) Finding(finding *govulncheck.Finding) error { +- h.findings = append(h.findings, finding) +- return nil +-} +- +-// OSV implements vulncheck.Handler. +-func (h *govulncheckHandler) OSV(entry *osv.Entry) error { +- h.osvs[entry.ID] = entry +- return nil +-} +- +-// Progress implements vulncheck.Handler. +-func (h *govulncheckHandler) Progress(progress *govulncheck.Progress) error { +- if progress.Message != "" { +- fmt.Fprintf(h.logger, "%v\n", progress.Message) +- } +- return nil +-} +- +-func getEnvSlices(snapshot *cache.Snapshot) []string { +- return append(os.Environ(), snapshot.Options().EnvSlice()...) +-} +diff -urN a/gopls/internal/vulncheck/semver/semver.go b/gopls/internal/vulncheck/semver/semver.go +--- a/gopls/internal/vulncheck/semver/semver.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/vulncheck/semver/semver.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,45 +0,0 @@ +-// Copyright 2022 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-// Package semver provides shared utilities for manipulating +-// Go semantic versions. +-package semver +- +-import ( +- "strings" +- +- "golang.org/x/mod/semver" +-) +- +-// addSemverPrefix adds a 'v' prefix to s if it isn't already prefixed +-// with 'v' or 'go'. This allows us to easily test go-style SEMVER +-// strings against normal SEMVER strings. +-func addSemverPrefix(s string) string { +- if !strings.HasPrefix(s, "v") && !strings.HasPrefix(s, "go") { +- return "v" + s +- } +- return s +-} +- +-// removeSemverPrefix removes the 'v' or 'go' prefixes from go-style +-// SEMVER strings, for usage in the public vulnerability format. +-func removeSemverPrefix(s string) string { +- s = strings.TrimPrefix(s, "v") +- s = strings.TrimPrefix(s, "go") +- return s +-} +- +-// CanonicalizeSemverPrefix turns a SEMVER string into the canonical +-// representation using the 'v' prefix, as used by the OSV format. +-// Input may be a bare SEMVER ("1.2.3"), Go prefixed SEMVER ("go1.2.3"), +-// or already canonical SEMVER ("v1.2.3"). 
+-func CanonicalizeSemverPrefix(s string) string { +- return addSemverPrefix(removeSemverPrefix(s)) +-} +- +-// Valid returns whether v is valid semver, allowing +-// either a "v", "go" or no prefix. +-func Valid(v string) bool { +- return semver.IsValid(CanonicalizeSemverPrefix(v)) +-} +diff -urN a/gopls/internal/vulncheck/semver/semver_test.go b/gopls/internal/vulncheck/semver/semver_test.go +--- a/gopls/internal/vulncheck/semver/semver_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/vulncheck/semver/semver_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,25 +0,0 @@ +-// Copyright 2022 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package semver +- +-import ( +- "testing" +-) +- +-func TestCanonicalize(t *testing.T) { +- for _, test := range []struct { +- v string +- want string +- }{ +- {"v1.2.3", "v1.2.3"}, +- {"1.2.3", "v1.2.3"}, +- {"go1.2.3", "v1.2.3"}, +- } { +- got := CanonicalizeSemverPrefix(test.v) +- if got != test.want { +- t.Errorf("want %s; got %s", test.want, got) +- } +- } +-} +diff -urN a/gopls/internal/vulncheck/types.go b/gopls/internal/vulncheck/types.go +--- a/gopls/internal/vulncheck/types.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/vulncheck/types.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,47 +0,0 @@ +-// Copyright 2022 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-// go:generate go run copier.go +- +-package vulncheck +- +-import ( +- "time" +- +- gvc "golang.org/x/tools/gopls/internal/vulncheck/govulncheck" +- "golang.org/x/tools/gopls/internal/vulncheck/osv" +-) +- +-// Result is the result of vulnerability scanning. +-type Result struct { +- // Entries contains all vulnerabilities that are called or imported by +- // the analyzed module. Keys are Entry.IDs. +- Entries map[string]*osv.Entry +- // Findings are vulnerabilities found by vulncheck or import-based analysis. +- // Ordered by the OSV IDs and the package names. +- Findings []*gvc.Finding +- +- // Mode contains the source of the vulnerability info. +- // Clients of the gopls.fetch_vulncheck_result command may need +- // to interpret the vulnerabilities differently based on the +- // analysis mode. For example, Vuln without callstack traces +- // indicate a vulnerability that is not used if the result was +- // from 'govulncheck' analysis mode. On the other hand, Vuln +- // without callstack traces just implies the package with the +- // vulnerability is known to the workspace and we do not know +- // whether the vulnerable symbols are actually used or not. +- Mode AnalysisMode `json:",omitempty"` +- +- // AsOf describes when this Result was computed using govulncheck. +- // It is valid only with the govulncheck analysis mode. +- AsOf time.Time +-} +- +-type AnalysisMode string +- +-const ( +- ModeInvalid AnalysisMode = "" // zero value +- ModeGovulncheck AnalysisMode = "govulncheck" +- ModeImports AnalysisMode = "imports" +-) +diff -urN a/gopls/internal/vulncheck/vulntest/db.go b/gopls/internal/vulncheck/vulntest/db.go +--- a/gopls/internal/vulncheck/vulntest/db.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/vulncheck/vulntest/db.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,234 +0,0 @@ +-// Copyright 2022 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. 
+- +-// Package vulntest provides helpers for vulncheck functionality testing. +-package vulntest +- +-import ( +- "bytes" +- "context" +- "encoding/json" +- "fmt" +- "os" +- "path/filepath" +- "slices" +- "sort" +- "strings" +- "time" +- +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/gopls/internal/vulncheck/osv" +- "golang.org/x/tools/txtar" +-) +- +-// NewDatabase returns a read-only DB containing the provided +-// txtar-format collection of vulnerability reports. +-// Each vulnerability report is a YAML file whose format +-// is defined in golang.org/x/vulndb/doc/format.md. +-// A report file name must have the id as its base name, +-// and have .yaml as its extension. +-// +-// db, err := NewDatabase(ctx, reports) +-// ... +-// defer db.Clean() +-// client, err := NewClient(db) +-// ... +-// +-// The returned DB's Clean method must be called to clean up the +-// generated database. +-func NewDatabase(ctx context.Context, txtarReports []byte) (*DB, error) { +- disk, err := os.MkdirTemp("", "vulndb-test") +- if err != nil { +- return nil, err +- } +- if err := generateDB(ctx, txtarReports, disk, false); err != nil { +- os.RemoveAll(disk) // ignore error +- return nil, err +- } +- +- return &DB{disk: disk}, nil +-} +- +-// DB is a read-only vulnerability database on disk. +-// Users can use this database with golang.org/x/vuln APIs +-// by setting the `VULNDB` environment variable. +-type DB struct { +- disk string +-} +- +-// URI returns the file URI that can be used for VULNDB environment +-// variable. +-func (db *DB) URI() string { +- u := protocol.URIFromPath(filepath.Join(db.disk, "ID")) +- return string(u) +-} +- +-// Clean deletes the database. +-func (db *DB) Clean() error { +- return os.RemoveAll(db.disk) +-} +- +-// +-// The following was selectively copied from golang.org/x/vulndb/internal/database +-// +- +-const ( +- dbURL = "https://pkg.go.dev/vuln/" +- +- // idDirectory is the name of the directory that contains entries +- // listed by their IDs. +- idDirectory = "ID" +- +- // cmdModule is the name of the module containing Go toolchain +- // binaries. +- cmdModule = "cmd" +- +- // stdModule is the name of the module containing Go std packages. +- stdModule = "std" +-) +- +-// generateDB generates the file-based vuln DB in the directory jsonDir. +-func generateDB(ctx context.Context, txtarData []byte, jsonDir string, indent bool) error { +- archive := txtar.Parse(txtarData) +- +- entries, err := generateEntries(ctx, archive) +- if err != nil { +- return err +- } +- return writeEntriesByID(filepath.Join(jsonDir, idDirectory), entries, indent) +-} +- +-func generateEntries(_ context.Context, archive *txtar.Archive) ([]osv.Entry, error) { +- now := time.Now() +- var entries []osv.Entry +- for _, f := range archive.Files { +- if !strings.HasSuffix(f.Name, ".yaml") { +- continue +- } +- r, err := readReport(bytes.NewReader(f.Data)) +- if err != nil { +- return nil, err +- } +- name := strings.TrimSuffix(filepath.Base(f.Name), filepath.Ext(f.Name)) +- linkName := fmt.Sprintf("%s%s", dbURL, name) +- entry := generateOSVEntry(name, linkName, now, *r) +- entries = append(entries, entry) +- } +- return entries, nil +-} +- +-func writeEntriesByID(idDir string, entries []osv.Entry, indent bool) error { +- // Write a directory containing entries by ID. 
+- if err := os.MkdirAll(idDir, 0755); err != nil { +- return fmt.Errorf("failed to create directory %q: %v", idDir, err) +- } +- for _, e := range entries { +- outPath := filepath.Join(idDir, e.ID+".json") +- if err := writeJSON(outPath, e, indent); err != nil { +- return err +- } +- } +- return nil +-} +- +-func writeJSON(filename string, value any, indent bool) (err error) { +- j, err := jsonMarshal(value, indent) +- if err != nil { +- return err +- } +- return os.WriteFile(filename, j, 0644) +-} +- +-func jsonMarshal(v any, indent bool) ([]byte, error) { +- if indent { +- return json.MarshalIndent(v, "", " ") +- } +- return json.Marshal(v) +-} +- +-// generateOSVEntry create an osv.Entry for a report. In addition to the report, it +-// takes the ID for the vuln and a URL that will point to the entry in the vuln DB. +-// It returns the osv.Entry and a list of module paths that the vuln affects. +-func generateOSVEntry(id, url string, lastModified time.Time, r Report) osv.Entry { +- entry := osv.Entry{ +- ID: id, +- Published: r.Published, +- Modified: lastModified, +- Withdrawn: r.Withdrawn, +- Summary: r.Summary, +- Details: r.Description, +- DatabaseSpecific: &osv.DatabaseSpecific{URL: url}, +- } +- +- moduleMap := make(map[string]bool) +- for _, m := range r.Modules { +- switch m.Module { +- case stdModule: +- moduleMap[osv.GoStdModulePath] = true +- case cmdModule: +- moduleMap[osv.GoCmdModulePath] = true +- default: +- moduleMap[m.Module] = true +- } +- entry.Affected = append(entry.Affected, toAffected(m)) +- } +- for _, ref := range r.References { +- entry.References = append(entry.References, osv.Reference{ +- Type: ref.Type, +- URL: ref.URL, +- }) +- } +- return entry +-} +- +-func AffectedRanges(versions []VersionRange) []osv.Range { +- a := osv.Range{Type: osv.RangeTypeSemver} +- if len(versions) == 0 || versions[0].Introduced == "" { +- a.Events = append(a.Events, osv.RangeEvent{Introduced: "0"}) +- } +- for _, v := range versions { +- if v.Introduced != "" { +- a.Events = append(a.Events, osv.RangeEvent{Introduced: v.Introduced.Canonical()}) +- } +- if v.Fixed != "" { +- a.Events = append(a.Events, osv.RangeEvent{Fixed: v.Fixed.Canonical()}) +- } +- } +- return []osv.Range{a} +-} +- +-func toOSVPackages(pkgs []*Package) (imps []osv.Package) { +- for _, p := range pkgs { +- syms := slices.Clone(p.Symbols) +- syms = append(syms, p.DerivedSymbols...) +- sort.Strings(syms) +- imps = append(imps, osv.Package{ +- Path: p.Package, +- GOOS: p.GOOS, +- GOARCH: p.GOARCH, +- Symbols: syms, +- }) +- } +- return imps +-} +- +-func toAffected(m *Module) osv.Affected { +- name := m.Module +- switch name { +- case stdModule: +- name = osv.GoStdModulePath +- case cmdModule: +- name = osv.GoCmdModulePath +- } +- return osv.Affected{ +- Module: osv.Module{ +- Path: name, +- Ecosystem: osv.GoEcosystem, +- }, +- Ranges: AffectedRanges(m.Versions), +- EcosystemSpecific: osv.EcosystemSpecific{ +- Packages: toOSVPackages(m.Packages), +- }, +- } +-} +diff -urN a/gopls/internal/vulncheck/vulntest/db_test.go b/gopls/internal/vulncheck/vulntest/db_test.go +--- a/gopls/internal/vulncheck/vulntest/db_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/vulncheck/vulntest/db_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,76 +0,0 @@ +-// Copyright 2022 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. 
+- +-package vulntest +- +-import ( +- "context" +- "encoding/json" +- "flag" +- "os" +- "path/filepath" +- "testing" +- "time" +- +- "github.com/google/go-cmp/cmp" +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/gopls/internal/vulncheck/osv" +-) +- +-var update = flag.Bool("update", false, "update golden files in testdata/") +- +-func TestNewDatabase(t *testing.T) { +- ctx := context.Background() +- +- in, err := os.ReadFile("testdata/report.yaml") +- if err != nil { +- t.Fatal(err) +- } +- in = append([]byte("-- GO-2020-0001.yaml --\n"), in...) +- +- db, err := NewDatabase(ctx, in) +- if err != nil { +- t.Fatal(err) +- } +- defer db.Clean() +- dbpath := protocol.DocumentURI(db.URI()).Path() +- +- // The generated JSON file will be in DB/GO-2022-0001.json. +- got := readOSVEntry(t, filepath.Join(dbpath, "GO-2020-0001.json")) +- got.Modified = time.Time{} +- +- if *update { +- updateTestData(t, got, "testdata/GO-2020-0001.json") +- } +- +- want := readOSVEntry(t, "testdata/GO-2020-0001.json") +- want.Modified = time.Time{} +- if diff := cmp.Diff(want, got); diff != "" { +- t.Errorf("mismatch (-want +got):\n%s", diff) +- } +-} +- +-func updateTestData(t *testing.T, got *osv.Entry, fname string) { +- content, err := json.MarshalIndent(got, "", "\t") +- if err != nil { +- t.Fatal(err) +- } +- if err := os.WriteFile(fname, content, 0666); err != nil { +- t.Fatal(err) +- } +- t.Logf("updated %v", fname) +-} +- +-func readOSVEntry(t *testing.T, filename string) *osv.Entry { +- t.Helper() +- content, err := os.ReadFile(filename) +- if err != nil { +- t.Fatal(err) +- } +- var entry osv.Entry +- if err := json.Unmarshal(content, &entry); err != nil { +- t.Fatal(err) +- } +- return &entry +-} +diff -urN a/gopls/internal/vulncheck/vulntest/report.go b/gopls/internal/vulncheck/vulntest/report.go +--- a/gopls/internal/vulncheck/vulntest/report.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/vulncheck/vulntest/report.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,152 +0,0 @@ +-// Copyright 2022 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package vulntest +- +-import ( +- "fmt" +- "io" +- "os" +- "strings" +- "time" +- +- "golang.org/x/mod/semver" +- "golang.org/x/tools/gopls/internal/vulncheck/osv" +- "gopkg.in/yaml.v3" +-) +- +-// +-// The following was selectively copied from golang.org/x/vulndb/internal/report +-// +- +-// readReport reads a Report in YAML format. +-func readReport(in io.Reader) (*Report, error) { +- d := yaml.NewDecoder(in) +- // Require that all fields in the file are in the struct. +- // This corresponds to v2's UnmarshalStrict. +- d.KnownFields(true) +- var r Report +- if err := d.Decode(&r); err != nil { +- return nil, fmt.Errorf("yaml.Decode: %v", err) +- } +- return &r, nil +-} +- +-// Report represents a vulnerability report in the vulndb. +-// See https://go.googlesource.com/vulndb/+/refs/heads/master/doc/format.md +-type Report struct { +- ID string `yaml:",omitempty"` +- +- Modules []*Module `yaml:",omitempty"` +- +- // Summary is a short phrase describing the vulnerability. +- Summary string `yaml:",omitempty"` +- +- // Description is the CVE description from an existing CVE. If we are +- // assigning a CVE ID ourselves, use CVEMetadata.Description instead. 
+- Description string `yaml:",omitempty"` +- Published time.Time `yaml:",omitempty"` +- Withdrawn *time.Time `yaml:",omitempty"` +- +- References []*Reference `yaml:",omitempty"` +-} +- +-// Write writes r to filename in YAML format. +-func (r *Report) Write(filename string) (err error) { +- f, err := os.Create(filename) +- if err != nil { +- return err +- } +- err = r.encode(f) +- err2 := f.Close() +- if err == nil { +- err = err2 +- } +- return err +-} +- +-// ToString encodes r to a YAML string. +-func (r *Report) ToString() (string, error) { +- var b strings.Builder +- if err := r.encode(&b); err != nil { +- return "", err +- } +- return b.String(), nil +-} +- +-func (r *Report) encode(w io.Writer) error { +- e := yaml.NewEncoder(w) +- defer e.Close() +- e.SetIndent(4) +- return e.Encode(r) +-} +- +-type VersionRange struct { +- Introduced Version `yaml:"introduced,omitempty"` +- Fixed Version `yaml:"fixed,omitempty"` +-} +- +-type Module struct { +- Module string `yaml:",omitempty"` +- Versions []VersionRange `yaml:",omitempty"` +- Packages []*Package `yaml:",omitempty"` +-} +- +-type Package struct { +- Package string `yaml:",omitempty"` +- GOOS []string `yaml:"goos,omitempty"` +- GOARCH []string `yaml:"goarch,omitempty"` +- // Symbols originally identified as vulnerable. +- Symbols []string `yaml:",omitempty"` +- // Additional vulnerable symbols, computed from Symbols via static analysis +- // or other technique. +- DerivedSymbols []string `yaml:"derived_symbols,omitempty"` +-} +- +-// Version is a SemVer 2.0.0 semantic version with no leading "v" prefix, +-// as used by OSV. +-type Version string +- +-// V returns the version with a "v" prefix. +-func (v Version) V() string { +- return "v" + string(v) +-} +- +-// IsValid reports whether v is a valid semantic version string. +-func (v Version) IsValid() bool { +- return semver.IsValid(v.V()) +-} +- +-// Before reports whether v < v2. +-func (v Version) Before(v2 Version) bool { +- return semver.Compare(v.V(), v2.V()) < 0 +-} +- +-// Canonical returns the canonical formatting of the version. +-func (v Version) Canonical() string { +- return strings.TrimPrefix(semver.Canonical(v.V()), "v") +-} +- +-// A Reference is a link to some external resource. +-// +-// For ease of typing, References are represented in the YAML as a +-// single-element mapping of type to URL. +-type Reference osv.Reference +- +-func (r *Reference) MarshalYAML() (any, error) { +- return map[string]string{ +- strings.ToLower(string(r.Type)): r.URL, +- }, nil +-} +- +-func (r *Reference) UnmarshalYAML(n *yaml.Node) (err error) { +- if n.Kind != yaml.MappingNode || len(n.Content) != 2 || n.Content[0].Kind != yaml.ScalarNode || n.Content[1].Kind != yaml.ScalarNode { +- return &yaml.TypeError{Errors: []string{ +- fmt.Sprintf("line %d: report.Reference must contain a mapping with one value", n.Line), +- }} +- } +- r.Type = osv.ReferenceType(strings.ToUpper(n.Content[0].Value)) +- r.URL = n.Content[1].Value +- return nil +-} +diff -urN a/gopls/internal/vulncheck/vulntest/report_test.go b/gopls/internal/vulncheck/vulntest/report_test.go +--- a/gopls/internal/vulncheck/vulntest/report_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/vulncheck/vulntest/report_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,48 +0,0 @@ +-// Copyright 2022 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. 
+- +-package vulntest +- +-import ( +- "bytes" +- "io" +- "os" +- "path/filepath" +- "testing" +- +- "github.com/google/go-cmp/cmp" +-) +- +-func readAll(t *testing.T, filename string) io.Reader { +- d, err := os.ReadFile(filename) +- if err != nil { +- t.Fatal(err) +- } +- return bytes.NewReader(d) +-} +- +-func TestRoundTrip(t *testing.T) { +- // A report shouldn't change after being read and then written. +- in := filepath.Join("testdata", "report.yaml") +- r, err := readReport(readAll(t, in)) +- if err != nil { +- t.Fatal(err) +- } +- out := filepath.Join(t.TempDir(), "report.yaml") +- if err := r.Write(out); err != nil { +- t.Fatal(err) +- } +- +- want, err := os.ReadFile(in) +- if err != nil { +- t.Fatal(err) +- } +- got, err := os.ReadFile(out) +- if err != nil { +- t.Fatal(err) +- } +- if diff := cmp.Diff(want, got); diff != "" { +- t.Errorf("mismatch (-want, +got):\n%s", diff) +- } +-} +diff -urN a/gopls/internal/vulncheck/vulntest/stdlib.go b/gopls/internal/vulncheck/vulntest/stdlib.go +--- a/gopls/internal/vulncheck/vulntest/stdlib.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/vulncheck/vulntest/stdlib.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,23 +0,0 @@ +-// Copyright 2022 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package vulntest +- +-import ( +- "strings" +- +- "golang.org/x/mod/module" +-) +- +-// maybeStdlib reports whether the given import path could be part of the Go +-// standard library, by reporting whether the first component lacks a '.'. +-func maybeStdlib(path string) bool { +- if err := module.CheckImportPath(path); err != nil { +- return false +- } +- if i := strings.IndexByte(path, '/'); i != -1 { +- path = path[:i] +- } +- return !strings.Contains(path, ".") +-} +diff -urN a/gopls/internal/vulncheck/vulntest/stdlib_test.go b/gopls/internal/vulncheck/vulntest/stdlib_test.go +--- a/gopls/internal/vulncheck/vulntest/stdlib_test.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/vulncheck/vulntest/stdlib_test.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,24 +0,0 @@ +-// Copyright 2022 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. 
+- +-package vulntest +- +-import "testing" +- +-func TestMaybeStdlib(t *testing.T) { +- for _, test := range []struct { +- in string +- want bool +- }{ +- {"", false}, +- {"math/crypto", true}, +- {"github.com/pkg/errors", false}, +- {"Path is unknown", false}, +- } { +- got := maybeStdlib(test.in) +- if got != test.want { +- t.Errorf("%q: got %t, want %t", test.in, got, test.want) +- } +- } +-} +diff -urN a/gopls/internal/vulncheck/vulntest/testdata/GO-2020-0001.json b/gopls/internal/vulncheck/vulntest/testdata/GO-2020-0001.json +--- a/gopls/internal/vulncheck/vulntest/testdata/GO-2020-0001.json 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/vulncheck/vulntest/testdata/GO-2020-0001.json 1969-12-31 18:00:00.000000000 -0600 +@@ -1,50 +0,0 @@ +-{ +- "id": "GO-2020-0001", +- "modified": "0001-01-01T00:00:00Z", +- "published": "0001-01-01T00:00:00Z", +- "details": "The default Formatter for the Logger middleware (LoggerConfig.Formatter),\nwhich is included in the Default engine, allows attackers to inject arbitrary\nlog entries by manipulating the request path.\n", +- "affected": [ +- { +- "package": { +- "name": "github.com/gin-gonic/gin", +- "ecosystem": "Go" +- }, +- "ranges": [ +- { +- "type": "SEMVER", +- "events": [ +- { +- "introduced": "0" +- }, +- { +- "fixed": "1.6.0" +- } +- ] +- } +- ], +- "ecosystem_specific": { +- "imports": [ +- { +- "path": "github.com/gin-gonic/gin", +- "symbols": [ +- "defaultLogFormatter" +- ] +- } +- ] +- } +- } +- ], +- "references": [ +- { +- "type": "FIX", +- "url": "https://github.com/gin-gonic/gin/pull/1234" +- }, +- { +- "type": "FIX", +- "url": "https://github.com/gin-gonic/gin/commit/abcdefg" +- } +- ], +- "database_specific": { +- "url": "https://pkg.go.dev/vuln/GO-2020-0001" +- } +-} +\ No newline at end of file +diff -urN a/gopls/internal/vulncheck/vulntest/testdata/report.yaml b/gopls/internal/vulncheck/vulntest/testdata/report.yaml +--- a/gopls/internal/vulncheck/vulntest/testdata/report.yaml 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/vulncheck/vulntest/testdata/report.yaml 1969-12-31 18:00:00.000000000 -0600 +@@ -1,15 +0,0 @@ +-modules: +- - module: github.com/gin-gonic/gin +- versions: +- - fixed: 1.6.0 +- packages: +- - package: github.com/gin-gonic/gin +- symbols: +- - defaultLogFormatter +-description: | +- The default Formatter for the Logger middleware (LoggerConfig.Formatter), +- which is included in the Default engine, allows attackers to inject arbitrary +- log entries by manipulating the request path. +-references: +- - fix: https://github.com/gin-gonic/gin/pull/1234 +- - fix: https://github.com/gin-gonic/gin/commit/abcdefg +diff -urN a/gopls/internal/work/completion.go b/gopls/internal/work/completion.go +--- a/gopls/internal/work/completion.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/work/completion.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,161 +0,0 @@ +-// Copyright 2022 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. 
+- +-package work +- +-import ( +- "context" +- "errors" +- "fmt" +- "io/fs" +- "os" +- "path/filepath" +- "sort" +- "strings" +- +- "golang.org/x/tools/gopls/internal/cache" +- "golang.org/x/tools/gopls/internal/file" +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/internal/event" +-) +- +-func Completion(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle, position protocol.Position) (*protocol.CompletionList, error) { +- ctx, done := event.Start(ctx, "work.Completion") +- defer done() +- +- // Get the position of the cursor. +- pw, err := snapshot.ParseWork(ctx, fh) +- if err != nil { +- return nil, fmt.Errorf("getting go.work file handle: %w", err) +- } +- cursor, err := pw.Mapper.PositionOffset(position) +- if err != nil { +- return nil, fmt.Errorf("computing cursor offset: %w", err) +- } +- +- // Find the use statement the user is in. +- use, pathStart, _ := usePath(pw, cursor) +- if use == nil { +- return &protocol.CompletionList{}, nil +- } +- completingFrom := use.Path[:cursor-pathStart] +- +- // We're going to find the completions of the user input +- // (completingFrom) by doing a walk on the innermost directory +- // of the given path, and comparing the found paths to make sure +- // that they match the component of the path after the +- // innermost directory. +- // +- // We'll maintain two paths when doing this: pathPrefixSlash +- // is essentially the path the user typed in, and pathPrefixAbs +- // is the path made absolute from the go.work directory. +- +- pathPrefixSlash := completingFrom +- pathPrefixAbs := filepath.FromSlash(pathPrefixSlash) +- if !filepath.IsAbs(pathPrefixAbs) { +- pathPrefixAbs = filepath.Join(pw.URI.DirPath(), pathPrefixAbs) +- } +- +- // pathPrefixDir is the directory that will be walked to find matches. +- // If pathPrefixSlash is not explicitly a directory boundary (is either equivalent to "." or +- // ends in a separator) we need to examine its parent directory to find sibling files that +- // match. +- depthBound := 5 +- pathPrefixDir, pathPrefixBase := pathPrefixAbs, "" +- pathPrefixSlashDir := pathPrefixSlash +- if filepath.Clean(pathPrefixSlash) != "." && !strings.HasSuffix(pathPrefixSlash, "/") { +- depthBound++ +- pathPrefixDir, pathPrefixBase = filepath.Split(pathPrefixAbs) +- pathPrefixSlashDir = dirNonClean(pathPrefixSlash) +- } +- +- var completions []string +- // Stop traversing deeper once we've hit 10k files to try to stay generally under 100ms. +- const numSeenBound = 10000 +- var numSeen int +- stopWalking := errors.New("hit numSeenBound") +- err = filepath.WalkDir(pathPrefixDir, func(wpath string, entry fs.DirEntry, err error) error { +- if err != nil { +- // golang/go#64225: an error reading a dir is expected, as the user may +- // be typing out a use directive for a directory that doesn't exist. +- return nil +- } +- if numSeen > numSeenBound { +- // Stop traversing if we hit bound. +- return stopWalking +- } +- numSeen++ +- +- // rel is the path relative to pathPrefixDir. +- // Make sure that it has pathPrefixBase as a prefix +- // otherwise it won't match the beginning of the +- // base component of the path the user typed in. +- rel := strings.TrimPrefix(wpath[len(pathPrefixDir):], string(filepath.Separator)) +- if entry.IsDir() && wpath != pathPrefixDir && !strings.HasPrefix(rel, pathPrefixBase) { +- return filepath.SkipDir +- } +- +- // Check for a match (a module directory). 
+- if filepath.Base(rel) == "go.mod" { +- relDir := strings.TrimSuffix(dirNonClean(rel), string(os.PathSeparator)) +- completionPath := join(pathPrefixSlashDir, filepath.ToSlash(relDir)) +- +- if !strings.HasPrefix(completionPath, completingFrom) { +- return nil +- } +- if strings.HasSuffix(completionPath, "/") { +- // Don't suggest paths that end in "/". This happens +- // when the input is a path that ends in "/" and +- // the completion is empty. +- return nil +- } +- completion := completionPath[len(completingFrom):] +- if completingFrom == "" && !strings.HasPrefix(completion, "./") { +- // Bias towards "./" prefixes. +- completion = join(".", completion) +- } +- +- completions = append(completions, completion) +- } +- +- if depth := strings.Count(rel, string(filepath.Separator)); depth >= depthBound { +- return filepath.SkipDir +- } +- return nil +- }) +- if err != nil && !errors.Is(err, stopWalking) { +- return nil, fmt.Errorf("walking to find completions: %w", err) +- } +- +- sort.Strings(completions) +- +- items := []protocol.CompletionItem{} // must be a slice +- for _, c := range completions { +- items = append(items, protocol.CompletionItem{ +- Label: c, +- InsertText: c, +- }) +- } +- return &protocol.CompletionList{Items: items}, nil +-} +- +-// dirNonClean is filepath.Dir, without the Clean at the end. +-func dirNonClean(path string) string { +- vol := filepath.VolumeName(path) +- i := len(path) - 1 +- for i >= len(vol) && !os.IsPathSeparator(path[i]) { +- i-- +- } +- return path[len(vol) : i+1] +-} +- +-func join(a, b string) string { +- if a == "" { +- return b +- } +- if b == "" { +- return a +- } +- return strings.TrimSuffix(a, "/") + "/" + b +-} +diff -urN a/gopls/internal/work/diagnostics.go b/gopls/internal/work/diagnostics.go +--- a/gopls/internal/work/diagnostics.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/work/diagnostics.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,92 +0,0 @@ +-// Copyright 2022 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package work +- +-import ( +- "context" +- "fmt" +- "os" +- "path/filepath" +- +- "golang.org/x/mod/modfile" +- "golang.org/x/tools/gopls/internal/cache" +- "golang.org/x/tools/gopls/internal/file" +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/internal/event" +-) +- +-func Diagnostics(ctx context.Context, snapshot *cache.Snapshot) (map[protocol.DocumentURI][]*cache.Diagnostic, error) { +- ctx, done := event.Start(ctx, "work.Diagnostics", snapshot.Labels()...) +- defer done() +- +- reports := map[protocol.DocumentURI][]*cache.Diagnostic{} +- uri := snapshot.View().GoWork() +- if uri == "" { +- return nil, nil +- } +- fh, err := snapshot.ReadFile(ctx, uri) +- if err != nil { +- return nil, err +- } +- reports[fh.URI()] = []*cache.Diagnostic{} +- diagnostics, err := diagnoseOne(ctx, snapshot, fh) +- if err != nil { +- return nil, err +- } +- for _, d := range diagnostics { +- fh, err := snapshot.ReadFile(ctx, d.URI) +- if err != nil { +- return nil, err +- } +- reports[fh.URI()] = append(reports[fh.URI()], d) +- } +- +- return reports, nil +-} +- +-func diagnoseOne(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle) ([]*cache.Diagnostic, error) { +- pw, err := snapshot.ParseWork(ctx, fh) +- if err != nil { +- if pw == nil || len(pw.ParseErrors) == 0 { +- return nil, err +- } +- return pw.ParseErrors, nil +- } +- +- // Add diagnostic if a directory does not contain a module. 
+- var diagnostics []*cache.Diagnostic +- for _, use := range pw.File.Use { +- rng, err := pw.Mapper.OffsetRange(use.Syntax.Start.Byte, use.Syntax.End.Byte) +- if err != nil { +- return nil, err +- } +- +- modfh, err := snapshot.ReadFile(ctx, modFileURI(pw, use)) +- if err != nil { +- return nil, err +- } +- if _, err := modfh.Content(); err != nil && os.IsNotExist(err) { +- diagnostics = append(diagnostics, &cache.Diagnostic{ +- URI: fh.URI(), +- Range: rng, +- Severity: protocol.SeverityError, +- Source: cache.WorkFileError, +- Message: fmt.Sprintf("directory %v does not contain a module", use.Path), +- }) +- } +- } +- return diagnostics, nil +-} +- +-func modFileURI(pw *cache.ParsedWorkFile, use *modfile.Use) protocol.DocumentURI { +- workdir := pw.URI.DirPath() +- +- modroot := filepath.FromSlash(use.Path) +- if !filepath.IsAbs(modroot) { +- modroot = filepath.Join(workdir, modroot) +- } +- +- return protocol.URIFromPath(filepath.Join(modroot, "go.mod")) +-} +diff -urN a/gopls/internal/work/format.go b/gopls/internal/work/format.go +--- a/gopls/internal/work/format.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/work/format.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,30 +0,0 @@ +-// Copyright 2022 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package work +- +-import ( +- "context" +- +- "golang.org/x/mod/modfile" +- "golang.org/x/tools/gopls/internal/cache" +- "golang.org/x/tools/gopls/internal/file" +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/internal/diff" +- "golang.org/x/tools/internal/event" +-) +- +-func Format(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle) ([]protocol.TextEdit, error) { +- ctx, done := event.Start(ctx, "work.Format") +- defer done() +- +- pw, err := snapshot.ParseWork(ctx, fh) +- if err != nil { +- return nil, err +- } +- formatted := modfile.Format(pw.File.Syntax) +- // Calculate the edits to be made due to the change. +- diffs := diff.Bytes(pw.Mapper.Content, formatted) +- return protocol.EditsFromDiffEdits(pw.Mapper, diffs) +-} +diff -urN a/gopls/internal/work/hover.go b/gopls/internal/work/hover.go +--- a/gopls/internal/work/hover.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/internal/work/hover.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,93 +0,0 @@ +-// Copyright 2022 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-package work +- +-import ( +- "bytes" +- "context" +- "fmt" +- +- "golang.org/x/mod/modfile" +- "golang.org/x/tools/gopls/internal/cache" +- "golang.org/x/tools/gopls/internal/file" +- "golang.org/x/tools/gopls/internal/protocol" +- "golang.org/x/tools/internal/event" +-) +- +-func Hover(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle, position protocol.Position) (*protocol.Hover, error) { +- // We only provide hover information for the view's go.work file. +- if fh.URI() != snapshot.View().GoWork() { +- return nil, nil +- } +- +- ctx, done := event.Start(ctx, "work.Hover") +- defer done() +- +- // Get the position of the cursor. 
+- pw, err := snapshot.ParseWork(ctx, fh) +- if err != nil { +- return nil, fmt.Errorf("getting go.work file handle: %w", err) +- } +- offset, err := pw.Mapper.PositionOffset(position) +- if err != nil { +- return nil, fmt.Errorf("computing cursor offset: %w", err) +- } +- +- // Confirm that the cursor is inside a use statement, and then find +- // the position of the use statement's directory path. +- use, pathStart, pathEnd := usePath(pw, offset) +- +- // The cursor position is not on a use statement. +- if use == nil { +- return nil, nil +- } +- +- // Get the mod file denoted by the use. +- modfh, err := snapshot.ReadFile(ctx, modFileURI(pw, use)) +- if err != nil { +- return nil, fmt.Errorf("getting modfile handle: %w", err) +- } +- pm, err := snapshot.ParseMod(ctx, modfh) +- if err != nil { +- return nil, fmt.Errorf("getting modfile handle: %w", err) +- } +- if pm.File.Module == nil { +- return nil, fmt.Errorf("modfile has no module declaration") +- } +- mod := pm.File.Module.Mod +- +- // Get the range to highlight for the hover. +- rng, err := pw.Mapper.OffsetRange(pathStart, pathEnd) +- if err != nil { +- return nil, err +- } +- options := snapshot.Options() +- return &protocol.Hover{ +- Contents: protocol.MarkupContent{ +- Kind: options.PreferredContentFormat, +- Value: mod.Path, +- }, +- Range: rng, +- }, nil +-} +- +-func usePath(pw *cache.ParsedWorkFile, offset int) (use *modfile.Use, pathStart, pathEnd int) { +- for _, u := range pw.File.Use { +- path := []byte(u.Path) +- s, e := u.Syntax.Start.Byte, u.Syntax.End.Byte +- i := bytes.Index(pw.Mapper.Content[s:e], path) +- if i == -1 { +- // This should not happen. +- continue +- } +- // Shift the start position to the location of the +- // module directory within the use statement. +- pathStart, pathEnd = s+i, s+i+len(path) +- if pathStart <= offset && offset <= pathEnd { +- return u, pathStart, pathEnd +- } +- } +- return nil, 0, 0 +-} +diff -urN a/gopls/main.go b/gopls/main.go +--- a/gopls/main.go 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/main.go 1969-12-31 18:00:00.000000000 -0600 +@@ -1,56 +0,0 @@ +-// Copyright 2019 The Go Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style +-// license that can be found in the LICENSE file. +- +-// Gopls (pronounced “go please”) is an LSP server for Go. +-// The Language Server Protocol allows any text editor +-// to be extended with IDE-like features; +-// see https://langserver.org/ for details. +-// +-// See https://go.dev/gopls for comprehensive documentation on Gopls. +-package main +- +-import ( +- "context" +- "log" +- "os" +- +- "golang.org/x/telemetry" +- "golang.org/x/telemetry/counter" +- "golang.org/x/tools/gopls/internal/cmd" +- "golang.org/x/tools/gopls/internal/filecache" +- versionpkg "golang.org/x/tools/gopls/internal/version" +- "golang.org/x/tools/internal/tool" +-) +- +-var version = "" // if set by the linker, overrides the gopls version +- +-func main() { +- versionpkg.VersionOverride = version +- +- telemetry.Start(telemetry.Config{ +- ReportCrashes: true, +- Upload: true, +- }) +- +- // Force early creation of the filecache and refuse to start +- // if there were unexpected errors such as ENOSPC. This +- // minimizes the window of exposure to deletion of the +- // executable, and ensures that all subsequent calls to +- // filecache.Get cannot fail for these two reasons; +- // see issue #67433. +- // +- // This leaves only one likely cause for later failures: +- // deletion of the cache while gopls is running. 
If the +- // problem continues, we could periodically stat the cache +- // directory (for example at the start of every RPC) and +- // either re-create it or just fail the RPC with an +- // informative error and terminate the process. +- if _, err := filecache.Get("nonesuch", [32]byte{}); err != nil && err != filecache.ErrNotFound { +- counter.Inc("gopls/nocache") +- log.Fatalf("gopls cannot access its persistent index (disk full?): %v", err) +- } +- +- ctx := context.Background() +- tool.Main(ctx, cmd.New(), os.Args[1:]) +-} +diff -urN a/gopls/README.md b/gopls/README.md +--- a/gopls/README.md 2000-01-01 00:00:00.000000000 -0000 ++++ b/gopls/README.md 1969-12-31 18:00:00.000000000 -0600 +@@ -1,13 +0,0 @@ +-# `gopls`, the language server for Go +- +-[![PkgGoDev](https://pkg.go.dev/badge/golang.org/x/tools/gopls)](https://pkg.go.dev/golang.org/x/tools/gopls) +- +-`gopls` (pronounced "Go please") is the official [language +-server](https://langserver.org) for Go, developed and maintained by +-the Go team. +-It provides a wide variety of [IDE features](doc/features/) +-to any [LSP](https://microsoft.github.io/language-server-protocol/)-compatible +-editor. +- +-- Documentation for [users](https://go.dev/gopls) +-- Documentation for [contributors](doc/contributing.md) diff --git a/third_party/org_golang_x_tools-gazelle.patch b/third_party/org_golang_x_tools-gazelle.patch new file mode 100644 index 0000000000..0aab2cda96 --- /dev/null +++ b/third_party/org_golang_x_tools-gazelle.patch @@ -0,0 +1,11931 @@ +diff -urN b/benchmark/parse/BUILD.bazel c/benchmark/parse/BUILD.bazel +--- b/benchmark/parse/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/benchmark/parse/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,20 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test") ++ ++go_library( ++ name = "parse", ++ srcs = ["parse.go"], ++ importpath = "golang.org/x/tools/benchmark/parse", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":parse", ++ visibility = ["//visibility:public"], ++) ++ ++go_test( ++ name = "parse_test", ++ srcs = ["parse_test.go"], ++ embed = [":parse"], ++) +diff -urN b/blog/atom/BUILD.bazel c/blog/atom/BUILD.bazel +--- b/blog/atom/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/blog/atom/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "atom", ++ srcs = ["atom.go"], ++ importpath = "golang.org/x/tools/blog/atom", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":atom", ++ visibility = ["//visibility:public"], ++) +diff -urN b/blog/BUILD.bazel c/blog/BUILD.bazel +--- b/blog/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/blog/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,24 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test") ++ ++go_library( ++ name = "blog", ++ srcs = ["blog.go"], ++ importpath = "golang.org/x/tools/blog", ++ visibility = ["//visibility:public"], ++ deps = [ ++ "//blog/atom", ++ "//present", ++ ], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":blog", ++ visibility = ["//visibility:public"], ++) ++ ++go_test( ++ name = "blog_test", ++ srcs = ["blog_test.go"], ++ embed = [":blog"], ++) +diff -urN b/cmd/auth/authtest/BUILD.bazel c/cmd/auth/authtest/BUILD.bazel +--- b/cmd/auth/authtest/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/cmd/auth/authtest/BUILD.bazel 
2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_binary", "go_library") ++ ++go_library( ++ name = "authtest_lib", ++ srcs = ["authtest.go"], ++ importpath = "golang.org/x/tools/cmd/auth/authtest", ++ visibility = ["//visibility:private"], ++) ++ ++go_binary( ++ name = "authtest", ++ embed = [":authtest_lib"], ++ visibility = ["//visibility:public"], ++) +diff -urN b/cmd/auth/cookieauth/BUILD.bazel c/cmd/auth/cookieauth/BUILD.bazel +--- b/cmd/auth/cookieauth/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/cmd/auth/cookieauth/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_binary", "go_library") ++ ++go_library( ++ name = "cookieauth_lib", ++ srcs = ["cookieauth.go"], ++ importpath = "golang.org/x/tools/cmd/auth/cookieauth", ++ visibility = ["//visibility:private"], ++) ++ ++go_binary( ++ name = "cookieauth", ++ embed = [":cookieauth_lib"], ++ visibility = ["//visibility:public"], ++) +diff -urN b/cmd/auth/gitauth/BUILD.bazel c/cmd/auth/gitauth/BUILD.bazel +--- b/cmd/auth/gitauth/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/cmd/auth/gitauth/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_binary", "go_library") ++ ++go_library( ++ name = "gitauth_lib", ++ srcs = ["gitauth.go"], ++ importpath = "golang.org/x/tools/cmd/auth/gitauth", ++ visibility = ["//visibility:private"], ++) ++ ++go_binary( ++ name = "gitauth", ++ embed = [":gitauth_lib"], ++ visibility = ["//visibility:public"], ++) +diff -urN b/cmd/auth/netrcauth/BUILD.bazel c/cmd/auth/netrcauth/BUILD.bazel +--- b/cmd/auth/netrcauth/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/cmd/auth/netrcauth/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_binary", "go_library") ++ ++go_library( ++ name = "netrcauth_lib", ++ srcs = ["netrcauth.go"], ++ importpath = "golang.org/x/tools/cmd/auth/netrcauth", ++ visibility = ["//visibility:private"], ++) ++ ++go_binary( ++ name = "netrcauth", ++ embed = [":netrcauth_lib"], ++ visibility = ["//visibility:public"], ++) +diff -urN b/cmd/benchcmp/BUILD.bazel c/cmd/benchcmp/BUILD.bazel +--- b/cmd/benchcmp/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/cmd/benchcmp/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,29 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_binary", "go_library", "go_test") ++ ++go_library( ++ name = "benchcmp_lib", ++ srcs = [ ++ "benchcmp.go", ++ "compare.go", ++ "doc.go", ++ ], ++ importpath = "golang.org/x/tools/cmd/benchcmp", ++ visibility = ["//visibility:private"], ++ deps = ["//benchmark/parse"], ++) ++ ++go_binary( ++ name = "benchcmp", ++ embed = [":benchcmp_lib"], ++ visibility = ["//visibility:public"], ++) ++ ++go_test( ++ name = "benchcmp_test", ++ srcs = [ ++ "benchcmp_test.go", ++ "compare_test.go", ++ ], ++ embed = [":benchcmp_lib"], ++ deps = ["//benchmark/parse"], ++) +diff -urN b/cmd/bisect/BUILD.bazel c/cmd/bisect/BUILD.bazel +--- b/cmd/bisect/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/cmd/bisect/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,30 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_binary", "go_library", "go_test") ++ ++go_library( ++ name = "bisect_lib", ++ srcs = [ ++ "go120.go", ++ "main.go", ++ ], ++ importpath = "golang.org/x/tools/cmd/bisect", ++ visibility = ["//visibility:private"], ++ deps = ["//internal/bisect"], ++) ++ ++go_binary( ++ name = 
"bisect", ++ embed = [":bisect_lib"], ++ visibility = ["//visibility:public"], ++) ++ ++go_test( ++ name = "bisect_test", ++ srcs = ["main_test.go"], ++ data = glob(["testdata/**"]), ++ embed = [":bisect_lib"], ++ deps = [ ++ "//internal/bisect", ++ "//internal/diffp", ++ "//txtar", ++ ], ++) +diff -urN b/cmd/bundle/BUILD.bazel c/cmd/bundle/BUILD.bazel +--- b/cmd/bundle/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/cmd/bundle/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,22 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_binary", "go_library", "go_test") ++ ++go_library( ++ name = "bundle_lib", ++ srcs = ["main.go"], ++ importpath = "golang.org/x/tools/cmd/bundle", ++ visibility = ["//visibility:private"], ++ deps = ["//go/packages"], ++) ++ ++go_binary( ++ name = "bundle", ++ embed = [":bundle_lib"], ++ visibility = ["//visibility:public"], ++) ++ ++go_test( ++ name = "bundle_test", ++ srcs = ["main_test.go"], ++ embed = [":bundle_lib"], ++ deps = ["//internal/packagestest"], ++) +diff -urN b/cmd/bundle/testdata/src/domain.name/importdecl/BUILD.bazel c/cmd/bundle/testdata/src/domain.name/importdecl/BUILD.bazel +--- b/cmd/bundle/testdata/src/domain.name/importdecl/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/cmd/bundle/testdata/src/domain.name/importdecl/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "importdecl", ++ srcs = ["p.go"], ++ importpath = "golang.org/x/tools/cmd/bundle/testdata/src/domain.name/importdecl", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":importdecl", ++ visibility = ["//visibility:public"], ++) +diff -urN b/cmd/bundle/testdata/src/initial/BUILD.bazel c/cmd/bundle/testdata/src/initial/BUILD.bazel +--- b/cmd/bundle/testdata/src/initial/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/cmd/bundle/testdata/src/initial/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,18 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "initial", ++ srcs = [ ++ "a.go", ++ "b.go", ++ "c.go", ++ ], ++ importpath = "golang.org/x/tools/cmd/bundle/testdata/src/initial", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":initial", ++ visibility = ["//visibility:public"], ++) +diff -urN b/cmd/callgraph/BUILD.bazel c/cmd/callgraph/BUILD.bazel +--- b/cmd/callgraph/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/cmd/callgraph/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,72 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_binary", "go_library", "go_test") ++ ++go_library( ++ name = "callgraph_lib", ++ srcs = ["main.go"], ++ importpath = "golang.org/x/tools/cmd/callgraph", ++ visibility = ["//visibility:private"], ++ deps = [ ++ "//go/callgraph", ++ "//go/callgraph/cha", ++ "//go/callgraph/rta", ++ "//go/callgraph/static", ++ "//go/callgraph/vta", ++ "//go/packages", ++ "//go/ssa", ++ "//go/ssa/ssautil", ++ ], ++) ++ ++go_binary( ++ name = "callgraph", ++ embed = [":callgraph_lib"], ++ visibility = ["//visibility:public"], ++) ++ ++go_test( ++ name = "callgraph_test", ++ srcs = ["main_test.go"], ++ embed = [":callgraph_lib"], ++ deps = select({ ++ "@io_bazel_rules_go//go/platform:aix": [ ++ "//internal/testenv", ++ ], ++ "@io_bazel_rules_go//go/platform:darwin": [ ++ "//internal/testenv", ++ ], ++ "@io_bazel_rules_go//go/platform:dragonfly": [ ++ "//internal/testenv", ++ ], ++ 
"@io_bazel_rules_go//go/platform:freebsd": [ ++ "//internal/testenv", ++ ], ++ "@io_bazel_rules_go//go/platform:illumos": [ ++ "//internal/testenv", ++ ], ++ "@io_bazel_rules_go//go/platform:ios": [ ++ "//internal/testenv", ++ ], ++ "@io_bazel_rules_go//go/platform:js": [ ++ "//internal/testenv", ++ ], ++ "@io_bazel_rules_go//go/platform:linux": [ ++ "//internal/testenv", ++ ], ++ "@io_bazel_rules_go//go/platform:netbsd": [ ++ "//internal/testenv", ++ ], ++ "@io_bazel_rules_go//go/platform:openbsd": [ ++ "//internal/testenv", ++ ], ++ "@io_bazel_rules_go//go/platform:plan9": [ ++ "//internal/testenv", ++ ], ++ "@io_bazel_rules_go//go/platform:solaris": [ ++ "//internal/testenv", ++ ], ++ "@io_bazel_rules_go//go/platform:windows": [ ++ "//internal/testenv", ++ ], ++ "//conditions:default": [], ++ }), ++) +diff -urN b/cmd/callgraph/testdata/src/pkg/BUILD.bazel c/cmd/callgraph/testdata/src/pkg/BUILD.bazel +--- b/cmd/callgraph/testdata/src/pkg/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/cmd/callgraph/testdata/src/pkg/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,20 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_binary", "go_library", "go_test") ++ ++go_library( ++ name = "pkg_lib", ++ srcs = ["pkg.go"], ++ importpath = "golang.org/x/tools/cmd/callgraph/testdata/src/pkg", ++ visibility = ["//visibility:private"], ++) ++ ++go_binary( ++ name = "pkg", ++ embed = [":pkg_lib"], ++ visibility = ["//visibility:public"], ++) ++ ++go_test( ++ name = "pkg_test", ++ srcs = ["pkg_test.go"], ++ embed = [":pkg_lib"], ++) +diff -urN b/cmd/compilebench/BUILD.bazel c/cmd/compilebench/BUILD.bazel +--- b/cmd/compilebench/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/cmd/compilebench/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_binary", "go_library") ++ ++go_library( ++ name = "compilebench_lib", ++ srcs = ["main.go"], ++ importpath = "golang.org/x/tools/cmd/compilebench", ++ visibility = ["//visibility:private"], ++) ++ ++go_binary( ++ name = "compilebench", ++ embed = [":compilebench_lib"], ++ visibility = ["//visibility:public"], ++) +diff -urN b/cmd/deadcode/BUILD.bazel c/cmd/deadcode/BUILD.bazel +--- b/cmd/deadcode/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/cmd/deadcode/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,37 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_binary", "go_library", "go_test") ++ ++go_library( ++ name = "deadcode_lib", ++ srcs = [ ++ "deadcode.go", ++ "doc.go", ++ ], ++ embedsrcs = ["doc.go"], ++ importpath = "golang.org/x/tools/cmd/deadcode", ++ visibility = ["//visibility:private"], ++ deps = [ ++ "//go/callgraph", ++ "//go/callgraph/rta", ++ "//go/packages", ++ "//go/ssa", ++ "//go/ssa/ssautil", ++ "//internal/typesinternal", ++ "@org_golang_x_telemetry//:go_default_library", ++ ], ++) ++ ++go_binary( ++ name = "deadcode", ++ embed = [":deadcode_lib"], ++ visibility = ["//visibility:public"], ++) ++ ++go_test( ++ name = "deadcode_test", ++ srcs = ["deadcode_test.go"], ++ data = glob(["testdata/**"]), ++ deps = [ ++ "//internal/testenv", ++ "//txtar", ++ ], ++) +diff -urN b/cmd/digraph/BUILD.bazel c/cmd/digraph/BUILD.bazel +--- b/cmd/digraph/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/cmd/digraph/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,24 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_binary", "go_library", "go_test") ++ ++go_library( ++ name = "digraph_lib", ++ srcs = [ ++ "digraph.go", ++ "doc.go", ++ ], ++ embedsrcs = 
["doc.go"], ++ importpath = "golang.org/x/tools/cmd/digraph", ++ visibility = ["//visibility:private"], ++) ++ ++go_binary( ++ name = "digraph", ++ embed = [":digraph_lib"], ++ visibility = ["//visibility:public"], ++) ++ ++go_test( ++ name = "digraph_test", ++ srcs = ["digraph_test.go"], ++ embed = [":digraph_lib"], ++) +diff -urN b/cmd/eg/BUILD.bazel c/cmd/eg/BUILD.bazel +--- b/cmd/eg/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/cmd/eg/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,18 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_binary", "go_library") ++ ++go_library( ++ name = "eg_lib", ++ srcs = ["eg.go"], ++ importpath = "golang.org/x/tools/cmd/eg", ++ visibility = ["//visibility:private"], ++ deps = [ ++ "//go/packages", ++ "//refactor/eg", ++ ], ++) ++ ++go_binary( ++ name = "eg", ++ embed = [":eg_lib"], ++ visibility = ["//visibility:public"], ++) +diff -urN b/cmd/file2fuzz/BUILD.bazel c/cmd/file2fuzz/BUILD.bazel +--- b/cmd/file2fuzz/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/cmd/file2fuzz/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,21 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_binary", "go_library", "go_test") ++ ++go_library( ++ name = "file2fuzz_lib", ++ srcs = ["main.go"], ++ importpath = "golang.org/x/tools/cmd/file2fuzz", ++ visibility = ["//visibility:private"], ++) ++ ++go_binary( ++ name = "file2fuzz", ++ embed = [":file2fuzz_lib"], ++ visibility = ["//visibility:public"], ++) ++ ++go_test( ++ name = "file2fuzz_test", ++ srcs = ["main_test.go"], ++ embed = [":file2fuzz_lib"], ++ deps = ["//internal/testenv"], ++) +diff -urN b/cmd/fiximports/BUILD.bazel c/cmd/fiximports/BUILD.bazel +--- b/cmd/fiximports/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/cmd/fiximports/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,62 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_binary", "go_library", "go_test") ++ ++go_library( ++ name = "fiximports_lib", ++ srcs = ["main.go"], ++ importpath = "golang.org/x/tools/cmd/fiximports", ++ visibility = ["//visibility:private"], ++) ++ ++go_binary( ++ name = "fiximports", ++ embed = [":fiximports_lib"], ++ visibility = ["//visibility:public"], ++) ++ ++go_test( ++ name = "fiximports_test", ++ srcs = ["main_test.go"], ++ embed = [":fiximports_lib"], ++ deps = select({ ++ "@io_bazel_rules_go//go/platform:aix": [ ++ "//internal/testenv", ++ ], ++ "@io_bazel_rules_go//go/platform:darwin": [ ++ "//internal/testenv", ++ ], ++ "@io_bazel_rules_go//go/platform:dragonfly": [ ++ "//internal/testenv", ++ ], ++ "@io_bazel_rules_go//go/platform:freebsd": [ ++ "//internal/testenv", ++ ], ++ "@io_bazel_rules_go//go/platform:illumos": [ ++ "//internal/testenv", ++ ], ++ "@io_bazel_rules_go//go/platform:ios": [ ++ "//internal/testenv", ++ ], ++ "@io_bazel_rules_go//go/platform:js": [ ++ "//internal/testenv", ++ ], ++ "@io_bazel_rules_go//go/platform:linux": [ ++ "//internal/testenv", ++ ], ++ "@io_bazel_rules_go//go/platform:netbsd": [ ++ "//internal/testenv", ++ ], ++ "@io_bazel_rules_go//go/platform:openbsd": [ ++ "//internal/testenv", ++ ], ++ "@io_bazel_rules_go//go/platform:plan9": [ ++ "//internal/testenv", ++ ], ++ "@io_bazel_rules_go//go/platform:solaris": [ ++ "//internal/testenv", ++ ], ++ "@io_bazel_rules_go//go/platform:windows": [ ++ "//internal/testenv", ++ ], ++ "//conditions:default": [], ++ }), ++) +diff -urN b/cmd/fiximports/testdata/src/fruit.io/banana/BUILD.bazel c/cmd/fiximports/testdata/src/fruit.io/banana/BUILD.bazel +--- 
b/cmd/fiximports/testdata/src/fruit.io/banana/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/cmd/fiximports/testdata/src/fruit.io/banana/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "banana", ++ srcs = ["banana.go"], ++ importpath = "golang.org/x/tools/cmd/fiximports/testdata/src/fruit.io/banana", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":banana", ++ visibility = ["//visibility:public"], ++) +diff -urN b/cmd/fiximports/testdata/src/fruit.io/orange/BUILD.bazel c/cmd/fiximports/testdata/src/fruit.io/orange/BUILD.bazel +--- b/cmd/fiximports/testdata/src/fruit.io/orange/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/cmd/fiximports/testdata/src/fruit.io/orange/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "orange", ++ srcs = ["orange.go"], ++ importpath = "golang.org/x/tools/cmd/fiximports/testdata/src/fruit.io/orange", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":orange", ++ visibility = ["//visibility:public"], ++) +diff -urN b/cmd/fiximports/testdata/src/fruit.io/pear/BUILD.bazel c/cmd/fiximports/testdata/src/fruit.io/pear/BUILD.bazel +--- b/cmd/fiximports/testdata/src/fruit.io/pear/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/cmd/fiximports/testdata/src/fruit.io/pear/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "pear", ++ srcs = ["pear.go"], ++ importpath = "golang.org/x/tools/cmd/fiximports/testdata/src/fruit.io/pear", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":pear", ++ visibility = ["//visibility:public"], ++) +diff -urN b/cmd/fiximports/testdata/src/new.com/one/BUILD.bazel c/cmd/fiximports/testdata/src/new.com/one/BUILD.bazel +--- b/cmd/fiximports/testdata/src/new.com/one/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/cmd/fiximports/testdata/src/new.com/one/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "one", ++ srcs = ["one.go"], ++ importpath = "golang.org/x/tools/cmd/fiximports/testdata/src/new.com/one", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":one", ++ visibility = ["//visibility:public"], ++) +diff -urN b/cmd/fiximports/testdata/src/old.com/bad/BUILD.bazel c/cmd/fiximports/testdata/src/old.com/bad/BUILD.bazel +--- b/cmd/fiximports/testdata/src/old.com/bad/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/cmd/fiximports/testdata/src/old.com/bad/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "bad", ++ srcs = ["bad.go"], ++ importpath = "golang.org/x/tools/cmd/fiximports/testdata/src/old.com/bad", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":bad", ++ visibility = ["//visibility:public"], ++) +diff -urN b/cmd/fiximports/testdata/src/old.com/one/BUILD.bazel c/cmd/fiximports/testdata/src/old.com/one/BUILD.bazel +--- b/cmd/fiximports/testdata/src/old.com/one/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ 
c/cmd/fiximports/testdata/src/old.com/one/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "one", ++ srcs = ["one.go"], ++ importpath = "golang.org/x/tools/cmd/fiximports/testdata/src/old.com/one", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":one", ++ visibility = ["//visibility:public"], ++) +diff -urN b/cmd/fiximports/testdata/src/titanic.biz/bar/BUILD.bazel c/cmd/fiximports/testdata/src/titanic.biz/bar/BUILD.bazel +--- b/cmd/fiximports/testdata/src/titanic.biz/bar/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/cmd/fiximports/testdata/src/titanic.biz/bar/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "bar", ++ srcs = ["bar.go"], ++ importpath = "golang.org/x/tools/cmd/fiximports/testdata/src/titanic.biz/bar", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":bar", ++ visibility = ["//visibility:public"], ++) +diff -urN b/cmd/fiximports/testdata/src/titanic.biz/foo/BUILD.bazel c/cmd/fiximports/testdata/src/titanic.biz/foo/BUILD.bazel +--- b/cmd/fiximports/testdata/src/titanic.biz/foo/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/cmd/fiximports/testdata/src/titanic.biz/foo/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "foo", ++ srcs = ["foo.go"], ++ importpath = "golang.org/x/tools/cmd/fiximports/testdata/src/titanic.biz/foo", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":foo", ++ visibility = ["//visibility:public"], ++) +diff -urN b/cmd/go-contrib-init/BUILD.bazel c/cmd/go-contrib-init/BUILD.bazel +--- b/cmd/go-contrib-init/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/cmd/go-contrib-init/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,20 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_binary", "go_library", "go_test") ++ ++go_library( ++ name = "go-contrib-init_lib", ++ srcs = ["contrib.go"], ++ importpath = "golang.org/x/tools/cmd/go-contrib-init", ++ visibility = ["//visibility:private"], ++) ++ ++go_binary( ++ name = "go-contrib-init", ++ embed = [":go-contrib-init_lib"], ++ visibility = ["//visibility:public"], ++) ++ ++go_test( ++ name = "go-contrib-init_test", ++ srcs = ["contrib_test.go"], ++ embed = [":go-contrib-init_lib"], ++) +diff -urN b/cmd/godex/BUILD.bazel c/cmd/godex/BUILD.bazel +--- b/cmd/godex/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/cmd/godex/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,24 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_binary", "go_library") ++ ++go_library( ++ name = "godex_lib", ++ srcs = [ ++ "doc.go", ++ "gc.go", ++ "gccgo.go", ++ "godex.go", ++ "isAlias18.go", ++ "isAlias19.go", ++ "print.go", ++ "source.go", ++ "writetype.go", ++ ], ++ importpath = "golang.org/x/tools/cmd/godex", ++ visibility = ["//visibility:private"], ++) ++ ++go_binary( ++ name = "godex", ++ embed = [":godex_lib"], ++ visibility = ["//visibility:public"], ++) +diff -urN b/cmd/goimports/BUILD.bazel c/cmd/goimports/BUILD.bazel +--- b/cmd/goimports/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/cmd/goimports/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,23 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_binary", "go_library") ++ 
++go_library( ++ name = "goimports_lib", ++ srcs = [ ++ "doc.go", ++ "goimports.go", ++ "goimports_gc.go", ++ ], ++ importpath = "golang.org/x/tools/cmd/goimports", ++ visibility = ["//visibility:private"], ++ deps = [ ++ "//internal/gocommand", ++ "//internal/imports", ++ "@org_golang_x_telemetry//counter:go_default_library", ++ ], ++) ++ ++go_binary( ++ name = "goimports", ++ embed = [":goimports_lib"], ++ visibility = ["//visibility:public"], ++) +diff -urN b/cmd/gomvpkg/BUILD.bazel c/cmd/gomvpkg/BUILD.bazel +--- b/cmd/gomvpkg/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/cmd/gomvpkg/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,18 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_binary", "go_library") ++ ++go_library( ++ name = "gomvpkg_lib", ++ srcs = ["main.go"], ++ importpath = "golang.org/x/tools/cmd/gomvpkg", ++ visibility = ["//visibility:private"], ++ deps = [ ++ "//go/buildutil", ++ "//refactor/rename", ++ ], ++) ++ ++go_binary( ++ name = "gomvpkg", ++ embed = [":gomvpkg_lib"], ++ visibility = ["//visibility:public"], ++) +diff -urN b/cmd/gonew/BUILD.bazel c/cmd/gonew/BUILD.bazel +--- b/cmd/gonew/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/cmd/gonew/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,31 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_binary", "go_library", "go_test") ++ ++go_library( ++ name = "gonew_lib", ++ srcs = ["main.go"], ++ importpath = "golang.org/x/tools/cmd/gonew", ++ visibility = ["//visibility:private"], ++ deps = [ ++ "//internal/edit", ++ "@org_golang_x_mod//modfile:go_default_library", ++ "@org_golang_x_mod//module:go_default_library", ++ ], ++) ++ ++go_binary( ++ name = "gonew", ++ embed = [":gonew_lib"], ++ visibility = ["//visibility:public"], ++) ++ ++go_test( ++ name = "gonew_test", ++ srcs = ["main_test.go"], ++ data = glob(["testdata/**"]), ++ embed = [":gonew_lib"], ++ deps = [ ++ "//internal/diffp", ++ "//internal/testenv", ++ "//txtar", ++ ], ++) +diff -urN b/cmd/gotype/BUILD.bazel c/cmd/gotype/BUILD.bazel +--- b/cmd/gotype/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/cmd/gotype/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,18 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_binary", "go_library") ++ ++go_library( ++ name = "gotype_lib", ++ srcs = [ ++ "gotype.go", ++ "sizesFor18.go", ++ "sizesFor19.go", ++ ], ++ importpath = "golang.org/x/tools/cmd/gotype", ++ visibility = ["//visibility:private"], ++) ++ ++go_binary( ++ name = "gotype", ++ embed = [":gotype_lib"], ++ visibility = ["//visibility:public"], ++) +diff -urN b/cmd/goyacc/BUILD.bazel c/cmd/goyacc/BUILD.bazel +--- b/cmd/goyacc/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/cmd/goyacc/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,17 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_binary", "go_library") ++ ++go_library( ++ name = "goyacc_lib", ++ srcs = [ ++ "doc.go", ++ "yacc.go", ++ ], ++ importpath = "golang.org/x/tools/cmd/goyacc", ++ visibility = ["//visibility:private"], ++) ++ ++go_binary( ++ name = "goyacc", ++ embed = [":goyacc_lib"], ++ visibility = ["//visibility:public"], ++) +diff -urN b/cmd/goyacc/testdata/expr/BUILD.bazel c/cmd/goyacc/testdata/expr/BUILD.bazel +--- b/cmd/goyacc/testdata/expr/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/cmd/goyacc/testdata/expr/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "expr_lib", ++ srcs = ["main.go"], ++ importpath = 
"golang.org/x/tools/cmd/goyacc/testdata/expr", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":expr_lib", ++ visibility = ["//visibility:public"], ++) +diff -urN b/cmd/html2article/BUILD.bazel c/cmd/html2article/BUILD.bazel +--- b/cmd/html2article/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/cmd/html2article/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,18 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_binary", "go_library") ++ ++go_library( ++ name = "html2article_lib", ++ srcs = ["conv.go"], ++ importpath = "golang.org/x/tools/cmd/html2article", ++ visibility = ["//visibility:private"], ++ deps = [ ++ "@org_golang_x_net//html:go_default_library", ++ "@org_golang_x_net//html/atom:go_default_library", ++ ], ++) ++ ++go_binary( ++ name = "html2article", ++ embed = [":html2article_lib"], ++ visibility = ["//visibility:public"], ++) +diff -urN b/cmd/present/BUILD.bazel c/cmd/present/BUILD.bazel +--- b/cmd/present/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/cmd/present/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,42 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_binary", "go_library") ++ ++go_library( ++ name = "present_lib", ++ srcs = [ ++ "dir.go", ++ "doc.go", ++ "main.go", ++ "play.go", ++ ], ++ embedsrcs = [ ++ "static/article.css", ++ "static/dir.css", ++ "static/dir.js", ++ "static/favicon.ico", ++ "static/jquery-ui.js", ++ "static/jquery.js", ++ "static/notes.css", ++ "static/notes.js", ++ "static/play.js", ++ "static/playground.js", ++ "static/slides.js", ++ "static/styles.css", ++ "templates/action.tmpl", ++ "templates/article.tmpl", ++ "templates/dir.tmpl", ++ "templates/slides.tmpl", ++ ], ++ importpath = "golang.org/x/tools/cmd/present", ++ visibility = ["//visibility:private"], ++ deps = [ ++ "//playground", ++ "//playground/socket", ++ "//present", ++ ], ++) ++ ++go_binary( ++ name = "present", ++ embed = [":present_lib"], ++ visibility = ["//visibility:public"], ++) +diff -urN b/cmd/present2md/BUILD.bazel c/cmd/present2md/BUILD.bazel +--- b/cmd/present2md/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/cmd/present2md/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,15 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_binary", "go_library") ++ ++go_library( ++ name = "present2md_lib", ++ srcs = ["main.go"], ++ importpath = "golang.org/x/tools/cmd/present2md", ++ visibility = ["//visibility:private"], ++ deps = ["//present"], ++) ++ ++go_binary( ++ name = "present2md", ++ embed = [":present2md_lib"], ++ visibility = ["//visibility:public"], ++) +diff -urN b/cmd/signature-fuzzer/fuzz-driver/BUILD.bazel c/cmd/signature-fuzzer/fuzz-driver/BUILD.bazel +--- b/cmd/signature-fuzzer/fuzz-driver/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/cmd/signature-fuzzer/fuzz-driver/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,22 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_binary", "go_library", "go_test") ++ ++go_library( ++ name = "fuzz-driver_lib", ++ srcs = ["driver.go"], ++ importpath = "golang.org/x/tools/cmd/signature-fuzzer/fuzz-driver", ++ visibility = ["//visibility:private"], ++ deps = ["//cmd/signature-fuzzer/internal/fuzz-generator"], ++) ++ ++go_binary( ++ name = "fuzz-driver", ++ embed = [":fuzz-driver_lib"], ++ visibility = ["//visibility:public"], ++) ++ ++go_test( ++ name = "fuzz-driver_test", ++ srcs = ["drv_test.go"], ++ embed = [":fuzz-driver_lib"], ++ deps = ["//internal/testenv"], ++) +diff -urN 
b/cmd/signature-fuzzer/fuzz-runner/BUILD.bazel c/cmd/signature-fuzzer/fuzz-runner/BUILD.bazel +--- b/cmd/signature-fuzzer/fuzz-runner/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/cmd/signature-fuzzer/fuzz-runner/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,22 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_binary", "go_library", "go_test") ++ ++go_library( ++ name = "fuzz-runner_lib", ++ srcs = ["runner.go"], ++ importpath = "golang.org/x/tools/cmd/signature-fuzzer/fuzz-runner", ++ visibility = ["//visibility:private"], ++ deps = ["//cmd/signature-fuzzer/internal/fuzz-generator"], ++) ++ ++go_binary( ++ name = "fuzz-runner", ++ embed = [":fuzz-runner_lib"], ++ visibility = ["//visibility:public"], ++) ++ ++go_test( ++ name = "fuzz-runner_test", ++ srcs = ["rnr_test.go"], ++ embed = [":fuzz-runner_lib"], ++ deps = ["//internal/testenv"], ++) +diff -urN b/cmd/signature-fuzzer/fuzz-runner/testdata/BUILD.bazel c/cmd/signature-fuzzer/fuzz-runner/testdata/BUILD.bazel +--- b/cmd/signature-fuzzer/fuzz-runner/testdata/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/cmd/signature-fuzzer/fuzz-runner/testdata/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_binary", "go_library") ++ ++go_library( ++ name = "testdata_lib", ++ srcs = ["himom.go"], ++ importpath = "golang.org/x/tools/cmd/signature-fuzzer/fuzz-runner/testdata", ++ visibility = ["//visibility:private"], ++) ++ ++go_binary( ++ name = "testdata", ++ embed = [":testdata_lib"], ++ visibility = ["//visibility:public"], ++) +diff -urN b/cmd/signature-fuzzer/internal/fuzz-generator/BUILD.bazel c/cmd/signature-fuzzer/internal/fuzz-generator/BUILD.bazel +--- b/cmd/signature-fuzzer/internal/fuzz-generator/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/cmd/signature-fuzzer/internal/fuzz-generator/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,32 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test") ++ ++go_library( ++ name = "fuzz-generator", ++ srcs = [ ++ "arrayparm.go", ++ "generator.go", ++ "mapparm.go", ++ "numparm.go", ++ "parm.go", ++ "pointerparm.go", ++ "stringparm.go", ++ "structparm.go", ++ "typedefparm.go", ++ "wraprand.go", ++ ], ++ importpath = "golang.org/x/tools/cmd/signature-fuzzer/internal/fuzz-generator", ++ visibility = ["//cmd/signature-fuzzer:__subpackages__"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":fuzz-generator", ++ visibility = ["//cmd/signature-fuzzer:__subpackages__"], ++) ++ ++go_test( ++ name = "fuzz-generator_test", ++ srcs = ["gen_test.go"], ++ embed = [":fuzz-generator"], ++ deps = ["//internal/testenv"], ++) +diff -urN b/cmd/splitdwarf/BUILD.bazel c/cmd/splitdwarf/BUILD.bazel +--- b/cmd/splitdwarf/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/cmd/splitdwarf/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,44 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_binary", "go_library") ++ ++go_library( ++ name = "splitdwarf_lib", ++ srcs = ["splitdwarf.go"], ++ importpath = "golang.org/x/tools/cmd/splitdwarf", ++ visibility = ["//visibility:private"], ++ deps = select({ ++ "@io_bazel_rules_go//go/platform:aix": [ ++ "//cmd/splitdwarf/internal/macho", ++ ], ++ "@io_bazel_rules_go//go/platform:android": [ ++ "//cmd/splitdwarf/internal/macho", ++ ], ++ "@io_bazel_rules_go//go/platform:darwin": [ ++ "//cmd/splitdwarf/internal/macho", ++ ], ++ "@io_bazel_rules_go//go/platform:dragonfly": [ ++ "//cmd/splitdwarf/internal/macho", ++ ], ++ 
"@io_bazel_rules_go//go/platform:freebsd": [ ++ "//cmd/splitdwarf/internal/macho", ++ ], ++ "@io_bazel_rules_go//go/platform:ios": [ ++ "//cmd/splitdwarf/internal/macho", ++ ], ++ "@io_bazel_rules_go//go/platform:linux": [ ++ "//cmd/splitdwarf/internal/macho", ++ ], ++ "@io_bazel_rules_go//go/platform:netbsd": [ ++ "//cmd/splitdwarf/internal/macho", ++ ], ++ "@io_bazel_rules_go//go/platform:openbsd": [ ++ "//cmd/splitdwarf/internal/macho", ++ ], ++ "//conditions:default": [], ++ }), ++) ++ ++go_binary( ++ name = "splitdwarf", ++ embed = [":splitdwarf_lib"], ++ visibility = ["//visibility:public"], ++) +diff -urN b/cmd/splitdwarf/internal/macho/BUILD.bazel c/cmd/splitdwarf/internal/macho/BUILD.bazel +--- b/cmd/splitdwarf/internal/macho/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/cmd/splitdwarf/internal/macho/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,27 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test") ++ ++go_library( ++ name = "macho", ++ srcs = [ ++ "fat.go", ++ "file.go", ++ "macho.go", ++ "reloctype.go", ++ "reloctype_string.go", ++ ], ++ importpath = "golang.org/x/tools/cmd/splitdwarf/internal/macho", ++ visibility = ["//cmd/splitdwarf:__subpackages__"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":macho", ++ visibility = ["//cmd/splitdwarf:__subpackages__"], ++) ++ ++go_test( ++ name = "macho_test", ++ srcs = ["file_test.go"], ++ data = glob(["testdata/**"]), ++ embed = [":macho"], ++) +diff -urN b/cmd/ssadump/BUILD.bazel c/cmd/ssadump/BUILD.bazel +--- b/cmd/ssadump/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/cmd/ssadump/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,20 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_binary", "go_library") ++ ++go_library( ++ name = "ssadump_lib", ++ srcs = ["main.go"], ++ importpath = "golang.org/x/tools/cmd/ssadump", ++ visibility = ["//visibility:private"], ++ deps = [ ++ "//go/packages", ++ "//go/ssa", ++ "//go/ssa/interp", ++ "//go/ssa/ssautil", ++ ], ++) ++ ++go_binary( ++ name = "ssadump", ++ embed = [":ssadump_lib"], ++ visibility = ["//visibility:public"], ++) +diff -urN b/cmd/stress/BUILD.bazel c/cmd/stress/BUILD.bazel +--- b/cmd/stress/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/cmd/stress/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_binary", "go_library") ++ ++go_library( ++ name = "stress_lib", ++ srcs = ["stress.go"], ++ importpath = "golang.org/x/tools/cmd/stress", ++ visibility = ["//visibility:private"], ++) ++ ++go_binary( ++ name = "stress", ++ embed = [":stress_lib"], ++ visibility = ["//visibility:public"], ++) +diff -urN b/cmd/stringer/BUILD.bazel c/cmd/stringer/BUILD.bazel +--- b/cmd/stringer/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/cmd/stringer/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,83 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_binary", "go_library", "go_test") ++ ++go_library( ++ name = "stringer_lib", ++ srcs = ["stringer.go"], ++ importpath = "golang.org/x/tools/cmd/stringer", ++ visibility = ["//visibility:private"], ++ deps = ["//go/packages"], ++) ++ ++go_binary( ++ name = "stringer", ++ embed = [":stringer_lib"], ++ visibility = ["//visibility:public"], ++) ++ ++go_test( ++ name = "stringer_test", ++ srcs = [ ++ "endtoend_test.go", ++ "golden_test.go", ++ "multifile_test.go", ++ "util_test.go", ++ ], ++ embed = [":stringer_lib"], ++ deps = [ ++ "//internal/testenv", ++ ] + select({ ++ 
"@io_bazel_rules_go//go/platform:aix": [ ++ "//internal/diffp", ++ "//txtar", ++ ], ++ "@io_bazel_rules_go//go/platform:darwin": [ ++ "//internal/diffp", ++ "//txtar", ++ ], ++ "@io_bazel_rules_go//go/platform:dragonfly": [ ++ "//internal/diffp", ++ "//txtar", ++ ], ++ "@io_bazel_rules_go//go/platform:freebsd": [ ++ "//internal/diffp", ++ "//txtar", ++ ], ++ "@io_bazel_rules_go//go/platform:illumos": [ ++ "//internal/diffp", ++ "//txtar", ++ ], ++ "@io_bazel_rules_go//go/platform:ios": [ ++ "//internal/diffp", ++ "//txtar", ++ ], ++ "@io_bazel_rules_go//go/platform:js": [ ++ "//internal/diffp", ++ "//txtar", ++ ], ++ "@io_bazel_rules_go//go/platform:linux": [ ++ "//internal/diffp", ++ "//txtar", ++ ], ++ "@io_bazel_rules_go//go/platform:netbsd": [ ++ "//internal/diffp", ++ "//txtar", ++ ], ++ "@io_bazel_rules_go//go/platform:openbsd": [ ++ "//internal/diffp", ++ "//txtar", ++ ], ++ "@io_bazel_rules_go//go/platform:plan9": [ ++ "//internal/diffp", ++ "//txtar", ++ ], ++ "@io_bazel_rules_go//go/platform:solaris": [ ++ "//internal/diffp", ++ "//txtar", ++ ], ++ "@io_bazel_rules_go//go/platform:windows": [ ++ "//internal/diffp", ++ "//txtar", ++ ], ++ "//conditions:default": [], ++ }), ++) +diff -urN b/cmd/stringer/testdata/BUILD.bazel c/cmd/stringer/testdata/BUILD.bazel +--- b/cmd/stringer/testdata/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/cmd/stringer/testdata/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,30 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_binary", "go_library") ++ ++go_library( ++ name = "testdata_lib", ++ srcs = [ ++ "cgo.go", ++ "conv.go", ++ "conv2.go", ++ "day.go", ++ "gap.go", ++ "int8overflow.go", ++ "num.go", ++ "number.go", ++ "prime.go", ++ "prime2.go", ++ "tag_main.go", ++ "unum.go", ++ "unum2.go", ++ "vary_day.go", ++ ], ++ cgo = True, ++ importpath = "golang.org/x/tools/cmd/stringer/testdata", ++ visibility = ["//visibility:private"], ++) ++ ++go_binary( ++ name = "testdata", ++ embed = [":testdata_lib"], ++ visibility = ["//visibility:public"], ++) +diff -urN b/cmd/toolstash/BUILD.bazel c/cmd/toolstash/BUILD.bazel +--- b/cmd/toolstash/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/cmd/toolstash/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,17 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_binary", "go_library") ++ ++go_library( ++ name = "toolstash_lib", ++ srcs = [ ++ "cmp.go", ++ "main.go", ++ ], ++ importpath = "golang.org/x/tools/cmd/toolstash", ++ visibility = ["//visibility:private"], ++) ++ ++go_binary( ++ name = "toolstash", ++ embed = [":toolstash_lib"], ++ visibility = ["//visibility:public"], ++) +diff -urN b/container/intsets/BUILD.bazel c/container/intsets/BUILD.bazel +--- b/container/intsets/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/container/intsets/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,23 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test") ++ ++go_library( ++ name = "intsets", ++ srcs = ["sparse.go"], ++ importpath = "golang.org/x/tools/container/intsets", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":intsets", ++ visibility = ["//visibility:public"], ++) ++ ++go_test( ++ name = "intsets_test", ++ srcs = [ ++ "export_test.go", ++ "sparse_test.go", ++ ], ++ embed = [":intsets"], ++) +diff -urN b/copyright/BUILD.bazel c/copyright/BUILD.bazel +--- b/copyright/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/copyright/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,20 @@ 
++load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test") ++ ++go_library( ++ name = "copyright", ++ srcs = ["copyright.go"], ++ importpath = "golang.org/x/tools/copyright", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":copyright", ++ visibility = ["//visibility:public"], ++) ++ ++go_test( ++ name = "copyright_test", ++ srcs = ["copyright_test.go"], ++ embed = [":copyright"], ++) +diff -urN b/cover/BUILD.bazel c/cover/BUILD.bazel +--- b/cover/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/cover/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,20 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test") ++ ++go_library( ++ name = "cover", ++ srcs = ["profile.go"], ++ importpath = "golang.org/x/tools/cover", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":cover", ++ visibility = ["//visibility:public"], ++) ++ ++go_test( ++ name = "cover_test", ++ srcs = ["profile_test.go"], ++ embed = [":cover"], ++) +diff -urN b/go/analysis/analysistest/BUILD.bazel c/go/analysis/analysistest/BUILD.bazel +--- b/go/analysis/analysistest/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/analysistest/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,36 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test") ++ ++go_library( ++ name = "analysistest", ++ srcs = ["analysistest.go"], ++ importpath = "golang.org/x/tools/go/analysis/analysistest", ++ visibility = ["//visibility:public"], ++ deps = [ ++ "//go/analysis", ++ "//go/analysis/checker", ++ "//go/analysis/internal", ++ "//go/packages", ++ "//internal/diff", ++ "//internal/testenv", ++ "//txtar", ++ ], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":analysistest", ++ visibility = ["//visibility:public"], ++) ++ ++go_test( ++ name = "analysistest_test", ++ srcs = ["analysistest_test.go"], ++ deps = [ ++ ":analysistest", ++ "//go/analysis", ++ "//go/analysis/passes/findcall", ++ "//internal/testenv", ++ "//internal/testfiles", ++ "//txtar", ++ ], ++) +diff -urN b/go/analysis/BUILD.bazel c/go/analysis/BUILD.bazel +--- b/go/analysis/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,25 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test") ++ ++go_library( ++ name = "analysis", ++ srcs = [ ++ "analysis.go", ++ "diagnostic.go", ++ "doc.go", ++ "validate.go", ++ ], ++ importpath = "golang.org/x/tools/go/analysis", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":analysis", ++ visibility = ["//visibility:public"], ++) ++ ++go_test( ++ name = "analysis_test", ++ srcs = ["validate_test.go"], ++ embed = [":analysis"], ++) +diff -urN b/go/analysis/checker/BUILD.bazel c/go/analysis/checker/BUILD.bazel +--- b/go/analysis/checker/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/checker/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,104 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test") ++ ++go_library( ++ name = "checker", ++ srcs = [ ++ "checker.go", ++ "print.go", ++ ], ++ importpath = "golang.org/x/tools/go/analysis/checker", ++ visibility = ["//visibility:public"], ++ deps = [ ++ "//go/analysis", ++ "//go/analysis/internal", ++ "//go/analysis/internal/analysisflags", ++ "//go/packages", ++ "//internal/analysisinternal", ++ ], ++) ++ ++alias( ++ name = "go_default_library", ++ 
actual = ":checker", ++ visibility = ["//visibility:public"], ++) ++ ++go_test( ++ name = "checker_test", ++ srcs = ["example_test.go"], ++ deps = select({ ++ "@io_bazel_rules_go//go/platform:386": [ ++ ":checker", ++ "//go/analysis", ++ "//go/packages", ++ "//txtar", ++ ], ++ "@io_bazel_rules_go//go/platform:amd64": [ ++ ":checker", ++ "//go/analysis", ++ "//go/packages", ++ "//txtar", ++ ], ++ "@io_bazel_rules_go//go/platform:arm": [ ++ ":checker", ++ "//go/analysis", ++ "//go/packages", ++ "//txtar", ++ ], ++ "@io_bazel_rules_go//go/platform:arm64": [ ++ ":checker", ++ "//go/analysis", ++ "//go/packages", ++ "//txtar", ++ ], ++ "@io_bazel_rules_go//go/platform:mips": [ ++ ":checker", ++ "//go/analysis", ++ "//go/packages", ++ "//txtar", ++ ], ++ "@io_bazel_rules_go//go/platform:mips64": [ ++ ":checker", ++ "//go/analysis", ++ "//go/packages", ++ "//txtar", ++ ], ++ "@io_bazel_rules_go//go/platform:mips64le": [ ++ ":checker", ++ "//go/analysis", ++ "//go/packages", ++ "//txtar", ++ ], ++ "@io_bazel_rules_go//go/platform:mipsle": [ ++ ":checker", ++ "//go/analysis", ++ "//go/packages", ++ "//txtar", ++ ], ++ "@io_bazel_rules_go//go/platform:ppc64": [ ++ ":checker", ++ "//go/analysis", ++ "//go/packages", ++ "//txtar", ++ ], ++ "@io_bazel_rules_go//go/platform:ppc64le": [ ++ ":checker", ++ "//go/analysis", ++ "//go/packages", ++ "//txtar", ++ ], ++ "@io_bazel_rules_go//go/platform:riscv64": [ ++ ":checker", ++ "//go/analysis", ++ "//go/packages", ++ "//txtar", ++ ], ++ "@io_bazel_rules_go//go/platform:s390x": [ ++ ":checker", ++ "//go/analysis", ++ "//go/packages", ++ "//txtar", ++ ], ++ "//conditions:default": [], ++ }), ++) +diff -urN b/go/analysis/internal/analysisflags/BUILD.bazel c/go/analysis/internal/analysisflags/BUILD.bazel +--- b/go/analysis/internal/analysisflags/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/internal/analysisflags/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,36 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test") ++ ++go_library( ++ name = "analysisflags", ++ srcs = [ ++ "fix.go", ++ "flags.go", ++ "help.go", ++ "url.go", ++ ], ++ importpath = "golang.org/x/tools/go/analysis/internal/analysisflags", ++ visibility = ["//go/analysis:__subpackages__"], ++ deps = [ ++ "//go/analysis", ++ "//internal/analysisinternal", ++ "//internal/diff", ++ ], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":analysisflags", ++ visibility = ["//go/analysis:__subpackages__"], ++) ++ ++go_test( ++ name = "analysisflags_test", ++ srcs = [ ++ "flags_test.go", ++ "url_test.go", ++ ], ++ deps = [ ++ ":analysisflags", ++ "//go/analysis", ++ ], ++) +diff -urN b/go/analysis/internal/BUILD.bazel c/go/analysis/internal/BUILD.bazel +--- b/go/analysis/internal/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/internal/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,15 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "internal", ++ srcs = ["internal.go"], ++ importpath = "golang.org/x/tools/go/analysis/internal", ++ visibility = ["//go/analysis:__subpackages__"], ++ deps = ["//go/analysis"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":internal", ++ visibility = ["//go/analysis:__subpackages__"], ++) +diff -urN b/go/analysis/internal/checker/BUILD.bazel c/go/analysis/internal/checker/BUILD.bazel +--- b/go/analysis/internal/checker/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/internal/checker/BUILD.bazel 2000-01-01 
00:00:00.000000000 -0000 +@@ -0,0 +1,47 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test") ++ ++go_library( ++ name = "checker", ++ srcs = ["checker.go"], ++ importpath = "golang.org/x/tools/go/analysis/internal/checker", ++ visibility = ["//go/analysis:__subpackages__"], ++ deps = [ ++ "//go/analysis", ++ "//go/analysis/checker", ++ "//go/analysis/internal", ++ "//go/analysis/internal/analysisflags", ++ "//go/packages", ++ ], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":checker", ++ visibility = ["//go/analysis:__subpackages__"], ++) ++ ++go_test( ++ name = "checker_test", ++ srcs = [ ++ "checker_test.go", ++ "fix_test.go", ++ "start_test.go", ++ ], ++ data = glob(["testdata/**"]), ++ deps = [ ++ ":checker", ++ "//go/analysis", ++ "//go/analysis/analysistest", ++ "//go/analysis/checker", ++ "//go/analysis/internal/analysisflags", ++ "//go/analysis/multichecker", ++ "//go/analysis/passes/inspect", ++ "//go/ast/inspector", ++ "//go/packages", ++ "//internal/diff", ++ "//internal/expect", ++ "//internal/testenv", ++ "//internal/testfiles", ++ "//txtar", ++ ], ++) +diff -urN b/go/analysis/internal/versiontest/BUILD.bazel c/go/analysis/internal/versiontest/BUILD.bazel +--- b/go/analysis/internal/versiontest/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/internal/versiontest/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,13 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_test") ++ ++go_test( ++ name = "versiontest_test", ++ srcs = ["version_test.go"], ++ deps = [ ++ "//go/analysis", ++ "//go/analysis/analysistest", ++ "//go/analysis/multichecker", ++ "//go/analysis/singlechecker", ++ "//internal/testenv", ++ ], ++) +diff -urN b/go/analysis/multichecker/BUILD.bazel c/go/analysis/multichecker/BUILD.bazel +--- b/go/analysis/multichecker/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/multichecker/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,31 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test") ++ ++go_library( ++ name = "multichecker", ++ srcs = ["multichecker.go"], ++ importpath = "golang.org/x/tools/go/analysis/multichecker", ++ visibility = ["//visibility:public"], ++ deps = [ ++ "//go/analysis", ++ "//go/analysis/internal/analysisflags", ++ "//go/analysis/internal/checker", ++ "//go/analysis/unitchecker", ++ ], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":multichecker", ++ visibility = ["//visibility:public"], ++) ++ ++go_test( ++ name = "multichecker_test", ++ srcs = ["multichecker_test.go"], ++ deps = [ ++ ":multichecker", ++ "//go/analysis", ++ "//go/analysis/passes/findcall", ++ "//internal/testenv", ++ ], ++) +diff -urN b/go/analysis/passes/appends/BUILD.bazel c/go/analysis/passes/appends/BUILD.bazel +--- b/go/analysis/passes/appends/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/appends/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,34 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test") ++ ++go_library( ++ name = "appends", ++ srcs = [ ++ "appends.go", ++ "doc.go", ++ ], ++ embedsrcs = ["doc.go"], ++ importpath = "golang.org/x/tools/go/analysis/passes/appends", ++ visibility = ["//visibility:public"], ++ deps = [ ++ "//go/analysis", ++ "//go/analysis/passes/inspect", ++ "//go/analysis/passes/internal/analysisutil", ++ "//go/ast/inspector", ++ "//go/types/typeutil", ++ ], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":appends", ++ visibility = ["//visibility:public"], ++) ++ 
++go_test( ++ name = "appends_test", ++ srcs = ["appends_test.go"], ++ deps = [ ++ ":appends", ++ "//go/analysis/analysistest", ++ ], ++) +diff -urN b/go/analysis/passes/appends/testdata/src/a/BUILD.bazel c/go/analysis/passes/appends/testdata/src/a/BUILD.bazel +--- b/go/analysis/passes/appends/testdata/src/a/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/appends/testdata/src/a/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "a", ++ srcs = ["a.go"], ++ importpath = "golang.org/x/tools/go/analysis/passes/appends/testdata/src/a", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":a", ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/analysis/passes/appends/testdata/src/b/BUILD.bazel c/go/analysis/passes/appends/testdata/src/b/BUILD.bazel +--- b/go/analysis/passes/appends/testdata/src/b/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/appends/testdata/src/b/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "b", ++ srcs = ["b.go"], ++ importpath = "golang.org/x/tools/go/analysis/passes/appends/testdata/src/b", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":b", ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/analysis/passes/asmdecl/BUILD.bazel c/go/analysis/passes/asmdecl/BUILD.bazel +--- b/go/analysis/passes/asmdecl/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/asmdecl/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,27 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test") ++ ++go_library( ++ name = "asmdecl", ++ srcs = ["asmdecl.go"], ++ importpath = "golang.org/x/tools/go/analysis/passes/asmdecl", ++ visibility = ["//visibility:public"], ++ deps = [ ++ "//go/analysis", ++ "//go/analysis/passes/internal/analysisutil", ++ ], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":asmdecl", ++ visibility = ["//visibility:public"], ++) ++ ++go_test( ++ name = "asmdecl_test", ++ srcs = ["asmdecl_test.go"], ++ deps = [ ++ ":asmdecl", ++ "//go/analysis/analysistest", ++ ], ++) +diff -urN b/go/analysis/passes/asmdecl/testdata/src/a/BUILD.bazel c/go/analysis/passes/asmdecl/testdata/src/a/BUILD.bazel +--- b/go/analysis/passes/asmdecl/testdata/src/a/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/asmdecl/testdata/src/a/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,26 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "a", ++ srcs = [ ++ "asm.go", ++ "asm1.s", ++ "asm11.s", ++ "asm2.s", ++ "asm3.s", ++ "asm4.s", ++ "asm5.s", ++ "asm6.s", ++ "asm7.s", ++ "asm8.s", ++ "asm9.s", ++ ], ++ importpath = "golang.org/x/tools/go/analysis/passes/asmdecl/testdata/src/a", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":a", ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/analysis/passes/assign/BUILD.bazel c/go/analysis/passes/assign/BUILD.bazel +--- b/go/analysis/passes/assign/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/assign/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,34 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test") ++ ++go_library( ++ name = "assign", ++ srcs = [ ++ 
"assign.go", ++ "doc.go", ++ ], ++ embedsrcs = ["doc.go"], ++ importpath = "golang.org/x/tools/go/analysis/passes/assign", ++ visibility = ["//visibility:public"], ++ deps = [ ++ "//go/analysis", ++ "//go/analysis/passes/inspect", ++ "//go/analysis/passes/internal/analysisutil", ++ "//go/ast/inspector", ++ "//internal/astutil", ++ ], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":assign", ++ visibility = ["//visibility:public"], ++) ++ ++go_test( ++ name = "assign_test", ++ srcs = ["assign_test.go"], ++ deps = [ ++ ":assign", ++ "//go/analysis/analysistest", ++ ], ++) +diff -urN b/go/analysis/passes/assign/testdata/src/a/BUILD.bazel c/go/analysis/passes/assign/testdata/src/a/BUILD.bazel +--- b/go/analysis/passes/assign/testdata/src/a/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/assign/testdata/src/a/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "a", ++ srcs = ["a.go"], ++ importpath = "golang.org/x/tools/go/analysis/passes/assign/testdata/src/a", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":a", ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/analysis/passes/assign/testdata/src/typeparams/BUILD.bazel c/go/analysis/passes/assign/testdata/src/typeparams/BUILD.bazel +--- b/go/analysis/passes/assign/testdata/src/typeparams/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/assign/testdata/src/typeparams/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "typeparams", ++ srcs = ["typeparams.go"], ++ importpath = "golang.org/x/tools/go/analysis/passes/assign/testdata/src/typeparams", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":typeparams", ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/analysis/passes/atomic/BUILD.bazel c/go/analysis/passes/atomic/BUILD.bazel +--- b/go/analysis/passes/atomic/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/atomic/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,36 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test") ++ ++go_library( ++ name = "atomic", ++ srcs = [ ++ "atomic.go", ++ "doc.go", ++ ], ++ embedsrcs = ["doc.go"], ++ importpath = "golang.org/x/tools/go/analysis/passes/atomic", ++ visibility = ["//visibility:public"], ++ deps = [ ++ "//go/analysis", ++ "//go/analysis/passes/inspect", ++ "//go/analysis/passes/internal/analysisutil", ++ "//go/ast/inspector", ++ "//go/types/typeutil", ++ "//internal/astutil", ++ "//internal/typesinternal", ++ ], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":atomic", ++ visibility = ["//visibility:public"], ++) ++ ++go_test( ++ name = "atomic_test", ++ srcs = ["atomic_test.go"], ++ deps = [ ++ ":atomic", ++ "//go/analysis/analysistest", ++ ], ++) +diff -urN b/go/analysis/passes/atomic/testdata/src/a/BUILD.bazel c/go/analysis/passes/atomic/testdata/src/a/BUILD.bazel +--- b/go/analysis/passes/atomic/testdata/src/a/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/atomic/testdata/src/a/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "a", ++ srcs = ["a.go"], ++ importpath = "golang.org/x/tools/go/analysis/passes/atomic/testdata/src/a", 
++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":a", ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/analysis/passes/atomic/testdata/src/typeparams/BUILD.bazel c/go/analysis/passes/atomic/testdata/src/typeparams/BUILD.bazel +--- b/go/analysis/passes/atomic/testdata/src/typeparams/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/atomic/testdata/src/typeparams/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "typeparams", ++ srcs = ["typeparams.go"], ++ importpath = "golang.org/x/tools/go/analysis/passes/atomic/testdata/src/typeparams", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":typeparams", ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/analysis/passes/atomicalign/BUILD.bazel c/go/analysis/passes/atomicalign/BUILD.bazel +--- b/go/analysis/passes/atomicalign/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/atomicalign/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,30 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test") ++ ++go_library( ++ name = "atomicalign", ++ srcs = ["atomicalign.go"], ++ importpath = "golang.org/x/tools/go/analysis/passes/atomicalign", ++ visibility = ["//visibility:public"], ++ deps = [ ++ "//go/analysis", ++ "//go/analysis/passes/inspect", ++ "//go/ast/inspector", ++ "//go/types/typeutil", ++ "//internal/typesinternal", ++ ], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":atomicalign", ++ visibility = ["//visibility:public"], ++) ++ ++go_test( ++ name = "atomicalign_test", ++ srcs = ["atomicalign_test.go"], ++ deps = [ ++ ":atomicalign", ++ "//go/analysis/analysistest", ++ ], ++) +diff -urN b/go/analysis/passes/atomicalign/testdata/src/a/BUILD.bazel c/go/analysis/passes/atomicalign/testdata/src/a/BUILD.bazel +--- b/go/analysis/passes/atomicalign/testdata/src/a/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/atomicalign/testdata/src/a/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,17 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "a", ++ srcs = [ ++ "a.go", ++ "stub.go", ++ ], ++ importpath = "golang.org/x/tools/go/analysis/passes/atomicalign/testdata/src/a", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":a", ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/analysis/passes/atomicalign/testdata/src/b/BUILD.bazel c/go/analysis/passes/atomicalign/testdata/src/b/BUILD.bazel +--- b/go/analysis/passes/atomicalign/testdata/src/b/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/atomicalign/testdata/src/b/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,17 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "b", ++ srcs = [ ++ "b.go", ++ "stub.go", ++ ], ++ importpath = "golang.org/x/tools/go/analysis/passes/atomicalign/testdata/src/b", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":b", ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/analysis/passes/bools/BUILD.bazel c/go/analysis/passes/bools/BUILD.bazel +--- b/go/analysis/passes/bools/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/bools/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 
+1,30 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test") ++ ++go_library( ++ name = "bools", ++ srcs = ["bools.go"], ++ importpath = "golang.org/x/tools/go/analysis/passes/bools", ++ visibility = ["//visibility:public"], ++ deps = [ ++ "//go/analysis", ++ "//go/analysis/passes/inspect", ++ "//go/analysis/passes/internal/analysisutil", ++ "//go/ast/inspector", ++ "//internal/astutil", ++ ], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":bools", ++ visibility = ["//visibility:public"], ++) ++ ++go_test( ++ name = "bools_test", ++ srcs = ["bools_test.go"], ++ deps = [ ++ ":bools", ++ "//go/analysis/analysistest", ++ ], ++) +diff -urN b/go/analysis/passes/bools/testdata/src/a/BUILD.bazel c/go/analysis/passes/bools/testdata/src/a/BUILD.bazel +--- b/go/analysis/passes/bools/testdata/src/a/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/bools/testdata/src/a/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "a", ++ srcs = ["a.go"], ++ importpath = "golang.org/x/tools/go/analysis/passes/bools/testdata/src/a", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":a", ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/analysis/passes/bools/testdata/src/typeparams/BUILD.bazel c/go/analysis/passes/bools/testdata/src/typeparams/BUILD.bazel +--- b/go/analysis/passes/bools/testdata/src/typeparams/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/bools/testdata/src/typeparams/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "typeparams", ++ srcs = ["typeparams.go"], ++ importpath = "golang.org/x/tools/go/analysis/passes/bools/testdata/src/typeparams", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":typeparams", ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/analysis/passes/buildssa/BUILD.bazel c/go/analysis/passes/buildssa/BUILD.bazel +--- b/go/analysis/passes/buildssa/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/buildssa/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,27 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test") ++ ++go_library( ++ name = "buildssa", ++ srcs = ["buildssa.go"], ++ importpath = "golang.org/x/tools/go/analysis/passes/buildssa", ++ visibility = ["//visibility:public"], ++ deps = [ ++ "//go/analysis", ++ "//go/ssa", ++ ], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":buildssa", ++ visibility = ["//visibility:public"], ++) ++ ++go_test( ++ name = "buildssa_test", ++ srcs = ["buildssa_test.go"], ++ deps = [ ++ ":buildssa", ++ "//go/analysis/analysistest", ++ ], ++) +diff -urN b/go/analysis/passes/buildssa/testdata/src/a/BUILD.bazel c/go/analysis/passes/buildssa/testdata/src/a/BUILD.bazel +--- b/go/analysis/passes/buildssa/testdata/src/a/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/buildssa/testdata/src/a/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "a", ++ srcs = ["a.go"], ++ importpath = "golang.org/x/tools/go/analysis/passes/buildssa/testdata/src/a", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":a", ++ visibility = 
["//visibility:public"], ++) +diff -urN b/go/analysis/passes/buildssa/testdata/src/b/BUILD.bazel c/go/analysis/passes/buildssa/testdata/src/b/BUILD.bazel +--- b/go/analysis/passes/buildssa/testdata/src/b/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/buildssa/testdata/src/b/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "b", ++ srcs = ["b.go"], ++ importpath = "golang.org/x/tools/go/analysis/passes/buildssa/testdata/src/b", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":b", ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/analysis/passes/buildssa/testdata/src/c/BUILD.bazel c/go/analysis/passes/buildssa/testdata/src/c/BUILD.bazel +--- b/go/analysis/passes/buildssa/testdata/src/c/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/buildssa/testdata/src/c/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "c", ++ srcs = ["c.go"], ++ importpath = "golang.org/x/tools/go/analysis/passes/buildssa/testdata/src/c", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":c", ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/analysis/passes/buildtag/BUILD.bazel c/go/analysis/passes/buildtag/BUILD.bazel +--- b/go/analysis/passes/buildtag/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/buildtag/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,29 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test") ++ ++go_library( ++ name = "buildtag", ++ srcs = ["buildtag.go"], ++ importpath = "golang.org/x/tools/go/analysis/passes/buildtag", ++ visibility = ["//visibility:public"], ++ deps = [ ++ "//go/analysis", ++ "//go/analysis/passes/internal/analysisutil", ++ "//internal/analysisinternal", ++ "//internal/versions", ++ ], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":buildtag", ++ visibility = ["//visibility:public"], ++) ++ ++go_test( ++ name = "buildtag_test", ++ srcs = ["buildtag_test.go"], ++ deps = [ ++ ":buildtag", ++ "//go/analysis/analysistest", ++ ], ++) +diff -urN b/go/analysis/passes/buildtag/testdata/src/a/BUILD.bazel c/go/analysis/passes/buildtag/testdata/src/a/BUILD.bazel +--- b/go/analysis/passes/buildtag/testdata/src/a/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/buildtag/testdata/src/a/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,18 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "a", ++ srcs = [ ++ "buildtag4.go", ++ "buildtag5.go", ++ "buildtag6.s", ++ ], ++ importpath = "golang.org/x/tools/go/analysis/passes/buildtag/testdata/src/a", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":a", ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/analysis/passes/buildtag/testdata/src/b/BUILD.bazel c/go/analysis/passes/buildtag/testdata/src/b/BUILD.bazel +--- b/go/analysis/passes/buildtag/testdata/src/b/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/buildtag/testdata/src/b/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "b", ++ srcs = ["vers1.go"], ++ importpath = 
"golang.org/x/tools/go/analysis/passes/buildtag/testdata/src/b", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":b", ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/analysis/passes/cgocall/BUILD.bazel c/go/analysis/passes/cgocall/BUILD.bazel +--- b/go/analysis/passes/cgocall/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/cgocall/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,31 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test") ++ ++go_library( ++ name = "cgocall", ++ srcs = [ ++ "cgocall.go", ++ "cgocall_go120.go", ++ "cgocall_go121.go", ++ ], ++ importpath = "golang.org/x/tools/go/analysis/passes/cgocall", ++ visibility = ["//visibility:public"], ++ deps = [ ++ "//go/analysis", ++ "//internal/typesinternal", ++ ], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":cgocall", ++ visibility = ["//visibility:public"], ++) ++ ++go_test( ++ name = "cgocall_test", ++ srcs = ["cgocall_test.go"], ++ deps = [ ++ ":cgocall", ++ "//go/analysis/analysistest", ++ ], ++) +diff -urN b/go/analysis/passes/cgocall/testdata/src/a/BUILD.bazel c/go/analysis/passes/cgocall/testdata/src/a/BUILD.bazel +--- b/go/analysis/passes/cgocall/testdata/src/a/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/cgocall/testdata/src/a/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,18 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "a", ++ srcs = [ ++ "cgo.go", ++ "cgo3.go", ++ ], ++ cgo = True, ++ importpath = "golang.org/x/tools/go/analysis/passes/cgocall/testdata/src/a", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":a", ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/analysis/passes/cgocall/testdata/src/b/BUILD.bazel c/go/analysis/passes/cgocall/testdata/src/b/BUILD.bazel +--- b/go/analysis/passes/cgocall/testdata/src/b/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/cgocall/testdata/src/b/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "b", ++ srcs = ["b.go"], ++ importpath = "golang.org/x/tools/go/analysis/passes/cgocall/testdata/src/b", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":b", ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/analysis/passes/cgocall/testdata/src/c/BUILD.bazel c/go/analysis/passes/cgocall/testdata/src/c/BUILD.bazel +--- b/go/analysis/passes/cgocall/testdata/src/c/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/cgocall/testdata/src/c/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "c", ++ srcs = ["c.go"], ++ importpath = "golang.org/x/tools/go/analysis/passes/cgocall/testdata/src/c", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":c", ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/analysis/passes/cgocall/testdata/src/typeparams/BUILD.bazel c/go/analysis/passes/cgocall/testdata/src/typeparams/BUILD.bazel +--- b/go/analysis/passes/cgocall/testdata/src/typeparams/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/cgocall/testdata/src/typeparams/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,15 @@ 
++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "typeparams", ++ srcs = ["typeparams.go"], ++ cgo = True, ++ importpath = "golang.org/x/tools/go/analysis/passes/cgocall/testdata/src/typeparams", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":typeparams", ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/analysis/passes/composite/BUILD.bazel c/go/analysis/passes/composite/BUILD.bazel +--- b/go/analysis/passes/composite/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/composite/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,32 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test") ++ ++go_library( ++ name = "composite", ++ srcs = [ ++ "composite.go", ++ "whitelist.go", ++ ], ++ importpath = "golang.org/x/tools/go/analysis/passes/composite", ++ visibility = ["//visibility:public"], ++ deps = [ ++ "//go/analysis", ++ "//go/analysis/passes/inspect", ++ "//go/ast/inspector", ++ "//internal/typeparams", ++ ], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":composite", ++ visibility = ["//visibility:public"], ++) ++ ++go_test( ++ name = "composite_test", ++ srcs = ["composite_test.go"], ++ deps = [ ++ ":composite", ++ "//go/analysis/analysistest", ++ ], ++) +diff -urN b/go/analysis/passes/composite/testdata/src/a/BUILD.bazel c/go/analysis/passes/composite/testdata/src/a/BUILD.bazel +--- b/go/analysis/passes/composite/testdata/src/a/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/composite/testdata/src/a/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,20 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test") ++ ++go_library( ++ name = "a", ++ srcs = ["a.go"], ++ importpath = "golang.org/x/tools/go/analysis/passes/composite/testdata/src/a", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":a", ++ visibility = ["//visibility:public"], ++) ++ ++go_test( ++ name = "a_test", ++ srcs = ["a_fuzz_test.go"], ++ embed = [":a"], ++) +diff -urN b/go/analysis/passes/composite/testdata/src/typeparams/BUILD.bazel c/go/analysis/passes/composite/testdata/src/typeparams/BUILD.bazel +--- b/go/analysis/passes/composite/testdata/src/typeparams/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/composite/testdata/src/typeparams/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "typeparams", ++ srcs = ["typeparams.go"], ++ importpath = "golang.org/x/tools/go/analysis/passes/composite/testdata/src/typeparams", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":typeparams", ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/analysis/passes/composite/testdata/src/typeparams/lib/BUILD.bazel c/go/analysis/passes/composite/testdata/src/typeparams/lib/BUILD.bazel +--- b/go/analysis/passes/composite/testdata/src/typeparams/lib/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/composite/testdata/src/typeparams/lib/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "lib", ++ srcs = ["lib.go"], ++ importpath = "golang.org/x/tools/go/analysis/passes/composite/testdata/src/typeparams/lib", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = 
"go_default_library", ++ actual = ":lib", ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/analysis/passes/copylock/BUILD.bazel c/go/analysis/passes/copylock/BUILD.bazel +--- b/go/analysis/passes/copylock/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/copylock/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,33 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test") ++ ++go_library( ++ name = "copylock", ++ srcs = ["copylock.go"], ++ importpath = "golang.org/x/tools/go/analysis/passes/copylock", ++ visibility = ["//visibility:public"], ++ deps = [ ++ "//go/analysis", ++ "//go/analysis/passes/inspect", ++ "//go/ast/inspector", ++ "//internal/astutil", ++ "//internal/typeparams", ++ "//internal/typesinternal", ++ "//internal/versions", ++ ], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":copylock", ++ visibility = ["//visibility:public"], ++) ++ ++go_test( ++ name = "copylock_test", ++ srcs = ["copylock_test.go"], ++ deps = [ ++ ":copylock", ++ "//go/analysis/analysistest", ++ "//internal/testfiles", ++ ], ++) +diff -urN b/go/analysis/passes/copylock/testdata/src/a/BUILD.bazel c/go/analysis/passes/copylock/testdata/src/a/BUILD.bazel +--- b/go/analysis/passes/copylock/testdata/src/a/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/copylock/testdata/src/a/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,20 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "a", ++ srcs = [ ++ "copylock.go", ++ "copylock_func.go", ++ "copylock_range.go", ++ "issue61678.go", ++ "newexpr_go126.go", ++ ], ++ importpath = "golang.org/x/tools/go/analysis/passes/copylock/testdata/src/a", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":a", ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/analysis/passes/copylock/testdata/src/issue67787/BUILD.bazel c/go/analysis/passes/copylock/testdata/src/issue67787/BUILD.bazel +--- b/go/analysis/passes/copylock/testdata/src/issue67787/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/copylock/testdata/src/issue67787/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "issue67787", ++ srcs = ["issue67787.go"], ++ importpath = "golang.org/x/tools/go/analysis/passes/copylock/testdata/src/issue67787", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":issue67787", ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/analysis/passes/copylock/testdata/src/typeparams/BUILD.bazel c/go/analysis/passes/copylock/testdata/src/typeparams/BUILD.bazel +--- b/go/analysis/passes/copylock/testdata/src/typeparams/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/copylock/testdata/src/typeparams/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "typeparams", ++ srcs = ["typeparams.go"], ++ importpath = "golang.org/x/tools/go/analysis/passes/copylock/testdata/src/typeparams", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":typeparams", ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/analysis/passes/copylock/testdata/src/unfortunate/BUILD.bazel c/go/analysis/passes/copylock/testdata/src/unfortunate/BUILD.bazel +--- 
b/go/analysis/passes/copylock/testdata/src/unfortunate/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/copylock/testdata/src/unfortunate/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,17 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "unfortunate", ++ srcs = [ ++ "local_go123.go", ++ "local_go124.go", ++ ], ++ importpath = "golang.org/x/tools/go/analysis/passes/copylock/testdata/src/unfortunate", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":unfortunate", ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/analysis/passes/ctrlflow/BUILD.bazel c/go/analysis/passes/ctrlflow/BUILD.bazel +--- b/go/analysis/passes/ctrlflow/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/ctrlflow/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,30 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test") ++ ++go_library( ++ name = "ctrlflow", ++ srcs = ["ctrlflow.go"], ++ importpath = "golang.org/x/tools/go/analysis/passes/ctrlflow", ++ visibility = ["//visibility:public"], ++ deps = [ ++ "//go/analysis", ++ "//go/analysis/passes/inspect", ++ "//go/ast/inspector", ++ "//go/cfg", ++ "//go/types/typeutil", ++ ], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":ctrlflow", ++ visibility = ["//visibility:public"], ++) ++ ++go_test( ++ name = "ctrlflow_test", ++ srcs = ["ctrlflow_test.go"], ++ deps = [ ++ ":ctrlflow", ++ "//go/analysis/analysistest", ++ ], ++) +diff -urN b/go/analysis/passes/ctrlflow/testdata/src/a/BUILD.bazel c/go/analysis/passes/ctrlflow/testdata/src/a/BUILD.bazel +--- b/go/analysis/passes/ctrlflow/testdata/src/a/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/ctrlflow/testdata/src/a/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "a", ++ srcs = ["a.go"], ++ importpath = "golang.org/x/tools/go/analysis/passes/ctrlflow/testdata/src/a", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":a", ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/analysis/passes/ctrlflow/testdata/src/lib/BUILD.bazel c/go/analysis/passes/ctrlflow/testdata/src/lib/BUILD.bazel +--- b/go/analysis/passes/ctrlflow/testdata/src/lib/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/ctrlflow/testdata/src/lib/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "lib", ++ srcs = ["lib.go"], ++ importpath = "golang.org/x/tools/go/analysis/passes/ctrlflow/testdata/src/lib", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":lib", ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/analysis/passes/ctrlflow/testdata/src/typeparams/BUILD.bazel c/go/analysis/passes/ctrlflow/testdata/src/typeparams/BUILD.bazel +--- b/go/analysis/passes/ctrlflow/testdata/src/typeparams/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/ctrlflow/testdata/src/typeparams/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "typeparams", ++ srcs = ["typeparams.go"], ++ importpath = "golang.org/x/tools/go/analysis/passes/ctrlflow/testdata/src/typeparams", ++ visibility = 
["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":typeparams", ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/analysis/passes/deepequalerrors/BUILD.bazel c/go/analysis/passes/deepequalerrors/BUILD.bazel +--- b/go/analysis/passes/deepequalerrors/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/deepequalerrors/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,30 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test") ++ ++go_library( ++ name = "deepequalerrors", ++ srcs = ["deepequalerrors.go"], ++ importpath = "golang.org/x/tools/go/analysis/passes/deepequalerrors", ++ visibility = ["//visibility:public"], ++ deps = [ ++ "//go/analysis", ++ "//go/analysis/passes/inspect", ++ "//go/ast/inspector", ++ "//go/types/typeutil", ++ "//internal/typesinternal", ++ ], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":deepequalerrors", ++ visibility = ["//visibility:public"], ++) ++ ++go_test( ++ name = "deepequalerrors_test", ++ srcs = ["deepequalerrors_test.go"], ++ deps = [ ++ ":deepequalerrors", ++ "//go/analysis/analysistest", ++ ], ++) +diff -urN b/go/analysis/passes/deepequalerrors/testdata/src/a/BUILD.bazel c/go/analysis/passes/deepequalerrors/testdata/src/a/BUILD.bazel +--- b/go/analysis/passes/deepequalerrors/testdata/src/a/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/deepequalerrors/testdata/src/a/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "a", ++ srcs = ["a.go"], ++ importpath = "golang.org/x/tools/go/analysis/passes/deepequalerrors/testdata/src/a", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":a", ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/analysis/passes/deepequalerrors/testdata/src/typeparams/BUILD.bazel c/go/analysis/passes/deepequalerrors/testdata/src/typeparams/BUILD.bazel +--- b/go/analysis/passes/deepequalerrors/testdata/src/typeparams/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/deepequalerrors/testdata/src/typeparams/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "typeparams", ++ srcs = ["typeparams.go"], ++ importpath = "golang.org/x/tools/go/analysis/passes/deepequalerrors/testdata/src/typeparams", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":typeparams", ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/analysis/passes/defers/BUILD.bazel c/go/analysis/passes/defers/BUILD.bazel +--- b/go/analysis/passes/defers/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/defers/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,35 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test") ++ ++go_library( ++ name = "defers", ++ srcs = [ ++ "defers.go", ++ "doc.go", ++ ], ++ embedsrcs = ["doc.go"], ++ importpath = "golang.org/x/tools/go/analysis/passes/defers", ++ visibility = ["//visibility:public"], ++ deps = [ ++ "//go/analysis", ++ "//go/analysis/passes/inspect", ++ "//go/analysis/passes/internal/analysisutil", ++ "//go/ast/inspector", ++ "//go/types/typeutil", ++ "//internal/typesinternal", ++ ], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":defers", ++ visibility = ["//visibility:public"], ++) ++ ++go_test( ++ name = 
"defers_test", ++ srcs = ["defers_test.go"], ++ deps = [ ++ ":defers", ++ "//go/analysis/analysistest", ++ ], ++) +diff -urN b/go/analysis/passes/defers/cmd/defers/BUILD.bazel c/go/analysis/passes/defers/cmd/defers/BUILD.bazel +--- b/go/analysis/passes/defers/cmd/defers/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/defers/cmd/defers/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,18 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_binary", "go_library") ++ ++go_library( ++ name = "defers_lib", ++ srcs = ["main.go"], ++ importpath = "golang.org/x/tools/go/analysis/passes/defers/cmd/defers", ++ visibility = ["//visibility:private"], ++ deps = [ ++ "//go/analysis/passes/defers", ++ "//go/analysis/singlechecker", ++ ], ++) ++ ++go_binary( ++ name = "defers", ++ embed = [":defers_lib"], ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/analysis/passes/defers/testdata/src/a/BUILD.bazel c/go/analysis/passes/defers/testdata/src/a/BUILD.bazel +--- b/go/analysis/passes/defers/testdata/src/a/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/defers/testdata/src/a/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "a", ++ srcs = ["a.go"], ++ importpath = "golang.org/x/tools/go/analysis/passes/defers/testdata/src/a", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":a", ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/analysis/passes/directive/BUILD.bazel c/go/analysis/passes/directive/BUILD.bazel +--- b/go/analysis/passes/directive/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/directive/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,27 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test") ++ ++go_library( ++ name = "directive", ++ srcs = ["directive.go"], ++ importpath = "golang.org/x/tools/go/analysis/passes/directive", ++ visibility = ["//visibility:public"], ++ deps = [ ++ "//go/analysis", ++ "//go/analysis/passes/internal/analysisutil", ++ ], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":directive", ++ visibility = ["//visibility:public"], ++) ++ ++go_test( ++ name = "directive_test", ++ srcs = ["directive_test.go"], ++ deps = [ ++ ":directive", ++ "//go/analysis/analysistest", ++ ], ++) +diff -urN b/go/analysis/passes/directive/testdata/src/a/BUILD.bazel c/go/analysis/passes/directive/testdata/src/a/BUILD.bazel +--- b/go/analysis/passes/directive/testdata/src/a/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/directive/testdata/src/a/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,22 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test") ++ ++go_library( ++ name = "a", ++ srcs = [ ++ "misplaced.s", ++ "p.go", ++ ], ++ importpath = "golang.org/x/tools/go/analysis/passes/directive/testdata/src/a", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":a", ++ visibility = ["//visibility:public"], ++) ++ ++go_test( ++ name = "a_test", ++ srcs = ["misplaced_test.go"], ++) +diff -urN b/go/analysis/passes/errorsas/BUILD.bazel c/go/analysis/passes/errorsas/BUILD.bazel +--- b/go/analysis/passes/errorsas/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/errorsas/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,28 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library", 
"go_test") ++ ++go_library( ++ name = "errorsas", ++ srcs = ["errorsas.go"], ++ importpath = "golang.org/x/tools/go/analysis/passes/errorsas", ++ visibility = ["//visibility:public"], ++ deps = [ ++ "//go/analysis", ++ "//internal/analysisinternal/typeindex", ++ "//internal/typesinternal/typeindex", ++ ], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":errorsas", ++ visibility = ["//visibility:public"], ++) ++ ++go_test( ++ name = "errorsas_test", ++ srcs = ["errorsas_test.go"], ++ deps = [ ++ ":errorsas", ++ "//go/analysis/analysistest", ++ ], ++) +diff -urN b/go/analysis/passes/errorsas/testdata/src/a/BUILD.bazel c/go/analysis/passes/errorsas/testdata/src/a/BUILD.bazel +--- b/go/analysis/passes/errorsas/testdata/src/a/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/errorsas/testdata/src/a/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "a", ++ srcs = ["a.go"], ++ importpath = "golang.org/x/tools/go/analysis/passes/errorsas/testdata/src/a", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":a", ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/analysis/passes/errorsas/testdata/src/typeparams/BUILD.bazel c/go/analysis/passes/errorsas/testdata/src/typeparams/BUILD.bazel +--- b/go/analysis/passes/errorsas/testdata/src/typeparams/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/errorsas/testdata/src/typeparams/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "typeparams", ++ srcs = ["typeparams.go"], ++ importpath = "golang.org/x/tools/go/analysis/passes/errorsas/testdata/src/typeparams", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":typeparams", ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/analysis/passes/fieldalignment/BUILD.bazel c/go/analysis/passes/fieldalignment/BUILD.bazel +--- b/go/analysis/passes/fieldalignment/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/fieldalignment/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,28 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test") ++ ++go_library( ++ name = "fieldalignment", ++ srcs = ["fieldalignment.go"], ++ importpath = "golang.org/x/tools/go/analysis/passes/fieldalignment", ++ visibility = ["//visibility:public"], ++ deps = [ ++ "//go/analysis", ++ "//go/analysis/passes/inspect", ++ "//go/ast/inspector", ++ ], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":fieldalignment", ++ visibility = ["//visibility:public"], ++) ++ ++go_test( ++ name = "fieldalignment_test", ++ srcs = ["fieldalignment_test.go"], ++ deps = [ ++ ":fieldalignment", ++ "//go/analysis/analysistest", ++ ], ++) +diff -urN b/go/analysis/passes/fieldalignment/cmd/fieldalignment/BUILD.bazel c/go/analysis/passes/fieldalignment/cmd/fieldalignment/BUILD.bazel +--- b/go/analysis/passes/fieldalignment/cmd/fieldalignment/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/fieldalignment/cmd/fieldalignment/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,18 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_binary", "go_library") ++ ++go_library( ++ name = "fieldalignment_lib", ++ srcs = ["main.go"], ++ importpath = "golang.org/x/tools/go/analysis/passes/fieldalignment/cmd/fieldalignment", 
++ visibility = ["//visibility:private"], ++ deps = [ ++ "//go/analysis/passes/fieldalignment", ++ "//go/analysis/singlechecker", ++ ], ++) ++ ++go_binary( ++ name = "fieldalignment", ++ embed = [":fieldalignment_lib"], ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/analysis/passes/fieldalignment/testdata/src/a/BUILD.bazel c/go/analysis/passes/fieldalignment/testdata/src/a/BUILD.bazel +--- b/go/analysis/passes/fieldalignment/testdata/src/a/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/fieldalignment/testdata/src/a/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,18 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "a", ++ srcs = [ ++ "a.go", ++ "a_386.go", ++ "a_amd64.go", ++ ], ++ importpath = "golang.org/x/tools/go/analysis/passes/fieldalignment/testdata/src/a", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":a", ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/analysis/passes/findcall/BUILD.bazel c/go/analysis/passes/findcall/BUILD.bazel +--- b/go/analysis/passes/findcall/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/findcall/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,24 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test") ++ ++go_library( ++ name = "findcall", ++ srcs = ["findcall.go"], ++ importpath = "golang.org/x/tools/go/analysis/passes/findcall", ++ visibility = ["//visibility:public"], ++ deps = ["//go/analysis"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":findcall", ++ visibility = ["//visibility:public"], ++) ++ ++go_test( ++ name = "findcall_test", ++ srcs = ["findcall_test.go"], ++ deps = [ ++ ":findcall", ++ "//go/analysis/analysistest", ++ ], ++) +diff -urN b/go/analysis/passes/findcall/cmd/findcall/BUILD.bazel c/go/analysis/passes/findcall/cmd/findcall/BUILD.bazel +--- b/go/analysis/passes/findcall/cmd/findcall/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/findcall/cmd/findcall/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,18 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_binary", "go_library") ++ ++go_library( ++ name = "findcall_lib", ++ srcs = ["main.go"], ++ importpath = "golang.org/x/tools/go/analysis/passes/findcall/cmd/findcall", ++ visibility = ["//visibility:private"], ++ deps = [ ++ "//go/analysis/passes/findcall", ++ "//go/analysis/singlechecker", ++ ], ++) ++ ++go_binary( ++ name = "findcall", ++ embed = [":findcall_lib"], ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/analysis/passes/findcall/testdata/src/a/BUILD.bazel c/go/analysis/passes/findcall/testdata/src/a/BUILD.bazel +--- b/go/analysis/passes/findcall/testdata/src/a/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/findcall/testdata/src/a/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_binary", "go_library") ++ ++go_library( ++ name = "a_lib", ++ srcs = ["a.go"], ++ importpath = "golang.org/x/tools/go/analysis/passes/findcall/testdata/src/a", ++ visibility = ["//visibility:private"], ++) ++ ++go_binary( ++ name = "a", ++ embed = [":a_lib"], ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/analysis/passes/framepointer/BUILD.bazel c/go/analysis/passes/framepointer/BUILD.bazel +--- b/go/analysis/passes/framepointer/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/framepointer/BUILD.bazel 
2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,27 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test") ++ ++go_library( ++ name = "framepointer", ++ srcs = ["framepointer.go"], ++ importpath = "golang.org/x/tools/go/analysis/passes/framepointer", ++ visibility = ["//visibility:public"], ++ deps = [ ++ "//go/analysis", ++ "//go/analysis/passes/internal/analysisutil", ++ ], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":framepointer", ++ visibility = ["//visibility:public"], ++) ++ ++go_test( ++ name = "framepointer_test", ++ srcs = ["framepointer_test.go"], ++ deps = [ ++ ":framepointer", ++ "//go/analysis/analysistest", ++ ], ++) +diff -urN b/go/analysis/passes/framepointer/testdata/src/a/BUILD.bazel c/go/analysis/passes/framepointer/testdata/src/a/BUILD.bazel +--- b/go/analysis/passes/framepointer/testdata/src/a/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/framepointer/testdata/src/a/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,21 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "a", ++ srcs = [ ++ "asm.go", ++ "asm_amd64.s", ++ "asm_arm64.s", ++ "asm_darwin_amd64.s", ++ "asm_linux_amd64.s", ++ "asm_windows_amd64.s", ++ ], ++ importpath = "golang.org/x/tools/go/analysis/passes/framepointer/testdata/src/a", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":a", ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/analysis/passes/gofix/BUILD.bazel c/go/analysis/passes/gofix/BUILD.bazel +--- b/go/analysis/passes/gofix/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/gofix/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,34 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test") ++ ++go_library( ++ name = "gofix", ++ srcs = [ ++ "doc.go", ++ "gofix.go", ++ ], ++ embedsrcs = ["doc.go"], ++ importpath = "golang.org/x/tools/go/analysis/passes/gofix", ++ visibility = ["//visibility:public"], ++ deps = [ ++ "//go/analysis", ++ "//go/analysis/passes/inspect", ++ "//go/analysis/passes/internal/gofixdirective", ++ "//go/ast/inspector", ++ "//internal/analysisinternal", ++ ], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":gofix", ++ visibility = ["//visibility:public"], ++) ++ ++go_test( ++ name = "gofix_test", ++ srcs = ["gofix_test.go"], ++ deps = [ ++ ":gofix", ++ "//go/analysis/analysistest", ++ ], ++) +diff -urN b/go/analysis/passes/gofix/testdata/src/a/BUILD.bazel c/go/analysis/passes/gofix/testdata/src/a/BUILD.bazel +--- b/go/analysis/passes/gofix/testdata/src/a/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/gofix/testdata/src/a/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "a", ++ srcs = ["a.go"], ++ importpath = "golang.org/x/tools/go/analysis/passes/gofix/testdata/src/a", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":a", ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/analysis/passes/hostport/BUILD.bazel c/go/analysis/passes/hostport/BUILD.bazel +--- b/go/analysis/passes/hostport/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/hostport/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,30 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test") ++ ++go_library( ++ name = "hostport", ++ srcs = ["hostport.go"], ++ 
importpath = "golang.org/x/tools/go/analysis/passes/hostport", ++ visibility = ["//visibility:public"], ++ deps = [ ++ "//go/analysis", ++ "//go/analysis/passes/inspect", ++ "//go/types/typeutil", ++ "//internal/analysisinternal/typeindex", ++ "//internal/typesinternal/typeindex", ++ ], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":hostport", ++ visibility = ["//visibility:public"], ++) ++ ++go_test( ++ name = "hostport_test", ++ srcs = ["hostport_test.go"], ++ deps = [ ++ ":hostport", ++ "//go/analysis/analysistest", ++ ], ++) +diff -urN b/go/analysis/passes/hostport/testdata/src/a/BUILD.bazel c/go/analysis/passes/hostport/testdata/src/a/BUILD.bazel +--- b/go/analysis/passes/hostport/testdata/src/a/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/hostport/testdata/src/a/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "a", ++ srcs = ["a.go"], ++ importpath = "golang.org/x/tools/go/analysis/passes/hostport/testdata/src/a", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":a", ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/analysis/passes/httpmux/BUILD.bazel c/go/analysis/passes/httpmux/BUILD.bazel +--- b/go/analysis/passes/httpmux/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/httpmux/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,29 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test") ++ ++go_library( ++ name = "httpmux", ++ srcs = ["httpmux.go"], ++ importpath = "golang.org/x/tools/go/analysis/passes/httpmux", ++ visibility = ["//visibility:public"], ++ deps = [ ++ "//go/analysis", ++ "//go/analysis/passes/inspect", ++ "//go/ast/inspector", ++ "//go/types/typeutil", ++ "//internal/typesinternal", ++ "@org_golang_x_mod//semver:go_default_library", ++ ], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":httpmux", ++ visibility = ["//visibility:public"], ++) ++ ++go_test( ++ name = "httpmux_test", ++ srcs = ["httpmux_test.go"], ++ embed = [":httpmux"], ++ deps = ["//go/analysis/analysistest"], ++) +diff -urN b/go/analysis/passes/httpmux/cmd/httpmux/BUILD.bazel c/go/analysis/passes/httpmux/cmd/httpmux/BUILD.bazel +--- b/go/analysis/passes/httpmux/cmd/httpmux/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/httpmux/cmd/httpmux/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,18 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_binary", "go_library") ++ ++go_library( ++ name = "httpmux_lib", ++ srcs = ["main.go"], ++ importpath = "golang.org/x/tools/go/analysis/passes/httpmux/cmd/httpmux", ++ visibility = ["//visibility:private"], ++ deps = [ ++ "//go/analysis/passes/httpmux", ++ "//go/analysis/singlechecker", ++ ], ++) ++ ++go_binary( ++ name = "httpmux", ++ embed = [":httpmux_lib"], ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/analysis/passes/httpmux/testdata/src/a/BUILD.bazel c/go/analysis/passes/httpmux/testdata/src/a/BUILD.bazel +--- b/go/analysis/passes/httpmux/testdata/src/a/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/httpmux/testdata/src/a/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "a", ++ srcs = ["a.go"], ++ importpath = "golang.org/x/tools/go/analysis/passes/httpmux/testdata/src/a", ++ visibility = ["//visibility:public"], ++) ++ 
++alias( ++ name = "go_default_library", ++ actual = ":a", ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/analysis/passes/httpresponse/BUILD.bazel c/go/analysis/passes/httpresponse/BUILD.bazel +--- b/go/analysis/passes/httpresponse/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/httpresponse/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,29 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test") ++ ++go_library( ++ name = "httpresponse", ++ srcs = ["httpresponse.go"], ++ importpath = "golang.org/x/tools/go/analysis/passes/httpresponse", ++ visibility = ["//visibility:public"], ++ deps = [ ++ "//go/analysis", ++ "//go/analysis/passes/inspect", ++ "//go/ast/inspector", ++ "//internal/typesinternal", ++ ], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":httpresponse", ++ visibility = ["//visibility:public"], ++) ++ ++go_test( ++ name = "httpresponse_test", ++ srcs = ["httpresponse_test.go"], ++ deps = [ ++ ":httpresponse", ++ "//go/analysis/analysistest", ++ ], ++) +diff -urN b/go/analysis/passes/httpresponse/testdata/src/a/BUILD.bazel c/go/analysis/passes/httpresponse/testdata/src/a/BUILD.bazel +--- b/go/analysis/passes/httpresponse/testdata/src/a/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/httpresponse/testdata/src/a/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "a", ++ srcs = ["a.go"], ++ importpath = "golang.org/x/tools/go/analysis/passes/httpresponse/testdata/src/a", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":a", ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/analysis/passes/httpresponse/testdata/src/typeparams/BUILD.bazel c/go/analysis/passes/httpresponse/testdata/src/typeparams/BUILD.bazel +--- b/go/analysis/passes/httpresponse/testdata/src/typeparams/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/httpresponse/testdata/src/typeparams/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "typeparams", ++ srcs = ["typeparams.go"], ++ importpath = "golang.org/x/tools/go/analysis/passes/httpresponse/testdata/src/typeparams", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":typeparams", ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/analysis/passes/ifaceassert/BUILD.bazel c/go/analysis/passes/ifaceassert/BUILD.bazel +--- b/go/analysis/passes/ifaceassert/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/ifaceassert/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,34 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test") ++ ++go_library( ++ name = "ifaceassert", ++ srcs = [ ++ "doc.go", ++ "ifaceassert.go", ++ ], ++ embedsrcs = ["doc.go"], ++ importpath = "golang.org/x/tools/go/analysis/passes/ifaceassert", ++ visibility = ["//visibility:public"], ++ deps = [ ++ "//go/analysis", ++ "//go/analysis/passes/inspect", ++ "//go/analysis/passes/internal/analysisutil", ++ "//go/ast/inspector", ++ "//internal/typeparams", ++ ], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":ifaceassert", ++ visibility = ["//visibility:public"], ++) ++ ++go_test( ++ name = "ifaceassert_test", ++ srcs = ["ifaceassert_test.go"], ++ deps = [ ++ ":ifaceassert", ++ "//go/analysis/analysistest", 
++ ], ++) +diff -urN b/go/analysis/passes/ifaceassert/cmd/ifaceassert/BUILD.bazel c/go/analysis/passes/ifaceassert/cmd/ifaceassert/BUILD.bazel +--- b/go/analysis/passes/ifaceassert/cmd/ifaceassert/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/ifaceassert/cmd/ifaceassert/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,18 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_binary", "go_library") ++ ++go_library( ++ name = "ifaceassert_lib", ++ srcs = ["main.go"], ++ importpath = "golang.org/x/tools/go/analysis/passes/ifaceassert/cmd/ifaceassert", ++ visibility = ["//visibility:private"], ++ deps = [ ++ "//go/analysis/passes/ifaceassert", ++ "//go/analysis/singlechecker", ++ ], ++) ++ ++go_binary( ++ name = "ifaceassert", ++ embed = [":ifaceassert_lib"], ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/analysis/passes/ifaceassert/testdata/src/a/BUILD.bazel c/go/analysis/passes/ifaceassert/testdata/src/a/BUILD.bazel +--- b/go/analysis/passes/ifaceassert/testdata/src/a/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/ifaceassert/testdata/src/a/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "a", ++ srcs = ["a.go"], ++ importpath = "golang.org/x/tools/go/analysis/passes/ifaceassert/testdata/src/a", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":a", ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/analysis/passes/ifaceassert/testdata/src/typeparams/BUILD.bazel c/go/analysis/passes/ifaceassert/testdata/src/typeparams/BUILD.bazel +--- b/go/analysis/passes/ifaceassert/testdata/src/typeparams/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/ifaceassert/testdata/src/typeparams/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "typeparams", ++ srcs = ["typeparams.go"], ++ importpath = "golang.org/x/tools/go/analysis/passes/ifaceassert/testdata/src/typeparams", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":typeparams", ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/analysis/passes/inline/BUILD.bazel c/go/analysis/passes/inline/BUILD.bazel +--- b/go/analysis/passes/inline/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/inline/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,43 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test") ++ ++go_library( ++ name = "inline", ++ srcs = [ ++ "doc.go", ++ "gofix.go", ++ ], ++ embedsrcs = ["doc.go"], ++ importpath = "golang.org/x/tools/go/analysis/passes/inline", ++ visibility = ["//visibility:public"], ++ deps = [ ++ "//go/analysis", ++ "//go/analysis/passes/inspect", ++ "//go/analysis/passes/internal/gofixdirective", ++ "//go/ast/edge", ++ "//go/ast/inspector", ++ "//go/types/typeutil", ++ "//internal/analysisinternal", ++ "//internal/astutil", ++ "//internal/diff", ++ "//internal/refactor", ++ "//internal/refactor/inline", ++ "//internal/typesinternal", ++ ], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":inline", ++ visibility = ["//visibility:public"], ++) ++ ++go_test( ++ name = "inline_test", ++ srcs = ["gofix_test.go"], ++ embed = [":inline"], ++ deps = [ ++ "//go/analysis/analysistest", ++ "//internal/testenv", ++ "@com_github_google_go_cmp//cmp:go_default_library", 
++ ], ++) +diff -urN b/go/analysis/passes/inline/cmd/inline/BUILD.bazel c/go/analysis/passes/inline/cmd/inline/BUILD.bazel +--- b/go/analysis/passes/inline/cmd/inline/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/inline/cmd/inline/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,18 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_binary", "go_library") ++ ++go_library( ++ name = "inline_lib", ++ srcs = ["main.go"], ++ importpath = "golang.org/x/tools/go/analysis/passes/inline/cmd/inline", ++ visibility = ["//visibility:private"], ++ deps = [ ++ "//go/analysis/passes/inline", ++ "//go/analysis/singlechecker", ++ ], ++) ++ ++go_binary( ++ name = "inline", ++ embed = [":inline_lib"], ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/analysis/passes/inline/testdata/src/a/BUILD.bazel c/go/analysis/passes/inline/testdata/src/a/BUILD.bazel +--- b/go/analysis/passes/inline/testdata/src/a/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/inline/testdata/src/a/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "a", ++ srcs = ["a.go"], ++ importpath = "golang.org/x/tools/go/analysis/passes/inline/testdata/src/a", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":a", ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/analysis/passes/inline/testdata/src/a/internal/BUILD.bazel c/go/analysis/passes/inline/testdata/src/a/internal/BUILD.bazel +--- b/go/analysis/passes/inline/testdata/src/a/internal/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/inline/testdata/src/a/internal/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "internal", ++ srcs = ["d.go"], ++ importpath = "golang.org/x/tools/go/analysis/passes/inline/testdata/src/a/internal", ++ visibility = ["//go/analysis/passes/inline/testdata/src/a:__subpackages__"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":internal", ++ visibility = ["//go/analysis/passes/inline/testdata/src/a:__subpackages__"], ++) +diff -urN b/go/analysis/passes/inline/testdata/src/b/BUILD.bazel c/go/analysis/passes/inline/testdata/src/b/BUILD.bazel +--- b/go/analysis/passes/inline/testdata/src/b/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/inline/testdata/src/b/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "b", ++ srcs = ["b.go"], ++ importpath = "golang.org/x/tools/go/analysis/passes/inline/testdata/src/b", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":b", ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/analysis/passes/inline/testdata/src/binding_false/BUILD.bazel c/go/analysis/passes/inline/testdata/src/binding_false/BUILD.bazel +--- b/go/analysis/passes/inline/testdata/src/binding_false/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/inline/testdata/src/binding_false/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "binding_false", ++ srcs = ["a.go"], ++ importpath = "golang.org/x/tools/go/analysis/passes/inline/testdata/src/binding_false", ++ visibility = ["//visibility:public"], ++) ++ 
++alias( ++ name = "go_default_library", ++ actual = ":binding_false", ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/analysis/passes/inline/testdata/src/binding_true/BUILD.bazel c/go/analysis/passes/inline/testdata/src/binding_true/BUILD.bazel +--- b/go/analysis/passes/inline/testdata/src/binding_true/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/inline/testdata/src/binding_true/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "binding_true", ++ srcs = ["a.go"], ++ importpath = "golang.org/x/tools/go/analysis/passes/inline/testdata/src/binding_true", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":binding_true", ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/analysis/passes/inline/testdata/src/c/BUILD.bazel c/go/analysis/passes/inline/testdata/src/c/BUILD.bazel +--- b/go/analysis/passes/inline/testdata/src/c/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/inline/testdata/src/c/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "c", ++ srcs = ["c.go"], ++ importpath = "golang.org/x/tools/go/analysis/passes/inline/testdata/src/c", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":c", ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/analysis/passes/inline/testdata/src/directive/BUILD.bazel c/go/analysis/passes/inline/testdata/src/directive/BUILD.bazel +--- b/go/analysis/passes/inline/testdata/src/directive/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/inline/testdata/src/directive/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "directive", ++ srcs = ["directive.go"], ++ importpath = "golang.org/x/tools/go/analysis/passes/inline/testdata/src/directive", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":directive", ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/analysis/passes/inspect/BUILD.bazel c/go/analysis/passes/inspect/BUILD.bazel +--- b/go/analysis/passes/inspect/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/inspect/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,18 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "inspect", ++ srcs = ["inspect.go"], ++ importpath = "golang.org/x/tools/go/analysis/passes/inspect", ++ visibility = ["//visibility:public"], ++ deps = [ ++ "//go/analysis", ++ "//go/ast/inspector", ++ ], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":inspect", ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/analysis/passes/internal/analysisutil/BUILD.bazel c/go/analysis/passes/internal/analysisutil/BUILD.bazel +--- b/go/analysis/passes/internal/analysisutil/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/internal/analysisutil/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,24 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test") ++ ++go_library( ++ name = "analysisutil", ++ srcs = ["util.go"], ++ importpath = "golang.org/x/tools/go/analysis/passes/internal/analysisutil", ++ visibility = ["//go/analysis/passes:__subpackages__"], ++ deps = 
[ ++ "//go/analysis", ++ "//internal/analysisinternal", ++ ], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":analysisutil", ++ visibility = ["//go/analysis/passes:__subpackages__"], ++) ++ ++go_test( ++ name = "analysisutil_test", ++ srcs = ["util_test.go"], ++ deps = [":analysisutil"], ++) +diff -urN b/go/analysis/passes/internal/gofixdirective/BUILD.bazel c/go/analysis/passes/internal/gofixdirective/BUILD.bazel +--- b/go/analysis/passes/internal/gofixdirective/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/internal/gofixdirective/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,19 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "gofixdirective", ++ srcs = ["gofixdirective.go"], ++ importpath = "golang.org/x/tools/go/analysis/passes/internal/gofixdirective", ++ visibility = ["//go/analysis/passes:__subpackages__"], ++ deps = [ ++ "//go/analysis", ++ "//go/ast/inspector", ++ "//internal/astutil", ++ ], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":gofixdirective", ++ visibility = ["//go/analysis/passes:__subpackages__"], ++) +diff -urN b/go/analysis/passes/loopclosure/BUILD.bazel c/go/analysis/passes/loopclosure/BUILD.bazel +--- b/go/analysis/passes/loopclosure/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/loopclosure/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,37 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test") ++ ++go_library( ++ name = "loopclosure", ++ srcs = [ ++ "doc.go", ++ "loopclosure.go", ++ ], ++ embedsrcs = ["doc.go"], ++ importpath = "golang.org/x/tools/go/analysis/passes/loopclosure", ++ visibility = ["//visibility:public"], ++ deps = [ ++ "//go/analysis", ++ "//go/analysis/passes/inspect", ++ "//go/analysis/passes/internal/analysisutil", ++ "//go/ast/inspector", ++ "//go/types/typeutil", ++ "//internal/typesinternal", ++ "//internal/versions", ++ ], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":loopclosure", ++ visibility = ["//visibility:public"], ++) ++ ++go_test( ++ name = "loopclosure_test", ++ srcs = ["loopclosure_test.go"], ++ deps = [ ++ ":loopclosure", ++ "//go/analysis/analysistest", ++ "//internal/testfiles", ++ ], ++) +diff -urN b/go/analysis/passes/loopclosure/testdata/src/a/BUILD.bazel c/go/analysis/passes/loopclosure/testdata/src/a/BUILD.bazel +--- b/go/analysis/passes/loopclosure/testdata/src/a/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/loopclosure/testdata/src/a/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,18 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "a", ++ srcs = [ ++ "a.go", ++ "b.go", ++ ], ++ importpath = "golang.org/x/tools/go/analysis/passes/loopclosure/testdata/src/a", ++ visibility = ["//visibility:public"], ++ deps = ["@org_golang_x_sync//errgroup:go_default_library"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":a", ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/analysis/passes/loopclosure/testdata/src/golang.org/x/sync/errgroup/BUILD.bazel c/go/analysis/passes/loopclosure/testdata/src/golang.org/x/sync/errgroup/BUILD.bazel +--- b/go/analysis/passes/loopclosure/testdata/src/golang.org/x/sync/errgroup/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/loopclosure/testdata/src/golang.org/x/sync/errgroup/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ 
++go_library( ++ name = "errgroup", ++ srcs = ["errgroup.go"], ++ importpath = "golang.org/x/tools/go/analysis/passes/loopclosure/testdata/src/golang.org/x/sync/errgroup", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":errgroup", ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/analysis/passes/loopclosure/testdata/src/subtests/BUILD.bazel c/go/analysis/passes/loopclosure/testdata/src/subtests/BUILD.bazel +--- b/go/analysis/passes/loopclosure/testdata/src/subtests/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/loopclosure/testdata/src/subtests/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "subtests", ++ srcs = ["subtest.go"], ++ importpath = "golang.org/x/tools/go/analysis/passes/loopclosure/testdata/src/subtests", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":subtests", ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/analysis/passes/loopclosure/testdata/src/typeparams/BUILD.bazel c/go/analysis/passes/loopclosure/testdata/src/typeparams/BUILD.bazel +--- b/go/analysis/passes/loopclosure/testdata/src/typeparams/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/loopclosure/testdata/src/typeparams/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,15 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "typeparams", ++ srcs = ["typeparams.go"], ++ importpath = "golang.org/x/tools/go/analysis/passes/loopclosure/testdata/src/typeparams", ++ visibility = ["//visibility:public"], ++ deps = ["@org_golang_x_sync//errgroup:go_default_library"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":typeparams", ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/analysis/passes/lostcancel/BUILD.bazel c/go/analysis/passes/lostcancel/BUILD.bazel +--- b/go/analysis/passes/lostcancel/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/lostcancel/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,37 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test") ++ ++go_library( ++ name = "lostcancel", ++ srcs = [ ++ "doc.go", ++ "lostcancel.go", ++ ], ++ embedsrcs = ["doc.go"], ++ importpath = "golang.org/x/tools/go/analysis/passes/lostcancel", ++ visibility = ["//visibility:public"], ++ deps = [ ++ "//go/analysis", ++ "//go/analysis/passes/ctrlflow", ++ "//go/analysis/passes/inspect", ++ "//go/analysis/passes/internal/analysisutil", ++ "//go/ast/inspector", ++ "//go/cfg", ++ "//internal/astutil", ++ "//internal/typesinternal", ++ ], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":lostcancel", ++ visibility = ["//visibility:public"], ++) ++ ++go_test( ++ name = "lostcancel_test", ++ srcs = ["lostcancel_test.go"], ++ deps = [ ++ ":lostcancel", ++ "//go/analysis/analysistest", ++ ], ++) +diff -urN b/go/analysis/passes/lostcancel/cmd/lostcancel/BUILD.bazel c/go/analysis/passes/lostcancel/cmd/lostcancel/BUILD.bazel +--- b/go/analysis/passes/lostcancel/cmd/lostcancel/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/lostcancel/cmd/lostcancel/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,18 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_binary", "go_library") ++ ++go_library( ++ name = "lostcancel_lib", ++ srcs = ["main.go"], ++ importpath = 
"golang.org/x/tools/go/analysis/passes/lostcancel/cmd/lostcancel", ++ visibility = ["//visibility:private"], ++ deps = [ ++ "//go/analysis/passes/lostcancel", ++ "//go/analysis/singlechecker", ++ ], ++) ++ ++go_binary( ++ name = "lostcancel", ++ embed = [":lostcancel_lib"], ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/analysis/passes/lostcancel/testdata/src/a/BUILD.bazel c/go/analysis/passes/lostcancel/testdata/src/a/BUILD.bazel +--- b/go/analysis/passes/lostcancel/testdata/src/a/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/lostcancel/testdata/src/a/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "a", ++ srcs = ["a.go"], ++ importpath = "golang.org/x/tools/go/analysis/passes/lostcancel/testdata/src/a", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":a", ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/analysis/passes/lostcancel/testdata/src/b/BUILD.bazel c/go/analysis/passes/lostcancel/testdata/src/b/BUILD.bazel +--- b/go/analysis/passes/lostcancel/testdata/src/b/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/lostcancel/testdata/src/b/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_binary", "go_library") ++ ++go_library( ++ name = "b_lib", ++ srcs = ["b.go"], ++ importpath = "golang.org/x/tools/go/analysis/passes/lostcancel/testdata/src/b", ++ visibility = ["//visibility:private"], ++) ++ ++go_binary( ++ name = "b", ++ embed = [":b_lib"], ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/analysis/passes/lostcancel/testdata/src/typeparams/BUILD.bazel c/go/analysis/passes/lostcancel/testdata/src/typeparams/BUILD.bazel +--- b/go/analysis/passes/lostcancel/testdata/src/typeparams/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/lostcancel/testdata/src/typeparams/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "typeparams", ++ srcs = ["typeparams.go"], ++ importpath = "golang.org/x/tools/go/analysis/passes/lostcancel/testdata/src/typeparams", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":typeparams", ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/analysis/passes/modernize/BUILD.bazel c/go/analysis/passes/modernize/BUILD.bazel +--- b/go/analysis/passes/modernize/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/modernize/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,68 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test") ++ ++go_library( ++ name = "modernize", ++ srcs = [ ++ "any.go", ++ "bloop.go", ++ "doc.go", ++ "errorsastype.go", ++ "fmtappendf.go", ++ "forvar.go", ++ "maps.go", ++ "minmax.go", ++ "modernize.go", ++ "newexpr.go", ++ "omitzero.go", ++ "rangeint.go", ++ "reflect.go", ++ "slices.go", ++ "slicescontains.go", ++ "slicesdelete.go", ++ "sortslice.go", ++ "stditerators.go", ++ "stringsbuilder.go", ++ "stringscutprefix.go", ++ "stringsseq.go", ++ "testingcontext.go", ++ "waitgroup.go", ++ ], ++ embedsrcs = ["doc.go"], ++ importpath = "golang.org/x/tools/go/analysis/passes/modernize", ++ visibility = ["//visibility:public"], ++ deps = [ ++ "//go/analysis", ++ "//go/analysis/passes/inspect", ++ "//go/ast/edge", ++ 
"//go/ast/inspector", ++ "//go/types/typeutil", ++ "//internal/analysisinternal", ++ "//internal/analysisinternal/generated", ++ "//internal/analysisinternal/typeindex", ++ "//internal/astutil", ++ "//internal/goplsexport", ++ "//internal/moreiters", ++ "//internal/refactor", ++ "//internal/stdlib", ++ "//internal/typeparams", ++ "//internal/typesinternal", ++ "//internal/typesinternal/typeindex", ++ "//internal/versions", ++ ], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":modernize", ++ visibility = ["//visibility:public"], ++) ++ ++go_test( ++ name = "modernize_test", ++ srcs = ["modernize_test.go"], ++ deps = [ ++ ":modernize", ++ "//go/analysis/analysistest", ++ "//internal/goplsexport", ++ ], ++) +diff -urN b/go/analysis/passes/modernize/cmd/modernize/BUILD.bazel c/go/analysis/passes/modernize/cmd/modernize/BUILD.bazel +--- b/go/analysis/passes/modernize/cmd/modernize/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/modernize/cmd/modernize/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,18 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_binary", "go_library") ++ ++go_library( ++ name = "modernize_lib", ++ srcs = ["main.go"], ++ importpath = "golang.org/x/tools/go/analysis/passes/modernize/cmd/modernize", ++ visibility = ["//visibility:private"], ++ deps = [ ++ "//go/analysis/multichecker", ++ "//go/analysis/passes/modernize", ++ ], ++) ++ ++go_binary( ++ name = "modernize", ++ embed = [":modernize_lib"], ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/analysis/passes/modernize/testdata/src/any/BUILD.bazel c/go/analysis/passes/modernize/testdata/src/any/BUILD.bazel +--- b/go/analysis/passes/modernize/testdata/src/any/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/modernize/testdata/src/any/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "any", ++ srcs = ["any.go"], ++ importpath = "golang.org/x/tools/go/analysis/passes/modernize/testdata/src/any", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":any", ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/analysis/passes/modernize/testdata/src/appendclipped/BUILD.bazel c/go/analysis/passes/modernize/testdata/src/appendclipped/BUILD.bazel +--- b/go/analysis/passes/modernize/testdata/src/appendclipped/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/modernize/testdata/src/appendclipped/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,17 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "appendclipped", ++ srcs = [ ++ "appendclipped.go", ++ "bytesclone.go", ++ ], ++ importpath = "golang.org/x/tools/go/analysis/passes/modernize/testdata/src/appendclipped", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":appendclipped", ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/analysis/passes/modernize/testdata/src/bloop/BUILD.bazel c/go/analysis/passes/modernize/testdata/src/bloop/BUILD.bazel +--- b/go/analysis/passes/modernize/testdata/src/bloop/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/modernize/testdata/src/bloop/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,20 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test") ++ ++go_library( ++ name = "bloop", ++ srcs = ["bloop.go"], ++ importpath = 
"golang.org/x/tools/go/analysis/passes/modernize/testdata/src/bloop", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":bloop", ++ visibility = ["//visibility:public"], ++) ++ ++go_test( ++ name = "bloop_test", ++ srcs = ["bloop_test.go"], ++ embed = [":bloop"], ++) +diff -urN b/go/analysis/passes/modernize/testdata/src/errorsastype/BUILD.bazel c/go/analysis/passes/modernize/testdata/src/errorsastype/BUILD.bazel +--- b/go/analysis/passes/modernize/testdata/src/errorsastype/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/modernize/testdata/src/errorsastype/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "errorsastype", ++ srcs = ["errorsastype.go"], ++ importpath = "golang.org/x/tools/go/analysis/passes/modernize/testdata/src/errorsastype", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":errorsastype", ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/analysis/passes/modernize/testdata/src/errorsastype/dotimport/BUILD.bazel c/go/analysis/passes/modernize/testdata/src/errorsastype/dotimport/BUILD.bazel +--- b/go/analysis/passes/modernize/testdata/src/errorsastype/dotimport/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/modernize/testdata/src/errorsastype/dotimport/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "dotimport", ++ srcs = ["a.go"], ++ importpath = "golang.org/x/tools/go/analysis/passes/modernize/testdata/src/errorsastype/dotimport", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":dotimport", ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/analysis/passes/modernize/testdata/src/fieldsseq/BUILD.bazel c/go/analysis/passes/modernize/testdata/src/fieldsseq/BUILD.bazel +--- b/go/analysis/passes/modernize/testdata/src/fieldsseq/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/modernize/testdata/src/fieldsseq/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,17 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "fieldsseq", ++ srcs = [ ++ "fieldsseq.go", ++ "fieldsseq_go123.go", ++ ], ++ importpath = "golang.org/x/tools/go/analysis/passes/modernize/testdata/src/fieldsseq", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":fieldsseq", ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/analysis/passes/modernize/testdata/src/fmtappendf/BUILD.bazel c/go/analysis/passes/modernize/testdata/src/fmtappendf/BUILD.bazel +--- b/go/analysis/passes/modernize/testdata/src/fmtappendf/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/modernize/testdata/src/fmtappendf/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "fmtappendf", ++ srcs = ["fmtappendf.go"], ++ importpath = "golang.org/x/tools/go/analysis/passes/modernize/testdata/src/fmtappendf", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":fmtappendf", ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/analysis/passes/modernize/testdata/src/forvar/BUILD.bazel 
c/go/analysis/passes/modernize/testdata/src/forvar/BUILD.bazel +--- b/go/analysis/passes/modernize/testdata/src/forvar/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/modernize/testdata/src/forvar/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "forvar", ++ srcs = ["forvar.go"], ++ importpath = "golang.org/x/tools/go/analysis/passes/modernize/testdata/src/forvar", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":forvar", ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/analysis/passes/modernize/testdata/src/mapsloop/BUILD.bazel c/go/analysis/passes/modernize/testdata/src/mapsloop/BUILD.bazel +--- b/go/analysis/passes/modernize/testdata/src/mapsloop/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/modernize/testdata/src/mapsloop/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,17 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "mapsloop", ++ srcs = [ ++ "mapsloop.go", ++ "mapsloop_dot.go", ++ ], ++ importpath = "golang.org/x/tools/go/analysis/passes/modernize/testdata/src/mapsloop", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":mapsloop", ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/analysis/passes/modernize/testdata/src/minmax/BUILD.bazel c/go/analysis/passes/modernize/testdata/src/minmax/BUILD.bazel +--- b/go/analysis/passes/modernize/testdata/src/minmax/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/modernize/testdata/src/minmax/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "minmax", ++ srcs = ["minmax.go"], ++ importpath = "golang.org/x/tools/go/analysis/passes/modernize/testdata/src/minmax", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":minmax", ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/analysis/passes/modernize/testdata/src/minmax/nonstrict/BUILD.bazel c/go/analysis/passes/modernize/testdata/src/minmax/nonstrict/BUILD.bazel +--- b/go/analysis/passes/modernize/testdata/src/minmax/nonstrict/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/modernize/testdata/src/minmax/nonstrict/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "nonstrict", ++ srcs = ["nonstrict.go"], ++ importpath = "golang.org/x/tools/go/analysis/passes/modernize/testdata/src/minmax/nonstrict", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":nonstrict", ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/analysis/passes/modernize/testdata/src/minmax/userdefined/BUILD.bazel c/go/analysis/passes/modernize/testdata/src/minmax/userdefined/BUILD.bazel +--- b/go/analysis/passes/modernize/testdata/src/minmax/userdefined/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/modernize/testdata/src/minmax/userdefined/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "userdefined", ++ srcs = ["userdefined.go"], ++ importpath = 
"golang.org/x/tools/go/analysis/passes/modernize/testdata/src/minmax/userdefined", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":userdefined", ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/analysis/passes/modernize/testdata/src/minmax/wrongoperators/BUILD.bazel c/go/analysis/passes/modernize/testdata/src/minmax/wrongoperators/BUILD.bazel +--- b/go/analysis/passes/modernize/testdata/src/minmax/wrongoperators/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/modernize/testdata/src/minmax/wrongoperators/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "wrongoperators", ++ srcs = ["wrongoperators.go"], ++ importpath = "golang.org/x/tools/go/analysis/passes/modernize/testdata/src/minmax/wrongoperators", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":wrongoperators", ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/analysis/passes/modernize/testdata/src/minmax/wrongreturn/BUILD.bazel c/go/analysis/passes/modernize/testdata/src/minmax/wrongreturn/BUILD.bazel +--- b/go/analysis/passes/modernize/testdata/src/minmax/wrongreturn/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/modernize/testdata/src/minmax/wrongreturn/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "wrongreturn", ++ srcs = ["wrongreturn.go"], ++ importpath = "golang.org/x/tools/go/analysis/passes/modernize/testdata/src/minmax/wrongreturn", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":wrongreturn", ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/analysis/passes/modernize/testdata/src/newexpr/BUILD.bazel c/go/analysis/passes/modernize/testdata/src/newexpr/BUILD.bazel +--- b/go/analysis/passes/modernize/testdata/src/newexpr/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/modernize/testdata/src/newexpr/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,17 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "newexpr", ++ srcs = [ ++ "newexpr.go", ++ "newexpr_go125.go", ++ ], ++ importpath = "golang.org/x/tools/go/analysis/passes/modernize/testdata/src/newexpr", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":newexpr", ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/analysis/passes/modernize/testdata/src/omitzero/BUILD.bazel c/go/analysis/passes/modernize/testdata/src/omitzero/BUILD.bazel +--- b/go/analysis/passes/modernize/testdata/src/omitzero/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/modernize/testdata/src/omitzero/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "omitzero", ++ srcs = ["omitzero.go"], ++ importpath = "golang.org/x/tools/go/analysis/passes/modernize/testdata/src/omitzero", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":omitzero", ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/analysis/passes/modernize/testdata/src/rangeint/a/BUILD.bazel c/go/analysis/passes/modernize/testdata/src/rangeint/a/BUILD.bazel +--- 
b/go/analysis/passes/modernize/testdata/src/rangeint/a/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/modernize/testdata/src/rangeint/a/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "a", ++ srcs = ["a.go"], ++ importpath = "golang.org/x/tools/go/analysis/passes/modernize/testdata/src/rangeint/a", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":a", ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/analysis/passes/modernize/testdata/src/rangeint/BUILD.bazel c/go/analysis/passes/modernize/testdata/src/rangeint/BUILD.bazel +--- b/go/analysis/passes/modernize/testdata/src/rangeint/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/modernize/testdata/src/rangeint/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "rangeint", ++ srcs = ["rangeint.go"], ++ importpath = "golang.org/x/tools/go/analysis/passes/modernize/testdata/src/rangeint", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":rangeint", ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/analysis/passes/modernize/testdata/src/reflecttypefor/BUILD.bazel c/go/analysis/passes/modernize/testdata/src/reflecttypefor/BUILD.bazel +--- b/go/analysis/passes/modernize/testdata/src/reflecttypefor/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/modernize/testdata/src/reflecttypefor/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "reflecttypefor", ++ srcs = ["reflecttypefor.go"], ++ importpath = "golang.org/x/tools/go/analysis/passes/modernize/testdata/src/reflecttypefor", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":reflecttypefor", ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/analysis/passes/modernize/testdata/src/slicescontains/BUILD.bazel c/go/analysis/passes/modernize/testdata/src/slicescontains/BUILD.bazel +--- b/go/analysis/passes/modernize/testdata/src/slicescontains/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/modernize/testdata/src/slicescontains/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "slicescontains", ++ srcs = ["slicescontains.go"], ++ importpath = "golang.org/x/tools/go/analysis/passes/modernize/testdata/src/slicescontains", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":slicescontains", ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/analysis/passes/modernize/testdata/src/slicesdelete/BUILD.bazel c/go/analysis/passes/modernize/testdata/src/slicesdelete/BUILD.bazel +--- b/go/analysis/passes/modernize/testdata/src/slicesdelete/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/modernize/testdata/src/slicesdelete/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "slicesdelete", ++ srcs = ["slicesdelete.go"], ++ importpath = "golang.org/x/tools/go/analysis/passes/modernize/testdata/src/slicesdelete", ++ visibility = ["//visibility:public"], ++) ++ 
++alias( ++ name = "go_default_library", ++ actual = ":slicesdelete", ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/analysis/passes/modernize/testdata/src/slicessort/BUILD.bazel c/go/analysis/passes/modernize/testdata/src/slicessort/BUILD.bazel +--- b/go/analysis/passes/modernize/testdata/src/slicessort/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/modernize/testdata/src/slicessort/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,17 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "slicessort", ++ srcs = [ ++ "slicessort.go", ++ "slicessort_dot.go", ++ ], ++ importpath = "golang.org/x/tools/go/analysis/passes/modernize/testdata/src/slicessort", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":slicessort", ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/analysis/passes/modernize/testdata/src/splitseq/BUILD.bazel c/go/analysis/passes/modernize/testdata/src/splitseq/BUILD.bazel +--- b/go/analysis/passes/modernize/testdata/src/splitseq/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/modernize/testdata/src/splitseq/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,17 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "splitseq", ++ srcs = [ ++ "splitseq.go", ++ "splitseq_go123.go", ++ ], ++ importpath = "golang.org/x/tools/go/analysis/passes/modernize/testdata/src/splitseq", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":splitseq", ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/analysis/passes/modernize/testdata/src/stditerators/BUILD.bazel c/go/analysis/passes/modernize/testdata/src/stditerators/BUILD.bazel +--- b/go/analysis/passes/modernize/testdata/src/stditerators/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/modernize/testdata/src/stditerators/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "stditerators", ++ srcs = ["stditerators.go"], ++ importpath = "golang.org/x/tools/go/analysis/passes/modernize/testdata/src/stditerators", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":stditerators", ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/analysis/passes/modernize/testdata/src/stringsbuilder/BUILD.bazel c/go/analysis/passes/modernize/testdata/src/stringsbuilder/BUILD.bazel +--- b/go/analysis/passes/modernize/testdata/src/stringsbuilder/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/modernize/testdata/src/stringsbuilder/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "stringsbuilder", ++ srcs = ["stringsbuilder.go"], ++ importpath = "golang.org/x/tools/go/analysis/passes/modernize/testdata/src/stringsbuilder", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":stringsbuilder", ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/analysis/passes/modernize/testdata/src/stringscutprefix/BUILD.bazel c/go/analysis/passes/modernize/testdata/src/stringscutprefix/BUILD.bazel +--- b/go/analysis/passes/modernize/testdata/src/stringscutprefix/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ 
c/go/analysis/passes/modernize/testdata/src/stringscutprefix/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,17 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "stringscutprefix", ++ srcs = [ ++ "stringscutprefix.go", ++ "stringscutprefix_dot.go", ++ ], ++ importpath = "golang.org/x/tools/go/analysis/passes/modernize/testdata/src/stringscutprefix", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":stringscutprefix", ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/analysis/passes/modernize/testdata/src/stringscutprefix/bytescutprefix/BUILD.bazel c/go/analysis/passes/modernize/testdata/src/stringscutprefix/bytescutprefix/BUILD.bazel +--- b/go/analysis/passes/modernize/testdata/src/stringscutprefix/bytescutprefix/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/modernize/testdata/src/stringscutprefix/bytescutprefix/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,17 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "bytescutprefix", ++ srcs = [ ++ "bytescutprefix.go", ++ "bytescutprefix_dot.go", ++ ], ++ importpath = "golang.org/x/tools/go/analysis/passes/modernize/testdata/src/stringscutprefix/bytescutprefix", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":bytescutprefix", ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/analysis/passes/modernize/testdata/src/testingcontext/BUILD.bazel c/go/analysis/passes/modernize/testdata/src/testingcontext/BUILD.bazel +--- b/go/analysis/passes/modernize/testdata/src/testingcontext/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/modernize/testdata/src/testingcontext/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,20 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test") ++ ++go_library( ++ name = "testingcontext", ++ srcs = ["testingcontext.go"], ++ importpath = "golang.org/x/tools/go/analysis/passes/modernize/testdata/src/testingcontext", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":testingcontext", ++ visibility = ["//visibility:public"], ++) ++ ++go_test( ++ name = "testingcontext_test", ++ srcs = ["testingcontext_test.go"], ++ embed = [":testingcontext"], ++) +diff -urN b/go/analysis/passes/modernize/testdata/src/waitgroup/BUILD.bazel c/go/analysis/passes/modernize/testdata/src/waitgroup/BUILD.bazel +--- b/go/analysis/passes/modernize/testdata/src/waitgroup/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/modernize/testdata/src/waitgroup/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,18 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "waitgroup", ++ srcs = [ ++ "waitgroup.go", ++ "waitgroup_alias.go", ++ "waitgroup_dot.go", ++ ], ++ importpath = "golang.org/x/tools/go/analysis/passes/modernize/testdata/src/waitgroup", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":waitgroup", ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/analysis/passes/nilfunc/BUILD.bazel c/go/analysis/passes/nilfunc/BUILD.bazel +--- b/go/analysis/passes/nilfunc/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/nilfunc/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,34 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test") ++ 
++go_library( ++ name = "nilfunc", ++ srcs = [ ++ "doc.go", ++ "nilfunc.go", ++ ], ++ embedsrcs = ["doc.go"], ++ importpath = "golang.org/x/tools/go/analysis/passes/nilfunc", ++ visibility = ["//visibility:public"], ++ deps = [ ++ "//go/analysis", ++ "//go/analysis/passes/inspect", ++ "//go/analysis/passes/internal/analysisutil", ++ "//go/ast/inspector", ++ "//internal/typesinternal", ++ ], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":nilfunc", ++ visibility = ["//visibility:public"], ++) ++ ++go_test( ++ name = "nilfunc_test", ++ srcs = ["nilfunc_test.go"], ++ deps = [ ++ ":nilfunc", ++ "//go/analysis/analysistest", ++ ], ++) +diff -urN b/go/analysis/passes/nilfunc/testdata/src/a/BUILD.bazel c/go/analysis/passes/nilfunc/testdata/src/a/BUILD.bazel +--- b/go/analysis/passes/nilfunc/testdata/src/a/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/nilfunc/testdata/src/a/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "a", ++ srcs = ["a.go"], ++ importpath = "golang.org/x/tools/go/analysis/passes/nilfunc/testdata/src/a", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":a", ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/analysis/passes/nilfunc/testdata/src/typeparams/BUILD.bazel c/go/analysis/passes/nilfunc/testdata/src/typeparams/BUILD.bazel +--- b/go/analysis/passes/nilfunc/testdata/src/typeparams/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/nilfunc/testdata/src/typeparams/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "typeparams", ++ srcs = ["typeparams.go"], ++ importpath = "golang.org/x/tools/go/analysis/passes/nilfunc/testdata/src/typeparams", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":typeparams", ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/analysis/passes/nilness/BUILD.bazel c/go/analysis/passes/nilness/BUILD.bazel +--- b/go/analysis/passes/nilness/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/nilness/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,34 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test") ++ ++go_library( ++ name = "nilness", ++ srcs = [ ++ "doc.go", ++ "nilness.go", ++ ], ++ embedsrcs = ["doc.go"], ++ importpath = "golang.org/x/tools/go/analysis/passes/nilness", ++ visibility = ["//visibility:public"], ++ deps = [ ++ "//go/analysis", ++ "//go/analysis/passes/buildssa", ++ "//go/analysis/passes/internal/analysisutil", ++ "//go/ssa", ++ "//internal/typeparams", ++ ], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":nilness", ++ visibility = ["//visibility:public"], ++) ++ ++go_test( ++ name = "nilness_test", ++ srcs = ["nilness_test.go"], ++ deps = [ ++ ":nilness", ++ "//go/analysis/analysistest", ++ ], ++) +diff -urN b/go/analysis/passes/nilness/cmd/nilness/BUILD.bazel c/go/analysis/passes/nilness/cmd/nilness/BUILD.bazel +--- b/go/analysis/passes/nilness/cmd/nilness/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/nilness/cmd/nilness/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,18 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_binary", "go_library") ++ ++go_library( ++ name = "nilness_lib", ++ srcs = ["main.go"], ++ importpath = 
"golang.org/x/tools/go/analysis/passes/nilness/cmd/nilness", ++ visibility = ["//visibility:private"], ++ deps = [ ++ "//go/analysis/passes/nilness", ++ "//go/analysis/singlechecker", ++ ], ++) ++ ++go_binary( ++ name = "nilness", ++ embed = [":nilness_lib"], ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/analysis/passes/nilness/testdata/src/a/BUILD.bazel c/go/analysis/passes/nilness/testdata/src/a/BUILD.bazel +--- b/go/analysis/passes/nilness/testdata/src/a/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/nilness/testdata/src/a/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "a", ++ srcs = ["a.go"], ++ importpath = "golang.org/x/tools/go/analysis/passes/nilness/testdata/src/a", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":a", ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/analysis/passes/nilness/testdata/src/b/BUILD.bazel c/go/analysis/passes/nilness/testdata/src/b/BUILD.bazel +--- b/go/analysis/passes/nilness/testdata/src/b/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/nilness/testdata/src/b/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "b", ++ srcs = ["b.go"], ++ importpath = "golang.org/x/tools/go/analysis/passes/nilness/testdata/src/b", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":b", ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/analysis/passes/nilness/testdata/src/c/BUILD.bazel c/go/analysis/passes/nilness/testdata/src/c/BUILD.bazel +--- b/go/analysis/passes/nilness/testdata/src/c/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/nilness/testdata/src/c/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "c", ++ srcs = ["c.go"], ++ importpath = "golang.org/x/tools/go/analysis/passes/nilness/testdata/src/c", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":c", ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/analysis/passes/nilness/testdata/src/d/BUILD.bazel c/go/analysis/passes/nilness/testdata/src/d/BUILD.bazel +--- b/go/analysis/passes/nilness/testdata/src/d/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/nilness/testdata/src/d/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "d", ++ srcs = ["d.go"], ++ importpath = "golang.org/x/tools/go/analysis/passes/nilness/testdata/src/d", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":d", ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/analysis/passes/pkgfact/BUILD.bazel c/go/analysis/passes/pkgfact/BUILD.bazel +--- b/go/analysis/passes/pkgfact/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/pkgfact/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,24 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test") ++ ++go_library( ++ name = "pkgfact", ++ srcs = ["pkgfact.go"], ++ importpath = "golang.org/x/tools/go/analysis/passes/pkgfact", ++ visibility = ["//visibility:public"], ++ deps = ["//go/analysis"], ++) ++ 
++alias( ++ name = "go_default_library", ++ actual = ":pkgfact", ++ visibility = ["//visibility:public"], ++) ++ ++go_test( ++ name = "pkgfact_test", ++ srcs = ["pkgfact_test.go"], ++ deps = [ ++ ":pkgfact", ++ "//go/analysis/analysistest", ++ ], ++) +diff -urN b/go/analysis/passes/pkgfact/testdata/src/a/BUILD.bazel c/go/analysis/passes/pkgfact/testdata/src/a/BUILD.bazel +--- b/go/analysis/passes/pkgfact/testdata/src/a/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/pkgfact/testdata/src/a/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "a", ++ srcs = ["a.go"], ++ importpath = "golang.org/x/tools/go/analysis/passes/pkgfact/testdata/src/a", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":a", ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/analysis/passes/pkgfact/testdata/src/b/BUILD.bazel c/go/analysis/passes/pkgfact/testdata/src/b/BUILD.bazel +--- b/go/analysis/passes/pkgfact/testdata/src/b/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/pkgfact/testdata/src/b/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "b", ++ srcs = ["b.go"], ++ importpath = "golang.org/x/tools/go/analysis/passes/pkgfact/testdata/src/b", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":b", ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/analysis/passes/pkgfact/testdata/src/c/BUILD.bazel c/go/analysis/passes/pkgfact/testdata/src/c/BUILD.bazel +--- b/go/analysis/passes/pkgfact/testdata/src/c/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/pkgfact/testdata/src/c/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "c", ++ srcs = ["c.go"], ++ importpath = "golang.org/x/tools/go/analysis/passes/pkgfact/testdata/src/c", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":c", ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/analysis/passes/printf/BUILD.bazel c/go/analysis/passes/printf/BUILD.bazel +--- b/go/analysis/passes/printf/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/printf/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,44 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test") ++ ++go_library( ++ name = "printf", ++ srcs = [ ++ "doc.go", ++ "printf.go", ++ "types.go", ++ ], ++ embedsrcs = ["doc.go"], ++ importpath = "golang.org/x/tools/go/analysis/passes/printf", ++ visibility = ["//visibility:public"], ++ deps = [ ++ "//go/analysis", ++ "//go/analysis/passes/inspect", ++ "//go/analysis/passes/internal/analysisutil", ++ "//go/ast/edge", ++ "//go/ast/inspector", ++ "//go/types/typeutil", ++ "//internal/analysisinternal", ++ "//internal/astutil", ++ "//internal/fmtstr", ++ "//internal/typeparams", ++ "//internal/typesinternal", ++ "//internal/versions", ++ ], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":printf", ++ visibility = ["//visibility:public"], ++) ++ ++go_test( ++ name = "printf_test", ++ srcs = ["printf_test.go"], ++ deps = [ ++ ":printf", ++ "//go/analysis/analysistest", ++ "//internal/testenv", ++ "//internal/testfiles", ++ ], ++) +diff -urN 
b/go/analysis/passes/printf/testdata/src/a/BUILD.bazel c/go/analysis/passes/printf/testdata/src/a/BUILD.bazel +--- b/go/analysis/passes/printf/testdata/src/a/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/printf/testdata/src/a/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "a", ++ srcs = ["a.go"], ++ importpath = "golang.org/x/tools/go/analysis/passes/printf/testdata/src/a", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":a", ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/analysis/passes/printf/testdata/src/b/BUILD.bazel c/go/analysis/passes/printf/testdata/src/b/BUILD.bazel +--- b/go/analysis/passes/printf/testdata/src/b/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/printf/testdata/src/b/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "b", ++ srcs = ["b.go"], ++ importpath = "golang.org/x/tools/go/analysis/passes/printf/testdata/src/b", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":b", ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/analysis/passes/printf/testdata/src/issue68744/BUILD.bazel c/go/analysis/passes/printf/testdata/src/issue68744/BUILD.bazel +--- b/go/analysis/passes/printf/testdata/src/issue68744/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/printf/testdata/src/issue68744/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "issue68744", ++ srcs = ["issue68744.go"], ++ importpath = "golang.org/x/tools/go/analysis/passes/printf/testdata/src/issue68744", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":issue68744", ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/analysis/passes/printf/testdata/src/issue70572/BUILD.bazel c/go/analysis/passes/printf/testdata/src/issue70572/BUILD.bazel +--- b/go/analysis/passes/printf/testdata/src/issue70572/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/printf/testdata/src/issue70572/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "issue70572", ++ srcs = ["issue70572.go"], ++ importpath = "golang.org/x/tools/go/analysis/passes/printf/testdata/src/issue70572", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":issue70572", ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/analysis/passes/printf/testdata/src/nofmt/BUILD.bazel c/go/analysis/passes/printf/testdata/src/nofmt/BUILD.bazel +--- b/go/analysis/passes/printf/testdata/src/nofmt/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/printf/testdata/src/nofmt/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "nofmt", ++ srcs = ["nofmt.go"], ++ importpath = "golang.org/x/tools/go/analysis/passes/printf/testdata/src/nofmt", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":nofmt", ++ visibility = ["//visibility:public"], ++) +diff -urN 
b/go/analysis/passes/printf/testdata/src/nonconst/BUILD.bazel c/go/analysis/passes/printf/testdata/src/nonconst/BUILD.bazel +--- b/go/analysis/passes/printf/testdata/src/nonconst/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/printf/testdata/src/nonconst/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "nonconst", ++ srcs = ["nonconst.go"], ++ importpath = "golang.org/x/tools/go/analysis/passes/printf/testdata/src/nonconst", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":nonconst", ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/analysis/passes/printf/testdata/src/typeparams/BUILD.bazel c/go/analysis/passes/printf/testdata/src/typeparams/BUILD.bazel +--- b/go/analysis/passes/printf/testdata/src/typeparams/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/printf/testdata/src/typeparams/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,17 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "typeparams", ++ srcs = [ ++ "diagnostics.go", ++ "wrappers.go", ++ ], ++ importpath = "golang.org/x/tools/go/analysis/passes/printf/testdata/src/typeparams", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":typeparams", ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/analysis/passes/reflectvaluecompare/BUILD.bazel c/go/analysis/passes/reflectvaluecompare/BUILD.bazel +--- b/go/analysis/passes/reflectvaluecompare/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/reflectvaluecompare/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,35 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test") ++ ++go_library( ++ name = "reflectvaluecompare", ++ srcs = [ ++ "doc.go", ++ "reflectvaluecompare.go", ++ ], ++ embedsrcs = ["doc.go"], ++ importpath = "golang.org/x/tools/go/analysis/passes/reflectvaluecompare", ++ visibility = ["//visibility:public"], ++ deps = [ ++ "//go/analysis", ++ "//go/analysis/passes/inspect", ++ "//go/analysis/passes/internal/analysisutil", ++ "//go/ast/inspector", ++ "//go/types/typeutil", ++ "//internal/typesinternal", ++ ], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":reflectvaluecompare", ++ visibility = ["//visibility:public"], ++) ++ ++go_test( ++ name = "reflectvaluecompare_test", ++ srcs = ["reflectvaluecompare_test.go"], ++ deps = [ ++ ":reflectvaluecompare", ++ "//go/analysis/analysistest", ++ ], ++) +diff -urN b/go/analysis/passes/reflectvaluecompare/cmd/reflectvaluecompare/BUILD.bazel c/go/analysis/passes/reflectvaluecompare/cmd/reflectvaluecompare/BUILD.bazel +--- b/go/analysis/passes/reflectvaluecompare/cmd/reflectvaluecompare/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/reflectvaluecompare/cmd/reflectvaluecompare/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,18 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_binary", "go_library") ++ ++go_library( ++ name = "reflectvaluecompare_lib", ++ srcs = ["main.go"], ++ importpath = "golang.org/x/tools/go/analysis/passes/reflectvaluecompare/cmd/reflectvaluecompare", ++ visibility = ["//visibility:private"], ++ deps = [ ++ "//go/analysis/passes/reflectvaluecompare", ++ "//go/analysis/singlechecker", ++ ], ++) ++ ++go_binary( ++ name = "reflectvaluecompare", ++ embed = [":reflectvaluecompare_lib"], ++ visibility = 
["//visibility:public"], ++) +diff -urN b/go/analysis/passes/reflectvaluecompare/testdata/src/a/BUILD.bazel c/go/analysis/passes/reflectvaluecompare/testdata/src/a/BUILD.bazel +--- b/go/analysis/passes/reflectvaluecompare/testdata/src/a/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/reflectvaluecompare/testdata/src/a/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "a", ++ srcs = ["a.go"], ++ importpath = "golang.org/x/tools/go/analysis/passes/reflectvaluecompare/testdata/src/a", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":a", ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/analysis/passes/shadow/BUILD.bazel c/go/analysis/passes/shadow/BUILD.bazel +--- b/go/analysis/passes/shadow/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/shadow/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,33 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test") ++ ++go_library( ++ name = "shadow", ++ srcs = [ ++ "doc.go", ++ "shadow.go", ++ ], ++ embedsrcs = ["doc.go"], ++ importpath = "golang.org/x/tools/go/analysis/passes/shadow", ++ visibility = ["//visibility:public"], ++ deps = [ ++ "//go/analysis", ++ "//go/analysis/passes/inspect", ++ "//go/analysis/passes/internal/analysisutil", ++ "//go/ast/inspector", ++ ], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":shadow", ++ visibility = ["//visibility:public"], ++) ++ ++go_test( ++ name = "shadow_test", ++ srcs = ["shadow_test.go"], ++ deps = [ ++ ":shadow", ++ "//go/analysis/analysistest", ++ ], ++) +diff -urN b/go/analysis/passes/shadow/cmd/shadow/BUILD.bazel c/go/analysis/passes/shadow/cmd/shadow/BUILD.bazel +--- b/go/analysis/passes/shadow/cmd/shadow/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/shadow/cmd/shadow/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,18 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_binary", "go_library") ++ ++go_library( ++ name = "shadow_lib", ++ srcs = ["main.go"], ++ importpath = "golang.org/x/tools/go/analysis/passes/shadow/cmd/shadow", ++ visibility = ["//visibility:private"], ++ deps = [ ++ "//go/analysis/passes/shadow", ++ "//go/analysis/singlechecker", ++ ], ++) ++ ++go_binary( ++ name = "shadow", ++ embed = [":shadow_lib"], ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/analysis/passes/shadow/testdata/src/a/BUILD.bazel c/go/analysis/passes/shadow/testdata/src/a/BUILD.bazel +--- b/go/analysis/passes/shadow/testdata/src/a/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/shadow/testdata/src/a/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "a", ++ srcs = ["a.go"], ++ importpath = "golang.org/x/tools/go/analysis/passes/shadow/testdata/src/a", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":a", ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/analysis/passes/shift/BUILD.bazel c/go/analysis/passes/shift/BUILD.bazel +--- b/go/analysis/passes/shift/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/shift/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,33 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test") ++ ++go_library( ++ name = "shift", ++ srcs = [ ++ "dead.go", ++ 
"shift.go", ++ ], ++ importpath = "golang.org/x/tools/go/analysis/passes/shift", ++ visibility = ["//visibility:public"], ++ deps = [ ++ "//go/analysis", ++ "//go/analysis/passes/inspect", ++ "//go/ast/inspector", ++ "//internal/astutil", ++ "//internal/typeparams", ++ ], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":shift", ++ visibility = ["//visibility:public"], ++) ++ ++go_test( ++ name = "shift_test", ++ srcs = ["shift_test.go"], ++ deps = [ ++ ":shift", ++ "//go/analysis/analysistest", ++ ], ++) +diff -urN b/go/analysis/passes/shift/testdata/src/a/BUILD.bazel c/go/analysis/passes/shift/testdata/src/a/BUILD.bazel +--- b/go/analysis/passes/shift/testdata/src/a/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/shift/testdata/src/a/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "a", ++ srcs = ["a.go"], ++ importpath = "golang.org/x/tools/go/analysis/passes/shift/testdata/src/a", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":a", ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/analysis/passes/shift/testdata/src/typeparams/BUILD.bazel c/go/analysis/passes/shift/testdata/src/typeparams/BUILD.bazel +--- b/go/analysis/passes/shift/testdata/src/typeparams/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/shift/testdata/src/typeparams/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "typeparams", ++ srcs = ["typeparams.go"], ++ importpath = "golang.org/x/tools/go/analysis/passes/shift/testdata/src/typeparams", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":typeparams", ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/analysis/passes/sigchanyzer/BUILD.bazel c/go/analysis/passes/sigchanyzer/BUILD.bazel +--- b/go/analysis/passes/sigchanyzer/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/sigchanyzer/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,34 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test") ++ ++go_library( ++ name = "sigchanyzer", ++ srcs = [ ++ "doc.go", ++ "sigchanyzer.go", ++ ], ++ embedsrcs = ["doc.go"], ++ importpath = "golang.org/x/tools/go/analysis/passes/sigchanyzer", ++ visibility = ["//visibility:public"], ++ deps = [ ++ "//go/analysis", ++ "//go/analysis/passes/inspect", ++ "//go/analysis/passes/internal/analysisutil", ++ "//go/ast/inspector", ++ "//internal/typesinternal", ++ ], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":sigchanyzer", ++ visibility = ["//visibility:public"], ++) ++ ++go_test( ++ name = "sigchanyzer_test", ++ srcs = ["sigchanyzer_test.go"], ++ deps = [ ++ ":sigchanyzer", ++ "//go/analysis/analysistest", ++ ], ++) +diff -urN b/go/analysis/passes/sigchanyzer/testdata/src/a/BUILD.bazel c/go/analysis/passes/sigchanyzer/testdata/src/a/BUILD.bazel +--- b/go/analysis/passes/sigchanyzer/testdata/src/a/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/sigchanyzer/testdata/src/a/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "a", ++ srcs = ["a.go"], ++ importpath = "golang.org/x/tools/go/analysis/passes/sigchanyzer/testdata/src/a", ++ visibility = ["//visibility:public"], ++) ++ 
++alias( ++ name = "go_default_library", ++ actual = ":a", ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/analysis/passes/slog/BUILD.bazel c/go/analysis/passes/slog/BUILD.bazel +--- b/go/analysis/passes/slog/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/slog/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,34 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test") ++ ++go_library( ++ name = "slog", ++ srcs = [ ++ "doc.go", ++ "slog.go", ++ ], ++ embedsrcs = ["doc.go"], ++ importpath = "golang.org/x/tools/go/analysis/passes/slog", ++ visibility = ["//visibility:public"], ++ deps = [ ++ "//go/analysis", ++ "//go/analysis/passes/inspect", ++ "//go/analysis/passes/internal/analysisutil", ++ "//go/ast/inspector", ++ "//go/types/typeutil", ++ "//internal/astutil", ++ "//internal/typesinternal", ++ ], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":slog", ++ visibility = ["//visibility:public"], ++) ++ ++go_test( ++ name = "slog_test", ++ srcs = ["slog_test.go"], ++ embed = [":slog"], ++ deps = ["//go/analysis/analysistest"], ++) +diff -urN b/go/analysis/passes/slog/testdata/src/a/BUILD.bazel c/go/analysis/passes/slog/testdata/src/a/BUILD.bazel +--- b/go/analysis/passes/slog/testdata/src/a/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/slog/testdata/src/a/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "a", ++ srcs = ["a.go"], ++ importpath = "golang.org/x/tools/go/analysis/passes/slog/testdata/src/a", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":a", ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/analysis/passes/slog/testdata/src/b/BUILD.bazel c/go/analysis/passes/slog/testdata/src/b/BUILD.bazel +--- b/go/analysis/passes/slog/testdata/src/b/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/slog/testdata/src/b/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "b", ++ srcs = ["b.go"], ++ importpath = "golang.org/x/tools/go/analysis/passes/slog/testdata/src/b", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":b", ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/analysis/passes/sortslice/BUILD.bazel c/go/analysis/passes/sortslice/BUILD.bazel +--- b/go/analysis/passes/sortslice/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/sortslice/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,30 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test") ++ ++go_library( ++ name = "sortslice", ++ srcs = ["analyzer.go"], ++ importpath = "golang.org/x/tools/go/analysis/passes/sortslice", ++ visibility = ["//visibility:public"], ++ deps = [ ++ "//go/analysis", ++ "//go/analysis/passes/inspect", ++ "//go/ast/inspector", ++ "//go/types/typeutil", ++ "//internal/typesinternal", ++ ], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":sortslice", ++ visibility = ["//visibility:public"], ++) ++ ++go_test( ++ name = "sortslice_test", ++ srcs = ["analyzer_test.go"], ++ deps = [ ++ ":sortslice", ++ "//go/analysis/analysistest", ++ ], ++) +diff -urN b/go/analysis/passes/sortslice/testdata/src/a/BUILD.bazel c/go/analysis/passes/sortslice/testdata/src/a/BUILD.bazel +--- 
b/go/analysis/passes/sortslice/testdata/src/a/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/sortslice/testdata/src/a/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "a", ++ srcs = ["a.go"], ++ importpath = "golang.org/x/tools/go/analysis/passes/sortslice/testdata/src/a", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":a", ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/analysis/passes/stdmethods/BUILD.bazel c/go/analysis/passes/stdmethods/BUILD.bazel +--- b/go/analysis/passes/stdmethods/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/stdmethods/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,33 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test") ++ ++go_library( ++ name = "stdmethods", ++ srcs = [ ++ "doc.go", ++ "stdmethods.go", ++ ], ++ embedsrcs = ["doc.go"], ++ importpath = "golang.org/x/tools/go/analysis/passes/stdmethods", ++ visibility = ["//visibility:public"], ++ deps = [ ++ "//go/analysis", ++ "//go/analysis/passes/inspect", ++ "//go/analysis/passes/internal/analysisutil", ++ "//go/ast/inspector", ++ ], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":stdmethods", ++ visibility = ["//visibility:public"], ++) ++ ++go_test( ++ name = "stdmethods_test", ++ srcs = ["stdmethods_test.go"], ++ deps = [ ++ ":stdmethods", ++ "//go/analysis/analysistest", ++ ], ++) +diff -urN b/go/analysis/passes/stdmethods/testdata/src/a/BUILD.bazel c/go/analysis/passes/stdmethods/testdata/src/a/BUILD.bazel +--- b/go/analysis/passes/stdmethods/testdata/src/a/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/stdmethods/testdata/src/a/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,17 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "a", ++ srcs = [ ++ "a.go", ++ "b.go", ++ ], ++ importpath = "golang.org/x/tools/go/analysis/passes/stdmethods/testdata/src/a", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":a", ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/analysis/passes/stdmethods/testdata/src/typeparams/BUILD.bazel c/go/analysis/passes/stdmethods/testdata/src/typeparams/BUILD.bazel +--- b/go/analysis/passes/stdmethods/testdata/src/typeparams/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/stdmethods/testdata/src/typeparams/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "typeparams", ++ srcs = ["typeparams.go"], ++ importpath = "golang.org/x/tools/go/analysis/passes/stdmethods/testdata/src/typeparams", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":typeparams", ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/analysis/passes/stdversion/BUILD.bazel c/go/analysis/passes/stdversion/BUILD.bazel +--- b/go/analysis/passes/stdversion/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/stdversion/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,33 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test") ++ ++go_library( ++ name = "stdversion", ++ srcs = ["stdversion.go"], ++ importpath = "golang.org/x/tools/go/analysis/passes/stdversion", ++ visibility = ["//visibility:public"], ++ 
deps = [ ++ "//go/analysis", ++ "//go/analysis/passes/inspect", ++ "//go/ast/inspector", ++ "//internal/typesinternal", ++ "//internal/versions", ++ ], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":stdversion", ++ visibility = ["//visibility:public"], ++) ++ ++go_test( ++ name = "stdversion_test", ++ srcs = ["stdversion_test.go"], ++ data = glob(["testdata/**"]), ++ deps = [ ++ ":stdversion", ++ "//go/analysis/analysistest", ++ "//internal/testenv", ++ "//internal/testfiles", ++ ], ++) +diff -urN b/go/analysis/passes/stringintconv/BUILD.bazel c/go/analysis/passes/stringintconv/BUILD.bazel +--- b/go/analysis/passes/stringintconv/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/stringintconv/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,36 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test") ++ ++go_library( ++ name = "stringintconv", ++ srcs = [ ++ "doc.go", ++ "string.go", ++ ], ++ embedsrcs = ["doc.go"], ++ importpath = "golang.org/x/tools/go/analysis/passes/stringintconv", ++ visibility = ["//visibility:public"], ++ deps = [ ++ "//go/analysis", ++ "//go/analysis/passes/inspect", ++ "//go/analysis/passes/internal/analysisutil", ++ "//go/ast/inspector", ++ "//internal/refactor", ++ "//internal/typeparams", ++ "//internal/typesinternal", ++ ], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":stringintconv", ++ visibility = ["//visibility:public"], ++) ++ ++go_test( ++ name = "stringintconv_test", ++ srcs = ["string_test.go"], ++ deps = [ ++ ":stringintconv", ++ "//go/analysis/analysistest", ++ ], ++) +diff -urN b/go/analysis/passes/stringintconv/cmd/stringintconv/BUILD.bazel c/go/analysis/passes/stringintconv/cmd/stringintconv/BUILD.bazel +--- b/go/analysis/passes/stringintconv/cmd/stringintconv/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/stringintconv/cmd/stringintconv/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,18 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_binary", "go_library") ++ ++go_library( ++ name = "stringintconv_lib", ++ srcs = ["main.go"], ++ importpath = "golang.org/x/tools/go/analysis/passes/stringintconv/cmd/stringintconv", ++ visibility = ["//visibility:private"], ++ deps = [ ++ "//go/analysis/passes/stringintconv", ++ "//go/analysis/singlechecker", ++ ], ++) ++ ++go_binary( ++ name = "stringintconv", ++ embed = [":stringintconv_lib"], ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/analysis/passes/stringintconv/testdata/src/a/BUILD.bazel c/go/analysis/passes/stringintconv/testdata/src/a/BUILD.bazel +--- b/go/analysis/passes/stringintconv/testdata/src/a/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/stringintconv/testdata/src/a/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "a", ++ srcs = ["a.go"], ++ importpath = "golang.org/x/tools/go/analysis/passes/stringintconv/testdata/src/a", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":a", ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/analysis/passes/stringintconv/testdata/src/fix/BUILD.bazel c/go/analysis/passes/stringintconv/testdata/src/fix/BUILD.bazel +--- b/go/analysis/passes/stringintconv/testdata/src/fix/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/stringintconv/testdata/src/fix/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,18 @@ 
++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "fix", ++ srcs = [ ++ "fix.go", ++ "fixdot.go", ++ "fixnamed.go", ++ ], ++ importpath = "golang.org/x/tools/go/analysis/passes/stringintconv/testdata/src/fix", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":fix", ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/analysis/passes/stringintconv/testdata/src/typeparams/BUILD.bazel c/go/analysis/passes/stringintconv/testdata/src/typeparams/BUILD.bazel +--- b/go/analysis/passes/stringintconv/testdata/src/typeparams/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/stringintconv/testdata/src/typeparams/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "typeparams", ++ srcs = ["typeparams.go"], ++ importpath = "golang.org/x/tools/go/analysis/passes/stringintconv/testdata/src/typeparams", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":typeparams", ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/analysis/passes/structtag/BUILD.bazel c/go/analysis/passes/structtag/BUILD.bazel +--- b/go/analysis/passes/structtag/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/structtag/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,28 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test") ++ ++go_library( ++ name = "structtag", ++ srcs = ["structtag.go"], ++ importpath = "golang.org/x/tools/go/analysis/passes/structtag", ++ visibility = ["//visibility:public"], ++ deps = [ ++ "//go/analysis", ++ "//go/analysis/passes/inspect", ++ "//go/ast/inspector", ++ ], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":structtag", ++ visibility = ["//visibility:public"], ++) ++ ++go_test( ++ name = "structtag_test", ++ srcs = ["structtag_test.go"], ++ deps = [ ++ ":structtag", ++ "//go/analysis/analysistest", ++ ], ++) +diff -urN b/go/analysis/passes/structtag/testdata/src/a/b/BUILD.bazel c/go/analysis/passes/structtag/testdata/src/a/b/BUILD.bazel +--- b/go/analysis/passes/structtag/testdata/src/a/b/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/structtag/testdata/src/a/b/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "b", ++ srcs = ["b.go"], ++ importpath = "golang.org/x/tools/go/analysis/passes/structtag/testdata/src/a/b", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":b", ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/analysis/passes/structtag/testdata/src/a/BUILD.bazel c/go/analysis/passes/structtag/testdata/src/a/BUILD.bazel +--- b/go/analysis/passes/structtag/testdata/src/a/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/structtag/testdata/src/a/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "a", ++ srcs = ["a.go"], ++ importpath = "golang.org/x/tools/go/analysis/passes/structtag/testdata/src/a", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":a", ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/analysis/passes/testinggoroutine/BUILD.bazel c/go/analysis/passes/testinggoroutine/BUILD.bazel +--- 
b/go/analysis/passes/testinggoroutine/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/testinggoroutine/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,36 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test") ++ ++go_library( ++ name = "testinggoroutine", ++ srcs = [ ++ "doc.go", ++ "testinggoroutine.go", ++ "util.go", ++ ], ++ embedsrcs = ["doc.go"], ++ importpath = "golang.org/x/tools/go/analysis/passes/testinggoroutine", ++ visibility = ["//visibility:public"], ++ deps = [ ++ "//go/analysis", ++ "//go/analysis/passes/inspect", ++ "//go/analysis/passes/internal/analysisutil", ++ "//go/ast/inspector", ++ "//go/types/typeutil", ++ "//internal/typesinternal", ++ ], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":testinggoroutine", ++ visibility = ["//visibility:public"], ++) ++ ++go_test( ++ name = "testinggoroutine_test", ++ srcs = ["testinggoroutine_test.go"], ++ deps = [ ++ ":testinggoroutine", ++ "//go/analysis/analysistest", ++ ], ++) +diff -urN b/go/analysis/passes/testinggoroutine/testdata/src/a/BUILD.bazel c/go/analysis/passes/testinggoroutine/testdata/src/a/BUILD.bazel +--- b/go/analysis/passes/testinggoroutine/testdata/src/a/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/testinggoroutine/testdata/src/a/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,17 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "a", ++ srcs = [ ++ "a.go", ++ "b.go", ++ ], ++ importpath = "golang.org/x/tools/go/analysis/passes/testinggoroutine/testdata/src/a", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":a", ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/analysis/passes/testinggoroutine/testdata/src/typeparams/BUILD.bazel c/go/analysis/passes/testinggoroutine/testdata/src/typeparams/BUILD.bazel +--- b/go/analysis/passes/testinggoroutine/testdata/src/typeparams/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/testinggoroutine/testdata/src/typeparams/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "typeparams", ++ srcs = ["typeparams.go"], ++ importpath = "golang.org/x/tools/go/analysis/passes/testinggoroutine/testdata/src/typeparams", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":typeparams", ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/analysis/passes/tests/BUILD.bazel c/go/analysis/passes/tests/BUILD.bazel +--- b/go/analysis/passes/tests/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/tests/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,33 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test") ++ ++go_library( ++ name = "tests", ++ srcs = [ ++ "doc.go", ++ "tests.go", ++ ], ++ embedsrcs = ["doc.go"], ++ importpath = "golang.org/x/tools/go/analysis/passes/tests", ++ visibility = ["//visibility:public"], ++ deps = [ ++ "//go/analysis", ++ "//go/analysis/passes/internal/analysisutil", ++ "//internal/analysisinternal", ++ "//internal/typesinternal", ++ ], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":tests", ++ visibility = ["//visibility:public"], ++) ++ ++go_test( ++ name = "tests_test", ++ srcs = ["tests_test.go"], ++ deps = [ ++ ":tests", ++ "//go/analysis/analysistest", ++ ], ++) +diff -urN 
b/go/analysis/passes/tests/testdata/src/a/BUILD.bazel c/go/analysis/passes/tests/testdata/src/a/BUILD.bazel +--- b/go/analysis/passes/tests/testdata/src/a/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/tests/testdata/src/a/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,24 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test") ++ ++go_library( ++ name = "a", ++ srcs = ["a.go"], ++ importpath = "golang.org/x/tools/go/analysis/passes/tests/testdata/src/a", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":a", ++ visibility = ["//visibility:public"], ++) ++ ++go_test( ++ name = "a_test", ++ srcs = [ ++ "a_test.go", ++ "ax_test.go", ++ "go118_test.go", ++ ], ++ embed = [":a"], ++) +diff -urN b/go/analysis/passes/tests/testdata/src/b/BUILD.bazel c/go/analysis/passes/tests/testdata/src/b/BUILD.bazel +--- b/go/analysis/passes/tests/testdata/src/b/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/tests/testdata/src/b/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "b", ++ srcs = ["b.go"], ++ importpath = "golang.org/x/tools/go/analysis/passes/tests/testdata/src/b", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":b", ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/analysis/passes/tests/testdata/src/b_x_test/BUILD.bazel c/go/analysis/passes/tests/testdata/src/b_x_test/BUILD.bazel +--- b/go/analysis/passes/tests/testdata/src/b_x_test/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/tests/testdata/src/b_x_test/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,6 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_test") ++ ++go_test( ++ name = "b_x_test_test", ++ srcs = ["b_test.go"], ++) +diff -urN b/go/analysis/passes/tests/testdata/src/divergent/BUILD.bazel c/go/analysis/passes/tests/testdata/src/divergent/BUILD.bazel +--- b/go/analysis/passes/tests/testdata/src/divergent/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/tests/testdata/src/divergent/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,20 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test") ++ ++go_library( ++ name = "divergent", ++ srcs = ["buf.go"], ++ importpath = "golang.org/x/tools/go/analysis/passes/tests/testdata/src/divergent", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":divergent", ++ visibility = ["//visibility:public"], ++) ++ ++go_test( ++ name = "divergent_test", ++ srcs = ["buf_test.go"], ++ embed = [":divergent"], ++) +diff -urN b/go/analysis/passes/tests/testdata/src/typeparams/BUILD.bazel c/go/analysis/passes/tests/testdata/src/typeparams/BUILD.bazel +--- b/go/analysis/passes/tests/testdata/src/typeparams/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/tests/testdata/src/typeparams/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,20 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test") ++ ++go_library( ++ name = "typeparams", ++ srcs = ["typeparams.go"], ++ importpath = "golang.org/x/tools/go/analysis/passes/tests/testdata/src/typeparams", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":typeparams", ++ visibility = ["//visibility:public"], ++) ++ ++go_test( ++ name = "typeparams_test", 
++ srcs = ["typeparams_test.go"], ++ embed = [":typeparams"], ++) +diff -urN b/go/analysis/passes/timeformat/BUILD.bazel c/go/analysis/passes/timeformat/BUILD.bazel +--- b/go/analysis/passes/timeformat/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/timeformat/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,35 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test") ++ ++go_library( ++ name = "timeformat", ++ srcs = [ ++ "doc.go", ++ "timeformat.go", ++ ], ++ embedsrcs = ["doc.go"], ++ importpath = "golang.org/x/tools/go/analysis/passes/timeformat", ++ visibility = ["//visibility:public"], ++ deps = [ ++ "//go/analysis", ++ "//go/analysis/passes/inspect", ++ "//go/analysis/passes/internal/analysisutil", ++ "//go/ast/inspector", ++ "//go/types/typeutil", ++ "//internal/typesinternal", ++ ], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":timeformat", ++ visibility = ["//visibility:public"], ++) ++ ++go_test( ++ name = "timeformat_test", ++ srcs = ["timeformat_test.go"], ++ deps = [ ++ ":timeformat", ++ "//go/analysis/analysistest", ++ ], ++) +diff -urN b/go/analysis/passes/timeformat/testdata/src/a/BUILD.bazel c/go/analysis/passes/timeformat/testdata/src/a/BUILD.bazel +--- b/go/analysis/passes/timeformat/testdata/src/a/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/timeformat/testdata/src/a/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "a", ++ srcs = ["a.go"], ++ importpath = "golang.org/x/tools/go/analysis/passes/timeformat/testdata/src/a", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":a", ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/analysis/passes/timeformat/testdata/src/b/BUILD.bazel c/go/analysis/passes/timeformat/testdata/src/b/BUILD.bazel +--- b/go/analysis/passes/timeformat/testdata/src/b/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/timeformat/testdata/src/b/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "b", ++ srcs = ["b.go"], ++ importpath = "golang.org/x/tools/go/analysis/passes/timeformat/testdata/src/b", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":b", ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/analysis/passes/unmarshal/BUILD.bazel c/go/analysis/passes/unmarshal/BUILD.bazel +--- b/go/analysis/passes/unmarshal/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/unmarshal/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,35 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test") ++ ++go_library( ++ name = "unmarshal", ++ srcs = [ ++ "doc.go", ++ "unmarshal.go", ++ ], ++ embedsrcs = ["doc.go"], ++ importpath = "golang.org/x/tools/go/analysis/passes/unmarshal", ++ visibility = ["//visibility:public"], ++ deps = [ ++ "//go/analysis", ++ "//go/analysis/passes/inspect", ++ "//go/analysis/passes/internal/analysisutil", ++ "//go/ast/inspector", ++ "//go/types/typeutil", ++ "//internal/typesinternal", ++ ], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":unmarshal", ++ visibility = ["//visibility:public"], ++) ++ ++go_test( ++ name = "unmarshal_test", ++ srcs = ["unmarshal_test.go"], ++ deps = [ ++ ":unmarshal", ++ "//go/analysis/analysistest", ++ ], ++) +diff -urN 
b/go/analysis/passes/unmarshal/cmd/unmarshal/BUILD.bazel c/go/analysis/passes/unmarshal/cmd/unmarshal/BUILD.bazel +--- b/go/analysis/passes/unmarshal/cmd/unmarshal/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/unmarshal/cmd/unmarshal/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,18 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_binary", "go_library") ++ ++go_library( ++ name = "unmarshal_lib", ++ srcs = ["main.go"], ++ importpath = "golang.org/x/tools/go/analysis/passes/unmarshal/cmd/unmarshal", ++ visibility = ["//visibility:private"], ++ deps = [ ++ "//go/analysis/passes/unmarshal", ++ "//go/analysis/singlechecker", ++ ], ++) ++ ++go_binary( ++ name = "unmarshal", ++ embed = [":unmarshal_lib"], ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/analysis/passes/unmarshal/testdata/src/a/BUILD.bazel c/go/analysis/passes/unmarshal/testdata/src/a/BUILD.bazel +--- b/go/analysis/passes/unmarshal/testdata/src/a/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/unmarshal/testdata/src/a/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "a", ++ srcs = ["a.go"], ++ importpath = "golang.org/x/tools/go/analysis/passes/unmarshal/testdata/src/a", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":a", ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/analysis/passes/unmarshal/testdata/src/typeparams/BUILD.bazel c/go/analysis/passes/unmarshal/testdata/src/typeparams/BUILD.bazel +--- b/go/analysis/passes/unmarshal/testdata/src/typeparams/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/unmarshal/testdata/src/typeparams/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "typeparams", ++ srcs = ["typeparams.go"], ++ importpath = "golang.org/x/tools/go/analysis/passes/unmarshal/testdata/src/typeparams", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":typeparams", ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/analysis/passes/unreachable/BUILD.bazel c/go/analysis/passes/unreachable/BUILD.bazel +--- b/go/analysis/passes/unreachable/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/unreachable/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,33 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test") ++ ++go_library( ++ name = "unreachable", ++ srcs = [ ++ "doc.go", ++ "unreachable.go", ++ ], ++ embedsrcs = ["doc.go"], ++ importpath = "golang.org/x/tools/go/analysis/passes/unreachable", ++ visibility = ["//visibility:public"], ++ deps = [ ++ "//go/analysis", ++ "//go/analysis/passes/inspect", ++ "//go/analysis/passes/internal/analysisutil", ++ "//go/ast/inspector", ++ ], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":unreachable", ++ visibility = ["//visibility:public"], ++) ++ ++go_test( ++ name = "unreachable_test", ++ srcs = ["unreachable_test.go"], ++ deps = [ ++ ":unreachable", ++ "//go/analysis/analysistest", ++ ], ++) +diff -urN b/go/analysis/passes/unreachable/testdata/src/a/BUILD.bazel c/go/analysis/passes/unreachable/testdata/src/a/BUILD.bazel +--- b/go/analysis/passes/unreachable/testdata/src/a/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/unreachable/testdata/src/a/BUILD.bazel 2000-01-01 
00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "a", ++ srcs = ["a.go"], ++ importpath = "golang.org/x/tools/go/analysis/passes/unreachable/testdata/src/a", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":a", ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/analysis/passes/unsafeptr/BUILD.bazel c/go/analysis/passes/unsafeptr/BUILD.bazel +--- b/go/analysis/passes/unsafeptr/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/unsafeptr/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,34 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test") ++ ++go_library( ++ name = "unsafeptr", ++ srcs = [ ++ "doc.go", ++ "unsafeptr.go", ++ ], ++ embedsrcs = ["doc.go"], ++ importpath = "golang.org/x/tools/go/analysis/passes/unsafeptr", ++ visibility = ["//visibility:public"], ++ deps = [ ++ "//go/analysis", ++ "//go/analysis/passes/inspect", ++ "//go/analysis/passes/internal/analysisutil", ++ "//go/ast/inspector", ++ "//internal/typesinternal", ++ ], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":unsafeptr", ++ visibility = ["//visibility:public"], ++) ++ ++go_test( ++ name = "unsafeptr_test", ++ srcs = ["unsafeptr_test.go"], ++ deps = [ ++ ":unsafeptr", ++ "//go/analysis/analysistest", ++ ], ++) +diff -urN b/go/analysis/passes/unsafeptr/testdata/src/a/BUILD.bazel c/go/analysis/passes/unsafeptr/testdata/src/a/BUILD.bazel +--- b/go/analysis/passes/unsafeptr/testdata/src/a/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/unsafeptr/testdata/src/a/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,17 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "a", ++ srcs = [ ++ "a.go", ++ "issue40701.go", ++ ], ++ importpath = "golang.org/x/tools/go/analysis/passes/unsafeptr/testdata/src/a", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":a", ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/analysis/passes/unsafeptr/testdata/src/typeparams/BUILD.bazel c/go/analysis/passes/unsafeptr/testdata/src/typeparams/BUILD.bazel +--- b/go/analysis/passes/unsafeptr/testdata/src/typeparams/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/unsafeptr/testdata/src/typeparams/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "typeparams", ++ srcs = ["typeparams.go"], ++ importpath = "golang.org/x/tools/go/analysis/passes/unsafeptr/testdata/src/typeparams", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":typeparams", ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/analysis/passes/unusedresult/BUILD.bazel c/go/analysis/passes/unusedresult/BUILD.bazel +--- b/go/analysis/passes/unusedresult/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/unusedresult/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,35 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test") ++ ++go_library( ++ name = "unusedresult", ++ srcs = [ ++ "doc.go", ++ "unusedresult.go", ++ ], ++ embedsrcs = ["doc.go"], ++ importpath = "golang.org/x/tools/go/analysis/passes/unusedresult", ++ visibility = ["//visibility:public"], ++ deps = [ ++ "//go/analysis", ++ "//go/analysis/passes/inspect", ++ 
"//go/analysis/passes/internal/analysisutil", ++ "//go/ast/inspector", ++ "//go/types/typeutil", ++ "//internal/analysisinternal", ++ ], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":unusedresult", ++ visibility = ["//visibility:public"], ++) ++ ++go_test( ++ name = "unusedresult_test", ++ srcs = ["unusedresult_test.go"], ++ deps = [ ++ ":unusedresult", ++ "//go/analysis/analysistest", ++ ], ++) +diff -urN b/go/analysis/passes/unusedresult/cmd/unusedresult/BUILD.bazel c/go/analysis/passes/unusedresult/cmd/unusedresult/BUILD.bazel +--- b/go/analysis/passes/unusedresult/cmd/unusedresult/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/unusedresult/cmd/unusedresult/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,18 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_binary", "go_library") ++ ++go_library( ++ name = "unusedresult_lib", ++ srcs = ["main.go"], ++ importpath = "golang.org/x/tools/go/analysis/passes/unusedresult/cmd/unusedresult", ++ visibility = ["//visibility:private"], ++ deps = [ ++ "//go/analysis/passes/unusedresult", ++ "//go/analysis/singlechecker", ++ ], ++) ++ ++go_binary( ++ name = "unusedresult", ++ embed = [":unusedresult_lib"], ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/analysis/passes/unusedresult/testdata/src/a/BUILD.bazel c/go/analysis/passes/unusedresult/testdata/src/a/BUILD.bazel +--- b/go/analysis/passes/unusedresult/testdata/src/a/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/unusedresult/testdata/src/a/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "a", ++ srcs = ["a.go"], ++ importpath = "golang.org/x/tools/go/analysis/passes/unusedresult/testdata/src/a", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":a", ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/analysis/passes/unusedresult/testdata/src/typeparams/BUILD.bazel c/go/analysis/passes/unusedresult/testdata/src/typeparams/BUILD.bazel +--- b/go/analysis/passes/unusedresult/testdata/src/typeparams/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/unusedresult/testdata/src/typeparams/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "typeparams", ++ srcs = ["typeparams.go"], ++ importpath = "golang.org/x/tools/go/analysis/passes/unusedresult/testdata/src/typeparams", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":typeparams", ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/analysis/passes/unusedresult/testdata/src/typeparams/userdefs/BUILD.bazel c/go/analysis/passes/unusedresult/testdata/src/typeparams/userdefs/BUILD.bazel +--- b/go/analysis/passes/unusedresult/testdata/src/typeparams/userdefs/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/unusedresult/testdata/src/typeparams/userdefs/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "userdefs", ++ srcs = ["userdefs.go"], ++ importpath = "golang.org/x/tools/go/analysis/passes/unusedresult/testdata/src/typeparams/userdefs", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":userdefs", ++ visibility = ["//visibility:public"], ++) +diff -urN 
b/go/analysis/passes/unusedwrite/BUILD.bazel c/go/analysis/passes/unusedwrite/BUILD.bazel +--- b/go/analysis/passes/unusedwrite/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/unusedwrite/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,34 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test") ++ ++go_library( ++ name = "unusedwrite", ++ srcs = [ ++ "doc.go", ++ "unusedwrite.go", ++ ], ++ embedsrcs = ["doc.go"], ++ importpath = "golang.org/x/tools/go/analysis/passes/unusedwrite", ++ visibility = ["//visibility:public"], ++ deps = [ ++ "//go/analysis", ++ "//go/analysis/passes/buildssa", ++ "//go/analysis/passes/internal/analysisutil", ++ "//go/ssa", ++ "//internal/typeparams", ++ ], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":unusedwrite", ++ visibility = ["//visibility:public"], ++) ++ ++go_test( ++ name = "unusedwrite_test", ++ srcs = ["unusedwrite_test.go"], ++ deps = [ ++ ":unusedwrite", ++ "//go/analysis/analysistest", ++ ], ++) +diff -urN b/go/analysis/passes/unusedwrite/testdata/src/a/BUILD.bazel c/go/analysis/passes/unusedwrite/testdata/src/a/BUILD.bazel +--- b/go/analysis/passes/unusedwrite/testdata/src/a/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/unusedwrite/testdata/src/a/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "a", ++ srcs = ["unusedwrite.go"], ++ importpath = "golang.org/x/tools/go/analysis/passes/unusedwrite/testdata/src/a", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":a", ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/analysis/passes/unusedwrite/testdata/src/importsunsafe/BUILD.bazel c/go/analysis/passes/unusedwrite/testdata/src/importsunsafe/BUILD.bazel +--- b/go/analysis/passes/unusedwrite/testdata/src/importsunsafe/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/unusedwrite/testdata/src/importsunsafe/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "importsunsafe", ++ srcs = ["i.go"], ++ importpath = "golang.org/x/tools/go/analysis/passes/unusedwrite/testdata/src/importsunsafe", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":importsunsafe", ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/analysis/passes/usesgenerics/BUILD.bazel c/go/analysis/passes/usesgenerics/BUILD.bazel +--- b/go/analysis/passes/usesgenerics/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/usesgenerics/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,34 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test") ++ ++go_library( ++ name = "usesgenerics", ++ srcs = [ ++ "doc.go", ++ "usesgenerics.go", ++ ], ++ embedsrcs = ["doc.go"], ++ importpath = "golang.org/x/tools/go/analysis/passes/usesgenerics", ++ visibility = ["//visibility:public"], ++ deps = [ ++ "//go/analysis", ++ "//go/analysis/passes/inspect", ++ "//go/analysis/passes/internal/analysisutil", ++ "//go/ast/inspector", ++ "//internal/typeparams/genericfeatures", ++ ], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":usesgenerics", ++ visibility = ["//visibility:public"], ++) ++ ++go_test( ++ name = "usesgenerics_test", ++ srcs = ["usesgenerics_test.go"], ++ deps = [ ++ ":usesgenerics", ++ "//go/analysis/analysistest", 
++ ], ++) +diff -urN b/go/analysis/passes/usesgenerics/testdata/src/a/BUILD.bazel c/go/analysis/passes/usesgenerics/testdata/src/a/BUILD.bazel +--- b/go/analysis/passes/usesgenerics/testdata/src/a/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/usesgenerics/testdata/src/a/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "a", ++ srcs = ["a.go"], ++ importpath = "golang.org/x/tools/go/analysis/passes/usesgenerics/testdata/src/a", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":a", ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/analysis/passes/usesgenerics/testdata/src/b/BUILD.bazel c/go/analysis/passes/usesgenerics/testdata/src/b/BUILD.bazel +--- b/go/analysis/passes/usesgenerics/testdata/src/b/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/usesgenerics/testdata/src/b/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "b", ++ srcs = ["b.go"], ++ importpath = "golang.org/x/tools/go/analysis/passes/usesgenerics/testdata/src/b", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":b", ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/analysis/passes/usesgenerics/testdata/src/c/BUILD.bazel c/go/analysis/passes/usesgenerics/testdata/src/c/BUILD.bazel +--- b/go/analysis/passes/usesgenerics/testdata/src/c/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/usesgenerics/testdata/src/c/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "c", ++ srcs = ["c.go"], ++ importpath = "golang.org/x/tools/go/analysis/passes/usesgenerics/testdata/src/c", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":c", ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/analysis/passes/usesgenerics/testdata/src/d/BUILD.bazel c/go/analysis/passes/usesgenerics/testdata/src/d/BUILD.bazel +--- b/go/analysis/passes/usesgenerics/testdata/src/d/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/usesgenerics/testdata/src/d/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "d", ++ srcs = ["d.go"], ++ importpath = "golang.org/x/tools/go/analysis/passes/usesgenerics/testdata/src/d", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":d", ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/analysis/passes/waitgroup/BUILD.bazel c/go/analysis/passes/waitgroup/BUILD.bazel +--- b/go/analysis/passes/waitgroup/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/waitgroup/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,35 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test") ++ ++go_library( ++ name = "waitgroup", ++ srcs = [ ++ "doc.go", ++ "waitgroup.go", ++ ], ++ embedsrcs = ["doc.go"], ++ importpath = "golang.org/x/tools/go/analysis/passes/waitgroup", ++ visibility = ["//visibility:public"], ++ deps = [ ++ "//go/analysis", ++ "//go/analysis/passes/inspect", ++ "//go/analysis/passes/internal/analysisutil", ++ "//go/ast/inspector", ++ "//go/types/typeutil", ++ 
"//internal/typesinternal", ++ ], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":waitgroup", ++ visibility = ["//visibility:public"], ++) ++ ++go_test( ++ name = "waitgroup_test", ++ srcs = ["waitgroup_test.go"], ++ deps = [ ++ ":waitgroup", ++ "//go/analysis/analysistest", ++ ], ++) +diff -urN b/go/analysis/passes/waitgroup/testdata/src/a/BUILD.bazel c/go/analysis/passes/waitgroup/testdata/src/a/BUILD.bazel +--- b/go/analysis/passes/waitgroup/testdata/src/a/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/passes/waitgroup/testdata/src/a/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "a", ++ srcs = ["a.go"], ++ importpath = "golang.org/x/tools/go/analysis/passes/waitgroup/testdata/src/a", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":a", ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/analysis/singlechecker/BUILD.bazel c/go/analysis/singlechecker/BUILD.bazel +--- b/go/analysis/singlechecker/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/singlechecker/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,20 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "singlechecker", ++ srcs = ["singlechecker.go"], ++ importpath = "golang.org/x/tools/go/analysis/singlechecker", ++ visibility = ["//visibility:public"], ++ deps = [ ++ "//go/analysis", ++ "//go/analysis/internal/analysisflags", ++ "//go/analysis/internal/checker", ++ "//go/analysis/unitchecker", ++ ], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":singlechecker", ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/analysis/unitchecker/BUILD.bazel c/go/analysis/unitchecker/BUILD.bazel +--- b/go/analysis/unitchecker/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/analysis/unitchecker/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,72 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test") ++ ++go_library( ++ name = "unitchecker", ++ srcs = ["unitchecker.go"], ++ importpath = "golang.org/x/tools/go/analysis/unitchecker", ++ visibility = ["//visibility:public"], ++ deps = [ ++ "//go/analysis", ++ "//go/analysis/internal/analysisflags", ++ "//internal/analysisinternal", ++ "//internal/facts", ++ ], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":unitchecker", ++ visibility = ["//visibility:public"], ++) ++ ++go_test( ++ name = "unitchecker_test", ++ srcs = [ ++ "export_test.go", ++ "separate_test.go", ++ "unitchecker_test.go", ++ "vet_std_test.go", ++ ], ++ embed = [":unitchecker"], ++ deps = [ ++ "//go/analysis/passes/appends", ++ "//go/analysis/passes/asmdecl", ++ "//go/analysis/passes/assign", ++ "//go/analysis/passes/atomic", ++ "//go/analysis/passes/bools", ++ "//go/analysis/passes/buildtag", ++ "//go/analysis/passes/cgocall", ++ "//go/analysis/passes/composite", ++ "//go/analysis/passes/copylock", ++ "//go/analysis/passes/defers", ++ "//go/analysis/passes/directive", ++ "//go/analysis/passes/errorsas", ++ "//go/analysis/passes/findcall", ++ "//go/analysis/passes/framepointer", ++ "//go/analysis/passes/gofix", ++ "//go/analysis/passes/hostport", ++ "//go/analysis/passes/httpresponse", ++ "//go/analysis/passes/ifaceassert", ++ "//go/analysis/passes/loopclosure", ++ "//go/analysis/passes/lostcancel", ++ "//go/analysis/passes/nilfunc", ++ "//go/analysis/passes/printf", ++ "//go/analysis/passes/shift", ++ 
"//go/analysis/passes/sigchanyzer", ++ "//go/analysis/passes/stdmethods", ++ "//go/analysis/passes/stdversion", ++ "//go/analysis/passes/stringintconv", ++ "//go/analysis/passes/structtag", ++ "//go/analysis/passes/testinggoroutine", ++ "//go/analysis/passes/tests", ++ "//go/analysis/passes/timeformat", ++ "//go/analysis/passes/unmarshal", ++ "//go/analysis/passes/unreachable", ++ "//go/analysis/passes/unusedresult", ++ "//go/gcexportdata", ++ "//go/packages", ++ "//internal/testenv", ++ "//internal/testfiles", ++ "//txtar", ++ ], ++) +diff -urN b/go/ast/astutil/BUILD.bazel c/go/ast/astutil/BUILD.bazel +--- b/go/ast/astutil/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/ast/astutil/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,29 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test") ++ ++go_library( ++ name = "astutil", ++ srcs = [ ++ "enclosing.go", ++ "imports.go", ++ "rewrite.go", ++ "util.go", ++ ], ++ importpath = "golang.org/x/tools/go/ast/astutil", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":astutil", ++ visibility = ["//visibility:public"], ++) ++ ++go_test( ++ name = "astutil_test", ++ srcs = [ ++ "enclosing_test.go", ++ "imports_test.go", ++ "rewrite_test.go", ++ ], ++ embed = [":astutil"], ++) +diff -urN b/go/ast/edge/BUILD.bazel c/go/ast/edge/BUILD.bazel +--- b/go/ast/edge/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/ast/edge/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "edge", ++ srcs = ["edge.go"], ++ importpath = "golang.org/x/tools/go/ast/edge", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":edge", ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/ast/inspector/BUILD.bazel c/go/ast/inspector/BUILD.bazel +--- b/go/ast/inspector/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/ast/inspector/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,34 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test") ++ ++go_library( ++ name = "inspector", ++ srcs = [ ++ "cursor.go", ++ "inspector.go", ++ "iter.go", ++ "typeof.go", ++ "walk.go", ++ ], ++ importpath = "golang.org/x/tools/go/ast/inspector", ++ visibility = ["//visibility:public"], ++ deps = ["//go/ast/edge"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":inspector", ++ visibility = ["//visibility:public"], ++) ++ ++go_test( ++ name = "inspector_test", ++ srcs = [ ++ "cursor_test.go", ++ "inspector_test.go", ++ "iter_test.go", ++ ], ++ deps = [ ++ ":inspector", ++ "//go/ast/edge", ++ ], ++) +diff -urN b/go/buildutil/BUILD.bazel c/go/buildutil/BUILD.bazel +--- b/go/buildutil/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/buildutil/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,36 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test") ++ ++go_library( ++ name = "buildutil", ++ srcs = [ ++ "allpackages.go", ++ "fakecontext.go", ++ "overlay.go", ++ "tags.go", ++ "util.go", ++ ], ++ importpath = "golang.org/x/tools/go/buildutil", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":buildutil", ++ visibility = ["//visibility:public"], ++) ++ ++go_test( ++ name = "buildutil_test", ++ srcs = [ ++ "allpackages_test.go", ++ "overlay_test.go", ++ "tags_test.go", ++ "util_test.go", ++ "util_windows_test.go", ++ ], ++ deps = [ ++ 
":buildutil", ++ "//internal/packagestest", ++ "//internal/testenv", ++ ], ++) +diff -urN b/go/callgraph/BUILD.bazel c/go/callgraph/BUILD.bazel +--- b/go/callgraph/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/callgraph/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,34 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test") ++ ++go_library( ++ name = "callgraph", ++ srcs = [ ++ "callgraph.go", ++ "util.go", ++ ], ++ importpath = "golang.org/x/tools/go/callgraph", ++ visibility = ["//visibility:public"], ++ deps = ["//go/ssa"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":callgraph", ++ visibility = ["//visibility:public"], ++) ++ ++go_test( ++ name = "callgraph_test", ++ srcs = ["callgraph_test.go"], ++ deps = [ ++ ":callgraph", ++ "//go/callgraph/cha", ++ "//go/callgraph/rta", ++ "//go/callgraph/static", ++ "//go/callgraph/vta", ++ "//go/ssa", ++ "//go/ssa/ssautil", ++ "//internal/testfiles", ++ "//txtar", ++ ], ++) +diff -urN b/go/callgraph/cha/BUILD.bazel c/go/callgraph/cha/BUILD.bazel +--- b/go/callgraph/cha/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/callgraph/cha/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,158 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test") ++ ++go_library( ++ name = "cha", ++ srcs = ["cha.go"], ++ importpath = "golang.org/x/tools/go/callgraph/cha", ++ visibility = ["//visibility:public"], ++ deps = [ ++ "//go/callgraph", ++ "//go/callgraph/internal/chautil", ++ "//go/ssa", ++ "//go/ssa/ssautil", ++ ], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":cha", ++ visibility = ["//visibility:public"], ++) ++ ++go_test( ++ name = "cha_test", ++ srcs = ["cha_test.go"], ++ deps = select({ ++ "@io_bazel_rules_go//go/platform:aix": [ ++ ":cha", ++ "//go/callgraph", ++ "//go/packages", ++ "//go/ssa", ++ "//go/ssa/ssautil", ++ "//internal/testenv", ++ "//internal/testfiles", ++ "//txtar", ++ ], ++ "@io_bazel_rules_go//go/platform:darwin": [ ++ ":cha", ++ "//go/callgraph", ++ "//go/packages", ++ "//go/ssa", ++ "//go/ssa/ssautil", ++ "//internal/testenv", ++ "//internal/testfiles", ++ "//txtar", ++ ], ++ "@io_bazel_rules_go//go/platform:dragonfly": [ ++ ":cha", ++ "//go/callgraph", ++ "//go/packages", ++ "//go/ssa", ++ "//go/ssa/ssautil", ++ "//internal/testenv", ++ "//internal/testfiles", ++ "//txtar", ++ ], ++ "@io_bazel_rules_go//go/platform:freebsd": [ ++ ":cha", ++ "//go/callgraph", ++ "//go/packages", ++ "//go/ssa", ++ "//go/ssa/ssautil", ++ "//internal/testenv", ++ "//internal/testfiles", ++ "//txtar", ++ ], ++ "@io_bazel_rules_go//go/platform:illumos": [ ++ ":cha", ++ "//go/callgraph", ++ "//go/packages", ++ "//go/ssa", ++ "//go/ssa/ssautil", ++ "//internal/testenv", ++ "//internal/testfiles", ++ "//txtar", ++ ], ++ "@io_bazel_rules_go//go/platform:ios": [ ++ ":cha", ++ "//go/callgraph", ++ "//go/packages", ++ "//go/ssa", ++ "//go/ssa/ssautil", ++ "//internal/testenv", ++ "//internal/testfiles", ++ "//txtar", ++ ], ++ "@io_bazel_rules_go//go/platform:js": [ ++ ":cha", ++ "//go/callgraph", ++ "//go/packages", ++ "//go/ssa", ++ "//go/ssa/ssautil", ++ "//internal/testenv", ++ "//internal/testfiles", ++ "//txtar", ++ ], ++ "@io_bazel_rules_go//go/platform:linux": [ ++ ":cha", ++ "//go/callgraph", ++ "//go/packages", ++ "//go/ssa", ++ "//go/ssa/ssautil", ++ "//internal/testenv", ++ "//internal/testfiles", ++ "//txtar", ++ ], ++ "@io_bazel_rules_go//go/platform:netbsd": [ ++ ":cha", ++ "//go/callgraph", ++ "//go/packages", ++ "//go/ssa", ++ "//go/ssa/ssautil", 
++ "//internal/testenv", ++ "//internal/testfiles", ++ "//txtar", ++ ], ++ "@io_bazel_rules_go//go/platform:openbsd": [ ++ ":cha", ++ "//go/callgraph", ++ "//go/packages", ++ "//go/ssa", ++ "//go/ssa/ssautil", ++ "//internal/testenv", ++ "//internal/testfiles", ++ "//txtar", ++ ], ++ "@io_bazel_rules_go//go/platform:plan9": [ ++ ":cha", ++ "//go/callgraph", ++ "//go/packages", ++ "//go/ssa", ++ "//go/ssa/ssautil", ++ "//internal/testenv", ++ "//internal/testfiles", ++ "//txtar", ++ ], ++ "@io_bazel_rules_go//go/platform:solaris": [ ++ ":cha", ++ "//go/callgraph", ++ "//go/packages", ++ "//go/ssa", ++ "//go/ssa/ssautil", ++ "//internal/testenv", ++ "//internal/testfiles", ++ "//txtar", ++ ], ++ "@io_bazel_rules_go//go/platform:windows": [ ++ ":cha", ++ "//go/callgraph", ++ "//go/packages", ++ "//go/ssa", ++ "//go/ssa/ssautil", ++ "//internal/testenv", ++ "//internal/testfiles", ++ "//txtar", ++ ], ++ "//conditions:default": [], ++ }), ++) +diff -urN b/go/callgraph/cha/testdata/BUILD.bazel c/go/callgraph/cha/testdata/BUILD.bazel +--- b/go/callgraph/cha/testdata/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/callgraph/cha/testdata/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,20 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_binary", "go_library") ++ ++go_library( ++ name = "testdata_lib", ++ srcs = [ ++ "func.go", ++ "generics.go", ++ "iface.go", ++ "issue23925.go", ++ "recv.go", ++ ], ++ importpath = "golang.org/x/tools/go/callgraph/cha/testdata", ++ visibility = ["//visibility:private"], ++) ++ ++go_binary( ++ name = "testdata", ++ embed = [":testdata_lib"], ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/callgraph/internal/chautil/BUILD.bazel c/go/callgraph/internal/chautil/BUILD.bazel +--- b/go/callgraph/internal/chautil/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/callgraph/internal/chautil/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,18 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "chautil", ++ srcs = ["lazy.go"], ++ importpath = "golang.org/x/tools/go/callgraph/internal/chautil", ++ visibility = ["//go/callgraph:__subpackages__"], ++ deps = [ ++ "//go/ssa", ++ "//go/types/typeutil", ++ ], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":chautil", ++ visibility = ["//go/callgraph:__subpackages__"], ++) +diff -urN b/go/callgraph/rta/BUILD.bazel c/go/callgraph/rta/BUILD.bazel +--- b/go/callgraph/rta/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/callgraph/rta/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,132 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test") ++ ++go_library( ++ name = "rta", ++ srcs = ["rta.go"], ++ importpath = "golang.org/x/tools/go/callgraph/rta", ++ visibility = ["//visibility:public"], ++ deps = [ ++ "//go/callgraph", ++ "//go/ssa", ++ "//go/types/typeutil", ++ ], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":rta", ++ visibility = ["//visibility:public"], ++) ++ ++go_test( ++ name = "rta_test", ++ srcs = ["rta_test.go"], ++ data = glob(["testdata/**"]), ++ deps = select({ ++ "@io_bazel_rules_go//go/platform:aix": [ ++ ":rta", ++ "//go/callgraph", ++ "//go/ssa", ++ "//go/ssa/ssautil", ++ "//internal/testfiles", ++ "//txtar", ++ ], ++ "@io_bazel_rules_go//go/platform:darwin": [ ++ ":rta", ++ "//go/callgraph", ++ "//go/ssa", ++ "//go/ssa/ssautil", ++ "//internal/testfiles", ++ "//txtar", ++ ], ++ "@io_bazel_rules_go//go/platform:dragonfly": [ ++ ":rta", ++ "//go/callgraph", ++ "//go/ssa", ++ 
"//go/ssa/ssautil", ++ "//internal/testfiles", ++ "//txtar", ++ ], ++ "@io_bazel_rules_go//go/platform:freebsd": [ ++ ":rta", ++ "//go/callgraph", ++ "//go/ssa", ++ "//go/ssa/ssautil", ++ "//internal/testfiles", ++ "//txtar", ++ ], ++ "@io_bazel_rules_go//go/platform:illumos": [ ++ ":rta", ++ "//go/callgraph", ++ "//go/ssa", ++ "//go/ssa/ssautil", ++ "//internal/testfiles", ++ "//txtar", ++ ], ++ "@io_bazel_rules_go//go/platform:ios": [ ++ ":rta", ++ "//go/callgraph", ++ "//go/ssa", ++ "//go/ssa/ssautil", ++ "//internal/testfiles", ++ "//txtar", ++ ], ++ "@io_bazel_rules_go//go/platform:js": [ ++ ":rta", ++ "//go/callgraph", ++ "//go/ssa", ++ "//go/ssa/ssautil", ++ "//internal/testfiles", ++ "//txtar", ++ ], ++ "@io_bazel_rules_go//go/platform:linux": [ ++ ":rta", ++ "//go/callgraph", ++ "//go/ssa", ++ "//go/ssa/ssautil", ++ "//internal/testfiles", ++ "//txtar", ++ ], ++ "@io_bazel_rules_go//go/platform:netbsd": [ ++ ":rta", ++ "//go/callgraph", ++ "//go/ssa", ++ "//go/ssa/ssautil", ++ "//internal/testfiles", ++ "//txtar", ++ ], ++ "@io_bazel_rules_go//go/platform:openbsd": [ ++ ":rta", ++ "//go/callgraph", ++ "//go/ssa", ++ "//go/ssa/ssautil", ++ "//internal/testfiles", ++ "//txtar", ++ ], ++ "@io_bazel_rules_go//go/platform:plan9": [ ++ ":rta", ++ "//go/callgraph", ++ "//go/ssa", ++ "//go/ssa/ssautil", ++ "//internal/testfiles", ++ "//txtar", ++ ], ++ "@io_bazel_rules_go//go/platform:solaris": [ ++ ":rta", ++ "//go/callgraph", ++ "//go/ssa", ++ "//go/ssa/ssautil", ++ "//internal/testfiles", ++ "//txtar", ++ ], ++ "@io_bazel_rules_go//go/platform:windows": [ ++ ":rta", ++ "//go/callgraph", ++ "//go/ssa", ++ "//go/ssa/ssautil", ++ "//internal/testfiles", ++ "//txtar", ++ ], ++ "//conditions:default": [], ++ }), ++) +diff -urN b/go/callgraph/static/BUILD.bazel c/go/callgraph/static/BUILD.bazel +--- b/go/callgraph/static/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/callgraph/static/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,31 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test") ++ ++go_library( ++ name = "static", ++ srcs = ["static.go"], ++ importpath = "golang.org/x/tools/go/callgraph/static", ++ visibility = ["//visibility:public"], ++ deps = [ ++ "//go/callgraph", ++ "//go/ssa", ++ ], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":static", ++ visibility = ["//visibility:public"], ++) ++ ++go_test( ++ name = "static_test", ++ srcs = ["static_test.go"], ++ deps = [ ++ ":static", ++ "//go/callgraph", ++ "//go/ssa", ++ "//go/ssa/ssautil", ++ "//internal/testfiles", ++ "//txtar", ++ ], ++) +diff -urN b/go/callgraph/vta/BUILD.bazel c/go/callgraph/vta/BUILD.bazel +--- b/go/callgraph/vta/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/callgraph/vta/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,54 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test") ++ ++go_library( ++ name = "vta", ++ srcs = [ ++ "graph.go", ++ "initial.go", ++ "propagation.go", ++ "utils.go", ++ "vta.go", ++ ], ++ importpath = "golang.org/x/tools/go/callgraph/vta", ++ visibility = ["//visibility:public"], ++ deps = [ ++ "//go/callgraph", ++ "//go/callgraph/internal/chautil", ++ "//go/callgraph/vta/internal/trie", ++ "//go/ssa", ++ "//go/types/typeutil", ++ "//internal/typeparams", ++ ], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":vta", ++ visibility = ["//visibility:public"], ++) ++ ++go_test( ++ name = "vta_test", ++ srcs = [ ++ "graph_test.go", ++ "helpers_test.go", ++ "propagation_test.go", ++ 
"vta_test.go", ++ ], ++ embed = [":vta"], ++ deps = [ ++ "//go/analysis", ++ "//go/analysis/analysistest", ++ "//go/analysis/passes/buildssa", ++ "//go/callgraph", ++ "//go/callgraph/cha", ++ "//go/packages", ++ "//go/ssa", ++ "//go/ssa/ssautil", ++ "//go/types/typeutil", ++ "//internal/testenv", ++ "//internal/testfiles", ++ "//txtar", ++ "@com_github_google_go_cmp//cmp:go_default_library", ++ ], ++) +diff -urN b/go/callgraph/vta/internal/trie/BUILD.bazel c/go/callgraph/vta/internal/trie/BUILD.bazel +--- b/go/callgraph/vta/internal/trie/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/callgraph/vta/internal/trie/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,29 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test") ++ ++go_library( ++ name = "trie", ++ srcs = [ ++ "bits.go", ++ "builder.go", ++ "scope.go", ++ "trie.go", ++ ], ++ importpath = "golang.org/x/tools/go/callgraph/vta/internal/trie", ++ visibility = ["//go/callgraph/vta:__subpackages__"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":trie", ++ visibility = ["//go/callgraph/vta:__subpackages__"], ++) ++ ++go_test( ++ name = "trie_test", ++ srcs = [ ++ "bits_test.go", ++ "op_test.go", ++ "trie_test.go", ++ ], ++ embed = [":trie"], ++) +diff -urN b/go/callgraph/vta/testdata/src/d/BUILD.bazel c/go/callgraph/vta/testdata/src/d/BUILD.bazel +--- b/go/callgraph/vta/testdata/src/d/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/callgraph/vta/testdata/src/d/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "d", ++ srcs = ["d.go"], ++ importpath = "golang.org/x/tools/go/callgraph/vta/testdata/src/d", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":d", ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/callgraph/vta/testdata/src/t/BUILD.bazel c/go/callgraph/vta/testdata/src/t/BUILD.bazel +--- b/go/callgraph/vta/testdata/src/t/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/callgraph/vta/testdata/src/t/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "t", ++ srcs = ["t.go"], ++ importpath = "golang.org/x/tools/go/callgraph/vta/testdata/src/t", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":t", ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/cfg/BUILD.bazel c/go/cfg/BUILD.bazel +--- b/go/cfg/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/cfg/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,27 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test") ++ ++go_library( ++ name = "cfg", ++ srcs = [ ++ "builder.go", ++ "cfg.go", ++ ], ++ importpath = "golang.org/x/tools/go/cfg", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":cfg", ++ visibility = ["//visibility:public"], ++) ++ ++go_test( ++ name = "cfg_test", ++ srcs = ["cfg_test.go"], ++ deps = [ ++ ":cfg", ++ "//go/packages", ++ "//internal/testenv", ++ ], ++) +diff -urN b/go/gccgoexportdata/BUILD.bazel c/go/gccgoexportdata/BUILD.bazel +--- b/go/gccgoexportdata/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/gccgoexportdata/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,22 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test") ++ ++go_library( ++ name = "gccgoexportdata", ++ 
srcs = ["gccgoexportdata.go"], ++ importpath = "golang.org/x/tools/go/gccgoexportdata", ++ visibility = ["//visibility:public"], ++ deps = ["//go/internal/gccgoimporter"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":gccgoexportdata", ++ visibility = ["//visibility:public"], ++) ++ ++go_test( ++ name = "gccgoexportdata_test", ++ srcs = ["gccgoexportdata_test.go"], ++ data = glob(["testdata/**"]), ++ deps = [":gccgoexportdata"], ++) +diff -urN b/go/gcexportdata/BUILD.bazel c/go/gcexportdata/BUILD.bazel +--- b/go/gcexportdata/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/gcexportdata/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,59 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test") ++ ++go_library( ++ name = "gcexportdata", ++ srcs = [ ++ "gcexportdata.go", ++ "importer.go", ++ ], ++ importpath = "golang.org/x/tools/go/gcexportdata", ++ visibility = ["//visibility:public"], ++ deps = ["//internal/gcimporter"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":gcexportdata", ++ visibility = ["//visibility:public"], ++) ++ ++go_test( ++ name = "gcexportdata_test", ++ srcs = ["example_test.go"], ++ deps = select({ ++ "@io_bazel_rules_go//go/platform:aix": [ ++ ":gcexportdata", ++ ], ++ "@io_bazel_rules_go//go/platform:darwin": [ ++ ":gcexportdata", ++ ], ++ "@io_bazel_rules_go//go/platform:dragonfly": [ ++ ":gcexportdata", ++ ], ++ "@io_bazel_rules_go//go/platform:freebsd": [ ++ ":gcexportdata", ++ ], ++ "@io_bazel_rules_go//go/platform:illumos": [ ++ ":gcexportdata", ++ ], ++ "@io_bazel_rules_go//go/platform:linux": [ ++ ":gcexportdata", ++ ], ++ "@io_bazel_rules_go//go/platform:netbsd": [ ++ ":gcexportdata", ++ ], ++ "@io_bazel_rules_go//go/platform:openbsd": [ ++ ":gcexportdata", ++ ], ++ "@io_bazel_rules_go//go/platform:plan9": [ ++ ":gcexportdata", ++ ], ++ "@io_bazel_rules_go//go/platform:solaris": [ ++ ":gcexportdata", ++ ], ++ "@io_bazel_rules_go//go/platform:windows": [ ++ ":gcexportdata", ++ ], ++ "//conditions:default": [], ++ }), ++) +diff -urN b/go/internal/cgo/BUILD.bazel c/go/internal/cgo/BUILD.bazel +--- b/go/internal/cgo/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/internal/cgo/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,17 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "cgo", ++ srcs = [ ++ "cgo.go", ++ "cgo_pkgconfig.go", ++ ], ++ importpath = "golang.org/x/tools/go/internal/cgo", ++ visibility = ["//go:__subpackages__"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":cgo", ++ visibility = ["//go:__subpackages__"], ++) +diff -urN b/go/internal/gccgoimporter/BUILD.bazel c/go/internal/gccgoimporter/BUILD.bazel +--- b/go/internal/gccgoimporter/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/internal/gccgoimporter/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,36 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test") ++ ++go_library( ++ name = "gccgoimporter", ++ srcs = [ ++ "ar.go", ++ "backdoor.go", ++ "gccgoinstallation.go", ++ "importer.go", ++ "newInterface10.go", ++ "newInterface11.go", ++ "parser.go", ++ ], ++ importpath = "golang.org/x/tools/go/internal/gccgoimporter", ++ visibility = ["//go:__subpackages__"], ++ deps = ["//internal/typesinternal"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":gccgoimporter", ++ visibility = ["//go:__subpackages__"], ++) ++ ++go_test( ++ name = "gccgoimporter_test", ++ srcs = [ ++ "gccgoinstallation_test.go", ++ "importer_test.go", 
++ "parser_test.go", ++ "testenv_test.go", ++ ], ++ data = glob(["testdata/**"]), ++ embed = [":gccgoimporter"], ++ deps = ["//internal/testenv"], ++) +diff -urN b/go/loader/BUILD.bazel c/go/loader/BUILD.bazel +--- b/go/loader/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/loader/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,36 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test") ++ ++go_library( ++ name = "loader", ++ srcs = [ ++ "doc.go", ++ "loader.go", ++ "util.go", ++ ], ++ importpath = "golang.org/x/tools/go/loader", ++ visibility = ["//visibility:public"], ++ deps = [ ++ "//go/ast/astutil", ++ "//go/buildutil", ++ "//go/internal/cgo", ++ ], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":loader", ++ visibility = ["//visibility:public"], ++) ++ ++go_test( ++ name = "loader_test", ++ srcs = [ ++ "loader_test.go", ++ "stdlib_test.go", ++ ], ++ deps = [ ++ ":loader", ++ "//go/buildutil", ++ "//internal/testenv", ++ ], ++) +diff -urN b/go/loader/testdata/BUILD.bazel c/go/loader/testdata/BUILD.bazel +--- b/go/loader/testdata/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/loader/testdata/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,18 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "testdata", ++ srcs = [ ++ "a.go", ++ "b.go", ++ "badpkgdecl.go", ++ ], ++ importpath = "golang.org/x/tools/go/loader/testdata", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":testdata", ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/loader/testdata/issue46877/BUILD.bazel c/go/loader/testdata/issue46877/BUILD.bazel +--- b/go/loader/testdata/issue46877/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/loader/testdata/issue46877/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,18 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "issue46877", ++ srcs = [ ++ "x.go", ++ "x.h", ++ ], ++ cgo = True, ++ importpath = "golang.org/x/tools/go/loader/testdata/issue46877", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":issue46877", ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/packages/BUILD.bazel c/go/packages/BUILD.bazel +--- b/go/packages/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/packages/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,48 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test") ++ ++go_library( ++ name = "packages", ++ srcs = [ ++ "doc.go", ++ "external.go", ++ "golist.go", ++ "golist_overlay.go", ++ "loadmode_string.go", ++ "packages.go", ++ "visit.go", ++ ], ++ importpath = "golang.org/x/tools/go/packages", ++ visibility = ["//visibility:public"], ++ deps = [ ++ "//go/gcexportdata", ++ "//internal/gocommand", ++ "//internal/packagesinternal", ++ "//internal/typesinternal", ++ "@org_golang_x_sync//errgroup:go_default_library", ++ ], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":packages", ++ visibility = ["//visibility:public"], ++) ++ ++go_test( ++ name = "packages_test", ++ srcs = [ ++ "example_test.go", ++ "overlay_test.go", ++ "packages_test.go", ++ "stdlib_test.go", ++ ], ++ deps = [ ++ ":packages", ++ "//internal/packagesinternal", ++ "//internal/packagestest", ++ "//internal/testenv", ++ "//internal/testfiles", ++ "//txtar", ++ "@com_github_google_go_cmp//cmp:go_default_library", ++ ], ++) +diff -urN 
b/go/packages/gopackages/BUILD.bazel c/go/packages/gopackages/BUILD.bazel +--- b/go/packages/gopackages/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/packages/gopackages/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,20 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_binary", "go_library") ++ ++go_library( ++ name = "gopackages_lib", ++ srcs = ["main.go"], ++ importpath = "golang.org/x/tools/go/packages/gopackages", ++ visibility = ["//visibility:private"], ++ deps = [ ++ "//go/packages", ++ "//go/types/typeutil", ++ "//internal/drivertest", ++ "//internal/tool", ++ ], ++) ++ ++go_binary( ++ name = "gopackages", ++ embed = [":gopackages_lib"], ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/packages/internal/linecount/BUILD.bazel c/go/packages/internal/linecount/BUILD.bazel +--- b/go/packages/internal/linecount/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/packages/internal/linecount/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,18 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_binary", "go_library") ++ ++go_library( ++ name = "linecount_lib", ++ srcs = ["linecount.go"], ++ importpath = "golang.org/x/tools/go/packages/internal/linecount", ++ visibility = ["//visibility:private"], ++ deps = [ ++ "//go/packages", ++ "@org_golang_x_sync//errgroup:go_default_library", ++ ], ++) ++ ++go_binary( ++ name = "linecount", ++ embed = [":linecount_lib"], ++ visibility = ["//go/packages:__subpackages__"], ++) +diff -urN b/go/packages/internal/nodecount/BUILD.bazel c/go/packages/internal/nodecount/BUILD.bazel +--- b/go/packages/internal/nodecount/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/packages/internal/nodecount/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,15 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_binary", "go_library") ++ ++go_library( ++ name = "nodecount_lib", ++ srcs = ["nodecount.go"], ++ importpath = "golang.org/x/tools/go/packages/internal/nodecount", ++ visibility = ["//visibility:private"], ++ deps = ["//go/packages"], ++) ++ ++go_binary( ++ name = "nodecount", ++ embed = [":nodecount_lib"], ++ visibility = ["//go/packages:__subpackages__"], ++) +diff -urN b/go/ssa/BUILD.bazel c/go/ssa/BUILD.bazel +--- b/go/ssa/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/ssa/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,75 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test") ++ ++go_library( ++ name = "ssa", ++ srcs = [ ++ "block.go", ++ "blockopt.go", ++ "builder.go", ++ "const.go", ++ "create.go", ++ "doc.go", ++ "dom.go", ++ "emit.go", ++ "func.go", ++ "instantiate.go", ++ "lift.go", ++ "lvalue.go", ++ "methods.go", ++ "mode.go", ++ "print.go", ++ "sanity.go", ++ "source.go", ++ "ssa.go", ++ "subst.go", ++ "task.go", ++ "typeset.go", ++ "util.go", ++ "wrappers.go", ++ ], ++ importpath = "golang.org/x/tools/go/ssa", ++ visibility = ["//visibility:public"], ++ deps = [ ++ "//go/types/typeutil", ++ "//internal/aliases", ++ "//internal/typeparams", ++ "//internal/typesinternal", ++ "//internal/versions", ++ ], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":ssa", ++ visibility = ["//visibility:public"], ++) ++ ++go_test( ++ name = "ssa_test", ++ srcs = [ ++ "builder_generic_test.go", ++ "builder_test.go", ++ "const_test.go", ++ "dom_test.go", ++ "example_test.go", ++ "instantiate_test.go", ++ "methods_test.go", ++ "source_test.go", ++ "stdlib_test.go", ++ "subst_test.go", ++ "testutil_test.go", ++ ], ++ embed = [":ssa"], ++ deps = [ ++ 
"//go/analysis/analysistest", ++ "//go/ast/astutil", ++ "//go/loader", ++ "//go/packages", ++ "//go/ssa/ssautil", ++ "//internal/expect", ++ "//internal/testenv", ++ "//internal/testfiles", ++ "//txtar", ++ "@org_golang_x_sync//errgroup:go_default_library", ++ ], ++) +diff -urN b/go/ssa/interp/BUILD.bazel c/go/ssa/interp/BUILD.bazel +--- b/go/ssa/interp/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/ssa/interp/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,41 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test") ++ ++go_library( ++ name = "interp", ++ srcs = [ ++ "external.go", ++ "interp.go", ++ "map.go", ++ "ops.go", ++ "reflect.go", ++ "value.go", ++ ], ++ importpath = "golang.org/x/tools/go/ssa/interp", ++ visibility = ["//visibility:public"], ++ deps = [ ++ "//go/ssa", ++ "//go/types/typeutil", ++ "//internal/typeparams", ++ ], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":interp", ++ visibility = ["//visibility:public"], ++) ++ ++go_test( ++ name = "interp_test", ++ srcs = [ ++ "interp_test.go", ++ "rangefunc_test.go", ++ ], ++ deps = [ ++ ":interp", ++ "//go/loader", ++ "//go/ssa", ++ "//go/ssa/ssautil", ++ "//internal/testenv", ++ ], ++) +diff -urN b/go/ssa/interp/testdata/fixedbugs/BUILD.bazel c/go/ssa/interp/testdata/fixedbugs/BUILD.bazel +--- b/go/ssa/interp/testdata/fixedbugs/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/ssa/interp/testdata/fixedbugs/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,22 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_binary", "go_library") ++ ++go_library( ++ name = "fixedbugs_lib", ++ srcs = [ ++ "issue52342.go", ++ "issue52835.go", ++ "issue55086.go", ++ "issue55115.go", ++ "issue66783.go", ++ "issue69298.go", ++ "issue69929.go", ++ ], ++ importpath = "golang.org/x/tools/go/ssa/interp/testdata/fixedbugs", ++ visibility = ["//visibility:private"], ++) ++ ++go_binary( ++ name = "fixedbugs", ++ embed = [":fixedbugs_lib"], ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/ssa/interp/testdata/src/encoding/BUILD.bazel c/go/ssa/interp/testdata/src/encoding/BUILD.bazel +--- b/go/ssa/interp/testdata/src/encoding/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/ssa/interp/testdata/src/encoding/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "encoding", ++ srcs = ["encoding.go"], ++ importpath = "golang.org/x/tools/go/ssa/interp/testdata/src/encoding", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":encoding", ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/ssa/interp/testdata/src/errors/BUILD.bazel c/go/ssa/interp/testdata/src/errors/BUILD.bazel +--- b/go/ssa/interp/testdata/src/errors/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/ssa/interp/testdata/src/errors/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "errors", ++ srcs = ["errors.go"], ++ importpath = "golang.org/x/tools/go/ssa/interp/testdata/src/errors", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":errors", ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/ssa/interp/testdata/src/fmt/BUILD.bazel c/go/ssa/interp/testdata/src/fmt/BUILD.bazel +--- b/go/ssa/interp/testdata/src/fmt/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ 
c/go/ssa/interp/testdata/src/fmt/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "fmt", ++ srcs = ["fmt.go"], ++ importpath = "golang.org/x/tools/go/ssa/interp/testdata/src/fmt", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":fmt", ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/ssa/interp/testdata/src/io/BUILD.bazel c/go/ssa/interp/testdata/src/io/BUILD.bazel +--- b/go/ssa/interp/testdata/src/io/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/ssa/interp/testdata/src/io/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "io", ++ srcs = ["io.go"], ++ importpath = "golang.org/x/tools/go/ssa/interp/testdata/src/io", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":io", ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/ssa/interp/testdata/src/log/BUILD.bazel c/go/ssa/interp/testdata/src/log/BUILD.bazel +--- b/go/ssa/interp/testdata/src/log/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/ssa/interp/testdata/src/log/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "log", ++ srcs = ["log.go"], ++ importpath = "golang.org/x/tools/go/ssa/interp/testdata/src/log", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":log", ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/ssa/interp/testdata/src/math/BUILD.bazel c/go/ssa/interp/testdata/src/math/BUILD.bazel +--- b/go/ssa/interp/testdata/src/math/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/ssa/interp/testdata/src/math/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "math", ++ srcs = ["math.go"], ++ importpath = "golang.org/x/tools/go/ssa/interp/testdata/src/math", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":math", ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/ssa/interp/testdata/src/os/BUILD.bazel c/go/ssa/interp/testdata/src/os/BUILD.bazel +--- b/go/ssa/interp/testdata/src/os/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/ssa/interp/testdata/src/os/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "os", ++ srcs = ["os.go"], ++ importpath = "golang.org/x/tools/go/ssa/interp/testdata/src/os", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":os", ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/ssa/interp/testdata/src/reflect/BUILD.bazel c/go/ssa/interp/testdata/src/reflect/BUILD.bazel +--- b/go/ssa/interp/testdata/src/reflect/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/ssa/interp/testdata/src/reflect/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,17 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "reflect", ++ srcs = [ ++ "deepequal.go", ++ "reflect.go", ++ ], ++ importpath = "golang.org/x/tools/go/ssa/interp/testdata/src/reflect", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = 
":reflect", ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/ssa/interp/testdata/src/runtime/BUILD.bazel c/go/ssa/interp/testdata/src/runtime/BUILD.bazel +--- b/go/ssa/interp/testdata/src/runtime/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/ssa/interp/testdata/src/runtime/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "runtime", ++ srcs = ["runtime.go"], ++ importpath = "golang.org/x/tools/go/ssa/interp/testdata/src/runtime", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":runtime", ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/ssa/interp/testdata/src/sort/BUILD.bazel c/go/ssa/interp/testdata/src/sort/BUILD.bazel +--- b/go/ssa/interp/testdata/src/sort/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/ssa/interp/testdata/src/sort/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "sort", ++ srcs = ["sort.go"], ++ importpath = "golang.org/x/tools/go/ssa/interp/testdata/src/sort", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":sort", ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/ssa/interp/testdata/src/strconv/BUILD.bazel c/go/ssa/interp/testdata/src/strconv/BUILD.bazel +--- b/go/ssa/interp/testdata/src/strconv/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/ssa/interp/testdata/src/strconv/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "strconv", ++ srcs = ["strconv.go"], ++ importpath = "golang.org/x/tools/go/ssa/interp/testdata/src/strconv", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":strconv", ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/ssa/interp/testdata/src/strings/BUILD.bazel c/go/ssa/interp/testdata/src/strings/BUILD.bazel +--- b/go/ssa/interp/testdata/src/strings/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/ssa/interp/testdata/src/strings/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "strings", ++ srcs = ["strings.go"], ++ importpath = "golang.org/x/tools/go/ssa/interp/testdata/src/strings", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":strings", ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/ssa/interp/testdata/src/sync/BUILD.bazel c/go/ssa/interp/testdata/src/sync/BUILD.bazel +--- b/go/ssa/interp/testdata/src/sync/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/ssa/interp/testdata/src/sync/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "sync", ++ srcs = ["sync.go"], ++ importpath = "golang.org/x/tools/go/ssa/interp/testdata/src/sync", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":sync", ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/ssa/interp/testdata/src/time/BUILD.bazel c/go/ssa/interp/testdata/src/time/BUILD.bazel +--- b/go/ssa/interp/testdata/src/time/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/ssa/interp/testdata/src/time/BUILD.bazel 2000-01-01 
00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "time", ++ srcs = ["time.go"], ++ importpath = "golang.org/x/tools/go/ssa/interp/testdata/src/time", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":time", ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/ssa/interp/testdata/src/unicode/utf8/BUILD.bazel c/go/ssa/interp/testdata/src/unicode/utf8/BUILD.bazel +--- b/go/ssa/interp/testdata/src/unicode/utf8/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/ssa/interp/testdata/src/unicode/utf8/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "utf8", ++ srcs = ["utf8.go"], ++ importpath = "golang.org/x/tools/go/ssa/interp/testdata/src/unicode/utf8", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":utf8", ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/ssa/interp/testdata/src/unsafe/BUILD.bazel c/go/ssa/interp/testdata/src/unsafe/BUILD.bazel +--- b/go/ssa/interp/testdata/src/unsafe/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/ssa/interp/testdata/src/unsafe/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "unsafe", ++ srcs = ["unsafe.go"], ++ importpath = "golang.org/x/tools/go/ssa/interp/testdata/src/unsafe", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":unsafe", ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/ssa/ssautil/BUILD.bazel c/go/ssa/ssautil/BUILD.bazel +--- b/go/ssa/ssautil/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/ssa/ssautil/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,96 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test") ++ ++go_library( ++ name = "ssautil", ++ srcs = [ ++ "deprecated.go", ++ "load.go", ++ "switch.go", ++ "visit.go", ++ ], ++ importpath = "golang.org/x/tools/go/ssa/ssautil", ++ visibility = ["//visibility:public"], ++ deps = [ ++ "//go/loader", ++ "//go/packages", ++ "//go/ssa", ++ ], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":ssautil", ++ visibility = ["//visibility:public"], ++) ++ ++go_test( ++ name = "ssautil_test", ++ srcs = [ ++ "deprecated_test.go", ++ "load_test.go", ++ "switch_test.go", ++ ], ++ data = glob(["testdata/**"]), ++ deps = [ ++ ":ssautil", ++ "//go/loader", ++ "//go/packages", ++ "//go/ssa", ++ "//internal/packagestest", ++ "//internal/testenv", ++ ] + select({ ++ "@io_bazel_rules_go//go/platform:aix": [ ++ "//internal/testfiles", ++ "//txtar", ++ ], ++ "@io_bazel_rules_go//go/platform:darwin": [ ++ "//internal/testfiles", ++ "//txtar", ++ ], ++ "@io_bazel_rules_go//go/platform:dragonfly": [ ++ "//internal/testfiles", ++ "//txtar", ++ ], ++ "@io_bazel_rules_go//go/platform:freebsd": [ ++ "//internal/testfiles", ++ "//txtar", ++ ], ++ "@io_bazel_rules_go//go/platform:illumos": [ ++ "//internal/testfiles", ++ "//txtar", ++ ], ++ "@io_bazel_rules_go//go/platform:ios": [ ++ "//internal/testfiles", ++ "//txtar", ++ ], ++ "@io_bazel_rules_go//go/platform:js": [ ++ "//internal/testfiles", ++ "//txtar", ++ ], ++ "@io_bazel_rules_go//go/platform:linux": [ ++ "//internal/testfiles", ++ "//txtar", ++ ], ++ "@io_bazel_rules_go//go/platform:netbsd": [ ++ "//internal/testfiles", ++ "//txtar", ++ ], ++ 
"@io_bazel_rules_go//go/platform:openbsd": [ ++ "//internal/testfiles", ++ "//txtar", ++ ], ++ "@io_bazel_rules_go//go/platform:plan9": [ ++ "//internal/testfiles", ++ "//txtar", ++ ], ++ "@io_bazel_rules_go//go/platform:solaris": [ ++ "//internal/testfiles", ++ "//txtar", ++ ], ++ "@io_bazel_rules_go//go/platform:windows": [ ++ "//internal/testfiles", ++ "//txtar", ++ ], ++ "//conditions:default": [], ++ }), ++) +diff -urN b/go/ssa/testdata/BUILD.bazel c/go/ssa/testdata/BUILD.bazel +--- b/go/ssa/testdata/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/ssa/testdata/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,18 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_binary", "go_library") ++ ++go_library( ++ name = "testdata_lib", ++ srcs = [ ++ "objlookup.go", ++ "structconv.go", ++ "valueforexpr.go", ++ ], ++ importpath = "golang.org/x/tools/go/ssa/testdata", ++ visibility = ["//visibility:private"], ++) ++ ++go_binary( ++ name = "testdata", ++ embed = [":testdata_lib"], ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/ssa/testdata/fixedbugs/BUILD.bazel c/go/ssa/testdata/fixedbugs/BUILD.bazel +--- b/go/ssa/testdata/fixedbugs/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/ssa/testdata/fixedbugs/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "fixedbugs", ++ srcs = ["issue73594.go"], ++ importpath = "golang.org/x/tools/go/ssa/testdata/fixedbugs", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":fixedbugs", ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/ssa/testdata/src/bytes/BUILD.bazel c/go/ssa/testdata/src/bytes/BUILD.bazel +--- b/go/ssa/testdata/src/bytes/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/ssa/testdata/src/bytes/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "bytes", ++ srcs = ["bytes.go"], ++ importpath = "golang.org/x/tools/go/ssa/testdata/src/bytes", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":bytes", ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/ssa/testdata/src/context/BUILD.bazel c/go/ssa/testdata/src/context/BUILD.bazel +--- b/go/ssa/testdata/src/context/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/ssa/testdata/src/context/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "context", ++ srcs = ["context.go"], ++ importpath = "golang.org/x/tools/go/ssa/testdata/src/context", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":context", ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/ssa/testdata/src/encoding/BUILD.bazel c/go/ssa/testdata/src/encoding/BUILD.bazel +--- b/go/ssa/testdata/src/encoding/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/ssa/testdata/src/encoding/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "encoding", ++ srcs = ["encoding.go"], ++ importpath = "golang.org/x/tools/go/ssa/testdata/src/encoding", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":encoding", ++ visibility = ["//visibility:public"], ++) +diff -urN 
b/go/ssa/testdata/src/encoding/json/BUILD.bazel c/go/ssa/testdata/src/encoding/json/BUILD.bazel +--- b/go/ssa/testdata/src/encoding/json/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/ssa/testdata/src/encoding/json/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "json", ++ srcs = ["json.go"], ++ importpath = "golang.org/x/tools/go/ssa/testdata/src/encoding/json", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":json", ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/ssa/testdata/src/encoding/xml/BUILD.bazel c/go/ssa/testdata/src/encoding/xml/BUILD.bazel +--- b/go/ssa/testdata/src/encoding/xml/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/ssa/testdata/src/encoding/xml/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "xml", ++ srcs = ["xml.go"], ++ importpath = "golang.org/x/tools/go/ssa/testdata/src/encoding/xml", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":xml", ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/ssa/testdata/src/errors/BUILD.bazel c/go/ssa/testdata/src/errors/BUILD.bazel +--- b/go/ssa/testdata/src/errors/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/ssa/testdata/src/errors/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "errors", ++ srcs = ["errors.go"], ++ importpath = "golang.org/x/tools/go/ssa/testdata/src/errors", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":errors", ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/ssa/testdata/src/fmt/BUILD.bazel c/go/ssa/testdata/src/fmt/BUILD.bazel +--- b/go/ssa/testdata/src/fmt/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/ssa/testdata/src/fmt/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "fmt", ++ srcs = ["fmt.go"], ++ importpath = "golang.org/x/tools/go/ssa/testdata/src/fmt", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":fmt", ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/ssa/testdata/src/io/BUILD.bazel c/go/ssa/testdata/src/io/BUILD.bazel +--- b/go/ssa/testdata/src/io/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/ssa/testdata/src/io/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "io", ++ srcs = ["io.go"], ++ importpath = "golang.org/x/tools/go/ssa/testdata/src/io", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":io", ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/ssa/testdata/src/log/BUILD.bazel c/go/ssa/testdata/src/log/BUILD.bazel +--- b/go/ssa/testdata/src/log/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/ssa/testdata/src/log/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "log", ++ srcs = ["log.go"], ++ importpath = "golang.org/x/tools/go/ssa/testdata/src/log", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name 
= "go_default_library", ++ actual = ":log", ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/ssa/testdata/src/math/BUILD.bazel c/go/ssa/testdata/src/math/BUILD.bazel +--- b/go/ssa/testdata/src/math/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/ssa/testdata/src/math/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "math", ++ srcs = ["math.go"], ++ importpath = "golang.org/x/tools/go/ssa/testdata/src/math", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":math", ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/ssa/testdata/src/os/BUILD.bazel c/go/ssa/testdata/src/os/BUILD.bazel +--- b/go/ssa/testdata/src/os/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/ssa/testdata/src/os/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "os", ++ srcs = ["os.go"], ++ importpath = "golang.org/x/tools/go/ssa/testdata/src/os", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":os", ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/ssa/testdata/src/reflect/BUILD.bazel c/go/ssa/testdata/src/reflect/BUILD.bazel +--- b/go/ssa/testdata/src/reflect/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/ssa/testdata/src/reflect/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "reflect", ++ srcs = ["reflect.go"], ++ importpath = "golang.org/x/tools/go/ssa/testdata/src/reflect", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":reflect", ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/ssa/testdata/src/runtime/BUILD.bazel c/go/ssa/testdata/src/runtime/BUILD.bazel +--- b/go/ssa/testdata/src/runtime/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/ssa/testdata/src/runtime/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "runtime", ++ srcs = ["runtime.go"], ++ importpath = "golang.org/x/tools/go/ssa/testdata/src/runtime", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":runtime", ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/ssa/testdata/src/sort/BUILD.bazel c/go/ssa/testdata/src/sort/BUILD.bazel +--- b/go/ssa/testdata/src/sort/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/ssa/testdata/src/sort/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "sort", ++ srcs = ["sort.go"], ++ importpath = "golang.org/x/tools/go/ssa/testdata/src/sort", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":sort", ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/ssa/testdata/src/strconv/BUILD.bazel c/go/ssa/testdata/src/strconv/BUILD.bazel +--- b/go/ssa/testdata/src/strconv/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/ssa/testdata/src/strconv/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "strconv", ++ srcs = ["strconv.go"], ++ importpath = 
"golang.org/x/tools/go/ssa/testdata/src/strconv", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":strconv", ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/ssa/testdata/src/strings/BUILD.bazel c/go/ssa/testdata/src/strings/BUILD.bazel +--- b/go/ssa/testdata/src/strings/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/ssa/testdata/src/strings/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "strings", ++ srcs = ["strings.go"], ++ importpath = "golang.org/x/tools/go/ssa/testdata/src/strings", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":strings", ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/ssa/testdata/src/sync/atomic/BUILD.bazel c/go/ssa/testdata/src/sync/atomic/BUILD.bazel +--- b/go/ssa/testdata/src/sync/atomic/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/ssa/testdata/src/sync/atomic/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "atomic", ++ srcs = ["atomic.go"], ++ importpath = "golang.org/x/tools/go/ssa/testdata/src/sync/atomic", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":atomic", ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/ssa/testdata/src/sync/BUILD.bazel c/go/ssa/testdata/src/sync/BUILD.bazel +--- b/go/ssa/testdata/src/sync/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/ssa/testdata/src/sync/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "sync", ++ srcs = ["sync.go"], ++ importpath = "golang.org/x/tools/go/ssa/testdata/src/sync", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":sync", ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/ssa/testdata/src/time/BUILD.bazel c/go/ssa/testdata/src/time/BUILD.bazel +--- b/go/ssa/testdata/src/time/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/ssa/testdata/src/time/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "time", ++ srcs = ["time.go"], ++ importpath = "golang.org/x/tools/go/ssa/testdata/src/time", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":time", ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/ssa/testdata/src/unsafe/BUILD.bazel c/go/ssa/testdata/src/unsafe/BUILD.bazel +--- b/go/ssa/testdata/src/unsafe/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/ssa/testdata/src/unsafe/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "unsafe", ++ srcs = ["unsafe.go"], ++ importpath = "golang.org/x/tools/go/ssa/testdata/src/unsafe", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":unsafe", ++ visibility = ["//visibility:public"], ++) +diff -urN b/go/types/internal/play/BUILD.bazel c/go/types/internal/play/BUILD.bazel +--- b/go/types/internal/play/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/types/internal/play/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,21 @@ 
++load("@io_bazel_rules_go//go:def.bzl", "go_binary", "go_library") ++ ++go_library( ++ name = "play_lib", ++ srcs = ["play.go"], ++ importpath = "golang.org/x/tools/go/types/internal/play", ++ visibility = ["//visibility:private"], ++ deps = [ ++ "//go/ast/astutil", ++ "//go/ast/inspector", ++ "//go/packages", ++ "//go/types/typeutil", ++ "//internal/typeparams", ++ ], ++) ++ ++go_binary( ++ name = "play", ++ embed = [":play_lib"], ++ visibility = ["//go/types:__subpackages__"], ++) +diff -urN b/go/types/objectpath/BUILD.bazel c/go/types/objectpath/BUILD.bazel +--- b/go/types/objectpath/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/types/objectpath/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,34 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test") ++ ++go_library( ++ name = "objectpath", ++ srcs = ["objectpath.go"], ++ importpath = "golang.org/x/tools/go/types/objectpath", ++ visibility = ["//visibility:public"], ++ deps = [ ++ "//internal/aliases", ++ "//internal/typesinternal", ++ ], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":objectpath", ++ visibility = ["//visibility:public"], ++) ++ ++go_test( ++ name = "objectpath_test", ++ srcs = [ ++ "objectpath_go118_test.go", ++ "objectpath_test.go", ++ ], ++ deps = [ ++ ":objectpath", ++ "//go/gcexportdata", ++ "//go/packages", ++ "//internal/aliases", ++ "//internal/testfiles", ++ "//txtar", ++ ], ++) +diff -urN b/go/types/typeutil/BUILD.bazel c/go/types/typeutil/BUILD.bazel +--- b/go/types/typeutil/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/go/types/typeutil/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,37 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test") ++ ++go_library( ++ name = "typeutil", ++ srcs = [ ++ "callee.go", ++ "imports.go", ++ "map.go", ++ "methodsetcache.go", ++ "ui.go", ++ ], ++ importpath = "golang.org/x/tools/go/types/typeutil", ++ visibility = ["//visibility:public"], ++ deps = ["//internal/typeparams"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":typeutil", ++ visibility = ["//visibility:public"], ++) ++ ++go_test( ++ name = "typeutil_test", ++ srcs = [ ++ "callee_test.go", ++ "example_test.go", ++ "imports_test.go", ++ "map_test.go", ++ "ui_test.go", ++ ], ++ deps = [ ++ ":typeutil", ++ "//go/packages", ++ "//internal/testenv", ++ ], ++) +diff -urN b/imports/BUILD.bazel c/imports/BUILD.bazel +--- b/imports/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/imports/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,18 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "imports", ++ srcs = ["forward.go"], ++ importpath = "golang.org/x/tools/imports", ++ visibility = ["//visibility:public"], ++ deps = [ ++ "//internal/gocommand", ++ "//internal/imports", ++ ], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":imports", ++ visibility = ["//visibility:public"], ++) +diff -urN b/internal/aliases/BUILD.bazel c/internal/aliases/BUILD.bazel +--- b/internal/aliases/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/internal/aliases/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,26 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test") ++ ++go_library( ++ name = "aliases", ++ srcs = [ ++ "aliases.go", ++ "aliases_go122.go", ++ ], ++ importpath = "golang.org/x/tools/internal/aliases", ++ visibility = ["//:__subpackages__"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":aliases", ++ visibility = 
["//:__subpackages__"], ++) ++ ++go_test( ++ name = "aliases_test", ++ srcs = ["aliases_test.go"], ++ deps = [ ++ ":aliases", ++ "//internal/testenv", ++ ], ++) +diff -urN b/internal/analysisinternal/BUILD.bazel c/internal/analysisinternal/BUILD.bazel +--- b/internal/analysisinternal/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/internal/analysisinternal/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,27 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test") ++ ++go_library( ++ name = "analysisinternal", ++ srcs = [ ++ "analysis.go", ++ "extractdoc.go", ++ ], ++ importpath = "golang.org/x/tools/internal/analysisinternal", ++ visibility = ["//:__subpackages__"], ++ deps = ["//go/analysis"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":analysisinternal", ++ visibility = ["//:__subpackages__"], ++) ++ ++go_test( ++ name = "analysisinternal_test", ++ srcs = [ ++ "analysis_test.go", ++ "extractdoc_test.go", ++ ], ++ deps = [":analysisinternal"], ++) +diff -urN b/internal/analysisinternal/generated/BUILD.bazel c/internal/analysisinternal/generated/BUILD.bazel +--- b/internal/analysisinternal/generated/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/internal/analysisinternal/generated/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,15 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "generated", ++ srcs = ["generated.go"], ++ importpath = "golang.org/x/tools/internal/analysisinternal/generated", ++ visibility = ["//:__subpackages__"], ++ deps = ["//go/analysis"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":generated", ++ visibility = ["//:__subpackages__"], ++) +diff -urN b/internal/analysisinternal/typeindex/BUILD.bazel c/internal/analysisinternal/typeindex/BUILD.bazel +--- b/internal/analysisinternal/typeindex/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/internal/analysisinternal/typeindex/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,20 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "typeindex", ++ srcs = ["typeindex.go"], ++ importpath = "golang.org/x/tools/internal/analysisinternal/typeindex", ++ visibility = ["//:__subpackages__"], ++ deps = [ ++ "//go/analysis", ++ "//go/analysis/passes/inspect", ++ "//go/ast/inspector", ++ "//internal/typesinternal/typeindex", ++ ], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":typeindex", ++ visibility = ["//:__subpackages__"], ++) +diff -urN b/internal/astutil/BUILD.bazel c/internal/astutil/BUILD.bazel +--- b/internal/astutil/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/internal/astutil/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,44 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test") ++ ++go_library( ++ name = "astutil", ++ srcs = [ ++ "clone.go", ++ "comment.go", ++ "equal.go", ++ "fields.go", ++ "purge.go", ++ "stringlit.go", ++ "unpack.go", ++ "util.go", ++ ], ++ importpath = "golang.org/x/tools/internal/astutil", ++ visibility = ["//:__subpackages__"], ++ deps = [ ++ "//go/ast/edge", ++ "//go/ast/inspector", ++ "//internal/moreiters", ++ "//internal/typeparams", ++ ], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":astutil", ++ visibility = ["//:__subpackages__"], ++) ++ ++go_test( ++ name = "astutil_test", ++ srcs = [ ++ "comment_test.go", ++ "fields_test.go", ++ "purge_test.go", ++ "util_test.go", ++ ], ++ deps = [ ++ ":astutil", ++ "//go/packages", ++ "//internal/testenv", ++ ], ++) +diff 
-urN b/internal/bisect/BUILD.bazel c/internal/bisect/BUILD.bazel +--- b/internal/bisect/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/internal/bisect/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,20 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test") ++ ++go_library( ++ name = "bisect", ++ srcs = ["bisect.go"], ++ importpath = "golang.org/x/tools/internal/bisect", ++ visibility = ["//:__subpackages__"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":bisect", ++ visibility = ["//:__subpackages__"], ++) ++ ++go_test( ++ name = "bisect_test", ++ srcs = ["bisect_test.go"], ++ embed = [":bisect"], ++) +diff -urN b/internal/diff/BUILD.bazel c/internal/diff/BUILD.bazel +--- b/internal/diff/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/internal/diff/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,34 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test") ++ ++go_library( ++ name = "diff", ++ srcs = [ ++ "diff.go", ++ "merge.go", ++ "ndiff.go", ++ "unified.go", ++ ], ++ importpath = "golang.org/x/tools/internal/diff", ++ visibility = ["//:__subpackages__"], ++ deps = ["//internal/diff/lcs"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":diff", ++ visibility = ["//:__subpackages__"], ++) ++ ++go_test( ++ name = "diff_test", ++ srcs = [ ++ "diff_test.go", ++ "export_test.go", ++ "merge_test.go", ++ ], ++ embed = [":diff"], ++ deps = [ ++ "//internal/diff/difftest", ++ "//internal/testenv", ++ ], ++) +diff -urN b/internal/diff/difftest/BUILD.bazel c/internal/diff/difftest/BUILD.bazel +--- b/internal/diff/difftest/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/internal/diff/difftest/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,24 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test") ++ ++go_library( ++ name = "difftest", ++ srcs = ["difftest.go"], ++ importpath = "golang.org/x/tools/internal/diff/difftest", ++ visibility = ["//:__subpackages__"], ++ deps = ["//internal/diff"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":difftest", ++ visibility = ["//:__subpackages__"], ++) ++ ++go_test( ++ name = "difftest_test", ++ srcs = ["difftest_test.go"], ++ deps = [ ++ ":difftest", ++ "//internal/testenv", ++ ], ++) +diff -urN b/internal/diff/lcs/BUILD.bazel c/internal/diff/lcs/BUILD.bazel +--- b/internal/diff/lcs/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/internal/diff/lcs/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,29 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test") ++ ++go_library( ++ name = "lcs", ++ srcs = [ ++ "common.go", ++ "doc.go", ++ "labels.go", ++ "old.go", ++ "sequence.go", ++ ], ++ importpath = "golang.org/x/tools/internal/diff/lcs", ++ visibility = ["//:__subpackages__"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":lcs", ++ visibility = ["//:__subpackages__"], ++) ++ ++go_test( ++ name = "lcs_test", ++ srcs = [ ++ "common_test.go", ++ "old_test.go", ++ ], ++ embed = [":lcs"], ++) +diff -urN b/internal/diff/myers/BUILD.bazel c/internal/diff/myers/BUILD.bazel +--- b/internal/diff/myers/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/internal/diff/myers/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,24 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test") ++ ++go_library( ++ name = "myers", ++ srcs = ["diff.go"], ++ importpath = "golang.org/x/tools/internal/diff/myers", ++ visibility = ["//:__subpackages__"], ++ deps = ["//internal/diff"], 
++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":myers", ++ visibility = ["//:__subpackages__"], ++) ++ ++go_test( ++ name = "myers_test", ++ srcs = ["diff_test.go"], ++ deps = [ ++ ":myers", ++ "//internal/diff/difftest", ++ ], ++) +diff -urN b/internal/diffp/BUILD.bazel c/internal/diffp/BUILD.bazel +--- b/internal/diffp/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/internal/diffp/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,22 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test") ++ ++go_library( ++ name = "diffp", ++ srcs = ["diff.go"], ++ importpath = "golang.org/x/tools/internal/diffp", ++ visibility = ["//:__subpackages__"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":diffp", ++ visibility = ["//:__subpackages__"], ++) ++ ++go_test( ++ name = "diffp_test", ++ srcs = ["diff_test.go"], ++ data = glob(["testdata/**"]), ++ embed = [":diffp"], ++ deps = ["//txtar"], ++) +diff -urN b/internal/drivertest/BUILD.bazel c/internal/drivertest/BUILD.bazel +--- b/internal/drivertest/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/internal/drivertest/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,30 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test") ++ ++go_library( ++ name = "drivertest", ++ srcs = ["driver.go"], ++ importpath = "golang.org/x/tools/internal/drivertest", ++ visibility = ["//:__subpackages__"], ++ deps = ["//go/packages"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":drivertest", ++ visibility = ["//:__subpackages__"], ++) ++ ++go_test( ++ name = "drivertest_test", ++ srcs = ["driver_test.go"], ++ deps = [ ++ ":drivertest", ++ "//go/packages", ++ "//internal/diff", ++ "//internal/diff/myers", ++ "//internal/packagesinternal", ++ "//internal/testenv", ++ "//internal/testfiles", ++ "//txtar", ++ ], ++) +diff -urN b/internal/edit/BUILD.bazel c/internal/edit/BUILD.bazel +--- b/internal/edit/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/internal/edit/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,20 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test") ++ ++go_library( ++ name = "edit", ++ srcs = ["edit.go"], ++ importpath = "golang.org/x/tools/internal/edit", ++ visibility = ["//:__subpackages__"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":edit", ++ visibility = ["//:__subpackages__"], ++) ++ ++go_test( ++ name = "edit_test", ++ srcs = ["edit_test.go"], ++ embed = [":edit"], ++) +diff -urN b/internal/event/BUILD.bazel c/internal/event/BUILD.bazel +--- b/internal/event/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/internal/event/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,34 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test") ++ ++go_library( ++ name = "event", ++ srcs = [ ++ "doc.go", ++ "event.go", ++ ], ++ importpath = "golang.org/x/tools/internal/event", ++ visibility = ["//:__subpackages__"], ++ deps = [ ++ "//internal/event/core", ++ "//internal/event/keys", ++ "//internal/event/label", ++ ], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":event", ++ visibility = ["//:__subpackages__"], ++) ++ ++go_test( ++ name = "event_test", ++ srcs = ["bench_test.go"], ++ deps = [ ++ ":event", ++ "//internal/event/core", ++ "//internal/event/export", ++ "//internal/event/keys", ++ "//internal/event/label", ++ ], ++) +diff -urN b/internal/event/core/BUILD.bazel c/internal/event/core/BUILD.bazel +--- b/internal/event/core/BUILD.bazel 1969-12-31 18:00:00.000000000 
-0600 ++++ c/internal/event/core/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,22 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "core", ++ srcs = [ ++ "event.go", ++ "export.go", ++ "fast.go", ++ ], ++ importpath = "golang.org/x/tools/internal/event/core", ++ visibility = ["//:__subpackages__"], ++ deps = [ ++ "//internal/event/keys", ++ "//internal/event/label", ++ ], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":core", ++ visibility = ["//:__subpackages__"], ++) +diff -urN b/internal/event/export/BUILD.bazel c/internal/event/export/BUILD.bazel +--- b/internal/event/export/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/internal/event/export/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,38 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test") ++ ++go_library( ++ name = "export", ++ srcs = [ ++ "id.go", ++ "labels.go", ++ "log.go", ++ "printer.go", ++ "trace.go", ++ ], ++ importpath = "golang.org/x/tools/internal/event/export", ++ visibility = ["//:__subpackages__"], ++ deps = [ ++ "//internal/event", ++ "//internal/event/core", ++ "//internal/event/keys", ++ "//internal/event/label", ++ ], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":export", ++ visibility = ["//:__subpackages__"], ++) ++ ++go_test( ++ name = "export_test", ++ srcs = ["log_test.go"], ++ deps = [ ++ ":export", ++ "//internal/event", ++ "//internal/event/core", ++ "//internal/event/keys", ++ "//internal/event/label", ++ ], ++) +diff -urN b/internal/event/export/eventtest/BUILD.bazel c/internal/event/export/eventtest/BUILD.bazel +--- b/internal/event/export/eventtest/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/internal/event/export/eventtest/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,20 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "eventtest", ++ srcs = ["eventtest.go"], ++ importpath = "golang.org/x/tools/internal/event/export/eventtest", ++ visibility = ["//:__subpackages__"], ++ deps = [ ++ "//internal/event", ++ "//internal/event/core", ++ "//internal/event/export", ++ "//internal/event/label", ++ ], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":eventtest", ++ visibility = ["//:__subpackages__"], ++) +diff -urN b/internal/event/export/metric/BUILD.bazel c/internal/event/export/metric/BUILD.bazel +--- b/internal/event/export/metric/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/internal/event/export/metric/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,24 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "metric", ++ srcs = [ ++ "data.go", ++ "exporter.go", ++ "info.go", ++ ], ++ importpath = "golang.org/x/tools/internal/event/export/metric", ++ visibility = ["//:__subpackages__"], ++ deps = [ ++ "//internal/event", ++ "//internal/event/core", ++ "//internal/event/keys", ++ "//internal/event/label", ++ ], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":metric", ++ visibility = ["//:__subpackages__"], ++) +diff -urN b/internal/event/export/prometheus/BUILD.bazel c/internal/event/export/prometheus/BUILD.bazel +--- b/internal/event/export/prometheus/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/internal/event/export/prometheus/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,20 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "prometheus", ++ srcs = ["prometheus.go"], ++ importpath = 
"golang.org/x/tools/internal/event/export/prometheus", ++ visibility = ["//:__subpackages__"], ++ deps = [ ++ "//internal/event", ++ "//internal/event/core", ++ "//internal/event/export/metric", ++ "//internal/event/label", ++ ], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":prometheus", ++ visibility = ["//:__subpackages__"], ++) +diff -urN b/internal/event/keys/BUILD.bazel c/internal/event/keys/BUILD.bazel +--- b/internal/event/keys/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/internal/event/keys/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,25 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test") ++ ++go_library( ++ name = "keys", ++ srcs = [ ++ "keys.go", ++ "standard.go", ++ "util.go", ++ ], ++ importpath = "golang.org/x/tools/internal/event/keys", ++ visibility = ["//:__subpackages__"], ++ deps = ["//internal/event/label"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":keys", ++ visibility = ["//:__subpackages__"], ++) ++ ++go_test( ++ name = "keys_test", ++ srcs = ["util_test.go"], ++ embed = [":keys"], ++) +diff -urN b/internal/event/label/BUILD.bazel c/internal/event/label/BUILD.bazel +--- b/internal/event/label/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/internal/event/label/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,23 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test") ++ ++go_library( ++ name = "label", ++ srcs = ["label.go"], ++ importpath = "golang.org/x/tools/internal/event/label", ++ visibility = ["//:__subpackages__"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":label", ++ visibility = ["//:__subpackages__"], ++) ++ ++go_test( ++ name = "label_test", ++ srcs = ["label_test.go"], ++ deps = [ ++ ":label", ++ "//internal/event/keys", ++ ], ++) +diff -urN b/internal/expect/BUILD.bazel c/internal/expect/BUILD.bazel +--- b/internal/expect/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/internal/expect/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,24 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test") ++ ++go_library( ++ name = "expect", ++ srcs = [ ++ "expect.go", ++ "extract.go", ++ ], ++ importpath = "golang.org/x/tools/internal/expect", ++ visibility = ["//:__subpackages__"], ++ deps = ["@org_golang_x_mod//modfile:go_default_library"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":expect", ++ visibility = ["//:__subpackages__"], ++) ++ ++go_test( ++ name = "expect_test", ++ srcs = ["expect_test.go"], ++ deps = [":expect"], ++) +diff -urN b/internal/expect/testdata/BUILD.bazel c/internal/expect/testdata/BUILD.bazel +--- b/internal/expect/testdata/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/internal/expect/testdata/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "testdata", ++ srcs = ["test.go"], ++ importpath = "golang.org/x/tools/internal/expect/testdata", ++ visibility = ["//:__subpackages__"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":testdata", ++ visibility = ["//:__subpackages__"], ++) +diff -urN b/internal/facts/BUILD.bazel c/internal/facts/BUILD.bazel +--- b/internal/facts/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/internal/facts/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,36 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test") ++ ++go_library( ++ name = "facts", ++ srcs = [ ++ "facts.go", ++ "imports.go", ++ ], 
++ importpath = "golang.org/x/tools/internal/facts", ++ visibility = ["//visibility:public"], ++ deps = [ ++ "//go/analysis", ++ "//go/types/objectpath", ++ "//internal/aliases", ++ "//internal/typesinternal", ++ ], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":facts", ++ visibility = ["//visibility:public"], ++) ++ ++go_test( ++ name = "facts_test", ++ srcs = ["facts_test.go"], ++ deps = [ ++ ":facts", ++ "//go/analysis/analysistest", ++ "//go/packages", ++ "//internal/aliases", ++ "//internal/testenv", ++ "//internal/typesinternal", ++ ], ++) +diff -urN b/internal/fmtstr/BUILD.bazel c/internal/fmtstr/BUILD.bazel +--- b/internal/fmtstr/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/internal/fmtstr/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "fmtstr", ++ srcs = ["parse.go"], ++ importpath = "golang.org/x/tools/internal/fmtstr", ++ visibility = ["//:__subpackages__"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":fmtstr", ++ visibility = ["//:__subpackages__"], ++) +diff -urN b/internal/gcimporter/BUILD.bazel c/internal/gcimporter/BUILD.bazel +--- b/internal/gcimporter/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/internal/gcimporter/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,51 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test") ++ ++go_library( ++ name = "gcimporter", ++ srcs = [ ++ "bimport.go", ++ "exportdata.go", ++ "gcimporter.go", ++ "iexport.go", ++ "iimport.go", ++ "predeclared.go", ++ "support.go", ++ "ureader_yes.go", ++ ], ++ importpath = "golang.org/x/tools/internal/gcimporter", ++ visibility = ["//:__subpackages__"], ++ deps = [ ++ "//go/types/objectpath", ++ "//internal/aliases", ++ "//internal/pkgbits", ++ "//internal/typesinternal", ++ ], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":gcimporter", ++ visibility = ["//:__subpackages__"], ++) ++ ++go_test( ++ name = "gcimporter_test", ++ srcs = [ ++ "bexport_test.go", ++ "gcimporter_test.go", ++ "iexport_common_test.go", ++ "iexport_go118_test.go", ++ "iexport_test.go", ++ "israce_test.go", ++ "shallow_test.go", ++ "stdlib_test.go", ++ ], ++ embed = [":gcimporter"], ++ deps = [ ++ "//go/gcexportdata", ++ "//go/packages", ++ "//internal/goroot", ++ "//internal/testenv", ++ "@org_golang_x_sync//errgroup:go_default_library", ++ ], ++) +diff -urN b/internal/gcimporter/testdata/a/BUILD.bazel c/internal/gcimporter/testdata/a/BUILD.bazel +--- b/internal/gcimporter/testdata/a/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/internal/gcimporter/testdata/a/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "a", ++ srcs = ["a.go"], ++ importpath = "golang.org/x/tools/internal/gcimporter/testdata/a", ++ visibility = ["//:__subpackages__"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":a", ++ visibility = ["//:__subpackages__"], ++) +diff -urN b/internal/gcimporter/testdata/aliases/a/BUILD.bazel c/internal/gcimporter/testdata/aliases/a/BUILD.bazel +--- b/internal/gcimporter/testdata/aliases/a/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/internal/gcimporter/testdata/aliases/a/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "a", ++ srcs = ["a.go"], ++ importpath = 
"golang.org/x/tools/internal/gcimporter/testdata/aliases/a", ++ visibility = ["//:__subpackages__"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":a", ++ visibility = ["//:__subpackages__"], ++) +diff -urN b/internal/gcimporter/testdata/aliases/b/BUILD.bazel c/internal/gcimporter/testdata/aliases/b/BUILD.bazel +--- b/internal/gcimporter/testdata/aliases/b/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/internal/gcimporter/testdata/aliases/b/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "b", ++ srcs = ["b.go"], ++ importpath = "golang.org/x/tools/internal/gcimporter/testdata/aliases/b", ++ visibility = ["//:__subpackages__"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":b", ++ visibility = ["//:__subpackages__"], ++) +diff -urN b/internal/gcimporter/testdata/aliases/c/BUILD.bazel c/internal/gcimporter/testdata/aliases/c/BUILD.bazel +--- b/internal/gcimporter/testdata/aliases/c/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/internal/gcimporter/testdata/aliases/c/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "c", ++ srcs = ["c.go"], ++ importpath = "golang.org/x/tools/internal/gcimporter/testdata/aliases/c", ++ visibility = ["//:__subpackages__"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":c", ++ visibility = ["//:__subpackages__"], ++) +diff -urN b/internal/gcimporter/testdata/issue51836/a/BUILD.bazel c/internal/gcimporter/testdata/issue51836/a/BUILD.bazel +--- b/internal/gcimporter/testdata/issue51836/a/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/internal/gcimporter/testdata/issue51836/a/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "a", ++ srcs = ["a.go"], ++ importpath = "golang.org/x/tools/internal/gcimporter/testdata/issue51836/a", ++ visibility = ["//:__subpackages__"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":a", ++ visibility = ["//:__subpackages__"], ++) +diff -urN b/internal/gcimporter/testdata/issue51836/BUILD.bazel c/internal/gcimporter/testdata/issue51836/BUILD.bazel +--- b/internal/gcimporter/testdata/issue51836/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/internal/gcimporter/testdata/issue51836/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,18 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "issue51836", ++ srcs = [ ++ "a.go", ++ "aa.go", ++ ], ++ importpath = "golang.org/x/tools/internal/gcimporter/testdata/issue51836", ++ visibility = ["//:__subpackages__"], ++ deps = ["//internal/gcimporter/testdata/issue51836/a"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":issue51836", ++ visibility = ["//:__subpackages__"], ++) +diff -urN b/internal/gcimporter/testdata/issue58296/a/BUILD.bazel c/internal/gcimporter/testdata/issue58296/a/BUILD.bazel +--- b/internal/gcimporter/testdata/issue58296/a/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/internal/gcimporter/testdata/issue58296/a/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "a", ++ srcs = ["a.go"], ++ importpath = "golang.org/x/tools/internal/gcimporter/testdata/issue58296/a", ++ visibility = ["//:__subpackages__"], ++) ++ ++alias( ++ name = 
"go_default_library", ++ actual = ":a", ++ visibility = ["//:__subpackages__"], ++) +diff -urN b/internal/gcimporter/testdata/issue58296/b/BUILD.bazel c/internal/gcimporter/testdata/issue58296/b/BUILD.bazel +--- b/internal/gcimporter/testdata/issue58296/b/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/internal/gcimporter/testdata/issue58296/b/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "b", ++ srcs = ["b.go"], ++ importpath = "golang.org/x/tools/internal/gcimporter/testdata/issue58296/b", ++ visibility = ["//:__subpackages__"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":b", ++ visibility = ["//:__subpackages__"], ++) +diff -urN b/internal/gcimporter/testdata/issue58296/c/BUILD.bazel c/internal/gcimporter/testdata/issue58296/c/BUILD.bazel +--- b/internal/gcimporter/testdata/issue58296/c/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/internal/gcimporter/testdata/issue58296/c/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "c", ++ srcs = ["c.go"], ++ importpath = "golang.org/x/tools/internal/gcimporter/testdata/issue58296/c", ++ visibility = ["//:__subpackages__"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":c", ++ visibility = ["//:__subpackages__"], ++) +diff -urN b/internal/gcimporter/testdata/versions/BUILD.bazel c/internal/gcimporter/testdata/versions/BUILD.bazel +--- b/internal/gcimporter/testdata/versions/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/internal/gcimporter/testdata/versions/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "versions", ++ srcs = ["test.go"], ++ importpath = "golang.org/x/tools/internal/gcimporter/testdata/versions", ++ visibility = ["//:__subpackages__"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":versions", ++ visibility = ["//:__subpackages__"], ++) +diff -urN b/internal/gocommand/BUILD.bazel c/internal/gocommand/BUILD.bazel +--- b/internal/gocommand/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/internal/gocommand/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,39 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test") ++ ++go_library( ++ name = "gocommand", ++ srcs = [ ++ "invoke.go", ++ "invoke_notunix.go", ++ "invoke_unix.go", ++ "vendor.go", ++ "version.go", ++ ], ++ importpath = "golang.org/x/tools/internal/gocommand", ++ visibility = ["//:__subpackages__"], ++ deps = [ ++ "//internal/event", ++ "//internal/event/keys", ++ "//internal/event/label", ++ "@org_golang_x_mod//semver:go_default_library", ++ ], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":gocommand", ++ visibility = ["//:__subpackages__"], ++) ++ ++go_test( ++ name = "gocommand_test", ++ srcs = [ ++ "invoke_test.go", ++ "version_test.go", ++ ], ++ embed = [":gocommand"], ++ deps = [ ++ "//internal/testenv", ++ "@org_golang_x_sync//errgroup:go_default_library", ++ ], ++) +diff -urN b/internal/gopathwalk/BUILD.bazel c/internal/gopathwalk/BUILD.bazel +--- b/internal/gopathwalk/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/internal/gopathwalk/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,20 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test") ++ ++go_library( ++ name = "gopathwalk", ++ srcs = ["walk.go"], ++ importpath = 
"golang.org/x/tools/internal/gopathwalk", ++ visibility = ["//:__subpackages__"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":gopathwalk", ++ visibility = ["//:__subpackages__"], ++) ++ ++go_test( ++ name = "gopathwalk_test", ++ srcs = ["walk_test.go"], ++ embed = [":gopathwalk"], ++) +diff -urN b/internal/goplsexport/BUILD.bazel c/internal/goplsexport/BUILD.bazel +--- b/internal/goplsexport/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/internal/goplsexport/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,15 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "goplsexport", ++ srcs = ["export.go"], ++ importpath = "golang.org/x/tools/internal/goplsexport", ++ visibility = ["//:__subpackages__"], ++ deps = ["//go/analysis"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":goplsexport", ++ visibility = ["//:__subpackages__"], ++) +diff -urN b/internal/goroot/BUILD.bazel c/internal/goroot/BUILD.bazel +--- b/internal/goroot/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/internal/goroot/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "goroot", ++ srcs = ["importcfg.go"], ++ importpath = "golang.org/x/tools/internal/goroot", ++ visibility = ["//:__subpackages__"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":goroot", ++ visibility = ["//:__subpackages__"], ++) +diff -urN b/internal/imports/BUILD.bazel c/internal/imports/BUILD.bazel +--- b/internal/imports/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/internal/imports/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,58 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test") ++ ++go_library( ++ name = "imports", ++ srcs = [ ++ "fix.go", ++ "imports.go", ++ "mod.go", ++ "mod_cache.go", ++ "sortimports.go", ++ "source.go", ++ "source_env.go", ++ "source_modindex.go", ++ ], ++ importpath = "golang.org/x/tools/internal/imports", ++ visibility = ["//:__subpackages__"], ++ deps = [ ++ "//go/ast/astutil", ++ "//internal/event", ++ "//internal/gocommand", ++ "//internal/gopathwalk", ++ "//internal/modindex", ++ "//internal/stdlib", ++ "@org_golang_x_mod//module:go_default_library", ++ "@org_golang_x_sync//errgroup:go_default_library", ++ ], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":imports", ++ visibility = ["//:__subpackages__"], ++) ++ ++go_test( ++ name = "imports_test", ++ srcs = [ ++ "fix_test.go", ++ "imports_test.go", ++ "mod_cache_test.go", ++ "mod_test.go", ++ "sourcex_test.go", ++ ], ++ data = glob(["testdata/**"]), ++ embed = [":imports"], ++ deps = [ ++ "//internal/gocommand", ++ "//internal/gopathwalk", ++ "//internal/modindex", ++ "//internal/packagestest", ++ "//internal/proxydir", ++ "//internal/stdlib", ++ "//internal/testenv", ++ "//txtar", ++ "@com_github_google_go_cmp//cmp:go_default_library", ++ "@org_golang_x_mod//module:go_default_library", ++ ], ++) +diff -urN b/internal/jsonrpc2/BUILD.bazel c/internal/jsonrpc2/BUILD.bazel +--- b/internal/jsonrpc2/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/internal/jsonrpc2/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,43 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test") ++ ++go_library( ++ name = "jsonrpc2", ++ srcs = [ ++ "conn.go", ++ "handler.go", ++ "jsonrpc2.go", ++ "labels.go", ++ "messages.go", ++ "serve.go", ++ "stream.go", ++ "wire.go", ++ ], ++ importpath = 
"golang.org/x/tools/internal/jsonrpc2", ++ visibility = ["//:__subpackages__"], ++ deps = [ ++ "//internal/event", ++ "//internal/event/keys", ++ "//internal/event/label", ++ ], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":jsonrpc2", ++ visibility = ["//:__subpackages__"], ++) ++ ++go_test( ++ name = "jsonrpc2_test", ++ srcs = [ ++ "jsonrpc2_test.go", ++ "serve_test.go", ++ "wire_test.go", ++ ], ++ embed = [":jsonrpc2"], ++ deps = [ ++ "//internal/event/export/eventtest", ++ "//internal/jsonrpc2/stack/stacktest", ++ "//internal/testenv", ++ ], ++) +diff -urN b/internal/jsonrpc2/servertest/BUILD.bazel c/internal/jsonrpc2/servertest/BUILD.bazel +--- b/internal/jsonrpc2/servertest/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/internal/jsonrpc2/servertest/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,22 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test") ++ ++go_library( ++ name = "servertest", ++ srcs = ["servertest.go"], ++ importpath = "golang.org/x/tools/internal/jsonrpc2/servertest", ++ visibility = ["//:__subpackages__"], ++ deps = ["//internal/jsonrpc2"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":servertest", ++ visibility = ["//:__subpackages__"], ++) ++ ++go_test( ++ name = "servertest_test", ++ srcs = ["servertest_test.go"], ++ embed = [":servertest"], ++ deps = ["//internal/jsonrpc2"], ++) +diff -urN b/internal/jsonrpc2/stack/BUILD.bazel c/internal/jsonrpc2/stack/BUILD.bazel +--- b/internal/jsonrpc2/stack/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/internal/jsonrpc2/stack/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,24 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test") ++ ++go_library( ++ name = "stack", ++ srcs = [ ++ "parse.go", ++ "process.go", ++ "stack.go", ++ ], ++ importpath = "golang.org/x/tools/internal/jsonrpc2/stack", ++ visibility = ["//:__subpackages__"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":stack", ++ visibility = ["//:__subpackages__"], ++) ++ ++go_test( ++ name = "stack_test", ++ srcs = ["stack_test.go"], ++ deps = [":stack"], ++) +diff -urN b/internal/jsonrpc2/stack/stacktest/BUILD.bazel c/internal/jsonrpc2/stack/stacktest/BUILD.bazel +--- b/internal/jsonrpc2/stack/stacktest/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/internal/jsonrpc2/stack/stacktest/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,15 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "stacktest", ++ srcs = ["stacktest.go"], ++ importpath = "golang.org/x/tools/internal/jsonrpc2/stack/stacktest", ++ visibility = ["//:__subpackages__"], ++ deps = ["//internal/jsonrpc2/stack"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":stacktest", ++ visibility = ["//:__subpackages__"], ++) +diff -urN b/internal/jsonrpc2_v2/BUILD.bazel c/internal/jsonrpc2_v2/BUILD.bazel +--- b/internal/jsonrpc2_v2/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/internal/jsonrpc2_v2/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,32 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test") ++ ++go_library( ++ name = "jsonrpc2_v2", ++ srcs = [ ++ "conn.go", ++ "frame.go", ++ "jsonrpc2.go", ++ "messages.go", ++ "net.go", ++ "serve.go", ++ "wire.go", ++ ], ++ importpath = "golang.org/x/tools/internal/jsonrpc2_v2", ++ visibility = ["//:__subpackages__"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":jsonrpc2_v2", ++ visibility = ["//:__subpackages__"], ++) ++ ++go_test( ++ 
name = "jsonrpc2_v2_test", ++ srcs = [ ++ "jsonrpc2_test.go", ++ "serve_test.go", ++ "wire_test.go", ++ ], ++ deps = [":jsonrpc2_v2"], ++) +diff -urN b/internal/mcp/BUILD.bazel c/internal/mcp/BUILD.bazel +--- b/internal/mcp/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/internal/mcp/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,72 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test") ++ ++go_library( ++ name = "mcp", ++ srcs = [ ++ "client.go", ++ "cmd.go", ++ "content.go", ++ "features.go", ++ "logging.go", ++ "mcp.go", ++ "prompt.go", ++ "protocol.go", ++ "resource.go", ++ "root.go", ++ "server.go", ++ "shared.go", ++ "sse.go", ++ "streamable.go", ++ "tool.go", ++ "transport.go", ++ "util.go", ++ ], ++ importpath = "golang.org/x/tools/internal/mcp", ++ visibility = ["//:__subpackages__"], ++ deps = [ ++ "//internal/jsonrpc2_v2", ++ "//internal/mcp/internal/util", ++ "//internal/mcp/jsonschema", ++ "//internal/xcontext", ++ ], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":mcp", ++ visibility = ["//:__subpackages__"], ++) ++ ++go_test( ++ name = "mcp_test", ++ srcs = [ ++ "client_list_test.go", ++ "client_test.go", ++ "cmd_test.go", ++ "conformance_go125_test.go", ++ "conformance_test.go", ++ "content_test.go", ++ "example_progress_test.go", ++ "features_test.go", ++ "mcp_test.go", ++ "resource_test.go", ++ "server_example_test.go", ++ "server_test.go", ++ "shared_test.go", ++ "sse_example_test.go", ++ "sse_test.go", ++ "streamable_test.go", ++ "tool_test.go", ++ "transport_test.go", ++ "util_test.go", ++ ], ++ data = glob(["testdata/**"]), ++ embed = [":mcp"], ++ deps = [ ++ "//internal/jsonrpc2_v2", ++ "//internal/mcp/jsonschema", ++ "//txtar", ++ "@com_github_google_go_cmp//cmp:go_default_library", ++ "@com_github_google_go_cmp//cmp/cmpopts:go_default_library", ++ ], ++) +diff -urN b/internal/mcp/examples/hello/BUILD.bazel c/internal/mcp/examples/hello/BUILD.bazel +--- b/internal/mcp/examples/hello/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/internal/mcp/examples/hello/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,15 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_binary", "go_library") ++ ++go_library( ++ name = "hello_lib", ++ srcs = ["main.go"], ++ importpath = "golang.org/x/tools/internal/mcp/examples/hello", ++ visibility = ["//visibility:private"], ++ deps = ["//internal/mcp"], ++) ++ ++go_binary( ++ name = "hello", ++ embed = [":hello_lib"], ++ visibility = ["//:__subpackages__"], ++) +diff -urN b/internal/mcp/examples/sse/BUILD.bazel c/internal/mcp/examples/sse/BUILD.bazel +--- b/internal/mcp/examples/sse/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/internal/mcp/examples/sse/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,15 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_binary", "go_library") ++ ++go_library( ++ name = "sse_lib", ++ srcs = ["main.go"], ++ importpath = "golang.org/x/tools/internal/mcp/examples/sse", ++ visibility = ["//visibility:private"], ++ deps = ["//internal/mcp"], ++) ++ ++go_binary( ++ name = "sse", ++ embed = [":sse_lib"], ++ visibility = ["//:__subpackages__"], ++) +diff -urN b/internal/mcp/internal/oauthex/BUILD.bazel c/internal/mcp/internal/oauthex/BUILD.bazel +--- b/internal/mcp/internal/oauthex/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/internal/mcp/internal/oauthex/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,24 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test") ++ ++go_library( ++ name = "oauthex", ++ 
srcs = [ ++ "oauth2.go", ++ "resource_meta.go", ++ ], ++ importpath = "golang.org/x/tools/internal/mcp/internal/oauthex", ++ visibility = ["//:__subpackages__"], ++ deps = ["//internal/mcp/internal/util"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":oauthex", ++ visibility = ["//:__subpackages__"], ++) ++ ++go_test( ++ name = "oauthex_test", ++ srcs = ["oauth2_test.go"], ++ embed = [":oauthex"], ++) +diff -urN b/internal/mcp/internal/readme/client/BUILD.bazel c/internal/mcp/internal/readme/client/BUILD.bazel +--- b/internal/mcp/internal/readme/client/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/internal/mcp/internal/readme/client/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,15 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_binary", "go_library") ++ ++go_library( ++ name = "client_lib", ++ srcs = ["client.go"], ++ importpath = "golang.org/x/tools/internal/mcp/internal/readme/client", ++ visibility = ["//visibility:private"], ++ deps = ["//internal/mcp"], ++) ++ ++go_binary( ++ name = "client", ++ embed = [":client_lib"], ++ visibility = ["//:__subpackages__"], ++) +diff -urN b/internal/mcp/internal/readme/server/BUILD.bazel c/internal/mcp/internal/readme/server/BUILD.bazel +--- b/internal/mcp/internal/readme/server/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/internal/mcp/internal/readme/server/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,15 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_binary", "go_library") ++ ++go_library( ++ name = "server_lib", ++ srcs = ["server.go"], ++ importpath = "golang.org/x/tools/internal/mcp/internal/readme/server", ++ visibility = ["//visibility:private"], ++ deps = ["//internal/mcp"], ++) ++ ++go_binary( ++ name = "server", ++ embed = [":server_lib"], ++ visibility = ["//:__subpackages__"], ++) +diff -urN b/internal/mcp/internal/util/BUILD.bazel c/internal/mcp/internal/util/BUILD.bazel +--- b/internal/mcp/internal/util/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/internal/mcp/internal/util/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,20 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test") ++ ++go_library( ++ name = "util", ++ srcs = ["util.go"], ++ importpath = "golang.org/x/tools/internal/mcp/internal/util", ++ visibility = ["//:__subpackages__"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":util", ++ visibility = ["//:__subpackages__"], ++) ++ ++go_test( ++ name = "util_test", ++ srcs = ["util_test.go"], ++ embed = [":util"], ++) +diff -urN b/internal/mcp/jsonschema/BUILD.bazel c/internal/mcp/jsonschema/BUILD.bazel +--- b/internal/mcp/jsonschema/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/internal/mcp/jsonschema/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,42 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test") ++ ++go_library( ++ name = "jsonschema", ++ srcs = [ ++ "annotations.go", ++ "doc.go", ++ "infer.go", ++ "json_pointer.go", ++ "resolve.go", ++ "schema.go", ++ "util.go", ++ "validate.go", ++ ], ++ importpath = "golang.org/x/tools/internal/mcp/jsonschema", ++ visibility = ["//:__subpackages__"], ++ deps = ["//internal/mcp/internal/util"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":jsonschema", ++ visibility = ["//:__subpackages__"], ++) ++ ++go_test( ++ name = "jsonschema_test", ++ srcs = [ ++ "infer_test.go", ++ "json_pointer_test.go", ++ "resolve_test.go", ++ "schema_test.go", ++ "util_test.go", ++ "validate_test.go", ++ ], ++ data = glob(["testdata/**"]), ++ 
embed = [":jsonschema"], ++ deps = [ ++ "@com_github_google_go_cmp//cmp:go_default_library", ++ "@com_github_google_go_cmp//cmp/cmpopts:go_default_library", ++ ], ++) +diff -urN b/internal/modindex/BUILD.bazel c/internal/modindex/BUILD.bazel +--- b/internal/modindex/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/internal/modindex/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,36 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test") ++ ++go_library( ++ name = "modindex", ++ srcs = [ ++ "directories.go", ++ "index.go", ++ "lookup.go", ++ "modindex.go", ++ "symbols.go", ++ ], ++ importpath = "golang.org/x/tools/internal/modindex", ++ visibility = ["//:__subpackages__"], ++ deps = [ ++ "//internal/gopathwalk", ++ "@org_golang_x_mod//semver:go_default_library", ++ "@org_golang_x_sync//errgroup:go_default_library", ++ ], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":modindex", ++ visibility = ["//:__subpackages__"], ++) ++ ++go_test( ++ name = "modindex_test", ++ srcs = [ ++ "dir_test.go", ++ "export_test.go", ++ "lookup_test.go", ++ ], ++ embed = [":modindex"], ++ deps = ["@com_github_google_go_cmp//cmp:go_default_library"], ++) +diff -urN b/internal/modindex/gomodindex/BUILD.bazel c/internal/modindex/gomodindex/BUILD.bazel +--- b/internal/modindex/gomodindex/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/internal/modindex/gomodindex/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,15 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_binary", "go_library") ++ ++go_library( ++ name = "gomodindex_lib", ++ srcs = ["cmd.go"], ++ importpath = "golang.org/x/tools/internal/modindex/gomodindex", ++ visibility = ["//visibility:private"], ++ deps = ["//internal/modindex"], ++) ++ ++go_binary( ++ name = "gomodindex", ++ embed = [":gomodindex_lib"], ++ visibility = ["//:__subpackages__"], ++) +diff -urN b/internal/moreiters/BUILD.bazel c/internal/moreiters/BUILD.bazel +--- b/internal/moreiters/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/internal/moreiters/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "moreiters", ++ srcs = ["iters.go"], ++ importpath = "golang.org/x/tools/internal/moreiters", ++ visibility = ["//:__subpackages__"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":moreiters", ++ visibility = ["//:__subpackages__"], ++) +diff -urN b/internal/packagesinternal/BUILD.bazel c/internal/packagesinternal/BUILD.bazel +--- b/internal/packagesinternal/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/internal/packagesinternal/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "packagesinternal", ++ srcs = ["packages.go"], ++ importpath = "golang.org/x/tools/internal/packagesinternal", ++ visibility = ["//:__subpackages__"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":packagesinternal", ++ visibility = ["//:__subpackages__"], ++) +diff -urN b/internal/packagestest/BUILD.bazel c/internal/packagestest/BUILD.bazel +--- b/internal/packagestest/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/internal/packagestest/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,40 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test") ++ ++go_library( ++ name = "packagestest", ++ srcs = [ ++ "expect.go", ++ "export.go", ++ "gopath.go", ++ "modules.go", ++ ], ++ importpath = 
"golang.org/x/tools/internal/packagestest", ++ visibility = ["//:__subpackages__"], ++ deps = [ ++ "//go/packages", ++ "//internal/expect", ++ "//internal/gocommand", ++ "//internal/proxydir", ++ "//internal/testenv", ++ ], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":packagestest", ++ visibility = ["//:__subpackages__"], ++) ++ ++go_test( ++ name = "packagestest_test", ++ srcs = [ ++ "expect_test.go", ++ "export_test.go", ++ "gopath_test.go", ++ "modules_test.go", ++ ], ++ deps = [ ++ ":packagestest", ++ "//internal/expect", ++ ], ++) +diff -urN b/internal/packagestest/testdata/BUILD.bazel c/internal/packagestest/testdata/BUILD.bazel +--- b/internal/packagestest/testdata/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/internal/packagestest/testdata/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,23 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test") ++ ++go_library( ++ name = "testdata", ++ srcs = ["test.go"], ++ importpath = "golang.org/x/tools/internal/packagestest/testdata", ++ visibility = ["//:__subpackages__"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":testdata", ++ visibility = ["//:__subpackages__"], ++) ++ ++go_test( ++ name = "testdata_test", ++ srcs = [ ++ "test_test.go", ++ "x_test.go", ++ ], ++ embed = [":testdata"], ++) +diff -urN b/internal/packagestest/testdata/groups/one/modules/example.com/extra/BUILD.bazel c/internal/packagestest/testdata/groups/one/modules/example.com/extra/BUILD.bazel +--- b/internal/packagestest/testdata/groups/one/modules/example.com/extra/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/internal/packagestest/testdata/groups/one/modules/example.com/extra/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "extra", ++ srcs = ["help.go"], ++ importpath = "golang.org/x/tools/internal/packagestest/testdata/groups/one/modules/example.com/extra", ++ visibility = ["//:__subpackages__"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":extra", ++ visibility = ["//:__subpackages__"], ++) +diff -urN b/internal/packagestest/testdata/groups/one/primarymod/BUILD.bazel c/internal/packagestest/testdata/groups/one/primarymod/BUILD.bazel +--- b/internal/packagestest/testdata/groups/one/primarymod/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/internal/packagestest/testdata/groups/one/primarymod/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "primarymod", ++ srcs = ["main.go"], ++ importpath = "golang.org/x/tools/internal/packagestest/testdata/groups/one/primarymod", ++ visibility = ["//:__subpackages__"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":primarymod", ++ visibility = ["//:__subpackages__"], ++) +diff -urN b/internal/packagestest/testdata/groups/two/modules/example.com/extra/BUILD.bazel c/internal/packagestest/testdata/groups/two/modules/example.com/extra/BUILD.bazel +--- b/internal/packagestest/testdata/groups/two/modules/example.com/extra/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/internal/packagestest/testdata/groups/two/modules/example.com/extra/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "extra", ++ srcs = ["yo.go"], ++ importpath = "golang.org/x/tools/internal/packagestest/testdata/groups/two/modules/example.com/extra", ++ 
visibility = ["//:__subpackages__"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":extra", ++ visibility = ["//:__subpackages__"], ++) +diff -urN b/internal/packagestest/testdata/groups/two/modules/example.com/extra/geez/BUILD.bazel c/internal/packagestest/testdata/groups/two/modules/example.com/extra/geez/BUILD.bazel +--- b/internal/packagestest/testdata/groups/two/modules/example.com/extra/geez/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/internal/packagestest/testdata/groups/two/modules/example.com/extra/geez/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "geez", ++ srcs = ["help.go"], ++ importpath = "golang.org/x/tools/internal/packagestest/testdata/groups/two/modules/example.com/extra/geez", ++ visibility = ["//:__subpackages__"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":geez", ++ visibility = ["//:__subpackages__"], ++) +diff -urN b/internal/packagestest/testdata/groups/two/modules/example.com/extra/v2/BUILD.bazel c/internal/packagestest/testdata/groups/two/modules/example.com/extra/v2/BUILD.bazel +--- b/internal/packagestest/testdata/groups/two/modules/example.com/extra/v2/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/internal/packagestest/testdata/groups/two/modules/example.com/extra/v2/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "extra", ++ srcs = ["me.go"], ++ importpath = "golang.org/x/tools/internal/packagestest/testdata/groups/two/modules/example.com/extra/v2", ++ visibility = ["//:__subpackages__"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":extra", ++ visibility = ["//:__subpackages__"], ++) +diff -urN b/internal/packagestest/testdata/groups/two/modules/example.com/extra/v2/geez/BUILD.bazel c/internal/packagestest/testdata/groups/two/modules/example.com/extra/v2/geez/BUILD.bazel +--- b/internal/packagestest/testdata/groups/two/modules/example.com/extra/v2/geez/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/internal/packagestest/testdata/groups/two/modules/example.com/extra/v2/geez/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "geez", ++ srcs = ["help.go"], ++ importpath = "golang.org/x/tools/internal/packagestest/testdata/groups/two/modules/example.com/extra/v2/geez", ++ visibility = ["//:__subpackages__"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":geez", ++ visibility = ["//:__subpackages__"], ++) +diff -urN b/internal/packagestest/testdata/groups/two/modules/example.com/tempmod/BUILD.bazel c/internal/packagestest/testdata/groups/two/modules/example.com/tempmod/BUILD.bazel +--- b/internal/packagestest/testdata/groups/two/modules/example.com/tempmod/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/internal/packagestest/testdata/groups/two/modules/example.com/tempmod/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "tempmod", ++ srcs = ["main.go"], ++ importpath = "golang.org/x/tools/internal/packagestest/testdata/groups/two/modules/example.com/tempmod", ++ visibility = ["//:__subpackages__"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":tempmod", ++ visibility = ["//:__subpackages__"], ++) +diff -urN 
b/internal/packagestest/testdata/groups/two/modules/example.com/what@v1.0.0/BUILD.bazel c/internal/packagestest/testdata/groups/two/modules/example.com/what@v1.0.0/BUILD.bazel +--- b/internal/packagestest/testdata/groups/two/modules/example.com/what@v1.0.0/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/internal/packagestest/testdata/groups/two/modules/example.com/what@v1.0.0/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "what@v1_0_0", ++ srcs = ["main.go"], ++ importpath = "golang.org/x/tools/internal/packagestest/testdata/groups/two/modules/example.com/what@v1.0.0", ++ visibility = ["//:__subpackages__"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":what@v1_0_0", ++ visibility = ["//:__subpackages__"], ++) +diff -urN b/internal/packagestest/testdata/groups/two/modules/example.com/what@v1.1.0/BUILD.bazel c/internal/packagestest/testdata/groups/two/modules/example.com/what@v1.1.0/BUILD.bazel +--- b/internal/packagestest/testdata/groups/two/modules/example.com/what@v1.1.0/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/internal/packagestest/testdata/groups/two/modules/example.com/what@v1.1.0/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "what@v1_1_0", ++ srcs = ["main.go"], ++ importpath = "golang.org/x/tools/internal/packagestest/testdata/groups/two/modules/example.com/what@v1.1.0", ++ visibility = ["//:__subpackages__"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":what@v1_1_0", ++ visibility = ["//:__subpackages__"], ++) +diff -urN b/internal/packagestest/testdata/groups/two/primarymod/BUILD.bazel c/internal/packagestest/testdata/groups/two/primarymod/BUILD.bazel +--- b/internal/packagestest/testdata/groups/two/primarymod/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/internal/packagestest/testdata/groups/two/primarymod/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "primarymod", ++ srcs = ["main.go"], ++ importpath = "golang.org/x/tools/internal/packagestest/testdata/groups/two/primarymod", ++ visibility = ["//:__subpackages__"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":primarymod", ++ visibility = ["//:__subpackages__"], ++) +diff -urN b/internal/packagestest/testdata/groups/two/primarymod/expect/BUILD.bazel c/internal/packagestest/testdata/groups/two/primarymod/expect/BUILD.bazel +--- b/internal/packagestest/testdata/groups/two/primarymod/expect/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/internal/packagestest/testdata/groups/two/primarymod/expect/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,19 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test") ++ ++go_library( ++ name = "expect", ++ srcs = ["yo.go"], ++ importpath = "golang.org/x/tools/internal/packagestest/testdata/groups/two/primarymod/expect", ++ visibility = ["//:__subpackages__"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":expect", ++ visibility = ["//:__subpackages__"], ++) ++ ++go_test( ++ name = "expect_test", ++ srcs = ["yo_test.go"], ++) +diff -urN b/internal/pkgbits/BUILD.bazel c/internal/pkgbits/BUILD.bazel +--- b/internal/pkgbits/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/internal/pkgbits/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,31 @@ 
++load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test") ++ ++go_library( ++ name = "pkgbits", ++ srcs = [ ++ "codes.go", ++ "decoder.go", ++ "doc.go", ++ "encoder.go", ++ "flags.go", ++ "reloc.go", ++ "support.go", ++ "sync.go", ++ "syncmarker_string.go", ++ "version.go", ++ ], ++ importpath = "golang.org/x/tools/internal/pkgbits", ++ visibility = ["//:__subpackages__"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":pkgbits", ++ visibility = ["//:__subpackages__"], ++) ++ ++go_test( ++ name = "pkgbits_test", ++ srcs = ["pkgbits_test.go"], ++ deps = [":pkgbits"], ++) +diff -urN b/internal/pprof/BUILD.bazel c/internal/pprof/BUILD.bazel +--- b/internal/pprof/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/internal/pprof/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,21 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test") ++ ++go_library( ++ name = "pprof", ++ srcs = ["pprof.go"], ++ importpath = "golang.org/x/tools/internal/pprof", ++ visibility = ["//:__subpackages__"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":pprof", ++ visibility = ["//:__subpackages__"], ++) ++ ++go_test( ++ name = "pprof_test", ++ srcs = ["pprof_test.go"], ++ data = glob(["testdata/**"]), ++ deps = [":pprof"], ++) +diff -urN b/internal/proxydir/BUILD.bazel c/internal/proxydir/BUILD.bazel +--- b/internal/proxydir/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/internal/proxydir/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,20 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test") ++ ++go_library( ++ name = "proxydir", ++ srcs = ["proxydir.go"], ++ importpath = "golang.org/x/tools/internal/proxydir", ++ visibility = ["//:__subpackages__"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":proxydir", ++ visibility = ["//:__subpackages__"], ++) ++ ++go_test( ++ name = "proxydir_test", ++ srcs = ["proxydir_test.go"], ++ embed = [":proxydir"], ++) +diff -urN b/internal/refactor/BUILD.bazel c/internal/refactor/BUILD.bazel +--- b/internal/refactor/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/internal/refactor/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,42 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test") ++ ++go_library( ++ name = "refactor", ++ srcs = [ ++ "delete.go", ++ "imports.go", ++ "refactor.go", ++ ], ++ importpath = "golang.org/x/tools/internal/refactor", ++ visibility = ["//:__subpackages__"], ++ deps = [ ++ "//go/analysis", ++ "//go/ast/edge", ++ "//go/ast/inspector", ++ "//internal/analysisinternal", ++ "//internal/astutil", ++ "//internal/typesinternal", ++ ], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":refactor", ++ visibility = ["//:__subpackages__"], ++) ++ ++go_test( ++ name = "refactor_test", ++ srcs = [ ++ "delete_test.go", ++ "imports_test.go", ++ ], ++ deps = [ ++ ":refactor", ++ "//go/analysis", ++ "//go/ast/inspector", ++ "//internal/diff", ++ "//internal/testenv", ++ "@com_github_google_go_cmp//cmp:go_default_library", ++ ], ++) +diff -urN b/internal/refactor/inline/BUILD.bazel c/internal/refactor/inline/BUILD.bazel +--- b/internal/refactor/inline/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/internal/refactor/inline/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,55 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test") ++ ++go_library( ++ name = "inline", ++ srcs = [ ++ "callee.go", ++ "calleefx.go", ++ "doc.go", ++ "escape.go", ++ "falcon.go", ++ "free.go", ++ "inline.go", ++ 
"util.go", ++ ], ++ importpath = "golang.org/x/tools/internal/refactor/inline", ++ visibility = ["//:__subpackages__"], ++ deps = [ ++ "//go/ast/astutil", ++ "//go/types/typeutil", ++ "//internal/analysisinternal", ++ "//internal/astutil", ++ "//internal/typeparams", ++ "//internal/typesinternal", ++ ], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":inline", ++ visibility = ["//:__subpackages__"], ++) ++ ++go_test( ++ name = "inline_test", ++ srcs = [ ++ "calleefx_test.go", ++ "everything_test.go", ++ "export_test.go", ++ "falcon_test.go", ++ "free_test.go", ++ "inline_test.go", ++ ], ++ data = glob(["testdata/**"]), ++ embed = [":inline"], ++ deps = [ ++ "//go/ast/astutil", ++ "//go/packages", ++ "//go/types/typeutil", ++ "//internal/diff", ++ "//internal/expect", ++ "//internal/testenv", ++ "//internal/testfiles", ++ "//txtar", ++ ], ++) +diff -urN b/internal/robustio/BUILD.bazel c/internal/robustio/BUILD.bazel +--- b/internal/robustio/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/internal/robustio/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,29 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test") ++ ++go_library( ++ name = "robustio", ++ srcs = [ ++ "gopls_windows.go", ++ "robustio.go", ++ "robustio_darwin.go", ++ "robustio_flaky.go", ++ "robustio_other.go", ++ "robustio_plan9.go", ++ "robustio_posix.go", ++ "robustio_windows.go", ++ ], ++ importpath = "golang.org/x/tools/internal/robustio", ++ visibility = ["//:__subpackages__"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":robustio", ++ visibility = ["//:__subpackages__"], ++) ++ ++go_test( ++ name = "robustio_test", ++ srcs = ["robustio_test.go"], ++ deps = [":robustio"], ++) +diff -urN b/internal/stdlib/BUILD.bazel c/internal/stdlib/BUILD.bazel +--- b/internal/stdlib/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/internal/stdlib/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,29 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test") ++ ++go_library( ++ name = "stdlib", ++ srcs = [ ++ "deps.go", ++ "import.go", ++ "manifest.go", ++ "stdlib.go", ++ ], ++ importpath = "golang.org/x/tools/internal/stdlib", ++ visibility = ["//:__subpackages__"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":stdlib", ++ visibility = ["//:__subpackages__"], ++) ++ ++go_test( ++ name = "stdlib_test", ++ srcs = ["deps_test.go"], ++ data = glob(["testdata/**"]), ++ deps = [ ++ ":stdlib", ++ "@com_github_google_go_cmp//cmp:go_default_library", ++ ], ++) +diff -urN b/internal/testenv/BUILD.bazel c/internal/testenv/BUILD.bazel +--- b/internal/testenv/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/internal/testenv/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,23 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "testenv", ++ srcs = [ ++ "exec.go", ++ "testenv.go", ++ "testenv_notunix.go", ++ "testenv_unix.go", ++ ], ++ importpath = "golang.org/x/tools/internal/testenv", ++ visibility = ["//:__subpackages__"], ++ deps = [ ++ "//internal/gocommand", ++ "@org_golang_x_mod//modfile:go_default_library", ++ ], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":testenv", ++ visibility = ["//:__subpackages__"], ++) +diff -urN b/internal/testfiles/BUILD.bazel c/internal/testfiles/BUILD.bazel +--- b/internal/testfiles/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/internal/testfiles/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,32 @@ 
++load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test") ++ ++go_library( ++ name = "testfiles", ++ srcs = ["testfiles.go"], ++ importpath = "golang.org/x/tools/internal/testfiles", ++ visibility = ["//:__subpackages__"], ++ deps = [ ++ "//go/packages", ++ "//internal/testenv", ++ "//txtar", ++ ], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":testfiles", ++ visibility = ["//:__subpackages__"], ++) ++ ++go_test( ++ name = "testfiles_test", ++ srcs = ["testfiles_test.go"], ++ deps = [ ++ ":testfiles", ++ "//go/analysis", ++ "//go/analysis/analysistest", ++ "//internal/testenv", ++ "//internal/versions", ++ "//txtar", ++ ], ++) +diff -urN b/internal/testfiles/testdata/versions/BUILD.bazel c/internal/testfiles/testdata/versions/BUILD.bazel +--- b/internal/testfiles/testdata/versions/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/internal/testfiles/testdata/versions/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,18 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "versions", ++ srcs = [ ++ "mod.go", ++ "post.go", ++ "pre.go", ++ ], ++ importpath = "golang.org/x/tools/internal/testfiles/testdata/versions", ++ visibility = ["//:__subpackages__"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":versions", ++ visibility = ["//:__subpackages__"], ++) +diff -urN b/internal/tool/BUILD.bazel c/internal/tool/BUILD.bazel +--- b/internal/tool/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/internal/tool/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "tool", ++ srcs = ["tool.go"], ++ importpath = "golang.org/x/tools/internal/tool", ++ visibility = ["//:__subpackages__"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":tool", ++ visibility = ["//:__subpackages__"], ++) +diff -urN b/internal/typeparams/BUILD.bazel c/internal/typeparams/BUILD.bazel +--- b/internal/typeparams/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/internal/typeparams/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,34 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test") ++ ++go_library( ++ name = "typeparams", ++ srcs = [ ++ "common.go", ++ "coretype.go", ++ "free.go", ++ "normalize.go", ++ "termlist.go", ++ "typeterm.go", ++ ], ++ importpath = "golang.org/x/tools/internal/typeparams", ++ visibility = ["//:__subpackages__"], ++ deps = ["//internal/aliases"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":typeparams", ++ visibility = ["//:__subpackages__"], ++) ++ ++go_test( ++ name = "typeparams_test", ++ srcs = [ ++ "common_test.go", ++ "coretype_test.go", ++ "free_test.go", ++ "normalize_test.go", ++ ], ++ embed = [":typeparams"], ++ deps = ["//internal/testenv"], ++) +diff -urN b/internal/typeparams/genericfeatures/BUILD.bazel c/internal/typeparams/genericfeatures/BUILD.bazel +--- b/internal/typeparams/genericfeatures/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/internal/typeparams/genericfeatures/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,15 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "genericfeatures", ++ srcs = ["features.go"], ++ importpath = "golang.org/x/tools/internal/typeparams/genericfeatures", ++ visibility = ["//:__subpackages__"], ++ deps = ["//go/ast/inspector"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":genericfeatures", ++ visibility = ["//:__subpackages__"], ++) +diff 
-urN b/internal/typesinternal/BUILD.bazel c/internal/typesinternal/BUILD.bazel +--- b/internal/typesinternal/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/internal/typesinternal/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,49 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test") ++ ++go_library( ++ name = "typesinternal", ++ srcs = [ ++ "classify_call.go", ++ "element.go", ++ "errorcode.go", ++ "errorcode_string.go", ++ "fx.go", ++ "isnamed.go", ++ "qualifier.go", ++ "recv.go", ++ "toonew.go", ++ "types.go", ++ "varkind.go", ++ "zerovalue.go", ++ ], ++ importpath = "golang.org/x/tools/internal/typesinternal", ++ visibility = ["//:__subpackages__"], ++ deps = [ ++ "//go/ast/inspector", ++ "//go/types/typeutil", ++ "//internal/aliases", ++ "//internal/stdlib", ++ "//internal/versions", ++ ], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":typesinternal", ++ visibility = ["//:__subpackages__"], ++) ++ ++go_test( ++ name = "typesinternal_test", ++ srcs = [ ++ "classify_call_test.go", ++ "element_test.go", ++ "errorcode_test.go", ++ "zerovalue_test.go", ++ ], ++ deps = [ ++ ":typesinternal", ++ "//go/types/typeutil", ++ "//internal/testenv", ++ ], ++) +diff -urN b/internal/typesinternal/typeindex/BUILD.bazel c/internal/typesinternal/typeindex/BUILD.bazel +--- b/internal/typesinternal/typeindex/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/internal/typesinternal/typeindex/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,32 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test") ++ ++go_library( ++ name = "typeindex", ++ srcs = ["typeindex.go"], ++ importpath = "golang.org/x/tools/internal/typesinternal/typeindex", ++ visibility = ["//:__subpackages__"], ++ deps = [ ++ "//go/ast/edge", ++ "//go/ast/inspector", ++ "//go/types/typeutil", ++ "//internal/typesinternal", ++ ], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":typeindex", ++ visibility = ["//:__subpackages__"], ++) ++ ++go_test( ++ name = "typeindex_test", ++ srcs = ["typeindex_test.go"], ++ deps = [ ++ ":typeindex", ++ "//go/ast/inspector", ++ "//go/packages", ++ "//go/types/typeutil", ++ "//internal/testenv", ++ ], ++) +diff -urN b/internal/versions/BUILD.bazel c/internal/versions/BUILD.bazel +--- b/internal/versions/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/internal/versions/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,31 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test") ++ ++go_library( ++ name = "versions", ++ srcs = [ ++ "features.go", ++ "gover.go", ++ "types.go", ++ "versions.go", ++ ], ++ importpath = "golang.org/x/tools/internal/versions", ++ visibility = ["//:__subpackages__"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":versions", ++ visibility = ["//:__subpackages__"], ++) ++ ++go_test( ++ name = "versions_test", ++ srcs = [ ++ "types_test.go", ++ "versions_test.go", ++ ], ++ deps = [ ++ ":versions", ++ "//internal/testenv", ++ ], ++) +diff -urN b/internal/xcontext/BUILD.bazel c/internal/xcontext/BUILD.bazel +--- b/internal/xcontext/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/internal/xcontext/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "xcontext", ++ srcs = ["xcontext.go"], ++ importpath = "golang.org/x/tools/internal/xcontext", ++ visibility = ["//:__subpackages__"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":xcontext", ++ 
visibility = ["//:__subpackages__"], ++) +diff -urN b/playground/BUILD.bazel c/playground/BUILD.bazel +--- b/playground/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/playground/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,14 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library") ++ ++go_library( ++ name = "playground", ++ srcs = ["playground.go"], ++ importpath = "golang.org/x/tools/playground", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":playground", ++ visibility = ["//visibility:public"], ++) +diff -urN b/playground/socket/BUILD.bazel c/playground/socket/BUILD.bazel +--- b/playground/socket/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/playground/socket/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,24 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test") ++ ++go_library( ++ name = "socket", ++ srcs = ["socket.go"], ++ importpath = "golang.org/x/tools/playground/socket", ++ visibility = ["//visibility:public"], ++ deps = [ ++ "//txtar", ++ "@org_golang_x_net//websocket:go_default_library", ++ ], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":socket", ++ visibility = ["//visibility:public"], ++) ++ ++go_test( ++ name = "socket_test", ++ srcs = ["socket_test.go"], ++ embed = [":socket"], ++) +diff -urN b/present/BUILD.bazel c/present/BUILD.bazel +--- b/present/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/present/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,44 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test") ++ ++go_library( ++ name = "present", ++ srcs = [ ++ "args.go", ++ "caption.go", ++ "code.go", ++ "doc.go", ++ "html.go", ++ "iframe.go", ++ "image.go", ++ "link.go", ++ "parse.go", ++ "style.go", ++ "video.go", ++ ], ++ importpath = "golang.org/x/tools/present", ++ visibility = ["//visibility:public"], ++ deps = [ ++ "@com_github_yuin_goldmark//:go_default_library", ++ "@com_github_yuin_goldmark//ast:go_default_library", ++ "@com_github_yuin_goldmark//renderer/html:go_default_library", ++ "@com_github_yuin_goldmark//text:go_default_library", ++ ], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":present", ++ visibility = ["//visibility:public"], ++) ++ ++go_test( ++ name = "present_test", ++ srcs = [ ++ "code_test.go", ++ "link_test.go", ++ "parse_test.go", ++ "style_test.go", ++ ], ++ data = glob(["testdata/**"]), ++ embed = [":present"], ++) +diff -urN b/refactor/eg/BUILD.bazel c/refactor/eg/BUILD.bazel +--- b/refactor/eg/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/refactor/eg/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,132 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test") ++ ++go_library( ++ name = "eg", ++ srcs = [ ++ "eg.go", ++ "match.go", ++ "rewrite.go", ++ ], ++ importpath = "golang.org/x/tools/refactor/eg", ++ visibility = ["//visibility:public"], ++ deps = ["//go/ast/astutil"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":eg", ++ visibility = ["//visibility:public"], ++) ++ ++go_test( ++ name = "eg_test", ++ srcs = ["eg_test.go"], ++ data = glob(["testdata/**"]), ++ deps = select({ ++ "@io_bazel_rules_go//go/platform:aix": [ ++ ":eg", ++ "//go/packages", ++ "//internal/testenv", ++ "//internal/testfiles", ++ "//txtar", ++ "@com_github_google_go_cmp//cmp:go_default_library", ++ ], ++ "@io_bazel_rules_go//go/platform:darwin": [ ++ ":eg", ++ "//go/packages", ++ "//internal/testenv", ++ "//internal/testfiles", ++ "//txtar", 
++ "@com_github_google_go_cmp//cmp:go_default_library", ++ ], ++ "@io_bazel_rules_go//go/platform:dragonfly": [ ++ ":eg", ++ "//go/packages", ++ "//internal/testenv", ++ "//internal/testfiles", ++ "//txtar", ++ "@com_github_google_go_cmp//cmp:go_default_library", ++ ], ++ "@io_bazel_rules_go//go/platform:freebsd": [ ++ ":eg", ++ "//go/packages", ++ "//internal/testenv", ++ "//internal/testfiles", ++ "//txtar", ++ "@com_github_google_go_cmp//cmp:go_default_library", ++ ], ++ "@io_bazel_rules_go//go/platform:illumos": [ ++ ":eg", ++ "//go/packages", ++ "//internal/testenv", ++ "//internal/testfiles", ++ "//txtar", ++ "@com_github_google_go_cmp//cmp:go_default_library", ++ ], ++ "@io_bazel_rules_go//go/platform:ios": [ ++ ":eg", ++ "//go/packages", ++ "//internal/testenv", ++ "//internal/testfiles", ++ "//txtar", ++ "@com_github_google_go_cmp//cmp:go_default_library", ++ ], ++ "@io_bazel_rules_go//go/platform:js": [ ++ ":eg", ++ "//go/packages", ++ "//internal/testenv", ++ "//internal/testfiles", ++ "//txtar", ++ "@com_github_google_go_cmp//cmp:go_default_library", ++ ], ++ "@io_bazel_rules_go//go/platform:linux": [ ++ ":eg", ++ "//go/packages", ++ "//internal/testenv", ++ "//internal/testfiles", ++ "//txtar", ++ "@com_github_google_go_cmp//cmp:go_default_library", ++ ], ++ "@io_bazel_rules_go//go/platform:netbsd": [ ++ ":eg", ++ "//go/packages", ++ "//internal/testenv", ++ "//internal/testfiles", ++ "//txtar", ++ "@com_github_google_go_cmp//cmp:go_default_library", ++ ], ++ "@io_bazel_rules_go//go/platform:openbsd": [ ++ ":eg", ++ "//go/packages", ++ "//internal/testenv", ++ "//internal/testfiles", ++ "//txtar", ++ "@com_github_google_go_cmp//cmp:go_default_library", ++ ], ++ "@io_bazel_rules_go//go/platform:plan9": [ ++ ":eg", ++ "//go/packages", ++ "//internal/testenv", ++ "//internal/testfiles", ++ "//txtar", ++ "@com_github_google_go_cmp//cmp:go_default_library", ++ ], ++ "@io_bazel_rules_go//go/platform:solaris": [ ++ ":eg", ++ "//go/packages", ++ "//internal/testenv", ++ "//internal/testfiles", ++ "//txtar", ++ "@com_github_google_go_cmp//cmp:go_default_library", ++ ], ++ "@io_bazel_rules_go//go/platform:windows": [ ++ ":eg", ++ "//go/packages", ++ "//internal/testenv", ++ "//internal/testfiles", ++ "//txtar", ++ "@com_github_google_go_cmp//cmp:go_default_library", ++ ], ++ "//conditions:default": [], ++ }), ++) +diff -urN b/refactor/importgraph/BUILD.bazel c/refactor/importgraph/BUILD.bazel +--- b/refactor/importgraph/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/refactor/importgraph/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,75 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test") ++ ++go_library( ++ name = "importgraph", ++ srcs = ["graph.go"], ++ importpath = "golang.org/x/tools/refactor/importgraph", ++ visibility = ["//visibility:public"], ++ deps = ["//go/buildutil"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":importgraph", ++ visibility = ["//visibility:public"], ++) ++ ++go_test( ++ name = "importgraph_test", ++ srcs = ["graph_test.go"], ++ deps = select({ ++ "@io_bazel_rules_go//go/platform:aix": [ ++ ":importgraph", ++ "//internal/packagestest", ++ ], ++ "@io_bazel_rules_go//go/platform:darwin": [ ++ ":importgraph", ++ "//internal/packagestest", ++ ], ++ "@io_bazel_rules_go//go/platform:dragonfly": [ ++ ":importgraph", ++ "//internal/packagestest", ++ ], ++ "@io_bazel_rules_go//go/platform:freebsd": [ ++ ":importgraph", ++ "//internal/packagestest", ++ ], ++ "@io_bazel_rules_go//go/platform:illumos": [ ++ 
":importgraph", ++ "//internal/packagestest", ++ ], ++ "@io_bazel_rules_go//go/platform:ios": [ ++ ":importgraph", ++ "//internal/packagestest", ++ ], ++ "@io_bazel_rules_go//go/platform:js": [ ++ ":importgraph", ++ "//internal/packagestest", ++ ], ++ "@io_bazel_rules_go//go/platform:linux": [ ++ ":importgraph", ++ "//internal/packagestest", ++ ], ++ "@io_bazel_rules_go//go/platform:netbsd": [ ++ ":importgraph", ++ "//internal/packagestest", ++ ], ++ "@io_bazel_rules_go//go/platform:openbsd": [ ++ ":importgraph", ++ "//internal/packagestest", ++ ], ++ "@io_bazel_rules_go//go/platform:plan9": [ ++ ":importgraph", ++ "//internal/packagestest", ++ ], ++ "@io_bazel_rules_go//go/platform:solaris": [ ++ ":importgraph", ++ "//internal/packagestest", ++ ], ++ "@io_bazel_rules_go//go/platform:windows": [ ++ ":importgraph", ++ "//internal/packagestest", ++ ], ++ "//conditions:default": [], ++ }), ++) +diff -urN b/refactor/rename/BUILD.bazel c/refactor/rename/BUILD.bazel +--- b/refactor/rename/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/refactor/rename/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,44 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test") ++ ++go_library( ++ name = "rename", ++ srcs = [ ++ "check.go", ++ "mvpkg.go", ++ "rename.go", ++ "spec.go", ++ "util.go", ++ ], ++ importpath = "golang.org/x/tools/refactor/rename", ++ visibility = ["//visibility:public"], ++ deps = [ ++ "//go/buildutil", ++ "//go/loader", ++ "//go/types/typeutil", ++ "//internal/astutil", ++ "//internal/typeparams", ++ "//internal/typesinternal", ++ "//refactor/importgraph", ++ "//refactor/satisfy", ++ ], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":rename", ++ visibility = ["//visibility:public"], ++) ++ ++go_test( ++ name = "rename_test", ++ srcs = [ ++ "mvpkg_test.go", ++ "rename_test.go", ++ ], ++ embed = [":rename"], ++ deps = [ ++ "//go/buildutil", ++ "//internal/aliases", ++ "//internal/testenv", ++ ], ++) +diff -urN b/refactor/satisfy/BUILD.bazel c/refactor/satisfy/BUILD.bazel +--- b/refactor/satisfy/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/refactor/satisfy/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,27 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test") ++ ++go_library( ++ name = "satisfy", ++ srcs = ["find.go"], ++ importpath = "golang.org/x/tools/refactor/satisfy", ++ visibility = ["//visibility:public"], ++ deps = [ ++ "//go/types/typeutil", ++ "//internal/typeparams", ++ ], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":satisfy", ++ visibility = ["//visibility:public"], ++) ++ ++go_test( ++ name = "satisfy_test", ++ srcs = ["find_test.go"], ++ deps = [ ++ ":satisfy", ++ "//internal/testenv", ++ ], ++) +diff -urN b/txtar/BUILD.bazel c/txtar/BUILD.bazel +--- b/txtar/BUILD.bazel 1969-12-31 18:00:00.000000000 -0600 ++++ c/txtar/BUILD.bazel 2000-01-01 00:00:00.000000000 -0000 +@@ -0,0 +1,26 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test") ++ ++go_library( ++ name = "txtar", ++ srcs = [ ++ "archive.go", ++ "fs.go", ++ ], ++ importpath = "golang.org/x/tools/txtar", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":txtar", ++ visibility = ["//visibility:public"], ++) ++ ++go_test( ++ name = "txtar_test", ++ srcs = [ ++ "archive_test.go", ++ "fs_test.go", ++ ], ++ embed = [":txtar"], ++) diff --git a/time/slots/slottime_test.go b/time/slots/slottime_test.go index d5594f78e4..a17028f577 100644 --- 
a/time/slots/slottime_test.go +++ b/time/slots/slottime_test.go @@ -598,7 +598,7 @@ func TestCurrentSlot(t *testing.T) { } } func TestCurrentSlot_iterative(t *testing.T) { - for i := primitives.Slot(0); i < 1<<20; i++ { + for i := range primitives.Slot(1 << 20) { testCurrentSlot(t, i) } } diff --git a/tools/analyzers/comparesame/analyzer.go b/tools/analyzers/comparesame/analyzer.go index e063f228a5..7f69f595fe 100644 --- a/tools/analyzers/comparesame/analyzer.go +++ b/tools/analyzers/comparesame/analyzer.go @@ -27,7 +27,7 @@ var Analyzer = &analysis.Analyzer{ Run: run, } -func run(pass *analysis.Pass) (interface{}, error) { +func run(pass *analysis.Pass) (any, error) { inspection, ok := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) if !ok { return nil, errors.New("analyzer is not type *inspector.Inspector") diff --git a/tools/analyzers/cryptorand/analyzer.go b/tools/analyzers/cryptorand/analyzer.go index 937e44af05..0f6aa431b5 100644 --- a/tools/analyzers/cryptorand/analyzer.go +++ b/tools/analyzers/cryptorand/analyzer.go @@ -25,7 +25,7 @@ var Analyzer = &analysis.Analyzer{ Run: run, } -func run(pass *analysis.Pass) (interface{}, error) { +func run(pass *analysis.Pass) (any, error) { inspection, ok := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) if !ok { return nil, errors.New("analyzer is not type *inspector.Inspector") diff --git a/tools/analyzers/featureconfig/analyzer.go b/tools/analyzers/featureconfig/analyzer.go index d34c9672d0..d9aedfd23c 100644 --- a/tools/analyzers/featureconfig/analyzer.go +++ b/tools/analyzers/featureconfig/analyzer.go @@ -22,7 +22,7 @@ var Analyzer = &analysis.Analyzer{ Run: run, } -func run(pass *analysis.Pass) (interface{}, error) { +func run(pass *analysis.Pass) (any, error) { inspection, ok := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) if !ok { return nil, errors.New("analyzer is not type *inspector.Inspector") diff --git a/tools/analyzers/gocognit/analyzer.go b/tools/analyzers/gocognit/analyzer.go index 479f092ca9..a7d78971cd 100644 --- a/tools/analyzers/gocognit/analyzer.go +++ b/tools/analyzers/gocognit/analyzer.go @@ -34,7 +34,7 @@ var Analyzer = &analysis.Analyzer{ // This threshold should be lowered to 50 over time. 
const over = 100 -func run(pass *analysis.Pass) (interface{}, error) { +func run(pass *analysis.Pass) (any, error) { inspectResult, ok := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) if !ok { return nil, errors.New("analyzer is not type *inspector.Inspector") diff --git a/tools/analyzers/ineffassign/analyzer.go b/tools/analyzers/ineffassign/analyzer.go index ffe62046a8..947ef90dde 100644 --- a/tools/analyzers/ineffassign/analyzer.go +++ b/tools/analyzers/ineffassign/analyzer.go @@ -23,7 +23,7 @@ var Analyzer = &analysis.Analyzer{ Run: run, } -func run(pass *analysis.Pass) (interface{}, error) { +func run(pass *analysis.Pass) (any, error) { insp, ok := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) if !ok { return nil, errors.New("analyzer is not type *inspector.Inspector") diff --git a/tools/analyzers/interfacechecker/analyzer.go b/tools/analyzers/interfacechecker/analyzer.go index 40cc4f7975..a98bd88b94 100644 --- a/tools/analyzers/interfacechecker/analyzer.go +++ b/tools/analyzers/interfacechecker/analyzer.go @@ -33,7 +33,7 @@ var selectedInterfaces = []string{ "interface.WriteOnlyBeaconState", } -func run(pass *analysis.Pass) (interface{}, error) { +func run(pass *analysis.Pass) (any, error) { inspection, ok := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) if !ok { return nil, errors.New("analyzer is not type *inspector.Inspector") diff --git a/tools/analyzers/logcapitalization/analyzer.go b/tools/analyzers/logcapitalization/analyzer.go index f344296eea..84acba4800 100644 --- a/tools/analyzers/logcapitalization/analyzer.go +++ b/tools/analyzers/logcapitalization/analyzer.go @@ -29,7 +29,7 @@ var Analyzer = &analysis.Analyzer{ Run: run, } -func run(pass *analysis.Pass) (interface{}, error) { +func run(pass *analysis.Pass) (any, error) { inspection, ok := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) if !ok { return nil, errors.New("analyzer is not type *inspector.Inspector") diff --git a/tools/analyzers/logruswitherror/analyzer.go b/tools/analyzers/logruswitherror/analyzer.go index 6dd8ee230c..365497be6b 100644 --- a/tools/analyzers/logruswitherror/analyzer.go +++ b/tools/analyzers/logruswitherror/analyzer.go @@ -18,7 +18,7 @@ const Doc = "This analyzer requires that log statements do not use errors in tem const errImproperUsage = "use log.WithError rather than templated log statements with errors" // Map of logrus templated log functions. 
-var logFns = map[string]interface{}{ +var logFns = map[string]any{ "Debugf": nil, "Infof": nil, "Printf": nil, @@ -37,7 +37,7 @@ var Analyzer = &analysis.Analyzer{ Run: run, } -func run(pass *analysis.Pass) (interface{}, error) { +func run(pass *analysis.Pass) (any, error) { inspect, ok := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) if !ok { return nil, errors.New("analyzer is not type *inspector.Inspector") diff --git a/tools/analyzers/maligned/analyzer.go b/tools/analyzers/maligned/analyzer.go index 29f7274006..220c983858 100644 --- a/tools/analyzers/maligned/analyzer.go +++ b/tools/analyzers/maligned/analyzer.go @@ -22,7 +22,7 @@ var Analyzer = &analysis.Analyzer{ Run: run, } -func run(pass *analysis.Pass) (interface{}, error) { +func run(pass *analysis.Pass) (any, error) { inspection, ok := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) if !ok { return nil, errors.New("analyzer is not type *inspector.Inspector") diff --git a/tools/analyzers/maligned/maligned.go b/tools/analyzers/maligned/maligned.go index d54ee77a83..4842c983a7 100644 --- a/tools/analyzers/maligned/maligned.go +++ b/tools/analyzers/maligned/maligned.go @@ -38,7 +38,7 @@ func optimalSort(sizes gcSizes, str *types.Struct) string { fields := make([]*types.Var, nf) alignofs := make([]int64, nf) sizeofs := make([]int64, nf) - for i := 0; i < nf; i++ { + for i := range nf { fields[i] = str.Field(i) ft := fields[i].Type() alignofs[i] = sizes.Alignof(ft) @@ -62,7 +62,7 @@ func optimalSize(str *types.Struct, sizes *gcSizes) int64 { fields := make([]*types.Var, nf) alignofs := make([]int64, nf) sizeofs := make([]int64, nf) - for i := 0; i < nf; i++ { + for i := range nf { fields[i] = str.Field(i) ft := fields[i].Type() alignofs[i] = sizes.Alignof(ft) @@ -197,7 +197,7 @@ func (s *gcSizes) Sizeof(T types.Type) int64 { var o int64 max := int64(1) - for i := 0; i < nf; i++ { + for i := range nf { ft := t.Field(i).Type() a, sz := s.Alignof(ft), s.Sizeof(ft) if a > max { diff --git a/tools/analyzers/modernize/README.md b/tools/analyzers/modernize/README.md new file mode 100644 index 0000000000..c22cef4af4 --- /dev/null +++ b/tools/analyzers/modernize/README.md @@ -0,0 +1,5 @@ +# Modernize + +These are analyzers that are re-exported from golang.org/x/tools/go/analysis/passes/modernize. + +nogo expects a package to contain a single analyzer, while modernize exposes the named analyzers and an analyzer suite. This is incompatible with nogo so we have re-exported each analyzer. 
diff --git a/tools/analyzers/modernize/any/BUILD.bazel b/tools/analyzers/modernize/any/BUILD.bazel new file mode 100644 index 0000000000..5a06771395 --- /dev/null +++ b/tools/analyzers/modernize/any/BUILD.bazel @@ -0,0 +1,9 @@ +load("@prysm//tools/go:def.bzl", "go_library") + +go_library( + name = "go_default_library", + srcs = ["analyzer.go"], + importpath = "github.com/OffchainLabs/prysm/v7/tools/analyzers/modernize/any", + visibility = ["//visibility:public"], + deps = ["@org_golang_x_tools//go/analysis/passes/modernize:go_default_library"], +) diff --git a/tools/analyzers/modernize/any/analyzer.go b/tools/analyzers/modernize/any/analyzer.go new file mode 100644 index 0000000000..1099ae1823 --- /dev/null +++ b/tools/analyzers/modernize/any/analyzer.go @@ -0,0 +1,5 @@ +package any + +import "golang.org/x/tools/go/analysis/passes/modernize" + +var Analyzer = modernize.AnyAnalyzer diff --git a/tools/analyzers/modernize/appendclipped/BUILD.bazel b/tools/analyzers/modernize/appendclipped/BUILD.bazel new file mode 100644 index 0000000000..142e48edbd --- /dev/null +++ b/tools/analyzers/modernize/appendclipped/BUILD.bazel @@ -0,0 +1,9 @@ +load("@prysm//tools/go:def.bzl", "go_library") + +go_library( + name = "go_default_library", + srcs = ["analyzer.go"], + importpath = "github.com/OffchainLabs/prysm/v7/tools/analyzers/modernize/appendclipped", + visibility = ["//visibility:public"], + deps = ["@org_golang_x_tools//go/analysis/passes/modernize:go_default_library"], +) diff --git a/tools/analyzers/modernize/appendclipped/analyzer.go b/tools/analyzers/modernize/appendclipped/analyzer.go new file mode 100644 index 0000000000..8e61ed4f9d --- /dev/null +++ b/tools/analyzers/modernize/appendclipped/analyzer.go @@ -0,0 +1,5 @@ +package appendclipped + +import "golang.org/x/tools/go/analysis/passes/modernize" + +var Analyzer = modernize.AppendClippedAnalyzer diff --git a/tools/analyzers/modernize/bloop/BUILD.bazel b/tools/analyzers/modernize/bloop/BUILD.bazel new file mode 100644 index 0000000000..98f95a2ebb --- /dev/null +++ b/tools/analyzers/modernize/bloop/BUILD.bazel @@ -0,0 +1,9 @@ +load("@prysm//tools/go:def.bzl", "go_library") + +go_library( + name = "go_default_library", + srcs = ["analyzer.go"], + importpath = "github.com/OffchainLabs/prysm/v7/tools/analyzers/modernize/bloop", + visibility = ["//visibility:public"], + deps = ["@org_golang_x_tools//go/analysis/passes/modernize:go_default_library"], +) diff --git a/tools/analyzers/modernize/bloop/analyzer.go b/tools/analyzers/modernize/bloop/analyzer.go new file mode 100644 index 0000000000..d5e6416ae9 --- /dev/null +++ b/tools/analyzers/modernize/bloop/analyzer.go @@ -0,0 +1,5 @@ +package bloop + +import "golang.org/x/tools/go/analysis/passes/modernize" + +var Analyzer = modernize.BLoopAnalyzer diff --git a/tools/analyzers/modernize/fmtappendf/BUILD.bazel b/tools/analyzers/modernize/fmtappendf/BUILD.bazel new file mode 100644 index 0000000000..87dcc08420 --- /dev/null +++ b/tools/analyzers/modernize/fmtappendf/BUILD.bazel @@ -0,0 +1,9 @@ +load("@prysm//tools/go:def.bzl", "go_library") + +go_library( + name = "go_default_library", + srcs = ["analyzer.go"], + importpath = "github.com/OffchainLabs/prysm/v7/tools/analyzers/modernize/fmtappendf", + visibility = ["//visibility:public"], + deps = ["@org_golang_x_tools//go/analysis/passes/modernize:go_default_library"], +) diff --git a/tools/analyzers/modernize/fmtappendf/analyzer.go b/tools/analyzers/modernize/fmtappendf/analyzer.go new file mode 100644 index 0000000000..5f6d1766e7 --- /dev/null +++ 
b/tools/analyzers/modernize/fmtappendf/analyzer.go @@ -0,0 +1,5 @@ +package fmtappendf + +import "golang.org/x/tools/go/analysis/passes/modernize" + +var Analyzer = modernize.FmtAppendfAnalyzer diff --git a/tools/analyzers/modernize/forvar/BUILD.bazel b/tools/analyzers/modernize/forvar/BUILD.bazel new file mode 100644 index 0000000000..115f3cc0bd --- /dev/null +++ b/tools/analyzers/modernize/forvar/BUILD.bazel @@ -0,0 +1,9 @@ +load("@prysm//tools/go:def.bzl", "go_library") + +go_library( + name = "go_default_library", + srcs = ["analyzer.go"], + importpath = "github.com/OffchainLabs/prysm/v7/tools/analyzers/modernize/forvar", + visibility = ["//visibility:public"], + deps = ["@org_golang_x_tools//go/analysis/passes/modernize:go_default_library"], +) diff --git a/tools/analyzers/modernize/forvar/analyzer.go b/tools/analyzers/modernize/forvar/analyzer.go new file mode 100644 index 0000000000..9963192bc2 --- /dev/null +++ b/tools/analyzers/modernize/forvar/analyzer.go @@ -0,0 +1,5 @@ +package forvar + +import "golang.org/x/tools/go/analysis/passes/modernize" + +var Analyzer = modernize.ForVarAnalyzer diff --git a/tools/analyzers/modernize/mapsloop/BUILD.bazel b/tools/analyzers/modernize/mapsloop/BUILD.bazel new file mode 100644 index 0000000000..893ce0a245 --- /dev/null +++ b/tools/analyzers/modernize/mapsloop/BUILD.bazel @@ -0,0 +1,9 @@ +load("@prysm//tools/go:def.bzl", "go_library") + +go_library( + name = "go_default_library", + srcs = ["analyzer.go"], + importpath = "github.com/OffchainLabs/prysm/v7/tools/analyzers/modernize/mapsloop", + visibility = ["//visibility:public"], + deps = ["@org_golang_x_tools//go/analysis/passes/modernize:go_default_library"], +) diff --git a/tools/analyzers/modernize/mapsloop/analyzer.go b/tools/analyzers/modernize/mapsloop/analyzer.go new file mode 100644 index 0000000000..b5de3113b3 --- /dev/null +++ b/tools/analyzers/modernize/mapsloop/analyzer.go @@ -0,0 +1,5 @@ +package mapsloop + +import "golang.org/x/tools/go/analysis/passes/modernize" + +var Analyzer = modernize.MapsLoopAnalyzer diff --git a/tools/analyzers/modernize/minmax/BUILD.bazel b/tools/analyzers/modernize/minmax/BUILD.bazel new file mode 100644 index 0000000000..840f0f25d5 --- /dev/null +++ b/tools/analyzers/modernize/minmax/BUILD.bazel @@ -0,0 +1,9 @@ +load("@prysm//tools/go:def.bzl", "go_library") + +go_library( + name = "go_default_library", + srcs = ["analyzer.go"], + importpath = "github.com/OffchainLabs/prysm/v7/tools/analyzers/modernize/minmax", + visibility = ["//visibility:public"], + deps = ["@org_golang_x_tools//go/analysis/passes/modernize:go_default_library"], +) diff --git a/tools/analyzers/modernize/minmax/analyzer.go b/tools/analyzers/modernize/minmax/analyzer.go new file mode 100644 index 0000000000..62082402ba --- /dev/null +++ b/tools/analyzers/modernize/minmax/analyzer.go @@ -0,0 +1,5 @@ +package minmax + +import "golang.org/x/tools/go/analysis/passes/modernize" + +var Analyzer = modernize.MinMaxAnalyzer diff --git a/tools/analyzers/modernize/newexpr/BUILD.bazel b/tools/analyzers/modernize/newexpr/BUILD.bazel new file mode 100644 index 0000000000..466e46dbf7 --- /dev/null +++ b/tools/analyzers/modernize/newexpr/BUILD.bazel @@ -0,0 +1,9 @@ +load("@prysm//tools/go:def.bzl", "go_library") + +go_library( + name = "go_default_library", + srcs = ["analyzer.go"], + importpath = "github.com/OffchainLabs/prysm/v7/tools/analyzers/modernize/newexpr", + visibility = ["//visibility:public"], + deps = ["@org_golang_x_tools//go/analysis/passes/modernize:go_default_library"], +) diff 
--git a/tools/analyzers/modernize/newexpr/analyzer.go b/tools/analyzers/modernize/newexpr/analyzer.go new file mode 100644 index 0000000000..3e3a42be81 --- /dev/null +++ b/tools/analyzers/modernize/newexpr/analyzer.go @@ -0,0 +1,5 @@ +package newexpr + +import "golang.org/x/tools/go/analysis/passes/modernize" + +var Analyzer = modernize.NewExprAnalyzer diff --git a/tools/analyzers/modernize/omitzero/BUILD.bazel b/tools/analyzers/modernize/omitzero/BUILD.bazel new file mode 100644 index 0000000000..d709246cc8 --- /dev/null +++ b/tools/analyzers/modernize/omitzero/BUILD.bazel @@ -0,0 +1,9 @@ +load("@prysm//tools/go:def.bzl", "go_library") + +go_library( + name = "go_default_library", + srcs = ["analyzer.go"], + importpath = "github.com/OffchainLabs/prysm/v7/tools/analyzers/modernize/omitzero", + visibility = ["//visibility:public"], + deps = ["@org_golang_x_tools//go/analysis/passes/modernize:go_default_library"], +) diff --git a/tools/analyzers/modernize/omitzero/analyzer.go b/tools/analyzers/modernize/omitzero/analyzer.go new file mode 100644 index 0000000000..c56e93b890 --- /dev/null +++ b/tools/analyzers/modernize/omitzero/analyzer.go @@ -0,0 +1,5 @@ +package omitzero + +import "golang.org/x/tools/go/analysis/passes/modernize" + +var Analyzer = modernize.OmitZeroAnalyzer diff --git a/tools/analyzers/modernize/rangeint/BUILD.bazel b/tools/analyzers/modernize/rangeint/BUILD.bazel new file mode 100644 index 0000000000..4c100c45f8 --- /dev/null +++ b/tools/analyzers/modernize/rangeint/BUILD.bazel @@ -0,0 +1,9 @@ +load("@prysm//tools/go:def.bzl", "go_library") + +go_library( + name = "go_default_library", + srcs = ["analyzer.go"], + importpath = "github.com/OffchainLabs/prysm/v7/tools/analyzers/modernize/rangeint", + visibility = ["//visibility:public"], + deps = ["@org_golang_x_tools//go/analysis/passes/modernize:go_default_library"], +) diff --git a/tools/analyzers/modernize/rangeint/analyzer.go b/tools/analyzers/modernize/rangeint/analyzer.go new file mode 100644 index 0000000000..a429a3832d --- /dev/null +++ b/tools/analyzers/modernize/rangeint/analyzer.go @@ -0,0 +1,5 @@ +package rangeint + +import "golang.org/x/tools/go/analysis/passes/modernize" + +var Analyzer = modernize.RangeIntAnalyzer diff --git a/tools/analyzers/modernize/reflecttypefor/BUILD.bazel b/tools/analyzers/modernize/reflecttypefor/BUILD.bazel new file mode 100644 index 0000000000..a3be521cc6 --- /dev/null +++ b/tools/analyzers/modernize/reflecttypefor/BUILD.bazel @@ -0,0 +1,9 @@ +load("@prysm//tools/go:def.bzl", "go_library") + +go_library( + name = "go_default_library", + srcs = ["analyzer.go"], + importpath = "github.com/OffchainLabs/prysm/v7/tools/analyzers/modernize/reflecttypefor", + visibility = ["//visibility:public"], + deps = ["@org_golang_x_tools//go/analysis/passes/modernize:go_default_library"], +) diff --git a/tools/analyzers/modernize/reflecttypefor/analyzer.go b/tools/analyzers/modernize/reflecttypefor/analyzer.go new file mode 100644 index 0000000000..aaba30e110 --- /dev/null +++ b/tools/analyzers/modernize/reflecttypefor/analyzer.go @@ -0,0 +1,5 @@ +package reflecttypefor + +import "golang.org/x/tools/go/analysis/passes/modernize" + +var Analyzer = modernize.ReflectTypeForAnalyzer diff --git a/tools/analyzers/modernize/slicescontains/BUILD.bazel b/tools/analyzers/modernize/slicescontains/BUILD.bazel new file mode 100644 index 0000000000..dfc6b5bc69 --- /dev/null +++ b/tools/analyzers/modernize/slicescontains/BUILD.bazel @@ -0,0 +1,9 @@ +load("@prysm//tools/go:def.bzl", "go_library") + +go_library( + 
name = "go_default_library", + srcs = ["analyzer.go"], + importpath = "github.com/OffchainLabs/prysm/v7/tools/analyzers/modernize/slicescontains", + visibility = ["//visibility:public"], + deps = ["@org_golang_x_tools//go/analysis/passes/modernize:go_default_library"], +) diff --git a/tools/analyzers/modernize/slicescontains/analyzer.go b/tools/analyzers/modernize/slicescontains/analyzer.go new file mode 100644 index 0000000000..80383b96a7 --- /dev/null +++ b/tools/analyzers/modernize/slicescontains/analyzer.go @@ -0,0 +1,5 @@ +package slicescontains + +import "golang.org/x/tools/go/analysis/passes/modernize" + +var Analyzer = modernize.SlicesContainsAnalyzer diff --git a/tools/analyzers/modernize/slicesdelete/BUILD.bazel b/tools/analyzers/modernize/slicesdelete/BUILD.bazel new file mode 100644 index 0000000000..3aaf414b68 --- /dev/null +++ b/tools/analyzers/modernize/slicesdelete/BUILD.bazel @@ -0,0 +1,9 @@ +load("@prysm//tools/go:def.bzl", "go_library") + +go_library( + name = "go_default_library", + srcs = ["analyzer.go"], + importpath = "github.com/OffchainLabs/prysm/v7/tools/analyzers/modernize/slicesdelete", + visibility = ["//visibility:public"], + deps = ["@org_golang_x_tools//go/analysis/passes/modernize:go_default_library"], +) diff --git a/tools/analyzers/modernize/slicesdelete/analyzer.go b/tools/analyzers/modernize/slicesdelete/analyzer.go new file mode 100644 index 0000000000..52275ddb8f --- /dev/null +++ b/tools/analyzers/modernize/slicesdelete/analyzer.go @@ -0,0 +1,5 @@ +package slicesdelete + +import "golang.org/x/tools/go/analysis/passes/modernize" + +var Analyzer = modernize.SlicesDeleteAnalyzer diff --git a/tools/analyzers/modernize/slicessort/BUILD.bazel b/tools/analyzers/modernize/slicessort/BUILD.bazel new file mode 100644 index 0000000000..59b24e45e3 --- /dev/null +++ b/tools/analyzers/modernize/slicessort/BUILD.bazel @@ -0,0 +1,9 @@ +load("@prysm//tools/go:def.bzl", "go_library") + +go_library( + name = "go_default_library", + srcs = ["analyzer.go"], + importpath = "github.com/OffchainLabs/prysm/v7/tools/analyzers/modernize/slicessort", + visibility = ["//visibility:public"], + deps = ["@org_golang_x_tools//go/analysis/passes/modernize:go_default_library"], +) diff --git a/tools/analyzers/modernize/slicessort/analyzer.go b/tools/analyzers/modernize/slicessort/analyzer.go new file mode 100644 index 0000000000..9d8095b2ba --- /dev/null +++ b/tools/analyzers/modernize/slicessort/analyzer.go @@ -0,0 +1,5 @@ +package slicessort + +import "golang.org/x/tools/go/analysis/passes/modernize" + +var Analyzer = modernize.SlicesSortAnalyzer diff --git a/tools/analyzers/modernize/stringsbuilder/BUILD.bazel b/tools/analyzers/modernize/stringsbuilder/BUILD.bazel new file mode 100644 index 0000000000..75c74d53e6 --- /dev/null +++ b/tools/analyzers/modernize/stringsbuilder/BUILD.bazel @@ -0,0 +1,9 @@ +load("@prysm//tools/go:def.bzl", "go_library") + +go_library( + name = "go_default_library", + srcs = ["analyzer.go"], + importpath = "github.com/OffchainLabs/prysm/v7/tools/analyzers/modernize/stringsbuilder", + visibility = ["//visibility:public"], + deps = ["@org_golang_x_tools//go/analysis/passes/modernize:go_default_library"], +) diff --git a/tools/analyzers/modernize/stringsbuilder/analyzer.go b/tools/analyzers/modernize/stringsbuilder/analyzer.go new file mode 100644 index 0000000000..15dab1a5b8 --- /dev/null +++ b/tools/analyzers/modernize/stringsbuilder/analyzer.go @@ -0,0 +1,5 @@ +package stringsbuilder + +import "golang.org/x/tools/go/analysis/passes/modernize" + +var 
Analyzer = modernize.StringsBuilderAnalyzer diff --git a/tools/analyzers/modernize/stringscutprefix/BUILD.bazel b/tools/analyzers/modernize/stringscutprefix/BUILD.bazel new file mode 100644 index 0000000000..c3c0e3752b --- /dev/null +++ b/tools/analyzers/modernize/stringscutprefix/BUILD.bazel @@ -0,0 +1,9 @@ +load("@prysm//tools/go:def.bzl", "go_library") + +go_library( + name = "go_default_library", + srcs = ["analyzer.go"], + importpath = "github.com/OffchainLabs/prysm/v7/tools/analyzers/modernize/stringscutprefix", + visibility = ["//visibility:public"], + deps = ["@org_golang_x_tools//go/analysis/passes/modernize:go_default_library"], +) diff --git a/tools/analyzers/modernize/stringscutprefix/analyzer.go b/tools/analyzers/modernize/stringscutprefix/analyzer.go new file mode 100644 index 0000000000..f73273455d --- /dev/null +++ b/tools/analyzers/modernize/stringscutprefix/analyzer.go @@ -0,0 +1,5 @@ +package stringscutprefix + +import "golang.org/x/tools/go/analysis/passes/modernize" + +var Analyzer = modernize.StringsCutPrefixAnalyzer diff --git a/tools/analyzers/modernize/stringsseq/BUILD.bazel b/tools/analyzers/modernize/stringsseq/BUILD.bazel new file mode 100644 index 0000000000..82b2812860 --- /dev/null +++ b/tools/analyzers/modernize/stringsseq/BUILD.bazel @@ -0,0 +1,9 @@ +load("@prysm//tools/go:def.bzl", "go_library") + +go_library( + name = "go_default_library", + srcs = ["analyzer.go"], + importpath = "github.com/OffchainLabs/prysm/v7/tools/analyzers/modernize/stringsseq", + visibility = ["//visibility:public"], + deps = ["@org_golang_x_tools//go/analysis/passes/modernize:go_default_library"], +) diff --git a/tools/analyzers/modernize/stringsseq/analyzer.go b/tools/analyzers/modernize/stringsseq/analyzer.go new file mode 100644 index 0000000000..80226afffc --- /dev/null +++ b/tools/analyzers/modernize/stringsseq/analyzer.go @@ -0,0 +1,5 @@ +package stringsseq + +import "golang.org/x/tools/go/analysis/passes/modernize" + +var Analyzer = modernize.StringsSeqAnalyzer diff --git a/tools/analyzers/modernize/testingcontext/BUILD.bazel b/tools/analyzers/modernize/testingcontext/BUILD.bazel new file mode 100644 index 0000000000..fb21f9d982 --- /dev/null +++ b/tools/analyzers/modernize/testingcontext/BUILD.bazel @@ -0,0 +1,9 @@ +load("@prysm//tools/go:def.bzl", "go_library") + +go_library( + name = "go_default_library", + srcs = ["analyzer.go"], + importpath = "github.com/OffchainLabs/prysm/v7/tools/analyzers/modernize/testingcontext", + visibility = ["//visibility:public"], + deps = ["@org_golang_x_tools//go/analysis/passes/modernize:go_default_library"], +) diff --git a/tools/analyzers/modernize/testingcontext/analyzer.go b/tools/analyzers/modernize/testingcontext/analyzer.go new file mode 100644 index 0000000000..c29ac1e22f --- /dev/null +++ b/tools/analyzers/modernize/testingcontext/analyzer.go @@ -0,0 +1,5 @@ +package testingcontext + +import "golang.org/x/tools/go/analysis/passes/modernize" + +var Analyzer = modernize.TestingContextAnalyzer diff --git a/tools/analyzers/modernize/waitgroup/BUILD.bazel b/tools/analyzers/modernize/waitgroup/BUILD.bazel new file mode 100644 index 0000000000..371f575e06 --- /dev/null +++ b/tools/analyzers/modernize/waitgroup/BUILD.bazel @@ -0,0 +1,9 @@ +load("@prysm//tools/go:def.bzl", "go_library") + +go_library( + name = "go_default_library", + srcs = ["analyzer.go"], + importpath = "github.com/OffchainLabs/prysm/v7/tools/analyzers/modernize/waitgroup", + visibility = ["//visibility:public"], + deps = 
["@org_golang_x_tools//go/analysis/passes/modernize:go_default_library"], +) diff --git a/tools/analyzers/modernize/waitgroup/analyzer.go b/tools/analyzers/modernize/waitgroup/analyzer.go new file mode 100644 index 0000000000..277f1f611e --- /dev/null +++ b/tools/analyzers/modernize/waitgroup/analyzer.go @@ -0,0 +1,5 @@ +package waitgroup + +import "golang.org/x/tools/go/analysis/passes/modernize" + +var Analyzer = modernize.WaitGroupAnalyzer diff --git a/tools/analyzers/nop/analyzer.go b/tools/analyzers/nop/analyzer.go index 45d8baf92c..0cc5e9e239 100644 --- a/tools/analyzers/nop/analyzer.go +++ b/tools/analyzers/nop/analyzer.go @@ -25,7 +25,7 @@ var Analyzer = &analysis.Analyzer{ Run: run, } -func run(pass *analysis.Pass) (interface{}, error) { +func run(pass *analysis.Pass) (any, error) { inspection, ok := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) if !ok { return nil, errors.New("analyzer is not type *inspector.Inspector") diff --git a/tools/analyzers/nopanic/analyzer.go b/tools/analyzers/nopanic/analyzer.go index 8c636c9825..6b86538864 100644 --- a/tools/analyzers/nopanic/analyzer.go +++ b/tools/analyzers/nopanic/analyzer.go @@ -22,7 +22,7 @@ var Analyzer = &analysis.Analyzer{ Run: run, } -func run(pass *analysis.Pass) (interface{}, error) { +func run(pass *analysis.Pass) (any, error) { inspection, ok := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) if !ok { return nil, errors.New("analyzer is not type *inspector.Inspector") diff --git a/tools/analyzers/properpermissions/analyzer.go b/tools/analyzers/properpermissions/analyzer.go index ac3dd8f04d..c8b55b8630 100644 --- a/tools/analyzers/properpermissions/analyzer.go +++ b/tools/analyzers/properpermissions/analyzer.go @@ -31,7 +31,7 @@ var Analyzer = &analysis.Analyzer{ Run: run, } -func run(pass *analysis.Pass) (interface{}, error) { +func run(pass *analysis.Pass) (any, error) { inspection, ok := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) if !ok { return nil, errors.New("analyzer is not type *inspector.Inspector") diff --git a/tools/analyzers/recursivelock/analyzer.go b/tools/analyzers/recursivelock/analyzer.go index d521c2a2e0..7dca9dc1b2 100644 --- a/tools/analyzers/recursivelock/analyzer.go +++ b/tools/analyzers/recursivelock/analyzer.go @@ -65,7 +65,7 @@ func (m mode) ErrorFound() error { return nil } -func run(pass *analysis.Pass) (interface{}, error) { +func run(pass *analysis.Pass) (any, error) { inspectResult, ok := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) if !ok { return nil, errors.New("analyzer is not type *inspector.Inspector") @@ -163,7 +163,7 @@ func stmtSelector(node ast.Node, pass *analysis.Pass, keepTrackOf *tracker, insp } case *ast.IfStmt: stmts := stmt.Body.List - for i := 0; i < len(stmts); i++ { + for i := range stmts { keepTrackOf = stmtSelector(stmts[i], pass, keepTrackOf, inspect) } keepTrackOf = stmtSelector(stmt.Else, pass, keepTrackOf, inspect) diff --git a/tools/analyzers/shadowpredecl/analyzer.go b/tools/analyzers/shadowpredecl/analyzer.go index 1dfc8766e5..0b49de33e8 100644 --- a/tools/analyzers/shadowpredecl/analyzer.go +++ b/tools/analyzers/shadowpredecl/analyzer.go @@ -32,7 +32,7 @@ var Analyzer = &analysis.Analyzer{ Run: run, } -func run(pass *analysis.Pass) (interface{}, error) { +func run(pass *analysis.Pass) (any, error) { inspection, ok := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) if !ok { return nil, errors.New("analyzer is not type *inspector.Inspector") diff --git a/tools/analyzers/slicedirect/analyzer.go 
b/tools/analyzers/slicedirect/analyzer.go index 6c4fd66a59..a2b2aaee54 100644 --- a/tools/analyzers/slicedirect/analyzer.go +++ b/tools/analyzers/slicedirect/analyzer.go @@ -23,7 +23,7 @@ var Analyzer = &analysis.Analyzer{ Run: run, } -func run(pass *analysis.Pass) (interface{}, error) { +func run(pass *analysis.Pass) (any, error) { inspection, ok := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) if !ok { return nil, errors.New("analyzer is not type *inspector.Inspector") diff --git a/tools/analyzers/uintcast/analyzer.go b/tools/analyzers/uintcast/analyzer.go index 3b1266a079..dcad1ed004 100644 --- a/tools/analyzers/uintcast/analyzer.go +++ b/tools/analyzers/uintcast/analyzer.go @@ -24,7 +24,7 @@ var Analyzer = &analysis.Analyzer{ Run: run, } -func run(pass *analysis.Pass) (interface{}, error) { +func run(pass *analysis.Pass) (any, error) { inspection, ok := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) if !ok { return nil, errors.New("analyzer is not type *inspector.Inspector") diff --git a/tools/beacon-fuzz/main.go b/tools/beacon-fuzz/main.go index 409dabad06..71dfc1148e 100644 --- a/tools/beacon-fuzz/main.go +++ b/tools/beacon-fuzz/main.go @@ -7,6 +7,7 @@ import ( "os" "path/filepath" "strconv" + "strings" "text/template" "github.com/OffchainLabs/prysm/v7/io/file" @@ -61,11 +62,11 @@ func main() { } func sszBytesToMapStr(ss map[int][]byte) string { - dst := "" + var dst strings.Builder for i, s := range ss { - dst += fmt.Sprintf("%d: \"%x\",", i, s) + dst.WriteString(fmt.Sprintf("%d: \"%x\",", i, s)) } - return dst + return dst.String() } type input struct { @@ -74,7 +75,7 @@ type input struct { MapStr string } -func execTmpl(tpl string, input interface{}) *bytes.Buffer { +func execTmpl(tpl string, input any) *bytes.Buffer { tmpl, err := template.New("template").Parse(tpl) if err != nil { panic(err) diff --git a/tools/blocktree/main.go b/tools/blocktree/main.go index a5bfe2822a..d3214dec99 100644 --- a/tools/blocktree/main.go +++ b/tools/blocktree/main.go @@ -56,7 +56,7 @@ func main() { // Construct nodes m := make(map[[32]byte]*node) - for i := 0; i < len(blks); i++ { + for i := range blks { b := blks[i] r := roots[i] m[r] = &node{score: make(map[uint64]bool)} diff --git a/tools/bootnode/bootnode.go b/tools/bootnode/bootnode.go index 16af7358a3..c64646aced 100644 --- a/tools/bootnode/bootnode.go +++ b/tools/bootnode/bootnode.go @@ -179,7 +179,7 @@ func (h *handler) httpHandler(w http.ResponseWriter, _ *http.Request) { allNodes := h.listener.AllNodes() write(w, []byte("Nodes stored in the table:\n")) for i, n := range allNodes { - write(w, []byte(fmt.Sprintf("Node %d\n", i))) + write(w, fmt.Appendf(nil, "Node %d\n", i)) write(w, []byte(n.String()+"\n")) write(w, []byte("Node ID: "+n.ID().String()+"\n")) write(w, []byte("IP: "+n.IP().String()+"\n")) diff --git a/tools/exploredb/main.go b/tools/exploredb/main.go index ce410e7d57..b8812ab31e 100644 --- a/tools/exploredb/main.go +++ b/tools/exploredb/main.go @@ -270,7 +270,7 @@ func readStates(ctx context.Context, db *kv.Store, stateC chan<- *modifiedState, stateMap[uint64(st.Slot())] = mst } - for i := uint64(0); i < maxSlotsToDisplay; i++ { + for i := range uint64(maxSlotsToDisplay) { if _, ok := stateMap[i]; ok { stateC <- stateMap[i] } @@ -502,14 +502,14 @@ func sizeAndCountOfByteList(list [][]byte) (uint64, uint64) { func sizeAndCountOfUin64List(list []uint64) (uint64, uint64) { size := uint64(0) count := uint64(0) - for i := 0; i < len(list); i++ { + for range list { size += uint64(8) count += 1 } return size, count } 
-func sizeAndCountGeneric(genericItems interface{}, err error) (uint64, uint64) { +func sizeAndCountGeneric(genericItems any, err error) (uint64, uint64) { size := uint64(0) count := uint64(0) if err != nil { diff --git a/tools/http-request-sink/main.go b/tools/http-request-sink/main.go index 7690220972..7fc66bbba2 100644 --- a/tools/http-request-sink/main.go +++ b/tools/http-request-sink/main.go @@ -42,7 +42,7 @@ func main() { }() http.HandleFunc("/", func(writer http.ResponseWriter, r *http.Request) { - reqContent := map[string]interface{}{} + reqContent := map[string]any{} if err = parseRequest(r, &reqContent); err != nil { log.Println(err) } @@ -59,7 +59,7 @@ func main() { log.Fatal(srv.ListenAndServe()) } -func captureRequest(f *os.File, m map[string]interface{}) error { +func captureRequest(f *os.File, m map[string]any) error { enc, err := json.Marshal(m) if err != nil { return err @@ -68,7 +68,7 @@ func captureRequest(f *os.File, m map[string]interface{}) error { return err } -func parseRequest(req *http.Request, unmarshalStruct interface{}) error { +func parseRequest(req *http.Request, unmarshalStruct any) error { body, err := io.ReadAll(req.Body) if err != nil { return err diff --git a/tools/http-request-sink/main_test.go b/tools/http-request-sink/main_test.go index b4d6474073..3b1b1432b3 100644 --- a/tools/http-request-sink/main_test.go +++ b/tools/http-request-sink/main_test.go @@ -31,7 +31,7 @@ func Test_parseAndCaptureRequest(t *testing.T) { httpReq, err := http.NewRequest("GET", "/", bytes.NewBuffer(enc)) require.NoError(t, err) - reqContent := map[string]interface{}{} + reqContent := map[string]any{} err = parseRequest(httpReq, &reqContent) require.NoError(t, err) @@ -52,7 +52,7 @@ func Test_parseAndCaptureRequest(t *testing.T) { fileContents, err := io.ReadAll(f) require.NoError(t, err) - receivedContent := map[string]interface{}{} + receivedContent := map[string]any{} err = json.Unmarshal(fileContents, &receivedContent) require.NoError(t, err) diff --git a/tools/interop/split-keys/main.go b/tools/interop/split-keys/main.go index f035d86a32..714113d008 100644 --- a/tools/interop/split-keys/main.go +++ b/tools/interop/split-keys/main.go @@ -95,7 +95,7 @@ func generateKeysFromMnemonicList(mnemonicListFile *bufio.Scanner, keysPerMnemon if err != nil { return } - for i := 0; i < keysPerMnemonic; i++ { + for i := range keysPerMnemonic { if i%250 == 0 && i > 0 { log.Printf("%d/%d keys generated\n", i, keysPerMnemonic) } @@ -122,7 +122,7 @@ func spreadKeysAcrossLocalWallets( walletPassword string, ) error { ctx := context.Background() - for i := 0; i < numWallets; i++ { + for i := range numWallets { w := wallet.New(&wallet.Config{ WalletDir: path.Join(walletOutputDir, fmt.Sprintf("wallet_%d", i)), KeymanagerKind: keymanager.Local, diff --git a/tools/interop/split-keys/main_test.go b/tools/interop/split-keys/main_test.go index b3e752e93f..c097acefd4 100644 --- a/tools/interop/split-keys/main_test.go +++ b/tools/interop/split-keys/main_test.go @@ -69,7 +69,7 @@ func Test_spreadKeysAcrossImportedWallets(t *testing.T) { ) require.NoError(t, err) ctx := t.Context() - for i := 0; i < numWallets; i++ { + for i := range numWallets { w, err := wallet.OpenWallet(ctx, &wallet.Config{ WalletDir: filepath.Join(tmpDir, fmt.Sprintf("wallet_%d", i)), KeymanagerKind: keymanager.Local, diff --git a/tools/nogo_config/main.go b/tools/nogo_config/main.go index ea1ab79418..e546dc275d 100644 --- a/tools/nogo_config/main.go +++ b/tools/nogo_config/main.go @@ -68,7 +68,7 @@ func main() { return } - for _, 
check := range strings.Split(*checks, ",") { + for check := range strings.SplitSeq(*checks, ",") { c.AddExclusion(strings.TrimSpace(check), e) } diff --git a/tools/specs-checker/check.go b/tools/specs-checker/check.go index aa005257ee..ab801a5e6c 100644 --- a/tools/specs-checker/check.go +++ b/tools/specs-checker/check.go @@ -156,7 +156,7 @@ func matchesRefImplementation(defName string, refDefs []string, input string, po inputLines := strings.Split(strings.TrimRight(input, "\n"), "\n") matchesPerfectly := true - for i := 0; i < len(refDefLines); i++ { + for i := range refDefLines { a, b := strings.Trim(refDefLines[i], " "), strings.Trim(inputLines[i], " ") if a != b { matchesPerfectly = false diff --git a/tools/unencrypted-keys-gen/main.go b/tools/unencrypted-keys-gen/main.go index 8729621774..22d9c2ca1b 100644 --- a/tools/unencrypted-keys-gen/main.go +++ b/tools/unencrypted-keys-gen/main.go @@ -67,7 +67,7 @@ func generateRandomKeys(num int) (*keygen.UnencryptedKeysContainer, error) { Keys: make([]*keygen.UnencryptedKeys, num), } - for i := 0; i < num; i++ { + for i := range num { sk, err := bls.RandKey() if err != nil { return nil, err diff --git a/validator/accounts/accounts_delete_test.go b/validator/accounts/accounts_delete_test.go index b8af5e1d53..11a1cbc46a 100644 --- a/validator/accounts/accounts_delete_test.go +++ b/validator/accounts/accounts_delete_test.go @@ -20,7 +20,7 @@ func TestDelete(t *testing.T) { numAccounts := 5 keystores := make([]*keymanager.Keystore, numAccounts) passwords := make([]string, numAccounts) - for i := 0; i < numAccounts; i++ { + for i := range numAccounts { keystores[i] = createRandomKeystore(t, password) passwords[i] = password } diff --git a/validator/accounts/accounts_exit.go b/validator/accounts/accounts_exit.go index ac4ca3ffdb..27ed502098 100644 --- a/validator/accounts/accounts_exit.go +++ b/validator/accounts/accounts_exit.go @@ -156,7 +156,7 @@ func displayExitInfo(rawExitedKeys [][]byte, trimmedExitedKeys []string) { urlFormattedPubKeys[i] = formatBeaconChaURL(key) } - ifaceKeys := make([]interface{}, len(urlFormattedPubKeys)) + ifaceKeys := make([]any, len(urlFormattedPubKeys)) for i, k := range urlFormattedPubKeys { ifaceKeys[i] = k } diff --git a/validator/accounts/accounts_helper.go b/validator/accounts/accounts_helper.go index 9c743d6b38..ebe20f7ce2 100644 --- a/validator/accounts/accounts_helper.go +++ b/validator/accounts/accounts_helper.go @@ -65,7 +65,7 @@ func selectAccounts(selectionPrompt string, pubKeys [][fieldparams.BLSPubkeyLeng } if result == allAccountsText { fmt.Printf("%s\n", au.BrightRed("[Selected all accounts]").Bold()) - for i := 0; i < len(pubKeys); i++ { + for i := range pubKeys { results = append(results, i) } break diff --git a/validator/accounts/accounts_list_test.go b/validator/accounts/accounts_list_test.go index c53c3bb2e4..b046645205 100644 --- a/validator/accounts/accounts_list_test.go +++ b/validator/accounts/accounts_list_test.go @@ -154,7 +154,7 @@ func TestListAccounts_LocalKeymanager(t *testing.T) { numAccounts := 5 keystores := make([]*keymanager.Keystore, numAccounts) passwords := make([]string, numAccounts) - for i := 0; i < numAccounts; i++ { + for i := range numAccounts { keystores[i] = createRandomKeystore(t, password) passwords[i] = password } diff --git a/validator/client/attest_test.go b/validator/client/attest_test.go index 9b4dbb3f95..e770b950ab 100644 --- a/validator/client/attest_test.go +++ b/validator/client/attest_test.go @@ -590,7 +590,7 @@ func 
TestAttestToBlockHead_DoesAttestAfterDelay(t *testing.T) { BeaconBlockRoot: bytesutil.PadTo([]byte("A"), 32), Target: ðpb.Checkpoint{Root: bytesutil.PadTo([]byte("B"), 32)}, Source: ðpb.Checkpoint{Root: bytesutil.PadTo([]byte("C"), 32), Epoch: 3}, - }, nil).Do(func(arg0, arg1 interface{}) { + }, nil).Do(func(arg0, arg1 any) { wg.Done() }) @@ -745,12 +745,10 @@ func TestServer_WaitToSlotOneThird_ReceiveBlockSlot(t *testing.T) { } wg := &sync.WaitGroup{} - wg.Add(1) - go func() { + wg.Go(func() { time.Sleep(100 * time.Millisecond) v.slotFeed.Send(currentSlot) - wg.Done() - }() + }) v.waitOneThirdOrValidBlock(t.Context(), currentSlot) diff --git a/validator/client/beacon-api/beacon_api_beacon_chain_client.go b/validator/client/beacon-api/beacon_api_beacon_chain_client.go index c0aea27757..bf79e777b4 100644 --- a/validator/client/beacon-api/beacon_api_beacon_chain_client.go +++ b/validator/client/beacon-api/beacon_api_beacon_chain_client.go @@ -220,15 +220,9 @@ func (c beaconApiChainClient) Validators(ctx context.Context, in *ethpb.ListVali return nil, errors.New("state validators data is nil") } - start := pageToken * uint64(pageSize) - if start > uint64(len(stateValidators.Data)) { - start = uint64(len(stateValidators.Data)) - } + start := min(pageToken*uint64(pageSize), uint64(len(stateValidators.Data))) - end := start + uint64(pageSize) - if end > uint64(len(stateValidators.Data)) { - end = uint64(len(stateValidators.Data)) - } + end := min(start+uint64(pageSize), uint64(len(stateValidators.Data))) validators := make([]*ethpb.Validators_ValidatorContainer, end-start) for idx := start; idx < end; idx++ { diff --git a/validator/client/beacon-api/beacon_api_beacon_chain_client_test.go b/validator/client/beacon-api/beacon_api_beacon_chain_client_test.go index f9f1218071..03e1f4cf71 100644 --- a/validator/client/beacon-api/beacon_api_beacon_chain_client_test.go +++ b/validator/client/beacon-api/beacon_api_beacon_chain_client_test.go @@ -390,7 +390,7 @@ func TestListValidators(t *testing.T) { // Generate more than 250 validators, but expect only 250 to be returned validators := make([]*structs.ValidatorContainer, 267) - for idx := 0; idx < len(validators); idx++ { + for idx := range validators { validators[idx] = validValidatorsResponse.Data[0] } @@ -402,7 +402,7 @@ func TestListValidators(t *testing.T) { }, generateProtoValidatorsResponse: func() *ethpb.Validators { validators := make([]*ethpb.Validators_ValidatorContainer, 250) - for idx := 0; idx < len(validators); idx++ { + for idx := range validators { validators[idx] = ðpb.Validators_ValidatorContainer{ Index: 1, Validator: ðpb.Validator{ diff --git a/validator/client/beacon-api/duties_test.go b/validator/client/beacon-api/duties_test.go index df55346908..61769a7ec0 100644 --- a/validator/client/beacon-api/duties_test.go +++ b/validator/client/beacon-api/duties_test.go @@ -656,7 +656,7 @@ func TestGetDutiesForEpoch_Error(t *testing.T) { ).AnyTimes() vals := make([]validatorForDuty, len(pubkeys)) - for i := 0; i < len(pubkeys); i++ { + for i := range pubkeys { vals[i] = validatorForDuty{ pubkey: pubkeys[i], index: validatorIndices[i], @@ -883,7 +883,7 @@ func TestGetDutiesForEpoch_Valid(t *testing.T) { validatorClient := &beaconApiValidatorClient{dutiesProvider: dutiesProvider} vals := make([]validatorForDuty, len(pubkeys)) - for i := 0; i < len(pubkeys); i++ { + for i := range pubkeys { vals[i] = validatorForDuty{ pubkey: pubkeys[i], index: validatorIndices[i], @@ -933,7 +933,7 @@ func TestGetDuties_Valid(t *testing.T) { pubkeys := 
make([][]byte, valCount) validatorIndices := make([]primitives.ValidatorIndex, valCount) vals := make([]validatorForDuty, valCount) - for i := 0; i < valCount; i++ { + for i := range valCount { pubkeys[i] = []byte(strconv.Itoa(i)) validatorIndices[i] = primitives.ValidatorIndex(i) vals[i] = validatorForDuty{ @@ -1399,7 +1399,7 @@ func generateValidSyncDuties(pubkeys [][]byte, validatorIndices []primitives.Val // We will use a reverse function to easily make sure that the current epoch and next epoch data returned by dutiesForEpoch // are not the same -func reverseSlice[T interface{}](slice []T) []T { +func reverseSlice[T any](slice []T) []T { reversedSlice := make([]T, len(slice)) for i := range slice { reversedSlice[len(reversedSlice)-1-i] = slice[i] diff --git a/validator/client/beacon-api/get_beacon_block_test.go b/validator/client/beacon-api/get_beacon_block_test.go index 5b13b35217..e5200b0edb 100644 --- a/validator/client/beacon-api/get_beacon_block_test.go +++ b/validator/client/beacon-api/get_beacon_block_test.go @@ -43,7 +43,7 @@ func TestGetBeaconBlock_RequestFailed(t *testing.T) { func TestGetBeaconBlock_Error(t *testing.T) { testCases := []struct { name string - beaconBlock interface{} + beaconBlock any expectedErrorMessage string consensusVersion string blinded bool diff --git a/validator/client/beacon-api/rest_handler_client.go b/validator/client/beacon-api/rest_handler_client.go index f21964eab7..dd80c697c0 100644 --- a/validator/client/beacon-api/rest_handler_client.go +++ b/validator/client/beacon-api/rest_handler_client.go @@ -22,9 +22,9 @@ import ( type reqOption func(*http.Request) type RestHandler interface { - Get(ctx context.Context, endpoint string, resp interface{}) error + Get(ctx context.Context, endpoint string, resp any) error GetSSZ(ctx context.Context, endpoint string) ([]byte, http.Header, error) - Post(ctx context.Context, endpoint string, headers map[string]string, data *bytes.Buffer, resp interface{}) error + Post(ctx context.Context, endpoint string, headers map[string]string, data *bytes.Buffer, resp any) error PostSSZ(ctx context.Context, endpoint string, headers map[string]string, data *bytes.Buffer) ([]byte, http.Header, error) HttpClient() *http.Client Host() string @@ -70,7 +70,7 @@ func (c *BeaconApiRestHandler) Host() string { // Get sends a GET request and decodes the response body as a JSON object into the passed in object. // If an HTTP error is returned, the body is decoded as a DefaultJsonError JSON object and returned as the first return value. 
-func (c *BeaconApiRestHandler) Get(ctx context.Context, endpoint string, resp interface{}) error { +func (c *BeaconApiRestHandler) Get(ctx context.Context, endpoint string, resp any) error { url := c.host + endpoint req, err := http.NewRequestWithContext(ctx, http.MethodGet, url, nil) if err != nil { @@ -150,7 +150,7 @@ func (c *BeaconApiRestHandler) Post( apiEndpoint string, headers map[string]string, data *bytes.Buffer, - resp interface{}, + resp any, ) error { if data == nil { return errors.New("data is nil") @@ -249,7 +249,7 @@ func (c *BeaconApiRestHandler) PostSSZ( return body, httpResp.Header, nil } -func decodeResp(httpResp *http.Response, resp interface{}) error { +func decodeResp(httpResp *http.Response, resp any) error { body, err := io.ReadAll(httpResp.Body) if err != nil { return errors.Wrapf(err, "failed to read response body for %s", httpResp.Request.URL) diff --git a/validator/client/health_monitor_test.go b/validator/client/health_monitor_test.go index 640b70ecde..ce7fc068b9 100644 --- a/validator/client/health_monitor_test.go +++ b/validator/client/health_monitor_test.go @@ -36,12 +36,10 @@ func TestHealthMonitor_IsHealthy_Concurrency(t *testing.T) { var wg sync.WaitGroup numGoroutines := 10 - for i := 0; i < numGoroutines; i++ { - wg.Add(1) - go func() { - defer wg.Done() + for range numGoroutines { + wg.Go(func() { assert.True(t, monitor.IsHealthy()) - }() + }) } wg.Wait() @@ -50,12 +48,10 @@ func TestHealthMonitor_IsHealthy_Concurrency(t *testing.T) { monitor.isHealthy = false monitor.Unlock() - for i := 0; i < numGoroutines; i++ { - wg.Add(1) - go func() { - defer wg.Done() + for range numGoroutines { + wg.Go(func() { assert.False(t, monitor.IsHealthy()) - }() + }) } wg.Wait() } diff --git a/validator/client/runner_test.go b/validator/client/runner_test.go index 76fc64afea..fd7d6ceb3e 100644 --- a/validator/client/runner_test.go +++ b/validator/client/runner_test.go @@ -170,7 +170,7 @@ func TestAttests_NextSlot(t *testing.T) { ctrl := gomock.NewController(t) defer ctrl.Finish() - attSubmitted := make(chan interface{}) + attSubmitted := make(chan any) v := &testutil.FakeValidator{Km: &mockKeymanager{accountsChangedFeed: &event.Feed{}}, AttSubmitted: attSubmitted} ctx, cancel := context.WithCancel(t.Context()) @@ -192,7 +192,7 @@ func TestAttests_NextSlot(t *testing.T) { func TestProposes_NextSlot(t *testing.T) { ctrl := gomock.NewController(t) defer ctrl.Finish() - blockProposed := make(chan interface{}) + blockProposed := make(chan any) v := &testutil.FakeValidator{Km: &mockKeymanager{accountsChangedFeed: &event.Feed{}}, BlockProposed: blockProposed} ctx, cancel := context.WithCancel(t.Context()) @@ -216,8 +216,8 @@ func TestBothProposesAndAttests_NextSlot(t *testing.T) { ctrl := gomock.NewController(t) defer ctrl.Finish() - blockProposed := make(chan interface{}) - attSubmitted := make(chan interface{}) + blockProposed := make(chan any) + attSubmitted := make(chan any) v := &testutil.FakeValidator{Km: &mockKeymanager{accountsChangedFeed: &event.Feed{}}, BlockProposed: blockProposed, AttSubmitted: attSubmitted} ctx, cancel := context.WithCancel(t.Context()) @@ -274,7 +274,7 @@ func TestKeyReload_NoActiveKey(t *testing.T) { func notActive(t *testing.T) [fieldparams.BLSPubkeyLength]byte { var r [fieldparams.BLSPubkeyLength]byte copy(r[:], testutil.ActiveKey[:]) - for i := 0; i < len(r); i++ { + for i := range len(r) { r[i] = bits.Reverse8(r[i]) } require.DeepNotEqual(t, r, testutil.ActiveKey) diff --git a/validator/client/testutil/mock_validator.go 
b/validator/client/testutil/mock_validator.go index 91b7a84a85..ab3107213a 100644 --- a/validator/client/testutil/mock_validator.go +++ b/validator/client/testutil/mock_validator.go @@ -52,8 +52,8 @@ type FakeValidator struct { PubkeyToIndexMap map[[fieldparams.BLSPubkeyLength]byte]uint64 IndexToPubkeyMap map[uint64][fieldparams.BLSPubkeyLength]byte WaitForChainStartCalled int - AttSubmitted chan interface{} - BlockProposed chan interface{} + AttSubmitted chan any + BlockProposed chan any AccountsChannel chan [][fieldparams.BLSPubkeyLength]byte GenesisT time.Time ReceiveBlocksCalled int diff --git a/validator/client/validator_test.go b/validator/client/validator_test.go index bf43769bf0..92e214ce7c 100644 --- a/validator/client/validator_test.go +++ b/validator/client/validator_test.go @@ -63,7 +63,7 @@ var unknownIndex = primitives.ValidatorIndex(^uint64(0)) func genMockKeymanager(t *testing.T, numKeys int) *mockKeymanager { pairs := make([]keypair, numKeys) - for i := 0; i < numKeys; i++ { + for i := range numKeys { pairs[i] = randKeypair(t) } @@ -859,7 +859,7 @@ type doppelGangerRequestMatcher struct { var _ gomock.Matcher = (*doppelGangerRequestMatcher)(nil) -func (m *doppelGangerRequestMatcher) Matches(x interface{}) bool { +func (m *doppelGangerRequestMatcher) Matches(x any) bool { r, ok := x.(*ethpb.DoppelGangerRequest) if !ok { panic("Invalid match type") @@ -1011,7 +1011,7 @@ func TestValidator_CheckDoppelGanger(t *testing.T) { attLimit := 5 for i, k := range keys { pkey := k - for j := 0; j < attLimit; j++ { + for j := range attLimit { att := createAttestation(10+primitives.Epoch(j), 12+primitives.Epoch(j)) rt, err := att.Data.HashTreeRoot() assert.NoError(t, err) @@ -1362,7 +1362,7 @@ type PrepareBeaconProposerRequestMatcher struct { expectedRecipients []*ethpb.PrepareBeaconProposerRequest_FeeRecipientContainer } -func (m *PrepareBeaconProposerRequestMatcher) Matches(x interface{}) bool { +func (m *PrepareBeaconProposerRequestMatcher) Matches(x any) bool { req, ok := x.(*ethpb.PrepareBeaconProposerRequest) if !ok { return false diff --git a/validator/client/wait_for_activation_test.go b/validator/client/wait_for_activation_test.go index 3311102f14..0b93c80e4b 100644 --- a/validator/client/wait_for_activation_test.go +++ b/validator/client/wait_for_activation_test.go @@ -140,7 +140,7 @@ func TestWaitForActivation_AccountsChanged(t *testing.T) { ðpb.MultipleValidatorStatusRequest{ PublicKeys: [][]byte{inactive.pub[:]}, }, - ).Return(inactiveResp, nil).Do(func(arg0, arg1 interface{}) { + ).Return(inactiveResp, nil).Do(func(arg0, arg1 any) { require.NoError(t, km.add(active)) km.SimulateAccountChanges([][fieldparams.BLSPubkeyLength]byte{inactive.pub, active.pub}) }), @@ -215,7 +215,7 @@ func TestWaitForActivation_AccountsChanged(t *testing.T) { ðpb.MultipleValidatorStatusRequest{ PublicKeys: [][]byte{inactivePubKey[:]}, }, - ).Return(inactiveResp, nil).Do(func(arg0, arg1 interface{}) { + ).Return(inactiveResp, nil).Do(func(arg0, arg1 any) { err = km.RecoverAccountsFromMnemonic(ctx, constant.TestMnemonic, derived.DefaultMnemonicLanguage, "", 2) require.NoError(t, err) pks, err := km.FetchValidatingPublicKeys(ctx) diff --git a/validator/db/filesystem/attester_protection_test.go b/validator/db/filesystem/attester_protection_test.go index 5a7c711e33..0b4eb0f3dc 100644 --- a/validator/db/filesystem/attester_protection_test.go +++ b/validator/db/filesystem/attester_protection_test.go @@ -488,8 +488,7 @@ func BenchmarkStore_SaveAttestationForPubKey(b *testing.B) { validatorDB, err := 
NewStore(b.TempDir(), &Config{PubKeys: pubkeys}) require.NoError(b, err) - b.ResetTimer() - for i := 0; i < b.N; i++ { + for b.Loop() { b.StopTimer() err := validatorDB.ClearDB() require.NoError(b, err) diff --git a/validator/db/filesystem/import_test.go b/validator/db/filesystem/import_test.go index bb50ae6247..c96339c59b 100644 --- a/validator/db/filesystem/import_test.go +++ b/validator/db/filesystem/import_test.go @@ -80,7 +80,7 @@ func TestStore_ImportInterchangeData_BadFormat_PreventsDBWrites(t *testing.T) { // verify nothing was saved to the DB. If there is an error in the import process, we need to make // sure writing is an atomic operation: either the import succeeds and saves the slashing protection // data to our DB, or it does not. - for i := 0; i < len(publicKeys); i++ { + for i := range publicKeys { receivedHistory, err := s.ProposalHistoryForPubKey(ctx, publicKeys[i]) require.NoError(t, err) require.DeepEqual( @@ -122,7 +122,7 @@ func TestStore_ImportInterchangeData_OK(t *testing.T) { // Next, we attempt to retrieve the attesting and proposals histories from our database and // verify those indeed match the originally generated mock histories. - for i := 0; i < len(publicKeys); i++ { + for i := range publicKeys { for _, att := range attestingHistory[i] { indexedAtt := ðpb.IndexedAttestation{ Data: ðpb.AttestationData{ diff --git a/validator/db/kv/attester_protection_test.go b/validator/db/kv/attester_protection_test.go index 88b1040210..c965c280d9 100644 --- a/validator/db/kv/attester_protection_test.go +++ b/validator/db/kv/attester_protection_test.go @@ -24,7 +24,7 @@ func TestPendingAttestationRecords_Flush(t *testing.T) { // Add 5 atts num := 5 - for i := 0; i < num; i++ { + for i := range num { queue.Append(&common.AttestationRecord{ Target: primitives.Epoch(i), }) @@ -543,8 +543,8 @@ func benchCheckSurroundVote( } else { surroundingVote = createAttestation(numEpochs+1, numEpochs+2) } - b.ResetTimer() - for i := 0; i < b.N; i++ { + + for b.Loop() { for _, pubKey := range pubKeys { slashingKind, err := validatorDB.CheckSlashableAttestation(ctx, pubKey, []byte{}, surroundingVote) if shouldSurround { @@ -594,7 +594,7 @@ func BenchmarkStore_SaveAttestationForPubKey(b *testing.B) { validatorDB, err := NewKVStore(ctx, b.TempDir(), &Config{PubKeys: pubkeys}) require.NoError(b, err) - for i := 0; i < b.N; i++ { + for b.Loop() { b.StopTimer() err := validatorDB.ClearDB() require.NoError(b, err) diff --git a/validator/db/kv/eip_blacklisted_keys_test.go b/validator/db/kv/eip_blacklisted_keys_test.go index d006da34a2..e7fbcd065c 100644 --- a/validator/db/kv/eip_blacklisted_keys_test.go +++ b/validator/db/kv/eip_blacklisted_keys_test.go @@ -13,7 +13,7 @@ func TestStore_EIPBlacklistedPublicKeys(t *testing.T) { ctx := t.Context() numValidators := 100 publicKeys := make([][fieldparams.BLSPubkeyLength]byte, numValidators) - for i := 0; i < numValidators; i++ { + for i := range numValidators { var key [fieldparams.BLSPubkeyLength]byte copy(key[:], fmt.Sprintf("%d", i)) publicKeys[i] = key diff --git a/validator/db/kv/import_test.go b/validator/db/kv/import_test.go index d9a586c7f3..c7d7f981cc 100644 --- a/validator/db/kv/import_test.go +++ b/validator/db/kv/import_test.go @@ -72,7 +72,7 @@ func TestStore_ImportInterchangeData_BadFormat_PreventsDBWrites(t *testing.T) { // verify nothing was saved to the DB. 
If there is an error in the import process, we need to make // sure writing is an atomic operation: either the import succeeds and saves the slashing protection // data to our DB, or it does not. - for i := 0; i < len(publicKeys); i++ { + for i := range publicKeys { for _, att := range attestingHistory[i] { indexedAtt := ðpb.IndexedAttestation{ Data: ðpb.AttestationData{ @@ -126,7 +126,7 @@ func TestStore_ImportInterchangeData_OK(t *testing.T) { // Next, we attempt to retrieve the attesting and proposals histories from our database and // verify those indeed match the originally generated mock histories. - for i := 0; i < len(publicKeys); i++ { + for i := range publicKeys { for _, att := range attestingHistory[i] { indexedAtt := ðpb.IndexedAttestation{ Data: ðpb.AttestationData{ diff --git a/validator/db/kv/migration_optimal_attester_protection_test.go b/validator/db/kv/migration_optimal_attester_protection_test.go index c478b7893e..10a61d56a6 100644 --- a/validator/db/kv/migration_optimal_attester_protection_test.go +++ b/validator/db/kv/migration_optimal_attester_protection_test.go @@ -81,7 +81,7 @@ func Test_migrateOptimalAttesterProtectionUp(t *testing.T) { } // Verify we have (source epoch, target epoch) pairs for epochs 0 to 50 correctly. - for sourceEpoch := uint64(0); sourceEpoch < numEpochs; sourceEpoch++ { + for sourceEpoch := range numEpochs { sourceEpochBytes := bytesutil.Uint64ToBytesBigEndian(sourceEpoch) targetEpochBytes := sourceEpochsBucket.Get(sourceEpochBytes) targetEpoch := bytesutil.BytesToUint64BigEndian(targetEpochBytes) diff --git a/validator/db/kv/migration_source_target_epochs_bucket_test.go b/validator/db/kv/migration_source_target_epochs_bucket_test.go index a10a9749a2..fc35db1e5c 100644 --- a/validator/db/kv/migration_source_target_epochs_bucket_test.go +++ b/validator/db/kv/migration_source_target_epochs_bucket_test.go @@ -18,7 +18,7 @@ func TestStore_migrateSourceTargetEpochsBucketUp(t *testing.T) { // See: https://github.com/prysmaticlabs/prysm/issues/8509 numKeys := 2*publicKeyMigrationBatchSize + 1 pubKeys := make([][fieldparams.BLSPubkeyLength]byte, numKeys) - for i := 0; i < numKeys; i++ { + for i := range numKeys { var pk [fieldparams.BLSPubkeyLength]byte copy(pk[:], fmt.Sprintf("%d", i)) pubKeys[i] = pk @@ -119,7 +119,7 @@ func TestStore_migrateSourceTargetEpochsBucketDown(t *testing.T) { // See: https://github.com/prysmaticlabs/prysm/issues/8509 numKeys := 2*publicKeyMigrationBatchSize + 1 pubKeys := make([][fieldparams.BLSPubkeyLength]byte, numKeys) - for i := 0; i < numKeys; i++ { + for i := range numKeys { var pk [fieldparams.BLSPubkeyLength]byte copy(pk[:], fmt.Sprintf("%d", i)) pubKeys[i] = pk diff --git a/validator/db/kv/prune_attester_protection_test.go b/validator/db/kv/prune_attester_protection_test.go index 83dbf369c8..b4ce4579f3 100644 --- a/validator/db/kv/prune_attester_protection_test.go +++ b/validator/db/kv/prune_attester_protection_test.go @@ -41,7 +41,7 @@ func TestPruneAttestations_NoPruning(t *testing.T) { func TestPruneAttestations_OK(t *testing.T) { numKeys := uint64(64) pks := make([][fieldparams.BLSPubkeyLength]byte, 0, numKeys) - for i := uint64(0); i < numKeys; i++ { + for i := range numKeys { pks = append(pks, bytesutil.ToBytes48(bytesutil.ToBytes(i, 48))) } validatorDB := setupDB(t, pks) @@ -90,7 +90,7 @@ func TestPruneAttestations_OK(t *testing.T) { func BenchmarkPruneAttestations(b *testing.B) { numKeys := uint64(8) pks := make([][fieldparams.BLSPubkeyLength]byte, 0, numKeys) - for i := uint64(0); i < numKeys; i++ { + for 
i := range numKeys { pks = append(pks, bytesutil.ToBytes48(bytesutil.ToBytes(i, 48))) } validatorDB := setupDB(b, pks) @@ -99,8 +99,7 @@ func BenchmarkPruneAttestations(b *testing.B) { // since genesis to SLASHING_PROTECTION_PRUNING_EPOCHS * 20. numEpochs := params.BeaconConfig().SlashingProtectionPruningEpochs * 20 - b.ResetTimer() - for i := 0; i < b.N; i++ { + for b.Loop() { b.StopTimer() for _, pk := range pks { require.NoError(b, setupAttestationsForEveryEpoch(validatorDB, pk, numEpochs)) @@ -128,7 +127,7 @@ func setupAttestationsForEveryEpoch(validatorDB *Store, pubKey [48]byte, numEpoc if err != nil { return err } - for sourceEpoch := primitives.Epoch(0); sourceEpoch < numEpochs; sourceEpoch++ { + for sourceEpoch := range numEpochs { targetEpoch := sourceEpoch + 1 targetEpochBytes := bytesutil.EpochToBytesBigEndian(targetEpoch) sourceEpochBytes := bytesutil.EpochToBytesBigEndian(sourceEpoch) diff --git a/validator/keymanager/derived/keymanager.go b/validator/keymanager/derived/keymanager.go index a912996880..61f9528c50 100644 --- a/validator/keymanager/derived/keymanager.go +++ b/validator/keymanager/derived/keymanager.go @@ -66,7 +66,7 @@ func (km *Keymanager) RecoverAccountsFromMnemonic( } privKeys := make([][]byte, numAccounts) pubKeys := make([][]byte, numAccounts) - for i := 0; i < numAccounts; i++ { + for i := range numAccounts { privKey, err := util.PrivateKeyFromSeedAndPath( seed, fmt.Sprintf(ValidatingKeyDerivationPathTemplate, i), ) @@ -156,7 +156,7 @@ func (km *Keymanager) ListKeymanagerAccounts(ctx context.Context, cfg keymanager } else { fmt.Printf("Showing %d validator accounts\n", len(accountNames)) } - for i := 0; i < len(accountNames); i++ { + for i := range accountNames { fmt.Println("") validatingKeyPath := fmt.Sprintf(ValidatingKeyDerivationPathTemplate, i) diff --git a/validator/keymanager/derived/keymanager_test.go b/validator/keymanager/derived/keymanager_test.go index 94342cd5ff..19a22f9fb9 100644 --- a/validator/keymanager/derived/keymanager_test.go +++ b/validator/keymanager/derived/keymanager_test.go @@ -99,7 +99,7 @@ func TestDerivedKeymanager_FetchValidatingPublicKeys(t *testing.T) { require.Equal(t, numAccounts, len(publicKeys)) wantedPubKeys := make([][fieldparams.BLSPubkeyLength]byte, numAccounts) - for i := 0; i < numAccounts; i++ { + for i := range numAccounts { privKey, err := util.PrivateKeyFromSeedAndPath(derivedSeed, fmt.Sprintf(ValidatingKeyDerivationPathTemplate, i)) require.NoError(t, err) var pubKey [fieldparams.BLSPubkeyLength]byte @@ -138,7 +138,7 @@ func TestDerivedKeymanager_FetchValidatingPrivateKeys(t *testing.T) { require.Equal(t, numAccounts, len(privateKeys)) wantedPrivKeys := make([][32]byte, numAccounts) - for i := 0; i < numAccounts; i++ { + for i := range numAccounts { privKey, err := util.PrivateKeyFromSeedAndPath(derivedSeed, fmt.Sprintf(ValidatingKeyDerivationPathTemplate, i)) require.NoError(t, err) var privKeyBytes [32]byte diff --git a/validator/keymanager/local/backup_test.go b/validator/keymanager/local/backup_test.go index 3e9f6a9fc7..9f74239be7 100644 --- a/validator/keymanager/local/backup_test.go +++ b/validator/keymanager/local/backup_test.go @@ -15,7 +15,7 @@ func TestLocalKeymanager_ExtractKeystores(t *testing.T) { secretKeysCache = make(map[[fieldparams.BLSPubkeyLength]byte]bls.SecretKey) dr := &Keymanager{} validatingKeys := make([]bls.SecretKey, 10) - for i := 0; i < len(validatingKeys); i++ { + for i := range validatingKeys { secretKey, err := bls.RandKey() require.NoError(t, err) validatingKeys[i] = secretKey 
diff --git a/validator/keymanager/local/delete_test.go b/validator/keymanager/local/delete_test.go index 7be7117ccb..b52a069605 100644 --- a/validator/keymanager/local/delete_test.go +++ b/validator/keymanager/local/delete_test.go @@ -29,7 +29,7 @@ func TestLocalKeymanager_DeleteKeystores(t *testing.T) { ctx := t.Context() keystores := make([]*keymanager.Keystore, numAccounts) passwords := make([]string, numAccounts) - for i := 0; i < numAccounts; i++ { + for i := range numAccounts { keystores[i] = createRandomKeystore(t, password) passwords[i] = password } diff --git a/validator/keymanager/local/import.go b/validator/keymanager/local/import.go index f5d3dcf0a1..d32065b919 100644 --- a/validator/keymanager/local/import.go +++ b/validator/keymanager/local/import.go @@ -44,7 +44,7 @@ func (km *Keymanager) ImportKeystores( for i := 0; i < len(storeCopy.PrivateKeys); i++ { existingPubKeys[string(storeCopy.PublicKeys[i])] = true } - for i := 0; i < len(keystores); i++ { + for i := range keystores { var privKeyBytes []byte var pubKeyBytes []byte privKeyBytes, pubKeyBytes, _, err = km.attemptDecryptKeystore(decryptor, keystores[i], passwords[i]) diff --git a/validator/keymanager/local/import_test.go b/validator/keymanager/local/import_test.go index f81fca5e0e..6855345393 100644 --- a/validator/keymanager/local/import_test.go +++ b/validator/keymanager/local/import_test.go @@ -41,7 +41,7 @@ func TestLocalKeymanager_NoDuplicates(t *testing.T) { numKeys := 50 pubKeys := make([][]byte, numKeys) privKeys := make([][]byte, numKeys) - for i := 0; i < numKeys; i++ { + for i := range numKeys { priv, err := bls.RandKey() require.NoError(t, err) privKeys[i] = priv.Marshal() @@ -111,7 +111,7 @@ func TestLocalKeymanager_ImportKeystores(t *testing.T) { numKeystores := 5 keystores := make([]*keymanager.Keystore, numKeystores) passwords := make([]string, numKeystores) - for i := 0; i < numKeystores; i++ { + for i := range numKeystores { keystores[i] = createRandomKeystore(t, password) passwords[i] = password } @@ -131,7 +131,7 @@ func TestLocalKeymanager_ImportKeystores(t *testing.T) { numKeystores := 5 keystores := make([]*keymanager.Keystore, numKeystores) passwords := make([]string, numKeystores) - for i := 0; i < numKeystores; i++ { + for i := range numKeystores { pass := password + strconv.Itoa(i) keystores[i] = createRandomKeystore(t, pass) passwords[i] = pass diff --git a/validator/keymanager/local/keymanager.go b/validator/keymanager/local/keymanager.go index f0f06d6ddd..96e823c035 100644 --- a/validator/keymanager/local/keymanager.go +++ b/validator/keymanager/local/keymanager.go @@ -71,10 +71,10 @@ func (a *accountStore) Copy() *accountStore { // AccountsKeystoreRepresentation defines an internal Prysm representation // of validator accounts, encrypted according to the EIP-2334 standard. type AccountsKeystoreRepresentation struct { - Crypto map[string]interface{} `json:"crypto"` - ID string `json:"uuid"` - Version uint `json:"version"` - Name string `json:"name"` + Crypto map[string]any `json:"crypto"` + ID string `json:"uuid"` + Version uint `json:"version"` + Name string `json:"name"` } // ResetCaches for the keymanager. 
@@ -127,7 +127,7 @@ func NewInteropKeymanager(_ context.Context, offset, numValidatorKeys uint64) (* } lock.Lock() pubKeys := make([][fieldparams.BLSPubkeyLength]byte, numValidatorKeys) - for i := uint64(0); i < numValidatorKeys; i++ { + for i := range numValidatorKeys { publicKey := bytesutil.ToBytes48(publicKeys[i].Marshal()) pubKeys[i] = publicKey secretKeysCache[publicKey] = secretKeys[i] @@ -374,7 +374,7 @@ func updateAccountsStoreKeys(store *accountStore, privateKeys, publicKeys [][]by } // We append to the accounts store keys only // if the private/secret key do not already exist, to prevent duplicates. - for i := 0; i < len(privateKeys); i++ { + for i := range privateKeys { sk := privateKeys[i] pk := publicKeys[i] _, privKeyExists := existingPrivKeys[string(sk)] @@ -414,7 +414,7 @@ func (km *Keymanager) ListKeymanagerAccounts(ctx context.Context, cfg keymanager return errors.Wrap(err, "could not fetch private keys") } } - for i := 0; i < len(accountNames); i++ { + for i := range accountNames { fmt.Println("") fmt.Printf("%s | %s\n", au.BrightBlue(fmt.Sprintf("Account %d", i)).Bold(), au.BrightGreen(accountNames[i]).Bold()) fmt.Printf("%s %#x\n", au.BrightMagenta("[validating public key]").Bold(), pubKeys[i]) @@ -429,12 +429,12 @@ func (km *Keymanager) ListKeymanagerAccounts(ctx context.Context, cfg keymanager } func CreatePrintoutOfKeys(keys [][]byte) string { - var keysStr string + var keysStr strings.Builder for i, k := range keys { if i != 0 { - keysStr += "," // Add a comma before each key except the first one + keysStr.WriteString(",") // Add a comma before each key except the first one } - keysStr += fmt.Sprintf("%#x", bytesutil.Trunc(k)) + keysStr.WriteString(fmt.Sprintf("%#x", bytesutil.Trunc(k))) } - return keysStr + return keysStr.String() } diff --git a/validator/keymanager/local/keymanager_test.go b/validator/keymanager/local/keymanager_test.go index aa83842a27..352ee48078 100644 --- a/validator/keymanager/local/keymanager_test.go +++ b/validator/keymanager/local/keymanager_test.go @@ -29,7 +29,7 @@ func TestLocalKeymanager_FetchValidatingPublicKeys(t *testing.T) { ctx := t.Context() numAccounts := 10 wantedPubKeys := make([][fieldparams.BLSPubkeyLength]byte, 0) - for i := 0; i < numAccounts; i++ { + for range numAccounts { privKey, err := bls.RandKey() require.NoError(t, err) pubKey := bytesutil.ToBytes48(privKey.PublicKey().Marshal()) @@ -61,7 +61,7 @@ func TestLocalKeymanager_FetchValidatingPrivateKeys(t *testing.T) { ctx := t.Context() numAccounts := 10 wantedPrivateKeys := make([][32]byte, numAccounts) - for i := 0; i < numAccounts; i++ { + for i := range numAccounts { privKey, err := bls.RandKey() require.NoError(t, err) privKeyData := privKey.Marshal() @@ -97,7 +97,7 @@ func TestLocalKeymanager_Sign(t *testing.T) { numAccounts := 10 keystores := make([]*keymanager.Keystore, numAccounts) passwords := make([]string, numAccounts) - for i := 0; i < numAccounts; i++ { + for i := range numAccounts { keystores[i] = createRandomKeystore(t, password) passwords[i] = password } diff --git a/validator/keymanager/local/refresh.go b/validator/keymanager/local/refresh.go index 5b656826fa..200c1d175e 100644 --- a/validator/keymanager/local/refresh.go +++ b/validator/keymanager/local/refresh.go @@ -53,13 +53,13 @@ func (km *Keymanager) listenForAccountChanges(ctx context.Context) { } ctx, cancel := context.WithCancel(ctx) defer cancel() - fileChangesChan := make(chan interface{}, 100) + fileChangesChan := make(chan any, 100) defer close(fileChangesChan) // We debounce events sent 
over the file changes channel by an interval // to ensure we are not overwhelmed by a ton of events fired over the channel in // a short span of time. - go async.Debounce(ctx, debounceFileChangesInterval, fileChangesChan, func(event interface{}) { + go async.Debounce(ctx, debounceFileChangesInterval, fileChangesChan, func(event any) { ev, ok := event.(fsnotify.Event) if !ok { log.Errorf("Type %T is not a valid file system event", event) diff --git a/validator/keymanager/local/refresh_test.go b/validator/keymanager/local/refresh_test.go index a4184a6208..14b0bbe75c 100644 --- a/validator/keymanager/local/refresh_test.go +++ b/validator/keymanager/local/refresh_test.go @@ -60,7 +60,7 @@ func TestLocalKeymanager_reloadAccountsFromKeystore(t *testing.T) { numAccounts := 20 privKeys := make([][]byte, numAccounts) pubKeys := make([][]byte, numAccounts) - for i := 0; i < numAccounts; i++ { + for i := range numAccounts { privKey, err := bls.RandKey() require.NoError(t, err) privKeys[i] = privKey.Marshal() diff --git a/validator/keymanager/remote-web3signer/internal/client.go b/validator/keymanager/remote-web3signer/internal/client.go index e6a4675c2d..0bfa4cda62 100644 --- a/validator/keymanager/remote-web3signer/internal/client.go +++ b/validator/keymanager/remote-web3signer/internal/client.go @@ -189,7 +189,7 @@ func (client *ApiClient) doRequest(ctx context.Context, httpMethod, fullPath str } // unmarshalResponse is a utility method for unmarshalling responses. -func unmarshalResponse(responseBody io.ReadCloser, unmarshalledResponseObject interface{}) error { +func unmarshalResponse(responseBody io.ReadCloser, unmarshalledResponseObject any) error { defer closeBody(responseBody) if err := json.NewDecoder(responseBody).Decode(&unmarshalledResponseObject); err != nil { body, err := io.ReadAll(responseBody) diff --git a/validator/keymanager/remote-web3signer/keymanager.go b/validator/keymanager/remote-web3signer/keymanager.go index b68007a43a..b3af2f8f88 100644 --- a/validator/keymanager/remote-web3signer/keymanager.go +++ b/validator/keymanager/remote-web3signer/keymanager.go @@ -756,7 +756,7 @@ func (km *Keymanager) ListKeymanagerAccounts(ctx context.Context, cfg keymanager // DisplayRemotePublicKeys prints remote public keys to stdout. func DisplayRemotePublicKeys(validatingPubKeys [][48]byte) { au := aurora.NewAurora(true) - for i := 0; i < len(validatingPubKeys); i++ { + for i := range validatingPubKeys { fmt.Println("") fmt.Printf( "%s\n", au.BrightGreen(petnames.DeterministicName(validatingPubKeys[i][:], "-")).Bold(), diff --git a/validator/keymanager/types.go b/validator/keymanager/types.go index c7e21b71a2..78da70ed5a 100644 --- a/validator/keymanager/types.go +++ b/validator/keymanager/types.go @@ -100,13 +100,13 @@ type AccountLister interface { // Keystore json file representation as a Go struct. 
type Keystore struct { - Crypto map[string]interface{} `json:"crypto"` - ID string `json:"uuid"` - Pubkey string `json:"pubkey"` - Version uint `json:"version"` - Description string `json:"description"` - Name string `json:"name,omitempty"` // field deprecated in favor of description, EIP2335 - Path string `json:"path"` + Crypto map[string]any `json:"crypto"` + ID string `json:"uuid"` + Pubkey string `json:"pubkey"` + Version uint `json:"version"` + Description string `json:"description"` + Name string `json:"name,omitempty"` // field deprecated in favor of description, EIP2335 + Path string `json:"path"` } // Kind defines an enum for either local, derived, or remote-signing diff --git a/validator/node/node.go b/validator/node/node.go index 490051fab1..4428cd1a3d 100644 --- a/validator/node/node.go +++ b/validator/node/node.go @@ -641,8 +641,8 @@ func clearDB(ctx context.Context, dataDir string, force bool, isDatabaseMinimal func parseBeaconApiHeaders(rawHeaders string) map[string][]string { result := make(map[string][]string) - pairs := strings.Split(rawHeaders, ",") - for _, pair := range pairs { + pairs := strings.SplitSeq(rawHeaders, ",") + for pair := range pairs { key, value, found := strings.Cut(pair, "=") if !found { // Skip malformed pairs diff --git a/validator/rpc/auth_token_test.go b/validator/rpc/auth_token_test.go index 22cb744a6f..e8989bdbd7 100644 --- a/validator/rpc/auth_token_test.go +++ b/validator/rpc/auth_token_test.go @@ -42,7 +42,7 @@ func TestServer_AuthenticateUsingExistingToken(t *testing.T) { unaryInfo := &grpc.UnaryServerInfo{ FullMethod: "Proto.CreateWallet", } - unaryHandler := func(ctx context.Context, req interface{}) (interface{}, error) { + unaryHandler := func(ctx context.Context, req any) (any, error) { return nil, nil } ctxMD := map[string][]string{ diff --git a/validator/rpc/handler_wallet_test.go b/validator/rpc/handler_wallet_test.go index c5624507f2..7edf0498fb 100644 --- a/validator/rpc/handler_wallet_test.go +++ b/validator/rpc/handler_wallet_test.go @@ -74,7 +74,7 @@ func TestServer_CreateWallet_Local(t *testing.T) { encryptor := keystorev4.New() keystores := make([]string, 3) passwords := make([]string, 3) - for i := 0; i < len(keystores); i++ { + for i := range keystores { privKey, err := bls.RandKey() require.NoError(t, err) pubKey := fmt.Sprintf("%x", privKey.PublicKey().Marshal()) diff --git a/validator/rpc/handlers_accounts.go b/validator/rpc/handlers_accounts.go index 92dc8b8ca8..b2c2888370 100644 --- a/validator/rpc/handlers_accounts.go +++ b/validator/rpc/handlers_accounts.go @@ -76,7 +76,7 @@ func (s *Server) ListAccounts(w http.ResponseWriter, r *http.Request) { return } accs := make([]*Account, len(keys)) - for i := 0; i < len(keys); i++ { + for i := range keys { accs[i] = &Account{ ValidatingPublicKey: hexutil.Encode(keys[i][:]), AccountName: petnames.DeterministicName(keys[i][:], "-"), diff --git a/validator/rpc/handlers_health_test.go b/validator/rpc/handlers_health_test.go index 5eef237bd2..54ecad0df6 100644 --- a/validator/rpc/handlers_health_test.go +++ b/validator/rpc/handlers_health_test.go @@ -38,7 +38,7 @@ func (m *MockBeaconNodeHealthClient) Recv() (*pb.LogsResponse, error) { return log, nil } -func (m *MockBeaconNodeHealthClient) SendMsg(_ interface{}) error { +func (m *MockBeaconNodeHealthClient) SendMsg(_ any) error { return m.err } diff --git a/validator/rpc/handlers_keymanager.go b/validator/rpc/handlers_keymanager.go index f9df921392..905a685e6d 100644 --- a/validator/rpc/handlers_keymanager.go +++ 
b/validator/rpc/handlers_keymanager.go @@ -63,7 +63,7 @@ func (s *Server) ListKeystores(w http.ResponseWriter, r *http.Request) { return } keystoreResponse := make([]*Keystore, len(pubKeys)) - for i := 0; i < len(pubKeys); i++ { + for i := range pubKeys { keystoreResponse[i] = &Keystore{ ValidatingPubkey: hexutil.Encode(pubKeys[i][:]), } @@ -276,7 +276,7 @@ func (s *Server) transformDeletedKeysStatuses( return nil, errors.Wrap(err, "could not get public keys from DB") } if len(pubKeysInDB) > 0 { - for i := 0; i < len(pubKeys); i++ { + for i := range pubKeys { keyExistsInDB := pubKeysInDB[bytesutil.ToBytes48(pubKeys[i])] if keyExistsInDB && statuses[i].Status == keymanager.StatusNotFound { statuses[i].Status = keymanager.StatusNotActive @@ -419,7 +419,7 @@ func (s *Server) ListRemoteKeys(w http.ResponseWriter, r *http.Request) { return } keystoreResponse := make([]*RemoteKey, len(pubKeys)) - for i := 0; i < len(pubKeys); i++ { + for i := range pubKeys { keystoreResponse[i] = &RemoteKey{ Pubkey: hexutil.Encode(pubKeys[i][:]), Url: s.validatorService.RemoteSignerConfig().BaseEndpoint, diff --git a/validator/rpc/handlers_keymanager_test.go b/validator/rpc/handlers_keymanager_test.go index 9840a681db..6727bd2e56 100644 --- a/validator/rpc/handlers_keymanager_test.go +++ b/validator/rpc/handlers_keymanager_test.go @@ -109,7 +109,7 @@ func TestServer_ListKeystores(t *testing.T) { resp := &ListKeystoresResponse{} require.NoError(t, json.Unmarshal(wr.Body.Bytes(), resp)) require.Equal(t, numAccounts, len(resp.Data)) - for i := 0; i < numAccounts; i++ { + for i := range numAccounts { require.DeepEqual(t, hexutil.Encode(expectedKeys[i][:]), resp.Data[i].ValidatingPubkey) require.Equal( t, @@ -243,7 +243,7 @@ func TestServer_ImportKeystores(t *testing.T) { password := "12345678" encodedKeystores := make([]string, numKeystores) passwords := make([]string, numKeystores) - for i := 0; i < numKeystores; i++ { + for i := range numKeystores { enc, err := json.Marshal(createRandomKeystore(t, password)) encodedKeystores[i] = string(enc) require.NoError(t, err) @@ -280,7 +280,7 @@ func TestServer_ImportKeystores(t *testing.T) { keystores := make([]*keymanager.Keystore, numKeystores) passwords := make([]string, numKeystores) publicKeys := make([][fieldparams.BLSPubkeyLength]byte, numKeystores) - for i := 0; i < numKeystores; i++ { + for i := range numKeystores { keystores[i] = createRandomKeystore(t, password) pubKey, err := hexutil.Decode("0x" + keystores[i].Pubkey) require.NoError(t, err) @@ -307,7 +307,7 @@ func TestServer_ImportKeystores(t *testing.T) { require.NoError(t, validatorDB.Close()) }() encodedKeystores := make([]string, numKeystores) - for i := 0; i < numKeystores; i++ { + for i := range numKeystores { enc, err := json.Marshal(keystores[i]) require.NoError(t, err) encodedKeystores[i] = string(enc) @@ -316,7 +316,7 @@ func TestServer_ImportKeystores(t *testing.T) { // Generate mock slashing history. attestingHistory := make([][]*dbCommon.AttestationRecord, 0) proposalHistory := make([]dbCommon.ProposalHistoryForPubkey, len(publicKeys)) - for i := 0; i < len(publicKeys); i++ { + for i := range publicKeys { proposalHistory[i].Proposals = make([]dbCommon.Proposal, 0) } mockJSON, err := mocks.MockSlashingProtectionJSON(publicKeys, attestingHistory, proposalHistory) @@ -439,7 +439,7 @@ func TestServer_DeleteKeystores(t *testing.T) { // Generate mock slashing history. 
attestingHistory := make([][]*dbCommon.AttestationRecord, 0) proposalHistory := make([]dbCommon.ProposalHistoryForPubkey, len(publicKeys)) - for i := 0; i < len(publicKeys); i++ { + for i := range publicKeys { proposalHistory[i].Proposals = make([]dbCommon.Proposal, 0) } mockJSON, err := mocks.MockSlashingProtectionJSON(publicKeys, attestingHistory, proposalHistory) diff --git a/validator/rpc/handlers_slashing_test.go b/validator/rpc/handlers_slashing_test.go index e628c1f796..31256012d7 100644 --- a/validator/rpc/handlers_slashing_test.go +++ b/validator/rpc/handlers_slashing_test.go @@ -93,7 +93,7 @@ func TestImportSlashingProtection_Preconditions(t *testing.T) { // Generate mock slashing history. attestingHistory := make([][]*common.AttestationRecord, 0) proposalHistory := make([]common.ProposalHistoryForPubkey, len(pubKeys)) - for i := 0; i < len(pubKeys); i++ { + for i := range pubKeys { proposalHistory[i].Proposals = make([]common.Proposal, 0) } mockJSON, err := mocks.MockSlashingProtectionJSON(pubKeys, attestingHistory, proposalHistory) @@ -198,7 +198,7 @@ func TestImportExportSlashingProtection_RoundTrip(t *testing.T) { // Generate mock slashing history. attestingHistory := make([][]*common.AttestationRecord, 0) proposalHistory := make([]common.ProposalHistoryForPubkey, len(pubKeys)) - for i := 0; i < len(pubKeys); i++ { + for i := range pubKeys { proposalHistory[i].Proposals = make([]common.Proposal, 0) } mockJSON, err := mocks.MockSlashingProtectionJSON(pubKeys, attestingHistory, proposalHistory) diff --git a/validator/rpc/intercepter.go b/validator/rpc/intercepter.go index c0d8d36fcf..9856d64e67 100644 --- a/validator/rpc/intercepter.go +++ b/validator/rpc/intercepter.go @@ -18,10 +18,10 @@ import ( func (s *Server) AuthTokenInterceptor() grpc.UnaryServerInterceptor { return func( ctx context.Context, - req interface{}, + req any, info *grpc.UnaryServerInfo, handler grpc.UnaryHandler, - ) (interface{}, error) { + ) (any, error) { if err := s.authorize(ctx); err != nil { return nil, err } diff --git a/validator/rpc/intercepter_test.go b/validator/rpc/intercepter_test.go index 1f34a05066..990bdb9c28 100644 --- a/validator/rpc/intercepter_test.go +++ b/validator/rpc/intercepter_test.go @@ -24,7 +24,7 @@ func TestServer_AuthTokenInterceptor_Verify(t *testing.T) { unaryInfo := &grpc.UnaryServerInfo{ FullMethod: "Proto.CreateWallet", } - unaryHandler := func(ctx context.Context, req interface{}) (interface{}, error) { + unaryHandler := func(ctx context.Context, req any) (any, error) { return nil, nil } ctxMD := map[string][]string{ @@ -45,7 +45,7 @@ func TestServer_AuthTokenInterceptor_BadToken(t *testing.T) { unaryInfo := &grpc.UnaryServerInfo{ FullMethod: "Proto.CreateWallet", } - unaryHandler := func(ctx context.Context, req interface{}) (interface{}, error) { + unaryHandler := func(ctx context.Context, req any) (any, error) { return nil, nil } diff --git a/validator/slashing-protection-history/export.go b/validator/slashing-protection-history/export.go index 4da0132caf..6024788f1c 100644 --- a/validator/slashing-protection-history/export.go +++ b/validator/slashing-protection-history/export.go @@ -139,7 +139,7 @@ func signedAttestationsByPubKey(ctx context.Context, validatorDB db.Database, pu return nil, nil } signedAttestations := make([]*format.SignedAttestation, 0) - for i := 0; i < len(history); i++ { + for i := range history { att := history[i] // Special edge case due to a bug in Prysm's old slashing protection schema. 
The bug // manifests itself as the first entry in attester slashing protection history diff --git a/validator/slashing-protection-history/round_trip_test.go b/validator/slashing-protection-history/round_trip_test.go index 50a5d534bb..522a2c8129 100644 --- a/validator/slashing-protection-history/round_trip_test.go +++ b/validator/slashing-protection-history/round_trip_test.go @@ -47,7 +47,7 @@ func TestImportExport_RoundTrip(t *testing.T) { require.NoError(t, err) rawPublicKeys := make([][]byte, numValidators) - for i := 0; i < numValidators; i++ { + for i := range numValidators { rawPublicKeys[i] = publicKeys[i][:] } @@ -133,7 +133,7 @@ func TestImportExport_RoundTrip_SkippedAttestationEpochs(t *testing.T) { require.NoError(t, err) rawPublicKeys := make([][]byte, numValidators) - for i := 0; i < numValidators; i++ { + for i := range numValidators { rawPublicKeys[i] = pubKeys[i][:] } @@ -184,7 +184,7 @@ func TestImportExport_FilterKeys(t *testing.T) { // Next up, we export our slashing protection database into the EIP standard file. // Next, we attempt to import it into our validator database. rawKeys := make([][]byte, 5) - for i := 0; i < len(rawKeys); i++ { + for i := range rawKeys { rawKeys[i] = publicKeys[i][:] } @@ -228,7 +228,7 @@ func TestImportInterchangeData_OK(t *testing.T) { // Next, we attempt to retrieve the attesting and proposals histories from our database and // verify those indeed match the originally generated mock histories. - for i := 0; i < len(publicKeys); i++ { + for i := range publicKeys { receivedAttestingHistory, err := validatorDB.AttestationHistoryForPubKey(ctx, publicKeys[i]) require.NoError(t, err) @@ -399,7 +399,7 @@ func TestStore_ImportInterchangeData_BadFormat_PreventsDBWrites(t *testing.T) { // verify nothing was saved to the DB. If there is an error in the import process, we need to make // sure writing is an atomic operation: either the import succeeds and saves the slashing protection // data to our DB, or it does not. - for i := 0; i < len(publicKeys); i++ { + for i := range publicKeys { receivedAttestingHistory, err := validatorDB.AttestationHistoryForPubKey(ctx, publicKeys[i]) require.NoError(t, err) require.Equal( diff --git a/validator/testing/protection_history.go b/validator/testing/protection_history.go index 39614ac964..fe7350eafa 100644 --- a/validator/testing/protection_history.go +++ b/validator/testing/protection_history.go @@ -23,7 +23,7 @@ func MockSlashingProtectionJSON( standardProtectionFormat := &format.EIPSlashingProtectionFormat{} standardProtectionFormat.Metadata.GenesisValidatorsRoot = fmt.Sprintf("%#x", bytesutil.PadTo([]byte{32}, 32)) standardProtectionFormat.Metadata.InterchangeFormatVersion = format.InterchangeFormatVersion - for i := 0; i < len(publicKeys); i++ { + for i := range publicKeys { data := &format.ProtectionData{ Pubkey: fmt.Sprintf("%#x", publicKeys[i]), } @@ -58,7 +58,7 @@ func MockAttestingAndProposalHistories(pubkeys [][fieldparams.BLSPubkeyLength]by attData := make([][]*common.AttestationRecord, numValidators) proposalData := make([]common.ProposalHistoryForPubkey, numValidators) gen := rand.NewGenerator() - for v := 0; v < numValidators; v++ { + for v := range numValidators { latestTarget := primitives.Epoch(gen.Intn(int(params.BeaconConfig().WeakSubjectivityPeriod) / 1000)) // If 0, we change the value to 1 as we compute source by doing (target-1) // to prevent any underflows in this setup helper. 
@@ -96,7 +96,7 @@ func MockAttestingAndProposalHistories(pubkeys [][fieldparams.BLSPubkeyLength]by // CreateRandomPubKeys -- func CreateRandomPubKeys(numValidators int) ([][fieldparams.BLSPubkeyLength]byte, error) { pubKeys := make([][fieldparams.BLSPubkeyLength]byte, numValidators) - for i := 0; i < numValidators; i++ { + for i := range numValidators { randKey, err := bls.RandKey() if err != nil { return nil, err @@ -109,7 +109,7 @@ func CreateRandomPubKeys(numValidators int) ([][fieldparams.BLSPubkeyLength]byte // CreateMockRoots -- func CreateMockRoots(numRoots int) [][32]byte { roots := make([][32]byte, numRoots) - for i := 0; i < numRoots; i++ { + for i := range numRoots { var rt [32]byte copy(rt[:], fmt.Sprintf("%d", i)) } From 76f3083090b90887423950f6325ad1d2a18c35cd Mon Sep 17 00:00:00 2001 From: Galoretka <galoretochka@gmail.com> Date: Fri, 14 Nov 2025 15:24:21 +0200 Subject: [PATCH 099/103] fix(validator/db): proposals progress bar count (#16020) * fix(validator/db): proposals progress bar count * Create Galoretka_convert-progress.md --- changelog/Galoretka_convert-progress.md | 3 +++ validator/db/convert.go | 2 +- 2 files changed, 4 insertions(+), 1 deletion(-) create mode 100644 changelog/Galoretka_convert-progress.md diff --git a/changelog/Galoretka_convert-progress.md b/changelog/Galoretka_convert-progress.md new file mode 100644 index 0000000000..5962830b56 --- /dev/null +++ b/changelog/Galoretka_convert-progress.md @@ -0,0 +1,3 @@ +### Fixed + +- Fix proposals progress bar count [#16020](https://github.com/OffchainLabs/prysm/pull/16020) diff --git a/validator/db/convert.go b/validator/db/convert.go index 034d7620c4..6a33b6148c 100644 --- a/validator/db/convert.go +++ b/validator/db/convert.go @@ -212,7 +212,7 @@ func ConvertDatabase(ctx context.Context, sourceDataDir string, targetDataDir st // Initialize the progress bar. bar = common.InitializeProgressBar( - len(attestedPublicKeys), + len(proposedPublicKeys), "Processing proposals:", ) From 2302ef918a6dc2372e83f841983addbd161a6e06 Mon Sep 17 00:00:00 2001 From: Preston Van Loon <pvanloon@offchainlabs.com> Date: Fri, 14 Nov 2025 11:58:44 -0600 Subject: [PATCH 100/103] Vendored github.com/tyler-smith/go-bip39 (#16015) * Vendor go-bip39 dependency locally to third_party/ The github.com/tyler-smith/go-bip39 repository has been deleted from GitHub but is still needed for BIP-39 mnemonic functionality in the validator wallet system. This change vendors v1.1.0 of the library into third_party/go-bip39/ to ensure continued availability. Changes: - Copy go-bip39 v1.1.0 source from Go module cache to third_party/go-bip39/ - Create BUILD.bazel files for main package and wordlists subpackage - Update 5 BUILD.bazel files to reference local vendored version instead of external dependency - Remove go-bip39 from go.mod and deps.bzl - All builds and tests pass successfully The vendored package includes all 9 language wordlists (English, Chinese Simplified/Traditional, Czech, French, Italian, Japanese, Korean, Spanish) and maintains the original import paths for compatibility. 
* Changelog fragment * use go mod replace for vendored lib * Run gazelle --------- Co-authored-by: Kasey Kirkham <kasey@users.noreply.github.com> --- changelog/pvl-go-bip39.md | 3 + cmd/validator/wallet/BUILD.bazel | 4 +- deps.bzl | 42 +- go.mod | 20 +- go.sum | 38 +- third_party/go-bip39/BUILD.bazel | 14 + third_party/go-bip39/LICENSE | 21 + third_party/go-bip39/README.md | 45 + third_party/go-bip39/bip39.go | 360 +++ third_party/go-bip39/go.mod | 5 + third_party/go-bip39/wordlists/BUILD.bazel | 20 + .../go-bip39/wordlists/chinese_simplified.go | 2071 +++++++++++++++++ .../go-bip39/wordlists/chinese_traditional.go | 2071 +++++++++++++++++ third_party/go-bip39/wordlists/czech.go | 2071 +++++++++++++++++ third_party/go-bip39/wordlists/english.go | 2071 +++++++++++++++++ third_party/go-bip39/wordlists/french.go | 2071 +++++++++++++++++ third_party/go-bip39/wordlists/italian.go | 2071 +++++++++++++++++ third_party/go-bip39/wordlists/japanese.go | 2071 +++++++++++++++++ third_party/go-bip39/wordlists/korean.go | 2071 +++++++++++++++++ third_party/go-bip39/wordlists/spanish.go | 2071 +++++++++++++++++ tools/interop/split-keys/BUILD.bazel | 2 +- validator/client/BUILD.bazel | 2 +- validator/keymanager/derived/BUILD.bazel | 8 +- validator/rpc/BUILD.bazel | 6 +- 24 files changed, 19165 insertions(+), 64 deletions(-) create mode 100644 changelog/pvl-go-bip39.md create mode 100644 third_party/go-bip39/BUILD.bazel create mode 100644 third_party/go-bip39/LICENSE create mode 100644 third_party/go-bip39/README.md create mode 100644 third_party/go-bip39/bip39.go create mode 100644 third_party/go-bip39/go.mod create mode 100644 third_party/go-bip39/wordlists/BUILD.bazel create mode 100644 third_party/go-bip39/wordlists/chinese_simplified.go create mode 100644 third_party/go-bip39/wordlists/chinese_traditional.go create mode 100644 third_party/go-bip39/wordlists/czech.go create mode 100644 third_party/go-bip39/wordlists/english.go create mode 100644 third_party/go-bip39/wordlists/french.go create mode 100644 third_party/go-bip39/wordlists/italian.go create mode 100644 third_party/go-bip39/wordlists/japanese.go create mode 100644 third_party/go-bip39/wordlists/korean.go create mode 100644 third_party/go-bip39/wordlists/spanish.go diff --git a/changelog/pvl-go-bip39.md b/changelog/pvl-go-bip39.md new file mode 100644 index 0000000000..6a5c7213d9 --- /dev/null +++ b/changelog/pvl-go-bip39.md @@ -0,0 +1,3 @@ +### Ignored + +- Copied deleted dependency `github.com/tyler-smith/go-bip39` to the third_party directory and updated prysm to use that.
diff --git a/cmd/validator/wallet/BUILD.bazel b/cmd/validator/wallet/BUILD.bazel index 2c00a6e28b..a9329bad68 100644 --- a/cmd/validator/wallet/BUILD.bazel +++ b/cmd/validator/wallet/BUILD.bazel @@ -15,6 +15,8 @@ go_library( "//config/features:go_default_library", "//io/prompt:go_default_library", "//runtime/tos:go_default_library", + "//third_party/go-bip39:go_default_library", + "//third_party/go-bip39/wordlists:go_default_library", "//validator/accounts:go_default_library", "//validator/accounts/userprompt:go_default_library", "//validator/accounts/wallet:go_default_library", @@ -22,8 +24,6 @@ go_library( "@com_github_manifoldco_promptui//:go_default_library", "@com_github_pkg_errors//:go_default_library", "@com_github_sirupsen_logrus//:go_default_library", - "@com_github_tyler_smith_go_bip39//:go_default_library", - "@com_github_tyler_smith_go_bip39//wordlists:go_default_library", "@com_github_urfave_cli_v2//:go_default_library", ], ) diff --git a/deps.bzl b/deps.bzl index 1b41264eb3..b075e11609 100644 --- a/deps.bzl +++ b/deps.bzl @@ -3405,12 +3405,6 @@ def prysm_deps(): sum = "h1:EBoYk5zHOfuHDBqLFx4eSPRVcbnW+L3aFJzoCi8zRnk=", version = "v0.0.0-20250212181730-4c2b8e9e784b", ) - go_repository( - name = "com_github_tyler_smith_go_bip39", - importpath = "github.com/tyler-smith/go-bip39", - sum = "h1:5eUemwrMargf3BSLRRCalXT93Ns6pQJIjYQN2nyfOP8=", - version = "v1.1.0", - ) go_repository( name = "com_github_ugorji_go_codec", importpath = "github.com/ugorji/go/codec", @@ -4784,8 +4778,8 @@ def prysm_deps(): go_repository( name = "org_golang_x_crypto", importpath = "golang.org/x/crypto", - sum = "h1:dduJYIi3A3KOfdGOHX8AVZ/jGiyPa3IbBozJ5kNuE04=", - version = "v0.43.0", + sum = "h1:A97SsFvM3AIwEEmTBiaxPPTYpDC47w720rdiiUvgoAU=", + version = "v0.44.0", ) go_repository( name = "org_golang_x_exp", @@ -4820,14 +4814,14 @@ def prysm_deps(): go_repository( name = "org_golang_x_mod", importpath = "golang.org/x/mod", - sum = "h1:HV8lRxZC4l2cr3Zq1LvtOsi/ThTgWnUk/y64QSs8GwA=", - version = "v0.29.0", + sum = "h1:fDEXFVZ/fmCKProc/yAXXUijritrDzahmwwefnjoPFk=", + version = "v0.30.0", ) go_repository( name = "org_golang_x_net", importpath = "golang.org/x/net", - sum = "h1:giFlY12I07fugqwPuWJi68oOnpfqFnJIJzaIIm2JVV4=", - version = "v0.46.0", + sum = "h1:Mx+4dIFzqraBXUugkia1OOvlD6LemFo1ALMHjrXDOhY=", + version = "v0.47.0", ) go_repository( name = "org_golang_x_oauth2", @@ -4844,32 +4838,32 @@ def prysm_deps(): go_repository( name = "org_golang_x_sync", importpath = "golang.org/x/sync", - sum = "h1:l60nONMj9l5drqw6jlhIELNv9I0A4OFgRsG9k2oT9Ug=", - version = "v0.17.0", + sum = "h1:kr88TuHDroi+UVf+0hZnirlk8o8T+4MrK6mr60WkH/I=", + version = "v0.18.0", ) go_repository( name = "org_golang_x_sys", importpath = "golang.org/x/sys", - sum = "h1:fdNQudmxPjkdUTPnLn5mdQv7Zwvbvpaxqs831goi9kQ=", - version = "v0.37.0", + sum = "h1:3yZWxaJjBmCWXqhN1qh02AkOnCQ1poK6oF+a7xWL6Gc=", + version = "v0.38.0", ) go_repository( name = "org_golang_x_telemetry", importpath = "golang.org/x/telemetry", - sum = "h1:LvzTn0GQhWuvKH/kVRS3R3bVAsdQWI7hvfLHGgh9+lU=", - version = "v0.0.0-20251008203120-078029d740a8", + sum = "h1:E2/AqCUMZGgd73TQkxUMcMla25GB9i/5HOdLr+uH7Vo=", + version = "v0.0.0-20251111182119-bc8e575c7b54", ) go_repository( name = "org_golang_x_term", importpath = "golang.org/x/term", - sum = "h1:zMPR+aF8gfksFprF/Nc/rd1wRS1EI6nDBGyWAvDzx2Q=", - version = "v0.36.0", + sum = "h1:8EGAD0qCmHYZg6J17DvsMy9/wJ7/D/4pV/wfnld5lTU=", + version = "v0.37.0", ) go_repository( name = "org_golang_x_text", importpath = "golang.org/x/text", - sum = 
"h1:yznKA/E9zq54KzlzBEAWn1NXSQ8DIp/NYMy88xJjl4k=", - version = "v0.30.0", + sum = "h1:aC8ghyu4JhP8VojJ2lEHBnochRno1sgL6nEi9WGFGMM=", + version = "v0.31.0", ) go_repository( name = "org_golang_x_time", @@ -4880,8 +4874,8 @@ def prysm_deps(): go_repository( name = "org_golang_x_tools", importpath = "golang.org/x/tools", - sum = "h1:Hx2Xv8hISq8Lm16jvBZ2VQf+RLmbd7wVUsALibYI/IQ=", - version = "v0.38.0", + sum = "h1:ik4ho21kwuQln40uelmciQPp9SipgNDdrafrYA4TmQQ=", + version = "v0.39.0", ) go_repository( name = "org_golang_x_tools_go_expect", diff --git a/go.mod b/go.mod index d0abcd3c1d..8b0952af28 100644 --- a/go.mod +++ b/go.mod @@ -88,10 +88,10 @@ require ( go.opentelemetry.io/otel/trace v1.35.0 go.uber.org/automaxprocs v1.5.2 go.uber.org/mock v0.5.2 - golang.org/x/crypto v0.43.0 + golang.org/x/crypto v0.44.0 golang.org/x/exp v0.0.0-20250128182459-e0ece0dbea4c - golang.org/x/sync v0.17.0 - golang.org/x/tools v0.38.0 + golang.org/x/sync v0.18.0 + golang.org/x/tools v0.39.0 google.golang.org/genproto v0.0.0-20230410155749-daa745c078e1 google.golang.org/grpc v1.71.0 google.golang.org/protobuf v1.36.5 @@ -263,12 +263,12 @@ require ( go.uber.org/multierr v1.11.0 // indirect go.uber.org/zap v1.27.0 // indirect golang.org/x/exp/typeparams v0.0.0-20231108232855-2478ac86f678 // indirect - golang.org/x/mod v0.29.0 // indirect - golang.org/x/net v0.46.0 // indirect + golang.org/x/mod v0.30.0 // indirect + golang.org/x/net v0.47.0 // indirect golang.org/x/oauth2 v0.25.0 // indirect - golang.org/x/telemetry v0.0.0-20251008203120-078029d740a8 // indirect - golang.org/x/term v0.36.0 // indirect - golang.org/x/text v0.30.0 // indirect + golang.org/x/telemetry v0.0.0-20251111182119-bc8e575c7b54 // indirect + golang.org/x/term v0.37.0 // indirect + golang.org/x/text v0.31.0 // indirect golang.org/x/time v0.9.0 // indirect golang.org/x/tools/go/expect v0.1.1-deprecated // indirect gopkg.in/cenkalti/backoff.v1 v1.1.0 // indirect @@ -290,9 +290,11 @@ require ( github.com/go-playground/validator/v10 v10.13.0 github.com/peterh/liner v1.2.0 // indirect github.com/prysmaticlabs/gohashtree v0.0.5-beta - golang.org/x/sys v0.37.0 // indirect + golang.org/x/sys v0.38.0 // indirect k8s.io/klog/v2 v2.120.1 // indirect k8s.io/utils v0.0.0-20230726121419-3b25d923346b // indirect ) replace github.com/json-iterator/go => github.com/prestonvanloon/go v1.1.7-0.20190722034630-4f2e55fcf87b + +replace github.com/tyler-smith/go-bip39 => ./third_party/go-bip39 diff --git a/go.sum b/go.sum index b413fb263a..9afdac043b 100644 --- a/go.sum +++ b/go.sum @@ -1040,8 +1040,6 @@ github.com/tklauser/numcpus v0.7.0/go.mod h1:bb6dMVcj8A42tSE7i32fsIUCbQNllK5iDgu github.com/tmc/grpc-websocket-proxy v0.0.0-20170815181823-89b8d40f7ca8/go.mod h1:ncp9v5uamzpCO7NfCPTXjqaC+bZgJeR0sMTm6dMHP7U= github.com/trailofbits/go-mutexasserts v0.0.0-20250212181730-4c2b8e9e784b h1:EBoYk5zHOfuHDBqLFx4eSPRVcbnW+L3aFJzoCi8zRnk= github.com/trailofbits/go-mutexasserts v0.0.0-20250212181730-4c2b8e9e784b/go.mod h1:4R6Qam+w871wOlyRq59zRLjhb5x9/De/wgPeaCTaCwI= -github.com/tyler-smith/go-bip39 v1.1.0 h1:5eUemwrMargf3BSLRRCalXT93Ns6pQJIjYQN2nyfOP8= -github.com/tyler-smith/go-bip39 v1.1.0/go.mod h1:gUYDtqQw1JS3ZJ8UWVcGTGqqr6YIN3CWg+kkNaLt55U= github.com/umbracle/gohashtree v0.0.2-alpha.0.20230207094856-5b775a815c10 h1:CQh33pStIp/E30b7TxDlXfM0145bn2e8boI30IxAhTg= github.com/umbracle/gohashtree v0.0.2-alpha.0.20230207094856-5b775a815c10/go.mod h1:x/Pa0FF5Te9kdrlZKJK82YmAkvL8+f989USgz6Jiw7M= github.com/urfave/cli v1.20.0/go.mod h1:70zkFmudgCuE/ngEzBv17Jvp/497gISqfk5gWijbERA= @@ 
-1173,8 +1171,8 @@ golang.org/x/crypto v0.0.0-20220722155217-630584e8d5aa/go.mod h1:IxCIyHEi3zRg3s0 golang.org/x/crypto v0.8.0/go.mod h1:mRqEX+O9/h5TFCrQhkgjo2yKi0yYA+9ecGkdQoHrywE= golang.org/x/crypto v0.12.0/go.mod h1:NF0Gs7EO5K4qLn+Ylc+fih8BSTeIjAP05siRnAh98yw= golang.org/x/crypto v0.18.0/go.mod h1:R0j02AL6hcrfOiy9T4ZYp/rcWeMxM3L6QYxlOuEG1mg= -golang.org/x/crypto v0.43.0 h1:dduJYIi3A3KOfdGOHX8AVZ/jGiyPa3IbBozJ5kNuE04= -golang.org/x/crypto v0.43.0/go.mod h1:BFbav4mRNlXJL4wNeejLpWxB7wMbc79PdRGhWKncxR0= +golang.org/x/crypto v0.44.0 h1:A97SsFvM3AIwEEmTBiaxPPTYpDC47w720rdiiUvgoAU= +golang.org/x/crypto v0.44.0/go.mod h1:013i+Nw79BMiQiMsOPcVCB5ZIJbYkerPrGnOa00tvmc= golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8= @@ -1217,8 +1215,8 @@ golang.org/x/mod v0.4.1/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.4.2/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4= golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= -golang.org/x/mod v0.29.0 h1:HV8lRxZC4l2cr3Zq1LvtOsi/ThTgWnUk/y64QSs8GwA= -golang.org/x/mod v0.29.0/go.mod h1:NyhrlYXJ2H4eJiRy/WDBO6HMqZQ6q9nk4JzS3NuCK+w= +golang.org/x/mod v0.30.0 h1:fDEXFVZ/fmCKProc/yAXXUijritrDzahmwwefnjoPFk= +golang.org/x/mod v0.30.0/go.mod h1:lAsf5O2EvJeSFMiBxXDki7sCgAxEUcZHXoXMKT4GJKc= golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180906233101-161cd47e91fd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= @@ -1274,8 +1272,8 @@ golang.org/x/net v0.9.0/go.mod h1:d48xBJpPfHeWQsugry2m+kC02ZBRGRgulfHnEXEuWns= golang.org/x/net v0.10.0/go.mod h1:0qNGK6F8kojg2nk9dLZ2mShWaEBan6FAoqfSigmmuDg= golang.org/x/net v0.14.0/go.mod h1:PpSgVXXLK0OxS0F31C1/tv6XNguvCrnXIDrFMspZIUI= golang.org/x/net v0.20.0/go.mod h1:z8BVo6PvndSri0LbOE3hAn0apkU+1YvI6E70E9jsnvY= -golang.org/x/net v0.46.0 h1:giFlY12I07fugqwPuWJi68oOnpfqFnJIJzaIIm2JVV4= -golang.org/x/net v0.46.0/go.mod h1:Q9BGdFy1y4nkUwiLvT5qtyhAnEHgnQ/zd8PfU6nc210= +golang.org/x/net v0.47.0 h1:Mx+4dIFzqraBXUugkia1OOvlD6LemFo1ALMHjrXDOhY= +golang.org/x/net v0.47.0/go.mod h1:/jNxtkgq5yWUGYkaZGqo27cfGZ1c5Nen03aYrrKpVRU= golang.org/x/oauth2 v0.0.0-20170912212905-13449ad91cb2/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= golang.org/x/oauth2 v0.0.0-20181017192945-9dcd33a902f4/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= @@ -1305,8 +1303,8 @@ golang.org/x/sync v0.0.0-20201207232520-09787c993a3a/go.mod h1:RxMgew5VJxzue5/jJ golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.17.0 h1:l60nONMj9l5drqw6jlhIELNv9I0A4OFgRsG9k2oT9Ug= -golang.org/x/sync v0.17.0/go.mod h1:9KTHXmSnoGruLpwFjVSX0lNNA75CykiMECbovNTZqGI= +golang.org/x/sync 
v0.18.0 h1:kr88TuHDroi+UVf+0hZnirlk8o8T+4MrK6mr60WkH/I= +golang.org/x/sync v0.18.0/go.mod h1:9KTHXmSnoGruLpwFjVSX0lNNA75CykiMECbovNTZqGI= golang.org/x/sys v0.0.0-20180810173357-98c5dad5d1a0/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20180823144017-11551d06cbcc/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= @@ -1395,10 +1393,10 @@ golang.org/x/sys v0.9.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.11.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.14.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/sys v0.16.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= -golang.org/x/sys v0.37.0 h1:fdNQudmxPjkdUTPnLn5mdQv7Zwvbvpaxqs831goi9kQ= -golang.org/x/sys v0.37.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks= -golang.org/x/telemetry v0.0.0-20251008203120-078029d740a8 h1:LvzTn0GQhWuvKH/kVRS3R3bVAsdQWI7hvfLHGgh9+lU= -golang.org/x/telemetry v0.0.0-20251008203120-078029d740a8/go.mod h1:Pi4ztBfryZoJEkyFTI5/Ocsu2jXyDr6iSdgJiYE/uwE= +golang.org/x/sys v0.38.0 h1:3yZWxaJjBmCWXqhN1qh02AkOnCQ1poK6oF+a7xWL6Gc= +golang.org/x/sys v0.38.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks= +golang.org/x/telemetry v0.0.0-20251111182119-bc8e575c7b54 h1:E2/AqCUMZGgd73TQkxUMcMla25GB9i/5HOdLr+uH7Vo= +golang.org/x/telemetry v0.0.0-20251111182119-bc8e575c7b54/go.mod h1:hKdjCMrbv9skySur+Nek8Hd0uJ0GuxJIoIX2payrIdQ= golang.org/x/term v0.0.0-20201117132131-f5c789dd3221/go.mod h1:Nr5EML6q2oocZ2LXRh80K7BxOlk5/8JxuGnuhpl+muw= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= @@ -1407,8 +1405,8 @@ golang.org/x/term v0.7.0/go.mod h1:P32HKFT3hSsZrRxla30E9HqToFYAQPCMs/zFMBUFqPY= golang.org/x/term v0.8.0/go.mod h1:xPskH00ivmX89bAKVGSKKtLOWNx2+17Eiy94tnKShWo= golang.org/x/term v0.11.0/go.mod h1:zC9APTIj3jG3FdV/Ons+XE1riIZXG4aZ4GTHiPZJPIU= golang.org/x/term v0.16.0/go.mod h1:yn7UURbUtPyrVJPGPq404EukNFxcm/foM+bV/bfcDsY= -golang.org/x/term v0.36.0 h1:zMPR+aF8gfksFprF/Nc/rd1wRS1EI6nDBGyWAvDzx2Q= -golang.org/x/term v0.36.0/go.mod h1:Qu394IJq6V6dCBRgwqshf3mPF85AqzYEzofzRdZkWss= +golang.org/x/term v0.37.0 h1:8EGAD0qCmHYZg6J17DvsMy9/wJ7/D/4pV/wfnld5lTU= +golang.org/x/term v0.37.0/go.mod h1:5pB4lxRNYYVZuTLmy8oR2BH8dflOR+IbTYFD8fi3254= golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= @@ -1423,8 +1421,8 @@ golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8= golang.org/x/text v0.12.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE= golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= -golang.org/x/text v0.30.0 h1:yznKA/E9zq54KzlzBEAWn1NXSQ8DIp/NYMy88xJjl4k= -golang.org/x/text v0.30.0/go.mod h1:yDdHFIX9t+tORqspjENWgzaCVXgk0yYnYuSZ8UzzBVM= +golang.org/x/text v0.31.0 h1:aC8ghyu4JhP8VojJ2lEHBnochRno1sgL6nEi9WGFGMM= +golang.org/x/text v0.31.0/go.mod h1:tKRAlv61yKIjGGHX/4tP1LTbc13YSec1pxVEWXzfoeM= golang.org/x/time v0.0.0-20170424234030-8be79e1e0910/go.mod 
h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20180412165947-fbb02b2291d2/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= @@ -1497,8 +1495,8 @@ golang.org/x/tools v0.1.4/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= golang.org/x/tools v0.1.5/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc= golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU= -golang.org/x/tools v0.38.0 h1:Hx2Xv8hISq8Lm16jvBZ2VQf+RLmbd7wVUsALibYI/IQ= -golang.org/x/tools v0.38.0/go.mod h1:yEsQ/d/YK8cjh0L6rZlY8tgtlKiBNTL14pGDJPJpYQs= +golang.org/x/tools v0.39.0 h1:ik4ho21kwuQln40uelmciQPp9SipgNDdrafrYA4TmQQ= +golang.org/x/tools v0.39.0/go.mod h1:JnefbkDPyD8UU2kI5fuf8ZX4/yUeh9W877ZeBONxUqQ= golang.org/x/tools/go/expect v0.1.1-deprecated h1:jpBZDwmgPhXsKZC6WhL20P4b/wmnpsEAGHaNy0n/rJM= golang.org/x/tools/go/expect v0.1.1-deprecated/go.mod h1:eihoPOH+FgIqa3FpoTwguz/bVUSGBlGQU67vpBeOrBY= golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= diff --git a/third_party/go-bip39/BUILD.bazel b/third_party/go-bip39/BUILD.bazel new file mode 100644 index 0000000000..766715b706 --- /dev/null +++ b/third_party/go-bip39/BUILD.bazel @@ -0,0 +1,14 @@ +load("@prysm//tools/go:def.bzl", "go_library") + +# gazelle:prefix github.com/tyler-smith/go-bip39 + +go_library( + name = "go_default_library", + srcs = ["bip39.go"], + importpath = "github.com/tyler-smith/go-bip39", + visibility = ["//visibility:public"], + deps = [ + "//third_party/go-bip39/wordlists:go_default_library", + "@org_golang_x_crypto//pbkdf2:go_default_library", + ], +) diff --git a/third_party/go-bip39/LICENSE b/third_party/go-bip39/LICENSE new file mode 100644 index 0000000000..4dae82d35b --- /dev/null +++ b/third_party/go-bip39/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2014-2018 Tyler Smith and contributors + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
diff --git a/third_party/go-bip39/README.md b/third_party/go-bip39/README.md new file mode 100644 index 0000000000..933296fd83 --- /dev/null +++ b/third_party/go-bip39/README.md @@ -0,0 +1,45 @@ +# go-bip39 +[![Build Status](https://travis-ci.org/tyler-smith/go-bip39.svg?branch=master)](https://travis-ci.org/tyler-smith/go-bip39) +[![license](https://img.shields.io/github/license/tyler-smith/go-bip39.svg?maxAge=2592000)](https://github.com/tyler-smith/go-bip39/blob/master/LICENSE) +[![Documentation](https://godoc.org/github.com/tyler-smith/go-bip39?status.svg)](http://godoc.org/github.com/tyler-smith/go-bip39) +[![Go Report Card](https://goreportcard.com/badge/github.com/tyler-smith/go-bip39)](https://goreportcard.com/report/github.com/tyler-smith/go-bip39) +[![GitHub issues](https://img.shields.io/github/issues/tyler-smith/go-bip39.svg)](https://github.com/tyler-smith/go-bip39/issues) + + +A golang implementation of the BIP0039 spec for mnemonic seeds + +## Example + +```go +package main + +import ( + "github.com/tyler-smith/go-bip39" + "github.com/tyler-smith/go-bip32" + "fmt" +) + +func main(){ + // Generate a mnemonic for memorization or user-friendly seeds + entropy, _ := bip39.NewEntropy(256) + mnemonic, _ := bip39.NewMnemonic(entropy) + + // Generate a Bip32 HD wallet for the mnemonic and a user supplied password + seed := bip39.NewSeed(mnemonic, "Secret Passphrase") + + masterKey, _ := bip32.NewMasterKey(seed) + publicKey := masterKey.PublicKey() + + // Display mnemonic and keys + fmt.Println("Mnemonic: ", mnemonic) + fmt.Println("Master private key: ", masterKey) + fmt.Println("Master public key: ", publicKey) +} +``` + +## Credits + +Wordlists are from the [bip39 spec](https://github.com/bitcoin/bips/tree/master/bip-0039). + +Test vectors are from the standard Python BIP0039 implementation from the +Trezor team: [https://github.com/trezor/python-mnemonic](https://github.com/trezor/python-mnemonic) diff --git a/third_party/go-bip39/bip39.go b/third_party/go-bip39/bip39.go new file mode 100644 index 0000000000..557284240f --- /dev/null +++ b/third_party/go-bip39/bip39.go @@ -0,0 +1,360 @@ +// Package bip39 is the Golang implementation of the BIP39 spec. +// +// The official BIP39 spec can be found at +// https://github.com/bitcoin/bips/blob/master/bip-0039.mediawiki +package bip39 + +import ( + "crypto/rand" + "crypto/sha256" + "crypto/sha512" + "encoding/binary" + "errors" + "fmt" + "math/big" + "strings" + + "github.com/tyler-smith/go-bip39/wordlists" + "golang.org/x/crypto/pbkdf2" +) + +var ( + // Some bitwise operands for working with big.Ints + last11BitsMask = big.NewInt(2047) + shift11BitsMask = big.NewInt(2048) + bigOne = big.NewInt(1) + bigTwo = big.NewInt(2) + + // used to isolate the checksum bits from the entropy+checksum byte array + wordLengthChecksumMasksMapping = map[int]*big.Int{ + 12: big.NewInt(15), + 15: big.NewInt(31), + 18: big.NewInt(63), + 21: big.NewInt(127), + 24: big.NewInt(255), + } + // used to use only the desired x of 8 available checksum bits. 
+ // 256 bit (word length 24) requires all 8 bits of the checksum, + // and thus no shifting is needed for it (we would get a divByZero crash if we did) + wordLengthChecksumShiftMapping = map[int]*big.Int{ + 12: big.NewInt(16), + 15: big.NewInt(8), + 18: big.NewInt(4), + 21: big.NewInt(2), + } + + // wordList is the set of words to use + wordList []string + + // wordMap is a reverse lookup map for wordList + wordMap map[string]int +) + +var ( + // ErrInvalidMnemonic is returned when trying to use a malformed mnemonic. + ErrInvalidMnemonic = errors.New("Invalid mnenomic") + + // ErrEntropyLengthInvalid is returned when trying to use an entropy set with + // an invalid size. + ErrEntropyLengthInvalid = errors.New("Entropy length must be [128, 256] and a multiple of 32") + + // ErrValidatedSeedLengthMismatch is returned when a validated seed is not the + // same size as the given seed. This should never happen is present only as a + // sanity assertion. + ErrValidatedSeedLengthMismatch = errors.New("Seed length does not match validated seed length") + + // ErrChecksumIncorrect is returned when entropy has the incorrect checksum. + ErrChecksumIncorrect = errors.New("Checksum incorrect") +) + +func init() { + SetWordList(wordlists.English) +} + +// SetWordList sets the list of words to use for mnemonics. Currently the list +// that is set is used package-wide. +func SetWordList(list []string) { + wordList = list + wordMap = map[string]int{} + for i, v := range wordList { + wordMap[v] = i + } +} + +// GetWordList gets the list of words to use for mnemonics. +func GetWordList() []string { + return wordList +} + +// GetWordIndex gets word index in wordMap. +func GetWordIndex(word string) (int, bool) { + idx, ok := wordMap[word] + return idx, ok +} + +// NewEntropy will create random entropy bytes +// so long as the requested size bitSize is an appropriate size. +// +// bitSize has to be a multiple 32 and be within the inclusive range of {128, 256} +func NewEntropy(bitSize int) ([]byte, error) { + err := validateEntropyBitSize(bitSize) + if err != nil { + return nil, err + } + + entropy := make([]byte, bitSize/8) + _, err = rand.Read(entropy) + return entropy, err +} + +// EntropyFromMnemonic takes a mnemonic generated by this library, +// and returns the input entropy used to generate the given mnemonic. +// An error is returned if the given mnemonic is invalid. +func EntropyFromMnemonic(mnemonic string) ([]byte, error) { + mnemonicSlice, isValid := splitMnemonicWords(mnemonic) + if !isValid { + return nil, ErrInvalidMnemonic + } + + // Decode the words into a big.Int. + b := big.NewInt(0) + for _, v := range mnemonicSlice { + index, found := wordMap[v] + if found == false { + return nil, fmt.Errorf("word `%v` not found in reverse map", v) + } + var wordBytes [2]byte + binary.BigEndian.PutUint16(wordBytes[:], uint16(index)) + b = b.Mul(b, shift11BitsMask) + b = b.Or(b, big.NewInt(0).SetBytes(wordBytes[:])) + } + + // Build and add the checksum to the big.Int. + checksum := big.NewInt(0) + checksumMask := wordLengthChecksumMasksMapping[len(mnemonicSlice)] + checksum = checksum.And(b, checksumMask) + + b.Div(b, big.NewInt(0).Add(checksumMask, bigOne)) + + // The entropy is the underlying bytes of the big.Int. Any upper bytes of + // all 0's are not returned so we pad the beginning of the slice with empty + // bytes if necessary. + entropy := b.Bytes() + entropy = padByteSlice(entropy, len(mnemonicSlice)/3*4) + + // Generate the checksum and compare with the one we got from the mneomnic. 
+ entropyChecksumBytes := computeChecksum(entropy) + entropyChecksum := big.NewInt(int64(entropyChecksumBytes[0])) + if l := len(mnemonicSlice); l != 24 { + checksumShift := wordLengthChecksumShiftMapping[l] + entropyChecksum.Div(entropyChecksum, checksumShift) + } + + if checksum.Cmp(entropyChecksum) != 0 { + return nil, ErrChecksumIncorrect + } + + return entropy, nil +} + +// NewMnemonic will return a string consisting of the mnemonic words for +// the given entropy. +// If the provide entropy is invalid, an error will be returned. +func NewMnemonic(entropy []byte) (string, error) { + // Compute some lengths for convenience. + entropyBitLength := len(entropy) * 8 + checksumBitLength := entropyBitLength / 32 + sentenceLength := (entropyBitLength + checksumBitLength) / 11 + + // Validate that the requested size is supported. + err := validateEntropyBitSize(entropyBitLength) + if err != nil { + return "", err + } + + // Add checksum to entropy. + entropy = addChecksum(entropy) + + // Break entropy up into sentenceLength chunks of 11 bits. + // For each word AND mask the rightmost 11 bits and find the word at that index. + // Then bitshift entropy 11 bits right and repeat. + // Add to the last empty slot so we can work with LSBs instead of MSB. + + // Entropy as an int so we can bitmask without worrying about bytes slices. + entropyInt := new(big.Int).SetBytes(entropy) + + // Slice to hold words in. + words := make([]string, sentenceLength) + + // Throw away big.Int for AND masking. + word := big.NewInt(0) + + for i := sentenceLength - 1; i >= 0; i-- { + // Get 11 right most bits and bitshift 11 to the right for next time. + word.And(entropyInt, last11BitsMask) + entropyInt.Div(entropyInt, shift11BitsMask) + + // Get the bytes representing the 11 bits as a 2 byte slice. + wordBytes := padByteSlice(word.Bytes(), 2) + + // Convert bytes to an index and add that word to the list. + words[i] = wordList[binary.BigEndian.Uint16(wordBytes)] + } + + return strings.Join(words, " "), nil +} + +// MnemonicToByteArray takes a mnemonic string and turns it into a byte array +// suitable for creating another mnemonic. +// An error is returned if the mnemonic is invalid. +func MnemonicToByteArray(mnemonic string, raw ...bool) ([]byte, error) { + var ( + mnemonicSlice = strings.Split(mnemonic, " ") + entropyBitSize = len(mnemonicSlice) * 11 + checksumBitSize = entropyBitSize % 32 + fullByteSize = (entropyBitSize-checksumBitSize)/8 + 1 + checksumByteSize = fullByteSize - (fullByteSize % 4) + ) + + // Pre validate that the mnemonic is well formed and only contains words that + // are present in the word list. + if !IsMnemonicValid(mnemonic) { + return nil, ErrInvalidMnemonic + } + + // Convert word indices to a big.Int representing the entropy. + checksummedEntropy := big.NewInt(0) + modulo := big.NewInt(2048) + for _, v := range mnemonicSlice { + index := big.NewInt(int64(wordMap[v])) + checksummedEntropy.Mul(checksummedEntropy, modulo) + checksummedEntropy.Add(checksummedEntropy, index) + } + + // Calculate the unchecksummed entropy so we can validate that the checksum is + // correct. + checksumModulo := big.NewInt(0).Exp(bigTwo, big.NewInt(int64(checksumBitSize)), nil) + rawEntropy := big.NewInt(0).Div(checksummedEntropy, checksumModulo) + + // Convert big.Ints to byte padded byte slices. + rawEntropyBytes := padByteSlice(rawEntropy.Bytes(), checksumByteSize) + checksummedEntropyBytes := padByteSlice(checksummedEntropy.Bytes(), fullByteSize) + + // Validate that the checksum is correct. 
+ newChecksummedEntropyBytes := padByteSlice(addChecksum(rawEntropyBytes), fullByteSize) + if !compareByteSlices(checksummedEntropyBytes, newChecksummedEntropyBytes) { + return nil, ErrChecksumIncorrect + } + + if len(raw) > 0 && raw[0] { + return rawEntropyBytes, nil + } + + return checksummedEntropyBytes, nil +} + +// NewSeedWithErrorChecking creates a hashed seed output given the mnemonic string and a password. +// An error is returned if the mnemonic is not convertible to a byte array. +func NewSeedWithErrorChecking(mnemonic string, password string) ([]byte, error) { + _, err := MnemonicToByteArray(mnemonic) + if err != nil { + return nil, err + } + return NewSeed(mnemonic, password), nil +} + +// NewSeed creates a hashed seed output given a provided string and password. +// No checking is performed to validate that the string provided is a valid mnemonic. +func NewSeed(mnemonic string, password string) []byte { + return pbkdf2.Key([]byte(mnemonic), []byte("mnemonic"+password), 2048, 64, sha512.New) +} + +// IsMnemonicValid attempts to verify that the provided mnemonic is valid. +// Validity is determined by both the number of words being appropriate, +// and that all the words in the mnemonic are present in the word list. +func IsMnemonicValid(mnemonic string) bool { + _, err := EntropyFromMnemonic(mnemonic) + return err == nil +} + +// Appends to data the first (len(data) / 32)bits of the result of sha256(data) +// Currently only supports data up to 32 bytes +func addChecksum(data []byte) []byte { + // Get first byte of sha256 + hash := computeChecksum(data) + firstChecksumByte := hash[0] + + // len() is in bytes so we divide by 4 + checksumBitLength := uint(len(data) / 4) + + // For each bit of check sum we want we shift the data one the left + // and then set the (new) right most bit equal to checksum bit at that index + // staring from the left + dataBigInt := new(big.Int).SetBytes(data) + for i := range checksumBitLength { + // Bitshift 1 left + dataBigInt.Mul(dataBigInt, bigTwo) + + // Set rightmost bit if leftmost checksum bit is set + if uint8(firstChecksumByte&(1<<(7-i))) > 0 { + dataBigInt.Or(dataBigInt, bigOne) + } + } + + return dataBigInt.Bytes() +} + +func computeChecksum(data []byte) []byte { + hasher := sha256.New() + hasher.Write(data) + return hasher.Sum(nil) +} + +// validateEntropyBitSize ensures that entropy is the correct size for being a +// mnemonic. +func validateEntropyBitSize(bitSize int) error { + if (bitSize%32) != 0 || bitSize < 128 || bitSize > 256 { + return ErrEntropyLengthInvalid + } + return nil +} + +// padByteSlice returns a byte slice of the given size with contents of the +// given slice left padded and any empty spaces filled with 0's. +func padByteSlice(slice []byte, length int) []byte { + offset := length - len(slice) + if offset <= 0 { + return slice + } + newSlice := make([]byte, length) + copy(newSlice[offset:], slice) + return newSlice +} + +// compareByteSlices returns true of the byte slices have equal contents and +// returns false otherwise. 
+func compareByteSlices(a, b []byte) bool { + if len(a) != len(b) { + return false + } + for i := range a { + if a[i] != b[i] { + return false + } + } + return true +} + +func splitMnemonicWords(mnemonic string) ([]string, bool) { + // Create a list of all the words in the mnemonic sentence + words := strings.Fields(mnemonic) + + // Get num of words + numOfWords := len(words) + + // The number of words should be 12, 15, 18, 21 or 24 + if numOfWords%3 != 0 || numOfWords < 12 || numOfWords > 24 { + return nil, false + } + return words, true +} diff --git a/third_party/go-bip39/go.mod b/third_party/go-bip39/go.mod new file mode 100644 index 0000000000..5fc1ddf31c --- /dev/null +++ b/third_party/go-bip39/go.mod @@ -0,0 +1,5 @@ +module github.com/tyler-smith/go-bip39 + +go 1.25 + +require golang.org/x/crypto v0.40.0 diff --git a/third_party/go-bip39/wordlists/BUILD.bazel b/third_party/go-bip39/wordlists/BUILD.bazel new file mode 100644 index 0000000000..0f79848273 --- /dev/null +++ b/third_party/go-bip39/wordlists/BUILD.bazel @@ -0,0 +1,20 @@ +load("@prysm//tools/go:def.bzl", "go_library") + +# gazelle:prefix github.com/tyler-smith/go-bip39/wordlists + +go_library( + name = "go_default_library", + srcs = [ + "chinese_simplified.go", + "chinese_traditional.go", + "czech.go", + "english.go", + "french.go", + "italian.go", + "japanese.go", + "korean.go", + "spanish.go", + ], + importpath = "github.com/tyler-smith/go-bip39/wordlists", + visibility = ["//visibility:public"], +) diff --git a/third_party/go-bip39/wordlists/chinese_simplified.go b/third_party/go-bip39/wordlists/chinese_simplified.go new file mode 100644 index 0000000000..0ee2972704 --- /dev/null +++ b/third_party/go-bip39/wordlists/chinese_simplified.go @@ -0,0 +1,2071 @@ +package wordlists + +import ( + "fmt" + "hash/crc32" + "strings" +) + +func init() { + // Ensure word list is correct + // $ wget https://raw.githubusercontent.com/bitcoin/bips/master/bip-0039/chinese_simplified.txt + // $ crc32 chinese_simplified.txt + // e3721bbf + checksum := crc32.ChecksumIEEE([]byte(chineseSimplified)) + if fmt.Sprintf("%x", checksum) != "e3721bbf" { + panic("chineseSimplified checksum invalid") + } +} + +// ChineseSimplified is a slice of mnemonic words taken from the bip39 specification +// https://raw.githubusercontent.com/bitcoin/bips/master/bip-0039/chinese_simplified.txt +var ChineseSimplified = strings.Split(strings.TrimSpace(chineseSimplified), "\n") +var chineseSimplified = `的 +一 +是 +在 +不 +了 +有 +和 +人 +这 +中 +大 +为 +上 +个 +国 +我 +以 +要 +他 +时 +来 +用 +们 +生 +到 +作 +地 +于 +出 +就 +分 +对 +成 +会 +可 +主 +发 +年 +动 +同 +工 +也 +能 +下 +过 +子 +说 +产 +种 +面 +而 +方 +后 +多 +定 +行 +学 +法 +所 +民 +得 +经 +十 +三 +之 +进 +着 +等 +部 +度 +家 +电 +力 +里 +如 +水 +化 +高 +自 +二 +理 +起 +小 +物 +现 +实 +加 +量 +都 +两 +体 +制 +机 +当 +使 +点 +从 +业 +本 +去 +把 +性 +好 +应 +开 +它 +合 +还 +因 +由 +其 +些 +然 +前 +外 +天 +政 +四 +日 +那 +社 +义 +事 +平 +形 +相 +全 +表 +间 +样 +与 +关 +各 +重 +新 +线 +内 +数 +正 +心 +反 +你 +明 +看 +原 +又 +么 +利 +比 +或 +但 +质 +气 +第 +向 +道 +命 +此 +变 +条 +只 +没 +结 +解 +问 +意 +建 +月 +公 +无 +系 +军 +很 +情 +者 +最 +立 +代 +想 +已 +通 +并 +提 +直 +题 +党 +程 +展 +五 +果 +料 +象 +员 +革 +位 +入 +常 +文 +总 +次 +品 +式 +活 +设 +及 +管 +特 +件 +长 +求 +老 +头 +基 +资 +边 +流 +路 +级 +少 +图 +山 +统 +接 +知 +较 +将 +组 +见 +计 +别 +她 +手 +角 +期 +根 +论 +运 +农 +指 +几 +九 +区 +强 +放 +决 +西 +被 +干 +做 +必 +战 +先 +回 +则 +任 +取 +据 +处 +队 +南 +给 +色 +光 +门 +即 +保 +治 +北 +造 +百 +规 +热 +领 +七 +海 +口 +东 +导 +器 +压 +志 +世 +金 +增 +争 +济 +阶 +油 +思 +术 +极 +交 +受 +联 +什 +认 +六 +共 +权 +收 +证 +改 +清 +美 +再 +采 +转 +更 +单 +风 +切 +打 +白 +教 +速 +花 +带 +安 +场 +身 +车 +例 +真 +务 +具 +万 +每 +目 +至 +达 +走 +积 +示 +议 +声 +报 +斗 +完 +类 +八 +离 +华 +名 +确 +才 +科 +张 +信 +马 +节 +话 +米 +整 +空 +元 +况 +今 +集 +温 +传 
+土 +许 +步 +群 +广 +石 +记 +需 +段 +研 +界 +拉 +林 +律 +叫 +且 +究 +观 +越 +织 +装 +影 +算 +低 +持 +音 +众 +书 +布 +复 +容 +儿 +须 +际 +商 +非 +验 +连 +断 +深 +难 +近 +矿 +千 +周 +委 +素 +技 +备 +半 +办 +青 +省 +列 +习 +响 +约 +支 +般 +史 +感 +劳 +便 +团 +往 +酸 +历 +市 +克 +何 +除 +消 +构 +府 +称 +太 +准 +精 +值 +号 +率 +族 +维 +划 +选 +标 +写 +存 +候 +毛 +亲 +快 +效 +斯 +院 +查 +江 +型 +眼 +王 +按 +格 +养 +易 +置 +派 +层 +片 +始 +却 +专 +状 +育 +厂 +京 +识 +适 +属 +圆 +包 +火 +住 +调 +满 +县 +局 +照 +参 +红 +细 +引 +听 +该 +铁 +价 +严 +首 +底 +液 +官 +德 +随 +病 +苏 +失 +尔 +死 +讲 +配 +女 +黄 +推 +显 +谈 +罪 +神 +艺 +呢 +席 +含 +企 +望 +密 +批 +营 +项 +防 +举 +球 +英 +氧 +势 +告 +李 +台 +落 +木 +帮 +轮 +破 +亚 +师 +围 +注 +远 +字 +材 +排 +供 +河 +态 +封 +另 +施 +减 +树 +溶 +怎 +止 +案 +言 +士 +均 +武 +固 +叶 +鱼 +波 +视 +仅 +费 +紧 +爱 +左 +章 +早 +朝 +害 +续 +轻 +服 +试 +食 +充 +兵 +源 +判 +护 +司 +足 +某 +练 +差 +致 +板 +田 +降 +黑 +犯 +负 +击 +范 +继 +兴 +似 +余 +坚 +曲 +输 +修 +故 +城 +夫 +够 +送 +笔 +船 +占 +右 +财 +吃 +富 +春 +职 +觉 +汉 +画 +功 +巴 +跟 +虽 +杂 +飞 +检 +吸 +助 +升 +阳 +互 +初 +创 +抗 +考 +投 +坏 +策 +古 +径 +换 +未 +跑 +留 +钢 +曾 +端 +责 +站 +简 +述 +钱 +副 +尽 +帝 +射 +草 +冲 +承 +独 +令 +限 +阿 +宣 +环 +双 +请 +超 +微 +让 +控 +州 +良 +轴 +找 +否 +纪 +益 +依 +优 +顶 +础 +载 +倒 +房 +突 +坐 +粉 +敌 +略 +客 +袁 +冷 +胜 +绝 +析 +块 +剂 +测 +丝 +协 +诉 +念 +陈 +仍 +罗 +盐 +友 +洋 +错 +苦 +夜 +刑 +移 +频 +逐 +靠 +混 +母 +短 +皮 +终 +聚 +汽 +村 +云 +哪 +既 +距 +卫 +停 +烈 +央 +察 +烧 +迅 +境 +若 +印 +洲 +刻 +括 +激 +孔 +搞 +甚 +室 +待 +核 +校 +散 +侵 +吧 +甲 +游 +久 +菜 +味 +旧 +模 +湖 +货 +损 +预 +阻 +毫 +普 +稳 +乙 +妈 +植 +息 +扩 +银 +语 +挥 +酒 +守 +拿 +序 +纸 +医 +缺 +雨 +吗 +针 +刘 +啊 +急 +唱 +误 +训 +愿 +审 +附 +获 +茶 +鲜 +粮 +斤 +孩 +脱 +硫 +肥 +善 +龙 +演 +父 +渐 +血 +欢 +械 +掌 +歌 +沙 +刚 +攻 +谓 +盾 +讨 +晚 +粒 +乱 +燃 +矛 +乎 +杀 +药 +宁 +鲁 +贵 +钟 +煤 +读 +班 +伯 +香 +介 +迫 +句 +丰 +培 +握 +兰 +担 +弦 +蛋 +沉 +假 +穿 +执 +答 +乐 +谁 +顺 +烟 +缩 +征 +脸 +喜 +松 +脚 +困 +异 +免 +背 +星 +福 +买 +染 +井 +概 +慢 +怕 +磁 +倍 +祖 +皇 +促 +静 +补 +评 +翻 +肉 +践 +尼 +衣 +宽 +扬 +棉 +希 +伤 +操 +垂 +秋 +宜 +氢 +套 +督 +振 +架 +亮 +末 +宪 +庆 +编 +牛 +触 +映 +雷 +销 +诗 +座 +居 +抓 +裂 +胞 +呼 +娘 +景 +威 +绿 +晶 +厚 +盟 +衡 +鸡 +孙 +延 +危 +胶 +屋 +乡 +临 +陆 +顾 +掉 +呀 +灯 +岁 +措 +束 +耐 +剧 +玉 +赵 +跳 +哥 +季 +课 +凯 +胡 +额 +款 +绍 +卷 +齐 +伟 +蒸 +殖 +永 +宗 +苗 +川 +炉 +岩 +弱 +零 +杨 +奏 +沿 +露 +杆 +探 +滑 +镇 +饭 +浓 +航 +怀 +赶 +库 +夺 +伊 +灵 +税 +途 +灭 +赛 +归 +召 +鼓 +播 +盘 +裁 +险 +康 +唯 +录 +菌 +纯 +借 +糖 +盖 +横 +符 +私 +努 +堂 +域 +枪 +润 +幅 +哈 +竟 +熟 +虫 +泽 +脑 +壤 +碳 +欧 +遍 +侧 +寨 +敢 +彻 +虑 +斜 +薄 +庭 +纳 +弹 +饲 +伸 +折 +麦 +湿 +暗 +荷 +瓦 +塞 +床 +筑 +恶 +户 +访 +塔 +奇 +透 +梁 +刀 +旋 +迹 +卡 +氯 +遇 +份 +毒 +泥 +退 +洗 +摆 +灰 +彩 +卖 +耗 +夏 +择 +忙 +铜 +献 +硬 +予 +繁 +圈 +雪 +函 +亦 +抽 +篇 +阵 +阴 +丁 +尺 +追 +堆 +雄 +迎 +泛 +爸 +楼 +避 +谋 +吨 +野 +猪 +旗 +累 +偏 +典 +馆 +索 +秦 +脂 +潮 +爷 +豆 +忽 +托 +惊 +塑 +遗 +愈 +朱 +替 +纤 +粗 +倾 +尚 +痛 +楚 +谢 +奋 +购 +磨 +君 +池 +旁 +碎 +骨 +监 +捕 +弟 +暴 +割 +贯 +殊 +释 +词 +亡 +壁 +顿 +宝 +午 +尘 +闻 +揭 +炮 +残 +冬 +桥 +妇 +警 +综 +招 +吴 +付 +浮 +遭 +徐 +您 +摇 +谷 +赞 +箱 +隔 +订 +男 +吹 +园 +纷 +唐 +败 +宋 +玻 +巨 +耕 +坦 +荣 +闭 +湾 +键 +凡 +驻 +锅 +救 +恩 +剥 +凝 +碱 +齿 +截 +炼 +麻 +纺 +禁 +废 +盛 +版 +缓 +净 +睛 +昌 +婚 +涉 +筒 +嘴 +插 +岸 +朗 +庄 +街 +藏 +姑 +贸 +腐 +奴 +啦 +惯 +乘 +伙 +恢 +匀 +纱 +扎 +辩 +耳 +彪 +臣 +亿 +璃 +抵 +脉 +秀 +萨 +俄 +网 +舞 +店 +喷 +纵 +寸 +汗 +挂 +洪 +贺 +闪 +柬 +爆 +烯 +津 +稻 +墙 +软 +勇 +像 +滚 +厘 +蒙 +芳 +肯 +坡 +柱 +荡 +腿 +仪 +旅 +尾 +轧 +冰 +贡 +登 +黎 +削 +钻 +勒 +逃 +障 +氨 +郭 +峰 +币 +港 +伏 +轨 +亩 +毕 +擦 +莫 +刺 +浪 +秘 +援 +株 +健 +售 +股 +岛 +甘 +泡 +睡 +童 +铸 +汤 +阀 +休 +汇 +舍 +牧 +绕 +炸 +哲 +磷 +绩 +朋 +淡 +尖 +启 +陷 +柴 +呈 +徒 +颜 +泪 +稍 +忘 +泵 +蓝 +拖 +洞 +授 +镜 +辛 +壮 +锋 +贫 +虚 +弯 +摩 +泰 +幼 +廷 +尊 +窗 +纲 +弄 +隶 +疑 +氏 +宫 +姐 +震 +瑞 +怪 +尤 +琴 +循 +描 +膜 +违 +夹 +腰 +缘 +珠 +穷 +森 +枝 +竹 +沟 +催 +绳 +忆 +邦 +剩 +幸 +浆 +栏 +拥 +牙 +贮 +礼 +滤 +钠 +纹 +罢 +拍 +咱 +喊 +袖 +埃 +勤 +罚 +焦 +潜 +伍 +墨 +欲 +缝 +姓 +刊 +饱 +仿 +奖 +铝 +鬼 +丽 +跨 +默 +挖 +链 +扫 +喝 +袋 +炭 +污 +幕 +诸 +弧 +励 +梅 +奶 +洁 +灾 +舟 +鉴 +苯 +讼 +抱 +毁 +懂 +寒 +智 +埔 +寄 +届 +跃 +渡 +挑 +丹 +艰 +贝 +碰 +拔 +爹 +戴 +码 +梦 +芽 +熔 +赤 +渔 +哭 +敬 +颗 +奔 +铅 +仲 +虎 +稀 +妹 +乏 +珍 +申 +桌 +遵 +允 +隆 +螺 +仓 +魏 +锐 +晓 +氮 +兼 +隐 +碍 +赫 +拨 +忠 +肃 +缸 +牵 +抢 +博 +巧 +壳 +兄 +杜 +讯 +诚 +碧 +祥 +柯 +页 +巡 +矩 +悲 +灌 +龄 +伦 +票 +寻 +桂 +铺 +圣 +恐 +恰 +郑 +趣 +抬 +荒 +腾 +贴 +柔 +滴 +猛 +阔 +辆 +妻 +填 +撤 +储 +签 +闹 +扰 +紫 +砂 +递 +戏 +吊 +陶 +伐 +喂 +疗 +瓶 +婆 
+抚 +臂 +摸 +忍 +虾 +蜡 +邻 +胸 +巩 +挤 +偶 +弃 +槽 +劲 +乳 +邓 +吉 +仁 +烂 +砖 +租 +乌 +舰 +伴 +瓜 +浅 +丙 +暂 +燥 +橡 +柳 +迷 +暖 +牌 +秧 +胆 +详 +簧 +踏 +瓷 +谱 +呆 +宾 +糊 +洛 +辉 +愤 +竞 +隙 +怒 +粘 +乃 +绪 +肩 +籍 +敏 +涂 +熙 +皆 +侦 +悬 +掘 +享 +纠 +醒 +狂 +锁 +淀 +恨 +牲 +霸 +爬 +赏 +逆 +玩 +陵 +祝 +秒 +浙 +貌 +役 +彼 +悉 +鸭 +趋 +凤 +晨 +畜 +辈 +秩 +卵 +署 +梯 +炎 +滩 +棋 +驱 +筛 +峡 +冒 +啥 +寿 +译 +浸 +泉 +帽 +迟 +硅 +疆 +贷 +漏 +稿 +冠 +嫩 +胁 +芯 +牢 +叛 +蚀 +奥 +鸣 +岭 +羊 +凭 +串 +塘 +绘 +酵 +融 +盆 +锡 +庙 +筹 +冻 +辅 +摄 +袭 +筋 +拒 +僚 +旱 +钾 +鸟 +漆 +沈 +眉 +疏 +添 +棒 +穗 +硝 +韩 +逼 +扭 +侨 +凉 +挺 +碗 +栽 +炒 +杯 +患 +馏 +劝 +豪 +辽 +勃 +鸿 +旦 +吏 +拜 +狗 +埋 +辊 +掩 +饮 +搬 +骂 +辞 +勾 +扣 +估 +蒋 +绒 +雾 +丈 +朵 +姆 +拟 +宇 +辑 +陕 +雕 +偿 +蓄 +崇 +剪 +倡 +厅 +咬 +驶 +薯 +刷 +斥 +番 +赋 +奉 +佛 +浇 +漫 +曼 +扇 +钙 +桃 +扶 +仔 +返 +俗 +亏 +腔 +鞋 +棱 +覆 +框 +悄 +叔 +撞 +骗 +勘 +旺 +沸 +孤 +吐 +孟 +渠 +屈 +疾 +妙 +惜 +仰 +狠 +胀 +谐 +抛 +霉 +桑 +岗 +嘛 +衰 +盗 +渗 +脏 +赖 +涌 +甜 +曹 +阅 +肌 +哩 +厉 +烃 +纬 +毅 +昨 +伪 +症 +煮 +叹 +钉 +搭 +茎 +笼 +酷 +偷 +弓 +锥 +恒 +杰 +坑 +鼻 +翼 +纶 +叙 +狱 +逮 +罐 +络 +棚 +抑 +膨 +蔬 +寺 +骤 +穆 +冶 +枯 +册 +尸 +凸 +绅 +坯 +牺 +焰 +轰 +欣 +晋 +瘦 +御 +锭 +锦 +丧 +旬 +锻 +垄 +搜 +扑 +邀 +亭 +酯 +迈 +舒 +脆 +酶 +闲 +忧 +酚 +顽 +羽 +涨 +卸 +仗 +陪 +辟 +惩 +杭 +姚 +肚 +捉 +飘 +漂 +昆 +欺 +吾 +郎 +烷 +汁 +呵 +饰 +萧 +雅 +邮 +迁 +燕 +撒 +姻 +赴 +宴 +烦 +债 +帐 +斑 +铃 +旨 +醇 +董 +饼 +雏 +姿 +拌 +傅 +腹 +妥 +揉 +贤 +拆 +歪 +葡 +胺 +丢 +浩 +徽 +昂 +垫 +挡 +览 +贪 +慰 +缴 +汪 +慌 +冯 +诺 +姜 +谊 +凶 +劣 +诬 +耀 +昏 +躺 +盈 +骑 +乔 +溪 +丛 +卢 +抹 +闷 +咨 +刮 +驾 +缆 +悟 +摘 +铒 +掷 +颇 +幻 +柄 +惠 +惨 +佳 +仇 +腊 +窝 +涤 +剑 +瞧 +堡 +泼 +葱 +罩 +霍 +捞 +胎 +苍 +滨 +俩 +捅 +湘 +砍 +霞 +邵 +萄 +疯 +淮 +遂 +熊 +粪 +烘 +宿 +档 +戈 +驳 +嫂 +裕 +徙 +箭 +捐 +肠 +撑 +晒 +辨 +殿 +莲 +摊 +搅 +酱 +屏 +疫 +哀 +蔡 +堵 +沫 +皱 +畅 +叠 +阁 +莱 +敲 +辖 +钩 +痕 +坝 +巷 +饿 +祸 +丘 +玄 +溜 +曰 +逻 +彭 +尝 +卿 +妨 +艇 +吞 +韦 +怨 +矮 +歇 +` diff --git a/third_party/go-bip39/wordlists/chinese_traditional.go b/third_party/go-bip39/wordlists/chinese_traditional.go new file mode 100644 index 0000000000..83812cee5f --- /dev/null +++ b/third_party/go-bip39/wordlists/chinese_traditional.go @@ -0,0 +1,2071 @@ +package wordlists + +import ( + "fmt" + "hash/crc32" + "strings" +) + +func init() { + // Ensure word list is correct + // $ wget https://raw.githubusercontent.com/bitcoin/bips/master/bip-0039/chinese_traditional.txt + // $ crc32 chinese_traditional.txt + // 3c20b443 + checksum := crc32.ChecksumIEEE([]byte(chineseTraditional)) + if fmt.Sprintf("%x", checksum) != "3c20b443" { + panic("chineseTraditional checksum invalid") + } +} + +// ChineseTraditional is a slice of mnemonic words taken from the bip39 specification +// https://raw.githubusercontent.com/bitcoin/bips/master/bip-0039/chinese_traditional.txt +var ChineseTraditional = strings.Split(strings.TrimSpace(chineseTraditional), "\n") +var chineseTraditional = `的 +一 +是 +在 +不 +了 +有 +和 +人 +這 +中 +大 +為 +上 +個 +國 +我 +以 +要 +他 +時 +來 +用 +們 +生 +到 +作 +地 +於 +出 +就 +分 +對 +成 +會 +可 +主 +發 +年 +動 +同 +工 +也 +能 +下 +過 +子 +說 +產 +種 +面 +而 +方 +後 +多 +定 +行 +學 +法 +所 +民 +得 +經 +十 +三 +之 +進 +著 +等 +部 +度 +家 +電 +力 +裡 +如 +水 +化 +高 +自 +二 +理 +起 +小 +物 +現 +實 +加 +量 +都 +兩 +體 +制 +機 +當 +使 +點 +從 +業 +本 +去 +把 +性 +好 +應 +開 +它 +合 +還 +因 +由 +其 +些 +然 +前 +外 +天 +政 +四 +日 +那 +社 +義 +事 +平 +形 +相 +全 +表 +間 +樣 +與 +關 +各 +重 +新 +線 +內 +數 +正 +心 +反 +你 +明 +看 +原 +又 +麼 +利 +比 +或 +但 +質 +氣 +第 +向 +道 +命 +此 +變 +條 +只 +沒 +結 +解 +問 +意 +建 +月 +公 +無 +系 +軍 +很 +情 +者 +最 +立 +代 +想 +已 +通 +並 +提 +直 +題 +黨 +程 +展 +五 +果 +料 +象 +員 +革 +位 +入 +常 +文 +總 +次 +品 +式 +活 +設 +及 +管 +特 +件 +長 +求 +老 +頭 +基 +資 +邊 +流 +路 +級 +少 +圖 +山 +統 +接 +知 +較 +將 +組 +見 +計 +別 +她 +手 +角 +期 +根 +論 +運 +農 +指 +幾 +九 +區 +強 +放 +決 +西 +被 +幹 +做 +必 +戰 +先 +回 +則 +任 +取 +據 +處 +隊 +南 +給 +色 +光 +門 +即 +保 +治 +北 +造 +百 +規 +熱 +領 +七 +海 +口 +東 +導 +器 +壓 +志 +世 +金 +增 +爭 +濟 +階 +油 +思 +術 +極 +交 +受 +聯 +什 +認 +六 +共 +權 +收 +證 +改 +清 +美 +再 +採 +轉 +更 +單 +風 +切 +打 +白 +教 +速 +花 +帶 +安 +場 +身 +車 +例 +真 +務 +具 +萬 +每 +目 +至 +達 +走 +積 +示 +議 +聲 +報 +鬥 +完 +類 +八 +離 +華 +名 +確 +才 +科 +張 +信 +馬 +節 +話 
+米 +整 +空 +元 +況 +今 +集 +溫 +傳 +土 +許 +步 +群 +廣 +石 +記 +需 +段 +研 +界 +拉 +林 +律 +叫 +且 +究 +觀 +越 +織 +裝 +影 +算 +低 +持 +音 +眾 +書 +布 +复 +容 +兒 +須 +際 +商 +非 +驗 +連 +斷 +深 +難 +近 +礦 +千 +週 +委 +素 +技 +備 +半 +辦 +青 +省 +列 +習 +響 +約 +支 +般 +史 +感 +勞 +便 +團 +往 +酸 +歷 +市 +克 +何 +除 +消 +構 +府 +稱 +太 +準 +精 +值 +號 +率 +族 +維 +劃 +選 +標 +寫 +存 +候 +毛 +親 +快 +效 +斯 +院 +查 +江 +型 +眼 +王 +按 +格 +養 +易 +置 +派 +層 +片 +始 +卻 +專 +狀 +育 +廠 +京 +識 +適 +屬 +圓 +包 +火 +住 +調 +滿 +縣 +局 +照 +參 +紅 +細 +引 +聽 +該 +鐵 +價 +嚴 +首 +底 +液 +官 +德 +隨 +病 +蘇 +失 +爾 +死 +講 +配 +女 +黃 +推 +顯 +談 +罪 +神 +藝 +呢 +席 +含 +企 +望 +密 +批 +營 +項 +防 +舉 +球 +英 +氧 +勢 +告 +李 +台 +落 +木 +幫 +輪 +破 +亞 +師 +圍 +注 +遠 +字 +材 +排 +供 +河 +態 +封 +另 +施 +減 +樹 +溶 +怎 +止 +案 +言 +士 +均 +武 +固 +葉 +魚 +波 +視 +僅 +費 +緊 +愛 +左 +章 +早 +朝 +害 +續 +輕 +服 +試 +食 +充 +兵 +源 +判 +護 +司 +足 +某 +練 +差 +致 +板 +田 +降 +黑 +犯 +負 +擊 +范 +繼 +興 +似 +餘 +堅 +曲 +輸 +修 +故 +城 +夫 +夠 +送 +筆 +船 +佔 +右 +財 +吃 +富 +春 +職 +覺 +漢 +畫 +功 +巴 +跟 +雖 +雜 +飛 +檢 +吸 +助 +昇 +陽 +互 +初 +創 +抗 +考 +投 +壞 +策 +古 +徑 +換 +未 +跑 +留 +鋼 +曾 +端 +責 +站 +簡 +述 +錢 +副 +盡 +帝 +射 +草 +衝 +承 +獨 +令 +限 +阿 +宣 +環 +雙 +請 +超 +微 +讓 +控 +州 +良 +軸 +找 +否 +紀 +益 +依 +優 +頂 +礎 +載 +倒 +房 +突 +坐 +粉 +敵 +略 +客 +袁 +冷 +勝 +絕 +析 +塊 +劑 +測 +絲 +協 +訴 +念 +陳 +仍 +羅 +鹽 +友 +洋 +錯 +苦 +夜 +刑 +移 +頻 +逐 +靠 +混 +母 +短 +皮 +終 +聚 +汽 +村 +雲 +哪 +既 +距 +衛 +停 +烈 +央 +察 +燒 +迅 +境 +若 +印 +洲 +刻 +括 +激 +孔 +搞 +甚 +室 +待 +核 +校 +散 +侵 +吧 +甲 +遊 +久 +菜 +味 +舊 +模 +湖 +貨 +損 +預 +阻 +毫 +普 +穩 +乙 +媽 +植 +息 +擴 +銀 +語 +揮 +酒 +守 +拿 +序 +紙 +醫 +缺 +雨 +嗎 +針 +劉 +啊 +急 +唱 +誤 +訓 +願 +審 +附 +獲 +茶 +鮮 +糧 +斤 +孩 +脫 +硫 +肥 +善 +龍 +演 +父 +漸 +血 +歡 +械 +掌 +歌 +沙 +剛 +攻 +謂 +盾 +討 +晚 +粒 +亂 +燃 +矛 +乎 +殺 +藥 +寧 +魯 +貴 +鐘 +煤 +讀 +班 +伯 +香 +介 +迫 +句 +豐 +培 +握 +蘭 +擔 +弦 +蛋 +沉 +假 +穿 +執 +答 +樂 +誰 +順 +煙 +縮 +徵 +臉 +喜 +松 +腳 +困 +異 +免 +背 +星 +福 +買 +染 +井 +概 +慢 +怕 +磁 +倍 +祖 +皇 +促 +靜 +補 +評 +翻 +肉 +踐 +尼 +衣 +寬 +揚 +棉 +希 +傷 +操 +垂 +秋 +宜 +氫 +套 +督 +振 +架 +亮 +末 +憲 +慶 +編 +牛 +觸 +映 +雷 +銷 +詩 +座 +居 +抓 +裂 +胞 +呼 +娘 +景 +威 +綠 +晶 +厚 +盟 +衡 +雞 +孫 +延 +危 +膠 +屋 +鄉 +臨 +陸 +顧 +掉 +呀 +燈 +歲 +措 +束 +耐 +劇 +玉 +趙 +跳 +哥 +季 +課 +凱 +胡 +額 +款 +紹 +卷 +齊 +偉 +蒸 +殖 +永 +宗 +苗 +川 +爐 +岩 +弱 +零 +楊 +奏 +沿 +露 +桿 +探 +滑 +鎮 +飯 +濃 +航 +懷 +趕 +庫 +奪 +伊 +靈 +稅 +途 +滅 +賽 +歸 +召 +鼓 +播 +盤 +裁 +險 +康 +唯 +錄 +菌 +純 +借 +糖 +蓋 +橫 +符 +私 +努 +堂 +域 +槍 +潤 +幅 +哈 +竟 +熟 +蟲 +澤 +腦 +壤 +碳 +歐 +遍 +側 +寨 +敢 +徹 +慮 +斜 +薄 +庭 +納 +彈 +飼 +伸 +折 +麥 +濕 +暗 +荷 +瓦 +塞 +床 +築 +惡 +戶 +訪 +塔 +奇 +透 +梁 +刀 +旋 +跡 +卡 +氯 +遇 +份 +毒 +泥 +退 +洗 +擺 +灰 +彩 +賣 +耗 +夏 +擇 +忙 +銅 +獻 +硬 +予 +繁 +圈 +雪 +函 +亦 +抽 +篇 +陣 +陰 +丁 +尺 +追 +堆 +雄 +迎 +泛 +爸 +樓 +避 +謀 +噸 +野 +豬 +旗 +累 +偏 +典 +館 +索 +秦 +脂 +潮 +爺 +豆 +忽 +托 +驚 +塑 +遺 +愈 +朱 +替 +纖 +粗 +傾 +尚 +痛 +楚 +謝 +奮 +購 +磨 +君 +池 +旁 +碎 +骨 +監 +捕 +弟 +暴 +割 +貫 +殊 +釋 +詞 +亡 +壁 +頓 +寶 +午 +塵 +聞 +揭 +炮 +殘 +冬 +橋 +婦 +警 +綜 +招 +吳 +付 +浮 +遭 +徐 +您 +搖 +谷 +贊 +箱 +隔 +訂 +男 +吹 +園 +紛 +唐 +敗 +宋 +玻 +巨 +耕 +坦 +榮 +閉 +灣 +鍵 +凡 +駐 +鍋 +救 +恩 +剝 +凝 +鹼 +齒 +截 +煉 +麻 +紡 +禁 +廢 +盛 +版 +緩 +淨 +睛 +昌 +婚 +涉 +筒 +嘴 +插 +岸 +朗 +莊 +街 +藏 +姑 +貿 +腐 +奴 +啦 +慣 +乘 +夥 +恢 +勻 +紗 +扎 +辯 +耳 +彪 +臣 +億 +璃 +抵 +脈 +秀 +薩 +俄 +網 +舞 +店 +噴 +縱 +寸 +汗 +掛 +洪 +賀 +閃 +柬 +爆 +烯 +津 +稻 +牆 +軟 +勇 +像 +滾 +厘 +蒙 +芳 +肯 +坡 +柱 +盪 +腿 +儀 +旅 +尾 +軋 +冰 +貢 +登 +黎 +削 +鑽 +勒 +逃 +障 +氨 +郭 +峰 +幣 +港 +伏 +軌 +畝 +畢 +擦 +莫 +刺 +浪 +秘 +援 +株 +健 +售 +股 +島 +甘 +泡 +睡 +童 +鑄 +湯 +閥 +休 +匯 +舍 +牧 +繞 +炸 +哲 +磷 +績 +朋 +淡 +尖 +啟 +陷 +柴 +呈 +徒 +顏 +淚 +稍 +忘 +泵 +藍 +拖 +洞 +授 +鏡 +辛 +壯 +鋒 +貧 +虛 +彎 +摩 +泰 +幼 +廷 +尊 +窗 +綱 +弄 +隸 +疑 +氏 +宮 +姐 +震 +瑞 +怪 +尤 +琴 +循 +描 +膜 +違 +夾 +腰 +緣 +珠 +窮 +森 +枝 +竹 +溝 +催 +繩 +憶 +邦 +剩 +幸 +漿 +欄 +擁 +牙 +貯 +禮 +濾 +鈉 +紋 +罷 +拍 +咱 +喊 +袖 +埃 +勤 +罰 +焦 +潛 +伍 +墨 +欲 +縫 +姓 +刊 +飽 +仿 +獎 +鋁 +鬼 +麗 +跨 +默 +挖 +鏈 +掃 +喝 +袋 +炭 +污 +幕 +諸 +弧 +勵 +梅 +奶 +潔 +災 +舟 +鑑 +苯 +訟 +抱 +毀 +懂 +寒 +智 +埔 +寄 +屆 +躍 +渡 +挑 +丹 +艱 +貝 +碰 +拔 +爹 +戴 +碼 +夢 +芽 +熔 +赤 +漁 +哭 +敬 +顆 +奔 +鉛 +仲 +虎 +稀 +妹 +乏 +珍 +申 +桌 +遵 +允 +隆 +螺 +倉 +魏 +銳 +曉 +氮 +兼 +隱 +礙 +赫 +撥 +忠 +肅 +缸 +牽 +搶 +博 +巧 +殼 +兄 +杜 +訊 +誠 +碧 +祥 +柯 +頁 +巡 +矩 +悲 +灌 +齡 +倫 +票 +尋 +桂 +鋪 +聖 +恐 +恰 +鄭 +趣 +抬 +荒 +騰 +貼 +柔 +滴 +猛 +闊 +輛 +妻 +填 +撤 +儲 +簽 +鬧 +擾 +紫 +砂 
+遞 +戲 +吊 +陶 +伐 +餵 +療 +瓶 +婆 +撫 +臂 +摸 +忍 +蝦 +蠟 +鄰 +胸 +鞏 +擠 +偶 +棄 +槽 +勁 +乳 +鄧 +吉 +仁 +爛 +磚 +租 +烏 +艦 +伴 +瓜 +淺 +丙 +暫 +燥 +橡 +柳 +迷 +暖 +牌 +秧 +膽 +詳 +簧 +踏 +瓷 +譜 +呆 +賓 +糊 +洛 +輝 +憤 +競 +隙 +怒 +粘 +乃 +緒 +肩 +籍 +敏 +塗 +熙 +皆 +偵 +懸 +掘 +享 +糾 +醒 +狂 +鎖 +淀 +恨 +牲 +霸 +爬 +賞 +逆 +玩 +陵 +祝 +秒 +浙 +貌 +役 +彼 +悉 +鴨 +趨 +鳳 +晨 +畜 +輩 +秩 +卵 +署 +梯 +炎 +灘 +棋 +驅 +篩 +峽 +冒 +啥 +壽 +譯 +浸 +泉 +帽 +遲 +矽 +疆 +貸 +漏 +稿 +冠 +嫩 +脅 +芯 +牢 +叛 +蝕 +奧 +鳴 +嶺 +羊 +憑 +串 +塘 +繪 +酵 +融 +盆 +錫 +廟 +籌 +凍 +輔 +攝 +襲 +筋 +拒 +僚 +旱 +鉀 +鳥 +漆 +沈 +眉 +疏 +添 +棒 +穗 +硝 +韓 +逼 +扭 +僑 +涼 +挺 +碗 +栽 +炒 +杯 +患 +餾 +勸 +豪 +遼 +勃 +鴻 +旦 +吏 +拜 +狗 +埋 +輥 +掩 +飲 +搬 +罵 +辭 +勾 +扣 +估 +蔣 +絨 +霧 +丈 +朵 +姆 +擬 +宇 +輯 +陝 +雕 +償 +蓄 +崇 +剪 +倡 +廳 +咬 +駛 +薯 +刷 +斥 +番 +賦 +奉 +佛 +澆 +漫 +曼 +扇 +鈣 +桃 +扶 +仔 +返 +俗 +虧 +腔 +鞋 +棱 +覆 +框 +悄 +叔 +撞 +騙 +勘 +旺 +沸 +孤 +吐 +孟 +渠 +屈 +疾 +妙 +惜 +仰 +狠 +脹 +諧 +拋 +黴 +桑 +崗 +嘛 +衰 +盜 +滲 +臟 +賴 +湧 +甜 +曹 +閱 +肌 +哩 +厲 +烴 +緯 +毅 +昨 +偽 +症 +煮 +嘆 +釘 +搭 +莖 +籠 +酷 +偷 +弓 +錐 +恆 +傑 +坑 +鼻 +翼 +綸 +敘 +獄 +逮 +罐 +絡 +棚 +抑 +膨 +蔬 +寺 +驟 +穆 +冶 +枯 +冊 +屍 +凸 +紳 +坯 +犧 +焰 +轟 +欣 +晉 +瘦 +禦 +錠 +錦 +喪 +旬 +鍛 +壟 +搜 +撲 +邀 +亭 +酯 +邁 +舒 +脆 +酶 +閒 +憂 +酚 +頑 +羽 +漲 +卸 +仗 +陪 +闢 +懲 +杭 +姚 +肚 +捉 +飄 +漂 +昆 +欺 +吾 +郎 +烷 +汁 +呵 +飾 +蕭 +雅 +郵 +遷 +燕 +撒 +姻 +赴 +宴 +煩 +債 +帳 +斑 +鈴 +旨 +醇 +董 +餅 +雛 +姿 +拌 +傅 +腹 +妥 +揉 +賢 +拆 +歪 +葡 +胺 +丟 +浩 +徽 +昂 +墊 +擋 +覽 +貪 +慰 +繳 +汪 +慌 +馮 +諾 +姜 +誼 +兇 +劣 +誣 +耀 +昏 +躺 +盈 +騎 +喬 +溪 +叢 +盧 +抹 +悶 +諮 +刮 +駕 +纜 +悟 +摘 +鉺 +擲 +頗 +幻 +柄 +惠 +慘 +佳 +仇 +臘 +窩 +滌 +劍 +瞧 +堡 +潑 +蔥 +罩 +霍 +撈 +胎 +蒼 +濱 +倆 +捅 +湘 +砍 +霞 +邵 +萄 +瘋 +淮 +遂 +熊 +糞 +烘 +宿 +檔 +戈 +駁 +嫂 +裕 +徙 +箭 +捐 +腸 +撐 +曬 +辨 +殿 +蓮 +攤 +攪 +醬 +屏 +疫 +哀 +蔡 +堵 +沫 +皺 +暢 +疊 +閣 +萊 +敲 +轄 +鉤 +痕 +壩 +巷 +餓 +禍 +丘 +玄 +溜 +曰 +邏 +彭 +嘗 +卿 +妨 +艇 +吞 +韋 +怨 +矮 +歇 +` diff --git a/third_party/go-bip39/wordlists/czech.go b/third_party/go-bip39/wordlists/czech.go new file mode 100644 index 0000000000..284dcf18f7 --- /dev/null +++ b/third_party/go-bip39/wordlists/czech.go @@ -0,0 +1,2071 @@ +package wordlists + +import ( + "fmt" + "hash/crc32" + "strings" +) + +func init() { + // Ensure word list is correct + // $ wget https://raw.githubusercontent.com/bitcoin/bips/master/bip-0039/czech.txt + // $ crc32 czech.txt + // d1b5fda0 + checksum := crc32.ChecksumIEEE([]byte(czech)) + if fmt.Sprintf("%x", checksum) != "d1b5fda0" { + panic("czech checksum invalid") + } +} + +// Czech is a slice of mnemonic words taken from the bip39 specification +// https://raw.githubusercontent.com/bitcoin/bips/master/bip-0039/czech.txt +var Czech = strings.Split(strings.TrimSpace(czech), "\n") +var czech = `abdikace +abeceda +adresa +agrese +akce +aktovka +alej +alkohol +amputace +ananas +andulka +anekdota +anketa +antika +anulovat +archa +arogance +asfalt +asistent +aspirace +astma +astronom +atlas +atletika +atol +autobus +azyl +babka +bachor +bacil +baculka +badatel +bageta +bagr +bahno +bakterie +balada +baletka +balkon +balonek +balvan +balza +bambus +bankomat +barbar +baret +barman +baroko +barva +baterka +batoh +bavlna +bazalka +bazilika +bazuka +bedna +beran +beseda +bestie +beton +bezinka +bezmoc +beztak +bicykl +bidlo +biftek +bikiny +bilance +biograf +biolog +bitva +bizon +blahobyt +blatouch +blecha +bledule +blesk +blikat +blizna +blokovat +bloudit +blud +bobek +bobr +bodlina +bodnout +bohatost +bojkot +bojovat +bokorys +bolest +borec +borovice +bota +boubel +bouchat +bouda +boule +bourat +boxer +bradavka +brambora +branka +bratr +brepta +briketa +brko +brloh +bronz +broskev +brunetka +brusinka +brzda +brzy +bublina +bubnovat +buchta +buditel +budka +budova +bufet +bujarost +bukvice +buldok +bulva +bunda +bunkr +burza +butik +buvol +buzola +bydlet +bylina +bytovka +bzukot +capart +carevna +cedr +cedule +cejch +cejn +cela +celer +celkem +celnice +cenina +cennost 
+cenovka +centrum +cenzor +cestopis +cetka +chalupa +chapadlo +charita +chata +chechtat +chemie +chichot +chirurg +chlad +chleba +chlubit +chmel +chmura +chobot +chochol +chodba +cholera +chomout +chopit +choroba +chov +chrapot +chrlit +chrt +chrup +chtivost +chudina +chutnat +chvat +chvilka +chvost +chyba +chystat +chytit +cibule +cigareta +cihelna +cihla +cinkot +cirkus +cisterna +citace +citrus +cizinec +cizost +clona +cokoliv +couvat +ctitel +ctnost +cudnost +cuketa +cukr +cupot +cvaknout +cval +cvik +cvrkot +cyklista +daleko +dareba +datel +datum +dcera +debata +dechovka +decibel +deficit +deflace +dekl +dekret +demokrat +deprese +derby +deska +detektiv +dikobraz +diktovat +dioda +diplom +disk +displej +divadlo +divoch +dlaha +dlouho +dluhopis +dnes +dobro +dobytek +docent +dochutit +dodnes +dohled +dohoda +dohra +dojem +dojnice +doklad +dokola +doktor +dokument +dolar +doleva +dolina +doma +dominant +domluvit +domov +donutit +dopad +dopis +doplnit +doposud +doprovod +dopustit +dorazit +dorost +dort +dosah +doslov +dostatek +dosud +dosyta +dotaz +dotek +dotknout +doufat +doutnat +dovozce +dozadu +doznat +dozorce +drahota +drak +dramatik +dravec +draze +drdol +drobnost +drogerie +drozd +drsnost +drtit +drzost +duben +duchovno +dudek +duha +duhovka +dusit +dusno +dutost +dvojice +dvorec +dynamit +ekolog +ekonomie +elektron +elipsa +email +emise +emoce +empatie +epizoda +epocha +epopej +epos +esej +esence +eskorta +eskymo +etiketa +euforie +evoluce +exekuce +exkurze +expedice +exploze +export +extrakt +facka +fajfka +fakulta +fanatik +fantazie +farmacie +favorit +fazole +federace +fejeton +fenka +fialka +figurant +filozof +filtr +finance +finta +fixace +fjord +flanel +flirt +flotila +fond +fosfor +fotbal +fotka +foton +frakce +freska +fronta +fukar +funkce +fyzika +galeje +garant +genetika +geolog +gilotina +glazura +glejt +golem +golfista +gotika +graf +gramofon +granule +grep +gril +grog +groteska +guma +hadice +hadr +hala +halenka +hanba +hanopis +harfa +harpuna +havran +hebkost +hejkal +hejno +hejtman +hektar +helma +hematom +herec +herna +heslo +hezky +historik +hladovka +hlasivky +hlava +hledat +hlen +hlodavec +hloh +hloupost +hltat +hlubina +hluchota +hmat +hmota +hmyz +hnis +hnojivo +hnout +hoblina +hoboj +hoch +hodiny +hodlat +hodnota +hodovat +hojnost +hokej +holinka +holka +holub +homole +honitba +honorace +horal +horda +horizont +horko +horlivec +hormon +hornina +horoskop +horstvo +hospoda +hostina +hotovost +houba +houf +houpat +houska +hovor +hradba +hranice +hravost +hrazda +hrbolek +hrdina +hrdlo +hrdost +hrnek +hrobka +hromada +hrot +hrouda +hrozen +hrstka +hrubost +hryzat +hubenost +hubnout +hudba +hukot +humr +husita +hustota +hvozd +hybnost +hydrant +hygiena +hymna +hysterik +idylka +ihned +ikona +iluze +imunita +infekce +inflace +inkaso +inovace +inspekce +internet +invalida +investor +inzerce +ironie +jablko +jachta +jahoda +jakmile +jakost +jalovec +jantar +jarmark +jaro +jasan +jasno +jatka +javor +jazyk +jedinec +jedle +jednatel +jehlan +jekot +jelen +jelito +jemnost +jenom +jepice +jeseter +jevit +jezdec +jezero +jinak +jindy +jinoch +jiskra +jistota +jitrnice +jizva +jmenovat +jogurt +jurta +kabaret +kabel +kabinet +kachna +kadet +kadidlo +kahan +kajak +kajuta +kakao +kaktus +kalamita +kalhoty +kalibr +kalnost +kamera +kamkoliv +kamna +kanibal +kanoe +kantor +kapalina +kapela +kapitola +kapka +kaple +kapota +kapr +kapusta +kapybara +karamel +karotka +karton +kasa +katalog +katedra +kauce +kauza +kavalec +kazajka +kazeta +kazivost +kdekoliv +kdesi +kedluben 
+kemp +keramika +kino +klacek +kladivo +klam +klapot +klasika +klaun +klec +klenba +klepat +klesnout +klid +klima +klisna +klobouk +klokan +klopa +kloub +klubovna +klusat +kluzkost +kmen +kmitat +kmotr +kniha +knot +koalice +koberec +kobka +kobliha +kobyla +kocour +kohout +kojenec +kokos +koktejl +kolaps +koleda +kolize +kolo +komando +kometa +komik +komnata +komora +kompas +komunita +konat +koncept +kondice +konec +konfese +kongres +konina +konkurs +kontakt +konzerva +kopanec +kopie +kopnout +koprovka +korbel +korektor +kormidlo +koroptev +korpus +koruna +koryto +korzet +kosatec +kostka +kotel +kotleta +kotoul +koukat +koupelna +kousek +kouzlo +kovboj +koza +kozoroh +krabice +krach +krajina +kralovat +krasopis +kravata +kredit +krejcar +kresba +kreveta +kriket +kritik +krize +krkavec +krmelec +krmivo +krocan +krok +kronika +kropit +kroupa +krovka +krtek +kruhadlo +krupice +krutost +krvinka +krychle +krypta +krystal +kryt +kudlanka +kufr +kujnost +kukla +kulajda +kulich +kulka +kulomet +kultura +kuna +kupodivu +kurt +kurzor +kutil +kvalita +kvasinka +kvestor +kynolog +kyselina +kytara +kytice +kytka +kytovec +kyvadlo +labrador +lachtan +ladnost +laik +lakomec +lamela +lampa +lanovka +lasice +laso +lastura +latinka +lavina +lebka +leckdy +leden +lednice +ledovka +ledvina +legenda +legie +legrace +lehce +lehkost +lehnout +lektvar +lenochod +lentilka +lepenka +lepidlo +letadlo +letec +letmo +letokruh +levhart +levitace +levobok +libra +lichotka +lidojed +lidskost +lihovina +lijavec +lilek +limetka +linie +linka +linoleum +listopad +litina +litovat +lobista +lodivod +logika +logoped +lokalita +loket +lomcovat +lopata +lopuch +lord +losos +lotr +loudal +louh +louka +louskat +lovec +lstivost +lucerna +lucifer +lump +lusk +lustrace +lvice +lyra +lyrika +lysina +madam +madlo +magistr +mahagon +majetek +majitel +majorita +makak +makovice +makrela +malba +malina +malovat +malvice +maminka +mandle +manko +marnost +masakr +maskot +masopust +matice +matrika +maturita +mazanec +mazivo +mazlit +mazurka +mdloba +mechanik +meditace +medovina +melasa +meloun +mentolka +metla +metoda +metr +mezera +migrace +mihnout +mihule +mikina +mikrofon +milenec +milimetr +milost +mimika +mincovna +minibar +minomet +minulost +miska +mistr +mixovat +mladost +mlha +mlhovina +mlok +mlsat +mluvit +mnich +mnohem +mobil +mocnost +modelka +modlitba +mohyla +mokro +molekula +momentka +monarcha +monokl +monstrum +montovat +monzun +mosaz +moskyt +most +motivace +motorka +motyka +moucha +moudrost +mozaika +mozek +mozol +mramor +mravenec +mrkev +mrtvola +mrzet +mrzutost +mstitel +mudrc +muflon +mulat +mumie +munice +muset +mutace +muzeum +muzikant +myslivec +mzda +nabourat +nachytat +nadace +nadbytek +nadhoz +nadobro +nadpis +nahlas +nahnat +nahodile +nahradit +naivita +najednou +najisto +najmout +naklonit +nakonec +nakrmit +nalevo +namazat +namluvit +nanometr +naoko +naopak +naostro +napadat +napevno +naplnit +napnout +naposled +naprosto +narodit +naruby +narychlo +nasadit +nasekat +naslepo +nastat +natolik +navenek +navrch +navzdory +nazvat +nebe +nechat +necky +nedaleko +nedbat +neduh +negace +nehet +nehoda +nejen +nejprve +neklid +nelibost +nemilost +nemoc +neochota +neonka +nepokoj +nerost +nerv +nesmysl +nesoulad +netvor +neuron +nevina +nezvykle +nicota +nijak +nikam +nikdy +nikl +nikterak +nitro +nocleh +nohavice +nominace +nora +norek +nositel +nosnost +nouze +noviny +novota +nozdra +nuda +nudle +nuget +nutit +nutnost +nutrie +nymfa +obal +obarvit +obava +obdiv +obec +obehnat +obejmout +obezita +obhajoba +obilnice +objasnit 
+objekt +obklopit +oblast +oblek +obliba +obloha +obluda +obnos +obohatit +obojek +obout +obrazec +obrna +obruba +obrys +obsah +obsluha +obstarat +obuv +obvaz +obvinit +obvod +obvykle +obyvatel +obzor +ocas +ocel +ocenit +ochladit +ochota +ochrana +ocitnout +odboj +odbyt +odchod +odcizit +odebrat +odeslat +odevzdat +odezva +odhadce +odhodit +odjet +odjinud +odkaz +odkoupit +odliv +odluka +odmlka +odolnost +odpad +odpis +odplout +odpor +odpustit +odpykat +odrazka +odsoudit +odstup +odsun +odtok +odtud +odvaha +odveta +odvolat +odvracet +odznak +ofina +ofsajd +ohlas +ohnisko +ohrada +ohrozit +ohryzek +okap +okenice +oklika +okno +okouzlit +okovy +okrasa +okres +okrsek +okruh +okupant +okurka +okusit +olejnina +olizovat +omak +omeleta +omezit +omladina +omlouvat +omluva +omyl +onehdy +opakovat +opasek +operace +opice +opilost +opisovat +opora +opozice +opravdu +oproti +orbital +orchestr +orgie +orlice +orloj +ortel +osada +oschnout +osika +osivo +oslava +oslepit +oslnit +oslovit +osnova +osoba +osolit +ospalec +osten +ostraha +ostuda +ostych +osvojit +oteplit +otisk +otop +otrhat +otrlost +otrok +otruby +otvor +ovanout +ovar +oves +ovlivnit +ovoce +oxid +ozdoba +pachatel +pacient +padouch +pahorek +pakt +palanda +palec +palivo +paluba +pamflet +pamlsek +panenka +panika +panna +panovat +panstvo +pantofle +paprika +parketa +parodie +parta +paruka +paryba +paseka +pasivita +pastelka +patent +patrona +pavouk +pazneht +pazourek +pecka +pedagog +pejsek +peklo +peloton +penalta +pendrek +penze +periskop +pero +pestrost +petarda +petice +petrolej +pevnina +pexeso +pianista +piha +pijavice +pikle +piknik +pilina +pilnost +pilulka +pinzeta +pipeta +pisatel +pistole +pitevna +pivnice +pivovar +placenta +plakat +plamen +planeta +plastika +platit +plavidlo +plaz +plech +plemeno +plenta +ples +pletivo +plevel +plivat +plnit +plno +plocha +plodina +plomba +plout +pluk +plyn +pobavit +pobyt +pochod +pocit +poctivec +podat +podcenit +podepsat +podhled +podivit +podklad +podmanit +podnik +podoba +podpora +podraz +podstata +podvod +podzim +poezie +pohanka +pohnutka +pohovor +pohroma +pohyb +pointa +pojistka +pojmout +pokazit +pokles +pokoj +pokrok +pokuta +pokyn +poledne +polibek +polknout +poloha +polynom +pomalu +pominout +pomlka +pomoc +pomsta +pomyslet +ponechat +ponorka +ponurost +popadat +popel +popisek +poplach +poprosit +popsat +popud +poradce +porce +porod +porucha +poryv +posadit +posed +posila +poskok +poslanec +posoudit +pospolu +postava +posudek +posyp +potah +potkan +potlesk +potomek +potrava +potupa +potvora +poukaz +pouto +pouzdro +povaha +povidla +povlak +povoz +povrch +povstat +povyk +povzdech +pozdrav +pozemek +poznatek +pozor +pozvat +pracovat +prahory +praktika +prales +praotec +praporek +prase +pravda +princip +prkno +probudit +procento +prodej +profese +prohra +projekt +prolomit +promile +pronikat +propad +prorok +prosba +proton +proutek +provaz +prskavka +prsten +prudkost +prut +prvek +prvohory +psanec +psovod +pstruh +ptactvo +puberta +puch +pudl +pukavec +puklina +pukrle +pult +pumpa +punc +pupen +pusa +pusinka +pustina +putovat +putyka +pyramida +pysk +pytel +racek +rachot +radiace +radnice +radon +raft +ragby +raketa +rakovina +rameno +rampouch +rande +rarach +rarita +rasovna +rastr +ratolest +razance +razidlo +reagovat +reakce +recept +redaktor +referent +reflex +rejnok +reklama +rekord +rekrut +rektor +reputace +revize +revma +revolver +rezerva +riskovat +riziko +robotika +rodokmen +rohovka +rokle +rokoko +romaneto +ropovod +ropucha +rorejs +rosol +rostlina +rotmistr +rotoped 
+rotunda +roubenka +roucho +roup +roura +rovina +rovnice +rozbor +rozchod +rozdat +rozeznat +rozhodce +rozinka +rozjezd +rozkaz +rozloha +rozmar +rozpad +rozruch +rozsah +roztok +rozum +rozvod +rubrika +ruchadlo +rukavice +rukopis +ryba +rybolov +rychlost +rydlo +rypadlo +rytina +ryzost +sadista +sahat +sako +samec +samizdat +samota +sanitka +sardinka +sasanka +satelit +sazba +sazenice +sbor +schovat +sebranka +secese +sedadlo +sediment +sedlo +sehnat +sejmout +sekera +sekta +sekunda +sekvoje +semeno +seno +servis +sesadit +seshora +seskok +seslat +sestra +sesuv +sesypat +setba +setina +setkat +setnout +setrvat +sever +seznam +shoda +shrnout +sifon +silnice +sirka +sirotek +sirup +situace +skafandr +skalisko +skanzen +skaut +skeptik +skica +skladba +sklenice +sklo +skluz +skoba +skokan +skoro +skripta +skrz +skupina +skvost +skvrna +slabika +sladidlo +slanina +slast +slavnost +sledovat +slepec +sleva +slezina +slib +slina +sliznice +slon +sloupek +slovo +sluch +sluha +slunce +slupka +slza +smaragd +smetana +smilstvo +smlouva +smog +smrad +smrk +smrtka +smutek +smysl +snad +snaha +snob +sobota +socha +sodovka +sokol +sopka +sotva +souboj +soucit +soudce +souhlas +soulad +soumrak +souprava +soused +soutok +souviset +spalovna +spasitel +spis +splav +spodek +spojenec +spolu +sponzor +spornost +spousta +sprcha +spustit +sranda +sraz +srdce +srna +srnec +srovnat +srpen +srst +srub +stanice +starosta +statika +stavba +stehno +stezka +stodola +stolek +stopa +storno +stoupat +strach +stres +strhnout +strom +struna +studna +stupnice +stvol +styk +subjekt +subtropy +suchar +sudost +sukno +sundat +sunout +surikata +surovina +svah +svalstvo +svetr +svatba +svazek +svisle +svitek +svoboda +svodidlo +svorka +svrab +sykavka +sykot +synek +synovec +sypat +sypkost +syrovost +sysel +sytost +tabletka +tabule +tahoun +tajemno +tajfun +tajga +tajit +tajnost +taktika +tamhle +tampon +tancovat +tanec +tanker +tapeta +tavenina +tazatel +technika +tehdy +tekutina +telefon +temnota +tendence +tenista +tenor +teplota +tepna +teprve +terapie +termoska +textil +ticho +tiskopis +titulek +tkadlec +tkanina +tlapka +tleskat +tlukot +tlupa +tmel +toaleta +topinka +topol +torzo +touha +toulec +tradice +traktor +tramp +trasa +traverza +trefit +trest +trezor +trhavina +trhlina +trochu +trojice +troska +trouba +trpce +trpitel +trpkost +trubec +truchlit +truhlice +trus +trvat +tudy +tuhnout +tuhost +tundra +turista +turnaj +tuzemsko +tvaroh +tvorba +tvrdost +tvrz +tygr +tykev +ubohost +uboze +ubrat +ubrousek +ubrus +ubytovna +ucho +uctivost +udivit +uhradit +ujednat +ujistit +ujmout +ukazatel +uklidnit +uklonit +ukotvit +ukrojit +ulice +ulita +ulovit +umyvadlo +unavit +uniforma +uniknout +upadnout +uplatnit +uplynout +upoutat +upravit +uran +urazit +usednout +usilovat +usmrtit +usnadnit +usnout +usoudit +ustlat +ustrnout +utahovat +utkat +utlumit +utonout +utopenec +utrousit +uvalit +uvolnit +uvozovka +uzdravit +uzel +uzenina +uzlina +uznat +vagon +valcha +valoun +vana +vandal +vanilka +varan +varhany +varovat +vcelku +vchod +vdova +vedro +vegetace +vejce +velbloud +veletrh +velitel +velmoc +velryba +venkov +veranda +verze +veselka +veskrze +vesnice +vespodu +vesta +veterina +veverka +vibrace +vichr +videohra +vidina +vidle +vila +vinice +viset +vitalita +vize +vizitka +vjezd +vklad +vkus +vlajka +vlak +vlasec +vlevo +vlhkost +vliv +vlnovka +vloupat +vnucovat +vnuk +voda +vodivost +vodoznak +vodstvo +vojensky +vojna +vojsko +volant +volba +volit +volno +voskovka +vozidlo +vozovna +vpravo +vrabec +vracet +vrah +vrata +vrba 
+vrcholek +vrhat +vrstva +vrtule +vsadit +vstoupit +vstup +vtip +vybavit +vybrat +vychovat +vydat +vydra +vyfotit +vyhledat +vyhnout +vyhodit +vyhradit +vyhubit +vyjasnit +vyjet +vyjmout +vyklopit +vykonat +vylekat +vymazat +vymezit +vymizet +vymyslet +vynechat +vynikat +vynutit +vypadat +vyplatit +vypravit +vypustit +vyrazit +vyrovnat +vyrvat +vyslovit +vysoko +vystavit +vysunout +vysypat +vytasit +vytesat +vytratit +vyvinout +vyvolat +vyvrhel +vyzdobit +vyznat +vzadu +vzbudit +vzchopit +vzdor +vzduch +vzdychat +vzestup +vzhledem +vzkaz +vzlykat +vznik +vzorek +vzpoura +vztah +vztek +xylofon +zabrat +zabydlet +zachovat +zadarmo +zadusit +zafoukat +zahltit +zahodit +zahrada +zahynout +zajatec +zajet +zajistit +zaklepat +zakoupit +zalepit +zamezit +zamotat +zamyslet +zanechat +zanikat +zaplatit +zapojit +zapsat +zarazit +zastavit +zasunout +zatajit +zatemnit +zatknout +zaujmout +zavalit +zavelet +zavinit +zavolat +zavrtat +zazvonit +zbavit +zbrusu +zbudovat +zbytek +zdaleka +zdarma +zdatnost +zdivo +zdobit +zdroj +zdvih +zdymadlo +zelenina +zeman +zemina +zeptat +zezadu +zezdola +zhatit +zhltnout +zhluboka +zhotovit +zhruba +zima +zimnice +zjemnit +zklamat +zkoumat +zkratka +zkumavka +zlato +zlehka +zloba +zlom +zlost +zlozvyk +zmapovat +zmar +zmatek +zmije +zmizet +zmocnit +zmodrat +zmrzlina +zmutovat +znak +znalost +znamenat +znovu +zobrazit +zotavit +zoubek +zoufale +zplodit +zpomalit +zprava +zprostit +zprudka +zprvu +zrada +zranit +zrcadlo +zrnitost +zrno +zrovna +zrychlit +zrzavost +zticha +ztratit +zubovina +zubr +zvednout +zvenku +zvesela +zvon +zvrat +zvukovod +zvyk +` diff --git a/third_party/go-bip39/wordlists/english.go b/third_party/go-bip39/wordlists/english.go new file mode 100644 index 0000000000..f69e8a4736 --- /dev/null +++ b/third_party/go-bip39/wordlists/english.go @@ -0,0 +1,2071 @@ +package wordlists + +import ( + "fmt" + "hash/crc32" + "strings" +) + +func init() { + // Ensure word list is correct + // $ wget https://raw.githubusercontent.com/bitcoin/bips/master/bip-0039/english.txt + // $ crc32 english.txt + // c1dbd296 + checksum := crc32.ChecksumIEEE([]byte(english)) + if fmt.Sprintf("%x", checksum) != "c1dbd296" { + panic("english checksum invalid") + } +} + +// English is a slice of mnemonic words taken from the bip39 specification +// https://raw.githubusercontent.com/bitcoin/bips/master/bip-0039/english.txt +var English = strings.Split(strings.TrimSpace(english), "\n") +var english = `abandon +ability +able +about +above +absent +absorb +abstract +absurd +abuse +access +accident +account +accuse +achieve +acid +acoustic +acquire +across +act +action +actor +actress +actual +adapt +add +addict +address +adjust +admit +adult +advance +advice +aerobic +affair +afford +afraid +again +age +agent +agree +ahead +aim +air +airport +aisle +alarm +album +alcohol +alert +alien +all +alley +allow +almost +alone +alpha +already +also +alter +always +amateur +amazing +among +amount +amused +analyst +anchor +ancient +anger +angle +angry +animal +ankle +announce +annual +another +answer +antenna +antique +anxiety +any +apart +apology +appear +apple +approve +april +arch +arctic +area +arena +argue +arm +armed +armor +army +around +arrange +arrest +arrive +arrow +art +artefact +artist +artwork +ask +aspect +assault +asset +assist +assume +asthma +athlete +atom +attack +attend +attitude +attract +auction +audit +august +aunt +author +auto +autumn +average +avocado +avoid +awake +aware +away +awesome +awful +awkward +axis +baby +bachelor +bacon +badge +bag +balance +balcony +ball 
+bamboo +banana +banner +bar +barely +bargain +barrel +base +basic +basket +battle +beach +bean +beauty +because +become +beef +before +begin +behave +behind +believe +below +belt +bench +benefit +best +betray +better +between +beyond +bicycle +bid +bike +bind +biology +bird +birth +bitter +black +blade +blame +blanket +blast +bleak +bless +blind +blood +blossom +blouse +blue +blur +blush +board +boat +body +boil +bomb +bone +bonus +book +boost +border +boring +borrow +boss +bottom +bounce +box +boy +bracket +brain +brand +brass +brave +bread +breeze +brick +bridge +brief +bright +bring +brisk +broccoli +broken +bronze +broom +brother +brown +brush +bubble +buddy +budget +buffalo +build +bulb +bulk +bullet +bundle +bunker +burden +burger +burst +bus +business +busy +butter +buyer +buzz +cabbage +cabin +cable +cactus +cage +cake +call +calm +camera +camp +can +canal +cancel +candy +cannon +canoe +canvas +canyon +capable +capital +captain +car +carbon +card +cargo +carpet +carry +cart +case +cash +casino +castle +casual +cat +catalog +catch +category +cattle +caught +cause +caution +cave +ceiling +celery +cement +census +century +cereal +certain +chair +chalk +champion +change +chaos +chapter +charge +chase +chat +cheap +check +cheese +chef +cherry +chest +chicken +chief +child +chimney +choice +choose +chronic +chuckle +chunk +churn +cigar +cinnamon +circle +citizen +city +civil +claim +clap +clarify +claw +clay +clean +clerk +clever +click +client +cliff +climb +clinic +clip +clock +clog +close +cloth +cloud +clown +club +clump +cluster +clutch +coach +coast +coconut +code +coffee +coil +coin +collect +color +column +combine +come +comfort +comic +common +company +concert +conduct +confirm +congress +connect +consider +control +convince +cook +cool +copper +copy +coral +core +corn +correct +cost +cotton +couch +country +couple +course +cousin +cover +coyote +crack +cradle +craft +cram +crane +crash +crater +crawl +crazy +cream +credit +creek +crew +cricket +crime +crisp +critic +crop +cross +crouch +crowd +crucial +cruel +cruise +crumble +crunch +crush +cry +crystal +cube +culture +cup +cupboard +curious +current +curtain +curve +cushion +custom +cute +cycle +dad +damage +damp +dance +danger +daring +dash +daughter +dawn +day +deal +debate +debris +decade +december +decide +decline +decorate +decrease +deer +defense +define +defy +degree +delay +deliver +demand +demise +denial +dentist +deny +depart +depend +deposit +depth +deputy +derive +describe +desert +design +desk +despair +destroy +detail +detect +develop +device +devote +diagram +dial +diamond +diary +dice +diesel +diet +differ +digital +dignity +dilemma +dinner +dinosaur +direct +dirt +disagree +discover +disease +dish +dismiss +disorder +display +distance +divert +divide +divorce +dizzy +doctor +document +dog +doll +dolphin +domain +donate +donkey +donor +door +dose +double +dove +draft +dragon +drama +drastic +draw +dream +dress +drift +drill +drink +drip +drive +drop +drum +dry +duck +dumb +dune +during +dust +dutch +duty +dwarf +dynamic +eager +eagle +early +earn +earth +easily +east +easy +echo +ecology +economy +edge +edit +educate +effort +egg +eight +either +elbow +elder +electric +elegant +element +elephant +elevator +elite +else +embark +embody +embrace +emerge +emotion +employ +empower +empty +enable +enact +end +endless +endorse +enemy +energy +enforce +engage +engine +enhance +enjoy +enlist +enough +enrich +enroll +ensure +enter +entire +entry +envelope +episode +equal +equip +era +erase +erode +erosion +error +erupt 
+escape +essay +essence +estate +eternal +ethics +evidence +evil +evoke +evolve +exact +example +excess +exchange +excite +exclude +excuse +execute +exercise +exhaust +exhibit +exile +exist +exit +exotic +expand +expect +expire +explain +expose +express +extend +extra +eye +eyebrow +fabric +face +faculty +fade +faint +faith +fall +false +fame +family +famous +fan +fancy +fantasy +farm +fashion +fat +fatal +father +fatigue +fault +favorite +feature +february +federal +fee +feed +feel +female +fence +festival +fetch +fever +few +fiber +fiction +field +figure +file +film +filter +final +find +fine +finger +finish +fire +firm +first +fiscal +fish +fit +fitness +fix +flag +flame +flash +flat +flavor +flee +flight +flip +float +flock +floor +flower +fluid +flush +fly +foam +focus +fog +foil +fold +follow +food +foot +force +forest +forget +fork +fortune +forum +forward +fossil +foster +found +fox +fragile +frame +frequent +fresh +friend +fringe +frog +front +frost +frown +frozen +fruit +fuel +fun +funny +furnace +fury +future +gadget +gain +galaxy +gallery +game +gap +garage +garbage +garden +garlic +garment +gas +gasp +gate +gather +gauge +gaze +general +genius +genre +gentle +genuine +gesture +ghost +giant +gift +giggle +ginger +giraffe +girl +give +glad +glance +glare +glass +glide +glimpse +globe +gloom +glory +glove +glow +glue +goat +goddess +gold +good +goose +gorilla +gospel +gossip +govern +gown +grab +grace +grain +grant +grape +grass +gravity +great +green +grid +grief +grit +grocery +group +grow +grunt +guard +guess +guide +guilt +guitar +gun +gym +habit +hair +half +hammer +hamster +hand +happy +harbor +hard +harsh +harvest +hat +have +hawk +hazard +head +health +heart +heavy +hedgehog +height +hello +helmet +help +hen +hero +hidden +high +hill +hint +hip +hire +history +hobby +hockey +hold +hole +holiday +hollow +home +honey +hood +hope +horn +horror +horse +hospital +host +hotel +hour +hover +hub +huge +human +humble +humor +hundred +hungry +hunt +hurdle +hurry +hurt +husband +hybrid +ice +icon +idea +identify +idle +ignore +ill +illegal +illness +image +imitate +immense +immune +impact +impose +improve +impulse +inch +include +income +increase +index +indicate +indoor +industry +infant +inflict +inform +inhale +inherit +initial +inject +injury +inmate +inner +innocent +input +inquiry +insane +insect +inside +inspire +install +intact +interest +into +invest +invite +involve +iron +island +isolate +issue +item +ivory +jacket +jaguar +jar +jazz +jealous +jeans +jelly +jewel +job +join +joke +journey +joy +judge +juice +jump +jungle +junior +junk +just +kangaroo +keen +keep +ketchup +key +kick +kid +kidney +kind +kingdom +kiss +kit +kitchen +kite +kitten +kiwi +knee +knife +knock +know +lab +label +labor +ladder +lady +lake +lamp +language +laptop +large +later +latin +laugh +laundry +lava +law +lawn +lawsuit +layer +lazy +leader +leaf +learn +leave +lecture +left +leg +legal +legend +leisure +lemon +lend +length +lens +leopard +lesson +letter +level +liar +liberty +library +license +life +lift +light +like +limb +limit +link +lion +liquid +list +little +live +lizard +load +loan +lobster +local +lock +logic +lonely +long +loop +lottery +loud +lounge +love +loyal +lucky +luggage +lumber +lunar +lunch +luxury +lyrics +machine +mad +magic +magnet +maid +mail +main +major +make +mammal +man +manage +mandate +mango +mansion +manual +maple +marble +march +margin +marine +market +marriage +mask +mass +master +match +material +math +matrix +matter +maximum +maze +meadow +mean +measure +meat 
+mechanic +medal +media +melody +melt +member +memory +mention +menu +mercy +merge +merit +merry +mesh +message +metal +method +middle +midnight +milk +million +mimic +mind +minimum +minor +minute +miracle +mirror +misery +miss +mistake +mix +mixed +mixture +mobile +model +modify +mom +moment +monitor +monkey +monster +month +moon +moral +more +morning +mosquito +mother +motion +motor +mountain +mouse +move +movie +much +muffin +mule +multiply +muscle +museum +mushroom +music +must +mutual +myself +mystery +myth +naive +name +napkin +narrow +nasty +nation +nature +near +neck +need +negative +neglect +neither +nephew +nerve +nest +net +network +neutral +never +news +next +nice +night +noble +noise +nominee +noodle +normal +north +nose +notable +note +nothing +notice +novel +now +nuclear +number +nurse +nut +oak +obey +object +oblige +obscure +observe +obtain +obvious +occur +ocean +october +odor +off +offer +office +often +oil +okay +old +olive +olympic +omit +once +one +onion +online +only +open +opera +opinion +oppose +option +orange +orbit +orchard +order +ordinary +organ +orient +original +orphan +ostrich +other +outdoor +outer +output +outside +oval +oven +over +own +owner +oxygen +oyster +ozone +pact +paddle +page +pair +palace +palm +panda +panel +panic +panther +paper +parade +parent +park +parrot +party +pass +patch +path +patient +patrol +pattern +pause +pave +payment +peace +peanut +pear +peasant +pelican +pen +penalty +pencil +people +pepper +perfect +permit +person +pet +phone +photo +phrase +physical +piano +picnic +picture +piece +pig +pigeon +pill +pilot +pink +pioneer +pipe +pistol +pitch +pizza +place +planet +plastic +plate +play +please +pledge +pluck +plug +plunge +poem +poet +point +polar +pole +police +pond +pony +pool +popular +portion +position +possible +post +potato +pottery +poverty +powder +power +practice +praise +predict +prefer +prepare +present +pretty +prevent +price +pride +primary +print +priority +prison +private +prize +problem +process +produce +profit +program +project +promote +proof +property +prosper +protect +proud +provide +public +pudding +pull +pulp +pulse +pumpkin +punch +pupil +puppy +purchase +purity +purpose +purse +push +put +puzzle +pyramid +quality +quantum +quarter +question +quick +quit +quiz +quote +rabbit +raccoon +race +rack +radar +radio +rail +rain +raise +rally +ramp +ranch +random +range +rapid +rare +rate +rather +raven +raw +razor +ready +real +reason +rebel +rebuild +recall +receive +recipe +record +recycle +reduce +reflect +reform +refuse +region +regret +regular +reject +relax +release +relief +rely +remain +remember +remind +remove +render +renew +rent +reopen +repair +repeat +replace +report +require +rescue +resemble +resist +resource +response +result +retire +retreat +return +reunion +reveal +review +reward +rhythm +rib +ribbon +rice +rich +ride +ridge +rifle +right +rigid +ring +riot +ripple +risk +ritual +rival +river +road +roast +robot +robust +rocket +romance +roof +rookie +room +rose +rotate +rough +round +route +royal +rubber +rude +rug +rule +run +runway +rural +sad +saddle +sadness +safe +sail +salad +salmon +salon +salt +salute +same +sample +sand +satisfy +satoshi +sauce +sausage +save +say +scale +scan +scare +scatter +scene +scheme +school +science +scissors +scorpion +scout +scrap +screen +script +scrub +sea +search +season +seat +second +secret +section +security +seed +seek +segment +select +sell +seminar +senior +sense +sentence +series +service +session +settle +setup +seven +shadow +shaft +shallow 
+share +shed +shell +sheriff +shield +shift +shine +ship +shiver +shock +shoe +shoot +shop +short +shoulder +shove +shrimp +shrug +shuffle +shy +sibling +sick +side +siege +sight +sign +silent +silk +silly +silver +similar +simple +since +sing +siren +sister +situate +six +size +skate +sketch +ski +skill +skin +skirt +skull +slab +slam +sleep +slender +slice +slide +slight +slim +slogan +slot +slow +slush +small +smart +smile +smoke +smooth +snack +snake +snap +sniff +snow +soap +soccer +social +sock +soda +soft +solar +soldier +solid +solution +solve +someone +song +soon +sorry +sort +soul +sound +soup +source +south +space +spare +spatial +spawn +speak +special +speed +spell +spend +sphere +spice +spider +spike +spin +spirit +split +spoil +sponsor +spoon +sport +spot +spray +spread +spring +spy +square +squeeze +squirrel +stable +stadium +staff +stage +stairs +stamp +stand +start +state +stay +steak +steel +stem +step +stereo +stick +still +sting +stock +stomach +stone +stool +story +stove +strategy +street +strike +strong +struggle +student +stuff +stumble +style +subject +submit +subway +success +such +sudden +suffer +sugar +suggest +suit +summer +sun +sunny +sunset +super +supply +supreme +sure +surface +surge +surprise +surround +survey +suspect +sustain +swallow +swamp +swap +swarm +swear +sweet +swift +swim +swing +switch +sword +symbol +symptom +syrup +system +table +tackle +tag +tail +talent +talk +tank +tape +target +task +taste +tattoo +taxi +teach +team +tell +ten +tenant +tennis +tent +term +test +text +thank +that +theme +then +theory +there +they +thing +this +thought +three +thrive +throw +thumb +thunder +ticket +tide +tiger +tilt +timber +time +tiny +tip +tired +tissue +title +toast +tobacco +today +toddler +toe +together +toilet +token +tomato +tomorrow +tone +tongue +tonight +tool +tooth +top +topic +topple +torch +tornado +tortoise +toss +total +tourist +toward +tower +town +toy +track +trade +traffic +tragic +train +transfer +trap +trash +travel +tray +treat +tree +trend +trial +tribe +trick +trigger +trim +trip +trophy +trouble +truck +true +truly +trumpet +trust +truth +try +tube +tuition +tumble +tuna +tunnel +turkey +turn +turtle +twelve +twenty +twice +twin +twist +two +type +typical +ugly +umbrella +unable +unaware +uncle +uncover +under +undo +unfair +unfold +unhappy +uniform +unique +unit +universe +unknown +unlock +until +unusual +unveil +update +upgrade +uphold +upon +upper +upset +urban +urge +usage +use +used +useful +useless +usual +utility +vacant +vacuum +vague +valid +valley +valve +van +vanish +vapor +various +vast +vault +vehicle +velvet +vendor +venture +venue +verb +verify +version +very +vessel +veteran +viable +vibrant +vicious +victory +video +view +village +vintage +violin +virtual +virus +visa +visit +visual +vital +vivid +vocal +voice +void +volcano +volume +vote +voyage +wage +wagon +wait +walk +wall +walnut +want +warfare +warm +warrior +wash +wasp +waste +water +wave +way +wealth +weapon +wear +weasel +weather +web +wedding +weekend +weird +welcome +west +wet +whale +what +wheat +wheel +when +where +whip +whisper +wide +width +wife +wild +will +win +window +wine +wing +wink +winner +winter +wire +wisdom +wise +wish +witness +wolf +woman +wonder +wood +wool +word +work +world +worry +worth +wrap +wreck +wrestle +wrist +write +wrong +yard +year +yellow +you +young +youth +zebra +zero +zone +zoo +` diff --git a/third_party/go-bip39/wordlists/french.go b/third_party/go-bip39/wordlists/french.go new file mode 100644 index 0000000000..1ae685d3ad 
--- /dev/null +++ b/third_party/go-bip39/wordlists/french.go @@ -0,0 +1,2071 @@ +package wordlists + +import ( + "fmt" + "hash/crc32" + "strings" +) + +func init() { + // Ensure word list is correct + // $ wget https://raw.githubusercontent.com/bitcoin/bips/master/bip-0039/french.txt + // $ crc32 french.txt + // 3e56b216 + checksum := crc32.ChecksumIEEE([]byte(french)) + if fmt.Sprintf("%x", checksum) != "3e56b216" { + panic("french checksum invalid") + } +} + +// French is a slice of mnemonic words taken from the bip39 specification +// https://raw.githubusercontent.com/bitcoin/bips/master/bip-0039/french.txt +var French = strings.Split(strings.TrimSpace(french), "\n") +var french = `abaisser +abandon +abdiquer +abeille +abolir +aborder +aboutir +aboyer +abrasif +abreuver +abriter +abroger +abrupt +absence +absolu +absurde +abusif +abyssal +académie +acajou +acarien +accabler +accepter +acclamer +accolade +accroche +accuser +acerbe +achat +acheter +aciduler +acier +acompte +acquérir +acronyme +acteur +actif +actuel +adepte +adéquat +adhésif +adjectif +adjuger +admettre +admirer +adopter +adorer +adoucir +adresse +adroit +adulte +adverbe +aérer +aéronef +affaire +affecter +affiche +affreux +affubler +agacer +agencer +agile +agiter +agrafer +agréable +agrume +aider +aiguille +ailier +aimable +aisance +ajouter +ajuster +alarmer +alchimie +alerte +algèbre +algue +aliéner +aliment +alléger +alliage +allouer +allumer +alourdir +alpaga +altesse +alvéole +amateur +ambigu +ambre +aménager +amertume +amidon +amiral +amorcer +amour +amovible +amphibie +ampleur +amusant +analyse +anaphore +anarchie +anatomie +ancien +anéantir +angle +angoisse +anguleux +animal +annexer +annonce +annuel +anodin +anomalie +anonyme +anormal +antenne +antidote +anxieux +apaiser +apéritif +aplanir +apologie +appareil +appeler +apporter +appuyer +aquarium +aqueduc +arbitre +arbuste +ardeur +ardoise +argent +arlequin +armature +armement +armoire +armure +arpenter +arracher +arriver +arroser +arsenic +artériel +article +aspect +asphalte +aspirer +assaut +asservir +assiette +associer +assurer +asticot +astre +astuce +atelier +atome +atrium +atroce +attaque +attentif +attirer +attraper +aubaine +auberge +audace +audible +augurer +aurore +automne +autruche +avaler +avancer +avarice +avenir +averse +aveugle +aviateur +avide +avion +aviser +avoine +avouer +avril +axial +axiome +badge +bafouer +bagage +baguette +baignade +balancer +balcon +baleine +balisage +bambin +bancaire +bandage +banlieue +bannière +banquier +barbier +baril +baron +barque +barrage +bassin +bastion +bataille +bateau +batterie +baudrier +bavarder +belette +bélier +belote +bénéfice +berceau +berger +berline +bermuda +besace +besogne +bétail +beurre +biberon +bicycle +bidule +bijou +bilan +bilingue +billard +binaire +biologie +biopsie +biotype +biscuit +bison +bistouri +bitume +bizarre +blafard +blague +blanchir +blessant +blinder +blond +bloquer +blouson +bobard +bobine +boire +boiser +bolide +bonbon +bondir +bonheur +bonifier +bonus +bordure +borne +botte +boucle +boueux +bougie +boulon +bouquin +bourse +boussole +boutique +boxeur +branche +brasier +brave +brebis +brèche +breuvage +bricoler +brigade +brillant +brioche +brique +brochure +broder +bronzer +brousse +broyeur +brume +brusque +brutal +bruyant +buffle +buisson +bulletin +bureau +burin +bustier +butiner +butoir +buvable +buvette +cabanon +cabine +cachette +cadeau +cadre +caféine +caillou +caisson +calculer +calepin +calibre +calmer +calomnie +calvaire +camarade +caméra +camion +campagne +canal +caneton 
+canon +cantine +canular +capable +caporal +caprice +capsule +capter +capuche +carabine +carbone +caresser +caribou +carnage +carotte +carreau +carton +cascade +casier +casque +cassure +causer +caution +cavalier +caverne +caviar +cédille +ceinture +céleste +cellule +cendrier +censurer +central +cercle +cérébral +cerise +cerner +cerveau +cesser +chagrin +chaise +chaleur +chambre +chance +chapitre +charbon +chasseur +chaton +chausson +chavirer +chemise +chenille +chéquier +chercher +cheval +chien +chiffre +chignon +chimère +chiot +chlorure +chocolat +choisir +chose +chouette +chrome +chute +cigare +cigogne +cimenter +cinéma +cintrer +circuler +cirer +cirque +citerne +citoyen +citron +civil +clairon +clameur +claquer +classe +clavier +client +cligner +climat +clivage +cloche +clonage +cloporte +cobalt +cobra +cocasse +cocotier +coder +codifier +coffre +cogner +cohésion +coiffer +coincer +colère +colibri +colline +colmater +colonel +combat +comédie +commande +compact +concert +conduire +confier +congeler +connoter +consonne +contact +convexe +copain +copie +corail +corbeau +cordage +corniche +corpus +correct +cortège +cosmique +costume +coton +coude +coupure +courage +couteau +couvrir +coyote +crabe +crainte +cravate +crayon +créature +créditer +crémeux +creuser +crevette +cribler +crier +cristal +critère +croire +croquer +crotale +crucial +cruel +crypter +cubique +cueillir +cuillère +cuisine +cuivre +culminer +cultiver +cumuler +cupide +curatif +curseur +cyanure +cycle +cylindre +cynique +daigner +damier +danger +danseur +dauphin +débattre +débiter +déborder +débrider +débutant +décaler +décembre +déchirer +décider +déclarer +décorer +décrire +décupler +dédale +déductif +déesse +défensif +défiler +défrayer +dégager +dégivrer +déglutir +dégrafer +déjeuner +délice +déloger +demander +demeurer +démolir +dénicher +dénouer +dentelle +dénuder +départ +dépenser +déphaser +déplacer +déposer +déranger +dérober +désastre +descente +désert +désigner +désobéir +dessiner +destrier +détacher +détester +détourer +détresse +devancer +devenir +deviner +devoir +diable +dialogue +diamant +dicter +différer +digérer +digital +digne +diluer +dimanche +diminuer +dioxyde +directif +diriger +discuter +disposer +dissiper +distance +divertir +diviser +docile +docteur +dogme +doigt +domaine +domicile +dompter +donateur +donjon +donner +dopamine +dortoir +dorure +dosage +doseur +dossier +dotation +douanier +double +douceur +douter +doyen +dragon +draper +dresser +dribbler +droiture +duperie +duplexe +durable +durcir +dynastie +éblouir +écarter +écharpe +échelle +éclairer +éclipse +éclore +écluse +école +économie +écorce +écouter +écraser +écrémer +écrivain +écrou +écume +écureuil +édifier +éduquer +effacer +effectif +effigie +effort +effrayer +effusion +égaliser +égarer +éjecter +élaborer +élargir +électron +élégant +éléphant +élève +éligible +élitisme +éloge +élucider +éluder +emballer +embellir +embryon +émeraude +émission +emmener +émotion +émouvoir +empereur +employer +emporter +emprise +émulsion +encadrer +enchère +enclave +encoche +endiguer +endosser +endroit +enduire +énergie +enfance +enfermer +enfouir +engager +engin +englober +énigme +enjamber +enjeu +enlever +ennemi +ennuyeux +enrichir +enrobage +enseigne +entasser +entendre +entier +entourer +entraver +énumérer +envahir +enviable +envoyer +enzyme +éolien +épaissir +épargne +épatant +épaule +épicerie +épidémie +épier +épilogue +épine +épisode +épitaphe +époque +épreuve +éprouver +épuisant +équerre +équipe +ériger +érosion +erreur +éruption +escalier +espadon 
+espèce +espiègle +espoir +esprit +esquiver +essayer +essence +essieu +essorer +estime +estomac +estrade +étagère +étaler +étanche +étatique +éteindre +étendoir +éternel +éthanol +éthique +ethnie +étirer +étoffer +étoile +étonnant +étourdir +étrange +étroit +étude +euphorie +évaluer +évasion +éventail +évidence +éviter +évolutif +évoquer +exact +exagérer +exaucer +exceller +excitant +exclusif +excuse +exécuter +exemple +exercer +exhaler +exhorter +exigence +exiler +exister +exotique +expédier +explorer +exposer +exprimer +exquis +extensif +extraire +exulter +fable +fabuleux +facette +facile +facture +faiblir +falaise +fameux +famille +farceur +farfelu +farine +farouche +fasciner +fatal +fatigue +faucon +fautif +faveur +favori +fébrile +féconder +fédérer +félin +femme +fémur +fendoir +féodal +fermer +féroce +ferveur +festival +feuille +feutre +février +fiasco +ficeler +fictif +fidèle +figure +filature +filetage +filière +filleul +filmer +filou +filtrer +financer +finir +fiole +firme +fissure +fixer +flairer +flamme +flasque +flatteur +fléau +flèche +fleur +flexion +flocon +flore +fluctuer +fluide +fluvial +folie +fonderie +fongible +fontaine +forcer +forgeron +formuler +fortune +fossile +foudre +fougère +fouiller +foulure +fourmi +fragile +fraise +franchir +frapper +frayeur +frégate +freiner +frelon +frémir +frénésie +frère +friable +friction +frisson +frivole +froid +fromage +frontal +frotter +fruit +fugitif +fuite +fureur +furieux +furtif +fusion +futur +gagner +galaxie +galerie +gambader +garantir +gardien +garnir +garrigue +gazelle +gazon +géant +gélatine +gélule +gendarme +général +génie +genou +gentil +géologie +géomètre +géranium +germe +gestuel +geyser +gibier +gicler +girafe +givre +glace +glaive +glisser +globe +gloire +glorieux +golfeur +gomme +gonfler +gorge +gorille +goudron +gouffre +goulot +goupille +gourmand +goutte +graduel +graffiti +graine +grand +grappin +gratuit +gravir +grenat +griffure +griller +grimper +grogner +gronder +grotte +groupe +gruger +grutier +gruyère +guépard +guerrier +guide +guimauve +guitare +gustatif +gymnaste +gyrostat +habitude +hachoir +halte +hameau +hangar +hanneton +haricot +harmonie +harpon +hasard +hélium +hématome +herbe +hérisson +hermine +héron +hésiter +heureux +hiberner +hibou +hilarant +histoire +hiver +homard +hommage +homogène +honneur +honorer +honteux +horde +horizon +horloge +hormone +horrible +houleux +housse +hublot +huileux +humain +humble +humide +humour +hurler +hydromel +hygiène +hymne +hypnose +idylle +ignorer +iguane +illicite +illusion +image +imbiber +imiter +immense +immobile +immuable +impact +impérial +implorer +imposer +imprimer +imputer +incarner +incendie +incident +incliner +incolore +indexer +indice +inductif +inédit +ineptie +inexact +infini +infliger +informer +infusion +ingérer +inhaler +inhiber +injecter +injure +innocent +inoculer +inonder +inscrire +insecte +insigne +insolite +inspirer +instinct +insulter +intact +intense +intime +intrigue +intuitif +inutile +invasion +inventer +inviter +invoquer +ironique +irradier +irréel +irriter +isoler +ivoire +ivresse +jaguar +jaillir +jambe +janvier +jardin +jauger +jaune +javelot +jetable +jeton +jeudi +jeunesse +joindre +joncher +jongler +joueur +jouissif +journal +jovial +joyau +joyeux +jubiler +jugement +junior +jupon +juriste +justice +juteux +juvénile +kayak +kimono +kiosque +label +labial +labourer +lacérer +lactose +lagune +laine +laisser +laitier +lambeau +lamelle +lampe +lanceur +langage +lanterne +lapin +largeur +larme +laurier +lavabo +lavoir +lecture +légal 
+léger +légume +lessive +lettre +levier +lexique +lézard +liasse +libérer +libre +licence +licorne +liège +lièvre +ligature +ligoter +ligue +limer +limite +limonade +limpide +linéaire +lingot +lionceau +liquide +lisière +lister +lithium +litige +littoral +livreur +logique +lointain +loisir +lombric +loterie +louer +lourd +loutre +louve +loyal +lubie +lucide +lucratif +lueur +lugubre +luisant +lumière +lunaire +lundi +luron +lutter +luxueux +machine +magasin +magenta +magique +maigre +maillon +maintien +mairie +maison +majorer +malaxer +maléfice +malheur +malice +mallette +mammouth +mandater +maniable +manquant +manteau +manuel +marathon +marbre +marchand +mardi +maritime +marqueur +marron +marteler +mascotte +massif +matériel +matière +matraque +maudire +maussade +mauve +maximal +méchant +méconnu +médaille +médecin +méditer +méduse +meilleur +mélange +mélodie +membre +mémoire +menacer +mener +menhir +mensonge +mentor +mercredi +mérite +merle +messager +mesure +métal +météore +méthode +métier +meuble +miauler +microbe +miette +mignon +migrer +milieu +million +mimique +mince +minéral +minimal +minorer +minute +miracle +miroiter +missile +mixte +mobile +moderne +moelleux +mondial +moniteur +monnaie +monotone +monstre +montagne +monument +moqueur +morceau +morsure +mortier +moteur +motif +mouche +moufle +moulin +mousson +mouton +mouvant +multiple +munition +muraille +murène +murmure +muscle +muséum +musicien +mutation +muter +mutuel +myriade +myrtille +mystère +mythique +nageur +nappe +narquois +narrer +natation +nation +nature +naufrage +nautique +navire +nébuleux +nectar +néfaste +négation +négliger +négocier +neige +nerveux +nettoyer +neurone +neutron +neveu +niche +nickel +nitrate +niveau +noble +nocif +nocturne +noirceur +noisette +nomade +nombreux +nommer +normatif +notable +notifier +notoire +nourrir +nouveau +novateur +novembre +novice +nuage +nuancer +nuire +nuisible +numéro +nuptial +nuque +nutritif +obéir +objectif +obliger +obscur +observer +obstacle +obtenir +obturer +occasion +occuper +océan +octobre +octroyer +octupler +oculaire +odeur +odorant +offenser +officier +offrir +ogive +oiseau +oisillon +olfactif +olivier +ombrage +omettre +onctueux +onduler +onéreux +onirique +opale +opaque +opérer +opinion +opportun +opprimer +opter +optique +orageux +orange +orbite +ordonner +oreille +organe +orgueil +orifice +ornement +orque +ortie +osciller +osmose +ossature +otarie +ouragan +ourson +outil +outrager +ouvrage +ovation +oxyde +oxygène +ozone +paisible +palace +palmarès +palourde +palper +panache +panda +pangolin +paniquer +panneau +panorama +pantalon +papaye +papier +papoter +papyrus +paradoxe +parcelle +paresse +parfumer +parler +parole +parrain +parsemer +partager +parure +parvenir +passion +pastèque +paternel +patience +patron +pavillon +pavoiser +payer +paysage +peigne +peintre +pelage +pélican +pelle +pelouse +peluche +pendule +pénétrer +pénible +pensif +pénurie +pépite +péplum +perdrix +perforer +période +permuter +perplexe +persil +perte +peser +pétale +petit +pétrir +peuple +pharaon +phobie +phoque +photon +phrase +physique +piano +pictural +pièce +pierre +pieuvre +pilote +pinceau +pipette +piquer +pirogue +piscine +piston +pivoter +pixel +pizza +placard +plafond +plaisir +planer +plaque +plastron +plateau +pleurer +plexus +pliage +plomb +plonger +pluie +plumage +pochette +poésie +poète +pointe +poirier +poisson +poivre +polaire +policier +pollen +polygone +pommade +pompier +ponctuel +pondérer +poney +portique +position +posséder +posture +potager +poteau +potion +pouce 
+poulain +poumon +pourpre +poussin +pouvoir +prairie +pratique +précieux +prédire +préfixe +prélude +prénom +présence +prétexte +prévoir +primitif +prince +prison +priver +problème +procéder +prodige +profond +progrès +proie +projeter +prologue +promener +propre +prospère +protéger +prouesse +proverbe +prudence +pruneau +psychose +public +puceron +puiser +pulpe +pulsar +punaise +punitif +pupitre +purifier +puzzle +pyramide +quasar +querelle +question +quiétude +quitter +quotient +racine +raconter +radieux +ragondin +raideur +raisin +ralentir +rallonge +ramasser +rapide +rasage +ratisser +ravager +ravin +rayonner +réactif +réagir +réaliser +réanimer +recevoir +réciter +réclamer +récolter +recruter +reculer +recycler +rédiger +redouter +refaire +réflexe +réformer +refrain +refuge +régalien +région +réglage +régulier +réitérer +rejeter +rejouer +relatif +relever +relief +remarque +remède +remise +remonter +remplir +remuer +renard +renfort +renifler +renoncer +rentrer +renvoi +replier +reporter +reprise +reptile +requin +réserve +résineux +résoudre +respect +rester +résultat +rétablir +retenir +réticule +retomber +retracer +réunion +réussir +revanche +revivre +révolte +révulsif +richesse +rideau +rieur +rigide +rigoler +rincer +riposter +risible +risque +rituel +rival +rivière +rocheux +romance +rompre +ronce +rondin +roseau +rosier +rotatif +rotor +rotule +rouge +rouille +rouleau +routine +royaume +ruban +rubis +ruche +ruelle +rugueux +ruiner +ruisseau +ruser +rustique +rythme +sabler +saboter +sabre +sacoche +safari +sagesse +saisir +salade +salive +salon +saluer +samedi +sanction +sanglier +sarcasme +sardine +saturer +saugrenu +saumon +sauter +sauvage +savant +savonner +scalpel +scandale +scélérat +scénario +sceptre +schéma +science +scinder +score +scrutin +sculpter +séance +sécable +sécher +secouer +sécréter +sédatif +séduire +seigneur +séjour +sélectif +semaine +sembler +semence +séminal +sénateur +sensible +sentence +séparer +séquence +serein +sergent +sérieux +serrure +sérum +service +sésame +sévir +sevrage +sextuple +sidéral +siècle +siéger +siffler +sigle +signal +silence +silicium +simple +sincère +sinistre +siphon +sirop +sismique +situer +skier +social +socle +sodium +soigneux +soldat +soleil +solitude +soluble +sombre +sommeil +somnoler +sonde +songeur +sonnette +sonore +sorcier +sortir +sosie +sottise +soucieux +soudure +souffle +soulever +soupape +source +soutirer +souvenir +spacieux +spatial +spécial +sphère +spiral +stable +station +sternum +stimulus +stipuler +strict +studieux +stupeur +styliste +sublime +substrat +subtil +subvenir +succès +sucre +suffixe +suggérer +suiveur +sulfate +superbe +supplier +surface +suricate +surmener +surprise +sursaut +survie +suspect +syllabe +symbole +symétrie +synapse +syntaxe +système +tabac +tablier +tactile +tailler +talent +talisman +talonner +tambour +tamiser +tangible +tapis +taquiner +tarder +tarif +tartine +tasse +tatami +tatouage +taupe +taureau +taxer +témoin +temporel +tenaille +tendre +teneur +tenir +tension +terminer +terne +terrible +tétine +texte +thème +théorie +thérapie +thorax +tibia +tiède +timide +tirelire +tiroir +tissu +titane +titre +tituber +toboggan +tolérant +tomate +tonique +tonneau +toponyme +torche +tordre +tornade +torpille +torrent +torse +tortue +totem +toucher +tournage +tousser +toxine +traction +trafic +tragique +trahir +train +trancher +travail +trèfle +tremper +trésor +treuil +triage +tribunal +tricoter +trilogie +triomphe +tripler +triturer +trivial +trombone +tronc +tropical +troupeau +tuile +tulipe 
+tumulte +tunnel +turbine +tuteur +tutoyer +tuyau +tympan +typhon +typique +tyran +ubuesque +ultime +ultrason +unanime +unifier +union +unique +unitaire +univers +uranium +urbain +urticant +usage +usine +usuel +usure +utile +utopie +vacarme +vaccin +vagabond +vague +vaillant +vaincre +vaisseau +valable +valise +vallon +valve +vampire +vanille +vapeur +varier +vaseux +vassal +vaste +vecteur +vedette +végétal +véhicule +veinard +véloce +vendredi +vénérer +venger +venimeux +ventouse +verdure +vérin +vernir +verrou +verser +vertu +veston +vétéran +vétuste +vexant +vexer +viaduc +viande +victoire +vidange +vidéo +vignette +vigueur +vilain +village +vinaigre +violon +vipère +virement +virtuose +virus +visage +viseur +vision +visqueux +visuel +vital +vitesse +viticole +vitrine +vivace +vivipare +vocation +voguer +voile +voisin +voiture +volaille +volcan +voltiger +volume +vorace +vortex +voter +vouloir +voyage +voyelle +wagon +xénon +yacht +zèbre +zénith +zeste +zoologie +` diff --git a/third_party/go-bip39/wordlists/italian.go b/third_party/go-bip39/wordlists/italian.go new file mode 100644 index 0000000000..32cd2cf6d8 --- /dev/null +++ b/third_party/go-bip39/wordlists/italian.go @@ -0,0 +1,2071 @@ +package wordlists + +import ( + "fmt" + "hash/crc32" + "strings" +) + +func init() { + // Ensure word list is correct + // $ wget https://raw.githubusercontent.com/bitcoin/bips/master/bip-0039/italian.txt + // $ crc32 italian.txt + // 2fc7d07e + checksum := crc32.ChecksumIEEE([]byte(italian)) + if fmt.Sprintf("%x", checksum) != "2fc7d07e" { + panic("italian checksum invalid") + } +} + +// Italian is a slice of mnemonic words taken from the bip39 specification +// https://raw.githubusercontent.com/bitcoin/bips/master/bip-0039/italian.txt +var Italian = strings.Split(strings.TrimSpace(italian), "\n") +var italian = `abaco +abbaglio +abbinato +abete +abisso +abolire +abrasivo +abrogato +accadere +accenno +accusato +acetone +achille +acido +acqua +acre +acrilico +acrobata +acuto +adagio +addebito +addome +adeguato +aderire +adipe +adottare +adulare +affabile +affetto +affisso +affranto +aforisma +afoso +africano +agave +agente +agevole +aggancio +agire +agitare +agonismo +agricolo +agrumeto +aguzzo +alabarda +alato +albatro +alberato +albo +albume +alce +alcolico +alettone +alfa +algebra +aliante +alibi +alimento +allagato +allegro +allievo +allodola +allusivo +almeno +alogeno +alpaca +alpestre +altalena +alterno +alticcio +altrove +alunno +alveolo +alzare +amalgama +amanita +amarena +ambito +ambrato +ameba +america +ametista +amico +ammasso +ammenda +ammirare +ammonito +amore +ampio +ampliare +amuleto +anacardo +anagrafe +analista +anarchia +anatra +anca +ancella +ancora +andare +andrea +anello +angelo +angolare +angusto +anima +annegare +annidato +anno +annuncio +anonimo +anticipo +anzi +apatico +apertura +apode +apparire +appetito +appoggio +approdo +appunto +aprile +arabica +arachide +aragosta +araldica +arancio +aratura +arazzo +arbitro +archivio +ardito +arenile +argento +argine +arguto +aria +armonia +arnese +arredato +arringa +arrosto +arsenico +arso +artefice +arzillo +asciutto +ascolto +asepsi +asettico +asfalto +asino +asola +aspirato +aspro +assaggio +asse +assoluto +assurdo +asta +astenuto +astice +astratto +atavico +ateismo +atomico +atono +attesa +attivare +attorno +attrito +attuale +ausilio +austria +autista +autonomo +autunno +avanzato +avere +avvenire +avviso +avvolgere +azione +azoto +azzimo +azzurro +babele +baccano +bacino +baco +badessa +badilata +bagnato +baita +balcone +baldo 
+balena +ballata +balzano +bambino +bandire +baraonda +barbaro +barca +baritono +barlume +barocco +basilico +basso +batosta +battuto +baule +bava +bavosa +becco +beffa +belgio +belva +benda +benevole +benigno +benzina +bere +berlina +beta +bibita +bici +bidone +bifido +biga +bilancia +bimbo +binocolo +biologo +bipede +bipolare +birbante +birra +biscotto +bisesto +bisnonno +bisonte +bisturi +bizzarro +blando +blatta +bollito +bonifico +bordo +bosco +botanico +bottino +bozzolo +braccio +bradipo +brama +branca +bravura +bretella +brevetto +brezza +briglia +brillante +brindare +broccolo +brodo +bronzina +brullo +bruno +bubbone +buca +budino +buffone +buio +bulbo +buono +burlone +burrasca +bussola +busta +cadetto +caduco +calamaro +calcolo +calesse +calibro +calmo +caloria +cambusa +camerata +camicia +cammino +camola +campale +canapa +candela +cane +canino +canotto +cantina +capace +capello +capitolo +capogiro +cappero +capra +capsula +carapace +carcassa +cardo +carisma +carovana +carretto +cartolina +casaccio +cascata +caserma +caso +cassone +castello +casuale +catasta +catena +catrame +cauto +cavillo +cedibile +cedrata +cefalo +celebre +cellulare +cena +cenone +centesimo +ceramica +cercare +certo +cerume +cervello +cesoia +cespo +ceto +chela +chiaro +chicca +chiedere +chimera +china +chirurgo +chitarra +ciao +ciclismo +cifrare +cigno +cilindro +ciottolo +circa +cirrosi +citrico +cittadino +ciuffo +civetta +civile +classico +clinica +cloro +cocco +codardo +codice +coerente +cognome +collare +colmato +colore +colposo +coltivato +colza +coma +cometa +commando +comodo +computer +comune +conciso +condurre +conferma +congelare +coniuge +connesso +conoscere +consumo +continuo +convegno +coperto +copione +coppia +copricapo +corazza +cordata +coricato +cornice +corolla +corpo +corredo +corsia +cortese +cosmico +costante +cottura +covato +cratere +cravatta +creato +credere +cremoso +crescita +creta +criceto +crinale +crisi +critico +croce +cronaca +crostata +cruciale +crusca +cucire +cuculo +cugino +cullato +cupola +curatore +cursore +curvo +cuscino +custode +dado +daino +dalmata +damerino +daniela +dannoso +danzare +datato +davanti +davvero +debutto +decennio +deciso +declino +decollo +decreto +dedicato +definito +deforme +degno +delegare +delfino +delirio +delta +demenza +denotato +dentro +deposito +derapata +derivare +deroga +descritto +deserto +desiderio +desumere +detersivo +devoto +diametro +dicembre +diedro +difeso +diffuso +digerire +digitale +diluvio +dinamico +dinnanzi +dipinto +diploma +dipolo +diradare +dire +dirotto +dirupo +disagio +discreto +disfare +disgelo +disposto +distanza +disumano +dito +divano +divelto +dividere +divorato +doblone +docente +doganale +dogma +dolce +domato +domenica +dominare +dondolo +dono +dormire +dote +dottore +dovuto +dozzina +drago +druido +dubbio +dubitare +ducale +duna +duomo +duplice +duraturo +ebano +eccesso +ecco +eclissi +economia +edera +edicola +edile +editoria +educare +egemonia +egli +egoismo +egregio +elaborato +elargire +elegante +elencato +eletto +elevare +elfico +elica +elmo +elsa +eluso +emanato +emblema +emesso +emiro +emotivo +emozione +empirico +emulo +endemico +enduro +energia +enfasi +enoteca +entrare +enzima +epatite +epilogo +episodio +epocale +eppure +equatore +erario +erba +erboso +erede +eremita +erigere +ermetico +eroe +erosivo +errante +esagono +esame +esanime +esaudire +esca +esempio +esercito +esibito +esigente +esistere +esito +esofago +esortato +esoso +espanso +espresso +essenza +esso +esteso +estimare +estonia +estroso 
+esultare +etilico +etnico +etrusco +etto +euclideo +europa +evaso +evidenza +evitato +evoluto +evviva +fabbrica +faccenda +fachiro +falco +famiglia +fanale +fanfara +fango +fantasma +fare +farfalla +farinoso +farmaco +fascia +fastoso +fasullo +faticare +fato +favoloso +febbre +fecola +fede +fegato +felpa +feltro +femmina +fendere +fenomeno +fermento +ferro +fertile +fessura +festivo +fetta +feudo +fiaba +fiducia +fifa +figurato +filo +finanza +finestra +finire +fiore +fiscale +fisico +fiume +flacone +flamenco +flebo +flemma +florido +fluente +fluoro +fobico +focaccia +focoso +foderato +foglio +folata +folclore +folgore +fondente +fonetico +fonia +fontana +forbito +forchetta +foresta +formica +fornaio +foro +fortezza +forzare +fosfato +fosso +fracasso +frana +frassino +fratello +freccetta +frenata +fresco +frigo +frollino +fronde +frugale +frutta +fucilata +fucsia +fuggente +fulmine +fulvo +fumante +fumetto +fumoso +fune +funzione +fuoco +furbo +furgone +furore +fuso +futile +gabbiano +gaffe +galateo +gallina +galoppo +gambero +gamma +garanzia +garbo +garofano +garzone +gasdotto +gasolio +gastrico +gatto +gaudio +gazebo +gazzella +geco +gelatina +gelso +gemello +gemmato +gene +genitore +gennaio +genotipo +gergo +ghepardo +ghiaccio +ghisa +giallo +gilda +ginepro +giocare +gioiello +giorno +giove +girato +girone +gittata +giudizio +giurato +giusto +globulo +glutine +gnomo +gobba +golf +gomito +gommone +gonfio +gonna +governo +gracile +grado +grafico +grammo +grande +grattare +gravoso +grazia +greca +gregge +grifone +grigio +grinza +grotta +gruppo +guadagno +guaio +guanto +guardare +gufo +guidare +ibernato +icona +identico +idillio +idolo +idra +idrico +idrogeno +igiene +ignaro +ignorato +ilare +illeso +illogico +illudere +imballo +imbevuto +imbocco +imbuto +immane +immerso +immolato +impacco +impeto +impiego +importo +impronta +inalare +inarcare +inattivo +incanto +incendio +inchino +incisivo +incluso +incontro +incrocio +incubo +indagine +india +indole +inedito +infatti +infilare +inflitto +ingaggio +ingegno +inglese +ingordo +ingrosso +innesco +inodore +inoltrare +inondato +insano +insetto +insieme +insonnia +insulina +intasato +intero +intonaco +intuito +inumidire +invalido +invece +invito +iperbole +ipnotico +ipotesi +ippica +iride +irlanda +ironico +irrigato +irrorare +isolato +isotopo +isterico +istituto +istrice +italia +iterare +labbro +labirinto +lacca +lacerato +lacrima +lacuna +laddove +lago +lampo +lancetta +lanterna +lardoso +larga +laringe +lastra +latenza +latino +lattuga +lavagna +lavoro +legale +leggero +lembo +lentezza +lenza +leone +lepre +lesivo +lessato +lesto +letterale +leva +levigato +libero +lido +lievito +lilla +limatura +limitare +limpido +lineare +lingua +liquido +lira +lirica +lisca +lite +litigio +livrea +locanda +lode +logica +lombare +londra +longevo +loquace +lorenzo +loto +lotteria +luce +lucidato +lumaca +luminoso +lungo +lupo +luppolo +lusinga +lusso +lutto +macabro +macchina +macero +macinato +madama +magico +maglia +magnete +magro +maiolica +malafede +malgrado +malinteso +malsano +malto +malumore +mana +mancia +mandorla +mangiare +manifesto +mannaro +manovra +mansarda +mantide +manubrio +mappa +maratona +marcire +maretta +marmo +marsupio +maschera +massaia +mastino +materasso +matricola +mattone +maturo +mazurca +meandro +meccanico +mecenate +medesimo +meditare +mega +melassa +melis +melodia +meninge +meno +mensola +mercurio +merenda +merlo +meschino +mese +messere +mestolo +metallo +metodo +mettere +miagolare +mica +micelio +michele +microbo +midollo 
+miele +migliore +milano +milite +mimosa +minerale +mini +minore +mirino +mirtillo +miscela +missiva +misto +misurare +mitezza +mitigare +mitra +mittente +mnemonico +modello +modifica +modulo +mogano +mogio +mole +molosso +monastero +monco +mondina +monetario +monile +monotono +monsone +montato +monviso +mora +mordere +morsicato +mostro +motivato +motosega +motto +movenza +movimento +mozzo +mucca +mucosa +muffa +mughetto +mugnaio +mulatto +mulinello +multiplo +mummia +munto +muovere +murale +musa +muscolo +musica +mutevole +muto +nababbo +nafta +nanometro +narciso +narice +narrato +nascere +nastrare +naturale +nautica +naviglio +nebulosa +necrosi +negativo +negozio +nemmeno +neofita +neretto +nervo +nessuno +nettuno +neutrale +neve +nevrotico +nicchia +ninfa +nitido +nobile +nocivo +nodo +nome +nomina +nordico +normale +norvegese +nostrano +notare +notizia +notturno +novella +nucleo +nulla +numero +nuovo +nutrire +nuvola +nuziale +oasi +obbedire +obbligo +obelisco +oblio +obolo +obsoleto +occasione +occhio +occidente +occorrere +occultare +ocra +oculato +odierno +odorare +offerta +offrire +offuscato +oggetto +oggi +ognuno +olandese +olfatto +oliato +oliva +ologramma +oltre +omaggio +ombelico +ombra +omega +omissione +ondoso +onere +onice +onnivoro +onorevole +onta +operato +opinione +opposto +oracolo +orafo +ordine +orecchino +orefice +orfano +organico +origine +orizzonte +orma +ormeggio +ornativo +orologio +orrendo +orribile +ortensia +ortica +orzata +orzo +osare +oscurare +osmosi +ospedale +ospite +ossa +ossidare +ostacolo +oste +otite +otre +ottagono +ottimo +ottobre +ovale +ovest +ovino +oviparo +ovocito +ovunque +ovviare +ozio +pacchetto +pace +pacifico +padella +padrone +paese +paga +pagina +palazzina +palesare +pallido +palo +palude +pandoro +pannello +paolo +paonazzo +paprica +parabola +parcella +parere +pargolo +pari +parlato +parola +partire +parvenza +parziale +passivo +pasticca +patacca +patologia +pattume +pavone +peccato +pedalare +pedonale +peggio +peloso +penare +pendice +penisola +pennuto +penombra +pensare +pentola +pepe +pepita +perbene +percorso +perdonato +perforare +pergamena +periodo +permesso +perno +perplesso +persuaso +pertugio +pervaso +pesatore +pesista +peso +pestifero +petalo +pettine +petulante +pezzo +piacere +pianta +piattino +piccino +picozza +piega +pietra +piffero +pigiama +pigolio +pigro +pila +pilifero +pillola +pilota +pimpante +pineta +pinna +pinolo +pioggia +piombo +piramide +piretico +pirite +pirolisi +pitone +pizzico +placebo +planare +plasma +platano +plenario +pochezza +poderoso +podismo +poesia +poggiare +polenta +poligono +pollice +polmonite +polpetta +polso +poltrona +polvere +pomice +pomodoro +ponte +popoloso +porfido +poroso +porpora +porre +portata +posa +positivo +possesso +postulato +potassio +potere +pranzo +prassi +pratica +precluso +predica +prefisso +pregiato +prelievo +premere +prenotare +preparato +presenza +pretesto +prevalso +prima +principe +privato +problema +procura +produrre +profumo +progetto +prolunga +promessa +pronome +proposta +proroga +proteso +prova +prudente +prugna +prurito +psiche +pubblico +pudica +pugilato +pugno +pulce +pulito +pulsante +puntare +pupazzo +pupilla +puro +quadro +qualcosa +quasi +querela +quota +raccolto +raddoppio +radicale +radunato +raffica +ragazzo +ragione +ragno +ramarro +ramingo +ramo +randagio +rantolare +rapato +rapina +rappreso +rasatura +raschiato +rasente +rassegna +rastrello +rata +ravveduto +reale +recepire +recinto +recluta +recondito +recupero +reddito +redimere +regalato +registro 
+regola +regresso +relazione +remare +remoto +renna +replica +reprimere +reputare +resa +residente +responso +restauro +rete +retina +retorica +rettifica +revocato +riassunto +ribadire +ribelle +ribrezzo +ricarica +ricco +ricevere +riciclato +ricordo +ricreduto +ridicolo +ridurre +rifasare +riflesso +riforma +rifugio +rigare +rigettato +righello +rilassato +rilevato +rimanere +rimbalzo +rimedio +rimorchio +rinascita +rincaro +rinforzo +rinnovo +rinomato +rinsavito +rintocco +rinuncia +rinvenire +riparato +ripetuto +ripieno +riportare +ripresa +ripulire +risata +rischio +riserva +risibile +riso +rispetto +ristoro +risultato +risvolto +ritardo +ritegno +ritmico +ritrovo +riunione +riva +riverso +rivincita +rivolto +rizoma +roba +robotico +robusto +roccia +roco +rodaggio +rodere +roditore +rogito +rollio +romantico +rompere +ronzio +rosolare +rospo +rotante +rotondo +rotula +rovescio +rubizzo +rubrica +ruga +rullino +rumine +rumoroso +ruolo +rupe +russare +rustico +sabato +sabbiare +sabotato +sagoma +salasso +saldatura +salgemma +salivare +salmone +salone +saltare +saluto +salvo +sapere +sapido +saporito +saraceno +sarcasmo +sarto +sassoso +satellite +satira +satollo +saturno +savana +savio +saziato +sbadiglio +sbalzo +sbancato +sbarra +sbattere +sbavare +sbendare +sbirciare +sbloccato +sbocciato +sbrinare +sbruffone +sbuffare +scabroso +scadenza +scala +scambiare +scandalo +scapola +scarso +scatenare +scavato +scelto +scenico +scettro +scheda +schiena +sciarpa +scienza +scindere +scippo +sciroppo +scivolo +sclerare +scodella +scolpito +scomparto +sconforto +scoprire +scorta +scossone +scozzese +scriba +scrollare +scrutinio +scuderia +scultore +scuola +scuro +scusare +sdebitare +sdoganare +seccatura +secondo +sedano +seggiola +segnalato +segregato +seguito +selciato +selettivo +sella +selvaggio +semaforo +sembrare +seme +seminato +sempre +senso +sentire +sepolto +sequenza +serata +serbato +sereno +serio +serpente +serraglio +servire +sestina +setola +settimana +sfacelo +sfaldare +sfamato +sfarzoso +sfaticato +sfera +sfida +sfilato +sfinge +sfocato +sfoderare +sfogo +sfoltire +sforzato +sfratto +sfruttato +sfuggito +sfumare +sfuso +sgabello +sgarbato +sgonfiare +sgorbio +sgrassato +sguardo +sibilo +siccome +sierra +sigla +signore +silenzio +sillaba +simbolo +simpatico +simulato +sinfonia +singolo +sinistro +sino +sintesi +sinusoide +sipario +sisma +sistole +situato +slitta +slogatura +sloveno +smarrito +smemorato +smentito +smeraldo +smilzo +smontare +smottato +smussato +snellire +snervato +snodo +sobbalzo +sobrio +soccorso +sociale +sodale +soffitto +sogno +soldato +solenne +solido +sollazzo +solo +solubile +solvente +somatico +somma +sonda +sonetto +sonnifero +sopire +soppeso +sopra +sorgere +sorpasso +sorriso +sorso +sorteggio +sorvolato +sospiro +sosta +sottile +spada +spalla +spargere +spatola +spavento +spazzola +specie +spedire +spegnere +spelatura +speranza +spessore +spettrale +spezzato +spia +spigoloso +spillato +spinoso +spirale +splendido +sportivo +sposo +spranga +sprecare +spronato +spruzzo +spuntino +squillo +sradicare +srotolato +stabile +stacco +staffa +stagnare +stampato +stantio +starnuto +stasera +statuto +stelo +steppa +sterzo +stiletto +stima +stirpe +stivale +stizzoso +stonato +storico +strappo +stregato +stridulo +strozzare +strutto +stuccare +stufo +stupendo +subentro +succoso +sudore +suggerito +sugo +sultano +suonare +superbo +supporto +surgelato +surrogato +sussurro +sutura +svagare +svedese +sveglio +svelare +svenuto +svezia +sviluppo +svista +svizzera +svolta 
+svuotare +tabacco +tabulato +tacciare +taciturno +tale +talismano +tampone +tannino +tara +tardivo +targato +tariffa +tarpare +tartaruga +tasto +tattico +taverna +tavolata +tazza +teca +tecnico +telefono +temerario +tempo +temuto +tendone +tenero +tensione +tentacolo +teorema +terme +terrazzo +terzetto +tesi +tesserato +testato +tetro +tettoia +tifare +tigella +timbro +tinto +tipico +tipografo +tiraggio +tiro +titanio +titolo +titubante +tizio +tizzone +toccare +tollerare +tolto +tombola +tomo +tonfo +tonsilla +topazio +topologia +toppa +torba +tornare +torrone +tortora +toscano +tossire +tostatura +totano +trabocco +trachea +trafila +tragedia +tralcio +tramonto +transito +trapano +trarre +trasloco +trattato +trave +treccia +tremolio +trespolo +tributo +tricheco +trifoglio +trillo +trincea +trio +tristezza +triturato +trivella +tromba +trono +troppo +trottola +trovare +truccato +tubatura +tuffato +tulipano +tumulto +tunisia +turbare +turchino +tuta +tutela +ubicato +uccello +uccisore +udire +uditivo +uffa +ufficio +uguale +ulisse +ultimato +umano +umile +umorismo +uncinetto +ungere +ungherese +unicorno +unificato +unisono +unitario +unte +uovo +upupa +uragano +urgenza +urlo +usanza +usato +uscito +usignolo +usuraio +utensile +utilizzo +utopia +vacante +vaccinato +vagabondo +vagliato +valanga +valgo +valico +valletta +valoroso +valutare +valvola +vampata +vangare +vanitoso +vano +vantaggio +vanvera +vapore +varano +varcato +variante +vasca +vedetta +vedova +veduto +vegetale +veicolo +velcro +velina +velluto +veloce +venato +vendemmia +vento +verace +verbale +vergogna +verifica +vero +verruca +verticale +vescica +vessillo +vestale +veterano +vetrina +vetusto +viandante +vibrante +vicenda +vichingo +vicinanza +vidimare +vigilia +vigneto +vigore +vile +villano +vimini +vincitore +viola +vipera +virgola +virologo +virulento +viscoso +visione +vispo +vissuto +visura +vita +vitello +vittima +vivanda +vivido +viziare +voce +voga +volatile +volere +volpe +voragine +vulcano +zampogna +zanna +zappato +zattera +zavorra +zefiro +zelante +zelo +zenzero +zerbino +zibetto +zinco +zircone +zitto +zolla +zotico +zucchero +zufolo +zulu +zuppa +` diff --git a/third_party/go-bip39/wordlists/japanese.go b/third_party/go-bip39/wordlists/japanese.go new file mode 100644 index 0000000000..23a76a3431 --- /dev/null +++ b/third_party/go-bip39/wordlists/japanese.go @@ -0,0 +1,2071 @@ +package wordlists + +import ( + "fmt" + "hash/crc32" + "strings" +) + +func init() { + // Ensure word list is correct + // $ wget https://raw.githubusercontent.com/bitcoin/bips/master/bip-0039/japanese.txt + // $ crc32 japanese.txt + // 0acc1419 + checksum := crc32.ChecksumIEEE([]byte(japanese)) + if fmt.Sprintf("%x", checksum) != "acc1419" { + panic(fmt.Sprintf("japanese checksum invalid: %x", checksum)) + } +} + +// Japanese is a slice of mnemonic words taken from the bip39 specification +// https://raw.githubusercontent.com/bitcoin/bips/master/bip-0039/japanese.txt +var Japanese = strings.Split(strings.TrimSpace(japanese), "\n") +var japanese = `あいこくしん +あいさつ +あいだ +あおぞら +あかちゃん +あきる +あけがた +あける +あこがれる +あさい +あさひ +あしあと +あじわう +あずかる +あずき +あそぶ +あたえる +あたためる +あたりまえ +あたる +あつい +あつかう +あっしゅく +あつまり +あつめる +あてな +あてはまる +あひる +あぶら +あぶる +あふれる +あまい +あまど +あまやかす +あまり +あみもの +あめりか +あやまる +あゆむ +あらいぐま +あらし +あらすじ +あらためる +あらゆる +あらわす +ありがとう +あわせる +あわてる +あんい +あんがい +あんこ +あんぜん +あんてい +あんない +あんまり +いいだす +いおん +いがい +いがく +いきおい +いきなり +いきもの +いきる +いくじ +いくぶん +いけばな +いけん +いこう +いこく +いこつ +いさましい +いさん +いしき +いじゅう +いじょう +いじわる +いずみ +いずれ +いせい +いせえび +いせかい +いせき +いぜん +いそうろう +いそがしい +いだい +いだく 
+いたずら +いたみ +いたりあ +いちおう +いちじ +いちど +いちば +いちぶ +いちりゅう +いつか +いっしゅん +いっせい +いっそう +いったん +いっち +いってい +いっぽう +いてざ +いてん +いどう +いとこ +いない +いなか +いねむり +いのち +いのる +いはつ +いばる +いはん +いびき +いひん +いふく +いへん +いほう +いみん +いもうと +いもたれ +いもり +いやがる +いやす +いよかん +いよく +いらい +いらすと +いりぐち +いりょう +いれい +いれもの +いれる +いろえんぴつ +いわい +いわう +いわかん +いわば +いわゆる +いんげんまめ +いんさつ +いんしょう +いんよう +うえき +うえる +うおざ +うがい +うかぶ +うかべる +うきわ +うくらいな +うくれれ +うけたまわる +うけつけ +うけとる +うけもつ +うける +うごかす +うごく +うこん +うさぎ +うしなう +うしろがみ +うすい +うすぎ +うすぐらい +うすめる +うせつ +うちあわせ +うちがわ +うちき +うちゅう +うっかり +うつくしい +うったえる +うつる +うどん +うなぎ +うなじ +うなずく +うなる +うねる +うのう +うぶげ +うぶごえ +うまれる +うめる +うもう +うやまう +うよく +うらがえす +うらぐち +うらない +うりあげ +うりきれ +うるさい +うれしい +うれゆき +うれる +うろこ +うわき +うわさ +うんこう +うんちん +うんてん +うんどう +えいえん +えいが +えいきょう +えいご +えいせい +えいぶん +えいよう +えいわ +えおり +えがお +えがく +えきたい +えくせる +えしゃく +えすて +えつらん +えのぐ +えほうまき +えほん +えまき +えもじ +えもの +えらい +えらぶ +えりあ +えんえん +えんかい +えんぎ +えんげき +えんしゅう +えんぜつ +えんそく +えんちょう +えんとつ +おいかける +おいこす +おいしい +おいつく +おうえん +おうさま +おうじ +おうせつ +おうたい +おうふく +おうべい +おうよう +おえる +おおい +おおう +おおどおり +おおや +おおよそ +おかえり +おかず +おがむ +おかわり +おぎなう +おきる +おくさま +おくじょう +おくりがな +おくる +おくれる +おこす +おこなう +おこる +おさえる +おさない +おさめる +おしいれ +おしえる +おじぎ +おじさん +おしゃれ +おそらく +おそわる +おたがい +おたく +おだやか +おちつく +おっと +おつり +おでかけ +おとしもの +おとなしい +おどり +おどろかす +おばさん +おまいり +おめでとう +おもいで +おもう +おもたい +おもちゃ +おやつ +おやゆび +およぼす +おらんだ +おろす +おんがく +おんけい +おんしゃ +おんせん +おんだん +おんちゅう +おんどけい +かあつ +かいが +がいき +がいけん +がいこう +かいさつ +かいしゃ +かいすいよく +かいぜん +かいぞうど +かいつう +かいてん +かいとう +かいふく +がいへき +かいほう +かいよう +がいらい +かいわ +かえる +かおり +かかえる +かがく +かがし +かがみ +かくご +かくとく +かざる +がぞう +かたい +かたち +がちょう +がっきゅう +がっこう +がっさん +がっしょう +かなざわし +かのう +がはく +かぶか +かほう +かほご +かまう +かまぼこ +かめれおん +かゆい +かようび +からい +かるい +かろう +かわく +かわら +がんか +かんけい +かんこう +かんしゃ +かんそう +かんたん +かんち +がんばる +きあい +きあつ +きいろ +ぎいん +きうい +きうん +きえる +きおう +きおく +きおち +きおん +きかい +きかく +きかんしゃ +ききて +きくばり +きくらげ +きけんせい +きこう +きこえる +きこく +きさい +きさく +きさま +きさらぎ +ぎじかがく +ぎしき +ぎじたいけん +ぎじにってい +ぎじゅつしゃ +きすう +きせい +きせき +きせつ +きそう +きぞく +きぞん +きたえる +きちょう +きつえん +ぎっちり +きつつき +きつね +きてい +きどう +きどく +きない +きなが +きなこ +きぬごし +きねん +きのう +きのした +きはく +きびしい +きひん +きふく +きぶん +きぼう +きほん +きまる +きみつ +きむずかしい +きめる +きもだめし +きもち +きもの +きゃく +きやく +ぎゅうにく +きよう +きょうりゅう +きらい +きらく +きりん +きれい +きれつ +きろく +ぎろん +きわめる +ぎんいろ +きんかくじ +きんじょ +きんようび +ぐあい +くいず +くうかん +くうき +くうぐん +くうこう +ぐうせい +くうそう +ぐうたら +くうふく +くうぼ +くかん +くきょう +くげん +ぐこう +くさい +くさき +くさばな +くさる +くしゃみ +くしょう +くすのき +くすりゆび +くせげ +くせん +ぐたいてき +くださる +くたびれる +くちこみ +くちさき +くつした +ぐっすり +くつろぐ +くとうてん +くどく +くなん +くねくね +くのう +くふう +くみあわせ +くみたてる +くめる +くやくしょ +くらす +くらべる +くるま +くれる +くろう +くわしい +ぐんかん +ぐんしょく +ぐんたい +ぐんて +けあな +けいかく +けいけん +けいこ +けいさつ +げいじゅつ +けいたい +げいのうじん +けいれき +けいろ +けおとす +けおりもの +げきか +げきげん +げきだん +げきちん +げきとつ +げきは +げきやく +げこう +げこくじょう +げざい +けさき +げざん +けしき +けしごむ +けしょう +げすと +けたば +けちゃっぷ +けちらす +けつあつ +けつい +けつえき +けっこん +けつじょ +けっせき +けってい +けつまつ +げつようび +げつれい +けつろん +げどく +けとばす +けとる +けなげ +けなす +けなみ +けぬき +げねつ +けねん +けはい +げひん +けぶかい +げぼく +けまり +けみかる +けむし +けむり +けもの +けらい +けろけろ +けわしい +けんい +けんえつ +けんお +けんか +げんき +けんげん +けんこう +けんさく +けんしゅう +けんすう +げんそう +けんちく +けんてい +けんとう +けんない +けんにん +げんぶつ +けんま +けんみん +けんめい +けんらん +けんり +こあくま +こいぬ +こいびと +ごうい +こうえん +こうおん +こうかん +ごうきゅう +ごうけい +こうこう +こうさい +こうじ +こうすい +ごうせい +こうそく +こうたい +こうちゃ +こうつう +こうてい +こうどう +こうない +こうはい +ごうほう +ごうまん +こうもく +こうりつ +こえる +こおり +ごかい +ごがつ +ごかん +こくご +こくさい +こくとう +こくない +こくはく +こぐま +こけい +こける +ここのか +こころ +こさめ +こしつ +こすう +こせい +こせき +こぜん +こそだて +こたい +こたえる +こたつ +こちょう +こっか +こつこつ +こつばん +こつぶ +こてい +こてん +ことがら +ことし +ことば +ことり +こなごな +こねこね +このまま +このみ +このよ +ごはん +こひつじ +こふう +こふん +こぼれる +ごまあぶら +こまかい +ごますり +こまつな +こまる +こむぎこ +こもじ +こもち +こもの +こもん +こやく +こやま +こゆう +こゆび +こよい +こよう +こりる +これくしょん +ころっけ +こわもて +こわれる +こんいん +こんかい +こんき +こんしゅう +こんすい +こんだて +こんとん +こんなん +こんびに +こんぽん +こんまけ +こんや +こんれい +こんわく +ざいえき +さいかい +さいきん +ざいげん +ざいこ +さいしょ +さいせい +ざいたく +ざいちゅう +さいてき 
+ざいりょう +さうな +さかいし +さがす +さかな +さかみち +さがる +さぎょう +さくし +さくひん +さくら +さこく +さこつ +さずかる +ざせき +さたん +さつえい +ざつおん +ざっか +ざつがく +さっきょく +ざっし +さつじん +ざっそう +さつたば +さつまいも +さてい +さといも +さとう +さとおや +さとし +さとる +さのう +さばく +さびしい +さべつ +さほう +さほど +さます +さみしい +さみだれ +さむけ +さめる +さやえんどう +さゆう +さよう +さよく +さらだ +ざるそば +さわやか +さわる +さんいん +さんか +さんきゃく +さんこう +さんさい +ざんしょ +さんすう +さんせい +さんそ +さんち +さんま +さんみ +さんらん +しあい +しあげ +しあさって +しあわせ +しいく +しいん +しうち +しえい +しおけ +しかい +しかく +じかん +しごと +しすう +じだい +したうけ +したぎ +したて +したみ +しちょう +しちりん +しっかり +しつじ +しつもん +してい +してき +してつ +じてん +じどう +しなぎれ +しなもの +しなん +しねま +しねん +しのぐ +しのぶ +しはい +しばかり +しはつ +しはらい +しはん +しひょう +しふく +じぶん +しへい +しほう +しほん +しまう +しまる +しみん +しむける +じむしょ +しめい +しめる +しもん +しゃいん +しゃうん +しゃおん +じゃがいも +しやくしょ +しゃくほう +しゃけん +しゃこ +しゃざい +しゃしん +しゃせん +しゃそう +しゃたい +しゃちょう +しゃっきん +じゃま +しゃりん +しゃれい +じゆう +じゅうしょ +しゅくはく +じゅしん +しゅっせき +しゅみ +しゅらば +じゅんばん +しょうかい +しょくたく +しょっけん +しょどう +しょもつ +しらせる +しらべる +しんか +しんこう +じんじゃ +しんせいじ +しんちく +しんりん +すあげ +すあし +すあな +ずあん +すいえい +すいか +すいとう +ずいぶん +すいようび +すうがく +すうじつ +すうせん +すおどり +すきま +すくう +すくない +すける +すごい +すこし +ずさん +すずしい +すすむ +すすめる +すっかり +ずっしり +ずっと +すてき +すてる +すねる +すのこ +すはだ +すばらしい +ずひょう +ずぶぬれ +すぶり +すふれ +すべて +すべる +ずほう +すぼん +すまい +すめし +すもう +すやき +すらすら +するめ +すれちがう +すろっと +すわる +すんぜん +すんぽう +せあぶら +せいかつ +せいげん +せいじ +せいよう +せおう +せかいかん +せきにん +せきむ +せきゆ +せきらんうん +せけん +せこう +せすじ +せたい +せたけ +せっかく +せっきゃく +ぜっく +せっけん +せっこつ +せっさたくま +せつぞく +せつだん +せつでん +せっぱん +せつび +せつぶん +せつめい +せつりつ +せなか +せのび +せはば +せびろ +せぼね +せまい +せまる +せめる +せもたれ +せりふ +ぜんあく +せんい +せんえい +せんか +せんきょ +せんく +せんげん +ぜんご +せんさい +せんしゅ +せんすい +せんせい +せんぞ +せんたく +せんちょう +せんてい +せんとう +せんぬき +せんねん +せんぱい +ぜんぶ +ぜんぽう +せんむ +せんめんじょ +せんもん +せんやく +せんゆう +せんよう +ぜんら +ぜんりゃく +せんれい +せんろ +そあく +そいとげる +そいね +そうがんきょう +そうき +そうご +そうしん +そうだん +そうなん +そうび +そうめん +そうり +そえもの +そえん +そがい +そげき +そこう +そこそこ +そざい +そしな +そせい +そせん +そそぐ +そだてる +そつう +そつえん +そっかん +そつぎょう +そっけつ +そっこう +そっせん +そっと +そとがわ +そとづら +そなえる +そなた +そふぼ +そぼく +そぼろ +そまつ +そまる +そむく +そむりえ +そめる +そもそも +そよかぜ +そらまめ +そろう +そんかい +そんけい +そんざい +そんしつ +そんぞく +そんちょう +ぞんび +ぞんぶん +そんみん +たあい +たいいん +たいうん +たいえき +たいおう +だいがく +たいき +たいぐう +たいけん +たいこ +たいざい +だいじょうぶ +だいすき +たいせつ +たいそう +だいたい +たいちょう +たいてい +だいどころ +たいない +たいねつ +たいのう +たいはん +だいひょう +たいふう +たいへん +たいほ +たいまつばな +たいみんぐ +たいむ +たいめん +たいやき +たいよう +たいら +たいりょく +たいる +たいわん +たうえ +たえる +たおす +たおる +たおれる +たかい +たかね +たきび +たくさん +たこく +たこやき +たさい +たしざん +だじゃれ +たすける +たずさわる +たそがれ +たたかう +たたく +ただしい +たたみ +たちばな +だっかい +だっきゃく +だっこ +だっしゅつ +だったい +たてる +たとえる +たなばた +たにん +たぬき +たのしみ +たはつ +たぶん +たべる +たぼう +たまご +たまる +だむる +ためいき +ためす +ためる +たもつ +たやすい +たよる +たらす +たりきほんがん +たりょう +たりる +たると +たれる +たれんと +たろっと +たわむれる +だんあつ +たんい +たんおん +たんか +たんき +たんけん +たんご +たんさん +たんじょうび +だんせい +たんそく +たんたい +だんち +たんてい +たんとう +だんな +たんにん +だんねつ +たんのう +たんぴん +だんぼう +たんまつ +たんめい +だんれつ +だんろ +だんわ +ちあい +ちあん +ちいき +ちいさい +ちえん +ちかい +ちから +ちきゅう +ちきん +ちけいず +ちけん +ちこく +ちさい +ちしき +ちしりょう +ちせい +ちそう +ちたい +ちたん +ちちおや +ちつじょ +ちてき +ちてん +ちぬき +ちぬり +ちのう +ちひょう +ちへいせん +ちほう +ちまた +ちみつ +ちみどろ +ちめいど +ちゃんこなべ +ちゅうい +ちゆりょく +ちょうし +ちょさくけん +ちらし +ちらみ +ちりがみ +ちりょう +ちるど +ちわわ +ちんたい +ちんもく +ついか +ついたち +つうか +つうじょう +つうはん +つうわ +つかう +つかれる +つくね +つくる +つけね +つける +つごう +つたえる +つづく +つつじ +つつむ +つとめる +つながる +つなみ +つねづね +つのる +つぶす +つまらない +つまる +つみき +つめたい +つもり +つもる +つよい +つるぼ +つるみく +つわもの +つわり +てあし +てあて +てあみ +ていおん +ていか +ていき +ていけい +ていこく +ていさつ +ていし +ていせい +ていたい +ていど +ていねい +ていひょう +ていへん +ていぼう +てうち +ておくれ +てきとう +てくび +でこぼこ +てさぎょう +てさげ +てすり +てそう +てちがい +てちょう +てつがく +てつづき +でっぱ +てつぼう +てつや +でぬかえ +てぬき +てぬぐい +てのひら +てはい +てぶくろ +てふだ +てほどき +てほん +てまえ +てまきずし +てみじか +てみやげ +てらす +てれび +てわけ +てわたし +でんあつ +てんいん +てんかい +てんき +てんぐ +てんけん +てんごく +てんさい +てんし +てんすう +でんち +てんてき +てんとう +てんない +てんぷら +てんぼうだい +てんめつ +てんらんかい +でんりょく +でんわ +どあい +といれ +どうかん +とうきゅう +どうぐ +とうし +とうむぎ +とおい +とおか +とおく +とおす +とおる +とかい +とかす +ときおり +ときどき +とくい +とくしゅう +とくてん +とくに +とくべつ +とけい +とける +とこや 
+とさか +としょかん +とそう +とたん +とちゅう +とっきゅう +とっくん +とつぜん +とつにゅう +とどける +ととのえる +とない +となえる +となり +とのさま +とばす +どぶがわ +とほう +とまる +とめる +ともだち +ともる +どようび +とらえる +とんかつ +どんぶり +ないかく +ないこう +ないしょ +ないす +ないせん +ないそう +なおす +ながい +なくす +なげる +なこうど +なさけ +なたでここ +なっとう +なつやすみ +ななおし +なにごと +なにもの +なにわ +なのか +なふだ +なまいき +なまえ +なまみ +なみだ +なめらか +なめる +なやむ +ならう +ならび +ならぶ +なれる +なわとび +なわばり +にあう +にいがた +にうけ +におい +にかい +にがて +にきび +にくしみ +にくまん +にげる +にさんかたんそ +にしき +にせもの +にちじょう +にちようび +にっか +にっき +にっけい +にっこう +にっさん +にっしょく +にっすう +にっせき +にってい +になう +にほん +にまめ +にもつ +にやり +にゅういん +にりんしゃ +にわとり +にんい +にんか +にんき +にんげん +にんしき +にんずう +にんそう +にんたい +にんち +にんてい +にんにく +にんぷ +にんまり +にんむ +にんめい +にんよう +ぬいくぎ +ぬかす +ぬぐいとる +ぬぐう +ぬくもり +ぬすむ +ぬまえび +ぬめり +ぬらす +ぬんちゃく +ねあげ +ねいき +ねいる +ねいろ +ねぐせ +ねくたい +ねくら +ねこぜ +ねこむ +ねさげ +ねすごす +ねそべる +ねだん +ねつい +ねっしん +ねつぞう +ねったいぎょ +ねぶそく +ねふだ +ねぼう +ねほりはほり +ねまき +ねまわし +ねみみ +ねむい +ねむたい +ねもと +ねらう +ねわざ +ねんいり +ねんおし +ねんかん +ねんきん +ねんぐ +ねんざ +ねんし +ねんちゃく +ねんど +ねんぴ +ねんぶつ +ねんまつ +ねんりょう +ねんれい +のいず +のおづま +のがす +のきなみ +のこぎり +のこす +のこる +のせる +のぞく +のぞむ +のたまう +のちほど +のっく +のばす +のはら +のべる +のぼる +のみもの +のやま +のらいぬ +のらねこ +のりもの +のりゆき +のれん +のんき +ばあい +はあく +ばあさん +ばいか +ばいく +はいけん +はいご +はいしん +はいすい +はいせん +はいそう +はいち +ばいばい +はいれつ +はえる +はおる +はかい +ばかり +はかる +はくしゅ +はけん +はこぶ +はさみ +はさん +はしご +ばしょ +はしる +はせる +ぱそこん +はそん +はたん +はちみつ +はつおん +はっかく +はづき +はっきり +はっくつ +はっけん +はっこう +はっさん +はっしん +はったつ +はっちゅう +はってん +はっぴょう +はっぽう +はなす +はなび +はにかむ +はぶらし +はみがき +はむかう +はめつ +はやい +はやし +はらう +はろうぃん +はわい +はんい +はんえい +はんおん +はんかく +はんきょう +ばんぐみ +はんこ +はんしゃ +はんすう +はんだん +ぱんち +ぱんつ +はんてい +はんとし +はんのう +はんぱ +はんぶん +はんぺん +はんぼうき +はんめい +はんらん +はんろん +ひいき +ひうん +ひえる +ひかく +ひかり +ひかる +ひかん +ひくい +ひけつ +ひこうき +ひこく +ひさい +ひさしぶり +ひさん +びじゅつかん +ひしょ +ひそか +ひそむ +ひたむき +ひだり +ひたる +ひつぎ +ひっこし +ひっし +ひつじゅひん +ひっす +ひつぜん +ぴったり +ぴっちり +ひつよう +ひてい +ひとごみ +ひなまつり +ひなん +ひねる +ひはん +ひびく +ひひょう +ひほう +ひまわり +ひまん +ひみつ +ひめい +ひめじし +ひやけ +ひやす +ひよう +びょうき +ひらがな +ひらく +ひりつ +ひりょう +ひるま +ひるやすみ +ひれい +ひろい +ひろう +ひろき +ひろゆき +ひんかく +ひんけつ +ひんこん +ひんしゅ +ひんそう +ぴんち +ひんぱん +びんぼう +ふあん +ふいうち +ふうけい +ふうせん +ぷうたろう +ふうとう +ふうふ +ふえる +ふおん +ふかい +ふきん +ふくざつ +ふくぶくろ +ふこう +ふさい +ふしぎ +ふじみ +ふすま +ふせい +ふせぐ +ふそく +ぶたにく +ふたん +ふちょう +ふつう +ふつか +ふっかつ +ふっき +ふっこく +ぶどう +ふとる +ふとん +ふのう +ふはい +ふひょう +ふへん +ふまん +ふみん +ふめつ +ふめん +ふよう +ふりこ +ふりる +ふるい +ふんいき +ぶんがく +ぶんぐ +ふんしつ +ぶんせき +ふんそう +ぶんぽう +へいあん +へいおん +へいがい +へいき +へいげん +へいこう +へいさ +へいしゃ +へいせつ +へいそ +へいたく +へいてん +へいねつ +へいわ +へきが +へこむ +べにいろ +べにしょうが +へらす +へんかん +べんきょう +べんごし +へんさい +へんたい +べんり +ほあん +ほいく +ぼうぎょ +ほうこく +ほうそう +ほうほう +ほうもん +ほうりつ +ほえる +ほおん +ほかん +ほきょう +ぼきん +ほくろ +ほけつ +ほけん +ほこう +ほこる +ほしい +ほしつ +ほしゅ +ほしょう +ほせい +ほそい +ほそく +ほたて +ほたる +ぽちぶくろ +ほっきょく +ほっさ +ほったん +ほとんど +ほめる +ほんい +ほんき +ほんけ +ほんしつ +ほんやく +まいにち +まかい +まかせる +まがる +まける +まこと +まさつ +まじめ +ますく +まぜる +まつり +まとめ +まなぶ +まぬけ +まねく +まほう +まもる +まゆげ +まよう +まろやか +まわす +まわり +まわる +まんが +まんきつ +まんぞく +まんなか +みいら +みうち +みえる +みがく +みかた +みかん +みけん +みこん +みじかい +みすい +みすえる +みせる +みっか +みつかる +みつける +みてい +みとめる +みなと +みなみかさい +みねらる +みのう +みのがす +みほん +みもと +みやげ +みらい +みりょく +みわく +みんか +みんぞく +むいか +むえき +むえん +むかい +むかう +むかえ +むかし +むぎちゃ +むける +むげん +むさぼる +むしあつい +むしば +むじゅん +むしろ +むすう +むすこ +むすぶ +むすめ +むせる +むせん +むちゅう +むなしい +むのう +むやみ +むよう +むらさき +むりょう +むろん +めいあん +めいうん +めいえん +めいかく +めいきょく +めいさい +めいし +めいそう +めいぶつ +めいれい +めいわく +めぐまれる +めざす +めした +めずらしい +めだつ +めまい +めやす +めんきょ +めんせき +めんどう +もうしあげる +もうどうけん +もえる +もくし +もくてき +もくようび +もちろん +もどる +もらう +もんく +もんだい +やおや +やける +やさい +やさしい +やすい +やすたろう +やすみ +やせる +やそう +やたい +やちん +やっと +やっぱり +やぶる +やめる +ややこしい +やよい +やわらかい +ゆうき +ゆうびんきょく +ゆうべ +ゆうめい +ゆけつ +ゆしゅつ +ゆせん +ゆそう +ゆたか +ゆちゃく +ゆでる +ゆにゅう +ゆびわ +ゆらい +ゆれる +ようい +ようか +ようきゅう +ようじ +ようす +ようちえん +よかぜ +よかん +よきん +よくせい +よくぼう +よけい +よごれる +よさん +よしゅう +よそう +よそく +よっか +よてい +よどがわく +よねつ +よやく +よゆう +よろこぶ +よろしい +らいう +らくがき +らくご +らくさつ +らくだ +らしんばん +らせん +らぞく +らたい +らっか +られつ +りえき +りかい +りきさく +りきせつ +りくぐん +りくつ +りけん 
+りこう +りせい +りそう +りそく +りてん +りねん +りゆう +りゅうがく +りよう +りょうり +りょかん +りょくちゃ +りょこう +りりく +りれき +りろん +りんご +るいけい +るいさい +るいじ +るいせき +るすばん +るりがわら +れいかん +れいぎ +れいせい +れいぞうこ +れいとう +れいぼう +れきし +れきだい +れんあい +れんけい +れんこん +れんさい +れんしゅう +れんぞく +れんらく +ろうか +ろうご +ろうじん +ろうそく +ろくが +ろこつ +ろじうら +ろしゅつ +ろせん +ろてん +ろめん +ろれつ +ろんぎ +ろんぱ +ろんぶん +ろんり +わかす +わかめ +わかやま +わかれる +わしつ +わじまし +わすれもの +わらう +われる +` diff --git a/third_party/go-bip39/wordlists/korean.go b/third_party/go-bip39/wordlists/korean.go new file mode 100644 index 0000000000..1d31775633 --- /dev/null +++ b/third_party/go-bip39/wordlists/korean.go @@ -0,0 +1,2071 @@ +package wordlists + +import ( + "fmt" + "hash/crc32" + "strings" +) + +func init() { + // Ensure word list is correct + // $ wget https://raw.githubusercontent.com/bitcoin/bips/master/bip-0039/korean.txt + // $ crc32 korean.txt + // 4ef461eb + checksum := crc32.ChecksumIEEE([]byte(korean)) + if fmt.Sprintf("%x", checksum) != "4ef461eb" { + panic("korean checksum invalid") + } +} + +// Korean is a slice of mnemonic words taken from the bip39 specification +// https://raw.githubusercontent.com/bitcoin/bips/master/bip-0039/korean.txt +var Korean = strings.Split(strings.TrimSpace(korean), "\n") +var korean = `가격 +가끔 +가난 +가능 +가득 +가르침 +가뭄 +가방 +가상 +가슴 +가운데 +가을 +가이드 +가입 +가장 +가정 +가족 +가죽 +각오 +각자 +간격 +간부 +간섭 +간장 +간접 +간판 +갈등 +갈비 +갈색 +갈증 +감각 +감기 +감소 +감수성 +감자 +감정 +갑자기 +강남 +강당 +강도 +강력히 +강변 +강북 +강사 +강수량 +강아지 +강원도 +강의 +강제 +강조 +같이 +개구리 +개나리 +개방 +개별 +개선 +개성 +개인 +객관적 +거실 +거액 +거울 +거짓 +거품 +걱정 +건강 +건물 +건설 +건조 +건축 +걸음 +검사 +검토 +게시판 +게임 +겨울 +견해 +결과 +결국 +결론 +결석 +결승 +결심 +결정 +결혼 +경계 +경고 +경기 +경력 +경복궁 +경비 +경상도 +경영 +경우 +경쟁 +경제 +경주 +경찰 +경치 +경향 +경험 +계곡 +계단 +계란 +계산 +계속 +계약 +계절 +계층 +계획 +고객 +고구려 +고궁 +고급 +고등학생 +고무신 +고민 +고양이 +고장 +고전 +고집 +고춧가루 +고통 +고향 +곡식 +골목 +골짜기 +골프 +공간 +공개 +공격 +공군 +공급 +공기 +공동 +공무원 +공부 +공사 +공식 +공업 +공연 +공원 +공장 +공짜 +공책 +공통 +공포 +공항 +공휴일 +과목 +과일 +과장 +과정 +과학 +관객 +관계 +관광 +관념 +관람 +관련 +관리 +관습 +관심 +관점 +관찰 +광경 +광고 +광장 +광주 +괴로움 +굉장히 +교과서 +교문 +교복 +교실 +교양 +교육 +교장 +교직 +교통 +교환 +교훈 +구경 +구름 +구멍 +구별 +구분 +구석 +구성 +구속 +구역 +구입 +구청 +구체적 +국가 +국기 +국내 +국립 +국물 +국민 +국수 +국어 +국왕 +국적 +국제 +국회 +군대 +군사 +군인 +궁극적 +권리 +권위 +권투 +귀국 +귀신 +규정 +규칙 +균형 +그날 +그냥 +그늘 +그러나 +그룹 +그릇 +그림 +그제서야 +그토록 +극복 +극히 +근거 +근교 +근래 +근로 +근무 +근본 +근원 +근육 +근처 +글씨 +글자 +금강산 +금고 +금년 +금메달 +금액 +금연 +금요일 +금지 +긍정적 +기간 +기관 +기념 +기능 +기독교 +기둥 +기록 +기름 +기법 +기본 +기분 +기쁨 +기숙사 +기술 +기억 +기업 +기온 +기운 +기원 +기적 +기준 +기침 +기혼 +기획 +긴급 +긴장 +길이 +김밥 +김치 +김포공항 +깍두기 +깜빡 +깨달음 +깨소금 +껍질 +꼭대기 +꽃잎 +나들이 +나란히 +나머지 +나물 +나침반 +나흘 +낙엽 +난방 +날개 +날씨 +날짜 +남녀 +남대문 +남매 +남산 +남자 +남편 +남학생 +낭비 +낱말 +내년 +내용 +내일 +냄비 +냄새 +냇물 +냉동 +냉면 +냉방 +냉장고 +넥타이 +넷째 +노동 +노란색 +노력 +노인 +녹음 +녹차 +녹화 +논리 +논문 +논쟁 +놀이 +농구 +농담 +농민 +농부 +농업 +농장 +농촌 +높이 +눈동자 +눈물 +눈썹 +뉴욕 +느낌 +늑대 +능동적 +능력 +다방 +다양성 +다음 +다이어트 +다행 +단계 +단골 +단독 +단맛 +단순 +단어 +단위 +단점 +단체 +단추 +단편 +단풍 +달걀 +달러 +달력 +달리 +닭고기 +담당 +담배 +담요 +담임 +답변 +답장 +당근 +당분간 +당연히 +당장 +대규모 +대낮 +대단히 +대답 +대도시 +대략 +대량 +대륙 +대문 +대부분 +대신 +대응 +대장 +대전 +대접 +대중 +대책 +대출 +대충 +대통령 +대학 +대한민국 +대합실 +대형 +덩어리 +데이트 +도대체 +도덕 +도둑 +도망 +도서관 +도심 +도움 +도입 +도자기 +도저히 +도전 +도중 +도착 +독감 +독립 +독서 +독일 +독창적 +동화책 +뒷모습 +뒷산 +딸아이 +마누라 +마늘 +마당 +마라톤 +마련 +마무리 +마사지 +마약 +마요네즈 +마을 +마음 +마이크 +마중 +마지막 +마찬가지 +마찰 +마흔 +막걸리 +막내 +막상 +만남 +만두 +만세 +만약 +만일 +만점 +만족 +만화 +많이 +말기 +말씀 +말투 +맘대로 +망원경 +매년 +매달 +매력 +매번 +매스컴 +매일 +매장 +맥주 +먹이 +먼저 +먼지 +멀리 +메일 +며느리 +며칠 +면담 +멸치 +명단 +명령 +명예 +명의 +명절 +명칭 +명함 +모금 +모니터 +모델 +모든 +모범 +모습 +모양 +모임 +모조리 +모집 +모퉁이 +목걸이 +목록 +목사 +목소리 +목숨 +목적 +목표 +몰래 +몸매 +몸무게 +몸살 +몸속 +몸짓 +몸통 +몹시 +무관심 +무궁화 +무더위 +무덤 +무릎 +무슨 +무엇 +무역 +무용 +무조건 +무지개 +무척 +문구 +문득 +문법 +문서 +문제 +문학 +문화 +물가 +물건 +물결 +물고기 +물론 +물리학 +물음 +물질 +물체 +미국 +미디어 +미사일 +미술 +미역 +미용실 +미움 +미인 +미팅 +미혼 +민간 +민족 +민주 +믿음 +밀가루 +밀리미터 +밑바닥 +바가지 +바구니 +바나나 +바늘 +바닥 
+바닷가 +바람 +바이러스 +바탕 +박물관 +박사 +박수 +반대 +반드시 +반말 +반발 +반성 +반응 +반장 +반죽 +반지 +반찬 +받침 +발가락 +발걸음 +발견 +발달 +발레 +발목 +발바닥 +발생 +발음 +발자국 +발전 +발톱 +발표 +밤하늘 +밥그릇 +밥맛 +밥상 +밥솥 +방금 +방면 +방문 +방바닥 +방법 +방송 +방식 +방안 +방울 +방지 +방학 +방해 +방향 +배경 +배꼽 +배달 +배드민턴 +백두산 +백색 +백성 +백인 +백제 +백화점 +버릇 +버섯 +버튼 +번개 +번역 +번지 +번호 +벌금 +벌레 +벌써 +범위 +범인 +범죄 +법률 +법원 +법적 +법칙 +베이징 +벨트 +변경 +변동 +변명 +변신 +변호사 +변화 +별도 +별명 +별일 +병실 +병아리 +병원 +보관 +보너스 +보라색 +보람 +보름 +보상 +보안 +보자기 +보장 +보전 +보존 +보통 +보편적 +보험 +복도 +복사 +복숭아 +복습 +볶음 +본격적 +본래 +본부 +본사 +본성 +본인 +본질 +볼펜 +봉사 +봉지 +봉투 +부근 +부끄러움 +부담 +부동산 +부문 +부분 +부산 +부상 +부엌 +부인 +부작용 +부장 +부정 +부족 +부지런히 +부친 +부탁 +부품 +부회장 +북부 +북한 +분노 +분량 +분리 +분명 +분석 +분야 +분위기 +분필 +분홍색 +불고기 +불과 +불교 +불꽃 +불만 +불법 +불빛 +불안 +불이익 +불행 +브랜드 +비극 +비난 +비닐 +비둘기 +비디오 +비로소 +비만 +비명 +비밀 +비바람 +비빔밥 +비상 +비용 +비율 +비중 +비타민 +비판 +빌딩 +빗물 +빗방울 +빗줄기 +빛깔 +빨간색 +빨래 +빨리 +사건 +사계절 +사나이 +사냥 +사람 +사랑 +사립 +사모님 +사물 +사방 +사상 +사생활 +사설 +사슴 +사실 +사업 +사용 +사월 +사장 +사전 +사진 +사촌 +사춘기 +사탕 +사투리 +사흘 +산길 +산부인과 +산업 +산책 +살림 +살인 +살짝 +삼계탕 +삼국 +삼십 +삼월 +삼촌 +상관 +상금 +상대 +상류 +상반기 +상상 +상식 +상업 +상인 +상자 +상점 +상처 +상추 +상태 +상표 +상품 +상황 +새벽 +색깔 +색연필 +생각 +생명 +생물 +생방송 +생산 +생선 +생신 +생일 +생활 +서랍 +서른 +서명 +서민 +서비스 +서양 +서울 +서적 +서점 +서쪽 +서클 +석사 +석유 +선거 +선물 +선배 +선생 +선수 +선원 +선장 +선전 +선택 +선풍기 +설거지 +설날 +설렁탕 +설명 +설문 +설사 +설악산 +설치 +설탕 +섭씨 +성공 +성당 +성명 +성별 +성인 +성장 +성적 +성질 +성함 +세금 +세미나 +세상 +세월 +세종대왕 +세탁 +센터 +센티미터 +셋째 +소규모 +소극적 +소금 +소나기 +소년 +소득 +소망 +소문 +소설 +소속 +소아과 +소용 +소원 +소음 +소중히 +소지품 +소질 +소풍 +소형 +속담 +속도 +속옷 +손가락 +손길 +손녀 +손님 +손등 +손목 +손뼉 +손실 +손질 +손톱 +손해 +솔직히 +솜씨 +송아지 +송이 +송편 +쇠고기 +쇼핑 +수건 +수년 +수단 +수돗물 +수동적 +수면 +수명 +수박 +수상 +수석 +수술 +수시로 +수업 +수염 +수영 +수입 +수준 +수집 +수출 +수컷 +수필 +수학 +수험생 +수화기 +숙녀 +숙소 +숙제 +순간 +순서 +순수 +순식간 +순위 +숟가락 +술병 +술집 +숫자 +스님 +스물 +스스로 +스승 +스웨터 +스위치 +스케이트 +스튜디오 +스트레스 +스포츠 +슬쩍 +슬픔 +습관 +습기 +승객 +승리 +승부 +승용차 +승진 +시각 +시간 +시골 +시금치 +시나리오 +시댁 +시리즈 +시멘트 +시민 +시부모 +시선 +시설 +시스템 +시아버지 +시어머니 +시월 +시인 +시일 +시작 +시장 +시절 +시점 +시중 +시즌 +시집 +시청 +시합 +시험 +식구 +식기 +식당 +식량 +식료품 +식물 +식빵 +식사 +식생활 +식초 +식탁 +식품 +신고 +신규 +신념 +신문 +신발 +신비 +신사 +신세 +신용 +신제품 +신청 +신체 +신화 +실감 +실내 +실력 +실례 +실망 +실수 +실습 +실시 +실장 +실정 +실질적 +실천 +실체 +실컷 +실태 +실패 +실험 +실현 +심리 +심부름 +심사 +심장 +심정 +심판 +쌍둥이 +씨름 +씨앗 +아가씨 +아나운서 +아드님 +아들 +아쉬움 +아스팔트 +아시아 +아울러 +아저씨 +아줌마 +아직 +아침 +아파트 +아프리카 +아픔 +아홉 +아흔 +악기 +악몽 +악수 +안개 +안경 +안과 +안내 +안녕 +안동 +안방 +안부 +안주 +알루미늄 +알코올 +암시 +암컷 +압력 +앞날 +앞문 +애인 +애정 +액수 +앨범 +야간 +야단 +야옹 +약간 +약국 +약속 +약수 +약점 +약품 +약혼녀 +양념 +양력 +양말 +양배추 +양주 +양파 +어둠 +어려움 +어른 +어젯밤 +어쨌든 +어쩌다가 +어쩐지 +언니 +언덕 +언론 +언어 +얼굴 +얼른 +얼음 +얼핏 +엄마 +업무 +업종 +업체 +엉덩이 +엉망 +엉터리 +엊그제 +에너지 +에어컨 +엔진 +여건 +여고생 +여관 +여군 +여권 +여대생 +여덟 +여동생 +여든 +여론 +여름 +여섯 +여성 +여왕 +여인 +여전히 +여직원 +여학생 +여행 +역사 +역시 +역할 +연결 +연구 +연극 +연기 +연락 +연설 +연세 +연속 +연습 +연애 +연예인 +연인 +연장 +연주 +연출 +연필 +연합 +연휴 +열기 +열매 +열쇠 +열심히 +열정 +열차 +열흘 +염려 +엽서 +영국 +영남 +영상 +영양 +영역 +영웅 +영원히 +영하 +영향 +영혼 +영화 +옆구리 +옆방 +옆집 +예감 +예금 +예방 +예산 +예상 +예선 +예술 +예습 +예식장 +예약 +예전 +예절 +예정 +예컨대 +옛날 +오늘 +오락 +오랫동안 +오렌지 +오로지 +오른발 +오븐 +오십 +오염 +오월 +오전 +오직 +오징어 +오페라 +오피스텔 +오히려 +옥상 +옥수수 +온갖 +온라인 +온몸 +온종일 +온통 +올가을 +올림픽 +올해 +옷차림 +와이셔츠 +와인 +완성 +완전 +왕비 +왕자 +왜냐하면 +왠지 +외갓집 +외국 +외로움 +외삼촌 +외출 +외침 +외할머니 +왼발 +왼손 +왼쪽 +요금 +요일 +요즘 +요청 +용기 +용서 +용어 +우산 +우선 +우승 +우연히 +우정 +우체국 +우편 +운동 +운명 +운반 +운전 +운행 +울산 +울음 +움직임 +웃어른 +웃음 +워낙 +원고 +원래 +원서 +원숭이 +원인 +원장 +원피스 +월급 +월드컵 +월세 +월요일 +웨이터 +위반 +위법 +위성 +위원 +위험 +위협 +윗사람 +유난히 +유럽 +유명 +유물 +유산 +유적 +유치원 +유학 +유행 +유형 +육군 +육상 +육십 +육체 +은행 +음력 +음료 +음반 +음성 +음식 +음악 +음주 +의견 +의논 +의문 +의복 +의식 +의심 +의외로 +의욕 +의원 +의학 +이것 +이곳 +이념 +이놈 +이달 +이대로 +이동 +이렇게 +이력서 +이론적 +이름 +이민 +이발소 +이별 +이불 +이빨 +이상 +이성 +이슬 +이야기 +이용 +이웃 +이월 +이윽고 +이익 +이전 +이중 +이튿날 +이틀 +이혼 +인간 +인격 +인공 +인구 +인근 +인기 +인도 +인류 +인물 +인생 +인쇄 +인연 +인원 +인재 +인종 +인천 +인체 +인터넷 +인하 +인형 +일곱 +일기 +일단 +일대 +일등 +일반 +일본 +일부 +일상 +일생 +일손 +일요일 +일월 +일정 +일종 +일주일 +일찍 +일체 +일치 +일행 +일회용 +임금 +임무 +입대 +입력 +입맛 +입사 
+입술 +입시 +입원 +입장 +입학 +자가용 +자격 +자극 +자동 +자랑 +자부심 +자식 +자신 +자연 +자원 +자율 +자전거 +자정 +자존심 +자판 +작가 +작년 +작성 +작업 +작용 +작은딸 +작품 +잔디 +잔뜩 +잔치 +잘못 +잠깐 +잠수함 +잠시 +잠옷 +잠자리 +잡지 +장관 +장군 +장기간 +장래 +장례 +장르 +장마 +장면 +장모 +장미 +장비 +장사 +장소 +장식 +장애인 +장인 +장점 +장차 +장학금 +재능 +재빨리 +재산 +재생 +재작년 +재정 +재채기 +재판 +재학 +재활용 +저것 +저고리 +저곳 +저녁 +저런 +저렇게 +저번 +저울 +저절로 +저축 +적극 +적당히 +적성 +적용 +적응 +전개 +전공 +전기 +전달 +전라도 +전망 +전문 +전반 +전부 +전세 +전시 +전용 +전자 +전쟁 +전주 +전철 +전체 +전통 +전혀 +전후 +절대 +절망 +절반 +절약 +절차 +점검 +점수 +점심 +점원 +점점 +점차 +접근 +접시 +접촉 +젓가락 +정거장 +정도 +정류장 +정리 +정말 +정면 +정문 +정반대 +정보 +정부 +정비 +정상 +정성 +정오 +정원 +정장 +정지 +정치 +정확히 +제공 +제과점 +제대로 +제목 +제발 +제법 +제삿날 +제안 +제일 +제작 +제주도 +제출 +제품 +제한 +조각 +조건 +조금 +조깅 +조명 +조미료 +조상 +조선 +조용히 +조절 +조정 +조직 +존댓말 +존재 +졸업 +졸음 +종교 +종로 +종류 +종소리 +종업원 +종종 +종합 +좌석 +죄인 +주관적 +주름 +주말 +주머니 +주먹 +주문 +주민 +주방 +주변 +주식 +주인 +주일 +주장 +주전자 +주택 +준비 +줄거리 +줄기 +줄무늬 +중간 +중계방송 +중국 +중년 +중단 +중독 +중반 +중부 +중세 +중소기업 +중순 +중앙 +중요 +중학교 +즉석 +즉시 +즐거움 +증가 +증거 +증권 +증상 +증세 +지각 +지갑 +지경 +지극히 +지금 +지급 +지능 +지름길 +지리산 +지방 +지붕 +지식 +지역 +지우개 +지원 +지적 +지점 +지진 +지출 +직선 +직업 +직원 +직장 +진급 +진동 +진로 +진료 +진리 +진짜 +진찰 +진출 +진통 +진행 +질문 +질병 +질서 +짐작 +집단 +집안 +집중 +짜증 +찌꺼기 +차남 +차라리 +차량 +차림 +차별 +차선 +차츰 +착각 +찬물 +찬성 +참가 +참기름 +참새 +참석 +참여 +참외 +참조 +찻잔 +창가 +창고 +창구 +창문 +창밖 +창작 +창조 +채널 +채점 +책가방 +책방 +책상 +책임 +챔피언 +처벌 +처음 +천국 +천둥 +천장 +천재 +천천히 +철도 +철저히 +철학 +첫날 +첫째 +청년 +청바지 +청소 +청춘 +체계 +체력 +체온 +체육 +체중 +체험 +초등학생 +초반 +초밥 +초상화 +초순 +초여름 +초원 +초저녁 +초점 +초청 +초콜릿 +촛불 +총각 +총리 +총장 +촬영 +최근 +최상 +최선 +최신 +최악 +최종 +추석 +추억 +추진 +추천 +추측 +축구 +축소 +축제 +축하 +출근 +출발 +출산 +출신 +출연 +출입 +출장 +출판 +충격 +충고 +충돌 +충분히 +충청도 +취업 +취직 +취향 +치약 +친구 +친척 +칠십 +칠월 +칠판 +침대 +침묵 +침실 +칫솔 +칭찬 +카메라 +카운터 +칼국수 +캐릭터 +캠퍼스 +캠페인 +커튼 +컨디션 +컬러 +컴퓨터 +코끼리 +코미디 +콘서트 +콜라 +콤플렉스 +콩나물 +쾌감 +쿠데타 +크림 +큰길 +큰딸 +큰소리 +큰아들 +큰어머니 +큰일 +큰절 +클래식 +클럽 +킬로 +타입 +타자기 +탁구 +탁자 +탄생 +태권도 +태양 +태풍 +택시 +탤런트 +터널 +터미널 +테니스 +테스트 +테이블 +텔레비전 +토론 +토마토 +토요일 +통계 +통과 +통로 +통신 +통역 +통일 +통장 +통제 +통증 +통합 +통화 +퇴근 +퇴원 +퇴직금 +튀김 +트럭 +특급 +특별 +특성 +특수 +특징 +특히 +튼튼히 +티셔츠 +파란색 +파일 +파출소 +판결 +판단 +판매 +판사 +팔십 +팔월 +팝송 +패션 +팩스 +팩시밀리 +팬티 +퍼센트 +페인트 +편견 +편의 +편지 +편히 +평가 +평균 +평생 +평소 +평양 +평일 +평화 +포스터 +포인트 +포장 +포함 +표면 +표정 +표준 +표현 +품목 +품질 +풍경 +풍속 +풍습 +프랑스 +프린터 +플라스틱 +피곤 +피망 +피아노 +필름 +필수 +필요 +필자 +필통 +핑계 +하느님 +하늘 +하드웨어 +하룻밤 +하반기 +하숙집 +하순 +하여튼 +하지만 +하천 +하품 +하필 +학과 +학교 +학급 +학기 +학년 +학력 +학번 +학부모 +학비 +학생 +학술 +학습 +학용품 +학원 +학위 +학자 +학점 +한계 +한글 +한꺼번에 +한낮 +한눈 +한동안 +한때 +한라산 +한마디 +한문 +한번 +한복 +한식 +한여름 +한쪽 +할머니 +할아버지 +할인 +함께 +함부로 +합격 +합리적 +항공 +항구 +항상 +항의 +해결 +해군 +해답 +해당 +해물 +해석 +해설 +해수욕장 +해안 +핵심 +핸드백 +햄버거 +햇볕 +햇살 +행동 +행복 +행사 +행운 +행위 +향기 +향상 +향수 +허락 +허용 +헬기 +현관 +현금 +현대 +현상 +현실 +현장 +현재 +현지 +혈액 +협력 +형부 +형사 +형수 +형식 +형제 +형태 +형편 +혜택 +호기심 +호남 +호랑이 +호박 +호텔 +호흡 +혹시 +홀로 +홈페이지 +홍보 +홍수 +홍차 +화면 +화분 +화살 +화요일 +화장 +화학 +확보 +확인 +확장 +확정 +환갑 +환경 +환영 +환율 +환자 +활기 +활동 +활발히 +활용 +활짝 +회견 +회관 +회복 +회색 +회원 +회장 +회전 +횟수 +횡단보도 +효율적 +후반 +후춧가루 +훈련 +훨씬 +휴식 +휴일 +흉내 +흐름 +흑백 +흑인 +흔적 +흔히 +흥미 +흥분 +희곡 +희망 +희생 +흰색 +힘껏 +` diff --git a/third_party/go-bip39/wordlists/spanish.go b/third_party/go-bip39/wordlists/spanish.go new file mode 100644 index 0000000000..ad76da976c --- /dev/null +++ b/third_party/go-bip39/wordlists/spanish.go @@ -0,0 +1,2071 @@ +package wordlists + +import ( + "fmt" + "hash/crc32" + "strings" +) + +func init() { + // Ensure word list is correct + // $ wget https://raw.githubusercontent.com/bitcoin/bips/master/bip-0039/spanish.txt + // $ crc32 spanish.txt + // 266e4f3d + checksum := crc32.ChecksumIEEE([]byte(spanish)) + if fmt.Sprintf("%x", checksum) != "266e4f3d" { + panic("spanish checksum invalid") + } +} + +// Spanish is a slice of mnemonic words taken from the bip39 specification +// https://raw.githubusercontent.com/bitcoin/bips/master/bip-0039/spanish.txt +var Spanish = 
strings.Split(strings.TrimSpace(spanish), "\n") +var spanish = `ábaco +abdomen +abeja +abierto +abogado +abono +aborto +abrazo +abrir +abuelo +abuso +acabar +academia +acceso +acción +aceite +acelga +acento +aceptar +ácido +aclarar +acné +acoger +acoso +activo +acto +actriz +actuar +acudir +acuerdo +acusar +adicto +admitir +adoptar +adorno +aduana +adulto +aéreo +afectar +afición +afinar +afirmar +ágil +agitar +agonía +agosto +agotar +agregar +agrio +agua +agudo +águila +aguja +ahogo +ahorro +aire +aislar +ajedrez +ajeno +ajuste +alacrán +alambre +alarma +alba +álbum +alcalde +aldea +alegre +alejar +alerta +aleta +alfiler +alga +algodón +aliado +aliento +alivio +alma +almeja +almíbar +altar +alteza +altivo +alto +altura +alumno +alzar +amable +amante +amapola +amargo +amasar +ámbar +ámbito +ameno +amigo +amistad +amor +amparo +amplio +ancho +anciano +ancla +andar +andén +anemia +ángulo +anillo +ánimo +anís +anotar +antena +antiguo +antojo +anual +anular +anuncio +añadir +añejo +año +apagar +aparato +apetito +apio +aplicar +apodo +aporte +apoyo +aprender +aprobar +apuesta +apuro +arado +araña +arar +árbitro +árbol +arbusto +archivo +arco +arder +ardilla +arduo +área +árido +aries +armonía +arnés +aroma +arpa +arpón +arreglo +arroz +arruga +arte +artista +asa +asado +asalto +ascenso +asegurar +aseo +asesor +asiento +asilo +asistir +asno +asombro +áspero +astilla +astro +astuto +asumir +asunto +atajo +ataque +atar +atento +ateo +ático +atleta +átomo +atraer +atroz +atún +audaz +audio +auge +aula +aumento +ausente +autor +aval +avance +avaro +ave +avellana +avena +avestruz +avión +aviso +ayer +ayuda +ayuno +azafrán +azar +azote +azúcar +azufre +azul +baba +babor +bache +bahía +baile +bajar +balanza +balcón +balde +bambú +banco +banda +baño +barba +barco +barniz +barro +báscula +bastón +basura +batalla +batería +batir +batuta +baúl +bazar +bebé +bebida +bello +besar +beso +bestia +bicho +bien +bingo +blanco +bloque +blusa +boa +bobina +bobo +boca +bocina +boda +bodega +boina +bola +bolero +bolsa +bomba +bondad +bonito +bono +bonsái +borde +borrar +bosque +bote +botín +bóveda +bozal +bravo +brazo +brecha +breve +brillo +brinco +brisa +broca +broma +bronce +brote +bruja +brusco +bruto +buceo +bucle +bueno +buey +bufanda +bufón +búho +buitre +bulto +burbuja +burla +burro +buscar +butaca +buzón +caballo +cabeza +cabina +cabra +cacao +cadáver +cadena +caer +café +caída +caimán +caja +cajón +cal +calamar +calcio +caldo +calidad +calle +calma +calor +calvo +cama +cambio +camello +camino +campo +cáncer +candil +canela +canguro +canica +canto +caña +cañón +caoba +caos +capaz +capitán +capote +captar +capucha +cara +carbón +cárcel +careta +carga +cariño +carne +carpeta +carro +carta +casa +casco +casero +caspa +castor +catorce +catre +caudal +causa +cazo +cebolla +ceder +cedro +celda +célebre +celoso +célula +cemento +ceniza +centro +cerca +cerdo +cereza +cero +cerrar +certeza +césped +cetro +chacal +chaleco +champú +chancla +chapa +charla +chico +chiste +chivo +choque +choza +chuleta +chupar +ciclón +ciego +cielo +cien +cierto +cifra +cigarro +cima +cinco +cine +cinta +ciprés +circo +ciruela +cisne +cita +ciudad +clamor +clan +claro +clase +clave +cliente +clima +clínica +cobre +cocción +cochino +cocina +coco +código +codo +cofre +coger +cohete +cojín +cojo +cola +colcha +colegio +colgar +colina +collar +colmo +columna +combate +comer +comida +cómodo +compra +conde +conejo +conga +conocer +consejo +contar +copa +copia +corazón +corbata +corcho +cordón +corona +correr +coser +cosmos +costa +cráneo 
+cráter +crear +crecer +creído +crema +cría +crimen +cripta +crisis +cromo +crónica +croqueta +crudo +cruz +cuadro +cuarto +cuatro +cubo +cubrir +cuchara +cuello +cuento +cuerda +cuesta +cueva +cuidar +culebra +culpa +culto +cumbre +cumplir +cuna +cuneta +cuota +cupón +cúpula +curar +curioso +curso +curva +cutis +dama +danza +dar +dardo +dátil +deber +débil +década +decir +dedo +defensa +definir +dejar +delfín +delgado +delito +demora +denso +dental +deporte +derecho +derrota +desayuno +deseo +desfile +desnudo +destino +desvío +detalle +detener +deuda +día +diablo +diadema +diamante +diana +diario +dibujo +dictar +diente +dieta +diez +difícil +digno +dilema +diluir +dinero +directo +dirigir +disco +diseño +disfraz +diva +divino +doble +doce +dolor +domingo +don +donar +dorado +dormir +dorso +dos +dosis +dragón +droga +ducha +duda +duelo +dueño +dulce +dúo +duque +durar +dureza +duro +ébano +ebrio +echar +eco +ecuador +edad +edición +edificio +editor +educar +efecto +eficaz +eje +ejemplo +elefante +elegir +elemento +elevar +elipse +élite +elixir +elogio +eludir +embudo +emitir +emoción +empate +empeño +empleo +empresa +enano +encargo +enchufe +encía +enemigo +enero +enfado +enfermo +engaño +enigma +enlace +enorme +enredo +ensayo +enseñar +entero +entrar +envase +envío +época +equipo +erizo +escala +escena +escolar +escribir +escudo +esencia +esfera +esfuerzo +espada +espejo +espía +esposa +espuma +esquí +estar +este +estilo +estufa +etapa +eterno +ética +etnia +evadir +evaluar +evento +evitar +exacto +examen +exceso +excusa +exento +exigir +exilio +existir +éxito +experto +explicar +exponer +extremo +fábrica +fábula +fachada +fácil +factor +faena +faja +falda +fallo +falso +faltar +fama +familia +famoso +faraón +farmacia +farol +farsa +fase +fatiga +fauna +favor +fax +febrero +fecha +feliz +feo +feria +feroz +fértil +fervor +festín +fiable +fianza +fiar +fibra +ficción +ficha +fideo +fiebre +fiel +fiera +fiesta +figura +fijar +fijo +fila +filete +filial +filtro +fin +finca +fingir +finito +firma +flaco +flauta +flecha +flor +flota +fluir +flujo +flúor +fobia +foca +fogata +fogón +folio +folleto +fondo +forma +forro +fortuna +forzar +fosa +foto +fracaso +frágil +franja +frase +fraude +freír +freno +fresa +frío +frito +fruta +fuego +fuente +fuerza +fuga +fumar +función +funda +furgón +furia +fusil +fútbol +futuro +gacela +gafas +gaita +gajo +gala +galería +gallo +gamba +ganar +gancho +ganga +ganso +garaje +garza +gasolina +gastar +gato +gavilán +gemelo +gemir +gen +género +genio +gente +geranio +gerente +germen +gesto +gigante +gimnasio +girar +giro +glaciar +globo +gloria +gol +golfo +goloso +golpe +goma +gordo +gorila +gorra +gota +goteo +gozar +grada +gráfico +grano +grasa +gratis +grave +grieta +grillo +gripe +gris +grito +grosor +grúa +grueso +grumo +grupo +guante +guapo +guardia +guerra +guía +guiño +guion +guiso +guitarra +gusano +gustar +haber +hábil +hablar +hacer +hacha +hada +hallar +hamaca +harina +haz +hazaña +hebilla +hebra +hecho +helado +helio +hembra +herir +hermano +héroe +hervir +hielo +hierro +hígado +higiene +hijo +himno +historia +hocico +hogar +hoguera +hoja +hombre +hongo +honor +honra +hora +hormiga +horno +hostil +hoyo +hueco +huelga +huerta +hueso +huevo +huida +huir +humano +húmedo +humilde +humo +hundir +huracán +hurto +icono +ideal +idioma +ídolo +iglesia +iglú +igual +ilegal +ilusión +imagen +imán +imitar +impar +imperio +imponer +impulso +incapaz +índice +inerte +infiel +informe +ingenio +inicio +inmenso +inmune +innato +insecto +instante +interés +íntimo 
+intuir +inútil +invierno +ira +iris +ironía +isla +islote +jabalí +jabón +jamón +jarabe +jardín +jarra +jaula +jazmín +jefe +jeringa +jinete +jornada +joroba +joven +joya +juerga +jueves +juez +jugador +jugo +juguete +juicio +junco +jungla +junio +juntar +júpiter +jurar +justo +juvenil +juzgar +kilo +koala +labio +lacio +lacra +lado +ladrón +lagarto +lágrima +laguna +laico +lamer +lámina +lámpara +lana +lancha +langosta +lanza +lápiz +largo +larva +lástima +lata +látex +latir +laurel +lavar +lazo +leal +lección +leche +lector +leer +legión +legumbre +lejano +lengua +lento +leña +león +leopardo +lesión +letal +letra +leve +leyenda +libertad +libro +licor +líder +lidiar +lienzo +liga +ligero +lima +límite +limón +limpio +lince +lindo +línea +lingote +lino +linterna +líquido +liso +lista +litera +litio +litro +llaga +llama +llanto +llave +llegar +llenar +llevar +llorar +llover +lluvia +lobo +loción +loco +locura +lógica +logro +lombriz +lomo +lonja +lote +lucha +lucir +lugar +lujo +luna +lunes +lupa +lustro +luto +luz +maceta +macho +madera +madre +maduro +maestro +mafia +magia +mago +maíz +maldad +maleta +malla +malo +mamá +mambo +mamut +manco +mando +manejar +manga +maniquí +manjar +mano +manso +manta +mañana +mapa +máquina +mar +marco +marea +marfil +margen +marido +mármol +marrón +martes +marzo +masa +máscara +masivo +matar +materia +matiz +matriz +máximo +mayor +mazorca +mecha +medalla +medio +médula +mejilla +mejor +melena +melón +memoria +menor +mensaje +mente +menú +mercado +merengue +mérito +mes +mesón +meta +meter +método +metro +mezcla +miedo +miel +miembro +miga +mil +milagro +militar +millón +mimo +mina +minero +mínimo +minuto +miope +mirar +misa +miseria +misil +mismo +mitad +mito +mochila +moción +moda +modelo +moho +mojar +molde +moler +molino +momento +momia +monarca +moneda +monja +monto +moño +morada +morder +moreno +morir +morro +morsa +mortal +mosca +mostrar +motivo +mover +móvil +mozo +mucho +mudar +mueble +muela +muerte +muestra +mugre +mujer +mula +muleta +multa +mundo +muñeca +mural +muro +músculo +museo +musgo +música +muslo +nácar +nación +nadar +naipe +naranja +nariz +narrar +nasal +natal +nativo +natural +náusea +naval +nave +navidad +necio +néctar +negar +negocio +negro +neón +nervio +neto +neutro +nevar +nevera +nicho +nido +niebla +nieto +niñez +niño +nítido +nivel +nobleza +noche +nómina +noria +norma +norte +nota +noticia +novato +novela +novio +nube +nuca +núcleo +nudillo +nudo +nuera +nueve +nuez +nulo +número +nutria +oasis +obeso +obispo +objeto +obra +obrero +observar +obtener +obvio +oca +ocaso +océano +ochenta +ocho +ocio +ocre +octavo +octubre +oculto +ocupar +ocurrir +odiar +odio +odisea +oeste +ofensa +oferta +oficio +ofrecer +ogro +oído +oír +ojo +ola +oleada +olfato +olivo +olla +olmo +olor +olvido +ombligo +onda +onza +opaco +opción +ópera +opinar +oponer +optar +óptica +opuesto +oración +orador +oral +órbita +orca +orden +oreja +órgano +orgía +orgullo +oriente +origen +orilla +oro +orquesta +oruga +osadía +oscuro +osezno +oso +ostra +otoño +otro +oveja +óvulo +óxido +oxígeno +oyente +ozono +pacto +padre +paella +página +pago +país +pájaro +palabra +palco +paleta +pálido +palma +paloma +palpar +pan +panal +pánico +pantera +pañuelo +papá +papel +papilla +paquete +parar +parcela +pared +parir +paro +párpado +parque +párrafo +parte +pasar +paseo +pasión +paso +pasta +pata +patio +patria +pausa +pauta +pavo +payaso +peatón +pecado +pecera +pecho +pedal +pedir +pegar +peine +pelar +peldaño +pelea +peligro +pellejo +pelo +peluca +pena +pensar +peñón 
+peón +peor +pepino +pequeño +pera +percha +perder +pereza +perfil +perico +perla +permiso +perro +persona +pesa +pesca +pésimo +pestaña +pétalo +petróleo +pez +pezuña +picar +pichón +pie +piedra +pierna +pieza +pijama +pilar +piloto +pimienta +pino +pintor +pinza +piña +piojo +pipa +pirata +pisar +piscina +piso +pista +pitón +pizca +placa +plan +plata +playa +plaza +pleito +pleno +plomo +pluma +plural +pobre +poco +poder +podio +poema +poesía +poeta +polen +policía +pollo +polvo +pomada +pomelo +pomo +pompa +poner +porción +portal +posada +poseer +posible +poste +potencia +potro +pozo +prado +precoz +pregunta +premio +prensa +preso +previo +primo +príncipe +prisión +privar +proa +probar +proceso +producto +proeza +profesor +programa +prole +promesa +pronto +propio +próximo +prueba +público +puchero +pudor +pueblo +puerta +puesto +pulga +pulir +pulmón +pulpo +pulso +puma +punto +puñal +puño +pupa +pupila +puré +quedar +queja +quemar +querer +queso +quieto +química +quince +quitar +rábano +rabia +rabo +ración +radical +raíz +rama +rampa +rancho +rango +rapaz +rápido +rapto +rasgo +raspa +rato +rayo +raza +razón +reacción +realidad +rebaño +rebote +recaer +receta +rechazo +recoger +recreo +recto +recurso +red +redondo +reducir +reflejo +reforma +refrán +refugio +regalo +regir +regla +regreso +rehén +reino +reír +reja +relato +relevo +relieve +relleno +reloj +remar +remedio +remo +rencor +rendir +renta +reparto +repetir +reposo +reptil +res +rescate +resina +respeto +resto +resumen +retiro +retorno +retrato +reunir +revés +revista +rey +rezar +rico +riego +rienda +riesgo +rifa +rígido +rigor +rincón +riñón +río +riqueza +risa +ritmo +rito +rizo +roble +roce +rociar +rodar +rodeo +rodilla +roer +rojizo +rojo +romero +romper +ron +ronco +ronda +ropa +ropero +rosa +rosca +rostro +rotar +rubí +rubor +rudo +rueda +rugir +ruido +ruina +ruleta +rulo +rumbo +rumor +ruptura +ruta +rutina +sábado +saber +sabio +sable +sacar +sagaz +sagrado +sala +saldo +salero +salir +salmón +salón +salsa +salto +salud +salvar +samba +sanción +sandía +sanear +sangre +sanidad +sano +santo +sapo +saque +sardina +sartén +sastre +satán +sauna +saxofón +sección +seco +secreto +secta +sed +seguir +seis +sello +selva +semana +semilla +senda +sensor +señal +señor +separar +sepia +sequía +ser +serie +sermón +servir +sesenta +sesión +seta +setenta +severo +sexo +sexto +sidra +siesta +siete +siglo +signo +sílaba +silbar +silencio +silla +símbolo +simio +sirena +sistema +sitio +situar +sobre +socio +sodio +sol +solapa +soldado +soledad +sólido +soltar +solución +sombra +sondeo +sonido +sonoro +sonrisa +sopa +soplar +soporte +sordo +sorpresa +sorteo +sostén +sótano +suave +subir +suceso +sudor +suegra +suelo +sueño +suerte +sufrir +sujeto +sultán +sumar +superar +suplir +suponer +supremo +sur +surco +sureño +surgir +susto +sutil +tabaco +tabique +tabla +tabú +taco +tacto +tajo +talar +talco +talento +talla +talón +tamaño +tambor +tango +tanque +tapa +tapete +tapia +tapón +taquilla +tarde +tarea +tarifa +tarjeta +tarot +tarro +tarta +tatuaje +tauro +taza +tazón +teatro +techo +tecla +técnica +tejado +tejer +tejido +tela +teléfono +tema +temor +templo +tenaz +tender +tener +tenis +tenso +teoría +terapia +terco +término +ternura +terror +tesis +tesoro +testigo +tetera +texto +tez +tibio +tiburón +tiempo +tienda +tierra +tieso +tigre +tijera +tilde +timbre +tímido +timo +tinta +tío +típico +tipo +tira +tirón +titán +títere +título +tiza +toalla +tobillo +tocar +tocino +todo +toga +toldo +tomar +tono +tonto +topar +tope +toque +tórax 
+torero +tormenta +torneo +toro +torpedo +torre +torso +tortuga +tos +tosco +toser +tóxico +trabajo +tractor +traer +tráfico +trago +traje +tramo +trance +trato +trauma +trazar +trébol +tregua +treinta +tren +trepar +tres +tribu +trigo +tripa +triste +triunfo +trofeo +trompa +tronco +tropa +trote +trozo +truco +trueno +trufa +tubería +tubo +tuerto +tumba +tumor +túnel +túnica +turbina +turismo +turno +tutor +ubicar +úlcera +umbral +unidad +unir +universo +uno +untar +uña +urbano +urbe +urgente +urna +usar +usuario +útil +utopía +uva +vaca +vacío +vacuna +vagar +vago +vaina +vajilla +vale +válido +valle +valor +válvula +vampiro +vara +variar +varón +vaso +vecino +vector +vehículo +veinte +vejez +vela +velero +veloz +vena +vencer +venda +veneno +vengar +venir +venta +venus +ver +verano +verbo +verde +vereda +verja +verso +verter +vía +viaje +vibrar +vicio +víctima +vida +vídeo +vidrio +viejo +viernes +vigor +vil +villa +vinagre +vino +viñedo +violín +viral +virgo +virtud +visor +víspera +vista +vitamina +viudo +vivaz +vivero +vivir +vivo +volcán +volumen +volver +voraz +votar +voto +voz +vuelo +vulgar +yacer +yate +yegua +yema +yerno +yeso +yodo +yoga +yogur +zafiro +zanja +zapato +zarza +zona +zorro +zumo +zurdo +` diff --git a/tools/interop/split-keys/BUILD.bazel b/tools/interop/split-keys/BUILD.bazel index ad25aad0c5..ed7298094a 100644 --- a/tools/interop/split-keys/BUILD.bazel +++ b/tools/interop/split-keys/BUILD.bazel @@ -8,11 +8,11 @@ go_library( visibility = ["//visibility:private"], deps = [ "//io/file:go_default_library", + "//third_party/go-bip39:go_default_library", "//validator/accounts/wallet:go_default_library", "//validator/keymanager:go_default_library", "//validator/keymanager/derived:go_default_library", "//validator/keymanager/local:go_default_library", - "@com_github_tyler_smith_go_bip39//:go_default_library", "@com_github_wealdtech_go_eth2_util//:go_default_library", ], ) diff --git a/validator/client/BUILD.bazel b/validator/client/BUILD.bazel index ea938b1ea6..3a26a17e2b 100644 --- a/validator/client/BUILD.bazel +++ b/validator/client/BUILD.bazel @@ -149,6 +149,7 @@ go_test( "//testing/require:go_default_library", "//testing/util:go_default_library", "//testing/validator-mock:go_default_library", + "//third_party/go-bip39:go_default_library", "//time/slots:go_default_library", "//validator/accounts/testing:go_default_library", "//validator/accounts/wallet:go_default_library", @@ -171,7 +172,6 @@ go_test( "@com_github_sirupsen_logrus//hooks/test:go_default_library", "@com_github_stretchr_testify//assert:go_default_library", "@com_github_stretchr_testify//require:go_default_library", - "@com_github_tyler_smith_go_bip39//:go_default_library", "@com_github_urfave_cli_v2//:go_default_library", "@com_github_wealdtech_go_eth2_util//:go_default_library", "@in_gopkg_d4l3k_messagediff_v1//:go_default_library", diff --git a/validator/keymanager/derived/BUILD.bazel b/validator/keymanager/derived/BUILD.bazel index d30b6d1ae3..6758a87486 100644 --- a/validator/keymanager/derived/BUILD.bazel +++ b/validator/keymanager/derived/BUILD.bazel @@ -20,14 +20,14 @@ go_library( "//crypto/rand:go_default_library", "//io/prompt:go_default_library", "//proto/prysm/v1alpha1/validator-client:go_default_library", + "//third_party/go-bip39:go_default_library", + "//third_party/go-bip39/wordlists:go_default_library", "//validator/accounts/iface:go_default_library", "//validator/keymanager:go_default_library", "//validator/keymanager/local:go_default_library", 
"@com_github_logrusorgru_aurora//:go_default_library", "@com_github_pkg_errors//:go_default_library", "@com_github_sirupsen_logrus//:go_default_library", - "@com_github_tyler_smith_go_bip39//:go_default_library", - "@com_github_tyler_smith_go_bip39//wordlists:go_default_library", "@com_github_wealdtech_go_eth2_util//:go_default_library", ], ) @@ -47,11 +47,11 @@ go_test( "//proto/prysm/v1alpha1/validator-client:go_default_library", "//testing/assert:go_default_library", "//testing/require:go_default_library", + "//third_party/go-bip39:go_default_library", + "//third_party/go-bip39/wordlists:go_default_library", "//validator/accounts/testing:go_default_library", "//validator/testing:go_default_library", "@com_github_pkg_errors//:go_default_library", - "@com_github_tyler_smith_go_bip39//:go_default_library", - "@com_github_tyler_smith_go_bip39//wordlists:go_default_library", "@com_github_wealdtech_go_eth2_util//:go_default_library", ], ) diff --git a/validator/rpc/BUILD.bazel b/validator/rpc/BUILD.bazel index 86df1841d2..5b3c14aeac 100644 --- a/validator/rpc/BUILD.bazel +++ b/validator/rpc/BUILD.bazel @@ -49,6 +49,8 @@ go_library( "//network/httputil:go_default_library", "//proto/prysm/v1alpha1:go_default_library", "//runtime/version:go_default_library", + "//third_party/go-bip39:go_default_library", + "//third_party/go-bip39/wordlists:go_default_library", "//validator/accounts:go_default_library", "//validator/accounts/petnames:go_default_library", "//validator/accounts/wallet:go_default_library", @@ -76,8 +78,6 @@ go_library( "@com_github_grpc_ecosystem_go_grpc_prometheus//:go_default_library", "@com_github_pkg_errors//:go_default_library", "@com_github_sirupsen_logrus//:go_default_library", - "@com_github_tyler_smith_go_bip39//:go_default_library", - "@com_github_tyler_smith_go_bip39//wordlists:go_default_library", "@com_github_wealdtech_go_eth2_wallet_encryptor_keystorev4//:go_default_library", "@io_opentelemetry_go_contrib_instrumentation_net_http_otelhttp//:go_default_library", "@org_golang_google_grpc//:go_default_library", @@ -124,6 +124,7 @@ go_test( "//testing/assert:go_default_library", "//testing/require:go_default_library", "//testing/validator-mock:go_default_library", + "//third_party/go-bip39:go_default_library", "//validator/accounts:go_default_library", "//validator/accounts/iface:go_default_library", "//validator/accounts/wallet:go_default_library", @@ -146,7 +147,6 @@ go_test( "@com_github_google_uuid//:go_default_library", "@com_github_pkg_errors//:go_default_library", "@com_github_sirupsen_logrus//hooks/test:go_default_library", - "@com_github_tyler_smith_go_bip39//:go_default_library", "@com_github_urfave_cli_v2//:go_default_library", "@com_github_wealdtech_go_eth2_wallet_encryptor_keystorev4//:go_default_library", "@org_golang_google_grpc//:go_default_library", From 4914882e97b764150e70df858ba597be8f0bcf70 Mon Sep 17 00:00:00 2001 From: terence <terence@prysmaticlabs.com> Date: Fri, 14 Nov 2025 18:18:56 -0500 Subject: [PATCH 101/103] Record gossip KZG batch verification durations (#16018) * Record gossip KZG batch verification durations * Add path --- beacon-chain/sync/batch_verifier.go | 6 ++++++ beacon-chain/verification/data_column.go | 2 +- beacon-chain/verification/metrics.go | 3 ++- changelog/terence_gossip-kzg-latency.md | 3 +++ 4 files changed, 12 insertions(+), 2 deletions(-) create mode 100644 changelog/terence_gossip-kzg-latency.md diff --git a/beacon-chain/sync/batch_verifier.go b/beacon-chain/sync/batch_verifier.go index 0f1e61f35d..d6109daec1 100644 --- 
a/beacon-chain/sync/batch_verifier.go +++ b/beacon-chain/sync/batch_verifier.go @@ -5,6 +5,7 @@ import ( "time" "github.com/OffchainLabs/prysm/v7/beacon-chain/core/peerdas" + "github.com/OffchainLabs/prysm/v7/beacon-chain/verification" "github.com/OffchainLabs/prysm/v7/config/params" "github.com/OffchainLabs/prysm/v7/consensus-types/blocks" "github.com/OffchainLabs/prysm/v7/crypto/bls" @@ -183,11 +184,13 @@ func (s *Service) validateWithKzgBatchVerifier(ctx context.Context, dataColumns func (s *Service) validateUnbatchedColumnsKzg(ctx context.Context, columns []blocks.RODataColumn) (pubsub.ValidationResult, error) { _, span := trace.StartSpan(ctx, "sync.validateUnbatchedColumnsKzg") defer span.End() + start := time.Now() if err := peerdas.VerifyDataColumnsSidecarKZGProofs(columns); err != nil { err = errors.Wrap(err, "could not verify") tracing.AnnotateError(span, err) return pubsub.ValidationReject, err } + verification.DataColumnBatchKZGVerificationHistogram.WithLabelValues("fallback").Observe(float64(time.Since(start).Milliseconds())) return pubsub.ValidationAccept, nil } @@ -202,9 +205,12 @@ func verifyKzgBatch(kzgBatch []*kzgVerifier) { } var verificationErr error + start := time.Now() err := peerdas.VerifyDataColumnsSidecarKZGProofs(allDataColumns) if err != nil { verificationErr = errors.Wrap(err, "batch KZG verification failed") + } else { + verification.DataColumnBatchKZGVerificationHistogram.WithLabelValues("batch").Observe(float64(time.Since(start).Milliseconds())) } // Send the same result to all verifiers in the batch diff --git a/beacon-chain/verification/data_column.go b/beacon-chain/verification/data_column.go index b27b8df8c9..9d350b4fc0 100644 --- a/beacon-chain/verification/data_column.go +++ b/beacon-chain/verification/data_column.go @@ -473,7 +473,7 @@ func (dv *RODataColumnsVerifier) SidecarKzgProofVerified() (err error) { return columnErrBuilder(errors.Wrap(err, "verify data column commitment")) } - dataColumnBatchKZGVerificationHistogram.Observe(float64(time.Since(startTime).Milliseconds())) + DataColumnBatchKZGVerificationHistogram.WithLabelValues("direct").Observe(float64(time.Since(startTime).Milliseconds())) return nil } diff --git a/beacon-chain/verification/metrics.go b/beacon-chain/verification/metrics.go index 3083c2291a..c135723bc0 100644 --- a/beacon-chain/verification/metrics.go +++ b/beacon-chain/verification/metrics.go @@ -27,11 +27,12 @@ var ( Buckets: []float64{5, 10, 50, 100, 150, 250, 500, 1000, 2000}, }, ) - dataColumnBatchKZGVerificationHistogram = promauto.NewHistogram( + DataColumnBatchKZGVerificationHistogram = promauto.NewHistogramVec( prometheus.HistogramOpts{ Name: "beacon_kzg_verification_data_column_batch_milliseconds", Help: "Captures the time taken for batched data column kzg verification.", Buckets: []float64{5, 10, 50, 100, 150, 250, 500, 1000, 2000}, }, + []string{"path"}, ) ) diff --git a/changelog/terence_gossip-kzg-latency.md b/changelog/terence_gossip-kzg-latency.md new file mode 100644 index 0000000000..d643658724 --- /dev/null +++ b/changelog/terence_gossip-kzg-latency.md @@ -0,0 +1,3 @@ +### Added + +- Record data column gossip KZG batch verification latency in both the pooled worker and fallback paths so the `beacon_kzg_verification_data_column_batch_milliseconds` histogram reflects gossip traffic, annotated with `path` labels to distinguish the sources. 
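[Editor's note on the metric pattern in the patch above] The changelog entry describes a single histogram shared by several verification call sites, distinguished by a `path` label. For readers unfamiliar with labeled Prometheus metrics, below is a minimal, self-contained Go sketch of that pattern. It is not the Prysm implementation: only the metric name, bucket layout, and the `path` label are taken from the diff; the `observeVerification` helper, the success-only recording rule being factored into a helper, and the explicit `MustRegister` in `main` (the diff uses `promauto` instead) are illustrative assumptions.

package main

import (
	"fmt"
	"time"

	"github.com/prometheus/client_golang/prometheus"
)

// One HistogramVec partitioned by a "path" label, so the pooled batch path,
// the unbatched fallback, and the direct verifier can all feed a single metric.
var kzgBatchVerification = prometheus.NewHistogramVec(
	prometheus.HistogramOpts{
		Name:    "beacon_kzg_verification_data_column_batch_milliseconds",
		Help:    "Captures the time taken for batched data column kzg verification.",
		Buckets: []float64{5, 10, 50, 100, 150, 250, 500, 1000, 2000},
	},
	[]string{"path"},
)

// observeVerification is a hypothetical helper: it times verify() and, only on
// success, records the elapsed milliseconds under the given path label,
// mirroring how the patch records a sample only when verification succeeds.
func observeVerification(path string, verify func() error) error {
	start := time.Now()
	if err := verify(); err != nil {
		return err
	}
	kzgBatchVerification.WithLabelValues(path).Observe(float64(time.Since(start).Milliseconds()))
	return nil
}

func main() {
	prometheus.MustRegister(kzgBatchVerification)
	if err := observeVerification("batch", func() error { return nil }); err != nil {
		fmt.Println("verification failed:", err)
		return
	}
	fmt.Println("recorded one sample on the batch path")
}

Partitioning one HistogramVec by `path`, rather than declaring separate histograms for the batch, fallback, and direct call sites, keeps the bucket layout identical across paths and lets dashboards compare the sources with a single query on the label.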
From 21bb6f5258f18865f46f61403808cdd382536809 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rados=C5=82aw=20Kapka?= <rkapka@wp.pl> Date: Mon, 17 Nov 2025 16:02:06 +0100 Subject: [PATCH 102/103] Remove validator cross client from e2e (#16025) --- changelog/radek_remove-cross-validator.md | 3 +++ testing/endtoend/component_handler_test.go | 12 ++++++++---- testing/endtoend/components/lighthouse_beacon.go | 4 ++++ testing/endtoend/components/lighthouse_validator.go | 4 ++++ testing/endtoend/endtoend_setup_test.go | 3 +-- testing/endtoend/mainnet_e2e_test.go | 2 +- 6 files changed, 21 insertions(+), 7 deletions(-) create mode 100644 changelog/radek_remove-cross-validator.md diff --git a/changelog/radek_remove-cross-validator.md b/changelog/radek_remove-cross-validator.md new file mode 100644 index 0000000000..78e9bc97fd --- /dev/null +++ b/changelog/radek_remove-cross-validator.md @@ -0,0 +1,3 @@ +### Removed + +- Remove validator cross-client from end-to-end tests. \ No newline at end of file diff --git a/testing/endtoend/component_handler_test.go b/testing/endtoend/component_handler_test.go index 48a67947f4..788d116135 100644 --- a/testing/endtoend/component_handler_test.go +++ b/testing/endtoend/component_handler_test.go @@ -261,11 +261,15 @@ func (c *componentHandler) printPIDs(logger func(string, ...any)) { msg += "This test PID: " + strconv.Itoa(os.Getpid()) + " (parent=" + strconv.Itoa(os.Getppid()) + ")\n" - // Beacon chain nodes - msg += fmt.Sprintf("Beacon chain nodes: %v\n", PIDsFromMultiComponentRunner(c.beaconNodes)) - // Validator nodes + msg += fmt.Sprintf("Prysm beacon chain nodes: %v\n", PIDsFromMultiComponentRunner(c.beaconNodes)) + msg += fmt.Sprintf("Prysm validators: %v\n", PIDsFromMultiComponentRunner(c.validatorNodes)) + if c.lighthouseBeaconNodes != nil { + msg += fmt.Sprintf("Lighthouse beacon chain nodes: %v\n", PIDsFromMultiComponentRunner(c.lighthouseBeaconNodes)) + } + if c.lighthouseValidatorNodes != nil { + msg += fmt.Sprintf("Lighthouse validators: %v\n", PIDsFromMultiComponentRunner(c.lighthouseValidatorNodes)) + } msg += fmt.Sprintf("Validators: %v\n", PIDsFromMultiComponentRunner(c.validatorNodes)) - // ETH1 nodes msg += fmt.Sprintf("ETH1 nodes: %v\n", PIDsFromMultiComponentRunner(c.eth1Nodes)) logger(msg) diff --git a/testing/endtoend/components/lighthouse_beacon.go b/testing/endtoend/components/lighthouse_beacon.go index 6e3be4dc93..8ce88548b6 100644 --- a/testing/endtoend/components/lighthouse_beacon.go +++ b/testing/endtoend/components/lighthouse_beacon.go @@ -249,6 +249,10 @@ func (node *LighthouseBeaconNode) Stop() error { return node.cmd.Process.Kill() } +func (node *LighthouseBeaconNode) UnderlyingProcess() *os.Process { + return node.cmd.Process +} + func (node *LighthouseBeaconNode) createTestnetDir(ctx context.Context, index int) (string, error) { testNetDir := e2e.TestParams.TestPath + fmt.Sprintf("/lighthouse-testnet-%d", index) configPath := filepath.Join(testNetDir, "config.yaml") diff --git a/testing/endtoend/components/lighthouse_validator.go b/testing/endtoend/components/lighthouse_validator.go index da9e7054ce..c1363c901e 100644 --- a/testing/endtoend/components/lighthouse_validator.go +++ b/testing/endtoend/components/lighthouse_validator.go @@ -240,6 +240,10 @@ func (v *LighthouseValidatorNode) Stop() error { return v.cmd.Process.Kill() } +func (v *LighthouseValidatorNode) UnderlyingProcess() *os.Process { + return v.cmd.Process +} + var _ types.ComponentRunner = &KeystoreGenerator{} type KeystoreGenerator struct { diff --git 
index 174c867e48..1ebc9b92aa 100644
--- a/testing/endtoend/endtoend_setup_test.go
+++ b/testing/endtoend/endtoend_setup_test.go
@@ -20,7 +20,6 @@ func e2eMinimal(t *testing.T, cfg *params.BeaconChainConfig, cfgo ...types.E2ECo
 	require.NoError(t, params.SetActive(cfg))
 	require.NoError(t, e2eParams.Init(t, e2eParams.StandardBeaconCount))
 
-	// Run for 12 epochs if not in long-running to confirm long-running has no issues.
 	var err error
 	epochsToRun := 16
 	epochStr, longRunning := os.LookupEnv("E2E_EPOCHS")
@@ -103,7 +102,7 @@ func e2eMainnet(t *testing.T, usePrysmSh, useMultiClient bool, cfg *params.Beaco
 	} else {
 		require.NoError(t, e2eParams.Init(t, e2eParams.StandardBeaconCount))
 	}
-	// Run for 10 epochs if not in long-running to confirm long-running has no issues.
+
 	var err error
 	epochsToRun := 16
 	epochStr, longRunning := os.LookupEnv("E2E_EPOCHS")
diff --git a/testing/endtoend/mainnet_e2e_test.go b/testing/endtoend/mainnet_e2e_test.go
index 6b460e85d6..aaf1de08de 100644
--- a/testing/endtoend/mainnet_e2e_test.go
+++ b/testing/endtoend/mainnet_e2e_test.go
@@ -15,5 +15,5 @@ func TestEndToEnd_MainnetConfig_ValidatorAtCurrentRelease(t *testing.T) {
 }
 
 func TestEndToEnd_MainnetConfig_MultiClient(t *testing.T) {
-	e2eMainnet(t, false, true, types.InitForkCfg(version.Bellatrix, version.Electra, params.E2EMainnetTestConfig()), types.WithValidatorCrossClient()).run()
+	e2eMainnet(t, false, true, types.InitForkCfg(version.Bellatrix, version.Electra, params.E2EMainnetTestConfig())).run()
 }

From 35c1ab5e887bc5f1b2620cb9b9c1099f9ffdef98 Mon Sep 17 00:00:00 2001
From: Chris Berry <bez625@gmail.com>
Date: Mon, 17 Nov 2025 16:44:03 +0000
Subject: [PATCH 103/103] Downgrade log level for all validator indices. (#15998)

* Update logging behaviour for updated fee recipient.

* Updated changelog.
* Display validator indices only on TRACE

* Fix tests

---------

Co-authored-by: Manu NALEPA <enalepa@offchainlabs.com>
---
 beacon-chain/rpc/eth/validator/BUILD.bazel     |  1 +
 beacon-chain/rpc/eth/validator/handlers.go     | 11 ++++++++---
 beacon-chain/rpc/eth/validator/handlers_test.go |  3 +++
 .../rpc/prysm/v1alpha1/validator/proposer.go   | 16 ++++++++++++----
 .../prysm/v1alpha1/validator/proposer_test.go  | 13 ++++++++-----
 changelog/chris-downgrade-log-level.md         |  4 ++++
 6 files changed, 36 insertions(+), 12 deletions(-)
 create mode 100644 changelog/chris-downgrade-log-level.md

diff --git a/beacon-chain/rpc/eth/validator/BUILD.bazel b/beacon-chain/rpc/eth/validator/BUILD.bazel
index d0a0e9d044..31ea92adb5 100644
--- a/beacon-chain/rpc/eth/validator/BUILD.bazel
+++ b/beacon-chain/rpc/eth/validator/BUILD.bazel
@@ -99,6 +99,7 @@ go_test(
         "@com_github_ethereum_go_ethereum//common/hexutil:go_default_library",
         "@com_github_pkg_errors//:go_default_library",
         "@com_github_prysmaticlabs_go_bitfield//:go_default_library",
+        "@com_github_sirupsen_logrus//:go_default_library",
         "@com_github_sirupsen_logrus//hooks/test:go_default_library",
         "@org_uber_go_mock//gomock:go_default_library",
     ],
diff --git a/beacon-chain/rpc/eth/validator/handlers.go b/beacon-chain/rpc/eth/validator/handlers.go
index 2377a5fe9e..a382c7d386 100644
--- a/beacon-chain/rpc/eth/validator/handlers.go
+++ b/beacon-chain/rpc/eth/validator/handlers.go
@@ -835,12 +835,17 @@ func (s *Server) PrepareBeaconProposer(w http.ResponseWriter, r *http.Request) {
 		s.TrackedValidatorsCache.Set(val)
 		validatorIndices = append(validatorIndices, primitives.ValidatorIndex(validatorIndex))
 	}
+
 	if len(validatorIndices) == 0 {
 		return
 	}
-	log.WithFields(logrus.Fields{
-		"validatorIndices": validatorIndices,
-	}).Info("Updated fee recipient addresses")
+
+	log := log.WithField("validatorCount", len(validatorIndices))
+	if logrus.GetLevel() >= logrus.TraceLevel {
+		log = log.WithField("validatorIndices", validatorIndices)
+	}
+
+	log.Debug("Updated fee recipient addresses")
 }
 
 // GetAttesterDuties requests the beacon node to provide a set of attestation duties,
diff --git a/beacon-chain/rpc/eth/validator/handlers_test.go b/beacon-chain/rpc/eth/validator/handlers_test.go
index 51498b0c62..a0e3def4fa 100644
--- a/beacon-chain/rpc/eth/validator/handlers_test.go
+++ b/beacon-chain/rpc/eth/validator/handlers_test.go
@@ -44,6 +44,7 @@ import (
 	"github.com/OffchainLabs/prysm/v7/time/slots"
 	"github.com/ethereum/go-ethereum/common/hexutil"
 	"github.com/pkg/errors"
+	"github.com/sirupsen/logrus"
 	logTest "github.com/sirupsen/logrus/hooks/test"
 )
 
@@ -2854,6 +2855,8 @@ func TestPrepareBeaconProposer(t *testing.T) {
 
 func TestProposer_PrepareBeaconProposerOverlapping(t *testing.T) {
 	hook := logTest.NewGlobal()
+	logrus.SetLevel(logrus.DebugLevel)
+
 	db := dbutil.SetupDB(t)
 
 	// New validator
diff --git a/beacon-chain/rpc/prysm/v1alpha1/validator/proposer.go b/beacon-chain/rpc/prysm/v1alpha1/validator/proposer.go
index d3bb406952..764d80b0aa 100644
--- a/beacon-chain/rpc/prysm/v1alpha1/validator/proposer.go
+++ b/beacon-chain/rpc/prysm/v1alpha1/validator/proposer.go
@@ -547,11 +547,19 @@ func (vs *Server) PrepareBeaconProposer(
 		vs.TrackedValidatorsCache.Set(val)
 		validatorIndices = append(validatorIndices, r.ValidatorIndex)
 	}
-	if len(validatorIndices) != 0 {
-		log.WithFields(logrus.Fields{
-			"validatorCount": len(validatorIndices),
-		}).Debug("Updated fee recipient addresses for validator indices")
+
+	if len(validatorIndices) == 0 {
+		return &emptypb.Empty{}, nil
 	}
+
+	log := log.WithField("validatorCount", len(validatorIndices))
+	if logrus.GetLevel() >= logrus.TraceLevel {
+		log = log.WithField("validatorIndices", validatorIndices)
+	}
+
+	log.Debug("Updated fee recipient addresses")
+
 	return &emptypb.Empty{}, nil
 }
diff --git a/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_test.go b/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_test.go
index 11681e5ef8..6455882424 100644
--- a/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_test.go
+++ b/beacon-chain/rpc/prysm/v1alpha1/validator/proposer_test.go
@@ -53,6 +53,7 @@ import (
 	"github.com/ethereum/go-ethereum/common"
 	"github.com/ethereum/go-ethereum/common/hexutil"
 	"github.com/pkg/errors"
+	"github.com/sirupsen/logrus"
 	logTest "github.com/sirupsen/logrus/hooks/test"
 	"google.golang.org/grpc/codes"
 	"google.golang.org/grpc/status"
@@ -3162,6 +3163,8 @@ func TestProposer_PrepareBeaconProposer(t *testing.T) {
 
 func TestProposer_PrepareBeaconProposerOverlapping(t *testing.T) {
 	hook := logTest.NewGlobal()
+	logrus.SetLevel(logrus.DebugLevel)
+
 	db := dbutil.SetupDB(t)
 	ctx := t.Context()
 	proposerServer := &Server{
@@ -3178,13 +3181,13 @@ func TestProposer_PrepareBeaconProposerOverlapping(t *testing.T) {
 	}
 	_, err := proposerServer.PrepareBeaconProposer(ctx, req)
 	require.NoError(t, err)
-	require.LogsContain(t, hook, "Updated fee recipient addresses for validator indices")
+	require.LogsContain(t, hook, "Updated fee recipient addresses")
 
 	// Same validator
 	hook.Reset()
 	_, err = proposerServer.PrepareBeaconProposer(ctx, req)
 	require.NoError(t, err)
-	require.LogsContain(t, hook, "Updated fee recipient addresses for validator indices")
+	require.LogsContain(t, hook, "Updated fee recipient addresses")
 
 	// Same validator with different fee recipient
 	hook.Reset()
@@ -3196,7 +3199,7 @@ func TestProposer_PrepareBeaconProposerOverlapping(t *testing.T) {
 	}
 	_, err = proposerServer.PrepareBeaconProposer(ctx, req)
 	require.NoError(t, err)
-	require.LogsContain(t, hook, "Updated fee recipient addresses for validator indices")
+	require.LogsContain(t, hook, "Updated fee recipient addresses")
 
 	// More than one validator
 	hook.Reset()
@@ -3209,13 +3212,13 @@ func TestProposer_PrepareBeaconProposerOverlapping(t *testing.T) {
 	}
 	_, err = proposerServer.PrepareBeaconProposer(ctx, req)
 	require.NoError(t, err)
-	require.LogsContain(t, hook, "Updated fee recipient addresses for validator indices")
+	require.LogsContain(t, hook, "Updated fee recipient addresses")
 
 	// Same validators
 	hook.Reset()
 	_, err = proposerServer.PrepareBeaconProposer(ctx, req)
 	require.NoError(t, err)
-	require.LogsContain(t, hook, "Updated fee recipient addresses for validator indices")
+	require.LogsContain(t, hook, "Updated fee recipient addresses")
 }
 
 func BenchmarkServer_PrepareBeaconProposer(b *testing.B) {
diff --git a/changelog/chris-downgrade-log-level.md b/changelog/chris-downgrade-log-level.md
new file mode 100644
index 0000000000..8b3ad7f271
--- /dev/null
+++ b/changelog/chris-downgrade-log-level.md
@@ -0,0 +1,4 @@
+### Changed
+
+- Downgraded the "Updated fee recipient addresses" log in `PrepareBeaconProposer` from INFO to DEBUG.
+- The message now logs only the validator count at DEBUG level; the full list of validator indices is included only at TRACE level.
\ No newline at end of file
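The handlers.go and proposer.go hunks in this patch share one logging pattern: always emit a cheap summary field (the count) at DEBUG, and attach the potentially long validatorIndices slice only when the logger is configured at TRACE or more verbose. A minimal standalone sketch of that pattern with logrus follows; the helper name and the []uint64 index type are illustrative and not Prysm's actual signatures.

package main

import (
	"github.com/sirupsen/logrus"
)

// logFeeRecipientUpdate shows the level-gated field pattern: the count is
// always attached, the full index list only when TRACE output is enabled.
func logFeeRecipientUpdate(validatorIndices []uint64) {
	if len(validatorIndices) == 0 {
		return
	}

	log := logrus.WithField("validatorCount", len(validatorIndices))
	if logrus.GetLevel() >= logrus.TraceLevel {
		log = log.WithField("validatorIndices", validatorIndices)
	}
	log.Debug("Updated fee recipient addresses")
}

func main() {
	logrus.SetLevel(logrus.DebugLevel)
	logFeeRecipientUpdate([]uint64{1, 2, 3}) // logs only the count

	logrus.SetLevel(logrus.TraceLevel)
	logFeeRecipientUpdate([]uint64{1, 2, 3}) // also logs the indices
}

Because logrus orders levels from PanicLevel (lowest) to TraceLevel (highest), GetLevel() >= logrus.TraceLevel is true only when trace output is enabled, so the large field is built only when it would actually be printed; the Debug entry itself is still emitted at both DEBUG and TRACE.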